diff --git a/demo/browser.js b/demo/browser.js
index d6b4ceff..13fec371 100644
--- a/demo/browser.js
+++ b/demo/browser.js
@@ -27,20 +27,25 @@ const ui = {
fillPolygons: false,
useDepth: true,
console: true,
- maxFrames: 10,
+ maxFPSframes: 10,
modelsPreload: true,
modelsWarmup: true,
menuWidth: 0,
menuHeight: 0,
camera: {},
fps: [],
+ buffered: true,
+ bufferedFPSTarget: 24,
+ drawThread: null,
+ framesDraw: 0,
+ framesDetect: 0,
};
// global variables
let menu;
let menuFX;
let worker;
-let timeStamp;
+let lastDetectedResult = {};
// helper function: translates json to human readable string
function str(...msg) {
@@ -65,24 +70,24 @@ const status = (msg) => {
};
// draws processed results and starts processing of a next frame
-function drawResults(input, result, canvas) {
+async function drawResults(input) {
+ const result = lastDetectedResult;
+ const canvas = document.getElementById('canvas');
+
// update fps data
- const elapsed = performance.now() - timeStamp;
- ui.fps.push(1000 / elapsed);
- if (ui.fps.length > ui.maxFrames) ui.fps.shift();
+ // const elapsed = performance.now() - timeStamp;
+ ui.fps.push(1000 / result.performance.total);
+ if (ui.fps.length > ui.maxFPSframes) ui.fps.shift();
// enable for continous performance monitoring
// console.log(result.performance);
- // immediate loop before we even draw results, but limit frame rate to 30
- if (input.srcObject) {
- // eslint-disable-next-line no-use-before-define
- if (elapsed > 33) requestAnimationFrame(() => runHumanDetect(input, canvas));
- // eslint-disable-next-line no-use-before-define
- else setTimeout(() => runHumanDetect(input, canvas), 33 - elapsed);
- }
// draw fps chart
- menu.updateChart('FPS', ui.fps);
+ await menu.updateChart('FPS', ui.fps);
+
+ // get updated canvas
+ result.canvas = await human.image(input, userConfig);
+
// draw image from video
const ctx = canvas.getContext('2d');
ctx.fillStyle = ui.baseBackground;
@@ -95,10 +100,10 @@ function drawResults(input, result, canvas) {
ctx.drawImage(input, 0, 0, input.width, input.height, 0, 0, canvas.width, canvas.height);
}
// draw all results
- draw.face(result.face, canvas, ui, human.facemesh.triangulation);
- draw.body(result.body, canvas, ui);
- draw.hand(result.hand, canvas, ui);
- draw.gesture(result.gesture, canvas, ui);
+ await draw.face(result.face, canvas, ui, human.facemesh.triangulation);
+ await draw.body(result.body, canvas, ui);
+ await draw.hand(result.hand, canvas, ui);
+ await draw.gesture(result.gesture, canvas, ui);
// update log
const engine = human.tf.engine();
const gpu = engine.backendInstance ? `gpu: ${(engine.backendInstance.numBytesInGPU ? engine.backendInstance.numBytesInGPU : 0).toLocaleString()} bytes` : '';
@@ -112,6 +117,16 @@ function drawResults(input, result, canvas) {
performance: ${str(result.performance)} FPS:${avg}
${warning}
`;
+
+ ui.framesDraw++;
+ ui.lastFrame = performance.now();
+  // when buffered, start a draw loop at the target frame rate; the effective rate may be lower since JS is single-threaded
+ if (ui.buffered && !ui.drawThread) ui.drawThread = setInterval(() => drawResults(input, canvas), 1000 / ui.bufferedFPSTarget);
+ // stop buffering
+ if (!ui.buffered && ui.drawThread) {
+ clearTimeout(ui.drawThread);
+ ui.drawThread = null;
+ }
}
// setup webcam
@@ -197,7 +212,11 @@ function webWorker(input, image, canvas) {
log('warning: image will not show filter effects');
worker.warned = true;
}
- drawResults(input, msg.data.result, canvas);
+ lastDetectedResult = msg.data.result;
+ ui.framesDetect++;
+ if (!ui.drawThread) drawResults(input);
+ // eslint-disable-next-line no-use-before-define
+ requestAnimationFrame(() => runHumanDetect(input, canvas));
});
}
// pass image data as arraybuffer to worker by reference to avoid copy
@@ -206,14 +225,19 @@ function webWorker(input, image, canvas) {
// main processing function when input is webcam, can use direct invocation or web worker
function runHumanDetect(input, canvas) {
- timeStamp = performance.now();
// if live video
const live = input.srcObject && (input.srcObject.getVideoTracks()[0].readyState === 'live') && (input.readyState > 2) && (!input.paused);
if (!live && input.srcObject) {
+ // stop ui refresh
+ if (ui.drawThread) clearTimeout(ui.drawThread);
+ ui.drawThread = null;
// if we want to continue and camera not ready, retry in 0.5sec, else just give up
if (input.paused) log('camera paused');
else if ((input.srcObject.getVideoTracks()[0].readyState === 'live') && (input.readyState <= 2)) setTimeout(() => runHumanDetect(input, canvas), 500);
else log(`camera not ready: track state: ${input.srcObject?.getVideoTracks()[0].readyState} stream state: ${input.readyState}`);
+ clearTimeout(ui.drawThread);
+ ui.drawThread = null;
+ log('frame statistics: drawn:', ui.framesDraw, 'detected:', ui.framesDetect);
return;
}
status('');
@@ -228,14 +252,18 @@ function runHumanDetect(input, canvas) {
} else {
human.detect(input, userConfig).then((result) => {
if (result.error) log(result.error);
- else drawResults(input, result, canvas);
+ else {
+ lastDetectedResult = result;
+ if (!ui.drawThread) drawResults(input);
+ ui.framesDetect++;
+ requestAnimationFrame(() => runHumanDetect(input, canvas));
+ }
});
}
}
// main processing function when input is image, can use direct invocation or web worker
async function processImage(input) {
- timeStamp = performance.now();
return new Promise((resolve) => {
const image = new Image();
image.onload = async () => {
@@ -356,6 +384,7 @@ function setupMenu() {
menuFX = new Menu(document.body, '', { top: '1rem', right: '18rem' });
menuFX.addLabel('ui options');
+ menuFX.addBool('buffered output', ui, 'buffered', (val) => ui.buffered = val);
menuFX.addBool('crop & scale', ui, 'crop', () => setupCamera());
menuFX.addBool('camera front/back', ui, 'facing', () => setupCamera());
menuFX.addBool('use 3D depth', ui, 'useDepth');
@@ -387,7 +416,7 @@ async function main() {
log('Human: demo starting ...');
setupMenu();
document.getElementById('log').innerText = `Human: version ${human.version} TensorFlow/JS: version ${human.tf.version_core}`;
- human.tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true);
+ // human.tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true);
// this is not required, just pre-loads all models
if (ui.modelsPreload) {
status('loading');
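
The demo changes above decouple detection from rendering: the detect loop stores its latest output in lastDetectedResult and reschedules itself via requestAnimationFrame, while a separate setInterval-driven draw loop repaints at ui.bufferedFPSTarget and is torn down when buffering is disabled. A minimal standalone sketch of that pattern, with illustrative names (detectLoop, startDrawLoop, render) that are not part of the demo:

let latestResult = null;  // most recent detection result, shared between the two loops
let drawTimer = null;     // interval handle for the buffered draw loop
const targetFPS = 24;     // draw-loop target frame rate

async function detectLoop(video) {
  latestResult = await human.detect(video);        // detection runs as fast as the models allow
  requestAnimationFrame(() => detectLoop(video));  // schedule the next detection pass
}

function startDrawLoop(canvas) {
  if (drawTimer) return;                            // draw loop already running
  drawTimer = setInterval(() => {
    if (latestResult) render(latestResult, canvas); // render() stands in for the demo's draw calls
  }, 1000 / targetFPS);
}

function stopDrawLoop() {
  if (drawTimer) clearInterval(drawTimer);
  drawTimer = null;
}
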
diff --git a/demo/draw.js b/demo/draw.js
index 6f323f00..60ca6a69 100644
--- a/demo/draw.js
+++ b/demo/draw.js
@@ -98,19 +98,27 @@ async function drawFace(result, canvas, ui, triangulation) {
}
}
+const lastDrawnPose = [];
async function drawBody(result, canvas, ui) {
if (!result) return;
const ctx = canvas.getContext('2d');
ctx.lineJoin = 'round';
- for (const pose of result) {
+ for (const i in result) {
+ if (!lastDrawnPose[i] && ui.buffered) lastDrawnPose[i] = { ...result[i] };
ctx.fillStyle = ui.baseColor;
ctx.strokeStyle = ui.baseColor;
ctx.font = ui.baseFont;
ctx.lineWidth = ui.baseLineWidth;
if (ui.drawPoints) {
- for (const point of pose.keypoints) {
+ for (const pt in result[i].keypoints) {
ctx.beginPath();
- ctx.arc(point.position.x, point.position.y, 2, 0, 2 * Math.PI);
+ if (ui.buffered) {
+ lastDrawnPose[i].keypoints[pt].position.x = (lastDrawnPose[i].keypoints[pt].position.x + result[i].keypoints[pt].position.x) / 2;
+ lastDrawnPose[i].keypoints[pt].position.y = (lastDrawnPose[i].keypoints[pt].position.y + result[i].keypoints[pt].position.y) / 2;
+ ctx.arc(lastDrawnPose[i].keypoints[pt].position.x, lastDrawnPose[i].keypoints[pt].position.y, 2, 0, 2 * Math.PI);
+ } else {
+ ctx.arc(result[i].keypoints[pt].position.x, result[i].keypoints[pt].position.y, 2, 0, 2 * Math.PI);
+ }
ctx.fill();
}
}
@@ -118,46 +126,46 @@ async function drawBody(result, canvas, ui) {
const path = new Path2D();
let part;
// torso
- part = pose.keypoints.find((a) => a.part === 'leftShoulder');
+ part = result[i].keypoints.find((a) => a.part === 'leftShoulder');
path.moveTo(part.position.x, part.position.y);
- part = pose.keypoints.find((a) => a.part === 'rightShoulder');
+ part = result[i].keypoints.find((a) => a.part === 'rightShoulder');
path.lineTo(part.position.x, part.position.y);
- part = pose.keypoints.find((a) => a.part === 'rightHip');
+ part = result[i].keypoints.find((a) => a.part === 'rightHip');
path.lineTo(part.position.x, part.position.y);
- part = pose.keypoints.find((a) => a.part === 'leftHip');
+ part = result[i].keypoints.find((a) => a.part === 'leftHip');
path.lineTo(part.position.x, part.position.y);
- part = pose.keypoints.find((a) => a.part === 'leftShoulder');
+ part = result[i].keypoints.find((a) => a.part === 'leftShoulder');
path.lineTo(part.position.x, part.position.y);
// legs
- part = pose.keypoints.find((a) => a.part === 'leftHip');
+ part = result[i].keypoints.find((a) => a.part === 'leftHip');
path.moveTo(part.position.x, part.position.y);
- part = pose.keypoints.find((a) => a.part === 'leftKnee');
+ part = result[i].keypoints.find((a) => a.part === 'leftKnee');
path.lineTo(part.position.x, part.position.y);
- part = pose.keypoints.find((a) => a.part === 'leftAnkle');
+ part = result[i].keypoints.find((a) => a.part === 'leftAnkle');
path.lineTo(part.position.x, part.position.y);
- part = pose.keypoints.find((a) => a.part === 'rightHip');
+ part = result[i].keypoints.find((a) => a.part === 'rightHip');
path.moveTo(part.position.x, part.position.y);
- part = pose.keypoints.find((a) => a.part === 'rightKnee');
+ part = result[i].keypoints.find((a) => a.part === 'rightKnee');
path.lineTo(part.position.x, part.position.y);
- part = pose.keypoints.find((a) => a.part === 'rightAnkle');
+ part = result[i].keypoints.find((a) => a.part === 'rightAnkle');
path.lineTo(part.position.x, part.position.y);
// arms
- part = pose.keypoints.find((a) => a.part === 'rightShoulder');
+ part = result[i].keypoints.find((a) => a.part === 'rightShoulder');
path.moveTo(part.position.x, part.position.y);
- part = pose.keypoints.find((a) => a.part === 'leftShoulder');
+ part = result[i].keypoints.find((a) => a.part === 'leftShoulder');
path.lineTo(part.position.x, part.position.y);
- part = pose.keypoints.find((a) => a.part === 'leftElbow');
+ part = result[i].keypoints.find((a) => a.part === 'leftElbow');
path.lineTo(part.position.x, part.position.y);
- part = pose.keypoints.find((a) => a.part === 'leftWrist');
+ part = result[i].keypoints.find((a) => a.part === 'leftWrist');
path.lineTo(part.position.x, part.position.y);
// arms
- part = pose.keypoints.find((a) => a.part === 'leftShoulder');
+ part = result[i].keypoints.find((a) => a.part === 'leftShoulder');
path.moveTo(part.position.x, part.position.y);
- part = pose.keypoints.find((a) => a.part === 'rightShoulder');
+ part = result[i].keypoints.find((a) => a.part === 'rightShoulder');
path.lineTo(part.position.x, part.position.y);
- part = pose.keypoints.find((a) => a.part === 'rightElbow');
+ part = result[i].keypoints.find((a) => a.part === 'rightElbow');
path.lineTo(part.position.x, part.position.y);
- part = pose.keypoints.find((a) => a.part === 'rightWrist');
+ part = result[i].keypoints.find((a) => a.part === 'rightWrist');
path.lineTo(part.position.x, part.position.y);
// draw all
ctx.stroke(path);
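
When buffered output is enabled, drawBody smooths each keypoint by averaging the last drawn position with the newly detected one, i.e. a running average with weight 1/2 per axis. A compact sketch of that smoothing step (smoothPoint is an illustrative helper, not part of the demo):

// running average with alpha = 0.5: halves the distance to the new detection each frame
function smoothPoint(last, current) {
  return {
    x: (last.x + current.x) / 2,
    y: (last.y + current.y) / 2,
  };
}

// example: a last drawn point at (100, 100) and a new detection at (120, 90) is drawn at (110, 95)
const drawn = smoothPoint({ x: 100, y: 100 }, { x: 120, y: 90 });
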
diff --git a/dist/demo-browser-index.js b/dist/demo-browser-index.js
index e6bf1c82..9082908e 100644
--- a/dist/demo-browser-index.js
+++ b/dist/demo-browser-index.js
@@ -98001,7 +98001,7 @@ class Human {
}
async load(userConfig2) {
this.state = "load";
- const timeStamp2 = now2();
+ const timeStamp = now2();
if (userConfig2)
this.config = mergeDeep(this.config, userConfig2);
if (this.firstRun) {
@@ -98041,12 +98041,12 @@ class Human {
if (this.config.hand.enabled && !this.models.handpose)
this.models.handpose = await handpose.load(this.config.hand);
}
- const current = Math.trunc(now2() - timeStamp2);
+ const current = Math.trunc(now2() - timeStamp);
if (current > (this.perf.load || 0))
this.perf.load = current;
}
async checkBackend(force) {
- const timeStamp2 = now2();
+ const timeStamp = now2();
if (this.config.backend && this.config.backend !== "" && force || tf.getBackend() !== this.config.backend) {
this.state = "backend";
this.log("setting backend:", this.config.backend);
@@ -98068,20 +98068,20 @@ class Human {
}
await tf.ready();
}
- const current = Math.trunc(now2() - timeStamp2);
+ const current = Math.trunc(now2() - timeStamp);
if (current > (this.perf.backend || 0))
this.perf.backend = current;
}
async detectFace(input) {
- let timeStamp2;
+ let timeStamp;
let ageRes;
let genderRes;
let emotionRes;
const faceRes = [];
this.state = "run:face";
- timeStamp2 = now2();
+ timeStamp = now2();
const faces = await this.models.facemesh.estimateFaces(input, this.config.face);
- this.perf.face = Math.trunc(now2() - timeStamp2);
+ this.perf.face = Math.trunc(now2() - timeStamp);
for (const face2 of faces) {
this.analyze("Get Face");
if (!face2.image || face2.image.isDisposedInternal) {
@@ -98093,27 +98093,27 @@ class Human {
ageRes = this.config.face.age.enabled ? age.predict(face2.image, this.config) : {};
} else {
this.state = "run:age";
- timeStamp2 = now2();
+ timeStamp = now2();
ageRes = this.config.face.age.enabled ? await age.predict(face2.image, this.config) : {};
- this.perf.age = Math.trunc(now2() - timeStamp2);
+ this.perf.age = Math.trunc(now2() - timeStamp);
}
this.analyze("Start Gender:");
if (this.config.async) {
genderRes = this.config.face.gender.enabled ? gender.predict(face2.image, this.config) : {};
} else {
this.state = "run:gender";
- timeStamp2 = now2();
+ timeStamp = now2();
genderRes = this.config.face.gender.enabled ? await gender.predict(face2.image, this.config) : {};
- this.perf.gender = Math.trunc(now2() - timeStamp2);
+ this.perf.gender = Math.trunc(now2() - timeStamp);
}
this.analyze("Start Emotion:");
if (this.config.async) {
emotionRes = this.config.face.emotion.enabled ? emotion.predict(face2.image, this.config) : {};
} else {
this.state = "run:emotion";
- timeStamp2 = now2();
+ timeStamp = now2();
emotionRes = this.config.face.emotion.enabled ? await emotion.predict(face2.image, this.config) : {};
- this.perf.emotion = Math.trunc(now2() - timeStamp2);
+ this.perf.emotion = Math.trunc(now2() - timeStamp);
}
this.analyze("End Emotion:");
if (this.config.async) {
@@ -98148,9 +98148,16 @@ class Human {
}
return faceRes;
}
+ async image(input, userConfig2 = {}) {
+ this.state = "image";
+ this.config = mergeDeep(this.config, userConfig2);
+ const process3 = image.process(input, this.config);
+ process3.tensor.dispose();
+ return process3.canvas;
+ }
async detect(input, userConfig2 = {}) {
this.state = "config";
- let timeStamp2;
+ let timeStamp;
this.config = mergeDeep(this.config, userConfig2);
if (!this.config.videoOptimized)
this.config = mergeDeep(this.config, disableSkipFrames);
@@ -98170,9 +98177,9 @@ class Human {
if (this.config.scoped)
tf.engine().startScope();
this.analyze("Start Scope:");
- timeStamp2 = now2();
+ timeStamp = now2();
const process3 = image.process(input, this.config);
- this.perf.image = Math.trunc(now2() - timeStamp2);
+ this.perf.image = Math.trunc(now2() - timeStamp);
this.analyze("Get Image:");
if (this.config.async) {
faceRes = this.config.face.enabled ? this.detectFace(process3.tensor) : [];
@@ -98180,9 +98187,9 @@ class Human {
delete this.perf.face;
} else {
this.state = "run:face";
- timeStamp2 = now2();
+ timeStamp = now2();
faceRes = this.config.face.enabled ? await this.detectFace(process3.tensor) : [];
- this.perf.face = Math.trunc(now2() - timeStamp2);
+ this.perf.face = Math.trunc(now2() - timeStamp);
}
this.analyze("Start Body:");
if (this.config.async) {
@@ -98191,9 +98198,9 @@ class Human {
delete this.perf.body;
} else {
this.state = "run:body";
- timeStamp2 = now2();
+ timeStamp = now2();
poseRes = this.config.body.enabled ? await this.models.posenet.estimatePoses(process3.tensor, this.config) : [];
- this.perf.body = Math.trunc(now2() - timeStamp2);
+ this.perf.body = Math.trunc(now2() - timeStamp);
}
this.analyze("End Body:");
this.analyze("Start Hand:");
@@ -98203,9 +98210,9 @@ class Human {
delete this.perf.hand;
} else {
this.state = "run:hand";
- timeStamp2 = now2();
+ timeStamp = now2();
handRes = this.config.hand.enabled ? await this.models.handpose.estimateHands(process3.tensor, this.config.hand) : [];
- this.perf.hand = Math.trunc(now2() - timeStamp2);
+ this.perf.hand = Math.trunc(now2() - timeStamp);
}
if (this.config.async) {
[faceRes, poseRes, handRes] = await Promise.all([faceRes, poseRes, handRes]);
@@ -98216,10 +98223,10 @@ class Human {
this.analyze("End Scope:");
let gestureRes = [];
if (this.config.gesture.enabled) {
- timeStamp2 = now2();
+ timeStamp = now2();
gestureRes = {face: gesture.face(faceRes), body: gesture.body(poseRes), hand: gesture.hand(handRes)};
if (!this.config.async)
- this.perf.gesture = Math.trunc(now2() - timeStamp2);
+ this.perf.gesture = Math.trunc(now2() - timeStamp);
else if (this.perf.gesture)
delete this.perf.gesture;
}
@@ -98337,63 +98344,72 @@ async function drawFace(result, canvas, ui2, triangulation) {
}
}
}
+const lastDrawnPose = [];
async function drawBody(result, canvas, ui2) {
if (!result)
return;
const ctx = canvas.getContext("2d");
ctx.lineJoin = "round";
- for (const pose of result) {
+ for (const i in result) {
+ if (!lastDrawnPose[i] && ui2.buffered)
+ lastDrawnPose[i] = {...result[i]};
ctx.fillStyle = ui2.baseColor;
ctx.strokeStyle = ui2.baseColor;
ctx.font = ui2.baseFont;
ctx.lineWidth = ui2.baseLineWidth;
if (ui2.drawPoints) {
- for (const point of pose.keypoints) {
+ for (const pt in result[i].keypoints) {
ctx.beginPath();
- ctx.arc(point.position.x, point.position.y, 2, 0, 2 * Math.PI);
+ if (ui2.buffered) {
+ lastDrawnPose[i].keypoints[pt].position.x = (lastDrawnPose[i].keypoints[pt].position.x + result[i].keypoints[pt].position.x) / 2;
+ lastDrawnPose[i].keypoints[pt].position.y = (lastDrawnPose[i].keypoints[pt].position.y + result[i].keypoints[pt].position.y) / 2;
+ ctx.arc(lastDrawnPose[i].keypoints[pt].position.x, lastDrawnPose[i].keypoints[pt].position.y, 2, 0, 2 * Math.PI);
+ } else {
+ ctx.arc(result[i].keypoints[pt].position.x, result[i].keypoints[pt].position.y, 2, 0, 2 * Math.PI);
+ }
ctx.fill();
}
}
if (ui2.drawPolygons) {
const path = new Path2D();
let part;
- part = pose.keypoints.find((a) => a.part === "leftShoulder");
+ part = result[i].keypoints.find((a) => a.part === "leftShoulder");
path.moveTo(part.position.x, part.position.y);
- part = pose.keypoints.find((a) => a.part === "rightShoulder");
+ part = result[i].keypoints.find((a) => a.part === "rightShoulder");
path.lineTo(part.position.x, part.position.y);
- part = pose.keypoints.find((a) => a.part === "rightHip");
+ part = result[i].keypoints.find((a) => a.part === "rightHip");
path.lineTo(part.position.x, part.position.y);
- part = pose.keypoints.find((a) => a.part === "leftHip");
+ part = result[i].keypoints.find((a) => a.part === "leftHip");
path.lineTo(part.position.x, part.position.y);
- part = pose.keypoints.find((a) => a.part === "leftShoulder");
+ part = result[i].keypoints.find((a) => a.part === "leftShoulder");
path.lineTo(part.position.x, part.position.y);
- part = pose.keypoints.find((a) => a.part === "leftHip");
+ part = result[i].keypoints.find((a) => a.part === "leftHip");
path.moveTo(part.position.x, part.position.y);
- part = pose.keypoints.find((a) => a.part === "leftKnee");
+ part = result[i].keypoints.find((a) => a.part === "leftKnee");
path.lineTo(part.position.x, part.position.y);
- part = pose.keypoints.find((a) => a.part === "leftAnkle");
+ part = result[i].keypoints.find((a) => a.part === "leftAnkle");
path.lineTo(part.position.x, part.position.y);
- part = pose.keypoints.find((a) => a.part === "rightHip");
+ part = result[i].keypoints.find((a) => a.part === "rightHip");
path.moveTo(part.position.x, part.position.y);
- part = pose.keypoints.find((a) => a.part === "rightKnee");
+ part = result[i].keypoints.find((a) => a.part === "rightKnee");
path.lineTo(part.position.x, part.position.y);
- part = pose.keypoints.find((a) => a.part === "rightAnkle");
+ part = result[i].keypoints.find((a) => a.part === "rightAnkle");
path.lineTo(part.position.x, part.position.y);
- part = pose.keypoints.find((a) => a.part === "rightShoulder");
+ part = result[i].keypoints.find((a) => a.part === "rightShoulder");
path.moveTo(part.position.x, part.position.y);
- part = pose.keypoints.find((a) => a.part === "leftShoulder");
+ part = result[i].keypoints.find((a) => a.part === "leftShoulder");
path.lineTo(part.position.x, part.position.y);
- part = pose.keypoints.find((a) => a.part === "leftElbow");
+ part = result[i].keypoints.find((a) => a.part === "leftElbow");
path.lineTo(part.position.x, part.position.y);
- part = pose.keypoints.find((a) => a.part === "leftWrist");
+ part = result[i].keypoints.find((a) => a.part === "leftWrist");
path.lineTo(part.position.x, part.position.y);
- part = pose.keypoints.find((a) => a.part === "leftShoulder");
+ part = result[i].keypoints.find((a) => a.part === "leftShoulder");
path.moveTo(part.position.x, part.position.y);
- part = pose.keypoints.find((a) => a.part === "rightShoulder");
+ part = result[i].keypoints.find((a) => a.part === "rightShoulder");
path.lineTo(part.position.x, part.position.y);
- part = pose.keypoints.find((a) => a.part === "rightElbow");
+ part = result[i].keypoints.find((a) => a.part === "rightElbow");
path.lineTo(part.position.x, part.position.y);
- part = pose.keypoints.find((a) => a.part === "rightWrist");
+ part = result[i].keypoints.find((a) => a.part === "rightWrist");
path.lineTo(part.position.x, part.position.y);
ctx.stroke(path);
}
@@ -98791,18 +98807,23 @@ const ui = {
fillPolygons: false,
useDepth: true,
console: true,
- maxFrames: 10,
+ maxFPSframes: 10,
modelsPreload: true,
modelsWarmup: true,
menuWidth: 0,
menuHeight: 0,
camera: {},
- fps: []
+ fps: [],
+ buffered: true,
+ bufferedFPSTarget: 24,
+ drawThread: null,
+ framesDraw: 0,
+ framesDetect: 0
};
let menu2;
let menuFX;
let worker;
-let timeStamp;
+let lastDetectedResult = {};
function str(...msg) {
if (!Array.isArray(msg))
return msg;
@@ -98822,18 +98843,14 @@ const log2 = (...msg) => {
const status = (msg) => {
document.getElementById("status").innerText = msg;
};
-function drawResults(input, result, canvas) {
- const elapsed = performance.now() - timeStamp;
- ui.fps.push(1e3 / elapsed);
- if (ui.fps.length > ui.maxFrames)
+async function drawResults(input) {
+ const result = lastDetectedResult;
+ const canvas = document.getElementById("canvas");
+ ui.fps.push(1e3 / result.performance.total);
+ if (ui.fps.length > ui.maxFPSframes)
ui.fps.shift();
- if (input.srcObject) {
- if (elapsed > 33)
- requestAnimationFrame(() => runHumanDetect(input, canvas));
- else
- setTimeout(() => runHumanDetect(input, canvas), 33 - elapsed);
- }
- menu2.updateChart("FPS", ui.fps);
+ await menu2.updateChart("FPS", ui.fps);
+ result.canvas = await human.image(input, userConfig);
const ctx = canvas.getContext("2d");
ctx.fillStyle = ui.baseBackground;
ctx.fillRect(0, 0, canvas.width, canvas.height);
@@ -98846,10 +98863,10 @@ function drawResults(input, result, canvas) {
} else {
ctx.drawImage(input, 0, 0, input.width, input.height, 0, 0, canvas.width, canvas.height);
}
- draw_default.face(result.face, canvas, ui, human.facemesh.triangulation);
- draw_default.body(result.body, canvas, ui);
- draw_default.hand(result.hand, canvas, ui);
- draw_default.gesture(result.gesture, canvas, ui);
+ await draw_default.face(result.face, canvas, ui, human.facemesh.triangulation);
+ await draw_default.body(result.body, canvas, ui);
+ await draw_default.hand(result.hand, canvas, ui);
+ await draw_default.gesture(result.gesture, canvas, ui);
const engine = human.tf.engine();
const gpu = engine.backendInstance ? `gpu: ${(engine.backendInstance.numBytesInGPU ? engine.backendInstance.numBytesInGPU : 0).toLocaleString()} bytes` : "";
const memory = `system: ${engine.state.numBytes.toLocaleString()} bytes ${gpu} | tensors: ${engine.state.numTensors.toLocaleString()}`;
@@ -98862,6 +98879,14 @@ function drawResults(input, result, canvas) {
performance: ${str(result.performance)} FPS:${avg}
${warning}
`;
+ ui.framesDraw++;
+ ui.lastFrame = performance.now();
+ if (ui.buffered && !ui.drawThread)
+ ui.drawThread = setInterval(() => drawResults(input, canvas), 1e3 / ui.bufferedFPSTarget);
+ if (!ui.buffered && ui.drawThread) {
+ clearTimeout(ui.drawThread);
+ ui.drawThread = null;
+ }
}
async function setupCamera() {
var _a;
@@ -98944,22 +98969,31 @@ function webWorker(input, image2, canvas) {
log2("warning: image will not show filter effects");
worker.warned = true;
}
- drawResults(input, msg.data.result, canvas);
+ lastDetectedResult = msg.data.result;
+ ui.framesDetect++;
+ if (!ui.drawThread)
+ drawResults(input);
+ requestAnimationFrame(() => runHumanDetect(input, canvas));
});
}
worker.postMessage({image: image2.data.buffer, width: canvas.width, height: canvas.height}, [image2.data.buffer]);
}
function runHumanDetect(input, canvas) {
var _a;
- timeStamp = performance.now();
const live = input.srcObject && input.srcObject.getVideoTracks()[0].readyState === "live" && input.readyState > 2 && !input.paused;
if (!live && input.srcObject) {
+ if (ui.drawThread)
+ clearTimeout(ui.drawThread);
+ ui.drawThread = null;
if (input.paused)
log2("camera paused");
else if (input.srcObject.getVideoTracks()[0].readyState === "live" && input.readyState <= 2)
setTimeout(() => runHumanDetect(input, canvas), 500);
else
log2(`camera not ready: track state: ${(_a = input.srcObject) == null ? void 0 : _a.getVideoTracks()[0].readyState} stream state: ${input.readyState}`);
+ clearTimeout(ui.drawThread);
+ ui.drawThread = null;
+ log2("frame statistics: drawn:", ui.framesDraw, "detected:", ui.framesDetect);
return;
}
status("");
@@ -98973,13 +99007,17 @@ function runHumanDetect(input, canvas) {
human.detect(input, userConfig).then((result) => {
if (result.error)
log2(result.error);
- else
- drawResults(input, result, canvas);
+ else {
+ lastDetectedResult = result;
+ if (!ui.drawThread)
+ drawResults(input);
+ ui.framesDetect++;
+ requestAnimationFrame(() => runHumanDetect(input, canvas));
+ }
});
}
}
async function processImage(input) {
- timeStamp = performance.now();
return new Promise((resolve) => {
const image2 = new Image();
image2.onload = async () => {
@@ -99092,6 +99130,7 @@ function setupMenu() {
menu2.addChart("FPS", "FPS");
menuFX = new menu_default(document.body, "", {top: "1rem", right: "18rem"});
menuFX.addLabel("ui options");
+ menuFX.addBool("buffered output", ui, "buffered", (val) => ui.buffered = val);
menuFX.addBool("crop & scale", ui, "crop", () => setupCamera());
menuFX.addBool("camera front/back", ui, "facing", () => setupCamera());
menuFX.addBool("use 3D depth", ui, "useDepth");
@@ -99122,7 +99161,6 @@ async function main() {
log2("Human: demo starting ...");
setupMenu();
document.getElementById("log").innerText = `Human: version ${human.version} TensorFlow/JS: version ${human.tf.version_core}`;
- human.tf.ENV.set("WEBGL_FORCE_F16_TEXTURES", true);
if (ui.modelsPreload) {
status("loading");
await human.load(userConfig);
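
The bundled Human class above also gains a new image() helper (added right after detectFace): it runs only the image pre-processing step, disposes the intermediate tensor, and returns the processed canvas, which is what the demo now draws to show filter effects. A minimal usage sketch, assuming an async context, a <video> element and a 2D canvas context named ctx:

// sketch only: fetch the filtered/pre-processed frame without running any detection models
const filtered = await human.image(video, userConfig); // returns a canvas; the tensor is disposed internally
ctx.drawImage(filtered, 0, 0, ctx.canvas.width, ctx.canvas.height);
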
diff --git a/dist/demo-browser-index.js.map b/dist/demo-browser-index.js.map
index 3d8fbd52..6ecd5c40 100644
--- a/dist/demo-browser-index.js.map
+++ b/dist/demo-browser-index.js.map
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Sin_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Sinh_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Slice_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Softmax_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Softplus_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/SpaceToBatchND_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/SplitV_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Sqrt_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Square_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/SquaredDifference_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Step_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Sub_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Sum_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Tan_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Tanh_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Tile_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Transpose_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Unpack_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/UnsortedSegmentSum_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/ZerosLike_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/register_all_gradients.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/abs.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/acos.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/acosh.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/add_strict.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/add.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/all.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/any.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/arg_max.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/arg_min.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/as_scalar.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/as_type.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/as1d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/as2d.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/as3d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/as4d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/as5d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/asin.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/asinh.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/atan.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/atan2.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/atanh.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/avg_pool.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/batch_to_space_nd.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/batchnorm.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/broadcast_to.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/cast.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/ceil.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/clip_by_value.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/concat.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/conv1d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/conv2d_transpose.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/conv2d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/cos.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/cosh.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/cumsum.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/depth_to_space.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/depthwise_conv2D_deprecated.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/depthwise_conv2d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/dilation2d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/div_no_nan.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/div_strict.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/div.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/dot.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/elu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/equal_strict.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/equal.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/erf.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/exp.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/expand_dims.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/expm1.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/fft.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/flatten.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/floor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/floorDiv.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/gather.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/greater_equal_strict.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/greater_equal.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/greater_strict.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/greater.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/ifft.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/irfft.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/is_finite.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/is_inf.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/is_nan.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/leaky_relu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/less_equal_strict.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/less_equal.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/less_strict.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/less.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/local_response_normalization.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/log_sigmoid.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/log_softmax.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/log_sum_exp.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/log.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/log1p.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/logical_and.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/logical_not.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/logical_or.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/logical_xor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/mat_mul.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/max_pool.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/max.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/maximum_strict.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/maximum.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/mean.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/min.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/minimum_strict.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/minimum.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/mirror_pad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/mod_strict.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/mod.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/mul_strict.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/mul.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/neg.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/norm.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/not_equal_strict.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/not_equal.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/one_hot.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/ones_like.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/pad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/pool.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/pow_strict.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/pow.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/prelu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/prod.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/reciprocal.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/relu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/relu6.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/reshape_as.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/reshape.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/resize_bilinear.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/resize_nearest_neighbor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/reverse.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/rfft.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/round.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/rsqrt.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/selu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/separable_conv2d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/sigmoid.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/sign.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/sin.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/sinh.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/slice.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/softmax.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/softplus.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/space_to_batch_nd.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/split.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/sqrt.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/square.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/squared_difference.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/squared_difference_strict.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/squeeze.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/stack.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/step.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/strided_slice.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/sub_strict.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/sub.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/sum.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/tan.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/tanh.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/tile.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/to_bool.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/to_float.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/to_int.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/topk.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/transpose.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/unique.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/unsorted_segment_sum.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/unstack.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/where.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/zeros_like.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/register_all_chained_ops.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/index.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/backend/common.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/errors.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/utils/generic_utils.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/constraints.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/exports_constraints.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/keras_format/common.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/common.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/utils/math_utils.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/backend/tfjs_backend.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/keras_format/initializer_config.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/initializers.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/exports_initializers.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/backend/state.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/utils/types_utils.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/utils/variable_utils.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/variables.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/engine/topology.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/engine/input_layer.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/logs.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/base_callbacks.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/layers/serialization.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/losses.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/metrics.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/optimizers.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/user_defined_metadata.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/utils/layer_utils.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/utils/serialization_utils.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/version.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/engine/executor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/engine/container.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/engine/training_utils.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/engine/training_dataset.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/engine/training_tensors.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/engine/training.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/models.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/exports.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/activations.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/regularizers.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/layers/advanced_activations.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/utils/conv_utils.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/layers/convolutional.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/layers/convolutional_depthwise.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/layers/recurrent.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/layers/convolutional_recurrent.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/layers/core.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/layers/embeddings.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/layers/merge.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/layers/noise.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/layers/normalization.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/layers/padding.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/layers/pooling.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/layers/wrappers.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/exports_layers.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/exports_metrics.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/exports_models.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/exports_regularizers.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/callbacks.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/index.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/data/compiled_api.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/custom_op/register.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/executors/utils.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/op_list/arithmetic.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/op_list/basic_math.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/op_list/control.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/op_list/convolution.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/op_list/creation.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/op_list/dynamic.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/op_list/evaluation.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/op_list/graph.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/op_list/hash_table.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/op_list/image.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/op_list/logical.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/op_list/matrices.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/op_list/normalization.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/op_list/reduction.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/op_list/slice_join.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/op_list/spectral.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/op_list/transformation.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/operation_mapper.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/custom_op/node_value_impl.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/ops_for_converter.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/executors/arithmetic_executor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/executors/basic_math_executor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/executor/tensor_utils.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/executor/tensor_array.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/executor/tensor_list.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/executors/control_executor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/executors/convolution_executor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/executors/creation_executor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/executors/dynamic_executor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/executors/evaluation_executor.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/executors/graph_executor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/executor/hash_table.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/executors/hash_table_executor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/executors/image_executor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/executors/logical_executor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/executors/matrices_executor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/executors/normalization_executor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/executors/reduction_executor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/executors/slice_join_executor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/executors/spectral_executor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/executors/transformation_executor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/operation_executor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/executor/execution_context.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/executor/model_analysis.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/executor/graph_executor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/executor/resource_manager.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/executor/graph_model.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/version.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/index.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/util/deep_map.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/util/deep_clone.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/util/ring_buffer.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/util/growing_ring_buffer.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/iterators/lazy_iterator.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/dataset.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/datasets/text_line_dataset.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/datasets/csv_dataset.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/iterators/microphone_iterator.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/iterators/webcam_iterator.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/datasource.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/iterators/string_iterator.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/iterators/byte_chunk_iterator.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/iterators/file_chunk_iterator.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/iterators/url_chunk_iterator.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/util/source_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/sources/file_data_source.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/sources/url_data_source.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/readers.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/version.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/index.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/cpu_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/backend_cpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Abs.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/utils/binary_impl.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Complex.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Identity.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Real.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Cast.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/utils/kernel_utils.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Add.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/utils/unary_impl.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/utils/unary_utils.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Ceil.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Exp.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Expm1.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Floor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Log.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Max_impl.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Multiply.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/NotEqual.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Rsqrt.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Slice.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/SquaredDifference.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Sub.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Transpose_impl.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Unique_impl.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/shared.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/version.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/base.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Elu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Prelu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Relu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Relu6.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/utils/fused_utils.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Reshape.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/BatchMatMul.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/_FusedMatMul.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Acos.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Acosh.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Asin.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Asinh.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Atan.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Atanh.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/utils/pool_utils.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/AvgPool.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/AvgPoolBackprop.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/BatchNorm.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Clip.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Imag.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Concat.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Conv2D.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Conv2DBackpropFilter.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Conv2DBackpropInput.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Conv3D.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Conv3DBackpropFilterV2.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Conv3DBackpropInputV2.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Cos.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Cosh.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/DepthwiseConv2dNative.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/DepthwiseConv2dNativeBackpropFilter.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/DepthwiseConv2dNativeBackpropInput.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Dilation2D.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Dilation2DBackpropFilter.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Dilation2DBackpropInput.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Div.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Erf.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/utils/fft_utils.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/FFT.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Fill.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/FlipLeftRight.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/FusedConv2D.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/FusedDepthwiseConv2D.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/IFFT.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/IsFinite.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/IsInf.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/IsNaN.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Log1p.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/LogicalNot.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Max.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/MaxPool.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/MaxPoolBackprop.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/MaxPoolWithArgmax_impl.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/MaxPoolWithArgmax.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/MirrorPad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/NonMaxSuppressionV4.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/NonMaxSuppressionV5.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/PadV2.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Reciprocal.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/RotateWithOffset.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Round.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Selu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Sigmoid.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Sign.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Sin.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Sinh.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Softplus.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Transpose.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/SpaceToBatchND.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Sqrt.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Square.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Step.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Tan.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Tanh.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Unique.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/register_all_kernels.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/index.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/canvas_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/tex_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/webgl_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/flags_webgl.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernel_utils/shared.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/addn_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/addn_packed_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/argminmax_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/packing_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/glsl_version.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/shader_compiler_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/shader_compiler.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/argminmax_packed_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/avg_pool_backprop_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/binaryop_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/binaryop_packed_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/clip_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/clip_packed_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/complex_abs_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/conv_backprop_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/conv_backprop_gpu_depthwise.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/conv_gpu.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/conv_gpu_depthwise.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/conv_packed_gpu_depthwise.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/crop_and_resize_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/cumsum_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/decode_matrix_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/decode_matrix_packed_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/depth_to_space_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/diag_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/encode_float_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/encode_float_packed_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/encode_matrix_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/encode_matrix_packed_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/fill_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/gather_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/gather_nd_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/gpgpu_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/gpgpu_context.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/gpgpu_math.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/im2col_packed_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/lrn_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/lrn_grad_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/lrn_packed_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/max_pool_backprop_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/mulmat_packed_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/multinomial_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/onehot_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/pack_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/pad_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/pad_packed_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/pool_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/reduce_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/reshape_packed_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/resize_bilinear_backprop_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/resize_bilinear_gpu.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/resize_bilinear_packed_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/resize_nearest_neighbor_backprop_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/resize_nearest_neighbor_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/reverse_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/reverse_packed_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/scatter_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/segment_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/select_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/slice_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/slice_packed_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/strided_slice_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/texture_manager.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/tile_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/unaryop_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/unaryop_packed_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/unpack_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/backend_webgl.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/version.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/webgl.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/base.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Identity.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Complex.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernel_utils/kernel_funcs_utils.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Add.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Atan2.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/AvgPool.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/AvgPoolBackprop.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/batchnorm_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/batchnorm_packed_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/BatchNorm.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/NotEqual.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Real.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernel_utils/int.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Cast.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/concat_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/concat_packed_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Imag.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernel_utils/reshape.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Reshape.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Concat_impl.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Concat.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Cos.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Div.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/fft_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/FFT_impl.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/FFT.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/flip_left_right_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/FlipLeftRight.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/FromPixels_utils/from_pixels_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/FromPixels_utils/from_pixels_packed_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/FromPixels.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/IFFT.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/mean_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernel_utils/reduce.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Max_impl.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/transpose_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/transpose_packed_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Transpose_impl.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Max.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/MaxPool.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/MaxPoolBackprop.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/MaxPoolWithArgmax_impl.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/MaxPoolWithArgmax.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Mean_impl.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Mean.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/mirror_pad_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/mirror_pad_packed_gpu.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/MirrorPad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/binaryop_complex_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Multiply.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/NonMaxSuppressionV3.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/NonMaxSuppressionV4.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/NonMaxSuppressionV5.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/rotate_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/RotateWithOffset.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Sin.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Square.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/SquaredDifference.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Sub.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Tan.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Transpose.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Unique.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/register_all_kernels.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/index.js", "../node_modules/@tensorflow/tfjs/src/version.ts", "../node_modules/@tensorflow/tfjs/src/index.ts", "empty:path", "empty:worker_threads", "empty:perf_hooks", "../node_modules/@tensorflow/tfjs-backend-wasm/wasm-out/tfjs-backend-wasm-threaded-simd.js", "../node_modules/@tensorflow/tfjs-backend-wasm/wasm-out/tfjs-backend-wasm.js", "../src/face/blazeface.js", "../src/face/keypoints.js", "../src/face/box.js", "../src/face/util.js", "../src/face/facepipeline.js", "../src/face/uvcoords.js", "../src/face/facemesh.js", "../src/profile.js", "../src/age/age.js", "../src/gender/gender.js", "../src/emotion/emotion.js", "../src/body/modelBase.js", "../src/body/modelMobileNet.js", "../src/body/heapSort.js", "../src/body/buildParts.js", "../src/body/keypoints.js", "../src/body/vectors.js", "../src/body/decodePose.js", "../src/body/decodeMultiple.js", "../src/body/util.js", "../src/body/modelPoseNet.js", "../src/body/posenet.js", "../src/hand/handdetector.js", "../src/hand/handpipeline.js", "../src/hand/anchors.js", "../src/hand/handpose.js", "../src/gesture.js", "../src/imagefx.js", "../src/image.js", "../src/tf.js", "../node_modules/@tensorflow/tfjs-core/src/backends/backend.ts", "../node_modules/@tensorflow/tfjs-core/src/util_base.ts", "../node_modules/@tensorflow/tfjs-core/src/environment.ts", "../node_modules/@tensorflow/tfjs-core/src/global_util.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", 
"../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_registry.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_registry.ts", "../node_modules/@tensorflow/tfjs-core/src/util.ts", "../node_modules/@tensorflow/tfjs-core/src/util.ts", "../node_modules/@tensorflow/tfjs-core/src/profiler.ts", "../node_modules/@tensorflow/tfjs-core/src/tape.ts", "../node_modules/@tensorflow/tfjs-core/src/tensor_format.ts", "../node_modules/@tensorflow/tfjs-core/src/tensor.ts", "../node_modules/@tensorflow/tfjs-core/src/tensor.ts", "../node_modules/@tensorflow/tfjs-core/src/tensor.ts", "../node_modules/@tensorflow/tfjs-core/src/types.ts", "../node_modules/@tensorflow/tfjs-core/src/tensor_util.ts", "../node_modules/@tensorflow/tfjs-core/src/tensor_util.ts", "../node_modules/@tensorflow/tfjs-core/src/engine.ts", "../node_modules/@tensorflow/tfjs-core/src/tensor_util_env.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/operation.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/complex.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/tensor_ops_util.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/buffer.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/cast.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/reshape.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/transpose.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/gather_nd_util.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/gather_nd_util.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/scatter_nd_util.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/scatter_nd_util.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/slice_util.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/slice_util.ts", "../node_modules/@tensorflow/tfjs-core/src/globals.ts", 
"../node_modules/@tensorflow/tfjs-core/src/globals.ts", "../node_modules/@tensorflow/tfjs-core/src/globals.ts", "../node_modules/@tensorflow/tfjs-core/src/globals.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/mul.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/axis_util.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/conv_util.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/concat_util.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/broadcast_util.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/elu.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/segment_util.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/reduce_util.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/segment_util.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/scalar.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/sum.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/zeros.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/prelu.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/tensor1d.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/relu.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/relu6.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/split_util.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/step.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/fused_util.ts", "../node_modules/@tensorflow/tfjs-core/src/backends/backend_util.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/rotate_util.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/array_ops_util.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/selu_util.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/erf_util.ts", "../node_modules/@tensorflow/tfjs-core/src/log.ts", "../node_modules/@tensorflow/tfjs-core/src/backends/complex_util.ts", "../node_modules/@tensorflow/tfjs-core/src/backends/backend_util.ts", "../node_modules/@tensorflow/tfjs-core/src/backends/backend_util.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/types.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/_FusedMatMul.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/unary_kernel.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Abs.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/binary_kernel.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Add.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/AddN.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Identity.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Transpose.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/kernel_utils.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/ArgMax.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/AvgPool.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Reshape.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/BatchMatMul.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Cast.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/ClipByValue.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Concat.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Conv2D.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Conv2DBackpropInput.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Cos.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/CropAndResize.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Cumsum.ts", 
"../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/DepthToSpace.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/DepthwiseConv2dNative.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Div.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Equal.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Exp.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Fill.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/FlipLeftRight.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/FloorDiv.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/FusedBatchNorm.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/FusedConv2D.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/FusedDepthwiseConv2D.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/GatherNd.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/GatherV2.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Greater.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/GreaterEqual.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Less.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/LessEqual.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Log.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/LogicalAnd.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Max.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Maximum.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/MaxPool.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Min.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Minimum.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Multiply.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Negate.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/NonMaxSuppression_util.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/NonMaxSuppressionV3.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/NonMaxSuppressionV4.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/NonMaxSuppressionV5.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/NotEqual.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/OneHot.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/OnesLike.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/PadV2.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Pow.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Prelu.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Relu.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Relu6.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/ResizeBilinear.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Reverse.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/RotateWithOffset.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Rsqrt.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/ScatterNd.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/SelectV2.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Sigmoid.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Sin.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Slice.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Softmax.ts", 
"../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Split.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Sqrt.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Square.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/SquaredDifference.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/StridedSlice.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Sub.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Sum.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Tanh.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Tile.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Unpack.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/ZerosLike.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/register_all_kernels.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/flags_wasm.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/backend_wasm.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/wasm-out/tfjs-backend-wasm-threaded-simd.worker.js", "../node_modules/@tensorflow/tfjs-backend-wasm/src/backend_wasm.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/backend_wasm.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/base.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/index.ts", "../src/tf.js", "../src/face/triangulation.js", "../src/human.js", "../src/hand/box.js", "../src/hand/util.js", "../src/human.js", "../config.js", "../src/human.js", "../demo/draw.js", "../demo/menu.js", "../demo/browser.js"],
- "sourcesContent": ["\"use strict\";\n\n// ref: https://github.com/tc39/proposal-global\nvar getGlobal = function () {\n\t// the only reliable means to get the global object is\n\t// `Function('return this')()`\n\t// However, this causes CSP violations in Chrome apps.\n\tif (typeof self !== 'undefined') { return self; }\n\tif (typeof window !== 'undefined') { return window; }\n\tif (typeof global !== 'undefined') { return global; }\n\tthrow new Error('unable to locate global object');\n}\n\nvar global = getGlobal();\n\nmodule.exports = exports = global.fetch;\n\n// Needed for TypeScript and Webpack.\nif (global.fetch) {\n\texports.default = global.fetch.bind(global);\n}\n\nexports.Headers = global.Headers;\nexports.Request = global.Request;\nexports.Response = global.Response;", "/* eslint-disable node/no-deprecated-api */\nvar buffer = require('buffer')\nvar Buffer = buffer.Buffer\n\n// alternative to using Object.keys for old browsers\nfunction copyProps (src, dst) {\n for (var key in src) {\n dst[key] = src[key]\n }\n}\nif (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) {\n module.exports = buffer\n} else {\n // Copy properties from require('buffer')\n copyProps(buffer, exports)\n exports.Buffer = SafeBuffer\n}\n\nfunction SafeBuffer (arg, encodingOrOffset, length) {\n return Buffer(arg, encodingOrOffset, length)\n}\n\n// Copy static methods from Buffer\ncopyProps(Buffer, SafeBuffer)\n\nSafeBuffer.from = function (arg, encodingOrOffset, length) {\n if (typeof arg === 'number') {\n throw new TypeError('Argument must not be a number')\n }\n return Buffer(arg, encodingOrOffset, length)\n}\n\nSafeBuffer.alloc = function (size, fill, encoding) {\n if (typeof size !== 'number') {\n throw new TypeError('Argument must be a number')\n }\n var buf = Buffer(size)\n if (fill !== undefined) {\n if (typeof encoding === 'string') {\n buf.fill(fill, encoding)\n } else {\n buf.fill(fill)\n }\n } else {\n buf.fill(0)\n }\n return buf\n}\n\nSafeBuffer.allocUnsafe = function (size) {\n if (typeof size !== 'number') {\n throw new TypeError('Argument must be a number')\n }\n return Buffer(size)\n}\n\nSafeBuffer.allocUnsafeSlow = function (size) {\n if (typeof size !== 'number') {\n throw new TypeError('Argument must be a number')\n }\n return buffer.SlowBuffer(size)\n}\n", "// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n'use strict';\n\n/**/\n\nvar Buffer = require('safe-buffer').Buffer;\n/* */\n\nvar isEncoding = Buffer.isEncoding || function (encoding) {\n encoding = '' + encoding;\n switch (encoding && encoding.toLowerCase()) {\n case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw':\n return true;\n default:\n return false;\n }\n};\n\nfunction _normalizeEncoding(enc) {\n if (!enc) return 'utf8';\n var retried;\n while (true) {\n switch (enc) {\n case 'utf8':\n case 'utf-8':\n return 'utf8';\n case 'ucs2':\n case 'ucs-2':\n case 'utf16le':\n case 'utf-16le':\n return 'utf16le';\n case 'latin1':\n case 'binary':\n return 'latin1';\n case 'base64':\n case 'ascii':\n case 'hex':\n return enc;\n default:\n if (retried) return; // undefined\n enc = ('' + enc).toLowerCase();\n retried = true;\n }\n }\n};\n\n// Do not cache `Buffer.isEncoding` when checking encoding names as some\n// modules monkey-patch it to support additional encodings\nfunction normalizeEncoding(enc) {\n var nenc = _normalizeEncoding(enc);\n if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc);\n return nenc || enc;\n}\n\n// StringDecoder provides an interface for efficiently splitting a series of\n// buffers into a series of JS strings without breaking apart multi-byte\n// characters.\nexports.StringDecoder = StringDecoder;\nfunction StringDecoder(encoding) {\n this.encoding = normalizeEncoding(encoding);\n var nb;\n switch (this.encoding) {\n case 'utf16le':\n this.text = utf16Text;\n this.end = utf16End;\n nb = 4;\n break;\n case 'utf8':\n this.fillLast = utf8FillLast;\n nb = 4;\n break;\n case 'base64':\n this.text = base64Text;\n this.end = base64End;\n nb = 3;\n break;\n default:\n this.write = simpleWrite;\n this.end = simpleEnd;\n return;\n }\n this.lastNeed = 0;\n this.lastTotal = 0;\n this.lastChar = Buffer.allocUnsafe(nb);\n}\n\nStringDecoder.prototype.write = function (buf) {\n if (buf.length === 0) return '';\n var r;\n var i;\n if (this.lastNeed) {\n r = this.fillLast(buf);\n if (r === undefined) return '';\n i = this.lastNeed;\n this.lastNeed = 0;\n } else {\n i = 0;\n }\n if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i);\n return r || '';\n};\n\nStringDecoder.prototype.end = utf8End;\n\n// Returns only complete characters in a Buffer\nStringDecoder.prototype.text = utf8Text;\n\n// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer\nStringDecoder.prototype.fillLast = function (buf) {\n if (this.lastNeed <= buf.length) {\n buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed);\n return this.lastChar.toString(this.encoding, 0, this.lastTotal);\n }\n buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length);\n this.lastNeed -= buf.length;\n};\n\n// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a\n// continuation byte. If an invalid byte is detected, -2 is returned.\nfunction utf8CheckByte(byte) {\n if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4;\n return byte >> 6 === 0x02 ? 
-1 : -2;\n}\n\n// Checks at most 3 bytes at the end of a Buffer in order to detect an\n// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4)\n// needed to complete the UTF-8 character (if applicable) are returned.\nfunction utf8CheckIncomplete(self, buf, i) {\n var j = buf.length - 1;\n if (j < i) return 0;\n var nb = utf8CheckByte(buf[j]);\n if (nb >= 0) {\n if (nb > 0) self.lastNeed = nb - 1;\n return nb;\n }\n if (--j < i || nb === -2) return 0;\n nb = utf8CheckByte(buf[j]);\n if (nb >= 0) {\n if (nb > 0) self.lastNeed = nb - 2;\n return nb;\n }\n if (--j < i || nb === -2) return 0;\n nb = utf8CheckByte(buf[j]);\n if (nb >= 0) {\n if (nb > 0) {\n if (nb === 2) nb = 0;else self.lastNeed = nb - 3;\n }\n return nb;\n }\n return 0;\n}\n\n// Validates as many continuation bytes for a multi-byte UTF-8 character as\n// needed or are available. If we see a non-continuation byte where we expect\n// one, we \"replace\" the validated continuation bytes we've seen so far with\n// a single UTF-8 replacement character ('\\ufffd'), to match v8's UTF-8 decoding\n// behavior. The continuation byte check is included three times in the case\n// where all of the continuation bytes for a character exist in the same buffer.\n// It is also done this way as a slight performance increase instead of using a\n// loop.\nfunction utf8CheckExtraBytes(self, buf, p) {\n if ((buf[0] & 0xC0) !== 0x80) {\n self.lastNeed = 0;\n return '\\ufffd';\n }\n if (self.lastNeed > 1 && buf.length > 1) {\n if ((buf[1] & 0xC0) !== 0x80) {\n self.lastNeed = 1;\n return '\\ufffd';\n }\n if (self.lastNeed > 2 && buf.length > 2) {\n if ((buf[2] & 0xC0) !== 0x80) {\n self.lastNeed = 2;\n return '\\ufffd';\n }\n }\n }\n}\n\n// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer.\nfunction utf8FillLast(buf) {\n var p = this.lastTotal - this.lastNeed;\n var r = utf8CheckExtraBytes(this, buf, p);\n if (r !== undefined) return r;\n if (this.lastNeed <= buf.length) {\n buf.copy(this.lastChar, p, 0, this.lastNeed);\n return this.lastChar.toString(this.encoding, 0, this.lastTotal);\n }\n buf.copy(this.lastChar, p, 0, buf.length);\n this.lastNeed -= buf.length;\n}\n\n// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a\n// partial character, the character's bytes are buffered until the required\n// number of bytes are available.\nfunction utf8Text(buf, i) {\n var total = utf8CheckIncomplete(this, buf, i);\n if (!this.lastNeed) return buf.toString('utf8', i);\n this.lastTotal = total;\n var end = buf.length - (total - this.lastNeed);\n buf.copy(this.lastChar, 0, end);\n return buf.toString('utf8', i, end);\n}\n\n// For UTF-8, a replacement character is added when ending on a partial\n// character.\nfunction utf8End(buf) {\n var r = buf && buf.length ? this.write(buf) : '';\n if (this.lastNeed) return r + '\\ufffd';\n return r;\n}\n\n// UTF-16LE typically needs two bytes per character, but even if we have an even\n// number of bytes available, we need to check if we end on a leading/high\n// surrogate. 
In that case, we need to wait for the next two bytes in order to\n// decode the last character properly.\nfunction utf16Text(buf, i) {\n if ((buf.length - i) % 2 === 0) {\n var r = buf.toString('utf16le', i);\n if (r) {\n var c = r.charCodeAt(r.length - 1);\n if (c >= 0xD800 && c <= 0xDBFF) {\n this.lastNeed = 2;\n this.lastTotal = 4;\n this.lastChar[0] = buf[buf.length - 2];\n this.lastChar[1] = buf[buf.length - 1];\n return r.slice(0, -1);\n }\n }\n return r;\n }\n this.lastNeed = 1;\n this.lastTotal = 2;\n this.lastChar[0] = buf[buf.length - 1];\n return buf.toString('utf16le', i, buf.length - 1);\n}\n\n// For UTF-16LE we do not explicitly append special replacement characters if we\n// end on a partial character, we simply let v8 handle that.\nfunction utf16End(buf) {\n var r = buf && buf.length ? this.write(buf) : '';\n if (this.lastNeed) {\n var end = this.lastTotal - this.lastNeed;\n return r + this.lastChar.toString('utf16le', 0, end);\n }\n return r;\n}\n\nfunction base64Text(buf, i) {\n var n = (buf.length - i) % 3;\n if (n === 0) return buf.toString('base64', i);\n this.lastNeed = 3 - n;\n this.lastTotal = 3;\n if (n === 1) {\n this.lastChar[0] = buf[buf.length - 1];\n } else {\n this.lastChar[0] = buf[buf.length - 2];\n this.lastChar[1] = buf[buf.length - 1];\n }\n return buf.toString('base64', i, buf.length - n);\n}\n\nfunction base64End(buf) {\n var r = buf && buf.length ? this.write(buf) : '';\n if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed);\n return r;\n}\n\n// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex)\nfunction simpleWrite(buf) {\n return buf.toString(this.encoding);\n}\n\nfunction simpleEnd(buf) {\n return buf && buf.length ? this.write(buf) : '';\n}", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const EPSILON_FLOAT32 = 1e-7;\nexport const EPSILON_FLOAT16 = 1e-4;\n/** Convenient class for storing tensor-related data. */\nexport class DataStorage {\n constructor(backend, dataMover) {\n this.backend = backend;\n this.dataMover = dataMover;\n this.data = new WeakMap();\n this.dataIdsCount = 0;\n }\n get(dataId) {\n if (!this.data.has(dataId)) {\n this.dataMover.moveData(this.backend, dataId);\n }\n return this.data.get(dataId);\n }\n set(dataId, value) {\n this.dataIdsCount++;\n this.data.set(dataId, value);\n }\n has(dataId) {\n return this.data.has(dataId);\n }\n delete(dataId) {\n this.dataIdsCount--;\n return this.data.delete(dataId);\n }\n numDataIds() {\n return this.dataIdsCount;\n }\n}\n/**\n * The interface that defines the kernels that should be implemented when\n * adding a new backend. 
New backends don't need to implement every one of the\n * methods, this can be done gradually (throw an error for unimplemented\n * methods).\n */\nexport class KernelBackend {\n time(f) {\n return notYetImplemented('time');\n }\n read(dataId) {\n return notYetImplemented('read');\n }\n readSync(dataId) {\n return notYetImplemented('readSync');\n }\n numDataIds() {\n return notYetImplemented('numDataIds');\n }\n disposeData(dataId) {\n return notYetImplemented('disposeData');\n }\n write(values, shape, dtype) {\n return notYetImplemented('write');\n }\n move(dataId, values, shape, dtype) {\n return notYetImplemented('move');\n }\n memory() {\n return notYetImplemented('memory');\n }\n /** Returns the highest precision for floats in bits (e.g. 16 or 32) */\n floatPrecision() {\n return notYetImplemented('floatPrecision');\n }\n /** Returns the smallest representable number. */\n epsilon() {\n return this.floatPrecision() === 32 ? EPSILON_FLOAT32 : EPSILON_FLOAT16;\n }\n batchMatMul(a, b, transposeA, transposeB) {\n return notYetImplemented('batchMatMul');\n }\n fusedBatchMatMul({ a, b, transposeA, transposeB, bias, activation, preluActivationWeights }) {\n return notYetImplemented('fusedBatchMatMul');\n }\n slice(x, begin, size) {\n return notYetImplemented('slice');\n }\n stridedSlice(x, begin, end, strides) {\n return notYetImplemented('stridedSlice');\n }\n unstack(x, axis) {\n return notYetImplemented('unstack');\n }\n reverse(a, axis) {\n return notYetImplemented('reverse');\n }\n concat(tensors, axis) {\n return notYetImplemented('concat');\n }\n neg(a) {\n return notYetImplemented('neg');\n }\n add(a, b) {\n return notYetImplemented('add');\n }\n addN(tensors) {\n return notYetImplemented('addN');\n }\n subtract(a, b) {\n return notYetImplemented('subtract');\n }\n multiply(a, b) {\n return notYetImplemented('multiply');\n }\n realDivide(a, b) {\n return notYetImplemented('realDivide');\n }\n floorDiv(a, b) {\n return notYetImplemented('floorDiv');\n }\n sum(x, axes) {\n return notYetImplemented('sum');\n }\n prod(x, axes) {\n return notYetImplemented('prod');\n }\n unsortedSegmentSum(x, segmentIds, numSegments) {\n return notYetImplemented('unsortedSegmentSum');\n }\n argMin(x, axis) {\n return notYetImplemented('argMin');\n }\n argMax(x, axis) {\n return notYetImplemented('argMax');\n }\n equal(a, b) {\n return notYetImplemented('equal');\n }\n notEqual(a, b) {\n return notYetImplemented('notEqual');\n }\n less(a, b) {\n return notYetImplemented('less');\n }\n lessEqual(a, b) {\n return notYetImplemented('lessEqual');\n }\n greater(a, b) {\n return notYetImplemented('greater');\n }\n greaterEqual(a, b) {\n return notYetImplemented('greaterEqual');\n }\n logicalNot(a) {\n return notYetImplemented('logicalNot');\n }\n logicalAnd(a, b) {\n return notYetImplemented('logicalAnd');\n }\n logicalOr(a, b) {\n return notYetImplemented('logicalOr');\n }\n where(condition) {\n return notYetImplemented('where');\n }\n select(condition, a, b) {\n return notYetImplemented('select');\n }\n topk(x, k, sorted) {\n return notYetImplemented('topk');\n }\n min(x, axes) {\n return notYetImplemented('min');\n }\n minimum(a, b) {\n return notYetImplemented('minimum');\n }\n mod(a, b) {\n return notYetImplemented('mod');\n }\n max(x, axes) {\n return notYetImplemented('max');\n }\n maximum(a, b) {\n return notYetImplemented('maximum');\n }\n all(x, axes) {\n return notYetImplemented('all');\n }\n any(x, axes) {\n return notYetImplemented('any');\n }\n squaredDifference(a, b) {\n return 
notYetImplemented('squaredDifference');\n }\n ceil(x) {\n return notYetImplemented('ceil');\n }\n floor(x) {\n return notYetImplemented('floor');\n }\n round(x) {\n return notYetImplemented('round');\n }\n sign(x) {\n return notYetImplemented('sign');\n }\n isNaN(x) {\n return notYetImplemented('isNaN');\n }\n isInf(x) {\n return notYetImplemented('isInf');\n }\n isFinite(x) {\n return notYetImplemented('isFinite');\n }\n pow(a, b) {\n return notYetImplemented('pow');\n }\n exp(x) {\n return notYetImplemented('exp');\n }\n expm1(x) {\n return notYetImplemented('expm1');\n }\n softmax(x, dim) {\n return notYetImplemented('softmax');\n }\n log(x) {\n return notYetImplemented('log');\n }\n log1p(x) {\n return notYetImplemented('log1p');\n }\n sqrt(x) {\n return notYetImplemented('sqrt');\n }\n rsqrt(x) {\n return notYetImplemented('rsqrt');\n }\n square(x) {\n return notYetImplemented('square');\n }\n reciprocal(x) {\n return notYetImplemented('reciprocal');\n }\n relu(x) {\n return notYetImplemented('relu');\n }\n relu6(x) {\n return notYetImplemented('relu6');\n }\n prelu(x, a) {\n return notYetImplemented('prelu');\n }\n elu(x) {\n return notYetImplemented('elu');\n }\n eluDer(dy, y) {\n return notYetImplemented('eluDer');\n }\n selu(x) {\n return notYetImplemented('selu');\n }\n int(x) {\n return notYetImplemented('int');\n }\n clip(x, min, max) {\n return notYetImplemented('clip');\n }\n abs(x) {\n return notYetImplemented('abs');\n }\n complexAbs(x) {\n return notYetImplemented('complexAbs');\n }\n sigmoid(x) {\n return notYetImplemented('sigmoid');\n }\n softplus(x) {\n return notYetImplemented('softplus');\n }\n sin(x) {\n return notYetImplemented('sin');\n }\n cos(x) {\n return notYetImplemented('cos');\n }\n tan(x) {\n return notYetImplemented('tan');\n }\n asin(x) {\n return notYetImplemented('asin');\n }\n acos(x) {\n return notYetImplemented('acos');\n }\n atan(x) {\n return notYetImplemented('atan');\n }\n atan2(a, b) {\n return notYetImplemented('atan2');\n }\n sinh(x) {\n return notYetImplemented('sinh');\n }\n cosh(x) {\n return notYetImplemented('cosh');\n }\n tanh(x) {\n return notYetImplemented('tanh');\n }\n asinh(x) {\n return notYetImplemented('asinh');\n }\n acosh(x) {\n return notYetImplemented('acosh');\n }\n atanh(x) {\n return notYetImplemented('atanh');\n }\n erf(x) {\n return notYetImplemented('erf');\n }\n step(x, alpha) {\n return notYetImplemented('step');\n }\n fusedConv2d({ input, filter, convInfo, bias, activation, preluActivationWeights }) {\n return notYetImplemented('fusedConv2d');\n }\n conv2d(x, filter, convInfo) {\n return notYetImplemented('conv2d');\n }\n conv2dDerInput(dy, filter, convInfo) {\n return notYetImplemented('conv2dDerInput');\n }\n conv2dDerFilter(x, dY, convInfo) {\n return notYetImplemented('conv2dDerFilter');\n }\n fusedDepthwiseConv2D({ input, filter, convInfo, bias, activation, preluActivationWeights }) {\n return notYetImplemented('fusedDepthwiseConv2D');\n }\n depthwiseConv2D(input, filter, convInfo) {\n return notYetImplemented('depthwiseConv2D');\n }\n depthwiseConv2DDerInput(dy, filter, convInfo) {\n return notYetImplemented('depthwiseConv2DDerInput');\n }\n depthwiseConv2DDerFilter(x, dY, convInfo) {\n return notYetImplemented('depthwiseConv2DDerFilter');\n }\n conv3d(x, filter, convInfo) {\n return notYetImplemented('conv3d');\n }\n conv3dDerInput(dy, filter, convInfo) {\n return notYetImplemented('conv3dDerInput');\n }\n conv3dDerFilter(x, dY, convInfo) {\n return notYetImplemented('conv3dDerFilter');\n }\n maxPool(x, 
convInfo) {\n return notYetImplemented('maxPool');\n }\n maxPoolBackprop(dy, x, y, convInfo) {\n return notYetImplemented('maxPoolBackprop');\n }\n avgPool(x, convInfo) {\n return notYetImplemented('avgPool');\n }\n avgPoolBackprop(dy, x, convInfo) {\n return notYetImplemented('avgPoolBackprop');\n }\n avgPool3d(x, convInfo) {\n return notYetImplemented('avgPool3d');\n }\n avgPool3dBackprop(dy, x, convInfo) {\n return notYetImplemented('avgPool3dBackprop');\n }\n maxPool3d(x, convInfo) {\n return notYetImplemented('maxPool3d');\n }\n maxPool3dBackprop(dy, x, y, convInfo) {\n return notYetImplemented('maxPool3dBackprop');\n }\n reshape(x, shape) {\n return notYetImplemented('reshape');\n }\n cast(x, dtype) {\n return notYetImplemented('cast');\n }\n tile(x, reps) {\n return notYetImplemented('tile');\n }\n pad(x, paddings, constantValue) {\n return notYetImplemented('pad');\n }\n transpose(x, perm) {\n return notYetImplemented('transpose');\n }\n gather(x, indices, axis) {\n return notYetImplemented('gather');\n }\n gatherND(x, indices) {\n return notYetImplemented('gatherND');\n }\n scatterND(indices, updates, shape) {\n return notYetImplemented('scatterND');\n }\n batchToSpaceND(x, blockShape, crops) {\n return notYetImplemented('batchToSpaceND');\n }\n spaceToBatchND(x, blockShape, paddings) {\n return notYetImplemented('spaceToBatchND');\n }\n resizeBilinear(x, newHeight, newWidth, alignCorners) {\n return notYetImplemented('resizeBilinear');\n }\n resizeBilinearBackprop(dy, x, alignCorners) {\n return notYetImplemented('resizeBilinearBackprop');\n }\n resizeNearestNeighbor(x, newHEight, newWidth, alignCorners) {\n return notYetImplemented('resizeNearestNeighbor');\n }\n resizeNearestNeighborBackprop(dy, x, alignCorners) {\n return notYetImplemented('resizeNearestNeighborBackprop');\n }\n batchNorm(x, mean, variance, offset, scale, varianceEpsilon) {\n return notYetImplemented('batchNorm');\n }\n localResponseNormalization4D(x, radius, bias, alpha, beta) {\n return notYetImplemented('localResponseNormalization4D');\n }\n LRNGrad(dy, inputImage, outputImage, radius, bias, alpha, beta) {\n return notYetImplemented('LRNGrad');\n }\n multinomial(logits, normalized, numSamples, seed) {\n return notYetImplemented('multinomial');\n }\n oneHot(indices, depth, onValue, offValue) {\n return notYetImplemented('oneHot');\n }\n cumsum(x, axis, exclusive, reverse) {\n return notYetImplemented('cumsum');\n }\n nonMaxSuppression(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold) {\n return notYetImplemented('nonMaxSuppression');\n }\n fft(x) {\n return notYetImplemented('fft');\n }\n ifft(x) {\n return notYetImplemented('ifft');\n }\n complex(real, imag) {\n return notYetImplemented('complex');\n }\n real(input) {\n return notYetImplemented('real');\n }\n imag(input) {\n return notYetImplemented('imag');\n }\n cropAndResize(image, boxes, boxIndex, cropSize, method, extrapolationValue) {\n return notYetImplemented('cropAndResize');\n }\n depthToSpace(x, blockSize, dataFormat) {\n return notYetImplemented('depthToSpace');\n }\n // Aligns with the \"SplitV\" kernel in TensorFlow.\n split(value, sizeSplits, axis) {\n return notYetImplemented('split');\n }\n sparseToDense(sparseIndices, sparseValues, outputShape, defaultValue) {\n return notYetImplemented('sparseToDense');\n }\n diag(x) {\n return notYetImplemented('diag');\n }\n fill(shape, value, dtype) {\n return notYetImplemented('fill');\n }\n onesLike(x) {\n return notYetImplemented('onesLike');\n }\n zerosLike(x) {\n return 
notYetImplemented('zerosLike');\n }\n linspace(start, stop, num) {\n return notYetImplemented('linspace');\n }\n dispose() {\n return notYetImplemented('dispose');\n }\n}\nfunction notYetImplemented(kernelName) {\n throw new Error(`'${kernelName}' not yet implemented or not found in the registry. ` +\n `This kernel may not be supported by the tfjs backend you have chosen`);\n}\n//# sourceMappingURL=backend.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * Shuffles the array in-place using Fisher-Yates algorithm.\n *\n * ```js\n * const a = [1, 2, 3, 4, 5];\n * tf.util.shuffle(a);\n * console.log(a);\n * ```\n *\n * @param array The array to shuffle in-place.\n *\n * @doc {heading: 'Util', namespace: 'util'}\n */\n// tslint:disable-next-line:no-any\nexport function shuffle(array) {\n let counter = array.length;\n let temp = 0;\n let index = 0;\n // While there are elements in the array\n while (counter > 0) {\n // Pick a random index\n index = (Math.random() * counter) | 0;\n // Decrease counter by 1\n counter--;\n // And swap the last element with it\n temp = array[counter];\n array[counter] = array[index];\n array[index] = temp;\n }\n}\n/** Clamps a value to a specified range. */\nexport function clamp(min, x, max) {\n return Math.max(min, Math.min(x, max));\n}\nexport function nearestLargerEven(val) {\n return val % 2 === 0 ? val : val + 1;\n}\nexport function sum(arr) {\n let sum = 0;\n for (let i = 0; i < arr.length; i++) {\n sum += arr[i];\n }\n return sum;\n}\n/**\n * Returns a sample from a uniform [a, b) distribution.\n *\n * @param a The minimum support (inclusive).\n * @param b The maximum support (exclusive).\n * @return A pseudorandom number on the half-open interval [a,b).\n */\nexport function randUniform(a, b) {\n const r = Math.random();\n return (b * r) + (1 - r) * a;\n}\n/** Returns the squared Euclidean distance between two vectors. */\nexport function distSquared(a, b) {\n let result = 0;\n for (let i = 0; i < a.length; i++) {\n const diff = Number(a[i]) - Number(b[i]);\n result += diff * diff;\n }\n return result;\n}\n/**\n * Asserts that the expression is true. Otherwise throws an error with the\n * provided message.\n *\n * ```js\n * const x = 2;\n * tf.util.assert(x === 2, 'x is not 2');\n * ```\n *\n * @param expr The expression to assert (as a boolean).\n * @param msg A function that returns the message to report when throwing an\n * error. We use a function for performance reasons.\n *\n * @doc {heading: 'Util', namespace: 'util'}\n */\nexport function assert(expr, msg) {\n if (!expr) {\n throw new Error(typeof msg === 'string' ? 
msg : msg());\n }\n}\nexport function assertShapesMatch(shapeA, shapeB, errorMessagePrefix = '') {\n assert(arraysEqual(shapeA, shapeB), () => errorMessagePrefix + ` Shapes ${shapeA} and ${shapeB} must match`);\n}\nexport function assertNonNull(a) {\n assert(a != null, () => `The input to the tensor constructor must be a non-null value.`);\n}\n// NOTE: We explicitly type out what T extends instead of any so that\n// util.flatten on a nested array of number doesn't try to infer T as a\n// number[][], causing us to explicitly type util.flatten().\n/**\n * Flattens an arbitrarily nested array.\n *\n * ```js\n * const a = [[1, 2], [3, 4], [5, [6, [7]]]];\n * const flat = tf.util.flatten(a);\n * console.log(flat);\n * ```\n *\n * @param arr The nested array to flatten.\n * @param result The destination array which holds the elements.\n * @param skipTypedArray If true, avoids flattening the typed arrays. Defaults\n * to false.\n *\n * @doc {heading: 'Util', namespace: 'util'}\n */\nexport function flatten(arr, result = [], skipTypedArray = false) {\n if (result == null) {\n result = [];\n }\n if (Array.isArray(arr) || isTypedArray(arr) && !skipTypedArray) {\n for (let i = 0; i < arr.length; ++i) {\n flatten(arr[i], result, skipTypedArray);\n }\n }\n else {\n result.push(arr);\n }\n return result;\n}\n/**\n * Returns the size (number of elements) of the tensor given its shape.\n *\n * ```js\n * const shape = [3, 4, 2];\n * const size = tf.util.sizeFromShape(shape);\n * console.log(size);\n * ```\n *\n * @doc {heading: 'Util', namespace: 'util'}\n */\nexport function sizeFromShape(shape) {\n if (shape.length === 0) {\n // Scalar.\n return 1;\n }\n let size = shape[0];\n for (let i = 1; i < shape.length; i++) {\n size *= shape[i];\n }\n return size;\n}\nexport function isScalarShape(shape) {\n return shape.length === 0;\n}\nexport function arraysEqual(n1, n2) {\n if (n1 === n2) {\n return true;\n }\n if (n1 == null || n2 == null) {\n return false;\n }\n if (n1.length !== n2.length) {\n return false;\n }\n for (let i = 0; i < n1.length; i++) {\n if (n1[i] !== n2[i]) {\n return false;\n }\n }\n return true;\n}\nexport function isInt(a) {\n return a % 1 === 0;\n}\nexport function tanh(x) {\n // tslint:disable-next-line:no-any\n if (Math.tanh != null) {\n // tslint:disable-next-line:no-any\n return Math.tanh(x);\n }\n if (x === Infinity) {\n return 1;\n }\n else if (x === -Infinity) {\n return -1;\n }\n else {\n const e2x = Math.exp(2 * x);\n return (e2x - 1) / (e2x + 1);\n }\n}\nexport function sizeToSquarishShape(size) {\n const width = Math.ceil(Math.sqrt(size));\n return [width, Math.ceil(size / width)];\n}\n/**\n * Creates a new array with randomized indicies to a given quantity.\n *\n * ```js\n * const randomTen = tf.util.createShuffledIndices(10);\n * console.log(randomTen);\n * ```\n *\n * @param number Quantity of how many shuffled indicies to create.\n *\n * @doc {heading: 'Util', namespace: 'util'}\n */\nexport function createShuffledIndices(n) {\n const shuffledIndices = new Uint32Array(n);\n for (let i = 0; i < n; ++i) {\n shuffledIndices[i] = i;\n }\n shuffle(shuffledIndices);\n return shuffledIndices;\n}\nexport function rightPad(a, size) {\n if (size <= a.length) {\n return a;\n }\n return a + ' '.repeat(size - a.length);\n}\nexport function repeatedTry(checkFn, delayFn = (counter) => 0, maxCounter) {\n return new Promise((resolve, reject) => {\n let tryCount = 0;\n const tryFn = () => {\n if (checkFn()) {\n resolve();\n return;\n }\n tryCount++;\n const nextBackoff = 
delayFn(tryCount);\n if (maxCounter != null && tryCount >= maxCounter) {\n reject();\n return;\n }\n setTimeout(tryFn, nextBackoff);\n };\n tryFn();\n });\n}\n/**\n * Given the full size of the array and a shape that may contain -1 as the\n * implicit dimension, returns the inferred shape where -1 is replaced.\n * E.g. For shape=[2, -1, 3] and size=24, it will return [2, 4, 3].\n *\n * @param shape The shape, which may contain -1 in some dimension.\n * @param size The full size (number of elements) of the array.\n * @return The inferred shape where -1 is replaced with the inferred size.\n */\nexport function inferFromImplicitShape(shape, size) {\n let shapeProd = 1;\n let implicitIdx = -1;\n for (let i = 0; i < shape.length; ++i) {\n if (shape[i] >= 0) {\n shapeProd *= shape[i];\n }\n else if (shape[i] === -1) {\n if (implicitIdx !== -1) {\n throw Error(`Shapes can only have 1 implicit size. ` +\n `Found -1 at dim ${implicitIdx} and dim ${i}`);\n }\n implicitIdx = i;\n }\n else if (shape[i] < 0) {\n throw Error(`Shapes can not be < 0. Found ${shape[i]} at dim ${i}`);\n }\n }\n if (implicitIdx === -1) {\n if (size > 0 && size !== shapeProd) {\n throw Error(`Size(${size}) must match the product of shape ${shape}`);\n }\n return shape;\n }\n if (shapeProd === 0) {\n throw Error(`Cannot infer the missing size in [${shape}] when ` +\n `there are 0 elements`);\n }\n if (size % shapeProd !== 0) {\n throw Error(`The implicit shape can't be a fractional number. ` +\n `Got ${size} / ${shapeProd}`);\n }\n const newShape = shape.slice();\n newShape[implicitIdx] = size / shapeProd;\n return newShape;\n}\nexport function parseAxisParam(axis, shape) {\n const rank = shape.length;\n // Normalize input\n axis = axis == null ? shape.map((s, i) => i) : [].concat(axis);\n // Check for valid range\n assert(axis.every(ax => ax >= -rank && ax < rank), () => `All values in axis param must be in range [-${rank}, ${rank}) but ` +\n `got axis ${axis}`);\n // Check for only integers\n assert(axis.every(ax => isInt(ax)), () => `All values in axis param must be integers but ` +\n `got axis ${axis}`);\n // Handle negative axis.\n return axis.map(a => a < 0 ? rank + a : a);\n}\n/** Reduces the shape by removing all dimensions of shape 1. 
*/\nexport function squeezeShape(shape, axis) {\n const newShape = [];\n const keptDims = [];\n const isEmptyArray = axis != null && Array.isArray(axis) && axis.length === 0;\n const axes = (axis == null || isEmptyArray) ?\n null :\n parseAxisParam(axis, shape).sort();\n let j = 0;\n for (let i = 0; i < shape.length; ++i) {\n if (axes != null) {\n if (axes[j] === i && shape[i] !== 1) {\n throw new Error(`Can't squeeze axis ${i} since its dim '${shape[i]}' is not 1`);\n }\n if ((axes[j] == null || axes[j] > i) && shape[i] === 1) {\n newShape.push(shape[i]);\n keptDims.push(i);\n }\n if (axes[j] <= i) {\n j++;\n }\n }\n if (shape[i] !== 1) {\n newShape.push(shape[i]);\n keptDims.push(i);\n }\n }\n return { newShape, keptDims };\n}\nexport function getTypedArrayFromDType(dtype, size) {\n let values = null;\n if (dtype == null || dtype === 'float32') {\n values = new Float32Array(size);\n }\n else if (dtype === 'int32') {\n values = new Int32Array(size);\n }\n else if (dtype === 'bool') {\n values = new Uint8Array(size);\n }\n else {\n throw new Error(`Unknown data type ${dtype}`);\n }\n return values;\n}\nexport function getArrayFromDType(dtype, size) {\n let values = null;\n if (dtype == null || dtype === 'float32') {\n values = new Float32Array(size);\n }\n else if (dtype === 'int32') {\n values = new Int32Array(size);\n }\n else if (dtype === 'bool') {\n values = new Uint8Array(size);\n }\n else if (dtype === 'string') {\n values = new Array(size);\n }\n else {\n throw new Error(`Unknown data type ${dtype}`);\n }\n return values;\n}\nexport function checkConversionForErrors(vals, dtype) {\n for (let i = 0; i < vals.length; i++) {\n const num = vals[i];\n if (isNaN(num) || !isFinite(num)) {\n throw Error(`A tensor of type ${dtype} being uploaded contains ${num}.`);\n }\n }\n}\n/** Returns true if the dtype is valid. */\nexport function isValidDtype(dtype) {\n return dtype === 'bool' || dtype === 'complex64' || dtype === 'float32' ||\n dtype === 'int32' || dtype === 'string';\n}\n/**\n * Returns true if the new type can't encode the old type without loss of\n * precision.\n */\nexport function hasEncodingLoss(oldType, newType) {\n if (newType === 'complex64') {\n return false;\n }\n if (newType === 'float32' && oldType !== 'complex64') {\n return false;\n }\n if (newType === 'int32' && oldType !== 'float32' && oldType !== 'complex64') {\n return false;\n }\n if (newType === 'bool' && oldType === 'bool') {\n return false;\n }\n return true;\n}\nexport function isTypedArray(a) {\n return a instanceof Float32Array || a instanceof Int32Array ||\n a instanceof Uint8Array;\n}\nexport function bytesPerElement(dtype) {\n if (dtype === 'float32' || dtype === 'int32') {\n return 4;\n }\n else if (dtype === 'complex64') {\n return 8;\n }\n else if (dtype === 'bool') {\n return 1;\n }\n else {\n throw new Error(`Unknown dtype ${dtype}`);\n }\n}\n/**\n * Returns the approximate number of bytes allocated in the string array - 2\n * bytes per character. Computing the exact bytes for a native string in JS is\n * not possible since it depends on the encoding of the html page that serves\n * the website.\n */\nexport function bytesFromStringArray(arr) {\n if (arr == null) {\n return 0;\n }\n let bytes = 0;\n arr.forEach(x => bytes += x.length);\n return bytes;\n}\n/** Returns true if the value is a string. 
*/\nexport function isString(value) {\n return typeof value === 'string' || value instanceof String;\n}\nexport function isBoolean(value) {\n return typeof value === 'boolean';\n}\nexport function isNumber(value) {\n return typeof value === 'number';\n}\nexport function inferDtype(values) {\n if (Array.isArray(values)) {\n return inferDtype(values[0]);\n }\n if (values instanceof Float32Array) {\n return 'float32';\n }\n else if (values instanceof Int32Array || values instanceof Uint8Array) {\n return 'int32';\n }\n else if (isNumber(values)) {\n return 'float32';\n }\n else if (isString(values)) {\n return 'string';\n }\n else if (isBoolean(values)) {\n return 'bool';\n }\n return 'float32';\n}\nexport function isFunction(f) {\n return !!(f && f.constructor && f.call && f.apply);\n}\nexport function nearestDivisor(size, start) {\n for (let i = start; i < size; ++i) {\n if (size % i === 0) {\n return i;\n }\n }\n return size;\n}\nexport function computeStrides(shape) {\n const rank = shape.length;\n if (rank < 2) {\n return [];\n }\n // Last dimension has implicit stride of 1, thus having D-1 (instead of D)\n // strides.\n const strides = new Array(rank - 1);\n strides[rank - 2] = shape[rank - 1];\n for (let i = rank - 3; i >= 0; --i) {\n strides[i] = strides[i + 1] * shape[i + 1];\n }\n return strides;\n}\nfunction createNestedArray(offset, shape, a) {\n const ret = new Array();\n if (shape.length === 1) {\n const d = shape[0];\n for (let i = 0; i < d; i++) {\n ret[i] = a[offset + i];\n }\n }\n else {\n const d = shape[0];\n const rest = shape.slice(1);\n const len = rest.reduce((acc, c) => acc * c);\n for (let i = 0; i < d; i++) {\n ret[i] = createNestedArray(offset + i * len, rest, a);\n }\n }\n return ret;\n}\n// Provide a nested array of TypedArray in given shape.\nexport function toNestedArray(shape, a) {\n if (shape.length === 0) {\n // Scalar type should return a single number.\n return a[0];\n }\n const size = shape.reduce((acc, c) => acc * c);\n if (size === 0) {\n // A tensor with shape zero should be turned into empty list.\n return [];\n }\n if (size !== a.length) {\n throw new Error(`[${shape}] does not match the input size ${a.length}.`);\n }\n return createNestedArray(0, shape, a);\n}\nexport function makeOnesTypedArray(size, dtype) {\n const array = makeZerosTypedArray(size, dtype);\n for (let i = 0; i < array.length; i++) {\n array[i] = 1;\n }\n return array;\n}\nexport function makeZerosTypedArray(size, dtype) {\n if (dtype == null || dtype === 'float32' || dtype === 'complex64') {\n return new Float32Array(size);\n }\n else if (dtype === 'int32') {\n return new Int32Array(size);\n }\n else if (dtype === 'bool') {\n return new Uint8Array(size);\n }\n else {\n throw new Error(`Unknown data type ${dtype}`);\n }\n}\n/**\n * Make nested `TypedArray` filled with zeros.\n * @param shape The shape information for the nested array.\n * @param dtype dtype of the array element.\n */\nexport function makeZerosNestedTypedArray(shape, dtype) {\n const size = shape.reduce((prev, curr) => prev * curr, 1);\n if (dtype == null || dtype === 'float32') {\n return toNestedArray(shape, new Float32Array(size));\n }\n else if (dtype === 'int32') {\n return toNestedArray(shape, new Int32Array(size));\n }\n else if (dtype === 'bool') {\n return toNestedArray(shape, new Uint8Array(size));\n }\n else {\n throw new Error(`Unknown data type ${dtype}`);\n }\n}\nexport function assertNonNegativeIntegerDimensions(shape) {\n shape.forEach(dimSize => {\n assert(Number.isInteger(dimSize) && dimSize >= 0, 
() => `Tensor must have a shape comprised of positive integers but got ` +\n `shape [${shape}].`);\n });\n}\n/**\n * Computes flat index for a given location (multidimentionsal index) in a\n * Tensor/multidimensional array.\n *\n * @param locs Location in the tensor.\n * @param rank Rank of the tensor.\n * @param strides Tensor strides.\n */\nexport function locToIndex(locs, rank, strides) {\n if (rank === 0) {\n return 0;\n }\n else if (rank === 1) {\n return locs[0];\n }\n let index = locs[locs.length - 1];\n for (let i = 0; i < locs.length - 1; ++i) {\n index += strides[i] * locs[i];\n }\n return index;\n}\n/**\n * Computes the location (multidimensional index) in a tensor/multidimentional\n * array for a given flat index.\n *\n * @param index Index in flat array.\n * @param rank Rank of tensor.\n * @param strides Strides of tensor.\n */\nexport function indexToLoc(index, rank, strides) {\n if (rank === 0) {\n return [];\n }\n else if (rank === 1) {\n return [index];\n }\n const locs = new Array(rank);\n for (let i = 0; i < locs.length - 1; ++i) {\n locs[i] = Math.floor(index / strides[i]);\n index -= locs[i] * strides[i];\n }\n locs[locs.length - 1] = index;\n return locs;\n}\n/**\n * This method asserts whether an object is a Promise instance.\n * @param object\n */\n// tslint:disable-next-line: no-any\nexport function isPromise(object) {\n // We chose to not use 'obj instanceOf Promise' for two reasons:\n // 1. It only reliably works for es6 Promise, not other Promise\n // implementations.\n // 2. It doesn't work with framework that uses zone.js. zone.js monkey patch\n // the async calls, so it is possible the obj (patched) is comparing to a\n // pre-patched Promise.\n return object && object.then && typeof object.then === 'function';\n}\n//# sourceMappingURL=util_base.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { isPromise } from './util_base';\n// Expects flags from URL in the format ?tfjsflags=FLAG1:1,FLAG2:true.\nconst TENSORFLOWJS_FLAGS_PREFIX = 'tfjsflags';\n/**\n * The environment contains evaluated flags as well as the registered platform.\n * This is always used as a global singleton and can be retrieved with\n * `tf.env()`.\n *\n * @doc {heading: 'Environment'}\n */\nexport class Environment {\n // tslint:disable-next-line: no-any\n constructor(global) {\n this.global = global;\n this.flags = {};\n this.flagRegistry = {};\n this.urlFlags = {};\n this.populateURLFlags();\n }\n setPlatform(platformName, platform) {\n if (this.platform != null) {\n console.warn(`Platform ${this.platformName} has already been set. ` +\n `Overwriting the platform with ${platform}.`);\n }\n this.platformName = platformName;\n this.platform = platform;\n }\n registerFlag(flagName, evaluationFn, setHook) {\n this.flagRegistry[flagName] = { evaluationFn, setHook };\n // Override the flag value from the URL. 
This has to happen here because the\n // environment is initialized before flags get registered.\n if (this.urlFlags[flagName] != null) {\n const flagValue = this.urlFlags[flagName];\n console.warn(`Setting feature override from URL ${flagName}: ${flagValue}.`);\n this.set(flagName, flagValue);\n }\n }\n async getAsync(flagName) {\n if (flagName in this.flags) {\n return this.flags[flagName];\n }\n this.flags[flagName] = await this.evaluateFlag(flagName);\n return this.flags[flagName];\n }\n get(flagName) {\n if (flagName in this.flags) {\n return this.flags[flagName];\n }\n const flagValue = this.evaluateFlag(flagName);\n if (isPromise(flagValue)) {\n throw new Error(`Flag ${flagName} cannot be synchronously evaluated. ` +\n `Please use getAsync() instead.`);\n }\n this.flags[flagName] = flagValue;\n return this.flags[flagName];\n }\n getNumber(flagName) {\n return this.get(flagName);\n }\n getBool(flagName) {\n return this.get(flagName);\n }\n getFlags() {\n return this.flags;\n }\n // For backwards compatibility.\n get features() {\n return this.flags;\n }\n set(flagName, value) {\n if (this.flagRegistry[flagName] == null) {\n throw new Error(`Cannot set flag ${flagName} as it has not been registered.`);\n }\n this.flags[flagName] = value;\n if (this.flagRegistry[flagName].setHook != null) {\n this.flagRegistry[flagName].setHook(value);\n }\n }\n evaluateFlag(flagName) {\n if (this.flagRegistry[flagName] == null) {\n throw new Error(`Cannot evaluate flag '${flagName}': no evaluation function found.`);\n }\n return this.flagRegistry[flagName].evaluationFn();\n }\n setFlags(flags) {\n this.flags = Object.assign({}, flags);\n }\n reset() {\n this.flags = {};\n this.urlFlags = {};\n this.populateURLFlags();\n }\n populateURLFlags() {\n if (typeof this.global === 'undefined' ||\n typeof this.global.location === 'undefined' ||\n typeof this.global.location.search === 'undefined') {\n return;\n }\n const urlParams = getQueryParams(this.global.location.search);\n if (TENSORFLOWJS_FLAGS_PREFIX in urlParams) {\n const keyValues = urlParams[TENSORFLOWJS_FLAGS_PREFIX].split(',');\n keyValues.forEach(keyValue => {\n const [key, value] = keyValue.split(':');\n this.urlFlags[key] = parseValue(key, value);\n });\n }\n }\n}\nexport function getQueryParams(queryString) {\n const params = {};\n queryString.replace(/[?&]([^=?&]+)(?:=([^&]*))?/g, (s, ...t) => {\n decodeParam(params, t[0], t[1]);\n return t.join('=');\n });\n return params;\n}\nfunction decodeParam(params, name, value) {\n params[decodeURIComponent(name)] = decodeURIComponent(value || '');\n}\nfunction parseValue(flagName, value) {\n value = value.toLowerCase();\n if (value === 'true' || value === 'false') {\n return value === 'true';\n }\n else if (`${+value}` === value) {\n return +value;\n }\n throw new Error(`Could not parse value flag value ${value} for flag ${flagName}.`);\n}\n/**\n * Returns the current environment (a global singleton).\n *\n * The environment object contains the evaluated feature values as well as the\n * active platform.\n *\n * @doc {heading: 'Environment'}\n */\nexport function env() {\n return ENV;\n}\nexport let ENV = null;\nexport function setEnvironmentGlobal(environment) {\n ENV = environment;\n}\n//# sourceMappingURL=environment.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// Note that the identifier globalNameSpace is scoped to this module, but will\n// always resolve to the same global object regardless of how the module is\n// resolved.\n// tslint:disable-next-line:no-any\nlet globalNameSpace;\n// tslint:disable-next-line:no-any\nexport function getGlobalNamespace() {\n if (globalNameSpace == null) {\n // tslint:disable-next-line:no-any\n let ns;\n if (typeof (window) !== 'undefined') {\n ns = window;\n }\n else if (typeof (global) !== 'undefined') {\n ns = global;\n }\n else if (typeof (process) !== 'undefined') {\n ns = process;\n }\n else if (typeof (self) !== 'undefined') {\n ns = self;\n }\n else {\n throw new Error('Could not find a global object');\n }\n globalNameSpace = ns;\n }\n return globalNameSpace;\n}\n// tslint:disable-next-line:no-any\nfunction getGlobalMap() {\n const ns = getGlobalNamespace();\n if (ns._tfGlobals == null) {\n ns._tfGlobals = new Map();\n }\n return ns._tfGlobals;\n}\n/**\n * Returns a globally accessible 'singleton' object.\n *\n * @param key the name of the object\n * @param init a function to initialize to initialize this object\n * the first time it is fetched.\n */\nexport function getGlobal(key, init) {\n const globalMap = getGlobalMap();\n if (globalMap.has(key)) {\n return globalMap.get(key);\n }\n else {\n const singleton = init();\n globalMap.set(key, singleton);\n return globalMap.get(key);\n }\n}\n//# sourceMappingURL=global_util.js.map", "export const Abs = 'Abs';\nexport const Acos = 'Acos';\nexport const Acosh = 'Acosh';\nexport const Add = 'Add';\nexport const AddN = 'AddN';\nexport const All = 'All';\nexport const Any = 'Any';\nexport const ArgMax = 'ArgMax';\nexport const ArgMin = 'ArgMin';\nexport const Asin = 'Asin';\nexport const Asinh = 'Asinh';\nexport const Atan = 'Atan';\nexport const Atanh = 'Atanh';\nexport const Atan2 = 'Atan2';\nexport const AvgPool = 'AvgPool';\nexport const AvgPoolBackprop = 'AvgPoolBackprop';\nexport const AvgPool3D = 'AvgPool3D';\nexport const AvgPool3DBackprop = 'AvgPool3DBackprop';\nexport const BatchMatMul = 'BatchMatMul';\nexport const BatchToSpaceND = 'BatchToSpaceND';\nexport const BroadcastTo = 'BroadcastTo';\nexport const Cast = 'Cast';\nexport const Ceil = 'Ceil';\nexport const ClipByValue = 'ClipByValue';\nexport const Complex = 'Complex';\nexport const Concat = 'Concat';\nexport const Conv2D = 'Conv2D';\nexport const Conv2DBackpropFilter = 'Conv2DBackpropFilter';\nexport const Conv2DBackpropInput = 'Conv2DBackpropInput';\nexport const Conv3D = 'Conv3D';\nexport const Conv3DBackpropFilterV2 = 'Conv3DBackpropFilterV2';\nexport const Conv3DBackpropInputV2 = 'Conv3DBackpropInputV2';\nexport const Cos = 'Cos';\nexport const Cosh = 'Cosh';\nexport const Cumsum = 'Cumsum';\nexport const CropAndResize = 'CropAndResize';\nexport const DepthToSpace = 'DepthToSpace';\nexport const DepthwiseConv2dNative = 
'DepthwiseConv2dNative';\nexport const DepthwiseConv2dNativeBackpropFilter = 'DepthwiseConv2dNativeBackpropFilter';\nexport const DepthwiseConv2dNativeBackpropInput = 'DepthwiseConv2dNativeBackpropInput';\nexport const Diag = 'Diag';\nexport const Dilation2D = 'Dilation2D';\nexport const Dilation2DBackpropInput = 'Dilation2DBackpropInput';\nexport const Dilation2DBackpropFilter = 'Dilation2DBackpropFilter';\nexport const Div = 'Div';\nexport const Elu = 'Elu';\nexport const EluGrad = 'EluGrad';\nexport const Erf = 'Erf';\nexport const Equal = 'Equal';\nexport const Exp = 'Exp';\nexport const Expm1 = 'Expm1';\nexport const FFT = 'FFT';\nexport const Fill = 'Fill';\nexport const FlipLeftRight = 'FlipLeftRight';\nexport const Floor = 'Floor';\nexport const FloorDiv = 'FloorDiv';\nexport const FusedBatchNorm = 'FusedBatchNorm';\nexport const GatherV2 = 'GatherV2';\nexport const GatherNd = 'GatherNd';\nexport const Greater = 'Greater';\nexport const GreaterEqual = 'GreaterEqual';\nexport const Identity = 'Identity';\nexport const IFFT = 'IFFT';\nexport const Imag = 'Imag';\nexport const IsFinite = 'IsFinite';\nexport const IsInf = 'IsInf';\nexport const IsNan = 'IsNan';\nexport const Less = 'Less';\nexport const LessEqual = 'LessEqual';\nexport const LinSpace = 'LinSpace';\nexport const Log = 'Log';\nexport const Log1p = 'Log1p';\nexport const LogicalAnd = 'LogicalAnd';\nexport const LogicalNot = 'LogicalNot';\nexport const LogicalOr = 'LogicalOr';\nexport const LogSoftmax = 'LogSoftmax';\nexport const LRN = 'LRN';\nexport const LRNBackprop = 'LRNBackprop';\nexport const Max = 'Max';\nexport const Maximum = 'Maximum';\nexport const MaxPool = 'MaxPool';\nexport const MaxPoolBackprop = 'MaxPoolBackprop';\nexport const MaxPool3D = 'MaxPool3D';\nexport const MaxPool3DBackprop = 'MaxPool3DBackprop';\nexport const MaxPoolWithArgmax = 'MaxPoolWithArgmax';\nexport const Mean = 'Mean';\nexport const Min = 'Min';\nexport const Minimum = 'Minimum';\nexport const MirrorPad = 'MirrorPad';\nexport const Mod = 'Mod';\nexport const Multiply = 'Multiply';\nexport const Negate = 'Negate';\nexport const NotEqual = 'NotEqual';\nexport const NonMaxSuppressionV3 = 'NonMaxSuppressionV3';\nexport const NonMaxSuppressionV4 = 'NonMaxSuppressionV4';\nexport const NonMaxSuppressionV5 = 'NonMaxSuppressionV5';\nexport const OnesLike = 'OnesLike';\nexport const OneHot = 'OneHot';\nexport const PadV2 = 'PadV2';\nexport const Pool = 'Pool';\nexport const Pow = 'Pow';\nexport const Prelu = 'Prelu';\nexport const Prod = 'Prod';\nexport const Range = 'Range';\nexport const Real = 'Real';\nexport const Reciprocal = 'Reciprocal';\nexport const Relu = 'Relu';\nexport const Reshape = 'Reshape';\nexport const ResizeNearestNeighbor = 'ResizeNearestNeighbor';\nexport const ResizeNearestNeighborGrad = 'ResizeNearestNeighborGrad';\nexport const ResizeBilinear = 'ResizeBilinear';\nexport const ResizeBilinearGrad = 'ResizeBilinearGrad';\nexport const Relu6 = 'Relu6';\nexport const Reverse = 'Reverse';\nexport const Round = 'Round';\nexport const Rsqrt = 'Rsqrt';\nexport const ScatterNd = 'ScatterNd';\nexport const SelectV2 = 'SelectV2';\nexport const Selu = 'Selu';\nexport const Slice = 'Slice';\nexport const Sin = 'Sin';\nexport const Sinh = 'Sinh';\nexport const Sign = 'Sign';\nexport const Sigmoid = 'Sigmoid';\nexport const Softplus = 'Softplus';\nexport const Sqrt = 'Sqrt';\nexport const Sum = 'Sum';\nexport const SpaceToBatchND = 'SpaceToBatchND';\nexport const SplitV = 'SplitV';\nexport const Softmax = 'Softmax';\nexport const 
SquaredDifference = 'SquaredDifference';\nexport const Square = 'Square';\nexport const Sub = 'Sub';\nexport const SparseToDense = 'SparseToDense';\nexport const StridedSlice = 'StridedSlice';\nexport const Tan = 'Tan';\nexport const Tanh = 'Tanh';\nexport const Tile = 'Tile';\nexport const TopK = 'TopK';\nexport const Transpose = 'Transpose';\nexport const Unique = 'Unique';\nexport const Unpack = 'Unpack';\nexport const UnsortedSegmentSum = 'UnsortedSegmentSum';\nexport const ZerosLike = 'ZerosLike';\n/**\n * TensorFlow.js-only kernels\n */\nexport const Step = 'Step';\nexport const FromPixels = 'FromPixels';\nexport const RotateWithOffset = 'RotateWithOffset';\nexport const _FusedMatMul = '_FusedMatMul';\nexport const FusedConv2D = 'FusedConv2D';\nexport const FusedDepthwiseConv2D = 'FusedDepthwiseConv2D';\n//# sourceMappingURL=kernel_names.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from './environment';\nimport { getGlobal } from './global_util';\nconst kernelRegistry = getGlobal('kernelRegistry', () => new Map());\nconst gradRegistry = getGlobal('gradRegistry', () => new Map());\n/**\n * Returns the kernel function (code) associated with the provided names.\n *\n * @param kernelName The official name of the kernel.\n * @param backendName The official name of the backend.\n */\nexport function getKernel(kernelName, backendName) {\n const key = makeKey(kernelName, backendName);\n return kernelRegistry.get(key);\n}\n/**\n * Returns the registered gradient info associated with the provided kernel.\n * @param kernelName The official TF kernel name.\n */\nexport function getGradient(kernelName) {\n return gradRegistry.get(kernelName);\n}\nexport function getKernelsForBackend(backendName) {\n const it = kernelRegistry.entries();\n const result = [];\n while (true) {\n const { done, value } = it.next();\n if (done) {\n break;\n }\n const [key, config] = value;\n const [backend,] = key.split('_');\n if (backend === backendName) {\n result.push(config);\n }\n }\n return result;\n}\n/**\n * Registers the function (forward pass) for the kernel in a global registry.\n *\n * @param config A config object with the following properties:\n * - `kernelName` The official name of the kernel.\n * - `backendName` The official name of the backend.\n * - `kernelFunc` The function to run during the forward pass of the kernel.\n * - `setupFunc` Optional. Gets called once, after the backend initializes.\n * - `disposeFunc` Optional. 
Gets called once, right before the backend is\n * disposed.\n */\nexport function registerKernel(config) {\n const { kernelName, backendName } = config;\n const key = makeKey(kernelName, backendName);\n if (kernelRegistry.has(key)) {\n console.warn(`The kernel '${kernelName}' for backend ` +\n `'${backendName}' is already registered`);\n }\n kernelRegistry.set(key, config);\n}\n/**\n * Registers a gradient function for a given kernel in the global registry,\n * to be used during the back-propagation of that kernel.\n *\n * @param config An object with the following properties:\n * - `kernelName` The name of the kernel that the gradient function is for.\n * - `gradFunc` The function to run during back-propagation.\n */\nexport function registerGradient(config) {\n const { kernelName } = config;\n if (gradRegistry.has(kernelName)) {\n // TODO (yassogba) after 3.0 assess whether we need to keep this gated\n // to debug mode.\n if (env().getBool('DEBUG')) {\n console.warn(`Overriding the gradient for '${kernelName}'`);\n }\n }\n gradRegistry.set(kernelName, config);\n}\n/**\n * Removes the kernel function from the registry.\n *\n * @param kernelName The official name of the kernel.\n * @param backendName The official name of the backend.\n *\n */\nexport function unregisterKernel(kernelName, backendName) {\n const key = makeKey(kernelName, backendName);\n if (!kernelRegistry.has(key)) {\n throw new Error(`The kernel '${kernelName}' for backend ` +\n `'${backendName}' is not registered`);\n }\n kernelRegistry.delete(key);\n}\n/** Removes the registered gradient from the global registry. */\nexport function unregisterGradient(kernelName) {\n if (!gradRegistry.has(kernelName)) {\n throw new Error(`The gradient '${kernelName}' for backend is not registered`);\n }\n gradRegistry.delete(kernelName);\n}\n/**\n * Finds kernels that have already been registered to a backend and re-registers\n * them for a new backend. Useful for registering custom backends.\n * @param registeredBackendName Already registered backend.\n * @param newBackendName New backend.\n */\nexport function copyRegisteredKernels(registeredBackendName, newBackendName) {\n const kernels = getKernelsForBackend(registeredBackendName);\n kernels.forEach(kernelConfig => {\n const newKernelConfig = Object.assign({}, kernelConfig, { backendName: newBackendName });\n registerKernel(newKernelConfig);\n });\n}\nfunction makeKey(kernelName, backendName) {\n return `${backendName}_${kernelName}`;\n}\n//# sourceMappingURL=kernel_registry.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from './environment';\nimport * as base from './util_base';\nexport * from './util_base';\n/**\n * Create typed array for scalar value. 
Used for storing in `DataStorage`.\n */\nexport function createScalarValue(value, dtype) {\n if (dtype === 'string') {\n return encodeString(value);\n }\n return toTypedArray([value], dtype);\n}\nfunction noConversionNeeded(a, dtype) {\n return (a instanceof Float32Array && dtype === 'float32') ||\n (a instanceof Int32Array && dtype === 'int32') ||\n (a instanceof Uint8Array && dtype === 'bool');\n}\nexport function toTypedArray(a, dtype) {\n if (dtype === 'string') {\n throw new Error('Cannot convert a string[] to a TypedArray');\n }\n if (Array.isArray(a)) {\n a = base.flatten(a);\n }\n if (env().getBool('DEBUG')) {\n base.checkConversionForErrors(a, dtype);\n }\n if (noConversionNeeded(a, dtype)) {\n return a;\n }\n if (dtype == null || dtype === 'float32' || dtype === 'complex64') {\n return new Float32Array(a);\n }\n else if (dtype === 'int32') {\n return new Int32Array(a);\n }\n else if (dtype === 'bool') {\n const bool = new Uint8Array(a.length);\n for (let i = 0; i < bool.length; ++i) {\n if (Math.round(a[i]) !== 0) {\n bool[i] = 1;\n }\n }\n return bool;\n }\n else {\n throw new Error(`Unknown data type ${dtype}`);\n }\n}\n/**\n * Returns the current high-resolution time in milliseconds relative to an\n * arbitrary time in the past. It works across different platforms (node.js,\n * browsers).\n *\n * ```js\n * console.log(tf.util.now());\n * ```\n *\n * @doc {heading: 'Util', namespace: 'util'}\n */\nexport function now() {\n return env().platform.now();\n}\n/**\n * Returns a platform-specific implementation of\n * [`fetch`](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API).\n *\n * If `fetch` is defined on the global object (`window`, `process`, etc.),\n * `tf.util.fetch` returns that function.\n *\n * If not, `tf.util.fetch` returns a platform-specific solution.\n *\n * ```js\n * const resource = await tf.util.fetch('https://unpkg.com/@tensorflow/tfjs');\n * // handle response\n * ```\n *\n * @doc {heading: 'Util'}\n */\nexport function fetch(path, requestInits) {\n return env().platform.fetch(path, requestInits);\n}\n/**\n * Encodes the provided string into bytes using the provided encoding scheme.\n *\n * @param s The string to encode.\n * @param encoding The encoding scheme. Defaults to utf-8.\n *\n * @doc {heading: 'Util'}\n */\nexport function encodeString(s, encoding = 'utf-8') {\n encoding = encoding || 'utf-8';\n return env().platform.encode(s, encoding);\n}\n/**\n * Decodes the provided bytes into a string using the provided encoding scheme.\n * @param bytes The bytes to decode.\n *\n * @param encoding The encoding scheme. Defaults to utf-8.\n *\n * @doc {heading: 'Util'}\n */\nexport function decodeString(bytes, encoding = 'utf-8') {\n encoding = encoding || 'utf-8';\n return env().platform.decode(bytes, encoding);\n}\n//# sourceMappingURL=util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as util from './util';\nexport class Profiler {\n constructor(backendTimer, logger) {\n this.backendTimer = backendTimer;\n this.logger = logger;\n if (logger == null) {\n this.logger = new Logger();\n }\n }\n profileKernel(kernelName, inputs, f) {\n let outputs;\n const holdResultWrapperFn = () => {\n outputs = f();\n };\n const timer = this.backendTimer.time(holdResultWrapperFn);\n for (let i = 0; i < outputs.length; i++) {\n const output = outputs[i];\n // Dangling promise here because we don't want to propagate up\n // asynchronicity.\n output.data().then(tensorVals => {\n checkComputationForErrors(tensorVals, output.dtype, kernelName);\n });\n }\n const kernelProfile = {\n kernelName,\n outputs,\n inputs,\n timeMs: timer.then(timing => timing.kernelMs),\n extraInfo: timer.then(timing => timing.getExtraProfileInfo != null ?\n timing.getExtraProfileInfo() :\n '')\n };\n return kernelProfile;\n }\n logKernelProfile(kernelProfile) {\n const { kernelName, outputs, timeMs, inputs, extraInfo } = kernelProfile;\n outputs.forEach(result => {\n Promise.all([result.data(), timeMs, extraInfo]).then(valueContainer => {\n this.logger.logKernelProfile(kernelName, result, valueContainer[0], valueContainer[1], inputs, valueContainer[2]);\n });\n });\n }\n}\nexport function checkComputationForErrors(vals, dtype, kernelName) {\n if (dtype !== 'float32') {\n // Only floating point computations will generate NaN values\n return false;\n }\n for (let i = 0; i < vals.length; i++) {\n const num = vals[i];\n if (isNaN(num) || !isFinite(num)) {\n // Throwing custom exception so behavior is testable.\n console.warn(`Found ${num} in the result of '${kernelName}'`);\n return true;\n }\n }\n return false;\n}\nexport class Logger {\n logKernelProfile(name, result, vals, timeMs, inputs, extraInfo) {\n const time = typeof timeMs === 'number' ? util.rightPad(`${timeMs}ms`, 9) :\n timeMs['error'];\n const paddedName = util.rightPad(name, 25);\n const rank = result.rank;\n const size = result.size;\n const shape = util.rightPad(result.shape.toString(), 14);\n let inputShapesDescription = '';\n for (const name in inputs) {\n const input = inputs[name];\n if (input != null) {\n // The input might be a non-tensor (e.g HTMLImageElement), in which case\n // we claim the output shape as input shape.\n const inputShape = input.shape || result.shape;\n const inputRank = inputShape.length;\n inputShapesDescription +=\n `${name}: ${inputRank}D ${inputRank > 0 ? inputShape : ''} `;\n }\n }\n console.log(`%c${paddedName}\\t%c${time}\\t%c${rank}D ${shape}\\t%c${size}\\t%c${inputShapesDescription}\\t%c${extraInfo}`, 'font-weight:bold', 'color:red', 'color:blue', 'color: orange', 'color: green', 'color: steelblue');\n }\n}\n//# sourceMappingURL=profiler.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as util from './util';\n/**\n * Computes a list of TapeNodes that connect x to y, filtering everything else\n * out and preserving the order of the original tape elements.\n *\n * @param tape The tape elements to filter.\n * @param xs The input Tensors.\n * @param y The output Tensor.\n */\nexport function getFilteredNodesXToY(tape, xs, y) {\n // Forward pass to compute all the nodes and Tensors that are transitively a\n // function of x.\n const tensorsFromX = {};\n const nodesFromX = {};\n for (let i = 0; i < xs.length; i++) {\n tensorsFromX[xs[i].id] = true;\n }\n for (let i = 0; i < tape.length; i++) {\n const node = tape[i];\n const nodeInputs = node.inputs;\n for (const inputName in nodeInputs) {\n const input = nodeInputs[inputName];\n let anyInputFromX = false;\n for (let j = 0; j < xs.length; j++) {\n if (tensorsFromX[input.id]) {\n node.outputs.forEach(output => tensorsFromX[output.id] = true);\n anyInputFromX = true;\n nodesFromX[node.id] = true;\n break;\n }\n }\n if (anyInputFromX) {\n break;\n }\n }\n }\n // Backward pass to find all of the nodes and Tensors that lead to y.\n const tensorsLeadToY = {};\n tensorsLeadToY[y.id] = true;\n const nodesToY = {};\n for (let i = tape.length - 1; i >= 0; i--) {\n const node = tape[i];\n const nodeInputs = node.inputs;\n // If any of the outputs lead to y, mark all of the inputs as leading to y.\n for (let j = 0; j < node.outputs.length; j++) {\n if (tensorsLeadToY[node.outputs[j].id]) {\n for (const inputName in nodeInputs) {\n tensorsLeadToY[nodeInputs[inputName].id] = true;\n nodesToY[node.id] = true;\n }\n break;\n }\n }\n }\n // Return the paths that come from x and lead to y.\n const filteredTape = [];\n for (let i = 0; i < tape.length; i++) {\n const node = tape[i];\n if (nodesFromX[node.id] && nodesToY[node.id]) {\n // Prune the inputs from the node that aren't a function of x.\n const prunedInputs = {};\n for (const inputName in node.inputs) {\n const nodeInput = node.inputs[inputName];\n if (tensorsFromX[nodeInput.id]) {\n prunedInputs[inputName] = nodeInput;\n }\n }\n // Copy the node and overwrite inputsAndArgs to the pruned version.\n const prunedNode = Object.assign({}, node);\n prunedNode.inputs = prunedInputs;\n prunedNode.outputs = node.outputs;\n filteredTape.push(prunedNode);\n }\n }\n return filteredTape;\n}\n/**\n * Backpropagate gradients through the filtered TapeNodes.\n *\n * @param tensorAccumulatedGradientMap A map of Tensor to its gradient. 
This map\n * is mutated by this method.\n * @param filteredTape The filtered TapeNodes to backprop through.\n */\nexport function backpropagateGradients(tensorAccumulatedGradientMap, filteredTape, tidy, add) {\n // Walk the tape backward and keep a map of Tensor to its gradient.\n for (let i = filteredTape.length - 1; i >= 0; i--) {\n const node = filteredTape[i];\n const dys = [];\n node.outputs.forEach(o => {\n const gradTensor = tensorAccumulatedGradientMap[o.id];\n if (gradTensor != null) {\n dys.push(gradTensor);\n }\n else {\n // This particular output is not in the back-propagation subgraph, so it\n // does not affect the final output, thus we put null for its dy.\n dys.push(null);\n }\n });\n if (node.gradient == null) {\n throw new Error(`Cannot compute gradient: gradient function not found ` +\n `for ${node.kernelName}.`);\n }\n // Backprop dy through this node and accumulate gradients over the inputs.\n const inputGradients = node.gradient(dys);\n for (const inputName in node.inputs) {\n if (!(inputName in inputGradients)) {\n throw new Error(`Cannot backprop through input ${inputName}. ` +\n `Available gradients found: ${Object.keys(inputGradients)}.`);\n }\n // Call the gradient function.\n const dx = tidy(() => inputGradients[inputName]());\n if (dx.dtype !== 'float32') {\n throw new Error(`Error in gradient for op ${node.kernelName}. The gradient of input ` +\n `${inputName} must have 'float32' dtype, but has '${dx.dtype}'`);\n }\n const x = node.inputs[inputName];\n if (!util.arraysEqual(dx.shape, x.shape)) {\n throw new Error(`Error in gradient for op ${node.kernelName}. The gradient of input ` +\n `'${inputName}' has shape '${dx.shape}', which does not match ` +\n `the shape of the input '${x.shape}'`);\n }\n if (tensorAccumulatedGradientMap[x.id] == null) {\n tensorAccumulatedGradientMap[x.id] = dx;\n }\n else {\n const curGradient = tensorAccumulatedGradientMap[x.id];\n tensorAccumulatedGradientMap[x.id] = add(curGradient, dx);\n curGradient.dispose();\n }\n }\n }\n}\n//# sourceMappingURL=tape.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { computeStrides, isString, rightPad, sizeFromShape } from './util';\n// Maximum number of values before we decide to show ellipsis.\nconst FORMAT_LIMIT_NUM_VALS = 20;\n// Number of first and last values to show when displaying a, b,...,y, z.\nconst FORMAT_NUM_FIRST_LAST_VALS = 3;\n// Number of significant digits to show.\nconst FORMAT_NUM_SIG_DIGITS = 7;\nexport function tensorToString(vals, shape, dtype, verbose) {\n const strides = computeStrides(shape);\n const padPerCol = computeMaxSizePerColumn(vals, shape, dtype, strides);\n const rank = shape.length;\n const valsLines = subTensorToString(vals, shape, dtype, strides, padPerCol);\n const lines = ['Tensor'];\n if (verbose) {\n lines.push(` dtype: ${dtype}`);\n lines.push(` rank: ${rank}`);\n lines.push(` shape: [${shape}]`);\n lines.push(` values:`);\n }\n lines.push(valsLines.map(l => ' ' + l).join('\\n'));\n return lines.join('\\n');\n}\nfunction computeMaxSizePerColumn(vals, shape, dtype, strides) {\n const n = sizeFromShape(shape);\n const numCols = strides[strides.length - 1];\n const padPerCol = new Array(numCols).fill(0);\n const rank = shape.length;\n const valuesOrTuples = dtype === 'complex64' ? createComplexTuples(vals) : vals;\n if (rank > 1) {\n for (let row = 0; row < n / numCols; row++) {\n const offset = row * numCols;\n for (let j = 0; j < numCols; j++) {\n padPerCol[j] = Math.max(padPerCol[j], valToString(valuesOrTuples[offset + j], 0, dtype).length);\n }\n }\n }\n return padPerCol;\n}\nfunction valToString(val, pad, dtype) {\n let valStr;\n if (Array.isArray(val)) {\n valStr = `${parseFloat(val[0].toFixed(FORMAT_NUM_SIG_DIGITS))} + ` +\n `${parseFloat(val[1].toFixed(FORMAT_NUM_SIG_DIGITS))}j`;\n }\n else if (isString(val)) {\n valStr = `'${val}'`;\n }\n else if (dtype === 'bool') {\n valStr = boolNumToString(val);\n }\n else {\n valStr = parseFloat(val.toFixed(FORMAT_NUM_SIG_DIGITS)).toString();\n }\n return rightPad(valStr, pad);\n}\nfunction boolNumToString(v) {\n return v === 0 ? 'false' : 'true';\n}\nfunction subTensorToString(vals, shape, dtype, strides, padPerCol, isLast = true) {\n const storagePerElement = dtype === 'complex64' ? 
2 : 1;\n const size = shape[0];\n const rank = shape.length;\n if (rank === 0) {\n if (dtype === 'complex64') {\n const complexTuple = createComplexTuples(vals);\n return [valToString(complexTuple[0], 0, dtype)];\n }\n if (dtype === 'bool') {\n return [boolNumToString(vals[0])];\n }\n return [vals[0].toString()];\n }\n if (rank === 1) {\n if (size > FORMAT_LIMIT_NUM_VALS) {\n const firstValsSize = FORMAT_NUM_FIRST_LAST_VALS * storagePerElement;\n let firstVals = Array.from(vals.slice(0, firstValsSize));\n let lastVals = Array.from(vals.slice((size - FORMAT_NUM_FIRST_LAST_VALS) * storagePerElement, size * storagePerElement));\n if (dtype === 'complex64') {\n firstVals = createComplexTuples(firstVals);\n lastVals = createComplexTuples(lastVals);\n }\n return [\n '[' +\n firstVals.map((x, i) => valToString(x, padPerCol[i], dtype))\n .join(', ') +\n ', ..., ' +\n lastVals\n .map((x, i) => valToString(x, padPerCol[size - FORMAT_NUM_FIRST_LAST_VALS + i], dtype))\n .join(', ') +\n ']'\n ];\n }\n const displayVals = dtype === 'complex64' ? createComplexTuples(vals) :\n Array.from(vals);\n return [\n '[' +\n displayVals.map((x, i) => valToString(x, padPerCol[i], dtype))\n .join(', ') +\n ']'\n ];\n }\n // The array is rank 2 or more.\n const subshape = shape.slice(1);\n const substrides = strides.slice(1);\n const stride = strides[0] * storagePerElement;\n const lines = [];\n if (size > FORMAT_LIMIT_NUM_VALS) {\n for (let i = 0; i < FORMAT_NUM_FIRST_LAST_VALS; i++) {\n const start = i * stride;\n const end = start + stride;\n lines.push(...subTensorToString(vals.slice(start, end), subshape, dtype, substrides, padPerCol, false /* isLast */));\n }\n lines.push('...');\n for (let i = size - FORMAT_NUM_FIRST_LAST_VALS; i < size; i++) {\n const start = i * stride;\n const end = start + stride;\n lines.push(...subTensorToString(vals.slice(start, end), subshape, dtype, substrides, padPerCol, i === size - 1 /* isLast */));\n }\n }\n else {\n for (let i = 0; i < size; i++) {\n const start = i * stride;\n const end = start + stride;\n lines.push(...subTensorToString(vals.slice(start, end), subshape, dtype, substrides, padPerCol, i === size - 1 /* isLast */));\n }\n }\n const sep = rank === 2 ? ',' : '';\n lines[0] = '[' + lines[0] + sep;\n for (let i = 1; i < lines.length - 1; i++) {\n lines[i] = ' ' + lines[i] + sep;\n }\n let newLineSep = ',\\n';\n for (let i = 2; i < rank; i++) {\n newLineSep += '\\n';\n }\n lines[lines.length - 1] =\n ' ' + lines[lines.length - 1] + ']' + (isLast ? '' : newLineSep);\n return lines;\n}\nfunction createComplexTuples(vals) {\n const complexTuples = [];\n for (let i = 0; i < vals.length; i += 2) {\n complexTuples.push([vals[i], vals[i + 1]]);\n }\n return complexTuples;\n}\n//# sourceMappingURL=tensor_format.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { tensorToString } from './tensor_format';\nimport * as util from './util';\nimport { computeStrides, toNestedArray } from './util';\n/**\n * A mutable object, similar to `tf.Tensor`, that allows users to set values\n * at locations before converting to an immutable `tf.Tensor`.\n *\n * See `tf.buffer` for creating a tensor buffer.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nexport class TensorBuffer {\n constructor(shape, dtype, values) {\n this.dtype = dtype;\n this.shape = shape.slice();\n this.size = util.sizeFromShape(shape);\n if (values != null) {\n const n = values.length;\n util.assert(n === this.size, () => `Length of values '${n}' does not match the size ` +\n `inferred by the shape '${this.size}'.`);\n }\n if (dtype === 'complex64') {\n throw new Error(`complex64 dtype TensorBuffers are not supported. Please create ` +\n `a TensorBuffer for the real and imaginary parts separately and ` +\n `call tf.complex(real, imag).`);\n }\n this.values = values || util.getArrayFromDType(dtype, this.size);\n this.strides = computeStrides(shape);\n }\n /**\n * Sets a value in the buffer at a given location.\n *\n * @param value The value to set.\n * @param locs The location indices.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\n set(value, ...locs) {\n if (locs.length === 0) {\n locs = [0];\n }\n util.assert(locs.length === this.rank, () => `The number of provided coordinates (${locs.length}) must ` +\n `match the rank (${this.rank})`);\n const index = this.locToIndex(locs);\n this.values[index] = value;\n }\n /**\n * Returns the value in the buffer at the provided location.\n *\n * @param locs The location indices.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\n get(...locs) {\n if (locs.length === 0) {\n locs = [0];\n }\n let i = 0;\n for (const loc of locs) {\n if (loc < 0 || loc >= this.shape[i]) {\n const msg = `Requested out of range element at ${locs}. 
` +\n ` Buffer shape=${this.shape}`;\n throw new Error(msg);\n }\n i++;\n }\n let index = locs[locs.length - 1];\n for (let i = 0; i < locs.length - 1; ++i) {\n index += this.strides[i] * locs[i];\n }\n return this.values[index];\n }\n locToIndex(locs) {\n if (this.rank === 0) {\n return 0;\n }\n else if (this.rank === 1) {\n return locs[0];\n }\n let index = locs[locs.length - 1];\n for (let i = 0; i < locs.length - 1; ++i) {\n index += this.strides[i] * locs[i];\n }\n return index;\n }\n indexToLoc(index) {\n if (this.rank === 0) {\n return [];\n }\n else if (this.rank === 1) {\n return [index];\n }\n const locs = new Array(this.shape.length);\n for (let i = 0; i < locs.length - 1; ++i) {\n locs[i] = Math.floor(index / this.strides[i]);\n index -= locs[i] * this.strides[i];\n }\n locs[locs.length - 1] = index;\n return locs;\n }\n get rank() {\n return this.shape.length;\n }\n /**\n * Creates an immutable `tf.Tensor` object from the buffer.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\n toTensor() {\n return trackerFn().makeTensor(this.values, this.shape, this.dtype);\n }\n}\n// For tracking tensor creation and disposal.\nlet trackerFn = null;\n// Used by chaining methods to call into ops.\nlet opHandler = null;\n// Used to warn about deprecated methods.\nlet deprecationWarningFn = null;\n// This here so that we can use this method on dev branches and keep the\n// functionality at master.\n// tslint:disable-next-line:no-unused-expression\n[deprecationWarningFn];\n/**\n * An external consumer can register itself as the tensor tracker. This way\n * the Tensor class can notify the tracker for every tensor created and\n * disposed.\n */\nexport function setTensorTracker(fn) {\n trackerFn = fn;\n}\n/**\n * An external consumer can register itself as the op handler. This way the\n * Tensor class can have chaining methods that call into ops via the op\n * handler.\n */\nexport function setOpHandler(handler) {\n opHandler = handler;\n}\n/**\n * Sets the deprecation warning function to be used by this file. This way the\n * Tensor class can be a leaf but still use the environment.\n */\nexport function setDeprecationWarningFn(fn) {\n deprecationWarningFn = fn;\n}\n/**\n * A `tf.Tensor` object represents an immutable, multidimensional array of\n * numbers that has a shape and a data type.\n *\n * See `tf.tensor` for details on how to create a `tf.Tensor`.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nexport class Tensor {\n constructor(shape, dtype, dataId, id) {\n /** Whether this tensor has been globally kept. */\n this.kept = false;\n this.isDisposedInternal = false;\n this.shape = shape.slice();\n this.dtype = dtype || 'float32';\n this.size = util.sizeFromShape(shape);\n this.strides = computeStrides(shape);\n this.dataId = dataId;\n this.id = id;\n this.rankType = (this.rank < 5 ? this.rank.toString() : 'higher');\n }\n get rank() {\n return this.shape.length;\n }\n /**\n * Returns a promise of `tf.TensorBuffer` that holds the underlying data.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n async buffer() {\n const vals = await this.data();\n return opHandler.buffer(this.shape, this.dtype, vals);\n }\n /**\n * Returns a `tf.TensorBuffer` that holds the underlying data.\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n bufferSync() {\n return opHandler.buffer(this.shape, this.dtype, this.dataSync());\n }\n /**\n * Returns the tensor data as a nested array. 
The transfer of data is done\n * asynchronously.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n async array() {\n const vals = await this.data();\n return toNestedArray(this.shape, vals);\n }\n /**\n * Returns the tensor data as a nested array. The transfer of data is done\n * synchronously.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n arraySync() {\n return toNestedArray(this.shape, this.dataSync());\n }\n /**\n * Asynchronously downloads the values from the `tf.Tensor`. Returns a\n * promise of `TypedArray` that resolves when the computation has finished.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n async data() {\n this.throwIfDisposed();\n const data = trackerFn().read(this.dataId);\n if (this.dtype === 'string') {\n const bytes = await data;\n try {\n return bytes.map(b => util.decodeString(b));\n }\n catch (_a) {\n throw new Error('Failed to decode the string bytes into utf-8. ' +\n 'To get the original bytes, call tensor.bytes().');\n }\n }\n return data;\n }\n /**\n * Synchronously downloads the values from the `tf.Tensor`. This blocks the\n * UI thread until the values are ready, which can cause performance issues.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n dataSync() {\n this.throwIfDisposed();\n const data = trackerFn().readSync(this.dataId);\n if (this.dtype === 'string') {\n try {\n return data.map(b => util.decodeString(b));\n }\n catch (_a) {\n throw new Error('Failed to decode the string bytes into utf-8. ' +\n 'To get the original bytes, call tensor.bytes().');\n }\n }\n return data;\n }\n /** Returns the underlying bytes of the tensor's data. */\n async bytes() {\n this.throwIfDisposed();\n const data = await trackerFn().read(this.dataId);\n if (this.dtype === 'string') {\n return data;\n }\n else {\n return new Uint8Array(data.buffer);\n }\n }\n /**\n * Disposes `tf.Tensor` from memory.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n dispose() {\n if (this.isDisposed) {\n return;\n }\n trackerFn().disposeTensor(this);\n this.isDisposedInternal = true;\n }\n get isDisposed() {\n return this.isDisposedInternal;\n }\n throwIfDisposed() {\n if (this.isDisposed) {\n throw new Error(`Tensor is disposed.`);\n }\n }\n /**\n * Prints the `tf.Tensor`. See `tf.print` for details.\n *\n * @param verbose Whether to print verbose information about the tensor,\n * including dtype and size.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n print(verbose = false) {\n return opHandler.print(this, verbose);\n }\n /**\n * Returns a copy of the tensor. See `tf.clone` for details.\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n clone() {\n this.throwIfDisposed();\n return opHandler.clone(this);\n }\n /**\n * Returns a human-readable description of the tensor. 
Useful for logging.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n toString(verbose = false) {\n const vals = this.dataSync();\n return tensorToString(vals, this.shape, this.dtype, verbose);\n }\n cast(dtype) {\n this.throwIfDisposed();\n return opHandler.cast(this, dtype);\n }\n variable(trainable = true, name, dtype) {\n this.throwIfDisposed();\n return trackerFn().makeVariable(this, trainable, name, dtype);\n }\n}\nObject.defineProperty(Tensor, Symbol.hasInstance, {\n value: (instance) => {\n // Implementation note: we should use properties of the object that will be\n // defined before the constructor body has finished executing (methods).\n // This is because when this code is transpiled by babel, babel will call\n // classCallCheck before the constructor body is run.\n // See https://github.com/tensorflow/tfjs/issues/3384 for backstory.\n return !!instance && instance.data != null && instance.dataSync != null &&\n instance.throwIfDisposed != null;\n }\n});\n/**\n * A mutable `tf.Tensor`, useful for persisting state, e.g. for training.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nexport class Variable extends Tensor {\n constructor(initialValue, trainable, name, tensorId) {\n super(initialValue.shape, initialValue.dtype, initialValue.dataId, tensorId);\n this.trainable = trainable;\n this.name = name;\n }\n /**\n * Assign a new `tf.Tensor` to this variable. The new `tf.Tensor` must have\n * the same shape and dtype as the old `tf.Tensor`.\n *\n * @param newValue New tensor to be assigned to this variable.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n assign(newValue) {\n if (newValue.dtype !== this.dtype) {\n throw new Error(`dtype of the new value (${newValue.dtype}) and ` +\n `previous value (${this.dtype}) must match`);\n }\n if (!util.arraysEqual(newValue.shape, this.shape)) {\n throw new Error(`shape of the new value (${newValue.shape}) and ` +\n `previous value (${this.shape}) must match`);\n }\n trackerFn().disposeTensor(this);\n this.dataId = newValue.dataId;\n trackerFn().incRef(this, null /* backend */);\n }\n dispose() {\n trackerFn().disposeVariable(this);\n this.isDisposedInternal = true;\n }\n}\nObject.defineProperty(Variable, Symbol.hasInstance, {\n value: (instance) => {\n return instance instanceof Tensor && instance.assign != null &&\n instance.assign instanceof Function;\n }\n});\n//# sourceMappingURL=tensor.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport var Rank;\n(function (Rank) {\n Rank[\"R0\"] = \"R0\";\n Rank[\"R1\"] = \"R1\";\n Rank[\"R2\"] = \"R2\";\n Rank[\"R3\"] = \"R3\";\n Rank[\"R4\"] = \"R4\";\n Rank[\"R5\"] = \"R5\";\n Rank[\"R6\"] = \"R6\";\n})(Rank || (Rank = {}));\n// Looks for upcasting types. 
Used, for example, in operations with mixed dtype\n// inputs.\nvar UpcastInt32AndMap;\n(function (UpcastInt32AndMap) {\n UpcastInt32AndMap[\"float32\"] = \"float32\";\n UpcastInt32AndMap[\"int32\"] = \"int32\";\n UpcastInt32AndMap[\"bool\"] = \"int32\";\n UpcastInt32AndMap[\"complex64\"] = \"complex64\";\n})(UpcastInt32AndMap || (UpcastInt32AndMap = {}));\nvar UpcastBoolAndMap;\n(function (UpcastBoolAndMap) {\n UpcastBoolAndMap[\"float32\"] = \"float32\";\n UpcastBoolAndMap[\"int32\"] = \"int32\";\n UpcastBoolAndMap[\"bool\"] = \"bool\";\n UpcastBoolAndMap[\"complex64\"] = \"complex64\";\n})(UpcastBoolAndMap || (UpcastBoolAndMap = {}));\nvar UpcastFloat32AndMap;\n(function (UpcastFloat32AndMap) {\n UpcastFloat32AndMap[\"float32\"] = \"float32\";\n UpcastFloat32AndMap[\"int32\"] = \"float32\";\n UpcastFloat32AndMap[\"bool\"] = \"float32\";\n UpcastFloat32AndMap[\"complex64\"] = \"complex64\";\n})(UpcastFloat32AndMap || (UpcastFloat32AndMap = {}));\nvar UpcastComplex64AndMap;\n(function (UpcastComplex64AndMap) {\n UpcastComplex64AndMap[\"float32\"] = \"complex64\";\n UpcastComplex64AndMap[\"int32\"] = \"complex64\";\n UpcastComplex64AndMap[\"bool\"] = \"complex64\";\n UpcastComplex64AndMap[\"complex64\"] = \"complex64\";\n})(UpcastComplex64AndMap || (UpcastComplex64AndMap = {}));\nconst upcastTypeMap = {\n 'float32': UpcastFloat32AndMap,\n 'int32': UpcastInt32AndMap,\n 'bool': UpcastBoolAndMap,\n 'complex64': UpcastComplex64AndMap\n};\nexport function upcastType(typeA, typeB) {\n if (typeA === 'string' || typeB === 'string') {\n if (typeA === 'string' && typeB === 'string') {\n return 'string';\n }\n throw new Error(`Can not upcast ${typeA} with ${typeB}`);\n }\n return upcastTypeMap[typeA][typeB];\n}\n/** Returns the output type after summation. */\nexport function sumOutType(type) {\n return upcastType(type, 'int32');\n}\n//# sourceMappingURL=types.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Tensor } from './tensor';\nimport { upcastType } from './types';\nimport { assert } from './util';\nexport function makeTypesMatch(a, b) {\n if (a.dtype === b.dtype) {\n return [a, b];\n }\n const dtype = upcastType(a.dtype, b.dtype);\n return [a.cast(dtype), b.cast(dtype)];\n}\nexport function assertTypesMatch(a, b) {\n assert(a.dtype === b.dtype, () => `The dtypes of the first(${a.dtype}) and` +\n ` second(${b.dtype}) input must match`);\n}\nexport function isTensorInList(tensor, tensorList) {\n return tensorList.some(x => x.id === tensor.id);\n}\n/**\n * Extracts any `Tensor`s found within the provided object.\n *\n * @param container an object that may be a `Tensor` or may directly contain\n * `Tensor`s, such as a `Tensor[]` or `{key: Tensor, ...}`. In general it\n * is safe to pass any object here, except that `Promise`s are not\n * supported.\n * @returns An array of `Tensors` found within the passed object. 
If the\n * argument is simply a `Tensor', a list containing that `Tensor` is\n * returned. If the object is not a `Tensor` or does not\n * contain `Tensors`, an empty list is returned.\n */\nexport function getTensorsInContainer(result) {\n const list = [];\n const seen = new Set();\n walkTensorContainer(result, list, seen);\n return list;\n}\nfunction walkTensorContainer(container, list, seen) {\n if (container == null) {\n return;\n }\n if (container instanceof Tensor) {\n list.push(container);\n return;\n }\n if (!isIterable(container)) {\n return;\n }\n // Iteration over keys works also for arrays.\n const iterable = container;\n for (const k in iterable) {\n const val = iterable[k];\n if (!seen.has(val)) {\n seen.add(val);\n walkTensorContainer(val, list, seen);\n }\n }\n}\n// tslint:disable-next-line:no-any\nfunction isIterable(obj) {\n return Array.isArray(obj) || typeof obj === 'object';\n}\n//# sourceMappingURL=tensor_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { KernelBackend } from './backends/backend';\nimport { Environment, setEnvironmentGlobal } from './environment';\nimport { getGlobalNamespace } from './global_util';\nimport { Add, Cast } from './kernel_names';\nimport { getGradient, getKernel, getKernelsForBackend } from './kernel_registry';\nimport { Profiler } from './profiler';\nimport { backpropagateGradients, getFilteredNodesXToY } from './tape';\nimport { setTensorTracker, Tensor, Variable } from './tensor';\nimport { getTensorsInContainer } from './tensor_util';\nimport * as util from './util';\nimport { bytesFromStringArray, makeOnesTypedArray, now, sizeFromShape } from './util';\nclass EngineState {\n constructor() {\n // Public since optimizers will use it.\n this.registeredVariables = {};\n this.nextTapeNodeId = 0;\n this.numBytes = 0;\n this.numTensors = 0;\n this.numStringTensors = 0;\n this.numDataBuffers = 0;\n // Number of nested tf.grad() statements when computing higher-order\n // gradients. E.g. `1` for first-order gradients and `2` for second-order\n // gradients. Used to track if the tape should be removed after a backprop.\n this.gradientDepth = 0;\n // Number of nested kernel calls. When kernel depth is greater than 1, we turn\n // off the tape.\n this.kernelDepth = 0;\n this.scopeStack = [];\n /**\n * Keeps track of the number of data moves during a kernel execution. 
We\n * maintain a stack since kernels can call other kernels, recursively.\n */\n this.numDataMovesStack = [];\n this.nextScopeId = 0;\n this.tensorInfo = new WeakMap();\n this.profiling = false;\n this.activeProfile = { newBytes: 0, newTensors: 0, peakBytes: 0, kernels: [], result: null };\n }\n dispose() {\n for (const variableName in this.registeredVariables) {\n this.registeredVariables[variableName].dispose();\n }\n }\n}\nexport class Engine {\n constructor(ENV) {\n this.ENV = ENV;\n this.registry = {};\n this.registryFactory = {};\n this.pendingBackendInitId = 0;\n this.state = new EngineState();\n }\n async ready() {\n if (this.pendingBackendInit != null) {\n return this.pendingBackendInit.then(() => { });\n }\n if (this.backendInstance != null) {\n return;\n }\n const sortedBackends = this.getSortedBackends();\n for (let i = 0; i < sortedBackends.length; i++) {\n const backendName = sortedBackends[i];\n const success = await this.initializeBackend(backendName).success;\n if (success) {\n await this.setBackend(backendName);\n return;\n }\n }\n throw new Error(`Could not initialize any backends, all backend initializations ` +\n `failed.`);\n }\n get backend() {\n if (this.pendingBackendInit != null) {\n throw new Error(`Backend '${this.backendName}' has not yet been initialized. Make ` +\n `sure to await tf.ready() or await tf.setBackend() before calling ` +\n `other methods`);\n }\n if (this.backendInstance == null) {\n const { name, asyncInit } = this.initializeBackendsAndReturnBest();\n if (asyncInit) {\n throw new Error(`The highest priority backend '${name}' has not yet been ` +\n `initialized. Make sure to await tf.ready() or ` +\n `await tf.setBackend() before calling other methods`);\n }\n this.setBackend(name);\n }\n return this.backendInstance;\n }\n backendNames() {\n return Object.keys(this.registryFactory);\n }\n findBackend(backendName) {\n if (!(backendName in this.registry)) {\n // If the backend hasn't been initialized but we have a registry entry for\n // it, initialize it and return it.\n if (backendName in this.registryFactory) {\n const { asyncInit } = this.initializeBackend(backendName);\n if (asyncInit) {\n // Backend is not ready yet.\n return null;\n }\n }\n else {\n return null;\n }\n }\n return this.registry[backendName];\n }\n findBackendFactory(backendName) {\n if (!(backendName in this.registryFactory)) {\n return null;\n }\n return this.registryFactory[backendName].factory;\n }\n registerBackend(backendName, factory, priority = 1) {\n if (backendName in this.registryFactory) {\n console.warn(`${backendName} backend was already registered. ` +\n `Reusing existing backend factory.`);\n return false;\n }\n this.registryFactory[backendName] = { factory, priority };\n return true;\n }\n async setBackend(backendName) {\n if (this.registryFactory[backendName] == null) {\n throw new Error(`Backend name '${backendName}' not found in registry`);\n }\n this.backendName = backendName;\n if (this.registry[backendName] == null) {\n this.backendInstance = null;\n const { success, asyncInit } = this.initializeBackend(backendName);\n const result = asyncInit ? 
await success : success;\n if (!result) {\n return false;\n }\n }\n this.backendInstance = this.registry[backendName];\n this.setupRegisteredKernels();\n // Reset the profiler.\n this.profiler = new Profiler(this.backendInstance);\n return true;\n }\n setupRegisteredKernels() {\n const kernels = getKernelsForBackend(this.backendName);\n kernels.forEach(kernel => {\n if (kernel.setupFunc != null) {\n kernel.setupFunc(this.backendInstance);\n }\n });\n }\n disposeRegisteredKernels(backendName) {\n const kernels = getKernelsForBackend(backendName);\n kernels.forEach(kernel => {\n if (kernel.disposeFunc != null) {\n kernel.disposeFunc(this.registry[backendName]);\n }\n });\n }\n /**\n * Initializes a backend by looking up the backend name in the factory\n * registry and calling the factory method. Returns a boolean representing\n * whether the initialization of the backend suceeded. Throws an error if\n * there is no backend in the factory registry.\n */\n initializeBackend(backendName) {\n const registryFactoryEntry = this.registryFactory[backendName];\n if (registryFactoryEntry == null) {\n throw new Error(`Cannot initialize backend ${backendName}, no registration found.`);\n }\n try {\n const backend = registryFactoryEntry.factory();\n /* Test if the factory returns a promise.\n Done in a more liberal way than\n previous 'Promise.resolve(backend)===backend'\n as we needed to account for custom Promise\n implementations (e.g. Angular) */\n if (backend && !(backend instanceof KernelBackend)\n && typeof backend.then === 'function') {\n const promiseId = ++this.pendingBackendInitId;\n const success = backend\n .then(backendInstance => {\n // Outdated promise. Another backend was set in the meantime.\n if (promiseId < this.pendingBackendInitId) {\n return false;\n }\n this.registry[backendName] = backendInstance;\n this.pendingBackendInit = null;\n return true;\n })\n .catch(err => {\n // Outdated promise. Another backend was set in the meantime.\n if (promiseId < this.pendingBackendInitId) {\n return false;\n }\n this.pendingBackendInit = null;\n console.warn(`Initialization of backend ${backendName} failed`);\n console.warn(err.stack || err.message);\n return false;\n });\n this.pendingBackendInit = success;\n return { success, asyncInit: true };\n }\n else {\n this.registry[backendName] = backend;\n return { success: true, asyncInit: false };\n }\n }\n catch (err) {\n console.warn(`Initialization of backend ${backendName} failed`);\n console.warn(err.stack || err.message);\n return { success: false, asyncInit: false };\n }\n }\n removeBackend(backendName) {\n if (!(backendName in this.registryFactory)) {\n throw new Error(`${backendName} backend not found in registry`);\n }\n if (this.backendName === backendName && this.pendingBackendInit != null) {\n // There is a pending promise of the backend we want to remove. 
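// Hedged sketch of the backend lifecycle implemented above. 'my-backend' and
// MyBackend are placeholders for a real tf.KernelBackend implementation; the
// factory may return the backend directly or a Promise of it (async init).
import * as tf from '@tensorflow/tfjs-core';

tf.registerBackend('my-backend', async () => {
  const backend = new MyBackend();   // assumed to extend tf.KernelBackend
  await backend.init();              // hypothetical async setup step
  return backend;
}, /* priority */ 100);

await tf.setBackend('my-backend');   // resolves to false if initialization failed
await tf.ready();                    // waits for any pending backend init
console.log(tf.getBackend());        // 'my-backend'

tf.removeBackend('my-backend');      // disposes the backend and its kernels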
Make it\n // obsolete.\n this.pendingBackendInitId++;\n }\n if (backendName in this.registry) {\n this.disposeRegisteredKernels(backendName);\n this.registry[backendName].dispose();\n delete this.registry[backendName];\n }\n delete this.registryFactory[backendName];\n // Unset the backend if it is active.\n if (this.backendName === backendName) {\n this.pendingBackendInit = null;\n this.backendName = null;\n this.backendInstance = null;\n }\n }\n getSortedBackends() {\n if (Object.keys(this.registryFactory).length === 0) {\n throw new Error('No backend found in registry.');\n }\n return Object.keys(this.registryFactory).sort((a, b) => {\n // Highest priority comes first.\n return this.registryFactory[b].priority -\n this.registryFactory[a].priority;\n });\n }\n initializeBackendsAndReturnBest() {\n const sortedBackends = this.getSortedBackends();\n for (let i = 0; i < sortedBackends.length; i++) {\n const backendName = sortedBackends[i];\n const { success, asyncInit } = this.initializeBackend(backendName);\n if (asyncInit || success) {\n return { name: backendName, asyncInit };\n }\n }\n throw new Error(`Could not initialize any backends, all backend initializations ` +\n `failed.`);\n }\n moveData(backend, dataId) {\n const info = this.state.tensorInfo.get(dataId);\n const srcBackend = info.backend;\n const values = this.readSync(dataId);\n // Delete the tensor from the old backend and move it to the new\n // backend.\n srcBackend.disposeData(dataId);\n info.backend = backend;\n backend.move(dataId, values, info.shape, info.dtype);\n if (this.shouldCheckForMemLeaks()) {\n // Track the number of moves during a kernel execution to correctly\n // detect memory leaks.\n this.state.numDataMovesStack[this.state.numDataMovesStack.length - 1]++;\n }\n }\n tidy(nameOrFn, fn) {\n let name = null;\n if (fn == null) {\n // Called with only 1 argument.\n if (typeof nameOrFn !== 'function') {\n throw new Error('Please provide a function to tidy()');\n }\n fn = nameOrFn;\n }\n else {\n // Called with 2 arguments.\n if (typeof nameOrFn !== 'string' && !(nameOrFn instanceof String)) {\n throw new Error('When calling with two arguments, the first argument ' +\n 'to tidy() must be a string');\n }\n if (typeof fn !== 'function') {\n throw new Error('When calling with two arguments, the 2nd argument ' +\n 'to tidy() must be a function');\n }\n name = nameOrFn;\n // TODO(nsthorat,smilkov): Do operation logging and performance\n // profiling.\n }\n let result;\n return this.scopedRun(() => this.startScope(name), () => this.endScope(result), () => {\n result = fn();\n if (result instanceof Promise) {\n console.error('Cannot return a Promise inside of tidy.');\n }\n return result;\n });\n }\n scopedRun(start, end, f) {\n start();\n try {\n const res = f();\n end();\n return res;\n }\n catch (ex) {\n end();\n throw ex;\n }\n }\n nextTensorId() {\n return Engine.nextTensorId++;\n }\n nextVariableId() {\n return Engine.nextVariableId++;\n }\n /**\n * This method is called instead of the public-facing tensor.clone() when\n * saving a tensor for backwards pass. 
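// Minimal sketch of the tidy() contract implemented above: intermediate tensors
// allocated inside the callback are disposed when the scope ends, and only the
// returned tensor (tracked in the parent scope) survives. Returning a Promise
// from tidy() is rejected, as the code above shows.
import * as tf from '@tensorflow/tfjs-core';

const before = tf.memory().numTensors;
const y = tf.tidy('example-scope', () => {
  const a = tf.tensor([1, 2, 3]);
  const b = a.square();     // intermediate, disposed at scope end
  return b.sum();           // survives, tracked by the parent scope
});
console.log(tf.memory().numTensors - before); // 1 -> only `y` remains
y.dispose();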
It makes sure to add the clone\n * operation to the tape regardless of being called inside a kernel\n * execution.\n *\n * This method will go away once all kernels are modularized since we won't\n * need to turn off the tape inside runKernel().\n */\n clone(x) {\n const y = this.makeTensorFromDataId(x.dataId, x.shape, x.dtype);\n const inputs = { x };\n const grad = (dy) => ({\n x: () => {\n const dtype = 'float32';\n const gradInputs = { x: dy };\n const attrs = { dtype };\n return ENGINE.runKernelFunc(backend => backend.cast(dy, dtype), gradInputs, null /* grad */, Cast, attrs);\n }\n });\n const saved = [];\n this.addTapeNode(this.state.activeScope.name, inputs, [y], grad, saved, {});\n return y;\n }\n /**\n * Execute a kernel with the given name and return the output tensor.\n *\n * @param kernelName The name of the kernel to execute.\n * @param inputs A map of input names to tensors.\n * @param attrs A map of attribute names to their values. An attribute is a\n * primitive (non-tensor) input to the kernel.\n * @param inputsToSave A list of tensors, inputs to save for the backprop\n * computation.\n * @param outputsToSave A list of booleans, specifying which output to save\n * for the backprop computation. These are booleans since the output\n * tensors are not visible to the user.\n */\n runKernel(kernelName, inputs, attrs, inputsToSave, outputsToSave) {\n const forwardFunc = null;\n const backwardsFunc = null;\n // Call runKernel as a stop-gap until we modularize all kernels.\n // Once we modularize all kernels, we will remove the existing\n // `runKernelFunc`.\n return this.runKernelFunc(forwardFunc, inputs, backwardsFunc, kernelName, attrs, inputsToSave, outputsToSave);\n }\n shouldCheckForMemLeaks() {\n return this.ENV.getBool('IS_TEST');\n }\n checkKernelForMemLeak(kernelName, numDataIdsBefore, outInfos) {\n const numDataIdsAfter = this.backend.numDataIds();\n // Count the number of data ids associated with the result of the kernel.\n let numOutputDataIds = 0;\n outInfos.forEach(info => {\n // Complex numbers allocate 3 data ids, one for 'real', one for\n // 'imaginary', and one for the container that holds the former two.\n numOutputDataIds += (info.dtype === 'complex64' ? 3 : 1);\n });\n // Account for the number of moves during kernel execution. A \"data move\"\n // can happen in the middle of a kernel execution, placing a new (key,value)\n // pair in the data storage. Since data moves have net zero effect (we\n // always remove the data from the old backend), we have to cancel them out\n // when detecting memory leaks.\n const numMoves = this.state.numDataMovesStack[this.state.numDataMovesStack.length - 1];\n const dataIdsLeaked = numDataIdsAfter - numDataIdsBefore - numOutputDataIds - numMoves;\n if (dataIdsLeaked > 0) {\n throw new Error(`Backend '${this.backendName}' has an internal memory leak ` +\n `(${dataIdsLeaked} data ids) after running '${kernelName}'`);\n }\n }\n /**\n * @deprecated Use `runKernel` for newly added kernels. Keep using this method\n * only for kernels that are not yet fully modularized.\n */\n runKernelFunc(forwardFunc, inputs, backwardsFunc, kernelName, attrs, inputsToSave, outputsToSave) {\n let outputs;\n let saved = [];\n const isTapeOn = this.isTapeOn();\n if (kernelName == null) {\n kernelName =\n this.state.activeScope != null ? 
this.state.activeScope.name : '';\n }\n const startingBytecount = this.state.numBytes;\n const startingNumTensors = this.state.numTensors;\n if (this.shouldCheckForMemLeaks()) {\n this.state.numDataMovesStack.push(0);\n }\n let kernelFunc;\n const kernel = getKernel(kernelName, this.backendName);\n let out;\n if (kernel != null) {\n kernelFunc = () => {\n const numDataIdsBefore = this.backend.numDataIds();\n out = kernel.kernelFunc({ inputs, attrs, backend: this.backend });\n const outInfos = Array.isArray(out) ? out : [out];\n if (this.shouldCheckForMemLeaks()) {\n this.checkKernelForMemLeak(kernelName, numDataIdsBefore, outInfos);\n }\n const outTensors = outInfos.map(({ dataId, shape, dtype }) => this.makeTensorFromDataId(dataId, shape, dtype));\n // Save the inputs and outputs.\n // Do not save unless we are recording to the tape. Otherwise it would\n // cause a mem leak since we would never run backprop, which disposes\n // the kept tensors.\n if (isTapeOn) {\n let tensorsToSave = this.getTensorsForGradient(kernelName, inputs, outTensors);\n if (tensorsToSave == null) {\n // Fallback for ops that call runKernelFunc and pass in\n // inputsToSave and outputsToSave. Currently this is the set of ops\n // with kernel support in the WASM backend. Once those ops and\n // respective gradients are modularised we can remove this path.\n if (outputsToSave == null) {\n outputsToSave = [];\n }\n const outsToSave = outTensors.filter((_, i) => outputsToSave[i]);\n tensorsToSave = (inputsToSave || []).slice().concat(outsToSave);\n }\n saved = this.saveTensorsForBackwardMode(tensorsToSave);\n }\n return outTensors;\n };\n }\n else {\n const saveFunc = (tensors) => {\n // Do not save unless we are recording to the tape. Otherwise it would\n // cause a mem leak since we would never run backprop, which disposes\n // the kept tensors.\n if (!isTapeOn) {\n return;\n }\n saved = tensors.map(tensor => this.keep(this.clone(tensor)));\n };\n kernelFunc = () => {\n const numDataIdsBefore = this.backend.numDataIds();\n out = this.tidy(() => forwardFunc(this.backend, saveFunc));\n const outs = (Array.isArray(out) ? out : [out]);\n if (this.shouldCheckForMemLeaks()) {\n this.checkKernelForMemLeak(kernelName, numDataIdsBefore, outs);\n }\n return outs;\n };\n }\n // Stop recording to a tape when running a kernel.\n let kernelProfile;\n this.scopedRun(() => this.state.kernelDepth++, () => this.state.kernelDepth--, () => {\n if (!this.ENV.getBool('DEBUG') && !this.state.profiling) {\n outputs = kernelFunc();\n }\n else {\n kernelProfile = this.profiler.profileKernel(kernelName, inputs, () => kernelFunc());\n if (this.ENV.getBool('DEBUG')) {\n this.profiler.logKernelProfile(kernelProfile);\n }\n outputs = kernelProfile.outputs;\n }\n });\n if (isTapeOn) {\n this.addTapeNode(kernelName, inputs, outputs, backwardsFunc, saved, attrs);\n }\n if (this.state.profiling) {\n this.state.activeProfile.kernels.push({\n name: kernelName,\n bytesAdded: this.state.numBytes - startingBytecount,\n totalBytesSnapshot: this.state.numBytes,\n tensorsAdded: this.state.numTensors - startingNumTensors,\n totalTensorsSnapshot: this.state.numTensors,\n inputShapes: Object.keys(inputs).map(key => inputs[key] != null ? inputs[key].shape : null),\n outputShapes: outputs.map(item => item.shape),\n kernelTimeMs: kernelProfile.timeMs,\n extraInfo: kernelProfile.extraInfo\n });\n }\n return (Array.isArray(out) ? 
outputs : outputs[0]);\n }\n /**\n * Saves tensors used in forward mode for use in backward mode.\n *\n * @param tensors the list of tensors to save.\n */\n saveTensorsForBackwardMode(tensors) {\n const saved = tensors.map(tensor => this.keep(this.clone(tensor)));\n return saved;\n }\n /**\n * Returns a list of tensors to save for a given gradient calculation.\n *\n * Returns undefined if their is no registered gradient for this kernel in the\n * gradient registry.\n *\n * @param kernelName name of kernel to look up gradient for.\n * @param inputs a map of input tensors.\n * @param outputs an array of output tensors from forward mode of kernel.\n */\n getTensorsForGradient(kernelName, inputs, outputs) {\n const gradConfig = getGradient(kernelName);\n if (gradConfig != null) {\n const inputsToSave = gradConfig.inputsToSave || [];\n const outputsToSave = gradConfig.outputsToSave || [];\n // If saveAllInputs is true, all inputs will be saved. Otherwise, inputs\n // specified in inputsToSave will be saved.\n let inputTensorsToSave;\n if (gradConfig.saveAllInputs) {\n util.assert(Array.isArray(inputs), () => 'saveAllInputs is true, expected inputs to be an array.');\n inputTensorsToSave = Object.keys(inputs).map((key) => inputs[key]);\n }\n else {\n inputTensorsToSave = inputsToSave.map((inputName) => inputs[inputName]);\n }\n const outputTensorsToSave = outputs.filter((_, i) => outputsToSave[i]);\n return inputTensorsToSave.concat(outputTensorsToSave);\n }\n // TODO(yassogba) throw exception here once all runkernelFunc calls with\n // inputsToSave/outputsToSave are removed\n return null;\n }\n /**\n * Internal method used by public APIs for tensor creation. Makes a new\n * tensor with the provided shape, dtype and values. It always\n * creates a new data id and writes the values to the underlying backend.\n */\n makeTensor(values, shape, dtype, backend) {\n if (values == null) {\n throw new Error('Values passed to engine.makeTensor() are null');\n }\n dtype = dtype || 'float32';\n backend = backend || this.backend;\n let backendVals = values;\n if (dtype === 'string' && util.isString(values[0])) {\n backendVals = values.map(d => util.encodeString(d));\n }\n const dataId = backend.write(backendVals, shape, dtype);\n const t = new Tensor(shape, dtype, dataId, this.nextTensorId());\n this.incRef(t, backend);\n // Count bytes for string tensors.\n if (dtype === 'string') {\n const info = this.state.tensorInfo.get(dataId);\n const newBytes = bytesFromStringArray(backendVals);\n this.state.numBytes += newBytes - info.bytes;\n info.bytes = newBytes;\n }\n return t;\n }\n /**\n * Internal method used by backends. Makes a new tensor\n * that is a wrapper around an existing data id. 
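// Hedged sketch of the gradient-config lookup used by getTensorsForGradient()
// above. 'MyKernel' is a hypothetical kernel name; a registered GradConfig tells
// the engine which inputs/outputs to save for the backward pass instead of the
// legacy inputsToSave/outputsToSave arguments of runKernelFunc().
import * as tf from '@tensorflow/tfjs-core';

tf.registerGradient({
  kernelName: 'MyKernel',
  inputsToSave: ['x'],          // forward inputs handed to gradFunc via `saved`
  outputsToSave: [true],        // also save the first (only) output
  gradFunc: (dy, saved) => {
    const [x, y] = saved;       // [saved inputs..., saved outputs...]
    return { x: () => tf.mul(dy, tf.mul(x, y)) }; // illustrative gradient only
  },
});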
It doesn't create\n * a new data id, only increments the ref count used in memory tracking.\n */\n makeTensorFromDataId(dataId, shape, dtype, backend) {\n dtype = dtype || 'float32';\n const t = new Tensor(shape, dtype, dataId, this.nextTensorId());\n this.incRef(t, backend);\n return t;\n }\n makeVariable(initialValue, trainable = true, name, dtype) {\n name = name || this.nextVariableId().toString();\n if (dtype != null && dtype !== initialValue.dtype) {\n initialValue = initialValue.cast(dtype);\n }\n const v = new Variable(initialValue, trainable, name, this.nextTensorId());\n if (this.state.registeredVariables[v.name] != null) {\n throw new Error(`Variable with name ${v.name} was already registered`);\n }\n this.state.registeredVariables[v.name] = v;\n this.incRef(v, this.backend);\n return v;\n }\n incRef(a, backend) {\n const refCount = this.state.tensorInfo.has(a.dataId) ?\n this.state.tensorInfo.get(a.dataId).refCount :\n 0;\n this.state.numTensors++;\n if (a.dtype === 'string') {\n this.state.numStringTensors++;\n }\n if (refCount === 0) {\n this.state.numDataBuffers++;\n // Bytes for complex numbers are counted by their components. Bytes for\n // string tensors are counted when writing values.\n let bytes = 0;\n if (a.dtype !== 'complex64' && a.dtype !== 'string') {\n bytes = a.size * util.bytesPerElement(a.dtype);\n }\n this.state.tensorInfo.set(a.dataId, {\n backend: backend || this.backend,\n dtype: a.dtype,\n shape: a.shape,\n bytes,\n refCount: 0\n });\n this.state.numBytes += bytes;\n }\n this.state.tensorInfo.get(a.dataId).refCount++;\n if (!(a instanceof Variable)) {\n this.track(a);\n }\n }\n disposeTensor(a) {\n if (!this.state.tensorInfo.has(a.dataId)) {\n return;\n }\n this.state.numTensors--;\n if (a.dtype === 'string') {\n this.state.numStringTensors--;\n }\n const info = this.state.tensorInfo.get(a.dataId);\n const refCount = info.refCount;\n if (refCount <= 1) {\n // Don't count bytes for complex numbers as they are counted by their\n // components.\n if (a.dtype !== 'complex64') {\n this.state.numBytes -= info.bytes;\n }\n this.state.numDataBuffers--;\n info.backend.disposeData(a.dataId);\n this.state.tensorInfo.delete(a.dataId);\n }\n else {\n this.state.tensorInfo.get(a.dataId).refCount--;\n }\n // TODO(nsthorat): Construct an error and save the stack trace for\n // debugging when in debug mode. 
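// Sketch of the variable path (makeVariable above), assuming the public tf
// namespace: variables are tracked in registeredVariables rather than in a
// scope, so tidy() does not dispose them and they must be released explicitly.
import * as tf from '@tensorflow/tfjs-core';

const v = tf.variable(tf.tensor([1, 2, 3]), /* trainable */ true, 'weights');
console.log(tf.memory().numTensors);  // includes the registered variable
v.assign(tf.tensor([4, 5, 6]));       // updates in place, same registration
tf.disposeVariables();                // disposes and unregisters all variables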
Creating a stack trace is too expensive\n // to do unconditionally.\n }\n disposeVariables() {\n for (const varName in this.state.registeredVariables) {\n const v = this.state.registeredVariables[varName];\n this.disposeVariable(v);\n }\n }\n disposeVariable(v) {\n this.disposeTensor(v);\n if (this.state.registeredVariables[v.name] != null) {\n delete this.state.registeredVariables[v.name];\n }\n }\n memory() {\n const info = this.backend.memory();\n info.numTensors = this.state.numTensors;\n info.numDataBuffers = this.state.numDataBuffers;\n info.numBytes = this.state.numBytes;\n if (this.state.numStringTensors > 0) {\n info.unreliable = true;\n if (info.reasons == null) {\n info.reasons = [];\n }\n info.reasons.push('Memory usage by string tensors is approximate ' +\n '(2 bytes per character)');\n }\n return info;\n }\n async profile(query) {\n this.state.profiling = true;\n const startBytes = this.state.numBytes;\n const startNumTensors = this.state.numTensors;\n this.state.activeProfile.kernels = [];\n this.state.activeProfile.result = await query();\n this.state.profiling = false;\n this.state.activeProfile.peakBytes = Math.max(...this.state.activeProfile.kernels.map(d => d.totalBytesSnapshot));\n this.state.activeProfile.newBytes = this.state.numBytes - startBytes;\n this.state.activeProfile.newTensors =\n this.state.numTensors - startNumTensors;\n for (const kernel of this.state.activeProfile.kernels) {\n kernel.kernelTimeMs = await kernel.kernelTimeMs;\n kernel.extraInfo = await kernel.extraInfo;\n }\n return this.state.activeProfile;\n }\n isTapeOn() {\n return this.state.gradientDepth > 0 && this.state.kernelDepth === 0;\n }\n addTapeNode(kernelName, inputs, outputs, gradientsFunc, saved, attrs) {\n const tapeNode = { id: this.state.nextTapeNodeId++, kernelName, inputs, outputs, saved };\n const gradConfig = getGradient(kernelName);\n if (gradConfig != null) {\n gradientsFunc = gradConfig.gradFunc;\n }\n if (gradientsFunc != null) {\n tapeNode.gradient = (dys) => {\n // TODO(smilkov): To optimize back-prop, pass dys that are not used in\n // the backprop graph to the user as null instead of zeros\n dys = dys.map((dy, i) => {\n if (dy == null) {\n const output = outputs[i];\n const vals = util.makeZerosTypedArray(output.size, output.dtype);\n return this.makeTensor(vals, output.shape, output.dtype);\n }\n return dy;\n });\n // Grad functions of ops with single outputs expect a dy, while ops\n // with multiple outputs expect dys (array of dy).\n return gradientsFunc(dys.length > 1 ? dys : dys[0], saved, attrs);\n };\n }\n this.state.activeTape.push(tapeNode);\n }\n keep(result) {\n result.kept = true;\n return result;\n }\n startTape() {\n if (this.state.gradientDepth === 0) {\n this.state.activeTape = [];\n }\n this.state.gradientDepth++;\n }\n endTape() {\n this.state.gradientDepth--;\n }\n /**\n * Start a scope. Use this with endScope() to achieve the same functionality\n * as scope() without the need for a function closure.\n */\n startScope(name) {\n const scopeInfo = {\n track: [],\n name: 'unnamed scope',\n id: this.state.nextScopeId++\n };\n if (name) {\n scopeInfo.name = name;\n }\n this.state.scopeStack.push(scopeInfo);\n this.state.activeScope = scopeInfo;\n }\n /**\n * End a scope. 
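// Hedged usage sketch for the profile() method above (exposed as tf.profile):
// it records per-kernel memory snapshots while the query runs and awaits the
// async kernelTimeMs/extraInfo fields before returning.
import * as tf from '@tensorflow/tfjs-core';

const info = await tf.profile(() => {
  const x = tf.tensor([1, 2, 3]);
  return x.square().sum();
});
console.log(info.newBytes, info.newTensors, info.peakBytes);
console.log(info.kernels.map(k => `${k.name}: ${k.kernelTimeMs}`));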
Use this with startScope() to achieve the same functionality\n * as scope() without the need for a function closure.\n */\n endScope(result) {\n const tensorsToTrackInParent = getTensorsInContainer(result);\n const tensorsToTrackInParentSet = new Set(tensorsToTrackInParent.map(t => t.id));\n // Dispose the arrays tracked in this scope.\n for (let i = 0; i < this.state.activeScope.track.length; i++) {\n const tensor = this.state.activeScope.track[i];\n if (!tensor.kept && !tensorsToTrackInParentSet.has(tensor.id)) {\n tensor.dispose();\n }\n }\n const oldScope = this.state.scopeStack.pop();\n this.state.activeScope = this.state.scopeStack.length === 0 ?\n null :\n this.state.scopeStack[this.state.scopeStack.length - 1];\n // Track the current result in the parent scope.\n tensorsToTrackInParent.forEach(tensor => {\n // Only track the tensor if was allocated in the inner scope and is not\n // globally kept.\n if (!tensor.kept && tensor.scopeId === oldScope.id) {\n this.track(tensor);\n }\n });\n }\n /**\n * Returns gradients of `f` with respect to each of the `xs`. The gradients\n * returned are of the same length as `xs`, but some might be null if `f`\n * was not a function of that `x`. It also takes optional dy to multiply the\n * gradient, which defaults to `1`.\n */\n gradients(f, xs, dy, allowNoGradients = false) {\n util.assert(xs.length > 0, () => 'gradients() received an empty list of xs.');\n if (dy != null && dy.dtype !== 'float32') {\n throw new Error(`dy must have 'float32' dtype, but has '${dy.dtype}'`);\n }\n const y = this.scopedRun(() => this.startTape(), () => this.endTape(), () => this.tidy('forward', f));\n util.assert(y instanceof Tensor, () => 'The result y returned by f() must be a tensor.');\n // Filter out the nodes that don't connect x => y.\n const filteredTape = getFilteredNodesXToY(this.state.activeTape, xs, y);\n if (!allowNoGradients && filteredTape.length === 0 && xs.length > 0) {\n throw new Error('Cannot compute gradient of y=f(x) with respect to x. Make sure ' +\n 'that the f you passed encloses all operations that lead from x ' +\n 'to y.');\n }\n return this.tidy('backward', () => {\n const accumulatedGradientMap = {};\n accumulatedGradientMap[y.id] = (dy == null) ? ones(y.shape) : dy;\n // Backprop gradients through the filtered nodes.\n backpropagateGradients(accumulatedGradientMap, filteredTape, \n // Pass the tidy function to avoid circular dep with `tape.ts`.\n f => this.tidy(f), \n // Pass an add function to avoide a circular dep with `tape.ts`.\n add);\n const grads = xs.map(x => accumulatedGradientMap[x.id]);\n if (this.state.gradientDepth === 0) {\n // This means that we are not computing higher-order gradients\n // and can clean up the tape.\n this.state.activeTape.forEach(node => {\n for (const tensor of node.saved) {\n tensor.dispose();\n }\n });\n this.state.activeTape = null;\n }\n return { value: y, grads };\n });\n }\n customGrad(f) {\n util.assert(util.isFunction(f), () => 'The f passed in customGrad(f) must be a function.');\n return (...inputs) => {\n util.assert(inputs.every(t => t instanceof Tensor), () => 'The args passed in customGrad(f)(x1, x2,...) 
must all be ' +\n 'tensors');\n let res;\n const inputMap = {};\n inputs.forEach((input, i) => {\n inputMap[i] = input;\n });\n return this.runKernelFunc((_, save) => {\n res = f(...[...inputs, save]);\n util.assert(res.value instanceof Tensor, () => 'The function f passed in customGrad(f) must return an ' +\n 'object where `obj.value` is a tensor');\n util.assert(util.isFunction(res.gradFunc), () => 'The function f passed in customGrad(f) must return an ' +\n 'object where `obj.gradFunc` is a function.');\n return res.value;\n }, inputMap, (dy, saved) => {\n const gradRes = res.gradFunc(dy, saved);\n const grads = Array.isArray(gradRes) ? gradRes : [gradRes];\n util.assert(grads.length === inputs.length, () => 'The function f passed in customGrad(f) must return an ' +\n 'object where `obj.gradFunc` is a function that returns ' +\n 'the same number of tensors as inputs passed to f(...).');\n util.assert(grads.every(t => t instanceof Tensor), () => 'The function f passed in customGrad(f) must return an ' +\n 'object where `obj.gradFunc` is a function that returns ' +\n 'a list of only tensors.');\n const gradMap = {};\n grads.forEach((grad, i) => {\n gradMap[i] = () => grad;\n });\n return gradMap;\n });\n };\n }\n readSync(dataId) {\n // Route the read to the correct backend.\n const info = this.state.tensorInfo.get(dataId);\n return info.backend.readSync(dataId);\n }\n read(dataId) {\n // Route the read to the correct backend.\n const info = this.state.tensorInfo.get(dataId);\n return info.backend.read(dataId);\n }\n async time(query) {\n const start = now();\n const timingInfo = await this.backend.time(query);\n timingInfo.wallMs = now() - start;\n return timingInfo;\n }\n /**\n * Tracks a Tensor in the current scope to be automatically cleaned up\n * when the current scope ends, and returns the value.\n *\n * @param result The Tensor to track in the current scope.\n */\n track(result) {\n if (this.state.activeScope != null) {\n result.scopeId = this.state.activeScope.id;\n this.state.activeScope.track.push(result);\n }\n return result;\n }\n get registeredVariables() {\n return this.state.registeredVariables;\n }\n /**\n * Resets the engine state. 
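// Sketch of the customGrad() contract implemented above: f receives the inputs
// plus a save() callback and must return { value, gradFunc }, where gradFunc
// maps dy (and the saved tensors) back to one gradient per input.
import * as tf from '@tensorflow/tfjs-core';

const mySquare = tf.customGrad((x, save) => {
  save([x]);                                     // kept for the backward pass
  return {
    value: tf.square(x),
    gradFunc: (dy, saved) => [tf.mul(dy, tf.mul(saved[0], 2))],
  };
});

const x = tf.tensor([3, 4]);
tf.grad(v => mySquare(v))(x).print();  // [6, 8], i.e. 2x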
Removes all backends but does not remove\n * registered backend factories.\n */\n reset() {\n // Make any pending promise obsolete.\n this.pendingBackendInitId++;\n this.state.dispose();\n this.ENV.reset();\n this.state = new EngineState();\n for (const backendName in this.registry) {\n this.disposeRegisteredKernels(backendName);\n this.registry[backendName].dispose();\n delete this.registry[backendName];\n }\n this.backendName = null;\n this.backendInstance = null;\n this.pendingBackendInit = null;\n }\n}\nEngine.nextTensorId = 0;\nEngine.nextVariableId = 0;\nfunction ones(shape) {\n const values = makeOnesTypedArray(sizeFromShape(shape), 'float32');\n return ENGINE.makeTensor(values, shape, 'float32');\n}\nexport function getOrMakeEngine() {\n const ns = getGlobalNamespace();\n if (ns._tfengine == null) {\n const environment = new Environment(ns);\n ns._tfengine = new Engine(environment);\n }\n setEnvironmentGlobal(ns._tfengine.ENV);\n // Tell the current tensor interface that the global engine is responsible\n // for tracking.\n setTensorTracker(() => ns._tfengine);\n return ns._tfengine;\n}\nexport const ENGINE = getOrMakeEngine();\n/**\n * A implementation of the add op for use within engine and tape.\n *\n * This allows us to avoid a circular dependency between add.ts and engine.\n * It is exported to be available in tape tests.\n */\nexport function add(a, b) {\n // We duplicate Add here to avoid a circular dependency with add.ts.\n const inputs = { a, b };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.add(a, b);\n save([a, b]);\n return res;\n }, inputs, null /* gradient */, Add);\n}\n//# sourceMappingURL=engine.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// tslint:disable-next-line:no-any\nfunction _isNavigatorDefined() {\n return typeof navigator !== 'undefined' && navigator != null;\n}\nexport function isMobile() {\n if (_isNavigatorDefined()) {\n // tslint:disable-next-line:no-any\n const a = navigator.userAgent || navigator.vendor || window.opera;\n // tslint:disable-next-line:max-line-length\n return /(android|bb\\d+|meego).+mobile|avantgo|bada\\/|blackberry|blazer|compal|elaine|fennec|hiptop|iemobile|ip(hone|od)|iris|kindle|lge |maemo|midp|mmp|mobile.+firefox|netfront|opera m(ob|in)i|palm( os)?|phone|p(ixi|re)\\/|plucker|pocket|psp|series(4|6)0|symbian|treo|up\\.(browser|link)|vodafone|wap|windows ce|xda|xiino/i\n .test(a) ||\n // tslint:disable-next-line:max-line-length\n /1207|6310|6590|3gso|4thp|50[1-6]i|770s|802s|a wa|abac|ac(er|oo|s\\-)|ai(ko|rn)|al(av|ca|co)|amoi|an(ex|ny|yw)|aptu|ar(ch|go)|as(te|us)|attw|au(di|\\-m|r |s 
)|avan|be(ck|ll|nq)|bi(lb|rd)|bl(ac|az)|br(e|v)w|bumb|bw\\-(n|u)|c55\\/|capi|ccwa|cdm\\-|cell|chtm|cldc|cmd\\-|co(mp|nd)|craw|da(it|ll|ng)|dbte|dc\\-s|devi|dica|dmob|do(c|p)o|ds(12|\\-d)|el(49|ai)|em(l2|ul)|er(ic|k0)|esl8|ez([4-7]0|os|wa|ze)|fetc|fly(\\-|_)|g1 u|g560|gene|gf\\-5|g\\-mo|go(\\.w|od)|gr(ad|un)|haie|hcit|hd\\-(m|p|t)|hei\\-|hi(pt|ta)|hp( i|ip)|hs\\-c|ht(c(\\-| |_|a|g|p|s|t)|tp)|hu(aw|tc)|i\\-(20|go|ma)|i230|iac( |\\-|\\/)|ibro|idea|ig01|ikom|im1k|inno|ipaq|iris|ja(t|v)a|jbro|jemu|jigs|kddi|keji|kgt( |\\/)|klon|kpt |kwc\\-|kyo(c|k)|le(no|xi)|lg( g|\\/(k|l|u)|50|54|\\-[a-w])|libw|lynx|m1\\-w|m3ga|m50\\/|ma(te|ui|xo)|mc(01|21|ca)|m\\-cr|me(rc|ri)|mi(o8|oa|ts)|mmef|mo(01|02|bi|de|do|t(\\-| |o|v)|zz)|mt(50|p1|v )|mwbp|mywa|n10[0-2]|n20[2-3]|n30(0|2)|n50(0|2|5)|n7(0(0|1)|10)|ne((c|m)\\-|on|tf|wf|wg|wt)|nok(6|i)|nzph|o2im|op(ti|wv)|oran|owg1|p800|pan(a|d|t)|pdxg|pg(13|\\-([1-8]|c))|phil|pire|pl(ay|uc)|pn\\-2|po(ck|rt|se)|prox|psio|pt\\-g|qa\\-a|qc(07|12|21|32|60|\\-[2-7]|i\\-)|qtek|r380|r600|raks|rim9|ro(ve|zo)|s55\\/|sa(ge|ma|mm|ms|ny|va)|sc(01|h\\-|oo|p\\-)|sdk\\/|se(c(\\-|0|1)|47|mc|nd|ri)|sgh\\-|shar|sie(\\-|m)|sk\\-0|sl(45|id)|sm(al|ar|b3|it|t5)|so(ft|ny)|sp(01|h\\-|v\\-|v )|sy(01|mb)|t2(18|50)|t6(00|10|18)|ta(gt|lk)|tcl\\-|tdg\\-|tel(i|m)|tim\\-|t\\-mo|to(pl|sh)|ts(70|m\\-|m3|m5)|tx\\-9|up(\\.b|g1|si)|utst|v400|v750|veri|vi(rg|te)|vk(40|5[0-3]|\\-v)|vm40|voda|vulc|vx(52|53|60|61|70|80|81|83|85|98)|w3c(\\-| )|webc|whit|wi(g |nc|nw)|wmlb|wonu|x700|yas\\-|your|zeto|zte\\-/i\n .test(a.substr(0, 4));\n }\n return false;\n}\nexport function isBrowser() {\n return (typeof window !== 'undefined' && window.document != null) ||\n //@ts-ignore\n (typeof WorkerGlobalScope !== 'undefined');\n}\n//# sourceMappingURL=device_util.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport './engine';\nimport * as device_util from './device_util';\nimport { env } from './environment';\nconst ENV = env();\n/**\n * This file contains environment-related flag registrations.\n */\n/** Whether to enable debug mode. */\nENV.registerFlag('DEBUG', () => false, debugValue => {\n if (debugValue) {\n console.warn('Debugging mode is ON. The output of every math call will ' +\n 'be downloaded to CPU and checked for NaNs. ' +\n 'This significantly impacts performance.');\n }\n});\n/** Whether we are in a browser (as versus, say, node.js) environment. */\nENV.registerFlag('IS_BROWSER', () => device_util.isBrowser());\n/** Whether we are in a browser (as versus, say, node.js) environment. */\nENV.registerFlag('IS_NODE', () => (typeof process !== 'undefined') &&\n (typeof process.versions !== 'undefined') &&\n (typeof process.versions.node !== 'undefined'));\n/** Whether this browser is Chrome. 
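// Hedged sketch of how the flags registered above are consumed: tf.env()
// evaluates (and lets callers override) flags such as DEBUG or IS_BROWSER.
import * as tf from '@tensorflow/tfjs-core';

console.log(tf.env().getBool('IS_BROWSER')); // true in a browser or web worker
console.log(tf.env().getBool('IS_NODE'));    // true under Node.js

tf.env().set('DEBUG', true);   // warns and enables per-op NaN checks (slow)
tf.env().set('DEBUG', false);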
*/\nENV.registerFlag('IS_CHROME', () => typeof navigator !== 'undefined' && navigator != null &&\n navigator.userAgent != null && /Chrome/.test(navigator.userAgent) &&\n /Google Inc/.test(navigator.vendor));\n/**\n * True when the environment is \"production\" where we disable safety checks\n * to gain performance.\n */\nENV.registerFlag('PROD', () => false);\n/**\n * Whether to do sanity checks when inferring a shape from user-provided\n * values, used when creating a new tensor.\n */\nENV.registerFlag('TENSORLIKE_CHECK_SHAPE_CONSISTENCY', () => ENV.getBool('DEBUG'));\n/** Whether deprecation warnings are enabled. */\nENV.registerFlag('DEPRECATION_WARNINGS_ENABLED', () => true);\n/** True if running unit tests. */\nENV.registerFlag('IS_TEST', () => false);\n//# sourceMappingURL=flags.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from './engine';\nimport { env } from './environment';\nimport { Tensor } from './tensor';\nimport { assert, flatten, inferDtype, isTypedArray, toTypedArray } from './util';\nexport function inferShape(val, dtype) {\n let firstElem = val;\n if (isTypedArray(val)) {\n return dtype === 'string' ? 
[] : [val.length];\n }\n if (!Array.isArray(val)) {\n return []; // Scalar.\n }\n const shape = [];\n while (Array.isArray(firstElem) ||\n isTypedArray(firstElem) && dtype !== 'string') {\n shape.push(firstElem.length);\n firstElem = firstElem[0];\n }\n if (Array.isArray(val) &&\n env().getBool('TENSORLIKE_CHECK_SHAPE_CONSISTENCY')) {\n deepAssertShapeConsistency(val, shape, []);\n }\n return shape;\n}\nfunction deepAssertShapeConsistency(val, shape, indices) {\n indices = indices || [];\n if (!(Array.isArray(val)) && !isTypedArray(val)) {\n assert(shape.length === 0, () => `Element arr[${indices.join('][')}] is a primitive, ` +\n `but should be an array/TypedArray of ${shape[0]} elements`);\n return;\n }\n assert(shape.length > 0, () => `Element arr[${indices.join('][')}] should be a primitive, ` +\n `but is an array of ${val.length} elements`);\n assert(val.length === shape[0], () => `Element arr[${indices.join('][')}] should have ${shape[0]} ` +\n `elements, but has ${val.length} elements`);\n const subShape = shape.slice(1);\n for (let i = 0; i < val.length; ++i) {\n deepAssertShapeConsistency(val[i], subShape, indices.concat(i));\n }\n}\nfunction assertDtype(expectedDtype, actualDType, argName, functionName) {\n if (expectedDtype == null) {\n return;\n }\n if (expectedDtype !== 'numeric' && expectedDtype !== actualDType ||\n expectedDtype === 'numeric' && actualDType === 'string') {\n throw new Error(`Argument '${argName}' passed to '${functionName}' must ` +\n `be ${expectedDtype} tensor, but got ${actualDType} tensor`);\n }\n}\nexport function convertToTensor(x, argName, functionName, parseAsDtype = 'numeric') {\n if (x instanceof Tensor) {\n assertDtype(parseAsDtype, x.dtype, argName, functionName);\n return x;\n }\n let inferredDtype = inferDtype(x);\n // If the user expects a bool/int/float, use that info to update the\n // inferredDtype when it is not a string.\n if (inferredDtype !== 'string' &&\n ['bool', 'int32', 'float32'].indexOf(parseAsDtype) >= 0) {\n inferredDtype = parseAsDtype;\n }\n assertDtype(parseAsDtype, inferredDtype, argName, functionName);\n if ((x == null) ||\n (!isTypedArray(x) && !Array.isArray(x) && typeof x !== 'number' &&\n typeof x !== 'boolean' && typeof x !== 'string')) {\n const type = x == null ? 'null' : x.constructor.name;\n throw new Error(`Argument '${argName}' passed to '${functionName}' must be a ` +\n `Tensor or TensorLike, but got '${type}'`);\n }\n const inferredShape = inferShape(x, inferredDtype);\n if (!isTypedArray(x) && !Array.isArray(x)) {\n x = [x];\n }\n const skipTypedArray = true;\n const values = inferredDtype !== 'string' ?\n toTypedArray(x, inferredDtype) :\n flatten(x, [], skipTypedArray);\n return ENGINE.makeTensor(values, inferredShape, inferredDtype);\n}\nexport function convertToTensorArray(arg, argName, functionName, parseAsDtype = 'numeric') {\n if (!Array.isArray(arg)) {\n throw new Error(`Argument ${argName} passed to ${functionName} must be a ` +\n '`Tensor[]` or `TensorLike[]`');\n }\n const tensors = arg;\n return tensors.map((t, i) => convertToTensor(t, `${argName}[${i}]`, functionName), parseAsDtype);\n}\n//# sourceMappingURL=tensor_util_env.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
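// Illustrative sketch of the TensorLike conversion performed by convertToTensor()
// above: ops accept plain nested arrays (and typed arrays), with shape and dtype
// inferred as described.
import * as tf from '@tensorflow/tfjs-core';

const t = tf.add([[1, 2], [3, 4]], 10);  // TensorLike inputs are converted
console.log(t.shape, t.dtype);           // [2, 2] 'float32'

// The TENSORLIKE_CHECK_SHAPE_CONSISTENCY check only runs when DEBUG is enabled;
// a ragged array such as [[1, 2], [3]] would then throw a shape error.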
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { isPromise } from '../util';\nexport const OP_SCOPE_SUFFIX = '__op';\n/**\n * Used for wrapping functions that perform math operations on\n * Tensors. The function will be wrapped in a named scope that cleans all\n * memory usage after the function is done.\n */\nexport function op(f) {\n const keys = Object.keys(f);\n if (keys.length !== 1) {\n throw new Error(`Please provide an object with a single key ` +\n `(operation name) mapping to a function. Got an object with ` +\n `${keys.length} keys.`);\n }\n let opName = keys[0];\n const fn = f[opName];\n // Strip the underscore from the end of the function name.\n if (opName.endsWith('_')) {\n opName = opName.substring(0, opName.length - 1);\n }\n // add an __op suffix to distinguish ops from kernels in tf.profile\n opName = opName + OP_SCOPE_SUFFIX;\n // tslint:disable-next-line:no-any\n const f2 = (...args) => {\n ENGINE.startScope(opName);\n try {\n const result = fn(...args);\n if (isPromise(result)) {\n console.error('Cannot return a Promise inside of tidy.');\n }\n ENGINE.endScope(result);\n return result;\n }\n catch (ex) {\n ENGINE.endScope(null);\n throw ex;\n }\n };\n Object.defineProperty(f2, 'name', { value: opName, configurable: true });\n // tslint:disable-next-line:no-any\n return f2;\n}\n//# sourceMappingURL=operation.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
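// Hedged sketch of the op() wrapper documented above: the single-key object
// gives the op its name (trailing underscore stripped, '__op' suffix added for
// tf.profile), and the wrapper opens/closes an engine scope around each call.
// 'doubleIt_' is hypothetical, and the deep import path is an internal detail
// that may differ between versions.
import * as tf from '@tensorflow/tfjs-core';
import { op } from '@tensorflow/tfjs-core/dist/ops/operation';

function doubleIt_(x) {
  return tf.mul(x, 2);               // tf.mul accepts TensorLike inputs
}
const doubleIt = op({ doubleIt_ });

doubleIt([1, 2, 3]).print();         // runs inside a 'doubleIt__op' scope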
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Complex } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * Converts two real numbers to a complex number.\n *\n * Given a tensor `real` representing the real part of a complex number, and a\n * tensor `imag` representing the imaginary part of a complex number, this\n * operation returns complex numbers elementwise of the form [r0, i0, r1, i1],\n * where r represents the real part and i represents the imag part.\n *\n * The input tensors real and imag must have the same shape.\n *\n * ```js\n * const real = tf.tensor1d([2.25, 3.25]);\n * const imag = tf.tensor1d([4.75, 5.75]);\n * const complex = tf.complex(real, imag);\n *\n * complex.print();\n * ```\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nfunction complex_(real, imag) {\n const $real = convertToTensor(real, 'real', 'complex');\n const $imag = convertToTensor(imag, 'imag', 'complex');\n util.assertShapesMatch($real.shape, $imag.shape, `real and imag shapes, ${$real.shape} and ${$imag.shape}, ` +\n `must match in call to tf.complex().`);\n const forward = (backend) => {\n return backend.complex($real, $imag);\n };\n const inputs = { real: $real, imag: $imag };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Complex);\n}\nexport const complex = op({ complex_ });\n//# sourceMappingURL=complex.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { assert, assertNonNegativeIntegerDimensions, flatten, inferDtype, isTypedArray, sizeFromShape, toTypedArray } from '../util';\n/** This is shared code across all tensor creation methods. */\nexport function makeTensor(values, shape, inferredShape, dtype) {\n if (dtype == null) {\n dtype = inferDtype(values);\n }\n if (dtype === 'complex64') {\n throw new Error(`Cannot construct a complex64 tensor directly. 
` +\n `Please use tf.complex(real, imag).`);\n }\n if (!isTypedArray(values) && !Array.isArray(values) &&\n typeof values !== 'number' && typeof values !== 'boolean' &&\n typeof values !== 'string') {\n throw new Error('values passed to tensor(values) must be a number/boolean/string or ' +\n 'an array of numbers/booleans/strings, or a TypedArray');\n }\n if (shape != null) {\n assertNonNegativeIntegerDimensions(shape);\n const providedSize = sizeFromShape(shape);\n const inferredSize = sizeFromShape(inferredShape);\n assert(providedSize === inferredSize, () => `Based on the provided shape, [${shape}], the tensor should have ` +\n `${providedSize} values but has ${inferredSize}`);\n for (let i = 0; i < inferredShape.length; ++i) {\n const inferred = inferredShape[i];\n const flatDimsDontMatch = i === inferredShape.length - 1 ?\n inferred !== sizeFromShape(shape.slice(i)) :\n true;\n assert(inferredShape[i] === shape[i] || !flatDimsDontMatch, () => `Error creating a new Tensor. Inferred shape ` +\n `(${inferredShape}) does not match the provided ` +\n `shape (${shape}). `);\n }\n }\n if (!isTypedArray(values) && !Array.isArray(values)) {\n values = [values];\n }\n shape = shape || inferredShape;\n values = dtype !== 'string' ?\n toTypedArray(values, dtype) :\n flatten(values, [], true);\n return ENGINE.makeTensor(values, shape, dtype);\n}\n//# sourceMappingURL=tensor_ops_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { inferShape } from '../tensor_util_env';\nimport { makeTensor } from './tensor_ops_util';\n/**\n * Creates a `tf.Tensor` with the provided values, shape and dtype.\n *\n * ```js\n * // Pass an array of values to create a vector.\n * tf.tensor([1, 2, 3, 4]).print();\n * ```\n *\n * ```js\n * // Pass a nested array of values to make a matrix or a higher\n * // dimensional tensor.\n * tf.tensor([[1, 2], [3, 4]]).print();\n * ```\n *\n * ```js\n * // Pass a flat array and specify a shape yourself.\n * tf.tensor([1, 2, 3, 4], [2, 2]).print();\n * ```\n *\n * @param values The values of the tensor. Can be nested array of numbers,\n * or a flat array, or a `TypedArray`. If the values are strings,\n * they will be encoded as utf-8 and kept as `Uint8Array[]`.\n * @param shape The shape of the tensor. Optional. If not provided,\n * it is inferred from `values`.\n * @param dtype The data type.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function tensor(values, shape, dtype) {\n const inferredShape = inferShape(values, dtype);\n return makeTensor(values, shape, inferredShape, dtype);\n}\n//# sourceMappingURL=tensor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/* Type definitions for exporting and importing of models. */\n/**\n * A map from Tensor dtype to number of bytes per element of the Tensor.\n */\nexport const DTYPE_VALUE_SIZE_MAP = {\n 'float32': 4,\n 'float16': 2,\n 'int32': 4,\n 'uint16': 2,\n 'uint8': 1,\n 'bool': 1,\n 'complex64': 8\n};\n//# sourceMappingURL=types.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { complex } from '../ops/complex';\nimport { tensor } from '../ops/tensor';\nimport { sizeFromShape } from '../util';\nimport { DTYPE_VALUE_SIZE_MAP } from './types';\n/** Number of bytes reserved for the length of the string. (32bit integer). */\nconst NUM_BYTES_STRING_LENGTH = 4;\n/**\n * Encode a map from names to weight values as an ArrayBuffer, along with an\n * `Array` of `WeightsManifestEntry` as specification of the encoded weights.\n *\n * This function does not perform sharding.\n *\n * This function is the reverse of `decodeWeights`.\n *\n * @param tensors A map (\"dict\") from names to tensors.\n * @param group Group to which the weights belong (optional).\n * @returns A `Promise` of\n * - A flat `ArrayBuffer` with all the binary values of the `Tensor`s\n * concatenated.\n * - An `Array` of `WeightManifestEntry`s, carrying information including\n * tensor names, `dtype`s and shapes.\n * @throws Error: on unsupported tensor `dtype`.\n */\nexport async function encodeWeights(tensors, group) {\n // TODO(adarob, cais): Support quantization.\n const specs = [];\n const dataPromises = [];\n const names = Array.isArray(tensors) ?\n tensors.map(tensor => tensor.name) :\n Object.keys(tensors);\n for (let i = 0; i < names.length; ++i) {\n const name = names[i];\n const t = Array.isArray(tensors) ? 
tensors[i].tensor : tensors[name];\n if (t.dtype !== 'float32' && t.dtype !== 'int32' && t.dtype !== 'bool' &&\n t.dtype !== 'string' && t.dtype !== 'complex64') {\n throw new Error(`Unsupported dtype in weight '${name}': ${t.dtype}`);\n }\n const spec = { name, shape: t.shape, dtype: t.dtype };\n if (t.dtype === 'string') {\n const utf8bytes = new Promise(async (resolve) => {\n const vals = await t.bytes();\n const totalNumBytes = vals.reduce((p, c) => p + c.length, 0) +\n NUM_BYTES_STRING_LENGTH * vals.length;\n const bytes = new Uint8Array(totalNumBytes);\n let offset = 0;\n for (let i = 0; i < vals.length; i++) {\n const val = vals[i];\n const bytesOfLength = new Uint8Array(new Uint32Array([val.length]).buffer);\n bytes.set(bytesOfLength, offset);\n offset += NUM_BYTES_STRING_LENGTH;\n bytes.set(val, offset);\n offset += val.length;\n }\n resolve(bytes);\n });\n dataPromises.push(utf8bytes);\n }\n else {\n dataPromises.push(t.data());\n }\n if (group != null) {\n spec.group = group;\n }\n specs.push(spec);\n }\n const tensorValues = await Promise.all(dataPromises);\n return { data: concatenateTypedArrays(tensorValues), specs };\n}\n/**\n * Decode flat ArrayBuffer as weights.\n *\n * This function does not handle sharding.\n *\n * This function is the reverse of `encodeWeights`.\n *\n * @param buffer A flat ArrayBuffer carrying the binary values of the tensors\n * concatenated in the order specified in `specs`.\n * @param specs Specifications of the names, dtypes and shapes of the tensors\n * whose value are encoded by `buffer`.\n * @return A map from tensor name to tensor value, with the names corresponding\n * to names in `specs`.\n * @throws Error, if any of the tensors has unsupported dtype.\n */\nexport function decodeWeights(buffer, specs) {\n // TODO(adarob, cais): Support quantization.\n const out = {};\n let float16Decode;\n let offset = 0;\n for (const spec of specs) {\n const name = spec.name;\n const dtype = spec.dtype;\n const shape = spec.shape;\n const size = sizeFromShape(shape);\n let values;\n if ('quantization' in spec) {\n const quantization = spec.quantization;\n if (quantization.dtype === 'uint8' || quantization.dtype === 'uint16') {\n if (!('min' in quantization && 'scale' in quantization)) {\n throw new Error(`Weight ${spec.name} with quantization ${quantization.dtype} ` +\n `doesn't have corresponding metadata min and scale.`);\n }\n }\n else if (quantization.dtype === 'float16') {\n if (dtype !== 'float32') {\n throw new Error(`Weight ${spec.name} is quantized with ${quantization.dtype} ` +\n `which only supports weights of type float32 not ${dtype}.`);\n }\n }\n else {\n throw new Error(`Weight ${spec.name} has unknown ` +\n `quantization dtype ${quantization.dtype}. 
` +\n `Supported quantization dtypes are: ` +\n `'uint8', 'uint16', and 'float16'.`);\n }\n const quantizationSizeFactor = DTYPE_VALUE_SIZE_MAP[quantization.dtype];\n const byteBuffer = buffer.slice(offset, offset + size * quantizationSizeFactor);\n const quantizedArray = (quantization.dtype === 'uint8') ?\n new Uint8Array(byteBuffer) :\n new Uint16Array(byteBuffer);\n if (dtype === 'float32') {\n if (quantization.dtype === 'uint8' || quantization.dtype === 'uint16') {\n values = new Float32Array(quantizedArray.length);\n for (let i = 0; i < quantizedArray.length; i++) {\n const v = quantizedArray[i];\n values[i] = v * quantization.scale + quantization.min;\n }\n }\n else if (quantization.dtype === 'float16') {\n if (float16Decode === undefined) {\n float16Decode = getFloat16Decoder();\n }\n values = float16Decode(quantizedArray);\n }\n else {\n throw new Error(`Unsupported quantization type ${quantization.dtype} ` +\n `for weight type float32.`);\n }\n }\n else if (dtype === 'int32') {\n if (quantization.dtype !== 'uint8' && quantization.dtype !== 'uint16') {\n throw new Error(`Unsupported quantization type ${quantization.dtype} ` +\n `for weight type int32.`);\n }\n values = new Int32Array(quantizedArray.length);\n for (let i = 0; i < quantizedArray.length; i++) {\n const v = quantizedArray[i];\n values[i] = Math.round(v * quantization.scale + quantization.min);\n }\n }\n else {\n throw new Error(`Unsupported dtype in weight '${name}': ${dtype}`);\n }\n offset += size * quantizationSizeFactor;\n }\n else if (dtype === 'string') {\n const size = sizeFromShape(spec.shape);\n values = [];\n for (let i = 0; i < size; i++) {\n const byteLength = new Uint32Array(buffer.slice(offset, offset + NUM_BYTES_STRING_LENGTH))[0];\n offset += NUM_BYTES_STRING_LENGTH;\n const bytes = new Uint8Array(buffer.slice(offset, offset + byteLength));\n values.push(bytes);\n offset += byteLength;\n }\n }\n else {\n const dtypeFactor = DTYPE_VALUE_SIZE_MAP[dtype];\n const byteBuffer = buffer.slice(offset, offset + size * dtypeFactor);\n if (dtype === 'float32') {\n values = new Float32Array(byteBuffer);\n }\n else if (dtype === 'int32') {\n values = new Int32Array(byteBuffer);\n }\n else if (dtype === 'bool') {\n values = new Uint8Array(byteBuffer);\n }\n else if (dtype === 'complex64') {\n values = new Float32Array(byteBuffer);\n const real = new Float32Array(values.length / 2);\n const image = new Float32Array(values.length / 2);\n for (let i = 0; i < real.length; i++) {\n real[i] = values[i * 2];\n image[i] = values[i * 2 + 1];\n }\n const realTensor = tensor(real, shape, 'float32');\n const imageTensor = tensor(image, shape, 'float32');\n out[name] = complex(realTensor, imageTensor);\n realTensor.dispose();\n imageTensor.dispose();\n }\n else {\n throw new Error(`Unsupported dtype in weight '${name}': ${dtype}`);\n }\n offset += size * dtypeFactor;\n }\n if (dtype !== 'complex64') {\n out[name] = tensor(values, shape, dtype);\n }\n }\n return out;\n}\n/**\n * Concatenate TypedArrays into an ArrayBuffer.\n */\nexport function concatenateTypedArrays(xs) {\n // TODO(adarob, cais): Support quantization.\n if (xs === null) {\n throw new Error(`Invalid input value: ${JSON.stringify(xs)}`);\n }\n let totalByteLength = 0;\n // `normalizedXs` is here for this reason: a `TypedArray`'s `buffer'\n // can have a different byte length from that of the `TypedArray` itself,\n // for example, when the `TypedArray` is created from an offset in an\n // `ArrayBuffer`. 
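// Hedged round-trip sketch for encodeWeights()/decodeWeights() above: encode a
// named weight map into a flat ArrayBuffer plus manifest specs, then decode it
// back into tensors. Quantized specs (uint8/uint16/float16) follow the same
// decode path shown above.
import * as tf from '@tensorflow/tfjs-core';

const weights = {
  dense1: tf.tensor2d([[1, 2], [3, 4]]),
  bias: tf.tensor1d([0, 1], 'int32'),
};
const { data, specs } = await tf.io.encodeWeights(weights);
console.log(data.byteLength, specs);   // 16 + 8 bytes, one spec per weight

const restored = tf.io.decodeWeights(data, specs);
restored['dense1'].print();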
`normliazedXs` holds `TypedArray`s whose `buffer`s match\n // the `TypedArray` in byte length. If an element of `xs` does not show\n // this property, a new `TypedArray` that satisfy this property will be\n // constructed and pushed into `normalizedXs`.\n const normalizedXs = [];\n xs.forEach((x) => {\n totalByteLength += x.byteLength;\n // tslint:disable:no-any\n normalizedXs.push(x.byteLength === x.buffer.byteLength ? x :\n new x.constructor(x));\n if (!(x instanceof Float32Array || x instanceof Int32Array ||\n x instanceof Uint8Array)) {\n throw new Error(`Unsupported TypedArray subtype: ${x.constructor.name}`);\n }\n // tslint:enable:no-any\n });\n const y = new Uint8Array(totalByteLength);\n let offset = 0;\n normalizedXs.forEach((x) => {\n y.set(new Uint8Array(x.buffer), offset);\n offset += x.byteLength;\n });\n return y.buffer;\n}\n// Use Buffer on Node.js instead of Blob/atob/btoa\nconst useNodeBuffer = typeof Buffer !== 'undefined' &&\n (typeof Blob === 'undefined' || typeof atob === 'undefined' ||\n typeof btoa === 'undefined');\n/**\n * Calculate the byte length of a JavaScript string.\n *\n * Note that a JavaScript string can contain wide characters, therefore the\n * length of the string is not necessarily equal to the byte length.\n *\n * @param str Input string.\n * @returns Byte length.\n */\nexport function stringByteLength(str) {\n if (useNodeBuffer) {\n return Buffer.byteLength(str);\n }\n return new Blob([str]).size;\n}\n/**\n * Encode an ArrayBuffer as a base64 encoded string.\n *\n * @param buffer `ArrayBuffer` to be converted.\n * @returns A string that base64-encodes `buffer`.\n */\nexport function arrayBufferToBase64String(buffer) {\n if (useNodeBuffer) {\n return Buffer.from(buffer).toString('base64');\n }\n const buf = new Uint8Array(buffer);\n let s = '';\n for (let i = 0, l = buf.length; i < l; i++) {\n s += String.fromCharCode(buf[i]);\n }\n return btoa(s);\n}\n/**\n * Decode a base64 string as an ArrayBuffer.\n *\n * @param str Base64 string.\n * @returns Decoded `ArrayBuffer`.\n */\nexport function base64StringToArrayBuffer(str) {\n if (useNodeBuffer) {\n const buf = Buffer.from(str, 'base64');\n return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n }\n const s = atob(str);\n const buffer = new Uint8Array(s.length);\n for (let i = 0; i < s.length; ++i) {\n buffer.set([s.charCodeAt(i)], i);\n }\n return buffer.buffer;\n}\n/**\n * Concatenate a number of ArrayBuffers into one.\n *\n * @param buffers A number of array buffers to concatenate.\n * @returns Result of concatenating `buffers` in order.\n */\nexport function concatenateArrayBuffers(buffers) {\n if (buffers.length === 1) {\n return buffers[0];\n }\n let totalByteLength = 0;\n buffers.forEach((buffer) => {\n totalByteLength += buffer.byteLength;\n });\n const temp = new Uint8Array(totalByteLength);\n let offset = 0;\n buffers.forEach((buffer) => {\n temp.set(new Uint8Array(buffer), offset);\n offset += buffer.byteLength;\n });\n return temp.buffer;\n}\n/**\n * Get the basename of a path.\n *\n * Behaves in a way analogous to Linux's basename command.\n *\n * @param path\n */\nexport function basename(path) {\n const SEPARATOR = '/';\n path = path.trim();\n while (path.endsWith(SEPARATOR)) {\n path = path.slice(0, path.length - 1);\n }\n const items = path.split(SEPARATOR);\n return items[items.length - 1];\n}\n/**\n * Populate ModelArtifactsInfo fields for a model with JSON topology.\n * @param modelArtifacts\n * @returns A ModelArtifactsInfo object.\n */\nexport function 
getModelArtifactsInfoForJSON(modelArtifacts) {\n if (modelArtifacts.modelTopology instanceof ArrayBuffer) {\n throw new Error('Expected JSON model topology, received ArrayBuffer.');\n }\n return {\n dateSaved: new Date(),\n modelTopologyType: 'JSON',\n modelTopologyBytes: modelArtifacts.modelTopology == null ?\n 0 :\n stringByteLength(JSON.stringify(modelArtifacts.modelTopology)),\n weightSpecsBytes: modelArtifacts.weightSpecs == null ?\n 0 :\n stringByteLength(JSON.stringify(modelArtifacts.weightSpecs)),\n weightDataBytes: modelArtifacts.weightData == null ?\n 0 :\n modelArtifacts.weightData.byteLength,\n };\n}\n/**\n * Computes mantisa table for casting Float16 to Float32\n * See http://www.fox-toolkit.org/ftp/fasthalffloatconversion.pdf\n *\n * @returns Uint32Array, 2048 mantissa lookup values.\n */\nfunction computeFloat16MantisaTable() {\n const convertMantissa = (i) => {\n let m = i << 13;\n let e = 0;\n while ((m & 0x00800000) === 0) {\n e -= 0x00800000;\n m <<= 1;\n }\n m &= ~0x00800000;\n e += 0x38800000;\n return m | e;\n };\n const mantisaTable = new Uint32Array(2048);\n mantisaTable[0] = 0;\n for (let i = 1; i < 1024; i++) {\n mantisaTable[i] = convertMantissa(i);\n }\n for (let i = 1024; i < 2048; i++) {\n mantisaTable[i] = 0x38000000 + ((i - 1024) << 13);\n }\n return mantisaTable;\n}\n/**\n * Computes exponent table for casting Float16 to Float32\n * See http://www.fox-toolkit.org/ftp/fasthalffloatconversion.pdf\n *\n * @returns Uint32Array, 64 exponent lookup values.\n */\nfunction computeFloat16ExponentTable() {\n const exponentTable = new Uint32Array(64);\n exponentTable[0] = 0;\n exponentTable[31] = 0x47800000;\n exponentTable[32] = 0x80000000;\n exponentTable[63] = 0xc7800000;\n for (let i = 1; i < 31; i++) {\n exponentTable[i] = i << 23;\n }\n for (let i = 33; i < 63; i++) {\n exponentTable[i] = 0x80000000 + ((i - 32) << 23);\n }\n return exponentTable;\n}\n/**\n * Computes offset table for casting Float16 to Float32\n * See http://www.fox-toolkit.org/ftp/fasthalffloatconversion.pdf\n *\n * @returns Uint32Array, 6d offset values.\n */\nfunction computeFloat16OffsetTable() {\n const offsetTable = new Uint32Array(64);\n for (let i = 0; i < 64; i++) {\n offsetTable[i] = 1024;\n }\n offsetTable[0] = offsetTable[32] = 0;\n return offsetTable;\n}\n/**\n * Retrieve a Float16 decoder which will decode a ByteArray of Float16 values\n * to a Float32Array.\n *\n * @returns Function (buffer: Uint16Array) => Float32Array which decodes\n * the Uint16Array of Float16 bytes to a Float32Array.\n */\nexport function getFloat16Decoder() {\n // Algorithm is based off of\n // http://www.fox-toolkit.org/ftp/fasthalffloatconversion.pdf\n // Cache lookup tables\n const mantisaTable = computeFloat16MantisaTable();\n const exponentTable = computeFloat16ExponentTable();\n const offsetTable = computeFloat16OffsetTable();\n return (quantizedArray) => {\n const buffer = new ArrayBuffer(4 * quantizedArray.length);\n const bufferUint32View = new Uint32Array(buffer);\n for (let index = 0; index < quantizedArray.length; index++) {\n const float16Bits = quantizedArray[index];\n const float32Bits = mantisaTable[offsetTable[float16Bits >> 10] + (float16Bits & 0x3ff)] +\n exponentTable[float16Bits >> 10];\n bufferUint32View[index] = float32Bits;\n }\n return new Float32Array(buffer);\n };\n}\n//# sourceMappingURL=io_utils.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
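// The mantissa/exponent/offset tables above implement the fast half-float
// conversion from the referenced paper. For illustration only, a direct
// (slower) per-value decode of an IEEE 754 half-float bit pattern, which the
// table-based decoder reproduces for normal, subnormal, zero, Inf and NaN inputs:
function float16BitsToNumber(h) {
  const sign = (h & 0x8000) ? -1 : 1;
  const exponent = (h & 0x7c00) >> 10;
  const fraction = h & 0x03ff;
  if (exponent === 0) return sign * Math.pow(2, -14) * (fraction / 1024);  // subnormal or zero
  if (exponent === 0x1f) return fraction ? NaN : sign * Infinity;          // NaN or Infinity
  return sign * Math.pow(2, exponent - 15) * (1 + fraction / 1024);        // normal
}
// e.g. float16BitsToNumber(0x3c00) === 1 and float16BitsToNumber(0xc000) === -2.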
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class IORouterRegistry {\n constructor() {\n this.saveRouters = [];\n this.loadRouters = [];\n }\n static getInstance() {\n if (IORouterRegistry.instance == null) {\n IORouterRegistry.instance = new IORouterRegistry();\n }\n return IORouterRegistry.instance;\n }\n /**\n * Register a save-handler router.\n *\n * @param saveRouter A function that maps a URL-like string onto an instance\n * of `IOHandler` with the `save` method defined or `null`.\n */\n static registerSaveRouter(saveRouter) {\n IORouterRegistry.getInstance().saveRouters.push(saveRouter);\n }\n /**\n * Register a load-handler router.\n *\n * @param loadRouter A function that maps a URL-like string onto an instance\n * of `IOHandler` with the `load` method defined or `null`.\n */\n static registerLoadRouter(loadRouter) {\n IORouterRegistry.getInstance().loadRouters.push(loadRouter);\n }\n /**\n * Look up IOHandler for saving, given a URL-like string.\n *\n * @param url\n * @returns If only one match is found, an instance of IOHandler with the\n * `save` method defined. If no match is found, `null`.\n * @throws Error, if more than one match is found.\n */\n static getSaveHandlers(url) {\n return IORouterRegistry.getHandlers(url, 'save');\n }\n /**\n * Look up IOHandler for loading, given a URL-like string.\n *\n * @param url\n * @param loadOptions Optional, custom load options.\n * @returns All valid handlers for `url`, given the currently registered\n * handler routers.\n */\n static getLoadHandlers(url, loadOptions) {\n return IORouterRegistry.getHandlers(url, 'load', loadOptions);\n }\n static getHandlers(url, handlerType, loadOptions) {\n const validHandlers = [];\n const routers = handlerType === 'load' ?\n IORouterRegistry.getInstance().loadRouters :\n IORouterRegistry.getInstance().saveRouters;\n routers.forEach(router => {\n const handler = router(url, loadOptions);\n if (handler !== null) {\n validHandlers.push(handler);\n }\n });\n return validHandlers;\n }\n}\nexport const registerSaveRouter = (loudRouter) => IORouterRegistry.registerSaveRouter(loudRouter);\nexport const registerLoadRouter = (loudRouter) => IORouterRegistry.registerLoadRouter(loudRouter);\nexport const getSaveHandlers = (url) => IORouterRegistry.getSaveHandlers(url);\nexport const getLoadHandlers = (url, loadOptions) => IORouterRegistry.getLoadHandlers(url, loadOptions);\n//# sourceMappingURL=router_registry.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
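// Sketch of how a custom IOHandler could be wired into the registries above,
// assuming a hypothetical 'memory://' scheme and assuming registerSaveRouter /
// registerLoadRouter are imported from the router_registry module; the handler
// shape (async save/load) follows the IOHandler contract the routers return.
const memoryStore = {};
const memoryRouter = (url) => {
  if (typeof url !== 'string' || !url.startsWith('memory://')) return null;
  const path = url.slice('memory://'.length);
  return {
    save: async (modelArtifacts) => {
      memoryStore[path] = modelArtifacts;
      return { modelArtifactsInfo: { dateSaved: new Date(), modelTopologyType: 'JSON' } };
    },
    load: async () => memoryStore[path],
  };
};
registerSaveRouter(memoryRouter);
registerLoadRouter(memoryRouter);
// getSaveHandlers('memory://my-model') would now return the handler above.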
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport '../flags';\nimport { env } from '../environment';\nimport { getModelArtifactsInfoForJSON } from './io_utils';\nimport { IORouterRegistry } from './router_registry';\nconst DATABASE_NAME = 'tensorflowjs';\nconst DATABASE_VERSION = 1;\n// Model data and ModelArtifactsInfo (metadata) are stored in two separate\n// stores for efficient access of the list of stored models and their metadata.\n// 1. The object store for model data: topology, weights and weight manifests.\nconst MODEL_STORE_NAME = 'models_store';\n// 2. The object store for ModelArtifactsInfo, including meta-information such\n// as the type of topology (JSON vs binary), byte size of the topology, byte\n// size of the weights, etc.\nconst INFO_STORE_NAME = 'model_info_store';\n/**\n * Delete the entire database for tensorflow.js, including the models store.\n */\nexport async function deleteDatabase() {\n const idbFactory = getIndexedDBFactory();\n return new Promise((resolve, reject) => {\n const deleteRequest = idbFactory.deleteDatabase(DATABASE_NAME);\n deleteRequest.onsuccess = () => resolve();\n deleteRequest.onerror = error => reject(error);\n });\n}\nfunction getIndexedDBFactory() {\n if (!env().getBool('IS_BROWSER')) {\n // TODO(cais): Add more info about what IOHandler subtypes are available.\n // Maybe point to a doc page on the web and/or automatically determine\n // the available IOHandlers and print them in the error message.\n throw new Error('Failed to obtain IndexedDB factory because the current environment' +\n 'is not a web browser.');\n }\n // tslint:disable-next-line:no-any\n const theWindow = typeof window === 'undefined' ? 
self : window;\n const factory = theWindow.indexedDB || theWindow.mozIndexedDB ||\n theWindow.webkitIndexedDB || theWindow.msIndexedDB ||\n theWindow.shimIndexedDB;\n if (factory == null) {\n throw new Error('The current browser does not appear to support IndexedDB.');\n }\n return factory;\n}\nfunction setUpDatabase(openRequest) {\n const db = openRequest.result;\n db.createObjectStore(MODEL_STORE_NAME, { keyPath: 'modelPath' });\n db.createObjectStore(INFO_STORE_NAME, { keyPath: 'modelPath' });\n}\n/**\n * IOHandler subclass: Browser IndexedDB.\n *\n * See the doc string of `browserIndexedDB` for more details.\n */\nexport class BrowserIndexedDB {\n constructor(modelPath) {\n this.indexedDB = getIndexedDBFactory();\n if (modelPath == null || !modelPath) {\n throw new Error('For IndexedDB, modelPath must not be null, undefined or empty.');\n }\n this.modelPath = modelPath;\n }\n async save(modelArtifacts) {\n // TODO(cais): Support saving GraphDef models.\n if (modelArtifacts.modelTopology instanceof ArrayBuffer) {\n throw new Error('BrowserLocalStorage.save() does not support saving model topology ' +\n 'in binary formats yet.');\n }\n return this.databaseAction(this.modelPath, modelArtifacts);\n }\n async load() {\n return this.databaseAction(this.modelPath);\n }\n /**\n * Perform database action to put model artifacts into or read model artifacts\n * from IndexedDB object store.\n *\n * Whether the action is put or get depends on whether `modelArtifacts` is\n * specified. If it is specified, the action will be put; otherwise the action\n * will be get.\n *\n * @param modelPath A unique string path for the model.\n * @param modelArtifacts If specified, it will be the model artifacts to be\n * stored in IndexedDB.\n * @returns A `Promise` of `SaveResult`, if the action is put, or a `Promise`\n * of `ModelArtifacts`, if the action is get.\n */\n databaseAction(modelPath, modelArtifacts) {\n return new Promise((resolve, reject) => {\n const openRequest = this.indexedDB.open(DATABASE_NAME, DATABASE_VERSION);\n openRequest.onupgradeneeded = () => setUpDatabase(openRequest);\n openRequest.onsuccess = () => {\n const db = openRequest.result;\n if (modelArtifacts == null) {\n // Read model out from object store.\n const modelTx = db.transaction(MODEL_STORE_NAME, 'readonly');\n const modelStore = modelTx.objectStore(MODEL_STORE_NAME);\n const getRequest = modelStore.get(this.modelPath);\n getRequest.onsuccess = () => {\n if (getRequest.result == null) {\n db.close();\n return reject(new Error(`Cannot find model with path '${this.modelPath}' ` +\n `in IndexedDB.`));\n }\n else {\n resolve(getRequest.result.modelArtifacts);\n }\n };\n getRequest.onerror = error => {\n db.close();\n return reject(getRequest.error);\n };\n modelTx.oncomplete = () => db.close();\n }\n else {\n // Put model into object store.\n const modelArtifactsInfo = getModelArtifactsInfoForJSON(modelArtifacts);\n // First, put ModelArtifactsInfo into info store.\n const infoTx = db.transaction(INFO_STORE_NAME, 'readwrite');\n let infoStore = infoTx.objectStore(INFO_STORE_NAME);\n const putInfoRequest = infoStore.put({ modelPath: this.modelPath, modelArtifactsInfo });\n let modelTx;\n putInfoRequest.onsuccess = () => {\n // Second, put model data into model store.\n modelTx = db.transaction(MODEL_STORE_NAME, 'readwrite');\n const modelStore = modelTx.objectStore(MODEL_STORE_NAME);\n const putModelRequest = modelStore.put({\n modelPath: this.modelPath,\n modelArtifacts,\n modelArtifactsInfo\n });\n putModelRequest.onsuccess = () => 
resolve({ modelArtifactsInfo });\n putModelRequest.onerror = error => {\n // If the put-model request fails, roll back the info entry as\n // well.\n infoStore = infoTx.objectStore(INFO_STORE_NAME);\n const deleteInfoRequest = infoStore.delete(this.modelPath);\n deleteInfoRequest.onsuccess = () => {\n db.close();\n return reject(putModelRequest.error);\n };\n deleteInfoRequest.onerror = error => {\n db.close();\n return reject(putModelRequest.error);\n };\n };\n };\n putInfoRequest.onerror = error => {\n db.close();\n return reject(putInfoRequest.error);\n };\n infoTx.oncomplete = () => {\n if (modelTx == null) {\n db.close();\n }\n else {\n modelTx.oncomplete = () => db.close();\n }\n };\n }\n };\n openRequest.onerror = error => reject(openRequest.error);\n });\n }\n}\nBrowserIndexedDB.URL_SCHEME = 'indexeddb://';\nexport const indexedDBRouter = (url) => {\n if (!env().getBool('IS_BROWSER')) {\n return null;\n }\n else {\n if (!Array.isArray(url) && url.startsWith(BrowserIndexedDB.URL_SCHEME)) {\n return browserIndexedDB(url.slice(BrowserIndexedDB.URL_SCHEME.length));\n }\n else {\n return null;\n }\n }\n};\nIORouterRegistry.registerSaveRouter(indexedDBRouter);\nIORouterRegistry.registerLoadRouter(indexedDBRouter);\n/**\n * Creates a browser IndexedDB IOHandler for saving and loading models.\n *\n * ```js\n * const model = tf.sequential();\n * model.add(\n * tf.layers.dense({units: 1, inputShape: [100], activation: 'sigmoid'}));\n *\n * const saveResult = await model.save('indexeddb://MyModel'));\n * console.log(saveResult);\n * ```\n *\n * @param modelPath A unique identifier for the model to be saved. Must be a\n * non-empty string.\n * @returns An instance of `BrowserIndexedDB` (sublcass of `IOHandler`),\n * which can be used with, e.g., `tf.Model.save`.\n */\nexport function browserIndexedDB(modelPath) {\n return new BrowserIndexedDB(modelPath);\n}\nfunction maybeStripScheme(key) {\n return key.startsWith(BrowserIndexedDB.URL_SCHEME) ?\n key.slice(BrowserIndexedDB.URL_SCHEME.length) :\n key;\n}\nexport class BrowserIndexedDBManager {\n constructor() {\n this.indexedDB = getIndexedDBFactory();\n }\n async listModels() {\n return new Promise((resolve, reject) => {\n const openRequest = this.indexedDB.open(DATABASE_NAME, DATABASE_VERSION);\n openRequest.onupgradeneeded = () => setUpDatabase(openRequest);\n openRequest.onsuccess = () => {\n const db = openRequest.result;\n const tx = db.transaction(INFO_STORE_NAME, 'readonly');\n const store = tx.objectStore(INFO_STORE_NAME);\n // tslint:disable:max-line-length\n // Need to cast `store` as `any` here because TypeScript's DOM\n // library does not have the `getAll()` method even though the\n // method is supported in the latest version of most mainstream\n // browsers:\n // https://developer.mozilla.org/en-US/docs/Web/API/IDBObjectStore/getAll\n // tslint:enable:max-line-length\n // tslint:disable-next-line:no-any\n const getAllInfoRequest = store.getAll();\n getAllInfoRequest.onsuccess = () => {\n const out = {};\n for (const item of getAllInfoRequest.result) {\n out[item.modelPath] = item.modelArtifactsInfo;\n }\n resolve(out);\n };\n getAllInfoRequest.onerror = error => {\n db.close();\n return reject(getAllInfoRequest.error);\n };\n tx.oncomplete = () => db.close();\n };\n openRequest.onerror = error => reject(openRequest.error);\n });\n }\n async removeModel(path) {\n path = maybeStripScheme(path);\n return new Promise((resolve, reject) => {\n const openRequest = this.indexedDB.open(DATABASE_NAME, DATABASE_VERSION);\n 
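// Usage sketch for the IndexedDB handler above: the 'indexeddb://' scheme is
// resolved through the registered router, so saving, listing, loading and
// removing all go through the same string-based tf.io API. The model name is a
// placeholder and `tf` is assumed to be the tfjs namespace these modules build.
async function indexedDBRoundTrip(model) {
  await model.save('indexeddb://my-model');            // BrowserIndexedDB.save()
  console.log(await tf.io.listModels());               // includes 'indexeddb://my-model'
  const restored = await tf.loadLayersModel('indexeddb://my-model');
  await tf.io.removeModel('indexeddb://my-model');     // BrowserIndexedDBManager.removeModel()
  return restored;
}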
openRequest.onupgradeneeded = () => setUpDatabase(openRequest);\n openRequest.onsuccess = () => {\n const db = openRequest.result;\n const infoTx = db.transaction(INFO_STORE_NAME, 'readwrite');\n const infoStore = infoTx.objectStore(INFO_STORE_NAME);\n const getInfoRequest = infoStore.get(path);\n let modelTx;\n getInfoRequest.onsuccess = () => {\n if (getInfoRequest.result == null) {\n db.close();\n return reject(new Error(`Cannot find model with path '${path}' ` +\n `in IndexedDB.`));\n }\n else {\n // First, delete the entry in the info store.\n const deleteInfoRequest = infoStore.delete(path);\n const deleteModelData = () => {\n // Second, delete the entry in the model store.\n modelTx = db.transaction(MODEL_STORE_NAME, 'readwrite');\n const modelStore = modelTx.objectStore(MODEL_STORE_NAME);\n const deleteModelRequest = modelStore.delete(path);\n deleteModelRequest.onsuccess = () => resolve(getInfoRequest.result.modelArtifactsInfo);\n deleteModelRequest.onerror = error => reject(getInfoRequest.error);\n };\n // Proceed with deleting model data regardless of whether deletion\n // of info data succeeds or not.\n deleteInfoRequest.onsuccess = deleteModelData;\n deleteInfoRequest.onerror = error => {\n deleteModelData();\n db.close();\n return reject(getInfoRequest.error);\n };\n }\n };\n getInfoRequest.onerror = error => {\n db.close();\n return reject(getInfoRequest.error);\n };\n infoTx.oncomplete = () => {\n if (modelTx == null) {\n db.close();\n }\n else {\n modelTx.oncomplete = () => db.close();\n }\n };\n };\n openRequest.onerror = error => reject(openRequest.error);\n });\n }\n}\n//# sourceMappingURL=indexed_db.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport '../flags';\nimport { env } from '../environment';\nimport { assert } from '../util';\nimport { arrayBufferToBase64String, base64StringToArrayBuffer, getModelArtifactsInfoForJSON } from './io_utils';\nimport { IORouterRegistry } from './router_registry';\nconst PATH_SEPARATOR = '/';\nconst PATH_PREFIX = 'tensorflowjs_models';\nconst INFO_SUFFIX = 'info';\nconst MODEL_TOPOLOGY_SUFFIX = 'model_topology';\nconst WEIGHT_SPECS_SUFFIX = 'weight_specs';\nconst WEIGHT_DATA_SUFFIX = 'weight_data';\nconst MODEL_METADATA_SUFFIX = 'model_metadata';\n/**\n * Purge all tensorflow.js-saved model artifacts from local storage.\n *\n * @returns Paths of the models purged.\n */\nexport function purgeLocalStorageArtifacts() {\n if (!env().getBool('IS_BROWSER') || typeof window === 'undefined' ||\n typeof window.localStorage === 'undefined') {\n throw new Error('purgeLocalStorageModels() cannot proceed because local storage is ' +\n 'unavailable in the current environment.');\n }\n const LS = window.localStorage;\n const purgedModelPaths = [];\n for (let i = 0; i < LS.length; ++i) {\n const key = LS.key(i);\n const prefix = PATH_PREFIX + PATH_SEPARATOR;\n if 
(key.startsWith(prefix) && key.length > prefix.length) {\n LS.removeItem(key);\n const modelName = getModelPathFromKey(key);\n if (purgedModelPaths.indexOf(modelName) === -1) {\n purgedModelPaths.push(modelName);\n }\n }\n }\n return purgedModelPaths;\n}\nfunction getModelKeys(path) {\n return {\n info: [PATH_PREFIX, path, INFO_SUFFIX].join(PATH_SEPARATOR),\n topology: [PATH_PREFIX, path, MODEL_TOPOLOGY_SUFFIX].join(PATH_SEPARATOR),\n weightSpecs: [PATH_PREFIX, path, WEIGHT_SPECS_SUFFIX].join(PATH_SEPARATOR),\n weightData: [PATH_PREFIX, path, WEIGHT_DATA_SUFFIX].join(PATH_SEPARATOR),\n modelMetadata: [PATH_PREFIX, path, MODEL_METADATA_SUFFIX].join(PATH_SEPARATOR)\n };\n}\n/**\n * Get model path from a local-storage key.\n *\n * E.g., 'tensorflowjs_models/my/model/1/info' --> 'my/model/1'\n *\n * @param key\n */\nfunction getModelPathFromKey(key) {\n const items = key.split(PATH_SEPARATOR);\n if (items.length < 3) {\n throw new Error(`Invalid key format: ${key}`);\n }\n return items.slice(1, items.length - 1).join(PATH_SEPARATOR);\n}\nfunction maybeStripScheme(key) {\n return key.startsWith(BrowserLocalStorage.URL_SCHEME) ?\n key.slice(BrowserLocalStorage.URL_SCHEME.length) :\n key;\n}\n/**\n * IOHandler subclass: Browser Local Storage.\n *\n * See the doc string to `browserLocalStorage` for more details.\n */\nexport class BrowserLocalStorage {\n constructor(modelPath) {\n if (!env().getBool('IS_BROWSER') || typeof window === 'undefined' ||\n typeof window.localStorage === 'undefined') {\n // TODO(cais): Add more info about what IOHandler subtypes are\n // available.\n // Maybe point to a doc page on the web and/or automatically determine\n // the available IOHandlers and print them in the error message.\n throw new Error('The current environment does not support local storage.');\n }\n this.LS = window.localStorage;\n if (modelPath == null || !modelPath) {\n throw new Error('For local storage, modelPath must not be null, undefined or empty.');\n }\n this.modelPath = modelPath;\n this.keys = getModelKeys(this.modelPath);\n }\n /**\n * Save model artifacts to browser local storage.\n *\n * See the documentation to `browserLocalStorage` for details on the saved\n * artifacts.\n *\n * @param modelArtifacts The model artifacts to be stored.\n * @returns An instance of SaveResult.\n */\n async save(modelArtifacts) {\n if (modelArtifacts.modelTopology instanceof ArrayBuffer) {\n throw new Error('BrowserLocalStorage.save() does not support saving model topology ' +\n 'in binary formats yet.');\n }\n else {\n const topology = JSON.stringify(modelArtifacts.modelTopology);\n const weightSpecs = JSON.stringify(modelArtifacts.weightSpecs);\n const modelArtifactsInfo = getModelArtifactsInfoForJSON(modelArtifacts);\n try {\n this.LS.setItem(this.keys.info, JSON.stringify(modelArtifactsInfo));\n this.LS.setItem(this.keys.topology, topology);\n this.LS.setItem(this.keys.weightSpecs, weightSpecs);\n this.LS.setItem(this.keys.weightData, arrayBufferToBase64String(modelArtifacts.weightData));\n this.LS.setItem(this.keys.modelMetadata, JSON.stringify({\n format: modelArtifacts.format,\n generatedBy: modelArtifacts.generatedBy,\n convertedBy: modelArtifacts.convertedBy,\n userDefinedMetadata: modelArtifacts.userDefinedMetadata\n }));\n return { modelArtifactsInfo };\n }\n catch (err) {\n // If saving failed, clean up all items saved so far.\n this.LS.removeItem(this.keys.info);\n this.LS.removeItem(this.keys.topology);\n this.LS.removeItem(this.keys.weightSpecs);\n this.LS.removeItem(this.keys.weightData);\n 
this.LS.removeItem(this.keys.modelMetadata);\n throw new Error(`Failed to save model '${this.modelPath}' to local storage: ` +\n `size quota being exceeded is a possible cause of this failure: ` +\n `modelTopologyBytes=${modelArtifactsInfo.modelTopologyBytes}, ` +\n `weightSpecsBytes=${modelArtifactsInfo.weightSpecsBytes}, ` +\n `weightDataBytes=${modelArtifactsInfo.weightDataBytes}.`);\n }\n }\n }\n /**\n * Load a model from local storage.\n *\n * See the documentation to `browserLocalStorage` for details on the saved\n * artifacts.\n *\n * @returns The loaded model (if loading succeeds).\n */\n async load() {\n const info = JSON.parse(this.LS.getItem(this.keys.info));\n if (info == null) {\n throw new Error(`In local storage, there is no model with name '${this.modelPath}'`);\n }\n if (info.modelTopologyType !== 'JSON') {\n throw new Error('BrowserLocalStorage does not support loading non-JSON model ' +\n 'topology yet.');\n }\n const out = {};\n // Load topology.\n const topology = JSON.parse(this.LS.getItem(this.keys.topology));\n if (topology == null) {\n throw new Error(`In local storage, the topology of model '${this.modelPath}' ` +\n `is missing.`);\n }\n out.modelTopology = topology;\n // Load weight specs.\n const weightSpecs = JSON.parse(this.LS.getItem(this.keys.weightSpecs));\n if (weightSpecs == null) {\n throw new Error(`In local storage, the weight specs of model '${this.modelPath}' ` +\n `are missing.`);\n }\n out.weightSpecs = weightSpecs;\n // Load meta-data fields.\n const metadataString = this.LS.getItem(this.keys.modelMetadata);\n if (metadataString != null) {\n const metadata = JSON.parse(metadataString);\n out.format = metadata['format'];\n out.generatedBy = metadata['generatedBy'];\n out.convertedBy = metadata['convertedBy'];\n out.userDefinedMetadata = metadata['userDefinedMetadata'];\n }\n // Load weight data.\n const weightDataBase64 = this.LS.getItem(this.keys.weightData);\n if (weightDataBase64 == null) {\n throw new Error(`In local storage, the binary weight values of model ` +\n `'${this.modelPath}' are missing.`);\n }\n out.weightData = base64StringToArrayBuffer(weightDataBase64);\n return out;\n }\n}\nBrowserLocalStorage.URL_SCHEME = 'localstorage://';\nexport const localStorageRouter = (url) => {\n if (!env().getBool('IS_BROWSER')) {\n return null;\n }\n else {\n if (!Array.isArray(url) && url.startsWith(BrowserLocalStorage.URL_SCHEME)) {\n return browserLocalStorage(url.slice(BrowserLocalStorage.URL_SCHEME.length));\n }\n else {\n return null;\n }\n }\n};\nIORouterRegistry.registerSaveRouter(localStorageRouter);\nIORouterRegistry.registerLoadRouter(localStorageRouter);\n/**\n * Factory function for local storage IOHandler.\n *\n * This `IOHandler` supports both `save` and `load`.\n *\n * For each model's saved artifacts, four items are saved to local storage.\n * - `${PATH_SEPARATOR}/${modelPath}/info`: Contains meta-info about the\n * model, such as date saved, type of the topology, size in bytes, etc.\n * - `${PATH_SEPARATOR}/${modelPath}/topology`: Model topology. 
For Keras-\n * style models, this is a stringized JSON.\n * - `${PATH_SEPARATOR}/${modelPath}/weight_specs`: Weight specs of the\n * model, can be used to decode the saved binary weight values (see\n * item below).\n * - `${PATH_SEPARATOR}/${modelPath}/weight_data`: Concatenated binary\n * weight values, stored as a base64-encoded string.\n *\n * Saving may throw an `Error` if the total size of the artifacts exceed the\n * browser-specific quota.\n *\n * @param modelPath A unique identifier for the model to be saved. Must be a\n * non-empty string.\n * @returns An instance of `IOHandler`, which can be used with, e.g.,\n * `tf.Model.save`.\n */\nexport function browserLocalStorage(modelPath) {\n return new BrowserLocalStorage(modelPath);\n}\nexport class BrowserLocalStorageManager {\n constructor() {\n assert(env().getBool('IS_BROWSER'), () => 'Current environment is not a web browser');\n assert(typeof window === 'undefined' ||\n typeof window.localStorage !== 'undefined', () => 'Current browser does not appear to support localStorage');\n this.LS = window.localStorage;\n }\n async listModels() {\n const out = {};\n const prefix = PATH_PREFIX + PATH_SEPARATOR;\n const suffix = PATH_SEPARATOR + INFO_SUFFIX;\n for (let i = 0; i < this.LS.length; ++i) {\n const key = this.LS.key(i);\n if (key.startsWith(prefix) && key.endsWith(suffix)) {\n const modelPath = getModelPathFromKey(key);\n out[modelPath] = JSON.parse(this.LS.getItem(key));\n }\n }\n return out;\n }\n async removeModel(path) {\n path = maybeStripScheme(path);\n const keys = getModelKeys(path);\n if (this.LS.getItem(keys.info) == null) {\n throw new Error(`Cannot find model at path '${path}'`);\n }\n const info = JSON.parse(this.LS.getItem(keys.info));\n this.LS.removeItem(keys.info);\n this.LS.removeItem(keys.topology);\n this.LS.removeItem(keys.weightSpecs);\n this.LS.removeItem(keys.weightData);\n return info;\n }\n}\n//# sourceMappingURL=local_storage.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
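// Sketch of the local-storage layout produced by BrowserLocalStorage.save()
// above: for a model saved under 'localstorage://my-model', five keys are
// written beneath the 'tensorflowjs_models' prefix, named as in getModelKeys().
// The inspection loop itself is illustrative only.
async function inspectLocalStorageModel(model) {
  await model.save('localstorage://my-model');
  for (const suffix of ['info', 'model_topology', 'weight_specs', 'weight_data', 'model_metadata']) {
    const key = `tensorflowjs_models/my-model/${suffix}`;
    console.log(key, '->', (window.localStorage.getItem(key) || '').length, 'chars');
  }
}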
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * Classes and functions for model management across multiple storage mediums.\n *\n * Supported client actions:\n * - Listing models on all registered storage mediums.\n * - Remove model by URL from any registered storage mediums, by using URL\n * string.\n * - Moving or copying model from one path to another in the same medium or from\n * one medium to another, by using URL strings.\n */\nimport { assert } from '../util';\nimport { IORouterRegistry } from './router_registry';\nconst URL_SCHEME_SUFFIX = '://';\nexport class ModelStoreManagerRegistry {\n constructor() {\n this.managers = {};\n }\n static getInstance() {\n if (ModelStoreManagerRegistry.instance == null) {\n ModelStoreManagerRegistry.instance = new ModelStoreManagerRegistry();\n }\n return ModelStoreManagerRegistry.instance;\n }\n /**\n * Register a save-handler router.\n *\n * @param saveRouter A function that maps a URL-like string onto an instance\n * of `IOHandler` with the `save` method defined or `null`.\n */\n static registerManager(scheme, manager) {\n assert(scheme != null, () => 'scheme must not be undefined or null.');\n if (scheme.endsWith(URL_SCHEME_SUFFIX)) {\n scheme = scheme.slice(0, scheme.indexOf(URL_SCHEME_SUFFIX));\n }\n assert(scheme.length > 0, () => 'scheme must not be an empty string.');\n const registry = ModelStoreManagerRegistry.getInstance();\n assert(registry.managers[scheme] == null, () => `A model store manager is already registered for scheme '${scheme}'.`);\n registry.managers[scheme] = manager;\n }\n static getManager(scheme) {\n const manager = this.getInstance().managers[scheme];\n if (manager == null) {\n throw new Error(`Cannot find model manager for scheme '${scheme}'`);\n }\n return manager;\n }\n static getSchemes() {\n return Object.keys(this.getInstance().managers);\n }\n}\n/**\n * Helper method for parsing a URL string into a scheme and a path.\n *\n * @param url E.g., 'localstorage://my-model'\n * @returns A dictionary with two fields: scheme and path.\n * Scheme: e.g., 'localstorage' in the example above.\n * Path: e.g., 'my-model' in the example above.\n */\nfunction parseURL(url) {\n if (url.indexOf(URL_SCHEME_SUFFIX) === -1) {\n throw new Error(`The url string provided does not contain a scheme. 
` +\n `Supported schemes are: ` +\n `${ModelStoreManagerRegistry.getSchemes().join(',')}`);\n }\n return {\n scheme: url.split(URL_SCHEME_SUFFIX)[0],\n path: url.split(URL_SCHEME_SUFFIX)[1],\n };\n}\nasync function cloneModelInternal(sourceURL, destURL, deleteSource = false) {\n assert(sourceURL !== destURL, () => `Old path and new path are the same: '${sourceURL}'`);\n const loadHandlers = IORouterRegistry.getLoadHandlers(sourceURL);\n assert(loadHandlers.length > 0, () => `Copying failed because no load handler is found for source URL ${sourceURL}.`);\n assert(loadHandlers.length < 2, () => `Copying failed because more than one (${loadHandlers.length}) ` +\n `load handlers for source URL ${sourceURL}.`);\n const loadHandler = loadHandlers[0];\n const saveHandlers = IORouterRegistry.getSaveHandlers(destURL);\n assert(saveHandlers.length > 0, () => `Copying failed because no save handler is found for destination ` +\n `URL ${destURL}.`);\n assert(saveHandlers.length < 2, () => `Copying failed because more than one (${loadHandlers.length}) ` +\n `save handlers for destination URL ${destURL}.`);\n const saveHandler = saveHandlers[0];\n const sourceScheme = parseURL(sourceURL).scheme;\n const sourcePath = parseURL(sourceURL).path;\n const sameMedium = sourceScheme === parseURL(sourceURL).scheme;\n const modelArtifacts = await loadHandler.load();\n // If moving within the same storage medium, remove the old model as soon as\n // the loading is done. Without doing this, it is possible that the combined\n // size of the two models will cause the cloning to fail.\n if (deleteSource && sameMedium) {\n await ModelStoreManagerRegistry.getManager(sourceScheme)\n .removeModel(sourcePath);\n }\n const saveResult = await saveHandler.save(modelArtifacts);\n // If moving between mediums, the deletion is done after the save succeeds.\n // This guards against the case in which saving to the destination medium\n // fails.\n if (deleteSource && !sameMedium) {\n await ModelStoreManagerRegistry.getManager(sourceScheme)\n .removeModel(sourcePath);\n }\n return saveResult.modelArtifactsInfo;\n}\n/**\n * List all models stored in registered storage mediums.\n *\n * For a web browser environment, the registered mediums are Local Storage and\n * IndexedDB.\n *\n * ```js\n * // First create and save a model.\n * const model = tf.sequential();\n * model.add(tf.layers.dense(\n * {units: 1, inputShape: [10], activation: 'sigmoid'}));\n * await model.save('localstorage://demo/management/model1');\n *\n * // Then list existing models.\n * console.log(JSON.stringify(await tf.io.listModels()));\n *\n * // Delete the model.\n * await tf.io.removeModel('localstorage://demo/management/model1');\n *\n * // List models again.\n * console.log(JSON.stringify(await tf.io.listModels()));\n * ```\n *\n * @returns A `Promise` of a dictionary mapping URLs of existing models to\n * their model artifacts info. URLs include medium-specific schemes, e.g.,\n * 'indexeddb://my/model/1'. 
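// parseURL above splits a medium-qualified model URL into scheme and path on
// the '://' separator; cloneModelInternal then uses the scheme to look up the
// matching store manager. A minimal standalone illustration of the same split:
function splitModelURL(url) {
  const SUFFIX = '://';
  if (url.indexOf(SUFFIX) === -1) throw new Error(`No scheme in url: ${url}`);
  return { scheme: url.split(SUFFIX)[0], path: url.split(SUFFIX)[1] };
}
// splitModelURL('indexeddb://my/model/1')   -> {scheme: 'indexeddb', path: 'my/model/1'}
// splitModelURL('localstorage://demo/model') -> {scheme: 'localstorage', path: 'demo/model'}
// Note the ordering in cloneModelInternal: a move within one medium deletes the
// source before saving (to stay under quota), a move across mediums deletes it
// only after the save succeeds.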
Model artifacts info include type of the\n * model's topology, byte sizes of the topology, weights, etc.\n *\n * @doc {\n * heading: 'Models',\n * subheading: 'Management',\n * namespace: 'io',\n * ignoreCI: true\n * }\n */\nasync function listModels() {\n const schemes = ModelStoreManagerRegistry.getSchemes();\n const out = {};\n for (const scheme of schemes) {\n const schemeOut = await ModelStoreManagerRegistry.getManager(scheme).listModels();\n for (const path in schemeOut) {\n const url = scheme + URL_SCHEME_SUFFIX + path;\n out[url] = schemeOut[path];\n }\n }\n return out;\n}\n/**\n * Remove a model specified by URL from a reigstered storage medium.\n *\n * ```js\n * // First create and save a model.\n * const model = tf.sequential();\n * model.add(tf.layers.dense(\n * {units: 1, inputShape: [10], activation: 'sigmoid'}));\n * await model.save('localstorage://demo/management/model1');\n *\n * // Then list existing models.\n * console.log(JSON.stringify(await tf.io.listModels()));\n *\n * // Delete the model.\n * await tf.io.removeModel('localstorage://demo/management/model1');\n *\n * // List models again.\n * console.log(JSON.stringify(await tf.io.listModels()));\n * ```\n *\n * @param url A URL to a stored model, with a scheme prefix, e.g.,\n * 'localstorage://my-model-1', 'indexeddb://my/model/2'.\n * @returns ModelArtifactsInfo of the deleted model (if and only if deletion\n * is successful).\n * @throws Error if deletion fails, e.g., if no model exists at `path`.\n *\n * @doc {\n * heading: 'Models',\n * subheading: 'Management',\n * namespace: 'io',\n * ignoreCI: true\n * }\n */\nasync function removeModel(url) {\n const schemeAndPath = parseURL(url);\n const manager = ModelStoreManagerRegistry.getManager(schemeAndPath.scheme);\n return manager.removeModel(schemeAndPath.path);\n}\n/**\n * Copy a model from one URL to another.\n *\n * This function supports:\n *\n * 1. Copying within a storage medium, e.g.,\n * `tf.io.copyModel('localstorage://model-1', 'localstorage://model-2')`\n * 2. 
Copying between two storage mediums, e.g.,\n * `tf.io.copyModel('localstorage://model-1', 'indexeddb://model-1')`\n *\n * ```js\n * // First create and save a model.\n * const model = tf.sequential();\n * model.add(tf.layers.dense(\n * {units: 1, inputShape: [10], activation: 'sigmoid'}));\n * await model.save('localstorage://demo/management/model1');\n *\n * // Then list existing models.\n * console.log(JSON.stringify(await tf.io.listModels()));\n *\n * // Copy the model, from Local Storage to IndexedDB.\n * await tf.io.copyModel(\n * 'localstorage://demo/management/model1',\n * 'indexeddb://demo/management/model1');\n *\n * // List models again.\n * console.log(JSON.stringify(await tf.io.listModels()));\n *\n * // Remove both models.\n * await tf.io.removeModel('localstorage://demo/management/model1');\n * await tf.io.removeModel('indexeddb://demo/management/model1');\n * ```\n *\n * @param sourceURL Source URL of copying.\n * @param destURL Destination URL of copying.\n * @returns ModelArtifactsInfo of the copied model (if and only if copying\n * is successful).\n * @throws Error if copying fails, e.g., if no model exists at `sourceURL`, or\n * if `oldPath` and `newPath` are identical.\n *\n * @doc {\n * heading: 'Models',\n * subheading: 'Management',\n * namespace: 'io',\n * ignoreCI: true\n * }\n */\nasync function copyModel(sourceURL, destURL) {\n const deleteSource = false;\n return cloneModelInternal(sourceURL, destURL, deleteSource);\n}\n/**\n * Move a model from one URL to another.\n *\n * This function supports:\n *\n * 1. Moving within a storage medium, e.g.,\n * `tf.io.moveModel('localstorage://model-1', 'localstorage://model-2')`\n * 2. Moving between two storage mediums, e.g.,\n * `tf.io.moveModel('localstorage://model-1', 'indexeddb://model-1')`\n *\n * ```js\n * // First create and save a model.\n * const model = tf.sequential();\n * model.add(tf.layers.dense(\n * {units: 1, inputShape: [10], activation: 'sigmoid'}));\n * await model.save('localstorage://demo/management/model1');\n *\n * // Then list existing models.\n * console.log(JSON.stringify(await tf.io.listModels()));\n *\n * // Move the model, from Local Storage to IndexedDB.\n * await tf.io.moveModel(\n * 'localstorage://demo/management/model1',\n * 'indexeddb://demo/management/model1');\n *\n * // List models again.\n * console.log(JSON.stringify(await tf.io.listModels()));\n *\n * // Remove the moved model.\n * await tf.io.removeModel('indexeddb://demo/management/model1');\n * ```\n *\n * @param sourceURL Source URL of moving.\n * @param destURL Destination URL of moving.\n * @returns ModelArtifactsInfo of the copied model (if and only if copying\n * is successful).\n * @throws Error if moving fails, e.g., if no model exists at `sourceURL`, or\n * if `oldPath` and `newPath` are identical.\n *\n * @doc {\n * heading: 'Models',\n * subheading: 'Management',\n * namespace: 'io',\n * ignoreCI: true\n * }\n */\nasync function moveModel(sourceURL, destURL) {\n const deleteSource = true;\n return cloneModelInternal(sourceURL, destURL, deleteSource);\n}\nexport { moveModel, copyModel, removeModel, listModels };\n//# sourceMappingURL=model_management.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport '../flags';\nimport { env } from '../environment';\nimport { BrowserIndexedDB, BrowserIndexedDBManager } from '../io/indexed_db';\nimport { BrowserLocalStorage, BrowserLocalStorageManager } from '../io/local_storage';\nimport { ModelStoreManagerRegistry } from '../io/model_management';\nexport class PlatformBrowser {\n fetch(path, init) {\n return fetch(path, init);\n }\n now() {\n return performance.now();\n }\n encode(text, encoding) {\n if (encoding !== 'utf-8' && encoding !== 'utf8') {\n throw new Error(`Browser's encoder only supports utf-8, but got ${encoding}`);\n }\n if (this.textEncoder == null) {\n this.textEncoder = new TextEncoder();\n }\n return this.textEncoder.encode(text);\n }\n decode(bytes, encoding) {\n return new TextDecoder(encoding).decode(bytes);\n }\n}\nif (env().get('IS_BROWSER')) {\n env().setPlatform('browser', new PlatformBrowser());\n // Register LocalStorage IOHandler\n try {\n ModelStoreManagerRegistry.registerManager(BrowserLocalStorage.URL_SCHEME, new BrowserLocalStorageManager());\n }\n catch (err) {\n }\n // Register IndexedDB IOHandler\n try {\n ModelStoreManagerRegistry.registerManager(BrowserIndexedDB.URL_SCHEME, new BrowserIndexedDBManager());\n }\n catch (err) {\n }\n}\n//# sourceMappingURL=platform_browser.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
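// The browser platform above delegates string encoding to TextEncoder /
// TextDecoder (utf-8 only for encoding). A standalone round trip equivalent to
// what PlatformBrowser.encode()/decode() do:
const utf8Bytes = new TextEncoder().encode('héllo');          // Uint8Array of utf-8 bytes
const decodedText = new TextDecoder('utf-8').decode(utf8Bytes); // 'héllo'
console.log(utf8Bytes.length, decodedText);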
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from '../environment';\n// We are wrapping this within an object so it can be stubbed by Jasmine.\nexport const getNodeFetch = {\n // tslint:disable-next-line:no-require-imports\n importFetch: () => require('node-fetch')\n};\nlet systemFetch;\n// These getters and setters are for testing so we don't export a mutable\n// variable.\nexport function resetSystemFetch() {\n systemFetch = null;\n}\nexport function setSystemFetch(fetchFn) {\n systemFetch = fetchFn;\n}\nexport function getSystemFetch() {\n return systemFetch;\n}\nexport class PlatformNode {\n constructor() {\n // tslint:disable-next-line:no-require-imports\n this.util = require('util');\n // According to the spec, the built-in encoder can do only UTF-8 encoding.\n // https://developer.mozilla.org/en-US/docs/Web/API/TextEncoder/TextEncoder\n this.textEncoder = new this.util.TextEncoder();\n }\n fetch(path, requestInits) {\n if (env().global.fetch != null) {\n return env().global.fetch(path, requestInits);\n }\n if (systemFetch == null) {\n systemFetch = getNodeFetch.importFetch();\n }\n return systemFetch(path, requestInits);\n }\n now() {\n const time = process.hrtime();\n return time[0] * 1000 + time[1] / 1000000;\n }\n encode(text, encoding) {\n if (encoding !== 'utf-8' && encoding !== 'utf8') {\n throw new Error(`Node built-in encoder only supports utf-8, but got ${encoding}`);\n }\n return this.textEncoder.encode(text);\n }\n decode(bytes, encoding) {\n if (bytes.length === 0) {\n return '';\n }\n return new this.util.TextDecoder(encoding).decode(bytes);\n }\n}\nif (env().get('IS_NODE')) {\n env().setPlatform('node', new PlatformNode());\n}\n//# sourceMappingURL=platform_node.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { TensorBuffer } from '../tensor';\nimport * as util from '../util';\n/**\n * Creates an empty `tf.TensorBuffer` with the specified `shape` and `dtype`.\n *\n * The values are stored in CPU as `TypedArray`. 
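// PlatformNode.now() above converts process.hrtime()'s [seconds, nanoseconds]
// pair into milliseconds. The same conversion, stated directly (Node only):
const [seconds, nanoseconds] = process.hrtime();
const millis = seconds * 1000 + nanoseconds / 1e6;
// e.g. [2, 500000000] -> 2 * 1000 + 500000000 / 1e6 = 2500 ms
console.log(millis);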
Fill the buffer using\n * `buffer.set()`, or by modifying directly `buffer.values`.\n *\n * When done, call `buffer.toTensor()` to get an immutable `tf.Tensor` with\n * those values.\n *\n * ```js\n * // Create a buffer and set values at particular indices.\n * const buffer = tf.buffer([2, 2]);\n * buffer.set(3, 0, 0);\n * buffer.set(5, 1, 0);\n *\n * // Convert the buffer back to a tensor.\n * buffer.toTensor().print();\n * ```\n *\n * @param shape An array of integers defining the output tensor shape.\n * @param dtype The dtype of the buffer. Defaults to 'float32'.\n * @param values The values of the buffer as `TypedArray`. Defaults to\n * zeros.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function buffer(shape, dtype = 'float32', values) {\n dtype = dtype || 'float32';\n util.assertNonNegativeIntegerDimensions(shape);\n return new TensorBuffer(shape, dtype, values);\n}\n//# sourceMappingURL=buffer.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Cast } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * Casts a `tf.Tensor` to a new dtype.\n *\n * ```js\n * const x = tf.tensor1d([1.5, 2.5, 3]);\n * tf.cast(x, 'int32').print();\n * ```\n * @param x The input tensor to be casted.\n * @param dtype The dtype to cast the input tensor to.\n *\n * @doc {heading: 'Tensors', subheading: 'Transformations'}\n */\nfunction cast_(x, dtype) {\n const $x = convertToTensor(x, 'x', 'cast');\n // Sanity checks.\n if (!util.isValidDtype(dtype)) {\n throw new Error(`Failed to cast to unknown dtype ${dtype}`);\n }\n if (dtype === 'string' && $x.dtype !== 'string' ||\n dtype !== 'string' && $x.dtype === 'string') {\n throw new Error('Only strings can be casted to strings');\n }\n const inputs = { x: $x };\n const attrs = { dtype };\n return ENGINE.runKernelFunc(backend => backend.cast($x, dtype), inputs, null /* grad */, Cast, attrs);\n}\nexport const cast = op({ cast_ });\n//# sourceMappingURL=cast.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Identity } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Creates a new tensor with the same values and shape as the specified\n * tensor.\n *\n * ```js\n * const x = tf.tensor([1, 2]);\n *\n * x.clone().print();\n * ```\n *\n * @param x The tensor to clone.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nfunction clone_(x) {\n const $x = convertToTensor(x, 'x', 'clone', null);\n const forward = () => ENGINE.makeTensorFromDataId($x.dataId, $x.shape, $x.dtype);\n const inputs = { x: $x };\n // Note this op is called tf.identity in python. Hence the kernel name used\n // here.\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Identity);\n}\nexport const clone = op({ clone_ });\n//# sourceMappingURL=clone.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * Prints information about the `tf.Tensor` including its data.\n *\n * ```js\n * const verbose = true;\n * tf.tensor2d([1, 2, 3, 4], [2, 2]).print(verbose);\n * ```\n * @param x The tensor to be printed.\n * @param verbose Whether to print verbose information about the ` Tensor`,\n * including dtype and size.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function print(x, verbose = false) {\n console.log(x.toString(verbose));\n}\n//# sourceMappingURL=print.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// Required side effectful code for tfjs-core\n// Set up Engine and ENV\nimport { getOrMakeEngine } from './engine';\ngetOrMakeEngine();\n// Register backend-agnostic flags.\nimport './flags';\n// Register platforms\nimport './platforms/platform_browser';\nimport './platforms/platform_node';\n// Set up OpHandler\nimport { buffer } from './ops/buffer';\nimport { cast } from './ops/cast';\nimport { clone } from './ops/clone';\nimport { print } from './ops/print';\nimport { setOpHandler } from './tensor';\nconst opHandler = {\n buffer,\n cast,\n clone,\n print\n};\nsetOpHandler(opHandler);\n//# sourceMappingURL=base_side_effects.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * IOHandlers related to files, such as browser-triggered file downloads,\n * user-selected files in browser.\n */\nimport '../flags';\nimport { env } from '../environment';\nimport { basename, concatenateArrayBuffers, getModelArtifactsInfoForJSON } from './io_utils';\nimport { IORouterRegistry } from './router_registry';\nconst DEFAULT_FILE_NAME_PREFIX = 'model';\nconst DEFAULT_JSON_EXTENSION_NAME = '.json';\nconst DEFAULT_WEIGHT_DATA_EXTENSION_NAME = '.weights.bin';\nfunction defer(f) {\n return new Promise(resolve => setTimeout(resolve)).then(f);\n}\nexport class BrowserDownloads {\n constructor(fileNamePrefix) {\n if (!env().getBool('IS_BROWSER')) {\n // TODO(cais): Provide info on what IOHandlers are available under the\n // current environment.\n throw new Error('browserDownloads() cannot proceed because the current environment ' +\n 'is not a browser.');\n }\n if (fileNamePrefix.startsWith(BrowserDownloads.URL_SCHEME)) {\n fileNamePrefix = fileNamePrefix.slice(BrowserDownloads.URL_SCHEME.length);\n }\n if (fileNamePrefix == null || fileNamePrefix.length === 0) {\n fileNamePrefix = DEFAULT_FILE_NAME_PREFIX;\n }\n this.modelTopologyFileName = fileNamePrefix + DEFAULT_JSON_EXTENSION_NAME;\n this.weightDataFileName =\n fileNamePrefix + DEFAULT_WEIGHT_DATA_EXTENSION_NAME;\n }\n async save(modelArtifacts) {\n if (typeof (document) === 'undefined') {\n throw new Error('Browser downloads are not supported in ' +\n 'this environment since `document` is not present');\n }\n const 
weightsURL = window.URL.createObjectURL(new Blob([modelArtifacts.weightData], { type: 'application/octet-stream' }));\n if (modelArtifacts.modelTopology instanceof ArrayBuffer) {\n throw new Error('BrowserDownloads.save() does not support saving model topology ' +\n 'in binary formats yet.');\n }\n else {\n const weightsManifest = [{\n paths: ['./' + this.weightDataFileName],\n weights: modelArtifacts.weightSpecs\n }];\n const modelTopologyAndWeightManifest = {\n modelTopology: modelArtifacts.modelTopology,\n format: modelArtifacts.format,\n generatedBy: modelArtifacts.generatedBy,\n convertedBy: modelArtifacts.convertedBy,\n weightsManifest\n };\n const modelTopologyAndWeightManifestURL = window.URL.createObjectURL(new Blob([JSON.stringify(modelTopologyAndWeightManifest)], { type: 'application/json' }));\n // If anchor elements are not provided, create them without attaching them\n // to parents, so that the downloaded file names can be controlled.\n const jsonAnchor = this.jsonAnchor == null ? document.createElement('a') :\n this.jsonAnchor;\n jsonAnchor.download = this.modelTopologyFileName;\n jsonAnchor.href = modelTopologyAndWeightManifestURL;\n // Trigger downloads by evoking a click event on the download anchors.\n // When multiple downloads are started synchronously, Firefox will only\n // save the last one.\n await defer(() => jsonAnchor.dispatchEvent(new MouseEvent('click')));\n if (modelArtifacts.weightData != null) {\n const weightDataAnchor = this.weightDataAnchor == null ?\n document.createElement('a') :\n this.weightDataAnchor;\n weightDataAnchor.download = this.weightDataFileName;\n weightDataAnchor.href = weightsURL;\n await defer(() => weightDataAnchor.dispatchEvent(new MouseEvent('click')));\n }\n return { modelArtifactsInfo: getModelArtifactsInfoForJSON(modelArtifacts) };\n }\n }\n}\nBrowserDownloads.URL_SCHEME = 'downloads://';\nclass BrowserFiles {\n constructor(files) {\n if (files == null || files.length < 1) {\n throw new Error(`When calling browserFiles, at least 1 file is required, ` +\n `but received ${files}`);\n }\n this.files = files;\n }\n async load() {\n const jsonFile = this.files[0];\n const weightFiles = this.files.slice(1);\n return new Promise((resolve, reject) => {\n const jsonReader = new FileReader();\n jsonReader.onload = (event) => {\n // tslint:disable-next-line:no-any\n const modelJSON = JSON.parse(event.target.result);\n const modelTopology = modelJSON.modelTopology;\n if (modelTopology == null) {\n reject(new Error(`modelTopology field is missing from file ${jsonFile.name}`));\n return;\n }\n if (weightFiles.length === 0) {\n resolve({ modelTopology });\n }\n const weightsManifest = modelJSON.weightsManifest;\n if (weightsManifest == null) {\n reject(new Error(`weightManifest field is missing from file ${jsonFile.name}`));\n return;\n }\n let pathToFile;\n try {\n pathToFile =\n this.checkManifestAndWeightFiles(weightsManifest, weightFiles);\n }\n catch (err) {\n reject(err);\n return;\n }\n const weightSpecs = [];\n const paths = [];\n const perFileBuffers = [];\n weightsManifest.forEach(weightsGroup => {\n weightsGroup.paths.forEach(path => {\n paths.push(path);\n perFileBuffers.push(null);\n });\n weightSpecs.push(...weightsGroup.weights);\n });\n weightsManifest.forEach(weightsGroup => {\n weightsGroup.paths.forEach(path => {\n const weightFileReader = new FileReader();\n weightFileReader.onload = (event) => {\n // tslint:disable-next-line:no-any\n const weightData = event.target.result;\n const index = paths.indexOf(path);\n 
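// Sketch of the download mechanism BrowserDownloads.save() uses above: wrap the
// payload in a Blob, point an (unattached) anchor element at an object URL, and
// dispatch a click. File name and payload here are placeholders.
function triggerDownload(bytes, fileName) {
  const url = window.URL.createObjectURL(new Blob([bytes], { type: 'application/octet-stream' }));
  const anchor = document.createElement('a');
  anchor.download = fileName;
  anchor.href = url;
  anchor.dispatchEvent(new MouseEvent('click'));
}
// triggerDownload(new Uint8Array([1, 2, 3]), 'weights.bin');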
perFileBuffers[index] = weightData;\n if (perFileBuffers.indexOf(null) === -1) {\n resolve({\n modelTopology,\n weightSpecs,\n weightData: concatenateArrayBuffers(perFileBuffers),\n format: modelJSON.format,\n generatedBy: modelJSON.generatedBy,\n convertedBy: modelJSON.convertedBy,\n userDefinedMetadata: modelJSON.userDefinedMetadata\n });\n }\n };\n weightFileReader.onerror = error => reject(`Failed to weights data from file of path '${path}'.`);\n weightFileReader.readAsArrayBuffer(pathToFile[path]);\n });\n });\n };\n jsonReader.onerror = error => reject(`Failed to read model topology and weights manifest JSON ` +\n `from file '${jsonFile.name}'. BrowserFiles supports loading ` +\n `Keras-style tf.Model artifacts only.`);\n jsonReader.readAsText(jsonFile);\n });\n }\n /**\n * Check the compatibility between weights manifest and weight files.\n */\n checkManifestAndWeightFiles(manifest, files) {\n const basenames = [];\n const fileNames = files.map(file => basename(file.name));\n const pathToFile = {};\n for (const group of manifest) {\n group.paths.forEach(path => {\n const pathBasename = basename(path);\n if (basenames.indexOf(pathBasename) !== -1) {\n throw new Error(`Duplicate file basename found in weights manifest: ` +\n `'${pathBasename}'`);\n }\n basenames.push(pathBasename);\n if (fileNames.indexOf(pathBasename) === -1) {\n throw new Error(`Weight file with basename '${pathBasename}' is not provided.`);\n }\n else {\n pathToFile[path] = files[fileNames.indexOf(pathBasename)];\n }\n });\n }\n if (basenames.length !== files.length) {\n throw new Error(`Mismatch in the number of files in weights manifest ` +\n `(${basenames.length}) and the number of weight files provided ` +\n `(${files.length}).`);\n }\n return pathToFile;\n }\n}\nexport const browserDownloadsRouter = (url) => {\n if (!env().getBool('IS_BROWSER')) {\n return null;\n }\n else {\n if (!Array.isArray(url) && url.startsWith(BrowserDownloads.URL_SCHEME)) {\n return browserDownloads(url.slice(BrowserDownloads.URL_SCHEME.length));\n }\n else {\n return null;\n }\n }\n};\nIORouterRegistry.registerSaveRouter(browserDownloadsRouter);\n/**\n * Creates an IOHandler that triggers file downloads from the browser.\n *\n * The returned `IOHandler` instance can be used as model exporting methods such\n * as `tf.Model.save` and supports only saving.\n *\n * ```js\n * const model = tf.sequential();\n * model.add(tf.layers.dense(\n * {units: 1, inputShape: [10], activation: 'sigmoid'}));\n * const saveResult = await model.save('downloads://mymodel');\n * // This will trigger downloading of two files:\n * // 'mymodel.json' and 'mymodel.weights.bin'.\n * console.log(saveResult);\n * ```\n *\n * @param fileNamePrefix Prefix name of the files to be downloaded. For use with\n * `tf.Model`, `fileNamePrefix` should follow either of the following two\n * formats:\n * 1. `null` or `undefined`, in which case the default file\n * names will be used:\n * - 'model.json' for the JSON file containing the model topology and\n * weights manifest.\n * - 'model.weights.bin' for the binary file containing the binary weight\n * values.\n * 2. 
A single string or an Array of a single string, as the file name prefix.\n * For example, if `'foo'` is provided, the downloaded JSON\n * file and binary weights file will be named 'foo.json' and\n * 'foo.weights.bin', respectively.\n * @param config Additional configuration for triggering downloads.\n * @returns An instance of `BrowserDownloads` `IOHandler`.\n *\n * @doc {\n * heading: 'Models',\n * subheading: 'Loading',\n * namespace: 'io',\n * ignoreCI: true\n * }\n */\nexport function browserDownloads(fileNamePrefix = 'model') {\n return new BrowserDownloads(fileNamePrefix);\n}\n/**\n * Creates an IOHandler that loads model artifacts from user-selected files.\n *\n * This method can be used for loading from files such as user-selected files\n * in the browser.\n * When used in conjunction with `tf.loadLayersModel`, an instance of\n * `tf.LayersModel` (Keras-style) can be constructed from the loaded artifacts.\n *\n * ```js\n * // Note: This code snippet won't run properly without the actual file input\n * // elements in the HTML DOM.\n *\n * // Suppose there are two HTML file input (` `)\n * // elements.\n * const uploadJSONInput = document.getElementById('upload-json');\n * const uploadWeightsInput = document.getElementById('upload-weights');\n * const model = await tf.loadLayersModel(tf.io.browserFiles(\n * [uploadJSONInput.files[0], uploadWeightsInput.files[0]]));\n * ```\n *\n * @param files `File`s to load from. Currently, this function supports only\n * loading from files that contain Keras-style models (i.e., `tf.Model`s), for\n * which an `Array` of `File`s is expected (in that order):\n * - A JSON file containing the model topology and weight manifest.\n * - Optionally, One or more binary files containing the binary weights.\n * These files must have names that match the paths in the `weightsManifest`\n * contained by the aforementioned JSON file, or errors will be thrown\n * during loading. These weights files have the same format as the ones\n * generated by `tensorflowjs_converter` that comes with the `tensorflowjs`\n * Python PIP package. If no weights files are provided, only the model\n * topology will be loaded from the JSON file above.\n * @returns An instance of `Files` `IOHandler`.\n *\n * @doc {\n * heading: 'Models',\n * subheading: 'Loading',\n * namespace: 'io',\n * ignoreCI: true\n * }\n */\nexport function browserFiles(files) {\n return new BrowserFiles(files);\n}\n//# sourceMappingURL=browser_files.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { assert } from '../util';\n/**\n * Monitor Promise.all progress, fire onProgress callback function.\n *\n * @param promises Promise list going to be monitored\n * @param onProgress Callback function. Fired when a promise resolved.\n * @param startFraction Optional fraction start. Default to 0.\n * @param endFraction Optional fraction end. 
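For orientation, a minimal sketch of how these two handlers are used from application code, adapted from the JSDoc examples embedded above (assumes @tensorflow/tfjs is available in a browser; the 'upload-json'/'upload-weights' element ids are illustrative only):

import * as tf from '@tensorflow/tfjs';

async function saveAndReload() {
  // Saving with the 'downloads://' scheme routes to BrowserDownloads:
  // the browser downloads 'mymodel.json' and 'mymodel.weights.bin'.
  const model = tf.sequential();
  model.add(tf.layers.dense({units: 1, inputShape: [10], activation: 'sigmoid'}));
  await model.save('downloads://mymodel');

  // Re-loading from <input type="file"> elements uses BrowserFiles;
  // the JSON file comes first, followed by the weight binaries it references.
  const jsonUpload = document.getElementById('upload-json');
  const weightsUpload = document.getElementById('upload-weights');
  return tf.loadLayersModel(
      tf.io.browserFiles([jsonUpload.files[0], weightsUpload.files[0]]));
}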
[Bundled tfjs-core sources, continued:]
- io/progress.js: monitorPromisesProgress(), which wraps Promise.all and fires an onProgress callback with a fraction in [startFraction, endFraction] as each promise resolves, after validating the promise list and the fraction bounds.
- io/weights_loader.js: loadWeightsAsArrayBuffer() (parallel fetches, with progress split 0-0.5 for the requests and 0.5-1 for buffering the responses), loadWeights(), and weightsLoaderFactory(), which turns any "fetch these shard files" function into a manifest-driven loader that assembles the shard buffers per group and decodes them into a name-to-tensor map.
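The weightsLoaderFactory() JSDoc above shows a Node.js use; a slightly tightened sketch of that example follows (paths are illustrative, the script is assumed to run as an ES module, and the Buffer is sliced so Node's pooled allocations do not leak neighbouring bytes):

import * as fs from 'fs';
import * as tf from '@tensorflow/tfjs';

// Resolve each shard path to an ArrayBuffer read from disk instead of fetch().
const fetchWeightsFromDisk = async (filePaths) =>
    filePaths.map((filePath) => {
      const b = fs.readFileSync(filePath);
      return b.buffer.slice(b.byteOffset, b.byteOffset + b.byteLength);
    });

const loadWeights = tf.io.weightsLoaderFactory(fetchWeightsFromDisk);
const manifest = JSON.parse(
    fs.readFileSync('./my_model-weights_manifest').toString());
const weightMap = await loadWeights(manifest, './');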
[Bundled tfjs-core sources, continued:]
- io/http.js: the HTTPRequest IOHandler. save() sends model.json and model.weights.bin as multipart/form-data (default method POST, overridable via requestInit); load() fetches the model JSON, validates it (with a dedicated error for legacy .pb paths), and downloads the weight shards listed in weightsManifest, honouring weightPathPrefix, weightUrlConverter, onProgress and a custom fetchFunc. Also parseUrl(), isHTTPScheme(), the httpRouter registered for both saving and loading, the http() factory and the deprecated browserHTTPRequest() alias.
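A sketch of saving to an HTTP endpoint, following the http() JSDoc above (the server URL is illustrative; the handler posts model.json and model.weights.bin as multipart/form-data):

import * as tf from '@tensorflow/tfjs';

async function uploadModel(model) {
  // Explicit handler with a custom method; a plain 'http://...' URL string
  // would also route here via httpRouter, using the default POST.
  return model.save(tf.io.http(
      'http://model-server:5000/upload', {requestInit: {method: 'PUT'}}));
}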
[Bundled tfjs-core sources, continued:]
- io/passthrough.js: fromMemory(), which loads model artifacts already held in memory (and warns that the old multi-argument form is deprecated), and withSaveHandler(), which routes saved artifacts to a callback.
- io/io.js: the public tf.io surface, importing indexed_db and local_storage so their routers register, and re-exporting the handlers above plus the model-management helpers (copyModel, listModels, moveModel, removeModel).
- ops/reshape.js and ops/mat_mul.js: the reshape and matMul ops, including the implicit -1 dimension inference for reshape, and the batch-dimension/inner-shape assertions and 3-D reshaping that back batched matrix multiplication.
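A small sketch combining the two ops summarized above, following their embedded JSDoc examples:

import * as tf from '@tensorflow/tfjs';

// reshape: a -1 component lets that dimension be inferred from the element count.
const a = tf.tensor1d([1, 2, 3, 4]).reshape([2, -1]);   // shape [2, 2]

// matMul: plain 2-D matrix product (rank > 2 inputs are treated as batches).
const b = tf.tensor2d([1, 2, 3, 4], [2, 2]);
a.matMul(b).print();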
[Bundled tfjs-core sources, continued:]
- ops/one_hot.js, ops/transpose.js and ops/confusion_matrix.js: oneHot (rank-R indices produce a rank R+1 output with a depth-sized last axis; depth must be >= 2), transpose (perm defaults to the reversed axes, and rank <= 1 inputs are cloned), and confusionMatrix, computed as matMul(transpose(oneHot(labels)), oneHot(predictions)) cast to int32 after rank/shape validation.
- math.js: exports confusionMatrix under the tf.math namespace.
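The confusionMatrix example from the embedded JSDoc, kept here since it also exercises oneHot, transpose and matMul internally:

import * as tf from '@tensorflow/tfjs';

const labels = tf.tensor1d([0, 1, 2, 1, 0], 'int32');
const predictions = tf.tensor1d([0, 2, 2, 1, 0], 'int32');
// int32 result; entry (r, c) counts examples of true class r predicted as class c.
tf.math.confusionMatrix(labels, predictions, 3).print();
// [[2, 0, 0],
//  [0, 1, 1],
//  [0, 0, 1]]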
[Bundled tfjs-core sources, continued:]
- ops/tensor3d.js: tensor3d(), which requires either a nested rank-3 array or a flat array plus an explicit three-element shape.
- ops/browser.js: tf.browser.fromPixels(), which accepts ImageData, PixelData-like objects, HTMLImageElement, HTMLVideoElement or canvas-like inputs, uses a backend FromPixels kernel when one is registered and otherwise falls back to a 2-D canvas read, dropping the alpha channel by default (numChannels = 3); and tf.browser.toPixels(), which converts an int32 [0, 255] or float32 [0, 1] tensor of rank 2 or 3 into RGBA bytes and optionally draws them onto a canvas.
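A sketch of the two tf.browser helpers summarized above, adapted from their JSDoc (browser-only; the canvas is created purely for illustration):

import * as tf from '@tensorflow/tfjs';

async function pixelRoundTrip() {
  const image = new ImageData(1, 1);
  image.data[0] = 100; image.data[1] = 150; image.data[2] = 200; image.data[3] = 255;

  // int32 tensor of shape [1, 1, 3]; alpha is dropped since numChannels defaults to 3.
  const t = tf.browser.fromPixels(image);

  // Draw it back; the promise resolves once the canvas has been painted.
  const canvas = document.createElement('canvas');
  const bytes = await tf.browser.toPixels(t, canvas);
  t.dispose();
  return bytes;
}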
255 : 1;\n const bytes = new Uint8ClampedArray(width * height * 4);\n for (let i = 0; i < height * width; ++i) {\n const rgba = [0, 0, 0, 255];\n for (let d = 0; d < depth; d++) {\n const value = data[i * depth + d];\n if ($img.dtype === 'float32') {\n if (value < 0 || value > 1) {\n throw new Error(`Tensor values for a float32 Tensor must be in the ` +\n `range [0 - 1] but encountered ${value}.`);\n }\n }\n else if ($img.dtype === 'int32') {\n if (value < 0 || value > 255) {\n throw new Error(`Tensor values for a int32 Tensor must be in the ` +\n `range [0 - 255] but encountered ${value}.`);\n }\n }\n if (depth === 1) {\n rgba[0] = value * multiplier;\n rgba[1] = value * multiplier;\n rgba[2] = value * multiplier;\n }\n else {\n rgba[d] = value * multiplier;\n }\n }\n const j = i * 4;\n bytes[j + 0] = Math.round(rgba[0]);\n bytes[j + 1] = Math.round(rgba[1]);\n bytes[j + 2] = Math.round(rgba[2]);\n bytes[j + 3] = Math.round(rgba[3]);\n }\n if (canvas != null) {\n canvas.width = width;\n canvas.height = height;\n const ctx = canvas.getContext('2d');\n const imageData = new ImageData(bytes, width, height);\n ctx.putImageData(imageData, 0, 0);\n }\n if ($img !== img) {\n $img.dispose();\n }\n return bytes;\n}\nexport const fromPixels = op({ fromPixels_ });\n//# sourceMappingURL=browser.js.map", "import { computeStrides } from '../util';\n/**\n * Validate gather nd inputs.\n *\n * @param tensor The tensor contains the source values.\n * @param indices The tensor contains the indices to slice the source.\n *\n * @returns [resultShape, numUpdates, sliceSize, strides]\n */\nexport function prepareAndValidate(tensor, indices) {\n if (tensor.rank < 1) {\n throw new Error('tf.gatherND() expects the input to be rank 1 or higher,' +\n ` but the rank was ${tensor.rank}.`);\n }\n if (indices.rank < 1) {\n throw new Error('tf.gatherND() expects the indices to be rank 1 or higher,' +\n ` but the rank was ${indices.rank}.`);\n }\n if (indices.dtype !== 'int32') {\n throw new Error('tf.gatherND() expects the indices to be int32 type,' +\n ` but the dtype was ${indices.dtype}.`);\n }\n if (indices.shape[indices.rank - 1] > tensor.rank) {\n throw new Error('index innermost dimension length must be <= tensor rank; saw: ' +\n `${indices.shape[indices.rank - 1]} vs. ${tensor.rank}`);\n }\n if (tensor.size === 0) {\n throw new Error('Requested more than 0 entries, but input is empty.' +\n ` Input shape: ${tensor.shape}.`);\n }\n const indicesShape = indices.shape;\n const sliceRank = indicesShape[indicesShape.length - 1];\n // The result shape is\n // indices.shape[:-1] + params.shape[indices.shape[-1]:]\n let nResult = 1;\n for (let i = 0; i < indicesShape.length - 1; ++i) {\n nResult *= indicesShape[i];\n }\n const inputShape = tensor.shape;\n const resultShape = indicesShape.slice();\n resultShape.pop();\n let sliceSize = 1;\n for (let i = sliceRank; i < tensor.rank; ++i) {\n sliceSize *= inputShape[i];\n resultShape.push(inputShape[i]);\n }\n const strides = [...computeStrides(tensor.shape).map(stride => stride / sliceSize),\n 1].slice(0, sliceRank);\n return [resultShape, nResult, sliceSize, strides];\n}\n//# sourceMappingURL=gather_nd_util.js.map", "import { computeStrides, sizeFromShape } from '../util';\n/**\n * Check whether updates.shape = indices.shape[:batchDim] +\n * shape[sliceDim:]\n *\n * @param x The input tensor.\n */\nexport function validateUpdateShape(shape, indices, updates) {\n const sliceDim = (indices.rank > 1) ? 
indices.shape[indices.rank - 1] : 1;\n const batchDim = (indices.rank > 1) ? indices.rank - 1 : 1;\n const shapeError = 'Must have updates.shape = indices.shape[:batchDim] + ' +\n `shape[sliceDim:], got updates.shape: ${updates.shape}` +\n `, indices.shape: ${indices.shape}, shape: ${shape}` +\n `, sliceDim: ${sliceDim}, and batchDim: ${batchDim}.`;\n if (updates.rank < batchDim) {\n throw new Error(shapeError + ` update.rank < ${batchDim}. `);\n }\n if (shape.length < sliceDim + (updates.rank - batchDim)) {\n throw new Error(shapeError +\n ` Output shape length < ${sliceDim + (updates.rank - batchDim)}`);\n }\n if (updates.rank !== batchDim + shape.length - sliceDim) {\n throw new Error(shapeError + ` update.rank != ${batchDim + shape.length - sliceDim}`);\n }\n for (let d = 0; d < batchDim; ++d) {\n if (updates.shape[d] !== indices.shape[d]) {\n throw new Error(shapeError +\n ` updates.shape[${d}] (${updates.shape[d]}) != indices.shape[${d}] (${indices.shape[d]}).`);\n }\n }\n for (let d = 0; d < updates.rank - batchDim; ++d) {\n if (updates.shape[d + batchDim] !== shape[d + sliceDim]) {\n throw new Error(shapeError +\n ` updates.shape[${d + batchDim}] (${updates.shape[d + batchDim]}) != shape[${d + batchDim}] (${shape[d + batchDim]})`);\n }\n }\n}\n/**\n * Validate scatter nd inputs.\n *\n * @param update The tensor contains the update values.\n * @param indices The tensor contains the indices for the update values.\n * @param shape The shape of the output tensor.\n */\nexport function validateInput(updates, indices, shape) {\n if (indices.rank < 1) {\n throw new Error('tf.scatterND() expects the indices to be rank 1 or higher,' +\n ` but the rank was ${indices.rank}.`);\n }\n if (updates.rank < 1) {\n throw new Error('tf.scatterND() expects the updates to be rank 1 or higher,' +\n ` but the rank was ${updates.rank}.`);\n }\n if (indices.dtype !== 'int32') {\n throw new Error(`The dtype of 'indices' should be int32, but got dtype: ${indices.dtype}`);\n }\n if (shape.length < 1) {\n throw new Error(`Output rank must be greater or equal to 1, but got shape: ${shape}`);\n }\n if (shape.length === 0) {\n if (indices.size === 0) {\n throw new Error(`Indices specified for empty output. indices shape: ${indices.shape}`);\n }\n if (updates.size === 0) {\n throw new Error(`Updates specified for empty output. updates shape: ${updates.shape}`);\n }\n }\n validateUpdateShape(shape, indices, updates);\n}\n/**\n * Calculate the shape information for the output.\n *\n * @param update The tensor contains the update values.\n * @param indices The tensor contains the indices for the update values.\n * @param shape The shape of the output tensor.\n *\n * @returns ScatterShapeInfo\n */\nexport function calculateShapes(updates, indices, shape) {\n // Calculate the number of dimensions in indices\n const indicesRank = indices.shape.length;\n const sliceRank = (indicesRank > 1) ? indices.shape[indicesRank - 1] : 1;\n // Calculate the number of elements that make up each slice of our updated\n // tensor. This allows us to work with flattened tensors and copy over whole\n // slices at a time.\n const totalNd = shape.length;\n let sliceSize = 1;\n for (let i = sliceRank; i < totalNd; ++i) {\n sliceSize *= shape[i];\n }\n const safeSliceDim = (sliceRank < 1) ? 
1 : sliceRank;\n const numUpdates = sizeFromShape(indices.shape) / safeSliceDim;\n const strides = [...computeStrides(shape.slice(0, sliceRank)), 1];\n const outputSize = sizeFromShape(shape);\n return { sliceRank, numUpdates, sliceSize, strides, outputSize };\n}\n//# sourceMappingURL=scatter_nd_util.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as util from '../util';\nexport function assertParamsValid(input, begin, size) {\n const inputRank = input.shape.length;\n util.assert(inputRank === begin.length, () => `Error in slice${inputRank}D: Length of begin ${begin} must ` +\n `match the rank of the array (${inputRank}).`);\n util.assert(inputRank === size.length, () => `Error in slice${inputRank}D: Length of size ${size} must ` +\n `match the rank of the array (${inputRank}).`);\n for (let i = 0; i < inputRank; ++i) {\n util.assert(begin[i] + size[i] <= input.shape[i], () => `Error in slice${inputRank}D: begin[${i}] + size[${i}] ` +\n `(${begin[i] + size[i]}) would overflow input.shape[${i}] (${input.shape[i]})`);\n }\n}\n/** Converts a binary mask to an array of axes. Used in stridedSlice(). */\nexport function maskToAxes(mask) {\n const axes = [];\n let axis = 0;\n while (mask > 0) {\n if (mask & 1) {\n axes.push(axis);\n }\n mask /= 2;\n axis++;\n }\n return axes;\n}\n/** Computes the output shape given the strided slice params. */\nexport function computeOutShape(begin, end, strides) {\n const size = [];\n for (let axis = 0; axis < begin.length; axis++) {\n size[axis] = Math.ceil((end[axis] - begin[axis]) / strides[axis]);\n }\n return size;\n}\n// Creates full selection at the elided dimensions. If the dimension matches\n// the ellipsis mask, override the current stride value. 
Otherwise, insert.\nexport function stridesWithElidedDims(strides, ellipsisInsertionIndex, numElidedAxes, inputShape) {\n const newStrides = [...strides];\n for (let i = newStrides.length; i < inputShape.length; i++) {\n newStrides.push(1);\n }\n for (let i = 0; i < numElidedAxes; i++) {\n if (i === 0) {\n newStrides[ellipsisInsertionIndex] = 1;\n }\n else {\n newStrides.splice(ellipsisInsertionIndex, 0 /* num elements to delete */, 1 /* element to add */);\n newStrides.pop();\n }\n }\n return newStrides;\n}\nfunction unnormalizeAxis(ellipsisInsertionIndex, numElidedAxes, normalizedAxis) {\n if (normalizedAxis <= ellipsisInsertionIndex) {\n return normalizedAxis;\n }\n return normalizedAxis - (numElidedAxes - 1);\n}\nfunction getElidedAxes(numElidedAxes, ellipsisInsertionIndex) {\n const elidedAxes = [];\n for (let i = 0; i < numElidedAxes; i++) {\n elidedAxes.push(ellipsisInsertionIndex + i);\n }\n return elidedAxes;\n}\n// Normalize the start, end and strides.\nexport function getNormalizedAxes(inputShape, ellipsisAxes, numInterpolatedAxes, begin, end, strides, beginMask, endMask, ellipsisMask) {\n const inputRank = inputShape.length;\n let normalizedBegin = new Array(inputRank), normalizedEnd = new Array(inputRank), normalizedStrides = new Array(inputRank);\n if (ellipsisAxes.length && numInterpolatedAxes > 0) {\n const fullIndex = ellipsisAxes[0];\n // The ellipsis applies to the masked index as well as any dimensions\n // that are interpolated.\n const numElidedAxes = numInterpolatedAxes + 1;\n normalizedBegin = startIndicesWithElidedDims(beginMask, fullIndex, numElidedAxes, begin, inputShape);\n normalizedEnd = stopIndicesWithElidedDims(endMask, fullIndex, numElidedAxes, end, inputShape);\n normalizedStrides =\n stridesWithElidedDims(strides, fullIndex, numElidedAxes, inputShape);\n }\n else {\n for (let axis = 0; axis < inputRank; axis++) {\n normalizedBegin[axis] = startForAxis(beginMask, begin, strides, inputShape, axis, ellipsisMask);\n normalizedEnd[axis] =\n stopForAxis(endMask, end, strides, inputShape, axis, ellipsisMask);\n normalizedStrides[axis] = stridesForAxis(strides, axis, ellipsisMask);\n }\n }\n return {\n begin: normalizedBegin,\n end: normalizedEnd,\n strides: normalizedStrides\n };\n}\n// Creates full selection at the elided dimensions. If the dimension matches\n// the ellipsis mask, override the current start value. Otherwise, insert.\nexport function startIndicesWithElidedDims(beginMask, ellipsisInsertionIndex, numElidedAxes, originalBegin, inputShape) {\n const newIndices = [...inputShape];\n const elidedAxes = getElidedAxes(numElidedAxes, ellipsisInsertionIndex);\n for (let axis = 0; axis < newIndices.length; axis++) {\n if (elidedAxes.indexOf(axis) > -1) {\n newIndices[axis] = 0;\n }\n else {\n const originalAxis = unnormalizeAxis(ellipsisInsertionIndex, numElidedAxes, axis);\n let originalValue = originalBegin[originalAxis];\n if (beginMask & 1 << originalAxis) {\n originalValue = 0;\n }\n newIndices[axis] = originalValue;\n }\n }\n return newIndices;\n}\n// Creates full selection at the elided dimensions. If the dimension matches\n// the ellipsis mask, override the current stop value. 
Otherwise, insert.\nexport function stopIndicesWithElidedDims(endMask, ellipsisInsertionIndex, numElidedAxes, originalEnd, inputShape) {\n const newIndices = [...inputShape];\n const elidedAxes = getElidedAxes(numElidedAxes, ellipsisInsertionIndex);\n for (let axis = 0; axis < newIndices.length; axis++) {\n if (elidedAxes.indexOf(axis) > -1) {\n newIndices[axis] = Number.MAX_SAFE_INTEGER;\n }\n else {\n const originalAxis = unnormalizeAxis(ellipsisInsertionIndex, numElidedAxes, axis);\n let originalValue = originalEnd[originalAxis];\n if (endMask & 1 << originalAxis) {\n originalValue = Number.MAX_SAFE_INTEGER;\n }\n newIndices[axis] = originalValue;\n }\n }\n for (let i = 0; i < newIndices.length; i++) {\n // Handle negative indices\n const axisSize = inputShape[i];\n if (newIndices[i] < 0) {\n newIndices[i] += axisSize;\n }\n newIndices[i] = util.clamp(0, newIndices[i], inputShape[i]);\n }\n return newIndices;\n}\nexport function stridesForAxis(strides, axis, ellipsisMask) {\n let stride = strides[axis];\n if (ellipsisMask & (1 << axis) || stride == null) {\n stride = 1;\n }\n return stride;\n}\nexport function startForAxis(beginMask, startIndices, strides, inputShape, axis, ellipsisMask) {\n // Begin with the specified index\n let start = startIndices[axis];\n const stride = strides[axis] || 1;\n // Check the axis bit from right of masked axes, or the begin index is not set\n // for the axis.\n if (beginMask & 1 << axis || ellipsisMask & 1 << axis || start == null) {\n if (stride > 0) {\n // Forward iteration - use the first element. These values will get\n // clamped below (Note: We could have set them to 0 and axis_size-1, but\n // use lowest() and max() to maintain symmetry with StopForAxis())\n start = Number.MIN_SAFE_INTEGER;\n }\n else {\n // Backward iteration - use the last element.\n start = Number.MAX_SAFE_INTEGER;\n }\n }\n // Handle negative indices\n const axisSize = inputShape[axis];\n if (start < 0) {\n start += axisSize;\n }\n // Clamping\n start = util.clamp(0, start, axisSize - 1);\n return start;\n}\nexport function stopForAxis(endMask, stopIndices, strides, inputShape, axis, ellipsisMask) {\n // Begin with the specified index\n let stop = stopIndices[axis];\n const stride = strides[axis] || 1;\n // Check the axis bit from right of masked axes, or if the stop index is not\n // set for this axis.\n if (endMask & (1 << axis) || ellipsisMask & (1 << axis) || stop == null) {\n if (stride > 0) {\n // Forward iteration - use the last element. 
These values will get\n // clamped below\n stop = Number.MAX_SAFE_INTEGER;\n }\n else {\n // Backward iteration - use the first element.\n stop = Number.MIN_SAFE_INTEGER;\n }\n }\n // Handle negative indices\n const axisSize = inputShape[axis];\n if (stop < 0) {\n stop += axisSize;\n }\n // Clamping\n // Because the end index points one past the last element, we need slightly\n // different clamping ranges depending on the direction.\n if (stride > 0) {\n // Forward iteration\n stop = util.clamp(0, stop, axisSize);\n }\n else {\n // Backward iteration\n stop = util.clamp(-1, stop, axisSize - 1);\n }\n return stop;\n}\n/**\n * Returns true if the slice occupies a continous set of elements in the\n * 'flat' space.\n */\nexport function isSliceContinous(shape, begin, size) {\n // Index of the first axis that has size > 1.\n let firstNonOneAxis = size.length;\n for (let i = 0; i < size.length; i++) {\n if (size[i] > 1) {\n firstNonOneAxis = i;\n break;\n }\n }\n for (let i = firstNonOneAxis + 1; i < size.length; i++) {\n if (begin[i] > 0 || size[i] !== shape[i]) {\n return false;\n }\n }\n return true;\n}\nexport function computeFlatOffset(begin, strides) {\n let flatOffset = begin.length > 0 ? begin[begin.length - 1] : 1;\n for (let i = 0; i < begin.length - 1; i++) {\n flatOffset += begin[i] * strides[i];\n }\n return flatOffset;\n}\nexport function parseSliceParams(x, begin, size) {\n // The following logic allows for more ergonomic calls.\n let begin_;\n const xRank = x.shape.length;\n if (typeof begin === 'number') {\n begin_ = [begin, ...new Array(xRank - 1).fill(0)];\n }\n else if (begin.length < xRank) {\n begin_ = begin.concat(new Array(xRank - begin.length).fill(0));\n }\n else {\n begin_ = begin.slice();\n }\n begin_.forEach(d => {\n util.assert(d !== -1, () => 'slice() does not support negative begin indexing.');\n });\n let size_;\n if (size == null) {\n size_ = new Array(xRank).fill(-1);\n }\n else if (typeof size === 'number') {\n size_ = [size, ...new Array(xRank - 1).fill(-1)];\n }\n else if (size.length < xRank) {\n size_ = size.concat(new Array(xRank - size.length).fill(-1));\n }\n else {\n size_ = size;\n }\n size_ = size_.map((d, i) => {\n if (d >= 0) {\n return d;\n }\n else {\n util.assert(d === -1, () => `Negative size values should be exactly -1 but got ` +\n `${d} for the slice() size at index ${i}.`);\n return x.shape[i] - begin_[i];\n }\n });\n return [begin_, size_];\n}\n//# sourceMappingURL=slice_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { assert } from './util';\n/**\n * Serializable defines the serialization contract.\n *\n * TFJS requires serializable classes to return their className when asked\n * to avoid issues with minification.\n */\nexport class Serializable {\n /**\n * Return the class name for this class to use in serialization contexts.\n *\n * Generally speaking this will be the same thing that constructor.name\n * would have returned. However, the class name needs to be robust\n * against minification for serialization/deserialization to work properly.\n *\n * There's also places such as initializers.VarianceScaling, where\n * implementation details between different languages led to different\n * class hierarchies and a non-leaf node is used for serialization purposes.\n */\n getClassName() {\n return this.constructor\n .className;\n }\n /**\n * Creates an instance of T from a ConfigDict.\n *\n * This works for most descendants of serializable. A few need to\n * provide special handling.\n * @param cls A Constructor for the class to instantiate.\n * @param config The Configuration for the object.\n */\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls(config);\n }\n}\n/**\n * Maps string keys to class constructors.\n *\n * Used during (de)serialization from the cross-language JSON format, which\n * requires the class name in the serialization format matches the class\n * names as used in Python, should it exist.\n */\nexport class SerializationMap {\n constructor() {\n this.classNameMap = {};\n }\n /**\n * Returns the singleton instance of the map.\n */\n static getMap() {\n if (SerializationMap.instance == null) {\n SerializationMap.instance = new SerializationMap();\n }\n return SerializationMap.instance;\n }\n /**\n * Registers the class as serializable.\n */\n static register(cls) {\n SerializationMap.getMap().classNameMap[cls.className] =\n [cls, cls.fromConfig];\n }\n}\n/**\n * Register a class with the serialization map of TensorFlow.js.\n *\n * This is often used for registering custom Layers, so they can be\n * serialized and deserialized.\n *\n * Example:\n *\n * ```js\n * class MyCustomLayer extends tf.layers.Layer {\n * static className = 'MyCustomLayer';\n *\n * constructor(config) {\n * super(config);\n * }\n * }\n * tf.serialization.registerClass(MyCustomLayer);\n * ```\n *\n * @param cls The class to be registered. 
It must have a public static member\n * called `className` defined and the value must be a non-empty string.\n *\n * @doc {heading: 'Models', subheading: 'Serialization', ignoreCI: true}\n */\nexport function registerClass(cls) {\n assert(cls.className != null, () => `Class being registered does not have the static className ` +\n `property defined.`);\n assert(typeof cls.className === 'string', () => `className is required to be a string, but got type ` +\n typeof cls.className);\n assert(cls.className.length > 0, () => `Class being registered has an empty-string as its className, ` +\n `which is disallowed.`);\n SerializationMap.register(cls);\n}\n//# sourceMappingURL=serialization.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from './engine';\nimport { inferShape } from './tensor_util_env';\nimport { arraysEqual, flatten, isString, isTypedArray } from './util';\nconst TEST_EPSILON_FLOAT32 = 1e-3;\nexport const TEST_EPSILON_FLOAT16 = 1e-1;\nexport function expectArraysClose(actual, expected, epsilon) {\n if (epsilon == null) {\n epsilon = testEpsilon();\n }\n return expectArraysPredicate(actual, expected, (a, b) => areClose(a, b, epsilon));\n}\nexport function testEpsilon() {\n return ENGINE.backend.floatPrecision() === 32 ? TEST_EPSILON_FLOAT32 :\n TEST_EPSILON_FLOAT16;\n}\nfunction expectArraysPredicate(actual, expected, predicate) {\n let checkClassType = true;\n if (isTypedArray(actual) || isTypedArray(expected)) {\n checkClassType = false;\n }\n if (isTypedArray(actual) && isTypedArray(expected)) {\n checkClassType = true;\n }\n if (checkClassType) {\n const aType = actual.constructor.name;\n const bType = expected.constructor.name;\n if (aType !== bType) {\n throw new Error(`Arrays are of different type. Actual: ${aType}. ` +\n `Expected: ${bType}`);\n }\n }\n if (Array.isArray(actual) && Array.isArray(expected)) {\n const actualShape = inferShape(actual);\n const expectedShape = inferShape(expected);\n if (!arraysEqual(actualShape, expectedShape)) {\n throw new Error(`Arrays have different shapes. ` +\n `Actual: [${actualShape}]. Expected: [${expectedShape}]`);\n }\n }\n const actualFlat = isTypedArray(actual) ? 
actual : flatten(actual);\n const expectedFlat = isTypedArray(expected) ?\n expected :\n flatten(expected);\n if (actualFlat.length !== expectedFlat.length) {\n throw new Error(`Arrays have different lengths actual: ${actualFlat.length} vs ` +\n `expected: ${expectedFlat.length}.\\n` +\n `Actual: ${actualFlat}.\\n` +\n `Expected: ${expectedFlat}.`);\n }\n for (let i = 0; i < expectedFlat.length; ++i) {\n const a = actualFlat[i];\n const e = expectedFlat[i];\n if (!predicate(a, e)) {\n throw new Error(`Arrays differ: actual[${i}] = ${a}, expected[${i}] = ${e}.\\n` +\n `Actual: ${actualFlat}.\\n` +\n `Expected: ${expectedFlat}.`);\n }\n }\n}\nexport function expectPromiseToFail(fn, done) {\n fn().then(() => done.fail(), () => done());\n}\nexport function expectArraysEqual(actual, expected) {\n const exp = typeof expected === 'string' || typeof expected === 'number' ||\n typeof expected === 'boolean' ?\n [expected] :\n expected;\n if (isString(actual) || isString(actual[0]) ||\n isString(expected) || isString(expected[0])) {\n // tslint:disable-next-line: triple-equals\n return expectArraysPredicate(actual, exp, (a, b) => a == b);\n }\n return expectArraysPredicate(actual, expected, (a, b) => areClose(a, b, 0));\n}\nexport function expectNumbersClose(a, e, epsilon) {\n if (epsilon == null) {\n epsilon = testEpsilon();\n }\n if (!areClose(a, e, epsilon)) {\n throw new Error(`Numbers differ: actual === ${a}, expected === ${e}`);\n }\n}\nfunction areClose(a, e, epsilon) {\n if (!isFinite(a) && !isFinite(e)) {\n return true;\n }\n if (isNaN(a) || isNaN(e) || Math.abs(a - e) > epsilon) {\n return false;\n }\n return true;\n}\nexport function expectValuesInRange(actual, low, high) {\n for (let i = 0; i < actual.length; i++) {\n if (actual[i] < low || actual[i] > high) {\n throw new Error(`Value out of range:${actual[i]} low: ${low}, high: ${high}`);\n }\n }\n}\nexport function expectArrayBuffersEqual(actual, expected) {\n // Safari & Jasmine don't like comparing ArrayBuffers directly. Wrapping in\n // a Float32Array solves this issue.\n expect(new Float32Array(actual)).toEqual(new Float32Array(expected));\n}\n//# sourceMappingURL=test_util.js.map", "/** @license See the LICENSE file. */\n// This code is auto-generated, do not modify this file!\nconst version = '2.7.0';\nexport { version };\n//# sourceMappingURL=version.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from './engine';\nimport { env } from './environment';\nimport { setDeprecationWarningFn } from './tensor';\nimport { getTensorsInContainer } from './tensor_util';\n/**\n * Enables production mode which disables correctness checks in favor of\n * performance.\n *\n * @doc {heading: 'Environment'}\n */\nexport function enableProdMode() {\n env().set('PROD', true);\n}\n/**\n * Enables debug mode which will log information about all executed kernels:\n * the elapsed time of the kernel execution, as well as the rank, shape, and\n * size of the output tensor.\n *\n * Debug mode will significantly slow down your application as it will\n * download the result of every operation to the CPU. This should not be used in\n * production. Debug mode does not affect the timing information of the kernel\n * execution as we do not measure download time in the kernel execution time.\n *\n * See also: `tf.profile`, `tf.memory`.\n *\n * @doc {heading: 'Environment'}\n */\nexport function enableDebugMode() {\n env().set('DEBUG', true);\n}\n/** Globally disables deprecation warnings */\nexport function disableDeprecationWarnings() {\n env().set('DEPRECATION_WARNINGS_ENABLED', false);\n console.warn(`TensorFlow.js deprecation warnings have been disabled.`);\n}\n/** Warn users about deprecated functionality. */\nexport function deprecationWarn(msg) {\n if (env().getBool('DEPRECATION_WARNINGS_ENABLED')) {\n console.warn(msg + ' You can disable deprecation warnings with ' +\n 'tf.disableDeprecationWarnings().');\n }\n}\nsetDeprecationWarningFn(deprecationWarn);\n/**\n * Dispose all variables kept in backend engine.\n *\n * @doc {heading: 'Environment'}\n */\nexport function disposeVariables() {\n ENGINE.disposeVariables();\n}\n/**\n * It returns the global engine that keeps track of all tensors and backends.\n *\n * @doc {heading: 'Environment'}\n */\nexport function engine() {\n return ENGINE;\n}\n/**\n * Returns memory info at the current time in the program. The result is an\n * object with the following properties:\n *\n * - `numBytes`: Number of bytes allocated (undisposed) at this time.\n * - `numTensors`: Number of unique tensors allocated.\n * - `numDataBuffers`: Number of unique data buffers allocated\n * (undisposed) at this time, which is \u2264 the number of tensors\n * (e.g. `a.reshape(newShape)` makes a new Tensor that shares the same\n * data buffer with `a`).\n * - `unreliable`: True if the memory usage is unreliable. 
See `reasons` when\n * `unreliable` is true.\n * - `reasons`: `string[]`, reasons why the memory is unreliable, present if\n * `unreliable` is true.\n *\n * WebGL Properties:\n * - `numBytesInGPU`: Number of bytes allocated (undisposed) in the GPU only at\n * this time.\n *\n * @doc {heading: 'Performance', subheading: 'Memory'}\n */\nexport function memory() {\n return ENGINE.memory();\n}\n/**\n * Executes the provided function `f()` and returns a promise that resolves\n * with information about the function's memory use:\n * - `newBytes`: the number of new bytes allocated\n * - `newTensors`: the number of new tensors created\n * - `peakBytes`: the peak number of bytes allocated\n * - `kernels`: an array of objects for each kernel involved that reports\n * their input and output shapes, number of bytes used, and number of new\n * tensors created.\n *\n * ```js\n * const profile = await tf.profile(() => {\n * const x = tf.tensor1d([1, 2, 3]);\n * let x2 = x.square();\n * x2.dispose();\n * x2 = x.square();\n * x2.dispose();\n * return x;\n * });\n *\n * console.log(`newBytes: ${profile.newBytes}`);\n * console.log(`newTensors: ${profile.newTensors}`);\n * console.log(`byte usage over all kernels: ${profile.kernels.map(k =>\n * k.totalBytesSnapshot)}`);\n * ```\n *\n *\n * @doc {heading: 'Performance', subheading: 'Profile'}\n */\nexport function profile(f) {\n return ENGINE.profile(f);\n}\n/**\n * Executes the provided function `fn` and after it is executed, cleans up all\n * intermediate tensors allocated by `fn` except those returned by `fn`.\n * `fn` must not return a Promise (async functions not allowed). The returned\n * result can be a complex object.\n *\n * Using this method helps avoid memory leaks. In general, wrap calls to\n * operations in `tf.tidy` for automatic memory cleanup.\n *\n * NOTE: Variables do *not* get cleaned up when inside a tidy(). If you want to\n * dispose variables, please use `tf.disposeVariables` or call dispose()\n * directly on variables.\n *\n * ```js\n * // y = 2 ^ 2 + 1\n * const y = tf.tidy(() => {\n * // a, b, and one will be cleaned up when the tidy ends.\n * const one = tf.scalar(1);\n * const a = tf.scalar(2);\n * const b = a.square();\n *\n * console.log('numTensors (in tidy): ' + tf.memory().numTensors);\n *\n * // The value returned inside the tidy function will return\n * // through the tidy, in this case to the variable y.\n * return b.add(one);\n * });\n *\n * console.log('numTensors (outside tidy): ' + tf.memory().numTensors);\n * y.print();\n * ```\n *\n * @param nameOrFn The name of the closure, or the function to execute.\n * If a name is provided, the 2nd argument should be the function.\n * If debug mode is on, the timing and the memory usage of the function\n * will be tracked and displayed on the console using the provided name.\n * @param fn The function to execute.\n *\n * @doc {heading: 'Performance', subheading: 'Memory'}\n */\nexport function tidy(nameOrFn, fn) {\n return ENGINE.tidy(nameOrFn, fn);\n}\n/**\n * Disposes any `tf.Tensor`s found within the provided object.\n *\n * @param container an object that may be a `tf.Tensor` or may directly\n * contain `tf.Tensor`s, such as a `Tensor[]` or `{key: Tensor, ...}`. If\n * the object is not a `tf.Tensor` or does not contain `Tensors`, nothing\n * happens. 
In general it is safe to pass any object here, except that\n * `Promise`s are not supported.\n *\n * @doc {heading: 'Performance', subheading: 'Memory'}\n */\nexport function dispose(container) {\n const tensors = getTensorsInContainer(container);\n tensors.forEach(tensor => tensor.dispose());\n}\n/**\n * Keeps a `tf.Tensor` generated inside a `tf.tidy` from being disposed\n * automatically.\n *\n * ```js\n * let b;\n * const y = tf.tidy(() => {\n * const one = tf.scalar(1);\n * const a = tf.scalar(2);\n *\n * // b will not be cleaned up by the tidy. a and one will be cleaned up\n * // when the tidy ends.\n * b = tf.keep(a.square());\n *\n * console.log('numTensors (in tidy): ' + tf.memory().numTensors);\n *\n * // The value returned inside the tidy function will return\n * // through the tidy, in this case to the variable y.\n * return b.add(one);\n * });\n *\n * console.log('numTensors (outside tidy): ' + tf.memory().numTensors);\n * console.log('y:');\n * y.print();\n * console.log('b:');\n * b.print();\n * ```\n *\n * @param result The tensor to keep from being disposed.\n *\n * @doc {heading: 'Performance', subheading: 'Memory'}\n */\nexport function keep(result) {\n return ENGINE.keep(result);\n}\n/**\n * Executes `f()` and returns a promise that resolves with timing\n * information.\n *\n * The result is an object with the following properties:\n *\n * - `wallMs`: Wall execution time.\n * - `kernelMs`: Kernel execution time, ignoring data transfer. If using the\n * WebGL backend and the query timer extension is not available, this will\n * return an error object.\n * - On `WebGL` The following additional properties exist:\n * - `uploadWaitMs`: CPU blocking time on texture uploads.\n * - `downloadWaitMs`: CPU blocking time on texture downloads (readPixels).\n *\n * ```js\n * const x = tf.randomNormal([20, 20]);\n * const time = await tf.time(() => x.matMul(x));\n *\n * console.log(`kernelMs: ${time.kernelMs}, wallTimeMs: ${time.wallMs}`);\n * ```\n *\n * @param f The function to execute and time.\n *\n * @doc {heading: 'Performance', subheading: 'Timing'}\n */\nexport function time(f) {\n return ENGINE.time(f);\n}\n/**\n * Sets the backend (cpu, webgl, wasm, etc) responsible for creating tensors and\n * executing operations on those tensors. Returns a promise that resolves\n * to a boolean if the backend initialization was successful.\n *\n * Note this disposes the current backend, if any, as well as any tensors\n * associated with it. A new backend is initialized, even if it is of the\n * same type as the previous one.\n *\n * @param backendName The name of the backend. Currently supports\n * `'webgl'|'cpu'` in the browser, `'tensorflow'` under node.js\n * (requires tfjs-node), and `'wasm'` (requires tfjs-backend-wasm).\n *\n * @doc {heading: 'Backends'}\n */\nexport function setBackend(backendName) {\n return ENGINE.setBackend(backendName);\n}\n/**\n * Returns a promise that resolves when the currently selected backend (or the\n * highest priority one) has initialized. Await this promise when you are using\n * a backend that has async initialization.\n *\n * @doc {heading: 'Backends'}\n */\nexport function ready() {\n return ENGINE.ready();\n}\n/**\n * Returns the current backend name (cpu, webgl, etc). 
The backend is\n * responsible for creating tensors and executing operations on those tensors.\n *\n * @doc {heading: 'Backends'}\n */\nexport function getBackend() {\n return ENGINE.backendName;\n}\n/**\n * Removes a backend and the registered factory.\n *\n * @doc {heading: 'Backends'}\n */\nexport function removeBackend(name) {\n ENGINE.removeBackend(name);\n}\n/**\n * Finds the backend registered under the provided name. Returns null if the\n * name is not in the registry, or the registration hasn't finished yet.\n */\nexport function findBackend(name) {\n return ENGINE.findBackend(name);\n}\n/**\n * Finds the backend factory registered under the provided name. Returns a\n * function that produces a new backend when called. Returns null if the name\n * is not in the registry.\n */\nexport function findBackendFactory(name) {\n return ENGINE.findBackendFactory(name);\n}\n/**\n * Registers a global backend. The registration should happen when importing\n * a module file (e.g. when importing `backend_webgl.ts`), and is used for\n * modular builds (e.g. custom tfjs bundle with only webgl support).\n *\n * @param factory The backend factory function. When called, it should\n * return a backend instance, or a promise of an instance.\n * @param priority The priority of the backend (higher = more important).\n * In case multiple backends are registered, the priority is used to find\n * the best backend. Defaults to 1.\n * @return False if there is already a registered backend under this name, true\n * if not.\n *\n * @doc {heading: 'Backends'}\n */\nexport function registerBackend(name, factory, priority = 1) {\n return ENGINE.registerBackend(name, factory, priority);\n}\n/**\n * Gets the current backend. If no backends have been initialized, this will\n * attempt to initialize the best backend. Will throw an error if the highest\n * priority backend has async initialization, in which case, you should call\n * 'await tf.ready()' before running other code.\n *\n * @doc {heading: 'Backends'}\n */\nexport function backend() {\n return ENGINE.backend;\n}\n/**\n * Sets the global platform.\n *\n * @param platformName The name of this platform.\n * @param platform A platform implementation.\n */\nexport function setPlatform(platformName, platform) {\n env().setPlatform(platformName, platform);\n}\n//# sourceMappingURL=globals.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Add } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Adds two `tf.Tensor`s element-wise, A + B. 
Supports broadcasting.\n *\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3, 4]);\n * const b = tf.tensor1d([10, 20, 30, 40]);\n *\n * a.add(b).print(); // or tf.add(a, b)\n * ```\n *\n * ```js\n * // Broadcast add a with b.\n * const a = tf.scalar(5);\n * const b = tf.tensor1d([10, 20, 30, 40]);\n *\n * a.add(b).print(); // or tf.add(a, b)\n * ```\n * @param a The first `tf.Tensor` to add.\n * @param b The second `tf.Tensor` to add. Must have the same type as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction add_(a, b) {\n let $a = convertToTensor(a, 'a', 'add');\n let $b = convertToTensor(b, 'b', 'add');\n [$a, $b] = makeTypesMatch($a, $b);\n const forward = (backend, save) => {\n const res = backend.add($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Add);\n}\nexport const add = op({ add_ });\n//# sourceMappingURL=add.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { FloorDiv } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Divides two `tf.Tensor`s element-wise, A / B. Supports broadcasting.\n * The result is rounded with floor function.\n *\n *\n * ```js\n * const a = tf.tensor1d([1, 4, 9, 16]);\n * const b = tf.tensor1d([1, 2, 3, 4]);\n *\n * a.floorDiv(b).print(); // or tf.div(a, b)\n * ```\n *\n * ```js\n * // Broadcast div a with b.\n * const a = tf.tensor1d([2, 4, 6, 8]);\n * const b = tf.scalar(2);\n *\n * a.floorDiv(b).print(); // or tf.floorDiv(a, b)\n * ```\n *\n * @param a The first tensor as the numerator.\n * @param b The second tensor as the denominator. Must have the same dtype as\n * `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction floorDiv_(a, b) {\n let $a = convertToTensor(a, 'a', 'floorDiv');\n let $b = convertToTensor(b, 'b', 'floorDiv');\n [$a, $b] = makeTypesMatch($a, $b);\n const forward = (backend, save) => {\n const res = backend.floorDiv($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, FloorDiv);\n}\nexport const floorDiv = op({ floorDiv_ });\n//# sourceMappingURL=floorDiv.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Div } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { floorDiv } from './floorDiv';\nimport { op } from './operation';\n/**\n * Divides two `tf.Tensor`s element-wise, A / B. Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([1, 4, 9, 16]);\n * const b = tf.tensor1d([1, 2, 3, 4]);\n *\n * a.div(b).print(); // or tf.div(a, b)\n * ```\n *\n * ```js\n * // Broadcast div a with b.\n * const a = tf.tensor1d([2, 4, 6, 8]);\n * const b = tf.scalar(2);\n *\n * a.div(b).print(); // or tf.div(a, b)\n * ```\n *\n * @param a The first tensor as the numerator.\n * @param b The second tensor as the denominator. Must have the same dtype as\n * `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction div_(a, b) {\n let $a = convertToTensor(a, 'a', 'div');\n let $b = convertToTensor(b, 'b', 'div');\n [$a, $b] = makeTypesMatch($a, $b);\n if ($a.dtype === 'int32' && $b.dtype === 'int32') {\n return floorDiv($a, $b);\n }\n const forward = (backend, save) => {\n const res = backend.realDivide($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n const attrs = {};\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Div, attrs);\n}\nexport const div = op({ div_ });\n//# sourceMappingURL=div.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Multiply } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Multiplies two `tf.Tensor`s element-wise, A * B. 
Supports broadcasting.\n *\n * We also expose `tf.mulStrict` which has the same signature as this op and\n * asserts that `a` and `b` are the same shape (does not broadcast).\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3, 4]);\n * const b = tf.tensor1d([2, 3, 4, 5]);\n *\n * a.mul(b).print(); // or tf.mul(a, b)\n * ```\n *\n * ```js\n * // Broadcast mul a with b.\n * const a = tf.tensor1d([1, 2, 3, 4]);\n * const b = tf.scalar(5);\n *\n * a.mul(b).print(); // or tf.mul(a, b)\n * ```\n * @param a The first tensor to multiply.\n * @param b The second tensor to multiply. Must have the same dtype as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction mul_(a, b) {\n let $a = convertToTensor(a, 'a', 'mul');\n let $b = convertToTensor(b, 'b', 'mul');\n [$a, $b] = makeTypesMatch($a, $b);\n const forward = (backend, save) => {\n const res = backend.multiply($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Multiply);\n}\nexport const mul = op({ mul_ });\n//# sourceMappingURL=mul.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Abs } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes absolute value element-wise: `abs(x)`\n *\n * ```js\n * const x = tf.tensor1d([-1, 2, -3, 4]);\n *\n * x.abs().print(); // or tf.abs(x)\n * ```\n * @param x The input `tf.Tensor`.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction abs_(x) {\n const $x = convertToTensor(x, 'x', 'abs');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n save([$x]);\n if ($x.dtype === 'complex64') {\n return backend.complexAbs($x);\n }\n return backend.abs($x);\n }, inputs, null /* grad */, Abs);\n}\nexport const abs = op({ abs_ });\n//# sourceMappingURL=abs.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Acos } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes acos of the input `tf.Tensor` element-wise: `acos(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, .7]);\n *\n * x.acos().print(); // or tf.acos(x)\n * ```\n * @param x The input tensor.\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction acos_(x) {\n const $x = convertToTensor(x, 'x', 'acos');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.acos($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Acos);\n}\nexport const acos = op({ acos_ });\n//# sourceMappingURL=acos.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Acosh } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes the inverse hyperbolic cos of the input `tf.Tensor` element-wise:\n * `acosh(x)`\n *\n * ```js\n * const x = tf.tensor1d([10, 1, 3, 5.7]);\n *\n * x.acosh().print(); // or tf.acosh(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction acosh_(x) {\n const $x = convertToTensor(x, 'x', 'acosh');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.acosh($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Acosh);\n}\nexport const acosh = op({ acosh_ });\n//# sourceMappingURL=acosh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { AddN } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * Adds a list of `tf.Tensor`s element-wise, each with the same shape and dtype.\n *\n * ```js\n * const a = tf.tensor1d([1, 2]);\n * const b = tf.tensor1d([3, 4]);\n * const c = tf.tensor1d([5, 6]);\n *\n * tf.addN([a, b, c]).print();\n * ```\n * @param tensors A list of tensors with the same shape and dtype.\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction addN_(tensors) {\n util.assert(Array.isArray(tensors), () => 'The argument passed to tf.addN() must be a list of tensors');\n util.assert(tensors.length >= 1, () => `Must pass at least one tensor to tf.addN(), but got ` +\n `${tensors.length}`);\n const $tensors = tensors.map((t, i) => convertToTensor(t, `tensors${i}`, 'addN'));\n const firstTensor = $tensors[0];\n $tensors.forEach(t => {\n if (t.dtype !== firstTensor.dtype) {\n throw new Error('All tensors passed to tf.addN() must have the same dtype');\n }\n });\n $tensors.forEach(t => {\n if (!util.arraysEqual(t.shape, firstTensor.shape)) {\n throw new Error('All tensors passed to tf.addN() must have the same shape');\n }\n });\n const forward = (backend, save) => {\n const res = backend.addN($tensors);\n save($tensors);\n return res;\n };\n const inputs = $tensors;\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, AddN);\n}\nexport const addN = op({ addN_ });\n//# sourceMappingURL=add_n.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as util from '../util';\n/**\n * Returns true if the axis specifies the inner most dimensions of the\n * array.\n */\nexport function axesAreInnerMostDims(axes, rank) {\n for (let i = 0; i < axes.length; ++i) {\n if (axes[axes.length - i - 1] !== rank - 1 - i) {\n return false;\n }\n }\n return true;\n}\nexport function combineLocations(outputLoc, reduceLoc, axes) {\n const rank = outputLoc.length + reduceLoc.length;\n const loc = [];\n let outIdx = 0;\n let reduceIdx = 0;\n for (let dim = 0; dim < rank; dim++) {\n if (axes.indexOf(dim) === -1) {\n loc.push(outputLoc[outIdx++]);\n }\n else {\n loc.push(reduceLoc[reduceIdx++]);\n }\n }\n return loc;\n}\nexport function computeOutAndReduceShapes(aShape, axes) {\n const outShape = [];\n const rank = aShape.length;\n for (let dim = 0; dim < rank; dim++) {\n if (axes.indexOf(dim) === -1) {\n outShape.push(aShape[dim]);\n }\n }\n const reduceShape = axes.map(dim => aShape[dim]);\n return [outShape, reduceShape];\n}\nexport function expandShapeToKeepDim(shape, axes) {\n const reduceSubShape = axes.map(x => 1);\n return combineLocations(shape, reduceSubShape, axes);\n}\nexport function assertAxesAreInnerMostDims(msg, axes, rank) {\n util.assert(axesAreInnerMostDims(axes, rank), () => `${msg} supports only inner-most axes for now. ` +\n `Got axes ${axes} and rank-${rank} input.`);\n}\n/**\n * Returns the axes permutation to be used with `tf.transpose`, if such\n * permutation is necessary. Otherwise it returns null. This method is used by\n * operations that operate only on inner-most axes.\n */\nexport function getAxesPermutation(axes, rank) {\n if (axesAreInnerMostDims(axes, rank)) {\n return null;\n }\n const result = [];\n for (let i = 0; i < rank; ++i) {\n if (axes.indexOf(i) === -1) {\n result.push(i);\n }\n }\n axes.forEach(axis => result.push(axis));\n return result;\n}\n/** Returns the axes permutation that undoes the original permutation. */\nexport function getUndoAxesPermutation(axes) {\n return axes.map((axis, i) => [i, axis])\n .sort((a, b) => a[1] - b[1])\n .map(x => x[0]);\n}\nexport function getInnerMostAxes(numAxes, rank) {\n const res = [];\n for (let i = rank - numAxes; i < rank; ++i) {\n res.push(i);\n }\n return res;\n}\n//# sourceMappingURL=axis_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { All } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam } from '../util';\nimport { expandShapeToKeepDim, getAxesPermutation, getInnerMostAxes } from './axis_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { transpose } from './transpose';\n/**\n * Computes the logical and of elements across dimensions of a `tf.Tensor`.\n *\n * Reduces the input along the dimensions given in `axes`. Unless `keepDims`\n * is true, the rank of the `tf.Tensor` is reduced by 1 for each entry in\n * `axes`. If `keepDims` is true, the reduced dimensions are retained with\n * length 1. If `axes` has no entries, all dimensions are reduced, and an\n * `tf.Tensor` with a single element is returned.\n *\n * ```js\n * const x = tf.tensor1d([1, 1, 1], 'bool');\n *\n * x.all().print(); // or tf.all(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 1, 0, 0], [2, 2], 'bool');\n *\n * const axis = 1;\n * x.all(axis).print(); // or tf.all(x, axis)\n * ```\n *\n * @param x The input tensor. Must be of dtype bool.\n * @param axis The dimension(s) to reduce. By default it reduces\n * all dimensions.\n * @param keepDims If true, retains reduced dimensions with size 1.\n *\n * @doc {heading: 'Operations', subheading: 'Reduction'}\n */\nfunction all_(x, axis = null, keepDims = false) {\n let $x = convertToTensor(x, 'x', 'all', 'bool');\n const forward = (backend) => {\n const origAxes = parseAxisParam(axis, $x.shape);\n let axes = origAxes;\n const permutedAxes = getAxesPermutation(axes, $x.rank);\n if (permutedAxes != null) {\n $x = transpose($x, permutedAxes);\n axes = getInnerMostAxes(axes.length, $x.rank);\n }\n const res = backend.all($x, axes);\n if (keepDims) {\n const newShape = expandShapeToKeepDim(res.shape, origAxes);\n return reshape(res, newShape);\n }\n return res;\n };\n const inputs = { x: $x };\n const attrs = { axis, keepDims };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, All, attrs);\n}\nexport const all = op({ all_ });\n//# sourceMappingURL=all.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Any } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam } from '../util';\nimport { expandShapeToKeepDim, getAxesPermutation, getInnerMostAxes } from './axis_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { transpose } from './transpose';\n/**\n * Computes the logical or of elements across dimensions of a `tf.Tensor`.\n *\n * Reduces the input along the dimensions given in `axes`. Unless `keepDims`\n * is true, the rank of the `tf.Tensor` is reduced by 1 for each entry in\n * `axes`. If `keepDims` is true, the reduced dimensions are retained with\n * length 1. If `axes` has no entries, all dimensions are reduced, and an\n * `tf.Tensor` with a single element is returned.\n *\n * ```js\n * const x = tf.tensor1d([1, 1, 1], 'bool');\n *\n * x.any().print(); // or tf.any(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 1, 0, 0], [2, 2], 'bool');\n *\n * const axis = 1;\n * x.any(axis).print(); // or tf.any(x, axis)\n * ```\n *\n * @param x The input tensor. Must be of dtype bool.\n * @param axis The dimension(s) to reduce. By default it reduces\n * all dimensions.\n * @param keepDims If true, retains reduced dimensions with size 1.\n *\n * @doc {heading: 'Operations', subheading: 'Reduction'}\n */\nfunction any_(x, axis = null, keepDims = false) {\n let $x = convertToTensor(x, 'x', 'any', 'bool');\n const forward = (backend) => {\n const origAxes = parseAxisParam(axis, $x.shape);\n let axes = origAxes;\n const permutedAxes = getAxesPermutation(axes, $x.rank);\n if (permutedAxes != null) {\n $x = transpose($x, permutedAxes);\n axes = getInnerMostAxes(axes.length, $x.rank);\n }\n const res = backend.any($x, axes);\n if (keepDims) {\n const newShape = expandShapeToKeepDim(res.shape, origAxes);\n return reshape(res, newShape);\n }\n return res;\n };\n const inputs = { x: $x };\n const attrs = { axis, keepDims };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Any, attrs);\n}\n// tslint:disable-next-line:variable-name\nexport const any = op({ any_ });\n//# sourceMappingURL=any.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. 
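// Editor's note: minimal usage sketch for the two boolean reductions defined above
// (tf.all / tf.any are public tfjs ops); values in the comments follow from the inputs:
const m = tf.tensor2d([1, 0, 1, 1], [2, 2], 'bool'); // rows: [1, 0] and [1, 1]
m.all(1).print();        // logical AND per row -> [false, true]
m.any(1).print();        // logical OR  per row -> [true,  true]
m.all(1, true).print();  // keepDims: result shape [2, 1] instead of [2]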
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { ArgMax } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as axis_util from './axis_util';\nimport { op } from './operation';\nimport { transpose } from './transpose';\n/**\n * Returns the indices of the maximum values along an `axis`.\n *\n * The result has the same shape as `input` with the dimension along `axis`\n * removed.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3]);\n *\n * x.argMax().print(); // or tf.argMax(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 4, 3], [2, 2]);\n *\n * const axis = 1;\n * x.argMax(axis).print(); // or tf.argMax(x, axis)\n * ```\n *\n * @param x The input tensor.\n * @param axis The dimension to reduce. Defaults to 0 (outer-most dimension).\n *\n * @doc {heading: 'Operations', subheading: 'Reduction'}\n */\nfunction argMax_(x, axis = 0) {\n let $x = convertToTensor(x, 'x', 'argMax');\n const forward = (backend, save) => {\n save([$x]);\n let axes = util.parseAxisParam(axis, $x.shape);\n const permutedAxes = axis_util.getAxesPermutation(axes, $x.rank);\n if (permutedAxes != null) {\n $x = transpose($x, permutedAxes);\n axes = axis_util.getInnerMostAxes(axes.length, $x.rank);\n }\n return backend.argMax($x, axes[0]);\n };\n const inputs = { x: $x };\n const attrs = { axis };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, ArgMax, attrs);\n}\nexport const argMax = op({ argMax_ });\n//# sourceMappingURL=arg_max.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { ArgMin } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as axis_util from './axis_util';\nimport { op } from './operation';\nimport { transpose } from './transpose';\n/**\n * Returns the indices of the minimum values along an `axis`.\n *\n * The result has the same shape as `input` with the dimension along `axis`\n * removed.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3]);\n *\n * x.argMin().print(); // or tf.argMin(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 4, 3], [2, 2]);\n *\n * const axis = 1;\n * x.argMin(axis).print(); // or tf.argMin(x, axis)\n * ```\n *\n * @param x The input tensor.\n * @param axis The dimension to reduce. Defaults to 0 (outer-most dimension).\n *\n * @doc {heading: 'Operations', subheading: 'Reduction'}\n */\nfunction argMin_(x, axis = 0) {\n let $x = convertToTensor(x, 'x', 'argMin');\n const forward = (backend, save) => {\n save([$x]);\n if (axis == null) {\n axis = 0;\n }\n let axes = util.parseAxisParam(axis, $x.shape);\n const permutedAxes = axis_util.getAxesPermutation(axes, $x.rank);\n if (permutedAxes != null) {\n $x = transpose($x, permutedAxes);\n axes = axis_util.getInnerMostAxes(axes.length, $x.rank);\n }\n return backend.argMin($x, axes[0]);\n };\n const inputs = { x: $x };\n const attrs = { axis };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, ArgMin, attrs);\n}\nexport const argMin = op({ argMin_ });\n//# sourceMappingURL=arg_min.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
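// Editor's note: minimal usage sketch for the index reductions above; axis defaults to 0:
const t = tf.tensor2d([1, 2, 4, 3], [2, 2]); // rows: [1, 2] and [4, 3]
t.argMax(1).print(); // index of max per row -> [1, 0]
t.argMin(1).print(); // index of min per row -> [0, 1]
t.argMax().print();  // along axis 0 (columns) -> [1, 1]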
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Asin } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes asin of the input `tf.Tensor` element-wise: `asin(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, .7]);\n *\n * x.asin().print(); // or tf.asin(x)\n * ```\n * @param x The input tensor.\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction asin_(x) {\n const $x = convertToTensor(x, 'x', 'asin');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.asin($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Asin);\n}\nexport const asin = op({ asin_ });\n//# sourceMappingURL=asin.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Asinh } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes inverse hyperbolic sin of the input `tf.Tensor` element-wise:\n * `asinh(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, .7]);\n *\n * x.asinh().print(); // or tf.asinh(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction asinh_(x) {\n const $x = convertToTensor(x, 'x', 'asinh');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.asinh($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Asinh);\n}\nexport const asinh = op({ asinh_ });\n//# sourceMappingURL=asinh.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Atan } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes atan of the input `tf.Tensor` element-wise: `atan(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, .7]);\n *\n * x.atan().print(); // or tf.atan(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction atan_(x) {\n const $x = convertToTensor(x, 'x', 'atan');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.atan($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Atan);\n}\nexport const atan = op({ atan_ });\n//# sourceMappingURL=atan.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Atan2 } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes arctangent of `tf.Tensor`s a / b element-wise: `atan2(a, b)`.\n * Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([1.0, 1.0, -1.0, .7]);\n * const b = tf.tensor1d([2.0, 13.0, 3.5, .21]);\n *\n * tf.atan2(a, b).print()\n * ```\n *\n * @param a The first tensor.\n * @param b The second tensor. Must have the same dtype as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction atan2_(a, b) {\n let $a = convertToTensor(a, 'a', 'atan2');\n let $b = convertToTensor(b, 'b', 'atan2');\n [$a, $b] = makeTypesMatch($a, $b);\n const forward = (backend, save) => {\n const res = backend.atan2($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Atan2);\n}\nexport const atan2 = op({ atan2_ });\n//# sourceMappingURL=atan2.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
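// Editor's note: the element-wise trig ops (asin, asinh, atan, atan2) all follow the same
// structure: convert the input, run the backend kernel, save tensors for the gradient.
// Minimal usage sketch:
const v = tf.tensor1d([0, 1, -1, 0.7]);
v.asin().print();  // element-wise arcsine
v.atan().print();  // element-wise arctangent
tf.atan2(tf.tensor1d([1, -1]), tf.tensor1d([2, 2])).print(); // element-wise atan(a / b), broadcasting supported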
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Atanh } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes inverse hyperbolic tan of the input `tf.Tensor` element-wise:\n * `atanh(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, .1, -.1, .7]);\n *\n * x.atanh().print(); // or tf.atanh(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction atanh_(x) {\n const $x = convertToTensor(x, 'x', 'atanh');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.atanh($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Atanh);\n}\nexport const atanh = op({ atanh_ });\n//# sourceMappingURL=atanh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as util from '../util';\n/**\n *\n * @param inputShape Input tensor shape is of the following dimensions:\n * `[batch, height, width, inChannels]`.\n * @param filterShape The filter shape is of the following dimensions:\n * `[filterHeight, filterWidth, depth]`.\n * @param strides The strides of the sliding window for each dimension of the\n * input tensor: `[strideHeight, strideWidth]`.\n * If `strides` is a single number,\n * then `strideHeight == strideWidth`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1*1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dataFormat The data format of the input and output data.\n * Defaults to 'NHWC'.\n * @param dilations The dilation rates: `[dilationHeight, dilationWidth]`.\n * Defaults to `[1, 1]`. 
If `dilations` is a single number, then\n * `dilationHeight == dilationWidth`.\n */\nexport function computeDilation2DInfo(inputShape, filterShape, strides, pad, dataFormat = 'NHWC', dilations) {\n // `computerConv2DInfo` require filterShape to be in the dimension of:\n // `[filterHeight, filterWidth, depth, outDepth]`, dilation2d doesn't have\n // outDepth, it should have the same depth as the input.\n // Input shape: [batch, height, width, inChannels]\n const inputChannels = inputShape[3];\n const $filterShape = [...filterShape, inputChannels];\n const $dataFormat = convertConv2DDataFormat(dataFormat);\n return computeConv2DInfo(inputShape, $filterShape, strides, dilations, pad, null /* roundingMode */, null /* depthWise */, $dataFormat);\n}\nexport function computePool2DInfo(inShape, filterSize, strides, dilations, pad, roundingMode, dataFormat = 'channelsLast') {\n const [filterHeight, filterWidth] = parseTupleParam(filterSize);\n let filterShape;\n if (dataFormat === 'channelsLast') {\n filterShape = [filterHeight, filterWidth, inShape[3], inShape[3]];\n }\n else if (dataFormat === 'channelsFirst') {\n filterShape = [filterHeight, filterWidth, inShape[1], inShape[1]];\n }\n else {\n throw new Error(`Unknown dataFormat ${dataFormat}`);\n }\n return computeConv2DInfo(inShape, filterShape, strides, dilations, pad, roundingMode, false, dataFormat);\n}\n/**\n * Computes the information for a forward pass of a pooling3D operation.\n */\nexport function computePool3DInfo(inShape, filterSize, strides, dilations, pad, roundingMode, dataFormat = 'NDHWC') {\n const [filterDepth, filterHeight, filterWidth] = parse3TupleParam(filterSize);\n let filterShape;\n let $dataFormat;\n if (dataFormat === 'NDHWC') {\n $dataFormat = 'channelsLast';\n filterShape =\n [filterDepth, filterHeight, filterWidth, inShape[4], inShape[4]];\n }\n else if (dataFormat === 'NCDHW') {\n $dataFormat = 'channelsFirst';\n filterShape =\n [filterDepth, filterHeight, filterWidth, inShape[1], inShape[1]];\n }\n else {\n throw new Error(`Unknown dataFormat ${dataFormat}`);\n }\n return computeConv3DInfo(inShape, filterShape, strides, dilations, pad, false, $dataFormat, roundingMode);\n}\n/**\n * Computes the information for a forward pass of a convolution/pooling\n * operation.\n */\nexport function computeConv2DInfo(inShape, filterShape, strides, dilations, pad, roundingMode, depthwise = false, dataFormat = 'channelsLast') {\n let [batchSize, inHeight, inWidth, inChannels] = [-1, -1, -1, -1];\n if (dataFormat === 'channelsLast') {\n [batchSize, inHeight, inWidth, inChannels] = inShape;\n }\n else if (dataFormat === 'channelsFirst') {\n [batchSize, inChannels, inHeight, inWidth] = inShape;\n }\n else {\n throw new Error(`Unknown dataFormat ${dataFormat}`);\n }\n const [filterHeight, filterWidth, , filterChannels] = filterShape;\n const [strideHeight, strideWidth] = parseTupleParam(strides);\n const [dilationHeight, dilationWidth] = parseTupleParam(dilations);\n const effectiveFilterHeight = getEffectiveFilterSize(filterHeight, dilationHeight);\n const effectiveFilterWidth = getEffectiveFilterSize(filterWidth, dilationWidth);\n const { padInfo, outHeight, outWidth } = getPadAndOutInfo(pad, inHeight, inWidth, strideHeight, strideWidth, effectiveFilterHeight, effectiveFilterWidth, roundingMode, dataFormat);\n const outChannels = depthwise ? 
filterChannels * inChannels : filterChannels;\n let outShape;\n if (dataFormat === 'channelsFirst') {\n outShape = [batchSize, outChannels, outHeight, outWidth];\n }\n else if (dataFormat === 'channelsLast') {\n outShape = [batchSize, outHeight, outWidth, outChannels];\n }\n return {\n batchSize,\n dataFormat,\n inHeight,\n inWidth,\n inChannels,\n outHeight,\n outWidth,\n outChannels,\n padInfo,\n strideHeight,\n strideWidth,\n filterHeight,\n filterWidth,\n effectiveFilterHeight,\n effectiveFilterWidth,\n dilationHeight,\n dilationWidth,\n inShape,\n outShape,\n filterShape\n };\n}\n/**\n * Computes the information for a forward pass of a 3D convolution/pooling\n * operation.\n */\nexport function computeConv3DInfo(inShape, filterShape, strides, dilations, pad, depthwise = false, dataFormat = 'channelsLast', roundingMode) {\n let [batchSize, inDepth, inHeight, inWidth, inChannels] = [-1, -1, -1, -1, -1];\n if (dataFormat === 'channelsLast') {\n [batchSize, inDepth, inHeight, inWidth, inChannels] = inShape;\n }\n else if (dataFormat === 'channelsFirst') {\n [batchSize, inChannels, inDepth, inHeight, inWidth] = inShape;\n }\n else {\n throw new Error(`Unknown dataFormat ${dataFormat}`);\n }\n const [filterDepth, filterHeight, filterWidth, , filterChannels] = filterShape;\n const [strideDepth, strideHeight, strideWidth] = parse3TupleParam(strides);\n const [dilationDepth, dilationHeight, dilationWidth] = parse3TupleParam(dilations);\n const effectiveFilterDepth = getEffectiveFilterSize(filterDepth, dilationDepth);\n const effectiveFilterHeight = getEffectiveFilterSize(filterHeight, dilationHeight);\n const effectiveFilterWidth = getEffectiveFilterSize(filterWidth, dilationWidth);\n const { padInfo, outDepth, outHeight, outWidth } = get3DPadAndOutInfo(pad, inDepth, inHeight, inWidth, strideDepth, strideHeight, strideWidth, effectiveFilterDepth, effectiveFilterHeight, effectiveFilterWidth, roundingMode);\n const outChannels = depthwise ? filterChannels * inChannels : filterChannels;\n let outShape;\n if (dataFormat === 'channelsFirst') {\n outShape = [batchSize, outChannels, outDepth, outHeight, outWidth];\n }\n else if (dataFormat === 'channelsLast') {\n outShape = [batchSize, outDepth, outHeight, outWidth, outChannels];\n }\n return {\n batchSize,\n dataFormat,\n inDepth,\n inHeight,\n inWidth,\n inChannels,\n outDepth,\n outHeight,\n outWidth,\n outChannels,\n padInfo,\n strideDepth,\n strideHeight,\n strideWidth,\n filterDepth,\n filterHeight,\n filterWidth,\n effectiveFilterDepth,\n effectiveFilterHeight,\n effectiveFilterWidth,\n dilationDepth,\n dilationHeight,\n dilationWidth,\n inShape,\n outShape,\n filterShape\n };\n}\nfunction computeOutputShape2D(inShape, fieldSize, stride, zeroPad, roundingMode) {\n if (zeroPad == null) {\n zeroPad = computeDefaultPad(inShape, fieldSize, stride);\n }\n const inputRows = inShape[0];\n const inputCols = inShape[1];\n const outputRows = conditionalRound((inputRows - fieldSize + 2 * zeroPad) / stride + 1, roundingMode);\n util.assert(util.isInt(outputRows), () => `The output # of rows (${outputRows}) must be an integer. ` +\n `Change the stride and/or zero pad parameters`);\n const outputCols = conditionalRound((inputCols - fieldSize + 2 * zeroPad) / stride + 1, roundingMode);\n util.assert(util.isInt(outputCols), () => `The output # of columns (${outputCols}) must be an integer. 
` +\n `Change the stride and/or zero pad parameters`);\n return [outputRows, outputCols];\n}\nfunction computeOutputShape4D(inShape, fieldSize, outChannels, stride, zeroPad, roundingMode) {\n if (zeroPad == null) {\n zeroPad = computeDefaultPad(inShape, fieldSize, stride);\n }\n const inputDepth = inShape[0];\n const inputRows = inShape[1];\n const inputCols = inShape[2];\n const outputDepths = conditionalRound((inputDepth - fieldSize + 2 * zeroPad) / stride + 1, roundingMode);\n util.assert(util.isInt(outputDepths), () => `The output # of depths (${outputDepths}) must be an integer. ` +\n `Change the stride and/or zero pad parameters`);\n const outputRows = conditionalRound((inputRows - fieldSize + 2 * zeroPad) / stride + 1, roundingMode);\n util.assert(util.isInt(outputRows), () => `The output # of rows (${outputRows}) must be an integer. ` +\n `Change the stride and/or zero pad parameters`);\n const outputCols = conditionalRound((inputCols - fieldSize + 2 * zeroPad) / stride + 1, roundingMode);\n util.assert(util.isInt(outputCols), () => `The output # of columns (${outputCols}) must be an integer. ` +\n `Change the stride and/or zero pad parameters`);\n return [outputDepths, outputRows, outputCols, outChannels];\n}\nexport function computeDefaultPad(inputShape, fieldSize, stride, dilation = 1) {\n const effectiveFieldSize = getEffectiveFilterSize(fieldSize, dilation);\n return Math.floor((inputShape[0] * (stride - 1) - stride + effectiveFieldSize) / 2);\n}\nfunction parseTupleParam(param) {\n if (typeof param === 'number') {\n return [param, param, param];\n }\n if (param.length === 2) {\n return [param[0], param[1], 1];\n }\n return param;\n}\nfunction parse3TupleParam(param) {\n return typeof param === 'number' ? [param, param, param] : param;\n}\n/* See https://www.tensorflow.org/api_docs/python/tf/nn/atrous_conv2d\n * Atrous convolution is equivalent to standard convolution with upsampled\n * filters with effective_filter_height =\n * filter_height + (filter_height - 1) * (dilation - 1)\n * and effective_filter_width =\n * filter_width + (filter_width - 1) * (dilation - 1),\n * produced by inserting dilation - 1 zeros along consecutive elements across\n * the filters' spatial dimensions.\n * When there is a dilation, this converts a filter dimension to the\n * effective filter dimension, so it can be used in a standard convolution.\n */\nfunction getEffectiveFilterSize(filterSize, dilation) {\n if (dilation <= 1) {\n return filterSize;\n }\n return filterSize + (filterSize - 1) * (dilation - 1);\n}\nfunction getPadAndOutInfo(pad, inHeight, inWidth, strideHeight, strideWidth, filterHeight, filterWidth, roundingMode, dataFormat) {\n let padInfo;\n let outHeight;\n let outWidth;\n if (typeof pad === 'number') {\n const padType = (pad === 0) ? 
'VALID' : 'NUMBER';\n padInfo = { top: pad, bottom: pad, left: pad, right: pad, type: padType };\n const outShape = computeOutputShape2D([inHeight, inWidth], filterHeight, strideHeight, pad, roundingMode);\n outHeight = outShape[0];\n outWidth = outShape[1];\n }\n else if (pad === 'same') {\n outHeight = Math.ceil(inHeight / strideHeight);\n outWidth = Math.ceil(inWidth / strideWidth);\n const padAlongHeight = Math.max(0, (outHeight - 1) * strideHeight + filterHeight - inHeight);\n const padAlongWidth = Math.max(0, (outWidth - 1) * strideWidth + filterWidth - inWidth);\n const top = Math.floor(padAlongHeight / 2);\n const bottom = padAlongHeight - top;\n const left = Math.floor(padAlongWidth / 2);\n const right = padAlongWidth - left;\n padInfo = { top, bottom, left, right, type: 'SAME' };\n }\n else if (pad === 'valid') {\n padInfo = { top: 0, bottom: 0, left: 0, right: 0, type: 'VALID' };\n outHeight = Math.ceil((inHeight - filterHeight + 1) / strideHeight);\n outWidth = Math.ceil((inWidth - filterWidth + 1) / strideWidth);\n }\n else if (typeof pad === 'object') {\n const top = dataFormat === 'channelsLast' ? pad[1][0] : pad[2][0];\n const bottom = dataFormat === 'channelsLast' ? pad[1][1] : pad[2][1];\n const left = dataFormat === 'channelsLast' ? pad[2][0] : pad[3][0];\n const right = dataFormat === 'channelsLast' ? pad[2][1] : pad[3][1];\n const padType = (top === 0 && bottom === 0 && left === 0 && right === 0) ?\n 'VALID' :\n 'EXPLICIT';\n padInfo = { top, bottom, left, right, type: padType };\n outHeight = conditionalRound((inHeight - filterHeight + top + bottom) / strideHeight + 1, roundingMode);\n outWidth = conditionalRound((inWidth - filterWidth + left + right) / strideWidth + 1, roundingMode);\n }\n else {\n throw Error(`Unknown padding parameter: ${pad}`);\n }\n return { padInfo, outHeight, outWidth };\n}\nfunction get3DPadAndOutInfo(pad, inDepth, inHeight, inWidth, strideDepth, strideHeight, strideWidth, filterDepth, filterHeight, filterWidth, roundingMode) {\n let padInfo;\n let outDepth;\n let outHeight;\n let outWidth;\n if (typeof pad === 'number') {\n const padType = (pad === 0) ? 
'VALID' : 'NUMBER';\n padInfo = {\n top: pad,\n bottom: pad,\n left: pad,\n right: pad,\n front: pad,\n back: pad,\n type: padType\n };\n const outShape = computeOutputShape4D([inDepth, inHeight, inWidth, 1], filterDepth, 1, strideDepth, pad, roundingMode);\n outDepth = outShape[0];\n outHeight = outShape[1];\n outWidth = outShape[2];\n }\n else if (pad === 'same') {\n outDepth = Math.ceil(inDepth / strideDepth);\n outHeight = Math.ceil(inHeight / strideHeight);\n outWidth = Math.ceil(inWidth / strideWidth);\n const padAlongDepth = (outDepth - 1) * strideDepth + filterDepth - inDepth;\n const padAlongHeight = (outHeight - 1) * strideHeight + filterHeight - inHeight;\n const padAlongWidth = (outWidth - 1) * strideWidth + filterWidth - inWidth;\n const front = Math.floor(padAlongDepth / 2);\n const back = padAlongDepth - front;\n const top = Math.floor(padAlongHeight / 2);\n const bottom = padAlongHeight - top;\n const left = Math.floor(padAlongWidth / 2);\n const right = padAlongWidth - left;\n padInfo = { top, bottom, left, right, front, back, type: 'SAME' };\n }\n else if (pad === 'valid') {\n padInfo = {\n top: 0,\n bottom: 0,\n left: 0,\n right: 0,\n front: 0,\n back: 0,\n type: 'VALID'\n };\n outDepth = Math.ceil((inDepth - filterDepth + 1) / strideDepth);\n outHeight = Math.ceil((inHeight - filterHeight + 1) / strideHeight);\n outWidth = Math.ceil((inWidth - filterWidth + 1) / strideWidth);\n }\n else {\n throw Error(`Unknown padding parameter: ${pad}`);\n }\n return { padInfo, outDepth, outHeight, outWidth };\n}\n/**\n * Rounds a value depending on the rounding mode\n * @param value\n * @param roundingMode\n */\nfunction conditionalRound(value, roundingMode) {\n if (!roundingMode) {\n return value;\n }\n switch (roundingMode) {\n case 'round':\n // used for Caffe Conv\n return Math.round(value);\n case 'ceil':\n // used for Caffe Pool\n return Math.ceil(value);\n case 'floor':\n return Math.floor(value);\n default:\n throw new Error(`Unknown roundingMode ${roundingMode}`);\n }\n}\nexport function tupleValuesAreOne(param) {\n const [dimA, dimB, dimC] = parseTupleParam(param);\n return dimA === 1 && dimB === 1 && dimC === 1;\n}\nexport function eitherStridesOrDilationsAreOne(strides, dilations) {\n return tupleValuesAreOne(strides) || tupleValuesAreOne(dilations);\n}\n/**\n * Convert Conv2D dataFormat from 'NHWC'|'NCHW' to\n * 'channelsLast'|'channelsFirst'\n * @param dataFormat in 'NHWC'|'NCHW' mode\n * @return dataFormat in 'channelsLast'|'channelsFirst' mode\n * @throws unknown dataFormat\n */\nexport function convertConv2DDataFormat(dataFormat) {\n if (dataFormat === 'NHWC') {\n return 'channelsLast';\n }\n else if (dataFormat === 'NCHW') {\n return 'channelsFirst';\n }\n else {\n throw new Error(`Unknown dataFormat ${dataFormat}`);\n }\n}\n//# sourceMappingURL=conv_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
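// Editor's note: the padding math above is easier to follow with concrete numbers. Below is a
// hypothetical standalone helper (not part of tfjs) mirroring getEffectiveFilterSize and the
// 'same' / 'valid' branches of getPadAndOutInfo for a single spatial dimension:
function outSize1D(inSize, filter, stride, dilation, pad) {
  const eff = filter + (filter - 1) * (dilation - 1); // effective (atrous) filter size
  if (pad === 'same') {
    const out = Math.ceil(inSize / stride);
    const padAlong = Math.max(0, (out - 1) * stride + eff - inSize);
    const before = Math.floor(padAlong / 2);
    return { out, before, after: padAlong - before };
  }
  // 'valid': no padding at all
  return { out: Math.ceil((inSize - eff + 1) / stride), before: 0, after: 0 };
}
// outSize1D(5, 3, 2, 1, 'same')  -> { out: 3, before: 1, after: 1 }
// outSize1D(5, 3, 2, 1, 'valid') -> { out: 2, before: 0, after: 0 }
// outSize1D(5, 3, 1, 2, 'valid') -> effective filter 5, so { out: 1, before: 0, after: 0 }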
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { AvgPool } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { cast } from './cast';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the 2D average pooling of an image.\n *\n * @param x The input tensor, of rank 4 or rank 3 of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is assumed.\n * @param filterSize The filter size: `[filterHeight, filterWidth]`. If\n * `filterSize` is a single number, then `filterHeight == filterWidth`.\n * @param strides The strides of the pooling: `[strideHeight, strideWidth]`. If\n * `strides` is a single number, then `strideHeight == strideWidth`.\n * @param pad The type of padding algorithm:\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. If none is provided, it will not round\n * and error if the output is of fractional size.\n */\nfunction avgPool_(x, filterSize, strides, pad, dimRoundingMode) {\n const $x = convertToTensor(x, 'x', 'avgPool', 'float32');\n const dilations = 1;\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in avgPool: Either strides or dilations must be 1. 
' +\n `Got strides ${strides} and dilations '${dilations}'`);\n let x4D = $x;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n reshapedTo4D = true;\n x4D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2]]);\n }\n util.assert(x4D.rank === 4, () => `Error in avgPool: x must be rank 4 but got rank ${x4D.rank}.`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in avgPool: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const forward = (backend, save) => {\n const convInfo = conv_util.computePool2DInfo(x4D.shape, filterSize, strides, 1 /* dilations */, pad, dimRoundingMode);\n save([x4D]);\n if (convInfo.filterWidth === 1 && convInfo.filterHeight === 1 &&\n util.arraysEqual(convInfo.inShape, convInfo.outShape)) {\n return x4D.clone();\n }\n return backend.avgPool(x4D, convInfo);\n };\n const inputs = { x: x4D };\n const attrs = { filterSize, strides, pad, dimRoundingMode };\n let res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, AvgPool, attrs);\n res = cast(res, $x.dtype);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const avgPool = op({ avgPool_ });\n//# sourceMappingURL=avg_pool.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { deprecationWarn } from '../globals';\nimport { AvgPool3D } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { cast } from './cast';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the 3D average pooling.\n *\n * ```js\n * const x = tf.tensor5d([1, 2, 3, 4, 5, 6, 7, 8], [1, 2, 2, 2, 1]);\n * const result = tf.avgPool3d(x, 2, 1, 'valid');\n * result.print();\n * ```\n *\n * @param x The input tensor, of rank 5 or rank 4 of shape\n * `[batch, depth, height, width, inChannels]`.\n * @param filterSize The filter size:\n * `[filterDepth, filterHeight, filterWidth]`.\n * If `filterSize` is a single number,\n * then `filterDepth == filterHeight == filterWidth`.\n * @param strides The strides of the pooling:\n * `[strideDepth, strideHeight, strideWidth]`.\n * If `strides` is a single number,\n * then `strideDepth == strideHeight == strideWidth`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1*1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is 
a number. If none is provided, it will not round\n * and error if the output is of fractional size.\n * @param dataFormat An optional string from: \"NDHWC\", \"NCDHW\". Defaults to\n * \"NDHWC\". Specify the data format of the input and output data. With the\n * default format \"NDHWC\", the data is stored in the order of: [batch,\n * depth, height, width, channels]. Only \"NDHWC\" is currently supported.\n * @param dilations Deprecated, this field will be gone in v3.0.0.\n * The dilation rates:\n * `[dilationDepth, dilationHeight, dilationWidth]`\n * in which we sample input values across the depth, height and width\n * dimensions in dilated pooling.\n * Defaults to `[1, 1, 1]`. If `dilations` is a single number,\n * then `dilationDepth == dilationHeight == dilationWidth`.\n * If it is greater than 1, then all values of `strides` must be 1.\n *\n * @doc {heading: 'Operations', subheading: 'Convolution'}\n */\nfunction avgPool3d_(x, filterSize, strides, pad, dimRoundingMode, dataFormat = 'NDHWC', dilations) {\n if (dilations == null) {\n dilations = [1, 1, 1];\n }\n else {\n deprecationWarn('dilations is deprecated, this field will be gone in ' +\n 'v3.0.0.');\n }\n const $x = convertToTensor(x, 'x', 'avgPool3d', 'float32');\n let x5D = $x;\n let reshapedTo5D = false;\n if ($x.rank === 4) {\n reshapedTo5D = true;\n x5D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2], $x.shape[3]]);\n }\n util.assert(x5D.rank === 5, () => `Error in avgPool3d: x must be rank 5 but got rank ${x5D.rank}.`);\n util.assert(dataFormat === 'NDHWC', () => `Error in avgPool3d: Only NDHWC is currently supported, ` +\n `but got dataFormat of ${dataFormat}`);\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in avgPool3d: Either strides or dilations must be 1. ' +\n `Got strides ${strides} and dilations '${dilations}'`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in avgPool3d: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const forward = (backend, save) => {\n if (dilations == null) {\n dilations = [1, 1, 1];\n }\n const convInfo = conv_util.computePool3DInfo(x5D.shape, filterSize, strides, dilations, pad, dimRoundingMode, dataFormat);\n save([x5D]);\n return backend.avgPool3d(x5D, convInfo);\n };\n const inputs = { x: x5D };\n const attrs = { filterSize, strides, pad, dimRoundingMode, dataFormat, dilations };\n let res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, AvgPool3D, attrs);\n res = cast(res, x5D.dtype);\n if (reshapedTo5D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3], res.shape[4]]);\n }\n return res;\n}\nexport const avgPool3d = op({ avgPool3d_ });\n//# sourceMappingURL=avg_pool_3d.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
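// Editor's note: usage sketch for avgPool above. A rank-3 input is treated as a batch of 1
// (reshaped to 4D and back), and the result is cast back to the input dtype:
const img = tf.tensor3d(
    [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16], [4, 4, 1]);
tf.avgPool(img, 2, 2, 'valid').print();
// 2x2 windows with stride 2 -> averages 3.5, 5.5, 11.5, 13.5 in a [2, 2, 1] tensor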
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as util from '../util';\nexport function assertParamsConsistent(shapes, axis) {\n const rank = shapes[0].length;\n shapes.forEach((shape, i) => {\n util.assert(shape.length === rank, () => `Error in concat${rank}D: rank of tensors[${i}] must be the same ` +\n `as the rank of the rest (${rank})`);\n });\n util.assert(axis >= 0 && axis < rank, () => `Error in concat${rank}D: axis must be between 0 and ${rank - 1}.`);\n const firstShape = shapes[0];\n shapes.forEach((shape, i) => {\n for (let r = 0; r < rank; r++) {\n util.assert((r === axis) || (shape[r] === firstShape[r]), () => `Error in concat${rank}D: Shape of tensors[${i}] (${shape}) ` +\n `does not match the shape of the rest (${firstShape}) ` +\n `along the non-concatenated axis ${i}.`);\n }\n });\n}\nexport function computeOutShape(shapes, axis) {\n const outputShape = shapes[0].slice();\n for (let i = 1; i < shapes.length; i++) {\n outputShape[axis] += shapes[i][axis];\n }\n return outputShape;\n}\n//# sourceMappingURL=concat_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Concat } from '../kernel_names';\nimport { convertToTensorArray } from '../tensor_util_env';\nimport { assert, parseAxisParam, sizeFromShape } from '../util';\nimport { assertParamsConsistent, computeOutShape } from './concat_util';\nimport { op } from './operation';\nimport { tensor } from './tensor';\n/**\n * Concatenates a list of `tf.Tensor`s along a given axis.\n *\n * The tensors ranks and types must match, and their sizes must match in all\n * dimensions except `axis`.\n *\n * Also available are stricter rank-specific methods that assert that\n * `tensors` are of the given rank:\n * - `tf.concat1d`\n * - `tf.concat2d`\n * - `tf.concat3d`\n * - `tf.concat4d`\n *\n * Except `tf.concat1d` (which does not have axis param), all methods have\n * same signature as this method.\n *\n * ```js\n * const a = tf.tensor1d([1, 2]);\n * const b = tf.tensor1d([3, 4]);\n * a.concat(b).print(); // or a.concat(b)\n * ```\n *\n * ```js\n * const a = tf.tensor1d([1, 2]);\n * const b = tf.tensor1d([3, 4]);\n * const c = tf.tensor1d([5, 6]);\n * tf.concat([a, b, c]).print();\n * ```\n 
*\n * ```js\n * const a = tf.tensor2d([[1, 2], [10, 20]]);\n * const b = tf.tensor2d([[3, 4], [30, 40]]);\n * const axis = 1;\n * tf.concat([a, b], axis).print();\n * ```\n * @param tensors A list of tensors to concatenate.\n * @param axis The axis to concate along. Defaults to 0 (the first dim).\n *\n * @doc {heading: 'Tensors', subheading: 'Slicing and Joining'}\n */\nfunction concat_(tensors, axis = 0) {\n assert(tensors.length >= 1, () => 'Pass at least one tensor to concat');\n let $tensors = convertToTensorArray(tensors, 'tensors', 'concat');\n if ($tensors[0].dtype === 'complex64') {\n $tensors.forEach(tensor => {\n if (tensor.dtype !== 'complex64') {\n throw new Error(`Cannot concatenate complex64 tensors with a tensor\n with dtype ${tensor.dtype}. `);\n }\n });\n }\n const forward = (backend, save) => {\n const $axis = parseAxisParam(axis, $tensors[0].shape)[0];\n const outShape = computeOutShape($tensors.map(t => t.shape), $axis);\n if (sizeFromShape(outShape) === 0) {\n return tensor([], outShape);\n }\n // Keep only non-empty tensors (ignore tensors with 0 in their shape).\n $tensors = $tensors.filter(t => t.size > 0);\n if ($tensors.length === 1) {\n return $tensors[0];\n }\n const shapes = $tensors.map(t => t.shape);\n assertParamsConsistent(shapes, $axis);\n const res = backend.concat($tensors, $axis);\n save($tensors);\n return res;\n };\n const inputs = $tensors;\n const attr = { axis };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Concat, attr);\n}\nexport const concat = op({ concat_ });\n//# sourceMappingURL=concat.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Sigmoid } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes sigmoid element-wise, `1 / (1 + exp(-x))`\n *\n * ```js\n * const x = tf.tensor1d([0, -1, 2, -3]);\n *\n * x.sigmoid().print(); // or tf.sigmoid(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction sigmoid_(x) {\n const $x = convertToTensor(x, 'x', 'sigmoid');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.sigmoid($x);\n save([res]);\n return res;\n }, inputs, null /* grad */, Sigmoid);\n}\nexport const sigmoid = op({ sigmoid_ });\n//# sourceMappingURL=sigmoid.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
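// Editor's note: usage sketch for concat / sigmoid above. Shapes must match on every axis
// except the concatenation axis, and zero-size tensors are skipped by the kernel:
const p = tf.tensor2d([[1, 2]]);          // shape [1, 2]
const q = tf.tensor2d([[3, 4], [5, 6]]);  // shape [2, 2]
tf.concat([p, q], 0).print();             // shape [3, 2]
tf.sigmoid(tf.tensor1d([-2, 0, 2])).print(); // approximately [0.12, 0.5, 0.88]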
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Slice } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\nimport * as slice_util from './slice_util';\n/**\n * Extracts a slice from a `tf.Tensor` starting at coordinates `begin`\n * and is of size `size`.\n *\n * Also available are stricter rank-specific methods with the same signature\n * as this method that assert that `x` is of the given rank:\n * - `tf.slice1d`\n * - `tf.slice2d`\n * - `tf.slice3d`\n * - `tf.slice4d`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4]);\n *\n * x.slice([1], [2]).print();\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * x.slice([1, 0], [1, 2]).print();\n * ```\n * @param x The input `tf.Tensor` to slice from.\n * @param begin The coordinates to start the slice from. The length can be\n * less than the rank of x - the rest of the axes will have implicit 0 as\n * start. Can also be a single number, in which case it specifies the\n * first axis.\n * @param size The size of the slice. The length can be less than the rank of\n * x - the rest of the axes will have implicit -1. A value of -1 requests\n * the rest of the dimensions in the axis. Can also be a single number,\n * in which case it specifies the size of the first axis.\n *\n * @doc {heading: 'Tensors', subheading: 'Slicing and Joining'}\n */\nfunction slice_(x, begin, size) {\n const $x = convertToTensor(x, 'x', 'slice');\n if ($x.rank === 0) {\n throw new Error('Slicing scalar is not possible');\n }\n const forward = (backend, save) => {\n const [begin_, size_] = slice_util.parseSliceParams($x, begin, size);\n slice_util.assertParamsValid($x, begin_, size_);\n save([$x]);\n return backend.slice($x, begin_, size_);\n };\n const inputs = { x: $x };\n const attrs = { begin, size };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Slice, attrs);\n}\nexport const slice = op({ slice_ });\n//# sourceMappingURL=slice.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
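// Editor's note: usage sketch for slice above. A short `begin` implies 0 for the remaining
// axes, and omitted `size` entries default to -1 ("the rest of that axis"):
const s = tf.tensor2d([[1, 2, 3], [4, 5, 6]]);
s.slice([0, 1]).print();          // -> [[2, 3], [5, 6]]
s.slice([1, 0], [1, 2]).print();  // -> [[4, 5]]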
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Tanh } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes hyperbolic tangent of the input `tf.Tensor` element-wise: `tanh(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, 70]);\n *\n * x.tanh().print(); // or tf.tanh(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction tanh_(x) {\n const $x = convertToTensor(x, 'x', 'tanh');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const y = backend.tanh($x);\n save([y]);\n return y;\n }, inputs, null /* grad */, Tanh);\n}\nexport const tanh = op({ tanh_ });\n//# sourceMappingURL=tanh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport { add } from './add';\nimport { concat } from './concat';\nimport { matMul } from './mat_mul';\nimport { mul } from './mul';\nimport { op } from './operation';\nimport { sigmoid } from './sigmoid';\nimport { slice } from './slice';\nimport { tanh } from './tanh';\n/**\n * Computes the next state and output of a BasicLSTMCell.\n *\n * Returns `[newC, newH]`.\n *\n * Derived from tf.contrib.rnn.BasicLSTMCell.\n *\n * @param forgetBias Forget bias for the cell.\n * @param lstmKernel The weights for the cell.\n * @param lstmBias The bias for the cell.\n * @param data The input to the cell.\n * @param c Previous cell state.\n * @param h Previous cell output.\n *\n * @doc {heading: 'Operations', subheading: 'RNN'}\n */\nfunction basicLSTMCell_(forgetBias, lstmKernel, lstmBias, data, c, h) {\n const $forgetBias = convertToTensor(forgetBias, 'forgetBias', 'basicLSTMCell');\n const $lstmKernel = convertToTensor(lstmKernel, 'lstmKernel', 'basicLSTMCell');\n const $lstmBias = convertToTensor(lstmBias, 'lstmBias', 'basicLSTMCell');\n const $data = convertToTensor(data, 'data', 'basicLSTMCell');\n const $c = convertToTensor(c, 'c', 'basicLSTMCell');\n const $h = convertToTensor(h, 'h', 'basicLSTMCell');\n const combined = concat([$data, $h], 1);\n const weighted = matMul(combined, $lstmKernel);\n const res = 
add(weighted, $lstmBias);\n // i = input_gate, j = new_input, f = forget_gate, o = output_gate\n const batchSize = res.shape[0];\n const sliceCols = res.shape[1] / 4;\n const sliceSize = [batchSize, sliceCols];\n const i = slice(res, [0, 0], sliceSize);\n const j = slice(res, [0, sliceCols], sliceSize);\n const f = slice(res, [0, sliceCols * 2], sliceSize);\n const o = slice(res, [0, sliceCols * 3], sliceSize);\n const newC = add(mul(sigmoid(i), tanh(j)), mul($c, sigmoid(add($forgetBias, f))));\n const newH = mul(tanh(newC), sigmoid(o));\n return [newC, newH];\n}\nexport const basicLSTMCell = op({ basicLSTMCell_ });\n//# sourceMappingURL=basic_lstm_cell.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { BatchToSpaceND } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * This operation reshapes the \"batch\" dimension 0 into `M + 1` dimensions of\n * shape `blockShape + [batch]`, interleaves these blocks back into the grid\n * defined by the spatial dimensions `[1, ..., M]`, to obtain a result with\n * the same rank as the input. The spatial dimensions of this intermediate\n * result are then optionally cropped according to `crops` to produce the\n * output. This is the reverse of `tf.spaceToBatchND`. See below for a precise\n * description.\n *\n * ```js\n * const x = tf.tensor4d([1, 2, 3, 4], [4, 1, 1, 1]);\n * const blockShape = [2, 2];\n * const crops = [[0, 0], [0, 0]];\n *\n * x.batchToSpaceND(blockShape, crops).print();\n * ```\n *\n * @param x A `tf.Tensor`. N-D with `x.shape` = `[batch] + spatialShape +\n * remainingShape`, where spatialShape has `M` dimensions.\n * @param blockShape A 1-D array. Must have shape `[M]`, all values must\n * be >= 1.\n * @param crops A 2-D array. Must have shape `[M, 2]`, all values must be >= 0.\n * `crops[i] = [cropStart, cropEnd]` specifies the amount to crop from input\n * dimension `i + 1`, which corresponds to spatial dimension `i`. It is required\n * that `cropStart[i] + cropEnd[i] <= blockShape[i] * inputShape[i + 1]`\n *\n * This operation is equivalent to the following steps:\n *\n * 1. Reshape `x` to `reshaped` of shape: `[blockShape[0], ...,\n * blockShape[M-1], batch / prod(blockShape), x.shape[1], ...,\n * x.shape[N-1]]`\n *\n * 2. Permute dimensions of `reshaped`to produce `permuted` of shape `[batch /\n * prod(blockShape),x.shape[1], blockShape[0], ..., x.shape[M],\n * blockShape[M-1],x.shape[M+1], ..., x.shape[N-1]]`\n *\n * 3. Reshape `permuted` to produce `reshapedPermuted` of shape `[batch /\n * prod(blockShape),x.shape[1] * blockShape[0], ..., x.shape[M] *\n * blockShape[M-1],x.shape[M+1], ..., x.shape[N-1]]`\n *\n * 4. 
Crop the start and end of dimensions `[1, ..., M]` of `reshapedPermuted`\n * according to `crops` to produce the output of shape: `[batch /\n * prod(blockShape),x.shape[1] * blockShape[0] - crops[0,0] - crops[0,1],\n * ..., x.shape[M] * blockShape[M-1] - crops[M-1,0] -\n * crops[M-1,1],x.shape[M+1], ..., x.shape[N-1]]`\n *\n * @doc {heading: 'Tensors', subheading: 'Transformations'}\n */\nfunction batchToSpaceND_(x, blockShape, crops) {\n const $x = convertToTensor(x, 'x', 'batchToSpaceND');\n const prod = blockShape.reduce((a, b) => a * b);\n util.assert($x.rank >= 1 + blockShape.length, () => `input rank is ${$x.rank} but should be > than blockShape.length ${blockShape.length}`);\n util.assert(crops.length === blockShape.length, () => `crops.length is ${crops.length} but should be equal to blockShape.length ${blockShape.length}`);\n util.assert($x.shape[0] % prod === 0, () => `input tensor batch is ${$x.shape[0]} but is not divisible by the product of ` +\n `the elements of blockShape ${blockShape.join(' * ')} === ${prod}`);\n const forward = backend => {\n return backend.batchToSpaceND($x, blockShape, crops);\n };\n const inputs = { x: $x };\n const attrs = { blockShape, crops };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, BatchToSpaceND, attrs);\n}\nexport const batchToSpaceND = op({ batchToSpaceND_ });\n//# sourceMappingURL=batch_to_space_nd.js.map", "import { reshape } from './reshape';\nexport function xAs4D(x) {\n let x4D;\n if (x.rank === 0 || x.rank === 1) {\n x4D = reshape(x, [1, 1, 1, x.size]);\n }\n else if (x.rank === 2) {\n x4D = reshape(x, [1, 1, x.shape[0], x.shape[1]]);\n }\n else if (x.rank === 3) {\n x4D = reshape(x, [1, x.shape[0], x.shape[1], x.shape[2]]);\n }\n else {\n x4D = x;\n }\n return x4D;\n}\n//# sourceMappingURL=batchnorm_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
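// Editor's note: the basicLSTMCell op above packs all four gates into a single matmul and then
// slices the result into i / j / f / o blocks, each of width hiddenSize. Hedged usage sketch
// with made-up sizes (kernel shape [inputSize + hiddenSize, 4 * hiddenSize]):
const inputSize = 3, hiddenSize = 2, batch = 1;
const lstmKernel = tf.randomNormal([inputSize + hiddenSize, 4 * hiddenSize]);
const lstmBias = tf.zeros([4 * hiddenSize]);
const forgetBias = tf.scalar(1.0);
const x0 = tf.randomNormal([batch, inputSize]);
const c0 = tf.zeros([batch, hiddenSize]);
const h0 = tf.zeros([batch, hiddenSize]);
const [c1, h1] = tf.basicLSTMCell(forgetBias, lstmKernel, lstmBias, x0, c0, h0);
c1.print(); // next cell state, shape [batch, hiddenSize]
h1.print(); // next output,     shape [batch, hiddenSize]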
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { FusedBatchNorm } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { xAs4D } from './batchnorm_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Batch normalization.\n *\n * As described in\n * [http://arxiv.org/abs/1502.03167](http://arxiv.org/abs/1502.03167).\n *\n * Mean, variance, scale, and offset can be of two shapes:\n * - The same shape as the input.\n * - In the common case, the depth dimension is the last dimension of x, so\n * the values would be an `tf.Tensor1D` of shape [depth].\n *\n * Also available are stricter rank-specific methods with the same signature\n * as this method that assert that parameters passed are of given rank\n * - `tf.batchNorm2d`\n * - `tf.batchNorm3d`\n * - `tf.batchNorm4d`\n *\n * @param x The input Tensor.\n * @param mean A mean Tensor.\n * @param variance A variance Tensor.\n * @param offset An offset Tensor.\n * @param scale A scale Tensor.\n * @param varianceEpsilon A small float number to avoid dividing by 0.\n *\n * @doc {heading: 'Operations', subheading: 'Normalization'}\n */\nfunction batchNorm_(x, mean, variance, offset, scale, varianceEpsilon) {\n if (varianceEpsilon == null) {\n varianceEpsilon = 0.001;\n }\n const $x = convertToTensor(x, 'x', 'batchNorm');\n const $mean = convertToTensor(mean, 'mean', 'batchNorm');\n const $variance = convertToTensor(variance, 'variance', 'batchNorm');\n let $scale;\n if (scale != null) {\n $scale = convertToTensor(scale, 'scale', 'batchNorm');\n }\n let $offset;\n if (offset != null) {\n $offset = convertToTensor(offset, 'offset', 'batchNorm');\n }\n util.assert($mean.rank === $variance.rank, () => 'Batch normalization gradient requires mean and variance to have ' +\n 'equal ranks.');\n util.assert($offset == null || $mean.rank === $offset.rank, () => 'Batch normalization gradient requires mean and offset to have ' +\n 'equal ranks.');\n util.assert($scale == null || $mean.rank === $scale.rank, () => 'Batch normalization gradient requires mean and scale to have ' +\n 'equal ranks.');\n const x4D = xAs4D($x);\n const forward = (backend, save) => {\n save([x4D, $mean, $variance, $scale]);\n return backend.batchNorm(x4D, as1DOr4D($mean), as1DOr4D($variance), as1DOr4D($offset), as1DOr4D($scale), varianceEpsilon);\n };\n const inputs = {\n x: x4D,\n scale: $scale,\n offset: $offset,\n mean: $mean,\n variance: $variance\n };\n const attrs = { varianceEpsilon };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* gradient */, FusedBatchNorm, attrs);\n return reshape(res, $x.shape);\n}\nfunction as1DOr4D(x) {\n if (x == null) {\n return null;\n }\n if (x.rank === 0) {\n // tslint:disable-next-line:no-unnecessary-type-assertion\n return reshape(x, [x.size]);\n }\n else if (x.rank === 1) {\n 
return x;\n }\n else if (x.rank === 2) {\n // tslint:disable-next-line:no-unnecessary-type-assertion\n return reshape(x, [1, 1, x.shape[0], x.shape[1]]);\n }\n else if (x.rank === 3) {\n // tslint:disable-next-line:no-unnecessary-type-assertion\n return reshape(x, [1, x.shape[0], x.shape[1], x.shape[2]]);\n }\n return x;\n}\nexport const batchNorm = op({ batchNorm_ });\n//# sourceMappingURL=batchnorm.js.map", "import { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { batchNorm } from './batchnorm';\nimport { op } from './operation';\n/**\n * Batch normalization, strictly for 2D. For the more relaxed version, see\n * `tf.batchNorm`.\n *\n * @param x The input Tensor.\n * @param mean A mean Tensor.\n * @param variance A variance Tensor.\n * @param offset An offset Tensor.\n * @param scale A scale Tensor.\n * @param varianceEpsilon A small float number to avoid dividing by 0.\n */\nfunction batchNorm2d_(x, mean, variance, offset, scale, varianceEpsilon) {\n const $x = convertToTensor(x, 'x', 'batchNorm');\n const $mean = convertToTensor(mean, 'mean', 'batchNorm');\n const $variance = convertToTensor(variance, 'variance', 'batchNorm');\n let $scale;\n if (scale != null) {\n $scale = convertToTensor(scale, 'scale', 'batchNorm');\n }\n let $offset;\n if (offset != null) {\n $offset = convertToTensor(offset, 'offset', 'batchNorm');\n }\n util.assert($x.rank === 2, () => `Error in batchNorm2D: x must be rank 2 but got rank ` +\n `${$x.rank}.`);\n util.assert($mean.rank === 2 || $mean.rank === 1, () => `Error in batchNorm2D: mean must be rank 2 or rank 1 but ` +\n `got rank ${$mean.rank}.`);\n util.assert($variance.rank === 2 || $variance.rank === 1, () => `Error in batchNorm2D: variance must be rank 2 or rank 1 ` +\n `but got rank ${$variance.rank}.`);\n if ($scale != null) {\n util.assert($scale.rank === 2 || $scale.rank === 1, () => `Error in batchNorm2D: scale must be rank 2 or rank 1 ` +\n `but got rank ${$scale.rank}.`);\n }\n if ($offset != null) {\n util.assert($offset.rank === 2 || $offset.rank === 1, () => `Error in batchNorm2D: offset must be rank 2 or rank 1 ` +\n `but got rank ${$offset.rank}.`);\n }\n return batchNorm($x, $mean, $variance, $offset, $scale, varianceEpsilon);\n}\nexport const batchNorm2d = op({ batchNorm2d_ });\n//# sourceMappingURL=batchnorm2d.js.map", "import { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { batchNorm } from './batchnorm';\nimport { op } from './operation';\n/**\n * Batch normalization, strictly for 3D. 
For the more relaxed version, see\n * `tf.batchNorm`.\n *\n * @param x The input Tensor.\n * @param mean A mean Tensor.\n * @param variance A variance Tensor.\n * @param offset An offset Tensor.\n * @param scale A scale Tensor.\n * @param varianceEpsilon A small float number to avoid dividing by 0.\n */\nfunction batchNorm3d_(x, mean, variance, offset, scale, varianceEpsilon) {\n const $x = convertToTensor(x, 'x', 'batchNorm');\n const $mean = convertToTensor(mean, 'mean', 'batchNorm');\n const $variance = convertToTensor(variance, 'variance', 'batchNorm');\n let $scale;\n if (scale != null) {\n $scale = convertToTensor(scale, 'scale', 'batchNorm');\n }\n let $offset;\n if (offset != null) {\n $offset = convertToTensor(offset, 'offset', 'batchNorm');\n }\n util.assert($x.rank === 3, () => `Error in batchNorm3D: x must be rank 3 but got rank ` +\n `${$x.rank}.`);\n util.assert($mean.rank === 3 || $mean.rank === 1, () => `Error in batchNorm3D: mean must be rank 3 or rank 1 but ` +\n `got rank ${$mean.rank}.`);\n util.assert($variance.rank === 3 || $variance.rank === 1, () => `Error in batchNorm3D: variance must be rank 3 or rank 1 ` +\n `but got rank ${$variance.rank}.`);\n if ($scale != null) {\n util.assert($scale.rank === 3 || $scale.rank === 1, () => `Error in batchNorm3D: scale must be rank 3 or rank 1 ` +\n `but got rank ${$scale.rank}.`);\n }\n if ($offset != null) {\n util.assert($offset.rank === 3 || $offset.rank === 1, () => `Error in batchNorm3D: offset must be rank 3 or rank 1 ` +\n `but got rank ${$offset.rank}.`);\n }\n return batchNorm($x, $mean, $variance, $offset, $scale, varianceEpsilon);\n}\nexport const batchNorm3d = op({ batchNorm3d_ });\n//# sourceMappingURL=batchnorm3d.js.map", "import { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { batchNorm } from './batchnorm';\nimport { op } from './operation';\n/**\n * Batch normalization, strictly for 4D. 
For the more relaxed version, see\n * `tf.batchNorm`.\n *\n * @param x The input Tensor.\n * @param mean A mean Tensor.\n * @param variance A variance Tensor.\n * @param offset An offset Tensor.\n * @param scale A scale Tensor.\n * @param varianceEpsilon A small float number to avoid dividing by 0.\n */\nfunction batchNorm4d_(x, mean, variance, offset, scale, varianceEpsilon) {\n const $x = convertToTensor(x, 'x', 'batchNorm');\n const $mean = convertToTensor(mean, 'mean', 'batchNorm');\n const $variance = convertToTensor(variance, 'variance', 'batchNorm');\n let $scale;\n if (scale != null) {\n $scale = convertToTensor(scale, 'scale', 'batchNorm');\n }\n let $offset;\n if (offset != null) {\n $offset = convertToTensor(offset, 'offset', 'batchNorm');\n }\n util.assert($x.rank === 4, () => `Error in batchNorm4D: x must be rank 4 but got rank ` +\n `${$x.rank}.`);\n util.assert($mean.rank === 4 || $mean.rank === 1, () => `Error in batchNorm4D: mean must be rank 4 or rank 1 but ` +\n `got rank ${$mean.rank}.`);\n util.assert($variance.rank === 4 || $variance.rank === 1, () => `Error in batchNorm4D: variance must be rank 4 or rank 1 ` +\n `but got rank ${$variance.rank}.`);\n if ($scale != null) {\n util.assert($scale.rank === 4 || $scale.rank === 1, () => `Error in batchNorm4D: scale must be rank 4 or rank 1 ` +\n `but got rank ${$scale.rank}.`);\n }\n if ($offset != null) {\n util.assert($offset.rank === 4 || $offset.rank === 1, () => `Error in batchNorm4D: offset must be rank 4 or rank 1 ` +\n `but got rank ${$offset.rank}.`);\n }\n return batchNorm($x, $mean, $variance, $offset, $scale, varianceEpsilon);\n}\nexport const batchNorm4d = op({ batchNorm4d_ });\n//# sourceMappingURL=batchnorm4d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { BroadcastTo } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { clone } from './clone';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Broadcast an array to a compatible shape NumPy-style.\n *\n * The tensor's shape is compared to the broadcast shape from end to beginning.\n * Ones are prepended to the tensor's shape until is has the same length as\n * the broadcast shape. If input.shape[i]==shape[i], the (i+1)-th axis is\n * already broadcast-compatible. 
If input.shape[i]==1 and shape[i]==N, then\n * the input tensor is tiled N times along that axis (using tf.tile).\n *\n * @param input The tensor that is to be broadcasted.\n * @param shape The input is to be broadcast to this shape.\n *\n * @doc {heading: 'Tensors', subheading: 'Transformations'}\n */\nfunction broadcastTo_(x, shape) {\n let input = convertToTensor(x, 'broadcastTo', 'x');\n const xShape = input.shape;\n if (shape.some(d => !(d > 0) || d % 1 !== 0)) {\n throw new Error(`broadcastTo(): Invalid broadcast shape [${shape}].`);\n }\n if (shape.length < input.rank) {\n throw new Error(`broadcastTo(): shape.length=${shape.length} < input.rank=${input.rank}.`);\n }\n if (shape.length > input.rank) {\n const newShape = input.shape.slice();\n while (newShape.length < shape.length) {\n newShape.unshift(1);\n }\n input = reshape(input, newShape);\n }\n const inputShape = input.shape;\n const reps = Array.from(shape);\n for (let i = shape.length - 1; i >= 0; i--) {\n if (inputShape[i] === shape[i]) {\n reps[i] = 1;\n }\n else if (input.shape[i] !== 1) {\n throw new Error(`broadcastTo(): [${xShape}] cannot be broadcast to [${shape}].`);\n }\n }\n const axes = reps.map((n, i) => n > 1 ? i : -1).filter(i => i >= 0);\n if (axes.length === 0) {\n return clone(input);\n }\n const forward = (backend) => backend.tile(input, reps);\n const inputs = { x: input };\n const attrs = { shape, inputShape };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, BroadcastTo, attrs);\n}\nexport const broadcastTo = op({ broadcastTo_ });\n//# sourceMappingURL=broadcast_to.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Ceil } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes ceiling of input `tf.Tensor` element-wise: `ceil(x)`\n *\n * ```js\n * const x = tf.tensor1d([.6, 1.1, -3.3]);\n *\n * x.ceil().print(); // or tf.ceil(x)\n * ```\n * @param x The input Tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction ceil_(x) {\n const $x = convertToTensor(x, 'x', 'ceil');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(backend => backend.ceil($x), inputs, null /* grad */, Ceil);\n}\nexport const ceil = op({ ceil_ });\n//# sourceMappingURL=ceil.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { ClipByValue } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * Clips values element-wise. `max(min(x, clipValueMax), clipValueMin)`\n *\n * ```js\n * const x = tf.tensor1d([-1, 2, -3, 4]);\n *\n * x.clipByValue(-2, 3).print(); // or tf.clipByValue(x, -2, 3)\n * ```\n * @param x The input tensor.\n * @param clipValueMin Lower-bound of range to be clipped to.\n * @param clipValueMax Upper-bound of range to be clipped to.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction clipByValue_(x, clipValueMin, clipValueMax) {\n const $x = convertToTensor(x, 'x', 'clipByValue');\n util.assert((clipValueMin <= clipValueMax), () => `Error in clip: min (${clipValueMin}) must be ` +\n `less than or equal to max (${clipValueMax}).`);\n const inputs = { x: $x };\n const attrs = { clipValueMin, clipValueMax };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.clip($x, clipValueMin, clipValueMax);\n save([$x]);\n return res;\n }, inputs, null /* grad */, ClipByValue, attrs);\n}\nexport const clipByValue = op({ clipByValue_ });\n//# sourceMappingURL=clip_by_value.js.map", "import { concat } from './concat';\nimport { op } from './operation';\n/**\n * Concatenates a list of`tf.Tensor1D`s along an axis. See `concat` for details.\n *\n * For example, if:\n * A: shape(3) = |r1, g1, b1|\n * B: shape(2) = |r2, g2|\n * C = tf.concat1d([A, B]) == |r1, g1, b1, r2, g2|\n *\n * @param tensors A list of`tf.Tensor`s to concatenate.\n * @return The concatenated array.\n */\nfunction concat1d_(tensors) {\n return concat(tensors, 0 /* axis */);\n}\nexport const concat1d = op({ concat1d_ });\n//# sourceMappingURL=concat_1d.js.map", "import { concat } from './concat';\nimport { op } from './operation';\n/**\n * Concatenates a list of`tf.Tensor2D`s along an axis. 
See `concat` for details.\n *\n * For example, if:\n * A: shape(2, 3) = | r1, g1, b1 |\n * | r2, g2, b2 |\n *\n * B: shape(2, 3) = | r3, g3, b3 |\n * | r4, g4, b4 |\n *\n * C = tf.concat2d([A, B], axis)\n *\n * if axis = 0:\n * C: shape(4, 3) = | r1, g1, b1 |\n * | r2, g2, b2 |\n * | r3, g3, b3 |\n * | r4, g4, b4 |\n *\n * if axis = 1:\n * C = shape(2, 6) = | r1, g1, b1, r3, g3, b3 |\n * | r2, g2, b2, r4, g4, b4 |\n *\n *\n * @param tensors A list of `tf.Tensor`s to concatenate.\n * @param axis The axis to concatenate along.\n * @return The concatenated array.\n */\nfunction concat2d_(tensors, axis) {\n return concat(tensors, axis);\n}\nexport const concat2d = op({ concat2d_ });\n//# sourceMappingURL=concat_2d.js.map", "import { concat } from './concat';\nimport { op } from './operation';\n/**\n * Concatenates a list of `tf.Tensor3D`s along an axis.\n * See `concat` for details.\n *\n * For example, if:\n * A: shape(2, 1, 3) = | r1, g1, b1 |\n * | r2, g2, b2 |\n *\n * B: shape(2, 1, 3) = | r3, g3, b3 |\n * | r4, g4, b4 |\n *\n * C = tf.concat3d([A, B], axis)\n *\n * if axis = 0:\n * C: shape(4, 1, 3) = | r1, g1, b1 |\n * | r2, g2, b2 |\n * | r3, g3, b3 |\n * | r4, g4, b4 |\n *\n * if axis = 1:\n * C: shape(2, 2, 3) = | r1, g1, b1, r3, g3, b3 |\n * | r2, g2, b2, r4, g4, b4 |\n *\n * if axis = 2:\n * C = shape(2, 1, 6) = | r1, g1, b1, r3, g3, b3 |\n * | r2, g2, b2, r4, g4, b4 |\n *\n * @param tensors A list of`tf.Tensor`s to concatenate.\n * @param axis The axis to concate along.\n * @return The concatenated array.\n */\nfunction concat3d_(tensors, axis) {\n return concat(tensors, axis);\n}\nexport const concat3d = op({ concat3d_ });\n//# sourceMappingURL=concat_3d.js.map", "import { concat } from './concat';\nimport { op } from './operation';\n/**\n * Concatenates a list of `tf.Tensor4D`s along an axis.\n * See `concat` for details.\n *\n * @param tensors A list of `tf.Tensor`s to concatenate.\n * @param axis The axis to concate along.\n * @return The concatenated array.\n */\nfunction concat4d_(tensors, axis) {\n return concat(tensors, axis);\n}\nexport const concat4d = op({ concat4d_ });\n//# sourceMappingURL=concat_4d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Conv2D } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes a 2D convolution over the input x.\n *\n * @param x The input tensor, of rank 4 or rank 3, of shape\n * `[batch, height, width, inChannels]`. 
If rank 3, batch of 1 is\n * assumed.\n * @param filter The filter, rank 4, of shape\n * `[filterHeight, filterWidth, inDepth, outDepth]`.\n * @param strides The strides of the convolution: `[strideHeight,\n * strideWidth]`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dataFormat: An optional string from: \"NHWC\", \"NCHW\". Defaults to\n * \"NHWC\". Specify the data format of the input and output data. With the\n * default format \"NHWC\", the data is stored in the order of: [batch,\n * height, width, channels].\n * @param dilations The dilation rates: `[dilationHeight, dilationWidth]`\n * in which we sample input values across the height and width dimensions\n * in atrous convolution. Defaults to `[1, 1]`. If `dilations` is a single\n * number, then `dilationHeight == dilationWidth`. If it is greater than\n * 1, then all values of `strides` must be 1.\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. If none is provided, it will not round\n * and error if the output is of fractional size.\n *\n * @doc {heading: 'Operations', subheading: 'Convolution'}\n */\nfunction conv2d_(x, filter, strides, pad, dataFormat = 'NHWC', dilations = [1, 1], dimRoundingMode) {\n const $x = convertToTensor(x, 'x', 'conv2d');\n const $filter = convertToTensor(filter, 'filter', 'conv2d');\n let x4D = $x;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n reshapedTo4D = true;\n x4D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2]]);\n }\n util.assert(x4D.rank === 4, () => `Error in conv2d: input must be rank 4, but got rank ${x4D.rank}.`);\n util.assert($filter.rank === 4, () => `Error in conv2d: filter must be rank 4, but got rank ` +\n `${$filter.rank}.`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in conv2d: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const inDepth = dataFormat === 'NHWC' ? x4D.shape[3] : x4D.shape[1];\n util.assert(inDepth === $filter.shape[2], () => `Error in conv2d: depth of input (${inDepth}) must match ` +\n `input depth for filter ${$filter.shape[2]}.`);\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in conv2D: Either strides or dilations must be 1. 
' +\n `Got strides ${strides} and dilations '${dilations}'`);\n const forward = (backend, save) => {\n const $dataFormat = conv_util.convertConv2DDataFormat(dataFormat);\n const convInfo = conv_util.computeConv2DInfo(x4D.shape, $filter.shape, strides, dilations, pad, dimRoundingMode, false, $dataFormat);\n const res = backend.conv2d(x4D, $filter, convInfo);\n save([x4D, $filter]);\n return res;\n };\n const inputs = { x: x4D, filter: $filter };\n const attrs = { strides, pad, dataFormat, dilations, dimRoundingMode };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, Conv2D, attrs);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const conv2d = op({ conv2d_ });\n//# sourceMappingURL=conv2d.js.map", "import { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { conv2d } from './conv2d';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes a 1D convolution over the input x.\n *\n * @param x The input tensor, of rank 3 or rank 2, of shape\n * `[batch, width, inChannels]`. If rank 2, batch of 1 is assumed.\n * @param filter The filter, rank 3, of shape\n * `[filterWidth, inDepth, outDepth]`.\n * @param stride The number of entries by which the filter is moved right at\n * each step.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dataFormat An optional string from \"NWC\", \"NCW\". Defaults to \"NWC\",\n * the data is stored in the order of [batch, in_width, in_channels]. Only\n * \"NWC\" is currently supported.\n * @param dilation The dilation rate in which we sample input values in\n * atrous convolution. Defaults to `1`. If it is greater than 1, then\n * stride must be `1`.\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. If none is provided, it will not round\n * and error if the output is of fractional size.\n *\n * @doc {heading: 'Operations', subheading: 'Convolution'}\n */\nfunction conv1d_(x, filter, stride, pad, dataFormat = 'NWC', dilation = 1, dimRoundingMode) {\n const $x = convertToTensor(x, 'x', 'conv1d');\n const $filter = convertToTensor(filter, 'filter', 'conv1d');\n let x3D = $x;\n let reshapedTo3D = false;\n if ($x.rank === 2) {\n reshapedTo3D = true;\n x3D = reshape($x, [1, $x.shape[0], $x.shape[1]]);\n }\n util.assert(x3D.rank === 3, () => `Error in conv1d: input must be rank 3, but got rank ${x3D.rank}.`);\n util.assert($filter.rank === 3, () => `Error in conv1d: filter must be rank 3, but got rank ` +\n `${$filter.rank}.`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in conv1d: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n util.assert(x3D.shape[2] === $filter.shape[1], () => `Error in conv1d: depth of input (${x3D.shape[2]}) must match ` +\n `input depth for filter ${$filter.shape[1]}.`);\n util.assert(conv_util.eitherStridesOrDilationsAreOne(stride, dilation), () => 'Error in conv1D: Either stride or dilation must be 1. 
' +\n `Got stride ${stride} and dilation '${dilation}'`);\n util.assert(dataFormat === 'NWC', () => `Error in conv1d: got dataFormat of ${dataFormat} but only NWC is currently supported.`);\n const filter4D = reshape($filter, [1, $filter.shape[0], $filter.shape[1], $filter.shape[2]]);\n const input4D = reshape(x3D, [x3D.shape[0], 1, x3D.shape[1], x3D.shape[2]]);\n const strides = [1, stride];\n const dilations = [1, dilation];\n const conv2dDataFormat = 'NHWC';\n const res = conv2d(input4D, filter4D, strides, pad, conv2dDataFormat, dilations, dimRoundingMode);\n if (reshapedTo3D) {\n return reshape(res, [res.shape[2], res.shape[3]]);\n }\n return reshape(res, [res.shape[0], res.shape[2], res.shape[3]]);\n}\nexport const conv1d = op({ conv1d_ });\n//# sourceMappingURL=conv1d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Conv2DBackpropInput } from '../kernel_names';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the derivative of the input of a 2D convolution.\n *\n * @param xShape The shape of the input: [batch, height, width, inDepth].\n * If length of 3, batch of 1 is assumed.\n * @param dy The derivative of the output, of rank 4 or rank 3 of shape\n * `[batch, outHeight, outWidth, outDepth]`. If rank 3, batch of 1 is\n * assumed.\n * @param filter The filter, rank 4, of shape\n * `[filterHeight, filterWidth, inDepth, outDepth]`.\n * @param strides The strides of the convolution: `[strideHeight,\n * strideWidth]`.\n * @param pad The type of padding algorithm used:\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * @param dataFormat: An optional string from: \"NHWC\", \"NCHW\". Defaults to\n * \"NHWC\". Specify the data format of the input and output data. With the\n * default format \"NHWC\", the data is stored in the order of: [batch,\n * height, width, channels].\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. 
If none is provided, it will not round\n * and error if the output is of fractional size.\n */\nfunction conv2DBackpropInput_(xShape, dy, filter, strides, pad, dataFormat = 'NHWC', dimRoundingMode) {\n util.assert(xShape.length === dy.rank, () => `Length of inShape ` +\n `(${xShape.length}) and rank of dy (${dy.rank}) must match`);\n let xShape4D = xShape;\n let dy4D = dy;\n let reshapedTo4D = false;\n if (dy.rank === 3) {\n reshapedTo4D = true;\n dy4D = reshape(dy, [1, dy.shape[0], dy.shape[1], dy.shape[2]]);\n xShape4D = [1, xShape[0], xShape[1], xShape[2]];\n }\n util.assert(xShape4D.length === 4, () => `Error in conv2dDerInput: inShape must be length 4, but got length ` +\n `${xShape4D.length}.`);\n util.assert(dy4D.rank === 4, () => `Error in conv2dDerInput: dy must be rank 4, but got ` +\n `rank ${dy4D.rank}`);\n util.assert(filter.rank === 4, () => `Error in conv2dDerInput: filter must be rank 4, but got ` +\n `rank ${filter.rank}`);\n const inDepth = dataFormat === 'NHWC' ? xShape4D[3] : xShape4D[1];\n const outDepth = dataFormat === 'NHWC' ? dy4D.shape[3] : dy4D.shape[1];\n util.assert(inDepth === filter.shape[2], () => `Error in conv2dDerInput: depth of input (${inDepth}) must ` +\n `match input depth for filter ${filter.shape[2]}.`);\n util.assert(outDepth === filter.shape[3], () => `Error in conv2dDerInput: depth of output (${outDepth}) must ` +\n `match output depth for filter ${filter.shape[3]}.`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in conv2dDerInput: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const forward = (backend, save) => {\n const dilations = 1;\n const $dataFormat = conv_util.convertConv2DDataFormat(dataFormat);\n const convInfo = conv_util.computeConv2DInfo(xShape4D, filter.shape, strides, dilations, pad, dimRoundingMode, false, $dataFormat);\n const res = backend.conv2dDerInput(dy4D, filter, convInfo);\n save([dy4D, filter]);\n return res;\n };\n const inputs = { dy: dy4D, filter };\n const attrs = { strides, pad, dataFormat, dimRoundingMode, inputShape: xShape4D };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, Conv2DBackpropInput, attrs);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const conv2DBackpropInput = op({ conv2DBackpropInput_ });\n//# sourceMappingURL=conv2d_backprop_input.js.map", "import { convertToTensor } from '../tensor_util_env';\nimport { conv2DBackpropInput } from './conv2d_backprop_input';\nimport { op } from './operation';\n/**\n * Computes the transposed 2D convolution of an image, also known as a\n * deconvolution.\n *\n * @param x The input image, of rank 4 or rank 3, of shape\n * `[batch, height, width, inDepth]`. If rank 3, batch of 1 is assumed.\n * @param filter The filter, rank 4, of shape\n * `[filterHeight, filterWidth, outDepth, inDepth]`.\n * `inDepth` must match `inDepth` in `x`.\n * @param outputShape Output shape, of rank 4 or rank 3:\n * `[batch, height, width, outDepth]`. If rank 3, batch of 1 is assumed.\n * @param strides The strides of the original convolution:\n * `[strideHeight, strideWidth]`.\n * @param pad The type of padding algorithm used in the non-transpose version\n * of the op.\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. 
If none is provided, it will not round\n * and error if the output is of fractional size.\n *\n * @doc {heading: 'Operations', subheading: 'Convolution'}\n */\nfunction conv2dTranspose_(x, filter, outputShape, strides, pad, dimRoundingMode) {\n const $x = convertToTensor(x, 'x', 'conv2dTranspose');\n const $filter = convertToTensor(filter, 'filter', 'conv2dTranspose');\n return conv2DBackpropInput(outputShape, $x, $filter, strides, pad, 'NHWC', dimRoundingMode);\n}\nexport const conv2dTranspose = op({ conv2dTranspose_ });\n//# sourceMappingURL=conv2d_transpose.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Conv3D } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { eitherStridesOrDilationsAreOne } from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes a 3D convolution over the input x.\n *\n * @param x The input tensor, of rank 5 or rank 4, of shape\n * `[batch, depth, height, width, channels]`. If rank 4,\n * batch of 1 is assumed.\n * @param filter The filter, rank 5, of shape\n * `[filterDepth, filterHeight, filterWidth, inChannels, outChannels]`.\n * inChannels must match between input and filter.\n * @param strides The strides of the convolution: `[strideDepth, strideHeight,\n * strideWidth]`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dataFormat: An optional string from: \"NDHWC\", \"NCDHW\". Defaults to\n * \"NDHWC\". Specify the data format of the input and output data. With the\n * default format \"NDHWC\", the data is stored in the order of: [batch,\n * depth, height, width, channels]. Only \"NDHWC\" is currently supported.\n * @param dilations The dilation rates: `[dilationDepth, dilationHeight,\n * dilationWidth]` in which we sample input values across the height\n * and width dimensions in atrous convolution. Defaults to `[1, 1, 1]`.\n * If `dilations` is a single number, then\n * `dilationDepth == dilationHeight == dilationWidth`. 
If it is greater\n * than 1, then all values of `strides` must be 1.\n *\n * @doc {heading: 'Operations', subheading: 'Convolution'}\n */\nfunction conv3d_(x, filter, strides, pad, dataFormat = 'NDHWC', dilations = [1, 1, 1]) {\n const $x = convertToTensor(x, 'x', 'conv3d');\n const $filter = convertToTensor(filter, 'filter', 'conv3d');\n let x5D = $x;\n let reshapedTo5D = false;\n if ($x.rank === 4) {\n reshapedTo5D = true;\n x5D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2], $x.shape[3]]);\n }\n util.assert(x5D.rank === 5, () => `Error in conv3d: input must be rank 5, but got rank ${x5D.rank}.`);\n util.assert($filter.rank === 5, () => `Error in conv3d: filter must be rank 5, but got rank ` +\n `${$filter.rank}.`);\n util.assert(x5D.shape[4] === $filter.shape[3], () => `Error in conv3d: depth of input (${x5D.shape[4]}) must match ` +\n `input depth for filter ${$filter.shape[3]}.`);\n util.assert(eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in conv3D: Either strides or dilations must be 1. ' +\n `Got strides ${strides} and dilations '${dilations}'`);\n util.assert(dataFormat === 'NDHWC', () => `Error in conv3d: got dataFormat of ${dataFormat} but only NDHWC is currently supported.`);\n const forward = (backend, save) => {\n const convInfo = conv_util.computeConv3DInfo(x5D.shape, $filter.shape, strides, dilations, pad);\n const res = backend.conv3d(x5D, $filter, convInfo);\n save([x5D, $filter]);\n return res;\n };\n const inputs = { x: x5D, filter: $filter };\n const attrs = { strides, pad, dataFormat, dilations };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, Conv3D, attrs);\n if (reshapedTo5D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3], res.shape[4]]);\n }\n return res;\n}\nexport const conv3d = op({ conv3d_ });\n//# sourceMappingURL=conv3d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Conv3DBackpropInputV2 } from '../kernel_names';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the derivative of the input of a 3D convolution.\n *\n * @param xShape The shape of the input: [batch, depth, height, width,\n * in_channels]. 
If length of 4, batch of 1 is assumed.\n * @param dy The derivative of the output, of rank 5 or rank 4 of shape\n * `[batch, outDepth, outHeight, outWidth, in_channels]`.\n * If rank 4, batch of 1 is assumed.\n * @param filter The filter, rank 5, of shape\n * `[filterDepth, filterHeight, filterWidth, inDepth, outDepth]`.\n * @param strides The strides of the convolution: `[strideDepth, strideHeight,\n * strideWidth]`.\n * @param pad The type of padding algorithm used:\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n */\nfunction conv3DBackpropInput_(xShape, dy, filter, strides, pad) {\n util.assert(xShape.length === dy.rank, () => `Length of inShape ` +\n `(${xShape.length}) and rank of dy (${dy.rank}) must match`);\n let xShape5D = xShape;\n let dy5D = dy;\n let reshapedTo5D = false;\n if (dy.rank === 4) {\n reshapedTo5D = true;\n dy5D = reshape(dy, [1, dy.shape[0], dy.shape[1], dy.shape[2], dy.shape[3]]);\n xShape5D = [1, xShape[0], xShape[1], xShape[2], xShape[3]];\n }\n const inDepth = xShape5D[4];\n const outDepth = dy5D.shape[4];\n util.assert(xShape5D.length === 5, () => `Error in conv3dDerInput: inShape must be length 5, but got length ` +\n `${xShape5D.length}.`);\n util.assert(dy5D.rank === 5, () => `Error in conv3dDerInput: dy must be rank 5, but got ` +\n `rank ${dy5D.rank}`);\n util.assert(filter.rank === 5, () => `Error in conv3dDerInput: filter must be rank 5, but got ` +\n `rank ${filter.rank}`);\n util.assert(inDepth === filter.shape[3], () => `Error in conv3dDerInput: depth of input (${inDepth}) must ` +\n `match input depth for filter ${filter.shape[3]}.`);\n util.assert(outDepth === filter.shape[4], () => `Error in conv3dDerInput: depth of output (${outDepth}) must ` +\n `match output depth for filter ${filter.shape[4]}.`);\n const forward = backend => {\n const dilations = 1;\n const convInfo = conv_util.computeConv3DInfo(xShape5D, filter.shape, strides, dilations, pad);\n return backend.conv3dDerInput(dy5D, filter, convInfo);\n };\n const inputs = { dy: dy5D, filter };\n const attrs = { pad, strides, inputShape: xShape5D };\n const res = ENGINE.runKernelFunc(forward, inputs, null, Conv3DBackpropInputV2, attrs);\n if (reshapedTo5D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3], res.shape[4]]);\n }\n return res;\n}\nexport const conv3DBackpropInput = op({ conv3DBackpropInput_ });\n//# sourceMappingURL=conv3d_backprop_input.js.map", "import { convertToTensor } from '../tensor_util_env';\nimport { conv3DBackpropInput } from './conv3d_backprop_input';\nimport { op } from './operation';\n/**\n * Computes the transposed 3D convolution of a volume, also known as a\n * deconvolution.\n *\n * @param x The input image, of rank 5 or rank 4, of shape\n * `[batch, depth, height, width, inDepth]`. If rank 4, batch of 1 is assumed.\n * @param filter The filter, rank 4, of shape\n * `[depth, filterHeight, filterWidth, outDepth, inDepth]`.\n * `inDepth` must match `inDepth` in `x`.\n * @param outputShape Output shape, of rank 5 or rank 4:\n * `[batch, depth, height, width, outDepth]`. 
If rank 3, batch of 1 is\n * assumed.\n * @param strides The strides of the original convolution:\n * `[strideDepth, strideHeight, strideWidth]`.\n * @param pad The type of padding algorithm used in the non-transpose version\n * of the op.\n *\n * @doc {heading: 'Operations', subheading: 'Convolution'}\n */\nfunction conv3dTranspose_(x, filter, outputShape, strides, pad) {\n const $x = convertToTensor(x, 'x', 'conv3dTranspose');\n const $filter = convertToTensor(filter, 'filter', 'conv3dTranspose');\n return conv3DBackpropInput(outputShape, $x, $filter, strides, pad);\n}\nexport const conv3dTranspose = op({ conv3dTranspose_ });\n//# sourceMappingURL=conv3d_transpose.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Cos } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes cos of the input `tf.Tensor` element-wise: `cos(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, Math.PI / 2, Math.PI * 3 / 4]);\n *\n * x.cos().print(); // or tf.cos(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction cos_(x) {\n const $x = convertToTensor(x, 'x', 'cos');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.cos($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Cos);\n}\nexport const cos = op({ cos_ });\n//# sourceMappingURL=cos.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Cosh } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes hyperbolic cos of the input `tf.Tensor` element-wise: `cosh(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, .7]);\n *\n * x.cosh().print(); // or tf.cosh(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction cosh_(x) {\n const $x = convertToTensor(x, 'x', 'cosh');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.cosh($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Cosh);\n}\nexport const cosh = op({ cosh_ });\n//# sourceMappingURL=cosh.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Cumsum } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { getAxesPermutation, getInnerMostAxes, getUndoAxesPermutation } from './axis_util';\nimport { op } from './operation';\nimport { transpose } from './transpose';\n/**\n * Computes the cumulative sum of a `tf.Tensor` along `axis`.\n *\n * ```js\n * const x = tf.tensor([1, 2, 3, 4]);\n * x.cumsum().print();\n * ```\n * ```js\n * const x = tf.tensor([[1, 2], [3, 4]]);\n * x.cumsum().print();\n * ```\n *\n * @param x The input tensor to be summed.\n * @param axis The axis along which to sum. Optional. Defaults to 0.\n * @param exclusive Whether to perform exclusive cumulative sum. Optional.\n * Defaults to false. If set to true then the sum of each tensor entry\n * does not include its own value, but only the values previous to it\n * along the specified axis.\n * @param reverse Whether to sum in the opposite direction. 
Optional.\n * Defaults to false.\n *\n * @doc {heading: 'Operations', subheading: 'Scan'}\n */\nfunction cumsum_(x, axis = 0, exclusive = false, reverse = false) {\n const $x = convertToTensor(x, 'x', 'cumsum');\n const forward = (backend, save) => {\n const permutation = getAxesPermutation([axis], $x.rank);\n let permutedX = $x;\n if (permutation != null) {\n permutedX = transpose($x, permutation);\n }\n const permutedAxis = getInnerMostAxes(1, $x.rank)[0];\n let value = backend.cumsum(permutedX, permutedAxis, exclusive, reverse);\n save([$x]);\n if (permutation != null) {\n const reversePermutation = getUndoAxesPermutation(permutation);\n value = transpose(value, reversePermutation);\n }\n return value;\n };\n const inputs = { x: $x };\n const attrs = { axis, exclusive, reverse };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Cumsum, attrs);\n}\nexport const cumsum = op({ cumsum_ });\n//# sourceMappingURL=cumsum.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { DepthToSpace } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * Rearranges data from depth into blocks of spatial data. More specifically,\n * this op outputs a copy of the input tensor where values from the `depth`\n * dimension are moved in spatial blocks to the `height` and `width` dimensions.\n * The attr `blockSize` indicates the input block size and how the data is\n * moved.\n *\n * - Chunks of data of size `blockSize * blockSize` from depth are rearranged\n * into non-overlapping blocks of size `blockSize x blockSize`\n *\n * - The width the output tensor is `inputWidth * blockSize`, whereas the\n * height is `inputHeight * blockSize`\n *\n * - The Y, X coordinates within each block of the output image are determined\n * by the high order component of the input channel index\n *\n * - The depth of the input tensor must be divisible by `blockSize *\n * blockSize`\n *\n * The `dataFormat` attr specifies the layout of the input and output tensors\n * with the following options: \"NHWC\": [ `batch, height, width, channels` ]\n * \"NCHW\": [ `batch, channels, height, width` ]\n *\n * ```js\n * const x = tf.tensor4d([1, 2, 3, 4], [1, 1, 1, 4]);\n * const blockSize = 2;\n * const dataFormat = \"NHWC\";\n *\n * tf.depthToSpace(x, blockSize, dataFormat).print();\n * ```\n *\n * @param x The input tensor of rank 4\n * @param blockSIze An `int` that is `>= 2`. The size of the spatial block\n * @param dataFormat An optional string from: \"NHWC\", \"NCHW\". 
Defaults to \"NHWC\"\n *\n * @doc {heading: 'Tensors', subheading: 'Transformations'}\n */\nfunction depthToSpace_(x, blockSize, dataFormat = 'NHWC') {\n const $x = convertToTensor(x, 'x', 'depthToSpace');\n const inputHeight = (dataFormat === 'NHWC') ? $x.shape[1] : $x.shape[2];\n const inputWidth = (dataFormat === 'NHWC') ? $x.shape[2] : $x.shape[3];\n const inputDepth = (dataFormat === 'NHWC') ? $x.shape[3] : $x.shape[1];\n util.assert(inputHeight * blockSize >= 0, () => `Negative dimension size caused by overflow when multiplying\n ${inputHeight} and ${blockSize} for depthToSpace with input shape\n ${$x.shape}`);\n util.assert(inputWidth * blockSize >= 0, () => `Negative dimension size caused by overflow when multiplying\n ${inputWidth} and ${blockSize} for depthToSpace with input shape\n ${$x.shape}`);\n util.assert((inputDepth % (blockSize * blockSize) === 0), () => `Dimension size must be evenly divisible by ${blockSize * blockSize} but is ${inputDepth} for depthToSpace with input shape ${$x.shape}`);\n const forward = backend => backend.depthToSpace($x, blockSize, dataFormat);\n const inputs = { x: $x };\n const attrs = { blockSize, dataFormat };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, DepthToSpace, attrs);\n}\nexport const depthToSpace = op({ depthToSpace_ });\n//# sourceMappingURL=depth_to_space.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { DepthwiseConv2dNative } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Depthwise 2D convolution.\n *\n * Given a 4D `input` array and a `filter` array of shape\n * `[filterHeight, filterWidth, inChannels, channelMultiplier]` containing\n * `inChannels` convolutional filters of depth 1, this op applies a\n * different filter to each input channel (expanding from 1 channel to\n * `channelMultiplier` channels for each), then concatenates the results\n * together. The output has `inChannels * channelMultiplier` channels.\n *\n * See\n * [https://www.tensorflow.org/api_docs/python/tf/nn/depthwise_conv2d](\n * https://www.tensorflow.org/api_docs/python/tf/nn/depthwise_conv2d)\n * for more details.\n *\n * @param x The input tensor, of rank 4 or rank 3, of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is\n * assumed.\n * @param filter The filter tensor, rank 4, of shape\n * `[filterHeight, filterWidth, inChannels, channelMultiplier]`.\n * @param strides The strides of the convolution: `[strideHeight,\n * strideWidth]`. 
If strides is a single number, then `strideHeight ==\n * strideWidth`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dilations The dilation rates: `[dilationHeight, dilationWidth]`\n * in which we sample input values across the height and width dimensions\n * in atrous convolution. Defaults to `[1, 1]`. If `rate` is a single\n * number, then `dilationHeight == dilationWidth`. If it is greater than\n * 1, then all values of `strides` must be 1.\n * @param dataFormat: An optional string from: \"NHWC\", \"NCHW\". Defaults to\n * \"NHWC\". Specify the data format of the input and output data. With the\n * default format \"NHWC\", the data is stored in the order of: [batch,\n * height, width, channels]. Only \"NHWC\" is currently supported.\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. If none is provided, it will not round\n * and error if the output is of fractional size.\n *\n * @doc {heading: 'Operations', subheading: 'Convolution'}\n */\nfunction depthwiseConv2d_(x, filter, strides, pad, dataFormat = 'NHWC', dilations = [1, 1], dimRoundingMode) {\n const $x = convertToTensor(x, 'x', 'depthwiseConv2d');\n const $filter = convertToTensor(filter, 'filter', 'depthwiseConv2d');\n let x4D = $x;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n reshapedTo4D = true;\n x4D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2]]);\n }\n util.assert(x4D.rank === 4, () => `Error in depthwiseConv2d: input must be rank 4, but got ` +\n `rank ${x4D.rank}.`);\n util.assert($filter.rank === 4, () => `Error in depthwiseConv2d: filter must be rank 4, but got rank ` +\n `${$filter.rank}.`);\n util.assert(x4D.shape[3] === $filter.shape[2], () => `Error in depthwiseConv2d: number of input channels ` +\n `(${x4D.shape[3]}) must match the inChannels dimension in ` +\n `filter ${$filter.shape[2]}.`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in depthwiseConv2d: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const forward = (backend, save) => {\n if (dilations == null) {\n dilations = [1, 1];\n }\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in depthwiseConv2d: Either strides or dilations must be ' +\n `1. Got strides ${strides} and dilations '${dilations}'`);\n const convInfo = conv_util.computeConv2DInfo(x4D.shape, $filter.shape, strides, dilations, pad, dimRoundingMode, true /* depthwise */);\n const res = backend.depthwiseConv2D(x4D, $filter, convInfo);\n save([x4D, $filter]);\n return res;\n };\n const inputs = { x: x4D, filter: $filter };\n const attrs = { strides, pad, dataFormat, dilations, dimRoundingMode };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, DepthwiseConv2dNative, attrs);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const depthwiseConv2d = op({ depthwiseConv2d_ });\n//# sourceMappingURL=depthwise_conv2d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Diag } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Returns a diagonal tensor with given diagonal values.\n *\n * Given a diagonal, this operation returns a tensor with the diagonal and\n * everything else padded with zeros.\n *\n * Assume the input has dimensions `[D1,..., Dk]`, then the output is a tensor\n * of rank 2k with dimensions `[D1,..., Dk, D1,..., Dk]`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4]);\n *\n * tf.diag(x).print()\n * ```\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4, 5, 6, 6, 8], [4, 2])\n *\n * tf.diag(x).print()\n * ```\n * @param x The input tensor.\n */\nfunction diag_(x) {\n const $x = convertToTensor(x, 'x', 'diag');\n const forward = backend => {\n const flat = reshape($x, [$x.size]);\n const result = backend.diag(flat);\n const outShape = [...x.shape, ...x.shape];\n return reshape(result, outShape);\n };\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Diag);\n}\nexport const diag = op({ diag_ });\n//# sourceMappingURL=diag.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Dilation2D } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the grayscale dilation over the input `x`.\n *\n * @param x The input tensor, rank 3 or rank 4 of shape\n * `[batch, height, width, inChannels]`. 
If rank 3, batch of 1 is assumed.\n * @param filter The filter tensor, rank 3, of shape\n * `[filterHeight, filterWidth, depth]`.\n * @param strides The strides of the sliding window for each dimension of the\n * input tensor: `[strideHeight, strideWidth]`.\n * If `strides` is a single number,\n * then `strideHeight == strideWidth`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dataFormat Specify the data format of the input and output data.\n * Defaults to 'NHWC'. Only 'NHWC' is currently supported. With the\n * default format \"NHWC\", the data is stored in the order of: [batch,\n * height, width, channels].\n * @param dilations The dilation rates: `[dilationHeight, dilationWidth]`\n * in which we sample input values across the height and width dimensions\n * for atrous morphological dilation. Defaults to `[1, 1]`. If `dilations`\n * is a single number, then `dilationHeight == dilationWidth`. If it is\n * greater than 1, then all values of `strides` must be 1.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction dilation2d_(x, filter, strides, pad, dilations = [1, 1], dataFormat = 'NHWC') {\n const $x = convertToTensor(x, 'x', 'dilation2d');\n const $filter = convertToTensor(filter, 'filter', 'dilation2d');\n util.assert($x.rank === 3 || $x.rank === 4, () => `Error in dilation2d: input must be rank 3 or 4, but got rank ` +\n `${$x.rank}.`);\n util.assert($filter.rank === 3, () => `Error in dilation2d: filter must be rank 3, but got rank ` +\n `${$filter.rank}.`);\n util.assert(dataFormat === 'NHWC', () => `Error in dilation2d: Only NHWC is currently supported, ` +\n `but got dataFormat of ${dataFormat}`);\n let x4D = $x;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n x4D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2]]);\n reshapedTo4D = true;\n }\n const inputs = { x: x4D, filter: $filter };\n const attrs = { strides, pad, dilations };\n const res = ENGINE.runKernel(Dilation2D, inputs, attrs);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const dilation2d = op({ dilation2d_ });\n//# sourceMappingURL=dilation2d.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * Returns the dimensions in the input shape that are broadcasted to\n * produce the provided output shape.\n *\n * The returned dimensions are 0-indexed and sorted. An example:\n * inShape = [4, 1, 3]\n * outShape = [5, 4, 3, 3]\n * result = [1]. 
Dimension 1 (2nd dimension of input) gets broadcasted 1 => 3.\n */\nexport function getBroadcastDims(inShape, outShape) {\n const inRank = inShape.length;\n const dims = [];\n for (let i = 0; i < inRank; i++) {\n const dim = inRank - 1 - i;\n const a = inShape[dim] || 1;\n const b = outShape[outShape.length - 1 - i] || 1;\n if (b > 1 && a === 1) {\n dims.unshift(dim);\n }\n }\n return dims;\n}\n/**\n * Returns the axes in the output space that should be reduced to produce\n * the input space.\n */\nexport function getReductionAxes(inShape, outShape) {\n const result = [];\n for (let i = 0; i < outShape.length; i++) {\n const inDim = inShape[inShape.length - i - 1];\n const outAxis = outShape.length - i - 1;\n const outDim = outShape[outAxis];\n if (inDim == null || (inDim === 1 && outDim > 1)) {\n result.unshift(outAxis);\n }\n }\n return result;\n}\nexport function assertAndGetBroadcastShape(shapeA, shapeB) {\n const result = [];\n const l = Math.max(shapeA.length, shapeB.length);\n for (let i = 0; i < l; i++) {\n let a = shapeA[shapeA.length - i - 1];\n if (a == null) {\n a = 1;\n }\n let b = shapeB[shapeB.length - i - 1];\n if (b == null) {\n b = 1;\n }\n if (a === 1) {\n result.unshift(b);\n }\n else if (b === 1) {\n result.unshift(a);\n }\n else if (a !== b) {\n const errMsg = `Operands could not be broadcast together with shapes ` +\n `${shapeA} and ${shapeB}.`;\n throw Error(errMsg);\n }\n else {\n result.unshift(a);\n }\n }\n return result;\n}\n//# sourceMappingURL=broadcast_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Equal } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { op } from './operation';\n/**\n * Returns the truth value of (a == b) element-wise. Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3]);\n * const b = tf.tensor1d([2, 2, 2]);\n *\n * a.equal(b).print();\n * ```\n *\n * @param a The first input tensor.\n * @param b The second input tensor. Must have the same dtype as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Logical'}\n */\nfunction equal_(a, b) {\n let $a = convertToTensor(a, 'a', 'equal');\n let $b = convertToTensor(b, 'b', 'equal');\n [$a, $b] = makeTypesMatch($a, $b);\n assertAndGetBroadcastShape($a.shape, $b.shape);\n const forward = backend => backend.equal($a, $b);\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null, Equal);\n}\nexport const equal = op({ equal_ });\n//# sourceMappingURL=equal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { SelectV2 } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assert, assertShapesMatch } from '../util';\nimport { broadcastTo } from './broadcast_to';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { op } from './operation';\n/**\n * Returns the elements, either `a` or `b` depending on the `condition`.\n *\n * If the condition is true, select from `a`, otherwise select from `b`.\n *\n * ```js\n * const cond = tf.tensor1d([false, false, true], 'bool');\n * const a = tf.tensor1d([1 , 2, 3]);\n * const b = tf.tensor1d([-1, -2, -3]);\n *\n * a.where(cond, b).print();\n * ```\n *\n * @param condition The input condition. Must be of dtype bool.\n * @param a If `condition` is rank 1, `a` may have a higher rank but\n * its first dimension must match the size of `condition`.\n * @param b A tensor with the same dtype as `a` and with shape that is\n * compatible with `a`.\n * @return A tensor with same dtype as `a` and `b`, and shape that is\n * broadcastable from `a` and `b`.\n *\n * @doc {heading: 'Operations', subheading: 'Logical'}\n */\nfunction where_(condition, a, b) {\n const $a = convertToTensor(a, 'a', 'where');\n const $b = convertToTensor(b, 'b', 'where');\n const $condition = convertToTensor(condition, 'condition', 'where', 'bool');\n // TODO: move this logic to forward function when the broadcastTo op is\n // implemented in WASM.\n // Find the broadcastable shape for $a and $b.\n const broadcastShape = assertAndGetBroadcastShape($a.shape, $b.shape);\n const $broadcastedA = broadcastTo($a, broadcastShape);\n const $broadcastedB = broadcastTo($b, broadcastShape);\n if ($condition.rank === 1) {\n // If condition rank is 1, then the first dimension must match the size of\n // condition.\n assert($condition.shape[0] === $a.shape[0], () => 'The first dimension of `a` must match the size of `condition`.');\n }\n if ($condition.rank !== 1) {\n // A must have the same shape as condition.\n assertShapesMatch($condition.shape, $broadcastedB.shape, 'Error in where: ');\n }\n const forward = (backend, save) => {\n const res = backend.select($condition, $broadcastedA, $broadcastedB);\n save([$condition]);\n return res;\n };\n const inputs = {\n condition: $condition,\n t: $broadcastedA,\n e: $broadcastedB\n };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, SelectV2);\n}\nexport const where = op({ where_ });\n//# sourceMappingURL=where.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { ZerosLike } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Creates a `tf.Tensor` with all elements set to 0 with the same shape as the\n * given tensor.\n *\n * ```js\n * const x = tf.tensor([1, 2]);\n * tf.zerosLike(x).print();\n * ```\n *\n * @param x The tensor of required shape.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nfunction zerosLike_(x) {\n const $x = convertToTensor(x, 'x', 'zerosLike');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(backend => backend.zerosLike($x), inputs, null /* grad */, ZerosLike);\n}\nexport const zerosLike = op({ zerosLike_ });\n//# sourceMappingURL=zeros_like.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { div } from './div';\nimport { equal } from './equal';\nimport { op } from './operation';\nimport { where } from './where';\nimport { zerosLike } from './zeros_like';\n/**\n * Divides two `tf.Tensor`s element-wise, A / B. Supports broadcasting. Return 0\n * if denominator is 0.\n *\n *\n * ```js\n * const a = tf.tensor1d([1, 4, 9, 16]);\n * const b = tf.tensor1d([1, 2, 3, 4]);\n * const c = tf.tensor1d([0, 0, 0, 0]);\n *\n * a.divNoNan(b).print(); // or tf.divNoNan(a, b)\n * a.divNoNan(c).print(); // or tf.divNoNan(a, c)\n * ```\n *\n * ```js\n * // Broadcast div a with b.\n * const a = tf.tensor1d([2, 4, 6, 8]);\n * const b = tf.scalar(2);\n * const c = tf.scalar(0);\n *\n * a.divNoNan(b).print(); // or tf.divNoNan(a, b)\n * a.divNoNan(c).print(); // or tf.divNoNan(a, c)\n * ```\n *\n * @param a The first tensor as the numerator.\n * @param b The second tensor as the denominator. 
Must have the same dtype as\n * `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction divNoNan_(a, b) {\n // TODO: Make this into its own kernel.\n let $a = convertToTensor(a, 'a', 'div');\n let $b = convertToTensor(b, 'b', 'div');\n [$a, $b] = makeTypesMatch($a, $b);\n const divResult = div($a, $b);\n const zeros = zerosLike(divResult);\n const bEqualsZero = equal($b, zeros);\n return where(bEqualsZero, zeros, divResult);\n}\nexport const divNoNan = op({ divNoNan_ });\n//# sourceMappingURL=div_no_nan.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { matMul } from './mat_mul';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the dot product of two matrices and/or vectors, `t1` and `t2`.\n *\n * ```js\n * const a = tf.tensor1d([1, 2]);\n * const b = tf.tensor2d([[1, 2], [3, 4]]);\n * const c = tf.tensor2d([[1, 2, 3], [4, 5, 6]]);\n *\n * a.dot(b).print(); // or tf.dot(a, b)\n * b.dot(a).print();\n * b.dot(c).print();\n * ```\n * @param t1 The first tensor in the dot operation.\n * @param t2 The second tensor in the dot operation.\n *\n * @doc {heading: 'Operations', subheading: 'Matrices'}\n */\nfunction dot_(t1, t2) {\n const $t1 = convertToTensor(t1, 't1', 'dot');\n const $t2 = convertToTensor(t2, 't2', 'dot');\n util.assert(($t1.rank === 1 || $t1.rank === 2) && ($t2.rank === 1 || $t2.rank === 2), () => `Error in dot: inputs must all be rank 1 or 2, but got ranks ` +\n `${$t1.rank} and ${$t2.rank}.`);\n const t1Inner = ($t1.rank === 1 ? $t1.size : $t1.shape[1]);\n const t2Inner = ($t2.rank === 1 ? $t2.size : $t2.shape[0]);\n util.assert(t1Inner === t2Inner, () => `Error in dot: inner dimensions of inputs must match, but got ` +\n `${t1Inner} and ${t2Inner}.`);\n if ($t1.rank === 1 && $t2.rank === 1) {\n const t12D = reshape($t1, [1, -1]);\n const t22D = reshape($t2, [-1, 1]);\n const t1t2 = matMul(t12D, t22D);\n return reshape(t1t2, []);\n }\n else if ($t1.rank === 1 && $t2.rank === 2) {\n const t12D = reshape($t1, [1, -1]);\n const t22D = reshape($t2, [$t2.shape[0], $t2.shape[1]]);\n const t1t2 = matMul(t12D, t22D);\n return reshape(t1t2, [t1t2.size]);\n }\n else if ($t1.rank === 2 && $t2.rank === 1) {\n const t22D = reshape($t2, [-1, 1]);\n const t1t2 = matMul($t1, t22D);\n return reshape(t1t2, [t1t2.size]);\n }\n else {\n const t22D = reshape($t2, [$t2.shape[0], $t2.shape[1]]);\n const t1t2 = matMul($t1, t22D);\n return t1t2;\n }\n}\nexport const dot = op({ dot_ });\n//# sourceMappingURL=dot.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Elu } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes exponential linear element-wise: `x > 0 ? x : (e ^ x) - 1`.\n *\n * ```js\n * const x = tf.tensor1d([-1, 1, -3, 2]);\n *\n * x.elu().print(); // or tf.elu(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction elu_(x) {\n const $x = convertToTensor(x, 'x', 'elu');\n const forward = (backend, save) => {\n const y = backend.elu($x);\n save([y]);\n return y;\n };\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Elu);\n}\nexport const elu = op({ elu_ });\n//# sourceMappingURL=elu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Erf } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { cast } from './cast';\nimport { op } from './operation';\n/**\n * Computes Gauss error function of the input `tf.Tensor` element-wise:\n * `erf(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, .1, -.1, .7]);\n *\n * x.erf().print(); // or tf.erf(x);\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction erf_(x) {\n let $x = convertToTensor(x, 'x', 'erf');\n util.assert($x.dtype === 'int32' || $x.dtype === 'float32', () => 'Input dtype must be `int32` or `float32`.');\n if ($x.dtype === 'int32') {\n $x = cast($x, 'float32');\n }\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.erf($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Erf);\n}\nexport const erf = op({ erf_ });\n//# sourceMappingURL=erf.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Exp } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes exponential of the input `tf.Tensor` element-wise. `e ^ x`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, -3]);\n *\n * x.exp().print(); // or tf.exp(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction exp_(x) {\n const $x = convertToTensor(x, 'x', 'exp');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.exp($x);\n save([res]);\n return res;\n }, inputs, null /* grad */, Exp);\n}\nexport const exp = op({ exp_ });\n//# sourceMappingURL=exp.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Returns a `tf.Tensor` that has expanded rank, by inserting a dimension\n * into the tensor's shape.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4]);\n * const axis = 1;\n * x.expandDims(axis).print();\n * ```\n *\n * @param x The input tensor whose dimensions to be expanded.\n * @param axis The dimension index at which to insert shape of `1`. Defaults\n * to 0 (the first dimension).\n *\n * @doc {heading: 'Tensors', subheading: 'Transformations'}\n */\nfunction expandDims_(x, axis = 0) {\n const parseAs = null;\n const $x = convertToTensor(x, 'x', 'expandDims', parseAs);\n util.assert(axis <= $x.rank, () => 'Axis must be <= rank of the tensor');\n const newShape = $x.shape.slice();\n if (axis < 0) {\n // Negative value is counted from the tail of rank.\n util.assert(-($x.rank + 1) <= axis, () => `Axis must be in the interval [${-($x.rank + 1)}, ${$x.rank}]`);\n axis = $x.rank + axis + 1;\n }\n newShape.splice(axis, 0, 1);\n return reshape($x, newShape);\n}\nexport const expandDims = op({ expandDims_ });\n//# sourceMappingURL=expand_dims.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Expm1 } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes exponential of the input `tf.Tensor` minus one element-wise.\n * `e ^ x - 1`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, -3]);\n *\n * x.expm1().print(); // or tf.expm1(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction expm1_(x) {\n const $x = convertToTensor(x, 'x', 'expm1');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.expm1($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Expm1);\n}\nexport const expm1 = op({ expm1_ });\n//# sourceMappingURL=expm1.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Tile } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * Construct a tensor by repeating it the number of times given by reps.\n *\n * This operation creates a new tensor by replicating `input` `reps`\n * times. The output tensor's i'th dimension has `input.shape[i] *\n * reps[i]` elements, and the values of `input` are replicated\n * `reps[i]` times along the i'th dimension. 
For example, tiling\n * `[a, b, c, d]` by `[2]` produces `[a, b, c, d, a, b, c, d]`.\n *\n * ```js\n * const a = tf.tensor1d([1, 2]);\n *\n * a.tile([2]).print(); // or a.tile([2])\n * ```\n *\n * ```js\n * const a = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * a.tile([1, 2]).print(); // or a.tile([1, 2])\n * ```\n * @param x The tensor to tile.\n * @param reps Determines the number of replications per dimension.\n *\n * @doc {heading: 'Tensors', subheading: 'Slicing and Joining'}\n */\nfunction tile_(x, reps) {\n const parseAs = null;\n const $x = convertToTensor(x, 'x', 'tile', parseAs);\n util.assert($x.rank === reps.length, () => `Error in transpose: rank of input ${$x.rank} ` +\n `must match length of reps ${reps}.`);\n const forward = (backend, save) => {\n const res = backend.tile($x, reps);\n save([$x]);\n return res;\n };\n const inputsToSave = [$x];\n const inputs = { x: $x };\n const attrs = { reps };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Tile, attrs, inputsToSave);\n}\nexport const tile = op({ tile_ });\n//# sourceMappingURL=tile.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { buffer } from './buffer';\nimport { expandDims } from './expand_dims';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { tile } from './tile';\n/**\n * Create an identity matrix.\n *\n * @param numRows Number of rows.\n * @param numColumns Number of columns. Defaults to `numRows`.\n * @param batchShape If provided, will add the batch shape to the beginning\n * of the shape of the returned `tf.Tensor` by repeating the identity\n * matrix.\n * @param dtype Data type.\n * @returns Identity matrix of the specified size and data type, possibly\n * with batch repetition if `batchShape` is specified.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nfunction eye_(numRows, numColumns, batchShape, dtype = 'float32') {\n if (numColumns == null) {\n numColumns = numRows;\n }\n const buff = buffer([numRows, numColumns], dtype);\n const n = numRows <= numColumns ? 
numRows : numColumns;\n for (let i = 0; i < n; ++i) {\n buff.set(1, i, i);\n }\n const out = reshape(buff.toTensor(), [numRows, numColumns]);\n if (batchShape == null) {\n return out;\n }\n else {\n if (batchShape.length === 1) {\n return tile(expandDims(out, 0), [batchShape[0], 1, 1]);\n }\n else if (batchShape.length === 2) {\n // tslint:disable-next-line:no-unnecessary-type-assertion\n return tile(expandDims(expandDims(out, 0), 0), [batchShape[0], batchShape[1], 1, 1]);\n }\n else if (batchShape.length === 3) {\n // tslint:disable-next-line:no-unnecessary-type-assertion\n return tile(expandDims(expandDims(expandDims(out, 0), 0), 0), [\n batchShape[0], batchShape[1], batchShape[2], 1, 1\n ]);\n }\n else {\n throw new Error(`eye() currently supports only 1D and 2D ` +\n // tslint:disable-next-line:no-any\n `batchShapes, but received ${batchShape.length}D.`);\n }\n }\n}\nexport const eye = op({ eye_ });\n//# sourceMappingURL=eye.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Fill } from '../kernel_names';\n/**\n * Creates a `tf.Tensor` filled with a scalar value.\n *\n * ```js\n * tf.fill([2, 2], 4).print();\n * ```\n *\n * @param shape An array of integers defining the output tensor shape.\n * @param value The scalar value to fill the tensor with.\n * @param dtype The type of an element in the resulting tensor. Defaults to\n * 'float'.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nfunction fill(shape, value, dtype) {\n const attrs = { shape, value, dtype };\n return ENGINE.runKernelFunc(backend => backend.fill(shape, value, dtype), {}, null, Fill, attrs);\n}\nexport { fill };\n//# sourceMappingURL=fill.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Floor } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes floor of input `tf.Tensor` element-wise: `floor(x)`.\n *\n * ```js\n * const x = tf.tensor1d([.6, 1.1, -3.3]);\n *\n * x.floor().print(); // or tf.floor(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction floor_(x) {\n const $x = convertToTensor(x, 'x', 'floor');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(backend => backend.floor($x), inputs, null /* grad */, Floor);\n}\nexport const floor = op({ floor_ });\n//# sourceMappingURL=floor.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * Inputs of size above this threshold will be parallelized by calling multiple\n * shader programs.\n */\nimport { nearestDivisor } from '../util';\nexport const PARALLELIZE_THRESHOLD = 30;\nexport function computeOptimalWindowSize(inSize) {\n if (inSize <= PARALLELIZE_THRESHOLD) {\n return inSize;\n }\n return nearestDivisor(inSize, Math.floor(Math.sqrt(inSize)));\n}\n//# sourceMappingURL=reduce_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { nearestDivisor } from '../util';\nimport { PARALLELIZE_THRESHOLD } from './reduce_util';\nexport function segOpComputeOptimalWindowSize(inSize, numSegments) {\n let done = false;\n let res;\n if (inSize <= PARALLELIZE_THRESHOLD) {\n res = inSize;\n done = true;\n }\n else {\n res = nearestDivisor(inSize, Math.floor(Math.sqrt(inSize)));\n }\n while (!done) {\n if (res > numSegments || res === inSize) {\n done = true;\n }\n else {\n res = nearestDivisor(inSize, res + 1);\n }\n }\n return res;\n}\nexport function computeOutShape(aShape, axis, numSegments) {\n const outShape = [];\n const rank = aShape.length;\n for (let dim = 0; dim < rank; dim++) {\n if (dim !== axis) {\n outShape.push(aShape[dim]);\n }\n else {\n outShape.push(numSegments);\n }\n }\n return outShape;\n}\nexport function collectGatherOpShapeInfo(x, indices, axis) {\n const dimSize = x.shape[axis];\n const outputShape = [];\n let batchSize = 1;\n let sliceSize = 1;\n for (let i = 0; i < axis; i++) {\n outputShape.push(x.shape[i]);\n batchSize *= x.shape[i];\n }\n for (let i = 0; i < indices.rank; i++) {\n outputShape.push(indices.shape[i]);\n }\n for (let i = axis + 1; i < x.rank; i++) {\n outputShape.push(x.shape[i]);\n sliceSize *= x.shape[i];\n }\n return { batchSize, sliceSize, dimSize, outputShape };\n}\n//# sourceMappingURL=segment_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { GatherV2 } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam } from '../util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { collectGatherOpShapeInfo } from './segment_util';\n/**\n * Gather slices from tensor `x`'s axis `axis` according to `indices`.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4]);\n * const indices = tf.tensor1d([1, 3, 3], 'int32');\n *\n * x.gather(indices).print();\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n * const indices = tf.tensor1d([1, 1, 0], 'int32');\n *\n * x.gather(indices).print();\n * ```\n * @param x The input tensor whose slices to be gathered.\n * @param indices The indices of the values to extract.\n * @param axis The axis over which to select values. Defaults to 0.\n *\n * @doc {heading: 'Tensors', subheading: 'Slicing and Joining'}\n */\nfunction gather_(x, indices, axis = 0) {\n const $x = convertToTensor(x, 'x', 'gather');\n const $indices = convertToTensor(indices, 'indices', 'gather', 'int32');\n const inputs = { x: $x, indices: $indices };\n const attrs = { axis };\n const forward = (backend, save) => {\n const parsedAxis = parseAxisParam(axis, $x.shape)[0];\n const shapeInfo = collectGatherOpShapeInfo($x, $indices, parsedAxis);\n const res = backend.gather($x, reshape($indices, [$indices.size]), parsedAxis);\n save([$x, $indices]);\n return reshape(res, shapeInfo.outputShape);\n };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, GatherV2, attrs);\n}\nexport const gather = op({ gather_ });\n//# sourceMappingURL=gather.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Greater } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { op } from './operation';\n/**\n * Returns the truth value of (a > b) element-wise. 
Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3]);\n * const b = tf.tensor1d([2, 2, 2]);\n *\n * a.greater(b).print();\n * ```\n *\n * @param a The first input tensor.\n * @param b The second input tensor. Must have the same dtype as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Logical'}\n */\nfunction greater_(a, b) {\n let $a = convertToTensor(a, 'a', 'greater');\n let $b = convertToTensor(b, 'b', 'greater');\n [$a, $b] = makeTypesMatch($a, $b);\n assertAndGetBroadcastShape($a.shape, $b.shape);\n const forward = backend => backend.greater($a, $b);\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Greater);\n}\nexport const greater = op({ greater_ });\n//# sourceMappingURL=greater.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { GreaterEqual } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { op } from './operation';\n/**\n * Returns the truth value of (a >= b) element-wise. Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3]);\n * const b = tf.tensor1d([2, 2, 2]);\n *\n * a.greaterEqual(b).print();\n * ```\n *\n * @param a The first input tensor.\n * @param b The second input tensor. Must have the same dtype as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Logical'}\n */\nfunction greaterEqual_(a, b) {\n let $a = convertToTensor(a, 'a', 'greaterEqual');\n let $b = convertToTensor(b, 'b', 'greaterEqual');\n [$a, $b] = makeTypesMatch($a, $b);\n assertAndGetBroadcastShape($a.shape, $b.shape);\n const forward = (backend, save) => {\n const res = backend.greaterEqual($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, GreaterEqual);\n}\nexport const greaterEqual = op({ greaterEqual_ });\n//# sourceMappingURL=greater_equal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Imag } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Returns the imaginary part of a complex (or real) tensor.\n *\n * Given a tensor input, this operation returns a tensor of type float that is\n * the imaginary part of each element in input considered as a complex number.\n * If input is real, a tensor of all zeros is returned.\n *\n * ```js\n * const x = tf.complex([-2.25, 3.25], [4.75, 5.75]);\n * tf.imag(x).print();\n * ```\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nfunction imag_(input) {\n const $input = convertToTensor(input, 'input', 'imag');\n const forward = (backend) => {\n return backend.imag($input);\n };\n const inputs = { input: $input };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Imag);\n}\nexport const imag = op({ imag_ });\n//# sourceMappingURL=imag.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { IsFinite } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Returns which elements of x are finite.\n *\n * ```js\n * const x = tf.tensor1d([NaN, Infinity, -Infinity, 0, 1]);\n *\n * x.isFinite().print(); // or tf.isFinite(x)\n * ```\n * @param x The input Tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction isFinite_(x) {\n const $x = convertToTensor(x, 'x', 'isFinite');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend) => backend.isFinite($x), inputs, null /* grad */, IsFinite);\n}\nexport const isFinite = op({ isFinite_ });\n//# sourceMappingURL=is_finite.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { IsInf } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Returns which elements of x are Infinity or -Infinity.\n *\n * ```js\n * const x = tf.tensor1d([NaN, Infinity, -Infinity, 0, 1]);\n *\n * x.isInf().print(); // or tf.isInf(x)\n * ```\n * @param x The input Tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction isInf_(x) {\n const $x = convertToTensor(x, 'x', 'isInf');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend) => backend.isInf($x), inputs, null /* grad */, IsInf);\n}\nexport const isInf = op({ isInf_ });\n//# sourceMappingURL=is_inf.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { IsNan } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Returns which elements of x are NaN.\n *\n * ```js\n * const x = tf.tensor1d([NaN, Infinity, -Infinity, 0, 1]);\n *\n * x.isNaN().print(); // or tf.isNaN(x)\n * ```\n * @param x The input Tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction isNaN_(x) {\n const $x = convertToTensor(x, 'x', 'isNaN');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(backend => backend.isNaN($x), inputs, null /* grad */, IsNan);\n}\nexport const isNaN = op({ isNaN_ });\n//# sourceMappingURL=is_nan.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Maximum } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { cast } from './cast';\nimport { op } from './operation';\n/**\n * Returns the max of a and b (`a > b ? a : b`) element-wise.\n * Supports broadcasting.\n *\n * We also expose `tf.maximumStrict` which has the same signature as this op and\n * asserts that `a` and `b` are the same shape (does not broadcast).\n *\n * ```js\n * const a = tf.tensor1d([1, 4, 3, 16]);\n * const b = tf.tensor1d([1, 2, 9, 4]);\n *\n * a.maximum(b).print(); // or tf.maximum(a, b)\n * ```\n *\n * ```js\n * // Broadcast maximum a with b.\n * const a = tf.tensor1d([2, 4, 6, 8]);\n * const b = tf.scalar(5);\n *\n * a.maximum(b).print(); // or tf.maximum(a, b)\n * ```\n *\n * @param a The first tensor.\n * @param b The second tensor. Must have the same type as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction maximum_(a, b) {\n let $a = convertToTensor(a, 'a', 'maximum');\n let $b = convertToTensor(b, 'b', 'maximum');\n [$a, $b] = makeTypesMatch($a, $b);\n if ($a.dtype === 'bool') {\n $a = cast($a, 'int32');\n $b = cast($b, 'int32');\n }\n assertAndGetBroadcastShape($a.shape, $b.shape);\n const forward = (backend, save) => {\n const res = backend.maximum($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Maximum);\n}\nexport const maximum = op({ maximum_ });\n//# sourceMappingURL=maximum.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { isTypedArray } from '../util';\nimport { makeTensor } from './tensor_ops_util';\n/**\n * Creates rank-0 `tf.Tensor` (scalar) with the provided value and dtype.\n *\n * The same functionality can be achieved with `tf.tensor`, but in general\n * we recommend using `tf.scalar` as it makes the code more readable.\n *\n * ```js\n * tf.scalar(3.14).print();\n * ```\n *\n * @param value The value of the scalar.\n * @param dtype The data type.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function scalar(value, dtype) {\n if (((isTypedArray(value) && dtype !== 'string') || Array.isArray(value)) &&\n dtype !== 'complex64') {\n throw new Error('Error creating a new Scalar: value must be a primitive ' +\n '(number|boolean|string)');\n }\n if (dtype === 'string' && isTypedArray(value) &&\n !(value instanceof Uint8Array)) {\n throw new Error('When making a scalar from encoded string, ' +\n 'the value must be `Uint8Array`.');\n }\n const shape = [];\n const inferredShape = [];\n return makeTensor(value, shape, inferredShape, dtype);\n}\n//# sourceMappingURL=scalar.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport { maximum } from './maximum';\nimport { mul } from './mul';\nimport { op } from './operation';\nimport { scalar } from './scalar';\n/**\n * Computes leaky rectified linear element-wise.\n *\n * See\n * [http://web.stanford.edu/~awni/papers/relu_hybrid_icml2013_final.pdf](\n * http://web.stanford.edu/~awni/papers/relu_hybrid_icml2013_final.pdf)\n *\n * ```js\n * const x = tf.tensor1d([-1, 2, -3, 4]);\n *\n * x.leakyRelu(0.1).print(); // or tf.leakyRelu(x, 0.1)\n * ```\n * @param x The input tensor.\n * @param alpha The scaling factor for negative values, defaults to 0.2.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction leakyRelu_(x, alpha = 0.2) {\n const $x = convertToTensor(x, 'x', 'leakyRelu');\n return maximum(mul(scalar(alpha), $x), $x);\n}\nexport const leakyRelu = op({ leakyRelu_ });\n//# sourceMappingURL=leaky_relu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Less } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { op } from './operation';\n/**\n * Returns the truth value of (a < b) element-wise. Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3]);\n * const b = tf.tensor1d([2, 2, 2]);\n *\n * a.less(b).print();\n * ```\n * @param a The first input tensor.\n * @param b The second input tensor. Must have the same dtype as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Logical'}\n */\nfunction less_(a, b) {\n let $a = convertToTensor(a, 'a', 'less');\n let $b = convertToTensor(b, 'b', 'less');\n [$a, $b] = makeTypesMatch($a, $b);\n assertAndGetBroadcastShape($a.shape, $b.shape);\n const forward = backend => backend.less($a, $b);\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Less);\n}\nexport const less = op({ less_ });\n//# sourceMappingURL=less.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { LessEqual } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { op } from './operation';\n/**\n * Returns the truth value of (a <= b) element-wise. Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3]);\n * const b = tf.tensor1d([2, 2, 2]);\n *\n * a.lessEqual(b).print();\n * ```\n *\n * @param a The first input tensor.\n * @param b The second input tensor. 
Must have the same dtype as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Logical'}\n */\nfunction lessEqual_(a, b) {\n let $a = convertToTensor(a, 'a', 'lessEqual');\n let $b = convertToTensor(b, 'b', 'lessEqual');\n [$a, $b] = makeTypesMatch($a, $b);\n assertAndGetBroadcastShape($a.shape, $b.shape);\n const forward = (backend, save) => {\n const res = backend.lessEqual($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, LessEqual);\n}\nexport const lessEqual = op({ lessEqual_ });\n//# sourceMappingURL=less_equal.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { LinSpace } from '../kernel_names';\n/**\n * Return an evenly spaced sequence of numbers over the given interval.\n *\n * ```js\n * tf.linspace(0, 9, 10).print();\n * ```\n * @param start The start value of the sequence.\n * @param stop The end value of the sequence.\n * @param num The number of values to generate.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function linspace(start, stop, num) {\n if (num <= 0) {\n throw new Error('The number of values should be positive.');\n }\n const attrs = { start, stop, num };\n return ENGINE.runKernelFunc(backend => backend.linspace(start, stop, num), {} /* inputs */, null /* grad */, LinSpace, attrs);\n}\n//# sourceMappingURL=linspace.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { LRN } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Normalizes the activation of a local neighborhood across or within\n * channels.\n *\n * @param x The input tensor. 
The 4-D input tensor is treated as a 3-D array\n * of 1D vectors (along the last dimension), and each vector is\n * normalized independently.\n * @param depthRadius The number of adjacent channels in the 1D normalization\n * window.\n * @param bias A constant bias term for the basis.\n * @param alpha A scale factor, usually positive.\n * @param beta An exponent.\n *\n * @doc {heading: 'Operations', subheading: 'Normalization'}\n */\nfunction localResponseNormalization_(x, depthRadius = 5, bias = 1, alpha = 1, beta = 0.5) {\n const $x = convertToTensor(x, 'x', 'localResponseNormalization');\n util.assert($x.rank === 4 || $x.rank === 3, () => `Error in localResponseNormalization: x must be rank 3 or 4 but got\n rank ${$x.rank}.`);\n util.assert(util.isInt(depthRadius), () => `Error in localResponseNormalization: depthRadius must be an ` +\n `integer but got depthRadius ${depthRadius}.`);\n let x4D = $x;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n reshapedTo4D = true;\n x4D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2]]);\n }\n const forward = (backend, save) => {\n const y = backend.localResponseNormalization4D(x4D, depthRadius, bias, alpha, beta);\n save([x4D, y]);\n return y;\n };\n const inputs = { x: x4D };\n const attrs = { depthRadius, bias, alpha, beta };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, LRN, attrs);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n else {\n return res;\n }\n}\nexport const localResponseNormalization = op({ localResponseNormalization_ });\n//# sourceMappingURL=local_response_normalization.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Log } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes natural logarithm of the input `tf.Tensor` element-wise: `ln(x)`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, Math.E]);\n *\n * x.log().print(); // or tf.log(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction log_(x) {\n const $x = convertToTensor(x, 'x', 'log');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.log($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Log);\n}\nexport const log = op({ log_ });\n//# sourceMappingURL=log.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Log1p } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes natural logarithm of the input `tf.Tensor` plus one\n * element-wise: `ln(1 + x)`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, Math.E - 1]);\n *\n * x.log1p().print(); // or tf.log1p(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction log1p_(x) {\n const $x = convertToTensor(x, 'x', 'log1p');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.log1p($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Log1p);\n}\nexport const log1p = op({ log1p_ });\n//# sourceMappingURL=log1p.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from './engine';\nimport { Tensor, Variable } from './tensor';\nimport { convertToTensor, convertToTensorArray } from './tensor_util_env';\nimport * as util from './util';\n/**\n * Provided `f(x)`, returns another function `g(x, dy?)`, which gives the\n * gradient of `f(x)` with respect to `x`.\n *\n * If `dy` is provided, the gradient of `f(x).mul(dy).sum()` with respect to\n * `x` is computed instead. `f(x)` must take a single tensor `x` and return a\n * single tensor `y`. 
If `f()` takes multiple inputs, use `tf.grads` instead.\n *\n * ```js\n * // f(x) = x ^ 2\n * const f = x => x.square();\n * // f'(x) = 2x\n * const g = tf.grad(f);\n *\n * const x = tf.tensor1d([2, 3]);\n * g(x).print();\n * ```\n *\n * ```js\n * // f(x) = x ^ 3\n * const f = x => x.pow(tf.scalar(3, 'int32'));\n * // f'(x) = 3x ^ 2\n * const g = tf.grad(f);\n * // f''(x) = 6x\n * const gg = tf.grad(g);\n *\n * const x = tf.tensor1d([2, 3]);\n * gg(x).print();\n * ```\n *\n * @param f The function f(x), to compute gradient for.\n *\n * @doc {heading: 'Training', subheading: 'Gradients'}\n */\nfunction grad(f) {\n util.assert(util.isFunction(f), () => 'The f passed in grad(f) must be a function');\n return (x, dy) => {\n // x can be of any dtype, thus null as the last argument.\n const $x = convertToTensor(x, 'x', 'tf.grad', null);\n const $dy = (dy != null) ? convertToTensor(dy, 'dy', 'tf.grad') : null;\n return ENGINE.tidy(() => {\n const { value, grads } = ENGINE.gradients(() => f($x), [$x], $dy);\n if ($dy != null) {\n util.assertShapesMatch(value.shape, $dy.shape, 'The shape of dy passed in grad(f)(x, dy) must match the shape ' +\n 'returned by f(x)');\n }\n checkGrads(grads);\n return grads[0];\n });\n };\n}\n/**\n * Provided `f(x1, x2,...)`, returns another function `g([x1, x2,...], dy?)`,\n * which gives an array of gradients of `f()` with respect to each input\n * [`x1`,`x2`,...].\n *\n * If `dy` is passed when calling `g()`, the gradient of\n * `f(x1,...).mul(dy).sum()` with respect to each input is computed instead.\n * The provided `f` must take one or more tensors and return a single tensor\n * `y`. If `f()` takes a single input, we recommend using `tf.grad` instead.\n *\n * ```js\n * // f(a, b) = a * b\n * const f = (a, b) => a.mul(b);\n * // df / da = b, df / db = a\n * const g = tf.grads(f);\n *\n * const a = tf.tensor1d([2, 3]);\n * const b = tf.tensor1d([-2, -3]);\n * const [da, db] = g([a, b]);\n * console.log('da');\n * da.print();\n * console.log('db');\n * db.print();\n * ```\n *\n * @param f The function `f(x1, x2,...)` to compute gradients for.\n *\n * @doc {heading: 'Training', subheading: 'Gradients'}\n */\nfunction grads(f) {\n util.assert(util.isFunction(f), () => 'The f passed in grads(f) must be a function');\n return (args, dy) => {\n util.assert(Array.isArray(args), () => 'The args passed in grads(f)(args) must be an array ' +\n 'of `Tensor`s or `TensorLike`s');\n // args can be of any dtype, thus null as the last argument.\n const $args = convertToTensorArray(args, 'args', 'tf.grads', null);\n const $dy = (dy != null) ? convertToTensor(dy, 'dy', 'tf.grads') : null;\n return ENGINE.tidy(() => {\n const { value, grads } = ENGINE.gradients(() => f(...$args), $args, $dy);\n if ($dy != null) {\n util.assertShapesMatch(value.shape, $dy.shape, 'The shape of dy passed in grads(f)([x1,...], dy) must ' +\n 'match the shape returned by f([x1,...])');\n }\n checkGrads(grads);\n return grads;\n });\n };\n}\n/**\n * Like `tf.grad`, but also returns the value of `f()`. 
Useful when `f()`\n * returns a metric you want to show.\n *\n * The result is a rich object with the following properties:\n * - grad: The gradient of `f(x)` w.r.t `x` (result of `tf.grad`).\n * - value: The value returned by `f(x)`.\n *\n * ```js\n * // f(x) = x ^ 2\n * const f = x => x.square();\n * // f'(x) = 2x\n * const g = tf.valueAndGrad(f);\n *\n * const x = tf.tensor1d([2, 3]);\n * const {value, grad} = g(x);\n *\n * console.log('value');\n * value.print();\n * console.log('grad');\n * grad.print();\n * ```\n *\n * @doc {heading: 'Training', subheading: 'Gradients'}\n */\nfunction valueAndGrad(f) {\n util.assert(util.isFunction(f), () => 'The f passed in valueAndGrad(f) must be a function');\n return (x, dy) => {\n util.assert(x instanceof Tensor, () => 'The x passed in valueAndGrad(f)(x) must be a tensor');\n util.assert(dy == null || dy instanceof Tensor, () => 'The dy passed in valueAndGrad(f)(x, dy) must be a tensor');\n const { grads, value } = ENGINE.gradients(() => f(x), [x], dy);\n checkGrads(grads);\n return { grad: grads[0], value };\n };\n}\n/**\n * Like `tf.grads`, but returns also the value of `f()`. Useful when `f()`\n * returns a metric you want to show.\n *\n * The result is a rich object with the following properties:\n * - grads: The gradients of `f()` w.r.t each input (result of `tf.grads`).\n * - value: The value returned by `f(x)`.\n *\n * ```js\n * // f(a, b) = a * b\n * const f = (a, b) => a.mul(b);\n * // df/da = b, df/db = a\n * const g = tf.valueAndGrads(f);\n *\n * const a = tf.tensor1d([2, 3]);\n * const b = tf.tensor1d([-2, -3]);\n * const {value, grads} = g([a, b]);\n *\n * const [da, db] = grads;\n *\n * console.log('value');\n * value.print();\n *\n * console.log('da');\n * da.print();\n * console.log('db');\n * db.print();\n * ```\n *\n * @doc {heading: 'Training', subheading: 'Gradients'}\n */\nfunction valueAndGrads(f) {\n util.assert(util.isFunction(f), () => 'The f passed in valueAndGrads(f) must be a function');\n return (args, dy) => {\n util.assert(Array.isArray(args) && args.every(arg => arg instanceof Tensor), () => 'The args passed in valueAndGrads(f)(args) must be array of ' +\n 'tensors');\n util.assert(dy == null || dy instanceof Tensor, () => 'The dy passed in valueAndGrads(f)(args, dy) must be a tensor');\n const res = ENGINE.gradients(() => f(...args), args, dy);\n if (dy != null) {\n util.assertShapesMatch(res.value.shape, dy.shape, 'The shape of dy passed in valueAndGrads(f)([x1,...], dy) must ' +\n 'match the shape returned by f([x1,...])');\n }\n checkGrads(res.grads);\n return res;\n };\n}\n/**\n * Computes and returns the gradient of f(x) with respect to the list of\n * trainable variables provided by `varList`. If no list is provided, it\n * defaults to all trainable variables.\n *\n * ```js\n * const a = tf.variable(tf.tensor1d([3, 4]));\n * const b = tf.variable(tf.tensor1d([5, 6]));\n * const x = tf.tensor1d([1, 2]);\n *\n * // f(a, b) = a * x ^ 2 + b * x\n * const f = () => a.mul(x.square()).add(b.mul(x)).sum();\n * // df/da = x ^ 2, df/db = x\n * const {value, grads} = tf.variableGrads(f);\n *\n * Object.keys(grads).forEach(varName => grads[varName].print());\n * ```\n *\n * @param f The function to execute. f() should return a scalar.\n * @param varList The list of variables to compute the gradients with respect\n * to. 
Defaults to all trainable variables.\n * @returns An object with the following keys and values:\n * - `value`: The value of the function `f`.\n * - `grads`: A map from the names of the variables to the gradients.\n * If the `varList` argument is provided explicitly and contains a subset of\n * non-trainable variables, this map in the return value will contain keys\n * that map the names of the non-trainable variables to `null`.\n *\n * @doc {heading: 'Training', subheading: 'Gradients'}\n */\nfunction variableGrads(f, varList) {\n util.assert(util.isFunction(f), () => 'The f passed in variableGrads(f) must be a function');\n util.assert(varList == null ||\n Array.isArray(varList) && varList.every(v => v instanceof Variable), () => 'The varList passed in variableGrads(f, varList) must be an array ' +\n 'of variables');\n const specifiedVarList = varList != null;\n if (!specifiedVarList) {\n // Get all of the trainable variables.\n varList = [];\n for (const varName in ENGINE.registeredVariables) {\n varList.push(ENGINE.registeredVariables[varName]);\n }\n }\n const specifiedNonTrainable = specifiedVarList ? varList.filter(variable => !variable.trainable) : null;\n // Prune non-trainable variables.\n const originalVarCount = varList.length;\n varList = varList.filter(variable => variable.trainable);\n util.assert(varList.length > 0, () => `variableGrads() expects at least one of the input variables to ` +\n `be trainable, but none of the ${originalVarCount} variables is ` +\n `trainable.`);\n const allowNoGradients = true;\n const { value, grads } = ENGINE.gradients(f, varList, null, allowNoGradients);\n util.assert(grads.some(g => g != null), () => 'Cannot find a connection between any variable and the result of ' +\n 'the loss function y=f(x). Please make sure the operations that ' +\n 'use variables are inside the function f passed to minimize().');\n util.assert(value.rank === 0, () => `The f passed in variableGrads(f) must return a scalar, but it ` +\n `returned a rank-${value.rank} tensor`);\n const namedGrads = {};\n varList.forEach((v, i) => {\n if (grads[i] != null) {\n namedGrads[v.name] = grads[i];\n }\n });\n if (specifiedNonTrainable != null) {\n // If varList is explicitly provided and contains non-trainable values,\n // add them to the returned gradients with `null` values.\n specifiedNonTrainable.forEach(v => namedGrads[v.name] = null);\n }\n return { value, grads: namedGrads };\n}\n/**\n * Overrides the gradient computation of a function `f`.\n *\n * Takes a function\n * `f(...inputs, save) => {value: Tensor, gradFunc: (dy, saved) => Tensor[]}`\n * and returns another function `g(...inputs)` which takes the same inputs as\n * `f`. When called, `g` returns `f().value`. In backward mode, custom gradients\n * with respect to each input of `f` are computed using `f().gradFunc`.\n *\n * The `save` function passsed to `f` should be used for saving tensors needed\n * in the gradient. 
And the `saved` passed to the `gradFunc` is a\n * `NamedTensorMap`, which contains those saved tensor.\n *\n * ```js\n * const customOp = tf.customGrad((x, save) => {\n * // Save x to make sure it's available later for the gradient.\n * save([x]);\n * // Override gradient of our custom x ^ 2 op to be dy * abs(x);\n * return {\n * value: x.square(),\n * // Note `saved.x` which points to the `x` we saved earlier.\n * gradFunc: (dy, saved) => [dy.mul(saved[0].abs())]\n * };\n * });\n *\n * const x = tf.tensor1d([-1, -2, 3]);\n * const dx = tf.grad(x => customOp(x));\n *\n * console.log(`f(x):`);\n * customOp(x).print();\n * console.log(`f'(x):`);\n * dx(x).print();\n * ```\n *\n * @param f The function to evaluate in forward mode, which should return\n * `{value: Tensor, gradFunc: (dy, saved) => Tensor[]}`, where `gradFunc`\n * returns the custom gradients of `f` with respect to its inputs.\n *\n * @doc {heading: 'Training', subheading: 'Gradients'}\n */\nfunction customGrad(f) {\n return ENGINE.customGrad(f);\n}\nfunction checkGrads(grads) {\n const numNullGradients = grads.filter(g => g == null).length;\n if (numNullGradients > 0) {\n throw new Error(`Cannot compute gradient of y=f(x) with respect to x. Make sure that\n the f you passed encloses all operations that lead from x to y.`);\n }\n}\nexport { customGrad, variableGrads, valueAndGrad, valueAndGrads, grad, grads, };\n//# sourceMappingURL=gradients.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Negate } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes `-1 * x` element-wise.\n *\n * ```js\n * const x = tf.tensor2d([1, 2, -2, 0], [2, 2]);\n *\n * x.neg().print(); // or tf.neg(x)\n * ```\n *\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction neg_(x) {\n const $x = convertToTensor(x, 'x', 'neg');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(backend => backend.neg($x), inputs, null /* grad */, Negate);\n}\nexport const neg = op({ neg_ });\n//# sourceMappingURL=neg.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Softplus } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes softplus of the input `tf.Tensor` element-wise: `log(exp(x) + 1)`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, .7]);\n *\n * x.softplus().print(); // or tf.softplus(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction softplus_(x) {\n const $x = convertToTensor(x, 'x', 'softplus');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.softplus($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Softplus);\n}\nexport const softplus = op({ softplus_ });\n//# sourceMappingURL=softplus.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { customGrad } from '../gradients';\nimport { convertToTensor } from '../tensor_util_env';\nimport { mul } from './mul';\nimport { neg } from './neg';\nimport { op } from './operation';\nimport { sigmoid } from './sigmoid';\nimport { softplus } from './softplus';\n/**\n * Computes log sigmoid of the input `tf.Tensor` element-wise:\n * `logSigmoid(x)`. For numerical stability, we use `-tf.softplus(-x)`.\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, .7]);\n *\n * x.logSigmoid().print(); // or tf.logSigmoid(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction logSigmoid_(x) {\n const $x = convertToTensor(x, 'x', 'logSigmoid');\n // Use a custom gradient to maintain previous implementation.\n // There is no LogSigmoid kernel in TF so we can't use engine.runKernel\n // directly\n const customOp = customGrad((x) => {\n // TODO(yassogba) we can remove the chained softplus call here only\n // after backends have modualrized softplus at which point we can call\n // engine runKernel(..., Sotfplus, ...) 
directly.\n const value = neg(softplus(neg(x)));\n const gradFunc = (dy) => {\n const derX = mul(dy, sigmoid(neg(x)));\n return derX;\n };\n return { value, gradFunc };\n });\n return customOp($x);\n}\nexport const logSigmoid = op({ logSigmoid_ });\n//# sourceMappingURL=log_sigmoid.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Max } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as axis_util from './axis_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { transpose } from './transpose';\n/**\n * Computes the maximum of elements across dimensions of a `tf.Tensor`.\n *\n * Reduces the input along the dimensions given in `axes`. Unless `keepDims`\n * is true, the rank of the `tf.Tensor` is reduced by 1 for each entry in\n * `axes`. If `keepDims` is true, the reduced dimensions are retained with\n * length 1. If `axes` has no entries, all dimensions are reduced, and an\n * `tf.Tensor` with a single element is returned.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3]);\n *\n * x.max().print(); // or tf.max(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * const axis = 1;\n * x.max(axis).print(); // or tf.max(x, axis)\n * ```\n *\n * @param x The input tensor.\n * @param axis The dimension(s) to reduce. By default it reduces\n * all dimensions.\n * @param keepDims If true, retains reduced dimensions with size 1.\n *\n * @doc {heading: 'Operations', subheading: 'Reduction'}\n */\nfunction max_(x, axis = null, keepDims = false) {\n const $x = convertToTensor(x, 'x', 'max');\n const forward = (backend, save) => {\n const origAxes = util.parseAxisParam(axis, $x.shape);\n let axes = origAxes;\n const permutedAxes = axis_util.getAxesPermutation(axes, $x.rank);\n let maxInput = $x;\n if (permutedAxes != null) {\n maxInput = transpose($x, permutedAxes);\n axes = axis_util.getInnerMostAxes(axes.length, maxInput.rank);\n }\n const y = backend.max(maxInput, axes);\n if (permutedAxes != null) {\n maxInput.dispose();\n }\n let res = y;\n if (keepDims) {\n const expandedShape = axis_util.expandShapeToKeepDim(res.shape, util.parseAxisParam(axis, $x.shape));\n res = reshape(res, expandedShape);\n y.dispose();\n }\n save([$x, res]);\n return res;\n };\n const inputs = { x: $x };\n const attrs = { reductionIndices: axis, keepDims };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Max, attrs);\n}\nexport const max = op({ max_ });\n//# sourceMappingURL=max.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Sub } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Subtracts two `tf.Tensor`s element-wise, A - B. Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([10, 20, 30, 40]);\n * const b = tf.tensor1d([1, 2, 3, 4]);\n *\n * a.sub(b).print(); // or tf.sub(a, b)\n * ```\n *\n * ```js\n * // Broadcast subtract a with b.\n * const a = tf.tensor1d([10, 20, 30, 40]);\n * const b = tf.scalar(5);\n *\n * a.sub(b).print(); // or tf.sub(a, b)\n * ```\n * @param a The first `tf.Tensor` to subtract from.\n * @param b The second `tf.Tensor` to be subtracted. Must have the same dtype as\n * `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction sub_(a, b) {\n let $a = convertToTensor(a, 'a', 'sub');\n let $b = convertToTensor(b, 'b', 'sub');\n [$a, $b] = makeTypesMatch($a, $b);\n const forward = (backend, save) => {\n const res = backend.subtract($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Sub);\n}\nexport const sub = op({ sub_ });\n//# sourceMappingURL=sub.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Sum } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam } from '../util';\nimport { expandShapeToKeepDim, getAxesPermutation, getInnerMostAxes } from './axis_util';\nimport { cast } from './cast';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { transpose } from './transpose';\n/**\n * Computes the sum of elements across dimensions of a `tf.Tensor`.\n *\n * Reduces the input along the dimensions given in `axes`. Unless `keepDims`\n * is true, the rank of the `tf.Tensor` is reduced by 1 for each entry in\n * `axes`. If `keepDims` is true, the reduced dimensions are retained with\n * length 1. 
If axes has no entries, all dimensions are reduced, and a\n * `tf.Tensor` with a single element is returned.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3]);\n *\n * x.sum().print(); // or tf.sum(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * const axis = 1;\n * x.sum(axis).print(); // or tf.sum(x, axis)\n * ```\n *\n * @param x The input tensor to compute the sum over. If the dtype is `bool`\n * it will be converted to `int32` and the output dtype will be `int32`.\n * @param axis The dimension(s) to reduce. By default it reduces\n * all dimensions.\n * @param keepDims If true, retains reduced dimensions with size 1.\n *\n * @doc {heading: 'Operations', subheading: 'Reduction'}\n */\nfunction sum_(x, axis = null, keepDims = false) {\n let $x = convertToTensor(x, 'x', 'sum');\n if ($x.dtype === 'bool') {\n $x = cast($x, 'int32');\n }\n const forward = (backend, save) => {\n save([$x]);\n const axes = parseAxisParam(axis, $x.shape);\n const permutation = getAxesPermutation(axes, $x.rank);\n let reductionAxes = axes;\n let permutedX = $x;\n if (permutation != null) {\n permutedX = transpose($x, permutation);\n reductionAxes = getInnerMostAxes(reductionAxes.length, $x.rank);\n }\n let value = backend.sum(permutedX, reductionAxes);\n if (keepDims) {\n const newShape = expandShapeToKeepDim(value.shape, axes);\n value = reshape(value, newShape);\n }\n return value;\n };\n const inputs = { x: $x };\n const attrs = { axis, keepDims };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Sum, attrs);\n}\nexport const sum = op({ sum_ });\n//# sourceMappingURL=sum.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { LogSoftmax } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { cast } from './cast';\nimport { exp } from './exp';\nimport { log } from './log';\nimport { max } from './max';\nimport { op } from './operation';\nimport { sub } from './sub';\nimport { sum } from './sum';\n/**\n * Computes the log softmax.\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3]);\n *\n * a.logSoftmax().print(); // or tf.logSoftmax(a)\n * ```\n *\n * ```js\n * const a = tf.tensor2d([2, 4, 6, 1, 2, 3], [2, 3]);\n *\n * a.logSoftmax().print(); // or tf.logSoftmax(a)\n * ```\n *\n * @param logits The logits array.\n * @param axis The dimension softmax would be performed on. Defaults to `-1`\n * which indicates the last dimension.\n *\n * @doc {heading: 'Operations', subheading: 'Normalization'}\n */\nfunction logSoftmax_(logits, axis = -1) {\n const $logits = convertToTensor(logits, 'logits', 'logSoftmax');\n if (axis === -1) {\n axis = $logits.rank - 1;\n }\n if (axis !== $logits.rank - 1) {\n throw Error('Log Softmax along a non-last dimension is not yet supported. 
' +\n `Logits was rank ${$logits.rank} and axis was ${axis}`);\n }\n const forward = (backend, save) => {\n const keepDims = true;\n const xMax = max(logits, axis, true);\n const shifted = sub(logits, xMax);\n const value = sub(cast(shifted, 'float32'), log(sum(exp(shifted), axis, keepDims)));\n save([value]);\n return value;\n };\n const inputs = { logits: $logits };\n const attrs = { axis };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, LogSoftmax, attrs);\n}\nexport const logSoftmax = op({ logSoftmax_ });\n//# sourceMappingURL=log_softmax.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam } from '../util';\nimport { add } from './add';\nimport { expandShapeToKeepDim } from './axis_util';\nimport { exp } from './exp';\nimport { log } from './log';\nimport { max } from './max';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { sub } from './sub';\nimport { sum } from './sum';\n/**\n * Computes the log(sum(exp(elements across the reduction dimensions)).\n *\n * Reduces the input along the dimensions given in `axis`. Unless `keepDims`\n * is true, the rank of the array is reduced by 1 for each entry in `axis`.\n * If `keepDims` is true, the reduced dimensions are retained with length 1.\n * If `axis` has no entries, all dimensions are reduced, and an array with a\n * single element is returned.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3]);\n *\n * x.logSumExp().print(); // or tf.logSumExp(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * const axis = 1;\n * x.logSumExp(axis).print(); // or tf.logSumExp(a, axis)\n * ```\n * @param x The input tensor.\n * @param axis The dimension(s) to reduce. If null (the default),\n * reduces all dimensions.\n * @param keepDims If true, retains reduced dimensions with length\n * of 1. Defaults to false.\n *\n * @doc {heading: 'Operations', subheading: 'Reduction'}\n */\nfunction logSumExp_(x, axis = null, keepDims = false) {\n const $x = convertToTensor(x, 'x', 'logSumExp');\n const axes = parseAxisParam(axis, $x.shape);\n const xMax = max($x, axes, true /* keepDims */);\n const a = sub($x, xMax);\n const b = exp(a);\n const c = sum(b, axes);\n const d = log(c);\n const res = add(reshape(xMax, d.shape), d);\n if (keepDims) {\n const newShape = expandShapeToKeepDim(res.shape, axes);\n return reshape(res, newShape);\n }\n return res;\n}\nexport const logSumExp = op({ logSumExp_ });\n//# sourceMappingURL=log_sum_exp.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { LogicalAnd } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { op } from './operation';\n/**\n * Returns the truth value of `a AND b` element-wise. Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([false, false, true, true], 'bool');\n * const b = tf.tensor1d([false, true, false, true], 'bool');\n *\n * a.logicalAnd(b).print();\n * ```\n *\n * @param a The first input tensor. Must be of dtype bool.\n * @param b The second input tensor. Must be of dtype bool.\n *\n * @doc {heading: 'Operations', subheading: 'Logical'}\n */\nfunction logicalAnd_(a, b) {\n const $a = convertToTensor(a, 'a', 'logicalAnd', 'bool');\n const $b = convertToTensor(b, 'b', 'logicalAnd', 'bool');\n assertAndGetBroadcastShape($a.shape, $b.shape);\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(backend => backend.logicalAnd($a, $b), inputs, null /* grad */, LogicalAnd);\n}\nexport const logicalAnd = op({ logicalAnd_ });\n//# sourceMappingURL=logical_and.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { LogicalNot } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Returns the truth value of `NOT x` element-wise.\n *\n * ```js\n * const a = tf.tensor1d([false, true], 'bool');\n *\n * a.logicalNot().print();\n * ```\n *\n * @param x The input tensor. Must be of dtype 'bool'.\n *\n * @doc {heading: 'Operations', subheading: 'Logical'}\n */\nfunction logicalNot_(x) {\n const $x = convertToTensor(x, 'x', 'logicalNot', 'bool');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(backend => backend.logicalNot($x), inputs, null /* grad */, LogicalNot);\n}\nexport const logicalNot = op({ logicalNot_ });\n//# sourceMappingURL=logical_not.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { LogicalOr } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { op } from './operation';\n/**\n * Returns the truth value of `a OR b` element-wise. Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([false, false, true, true], 'bool');\n * const b = tf.tensor1d([false, true, false, true], 'bool');\n *\n * a.logicalOr(b).print();\n * ```\n * @param a The first input tensor. Must be of dtype bool.\n * @param b The second input tensor. Must be of dtype bool.\n *\n * @doc {heading: 'Operations', subheading: 'Logical'}\n */\nfunction logicalOr_(a, b) {\n const $a = convertToTensor(a, 'a', 'logicalOr', 'bool');\n const $b = convertToTensor(b, 'b', 'logicalOr', 'bool');\n assertAndGetBroadcastShape($a.shape, $b.shape);\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(backend => backend.logicalOr($a, $b), inputs, null /* grad */, LogicalOr);\n}\nexport const logicalOr = op({ logicalOr_ });\n//# sourceMappingURL=logical_or.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { logicalAnd } from './logical_and';\nimport { logicalNot } from './logical_not';\nimport { logicalOr } from './logical_or';\nimport { op } from './operation';\n/**\n * Returns the truth value of `a XOR b` element-wise. Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([false, false, true, true], 'bool');\n * const b = tf.tensor1d([false, true, false, true], 'bool');\n *\n * a.logicalXor(b).print();\n * ```\n *\n * @param a The first input tensor. Must be of dtype bool.\n * @param b The second input tensor. 
Must be of dtype bool.\n *\n * @doc {heading: 'Operations', subheading: 'Logical'}\n */\nfunction logicalXor_(a, b) {\n const $a = convertToTensor(a, 'a', 'logicalXor', 'bool');\n const $b = convertToTensor(b, 'b', 'logicalXor', 'bool');\n assertAndGetBroadcastShape($a.shape, $b.shape);\n // x ^ y = (x | y) & ~(x & y)\n return logicalAnd(logicalOr(a, b), logicalNot(logicalAnd(a, b)));\n}\nexport const logicalXor = op({ logicalXor_ });\n//# sourceMappingURL=logical_xor.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { MaxPool } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the 2D max pooling of an image.\n *\n * @param x The input tensor, of rank 4 or rank 3 of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is assumed.\n * @param filterSize The filter size: `[filterHeight, filterWidth]`. If\n * `filterSize` is a single number, then `filterHeight == filterWidth`.\n * @param strides The strides of the pooling: `[strideHeight, strideWidth]`. If\n * `strides` is a single number, then `strideHeight == strideWidth`.\n * @param dilations The dilation rates: `[dilationHeight, dilationWidth]`\n * in which we sample input values across the height and width dimensions\n * in dilated pooling. Defaults to `[1, 1]`. If `dilations` is a single\n * number, then `dilationHeight == dilationWidth`. If it is greater than\n * 1, then all values of `strides` must be 1.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. If none is provided, it will not round\n * and error if the output is of fractional size.\n */\nfunction maxPool_(x, filterSize, strides, pad, dimRoundingMode) {\n const $x = convertToTensor(x, 'x', 'maxPool');\n const dilations = 1;\n let x4D = $x;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n reshapedTo4D = true;\n x4D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2]]);\n }\n util.assert(x4D.rank === 4, () => `Error in maxPool: input must be rank 4 but got rank ${x4D.rank}.`);\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in maxPool: Either strides or dilations must be 1. 
' +\n `Got strides ${strides} and dilations '${dilations}'`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in maxPool: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const forward = (backend, save) => {\n const convInfo = conv_util.computePool2DInfo(x4D.shape, filterSize, strides, 1 /* dilations */, pad, dimRoundingMode);\n let y;\n if (convInfo.filterWidth === 1 && convInfo.filterHeight === 1 &&\n util.arraysEqual(convInfo.inShape, convInfo.outShape)) {\n y = x4D.clone();\n }\n else {\n y = backend.maxPool(x4D, convInfo);\n }\n save([x4D, y]);\n return y;\n };\n const inputs = { x: x4D };\n const attrs = { filterSize, strides, pad, dimRoundingMode };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, MaxPool, attrs);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const maxPool = op({ maxPool_ });\n//# sourceMappingURL=max_pool.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { deprecationWarn } from '../globals';\nimport { MaxPool3D } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the 3D max pooling.\n *\n * ```js\n * const x = tf.tensor5d([1, 2, 3, 4, 5, 6, 7, 8], [1, 2, 2, 2, 1]);\n * const result = tf.maxPool3d(x, 2, 1, 'valid');\n * result.print();\n * ```\n *\n * @param x The input tensor, of rank 5 or rank 4 of shape\n * `[batch, depth, height, width, inChannels]`.\n * @param filterSize The filter size:\n * `[filterDepth, filterHeight, filterWidth]`.\n * If `filterSize` is a single number,\n * then `filterDepth == filterHeight == filterWidth`.\n * @param strides The strides of the pooling:\n * `[strideDepth, strideHeight, strideWidth]`.\n * If `strides` is a single number,\n * then `strideDepth == strideHeight == strideWidth`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1*1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. If none is provided, it will not round\n * and error if the output is of fractional size.\n * @param dataFormat An optional string from: \"NDHWC\", \"NCDHW\". Defaults to\n * \"NDHWC\". Specify the data format of the input and output data. 
With the\n * default format \"NDHWC\", the data is stored in the order of: [batch,\n * depth, height, width, channels]. Only \"NDHWC\" is currently supported.\n * @param dilations Deprecated, this field will be gone in v3.0.0.\n * The dilation rates: `[dilationDepth, dilationHeight, dilationWidth]`\n * in which we sample input values across the depth, height and width\n * dimensions in dilated pooling.\n * Defaults to `[1, 1, 1]`. If `dilations` is a single number,\n * then `dilationDepth == dilationHeight == dilationWidth`.\n * If it is greater than 1, then all values of `strides` must be 1.\n *\n * @doc {heading: 'Operations', subheading: 'Convolution'}\n */\nfunction maxPool3d_(x, filterSize = [1, 1, 1], strides, pad, dimRoundingMode, dataFormat = 'NDHWC', dilations) {\n if (dilations == null) {\n dilations = [1, 1, 1];\n }\n else {\n deprecationWarn('dilations is deprecated, this field will be gone in ' +\n 'v3.0.0.');\n }\n const $x = convertToTensor(x, 'x', 'maxPool3d');\n let x5D = $x;\n let reshapedTo5D = false;\n if ($x.rank === 4) {\n reshapedTo5D = true;\n x5D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2], $x.shape[3]]);\n }\n util.assert(x5D.rank === 5, () => `Error in maxPool3d: x must be rank 5 but got rank ${x5D.rank}.`);\n util.assert(dataFormat === 'NDHWC', () => `Error in maxPool3d: Only NDHWC is currently supported, ` +\n `but got dataFormat of ${dataFormat}`);\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in maxPool3d: Either strides or dilations must be 1. ' +\n `Got strides ${strides} and dilations '${dilations}'`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in maxPool3d: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const forward = (backend, save) => {\n if (dilations == null) {\n dilations = [1, 1, 1];\n }\n const convInfo = conv_util.computePool3DInfo(x5D.shape, filterSize, strides, dilations, pad, dimRoundingMode, dataFormat);\n const y = backend.maxPool3d(x5D, convInfo);\n save([x5D, y]);\n return y;\n };\n const inputs = { x: x5D };\n const attrs = { filterSize, strides, pad, dimRoundingMode, dataFormat, dilations };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, MaxPool3D, attrs);\n if (reshapedTo5D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3], res.shape[4]]);\n }\n return res;\n}\nexport const maxPool3d = op({ maxPool3d_ });\n//# sourceMappingURL=max_pool_3d.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { MaxPoolWithArgmax } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes the 2D max pooling of an image with Argmax index.\n * The indices in argmax are flattened, so that a maximum value at position `[b,\n * y, x, c]` becomes flattened index: `(y * width + x) * channels + c` if\n * include_batch_in_index is False; `((b * height + y) * width + x) * channels\n * +c` if include_batch_in_index is True.\n *\n * The indices returned are always in `[0, height) x [0, width)` before\n * flattening.\n *\n * @param x The input tensor, of rank 4 or rank 3 of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is assumed.\n * @param filterSize The filter size: `[filterHeight, filterWidth]`. If\n * `filterSize` is a single number, then `filterHeight == filterWidth`.\n * @param strides The strides of the pooling: `[strideHeight, strideWidth]`. If\n * `strides` is a single number, then `strideHeight == strideWidth`.\n * @param dataFormat An optional string from: \"NDHWC\", \"NCDHW\". Defaults to\n * \"NDHWC\". Specify the data format of the input and output data. With the\n * default format \"NDHWC\", the data is stored in the order of: [batch,\n * depth, height, width, channels]. Only \"NDHWC\" is currently supported.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param includeBatchIndex Defaults to False. Whether to include batch\n * dimension in flattened index of argmax.\n *\n * @doc {heading: 'Operations', subheading: 'Convolution'}\n */\nfunction maxPoolWithArgmax_(x, filterSize, strides, pad, includeBatchInIndex = false) {\n const $x = convertToTensor(x, 'x', 'maxPoolWithArgmax');\n const inputs = { x: $x };\n const attrs = { filterSize, strides, pad, includeBatchInIndex };\n const result = ENGINE.runKernel(MaxPoolWithArgmax, inputs, attrs);\n return { result: result[0], indexes: result[1] };\n}\nexport const maxPoolWithArgmax = op({ maxPoolWithArgmax_ });\n//# sourceMappingURL=max_pool_with_argmax.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { makeZerosTypedArray, sizeFromShape } from '../util';\nimport { complex } from './complex';\n/**\n * Creates a `tf.Tensor` with all elements set to 0.\n *\n * ```js\n * tf.zeros([2, 2]).print();\n * ```\n *\n * @param shape An array of integers defining the output tensor shape.\n * @param dtype The type of an element in the resulting tensor. Can\n * be 'float32', 'int32' or 'bool'. Defaults to 'float'.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function zeros(shape, dtype = 'float32') {\n if (dtype === 'complex64') {\n const real = zeros(shape, 'float32');\n const imag = zeros(shape, 'float32');\n return complex(real, imag);\n }\n const values = makeZerosTypedArray(sizeFromShape(shape), dtype);\n return ENGINE.makeTensor(values, shape, dtype);\n}\n//# sourceMappingURL=zeros.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { makeOnesTypedArray, sizeFromShape } from '../util';\nimport { complex } from './complex';\nimport { zeros } from './zeros';\n/**\n * Creates a `tf.Tensor` with all elements set to 1.\n *\n * ```js\n * tf.ones([2, 2]).print();\n * ```\n *\n * @param shape An array of integers defining the output tensor shape.\n * @param dtype The type of an element in the resulting tensor. Defaults to\n * 'float'.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function ones(shape, dtype = 'float32') {\n if (dtype === 'complex64') {\n const real = ones(shape, 'float32');\n const imag = zeros(shape, 'float32');\n return complex(real, imag);\n }\n const values = makeOnesTypedArray(sizeFromShape(shape), dtype);\n return ENGINE.makeTensor(values, shape, dtype);\n}\n//# sourceMappingURL=ones.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { customGrad } from '../gradients';\nimport { Mean } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam, sizeFromShape } from '../util';\nimport { computeOutAndReduceShapes } from './axis_util';\nimport { cast } from './cast';\nimport { div } from './div';\nimport { mul } from './mul';\nimport { ones } from './ones';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { scalar } from './scalar';\nimport { sum } from './sum';\n/**\n * Computes the mean of elements across dimensions of a `tf.Tensor`.\n *\n * Reduces `x` along the dimensions given in `axis`. Unless `keepDims` is\n * true, the rank of the `tf.Tensor` is reduced by 1 for each entry in `axis`.\n * If `keepDims` is true, the reduced dimensions are retained with length 1.\n * If `axis` has no entries, all dimensions are reduced, and a `tf.Tensor` with\n * a single element is returned.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3]);\n *\n * x.mean().print(); // or tf.mean(a)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * const axis = 1;\n * x.mean(axis).print(); // or tf.mean(x, axis)\n * ```\n *\n * @param x The input tensor.\n * @param axis The dimension(s) to reduce. 
By default it reduces\n * all dimensions.\n * @param keepDims If true, retains reduced dimensions with size 1.\n *\n * @doc {heading: 'Operations', subheading: 'Reduction'}\n */\nfunction mean_(x, axis = null, keepDims = false) {\n const $x = convertToTensor(x, 'x', 'mean');\n const axes = parseAxisParam(axis, $x.shape);\n const shapes = computeOutAndReduceShapes($x.shape, axes);\n const reduceShape = shapes[1];\n const reduceSize = sizeFromShape(reduceShape);\n const inputs = { x: $x };\n const attrs = { axis, keepDims };\n const forward = () => {\n const reduceSizeScalar = scalar(reduceSize);\n // Cast if needed.\n const xReduce = reduceSizeScalar.dtype === $x.dtype ?\n $x :\n cast($x, reduceSizeScalar.dtype);\n const res = div(xReduce, reduceSizeScalar);\n return sum(res, axis, keepDims);\n };\n // Use a custom gradient to bypass 2 gradient backprops since mean is used\n // extremely often.\n const customOp = customGrad((x) => {\n const value = ENGINE.runKernelFunc(forward, inputs, null /* grad */, Mean, attrs);\n const gradFunc = (dy) => {\n const expandedDyShape = x.shape.slice();\n axes.forEach(axis => {\n expandedDyShape[axis] = 1;\n });\n const expandedDy = reshape(dy, expandedDyShape);\n const derX = div(mul(expandedDy, ones(x.shape, 'float32')), reduceSize);\n return derX;\n };\n return { value, gradFunc };\n });\n return customOp($x);\n}\nexport const mean = op({ mean_ });\n//# sourceMappingURL=mean.js.map", "import { ENGINE } from '../engine';\nimport { Min } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam } from '../util';\nimport * as axis_util from './axis_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { transpose } from './transpose';\n/**\n * Computes the minimum value from the input.\n *\n * Reduces the input along the dimensions given in `axes`. Unless `keepDims`\n * is true, the rank of the array is reduced by 1 for each entry in `axes`.\n * If `keepDims` is true, the reduced dimensions are retained with length 1.\n * If `axes` has no entries, all dimensions are reduced, and an array with a\n * single element is returned.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3]);\n *\n * x.min().print(); // or tf.min(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * const axis = 1;\n * x.min(axis).print(); // or tf.min(x, axis)\n * ```\n *\n * @param x The input Tensor.\n * @param axis The dimension(s) to reduce. 
By default it reduces\n * all dimensions.\n * @param keepDims If true, retains reduced dimensions with size 1.\n *\n * @doc {heading: 'Operations', subheading: 'Reduction'}\n */\nfunction min_(x, axis = null, keepDims = false) {\n const $x = convertToTensor(x, 'x', 'min');\n const forward = (backend, save) => {\n const origAxes = parseAxisParam(axis, $x.shape);\n let axes = origAxes;\n const permutedAxes = axis_util.getAxesPermutation(axes, $x.rank);\n let minInput = $x;\n if (permutedAxes != null) {\n minInput = transpose($x, permutedAxes);\n axes = axis_util.getInnerMostAxes(axes.length, $x.rank);\n }\n const y = backend.min(minInput, axes);\n if (permutedAxes != null) {\n minInput.dispose();\n }\n let res = y;\n if (keepDims) {\n const expandedShape = axis_util.expandShapeToKeepDim(res.shape, origAxes);\n res = reshape(y, expandedShape);\n y.dispose();\n }\n save([$x, res]);\n return res;\n };\n const inputs = { x: $x };\n const attrs = { axis, keepDims };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Min, attrs);\n}\nexport const min = op({ min_ });\n//# sourceMappingURL=min.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Minimum } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { cast } from './cast';\nimport { op } from './operation';\n/**\n * Returns the min of a and b (`a < b ? a : b`) element-wise.\n * Supports broadcasting.\n *\n * We also expose `minimumStrict` which has the same signature as this op and\n * asserts that `a` and `b` are the same shape (does not broadcast).\n *\n * ```js\n * const a = tf.tensor1d([1, 4, 3, 16]);\n * const b = tf.tensor1d([1, 2, 9, 4]);\n *\n * a.minimum(b).print(); // or tf.minimum(a, b)\n * ```\n *\n * ```js\n * // Broadcast minimum a with b.\n * const a = tf.tensor1d([2, 4, 6, 8]);\n * const b = tf.scalar(5);\n *\n * a.minimum(b).print(); // or tf.minimum(a, b)\n * ```\n *\n * @param a The first tensor.\n * @param b The second tensor. 
Must have the same type as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction minimum_(a, b) {\n let $a = convertToTensor(a, 'a', 'minimum');\n let $b = convertToTensor(b, 'b', 'minimum');\n [$a, $b] = makeTypesMatch($a, $b);\n if ($a.dtype === 'bool') {\n $a = cast($a, 'int32');\n $b = cast($b, 'int32');\n }\n assertAndGetBroadcastShape($a.shape, $b.shape);\n const forward = (backend, save) => {\n const res = backend.minimum($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Minimum);\n}\nexport const minimum = op({ minimum_ });\n//# sourceMappingURL=minimum.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { MirrorPad } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * Pads a `tf.Tensor` using mirror padding.\n *\n * This operation implements the `REFLECT` and `SYMMETRIC` modes of pad.\n *\n * ```js\n * const x = tf.range(0, 9).reshape([1, 1, 3, 3]);\n * x.mirrorPad([[0, 0], [0, 0], [2, 2], [2, 2]], 'reflect').print();\n * ```\n * @param x The tensor to pad.\n * @param paddings An array of length `R` (the rank of the tensor), where\n * each element is a length-2 tuple of ints `[padBefore, padAfter]`,\n * specifying how much to pad along each dimension of the tensor.\n * In \"reflect\" mode, the padded regions do not include the borders,\n * while in \"symmetric\" mode the padded regions do include the borders.\n * For example, if the input is `[1, 2, 3]` and paddings is `[0, 2]`,\n * then the output is `[1, 2, 3, 2, 1]` in \"reflect\" mode, and\n * `[1, 2, 3, 3, 2]` in \"symmetric\" mode.\n * If `mode` is \"reflect\" then both `paddings[D, 0]` and `paddings[D, 1]`\n * must be no greater than `x.shape[D] - 1`. If mode is \"symmetric\"\n * then both `paddings[D, 0]` and `paddings[D, 1]` must be no greater than\n * `x.shape[D]`\n * @param mode String to specify padding mode. Can be `'reflect' | 'symmetric'`\n */\n/** @doc {heading: 'Tensors', subheading: 'Transformations'} */\nfunction mirrorPad_(x, paddings, mode) {\n util.assert(mode === 'reflect' || mode === 'symmetric', () => `Invalid mode. Mode must be either reflect or symmetric. ` +\n `Got ${mode}.`);\n const $x = convertToTensor(x, 'x', 'mirrorPad');\n if ($x.rank === 0) {\n throw new Error('mirrorPad(scalar) is not defined. ' +\n 'Pass non-scalar to mirrorPad');\n }\n util.assert(paddings.length === $x.rank, () => `Padding doesn't match input. Must be ${$x.rank}. ` +\n `Got ${paddings.length}.`);\n const shapeOffset = mode === 'reflect' ? 1 : 0;\n for (let i = 0; i < $x.rank; i++) {\n util.assert(paddings[i].length === 2, () => `Invalid number of paddings. 
Must be length of 2 each.`);\n util.assert(paddings[i][0] >= 0 && paddings[i][0] <= $x.shape[i] - shapeOffset &&\n paddings[i][1] >= 0 && paddings[i][1] <= $x.shape[i] - shapeOffset, () => `Padding in dimension ${i} cannot be greater than or equal ` +\n `to ${$x.shape[i] - shapeOffset} or less than 0 for input of ` +\n `shape ${$x.shape}`);\n }\n const attrs = { paddings, mode };\n const inputs = { x: $x };\n return ENGINE.runKernel(MirrorPad, inputs, attrs);\n}\nexport const mirrorPad = op({ mirrorPad_ });\n//# sourceMappingURL=mirror_pad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Mod } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Returns the mod of a and b element-wise.\n * `floor(x / y) * y + mod(x, y) = x`\n * Supports broadcasting.\n *\n * We also expose `tf.modStrict` which has the same signature as this op and\n * asserts that `a` and `b` are the same shape (does not broadcast).\n *\n * ```js\n * const a = tf.tensor1d([1, 4, 3, 16]);\n * const b = tf.tensor1d([1, 2, 9, 4]);\n *\n * a.mod(b).print(); // or tf.mod(a, b)\n * ```\n *\n * ```js\n * // Broadcast a mod b.\n * const a = tf.tensor1d([2, 4, 6, 8]);\n * const b = tf.scalar(5);\n *\n * a.mod(b).print(); // or tf.mod(a, b)\n * ```\n *\n * @param a The first tensor.\n * @param b The second tensor. Must have the same type as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction mod_(a, b) {\n let $a = convertToTensor(a, 'a', 'mod');\n let $b = convertToTensor(b, 'b', 'mod');\n [$a, $b] = makeTypesMatch($a, $b);\n const forward = (backend, save) => {\n const res = backend.mod($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Mod);\n}\nexport const mod = op({ mod_ });\n//# sourceMappingURL=mod.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes square of `x` element-wise: `x ^ 2`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, Math.sqrt(2), -1]);\n *\n * x.square().print(); // or tf.square(x)\n * ```\n * @param x The input Tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction square_(x) {\n const $x = convertToTensor(x, 'x', 'square');\n const attrs = {};\n const inputsToSave = [$x];\n const outputsToSave = [];\n return ENGINE.runKernelFunc((backend, save) => {\n save([$x]);\n return backend.square($x);\n }, { x: $x }, null /* grad */, 'Square', attrs, inputsToSave, outputsToSave);\n}\nexport const square = op({ square_ });\n//# sourceMappingURL=square.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam } from '../util';\nimport { expandShapeToKeepDim } from './axis_util';\nimport { cast } from './cast';\nimport { mean } from './mean';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { square } from './square';\nimport { sub } from './sub';\n/**\n * Calculates the mean and variance of `x`. The mean and variance are\n * calculated by aggregating the contents of `x` across `axes`. If `x` is\n * 1-D and `axes = [0]` this is just the mean and variance of a vector.\n *\n * @param x The input tensor.\n * @param axis The dimension(s) along with to compute mean and\n * variance. 
By default it reduces all dimensions.\n * @param keepDims If true, the moments have the same dimensionality as the\n * input.\n * @return An object with two keys: `mean` and `variance`.\n *\n * @doc {heading: 'Operations', subheading: 'Normalization'}\n */\nfunction moments_(x, axis = null, keepDims = false) {\n x = convertToTensor(x, 'x', 'moments');\n const axes = parseAxisParam(axis, x.shape);\n const xMean = mean(x, axes, keepDims);\n let keepDimsShape = xMean.shape;\n if (!keepDims) {\n keepDimsShape = expandShapeToKeepDim(xMean.shape, axes);\n }\n const devSquared = square(sub(cast(x, 'float32'), reshape(xMean, keepDimsShape)));\n const variance = mean(devSquared, axes, keepDims);\n return { mean: xMean, variance };\n}\nexport const moments = op({ moments_ });\n//# sourceMappingURL=moments.js.map", "import { convertToTensor, convertToTensorArray } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes the next states and outputs of a stack of LSTMCells.\n *\n * Each cell output is used as input to the next cell.\n *\n * Returns `[cellState, cellOutput]`.\n *\n * Derived from tf.contrib.rn.MultiRNNCell.\n *\n * @param lstmCells Array of LSTMCell functions.\n * @param data The input to the cell.\n * @param c Array of previous cell states.\n * @param h Array of previous cell outputs.\n *\n * @doc {heading: 'Operations', subheading: 'RNN'}\n */\nfunction multiRNNCell_(lstmCells, data, c, h) {\n const $data = convertToTensor(data, 'data', 'multiRNNCell');\n const $c = convertToTensorArray(c, 'c', 'multiRNNCell');\n const $h = convertToTensorArray(h, 'h', 'multiRNNCell');\n let input = $data;\n const newStates = [];\n for (let i = 0; i < lstmCells.length; i++) {\n const output = lstmCells[i](input, $c[i], $h[i]);\n newStates.push(output[0]);\n newStates.push(output[1]);\n input = output[1];\n }\n const newC = [];\n const newH = [];\n for (let i = 0; i < newStates.length; i += 2) {\n newC.push(newStates[i]);\n newH.push(newStates[i + 1]);\n }\n return [newC, newH];\n}\nexport const multiRNNCell = op({ multiRNNCell_ });\n//# sourceMappingURL=multi_rnn_cell.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Creates a `tf.Tensor` with values drawn from a multinomial distribution.\n *\n * ```js\n * const probs = tf.tensor([.75, .25]);\n * tf.multinomial(probs, 3).print();\n * ```\n *\n * @param logits 1D array with unnormalized log-probabilities, or\n * 2D array of shape `[batchSize, numOutcomes]`. 
See the `normalized`\n * parameter.\n * @param numSamples Number of samples to draw for each row slice.\n * @param seed The seed number.\n * @param normalized Whether the provided `logits` are normalized true\n * probabilities (sum to 1). Defaults to false.\n * @return 1D array of shape `[numSamples]`, or 2D array of shape\n * `[batchSize, numSamples]`, depending on the rank of the input.\n *\n * @doc {heading: 'Tensors', subheading: 'Random'}\n */\nfunction multinomial_(logits, numSamples, seed, normalized = false) {\n const $logits = convertToTensor(logits, 'logits', 'multinomial');\n const numOutcomes = $logits.size;\n const origRank = $logits.rank;\n if (numOutcomes < 2) {\n throw new Error(`Error in multinomial: you need at least 2 outcomes, but got ` +\n `${numOutcomes}.`);\n }\n if (origRank > 2) {\n throw new Error(`Rank of probabilities must be 1 or 2, but is ${origRank}`);\n }\n seed = seed || Math.random();\n const logits2D = origRank === 1 ? reshape($logits, [1, -1]) : $logits;\n const res = ENGINE.runKernelFunc(backend => backend.multinomial(logits2D, normalized, numSamples, seed), { logits2D });\n // tslint:disable-next-line:no-unnecessary-type-assertion\n return origRank === 1 ? reshape(res, [res.size]) : res;\n}\nexport const multinomial = op({ multinomial_ });\n//# sourceMappingURL=multinomial.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { NotEqual } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { op } from './operation';\n/**\n * Returns the truth value of (a != b) element-wise. Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3]);\n * const b = tf.tensor1d([0, 2, 3]);\n *\n * a.notEqual(b).print();\n * ```\n * @param a The first input tensor.\n * @param b The second input tensor. Must have the same dtype as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Logical'}\n */\nfunction notEqual_(a, b) {\n let $a = convertToTensor(a, 'a', 'notEqual');\n let $b = convertToTensor(b, 'b', 'notEqual');\n [$a, $b] = makeTypesMatch($a, $b);\n assertAndGetBroadcastShape($a.shape, $b.shape);\n const forward = (backend) => backend.notEqual($a, $b);\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, NotEqual);\n}\nexport const notEqual = op({ notEqual_ });\n//# sourceMappingURL=not_equal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Real } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Returns the real part of a complex (or real) tensor.\n *\n * Given a tensor input, this operation returns a tensor of type float that is\n * the real part of each element in input considered as a complex number.\n *\n * If the input is real, it simply makes a clone.\n *\n * ```js\n * const x = tf.complex([-2.25, 3.25], [4.75, 5.75]);\n * tf.real(x).print();\n * ```\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nfunction real_(input) {\n const $input = convertToTensor(input, 'input', 'real');\n const forward = (backend) => {\n return backend.real($input);\n };\n const inputs = { input: $input };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Real);\n}\nexport const real = op({ real_ });\n//# sourceMappingURL=real.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { OnesLike } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { complex } from './complex';\nimport { imag } from './imag';\nimport { op } from './operation';\nimport { real } from './real';\nimport { zerosLike } from './zeros_like';\n/**\n * Creates a `tf.Tensor` with all elements set to 1 with the same shape as the\n * given tensor.\n *\n * ```js\n * const x = tf.tensor([1, 2]);\n * tf.onesLike(x).print();\n * ```\n * @param x A tensor.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nfunction onesLike_(x) {\n const $x = convertToTensor(x, 'x', 'onesLike');\n const forward = (backend, save) => {\n if ($x.dtype === 'complex64') {\n const r = onesLike(real($x));\n const i = zerosLike(imag($x));\n return complex(r, i);\n }\n return backend.onesLike($x);\n };\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, OnesLike);\n}\nexport const onesLike = op({ onesLike_ });\n//# sourceMappingURL=ones_like.js.map", "import { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { matMul } from './mat_mul';\nimport { op } from 
'./operation';\nimport { reshape } from './reshape';\n/**\n * Computes the outer product of two vectors, `v1` and `v2`.\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3]);\n * const b = tf.tensor1d([3, 4, 5]);\n *\n * tf.outerProduct(a, b).print();\n * ```\n * @param v1 The first vector in the outer product operation.\n * @param v2 The second vector in the outer product operation.\n *\n * @doc {heading: 'Operations', subheading: 'Matrices'}\n */\nfunction outerProduct_(v1, v2) {\n const $v1 = convertToTensor(v1, 'v1', 'outerProduct');\n const $v2 = convertToTensor(v2, 'v2', 'outerProduct');\n util.assert($v1.rank === 1 && $v2.rank === 1, () => `Error in outerProduct: inputs must be rank 1, but got ranks ` +\n `${$v1.rank} and ${$v2.rank}.`);\n const v12D = reshape($v1, [-1, 1]);\n const v22D = reshape($v2, [1, -1]);\n return matMul(v12D, v22D);\n}\nexport const outerProduct = op({ outerProduct_ });\n//# sourceMappingURL=outer_product.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { PadV2 } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Pads a `tf.Tensor` with a given value and paddings.\n *\n * This operation implements `CONSTANT` mode. For `REFLECT` and `SYMMETRIC`,\n * refer to `tf.mirrorPad`\n *\n * Also available are stricter rank-specific methods with the same signature\n * as this method that assert that `paddings` is of given length.\n * - `tf.pad1d`\n * - `tf.pad2d`\n * - `tf.pad3d`\n * - `tf.pad4d`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4]);\n * x.pad([[1, 2]]).print();\n * ```\n * @param x The tensor to pad.\n * @param paddings An array of length `R` (the rank of the tensor), where\n * each element is a length-2 tuple of ints `[padBefore, padAfter]`,\n * specifying how much to pad along each dimension of the tensor.\n * @param constantValue The pad value to use. Defaults to 0.\n *\n * @doc {heading: 'Tensors', subheading: 'Transformations'}\n */\nfunction pad_(x, paddings, constantValue = 0) {\n const $x = convertToTensor(x, 'x', 'pad');\n if ($x.rank === 0) {\n throw new Error('pad(scalar) is not defined. Pass non-scalar to pad');\n }\n const forward = (backend, save) => {\n save([$x]);\n return backend.pad($x, paddings, constantValue);\n };\n const attrs = { paddings, constantValue };\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, PadV2, attrs);\n}\nexport const pad = op({ pad_ });\n//# sourceMappingURL=pad.js.map", "import { assert } from '../util';\nimport { op } from './operation';\nimport { pad } from './pad';\n/**\n * Pads a `tf.Tensor1D` with a given value and paddings. See `pad` for details.\n */\nfunction pad1d_(x, paddings, constantValue = 0) {\n assert(paddings.length === 2, () => 'Invalid number of paddings. 
Must be length of 2.');\n return pad(x, [paddings], constantValue);\n}\nexport const pad1d = op({ pad1d_ });\n//# sourceMappingURL=pad1d.js.map", "import { assert } from '../util';\nimport { op } from './operation';\nimport { pad } from './pad';\n/**\n * Pads a `tf.Tensor2D` with a given value and paddings. See `pad` for details.\n */\nfunction pad2d_(x, paddings, constantValue = 0) {\n assert(paddings.length === 2 && paddings[0].length === 2 &&\n paddings[1].length === 2, () => 'Invalid number of paddings. Must be length of 2 each.');\n return pad(x, paddings, constantValue);\n}\nexport const pad2d = op({ pad2d_ });\n//# sourceMappingURL=pad2d.js.map", "import { assert } from '../util';\nimport { op } from './operation';\nimport { pad } from './pad';\n/**\n * Pads a `tf.Tensor3D` with a given value and paddings. See `pad` for details.\n */\nfunction pad3d_(x, paddings, constantValue = 0) {\n assert(paddings.length === 3 && paddings[0].length === 2 &&\n paddings[1].length === 2 && paddings[2].length === 2, () => 'Invalid number of paddings. Must be length of 2 each.');\n return pad(x, paddings, constantValue);\n}\nexport const pad3d = op({ pad3d_ });\n//# sourceMappingURL=pad3d.js.map", "import { assert } from '../util';\nimport { op } from './operation';\nimport { pad } from './pad';\n/**\n * Pads a `tf.Tensor4D` with a given value and paddings. See `pad` for details.\n */\nfunction pad4d_(x, paddings, constantValue = 0) {\n assert(paddings.length === 4 && paddings[0].length === 2 &&\n paddings[1].length === 2 && paddings[2].length === 2 &&\n paddings[3].length === 2, () => 'Invalid number of paddings. Must be length of 2 each.');\n return pad(x, paddings, constantValue);\n}\nexport const pad4d = op({ pad4d_ });\n//# sourceMappingURL=pad4d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { SpaceToBatchND } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * This operation divides \"spatial\" dimensions `[1, ..., M]` of the input into\n * a grid of blocks of shape `blockShape`, and interleaves these blocks with\n * the \"batch\" dimension (0) such that in the output, the spatial\n * dimensions `[1, ..., M]` correspond to the position within the grid,\n * and the batch dimension combines both the position within a spatial block\n * and the original batch position. Prior to division into blocks,\n * the spatial dimensions of the input are optionally zero padded\n * according to `paddings`. See below for a precise description.\n *\n * ```js\n * const x = tf.tensor4d([1, 2, 3, 4], [1, 2, 2, 1]);\n * const blockShape = [2, 2];\n * const paddings = [[0, 0], [0, 0]];\n *\n * x.spaceToBatchND(blockShape, paddings).print();\n * ```\n *\n * @param x A `tf.Tensor`. 
N-D with `x.shape` = `[batch] + spatialShape +\n * remainingShape`, where spatialShape has `M` dimensions.\n * @param blockShape A 1-D array. Must have shape `[M]`, all values must\n * be >= 1.\n * @param paddings A 2-D array. Must have shape `[M, 2]`, all values must be >=\n * 0. `paddings[i] = [padStart, padEnd]` specifies the amount to zero-pad\n * from input dimension `i + 1`, which corresponds to spatial dimension `i`. It\n * is required that\n * `(inputShape[i + 1] + padStart + padEnd) % blockShape[i] === 0`\n *\n * This operation is equivalent to the following steps:\n *\n * 1. Zero-pad the start and end of dimensions `[1, ..., M]` of the input\n * according to `paddings` to produce `padded` of shape paddedShape.\n *\n * 2. Reshape `padded` to `reshapedPadded` of shape:\n * `[batch] + [paddedShape[1] / blockShape[0], blockShape[0], ...,\n * paddedShape[M] / blockShape[M-1], blockShape[M-1]] + remainingShape`\n *\n * 3. Permute dimensions of `reshapedPadded` to produce `permutedReshapedPadded`\n * of shape: `blockShape + [batch] + [paddedShape[1] / blockShape[0], ...,\n * paddedShape[M] / blockShape[M-1]] + remainingShape`\n *\n * 4. Reshape `permutedReshapedPadded` to flatten `blockShape` into the\n * batch dimension, producing an output tensor of shape:\n * `[batch * prod(blockShape)] + [paddedShape[1] / blockShape[0], ...,\n * paddedShape[M] / blockShape[M-1]] + remainingShape`\n *\n * @doc {heading: 'Tensors', subheading: 'Transformations'}\n */\nfunction spaceToBatchND_(x, blockShape, paddings) {\n const $x = convertToTensor(x, 'x', 'spaceToBatchND');\n util.assert($x.rank >= 1 + blockShape.length, () => `input rank ${$x.rank} should be > than [blockShape] ${blockShape.length}`);\n util.assert(paddings.length === blockShape.length, () => `paddings.shape[0] ${paddings.length} must be equal to [blockShape] ${blockShape.length}`);\n util.assert($x.shape.reduce((a, b, i) => {\n if (i > 0 && i <= blockShape.length) {\n return a &&\n ((b + paddings[i - 1][0] + paddings[i - 1][1]) %\n blockShape[i - 1] ===\n 0);\n }\n return a;\n }, true), () => `input spatial dimensions ${$x.shape.slice(1)} with paddings ${paddings.toString()} must be divisible by blockShapes ${blockShape.toString()}`);\n const forward = backend => backend.spaceToBatchND($x, blockShape, paddings);\n const inputs = { x: $x };\n const attrs = { blockShape, paddings };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, SpaceToBatchND, attrs);\n}\nexport const spaceToBatchND = op({ spaceToBatchND_ });\n//# sourceMappingURL=space_to_batch_nd.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { avgPool } from './avg_pool';\nimport { batchToSpaceND } from './batch_to_space_nd';\nimport * as conv_util from './conv_util';\nimport { maxPool } from './max_pool';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { spaceToBatchND } from './space_to_batch_nd';\n/**\n * Performs an N-D pooling operation\n *\n * @param input The input tensor, of rank 4 or rank 3 of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is assumed.\n * @param windowShape The filter size: `[filterHeight, filterWidth]`. If\n * `filterSize` is a single number, then `filterHeight == filterWidth`.\n * @param poolingType The type of pooling, either 'max' or 'avg'.\n * @param pad The type of padding algorithm:\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dilations The dilation rates: `[dilationHeight, dilationWidth]`\n * in which we sample input values across the height and width dimensions\n * in dilated pooling. Defaults to `[1, 1]`. If `dilationRate` is a single\n * number, then `dilationHeight == dilationWidth`. If it is greater than\n * 1, then all values of `strides` must be 1.\n * @param strides The strides of the pooling: `[strideHeight, strideWidth]`. If\n * `strides` is a single number, then `strideHeight == strideWidth`.\n *\n * @doc {heading: 'Operations', subheading: 'Convolution'}\n */\nfunction pool_(input, windowShape, poolingType, pad, dilations, strides) {\n if (dilations == null) {\n dilations = [1, 1];\n }\n if (strides == null) {\n strides = 1;\n }\n if (pad === 0) {\n pad = 'valid';\n }\n const $x = convertToTensor(input, 'x', 'maxPool');\n let x4D = $x;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n reshapedTo4D = true;\n x4D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2]]);\n }\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in pool: Either strides or dilations must be 1. 
' +\n `Got strides ${strides} and dilations '${dilations}'`);\n const convInfo = conv_util.computePool2DInfo(x4D.shape, windowShape, strides, dilations, pad);\n const dilation = [convInfo.dilationHeight, convInfo.dilationWidth];\n // The following implementation does batchToSpace(pool(spaceToBatch(x)))\n // whenever dilation > 1 since the TF kernels do not support dilation > 1.\n // tslint:disable-next-line:max-line-length\n // https://github.com/tensorflow/tensorflow/blob/50f6bb67dc98c9b74630b6047aae7a4f8a40fd02/tensorflow/python/ops/nn_ops.py#L1037\n let basePadding;\n if (pad === 'same') {\n basePadding = withSpaceToBatchBasePaddings([convInfo.filterHeight, convInfo.filterWidth], dilation);\n }\n else {\n basePadding = [[0, 0], [0, 0]];\n }\n const isDilationOne = dilation[0] === 1 && dilation[1] === 1;\n const [adjustedPadding, adjustedCrops] = requiredSpaceToBatchPaddings([convInfo.inHeight, convInfo.inWidth], dilation, basePadding);\n const convertedPad = isDilationOne ? pad : 'valid';\n const convertedX = isDilationOne ? x4D : spaceToBatchND(x4D, dilation, adjustedPadding);\n const forwardOp = poolingType === 'avg' ?\n () => avgPool(convertedX, windowShape, strides, convertedPad) :\n () => maxPool(convertedX, windowShape, strides, convertedPad);\n const y = forwardOp();\n const res = isDilationOne ? y : batchToSpaceND(y, dilation, adjustedCrops);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\n// Helper function to compute crops and paddings for pool with dilation > 1.\n// tslint:disable-next-line:max-line-length\n// https://github.com/tensorflow/tensorflow/blob/50f6bb67dc98c9b74630b6047aae7a4f8a40fd02/tensorflow/python/ops/array_ops.py#L2184\nfunction requiredSpaceToBatchPaddings(inputShape, blockShape, basePadding) {\n const padStart = basePadding.map(b => b[0]);\n const origPadEnd = basePadding.map(b => b[1]);\n const fullInputShape = inputShape.concat(padStart, origPadEnd);\n const padEndExtra = blockShape.map((b, i) => (b - fullInputShape[i] % b) % b);\n const padEnd = origPadEnd.map((s, i) => s + padEndExtra[i]);\n const paddings = blockShape.map((_, i) => [padStart[i], padEnd[i]]);\n const crops = blockShape.map((_, i) => [0, padEndExtra[i]]);\n return [paddings, crops];\n}\n// Helper function to compute base paddings for pool with dilation > 1.\n// tslint:disable-next-line:max-line-length\n// https://github.com/tensorflow/tensorflow/blob/50f6bb67dc98c9b74630b6047aae7a4f8a40fd02/tensorflow/python/ops/nn_ops.py#L524\nfunction withSpaceToBatchBasePaddings(filterShape, dilation) {\n // Spatial dimensions of the filters and the upsampled filters in which we\n // introduce (rate - 1) zeros between consecutive filter values.\n const dilatedFilterShape = filterShape.map((s, i) => {\n return s + (s - 1) * (dilation[i] - 1);\n });\n const padExtraShape = dilatedFilterShape.map(s => s - 1);\n // When padding is odd, we pad more at end, following the same\n // convention as conv2d.\n const padExtraStart = padExtraShape.map(s => Math.floor(s / 2));\n const padExtraEnd = padExtraShape.map((s, i) => s - padExtraStart[i]);\n return padExtraShape.map((_, i) => {\n return [padExtraStart[i], padExtraEnd[i]];\n });\n}\nexport const pool = op({ pool_ });\n//# sourceMappingURL=pool.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Pow } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes the power of one `tf.Tensor` to another. Supports broadcasting.\n *\n * Given a `tf.Tensor` x and a `tf.Tensor` y, this operation computes x^y for\n * corresponding elements in x and y. The result's dtype will be the upcasted\n * type of the `base` and `exp` dtypes.\n *\n * ```js\n * const a = tf.tensor([[2, 3], [4, 5]])\n * const b = tf.tensor([[1, 2], [3, 0]]).toInt();\n *\n * a.pow(b).print(); // or tf.pow(a, b)\n * ```\n *\n * ```js\n * const a = tf.tensor([[1, 2], [3, 4]])\n * const b = tf.tensor(2).toInt();\n *\n * a.pow(b).print(); // or tf.pow(a, b)\n * ```\n * We also expose `powStrict` which has the same signature as this op and\n * asserts that `base` and `exp` are the same shape (does not broadcast).\n *\n * @param base The base `tf.Tensor` to pow element-wise.\n * @param exp The exponent `tf.Tensor` to pow element-wise.\n *\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction pow_(base, exp) {\n let $base = convertToTensor(base, 'base', 'pow');\n let $exp = convertToTensor(exp, 'exp', 'pow');\n [$base, $exp] = makeTypesMatch($base, $exp);\n const inputs = { a: $base, b: $exp };\n const forward = (backend, save) => {\n const y = backend.pow($base, $exp);\n save([$base, $exp, y]);\n return y;\n };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Pow);\n}\nexport const pow = op({ pow_ });\n//# sourceMappingURL=pow.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Prelu } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes leaky rectified linear element-wise with parametric alphas.\n *\n * `x < 0 ? 
alpha * x : f(x) = x`\n *\n * ```js\n * const x = tf.tensor1d([-1, 2, -3, 4]);\n * const alpha = tf.scalar(0.1);\n *\n * x.prelu(alpha).print(); // or tf.prelu(x, alpha)\n * ```\n * @param x The input tensor.\n * @param alpha Scaling factor for negative values.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction prelu_(x, alpha) {\n const $x = convertToTensor(x, 'x', 'prelu');\n const $alpha = convertToTensor(alpha, 'alpha', 'prelu');\n const forward = (backend, save) => {\n const res = backend.prelu($x, $alpha);\n save([$x, $alpha]);\n return res;\n };\n const inputs = { x: $x, alpha: $alpha };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Prelu);\n}\nexport const prelu = op({ prelu_ });\n//# sourceMappingURL=prelu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Prod } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam } from '../util';\nimport { expandShapeToKeepDim, getAxesPermutation, getInnerMostAxes } from './axis_util';\nimport { cast } from './cast';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { transpose } from './transpose';\n/**\n * Computes the product of elements across dimensions of a `tf.Tensor`.\n *\n * Reduces the input along the dimensions given in `axes`. Unless `keepDims`\n * is true, the rank of the `tf.Tensor` is reduced by 1 for each entry in\n * `axes`. If `keepDims` is true, the reduced dimensions are retained with\n * length 1. If `axes` has no entries, all dimensions are reduced, and a\n * `tf.Tensor` with a single element is returned.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3]);\n *\n * x.prod().print(); // or tf.prod(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * const axis = 1;\n * x.prod(axis).print(); // or tf.prod(x, axis)\n * ```\n *\n * @param x The input tensor to compute the product over. If the dtype is `bool`\n * it will be converted to `int32` and the output dtype will be `int32`.\n * @param axis The dimension(s) to reduce. 
By default it reduces\n * all dimensions.\n * @param keepDims If true, retains reduced dimensions with size 1.\n *\n * @doc {heading: 'Operations', subheading: 'Reduction'}\n */\nfunction prod_(x, axis = null, keepDims = false) {\n let $x = convertToTensor(x, 'x', 'prod');\n if ($x.dtype === 'bool') {\n // bool is not an allowed type for the underlying kernel.\n $x = cast($x, 'int32');\n }\n const forward = (backend) => {\n const axes = parseAxisParam(axis, $x.shape);\n const permutation = getAxesPermutation(axes, $x.rank);\n let reductionAxes = axes;\n let permutedX = $x;\n if (permutation != null) {\n permutedX = transpose($x, permutation);\n reductionAxes = getInnerMostAxes(reductionAxes.length, $x.rank);\n }\n let value = backend.prod(permutedX, reductionAxes);\n if (keepDims) {\n const newShape = expandShapeToKeepDim(value.shape, axes);\n value = reshape(value, newShape);\n }\n return value;\n };\n const inputs = { x: $x };\n const attrs = { axis, keepDims };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Prod, attrs);\n}\nexport const prod = op({ prod_ });\n//# sourceMappingURL=prod.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { sizeFromShape } from '../util';\nimport { op } from './operation';\n/**\n * Creates a `tf.Tensor` with values sampled from a random number generator\n * function defined by the user.\n *\n * @param shape An array of integers defining the output tensor shape.\n * @param randFunction A random number generator function which is called\n * for each element in the output tensor.\n * @param dtype The data type of the output tensor. 
Defaults to 'float32'.\n */\nfunction rand_(shape, randFunction, dtype) {\n const size = sizeFromShape(shape);\n let values = null;\n if (dtype == null || dtype === 'float32') {\n values = new Float32Array(size);\n }\n else if (dtype === 'int32') {\n values = new Int32Array(size);\n }\n else if (dtype === 'bool') {\n values = new Uint8Array(size);\n }\n else {\n throw new Error(`Unknown data type ${dtype}`);\n }\n for (let i = 0; i < size; i++) {\n values[i] = randFunction();\n }\n return ENGINE.makeTensor(values, shape, dtype);\n}\nexport const rand = op({ rand_ });\n//# sourceMappingURL=rand.js.map", "// A port of an algorithm by Johannes Baag\u00F8e , 2010\n// http://baagoe.com/en/RandomMusings/javascript/\n// https://github.com/nquinlan/better-random-numbers-for-javascript-mirror\n// Original work is under MIT license -\n\n// Copyright (C) 2010 by Johannes Baag\u00F8e \n//\n// Permission is hereby granted, free of charge, to any person obtaining a copy\n// of this software and associated documentation files (the \"Software\"), to deal\n// in the Software without restriction, including without limitation the rights\n// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n// copies of the Software, and to permit persons to whom the Software is\n// furnished to do so, subject to the following conditions:\n// \n// The above copyright notice and this permission notice shall be included in\n// all copies or substantial portions of the Software.\n// \n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n// THE SOFTWARE.\n\n\n\n(function(global, module, define) {\n\nfunction Alea(seed) {\n var me = this, mash = Mash();\n\n me.next = function() {\n var t = 2091639 * me.s0 + me.c * 2.3283064365386963e-10; // 2^-32\n me.s0 = me.s1;\n me.s1 = me.s2;\n return me.s2 = t - (me.c = t | 0);\n };\n\n // Apply the seeding algorithm from Baagoe.\n me.c = 1;\n me.s0 = mash(' ');\n me.s1 = mash(' ');\n me.s2 = mash(' ');\n me.s0 -= mash(seed);\n if (me.s0 < 0) { me.s0 += 1; }\n me.s1 -= mash(seed);\n if (me.s1 < 0) { me.s1 += 1; }\n me.s2 -= mash(seed);\n if (me.s2 < 0) { me.s2 += 1; }\n mash = null;\n}\n\nfunction copy(f, t) {\n t.c = f.c;\n t.s0 = f.s0;\n t.s1 = f.s1;\n t.s2 = f.s2;\n return t;\n}\n\nfunction impl(seed, opts) {\n var xg = new Alea(seed),\n state = opts && opts.state,\n prng = xg.next;\n prng.int32 = function() { return (xg.next() * 0x100000000) | 0; }\n prng.double = function() {\n return prng() + (prng() * 0x200000 | 0) * 1.1102230246251565e-16; // 2^-53\n };\n prng.quick = prng;\n if (state) {\n if (typeof(state) == 'object') copy(state, xg);\n prng.state = function() { return copy(xg, {}); }\n }\n return prng;\n}\n\nfunction Mash() {\n var n = 0xefc8249d;\n\n var mash = function(data) {\n data = data.toString();\n for (var i = 0; i < data.length; i++) {\n n += data.charCodeAt(i);\n var h = 0.02519603282416938 * n;\n n = h >>> 0;\n h -= n;\n h *= n;\n n = h >>> 0;\n h -= n;\n n += h * 0x100000000; // 2^32\n }\n return (n >>> 0) * 2.3283064365386963e-10; // 2^-32\n };\n\n return mash;\n}\n\n\nif (module && module.exports) {\n module.exports = impl;\n} 
else if (define && define.amd) {\n define(function() { return impl; });\n} else {\n this.alea = impl;\n}\n\n})(\n this,\n (typeof module) == 'object' && module, // present in node.js\n (typeof define) == 'function' && define // present with an AMD loader\n);\n\n\n", "// A Javascript implementaion of the \"xor128\" prng algorithm by\n// George Marsaglia. See http://www.jstatsoft.org/v08/i14/paper\n\n(function(global, module, define) {\n\nfunction XorGen(seed) {\n var me = this, strseed = '';\n\n me.x = 0;\n me.y = 0;\n me.z = 0;\n me.w = 0;\n\n // Set up generator function.\n me.next = function() {\n var t = me.x ^ (me.x << 11);\n me.x = me.y;\n me.y = me.z;\n me.z = me.w;\n return me.w ^= (me.w >>> 19) ^ t ^ (t >>> 8);\n };\n\n if (seed === (seed | 0)) {\n // Integer seed.\n me.x = seed;\n } else {\n // String seed.\n strseed += seed;\n }\n\n // Mix in string seed, then discard an initial batch of 64 values.\n for (var k = 0; k < strseed.length + 64; k++) {\n me.x ^= strseed.charCodeAt(k) | 0;\n me.next();\n }\n}\n\nfunction copy(f, t) {\n t.x = f.x;\n t.y = f.y;\n t.z = f.z;\n t.w = f.w;\n return t;\n}\n\nfunction impl(seed, opts) {\n var xg = new XorGen(seed),\n state = opts && opts.state,\n prng = function() { return (xg.next() >>> 0) / 0x100000000; };\n prng.double = function() {\n do {\n var top = xg.next() >>> 11,\n bot = (xg.next() >>> 0) / 0x100000000,\n result = (top + bot) / (1 << 21);\n } while (result === 0);\n return result;\n };\n prng.int32 = xg.next;\n prng.quick = prng;\n if (state) {\n if (typeof(state) == 'object') copy(state, xg);\n prng.state = function() { return copy(xg, {}); }\n }\n return prng;\n}\n\nif (module && module.exports) {\n module.exports = impl;\n} else if (define && define.amd) {\n define(function() { return impl; });\n} else {\n this.xor128 = impl;\n}\n\n})(\n this,\n (typeof module) == 'object' && module, // present in node.js\n (typeof define) == 'function' && define // present with an AMD loader\n);\n\n\n", "// A Javascript implementaion of the \"xorwow\" prng algorithm by\n// George Marsaglia. 
See http://www.jstatsoft.org/v08/i14/paper\n\n(function(global, module, define) {\n\nfunction XorGen(seed) {\n var me = this, strseed = '';\n\n // Set up generator function.\n me.next = function() {\n var t = (me.x ^ (me.x >>> 2));\n me.x = me.y; me.y = me.z; me.z = me.w; me.w = me.v;\n return (me.d = (me.d + 362437 | 0)) +\n (me.v = (me.v ^ (me.v << 4)) ^ (t ^ (t << 1))) | 0;\n };\n\n me.x = 0;\n me.y = 0;\n me.z = 0;\n me.w = 0;\n me.v = 0;\n\n if (seed === (seed | 0)) {\n // Integer seed.\n me.x = seed;\n } else {\n // String seed.\n strseed += seed;\n }\n\n // Mix in string seed, then discard an initial batch of 64 values.\n for (var k = 0; k < strseed.length + 64; k++) {\n me.x ^= strseed.charCodeAt(k) | 0;\n if (k == strseed.length) {\n me.d = me.x << 10 ^ me.x >>> 4;\n }\n me.next();\n }\n}\n\nfunction copy(f, t) {\n t.x = f.x;\n t.y = f.y;\n t.z = f.z;\n t.w = f.w;\n t.v = f.v;\n t.d = f.d;\n return t;\n}\n\nfunction impl(seed, opts) {\n var xg = new XorGen(seed),\n state = opts && opts.state,\n prng = function() { return (xg.next() >>> 0) / 0x100000000; };\n prng.double = function() {\n do {\n var top = xg.next() >>> 11,\n bot = (xg.next() >>> 0) / 0x100000000,\n result = (top + bot) / (1 << 21);\n } while (result === 0);\n return result;\n };\n prng.int32 = xg.next;\n prng.quick = prng;\n if (state) {\n if (typeof(state) == 'object') copy(state, xg);\n prng.state = function() { return copy(xg, {}); }\n }\n return prng;\n}\n\nif (module && module.exports) {\n module.exports = impl;\n} else if (define && define.amd) {\n define(function() { return impl; });\n} else {\n this.xorwow = impl;\n}\n\n})(\n this,\n (typeof module) == 'object' && module, // present in node.js\n (typeof define) == 'function' && define // present with an AMD loader\n);\n\n\n", "// A Javascript implementaion of the \"xorshift7\" algorithm by\n// Fran\u00E7ois Panneton and Pierre L'ecuyer:\n// \"On the Xorgshift Random Number Generators\"\n// http://saluc.engr.uconn.edu/refs/crypto/rng/panneton05onthexorshift.pdf\n\n(function(global, module, define) {\n\nfunction XorGen(seed) {\n var me = this;\n\n // Set up generator function.\n me.next = function() {\n // Update xor generator.\n var X = me.x, i = me.i, t, v, w;\n t = X[i]; t ^= (t >>> 7); v = t ^ (t << 24);\n t = X[(i + 1) & 7]; v ^= t ^ (t >>> 10);\n t = X[(i + 3) & 7]; v ^= t ^ (t >>> 3);\n t = X[(i + 4) & 7]; v ^= t ^ (t << 7);\n t = X[(i + 7) & 7]; t = t ^ (t << 13); v ^= t ^ (t << 9);\n X[i] = v;\n me.i = (i + 1) & 7;\n return v;\n };\n\n function init(me, seed) {\n var j, w, X = [];\n\n if (seed === (seed | 0)) {\n // Seed state array using a 32-bit integer.\n w = X[0] = seed;\n } else {\n // Seed state using a string.\n seed = '' + seed;\n for (j = 0; j < seed.length; ++j) {\n X[j & 7] = (X[j & 7] << 15) ^\n (seed.charCodeAt(j) + X[(j + 1) & 7] << 13);\n }\n }\n // Enforce an array length of 8, not all zeroes.\n while (X.length < 8) X.push(0);\n for (j = 0; j < 8 && X[j] === 0; ++j);\n if (j == 8) w = X[7] = -1; else w = X[j];\n\n me.x = X;\n me.i = 0;\n\n // Discard an initial 256 values.\n for (j = 256; j > 0; --j) {\n me.next();\n }\n }\n\n init(me, seed);\n}\n\nfunction copy(f, t) {\n t.x = f.x.slice();\n t.i = f.i;\n return t;\n}\n\nfunction impl(seed, opts) {\n if (seed == null) seed = +(new Date);\n var xg = new XorGen(seed),\n state = opts && opts.state,\n prng = function() { return (xg.next() >>> 0) / 0x100000000; };\n prng.double = function() {\n do {\n var top = xg.next() >>> 11,\n bot = (xg.next() >>> 0) / 0x100000000,\n result = (top + 
bot) / (1 << 21);\n } while (result === 0);\n return result;\n };\n prng.int32 = xg.next;\n prng.quick = prng;\n if (state) {\n if (state.x) copy(state, xg);\n prng.state = function() { return copy(xg, {}); }\n }\n return prng;\n}\n\nif (module && module.exports) {\n module.exports = impl;\n} else if (define && define.amd) {\n define(function() { return impl; });\n} else {\n this.xorshift7 = impl;\n}\n\n})(\n this,\n (typeof module) == 'object' && module, // present in node.js\n (typeof define) == 'function' && define // present with an AMD loader\n);\n\n", "// A Javascript implementaion of Richard Brent's Xorgens xor4096 algorithm.\n//\n// This fast non-cryptographic random number generator is designed for\n// use in Monte-Carlo algorithms. It combines a long-period xorshift\n// generator with a Weyl generator, and it passes all common batteries\n// of stasticial tests for randomness while consuming only a few nanoseconds\n// for each prng generated. For background on the generator, see Brent's\n// paper: \"Some long-period random number generators using shifts and xors.\"\n// http://arxiv.org/pdf/1004.3115v1.pdf\n//\n// Usage:\n//\n// var xor4096 = require('xor4096');\n// random = xor4096(1); // Seed with int32 or string.\n// assert.equal(random(), 0.1520436450538547); // (0, 1) range, 53 bits.\n// assert.equal(random.int32(), 1806534897); // signed int32, 32 bits.\n//\n// For nonzero numeric keys, this impelementation provides a sequence\n// identical to that by Brent's xorgens 3 implementaion in C. This\n// implementation also provides for initalizing the generator with\n// string seeds, or for saving and restoring the state of the generator.\n//\n// On Chrome, this prng benchmarks about 2.1 times slower than\n// Javascript's built-in Math.random().\n\n(function(global, module, define) {\n\nfunction XorGen(seed) {\n var me = this;\n\n // Set up generator function.\n me.next = function() {\n var w = me.w,\n X = me.X, i = me.i, t, v;\n // Update Weyl generator.\n me.w = w = (w + 0x61c88647) | 0;\n // Update xor generator.\n v = X[(i + 34) & 127];\n t = X[i = ((i + 1) & 127)];\n v ^= v << 13;\n t ^= t << 17;\n v ^= v >>> 15;\n t ^= t >>> 12;\n // Update Xor generator array state.\n v = X[i] = v ^ t;\n me.i = i;\n // Result is the combination.\n return (v + (w ^ (w >>> 16))) | 0;\n };\n\n function init(me, seed) {\n var t, v, i, j, w, X = [], limit = 128;\n if (seed === (seed | 0)) {\n // Numeric seeds initialize v, which is used to generates X.\n v = seed;\n seed = null;\n } else {\n // String seeds are mixed into v and X one character at a time.\n seed = seed + '\\0';\n v = 0;\n limit = Math.max(limit, seed.length);\n }\n // Initialize circular array and weyl value.\n for (i = 0, j = -32; j < limit; ++j) {\n // Put the unicode characters into the array, and shuffle them.\n if (seed) v ^= seed.charCodeAt((j + 32) % seed.length);\n // After 32 shuffles, take v as the starting w value.\n if (j === 0) w = v;\n v ^= v << 10;\n v ^= v >>> 15;\n v ^= v << 4;\n v ^= v >>> 13;\n if (j >= 0) {\n w = (w + 0x61c88647) | 0; // Weyl.\n t = (X[j & 127] ^= (v + w)); // Combine xor and weyl to init array.\n i = (0 == t) ? i + 1 : 0; // Count zeroes.\n }\n }\n // We have detected all zeroes; make the key nonzero.\n if (i >= 128) {\n X[(seed && seed.length || 0) & 127] = -1;\n }\n // Run the generator 512 times to further mix the state before using it.\n // Factoring this as a function slows the main generator, so it is just\n // unrolled here. 
The weyl generator is not advanced while warming up.\n i = 127;\n for (j = 4 * 128; j > 0; --j) {\n v = X[(i + 34) & 127];\n t = X[i = ((i + 1) & 127)];\n v ^= v << 13;\n t ^= t << 17;\n v ^= v >>> 15;\n t ^= t >>> 12;\n X[i] = v ^ t;\n }\n // Storing state as object members is faster than using closure variables.\n me.w = w;\n me.X = X;\n me.i = i;\n }\n\n init(me, seed);\n}\n\nfunction copy(f, t) {\n t.i = f.i;\n t.w = f.w;\n t.X = f.X.slice();\n return t;\n};\n\nfunction impl(seed, opts) {\n if (seed == null) seed = +(new Date);\n var xg = new XorGen(seed),\n state = opts && opts.state,\n prng = function() { return (xg.next() >>> 0) / 0x100000000; };\n prng.double = function() {\n do {\n var top = xg.next() >>> 11,\n bot = (xg.next() >>> 0) / 0x100000000,\n result = (top + bot) / (1 << 21);\n } while (result === 0);\n return result;\n };\n prng.int32 = xg.next;\n prng.quick = prng;\n if (state) {\n if (state.X) copy(state, xg);\n prng.state = function() { return copy(xg, {}); }\n }\n return prng;\n}\n\nif (module && module.exports) {\n module.exports = impl;\n} else if (define && define.amd) {\n define(function() { return impl; });\n} else {\n this.xor4096 = impl;\n}\n\n})(\n this, // window object or global\n (typeof module) == 'object' && module, // present in node.js\n (typeof define) == 'function' && define // present with an AMD loader\n);\n", "// A Javascript implementaion of the \"Tyche-i\" prng algorithm by\n// Samuel Neves and Filipe Araujo.\n// See https://eden.dei.uc.pt/~sneves/pubs/2011-snfa2.pdf\n\n(function(global, module, define) {\n\nfunction XorGen(seed) {\n var me = this, strseed = '';\n\n // Set up generator function.\n me.next = function() {\n var b = me.b, c = me.c, d = me.d, a = me.a;\n b = (b << 25) ^ (b >>> 7) ^ c;\n c = (c - d) | 0;\n d = (d << 24) ^ (d >>> 8) ^ a;\n a = (a - b) | 0;\n me.b = b = (b << 20) ^ (b >>> 12) ^ c;\n me.c = c = (c - d) | 0;\n me.d = (d << 16) ^ (c >>> 16) ^ a;\n return me.a = (a - b) | 0;\n };\n\n /* The following is non-inverted tyche, which has better internal\n * bit diffusion, but which is about 25% slower than tyche-i in JS.\n me.next = function() {\n var a = me.a, b = me.b, c = me.c, d = me.d;\n a = (me.a + me.b | 0) >>> 0;\n d = me.d ^ a; d = d << 16 ^ d >>> 16;\n c = me.c + d | 0;\n b = me.b ^ c; b = b << 12 ^ d >>> 20;\n me.a = a = a + b | 0;\n d = d ^ a; me.d = d = d << 8 ^ d >>> 24;\n me.c = c = c + d | 0;\n b = b ^ c;\n return me.b = (b << 7 ^ b >>> 25);\n }\n */\n\n me.a = 0;\n me.b = 0;\n me.c = 2654435769 | 0;\n me.d = 1367130551;\n\n if (seed === Math.floor(seed)) {\n // Integer seed.\n me.a = (seed / 0x100000000) | 0;\n me.b = seed | 0;\n } else {\n // String seed.\n strseed += seed;\n }\n\n // Mix in string seed, then discard an initial batch of 64 values.\n for (var k = 0; k < strseed.length + 20; k++) {\n me.b ^= strseed.charCodeAt(k) | 0;\n me.next();\n }\n}\n\nfunction copy(f, t) {\n t.a = f.a;\n t.b = f.b;\n t.c = f.c;\n t.d = f.d;\n return t;\n};\n\nfunction impl(seed, opts) {\n var xg = new XorGen(seed),\n state = opts && opts.state,\n prng = function() { return (xg.next() >>> 0) / 0x100000000; };\n prng.double = function() {\n do {\n var top = xg.next() >>> 11,\n bot = (xg.next() >>> 0) / 0x100000000,\n result = (top + bot) / (1 << 21);\n } while (result === 0);\n return result;\n };\n prng.int32 = xg.next;\n prng.quick = prng;\n if (state) {\n if (typeof(state) == 'object') copy(state, xg);\n prng.state = function() { return copy(xg, {}); }\n }\n return prng;\n}\n\nif (module && module.exports) {\n 
module.exports = impl;\n} else if (define && define.amd) {\n define(function() { return impl; });\n} else {\n this.tychei = impl;\n}\n\n})(\n this,\n (typeof module) == 'object' && module, // present in node.js\n (typeof define) == 'function' && define // present with an AMD loader\n);\n\n\n", "/*\nCopyright 2014 David Bau.\n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n\"Software\"), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\nSOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n*/\n\n(function (pool, math) {\n//\n// The following constants are related to IEEE 754 limits.\n//\nvar global = this,\n width = 256, // each RC4 output is 0 <= x < 256\n chunks = 6, // at least six RC4 outputs for each double\n digits = 52, // there are 52 significant digits in a double\n rngname = 'random', // rngname: name for Math.random and Math.seedrandom\n startdenom = math.pow(width, chunks),\n significance = math.pow(2, digits),\n overflow = significance * 2,\n mask = width - 1,\n nodecrypto; // node.js crypto module, initialized at the bottom.\n\n//\n// seedrandom()\n// This is the seedrandom function described above.\n//\nfunction seedrandom(seed, options, callback) {\n var key = [];\n options = (options == true) ? { entropy: true } : (options || {});\n\n // Flatten the seed string or build one from local entropy if needed.\n var shortseed = mixkey(flatten(\n options.entropy ? [seed, tostring(pool)] :\n (seed == null) ? 
autoseed() : seed, 3), key);\n\n // Use the seed to initialize an ARC4 generator.\n var arc4 = new ARC4(key);\n\n // This function returns a random double in [0, 1) that contains\n // randomness in every bit of the mantissa of the IEEE 754 value.\n var prng = function() {\n var n = arc4.g(chunks), // Start with a numerator n < 2 ^ 48\n d = startdenom, // and denominator d = 2 ^ 48.\n x = 0; // and no 'extra last byte'.\n while (n < significance) { // Fill up all significant digits by\n n = (n + x) * width; // shifting numerator and\n d *= width; // denominator and generating a\n x = arc4.g(1); // new least-significant-byte.\n }\n while (n >= overflow) { // To avoid rounding up, before adding\n n /= 2; // last byte, shift everything\n d /= 2; // right using integer math until\n x >>>= 1; // we have exactly the desired bits.\n }\n return (n + x) / d; // Form the number within [0, 1).\n };\n\n prng.int32 = function() { return arc4.g(4) | 0; }\n prng.quick = function() { return arc4.g(4) / 0x100000000; }\n prng.double = prng;\n\n // Mix the randomness into accumulated entropy.\n mixkey(tostring(arc4.S), pool);\n\n // Calling convention: what to return as a function of prng, seed, is_math.\n return (options.pass || callback ||\n function(prng, seed, is_math_call, state) {\n if (state) {\n // Load the arc4 state from the given state if it has an S array.\n if (state.S) { copy(state, arc4); }\n // Only provide the .state method if requested via options.state.\n prng.state = function() { return copy(arc4, {}); }\n }\n\n // If called as a method of Math (Math.seedrandom()), mutate\n // Math.random because that is how seedrandom.js has worked since v1.0.\n if (is_math_call) { math[rngname] = prng; return seed; }\n\n // Otherwise, it is a newer calling convention, so return the\n // prng directly.\n else return prng;\n })(\n prng,\n shortseed,\n 'global' in options ? options.global : (this == math),\n options.state);\n}\nmath['seed' + rngname] = seedrandom;\n\n//\n// ARC4\n//\n// An ARC4 implementation. The constructor takes a key in the form of\n// an array of at most (width) integers that should be 0 <= x < (width).\n//\n// The g(count) method returns a pseudorandom integer that concatenates\n// the next (count) outputs from ARC4. Its return value is a number x\n// that is in the range 0 <= x < (width ^ count).\n//\nfunction ARC4(key) {\n var t, keylen = key.length,\n me = this, i = 0, j = me.i = me.j = 0, s = me.S = [];\n\n // The empty key [] is treated as [0].\n if (!keylen) { key = [keylen++]; }\n\n // Set up S using the standard key scheduling algorithm.\n while (i < width) {\n s[i] = i++;\n }\n for (i = 0; i < width; i++) {\n s[i] = s[j = mask & (j + key[i % keylen] + (t = s[i]))];\n s[j] = t;\n }\n\n // The \"g\" method returns the next (count) outputs as one number.\n (me.g = function(count) {\n // Using instance members instead of closure state nearly doubles speed.\n var t, r = 0,\n i = me.i, j = me.j, s = me.S;\n while (count--) {\n t = s[i = mask & (i + 1)];\n r = r * width + s[mask & ((s[i] = s[j = mask & (j + t)]) + (s[j] = t))];\n }\n me.i = i; me.j = j;\n return r;\n // For robust unpredictability, the function call below automatically\n // discards an initial batch of values. 
This is called RC4-drop[256].\n // See http://google.com/search?q=rsa+fluhrer+response&btnI\n })(width);\n}\n\n//\n// copy()\n// Copies internal state of ARC4 to or from a plain object.\n//\nfunction copy(f, t) {\n t.i = f.i;\n t.j = f.j;\n t.S = f.S.slice();\n return t;\n};\n\n//\n// flatten()\n// Converts an object tree to nested arrays of strings.\n//\nfunction flatten(obj, depth) {\n var result = [], typ = (typeof obj), prop;\n if (depth && typ == 'object') {\n for (prop in obj) {\n try { result.push(flatten(obj[prop], depth - 1)); } catch (e) {}\n }\n }\n return (result.length ? result : typ == 'string' ? obj : obj + '\\0');\n}\n\n//\n// mixkey()\n// Mixes a string seed into a key that is an array of integers, and\n// returns a shortened string seed that is equivalent to the result key.\n//\nfunction mixkey(seed, key) {\n var stringseed = seed + '', smear, j = 0;\n while (j < stringseed.length) {\n key[mask & j] =\n mask & ((smear ^= key[mask & j] * 19) + stringseed.charCodeAt(j++));\n }\n return tostring(key);\n}\n\n//\n// autoseed()\n// Returns an object for autoseeding, using window.crypto and Node crypto\n// module if available.\n//\nfunction autoseed() {\n try {\n var out;\n if (nodecrypto && (out = nodecrypto.randomBytes)) {\n // The use of 'out' to remember randomBytes makes tight minified code.\n out = out(width);\n } else {\n out = new Uint8Array(width);\n (global.crypto || global.msCrypto).getRandomValues(out);\n }\n return tostring(out);\n } catch (e) {\n var browser = global.navigator,\n plugins = browser && browser.plugins;\n return [+new Date, global, plugins, global.screen, tostring(pool)];\n }\n}\n\n//\n// tostring()\n// Converts an array of charcodes to a string\n//\nfunction tostring(a) {\n return String.fromCharCode.apply(0, a);\n}\n\n//\n// When seedrandom.js is loaded, we immediately mix a few bits\n// from the built-in RNG into the entropy pool. Because we do\n// not want to interfere with deterministic PRNG state later,\n// seedrandom will not call math.random on its own again after\n// initialization.\n//\nmixkey(math.random(), pool);\n\n//\n// Nodejs and AMD support: export the implementation as a module using\n// either convention.\n//\nif ((typeof module) == 'object' && module.exports) {\n module.exports = seedrandom;\n // When in node.js, try using crypto package for autoseeding.\n try {\n nodecrypto = require('crypto');\n } catch (ex) {}\n} else if ((typeof define) == 'function' && define.amd) {\n define(function() { return seedrandom; });\n}\n\n// End anonymous scope, and pass initial values.\n})(\n [], // pool: entropy pool starts empty\n Math // math: package containing random, pow, and seedrandom\n);\n", "// A library of seedable RNGs implemented in Javascript.\n//\n// Usage:\n//\n// var seedrandom = require('seedrandom');\n// var random = seedrandom(1); // or any seed.\n// var x = random(); // 0 <= x < 1. Every bit is random.\n// var x = random.quick(); // 0 <= x < 1. 
32 bits of randomness.\n\n// alea, a 53-bit multiply-with-carry generator by Johannes Baag\u00F8e.\n// Period: ~2^116\n// Reported to pass all BigCrush tests.\nvar alea = require('./lib/alea');\n\n// xor128, a pure xor-shift generator by George Marsaglia.\n// Period: 2^128-1.\n// Reported to fail: MatrixRank and LinearComp.\nvar xor128 = require('./lib/xor128');\n\n// xorwow, George Marsaglia's 160-bit xor-shift combined plus weyl.\n// Period: 2^192-2^32\n// Reported to fail: CollisionOver, SimpPoker, and LinearComp.\nvar xorwow = require('./lib/xorwow');\n\n// xorshift7, by Fran\u00E7ois Panneton and Pierre L'ecuyer, takes\n// a different approach: it adds robustness by allowing more shifts\n// than Marsaglia's original three. It is a 7-shift generator\n// with 256 bits, that passes BigCrush with no systmatic failures.\n// Period 2^256-1.\n// No systematic BigCrush failures reported.\nvar xorshift7 = require('./lib/xorshift7');\n\n// xor4096, by Richard Brent, is a 4096-bit xor-shift with a\n// very long period that also adds a Weyl generator. It also passes\n// BigCrush with no systematic failures. Its long period may\n// be useful if you have many generators and need to avoid\n// collisions.\n// Period: 2^4128-2^32.\n// No systematic BigCrush failures reported.\nvar xor4096 = require('./lib/xor4096');\n\n// Tyche-i, by Samuel Neves and Filipe Araujo, is a bit-shifting random\n// number generator derived from ChaCha, a modern stream cipher.\n// https://eden.dei.uc.pt/~sneves/pubs/2011-snfa2.pdf\n// Period: ~2^127\n// No systematic BigCrush failures reported.\nvar tychei = require('./lib/tychei');\n\n// The original ARC4-based prng included in this library.\n// Period: ~2^1600\nvar sr = require('./seedrandom');\n\nsr.alea = alea;\nsr.xor128 = xor128;\nsr.xorwow = xorwow;\nsr.xorshift7 = xorshift7;\nsr.xor4096 = xor4096;\nsr.tychei = tychei;\n\nmodule.exports = sr;\n", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as seedrandom from 'seedrandom';\nimport { expectNumbersClose, testEpsilon } from '../test_util';\n// https://en.wikipedia.org/wiki/Marsaglia_polar_method\nexport class MPRandGauss {\n constructor(mean, stdDeviation, dtype, truncated, seed) {\n this.mean = mean;\n this.stdDev = stdDeviation;\n this.dtype = dtype;\n this.nextVal = NaN;\n this.truncated = truncated;\n if (this.truncated) {\n this.upper = this.mean + this.stdDev * 2;\n this.lower = this.mean - this.stdDev * 2;\n }\n const seedValue = seed ? seed : Math.random();\n this.random = seedrandom.alea(seedValue.toString());\n }\n /** Returns next sample from a Gaussian distribution. 
*/\n nextValue() {\n if (!isNaN(this.nextVal)) {\n const value = this.nextVal;\n this.nextVal = NaN;\n return value;\n }\n let resultX, resultY;\n let isValid = false;\n while (!isValid) {\n let v1, v2, s;\n do {\n v1 = 2 * this.random() - 1;\n v2 = 2 * this.random() - 1;\n s = v1 * v1 + v2 * v2;\n } while (s >= 1 || s === 0);\n const mul = Math.sqrt(-2.0 * Math.log(s) / s);\n resultX = this.mean + this.stdDev * v1 * mul;\n resultY = this.mean + this.stdDev * v2 * mul;\n if (!this.truncated || this.isValidTruncated(resultX)) {\n isValid = true;\n }\n }\n if (!this.truncated || this.isValidTruncated(resultY)) {\n this.nextVal = this.convertValue(resultY);\n }\n return this.convertValue(resultX);\n }\n /** Handles proper rounding for non-floating-point numbers. */\n convertValue(value) {\n if (this.dtype == null || this.dtype === 'float32') {\n return value;\n }\n return Math.round(value);\n }\n /** Returns true if less than 2-standard-deviations from the mean. */\n isValidTruncated(value) {\n return value <= this.upper && value >= this.lower;\n }\n}\n// Marsaglia, George, and Wai Wan Tsang. 2000. \"A Simple Method for Generating\n// Gamma Variables.\"\nexport class RandGamma {\n constructor(alpha, beta, dtype, seed) {\n this.alpha = alpha;\n this.beta = 1 / beta; // convert rate to scale parameter\n this.dtype = dtype;\n const seedValue = seed ? seed : Math.random();\n this.randu = seedrandom.alea(seedValue.toString());\n this.randn = new MPRandGauss(0, 1, dtype, false, this.randu());\n if (alpha < 1) {\n this.d = alpha + (2 / 3);\n }\n else {\n this.d = alpha - (1 / 3);\n }\n this.c = 1 / Math.sqrt(9 * this.d);\n }\n /** Returns next sample from a gamma distribution. */\n nextValue() {\n let x2, v0, v1, x, u, v;\n while (true) {\n do {\n x = this.randn.nextValue();\n v = 1 + (this.c * x);\n } while (v <= 0);\n v *= v * v;\n x2 = x * x;\n v0 = 1 - (0.331 * x2 * x2);\n v1 = (0.5 * x2) + (this.d * (1 - v + Math.log(v)));\n u = this.randu();\n if (u < v0 || Math.log(u) < v1) {\n break;\n }\n }\n v = (1 / this.beta) * this.d * v;\n if (this.alpha < 1) {\n v *= Math.pow(this.randu(), 1 / this.alpha);\n }\n return this.convertValue(v);\n }\n /** Handles proper rounding for non-floating-point numbers. */\n convertValue(value) {\n if (this.dtype === 'float32') {\n return value;\n }\n return Math.round(value);\n }\n}\nexport class UniformRandom {\n constructor(min = 0, max = 1, dtype, seed) {\n /** Handles proper rounding for non floating point numbers. 
*/\n this.canReturnFloat = () => (this.dtype == null || this.dtype === 'float32');\n this.min = min;\n this.range = max - min;\n this.dtype = dtype;\n if (seed == null) {\n seed = Math.random();\n }\n if (typeof seed === 'number') {\n seed = seed.toString();\n }\n if (!this.canReturnFloat() && this.range <= 1) {\n throw new Error(`The difference between ${min} - ${max} <= 1 and dtype is not float`);\n }\n this.random = seedrandom.alea(seed);\n }\n convertValue(value) {\n if (this.canReturnFloat()) {\n return value;\n }\n return Math.round(value);\n }\n nextValue() {\n return this.convertValue(this.min + this.range * this.random());\n }\n}\nexport function jarqueBeraNormalityTest(values) {\n // https://en.wikipedia.org/wiki/Jarque%E2%80%93Bera_test\n const n = values.length;\n const s = skewness(values);\n const k = kurtosis(values);\n const jb = n / 6 * (Math.pow(s, 2) + 0.25 * Math.pow(k - 3, 2));\n // JB test requires 2-degress of freedom from Chi-Square @ 0.95:\n // http://www.itl.nist.gov/div898/handbook/eda/section3/eda3674.htm\n const CHI_SQUARE_2DEG = 5.991;\n if (jb > CHI_SQUARE_2DEG) {\n throw new Error(`Invalid p-value for JB: ${jb}`);\n }\n}\nexport function expectArrayInMeanStdRange(actual, expectedMean, expectedStdDev, epsilon) {\n if (epsilon == null) {\n epsilon = testEpsilon();\n }\n const actualMean = mean(actual);\n expectNumbersClose(actualMean, expectedMean, epsilon);\n expectNumbersClose(standardDeviation(actual, actualMean), expectedStdDev, epsilon);\n}\nfunction mean(values) {\n let sum = 0;\n for (let i = 0; i < values.length; i++) {\n sum += values[i];\n }\n return sum / values.length;\n}\nfunction standardDeviation(values, mean) {\n let squareDiffSum = 0;\n for (let i = 0; i < values.length; i++) {\n const diff = values[i] - mean;\n squareDiffSum += diff * diff;\n }\n return Math.sqrt(squareDiffSum / values.length);\n}\nfunction kurtosis(values) {\n // https://en.wikipedia.org/wiki/Kurtosis\n const valuesMean = mean(values);\n const n = values.length;\n let sum2 = 0;\n let sum4 = 0;\n for (let i = 0; i < n; i++) {\n const v = values[i] - valuesMean;\n sum2 += Math.pow(v, 2);\n sum4 += Math.pow(v, 4);\n }\n return (1 / n) * sum4 / Math.pow((1 / n) * sum2, 2);\n}\nfunction skewness(values) {\n // https://en.wikipedia.org/wiki/Skewness\n const valuesMean = mean(values);\n const n = values.length;\n let sum2 = 0;\n let sum3 = 0;\n for (let i = 0; i < n; i++) {\n const v = values[i] - valuesMean;\n sum2 += Math.pow(v, 2);\n sum3 += Math.pow(v, 3);\n }\n return (1 / n) * sum3 / Math.pow((1 / (n - 1)) * sum2, 3 / 2);\n}\n//# sourceMappingURL=rand_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { buffer } from './buffer';\nimport { op } from './operation';\nimport { RandGamma } from './rand_util';\n/**\n * Creates a `tf.Tensor` with values sampled from a gamma distribution.\n *\n * ```js\n * tf.randomGamma([2, 2], 1).print();\n * ```\n *\n * @param shape An array of integers defining the output tensor shape.\n * @param alpha The shape parameter of the gamma distribution.\n * @param beta The inverse scale parameter of the gamma distribution. Defaults\n * to 1.\n * @param dtype The data type of the output. Defaults to float32.\n * @param seed The seed for the random number generator.\n *\n * @doc {heading: 'Tensors', subheading: 'Random'}\n */\nfunction randomGamma_(shape, alpha, beta = 1, dtype = 'float32', seed) {\n if (beta == null) {\n beta = 1;\n }\n if (dtype == null) {\n dtype = 'float32';\n }\n if (dtype !== 'float32' && dtype !== 'int32') {\n throw new Error(`Unsupported data type ${dtype}`);\n }\n const rgamma = new RandGamma(alpha, beta, dtype, seed);\n const res = buffer(shape, dtype);\n for (let i = 0; i < res.values.length; i++) {\n res.values[i] = rgamma.nextValue();\n }\n return res.toTensor();\n}\nexport const randomGamma = op({ randomGamma_ });\n//# sourceMappingURL=random_gamma.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { buffer } from './buffer';\nimport { op } from './operation';\nimport { MPRandGauss } from './rand_util';\n/**\n * Creates a `tf.Tensor` with values sampled from a normal distribution.\n *\n * ```js\n * tf.randomNormal([2, 2]).print();\n * ```\n *\n * @param shape An array of integers defining the output tensor shape.\n * @param mean The mean of the normal distribution.\n * @param stdDev The standard deviation of the normal distribution.\n * @param dtype The data type of the output.\n * @param seed The seed for the random number generator.\n *\n * @doc {heading: 'Tensors', subheading: 'Random'}\n */\nfunction randomNormal_(shape, mean = 0, stdDev = 1, dtype, seed) {\n if (dtype != null && dtype === 'bool') {\n throw new Error(`Unsupported data type ${dtype}`);\n }\n const randGauss = new MPRandGauss(mean, stdDev, dtype, false /* truncated */, seed);\n const res = buffer(shape, dtype);\n for (let i = 0; i < res.values.length; i++) {\n res.values[i] = randGauss.nextValue();\n }\n return res.toTensor();\n}\nexport const randomNormal = op({ randomNormal_ });\n//# sourceMappingURL=random_normal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { buffer } from './buffer';\nimport { op } from './operation';\nimport { UniformRandom } from './rand_util';\n/**\n * Creates a `tf.Tensor` with values sampled from a uniform distribution.\n *\n * The generated values follow a uniform distribution in the range [minval,\n * maxval). The lower bound minval is included in the range, while the upper\n * bound maxval is excluded.\n *\n * ```js\n * tf.randomUniform([2, 2]).print();\n * ```\n *\n * @param shape An array of integers defining the output tensor shape.\n * @param minval The lower bound on the range of random values to generate.\n * Defaults to 0.\n * @param maxval The upper bound on the range of random values to generate.\n * Defaults to 1.\n * @param dtype The data type of the output tensor. 
Defaults to 'float32'.\n *\n * @doc {heading: 'Tensors', subheading: 'Random'}\n */\nfunction randomUniform_(shape, minval = 0, maxval = 1, dtype = 'float32', seed) {\n const res = buffer(shape, dtype);\n const random = new UniformRandom(minval, maxval, null, seed);\n for (let i = 0; i < res.values.length; i++) {\n res.values[i] = random.nextValue();\n }\n return res.toTensor();\n}\nexport const randomUniform = op({ randomUniform_ });\n//# sourceMappingURL=random_uniform.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { inferShape } from '../tensor_util_env';\nimport { assertNonNull } from '../util';\nimport { makeTensor } from './tensor_ops_util';\n/**\n * Creates rank-1 `tf.Tensor` with the provided values, shape and dtype.\n *\n * The same functionality can be achieved with `tf.tensor`, but in general\n * we recommend using `tf.tensor1d` as it makes the code more readable.\n *\n * ```js\n * tf.tensor1d([1, 2, 3]).print();\n * ```\n *\n * @param values The values of the tensor. Can be array of numbers,\n * or a `TypedArray`.\n * @param dtype The data type.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function tensor1d(values, dtype) {\n assertNonNull(values);\n const inferredShape = inferShape(values, dtype);\n if (inferredShape.length !== 1) {\n throw new Error('tensor1d() requires values to be a flat/TypedArray');\n }\n const shape = null;\n return makeTensor(values, shape, inferredShape, dtype);\n}\n//# sourceMappingURL=tensor1d.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Range } from '../kernel_names';\nimport { makeZerosTypedArray } from '../util';\nimport { tensor1d } from './tensor1d';\nimport { zeros } from './zeros';\n/**\n * Creates a new `tf.Tensor1D` filled with the numbers in the range provided.\n *\n * The tensor is a is half-open interval meaning it includes start, but\n * excludes stop. 
Decrementing ranges and negative step values are also\n * supported.sv\n *\n *\n * ```js\n * tf.range(0, 9, 2).print();\n * ```\n *\n * @param start An integer start value\n * @param stop An integer stop value\n * @param step An integer increment (will default to 1 or -1)\n * @param dtype The data type of the output tensor. Defaults to 'float32'.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function range(start, stop, step = 1, dtype = 'float32') {\n if (step === 0) {\n throw new Error('Cannot have a step of zero');\n }\n const forward = () => {\n const sameStartStop = start === stop;\n const increasingRangeNegativeStep = start < stop && step < 0;\n const decreasingRangePositiveStep = stop < start && step > 1;\n if (sameStartStop || increasingRangeNegativeStep ||\n decreasingRangePositiveStep) {\n return zeros([0], dtype);\n }\n const numElements = Math.abs(Math.ceil((stop - start) / step));\n const values = makeZerosTypedArray(numElements, dtype);\n if (stop < start && step === 1) {\n // Auto adjust the step's sign if it hasn't been set\n // (or was set to 1)\n step = -1;\n }\n values[0] = start;\n for (let i = 1; i < values.length; i++) {\n values[i] = values[i - 1] + step;\n }\n return tensor1d(values, dtype);\n };\n const attrs = { start, stop, step, dtype };\n return ENGINE.runKernelFunc(forward, {} /* inputs */, null /* grad */, Range, attrs);\n}\n//# sourceMappingURL=range.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Reciprocal } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes reciprocal of x element-wise: `1 / x`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, 2]);\n *\n * x.reciprocal().print(); // or tf.reciprocal(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction reciprocal_(x) {\n const $x = convertToTensor(x, 'x', 'reciprocal');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.reciprocal($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Reciprocal);\n}\nexport const reciprocal = op({ reciprocal_ });\n//# sourceMappingURL=reciprocal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Relu } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\nimport { cast } from './cast';\n/**\n * Computes rectified linear element-wise: `max(x, 0)`.\n *\n * ```js\n * const x = tf.tensor1d([-1, 2, -3, 4]);\n *\n * x.relu().print(); // or tf.relu(x)\n * ```\n * @param x The input tensor. If the dtype is `bool`, the output dtype will be\n * `int32'.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction relu_(x) {\n const $x = convertToTensor(x, 'x', 'relu');\n const forward = (backend, save) => {\n save([$x]);\n if ($x.dtype === 'bool') {\n return cast($x, 'int32');\n }\n return backend.relu($x);\n };\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Relu);\n}\nexport const relu = op({ relu_ });\n//# sourceMappingURL=relu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Relu6 } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { cast } from './cast';\nimport { op } from './operation';\n/**\n * Computes rectified linear 6 element-wise: `min(max(x, 0), 6)`.\n *\n * ```js\n * const x = tf.tensor1d([-1, 2, -3, 8]);\n *\n * x.relu6().print(); // or tf.relu6(x)\n * ```\n * @param x The input tensor. If the dtype is `bool`, the output dtype will be\n * `int32'.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction relu6_(x) {\n const $x = convertToTensor(x, 'x', 'relu6');\n const forward = (backend, save) => {\n save([$x]);\n if ($x.dtype === 'bool') {\n return cast($x, 'int32');\n }\n return backend.relu6($x);\n };\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Relu6);\n}\nexport const relu6 = op({ relu6_ });\n//# sourceMappingURL=relu6.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Reverse } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam } from '../util';\nimport { clone } from './clone';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Reverses a `tf.Tensor` along a specified axis.\n *\n * Also available are stricter rank-specific methods that assert that `x` is\n * of the given rank:\n * - `tf.reverse1d`\n * - `tf.reverse2d`\n * - `tf.reverse3d`\n * - `tf.reverse4d`\n *\n * Except `tf.reverse1d` (which does not have axis param), all methods have\n * same signature as this method.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4]);\n *\n * x.reverse().print();\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * const axis = 1;\n * x.reverse(axis).print();\n * ```\n * @param x The input tensor to be reversed.\n * @param axis The set of dimensions to reverse. Must be in the\n * range [-rank(x), rank(x)). Defaults to all axes.\n *\n * @doc {heading: 'Tensors', subheading: 'Slicing and Joining'}\n */\nfunction reverse_(x, axis) {\n const $x = convertToTensor(x, 'x', 'reverse');\n const forward = (backend) => {\n const axes = parseAxisParam(axis, $x.shape);\n if ($x.rank === 0) {\n return clone($x);\n }\n const res = backend.reverse($x, axes);\n return reshape(res, $x.shape);\n };\n const inputs = { x: $x };\n const attrs = { dims: axis };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Reverse, attrs);\n}\nexport const reverse = op({ reverse_ });\n//# sourceMappingURL=reverse.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\nimport { reverse } from './reverse';\n/**\n * Reverses a `tf.Tensor1D`.\n *\n * @param x The input tensor.\n */\nfunction reverse1d_(x) {\n const $x = convertToTensor(x, 'x', 'reverse');\n util.assert($x.rank === 1, () => `Error in reverse1D: x must be rank 1 but got rank ${$x.rank}.`);\n return reverse($x, 0);\n}\nexport const reverse1d = op({ reverse1d_ });\n//# sourceMappingURL=reverse_1d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\nimport { reverse } from './reverse';\n/**\n * Reverses a `tf.Tensor2D` along a specified axis.\n *\n * @param x The input tensor.\n * @param axis The set of dimensions to reverse. Must be in the\n * range [-rank(x), rank(x)). Defaults to all axes.\n */\nfunction reverse2d_(x, axis) {\n const $x = convertToTensor(x, 'x', 'reverse');\n util.assert($x.rank === 2, () => `Error in reverse2D: x must be rank 2 but got rank ${$x.rank}.`);\n return reverse($x, axis);\n}\nexport const reverse2d = op({ reverse2d_ });\n//# sourceMappingURL=reverse_2d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Unique } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assert } from '../util';\nimport { op } from './operation';\n/**\n * Finds unique elements along an axis of a tensor.\n *\n * It returns a tensor `values` containing all of the unique elements along the\n * `axis` of the given tensor `x` in the same order that they occur along the\n * `axis` in `x`; `x` does not need to be sorted. It also returns a tensor\n * `indices` the same size as the number of the elements in `x` along the `axis`\n * dimension. It contains the index in the unique output `values`.\n *\n * ```js\n * // A 1-D tensor\n * const a = tf.tensor1d([1, 1, 2, 4, 4, 4, 7, 8, 8]);\n * const {values, indices} = tf.unique(a);\n * values.print(); // [1, 2, 4, 7, 8,]\n * indices.print(); // [0, 0, 1, 2, 2, 2, 3, 4, 4]\n * ```\n *\n * ```js\n * // A 2-D tensor with axis=0\n * //\n * // 'a' is: [[1, 0, 0],\n * // [1, 0, 0],\n * // [2, 0, 0]]\n * const a = tf.tensor2d([[1, 0, 0], [1, 0, 0], [2, 0, 0]]);\n * const {values, indices} = tf.unique(a, 0)\n * values.print(); // [[1, 0, 0],\n * // [2, 0, 0]]\n * indices.print(); // [0, 0, 1]\n * ```\n *\n * ```js\n * // A 2-D tensor with axis=1\n * //\n * // 'a' is: [[1, 0, 0],\n * // [1, 0, 0],\n * // [2, 0, 0]]\n * const a = tf.tensor2d([[1, 0, 0], [1, 0, 0], [2, 0, 0]]);\n * const {values, indices} = tf.unique(a, 1)\n * values.print(); // [[1, 0],\n * // [1, 0],\n * // [2, 0]]\n * indices.print(); // [0, 1, 1]\n * ```\n * @param x A tensor (int32, string, bool).\n * @param axis The axis of the tensor to find the unique elements.\n * @returns [uniqueElements, indices] (see above for details)\n *\n * @doc {heading: 'Operations', subheading: 'Evaluation'}\n */\nfunction unique_(x, axis = 0) {\n // x can be of any dtype, thus null as the last argument.\n const $x = convertToTensor(x, 'x', 'unique', null);\n assert($x.rank > 0, () => 'The input tensor must be at least 1D');\n const inputs = { x: $x };\n const attrs = { axis };\n const [values, indices] = ENGINE.runKernel(Unique, inputs, attrs);\n return { values, indices };\n}\nexport const unique = op({ unique_ });\n//# sourceMappingURL=unique.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { UnsortedSegmentSum } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assert, isInt } from '../util';\nimport { op } from './operation';\n/**\n * Computes the sum along segments of a `tf.Tensor`.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4]);\n * const segmentIds = tf.tensor1d([1, 2, 0, 1], 'int32');\n * const numSegments = 3;\n *\n * x.unsortedSegmentSum(segmentIds, numSegments).print()\n * //or tf.unsortedSegmentSum(x, segmentIds, numSegments)\n * ```\n * @param x The `tf.Tensor` that will be summed along its segments.\n * @param segmentIds A `tf.Tensor1D` whose rank is equal to the rank of `x`'s\n * dimension along the `axis`. Maps each element of `x` to a segment.\n * @param numSegments The number of distinct `segmentIds`.\n *\n * @doc {heading: 'Operations', subheading: 'Segment'}\n */\nfunction unsortedSegmentSum_(x, segmentIds, numSegments) {\n const $x = convertToTensor(x, 'x', 'unsortedSegmentSum');\n const $segmentIds = convertToTensor(segmentIds, 'segmentIds', 'unsortedSegmentSum', 'int32');\n assert(isInt(numSegments), () => 'numSegments must be of dtype int');\n const inputs = { x: $x, segmentIds: $segmentIds };\n const attrs = { numSegments };\n const forward = (backend, save) => {\n const res = backend.unsortedSegmentSum($x, $segmentIds, numSegments);\n save([$segmentIds]);\n return res;\n };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, UnsortedSegmentSum, attrs);\n}\nexport const unsortedSegmentSum = op({ unsortedSegmentSum_ });\n//# sourceMappingURL=unsorted_segment_sum.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Unpack } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * Unstacks a `tf.Tensor` of rank-`R` into a list of rank-`(R-1)` `tf.Tensor`s.\n *\n * ```js\n * const a = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * tf.unstack(a).forEach(tensor => tensor.print());\n * ```\n *\n * @param x A tensor object.\n * @param axis The axis to unstack along. 
Defaults to 0 (the first dim).\n *\n * @doc {heading: 'Tensors', subheading: 'Slicing and Joining'}\n */\nfunction unstack_(x, axis = 0) {\n const $x = convertToTensor(x, 'x', 'unstack');\n util.assert(axis >= -$x.shape.length && axis < $x.shape.length, () => `Axis = ${axis} is not in [-${$x.shape.length}, ${$x.shape.length})`);\n if (axis < 0) {\n axis += $x.shape.length;\n }\n const inputs = { value: $x };\n const attrs = { axis };\n const forward = (backend) => backend.unstack($x, axis);\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Unpack, attrs);\n}\nexport const unstack = op({ unstack_ });\n//# sourceMappingURL=unstack.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\n/**\n * Creates a new variable with the provided initial value.\n * ```js\n * const x = tf.variable(tf.tensor([1, 2, 3]));\n * x.assign(tf.tensor([4, 5, 6]));\n *\n * x.print();\n * ```\n *\n * @param initialValue Initial value for the tensor.\n * @param trainable If true, optimizers are allowed to update it.\n * @param name Name of the variable. Defaults to a unique id.\n * @param dtype If set, initialValue will be converted to the given type.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function variable(initialValue, trainable = true, name, dtype) {\n return ENGINE.makeVariable(initialValue, trainable, name, dtype);\n}\n//# sourceMappingURL=variable.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/** An implementation of the Where kernel shared between cpu and webgl */\nimport { buffer } from '../ops/buffer';\nexport function whereImpl(condShape, condVals) {\n const indices = [];\n for (let i = 0; i < condVals.length; i++) {\n if (condVals[i]) {\n indices.push(i);\n }\n }\n const inBuffer = buffer(condShape, 'int32');\n const out = buffer([indices.length, condShape.length], 'int32');\n for (let i = 0; i < indices.length; i++) {\n const loc = inBuffer.indexToLoc(indices[i]);\n const offset = i * condShape.length;\n out.values.set(loc, offset);\n }\n return out.toTensor();\n}\n//# sourceMappingURL=where_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { whereImpl } from '../backends/where_impl';\nimport { convertToTensor } from '../tensor_util_env';\n/**\n * Returns the coordinates of true elements of condition.\n *\n * The coordinates are returned in a 2-D tensor where the first dimension (rows)\n * represents the number of true elements, and the second dimension (columns)\n * represents the coordinates of the true elements. Keep in mind, the shape of\n * the output tensor can vary depending on how many true values there are in\n * input. Indices are output in row-major order. The resulting tensor has the\n * shape `[numTrueElems, condition.rank]`.\n *\n * This is analogous to calling the python `tf.where(cond)` without an x or y.\n *\n * ```js\n * const cond = tf.tensor1d([false, false, true], 'bool');\n * const result = await tf.whereAsync(cond);\n * result.print();\n * ```\n *\n * @doc {heading: 'Operations', subheading: 'Logical'}\n */\nasync function whereAsync_(condition) {\n const $condition = convertToTensor(condition, 'condition', 'whereAsync', 'bool');\n const vals = await $condition.data();\n const res = whereImpl($condition.shape, vals);\n if (condition !== $condition) {\n $condition.dispose();\n }\n return res;\n}\nexport const whereAsync = whereAsync_;\n//# sourceMappingURL=where_async.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { gather } from './gather';\nimport { reshape } from './reshape';\nimport { squeeze } from './squeeze';\nimport { whereAsync } from './where_async';\n/**\n * Apply boolean mask to tensor.\n *\n * ```js\n * const tensor = tf.tensor2d([1, 2, 3, 4, 5, 6], [3, 2]);\n * const mask = tf.tensor1d([1, 0, 1], 'bool');\n * const result = await tf.booleanMaskAsync(tensor, mask);\n * result.print();\n * ```\n *\n * @param tensor N-D tensor.\n * @param mask K-D boolean tensor, K <= N and K must be known statically.\n * @param axis A 0-D int Tensor representing the axis in tensor to mask from.\n * By default, axis is 0 which will mask from the first dimension.\n * Otherwise K + axis <= N.\n *\n * @doc {heading: 'Tensors', subheading: 'Slicing and Joining'}\n */\nasync function booleanMaskAsync_(tensor, mask, axis) {\n const $tensor = convertToTensor(tensor, 'tensor', 'boolMask');\n const $mask = convertToTensor(mask, 'mask', 'boolMask', 'bool');\n const axisFrom = axis == null ? 0 : axis;\n const maskDim = $mask.rank;\n const tensorShape = $tensor.shape;\n util.assert(maskDim > 0, () => 'mask cannot be scalar');\n util.assertShapesMatch(tensorShape.slice(axisFrom, axisFrom + maskDim), $mask.shape, `mask's shape must match the first K dimensions of tensor's shape,`);\n let leadingSize = 1;\n for (let i = axisFrom; i < axisFrom + maskDim; i++) {\n leadingSize *= tensorShape[i];\n }\n const targetTensorShape = tensorShape.slice(0, axisFrom)\n .concat([leadingSize], tensorShape.slice(axisFrom + maskDim));\n const reshapedTensor = reshape($tensor, targetTensorShape);\n const reshapedMask = reshape($mask, [-1]);\n const positivePositions = await whereAsync(reshapedMask);\n const indices = squeeze(positivePositions, [1]);\n const res = gather(reshapedTensor, indices, axisFrom);\n // Ensure no memory leak.\n if (tensor !== $tensor) {\n $tensor.dispose();\n }\n if (mask !== $mask) {\n $mask.dispose();\n }\n indices.dispose();\n reshapedTensor.dispose();\n reshapedMask.dispose();\n positivePositions.dispose();\n return res;\n}\nexport const booleanMaskAsync = booleanMaskAsync_;\n//# sourceMappingURL=boolean_mask.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { deprecationWarn } from '../globals';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertShapesMatch } from '../util';\nimport { equal } from './equal';\nimport { greater } from './greater';\nimport { greaterEqual } from './greater_equal';\nimport { less } from './less';\nimport { lessEqual } from './less_equal';\nimport { notEqual } from './not_equal';\nimport { op } from './operation';\n/**\n * @deprecated\n * Strict version of `tf.notEqual` that forces `a` and `b` to be of the same\n * shape.\n *\n * @param a The first input tensor.\n * @param b The second input tensor. Must have the same shape and dtype as\n * `a`.\n */\nfunction notEqualStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'notEqualStrict');\n const $b = convertToTensor(b, 'b', 'notEqualStrict');\n assertShapesMatch($a.shape, $b.shape, 'Error in notEqualStrict: ');\n return notEqual($a, $b);\n}\n/**\n * @deprecated\n * Strict version of `tf.less` that forces `a` and `b` to be of the same\n * shape.\n *\n * @param a The first input tensor.\n * @param b The second input tensor. 
Must have the same shape and dtype as\n * `a`.\n */\nfunction lessStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'lessStrict');\n const $b = convertToTensor(b, 'b', 'lessStrict');\n assertShapesMatch($a.shape, $b.shape, 'Error in lessStrict: ');\n return less($a, $b);\n}\nfunction equalStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'equalStrict');\n const $b = convertToTensor(b, 'b', 'equalStrict');\n assertShapesMatch($a.shape, $b.shape, 'Error in equalStrict: ');\n return equal($a, $b);\n}\nfunction lessEqualStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'lessEqualStrict');\n const $b = convertToTensor(b, 'b', 'lessEqualStrict');\n assertShapesMatch($a.shape, $b.shape, 'Error in lessEqualStrict: ');\n return lessEqual($a, $b);\n}\nfunction greaterStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'greaterStrict');\n const $b = convertToTensor(b, 'b', 'greaterStrict');\n assertShapesMatch($a.shape, $b.shape, 'Error in greaterStrict: ');\n return greater($a, $b);\n}\nfunction greaterEqualStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'greaterEqualStrict');\n const $b = convertToTensor(b, 'b', 'greaterEqualStrict');\n assertShapesMatch($a.shape, $b.shape, 'Error in greaterEqualStrict: ');\n return greaterEqual($a, $b);\n}\nexport const equalStrict = op({ equalStrict_ });\nexport const greaterEqualStrict = op({ greaterEqualStrict_ });\nexport const greaterStrict = op({ greaterStrict_ });\nexport const lessEqualStrict = op({ lessEqualStrict_ });\nexport const lessStrict = op({ lessStrict_ });\nexport const notEqualStrict = op({ notEqualStrict_ });\n//# sourceMappingURL=compare.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { deprecationWarn } from '../globals';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { add } from './add';\nimport { div } from './div';\nimport { maximum } from './maximum';\nimport { minimum } from './minimum';\nimport { mod } from './mod';\nimport { mul } from './mul';\nimport { op } from './operation';\nimport { pow } from './pow';\nimport { squaredDifference } from './squared_difference';\nimport { sub } from './sub';\n/**\n * @deprecated\n * Adds two `tf.Tensor`s element-wise, A + B.\n *\n * Inputs must be the same shape. 
For broadcasting support, use add() instead.\n *\n * @param a The first Tensor to add element-wise.\n * @param b The second Tensor to add element-wise.\n */\nfunction addStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'addStrict');\n const $b = convertToTensor(b, 'b', 'addStrict');\n util.assertShapesMatch($a.shape, $b.shape, 'Error in addStrict: ');\n return add($a, $b);\n}\n/**\n * @deprecated\n * Subtracts two `tf.Tensor`s element-wise, A - B. Inputs must\n * be the same shape.\n *\n * For broadcasting support, use `tf.sub` instead.\n *\n * @param a The first Tensor to subtract element-wise.\n * @param b The second Tensor to subtract element-wise.\n */\nfunction subStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'subStrict');\n const $b = convertToTensor(b, 'b', 'subStrict');\n util.assertShapesMatch($a.shape, $b.shape, 'Error in subStrict: ');\n return sub($a, $b);\n}\n/**\n * @deprecated\n * Computes the power of one `tf.Tensor` to another. Inputs must\n * be the same shape.\n *\n * For broadcasting support, use `tf.pow` instead.\n *\n * @param base The base tensor to pow element-wise.\n * @param exp The exponent tensor to pow element-wise.\n */\nfunction powStrict_(base, exp) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n util.assertShapesMatch(base.shape, exp.shape, 'Error in powStrict: ');\n return pow(base, exp);\n}\n/**\n * @deprecated\n * Multiplies two `tf.Tensor`s element-wise, A * B.\n *\n * Inputs must be the same shape. For broadcasting support, use `tf.mul`.\n *\n * @param a The first tensor to multiply.\n * @param b The first tensor to multiply. Must have the same\n * dtype as `a`.\n */\nfunction mulStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'mul');\n const $b = convertToTensor(b, 'b', 'mul');\n util.assertShapesMatch($a.shape, $b.shape, 'Error in multiplyStrict: ');\n return mul($a, $b);\n}\n/**\n * @deprecated\n * Divides two `tf.Tensor`s element-wise, A / B. Inputs must\n * be the same shape.\n *\n * @param a The first tensor as the numerator for element-wise division.\n * @param b The second tensor as the denominator for element-wise division.\n */\nfunction divStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'div');\n const $b = convertToTensor(b, 'b', 'div');\n util.assertShapesMatch($a.shape, $b.shape, 'Error in divideStrict: ');\n return div($a, $b);\n}\n/**\n * @deprecated\n * Returns the mod of a and b (`a < b ? a : b`) element-wise. Inputs must\n * be the same shape. For broadcasting support, use mod().\n *\n * @param a The first tensor.\n * @param b The second tensor. Must have the same dtype as `a`.\n */\nfunction modStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'modStrict');\n const $b = convertToTensor(b, 'b', 'modStrict');\n util.assertShapesMatch($a.shape, $b.shape, 'Error in modStrict: ');\n return mod($a, $b);\n}\n/**\n * @deprecated\n * Returns the min of a and b (`a < b ? a : b`) element-wise. Inputs must\n * be the same shape. 
For broadcasting support, use minimum().\n *\n * @param a The first tensor.\n * @param b The second tensor. Must have the same dtype as `a`.\n */\nfunction minimumStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'minimumStrict');\n const $b = convertToTensor(b, 'b', 'minimumStrict');\n util.assertShapesMatch($a.shape, $b.shape, 'Error in minimumStrict: ');\n return minimum($a, $b);\n}\n/**\n * @deprecated\n * Returns the max of a and b (`a > b ? a : b`) element-wise. Inputs must\n * be the same shape. For broadcasting support, use maximum().\n *\n * @param a The first tensor.\n * @param b The second tensor. Must have the same dtype as `a`.\n */\nfunction maximumStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'maximumStrict');\n const $b = convertToTensor(b, 'b', 'maximumStrict');\n util.assertShapesMatch($a.shape, $b.shape, 'Error in maximumStrict: ');\n return maximum($a, $b);\n}\n/**\n * @deprecated\n * Returns (a - b) * (a - b) element-wise.\n *\n * Inputs must be the same shape. For broadcasting support, use\n * `tf.squaredDifference` instead.\n *\n * @param a The first tensor.\n * @param b The second tensor. Must have the same type as `a`.\n */\nfunction squaredDifferenceStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'squaredDifferenceStrict');\n const $b = convertToTensor(b, 'b', 'squaredDifferenceStrict');\n util.assertShapesMatch($a.shape, $b.shape, 'Error in squaredDifferenceStrict: ');\n return squaredDifference($a, $b);\n}\nexport const addStrict = op({ addStrict_ });\nexport const divStrict = op({ divStrict_ });\nexport const maximumStrict = op({ maximumStrict_ });\nexport const minimumStrict = op({ minimumStrict_ });\nexport const modStrict = op({ modStrict_ });\nexport const mulStrict = op({ mulStrict_ });\nexport const powStrict = op({ powStrict_ });\nexport const squaredDifferenceStrict = op({ squaredDifferenceStrict_ });\nexport const subStrict = op({ subStrict_ });\n//# sourceMappingURL=binary_ops.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam } from '../util';\nimport { abs } from './abs';\nimport * as axis_util from './axis_util';\nimport { max } from './max';\nimport { min } from './min';\nimport { op } from './operation';\nimport { pow } from './pow';\nimport { reshape } from './reshape';\nimport { scalar } from './scalar';\nimport { sqrt } from './sqrt';\nimport { square } from './square';\nimport { sum } from './sum';\n/**\n * Computes the norm of scalar, vectors, and matrices.\n * This function can compute several different vector norms (the 1-norm, the\n * Euclidean or 2-norm, the inf-norm, and in general the p-norm for p > 0)\n * and matrix norms (Frobenius, 1-norm, and inf-norm).\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4]);\n *\n * x.norm().print(); // or tf.norm(x)\n * ```\n *\n * @param x The input array.\n * @param ord Optional. Order of the norm. Supported norm types are\n * following:\n *\n * | ord | norm for matrices | norm for vectors\n * |------------|---------------------------|---------------------\n * |'euclidean' |Frobenius norm |2-norm\n * |'fro' |Frobenius norm\t |\n * |Infinity |max(sum(abs(x), axis=1)) |max(abs(x))\n * |-Infinity |min(sum(abs(x), axis=1)) |min(abs(x))\n * |1 |max(sum(abs(x), axis=0)) |sum(abs(x))\n * |2 | |sum(abs(x)^2)^1/2*\n *\n * @param axis Optional. If axis is null (the default), the input is\n * considered a vector and a single vector norm is computed over the entire\n * set of values in the Tensor, i.e. norm(x, ord) is equivalent\n * to norm(x.reshape([-1]), ord). If axis is a integer, the input\n * is considered a batch of vectors, and axis determines the axis in x\n * over which to compute vector norms. If axis is a 2-tuple of integer it is\n * considered a batch of matrices and axis determines the axes in NDArray\n * over which to compute a matrix norm.\n * @param keepDims Optional. 
If true, the norm have the same dimensionality\n * as the input.\n *\n * @doc {heading: 'Operations', subheading: 'Matrices'}\n */\nfunction norm_(x, ord = 'euclidean', axis = null, keepDims = false) {\n x = convertToTensor(x, 'x', 'norm');\n const norm = normImpl(x, ord, axis);\n let keepDimsShape = norm.shape;\n if (keepDims) {\n const axes = parseAxisParam(axis, x.shape);\n keepDimsShape = axis_util.expandShapeToKeepDim(norm.shape, axes);\n }\n return reshape(norm, keepDimsShape);\n}\nfunction normImpl(x, p, axis = null) {\n if (x.rank === 0) {\n return abs(x);\n }\n // consider vector when no axis is specified\n if (x.rank !== 1 && axis === null) {\n return normImpl(reshape(x, [-1]), p, axis);\n }\n // vector\n if (x.rank === 1 || typeof axis === 'number' ||\n Array.isArray(axis) && axis.length === 1) {\n if (p === 1) {\n return sum(abs(x), axis);\n }\n if (p === Infinity) {\n return max(abs(x), axis);\n }\n if (p === -Infinity) {\n return min(abs(x), axis);\n }\n if (p === 'euclidean' || p === 2) {\n // norm(x, 2) = sum(abs(xi) ^ 2) ^ 1/2\n return sqrt(sum(pow(abs(x), scalar(2, 'int32')), axis));\n }\n throw new Error(`Error in norm: invalid ord value: ${p}`);\n }\n // matrix (assumption axis[0] < axis[1])\n if (Array.isArray(axis) && axis.length === 2) {\n if (p === 1) {\n return max(sum(abs(x), axis[0]), axis[1] - 1);\n }\n if (p === Infinity) {\n return max(sum(abs(x), axis[1]), axis[0]);\n }\n if (p === -Infinity) {\n return min(sum(abs(x), axis[1]), axis[0]);\n }\n if (p === 'fro' || p === 'euclidean') {\n // norm(x) = sqrt(sum(pow(x, 2)))\n return sqrt(sum(square(x), axis));\n }\n throw new Error(`Error in norm: invalid ord value: ${p}`);\n }\n throw new Error(`Error in norm: invalid axis: ${axis}`);\n}\nexport const norm = op({ norm_ });\n//# sourceMappingURL=norm.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { assertTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { add } from './add';\nimport { div } from './div';\nimport { mul } from './mul';\nimport { op } from './operation';\nimport { pow } from './pow';\nimport { scalar } from './scalar';\nimport { sub } from './sub';\n/**\n * Compute the moving average of a variable.\n *\n * Without zeroDebias, the moving average operation is defined by:\n * `v += delta`\n * where\n * `delta = (1 - decay) * (x - v)`\n *\n * With zeroDebias (default), the `delta` term is scaled to debias the\n * effect of the (assumed) zero-initialization of `v`.\n * `delta /= (1 - decay ^ step)`\n *\n * For more details on the zero-debiasing algorithm, see:\n * https://arxiv.org/abs/1412.6980\n *\n * Note that this function is completely stateless and does not keep track of\n * step count. 
The step count needs to be maintained by the caller and passed\n * in as `step`.\n *\n * @param v The current moving average value.\n * @param x New input value, must have the same shape and dtype as `v`.\n * @param decay The decay factor. Typical values are 0.95 and 0.99.\n * @param step Step count.\n * @param zeroDebias: Whether zeroDebias is to be performed (default: `true`).\n * @returns The new moving average value.\n *\n * @doc {heading: 'Operations', subheading: 'Moving Average'}\n */\nfunction movingAverage_(v, x, decay, step, zeroDebias = true) {\n const $v = convertToTensor(v, 'v', 'movingAverage');\n const $x = convertToTensor(x, 'x', 'movingAverage');\n const $decay = convertToTensor(decay, 'decay', 'movingAverage');\n assertTypesMatch($v, $x);\n util.assert(util.arraysEqual($v.shape, $x.shape), () => 'Shape mismatch in v and x');\n const one = scalar(1);\n const oneMinusDecay = sub(one, $decay);\n let update = mul(sub($x, $v), oneMinusDecay);\n if (zeroDebias) {\n util.assert(step != null, () => 'When using zeroDebias: true, step is required.');\n const $step = convertToTensor(step, 'step', 'movingAverage');\n update = div(update, sub(one, pow($decay, $step)));\n }\n return add($v, update);\n}\nexport const movingAverage = op({ movingAverage_ });\n//# sourceMappingURL=moving_average.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { ScatterNd } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\nimport * as scatter_nd_util from './scatter_nd_util';\n/**\n * Creates a new tensor by applying sparse updates to individual\n * values or slices within a zero tensor of the given shape tensor according to\n * indices. 
This operator is the inverse of the `tf.gatherND` operator which\n * extracts values or slices from a given tensor.\n *\n * ```js\n * const indices = tf.tensor2d([4, 3, 1, 7], [4, 1], 'int32');\n * const updates = tf.tensor1d([9, 10, 11, 12]);\n * const shape = [8];\n * tf.scatterND(indices, updates, shape).print() //[0, 11, 0, 10, 9, 0, 0, 12]\n * ```\n *\n * @param indices The tensor contains the indices into the output tensor.\n * @param updates The tensor contains the value for the indices.\n * @param shape: The shape of the output tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Slicing and Joining'}\n */\nfunction scatterND_(indices, updates, shape) {\n const $indices = convertToTensor(indices, 'indices', 'scatterND', 'int32');\n const $updates = convertToTensor(updates, 'updates', 'scatterND');\n scatter_nd_util.validateInput($updates, $indices, shape);\n const forward = (backend) => {\n return backend.scatterND($indices, $updates, shape);\n };\n const inputs = { indices: $indices, updates: $updates };\n const attrs = { shape };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, ScatterNd, attrs);\n}\nexport const scatterND = op({ scatterND_ });\n//# sourceMappingURL=scatter_nd.js.map", "/**\n * Validate sparseToDense inputs.\n *\n * @param sparseIndices A 0-D, 1-D, or 2-D Tensor of type int32.\n * sparseIndices[i] contains the complete index where sparseValues[i] will be\n * placed.\n * @param sparseValues A 0-D or 1-D Tensor. Values\n * corresponding to each row of sparseIndices, or a scalar value to be used for\n * all sparse indices.\n * @param outputShape number[]. Shape of the dense output tensor.\n * @param validateIndices boolean. indice validation is not supported, error\n * will be thrown if it is set.\n */\nexport function validateInput(sparseIndices, sparseValues, outputShape, defaultValues) {\n if (sparseIndices.dtype !== 'int32') {\n throw new Error('tf.sparseToDense() expects the indices to be int32 type,' +\n ` but the dtype was ${sparseIndices.dtype}.`);\n }\n if (sparseIndices.rank > 2) {\n throw new Error('sparseIndices should be a scalar, vector, or matrix,' +\n ` but got shape ${sparseIndices.shape}.`);\n }\n const numElems = sparseIndices.rank > 0 ? sparseIndices.shape[0] : 1;\n const numDims = sparseIndices.rank > 1 ? sparseIndices.shape[1] : 1;\n if (outputShape.length !== numDims) {\n throw new Error('outputShape has incorrect number of elements:,' +\n ` ${outputShape.length}, should be: ${numDims}.`);\n }\n const numValues = sparseValues.size;\n if (!(sparseValues.rank === 0 ||\n sparseValues.rank === 1 && numValues === numElems)) {\n throw new Error('sparseValues has incorrect shape ' +\n `${sparseValues.shape}, should be [] or [${numElems}]`);\n }\n if (sparseValues.dtype !== defaultValues.dtype) {\n throw new Error('sparseValues.dtype must match defaultValues.dtype');\n }\n}\n//# sourceMappingURL=sparse_to_dense_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { SparseToDense } from '../kernel_names';\nimport * as sparse_to_dense from '../ops/sparse_to_dense_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Converts a sparse representation into a dense tensor.\n *\n * Builds an array dense with shape outputShape such that:\n *\n * // If sparseIndices is scalar\n * dense[i] = (i == sparseIndices ? sparseValues : defaultValue)\n *\n * // If sparseIndices is a vector, then for each i\n * dense[sparseIndices[i]] = sparseValues[i]\n *\n * // If sparseIndices is an n by d matrix, then for each i in [0, n)\n * dense[sparseIndices[i][0], ..., sparseIndices[i][d-1]] = sparseValues[i]\n * All other values in dense are set to defaultValue. If sparseValues is a\n * scalar, all sparse indices are set to this single value.\n *\n * If indices are repeated the final value is summed over all values for those\n * indices.\n *\n * ```js\n * const indices = tf.tensor1d([4, 5, 6, 1, 2, 3], 'int32');\n * const values = tf.tensor1d([10, 11, 12, 13, 14, 15], 'float32');\n * const shape = [8];\n * tf.sparseToDense(indices, values, shape).print();\n * ```\n *\n * @param sparseIndices A 0-D, 1-D, or 2-D Tensor of type int32.\n * sparseIndices[i] contains the complete index where sparseValues[i] will be\n * placed.\n * @param sparseValues A 0-D or 1-D Tensor. Values\n * corresponding to each row of sparseIndices, or a scalar value to be used for\n * all sparse indices.\n * @param outputShape Shape of the dense output tensor. the type is inferred.\n * @param defaultValue Scalar. Value to set for indices not specified in\n * sparseIndices. Defaults to zero.\n *\n * @doc {heading: 'Operations', subheading: 'Normalization'}\n */\nfunction sparseToDense_(sparseIndices, sparseValues, outputShape, defaultValue = 0) {\n const $sparseIndices = convertToTensor(sparseIndices, 'sparseIndices', 'sparseToDense', 'int32');\n const $sparseValues = convertToTensor(sparseValues, 'sparseValues', 'sparseToDense');\n const $defaultValue = convertToTensor(defaultValue, 'defaultValue', 'sparseToDense', $sparseValues.dtype);\n sparse_to_dense.validateInput($sparseIndices, $sparseValues, outputShape, $defaultValue);\n const inputs = {\n sparseIndices: $sparseIndices,\n sparseValues: $sparseValues,\n defaultValue: $defaultValue\n };\n const attrs = { outputShape };\n return ENGINE.runKernelFunc(backend => backend.sparseToDense($sparseIndices, $sparseValues, outputShape, $defaultValue), inputs, null /* grad */, SparseToDense, attrs);\n}\nexport const sparseToDense = op({ sparseToDense_ });\n//# sourceMappingURL=sparse_to_dense.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { GatherNd } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Gather slices from input tensor into a Tensor with shape specified by\n * `indices`.\n *\n * `indices` is an K-dimensional integer tensor, best thought of as a\n * (K-1)-dimensional tensor of indices into input, where each element defines a\n * slice of input:\n * output[\\\\(i_0, ..., i_{K-2}\\\\)] = input[indices[\\\\(i_0, ..., i_{K-2}\\\\)]]\n *\n * Whereas in `tf.gather`, `indices` defines slices into the first dimension of\n * input, in `tf.gatherND`, `indices` defines slices into the first N dimensions\n * of input, where N = indices.shape[-1].\n *\n * The last dimension of indices can be at most the rank of input:\n * indices.shape[-1] <= input.rank\n *\n * The last dimension of `indices` corresponds to elements\n * (if indices.shape[-1] == input.rank) or slices\n * (if indices.shape[-1] < input.rank) along dimension indices.shape[-1] of\n * input.\n * The output tensor has shape\n * indices.shape[:-1] + input.shape[indices.shape[-1]:]\n *\n * Note that on CPU, if an out of bound index is found, an error is returned. On\n * GPU, if an out of bound index is found, a 0 is stored in the corresponding\n * output value.\n *\n * ```js\n * const indices = tf.tensor2d([0, 1, 1, 0], [2,2], 'int32');\n * const input = tf.tensor2d([9, 10, 11, 12], [2, 2]);\n * tf.gatherND(input, indices).print() // [10, 11]\n * ```\n *\n * @param x The tensor from which to gather values.\n * @param indices Index tensor, must be of type int32.\n *\n * @doc {heading: 'Operations', subheading: 'Slicing and Joining'}\n */\nfunction gatherND_(x, indices) {\n const $indices = convertToTensor(indices, 'indices', 'gatherND', 'int32');\n const $x = convertToTensor(x, 'x', 'gatherND');\n const forward = (backend) => {\n return backend.gatherND($x, $indices);\n };\n const inputs = { params: $x, indices: $indices };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, GatherNd);\n}\nexport const gatherND = op({ gatherND_ });\n//# sourceMappingURL=gather_nd.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as util from '../util';\n/**\n * Normalize noise shape based on provided tensor and noise shape.\n *\n * @param x Tensor.\n * @param noiseShape The shape for the randomly generated keep/drop flags, as\n * an array of numbers. Optional.\n * @returns Normalized noise shape.\n */\nexport function getNoiseShape(x, noiseShape) {\n if (noiseShape == null) {\n return x.shape.slice();\n }\n if (util.arraysEqual(x.shape, noiseShape)) {\n return noiseShape;\n }\n if (x.shape.length === noiseShape.length) {\n const newDimension = [];\n for (let i = 0; i < x.shape.length; i++) {\n if (noiseShape[i] == null && x.shape[i] != null) {\n newDimension.push(x.shape[i]);\n }\n else {\n newDimension.push(noiseShape[i]);\n }\n }\n return newDimension;\n }\n return noiseShape;\n}\n//# sourceMappingURL=dropout_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Tensor } from '../tensor';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { add } from './add';\nimport { div } from './div';\nimport { getNoiseShape } from './dropout_util';\nimport { floor } from './floor';\nimport { mul } from './mul';\nimport { op } from './operation';\nimport { randomUniform } from './random_uniform';\n/**\n * Computes dropout.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 2, 1]);\n * const rate = 0.75;\n * const output = tf.dropout(x, rate);\n * output.print();\n * ```\n *\n * @param x A floating point Tensor or TensorLike.\n * @param rate A float in the range [0, 1). The probability that each element\n * of x is discarded.\n * @param noiseShape An array of numbers of type int32, representing the\n * shape for randomly generated keep/drop flags. If the noiseShape has null\n * value, it will be automatically replaced with the x's relative dimension\n * size. Optional.\n * @param seed Used to create random seeds. 
Optional.\n * @returns A Tensor of the same shape of x.\n *\n * @doc {heading: 'Operations', subheading: 'Dropout'}\n */\nfunction dropout_(x, rate, noiseShape, seed) {\n const $x = convertToTensor(x, 'x', 'dropout');\n util.assert($x.dtype === 'float32', () => `x has to be a floating point tensor since it's going to be ` +\n `scaled, but got a ${$x.dtype} tensor instead.`);\n util.assert(rate >= 0 && rate < 1, () => `rate must be a float in the range [0, 1), but got ${rate}.`);\n if (rate === 0) {\n return x instanceof Tensor ? $x.clone() : $x;\n }\n const $noiseShape = getNoiseShape($x, noiseShape);\n const keepProb = 1 - rate;\n const multiplier = div(floor(add(randomUniform($noiseShape, 0, 1, 'float32', seed), keepProb)), keepProb);\n return mul($x, multiplier);\n}\nexport const dropout = op({ dropout_ });\n//# sourceMappingURL=dropout.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { tensor1d } from './tensor1d';\nexport function enclosingPowerOfTwo(value) {\n // Return 2**N for integer N such that 2**N >= value.\n return Math.floor(Math.pow(2, Math.ceil(Math.log(value) / Math.log(2.0))));\n}\nexport function cosineWindow(windowLength, a, b) {\n const even = 1 - windowLength % 2;\n const newValues = new Float32Array(windowLength);\n for (let i = 0; i < windowLength; ++i) {\n const cosArg = (2.0 * Math.PI * i) / (windowLength + even - 1);\n newValues[i] = a - b * Math.cos(cosArg);\n }\n return tensor1d(newValues, 'float32');\n}\n//# sourceMappingURL=signal_ops_util.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport { assert, assertShapesMatch, getTypedArrayFromDType } from '../util';\nimport { tensor } from './tensor';\n/**\n * Returns whether the targets are in the top K predictions.\n *\n * ```js\n * const predictions = tf.tensor2d([[20, 10, 40, 30], [30, 50, -20, 10]]);\n * const targets = tf.tensor1d([2, 0]);\n * const precision = await tf.inTopKAsync(predictions, targets);\n * precision.print();\n * ```\n * @param predictions 2-D or higher `tf.Tensor` with last dimension being\n * at least `k`.\n * @param targets 1-D or higher `tf.Tensor`.\n * @param k Optional Number of top elements to look at for computing precision,\n * default to 1.\n *\n * @doc {heading: 'Operations', subheading: 'Evaluation'}\n */\nasync function inTopKAsync_(predictions, targets, k = 1) {\n const $predictions = convertToTensor(predictions, 'predictions', 'inTopK');\n const $targets = convertToTensor(targets, 'targets', 'inTopK');\n assert($predictions.rank > 1, () => 'inTopK() expects the predictions to be of rank 2 or higher, ' +\n `but got ${$predictions.rank}`);\n assert($predictions.rank - 1 === $targets.rank, () => `predictions rank should be 1 larger than ` +\n `targets rank, but got predictions rank ` +\n `${$predictions.rank} and targets rank ${$targets.rank}`);\n assertShapesMatch($predictions.shape.slice(0, $predictions.shape.length - 1), $targets.shape, `predictions's shape should be align with the targets' shape, ` +\n 'except the last dimension.');\n const lastDim = $predictions.shape[$predictions.shape.length - 1];\n assert(k > 0 && k <= lastDim, () => `'k' passed to inTopK() must be > 0 && <= the predictions last ` +\n `dimension (${lastDim}), but got ${k}`);\n const predictionsVals = await $predictions.data();\n const targetsVals = await $targets.data();\n // Reshape predictionsVals into a 2d tensor [batch, lastDim]\n // and look up topK along lastDim.\n const [batch, size] = [predictionsVals.length / lastDim, lastDim];\n const precision = getTypedArrayFromDType('bool', batch);\n for (let b = 0; b < batch; b++) {\n const offset = b * size;\n const vals = predictionsVals.subarray(offset, offset + size);\n const valAndInd = [];\n for (let i = 0; i < vals.length; i++) {\n valAndInd.push({ value: vals[i], index: i });\n }\n valAndInd.sort((a, b) => b.value - a.value);\n precision[b] = 0;\n for (let i = 0; i < k; i++) {\n if (valAndInd[i].index === targetsVals[b]) {\n precision[b] = 1;\n break;\n }\n }\n }\n if (predictions !== $predictions) {\n $predictions.dispose();\n }\n if (targets !== $targets) {\n $targets.dispose();\n }\n // Output precision has the same shape as targets.\n return tensor(precision, $targets.shape, 'bool');\n}\nexport const inTopKAsync = inTopKAsync_;\n//# sourceMappingURL=in_top_k.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Conv2DBackpropFilter } from '../kernel_names';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the derivative of the filter of a 2D convolution.\n *\n * @param x The input tensor, of rank 4 or rank 3 of shape\n * [batch, height, width, inChannels]. If rank 3, batch of 1 is assumed.\n * @param dy The dy image, of rank 4 or rank 3, of shape\n * [batch, height, width, outDepth]. If rank 3, batch of 1 is assumed.\n * @param filterShape The shape of the filter, length 4,\n * [filterHeight, filterWidth, inDepth, outDepth].\n * @param strides The strides of the convolution: [strideHeight,\n * strideWidth].\n * @param pad A string from: 'same', 'valid'. The type of padding algorithm\n * used in the forward prop of the op.\n * @param dataFormat: An optional string from: \"NHWC\", \"NCHW\". Defaults to\n * \"NHWC\". Specify the data format of the input and output data. With the\n * default format \"NHWC\", the data is stored in the order of: [batch,\n * height, width, channels].\n * @param dimRoundingMode A string from: 'ceil', 'round', 'floor'. The\n * rounding mode used when computing output dimensions if pad is a\n * number. If none is provided, it will not round and error if the output\n * is of fractional size.\n */\nfunction conv2DBackpropFilter_(x, dy, filterShape, strides, pad, dataFormat = 'NHWC', dimRoundingMode) {\n let x4D = x;\n if (x.rank === 3) {\n x4D = reshape(x, [1, x.shape[0], x.shape[1], x.shape[2]]);\n }\n let dy4D = dy;\n if (dy4D.rank === 3) {\n dy4D = reshape(dy, [1, dy.shape[0], dy.shape[1], dy.shape[2]]);\n }\n util.assert(x4D.rank === 4, () => `Error in conv2dDerFilter: input must be rank 4, but got shape ` +\n `${x4D.shape}.`);\n util.assert(dy4D.rank === 4, () => `Error in conv2dDerFilter: dy must be rank 4, but got shape ` +\n `${dy4D.shape}.`);\n util.assert(filterShape.length === 4, () => `Error in conv2dDerFilter: filterShape must be length 4, but got ` +\n `${filterShape}.`);\n const inDepth = dataFormat === 'NHWC' ? x4D.shape[3] : x4D.shape[1];\n const outDepth = dataFormat === 'NHWC' ? 
dy4D.shape[3] : dy4D.shape[1];\n util.assert(inDepth === filterShape[2], () => `Error in conv2dDerFilter: depth of input ${inDepth}) must ` +\n `match input depth in filter (${filterShape[2]}.`);\n util.assert(outDepth === filterShape[3], () => `Error in conv2dDerFilter: depth of dy (${outDepth}) must ` +\n `match output depth for filter (${filterShape[3]}).`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in conv2dDerFilter: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const forward = backend => {\n const dilations = 1;\n const $dataFormat = conv_util.convertConv2DDataFormat(dataFormat);\n const convInfo = conv_util.computeConv2DInfo(x4D.shape, filterShape, strides, dilations, pad, dimRoundingMode, false, $dataFormat);\n return backend.conv2dDerFilter(x4D, dy4D, convInfo);\n };\n const inputs = { x: x4D, dy: dy4D };\n const attrs = { strides, pad, dataFormat, dimRoundingMode, filterShape };\n return ENGINE.runKernelFunc(forward, inputs, null, Conv2DBackpropFilter, attrs);\n}\nexport const conv2DBackpropFilter = op({ conv2DBackpropFilter_ });\n//# sourceMappingURL=conv2d_backprop_filter.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as broadcast_util from './broadcast_util';\nimport { elu } from './elu';\nimport { mul } from './mul';\nimport { prelu } from './prelu';\nimport { relu } from './relu';\nimport { relu6 } from './relu6';\nimport { reshape } from './reshape';\nimport { step } from './step';\nimport { sum } from './sum';\n// Returns gradient for fused activation.\nexport function getFusedDyActivation(dy, y, activation) {\n if (activation == null || activation === 'linear') {\n return dy;\n }\n if (activation === 'relu') {\n return mul(dy, step(y));\n }\n throw new Error(`Cannot compute gradient for fused activation ${activation}.`);\n}\n// Returns gradient for fused bias.\nexport function getFusedBiasGradient(bias, dyActivation) {\n let res = dyActivation;\n const reduceAxes = broadcast_util.getReductionAxes(bias.shape, dyActivation.shape);\n if (reduceAxes.length > 0) {\n res = sum(res, reduceAxes);\n }\n return reshape(res, bias.shape);\n}\nexport function applyActivation(x, activation, preluActivationWeights) {\n if (activation === 'linear') {\n return x;\n }\n else if (activation === 'relu') {\n return relu(x);\n }\n else if (activation === 'elu') {\n return elu(x);\n }\n else if (activation === 'relu6') {\n return relu6(x);\n }\n else if (activation === 'prelu') {\n return prelu(x, preluActivationWeights);\n }\n throw new Error(`Unknown fused activation ${activation}.`);\n}\n// Whether we should call fused ops.\nexport const shouldFuse = (gradientDepth, activation) => {\n const gradientMode = gradientDepth > 0;\n return !gradientMode || activation === 'linear';\n};\n//# 
sourceMappingURL=fused_util.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { customGrad } from '../../gradients';\nimport { FusedConv2D } from '../../kernel_names';\nimport { makeTypesMatch } from '../../tensor_util';\nimport { convertToTensor } from '../../tensor_util_env';\nimport * as util from '../../util';\nimport { add } from '../add';\nimport * as broadcast_util from '../broadcast_util';\nimport { conv2d as unfusedConv2d } from '../conv2d';\nimport { conv2DBackpropFilter } from '../conv2d_backprop_filter';\nimport { conv2DBackpropInput } from '../conv2d_backprop_input';\nimport * as conv_util from '../conv_util';\nimport { applyActivation, getFusedBiasGradient, getFusedDyActivation, shouldFuse } from '../fused_util';\nimport { op } from '../operation';\nimport { reshape } from '../reshape';\n/**\n * Computes a 2D convolution over the input x, optionally fused with adding a\n * bias and applying an activation.\n *\n * ```js\n * const inputDepth = 2;\n * const inShape = [2, 2, 2, inputDepth];\n * const outputDepth = 2;\n * const fSize = 1;\n * const pad = 0;\n * const strides = 1;\n *\n * const x = tf.tensor4d( [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,\n * 16], inShape);\n * const w = tf.tensor4d([-1, 1, -2, 0.5], [fSize, fSize, inputDepth,\n * outputDepth]);\n *\n * tf.fused.conv2d({ x, filter: w, strides, pad, dataFormat: 'NHWC',\n * dilations: [1, 1], bias: tf.scalar(5), activation: 'relu' }).print();\n * ```\n *\n * @param obj An object with the following properties:\n * @param x The input tensor, of rank 4 or rank 3, of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is\n * assumed.\n * @param filter The filter, rank 4, of shape\n * `[filterHeight, filterWidth, inDepth, outDepth]`.\n * @param strides The strides of the convolution: `[strideHeight,\n * strideWidth]`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid` output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dataFormat An optional string from: \"NHWC\", \"NCHW\". Defaults to\n * \"NHWC\". Specify the data format of the input and output data. With the\n * default format \"NHWC\", the data is stored in the order of: [batch,\n * height, width, channels]. Only \"NHWC\" is currently supported.\n * @param dilations The dilation rates: `[dilationHeight, dilationWidth]`\n * in which we sample input values across the height and width dimensions\n * in atrous convolution. Defaults to `[1, 1]`. If `dilations` is a single\n * number, then `dilationHeight == dilationWidth`. 
If it is greater than\n * 1, then all values of `strides` must be 1.\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. If none is provided, it will not round\n * and error if the output is of fractional size.\n * @param bias Tensor to be added to the result.\n * @param activation Name of activation kernel (defaults to `linear`) to be\n * applied\n * after biasAdd.\n * @param preluActivationWeights Tensor of prelu weights to be applied as part\n * of a `prelu` activation, typically the same shape as `x`.\n */\nfunction fusedConv2d_({ x, filter, strides, pad, dataFormat = 'NHWC', dilations = [1, 1], dimRoundingMode, bias, activation = 'linear', preluActivationWeights }) {\n activation = activation || 'linear';\n if (shouldFuse(ENGINE.state.gradientDepth, activation) === false) {\n let result = unfusedConv2d(x, filter, strides, pad, dataFormat, dilations, dimRoundingMode);\n if (bias != null) {\n result = add(result, bias);\n }\n return applyActivation(result, activation, preluActivationWeights);\n }\n const $x = convertToTensor(x, 'x', 'conv2d');\n const $filter = convertToTensor(filter, 'filter', 'conv2d');\n let x4D = $x;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n reshapedTo4D = true;\n x4D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2]]);\n }\n util.assert(x4D.rank === 4, () => `Error in fused conv2d: input must be rank 4, but got rank ` +\n `${x4D.rank}.`);\n util.assert($filter.rank === 4, () => `Error in fused conv2d: filter must be rank 4, but got rank ` +\n `${$filter.rank}.`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in fused conv2d: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n util.assert(x4D.shape[3] === $filter.shape[2], () => `Error in conv2d: depth of input (${x4D.shape[3]}) must match ` +\n `input depth for filter ${$filter.shape[2]}.`);\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in conv2D: Either strides or dilations must be 1. ' +\n `Got strides ${strides} and dilations '${dilations}'`);\n util.assert(dataFormat === 'NHWC', () => `Error in conv2d: got dataFormat of ${dataFormat} but only NHWC is currently supported.`);\n const convInfo = conv_util.computeConv2DInfo(x4D.shape, $filter.shape, strides, dilations, pad, dimRoundingMode);\n let $bias;\n if (bias != null) {\n $bias = convertToTensor(bias, 'bias', 'fused conv2d');\n [$bias] = makeTypesMatch($bias, $x);\n broadcast_util.assertAndGetBroadcastShape(convInfo.outShape, $bias.shape);\n }\n let $preluActivationWeights;\n if (preluActivationWeights != null) {\n $preluActivationWeights = convertToTensor(preluActivationWeights, 'prelu weights', 'fused conv2d');\n }\n const grad = (dy, saved) => {\n const [$filter, x4D, y, $bias] = saved;\n const dyActivation = getFusedDyActivation(dy, y, activation);\n util.assert(conv_util.tupleValuesAreOne(dilations), () => 'Error in gradient of fused conv2D: ' +\n `dilation rates greater than 1 ` +\n `are not yet supported in gradients. 
Got dilations '${dilations}'`);\n const xDer = conv2DBackpropInput(x4D.shape, dyActivation, $filter, strides, pad);\n const filterDer = conv2DBackpropFilter(x4D, dyActivation, $filter.shape, strides, pad);\n const der = [xDer, filterDer];\n if ($bias != null) {\n const biasDer = getFusedBiasGradient($bias, dyActivation);\n der.push(biasDer);\n }\n return der;\n };\n const forward = (backend) => {\n const res = backend.fusedConv2d({\n input: x4D,\n filter: $filter,\n convInfo,\n bias: $bias,\n activation,\n preluActivationWeights: $preluActivationWeights\n });\n return res;\n };\n const inputs = {\n x: x4D,\n filter: $filter,\n bias: $bias,\n preluActivationWeights: $preluActivationWeights\n };\n const attrs = { strides, pad, dataFormat, dilations, dimRoundingMode, activation };\n // Depending on the the params passed in we will have different number of\n // inputs and thus a a different number of elements in the gradient.\n if (bias == null) {\n const customOp = customGrad((x4D, filter, save) => {\n let res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, FusedConv2D, attrs);\n save([filter, x4D, res]);\n if (reshapedTo4D) {\n res = reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return { value: res, gradFunc: grad };\n });\n return customOp(x4D, $filter);\n }\n else {\n const customOpWithBias = customGrad((x4D, filter, bias, save) => {\n let res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, FusedConv2D, attrs);\n save([filter, x4D, res, bias]);\n if (reshapedTo4D) {\n res = reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return { value: res, gradFunc: grad };\n });\n return customOpWithBias(x4D, $filter, $bias);\n }\n}\nexport const conv2d = op({ fusedConv2d_ });\n//# sourceMappingURL=conv2d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { DepthwiseConv2dNativeBackpropFilter } from '../kernel_names';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nfunction depthwiseConv2dNativeBackpropFilter_(x, dy, filterShape, strides, pad, dilations = [1, 1], dimRoundingMode) {\n let x4D = x;\n if (x.rank === 3) {\n x4D = reshape(x, [1, x.shape[0], x.shape[1], x.shape[2]]);\n }\n let dy4D = dy;\n if (dy4D.rank === 3) {\n dy4D = reshape(dy, [1, dy.shape[0], dy.shape[1], dy.shape[2]]);\n }\n const forward = backend => {\n const convInfo = conv_util.computeConv2DInfo(x.shape, filterShape, strides, dilations, pad, dimRoundingMode, true /* depthwise */);\n return backend.depthwiseConv2DDerFilter(x4D, dy4D, convInfo);\n };\n const inputs = { x: x4D, dy: dy4D };\n const attrs = { strides, pad, dimRoundingMode, dilations, filterShape };\n return ENGINE.runKernelFunc(forward, inputs, null, DepthwiseConv2dNativeBackpropFilter, attrs);\n}\nexport const depthwiseConv2dNativeBackpropFilter = op({ depthwiseConv2dNativeBackpropFilter_ });\n//# sourceMappingURL=depthwise_conv2d_native_backprop_filter.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { DepthwiseConv2dNativeBackpropInput } from '../kernel_names';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nfunction depthwiseConv2dNativeBackpropInput_(xShape, dy, filter, strides, pad, dilations = [1, 1], dimRoundingMode) {\n let dy4D = dy;\n let reshapedTo4D = false;\n if (dy.rank === 3) {\n reshapedTo4D = true;\n dy4D = reshape(dy, [1, dy.shape[0], dy.shape[1], dy.shape[2]]);\n }\n const forward = backend => {\n const convInfo = conv_util.computeConv2DInfo(xShape, filter.shape, strides, dilations, pad, dimRoundingMode, true /* depthwise */);\n return backend.depthwiseConv2DDerInput(dy4D, filter, convInfo);\n };\n const inputs = { dy: dy4D, filter };\n const attrs = { strides, pad, dimRoundingMode, dilations, inputShape: xShape };\n const res = ENGINE.runKernelFunc(forward, inputs, null, DepthwiseConv2dNativeBackpropInput, attrs);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const depthwiseConv2dNativeBackpropInput = op({ depthwiseConv2dNativeBackpropInput_ });\n//# sourceMappingURL=depthwise_conv2d_native_backprop_input.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { customGrad } from '../../gradients';\nimport { FusedDepthwiseConv2D } from '../../kernel_names';\nimport { makeTypesMatch } from '../../tensor_util';\nimport { convertToTensor } from '../../tensor_util_env';\nimport * as util from '../../util';\nimport { add } from '../add';\nimport * as broadcast_util from '../broadcast_util';\nimport * as conv_util from '../conv_util';\nimport { depthwiseConv2d as unfusedDepthwiseConv2d } from '../depthwise_conv2d';\nimport { depthwiseConv2dNativeBackpropFilter } from '../depthwise_conv2d_native_backprop_filter';\nimport { depthwiseConv2dNativeBackpropInput } from '../depthwise_conv2d_native_backprop_input';\nimport { applyActivation, getFusedBiasGradient, getFusedDyActivation, shouldFuse } from '../fused_util';\nimport { op } from '../operation';\nimport { reshape } from '../reshape';\n/**\n * Computes depthwise 2D convolution, optionally fused with adding a\n * bias and applying an activation.\n *\n * Given a 4D `input` array and a `filter` array of shape\n * `[filterHeight, filterWidth, inChannels, channelMultiplier]` containing\n * `inChannels` convolutional filters of depth 1, this op applies a\n * different filter to each input channel (expanding from 1 channel to\n * `channelMultiplier` channels for each), then concatenates the results\n * together. The output has `inChannels * channelMultiplier` channels.\n *\n * See\n * [https://www.tensorflow.org/api_docs/python/tf/nn/depthwise_conv2d](\n * https://www.tensorflow.org/api_docs/python/tf/nn/depthwise_conv2d)\n * for more details.\n *\n * @param obj An object with the following properties:\n * @param x The input tensor, of rank 4 or rank 3, of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is\n * assumed.\n * @param filter The filter tensor, rank 4, of shape\n * `[filterHeight, filterWidth, inChannels, channelMultiplier]`.\n * @param strides The strides of the convolution: `[strideHeight,\n * strideWidth]`. If strides is a single number, then `strideHeight ==\n * strideWidth`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dilations The dilation rates: `[dilationHeight, dilationWidth]`\n * in which we sample input values across the height and width dimensions\n * in atrous convolution. Defaults to `[1, 1]`. If `rate` is a single\n * number, then `dilationHeight == dilationWidth`. If it is greater than\n * 1, then all values of `strides` must be 1.\n * @param dataFormat: An optional string from: \"NHWC\", \"NCHW\". 
Defaults to\n * \"NHWC\". Specify the data format of the input and output data. With the\n * default format \"NHWC\", the data is stored in the order of: [batch,\n * height, width, channels]. Only \"NHWC\" is currently supported.\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. If none is provided, it will not round\n * and error if the output is of fractional size.\n * @param bias Tensor to be added to the result.\n * @param activation Name of activation kernel (defaults to `linear`).\n * @param preluActivationWeights Tensor of prelu weights to be applied as part\n * of a `prelu` activation, typically the same shape as `x`.\n */\nfunction fusedDepthwiseConv2d_({ x, filter, strides, pad, dataFormat = 'NHWC', dilations = [1, 1], dimRoundingMode, bias, activation = 'linear', preluActivationWeights }) {\n if (shouldFuse(ENGINE.state.gradientDepth, activation) === false) {\n let result = unfusedDepthwiseConv2d(x, filter, strides, pad, dataFormat, dilations, dimRoundingMode);\n if (bias != null) {\n result = add(result, bias);\n }\n return applyActivation(result, activation, preluActivationWeights);\n }\n const $x = convertToTensor(x, 'x', 'depthwiseConv2d');\n const $filter = convertToTensor(filter, 'filter', 'depthwiseConv2d');\n let x4D = $x;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n reshapedTo4D = true;\n x4D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2]]);\n }\n util.assert(x4D.rank === 4, () => `Error in fused depthwiseConv2d: input must be rank 4, but got ` +\n `rank ${x4D.rank}.`);\n util.assert($filter.rank === 4, () => `Error in fused depthwiseConv2d: filter must be rank 4, ` +\n `but got rank ${$filter.rank}.`);\n util.assert(x4D.shape[3] === $filter.shape[2], () => `Error in fused depthwiseConv2d: number of input channels ` +\n `(${x4D.shape[3]}) must match the inChannels dimension in ` +\n `filter ${$filter.shape[2]}.`);\n if (dilations == null) {\n dilations = [1, 1];\n }\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in fused depthwiseConv2d: Either strides or dilations must ' +\n `be 1. Got strides ${strides} and dilations '${dilations}'`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in fused depthwiseConv2d: pad must be an integer when ` +\n `using dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const convInfo = conv_util.computeConv2DInfo(x4D.shape, $filter.shape, strides, dilations, pad, dimRoundingMode, true /* depthwise */);\n let $bias;\n if (bias != null) {\n $bias = convertToTensor(bias, 'bias', 'fused conv2d');\n [$bias] = makeTypesMatch($bias, $x);\n broadcast_util.assertAndGetBroadcastShape(convInfo.outShape, $bias.shape);\n }\n let $preluActivationWeights;\n if (preluActivationWeights != null) {\n $preluActivationWeights = convertToTensor(preluActivationWeights, 'prelu weights', 'fused depthwiseConv2d');\n }\n const grad = (dy, saved) => {\n util.assert(conv_util.tupleValuesAreOne(dilations), () => 'Error in gradient of fused depthwiseConv2d: dilation rates ' +\n `greater than 1 are not yet supported. 
Got dilations ` +\n `'${dilations}'`);\n const [$filter, x4D, y, bias] = saved;\n const dyActivation = getFusedDyActivation(dy, y, activation);\n const xDer = depthwiseConv2dNativeBackpropInput(x4D.shape, dyActivation, $filter, strides, pad, dilations, dimRoundingMode);\n const filterDer = depthwiseConv2dNativeBackpropFilter(x4D, dyActivation, $filter.shape, strides, pad, dilations, dimRoundingMode);\n if (bias != null) {\n const biasDer = getFusedBiasGradient($bias, dyActivation);\n return [xDer, filterDer, biasDer];\n }\n return [xDer, filterDer];\n };\n const forward = (backend) => {\n const res = backend.fusedDepthwiseConv2D({\n input: x4D,\n filter: $filter,\n convInfo,\n bias: $bias,\n activation,\n preluActivationWeights: $preluActivationWeights\n });\n return res;\n };\n const inputs = {\n x: x4D,\n filter: $filter,\n bias: $bias,\n preluActivationWeights: $preluActivationWeights\n };\n const attrs = { strides, pad, dataFormat, dilations, dimRoundingMode, activation };\n // Depending on the the params passed in we will have different number of\n // inputs and thus a a different number of elements in the gradient.\n if (bias == null) {\n const customOp = customGrad((x4D, filter, save) => {\n let res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, FusedDepthwiseConv2D, attrs);\n save([filter, x4D, res]);\n if (reshapedTo4D) {\n res = reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return { value: res, gradFunc: grad };\n });\n return customOp(x4D, $filter);\n }\n else {\n const customOpWithBias = customGrad((x4D, filter, bias, save) => {\n let res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, FusedDepthwiseConv2D, attrs);\n save([filter, x4D, res, bias]);\n if (reshapedTo4D) {\n res = reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return { value: res, gradFunc: grad };\n });\n return customOpWithBias(x4D, $filter, $bias);\n }\n}\nexport const depthwiseConv2d = op({ fusedDepthwiseConv2d_ });\n//# sourceMappingURL=depthwise_conv2d.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { customGrad } from '../../gradients';\nimport { _FusedMatMul } from '../../kernel_names';\nimport { makeTypesMatch } from '../../tensor_util';\nimport { convertToTensor } from '../../tensor_util_env';\nimport * as util from '../../util';\nimport { add } from '../add';\nimport * as broadcast_util from '../broadcast_util';\nimport { applyActivation, getFusedBiasGradient, getFusedDyActivation, shouldFuse } from '../fused_util';\nimport { matMul as unfusedMatMul } from '../mat_mul';\nimport { op } from '../operation';\nimport { reshape } from '../reshape';\n/**\n * Computes the dot product of two matrices with optional activation and bias.\n *\n * ```js\n * const a = tf.tensor2d([-1, -2], [1, 2]);\n * const b = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n * const bias = tf.tensor2d([1, 2], [1, 2]);\n *\n * tf.fused.matMul({a, b, bias, activation: 'relu'}).print();\n * ```\n *\n * @param obj An object with the following properties:\n * - `a` First matrix in dot product operation.\n * - `b` Second matrix in dot product operation.\n * - `transposeA` If true, `a` is transposed before multiplication.\n * - `transposeB` If true, `b` is transposed before multiplication.\n * - `bias` Matrix to be added to the result.\n * - `activation` Name of activation kernel (defaults to `linear`).\n * - `preluActivationWeights` Tensor of prelu weights.\n */\nfunction fusedMatMul_({ a, b, transposeA = false, transposeB = false, bias, activation = 'linear', preluActivationWeights }) {\n if (shouldFuse(ENGINE.state.gradientDepth, activation) === false) {\n let result = unfusedMatMul(a, b, transposeA, transposeB);\n if (bias != null) {\n result = add(result, bias);\n }\n return applyActivation(result, activation, preluActivationWeights);\n }\n let $a = convertToTensor(a, 'a', 'fused matMul');\n let $b = convertToTensor(b, 'b', 'fused matMul');\n [$a, $b] = makeTypesMatch($a, $b);\n const innerShapeA = transposeA ? $a.shape[$a.rank - 2] : $a.shape[$a.rank - 1];\n const innerShapeB = transposeB ? $b.shape[$b.rank - 1] : $b.shape[$b.rank - 2];\n const outerShapeA = transposeA ? $a.shape[$a.rank - 1] : $a.shape[$a.rank - 2];\n const outerShapeB = transposeB ? 
$b.shape[$b.rank - 2] : $b.shape[$b.rank - 1];\n const outerDimsA = $a.shape.slice(0, -2);\n const outerDimsB = $b.shape.slice(0, -2);\n const batchDimA = util.sizeFromShape(outerDimsA);\n const batchDimB = util.sizeFromShape(outerDimsB);\n util.assert($a.rank >= 2 && $b.rank >= 2 && $a.rank === $b.rank, () => `Error in fused matMul: inputs must have the same rank of at least ` +\n `2, got ranks ${$a.rank} and ${$b.rank}.`);\n util.assert(util.arraysEqual(outerDimsA, outerDimsB), () => `Error in fused matMul: outer dimensions (${outerDimsA}) and (` +\n `${outerDimsB}) of Tensors with shapes ${$a.shape} and ` +\n `${$b.shape} must match.`);\n util.assert(innerShapeA === innerShapeB, () => `Error in fused matMul: inner shapes (${innerShapeA}) and (` +\n `${innerShapeB}) of Tensors with shapes ${$a.shape} and ` +\n `${$b.shape} and transposeA=${transposeA}` +\n ` and transposeB=${transposeB} must match.`);\n const outShape = $a.shape.slice(0, -2).concat([outerShapeA, outerShapeB]);\n const a3D = transposeA ?\n reshape($a, [batchDimA, innerShapeA, outerShapeA]) :\n reshape($a, [batchDimA, outerShapeA, innerShapeA]);\n const b3D = transposeB ?\n reshape($b, [batchDimB, outerShapeB, innerShapeB]) :\n reshape($b, [batchDimB, innerShapeB, outerShapeB]);\n let $bias;\n if (bias != null) {\n $bias = convertToTensor(bias, 'bias', 'fused matMul');\n [$bias] = makeTypesMatch($bias, $a);\n broadcast_util.assertAndGetBroadcastShape(outShape, $bias.shape);\n }\n let $preluActivationWeights;\n if (preluActivationWeights != null) {\n $preluActivationWeights = convertToTensor(preluActivationWeights, 'prelu weights', 'fused matMul');\n }\n const grad = (dy, saved) => {\n const [a3D, b3D, y, $bias] = saved;\n // we reshape dy because the result of the forward is not\n // necessarily going to be a 3d tensor due to a reshape done at the end of\n // the customOp.\n const dyActivation = getFusedDyActivation(reshape(dy, y.shape), y, activation);\n let aDer;\n let bDer;\n if (!transposeA && !transposeB) {\n aDer = unfusedMatMul(dyActivation, b3D, false, true);\n bDer = unfusedMatMul(a3D, dyActivation, true, false);\n }\n else if (!transposeA && transposeB) {\n aDer = unfusedMatMul(dyActivation, b3D, false, false);\n bDer = unfusedMatMul(dyActivation, a3D, true, false);\n }\n else if (transposeA && !transposeB) {\n aDer = unfusedMatMul(b3D, dyActivation, false, true);\n bDer = unfusedMatMul(a3D, dyActivation, false, false);\n }\n else {\n aDer = unfusedMatMul(b3D, dyActivation, true, true);\n bDer = unfusedMatMul(dyActivation, a3D, true, true);\n }\n if (bias != null) {\n const biasDer = getFusedBiasGradient($bias, dyActivation);\n return [aDer, bDer, biasDer];\n }\n else {\n return [aDer, bDer];\n }\n };\n const forward = (backend) => {\n const y = backend.fusedBatchMatMul({\n a: a3D,\n b: b3D,\n transposeA,\n transposeB,\n bias: $bias,\n activation,\n preluActivationWeights: $preluActivationWeights\n });\n return y;\n };\n const inputs = {\n a: a3D,\n b: b3D,\n bias: $bias,\n preluActivationWeights: $preluActivationWeights\n };\n const attrs = { transposeA, transposeB, activation };\n // Depending on the the params passed in we will have different number of\n // inputs and thus a a different number of elements in the gradient.\n if (bias == null) {\n const customOp = customGrad((a3D, b3D, save) => {\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, _FusedMatMul, attrs);\n save([a3D, b3D, res]);\n return { value: reshape(res, outShape), gradFunc: grad };\n });\n return customOp(a3D, b3D);\n }\n 
else {\n const customOpWithBias = customGrad((a3D, b3D, $bias, save) => {\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, _FusedMatMul, attrs);\n save([a3D, b3D, res, $bias]);\n return { value: reshape(res, outShape), gradFunc: grad };\n });\n return customOpWithBias(a3D, b3D, $bias);\n }\n}\nexport const matMul = op({ fusedMatMul_ });\n//# sourceMappingURL=mat_mul.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { conv2d } from './fused/conv2d';\nimport { depthwiseConv2d } from './fused/depthwise_conv2d';\nimport { matMul } from './fused/mat_mul';\nexport { conv2d, depthwiseConv2d, matMul };\n//# sourceMappingURL=fused_ops.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { op } from '../operation';\nimport { cosineWindow } from '../signal_ops_util';\n/**\n * Generate a hamming window.\n *\n * See: https://en.wikipedia.org/wiki/Window_function#Hann_and_Hamming_windows\n *\n * ```js\n * tf.signal.hammingWindow(10).print();\n * ```\n * @param The length of window\n *\n * @doc {heading: 'Operations', subheading: 'Signal', namespace: 'signal'}\n */\nfunction hammingWindow_(windowLength) {\n return cosineWindow(windowLength, 0.54, 0.46);\n}\nexport const hammingWindow = op({ hammingWindow_ });\n//# sourceMappingURL=hamming_window.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { op } from '../operation';\nimport { cosineWindow } from '../signal_ops_util';\n/**\n * Generate a Hann window.\n *\n * See: https://en.wikipedia.org/wiki/Window_function#Hann_and_Hamming_windows\n *\n * ```js\n * tf.signal.hannWindow(10).print();\n * ```\n * @param The length of window\n *\n * @doc {heading: 'Operations', subheading: 'Signal', namespace: 'signal'}\n */\nfunction hannWindow_(windowLength) {\n return cosineWindow(windowLength, 0.5, 0.5);\n}\nexport const hannWindow = op({ hannWindow_ });\n//# sourceMappingURL=hann_window.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { concat } from '../concat';\nimport { fill } from '../fill';\nimport { op } from '../operation';\nimport { reshape } from '../reshape';\nimport { slice } from '../slice';\nimport { tensor2d } from '../tensor2d';\n/**\n * Expands input into frames of frameLength.\n * Slides a window size with frameStep.\n *\n * ```js\n * tf.signal.frame([1, 2, 3], 2, 1).print();\n * ```\n * @param signal The input tensor to be expanded\n * @param frameLength Length of each frame\n * @param frameStep The frame hop size in samples.\n * @param padEnd Whether to pad the end of signal with padValue.\n * @param padValue An number to use where the input signal does\n * not exist when padEnd is True.\n *\n * @doc {heading: 'Operations', subheading: 'Signal', namespace: 'signal'}\n */\nfunction frame_(signal, frameLength, frameStep, padEnd = false, padValue = 0) {\n let start = 0;\n const output = [];\n while (start + frameLength <= signal.size) {\n output.push(slice(signal, start, frameLength));\n start += frameStep;\n }\n if (padEnd) {\n while (start < signal.size) {\n const padLen = (start + frameLength) - signal.size;\n const pad = concat([\n slice(signal, start, frameLength - padLen), fill([padLen], padValue)\n ]);\n output.push(pad);\n start += frameStep;\n }\n }\n if (output.length === 0) {\n return tensor2d([], [0, frameLength]);\n }\n return reshape(concat(output), [output.length, frameLength]);\n}\nexport const frame = op({ frame_ });\n//# sourceMappingURL=frame.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { concat } from '../concat';\nimport { mul } from '../mul';\nimport { op } from '../operation';\nimport { enclosingPowerOfTwo } from '../signal_ops_util';\nimport { slice } from '../slice';\nimport { rfft } from '../spectral/rfft';\nimport { frame } from './frame';\nimport { hannWindow } from './hann_window';\n/**\n * Computes the Short-time Fourier Transform of signals\n * See: https://en.wikipedia.org/wiki/Short-time_Fourier_transform\n *\n * ```js\n * const input = tf.tensor1d([1, 1, 1, 1, 1])\n * tf.signal.stft(input, 3, 1).print();\n * ```\n * @param signal 1-dimensional real value tensor.\n * @param frameLength The window length of samples.\n * @param frameStep The number of samples to step.\n * @param fftLength The size of the FFT to apply.\n * @param windowFn A callable that takes a window length and returns 1-d tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Signal', namespace: 'signal'}\n */\nfunction stft_(signal, frameLength, frameStep, fftLength, windowFn = hannWindow) {\n if (fftLength == null) {\n fftLength = enclosingPowerOfTwo(frameLength);\n }\n const framedSignal = frame(signal, frameLength, frameStep);\n const windowedSignal = mul(framedSignal, windowFn(frameLength));\n const output = [];\n for (let i = 0; i < framedSignal.shape[0]; i++) {\n output.push(rfft(slice(windowedSignal, [i, 0], [1, frameLength]), fftLength));\n }\n return concat(output);\n}\nexport const stft = op({ stft_ });\n//# sourceMappingURL=stft.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { CropAndResize } from '../../kernel_names';\nimport { convertToTensor } from '../../tensor_util_env';\nimport * as util from '../../util';\nimport { op } from '../operation';\n/**\n * Extracts crops from the input image tensor and resizes them using bilinear\n * sampling or nearest neighbor sampling (possibly with aspect ratio change)\n * to a common output size specified by cropSize.\n *\n * @param image 4d tensor of shape `[batch,imageHeight,imageWidth, depth]`,\n * where imageHeight and imageWidth must be positive, specifying the\n * batch of images from which to take crops\n * @param boxes 2d float32 tensor of shape `[numBoxes, 4]`. Each entry is\n * `[y1, x1, y2, x2]`, where `(y1, x1)` and `(y2, x2)` are the normalized\n * coordinates of the box in the boxInd[i]'th image in the batch\n * @param boxInd 1d int32 tensor of shape `[numBoxes]` with values in range\n * `[0, batch)` that specifies the image that the `i`-th box refers to.\n * @param cropSize 1d int32 tensor of 2 elements `[cropHeigh, cropWidth]`\n * specifying the size to which all crops are resized to.\n * @param method Optional string from `'bilinear' | 'nearest'`,\n * defaults to bilinear, which specifies the sampling method for resizing\n * @param extrapolationValue A threshold for deciding when to remove boxes based\n * on score. 
Defaults to 0.\n * @return A 4D tensor of the shape `[numBoxes,cropHeight,cropWidth,depth]`\n *\n * @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'}\n */\nfunction cropAndResize_(image, boxes, boxInd, cropSize, method, extrapolationValue) {\n const $image = convertToTensor(image, 'image', 'cropAndResize');\n const $boxes = convertToTensor(boxes, 'boxes', 'cropAndResize', 'float32');\n const $boxInd = convertToTensor(boxInd, 'boxInd', 'cropAndResize', 'int32');\n method = method || 'bilinear';\n extrapolationValue = extrapolationValue || 0;\n const numBoxes = $boxes.shape[0];\n util.assert($image.rank === 4, () => 'Error in cropAndResize: image must be rank 4,' +\n `but got rank ${$image.rank}.`);\n util.assert($boxes.rank === 2 && $boxes.shape[1] === 4, () => `Error in cropAndResize: boxes must be have size [${numBoxes},4] ` +\n `but had shape ${$boxes.shape}.`);\n util.assert($boxInd.rank === 1 && $boxInd.shape[0] === numBoxes, () => `Error in cropAndResize: boxInd must be have size [${numBoxes}] ` +\n `but had shape ${$boxes.shape}.`);\n util.assert(cropSize.length === 2, () => `Error in cropAndResize: cropSize must be of length 2, but got ` +\n `length ${cropSize.length}.`);\n util.assert(cropSize[0] >= 1 && cropSize[1] >= 1, () => `cropSize must be atleast [1,1], but was ${cropSize}`);\n util.assert(method === 'bilinear' || method === 'nearest', () => `method must be bilinear or nearest, but was ${method}`);\n const forward = (backend) => backend.cropAndResize($image, $boxes, $boxInd, cropSize, method, extrapolationValue);\n const inputs = { image: $image, boxes: $boxes, boxInd: $boxInd };\n const attrs = { method, extrapolationValue, cropSize };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, CropAndResize, attrs);\n return res;\n}\nexport const cropAndResize = op({ cropAndResize_ });\n//# sourceMappingURL=crop_and_resize.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { FlipLeftRight } from '../../kernel_names';\nimport { convertToTensor } from '../../tensor_util_env';\nimport * as util from '../../util';\nimport { op } from '../operation';\n/**\n * Flips the image left to right. 
Currently available in the CPU, WebGL, and\n * WASM backends.\n *\n * @param image 4d tensor of shape `[batch, imageHeight, imageWidth, depth]`.\n */\n/** @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'} */\nfunction flipLeftRight_(image) {\n const $image = convertToTensor(image, 'image', 'flipLeftRight', 'float32');\n util.assert($image.rank === 4, () => 'Error in flipLeftRight: image must be rank 4,' +\n `but got rank ${$image.rank}.`);\n const inputs = { image: $image };\n const res = ENGINE.runKernel(FlipLeftRight, inputs, {});\n return res;\n}\nexport const flipLeftRight = op({ flipLeftRight_ });\n//# sourceMappingURL=flip_left_right.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { RotateWithOffset } from '../../kernel_names';\nimport { convertToTensor } from '../../tensor_util_env';\nimport * as util from '../../util';\nimport { op } from '../operation';\n/**\n * Rotates the input image tensor counter-clockwise with an optional offset\n * center of rotation. Currently available in the CPU, WebGL, and WASM backends.\n *\n * @param image 4d tensor of shape `[batch, imageHeight, imageWidth, depth]`.\n * @param radians The amount of rotation.\n * @param fillValue The value to fill in the empty space leftover\n * after rotation. Can be either a single grayscale value (0-255), or an\n * array of three numbers `[red, green, blue]` specifying the red, green,\n * and blue channels. Defaults to `0` (black).\n * @param center The center of rotation. Can be either a single value (0-1), or\n * an array of two numbers `[centerX, centerY]`. Defaults to `0.5` (rotates\n * the image around its center).\n *\n * @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'}\n */\nfunction rotateWithOffset_(image, radians, fillValue = 0, center = 0.5) {\n const $image = convertToTensor(image, 'image', 'rotateWithOffset', 'float32');\n util.assert($image.rank === 4, () => 'Error in rotateWithOffset: image must be rank 4,' +\n `but got rank ${$image.rank}.`);\n const inputs = { image: $image };\n const attrs = { radians, fillValue, center };\n const res = ENGINE.runKernel(RotateWithOffset, inputs, attrs);\n return res;\n}\nexport const rotateWithOffset = op({ rotateWithOffset_ });\n//# sourceMappingURL=rotate_with_offset.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as util from '../util';\nfunction nonMaxSuppSanityCheck(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma) {\n if (iouThreshold == null) {\n iouThreshold = 0.5;\n }\n if (scoreThreshold == null) {\n scoreThreshold = Number.NEGATIVE_INFINITY;\n }\n if (softNmsSigma == null) {\n softNmsSigma = 0.0;\n }\n const numBoxes = boxes.shape[0];\n maxOutputSize = Math.min(maxOutputSize, numBoxes);\n util.assert(0 <= iouThreshold && iouThreshold <= 1, () => `iouThreshold must be in [0, 1], but was '${iouThreshold}'`);\n util.assert(boxes.rank === 2, () => `boxes must be a 2D tensor, but was of rank '${boxes.rank}'`);\n util.assert(boxes.shape[1] === 4, () => `boxes must have 4 columns, but 2nd dimension was ${boxes.shape[1]}`);\n util.assert(scores.rank === 1, () => 'scores must be a 1D tensor');\n util.assert(scores.shape[0] === numBoxes, () => `scores has incompatible shape with boxes. Expected ${numBoxes}, ` +\n `but was ${scores.shape[0]}`);\n util.assert(0 <= softNmsSigma && softNmsSigma <= 1, () => `softNmsSigma must be in [0, 1], but was '${softNmsSigma}'`);\n return { maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma };\n}\nexport { nonMaxSuppSanityCheck };\n//# sourceMappingURL=nonmax_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { NonMaxSuppressionV3 } from '../../kernel_names';\nimport { convertToTensor } from '../../tensor_util_env';\nimport { nonMaxSuppSanityCheck } from '../nonmax_util';\nimport { op } from '../operation';\nfunction nonMaxSuppression_(boxes, scores, maxOutputSize, iouThreshold = 0.5, scoreThreshold = Number.NEGATIVE_INFINITY) {\n const $boxes = convertToTensor(boxes, 'boxes', 'nonMaxSuppression');\n const $scores = convertToTensor(scores, 'scores', 'nonMaxSuppression');\n const inputs = nonMaxSuppSanityCheck($boxes, $scores, maxOutputSize, iouThreshold, scoreThreshold);\n maxOutputSize = inputs.maxOutputSize;\n iouThreshold = inputs.iouThreshold;\n scoreThreshold = inputs.scoreThreshold;\n const attrs = { maxOutputSize, iouThreshold, scoreThreshold };\n return ENGINE.runKernelFunc(b => b.nonMaxSuppression($boxes, $scores, maxOutputSize, iouThreshold, scoreThreshold), { boxes: $boxes, scores: $scores }, null /* grad */, NonMaxSuppressionV3, attrs);\n}\nexport const nonMaxSuppression = op({ nonMaxSuppression_ });\n//# sourceMappingURL=non_max_suppression.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * Inserts a value into a sorted array. This method allows duplicate, meaning it\n * allows inserting duplicate value, in which case, the element will be inserted\n * at the lowest index of the value.\n * @param arr The array to modify.\n * @param element The element to insert.\n * @param comparator Optional. If no comparator is specified, elements are\n * compared using array_util.defaultComparator, which is suitable for Strings\n * and Numbers in ascending arrays. If the array contains multiple instances of\n * the target value, the left-most instance will be returned. To provide a\n * comparator, it should take 2 arguments to compare and return a negative,\n * zero, or a positive number.\n */\nexport function binaryInsert(arr, element, comparator) {\n const index = binarySearch(arr, element, comparator);\n const insertionPoint = index < 0 ? 
-(index + 1) : index;\n arr.splice(insertionPoint, 0, element);\n}\n/**\n * Searches the array for the target using binary search, returns the index\n * of the found element, or position to insert if element not found. If no\n * comparator is specified, elements are compared using array_\n * util.defaultComparator, which is suitable for Strings and Numbers in\n * ascending arrays. If the array contains multiple instances of the target\n * value, the left-most instance will be returned.\n * @param arr The array to be searched in.\n * @param target The target to be searched for.\n * @param comparator Should take 2 arguments to compare and return a negative,\n * zero, or a positive number.\n * @return Lowest index of the target value if found, otherwise the insertion\n * point where the target should be inserted, in the form of\n * (-insertionPoint - 1).\n */\nexport function binarySearch(arr, target, comparator) {\n return binarySearch_(arr, target, comparator || defaultComparator);\n}\n/**\n * Compares its two arguments for order.\n * @param a The first element to be compared.\n * @param b The second element to be compared.\n * @return A negative number, zero, or a positive number as the first\n * argument is less than, equal to, or greater than the second.\n */\nfunction defaultComparator(a, b) {\n return a > b ? 1 : a < b ? -1 : 0;\n}\nfunction binarySearch_(arr, target, comparator) {\n let left = 0;\n let right = arr.length;\n let middle = 0;\n let found = false;\n while (left < right) {\n middle = left + ((right - left) >>> 1);\n const compareResult = comparator(target, arr[middle]);\n if (compareResult > 0) {\n left = middle + 1;\n }\n else {\n right = middle;\n // If compareResult is 0, the value is found. We record it is found,\n // and then keep looking because there may be duplicate.\n found = !compareResult;\n }\n }\n return found ? left : -left - 1;\n}\n//# sourceMappingURL=array_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * Implementation of the NonMaxSuppression kernel shared between webgl and cpu.\n */\nimport { scalar } from '../ops/scalar';\nimport { tensor1d } from '../ops/tensor1d';\nimport { binaryInsert } from './array_util';\nexport function nonMaxSuppressionV3Impl(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold) {\n return nonMaxSuppressionImpl_(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold, 0 /* softNmsSigma */)\n .selectedIndices;\n}\nexport function nonMaxSuppressionV4Impl(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold, padToMaxOutputSize) {\n return nonMaxSuppressionImpl_(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold, 0 /* softNmsSigma */, false /* returnScoresTensor */, padToMaxOutputSize /* padToMaxOutputSize */, true\n /* returnValidOutputs */ );\n}\nexport function nonMaxSuppressionV5Impl(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma) {\n return nonMaxSuppressionImpl_(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma, true /* returnScoresTensor */);\n}\nfunction nonMaxSuppressionImpl_(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma, returnScoresTensor = false, padToMaxOutputSize = false, returnValidOutputs = false) {\n // The list is sorted in ascending order, so that we can always pop the\n // candidate with the largest score in O(1) time.\n const candidates = [];\n for (let i = 0; i < scores.length; i++) {\n if (scores[i] > scoreThreshold) {\n candidates.push({ score: scores[i], boxIndex: i, suppressBeginIndex: 0 });\n }\n }\n candidates.sort(ascendingComparator);\n // If softNmsSigma is 0, the outcome of this algorithm is exactly same as\n // before.\n const scale = softNmsSigma > 0 ? (-0.5 / softNmsSigma) : 0.0;\n const selectedIndices = [];\n const selectedScores = [];\n while (selectedIndices.length < maxOutputSize && candidates.length > 0) {\n const candidate = candidates.pop();\n const { score: originalScore, boxIndex, suppressBeginIndex } = candidate;\n if (originalScore < scoreThreshold) {\n break;\n }\n // Overlapping boxes are likely to have similar scores, therefore we\n // iterate through the previously selected boxes backwards in order to\n // see if candidate's score should be suppressed. We use\n // suppressBeginIndex to track and ensure a candidate can be suppressed\n // by a selected box no more than once. 
Also, if the overlap exceeds\n // iouThreshold, we simply ignore the candidate.\n let ignoreCandidate = false;\n for (let j = selectedIndices.length - 1; j >= suppressBeginIndex; --j) {\n const iou = intersectionOverUnion(boxes, boxIndex, selectedIndices[j]);\n if (iou >= iouThreshold) {\n ignoreCandidate = true;\n break;\n }\n candidate.score =\n candidate.score * suppressWeight(iouThreshold, scale, iou);\n if (candidate.score <= scoreThreshold) {\n break;\n }\n }\n // At this point, if `candidate.score` has not dropped below\n // `scoreThreshold`, then we know that we went through all of the\n // previous selections and can safely update `suppressBeginIndex` to the\n // end of the selected array. Then we can re-insert the candidate with\n // the updated score and suppressBeginIndex back in the candidate list.\n // If on the other hand, `candidate.score` has dropped below the score\n // threshold, we will not add it back to the candidates list.\n candidate.suppressBeginIndex = selectedIndices.length;\n if (!ignoreCandidate) {\n // Candidate has passed all the tests, and is not suppressed, so\n // select the candidate.\n if (candidate.score === originalScore) {\n selectedIndices.push(boxIndex);\n selectedScores.push(candidate.score);\n }\n else if (candidate.score > scoreThreshold) {\n // Candidate's score is suppressed but is still high enough to be\n // considered, so add back to the candidates list.\n binaryInsert(candidates, candidate, ascendingComparator);\n }\n }\n }\n // NonMaxSuppressionV4 feature: padding output to maxOutputSize.\n const validOutputs = selectedIndices.length;\n const elemsToPad = maxOutputSize - validOutputs;\n if (padToMaxOutputSize && elemsToPad > 0) {\n selectedIndices.push(...new Array(elemsToPad).fill(0));\n selectedScores.push(...new Array(elemsToPad).fill(0.0));\n }\n const result = { selectedIndices: tensor1d(selectedIndices, 'int32') };\n if (returnScoresTensor) {\n result['selectedScores'] = tensor1d(selectedScores, 'float32');\n }\n if (returnValidOutputs) {\n result['validOutputs'] = scalar(validOutputs, 'int32');\n }\n return result;\n}\nfunction intersectionOverUnion(boxes, i, j) {\n const iCoord = boxes.subarray(i * 4, i * 4 + 4);\n const jCoord = boxes.subarray(j * 4, j * 4 + 4);\n const yminI = Math.min(iCoord[0], iCoord[2]);\n const xminI = Math.min(iCoord[1], iCoord[3]);\n const ymaxI = Math.max(iCoord[0], iCoord[2]);\n const xmaxI = Math.max(iCoord[1], iCoord[3]);\n const yminJ = Math.min(jCoord[0], jCoord[2]);\n const xminJ = Math.min(jCoord[1], jCoord[3]);\n const ymaxJ = Math.max(jCoord[0], jCoord[2]);\n const xmaxJ = Math.max(jCoord[1], jCoord[3]);\n const areaI = (ymaxI - yminI) * (xmaxI - xminI);\n const areaJ = (ymaxJ - yminJ) * (xmaxJ - xminJ);\n if (areaI <= 0 || areaJ <= 0) {\n return 0.0;\n }\n const intersectionYmin = Math.max(yminI, yminJ);\n const intersectionXmin = Math.max(xminI, xminJ);\n const intersectionYmax = Math.min(ymaxI, ymaxJ);\n const intersectionXmax = Math.min(xmaxI, xmaxJ);\n const intersectionArea = Math.max(intersectionYmax - intersectionYmin, 0.0) *\n Math.max(intersectionXmax - intersectionXmin, 0.0);\n return intersectionArea / (areaI + areaJ - intersectionArea);\n}\n// A Gaussian penalty function, this method always returns values in [0, 1].\n// The weight is a function of similarity, the more overlap two boxes are, the\n// smaller the weight is, meaning highly overlapping boxe will be significantly\n// penalized. 
On the other hand, a non-overlapping box will not be penalized.\nfunction suppressWeight(iouThreshold, scale, iou) {\n const weight = Math.exp(scale * iou * iou);\n return iou <= iouThreshold ? weight : 0.0;\n}\nfunction ascendingComparator(c1, c2) {\n // For objects with same scores, we make the object with the larger index go\n // first. In an array that pops from the end, this means that the object with\n // the smaller index will be popped first. This ensures the same output as\n // the TensorFlow python version.\n return (c1.score - c2.score) ||\n ((c1.score === c2.score) && (c2.boxIndex - c1.boxIndex));\n}\n//# sourceMappingURL=non_max_suppression_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { nonMaxSuppressionV3Impl } from '../../backends/non_max_suppression_impl';\nimport { convertToTensor } from '../../tensor_util_env';\nimport { nonMaxSuppSanityCheck } from '../nonmax_util';\n/**\n * Performs non maximum suppression of bounding boxes based on\n * iou (intersection over union).\n *\n * This is the async version of `nonMaxSuppression`\n *\n * @param boxes a 2d tensor of shape `[numBoxes, 4]`. Each entry is\n * `[y1, x1, y2, x2]`, where `(y1, x1)` and `(y2, x2)` are the corners of\n * the bounding box.\n * @param scores a 1d tensor providing the box scores of shape `[numBoxes]`.\n * @param maxOutputSize The maximum number of boxes to be selected.\n * @param iouThreshold A float representing the threshold for deciding whether\n * boxes overlap too much with respect to IOU. Must be between [0, 1].\n * Defaults to 0.5 (50% box overlap).\n * @param scoreThreshold A threshold for deciding when to remove boxes based\n * on score. 
Defaults to -inf, which means any score is accepted.\n * @return A 1D tensor with the selected box indices.\n *\n * @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'}\n */\nasync function nonMaxSuppressionAsync_(boxes, scores, maxOutputSize, iouThreshold = 0.5, scoreThreshold = Number.NEGATIVE_INFINITY) {\n const $boxes = convertToTensor(boxes, 'boxes', 'nonMaxSuppressionAsync');\n const $scores = convertToTensor(scores, 'scores', 'nonMaxSuppressionAsync');\n const inputs = nonMaxSuppSanityCheck($boxes, $scores, maxOutputSize, iouThreshold, scoreThreshold);\n maxOutputSize = inputs.maxOutputSize;\n iouThreshold = inputs.iouThreshold;\n scoreThreshold = inputs.scoreThreshold;\n const boxesAndScores = await Promise.all([$boxes.data(), $scores.data()]);\n const boxesVals = boxesAndScores[0];\n const scoresVals = boxesAndScores[1];\n // We call a cpu based impl directly with the typedarray data here rather\n // than a kernel because all kernels are synchronous (and thus cannot await\n // .data()).\n const res = nonMaxSuppressionV3Impl(boxesVals, scoresVals, maxOutputSize, iouThreshold, scoreThreshold);\n if ($boxes !== boxes) {\n $boxes.dispose();\n }\n if ($scores !== scores) {\n $scores.dispose();\n }\n return res;\n}\nexport const nonMaxSuppressionAsync = nonMaxSuppressionAsync_;\n//# sourceMappingURL=non_max_suppression_async.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { NonMaxSuppressionV5 } from '../../kernel_names';\nimport { convertToTensor } from '../../tensor_util_env';\nimport { nonMaxSuppSanityCheck } from '../nonmax_util';\nimport { op } from '../operation';\n/**\n * Performs non maximum suppression of bounding boxes based on\n * iou (intersection over union).\n *\n * This op also supports a Soft-NMS mode (c.f.\n * Bodla et al, https://arxiv.org/abs/1704.04503) where boxes reduce the score\n * of other overlapping boxes, therefore favoring different regions of the image\n * with high scores. To enable this Soft-NMS mode, set the `softNmsSigma`\n * parameter to be larger than 0.\n *\n * @param boxes a 2d tensor of shape `[numBoxes, 4]`. Each entry is\n * `[y1, x1, y2, x2]`, where `(y1, x1)` and `(y2, x2)` are the corners of\n * the bounding box.\n * @param scores a 1d tensor providing the box scores of shape `[numBoxes]`.\n * @param maxOutputSize The maximum number of boxes to be selected.\n * @param iouThreshold A float representing the threshold for deciding whether\n * boxes overlap too much with respect to IOU. Must be between [0, 1].\n * Defaults to 0.5 (50% box overlap).\n * @param scoreThreshold A threshold for deciding when to remove boxes based\n * on score. 
Defaults to -inf, which means any score is accepted.\n * @param softNmsSigma A float representing the sigma parameter for Soft NMS.\n * When sigma is 0, it falls back to nonMaxSuppression.\n * @return A map with the following properties:\n * - selectedIndices: A 1D tensor with the selected box indices.\n * - selectedScores: A 1D tensor with the corresponding scores for each\n * selected box.\n *\n * @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'}\n */\nfunction nonMaxSuppressionWithScore_(boxes, scores, maxOutputSize, iouThreshold = 0.5, scoreThreshold = Number.NEGATIVE_INFINITY, softNmsSigma = 0.0) {\n const $boxes = convertToTensor(boxes, 'boxes', 'nonMaxSuppression');\n const $scores = convertToTensor(scores, 'scores', 'nonMaxSuppression');\n const params = nonMaxSuppSanityCheck($boxes, $scores, maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma);\n maxOutputSize = params.maxOutputSize;\n iouThreshold = params.iouThreshold;\n scoreThreshold = params.scoreThreshold;\n softNmsSigma = params.softNmsSigma;\n const inputs = { boxes: $boxes, scores: $scores };\n const attrs = { maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma };\n const result = ENGINE.runKernel(NonMaxSuppressionV5, inputs, attrs);\n return { selectedIndices: result[0], selectedScores: result[1] };\n}\nexport const nonMaxSuppressionWithScore = op({ nonMaxSuppressionWithScore_ });\n//# sourceMappingURL=non_max_suppression_with_score.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { nonMaxSuppressionV5Impl } from '../../backends/non_max_suppression_impl';\nimport { convertToTensor } from '../../tensor_util_env';\nimport { nonMaxSuppSanityCheck } from '../nonmax_util';\n/**\n * Asynchronously performs non maximum suppression of bounding boxes based on\n * iou (intersection over union).\n *\n * This op also supports a Soft-NMS mode (c.f.\n * Bodla et al, https://arxiv.org/abs/1704.04503) where boxes reduce the score\n * of other overlapping boxes, therefore favoring different regions of the image\n * with high scores. To enable this Soft-NMS mode, set the `softNmsSigma`\n * parameter to be larger than 0.\n *\n * @param boxes a 2d tensor of shape `[numBoxes, 4]`. Each entry is\n * `[y1, x1, y2, x2]`, where `(y1, x1)` and `(y2, x2)` are the corners of\n * the bounding box.\n * @param scores a 1d tensor providing the box scores of shape `[numBoxes]`.\n * @param maxOutputSize The maximum number of boxes to be selected.\n * @param iouThreshold A float representing the threshold for deciding whether\n * boxes overlap too much with respect to IOU. Must be between [0, 1].\n * Defaults to 0.5 (50% box overlap).\n * @param scoreThreshold A threshold for deciding when to remove boxes based\n * on score. 
Defaults to -inf, which means any score is accepted.\n * @param softNmsSigma A float representing the sigma parameter for Soft NMS.\n * When sigma is 0, it falls back to nonMaxSuppression.\n * @return A map with the following properties:\n * - selectedIndices: A 1D tensor with the selected box indices.\n * - selectedScores: A 1D tensor with the corresponding scores for each\n * selected box.\n *\n * @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'}\n */\nasync function nonMaxSuppressionWithScoreAsync_(boxes, scores, maxOutputSize, iouThreshold = 0.5, scoreThreshold = Number.NEGATIVE_INFINITY, softNmsSigma = 0.0) {\n const $boxes = convertToTensor(boxes, 'boxes', 'nonMaxSuppressionAsync');\n const $scores = convertToTensor(scores, 'scores', 'nonMaxSuppressionAsync');\n const params = nonMaxSuppSanityCheck($boxes, $scores, maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma);\n maxOutputSize = params.maxOutputSize;\n iouThreshold = params.iouThreshold;\n scoreThreshold = params.scoreThreshold;\n softNmsSigma = params.softNmsSigma;\n const boxesAndScores = await Promise.all([$boxes.data(), $scores.data()]);\n const boxesVals = boxesAndScores[0];\n const scoresVals = boxesAndScores[1];\n // We call a cpu based impl directly with the typedarray data here rather\n // than a kernel because all kernels are synchronous (and thus cannot await\n // .data()).\n const res = nonMaxSuppressionV5Impl(boxesVals, scoresVals, maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma);\n if ($boxes !== boxes) {\n $boxes.dispose();\n }\n if ($scores !== scores) {\n $scores.dispose();\n }\n return res;\n}\nexport const nonMaxSuppressionWithScoreAsync = nonMaxSuppressionWithScoreAsync_;\n//# sourceMappingURL=non_max_suppression_with_score_async.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { NonMaxSuppressionV4 } from '../../kernel_names';\nimport { convertToTensor } from '../../tensor_util_env';\nimport { nonMaxSuppSanityCheck } from '../nonmax_util';\nimport { op } from '../operation';\n/**\n * Asynchronously performs non maximum suppression of bounding boxes based on\n * iou (intersection over union), with an option to pad results.\n *\n * @param boxes a 2d tensor of shape `[numBoxes, 4]`. Each entry is\n * `[y1, x1, y2, x2]`, where `(y1, x1)` and `(y2, x2)` are the corners of\n * the bounding box.\n * @param scores a 1d tensor providing the box scores of shape `[numBoxes]`.\n * @param maxOutputSize The maximum number of boxes to be selected.\n * @param iouThreshold A float representing the threshold for deciding whether\n * boxes overlap too much with respect to IOU. Must be between [0, 1].\n * Defaults to 0.5 (50% box overlap).\n * @param scoreThreshold A threshold for deciding when to remove boxes based\n * on score. 
Defaults to -inf, which means any score is accepted.\n * @param padToMaxOutputSize Defalts to false. If true, size of output\n * `selectedIndices` is padded to maxOutputSize.\n * @return A map with the following properties:\n * - selectedIndices: A 1D tensor with the selected box indices.\n * - validOutputs: A scalar denoting how many elements in `selectedIndices`\n * are valid. Valid elements occur first, then padding.\n *\n * @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'}\n */\nfunction nonMaxSuppressionPadded_(boxes, scores, maxOutputSize, iouThreshold = 0.5, scoreThreshold = Number.NEGATIVE_INFINITY, padToMaxOutputSize = false) {\n const $boxes = convertToTensor(boxes, 'boxes', 'nonMaxSuppression');\n const $scores = convertToTensor(scores, 'scores', 'nonMaxSuppression');\n const params = nonMaxSuppSanityCheck($boxes, $scores, maxOutputSize, iouThreshold, scoreThreshold, null /* softNmsSigma */);\n const $maxOutputSize = params.maxOutputSize;\n const $iouThreshold = params.iouThreshold;\n const $scoreThreshold = params.scoreThreshold;\n const inputs = { boxes: $boxes, scores: $scores };\n const attrs = {\n maxOutputSize: $maxOutputSize,\n iouThreshold: $iouThreshold,\n scoreThreshold: $scoreThreshold,\n padToMaxOutputSize\n };\n const result = ENGINE.runKernel(NonMaxSuppressionV4, inputs, attrs);\n return { selectedIndices: result[0], validOutputs: result[1] };\n}\nexport const nonMaxSuppressionPadded = op({ nonMaxSuppressionPadded_ });\n//# sourceMappingURL=non_max_suppression_padded.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { nonMaxSuppressionV4Impl } from '../../backends/non_max_suppression_impl';\nimport { convertToTensor } from '../../tensor_util_env';\nimport { nonMaxSuppSanityCheck } from '../nonmax_util';\n/**\n * Asynchronously performs non maximum suppression of bounding boxes based on\n * iou (intersection over union), with an option to pad results.\n *\n * @param boxes a 2d tensor of shape `[numBoxes, 4]`. Each entry is\n * `[y1, x1, y2, x2]`, where `(y1, x1)` and `(y2, x2)` are the corners of\n * the bounding box.\n * @param scores a 1d tensor providing the box scores of shape `[numBoxes]`.\n * @param maxOutputSize The maximum number of boxes to be selected.\n * @param iouThreshold A float representing the threshold for deciding whether\n * boxes overlap too much with respect to IOU. Must be between [0, 1].\n * Defaults to 0.5 (50% box overlap).\n * @param scoreThreshold A threshold for deciding when to remove boxes based\n * on score. Defaults to -inf, which means any score is accepted.\n * @param padToMaxOutputSize Defalts to false. 
If true, size of output\n * `selectedIndices` is padded to maxOutputSize.\n * @return A map with the following properties:\n * - selectedIndices: A 1D tensor with the selected box indices.\n * - validOutputs: A scalar denoting how many elements in `selectedIndices`\n * are valid. Valid elements occur first, then padding.\n *\n * @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'}\n */\nasync function nonMaxSuppressionPaddedAsync_(boxes, scores, maxOutputSize, iouThreshold = 0.5, scoreThreshold = Number.NEGATIVE_INFINITY, padToMaxOutputSize = false) {\n const $boxes = convertToTensor(boxes, 'boxes', 'nonMaxSuppressionAsync');\n const $scores = convertToTensor(scores, 'scores', 'nonMaxSuppressionAsync');\n const params = nonMaxSuppSanityCheck($boxes, $scores, maxOutputSize, iouThreshold, scoreThreshold, null /* softNmsSigma */);\n const $maxOutputSize = params.maxOutputSize;\n const $iouThreshold = params.iouThreshold;\n const $scoreThreshold = params.scoreThreshold;\n const [boxesVals, scoresVals] = await Promise.all([$boxes.data(), $scores.data()]);\n // We call a cpu based impl directly with the typedarray data here rather\n // than a kernel because all kernels are synchronous (and thus cannot await\n // .data()).\n const res = nonMaxSuppressionV4Impl(boxesVals, scoresVals, $maxOutputSize, $iouThreshold, $scoreThreshold, padToMaxOutputSize);\n if ($boxes !== boxes) {\n $boxes.dispose();\n }\n if ($scores !== scores) {\n $scores.dispose();\n }\n return res;\n}\nexport const nonMaxSuppressionPaddedAsync = nonMaxSuppressionPaddedAsync_;\n//# sourceMappingURL=non_max_suppression_padded_async.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { ResizeBilinear } from '../../kernel_names';\nimport { convertToTensor } from '../../tensor_util_env';\nimport * as util from '../../util';\nimport { op } from '../operation';\nimport { reshape } from '../reshape';\n/**\n * Bilinear resize a single 3D image or a batch of 3D images to a new shape.\n *\n * @param images The images, of rank 4 or rank 3, of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is assumed.\n * @param size The new shape `[newHeight, newWidth]` to resize the\n * images to. Each channel is resized individually.\n * @param alignCorners Defaults to False. If true, rescale\n * input by `(new_height - 1) / (height - 1)`, which exactly aligns the 4\n * corners of images and resized images. If false, rescale by\n * `new_height / height`. 
Treat similarly the width dimension.\n *\n * @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'}\n */\nfunction resizeBilinear_(images, size, alignCorners = false) {\n const $images = convertToTensor(images, 'images', 'resizeBilinear');\n util.assert($images.rank === 3 || $images.rank === 4, () => `Error in resizeBilinear: x must be rank 3 or 4, but got ` +\n `rank ${$images.rank}.`);\n util.assert(size.length === 2, () => `Error in resizeBilinear: new shape must 2D, but got shape ` +\n `${size}.`);\n let batchImages = $images;\n let reshapedTo4D = false;\n if ($images.rank === 3) {\n reshapedTo4D = true;\n batchImages = reshape($images, [1, $images.shape[0], $images.shape[1], $images.shape[2]]);\n }\n const [newHeight, newWidth] = size;\n const forward = (backend, save) => {\n save([batchImages]);\n return backend.resizeBilinear(batchImages, newHeight, newWidth, alignCorners);\n };\n const inputs = { images: batchImages };\n const attrs = { alignCorners, size };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* gradient */, ResizeBilinear, attrs);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const resizeBilinear = op({ resizeBilinear_ });\n//# sourceMappingURL=resize_bilinear.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { ResizeNearestNeighbor } from '../../kernel_names';\nimport { convertToTensor } from '../../tensor_util_env';\nimport * as util from '../../util';\nimport { op } from '../operation';\nimport { reshape } from '../reshape';\n/**\n * NearestNeighbor resize a batch of 3D images to a new shape.\n *\n * @param images The images, of rank 4 or rank 3, of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is assumed.\n * @param size The new shape `[newHeight, newWidth]` to resize the\n * images to. Each channel is resized individually.\n * @param alignCorners Defaults to False. If true, rescale\n * input by `(new_height - 1) / (height - 1)`, which exactly aligns the 4\n * corners of images and resized images. If false, rescale by\n * `new_height / height`. 
Treat similarly the width dimension.\n *\n * @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'}\n */\nfunction resizeNearestNeighbor_(images, size, alignCorners = false) {\n const $images = convertToTensor(images, 'images', 'resizeNearestNeighbor');\n util.assert($images.rank === 3 || $images.rank === 4, () => `Error in resizeNearestNeighbor: x must be rank 3 or 4, but got ` +\n `rank ${$images.rank}.`);\n util.assert(size.length === 2, () => `Error in resizeNearestNeighbor: new shape must 2D, but got shape ` +\n `${size}.`);\n util.assert($images.dtype === 'float32' || $images.dtype === 'int32', () => '`images` must have `int32` or `float32` as dtype');\n let batchImages = $images;\n let reshapedTo4D = false;\n if ($images.rank === 3) {\n reshapedTo4D = true;\n batchImages = reshape($images, [1, $images.shape[0], $images.shape[1], $images.shape[2]]);\n }\n const [newHeight, newWidth] = size;\n const inputs = { images: batchImages };\n const attrs = { alignCorners, size };\n const forward = (backend, save) => {\n save([batchImages]);\n return backend.resizeNearestNeighbor(batchImages, newHeight, newWidth, alignCorners);\n };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* gradient */, ResizeNearestNeighbor, attrs);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const resizeNearestNeighbor = op({ resizeNearestNeighbor_ });\n//# sourceMappingURL=resize_nearest_neighbor.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../../tensor_util_env';\nimport { assert } from '../../util';\nimport { greaterEqual } from '../greater_equal';\nimport { lessEqual } from '../less_equal';\nimport { logicalAnd } from '../logical_and';\nimport { op } from '../operation';\nimport { range } from '../range';\nimport { reshape } from '../reshape';\nimport { scalar } from '../scalar';\nimport { stack } from '../stack';\nimport { sub } from '../sub';\nimport { unstack } from '../unstack';\nimport { where } from '../where';\nimport { zeros } from '../zeros';\n/**\n * Copy a tensor setting everything outside a central band in each innermost\n * matrix to zero.\n *\n * The band part is computed as follows: Assume input has `k` dimensions\n * `[I, J, K, ..., M, N]`, then the output is a tensor with the same shape where\n * `band[i, j, k, ..., m, n] = in_band(m, n) * input[i, j, k, ..., m, n]`.\n * The indicator function\n * `in_band(m, n) = (num_lower < 0 || (m-n) <= num_lower))`\n * `&& (num_upper < 0 || (n-m) <= num_upper)`\n *\n * ```js\n * const x = tf.tensor2d([[ 0, 1, 2, 3],\n * [-1, 0, 1, 2],\n * [-2, -1, 0, 1],\n * [-3, -2, -1, 0]]);\n * let y = tf.linalg.bandPart(x, 1, -1);\n * y.print(); // [[ 0, 1, 2, 3],\n * // [-1, 0, 1, 2],\n * // [ 0, -1, 0, 1],\n * // [ 0, 0 , -1, 0]]\n * let z = tf.linalg.bandPart(x, 
2, 1);\n * z.print(); // [[ 0, 1, 0, 0],\n * // [-1, 0, 1, 0],\n * // [-2, -1, 0, 1],\n * // [ 0, -2, -1, 0]]\n * ```\n *\n * @param x Rank `k` tensor\n * @param numLower Number of subdiagonals to keep.\n * If negative, keep entire lower triangle.\n * @param numUpper Number of subdiagonals to keep.\n * If negative, keep entire upper triangle.\n * @returns Rank `k` tensor of the same shape as input.\n * The extracted banded tensor.\n *\n * @doc {heading:'Operations', subheading:'Linear Algebra', namespace:'linalg'}\n */\nfunction bandPart_(a, numLower, numUpper) {\n assert(numLower % 1 === 0, () => `bandPart(): numLower must be an integer, got ${numLower}.`);\n assert(numUpper % 1 === 0, () => `bandPart(): numUpper must be an integer, got ${numUpper}.`);\n const $a = convertToTensor(a, 'a', 'bandPart');\n assert($a.rank >= 2, () => `bandPart(): Rank must be at least 2, got ${$a.rank}.`);\n const shape = $a.shape;\n const [M, N] = $a.shape.slice(-2);\n if (!(numLower <= M)) {\n throw new Error(`bandPart(): numLower (${numLower})` +\n ` must not be greater than the number of rows (${M}).`);\n }\n if (!(numUpper <= N)) {\n throw new Error(`bandPart(): numUpper (${numUpper})` +\n ` must not be greater than the number of columns (${N}).`);\n }\n if (numLower < 0) {\n numLower = M;\n }\n if (numUpper < 0) {\n numUpper = N;\n }\n const i = reshape(range(0, M, 1, 'int32'), [-1, 1]);\n const j = range(0, N, 1, 'int32');\n const ij = sub(i, j);\n const inBand = logicalAnd(lessEqual(ij, scalar(+numLower, 'int32')), greaterEqual(ij, scalar(-numUpper, 'int32')));\n const zero = zeros([M, N], $a.dtype);\n return reshape(stack(unstack(reshape($a, [-1, M, N]))\n .map(mat => where(inBand, mat, zero))), shape);\n}\nexport const bandPart = op({ bandPart_ });\n//# sourceMappingURL=band_part.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { assert } from '../../util';\nimport { div } from '../div';\nimport { mul } from '../mul';\nimport { norm } from '../norm';\nimport { op } from '../operation';\nimport { split } from '../split';\nimport { squeeze } from '../squeeze';\nimport { stack } from '../stack';\nimport { sub } from '../sub';\nimport { sum } from '../sum';\n/**\n * Gram-Schmidt orthogonalization.\n *\n * ```js\n * const x = tf.tensor2d([[1, 2], [3, 4]]);\n * let y = tf.linalg.gramSchmidt(x);\n * y.print();\n * console.log('Othogonalized:');\n * y.dot(y.transpose()).print(); // should be nearly the identity matrix.\n * console.log('First row direction maintained:');\n * const data = await y.array();\n * console.log(data[0][1] / data[0][0]); // should be nearly 2.\n * ```\n *\n * @param xs The vectors to be orthogonalized, in one of the two following\n * formats:\n * - An Array of `tf.Tensor1D`.\n * - A `tf.Tensor2D`, i.e., a matrix, in which case the vectors are the rows\n * of `xs`.\n * In each case, all the vectors must have the same length and the length\n * must be greater than or equal to the number of vectors.\n * @returns The orthogonalized and normalized vectors or matrix.\n * Orthogonalization means that the vectors or the rows of the matrix\n * are orthogonal (zero inner products). Normalization means that each\n * vector or each row of the matrix has an L2 norm that equals `1`.\n *\n * @doc {heading:'Operations', subheading:'Linear Algebra', namespace:'linalg'}\n */\nfunction gramSchmidt_(xs) {\n let inputIsTensor2D;\n if (Array.isArray(xs)) {\n inputIsTensor2D = false;\n assert(xs != null && xs.length > 0, () => 'Gram-Schmidt process: input must not be null, undefined, or ' +\n 'empty');\n const dim = xs[0].shape[0];\n for (let i = 1; i < xs.length; ++i) {\n assert(xs[i].shape[0] === dim, () => 'Gram-Schmidt: Non-unique lengths found in the input vectors: ' +\n `(${xs[i].shape[0]} vs. ${dim})`);\n }\n }\n else {\n inputIsTensor2D = true;\n xs = split(xs, xs.shape[0], 0).map(x => squeeze(x, [0]));\n }\n assert(xs.length <= xs[0].shape[0], () => `Gram-Schmidt: Number of vectors (${xs.length}) exceeds ` +\n `number of dimensions (${xs[0].shape[0]}).`);\n const ys = [];\n const xs1d = xs;\n for (let i = 0; i < xs.length; ++i) {\n ys.push(ENGINE.tidy(() => {\n let x = xs1d[i];\n if (i > 0) {\n for (let j = 0; j < i; ++j) {\n const proj = mul(sum(mul(ys[j], x)), ys[j]);\n x = sub(x, proj);\n }\n }\n return div(x, norm(x, 'euclidean'));\n }));\n }\n if (inputIsTensor2D) {\n return stack(ys, 0);\n }\n else {\n return ys;\n }\n}\nexport const gramSchmidt = op({ gramSchmidt_ });\n//# sourceMappingURL=gram_schmidt.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { dispose } from '../../globals';\nimport { assert } from '../../util';\nimport { clone } from '../clone';\nimport { concat } from '../concat';\nimport { div } from '../div';\nimport { eye } from '../eye';\nimport { greater } from '../greater';\nimport { matMul } from '../mat_mul';\nimport { mul } from '../mul';\nimport { neg } from '../neg';\nimport { norm } from '../norm';\nimport { op } from '../operation';\nimport { reshape } from '../reshape';\nimport { slice } from '../slice';\nimport { stack } from '../stack';\nimport { sub } from '../sub';\nimport { tensor2d } from '../tensor2d';\nimport { transpose } from '../transpose';\nimport { unstack } from '../unstack';\nimport { where } from '../where';\n/**\n * Compute QR decomposition of m-by-n matrix using Householder transformation.\n *\n * Implementation based on\n * [http://www.cs.cornell.edu/~bindel/class/cs6210-f09/lec18.pdf]\n * (http://www.cs.cornell.edu/~bindel/class/cs6210-f09/lec18.pdf)\n *\n * ```js\n * const a = tf.tensor2d([[1, 2], [3, 4]]);\n * let [q, r] = tf.linalg.qr(a);\n * console.log('Q');\n * q.print();\n * console.log('R');\n * r.print();\n * console.log('Orthogonalized');\n * q.dot(q.transpose()).print() // should be nearly the identity matrix.\n * console.log('Reconstructed');\n * q.dot(r).print(); // should be nearly [[1, 2], [3, 4]];\n * ```\n *\n * @param x The `tf.Tensor` to be QR-decomposed. Must have rank >= 2. Suppose\n * it has the shape `[..., M, N]`.\n * @param fullMatrices An optional boolean parameter. Defaults to `false`.\n * If `true`, compute full-sized `Q`. If `false` (the default),\n * compute only the leading N columns of `Q` and `R`.\n * @returns An `Array` of two `tf.Tensor`s: `[Q, R]`. `Q` is a unitary matrix,\n * i.e., its columns all have unit norm and are mutually orthogonal.\n * If `M >= N`,\n * If `fullMatrices` is `false` (default),\n * - `Q` has a shape of `[..., M, N]`,\n * - `R` has a shape of `[..., N, N]`.\n * If `fullMatrices` is `true` (default),\n * - `Q` has a shape of `[..., M, M]`,\n * - `R` has a shape of `[..., M, N]`.\n * If `M < N`,\n * - `Q` has a shape of `[..., M, M]`,\n * - `R` has a shape of `[..., M, N]`.\n * @throws If the rank of `x` is less than 2.\n *\n * @doc {heading:'Operations',\n * subheading:'Linear Algebra',\n * namespace:'linalg'}\n */\nfunction qr_(x, fullMatrices = false) {\n assert(x.rank >= 2, () => `qr() requires input tensor to have a rank >= 2, but got rank ${x.rank}`);\n if (x.rank === 2) {\n return qr2d(x, fullMatrices);\n }\n else {\n // Rank > 2.\n // TODO(cais): Below we split the input into individual 2D tensors,\n // perform QR decomposition on them and then stack the results back\n // together. 
We should explore whether this can be parallelized.\n const outerDimsProd = x.shape.slice(0, x.shape.length - 2)\n .reduce((value, prev) => value * prev);\n const x2ds = unstack(reshape(x, [\n outerDimsProd, x.shape[x.shape.length - 2],\n x.shape[x.shape.length - 1]\n ]), 0);\n const q2ds = [];\n const r2ds = [];\n x2ds.forEach(x2d => {\n const [q2d, r2d] = qr2d(x2d, fullMatrices);\n q2ds.push(q2d);\n r2ds.push(r2d);\n });\n const q = reshape(stack(q2ds, 0), x.shape);\n const r = reshape(stack(r2ds, 0), x.shape);\n return [q, r];\n }\n}\nfunction qr2d(x, fullMatrices = false) {\n return ENGINE.tidy(() => {\n assert(x.shape.length === 2, () => `qr2d() requires a 2D Tensor, but got a ${x.shape.length}D Tensor.`);\n const m = x.shape[0];\n const n = x.shape[1];\n let q = eye(m); // Orthogonal transform so far.\n let r = clone(x); // Transformed matrix so far.\n const one2D = tensor2d([[1]], [1, 1]);\n let w = clone(one2D);\n const iters = m >= n ? n : m;\n for (let j = 0; j < iters; ++j) {\n // This tidy within the for-loop ensures we clean up temporary\n // tensors as soon as they are no longer needed.\n const rTemp = r;\n const wTemp = w;\n const qTemp = q;\n [w, r, q] = ENGINE.tidy(() => {\n // Find H = I - tau * w * w', to put zeros below R(j, j).\n const rjEnd1 = slice(r, [j, j], [m - j, 1]);\n const normX = norm(rjEnd1);\n const rjj = slice(r, [j, j], [1, 1]);\n // The sign() function returns 0 on 0, which causes division by zero.\n const s = where(greater(rjj, 0), tensor2d([[-1]]), tensor2d([[1]]));\n const u1 = sub(rjj, mul(s, normX));\n const wPre = div(rjEnd1, u1);\n if (wPre.shape[0] === 1) {\n w = clone(one2D);\n }\n else {\n w = concat([\n one2D,\n slice(wPre, [1, 0], [wPre.shape[0] - 1, wPre.shape[1]])\n ], 0);\n }\n const tau = neg(div(matMul(s, u1), normX));\n // -- R := HR, Q := QH.\n const rjEndAll = slice(r, [j, 0], [m - j, n]);\n const tauTimesW = mul(tau, w);\n const wT = transpose(w);\n if (j === 0) {\n r = sub(rjEndAll, matMul(tauTimesW, matMul(wT, rjEndAll)));\n }\n else {\n const rTimesTau = sub(rjEndAll, matMul(tauTimesW, matMul(wT, rjEndAll)));\n r = concat([slice(r, [0, 0], [j, n]), rTimesTau], 0);\n }\n const tawTimesWT = transpose(tauTimesW);\n const qAllJEnd = slice(q, [0, j], [m, q.shape[1] - j]);\n if (j === 0) {\n q = sub(qAllJEnd, matMul(matMul(qAllJEnd, w), tawTimesWT));\n }\n else {\n const qTimesTau = sub(qAllJEnd, matMul(matMul(qAllJEnd, w), tawTimesWT));\n q = concat([slice(q, [0, 0], [m, j]), qTimesTau], 1);\n }\n return [w, r, q];\n });\n dispose([rTemp, wTemp, qTemp]);\n }\n if (!fullMatrices && m > n) {\n q = slice(q, [0, 0], [m, n]);\n r = slice(r, [0, 0], [n, n]);\n }\n return [q, r];\n });\n}\nexport const qr = op({ qr_ });\n//# sourceMappingURL=qr.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport var Reduction;\n(function (Reduction) {\n Reduction[Reduction[\"NONE\"] = 0] = \"NONE\";\n Reduction[Reduction[\"MEAN\"] = 1] = \"MEAN\";\n Reduction[Reduction[\"SUM\"] = 2] = \"SUM\";\n Reduction[Reduction[\"SUM_BY_NONZERO_WEIGHTS\"] = 3] = \"SUM_BY_NONZERO_WEIGHTS\";\n})(Reduction || (Reduction = {}));\n//# sourceMappingURL=loss_ops_utils.js.map", "import { convertToTensor } from '../../tensor_util_env';\nimport { cast } from '../cast';\nimport { div } from '../div';\nimport { Reduction } from '../loss_ops_utils';\nimport { mean } from '../mean';\nimport { mul } from '../mul';\nimport { notEqual } from '../not_equal';\nimport { ones } from '../ones';\nimport { op } from '../operation';\nimport { scalar } from '../scalar';\nimport { sum } from '../sum';\n/**\n * Computes the weighted loss between two tensors.\n *\n * @param losses Tensor of shape `[batch_size, d1, ... dN]`.\n * @param weights Tensor whose rank is either 0, or the same rank as\n * `losses`, and must be broadcastable to `losses` (i.e., all\n * dimensions must be either `1`, or the same as the corresponding\n * `losses` dimension).\n *\n * @doc {heading: 'Training', subheading: 'Losses', namespace: 'losses'}\n */\nfunction computeWeightedLoss_(losses, weights, reduction = Reduction.SUM_BY_NONZERO_WEIGHTS) {\n const $losses = convertToTensor(losses, 'losses', 'computeWeightedLoss');\n let $weights = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'computeWeightedLoss');\n }\n const weightedLoss = ($weights == null) ? $losses : mul($losses, $weights);\n if (reduction === Reduction.NONE) {\n return weightedLoss;\n }\n if (reduction === Reduction.SUM) {\n return sum(weightedLoss);\n }\n if (reduction === Reduction.MEAN) {\n if ($weights == null) {\n return mean(weightedLoss);\n }\n else {\n const broadcastFactor = $losses.size / $weights.size;\n const result = div(sum(weightedLoss), sum($weights));\n return broadcastFactor > 1 ? div(result, scalar(broadcastFactor)) :\n result;\n }\n }\n if (reduction === Reduction.SUM_BY_NONZERO_WEIGHTS) {\n if ($weights == null) {\n return div(sum(weightedLoss), scalar($losses.size));\n }\n else {\n const broadcastedWeights = mul($weights, ones($losses.shape));\n const numNonZeros = cast(sum(notEqual(broadcastedWeights, scalar(0))), 'float32');\n return div(sum(weightedLoss), numNonZeros);\n }\n }\n throw Error(`Unknown reduction: ${reduction}`);\n}\nexport const computeWeightedLoss = op({ computeWeightedLoss_ });\n//# sourceMappingURL=compute_weighted_loss.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../../tensor_util_env';\nimport { assertShapesMatch } from '../../util';\nimport { abs } from '../abs';\nimport { Reduction } from '../loss_ops_utils';\nimport { op } from '../operation';\nimport { sub } from '../sub';\nimport { computeWeightedLoss } from './compute_weighted_loss';\n/**\n * Computes the absolute difference loss between two tensors.\n *\n * @param labels The ground truth output tensor, same dimensions as\n * 'predictions'.\n * @param predictions The predicted outputs.\n * @param weights Tensor whose rank is either 0, or the same rank as\n * `labels`, and must be broadcastable to `labels` (i.e., all dimensions\n * must be either `1`, or the same as the corresponding `losses`\n * dimension).\n * @param reduction Type of reduction to apply to loss. Should be of type\n * `Reduction`\n *\n * @doc {heading: 'Training', subheading: 'Losses', namespace: 'losses'}\n */\nfunction absoluteDifference_(labels, predictions, weights, reduction = Reduction.SUM_BY_NONZERO_WEIGHTS) {\n const $labels = convertToTensor(labels, 'labels', 'absoluteDifference');\n const $predictions = convertToTensor(predictions, 'predictions', 'absoluteDifference');\n let $weights = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'absoluteDifference');\n }\n assertShapesMatch($labels.shape, $predictions.shape, 'Error in absoluteDifference: ');\n const losses = abs(sub($labels, $predictions));\n return computeWeightedLoss(losses, $weights, reduction);\n}\nexport const absoluteDifference = op({ absoluteDifference_ });\n//# sourceMappingURL=absolute_difference.js.map", "import { convertToTensor } from '../../tensor_util_env';\nimport { assertShapesMatch } from '../../util';\nimport { Reduction } from '../loss_ops_utils';\nimport { mul } from '../mul';\nimport { op } from '../operation';\nimport { scalar } from '../scalar';\nimport { sub } from '../sub';\nimport { sum } from '../sum';\nimport { computeWeightedLoss } from './compute_weighted_loss';\n/**\n * Computes the cosine distance loss between two tensors.\n *\n * @param labels The ground truth output tensor, same dimensions as\n * 'predictions'.\n * @param predictions The predicted outputs.\n * @param axis The dimension along which the cosine distance is computed.\n * @param weights Tensor whose rank is either 0, or the same rank as\n * `labels`, and must be broadcastable to `labels` (i.e., all dimensions\n * must be either `1`, or the same as the corresponding `losses`\n * dimension).\n * @param reduction Type of reduction to apply to loss. 
Should be of type\n * `Reduction`\n *\n * @doc {heading: 'Training', subheading: 'Losses', namespace: 'losses'}\n */\nfunction cosineDistance_(labels, predictions, axis, weights, reduction = Reduction.SUM_BY_NONZERO_WEIGHTS) {\n const $labels = convertToTensor(labels, 'labels', 'cosineDistance');\n const $predictions = convertToTensor(predictions, 'predictions', 'cosineDistance');\n let $weights = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'cosineDistance');\n }\n assertShapesMatch($labels.shape, $predictions.shape, 'Error in cosineDistance: ');\n const one = scalar(1);\n const losses = sub(one, sum(mul($labels, $predictions), axis, true));\n return computeWeightedLoss(losses, $weights, reduction);\n}\nexport const cosineDistance = op({ cosineDistance_ });\n//# sourceMappingURL=cosine_distance.js.map", "import { convertToTensor } from '../../tensor_util_env';\nimport { assertShapesMatch } from '../../util';\nimport { Reduction } from '../loss_ops_utils';\nimport { mul } from '../mul';\nimport { op } from '../operation';\nimport { relu } from '../relu';\nimport { scalar } from '../scalar';\nimport { sub } from '../sub';\nimport { computeWeightedLoss } from './compute_weighted_loss';\n/**\n * Computes the Hinge loss between two tensors.\n *\n * @param labels The ground truth output tensor, same dimensions as\n * 'predictions'.\n * @param predictions The predicted outputs.\n * @param weights Tensor whose rank is either 0, or the same rank as\n * `labels`, and must be broadcastable to `labels` (i.e., all dimensions\n * must be either `1`, or the same as the corresponding `losses`\n * dimension).\n * @param reduction Type of reduction to apply to loss. Should be of type\n * `Reduction`\n *\n * @doc {heading: 'Training', subheading: 'Losses', namespace: 'losses'}\n */\nfunction hingeLoss_(labels, predictions, weights, reduction = Reduction.SUM_BY_NONZERO_WEIGHTS) {\n let $labels = convertToTensor(labels, 'labels', 'hingeLoss');\n const $predictions = convertToTensor(predictions, 'predictions', 'hingeLoss');\n let $weights = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'hingeLoss');\n }\n assertShapesMatch($labels.shape, $predictions.shape, 'Error in hingeLoss: ');\n const one = scalar(1);\n // Convert binary labels to (-1, 1)\n $labels = sub(mul(scalar(2), $labels), one);\n const losses = relu(sub(one, mul($labels, $predictions)));\n return computeWeightedLoss(losses, $weights, reduction);\n}\nexport const hingeLoss = op({ hingeLoss_ });\n//# sourceMappingURL=hinge_loss.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../../tensor_util_env';\nimport { assertShapesMatch } from '../../util';\nimport { abs } from '../abs';\nimport { add } from '../add';\nimport { Reduction } from '../loss_ops_utils';\nimport { minimum } from '../minimum';\nimport { mul } from '../mul';\nimport { op } from '../operation';\nimport { scalar } from '../scalar';\nimport { square } from '../square';\nimport { sub } from '../sub';\nimport { computeWeightedLoss } from './compute_weighted_loss';\n/**\n * Computes the huber loss between two tensors.\n *\n * @param labels The ground truth output tensor, same dimensions as\n * 'predictions'.\n * @param predictions The predicted outputs.\n * @param weights Tensor whose rank is either 0, or the same rank as\n * `labels`, and must be broadcastable to `labels` (i.e., all dimensions\n * must be either `1`, or the same as the corresponding `losses`\n * dimension).\n * @param delta Point where huber loss changes from quadratic to linear.\n * @param reduction Type of reduction to apply to loss. Should be of type\n * `Reduction`.\n *\n * @doc {heading: 'Training', subheading: 'Losses', namespace: 'losses'}\n */\nfunction huberLoss_(labels, predictions, weights, delta = 1.0, reduction = Reduction.SUM_BY_NONZERO_WEIGHTS) {\n const $labels = convertToTensor(labels, 'labels', 'huberLoss');\n const $predictions = convertToTensor(predictions, 'predictions', 'huberLoss');\n let $weights = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'huberLoss');\n }\n assertShapesMatch($labels.shape, $predictions.shape, 'Error in huberLoss: ');\n const deltaScalar = scalar(delta);\n const error = abs(sub($predictions, $labels));\n const quadratic = minimum(error, deltaScalar);\n const linear = sub(error, quadratic);\n const losses = add(mul(scalar(0.5), square(quadratic)), mul(deltaScalar, linear));\n return computeWeightedLoss(losses, $weights, reduction);\n}\nexport const huberLoss = op({ huberLoss_ });\n//# sourceMappingURL=huber_loss.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../../tensor_util_env';\nimport { assertShapesMatch } from '../../util';\nimport { add } from '../add';\nimport { log } from '../log';\nimport { Reduction } from '../loss_ops_utils';\nimport { mul } from '../mul';\nimport { neg } from '../neg';\nimport { op } from '../operation';\nimport { scalar } from '../scalar';\nimport { sub } from '../sub';\nimport { computeWeightedLoss } from './compute_weighted_loss';\n/**\n * Computes the log loss between two tensors.\n *\n * @param labels The ground truth output tensor, same dimensions as\n * 'predictions'.\n * @param predictions The predicted outputs.\n * @param weights Tensor whose rank is either 0, or the same rank as\n * `labels`, and must be broadcastable to `labels` (i.e., all dimensions\n * must be either `1`, or the same as the corresponding `losses`\n * dimension).\n * @param epsilon A small increment to avoid taking log of zero\n * @param reduction Type of reduction to apply to loss. Should be of type\n * `Reduction`\n *\n * @doc {heading: 'Training', subheading: 'Losses', namespace: 'losses'}\n */\nfunction logLoss_(labels, predictions, weights, epsilon = 1e-7, reduction = Reduction.SUM_BY_NONZERO_WEIGHTS) {\n const $labels = convertToTensor(labels, 'labels', 'logLoss');\n const $predictions = convertToTensor(predictions, 'predictions', 'logLoss');\n let $weights = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'logLoss');\n }\n assertShapesMatch($labels.shape, $predictions.shape, 'Error in logLoss: ');\n const one = scalar(1);\n const epsilonScalar = scalar(epsilon);\n const l1 = neg(mul($labels, log(add($predictions, epsilonScalar))));\n const l2 = mul(sub(one, $labels), log(add(sub(one, $predictions), epsilonScalar)));\n const losses = sub(l1, l2);\n return computeWeightedLoss(losses, $weights, reduction);\n}\nexport const logLoss = op({ logLoss_ });\n//# sourceMappingURL=log_loss.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../../tensor_util_env';\nimport { assertShapesMatch } from '../../util';\nimport { Reduction } from '../loss_ops_utils';\nimport { op } from '../operation';\nimport { squaredDifference } from '../squared_difference';\nimport { computeWeightedLoss } from './compute_weighted_loss';\n/**\n * Computes the mean squared error between two tensors.\n *\n * @param labels The ground truth output tensor, same dimensions as\n * 'predictions'.\n * @param predictions The predicted outputs.\n * @param weights Tensor whose rank is either 0, or the same rank as\n * `labels`, and must be broadcastable to `labels` (i.e., all dimensions\n * must be either `1`, or the same as the corresponding `losses`\n * dimension).\n * @param reduction Type of reduction to apply to loss. Should be of type\n * `Reduction`\n *\n * @doc {heading: 'Training', subheading: 'Losses', namespace: 'losses'}\n */\nfunction meanSquaredError_(labels, predictions, weights, reduction = Reduction.SUM_BY_NONZERO_WEIGHTS) {\n const $labels = convertToTensor(labels, 'labels', 'meanSquaredError');\n const $predictions = convertToTensor(predictions, 'predictions', 'meanSquaredError');\n let $weights = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'meanSquaredError');\n }\n assertShapesMatch($labels.shape, $predictions.shape, 'Error in meanSquaredError: ');\n const losses = squaredDifference($labels, $predictions);\n return computeWeightedLoss(losses, $weights, reduction);\n}\nexport const meanSquaredError = op({ meanSquaredError_ });\n//# sourceMappingURL=mean_squared_error.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
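A minimal sketch assuming `tf.losses.meanSquaredError`; the optional `weights` tensor must be broadcastable to `labels`, here weighting the last example double:

import * as tf from '@tensorflow/tfjs-core';

const labels = tf.tensor1d([1, 2, 3]);
const predictions = tf.tensor1d([1.5, 2.0, 2.0]);
const weights = tf.tensor1d([1, 1, 2]); // per-example weights
const loss = tf.losses.meanSquaredError(labels, predictions, weights);
loss.print();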
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../../tensor_util_env';\nimport { assertShapesMatch } from '../../util';\nimport { abs } from '../abs';\nimport { add } from '../add';\nimport { exp } from '../exp';\nimport { log1p } from '../log1p';\nimport { Reduction } from '../loss_ops_utils';\nimport { mul } from '../mul';\nimport { neg } from '../neg';\nimport { op } from '../operation';\nimport { relu } from '../relu';\nimport { scalar } from '../scalar';\nimport { sub } from '../sub';\nimport { computeWeightedLoss } from './compute_weighted_loss';\nfunction sigmoidCrossEntropyWithLogits_(labels, logits) {\n const $labels = convertToTensor(labels, 'labels', 'sigmoidCrossEntropyWithLogits');\n const $logits = convertToTensor(logits, 'logits', 'sigmoidCrossEntropyWithLogits');\n assertShapesMatch($labels.shape, $logits.shape, 'Error in sigmoidCrossEntropyWithLogits: ');\n /**\n * Implementation Details:\n *\n * For brevity, let `x = logits`, `z = labels`. The logistic loss is\n * z * -log(sigmoid(x)) + (1 - z) * -log(1 - sigmoid(x))\n * = z * -log(1 / (1 + exp(-x))) + (1 - z) * -log(exp(-x) / (1 + exp(-x)))\n * = z * log(1 + exp(-x)) + (1 - z) * (-log(exp(-x)) + log(1 + exp(-x)))\n * = z * log(1 + exp(-x)) + (1 - z) * (x + log(1 + exp(-x))\n * = (1 - z) * x + log(1 + exp(-x))\n * = x - x * z + log(1 + exp(-x))\n *\n * For x < 0, to avoid overflow in exp(-x), we reformulate the above\n * x - x * z + log(1 + exp(-x))\n * = log(exp(x)) - x * z + log(1 + exp(-x))\n * = - x * z + log(1 + exp(x))\n *\n * Hence, to ensure stability and avoid overflow, the implementation uses\n * this equivalent formulation:\n * max(x, 0) - x * z + log(1 + exp(-abs(x)))\n */\n const maxOutput = relu($logits);\n const outputXTarget = mul($logits, $labels);\n const sigmoidOutput = log1p(exp(neg(abs($logits))));\n return add(sub(maxOutput, outputXTarget), sigmoidOutput);\n}\n/**\n * Computes the sigmoid cross entropy loss between two tensors.\n *\n * If labelSmoothing is nonzero, smooth the labels towards 1/2:\n *\n * newMulticlassLabels = multiclassLabels * (1 - labelSmoothing)\n * + 0.5 * labelSmoothing\n *\n * @param multiClassLabels The ground truth output tensor of shape\n * [batch_size, num_classes], same dimensions as 'predictions'.\n * @param logits The predicted outputs.\n * @param weights Tensor whose rank is either 0, or the same rank as\n * `labels`, and must be broadcastable to `labels` (i.e., all dimensions\n * must be either `1`, or the same as the corresponding `losses`\n * dimension).\n * @param labelSmoothing If greater than 0, then smooth the labels.\n * @param reduction Type of reduction to apply to loss. 
Should be of type\n * `Reduction`\n *\n * @doc { heading: 'Training', subheading: 'Losses', namespace: 'losses' }\n */\nfunction sigmoidCrossEntropy_(multiClassLabels, logits, weights, labelSmoothing = 0, reduction = Reduction.SUM_BY_NONZERO_WEIGHTS) {\n let $multiClassLabels = convertToTensor(multiClassLabels, 'multiClassLabels', 'sigmoidCrossEntropy');\n const $logits = convertToTensor(logits, 'logits', 'sigmoidCrossEntropy');\n let $weights = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'sigmoidCrossEntropy');\n }\n assertShapesMatch($multiClassLabels.shape, $logits.shape, 'Error in sigmoidCrossEntropy: ');\n if (labelSmoothing > 0) {\n const labelSmoothingScalar = scalar(labelSmoothing);\n const one = scalar(1);\n const half = scalar(0.5);\n $multiClassLabels =\n add(mul($multiClassLabels, sub(one, labelSmoothingScalar)), mul(half, labelSmoothingScalar));\n }\n const losses = sigmoidCrossEntropyWithLogits_($multiClassLabels, $logits);\n return computeWeightedLoss(losses, $weights, reduction);\n}\nexport const sigmoidCrossEntropy = op({ sigmoidCrossEntropy_ });\n//# sourceMappingURL=sigmoid_cross_entropy.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { customGrad } from '../../gradients';\nimport { convertToTensor } from '../../tensor_util_env';\nimport { assertShapesMatch } from '../../util';\nimport { add } from '../add';\nimport { expandShapeToKeepDim } from '../axis_util';\nimport { cast } from '../cast';\nimport { div } from '../div';\nimport { exp } from '../exp';\nimport { logSumExp } from '../log_sum_exp';\nimport { Reduction } from '../loss_ops_utils';\nimport { mul } from '../mul';\nimport { neg } from '../neg';\nimport { op } from '../operation';\nimport { reshape } from '../reshape';\nimport { scalar } from '../scalar';\nimport { sub } from '../sub';\nimport { sum } from '../sum';\nimport { computeWeightedLoss } from './compute_weighted_loss';\n/**\n * Computes softmax cross entropy between logits and labels.\n *\n * Measures the probability error in discrete classification tasks in which\n * the classes are mutually exclusive (each entry is in exactly one class).\n * For example, each CIFAR-10 image is labeled with one and only one label: an\n * image can be a dog or a truck, but not both.\n *\n * `NOTE`: While the classes are mutually exclusive, their probabilities need\n * not be. All that is required is that each row of labels is a valid\n * probability distribution. If they are not, the computation of the gradient\n * will be incorrect.\n *\n * `WARNING`: This op expects unscaled logits, since it performs a softmax on\n * logits internally for efficiency. Do not call this op with the output of\n * softmax, as it will produce incorrect results.\n *\n * logits and labels must have the same shape, e.g. 
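A minimal sketch assuming `tf.losses.sigmoidCrossEntropy`; it takes unscaled logits (the stable formulation max(x, 0) - x * z + log(1 + exp(-abs(x))) shown above is applied internally), and a nonzero `labelSmoothing` pulls the targets toward 1/2:

import * as tf from '@tensorflow/tfjs-core';

const labels = tf.tensor2d([[1, 0, 1], [0, 1, 0]]); // multi-label targets
const logits = tf.tensor2d([[2.0, -1.0, 0.5], [-0.3, 1.2, -2.0]]);
const loss = tf.losses.sigmoidCrossEntropy(labels, logits, undefined, /* labelSmoothing */ 0.1);
loss.print();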
[batch_size, num_classes]\n * and the same dtype.\n * @param labels The labels array.\n * @param logits The logits array.\n * @param dim The dimension softmax would be performed on. Defaults to `-1`\n * which indicates the last dimension.\n */\nfunction softmaxCrossEntropyWithLogits_(labels, logits, dim = -1) {\n if (dim === -1) {\n dim = logits.rank - 1;\n }\n if (dim !== logits.rank - 1) {\n throw Error(`Softmax cross entropy along a non-last dimension is not yet ` +\n `supported. Labels / logits was rank ${logits.rank} ` +\n `and dim was ${dim}`);\n }\n // Use a custom gradient for numerical stability.\n const customOp = customGrad((labels, logits, save) => {\n // Reference:\n // 1. http://cs231n.github.io/linear-classify/#softmax\n // 2. https://blog.feedly.com/tricks-of-the-trade-logsumexp/\n const keepDims = true;\n const lse = logSumExp(logits, [dim], keepDims);\n const logResult = sub(cast(logits, 'float32'), lse);\n save([labels, logResult]);\n const costVector = neg(mul(logResult, labels));\n const value = sum(costVector, [dim]);\n const gradFunc = (dy, saved) => {\n const [labels, logResult] = saved;\n const dyShape = expandShapeToKeepDim(dy.shape, [dim]);\n return [\n mul(reshape(dy, dyShape), sub(cast(labels, 'float32'), exp(logResult))),\n mul(reshape(dy, dyShape), sub(exp(logResult), cast(labels, 'float32'))),\n ];\n };\n return { value, gradFunc };\n });\n return customOp(labels, logits);\n}\n/**\n * Computes the softmax cross entropy loss between two tensors.\n *\n * If labelSmoothing is nonzero, smooth the labels towards 1/2:\n *\n * newOnehotLabels = onehotLabels * (1 - labelSmoothing)\n * + labelSmoothing / numClasses\n *\n * @param onehotLabels One hot encoded labels\n * [batch_size, num_classes], same dimensions as 'predictions'.\n * @param logits The predicted outputs.\n * @param weights Tensor whose rank is either 0, or 1, and must be\n * broadcastable to `loss` of shape [batch_size]\n * @param labelSmoothing If greater than 0, then smooth the labels.\n * @param reduction Type of reduction to apply to loss. Should be of type\n * `Reduction`\n *\n * @doc { heading: 'Training', subheading: 'Losses', namespace: 'losses' }\n */\nfunction softmaxCrossEntropy_(onehotLabels, logits, weights, labelSmoothing = 0, reduction = Reduction.SUM_BY_NONZERO_WEIGHTS) {\n let $onehotLabels = convertToTensor(onehotLabels, 'onehotLabels', 'softmaxCrossEntropy');\n const $logits = convertToTensor(logits, 'logits', 'softmaxCrossEntropy');\n let $weights = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'softmaxCrossEntropy');\n }\n assertShapesMatch($onehotLabels.shape, $logits.shape, 'Error in softmaxCrossEntropy: ');\n if (labelSmoothing > 0) {\n const labelSmoothingScalar = scalar(labelSmoothing);\n const one = scalar(1);\n const numClasses = scalar($onehotLabels.shape[1]);\n $onehotLabels =\n add(mul($onehotLabels, sub(one, labelSmoothingScalar)), div(labelSmoothingScalar, numClasses));\n }\n const losses = softmaxCrossEntropyWithLogits_($onehotLabels, $logits);\n return computeWeightedLoss(losses, $weights, reduction);\n}\nexport const softmaxCrossEntropy = op({ softmaxCrossEntropy_ });\n//# sourceMappingURL=softmax_cross_entropy.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
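A minimal sketch assuming `tf.losses.softmaxCrossEntropy`; labels are one-hot rows and the logits are unscaled (passing softmax output here would apply the softmax twice and give wrong results):

import * as tf from '@tensorflow/tfjs-core';

const onehotLabels = tf.tensor2d([[1, 0, 0], [0, 0, 1]]);
const logits = tf.tensor2d([[2.0, 0.5, -1.0], [-1.0, 0.3, 4.0]]);
const loss = tf.losses.softmaxCrossEntropy(onehotLabels, logits);
loss.print();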
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// Modularized ops.\nexport { abs } from './abs';\nexport { acos } from './acos';\nexport { acosh } from './acosh';\nexport { add } from './add';\nexport { addN } from './add_n';\nexport { all } from './all';\nexport { any } from './any';\nexport { argMax } from './arg_max';\nexport { argMin } from './arg_min';\nexport { asin } from './asin';\nexport { asinh } from './asinh';\nexport { atan } from './atan';\nexport { atan2 } from './atan2';\nexport { atanh } from './atanh';\nexport { avgPool } from './avg_pool';\nexport { avgPool3d } from './avg_pool_3d';\nexport { basicLSTMCell } from './basic_lstm_cell';\nexport { batchToSpaceND } from './batch_to_space_nd';\nexport { batchNorm } from './batchnorm';\nexport { batchNorm2d } from './batchnorm2d';\nexport { batchNorm3d } from './batchnorm3d';\nexport { batchNorm4d } from './batchnorm4d';\nexport { broadcastTo } from './broadcast_to';\nexport { buffer } from './buffer';\nexport { cast } from './cast';\nexport { ceil } from './ceil';\nexport { clipByValue } from './clip_by_value';\nexport { clone } from './clone';\nexport { complex } from './complex';\nexport { concat } from './concat';\nexport { concat1d } from './concat_1d';\nexport { concat2d } from './concat_2d';\nexport { concat3d } from './concat_3d';\nexport { concat4d } from './concat_4d';\nexport { conv1d } from './conv1d';\nexport { conv2d } from './conv2d';\nexport { conv2dTranspose } from './conv2d_transpose';\nexport { conv3d } from './conv3d';\nexport { conv3dTranspose } from './conv3d_transpose';\nexport { cos } from './cos';\nexport { cosh } from './cosh';\nexport { cumsum } from './cumsum';\nexport { depthToSpace } from './depth_to_space';\nexport { depthwiseConv2d } from './depthwise_conv2d';\nexport { diag } from './diag';\nexport { dilation2d } from './dilation2d';\nexport { div } from './div';\nexport { divNoNan } from './div_no_nan';\nexport { dot } from './dot';\nexport { elu } from './elu';\nexport { equal } from './equal';\nexport { erf } from './erf';\nexport { exp } from './exp';\nexport { expandDims } from './expand_dims';\nexport { expm1 } from './expm1';\nexport { eye } from './eye';\nexport { fill } from './fill';\nexport { floor } from './floor';\nexport { floorDiv } from './floorDiv';\nexport { gather } from './gather';\nexport { greater } from './greater';\nexport { greaterEqual } from './greater_equal';\nexport { imag } from './imag';\nexport { isFinite } from './is_finite';\nexport { isInf } from './is_inf';\nexport { isNaN } from './is_nan';\nexport { leakyRelu } from './leaky_relu';\nexport { less } from './less';\nexport { lessEqual } from './less_equal';\nexport { linspace } from './linspace';\nexport { localResponseNormalization } from './local_response_normalization';\nexport { log } from './log';\nexport { log1p } from './log1p';\nexport { logSigmoid } from './log_sigmoid';\nexport { 
logSoftmax } from './log_softmax';\nexport { logSumExp } from './log_sum_exp';\nexport { logicalAnd } from './logical_and';\nexport { logicalNot } from './logical_not';\nexport { logicalOr } from './logical_or';\nexport { logicalXor } from './logical_xor';\nexport { matMul } from './mat_mul';\nexport { max } from './max';\nexport { maxPool } from './max_pool';\nexport { maxPool3d } from './max_pool_3d';\nexport { maxPoolWithArgmax } from './max_pool_with_argmax';\nexport { maximum } from './maximum';\nexport { mean } from './mean';\nexport { min } from './min';\nexport { minimum } from './minimum';\nexport { mirrorPad } from './mirror_pad';\nexport { mod } from './mod';\nexport { moments } from './moments';\nexport { mul } from './mul';\nexport { multiRNNCell } from './multi_rnn_cell';\nexport { multinomial } from './multinomial';\nexport { neg } from './neg';\nexport { notEqual } from './not_equal';\nexport { oneHot } from './one_hot';\nexport { ones } from './ones';\nexport { onesLike } from './ones_like';\nexport { outerProduct } from './outer_product';\nexport { pad } from './pad';\nexport { pad1d } from './pad1d';\nexport { pad2d } from './pad2d';\nexport { pad3d } from './pad3d';\nexport { pad4d } from './pad4d';\nexport { pool } from './pool';\nexport { pow } from './pow';\nexport { prelu } from './prelu';\nexport { print } from './print';\nexport { prod } from './prod';\nexport { rand } from './rand';\nexport { randomGamma } from './random_gamma';\nexport { randomNormal } from './random_normal';\nexport { randomUniform } from './random_uniform';\nexport { range } from './range';\nexport { real } from './real';\nexport { reciprocal } from './reciprocal';\nexport { relu } from './relu';\nexport { relu6 } from './relu6';\nexport { reshape } from './reshape';\nexport { reverse } from './reverse';\nexport { reverse1d } from './reverse_1d';\nexport { reverse2d } from './reverse_2d';\nexport { reverse3d } from './reverse_3d';\nexport { reverse4d } from './reverse_4d';\nexport { round } from './round';\nexport { rsqrt } from './rsqrt';\nexport { scalar } from './scalar';\nexport { selu } from './selu';\nexport { separableConv2d } from './separable_conv2d';\nexport { setdiff1dAsync } from './setdiff1d_async';\nexport { sigmoid } from './sigmoid';\nexport { sign } from './sign';\nexport { sin } from './sin';\nexport { sinh } from './sinh';\nexport { slice } from './slice';\nexport { slice1d } from './slice1d';\nexport { slice2d } from './slice2d';\nexport { slice3d } from './slice3d';\nexport { slice4d } from './slice4d';\nexport { softmax } from './softmax';\nexport { softplus } from './softplus';\nexport { spaceToBatchND } from './space_to_batch_nd';\nexport { fft } from './spectral/fft';\nexport { ifft } from './spectral/ifft';\nexport { irfft } from './spectral/irfft';\nexport { rfft } from './spectral/rfft';\nexport { split } from './split';\nexport { sqrt } from './sqrt';\nexport { square } from './square';\nexport { squaredDifference } from './squared_difference';\nexport { squeeze } from './squeeze';\nexport { stack } from './stack';\nexport { step } from './step';\nexport { stridedSlice } from './strided_slice';\nexport { sub } from './sub';\nexport { sum } from './sum';\nexport { tan } from './tan';\nexport { tanh } from './tanh';\nexport { tensor } from './tensor';\nexport { tensor1d } from './tensor1d';\nexport { tensor2d } from './tensor2d';\nexport { tensor3d } from './tensor3d';\nexport { tensor4d } from './tensor4d';\nexport { tensor5d } from './tensor5d';\nexport { tensor6d 
} from './tensor6d';\nexport { tile } from './tile';\nexport { topk } from './topk';\nexport { truncatedNormal } from './truncated_normal';\nexport { unique } from './unique';\nexport { unsortedSegmentSum } from './unsorted_segment_sum';\nexport { unstack } from './unstack';\nexport { variable } from './variable';\nexport { where } from './where';\nexport { whereAsync } from './where_async';\nexport { zeros } from './zeros';\nexport { zerosLike } from './zeros_like';\nexport * from './boolean_mask';\nexport * from './compare';\nexport * from './binary_ops';\nexport * from './transpose';\nexport * from './norm';\nexport * from './moving_average';\nexport * from './scatter_nd';\nexport * from './sparse_to_dense';\nexport * from './gather_nd';\nexport * from './dropout';\nexport * from './signal_ops_util';\nexport * from './in_top_k';\nexport { op, OP_SCOPE_SUFFIX } from './operation';\nimport { rfft } from './spectral/rfft';\nimport { fft } from './spectral/fft';\nimport { ifft } from './spectral/ifft';\nimport { irfft } from './spectral/irfft';\nconst spectral = {\n fft,\n ifft,\n rfft,\n irfft\n};\nimport * as fused from './fused_ops';\nimport { hammingWindow } from './signal/hamming_window';\nimport { hannWindow } from './signal/hann_window';\nimport { frame } from './signal/frame';\nimport { stft } from './signal/stft';\nconst signal = {\n hammingWindow,\n hannWindow,\n frame,\n stft,\n};\n// Image Ops namespace\nimport { cropAndResize } from './image/crop_and_resize';\nimport { flipLeftRight } from './image/flip_left_right';\nimport { rotateWithOffset } from './image/rotate_with_offset';\nimport { nonMaxSuppression } from './image/non_max_suppression';\nimport { nonMaxSuppressionAsync } from './image/non_max_suppression_async';\nimport { nonMaxSuppressionWithScore } from './image/non_max_suppression_with_score';\nimport { nonMaxSuppressionWithScoreAsync } from './image/non_max_suppression_with_score_async';\nimport { nonMaxSuppressionPadded } from './image/non_max_suppression_padded';\nimport { nonMaxSuppressionPaddedAsync } from './image/non_max_suppression_padded_async';\nimport { resizeBilinear } from './image/resize_bilinear';\nimport { resizeNearestNeighbor } from './image/resize_nearest_neighbor';\nconst image = {\n flipLeftRight,\n resizeNearestNeighbor,\n resizeBilinear,\n rotateWithOffset,\n cropAndResize,\n nonMaxSuppression,\n nonMaxSuppressionAsync,\n nonMaxSuppressionWithScore,\n nonMaxSuppressionWithScoreAsync,\n nonMaxSuppressionPadded,\n nonMaxSuppressionPaddedAsync\n};\n// linalg namespace\nimport { bandPart } from './linalg/band_part';\nimport { gramSchmidt } from './linalg/gram_schmidt';\nimport { qr } from './linalg/qr';\nconst linalg = {\n bandPart,\n gramSchmidt,\n qr\n};\n// losses namespace;\nimport { absoluteDifference } from './losses/absolute_difference';\nimport { computeWeightedLoss } from './losses/compute_weighted_loss';\nimport { cosineDistance } from './losses/cosine_distance';\nimport { hingeLoss } from './losses/hinge_loss';\nimport { huberLoss } from './losses/huber_loss';\nimport { logLoss } from './losses/log_loss';\nimport { meanSquaredError } from './losses/mean_squared_error';\nimport { sigmoidCrossEntropy } from './losses/sigmoid_cross_entropy';\nimport { softmaxCrossEntropy } from './losses/softmax_cross_entropy';\nconst losses = {\n absoluteDifference,\n computeWeightedLoss,\n cosineDistance,\n hingeLoss,\n huberLoss,\n logLoss,\n meanSquaredError,\n sigmoidCrossEntropy,\n softmaxCrossEntropy\n};\n// Second level exports.\nexport { image, 
linalg, losses, spectral, fused, signal };\n//# sourceMappingURL=ops.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { dispose } from '../globals';\nimport { variableGrads } from '../gradients';\nimport { scalar } from '../ops/ops';\nimport { Serializable } from '../serialization';\n/** @doc {heading: 'Training', subheading: 'Classes', namespace: 'train'} */\nexport class Optimizer extends Serializable {\n /**\n * Executes `f()` and minimizes the scalar output of `f()` by computing\n * gradients of y with respect to the list of trainable variables provided by\n * `varList`. If no list is provided, it defaults to all trainable variables.\n *\n * @param f The function to execute and whose output to minimize.\n * @param returnCost Whether to return the scalar cost value produced by\n * executing `f()`.\n * @param varList An optional list of variables to update. If specified, only\n * the trainable variables in varList will be updated by minimize. Defaults to\n * all trainable variables.\n *\n * @doc {heading: 'Training', subheading: 'Optimizers'}\n */\n minimize(f, returnCost = false, varList) {\n const { value, grads } = this.computeGradients(f, varList);\n if (varList != null) {\n const gradArray = varList.map(v => ({ name: v.name, tensor: grads[v.name] }));\n this.applyGradients(gradArray);\n }\n else {\n this.applyGradients(grads);\n }\n // Dispose gradients.\n dispose(grads);\n if (returnCost) {\n return value;\n }\n else {\n value.dispose();\n return null;\n }\n }\n /**\n * The number of iterations that this optimizer instance has been invoked for.\n */\n get iterations() {\n if (this.iterations_ == null) {\n this.iterations_ = 0;\n }\n return this.iterations_;\n }\n incrementIterations() {\n this.iterations_ = this.iterations + 1;\n }\n /**\n * Executes f() and computes the gradient of the scalar output of f() with\n * respect to the list of trainable variables provided by `varList`. If no\n * list is provided, it defaults to all trainable variables.\n *\n * @param f The function to execute and whose output to use for computing\n * gradients with respect to variables.\n * @param varList An optional list of variables to compute gradients with\n * respect to. If specified, only the trainable variables in varList will have\n * gradients computed with respect to. 
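A minimal sketch of the `minimize()` contract described above, assuming the standard `tf.train` factories; `returnCost = true` hands back the loss tensor (which the caller disposes) and `varList` restricts the update to the listed variables:

import * as tf from '@tensorflow/tfjs-core';

const w = tf.scalar(Math.random()).variable();
const xs = tf.tensor1d([1, 2, 3, 4]);
const ys = tf.tensor1d([2, 4, 6, 8]); // y = 2x
const optimizer = tf.train.sgd(0.01);
for (let i = 0; i < 100; i++) {
  const cost = optimizer.minimize(() => xs.mul(w).sub(ys).square().mean(),
      /* returnCost */ true, /* varList */ [w]);
  cost.dispose(); // caller owns the returned cost tensor
}
w.print(); // approaches 2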
Defaults to all trainable variables.\n *\n * @doc {heading: 'Training', subheading: 'Optimizers'}\n */\n computeGradients(f, varList) {\n return variableGrads(f, varList);\n }\n /**\n * Dispose the variables (if any) owned by this optimizer instance.\n */\n dispose() {\n if (this.iterations_ != null) {\n dispose(this.iterations_);\n }\n }\n async saveIterations() {\n if (this.iterations_ == null) {\n this.iterations_ = 0;\n }\n return {\n name: 'iter',\n // TODO(cais): Use 'int64' type when available.\n tensor: scalar(this.iterations_, 'int32')\n };\n }\n async getWeights() {\n throw new Error('getWeights() is not implemented for this optimizer yet.');\n }\n async setWeights(weightValues) {\n throw new Error(`setWeights() is not implemented for this optimizer class ` +\n `${this.getClassName()}`);\n }\n /**\n * Extract the first element of the weight values and set it\n * as the iterations counter variable of this instance of optimizer.\n *\n * @param weightValues\n * @returns Weight values with the first element consumed and excluded.\n */\n async extractIterations(weightValues) {\n this.iterations_ = (await weightValues[0].tensor.data())[0];\n return weightValues.slice(1);\n }\n}\nObject.defineProperty(Optimizer, Symbol.hasInstance, {\n value: (instance) => {\n return instance.minimize != null && instance.computeGradients != null &&\n instance.applyGradients != null;\n }\n});\n//# sourceMappingURL=optimizer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { dispose, tidy } from '../globals';\nimport { add } from '../ops/add';\nimport { div } from '../ops/div';\nimport { mul } from '../ops/mul';\nimport { sqrt } from '../ops/ops';\nimport { square } from '../ops/square';\nimport { zerosLike } from '../ops/zeros_like';\nimport { registerClass } from '../serialization';\nimport { Optimizer } from './optimizer';\n/** @doclink Optimizer */\nexport class AdadeltaOptimizer extends Optimizer {\n constructor(learningRate, rho, epsilon = null) {\n super();\n this.learningRate = learningRate;\n this.rho = rho;\n this.epsilon = epsilon;\n this.accumulatedGrads = [];\n this.accumulatedUpdates = [];\n if (epsilon == null) {\n this.epsilon = ENGINE.backend.epsilon();\n }\n }\n applyGradients(variableGradients) {\n const variableNames = Array.isArray(variableGradients) ?\n variableGradients.map(item => item.name) :\n Object.keys(variableGradients);\n variableNames.forEach((name, i) => {\n const value = ENGINE.registeredVariables[name];\n const trainable = false;\n if (this.accumulatedGrads[i] == null) {\n this.accumulatedGrads[i] = {\n originalName: `${name}/accum_grad`,\n variable: tidy(() => zerosLike(value).variable(trainable))\n };\n }\n if (this.accumulatedUpdates[i] == null) {\n this.accumulatedUpdates[i] = {\n originalName: `${name}/accum_var`,\n variable: tidy(() => 
zerosLike(value).variable(trainable))\n };\n }\n const gradient = Array.isArray(variableGradients) ?\n variableGradients[i].tensor :\n variableGradients[name];\n if (gradient == null) {\n return;\n }\n const accumulatedGrad = this.accumulatedGrads[i].variable;\n const accumulatedUpdate = this.accumulatedUpdates[i].variable;\n tidy(() => {\n const newAccumulatedGrad = add(mul(accumulatedGrad, this.rho), mul(square(gradient), 1 - this.rho));\n const updates = mul(div(sqrt(add(accumulatedUpdate, this.epsilon)), sqrt(add(accumulatedGrad, this.epsilon))), gradient);\n const newAccumulatedUpdate = add(mul(accumulatedUpdate, this.rho), mul(square(updates), 1 - this.rho));\n accumulatedGrad.assign(newAccumulatedGrad);\n accumulatedUpdate.assign(newAccumulatedUpdate);\n const newValue = add(mul(updates, -this.learningRate), value);\n value.assign(newValue);\n });\n });\n this.incrementIterations();\n }\n dispose() {\n if (this.accumulatedUpdates != null) {\n dispose(this.accumulatedGrads.map(v => v.variable));\n dispose(this.accumulatedUpdates.map(v => v.variable));\n }\n }\n async getWeights() {\n // Order matters for Python compatibility.\n const variables = [...this.accumulatedGrads, ...this.accumulatedUpdates];\n return [await this.saveIterations()].concat(variables.map(v => ({ name: v.originalName, tensor: v.variable })));\n }\n async setWeights(weightValues) {\n weightValues = await this.extractIterations(weightValues);\n const variableCount = weightValues.length / 2;\n const trainable = false;\n this.accumulatedGrads =\n weightValues.slice(0, variableCount).map(v => ({\n originalName: v.name,\n variable: v.tensor.variable(trainable)\n }));\n this.accumulatedUpdates =\n weightValues.slice(variableCount, variableCount * 2)\n .map(v => ({\n originalName: v.name,\n variable: v.tensor.variable(trainable)\n }));\n }\n getConfig() {\n return {\n 'learningRate': this.learningRate,\n 'rho': this.rho,\n 'epsilon': this.epsilon\n };\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls(config['learningRate'], config['rho'], config['epsilon']);\n }\n}\n/** @nocollapse */\nAdadeltaOptimizer.className = 'Adadelta'; // Name matters for Python compatibility.\nregisterClass(AdadeltaOptimizer);\n//# sourceMappingURL=adadelta_optimizer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
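A minimal sketch assuming `tf.train.adadelta`; `rho` sets how fast the accumulated squared-gradient and squared-update statistics decay:

import * as tf from '@tensorflow/tfjs-core';

const optimizer = tf.train.adadelta(/* learningRate */ 0.001, /* rho */ 0.95);
const w = tf.tensor1d([3, -3]).variable();
for (let i = 0; i < 100; i++) {
  optimizer.minimize(() => w.square().sum());
}
w.print(); // moves (slowly, at this learning rate) toward [0, 0]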
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { dispose, tidy } from '../globals';\nimport { add } from '../ops/add';\nimport { div } from '../ops/div';\nimport { fill } from '../ops/fill';\nimport { mul } from '../ops/mul';\nimport { sqrt } from '../ops/sqrt';\nimport { square } from '../ops/square';\nimport { registerClass } from '../serialization';\nimport { Optimizer } from './optimizer';\n/** @doclink Optimizer */\nexport class AdagradOptimizer extends Optimizer {\n constructor(learningRate, initialAccumulatorValue = 0.1) {\n super();\n this.learningRate = learningRate;\n this.initialAccumulatorValue = initialAccumulatorValue;\n this.accumulatedGrads = [];\n }\n applyGradients(variableGradients) {\n const variableNames = Array.isArray(variableGradients) ?\n variableGradients.map(item => item.name) :\n Object.keys(variableGradients);\n variableNames.forEach((name, i) => {\n const value = ENGINE.registeredVariables[name];\n if (this.accumulatedGrads[i] == null) {\n const trainable = false;\n this.accumulatedGrads[i] = {\n originalName: `${name}/accumulator`,\n variable: tidy(() => fill(value.shape, this.initialAccumulatorValue)\n .variable(trainable))\n };\n }\n const gradient = Array.isArray(variableGradients) ?\n variableGradients[i].tensor :\n variableGradients[name];\n if (gradient == null) {\n return;\n }\n const accumulatedGrad = this.accumulatedGrads[i].variable;\n tidy(() => {\n const newAccumulatedGrad = add(accumulatedGrad, square(gradient));\n accumulatedGrad.assign(newAccumulatedGrad);\n const newValue = add(mul(div(gradient, sqrt(add(newAccumulatedGrad, ENGINE.backend.epsilon()))), -this.learningRate), value);\n value.assign(newValue);\n });\n });\n this.incrementIterations();\n }\n dispose() {\n if (this.accumulatedGrads != null) {\n dispose(this.accumulatedGrads.map(v => v.variable));\n }\n }\n async getWeights() {\n // Order matters for Python compatibility.\n return [await this.saveIterations()].concat(this.accumulatedGrads.map(v => ({ name: v.originalName, tensor: v.variable })));\n }\n async setWeights(weightValues) {\n weightValues = await this.extractIterations(weightValues);\n const trainable = false;\n this.accumulatedGrads = weightValues.map(v => ({ originalName: v.name, variable: v.tensor.variable(trainable) }));\n }\n getConfig() {\n return {\n 'learningRate': this.learningRate,\n 'initialAccumulatorValue': this.initialAccumulatorValue,\n };\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls(config['learningRate'], config['initialAccumulatorValue']);\n }\n}\n/** @nocollapse */\nAdagradOptimizer.className = 'Adagrad'; // Note: Name matters for Python compatibility.\nregisterClass(AdagradOptimizer);\n//# sourceMappingURL=adagrad_optimizer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
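A minimal sketch assuming `tf.train.adagrad`; every variable gets a per-parameter accumulator seeded with `initialAccumulatorValue`, so step sizes shrink as squared gradients accumulate:

import * as tf from '@tensorflow/tfjs-core';

const optimizer = tf.train.adagrad(/* learningRate */ 0.1, /* initialAccumulatorValue */ 0.1);
const w = tf.scalar(5).variable();
for (let i = 0; i < 50; i++) {
  optimizer.minimize(() => w.square());
}
w.print(); // moves toward 0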
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { dispose, tidy } from '../globals';\nimport { add } from '../ops/add';\nimport { div } from '../ops/div';\nimport { mul } from '../ops/mul';\nimport { pow } from '../ops/pow';\nimport { scalar } from '../ops/scalar';\nimport { sqrt } from '../ops/sqrt';\nimport { square } from '../ops/square';\nimport { sub } from '../ops/sub';\nimport { zerosLike } from '../ops/zeros_like';\nimport { registerClass } from '../serialization';\nimport { Optimizer } from './optimizer';\nexport class AdamOptimizer extends Optimizer {\n constructor(learningRate, beta1, beta2, epsilon = null) {\n super();\n this.learningRate = learningRate;\n this.beta1 = beta1;\n this.beta2 = beta2;\n this.epsilon = epsilon;\n this.accumulatedFirstMoment = [];\n this.accumulatedSecondMoment = [];\n tidy(() => {\n // accB* will be updated by batch.\n this.accBeta1 = scalar(beta1).variable();\n this.accBeta2 = scalar(beta2).variable();\n });\n if (epsilon == null) {\n this.epsilon = ENGINE.backend.epsilon();\n }\n }\n applyGradients(variableGradients) {\n const varNames = Array.isArray(variableGradients) ?\n variableGradients.map(v => v.name) :\n Object.keys(variableGradients);\n tidy(() => {\n const oneMinusAccBeta1 = sub(1, this.accBeta1);\n const oneMinusAccBeta2 = sub(1, this.accBeta2);\n varNames.forEach((name, i) => {\n const value = ENGINE.registeredVariables[name];\n const trainable = false;\n if (this.accumulatedFirstMoment[i] == null) {\n this.accumulatedFirstMoment[i] = {\n originalName: `${name}/m`,\n variable: tidy(() => zerosLike(value).variable(trainable))\n };\n }\n if (this.accumulatedSecondMoment[i] == null) {\n this.accumulatedSecondMoment[i] = {\n originalName: `${name}/v`,\n variable: tidy(() => zerosLike(value).variable(trainable))\n };\n }\n const gradient = Array.isArray(variableGradients) ?\n variableGradients[i].tensor :\n variableGradients[name];\n if (gradient == null) {\n return;\n }\n const firstMoment = this.accumulatedFirstMoment[i].variable;\n const secondMoment = this.accumulatedSecondMoment[i].variable;\n const newFirstMoment = add(mul(firstMoment, this.beta1), mul(gradient, 1 - this.beta1));\n const newSecondMoment = add(mul(secondMoment, this.beta2), mul(square(gradient), 1 - this.beta2));\n const biasCorrectedFirstMoment = div(newFirstMoment, oneMinusAccBeta1);\n const biasCorrectedSecondMoment = div(newSecondMoment, oneMinusAccBeta2);\n firstMoment.assign(newFirstMoment);\n secondMoment.assign(newSecondMoment);\n const newValue = add(mul(div(biasCorrectedFirstMoment, add(sqrt(biasCorrectedSecondMoment), this.epsilon)), -this.learningRate), value);\n value.assign(newValue);\n });\n this.accBeta1.assign(mul(this.accBeta1, this.beta1));\n this.accBeta2.assign(mul(this.accBeta2, this.beta2));\n });\n this.incrementIterations();\n }\n dispose() {\n this.accBeta1.dispose();\n 
this.accBeta2.dispose();\n if (this.accumulatedFirstMoment != null) {\n dispose(this.accumulatedFirstMoment.map(v => v.variable));\n }\n if (this.accumulatedSecondMoment != null) {\n dispose(this.accumulatedSecondMoment.map(v => v.variable));\n }\n }\n async getWeights() {\n // Order matters for Python compatibility.\n const variables = [...this.accumulatedFirstMoment, ...this.accumulatedSecondMoment];\n return [await this.saveIterations()].concat(variables.map(v => ({ name: v.originalName, tensor: v.variable })));\n }\n async setWeights(weightValues) {\n weightValues = await this.extractIterations(weightValues);\n tidy(() => {\n this.accBeta1.assign(pow(this.beta1, this.iterations_ + 1));\n this.accBeta2.assign(pow(this.beta2, this.iterations_ + 1));\n });\n const variableCount = weightValues.length / 2;\n const trainable = false;\n this.accumulatedFirstMoment =\n weightValues.slice(0, variableCount).map(v => ({\n originalName: v.name,\n variable: v.tensor.variable(trainable)\n }));\n this.accumulatedSecondMoment =\n weightValues.slice(variableCount, variableCount * 2)\n .map(v => ({\n originalName: v.name,\n variable: v.tensor.variable(trainable)\n }));\n }\n getConfig() {\n return {\n 'learningRate': this.learningRate,\n 'beta1': this.beta1,\n 'beta2': this.beta2,\n 'epsilon': this.epsilon,\n };\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls(config['learningRate'], config['beta1'], config['beta2'], config['epsilon']);\n }\n}\n/** @nocollapse */\nAdamOptimizer.className = 'Adam'; // Note: Name matters for Python compatibility.\nregisterClass(AdamOptimizer);\n//# sourceMappingURL=adam_optimizer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { dispose, tidy } from '../globals';\nimport { abs } from '../ops/abs';\nimport { add } from '../ops/add';\nimport { div } from '../ops/div';\nimport { maximum } from '../ops/maximum';\nimport { mul } from '../ops/mul';\nimport { scalar } from '../ops/scalar';\nimport { sub } from '../ops/sub';\nimport { zerosLike } from '../ops/zeros_like';\nimport { registerClass } from '../serialization';\nimport { Optimizer } from './optimizer';\nexport class AdamaxOptimizer extends Optimizer {\n constructor(learningRate, beta1, beta2, epsilon = null, decay = 0.0) {\n super();\n this.learningRate = learningRate;\n this.beta1 = beta1;\n this.beta2 = beta2;\n this.epsilon = epsilon;\n this.decay = decay;\n this.accumulatedFirstMoment = [];\n this.accumulatedWeightedInfNorm = [];\n tidy(() => {\n this.iteration = scalar(0).variable();\n this.accBeta1 = scalar(beta1).variable();\n });\n if (epsilon == null) {\n this.epsilon = ENGINE.backend.epsilon();\n }\n }\n applyGradients(variableGradients) {\n const variableNames = Array.isArray(variableGradients) ?\n variableGradients.map(item => item.name) :\n 
Object.keys(variableGradients);\n tidy(() => {\n const oneMinusAccBeta1 = sub(1, this.accBeta1);\n const lr = div(-this.learningRate, add(mul(this.iteration, this.decay), 1));\n variableNames.forEach((name, i) => {\n const value = ENGINE.registeredVariables[name];\n const trainable = false;\n if (this.accumulatedFirstMoment[i] == null) {\n this.accumulatedFirstMoment[i] = {\n originalName: `${name}/m`,\n variable: zerosLike(value).variable(trainable)\n };\n }\n if (this.accumulatedWeightedInfNorm[i] == null) {\n this.accumulatedWeightedInfNorm[i] = {\n originalName: `${name}/v`,\n variable: zerosLike(value).variable(trainable)\n };\n }\n const gradient = Array.isArray(variableGradients) ?\n variableGradients[i].tensor :\n variableGradients[name];\n if (gradient == null) {\n return;\n }\n const firstMoment = this.accumulatedFirstMoment[i].variable;\n const weightedInfNorm = this.accumulatedWeightedInfNorm[i].variable;\n const newFirstMoment = add(mul(firstMoment, this.beta1), mul(gradient, 1 - this.beta1));\n const ut0 = mul(weightedInfNorm, this.beta2);\n const ut1 = abs(gradient);\n const newWeightedInfNorm = maximum(ut0, ut1);\n firstMoment.assign(newFirstMoment);\n weightedInfNorm.assign(newWeightedInfNorm);\n const newValue = add(mul(div(lr, oneMinusAccBeta1), div(newFirstMoment, add(newWeightedInfNorm, this.epsilon))), value);\n value.assign(newValue);\n });\n this.iteration.assign(add(this.iteration, 1));\n this.accBeta1.assign(mul(this.accBeta1, this.beta1));\n });\n this.incrementIterations();\n }\n dispose() {\n this.accBeta1.dispose();\n this.iteration.dispose();\n if (this.accumulatedFirstMoment != null) {\n dispose(this.accumulatedFirstMoment.map(v => v.variable));\n }\n if (this.accumulatedWeightedInfNorm != null) {\n dispose(this.accumulatedWeightedInfNorm.map(v => v.variable));\n }\n }\n async getWeights() {\n throw new Error('getWeights() is not implemented for Adamax yet.');\n }\n async setWeights(weightValues) {\n throw new Error('setWeights() is not implemented for Adamax yet.');\n }\n getConfig() {\n return {\n 'learningRate': this.learningRate,\n 'beta1': this.beta1,\n 'beta2': this.beta2,\n 'epsilon': this.epsilon,\n 'decay': this.decay\n };\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls(config['learningRate'], config['beta1'], config['beta2'], config['epsilon'], config['decay']);\n }\n}\n/** @nocollapse */\nAdamaxOptimizer.className = 'Adamax'; // Note: Name matters for Python compatbility.\nregisterClass(AdamaxOptimizer);\n//# sourceMappingURL=adamax_optimizer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
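A minimal sketch assuming `tf.train.adam` and `tf.train.adamax`; Adam keeps bias-corrected first and second moment estimates (beta1/beta2 above), while Adamax replaces the second moment with an exponentially weighted infinity norm and accepts an optional `decay`:

import * as tf from '@tensorflow/tfjs-core';

const xs = tf.tensor1d([0, 1, 2, 3]);
const ys = tf.tensor1d([1, 3, 5, 7]); // y = 2x + 1
const a = tf.scalar(Math.random()).variable();
const b = tf.scalar(Math.random()).variable();
const optimizer = tf.train.adam(0.1, 0.9, 0.999); // or: tf.train.adamax(0.002, 0.9, 0.999, null, 0.0)
for (let i = 0; i < 200; i++) {
  optimizer.minimize(() => xs.mul(a).add(b).sub(ys).square().mean());
}
a.print(); b.print(); // should approach 2 and 1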
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { keep, tidy } from '../globals';\nimport { add } from '../ops/add';\nimport { mul } from '../ops/mul';\nimport { scalar } from '../ops/scalar';\nimport { registerClass } from '../serialization';\nimport { Optimizer } from './optimizer';\n/** @doclink Optimizer */\nexport class SGDOptimizer extends Optimizer {\n constructor(learningRate) {\n super();\n this.learningRate = learningRate;\n this.setLearningRate(learningRate);\n }\n applyGradients(variableGradients) {\n const varNames = Array.isArray(variableGradients) ?\n variableGradients.map(v => v.name) :\n Object.keys(variableGradients);\n varNames.forEach((name, i) => {\n const gradient = Array.isArray(variableGradients) ?\n variableGradients[i].tensor :\n variableGradients[name];\n if (gradient == null) {\n return;\n }\n const value = ENGINE.registeredVariables[name];\n tidy(() => {\n const newValue = add(mul(this.c, gradient), value);\n value.assign(newValue);\n });\n });\n this.incrementIterations();\n }\n /**\n * Sets the learning rate of the optimizer.\n */\n setLearningRate(learningRate) {\n this.learningRate = learningRate;\n if (this.c != null) {\n this.c.dispose();\n }\n this.c = keep(scalar(-learningRate));\n }\n dispose() {\n this.c.dispose();\n }\n async getWeights() {\n return [await this.saveIterations()];\n }\n async setWeights(weightValues) {\n weightValues = await this.extractIterations(weightValues);\n if (weightValues.length !== 0) {\n throw new Error('SGD optimizer does not have settable weights.');\n }\n }\n getConfig() {\n return { 'learningRate': this.learningRate };\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls(config['learningRate']);\n }\n}\n/** @nocollapse */\nSGDOptimizer.className = 'SGD'; // Note: Name matters for Python compatibility.\nregisterClass(SGDOptimizer);\n//# sourceMappingURL=sgd_optimizer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
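A minimal sketch assuming `tf.train.sgd`; `setLearningRate()` above lets a schedule change the step size between iterations without rebuilding the optimizer:

import * as tf from '@tensorflow/tfjs-core';

const optimizer = tf.train.sgd(0.1);
const w = tf.scalar(4).variable();
for (let i = 0; i < 20; i++) {
  optimizer.minimize(() => w.square());
  optimizer.setLearningRate(0.1 / (1 + i)); // simple 1/t decay schedule
}
w.print();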
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { dispose, tidy } from '../globals';\nimport { add } from '../ops/add';\nimport { mul } from '../ops/mul';\nimport { scalar } from '../ops/scalar';\nimport { zerosLike } from '../ops/zeros_like';\nimport { registerClass } from '../serialization';\nimport { SGDOptimizer } from './sgd_optimizer';\n/** @doclink Optimizer */\nexport class MomentumOptimizer extends SGDOptimizer {\n constructor(learningRate, momentum, useNesterov = false) {\n super(learningRate);\n this.learningRate = learningRate;\n this.momentum = momentum;\n this.useNesterov = useNesterov;\n this.accumulations = [];\n this.m = scalar(this.momentum);\n }\n applyGradients(variableGradients) {\n const variableNames = Array.isArray(variableGradients) ?\n variableGradients.map(item => item.name) :\n Object.keys(variableGradients);\n variableNames.forEach((name, i) => {\n const value = ENGINE.registeredVariables[name];\n if (this.accumulations[i] == null) {\n const trainable = false;\n this.accumulations[i] = {\n originalName: `${name}/momentum`,\n variable: tidy(() => zerosLike(value).variable(trainable))\n };\n }\n const accumulation = this.accumulations[i].variable;\n const gradient = Array.isArray(variableGradients) ?\n variableGradients[i].tensor :\n variableGradients[name];\n if (gradient == null) {\n return;\n }\n tidy(() => {\n let newValue;\n const newAccumulation = add(mul(this.m, accumulation), gradient);\n if (this.useNesterov) {\n newValue = add(mul(this.c, add(gradient, mul(newAccumulation, this.m))), value);\n }\n else {\n newValue = add(mul(this.c, newAccumulation), value);\n }\n accumulation.assign(newAccumulation);\n value.assign(newValue);\n });\n });\n this.incrementIterations();\n }\n dispose() {\n this.m.dispose();\n if (this.accumulations != null) {\n dispose(this.accumulations.map(v => v.variable));\n }\n }\n /**\n * Sets the momentum of the optimizer.\n *\n * @param momentum\n */\n setMomentum(momentum) {\n this.momentum = momentum;\n }\n async getWeights() {\n // Order matters for Python compatibility.\n return [await this.saveIterations()].concat(this.accumulations.map(v => ({ name: v.originalName, tensor: v.variable })));\n }\n async setWeights(weightValues) {\n weightValues = await this.extractIterations(weightValues);\n const trainable = false;\n this.accumulations = weightValues.map(v => ({ originalName: v.name, variable: v.tensor.variable(trainable) }));\n }\n getConfig() {\n return {\n 'learningRate': this.learningRate,\n 'momentum': this.momentum,\n 'useNesterov': this.useNesterov\n };\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls(config['learningRate'], config['momentum'], config['useNesterov']);\n }\n}\n/** @nocollapse */\nMomentumOptimizer.className = 'Momentum'; // Name matters for Python compatibility.\nregisterClass(MomentumOptimizer);\n//# 
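A minimal sketch assuming `tf.train.momentum`; with `useNesterov = true` the update applies the gradient at the looked-ahead position, as in the Nesterov branch of `applyGradients` above:

import * as tf from '@tensorflow/tfjs-core';

const optimizer = tf.train.momentum(0.05, /* momentum */ 0.9, /* useNesterov */ true);
const w = tf.scalar(2).variable();
for (let i = 0; i < 30; i++) {
  optimizer.minimize(() => w.square());
}
w.print();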
sourceMappingURL=momentum_optimizer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { dispose, tidy } from '../globals';\nimport { add } from '../ops/add';\nimport { div } from '../ops/div';\nimport { mul } from '../ops/mul';\nimport { sqrt } from '../ops/sqrt';\nimport { square } from '../ops/square';\nimport { sub } from '../ops/sub';\nimport { zerosLike } from '../ops/zeros_like';\nimport { registerClass } from '../serialization';\nimport { Optimizer } from './optimizer';\n/** @doclink Optimizer */\nexport class RMSPropOptimizer extends Optimizer {\n constructor(learningRate, decay = 0.9, momentum = 0.0, epsilon = null, centered = false) {\n super();\n this.learningRate = learningRate;\n this.decay = decay;\n this.momentum = momentum;\n this.epsilon = epsilon;\n this.accumulatedMeanSquares = [];\n this.accumulatedMoments = [];\n this.accumulatedMeanGrads = [];\n this.centered = centered;\n if (epsilon == null) {\n this.epsilon = ENGINE.backend.epsilon();\n }\n if (learningRate == null) {\n throw new Error(`learningRate for RMSPropOptimizer must be defined.`);\n }\n }\n applyGradients(variableGradients) {\n const variableNames = Array.isArray(variableGradients) ?\n variableGradients.map(item => item.name) :\n Object.keys(variableGradients);\n variableNames.forEach((name, i) => {\n const value = ENGINE.registeredVariables[name];\n const trainable = false;\n if (this.accumulatedMeanSquares[i] == null) {\n this.accumulatedMeanSquares[i] = {\n originalName: `${name}/rms`,\n variable: tidy(() => zerosLike(value).variable(trainable))\n };\n }\n if (this.accumulatedMoments[i] == null) {\n this.accumulatedMoments[i] = {\n originalName: `${name}/momentum`,\n variable: tidy(() => zerosLike(value).variable(trainable))\n };\n }\n if (this.accumulatedMeanGrads[i] == null && this.centered) {\n this.accumulatedMeanGrads[i] = {\n originalName: `${name}/mg`,\n variable: tidy(() => zerosLike(value).variable(trainable))\n };\n }\n const gradient = Array.isArray(variableGradients) ?\n variableGradients[i].tensor :\n variableGradients[name];\n if (gradient == null) {\n return;\n }\n const accumulatedMeanSquare = this.accumulatedMeanSquares[i].variable;\n const accumulatedMoments = this.accumulatedMoments[i].variable;\n tidy(() => {\n const newAccumulatedMeanSquare = add(mul(accumulatedMeanSquare, this.decay), mul(square(gradient), 1 - this.decay));\n if (this.centered) {\n const accumulatedMeanGrad = this.accumulatedMeanGrads[i].variable;\n // Centered gradient\n const newAccumulatedMeanGrad = add(mul(accumulatedMeanGrad, this.decay), mul(gradient, 1 - this.decay));\n const gradContribution = div(mul(gradient, this.learningRate), sqrt(sub(newAccumulatedMeanSquare, add(square(newAccumulatedMeanGrad), this.epsilon))));\n const newAccumulatedMoments = add(mul(accumulatedMoments, this.momentum), 
gradContribution);\n accumulatedMeanSquare.assign(newAccumulatedMeanSquare);\n accumulatedMeanGrad.assign(newAccumulatedMeanGrad);\n accumulatedMoments.assign(newAccumulatedMoments);\n const newValue = sub(value, newAccumulatedMoments);\n value.assign(newValue);\n }\n else {\n // Plain gradient\n const newAccumulatedMeanSquare = add(mul(accumulatedMeanSquare, this.decay), mul(square(gradient), 1 - this.decay));\n const newAccumulatedMoments = add(mul(accumulatedMoments, this.momentum), div(mul(gradient, this.learningRate), sqrt(add(newAccumulatedMeanSquare, this.epsilon))));\n accumulatedMeanSquare.assign(newAccumulatedMeanSquare);\n accumulatedMoments.assign(newAccumulatedMoments);\n const newValue = sub(value, newAccumulatedMoments);\n value.assign(newValue);\n }\n });\n });\n this.incrementIterations();\n }\n dispose() {\n if (this.accumulatedMeanSquares != null) {\n dispose(this.accumulatedMeanSquares.map(v => v.variable));\n }\n if (this.accumulatedMeanGrads != null && this.centered) {\n dispose(this.accumulatedMeanGrads.map(v => v.variable));\n }\n if (this.accumulatedMoments != null) {\n dispose(this.accumulatedMoments.map(v => v.variable));\n }\n }\n async getWeights() {\n // Order matters for Python compatibility.\n const variables = [...this.accumulatedMeanSquares, ...this.accumulatedMoments];\n if (this.centered) {\n variables.push(...this.accumulatedMeanGrads);\n }\n return [await this.saveIterations()].concat(variables.map(v => ({ name: v.originalName, tensor: v.variable })));\n }\n async setWeights(weightValues) {\n weightValues = await this.extractIterations(weightValues);\n const variableCount = this.centered ? weightValues.length / 3 : weightValues.length / 2;\n const trainable = false;\n this.accumulatedMeanSquares =\n weightValues.slice(0, variableCount).map(v => ({\n originalName: v.name,\n variable: v.tensor.variable(trainable)\n }));\n this.accumulatedMoments =\n weightValues.slice(variableCount, variableCount * 2)\n .map(v => ({\n originalName: v.name,\n variable: v.tensor.variable(trainable)\n }));\n if (this.centered) {\n this.accumulatedMeanGrads =\n weightValues.slice(variableCount * 2, variableCount * 3)\n .map(v => ({\n originalName: v.name,\n variable: v.tensor.variable(trainable)\n }));\n }\n }\n getConfig() {\n return {\n 'learningRate': this.learningRate,\n 'decay': this.decay,\n 'momentum': this.momentum,\n 'epsilon': this.epsilon,\n 'centered': this.centered\n };\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls(config['learningRate'], config['decay'], config['momentum'], config['epsilon'], config['centered']);\n }\n}\n/** @nocollapse */\nRMSPropOptimizer.className = 'RMSProp'; // Note: Name matters for Python compatibility.\nregisterClass(RMSPropOptimizer);\n//# sourceMappingURL=rmsprop_optimizer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
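A minimal sketch assuming `tf.train.rmsprop`; `centered = true` subtracts an estimate of the mean gradient before scaling, at the cost of one extra accumulator (`mg`) per variable:

import * as tf from '@tensorflow/tfjs-core';

const optimizer = tf.train.rmsprop(0.01, /* decay */ 0.9, /* momentum */ 0.0, null, /* centered */ true);
const w = tf.tensor1d([1, -2]).variable();
for (let i = 0; i < 50; i++) {
  optimizer.minimize(() => w.square().sum());
}
w.print();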
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { AdadeltaOptimizer } from './adadelta_optimizer';\nimport { AdagradOptimizer } from './adagrad_optimizer';\nimport { AdamOptimizer } from './adam_optimizer';\nimport { AdamaxOptimizer } from './adamax_optimizer';\nimport { MomentumOptimizer } from './momentum_optimizer';\nimport { RMSPropOptimizer } from './rmsprop_optimizer';\nimport { SGDOptimizer } from './sgd_optimizer';\nexport class OptimizerConstructors {\n /**\n * Constructs a `tf.SGDOptimizer` that uses stochastic gradient descent.\n *\n * ```js\n * // Fit a quadratic function by learning the coefficients a, b, c.\n * const xs = tf.tensor1d([0, 1, 2, 3]);\n * const ys = tf.tensor1d([1.1, 5.9, 16.8, 33.9]);\n *\n * const a = tf.scalar(Math.random()).variable();\n * const b = tf.scalar(Math.random()).variable();\n * const c = tf.scalar(Math.random()).variable();\n *\n * // y = a * x^2 + b * x + c.\n * const f = x => a.mul(x.square()).add(b.mul(x)).add(c);\n * const loss = (pred, label) => pred.sub(label).square().mean();\n *\n * const learningRate = 0.01;\n * const optimizer = tf.train.sgd(learningRate);\n *\n * // Train the model.\n * for (let i = 0; i < 10; i++) {\n * optimizer.minimize(() => loss(f(xs), ys));\n * }\n *\n * // Make predictions.\n * console.log(\n * `a: ${a.dataSync()}, b: ${b.dataSync()}, c: ${c.dataSync()}`);\n * const preds = f(xs).dataSync();\n * preds.forEach((pred, i) => {\n * console.log(`x: ${i}, pred: ${pred}`);\n * });\n * ```\n *\n * @param learningRate The learning rate to use for the SGD algorithm.\n *\n * @doc {heading: 'Training', subheading: 'Optimizers', namespace: 'train'}\n */\n static sgd(learningRate) {\n return new SGDOptimizer(learningRate);\n }\n /**\n * Constructs a `tf.MomentumOptimizer` that uses momentum gradient\n * descent.\n *\n * See\n * [http://proceedings.mlr.press/v28/sutskever13.pdf](\n * http://proceedings.mlr.press/v28/sutskever13.pdf)\n *\n * @param learningRate The learning rate to use for the Momentum gradient\n * descent algorithm.\n * @param momentum The momentum to use for the momentum gradient descent\n * algorithm.\n *\n * @doc {heading: 'Training', subheading: 'Optimizers', namespace: 'train'}\n */\n static momentum(learningRate, momentum, useNesterov = false) {\n return new MomentumOptimizer(learningRate, momentum, useNesterov);\n }\n /**\n * Constructs a `tf.RMSPropOptimizer` that uses RMSProp gradient\n * descent. 
This implementation uses plain momentum and is not centered\n * version of RMSProp.\n *\n * See\n * [http://www.cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf](\n * http://www.cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf)\n *\n * @param learningRate The learning rate to use for the RMSProp gradient\n * descent algorithm.\n * @param decay The discounting factor for the history/coming gradient.\n * @param momentum The momentum to use for the RMSProp gradient descent\n * algorithm.\n * @param epsilon Small value to avoid zero denominator.\n * @param centered If true, gradients are normalized by the estimated\n * variance of the gradient.\n *\n * @doc {heading: 'Training', subheading: 'Optimizers', namespace: 'train'}\n */\n static rmsprop(learningRate, decay = .9, momentum = 0.0, epsilon = null, centered = false) {\n return new RMSPropOptimizer(learningRate, decay, momentum, epsilon, centered);\n }\n /**\n * Constructs a `tf.AdamOptimizer` that uses the Adam algorithm.\n * See [https://arxiv.org/abs/1412.6980](https://arxiv.org/abs/1412.6980)\n *\n * @param learningRate The learning rate to use for the Adam gradient\n * descent algorithm.\n * @param beta1 The exponential decay rate for the 1st moment estimates.\n * @param beta2 The exponential decay rate for the 2nd moment estimates.\n * @param epsilon A small constant for numerical stability.\n *\n * @doc {heading: 'Training', subheading: 'Optimizers', namespace: 'train'}\n */\n static adam(learningRate = 0.001, beta1 = 0.9, beta2 = 0.999, epsilon = null) {\n return new AdamOptimizer(learningRate, beta1, beta2, epsilon);\n }\n /**\n * Constructs a `tf.AdadeltaOptimizer` that uses the Adadelta algorithm.\n * See [https://arxiv.org/abs/1212.5701](https://arxiv.org/abs/1212.5701)\n *\n * @param learningRate The learning rate to use for the Adadelta gradient\n * descent algorithm.\n * @param rho The learning rate decay over each update.\n * @param epsilon A constant epsilon used to better condition the grad\n * update.\n *\n * @doc {heading: 'Training', subheading: 'Optimizers', namespace: 'train'}\n */\n static adadelta(learningRate = .001, rho = .95, epsilon = null) {\n return new AdadeltaOptimizer(learningRate, rho, epsilon);\n }\n /**\n * Constructs a `tf.AdamaxOptimizer` that uses the Adamax algorithm.\n * See [https://arxiv.org/abs/1412.6980](https://arxiv.org/abs/1412.6980)\n *\n * @param learningRate The learning rate to use for the Adamax gradient\n * descent algorithm.\n * @param beta1 The exponential decay rate for the 1st moment estimates.\n * @param beta2 The exponential decay rate for the 2nd moment estimates.\n * @param epsilon A small constant for numerical stability.\n * @param decay The learning rate decay over each update.\n *\n * @doc {heading: 'Training', subheading: 'Optimizers', namespace: 'train'}\n */\n static adamax(learningRate = 0.002, beta1 = 0.9, beta2 = 0.999, epsilon = null, decay = 0.0) {\n return new AdamaxOptimizer(learningRate, beta1, beta2, epsilon, decay);\n }\n /**\n * Constructs a `tf.AdagradOptimizer` that uses the Adagrad algorithm.\n * See\n * [http://www.jmlr.org/papers/volume12/duchi11a/duchi11a.pdf](\n * http://www.jmlr.org/papers/volume12/duchi11a/duchi11a.pdf)\n * or\n * [http://ruder.io/optimizing-gradient-descent/index.html#adagrad](\n * http://ruder.io/optimizing-gradient-descent/index.html#adagrad)\n *\n * @param learningRate The learning rate to use for the Adagrad gradient\n * descent algorithm.\n * @param initialAccumulatorValue Starting value for the 
accumulators, must be\n * positive.\n *\n * @doc {heading: 'Training', subheading: 'Optimizers', namespace: 'train'}\n */\n static adagrad(learningRate, initialAccumulatorValue = 0.1) {\n return new AdagradOptimizer(learningRate, initialAccumulatorValue);\n }\n}\n//# sourceMappingURL=optimizer_constructors.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// So typings can propagate.\nimport { AdadeltaOptimizer } from './optimizers/adadelta_optimizer';\nimport { AdagradOptimizer } from './optimizers/adagrad_optimizer';\nimport { AdamOptimizer } from './optimizers/adam_optimizer';\nimport { AdamaxOptimizer } from './optimizers/adamax_optimizer';\nimport { MomentumOptimizer } from './optimizers/momentum_optimizer';\nimport { OptimizerConstructors } from './optimizers/optimizer_constructors';\nimport { RMSPropOptimizer } from './optimizers/rmsprop_optimizer';\nimport { SGDOptimizer } from './optimizers/sgd_optimizer';\n// tslint:disable-next-line:no-unused-expression\n[MomentumOptimizer, SGDOptimizer, AdadeltaOptimizer, AdagradOptimizer,\n RMSPropOptimizer, AdamaxOptimizer, AdamOptimizer];\nexport const train = {\n sgd: OptimizerConstructors.sgd,\n momentum: OptimizerConstructors.momentum,\n adadelta: OptimizerConstructors.adadelta,\n adagrad: OptimizerConstructors.adagrad,\n rmsprop: OptimizerConstructors.rmsprop,\n adamax: OptimizerConstructors.adamax,\n adam: OptimizerConstructors.adam\n};\n//# sourceMappingURL=train.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nconst delayCallback = (() => {\n if (typeof requestAnimationFrame !== 'undefined') {\n return requestAnimationFrame;\n }\n else if (typeof setImmediate !== 'undefined') {\n return setImmediate;\n }\n return (f) => f(); // no delays\n})();\n/**\n * Returns a promise that resolve when a requestAnimationFrame has completed.\n *\n * On Node.js this uses setImmediate instead of requestAnimationFrame.\n *\n * This is simply a sugar method so that users can do the following:\n * `await tf.nextFrame();`\n *\n * @doc {heading: 'Performance', subheading: 'Timing'}\n */\nfunction nextFrame() {\n return new Promise(resolve => delayCallback(() => resolve()));\n}\nexport { nextFrame };\n//# sourceMappingURL=browser_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// Returns the image center in pixels.\nexport function getImageCenter(center, imageHeight, imageWidth) {\n const centerX = imageWidth * (typeof center === 'number' ? center : center[0]);\n const centerY = imageHeight * (typeof center === 'number' ? center : center[1]);\n return [centerX, centerY];\n}\n//# sourceMappingURL=rotate_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * Gets the new shape of the input Tensor after it's been reshaped\n * to:\n * [blockShape[0], ..., blockShape[M-1], batch / prod(blockShape),\n * inputShape[1], ..., inputShape[N-1]]\n *\n * See step 1: https://www.tensorflow.org/api_docs/python/tf/batch_to_space_nd\n */\nexport function getReshaped(inputShape, blockShape, prod, batchToSpace = true) {\n let reshaped = [];\n if (batchToSpace) {\n reshaped = reshaped.concat(blockShape.slice(0));\n reshaped.push(inputShape[0] / prod);\n reshaped = reshaped.concat(inputShape.slice(1));\n }\n else {\n reshaped = reshaped.concat(inputShape[0]);\n const spatialLength = blockShape.length;\n for (let i = 0; i < spatialLength; ++i) {\n reshaped =\n reshaped.concat([inputShape[i + 1] / blockShape[i], blockShape[i]]);\n }\n reshaped = reshaped.concat(inputShape.slice(spatialLength + 1));\n }\n return reshaped;\n}\n/**\n * Gets the permutation that will transpose the dimensions of the\n * reshaped tensor to shape:\n *\n * [batch / prod(block_shape),inputShape[1], blockShape[0], ...,\n * inputShape[M], blockShape[M-1],inputShape[M+1], ..., inputShape[N-1]]\n *\n * see step 2: https://www.tensorflow.org/api_docs/python/tf/batch_to_space_nd\n */\nexport function getPermuted(reshapedRank, blockShapeRank, batchToSpace = true) {\n const permuted = [];\n if (batchToSpace) {\n permuted.push(blockShapeRank);\n for (let i = blockShapeRank + 1; i < reshapedRank; ++i) {\n if (i <= 2 * blockShapeRank) {\n permuted.push(i);\n permuted.push(i - (blockShapeRank + 1));\n }\n else {\n permuted.push(i);\n }\n }\n }\n else {\n const permutedBeforeBatch = [];\n const permutedAfterBatch = [];\n for (let i = 1; i < reshapedRank; ++i) {\n if (i >= blockShapeRank * 2 + 1 || i % 2 === 1) {\n permutedAfterBatch.push(i);\n }\n else {\n permutedBeforeBatch.push(i);\n }\n }\n permuted.push(...permutedBeforeBatch);\n permuted.push(0);\n permuted.push(...permutedAfterBatch);\n }\n return permuted;\n}\n/**\n * Gets the shape of the reshaped and permuted input Tensor before any cropping\n * is applied. 
The new shape will be:\n *\n * [batch / prod(blockShape),inputShape[1] * blockShape[0], ...,\n * inputShape[M] * blockShape[M-1],inputShape[M+1], ..., inputShape[N-1]]\n *\n * See step 3: https://www.tensorflow.org/api_docs/python/tf/batch_to_space_nd\n */\nexport function getReshapedPermuted(inputShape, blockShape, prod, batchToSpace = true) {\n const reshapedPermuted = [];\n if (batchToSpace) {\n reshapedPermuted.push(inputShape[0] / prod);\n }\n else {\n reshapedPermuted.push(inputShape[0] * prod);\n }\n for (let i = 1; i < inputShape.length; ++i) {\n if (i <= blockShape.length) {\n if (batchToSpace) {\n reshapedPermuted.push(blockShape[i - 1] * inputShape[i]);\n }\n else {\n reshapedPermuted.push(inputShape[i] / blockShape[i - 1]);\n }\n }\n else {\n reshapedPermuted.push(inputShape[i]);\n }\n }\n return reshapedPermuted;\n}\n/**\n * Converts the crops argument into the beginning coordinates of a slice\n * operation.\n */\nexport function getSliceBeginCoords(crops, blockShape) {\n const sliceBeginCoords = [0];\n for (let i = 0; i < blockShape; ++i) {\n sliceBeginCoords.push(crops[i][0]);\n }\n return sliceBeginCoords;\n}\n/**\n * Converts the crops argument into the size of a slice operation. When\n * combined with getSliceBeginCoords this function allows the reshaped and\n * permuted Tensor to be cropped to its final output shape of:\n *\n * inputShape[1] * blockShape[0] - crops[0,0] - crops[0,1], ...,\n * inputShape[M] * blockShape[M-1] -crops[M-1,0] -\n * crops[M-1,1],inputShape[M+1], ..., inputShape[N-1]]\n *\n * See step 4: https://www.tensorflow.org/api_docs/python/tf/batch_to_space_nd\n */\nexport function getSliceSize(uncroppedShape, crops, blockShape) {\n const sliceSize = uncroppedShape.slice(0, 1);\n for (let i = 0; i < blockShape; ++i) {\n sliceSize.push(uncroppedShape[i + 1] - crops[i][0] - crops[i][1]);\n }\n return sliceSize;\n}\n//# sourceMappingURL=array_ops_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const SELU_SCALEALPHA = 1.7580993408473768599402175208123;\nexport const SELU_SCALE = 1.0507009873554804934193349852946;\n//# sourceMappingURL=selu_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const ERF_P = 0.3275911;\nexport const ERF_A1 = 0.254829592;\nexport const ERF_A2 = -0.284496736;\nexport const ERF_A3 = 1.421413741;\nexport const ERF_A4 = -1.453152027;\nexport const ERF_A5 = 1.061405429;\n//# sourceMappingURL=erf_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from './environment';\nexport function warn(...msg) {\n if (!env().getBool('IS_TEST')) {\n console.warn(...msg);\n }\n}\nexport function log(...msg) {\n if (!env().getBool('IS_TEST')) {\n console.log(...msg);\n }\n}\n//# sourceMappingURL=log.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * Merges real and imaginary Float32Arrays into a single complex Float32Array.\n *\n * The memory layout is interleaved as follows:\n * real: [r0, r1, r2]\n * imag: [i0, i1, i2]\n * complex: [r0, i0, r1, i1, r2, i2]\n *\n * This is the inverse of splitRealAndImagArrays.\n *\n * @param real The real values of the complex tensor values.\n * @param imag The imag values of the complex tensor values.\n * @returns A complex tensor as a Float32Array with merged values.\n */\nexport function mergeRealAndImagArrays(real, imag) {\n if (real.length !== imag.length) {\n throw new Error(`Cannot merge real and imag arrays of different lengths. 
real:` +\n `${real.length}, imag: ${imag.length}.`);\n }\n const result = new Float32Array(real.length * 2);\n for (let i = 0; i < result.length; i += 2) {\n result[i] = real[i / 2];\n result[i + 1] = imag[i / 2];\n }\n return result;\n}\n/**\n * Splits a complex Float32Array into real and imag parts.\n *\n * The memory layout is interleaved as follows:\n * complex: [r0, i0, r1, i1, r2, i2]\n * real: [r0, r1, r2]\n * imag: [i0, i1, i2]\n *\n * This is the inverse of mergeRealAndImagArrays.\n *\n * @param complex The complex tensor values.\n * @returns An object with real and imag Float32Array components of the complex\n * tensor.\n */\nexport function splitRealAndImagArrays(complex) {\n const real = new Float32Array(complex.length / 2);\n const imag = new Float32Array(complex.length / 2);\n for (let i = 0; i < complex.length; i += 2) {\n real[i / 2] = complex[i];\n imag[i / 2] = complex[i + 1];\n }\n return { real, imag };\n}\n/**\n * Extracts even indexed complex values in the given array.\n * @param complex The complex tensor values\n */\nexport function complexWithEvenIndex(complex) {\n const len = Math.ceil(complex.length / 4);\n const real = new Float32Array(len);\n const imag = new Float32Array(len);\n for (let i = 0; i < complex.length; i += 4) {\n real[Math.floor(i / 4)] = complex[i];\n imag[Math.floor(i / 4)] = complex[i + 1];\n }\n return { real, imag };\n}\n/**\n * Extracts odd indexed comple values in the given array.\n * @param complex The complex tensor values\n */\nexport function complexWithOddIndex(complex) {\n const len = Math.floor(complex.length / 4);\n const real = new Float32Array(len);\n const imag = new Float32Array(len);\n for (let i = 2; i < complex.length; i += 4) {\n real[Math.floor(i / 4)] = complex[i];\n imag[Math.floor(i / 4)] = complex[i + 1];\n }\n return { real, imag };\n}\n/**\n * Get the map representing a complex value in the given array.\n * @param complex The complex tensor values.\n * @param index An index of the target complex value.\n */\nexport function getComplexWithIndex(complex, index) {\n const real = complex[index * 2];\n const imag = complex[index * 2 + 1];\n return { real, imag };\n}\n/**\n * Insert a given complex value into the TypedArray.\n * @param data The array in which the complex value is inserted.\n * @param c The complex value to be inserted.\n * @param index An index of the target complex value.\n */\nexport function assignToTypedArray(data, real, imag, index) {\n data[index * 2] = real;\n data[index * 2 + 1] = imag;\n}\n/**\n * Make the list of exponent terms used by FFT.\n */\nexport function exponents(n, inverse) {\n const real = new Float32Array(n / 2);\n const imag = new Float32Array(n / 2);\n for (let i = 0; i < Math.ceil(n / 2); i++) {\n const x = (inverse ? 2 : -2) * Math.PI * (i / n);\n real[i] = Math.cos(x);\n imag[i] = Math.sin(x);\n }\n return { real, imag };\n}\n/**\n * Make the exponent term used by FFT.\n */\nexport function exponent(k, n, inverse) {\n const x = (inverse ? 2 : -2) * Math.PI * (k / n);\n const real = Math.cos(x);\n const imag = Math.sin(x);\n return { real, imag };\n}\n//# sourceMappingURL=complex_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { cast } from '../ops/cast';\nimport { scalar } from '../ops/scalar';\nimport { tensor1d } from '../ops/tensor1d';\nimport { zeros } from '../ops/zeros';\nimport { hasEncodingLoss, makeZerosTypedArray } from '../util';\n// Utilities needed by backend consumers of tf-core.\nexport * from '../ops/axis_util';\nexport * from '../ops/broadcast_util';\nexport * from '../ops/concat_util';\nexport * from '../ops/conv_util';\nexport * from '../ops/fused_util';\nexport * from '../ops/reduce_util';\nimport * as slice_util from '../ops/slice_util';\nexport { slice_util };\nexport { upcastType } from '../types';\nexport * from '../ops/rotate_util';\nexport * from '../ops/array_ops_util';\nexport * from '../ops/gather_nd_util';\nexport * from '../ops/scatter_nd_util';\nexport * from '../ops/selu_util';\nexport * from '../ops/fused_util';\nexport * from '../ops/erf_util';\nexport * from '../log';\nexport * from '../backends/complex_util';\nexport * from '../ops/split_util';\nimport * as segment_util from '../ops/segment_util';\nexport { segment_util };\nexport function castTensor(x, dtype, backend) {\n if (dtype === 'complex64') {\n if (x.dtype === 'complex64') {\n return x.clone();\n }\n const zerosTensor = zeros(x.shape);\n const floatX = cast(x, 'float32');\n const result = backend.complex(floatX, zerosTensor);\n zerosTensor.dispose();\n floatX.dispose();\n return result;\n }\n if (!hasEncodingLoss(x.dtype, dtype)) {\n // We don't change the underlying data, since we cast to higher\n // precision.\n return ENGINE.makeTensorFromDataId(x.dataId, x.shape, dtype);\n }\n if (x.dtype === 'complex64') {\n const real = backend.real(x);\n const result = cast(real, dtype);\n real.dispose();\n return result;\n }\n if (dtype === 'int32') {\n return backend.int(x);\n }\n else if (dtype === 'bool') {\n const zero = scalar(0, x.dtype);\n const result = backend.notEqual(x, zero);\n zero.dispose();\n return result;\n }\n else {\n throw new Error(`Error in Cast: failed to cast ${x.dtype} to ${dtype}`);\n }\n}\nexport function reshapeTensor(x, shape) {\n return ENGINE.makeTensorFromDataId(x.dataId, shape, x.dtype);\n}\nexport function linspaceImpl(start, stop, num) {\n const step = (stop - start) / (num - 1);\n const values = makeZerosTypedArray(num, 'float32');\n values[0] = start;\n for (let i = 1; i < values.length; i++) {\n values[i] = values[i - 1] + step;\n }\n return tensor1d(values, 'float32');\n}\n//# sourceMappingURL=backend_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { slice } from '../ops/slice';\n// TODO(annxingyuan): Use this helper in WASM Split kernel once intermediate\n// kernels have been modularized in WebGL and CPU\n// https://github.com/tensorflow/tfjs/issues/2822.\n/** Shared implementation of the split kernel across WebGL and CPU. */\nexport function split(x, sizeSplits, axis) {\n const begin = new Array(x.rank).fill(0);\n const size = x.shape.slice();\n return sizeSplits.map(s => {\n const sliceSize = [...size];\n sliceSize[axis] = s;\n const sliceT = slice(x, begin, sliceSize);\n begin[axis] += s;\n return sliceT;\n });\n}\n//# sourceMappingURL=split_shared.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * An implementation of the tile kernel shared between webgl and cpu for string\n * tensors only.\n */\nimport { buffer } from '../ops/buffer';\nexport function tile(xBuf, reps) {\n const newShape = new Array(xBuf.rank);\n for (let i = 0; i < newShape.length; i++) {\n newShape[i] = xBuf.shape[i] * reps[i];\n }\n const result = buffer(newShape, xBuf.dtype);\n for (let i = 0; i < result.values.length; ++i) {\n const newLoc = result.indexToLoc(i);\n const originalLoc = new Array(xBuf.rank);\n for (let j = 0; j < originalLoc.length; j++) {\n originalLoc[j] = newLoc[j] % xBuf.shape[j];\n }\n const originalIndex = xBuf.locToIndex(originalLoc);\n result.values[i] = xBuf.values[originalIndex];\n }\n return result.toTensor();\n}\n//# sourceMappingURL=tile_impl.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/** An implementation of the TopK kernel shared between webgl and cpu. */\nimport { tensor } from '../ops/tensor';\nimport { getTypedArrayFromDType } from '../util';\nexport function topkImpl(x, xShape, xDtype, k, sorted) {\n // Reshape into a 2d tensor [batch, lastDim] and compute topk along lastDim.\n const lastDim = xShape[xShape.length - 1];\n const [batch, size] = [x.length / lastDim, lastDim];\n const allTopKVals = getTypedArrayFromDType(xDtype, batch * k);\n const allTopKIndices = getTypedArrayFromDType('int32', batch * k);\n for (let b = 0; b < batch; b++) {\n const offset = b * size;\n const vals = x.subarray(offset, offset + size);\n const valAndInd = [];\n for (let i = 0; i < vals.length; i++) {\n valAndInd.push({ value: vals[i], index: i });\n }\n valAndInd.sort((a, b) => b.value - a.value);\n const outOffset = b * k;\n const topKVals = allTopKVals.subarray(outOffset, outOffset + k);\n const topKIndices = allTopKIndices.subarray(outOffset, outOffset + k);\n for (let i = 0; i < k; i++) {\n topKVals[i] = valAndInd[i].value;\n topKIndices[i] = valAndInd[i].index;\n }\n }\n // Reshape back to the original input shape, except that the last\n // dimension is k.\n const outputShape = xShape.slice();\n outputShape[outputShape.length - 1] = k;\n return [\n tensor(allTopKVals, outputShape, xDtype),\n tensor(allTopKIndices, outputShape, 'int32')\n ];\n}\n//# sourceMappingURL=topk_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport { nonMaxSuppressionV3Impl, nonMaxSuppressionV4Impl, nonMaxSuppressionV5Impl } from './non_max_suppression_impl';\nexport { split } from './split_shared';\nexport { tile } from './tile_impl';\nexport { topkImpl } from './topk_impl';\nexport { whereImpl } from './where_impl';\n//# sourceMappingURL=kernel_impls.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// base.ts is tfjs-core without auto registration of things like flags,\n// gradients, chained ops or the opHandler. See base_side_effects.ts for parts\n// tfjs core that are required side effects.\n/**\n * @fileoverview\n * @suppress {partialAlias} Optimization disabled due to passing the module\n * object into a function below:\n *\n * import * as ops from './ops/ops';\n * setOpHandler(ops);\n */\n// Serialization.\nimport * as io from './io/io';\nimport * as math from './math';\nimport * as browser from './ops/browser';\nimport * as gather_util from './ops/gather_nd_util';\nimport * as scatter_util from './ops/scatter_nd_util';\nimport * as slice_util from './ops/slice_util';\nimport * as serialization from './serialization';\nimport * as tensor_util from './tensor_util';\nimport * as test_util from './test_util';\nimport * as util from './util';\nimport { version } from './version';\n// Optimizers.\nexport { AdadeltaOptimizer } from './optimizers/adadelta_optimizer';\nexport { AdagradOptimizer } from './optimizers/adagrad_optimizer';\nexport { AdamOptimizer } from './optimizers/adam_optimizer';\nexport { AdamaxOptimizer } from './optimizers/adamax_optimizer';\nexport { MomentumOptimizer } from './optimizers/momentum_optimizer';\nexport { Optimizer } from './optimizers/optimizer';\nexport { RMSPropOptimizer } from './optimizers/rmsprop_optimizer';\nexport { SGDOptimizer } from './optimizers/sgd_optimizer';\nexport { Tensor, TensorBuffer, Variable } from './tensor';\nexport { Rank, sumOutType, upcastType } from './types';\nexport * from './ops/ops';\nexport { Reduction } from './ops/loss_ops_utils';\nexport * from './train';\nexport * from './globals';\nexport * from './kernel_registry';\nexport { customGrad, grad, grads, valueAndGrad, valueAndGrads, variableGrads } from './gradients';\nexport { Environment, env, ENV } from './environment';\nexport { version as version_core };\n// Top-level method exports.\nexport { nextFrame } from './browser_util';\n// Second level exports.\nimport * as backend_util from './backends/backend_util';\nimport * as device_util from './device_util';\nexport { browser, io, math, serialization, test_util, util, backend_util, tensor_util, slice_util, gather_util, scatter_util, device_util };\nimport * as kernel_impls from './backends/kernel_impls';\nexport { kernel_impls };\n// Backend specific.\nexport { KernelBackend, DataStorage } from './backends/backend';\n// Export all kernel names / info.\nexport * from './kernel_names';\n//# sourceMappingURL=base.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Abs } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { mul } from '../ops/mul';\nimport { step } from '../ops/step';\nexport const absGradConfig = {\n kernelName: Abs,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => mul(dy, step(cast(x, 'float32'), -1)) };\n }\n};\n//# sourceMappingURL=Abs_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Acos } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { div } from '../ops/div';\nimport { neg } from '../ops/neg';\nimport { scalar } from '../ops/scalar';\nimport { sqrt } from '../ops/sqrt';\nimport { square } from '../ops/square';\nimport { sub } from '../ops/sub';\nexport const acosGradConfig = {\n kernelName: Acos,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return {\n x: () => {\n const a = square(cast(x, 'float32'));\n const b = sqrt(sub(scalar(1), a));\n return neg(div(dy, b));\n }\n };\n }\n};\n//# sourceMappingURL=Acos_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Acosh } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { div } from '../ops/div';\nimport { sqrt } from '../ops/sqrt';\nimport { square } from '../ops/square';\nimport { sub } from '../ops/sub';\nexport const acoshGradConfig = {\n kernelName: Acosh,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return {\n x: () => {\n const a = sqrt(sub(square(cast(x, 'float32')), 1));\n return div(dy, a);\n }\n };\n }\n};\n//# sourceMappingURL=Acosh_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Add } from '../kernel_names';\nimport * as broadcast_util from '../ops/broadcast_util';\nimport { reshape } from '../ops/reshape';\nimport { sum } from '../ops/sum';\nexport const addGradConfig = {\n kernelName: Add,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved) => {\n const [a, b] = saved;\n const outShape = broadcast_util.assertAndGetBroadcastShape(a.shape, b.shape);\n const derA = () => {\n let res = dy;\n const reduceAxes = broadcast_util.getReductionAxes(a.shape, outShape);\n if (reduceAxes.length > 0) {\n res = sum(res, reduceAxes);\n }\n return reshape(res, a.shape);\n };\n const derB = () => {\n let res = dy;\n const reduceAxes = broadcast_util.getReductionAxes(b.shape, outShape);\n if (reduceAxes.length > 0) {\n res = sum(res, reduceAxes);\n }\n return reshape(res, b.shape);\n };\n return { a: derA, b: derB };\n }\n};\n//# sourceMappingURL=Add_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { AddN } from '../kernel_names';\nexport const addNGradConfig = {\n kernelName: AddN,\n saveAllInputs: true,\n gradFunc: (dy, saved) => {\n const ders = {};\n saved.forEach((_, i) => {\n ders[i] = () => dy.clone();\n });\n return ders;\n }\n};\n//# sourceMappingURL=AddN_grad.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ArgMax } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const argMaxGradConfig = {\n kernelName: ArgMax,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => zerosLike(x) };\n }\n};\n//# sourceMappingURL=ArgMax_grad.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ArgMin } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const argMinGradConfig = {\n kernelName: ArgMin,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => zerosLike(x) };\n }\n};\n//# sourceMappingURL=ArgMin_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Asin } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { div } from '../ops/div';\nimport { scalar } from '../ops/scalar';\nimport { sqrt } from '../ops/sqrt';\nimport { square } from '../ops/square';\nimport { sub } from '../ops/sub';\nexport const asinGradConfig = {\n kernelName: Asin,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => div(dy, sqrt(sub(scalar(1), square(cast(x, 'float32'))))) };\n }\n};\n//# sourceMappingURL=Asin_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Asinh } from '../kernel_names';\nimport { add } from '../ops/add';\nimport { cast } from '../ops/cast';\nimport { div } from '../ops/div';\nimport { scalar } from '../ops/scalar';\nimport { sqrt } from '../ops/sqrt';\nimport { square } from '../ops/square';\nexport const asinhGradConfig = {\n kernelName: Asinh,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return {\n x: () => {\n const a = sqrt(add(scalar(1), square(cast(x, 'float32'))));\n return div(dy, a);\n }\n };\n }\n};\n//# sourceMappingURL=Asinh_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Atan2 } from '../kernel_names';\nimport { add } from '../ops/add';\nimport { assertAndGetBroadcastShape, getReductionAxes } from '../ops/broadcast_util';\nimport { div } from '../ops/div';\nimport { mul } from '../ops/mul';\nimport { neg } from '../ops/neg';\nimport { reshape } from '../ops/reshape';\nimport { square } from '../ops/square';\nimport { sum } from '../ops/sum';\nexport const atan2GradConfig = {\n kernelName: Atan2,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved) => {\n const [a, b] = saved;\n const outShape = assertAndGetBroadcastShape(a.shape, b.shape);\n const derA = () => {\n const d = add(square(a), square(b));\n let res = mul(dy, div(b, d));\n const reduceAxes = getReductionAxes(a.shape, outShape);\n if (reduceAxes.length > 0) {\n res = sum(res, reduceAxes);\n }\n return reshape(res, a.shape);\n };\n const derB = () => {\n const d = add(square(a), square(b));\n let res = neg(mul(dy, div(a, d)));\n const reduceAxes = getReductionAxes(b.shape, outShape);\n if (reduceAxes.length > 0) {\n res = sum(res, reduceAxes);\n }\n return reshape(res, b.shape);\n };\n return { a: derA, b: derB };\n }\n};\n//# sourceMappingURL=Atan2_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Atan } from '../kernel_names';\nimport { add } from '../ops/add';\nimport { cast } from '../ops/cast';\nimport { div } from '../ops/div';\nimport { square } from '../ops/square';\nexport const atanGradConfig = {\n kernelName: Atan,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => div(dy, add(square(cast(x, 'float32')), 1)) };\n }\n};\n//# sourceMappingURL=Atan_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Atanh } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { div } from '../ops/div';\nimport { square } from '../ops/square';\nimport { sub } from '../ops/sub';\nimport { scalar } from '../ops/scalar';\nexport const atanhGradConfig = {\n kernelName: Atanh,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => div(dy, sub(scalar(1), square(cast(x, 'float32')))) };\n }\n};\n//# sourceMappingURL=Atanh_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { AvgPool3DBackprop } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the backprop of a 3d avg pool.\n *\n * @param dy The dy error, of rank 5 of shape\n * [batchSize, depth, height, width, channels].\n * assumed.\n * @param input The original input image, of rank 5 or rank4 of shape\n * [batchSize, depth, height, width, channels].\n * @param filterSize The filter size:\n * `[filterDepth, filterHeight, filterWidth]`.\n * `filterSize` is a single number,\n * then `filterDepth == filterHeight == filterWidth`.\n * @param strides The strides of the pooling:\n * `[strideDepth, strideHeight, strideWidth]`. If\n * `strides` is a single number, then `strideHeight == strideWidth`.\n * @param dilations Deprecated, this field will be gone in v3.0.0. The dilation\n * rates: `[dilationDepth, dilationHeight, dilationWidth]`\n * in which we sample input values across the depth, height and width\n * dimensions in dilated pooling.\n * Defaults to `[1, 1, 1]`. If `dilations` is a single number,\n * then `dilationDepth == dilationHeight == dilationWidth`.\n * If it is greater than 1, then all values of `strides` must be 1.\n * @param pad A string from: 'same', 'valid'. The type of padding algorithm\n * used in the forward prop of the op.\n * @param dimRoundingMode A string from: 'ceil', 'round', 'floor'. The\n * rounding mode used when computing output dimensions if pad is a\n * number. 
If none is provided, it will not round and error if the output\n * is of fractional size.\n */\nfunction avgPool3dBackprop_(dy, input, filterSize, strides, dilations = [1, 1, 1], pad, dimRoundingMode) {\n const $dy = convertToTensor(dy, 'dy', 'avgPool3dBackprop');\n const $input = convertToTensor(input, 'input', 'avgPool3dBackprop');\n let dy5D = $dy;\n let input5D = $input;\n let reshapedTo5D = false;\n if ($input.rank === 4) {\n reshapedTo5D = true;\n dy5D = reshape($dy, [1, $dy.shape[0], $dy.shape[1], $dy.shape[2], $dy.shape[3]]);\n input5D = reshape($input, [\n 1, $input.shape[0], $input.shape[1], $input.shape[2], $input.shape[3]\n ]);\n }\n util.assert(dy5D.rank === 5, () => `Error in avgPool3dBackprop: dy must be rank 5 but got rank ` +\n `${dy5D.rank}.`);\n util.assert(input5D.rank === 5, () => `Error in avgPool3dBackprop: input must be rank 5 but got rank ` +\n `${input5D.rank}.`);\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in avgPool3dBackprop: Either strides or dilations ' +\n `must be 1. Got strides ${strides} and dilations '${dilations}'`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in maxPool3dBackprop: pad must be an integer when ` +\n `using, dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const forward = backend => {\n const convInfo = conv_util.computePool3DInfo(input5D.shape, filterSize, strides, dilations, pad, dimRoundingMode);\n return backend.avgPool3dBackprop(dy5D, input5D, convInfo);\n };\n const inputs = { dy: dy5D, input: input5D };\n const attrs = { filterSize, strides, dilations, pad, dimRoundingMode };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, AvgPool3DBackprop, attrs);\n if (reshapedTo5D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3], res.shape[4]]);\n }\n return res;\n}\nexport const avgPool3dBackprop = op({ avgPool3dBackprop_ });\n//# sourceMappingURL=avg_pool_3d_backprop.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { AvgPool3D } from '../kernel_names';\nimport { avgPool3dBackprop } from '../ops/avg_pool_3d_backprop';\nexport const avgPool3DGradConfig = {\n kernelName: AvgPool3D,\n inputsToSave: ['x'],\n gradFunc: (dy, saved, attrs) => {\n const [x] = saved;\n const { filterSize, strides, dilations, pad, dimRoundingMode } = attrs;\n const $dilations = dilations == null ? [1, 1, 1] : dilations;\n return {\n x: () => avgPool3dBackprop(dy, x, filterSize, strides, $dilations, pad, dimRoundingMode)\n };\n }\n};\n//# sourceMappingURL=AvgPool3D_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { AvgPoolBackprop } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the backprop of an 2D avg pool.\n *\n * @param dy The dy error, of rank 4 or rank 3 of shape\n * [batchSize, height, width, channels]. If rank 3, batch of 1 is\n * assumed.\n * @param input The input image, of rank 4 or rank 3 of shape\n * [batchSize, height, width, channels]. If rank 3, batch of 1 is\n * assumed.\n * @param filterSize The filter size: `[filterHeight, filterWidth]`. If\n * `filterSize` is a single number, then `filterHeight == filterWidth`.\n * @param strides The strides of the pooling: `[strideHeight, strideWidth]`. If\n * `strides` is a single number, then `strideHeight == strideWidth`.\n * @param pad A string from: 'same', 'valid'. The type of padding algorithm\n * used in the forward prop of the op.\n */\nfunction avgPoolBackprop_(dy, input, filterSize, strides, pad) {\n const $dy = convertToTensor(dy, 'dy', 'avgPoolBackprop');\n const $input = convertToTensor(input, 'input', 'avgPoolBackprop');\n util.assert($input.rank === $dy.rank, () => `Rank of input (${$input.rank}) does not match rank of dy (${$dy.rank})`);\n let input4D = $input;\n let dy4D = $dy;\n let reshapedTo4D = false;\n if ($input.rank === 3) {\n reshapedTo4D = true;\n input4D =\n reshape($input, [1, $input.shape[0], $input.shape[1], $input.shape[2]]);\n dy4D = reshape($dy, [1, $dy.shape[0], $dy.shape[1], $dy.shape[2]]);\n }\n util.assert(dy4D.rank === 4, () => `Error in avgPoolBackprop: dy must be rank 4 but got rank ` +\n `${dy4D.rank}.`);\n util.assert(input4D.rank === 4, () => `Error in avgPoolBackprop: input must be rank 4 but got rank ` +\n `${input4D.rank}.`);\n const forward = backend => {\n const convInfo = conv_util.computePool2DInfo(input4D.shape, filterSize, strides, 1 /* dilations */, pad);\n return backend.avgPoolBackprop(dy4D, input4D, convInfo);\n };\n const inputs = { dy: dy4D, input: input4D };\n const attrs = { filterSize, strides, pad };\n const res = ENGINE.runKernelFunc(forward, inputs, null, AvgPoolBackprop, attrs);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const avgPoolBackprop = op({ avgPoolBackprop_ });\n//# sourceMappingURL=avg_pool_backprop.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { AvgPool } from '../kernel_names';\nimport { avgPoolBackprop } from '../ops/avg_pool_backprop';\nexport const avgPoolGradConfig = {\n kernelName: AvgPool,\n inputsToSave: ['x'],\n gradFunc: (dy, saved, attrs) => {\n const [x] = saved;\n const { filterSize, strides, pad } = attrs;\n return {\n x: () => avgPoolBackprop(dy, x, filterSize, strides, pad)\n };\n }\n};\n//# sourceMappingURL=AvgPool_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { BatchMatMul } from '../kernel_names';\nimport { matMul } from '../ops/mat_mul';\nexport const batchMatMulGradConfig = {\n kernelName: BatchMatMul,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved, attrs) => {\n const [a, b] = saved;\n const { transposeA, transposeB } = attrs;\n if (!transposeA && !transposeB) {\n return {\n a: () => matMul(dy, b, false, true),\n b: () => matMul(a, dy, true, false)\n };\n }\n else if (!transposeA && transposeB) {\n return {\n a: () => matMul(dy, b, false, false),\n b: () => matMul(dy, a, true, false)\n };\n }\n else if (transposeA && !transposeB) {\n return {\n a: () => matMul(b, dy, false, true),\n b: () => matMul(a, dy, false, false)\n };\n }\n else {\n return {\n a: () => matMul(b, dy, true, true),\n b: () => matMul(dy, a, true, true)\n };\n }\n }\n};\n//# sourceMappingURL=BatchMatMul_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { BatchToSpaceND } from '../kernel_names';\nimport { spaceToBatchND } from '../ops/space_to_batch_nd';\nexport const batchToSpaceNDGradConfig = {\n kernelName: BatchToSpaceND,\n gradFunc: (dy, saved, attrs) => {\n const { blockShape, crops } = attrs;\n return { x: () => spaceToBatchND(dy, blockShape, crops) };\n }\n};\n//# sourceMappingURL=BatchToSpaceND_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { BroadcastTo } from '../kernel_names';\nimport { sum } from '../ops/sum';\nexport const broadcastToGradConfig = {\n kernelName: BroadcastTo,\n gradFunc: (dy, saved, attrs) => {\n const broadCastToAttrs = attrs;\n const inputShape = broadCastToAttrs.inputShape;\n const outputShape = broadCastToAttrs.shape;\n const reps = Array.from(outputShape);\n for (let i = inputShape.length - 1; i >= 0; i--) {\n if (inputShape[i] === outputShape[i]) {\n reps[i] = 1;\n }\n else if (inputShape[i] !== 1) {\n throw new Error(`broadcastTo(): [${inputShape}] cannot be broadcast to [${outputShape}].`);\n }\n }\n const axes = [];\n for (let i = 0; i < reps.length; i++) {\n if (reps[i] > 1) {\n axes.push(i);\n }\n }\n return { x: () => sum(dy, axes, true /* keepDims */) };\n }\n};\n//# sourceMappingURL=BroadcastTo_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Cast } from '../kernel_names';\nexport const castGradConfig = {\n kernelName: Cast,\n gradFunc: (dy) => {\n return { x: () => dy.clone() };\n }\n};\n//# sourceMappingURL=Cast_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Ceil } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const ceilGradConfig = {\n kernelName: Ceil,\n gradFunc: (dy) => {\n // TODO(manrajgrover): Return null for gradients when backprop supports it.\n return { x: () => zerosLike(dy) };\n }\n};\n//# sourceMappingURL=Ceil_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ClipByValue } from '../kernel_names';\nimport { greaterEqual } from '../ops/greater_equal';\nimport { lessEqual } from '../ops/less_equal';\nimport { logicalAnd } from '../ops/logical_and';\nimport { where } from '../ops/where';\nimport { zerosLike } from '../ops/zeros_like';\nexport const clipByValueGradConfig = {\n kernelName: ClipByValue,\n inputsToSave: ['x'],\n gradFunc: (dy, saved, attrs) => {\n const [x] = saved;\n const { clipValueMin, clipValueMax } = attrs;\n return {\n x: () => where(logicalAnd(greaterEqual(x, clipValueMin), lessEqual(x, clipValueMax)), dy, zerosLike(dy)),\n };\n }\n};\n//# sourceMappingURL=ClipByValue_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Concat } from '../kernel_names';\nimport { split } from '../ops/split';\nimport { parseAxisParam } from '../util';\nexport const concatGradConfig = {\n kernelName: Concat,\n saveAllInputs: true,\n gradFunc: (dy, saved, attrs) => {\n const shapes = saved.map(t => t.shape);\n const { axis } = attrs;\n const $axis = parseAxisParam(axis, saved[0].shape)[0];\n const sizeSplits = shapes.map(s => s[$axis]);\n const derTensors = split(dy, sizeSplits, $axis);\n return derTensors.map(t => () => t);\n }\n};\n//# sourceMappingURL=Concat_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Conv2D } from '../kernel_names';\nimport { conv2DBackpropFilter } from '../ops/conv2d_backprop_filter';\nimport { conv2DBackpropInput } from '../ops/conv2d_backprop_input';\nimport * as conv_util from '../ops/conv_util';\nimport * as util from '../util';\nexport const conv2DGradConfig = {\n kernelName: Conv2D,\n inputsToSave: ['x', 'filter'],\n gradFunc: (dy, saved, attrs) => {\n const [x4D, $filter] = saved;\n const { dilations, strides, pad, dataFormat } = attrs;\n util.assert(conv_util.tupleValuesAreOne(dilations), () => 'Error in gradient of conv2D: dilation rates greater than 1 ' +\n `are not yet supported in gradients. Got dilations '${dilations}'`);\n return {\n x: () => conv2DBackpropInput(x4D.shape, dy, $filter, strides, pad, dataFormat),\n filter: () => conv2DBackpropFilter(x4D, dy, $filter.shape, strides, pad, dataFormat)\n };\n }\n};\n//# sourceMappingURL=Conv2D_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Conv2DBackpropInput } from '../kernel_names';\nimport { conv2d } from '../ops/conv2d';\nimport { conv2DBackpropFilter } from '../ops/conv2d_backprop_filter';\nexport const conv2DBackpropInputGradConfig = {\n kernelName: Conv2DBackpropInput,\n inputsToSave: ['dy', 'filter'],\n gradFunc: (ddx, saved, attrs) => {\n const [dy, filter] = saved;\n const { strides, pad, dataFormat, dimRoundingMode } = attrs;\n return {\n dy: () => conv2d(ddx, filter, strides, pad, dataFormat, 1 /* dilations */, dimRoundingMode),\n filter: () => conv2DBackpropFilter(ddx, dy, filter.shape, strides, pad, dataFormat, dimRoundingMode)\n };\n }\n};\n//# sourceMappingURL=Conv2DBackpropInput_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Conv3DBackpropFilterV2 } from '../kernel_names';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the derivative of the filter of a 3D convolution.\n *\n * @param x The input tensor, of rank 5 or rank 4 of shape\n * [batch, depth, height, width, inChannels]. If rank 4, batch of 1 is\n * assumed.\n * @param dy The dy image, of rank 5 or rank 4, of shape\n * [batch, depth, height, width, outDepth]. If rank 4, batch of 1 is\n * assumed.\n * @param filterShape The shape of the filter, length 5,\n * [filterDepth, filterHeight, filterWidth, inDepth, outDepth].\n * @param strides The strides of the convolution: [strideDepth, strideHeight,\n * strideWidth].\n * @param pad A string from: 'same', 'valid'. 
The type of padding algorithm\n * used in the forward prop of the op.\n */\nfunction conv3DBackpropFilter_(x, dy, filterShape, strides, pad) {\n let x5D = x;\n if (x.rank === 4) {\n x5D = reshape(x, [1, x.shape[0], x.shape[1], x.shape[2], x.shape[3]]);\n }\n let dy5D = dy;\n if (dy5D.rank === 4) {\n dy5D = reshape(dy, [1, dy.shape[0], dy.shape[1], dy.shape[2], dy.shape[3]]);\n }\n util.assert(x5D.rank === 5, () => `Error in conv3dDerFilter: input must be rank 5, but got shape ` +\n `${x5D.shape}.`);\n util.assert(dy5D.rank === 5, () => `Error in conv3dDerFilter: dy must be rank 5, but got shape ` +\n `${dy5D.shape}.`);\n util.assert(filterShape.length === 5, () => `Error in conv3dDerFilter: filterShape must be length 5, but got ` +\n `${filterShape}.`);\n util.assert(x5D.shape[4] === filterShape[3], () => `Error in conv3dDerFilter: depth of input ${x5D.shape[4]}) must ` +\n `match input depth in filter (${filterShape[3]}.`);\n util.assert(dy5D.shape[4] === filterShape[4], () => `Error in conv3dDerFilter: depth of dy (${dy5D.shape[4]}) must ` +\n `match output depth for filter (${filterShape[4]}).`);\n const forward = backend => {\n const dilations = 1;\n const convInfo = conv_util.computeConv3DInfo(x5D.shape, filterShape, strides, dilations, pad);\n return backend.conv3dDerFilter(x5D, dy5D, convInfo);\n };\n const inputs = { x: x5D, dy: dy5D };\n const attrs = { strides, pad, filterShape };\n return ENGINE.runKernelFunc(forward, inputs, null, Conv3DBackpropFilterV2, attrs);\n}\nexport const conv3DBackpropFilter = op({ conv3DBackpropFilter_ });\n//# sourceMappingURL=conv3d_backprop_filter.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Conv3D } from '../kernel_names';\nimport { conv3DBackpropFilter } from '../ops/conv3d_backprop_filter';\nimport { conv3DBackpropInput } from '../ops/conv3d_backprop_input';\nimport { tupleValuesAreOne } from '../ops/conv_util';\nimport * as util from '../util';\nexport const conv3DGradConfig = {\n kernelName: Conv3D,\n inputsToSave: ['x', 'filter'],\n gradFunc: (dy, saved, attrs) => {\n const { dilations, strides, pad } = attrs;\n util.assert(tupleValuesAreOne(dilations), () => 'Error in gradient of conv3D: dilation rates greater than 1 are ' +\n `not yet supported in gradients. Got dilations '${dilations}'`);\n const [x5D, $filter] = saved;\n return {\n x: () => conv3DBackpropInput(x5D.shape, dy, $filter, strides, pad),\n filter: () => conv3DBackpropFilter(x5D, dy, $filter.shape, strides, pad)\n };\n }\n};\n//# sourceMappingURL=Conv3D_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Cos } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { mul } from '../ops/mul';\nimport { neg } from '../ops/neg';\nimport { sin } from '../ops/sin';\nexport const cosGradConfig = {\n kernelName: Cos,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => mul(neg(sin(cast(x, 'float32'))), dy) };\n }\n};\n//# sourceMappingURL=Cos_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Cosh } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { mul } from '../ops/mul';\nimport { sinh } from '../ops/sinh';\nexport const coshGradConfig = {\n kernelName: Cosh,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => mul(sinh(cast(x, 'float32')), dy) };\n }\n};\n//# sourceMappingURL=Cosh_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Cumsum } from '../kernel_names';\nimport { getAxesPermutation } from '../ops/axis_util';\nimport { cumsum } from '../ops/cumsum';\nimport { transpose } from '../ops/transpose';\nexport const cumsumGradConfig = {\n kernelName: Cumsum,\n inputsToSave: ['x'],\n gradFunc: (dy, saved, attrs) => {\n const [x] = saved;\n const { axis, exclusive, reverse } = attrs;\n return {\n x: () => {\n const permutation = getAxesPermutation([axis], x.rank);\n let out = cumsum(dy, axis, exclusive, !reverse);\n if (permutation != null) {\n out = transpose(out, permutation);\n }\n return out;\n }\n };\n }\n};\n//# sourceMappingURL=Cumsum_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { DepthwiseConv2dNative } from '../kernel_names';\nimport * as conv_util from '../ops/conv_util';\nimport { depthwiseConv2dNativeBackpropFilter } from '../ops/depthwise_conv2d_native_backprop_filter';\nimport { depthwiseConv2dNativeBackpropInput } from '../ops/depthwise_conv2d_native_backprop_input';\nimport * as util from '../util';\nexport const depthwiseConv2dNativeGradConfig = {\n kernelName: DepthwiseConv2dNative,\n inputsToSave: ['x', 'filter'],\n gradFunc: (dy, saved, attrs) => {\n const { dilations, strides, pad, dimRoundingMode } = attrs;\n const $dilations = dilations == null ? [1, 1] : dilations;\n util.assert(conv_util.tupleValuesAreOne($dilations), () => 'Error in gradient of depthwiseConv2dNative: dilation rates ' +\n `greater than 1 are not yet supported. 
Got dilations ` +\n `'${$dilations}'`);\n const [x, filter] = saved;\n util.assert(x.rank === 4, () => `Error in gradient of depthwiseConv2dNative: input must be ` +\n `rank 4, but got rank ${x.rank}.`);\n util.assert(filter.rank === 4, () => `Error in gradient of depthwiseConv2dNative: filter must be ` +\n `rank 4, but got rank ${filter.rank}.`);\n util.assert(x.shape[3] === filter.shape[2], () => `Error in gradient of depthwiseConv2d: number of input ` +\n `channels (${x.shape[3]}) must match the inChannels dimension ` +\n `in filter ${filter.shape[2]}.`);\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, $dilations), () => 'Error in gradient of depthwiseConv2d: Either strides or ' +\n `dilations must be 1. Got strides ${strides} and dilations ` +\n `'${$dilations}'.`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in depthwiseConv2d: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n return {\n x: () => depthwiseConv2dNativeBackpropInput(x.shape, dy, filter, strides, pad, dilations, dimRoundingMode),\n filter: () => depthwiseConv2dNativeBackpropFilter(x, dy, filter.shape, strides, pad, dilations, dimRoundingMode),\n };\n }\n};\n//# sourceMappingURL=DepthwiseConv2dNative_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Dilation2D, Dilation2DBackpropFilter, Dilation2DBackpropInput } from '../kernel_names';\nexport const dilation2dGradConfig = {\n kernelName: Dilation2D,\n inputsToSave: ['x', 'filter'],\n gradFunc: (dy, saved, attrs) => {\n const [x, filter] = saved;\n const inputInputs = { x, filter, dy };\n const filterInputs = { x, filter, dy };\n return {\n x: () => ENGINE.runKernel(Dilation2DBackpropInput, inputInputs, attrs),\n filter: () => ENGINE.runKernel(Dilation2DBackpropFilter, filterInputs, attrs)\n };\n }\n};\n//# sourceMappingURL=Dilation2D_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Div } from '../kernel_names';\nimport * as broadcast_util from '../ops/broadcast_util';\nimport { cast } from '../ops/cast';\nimport { div } from '../ops/div';\nimport { mul } from '../ops/mul';\nimport { neg } from '../ops/neg';\nimport { reshape } from '../ops/reshape';\nimport { square } from '../ops/square';\nimport { sum } from '../ops/sum';\nexport const divGradConfig = {\n kernelName: Div,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved) => {\n const [a, b] = saved;\n const outShape = broadcast_util.assertAndGetBroadcastShape(a.shape, b.shape);\n const derA = () => {\n const res = div(dy, cast(b, 'float32'));\n const reduceAxes = broadcast_util.getReductionAxes(a.shape, outShape);\n if (reduceAxes.length > 0) {\n return reshape(sum(res, reduceAxes), a.shape);\n }\n return res;\n };\n const derB = () => {\n let res = mul(dy, cast(a, 'float32'));\n const reduceAxes = broadcast_util.getReductionAxes(b.shape, outShape);\n if (reduceAxes.length > 0) {\n res = reshape(sum(res, reduceAxes), b.shape);\n }\n const tmp = square(b);\n return neg(div(res, cast(tmp, 'float32')));\n };\n return { a: derA, b: derB };\n }\n};\n//# sourceMappingURL=Div_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Elu, EluGrad } from '../kernel_names';\nexport const eluGradConfig = {\n kernelName: Elu,\n outputsToSave: [true],\n gradFunc: (dy, saved) => {\n const [y] = saved;\n const backPropKernelFunc = (backend) => {\n return backend.eluDer(dy, y);\n };\n const inputs = { dy, y };\n return {\n x: () => ENGINE.runKernelFunc(backPropKernelFunc, inputs, null /* grad */, EluGrad)\n };\n }\n};\n//# sourceMappingURL=Elu_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Erf } from '../kernel_names';\nimport { exp } from '../ops/exp';\nimport { mul } from '../ops/mul';\nimport { neg } from '../ops/neg';\nimport { square } from '../ops/square';\nexport const erfGradConfig = {\n kernelName: Erf,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n const a = mul(exp(neg(square(x))), 2 / Math.sqrt(Math.PI));\n return { x: () => mul(dy, a) };\n }\n};\n//# sourceMappingURL=Erf_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Exp } from '../kernel_names';\nimport { mul } from '../ops/mul';\nexport const expGradConfig = {\n kernelName: Exp,\n outputsToSave: [true],\n gradFunc: (dy, saved) => {\n const [y] = saved;\n return { x: () => mul(dy, y) };\n }\n};\n//# sourceMappingURL=Exp_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Expm1 } from '../kernel_names';\nimport { exp } from '../ops/exp';\nimport { mul } from '../ops/mul';\nexport const expm1GradConfig = {\n kernelName: Expm1,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => mul(dy, exp(x)) };\n }\n};\n//# sourceMappingURL=Expm1_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Floor } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const floorGradConfig = {\n kernelName: Floor,\n gradFunc: (dy) => {\n return { x: () => zerosLike(dy) };\n }\n};\n//# sourceMappingURL=Floor_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { FloorDiv } from '../kernel_names';\nimport { assertAndGetBroadcastShape, getReductionAxes } from '../ops/broadcast_util';\nimport { cast } from '../ops/cast';\nimport { div } from '../ops/div';\nimport { mul } from '../ops/mul';\nimport { neg } from '../ops/neg';\nimport { reshape } from '../ops/reshape';\nimport { square } from '../ops/square';\nimport { sum } from '../ops/sum';\nexport const floorDivGradConfig = {\n kernelName: FloorDiv,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved) => {\n const [a, b] = saved;\n const outShape = assertAndGetBroadcastShape(a.shape, b.shape);\n const derA = () => {\n const res = div(dy, cast(b, 'float32'));\n const reduceAxes = getReductionAxes(a.shape, outShape);\n if (reduceAxes.length > 0) {\n return reshape(sum(res, reduceAxes), a.shape);\n }\n return res;\n };\n const derB = () => {\n let res = mul(dy, cast(a, 'float32'));\n const reduceAxes = getReductionAxes(b.shape, outShape);\n if (reduceAxes.length > 0) {\n res = reshape(sum(res, reduceAxes), b.shape);\n }\n const tmp = square(b);\n return neg(div(res, cast(tmp, 'float32')));\n };\n return { a: derA, b: derB };\n }\n};\n//# sourceMappingURL=FloorDiv_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { FusedBatchNorm } from '../kernel_names';\nimport { add } from '../ops/add';\nimport { getReductionAxes } from '../ops/broadcast_util';\nimport { mul } from '../ops/mul';\nimport { reshape } from '../ops/reshape';\nimport { rsqrt } from '../ops/rsqrt';\nimport { scalar } from '../ops/scalar';\nimport { sub } from '../ops/sub';\nimport { sum } from '../ops/sum';\nimport { tile } from '../ops/tile';\nexport const fusedBatchNormGradConfig = {\n kernelName: FusedBatchNorm,\n inputsToSave: ['x', 'mean', 'variance', 'scale'],\n gradFunc: (dy, saved, attrs) => {\n const { varianceEpsilon } = attrs;\n const [x, mean, variance, scale] = saved;\n const scaleValue = scale == null ? scalar(1) : scale;\n const reductionAxes = getReductionAxes(mean.shape, x.shape);\n const tileShape = [];\n if (mean.rank === 1) {\n for (let i = 0; i < x.shape.length - 1; ++i) {\n tileShape.push(x.shape[i]);\n }\n tileShape.push(1);\n }\n const xMinusMean = sub(x, mean);\n const dyTimesScaleValue = mul(dy, scaleValue);\n const oneOverSqrtVariance = rsqrt(add(variance, scalar(varianceEpsilon)));\n const minusHalfRCube = mul(mul(mul(oneOverSqrtVariance, oneOverSqrtVariance), oneOverSqrtVariance), scalar(-0.5));\n const derX = () => {\n if (mean.rank === 1) {\n return reshape(mul(mul(dy, tile(reshape(oneOverSqrtVariance, [1, 1, 1, mean.shape[0]]), tileShape)), scaleValue), x.shape);\n }\n else {\n return reshape(mul(mul(dy, oneOverSqrtVariance), scaleValue), x.shape);\n }\n };\n const derMean = () => {\n let meanDer = mul(mul(oneOverSqrtVariance, scalar(-1)), dyTimesScaleValue);\n if (mean.rank === 1) {\n meanDer = sum(meanDer, reductionAxes);\n }\n return reshape(meanDer, mean.shape);\n };\n const derVariance = () => {\n let varianceDer = mul(mul(minusHalfRCube, xMinusMean), dyTimesScaleValue);\n if (mean.rank === 1) {\n varianceDer = sum(varianceDer, reductionAxes);\n }\n return reshape(varianceDer, mean.shape);\n };\n const derScale = () => {\n const xMinusMean2TimesRsqrt = mul(xMinusMean, oneOverSqrtVariance);\n let scaleDer = mul(dy, xMinusMean2TimesRsqrt);\n if (mean.rank === 1) {\n scaleDer = sum(scaleDer, reductionAxes);\n }\n return reshape(scaleDer, mean.shape);\n };\n const derOffset = () => {\n let offsetDer = dy;\n if (mean.rank === 1) {\n offsetDer = sum(offsetDer, reductionAxes);\n }\n return reshape(offsetDer, mean.shape);\n };\n return {\n x: derX,\n mean: derMean,\n variance: derVariance,\n scale: derScale,\n offset: derOffset\n };\n }\n};\n//# sourceMappingURL=FusedBatchNorm_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { GatherV2 } from '../kernel_names';\nimport { getUndoAxesPermutation } from '../ops/axis_util';\nimport { reshape } from '../ops/reshape';\nimport { transpose } from '../ops/transpose';\nimport { unsortedSegmentSum } from '../ops/unsorted_segment_sum';\nimport { parseAxisParam } from '../util';\nexport const gatherGradConfig = {\n kernelName: GatherV2,\n inputsToSave: ['x', 'indices'],\n gradFunc: (dy, saved, attrs) => {\n const [x, indices] = saved;\n const { axis } = attrs;\n const parsedAxis = parseAxisParam(axis, x.shape)[0];\n const derX = () => {\n const paramsShape = x.shape;\n const indicesSize = indices.size;\n const outerShape = paramsShape.slice(0, parsedAxis);\n const outerDims = outerShape.length;\n const innerShape = paramsShape.slice(axis, paramsShape.length).slice(1);\n const innerDims = innerShape.length;\n const outerAxesIndices = arrayRange(0, outerDims);\n const innerAxesIndices = arrayRange(outerDims + 1, outerDims + 1 + innerDims);\n const valuesShape = arrayConcat([outerShape, [indicesSize], innerShape]);\n const values = reshape(dy, valuesShape);\n const reshapedIndices = reshape(indices, [indicesSize]);\n const transposeDims = arrayConcat([[outerDims], outerAxesIndices, innerAxesIndices]);\n const valuesTranspose = transpose(values, transposeDims);\n let paramsGrad = unsortedSegmentSum(valuesTranspose, reshapedIndices, x.shape[parsedAxis]);\n const invertTransposeDims = getUndoAxesPermutation(transposeDims);\n paramsGrad = transpose(paramsGrad, invertTransposeDims);\n return paramsGrad;\n };\n return { x: derX, indices: () => indices };\n }\n};\nfunction arrayRange(start, stop) {\n const result = [];\n for (let i = start; i < stop; ++i) {\n result.push(i);\n }\n return result;\n}\nfunction arrayConcat(arrays) {\n const result = [];\n for (let i = 0; i < arrays.length; ++i) {\n for (let j = 0; j < arrays[i].length; ++j) {\n result.push(arrays[i][j]);\n }\n }\n return result;\n}\n//# sourceMappingURL=GatherV2_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { GreaterEqual } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const greaterEqualGradConfig = {\n kernelName: GreaterEqual,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved) => {\n const [a, b] = saved;\n return { a: () => zerosLike(a), b: () => zerosLike(b) };\n }\n};\n//# sourceMappingURL=GreaterEqual_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Identity } from '../kernel_names';\nimport { cast } from '../ops/cast';\nexport const identityGradConfig = {\n kernelName: Identity,\n gradFunc: (dy) => {\n return { x: () => cast(dy, 'float32') };\n }\n};\n//# sourceMappingURL=Identity_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { IsFinite } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const isFiniteGradConfig = {\n kernelName: IsFinite,\n gradFunc: (dy) => {\n // TODO(nsthorat): Let gradients be null for cases where we want to stop\n // backpropgation.\n return { x: () => zerosLike(dy) };\n }\n};\n//# sourceMappingURL=IsFinite_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { IsInf } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const isInfGradConfig = {\n kernelName: IsInf,\n gradFunc: (dy) => {\n // TODO(nsthorat): Let gradients be null for cases where we want to stop\n // backpropgation.\n return { x: () => zerosLike(dy) };\n }\n};\n//# sourceMappingURL=IsInf_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { IsNan } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const isNanGradConfig = {\n kernelName: IsNan,\n gradFunc: (dy) => {\n // TODO(nsthorat): Let gradients be null for cases where we want to stop\n // backpropgation.\n return { x: () => zerosLike(dy) };\n }\n};\n//# sourceMappingURL=IsNan_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Log1p } from '../kernel_names';\nimport { add } from '../ops/add';\nimport { div } from '../ops/div';\nexport const log1pGradConfig = {\n kernelName: Log1p,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => div(dy, add(x, 1)) };\n }\n};\n//# sourceMappingURL=Log1p_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Log } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { div } from '../ops/div';\nexport const logGradConfig = {\n kernelName: Log,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => div(dy, cast(x, 'float32')) };\n }\n};\n//# sourceMappingURL=Log_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { LogSoftmax } from '../kernel_names';\nimport { exp } from '../ops/exp';\nimport { mul } from '../ops/mul';\nimport { sub } from '../ops/sub';\nimport { sum } from '../ops/sum';\nexport const logSoftmaxGradConfig = {\n kernelName: LogSoftmax,\n inputsToSave: [],\n outputsToSave: [true],\n gradFunc: (dy, saved, attrs) => {\n const [value] = saved;\n const { axis } = attrs;\n return {\n logits: () => {\n const keepDims = true;\n const softmax = exp(value);\n return sub(dy, mul(sum(dy, axis, keepDims), softmax));\n }\n };\n }\n};\n//# sourceMappingURL=LogSoftmax_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { LRNBackprop } from '../kernel_names';\nimport { op } from './operation';\nfunction localResponseNormalizationBackprop_(x, y, dy, depthRadius = 5, bias = 1, alpha = 1, beta = 0.5) {\n const forward = backend => backend.LRNGrad(dy, x, y, depthRadius, bias, alpha, beta);\n const inputs = { x, y, dy };\n const attrs = { depthRadius, bias, alpha, beta };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, LRNBackprop, attrs);\n}\nexport const localResponseNormalizationBackprop = op({ localResponseNormalizationBackprop_ });\n//# sourceMappingURL=local_response_normalization_backprop.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { LRN } from '../kernel_names';\nimport { localResponseNormalizationBackprop } from '../ops/local_response_normalization_backprop';\nexport const lrnGradConfig = {\n kernelName: LRN,\n inputsToSave: ['x'],\n outputsToSave: [true],\n gradFunc: (dy, saved, attrs) => {\n const [x, y] = saved;\n const { depthRadius, bias, alpha, beta } = attrs;\n return {\n x: () => localResponseNormalizationBackprop(x, y, dy, depthRadius, bias, alpha, beta)\n };\n }\n};\n//# sourceMappingURL=LRN_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as axis_util from '../ops/axis_util';\nimport { cast } from '../ops/cast';\nimport { equal } from '../ops/equal';\nimport { mul } from '../ops/mul';\nimport { reshape } from '../ops/reshape';\n/**\n * Gradient helper function for the min and max operations.\n */\nexport function gradForMinAndMax(dy, y, xOrig, origAxes) {\n if (y.rank < xOrig.rank) {\n y = reshape(y, axis_util.expandShapeToKeepDim(y.shape, origAxes));\n }\n if (dy.rank < xOrig.rank) {\n dy = reshape(dy, axis_util.expandShapeToKeepDim(dy.shape, origAxes));\n }\n return {\n x: () => {\n const dx = mul(dy, cast(equal(xOrig, y), dy.dtype));\n return dx;\n }\n };\n}\n//# sourceMappingURL=min_max_grad_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Max } from '../kernel_names';\nimport * as util from '../util';\nimport { gradForMinAndMax } from './min_max_grad_util';\nexport const maxGradConfig = {\n kernelName: Max,\n inputsToSave: ['x'],\n outputsToSave: [true],\n gradFunc: (dy, saved, attrs) => {\n const maxAttrs = attrs;\n const { reductionIndices } = maxAttrs;\n const x = saved[0];\n const y = saved[1];\n const origAxes = util.parseAxisParam(reductionIndices, x.shape);\n const maxGrad = gradForMinAndMax(dy, y, x, origAxes);\n return {\n x: () => {\n return maxGrad['x']();\n }\n };\n }\n};\n//# sourceMappingURL=Max_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Maximum } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { greaterEqual } from '../ops/greater_equal';\nimport { less } from '../ops/less';\nimport { mul } from '../ops/mul';\nexport const maximumGradConfig = {\n kernelName: Maximum,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved) => {\n const [a, b] = saved;\n const derA = () => mul(dy, cast(greaterEqual(a, b), 'float32'));\n const derB = () => mul(dy, cast(less(a, b), 'float32'));\n return { a: derA, b: derB };\n }\n};\n//# sourceMappingURL=Maximum_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { MaxPool3DBackprop } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the backprop of a 3d max pool.\n *\n * @param dy The dy error, of rank 5 of shape\n * [batchSize, depth, height, width, channels].\n * assumed.\n * @param input The original input image, of rank 5 or rank 4 of shape\n * [batchSize, depth, height, width, channels].\n * @param output The original output image, of rank 5 of shape\n * [batchSize, outDepth, outHeight, outWidth, channels].\n * @param filterSize The filter size:\n * `[filterDepth, filterHeight, filterWidth]`.\n * `filterSize` is a single number,\n * then `filterDepth == filterHeight == filterWidth`.\n * @param strides The strides of the pooling:\n * `[strideDepth, strideHeight, strideWidth]`. If\n * `strides` is a single number, then `strideHeight == strideWidth`.\n * @param dilations Deprecated, this field will be gone in v3.0.0.\n * The dilation rates: `[dilationDepth, dilationHeight, dilationWidth]`\n * in which we sample input values across the depth, height and width\n * dimensions in dilated pooling.\n * Defaults to `[1, 1, 1]`. If `dilations` is a single number,\n * then `dilationDepth == dilationHeight == dilationWidth`.\n * If it is greater than 1, then all values of `strides` must be 1.\n * @param pad A string from: 'same', 'valid'. 
The type of padding algorithm\n * used in the forward prop of the op.\n * @param dimRoundingMode A string from: 'ceil', 'round', 'floor'. The\n * rounding mode used when computing output dimensions if pad is a\n * number. If none is provided, it will not round and error if the output\n * is of fractional size.\n */\nfunction maxPool3dBackprop_(dy, input, output, filterSize, strides, dilations = [1, 1, 1], pad, dimRoundingMode) {\n const $dy = convertToTensor(dy, 'dy', 'maxPool3dBackprop');\n const $input = convertToTensor(input, 'input', 'maxPool3dBackprop');\n const $output = convertToTensor(output, 'output', 'maxPool3dBackprop');\n let dy5D = $dy;\n let input5D = $input;\n let output5D = $output;\n let reshapedTo5D = false;\n if ($input.rank === 4) {\n reshapedTo5D = true;\n dy5D = reshape($dy, [1, $dy.shape[0], $dy.shape[1], $dy.shape[2], $dy.shape[3]]);\n input5D = reshape($input, [\n 1, $input.shape[0], $input.shape[1], $input.shape[2], $input.shape[3]\n ]);\n output5D = reshape($output, [\n 1, $output.shape[0], $output.shape[1], $output.shape[2], $output.shape[3]\n ]);\n }\n util.assert(dy5D.rank === 5, () => `Error in maxPool3dBackprop: dy must be rank 5 but got rank ` +\n `${dy5D.rank}.`);\n util.assert(input5D.rank === 5, () => `Error in maxPool3dBackprop: input must be rank 5 but got rank ` +\n `${input5D.rank}.`);\n util.assert(output5D.rank === 5, () => `Error in maxPool3dBackprop: output must be rank 5 but got rank ` +\n `${output5D.rank}.`);\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in maxPool3dBackprop: Either strides or dilations ' +\n `must be 1. Got strides ${strides} and dilations '${dilations}'`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in maxPool3dBackprop: pad must be an integer when ` +\n `using, dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const forward = backend => {\n const convInfo = conv_util.computePool3DInfo(input5D.shape, filterSize, strides, dilations, pad, dimRoundingMode);\n return backend.maxPool3dBackprop(dy5D, input5D, output5D, convInfo);\n };\n const inputs = { dy: dy5D, input: input5D, output: output5D };\n const attrs = { filterSize, strides, dilations, pad, dimRoundingMode };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, MaxPool3DBackprop, attrs);\n if (reshapedTo5D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3], res.shape[4]]);\n }\n return res;\n}\nexport const maxPool3dBackprop = op({ maxPool3dBackprop_ });\n//# sourceMappingURL=max_pool_3d_backprop.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { MaxPool3D } from '../kernel_names';\nimport { maxPool3dBackprop } from '../ops/max_pool_3d_backprop';\nexport const maxPool3DGradConfig = {\n kernelName: MaxPool3D,\n inputsToSave: ['x'],\n outputsToSave: [true],\n gradFunc: (dy, saved, attrs) => {\n const [x, y] = saved;\n const { filterSize, strides, dilations, pad, dimRoundingMode } = attrs;\n const $dilations = dilations == null ? [1, 1, 1] : dilations;\n return {\n x: () => maxPool3dBackprop(dy, x, y, filterSize, strides, $dilations, pad, dimRoundingMode)\n };\n }\n};\n//# sourceMappingURL=MaxPool3D_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { MaxPoolBackprop } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\n/**\n * Computes the backprop of a 2D max pool.\n *\n * @param dy The dy error, of rank 4 or rank 3 of shape\n * [batchSize, height, width, channels]. If rank 3, batch of 1 is\n * assumed.\n * @param input The original input image, of rank 4, of shape\n * [batchSize, height, width, channels].\n * @param output The original output image, of rank 4, of shape\n * [batchSize, outHeight, outWidth, channels].\n * @param filterSize The filter size: `[filterHeight, filterWidth]`. If\n * `filterSize` is a single number, then `filterHeight == filterWidth`.\n * @param strides The strides of the pooling: `[strideHeight, strideWidth]`. If\n * `strides` is a single number, then `strideHeight == strideWidth`.\n * @param pad A string from: 'same', 'valid'. The type of padding algorithm\n * used in the forward prop of the op.\n * @param dimRoundingMode A string from: 'ceil', 'round', 'floor'. The\n * rounding mode used when computing output dimensions if pad is a\n * number. 
If none is provided, it will not round and error if the output\n * is of fractional size.\n */\nfunction maxPoolBackprop_(dy, input, output, filterSize, strides, pad, dimRoundingMode) {\n const $dy = convertToTensor(dy, 'dy', 'maxPoolBackprop');\n const $input = convertToTensor(input, 'input', 'maxPoolBackprop');\n const $output = convertToTensor(output, 'output', 'maxPoolBackprop');\n util.assert($input.rank === $dy.rank, () => `Rank of input (${$input.rank}) does not match rank of dy ` +\n `(${$dy.rank})`);\n util.assert($dy.rank === 4, () => `Error in maxPoolBackprop: dy must be rank 4 but got rank ` +\n `${$dy.rank}.`);\n util.assert($input.rank === 4, () => `Error in maxPoolBackprop: input must be rank 4 but got rank ` +\n `${$input.rank}.`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in maxPoolBackprop: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const forward = backend => {\n const convInfo = conv_util.computePool2DInfo($input.shape, filterSize, strides, 1 /* dilations */, pad, dimRoundingMode);\n return backend.maxPoolBackprop($dy, $input, $output, convInfo);\n };\n const inputs = { dy: $dy, input: $input, output: $output };\n const attrs = { filterSize, strides, pad, dimRoundingMode };\n return ENGINE.runKernelFunc(forward, inputs, null, MaxPoolBackprop, attrs);\n}\nexport const maxPoolBackprop = op({ maxPoolBackprop_ });\n//# sourceMappingURL=max_pool_backprop.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { MaxPool } from '../kernel_names';\nimport { maxPoolBackprop } from '../ops/max_pool_backprop';\nexport const maxPoolGradConfig = {\n kernelName: MaxPool,\n inputsToSave: ['x'],\n outputsToSave: [true],\n gradFunc: (dy, saved, attrs) => {\n const [x, y] = saved;\n const { filterSize, strides, pad } = attrs;\n return {\n x: () => maxPoolBackprop(dy, x, y, filterSize, strides, pad)\n };\n }\n};\n//# sourceMappingURL=MaxPool_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Min } from '../kernel_names';\nimport * as util from '../util';\nimport { gradForMinAndMax } from './min_max_grad_util';\nexport const minGradConfig = {\n kernelName: Min,\n inputsToSave: ['x'],\n outputsToSave: [true],\n gradFunc: (dy, saved, attrs) => {\n const minAttrs = attrs;\n const { axis } = minAttrs;\n const [x, y] = saved;\n const origAxes = util.parseAxisParam(axis, x.shape);\n const minGrad = gradForMinAndMax(dy, y, x, origAxes);\n return {\n x: () => {\n return minGrad['x']();\n }\n };\n }\n};\n//# sourceMappingURL=Min_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Minimum } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { greater } from '../ops/greater';\nimport { lessEqual } from '../ops/less_equal';\nimport { mul } from '../ops/mul';\nexport const minimumGradConfig = {\n kernelName: Minimum,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved) => {\n const [a, b] = saved;\n const derA = () => mul(dy, cast(lessEqual(a, b), 'float32'));\n const derB = () => mul(dy, cast(greater(a, b), 'float32'));\n return { a: derA, b: derB };\n }\n};\n//# sourceMappingURL=Minimum_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { MirrorPad } from '../kernel_names';\nimport { slice } from '../ops/slice';\nexport const mirrorPadGradConfig = {\n kernelName: MirrorPad,\n inputsToSave: ['x'],\n gradFunc: (dy, saved, attrs) => {\n // Pad introduces values around the original tensor, so the gradient\n // slices the original shape out of the gradient.\n const x = saved[0];\n const { paddings } = attrs;\n const begin = paddings.map(p => p[0]);\n return { x: () => slice(dy, begin, x.shape) };\n }\n};\n//# sourceMappingURL=MirrorPad_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Mod } from '../kernel_names';\nimport { assertAndGetBroadcastShape, getReductionAxes } from '../ops/broadcast_util';\nimport { div } from '../ops/div';\nimport { floor } from '../ops/floor';\nimport { mul } from '../ops/mul';\nimport { neg } from '../ops/neg';\nimport { reshape } from '../ops/reshape';\nimport { sum } from '../ops/sum';\nexport const modGradConfig = {\n kernelName: Mod,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved) => {\n const [a, b] = saved;\n const outShape = assertAndGetBroadcastShape(a.shape, b.shape);\n const derA = () => {\n const reduceAxes = getReductionAxes(a.shape, outShape);\n if (reduceAxes.length > 0) {\n return reshape(sum(dy, reduceAxes), a.shape);\n }\n return dy;\n };\n const derB = () => {\n const res = mul(dy, neg(floor(div(a, b))));\n const reduceAxes = getReductionAxes(b.shape, outShape);\n if (reduceAxes.length > 0) {\n return reshape(sum(res, reduceAxes), b.shape);\n }\n return res;\n };\n return { a: derA, b: derB };\n }\n};\n//# sourceMappingURL=Mod_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Multiply } from '../kernel_names';\nimport { assertAndGetBroadcastShape, getReductionAxes } from '../ops/broadcast_util';\nimport { cast } from '../ops/cast';\nimport { mul } from '../ops/mul';\nimport { reshape } from '../ops/reshape';\nimport { sum } from '../ops/sum';\nexport const multiplyGradConfig = {\n kernelName: Multiply,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved) => {\n const [a, b] = saved;\n const outShape = assertAndGetBroadcastShape(a.shape, b.shape);\n const derA = () => {\n const res = mul(dy, cast(b, 'float32'));\n const reduceAxes = getReductionAxes(a.shape, outShape);\n if (reduceAxes.length > 0) {\n return reshape(sum(res, reduceAxes), a.shape);\n }\n return res;\n };\n const derB = () => {\n const res = mul(dy, cast(a, 'float32'));\n const reduceAxes = getReductionAxes(b.shape, outShape);\n if (reduceAxes.length > 0) {\n return reshape(sum(res, reduceAxes), b.shape);\n }\n return res;\n };\n return { a: derA, b: derB };\n }\n};\n//# sourceMappingURL=Multiply_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Negate } from '../kernel_names';\nimport { neg } from '../ops/neg';\nexport const negateGradConfig = {\n kernelName: Negate,\n gradFunc: (dy) => {\n return { x: () => neg(dy) };\n }\n};\n//# sourceMappingURL=Negate_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { OneHot } from '../kernel_names';\nimport { zeros } from '../ops/zeros';\nexport const oneHotGradConfig = {\n kernelName: OneHot,\n inputsToSave: ['indices'],\n gradFunc: (dy, saved) => {\n const indices = saved[0];\n return { indices: () => zeros(indices.shape, 'float32') };\n }\n};\n//# sourceMappingURL=OneHot_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { OnesLike } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const onesLikeGradConfig = {\n kernelName: OnesLike,\n gradFunc: (dy) => {\n return { x: () => zerosLike(dy) };\n }\n};\n//# sourceMappingURL=OnesLike_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { PadV2 } from '../kernel_names';\nimport { slice } from '../ops/slice';\nexport const padV2GradConfig = {\n kernelName: PadV2,\n inputsToSave: ['x'],\n gradFunc: (dy, saved, attrs) => {\n // Pad introduces values around the original tensor, so the gradient\n // slices the original shape out of the gradient.\n const x = saved[0];\n const { paddings } = attrs;\n const begin = paddings.map(p => p[0]);\n return { x: () => slice(dy, begin, x.shape) };\n }\n};\n//# sourceMappingURL=PadV2_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Pow } from '../kernel_names';\nimport * as broadcast_util from '../ops/broadcast_util';\nimport { cast } from '../ops/cast';\nimport { greater } from '../ops/greater';\nimport { log } from '../ops/log';\nimport { mul } from '../ops/mul';\nimport { pow } from '../ops/pow';\nimport { reshape } from '../ops/reshape';\nimport { scalar } from '../ops/scalar';\nimport { sub } from '../ops/sub';\nimport { sum } from '../ops/sum';\nimport { where } from '../ops/where';\nimport { zerosLike } from '../ops/zeros_like';\nexport const powGradConfig = {\n kernelName: Pow,\n inputsToSave: ['a', 'b'],\n outputsToSave: [true],\n gradFunc: (dy, saved) => {\n const [a, b, y] = saved;\n const base = a;\n const exp = b;\n const outShape = broadcast_util.assertAndGetBroadcastShape(base.shape, exp.shape);\n const derBase = () => {\n const expFloat = cast(exp, 'float32');\n let res = mul(dy, mul(expFloat, pow(base, sub(expFloat, scalar(1)))));\n const reduceAxes = broadcast_util.getReductionAxes(base.shape, outShape);\n if (reduceAxes.length > 0) {\n res = sum(res, reduceAxes);\n }\n return reshape(res, base.shape);\n };\n const derExp = () => {\n const condition = greater(base, 0);\n const logBase = where(condition, log(base), zerosLike(base));\n let res = mul(dy, mul(y, logBase));\n const reduceAxes = broadcast_util.getReductionAxes(exp.shape, outShape);\n if (reduceAxes.length > 0) {\n res = sum(res, reduceAxes);\n }\n return reshape(res, exp.shape);\n };\n return { a: derBase, b: derExp };\n }\n};\n//# sourceMappingURL=Pow_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Prelu } from '../kernel_names';\nimport { getReductionAxes } from '../ops/broadcast_util';\nimport { greater } from '../ops/greater';\nimport { mul } from '../ops/mul';\nimport { reshape } from '../ops/reshape';\nimport { sum } from '../ops/sum';\nimport { where } from '../ops/where';\nimport { zerosLike } from '../ops/zeros_like';\nexport const preluGradConfig = {\n kernelName: Prelu,\n inputsToSave: ['x', 'alpha'],\n gradFunc: (dy, saved) => {\n const [x, alpha] = saved;\n const mask = greater(x, 0);\n return {\n x: () => where(mask, dy, mul(dy, alpha)),\n alpha: () => {\n let res = where(mask, zerosLike(dy), mul(dy, x));\n const reduceAxes = getReductionAxes(alpha.shape, dy.shape);\n if (reduceAxes.length > 0) {\n res = sum(res, reduceAxes);\n }\n return reshape(res, alpha.shape);\n }\n };\n }\n};\n//# sourceMappingURL=Prelu_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Reciprocal } from '../kernel_names';\nimport { div } from '../ops/div';\nimport { neg } from '../ops/neg';\nimport { square } from '../ops/square';\nexport const reciprocalGradConfig = {\n kernelName: Reciprocal,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => div(dy, neg(square(x))) };\n }\n};\n//# sourceMappingURL=Reciprocal_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Relu6 } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { lessEqual } from '../ops/less_equal';\nimport { mul } from '../ops/mul';\nimport { step } from '../ops/step';\nexport const relu6GradConfig = {\n kernelName: Relu6,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n const mask = mul(lessEqual(x, 6), step(x));\n return { x: () => mul(dy, cast(mask, 'float32')) };\n }\n};\n//# sourceMappingURL=Relu6_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Relu } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { mul } from '../ops/mul';\nimport { step } from '../ops/step';\nexport const reluGradConfig = {\n kernelName: Relu,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => mul(dy, cast(step(x), 'float32')) };\n }\n};\n//# sourceMappingURL=Relu_grad.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Reshape } from '../kernel_names';\nimport { reshape } from '../ops/reshape';\nexport const reshapeGradConfig = {\n kernelName: Reshape,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => reshape(dy, x.shape) };\n }\n};\n//# sourceMappingURL=Reshape_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { ResizeBilinear, ResizeBilinearGrad } from '../kernel_names';\nexport const resizeBilinearGradConfig = {\n kernelName: ResizeBilinear,\n inputsToSave: ['images'],\n gradFunc: (dy, saved, attrs) => {\n const [images] = saved;\n const backPropKernelFunc = (backend) => {\n const { alignCorners } = attrs;\n return backend.resizeBilinearBackprop(dy, images, alignCorners);\n };\n const inputs = { images };\n const imagesDer = () => ENGINE.runKernelFunc(backPropKernelFunc, inputs, null /* gradient */, ResizeBilinearGrad, attrs);\n return { images: imagesDer };\n }\n};\n//# sourceMappingURL=ResizeBilinear_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { ResizeNearestNeighbor, ResizeNearestNeighborGrad } from '../kernel_names';\nexport const resizeNearestNeighborGradConfig = {\n kernelName: ResizeNearestNeighbor,\n inputsToSave: ['images'],\n gradFunc: (dy, saved, attrs) => {\n const [images] = saved;\n const backPropKernelFunc = (backend) => {\n const { alignCorners } = attrs;\n return backend.resizeNearestNeighborBackprop(dy, images, alignCorners);\n };\n const inputs = { images };\n const imagesDer = () => ENGINE.runKernelFunc(backPropKernelFunc, inputs, null /* gradient */, ResizeNearestNeighborGrad, attrs);\n return { images: imagesDer };\n }\n};\n//# sourceMappingURL=ResizeNearestNeighbor_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Reverse } from '../kernel_names';\nimport { reverse } from '../ops/reverse';\nimport { parseAxisParam } from '../util';\nexport const reverseGradConfig = {\n kernelName: Reverse,\n gradFunc: (dy, saved, attrs) => {\n const { dims } = attrs;\n const axes = parseAxisParam(dims, dy.shape);\n return { x: () => reverse(dy, axes) };\n }\n};\n//# sourceMappingURL=Reverse_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Round } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const roundGradConfig = {\n kernelName: Round,\n gradFunc: (dy) => {\n // TODO(nsthorat): Let gradients be null for cases where we want to stop\n // backpropgation.\n return { x: () => zerosLike(dy) };\n }\n};\n//# sourceMappingURL=Round_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Rsqrt } from '../kernel_names';\nimport { div } from '../ops/div';\nimport { mul } from '../ops/mul';\nimport { neg } from '../ops/neg';\nimport { pow } from '../ops/pow';\nexport const rsqrtGradConfig = {\n kernelName: Rsqrt,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => neg(div(dy, mul(pow(x, 1.5), 2))) };\n }\n};\n//# sourceMappingURL=Rsqrt_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { SelectV2 } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { logicalNot } from '../ops/logical_not';\nimport { mul } from '../ops/mul';\nimport { zerosLike } from '../ops/zeros_like';\nexport const selectV2PoolGradConfig = {\n kernelName: SelectV2,\n inputsToSave: ['condition'],\n gradFunc: (dy, saved) => {\n const [condition] = saved;\n return {\n // TODO(julianoks): Return null for condition gradient\n // when backprop supports it.\n condition: () => cast(zerosLike(condition), 'float32'),\n t: () => mul(dy, cast(condition, dy.dtype)),\n e: () => mul(dy, cast(logicalNot(condition), dy.dtype))\n };\n }\n};\n//# sourceMappingURL=SelectV2_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Selu } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { exp } from '../ops/exp';\nimport { greater } from '../ops/greater';\nimport { mul } from '../ops/mul';\nimport { scalar } from '../ops/scalar';\nimport { SELU_SCALE, SELU_SCALEALPHA } from '../ops/selu_util';\nimport { where } from '../ops/where';\nexport const seluGradConfig = {\n kernelName: Selu,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return {\n x: () => {\n const mask = greater(x, scalar(0));\n const scaleAlpha = scalar(SELU_SCALEALPHA);\n const scale = scalar(SELU_SCALE);\n const greaterThanZeroDer = mul(dy, scale);\n const lessEqualZeroDer = mul(mul(dy, scaleAlpha), exp(cast(x, 'float32')));\n return where(mask, greaterThanZeroDer, lessEqualZeroDer);\n }\n };\n }\n};\n//# sourceMappingURL=Selu_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sigmoid } from '../kernel_names';\nimport { mul } from '../ops/mul';\nimport { scalar } from '../ops/scalar';\nimport { sub } from '../ops/sub';\nexport const sigmoidGradConfig = {\n kernelName: Sigmoid,\n outputsToSave: [true],\n gradFunc: (dy, saved) => {\n const [y] = saved;\n return { x: () => mul(dy, mul(y, sub(scalar(1), y))) };\n }\n};\n//# sourceMappingURL=Sigmoid_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sign } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const signGradConfig = {\n kernelName: Sign,\n gradFunc: (dy) => {\n return { x: () => zerosLike(dy) };\n }\n};\n//# sourceMappingURL=Sign_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sin } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { cos } from '../ops/cos';\nimport { mul } from '../ops/mul';\nexport const sinGradConfig = {\n kernelName: Sin,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => mul(cos(cast(x, 'float32')), dy) };\n }\n};\n//# sourceMappingURL=Sin_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sinh } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { cosh } from '../ops/cosh';\nimport { mul } from '../ops/mul';\nexport const sinhGradConfig = {\n kernelName: Sinh,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => mul(cosh(cast(x, 'float32')), dy) };\n }\n};\n//# sourceMappingURL=Sinh_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Slice } from '../kernel_names';\nimport { pad } from '../ops/pad';\nimport { parseSliceParams } from '../ops/slice_util';\nexport const sliceGradConfig = {\n kernelName: Slice,\n inputsToSave: ['x'],\n gradFunc: (dy, saved, attrs) => {\n const [x] = saved;\n const { begin, size } = attrs;\n const inputShape = x.shape;\n const [begin_, size_] = parseSliceParams(x, begin, size);\n // Create an Nx2 padding where the first column represents how many\n // zeros are prepended (at start) for each dimension, and the second\n // column indicates how many zeros are appended (at end).\n // The number of zeros to append is the shape of the input\n // elementwise-subtracted by both the begin vector and sizes vector.\n const paddings = [];\n for (let i = 0; i < dy.rank; i++) {\n paddings.push([begin_[i], inputShape[i] - begin_[i] - size_[i]]);\n }\n return { x: () => pad(dy, paddings) };\n }\n};\n//# sourceMappingURL=Slice_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Softmax } from '../kernel_names';\nimport { mul } from '../ops/mul';\nimport { sub } from '../ops/sub';\nimport { sum } from '../ops/sum';\nexport const softmaxGradConfig = {\n kernelName: Softmax,\n outputsToSave: [true],\n gradFunc: (dy, saved, attrs) => {\n const [y] = saved;\n const { dim } = attrs;\n const keepDims = true;\n const dyTimesY = mul(dy, y);\n return {\n logits: () => sub(dyTimesY, mul(sum(dyTimesY, [dim], keepDims), y))\n };\n }\n};\n//# sourceMappingURL=Softmax_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Softplus } from '../kernel_names';\nimport { mul } from '../ops/mul';\nimport { sigmoid } from '../ops/sigmoid';\nexport const softplusGradConfig = {\n kernelName: Softplus,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => mul(dy, sigmoid(x)) };\n }\n};\n//# sourceMappingURL=Softplus_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { SpaceToBatchND } from '../kernel_names';\nimport { batchToSpaceND } from '../ops/batch_to_space_nd';\nexport const spaceToBatchNDGradConfig = {\n kernelName: SpaceToBatchND,\n gradFunc: (dy, saved, attrs) => {\n const { blockShape, paddings } = attrs;\n return { x: () => batchToSpaceND(dy, blockShape, paddings) };\n }\n};\n//# sourceMappingURL=SpaceToBatchND_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { SplitV } from '../kernel_names';\nimport { concat } from '../ops/concat';\nexport const splitVGradConfig = {\n kernelName: SplitV,\n gradFunc: (dy, saved, attrs) => {\n const { axis } = attrs;\n return { x: () => concat(dy, axis) };\n }\n};\n//# sourceMappingURL=SplitV_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sqrt } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { div } from '../ops/div';\nimport { mul } from '../ops/mul';\nimport { sqrt } from '../ops/sqrt';\nexport const sqrtGradConfig = {\n kernelName: Sqrt,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => div(dy, mul(sqrt(cast(x, 'float32')), 2)) };\n }\n};\n//# sourceMappingURL=Sqrt_grad.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Square } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { mul } from '../ops/mul';\nexport const squareGradConfig = {\n kernelName: Square,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => mul(dy, mul(cast(x, 'float32'), 2)) };\n }\n};\n//# sourceMappingURL=Square_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { SquaredDifference } from '../kernel_names';\nimport { mul } from '../ops/mul';\nimport { scalar } from '../ops/scalar';\nimport { sub } from '../ops/sub';\nexport const squaredDifferenceGradConfig = {\n kernelName: SquaredDifference,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved) => {\n const [a, b] = saved;\n const two = scalar(2);\n const derA = () => mul(dy, mul(two, sub(a, b)));\n const derB = () => mul(dy, mul(two, sub(b, a)));\n return { a: derA, b: derB };\n }\n};\n//# sourceMappingURL=SquaredDifference_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Step } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const stepGradConfig = {\n kernelName: Step,\n gradFunc: (dy) => {\n // TODO(manrajgrover): Return null for gradients when backprop supports\n // it.\n return { x: () => zerosLike(dy) };\n }\n};\n//# sourceMappingURL=Step_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sub } from '../kernel_names';\nimport * as broadcast_util from '../ops/broadcast_util';\nimport { neg } from '../ops/neg';\nimport { reshape } from '../ops/reshape';\nimport { sum } from '../ops/sum';\nexport const subGradConfig = {\n kernelName: Sub,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved) => {\n const [a, b] = saved;\n const outShape = broadcast_util.assertAndGetBroadcastShape(a.shape, b.shape);\n const derA = () => {\n let res = dy;\n const reduceAxes = broadcast_util.getReductionAxes(a.shape, outShape);\n if (reduceAxes.length > 0) {\n res = sum(res, reduceAxes);\n }\n return reshape(res, a.shape);\n };\n const derB = () => {\n let res = dy;\n const reduceAxes = broadcast_util.getReductionAxes(b.shape, outShape);\n if (reduceAxes.length > 0) {\n res = sum(res, reduceAxes);\n }\n return reshape(neg(res), b.shape);\n };\n return { a: derA, b: derB };\n }\n};\n//# sourceMappingURL=Sub_grad.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sum } from '../kernel_names';\nimport { mul } from '../ops/mul';\nimport { ones } from '../ops/ones';\nimport { reshape } from '../ops/reshape';\nimport { parseAxisParam } from '../util';\nexport const sumGradConfig = {\n kernelName: Sum,\n inputsToSave: ['x'],\n gradFunc: (dy, saved, attrs) => {\n const [x] = saved;\n const expandedDyShape = x.shape.slice();\n const { axis } = attrs;\n const axes = parseAxisParam(axis, x.shape);\n axes.forEach(axis => {\n expandedDyShape[axis] = 1;\n });\n const expandedDy = reshape(dy, expandedDyShape);\n const derX = mul(expandedDy, ones(x.shape, 'float32'));\n return { x: () => derX };\n }\n};\n//# sourceMappingURL=Sum_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Tan } from '../kernel_names';\nimport { cos } from '../ops/cos';\nimport { div } from '../ops/div';\nimport { square } from '../ops/square';\nexport const tanGradConfig = {\n kernelName: Tan,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => div(dy, square(cos(x))) };\n }\n};\n//# sourceMappingURL=Tan_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Tanh } from '../kernel_names';\nimport { mul } from '../ops/mul';\nimport { scalar } from '../ops/scalar';\nimport { square } from '../ops/square';\nimport { sub } from '../ops/sub';\nexport const tanhGradConfig = {\n kernelName: Tanh,\n outputsToSave: [true],\n gradFunc: (dy, saved) => {\n const [y] = saved;\n return { x: () => mul(sub(scalar(1), square(y)), dy) };\n }\n};\n//# sourceMappingURL=Tanh_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Tile } from '../kernel_names';\nimport { add } from '../ops/add';\nimport { slice } from '../ops/slice';\nimport { zerosLike } from '../ops/zeros_like';\nexport const tileGradConfig = {\n kernelName: Tile,\n inputsToSave: ['x'],\n gradFunc: (dy, saved, attrs) => {\n const [x] = saved;\n const { reps } = attrs;\n const derX = () => {\n let xGrad = zerosLike(x);\n // TODO(cais): Maybe reduce memory footprint by avoiding repeated\n // slicing.\n if (x.rank === 1) {\n for (let i = 0; i < reps[0]; ++i) {\n xGrad = add(xGrad, slice(dy, [i * x.shape[0]], [x.shape[0]]));\n }\n }\n else if (x.rank === 2) {\n for (let i = 0; i < reps[0]; ++i) {\n for (let j = 0; j < reps[1]; ++j) {\n xGrad = add(xGrad, slice(dy, [i * x.shape[0], j * x.shape[1]], [\n x.shape[0], x.shape[1]\n ]));\n }\n }\n }\n else if (x.rank === 3) {\n for (let i = 0; i < reps[0]; ++i) {\n for (let j = 0; j < reps[1]; ++j) {\n for (let k = 0; k < reps[2]; ++k) {\n xGrad =\n add(xGrad, slice(dy, [i * x.shape[0], j * x.shape[1], k * x.shape[2]], [x.shape[0], x.shape[1], x.shape[2]]));\n }\n }\n }\n }\n else if (x.rank === 4) {\n for (let i = 0; i < reps[0]; ++i) {\n for (let j = 0; j < reps[1]; ++j) {\n for (let k = 0; k < reps[2]; ++k) {\n for (let l = 0; l < reps[3]; ++l) {\n xGrad =\n add(xGrad, slice(dy, [\n i * x.shape[0], j * x.shape[1], k * x.shape[2],\n l * x.shape[3]\n ], [x.shape[0], x.shape[1], x.shape[2], x.shape[3]]));\n }\n }\n }\n }\n }\n else {\n throw new Error(`Gradient for tile operation is not implemented for rank-` +\n `${x.rank} tensors yet.`);\n }\n return xGrad;\n };\n return { x: derX };\n },\n};\n//# sourceMappingURL=Tile_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Transpose } from '../kernel_names';\nimport * as axis_util from '../ops/axis_util';\nimport { transpose } from '../ops/transpose';\nexport const transposeGradConfig = {\n kernelName: Transpose,\n gradFunc: (dy, saved, attrs) => {\n const transposeAttrs = attrs;\n const { perm } = transposeAttrs;\n const undoPerm = axis_util.getUndoAxesPermutation(perm);\n return { x: () => transpose(dy, undoPerm) };\n }\n};\n//# sourceMappingURL=Transpose_grad.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Unpack } from '../kernel_names';\nimport { stack } from '../ops/stack';\nexport const unpackGradConfig = {\n kernelName: Unpack,\n gradFunc: (dy, saved, attrs) => {\n const unpackAttrs = attrs;\n const { axis } = unpackAttrs;\n return { value: () => stack(dy, axis) };\n }\n};\n//# sourceMappingURL=Unpack_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { UnsortedSegmentSum } from '../kernel_names';\nimport { expandDims } from '../ops/expand_dims';\nimport { gather } from '../ops/gather';\nimport { greaterEqual } from '../ops/greater_equal';\nimport { logicalAnd } from '../ops/logical_and';\nimport { maximum } from '../ops/maximum';\nimport { ones } from '../ops/ones';\nimport { scalar } from '../ops/scalar';\nimport { where } from '../ops/where';\nimport { zerosLike } from '../ops/zeros_like';\nexport const unsortedSegmentSumGradConfig = {\n kernelName: UnsortedSegmentSum,\n inputsToSave: ['segmentIds'],\n gradFunc: (dy, saved) => {\n const [segmentIds] = saved;\n const derX = () => {\n return gatherDropNegatives(dy, segmentIds);\n };\n return { x: derX };\n }\n};\nfunction gatherDropNegatives(x, indices) {\n // Helper function for unsorted segment ops. Gathers params for\n // positive segment ids and gathers 0 for inputs with negative segment id.\n // Mirrors _GatherDropNegatives from tensorflow/python/ops/math_grad.py\n const zeroClippedIndices = maximum(indices, zerosLike(indices));\n const gathered = gather(x, zeroClippedIndices);\n let isPositive = greaterEqual(indices, scalar(0, 'int32'));\n const numIters = gathered.rank - isPositive.rank;\n for (let i = 0; i < numIters; ++i) {\n isPositive = expandDims(isPositive, i + 1);\n }\n isPositive = logicalAnd(isPositive, ones(gathered.shape, 'bool'));\n const zeroSlice = zerosLike(gathered);\n return where(isPositive, gathered, zeroSlice);\n}\n//# sourceMappingURL=UnsortedSegmentSum_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ZerosLike } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const zerosLikeGradConfig = {\n kernelName: ZerosLike,\n gradFunc: (dy) => {\n return { x: () => zerosLike(dy) };\n }\n};\n//# sourceMappingURL=ZerosLike_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { absGradConfig } from './gradients/Abs_grad';\nimport { acosGradConfig } from './gradients/Acos_grad';\nimport { acoshGradConfig } from './gradients/Acosh_grad';\nimport { addGradConfig } from './gradients/Add_grad';\nimport { addNGradConfig } from './gradients/AddN_grad';\nimport { argMaxGradConfig } from './gradients/ArgMax_grad';\nimport { argMinGradConfig } from './gradients/ArgMin_grad';\nimport { asinGradConfig } from './gradients/Asin_grad';\nimport { asinhGradConfig } from './gradients/Asinh_grad';\nimport { atan2GradConfig } from './gradients/Atan2_grad';\nimport { atanGradConfig } from './gradients/Atan_grad';\nimport { atanhGradConfig } from './gradients/Atanh_grad';\nimport { avgPool3DGradConfig } from './gradients/AvgPool3D_grad';\nimport { avgPoolGradConfig } from './gradients/AvgPool_grad';\nimport { batchMatMulGradConfig } from './gradients/BatchMatMul_grad';\nimport { batchToSpaceNDGradConfig } from './gradients/BatchToSpaceND_grad';\nimport { broadcastToGradConfig } from './gradients/BroadcastTo_grad';\nimport { castGradConfig } from './gradients/Cast_grad';\nimport { ceilGradConfig } from './gradients/Ceil_grad';\nimport { clipByValueGradConfig } from './gradients/ClipByValue_grad';\nimport { concatGradConfig } from './gradients/Concat_grad';\nimport { conv2DGradConfig } from './gradients/Conv2D_grad';\nimport { conv2DBackpropInputGradConfig } from './gradients/Conv2DBackpropInput_grad';\nimport { conv3DGradConfig } from './gradients/Conv3D_grad';\nimport { cosGradConfig } from './gradients/Cos_grad';\nimport { coshGradConfig } from './gradients/Cosh_grad';\nimport { cumsumGradConfig } from './gradients/Cumsum_grad';\nimport { depthwiseConv2dNativeGradConfig } from './gradients/DepthwiseConv2dNative_grad';\nimport { dilation2dGradConfig } from './gradients/Dilation2D_grad';\nimport { divGradConfig } from './gradients/Div_grad';\nimport { eluGradConfig } from './gradients/Elu_grad';\nimport { erfGradConfig } from './gradients/Erf_grad';\nimport { expGradConfig } from './gradients/Exp_grad';\nimport { expm1GradConfig } from './gradients/Expm1_grad';\nimport { floorGradConfig } from './gradients/Floor_grad';\nimport { floorDivGradConfig } from './gradients/FloorDiv_grad';\nimport { fusedBatchNormGradConfig } from './gradients/FusedBatchNorm_grad';\nimport { gatherGradConfig } from './gradients/GatherV2_grad';\nimport { greaterEqualGradConfig } from './gradients/GreaterEqual_grad';\nimport { identityGradConfig } from './gradients/Identity_grad';\nimport { isFiniteGradConfig } from './gradients/IsFinite_grad';\nimport { isInfGradConfig } from './gradients/IsInf_grad';\nimport { isNanGradConfig } from './gradients/IsNan_grad';\nimport { log1pGradConfig } from './gradients/Log1p_grad';\nimport { logGradConfig } from './gradients/Log_grad';\nimport { logSoftmaxGradConfig } from 
'./gradients/LogSoftmax_grad';\nimport { lrnGradConfig } from './gradients/LRN_grad';\nimport { maxGradConfig } from './gradients/Max_grad';\nimport { maximumGradConfig } from './gradients/Maximum_grad';\nimport { maxPool3DGradConfig } from './gradients/MaxPool3D_grad';\nimport { maxPoolGradConfig } from './gradients/MaxPool_grad';\nimport { minGradConfig } from './gradients/Min_grad';\nimport { minimumGradConfig } from './gradients/Minimum_grad';\nimport { mirrorPadGradConfig } from './gradients/MirrorPad_grad';\nimport { modGradConfig } from './gradients/Mod_grad';\nimport { multiplyGradConfig } from './gradients/Multiply_grad';\nimport { negateGradConfig } from './gradients/Negate_grad';\nimport { oneHotGradConfig } from './gradients/OneHot_grad';\nimport { onesLikeGradConfig } from './gradients/OnesLike_grad';\nimport { padV2GradConfig } from './gradients/PadV2_grad';\nimport { powGradConfig } from './gradients/Pow_grad';\nimport { preluGradConfig } from './gradients/Prelu_grad';\nimport { reciprocalGradConfig } from './gradients/Reciprocal_grad';\nimport { relu6GradConfig } from './gradients/Relu6_grad';\nimport { reluGradConfig } from './gradients/Relu_grad';\nimport { reshapeGradConfig } from './gradients/Reshape_grad';\nimport { resizeBilinearGradConfig } from './gradients/ResizeBilinear_grad';\nimport { resizeNearestNeighborGradConfig } from './gradients/ResizeNearestNeighbor_grad';\nimport { reverseGradConfig } from './gradients/Reverse_grad';\nimport { roundGradConfig } from './gradients/Round_grad';\nimport { rsqrtGradConfig } from './gradients/Rsqrt_grad';\nimport { selectV2PoolGradConfig } from './gradients/SelectV2_grad';\nimport { seluGradConfig } from './gradients/Selu_grad';\nimport { sigmoidGradConfig } from './gradients/Sigmoid_grad';\nimport { signGradConfig } from './gradients/Sign_grad';\nimport { sinGradConfig } from './gradients/Sin_grad';\nimport { sinhGradConfig } from './gradients/Sinh_grad';\nimport { sliceGradConfig } from './gradients/Slice_grad';\nimport { softmaxGradConfig } from './gradients/Softmax_grad';\nimport { softplusGradConfig } from './gradients/Softplus_grad';\nimport { spaceToBatchNDGradConfig } from './gradients/SpaceToBatchND_grad';\nimport { splitVGradConfig } from './gradients/SplitV_grad';\nimport { sqrtGradConfig } from './gradients/Sqrt_grad';\nimport { squareGradConfig } from './gradients/Square_grad';\nimport { squaredDifferenceGradConfig } from './gradients/SquaredDifference_grad';\nimport { stepGradConfig } from './gradients/Step_grad';\nimport { subGradConfig } from './gradients/Sub_grad';\nimport { sumGradConfig } from './gradients/Sum_grad';\nimport { tanGradConfig } from './gradients/Tan_grad';\nimport { tanhGradConfig } from './gradients/Tanh_grad';\nimport { tileGradConfig } from './gradients/Tile_grad';\nimport { transposeGradConfig } from './gradients/Transpose_grad';\nimport { unpackGradConfig } from './gradients/Unpack_grad';\nimport { unsortedSegmentSumGradConfig } from './gradients/UnsortedSegmentSum_grad';\nimport { zerosLikeGradConfig } from './gradients/ZerosLike_grad';\nimport { registerGradient } from './kernel_registry';\n// Export all kernel configs here so that the package can auto register them\nconst gradConfigs = [\n absGradConfig,\n acosGradConfig,\n acoshGradConfig,\n addGradConfig,\n addNGradConfig,\n argMaxGradConfig,\n argMinGradConfig,\n asinGradConfig,\n asinhGradConfig,\n atan2GradConfig,\n atanGradConfig,\n atanhGradConfig,\n avgPool3DGradConfig,\n avgPoolGradConfig,\n batchMatMulGradConfig,\n 
batchToSpaceNDGradConfig,\n broadcastToGradConfig,\n castGradConfig,\n ceilGradConfig,\n clipByValueGradConfig,\n concatGradConfig,\n conv2DBackpropInputGradConfig,\n conv2DGradConfig,\n conv3DGradConfig,\n cosGradConfig,\n coshGradConfig,\n cumsumGradConfig,\n depthwiseConv2dNativeGradConfig,\n dilation2dGradConfig,\n divGradConfig,\n eluGradConfig,\n erfGradConfig,\n expGradConfig,\n expm1GradConfig,\n floorDivGradConfig,\n floorGradConfig,\n fusedBatchNormGradConfig,\n gatherGradConfig,\n greaterEqualGradConfig,\n identityGradConfig,\n isFiniteGradConfig,\n isInfGradConfig,\n isNanGradConfig,\n log1pGradConfig,\n logGradConfig,\n logSoftmaxGradConfig,\n lrnGradConfig,\n maxGradConfig,\n maximumGradConfig,\n maxPool3DGradConfig,\n maxPoolGradConfig,\n minGradConfig,\n minimumGradConfig,\n mirrorPadGradConfig,\n modGradConfig,\n multiplyGradConfig,\n negateGradConfig,\n oneHotGradConfig,\n onesLikeGradConfig,\n padV2GradConfig,\n powGradConfig,\n preluGradConfig,\n reciprocalGradConfig,\n relu6GradConfig,\n reluGradConfig,\n reshapeGradConfig,\n resizeBilinearGradConfig,\n resizeNearestNeighborGradConfig,\n reverseGradConfig,\n roundGradConfig,\n rsqrtGradConfig,\n selectV2PoolGradConfig,\n seluGradConfig,\n sigmoidGradConfig,\n signGradConfig,\n sinGradConfig,\n sinhGradConfig,\n sliceGradConfig,\n softmaxGradConfig,\n softplusGradConfig,\n spaceToBatchNDGradConfig,\n splitVGradConfig,\n sqrtGradConfig,\n squaredDifferenceGradConfig,\n squareGradConfig,\n stepGradConfig,\n subGradConfig,\n sumGradConfig,\n tanGradConfig,\n tanhGradConfig,\n tileGradConfig,\n transposeGradConfig,\n unpackGradConfig,\n unsortedSegmentSumGradConfig,\n zerosLikeGradConfig\n];\nfor (const gradientConfig of gradConfigs) {\n registerGradient(gradientConfig);\n}\n//# sourceMappingURL=register_all_gradients.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { abs } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.abs = function () {\n this.throwIfDisposed();\n return abs(this);\n};\n//# sourceMappingURL=abs.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { acos } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.acos = function () {\n this.throwIfDisposed();\n return acos(this);\n};\n//# sourceMappingURL=acos.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { acosh } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.acosh = function () {\n this.throwIfDisposed();\n return acosh(this);\n};\n//# sourceMappingURL=acosh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { addStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated strict variants of ops have been deprecated\n */\nTensor.prototype.addStrict = function (x) {\n this.throwIfDisposed();\n return addStrict(this, x);\n};\n//# sourceMappingURL=add_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { add } from '../../ops/add';\nimport { Tensor } from '../../tensor';\nTensor.prototype.add = function (b) {\n this.throwIfDisposed();\n return add(this, b);\n};\n//# sourceMappingURL=add.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { all } from '../../ops/all';\nimport { Tensor } from '../../tensor';\nTensor.prototype.all = function (axis, keepDims) {\n this.throwIfDisposed();\n return all(this, axis, keepDims);\n};\n//# sourceMappingURL=all.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { any } from '../../ops/any';\nimport { Tensor } from '../../tensor';\nTensor.prototype.any = function (axis, keepDims) {\n this.throwIfDisposed();\n return any(this, axis, keepDims);\n};\n//# sourceMappingURL=any.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { argMax } from '../../ops/arg_max';\nimport { Tensor } from '../../tensor';\nTensor.prototype.argMax = function (axis) {\n this.throwIfDisposed();\n return argMax(this, axis);\n};\n//# sourceMappingURL=arg_max.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { argMin } from '../../ops/arg_min';\nimport { Tensor } from '../../tensor';\nTensor.prototype.argMin = function (axis) {\n this.throwIfDisposed();\n return argMin(this, axis);\n};\n//# sourceMappingURL=arg_min.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { reshape } from '../../ops/reshape';\nimport { Tensor } from '../../tensor';\nimport { assert } from '../../util';\n/** Converts a size-1 `tf.Tensor` to a `tf.Scalar`.\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.asScalar = function () {\n this.throwIfDisposed();\n assert(this.size === 1, () => 'The array must have only 1 element.');\n return reshape(this, []);\n};\n//# sourceMappingURL=as_scalar.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { cast } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * Casts a `tf.Tensor` to a specified dtype.\n *\n * @param dtype Data-type to cast the tensor to.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.asType = function (dtype) {\n this.throwIfDisposed();\n return cast(this, dtype);\n};\n//# sourceMappingURL=as_type.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { reshape } from '../../ops/reshape';\nimport { Tensor } from '../../tensor';\n/** Converts a `tf.Tensor` to a `tf.Tensor1D`.\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.as1D = function () {\n this.throwIfDisposed();\n return reshape(this, [this.size]);\n};\n//# sourceMappingURL=as1d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { reshape } from '../../ops/reshape';\nimport { Tensor } from '../../tensor';\n/**\n * Converts a `tf.Tensor` to a `tf.Tensor2D`.\n *\n * @param rows Number of rows in `tf.Tensor2D`.\n * @param columns Number of columns in `tf.Tensor2D`.\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.as2D = function (rows, columns) {\n this.throwIfDisposed();\n return reshape(this, [rows, columns]);\n};\n//# sourceMappingURL=as2d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { reshape } from '../../ops/reshape';\nimport { Tensor } from '../../tensor';\n/**\n * Converts a `tf.Tensor` to a `tf.Tensor3D`.\n *\n * @param rows Number of rows in `tf.Tensor3D`.\n * @param columns Number of columns in `tf.Tensor3D`.\n * @param depth Depth of `tf.Tensor3D`.\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.as3D = function (rows, columns, depth) {\n this.throwIfDisposed();\n return reshape(this, [rows, columns, depth]);\n};\n//# sourceMappingURL=as3d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { reshape } from '../../ops/reshape';\nimport { Tensor } from '../../tensor';\n/**\n * Converts a `tf.Tensor` to a `tf.Tensor4D`.\n *\n * @param rows Number of rows in `tf.Tensor4D`.\n * @param columns Number of columns in `tf.Tensor4D`.\n * @param depth Depth of `tf.Tensor4D`.\n * @param depth2 4th dimension of `tf.Tensor4D`.\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.as4D = function (rows, columns, depth, depth2) {\n this.throwIfDisposed();\n return reshape(this, [rows, columns, depth, depth2]);\n};\n//# sourceMappingURL=as4d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { reshape } from '../../ops/reshape';\nimport { Tensor } from '../../tensor';\n/**\n * Converts a `tf.Tensor` to a `tf.Tensor5D`.\n *\n * @param rows Number of rows in `tf.Tensor5D`.\n * @param columns Number of columns in `tf.Tensor5D`.\n * @param depth Depth of `tf.Tensor5D`.\n * @param depth2 4th dimension of `tf.Tensor5D`.\n * @param depth3 5th dimension of 'tf.Tensor5D'\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.as5D = function (rows, columns, depth, depth2, depth3) {\n this.throwIfDisposed();\n return reshape(this, [rows, columns, depth, depth2, depth3]);\n};\n//# sourceMappingURL=as5d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { asin } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.asin = function () {\n this.throwIfDisposed();\n return asin(this);\n};\n//# sourceMappingURL=asin.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { asinh } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.asinh = function () {\n this.throwIfDisposed();\n return asinh(this);\n};\n//# sourceMappingURL=asinh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { atan } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.atan = function () {\n this.throwIfDisposed();\n return atan(this);\n};\n//# sourceMappingURL=atan.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { atan2 } from '../../ops/atan2';\nimport { Tensor } from '../../tensor';\nTensor.prototype.atan2 = function (b) {\n this.throwIfDisposed();\n return atan2(this, b);\n};\n//# sourceMappingURL=atan2.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { atanh } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.atanh = function () {\n this.throwIfDisposed();\n return atanh(this);\n};\n//# sourceMappingURL=atanh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { avgPool } from '../../ops/avg_pool';\nimport { Tensor } from '../../tensor';\nTensor.prototype.avgPool = function (filterSize, strides, pad, dimRoundingMode) {\n this.throwIfDisposed();\n return avgPool(this, filterSize, strides, pad, dimRoundingMode);\n};\n//# sourceMappingURL=avg_pool.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { batchToSpaceND } from '../../ops/batch_to_space_nd';\nimport { Tensor } from '../../tensor';\nTensor.prototype.batchToSpaceND = function (blockShape, crops) {\n this.throwIfDisposed();\n return batchToSpaceND(this, blockShape, crops);\n};\n//# sourceMappingURL=batch_to_space_nd.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { batchNorm } from '../../ops/batchnorm';\nimport { Tensor } from '../../tensor';\nTensor.prototype.batchNorm = function (mean, variance, offset, scale, varianceEpsilon) {\n this.throwIfDisposed();\n return batchNorm(this, mean, variance, offset, scale, varianceEpsilon);\n};\n//# sourceMappingURL=batchnorm.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { broadcastTo } from '../../ops/broadcast_to';\nimport { Tensor } from '../../tensor';\nTensor.prototype.broadcastTo = function (shape) {\n this.throwIfDisposed();\n return broadcastTo(this, shape);\n};\n//# sourceMappingURL=broadcast_to.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { cast } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.cast = function (dtype) {\n this.throwIfDisposed();\n return cast(this, dtype);\n};\n//# sourceMappingURL=cast.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { ceil } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.ceil = function () {\n this.throwIfDisposed();\n return ceil(this);\n};\n//# sourceMappingURL=ceil.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { clipByValue } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.clipByValue = function (min, max) {\n this.throwIfDisposed();\n return clipByValue(this, min, max);\n};\n//# sourceMappingURL=clip_by_value.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { concat } from '../../ops/concat';\nimport { Tensor } from '../../tensor';\nTensor.prototype.concat = function (x, axis) {\n this.throwIfDisposed();\n if (x instanceof Tensor) {\n x = [x];\n }\n return concat([this, ...x], axis);\n};\n//# sourceMappingURL=concat.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { conv1d } from '../../ops/conv1d';\nimport { Tensor } from '../../tensor';\nTensor.prototype.conv1d = function (filter, stride, pad, dataFormat, dilation, dimRoundingMode) {\n this.throwIfDisposed();\n return conv1d(this, filter, stride, pad, dataFormat, dilation, dimRoundingMode);\n};\n//# sourceMappingURL=conv1d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { conv2dTranspose } from '../../ops/conv2d_transpose';\nimport { Tensor } from '../../tensor';\nTensor.prototype.conv2dTranspose = function (filter, outputShape, strides, pad, dimRoundingMode) {\n this.throwIfDisposed();\n return conv2dTranspose(this, filter, outputShape, strides, pad, dimRoundingMode);\n};\n//# sourceMappingURL=conv2d_transpose.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { conv2d } from '../../ops/conv2d';\nimport { Tensor } from '../../tensor';\nTensor.prototype.conv2d = function (filter, strides, pad, dataFormat, dilations, dimRoundingMode) {\n this.throwIfDisposed();\n return conv2d(this, filter, strides, pad, dataFormat, dilations, dimRoundingMode);\n};\n//# sourceMappingURL=conv2d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { cos } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.cos = function () {\n this.throwIfDisposed();\n return cos(this);\n};\n//# sourceMappingURL=cos.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { cosh } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.cosh = function () {\n this.throwIfDisposed();\n return cosh(this);\n};\n//# sourceMappingURL=cosh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { cumsum } from '../../ops/cumsum';\nimport { Tensor } from '../../tensor';\nTensor.prototype.cumsum = function (axis, exclusive, reverse) {\n this.throwIfDisposed();\n return cumsum(this, axis, exclusive, reverse);\n};\n//# sourceMappingURL=cumsum.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { depthToSpace } from '../../ops/depth_to_space';\nimport { Tensor } from '../../tensor';\nTensor.prototype.depthToSpace = function (blockSize, dataFormat) {\n this.throwIfDisposed();\n return depthToSpace(this, blockSize, dataFormat);\n};\n//# sourceMappingURL=depth_to_space.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { deprecationWarn } from '../../globals';\nimport { depthwiseConv2d } from '../../ops/depthwise_conv2d';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated Use `depthwiseConv2d` instead.\n */\nTensor.prototype.depthwiseConv2D = function (filter, strides, pad, dataFormat, dilations, dimRoundingMode) {\n deprecationWarn('depthwiseConv2D is deprecated, use depthwiseConv2d instead');\n this.throwIfDisposed();\n return depthwiseConv2d(this, filter, strides, pad, dataFormat, dilations, dimRoundingMode);\n};\n//# sourceMappingURL=depthwise_conv2D_deprecated.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { depthwiseConv2d } from '../../ops/depthwise_conv2d';\nimport { Tensor } from '../../tensor';\nTensor.prototype.depthwiseConv2d = function (filter, strides, pad, dataFormat, dilations, dimRoundingMode) {\n this.throwIfDisposed();\n return depthwiseConv2d(this, filter, strides, pad, dataFormat, dilations, dimRoundingMode);\n};\n//# sourceMappingURL=depthwise_conv2d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { dilation2d } from '../../ops/dilation2d';\nimport { Tensor } from '../../tensor';\nTensor.prototype.dilation2d = function (filter, strides, pad, dilations, dataFormat) {\n this.throwIfDisposed();\n return dilation2d(this, filter, strides, pad, dilations, dataFormat);\n};\n//# sourceMappingURL=dilation2d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { divNoNan } from '../../ops/div_no_nan';\nimport { Tensor } from '../../tensor';\nTensor.prototype.divNoNan = function (b) {\n this.throwIfDisposed();\n return divNoNan(this, b);\n};\n//# sourceMappingURL=div_no_nan.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { divStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.divStrict = function (x) {\n this.throwIfDisposed();\n return divStrict(this, x);\n};\n//# sourceMappingURL=div_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { div } from '../../ops/div';\nimport { Tensor } from '../../tensor';\nTensor.prototype.div = function (b) {\n this.throwIfDisposed();\n return div(this, b);\n};\n//# sourceMappingURL=div.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { dot } from '../../ops/dot';\nimport { Tensor } from '../../tensor';\nTensor.prototype.dot = function (b) {\n this.throwIfDisposed();\n return dot(this, b);\n};\n//# sourceMappingURL=dot.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { elu } from '../../ops/elu';\nimport { Tensor } from '../../tensor';\nTensor.prototype.elu = function () {\n this.throwIfDisposed();\n return elu(this);\n};\n//# sourceMappingURL=elu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { equalStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated strict variants of ops have been deprecated\n */\nTensor.prototype.equalStrict = function (x) {\n this.throwIfDisposed();\n return equalStrict(this, x);\n};\n//# sourceMappingURL=equal_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { equal } from '../../ops/equal';\nimport { Tensor } from '../../tensor';\nTensor.prototype.equal = function (b) {\n this.throwIfDisposed();\n return equal(this, b);\n};\n//# sourceMappingURL=equal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { erf } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.erf = function () {\n this.throwIfDisposed();\n return erf(this);\n};\n//# sourceMappingURL=erf.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { exp } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.exp = function () {\n this.throwIfDisposed();\n return exp(this);\n};\n//# sourceMappingURL=exp.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { expandDims } from '../../ops/expand_dims';\nimport { Tensor } from '../../tensor';\nTensor.prototype.expandDims = function (axis) {\n this.throwIfDisposed();\n return expandDims(this, axis);\n};\n//# sourceMappingURL=expand_dims.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { expm1 } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.expm1 = function () {\n this.throwIfDisposed();\n return expm1(this);\n};\n//# sourceMappingURL=expm1.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { fft } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.fft = function () {\n this.throwIfDisposed();\n return fft(this);\n};\n//# sourceMappingURL=fft.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { reshape } from '../../ops/reshape';\nimport { Tensor } from '../../tensor';\n/** Flatten a Tensor to a 1D array.\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.flatten = function () {\n this.throwIfDisposed();\n return reshape(this, [this.size]);\n};\n//# sourceMappingURL=flatten.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { floor } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.floor = function () {\n this.throwIfDisposed();\n return floor(this);\n};\n//# sourceMappingURL=floor.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { floorDiv } from '../../ops/floorDiv';\nimport { Tensor } from '../../tensor';\nTensor.prototype.floorDiv = function (b) {\n this.throwIfDisposed();\n return floorDiv(this, b);\n};\n//# sourceMappingURL=floorDiv.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { gather } from '../../ops/gather';\nimport { Tensor } from '../../tensor';\nTensor.prototype.gather = function (indices, axis) {\n this.throwIfDisposed();\n return gather(this, indices, axis);\n};\n//# sourceMappingURL=gather.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { greaterEqualStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated strict variants of ops have been deprecated\n */\nTensor.prototype.greaterEqualStrict = function (x) {\n this.throwIfDisposed();\n return greaterEqualStrict(this, x);\n};\n//# sourceMappingURL=greater_equal_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { greaterEqual } from '../../ops/greater_equal';\nimport { Tensor } from '../../tensor';\nTensor.prototype.greaterEqual = function (b) {\n this.throwIfDisposed();\n return greaterEqual(this, b);\n};\n//# sourceMappingURL=greater_equal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { greaterStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated strict variants of ops have been deprecated\n */\nTensor.prototype.greaterStrict = function (x) {\n this.throwIfDisposed();\n return greaterStrict(this, x);\n};\n//# sourceMappingURL=greater_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { greater } from '../../ops/greater';\nimport { Tensor } from '../../tensor';\nTensor.prototype.greater = function (b) {\n this.throwIfDisposed();\n return greater(this, b);\n};\n//# sourceMappingURL=greater.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { ifft } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.ifft = function () {\n this.throwIfDisposed();\n return ifft(this);\n};\n//# sourceMappingURL=ifft.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { irfft } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.irfft = function () {\n this.throwIfDisposed();\n return irfft(this);\n};\n//# sourceMappingURL=irfft.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { isFinite } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.isFinite = function () {\n this.throwIfDisposed();\n return isFinite(this);\n};\n//# sourceMappingURL=is_finite.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { isInf } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.isInf = function () {\n this.throwIfDisposed();\n return isInf(this);\n};\n//# sourceMappingURL=is_inf.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { isNaN } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.isNaN = function () {\n this.throwIfDisposed();\n return isNaN(this);\n};\n//# sourceMappingURL=is_nan.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { leakyRelu } from '../../ops/leaky_relu';\nimport { Tensor } from '../../tensor';\nTensor.prototype.leakyRelu = function (alpha) {\n this.throwIfDisposed();\n return leakyRelu(this, alpha);\n};\n//# sourceMappingURL=leaky_relu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { lessEqualStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated strict variants of ops have been deprecated\n */\nTensor.prototype.lessEqualStrict = function (x) {\n this.throwIfDisposed();\n return lessEqualStrict(this, x);\n};\n//# sourceMappingURL=less_equal_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { lessEqual } from '../../ops/less_equal';\nimport { Tensor } from '../../tensor';\nTensor.prototype.lessEqual = function (b) {\n this.throwIfDisposed();\n return lessEqual(this, b);\n};\n//# sourceMappingURL=less_equal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { lessStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.lessStrict = function (x) {\n this.throwIfDisposed();\n return lessStrict(this, x);\n};\n//# sourceMappingURL=less_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { less } from '../../ops/less';\nimport { Tensor } from '../../tensor';\nTensor.prototype.less = function (b) {\n this.throwIfDisposed();\n return less(this, b);\n};\n//# sourceMappingURL=less.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { localResponseNormalization } from '../../ops/local_response_normalization';\nimport { Tensor } from '../../tensor';\nTensor.prototype.localResponseNormalization = function (depthRadius, bias, alpha, beta) {\n this.throwIfDisposed();\n return localResponseNormalization(this, depthRadius, bias, alpha, beta);\n};\n//# sourceMappingURL=local_response_normalization.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { logSigmoid } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.logSigmoid = function () {\n this.throwIfDisposed();\n return logSigmoid(this);\n};\n//# sourceMappingURL=log_sigmoid.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { logSoftmax } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.logSoftmax = function (axis) {\n this.throwIfDisposed();\n return logSoftmax(this, axis);\n};\n//# sourceMappingURL=log_softmax.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { logSumExp } from '../../ops/log_sum_exp';\nimport { Tensor } from '../../tensor';\nTensor.prototype.logSumExp = function (axis, keepDims) {\n this.throwIfDisposed();\n return logSumExp(this, axis, keepDims);\n};\n//# sourceMappingURL=log_sum_exp.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { log } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.log = function () {\n this.throwIfDisposed();\n return log(this);\n};\n//# sourceMappingURL=log.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { log1p } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.log1p = function () {\n this.throwIfDisposed();\n return log1p(this);\n};\n//# sourceMappingURL=log1p.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { logicalAnd } from '../../ops/logical_and';\nimport { Tensor } from '../../tensor';\nTensor.prototype.logicalAnd = function (b) {\n this.throwIfDisposed();\n return logicalAnd(this, b);\n};\n//# sourceMappingURL=logical_and.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { logicalNot } from '../../ops/logical_not';\nimport { Tensor } from '../../tensor';\nTensor.prototype.logicalNot = function () {\n this.throwIfDisposed();\n return logicalNot(this);\n};\n//# sourceMappingURL=logical_not.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { logicalOr } from '../../ops/logical_or';\nimport { Tensor } from '../../tensor';\nTensor.prototype.logicalOr = function (b) {\n this.throwIfDisposed();\n return logicalOr(this, b);\n};\n//# sourceMappingURL=logical_or.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { logicalXor } from '../../ops/logical_xor';\nimport { Tensor } from '../../tensor';\nTensor.prototype.logicalXor = function (b) {\n this.throwIfDisposed();\n return logicalXor(this, b);\n};\n//# sourceMappingURL=logical_xor.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { matMul } from '../../ops/mat_mul';\nimport { Tensor } from '../../tensor';\nTensor.prototype.matMul = function (b, transposeA, transposeB) {\n this.throwIfDisposed();\n return matMul(this, b, transposeA, transposeB);\n};\n//# sourceMappingURL=mat_mul.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { maxPool } from '../../ops/max_pool';\nimport { Tensor } from '../../tensor';\nTensor.prototype.maxPool = function (filterSize, strides, pad, dimRoundingMode) {\n this.throwIfDisposed();\n return maxPool(this, filterSize, strides, pad, dimRoundingMode);\n};\n//# sourceMappingURL=max_pool.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { max } from '../../ops/max';\nimport { Tensor } from '../../tensor';\nTensor.prototype.max = function (axis, keepDims) {\n this.throwIfDisposed();\n return max(this, axis, keepDims);\n};\n//# sourceMappingURL=max.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { maximumStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated strict variants of ops have been deprecated\n */\nTensor.prototype.maximumStrict = function (x) {\n this.throwIfDisposed();\n return maximumStrict(this, x);\n};\n//# sourceMappingURL=maximum_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { maximum } from '../../ops/maximum';\nimport { Tensor } from '../../tensor';\nTensor.prototype.maximum = function (b) {\n this.throwIfDisposed();\n return maximum(this, b);\n};\n//# sourceMappingURL=maximum.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { mean } from '../../ops/mean';\nimport { Tensor } from '../../tensor';\nTensor.prototype.mean = function (axis, keepDims) {\n this.throwIfDisposed();\n return mean(this, axis, keepDims);\n};\n//# sourceMappingURL=mean.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { min } from '../../ops/min';\nimport { Tensor } from '../../tensor';\nTensor.prototype.min = function (axis, keepDims) {\n this.throwIfDisposed();\n return min(this, axis, keepDims);\n};\n//# sourceMappingURL=min.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { minimumStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated strict variants of ops have been deprecated\n */\nTensor.prototype.minimumStrict = function (x) {\n this.throwIfDisposed();\n return minimumStrict(this, x);\n};\n//# sourceMappingURL=minimum_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { minimum } from '../../ops/minimum';\nimport { Tensor } from '../../tensor';\nTensor.prototype.minimum = function (b) {\n this.throwIfDisposed();\n return minimum(this, b);\n};\n//# sourceMappingURL=minimum.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { mirrorPad } from '../../ops/mirror_pad';\nimport { Tensor } from '../../tensor';\nTensor.prototype.mirrorPad = function (paddings, mode) {\n this.throwIfDisposed();\n return mirrorPad(this, paddings, mode);\n};\n//# sourceMappingURL=mirror_pad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { modStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated strict variants of ops have been deprecated\n */\nTensor.prototype.modStrict = function (x) {\n this.throwIfDisposed();\n return modStrict(this, x);\n};\n//# sourceMappingURL=mod_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { mod } from '../../ops/mod';\nimport { Tensor } from '../../tensor';\nTensor.prototype.mod = function (b) {\n this.throwIfDisposed();\n return mod(this, b);\n};\n//# sourceMappingURL=mod.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { mulStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated strict variants of ops have been deprecated\n */\nTensor.prototype.mulStrict = function (x) {\n this.throwIfDisposed();\n return mulStrict(this, x);\n};\n//# sourceMappingURL=mul_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { mul } from '../../ops/mul';\nimport { Tensor } from '../../tensor';\nTensor.prototype.mul = function (b) {\n this.throwIfDisposed();\n return mul(this, b);\n};\n//# sourceMappingURL=mul.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { neg } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.neg = function () {\n this.throwIfDisposed();\n return neg(this);\n};\n//# sourceMappingURL=neg.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { norm } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.norm = function (ord, axis, keepDims) {\n this.throwIfDisposed();\n return norm(this, ord, axis, keepDims);\n};\n//# sourceMappingURL=norm.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { notEqualStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated strict variants of ops have been deprecated\n */\nTensor.prototype.notEqualStrict = function (x) {\n this.throwIfDisposed();\n return notEqualStrict(this, x);\n};\n//# sourceMappingURL=not_equal_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { notEqual } from '../../ops/not_equal';\nimport { Tensor } from '../../tensor';\nTensor.prototype.notEqual = function (b) {\n this.throwIfDisposed();\n return notEqual(this, b);\n};\n//# sourceMappingURL=not_equal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { oneHot } from '../../ops/one_hot';\nimport { Tensor } from '../../tensor';\nTensor.prototype.oneHot = function (depth, onValue = 1, offValue = 0) {\n this.throwIfDisposed();\n return oneHot(this, depth, onValue, offValue);\n};\n//# sourceMappingURL=one_hot.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { onesLike } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.onesLike = function () {\n this.throwIfDisposed();\n return onesLike(this);\n};\n//# sourceMappingURL=ones_like.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { pad } from '../../ops/pad';\nimport { Tensor } from '../../tensor';\nTensor.prototype.pad = function (paddings, constantValue) {\n this.throwIfDisposed();\n return pad(this, paddings, constantValue);\n};\n//# sourceMappingURL=pad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { pool } from '../../ops/pool';\nimport { Tensor } from '../../tensor';\nTensor.prototype.pool = function (windowShape, poolingType, padding, dilationRate, strides) {\n this.throwIfDisposed();\n return pool(this, windowShape, poolingType, padding, dilationRate, strides);\n};\n//# sourceMappingURL=pool.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { powStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated strict variants of ops have been deprecated\n */\nTensor.prototype.powStrict = function (exp) {\n this.throwIfDisposed();\n return powStrict(this, exp);\n};\n//# sourceMappingURL=pow_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { pow } from '../../ops/pow';\nimport { Tensor } from '../../tensor';\nTensor.prototype.pow = function (exp) {\n this.throwIfDisposed();\n return pow(this, exp);\n};\n//# sourceMappingURL=pow.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { prelu } from '../../ops/prelu';\nimport { Tensor } from '../../tensor';\nTensor.prototype.prelu = function (alpha) {\n this.throwIfDisposed();\n return prelu(this, alpha);\n};\n//# sourceMappingURL=prelu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { prod } from '../../ops/prod';\nimport { Tensor } from '../../tensor';\nTensor.prototype.prod = function (axis, keepDims) {\n this.throwIfDisposed();\n return prod(this, axis, keepDims);\n};\n//# sourceMappingURL=prod.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { reciprocal } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.reciprocal = function () {\n this.throwIfDisposed();\n return reciprocal(this);\n};\n//# sourceMappingURL=reciprocal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { relu } from '../../ops/relu';\nimport { Tensor } from '../../tensor';\nTensor.prototype.relu = function () {\n this.throwIfDisposed();\n return relu(this);\n};\n//# sourceMappingURL=relu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { relu6 } from '../../ops/relu6';\nimport { Tensor } from '../../tensor';\nTensor.prototype.relu6 = function () {\n this.throwIfDisposed();\n return relu6(this);\n};\n//# sourceMappingURL=relu6.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { reshape } from '../../ops/reshape';\nimport { Tensor } from '../../tensor';\n/**\n * Reshapes the tensor into the shape of the provided tensor.\n *\n * @param x The tensor of required shape.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.reshapeAs = function (x) {\n this.throwIfDisposed();\n return reshape(this, x.shape);\n};\n//# sourceMappingURL=reshape_as.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { reshape } from '../../ops/reshape';\nimport { Tensor } from '../../tensor';\nTensor.prototype.reshape = function (shape) {\n this.throwIfDisposed();\n return reshape(this, shape);\n};\n//# sourceMappingURL=reshape.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { resizeBilinear } from '../../ops/image/resize_bilinear';\nimport { Tensor } from '../../tensor';\nTensor.prototype.resizeBilinear = function (newShape2D, alignCorners) {\n this.throwIfDisposed();\n return resizeBilinear(this, newShape2D, alignCorners);\n};\n//# sourceMappingURL=resize_bilinear.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
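`reshapeAs` above simply borrows the shape of another tensor and delegates to `reshape`. A short sketch contrasting the two chained calls, under the same '@tensorflow/tfjs' import assumption:

import * as tf from '@tensorflow/tfjs';

const flat = tf.range(0, 6);                   // shape [6]
const grid = flat.reshape([2, 3]);             // explicit target shape
const template = tf.zeros([3, 2]);
const likeTemplate = flat.reshapeAs(template); // uses template.shape, i.e. [3, 2]
console.log(grid.shape, likeTemplate.shape);   // [2, 3] [3, 2]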
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { resizeNearestNeighbor } from '../../ops/image/resize_nearest_neighbor';\nimport { Tensor } from '../../tensor';\nTensor.prototype.resizeNearestNeighbor = function (newShape2D, alignCorners) {\n this.throwIfDisposed();\n return resizeNearestNeighbor(this, newShape2D, alignCorners);\n};\n//# sourceMappingURL=resize_nearest_neighbor.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { reverse } from '../../ops/reverse';\nimport { Tensor } from '../../tensor';\nTensor.prototype.reverse = function (axis) {\n this.throwIfDisposed();\n return reverse(this, axis);\n};\n//# sourceMappingURL=reverse.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { rfft } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.rfft = function () {\n this.throwIfDisposed();\n return rfft(this);\n};\n//# sourceMappingURL=rfft.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { round } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.round = function () {\n this.throwIfDisposed();\n return round(this);\n};\n//# sourceMappingURL=round.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { rsqrt } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.rsqrt = function () {\n this.throwIfDisposed();\n return rsqrt(this);\n};\n//# sourceMappingURL=rsqrt.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { selu } from '../../ops/selu';\nimport { Tensor } from '../../tensor';\nTensor.prototype.selu = function () {\n this.throwIfDisposed();\n return selu(this);\n};\n//# sourceMappingURL=selu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { separableConv2d } from '../../ops/separable_conv2d';\nimport { Tensor } from '../../tensor';\nTensor.prototype.separableConv2d = function (depthwiseFilter, pointwiseFilter, strides, pad, dilation, dataFormat) {\n this.throwIfDisposed();\n return separableConv2d(this, depthwiseFilter, pointwiseFilter, strides, pad, dilation, dataFormat);\n};\n//# sourceMappingURL=separable_conv2d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { sigmoid } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.sigmoid = function () {\n this.throwIfDisposed();\n return sigmoid(this);\n};\n//# sourceMappingURL=sigmoid.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { sign } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.sign = function () {\n this.throwIfDisposed();\n return sign(this);\n};\n//# sourceMappingURL=sign.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { sin } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.sin = function () {\n this.throwIfDisposed();\n return sin(this);\n};\n//# sourceMappingURL=sin.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { sinh } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.sinh = function () {\n this.throwIfDisposed();\n return sinh(this);\n};\n//# sourceMappingURL=sinh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { slice } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.slice = function (begin, size) {\n this.throwIfDisposed();\n return slice(this, begin, size);\n};\n//# sourceMappingURL=slice.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
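The chained `slice(begin, size)` above mirrors the functional op's argument order. A small usage sketch (same '@tensorflow/tfjs' assumption):

import * as tf from '@tensorflow/tfjs';

const x = tf.tensor2d([[1, 2, 3], [4, 5, 6]]);
const row = x.slice([1, 0], [1, 3]); // start at row 1, col 0; take 1 row and 3 columns
row.print();                         // [[4, 5, 6]]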
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { softmax } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.softmax = function (dim) {\n this.throwIfDisposed();\n return softmax(this, dim);\n};\n//# sourceMappingURL=softmax.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { softplus } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.softplus = function () {\n this.throwIfDisposed();\n return softplus(this);\n};\n//# sourceMappingURL=softplus.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { spaceToBatchND } from '../../ops/space_to_batch_nd';\nimport { Tensor } from '../../tensor';\nTensor.prototype.spaceToBatchND = function (blockShape, paddings) {\n this.throwIfDisposed();\n return spaceToBatchND(this, blockShape, paddings);\n};\n//# sourceMappingURL=space_to_batch_nd.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { split } from '../../ops/split';\nimport { Tensor } from '../../tensor';\nTensor.prototype.split = function (numOrSizeSplits, axis) {\n this.throwIfDisposed();\n return split(this, numOrSizeSplits, axis);\n};\n//# sourceMappingURL=split.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { sqrt } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.sqrt = function () {\n this.throwIfDisposed();\n return sqrt(this);\n};\n//# sourceMappingURL=sqrt.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { square } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.square = function () {\n this.throwIfDisposed();\n return square(this);\n};\n//# sourceMappingURL=square.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { squaredDifference } from '../../ops/squared_difference';\nimport { Tensor } from '../../tensor';\nTensor.prototype.squaredDifference = function (b) {\n this.throwIfDisposed();\n return squaredDifference(this, b);\n};\n//# sourceMappingURL=squared_difference.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { squaredDifferenceStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated strict variants of ops have been deprecated\n */\nTensor.prototype.squaredDifferenceStrict = function (x) {\n this.throwIfDisposed();\n return squaredDifferenceStrict(this, x);\n};\n//# sourceMappingURL=squared_difference_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { squeeze } from '../../ops/squeeze';\nimport { Tensor } from '../../tensor';\nTensor.prototype.squeeze = function (axis) {\n this.throwIfDisposed();\n return squeeze(this, axis);\n};\n//# sourceMappingURL=squeeze.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { stack } from '../../ops/stack';\nimport { Tensor } from '../../tensor';\nTensor.prototype.stack = function (x, axis) {\n this.throwIfDisposed();\n const tensorsToBeStacked = x instanceof Tensor ? [this, x] : [this, ...x];\n return stack(tensorsToBeStacked, axis);\n};\n//# sourceMappingURL=stack.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { step } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.step = function (alpha) {\n this.throwIfDisposed();\n return step(this, alpha);\n};\n//# sourceMappingURL=step.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { stridedSlice } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.stridedSlice = function (begin, end, strides, beginMask, endMask, ellipsisMask, newAxisMask, shrinkAxisMask) {\n this.throwIfDisposed();\n return stridedSlice(this, begin, end, strides, beginMask, endMask, ellipsisMask, newAxisMask, shrinkAxisMask);\n};\n//# sourceMappingURL=strided_slice.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
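The `stack` registration above is one of the few chained ops with extra glue: it prepends `this` to the argument before delegating, so `a.stack(b)` behaves like `tf.stack([a, b])`. A sketch, with illustrative variable names:

import * as tf from '@tensorflow/tfjs';

const a = tf.tensor1d([1, 2]);
const b = tf.tensor1d([3, 4]);
const stacked = a.stack(b);   // same as tf.stack([a, b]); axis defaults to 0
console.log(stacked.shape);   // [2, 2]
stacked.print();              // [[1, 2], [3, 4]]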
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { subStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated strict variants of ops have been deprecated\n */\nTensor.prototype.subStrict = function (x) {\n this.throwIfDisposed();\n return subStrict(this, x);\n};\n//# sourceMappingURL=sub_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { sub } from '../../ops/sub';\nimport { Tensor } from '../../tensor';\nTensor.prototype.sub = function (b) {\n this.throwIfDisposed();\n return sub(this, b);\n};\n//# sourceMappingURL=sub.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { sum } from '../../ops/sum';\nimport { Tensor } from '../../tensor';\nTensor.prototype.sum = function (axis, keepDims) {\n this.throwIfDisposed();\n return sum(this, axis, keepDims);\n};\n//# sourceMappingURL=sum.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { tan } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.tan = function () {\n this.throwIfDisposed();\n return tan(this);\n};\n//# sourceMappingURL=tan.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { tanh } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.tanh = function () {\n this.throwIfDisposed();\n return tanh(this);\n};\n//# sourceMappingURL=tanh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { tile } from '../../ops/tile';\nimport { Tensor } from '../../tensor';\nTensor.prototype.tile = function (reps) {\n this.throwIfDisposed();\n return tile(this, reps);\n};\n//# sourceMappingURL=tile.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { cast } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/** Casts the array to type `bool`\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.toBool = function () {\n this.throwIfDisposed();\n return cast(this, 'bool');\n};\n//# sourceMappingURL=to_bool.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { cast } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/** Casts the array to type `float32`\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.toFloat = function () {\n this.throwIfDisposed();\n return cast(this, 'float32');\n};\n//# sourceMappingURL=to_float.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { cast } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/** Casts the array to type `int32`\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.toInt = function () {\n this.throwIfDisposed();\n return cast(this, 'int32');\n};\n//# sourceMappingURL=to_int.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
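`toBool`, `toFloat`, and `toInt` above are thin wrappers around `cast` with a fixed dtype. A quick sketch of the resulting conversions (same '@tensorflow/tfjs' assumption):

import * as tf from '@tensorflow/tfjs';

const scores = tf.tensor1d([0, 0.7, 1.9]);
const asInt = scores.toInt();    // cast to 'int32'  -> [0, 0, 1]
const asBool = scores.toBool();  // cast to 'bool'   -> [false, true, true]
const asFloat = asInt.toFloat(); // cast back to 'float32'
console.log(asInt.dtype, asBool.dtype, asFloat.dtype); // int32 bool float32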
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { topk } from '../../ops/topk';\nimport { Tensor } from '../../tensor';\nTensor.prototype.topk = function (k, sorted) {\n this.throwIfDisposed();\n return topk(this, k, sorted);\n};\n//# sourceMappingURL=topk.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { transpose } from '../../ops/transpose';\nimport { Tensor } from '../../tensor';\nTensor.prototype.transpose = function (perm) {\n this.throwIfDisposed();\n return transpose(this, perm);\n};\n//# sourceMappingURL=transpose.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { unique } from '../../ops/unique';\nimport { Tensor } from '../../tensor';\nTensor.prototype.unique = function (axis) {\n this.throwIfDisposed();\n return unique(this, axis);\n};\n//# sourceMappingURL=unique.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
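`topk` returns a `{values, indices}` pair rather than a single tensor, which is easy to miss from the one-line registration above. Usage sketch:

import * as tf from '@tensorflow/tfjs';

const logits = tf.tensor1d([1, 9, 3, 7]);
const { values, indices } = logits.topk(2); // k = 2; results are sorted by default
values.print();  // [9, 7]
indices.print(); // [1, 3]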
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { unsortedSegmentSum } from '../../ops/unsorted_segment_sum';\nimport { Tensor } from '../../tensor';\nTensor.prototype.unsortedSegmentSum = function (segmentIds, numSegments) {\n this.throwIfDisposed();\n return unsortedSegmentSum(this, segmentIds, numSegments);\n};\n//# sourceMappingURL=unsorted_segment_sum.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { unstack } from '../../ops/unstack';\nimport { Tensor } from '../../tensor';\nTensor.prototype.unstack = function (axis) {\n this.throwIfDisposed();\n return unstack(this, axis);\n};\n//# sourceMappingURL=unstack.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { where } from '../../ops/where';\nimport { Tensor } from '../../tensor';\nTensor.prototype.where = function (condition, x) {\n this.throwIfDisposed();\n return where(condition, this, x);\n};\n//# sourceMappingURL=where.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { zerosLike } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.zerosLike = function () {\n this.throwIfDisposed();\n return zerosLike(this);\n};\n//# sourceMappingURL=zeros_like.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport './abs';\nimport './acos';\nimport './acosh';\nimport './add_strict';\nimport './add';\nimport './all';\nimport './any';\nimport './arg_max';\nimport './arg_min';\nimport './as_scalar';\nimport './as_type';\nimport './as1d';\nimport './as2d';\nimport './as3d';\nimport './as4d';\nimport './as5d';\nimport './asin';\nimport './asinh';\nimport './atan';\nimport './atan2';\nimport './atanh';\nimport './avg_pool';\nimport './batch_to_space_nd';\nimport './batchnorm';\nimport './broadcast_to';\nimport './cast';\nimport './ceil';\nimport './clip_by_value';\nimport './concat';\nimport './conv1d';\nimport './conv2d_transpose';\nimport './conv2d';\nimport './cos';\nimport './cosh';\nimport './cumsum';\nimport './depth_to_space';\nimport './depthwise_conv2D_deprecated';\nimport './depthwise_conv2d';\nimport './dilation2d';\nimport './div_no_nan';\nimport './div_strict';\nimport './div';\nimport './dot';\nimport './elu';\nimport './equal_strict';\nimport './equal';\nimport './erf';\nimport './exp';\nimport './expand_dims';\nimport './expm1';\nimport './fft';\nimport './flatten';\nimport './floor';\nimport './floorDiv';\nimport './gather';\nimport './greater_equal_strict';\nimport './greater_equal';\nimport './greater_strict';\nimport './greater';\nimport './ifft';\nimport './irfft';\nimport './is_finite';\nimport './is_inf';\nimport './is_nan';\nimport './leaky_relu';\nimport './less_equal_strict';\nimport './less_equal';\nimport './less_strict';\nimport './less';\nimport './local_response_normalization';\nimport './log_sigmoid';\nimport './log_softmax';\nimport './log_sum_exp';\nimport './log';\nimport './log1p';\nimport './logical_and';\nimport './logical_not';\nimport './logical_or';\nimport './logical_xor';\nimport './mat_mul';\nimport './max_pool';\nimport './max';\nimport './maximum_strict';\nimport './maximum';\nimport './mean';\nimport 
'./min';\nimport './minimum_strict';\nimport './minimum';\nimport './mirror_pad';\nimport './mod_strict';\nimport './mod';\nimport './mul_strict';\nimport './mul';\nimport './neg';\nimport './norm';\nimport './not_equal_strict';\nimport './not_equal';\nimport './one_hot';\nimport './ones_like';\nimport './pad';\nimport './pool';\nimport './pow_strict';\nimport './pow';\nimport './prelu';\nimport './prod';\nimport './reciprocal';\nimport './relu';\nimport './relu6';\nimport './reshape_as';\nimport './reshape';\nimport './resize_bilinear';\nimport './resize_nearest_neighbor';\nimport './reverse';\nimport './rfft';\nimport './round';\nimport './rsqrt';\nimport './selu';\nimport './separable_conv2d';\nimport './sigmoid';\nimport './sign';\nimport './sin';\nimport './sinh';\nimport './slice';\nimport './softmax';\nimport './softplus';\nimport './space_to_batch_nd';\nimport './split';\nimport './sqrt';\nimport './square';\nimport './squared_difference';\nimport './squared_difference_strict';\nimport './squeeze';\nimport './stack';\nimport './step';\nimport './strided_slice';\nimport './sub_strict';\nimport './sub';\nimport './sum';\nimport './tan';\nimport './tanh';\nimport './tile';\nimport './to_bool';\nimport './to_float';\nimport './to_int';\nimport './topk';\nimport './transpose';\nimport './unique';\nimport './unsorted_segment_sum';\nimport './unstack';\nimport './where';\nimport './zeros_like';\n//# sourceMappingURL=register_all_chained_ops.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// Required side effectful code.\nimport './base_side_effects';\n// All exports from this package should be in base.\nexport * from './base';\n// Register all the gradients.\nimport './register_all_gradients';\n// Import all op chainers and add type info to Tensor.\nimport './public/chained_ops/register_all_chained_ops';\n//# sourceMappingURL=index.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport { backend } from '@tensorflow/tfjs-core';\nlet _epsilon;\n/**\n * Returns the value of the fuzz factor used in numeric expressions.\n */\nexport function epsilon() {\n if (_epsilon == null) {\n _epsilon = backend().epsilon();\n }\n return _epsilon;\n}\n/**\n * Sets the value of the fuzz factor used in numeric expressions.\n * @param e New value of epsilon.\n */\nexport function setEpsilon(e) {\n _epsilon = e;\n}\n/**\n * Returns the default image data format convention.\n */\nexport function imageDataFormat() {\n return 'channelsLast';\n}\n//# sourceMappingURL=common.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * 
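The `epsilon()` helper above lazily caches the active backend's fuzz factor for use inside tfjs-layers; it is internal to the layers bundle, but the same backend value should be reachable via `tf.backend().epsilon()`. A hedged sketch of the typical use, guarding a division against zero (the tensor values are illustrative):

import * as tf from '@tensorflow/tfjs';

const eps = tf.backend().epsilon();         // backend-dependent, e.g. 1e-7 for float32 backends
const x = tf.tensor1d([0, 3, 1]);
const normalized = x.div(x.sum().add(eps)); // eps keeps the denominator away from zero
normalized.print();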
license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Explicit error types.\n *\n * See the following link for more information about why the code includes\n * calls to setPrototypeOf:\n *\n * https://github.com/Microsoft/TypeScript-wiki/blob/master/Breaking-Changes.md#extending-built-ins-like-error-array-and-map-may-no-longer-work\n */\n// tslint:enable\n/**\n * Equivalent of Python's AttributeError.\n */\nexport class AttributeError extends Error {\n constructor(message) {\n super(message);\n // Set the prototype explicitly.\n Object.setPrototypeOf(this, AttributeError.prototype);\n }\n}\n/**\n * Equivalent of Python's RuntimeError.\n */\nexport class RuntimeError extends Error {\n constructor(message) {\n super(message);\n // Set the prototype explicitly.\n Object.setPrototypeOf(this, RuntimeError.prototype);\n }\n}\n/**\n * Equivalent of Python's ValueError.\n */\nexport class ValueError extends Error {\n constructor(message) {\n super(message);\n // Set the prototype explicitly.\n Object.setPrototypeOf(this, ValueError.prototype);\n }\n}\n/**\n * Equivalent of Python's NotImplementedError.\n */\nexport class NotImplementedError extends Error {\n constructor(message) {\n super(message);\n // Set the prototype explicitly.\n Object.setPrototypeOf(this, NotImplementedError.prototype);\n }\n}\n/**\n * Equivalent of Python's AssertionError.\n */\nexport class AssertionError extends Error {\n constructor(message) {\n super(message);\n // Set the prototype explicitly.\n Object.setPrototypeOf(this, AssertionError.prototype);\n }\n}\n/**\n * Equivalent of Python's IndexError.\n */\nexport class IndexError extends Error {\n constructor(message) {\n super(message);\n // Set the prototype explicitly.\n Object.setPrototypeOf(this, IndexError.prototype);\n }\n}\n//# sourceMappingURL=errors.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* Original source: utils/generic_utils.py */\nimport { util } from '@tensorflow/tfjs-core';\nimport { AssertionError, ValueError } from '../errors';\n// tslint:enable\n/**\n * If `value` is an Array, equivalent to Python's `value * numValues`.\n * If `value` is not an Array, equivalent to Python's `[value] * numValues`\n */\n// tslint:disable-next-line:no-any\nexport function pyListRepeat(value, numValues) {\n if (Array.isArray(value)) {\n // tslint:disable-next-line:no-any\n let newArray = [];\n for (let i = 0; i < numValues; i++) {\n newArray = newArray.concat(value);\n }\n return newArray;\n }\n else {\n const newArray = new Array(numValues);\n newArray.fill(value);\n return newArray;\n }\n}\nexport function assert(val, message) {\n if (!val) {\n throw new AssertionError(message);\n }\n}\n/**\n * Count the number of elements of the `array` that are equal to `reference`.\n */\nexport function count(array, refernce) {\n let counter = 0;\n for (const item of array) {\n if (item === refernce) {\n counter++;\n }\n }\n return counter;\n}\n/**\n * If an array is of length 1, just return the first element. 
Otherwise, return\n * the full array.\n * @param tensors\n */\nexport function singletonOrArray(xs) {\n if (xs.length === 1) {\n return xs[0];\n }\n return xs;\n}\n/**\n * Normalizes a list/tensor into a list.\n *\n * If a tensor is passed, we return\n * a list of size 1 containing the tensor.\n *\n * @param x target object to be normalized.\n */\n// tslint:disable-next-line:no-any\nexport function toList(x) {\n if (Array.isArray(x)) {\n return x;\n }\n return [x];\n}\n/**\n * Generate a UID for a list\n */\n// tslint:disable-next-line:no-any\nexport function objectListUid(objs) {\n const objectList = toList(objs);\n let retVal = '';\n for (const obj of objectList) {\n if (obj.id == null) {\n throw new ValueError(`Object ${obj} passed to objectListUid without an id`);\n }\n if (retVal !== '') {\n retVal = retVal + ', ';\n }\n retVal = `${retVal}${Math.abs(obj.id)}`;\n }\n return retVal;\n}\n/**\n * Converts string to snake-case.\n * @param name\n */\nexport function toSnakeCase(name) {\n const intermediate = name.replace(/(.)([A-Z][a-z0-9]+)/g, '$1_$2');\n const insecure = intermediate.replace(/([a-z])([A-Z])/g, '$1_$2').toLowerCase();\n /*\n If the class is private the name starts with \"_\" which is not secure\n for creating scopes. We prefix the name with \"private\" in this case.\n */\n if (insecure[0] !== '_') {\n return insecure;\n }\n return 'private' + insecure;\n}\nexport function toCamelCase(identifier) {\n // quick return for empty string or single character strings\n if (identifier.length <= 1) {\n return identifier;\n }\n // Check for the underscore indicating snake_case\n if (identifier.indexOf('_') === -1) {\n return identifier;\n }\n return identifier.replace(/[_]+(\\w|$)/g, (m, p1) => p1.toUpperCase());\n}\n// tslint:disable-next-line:no-any\nlet _GLOBAL_CUSTOM_OBJECTS = {};\nexport function serializeKerasObject(instance) {\n if (instance === null || instance === undefined) {\n return null;\n }\n const dict = {};\n dict['className'] = instance.getClassName();\n dict['config'] = instance.getConfig();\n return dict;\n}\n/**\n * Replace ndarray-style scalar objects in serialization objects with numbers.\n *\n * Background: In some versions of tf.keras, certain scalar values in the HDF5\n * model save file can be serialized as: `{'type': 'ndarray', 'value': num}`,\n * where in `num` is a plain number. 
This method converts such serialization\n * to a `number`.\n *\n * @param config The keras-format serialization object to be processed\n * (in place).\n */\nfunction convertNDArrayScalarsInConfig(config) {\n if (config == null || typeof config !== 'object') {\n return;\n }\n else if (Array.isArray(config)) {\n config.forEach(configItem => convertNDArrayScalarsInConfig(configItem));\n }\n else {\n const fields = Object.keys(config);\n for (const field of fields) {\n const value = config[field];\n if (value != null && typeof value === 'object') {\n if (!Array.isArray(value) && value['type'] === 'ndarray' &&\n typeof value['value'] === 'number') {\n config[field] = value['value'];\n }\n else {\n convertNDArrayScalarsInConfig(value);\n }\n }\n }\n }\n}\n/**\n * Deserialize a saved Keras Object\n * @param identifier either a string ID or a saved Keras dictionary\n * @param moduleObjects a list of Python class names to object constructors\n * @param customObjects a list of Python class names to object constructors\n * @param printableModuleName debug text for the object being reconstituted\n * @param fastWeightInit Optional flag to use fast weight initialization\n * during deserialization. This is applicable to cases in which\n * the initialization will be immediately overwritten by loaded weight\n * values. Default: `false`.\n * @returns a TensorFlow.js Layers object\n */\n// tslint:disable:no-any\nexport function deserializeKerasObject(identifier, moduleObjects = {}, customObjects = {}, printableModuleName = 'object', fastWeightInit = false) {\n // tslint:enable\n if (typeof identifier === 'string') {\n const functionName = identifier;\n let fn;\n if (functionName in customObjects) {\n fn = customObjects[functionName];\n }\n else if (functionName in _GLOBAL_CUSTOM_OBJECTS) {\n fn = _GLOBAL_CUSTOM_OBJECTS[functionName];\n }\n else {\n fn = moduleObjects[functionName];\n if (fn == null) {\n throw new ValueError(`Unknown ${printableModuleName}: ${identifier}. ` +\n `This may be due to one of the following reasons:\\n` +\n `1. The ${printableModuleName} is defined in Python, in which ` +\n `case it needs to be ported to TensorFlow.js or your JavaScript ` +\n `code.\\n` +\n `2. The custom ${printableModuleName} is defined in JavaScript, ` +\n `but is not registered properly with ` +\n `tf.serialization.registerClass().`);\n // TODO(cais): Add link to tutorial page on custom layers.\n }\n }\n return fn;\n }\n else {\n // In this case we are dealing with a Keras config dictionary.\n const config = identifier;\n if (config['className'] == null || config['config'] == null) {\n throw new ValueError(`${printableModuleName}: Improper config format: ` +\n `${JSON.stringify(config)}.\\n` +\n `'className' and 'config' must set.`);\n }\n const className = config['className'];\n let cls, fromConfig;\n if (className in customObjects) {\n [cls, fromConfig] = customObjects[className];\n }\n else if (className in _GLOBAL_CUSTOM_OBJECTS) {\n [cls, fromConfig] = _GLOBAL_CUSTOM_OBJECTS['className'];\n }\n else if (className in moduleObjects) {\n [cls, fromConfig] = moduleObjects[className];\n }\n if (cls == null) {\n throw new ValueError(`Unknown ${printableModuleName}: ${className}. ` +\n `This may be due to one of the following reasons:\\n` +\n `1. The ${printableModuleName} is defined in Python, in which ` +\n `case it needs to be ported to TensorFlow.js or your JavaScript ` +\n `code.\\n` +\n `2. 
The custom ${printableModuleName} is defined in JavaScript, ` +\n `but is not registered properly with ` +\n `tf.serialization.registerClass().`);\n // TODO(cais): Add link to tutorial page on custom layers.\n }\n if (fromConfig != null) {\n // Porting notes: Instead of checking to see whether fromConfig accepts\n // customObjects, we create a customObjects dictionary and tack it on to\n // config['config'] as config['config'].customObjects. Objects can use it,\n // if they want.\n // tslint:disable-next-line:no-any\n const customObjectsCombined = {};\n for (const key of Object.keys(_GLOBAL_CUSTOM_OBJECTS)) {\n customObjectsCombined[key] = _GLOBAL_CUSTOM_OBJECTS[key];\n }\n for (const key of Object.keys(customObjects)) {\n customObjectsCombined[key] = customObjects[key];\n }\n // Add the customObjects to config\n const nestedConfig = config['config'];\n nestedConfig['customObjects'] = customObjectsCombined;\n const backupCustomObjects = Object.assign({}, _GLOBAL_CUSTOM_OBJECTS);\n for (const key of Object.keys(customObjects)) {\n _GLOBAL_CUSTOM_OBJECTS[key] = customObjects[key];\n }\n convertNDArrayScalarsInConfig(config['config']);\n const returnObj = fromConfig(cls, config['config'], customObjects, fastWeightInit);\n _GLOBAL_CUSTOM_OBJECTS = Object.assign({}, backupCustomObjects);\n return returnObj;\n }\n else {\n // Then `cls` may be a function returning a class.\n // In this case by convention `config` holds\n // the kwargs of the function.\n const backupCustomObjects = Object.assign({}, _GLOBAL_CUSTOM_OBJECTS);\n for (const key of Object.keys(customObjects)) {\n _GLOBAL_CUSTOM_OBJECTS[key] = customObjects[key];\n }\n // In python this is **config['config'], for tfjs-layers we require\n // classes that use this fall-through construction method to take\n // a config interface that mimics the expansion of named parameters.\n const returnObj = new cls(config['config']);\n _GLOBAL_CUSTOM_OBJECTS = Object.assign({}, backupCustomObjects);\n return returnObj;\n }\n }\n}\n/**\n * Compares two numbers for sorting.\n * @param a\n * @param b\n */\nexport function numberCompare(a, b) {\n return (a < b) ? -1 : ((a > b) ? 
1 : 0);\n}\n/**\n * Comparison of two numbers for reverse sorting.\n * @param a\n * @param b\n */\nexport function reverseNumberCompare(a, b) {\n return -1 * numberCompare(a, b);\n}\n/**\n * Convert a string into the corresponding DType.\n * @param dtype\n * @returns An instance of DType.\n */\nexport function stringToDType(dtype) {\n switch (dtype) {\n case 'float32':\n return 'float32';\n default:\n throw new ValueError(`Invalid dtype: ${dtype}`);\n }\n}\n/**\n * Test the element-by-element equality of two Arrays of strings.\n * @param xs First array of strings.\n * @param ys Second array of strings.\n * @returns Wether the two arrays are all equal, element by element.\n */\nexport function stringsEqual(xs, ys) {\n if (xs == null || ys == null) {\n return xs === ys;\n }\n if (xs.length !== ys.length) {\n return false;\n }\n for (let i = 0; i < xs.length; ++i) {\n if (xs[i] !== ys[i]) {\n return false;\n }\n }\n return true;\n}\n/**\n * Get the unique elements of an array.\n * @param xs Array.\n * @returns An Array consisting of the unique elements in `xs`.\n */\nexport function unique(xs) {\n if (xs == null) {\n return xs;\n }\n const out = [];\n // TODO(cais): Maybe improve performance by sorting.\n for (const x of xs) {\n if (out.indexOf(x) === -1) {\n out.push(x);\n }\n }\n return out;\n}\n/**\n * Determine if an Object is empty (i.e., does not have own properties).\n * @param obj Object\n * @returns Whether the Object is empty.\n * @throws ValueError: If object is `null` or `undefined`.\n */\nexport function isObjectEmpty(obj) {\n if (obj == null) {\n throw new ValueError(`Invalid value in obj: ${JSON.stringify(obj)}`);\n }\n for (const key in obj) {\n if (obj.hasOwnProperty(key)) {\n return false;\n }\n }\n return true;\n}\n/**\n * Helper function used to build type union/enum run-time checkers.\n * @param values The list of allowed values.\n * @param label A string name for the type\n * @param value The value to test.\n * @throws ValueError: If the value is not in values nor `undefined`/`null`.\n */\nexport function checkStringTypeUnionValue(values, label, value) {\n if (value == null) {\n return;\n }\n if (values.indexOf(value) < 0) {\n throw new ValueError(`${value} is not a valid ${label}. Valid values are ${values} or null/undefined.`);\n }\n}\n/**\n * Helper function for verifying the types of inputs.\n *\n * Ensures that the elements of `x` are all of type `expectedType`.\n * Also verifies that the length of `x` is within bounds.\n *\n * @param x Object to test.\n * @param expectedType The string expected type of all of the elements in the\n * Array.\n * @param minLength Return false if x.length is less than this.\n * @param maxLength Return false if x.length is greater than this.\n * @returns true if and only if `x` is an `Array` with\n * length >= `minLength` and <= `maxLength`.\n */\n// tslint:disable:no-any\nexport function checkArrayTypeAndLength(x, expectedType, minLength = 0, maxLength = Infinity) {\n assert(minLength >= 0);\n assert(maxLength >= minLength);\n return (Array.isArray(x) && x.length >= minLength && x.length <= maxLength &&\n x.every(e => typeof e === expectedType));\n}\n// tslint:enable:no-any\n/**\n * Assert that a value or an array of value are positive integer.\n *\n * @param value The value being asserted on. 
May be a single number or an array\n * of numbers.\n * @param name Name of the value, used to make the error message.\n */\nexport function assertPositiveInteger(value, name) {\n if (Array.isArray(value)) {\n util.assert(value.length > 0, () => `${name} is unexpectedly an empty array.`);\n value.forEach((v, i) => assertPositiveInteger(v, `element ${i + 1} of ${name}`));\n }\n else {\n util.assert(Number.isInteger(value) && value > 0, () => `Expected ${name} to be a positive integer, but got ` +\n `${formatAsFriendlyString(value)}.`);\n }\n}\n/**\n * Format a value into a display-friendly, human-readable fashion.\n *\n * - `null` is formatted as `'null'`\n * - Strings are formated with flanking pair of quotes.\n * - Arrays are formatted with flanking pair of square brackets.\n *\n * @param value The value to display.\n * @return Formatted string.\n */\n// tslint:disable-next-line:no-any\nexport function formatAsFriendlyString(value) {\n if (value === null) {\n return 'null';\n }\n else if (Array.isArray(value)) {\n return '[' + value.map(v => formatAsFriendlyString(v)).join(',') + ']';\n }\n else if (typeof value === 'string') {\n return `\"${value}\"`;\n }\n else {\n return `${value}`;\n }\n}\n/**\n * Returns a function `f2` (decorator) which wraps the original function\n * `f`. `f2` guarantees that `f` can be called at most once\n * every `waitMs` ms. If `f2` is called more often, it will return\n * the last returned result of `f`.\n *\n * @param f The original function `f` to wrap.\n * @param waitMs The time between two consecutive calls to `f` in ms.\n */\nexport function debounce(f, waitMs) {\n let lastTime = util.now();\n let lastResult;\n const f2 = (...args) => {\n const now = util.now();\n if (now - lastTime < waitMs) {\n return lastResult;\n }\n lastTime = now;\n lastResult = f(...args);\n return lastResult;\n };\n return f2;\n}\n/**\n * Returns the fusable activation given a layers identifier.\n *\n * @param activationName The layers identifier string.\n * @return The name of the fusable activation.\n */\nexport function mapActivationToFusedKernel(activationName) {\n if (activationName === 'relu') {\n return 'relu';\n }\n if (activationName === 'linear') {\n return 'linear';\n }\n if (activationName === 'elu') {\n return 'elu';\n }\n return null;\n}\n/**\n * Returns the cartesian product of sets of values.\n * This works the same as itertools.product in Python.\n *\n * Example:\n *\n * filters = [128, 256, 512]\n * paddings = ['same', 'valid']\n *\n * product = [ [128, 'same'], [128, 'valid'], [256, 'same'], [256, 'valid'],\n * [512, 'same'], [512, 'valid']]\n *\n * @param arrayOfValues List/array of values.\n * @return The cartesian product.\n */\nexport function getCartesianProductOfValues(...arrayOfValues) {\n assert(arrayOfValues.length > 0, 'arrayOfValues is empty');\n for (const values of arrayOfValues) {\n assert(Array.isArray(values), 'one of the values is not an array');\n assert(values.length > 0, 'one of the values is empty');\n }\n return arrayOfValues.reduce((products, values) => {\n if (products.length === 0) {\n return values.map(value => [value]);\n }\n return values\n .map(value => {\n return products.map((prevValue) => [...prevValue, value]);\n })\n .reduce((flattenedProduct, unflattenedProduct) => {\n return flattenedProduct.concat(unflattenedProduct);\n }, []);\n }, []);\n}\n//# sourceMappingURL=generic_utils.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the 
LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* Original source: keras/contraints.py */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { serialization, tidy } from '@tensorflow/tfjs-core';\nimport { epsilon } from './backend/common';\nimport { deserializeKerasObject, serializeKerasObject } from './utils/generic_utils';\n/**\n * Helper function used by many of the Constraints to find the L2Norms.\n */\nfunction calcL2Norms(w, axis) {\n return tidy(() => tfc.sqrt(tfc.sum(tfc.mul(w, w), axis, true)));\n}\n/**\n * Base class for functions that impose constraints on weight values\n *\n * @doc {\n * heading: 'Constraints',\n * subheading: 'Classes',\n * namespace: 'constraints'\n * }\n */\nexport class Constraint extends serialization.Serializable {\n getConfig() {\n return {};\n }\n}\nexport class MaxNorm extends Constraint {\n constructor(args) {\n super();\n this.defaultMaxValue = 2;\n this.defaultAxis = 0;\n this.maxValue =\n args.maxValue != null ? args.maxValue : this.defaultMaxValue;\n this.axis = args.axis != null ? args.axis : this.defaultAxis;\n }\n apply(w) {\n return tidy(() => {\n const norms = calcL2Norms(w, this.axis);\n const desired = tfc.clipByValue(norms, 0, this.maxValue);\n return tfc.mul(w, tfc.div(desired, tfc.add(epsilon(), norms)));\n });\n }\n getConfig() {\n return { maxValue: this.maxValue, axis: this.axis };\n }\n}\n/** @nocollapse */\nMaxNorm.className = 'MaxNorm';\nserialization.registerClass(MaxNorm);\nexport class UnitNorm extends Constraint {\n constructor(args) {\n super();\n this.defaultAxis = 0;\n this.axis = args.axis != null ? args.axis : this.defaultAxis;\n }\n apply(w) {\n return tidy(() => tfc.div(w, tfc.add(epsilon(), calcL2Norms(w, this.axis))));\n }\n getConfig() {\n return { axis: this.axis };\n }\n}\n/** @nocollapse */\nUnitNorm.className = 'UnitNorm';\nserialization.registerClass(UnitNorm);\nexport class NonNeg extends Constraint {\n apply(w) {\n return tfc.relu(w);\n }\n}\n/** @nocollapse */\nNonNeg.className = 'NonNeg';\nserialization.registerClass(NonNeg);\nexport class MinMaxNorm extends Constraint {\n constructor(args) {\n super();\n this.defaultMinValue = 0.0;\n this.defaultMaxValue = 1.0;\n this.defaultRate = 1.0;\n this.defaultAxis = 0;\n this.minValue =\n args.minValue != null ? args.minValue : this.defaultMinValue;\n this.maxValue =\n args.maxValue != null ? args.maxValue : this.defaultMaxValue;\n this.rate = args.rate != null ? args.rate : this.defaultRate;\n this.axis = args.axis != null ? 
args.axis : this.defaultAxis;\n }\n apply(w) {\n return tidy(() => {\n const norms = calcL2Norms(w, this.axis);\n const desired = tfc.add(tfc.mul(this.rate, tfc.clipByValue(norms, this.minValue, this.maxValue)), tfc.mul(1.0 - this.rate, norms));\n return tfc.mul(w, tfc.div(desired, tfc.add(epsilon(), norms)));\n });\n }\n getConfig() {\n return {\n minValue: this.minValue,\n maxValue: this.maxValue,\n rate: this.rate,\n axis: this.axis\n };\n }\n}\n/** @nocollapse */\nMinMaxNorm.className = 'MinMaxNorm';\nserialization.registerClass(MinMaxNorm);\n// Maps the JavaScript-like identifier keys to the corresponding registry\n// symbols.\nexport const CONSTRAINT_IDENTIFIER_REGISTRY_SYMBOL_MAP = {\n 'maxNorm': 'MaxNorm',\n 'minMaxNorm': 'MinMaxNorm',\n 'nonNeg': 'NonNeg',\n 'unitNorm': 'UnitNorm'\n};\nexport function serializeConstraint(constraint) {\n return serializeKerasObject(constraint);\n}\nexport function deserializeConstraint(config, customObjects = {}) {\n return deserializeKerasObject(config, serialization.SerializationMap.getMap().classNameMap, customObjects, 'constraint');\n}\nexport function getConstraint(identifier) {\n if (identifier == null) {\n return null;\n }\n if (typeof identifier === 'string') {\n const className = identifier in CONSTRAINT_IDENTIFIER_REGISTRY_SYMBOL_MAP ?\n CONSTRAINT_IDENTIFIER_REGISTRY_SYMBOL_MAP[identifier] :\n identifier;\n const config = { className, config: {} };\n return deserializeConstraint(config);\n }\n else if (identifier instanceof Constraint) {\n return identifier;\n }\n else {\n return deserializeConstraint(identifier);\n }\n}\n//# sourceMappingURL=constraints.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n// tslint:disable-next-line:max-line-length\nimport { MaxNorm, MinMaxNorm, NonNeg, UnitNorm } from './constraints';\n/**\n * MaxNorm weight constraint.\n *\n * Constrains the weights incident to each hidden unit\n * to have a norm less than or equal to a desired value.\n *\n * References\n * - [Dropout: A Simple Way to Prevent Neural Networks from Overfitting\n * Srivastava, Hinton, et al.\n * 2014](http://www.cs.toronto.edu/~rsalakhu/papers/srivastava14a.pdf)\n *\n * @doc {heading: 'Constraints',namespace: 'constraints'}\n */\nexport function maxNorm(args) {\n return new MaxNorm(args);\n}\n/**\n * Constrains the weights incident to each hidden unit to have unit norm.\n *\n * @doc {heading: 'Constraints', namespace: 'constraints'}\n */\nexport function unitNorm(args) {\n return new UnitNorm(args);\n}\n/**\n * Constains the weight to be non-negative.\n *\n * @doc {heading: 'Constraints', namespace: 'constraints'}\n */\nexport function nonNeg() {\n return new NonNeg();\n}\n/** @doc {heading: 'Constraints', namespace: 'constraints'} */\nexport function minMaxNorm(config) {\n return new MinMaxNorm(config);\n}\n//# sourceMappingURL=exports_constraints.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nexport const VALID_DATA_FORMAT_VALUES = ['channelsFirst', 'channelsLast'];\nexport const VALID_PADDING_MODE_VALUES = ['valid', 'same', 
'causal'];\nexport const VALID_POOL_MODE_VALUES = ['max', 'avg'];\nexport const VALID_BIDIRECTIONAL_MERGE_MODES = ['sum', 'mul', 'concat', 'ave'];\nexport const VALID_SAMPLE_WEIGHT_MODES = ['temporal'];\n//# sourceMappingURL=common.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Common functions for TensorFlow.js Layers.\n */\nimport { VALID_DATA_FORMAT_VALUES, VALID_PADDING_MODE_VALUES, VALID_POOL_MODE_VALUES } from './keras_format/common';\nimport { checkStringTypeUnionValue } from './utils/generic_utils';\n// A map from the requested scoped name of a Tensor to the number of Tensors\n// wanting that name so far. This allows enforcing name uniqueness by appending\n// an incrementing index, e.g. scope/name, scope/name_1, scope/name_2, etc.\nconst nameMap = new Map();\nexport function checkDataFormat(value) {\n checkStringTypeUnionValue(VALID_DATA_FORMAT_VALUES, 'DataFormat', value);\n}\nexport function checkPaddingMode(value) {\n checkStringTypeUnionValue(VALID_PADDING_MODE_VALUES, 'PaddingMode', value);\n}\nexport function checkPoolMode(value) {\n checkStringTypeUnionValue(VALID_POOL_MODE_VALUES, 'PoolMode', value);\n}\nconst _nameScopeStack = [];\nconst _nameScopeDivider = '/';\n/**\n * Enter namescope, which can be nested.\n */\nexport function nameScope(name, fn) {\n _nameScopeStack.push(name);\n try {\n const val = fn();\n _nameScopeStack.pop();\n return val;\n }\n catch (e) {\n _nameScopeStack.pop();\n throw e;\n }\n}\n/**\n * Get the current namescope as a flat, concatenated string.\n */\nfunction currentNameScopePrefix() {\n if (_nameScopeStack.length === 0) {\n return '';\n }\n else {\n return _nameScopeStack.join(_nameScopeDivider) + _nameScopeDivider;\n }\n}\n/**\n * Get the name a Tensor (or Variable) would have if not uniqueified.\n * @param tensorName\n * @return Scoped name string.\n */\nexport function getScopedTensorName(tensorName) {\n if (!isValidTensorName(tensorName)) {\n throw new Error('Not a valid tensor name: \\'' + tensorName + '\\'');\n }\n return currentNameScopePrefix() + tensorName;\n}\n/**\n * Get unique names for Tensors and Variables.\n * @param scopedName The fully-qualified name of the Tensor, i.e. as produced by\n * `getScopedTensorName()`.\n * @return A unique version of the given fully scoped name.\n * If this is the first time that the scoped name is seen in this session,\n * then the given `scopedName` is returned unaltered. 
If the same name is\n * seen again (producing a collision), an incrementing suffix is added to the\n * end of the name, so it takes the form 'scope/name_1', 'scope/name_2', etc.\n */\nexport function getUniqueTensorName(scopedName) {\n if (!isValidTensorName(scopedName)) {\n throw new Error('Not a valid tensor name: \\'' + scopedName + '\\'');\n }\n if (!nameMap.has(scopedName)) {\n nameMap.set(scopedName, 0);\n }\n const index = nameMap.get(scopedName);\n nameMap.set(scopedName, nameMap.get(scopedName) + 1);\n if (index > 0) {\n const result = `${scopedName}_${index}`;\n // Mark the composed name as used in case someone wants\n // to call getUniqueTensorName(\"name_1\").\n nameMap.set(result, 1);\n return result;\n }\n else {\n return scopedName;\n }\n}\nconst tensorNameRegex = new RegExp(/^[A-Za-z0-9][-A-Za-z0-9\\._\\/]*$/);\n/**\n * Determine whether a string is a valid tensor name.\n * @param name\n * @returns A Boolean indicating whether `name` is a valid tensor name.\n */\nexport function isValidTensorName(name) {\n return !!name.match(tensorNameRegex);\n}\n//# sourceMappingURL=common.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Math utility functions.\n *\n * This file contains some frequently used math function that operates on\n * number[] or Float32Array and return a number. Many of these functions are\n * not-so-thick wrappers around TF.js Core functions. But they offer the\n * convenience of\n * 1) not having to convert the inputs into Tensors,\n * 2) not having to convert the returned Tensors to numbers.\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { scalar, tensor1d } from '@tensorflow/tfjs-core';\nimport { ValueError } from '../errors';\n/**\n * Determine if a number is an integer.\n */\nexport function isInteger(x) {\n return x === parseInt(x.toString(), 10);\n}\n/**\n * Calculate the product of an array of numbers.\n * @param array The array to calculate the product over.\n * @param begin Beginning index, inclusive.\n * @param end Ending index, exclusive.\n * @return The product.\n */\nexport function arrayProd(array, begin, end) {\n if (begin == null) {\n begin = 0;\n }\n if (end == null) {\n end = array.length;\n }\n let prod = 1;\n for (let i = begin; i < end; ++i) {\n prod *= array[i];\n }\n return prod;\n}\n/**\n * A helper function transforms the two input types to an instance of Tensor1D,\n * so the return value can be fed directly into various TF.js Core functions.\n * @param array\n */\nfunction toArray1D(array) {\n array = Array.isArray(array) ? 
new Float32Array(array) : array;\n return tensor1d(array);\n}\n/**\n * Compute minimum value.\n * @param array\n * @return minimum value.\n */\nexport function min(array) {\n return tfc.min(toArray1D(array)).dataSync()[0];\n}\n/**\n * Compute maximum value.\n * @param array\n * @return maximum value\n */\nexport function max(array) {\n return tfc.max(toArray1D(array)).dataSync()[0];\n}\n/**\n * Compute sum of array.\n * @param array\n * @return The sum.\n */\nexport function sum(array) {\n return tfc.sum(toArray1D(array)).dataSync()[0];\n}\n/**\n * Compute mean of array.\n * @param array\n * @return The mean.\n */\nexport function mean(array) {\n return sum(array) / array.length;\n}\n/**\n * Compute variance of array.\n * @param array\n * @return The variance.\n */\nexport function variance(array) {\n const demeaned = tfc.sub(toArray1D(array), scalar(mean(array)));\n const sumSquare = tfc.sum(tfc.mul(demeaned, demeaned)).dataSync()[0];\n return sumSquare / array.length;\n}\n/**\n * Compute median of array.\n * @param array\n * @return The median value.\n */\nexport function median(array) {\n const arraySorted = array.slice().sort((a, b) => a - b);\n const lowIdx = Math.floor((arraySorted.length - 1) / 2);\n const highIdx = Math.ceil((arraySorted.length - 1) / 2);\n if (lowIdx === highIdx) {\n return arraySorted[lowIdx];\n }\n return (arraySorted[lowIdx] + arraySorted[highIdx]) / 2;\n}\n/**\n * Generate an array of integers in [begin, end).\n * @param begin Beginning integer, inclusive.\n * @param end Ending integer, exclusive.\n * @returns Range array.\n * @throws ValueError, iff `end` < `begin`.\n */\nexport function range(begin, end) {\n if (end < begin) {\n throw new ValueError(`end (${end}) < begin (${begin}) is forbidden.`);\n }\n const out = [];\n for (let i = begin; i < end; ++i) {\n out.push(i);\n }\n return out;\n}\n//# sourceMappingURL=math_utils.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * deeplearn.js backend.\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { onesLike as coreOnesLike, scalar, tensor1d, tidy, where, zerosLike as coreZerosLike } from '@tensorflow/tfjs-core';\nimport { checkDataFormat } from '../common';\nimport { NotImplementedError, ValueError } from '../errors';\nimport * as math_utils from '../utils/math_utils';\nimport { imageDataFormat } from './common';\n// tslint:enable\n/* Setting and getting backend from deeplearn.js. */\n// Default deeplearn.js backend is WebGL (GPU).\nlet backend = 'webgl';\nexport function setBackend(requestedBackend) {\n tfc.setBackend(requestedBackend);\n backend = requestedBackend;\n}\nexport function getBackend() {\n return backend;\n}\n/**\n * Indicates whether the backend is operating symbolically.\n *\n * This function will be used to determine how to interpret user code. 
If\n * it returns true, calls to the backend construct a symbolic graph; if\n * it returns false, calls to the backend execute immediately.\n */\nexport function isBackendSymbolic() {\n return false;\n}\n/**\n * Get the number of elements in a Tensor.\n * @param x The Tensor.\n * @return Number of elements in `x`.\n */\nexport function countParams(x) {\n const shape = x.shape;\n if (shape.length > 0) {\n return shape.reduce((a, b) => a * b);\n }\n else {\n // Scalar.\n return 1;\n }\n}\n/**\n * Casts a tensor to a different dtype and returns it.\n * @param x Input tensor.\n * @param dtype String: 'float32'|'int32'|'bool'.\n * @returns Tensor of the specified `dtype`.\n */\nexport function cast(x, dtype) {\n return x.asType(dtype);\n}\n/**\n * Adds a 1-sized dimension at index \"axis\".\n * @param x Input tensor.\n * @param axis Position where to add the new axis.\n * @returns Result of the dimension expansion.\n */\nexport function expandDims(x, axis = -1) {\n const outShape = x.shape.slice();\n if (axis < 0) {\n axis = outShape.length + axis + 1;\n }\n outShape.splice(axis, 0, 1);\n return x.reshape(outShape);\n}\n/**\n * Repeats a 2D tensor.\n *\n * If `x` has shape `[samples, dim]` and `n` is 2, for example, the output\n * will have shape `[samples, 2, dim]`.\n *\n * @param x Input tensor.\n * @param n Integer, number of times to repeat.\n * @returns The result of the repeat operation.\n * @throws ValueError: If input tensor is not 2D.\n */\nexport function repeat(x, n) {\n return tidy(() => {\n if (x.shape.length !== 2) {\n throw new ValueError(`repeat() expects a rank-2 tensor, but received a ` +\n `rank-${x.shape.length} tensor.`);\n }\n const y = expandDims(x, 1);\n return tile(y, [1, n, 1]);\n });\n}\n/**\n * Flatten a Tensor into 1D.\n * @param x Input tensor.\n * @return The result of the flattening `x`.\n */\nexport function flatten(x) {\n const newShape = [math_utils.arrayProd(x.shape)];\n return x.reshape(newShape);\n}\n/**\n * Turn a nD tensor into a 2D tensor with same 0th dimension.\n * In other words, it flattens each data samples of a batch.\n *\n * @param x The tensor to flatten. The rank of this tensor is required to be 2\n * or higher.\n * @return The result of the flattening.\n */\nexport function batchFlatten(x) {\n if (x.rank <= 1) {\n throw new ValueError(`batchFlatten requires a minimum rank of 2. 
Got rank: ${x.rank}.`);\n }\n const newShape = [x.shape[0], math_utils.arrayProd(x.shape, 1)];\n return x.reshape(newShape);\n}\n/**\n * Do slicing along the first axis.\n * @param array input `tf.Tensor`.\n * @param start starting index, inclusive.\n * @param size size of the slice along the first axis.\n * @returns result of the slicing.\n * @throws ValueError: If `array` is of an unsupported subtype of `tf.Tensor`.\n */\nexport function sliceAlongFirstAxis(array, start, size) {\n return tidy(() => {\n switch (array.rank) {\n case 1:\n return tfc.slice1d(array, start, size);\n case 2:\n return tfc.slice2d(array, [start, 0], [size, array.shape[1]]);\n case 3:\n return tfc.slice3d(array, [start, 0, 0], [size, array.shape[1], array.shape[2]]);\n case 4:\n return tfc.slice4d(array, [start, 0, 0, 0], [size, array.shape[1], array.shape[2], array.shape[3]]);\n case 5:\n return tfc.slice(array, [start, 0, 0, 0, 0], [\n size, array.shape[1], array.shape[2], array.shape[3], array.shape[4]\n ]);\n case 6:\n return tfc.slice(array, [start, 0, 0, 0, 0, 0], [\n size, array.shape[1], array.shape[2], array.shape[3], array.shape[4],\n array.shape[5]\n ]);\n default:\n throw new ValueError(`sliceAlongFirstAxis() received an unsupported tensor rank: ` +\n `${array.rank}`);\n }\n });\n}\n/**\n * Do slicing along the last axis.\n * @param array input `tf.Tensor`.\n * @param start starting index, inclusive.\n * @param size size of the slice along the last axis.\n * @returns result of the slicing.\n * @throws ValueError: If `array` is of an unsupported subtype of `tf.Tensor`.\n */\nexport function sliceAlongLastAxis(array, start, size) {\n return tidy(() => {\n switch (array.rank) {\n case 1:\n return tfc.slice1d(array, start, size);\n case 2:\n return tfc.slice2d(array, [0, start], [array.shape[0], size]);\n case 3:\n return tfc.slice3d(array, [0, 0, start], [array.shape[0], array.shape[1], size]);\n case 4:\n return tfc.slice4d(array, [0, 0, 0, start], [array.shape[0], array.shape[1], array.shape[2], size]);\n default:\n throw new ValueError(`sliceAlongLastAxis() received an unsupported tensor rank: ` +\n `${array.rank}`);\n }\n });\n}\n/**\n * Do slicing along the sepcified axis.\n * @param array input `tf.Tensor`.\n * @param start starting index, inclusive.\n * @param size of the slice along the chosen axis.\n * @param choose an axis.\n * @returns result of the slicing.\n * @throws ValueError: If `array` is of an unsupported subtype of `tf.Tensor`.\n */\nexport function sliceAlongAxis(array, start, size, axis) {\n return tidy(() => {\n switch (array.rank) {\n case 1:\n return tfc.slice1d(array, start, size);\n case 2:\n switch (axis) {\n case 1:\n return sliceAlongFirstAxis(array, start, size);\n case 2:\n return sliceAlongLastAxis(array, start, size);\n default:\n throw new ValueError(`The axis is not within the rank of the tensor ` +\n `${axis}`);\n }\n case 3:\n switch (axis) {\n case 1:\n return sliceAlongFirstAxis(array, start, size);\n case 2:\n return tfc.slice3d(array, [0, start, 0], [array.shape[0], size, array.shape[2]]);\n case 3:\n return sliceAlongLastAxis(array, start, size);\n default:\n throw new ValueError(`The axis is not within the rank of the tensor ` +\n `${axis}`);\n }\n case 4:\n switch (axis) {\n case 1:\n return sliceAlongFirstAxis(array, start, size);\n case 2:\n return tfc.slice4d(array, [0, start, 0, 0], [array.shape[0], size, array.shape[2], array.shape[3]]);\n case 3:\n return tfc.slice4d(array, [0, 0, start, 0], [array.shape[0], array.shape[1], size, array.shape[3]]);\n case 
4:\n return sliceAlongLastAxis(array, start, size);\n default:\n throw new ValueError(`The axis is not within the rank of the tensor ` +\n `${axis}`);\n }\n default:\n throw new ValueError(`sliceAlongLastAxis() received an unsupported tensor rank: ` +\n `${array.rank}`);\n }\n });\n}\n/**\n * Concatenates a list of tensors alongside the specified axis.\n * @param tensors `Array` of tensors to concatenate.\n * @param axis Concatenation axis.\n * @returns The result of the concatenation.\n */\nexport function concatenate(tensors, axis = -1) {\n let rank;\n if (axis < 0) {\n rank = tensors[0].rank;\n if (rank !== 0) {\n axis = rank;\n }\n else {\n axis = 0;\n }\n }\n if (axis === tensors[0].rank) {\n // Porting Note: This is necessary because tfc.concat() requires axis to be\n // in the interval [-rank, rank).\n axis = -1;\n }\n // Porting Note: Sparse concat is not supported yet.\n return tfc.concat(tensors, axis);\n}\n/**\n * Concatenate two arrays along the first dimension.\n * @param a The 1st `tf.Tensor` to concatenate.\n * @param b The 2nd `tf.Tensor` to concatenate.\n * @returns Result of the concatenation.\n * @throws ValueError: If `a` is of an unsupported subtype of `tf.Tensor`.\n */\nexport function concatAlongFirstAxis(a, b) {\n switch (a.rank) {\n case 1:\n return tfc.concat1d([a, b]);\n case 2:\n return tfc.concat2d([a, b], 0);\n case 3:\n return tfc.concat3d([a, b], 0);\n case 4:\n return tfc.concat4d([a, b], 0);\n default:\n throw new ValueError(`concatAlongFirstAxis() received an unsupported ` +\n `tensor rank: ${a.rank}`);\n }\n}\n/**\n * Creates a tensor by tiling `x` by `n`.\n * @param x A tensor.\n * @param n An Array of integers or a single integer. If an Array, the length\n * must be the same as the number of dimensions in `x`. If a single integer,\n * it will be treated as an Array of length 1.\n */\nexport function tile(x, n) {\n if (!Array.isArray(n)) {\n n = [n];\n }\n if (x.rank !== n.length) {\n throw new ValueError(`The length of input n (${n.length}) does not match ` +\n `the number of dimensions in input x (${x.rank})`);\n }\n return tfc.tile(x, n);\n}\n/* Creation of random tensors. */\n/**\n * Get a tensor with normal distribution of values.\n *\n * @param shape Shape of the tensor.\n * @param mean mean value of the normal distribution.\n * @param stddev standard deviation of the normal distribution.\n * @param dtype\n * @param seed\n * @return The normal tensor.\n */\nexport function randomNormal(shape, mean = 0.0, stddev = 1.0, dtype, seed) {\n return tfc.randomNormal(shape, mean, stddev, dtype, seed);\n}\n/* Linear Algebra */\n/**\n * Multiply two tensors and returns the result as a tensor.\n *\n * For 2D tensors, this is equivalent to matrix multiplication (matMul).\n * For tensors of higher ranks, it follows the Theano behavior,\n * (e.g. `(2, 3) * (4, 3, 5) -> (2, 4, 5)`). 
From the Theano documentation:\n *\n * For N dimensions it is a sum product over the last axis of x and the\n * second-to-last of y:\n *\n * @param a A tensor of at least rank 2.\n * @param b A tensor of at least rank 2.\n * @param activation (optional) A string identifying the activation\n * function.\n * @return Result of the dot operation.\n */\nexport function dot(a, b, activation, bias) {\n if ((a.rank < 2) || (b.rank < 2)) {\n throw new NotImplementedError(`dot requires both inputs to be rank >= 2` +\n ` but got x shape = ${a.shape} and y shape = ${b.shape}`);\n }\n if (b.rank >= 3) {\n const xLastDim = a.shape.slice(-1)[0];\n const ySecondLastDim = b.shape.slice(-2)[0];\n if (xLastDim !== ySecondLastDim) {\n throw new NotImplementedError(`If rank y >= 3, then the second last dim` +\n ` of y must equal the last dim of x but got x shape = ${a.shape} and ` +\n ` y shape = ${b.shape}`);\n }\n }\n // Handle basic 2D x 2D case.\n if ((a.rank === 2) && (b.rank === 2)) {\n const transposeA = false;\n const transposeB = false;\n // tfc.fused.matMul only fuses certain activation functions. Unsupported\n // activation functions are treated as 'linear' activations, which is\n // equivalent to a no-op.\n return tfc.fused.matMul({\n a,\n b: b,\n transposeA,\n transposeB,\n bias: bias ? reshapeBias(a.rank, bias, imageDataFormat()) : null,\n activation\n });\n }\n else {\n // Reshape x into the analogous 2D Tensor.\n const aFirstDims = a.shape.slice(); // Holds all but the last dim of x.\n const aLastDim = aFirstDims.pop();\n a = a.reshape([-1, aLastDim]);\n // Reshape y into the analogous 2D Tensor, and keep track of the\n // required dimensions to reproduce the output shape.\n const bShape = b.shape.slice();\n const bLastDim = bShape.pop();\n const ySecondLastDim = bShape.pop();\n const yOtherDims = [...bShape, bLastDim];\n // permutation should be like [r-2, 0, 1, 2, ... r-4, r-3, r-1]\n // where r is the rank of y.\n const perm = Array.from({ length: b.rank }, (_, i) => {\n if (i === 0) {\n return b.rank - 2;\n }\n else if (i <= b.rank - 2) {\n return i - 1;\n }\n return i;\n });\n b = b.transpose(perm).reshape([ySecondLastDim, -1]);\n // Multiply x and y as 2D Tensors, and then reshape back to original.\n const outputShape = [...aFirstDims, ...yOtherDims];\n const transposeA = false;\n const transposeB = false;\n return tfc.fused\n .matMul({\n a,\n b,\n transposeA,\n transposeB,\n bias: bias ? reshapeBias(a.rank, bias, imageDataFormat()) : null,\n activation\n })\n .reshape(outputShape);\n }\n}\n/**\n * Compute the sign Tensor of an input Tensor.\n *\n * Elements of the input `tf.Tensor` that are === 0 are mapped to 0.\n * Elements of the input `tf.Tensor` that are > 0 are mapped to 1.\n * Elements of the input `tf.Tensor` that are < 0 are mapped to -1.\n *\n * @param x Input `tf.Tensor`.\n * @return The sign `tf.Tensor`.\n */\nexport function sign(x) {\n // TODO(cais): Move to the core.\n return tidy(() => {\n const zerosLikeX = coreZerosLike(x);\n const onesLikeX = coreOnesLike(x);\n return where(tfc.equal(x, zerosLikeX), zerosLikeX, where(tfc.greater(x, coreZerosLike(x)), onesLikeX, tfc.mul(-1, onesLikeX)));\n });\n}\n/**\n * Computes the one-hot representation of an integer tensor.\n * @param indices nD integer tensor of shape\n * `(batch_size, dim1, dim2, ... dim(n-1))`\n * @param numClasses Integer, number of classes to consider.\n * @returns (n + 1)D one hot representation of the input\n * with shape `(batch_size, dim1, dim2, ... 
dim(n-1), num_classes)`\n */\nexport function oneHot(indices, numClasses) {\n return tidy(() => {\n if (indices.rank !== 1) {\n throw new Error('Only 1D one-hot tensors are supported in the ' +\n 'deeplearn backend, at present.');\n }\n indices = indices.toInt();\n return tfc.oneHot(indices, numClasses).toFloat();\n });\n}\n/* Elementary math functions. */\n/**\n * Retrieves the elements of indices `indices` in the tensor `reference`.\n * @param reference A tensor.\n * @param indices An integer tensor of indices or an `Array` of integers.\n * @param axis Axis along which to perform the gather operation.\n * @returns The result of the gathering as a tensor.\n */\nexport function gather(reference, indices, axis) {\n return tidy(() => {\n if (Array.isArray(indices)) {\n indices = tensor1d(indices, 'int32');\n }\n else {\n indices = indices.toInt();\n }\n return tfc.gather(reference, indices, axis);\n });\n}\n/**\n * Element-wise square.\n * @param x Input tensor.\n * @return element-wise x^2\n */\nexport function square(x) {\n return tfc.mul(x, x);\n}\n/**\n * Element-wise exponentiation.\n *\n * Porting Note: In PyKeras, `a` (the exponent) is a Python integer, which\n * takes advatnage of the backend's (e.g., TensorFlow's) automatic\n * conversion to tensor. Here we allow `a` to be either a number or a tensor.\n *\n * @param x The base tensor.\n * @param a The exponent, tensor or number. If a number, it is rounded to the\n * nearest integer and converted to a tensor.\n * @returns A tensor of the same shape as `x`.\n */\nexport function pow(x, a) {\n return tidy(() => {\n if (typeof (a) === 'number') {\n a = scalar(Math.round(a), 'int32');\n }\n if (a.dtype !== 'int32') {\n throw new NotImplementedError(`Non-int32 dtype (${a.dtype}) is not supported by pow() yet`);\n }\n return tfc.pow(x, a);\n });\n}\n/**\n * Reshapes bias tensor according to rank of x.\n */\nfunction reshapeBias(xRank, bias, dataFormat) {\n const biasShape = bias.shape;\n if (bias.rank !== 1 && bias.rank !== xRank) {\n throw new ValueError(`Unexpected bias dimensions: ${bias.rank}` +\n `; expected it to be 1 or ${xRank}`);\n }\n if (xRank === 5) {\n if (dataFormat === 'channelsFirst') {\n if (biasShape.length === 1) {\n return bias.reshape([1, biasShape[0], 1, 1, 1]);\n }\n else {\n return bias.reshape([1, biasShape[3], biasShape[0], biasShape[1], biasShape[2]]);\n }\n }\n else if (dataFormat === 'channelsLast') {\n if (biasShape.length === 1) {\n return bias.reshape([1, 1, 1, 1, biasShape[0]]);\n }\n else {\n return bias.reshape([1].concat(biasShape));\n }\n }\n }\n else if (xRank === 4) {\n if (dataFormat === 'channelsFirst') {\n if (biasShape.length === 1) {\n return bias.reshape([1, biasShape[0], 1, 1]);\n }\n else {\n return bias.reshape([1, biasShape[2], biasShape[0], biasShape[1]]);\n }\n }\n else if (dataFormat === 'channelsLast') {\n if (biasShape.length === 1) {\n return bias.reshape([1, 1, 1, biasShape[0]]);\n }\n else {\n return bias.reshape([1].concat(biasShape));\n }\n }\n }\n else if (xRank === 3) {\n if (dataFormat === 'channelsFirst') {\n if (biasShape.length === 1) {\n return bias.reshape([1, biasShape[0], 1]);\n }\n else {\n return bias.reshape([1, biasShape[1], biasShape[0]]);\n }\n }\n else if (dataFormat === 'channelsLast') {\n if (biasShape.length === 1) {\n return bias.reshape([1, 1, biasShape[0]]);\n }\n else {\n return bias.reshape([1].concat(biasShape));\n }\n }\n }\n else if (xRank < 3) {\n return bias;\n }\n throw new ValueError(`Unsupported input rank by biasAdd: ${bias.rank}`);\n}\n/* 
Neural-network operations. */\n/**\n * Add a bias to a tensor.\n *\n * @param x The tensor to add the bias to.\n * @param bias The bias to add to `x`. Must be 1D or the same rank as `x`.\n * @return Result of the bias adding.\n * @throws ValueError: If the rank of `bias` is incorrect.\n */\nexport function biasAdd(x, bias, dataFormat) {\n return tidy(() => {\n if (dataFormat == null) {\n dataFormat = imageDataFormat();\n }\n checkDataFormat(dataFormat);\n return x.add(reshapeBias(x.rank, bias, dataFormat));\n });\n}\n/**\n * Exponential linear unit (ELU).\n * @param x A tensor or variable to compute the activation function for.\n * @param alpha: A scalar, a scaling factor for the negative section.\n * @return Output of the ELU operation.\n */\nexport function elu(x, alpha = 1) {\n // TODO(cais): Add support for alpha values other than 1.\n if (alpha !== 1) {\n throw new NotImplementedError(`Support for alpha values other than 1 (${alpha}) is not implemented ` +\n `yet.`);\n }\n return tfc.elu(x);\n}\n/**\n * Softsign of a tensor.\n *\n * Defined as x / (abs(x) + 1), element-wise.\n *\n * @param x: Input.\n * @returns Output.\n */\nexport function softsign(x) {\n return tidy(() => tfc.div(x, tfc.abs(x).add(1)));\n}\n/**\n * Sets entries in `x` to zero at random, while scaling the entire tensor.\n *\n * @param x input tensor.\n * @param level fraction of the entries in the tensor that will be set to 0.\n * @param noiseShape shape of randomly generated keep/drop flags, must be\n * broadcastable to the shape of `x`. Optional.\n * @param seed random seed to ensure determinism. Optional.\n * @returns Result of the dropout operation.\n */\nexport function dropout(x, level, noiseShape, seed) {\n return tidy(() => tfc.dropout(x, level, noiseShape, seed));\n}\n/**\n * Element-wise, segment-wise linear approximation of sigmoid.\n *\n * Returns `0.` if `x < -2.5`, `1.` if `x > 2.5`.\n * In `-2.5 <= x <= 2.5`, returns `0.2 * x + 0.5`.\n *\n * @param x Input tensor.\n * @returns Output tensor.\n */\nexport function hardSigmoid(x) {\n return tidy(() => {\n const y = tfc.add(.5, tfc.mul(.2, x));\n return tfc.clipByValue(y, 0, 1);\n });\n}\n/**\n * Invoke `x` in the training phase, and `alt` otherwise.\n *\n * Porting Note: We do not create placeholder tensors for the `training`\n * boolean flag here, because there is no such thing in the TF.js imperative\n * backend.\n *\n * @param x The function to invoke iff `training` is `true`.\n * @param alt The function to invoke iff `training` is `false`.\n * @param training Boolean flag for whether training phase is active.\n * @returns The return value of `x()` if `training` is `true`, or the return\n * value of `alt()` if `training` is `false`.\n */\nexport function inTrainPhase(x, alt, training = false) {\n return training ? 
x() : alt();\n}\n//# sourceMappingURL=tfjs_backend.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nexport const VALID_FAN_MODE_VALUES = ['fanIn', 'fanOut', 'fanAvg'];\nexport const VALID_DISTRIBUTION_VALUES = ['normal', 'uniform', 'truncatedNormal'];\n// We can't easily extract a string[] from the string union type, but we can\n// recapitulate the list, enforcing at compile time that the values are valid\n// and that we have the right number of them.\n/**\n * A string array of valid Initializer class names.\n *\n * This is guaranteed to match the `InitializerClassName` union type.\n */\nexport const initializerClassNames = [\n 'Zeros', 'Ones', 'Constant', 'RandomNormal', 'RandomUniform',\n 'TruncatedNormal', 'VarianceScaling', 'Orthogonal', 'Identity'\n];\n//# sourceMappingURL=initializer_config.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport { eye, linalg, mul, ones, randomUniform, scalar, serialization, tidy, truncatedNormal, zeros } from '@tensorflow/tfjs-core';\nimport * as K from './backend/tfjs_backend';\nimport { checkDataFormat } from './common';\nimport { NotImplementedError, ValueError } from './errors';\nimport { VALID_DISTRIBUTION_VALUES, VALID_FAN_MODE_VALUES } from './keras_format/initializer_config';\nimport { checkStringTypeUnionValue, deserializeKerasObject, serializeKerasObject } from './utils/generic_utils';\nimport { arrayProd } from './utils/math_utils';\nexport function checkFanMode(value) {\n checkStringTypeUnionValue(VALID_FAN_MODE_VALUES, 'FanMode', value);\n}\nexport function checkDistribution(value) {\n checkStringTypeUnionValue(VALID_DISTRIBUTION_VALUES, 'Distribution', value);\n}\n/**\n * Initializer base class.\n *\n * @doc {\n * heading: 'Initializers', subheading: 'Classes', namespace: 'initializers'}\n */\nexport class Initializer extends serialization.Serializable {\n fromConfigUsesCustomObjects() {\n return false;\n }\n getConfig() {\n return {};\n }\n}\nexport class Zeros extends Initializer {\n apply(shape, dtype) {\n return zeros(shape, dtype);\n }\n}\n/** @nocollapse */\nZeros.className = 'Zeros';\nserialization.registerClass(Zeros);\nexport class Ones extends Initializer {\n apply(shape, dtype) {\n return ones(shape, dtype);\n }\n}\n/** @nocollapse */\nOnes.className = 'Ones';\nserialization.registerClass(Ones);\nexport class Constant extends Initializer {\n constructor(args) {\n super();\n if (typeof args !== 'object') {\n throw new ValueError(`Expected argument of type ConstantConfig but got ${args}`);\n }\n if (args.value === undefined) {\n throw new ValueError(`config must have value set but got ${args}`);\n }\n this.value = args.value;\n }\n apply(shape, dtype) {\n return tidy(() => mul(scalar(this.value), ones(shape, dtype)));\n }\n getConfig() {\n return {\n value: this.value,\n };\n }\n}\n/** @nocollapse */\nConstant.className = 'Constant';\nserialization.registerClass(Constant);\nexport class RandomUniform extends Initializer {\n constructor(args) {\n super();\n this.DEFAULT_MINVAL = -0.05;\n this.DEFAULT_MAXVAL = 0.05;\n this.minval = 
args.minval || this.DEFAULT_MINVAL;\n this.maxval = args.maxval || this.DEFAULT_MAXVAL;\n this.seed = args.seed;\n }\n apply(shape, dtype) {\n return randomUniform(shape, this.minval, this.maxval, dtype);\n }\n getConfig() {\n return { minval: this.minval, maxval: this.maxval, seed: this.seed };\n }\n}\n/** @nocollapse */\nRandomUniform.className = 'RandomUniform';\nserialization.registerClass(RandomUniform);\nexport class RandomNormal extends Initializer {\n constructor(args) {\n super();\n this.DEFAULT_MEAN = 0.;\n this.DEFAULT_STDDEV = 0.05;\n this.mean = args.mean || this.DEFAULT_MEAN;\n this.stddev = args.stddev || this.DEFAULT_STDDEV;\n this.seed = args.seed;\n }\n apply(shape, dtype) {\n dtype = dtype || 'float32';\n if (dtype !== 'float32' && dtype !== 'int32') {\n throw new NotImplementedError(`randomNormal does not support dType ${dtype}.`);\n }\n return K.randomNormal(shape, this.mean, this.stddev, dtype, this.seed);\n }\n getConfig() {\n return { mean: this.mean, stddev: this.stddev, seed: this.seed };\n }\n}\n/** @nocollapse */\nRandomNormal.className = 'RandomNormal';\nserialization.registerClass(RandomNormal);\nexport class TruncatedNormal extends Initializer {\n constructor(args) {\n super();\n this.DEFAULT_MEAN = 0.;\n this.DEFAULT_STDDEV = 0.05;\n this.mean = args.mean || this.DEFAULT_MEAN;\n this.stddev = args.stddev || this.DEFAULT_STDDEV;\n this.seed = args.seed;\n }\n apply(shape, dtype) {\n dtype = dtype || 'float32';\n if (dtype !== 'float32' && dtype !== 'int32') {\n throw new NotImplementedError(`truncatedNormal does not support dType ${dtype}.`);\n }\n return truncatedNormal(shape, this.mean, this.stddev, dtype, this.seed);\n }\n getConfig() {\n return { mean: this.mean, stddev: this.stddev, seed: this.seed };\n }\n}\n/** @nocollapse */\nTruncatedNormal.className = 'TruncatedNormal';\nserialization.registerClass(TruncatedNormal);\nexport class Identity extends Initializer {\n constructor(args) {\n super();\n this.gain = args.gain != null ? 
args.gain : 1.0;\n }\n apply(shape, dtype) {\n return tidy(() => {\n if (shape.length !== 2 || shape[0] !== shape[1]) {\n throw new ValueError('Identity matrix initializer can only be used for' +\n ' 2D square matrices.');\n }\n else {\n return mul(this.gain, eye(shape[0]));\n }\n });\n }\n getConfig() {\n return { gain: this.gain };\n }\n}\n/** @nocollapse */\nIdentity.className = 'Identity';\nserialization.registerClass(Identity);\n/**\n * Computes the number of input and output units for a weight shape.\n * @param shape Shape of weight.\n * @param dataFormat data format to use for convolution kernels.\n * Note that all kernels in Keras are standardized on the\n * CHANNEL_LAST ordering (even when inputs are set to CHANNEL_FIRST).\n * @return An length-2 array: fanIn, fanOut.\n */\nfunction computeFans(shape, dataFormat = 'channelsLast') {\n let fanIn;\n let fanOut;\n checkDataFormat(dataFormat);\n if (shape.length === 2) {\n fanIn = shape[0];\n fanOut = shape[1];\n }\n else if ([3, 4, 5].indexOf(shape.length) !== -1) {\n if (dataFormat === 'channelsFirst') {\n const receptiveFieldSize = arrayProd(shape, 2);\n fanIn = shape[1] * receptiveFieldSize;\n fanOut = shape[0] * receptiveFieldSize;\n }\n else if (dataFormat === 'channelsLast') {\n const receptiveFieldSize = arrayProd(shape, 0, shape.length - 2);\n fanIn = shape[shape.length - 2] * receptiveFieldSize;\n fanOut = shape[shape.length - 1] * receptiveFieldSize;\n }\n }\n else {\n const shapeProd = arrayProd(shape);\n fanIn = Math.sqrt(shapeProd);\n fanOut = Math.sqrt(shapeProd);\n }\n return [fanIn, fanOut];\n}\nexport class VarianceScaling extends Initializer {\n /**\n * Constructor of VarianceScaling.\n * @throws ValueError for invalid value in scale.\n */\n constructor(args) {\n super();\n if (args.scale < 0.0) {\n throw new ValueError(`scale must be a positive float. Got: ${args.scale}`);\n }\n this.scale = args.scale == null ? 1.0 : args.scale;\n this.mode = args.mode == null ? 'fanIn' : args.mode;\n checkFanMode(this.mode);\n this.distribution =\n args.distribution == null ? 'normal' : args.distribution;\n checkDistribution(this.distribution);\n this.seed = args.seed;\n }\n apply(shape, dtype) {\n const fans = computeFans(shape);\n const fanIn = fans[0];\n const fanOut = fans[1];\n let scale = this.scale;\n if (this.mode === 'fanIn') {\n scale /= Math.max(1, fanIn);\n }\n else if (this.mode === 'fanOut') {\n scale /= Math.max(1, fanOut);\n }\n else {\n scale /= Math.max(1, (fanIn + fanOut) / 2);\n }\n if (this.distribution === 'normal') {\n const stddev = Math.sqrt(scale);\n dtype = dtype || 'float32';\n if (dtype !== 'float32' && dtype !== 'int32') {\n throw new NotImplementedError(`${this.getClassName()} does not support dType ${dtype}.`);\n }\n return truncatedNormal(shape, 0, stddev, dtype, this.seed);\n }\n else {\n const limit = Math.sqrt(3 * scale);\n return randomUniform(shape, -limit, limit, dtype);\n }\n }\n getConfig() {\n return {\n scale: this.scale,\n mode: this.mode,\n distribution: this.distribution,\n seed: this.seed\n };\n }\n}\n/** @nocollapse */\nVarianceScaling.className = 'VarianceScaling';\nserialization.registerClass(VarianceScaling);\nexport class GlorotUniform extends VarianceScaling {\n /**\n * Constructor of GlorotUniform\n * @param scale\n * @param mode\n * @param distribution\n * @param seed\n */\n constructor(args) {\n super({\n scale: 1.0,\n mode: 'fanAvg',\n distribution: 'uniform',\n seed: args == null ? 
null : args.seed\n });\n }\n getClassName() {\n // In Python Keras, GlorotUniform is not a class, but a helper method\n // that creates a VarianceScaling object. Use 'VarianceScaling' as\n // class name to be compatible with that.\n return VarianceScaling.className;\n }\n}\n/** @nocollapse */\nGlorotUniform.className = 'GlorotUniform';\nserialization.registerClass(GlorotUniform);\nexport class GlorotNormal extends VarianceScaling {\n /**\n * Constructor of GlorotNormal.\n * @param scale\n * @param mode\n * @param distribution\n * @param seed\n */\n constructor(args) {\n super({\n scale: 1.0,\n mode: 'fanAvg',\n distribution: 'normal',\n seed: args == null ? null : args.seed\n });\n }\n getClassName() {\n // In Python Keras, GlorotNormal is not a class, but a helper method\n // that creates a VarianceScaling object. Use 'VarianceScaling' as\n // class name to be compatible with that.\n return VarianceScaling.className;\n }\n}\n/** @nocollapse */\nGlorotNormal.className = 'GlorotNormal';\nserialization.registerClass(GlorotNormal);\nexport class HeNormal extends VarianceScaling {\n constructor(args) {\n super({\n scale: 2.0,\n mode: 'fanIn',\n distribution: 'normal',\n seed: args == null ? null : args.seed\n });\n }\n getClassName() {\n // In Python Keras, HeNormal is not a class, but a helper method\n // that creates a VarianceScaling object. Use 'VarianceScaling' as\n // class name to be compatible with that.\n return VarianceScaling.className;\n }\n}\n/** @nocollapse */\nHeNormal.className = 'HeNormal';\nserialization.registerClass(HeNormal);\nexport class HeUniform extends VarianceScaling {\n constructor(args) {\n super({\n scale: 2.0,\n mode: 'fanIn',\n distribution: 'uniform',\n seed: args == null ? null : args.seed\n });\n }\n getClassName() {\n // In Python Keras, HeUniform is not a class, but a helper method\n // that creates a VarianceScaling object. Use 'VarianceScaling' as\n // class name to be compatible with that.\n return VarianceScaling.className;\n }\n}\n/** @nocollapse */\nHeUniform.className = 'HeUniform';\nserialization.registerClass(HeUniform);\nexport class LeCunNormal extends VarianceScaling {\n constructor(args) {\n super({\n scale: 1.0,\n mode: 'fanIn',\n distribution: 'normal',\n seed: args == null ? null : args.seed\n });\n }\n getClassName() {\n // In Python Keras, LeCunNormal is not a class, but a helper method\n // that creates a VarianceScaling object. Use 'VarianceScaling' as\n // class name to be compatible with that.\n return VarianceScaling.className;\n }\n}\n/** @nocollapse */\nLeCunNormal.className = 'LeCunNormal';\nserialization.registerClass(LeCunNormal);\nexport class LeCunUniform extends VarianceScaling {\n constructor(args) {\n super({\n scale: 1.0,\n mode: 'fanIn',\n distribution: 'uniform',\n seed: args == null ? null : args.seed\n });\n }\n getClassName() {\n // In Python Keras, LeCunUniform is not a class, but a helper method\n // that creates a VarianceScaling object. Use 'VarianceScaling' as\n // class name to be compatible with that.\n return VarianceScaling.className;\n }\n}\n/** @nocollapse */\nLeCunUniform.className = 'LeCunNormal';\nserialization.registerClass(LeCunUniform);\nexport class Orthogonal extends Initializer {\n constructor(args) {\n super();\n this.DEFAULT_GAIN = 1;\n this.gain = args.gain == null ? 
this.DEFAULT_GAIN : args.gain;\n this.seed = args.seed;\n if (this.seed != null) {\n throw new NotImplementedError('Random seed is not implemented for Orthogonal Initializer yet.');\n }\n }\n apply(shape, dtype) {\n return tidy(() => {\n if (shape.length < 2) {\n throw new NotImplementedError('Shape must be at least 2D.');\n }\n if (shape[0] * shape[1] > 2000) {\n console.warn(`Orthogonal initializer is being called on a matrix with more ` +\n `than 2000 (${shape[0] * shape[1]}) elements: ` +\n `Slowness may result.`);\n }\n // TODO(cais): Add seed support.\n const normalizedShape = shape[0] > shape[1] ? [shape[1], shape[0]] : shape;\n const a = K.randomNormal(normalizedShape, 0, 1, 'float32');\n let q = linalg.gramSchmidt(a);\n if (shape[0] > shape[1]) {\n q = q.transpose();\n }\n return mul(this.gain, q);\n });\n }\n getConfig() {\n return {\n gain: this.gain,\n seed: this.seed,\n };\n }\n}\n/** @nocollapse */\nOrthogonal.className = 'Orthogonal';\nserialization.registerClass(Orthogonal);\n// Maps the JavaScript-like identifier keys to the corresponding registry\n// symbols.\nexport const INITIALIZER_IDENTIFIER_REGISTRY_SYMBOL_MAP = {\n 'constant': 'Constant',\n 'glorotNormal': 'GlorotNormal',\n 'glorotUniform': 'GlorotUniform',\n 'heNormal': 'HeNormal',\n 'heUniform': 'HeUniform',\n 'identity': 'Identity',\n 'leCunNormal': 'LeCunNormal',\n 'leCunUniform': 'LeCunUniform',\n 'ones': 'Ones',\n 'orthogonal': 'Orthogonal',\n 'randomNormal': 'RandomNormal',\n 'randomUniform': 'RandomUniform',\n 'truncatedNormal': 'TruncatedNormal',\n 'varianceScaling': 'VarianceScaling',\n 'zeros': 'Zeros'\n};\nfunction deserializeInitializer(config, customObjects = {}) {\n return deserializeKerasObject(config, serialization.SerializationMap.getMap().classNameMap, customObjects, 'initializer');\n}\nexport function serializeInitializer(initializer) {\n return serializeKerasObject(initializer);\n}\nexport function getInitializer(identifier) {\n if (typeof identifier === 'string') {\n const className = identifier in INITIALIZER_IDENTIFIER_REGISTRY_SYMBOL_MAP ?\n INITIALIZER_IDENTIFIER_REGISTRY_SYMBOL_MAP[identifier] :\n identifier;\n /* We have four 'helper' classes for common initializers that\n all get serialized as 'VarianceScaling' and shouldn't go through\n the deserializeInitializer pathway. 
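A minimal sketch of how the string-identifier registry above is typically exercised from the public API; this assumes `@tensorflow/tfjs` (which re-exports tfjs-layers) and is illustrative only — layer arguments accept either the registry key or an `Initializer` instance:

```js
const tf = require('@tensorflow/tfjs');

// 'heNormal' resolves through the identifier-to-class map shown above.
const byString = tf.layers.dense({ units: 8, inputShape: [4], kernelInitializer: 'heNormal' });

// Equivalent: pass an Initializer instance built via the factory functions.
const byInstance = tf.layers.dense({ units: 8, inputShape: [4], kernelInitializer: tf.initializers.heNormal({}) });
```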
*/\n if (className === 'GlorotNormal') {\n return new GlorotNormal();\n }\n else if (className === 'GlorotUniform') {\n return new GlorotUniform();\n }\n else if (className === 'HeNormal') {\n return new HeNormal();\n }\n else if (className === 'HeUniform') {\n return new HeUniform();\n }\n else if (className === 'LeCunNormal') {\n return new LeCunNormal();\n }\n else if (className === 'LeCunUniform') {\n return new LeCunUniform();\n }\n else {\n const config = {};\n config['className'] = className;\n config['config'] = {};\n return deserializeInitializer(config);\n }\n }\n else if (identifier instanceof Initializer) {\n return identifier;\n }\n else {\n return deserializeInitializer(identifier);\n }\n}\n//# sourceMappingURL=initializers.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n// tslint:disable-next-line:max-line-length\nimport { Constant, GlorotNormal, GlorotUniform, HeNormal, HeUniform, Identity, LeCunNormal, LeCunUniform, Ones, Orthogonal, RandomNormal, RandomUniform, TruncatedNormal, VarianceScaling, Zeros } from './initializers';\n/**\n * Initializer that generates tensors initialized to 0.\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function zeros() {\n return new Zeros();\n}\n/**\n * Initializer that generates tensors initialized to 1.\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function ones() {\n return new Ones();\n}\n/**\n * Initializer that generates values initialized to some constant.\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function constant(args) {\n return new Constant(args);\n}\n/**\n * Initializer that generates random values initialized to a uniform\n * distribution.\n *\n * Values will be distributed uniformly between the configured minval and\n * maxval.\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function randomUniform(args) {\n return new RandomUniform(args);\n}\n/**\n * Initializer that generates random values initialized to a normal\n * distribution.\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function randomNormal(args) {\n return new RandomNormal(args);\n}\n/**\n * Initializer that generates random values initialized to a truncated normal.\n * distribution.\n *\n * These values are similar to values from a `RandomNormal` except that values\n * more than two standard deviations from the mean are discarded and re-drawn.\n * This is the recommended initializer for neural network weights and filters.\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function truncatedNormal(args) {\n return new TruncatedNormal(args);\n}\n/**\n * Initializer that generates the identity matrix.\n * Only use for square 2D matrices.\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function identity(args) {\n return new Identity(args);\n}\n/**\n * Initializer capable of adapting its scale to the shape of weights.\n * With distribution=NORMAL, samples are drawn from a truncated normal\n * distribution centered on zero, with `stddev = sqrt(scale / n)` where n is:\n * - number of input units in the weight tensor, if mode = FAN_IN.\n * - number of output units, if mode = FAN_OUT.\n * - average of the 
numbers of input and output units, if mode = FAN_AVG.\n * With distribution=UNIFORM,\n * samples are drawn from a uniform distribution\n * within [-limit, limit], with `limit = sqrt(3 * scale / n)`.\n *\n * @doc {heading: 'Initializers',namespace: 'initializers'}\n */\nexport function varianceScaling(config) {\n return new VarianceScaling(config);\n}\n/**\n * Glorot uniform initializer, also called Xavier uniform initializer.\n * It draws samples from a uniform distribution within [-limit, limit]\n * where `limit` is `sqrt(6 / (fan_in + fan_out))`\n * where `fan_in` is the number of input units in the weight tensor\n * and `fan_out` is the number of output units in the weight tensor\n *\n * Reference:\n * Glorot & Bengio, AISTATS 2010\n * http://jmlr.org/proceedings/papers/v9/glorot10a/glorot10a.pdf.\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function glorotUniform(args) {\n return new GlorotUniform(args);\n}\n/**\n * Glorot normal initializer, also called Xavier normal initializer.\n * It draws samples from a truncated normal distribution centered on 0\n * with `stddev = sqrt(2 / (fan_in + fan_out))`\n * where `fan_in` is the number of input units in the weight tensor\n * and `fan_out` is the number of output units in the weight tensor.\n *\n * Reference:\n * Glorot & Bengio, AISTATS 2010\n * http://jmlr.org/proceedings/papers/v9/glorot10a/glorot10a.pdf\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function glorotNormal(args) {\n return new GlorotNormal(args);\n}\n/**\n * He normal initializer.\n *\n * It draws samples from a truncated normal distribution centered on 0\n * with `stddev = sqrt(2 / fanIn)`\n * where `fanIn` is the number of input units in the weight tensor.\n *\n * Reference:\n * He et al., http://arxiv.org/abs/1502.01852\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function heNormal(args) {\n return new HeNormal(args);\n}\n/**\n * He uniform initializer.\n *\n * It draws samples from a uniform distribution within [-limit, limit]\n * where `limit` is `sqrt(6 / fan_in)`\n * where `fanIn` is the number of input units in the weight tensor.\n *\n * Reference:\n * He et al., http://arxiv.org/abs/1502.01852\n *\n * @doc {heading: 'Initializers',namespace: 'initializers'}\n */\nexport function heUniform(args) {\n return new HeUniform(args);\n}\n/**\n * LeCun normal initializer.\n *\n * It draws samples from a truncated normal distribution centered on 0\n * with `stddev = sqrt(1 / fanIn)`\n * where `fanIn` is the number of input units in the weight tensor.\n *\n * References:\n * [Self-Normalizing Neural Networks](https://arxiv.org/abs/1706.02515)\n * [Efficient Backprop](http://yann.lecun.com/exdb/publis/pdf/lecun-98b.pdf)\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function leCunNormal(args) {\n return new LeCunNormal(args);\n}\n/**\n * LeCun uniform initializer.\n *\n * It draws samples from a uniform distribution in the interval\n * `[-limit, limit]` with `limit = sqrt(3 / fanIn)`,\n * where `fanIn` is the number of input units in the weight tensor.\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function leCunUniform(args) {\n return new LeCunUniform(args);\n}\n/**\n * Initializer that generates a random orthogonal matrix.\n *\n * Reference:\n * [Saxe et al., http://arxiv.org/abs/1312.6120](http://arxiv.org/abs/1312.6120)\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function 
orthogonal(args) {\n return new Orthogonal(args);\n}\n//# sourceMappingURL=exports_initializers.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Utilities related to persistent state in the backend.\n */\n/**\n * An ID to track `tf.SymbolicTensor`s and derived classes.\n * Required in different places in engine/topology.ts to identify unique\n * tensors.\n */\nlet _nextUniqueTensorId = 0;\nexport function getNextUniqueTensorId() {\n return _nextUniqueTensorId++;\n}\nconst _uidPrefixes = {};\n/**\n * Provides a unique UID given a string prefix.\n *\n * @param prefix\n */\nexport function getUid(prefix = '') {\n if (!(prefix in _uidPrefixes)) {\n _uidPrefixes[prefix] = 0;\n }\n _uidPrefixes[prefix] += 1;\n return prefix + _uidPrefixes[prefix].toString();\n}\n//# sourceMappingURL=state.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport { ValueError } from '../errors';\n// tslint:enable\n/**\n * Determine whether the input is an Array of Shapes.\n */\nexport function isArrayOfShapes(x) {\n return Array.isArray(x) && Array.isArray(x[0]);\n}\n/**\n * Special case of normalizing shapes to lists.\n *\n * @param x A shape or list of shapes to normalize into a list of Shapes.\n * @return A list of Shapes.\n */\nexport function normalizeShapeList(x) {\n if (x.length === 0) {\n return [];\n }\n if (!Array.isArray(x[0])) {\n return [x];\n }\n return x;\n}\n/**\n * Helper function to obtain exactly one Tensor.\n * @param xs: A single `tf.Tensor` or an `Array` of `tf.Tensor`s.\n * @return A single `tf.Tensor`. If `xs` is an `Array`, return the first one.\n * @throws ValueError: If `xs` is an `Array` and its length is not 1.\n */\nexport function getExactlyOneTensor(xs) {\n let x;\n if (Array.isArray(xs)) {\n if (xs.length !== 1) {\n throw new ValueError(`Expected Tensor length to be 1; got ${xs.length}`);\n }\n x = xs[0];\n }\n else {\n x = xs;\n }\n return x;\n}\n/**\n * Helper function to obtain exactly on instance of Shape.\n *\n * @param shapes Input single `Shape` or Array of `Shape`s.\n * @returns If input is a single `Shape`, return it unchanged. 
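A short, hedged example of the initializer factory functions documented above, assuming the public `tf.initializers` namespace from `@tensorflow/tfjs`; the Glorot-uniform limit `sqrt(6 / (fanIn + fanOut))` quoted in the doc comment applies to the first call:

```js
const tf = require('@tensorflow/tfjs');

// glorotUniform samples from U[-limit, limit], limit = sqrt(6 / (fanIn + fanOut)).
const init = tf.initializers.glorotUniform({ seed: 42 });

// Initializer.apply(shape, dtype) materializes a tensor of initial values.
const kernel = init.apply([3, 4], 'float32');   // fanIn = 3, fanOut = 4
kernel.print();

// varianceScaling is the general form that the Glorot/He/LeCun helpers specialize.
const vs = tf.initializers.varianceScaling({ scale: 2.0, mode: 'fanIn', distribution: 'normal' });
vs.apply([3, 4]).print();
```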
If the input is\n * an `Array` containing exactly one instance of `Shape`, return the instance.\n * Otherwise, throw a `ValueError`.\n * @throws ValueError: If input is an `Array` of `Shape`s, and its length is not\n * 1.\n */\nexport function getExactlyOneShape(shapes) {\n if (Array.isArray(shapes) && Array.isArray(shapes[0])) {\n if (shapes.length === 1) {\n shapes = shapes;\n return shapes[0];\n }\n else {\n throw new ValueError(`Expected exactly 1 Shape; got ${shapes.length}`);\n }\n }\n else {\n return shapes;\n }\n}\n//# sourceMappingURL=types_utils.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Count the elements in an Array of LayerVariables.\n *\n * @param weights: The LayerVariables of which the constituent numbers are to\n * be counted.\n * @returns A count of the elements in all the LayerVariables\n */\nexport function countParamsInWeights(weights) {\n let count = 0;\n for (const weight of weights) {\n if (weight.shape.length === 0) {\n count += 1;\n }\n else {\n count += weight.shape.reduce((a, b) => a * b);\n }\n }\n return count;\n}\n//# sourceMappingURL=variable_utils.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { variableGrads } from '@tensorflow/tfjs-core';\nimport { getNextUniqueTensorId } from './backend/state';\nimport { getScopedTensorName, getUniqueTensorName } from './common';\nimport { NotImplementedError } from './errors';\nconst DEFAULT_VARIABLE_NAME_PREFIX = 'Variable';\n/**\n * A `tf.layers.LayerVariable` is similar to a `tf.Tensor` in that it has a\n * dtype and shape, but its value is mutable. The value is itself represented\n * as a`tf.Tensor`, and can be read with the `read()` method and updated with\n * the `write()` method.\n */\nexport class LayerVariable {\n /**\n * Construct Variable from a `tf.Tensor`.\n *\n * If not explicitly named, the Variable will be given a name with the\n * prefix 'Variable'. Variable names are unique. In the case of name\n * collision, suffixies '_' will be added to the name.\n *\n * @param val Initial value of the Variable.\n * @param name Name of the variable. If `null` or `undefined` is provided, it\n * will default a name with the prefix 'Variable'.\n * @param constraint Optional, projection function to be applied to the\n * variable after optimize updates\n * @throws ValueError if `name` is `null` or `undefined`.\n */\n constructor(val, dtype = 'float32', name = DEFAULT_VARIABLE_NAME_PREFIX, trainable = true, constraint = null) {\n this.dtype = dtype == null ? 'float32' : dtype;\n this.shape = val.shape;\n this.id = getNextUniqueTensorId();\n name = name == null ? 
DEFAULT_VARIABLE_NAME_PREFIX : name;\n this.originalName = getScopedTensorName(name);\n this.name = getUniqueTensorName(this.originalName);\n this.trainable_ = trainable;\n this.constraint = constraint;\n this.val = tfc.variable(val, this.trainable_, this.name, this.dtype);\n }\n /**\n * Get a snapshot of the Variable's value.\n *\n * The returned value is a snapshot of the Variable's value at the time of\n * the invocation. Future mutations in the value of the tensor will only\n * be reflected by future calls to this method.\n */\n read() {\n this.assertNotDisposed();\n return this.val;\n }\n /**\n * Update the value of the Variable.\n *\n * @param newVal: The new value to update to. Must be consistent with the\n * dtype and shape of the Variable.\n * @return This Variable.\n */\n write(newVal) {\n // TODO(cais): Once TF.js Core supports Tensor.dtype, check dtype match.\n this.assertNotDisposed();\n checkShapesMatch(this.val, newVal);\n // Skip updating if this is the exact same tensor.\n if (this.val.id !== newVal.id) {\n this.val.assign(newVal);\n if (this.constraint != null) {\n this.val.assign(this.constraint.apply(this.val));\n }\n }\n return this;\n }\n /**\n * Dispose this LayersVariable instance from memory.\n */\n dispose() {\n this.assertNotDisposed();\n this.val.dispose();\n }\n assertNotDisposed() {\n if (this.val.isDisposed) {\n throw new Error(`LayersVariable ${this.name} is already disposed.`);\n }\n }\n get trainable() {\n return this.trainable_;\n }\n set trainable(trainable) {\n this.trainable_ = trainable;\n this.val.trainable = trainable;\n }\n}\nfunction checkShapesMatch(x, y) {\n if (x.shape.toString() !== y.shape.toString()) {\n throw new Error('Shape mismatch: ' + JSON.stringify(x.shape) + ' vs. ' +\n JSON.stringify(y.shape));\n }\n}\n/**\n * Create a Variable.\n * @param x The initial value of the `Variable`.\n * @param dtype optional, the type of the variable.\n * @param name optional, the name of the variable, default provided by\n * Variable.\n * @param constraint optional, a constraint to be applied after every update.\n * @return The newly instantiated `Variable`.\n */\nexport function variable(x, dtype, name, constraint) {\n return new LayerVariable(x, dtype, name, true, constraint);\n}\n/**\n * Instantiates an all-zeros Variable and returns it.\n *\n * @param shape Shape of the tensor.\n * @param dtype DType of the tensor.\n * @param name Name of the tensor.\n * @return An all-zero Variable.\n */\nexport function zerosVariable(shape, dtype, name) {\n // TODO(cais): Implement logic for dtype.\n return new LayerVariable(tfc.zeros(shape), dtype, name);\n}\n/**\n * Instantiates an all-zeros tensor of the same shape as another tensor.\n *\n * @param x The other tensor.\n * @param dtype DType of the tensor.\n * @param name Name of the tensor.\n * @return A newly instantiated Variable.\n */\nexport function zerosLike(x, dtype, name) {\n return new LayerVariable(tfc.zerosLike(x), dtype, name);\n}\n/**\n * Instantiates an all-ones tensor and returns it.\n *\n * @param shape Shape of the tensor.\n * @param dtype DType of the tensor.\n * @param name Name of the tensor.\n * @return An all-ones Variable.\n */\nexport function onesVariable(shape, dtype, name) {\n // TODO(cais): Implement logic for dtype.\n const allocated = tfc.ones(shape);\n return new LayerVariable(allocated, dtype, name);\n}\n/**\n * Instantiates an all-ones tensor of the same shape as another tensor.\n *\n * @param x The other tensor.\n * @param dtype DType of the tensor.\n * @param name Name of the 
tensor.\n * @return A newly instantiated Variable.\n */\nexport function onesLike(x, dtype, name) {\n const allocated = tfc.onesLike(x);\n return new LayerVariable(allocated, dtype, name);\n}\n/**\n * Instantiate an identity matrix and returns it, as a Variable\n *\n * @param size Number of rows/columns.\n * @param dtype Data type of returned Variable.\n * @param name Name of returned Variable.\n * @return A Variable, an identity matrix.\n */\nexport function eyeVariable(size, dtype, name) {\n return new LayerVariable(tfc.eye(size), dtype, name);\n}\n/**\n * Get a Variable with uniform distribution of values.\n * @param shape Shape of the tensor.\n * @param minval Lower bound of the uniform distribution.\n * @param maxval Upper bound of the uniform distribution.\n * @param dtype\n * @param seed\n * @param name Optional name.\n * @return The uniform-random Variable.\n */\nexport function randomUniformVariable(shape, minval, maxval, dtype, seed, name = 'randomUniform') {\n return new LayerVariable(tfc.randomUniform(shape, minval, maxval, dtype), dtype, name);\n}\n/**\n * Get a Variable with truncated-normal distribution of values.\n * @param shape Shape of the tensor.\n * @param mean mean value of the normal distribution.\n * @param stddev standard deviation of the normal distribution.\n * @param dtype\n * @param seed\n * @param name Optional name.\n * @return The truncated-normal-random Variable.\n */\nexport function truncatedNormalVariable(shape, mean = 0.0, stddev = 1.0, dtype, seed, name = 'truncatedNormal') {\n // TODO(cais): Implement logic for dtype and seed once they are supported\n // by deeplearn.js.\n dtype = dtype || 'float32';\n if (dtype !== 'float32' && dtype !== 'int32') {\n throw new NotImplementedError(`randomNormal does not support dType ${dtype}.`);\n }\n return new LayerVariable(tfc.truncatedNormal(shape, mean, stddev, dtype, seed), dtype, name);\n}\n/**\n * Get a Variable with normal distribution of values.\n * @param shape Shape of the tensor.\n * @param mean mean value of the normal distribution.\n * @param stddev standard deviation of the normal distribution.\n * @param dtype\n * @param seed\n * @param name Optional name.\n * @return The truncated-normal-random Variable.\n */\nexport function randomNormalVariable(shape, mean = 0.0, stddev = 1.0, dtype, seed, name = 'randomNormal') {\n dtype = dtype || 'float32';\n if (dtype !== 'float32' && dtype !== 'int32') {\n throw new NotImplementedError(`randomNormalVariable does not support dType ${dtype}.`);\n }\n return new LayerVariable(tfc.randomNormal(shape, mean, stddev, dtype, seed), dtype, name);\n}\n/**\n * Update the value of a Variable.\n * @param x The Variable to be updated.\n * @param xNew The new value to update to.\n * @return The Variable updated.\n */\nexport function update(x, xNew) {\n return x.write(xNew);\n}\n/**\n * Update the value of a Variable by adding an increment.\n * @param x The Variable to be updated.\n * @param increment The incrment to add to `x`.\n * @return The Variable updated.\n */\nexport function updateAdd(x, increment) {\n return x.write(tfc.add(x.read(), increment));\n}\n/**\n * Update the value of a Variable by subtracting a decrement.\n * @param x The Variable to be updated.\n * @param decrement The decrement to subtract from `x`.\n * @return The Variable updated.\n */\nexport function updateSub(x, decrement) {\n return x.write(tfc.sub(x.read(), decrement));\n}\n/**\n * Get the values of an array of Variables.\n *\n * @param tensors An `Array` of `Variable`s to get the values of.\n 
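The `LayerVariable` read/write and `updateAdd` semantics described above are internal to tfjs-layers, but they mirror the public `tf.variable` API; a minimal sketch under that assumption (names are illustrative):

```js
const tf = require('@tensorflow/tfjs');

// A mutable variable, analogous to the tf.variable backing a LayerVariable.
const v = tf.variable(tf.zeros([2, 2]), /* trainable */ true, 'demoVar', 'float32');

// updateAdd-style write: assign(read() + increment).
v.assign(tf.add(v, tf.ones([2, 2])));
v.print();      // all ones

// Releasing the underlying tensor, as LayerVariable.dispose() does.
v.dispose();
```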
* @return The values of the inputs, as an `Array` of`tf.Tensor`s.\n */\nexport function batchGetValue(xs) {\n return xs.map(x => x.read());\n}\n/**\n * Update the value of multiple Variables at once.\n *\n * @param variablesAndValues An `Array`, each element is of type\n * [Variable, Tensor]. The first item is the\n * `Variable` of which the value is to be updated. The second item\n * carries the new value.\n */\nexport function batchSetValue(variablesAndValues) {\n variablesAndValues.forEach(variableAndValue => {\n const variable = variableAndValue[0];\n variable.write(variableAndValue[1]);\n });\n}\n/**\n * Returns the gradients of `variables` w.r.t. the return value of `lossFn`.\n * @param lossFn A function which returns a Scalar to be used as the function\n * value (i.e., numerator) for differentiation.\n * @param variables List of variables to be used as the independent variables\n * (i.e., denominator) for differentiation.\n * @returns An Array of gradients tensors.\n */\nexport function gradients(lossFn, variables) {\n // TODO(cais): The return type signature can be simplified if deeplearn makes\n // the corresponding type public.\n const variableList = variables.map(variable => variable.read());\n const valudAndGrads = variableGrads(lossFn, variableList);\n return variables.map(variable => valudAndGrads.grads[variable.name]);\n}\n//# sourceMappingURL=variables.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* Original source: keras/engine/topology.py */\nimport { serialization, tidy, util } from '@tensorflow/tfjs-core';\nimport { getNextUniqueTensorId, getUid } from '../backend/state';\nimport { getScopedTensorName, getUniqueTensorName, nameScope } from '../common';\nimport { AttributeError, NotImplementedError, RuntimeError, ValueError } from '../errors';\nimport { getInitializer } from '../initializers';\nimport * as generic_utils from '../utils/generic_utils';\nimport * as types_utils from '../utils/types_utils';\nimport * as variable_utils from '../utils/variable_utils';\nimport { batchGetValue, batchSetValue, LayerVariable } from '../variables';\n/**\n * Specifies the ndim, dtype and shape of every input to a layer.\n *\n * Every layer should expose (if appropriate) an `inputSpec` attribute:\n * a list of instances of InputSpec (one per input tensor).\n *\n * A null entry in a shape is compatible with any dimension,\n * a null shape is compatible with any shape.\n */\nexport class InputSpec {\n constructor(args) {\n this.dtype = args.dtype;\n this.shape = args.shape;\n /*\n TODO(michaelterry): Could throw error if ndim and shape are both defined\n (then backport).\n */\n if (args.shape != null) {\n this.ndim = args.shape.length;\n }\n else {\n this.ndim = args.ndim;\n }\n this.maxNDim = args.maxNDim;\n this.minNDim = args.minNDim;\n this.axes = args.axes || {};\n }\n}\n/**\n * `tf.SymbolicTensor` is a placeholder for a Tensor without any concrete value.\n *\n * They are most often encountered when building a graph of `Layer`s for a\n * a `tf.LayersModel` and the input data's shape, but not values are known.\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\nexport class SymbolicTensor {\n /**\n *\n * @param dtype\n * @param shape\n * @param sourceLayer The Layer that produced this symbolic tensor.\n * @param 
inputs The inputs passed to sourceLayer's __call__() method.\n * @param nodeIndex\n * @param tensorIndex\n * @param callArgs The keyword arguments passed to the __call__() method.\n * @param name\n * @param outputTensorIndex The index of this tensor in the list of outputs\n * returned by apply().\n */\n constructor(dtype, shape, sourceLayer, inputs, callArgs, name, outputTensorIndex) {\n this.dtype = dtype;\n this.shape = shape;\n this.sourceLayer = sourceLayer;\n this.inputs = inputs;\n this.callArgs = callArgs;\n this.outputTensorIndex = outputTensorIndex;\n this.id = getNextUniqueTensorId();\n if (name != null) {\n this.originalName = getScopedTensorName(name);\n this.name = getUniqueTensorName(this.originalName);\n }\n this.rank = shape.length;\n }\n}\nlet _nextNodeID = 0;\n/**\n * A `Node` describes the connectivity between two layers.\n *\n * Each time a layer is connected to some new input,\n * a node is added to `layer.inboundNodes`.\n *\n * Each time the output of a layer is used by another layer,\n * a node is added to `layer.outboundNodes`.\n *\n * `nodeIndices` and `tensorIndices` are basically fine-grained coordinates\n * describing the origin of the `inputTensors`, verifying the following:\n *\n * `inputTensors[i] ==\n * inboundLayers[i].inboundNodes[nodeIndices[i]].outputTensors[\n * tensorIndices[i]]`\n *\n * A node from layer A to layer B is added to:\n * A.outboundNodes\n * B.inboundNodes\n */\nexport class Node {\n constructor(args, \n // TODO(michaelterry): Define actual type for this.\n callArgs) {\n this.callArgs = callArgs;\n this.id = _nextNodeID++;\n /*\n Layer instance (NOT a list).\n this is the layer that takes a list of input tensors\n and turns them into a list of output tensors.\n the current node will be added to\n the inboundNodes of outboundLayer.\n */\n this.outboundLayer = args.outboundLayer;\n /*\n The following 3 properties describe where\n the input tensors come from: which layers,\n and for each layer, which node and which\n tensor output of each node.\n */\n // List of layer instances.\n this.inboundLayers = args.inboundLayers;\n // List of integers, 1:1 mapping with inboundLayers.\n this.nodeIndices = args.nodeIndices;\n // List of integers, 1:1 mapping with inboundLayers.\n this.tensorIndices = args.tensorIndices;\n /*\n Following 2 properties:\n tensor inputs and outputs of outboundLayer.\n */\n // List of tensors. 1:1 mapping with inboundLayers.\n this.inputTensors = args.inputTensors;\n // List of tensors, created by outboundLayer.call().\n this.outputTensors = args.outputTensors;\n /*\n Following 2 properties: input and output masks.\n List of tensors, 1:1 mapping with inputTensor.\n */\n this.inputMasks = args.inputMasks;\n // List of tensors, created by outboundLayer.computeMask().\n this.outputMasks = args.outputMasks;\n // Following 2 properties: input and output shapes.\n // List of shape tuples, shapes of inputTensors.\n this.inputShapes = args.inputShapes;\n // List of shape tuples, shapes of outputTensors.\n this.outputShapes = args.outputShapes;\n // Add nodes to all layers involved.\n for (const layer of args.inboundLayers) {\n if (layer != null) {\n layer.outboundNodes.push(this);\n }\n }\n args.outboundLayer.inboundNodes.push(this);\n }\n getConfig() {\n const inboundNames = [];\n for (const layer of this.inboundLayers) {\n if (layer != null) {\n inboundNames.push(layer.name);\n }\n else {\n inboundNames.push(null);\n }\n }\n return {\n outboundLayer: this.outboundLayer ? 
this.outboundLayer.name : null,\n inboundLayers: inboundNames,\n nodeIndices: this.nodeIndices,\n tensorIndices: this.tensorIndices\n };\n }\n}\nlet _nextLayerID = 0;\n/**\n * A layer is a grouping of operations and weights that can be composed to\n * create a `tf.LayersModel`.\n *\n * Layers are constructed by using the functions under the\n * [tf.layers](#Layers-Basic) namespace.\n *\n * @doc {heading: 'Layers', subheading: 'Classes', namespace: 'layers'}\n */\nexport class Layer extends serialization.Serializable {\n constructor(args = {}) {\n super();\n this._callHook = null;\n this._addedWeightNames = [];\n // Porting Notes: PyKeras does not have this property in this base Layer\n // class. Instead lets Layer subclass set it dynamically and checks the\n // value with `hasattr`. In tfjs-layers, we let this be a member of this\n // base class.\n this._stateful = false;\n this.id = _nextLayerID++;\n this.activityRegularizer = null;\n this.inputSpec = null;\n this.supportsMasking = false;\n // These properties will be set upon call of this.build()\n this._trainableWeights = [];\n this._nonTrainableWeights = [];\n this._losses = [];\n this._updates = [];\n this._built = false;\n /*\n These lists will be filled via successive calls\n to this.addInboundNode().\n */\n this.inboundNodes = [];\n this.outboundNodes = [];\n let name = args.name;\n if (!name) {\n const prefix = this.getClassName();\n name = generic_utils.toSnakeCase(prefix) + '_' + getUid(prefix);\n }\n this.name = name;\n this.trainable_ = args.trainable == null ? true : args.trainable;\n if (args.inputShape != null || args.batchInputShape != null) {\n /*\n In this case we will later create an input layer\n to insert before the current layer\n */\n let batchInputShape;\n if (args.batchInputShape != null) {\n batchInputShape = args.batchInputShape;\n }\n else if (args.inputShape != null) {\n let batchSize = null;\n if (args.batchSize != null) {\n batchSize = args.batchSize;\n }\n batchInputShape = [batchSize].concat(args.inputShape);\n }\n this.batchInputShape = batchInputShape;\n // Set dtype.\n let dtype = args.dtype;\n if (dtype == null) {\n dtype = args.inputDType;\n }\n if (dtype == null) {\n dtype = 'float32';\n }\n this.dtype = dtype;\n }\n if (args.weights != null) {\n this.initialWeights = args.weights;\n }\n else {\n this.initialWeights = null;\n }\n // The value of `_refCount` is initialized to null. When the layer is used\n // in a symbolic way for the first time, it will be set to 1.\n this._refCount = null;\n this.fastWeightInitDuringBuild = false;\n }\n /**\n * Converts a layer and its index to a unique (immutable type) name.\n * This function is used internally with `this.containerNodes`.\n * @param layer The layer.\n * @param nodeIndex The layer's position (e.g. 
via enumerate) in a list of\n * nodes.\n *\n * @returns The unique name.\n */\n static nodeKey(layer, nodeIndex) {\n return layer.name + '_ib-' + nodeIndex.toString();\n }\n /**\n * Returns this.inboundNode at index nodeIndex.\n *\n * Porting note: This is a replacement for _get_node_attribute_at_index()\n * @param nodeIndex\n * @param attrName The name of the attribute related to request for this node.\n */\n getNodeAtIndex(nodeIndex, attrName) {\n if (this.inboundNodes.length === 0) {\n throw new RuntimeError('The layer has never been called ' +\n `and thus has no defined ${attrName}.`);\n }\n if (this.inboundNodes.length <= nodeIndex) {\n throw new ValueError(`Asked to get ${attrName} at node ${nodeIndex}, ` +\n `but the layer has only ${this.inboundNodes.length} inbound nodes.`);\n }\n return this.inboundNodes[nodeIndex];\n }\n /**\n * Retrieves the input tensor(s) of a layer at a given node.\n *\n * @param nodeIndex Integer, index of the node from which to retrieve the\n * attribute. E.g. `nodeIndex=0` will correspond to the first time the layer\n * was called.\n *\n * @return A tensor (or list of tensors if the layer has multiple inputs).\n */\n getInputAt(nodeIndex) {\n return generic_utils.singletonOrArray(this.getNodeAtIndex(nodeIndex, 'input').inputTensors);\n }\n /**\n * Retrieves the output tensor(s) of a layer at a given node.\n *\n * @param nodeIndex Integer, index of the node from which to retrieve the\n * attribute. E.g. `nodeIndex=0` will correspond to the first time the layer\n * was called.\n *\n * @return A tensor (or list of tensors if the layer has multiple outputs).\n */\n getOutputAt(nodeIndex) {\n return generic_utils.singletonOrArray(this.getNodeAtIndex(nodeIndex, 'output').outputTensors);\n }\n // Properties\n /**\n * Retrieves the input tensor(s) of a layer.\n *\n * Only applicable if the layer has exactly one inbound node,\n * i.e. if it is connected to one incoming layer.\n *\n * @return Input tensor or list of input tensors.\n *\n * @exception AttributeError if the layer is connected to more than one\n * incoming layers.\n */\n get input() {\n if (this.inboundNodes.length > 1) {\n throw new AttributeError(`Layer ${this.name}` +\n ' has multiple inbound nodes, ' +\n 'hence the notion of \"layer input\" ' +\n 'is ill-defined. ' +\n 'Use `getInputAt(nodeIndex)` instead.');\n }\n else if (this.inboundNodes.length === 0) {\n throw new AttributeError(`Layer ${this.name}` +\n ' is not connected, no input to return.');\n }\n return generic_utils.singletonOrArray(this.getNodeAtIndex(0, 'input').inputTensors);\n }\n /**\n * Retrieves the output tensor(s) of a layer.\n *\n * Only applicable if the layer has exactly one inbound node,\n * i.e. if it is connected to one incoming layer.\n *\n * @return Output tensor or list of output tensors.\n *\n * @exception AttributeError if the layer is connected to more than one\n * incoming layers.\n */\n get output() {\n if (this.inboundNodes.length === 0) {\n throw new AttributeError(`Layer ${this.name}` +\n ' has no inbound nodes.');\n }\n if (this.inboundNodes.length > 1) {\n throw new AttributeError(`Layer ${this.name}` +\n ' has multiple inbound nodes, ' +\n 'hence the notion of \"layer output\" ' +\n 'is ill-defined. 
' +\n 'Use `getOutputAt(nodeIndex)` instead.');\n }\n return generic_utils.singletonOrArray(this.getNodeAtIndex(0, 'output').outputTensors);\n }\n get losses() {\n return this._losses;\n }\n /**\n * Retrieves the Layer's current loss values.\n *\n * Used for regularizers during training.\n */\n calculateLosses() {\n // Porting Node: This is an augmentation to Layer.loss in PyKeras.\n // In PyKeras, Layer.loss returns symbolic tensors. Here a concrete\n // Tensor (specifically Scalar) values are returned. This is due to the\n // imperative backend.\n return this.losses.map(lossFn => lossFn());\n }\n get updates() {\n return this._updates;\n }\n get built() {\n return this._built;\n }\n set built(built) {\n this._built = built;\n }\n get trainable() {\n return this.trainable_;\n }\n set trainable(trainable) {\n this._trainableWeights.forEach(w => w.trainable = trainable);\n this.trainable_ = trainable;\n }\n get trainableWeights() {\n if (this.trainable_) {\n return this._trainableWeights.filter(w => w.trainable);\n }\n else {\n return [];\n }\n }\n set trainableWeights(weights) {\n this._trainableWeights = weights;\n }\n get nonTrainableWeights() {\n if (this.trainable) {\n return this._trainableWeights.filter(w => !w.trainable)\n .concat(this._nonTrainableWeights);\n }\n else {\n return this._trainableWeights.concat(this._nonTrainableWeights);\n }\n }\n set nonTrainableWeights(weights) {\n this._nonTrainableWeights = weights;\n }\n /**\n * The concatenation of the lists trainableWeights and nonTrainableWeights\n * (in this order).\n */\n get weights() {\n return this.trainableWeights.concat(this.nonTrainableWeights);\n }\n get stateful() {\n return this._stateful;\n }\n /**\n * Reset the states of the layer.\n *\n * This method of the base Layer class is essentially a no-op.\n * Subclasses that are stateful (e.g., stateful RNNs) should override this\n * method.\n */\n resetStates() {\n if (!this.stateful) {\n throw new Error('Cannot call the resetStates() method of a non-stateful Layer ' +\n 'object.');\n }\n }\n /**\n * Checks compatibility between the layer and provided inputs.\n *\n * This checks that the tensor(s) `input`\n * verify the input assumptions of the layer\n * (if any). If not, exceptions are raised.\n *\n * @param inputs Input tensor or list of input tensors.\n *\n * @exception ValueError in case of mismatch between\n * the provided inputs and the expectations of the layer.\n */\n assertInputCompatibility(inputs) {\n inputs = generic_utils.toList(inputs);\n if (this.inputSpec == null || this.inputSpec.length === 0) {\n return;\n }\n const inputSpec = generic_utils.toList(this.inputSpec);\n if (inputs.length !== inputSpec.length) {\n throw new ValueError(`Layer ${this.name} expects ${inputSpec.length} inputs, ` +\n `but it received ${inputs.length} input tensors. 
` +\n `Input received: ${inputs}`);\n }\n for (let inputIndex = 0; inputIndex < inputs.length; inputIndex++) {\n const x = inputs[inputIndex];\n const spec = inputSpec[inputIndex];\n if (spec == null) {\n continue;\n }\n // Check ndim.\n const ndim = x.rank;\n if (spec.ndim != null) {\n if (ndim !== spec.ndim) {\n throw new ValueError(`Input ${inputIndex} is incompatible with layer ${this.name}: ` +\n `expected ndim=${spec.ndim}, found ndim=${ndim}`);\n }\n }\n if (spec.maxNDim != null) {\n if (ndim > spec.maxNDim) {\n throw new ValueError(`Input ${inputIndex} is incompatible with layer ${this.name}` +\n `: expected max_ndim=${spec.maxNDim}, found ndim=${ndim}`);\n }\n }\n if (spec.minNDim != null) {\n if (ndim < spec.minNDim) {\n throw new ValueError(`Input ${inputIndex} is incompatible with layer ${this.name}` +\n `: expected min_ndim=${spec.minNDim}, found ndim=${ndim}.`);\n }\n }\n // Check dtype.\n if (spec.dtype != null) {\n if (x.dtype !== spec.dtype) {\n throw new ValueError(`Input ${inputIndex} is incompatible with layer ${this.name} ` +\n `: expected dtype=${spec.dtype}, found dtype=${x.dtype}.`);\n }\n }\n // Check specific shape axes.\n if (spec.axes) {\n const xShape = x.shape;\n for (const key in spec.axes) {\n const axis = Number(key);\n const value = spec.axes[key];\n // Perform Python-style slicing in case axis < 0;\n // TODO(cais): Use https://github.com/alvivi/typescript-underscore to\n // ensure type safety through Underscore calls.\n const xShapeAtAxis = axis >= 0 ? xShape[axis] : xShape[xShape.length + axis];\n if (value != null && [value, null].indexOf(xShapeAtAxis) === -1) {\n throw new ValueError(`Input ${inputIndex} is incompatible with layer ` +\n `${this.name}: expected axis ${axis} of input shape to ` +\n `have value ${value} but got shape ${xShape}.`);\n }\n }\n }\n // Check shape.\n if (spec.shape != null) {\n for (let i = 0; i < spec.shape.length; ++i) {\n const specDim = spec.shape[i];\n const dim = x.shape[i];\n if (specDim != null && dim != null) {\n if (specDim !== dim) {\n throw new ValueError(`Input ${inputIndex} is incompatible with layer ` +\n `${this.name}: expected shape=${spec.shape}, ` +\n `found shape=${x.shape}.`);\n }\n }\n }\n }\n }\n }\n /**\n * This is where the layer's logic lives.\n *\n * @param inputs Input tensor, or list/tuple of input tensors.\n * @param kwargs Additional keyword arguments.\n *\n * @return A tensor or list/tuple of tensors.\n */\n call(inputs, kwargs) {\n return inputs;\n }\n invokeCallHook(inputs, kwargs) {\n if (this._callHook != null) {\n this._callHook(inputs, kwargs);\n }\n }\n /**\n * Set call hook.\n * This is currently used for testing only.\n * @param callHook\n */\n setCallHook(callHook) {\n this._callHook = callHook;\n }\n /**\n * Clear call hook.\n * This is currently used for testing only.\n */\n clearCallHook() {\n this._callHook = null;\n }\n /**\n * Builds or executes a `Layer's logic.\n *\n * When called with `tf.Tensor`(s), execute the `Layer`s computation and\n * return Tensor(s). 
For example:\n *\n * ```js\n * const denseLayer = tf.layers.dense({\n * units: 1,\n * kernelInitializer: 'zeros',\n * useBias: false\n * });\n *\n * // Invoke the layer's apply() method with a `tf.Tensor` (with concrete\n * // numeric values).\n * const input = tf.ones([2, 2]);\n * const output = denseLayer.apply(input);\n *\n * // The output's value is expected to be [[0], [0]], due to the fact that\n * // the dense layer has a kernel initialized to all-zeros and does not have\n * // a bias.\n * output.print();\n * ```\n *\n * When called with `tf.SymbolicTensor`(s), this will prepare the layer for\n * future execution. This entails internal book-keeping on shapes of\n * expected Tensors, wiring layers together, and initializing weights.\n *\n * Calling `apply` with `tf.SymbolicTensor`s are typically used during the\n * building of non-`tf.Sequential` models. For example:\n *\n * ```js\n * const flattenLayer = tf.layers.flatten();\n * const denseLayer = tf.layers.dense({units: 1});\n *\n * // Use tf.layers.input() to obtain a SymbolicTensor as input to apply().\n * const input = tf.input({shape: [2, 2]});\n * const output1 = flattenLayer.apply(input);\n *\n * // output1.shape is [null, 4]. The first dimension is the undetermined\n * // batch size. The second dimension comes from flattening the [2, 2]\n * // shape.\n * console.log(JSON.stringify(output1.shape));\n *\n * // The output SymbolicTensor of the flatten layer can be used to call\n * // the apply() of the dense layer:\n * const output2 = denseLayer.apply(output1);\n *\n * // output2.shape is [null, 1]. The first dimension is the undetermined\n * // batch size. The second dimension matches the number of units of the\n * // dense layer.\n * console.log(JSON.stringify(output2.shape));\n *\n * // The input and output and be used to construct a model that consists\n * // of the flatten and dense layers.\n * const model = tf.model({inputs: input, outputs: output2});\n * ```\n *\n * @param inputs a `tf.Tensor` or `tf.SymbolicTensor` or an Array of them.\n * @param kwargs Additional keyword arguments to be passed to `call()`.\n *\n * @return Output of the layer's `call` method.\n *\n * @exception ValueError error in case the layer is missing shape information\n * for its `build` call.\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n // Porting Note: This is a replacement for __call__() in Python.\n apply(inputs, kwargs) {\n kwargs = kwargs || {};\n this.assertNotDisposed();\n // Ensure inputs are all the same type.\n const inputsList = generic_utils.toList(inputs);\n let allAreSymbolic = true;\n for (const input of inputsList) {\n if (!(input instanceof SymbolicTensor)) {\n allAreSymbolic = false;\n break;\n }\n }\n let noneAreSymbolic = true;\n for (const input of inputsList) {\n if (input instanceof SymbolicTensor) {\n noneAreSymbolic = false;\n break;\n }\n }\n if (allAreSymbolic === noneAreSymbolic) {\n throw new ValueError('Arguments to apply() must be all ' +\n 'SymbolicTensors or all Tensors');\n }\n // TODO(michaelterry): nameScope() may not be necessary.\n return nameScope(this.name, () => {\n // Handle laying building (weight creating, input spec locking).\n if (!this.built) {\n /*\n Throw exceptions in case the input is not compatible\n with the inputSpec specified in the layer constructor.\n */\n this.assertInputCompatibility(inputs);\n // Collect input shapes to build layer.\n const inputShapes = [];\n for (const xElem of generic_utils.toList(inputs)) {\n inputShapes.push(xElem.shape);\n }\n 
this.build(generic_utils.singletonOrArray(inputShapes));\n this.built = true;\n // Load weights that were specified at layer instantiation.\n if (this.initialWeights) {\n this.setWeights(this.initialWeights);\n }\n if (this._refCount === null && noneAreSymbolic) {\n // The first use of this layer is a non-symbolic call, set ref count\n // to 1 so the Layer can be properly disposed if its dispose() method\n // is called.\n this._refCount = 1;\n }\n }\n /*\n Throw exceptions in case the input is not compatible\n with the inputSpec set at build time.\n */\n this.assertInputCompatibility(inputs);\n // Handle mask propagation.\n // TODO(michaelterry): Mask propagation not currently implemented.\n // Actually call the layer, collecting output(s), mask(s), and shape(s).\n if (noneAreSymbolic) {\n let output = this.call(inputs, kwargs);\n // TODO(michaelterry): Compute the outputMask\n // If the layer returns tensors from its inputs, unmodified,\n // we copy them to avoid loss of tensor metadata.\n const outputList = generic_utils.toList(output);\n const outputListCopy = [];\n // TODO(michaelterry): This copying may not be necessary given our eager\n // backend.\n for (let x of outputList) {\n if (inputsList.indexOf(x) !== -1) {\n x = x.clone();\n }\n outputListCopy.push(x);\n }\n output = generic_utils.singletonOrArray(outputListCopy);\n if (this.activityRegularizer != null) {\n throw new NotImplementedError('Layer invocation in the presence of activity ' +\n 'regularizer(s) is not supported yet.');\n }\n // TODO(michaelterry): Call addInboundNode()?\n return output;\n }\n else {\n const inputShape = collectInputShape(inputs);\n const outputShape = this.computeOutputShape(inputShape);\n let output;\n const outputDType = guessOutputDType(inputs);\n this.warnOnIncompatibleInputShape(Array.isArray(inputs) ? inputShape[0] :\n inputShape);\n if (outputShape != null && outputShape.length > 0 &&\n Array.isArray(outputShape[0])) {\n // We have multiple output shapes. 
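A hedged sketch of the first-call build path shown above, using the public layer API from `@tensorflow/tfjs`: applying a concrete tensor creates the weights once and flips `built`:

```js
const tf = require('@tensorflow/tfjs');

const dense = tf.layers.dense({ units: 2 });
console.log(dense.built);               // false: no weights yet

// First apply() with a concrete tensor triggers build() and weight creation.
const y = dense.apply(tf.ones([1, 3]));
console.log(dense.built);               // true
y.print();                              // shape [1, 2], values from the fresh kernel/bias
```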
Create multiple output tensors.\n output = outputShape\n .map((shape, index) => new SymbolicTensor(outputDType, shape, this, generic_utils.toList(inputs), kwargs, this.name, index));\n }\n else {\n output = new SymbolicTensor(outputDType, outputShape, this, generic_utils.toList(inputs), kwargs, this.name);\n }\n /*\n Add an inbound node to the layer, so that it keeps track\n of the call and of all new variables created during the call.\n This also updates the layer history of the output tensor(s).\n If the input tensor(s) had no previous history,\n this does nothing.\n */\n this.addInboundNode(inputs, output, null, null, inputShape, outputShape, kwargs);\n this._refCount++;\n if (this.activityRegularizer != null) {\n throw new NotImplementedError('Layer invocation in the presence of activity ' +\n 'regularizer(s) is not supported yet.');\n }\n return output;\n }\n });\n }\n /**\n * Check compatibility between input shape and this layer's batchInputShape.\n *\n * Print warning if any incompatibility is found.\n *\n * @param inputShape Input shape to be checked.\n */\n warnOnIncompatibleInputShape(inputShape) {\n if (this.batchInputShape == null) {\n return;\n }\n else if (inputShape.length !== this.batchInputShape.length) {\n console.warn(`The rank of the input tensor provided (shape: ` +\n `${JSON.stringify(inputShape)}) does not match that of the ` +\n `batchInputShape (${JSON.stringify(this.batchInputShape)}) ` +\n `of the layer ${this.name}`);\n }\n else {\n let dimMismatch = false;\n this.batchInputShape.forEach((dimension, i) => {\n if (dimension != null && inputShape[i] != null &&\n inputShape[i] !== dimension) {\n dimMismatch = true;\n }\n });\n if (dimMismatch) {\n console.warn(`The shape of the input tensor ` +\n `(${JSON.stringify(inputShape)}) does not ` +\n `match the expectation of layer ${this.name}: ` +\n `${JSON.stringify(this.batchInputShape)}`);\n }\n }\n }\n /**\n * Retrieves the output shape(s) of a layer.\n *\n * Only applicable if the layer has only one inbound node, or if all inbound\n * nodes have the same output shape.\n *\n * @returns Output shape or shapes.\n * @throws AttributeError: if the layer is connected to more than one incoming\n * nodes.\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n get outputShape() {\n if (this.inboundNodes == null || this.inboundNodes.length === 0) {\n throw new AttributeError(`The layer ${this.name} has never been called and thus has no ` +\n `defined output shape.`);\n }\n const allOutputShapes = [];\n for (const node of this.inboundNodes) {\n const shapeString = JSON.stringify(node.outputShapes);\n if (allOutputShapes.indexOf(shapeString) === -1) {\n allOutputShapes.push(shapeString);\n }\n }\n if (allOutputShapes.length === 1) {\n const outputShapes = this.inboundNodes[0].outputShapes;\n if (Array.isArray(outputShapes) && Array.isArray(outputShapes[0]) &&\n outputShapes.length === 1) {\n return outputShapes[0];\n }\n else {\n return outputShapes;\n }\n }\n else {\n throw new AttributeError(`The layer ${this.name} has multiple inbound nodes with different ` +\n `output shapes. 
Hence the notion of \"output shape\" is ill-defined ` +\n `for the layer.`);\n // TODO(cais): Implement getOutputShapeAt().\n }\n }\n /**\n * Counts the total number of numbers (e.g., float32, int32) in the\n * weights.\n *\n * @returns An integer count.\n * @throws RuntimeError: If the layer is not built yet (in which case its\n * weights are not defined yet.)\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n countParams() {\n if (!this.built) {\n throw new RuntimeError(`You tried to call countParams() on ${this.name}, ` +\n `but the layer is not built yet. Build it first by calling ` +\n `build(batchInputShape).`);\n }\n return variable_utils.countParamsInWeights(this.weights);\n }\n /**\n * Creates the layer weights.\n *\n * Must be implemented on all layers that have weights.\n *\n * Called when apply() is called to construct the weights.\n *\n * @param inputShape A `Shape` or array of `Shape` (unused).\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n build(inputShape) {\n this.built = true;\n }\n /**\n * Returns the current values of the weights of the layer.\n *\n * @param trainableOnly Whether to get the values of only trainable weights.\n * @returns Weight values as an `Array` of `tf.Tensor`s.\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n getWeights(trainableOnly = false) {\n return batchGetValue(trainableOnly ? this.trainableWeights : this.weights);\n }\n /**\n * Sets the weights of the layer, from Tensors.\n *\n * @param weights a list of Tensors. The number of arrays and their shape\n * must match number of the dimensions of the weights of the layer (i.e.\n * it should match the output of `getWeights`).\n *\n * @exception ValueError If the provided weights list does not match the\n * layer's specifications.\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n setWeights(weights) {\n tidy(() => {\n const params = this.weights;\n if (params.length !== weights.length) {\n // TODO(cais): Restore the following and use `providedWeights`, instead\n // of `weights` in the error message, once the deeplearn.js bug is\n // fixed: https://github.com/PAIR-code/deeplearnjs/issues/498 const\n // providedWeights = JSON.stringify(weights).substr(0, 50);\n throw new ValueError(`You called setWeights(weights) on layer \"${this.name}\" ` +\n `with a weight list of length ${weights.length}, ` +\n `but the layer was expecting ${params.length} weights. 
` +\n `Provided weights: ${weights}...`);\n }\n if (params.length === 0) {\n return;\n }\n const weightValueTuples = [];\n const paramValues = batchGetValue(params);\n for (let i = 0; i < paramValues.length; ++i) {\n const pv = paramValues[i];\n const p = params[i];\n const w = weights[i];\n if (!util.arraysEqual(pv.shape, w.shape)) {\n throw new ValueError(`Layer weight shape ${pv.shape} ` +\n `not compatible with provided weight shape ${w.shape}`);\n }\n weightValueTuples.push([p, w]);\n }\n batchSetValue(weightValueTuples);\n });\n }\n /**\n * Adds a weight variable to the layer.\n *\n * @param name Name of the new weight variable.\n * @param shape The shape of the weight.\n * @param dtype The dtype of the weight.\n * @param initializer An initializer instance.\n * @param regularizer A regularizer instance.\n * @param trainable Whether the weight should be trained via backprop or not\n * (assuming that the layer itself is also trainable).\n * @param constraint An optional trainable.\n * @return The created weight variable.\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n addWeight(name, shape, dtype, initializer, regularizer, trainable, constraint) {\n // Reject duplicate weight names.\n if (this._addedWeightNames.indexOf(name) !== -1) {\n throw new ValueError(`Duplicate weight name ${name} for layer ${this.name}`);\n }\n this._addedWeightNames.push(name);\n if (dtype == null) {\n dtype = 'float32';\n }\n if (this.fastWeightInitDuringBuild) {\n initializer = getInitializer('zeros');\n }\n const initValue = initializer.apply(shape, dtype);\n const weight = new LayerVariable(initValue, dtype, name, trainable, constraint);\n initValue.dispose();\n // Request backend not to dispose the weights of the model on scope() exit.\n if (regularizer != null) {\n this.addLoss(() => regularizer.apply(weight.read()));\n }\n if (trainable == null) {\n trainable = true;\n }\n if (trainable) {\n this._trainableWeights.push(weight);\n }\n else {\n this._nonTrainableWeights.push(weight);\n }\n return weight;\n }\n /**\n * Set the fast-weight-initialization flag.\n *\n * In cases where the initialized weight values will be immediately\n * overwritten by loaded weight values during model loading, setting\n * the flag to `true` saves unnecessary calls to potentially expensive\n * initializers and speeds up the loading process.\n *\n * @param value Target value of the flag.\n */\n setFastWeightInitDuringBuild(value) {\n this.fastWeightInitDuringBuild = value;\n }\n /**\n * Add losses to the layer.\n *\n * The loss may potentionally be conditional on some inputs tensors,\n * for instance activity losses are conditional on the layer's inputs.\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n addLoss(losses) {\n if (losses == null || Array.isArray(losses) && losses.length === 0) {\n return;\n }\n // Update this.losses\n losses = generic_utils.toList(losses);\n if (this._losses !== undefined && this._losses !== null) {\n this.losses.push(...losses);\n }\n }\n /**\n * Computes the output shape of the layer.\n *\n * Assumes that the layer will be built to match that input shape provided.\n *\n * @param inputShape A shape (tuple of integers) or a list of shape tuples\n * (one per output tensor of the layer). 
Shape tuples can include null for\n * free dimensions, instead of an integer.\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n computeOutputShape(inputShape) {\n return inputShape;\n }\n /**\n * Computes an output mask tensor.\n *\n * @param inputs Tensor or list of tensors.\n * @param mask Tensor or list of tensors.\n *\n * @return null or a tensor (or list of tensors, one per output tensor of the\n * layer).\n */\n computeMask(inputs, mask) {\n if (!this.supportsMasking) {\n if (mask != null) {\n if (Array.isArray(mask)) {\n mask.forEach(maskElement => {\n if (maskElement != null) {\n throw new TypeError(`Layer ${this.name} does not support masking, ` +\n 'but was passed an inputMask.');\n }\n });\n }\n else {\n throw new TypeError(`Layer ${this.name} does not support masking, ` +\n 'but was passed an inputMask.');\n }\n }\n // masking not explicitly supported: return null as mask\n return null;\n }\n // if masking is explictly supported, by default\n // carry over the input mask\n return mask;\n }\n /**\n * Internal method to create an inbound node for the layer.\n *\n * @param inputTensors List of input tensors.\n * @param outputTensors List of output tensors.\n * @param inputMasks List of input masks (a mask can be a tensor, or null).\n * @param outputMasks List of output masks (a mask can be a tensor, or null).\n * @param inputShapes List of input shape tuples.\n * @param outputShapes List of output shape tuples.\n * @param kwargs Dictionary of keyword arguments that were passed to the\n * `call` method of the layer at the call that created the node.\n */\n addInboundNode(inputTensors, outputTensors, inputMasks, outputMasks, inputShapes, outputShapes, kwargs = null) {\n const inputTensorList = generic_utils.toList(inputTensors);\n outputTensors = generic_utils.toList(outputTensors);\n inputMasks = generic_utils.toList(inputMasks);\n outputMasks = generic_utils.toList(outputMasks);\n inputShapes = types_utils.normalizeShapeList(inputShapes);\n outputShapes = types_utils.normalizeShapeList(outputShapes);\n // Collect input tensor(s) coordinates.\n const inboundLayers = [];\n const nodeIndices = [];\n const tensorIndices = [];\n for (const x of inputTensorList) {\n /*\n * TODO(michaelterry): Keras adds this value to tensors; it's not\n * clear whether we'll use this or not.\n */\n inboundLayers.push(x.sourceLayer);\n nodeIndices.push(x.nodeIndex);\n tensorIndices.push(x.tensorIndex);\n }\n // Create node, add it to inbound nodes.\n // (This call has side effects.)\n // tslint:disable-next-line:no-unused-expression\n new Node({\n outboundLayer: this,\n inboundLayers,\n nodeIndices,\n tensorIndices,\n inputTensors: inputTensorList,\n outputTensors,\n inputMasks,\n outputMasks,\n inputShapes,\n outputShapes\n }, kwargs);\n // Update tensor history\n for (let i = 0; i < outputTensors.length; i++) {\n // TODO(michaelterry: _uses_learning_phase not tracked.\n outputTensors[i].sourceLayer = this;\n outputTensors[i].nodeIndex = this.inboundNodes.length - 1;\n outputTensors[i].tensorIndex = i;\n }\n }\n /**\n * Returns the config of the layer.\n *\n * A layer config is a TS dictionary (serializable)\n * containing the configuration of a layer.\n * The same layer can be reinstantiated later\n * (without its trained weights) from this configuration.\n *\n * The config of a layer does not include connectivity\n * information, nor the layer class name. 
These are handled\n * by 'Container' (one layer of abstraction above).\n *\n * Porting Note: The TS dictionary follows TS naming standrds for\n * keys, and uses tfjs-layers type-safe Enums. Serialization methods\n * should use a helper function to convert to the pythonic storage\n * standard. (see serialization_utils.convertTsToPythonic)\n *\n * @returns TS dictionary of configuration.\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n getConfig() {\n const config = { name: this.name, trainable: this.trainable };\n if (this.batchInputShape != null) {\n config['batchInputShape'] = this.batchInputShape;\n }\n if (this.dtype != null) {\n config['dtype'] = this.dtype;\n }\n return config;\n }\n /**\n * Dispose the weight variables that this Layer instance holds.\n *\n * @returns {number} Number of disposed variables.\n */\n disposeWeights() {\n this.weights.forEach(weight => weight.dispose());\n return this.weights.length;\n }\n assertNotDisposed() {\n if (this._refCount === 0) {\n throw new Error(`Layer '${this.name}' is already disposed.`);\n }\n }\n /**\n * Attempt to dispose layer's weights.\n *\n * This method decrease the reference count of the Layer object by 1.\n *\n * A Layer is reference-counted. Its reference count is incremented by 1\n * the first item its `apply()` method is called and when it becomes a part\n * of a new `Node` (through calling the `apply()`) method on a\n * `tf.SymbolicTensor`).\n *\n * If the reference count of a Layer becomes 0, all the weights will be\n * disposed and the underlying memory (e.g., the textures allocated in WebGL)\n * will be freed.\n *\n * Note: If the reference count is greater than 0 after the decrement, the\n * weights of the Layer will *not* be disposed.\n *\n * After a Layer is disposed, it cannot be used in calls such as `apply()`,\n * `getWeights()` or `setWeights()` anymore.\n *\n * @returns A DisposeResult Object with the following fields:\n * - refCountAfterDispose: The reference count of the Container after this\n * `dispose()` call.\n * - numDisposedVariables: Number of `tf.Variable`s (i.e., weights) disposed\n * during this `dispose()` call.\n * @throws {Error} If the layer is not built yet, or if the layer has already\n * been disposed.\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n dispose() {\n if (!this.built) {\n throw new Error(`Cannot dispose Layer ${this.name} because it has not been ` +\n `built yet.`);\n }\n if (this._refCount === null) {\n throw new Error(`Cannot dispose Layer ${this.name} because it has not been used ` +\n `yet.`);\n }\n this.assertNotDisposed();\n let numDisposedVariables = 0;\n if (--this._refCount === 0) {\n numDisposedVariables = this.disposeWeights();\n }\n return { refCountAfterDispose: this._refCount, numDisposedVariables };\n }\n}\n/**\n * Collects the input shape(s) of a list of `tf.Tensor`s or\n * `tf.SymbolicTensor`s.\n *\n * TODO(michaelterry): Update PyKeras docs (backport).\n *\n * @param inputTensors List of input tensors (or single input tensor).\n *\n * @return List of shape tuples (or single tuple), one tuple per input.\n */\nfunction collectInputShape(inputTensors) {\n inputTensors =\n generic_utils.toList(inputTensors);\n const shapes = [];\n for (const x of inputTensors) {\n shapes.push(x.shape);\n }\n return generic_utils.singletonOrArray(shapes);\n}\n/**\n * Guesses output dtype based on inputs.\n *\n * At present, just returns 'float32' for any input.\n *\n * @param inputTensors List of input tensors (or single input tensor).\n *\n * @return The 
guessed DType. At present, always returns 'float32'.\n */\nfunction guessOutputDType(inputTensors) {\n return 'float32';\n}\n/**\n * Returns the list of input tensors necessary to compute `tensor`.\n *\n * Output will always be a list of tensors (potentially with 1 element).\n *\n * @param tensor The tensor to start from.\n * @param layer Origin layer of the tensor.\n * @param nodeIndex Origin node index of the tensor.\n *\n * @return Array of input tensors.\n */\nexport function getSourceInputs(tensor, layer, nodeIndex) {\n if (layer == null || (nodeIndex != null && nodeIndex > 0)) {\n layer = tensor.sourceLayer;\n nodeIndex = tensor.nodeIndex;\n }\n if (layer.inboundNodes.length === 0) {\n return [tensor];\n }\n else {\n const node = layer.inboundNodes[nodeIndex];\n if (node.inboundLayers.length === 0) {\n return node.inputTensors;\n }\n else {\n const sourceTensors = [];\n for (let i = 0; i < node.inboundLayers.length; i++) {\n const x = node.inputTensors[i];\n const layer = node.inboundLayers[i];\n const nodeIndex = node.nodeIndices[i];\n const previousSources = getSourceInputs(x, layer, nodeIndex);\n // Avoid input redundancy.\n for (const x of previousSources) {\n if (sourceTensors.indexOf(x) === -1) {\n sourceTensors.push(x);\n }\n }\n }\n return sourceTensors;\n }\n }\n}\n//# sourceMappingURL=topology.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport { serialization } from '@tensorflow/tfjs-core';\nimport { getUid } from '../backend/state';\nimport { ValueError } from '../errors';\nimport { Layer, Node, SymbolicTensor } from './topology';\nexport class InputLayer extends Layer {\n constructor(args) {\n super({\n dtype: args.dtype,\n name: args.name != null ? 
args.name : getUid('input').toString()\n });\n // Normalize config.batchSize and config.sparse\n if (args.batchSize == null) {\n args.batchSize = null;\n }\n if (args.sparse == null) {\n args.sparse = false;\n }\n this.trainable = false;\n this.built = true;\n this.sparse = args.sparse;\n if (args.inputShape != null && args.batchInputShape != null) {\n throw new ValueError('Only provide the inputShape OR ' +\n 'batchInputShape argument to inputLayer, not both at the same time.');\n }\n let batchInputShape = args.batchInputShape;\n if (batchInputShape == null) {\n if (args.inputShape == null) {\n throw new ValueError('An InputLayer should be passed either a ' +\n '`batchInputShape` or an `inputShape`.');\n }\n else {\n batchInputShape = [args.batchSize].concat(args.inputShape);\n }\n }\n else {\n // TODO(michaelterry): Backport to PyKeras\n if (args.batchSize != null) {\n throw new ValueError('Cannot specify batchSize if batchInputShape is ' +\n 'specified when creating an InputLayer.');\n }\n }\n const dtype = args.dtype || 'float32';\n this.batchInputShape = batchInputShape;\n this.dtype = dtype;\n // TODO(michaelterry): Backport this to PyKeras?\n this.inputSpec = [{ shape: batchInputShape }];\n const inputTensor = new SymbolicTensor(this.dtype, this.batchInputShape, this, [], {}, this.name);\n inputTensor.nodeIndex = 0;\n inputTensor.tensorIndex = 0;\n // Create an input node to add to this.outboundNode.\n // (This call has side effects.)\n // tslint:disable-next-line:no-unused-expression\n new Node({\n outboundLayer: this,\n inboundLayers: [],\n nodeIndices: [],\n tensorIndices: [],\n inputTensors: [inputTensor],\n outputTensors: [inputTensor],\n inputMasks: [null],\n outputMasks: [null],\n inputShapes: [batchInputShape],\n outputShapes: [batchInputShape]\n });\n }\n apply(inputs, kwargs) {\n throw new ValueError('Cannot pass any input to an ' +\n `InputLayer's apply() method. InputLayer name: ${this.name}`);\n }\n dispose() {\n // dispose() for InputLayer is overridden as no-op.\n return { refCountAfterDispose: this._refCount, numDisposedVariables: 0 };\n }\n getConfig() {\n return {\n batchInputShape: this.batchInputShape,\n dtype: this.dtype,\n sparse: this.sparse,\n name: this.name\n };\n }\n}\n/** @nocollapse */\nInputLayer.className = 'InputLayer';\nserialization.registerClass(InputLayer);\nexport function Input(config) {\n if (config.batchShape == null && config.shape == null) {\n throw new Error('Please provide to Input either a `shape`' +\n ' or a `batchShape` argument. 
Note that ' +\n '`shape` does not include the batch ' +\n 'dimension.');\n }\n if (config.batchShape != null && config.shape != null) {\n // TODO(michaelterry): Backport to PyKeras.\n throw new ValueError('Please provide either a `shape` or `batchShape` ' +\n 'argument to Input, but not both.');\n }\n let batchShape = config.batchShape;\n if (config.shape != null && batchShape == null) {\n batchShape = [null].concat(config.shape);\n }\n let dtype = config.dtype;\n if (dtype == null) {\n dtype = 'float32';\n }\n const inputLayer = new InputLayer({\n batchInputShape: batchShape,\n name: config.name,\n dtype,\n sparse: config.sparse\n });\n const outputs = inputLayer.inboundNodes[0].outputTensors;\n return outputs[0];\n}\n//# sourceMappingURL=input_layer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport { dispose } from '@tensorflow/tfjs-core';\n/**\n * Turn any Scalar values in a Logs object into actual number values.\n *\n * @param logs The `Logs` object to be resolved in place.\n */\nexport async function resolveScalarsInLogs(logs) {\n if (logs == null) {\n return;\n }\n const promises = [];\n const keys = [];\n const scalarsToDispose = [];\n for (const key in logs) {\n const value = logs[key];\n if (typeof value !== 'number') {\n const valueScalar = value;\n promises.push(valueScalar.data());\n keys.push(key);\n scalarsToDispose.push(valueScalar);\n }\n }\n if (promises.length > 0) {\n const values = await Promise.all(promises);\n for (let i = 0; i < values.length; ++i) {\n logs[keys[i]] = values[i][0];\n }\n // Dispose the original scalar tensors.\n dispose(scalarsToDispose);\n }\n}\n/**\n * Dispose all Tensors in an UnresolvedLogs object.\n *\n * @param logs An `UnresolvedLogs` object potentially containing `tf.Tensor`s in\n * places where the values can be `tf.Tensor` or `number`.\n */\nexport function disposeTensorsInLogs(logs) {\n if (logs == null) {\n return;\n }\n for (const key in logs) {\n const value = logs[key];\n if (typeof value !== 'number') {\n value.dispose();\n }\n }\n}\n//# sourceMappingURL=logs.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* Original source: keras/callbacks.py */\nimport { add, div, keep, mul, nextFrame, tidy, util } from '@tensorflow/tfjs-core';\nimport { ValueError } from './errors';\nimport { resolveScalarsInLogs } from './logs';\nimport * as generic_utils from './utils/generic_utils';\n/** Verbosity logging level when fitting a model. */\nexport var ModelLoggingVerbosity;\n(function (ModelLoggingVerbosity) {\n ModelLoggingVerbosity[ModelLoggingVerbosity[\"SILENT\"] = 0] = \"SILENT\";\n ModelLoggingVerbosity[ModelLoggingVerbosity[\"VERBOSE\"] = 1] = \"VERBOSE\";\n})(ModelLoggingVerbosity || (ModelLoggingVerbosity = {}));\n/** How often to yield to the main thread when training (in ms). 
*/\nexport const DEFAULT_YIELD_EVERY_MS = 125;\n/**\n * Abstract base class used to build new callbacks.\n *\n * The `logs` dictionary that callback methods take as argument will contain\n * keys for quantities relevant to the current batch or epoch.\n *\n * Currently, the `.fit()` method of the `Sequential` model class\n * will include the following quantities in the `logs` that\n * it passes to its callbacks:\n *\n * onEpochEnd: Logs include `acc` and `loss`, and optionally include `valLoss`\n * (if validation is enabled in `fit`), and `valAcc` (if validation and\n * accuracy monitoring are enabled).\n * onBatchBegin: Logs include `size`, the number of samples in the current\n * batch.\n * onBatchEnd: Logs include `loss`, and optionally `acc` (if accuracy monitoring\n * is enabled).\n */\nexport class BaseCallback {\n constructor() {\n // TODO(michaelterry): This type is a best guess.\n this.validationData = null;\n }\n setParams(params) {\n this.params = params;\n }\n async onEpochBegin(epoch, logs) { }\n async onEpochEnd(epoch, logs) { }\n async onBatchBegin(batch, logs) { }\n async onBatchEnd(batch, logs) { }\n async onTrainBegin(logs) { }\n async onTrainEnd(logs) { }\n // LayersModel needs to call Callback.setModel(), but cannot actually depend\n // on Callback because that creates a cyclic dependency. Providing this no-op\n // method on BaseCallback breaks the cycle: this way LayersModel can depend on\n // BaseCallback but not on Callback. The argument is typed as `Container`\n // (the superclass of LayersModel) to avoid recapitulating the cycle. Callback\n // overrides this method and enforces that the argument is really a\n // LayersModel.\n setModel(model) {\n // Do nothing. Use Callback instead of BaseCallback to track the model.\n }\n}\n/**\n * Container abstracting a list of callbacks.\n */\nexport class CallbackList {\n // TODO(cais): When the need arises, uncomment the following lines and\n // implement the queue for time values.\n // private deltaTBatch: number;\n // private deltaTsBatchBegin: Array;\n // private deltaTsBatchEnd: Array;\n /**\n * Constructor of CallbackList.\n * @param callbacks Array of `Callback` instances.\n * @param queueLength Queue length for keeping running statistics over\n * callback execution time.\n */\n constructor(callbacks, queueLength = 10) {\n // TODO(cais): Make use of queueLength when implementing the queue for time\n // values.\n if (callbacks == null) {\n callbacks = [];\n }\n this.callbacks = callbacks;\n this.queueLength = queueLength;\n }\n append(callback) {\n this.callbacks.push(callback);\n }\n setParams(params) {\n for (const callback of this.callbacks) {\n callback.setParams(params);\n }\n }\n setModel(model) {\n for (const callback of this.callbacks) {\n callback.setModel(model);\n }\n }\n /**\n * Called at the start of an epoch.\n * @param epoch Index of epoch.\n * @param logs Dictionary of logs.\n */\n async onEpochBegin(epoch, logs) {\n if (logs == null) {\n logs = {};\n }\n for (const callback of this.callbacks) {\n await callback.onEpochBegin(epoch, logs);\n }\n }\n /**\n * Called at the end of an epoch.\n * @param epoch Index of epoch.\n * @param logs Dictionary of logs.\n */\n async onEpochEnd(epoch, logs) {\n if (logs == null) {\n logs = {};\n }\n for (const callback of this.callbacks) {\n await callback.onEpochEnd(epoch, logs);\n }\n }\n /**\n * Called right before processing a batch.\n * @param batch Index of batch within the current epoch.\n * @param logs Dictionary of logs.\n */\n async onBatchBegin(batch, logs) {\n 
if (logs == null) {\n logs = {};\n }\n for (const callback of this.callbacks) {\n await callback.onBatchBegin(batch, logs);\n }\n }\n /**\n * Called at the end of a batch.\n * @param batch Index of batch within the current epoch.\n * @param logs Dictionary of logs.\n */\n async onBatchEnd(batch, logs) {\n if (logs == null) {\n logs = {};\n }\n for (const callback of this.callbacks) {\n await callback.onBatchEnd(batch, logs);\n }\n }\n /**\n * Called at the beginning of training.\n * @param logs Dictionary of logs.\n */\n async onTrainBegin(logs) {\n if (logs == null) {\n logs = {};\n }\n for (const callback of this.callbacks) {\n await callback.onTrainBegin(logs);\n }\n }\n /**\n * Called at the end of training.\n * @param logs Dictionary of logs.\n */\n async onTrainEnd(logs) {\n if (logs == null) {\n logs = {};\n }\n for (const callback of this.callbacks) {\n await callback.onTrainEnd(logs);\n }\n }\n}\n/**\n * Callback that accumulates epoch averages of metrics.\n *\n * This callback is automatically applied to every LayersModel.\n */\nexport class BaseLogger extends BaseCallback {\n constructor() {\n super();\n }\n async onEpochBegin(epoch) {\n this.seen = 0;\n this.totals = {};\n }\n async onBatchEnd(batch, logs) {\n if (logs == null) {\n logs = {};\n }\n const batchSize = logs['size'] == null ? 0 : logs['size'];\n this.seen += batchSize;\n for (const key in logs) {\n const value = logs[key];\n if (typeof value === 'number') {\n if (!this.totals.hasOwnProperty(key)) {\n this.totals[key] = 0;\n }\n this.totals[key] = this.totals[key] + value * batchSize;\n }\n else {\n let oldTotalsToDispose;\n if (key in this.totals) {\n oldTotalsToDispose = this.totals[key];\n }\n else {\n this.totals[key] = 0;\n }\n const total = tidy(() => add((this.totals[key]), mul(value, batchSize)));\n this.totals[key] = total;\n if (oldTotalsToDispose != null) {\n oldTotalsToDispose.dispose();\n }\n }\n }\n }\n async onEpochEnd(epoch, logs) {\n if (logs != null) {\n for (const key of this.params['metrics']) {\n if (this.totals[key] == null) {\n continue;\n }\n if (typeof this.totals[key] === 'number') {\n logs[key] = this.totals[key] / this.seen;\n }\n else {\n tidy(() => {\n const log = mul(div(1, this.seen), this.totals[key]);\n logs[key] = log;\n this.totals[key].dispose();\n keep(logs[key]);\n });\n }\n }\n }\n }\n}\n/**\n * Callback that records events into a `History` object. This callback is\n * automatically applied to every TF.js Layers model. 
The `History` object\n * gets returned by the `fit` method of models.\n */\nexport class History extends BaseCallback {\n async onTrainBegin(logs) {\n this.epoch = [];\n this.history = {};\n }\n async onEpochEnd(epoch, logs) {\n if (logs == null) {\n logs = {};\n }\n this.epoch.push(epoch);\n for (const key in logs) {\n if (this.history[key] == null) {\n this.history[key] = [];\n }\n this.history[key].push(logs[key]);\n }\n }\n /**\n * Await the values of all losses and metrics.\n */\n async syncData() {\n const promises = [];\n const keys = [];\n const indices = [];\n for (const key in this.history) {\n const valueArray = this.history[key];\n for (let i = 0; i < valueArray.length; ++i) {\n if (typeof valueArray[i] !== 'number') {\n const valueScalar = valueArray[i];\n promises.push(valueScalar.data());\n keys.push(key);\n indices.push(i);\n }\n }\n }\n const values = await Promise.all(promises);\n for (let n = 0; n < values.length; ++n) {\n const tensorToDispose = this.history[keys[n]][indices[n]];\n tensorToDispose.dispose();\n this.history[keys[n]][indices[n]] = values[n][0];\n }\n }\n}\n/**\n * Custom callback for training.\n */\nexport class CustomCallback extends BaseCallback {\n constructor(args, yieldEvery) {\n super();\n this.currentEpoch = 0;\n this.yieldEvery = yieldEvery || 'auto';\n if (this.yieldEvery === 'auto') {\n this.yieldEvery = DEFAULT_YIELD_EVERY_MS;\n }\n if (this.yieldEvery === 'never' && args.onYield != null) {\n throw new Error('yieldEvery is `never` but you provided an `onYield` callback. ' +\n 'Either change `yieldEvery` or remove the callback');\n }\n if (util.isNumber(this.yieldEvery)) {\n // Decorate `maybeWait` so it will be called at most once every\n // `yieldEvery` ms.\n this.maybeWait = generic_utils.debounce(this.maybeWait.bind(this), this.yieldEvery);\n }\n this.trainBegin = args.onTrainBegin;\n this.trainEnd = args.onTrainEnd;\n this.epochBegin = args.onEpochBegin;\n this.epochEnd = args.onEpochEnd;\n this.batchBegin = args.onBatchBegin;\n this.batchEnd = args.onBatchEnd;\n this.yield = args.onYield;\n }\n async maybeWait(epoch, batch, logs) {\n const ps = [];\n if (this.yield != null) {\n await resolveScalarsInLogs(logs);\n ps.push(this.yield(epoch, batch, logs));\n }\n ps.push(nextFrame());\n await Promise.all(ps);\n }\n async onEpochBegin(epoch, logs) {\n this.currentEpoch = epoch;\n if (this.epochBegin != null) {\n await resolveScalarsInLogs(logs);\n await this.epochBegin(epoch, logs);\n }\n }\n async onEpochEnd(epoch, logs) {\n const ps = [];\n if (this.epochEnd != null) {\n await resolveScalarsInLogs(logs);\n ps.push(this.epochEnd(epoch, logs));\n }\n if (this.yieldEvery === 'epoch') {\n ps.push(nextFrame());\n }\n await Promise.all(ps);\n }\n async onBatchBegin(batch, logs) {\n if (this.batchBegin != null) {\n await resolveScalarsInLogs(logs);\n await this.batchBegin(batch, logs);\n }\n }\n async onBatchEnd(batch, logs) {\n const ps = [];\n if (this.batchEnd != null) {\n await resolveScalarsInLogs(logs);\n ps.push(this.batchEnd(batch, logs));\n }\n if (this.yieldEvery === 'batch') {\n ps.push(nextFrame());\n }\n else if (util.isNumber(this.yieldEvery)) {\n ps.push(this.maybeWait(this.currentEpoch, batch, logs));\n }\n await Promise.all(ps);\n }\n async onTrainBegin(logs) {\n if (this.trainBegin != null) {\n await resolveScalarsInLogs(logs);\n await this.trainBegin(logs);\n }\n }\n async onTrainEnd(logs) {\n if (this.trainEnd != null) {\n await resolveScalarsInLogs(logs);\n await this.trainEnd(logs);\n }\n }\n}\n/**\n * Standardize callbacks or 
configurations of them to an Array of callbacks.\n */\nexport function standardizeCallbacks(callbacks, yieldEvery) {\n if (callbacks == null) {\n callbacks = {};\n }\n if (callbacks instanceof BaseCallback) {\n return [callbacks];\n }\n if (Array.isArray(callbacks) && callbacks[0] instanceof BaseCallback) {\n return callbacks;\n }\n // Convert custom callback configs to custom callback objects.\n const callbackConfigs = generic_utils.toList(callbacks);\n return callbackConfigs.map(callbackConfig => new CustomCallback(callbackConfig, yieldEvery));\n}\n/**\n * A global registry for callback constructors to be used during\n * LayersModel.fit().\n */\nexport class CallbackConstructorRegistry {\n /**\n * Blocks public access to constructor.\n */\n constructor() { }\n /**\n * Register a tf.LayersModel.fit() callback constructor.\n *\n * The registered callback constructor will be used to instantiate\n * callbacks for every tf.LayersModel.fit() call afterwards.\n *\n * @param verbosityLevel Level of verbosity at which the `callbackConstructor`\n * is to be reigstered.\n * @param callbackConstructor A no-arg constructor for `tf.Callback`.\n * @throws Error, if the same callbackConstructor has been registered before,\n * either at the same or a different `verbosityLevel`.\n */\n static registerCallbackConstructor(verbosityLevel, callbackConstructor) {\n util.assert(verbosityLevel >= 0 && Number.isInteger(verbosityLevel), () => `Verbosity level is expected to be an integer >= 0, ` +\n `but got ${verbosityLevel}`);\n CallbackConstructorRegistry.checkForDuplicate(callbackConstructor);\n if (CallbackConstructorRegistry.constructors[verbosityLevel] == null) {\n CallbackConstructorRegistry.constructors[verbosityLevel] = [];\n }\n CallbackConstructorRegistry.constructors[verbosityLevel].push(callbackConstructor);\n }\n static checkForDuplicate(callbackConstructor) {\n for (const levelName in CallbackConstructorRegistry.constructors) {\n const constructors = CallbackConstructorRegistry.constructors[+levelName];\n constructors.forEach(ctor => {\n if (ctor === callbackConstructor) {\n throw new ValueError('Duplicate callback constructor.');\n }\n });\n }\n }\n /**\n * Clear all registered callback constructors.\n */\n static clear() {\n CallbackConstructorRegistry.constructors = {};\n }\n /**\n * Create callbacks using the registered callback constructors.\n *\n * Given `verbosityLevel`, all constructors registered at that level or above\n * will be called and the instantiated callbacks will be used.\n *\n * @param verbosityLevel: Level of verbosity.\n */\n static createCallbacks(verbosityLevel) {\n const constructors = [];\n for (const levelName in CallbackConstructorRegistry.constructors) {\n const level = +levelName;\n if (verbosityLevel >= level) {\n constructors.push(...CallbackConstructorRegistry.constructors[level]);\n }\n }\n return constructors.map(ctor => new ctor());\n }\n}\nCallbackConstructorRegistry.constructors = {};\nexport function configureCallbacks(callbacks, verbose, epochs, initialEpoch, numTrainSamples, stepsPerEpoch, batchSize, doValidation, callbackMetrics) {\n const history = new History();\n const actualCallbacks = [\n new BaseLogger(), ...CallbackConstructorRegistry.createCallbacks(verbose)\n ];\n if (callbacks != null) {\n actualCallbacks.push(...callbacks);\n }\n actualCallbacks.push(history);\n const callbackList = new CallbackList(actualCallbacks);\n // TODO(cais): Figure out when this LayersModel instance can have a\n // dynamically\n // set property called 'callback_model' as 
in PyKeras.\n callbackList.setParams({\n epochs,\n initialEpoch,\n samples: numTrainSamples,\n steps: stepsPerEpoch,\n batchSize,\n verbose,\n doValidation,\n metrics: callbackMetrics,\n });\n return { callbackList, history };\n}\n//# sourceMappingURL=base_callbacks.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* Original Source layers/__init__.py */\nimport { serialization } from '@tensorflow/tfjs-core';\nimport { deserializeKerasObject } from '../utils/generic_utils';\n/**\n * Instantiate a layer from a config dictionary.\n * @param config dict of the form {class_name: str, config: dict}\n * @param customObjects dict mapping class names (or function names)\n * of custom (non-Keras) objects to class/functions\n * @param fastWeightInit Optional flag to use fast weight initialization\n * during deserialization. This is applicable to cases in which\n * the initialization will be immediately overwritten by loaded weight\n * values. Default: `false`.\n * @returns Layer instance (may be LayersModel, Sequential, Layer...)\n */\nexport function deserialize(config, customObjects = {}, fastWeightInit = false) {\n return deserializeKerasObject(config, serialization.SerializationMap.getMap().classNameMap, customObjects, 'layer', fastWeightInit);\n}\n//# sourceMappingURL=serialization.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* Original Source: losses.py */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { tidy, util } from '@tensorflow/tfjs-core';\nimport { epsilon } from './backend/common';\nimport * as K from './backend/tfjs_backend';\nimport { ValueError } from './errors';\n/**\n * Normalizes a tensor wrt the L2 norm alongside the specified axis.\n * @param x\n * @param axis Axis along which to perform normalization.\n */\nexport function l2Normalize(x, axis) {\n return tidy(() => {\n if (x.dtype !== 'float32') {\n x = x.asType('float32');\n }\n const squareSum = tfc.sum(K.square(x), axis, true);\n const epsilonTensor = tfc.fill(squareSum.shape, epsilon());\n const norm = tfc.sqrt(tfc.maximum(squareSum, epsilonTensor));\n return tfc.div(x, norm);\n });\n}\nexport function meanSquaredError(yTrue, yPred) {\n return tidy(() => tfc.mean(K.square(tfc.sub(yPred, yTrue)), -1));\n}\nexport function meanAbsoluteError(yTrue, yPred) {\n return tidy(() => tfc.mean(tfc.abs(tfc.sub(yPred, yTrue)), -1));\n}\nexport function meanAbsolutePercentageError(yTrue, yPred) {\n return tidy(() => {\n const diff = tfc.sub(yTrue, yPred);\n const clippedTrue = tfc.clipByValue(tfc.abs(yTrue), epsilon(), Number.MAX_VALUE);\n const absResult = tfc.abs(tfc.div(diff, clippedTrue));\n return tfc.mul(100, tfc.mean(absResult, -1));\n });\n}\nexport function meanSquaredLogarithmicError(yTrue, yPred) {\n return tidy(() => {\n const clippedPred = tfc.clipByValue(yPred, epsilon(), Number.MAX_VALUE);\n const firstLog = tfc.log(tfc.add(1, clippedPred));\n const clippedTrue = tfc.clipByValue(yTrue, epsilon(), Number.MAX_VALUE);\n const secondLog = tfc.log(tfc.add(1, clippedTrue));\n return tfc.mean(K.square(tfc.sub(firstLog, 
secondLog)), -1);\n });\n}\nexport function squaredHinge(yTrue, yPred) {\n return tidy(() => {\n const maxResult = tfc.maximum(0, tfc.sub(1, tfc.mul(yTrue, yPred)));\n return tfc.mean(K.square(maxResult), -1);\n });\n}\nexport function hinge(yTrue, yPred) {\n return tidy(() => {\n const maxResult = tfc.maximum(0, tfc.sub(1, tfc.mul(yTrue, yPred)));\n return tfc.mean(maxResult, -1);\n });\n}\nexport function categoricalHinge(yTrue, yPred) {\n return tidy(() => {\n const pos = tfc.sum(tfc.mul(yTrue, yPred), -1);\n const neg = tfc.max(tfc.mul(tfc.sub(1, yTrue), yPred), -1);\n return tfc.maximum(0, tfc.add(1, tfc.sub(neg, pos)));\n });\n}\n/**\n * Logarithm of the hyperbolic cosine of the prediction error.\n *\n * `log(cosh(x))` is approximately equal to `(x ** 2) / 2` for small `x` and\n * to `abs(x) - log(2)` for large `x`. This means that 'logcosh' works mostly\n * like the mean squared error, but will not be so strongly affected by the\n * occasional wildly incorrect prediction.\n */\nexport function logcosh(yTrue, yPred) {\n return tidy(() => {\n const log2 = Math.log(2);\n const predictionDiff = tfc.sub(yPred, yTrue);\n const logcoshResult = tfc.sub(tfc.add(predictionDiff, tfc.softplus(tfc.mul(-2, predictionDiff))), log2);\n return tfc.mean(logcoshResult, -1);\n });\n}\nexport function categoricalCrossentropy(target, output, fromLogits = false) {\n return tidy(() => {\n if (fromLogits) {\n output = tfc.softmax(output);\n }\n else {\n // scale preds so that the class probabilities of each sample sum to 1.\n const outputSum = tfc.sum(output, output.shape.length - 1, true);\n output = tfc.div(output, outputSum);\n }\n output = tfc.clipByValue(output, epsilon(), 1 - epsilon());\n return tfc.neg(tfc.sum(tfc.mul(target.toFloat(), tfc.log(output)), output.shape.length - 1));\n });\n}\n/**\n * Categorical crossentropy with integer targets.\n *\n * @param target An integer tensor.\n * @param output A tensor resulting from a softmax (unless `fromLogits` is\n * `true`, in which case `output` is expected to be the logits).\n * @param fromLogits Boolean, whether `output` is the result of a softmax, or is\n * a tensor of logits.\n */\nexport function sparseCategoricalCrossentropy(target, output, fromLogits = false) {\n return tidy(() => {\n const flatTarget = tfc.floor(K.flatten(target)).toInt();\n output = tfc.clipByValue(output, epsilon(), 1 - epsilon());\n const outputShape = output.shape;\n const oneHotTarget = tfc.oneHot(flatTarget, outputShape[outputShape.length - 1])\n .reshape(outputShape);\n return categoricalCrossentropy(oneHotTarget, output, fromLogits);\n });\n}\n/**\n * From TensorFlow's implementation in nn_impl.py:\n *\n * For brevity, let `x = logits`, `z = labels`. 
The logistic loss is\n * z * -log(sigmoid(x)) + (1 - z) * -log(1 - sigmoid(x))\n * = z * -log(1 / (1 + exp(-x))) + (1 - z) * -log(exp(-x) / (1 + exp(-x)))\n * = z * log(1 + exp(-x)) + (1 - z) * (-log(exp(-x)) + log(1 + exp(-x)))\n * = z * log(1 + exp(-x)) + (1 - z) * (x + log(1 + exp(-x))\n * = (1 - z) * x + log(1 + exp(-x))\n * = x - x * z + log(1 + exp(-x))\n * For x < 0, to avoid overflow in exp(-x), we reformulate the above\n * x - x * z + log(1 + exp(-x))\n * = log(exp(x)) - x * z + log(1 + exp(-x))\n * = - x * z + log(1 + exp(x))\n * Hence, to ensure stability and avoid overflow, the implementation uses this\n * equivalent formulation\n * max(x, 0) - x * z + log(1 + exp(-abs(x)))\n *\n * @param labels The labels.\n * @param logits The logits.\n */\nexport function sigmoidCrossEntropyWithLogits(labels, logits) {\n if (!util.arraysEqual(labels.shape, logits.shape)) {\n throw new ValueError(`logits and labels must have the same shape, but got shapes ` +\n `${JSON.stringify(labels.shape)} and ${JSON.stringify(logits.shape)}`);\n }\n return tidy(() => {\n // The logistic loss formula from above is\n // x - x * z + log(1 + exp(-x))\n // For x < 0, a more numerically stable formula is\n // -x * z + log(1 + exp(x))\n // Note that these two expressions can be combined into the following:\n // max(x, 0) - x * z + log(1 + exp(-abs(x)))\n const reluLogits = logits.relu();\n const negAbsLogits = logits.abs().neg();\n return reluLogits.sub(logits.mul(labels)).add(negAbsLogits.exp().log1p());\n });\n}\nexport function binaryCrossentropy(yTrue, yPred) {\n return tidy(() => {\n let y;\n y = tfc.clipByValue(yPred, epsilon(), 1 - epsilon());\n y = tfc.log(tfc.div(y, tfc.sub(1, y)));\n return tfc.mean(sigmoidCrossEntropyWithLogits(yTrue, y), -1);\n });\n}\nexport function kullbackLeiblerDivergence(yTrue, yPred) {\n return tidy(() => {\n const clippedTrue = tfc.clipByValue(yTrue, epsilon(), 1);\n const clippedPred = tfc.clipByValue(yPred, epsilon(), 1);\n return tfc.sum(tfc.mul(yTrue, tfc.log(tfc.div(clippedTrue, clippedPred))), -1);\n });\n}\nexport function poisson(yTrue, yPred) {\n return tidy(() => {\n const logPred = tfc.log(tfc.add(epsilon(), yPred));\n return tfc.mean(tfc.sub(yPred, tfc.mul(yTrue, logPred)), -1);\n });\n}\nexport function cosineProximity(yTrue, yPred) {\n return tidy(() => {\n const trueNormalized = l2Normalize(yTrue, -1);\n const predNormalized = l2Normalize(yPred, -1);\n const trueXPred = tfc.mul(trueNormalized, predNormalized);\n return tfc.neg(tfc.sum(trueXPred, -1));\n });\n}\nexport const mse = meanSquaredError;\nexport const MSE = meanSquaredError;\nexport const mae = meanAbsoluteError;\nexport const MAE = meanAbsoluteError;\nexport const mape = meanAbsolutePercentageError;\nexport const MAPE = meanAbsolutePercentageError;\nexport const msle = meanSquaredLogarithmicError;\nexport const MSLE = meanSquaredLogarithmicError;\nexport const kld = kullbackLeiblerDivergence;\nexport const KLD = kullbackLeiblerDivergence;\nexport const cosine = cosineProximity;\n// TODO(michaelterry): Add deserialize() function.\nexport const lossesMap = {\n meanSquaredError,\n meanAbsoluteError,\n meanAbsolutePercentageError,\n meanSquaredLogarithmicError,\n squaredHinge,\n hinge,\n categoricalHinge,\n logcosh,\n categoricalCrossentropy,\n sparseCategoricalCrossentropy,\n binaryCrossentropy,\n kullbackLeiblerDivergence,\n poisson,\n cosineProximity\n};\n// Porting note: This diverges from the PyKeras implementation and may need to\n// change based on (de)serialization requirements.\nexport function 
get(identifierOrFn) {\n if (typeof identifierOrFn === 'string') {\n if (identifierOrFn in lossesMap) {\n return lossesMap[identifierOrFn];\n }\n let errMsg = `Unknown loss ${identifierOrFn}`;\n if (identifierOrFn.toLowerCase().includes('softmaxcrossentropy')) {\n errMsg = `Unknown loss ${identifierOrFn}. ` +\n 'Use \"categoricalCrossentropy\" as the string name for ' +\n 'tf.losses.softmaxCrossEntropy';\n }\n throw new ValueError(errMsg);\n }\n else {\n return identifierOrFn;\n }\n}\n//# sourceMappingURL=losses.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Built-in metrics.\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { tidy } from '@tensorflow/tfjs-core';\nimport * as K from './backend/tfjs_backend';\nimport { NotImplementedError, ValueError } from './errors';\nimport { categoricalCrossentropy as categoricalCrossentropyLoss, cosineProximity, meanAbsoluteError, meanAbsolutePercentageError, meanSquaredError, sparseCategoricalCrossentropy as sparseCategoricalCrossentropyLoss } from './losses';\nimport { binaryCrossentropy as lossBinaryCrossentropy } from './losses';\nimport { lossesMap } from './losses';\nimport * as util from './utils/generic_utils';\nexport function binaryAccuracy(yTrue, yPred) {\n return tidy(() => {\n const threshold = tfc.mul(.5, tfc.onesLike(yPred));\n const yPredThresholded = K.cast(tfc.greater(yPred, threshold), yTrue.dtype);\n return tfc.mean(tfc.equal(yTrue, yPredThresholded), -1);\n });\n}\nexport function categoricalAccuracy(yTrue, yPred) {\n return tidy(() => K.cast(tfc.equal(tfc.argMax(yTrue, -1), tfc.argMax(yPred, -1)), 'float32'));\n}\nfunction truePositives(yTrue, yPred) {\n return tidy(() => {\n return tfc.logicalAnd(yTrue.equal(1), yPred.equal(1)).sum().cast('float32');\n });\n}\nfunction falseNegatives(yTrue, yPred) {\n return tidy(() => {\n return tfc.logicalAnd(yTrue.equal(1), yPred.equal(0)).sum().cast('float32');\n });\n}\nfunction falsePositives(yTrue, yPred) {\n return tidy(() => {\n return tfc.logicalAnd(yTrue.equal(0), yPred.equal(1)).sum().cast('float32');\n });\n}\nexport function precision(yTrue, yPred) {\n return tidy(() => {\n const tp = truePositives(yTrue, yPred);\n const fp = falsePositives(yTrue, yPred);\n const denominator = tp.add(fp);\n return tfc.where(tfc.greater(denominator, 0), tp.div(denominator), 0)\n .cast('float32');\n });\n}\nexport function recall(yTrue, yPred) {\n return tidy(() => {\n const tp = truePositives(yTrue, yPred);\n const fn = falseNegatives(yTrue, yPred);\n const denominator = tp.add(fn);\n return tfc.where(tfc.greater(denominator, 0), tp.div(denominator), 0)\n .cast('float32');\n });\n}\nexport function binaryCrossentropy(yTrue, yPred) {\n return lossBinaryCrossentropy(yTrue, yPred);\n}\nexport function sparseCategoricalAccuracy(yTrue, yPred) {\n if (yTrue.rank === yPred.rank) {\n yTrue = yTrue.squeeze([yTrue.rank - 1]);\n }\n yPred = yPred.argMax(-1);\n if (yPred.dtype !== yTrue.dtype) {\n yPred = yPred.asType(yTrue.dtype);\n }\n return tfc.equal(yTrue, yPred).asType('float32');\n}\nexport function topKCategoricalAccuracy(yTrue, yPred) {\n throw new NotImplementedError();\n}\nexport function sparseTopKCategoricalAccuracy(yTrue, yPred) {\n throw new NotImplementedError();\n}\n// Aliases.\nexport const mse = meanSquaredError;\nexport const 
MSE = meanSquaredError;\nexport const mae = meanAbsoluteError;\nexport const MAE = meanAbsoluteError;\nexport const mape = meanAbsolutePercentageError;\nexport const MAPE = meanAbsolutePercentageError;\nexport const categoricalCrossentropy = categoricalCrossentropyLoss;\nexport const cosine = cosineProximity;\nexport const sparseCategoricalCrossentropy = sparseCategoricalCrossentropyLoss;\n// TODO(cais, nielsene): Add serialize().\nexport const metricsMap = {\n binaryAccuracy,\n categoricalAccuracy,\n precision,\n categoricalCrossentropy,\n sparseCategoricalCrossentropy,\n mse,\n MSE,\n mae,\n MAE,\n mape,\n MAPE,\n cosine\n};\nexport function get(identifier) {\n if (typeof identifier === 'string' && identifier in metricsMap) {\n return metricsMap[identifier];\n }\n else if (typeof identifier !== 'string' && identifier != null) {\n return identifier;\n }\n else {\n throw new ValueError(`Unknown metric ${identifier}`);\n }\n}\n/**\n * Get the shortcut function name.\n *\n * If the fn name is a string,\n * directly return the string name.\n * If the function is included in metricsMap or lossesMap,\n * return key of the map.\n * - If the function relative to multiple keys,\n * return the first found key as the function name.\n * - If the function exists in both lossesMap and metricsMap,\n * search lossesMap first.\n * If the function is not included in metricsMap or lossesMap,\n * return the function name.\n *\n * @param fn loss function, metric function, or short cut name.\n * @returns Loss or Metric name in string.\n */\nexport function getLossOrMetricName(fn) {\n util.assert(fn !== null, `Unknown LossOrMetricFn ${fn}`);\n if (typeof fn === 'string') {\n return fn;\n }\n else {\n let fnName;\n for (const key of Object.keys(lossesMap)) {\n if (lossesMap[key] === fn) {\n fnName = key;\n break;\n }\n }\n if (fnName !== undefined) {\n return fnName;\n }\n for (const key of Object.keys(metricsMap)) {\n if (metricsMap[key] === fn) {\n fnName = key;\n break;\n }\n }\n if (fnName !== undefined) {\n return fnName;\n }\n return fn.name;\n }\n}\n//# sourceMappingURL=metrics.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Optimizers.\n */\nimport { train } from '@tensorflow/tfjs-core';\nimport { epsilon } from './backend/common';\nimport { ValueError } from './errors';\n// Add (de)serialize()\n// Porting note: This diverges from the PyKeras implementation and may need to\n// change based on (de)serialization requirements.\nexport function getOptimizer(identifier) {\n const optimizerMap = {\n 'Adagrad': () => train.adagrad(0.01),\n 'Adadelta': () => train.adadelta(1, 0.95, epsilon()),\n 'Adam': () => train.adam(0.001, 0.9, 0.999, epsilon()),\n 'Adamax': () => train.adamax(0.002, 0.9, 0.999, epsilon(), 0),\n 'RMSProp': () => train.rmsprop(0.001, 0.9, 0, epsilon()),\n 'SGD': () => train.sgd(0.01)\n };\n optimizerMap['adagrad'] = optimizerMap['Adagrad'];\n optimizerMap['adadelta'] = optimizerMap['Adadelta'];\n optimizerMap['adam'] = optimizerMap['Adam'];\n optimizerMap['adamax'] = optimizerMap['Adamax'];\n optimizerMap['rmsprop'] = optimizerMap['RMSProp'];\n optimizerMap['sgd'] = optimizerMap['SGD'];\n if (identifier in optimizerMap) {\n return optimizerMap[identifier]();\n }\n throw new ValueError(`Unknown Optimizer ${identifier}`);\n}\n//# 
sourceMappingURL=optimizers.js.map", "/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/** Utility functions related to user-defined metadata. */\n// Maximum recommended serialized size for user-defined metadata.\n// Beyond this limit, a warning message will be printed during model loading and\n// saving.\nexport const MAX_USER_DEFINED_METADATA_SERIALIZED_LENGTH = 1 * 1024 * 1024;\n/**\n * Check validity of user-defined metadata.\n *\n * @param userDefinedMetadata\n * @param modelName Name of the model that the user-defined metadata belongs to.\n * Used during construction of error messages.\n * @param checkSize Whether to check the size of the metadata is under\n * recommended limit. Default: `false`. If `true`, will try stringify the\n * JSON object and print a console warning if the serialzied size is above the\n * limit.\n * @throws Error if `userDefinedMetadata` is not a plain JSON object.\n */\nexport function checkUserDefinedMetadata(userDefinedMetadata, modelName, checkSize = false) {\n if (userDefinedMetadata == null ||\n typeof userDefinedMetadata !== 'object' ||\n Object.getPrototypeOf(userDefinedMetadata) !== Object.prototype ||\n !plainObjectCheck(userDefinedMetadata)) {\n throw new Error('User-defined metadata is expected to be a JSON object, but is not.');\n }\n if (checkSize) {\n const out = JSON.stringify(userDefinedMetadata);\n if (out.length > MAX_USER_DEFINED_METADATA_SERIALIZED_LENGTH) {\n console.warn(`User-defined metadata of model \"${modelName}\" is too large in ` +\n `size (length=${out.length} when serialized). It is not ` +\n `recommended to store such large objects in user-defined metadata. ` +\n `Please make sure its serialized length is <= ` +\n `${MAX_USER_DEFINED_METADATA_SERIALIZED_LENGTH}.`);\n }\n }\n}\n/**\n * Check if an input is plain JSON object or any valid subfield of it.\n *\n * @param x The input to be checked.\n * @param assertObject Whether to assert `x` is a JSON object, i.e., reject\n * cases of arrays and primitives.\n * @return Returns `true` if and only if `x` is a plain JSON object,\n * a JSON-valid primitive including string, number, boolean and null,\n * or an array of the said types.\n */\n// tslint:disable-next-line:no-any\nexport function plainObjectCheck(x) {\n if (x === null) {\n // Note: typeof `null` is 'object', and `null` is valid in JSON.\n return true;\n }\n else if (typeof x === 'object') {\n if (Object.getPrototypeOf(x) === Object.prototype) {\n // `x` is a JavaScript object and its prototype is Object.\n const keys = Object.keys(x);\n for (const key of keys) {\n if (typeof key !== 'string') {\n // JSON keys must be strings.\n return false;\n }\n if (!plainObjectCheck(x[key])) { // Recursive call.\n return false;\n }\n }\n return true;\n }\n else {\n // `x` is a JavaScript object but its prototype is not Object.\n if (Array.isArray(x)) {\n // `x` is a JavaScript array.\n for (const item of x) {\n if (!plainObjectCheck(item)) { // Recursive call.\n return false;\n }\n }\n return true;\n }\n else {\n // `x` is a JavaScript object and its prototype is not Object,\n // and it's not an Array. 
I.e., it's a complex object such as\n // `Error` and `Date`.\n return false;\n }\n }\n }\n else {\n // `x` is not a JavaScript object or `null`.\n const xType = typeof x;\n return xType === 'string' || xType === 'number' || xType === 'boolean';\n }\n}\n//# sourceMappingURL=user_defined_metadata.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport { countParamsInWeights } from './variable_utils';\n/**\n * Print the summary of a LayersModel object.\n *\n * @param model tf.LayersModel instance.\n * @param lineLength Total length of printed lines. Set this to adapt to the\n * display to different terminal or console sizes.\n * @param positions Relative or absolute positions of log elements in each\n * line. Each number corresponds to right-most (i.e., ending) position of a\n * column.\n * If not provided, defaults to `[0.45, 0.85, 1]` for sequential-like\n * models and `[0.33, 0.55, 0.67, 1]` for non-sequential like models.\n * @param printFn Print function to use.\n * It will be called on each line of the summary. You can provide a custom\n * function in order to capture the string summary. Defaults to `console.log`.\n */\nexport function printSummary(model, lineLength, positions, \n// tslint:disable-next-line:no-any\nprintFn = console.log) {\n const sequentialLike = isModelSequentialLike(model);\n // Header names for different log elements.\n const toDisplay = ['Layer (type)', 'Output shape', 'Param #'];\n if (sequentialLike) {\n lineLength = lineLength || 65;\n positions = positions || [0.45, 0.85, 1];\n }\n else {\n lineLength = lineLength || 98;\n positions = positions || [0.33, 0.55, 0.67, 1];\n // Header names for different log elements.\n }\n if (positions[positions.length - 1] <= 1) {\n // `positions` is relative. Convert it to absolute positioning.\n positions = positions.map(p => Math.floor(lineLength * p));\n }\n let relevantNodes;\n if (!sequentialLike) {\n toDisplay.push('Receives inputs');\n relevantNodes = [];\n for (const depth in model.nodesByDepth) {\n relevantNodes.push(...model.nodesByDepth[depth]);\n }\n }\n printFn('_'.repeat(lineLength));\n printRow(toDisplay, positions, printFn);\n printFn('='.repeat(lineLength));\n const layers = model.layers;\n for (let i = 0; i < layers.length; ++i) {\n if (sequentialLike) {\n printLayerSummary(layers[i], positions, printFn);\n }\n else {\n printLayerSummaryWithConnections(layers[i], positions, relevantNodes, printFn);\n }\n printFn((i === layers.length - 1 ? 
'=' : '_').repeat(lineLength));\n }\n // tslint:disable-next-line:no-any\n model.checkTrainableWeightsConsistency();\n const trainableCount = countTrainableParams(model);\n const nonTrainableCount = countParamsInWeights(model.nonTrainableWeights);\n printFn(`Total params: ${trainableCount + nonTrainableCount}`);\n printFn(`Trainable params: ${trainableCount}`);\n printFn(`Non-trainable params: ${nonTrainableCount}`);\n printFn('_'.repeat(lineLength));\n}\nfunction countTrainableParams(model) {\n let trainableCount;\n // tslint:disable:no-any\n if (model.collectedTrainableWeights != null) {\n trainableCount =\n countParamsInWeights(model.collectedTrainableWeights);\n }\n else {\n trainableCount = countParamsInWeights(model.trainableWeights);\n }\n // tslint:enable:no-any\n return trainableCount;\n}\nfunction isModelSequentialLike(model) {\n let sequentialLike = true;\n const nodesByDepth = [];\n const nodes = [];\n for (const depth in model.nodesByDepth) {\n nodesByDepth.push(model.nodesByDepth[depth]);\n }\n for (const depthNodes of nodesByDepth) {\n if (depthNodes.length > 1 ||\n depthNodes.length === 1 && depthNodes[0].inboundLayers.length > 1) {\n sequentialLike = false;\n break;\n }\n nodes.push(...depthNodes);\n }\n if (sequentialLike) {\n // Search for shared layers.\n for (const layer of model.layers) {\n let flag = false;\n for (const node of layer.inboundNodes) {\n if (nodes.indexOf(node) !== -1) {\n if (flag) {\n sequentialLike = false;\n break;\n }\n else {\n flag = true;\n }\n }\n }\n if (!sequentialLike) {\n break;\n }\n }\n }\n return sequentialLike;\n}\nfunction printRow(fields, positions, \n// tslint:disable-next-line:no-any\nprintFn = console.log) {\n let line = '';\n for (let i = 0; i < fields.length; ++i) {\n if (i > 0) {\n line = line.slice(0, line.length - 1) + ' ';\n }\n line += fields[i];\n line = line.slice(0, positions[i]);\n line += ' '.repeat(positions[i] - line.length);\n }\n printFn(line);\n}\n/**\n * Prints a summary for a single Layer, without connectivity information.\n *\n * @param layer: Layer instance to print.\n */\nfunction printLayerSummary(layer, positions, \n// tslint:disable-next-line:no-any\nprintFn) {\n let outputShape;\n try {\n outputShape = JSON.stringify(layer.outputShape);\n }\n catch (err) {\n outputShape = 'multiple';\n }\n const name = layer.name;\n const className = layer.getClassName();\n const fields = [`${name} (${className})`, outputShape, layer.countParams().toString()];\n printRow(fields, positions, printFn);\n}\n/**\n * Prints a summary for a single Layer, with connectivity information.\n */\nfunction printLayerSummaryWithConnections(layer, positions, relevantNodes, \n// tslint:disable-next-line:no-any\nprintFn) {\n let outputShape;\n try {\n outputShape = JSON.stringify(layer.outputShape);\n }\n catch (err) {\n outputShape = 'multiple';\n }\n const connections = [];\n for (const node of layer.inboundNodes) {\n if (relevantNodes != null && relevantNodes.length > 0 &&\n relevantNodes.indexOf(node) === -1) {\n continue;\n }\n for (let i = 0; i < node.inboundLayers.length; ++i) {\n const inboundLayer = node.inboundLayers[i].name;\n const inboundLayerIndex = node.nodeIndices[i];\n const inboundTensorIndex = node.tensorIndices[i];\n connections.push(`${inboundLayer}[${inboundLayerIndex}][${inboundTensorIndex}]`);\n }\n }\n const name = layer.name;\n const className = layer.getClassName();\n const firstConnection = connections.length === 0 ? 
'' : connections[0];\n const fields = [\n `${name} (${className})`, outputShape, layer.countParams().toString(),\n firstConnection\n ];\n printRow(fields, positions, printFn);\n for (let i = 1; i < connections.length; ++i) {\n printRow(['', '', '', connections[i]], positions, printFn);\n }\n}\n//# sourceMappingURL=layer_utils.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport * as generic_utils from '../utils/generic_utils';\n// tslint:enable\n/**\n * Test whether a value in an array is the name of a LayersModel or Layer.\n * @param key The key name that the value is found under. Note that the key\n * may not be at the level immediately above the value, if the value is in a\n * nested array.\n * @param index Index of the value in the Array that it is found in.\n * @param value The value object.\n * @returns A boolean indicating whether value is a name.\n */\nfunction isArrayItemInputOrOutputName(key, index, value) {\n return (key === 'inboundNodes' || key === 'outputLayers' ||\n key === 'inputLayers') &&\n index === 0 && typeof value === 'string';\n}\n/**\n * Convert a Pythonic config object to TypeScript config object.\n * @param pythonicConfig The config object to convert.\n * @param key Optional key name of the object being converted.\n * @returns Result of the conversion.\n */\nexport function convertPythonicToTs(pythonicConfig, key) {\n if (pythonicConfig === null) {\n return null;\n }\n else if (typeof pythonicConfig === 'string') {\n return generic_utils.toCamelCase(pythonicConfig);\n }\n else if ((typeof pythonicConfig === 'number') ||\n (typeof pythonicConfig === 'boolean')) {\n return pythonicConfig;\n }\n else if (pythonicConfig instanceof Array) {\n const tsArray = [];\n const arrayLength = pythonicConfig.length;\n for (let i = 0; i < arrayLength; ++i) {\n const item = pythonicConfig[i];\n if (isArrayItemInputOrOutputName(key, i, item)) {\n tsArray.push(item);\n }\n else {\n tsArray.push(convertPythonicToTs(item, key));\n }\n }\n return tsArray;\n }\n else {\n const tsDict = {};\n for (const pythonicKey of Object.keys(pythonicConfig)) {\n const pythonicValue = pythonicConfig[pythonicKey];\n if (pythonicKey === 'name' && typeof pythonicValue === 'string') {\n // Special case the 'name' key with a string value. 
Name values, such as\n // the names of LayersModel and Layer instances, should not undergo the\n // camel-case conversion.\n tsDict[pythonicKey] = pythonicValue;\n }\n else {\n const tsKey = generic_utils.toCamelCase(pythonicKey);\n tsDict[tsKey] = convertPythonicToTs(pythonicValue, tsKey);\n }\n }\n return tsDict;\n }\n}\n/**\n * Convert a TypeScript config object to Python config object.\n * @param tsConfig The config object to convert.\n * @param key Optional key name of the object being converted.\n * @returns Result of the conversion.\n */\nexport function convertTsToPythonic(tsConfig, key) {\n if (tsConfig === null || tsConfig === undefined) {\n return null;\n }\n else if (typeof tsConfig === 'string') {\n return generic_utils.toSnakeCase(tsConfig);\n }\n else if ((typeof tsConfig === 'number') || (typeof tsConfig === 'boolean')) {\n return tsConfig;\n }\n else if (tsConfig instanceof Array) {\n const pyArray = [];\n const arrayLength = tsConfig.length;\n for (let i = 0; i < arrayLength; ++i) {\n const item = tsConfig[i];\n if (isArrayItemInputOrOutputName(key, i, item)) {\n pyArray.push(item);\n }\n else {\n pyArray.push(convertTsToPythonic(item, key));\n }\n }\n return pyArray;\n }\n else {\n const pyDict = {};\n for (const tsKey of Object.keys(tsConfig)) {\n const tsValue = tsConfig[tsKey];\n const pyKey = generic_utils.toSnakeCase(tsKey);\n if ((tsKey === 'name' || tsKey === 'className') &&\n typeof tsValue === 'string') {\n // Special case the 'name' key with a string value. Name values, such as\n // the names of LayersModel and Layer instances, should not undergo the\n // snake-case conversion.\n pyDict[pyKey] = tsValue;\n }\n else {\n pyDict[pyKey] = convertTsToPythonic(tsValue, tsKey);\n }\n }\n return pyDict;\n }\n}\n//# sourceMappingURL=serialization_utils.js.map", "/** @license See the LICENSE file. */\n// This code is auto-generated, do not modify this file!\nconst version = '2.7.0';\nexport { version };\n//# sourceMappingURL=version.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Executor: Evaluates SymbolicTensor based on feeds.\n */\nimport { cast, dispose, memory, util } from '@tensorflow/tfjs-core';\nimport { ValueError } from '../errors';\nimport { toList } from '../utils/generic_utils';\nimport { InputLayer } from './input_layer';\nimport { SymbolicTensor } from './topology';\n/**\n * Helper function to check the dtype and shape compatibility of a feed value.\n */\nfunction assertFeedCompatibility(key, val) {\n // Check dtype compatibility.\n if (key.dtype == null || key.dtype === val.dtype) {\n // a. If types match, return val tensor as is.\n return val;\n }\n try {\n // b. Attempt to convert to expected type.\n return cast(val, key.dtype);\n }\n catch (err) {\n // c. 
If conversion fails, return helpful error.\n throw new ValueError(`The dtype of the feed (${val.dtype}) can not be cast to the dtype ` +\n `of the key '${key.name}' (${key.dtype}).`);\n }\n}\n/**\n * FeedDict: A mapping from unique SymbolicTensors to feed values for them.\n * A feed value is a concrete value represented as an `Tensor`.\n */\nexport class FeedDict {\n /**\n * Constructor, optionally does copy-construction.\n * @param feeds An Array of `Feed`s, or another `FeedDict`, in which case\n * copy-construction will be performed.\n */\n constructor(feeds) {\n this.id2Value = {};\n this.id2Mask = {};\n this.name2Id = {};\n if (feeds instanceof FeedDict) {\n for (const id in feeds.id2Value) {\n this.id2Value[id] = feeds.id2Value[id];\n if (id in feeds.id2Mask) {\n this.id2Mask[id] = feeds.id2Mask[id];\n }\n }\n }\n else {\n if (feeds == null) {\n return;\n }\n for (const feed of feeds) {\n this.add(feed.key, feed.value);\n }\n }\n }\n /**\n * Add a key-value pair to the FeedDict.\n *\n * @param key The key of the feed.\n * @param value The value of the tensor feed.\n * @param mask The value of the mask feed (optional).\n * @returns This `FeedDict`.\n * @throws ValueError: If the key `SymbolicTensor` already exists in the\n * `FeedDict`.\n */\n add(key, value, mask) {\n if (this.id2Value[key.id] == null) {\n this.id2Value[key.id] = assertFeedCompatibility(key, value);\n this.name2Id[key.name] = key.id;\n if (mask != null) {\n this.id2Mask[key.id] = mask;\n }\n }\n else {\n throw new ValueError(`Duplicate key: name=${key.name}, id=${key.id}`);\n }\n return this;\n }\n /**\n * Add a Feed to the FeedDict.\n * @param feed The new `Feed` to add.\n * @returns This `FeedDict`.\n */\n addFeed(feed) {\n this.add(feed.key, feed.value);\n }\n /**\n * Probe whether a key already exists in the FeedDict.\n * @param key\n */\n hasKey(key) {\n return this.id2Value[key.id] != null;\n }\n /**\n * Get all the SymbolicTensor available in this FeedDict.\n */\n names() {\n return Object.keys(this.name2Id);\n }\n /**\n * Get the feed value for given key.\n * @param key The SymbolicTensor, or its name (as a string), of which the\n * value is sought.\n * @returns If `key` exists, the corresponding feed value.\n * @throws ValueError: If `key` does not exist in this `FeedDict`.\n */\n getValue(key) {\n if (key instanceof SymbolicTensor) {\n if (this.id2Value[key.id] == null) {\n throw new ValueError(`Nonexistent key: ${key.name}`);\n }\n else {\n return this.id2Value[key.id];\n }\n }\n else {\n const id = this.name2Id[key];\n if (id == null) {\n throw new ValueError(`Feed dict has no SymbolicTensor name: ${key}`);\n }\n return this.id2Value[id];\n }\n }\n /**\n * Get the feed mask for given key.\n * @param key The SymbolicTensor, or its name (as a string), of which the\n * value is sought.\n * @returns If `key` exists, the corresponding feed mask.\n * @throws ValueError: If `key` does not exist in this `FeedDict`.\n */\n getMask(key) {\n if (key instanceof SymbolicTensor) {\n if (this.id2Value[key.id] == null) {\n throw new ValueError(`Nonexistent key: ${key.name}`);\n }\n else {\n return this.id2Mask[key.id];\n }\n }\n else {\n const id = this.name2Id[key];\n if (id == null) {\n throw new ValueError(`Feed dict has no SymbolicTensor name: ${key}`);\n }\n return this.id2Mask[id];\n }\n }\n /** Dispose all mask Tensors held by this object. 
*/\n disposeMasks() {\n if (this.id2Mask != null) {\n dispose(this.id2Mask);\n }\n }\n}\n// Cache for topologically sorted SymbolicTensors for given execution\n// targets (i.e., fetches).\nconst cachedSorted = {};\n// Cache for recipient count maps for given execution targets (i.e., fetches).\nconst cachedRecipientCounts = {};\n/**\n * Execute a SymbolicTensor by using concrete feed values.\n *\n * A `SymbolicTensor` object is a node in a computation graph of TF.js\n * Layers. The object is backed by a source layer and input\n * `SymbolicTensor`s to the source layer. This method evaluates\n * the `call()` method of the source layer, using concrete values of the\n * inputs obtained from either\n * * `feedDict`, if the input key exists in `feedDict`, or else,\n * * a recursive call to `execute()` itself.\n *\n * @param x: The `SymbolicTensor` to execute.\n * @param feedDict: The feed values, as base condition of the recursion.\n * execution.\n * @param kwargs: Optional keyword arguments.\n * @param probe: A probe object (of interface `ExecutionProbe`) used for\n * testing memory footprint of `execute` calls.\n * @returns Result of the execution.\n * @throws ValueError: If any `SymbolicTensor`s from `InputLayer`s\n * encountered during the execution lacks a feed value in `feedDict`.\n */\nexport function execute(fetches, feedDict, kwargs, probe) {\n const training = kwargs == null ? false : kwargs['training'];\n const arrayFetches = Array.isArray(fetches);\n const fetchArray = arrayFetches ? fetches : [fetches];\n const outputNames = fetchArray.map(t => t.name);\n const finalOutputs = [];\n const feedNames = feedDict.names();\n for (const outputName of outputNames) {\n if (feedNames.indexOf(outputName) !== -1) {\n finalOutputs.push(feedDict.getValue(outputName));\n }\n else {\n finalOutputs.push(null);\n }\n }\n if (probe != null) {\n // For optional probing of memory footprint during execution.\n probe.maxNumTensors = -Infinity;\n probe.minNumTensors = Infinity;\n }\n // Check cache.\n const fetchAndFeedKey = outputNames.join(',') + '|' + feedDict.names().join(',');\n let sorted;\n let recipientCounts;\n if (cachedSorted[fetchAndFeedKey] == null) {\n // Cache doesn't contain the desired combination of fetches. 
Compute\n // topological sort for the combination for the first time.\n const out = getTopologicalSortAndRecipientCounts(fetchArray, feedDict);\n sorted = out.sorted;\n recipientCounts = out.recipientCounts;\n // Store results in cache for future use.\n cachedSorted[fetchAndFeedKey] = sorted;\n cachedRecipientCounts[fetchAndFeedKey] = recipientCounts;\n }\n sorted = cachedSorted[fetchAndFeedKey];\n recipientCounts = {};\n if (!training) {\n Object.assign(recipientCounts, cachedRecipientCounts[fetchAndFeedKey]);\n }\n const internalFeedDict = new FeedDict(feedDict);\n // Start iterative execution on the topologically-sorted SymbolicTensors.\n for (let i = 0; i < sorted.length; ++i) {\n if (probe != null) {\n // For optional probing of memory usage during execution.\n const numTensors = memory().numTensors;\n if (numTensors > probe.maxNumTensors) {\n probe.maxNumTensors = numTensors;\n }\n if (numTensors < probe.minNumTensors) {\n probe.minNumTensors = numTensors;\n }\n }\n const symbolic = sorted[i];\n const srcLayer = symbolic.sourceLayer;\n if (srcLayer instanceof InputLayer) {\n continue;\n }\n const inputValues = [];\n const inputMasks = [];\n const tensorsToDispose = [];\n let maskExists = false;\n for (const input of symbolic.inputs) {\n const value = internalFeedDict.getValue(input);\n const mask = internalFeedDict.getMask(input);\n inputValues.push(value);\n inputMasks.push(mask);\n if (mask != null) {\n maskExists = true;\n }\n if (!training) {\n recipientCounts[input.name]--;\n if (recipientCounts[input.name] === 0 && !feedDict.hasKey(input) &&\n outputNames.indexOf(input.name) === -1 && !value.isDisposed &&\n input.sourceLayer.stateful !== true) {\n tensorsToDispose.push(value);\n }\n }\n }\n if (maskExists) {\n kwargs = kwargs || {};\n kwargs['mask'] = inputMasks[0];\n }\n const outputTensors = toList(srcLayer.apply(inputValues, kwargs));\n let outputMask = null;\n if (srcLayer.supportsMasking) {\n outputMask = srcLayer.computeMask(inputValues, inputMasks);\n }\n const layerOutputs = getNodeOutputs(symbolic);\n const outputSymbolicTensors = Array.isArray(layerOutputs) ? layerOutputs : [layerOutputs];\n for (let i = 0; i < outputSymbolicTensors.length; ++i) {\n if (!internalFeedDict.hasKey(outputSymbolicTensors[i])) {\n internalFeedDict.add(outputSymbolicTensors[i], outputTensors[i], Array.isArray(outputMask) ? outputMask[0] : outputMask);\n }\n const index = outputNames.indexOf(outputSymbolicTensors[i].name);\n if (index !== -1) {\n finalOutputs[index] = outputTensors[i];\n }\n }\n if (!training) {\n // Clean up Tensors that are no longer needed.\n dispose(tensorsToDispose);\n }\n }\n // NOTE(cais): Unlike intermediate tensors, we don't discard mask\n // tensors as we go, because these tensors are sometimes passed over a\n // series of mutliple layers, i.e., not obeying the immediate input\n // relations in the graph. If this becomes a memory-usage concern,\n // we can improve this in the future.\n internalFeedDict.disposeMasks();\n return arrayFetches ? finalOutputs : finalOutputs[0];\n}\n/**\n * Sort the `SymbolicTensor`s topologically, for an array of fetches.\n *\n * This function calls getTopologicalSortAndRecipientCountsForOneFetch and\n * merges their results.\n *\n * @param fetch The array of fetches requested. 
Must be a non-empty array.\n * @param feedDict The dictionary of fed values.\n * @returns sorted: Topologically-sorted array of SymbolicTensors.\n * recipientCounts: Recipient counts for all SymbolicTensors in `sorted`.\n */\nfunction getTopologicalSortAndRecipientCounts(fetches, feedDict) {\n util.assert(fetches != null && fetches.length > 0, () => `Expected at least one fetch, got none`);\n let finalSorted = [];\n let finalRecipientMap = {};\n if (fetches.length === 1) {\n // Special-casing 1 fetch for efficiency.\n const out = getTopologicalSortAndRecipientCountsForOneFetch(fetches[0], feedDict);\n finalSorted = out.sorted;\n finalRecipientMap = out.recipientMap;\n }\n else {\n const visited = new Set();\n for (const fetch of fetches) {\n const { sorted, recipientMap } = getTopologicalSortAndRecipientCountsForOneFetch(fetch, feedDict);\n // Merge sorted SymbolicTensor Arrays.\n for (const symbolicTensor of sorted) {\n if (!visited.has(symbolicTensor.name)) {\n finalSorted.push(symbolicTensor);\n visited.add(symbolicTensor.name);\n }\n }\n // Merge recipient maps.\n for (const name in recipientMap) {\n if (finalRecipientMap[name] == null) {\n finalRecipientMap[name] = new Set();\n }\n recipientMap[name].forEach(recipient => finalRecipientMap[name].add(recipient));\n }\n }\n }\n return {\n sorted: finalSorted,\n recipientCounts: recipientMap2Counts(finalRecipientMap)\n };\n}\nfunction recipientMap2Counts(recipientMap) {\n const recipientCounts = {};\n for (const name in recipientMap) {\n recipientCounts[name] = recipientMap[name].size;\n }\n return recipientCounts;\n}\n/**\n * Sort the `SymbolicTensor`s topologically, for a single fetch.\n *\n * This helper function processes the upstream SymbolicTensors of a single\n * fetch.\n *\n * @param fetch The single fetch requested.\n * @param feedDict The dictionary of fed values.\n * @returns sorted: Topologically-sorted array of SymbolicTensors.\n * recipientMap: Recipient names for all SymbolicTensors in `sorted`.\n */\nexport function getTopologicalSortAndRecipientCountsForOneFetch(fetch, feedDict) {\n const visited = new Set();\n const sorted = [];\n const recipientMap = {};\n // Put keys of the feedDict into visited first, so they don't have to be\n // walked. This is needed in case where there are feeds for intermediate\n // SymbolicTensors of the graph.\n for (const key of feedDict.names()) {\n visited.add(key);\n }\n const stack = [];\n const marks = [];\n // Initial population of stack and marks.\n stack.push(fetch);\n while (stack.length > 0) {\n const top = stack[stack.length - 1];\n if (visited.has(top.name)) {\n stack.pop();\n continue;\n }\n const topIsMarked = marks[marks.length - 1] === stack.length - 1;\n if (top.inputs.length === 0 || topIsMarked) {\n // Input SymbolicTensor or all children have been visited.\n stack.pop();\n sorted.push(top);\n visited.add(top.name);\n if (topIsMarked) {\n marks.pop();\n }\n }\n else {\n // A non-input SymbolicTensor whose upstream SymbolicTensors haven't\n // been visited yet. Push them onto the stack.\n marks.push(stack.length - 1);\n for (const input of top.inputs) {\n // Increment the recipient count. 
Note that this needs to happen\n // regardless of whether the SymbolicTensor has been visited before.\n if (recipientMap[input.name] == null) {\n recipientMap[input.name] = new Set();\n }\n recipientMap[input.name].add(top.name);\n if (visited.has(input.name)) {\n continue; // Avoid repeated visits to the same SymbolicTensor.\n }\n stack.push(input);\n }\n }\n }\n return { sorted, recipientMap };\n}\n/**\n * Get the symbolic output tensors of the node to which a given fetch belongs.\n * @param fetch The fetched symbolic tensor.\n * @returns The Array of symbolic tensors output by the node to which `fetch`\n * belongs.\n */\nfunction getNodeOutputs(fetch) {\n let layerOutputs;\n if (fetch.sourceLayer.inboundNodes.length === 1) {\n layerOutputs = fetch.sourceLayer.output;\n }\n else {\n let nodeIndex = null;\n for (let i = 0; i < fetch.sourceLayer.inboundNodes.length; ++i) {\n for (const outputTensor of fetch.sourceLayer.inboundNodes[i]\n .outputTensors) {\n if (outputTensor.id === fetch.id) {\n nodeIndex = i;\n break;\n }\n }\n }\n layerOutputs = fetch.sourceLayer.getOutputAt(nodeIndex);\n }\n return layerOutputs;\n}\n//# sourceMappingURL=executor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* Original source: keras/engine/topology.py */\nimport { tidy } from '@tensorflow/tfjs-core';\nimport { getUid } from '../backend/state';\nimport { NotImplementedError, RuntimeError, ValueError } from '../errors';\nimport { deserialize as deserializeLayer } from '../layers/serialization';\nimport * as generic_utils from '../utils/generic_utils';\nimport { convertTsToPythonic } from '../utils/serialization_utils';\nimport * as types_utils from '../utils/types_utils';\nimport { batchSetValue } from '../variables';\nimport { version as layersVersion } from '../version';\nimport { execute, FeedDict } from './executor';\nimport { InputLayer } from './input_layer';\nimport { Layer, Node } from './topology';\n/**\n * A Container is a directed acyclic graph of layers.\n *\n * It is the topological form of a \"model\". A LayersModel\n * is simply a Container with added training routines.\n *\n */\nexport class Container extends Layer {\n constructor(args) {\n // No args passed to super's constructor.\n super({});\n this.containerNodes = new Set();\n this.name = args.name;\n if (this.name == null) {\n const prefix = this.getClassName().toLowerCase();\n this.name = getUid(prefix);\n }\n this.supportsMasking = false;\n this.trainable_ = true;\n // TODO(michaelterry): Initialize perInputLosses/Updates here.\n // Container-specific properties.\n if (Array.isArray(args.inputs)) {\n this.inputs = args.inputs.slice();\n }\n else {\n this.inputs = [args.inputs];\n }\n if (Array.isArray(args.outputs)) {\n this.outputs = args.outputs.slice();\n }\n else {\n this.outputs = [args.outputs];\n }\n // Check for redundancy in inputs.\n if (generic_utils.unique(this.inputs).length !== this.inputs.length) {\n throw new ValueError('The list of inputs passed to the model is ' +\n 'redundant. All inputs should only appear once. Found: ' +\n `${this.inputs.map(x => x.name)}`);\n }\n // Check for redundancy in outputs.\n if (generic_utils.unique(this.outputs).length !== this.outputs.length) {\n console.warn('The list of outputs passed to the model is redundant. 
' +\n 'All outputs should only appear once. Found: ' +\n `${this.outputs.map(x => x.name)}`);\n }\n /*\n List of initial layers (1 to 1 mapping with this.inputs, hence the same\n layer might appear twice)\n */\n this.inputLayers = [];\n this.inputLayersNodeIndices = [];\n this.inputLayersTensorIndices = [];\n /*\n List of layers (1 to 1 mapping with this.outputs, hence the same layer\n might appear twice)\n */\n this.outputLayers = [];\n this.outputLayersNodeIndices = [];\n this.outputLayersTensorIndices = [];\n /*\n All layers in order of horizontal graph traversal. Entries are unique.\n Includes input and output layers.\n */\n this.layers = [];\n /*\n References to container layers that were constructed internally. We need\n these to properly dispose of tensors from nested containers.\n */\n this.internalContainerRefs = [];\n // TODO(michaelterry): Determine if caching still needed with eager\n // backend.\n /*\n This is for performance optimization when calling the Container on new\n inputs. Every time the Container is called on a set on input tensors,\n we compute the output tensors, output masks and output shapes in one pass,\n then cache them here. When one of these outputs is queried later,\n we retrieve it from there instead of recomputing it.\n */\n // this.outputTensorCache = {};\n // this.outputShapeCache = {};\n // Build this.outputLayers:\n for (const x of this.outputs) {\n const layer = x.sourceLayer;\n const nodeIndex = x.nodeIndex;\n const tensorIndex = x.tensorIndex;\n this.outputLayers.push(layer);\n this.outputLayersNodeIndices.push(nodeIndex);\n this.outputLayersTensorIndices.push(tensorIndex);\n }\n // TODO(michaelterry): Add output mask cache code.\n // Build this.inputLayers:\n for (const x of this.inputs) {\n const layer = x.sourceLayer;\n const nodeIndex = x.nodeIndex;\n const tensorIndex = x.tensorIndex;\n /*\n It's supposed to be an input layer, so only one node\n and one tensor output.\n */\n generic_utils.assert(nodeIndex === 0, 'input layer has >1 nodes');\n generic_utils.assert(tensorIndex === 0, 'input layer has >1 tensors');\n this.inputLayers.push(layer);\n this.inputLayersNodeIndices.push(nodeIndex);\n this.inputLayersTensorIndices.push(tensorIndex);\n }\n // Build this.inputNames and this.outputNames.\n this.inputNames = [];\n this.outputNames = [];\n this.feedInputShapes = [];\n this.feedInputNames = [];\n this.feedOutputNames = [];\n for (let i = 0; i < this.inputLayers.length; i++) {\n const layer = this.inputLayers[i];\n // Check that layer is an InputLayer.\n if (!(layer instanceof InputLayer)) {\n throw new TypeError('Input layers to a LayersModel must be InputLayer objects. ' +\n `Received inputs: ${args.inputs}. 
` +\n `Input ${i} (0-based) originates ` +\n `from layer type ${layer.getClassName()}.`);\n }\n this.inputNames.push(layer.name);\n this.feedInputShapes.push(layer.batchInputShape);\n this.feedInputNames.push(layer.name);\n }\n for (const layer of this.outputLayers) {\n this.outputNames.push(layer.name);\n }\n this.internalInputShapes = this.inputs.map(x => x.shape);\n this.internalOutputShapes = this.outputs.map(x => x.shape);\n /*\n Container_nodes: set of nodes included in the graph (not all nodes\n included in the layers are relevant to the current graph).\n */\n // ids of all nodes relevant to the Container:\n const nodesDepths = {};\n // To recover nodes from their ID.\n const nodeIDToNode = {};\n const layersDepths = {};\n // To layers from their ID.\n const layerIDToLayer = {};\n const layerIndices = {};\n const nodesInDecreasingDepth = [];\n /**\n * Builds a map of the graph of layers.\n *\n * This recursively updates the map `layerIndices`,\n * the list `nodesInDecreasingDepth` and the set `containerNodes`.\n *\n * @param tensor Some tensor in a graph.\n * @param finishedNodes Set of nodes whose subgraphs have been traversed\n * completely. Useful to prevent duplicated work.\n * @param nodesInProgress Set of nodes that are currently active on the\n * recursion stack. Useful to detect cycles.\n * @param layer Layer from which `tensor` comes from. If not provided,\n * will be obtained from tensor.sourceLayer.\n * @param nodeIndex Node index from which `tensor` comes from.\n * @param tensorIndex TensorIndex from which `tensor` comes from.\n *\n * @exception RuntimeError if a cycle is detected.\n */\n const buildMapOfGraph = (tensor, finishedNodes, nodesInProgress, layer, nodeIndex, tensorIndex) => {\n if (layer == null || nodeIndex == null || tensorIndex == null) {\n layer = tensor.sourceLayer;\n nodeIndex = tensor.nodeIndex;\n tensorIndex = tensor.tensorIndex;\n }\n const node = layer.inboundNodes[nodeIndex];\n // Prevent cycles.\n if (nodesInProgress.indexOf(node) !== -1) {\n throw new RuntimeError(`The tensor ${tensor.name} at layer \"${layer.name}\" ` +\n 'is part of a cycle.');\n }\n // Don't repeat work for shared subgraphs\n if (finishedNodes.indexOf(node) !== -1) {\n return;\n }\n // Update containerNodes.\n this.containerNodes.add(Container.nodeKey(layer, nodeIndex));\n // Store the traversal order for layer sorting.\n if (!(layer.id in layerIndices)) {\n layerIndices[layer.id] = Object.keys(layerIndices).length;\n }\n if (nodesInProgress.indexOf(node) === -1) {\n nodesInProgress.push(node);\n }\n // Propagate to all previous tensors connected to this node.\n const numInboundLayers = node.inboundLayers.length;\n for (let i = 0; i < numInboundLayers; i++) {\n const x = node.inputTensors[i];\n const layer = node.inboundLayers[i];\n const nodeIndex = node.nodeIndices[i];\n const tensorIndex = node.tensorIndices[i];\n buildMapOfGraph(x, finishedNodes, nodesInProgress, layer, nodeIndex, tensorIndex);\n }\n finishedNodes.push(node);\n while (nodesInProgress.indexOf(node) >= 0) {\n nodesInProgress.splice(nodesInProgress.indexOf(node), 1);\n }\n nodesInDecreasingDepth.push(node);\n };\n const finishedNodes = [];\n const nodesInProgress = [];\n for (const x of this.outputs) {\n buildMapOfGraph(x, finishedNodes, nodesInProgress);\n }\n const reversedNodesInDecreasingDepth = nodesInDecreasingDepth.slice().reverse();\n for (const node of reversedNodesInDecreasingDepth) {\n nodeIDToNode[node.id] = node;\n // If the depth is not set, the node has no outbound nodes (depth 0).\n if 
(!(node.id in nodesDepths)) {\n nodesDepths[node.id] = 0;\n }\n let depth = nodesDepths[node.id];\n // Update the depth of the corresponding layer\n const previousDepth = (layersDepths[node.outboundLayer.id] == null ?\n 0 :\n layersDepths[node.outboundLayer.id]);\n /*\n If we've seen this layer before at a higher depth, we should use that\n depth instead of the node depth. This is necessary for shared layers\n that have inputs at different depth levels in the graph.\n */\n depth = Math.max(depth, previousDepth);\n layersDepths[node.outboundLayer.id] = depth;\n layerIDToLayer[node.outboundLayer.id] = node.outboundLayer;\n nodesDepths[node.id] = depth;\n // Update the depth of inbound nodes.\n for (let i = 0; i < node.inboundLayers.length; i++) {\n const inboundLayer = node.inboundLayers[i];\n const nodeIndex = node.nodeIndices[i];\n const inboundNode = inboundLayer.inboundNodes[nodeIndex];\n const previousDepth = (nodesDepths[inboundNode.id] == null ? 0 :\n nodesDepths[inboundNode.id]);\n nodesDepths[inboundNode.id] = Math.max(depth + 1, previousDepth);\n nodeIDToNode[inboundNode.id] = inboundNode;\n }\n }\n // Build a dict {depth: list of nodes with this depth}\n const nodesByDepth = {};\n for (const nodeID in nodesDepths) {\n const depth = nodesDepths[nodeID];\n if (!(depth in nodesByDepth)) {\n nodesByDepth[depth] = [];\n }\n nodesByDepth[depth].push(nodeIDToNode[nodeID]);\n }\n // Build a dict {depth: list of layers with this depth}\n const layersByDepth = {};\n for (const layerID in layersDepths) {\n const depth = layersDepths[layerID];\n if (!(depth in layersByDepth)) {\n layersByDepth[depth] = [];\n }\n layersByDepth[depth].push(layerIDToLayer[layerID]);\n }\n // Get sorted list of layer depths.\n let depthKeys = Object.keys(layersByDepth)\n .map(x => parseInt(x, 10))\n .sort(generic_utils.reverseNumberCompare);\n // Set this.layers and this.layersByDepth.\n this.layers = [];\n for (const depth of depthKeys) {\n const layersForDepth = layersByDepth[depth];\n // Container.layers needs to have a deterministic order:\n // here we order them by traversal order.\n layersForDepth.sort((a, b) => {\n const aIndex = layerIndices[a.id];\n const bIndex = layerIndices[b.id];\n if (aIndex < bIndex) {\n return -1;\n }\n if (aIndex > bIndex) {\n return 1;\n }\n return 0;\n });\n for (const layer of layersForDepth) {\n if (layer instanceof Container) {\n this.internalContainerRefs.push(layer);\n }\n this.layers.push(layer);\n }\n }\n this.layersByDepth = layersByDepth;\n // Get sorted list of node depths;\n depthKeys = Object.keys(nodesByDepth)\n .map(x => parseInt(x, 10))\n .sort(generic_utils.reverseNumberCompare);\n // Check that all tensors required are computable.\n // computable_tensors: all tensors in the graph\n // that can be computed from the inputs provided.\n const computableTensors = this.inputs.slice();\n // To provide a better error msg.\n const layersWithCompleteInput = [];\n for (const depth of depthKeys) {\n for (const node of nodesByDepth[depth]) {\n const layer = node.outboundLayer;\n if (layer != null) {\n for (const x of node.inputTensors) {\n if (computableTensors.indexOf(x) === -1) {\n throw new RuntimeError(`Graph disconnected: cannot obtain value for tensor ${x}` +\n ` at layer \"${layer.name}\". 
` +\n 'The following previous layers were accessed without ' +\n `issue: ${layersWithCompleteInput}`);\n }\n }\n for (const x of node.outputTensors) {\n computableTensors.push(x);\n }\n layersWithCompleteInput.push(layer.name);\n }\n }\n }\n // Set this.containerNodes and this.nodesByDepth.\n this.nodesByDepth = nodesByDepth;\n // Ensure name unicity, which will be crucial for serialization\n // (since serialized nodes refer to layers by their name).\n const allNames = this.layers.map(x => x.name);\n for (const name of allNames) {\n const numOccurrences = allNames.filter(x => x === name).length;\n if (numOccurrences !== 1) {\n throw new RuntimeError(`The name \"${name}\" is used ${numOccurrences} times ` +\n 'in the model. All layer names should be unique. Layer names: ' +\n JSON.stringify(allNames));\n }\n }\n // Layer parameters.\n // The new container starts with a single inbound node\n // for its inputs, and no outbound nodes.\n // Will be appended to by future calls to apply().\n this.outboundNodes = [];\n // Will be appended to below, and by future calls to apply().\n this.inboundNodes = [];\n // Create the node linking internal inputs to internal outputs.\n // (This call has side effects.)\n // tslint:disable-next-line:no-unused-expression\n new Node({\n outboundLayer: this,\n inboundLayers: [],\n nodeIndices: [],\n tensorIndices: [],\n inputTensors: this.inputs,\n outputTensors: this.outputs,\n inputMasks: this.inputs.map(x => null),\n outputMasks: this.outputs.map(x => null),\n inputShapes: this.inputs.map(x => x.shape),\n outputShapes: this.outputs.map(x => x.shape)\n });\n this.built = true;\n this._refCount = 1; // The ref count of a container always start at 1.\n }\n assertNotDisposed() {\n if (this._refCount === 0) {\n throw new Error(`Container '${this.name}' is already disposed.`);\n }\n }\n /**\n * Attempt to dispose a LayersModel's weights.\n *\n * This method decrease the reference count of the LayersModel object by 1.\n *\n * A LayersModel is reference-counted. 
Its reference count is incremented by 1\n * when it is first constructed and when it is used as a Layer of another\n * LayersModel.\n *\n * If the reference count of a LayersModel becomes 0, the `dispose` method of\n * all its constituent `Layer`s will be called.\n *\n * Note: If the reference count is greater than 0 after the decrement, the\n * `dispose` method of its constituent `Layer`s will *not* be called.\n *\n * After a LayersModel is disposed, it cannot be used in calls such as\n * 'predict`, `evaluate` or `fit` anymore.\n *\n * @returns A DisposeResult Object with the following fields:\n * - refCountAfterDispose: The reference count of the LayersModel after this\n * `dispose()` call.\n * - numDisposedVariables: Number of `tf.Variable`s (i.e., weights) disposed\n * during this `dispose()` call.\n * @throws {Error} If the layer is not built yet, or if the LayersModel has\n * already been disposed.\n */\n dispose() {\n this.assertNotDisposed();\n const result = { refCountAfterDispose: null, numDisposedVariables: 0 };\n if (--this._refCount === 0) {\n for (const layer of this.layers) {\n result.numDisposedVariables += layer.dispose().numDisposedVariables;\n }\n // Call dispose on each internally created container layer again to ensure\n // their refCounts hit zero and their tensors are subsequently deleted.\n for (const container of this.internalContainerRefs) {\n result.numDisposedVariables += container.dispose().numDisposedVariables;\n }\n }\n result.refCountAfterDispose = this._refCount;\n return result;\n }\n get trainable() {\n return this.trainable_;\n }\n set trainable(trainable) {\n this.layers.forEach(layer => {\n // tslint:disable-next-line:no-any\n layer._trainableWeights\n .forEach(w => w.trainable = trainable);\n });\n this.trainable_ = trainable;\n }\n get trainableWeights() {\n // Porting Note: This check below is to prevent errors where the\n // _trainableWeights inherited from the parent class (Layer) gets\n // inadvertently used.\n if (this._trainableWeights.length > 0) {\n throw new ValueError('Container instance unexpectedly contains _trainableWeights.' +\n 'The trainable weights of a Container are a union of the ' +\n 'trainable weights of its consituent Layers. Its own ' +\n '_trainableWeights must remain an empty Array.');\n }\n if (!this.trainable) {\n return [];\n }\n let weights = [];\n for (const layer of this.layers) {\n weights = weights.concat(layer.trainableWeights);\n }\n return weights;\n }\n get nonTrainableWeights() {\n const weights = [];\n for (const layer of this.layers) {\n weights.push(...layer.nonTrainableWeights);\n }\n if (!this.trainable) {\n const trainableWeights = [];\n for (const layer of this.layers) {\n trainableWeights.push(...layer.trainableWeights);\n }\n return trainableWeights.concat(weights);\n }\n return weights;\n }\n get weights() {\n return this.trainableWeights.concat(this.nonTrainableWeights);\n }\n /**\n * Loads all layer weights from a JSON object.\n *\n * Porting Note: HDF5 weight files cannot be directly loaded in JavaScript /\n * TypeScript. 
The utility script at `scripts/pykeras.py` offers means\n * to convert them into JSON strings compatible with this method.\n * Porting Note: TensorFlow.js Layers supports only loading by name currently.\n *\n * @param weights A JSON mapping weight names to weight values as nested\n * arrays of numbers, or a `NamedTensorMap`, i.e., a JSON mapping weight\n * names to `tf.Tensor` objects.\n * @param strict Require that the provided weights exactly match those\n * required by the container. Default: `true`. Passing `false` means that\n * extra weights and missing weights will be silently ignored.\n */\n loadWeights(weights, strict = true) {\n const nameToWeight = {};\n let totalWeightsCount = 0;\n for (const layer of this.layers) {\n for (const weight of layer.weights) {\n if (nameToWeight[weight.originalName] != null) {\n throw new ValueError(`Duplicate weight name: ${weight.originalName}`);\n }\n nameToWeight[weight.originalName] = weight;\n totalWeightsCount++;\n }\n }\n const weightValueTuples = [];\n for (const name in weights) {\n // TF 2.2.0 added cell name to the weight name in the format of\n // layer_name/cell_name/weight_name, we need to remove\n // the inner cell name.\n let validatedName = name;\n if (nameToWeight[name] == null) {\n const tokens = name.split('/');\n const shortenNameArray = tokens.slice(0, -2).concat([tokens[tokens.length - 1]]);\n validatedName = shortenNameArray.join('/');\n }\n if (nameToWeight[validatedName] != null) {\n weightValueTuples.push([nameToWeight[validatedName], weights[name]]);\n }\n else if (strict) {\n throw new ValueError(`Provided weight data has no target variable: ${name}`);\n }\n delete nameToWeight[validatedName];\n }\n if (strict) {\n // Check that all weights are set.\n const unsetNames = [];\n for (const name in nameToWeight) {\n unsetNames.push(name);\n }\n if (unsetNames.length > 0) {\n throw new ValueError(`${unsetNames.length} of ${totalWeightsCount} weights are not set: ` +\n `${unsetNames}`);\n }\n }\n batchSetValue(weightValueTuples);\n }\n /**\n * Util shared between different serialization methods.\n * @returns LayersModel config with Keras version information added.\n */\n updatedConfig() {\n const theConfig = this.getConfig();\n const modelConfig = {};\n modelConfig['className'] = this.getClassName();\n modelConfig['config'] = theConfig;\n modelConfig['kerasVersion'] = `tfjs-layers ${layersVersion}`;\n // TODO(nielsene): Replace something like K.backend() once\n // possible.\n modelConfig['backend'] = 'TensorFlow.js';\n return modelConfig;\n }\n /**\n * Returns a JSON string containing the network configuration.\n *\n * To load a network from a JSON save file, use\n * models.modelFromJSON(jsonString);\n * @param extraJsonArgs Unused in tfjs-layers, maintained for PyKeras\n * @param returnString Whether the return value should be stringified\n * (default: `true`).\n * @returns a JSON string if `returnString` (default), or a JSON object if\n * `!returnString`.\n */\n // tslint:disable-next-line:no-any\n toJSON(unused, returnString = true) {\n const modelConfig = convertTsToPythonic(this.updatedConfig());\n return returnString ? JSON.stringify(modelConfig) : modelConfig;\n }\n /**\n * Call the model on new inputs.\n *\n * In this case `call` just reapplies all ops in the graph to the new inputs\n * (e.g. build a new computational graph from the provided inputs).\n *\n * @param inputs A tensor or list of tensors.\n * @param mask A mask or list of masks. 
A mask can be either a tensor or null\n * (no mask).\n *\n * @return A tensor if there is a single output, or a list of tensors if there\n * are more than one outputs.\n */\n call(inputs, kwargs) {\n return tidy(() => {\n inputs = generic_utils.toList(inputs);\n const feedDict = new FeedDict();\n for (let i = 0; i < this.inputs.length; ++i) {\n feedDict.add(this.inputs[i], inputs[i]);\n }\n return execute(this.outputs, feedDict, kwargs);\n });\n }\n /**\n * Computes an output mask tensor.\n *\n * @param inputs Tensor or list of tensors.\n * @param mask Tensor or list of tensors.\n *\n * @return null or a tensor (or list of tensors, one per output tensor of the\n * layer).\n */\n computeMask(inputs, mask) {\n return tidy(() => {\n inputs = generic_utils.toList(inputs);\n let masks;\n if (mask == null) {\n masks = generic_utils.pyListRepeat(null, inputs.length);\n }\n else {\n masks = generic_utils.toList(mask);\n }\n // TODO(michaelterry): Add support for mask caching.\n return this.runInternalGraph(inputs, masks)[1];\n });\n }\n /**\n * Computes the output shape of the layer.\n *\n * Assumes that the layer will be built to match that input shape provided.\n *\n * @param inputShape A shape (tuple of integers) or a list of shape tuples\n * (one per output tensor of the layer). Shape tuples can include null for\n * free dimensions, instead of an integer.\n */\n computeOutputShape(inputShape) {\n const inputShapes = types_utils.normalizeShapeList(inputShape);\n if (inputShapes.length !== this.inputLayers.length) {\n throw new ValueError(`Invalid inputShape argument ${inputShape}: ` +\n `model has ${this.inputLayers.length} tensor inputs.`);\n }\n // TODO(michaelterry): Add caching\n const layersToOutputShapes = {};\n for (let i = 0; i < inputShapes.length; i++) {\n const layer = this.inputLayers[i];\n const inputShape = inputShapes[i];\n // It's an input layer: computeOutputShape is identity,\n // and there is only one node and one tensor output.\n const shapeKey = layer.name + '_0_0';\n layersToOutputShapes[shapeKey] = inputShape;\n }\n const depthKeys = Object.keys(this.nodesByDepth)\n .map(x => parseInt(x, 10))\n .sort(generic_utils.reverseNumberCompare);\n // Iterate over nodes, by depth level.\n if (depthKeys.length > 1) {\n for (const depth of depthKeys) {\n const nodes = this.nodesByDepth[depth];\n for (const node of nodes) {\n // This is always a single layer, never a list.\n const layer = node.outboundLayer;\n if (this.inputLayers.map(x => x.id).indexOf(layer.id) !== -1) {\n // We've already covered the input layers a few lines above.\n continue;\n }\n // Potentially redundant list, same size of node.inputTensors.\n const inputShapes = [];\n for (let j = 0; j < node.inboundLayers.length; j++) {\n const inboundLayer = node.inboundLayers[j];\n const nodeIndex = node.nodeIndices[j];\n const tensorIndex = node.tensorIndices[j];\n const shapeKey = `${inboundLayer.name}_${nodeIndex}_${tensorIndex}`;\n const inputShape = layersToOutputShapes[shapeKey];\n inputShapes.push(inputShape);\n }\n const outputShape = layer.computeOutputShape(generic_utils.singletonOrArray(inputShapes));\n const outputShapes = types_utils.normalizeShapeList(outputShape);\n const nodeIndex = layer.inboundNodes.indexOf(node);\n for (let j = 0; j < outputShapes.length; j++) {\n const shapeKey = `${layer.name}_${nodeIndex}_${j}`;\n layersToOutputShapes[shapeKey] = outputShapes[j];\n }\n }\n }\n }\n // Read final output shapes from layersToOutputShapes.\n const outputShapes = [];\n const outputShapeKeys = [];\n for (let i 
= 0; i < this.outputLayers.length; i++) {\n const layer = this.outputLayers[i];\n const nodeIndex = this.outputLayersNodeIndices[i];\n const tensorIndex = this.outputLayersTensorIndices[i];\n const shapeKey = `${layer.name}_${nodeIndex}_${tensorIndex}`;\n outputShapeKeys.push(shapeKey);\n }\n for (let i = 0; i < outputShapeKeys.length; i++) {\n const key = outputShapeKeys[i];\n generic_utils.assert(key in layersToOutputShapes);\n outputShapes.push(layersToOutputShapes[key]);\n }\n // TODO(michaelterry): Update cache\n return generic_utils.singletonOrArray(outputShapes);\n }\n /**\n * Computes output tensors for new inputs.\n *\n * Note:\n * - Expects `inputs` to be a list (potentially with 1 element).\n *\n * @param inputs List of tensors\n * @param masks List of masks (tensors or null).\n * @return Three lists: outputTensors, outputMasks, outputShapes\n */\n runInternalGraph(inputs, masks) {\n if (masks == null) {\n masks = generic_utils.pyListRepeat(null, inputs.length);\n }\n // Dictionary mapping reference tensors to tuples\n // (computed tensor, compute mask)\n // we assume a 1:1 mapping from tensor to mask\n // TODO: raise exception when a `.computeMask()` call\n // does not return a list the same size as `call`\n const tensorMap = {};\n for (let i = 0; i < this.inputs.length; ++i) {\n const x = this.inputs[i];\n const y = inputs[i];\n const mask = masks[i];\n tensorMap[x.id] = [y, mask];\n }\n const depthKeys = Object.keys(this.nodesByDepth)\n .map(x => parseInt(x, 10))\n .sort(generic_utils.reverseNumberCompare);\n for (const depth of depthKeys) {\n const nodes = this.nodesByDepth[depth];\n for (const node of nodes) {\n // This is always a single layer, never a list.\n const layer = node.outboundLayer;\n const referenceInputTensors = node.inputTensors;\n const referenceOutputTensors = node.outputTensors;\n // If all previous input tensors are available in tensorMap,\n // then call node.inboundLayer on them.\n // List of tuples [input, mask]:\n const computedData = new Array();\n for (const x of referenceInputTensors) {\n if (x.id in tensorMap) {\n computedData.push(tensorMap[x.id]);\n }\n }\n if (computedData.length === referenceInputTensors.length) {\n // TODO(michaelterry): Add K.name_scope here, if we need it.\n let kwargs = {};\n let computedTensors;\n let computedMasks;\n let outputTensors;\n let outputMasks;\n // call layer\n if (node.callArgs != null) {\n kwargs = node.callArgs;\n }\n if (computedData.length === 1) {\n const [computedTensor, computedMask] = computedData[0];\n if (kwargs['mask'] == null) {\n kwargs['mask'] = computedMask;\n }\n outputTensors =\n generic_utils.toList(layer.call(computedTensor, kwargs));\n outputMasks = generic_utils.toList(layer.computeMask(computedTensor, computedMask));\n computedTensors = [computedTensor];\n computedMasks = [computedMask];\n }\n else {\n computedTensors = computedData.map(x => x[0]);\n computedMasks = computedData.map(x => x[1]);\n if (kwargs['mask'] == null) {\n kwargs['mask'] = computedMasks;\n }\n outputTensors =\n generic_utils.toList(layer.call(computedTensors, kwargs));\n outputMasks = generic_utils.toList(layer.computeMask(computedTensors, computedMasks));\n }\n if (layer.activityRegularizer) {\n throw new NotImplementedError('LayersModel invocation with concrete Tensor value(s) in the ' +\n 'presence of activity regularizer(s) is not supported yet.');\n }\n // TODO(michaelterry): Add model updates and losses\n // Update tensor map.\n for (let i = 0; i < referenceOutputTensors.length; ++i) {\n const x = 
referenceOutputTensors[i];\n const y = outputTensors[i];\n const mask = outputMasks[i];\n tensorMap[x.id] = [y, mask];\n }\n }\n }\n }\n const outputTensors = [];\n const outputMasks = [];\n const outputShapes = [];\n for (const x of this.outputs) {\n generic_utils.assert(x.id in tensorMap, `Could not compute output ${x.name} : ${x.id}`);\n const [tensor, mask] = tensorMap[x.id];\n outputShapes.push(tensor.shape);\n outputTensors.push(tensor);\n outputMasks.push(mask);\n }\n // TODO(michaelterry): Add support for caches.\n return [outputTensors, outputMasks, outputShapes];\n }\n /**\n * Builds a map of internal node keys to node ordering.\n * Used in serializaion a node orderings may change as unused nodes are\n * dropped. Porting Note: This helper method was pulled out of getConfig to\n * improve readability.\n * @param layers An array of Layers in the model.\n * @returns Map of Node Keys to index order within the layer.\n */\n buildNodeConversionMap(layers) {\n const nodeConversionMap = {};\n let keptNodes;\n for (const layer of this.layers) {\n keptNodes = layer instanceof Container ? 1 : 0;\n for (let originalNodeIndex = 0; originalNodeIndex < layer.inboundNodes.length; originalNodeIndex++) {\n const nodeKey = Container.nodeKey(layer, originalNodeIndex);\n if (this.containerNodes.has(nodeKey)) {\n // i.e. we mark it to be saved\n nodeConversionMap[nodeKey] = keptNodes;\n keptNodes += 1;\n }\n }\n }\n return nodeConversionMap;\n }\n /**\n * Retrieves a layer based on either its name (unique) or index.\n *\n * Indices are based on order of horizontal graph traversal (bottom-up).\n *\n * If both `name` and `index` are specified, `index` takes precedence.\n *\n * @param name Name of layer.\n * @param index Index of layer.\n * @returns A Layer instance.\n * @throws ValueError: In case of invalid layer name or index.\n *\n * @doc {\n * heading: 'Layers',\n * subheading: 'Classes',\n * namespace: 'layers',\n * subclasses: ['LayersModel']\n * }\n */\n getLayer(name, index) {\n if (index != null) {\n if (this.layers.length <= index) {\n throw new ValueError(`Was asked to retrieve layer at index ${index}, but model only ` +\n `has ${this.layers.length} layer(s).`);\n }\n else {\n return this.layers[index];\n }\n }\n else {\n if (name == null) {\n throw new ValueError('Provide either a layer name or layer index');\n }\n }\n for (const layer of this.layers) {\n if (layer.name === name) {\n return layer;\n }\n }\n throw new ValueError(`No such layer: ${name}`);\n }\n /**\n * Retrieves the Container's current loss values.\n *\n * Used for regularizers during training.\n */\n calculateLosses() {\n // Porting Node: This is an augmentation to Container.loss in PyKeras.\n // In PyKeras, Container.loss returns symbolic tensors. Here a concrete\n // Tensor (specifically Scalar) values are returned. 
This is due to the\n // imperative backend.\n return tidy(() => {\n const losses = [];\n for (const layer of this.layers) {\n for (let nodeIndex = 0; nodeIndex < layer.inboundNodes.length; ++nodeIndex) {\n const nodeKey = Container.nodeKey(layer, nodeIndex);\n if (this.containerNodes.has(nodeKey)) {\n losses.push(...layer.calculateLosses());\n }\n }\n }\n // TODO(cais): Add any unconditional model-level losses?\n return losses;\n });\n }\n getConfig() {\n const config = { name: this.name };\n // Build a map from layer unique name (self._node_key)\n // to the index of the nodes that are saved in the config.\n // Only nodes in container_nodes are saved.\n const nodeConversionMap = this.buildNodeConversionMap(this.layers);\n // Serialize and save the layers in layerConfigs\n const layerConfigs = [];\n for (const layer of this.layers) {\n const layerClassName = layer.getClassName();\n const layerConfig = layer.getConfig();\n const filteredInboundNodes = [];\n for (let originalNodeIndex = 0; originalNodeIndex < layer.inboundNodes.length; originalNodeIndex++) {\n const node = layer.inboundNodes[originalNodeIndex];\n const nodeKey = Container.nodeKey(layer, originalNodeIndex);\n let kwargs = {};\n if (this.containerNodes.has(nodeKey)) {\n // The node is relevant to the model:\n // add to filteredInboundNodes.\n if (node.callArgs) {\n try {\n JSON.stringify(node.callArgs);\n kwargs = node.callArgs;\n }\n catch (err) {\n console.warn(`Layer ${layer.name} was passed ` +\n `non-serializable keyword arguments: ` +\n `${node.callArgs}. They will not be included ` +\n `in the serialized model (and thus will be ` +\n `missing at deserialization time).`);\n kwargs = {};\n }\n }\n if (node.inboundLayers.length > 0) {\n const nodeData = [];\n for (let i = 0; i < node.inboundLayers.length; i++) {\n const inboundLayer = node.inboundLayers[i];\n const nodeIndex = node.nodeIndices[i];\n const tensorIndex = node.tensorIndices[i];\n const nodeKey = Container.nodeKey(inboundLayer, nodeIndex);\n let newNodeIndex = nodeConversionMap[nodeKey];\n if (newNodeIndex == null) {\n newNodeIndex = 0;\n }\n nodeData.push([inboundLayer.name, newNodeIndex, tensorIndex, kwargs]);\n }\n filteredInboundNodes.push(nodeData);\n }\n }\n }\n const dict = {};\n dict['name'] = layer.name;\n dict['className'] = layerClassName;\n dict['config'] = layerConfig;\n dict['inboundNodes'] = filteredInboundNodes;\n layerConfigs.push(dict);\n }\n config['layers'] = layerConfigs;\n // Gather info about inputs and outputs\n const modelInputs = [];\n for (let i = 0; i < this.inputLayers.length; i++) {\n const layer = this.inputLayers[i];\n const nodeIndex = this.inputLayersNodeIndices[i];\n const nodeKey = Container.nodeKey(layer, nodeIndex);\n if (!this.containerNodes.has(nodeKey)) {\n continue;\n }\n let newNodeIndex = nodeConversionMap[nodeKey];\n if (newNodeIndex === null || newNodeIndex === undefined) {\n newNodeIndex = 0;\n }\n const tensorIndex = this.inputLayersTensorIndices[i];\n modelInputs.push([layer.name, newNodeIndex, tensorIndex]);\n }\n config['inputLayers'] = modelInputs;\n const modelOutputs = [];\n for (let i = 0; i < this.outputLayers.length; i++) {\n const layer = this.outputLayers[i];\n const nodeIndex = this.outputLayersNodeIndices[i];\n const nodeKey = Container.nodeKey(layer, nodeIndex);\n if (!this.containerNodes.has(nodeKey)) {\n continue;\n }\n let newNodeIndex = nodeConversionMap[nodeKey];\n if (newNodeIndex === null || newNodeIndex === undefined) {\n newNodeIndex = 0;\n }\n const tensorIndex = 
this.outputLayersTensorIndices[i];\n modelOutputs.push([layer.name, newNodeIndex, tensorIndex]);\n }\n config['outputLayers'] = modelOutputs;\n return config;\n }\n /**\n * Instantiates a LayersModel from its config (output of `get_config()`).\n * @param cls the class to create\n * @param config LayersModel config dictionary.\n * @param customObjects An optional dictionary of custom objects.\n * @param fastWeightInit Optional flag to use fast weight initialization\n * during deserialization. This is applicable to cases in which\n * the initialization will be immediately overwritten by loaded weight\n * values. Default: `false`.\n * @returns A LayersModel instance.\n * @throws ValueError: In case of improperly formatted config dict.\n */\n /** @nocollapse */\n static fromConfig(cls, config, customObjects = {}, fastWeightInit = false) {\n // Layer instances created during\n // the graph reconstruction process\n const createdLayers = {};\n // Dictionary mapping layer instances to\n // node data that specifies a layer call.\n // It acts as a queue that maintains any unprocessed\n // layer call until it becomes possible to process it\n // (i.e. until the input tensors to the call all exist).\n const unprocessedNodes = {};\n function addUnprocessedNode(layer, nodeData) {\n if (!(layer.name in unprocessedNodes)) {\n unprocessedNodes[layer.name] = [nodeData];\n }\n else {\n unprocessedNodes[layer.name].push(nodeData);\n }\n }\n function processNode(layer, nodeData) {\n const inputTensors = [];\n let kwargs;\n for (const inputData of nodeData) {\n const inboundLayerName = inputData[0];\n const inboundNodeIndex = inputData[1];\n const inboundTensorIndex = inputData[2];\n kwargs = inputData[3] == null ?\n {} :\n inputData[3];\n if (!(inboundLayerName in createdLayers)) {\n addUnprocessedNode(layer, nodeData);\n return;\n }\n const inboundLayer = createdLayers[inboundLayerName];\n if (inboundLayer.inboundNodes.length <= inboundNodeIndex) {\n addUnprocessedNode(layer, nodeData);\n return;\n }\n const inboundNode = inboundLayer.inboundNodes[inboundNodeIndex];\n inputTensors.push(inboundNode.outputTensors[inboundTensorIndex]);\n }\n // Call layer on its inputs, thus creating the node\n // and building the layer if needed.\n // Note: This has Eager vs Graph Implications.\n if (inputTensors.length > 0) {\n layer.apply(generic_utils.singletonOrArray(inputTensors), kwargs); // was ** kwargs\n }\n }\n /**\n * Deserialize a layer, then call it on appropriate inputs.\n * @param layerData: layer config dict.\n * @throws ValueError: In case of improperly formatted `layer_data`\n * dict.\n */\n function processLayer(layerData) {\n const layerName = layerData['name'];\n // Instantiate layer.\n const layer = deserializeLayer(layerData, config['customObjects'] != null ?\n config['customObjects'] :\n {});\n layer.setFastWeightInitDuringBuild(fastWeightInit);\n createdLayers[layerName] = layer;\n // Gather layer inputs.\n const inboundNodesData = layerData['inboundNodes'];\n inboundNodesData.forEach(nodeData => {\n if (!(nodeData instanceof Array)) {\n throw new ValueError(`Corrupted configuration, expected array for nodeData: ${nodeData}`);\n }\n // We don't process nodes (i.e. 
make layer calls)\n // on the fly because the inbound node may not yet exist,\n // in case of layer shared at different topological depths\n // (e.g.a model such as A(B(A(B(x)))))\n addUnprocessedNode(layer, nodeData);\n });\n }\n // First, we create all layers and enqueue nodes to be processed.\n const name = config['name'];\n const layersFromConfig = config['layers'];\n for (const layerData of layersFromConfig) {\n processLayer(layerData);\n }\n // Then we process nodes in order of layer depth.\n // Nodes that cannot yet be processed(if the inbound node\n // does not yet exist) are re - enqueued, and the process\n // is repeated until all nodes are processed.\n while (!generic_utils.isObjectEmpty(unprocessedNodes)) {\n for (const layerData of layersFromConfig) {\n const layer = createdLayers[layerData['name']];\n if (layer.name in unprocessedNodes) {\n const currentUnprocessedNodesForLayer = unprocessedNodes[layer.name];\n delete unprocessedNodes[layer.name];\n for (const nodeData of currentUnprocessedNodesForLayer) {\n processNode(layer, nodeData);\n }\n }\n }\n }\n const inputTensors = [];\n const outputTensors = [];\n const inputLayersFromConfig = config['inputLayers'];\n for (const layerData of inputLayersFromConfig) {\n const layerName = layerData[0];\n const nodeIndex = layerData[1];\n const tensorIndex = layerData[2];\n generic_utils.assert(layerName in createdLayers);\n const layer = createdLayers[layerName];\n const layerOutputTensors = layer.inboundNodes[nodeIndex].outputTensors;\n inputTensors.push(layerOutputTensors[tensorIndex]);\n }\n const outputLayersFromConfig = config['outputLayers'];\n for (const layerData of outputLayersFromConfig) {\n const layerName = layerData[0];\n const nodeIndex = layerData[1];\n const tensorIndex = layerData[2];\n generic_utils.assert(layerName in createdLayers);\n const layer = createdLayers[layerName];\n const layerOutputTensors = layer.inboundNodes[nodeIndex].outputTensors;\n outputTensors.push(layerOutputTensors[tensorIndex]);\n }\n return new cls({ inputs: inputTensors, outputs: outputTensors, name });\n }\n /**\n * Determine whether the container is stateful.\n *\n * Porting Note: this is the equivalent of the stateful @property of\n * the Container class in PyKeras.\n */\n get stateful() {\n // Porting Note: This check is to prevent inadvertent setting of the\n // _stateful property of the Container instance.\n if (this._stateful) {\n throw new ValueError('Container instance unexpectedly has _stateful = true. The ' +\n 'statefulness of a Container is determined by the Layers it ' +\n 'contains. 
Its _stateful property must remain the default false.');\n }\n for (const layer of this.layers) {\n if (layer.stateful) {\n return true;\n }\n }\n return false;\n }\n /**\n * Reset the state of all stateful constituent layers (if any).\n *\n * Examples of stateful layers include RNN layers whose `stateful` property\n * is set as `true`.\n */\n resetStates() {\n tidy(() => {\n this.layers.forEach(layer => {\n // tslint:disable:no-any\n if (layer.stateful) {\n layer.resetStates();\n }\n // tslint:enable:no-any\n });\n });\n }\n}\n//# sourceMappingURL=container.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport { dispose, mul, tensor1d, tidy } from '@tensorflow/tfjs-core';\nfunction standardizeSampleOrClassWeights(xWeight, outputNames, weightType) {\n const numOutputs = outputNames.length;\n if (xWeight == null || (Array.isArray(xWeight) && xWeight.length === 0)) {\n return outputNames.map(name => null);\n }\n if (numOutputs === 1) {\n if (Array.isArray(xWeight) && xWeight.length === 1) {\n return xWeight;\n }\n else if (typeof xWeight === 'object' && outputNames[0] in xWeight) {\n return [xWeight[outputNames[0]]];\n }\n else {\n return [xWeight];\n }\n }\n if (Array.isArray(xWeight)) {\n if (xWeight.length !== numOutputs) {\n throw new Error(`Provided ${weightType} is an array of ${xWeight.length} ` +\n `element(s), but the model has ${numOutputs} outputs. ` +\n `Make sure a set of weights is provided for each model output.`);\n }\n return xWeight;\n }\n else if (typeof xWeight === 'object' && Object.keys(xWeight).length > 0 &&\n typeof xWeight[Object.keys(xWeight)[0]] ===\n 'object') {\n const output = [];\n outputNames.forEach(outputName => {\n if (outputName in xWeight) {\n output.push(xWeight[outputName]);\n }\n else {\n output.push(null);\n }\n });\n return output;\n }\n else {\n throw new Error(`The model has multiple (${numOutputs}) outputs, ` +\n `so ${weightType} must be either an array with ` +\n `${numOutputs} elements or an object with ${outputNames} keys. ` +\n `Provided ${weightType} not understood: ${JSON.stringify(xWeight)}`);\n }\n}\n/**\n * Standardize class weighting objects.\n *\n * This function takes a single class-weighting object, an array of them,\n * or a map from output name to class-weighting object. It compares it to the\n * output name(s) of the model, base on which it outputs an array of\n * class-weighting objects of which the length matches the number of outputs.\n *\n * @param classWeight Input class-weighting object(s).\n * @param outputNames All output name(s) of the model.\n * @return An array of class-weighting objects. The length of the array matches\n * the model's number of outputs.\n */\nexport function standardizeClassWeights(classWeight, outputNames) {\n return standardizeSampleOrClassWeights(classWeight, outputNames, 'classWeight');\n}\nexport function standardizeSampleWeights(classWeight, outputNames) {\n return standardizeSampleOrClassWeights(classWeight, outputNames, 'sampleWeight');\n}\n/**\n * Standardize by-sample and/or by-class weights for training.\n *\n * Note that this function operates on one model output at a time. 
For a model\n * with multiple outputs, you must call this function multiple times.\n *\n * @param y The target tensor that the by-sample and/or by-class weight is for.\n * The values of y are assumed to encode the classes, either directly\n * as an integer index, or as one-hot encoding.\n * @param sampleWeight By-sample weights.\n * @param classWeight By-class weights: an object mapping class indices\n * (integers) to a weight (float) to apply to the model's loss for the\n * samples from this class during training. This can be useful to tell the\n * model to \"pay more attention\" to samples from an under-represented class.\n * @param sampleWeightMode The mode for the sample weights.\n * @return A Promise of weight tensor, of which the size of the first dimension\n * matches that of `y`.\n */\nexport async function standardizeWeights(y, sampleWeight, classWeight, sampleWeightMode) {\n if (sampleWeight != null || sampleWeightMode != null) {\n // TODO(cais): Once 'temporal' mode is implemented, document it in the doc\n // string.\n throw new Error('Support sampleWeight is not implemented yet');\n }\n if (classWeight != null) {\n // Apply class weights per sample.\n const yClasses = tidy(() => {\n if (y.shape.length === 1) {\n // Assume class indices.\n return y.clone();\n }\n else if (y.shape.length === 2) {\n if (y.shape[1] > 1) {\n // Assume one-hot encoding of classes.\n const axis = 1;\n return y.argMax(axis);\n }\n else if (y.shape[1] === 1) {\n // Class index.\n return y.reshape([y.shape[0]]);\n }\n else {\n throw new Error(`Encountered unexpected last-dimension size (${y.shape[1]}) ` +\n `during handling of class weights. The size is expected to be ` +\n `>= 1.`);\n }\n }\n else {\n throw new Error(`Unexpected rank of target (y) tensor (${y.rank}) during ` +\n `handling of class weights. The rank is expected to be 1 or 2.`);\n }\n });\n const yClassIndices = Array.from(await yClasses.data());\n dispose(yClasses);\n const classSampleWeight = [];\n yClassIndices.forEach(classIndex => {\n if (classWeight[classIndex] == null) {\n throw new Error(`classWeight must contain all classes in the training data. 
` +\n `The class ${classIndex} exists in the data but not in ` +\n `classWeight`);\n }\n else {\n classSampleWeight.push(classWeight[classIndex]);\n }\n });\n return tensor1d(classSampleWeight, 'float32');\n }\n else {\n return null;\n }\n}\n/**\n * Apply per-sample weights on the loss values from a number of samples.\n *\n * @param losses Loss tensor of shape `[batchSize]`.\n * @param sampleWeights Per-sample weight tensor of shape `[batchSize]`.\n * @returns Tensor of the same shape as`losses`.\n */\nexport function computeWeightedLoss(losses, sampleWeights) {\n return mul(losses, sampleWeights);\n}\n//# sourceMappingURL=training_utils.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Interfaces and methods for training models using TensorFlow.js datasets.\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { scalar } from '@tensorflow/tfjs-core';\nimport { configureCallbacks, standardizeCallbacks } from '../base_callbacks';\nimport { NotImplementedError, ValueError } from '../errors';\nimport { disposeTensorsInLogs } from '../logs';\nimport { singletonOrArray, toList } from '../utils/generic_utils';\nimport { standardizeClassWeights, standardizeWeights } from './training_utils';\n// Default batch size used during tensor-based validation.\nconst DEFAULT_VALIDATION_BATCH_SIZE = 32;\n/**\n * Standardize the output of a dataset iterator for use by\n * LayersModel.fitDataset().\n *\n * @param model: A `tf.LayersModel` object.\n * @param iteratorOut The output of a dataset iterator. It is required to be\n * an object of the form `{xs: TensorOrArrayOrMap, ys:\n * TensorOrArrayOrMap}`, where `TensorOrArrayOrMap` is a single `tf.Tensor`,\n * a `tf.Tensor[]`, or a flat map from string names to `tf.Tensor`s.\n * @returns A flat array of `tf.Tensor` objects: the input `tf.Tensor`s\n * followed by the target `tf.Tensor`s. When `tf.Tensor`s are provided\n * as a map, the order in the resulting array is taken from the `inputNames`\n * and `outputNames` of the model.\n */\nfunction standardizeDataIteratorOutput(\n// Type `model` as `any` here to avoid circular dependency w/\n// training.ts.\n// tslint:disable-next-line:no-any\nmodel, iteratorOut) {\n let xs;\n let ys;\n const iteratorOutObj = iteratorOut;\n xs = iteratorOutObj['xs'];\n ys = iteratorOutObj['ys'];\n tfc.util.assert(xs != null && ys != null, () => 'A Dataset iterator for fitDataset() is expected to generate ' +\n 'objects of the form `{xs: xVal, ys: yVal}`, where the two ' +\n 'values may be `tf.Tensor`, an array of Tensors, or a map of ' +\n 'string to Tensor. The provided Dataset instead generates ' +\n `${iteratorOut}`);\n const flattenedXs = flattenTensorOrArrayOrMap('input', model.inputNames, xs);\n const flattenedYs = flattenTensorOrArrayOrMap('output', model.outputNames, ys);\n const batchSize = flattenedXs[0].shape[0];\n tfc.util.assert(flattenedXs.length === model.inputs.length, () => `LayersModel has ${model.inputs.length} inputs, but the dataset ` +\n `provides ${flattenedXs.length} inputs. (Expected input keys: ` +\n `${JSON.stringify(model.inputNames)})`);\n tfc.util.assert(flattenedYs.length === model.outputs.length, () => `LayersModel has ${model.outputs.length} outputs, but the dataset ` +\n `provides ${flattenedYs.length} outputs. 
(Expected output keys: ` +\n `${JSON.stringify(model.outputNames)})`);\n for (let xIndex = 0; xIndex < flattenedXs.length; xIndex++) {\n tfc.util.assert(flattenedXs[xIndex].shape[0] === batchSize, () => `Batch size mismatch: input ` +\n `${model.inputNames[xIndex]} has ${flattenedXs[xIndex].shape[0]}; ` +\n `expected ${batchSize} based on input ${model.inputNames[0]}.`);\n }\n for (let yIndex = 0; yIndex < flattenedYs.length; yIndex++) {\n tfc.util.assert(flattenedYs[yIndex].shape[0] === batchSize, () => `Batch size mismatch: output ` +\n `${model.outputNames[yIndex]} has ${flattenedYs[yIndex].shape[0]}; ` +\n `expected ${batchSize} based on input ${model.inputNames[0]}.`);\n }\n return { xs: flattenedXs, ys: flattenedYs };\n}\nfunction flattenTensorOrArrayOrMap(inputOrOutput, names, values) {\n if (values instanceof tfc.Tensor) {\n return [values];\n }\n else if (Array.isArray(values)) {\n tfc.util.assert(values.length === names.length, () => `Received an array of ${values.length} Tensors, but expected ${names.length} to match the ${inputOrOutput} keys ${names}.`);\n return values;\n }\n else {\n const result = [];\n // Check that all the required keys are available.\n for (const name of names) {\n if (values[name] == null) {\n throw new ValueError(`The feature data generated by the dataset lacks the required ` +\n `${inputOrOutput} key '${name}'.`);\n }\n result.push(values[name]);\n }\n return result;\n }\n}\nfunction standardizeTensorValidationData(data) {\n if (data.length === 3) {\n throw new NotImplementedError('Validation with sample weights is not implemented yet.');\n }\n return { xs: data[0], ys: data[1] };\n}\nexport async function fitDataset(\n// Type `model` as `any` here to avoid circular dependency w/\n// training.ts.\n// tslint:disable-next-line:no-any\nmodel, dataset, args) {\n const hasBatchesPerEpoch = args.batchesPerEpoch != null;\n tfc.util.assert(model.optimizer != null, () => 'You must compile a model before training/testing. Use ' +\n 'LayersModel.compile(modelCompileConfig).');\n tfc.util.assert(args != null, () => `For fitDataset(), the 2nd argument (config) is required, ` +\n `but it is not provided in this call.`);\n tfc.util.assert(args.epochs != null && args.epochs > 0 && Number.isInteger(args.epochs), () => `For fitDataset(), config.epochs is expected to be a positive ` +\n `integer, but got ${args.epochs}`);\n tfc.util.assert(!hasBatchesPerEpoch ||\n (args.batchesPerEpoch > 0 && Number.isInteger(args.batchesPerEpoch)), () => `For fitDataset(), config.batchesPerEpoch is expected to be a ` +\n `positive integer if specified, but got ${args.batchesPerEpoch}`);\n tfc.util.assert(\n // tslint:disable-next-line:no-any\n args['validationSplit'] == null, () => '`validationSplit` is not supported by `fitDataset()`. 
' +\n 'Use validationData instead.');\n if (model.isTraining) {\n throw new Error('Cannot start training because another fit() call is ongoing.');\n }\n model.isTraining = true;\n try {\n const doValidation = args.validationData != null;\n let valXs;\n let valYs;\n if (doValidation) {\n if (isDatasetObject(args.validationData)) {\n tfc.util.assert(args.validationBatches == null ||\n (args.validationBatches > 0 &&\n Number.isInteger(args.validationBatches)), () => `For fitDataset() with dataset-based validation, ` +\n `config.validationBatches is expected not to be provided, ` +\n `or to be a positive integer, ` +\n `but got ${args.validationBatches}`);\n }\n else {\n const validationData = standardizeTensorValidationData(args.validationData);\n valXs = validationData.xs;\n valYs = validationData.ys;\n }\n }\n const trainFunction = model.makeTrainFunction();\n const outLabels = model.getDedupedMetricsNames();\n let callbackMetrics;\n if (doValidation) {\n callbackMetrics =\n outLabels.slice().concat(outLabels.map(n => 'val_' + n));\n }\n else {\n callbackMetrics = outLabels.slice();\n }\n const callbacks = standardizeCallbacks(args.callbacks, args.yieldEvery);\n const verbose = args.verbose == null ? 1 : args.verbose;\n const { callbackList, history } = configureCallbacks(callbacks, verbose, args.epochs, null, null, getStepsPerEpoch(dataset, args), null, // Batch size determined by the dataset itself.\n doValidation, callbackMetrics);\n callbackList.setModel(model);\n model.history = history;\n await callbackList.onTrainBegin();\n model.stopTraining_ = false;\n let epoch = args.initialEpoch == null ? 0 : args.initialEpoch;\n let dataIterator = await dataset.iterator();\n while (epoch < args.epochs) {\n const epochLogs = {};\n await callbackList.onEpochBegin(epoch);\n let stepsDone = 0;\n let batchIndex = 0;\n if (!hasBatchesPerEpoch) {\n dataIterator = await dataset.iterator();\n }\n while (hasBatchesPerEpoch ? stepsDone < args.batchesPerEpoch : true) {\n const iteratorOut = await dataIterator.next();\n // If `batchesPerEpoch` is specified, the dataset should not be\n // exhausted until all epoches are done.\n if (hasBatchesPerEpoch && iteratorOut.done) {\n console.warn('You provided `batchesPerEpoch` as ' +\n `${args.batchesPerEpoch}, ` +\n 'but your dataset iterator ran out of data after ' +\n `${stepsDone} batches; ` +\n 'interrupting training. Make sure that your ' +\n 'dataset can generate at least `batchesPerEpoch * epochs` ' +\n 'batches (in this case, ' +\n `${args.batchesPerEpoch * args.epochs} batches). 
` +\n 'You may need to use the repeat() function when building ' +\n 'your dataset.');\n break;\n }\n if (iteratorOut.value != null) {\n const { xs, ys } = standardizeDataIteratorOutput(model, iteratorOut.value);\n const batchLogs = {};\n batchLogs['batch'] = batchIndex;\n batchLogs['size'] = xs[0].shape[0];\n await callbackList.onBatchBegin(batchIndex, batchLogs);\n const sampleWeights = [];\n if (args.classWeight != null) {\n const standardClassWeights = standardizeClassWeights(args.classWeight, model.outputNames);\n for (let i = 0; i < standardClassWeights.length; ++i) {\n sampleWeights.push(await standardizeWeights(ys[i], null, standardClassWeights[i]));\n }\n }\n // Train on batch.\n const ins = xs.concat(ys).concat(sampleWeights);\n const outs = trainFunction(ins);\n tfc.dispose(ins);\n for (let i = 0; i < outLabels.length; ++i) {\n const label = outLabels[i];\n const out = outs[i];\n batchLogs[label] = out;\n tfc.keep(out);\n }\n await callbackList.onBatchEnd(batchIndex, batchLogs);\n disposeTensorsInLogs(batchLogs);\n batchIndex++;\n stepsDone++;\n }\n if (hasBatchesPerEpoch ? stepsDone >= args.batchesPerEpoch :\n iteratorOut.done) {\n // Epoch finished. Perform validation.\n if (doValidation) {\n let valOuts;\n if (isDatasetObject(args.validationData)) {\n valOuts = toList(await model.evaluateDataset(args.validationData, { batches: args.validationBatches }));\n }\n else {\n valOuts = toList(model.evaluate(valXs, valYs, {\n batchSize: args.validationBatchSize == null ?\n DEFAULT_VALIDATION_BATCH_SIZE :\n args.validationBatchSize,\n verbose: 0\n }));\n }\n for (let i = 0; i < model.metricsNames.length; ++i) {\n epochLogs[`val_${model.metricsNames[i]}`] = valOuts[i];\n }\n }\n // Call `break` to exit one epoch lopp after validation is done. If\n // config.batchesPerEpoch is specified, an epoch while loop will\n // stop when `stepsDone >= config.batchesPerEpoch`. When\n // config.batchesPerEpoch is not provided, the following `break` is\n // required to exit the while lopp after dataset is exhausted.\n break;\n }\n if (model.stopTraining_) {\n break;\n }\n }\n await callbackList.onEpochEnd(epoch, epochLogs);\n epoch++;\n if (model.stopTraining_) {\n break;\n }\n }\n await callbackList.onTrainEnd();\n await model.history.syncData();\n return model.history;\n }\n finally {\n model.isTraining = false;\n }\n}\n/** Helper function that determines number of steps (batches) per epoch. 
*/\nfunction getStepsPerEpoch(dataset, args) {\n // Attempt to determine # of batches in an epoch.\n let stepsPerEpoch = null;\n if (args.batchesPerEpoch != null) {\n stepsPerEpoch = args.batchesPerEpoch;\n }\n else if (Number.isFinite(dataset.size)) {\n stepsPerEpoch = dataset.size;\n }\n return stepsPerEpoch;\n}\n// Check if provided object is a Dataset object by checking its .iterator\n// element.\nfunction isDatasetObject(dataset) {\n return (typeof dataset.iterator === 'function');\n}\n// Check if provided object is a LazyIterator object by checking it's .next\n// element.\nfunction isLazyIteratorObject(iterator) {\n return (typeof iterator.next === 'function');\n}\nexport async function evaluateDataset(\n// Type `model` as `any` here to avoid circular dependency w/\n// training.ts.\n// tslint:disable-next-line:no-any\nmodel, dataset, args) {\n args = args || {};\n const hasBatches = args.batches != null;\n const f = model.testFunction;\n let outs = [];\n if (args.verbose > 0) {\n throw new NotImplementedError('Verbose mode is not implemented yet.');\n }\n tfc.util.assert(!hasBatches || (args.batches > 0 && Number.isInteger(args.batches)), () => 'Test loop expects `batches` to be a positive integer, but ' +\n `received ${JSON.stringify(args.batches)}`);\n const dataIterator = isLazyIteratorObject(dataset) ?\n dataset :\n await dataset.iterator();\n // Keeps track of number of examples used in this evaluation.\n let numExamples = 0;\n let batch = 0;\n while (hasBatches ? batch < args.batches : true) {\n const iteratorOut = await dataIterator.next();\n outs = tfc.tidy(() => {\n if (iteratorOut.value) {\n // TODO(cais): Once real dataset is available, use\n // `map(x => standardizeDataIteratorOutput(model, x).map(f)`.\n const { xs, ys } = standardizeDataIteratorOutput(model, iteratorOut.value);\n const xsAndYs = xs.concat(ys);\n const batchOuts = tfc.tidy(() => f(xsAndYs));\n tfc.dispose(xsAndYs);\n if (batch === 0) {\n for (let i = 0; i < batchOuts.length; ++i) {\n outs.push(scalar(0));\n }\n }\n const batchSize = xsAndYs[0].shape[0];\n for (let i = 0; i < batchOuts.length; ++i) {\n const batchOut = batchOuts[i];\n const oldScalar = outs[i];\n outs[i] =\n tfc.tidy(() => tfc.add(outs[i], tfc.mul(batchSize, batchOut)));\n if (batch > 0) {\n tfc.dispose(oldScalar);\n }\n }\n tfc.dispose(batchOuts);\n numExamples += batchSize;\n ++batch;\n }\n return outs;\n });\n if (iteratorOut.done) {\n if (hasBatches) {\n console.warn('Your dataset iterator ran out of data during evaluateDataset(). ' +\n 'Interrupting evalution. Make sure that your ' +\n 'dataset can generate at least `batches` ' +\n `batches (in this case, ${args.batches} batches). 
` +\n 'You may need to use the repeat() function when building ' +\n 'your dataset.');\n }\n break;\n }\n }\n for (let i = 0; i < outs.length; ++i) {\n const oldScalar = outs[i];\n outs[i] = tfc.div(outs[i], numExamples);\n tfc.dispose(oldScalar);\n }\n return singletonOrArray(outs);\n}\n//# sourceMappingURL=training_dataset.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Interfaces and methods for training models using tf.Tensor objects.\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { Tensor, tensor1d, util } from '@tensorflow/tfjs-core';\nimport { expandDims, gather, sliceAlongFirstAxis } from '../backend/tfjs_backend';\nimport { configureCallbacks, standardizeCallbacks } from '../base_callbacks';\nimport { NotImplementedError, ValueError } from '../errors';\nimport { disposeTensorsInLogs } from '../logs';\nimport { range } from '../utils/math_utils';\nexport function checkBatchSize(batchSize) {\n tfc.util.assert(batchSize > 0 && Number.isInteger(batchSize), () => `batchSize is required to be a positive integer, but got ${batchSize}`);\n}\n/**\n * Slice a Tensor or an Array of Tensors, by start and stop indices.\n *\n * Porting Note: The `_slice_arrays` function in PyKeras is covered by this\n * function and `sliceArraysByIndices()` together.\n *\n * @param arrays: the input.\n * @param start: the starting index (inclusive).\n * @param stop: the stopping index (exclusive).\n * @returns The result of the slicing. If `arrays` is an `Array` of\n * `tf.Tensor`s, the slicing will be applied to all elements of the `Array`\n * in the same way.\n */\nexport function sliceArrays(arrays, start, stop) {\n if (arrays == null) {\n return [null];\n }\n else if (Array.isArray(arrays)) {\n return arrays.map(array => sliceAlongFirstAxis(array, start, stop - start));\n }\n else { // Tensor.\n return sliceAlongFirstAxis(arrays, start, stop - start);\n }\n}\n/**\n * Slice a Tensor or an Array of Tensors, by random-order indices.\n *\n * Porting Note: The `_slice_arrays` function in PyKeras is covered by this\n * function and `sliceArrays()` together.\n *\n * @param arrays The input `tf.Tensor` or `Array` of `tf.Tensor`s to slice.\n * If an `Array` of `tf.Tensor`s, all `tf.Tensor`s will be sliced in the\n * same fashion.\n * @param indices The indices to use for slicing along the first (batch)\n * dimension.\n * @returns Result(s) of the slicing.\n */\nexport function sliceArraysByIndices(arrays, indices) {\n return tfc.tidy(() => {\n if (arrays == null) {\n return null;\n }\n else if (Array.isArray(arrays)) {\n return arrays.map(array => sliceArraysByIndices(array, indices));\n }\n else {\n // TODO(cais): indices should be a pre-constructed Tensor1D to avoid\n // tensor1d() calls.\n return gather(arrays, indices.dtype === 'int32' ? indices : indices.toInt());\n }\n });\n}\n/**\n * Returns a list of batch indices (tuples of indices).\n * @param size: Integer, total size of the data to slice into batches.\n * @param batchSize: Integer, batch size.\n * @returns An Array of [batchStart, batchEnd] tuples. batchStart is\n * inclusive; batchEnd is exclusive. 
I.e., each batch consists of indices x\n * that satisfy batchStart <= x < batchEnd.\n */\nexport function makeBatches(size, batchSize) {\n const output = [];\n let batchStart = 0;\n let batchEnd = null;\n while (batchStart < size) {\n batchEnd = batchStart + batchSize;\n if (batchEnd >= size) {\n batchEnd = size;\n }\n output.push([batchStart, batchEnd]);\n batchStart = batchEnd;\n }\n return output;\n}\n/**\n * Abstract fit function for `f(ins)`.\n * @param f A Function returning a list of tensors. For training, this\n * function is expected to perform the updates to the variables.\n * @param ins List of tensors to be fed to `f`.\n * @param outLabels List of strings, display names of the outputs of `f`.\n * @param batchSize Integer batch size or `== null` if unknown. Default : 32.\n * @param epochs Number of times to iterate over the data. Default : 1.\n * @param verbose Verbosity mode: 0, 1, or 2. Default: 1.\n * @param callbacks List of callbacks to be called during training.\n * @param valF Function to call for validation.\n * @param valIns List of tensors to be fed to `valF`.\n * @param shuffle Whether to shuffle the data at the beginning of every\n * epoch. Default : true.\n * @param callbackMetrics List of strings, the display names of the metrics\n * passed to the callbacks. They should be the concatenation of the\n * display names of the outputs of `f` and the list of display names\n * of the outputs of `valF`.\n * @param initialEpoch Epoch at which to start training (useful for\n * resuming a previous training run). Default : 0.\n * @param stepsPerEpoch Total number of steps (batches on samples) before\n * declaring one epoch finished and starting the next epoch. Ignored with\n * the default value of `undefined` or `null`.\n * @param validationSteps Number of steps to run validation for (only if\n * doing validation from data tensors). 
Not applicable for tfjs-layers.\n * @returns A `History` object.\n */\nasync function fitLoop(\n// Type `model` as `any` here to avoid circular dependency w/ training.ts.\n// tslint:disable-next-line:no-any\nmodel, f, ins, outLabels, batchSize, epochs, verbose, callbacks, valF, valIns, shuffle, callbackMetrics, initialEpoch, stepsPerEpoch, validationSteps) {\n if (batchSize == null) {\n batchSize = 32;\n }\n if (epochs == null) {\n epochs = 1;\n }\n if (shuffle == null) {\n shuffle = true;\n }\n if (initialEpoch == null) {\n initialEpoch = 0;\n }\n // TODO(cais): Change const to let below when implementing validation.\n let doValidation = false;\n if (valF != null && valIns != null) {\n doValidation = true;\n // TODO(cais): verbose message.\n }\n if (validationSteps != null) {\n doValidation = true;\n if (stepsPerEpoch == null) {\n throw new ValueError('Can only use `validationSteps` when doing step-wise training, ' +\n 'i.e., `stepsPerEpoch` must be set.');\n }\n }\n const numTrainSamples = model.checkNumSamples(ins, batchSize, stepsPerEpoch, 'steps_per_epoch');\n let indexArray;\n if (numTrainSamples != null) {\n indexArray = range(0, numTrainSamples);\n }\n if (verbose == null) {\n verbose = 1;\n }\n const { callbackList, history } = configureCallbacks(callbacks, verbose, epochs, initialEpoch, numTrainSamples, stepsPerEpoch, batchSize, doValidation, callbackMetrics);\n callbackList.setModel(model);\n model.history = history;\n await callbackList.onTrainBegin();\n model.stopTraining_ = false;\n // TODO(cais): Take care of callbacks.validation_data as in PyKeras.\n // TODO(cais): Pre-convert feeds for performance as in PyKeras.\n for (let epoch = initialEpoch; epoch < epochs; ++epoch) {\n await callbackList.onEpochBegin(epoch);\n const epochLogs = {};\n if (stepsPerEpoch != null) {\n throw new NotImplementedError('stepsPerEpoch mode is not implemented yet.');\n }\n else {\n if (shuffle === 'batch') {\n throw new NotImplementedError('batch shuffling is not implemneted yet');\n }\n else if (shuffle) {\n util.shuffle(indexArray);\n }\n // Convert the potentially shuffled indices to Tensor1D, to avoid the\n // cost of repeated creation of Array1Ds later on.\n const epochIndexArray1D = tensor1d(indexArray);\n const batches = makeBatches(numTrainSamples, batchSize);\n for (let batchIndex = 0; batchIndex < batches.length; ++batchIndex) {\n const batchLogs = {};\n await callbackList.onBatchBegin(batchIndex, batchLogs);\n tfc.tidy(() => {\n const batchStart = batches[batchIndex][0];\n const batchEnd = batches[batchIndex][1];\n const batchIds = sliceAlongFirstAxis(epochIndexArray1D, batchStart, batchEnd - batchStart);\n batchLogs['batch'] = batchIndex;\n batchLogs['size'] = batchEnd - batchStart;\n // TODO(cais): In ins, train flag can be a number, instead of an\n // Tensor? 
Do we need to handle this in tfjs-layers?\n const insBatch = sliceArraysByIndices(ins, batchIds);\n const outs = f(insBatch);\n for (let i = 0; i < outLabels.length; ++i) {\n const label = outLabels[i];\n const out = outs[i];\n batchLogs[label] = out;\n tfc.keep(out);\n // TODO(cais): Use scope() to avoid ownership.\n }\n if (batchIndex === batches.length - 1) { // Last batch.\n if (doValidation) {\n const valOuts = model.testLoop(valF, valIns, batchSize);\n // Porting Notes: In tfjs-layers, valOuts is always an Array.\n for (let i = 0; i < outLabels.length; ++i) {\n const label = outLabels[i];\n const out = valOuts[i];\n tfc.keep(out);\n // TODO(cais): Use scope() to avoid ownership.\n epochLogs['val_' + label] = out;\n }\n }\n }\n });\n await callbackList.onBatchEnd(batchIndex, batchLogs);\n disposeTensorsInLogs(batchLogs);\n if (model.stopTraining_) {\n break;\n }\n // TODO(cais): return outs as list of Tensor.\n }\n epochIndexArray1D.dispose();\n }\n // TODO(cais): Run validation at the end of the epoch.\n await callbackList.onEpochEnd(epoch, epochLogs);\n if (model.stopTraining_) {\n break;\n }\n }\n await callbackList.onTrainEnd();\n await model.history.syncData();\n return model.history;\n}\nexport async function fitTensors(\n// Type `model` as `any` here to avoid circular dependency w/ training.ts.\n// tslint:disable-next-line:no-any\nmodel, x, y, args = {}) {\n if (model.isTraining) {\n throw new Error('Cannot start training because another fit() call is ongoing.');\n }\n model.isTraining = true;\n let inputs;\n let targets;\n let inputValX;\n let inputValY;\n let valX;\n let valY;\n let sampleWeights;\n try {\n const batchSize = args.batchSize == null ? 32 : args.batchSize;\n checkBatchSize(batchSize);\n // Validate user data.\n // TODO(cais): Support sampleWeight.\n const checkBatchAxis = false;\n const standardizedOuts = await model.standardizeUserData(x, y, args.sampleWeight, args.classWeight, checkBatchAxis, batchSize);\n inputs = standardizedOuts[0];\n targets = standardizedOuts[1];\n sampleWeights = standardizedOuts[2];\n // Prepare validation data.\n let doValidation = false;\n let valIns;\n if (args.validationData != null && args.validationData.length > 0) {\n doValidation = true;\n if (args.validationData.length === 2) {\n // config.validationData consists of valX and valY.\n inputValX = args.validationData[0];\n inputValY = args.validationData[1];\n }\n else if (args.validationData.length === 3) {\n throw new NotImplementedError('validationData including sample weights is not supported yet.');\n }\n else {\n throw new ValueError(`When passing validation data, it must contain 2 (valX, valY) ` +\n `or 3 (valX, valY, valSampleWeight) items; ` +\n `${args.validationData} is invalid.`);\n }\n const checkBatchAxis = true;\n const valStandardized = await model.standardizeUserData(inputValX, inputValY, null, /** Unused sample weights. */ null, /** Unused class weights. 
*/ checkBatchAxis, batchSize);\n valX = valStandardized[0];\n valY = valStandardized[1];\n valIns = valX.concat(valY);\n // TODO(cais): Add useLearningPhase data properly.\n }\n else if (args.validationSplit != null && args.validationSplit > 0 &&\n args.validationSplit < 1) {\n doValidation = true;\n // Porting Note: In tfjs-layers, inputs[0] is always a Tensor.\n const splitAt = Math.floor(inputs[0].shape[0] * (1 - args.validationSplit));\n const originalBatchSize = inputs[0].shape[0];\n valX = sliceArrays(inputs, splitAt, originalBatchSize);\n inputs = sliceArrays(inputs, 0, splitAt);\n valY = sliceArrays(targets, splitAt, originalBatchSize);\n targets = sliceArrays(targets, 0, splitAt);\n // TODO(cais): Once sampleWeights becomes available, slice it to get\n // valSampleWeights.\n valIns = valX.concat(valY);\n // TODO(cais): Add useLearningPhase data properly.\n }\n else if (args.validationSteps != null) {\n doValidation = true;\n // TODO(cais): Add useLearningPhase.\n }\n const ins = inputs.concat(targets).concat(sampleWeights);\n model.checkTrainableWeightsConsistency();\n // TODO(cais): Handle use_learning_phase and learning_phase?\n // Porting Note: Here we see a key deviation of tfjs-layers from\n // Keras.\n // Due to the imperative nature of tfjs-layers' backend (tfjs-core),\n // we do not construct symbolic computation graphs to embody the\n // training process. Instead, we define a function that performs the\n // training action. In PyKeras, the data (inputs and targets) are fed\n // through graph placeholders. In tfjs-layers, the data are fed as\n // function arguments. Since the function are defined below in the\n // scope, we don't have equivalents of PyKeras's\n // `_make_train_funciton`.\n const trainFunction = model.makeTrainFunction();\n const outLabels = model.getDedupedMetricsNames();\n let valFunction;\n let callbackMetrics;\n if (doValidation) {\n model.makeTestFunction();\n valFunction = model.testFunction;\n callbackMetrics =\n outLabels.slice().concat(outLabels.map(n => 'val_' + n));\n }\n else {\n valFunction = null;\n valIns = [];\n callbackMetrics = outLabels.slice();\n }\n const callbacks = standardizeCallbacks(args.callbacks, args.yieldEvery);\n const out = await fitLoop(model, trainFunction, ins, outLabels, batchSize, args.epochs, args.verbose, callbacks, valFunction, valIns, args.shuffle, callbackMetrics, args.initialEpoch, null, null);\n return out;\n }\n finally {\n model.isTraining = false;\n // Memory clean up.\n disposeNewTensors(inputs, x);\n disposeNewTensors(targets, y);\n disposeNewTensors(valX, inputValX);\n disposeNewTensors(valY, inputValY);\n if (sampleWeights != null) {\n tfc.dispose(sampleWeights);\n }\n }\n // TODO(cais): Add value to outLabels.\n}\n/**\n * Ensure tensors all have a rank of at least 2.\n *\n * If a tensor has a rank of 1, it is dimension-expanded to rank 2.\n * If any tensor has a rank of 0 (i.e., is a scalar), an error will be thrown.\n */\nexport function ensureTensorsRank2OrHigher(tensors) {\n const outs = [];\n if (tensors instanceof Tensor) {\n tensors = [tensors];\n }\n // Make Tensors at least 2D.\n for (let i = 0; i < tensors.length; ++i) {\n const tensor = tensors[i];\n if (tensor.rank === 1) {\n outs.push(expandDims(tensor, 1));\n }\n else if (tensor.rank === 0) {\n throw new Error('Expected tensor to be at least 1D, but received a 0D tensor ' +\n '(scalar).');\n }\n else {\n outs.push(tensor);\n }\n }\n return outs;\n}\n/**\n * Compare a set of tensors with a reference (old) set, discard the ones\n * in the new 
set that are not present in the reference set.\n *\n * This method is used for memory clenaup during calls such as\n * LayersModel.fit().\n *\n * @param tensors New set which may contain Tensors not present in\n * `refTensors`.\n * @param refTensors Reference Tensor set.\n */\n// TODO(cais, kangyizhang): Deduplicate with tfjs-data.\nexport function disposeNewTensors(tensors, refTensors) {\n if (tensors == null) {\n return;\n }\n const oldTensorIds = [];\n if (refTensors instanceof Tensor) {\n oldTensorIds.push(refTensors.id);\n }\n else if (Array.isArray(refTensors)) {\n refTensors.forEach(t => oldTensorIds.push(t.id));\n }\n else if (refTensors != null) {\n // `oldTensors` is a map from string name to Tensor.\n for (const name in refTensors) {\n const oldTensor = refTensors[name];\n oldTensorIds.push(oldTensor.id);\n }\n }\n const tensorsToDispose = [];\n if (tensors instanceof Tensor) {\n if (oldTensorIds.indexOf(tensors.id) === -1) {\n tensorsToDispose.push(tensors);\n }\n }\n else if (Array.isArray(tensors)) {\n tensors.forEach(t => {\n if (oldTensorIds.indexOf(t.id) === -1) {\n tensorsToDispose.push(t);\n }\n });\n }\n else if (tensors != null) {\n // `oldTensors` is a map from string name to Tensor.\n for (const name in tensors) {\n const tensor = tensors[name];\n if (oldTensorIds.indexOf(tensor.id) === -1) {\n tensorsToDispose.push(tensor);\n }\n }\n }\n tensorsToDispose.forEach(t => {\n if (!t.isDisposed) {\n t.dispose();\n }\n });\n}\n//# sourceMappingURL=training_tensors.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* Original Source: engine/training.py */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { io, Optimizer, scalar, serialization, Tensor, tensor1d, util } from '@tensorflow/tfjs-core';\nimport * as K from '../backend/tfjs_backend';\nimport { nameScope } from '../common';\nimport { NotImplementedError, RuntimeError, ValueError } from '../errors';\nimport { deserialize } from '../layers/serialization';\nimport * as losses from '../losses';\nimport * as Metrics from '../metrics';\nimport * as optimizers from '../optimizers';\nimport { checkUserDefinedMetadata } from '../user_defined_metadata';\nimport { count, pyListRepeat, singletonOrArray, toCamelCase, toSnakeCase, unique } from '../utils/generic_utils';\nimport { printSummary } from '../utils/layer_utils';\nimport { range } from '../utils/math_utils';\nimport { convertPythonicToTs } from '../utils/serialization_utils';\nimport { version } from '../version';\nimport { Container } from './container';\nimport { execute, FeedDict } from './executor';\nimport { evaluateDataset, fitDataset } from './training_dataset';\nimport { checkBatchSize, disposeNewTensors, ensureTensorsRank2OrHigher, fitTensors, makeBatches, sliceArrays, sliceArraysByIndices } from './training_tensors';\nimport { computeWeightedLoss, standardizeClassWeights, standardizeWeights } from './training_utils';\n/**\n * Helper function for polymorphic input data: 1. singleton Tensor.\n */\nexport function isDataTensor(x) {\n return x instanceof Tensor;\n}\n/**\n * Helper function for polymorphic input data: 2. Array of Tensor.\n */\nexport function isDataArray(x) {\n return Array.isArray(x);\n}\n/**\n * Helper function for polymorphic input data: 3. 
\"dict\" of Tensor.\n */\nexport function isDataDict(x) {\n return !isDataTensor(x) && !isDataArray(x);\n}\n/**\n * Normalizes inputs and targets provided by users.\n * @param data User-provided input data (polymorphic).\n * @param names An Array of expected Tensor names.\n * @param shapes Optional Array of expected Tensor shapes.\n * @param checkBatchAxis Whether to check that the batch axis of the arrays\n * match the expected value found in `shapes`.\n * @param exceptionPrefix String prefix used for exception formatting.\n * @returns List of standardized input Tensors (one Tensor per model input).\n * @throws ValueError: in case of improperly formatted user data.\n */\nexport function standardizeInputData(data, names, shapes, checkBatchAxis = true, exceptionPrefix = '') {\n if (names == null || names.length === 0) {\n // Check for the case where the model expected no data, but some data got\n // sent.\n if (data != null) {\n let gotUnexpectedData = false;\n if (isDataArray(data) && data.length > 0) {\n gotUnexpectedData = true;\n }\n else if (isDataDict(data)) {\n for (const key in data) {\n if (data.hasOwnProperty(key)) {\n gotUnexpectedData = true;\n break;\n }\n }\n }\n else {\n // `data` is a singleton Tensor in this case.\n gotUnexpectedData = true;\n }\n if (gotUnexpectedData) {\n throw new ValueError(`Error when checking model ${exceptionPrefix} expected no data, ` +\n `but got ${data}`);\n }\n }\n return [];\n }\n if (data == null) {\n return names.map(name => null);\n }\n let arrays;\n if (isDataDict(data)) {\n data = data;\n arrays = [];\n for (const name of names) {\n if (data[name] == null) {\n throw new ValueError(`No data provided for \"${name}\". Need data for each key in: ` +\n `${names}`);\n }\n arrays.push(data[name]);\n }\n }\n else if (isDataArray(data)) {\n data = data;\n if (data.length !== names.length) {\n throw new ValueError(`Error when checking model ${exceptionPrefix}: the Array of ` +\n `Tensors that you are passing to your model is not the size the ` +\n `model expected. Expected to see ${names.length} Tensor(s), but ` +\n `instead got the following list of Tensor(s): ${data}`);\n }\n arrays = data;\n }\n else {\n data = data;\n if (names.length > 1) {\n throw new ValueError(`The model ${exceptionPrefix} expects ${names.length} Tensor(s), ` +\n `but only received one Tensor. Found: Tensor with shape ${data.shape}`);\n }\n arrays = [data];\n }\n arrays = ensureTensorsRank2OrHigher(arrays);\n // Check shape compatibility.\n if (shapes != null) {\n for (let i = 0; i < names.length; ++i) {\n if (shapes[i] == null) {\n continue;\n }\n const array = arrays[i];\n if (array.shape.length !== shapes[i].length) {\n throw new ValueError(`Error when checking ${exceptionPrefix}: expected ${names[i]} ` +\n `to have ${shapes[i].length} dimension(s). 
but got array with ` +\n `shape ${array.shape}`);\n }\n for (let j = 0; j < shapes[i].length; ++j) {\n if (j === 0 && !checkBatchAxis) {\n // Skip the first (batch) axis.\n continue;\n }\n const dim = array.shape[j];\n const refDim = shapes[i][j];\n if (refDim != null && refDim >= 0 && dim !== refDim) {\n throw new ValueError(`Error when checking ${exceptionPrefix}: expected ${names[i]} ` +\n `to have shape [${shapes[i]}], but got array with shape ` +\n `[${array.shape}].`);\n }\n }\n }\n }\n return arrays;\n}\n/**\n * User input validation for Tensors.\n * @param inputs `Array` of `tf.Tensor`s for inputs.\n * @param targets `Array` of `tf.Tensor`s for targets.\n * @param weights Optional `Array` of `tf.Tensor`s for sample weights.\n * @throws ValueError: in case of incorrectly formatted data.\n */\nexport function checkArrayLengths(inputs, targets, weights) {\n const setX = unique(inputs.map(input => input.shape[0]));\n setX.sort();\n const setY = unique(targets.map(target => target.shape[0]));\n setY.sort();\n // TODO(cais): Check `weights` as well.\n if (setX.length > 1) {\n throw new ValueError(`All input Tensors (x) should have the same number of samples. ` +\n `Got array shapes: ` +\n `${JSON.stringify(inputs.map(input => input.shape))}`);\n }\n if (setY.length > 1) {\n throw new ValueError(`All target Tensors (y) should have the same number of samples. ` +\n `Got array shapes: ` +\n `${JSON.stringify(targets.map(target => target.shape))}`);\n }\n if (setX.length > 0 && setY.length > 0 && !util.arraysEqual(setX, setY)) {\n throw new ValueError(`Input Tensors should have the same number of samples as target ` +\n `Tensors. Found ${setX[0]} input sample(s) and ${setY[0]} target ` +\n `sample(s).`);\n }\n}\n/**\n * Validation on the compatibility of targes and loss functions.\n *\n * This helps prevent users from using loss functions incorrectly.\n *\n * @param targets `Array` of `tf.Tensor`s of targets.\n * @param lossFns `Array` of loss functions.\n * @param outputShapes `Array` of shapes of model outputs.\n */\nfunction checkLossAndTargetCompatibility(targets, lossFns, outputShapes) {\n // TODO(cais): Dedicated test coverage?\n const keyLosses = [\n losses.meanSquaredError, losses.binaryCrossentropy,\n losses.categoricalCrossentropy\n ];\n for (let i = 0; i < targets.length; ++i) {\n const y = targets[i];\n const loss = lossFns[i];\n const shape = outputShapes[i];\n if (loss == null) {\n continue;\n }\n if (loss === losses.categoricalCrossentropy) {\n if (y.shape[y.shape.length - 1] === 1) {\n throw new ValueError(`You are passing a target array of shape ${y.shape} while using ` +\n `a loss 'categorical_crossentropy'. 'categorical_crossentropy'` +\n `expects targets to be binary matrices (1s and 0s) of shape ` +\n `[samples, classes].`);\n // TODO(cais): Example code in error message.\n }\n }\n if (keyLosses.indexOf(loss) !== -1) {\n const slicedYShape = y.shape.slice(1);\n const slicedShape = shape.slice(1);\n for (let j = 0; j < slicedYShape.length; ++j) {\n const targetDim = slicedYShape[j];\n const outDim = slicedShape[j];\n if (outDim != null && targetDim !== outDim) {\n throw new ValueError(`A target Tensor with shape ${y.shape} was passed for an ` +\n `output of shape ${shape}, while using a loss function that ` +\n `expects targets to have the same shape as the output.`);\n }\n }\n }\n }\n}\n/**\n * Check inputs provided by the user.\n *\n * Porting Note: This corresponds to _standardize_input_data() in Python\n * Keras. 
Because of the strong typing in TF.js, we do not need to convert\n * the data. Specifically:\n * 1) in PyKeras, `data` can be `DataFrame` instances from pandas, for\n * example. We don't need to worry about that here because there is no\n * widely popular javascript/typesdcript equivalent of pandas (so far).\n * If one becomes available in the future, we can add support.\n * 2) in PyKeras, inputs can be Python dict. But here we are stipulating\n * that the data is either a single `tf.Tensor` or an Array of `tf.Tensor`s. We\n * may add support for `Object` data inputs in the future when the need\n * arises.\n *\n * Instead, we perform basic checks for number of parameters and shapes.\n *\n * @param data: The input data.\n * @param names: Name for the inputs, from the model.\n * @param shapes: Expected shapes for the input data, from the model.\n * @param checkBatchAxis: Whether the size along the batch axis (i.e., the\n * first dimension) will be checked for matching.\n * @param exceptionPrefix: Execption prefix message, used in generating error\n * messages.\n * @throws ValueError: on incorrect number of inputs or mismatches in shapes.\n */\nfunction checkInputData(data, names, shapes, checkBatchAxis = true, exceptionPrefix = '') {\n let arrays;\n if (Array.isArray(data)) {\n if (data.length !== names.length) {\n throw new ValueError(`Error when checking model ${exceptionPrefix}: the Array of ` +\n `Tensors that you are passing to your model is not the size the ` +\n `the model expected. Expected to see ${names.length} Tensor(s),` +\n ` but instead got ${data.length} Tensors(s).`);\n }\n arrays = data;\n }\n else {\n if (names.length > 1) {\n throw new ValueError(`The model expects ${names.length} ${exceptionPrefix} Tensors, ` +\n `but only received one Tensor. Found: array with shape ` +\n `${JSON.stringify(data.shape)}.`);\n }\n arrays = [data];\n }\n if (shapes != null) {\n for (let i = 0; i < names.length; ++i) {\n if (shapes[i] == null) {\n continue;\n }\n const array = arrays[i];\n if (array.shape.length !== shapes[i].length) {\n throw new ValueError(`Error when checking ${exceptionPrefix}: expected ${names[i]} ` +\n `to have ${shapes[i].length} dimension(s), but got array with ` +\n `shape ${JSON.stringify(array.shape)}`);\n }\n for (let j = 0; j < shapes[i].length; ++j) {\n if (j === 0 && !checkBatchAxis) {\n continue;\n }\n const dim = array.shape[j];\n const refDim = shapes[i][j];\n if (refDim != null) {\n if (refDim !== dim) {\n throw new ValueError(`Error when checking ${exceptionPrefix}: expected ` +\n `${names[i]} to have shape ${JSON.stringify(shapes[i])} but ` +\n `got array with shape ${JSON.stringify(array.shape)}.`);\n }\n }\n }\n }\n }\n}\n/**\n * Maps metric functions to model outputs.\n * @param metrics An shortcut strings name, metric function, `Array` or dict\n * (`Object`) of metric functions.\n * @param outputNames An `Array` of the names of model outputs.\n * @returns An `Array` (one entry per model output) of `Array` of metric\n * functions. 
For instance, if the model has 2 outputs, and for the first\n * output we want to compute `binaryAccuracy` and `binaryCrossentropy`,\n * and just `binaryAccuracy` for the second output, the `Array` would look\n * like:\n * `[[binaryAccuracy, binaryCrossentropy], [binaryAccuracy]]`\n * @throws TypeError: incompatible metrics format.\n */\nexport function collectMetrics(metrics, outputNames) {\n if (metrics == null || Array.isArray(metrics) && metrics.length === 0) {\n return outputNames.map(name => []);\n }\n let wrappedMetrics;\n if (typeof metrics === 'string' || typeof metrics === 'function') {\n wrappedMetrics = [metrics];\n }\n else if (Array.isArray(metrics) || typeof metrics === 'object') {\n wrappedMetrics = metrics;\n }\n else {\n throw new TypeError('Type of metrics argument not understood. Expected an string,' +\n `function, Array, or Object, found: ${metrics}`);\n }\n if (Array.isArray(wrappedMetrics)) {\n // We then apply all metrics to all outputs.\n return outputNames.map(name => wrappedMetrics);\n }\n else {\n // In this case, metrics is a dict.\n const nestedMetrics = [];\n for (const name of outputNames) {\n let outputMetrics = wrappedMetrics.hasOwnProperty(name) ? wrappedMetrics[name] : [];\n if (!Array.isArray(outputMetrics)) {\n outputMetrics = [outputMetrics];\n }\n nestedMetrics.push(outputMetrics);\n }\n return nestedMetrics;\n }\n}\nconst LAYERS_MODEL_FORMAT_NAME = 'layers-model';\n/**\n * A `tf.LayersModel` is a directed, acyclic graph of `tf.Layer`s plus methods\n * for training, evaluation, prediction and saving.\n *\n * `tf.LayersModel` is the basic unit of training, inference and evaluation in\n * TensorFlow.js. To create a `tf.LayersModel`, use `tf.LayersModel`.\n *\n * See also:\n * `tf.Sequential`, `tf.loadLayersModel`.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\nexport class LayersModel extends Container {\n constructor(args) {\n super(args);\n this.isTraining = false;\n }\n /**\n * Print a text summary of the model's layers.\n *\n * The summary includes\n * - Name and type of all layers that comprise the model.\n * - Output shape(s) of the layers\n * - Number of weight parameters of each layer\n * - If the model has non-sequential-like topology, the inputs each layer\n * receives\n * - The total number of trainable and non-trainable parameters of the model.\n *\n * ```js\n * const input1 = tf.input({shape: [10]});\n * const input2 = tf.input({shape: [20]});\n * const dense1 = tf.layers.dense({units: 4}).apply(input1);\n * const dense2 = tf.layers.dense({units: 8}).apply(input2);\n * const concat = tf.layers.concatenate().apply([dense1, dense2]);\n * const output =\n * tf.layers.dense({units: 3, activation: 'softmax'}).apply(concat);\n *\n * const model = tf.model({inputs: [input1, input2], outputs: output});\n * model.summary();\n * ```\n *\n * @param lineLength Custom line length, in number of characters.\n * @param positions Custom widths of each of the columns, as either\n * fractions of `lineLength` (e.g., `[0.5, 0.75, 1]`) or absolute number\n * of characters (e.g., `[30, 50, 65]`). Each number corresponds to\n * right-most (i.e., ending) position of a column.\n * @param printFn Custom print function. Can be used to replace the default\n * `console.log`. 
For example, you can use `x => {}` to mute the printed\n * messages in the console.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n summary(lineLength, positions, printFn = console.log) {\n if (!this.built) {\n throw new ValueError(`This model has never been called, thus its weights have not been ` +\n `created yet. So no summary can be displayed. Build the model ` +\n `first (e.g., by calling it on some test data).`);\n }\n printSummary(this, lineLength, positions, printFn);\n }\n /**\n * Configures and prepares the model for training and evaluation. Compiling\n * outfits the model with an optimizer, loss, and/or metrics. Calling `fit`\n * or `evaluate` on an un-compiled model will throw an error.\n *\n * @param args a `ModelCompileArgs` specifying the loss, optimizer, and\n * metrics to be used for fitting and evaluating this model.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n compile(args) {\n if (args.loss == null) {\n args.loss = [];\n }\n this.loss = args.loss;\n if (typeof args.optimizer === 'string') {\n this.optimizer_ = optimizers.getOptimizer(args.optimizer);\n this.isOptimizerOwned = true;\n }\n else {\n if (!(args.optimizer instanceof Optimizer)) {\n throw new ValueError(`User-defined optimizer must be an instance of tf.Optimizer.`);\n }\n this.optimizer_ = args.optimizer;\n this.isOptimizerOwned = false;\n }\n // TODO(cais): Add lossWeights.\n // TODO(cais): Add sampleWeightMode.\n // Prepare loss functions.\n let lossFunctions = [];\n if (!Array.isArray(args.loss) && typeof args.loss !== 'string' &&\n typeof args.loss !== 'function') {\n args.loss = args.loss;\n for (const name in args.loss) {\n if (this.outputNames.indexOf(name) === -1) {\n throw new ValueError(`Unknown entry in loss dictionary: \"${name}\". ` +\n `Only expected the following keys: ${this.outputNames}`);\n }\n }\n for (const name of this.outputNames) {\n if (args.loss[name] == null) {\n console.warn(`Output \"${name}\" is missing from loss dictionary. We assume ` +\n `this was done on purpose, and we will not be expecting data ` +\n `to be passed to ${name} during training`);\n }\n lossFunctions.push(losses.get(args.loss[name]));\n }\n }\n else if (Array.isArray(args.loss)) {\n if (args.loss.length !== this.outputs.length) {\n throw new ValueError(`When passing an Array as loss, it should have one entry per ` +\n `model output. The model has ${this.outputs.length} output(s), ` +\n `but you passed loss=${args.loss}.`);\n }\n const theLosses = args.loss;\n lossFunctions = theLosses.map(l => losses.get(l));\n }\n else {\n const lossFunction = losses.get(args.loss);\n this.outputs.forEach(_ => {\n lossFunctions.push(lossFunction);\n });\n }\n this.lossFunctions = lossFunctions;\n this.feedOutputNames = [];\n this.feedOutputShapes = [];\n this.feedLossFns = [];\n for (let i = 0; i < this.outputs.length; ++i) {\n // TODO(cais): Logic for skipping target(s).\n const shape = this.internalOutputShapes[i];\n const name = this.outputNames[i];\n this.feedOutputNames.push(name);\n this.feedOutputShapes.push(shape);\n this.feedLossFns.push(this.lossFunctions[i]);\n }\n // TODO(cais): Add logic for output masks.\n // TODO(cais): Add logic for sample weights.\n const skipTargetIndices = [];\n // Prepare metrics.\n this.metrics = args.metrics;\n // TODO(cais): Add weightedMetrics.\n this.metricsNames = ['loss'];\n this.metricsTensors = [];\n // Compute total loss.\n // Porting Note: In PyKeras, metrics_tensors are symbolic tensor objects.\n // Here, metricsTensors are TypeScript functions. 
This difference is due\n // to the difference in symbolic/imperative property of the backends.\n nameScope('loss', () => {\n for (let i = 0; i < this.outputs.length; ++i) {\n if (skipTargetIndices.indexOf(i) !== -1) {\n continue;\n }\n // TODO(cais): Add weightedLoss, sampleWeight and mask.\n // The following line should be weightedLoss\n const weightedLoss = this.lossFunctions[i];\n if (this.outputs.length > 1) {\n this.metricsTensors.push([weightedLoss, i]);\n this.metricsNames.push(this.outputNames[i] + '_loss');\n }\n }\n // Porting Note: Due to the imperative nature of the backend, we calculate\n // the regularizer penalties in the totalLossFunction, instead of here.\n });\n const nestedMetrics = collectMetrics(args.metrics, this.outputNames);\n // TODO(cais): Add nestedWeightedMetrics.\n /**\n * Helper function used in loop below.\n */\n const appendMetric = (outputIndex, metricName, metricTensor) => {\n if (this.outputNames.length > 1) {\n metricName = this.outputNames[outputIndex] + '_' + metricName;\n }\n this.metricsNames.push(metricName);\n this.metricsTensors.push([metricTensor, outputIndex]);\n };\n nameScope('metric', () => {\n for (let i = 0; i < this.outputs.length; ++i) {\n if (skipTargetIndices.indexOf(i) !== -1) {\n continue;\n }\n const outputMetrics = nestedMetrics[i];\n // TODO(cais): Add weights and outputWeightedMetrics.\n // TODO(cais): Add optional arg `weights` to the following function.\n const handleMetrics = (metrics) => {\n const metricNamePrefix = '';\n let metricName;\n let accFn;\n let weightedMetricFn;\n // TODO(cais): Use 'weights_' for weighted metrics.\n for (const metric of metrics) {\n if (typeof metric === 'string' &&\n ['accuracy', 'acc', 'crossentropy', 'ce'].indexOf(metric) !==\n -1) {\n const outputShape = this.internalOutputShapes[i];\n if (outputShape[outputShape.length - 1] === 1 ||\n this.lossFunctions[i] === losses.binaryCrossentropy) {\n // case: binary accuracy/crossentropy.\n if (['accuracy', 'acc'].indexOf(metric) !== -1) {\n accFn = Metrics.binaryAccuracy;\n }\n else if (['crossentropy', 'ce'].indexOf(metric) !== -1) {\n accFn = Metrics.binaryCrossentropy;\n }\n }\n else if (this.lossFunctions[i] ===\n losses.sparseCategoricalCrossentropy) {\n // case: categorical accuracy / crossentropy with sparse\n // targets.\n if (['accuracy', 'acc'].indexOf(metric) !== -1) {\n accFn = Metrics.sparseCategoricalAccuracy;\n }\n else if (['crossentropy', 'ce'].indexOf(metric) !== -1) {\n accFn = Metrics.sparseCategoricalCrossentropy;\n }\n }\n else {\n // case: categorical accuracy / crossentropy.\n if (['accuracy', 'acc'].indexOf(metric) !== -1) {\n accFn = Metrics.categoricalAccuracy;\n }\n else if (['crossentropy', 'ce'].indexOf(metric) !== -1) {\n accFn = Metrics.categoricalCrossentropy;\n }\n }\n let suffix;\n if (['accuracy', 'acc'].indexOf(metric) !== -1) {\n suffix = 'acc';\n }\n else if (['crossentropy', 'ce'].indexOf(metric) !== -1) {\n suffix = 'ce';\n }\n // TODO(cais): Add weighting actually.\n weightedMetricFn = accFn;\n metricName = metricNamePrefix + suffix;\n }\n else {\n const metricFn = Metrics.get(metric);\n // TODO(cais): Add weighting actually.\n weightedMetricFn = metricFn;\n metricName =\n metricNamePrefix + Metrics.getLossOrMetricName(metric);\n }\n // TODO(cais): Add weighting and masking to metricResult.\n let metricResult;\n nameScope(metricName, () => {\n metricResult = weightedMetricFn;\n });\n appendMetric(i, metricName, metricResult);\n }\n };\n handleMetrics(outputMetrics);\n // TODO(cais): Call handleMetrics with 
weights.\n }\n });\n // Porting Notes: Given the imperative backend of tfjs-core,\n // there is no need for constructing the symbolic graph and placeholders.\n this.collectedTrainableWeights = this.trainableWeights;\n }\n /**\n * Check trainable weights count consistency.\n *\n * This will raise a warning if `this.trainableWeights` and\n * `this.collectedTrainableWeights` are inconsistent (i.e., have different\n * numbers of parameters).\n * Inconsistency will typically arise when one modifies `model.trainable`\n * without calling `model.compile()` again.\n */\n checkTrainableWeightsConsistency() {\n if (this.collectedTrainableWeights == null) {\n return;\n }\n if (this.trainableWeights.length !==\n this.collectedTrainableWeights.length) {\n console.warn('Discrepancy between trainableweights and collected trainable ' +\n 'weights. Did you set `model.trainable` without calling ' +\n '`model.compile()` afterwards?');\n }\n }\n /**\n * Returns the loss value & metrics values for the model in test mode.\n *\n * Loss and metrics are specified during `compile()`, which needs to happen\n * before calls to `evaluate()`.\n *\n * Computation is done in batches.\n *\n * ```js\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 1, inputShape: [10]})]\n * });\n * model.compile({optimizer: 'sgd', loss: 'meanSquaredError'});\n * const result = model.evaluate(\n * tf.ones([8, 10]), tf.ones([8, 1]), {batchSize: 4});\n * result.print();\n * ```\n *\n * @param x `tf.Tensor` of test data, or an `Array` of `tf.Tensor`s if the\n * model has multiple inputs.\n * @param y `tf.Tensor` of target data, or an `Array` of `tf.Tensor`s if the\n * model has multiple outputs.\n * @param args A `ModelEvaluateArgs`, containing optional fields.\n *\n * @return `Scalar` test loss (if the model has a single output and no\n * metrics) or `Array` of `Scalar`s (if the model has multiple outputs\n * and/or metrics). The attribute `model.metricsNames`\n * will give you the display labels for the scalar outputs.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n evaluate(x, y, args = {}) {\n const batchSize = args.batchSize == null ? 32 : args.batchSize;\n checkBatchSize(batchSize);\n // TODO(cais): Standardize `config.sampleWeights` as well.\n // Validate user data.\n const checkBatchAxis = true;\n const standardizedOuts = this.standardizeUserDataXY(x, y, checkBatchAxis, batchSize);\n try {\n // TODO(cais): If uses `useLearningPhase`, set the corresponding element\n // of the input to 0.\n const ins = standardizedOuts[0].concat(standardizedOuts[1]);\n this.makeTestFunction();\n const f = this.testFunction;\n const testOuts = this.testLoop(f, ins, batchSize, args.verbose, args.steps);\n return singletonOrArray(testOuts);\n }\n finally {\n disposeNewTensors(standardizedOuts[0], x);\n disposeNewTensors(standardizedOuts[1], y);\n }\n }\n // TODO(cais): Add code snippet below once real dataset objects are\n // available.\n /**\n * Evaluate model using a dataset object.\n *\n * Note: Unlike `evaluate()`, this method is asynchronous (`async`);\n *\n * @param dataset A dataset object. Its `iterator()` method is expected\n * to generate a dataset iterator object, the `next()` method of which\n * is expected to produce data batches for evaluation. The return value\n * of the `next()` call ought to contain a boolean `done` field and a\n * `value` field. The `value` field is expected to be an array of two\n * `tf.Tensor`s or an array of two nested `tf.Tensor` structures. 
The former\n * case is for models with exactly one input and one output (e.g..\n * a sequential model). The latter case is for models with multiple\n * inputs and/or multiple outputs. Of the two items in the array, the\n * first is the input feature(s) and the second is the output target(s).\n * @param args A configuration object for the dataset-based evaluation.\n * @returns Loss and metric values as an Array of `Scalar` objects.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n async evaluateDataset(dataset, args) {\n this.makeTestFunction();\n return evaluateDataset(this, dataset, args);\n }\n /**\n * Get number of samples provided for training, evaluation or prediction.\n *\n * @param ins Input `tf.Tensor`.\n * @param batchSize Integer batch size, optional.\n * @param steps Total number of steps (batches of samples) before\n * declaring loop finished. Optional.\n * @param stepsName The public API's parameter name for `steps`.\n * @returns Number of samples provided.\n */\n checkNumSamples(ins, batchSize, steps, stepsName = 'steps') {\n let numSamples;\n if (steps != null) {\n numSamples = null;\n if (batchSize != null) {\n throw new ValueError(`If ${stepsName} is set, batchSize must be null or undefined.` +\n `Got batchSize = ${batchSize}`);\n }\n }\n else if (ins != null) {\n if (Array.isArray(ins)) {\n numSamples = ins[0].shape[0];\n }\n else {\n numSamples = ins.shape[0];\n }\n }\n else {\n throw new ValueError(`Either the input data should have a defined shape, or ` +\n `${stepsName} shoud be specified.`);\n }\n return numSamples;\n }\n /**\n * Execute internal tensors of the model with input data feed.\n * @param inputs Input data feed. Must match the inputs of the model.\n * @param outputs Names of the output tensors to be fetched. Must match\n * names of the SymbolicTensors that belong to the graph.\n * @returns Fetched values for `outputs`.\n */\n execute(inputs, outputs) {\n if (Array.isArray(outputs) && outputs.length === 0) {\n throw new ValueError('`outputs` is an empty Array, which is not allowed.');\n }\n const outputsIsArray = Array.isArray(outputs);\n const outputNames = (outputsIsArray ? outputs : [outputs]);\n const outputSymbolicTensors = this.retrieveSymbolicTensors(outputNames);\n // Format the input into a FeedDict.\n const feedDict = new FeedDict();\n if (inputs instanceof Tensor) {\n inputs = [inputs];\n }\n if (Array.isArray(inputs)) {\n if (inputs.length !== this.inputs.length) {\n throw new ValueError(`The number of inputs provided (${inputs.length}) ` +\n `does not match the number of inputs of this model ` +\n `(${this.inputs.length}).`);\n }\n for (let i = 0; i < this.inputs.length; ++i) {\n feedDict.add(this.inputs[i], inputs[i]);\n }\n }\n else {\n for (const input of this.inputs) {\n const tensorValue = inputs[input.name];\n if (tensorValue == null) {\n throw new ValueError(`No value is provided for the model's input ${input.name}`);\n }\n feedDict.add(input, tensorValue);\n }\n }\n // Run execution.\n const executeOutputs = execute(outputSymbolicTensors, feedDict);\n return outputsIsArray ? executeOutputs : executeOutputs[0];\n }\n /**\n * Retrieve the model's internal symbolic tensors from symbolic-tensor names.\n */\n retrieveSymbolicTensors(symbolicTensorNames) {\n const outputSymbolicTensors = pyListRepeat(null, symbolicTensorNames.length);\n let outputsRemaining = symbolicTensorNames.length;\n for (const layer of this.layers) {\n const layerOutputs = Array.isArray(layer.output) ? 
layer.output : [layer.output];\n const layerOutputNames = layerOutputs.map(output => output.name);\n for (let i = 0; i < symbolicTensorNames.length; ++i) {\n const index = layerOutputNames.indexOf(symbolicTensorNames[i]);\n if (index !== -1) {\n outputSymbolicTensors[i] = layerOutputs[index];\n outputsRemaining--;\n }\n if (outputsRemaining === 0) {\n break;\n }\n }\n if (outputsRemaining === 0) {\n break;\n }\n }\n if (outputsRemaining > 0) {\n const remainingNames = [];\n outputSymbolicTensors.forEach((tensor, i) => {\n if (tensor == null) {\n remainingNames.push(symbolicTensorNames[i]);\n }\n });\n throw new ValueError(`Cannot find SymbolicTensors for output name(s): ` +\n `${JSON.stringify(remainingNames)}`);\n }\n return outputSymbolicTensors;\n }\n /**\n * Helper method to loop over some data in batches.\n *\n * Porting Note: Not using the functional approach in the Python equivalent\n * due to the imperative backend.\n * Porting Note: Does not support step mode currently.\n *\n * @param ins: input data\n * @param batchSize: integer batch size.\n * @param verbose: verbosity model\n * @returns: Predictions as `tf.Tensor` (if a single output) or an `Array` of\n * `tf.Tensor` (if multipe outputs).\n */\n predictLoop(ins, batchSize = 32, verbose = false) {\n return tfc.tidy(() => {\n const numSamples = this.checkNumSamples(ins);\n if (verbose) {\n throw new NotImplementedError('Verbose predictLoop() is not implemented yet.');\n }\n // Sample-based predictions.\n // Porting Note: Tensor currently does not support sliced assignments as\n // in numpy, e.g., x[1:3] = y. Therefore we use concatenation while\n // iterating over the batches.\n const batches = makeBatches(numSamples, batchSize);\n const outsBatches = this.outputs.map(output => []);\n // TODO(cais): Can the scope() be pushed down inside the for loop?\n for (let batchIndex = 0; batchIndex < batches.length; ++batchIndex) {\n const batchOuts = tfc.tidy(() => {\n const batchStart = batches[batchIndex][0];\n const batchEnd = batches[batchIndex][1];\n // TODO(cais): Take care of the case of the last element is a flag for\n // training/test.\n const insBatch = sliceArrays(ins, batchStart, batchEnd);\n // Construct the feeds for execute();\n const feeds = [];\n if (Array.isArray(insBatch)) {\n for (let i = 0; i < insBatch.length; ++i) {\n feeds.push({ key: this.inputs[i], value: insBatch[i] });\n }\n }\n else {\n feeds.push({ key: this.inputs[0], value: insBatch });\n }\n const feedDict = new FeedDict(feeds);\n return execute(this.outputs, feedDict);\n });\n batchOuts.forEach((batchOut, i) => outsBatches[i].push(batchOut));\n }\n return singletonOrArray(outsBatches.map(batches => tfc.concat(batches, 0)));\n });\n }\n /**\n * Generates output predictions for the input samples.\n *\n * Computation is done in batches.\n *\n * Note: the \"step\" mode of predict() is currently not supported.\n * This is because the TensorFlow.js core backend is imperative only.\n *\n * ```js\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 1, inputShape: [10]})]\n * });\n * model.predict(tf.ones([8, 10]), {batchSize: 4}).print();\n * ```\n *\n * @param x The input data, as a Tensor, or an `Array` of `tf.Tensor`s if\n * the model has multiple inputs.\n * @param args A `ModelPredictArgs` object containing optional fields.\n *\n * @return Prediction results as a `tf.Tensor`(s).\n *\n * @exception ValueError In case of mismatch between the provided input data\n * and the model's expectations, or in case a stateful model receives a\n * number 
of samples that is not a multiple of the batch size.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n predict(x, args = {}) {\n const xsRank2OrHigher = ensureTensorsRank2OrHigher(x);\n checkInputData(xsRank2OrHigher, this.inputNames, this.feedInputShapes, false);\n try {\n // TODO(cais): Take care of stateful models.\n // if (this.stateful) ...\n // TODO(cais): Take care of the learning_phase boolean flag.\n // if (this.useLearningPhase) ...\n const batchSize = args.batchSize == null ? 32 : args.batchSize;\n checkBatchSize(batchSize);\n return this.predictLoop(xsRank2OrHigher, batchSize);\n }\n finally {\n disposeNewTensors(xsRank2OrHigher, x);\n }\n }\n /**\n * Returns predictions for a single batch of samples.\n *\n * ```js\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 1, inputShape: [10]})]\n * });\n * model.predictOnBatch(tf.ones([8, 10])).print();\n * ```\n * @param x: Input samples, as a Tensor (for models with exactly one\n * input) or an array of Tensors (for models with more than one input).\n * @return Tensor(s) of predictions\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n predictOnBatch(x) {\n checkInputData(x, this.inputNames, this.feedInputShapes, true);\n // TODO(cais): Take care of the learning_phase boolean flag.\n // if (this.useLearningPhase) ...\n const batchSize = (Array.isArray(x) ? x[0] : x).shape[0];\n return this.predictLoop(x, batchSize);\n }\n standardizeUserDataXY(x, y, checkBatchAxis = true, batchSize) {\n // TODO(cais): Add sampleWeight, classWeight\n if (this.optimizer_ == null) {\n throw new RuntimeError('You must compile a model before training/testing. Use ' +\n 'LayersModel.compile(modelCompileArgs).');\n }\n const outputShapes = [];\n for (let i = 0; i < this.feedOutputShapes.length; ++i) {\n const outputShape = this.feedOutputShapes[i];\n const lossFn = this.feedLossFns[i];\n if (lossFn === losses.sparseCategoricalCrossentropy) {\n outputShapes.push(outputShape.slice(0, outputShape.length - 1).concat([1]));\n }\n else {\n // Porting Note: Because of strong typing `lossFn` must be a function.\n outputShapes.push(outputShape);\n }\n }\n x = standardizeInputData(x, this.feedInputNames, this.feedInputShapes, false, 'input');\n y = standardizeInputData(y, this.feedOutputNames, outputShapes, false, 'target');\n // TODO(cais): Standardize sampleWeights & classWeights.\n checkArrayLengths(x, y, null);\n // TODO(cais): Check sampleWeights as well.\n checkLossAndTargetCompatibility(y, this.feedLossFns, this.feedOutputShapes);\n if (this.stateful && batchSize != null && batchSize > 0) {\n if (x[0].shape[0] % batchSize !== 0) {\n throw new ValueError(`In a stateful network, you should only pass inputs with a ` +\n `number of samples that is divisible by the batch size ` +\n `${batchSize}. 
Found: ${x[0].shape[0]} sample(s).`);\n }\n }\n return [x, y];\n }\n async standardizeUserData(x, y, sampleWeight, classWeight, checkBatchAxis = true, batchSize) {\n const [standardXs, standardYs] = this.standardizeUserDataXY(x, y, checkBatchAxis, batchSize);\n // TODO(cais): Handle sampleWeights.\n if (sampleWeight != null) {\n throw new Error('sample weight is not supported yet.');\n }\n let standardSampleWeights = null;\n if (classWeight != null) {\n const classWeights = standardizeClassWeights(classWeight, this.outputNames);\n standardSampleWeights = [];\n for (let i = 0; i < classWeights.length; ++i) {\n standardSampleWeights.push(await standardizeWeights(standardYs[i], null, classWeights[i]));\n }\n }\n // TODO(cais): Deal with the case of model.stateful == true.\n return [standardXs, standardYs, standardSampleWeights];\n }\n /**\n * Loop over some test data in batches.\n * @param f A Function returning a list of tensors.\n * @param ins Array of tensors to be fed to `f`.\n * @param batchSize Integer batch size or `null` / `undefined`.\n * @param verbose verbosity mode.\n * @param steps Total number of steps (batches of samples) before\n * declaring test finished. Ignored with the default value of `null` /\n * `undefined`.\n * @returns Array of Scalars.\n */\n testLoop(f, ins, batchSize, verbose = 0, steps) {\n return tfc.tidy(() => {\n const numSamples = this.checkNumSamples(ins, batchSize, steps, 'steps');\n const outs = [];\n if (verbose > 0) {\n throw new NotImplementedError('Verbose mode is not implemented yet.');\n }\n // TODO(cais): Use `indicesForConversionToDense' to prevent slow down.\n if (steps != null) {\n throw new NotImplementedError('steps mode in testLoop() is not implemented yet');\n }\n else {\n const batches = makeBatches(numSamples, batchSize);\n const indexArray = tensor1d(range(0, numSamples));\n for (let batchIndex = 0; batchIndex < batches.length; ++batchIndex) {\n const batchStart = batches[batchIndex][0];\n const batchEnd = batches[batchIndex][1];\n const batchIds = K.sliceAlongFirstAxis(indexArray, batchStart, batchEnd - batchStart);\n // TODO(cais): In ins, train flag can be a number, instead of an\n // Tensor? Do we need to handle this in tfjs-layers?\n const insBatch = sliceArraysByIndices(ins, batchIds);\n const batchOuts = f(insBatch);\n if (batchIndex === 0) {\n for (let i = 0; i < batchOuts.length; ++i) {\n outs.push(scalar(0));\n }\n }\n for (let i = 0; i < batchOuts.length; ++i) {\n const batchOut = batchOuts[i];\n outs[i] =\n tfc.add(outs[i], tfc.mul(batchEnd - batchStart, batchOut));\n }\n }\n for (let i = 0; i < outs.length; ++i) {\n outs[i] = tfc.div(outs[i], numSamples);\n }\n }\n return outs;\n });\n }\n getDedupedMetricsNames() {\n const outLabels = this.metricsNames;\n // Rename duplicated metrics names (can happen with an output layer\n // shared among multiple dataflows).\n const dedupedOutLabels = [];\n for (let i = 0; i < outLabels.length; ++i) {\n const label = outLabels[i];\n let newLabel = label;\n if (count(outLabels, label) > 1) {\n const dupIndex = count(outLabels.slice(0, i), label);\n newLabel += `_${dupIndex}`;\n }\n dedupedOutLabels.push(newLabel);\n }\n return dedupedOutLabels;\n }\n /**\n * Creates a function that performs the following actions:\n *\n * 1. computes the losses\n * 2. sums them to get the total loss\n * 3. call the optimizer computes the gradients of the LayersModel's\n * trainable weights w.r.t. the total loss and update the variables\n * 4. calculates the metrics\n * 5. 
returns the values of the losses and metrics.\n */\n makeTrainFunction() {\n return (data) => {\n const lossValues = [];\n const inputs = data.slice(0, this.inputs.length);\n const targets = data.slice(this.inputs.length, this.inputs.length + this.outputs.length);\n const sampleWeights = data.slice(this.inputs.length + this.outputs.length, this.inputs.length + this.outputs.length * 2);\n const metricsValues = [];\n // Create a function that computes the total loss based on the\n // inputs. This function is used for obtaining gradients through\n // backprop.\n const totalLossFunction = () => {\n const feeds = [];\n for (let i = 0; i < this.inputs.length; ++i) {\n feeds.push({ key: this.inputs[i], value: inputs[i] });\n }\n const feedDict = new FeedDict(feeds);\n const outputs = execute(this.outputs, feedDict, { 'training': true });\n // TODO(cais): Take care of the case of multiple outputs from a\n // single layer?\n let totalLoss;\n for (let i = 0; i < this.lossFunctions.length; ++i) {\n const lossFunction = this.lossFunctions[i];\n let loss = lossFunction(targets[i], outputs[i]);\n if (sampleWeights[i] != null) {\n loss = computeWeightedLoss(loss, sampleWeights[i]);\n }\n // TODO(cais): push Scalar instead.\n const meanLoss = tfc.mean(loss);\n // TODO(cais): Use a scope() instead, to avoid ownership.\n lossValues.push(meanLoss);\n if (i === 0) {\n totalLoss = loss;\n }\n else {\n totalLoss = tfc.add(totalLoss, loss);\n }\n }\n // Compute the metrics.\n // TODO(cais): These should probably be calculated outside\n // totalLossFunction to benefit speed?\n for (let i = 0; i < this.metricsTensors.length; ++i) {\n let weightedMetric;\n if (this.outputs.length > 1 && i < this.outputs.length) {\n weightedMetric = lossValues[i];\n }\n else {\n const metric = this.metricsTensors[i][0];\n const outputIndex = this.metricsTensors[i][1];\n weightedMetric =\n tfc.mean(metric(targets[outputIndex], outputs[outputIndex]));\n }\n tfc.keep(weightedMetric);\n // TODO(cais): Use a scope() instead, to avoid ownership.\n metricsValues.push(weightedMetric);\n }\n totalLoss = tfc.mean(totalLoss);\n // Add regularizer penalties.\n this.calculateLosses().forEach(regularizerLoss => {\n totalLoss = tfc.add(totalLoss, regularizerLoss);\n });\n return totalLoss;\n };\n const variables = this.collectedTrainableWeights.map(param => param.read());\n const returnCost = true;\n const totalLossValue = this.optimizer_.minimize(totalLossFunction, returnCost, variables);\n return [totalLossValue].concat(metricsValues);\n };\n }\n /**\n * Create a function which, when invoked with an array of `tf.Tensor`s as a\n * batch of inputs, returns the prespecified loss and metrics of the model\n * under the batch of input data.\n */\n makeTestFunction() {\n this.testFunction = (data) => {\n return tfc.tidy(() => {\n const valOutputs = [];\n let totalLoss;\n const inputs = data.slice(0, this.inputs.length);\n const targets = data.slice(this.inputs.length, this.inputs.length + this.outputs.length);\n const feeds = [];\n for (let i = 0; i < this.inputs.length; ++i) {\n feeds.push({ key: this.inputs[i], value: inputs[i] });\n }\n const feedDict = new FeedDict(feeds);\n const outputs = execute(this.outputs, feedDict);\n // Compute total loss.\n for (let i = 0; i < this.lossFunctions.length; ++i) {\n const lossFunction = this.lossFunctions[i];\n // TODO(cais): Add sample weighting and replace the simple\n // averaging.\n const loss = tfc.mean(lossFunction(targets[i], outputs[i]));\n if (i === 0) {\n totalLoss = loss;\n }\n else {\n totalLoss = 
tfc.add(totalLoss, loss);\n }\n valOutputs.push(totalLoss);\n }\n // Compute the metrics.\n for (let i = 0; i < this.metricsTensors.length; ++i) {\n const metric = this.metricsTensors[i][0];\n const outputIndex = this.metricsTensors[i][1];\n // TODO(cais): Replace K.mean() with a proper weighting function.\n const meanMetric = tfc.mean(metric(targets[outputIndex], outputs[outputIndex]));\n valOutputs.push(meanMetric);\n }\n return valOutputs;\n });\n };\n }\n /**\n * Trains the model for a fixed number of epochs (iterations on a\n * dataset).\n *\n * ```js\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 1, inputShape: [10]})]\n * });\n * model.compile({optimizer: 'sgd', loss: 'meanSquaredError'});\n * for (let i = 1; i < 5 ; ++i) {\n * const h = await model.fit(tf.ones([8, 10]), tf.ones([8, 1]), {\n * batchSize: 4,\n * epochs: 3\n * });\n * console.log(\"Loss after Epoch \" + i + \" : \" + h.history.loss[0]);\n * }\n * ```\n *\n * @param x `tf.Tensor` of training data, or an array of `tf.Tensor`s if the\n * model has multiple inputs. If all inputs in the model are named, you\n * can also pass a dictionary mapping input names to `tf.Tensor`s.\n * @param y `tf.Tensor` of target (label) data, or an array of `tf.Tensor`s if\n * the model has multiple outputs. If all outputs in the model are named,\n * you can also pass a dictionary mapping output names to `tf.Tensor`s.\n * @param args A `ModelFitArgs`, containing optional fields.\n *\n * @return A `History` instance. Its `history` attribute contains all\n * information collected during training.\n *\n * @exception ValueError In case of mismatch between the provided input\n * data and what the model expects.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n async fit(x, y, args = {}) {\n return fitTensors(this, x, y, args);\n }\n // TODO(cais): Add code snippet below when it's possible to instantiate\n // actual dataset objects.\n /**\n * Trains the model using a dataset object.\n *\n * @param dataset A dataset object. Its `iterator()` method is expected\n * to generate a dataset iterator object, the `next()` method of which\n * is expected to produce data batches for training. The return value\n * of the `next()` call ought to contain a boolean `done` field and a\n * `value` field. The `value` field is expected to be an array of two\n * `tf.Tensor`s or an array of two nested `tf.Tensor` structures. The former\n * case is for models with exactly one input and one output (e.g..\n * a sequential model). The latter case is for models with multiple\n * inputs and/or multiple outputs.\n * Of the two items in the array, the first is the input feature(s) and\n * the second is the output target(s).\n * @param args A `ModelFitDatasetArgs`, containing optional fields.\n *\n * @return A `History` instance. Its `history` attribute contains all\n * information collected during training.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n async fitDataset(dataset, args) {\n return fitDataset(this, dataset, args);\n }\n /**\n * Runs a single gradient update on a single batch of data.\n *\n * This method differs from `fit()` and `fitDataset()` in the following\n * regards:\n * - It operates on exactly one batch of data.\n * - It returns only the loss and matric values, instead of\n * returning the batch-by-batch loss and metric values.\n * - It doesn't support fine-grained options such as verbosity and\n * callbacks.\n *\n * @param x Input data. 
It could be one of the following:\n * - A `tf.Tensor`, or an Array of `tf.Tensor`s (in case the model has\n * multiple inputs).\n * - An Object mapping input names to corresponding `tf.Tensor` (if the\n * model has named inputs).\n * @param y Target darta. It could be either a `tf.Tensor` a multiple\n * `tf.Tensor`s. It should be consistent with `x`.\n * @returns Training loss or losses (in case the model has\n * multiple outputs), along with metrics (if any), as numbers.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n async trainOnBatch(x, y) {\n // TODO(cais): Support sampleWeight and classWeight.\n // TODO(cais): Support Dataset objects.\n const standardizeOut = await this.standardizeUserData(x, y);\n const inputs = standardizeOut[0];\n const targets = standardizeOut[1];\n const trainFunction = this.makeTrainFunction();\n const losses = trainFunction(inputs.concat(targets));\n const lossValues = [];\n for (const loss of losses) {\n const v = await loss.data();\n lossValues.push(v[0]);\n }\n tfc.dispose(losses);\n return singletonOrArray(lossValues);\n }\n /**\n * Extract weight values of the model.\n *\n * @param config: An instance of `io.SaveConfig`, which specifies\n * model-saving options such as whether only trainable weights are to be\n * saved.\n * @returns A `NamedTensorMap` mapping original weight names (i.e.,\n * non-uniqueified weight names) to their values.\n */\n getNamedWeights(config) {\n const namedWeights = [];\n const trainableOnly = config != null && config.trainableOnly;\n const weights = trainableOnly ? this.trainableWeights : this.weights;\n const weightValues = this.getWeights(trainableOnly);\n for (let i = 0; i < weights.length; ++i) {\n if (trainableOnly && !weights[i].trainable) {\n // Optionally skip non-trainable weights.\n continue;\n }\n namedWeights.push({ name: weights[i].originalName, tensor: weightValues[i] });\n }\n return namedWeights;\n }\n /**\n * Setter used for force stopping of LayersModel.fit() (i.e., training).\n *\n * Example:\n *\n * ```js\n * const input = tf.input({shape: [10]});\n * const output = tf.layers.dense({units: 1}).apply(input);\n * const model = tf.model({inputs: [input], outputs: [output]});\n * model.compile({loss: 'meanSquaredError', optimizer: 'sgd'});\n * const xs = tf.ones([8, 10]);\n * const ys = tf.zeros([8, 1]);\n *\n * const history = await model.fit(xs, ys, {\n * epochs: 10,\n * callbacks: {\n * onEpochEnd: async (epoch, logs) => {\n * if (epoch === 2) {\n * model.stopTraining = true;\n * }\n * }\n * }\n * });\n *\n * // There should be only 3 values in the loss array, instead of 10\n * values,\n * // due to the stopping after 3 epochs.\n * console.log(history.history.loss);\n * ```\n */\n set stopTraining(stop) {\n this.stopTraining_ = stop;\n }\n get stopTraining() {\n return this.stopTraining_;\n }\n get optimizer() {\n return this.optimizer_;\n }\n set optimizer(optimizer) {\n if (this.optimizer_ !== optimizer) {\n this.optimizer_ = optimizer;\n this.isOptimizerOwned = false;\n }\n }\n dispose() {\n const result = super.dispose();\n if (result.refCountAfterDispose === 0 && this.optimizer != null &&\n this.isOptimizerOwned) {\n const numTensorsBeforeOptmizerDisposal = tfc.memory().numTensors;\n this.optimizer_.dispose();\n result.numDisposedVariables +=\n numTensorsBeforeOptmizerDisposal - tfc.memory().numTensors;\n }\n return result;\n }\n getLossIdentifiers() {\n let lossNames;\n if (typeof this.loss === 'string') {\n lossNames = toSnakeCase(this.loss);\n }\n else if (Array.isArray(this.loss)) {\n 
for (const loss of this.loss) {\n if (typeof loss !== 'string') {\n throw new Error('Serialization of non-string loss is not supported.');\n }\n }\n lossNames = this.loss.map(name => toSnakeCase(name));\n }\n else {\n const outputNames = Object.keys(this.loss);\n lossNames = {};\n const losses = this.loss;\n for (const outputName of outputNames) {\n if (typeof losses[outputName] === 'string') {\n lossNames[outputName] =\n toSnakeCase(losses[outputName]);\n }\n else {\n throw new Error('Serialization of non-string loss is not supported.');\n }\n }\n }\n return lossNames;\n }\n getMetricIdentifiers() {\n if (typeof this.metrics === 'string' ||\n typeof this.metrics === 'function') {\n return [toSnakeCase(Metrics.getLossOrMetricName(this.metrics))];\n }\n else if (Array.isArray(this.metrics)) {\n return this.metrics.map(metric => toSnakeCase(Metrics.getLossOrMetricName(metric)));\n }\n else {\n const metricsIdentifiers = {};\n for (const key in this.metrics) {\n metricsIdentifiers[key] =\n toSnakeCase(Metrics.getLossOrMetricName(this.metrics[key]));\n }\n return metricsIdentifiers;\n }\n }\n getTrainingConfig() {\n return {\n loss: this.getLossIdentifiers(),\n metrics: this.getMetricIdentifiers(),\n optimizer_config: {\n class_name: this.optimizer.getClassName(),\n config: this.optimizer.getConfig()\n }\n };\n // TODO(cais): Add weight_metrics when they are supported.\n // TODO(cais): Add sample_weight_mode when it's supported.\n // TODO(cais): Add loss_weights when it's supported.\n }\n loadTrainingConfig(trainingConfig) {\n if (trainingConfig.weighted_metrics != null) {\n throw new Error('Loading weight_metrics is not supported yet.');\n }\n if (trainingConfig.loss_weights != null) {\n throw new Error('Loading loss_weights is not supported yet.');\n }\n if (trainingConfig.sample_weight_mode != null) {\n throw new Error('Loading sample_weight_mode is not supported yet.');\n }\n const tsConfig = convertPythonicToTs(trainingConfig.optimizer_config);\n const optimizer = deserialize(tsConfig);\n let loss;\n if (typeof trainingConfig.loss === 'string') {\n loss = toCamelCase(trainingConfig.loss);\n }\n else if (Array.isArray(trainingConfig.loss)) {\n loss = trainingConfig.loss.map(lossEntry => toCamelCase(lossEntry));\n }\n else if (trainingConfig.loss != null) {\n loss = {};\n for (const key in trainingConfig.loss) {\n loss[key] = toCamelCase(trainingConfig.loss[key]);\n }\n }\n let metrics;\n if (Array.isArray(trainingConfig.metrics)) {\n metrics = trainingConfig.metrics.map(metric => toCamelCase(metric));\n }\n else if (trainingConfig.metrics != null) {\n metrics = {};\n for (const key in trainingConfig.metrics) {\n metrics[key] = toCamelCase(trainingConfig.metrics[key]);\n }\n }\n this.compile({ loss, metrics, optimizer });\n }\n /**\n * Save the configuration and/or weights of the LayersModel.\n *\n * An `IOHandler` is an object that has a `save` method of the proper\n * signature defined. The `save` method manages the storing or\n * transmission of serialized data (\"artifacts\") that represent the\n * model's topology and weights onto or via a specific medium, such as\n * file downloads, local storage, IndexedDB in the web browser and HTTP\n * requests to a server. TensorFlow.js provides `IOHandler`\n * implementations for a number of frequently used saving mediums, such as\n * `tf.io.browserDownloads` and `tf.io.browserLocalStorage`. 
See `tf.io`\n * for more details.\n *\n * This method also allows you to refer to certain types of `IOHandler`s\n * as URL-like string shortcuts, such as 'localstorage://' and\n * 'indexeddb://'.\n *\n * Example 1: Save `model`'s topology and weights to browser [local\n * storage](https://developer.mozilla.org/en-US/docs/Web/API/Window/localStorage);\n * then load it back.\n *\n * ```js\n * const model = tf.sequential(\n * {layers: [tf.layers.dense({units: 1, inputShape: [3]})]});\n * console.log('Prediction from original model:');\n * model.predict(tf.ones([1, 3])).print();\n *\n * const saveResults = await model.save('localstorage://my-model-1');\n *\n * const loadedModel = await tf.loadLayersModel('localstorage://my-model-1');\n * console.log('Prediction from loaded model:');\n * loadedModel.predict(tf.ones([1, 3])).print();\n * ```\n *\n * Example 2. Saving `model`'s topology and weights to browser\n * [IndexedDB](https://developer.mozilla.org/en-US/docs/Web/API/IndexedDB_API);\n * then load it back.\n *\n * ```js\n * const model = tf.sequential(\n * {layers: [tf.layers.dense({units: 1, inputShape: [3]})]});\n * console.log('Prediction from original model:');\n * model.predict(tf.ones([1, 3])).print();\n *\n * const saveResults = await model.save('indexeddb://my-model-1');\n *\n * const loadedModel = await tf.loadLayersModel('indexeddb://my-model-1');\n * console.log('Prediction from loaded model:');\n * loadedModel.predict(tf.ones([1, 3])).print();\n * ```\n *\n * Example 3. Saving `model`'s topology and weights as two files\n * (`my-model-1.json` and `my-model-1.weights.bin`) downloaded from\n * browser.\n *\n * ```js\n * const model = tf.sequential(\n * {layers: [tf.layers.dense({units: 1, inputShape: [3]})]});\n * const saveResults = await model.save('downloads://my-model-1');\n * ```\n *\n * Example 4. 
Send `model`'s topology and weights to an HTTP server.\n * See the documentation of `tf.io.http` for more details\n * including specifying request parameters and implementation of the\n * server.\n *\n * ```js\n * const model = tf.sequential(\n * {layers: [tf.layers.dense({units: 1, inputShape: [3]})]});\n * const saveResults = await model.save('http://my-server/model/upload');\n * ```\n *\n * @param handlerOrURL An instance of `IOHandler` or a URL-like,\n * scheme-based string shortcut for `IOHandler`.\n * @param config Options for saving the model.\n * @returns A `Promise` of `SaveResult`, which summarizes the result of\n * the saving, such as byte sizes of the saved artifacts for the model's\n * topology and weight values.\n *\n * @doc {heading: 'Models', subheading: 'Classes', ignoreCI: true}\n */\n async save(handlerOrURL, config) {\n if (typeof handlerOrURL === 'string') {\n const handlers = io.getSaveHandlers(handlerOrURL);\n if (handlers.length === 0) {\n throw new ValueError(`Cannot find any save handlers for URL '${handlerOrURL}'`);\n }\n else if (handlers.length > 1) {\n throw new ValueError(`Found more than one (${handlers.length}) save handlers for ` +\n `URL '${handlerOrURL}'`);\n }\n handlerOrURL = handlers[0];\n }\n if (handlerOrURL.save == null) {\n throw new ValueError('LayersModel.save() cannot proceed because the IOHandler ' +\n 'provided does not have the `save` attribute defined.');\n }\n const weightDataAndSpecs = await io.encodeWeights(this.getNamedWeights(config));\n const returnString = false;\n const unusedArg = null;\n const modelConfig = this.toJSON(unusedArg, returnString);\n const modelArtifacts = {\n modelTopology: modelConfig,\n format: LAYERS_MODEL_FORMAT_NAME,\n generatedBy: `TensorFlow.js tfjs-layers v${version}`,\n convertedBy: null,\n };\n const includeOptimizer = config == null ? false : config.includeOptimizer;\n if (includeOptimizer && this.optimizer != null) {\n modelArtifacts.trainingConfig = this.getTrainingConfig();\n const weightType = 'optimizer';\n const { data: optimizerWeightData, specs: optimizerWeightSpecs } = await io.encodeWeights(await this.optimizer.getWeights(), weightType);\n weightDataAndSpecs.specs.push(...optimizerWeightSpecs);\n weightDataAndSpecs.data = io.concatenateArrayBuffers([weightDataAndSpecs.data, optimizerWeightData]);\n }\n if (this.userDefinedMetadata != null) {\n // Check serialized size of user-defined metadata.\n const checkSize = true;\n checkUserDefinedMetadata(this.userDefinedMetadata, this.name, checkSize);\n modelArtifacts.userDefinedMetadata = this.userDefinedMetadata;\n }\n modelArtifacts.weightData = weightDataAndSpecs.data;\n modelArtifacts.weightSpecs = weightDataAndSpecs.specs;\n return handlerOrURL.save(modelArtifacts);\n }\n /**\n * Set user-defined metadata.\n *\n * The set metadata will be serialized together with the topology\n * and weights of the model during `save()` calls.\n *\n * @param setUserDefinedMetadata\n */\n setUserDefinedMetadata(userDefinedMetadata) {\n checkUserDefinedMetadata(userDefinedMetadata, this.name);\n this.userDefinedMetadata = userDefinedMetadata;\n }\n /**\n * Get user-defined metadata.\n *\n * The metadata is supplied via one of the two routes:\n * 1. By calling `setUserDefinedMetadata()`.\n * 2. 
Loaded during model loading (if the model is constructed\n * via `tf.loadLayersModel()`.)\n *\n * If no user-defined metadata is available from either of the\n * two routes, this function will return `undefined`.\n */\n getUserDefinedMetadata() {\n return this.userDefinedMetadata;\n }\n}\n// The class name is 'Model' rather than 'LayersModel' for backwards\n// compatibility since this class name shows up in the serialization format.\n/** @nocollapse */\nLayersModel.className = 'Model';\nserialization.registerClass(LayersModel);\n/**\n * A `tf.Functional` is an alias to `tf.LayersModel`.\n *\n * See also:\n * `tf.LayersModel`, `tf.Sequential`, `tf.loadLayersModel`.\n */\n/** @doc {heading: 'Models', subheading: 'Classes'} */\nexport class Functional extends LayersModel {\n}\nFunctional.className = 'Functional';\nserialization.registerClass(Functional);\n//# sourceMappingURL=training.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* Original source keras/models.py */\nimport { dispose, io, serialization, util } from '@tensorflow/tfjs-core';\nimport { getUid } from './backend/state';\nimport { Input } from './engine/input_layer';\nimport { getSourceInputs, Node } from './engine/topology';\nimport { LayersModel } from './engine/training';\nimport { NotImplementedError, RuntimeError, ValueError } from './errors';\nimport { deserialize } from './layers/serialization';\nimport * as generic_utils from './utils/generic_utils';\nimport { convertPythonicToTs } from './utils/serialization_utils';\nimport { getExactlyOneShape } from './utils/types_utils';\n/**\n * Parses a JSON model configuration file and returns a model instance.\n *\n * ```js\n * // This example shows how to serialize a model using `toJSON()` and\n * // deserialize it as another model using `tf.models.modelFromJSON()`.\n * // Note: this example serializes and deserializes only the topology\n * // of the model; the weights of the loaded model will be different\n * // from those of the the original model, due to random weight\n * // initialization.\n * // To load the topology and weights of a model, use `tf.loadLayersModel()`.\n * const model1 = tf.sequential();\n * model1.add(tf.layers.repeatVector({inputShape: [2], n: 4}));\n * // Serialize `model1` as a JSON object.\n * const model1JSON = model1.toJSON(null, false);\n * model1.summary();\n *\n * const model2 = await tf.models.modelFromJSON(model1JSON);\n * model2.summary();\n * ```\n *\n * @param modelAndWeightsConfig JSON object or string encoding a model and\n * weights configuration. 
It can also be only the topology JSON of the\n * model, in which case the weights will not be loaded.\n * @param custom_objects Optional dictionary mapping names\n * (strings) to custom classes or functions to be\n * considered during deserialization.\n * @returns A TensorFlow.js Layers `tf.LayersModel` instance (uncompiled).\n */\nexport async function modelFromJSON(modelAndWeightsConfig, customObjects) {\n if (!('modelTopology' in modelAndWeightsConfig)) {\n modelAndWeightsConfig = { modelTopology: modelAndWeightsConfig };\n }\n modelAndWeightsConfig = modelAndWeightsConfig;\n let modelTopology = modelAndWeightsConfig.modelTopology;\n if (modelTopology['model_config'] != null) {\n // If the model-topology JSON contains a 'model_config' field, then it is\n // a full model JSON (e.g., from `keras.Model.save()`), which contains\n // not only the model's architecture in its 'model_config' field, but\n // additional information such as the model's optimizer. We use only the\n // 'model_config' field currently.\n modelTopology = modelTopology['model_config'];\n }\n const tsConfig = convertPythonicToTs(modelTopology);\n const model = deserialize(tsConfig, customObjects);\n if (modelAndWeightsConfig.weightsManifest != null) {\n // Load the weight values keyed by the original tensor names in the model\n // file that was loaded. These should match the keys of the weight\n // manifest.\n const weightValues = await io.loadWeights(modelAndWeightsConfig.weightsManifest, modelAndWeightsConfig.pathPrefix, model.weights.map(weight => weight.originalName));\n // Map the weights to the unique tensor names generated during model loading\n const uniqueWeightValues = {};\n for (const weight of model.weights) {\n uniqueWeightValues[weight.originalName] =\n weightValues[weight.originalName];\n }\n model.loadWeights(uniqueWeightValues);\n // Dispose temporary weight values.\n dispose(weightValues);\n }\n return model;\n}\n/**\n * Load a model, including its topology and optionally weights. See the\n * Tutorial named \"How to import a Keras Model\" for usage examples.\n *\n * Example 1: Save `model`'s topology and weights to browser [local\n * storage](https://developer.mozilla.org/en-US/docs/Web/API/Window/localStorage);\n * then load it back.\n *\n * ```js\n * const model = tf.sequential(\n * {layers: [tf.layers.dense({units: 1, inputShape: [3]})]});\n * console.log('Prediction from original model:');\n * model.predict(tf.ones([1, 3])).print();\n *\n * const saveResults = await model.save('localstorage://my-model-1');\n *\n * const loadedModel = await tf.loadLayersModel('localstorage://my-model-1');\n * console.log('Prediction from loaded model:');\n * loadedModel.predict(tf.ones([1, 3])).print();\n * ```\n *\n * Example 2. Saving `model`'s topology and weights to browser\n * [IndexedDB](https://developer.mozilla.org/en-US/docs/Web/API/IndexedDB_API);\n * then load it back.\n *\n * ```js\n * const model = tf.sequential(\n * {layers: [tf.layers.dense({units: 1, inputShape: [3]})]});\n * console.log('Prediction from original model:');\n * model.predict(tf.ones([1, 3])).print();\n *\n * const saveResults = await model.save('indexeddb://my-model-1');\n *\n * const loadedModel = await tf.loadLayersModel('indexeddb://my-model-1');\n * console.log('Prediction from loaded model:');\n * loadedModel.predict(tf.ones([1, 3])).print();\n * ```\n *\n * Example 3. 
Load a model from user-selected files from HTML\n * [file input\n * elements](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/input/file).\n *\n * ```js\n * // Note: this code snippet will not work without the HTML elements in the\n * // page\n * const jsonUpload = document.getElementById('json-upload');\n * const weightsUpload = document.getElementById('weights-upload');\n *\n * const model = await tf.loadLayersModel(\n * tf.io.browserFiles([jsonUpload.files[0], weightsUpload.files[0]]));\n * ```\n *\n * Example 4. Load a model from an HTTP server.\n *\n * ```js\n * const model = await\n * tf.loadLayersModel('https://storage.googleapis.com/tfjs-models/tfjs/iris_v1/model.json');\n * model.summary();\n * ```\n *\n * @param pathOrIOHandler Can be either of the two formats\n * 1. A string path to the `ModelAndWeightsConfig` JSON describing\n * the model in the canonical TensorFlow.js format. This path will be\n * interpreted as a relative HTTP path, to which `fetch` will be used to\n * request the model topology and weight manifest JSON.\n * The content of the JSON file is assumed to be a JSON object with the\n * following fields and values:\n * - 'modelTopology': A JSON object that can be either of:\n * 1. a model architecture JSON consistent with the format of the return\n * value of `keras.Model.to_json()`\n * 2. a full model JSON in the format of `keras.models.save_model()`.\n * - 'weightsManifest': A TensorFlow.js weights manifest.\n * See the Python converter function `save_model()` for more details.\n * It is also assumed that model weights can be accessed from relative\n * paths described by the `paths` fields in weights manifest.\n * 2. An `tf.io.IOHandler` object that loads model artifacts with its `load`\n * method.\n * @param options Optional configuration arguments for the model loading,\n * including:\n * - `strict`: Require that the provided weights exactly match those required\n * by the layers. Default true. Passing false means that both extra\n * weights and missing weights will be silently ignored.\n * - `onProgress`: A progress callback of the form:\n * `(fraction: number) => void`. 
This callback can be used to monitor the\n * model-loading process.\n * @returns A `Promise` of `tf.LayersModel`, with the topology and weights\n * loaded.\n */\nexport async function loadLayersModelInternal(pathOrIOHandler, options) {\n if (options == null) {\n options = {};\n }\n if (typeof pathOrIOHandler === 'string') {\n const handlers = io.getLoadHandlers(pathOrIOHandler, options);\n if (handlers.length === 0) {\n // For backward compatibility: if no load handler can be found,\n // assume it is a relative http path.\n // TODO(cais): Reformat the args into a single `LoadOptions` once the core\n // is refactored.\n handlers.push(io.browserHTTPRequest(pathOrIOHandler, options));\n }\n else if (handlers.length > 1) {\n throw new ValueError(`Found more than one (${handlers.length}) load handlers for ` +\n `URL '${pathOrIOHandler}'`);\n }\n pathOrIOHandler = handlers[0];\n }\n return loadLayersModelFromIOHandler(pathOrIOHandler, undefined, options);\n}\n/**\n * Load a model and optionally its weights, using an IOHandler object.\n *\n * @param handler The instance of `IOHandler` to be used during the model\n * loading.\n * @param customObjects Any optional custom objects to be used during model\n * loading.\n * @param strict Whether the weight loading will be done in strict mode.\n * Default: `true`.\n */\nexport async function loadLayersModelFromIOHandler(handler, customObjects, options) {\n if (options == null) {\n options = {};\n }\n if (handler.load == null) {\n throw new ValueError('Cannot proceed with model loading because the IOHandler provided ' +\n 'does not have the `load` method implemented.');\n }\n const artifacts = await handler.load();\n let modelTopology = artifacts.modelTopology;\n if (modelTopology['model_config'] != null) {\n modelTopology = modelTopology['model_config'];\n }\n const strict = options.strict == null ? true : options.strict;\n // If weights are provided and the weight-loading mode is strict, use\n // fast weight initialization. This skips costly initializers such as\n // 'orthogonal' and saves unnecessary computation in cases where\n // the initialized weight values will immediately be overwritten by\n // loaded weight values.\n const fastWeightInit = artifacts.weightData != null && artifacts.weightSpecs != null && strict;\n const model = deserialize(convertPythonicToTs(modelTopology), customObjects, fastWeightInit);\n const trainingConfig = artifacts.trainingConfig;\n if (trainingConfig != null) {\n model.loadTrainingConfig(trainingConfig);\n }\n if (artifacts.userDefinedMetadata != null) {\n model.setUserDefinedMetadata(artifacts.userDefinedMetadata);\n }\n // If weightData is present, load the weights into the model.\n if (artifacts.weightData != null) {\n // Loading weights requires weightSpecs.\n if (artifacts.weightSpecs == null) {\n throw new ValueError('LayersModel artifacts contains weight data, but not weight specs. 
' +\n 'Therefore loading of weights cannot proceed.');\n }\n const { modelWeights, optimizerWeights } = decodeModelAndOptimizerWeights(artifacts.weightData, artifacts.weightSpecs);\n model.loadWeights(modelWeights, strict);\n if (model.optimizer != null && optimizerWeights.length > 0) {\n await model.optimizer.setWeights(optimizerWeights);\n }\n // Dispose temporary weight values.\n dispose(modelWeights);\n dispose(optimizerWeights.map(w => w.tensor));\n }\n return model;\n}\nfunction decodeModelAndOptimizerWeights(buffer, specs) {\n const name2Tensor = io.decodeWeights(buffer, specs);\n const modelWeights = {};\n const optimizerWeights = [];\n specs.forEach(spec => {\n if (spec.group === 'optimizer') {\n optimizerWeights.push({ name: spec.name, tensor: name2Tensor[spec.name] });\n }\n else {\n modelWeights[spec.name] = name2Tensor[spec.name];\n }\n });\n return { modelWeights, optimizerWeights };\n}\n/**\n * A model with a stack of layers, feeding linearly from one to the next.\n *\n * `tf.sequential` is a factory function that creates an instance of\n * `tf.Sequential`.\n *\n * ```js\n * // Define a model for linear regression.\n * const model = tf.sequential();\n * model.add(tf.layers.dense({units: 1, inputShape: [1]}));\n *\n * // Prepare the model for training: Specify the loss and the optimizer.\n * model.compile({loss: 'meanSquaredError', optimizer: 'sgd'});\n *\n * // Generate some synthetic data for training.\n * const xs = tf.tensor2d([1, 2, 3, 4], [4, 1]);\n * const ys = tf.tensor2d([1, 3, 5, 7], [4, 1]);\n *\n * // Train the model using the data then do inference on a data point the\n * // model hasn't seen:\n * await model.fit(xs, ys);\n * model.predict(tf.tensor2d([5], [1, 1])).print();\n * ```\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\nexport class Sequential extends LayersModel {\n constructor(args) {\n super({ inputs: [], outputs: [] });\n args = args || {};\n this.trainable = true;\n this.built = false;\n // Set model name.\n this.name = (args.name != null) ? 
args.name : getUid('sequential_');\n // Add to the model any layers passed to the constructor.\n if (args.layers != null) {\n for (const layer of args.layers) {\n this.add(layer);\n }\n }\n }\n // Helper function to Sequential.add Throws if the new output shape will be\n // invalid.\n checkShape(layer) {\n const shape = layer.inboundNodes[0].outputTensors[0].shape;\n if (shape.some(x => x < 0)) {\n throw new ValueError('Negative dimension size caused by adding layer ' +\n `${layer.name} with input shape [` +\n `${layer.inboundNodes[0].inputTensors[0].shape}]`);\n }\n }\n /**\n * Adds a layer instance on top of the layer stack.\n *\n * ```js\n * const model = tf.sequential();\n * model.add(tf.layers.dense({units: 8, inputShape: [1]}));\n * model.add(tf.layers.dense({units: 4, activation: 'relu6'}));\n * model.add(tf.layers.dense({units: 1, activation: 'relu6'}));\n * // Note that the untrained model is random at this point.\n * model.predict(tf.randomNormal([10, 1])).print();\n * ```\n * @param layer Layer instance.\n *\n * @exception ValueError In case the `layer` argument does not know its\n * input shape.\n * @exception ValueError In case the `layer` argument has multiple output\n * tensors, or is already connected somewhere else (forbidden in\n * `Sequential` models).\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n add(layer) {\n const isLayerModelInstance = layer instanceof Sequential || layer instanceof LayersModel;\n let modelLayer;\n if (isLayerModelInstance) {\n modelLayer = layer;\n if (modelLayer.outputs.length !== 1) {\n throw new ValueError('All layers in a Sequential model ' +\n 'should have a single output tensor. ' +\n 'For multi-output layers, ' +\n 'use the functional API.');\n }\n if (modelLayer.inputs.length !== 1) {\n throw new ValueError('All layers in a Sequential model ' +\n 'should have a single input tensor. ' +\n 'For multi-input layers, ' +\n 'use the functional API.');\n }\n }\n if (this.outputs.length === 0) {\n // first layer in model: check that it is an input layer\n if (layer.inboundNodes.length === 0) {\n // create an input layer\n if (layer.batchInputShape == null) {\n throw new ValueError('The first layer in a Sequential model must ' +\n 'get an `inputShape` or `batchInputShape` argument.');\n }\n // Instantiate the input layer.\n const x = Input({\n batchShape: layer.batchInputShape,\n dtype: layer.dtype,\n name: layer.name + '_input'\n });\n // This will build the current layer and create the node connecting\n // the current layer to the input layer we just created.\n layer.apply(x);\n }\n if (isLayerModelInstance) {\n this.outputs = modelLayer.outputs;\n this.inputs = modelLayer.inputs;\n }\n else {\n if (layer.inboundNodes.length !== 1) {\n throw new ValueError('A layer added to a Sequential model must not already be ' +\n `connected somewhere else. LayersModel received layer ${layer.name} ` +\n `which has ${layer.inboundNodes.length} pre-existing inbound ` +\n 'connections.');\n }\n if (layer.inboundNodes[0].outputTensors.length !== 1) {\n throw new ValueError('All layers in a Sequential model ' +\n 'should have a single output tensor. 
' +\n 'For multi-output layers, ' +\n 'use the functional API.');\n }\n this.checkShape(layer);\n this.outputs = [layer.inboundNodes[0].outputTensors[0]];\n this.inputs = getSourceInputs(this.outputs[0]);\n }\n this.inboundNodes = [];\n // We create an input node, which we will keep updated\n // as we add more layers.\n // (This call has side effects.)\n // tslint:disable-next-line:no-unused-expression\n new Node({\n outboundLayer: this,\n inboundLayers: [],\n nodeIndices: [],\n tensorIndices: [],\n inputTensors: this.inputs,\n outputTensors: this.outputs,\n // no model-level masking for now\n inputMasks: generic_utils.pyListRepeat(null, this.inputs.length),\n outputMasks: [null],\n inputShapes: this.inputs.map(x => x.shape),\n outputShapes: this.outputs[0].shape\n });\n }\n else {\n const outputTensor = layer.apply(this.outputs[0]);\n if (Array.isArray(outputTensor)) {\n throw new TypeError('All layers in a Sequential model ' +\n 'should have a single output tensor. ' +\n 'For multi-output layers, ' +\n 'use the functional API.');\n }\n this.checkShape(layer);\n this.outputs = [outputTensor];\n // update self.inbound_nodes\n this.inboundNodes[0].outputTensors = this.outputs;\n this.inboundNodes[0].outputShapes = [this.outputs[0].shape];\n }\n this.layers.push(layer);\n this.built = false;\n }\n /**\n * Removes the last layer in the model.\n *\n * @exception TypeError if there are no layers in the model.\n */\n pop() {\n if (this.layers.length === 0) {\n throw new TypeError('There are no layers in the model.');\n }\n this.layers.pop();\n if (this.layers.length === 0) {\n this.outputs = [];\n this.inboundNodes = [];\n this.outboundNodes = [];\n }\n else {\n const lastLayerIndex = this.layers.length - 1;\n this.layers[lastLayerIndex].outboundNodes = [];\n this.outputs = [this.layers[lastLayerIndex].output];\n // update self.inbound_nodes\n this.inboundNodes[0].outputTensors = this.outputs;\n this.inboundNodes[0].outputShapes = [this.outputs[0].shape];\n }\n }\n call(inputs, kwargs) {\n if (this.model == null) {\n this.build();\n }\n return this.model.call(inputs, kwargs);\n }\n build(inputShape) {\n // Call `getExactlyOneShape` without using its return value,\n // to verify that exactly one input shape is provided.\n getExactlyOneShape(inputShape);\n if (this.inputs.length === 0 || this.outputs.length === 0) {\n throw new TypeError('Sequential model cannot be built: model is empty.' 
+\n ' Add some layers first.');\n }\n // actually create the model\n this.model = new LayersModel({\n inputs: this.inputs,\n outputs: this.outputs[0],\n name: this.name + '_model'\n });\n this.model.trainable = this.trainable;\n // mirror model attributes\n this.supportsMasking = this.model.supportsMasking;\n // TODO(michaelterry): Add caches\n this.inputLayers = this.model.inputLayers;\n this.inputLayersNodeIndices = this.model.inputLayersNodeIndices;\n this.inputLayersTensorIndices = this.model.inputLayersTensorIndices;\n this.outputLayers = this.model.outputLayers;\n this.outputLayersNodeIndices = this.model.outputLayersNodeIndices;\n this.outputLayersTensorIndices = this.model.outputLayersTensorIndices;\n this.nodesByDepth = this.model.nodesByDepth;\n this.containerNodes = this.model.containerNodes;\n this.outputNames = this.model.outputNames;\n this.inputNames = this.model.inputNames;\n // TODO(michaelterry): Add feedInputNames, feedInputs, if needed.\n // TODO(michaelterry): Add callbackModel if needed.\n this.built = true;\n }\n countParams() {\n if (!this.built) {\n this.build();\n }\n return super.countParams();\n }\n /**\n * Print a text summary of the Sequential model's layers.\n *\n * The summary includes\n * - Name and type of all layers that comprise the model.\n * - Output shape(s) of the layers\n * - Number of weight parameters of each layer\n * - The total number of trainable and non-trainable parameters of the\n * model.\n *\n * ```js\n * const model = tf.sequential();\n * model.add(\n * tf.layers.dense({units: 100, inputShape: [10], activation: 'relu'}));\n * model.add(tf.layers.dense({units: 1, activation: 'sigmoid'}));\n *\n * model.summary();\n * ```\n *\n * @param lineLength Custom line length, in number of characters.\n * @param positions Custom widths of each of the columns, as either\n * fractions of `lineLength` (e.g., `[0.5, 0.75, 1]`) or absolute number\n * of characters (e.g., `[30, 50, 65]`). Each number corresponds to\n * right-most (i.e., ending) position of a column.\n * @param printFn Custom print function. Can be used to replace the default\n * `console.log`. 
For example, you can use `x => {}` to mute the printed\n * messages in the console.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n summary(lineLength, positions, printFn = console.log) {\n if (!this.built) {\n this.build();\n }\n super.summary(lineLength, positions, printFn);\n }\n /**\n * Sets the weights of the model.\n *\n * @param weights Should be a list of Tensors with shapes and types matching\n * the output of `model.getWeights()`.\n */\n setWeights(weights) {\n if (this.model == null) {\n this.build();\n }\n this.model.setWeights(weights);\n }\n /**\n * Returns the loss value & metrics values for the model in test mode.\n *\n * Loss and metrics are specified during `compile()`, which needs to happen\n * before calls to `evaluate()`.\n *\n * Computation is done in batches.\n *\n * ```js\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 1, inputShape: [10]})]\n * });\n * model.compile({optimizer: 'sgd', loss: 'meanSquaredError'});\n * const result = model.evaluate(tf.ones([8, 10]), tf.ones([8, 1]), {\n * batchSize: 4,\n * });\n * result.print();\n * ```\n *\n * @param x `tf.Tensor` of test data, or an `Array` of `tf.Tensor`s if the\n * model has multiple inputs.\n * @param y `tf.Tensor` of target data, or an `Array` of `tf.Tensor`s if the\n * model has multiple outputs.\n * @param args A `ModelEvaluateConfig`, containing optional fields.\n *\n * @return `Scalar` test loss (if the model has a single output and no\n * metrics) or `Array` of `Scalar`s (if the model has multiple outputs\n * and/or metrics). The attribute `model.metricsNames`\n * will give you the display labels for the scalar outputs.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n evaluate(x, y, args = {}) {\n if (!this.built) {\n throw new RuntimeError('The model needs to be compiled before being used.');\n }\n return this.model.evaluate(x, y, args);\n }\n // TODO(cais): Add code snippet below once real dataset objects are\n // available.\n /**\n * Evaluate model using a dataset object.\n *\n * Note: Unlike `evaluate()`, this method is asynchronous (`async`);\n *\n * @param dataset A dataset object. Its `iterator()` method is expected\n * to generate a dataset iterator object, the `next()` method of which\n * is expected to produce data batches for evaluation. The return value\n * of the `next()` call ought to contain a boolean `done` field and a\n * `value` field. The `value` field is expected to be an array of two\n * `tf.Tensor`s or an array of two nested `tf.Tensor` structures. The former\n * case is for models with exactly one input and one output (e.g..\n * a sequential model). The latter case is for models with multiple\n * inputs and/or multiple outputs. 
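// Illustrative sketch of setWeights() documented above (assumes global `tf`):
// copy trained weights into a second model with an identical topology.
const srcModel = tf.sequential({layers: [tf.layers.dense({units: 1, inputShape: [3]})]});
const dstModel = tf.sequential({layers: [tf.layers.dense({units: 1, inputShape: [3]})]});
dstModel.setWeights(srcModel.getWeights()); // shapes and ordering must match getWeights()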
Of the two items in the array, the\n * first is the input feature(s) and the second is the output target(s).\n * @param args A configuration object for the dataset-based evaluation.\n * @returns Loss and metric values as an Array of `Scalar` objects.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n async evaluateDataset(dataset, args) {\n if (!this.built) {\n throw new RuntimeError('The model needs to be compiled before being used.');\n }\n return this.model.evaluateDataset(dataset, args);\n }\n /**\n * Generates output predictions for the input samples.\n *\n * Computation is done in batches.\n *\n * Note: the \"step\" mode of predict() is currently not supported.\n * This is because the TensorFow.js core backend is imperative only.\n *\n * ```js\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 1, inputShape: [10]})]\n * });\n * model.predict(tf.ones([2, 10])).print();\n * ```\n *\n * @param x The input data, as a Tensor, or an `Array` of `tf.Tensor`s if\n * the model has multiple inputs.\n * @param conifg A `ModelPredictConfig` object containing optional fields.\n *\n * @return `tf.Tensor`(s) of predictions.\n *\n * @exception ValueError In case of mismatch between the provided input data\n * and the model's expectations, or in case a stateful model receives a\n * number of samples that is not a multiple of the batch size.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n predict(x, args = {}) {\n if (this.model == null) {\n this.build();\n }\n return this.model.predict(x, args);\n }\n /**\n * Returns predictions for a single batch of samples.\n *\n * @param x: Input samples, as a Tensor, or list of Tensors (if the model\n * has multiple inputs).\n * @return Tensor(s) of predictions\n */\n predictOnBatch(x) {\n if (this.model == null) {\n this.build();\n }\n return this.model.predictOnBatch(x);\n }\n /**\n * See `LayersModel.compile`.\n *\n * @param args\n */\n compile(args) {\n this.build();\n this.model.compile(args);\n this.optimizer_ = this.model.optimizer;\n // tslint:disable-next-line:no-any\n this.isOptimizerOwned = this.model.isOptimizerOwned;\n this.loss = this.model.loss;\n this.metrics = this.model.metrics;\n // TODO(cais): Add this.lossWeights, this.sampleWeightMode,\n // this.weightedMetrics, this.targets.\n this.metricsTensors = this.model.metricsTensors;\n this.metricsNames = this.model.metricsNames;\n // TODO(cais): Add sampleWeights.\n }\n get optimizer() {\n return this.model == null ? undefined : this.model.optimizer;\n }\n set optimizer(optimizer) {\n this.model.optimizer = optimizer;\n }\n /**\n * Trains the model for a fixed number of epochs (iterations on a dataset).\n *\n * ```js\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 1, inputShape: [10]})]\n * });\n * model.compile({optimizer: 'sgd', loss: 'meanSquaredError'});\n * const history = await model.fit(tf.ones([8, 10]), tf.ones([8, 1]), {\n * batchSize: 4,\n * epochs: 3\n * });\n * console.log(history.history.loss[0]);\n * ```\n *\n * @param x `tf.Tensor` of training data, or an array of `tf.Tensor`s if the\n * model has multiple inputs. If all inputs in the model are named, you can\n * also pass a dictionary mapping input names to `tf.Tensor`s.\n * @param y `tf.Tensor` of target (label) data, or an array of `tf.Tensor`s if\n * the model has multiple outputs. 
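// Illustrative sketch of predictOnBatch() above (assumes global `tf`): a single
// forward pass over one batch, without the batch-splitting logic of predict().
const batchModel = tf.sequential({layers: [tf.layers.dense({units: 2, inputShape: [4]})]});
batchModel.predictOnBatch(tf.ones([8, 4])).print(); // tensor of shape [8, 2]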
If all outputs in the model are named, you\n * can also pass a dictionary mapping output names to `tf.Tensor`s.\n * @param args A `ModelFitConfig`, containing optional fields.\n *\n * @return A `History` instance. Its `history` attribute contains all\n * information collected during training.\n *\n * @exception ValueError In case of mismatch between the provided input data\n * and what the model expects.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n async fit(x, y, args = {}) {\n if (!this.built) {\n throw new RuntimeError('The model needs to be compiled before ' +\n 'being used.');\n }\n return this.model.fit(x, y, args);\n }\n /**\n * Trains the model using a dataset object.\n *\n * ```js\n * const xArray = [\n * [1, 1, 1, 1, 1, 1, 1, 1, 1],\n * [1, 1, 1, 1, 1, 1, 1, 1, 1],\n * [1, 1, 1, 1, 1, 1, 1, 1, 1],\n * [1, 1, 1, 1, 1, 1, 1, 1, 1],\n * ];\n * const yArray = [1, 1, 1, 1];\n * // Create a dataset from the JavaScript array.\n * const xDataset = tf.data.array(xArray);\n * const yDataset = tf.data.array(yArray);\n * // Zip combines the `x` and `y` Datasets into a single Dataset, the\n * // iterator of which will return an object containing of two tensors,\n * // corresponding to `x` and `y`. The call to `batch(4)` will bundle\n * // four such samples into a single object, with the same keys now pointing\n * // to tensors that hold 4 examples, organized along the batch dimension.\n * // The call to `shuffle(4)` causes each iteration through the dataset to\n * // happen in a different order. The size of the shuffle window is 4.\n * const xyDataset = tf.data.zip({xs: xDataset, ys: yDataset})\n * .batch(4)\n * .shuffle(4);\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 1, inputShape: [9]})]\n * });\n * model.compile({optimizer: 'sgd', loss: 'meanSquaredError'});\n * const history = await model.fitDataset(xyDataset, {\n * epochs: 4,\n * callbacks: {onEpochEnd: (epoch, logs) => console.log(logs.loss)}\n * });\n * ```\n *\n * @param dataset A dataset object. Its `iterator()` method is expected to\n * generate a dataset iterator object, the `next()` method of which is\n * expected to produce data batches for evaluation. The return value of the\n * `next()` call ought to contain a boolean `done` field and a `value`\n * field.\n *\n * The `value` field is expected to be an object of with fields\n * `xs` and `ys`, which point to the feature tensor and the target tensor,\n * respectively. This case is for models with exactly one input and one\n * output (e.g.. a sequential model). For example:\n * ```js\n * {value: {xs: xsTensor, ys: ysTensor}, done: false}\n * ```\n *\n * If the model has multiple inputs, the `xs` field of `value` should\n * be an object mapping input names to their respective feature tensors.\n * For example:\n * ```js\n * {\n * value: {\n * xs: {\n * input_1: xsTensor1,\n * input_2: xsTensor2\n * },\n * ys: ysTensor\n * },\n * done: false\n * }\n * ```\n * If the model has multiple outputs, the `ys` field of `value` should\n * be an object mapping output names to their respective target tensors.\n * For example:\n * ```js\n * {\n * value: {\n * xs: xsTensor,\n * ys: {\n * output_1: ysTensor1,\n * output_2: ysTensor2\n * },\n * },\n * done: false\n * }\n * ```\n * @param args A `ModelFitDatasetArgs`, containing optional fields.\n *\n * @return A `History` instance. 
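// Illustrative sketch of fit() with optional ModelFitArgs fields (assumes global
// `tf` and an enclosing async function; the field names follow the public API,
// not this bundle's internals).
const fitModel = tf.sequential({layers: [tf.layers.dense({units: 1, inputShape: [10]})]});
fitModel.compile({optimizer: 'sgd', loss: 'meanSquaredError'});
await fitModel.fit(tf.ones([16, 10]), tf.ones([16, 1]), {
  epochs: 2,
  validationSplit: 0.25, // hold out the last 25% of samples for validation
  callbacks: {onEpochEnd: (epoch, logs) => console.log(epoch, logs.val_loss)},
});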
Its `history` attribute contains all\n * information collected during training.\n *\n * @doc {heading: 'Models', subheading: 'Classes', ignoreCI: true}\n */\n async fitDataset(dataset, args) {\n if (!this.built) {\n throw new RuntimeError('The model needs to be compiled before ' +\n 'being used.');\n }\n return this.model.fitDataset(dataset, args);\n }\n /**\n * Runs a single gradient update on a single batch of data.\n *\n * This method differs from `fit()` and `fitDataset()` in the following\n * regards:\n * - It operates on exactly one batch of data.\n * - It returns only the loss and matric values, instead of\n * returning the batch-by-batch loss and metric values.\n * - It doesn't support fine-grained options such as verbosity and\n * callbacks.\n *\n * @param x Input data. It could be one of the following:\n * - A `tf.Tensor`, or an Array of `tf.Tensor`s (in case the model has\n * multiple inputs).\n * - An Object mapping input names to corresponding `tf.Tensor` (if the\n * model has named inputs).\n * @param y Target darta. It could be either a `tf.Tensor` a multiple\n * `tf.Tensor`s. It should be consistent with `x`.\n * @returns Training loss or losses (in case the model has\n * multiple outputs), along with metrics (if any), as numbers.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n async trainOnBatch(x, y) {\n return this.model.trainOnBatch(x, y);\n }\n /* See parent class for JsDoc */\n /** @nocollapse */\n static fromConfig(cls, config, customObjects = {}, fastWeightInit = false) {\n let configArray;\n let extraModelConfig = {};\n if (config instanceof Array) {\n if (!(config[0].className != null) ||\n config[0]['className'] === 'Merge') {\n throw new ValueError('Legacy serialization format not supported yet.');\n }\n configArray = config;\n }\n else {\n util.assert(config['layers'] != null, () => `When the config data for a Sequential model is not an Array, ` +\n `it must be an Object that contains the 'layers' field.`);\n configArray = config['layers'];\n delete config['layers'];\n extraModelConfig = config;\n }\n const model = new cls(extraModelConfig);\n if (!(model instanceof Sequential)) {\n throw new NotImplementedError(`Sequential.fromConfig called on non-Sequential input: ${model}`);\n }\n for (const conf of configArray) {\n const customObjects = undefined;\n const layer = deserialize(conf, customObjects, fastWeightInit);\n if (fastWeightInit) {\n layer.setFastWeightInitDuringBuild(true);\n }\n model.add(layer);\n }\n return model;\n }\n /**\n * Setter used for force stopping of LayersModel.fit() (i.e., training).\n *\n * Example:\n *\n * ```js\n * const model = tf.sequential();\n * model.add(tf.layers.dense({units: 1, inputShape: [10]}));\n * model.compile({loss: 'meanSquaredError', optimizer: 'sgd'});\n * const xs = tf.ones([8, 10]);\n * const ys = tf.zeros([8, 1]);\n *\n * const history = await model.fit(xs, ys, {\n * epochs: 10,\n * callbacks: {\n * onEpochEnd: async (epoch, logs) => {\n * if (epoch === 2) {\n * model.stopTraining = true;\n * }\n * }\n * }\n * });\n *\n * // There should be only 3 values in the loss array, instead of 10 values,\n * // due to the stopping after 3 epochs.\n * console.log(history.history.loss);\n * ```\n */\n set stopTraining(stop) {\n // TODO(cais): When refactoring to remove the composition pattern happens,\n // remove this method overriding.\n if (this.model == null) {\n throw new ValueError('Cannot set the stopTraining property of a sequential model before ' +\n 'it is compiled.');\n }\n this.model.stopTraining = 
stop;\n }\n get stopTraining() {\n if (this.model == null) {\n throw new ValueError('Cannot get the stopTraining property of a sequential model before ' +\n 'it is compiled.');\n }\n return this.model.stopTraining;\n }\n // TODO(cais): Override get trainableWeights() here\n // tslint:disable-next-line:no-any\n getConfig() {\n // NOTE(cais): We override the return type of getConfig() to `any` here,\n // because the `Sequential` class is a special case among `Container`\n // subtypes in that its getConfig() method returns an Array (not a\n // dict).\n const layers = [];\n for (const layer of this.layers) {\n const dict = {};\n dict['className'] = layer.getClassName();\n dict['config'] = layer.getConfig();\n layers.push(dict);\n }\n return { name: this.name, layers };\n }\n}\n/** @nocollapse */\nSequential.className = 'Sequential';\nserialization.registerClass(Sequential);\n//# sourceMappingURL=models.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport { CallbackConstructorRegistry } from './base_callbacks';\nimport { Input, } from './engine/input_layer';\nimport { LayersModel } from './engine/training';\nimport { loadLayersModelInternal, Sequential } from './models';\n// TODO(cais): Add doc string to all the public static functions in this\n// class; include exectuable JavaScript code snippets where applicable\n// (b/74074458).\n// LayersModel and related factory methods.\n/**\n * A model is a data structure that consists of `Layers` and defines inputs\n * and outputs.\n *\n * The key difference between `tf.model` and `tf.sequential` is that\n * `tf.model` is more generic, supporting an arbitrary graph (without\n * cycles) of layers. `tf.sequential` is less generic and supports only a linear\n * stack of layers.\n *\n * When creating a `tf.LayersModel`, specify its input(s) and output(s). Layers\n * are used to wire input(s) to output(s).\n *\n * For example, the following code snippet defines a model consisting of\n * two `dense` layers, with 10 and 4 units, respectively.\n *\n * ```js\n * // Define input, which has a size of 5 (not including batch dimension).\n * const input = tf.input({shape: [5]});\n *\n * // First dense layer uses relu activation.\n * const denseLayer1 = tf.layers.dense({units: 10, activation: 'relu'});\n * // Second dense layer uses softmax activation.\n * const denseLayer2 = tf.layers.dense({units: 4, activation: 'softmax'});\n *\n * // Obtain the output symbolic tensor by applying the layers on the input.\n * const output = denseLayer2.apply(denseLayer1.apply(input));\n *\n * // Create the model based on the inputs.\n * const model = tf.model({inputs: input, outputs: output});\n *\n * // The model can be used for training, evaluation and prediction.\n * // For example, the following line runs prediction with the model on\n * // some fake data.\n * model.predict(tf.ones([2, 5])).print();\n * ```\n * See also:\n * `tf.sequential`, `tf.loadLayersModel`.\n *\n * @doc {heading: 'Models', subheading: 'Creation'}\n */\nexport function model(args) {\n return new LayersModel(args);\n}\n/**\n * Creates a `tf.Sequential` model. A sequential model is any model where the\n * outputs of one layer are the inputs to the next layer, i.e. 
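// Illustrative sketch of the Sequential getConfig() override above (assumes
// global `tf`): the returned object is {name, layers}, where each entry carries
// the layer's className and config.
const cfgModel = tf.sequential({layers: [tf.layers.dense({units: 4, inputShape: [8]})]});
const cfg = cfgModel.getConfig();
console.log(cfg.name, cfg.layers.map((l) => l.className)); // e.g. ['Dense']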
the model\n * topology is a simple 'stack' of layers, with no branching or skipping.\n *\n * This means that the first layer passed to a `tf.Sequential` model should have\n * a defined input shape. What that means is that it should have received an\n * `inputShape` or `batchInputShape` argument, or for some type of layers\n * (recurrent, Dense...) an `inputDim` argument.\n *\n * The key difference between `tf.model` and `tf.sequential` is that\n * `tf.sequential` is less generic, supporting only a linear stack of layers.\n * `tf.model` is more generic and supports an arbitrary graph (without\n * cycles) of layers.\n *\n * Examples:\n *\n * ```js\n * const model = tf.sequential();\n *\n * // First layer must have an input shape defined.\n * model.add(tf.layers.dense({units: 32, inputShape: [50]}));\n * // Afterwards, TF.js does automatic shape inference.\n * model.add(tf.layers.dense({units: 4}));\n *\n * // Inspect the inferred shape of the model's output, which equals\n * // `[null, 4]`. The 1st dimension is the undetermined batch dimension; the\n * // 2nd is the output size of the model's last layer.\n * console.log(JSON.stringify(model.outputs[0].shape));\n * ```\n *\n * It is also possible to specify a batch size (with potentially undetermined\n * batch dimension, denoted by \"null\") for the first layer using the\n * `batchInputShape` key. The following example is equivalent to the above:\n *\n * ```js\n * const model = tf.sequential();\n *\n * // First layer must have a defined input shape\n * model.add(tf.layers.dense({units: 32, batchInputShape: [null, 50]}));\n * // Afterwards, TF.js does automatic shape inference.\n * model.add(tf.layers.dense({units: 4}));\n *\n * // Inspect the inferred shape of the model's output.\n * console.log(JSON.stringify(model.outputs[0].shape));\n * ```\n *\n * You can also use an `Array` of already-constructed `Layer`s to create\n * a `tf.Sequential` model:\n *\n * ```js\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 32, inputShape: [50]}),\n * tf.layers.dense({units: 4})]\n * });\n * console.log(JSON.stringify(model.outputs[0].shape));\n * ```\n *\n * @doc {heading: 'Models', subheading: 'Creation'}\n */\nexport function sequential(config) {\n return new Sequential(config);\n}\n/**\n * Load a model composed of Layer objects, including its topology and optionally\n * weights. See the Tutorial named \"How to import a Keras Model\" for usage\n * examples.\n *\n * This method is applicable to:\n *\n * 1. Models created with the `tf.layers.*`, `tf.sequential`, and\n * `tf.model` APIs of TensorFlow.js and later saved with the\n * `tf.LayersModel.save` method.\n * 2. Models converted from Keras or TensorFlow tf.keras using the\n * [tensorflowjs_converter](https://github.com/tensorflow/tfjs/tree/master/tfjs-converter).\n *\n * This mode is *not* applicable to TensorFlow `SavedModel`s or their converted\n * forms. For those models, use `tf.loadGraphModel`.\n *\n * Example 1. 
Load a model from an HTTP server.\n *\n * ```js\n * const model = await tf.loadLayersModel(\n * 'https://storage.googleapis.com/tfjs-models/tfjs/iris_v1/model.json');\n * model.summary();\n * ```\n *\n * Example 2: Save `model`'s topology and weights to browser [local\n * storage](https://developer.mozilla.org/en-US/docs/Web/API/Window/localStorage);\n * then load it back.\n *\n * ```js\n * const model = tf.sequential(\n * {layers: [tf.layers.dense({units: 1, inputShape: [3]})]});\n * console.log('Prediction from original model:');\n * model.predict(tf.ones([1, 3])).print();\n *\n * const saveResults = await model.save('localstorage://my-model-1');\n *\n * const loadedModel = await tf.loadLayersModel('localstorage://my-model-1');\n * console.log('Prediction from loaded model:');\n * loadedModel.predict(tf.ones([1, 3])).print();\n * ```\n *\n * Example 3. Saving `model`'s topology and weights to browser\n * [IndexedDB](https://developer.mozilla.org/en-US/docs/Web/API/IndexedDB_API);\n * then load it back.\n *\n * ```js\n * const model = tf.sequential(\n * {layers: [tf.layers.dense({units: 1, inputShape: [3]})]});\n * console.log('Prediction from original model:');\n * model.predict(tf.ones([1, 3])).print();\n *\n * const saveResults = await model.save('indexeddb://my-model-1');\n *\n * const loadedModel = await tf.loadLayersModel('indexeddb://my-model-1');\n * console.log('Prediction from loaded model:');\n * loadedModel.predict(tf.ones([1, 3])).print();\n * ```\n *\n * Example 4. Load a model from user-selected files from HTML\n * [file input\n * elements](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/input/file).\n *\n * ```js\n * // Note: this code snippet will not work without the HTML elements in the\n * // page\n * const jsonUpload = document.getElementById('json-upload');\n * const weightsUpload = document.getElementById('weights-upload');\n *\n * const model = await tf.loadLayersModel(\n * tf.io.browserFiles([jsonUpload.files[0], weightsUpload.files[0]]));\n * ```\n *\n * @param pathOrIOHandler Can be either of the two formats\n * 1. A string path to the `ModelAndWeightsConfig` JSON describing\n * the model in the canonical TensorFlow.js format. For file://\n * (tfjs-node-only), http:// and https:// schemas, the path can be\n * either absolute or relative.\n * 2. An `tf.io.IOHandler` object that loads model artifacts with its `load`\n * method.\n * @param options Optional configuration arguments for the model loading,\n * including:\n * - `strict`: Require that the provided weights exactly match those required\n * by the layers. Default true. 
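// Illustrative sketch of loadLayersModel() with the optional fields documented
// above (assumes global `tf` and an enclosing async function; the URL is a
// placeholder, not a real model):
const loaded = await tf.loadLayersModel('https://example.com/model.json', {
  strict: false, // tolerate extra or missing weights instead of throwing
  onProgress: (fraction) => console.log(`loading: ${(fraction * 100).toFixed(0)}%`),
});
loaded.summary();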
Passing false means that both extra\n * weights and missing weights will be silently ignored.\n * - `onProgress`: A function of the signature `(fraction: number) => void',\n * that can be used as the progress callback for the model loading.\n * @returns A `Promise` of `tf.LayersModel`, with the topology and weights\n * loaded.\n *\n * @doc {heading: 'Models', subheading: 'Loading'}\n */\nexport function loadLayersModel(pathOrIOHandler, options) {\n if (options == null) {\n options = {};\n }\n return loadLayersModelInternal(pathOrIOHandler, options);\n}\n/**\n * Used to instantiate an input to a model as a `tf.SymbolicTensor`.\n *\n * Users should call the `input` factory function for\n * consistency with other generator functions.\n *\n * Example:\n *\n * ```js\n * // Defines a simple logistic regression model with 32 dimensional input\n * // and 3 dimensional output.\n * const x = tf.input({shape: [32]});\n * const y = tf.layers.dense({units: 3, activation: 'softmax'}).apply(x);\n * const model = tf.model({inputs: x, outputs: y});\n * model.predict(tf.ones([2, 32])).print();\n * ```\n *\n * Note: `input` is only necessary when using `model`. When using\n * `sequential`, specify `inputShape` for the first layer or use `inputLayer`\n * as the first layer.\n *\n * @doc {heading: 'Models', subheading: 'Inputs'}\n */\nexport function input(config) {\n return Input(config);\n}\nexport function registerCallbackConstructor(verbosityLevel, callbackConstructor) {\n CallbackConstructorRegistry.registerCallbackConstructor(verbosityLevel, callbackConstructor);\n}\n//# sourceMappingURL=exports.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n// Layer activation functions\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { serialization, tidy } from '@tensorflow/tfjs-core';\nimport * as K from './backend/tfjs_backend';\nimport { deserializeKerasObject } from './utils/generic_utils';\n/**\n * Base class for Activations.\n *\n * Special note: due to cross-language compatibility reasons, the\n * static readonly className field in this family of classes must be set to\n * the initialLowerCamelCase name of the activation.\n */\nexport class Activation extends serialization.Serializable {\n getConfig() {\n return {};\n }\n}\n/**\n * Exponential linear unit (ELU).\n * Reference: https://arxiv.org/abs/1511.07289\n */\nexport class Elu extends Activation {\n /**\n * Calculate the activation function.\n *\n * @param x: Input.\n * @param alpha: Scaling factor the negative section.\n * @return Output of the ELU activation.\n */\n apply(x, alpha = 1) {\n return K.elu(x, alpha);\n }\n}\n/** @nocollapse */\nElu.className = 'elu';\nserialization.registerClass(Elu);\n/**\n * Scaled Exponential Linear Unit. 
(Klambauer et al., 2017).\n * Reference: Self-Normalizing Neural Networks, https://arxiv.org/abs/1706.02515\n * Notes:\n * - To be used together with the initialization \"lecunNormal\".\n * - To be used together with the dropout variant \"AlphaDropout\".\n */\nexport class Selu extends Activation {\n apply(x) {\n return tfc.selu(x);\n }\n}\n/** @nocollapse */\nSelu.className = 'selu';\nserialization.registerClass(Selu);\n/**\n * Rectified linear unit\n */\nexport class Relu extends Activation {\n apply(x) {\n return tfc.relu(x);\n }\n}\n/** @nocollapse */\nRelu.className = 'relu';\nserialization.registerClass(Relu);\n/**\n * Rectified linear unit activation maxing out at 6.0.\n */\nexport class Relu6 extends Activation {\n apply(x) {\n return tidy(() => tfc.minimum(6.0, tfc.relu(x)));\n }\n}\n/** @nocollapse */\nRelu6.className = 'relu6';\nserialization.registerClass(Relu6);\n//* Linear activation (no-op) */\nexport class Linear extends Activation {\n apply(x) {\n return x;\n }\n}\n/** @nocollapse */\nLinear.className = 'linear';\nserialization.registerClass(Linear);\n/**\n * Sigmoid activation function.\n */\nexport class Sigmoid extends Activation {\n apply(x) {\n return tfc.sigmoid(x);\n }\n}\n/** @nocollapse */\nSigmoid.className = 'sigmoid';\nserialization.registerClass(Sigmoid);\n/**\n * Segment-wise linear approximation of sigmoid.\n */\nexport class HardSigmoid extends Activation {\n apply(x) {\n return K.hardSigmoid(x);\n }\n}\n/** @nocollapse */\nHardSigmoid.className = 'hardSigmoid';\nserialization.registerClass(HardSigmoid);\n/**\n * Softplus activation function.\n */\nexport class Softplus extends Activation {\n apply(x) {\n return tfc.softplus(x);\n }\n}\n/** @nocollapse */\nSoftplus.className = 'softplus';\nserialization.registerClass(Softplus);\n/**\n * Softsign activation function.\n */\nexport class Softsign extends Activation {\n apply(x) {\n return K.softsign(x);\n }\n}\n/** @nocollapse */\nSoftsign.className = 'softsign';\nserialization.registerClass(Softsign);\n/**\n * Hyperbolic tangent function.\n */\nexport class Tanh extends Activation {\n apply(x) {\n return tfc.tanh(x);\n }\n}\n/** @nocollapse */\nTanh.className = 'tanh';\nserialization.registerClass(Tanh);\n/**\n * Softmax activation function\n */\nexport class Softmax extends Activation {\n /**\n * Calculate the activation function.\n *\n * @param x Tensor.\n * @param axis Integer, axis along which the softmax normalization is applied.\n * Invalid if < 2, as softmax across 1 (the batch dimension) is assumed to be\n * an error.\n *\n * @returns a Tensor of the same shape as x\n *\n * @throws ValueError: In case `dim(x) < 2`.\n */\n apply(x, axis = (-1)) {\n return tfc.softmax(x, axis);\n }\n}\n/** @nocollapse */\nSoftmax.className = 'softmax';\nserialization.registerClass(Softmax);\n/**\n * Log softmax activation function\n */\nexport class LogSoftmax extends Activation {\n /**\n * Calculate the activation function of log softmax:\n * log( exp(x_i) / sum(exp(x)) )\n *\n * @param x Tensor.\n * @param axis Integer, axis along which the softmax normalization is applied.\n * Invalid if < 2, as softmax across 1 (the batch dimension) is assumed to be\n * an error.\n *\n * @returns a Tensor of the same shape as x\n *\n * @throws ValueError: In case `dim(x) < 2`.\n */\n apply(x, axis = (-1)) {\n return tfc.logSoftmax(x, axis);\n }\n}\n/** @nocollapse */\nLogSoftmax.className = 'logSoftmax';\nserialization.registerClass(LogSoftmax);\n/**\n * Swish activation function\n */\nexport class Swish extends Activation {\n /**\n 
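// Illustrative sketch (assumes global `tf`): the activation classes registered
// above back the string identifiers that layers accept via getActivation().
const actLayer = tf.layers.dense({units: 3, activation: 'relu6', inputShape: [5]});
actLayer.apply(tf.randomNormal([2, 5])).print(); // outputs clipped to [0, 6]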
* Calculate the activation function.\n *\n * @param x Tensor.\n * @param alpha Scaling factor for the sigmoid function.\n * @returns a Tensor of the same shape as x\n */\n apply(x, alpha = 1) {\n return tidy(() => tfc.sigmoid(x.mul(alpha)).mul(x));\n }\n}\n/** @nocollapse */\nSwish.className = 'swish';\nserialization.registerClass(Swish);\nexport function serializeActivation(activation) {\n return activation.getClassName();\n}\nexport function deserializeActivation(config, customObjects = {}) {\n return deserializeKerasObject(config, serialization.SerializationMap.getMap().classNameMap, customObjects, 'activation');\n}\nexport function getActivation(identifier) {\n if (identifier == null) {\n const config = {};\n config['className'] = 'linear';\n config['config'] = {};\n return deserializeActivation(config);\n }\n if (typeof identifier === 'string') {\n const config = {};\n config['className'] = identifier;\n config['config'] = {};\n return deserializeActivation(config);\n }\n else if (identifier instanceof Activation) {\n return identifier;\n }\n else {\n return deserializeActivation(identifier);\n }\n}\n//# sourceMappingURL=activations.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* original source: keras/regularizers.py */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { abs, add, serialization, sum, tidy, zeros } from '@tensorflow/tfjs-core';\nimport * as K from './backend/tfjs_backend';\nimport { deserializeKerasObject, serializeKerasObject } from './utils/generic_utils';\nfunction assertObjectArgs(args) {\n if (args != null && typeof args !== 'object') {\n throw new Error(`Argument to L1L2 regularizer's constructor is expected to be an ` +\n `object, but received: ${args}`);\n }\n}\n/**\n * Regularizer base class.\n */\nexport class Regularizer extends serialization.Serializable {\n}\nexport class L1L2 extends Regularizer {\n constructor(args) {\n super();\n assertObjectArgs(args);\n this.l1 = args == null || args.l1 == null ? 0.01 : args.l1;\n this.l2 = args == null || args.l2 == null ? 0.01 : args.l2;\n this.hasL1 = this.l1 !== 0;\n this.hasL2 = this.l2 !== 0;\n }\n /**\n * Porting note: Renamed from __call__.\n * @param x Variable of which to calculate the regularization score.\n */\n apply(x) {\n return tidy(() => {\n let regularization = zeros([1]);\n if (this.hasL1) {\n regularization = add(regularization, sum(tfc.mul(this.l1, abs(x))));\n }\n if (this.hasL2) {\n regularization =\n add(regularization, sum(tfc.mul(this.l2, K.square(x))));\n }\n return regularization.asScalar();\n });\n }\n getConfig() {\n return { 'l1': this.l1, 'l2': this.l2 };\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls({ l1: config['l1'], l2: config['l2'] });\n }\n}\n/** @nocollapse */\nL1L2.className = 'L1L2';\nserialization.registerClass(L1L2);\nexport function l1(args) {\n assertObjectArgs(args);\n return new L1L2({ l1: args != null ? args.l1 : null, l2: 0 });\n}\nexport function l2(args) {\n assertObjectArgs(args);\n return new L1L2({ l2: args != null ? 
args.l2 : null, l1: 0 });\n}\n// Maps the JavaScript-like identifier keys to the corresponding keras symbols.\nexport const REGULARIZER_IDENTIFIER_REGISTRY_SYMBOL_MAP = {\n 'l1l2': 'L1L2'\n};\nexport function serializeRegularizer(constraint) {\n return serializeKerasObject(constraint);\n}\nexport function deserializeRegularizer(config, customObjects = {}) {\n return deserializeKerasObject(config, serialization.SerializationMap.getMap().classNameMap, customObjects, 'regularizer');\n}\nexport function getRegularizer(identifier) {\n if (identifier == null) {\n return null;\n }\n if (typeof identifier === 'string') {\n const className = identifier in REGULARIZER_IDENTIFIER_REGISTRY_SYMBOL_MAP ?\n REGULARIZER_IDENTIFIER_REGISTRY_SYMBOL_MAP[identifier] :\n identifier;\n const config = { className, config: {} };\n return deserializeRegularizer(config);\n }\n else if (identifier instanceof Regularizer) {\n return identifier;\n }\n else {\n return deserializeRegularizer(identifier);\n }\n}\n//# sourceMappingURL=regularizers.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Advanced activation layers.\n */\nimport { clipByValue, elu, leakyRelu, prelu, relu, serialization } from '@tensorflow/tfjs-core';\nimport { Softmax as softmaxActivation } from '../activations';\nimport { cast } from '../backend/tfjs_backend';\nimport { getConstraint, serializeConstraint } from '../constraints';\nimport { InputSpec, Layer } from '../engine/topology';\nimport { NotImplementedError, ValueError } from '../errors';\nimport { getInitializer, serializeInitializer } from '../initializers';\nimport { getRegularizer, serializeRegularizer } from '../regularizers';\nimport { getExactlyOneShape, getExactlyOneTensor } from '../utils/types_utils';\nexport class ReLU extends Layer {\n constructor(args) {\n super(args == null ? {} : args);\n this.supportsMasking = true;\n if (args != null) {\n this.maxValue = args.maxValue;\n }\n }\n call(inputs, kwargs) {\n inputs = getExactlyOneTensor(inputs);\n let output = relu(inputs);\n if (this.maxValue != null) {\n output = clipByValue(output, 0, this.maxValue);\n }\n return output;\n }\n computeOutputShape(inputShape) {\n return inputShape;\n }\n getConfig() {\n const config = { maxValue: this.maxValue };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nReLU.className = 'ReLU';\nserialization.registerClass(ReLU);\nexport class LeakyReLU extends Layer {\n constructor(args) {\n super(args == null ? {} : args);\n this.DEFAULT_ALPHA = 0.3;\n if (args == null) {\n args = {};\n }\n this.alpha = args.alpha == null ? this.DEFAULT_ALPHA : args.alpha;\n }\n call(inputs, kwargs) {\n const x = getExactlyOneTensor(inputs);\n return leakyRelu(x, this.alpha);\n }\n computeOutputShape(inputShape) {\n return inputShape;\n }\n getConfig() {\n const config = { alpha: this.alpha };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nLeakyReLU.className = 'LeakyReLU';\nserialization.registerClass(LeakyReLU);\nexport class PReLU extends Layer {\n constructor(args) {\n super(args == null ? 
{} : args);\n this.DEFAULT_ALPHA_INITIALIZER = 'zeros';\n if (args == null) {\n args = {};\n }\n this.supportsMasking = true;\n this.alphaInitializer =\n getInitializer(args.alphaInitializer || this.DEFAULT_ALPHA_INITIALIZER);\n this.alphaRegularizer = getRegularizer(args.alphaRegularizer);\n this.alphaConstraint = getConstraint(args.alphaConstraint);\n if (args.sharedAxes == null) {\n this.sharedAxes = null;\n }\n else if (Array.isArray(args.sharedAxes)) {\n this.sharedAxes = args.sharedAxes;\n }\n else if (typeof args.sharedAxes === 'number') {\n this.sharedAxes = [args.sharedAxes];\n }\n else {\n throw new ValueError(`Expected sharedAxes to be a number or an array of numbers, ` +\n `but got ${args.sharedAxes}`);\n }\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const paramShape = inputShape.slice(1);\n if (this.sharedAxes != null) {\n for (const i of this.sharedAxes) {\n paramShape[i - 1] = 1;\n }\n }\n this.alpha = this.addWeight('alpha', paramShape, 'float32', this.alphaInitializer, this.alphaRegularizer, true, this.alphaConstraint);\n // Set input spec.\n const axes = {};\n if (this.sharedAxes != null) {\n for (let i = 1; i < inputShape.length; ++i) {\n axes[i] = inputShape[i];\n }\n }\n this.inputSpec = [new InputSpec({\n ndim: inputShape.length,\n axes,\n })];\n this.built = true;\n }\n call(inputs, kwargs) {\n inputs = getExactlyOneTensor(inputs);\n return prelu(inputs, this.alpha.read());\n }\n getConfig() {\n const config = {\n alphaInitializer: serializeInitializer(this.alphaInitializer),\n alphaRegularizer: serializeRegularizer(this.alphaRegularizer),\n alphaConstraint: serializeConstraint(this.alphaConstraint),\n sharedAxes: this.sharedAxes\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nPReLU.className = 'PReLU';\nserialization.registerClass(PReLU);\nexport class ELU extends Layer {\n constructor(args) {\n super(args == null ? {} : args);\n this.DEFAULT_ALPHA = 1.0;\n if (args == null) {\n args = {};\n }\n if (args.alpha != null && args.alpha !== this.DEFAULT_ALPHA) {\n throw new NotImplementedError(`Non-default alpha value (${args.alpha}) is not supported by the ` +\n `ELU layer yet.`);\n }\n this.alpha = args.alpha == null ? this.DEFAULT_ALPHA : args.alpha;\n }\n call(inputs, kwargs) {\n const x = getExactlyOneTensor(inputs);\n return elu(x);\n }\n computeOutputShape(inputShape) {\n return inputShape;\n }\n getConfig() {\n const config = { alpha: this.alpha };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nELU.className = 'ELU';\nserialization.registerClass(ELU);\nexport class ThresholdedReLU extends Layer {\n constructor(args) {\n super(args == null ? {} : args);\n this.DEFAULT_THETA = 1.0;\n if (args == null) {\n args = {};\n }\n this.theta = args.theta == null ? this.DEFAULT_THETA : args.theta;\n }\n call(inputs, kwargs) {\n const x = getExactlyOneTensor(inputs);\n return x.mul(cast(x.greater(this.theta), 'float32'));\n }\n computeOutputShape(inputShape) {\n return inputShape;\n }\n getConfig() {\n const config = { theta: this.theta };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nThresholdedReLU.className = 'ThresholdedReLU';\nserialization.registerClass(ThresholdedReLU);\nexport class Softmax extends Layer {\n constructor(args) {\n super(args == null ? 
{} : args);\n this.DEFAULT_AXIS = 1.0;\n if (args == null) {\n args = {};\n }\n this.softmax = new softmaxActivation().apply;\n this.axis = args.axis == null ? this.DEFAULT_AXIS : args.axis;\n }\n call(inputs, kwargs) {\n const x = getExactlyOneTensor(inputs);\n return this.softmax(x, this.axis);\n }\n computeOutputShape(inputShape) {\n return inputShape;\n }\n getConfig() {\n const config = { axis: this.axis };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nSoftmax.className = 'Softmax';\nserialization.registerClass(Softmax);\n//# sourceMappingURL=advanced_activations.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport { ValueError } from '../errors';\nimport { pyListRepeat } from './generic_utils';\nimport { isInteger, max } from './math_utils';\n/**\n * Transforms a single number of array of numbers into an array of numbers.\n * @param value\n * @param n: The size of the tuple to be returned.\n * @param name: Name of the parameter, used for generating error messages.\n * @returns An array of numbers.\n */\nexport function normalizeArray(value, n, name) {\n if (typeof value === 'number') {\n return pyListRepeat(value, n);\n }\n else {\n if (value.length !== n) {\n throw new ValueError(`The ${name} argument must be an integer or tuple of ${n} integers.` +\n ` Received: ${value.length} elements.`);\n }\n for (let i = 0; i < n; ++i) {\n const singleValue = value[i];\n if (!isInteger(singleValue)) {\n throw new ValueError(`The ${name} argument must be an integer or tuple of ${n}` +\n ` integers. 
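// Illustrative sketch (assumes global `tf`): the advanced-activation classes
// above are used as standalone layers rather than via the `activation` option.
const advModel = tf.sequential();
advModel.add(tf.layers.dense({units: 8, inputShape: [4]}));
advModel.add(tf.layers.leakyReLU({alpha: 0.2}));
advModel.add(tf.layers.dense({units: 3}));
advModel.add(tf.layers.softmax({axis: -1}));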
Received: ${JSON.stringify(value)} including a` +\n ` non-integer number ${singleValue}`);\n }\n }\n return value;\n }\n}\n/**\n * Determines output length of a convolution given input length.\n * @param inputLength\n * @param filterSize\n * @param padding\n * @param stride\n * @param dilation: dilation rate.\n */\nexport function convOutputLength(inputLength, filterSize, padding, stride, dilation = 1) {\n if (inputLength == null) {\n return inputLength;\n }\n const dilatedFilterSize = filterSize + (filterSize - 1) * (dilation - 1);\n let outputLength;\n if (padding === 'same') {\n outputLength = inputLength;\n }\n else { // VALID\n outputLength = inputLength - dilatedFilterSize + 1;\n }\n return Math.floor((outputLength + stride - 1) / stride);\n}\nexport function deconvLength(dimSize, strideSize, kernelSize, padding) {\n if (dimSize == null) {\n return null;\n }\n if (padding === 'valid') {\n dimSize = dimSize * strideSize + max([kernelSize - strideSize, 0]);\n }\n else if (padding === 'same') {\n dimSize = dimSize * strideSize;\n }\n else {\n throw new ValueError(`Unsupport padding mode: ${padding}.`);\n }\n return dimSize;\n}\n//# sourceMappingURL=conv_utils.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * TensorFlow.js Layers: Convolutional Layers\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { serialization, tidy } from '@tensorflow/tfjs-core';\nimport { getActivation, serializeActivation } from '../activations';\nimport { imageDataFormat } from '../backend/common';\nimport * as K from '../backend/tfjs_backend';\nimport { checkDataFormat, checkPaddingMode } from '../common';\nimport { getConstraint, serializeConstraint } from '../constraints';\nimport { InputSpec, Layer } from '../engine/topology';\nimport { NotImplementedError, ValueError } from '../errors';\nimport { getInitializer, serializeInitializer } from '../initializers';\nimport { getRegularizer, serializeRegularizer } from '../regularizers';\nimport { convOutputLength, deconvLength, normalizeArray } from '../utils/conv_utils';\nimport * as generic_utils from '../utils/generic_utils';\nimport { getExactlyOneShape, getExactlyOneTensor } from '../utils/types_utils';\n/**\n * Transpose and cast the input before the conv2d.\n * @param x Input image tensor.\n * @param dataFormat\n */\nexport function preprocessConv2DInput(x, dataFormat) {\n // TODO(cais): Cast type to float32 if not.\n return tidy(() => {\n checkDataFormat(dataFormat);\n if (dataFormat === 'channelsFirst') {\n return tfc.transpose(x, [0, 2, 3, 1]); // NCHW -> NHWC.\n }\n else {\n return x;\n }\n });\n}\n/**\n * Transpose and cast the input before the conv3d.\n * @param x Input image tensor.\n * @param dataFormat\n */\nexport function preprocessConv3DInput(x, dataFormat) {\n return tidy(() => {\n checkDataFormat(dataFormat);\n if (dataFormat === 'channelsFirst') {\n return tfc.transpose(x, [0, 2, 3, 4, 1]); // NCDHW -> NDHWC.\n }\n else {\n return x;\n }\n });\n}\n/**\n * 1D-convolution with bias added.\n *\n * Porting Note: This function does not exist in the Python Keras backend.\n * It is exactly the same as `conv2d`, except the added `bias`.\n *\n * @param x Input tensor, rank-3, of shape `[batchSize, width, inChannels]`.\n * @param kernel Kernel, rank-3, of shape `[filterWidth, inDepth, 
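// Illustrative worked example of convOutputLength() defined above, for
// inputLength=32, filterSize=3, stride=2, dilation=1:
//   dilatedFilterSize = 3 + (3 - 1) * (1 - 1) = 3
//   'valid': outputLength = 32 - 3 + 1 = 30, then floor((30 + 2 - 1) / 2) = 15
//   'same' : outputLength = 32,              then floor((32 + 2 - 1) / 2) = 16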
outDepth]`.\n * @param bias Bias, rank-3, of shape `[outDepth]`.\n * @param strides\n * @param padding Padding mode.\n * @param dataFormat Data format.\n * @param dilationRate\n * @returns The result of the 1D convolution.\n * @throws ValueError, if `x`, `kernel` or `bias` is not of the correct rank.\n */\nexport function conv1dWithBias(x, kernel, bias, strides = 1, padding = 'valid', dataFormat, dilationRate = 1) {\n return tidy(() => {\n if (dataFormat == null) {\n dataFormat = imageDataFormat();\n }\n checkDataFormat(dataFormat);\n // Check the ranks of x, kernel and bias.\n if (x.shape.length !== 3) {\n throw new ValueError(`The input of a conv1dWithBias operation should be 3, but is ` +\n `${x.shape.length} instead.`);\n }\n if (kernel.shape.length !== 3) {\n throw new ValueError(`The kernel for a conv1dWithBias operation should be 3, but is ` +\n `${kernel.shape.length} instead`);\n }\n if (bias != null && bias.shape.length !== 1) {\n throw new ValueError(`The bias for a conv1dWithBias operation should be 1, but is ` +\n `${kernel.shape.length} instead`);\n }\n // TODO(cais): Support CAUSAL padding mode.\n if (dataFormat === 'channelsFirst') {\n x = tfc.transpose(x, [0, 2, 1]); // NCW -> NWC.\n }\n if (padding === 'causal') {\n throw new NotImplementedError('The support for CAUSAL padding mode in conv1dWithBias is not ' +\n 'implemented yet.');\n }\n let y = tfc.conv1d(x, kernel, strides, padding === 'same' ? 'same' : 'valid', 'NWC', dilationRate);\n if (bias != null) {\n y = K.biasAdd(y, bias);\n }\n return y;\n });\n}\n/**\n * 1D-convolution.\n *\n * @param x Input tensor, rank-3, of shape `[batchSize, width, inChannels]`.\n * @param kernel Kernel, rank-3, of shape `[filterWidth, inDepth, outDepth]`.s\n * @param strides\n * @param padding Padding mode.\n * @param dataFormat Data format.\n * @param dilationRate\n * @returns The result of the 1D convolution.\n * @throws ValueError, if `x`, `kernel` or `bias` is not of the correct rank.\n */\nexport function conv1d(x, kernel, strides = 1, padding = 'valid', dataFormat, dilationRate = 1) {\n return tidy(() => {\n checkDataFormat(dataFormat);\n return conv1dWithBias(x, kernel, null, strides, padding, dataFormat, dilationRate);\n });\n}\n/**\n * 2D Convolution\n * @param x\n * @param kernel kernel of the convolution.\n * @param strides strides array.\n * @param padding padding mode. Default to 'valid'.\n * @param dataFormat data format. Defaults to 'channelsLast'.\n * @param dilationRate dilation rate array.\n * @returns Result of the 2D pooling.\n */\nexport function conv2d(x, kernel, strides = [1, 1], padding = 'valid', dataFormat, dilationRate) {\n return tidy(() => {\n checkDataFormat(dataFormat);\n return conv2dWithBiasActivation(x, kernel, null, strides, padding, dataFormat, dilationRate);\n });\n}\n/**\n * 2D Convolution with an added bias and optional activation.\n * Note: This function does not exist in the Python Keras Backend. 
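// Illustrative sketch (assumes global `tf`): the public conv layers route through
// the backend helpers above (e.g. tf.layers.conv1d uses conv1dWithBias for rank 1).
const convLayer = tf.layers.conv1d({filters: 2, kernelSize: 3, inputShape: [10, 4]});
convLayer.apply(tf.zeros([1, 10, 4])).print(); // shape [1, 8, 2] with 'valid' padding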
This function\n * is exactly the same as `conv2d`, except the added `bias`.\n */\nexport function conv2dWithBiasActivation(x, kernel, bias, strides = [1, 1], padding = 'valid', dataFormat, dilationRate, activation = null) {\n return tidy(() => {\n if (dataFormat == null) {\n dataFormat = imageDataFormat();\n }\n checkDataFormat(dataFormat);\n if (x.rank !== 3 && x.rank !== 4) {\n throw new ValueError(`conv2dWithBiasActivation expects input to be of rank 3 or 4, ` +\n `but received ${x.rank}.`);\n }\n if (kernel.rank !== 3 && kernel.rank !== 4) {\n throw new ValueError(`conv2dWithBiasActivation expects kernel to be of rank 3 or 4, ` +\n `but received ${x.rank}.`);\n }\n let y = preprocessConv2DInput(x, dataFormat);\n if (padding === 'causal') {\n throw new NotImplementedError('The support for CAUSAL padding mode in conv1dWithBias is not ' +\n 'implemented yet.');\n }\n y = tfc.fused.conv2d({\n x: y,\n filter: kernel,\n strides: strides,\n pad: padding === 'same' ? 'same' : 'valid',\n dilations: dilationRate,\n dataFormat: 'NHWC',\n bias,\n activation\n });\n if (dataFormat === 'channelsFirst') {\n y = tfc.transpose(y, [0, 3, 1, 2]);\n }\n return y;\n });\n}\n/**\n * 3D Convolution.\n * @param x\n * @param kernel kernel of the convolution.\n * @param strides strides array.\n * @param padding padding mode. Default to 'valid'.\n * @param dataFormat data format. Defaults to 'channelsLast'.\n * @param dilationRate dilation rate array.\n * @returns Result of the 3D convolution.\n */\nexport function conv3d(x, kernel, strides = [1, 1, 1], padding = 'valid', dataFormat, dilationRate) {\n return tidy(() => {\n checkDataFormat(dataFormat);\n return conv3dWithBias(x, kernel, null, strides, padding, dataFormat, dilationRate);\n });\n}\n/**\n * 3D Convolution with an added bias.\n * Note: This function does not exist in the Python Keras Backend. This function\n * is exactly the same as `conv3d`, except the added `bias`.\n */\nexport function conv3dWithBias(x, kernel, bias, strides = [1, 1, 1], padding = 'valid', dataFormat, dilationRate) {\n return tidy(() => {\n if (dataFormat == null) {\n dataFormat = imageDataFormat();\n }\n checkDataFormat(dataFormat);\n if (x.rank !== 4 && x.rank !== 5) {\n throw new ValueError(`conv3dWithBias expects input to be of rank 4 or 5, but received ` +\n `${x.rank}.`);\n }\n if (kernel.rank !== 4 && kernel.rank !== 5) {\n throw new ValueError(`conv3dWithBias expects kernel to be of rank 4 or 5, but received ` +\n `${x.rank}.`);\n }\n let y = preprocessConv3DInput(x, dataFormat);\n if (padding === 'causal') {\n throw new NotImplementedError('The support for CAUSAL padding mode in conv3dWithBias is not ' +\n 'implemented yet.');\n }\n y = tfc.conv3d(y, kernel, strides, padding === 'same' ? 
'same' : 'valid', 'NDHWC', dilationRate);\n if (bias != null) {\n y = K.biasAdd(y, bias);\n }\n if (dataFormat === 'channelsFirst') {\n y = tfc.transpose(y, [0, 4, 1, 2, 3]);\n }\n return y;\n });\n}\n/**\n * Abstract convolution layer.\n */\nexport class BaseConv extends Layer {\n constructor(rank, args) {\n super(args);\n this.bias = null;\n this.DEFAULT_KERNEL_INITIALIZER = 'glorotNormal';\n this.DEFAULT_BIAS_INITIALIZER = 'zeros';\n BaseConv.verifyArgs(args);\n this.rank = rank;\n generic_utils.assertPositiveInteger(this.rank, 'rank');\n if (this.rank !== 1 && this.rank !== 2 && this.rank !== 3) {\n throw new NotImplementedError(`Convolution layer for rank other than 1, 2, or 3 (${this.rank}) is ` +\n `not implemented yet.`);\n }\n this.kernelSize = normalizeArray(args.kernelSize, rank, 'kernelSize');\n this.strides = normalizeArray(args.strides == null ? 1 : args.strides, rank, 'strides');\n this.padding = args.padding == null ? 'valid' : args.padding;\n checkPaddingMode(this.padding);\n this.dataFormat =\n args.dataFormat == null ? 'channelsLast' : args.dataFormat;\n checkDataFormat(this.dataFormat);\n this.activation = getActivation(args.activation);\n this.useBias = args.useBias == null ? true : args.useBias;\n this.biasInitializer =\n getInitializer(args.biasInitializer || this.DEFAULT_BIAS_INITIALIZER);\n this.biasConstraint = getConstraint(args.biasConstraint);\n this.biasRegularizer = getRegularizer(args.biasRegularizer);\n this.activityRegularizer = getRegularizer(args.activityRegularizer);\n this.dilationRate = normalizeArray(args.dilationRate == null ? 1 : args.dilationRate, rank, 'dilationRate');\n if (this.rank === 1 &&\n (Array.isArray(this.dilationRate) && this.dilationRate.length !== 1)) {\n throw new ValueError(`dilationRate must be a number or an array of a single number ` +\n `for 1D convolution, but received ` +\n `${JSON.stringify(this.dilationRate)}`);\n }\n else if (this.rank === 2) {\n if (typeof this.dilationRate === 'number') {\n this.dilationRate = [this.dilationRate, this.dilationRate];\n }\n else if (this.dilationRate.length !== 2) {\n throw new ValueError(`dilationRate must be a number or array of two numbers for 2D ` +\n `convolution, but received ${JSON.stringify(this.dilationRate)}`);\n }\n }\n else if (this.rank === 3) {\n if (typeof this.dilationRate === 'number') {\n this.dilationRate =\n [this.dilationRate, this.dilationRate, this.dilationRate];\n }\n else if (this.dilationRate.length !== 3) {\n throw new ValueError(`dilationRate must be a number or array of three numbers for 3D ` +\n `convolution, but received ${JSON.stringify(this.dilationRate)}`);\n }\n }\n }\n static verifyArgs(args) {\n // Check config.kernelSize type and shape.\n generic_utils.assert('kernelSize' in args, `required key 'kernelSize' not in config`);\n if (typeof args.kernelSize !== 'number' &&\n !generic_utils.checkArrayTypeAndLength(args.kernelSize, 'number', 1, 3)) {\n throw new ValueError(`BaseConv expects config.kernelSize to be number or number[] with ` +\n `length 1, 2, or 3, but received ${JSON.stringify(args.kernelSize)}.`);\n }\n }\n getConfig() {\n const config = {\n kernelSize: this.kernelSize,\n strides: this.strides,\n padding: this.padding,\n dataFormat: this.dataFormat,\n dilationRate: this.dilationRate,\n activation: serializeActivation(this.activation),\n useBias: this.useBias,\n biasInitializer: serializeInitializer(this.biasInitializer),\n biasRegularizer: serializeRegularizer(this.biasRegularizer),\n activityRegularizer: 
serializeRegularizer(this.activityRegularizer),\n biasConstraint: serializeConstraint(this.biasConstraint)\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/**\n * Abstract nD convolution layer. Ancestor of convolution layers which reduce\n * across channels, i.e., Conv1D and Conv2D, but not DepthwiseConv2D.\n */\nexport class Conv extends BaseConv {\n constructor(rank, args) {\n super(rank, args);\n this.kernel = null;\n Conv.verifyArgs(args);\n this.filters = args.filters;\n generic_utils.assertPositiveInteger(this.filters, 'filters');\n this.kernelInitializer = getInitializer(args.kernelInitializer || this.DEFAULT_KERNEL_INITIALIZER);\n this.kernelConstraint = getConstraint(args.kernelConstraint);\n this.kernelRegularizer = getRegularizer(args.kernelRegularizer);\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const channelAxis = this.dataFormat === 'channelsFirst' ? 1 : inputShape.length - 1;\n if (inputShape[channelAxis] == null) {\n throw new ValueError(`The channel dimension of the input should be defined. ` +\n `Found ${inputShape[channelAxis]}`);\n }\n const inputDim = inputShape[channelAxis];\n const kernelShape = this.kernelSize.concat([inputDim, this.filters]);\n this.kernel = this.addWeight('kernel', kernelShape, null, this.kernelInitializer, this.kernelRegularizer, true, this.kernelConstraint);\n if (this.useBias) {\n this.bias = this.addWeight('bias', [this.filters], null, this.biasInitializer, this.biasRegularizer, true, this.biasConstraint);\n }\n this.inputSpec = [{ ndim: this.rank + 2, axes: { [channelAxis]: inputDim } }];\n this.built = true;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n inputs = getExactlyOneTensor(inputs);\n let outputs;\n const biasValue = this.bias == null ? null : this.bias.read();\n const fusedActivationName = generic_utils.mapActivationToFusedKernel(this.activation.getClassName());\n if (fusedActivationName != null && this.rank === 2) {\n outputs = conv2dWithBiasActivation(inputs, this.kernel.read(), biasValue, this.strides, this.padding, this.dataFormat, this.dilationRate, fusedActivationName);\n }\n else {\n if (this.rank === 1) {\n outputs = conv1dWithBias(inputs, this.kernel.read(), biasValue, this.strides[0], this.padding, this.dataFormat, this.dilationRate[0]);\n }\n else if (this.rank === 2) {\n // TODO(cais): Move up to constructor.\n outputs = conv2dWithBiasActivation(inputs, this.kernel.read(), biasValue, this.strides, this.padding, this.dataFormat, this.dilationRate);\n }\n else if (this.rank === 3) {\n outputs = conv3dWithBias(inputs, this.kernel.read(), biasValue, this.strides, this.padding, this.dataFormat, this.dilationRate);\n }\n else {\n throw new NotImplementedError('convolutions greater than 3D are not implemented yet.');\n }\n if (this.activation != null) {\n outputs = this.activation.apply(outputs);\n }\n }\n return outputs;\n });\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const newSpace = [];\n const space = (this.dataFormat === 'channelsLast') ?\n inputShape.slice(1, inputShape.length - 1) :\n inputShape.slice(2);\n for (let i = 0; i < space.length; ++i) {\n const newDim = convOutputLength(space[i], this.kernelSize[i], this.padding, this.strides[i], typeof this.dilationRate === 'number' ? 
this.dilationRate :\n this.dilationRate[i]);\n newSpace.push(newDim);\n }\n let outputShape = [inputShape[0]];\n if (this.dataFormat === 'channelsLast') {\n outputShape = outputShape.concat(newSpace);\n outputShape.push(this.filters);\n }\n else {\n outputShape.push(this.filters);\n outputShape = outputShape.concat(newSpace);\n }\n return outputShape;\n }\n getConfig() {\n const config = {\n filters: this.filters,\n kernelInitializer: serializeInitializer(this.kernelInitializer),\n kernelRegularizer: serializeRegularizer(this.kernelRegularizer),\n kernelConstraint: serializeConstraint(this.kernelConstraint)\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n static verifyArgs(args) {\n // Check config.filters type, shape, and value.\n if (!('filters' in args) || typeof args.filters !== 'number' ||\n args.filters < 1) {\n throw new ValueError(`Convolution layer expected config.filters to be a 'number' > 0 ` +\n `but got ${JSON.stringify(args.filters)}`);\n }\n }\n}\nexport class Conv2D extends Conv {\n constructor(args) {\n super(2, args);\n Conv2D.verifyArgs(args);\n }\n getConfig() {\n const config = super.getConfig();\n delete config['rank'];\n return config;\n }\n static verifyArgs(args) {\n // config.kernelSize must be a number or array of numbers.\n if ((typeof args.kernelSize !== 'number') &&\n !generic_utils.checkArrayTypeAndLength(args.kernelSize, 'number', 1, 2)) {\n throw new ValueError(`Conv2D expects config.kernelSize to be number or number[] with ` +\n `length 1 or 2, but received ${JSON.stringify(args.kernelSize)}.`);\n }\n }\n}\n/** @nocollapse */\nConv2D.className = 'Conv2D';\nserialization.registerClass(Conv2D);\nexport class Conv3D extends Conv {\n constructor(args) {\n super(3, args);\n Conv3D.verifyArgs(args);\n }\n getConfig() {\n const config = super.getConfig();\n delete config['rank'];\n return config;\n }\n static verifyArgs(args) {\n // config.kernelSize must be a number or array of numbers.\n if (typeof args.kernelSize !== 'number') {\n if (!(Array.isArray(args.kernelSize) &&\n (args.kernelSize.length === 1 || args.kernelSize.length === 3))) {\n throw new ValueError(`Conv3D expects config.kernelSize to be number or` +\n ` [number, number, number], but received ${JSON.stringify(args.kernelSize)}.`);\n }\n }\n }\n}\n/** @nocollapse */\nConv3D.className = 'Conv3D';\nserialization.registerClass(Conv3D);\nexport class Conv2DTranspose extends Conv2D {\n constructor(args) {\n super(args);\n this.inputSpec = [new InputSpec({ ndim: 4 })];\n if (this.padding !== 'same' && this.padding !== 'valid') {\n throw new ValueError(`Conv2DTranspose currently supports only padding modes 'same' ` +\n `and 'valid', but received padding mode ${this.padding}`);\n }\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n if (inputShape.length !== 4) {\n throw new ValueError('Input should have rank 4; Received input shape: ' +\n JSON.stringify(inputShape));\n }\n const channelAxis = this.dataFormat === 'channelsFirst' ? 1 : inputShape.length - 1;\n if (inputShape[channelAxis] == null) {\n throw new ValueError('The channel dimension of the inputs should be defined. 
' +\n 'Found `None`.');\n }\n const inputDim = inputShape[channelAxis];\n const kernelShape = this.kernelSize.concat([this.filters, inputDim]);\n this.kernel = this.addWeight('kernel', kernelShape, 'float32', this.kernelInitializer, this.kernelRegularizer, true, this.kernelConstraint);\n if (this.useBias) {\n this.bias = this.addWeight('bias', [this.filters], 'float32', this.biasInitializer, this.biasRegularizer, true, this.biasConstraint);\n }\n // Set input spec.\n this.inputSpec =\n [new InputSpec({ ndim: 4, axes: { [channelAxis]: inputDim } })];\n this.built = true;\n }\n call(inputs, kwargs) {\n return tfc.tidy(() => {\n let input = getExactlyOneTensor(inputs);\n if (input.shape.length !== 4) {\n throw new ValueError(`Conv2DTranspose.call() expects input tensor to be rank-4, but ` +\n `received a tensor of rank-${input.shape.length}`);\n }\n const inputShape = input.shape;\n const batchSize = inputShape[0];\n let hAxis;\n let wAxis;\n if (this.dataFormat === 'channelsFirst') {\n hAxis = 2;\n wAxis = 3;\n }\n else {\n hAxis = 1;\n wAxis = 2;\n }\n const height = inputShape[hAxis];\n const width = inputShape[wAxis];\n const kernelH = this.kernelSize[0];\n const kernelW = this.kernelSize[1];\n const strideH = this.strides[0];\n const strideW = this.strides[1];\n // Infer the dynamic output shape.\n const outHeight = deconvLength(height, strideH, kernelH, this.padding);\n const outWidth = deconvLength(width, strideW, kernelW, this.padding);\n // Porting Note: We don't branch based on `this.dataFormat` here,\n // because\n // the tjfs-core function `conv2dTranspose` called below always\n // assumes channelsLast.\n const outputShape = [batchSize, outHeight, outWidth, this.filters];\n if (this.dataFormat !== 'channelsLast') {\n input = tfc.transpose(input, [0, 2, 3, 1]);\n }\n let outputs = tfc.conv2dTranspose(input, this.kernel.read(), outputShape, this.strides, this.padding);\n if (this.dataFormat !== 'channelsLast') {\n outputs = tfc.transpose(outputs, [0, 3, 1, 2]);\n }\n if (this.bias != null) {\n outputs =\n K.biasAdd(outputs, this.bias.read(), this.dataFormat);\n }\n if (this.activation != null) {\n outputs = this.activation.apply(outputs);\n }\n return outputs;\n });\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const outputShape = inputShape.slice();\n let channelAxis;\n let heightAxis;\n let widthAxis;\n if (this.dataFormat === 'channelsFirst') {\n channelAxis = 1;\n heightAxis = 2;\n widthAxis = 3;\n }\n else {\n channelAxis = 3;\n heightAxis = 1;\n widthAxis = 2;\n }\n const kernelH = this.kernelSize[0];\n const kernelW = this.kernelSize[1];\n const strideH = this.strides[0];\n const strideW = this.strides[1];\n outputShape[channelAxis] = this.filters;\n outputShape[heightAxis] =\n deconvLength(outputShape[heightAxis], strideH, kernelH, this.padding);\n outputShape[widthAxis] =\n deconvLength(outputShape[widthAxis], strideW, kernelW, this.padding);\n return outputShape;\n }\n getConfig() {\n const config = super.getConfig();\n delete config['dilationRate'];\n return config;\n }\n}\n/** @nocollapse */\nConv2DTranspose.className = 'Conv2DTranspose';\nserialization.registerClass(Conv2DTranspose);\nexport class SeparableConv extends Conv {\n constructor(rank, config) {\n super(rank, config);\n this.DEFAULT_DEPTHWISE_INITIALIZER = 'glorotUniform';\n this.DEFAULT_POINTWISE_INITIALIZER = 'glorotUniform';\n this.depthwiseKernel = null;\n this.pointwiseKernel = null;\n if (config.filters == null) {\n throw new ValueError('The `filters` 
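// Usage sketch for Conv2DTranspose above, assuming the tf.layers.conv2dTranspose
// factory from '@tensorflow/tfjs'; with 'same' padding the spatial dims scale by `strides`.
import * as tf from '@tensorflow/tfjs';

const deconv = tf.layers.conv2dTranspose({
  inputShape: [7, 7, 64],
  filters: 32,
  kernelSize: 3,
  strides: 2,
  padding: 'same',
});
const upModel = tf.sequential({layers: [deconv]});
console.log(upModel.predict(tf.zeros([1, 7, 7, 64])).shape);  // [1, 14, 14, 32]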
configuration field is required by SeparableConv, ' +\n 'but is unspecified.');\n }\n if (config.kernelInitializer != null || config.kernelRegularizer != null ||\n config.kernelConstraint != null) {\n throw new ValueError('Fields kernelInitializer, kernelRegularizer and kernelConstraint ' +\n 'are invalid for SeparableConv2D. Use depthwiseInitializer, ' +\n 'depthwiseRegularizer, depthwiseConstraint, pointwiseInitializer, ' +\n 'pointwiseRegularizer and pointwiseConstraint instead.');\n }\n if (config.padding != null && config.padding !== 'same' &&\n config.padding !== 'valid') {\n throw new ValueError(`SeparableConv${this.rank}D supports only padding modes: ` +\n `'same' and 'valid', but received ${JSON.stringify(config.padding)}`);\n }\n this.depthMultiplier =\n config.depthMultiplier == null ? 1 : config.depthMultiplier;\n this.depthwiseInitializer = getInitializer(config.depthwiseInitializer || this.DEFAULT_DEPTHWISE_INITIALIZER);\n this.depthwiseRegularizer = getRegularizer(config.depthwiseRegularizer);\n this.depthwiseConstraint = getConstraint(config.depthwiseConstraint);\n this.pointwiseInitializer = getInitializer(config.depthwiseInitializer || this.DEFAULT_POINTWISE_INITIALIZER);\n this.pointwiseRegularizer = getRegularizer(config.pointwiseRegularizer);\n this.pointwiseConstraint = getConstraint(config.pointwiseConstraint);\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n if (inputShape.length < this.rank + 2) {\n throw new ValueError(`Inputs to SeparableConv${this.rank}D should have rank ` +\n `${this.rank + 2}, but received input shape: ` +\n `${JSON.stringify(inputShape)}`);\n }\n const channelAxis = this.dataFormat === 'channelsFirst' ? 1 : inputShape.length - 1;\n if (inputShape[channelAxis] == null || inputShape[channelAxis] < 0) {\n throw new ValueError(`The channel dimension of the inputs should be defined, ` +\n `but found ${JSON.stringify(inputShape[channelAxis])}`);\n }\n const inputDim = inputShape[channelAxis];\n const depthwiseKernelShape = this.kernelSize.concat([inputDim, this.depthMultiplier]);\n const pointwiseKernelShape = [];\n for (let i = 0; i < this.rank; ++i) {\n pointwiseKernelShape.push(1);\n }\n pointwiseKernelShape.push(inputDim * this.depthMultiplier, this.filters);\n const trainable = true;\n this.depthwiseKernel = this.addWeight('depthwise_kernel', depthwiseKernelShape, 'float32', this.depthwiseInitializer, this.depthwiseRegularizer, trainable, this.depthwiseConstraint);\n this.pointwiseKernel = this.addWeight('pointwise_kernel', pointwiseKernelShape, 'float32', this.pointwiseInitializer, this.pointwiseRegularizer, trainable, this.pointwiseConstraint);\n if (this.useBias) {\n this.bias = this.addWeight('bias', [this.filters], 'float32', this.biasInitializer, this.biasRegularizer, trainable, this.biasConstraint);\n }\n else {\n this.bias = null;\n }\n this.inputSpec =\n [new InputSpec({ ndim: this.rank + 2, axes: { [channelAxis]: inputDim } })];\n this.built = true;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n inputs = getExactlyOneTensor(inputs);\n let output;\n if (this.rank === 1) {\n throw new NotImplementedError('1D separable convolution is not implemented yet.');\n }\n else if (this.rank === 2) {\n if (this.dataFormat === 'channelsFirst') {\n inputs = tfc.transpose(inputs, [0, 2, 3, 1]); // NCHW -> NHWC.\n }\n output = tfc.separableConv2d(inputs, this.depthwiseKernel.read(), this.pointwiseKernel.read(), this.strides, this.padding, this.dilationRate, 'NHWC');\n }\n if (this.useBias) {\n output = 
K.biasAdd(output, this.bias.read(), this.dataFormat);\n }\n if (this.activation != null) {\n output = this.activation.apply(output);\n }\n if (this.dataFormat === 'channelsFirst') {\n output = tfc.transpose(output, [0, 3, 1, 2]); // NHWC -> NCHW.\n }\n return output;\n });\n }\n getConfig() {\n const config = super.getConfig();\n delete config['rank'];\n delete config['kernelInitializer'];\n delete config['kernelRegularizer'];\n delete config['kernelConstraint'];\n config['depthwiseInitializer'] =\n serializeInitializer(this.depthwiseInitializer);\n config['pointwiseInitializer'] =\n serializeInitializer(this.pointwiseInitializer);\n config['depthwiseRegularizer'] =\n serializeRegularizer(this.depthwiseRegularizer);\n config['pointwiseRegularizer'] =\n serializeRegularizer(this.pointwiseRegularizer);\n config['depthwiseConstraint'] =\n serializeConstraint(this.depthwiseConstraint);\n config['pointwiseConstraint'] =\n serializeConstraint(this.pointwiseConstraint);\n return config;\n }\n}\n/** @nocollapse */\nSeparableConv.className = 'SeparableConv';\nexport class SeparableConv2D extends SeparableConv {\n constructor(args) {\n super(2, args);\n }\n}\n/** @nocollapse */\nSeparableConv2D.className = 'SeparableConv2D';\nserialization.registerClass(SeparableConv2D);\nexport class Conv1D extends Conv {\n constructor(args) {\n super(1, args);\n Conv1D.verifyArgs(args);\n this.inputSpec = [{ ndim: 3 }];\n }\n getConfig() {\n const config = super.getConfig();\n delete config['rank'];\n delete config['dataFormat'];\n return config;\n }\n static verifyArgs(args) {\n // config.kernelSize must be a number or array of numbers.\n if (typeof args.kernelSize !== 'number' &&\n !generic_utils.checkArrayTypeAndLength(args.kernelSize, 'number', 1, 1)) {\n throw new ValueError(`Conv1D expects config.kernelSize to be number or number[] with ` +\n `length 1, but received ${JSON.stringify(args.kernelSize)}.`);\n }\n }\n}\n/** @nocollapse */\nConv1D.className = 'Conv1D';\nserialization.registerClass(Conv1D);\nexport class Cropping2D extends Layer {\n constructor(args) {\n super(args);\n if (typeof args.cropping === 'number') {\n this.cropping =\n [[args.cropping, args.cropping], [args.cropping, args.cropping]];\n }\n else if (typeof args.cropping[0] === 'number') {\n this.cropping = [\n [args.cropping[0], args.cropping[0]],\n [args.cropping[1], args.cropping[1]]\n ];\n }\n else {\n this.cropping = args.cropping;\n }\n this.dataFormat =\n args.dataFormat === undefined ? 
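// Usage sketch for SeparableConv2D and Conv1D above, assuming the
// tf.layers.separableConv2d and tf.layers.conv1d factories from '@tensorflow/tfjs'.
import * as tf from '@tensorflow/tfjs';

// Depthwise step (depthMultiplier filters per input channel) followed by a
// 1x1 pointwise projection to `filters` output channels.
const sep = tf.sequential({layers: [tf.layers.separableConv2d({
  inputShape: [32, 32, 3], filters: 16, kernelSize: 3, depthMultiplier: 1, padding: 'same',
})]});
console.log(sep.predict(tf.zeros([1, 32, 32, 3])).shape);  // [1, 32, 32, 16]

// Conv1D over a channelsLast input of shape [timeSteps, channels].
const c1d = tf.sequential({layers: [tf.layers.conv1d({
  inputShape: [100, 8], filters: 4, kernelSize: 5, padding: 'valid',
})]});
console.log(c1d.predict(tf.zeros([1, 100, 8])).shape);  // [1, 96, 4]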
'channelsLast' : args.dataFormat;\n this.inputSpec = [{ ndim: 4 }];\n }\n computeOutputShape(inputShape) {\n if (this.dataFormat === 'channelsFirst') {\n return [\n inputShape[0], inputShape[1],\n inputShape[2] - this.cropping[0][0] - this.cropping[0][1],\n inputShape[3] - this.cropping[1][0] - this.cropping[1][1]\n ];\n }\n else {\n return [\n inputShape[0],\n inputShape[1] - this.cropping[0][0] - this.cropping[0][1],\n inputShape[2] - this.cropping[1][0] - this.cropping[1][1], inputShape[3]\n ];\n }\n }\n call(inputs, kwargs) {\n return tidy(() => {\n inputs = getExactlyOneTensor(inputs);\n if (this.dataFormat === 'channelsLast') {\n const hSliced = K.sliceAlongAxis(inputs, this.cropping[0][0], inputs.shape[1] - this.cropping[0][0] - this.cropping[0][1], 2);\n return K.sliceAlongAxis(hSliced, this.cropping[1][0], inputs.shape[2] - this.cropping[1][1] - this.cropping[1][0], 3);\n }\n else {\n const hSliced = K.sliceAlongAxis(inputs, this.cropping[0][0], inputs.shape[2] - this.cropping[0][0] - this.cropping[0][1], 3);\n return K.sliceAlongAxis(hSliced, this.cropping[1][0], inputs.shape[3] - this.cropping[1][1] - this.cropping[1][0], 4);\n }\n });\n }\n getConfig() {\n const config = { cropping: this.cropping, dataFormat: this.dataFormat };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nCropping2D.className = 'Cropping2D';\nserialization.registerClass(Cropping2D);\nexport class UpSampling2D extends Layer {\n constructor(args) {\n super(args);\n this.DEFAULT_SIZE = [2, 2];\n this.inputSpec = [{ ndim: 4 }];\n this.size = args.size == null ? this.DEFAULT_SIZE : args.size;\n this.dataFormat =\n args.dataFormat == null ? 'channelsLast' : args.dataFormat;\n }\n computeOutputShape(inputShape) {\n if (this.dataFormat === 'channelsFirst') {\n const height = inputShape[2] == null ? null : this.size[0] * inputShape[2];\n const width = inputShape[3] == null ? null : this.size[1] * inputShape[3];\n return [inputShape[0], inputShape[1], height, width];\n }\n else {\n const height = inputShape[1] == null ? null : this.size[0] * inputShape[1];\n const width = inputShape[2] == null ? 
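// Usage sketch for Cropping2D above, assuming the tf.layers.cropping2D factory
// from '@tensorflow/tfjs'; cropping is [[top, bottom], [left, right]].
import * as tf from '@tensorflow/tfjs';

const crop = tf.sequential({layers: [tf.layers.cropping2D({
  inputShape: [28, 28, 3],
  cropping: [[2, 2], [4, 4]],   // remove 2 rows top/bottom, 4 columns left/right
})]});
console.log(crop.predict(tf.zeros([1, 28, 28, 3])).shape);  // [1, 24, 20, 3]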
null : this.size[1] * inputShape[2];\n return [inputShape[0], height, width, inputShape[3]];\n }\n }\n call(inputs, kwargs) {\n return tfc.tidy(() => {\n let input = getExactlyOneTensor(inputs);\n const inputShape = input.shape;\n if (this.dataFormat === 'channelsFirst') {\n input = tfc.transpose(input, [0, 2, 3, 1]);\n const height = this.size[0] * inputShape[2];\n const width = this.size[1] * inputShape[3];\n const resized = input.resizeNearestNeighbor([height, width]);\n return tfc.transpose(resized, [0, 3, 1, 2]);\n }\n else {\n const height = this.size[0] * inputShape[1];\n const width = this.size[1] * inputShape[2];\n return input.resizeNearestNeighbor([height, width]);\n }\n });\n }\n getConfig() {\n const config = { size: this.size, dataFormat: this.dataFormat };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nUpSampling2D.className = 'UpSampling2D';\nserialization.registerClass(UpSampling2D);\n//# sourceMappingURL=convolutional.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * TensorFlow.js Layers: Depthwise Convolutional Layers\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { serialization, tidy } from '@tensorflow/tfjs-core';\nimport { imageDataFormat } from '../backend/common';\nimport * as K from '../backend/tfjs_backend';\nimport { checkDataFormat } from '../common';\nimport { getConstraint, serializeConstraint } from '../constraints';\nimport { ValueError } from '../errors';\nimport { getInitializer, serializeInitializer } from '../initializers';\nimport { getRegularizer, serializeRegularizer } from '../regularizers';\nimport { convOutputLength } from '../utils/conv_utils';\nimport { getExactlyOneShape, getExactlyOneTensor } from '../utils/types_utils';\nimport { BaseConv, preprocessConv2DInput } from './convolutional';\n/**\n * 2D convolution with separable filters.\n * @param x Input tensor.\n * @param depthwiseKernel Convolution kernel for depthwise convolution.\n * @param strides Strides (Array of two integers).\n * @param padding Padding model.\n * @param dataFormat Data format.\n * @param dilationRate Array of two integers, dilation rates for the separable\n * convolution.\n * @returns Output tensor.\n * @throws ValueError If depthwiseKernel is not a 4D array.\n */\nexport function depthwiseConv2d(x, depthwiseKernel, strides = [1, 1], padding = 'valid', dataFormat, dilationRate) {\n return tidy(() => {\n if (dataFormat == null) {\n dataFormat = imageDataFormat();\n }\n checkDataFormat(dataFormat);\n let y = preprocessConv2DInput(x, dataFormat);\n if (x.rank !== 4) {\n throw new ValueError(`Input for depthwiseConv2d is required to be 4-D, but is instead ` +\n `${x.rank}-D`);\n }\n if (depthwiseKernel.rank !== 4) {\n throw new ValueError(`depthwiseKernel is required to be 4-D, but is instead ` +\n `${depthwiseKernel.rank}-D`);\n }\n y = tfc.depthwiseConv2d(y, depthwiseKernel, strides, padding === 'same' ? 'same' : 'valid', 'NHWC', dilationRate);\n if (dataFormat === 'channelsFirst') {\n y = tfc.transpose(y, [0, 3, 1, 2]);\n }\n return y;\n });\n}\nexport class DepthwiseConv2D extends BaseConv {\n constructor(args) {\n super(2, args);\n this.depthwiseKernel = null;\n this.depthMultiplier =\n args.depthMultiplier == null ? 
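// Usage sketch for UpSampling2D above, assuming the tf.layers.upSampling2d factory
// from '@tensorflow/tfjs'; upsampling is nearest-neighbor, scaling each spatial
// dimension by the corresponding entry of `size`.
import * as tf from '@tensorflow/tfjs';

const up = tf.sequential({layers: [tf.layers.upSampling2d({
  inputShape: [7, 7, 16],
  size: [2, 2],
})]});
console.log(up.predict(tf.zeros([1, 7, 7, 16])).shape);  // [1, 14, 14, 16]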
1 : args.depthMultiplier;\n this.depthwiseInitializer = getInitializer(args.depthwiseInitializer || this.DEFAULT_KERNEL_INITIALIZER);\n this.depthwiseConstraint = getConstraint(args.depthwiseConstraint);\n this.depthwiseRegularizer = getRegularizer(args.depthwiseRegularizer);\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n if (inputShape.length < 4) {\n throw new ValueError(`Inputs to DepthwiseConv2D should have rank 4. ` +\n `Received input shape: ${JSON.stringify(inputShape)}.`);\n }\n const channelAxis = this.dataFormat === 'channelsFirst' ? 1 : 3;\n if (inputShape[channelAxis] == null || inputShape[channelAxis] < 0) {\n throw new ValueError('The channel dimension of the inputs to DepthwiseConv2D should ' +\n `be defined, but is not (${inputShape[channelAxis]}).`);\n }\n const inputDim = inputShape[channelAxis];\n const depthwiseKernelShape = [\n this.kernelSize[0], this.kernelSize[1], inputDim, this.depthMultiplier\n ];\n this.depthwiseKernel = this.addWeight('depthwise_kernel', depthwiseKernelShape, null, this.depthwiseInitializer, this.depthwiseRegularizer, true, this.depthwiseConstraint);\n if (this.useBias) {\n this.bias = this.addWeight('bias', [inputDim * this.depthMultiplier], null, this.biasInitializer, this.biasRegularizer, true, this.biasConstraint);\n }\n else {\n this.bias = null;\n }\n this.built = true;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n inputs = getExactlyOneTensor(inputs);\n let outputs = depthwiseConv2d(inputs, this.depthwiseKernel.read(), this.strides, this.padding, this.dataFormat, null);\n // TODO(cais): Add support for dilation.\n if (this.useBias) {\n outputs = K.biasAdd(outputs, this.bias.read(), this.dataFormat);\n }\n if (this.activation != null) {\n outputs = this.activation.apply(outputs);\n }\n return outputs;\n });\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const rows = this.dataFormat === 'channelsFirst' ? inputShape[2] : inputShape[1];\n const cols = this.dataFormat === 'channelsFirst' ? 
inputShape[3] : inputShape[2];\n const outFilters = this.dataFormat === 'channelsFirst' ?\n inputShape[1] * this.depthMultiplier :\n inputShape[3] * this.depthMultiplier;\n const outRows = convOutputLength(rows, this.kernelSize[0], this.padding, this.strides[0]);\n const outCols = convOutputLength(cols, this.kernelSize[1], this.padding, this.strides[1]);\n if (this.dataFormat === 'channelsFirst') {\n return [inputShape[0], outFilters, outRows, outCols];\n }\n else {\n // In this case, assume 'channelsLast'.\n return [inputShape[0], outRows, outCols, outFilters];\n }\n }\n getConfig() {\n const config = super.getConfig();\n config['depthMultiplier'] = this.depthMultiplier;\n config['depthwiseInitializer'] =\n serializeInitializer(this.depthwiseInitializer);\n config['depthwiseRegularizer'] =\n serializeRegularizer(this.depthwiseRegularizer);\n config['depthwiseConstraint'] =\n serializeConstraint(this.depthwiseRegularizer);\n return config;\n }\n}\n/** @nocollapse */\nDepthwiseConv2D.className = 'DepthwiseConv2D';\nserialization.registerClass(DepthwiseConv2D);\n//# sourceMappingURL=convolutional_depthwise.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * TensorFlow.js Layers: Recurrent Neural Network Layers.\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { serialization, tidy, util } from '@tensorflow/tfjs-core';\nimport { getActivation, serializeActivation } from '../activations';\nimport * as K from '../backend/tfjs_backend';\nimport { nameScope } from '../common';\nimport { getConstraint, serializeConstraint } from '../constraints';\nimport { InputSpec, SymbolicTensor } from '../engine/topology';\nimport { Layer } from '../engine/topology';\nimport { AttributeError, NotImplementedError, ValueError } from '../errors';\nimport { getInitializer, Initializer, Ones, serializeInitializer } from '../initializers';\nimport { getRegularizer, serializeRegularizer } from '../regularizers';\nimport { assertPositiveInteger } from '../utils/generic_utils';\nimport * as math_utils from '../utils/math_utils';\nimport { getExactlyOneShape, getExactlyOneTensor, isArrayOfShapes } from '../utils/types_utils';\nimport { batchGetValue, batchSetValue } from '../variables';\nimport { deserialize } from './serialization';\n/**\n * Standardize `apply()` args to a single list of tensor inputs.\n *\n * When running a model loaded from file, the input tensors `initialState` and\n * `constants` are passed to `RNN.apply()` as part of `inputs` instead of the\n * dedicated kwargs fields. 
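// Usage sketch for DepthwiseConv2D above, assuming the tf.layers.depthwiseConv2d
// factory from '@tensorflow/tfjs'; each input channel gets `depthMultiplier`
// filters, so output channels = inputChannels * depthMultiplier.
import * as tf from '@tensorflow/tfjs';

const dw = tf.sequential({layers: [tf.layers.depthwiseConv2d({
  inputShape: [64, 64, 3],
  kernelSize: 3,
  depthMultiplier: 2,
  padding: 'same',
})]});
console.log(dw.predict(tf.zeros([1, 64, 64, 3])).shape);  // [1, 64, 64, 6]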
`inputs` consists of\n * `[inputs, initialState0, initialState1, ..., constant0, constant1]` in this\n * case.\n * This method makes sure that arguments are\n * separated and that `initialState` and `constants` are `Array`s of tensors\n * (or None).\n *\n * @param inputs Tensor or `Array` of tensors.\n * @param initialState Tensor or `Array` of tensors or `null`/`undefined`.\n * @param constants Tensor or `Array` of tensors or `null`/`undefined`.\n * @returns An object consisting of\n * inputs: A tensor.\n * initialState: `Array` of tensors or `null`.\n * constants: `Array` of tensors or `null`.\n * @throws ValueError, if `inputs` is an `Array` but either `initialState` or\n * `constants` is provided.\n */\nexport function standardizeArgs(inputs, initialState, constants, numConstants) {\n if (Array.isArray(inputs)) {\n if (initialState != null || constants != null) {\n throw new ValueError('When inputs is an array, neither initialState or constants ' +\n 'should be provided');\n }\n if (numConstants != null) {\n constants = inputs.slice(inputs.length - numConstants, inputs.length);\n inputs = inputs.slice(0, inputs.length - numConstants);\n }\n if (inputs.length > 1) {\n initialState = inputs.slice(1, inputs.length);\n }\n inputs = inputs[0];\n }\n function toListOrNull(x) {\n if (x == null || Array.isArray(x)) {\n return x;\n }\n else {\n return [x];\n }\n }\n initialState = toListOrNull(initialState);\n constants = toListOrNull(constants);\n return { inputs, initialState, constants };\n}\n/**\n * Iterates over the time dimension of a tensor.\n *\n * @param stepFunction RNN step function.\n * Parameters:\n * inputs: tensor with shape `[samples, ...]` (no time dimension),\n * representing input for the batch of samples at a certain time step.\n * states: an Array of tensors.\n * Returns:\n * outputs: tensor with shape `[samples, outputDim]` (no time dimension).\n * newStates: list of tensors, same length and shapes as `states`. The first\n * state in the list must be the output tensor at the previous timestep.\n * @param inputs Tensor of temporal data of shape `[samples, time, ...]` (at\n * least 3D).\n * @param initialStates Tensor with shape `[samples, outputDim]` (no time\n * dimension), containing the initial values of the states used in the step\n * function.\n * @param goBackwards If `true`, do the iteration over the time dimension in\n * reverse order and return the reversed sequence.\n * @param mask Binary tensor with shape `[sample, time, 1]`, with a zero for\n * every element that is masked.\n * @param constants An Array of constant values passed at each step.\n * @param unroll Whether to unroll the RNN or to use a symbolic loop. *Not*\n * applicable to this imperative deeplearn.js backend. Its value is ignored.\n * @param needPerStepOutputs Whether the per-step outputs are to be\n * concatenated into a single tensor and returned (as the second return\n * value). Default: `false`. This arg is included so that the relatively\n * expensive concatenation of the stepwise outputs can be omitted unless\n * the stepwise outputs need to be kept (e.g., for an LSTM layer of which\n * `returnSequence` is `true`.)\n * @returns An Array: `[lastOutput, outputs, newStates]`.\n * lastOutput: the lastest output of the RNN, of shape `[samples, ...]`.\n * outputs: tensor with shape `[samples, time, ...]` where each entry\n * `output[s, t]` is the output of the step function at time `t` for sample\n * `s`. This return value is provided if and only if the\n * `needPerStepOutputs` is set as `true`. 
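// Sketch of what standardizeArgs() above returns when a loaded model passes
// initial states inside `inputs`; x, h0, c0 are placeholder tensors and the
// function is the one exported above (it would need to be imported in real code).
import * as tf from '@tensorflow/tfjs';

const x = tf.zeros([2, 10, 8]);   // [samples, time, features]
const h0 = tf.zeros([2, 4]);
const c0 = tf.zeros([2, 4]);
// With no explicit numConstants, everything after the first entry is treated
// as initial state:
const standardized = standardizeArgs([x, h0, c0], null, null, null);
// standardized.inputs       === x
// standardized.initialState === [h0, c0]
// standardized.constants    === null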
If it is set as `false`, this\n * return value will be `undefined`.\n * newStates: Array of tensors, latest states returned by the step function,\n * of shape `(samples, ...)`.\n * @throws ValueError If input dimension is less than 3.\n *\n * TODO(nielsene): This needs to be tidy-ed.\n */\nexport function rnn(stepFunction, inputs, initialStates, goBackwards = false, mask, constants, unroll = false, needPerStepOutputs = false) {\n return tfc.tidy(() => {\n const ndim = inputs.shape.length;\n if (ndim < 3) {\n throw new ValueError(`Input should be at least 3D, but is ${ndim}D.`);\n }\n // Transpose to time-major, i.e., from [batch, time, ...] to [time, batch,\n // ...].\n const axes = [1, 0].concat(math_utils.range(2, ndim));\n inputs = tfc.transpose(inputs, axes);\n if (constants != null) {\n throw new NotImplementedError('The rnn() functoin of the deeplearn.js backend does not support ' +\n 'constants yet.');\n }\n // Porting Note: the unroll option is ignored by the imperative backend.\n if (unroll) {\n console.warn('Backend rnn(): the unroll = true option is not applicable to the ' +\n 'imperative deeplearn.js backend.');\n }\n if (mask != null) {\n mask = mask.asType('bool').asType('float32');\n if (mask.rank === ndim - 1) {\n mask = tfc.expandDims(mask, -1);\n }\n mask = tfc.transpose(mask, axes);\n }\n if (goBackwards) {\n inputs = tfc.reverse(inputs, 0);\n if (mask != null) {\n mask = tfc.reverse(mask, 0);\n }\n }\n // Porting Note: PyKeras with TensorFlow backend uses a symbolic loop\n // (tf.while_loop). But for the imperative deeplearn.js backend, we just\n // use the usual TypeScript control flow to iterate over the time steps in\n // the inputs.\n // Porting Note: PyKeras patches a \"_use_learning_phase\" attribute to\n // outputs.\n // This is not idiomatic in TypeScript. 
The info regarding whether we are\n // in a learning (i.e., training) phase for RNN is passed in a different\n // way.\n const perStepOutputs = [];\n let lastOutput;\n let states = initialStates;\n const timeSteps = inputs.shape[0];\n const perStepInputs = tfc.unstack(inputs);\n let perStepMasks;\n if (mask != null) {\n perStepMasks = tfc.unstack(mask);\n }\n for (let t = 0; t < timeSteps; ++t) {\n const currentInput = perStepInputs[t];\n const stepOutputs = tfc.tidy(() => stepFunction(currentInput, states));\n if (mask == null) {\n lastOutput = stepOutputs[0];\n states = stepOutputs[1];\n }\n else {\n const maskedOutputs = tfc.tidy(() => {\n const stepMask = perStepMasks[t];\n const negStepMask = tfc.onesLike(stepMask).sub(stepMask);\n // TODO(cais): Would tfc.where() be better for performance?\n const output = stepOutputs[0].mul(stepMask).add(states[0].mul(negStepMask));\n const newStates = states.map((state, i) => {\n return stepOutputs[1][i].mul(stepMask).add(state.mul(negStepMask));\n });\n return { output, newStates };\n });\n lastOutput = maskedOutputs.output;\n states = maskedOutputs.newStates;\n }\n if (needPerStepOutputs) {\n perStepOutputs.push(lastOutput);\n }\n }\n let outputs;\n if (needPerStepOutputs) {\n const axis = 1;\n outputs = tfc.stack(perStepOutputs, axis);\n }\n return [lastOutput, outputs, states];\n });\n}\nexport class RNN extends Layer {\n constructor(args) {\n super(args);\n let cell;\n if (args.cell == null) {\n throw new ValueError('cell property is missing for the constructor of RNN.');\n }\n else if (Array.isArray(args.cell)) {\n cell = new StackedRNNCells({ cells: args.cell });\n }\n else {\n cell = args.cell;\n }\n if (cell.stateSize == null) {\n throw new ValueError('The RNN cell should have an attribute `stateSize` (tuple of ' +\n 'integers, one integer per RNN state).');\n }\n this.cell = cell;\n this.returnSequences =\n args.returnSequences == null ? false : args.returnSequences;\n this.returnState = args.returnState == null ? false : args.returnState;\n this.goBackwards = args.goBackwards == null ? false : args.goBackwards;\n this._stateful = args.stateful == null ? false : args.stateful;\n this.unroll = args.unroll == null ? false : args.unroll;\n this.supportsMasking = true;\n this.inputSpec = [new InputSpec({ ndim: 3 })];\n this.stateSpec = null;\n this.states_ = null;\n // TODO(cais): Add constantsSpec and numConstants.\n this.numConstants = null;\n // TODO(cais): Look into the use of initial_state in the kwargs of the\n // constructor.\n this.keptStates = [];\n }\n // Porting Note: This is the equivalent of `RNN.states` property getter in\n // PyKeras.\n getStates() {\n if (this.states_ == null) {\n const numStates = Array.isArray(this.cell.stateSize) ? 
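// Sketch of driving the rnn() iterator above with a trivial step function that
// keeps a running sum over the time axis; `rnn` is the function exported above
// (it would need to be imported from this module in real code).
import * as tf from '@tensorflow/tfjs';

const step = (input, states) => {
  const carry = tf.add(states[0], input);   // accumulate x_t into the single state
  return [carry, [carry]];                  // [output, newStates]
};
const seqInputs = tf.ones([8, 5, 4]);       // [samples, time, features]
const initialStates = [tf.zeros([8, 4])];
const [lastOutput, perStepOutputs, finalStates] =
    rnn(step, seqInputs, initialStates, false, null, null, false, true);
console.log(lastOutput.shape);       // [8, 4]    (sum over 5 steps of ones -> all 5s)
console.log(perStepOutputs.shape);   // [8, 5, 4] returned because needPerStepOutputs=true
console.log(finalStates[0].shape);   // [8, 4]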
this.cell.stateSize.length : 1;\n return math_utils.range(0, numStates).map(x => null);\n }\n else {\n return this.states_;\n }\n }\n // Porting Note: This is the equivalent of the `RNN.states` property setter in\n // PyKeras.\n setStates(states) {\n this.states_ = states;\n }\n computeOutputShape(inputShape) {\n if (isArrayOfShapes(inputShape)) {\n inputShape = inputShape[0];\n }\n inputShape = inputShape;\n // TODO(cais): Remove the casting once stacked RNN cells become supported.\n let stateSize = this.cell.stateSize;\n if (!Array.isArray(stateSize)) {\n stateSize = [stateSize];\n }\n const outputDim = stateSize[0];\n let outputShape;\n if (this.returnSequences) {\n outputShape = [inputShape[0], inputShape[1], outputDim];\n }\n else {\n outputShape = [inputShape[0], outputDim];\n }\n if (this.returnState) {\n const stateShape = [];\n for (const dim of stateSize) {\n stateShape.push([inputShape[0], dim]);\n }\n return [outputShape].concat(stateShape);\n }\n else {\n return outputShape;\n }\n }\n computeMask(inputs, mask) {\n return tfc.tidy(() => {\n if (Array.isArray(mask)) {\n mask = mask[0];\n }\n const outputMask = this.returnSequences ? mask : null;\n if (this.returnState) {\n const stateMask = this.states.map(s => null);\n return [outputMask].concat(stateMask);\n }\n else {\n return outputMask;\n }\n });\n }\n /**\n * Get the current state tensors of the RNN.\n *\n * If the state hasn't been set, return an array of `null`s of the correct\n * length.\n */\n get states() {\n if (this.states_ == null) {\n const numStates = Array.isArray(this.cell.stateSize) ? this.cell.stateSize.length : 1;\n const output = [];\n for (let i = 0; i < numStates; ++i) {\n output.push(null);\n }\n return output;\n }\n else {\n return this.states_;\n }\n }\n set states(s) {\n this.states_ = s;\n }\n build(inputShape) {\n // Note inputShape will be an Array of Shapes of initial states and\n // constants if these are passed in apply().\n const constantShape = null;\n if (this.numConstants != null) {\n throw new NotImplementedError('Constants support is not implemented in RNN yet.');\n }\n if (isArrayOfShapes(inputShape)) {\n inputShape = inputShape[0];\n }\n inputShape = inputShape;\n const batchSize = this.stateful ? inputShape[0] : null;\n const inputDim = inputShape.slice(2);\n this.inputSpec[0] = new InputSpec({ shape: [batchSize, null, ...inputDim] });\n // Allow cell (if RNNCell Layer) to build before we set or validate\n // stateSpec.\n const stepInputShape = [inputShape[0]].concat(inputShape.slice(2));\n if (constantShape != null) {\n throw new NotImplementedError('Constants support is not implemented in RNN yet.');\n }\n else {\n this.cell.build(stepInputShape);\n }\n // Set or validate stateSpec.\n let stateSize;\n if (Array.isArray(this.cell.stateSize)) {\n stateSize = this.cell.stateSize;\n }\n else {\n stateSize = [this.cell.stateSize];\n }\n if (this.stateSpec != null) {\n if (!util.arraysEqual(this.stateSpec.map(spec => spec.shape[spec.shape.length - 1]), stateSize)) {\n throw new ValueError(`An initialState was passed that is not compatible with ` +\n `cell.stateSize. 
Received stateSpec=${this.stateSpec}; ` +\n `However cell.stateSize is ${this.cell.stateSize}`);\n }\n }\n else {\n this.stateSpec =\n stateSize.map(dim => new InputSpec({ shape: [null, dim] }));\n }\n if (this.stateful) {\n this.resetStates();\n }\n }\n /**\n * Reset the state tensors of the RNN.\n *\n * If the `states` argument is `undefined` or `null`, will set the\n * state tensor(s) of the RNN to all-zero tensors of the appropriate\n * shape(s).\n *\n * If `states` is provided, will set the state tensors of the RNN to its\n * value.\n *\n * @param states Optional externally-provided initial states.\n * @param training Whether this call is done during training. For stateful\n * RNNs, this affects whether the old states are kept or discarded. In\n * particular, if `training` is `true`, the old states will be kept so\n * that subsequent backpropgataion through time (BPTT) may work properly.\n * Else, the old states will be discarded.\n */\n resetStates(states, training = false) {\n tidy(() => {\n if (!this.stateful) {\n throw new AttributeError('Cannot call resetStates() on an RNN Layer that is not stateful.');\n }\n const batchSize = this.inputSpec[0].shape[0];\n if (batchSize == null) {\n throw new ValueError('If an RNN is stateful, it needs to know its batch size. Specify ' +\n 'the batch size of your input tensors: \\n' +\n '- If using a Sequential model, specify the batch size by ' +\n 'passing a `batchInputShape` option to your first layer.\\n' +\n '- If using the functional API, specify the batch size by ' +\n 'passing a `batchShape` option to your Input layer.');\n }\n // Initialize state if null.\n if (this.states_ == null) {\n if (Array.isArray(this.cell.stateSize)) {\n this.states_ =\n this.cell.stateSize.map(dim => tfc.zeros([batchSize, dim]));\n }\n else {\n this.states_ = [tfc.zeros([batchSize, this.cell.stateSize])];\n }\n }\n else if (states == null) {\n // Dispose old state tensors.\n tfc.dispose(this.states_);\n // For stateful RNNs, fully dispose kept old states.\n if (this.keptStates != null) {\n tfc.dispose(this.keptStates);\n this.keptStates = [];\n }\n if (Array.isArray(this.cell.stateSize)) {\n this.states_ =\n this.cell.stateSize.map(dim => tfc.zeros([batchSize, dim]));\n }\n else {\n this.states_[0] = tfc.zeros([batchSize, this.cell.stateSize]);\n }\n }\n else {\n if (!Array.isArray(states)) {\n states = [states];\n }\n if (states.length !== this.states_.length) {\n throw new ValueError(`Layer ${this.name} expects ${this.states_.length} state(s), ` +\n `but it received ${states.length} state value(s). Input ` +\n `received: ${states}`);\n }\n if (training === true) {\n // Store old state tensors for complete disposal later, i.e., during\n // the next no-arg call to this method. 
We do not dispose the old\n // states immediately because that BPTT (among other things) require\n // them.\n this.keptStates.push(this.states_.slice());\n }\n else {\n tfc.dispose(this.states_);\n }\n for (let index = 0; index < this.states_.length; ++index) {\n const value = states[index];\n const dim = Array.isArray(this.cell.stateSize) ?\n this.cell.stateSize[index] :\n this.cell.stateSize;\n const expectedShape = [batchSize, dim];\n if (!util.arraysEqual(value.shape, expectedShape)) {\n throw new ValueError(`State ${index} is incompatible with layer ${this.name}: ` +\n `expected shape=${expectedShape}, received shape=${value.shape}`);\n }\n this.states_[index] = value;\n }\n }\n this.states_ = this.states_.map(state => tfc.keep(state.clone()));\n });\n }\n apply(inputs, kwargs) {\n // TODO(cais): Figure out whether initialState is in kwargs or inputs.\n let initialState = kwargs == null ? null : kwargs['initialState'];\n let constants = kwargs == null ? null : kwargs['constants'];\n if (kwargs == null) {\n kwargs = {};\n }\n const standardized = standardizeArgs(inputs, initialState, constants, this.numConstants);\n inputs = standardized.inputs;\n initialState = standardized.initialState;\n constants = standardized.constants;\n // If any of `initial_state` or `constants` are specified and are\n // `tf.SymbolicTensor`s, then add them to the inputs and temporarily modify\n // the input_spec to include them.\n let additionalInputs = [];\n let additionalSpecs = [];\n if (initialState != null) {\n kwargs['initialState'] = initialState;\n additionalInputs = additionalInputs.concat(initialState);\n this.stateSpec = [];\n for (const state of initialState) {\n this.stateSpec.push(new InputSpec({ shape: state.shape }));\n }\n // TODO(cais): Use the following instead.\n // this.stateSpec = initialState.map(state => new InputSpec({shape:\n // state.shape}));\n additionalSpecs = additionalSpecs.concat(this.stateSpec);\n }\n if (constants != null) {\n kwargs['constants'] = constants;\n additionalInputs = additionalInputs.concat(constants);\n // TODO(cais): Add this.constantsSpec.\n this.numConstants = constants.length;\n }\n const isTensor = additionalInputs[0] instanceof SymbolicTensor;\n if (isTensor) {\n // Compute full input spec, including state and constants.\n const fullInput = [inputs].concat(additionalInputs);\n const fullInputSpec = this.inputSpec.concat(additionalSpecs);\n // Perform the call with temporarily replaced inputSpec.\n const originalInputSpec = this.inputSpec;\n this.inputSpec = fullInputSpec;\n const output = super.apply(fullInput, kwargs);\n this.inputSpec = originalInputSpec;\n return output;\n }\n else {\n return super.apply(inputs, kwargs);\n }\n }\n // tslint:disable-next-line:no-any\n call(inputs, kwargs) {\n // Input shape: `[samples, time (padded with zeros), input_dim]`.\n // Note that the .build() method of subclasses **must** define\n // this.inputSpec and this.stateSpec owith complete input shapes.\n return tidy(() => {\n const mask = kwargs == null ? null : kwargs['mask'];\n const training = kwargs == null ? null : kwargs['training'];\n let initialState = kwargs == null ? null : kwargs['initialState'];\n inputs = getExactlyOneTensor(inputs);\n if (initialState == null) {\n if (this.stateful) {\n initialState = this.states_;\n }\n else {\n initialState = this.getInitialState(inputs);\n }\n }\n const numStates = Array.isArray(this.cell.stateSize) ? 
this.cell.stateSize.length : 1;\n if (initialState.length !== numStates) {\n throw new ValueError(`RNN Layer has ${numStates} state(s) but was passed ` +\n `${initialState.length} initial state(s).`);\n }\n if (this.unroll) {\n console.warn('Ignoring unroll = true for RNN layer, due to imperative backend.');\n }\n const cellCallKwargs = { training };\n // TODO(cais): Add support for constants.\n const step = (inputs, states) => {\n // `inputs` and `states` are concatenated to form a single `Array` of\n // `tf.Tensor`s as the input to `cell.call()`.\n const outputs = this.cell.call([inputs].concat(states), cellCallKwargs);\n // Marshall the return value into output and new states.\n return [outputs[0], outputs.slice(1)];\n };\n // TODO(cais): Add support for constants.\n const rnnOutputs = rnn(step, inputs, initialState, this.goBackwards, mask, null, this.unroll, this.returnSequences);\n const lastOutput = rnnOutputs[0];\n const outputs = rnnOutputs[1];\n const states = rnnOutputs[2];\n if (this.stateful) {\n this.resetStates(states, training);\n }\n const output = this.returnSequences ? outputs : lastOutput;\n // TODO(cais): Porperty set learning phase flag.\n if (this.returnState) {\n return [output].concat(states);\n }\n else {\n return output;\n }\n });\n }\n getInitialState(inputs) {\n return tidy(() => {\n // Build an all-zero tensor of shape [samples, outputDim].\n // [Samples, timeSteps, inputDim].\n let initialState = tfc.zeros(inputs.shape);\n // [Samples].\n initialState = tfc.sum(initialState, [1, 2]);\n initialState = K.expandDims(initialState); // [Samples, 1].\n if (Array.isArray(this.cell.stateSize)) {\n return this.cell.stateSize.map(dim => dim > 1 ? K.tile(initialState, [1, dim]) : initialState);\n }\n else {\n return this.cell.stateSize > 1 ?\n [K.tile(initialState, [1, this.cell.stateSize])] :\n [initialState];\n }\n });\n }\n get trainableWeights() {\n if (!this.trainable) {\n return [];\n }\n // Porting Note: In TypeScript, `this` is always an instance of `Layer`.\n return this.cell.trainableWeights;\n }\n get nonTrainableWeights() {\n // Porting Note: In TypeScript, `this` is always an instance of `Layer`.\n if (!this.trainable) {\n return this.cell.weights;\n }\n return this.cell.nonTrainableWeights;\n }\n setFastWeightInitDuringBuild(value) {\n super.setFastWeightInitDuringBuild(value);\n if (this.cell != null) {\n this.cell.setFastWeightInitDuringBuild(value);\n }\n }\n getConfig() {\n const baseConfig = super.getConfig();\n const config = {\n returnSequences: this.returnSequences,\n returnState: this.returnState,\n goBackwards: this.goBackwards,\n stateful: this.stateful,\n unroll: this.unroll,\n };\n if (this.numConstants != null) {\n config['numConstants'] = this.numConstants;\n }\n const cellConfig = this.cell.getConfig();\n if (this.getClassName() === RNN.className) {\n config['cell'] = {\n 'className': this.cell.getClassName(),\n 'config': cellConfig,\n };\n }\n // this order is necessary, to prevent cell name from replacing layer name\n return Object.assign({}, cellConfig, baseConfig, config);\n }\n /** @nocollapse */\n static fromConfig(cls, config, customObjects = {}) {\n const cellConfig = config['cell'];\n const cell = deserialize(cellConfig, customObjects);\n return new cls(Object.assign(config, { cell }));\n }\n}\n/** @nocollapse */\nRNN.className = 'RNN';\nserialization.registerClass(RNN);\n// Porting Note: This is a common parent class for RNN cells. There is no\n// equivalent of this in PyKeras. 
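// Usage sketch for the RNN wrapper above, assuming the tf.layers.rnn and
// tf.layers.lstmCell factories from '@tensorflow/tfjs'.
import * as tf from '@tensorflow/tfjs';

const rnnModel = tf.sequential();
rnnModel.add(tf.layers.rnn({
  cell: tf.layers.lstmCell({units: 16}),
  inputShape: [10, 8],        // [timeSteps, inputDim]
  returnSequences: true,      // emit the per-step outputs, not just the last one
}));
console.log(rnnModel.predict(tf.zeros([4, 10, 8])).shape);  // [4, 10, 16]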
Having a common parent class forgoes the\n// need for `has_attr(cell, ...)` checks or its TypeScript equivalent.\n/**\n * An RNNCell layer.\n *\n * @doc {heading: 'Layers', subheading: 'Classes'}\n */\nexport class RNNCell extends Layer {\n}\nexport class SimpleRNNCell extends RNNCell {\n constructor(args) {\n super(args);\n this.DEFAULT_ACTIVATION = 'tanh';\n this.DEFAULT_KERNEL_INITIALIZER = 'glorotNormal';\n this.DEFAULT_RECURRENT_INITIALIZER = 'orthogonal';\n this.DEFAULT_BIAS_INITIALIZER = 'zeros';\n this.units = args.units;\n assertPositiveInteger(this.units, `units`);\n this.activation = getActivation(args.activation == null ? this.DEFAULT_ACTIVATION : args.activation);\n this.useBias = args.useBias == null ? true : args.useBias;\n this.kernelInitializer = getInitializer(args.kernelInitializer || this.DEFAULT_KERNEL_INITIALIZER);\n this.recurrentInitializer = getInitializer(args.recurrentInitializer || this.DEFAULT_RECURRENT_INITIALIZER);\n this.biasInitializer =\n getInitializer(args.biasInitializer || this.DEFAULT_BIAS_INITIALIZER);\n this.kernelRegularizer = getRegularizer(args.kernelRegularizer);\n this.recurrentRegularizer = getRegularizer(args.recurrentRegularizer);\n this.biasRegularizer = getRegularizer(args.biasRegularizer);\n this.kernelConstraint = getConstraint(args.kernelConstraint);\n this.recurrentConstraint = getConstraint(args.recurrentConstraint);\n this.biasConstraint = getConstraint(args.biasConstraint);\n this.dropout = math_utils.min([1, math_utils.max([0, args.dropout == null ? 0 : args.dropout])]);\n this.recurrentDropout = math_utils.min([\n 1,\n math_utils.max([0, args.recurrentDropout == null ? 0 : args.recurrentDropout])\n ]);\n this.stateSize = this.units;\n this.dropoutMask = null;\n this.recurrentDropoutMask = null;\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n // TODO(cais): Use regularizer.\n this.kernel = this.addWeight('kernel', [inputShape[inputShape.length - 1], this.units], null, this.kernelInitializer, this.kernelRegularizer, true, this.kernelConstraint);\n this.recurrentKernel = this.addWeight('recurrent_kernel', [this.units, this.units], null, this.recurrentInitializer, this.recurrentRegularizer, true, this.recurrentConstraint);\n if (this.useBias) {\n this.bias = this.addWeight('bias', [this.units], null, this.biasInitializer, this.biasRegularizer, true, this.biasConstraint);\n }\n else {\n this.bias = null;\n }\n this.built = true;\n }\n // Porting Note: PyKeras' equivalent of this method takes two tensor inputs:\n // `inputs` and `states`. Here, the two tensors are combined into an\n // `Tensor[]` Array as the first input argument.\n // Similarly, PyKeras' equivalent of this method returns two values:\n // `output` and `[output]`. Here the two are combined into one length-2\n // `Tensor[]`, consisting of `output` repeated.\n call(inputs, kwargs) {\n return tidy(() => {\n inputs = inputs;\n if (inputs.length !== 2) {\n throw new ValueError(`SimpleRNNCell expects 2 input Tensors, got ${inputs.length}.`);\n }\n let prevOutput = inputs[1];\n inputs = inputs[0];\n const training = kwargs['training'] == null ? 
false : kwargs['training'];\n if (0 < this.dropout && this.dropout < 1 && this.dropoutMask == null) {\n this.dropoutMask = generateDropoutMask({\n ones: () => tfc.onesLike(inputs),\n rate: this.dropout,\n training\n });\n }\n if (0 < this.recurrentDropout && this.recurrentDropout < 1 &&\n this.recurrentDropoutMask == null) {\n this.recurrentDropoutMask = generateDropoutMask({\n ones: () => tfc.onesLike(prevOutput),\n rate: this.recurrentDropout,\n training\n });\n }\n let h;\n const dpMask = this.dropoutMask;\n const recDpMask = this.recurrentDropoutMask;\n if (dpMask != null) {\n h = K.dot(tfc.mul(inputs, dpMask), this.kernel.read());\n }\n else {\n h = K.dot(inputs, this.kernel.read());\n }\n if (this.bias != null) {\n h = K.biasAdd(h, this.bias.read());\n }\n if (recDpMask != null) {\n prevOutput = tfc.mul(prevOutput, recDpMask);\n }\n let output = tfc.add(h, K.dot(prevOutput, this.recurrentKernel.read()));\n if (this.activation != null) {\n output = this.activation.apply(output);\n }\n // TODO(cais): Properly set learning phase on output tensor?\n return [output, output];\n });\n }\n getConfig() {\n const baseConfig = super.getConfig();\n const config = {\n units: this.units,\n activation: serializeActivation(this.activation),\n useBias: this.useBias,\n kernelInitializer: serializeInitializer(this.kernelInitializer),\n recurrentInitializer: serializeInitializer(this.recurrentInitializer),\n biasInitializer: serializeInitializer(this.biasInitializer),\n kernelRegularizer: serializeRegularizer(this.kernelRegularizer),\n recurrentRegularizer: serializeRegularizer(this.recurrentRegularizer),\n biasRegularizer: serializeRegularizer(this.biasRegularizer),\n activityRegularizer: serializeRegularizer(this.activityRegularizer),\n kernelConstraint: serializeConstraint(this.kernelConstraint),\n recurrentConstraint: serializeConstraint(this.recurrentConstraint),\n biasConstraint: serializeConstraint(this.biasConstraint),\n dropout: this.dropout,\n recurrentDropout: this.recurrentDropout,\n };\n return Object.assign({}, baseConfig, config);\n }\n}\n/** @nocollapse */\nSimpleRNNCell.className = 'SimpleRNNCell';\nserialization.registerClass(SimpleRNNCell);\nexport class SimpleRNN extends RNN {\n constructor(args) {\n args.cell = new SimpleRNNCell(args);\n super(args);\n // TODO(cais): Add activityRegularizer.\n }\n call(inputs, kwargs) {\n return tidy(() => {\n if (this.cell.dropoutMask != null) {\n tfc.dispose(this.cell.dropoutMask);\n this.cell.dropoutMask = null;\n }\n if (this.cell.recurrentDropoutMask != null) {\n tfc.dispose(this.cell.recurrentDropoutMask);\n this.cell.recurrentDropoutMask = null;\n }\n const mask = kwargs == null ? null : kwargs['mask'];\n const training = kwargs == null ? null : kwargs['training'];\n const initialState = kwargs == null ? 
null : kwargs['initialState'];\n return super.call(inputs, { mask, training, initialState });\n });\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls(config);\n }\n}\n/** @nocollapse */\nSimpleRNN.className = 'SimpleRNN';\nserialization.registerClass(SimpleRNN);\nexport class GRUCell extends RNNCell {\n constructor(args) {\n super(args);\n this.DEFAULT_ACTIVATION = 'tanh';\n this.DEFAULT_RECURRENT_ACTIVATION = 'hardSigmoid';\n this.DEFAULT_KERNEL_INITIALIZER = 'glorotNormal';\n this.DEFAULT_RECURRENT_INITIALIZER = 'orthogonal';\n this.DEFAULT_BIAS_INITIALIZER = 'zeros';\n if (args.resetAfter) {\n throw new ValueError(`GRUCell does not support reset_after parameter set to true.`);\n }\n this.units = args.units;\n assertPositiveInteger(this.units, 'units');\n this.activation = getActivation(args.activation === undefined ? this.DEFAULT_ACTIVATION :\n args.activation);\n this.recurrentActivation = getActivation(args.recurrentActivation === undefined ?\n this.DEFAULT_RECURRENT_ACTIVATION :\n args.recurrentActivation);\n this.useBias = args.useBias == null ? true : args.useBias;\n this.kernelInitializer = getInitializer(args.kernelInitializer || this.DEFAULT_KERNEL_INITIALIZER);\n this.recurrentInitializer = getInitializer(args.recurrentInitializer || this.DEFAULT_RECURRENT_INITIALIZER);\n this.biasInitializer =\n getInitializer(args.biasInitializer || this.DEFAULT_BIAS_INITIALIZER);\n this.kernelRegularizer = getRegularizer(args.kernelRegularizer);\n this.recurrentRegularizer = getRegularizer(args.recurrentRegularizer);\n this.biasRegularizer = getRegularizer(args.biasRegularizer);\n this.kernelConstraint = getConstraint(args.kernelConstraint);\n this.recurrentConstraint = getConstraint(args.recurrentConstraint);\n this.biasConstraint = getConstraint(args.biasConstraint);\n this.dropout = math_utils.min([1, math_utils.max([0, args.dropout == null ? 0 : args.dropout])]);\n this.recurrentDropout = math_utils.min([\n 1,\n math_utils.max([0, args.recurrentDropout == null ? 0 : args.recurrentDropout])\n ]);\n this.implementation = args.implementation;\n this.stateSize = this.units;\n this.dropoutMask = null;\n this.recurrentDropoutMask = null;\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const inputDim = inputShape[inputShape.length - 1];\n this.kernel = this.addWeight('kernel', [inputDim, this.units * 3], null, this.kernelInitializer, this.kernelRegularizer, true, this.kernelConstraint);\n this.recurrentKernel = this.addWeight('recurrent_kernel', [this.units, this.units * 3], null, this.recurrentInitializer, this.recurrentRegularizer, true, this.recurrentConstraint);\n if (this.useBias) {\n this.bias = this.addWeight('bias', [this.units * 3], null, this.biasInitializer, this.biasRegularizer, true, this.biasConstraint);\n }\n else {\n this.bias = null;\n }\n // Porting Notes: Unlike the PyKeras implementation, we perform slicing\n // of the weights and bias in the call() method, at execution time.\n this.built = true;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n inputs = inputs;\n if (inputs.length !== 2) {\n throw new ValueError(`GRUCell expects 2 input Tensors (inputs, h, c), got ` +\n `${inputs.length}.`);\n }\n const training = kwargs['training'] == null ? 
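// Usage sketch for SimpleRNN above, assuming the tf.layers.simpleRNN factory
// from '@tensorflow/tfjs'; dropout and recurrentDropout are clipped to [0, 1]
// by the cell constructor shown above.
import * as tf from '@tensorflow/tfjs';

const simpleModel = tf.sequential();
simpleModel.add(tf.layers.simpleRNN({
  units: 32,
  inputShape: [20, 4],
  dropout: 0.1,
  recurrentDropout: 0.1,
}));
console.log(simpleModel.predict(tf.zeros([2, 20, 4])).shape);  // [2, 32] (last output only)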
false : kwargs['training'];\n let hTMinus1 = inputs[1]; // Previous memory state.\n inputs = inputs[0];\n // Note: For superior performance, TensorFlow.js always uses\n // implementation 2, regardless of the actual value of\n // config.implementation.\n if (0 < this.dropout && this.dropout < 1 && this.dropoutMask == null) {\n this.dropoutMask = generateDropoutMask({\n ones: () => tfc.onesLike(inputs),\n rate: this.dropout,\n training,\n count: 3\n });\n }\n if (0 < this.recurrentDropout && this.recurrentDropout < 1 &&\n this.recurrentDropoutMask == null) {\n this.recurrentDropoutMask = generateDropoutMask({\n ones: () => tfc.onesLike(hTMinus1),\n rate: this.recurrentDropout,\n training,\n count: 3\n });\n }\n const dpMask = this.dropoutMask;\n const recDpMask = this.recurrentDropoutMask;\n let z;\n let r;\n let hh;\n if (0 < this.dropout && this.dropout < 1) {\n inputs = tfc.mul(inputs, dpMask[0]);\n }\n let matrixX = K.dot(inputs, this.kernel.read());\n if (this.useBias) {\n matrixX = K.biasAdd(matrixX, this.bias.read());\n }\n if (0 < this.recurrentDropout && this.recurrentDropout < 1) {\n hTMinus1 = tfc.mul(hTMinus1, recDpMask[0]);\n }\n const recurrentKernelValue = this.recurrentKernel.read();\n const [rk1, rk2] = tfc.split(recurrentKernelValue, [2 * this.units, this.units], recurrentKernelValue.rank - 1);\n const matrixInner = K.dot(hTMinus1, rk1);\n const [xZ, xR, xH] = tfc.split(matrixX, 3, matrixX.rank - 1);\n const [recurrentZ, recurrentR] = tfc.split(matrixInner, 2, matrixInner.rank - 1);\n z = this.recurrentActivation.apply(tfc.add(xZ, recurrentZ));\n r = this.recurrentActivation.apply(tfc.add(xR, recurrentR));\n const recurrentH = K.dot(tfc.mul(r, hTMinus1), rk2);\n hh = this.activation.apply(tfc.add(xH, recurrentH));\n const h = tfc.add(tfc.mul(z, hTMinus1), tfc.mul(tfc.add(1, tfc.neg(z)), hh));\n // TODO(cais): Add use_learning_phase flag properly.\n return [h, h];\n });\n }\n getConfig() {\n const baseConfig = super.getConfig();\n const config = {\n units: this.units,\n activation: serializeActivation(this.activation),\n recurrentActivation: serializeActivation(this.recurrentActivation),\n useBias: this.useBias,\n kernelInitializer: serializeInitializer(this.kernelInitializer),\n recurrentInitializer: serializeInitializer(this.recurrentInitializer),\n biasInitializer: serializeInitializer(this.biasInitializer),\n kernelRegularizer: serializeRegularizer(this.kernelRegularizer),\n recurrentRegularizer: serializeRegularizer(this.recurrentRegularizer),\n biasRegularizer: serializeRegularizer(this.biasRegularizer),\n activityRegularizer: serializeRegularizer(this.activityRegularizer),\n kernelConstraint: serializeConstraint(this.kernelConstraint),\n recurrentConstraint: serializeConstraint(this.recurrentConstraint),\n biasConstraint: serializeConstraint(this.biasConstraint),\n dropout: this.dropout,\n recurrentDropout: this.recurrentDropout,\n implementation: this.implementation,\n resetAfter: false\n };\n return Object.assign({}, baseConfig, config);\n }\n}\n/** @nocollapse */\nGRUCell.className = 'GRUCell';\nserialization.registerClass(GRUCell);\nexport class GRU extends RNN {\n constructor(args) {\n if (args.implementation === 0) {\n console.warn('`implementation=0` has been deprecated, and now defaults to ' +\n '`implementation=1`. 
Please update your layer call.');\n }\n args.cell = new GRUCell(args);\n super(args);\n // TODO(cais): Add activityRegularizer.\n }\n call(inputs, kwargs) {\n return tidy(() => {\n if (this.cell.dropoutMask != null) {\n tfc.dispose(this.cell.dropoutMask);\n this.cell.dropoutMask = null;\n }\n if (this.cell.recurrentDropoutMask != null) {\n tfc.dispose(this.cell.recurrentDropoutMask);\n this.cell.recurrentDropoutMask = null;\n }\n const mask = kwargs == null ? null : kwargs['mask'];\n const training = kwargs == null ? null : kwargs['training'];\n const initialState = kwargs == null ? null : kwargs['initialState'];\n return super.call(inputs, { mask, training, initialState });\n });\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n if (config['implmentation'] === 0) {\n config['implementation'] = 1;\n }\n return new cls(config);\n }\n}\n/** @nocollapse */\nGRU.className = 'GRU';\nserialization.registerClass(GRU);\nexport class LSTMCell extends RNNCell {\n constructor(args) {\n super(args);\n this.DEFAULT_ACTIVATION = 'tanh';\n this.DEFAULT_RECURRENT_ACTIVATION = 'hardSigmoid';\n this.DEFAULT_KERNEL_INITIALIZER = 'glorotNormal';\n this.DEFAULT_RECURRENT_INITIALIZER = 'orthogonal';\n this.DEFAULT_BIAS_INITIALIZER = 'zeros';\n this.units = args.units;\n assertPositiveInteger(this.units, 'units');\n this.activation = getActivation(args.activation === undefined ? this.DEFAULT_ACTIVATION :\n args.activation);\n this.recurrentActivation = getActivation(args.recurrentActivation === undefined ?\n this.DEFAULT_RECURRENT_ACTIVATION :\n args.recurrentActivation);\n this.useBias = args.useBias == null ? true : args.useBias;\n this.kernelInitializer = getInitializer(args.kernelInitializer || this.DEFAULT_KERNEL_INITIALIZER);\n this.recurrentInitializer = getInitializer(args.recurrentInitializer || this.DEFAULT_RECURRENT_INITIALIZER);\n this.biasInitializer =\n getInitializer(args.biasInitializer || this.DEFAULT_BIAS_INITIALIZER);\n this.unitForgetBias = args.unitForgetBias;\n this.kernelRegularizer = getRegularizer(args.kernelRegularizer);\n this.recurrentRegularizer = getRegularizer(args.recurrentRegularizer);\n this.biasRegularizer = getRegularizer(args.biasRegularizer);\n this.kernelConstraint = getConstraint(args.kernelConstraint);\n this.recurrentConstraint = getConstraint(args.recurrentConstraint);\n this.biasConstraint = getConstraint(args.biasConstraint);\n this.dropout = math_utils.min([1, math_utils.max([0, args.dropout == null ? 0 : args.dropout])]);\n this.recurrentDropout = math_utils.min([\n 1,\n math_utils.max([0, args.recurrentDropout == null ? 
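// Usage sketch for GRU above, assuming the tf.layers.gru factory from
// '@tensorflow/tfjs'. Note that resetAfter: true is rejected by GRUCell above.
import * as tf from '@tensorflow/tfjs';

const gruModel = tf.sequential();
gruModel.add(tf.layers.gru({
  units: 24,
  inputShape: [15, 6],
  returnSequences: true,
}));
console.log(gruModel.predict(tf.zeros([3, 15, 6])).shape);  // [3, 15, 24]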
0 : args.recurrentDropout])\n ]);\n this.implementation = args.implementation;\n this.stateSize = [this.units, this.units];\n this.dropoutMask = null;\n this.recurrentDropoutMask = null;\n }\n build(inputShape) {\n var _a;\n inputShape = getExactlyOneShape(inputShape);\n const inputDim = inputShape[inputShape.length - 1];\n this.kernel = this.addWeight('kernel', [inputDim, this.units * 4], null, this.kernelInitializer, this.kernelRegularizer, true, this.kernelConstraint);\n this.recurrentKernel = this.addWeight('recurrent_kernel', [this.units, this.units * 4], null, this.recurrentInitializer, this.recurrentRegularizer, true, this.recurrentConstraint);\n let biasInitializer;\n if (this.useBias) {\n if (this.unitForgetBias) {\n const capturedBiasInit = this.biasInitializer;\n const capturedUnits = this.units;\n biasInitializer = new (_a = class CustomInit extends Initializer {\n apply(shape, dtype) {\n // TODO(cais): More informative variable names?\n const bI = capturedBiasInit.apply([capturedUnits]);\n const bF = (new Ones()).apply([capturedUnits]);\n const bCAndH = capturedBiasInit.apply([capturedUnits * 2]);\n return K.concatAlongFirstAxis(K.concatAlongFirstAxis(bI, bF), bCAndH);\n }\n },\n /** @nocollapse */\n _a.className = 'CustomInit',\n _a)();\n }\n else {\n biasInitializer = this.biasInitializer;\n }\n this.bias = this.addWeight('bias', [this.units * 4], null, biasInitializer, this.biasRegularizer, true, this.biasConstraint);\n }\n else {\n this.bias = null;\n }\n // Porting Notes: Unlike the PyKeras implementation, we perform slicing\n // of the weights and bias in the call() method, at execution time.\n this.built = true;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n const training = kwargs['training'] == null ? false : kwargs['training'];\n inputs = inputs;\n if (inputs.length !== 3) {\n throw new ValueError(`LSTMCell expects 3 input Tensors (inputs, h, c), got ` +\n `${inputs.length}.`);\n }\n let hTMinus1 = inputs[1]; // Previous memory state.\n const cTMinus1 = inputs[2]; // Previous carry state.\n inputs = inputs[0];\n if (0 < this.dropout && this.dropout < 1 && this.dropoutMask == null) {\n this.dropoutMask = generateDropoutMask({\n ones: () => tfc.onesLike(inputs),\n rate: this.dropout,\n training,\n count: 4\n });\n }\n if (0 < this.recurrentDropout && this.recurrentDropout < 1 &&\n this.recurrentDropoutMask == null) {\n this.recurrentDropoutMask = generateDropoutMask({\n ones: () => tfc.onesLike(hTMinus1),\n rate: this.recurrentDropout,\n training,\n count: 4\n });\n }\n const dpMask = this.dropoutMask;\n const recDpMask = this.recurrentDropoutMask;\n // Note: For superior performance, TensorFlow.js always uses\n // implementation 2 regardless of the actual value of\n // config.implementation.\n let i;\n let f;\n let c;\n let o;\n if (0 < this.dropout && this.dropout < 1) {\n inputs = tfc.mul(inputs, dpMask[0]);\n }\n let z = K.dot(inputs, this.kernel.read());\n if (0 < this.recurrentDropout && this.recurrentDropout < 1) {\n hTMinus1 = tfc.mul(hTMinus1, recDpMask[0]);\n }\n z = tfc.add(z, K.dot(hTMinus1, this.recurrentKernel.read()));\n if (this.useBias) {\n z = K.biasAdd(z, this.bias.read());\n }\n const [z0, z1, z2, z3] = tfc.split(z, 4, z.rank - 1);\n i = this.recurrentActivation.apply(z0);\n f = this.recurrentActivation.apply(z1);\n c = tfc.add(tfc.mul(f, cTMinus1), tfc.mul(i, this.activation.apply(z2)));\n o = this.recurrentActivation.apply(z3);\n const h = tfc.mul(o, this.activation.apply(c));\n // TODO(cais): Add use_learning_phase flag properly.\n return 
[h, h, c];\n });\n }\n getConfig() {\n const baseConfig = super.getConfig();\n const config = {\n units: this.units,\n activation: serializeActivation(this.activation),\n recurrentActivation: serializeActivation(this.recurrentActivation),\n useBias: this.useBias,\n kernelInitializer: serializeInitializer(this.kernelInitializer),\n recurrentInitializer: serializeInitializer(this.recurrentInitializer),\n biasInitializer: serializeInitializer(this.biasInitializer),\n unitForgetBias: this.unitForgetBias,\n kernelRegularizer: serializeRegularizer(this.kernelRegularizer),\n recurrentRegularizer: serializeRegularizer(this.recurrentRegularizer),\n biasRegularizer: serializeRegularizer(this.biasRegularizer),\n activityRegularizer: serializeRegularizer(this.activityRegularizer),\n kernelConstraint: serializeConstraint(this.kernelConstraint),\n recurrentConstraint: serializeConstraint(this.recurrentConstraint),\n biasConstraint: serializeConstraint(this.biasConstraint),\n dropout: this.dropout,\n recurrentDropout: this.recurrentDropout,\n implementation: this.implementation,\n };\n return Object.assign({}, baseConfig, config);\n }\n}\n/** @nocollapse */\nLSTMCell.className = 'LSTMCell';\nserialization.registerClass(LSTMCell);\nexport class LSTM extends RNN {\n constructor(args) {\n if (args.implementation === 0) {\n console.warn('`implementation=0` has been deprecated, and now defaults to ' +\n '`implementation=1`. Please update your layer call.');\n }\n args.cell = new LSTMCell(args);\n super(args);\n // TODO(cais): Add activityRegularizer.\n }\n call(inputs, kwargs) {\n return tidy(() => {\n if (this.cell.dropoutMask != null) {\n tfc.dispose(this.cell.dropoutMask);\n this.cell.dropoutMask = null;\n }\n if (this.cell.recurrentDropoutMask != null) {\n tfc.dispose(this.cell.recurrentDropoutMask);\n this.cell.recurrentDropoutMask = null;\n }\n const mask = kwargs == null ? null : kwargs['mask'];\n const training = kwargs == null ? null : kwargs['training'];\n const initialState = kwargs == null ? null : kwargs['initialState'];\n return super.call(inputs, { mask, training, initialState });\n });\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n if (config['implmentation'] === 0) {\n config['implementation'] = 1;\n }\n return new cls(config);\n }\n}\n/** @nocollapse */\nLSTM.className = 'LSTM';\nserialization.registerClass(LSTM);\nexport class StackedRNNCells extends RNNCell {\n constructor(args) {\n super(args);\n this.cells = args.cells;\n }\n get stateSize() {\n // States are a flat list in reverse order of the cell stack.\n // This allows perserving the requirement `stack.statesize[0] ===\n // outputDim`. 
E.g., states of a 2-layer LSTM would be `[h2, c2, h1, c1]`,\n // assuming one LSTM has states `[h, c]`.\n const stateSize = [];\n for (const cell of this.cells.slice().reverse()) {\n if (Array.isArray(cell.stateSize)) {\n stateSize.push(...cell.stateSize);\n }\n else {\n stateSize.push(cell.stateSize);\n }\n }\n return stateSize;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n inputs = inputs;\n let states = inputs.slice(1);\n // Recover per-cell states.\n const nestedStates = [];\n for (const cell of this.cells.slice().reverse()) {\n if (Array.isArray(cell.stateSize)) {\n nestedStates.push(states.splice(0, cell.stateSize.length));\n }\n else {\n nestedStates.push(states.splice(0, 1));\n }\n }\n nestedStates.reverse();\n // Call the cells in order and store the returned states.\n const newNestedStates = [];\n let callInputs;\n for (let i = 0; i < this.cells.length; ++i) {\n const cell = this.cells[i];\n states = nestedStates[i];\n // TODO(cais): Take care of constants.\n if (i === 0) {\n callInputs = [inputs[0]].concat(states);\n }\n else {\n callInputs = [callInputs[0]].concat(states);\n }\n callInputs = cell.call(callInputs, kwargs);\n newNestedStates.push(callInputs.slice(1));\n }\n // Format the new states as a flat list in reverse cell order.\n states = [];\n for (const cellStates of newNestedStates.slice().reverse()) {\n states.push(...cellStates);\n }\n return [callInputs[0]].concat(states);\n });\n }\n build(inputShape) {\n if (isArrayOfShapes(inputShape)) {\n // TODO(cais): Take care of input constants.\n // const constantShape = inputShape.slice(1);\n inputShape = inputShape[0];\n }\n inputShape = inputShape;\n let outputDim;\n this.cells.forEach((cell, i) => {\n nameScope(`RNNCell_${i}`, () => {\n // TODO(cais): Take care of input constants.\n cell.build(inputShape);\n if (Array.isArray(cell.stateSize)) {\n outputDim = cell.stateSize[0];\n }\n else {\n outputDim = cell.stateSize;\n }\n inputShape = [inputShape[0], outputDim];\n });\n });\n this.built = true;\n }\n getConfig() {\n const baseConfig = super.getConfig();\n const getCellConfig = (cell) => {\n return {\n 'className': cell.getClassName(),\n 'config': cell.getConfig(),\n };\n };\n const cellConfigs = this.cells.map(getCellConfig);\n const config = { 'cells': cellConfigs };\n return Object.assign({}, baseConfig, config);\n }\n /** @nocollapse */\n static fromConfig(cls, config, customObjects = {}) {\n const cells = [];\n for (const cellConfig of config['cells']) {\n cells.push(deserialize(cellConfig, customObjects));\n }\n return new cls({ cells });\n }\n get trainableWeights() {\n if (!this.trainable) {\n return [];\n }\n const weights = [];\n for (const cell of this.cells) {\n weights.push(...cell.trainableWeights);\n }\n return weights;\n }\n get nonTrainableWeights() {\n const weights = [];\n for (const cell of this.cells) {\n weights.push(...cell.nonTrainableWeights);\n }\n if (!this.trainable) {\n const trainableWeights = [];\n for (const cell of this.cells) {\n trainableWeights.push(...cell.trainableWeights);\n }\n return trainableWeights.concat(weights);\n }\n return weights;\n }\n /**\n * Retrieve the weights of a the model.\n *\n * @returns A flat `Array` of `tf.Tensor`s.\n */\n getWeights() {\n const weights = [];\n for (const cell of this.cells) {\n weights.push(...cell.weights);\n }\n return batchGetValue(weights);\n }\n /**\n * Set the weights of the model.\n *\n * @param weights An `Array` of `tf.Tensor`s with shapes and types matching\n * the output of `getWeights()`.\n */\n setWeights(weights) {\n 
const tuples = [];\n for (const cell of this.cells) {\n const numParams = cell.weights.length;\n const inputWeights = weights.splice(numParams);\n for (let i = 0; i < cell.weights.length; ++i) {\n tuples.push([cell.weights[i], inputWeights[i]]);\n }\n }\n batchSetValue(tuples);\n }\n}\n/** @nocollapse */\nStackedRNNCells.className = 'StackedRNNCells';\nserialization.registerClass(StackedRNNCells);\nexport function generateDropoutMask(args) {\n const { ones, rate, training = false, count = 1 } = args;\n const droppedInputs = () => K.dropout(ones(), rate);\n const createMask = () => K.inTrainPhase(droppedInputs, ones, training);\n // just in case count is provided with null or undefined\n if (!count || count <= 1) {\n return tfc.keep(createMask().clone());\n }\n const masks = Array(count).fill(undefined).map(createMask);\n return masks.map(m => tfc.keep(m.clone()));\n}\n//# sourceMappingURL=recurrent.js.map", "/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nvar __rest = (this && this.__rest) || function (s, e) {\n var t = {};\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\n t[p] = s[p];\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\n t[p[i]] = s[p[i]];\n }\n return t;\n};\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { util } from '@tensorflow/tfjs-core';\nimport * as K from '../backend/tfjs_backend';\nimport { checkDataFormat, checkPaddingMode } from '../common';\nimport { InputSpec } from '../engine/topology';\nimport { AttributeError, NotImplementedError, ValueError } from '../errors';\nimport { Initializer } from '../initializers';\nimport { convOutputLength, normalizeArray } from '../utils/conv_utils';\nimport { assertPositiveInteger } from '../utils/generic_utils';\nimport { getExactlyOneShape } from '../utils/types_utils';\nimport { generateDropoutMask, LSTMCell, RNN, RNNCell } from './recurrent';\nclass ConvRNN2DCell extends RNNCell {\n}\n/**\n * Base class for convolutional-recurrent layers.\n */\nclass ConvRNN2D extends RNN {\n constructor(args) {\n if (args.unroll) {\n throw new NotImplementedError('Unrolling is not possible with convolutional RNNs.');\n }\n if (Array.isArray(args.cell)) {\n throw new NotImplementedError('It is not possible at the moment to stack convolutional cells.');\n }\n super(args);\n this.inputSpec = [new InputSpec({ ndim: 5 })];\n }\n call(inputs, kwargs) {\n return tfc.tidy(() => {\n if (this.cell.dropoutMask != null) {\n tfc.dispose(this.cell.dropoutMask);\n this.cell.dropoutMask = null;\n }\n if (this.cell.recurrentDropoutMask != null) {\n tfc.dispose(this.cell.recurrentDropoutMask);\n this.cell.recurrentDropoutMask = null;\n }\n if (kwargs && kwargs['constants']) {\n throw new ValueError('ConvRNN2D cell does not support constants');\n }\n const mask = kwargs == null ? null : kwargs['mask'];\n const training = kwargs == null ? null : kwargs['training'];\n const initialState = kwargs == null ? 
null : kwargs['initialState'];\n return super.call(inputs, { mask, training, initialState });\n });\n }\n computeOutputShape(inputShape) {\n let outShape = this.computeSingleOutputShape(inputShape);\n if (!this.returnSequences) {\n outShape = [outShape[0], ...outShape.slice(2)];\n }\n if (this.returnState) {\n outShape =\n [outShape, ...Array(2).fill([inputShape[0], ...outShape.slice(-3)])];\n }\n return outShape;\n }\n getInitialState(inputs) {\n return tfc.tidy(() => {\n const { stateSize } = this.cell;\n const inputShape = inputs.shape;\n const outputShape = this.computeSingleOutputShape(inputShape);\n const stateShape = [outputShape[0], ...outputShape.slice(2)];\n const initialState = tfc.zeros(stateShape);\n if (Array.isArray(stateSize)) {\n return Array(stateSize.length).fill(initialState);\n }\n return [initialState];\n });\n }\n resetStates(states, training = false) {\n tfc.tidy(() => {\n if (!this.stateful) {\n throw new AttributeError('Cannot call resetStates() on an RNN Layer that is not stateful.');\n }\n const inputShape = this.inputSpec[0].shape;\n const outputShape = this.computeSingleOutputShape(inputShape);\n const stateShape = [outputShape[0], ...outputShape.slice(2)];\n const batchSize = inputShape[0];\n if (batchSize == null) {\n throw new ValueError('If an RNN is stateful, it needs to know its batch size. Specify ' +\n 'the batch size of your input tensors: \\n' +\n '- If using a Sequential model, specify the batch size by ' +\n 'passing a `batchInputShape` option to your first layer.\\n' +\n '- If using the functional API, specify the batch size by ' +\n 'passing a `batchShape` option to your Input layer.');\n }\n // Initialize state if null.\n if (this.getStates() == null) {\n if (Array.isArray(this.cell.stateSize)) {\n this.states_ = this.cell.stateSize.map(() => tfc.zeros(stateShape));\n }\n else {\n this.states_ = [tfc.zeros(stateShape)];\n }\n }\n else if (states == null) {\n // Dispose old state tensors.\n tfc.dispose(this.states_);\n // For stateful RNNs, fully dispose kept old states.\n if (this.keptStates != null) {\n tfc.dispose(this.keptStates);\n this.keptStates = [];\n }\n if (Array.isArray(this.cell.stateSize)) {\n this.states_ = this.cell.stateSize.map(() => tfc.zeros(stateShape));\n }\n else {\n this.states_[0] = tfc.zeros(stateShape);\n }\n }\n else {\n if (!Array.isArray(states)) {\n states = [states];\n }\n if (states.length !== this.states_.length) {\n throw new ValueError(`Layer ${this.name} expects ${this.states_.length} state(s), ` +\n `but it received ${states.length} state value(s). Input ` +\n `received: ${states}`);\n }\n if (training) {\n // Store old state tensors for complete disposal later, i.e., during\n // the next no-arg call to this method. 
We do not dispose the old\n // states immediately because that BPTT (among other things) require\n // them.\n this.keptStates.push(this.states_.slice());\n }\n else {\n tfc.dispose(this.states_);\n }\n for (let index = 0; index < this.states_.length; ++index) {\n const value = states[index];\n const expectedShape = stateShape;\n if (!util.arraysEqual(value.shape, expectedShape)) {\n throw new ValueError(`State ${index} is incompatible with layer ${this.name}: ` +\n `expected shape=${expectedShape}, received shape=${value.shape}`);\n }\n this.states_[index] = value;\n }\n }\n this.states_ = this.states_.map(state => tfc.keep(state.clone()));\n });\n }\n computeSingleOutputShape(inputShape) {\n const { dataFormat, filters, kernelSize, padding, strides, dilationRate } = this.cell;\n const isChannelsFirst = dataFormat === 'channelsFirst';\n const h = inputShape[isChannelsFirst ? 3 : 2];\n const w = inputShape[isChannelsFirst ? 4 : 3];\n const hOut = convOutputLength(h, kernelSize[0], padding, strides[0], dilationRate[0]);\n const wOut = convOutputLength(w, kernelSize[1], padding, strides[1], dilationRate[1]);\n const outShape = [\n ...inputShape.slice(0, 2),\n ...(isChannelsFirst ? [filters, hOut, wOut] : [hOut, wOut, filters])\n ];\n return outShape;\n }\n}\n/** @nocollapse */\nConvRNN2D.className = 'ConvRNN2D';\nexport class ConvLSTM2DCell extends LSTMCell {\n constructor(args) {\n const { filters, kernelSize, strides, padding, dataFormat, dilationRate, } = args;\n super(Object.assign({}, args, { units: filters }));\n this.filters = filters;\n assertPositiveInteger(this.filters, 'filters');\n this.kernelSize = normalizeArray(kernelSize, 2, 'kernelSize');\n this.kernelSize.forEach(size => assertPositiveInteger(size, 'kernelSize'));\n this.strides = normalizeArray(strides || 1, 2, 'strides');\n this.strides.forEach(stride => assertPositiveInteger(stride, 'strides'));\n this.padding = padding || 'valid';\n checkPaddingMode(this.padding);\n this.dataFormat = dataFormat || 'channelsLast';\n checkDataFormat(this.dataFormat);\n this.dilationRate = normalizeArray(dilationRate || 1, 2, 'dilationRate');\n this.dilationRate.forEach(rate => assertPositiveInteger(rate, 'dilationRate'));\n }\n build(inputShape) {\n var _a;\n inputShape = getExactlyOneShape(inputShape);\n const channelAxis = this.dataFormat === 'channelsFirst' ? 1 : inputShape.length - 1;\n if (inputShape[channelAxis] == null) {\n throw new ValueError(`The channel dimension of the input should be defined. 
` +\n `Found ${inputShape[channelAxis]}`);\n }\n const inputDim = inputShape[channelAxis];\n const numOfKernels = 4;\n const kernelShape = this.kernelSize.concat([inputDim, this.filters * numOfKernels]);\n this.kernel = this.addWeight('kernel', kernelShape, null, this.kernelInitializer, this.kernelRegularizer, true, this.kernelConstraint);\n const recurrentKernelShape = this.kernelSize.concat([this.filters, this.filters * numOfKernels]);\n this.recurrentKernel = this.addWeight('recurrent_kernel', recurrentKernelShape, null, this.recurrentInitializer, this.recurrentRegularizer, true, this.recurrentConstraint);\n if (this.useBias) {\n let biasInitializer;\n if (this.unitForgetBias) {\n const init = this.biasInitializer;\n const filters = this.filters;\n biasInitializer = new (_a = class CustomInit extends Initializer {\n apply(shape, dtype) {\n const biasI = init.apply([filters]);\n const biasF = tfc.ones([filters]);\n const biasCAndO = init.apply([filters * 2]);\n return K.concatenate([biasI, biasF, biasCAndO]);\n }\n },\n /** @nocollapse */\n _a.className = 'CustomInit',\n _a)();\n }\n else {\n biasInitializer = this.biasInitializer;\n }\n this.bias = this.addWeight('bias', [this.filters * numOfKernels], null, biasInitializer, this.biasRegularizer, true, this.biasConstraint);\n }\n this.built = true;\n }\n call(inputs, kwargs) {\n return tfc.tidy(() => {\n if (inputs.length !== 3) {\n throw new ValueError(`ConvLSTM2DCell expects 3 input Tensors (inputs, h, c), got ` +\n `${inputs.length}.`);\n }\n const training = kwargs['training'] || false;\n const x = inputs[0]; // Current input\n const hTMinus1 = inputs[1]; // Previous memory state.\n const cTMinus1 = inputs[2]; // Previous carry state.\n const numOfKernels = 4;\n if (0 < this.dropout && this.dropout < 1 && this.dropoutMask == null) {\n this.dropoutMask = generateDropoutMask({\n ones: () => tfc.onesLike(x),\n rate: this.dropout,\n training,\n count: numOfKernels\n });\n }\n const dropoutMask = this.dropoutMask;\n const applyDropout = (x, mask, index) => {\n if (!mask || !mask[index]) {\n return x;\n }\n return tfc.mul(mask[index], x);\n };\n let xI = applyDropout(x, dropoutMask, 0);\n let xF = applyDropout(x, dropoutMask, 1);\n let xC = applyDropout(x, dropoutMask, 2);\n let xO = applyDropout(x, dropoutMask, 3);\n if (0 < this.recurrentDropout && this.recurrentDropout < 1 &&\n this.recurrentDropoutMask == null) {\n this.recurrentDropoutMask = generateDropoutMask({\n ones: () => tfc.onesLike(hTMinus1),\n rate: this.recurrentDropout,\n training,\n count: numOfKernels\n });\n }\n const recDropoutMask = this.recurrentDropoutMask;\n let hI = applyDropout(hTMinus1, recDropoutMask, 0);\n let hF = applyDropout(hTMinus1, recDropoutMask, 1);\n let hC = applyDropout(hTMinus1, recDropoutMask, 2);\n let hO = applyDropout(hTMinus1, recDropoutMask, 3);\n const kernelChannelAxis = 3;\n const [kernelI, kernelF, kernelC, kernelO] = tfc.split(this.kernel.read(), numOfKernels, kernelChannelAxis);\n const [biasI, biasF, biasC, biasO] = this.useBias ?\n tfc.split(this.bias.read(), numOfKernels) :\n [null, null, null, null];\n xI = this.inputConv(xI, kernelI, biasI, this.padding);\n xF = this.inputConv(xF, kernelF, biasF, this.padding);\n xC = this.inputConv(xC, kernelC, biasC, this.padding);\n xO = this.inputConv(xO, kernelO, biasO, this.padding);\n const [recKernelI, recKernelF, recKernelC, recKernelO] = tfc.split(this.recurrentKernel.read(), numOfKernels, kernelChannelAxis);\n hI = this.recurrentConv(hI, recKernelI);\n hF = this.recurrentConv(hF, 
recKernelF);\n hC = this.recurrentConv(hC, recKernelC);\n hO = this.recurrentConv(hO, recKernelO);\n const i = this.recurrentActivation.apply(tfc.add(xI, hI));\n const f = this.recurrentActivation.apply(tfc.add(xF, hF));\n const c = tfc.add(tfc.mul(f, cTMinus1), tfc.mul(i, this.activation.apply(tfc.add(xC, hC))));\n const h = tfc.mul(this.recurrentActivation.apply(tfc.add(xO, hO)), this.activation.apply(c));\n return [h, h, c];\n });\n }\n getConfig() {\n const _a = super.getConfig(), { 'units': _ } = _a, baseConfig = __rest(_a, ['units']);\n const config = {\n filters: this.filters,\n kernelSize: this.kernelSize,\n padding: this.padding,\n dataFormat: this.dataFormat,\n dilationRate: this.dilationRate,\n strides: this.strides,\n };\n return Object.assign({}, baseConfig, config);\n }\n inputConv(x, w, b, padding) {\n const out = tfc.conv2d(x, w, this.strides, (padding || 'valid'), this.dataFormat === 'channelsFirst' ? 'NCHW' : 'NHWC', this.dilationRate);\n if (b) {\n return K.biasAdd(out, b, this.dataFormat);\n }\n return out;\n }\n recurrentConv(x, w) {\n const strides = 1;\n return tfc.conv2d(x, w, strides, 'same', this.dataFormat === 'channelsFirst' ? 'NCHW' : 'NHWC');\n }\n}\n/** @nocollapse */\nConvLSTM2DCell.className = 'ConvLSTM2DCell';\ntfc.serialization.registerClass(ConvLSTM2DCell);\nexport class ConvLSTM2D extends ConvRNN2D {\n constructor(args) {\n const cell = new ConvLSTM2DCell(args);\n super(Object.assign({}, args, { cell }));\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls(config);\n }\n}\n/** @nocollapse */\nConvLSTM2D.className = 'ConvLSTM2D';\ntfc.serialization.registerClass(ConvLSTM2D);\n//# sourceMappingURL=convolutional_recurrent.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * TensorFlow.js Layers: Basic Layers.\n */\nimport { any, notEqual, serialization, tidy, transpose, util } from '@tensorflow/tfjs-core';\nimport { getActivation, serializeActivation } from '../activations';\nimport * as K from '../backend/tfjs_backend';\nimport { getConstraint, serializeConstraint } from '../constraints';\nimport { InputSpec, Layer } from '../engine/topology';\nimport { ValueError } from '../errors';\nimport { getInitializer, serializeInitializer } from '../initializers';\nimport { getRegularizer, serializeRegularizer } from '../regularizers';\nimport { assertPositiveInteger, mapActivationToFusedKernel } from '../utils/generic_utils';\nimport { arrayProd, range } from '../utils/math_utils';\nimport { getExactlyOneShape, getExactlyOneTensor } from '../utils/types_utils';\nexport class Dropout extends Layer {\n constructor(args) {\n super(args);\n this.rate = Math.max(Math.min(args.rate, 1), 0);\n // So that the scalar doesn't get tidied up between executions.\n this.noiseShape = args.noiseShape;\n this.seed = args.seed;\n this.supportsMasking = true;\n }\n getNoiseShape(input) {\n if (this.noiseShape == null) {\n return this.noiseShape;\n }\n const inputShape = input.shape;\n const noiseShape = [];\n for (let i = 0; i < this.noiseShape.length; ++i) {\n noiseShape.push(this.noiseShape[i] == null ? 
inputShape[i] : this.noiseShape[i]);\n }\n return noiseShape;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n const input = getExactlyOneTensor(inputs);\n if (0 < this.rate && this.rate < 1) {\n const training = kwargs['training'] == null ? false : kwargs['training'];\n const noiseShape = this.getNoiseShape(input);\n const output = K.inTrainPhase(() => K.dropout(input, this.rate, noiseShape, this.seed), () => input, training);\n return output;\n }\n return inputs;\n });\n }\n getConfig() {\n const config = {\n rate: this.rate,\n noiseShape: this.noiseShape,\n seed: this.seed,\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n dispose() {\n return super.dispose();\n }\n}\n/** @nocollapse */\nDropout.className = 'Dropout';\nserialization.registerClass(Dropout);\nexport class SpatialDropout1D extends Dropout {\n constructor(args) {\n super(args);\n this.inputSpec = [{ ndim: 3 }];\n }\n getNoiseShape(input) {\n const inputShape = input.shape;\n return [inputShape[0], 1, inputShape[2]];\n }\n}\n/** @nocollapse */\nSpatialDropout1D.className = 'SpatialDropout1D';\nserialization.registerClass(SpatialDropout1D);\nexport class Dense extends Layer {\n constructor(args) {\n super(args);\n // Default activation: Linear (none).\n this.activation = null;\n this.useBias = true;\n this.kernel = null;\n this.bias = null;\n this.DEFAULT_KERNEL_INITIALIZER = 'glorotNormal';\n this.DEFAULT_BIAS_INITIALIZER = 'zeros';\n if (args.batchInputShape == null && args.inputShape == null &&\n args.inputDim != null) {\n // This logic is copied from Layer's constructor, since we can't\n // do exactly what the Python constructor does for Dense().\n let batchSize = null;\n if (args.batchSize != null) {\n batchSize = args.batchSize;\n }\n this.batchInputShape = [batchSize, args.inputDim];\n }\n this.units = args.units;\n assertPositiveInteger(this.units, 'units');\n this.activation = getActivation(args.activation);\n if (args.useBias != null) {\n this.useBias = args.useBias;\n }\n this.kernelInitializer = getInitializer(args.kernelInitializer || this.DEFAULT_KERNEL_INITIALIZER);\n this.biasInitializer =\n getInitializer(args.biasInitializer || this.DEFAULT_BIAS_INITIALIZER);\n this.kernelConstraint = getConstraint(args.kernelConstraint);\n this.biasConstraint = getConstraint(args.biasConstraint);\n this.kernelRegularizer = getRegularizer(args.kernelRegularizer);\n this.biasRegularizer = getRegularizer(args.biasRegularizer);\n this.activityRegularizer = getRegularizer(args.activityRegularizer);\n this.supportsMasking = true;\n this.inputSpec = [{ minNDim: 2 }];\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const inputLastDim = inputShape[inputShape.length - 1];\n if (this.kernel == null) {\n this.kernel = this.addWeight('kernel', [inputLastDim, this.units], null, this.kernelInitializer, this.kernelRegularizer, true, this.kernelConstraint);\n if (this.useBias) {\n this.bias = this.addWeight('bias', [this.units], null, this.biasInitializer, this.biasRegularizer, true, this.biasConstraint);\n }\n }\n this.inputSpec = [{ minNDim: 2, axes: { [-1]: inputLastDim } }];\n this.built = true;\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const outputShape = inputShape.slice();\n outputShape[outputShape.length - 1] = this.units;\n return outputShape;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n // Dense layer accepts only a 
single input.\n const input = getExactlyOneTensor(inputs);\n const fusedActivationName = mapActivationToFusedKernel(this.activation.getClassName());\n let output;\n if (fusedActivationName != null) {\n output = K.dot(input, this.kernel.read(), fusedActivationName, this.bias ? this.bias.read() : null);\n }\n else {\n output = K.dot(input, this.kernel.read());\n if (this.bias != null) {\n output = K.biasAdd(output, this.bias.read());\n }\n if (this.activation != null) {\n output = this.activation.apply(output);\n }\n }\n return output;\n });\n }\n getConfig() {\n const config = {\n units: this.units,\n activation: serializeActivation(this.activation),\n useBias: this.useBias,\n kernelInitializer: serializeInitializer(this.kernelInitializer),\n biasInitializer: serializeInitializer(this.biasInitializer),\n kernelRegularizer: serializeRegularizer(this.kernelRegularizer),\n biasRegularizer: serializeRegularizer(this.biasRegularizer),\n activityRegularizer: serializeRegularizer(this.activityRegularizer),\n kernelConstraint: serializeConstraint(this.kernelConstraint),\n biasConstraint: serializeConstraint(this.biasConstraint)\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nDense.className = 'Dense';\nserialization.registerClass(Dense);\nexport class Flatten extends Layer {\n constructor(args) {\n args = args || {};\n super(args);\n this.inputSpec = [{ minNDim: 3 }];\n this.dataFormat = args.dataFormat;\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n for (const dim of inputShape.slice(1)) {\n if (dim == null) {\n throw new ValueError(`The shape of the input to \"Flatten\" is not fully defined ` +\n `(got ${inputShape.slice(1)}). Make sure to pass a complete ` +\n `\"input_shape\" or \"batch_input_shape\" argument to the first ` +\n `layer in your model.`);\n }\n }\n return [inputShape[0], arrayProd(inputShape, 1)];\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n let input = getExactlyOneTensor(inputs);\n if (this.dataFormat === 'channelsFirst' && input.rank > 1) {\n const permutation = [0];\n for (let i = 2; i < input.rank; ++i) {\n permutation.push(i);\n }\n permutation.push(1);\n input = input.transpose(permutation);\n }\n return K.batchFlatten(input);\n });\n }\n getConfig() {\n const config = {};\n if (this.dataFormat != null) {\n config['dataFormat'] = this.dataFormat;\n }\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nFlatten.className = 'Flatten';\nserialization.registerClass(Flatten);\nexport class Activation extends Layer {\n constructor(args) {\n super(args);\n this.supportsMasking = true;\n this.activation = getActivation(args.activation);\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n const input = getExactlyOneTensor(inputs);\n return this.activation.apply(input);\n });\n }\n getConfig() {\n const config = { activation: serializeActivation(this.activation) };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nActivation.className = 'Activation';\nserialization.registerClass(Activation);\nexport class RepeatVector extends Layer {\n constructor(args) {\n super(args);\n this.n = args.n;\n this.inputSpec = [{ ndim: 2 }];\n }\n computeOutputShape(inputShape) {\n return [inputShape[0], this.n, inputShape[1]];\n }\n call(inputs, kwargs) {\n 
return tidy(() => {\n inputs = getExactlyOneTensor(inputs);\n return K.repeat(inputs, this.n);\n });\n }\n getConfig() {\n const config = {\n n: this.n,\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nRepeatVector.className = 'RepeatVector';\nserialization.registerClass(RepeatVector);\nexport class Reshape extends Layer {\n constructor(args) {\n super(args);\n this.targetShape = args.targetShape;\n // Make sure that all unknown dimensions are represented as `null`.\n for (let i = 0; i < this.targetShape.length; ++i) {\n if (this.isUnknown(this.targetShape[i])) {\n this.targetShape[i] = null;\n }\n }\n }\n isUnknown(dim) {\n return dim < 0 || dim == null;\n }\n /**\n * Finds and replaces a missing dimension in output shape.\n *\n * This is a near direct port of the internal Numpy function\n * `_fix_unknown_dimension` in `numpy/core/src/multiarray/shape.c`.\n *\n * @param inputShape: Original shape of array begin reshape.\n * @param outputShape: Target shape of the array, with at most a single\n * `null` or negative number, which indicates an underdetermined dimension\n * that should be derived from `inputShape` and the known dimensions of\n * `outputShape`.\n * @returns: The output shape with `null` replaced with its computed value.\n * @throws: ValueError: If `inputShape` and `outputShape` do not match.\n */\n fixUnknownDimension(inputShape, outputShape) {\n const errorMsg = 'Total size of new array must be unchanged.';\n const finalShape = outputShape.slice();\n let known = 1;\n let unknown = null;\n for (let i = 0; i < finalShape.length; ++i) {\n const dim = finalShape[i];\n if (this.isUnknown(dim)) {\n if (unknown === null) {\n unknown = i;\n }\n else {\n throw new ValueError('Can only specifiy one unknown dimension.');\n }\n }\n else {\n known *= dim;\n }\n }\n const originalSize = arrayProd(inputShape);\n if (unknown !== null) {\n if (known === 0 || originalSize % known !== 0) {\n throw new ValueError(errorMsg);\n }\n finalShape[unknown] = originalSize / known;\n }\n else if (originalSize !== known) {\n throw new ValueError(errorMsg);\n }\n return finalShape;\n }\n computeOutputShape(inputShape) {\n let anyUnknownDims = false;\n for (let i = 0; i < inputShape.length; ++i) {\n if (this.isUnknown(inputShape[i])) {\n anyUnknownDims = true;\n break;\n }\n }\n if (anyUnknownDims) {\n return inputShape.slice(0, 1).concat(this.targetShape);\n }\n else {\n return inputShape.slice(0, 1).concat(this.fixUnknownDimension(inputShape.slice(1), this.targetShape));\n }\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n const input = getExactlyOneTensor(inputs);\n const inputShape = input.shape;\n const outputShape = inputShape.slice(0, 1).concat(this.fixUnknownDimension(inputShape.slice(1), this.targetShape));\n return input.reshape(outputShape);\n });\n }\n getConfig() {\n const config = {\n targetShape: this.targetShape,\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nReshape.className = 'Reshape';\nserialization.registerClass(Reshape);\nexport class Permute extends Layer {\n constructor(args) {\n super(args);\n if (args.dims == null) {\n throw new Error('Required configuration field `dims` is missing during Permute ' +\n 'constructor call.');\n }\n if (!Array.isArray(args.dims)) {\n throw new Error('Permute constructor requires `dims` to be an Array, but received ' +\n `${args.dims} instead.`);\n }\n 
// Check the validity of the permutation indices.\n const expectedSortedIndices = range(1, args.dims.length + 1);\n if (!util.arraysEqual(args.dims.slice().sort(), expectedSortedIndices)) {\n throw new Error('Invalid permutation `dims`: ' + JSON.stringify(args.dims) +\n ' `dims` must contain consecutive integers starting from 1.');\n }\n this.dims = args.dims;\n this.dimsIncludingBatch = [0].concat(this.dims);\n this.inputSpec = [new InputSpec({ ndim: this.dims.length + 1 })];\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const outputShape = inputShape.slice();\n this.dims.forEach((dim, i) => {\n outputShape[i + 1] = inputShape[dim];\n });\n return outputShape;\n }\n call(inputs, kwargs) {\n return transpose(getExactlyOneTensor(inputs), this.dimsIncludingBatch);\n }\n getConfig() {\n const config = {\n dims: this.dims,\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nPermute.className = 'Permute';\nserialization.registerClass(Permute);\nexport class Masking extends Layer {\n constructor(args) {\n super(args == null ? {} : args);\n this.supportsMasking = true;\n if (args != null) {\n this.maskValue = args.maskValue == null ? 0 : args.maskValue;\n }\n else {\n this.maskValue = 0;\n }\n }\n computeOutputShape(inputShape) {\n return inputShape;\n }\n getConfig() {\n const baseConfig = super.getConfig();\n const config = { maskValue: this.maskValue };\n Object.assign(config, baseConfig);\n return config;\n }\n computeMask(inputs, mask) {\n const input = getExactlyOneTensor(inputs);\n const axis = -1;\n return any(notEqual(input, this.maskValue), axis);\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n const input = getExactlyOneTensor(inputs);\n const axis = -1;\n const keepDims = true;\n const booleanMask = any(notEqual(input, this.maskValue), axis, keepDims);\n const output = input.mul(booleanMask.asType(input.dtype));\n return output;\n });\n }\n}\n/** @nocollapse */\nMasking.className = 'Masking';\nserialization.registerClass(Masking);\n//# sourceMappingURL=core.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * TensorFlow.js Layers: Embedding Layer.\n *\n * Original source: keras/constraints.py\n */\nimport { notEqual, serialization, tidy, zerosLike } from '@tensorflow/tfjs-core';\nimport * as K from '../backend/tfjs_backend';\nimport { getConstraint, serializeConstraint } from '../constraints';\nimport { Layer } from '../engine/topology';\nimport { ValueError } from '../errors';\nimport { getInitializer, serializeInitializer } from '../initializers';\nimport { getRegularizer, serializeRegularizer } from '../regularizers';\nimport * as generic_utils from '../utils/generic_utils';\nimport { getExactlyOneShape, getExactlyOneTensor } from '../utils/types_utils';\nexport class Embedding extends Layer {\n constructor(args) {\n super(args);\n this.embeddings = null;\n this.DEFAULT_EMBEDDINGS_INITIALIZER = 'randomUniform';\n if (args.batchInputShape == null && args.inputShape == null) {\n // Porting Note: This logic is copied from Layer's constructor, since we\n // can't do exactly what the Python constructor does for Embedding().\n // Specifically, the super constructor can not be 
called after the\n // mutation of the `config` argument.\n let batchSize = null;\n if (args.batchSize != null) {\n batchSize = args.batchSize;\n }\n if (args.inputLength == null) {\n // Fix super-constructor to what it would have done if\n // 'config.inputShape' were (None, )\n this.batchInputShape = [batchSize, null];\n }\n else {\n // Fix super-constructor to what it would have done if\n // 'config.inputShape' were (config.inputLength, )\n this.batchInputShape =\n [batchSize].concat(generic_utils.toList(args.inputLength));\n }\n }\n this.inputDim = args.inputDim;\n generic_utils.assertPositiveInteger(this.inputDim, 'inputDim');\n this.outputDim = args.outputDim;\n generic_utils.assertPositiveInteger(this.outputDim, 'outputDim');\n this.embeddingsInitializer = getInitializer(args.embeddingsInitializer || this.DEFAULT_EMBEDDINGS_INITIALIZER);\n this.embeddingsRegularizer = getRegularizer(args.embeddingsRegularizer);\n this.activityRegularizer = getRegularizer(args.activityRegularizer);\n this.embeddingsConstraint = getConstraint(args.embeddingsConstraint);\n this.maskZero = args.maskZero;\n this.supportsMasking = args.maskZero;\n this.inputLength = args.inputLength;\n }\n build(inputShape) {\n this.embeddings = this.addWeight('embeddings', [this.inputDim, this.outputDim], this.dtype, this.embeddingsInitializer, this.embeddingsRegularizer, true, this.embeddingsConstraint);\n this.built = true;\n }\n // Override warnOnIncompatibleInputShape because an embedding layer allows\n // the input to have varying ranks.\n warnOnIncompatibleInputShape(inputShape) { }\n computeMask(inputs, mask) {\n return tidy(() => {\n if (!this.maskZero) {\n return null;\n }\n else {\n inputs = getExactlyOneTensor(inputs);\n return notEqual(inputs, zerosLike(inputs));\n }\n });\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n if (this.inputLength == null) {\n return [...inputShape, this.outputDim];\n }\n // inputLength can be an array if input is 3D or higher.\n const inLens = generic_utils.toList(this.inputLength);\n if (inLens.length !== inputShape.length - 1) {\n throw new ValueError(`\"inputLength\" is ${this.inputLength}, but received ` +\n `input shape has shape ${inputShape}`);\n }\n else {\n let i = 0;\n for (let k = 0; k < inLens.length; ++k) {\n const s1 = inLens[k];\n const s2 = inputShape[k + 1];\n if ((s1 != null) && (s2 != null) && (s1 !== s2)) {\n throw new ValueError(`\"inputLength\" is ${this.inputLength}, but received ` +\n `input shape has shape ${inputShape}`);\n }\n else if (s1 == null) {\n inLens[i] = s2;\n }\n i++;\n }\n }\n return [inputShape[0], ...inLens, this.outputDim];\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n // Embedding layer accepts only a single input.\n let input = getExactlyOneTensor(inputs);\n if (input.dtype !== 'int32') {\n input = K.cast(input, 'int32');\n }\n const output = K.gather(this.embeddings.read(), input.as1D());\n return output.reshape(getExactlyOneShape(this.computeOutputShape(input.shape)));\n });\n }\n getConfig() {\n const config = {\n inputDim: this.inputDim,\n outputDim: this.outputDim,\n embeddingsInitializer: serializeInitializer(this.embeddingsInitializer),\n embeddingsRegularizer: serializeRegularizer(this.embeddingsRegularizer),\n activityRegularizer: serializeRegularizer(this.activityRegularizer),\n embeddingsConstraint: serializeConstraint(this.embeddingsConstraint),\n maskZero: this.maskZero,\n inputLength: this.inputLength\n };\n const baseConfig = 
super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nEmbedding.className = 'Embedding';\nserialization.registerClass(Embedding);\n//# sourceMappingURL=embeddings.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * TensorFlow.js Layers: Merge Layers.\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { serialization, tidy, util } from '@tensorflow/tfjs-core';\nimport * as K from '../backend/tfjs_backend';\nimport { Layer } from '../engine/topology';\nimport { NotImplementedError, ValueError } from '../errors';\nimport { l2Normalize } from '../losses';\nimport * as generic_utils from '../utils/generic_utils';\nimport * as mathUtils from '../utils/math_utils';\nimport { getExactlyOneShape } from '../utils/types_utils';\n/**\n * Generic Merge layer for element-wise merge functions.\n *\n * Used to implement `Sum`, `Average`, `Concatenate`, etc.\n */\nexport class Merge extends Layer {\n constructor(args) {\n super(args || {});\n this.supportsMasking = true;\n }\n /**\n * Logic for merging multiple tensors, to be overridden by subclasses.\n * @param inputs\n */\n mergeFunction(inputs) {\n throw new NotImplementedError();\n }\n /**\n * Computes the shape of the result of an elementwise operation.\n *\n * @param shape1: Shape of the first tensor.\n * @param shape2: Shape of the second tensor.\n * @returns Expected output shape when an elementwise operation is carried\n * out on 2 tensors with shapes `shape1` and `shape2`.\n * @throws ValueError: If `shape1` and `shape2` are not compatible for\n * element-wise operations.\n */\n computeElementwiseOpOutputShape(shape1, shape2) {\n if (shape1 == null || shape2 == null) {\n return null;\n }\n else if (shape1.length < shape2.length) {\n return this.computeElementwiseOpOutputShape(shape2, shape1);\n }\n else if (shape2.length === 0) {\n return shape1;\n }\n const outputShape = shape1.slice(0, shape1.length - shape2.length);\n for (let k = 0; k < shape2.length; ++k) {\n const i = shape1[shape1.length - shape2.length + k];\n const j = shape2[k];\n if (i == null || j == null || i < 0 || j < 0) {\n outputShape.push(null);\n }\n else if (i === 1) {\n outputShape.push(j);\n }\n else if (j === 1) {\n outputShape.push(i);\n }\n else {\n if (i !== j) {\n throw new ValueError('Operands could not be broadcast together with shapes ' +\n JSON.stringify(shape1) + ' ' + JSON.stringify(shape2));\n }\n outputShape.push(i);\n }\n }\n return outputShape;\n }\n build(inputShape) {\n // Used purely for shape validation.\n if (Array.isArray(inputShape) && !Array.isArray(inputShape[0])) {\n // Make sure that inputShape is an Array of shape.\n inputShape = [getExactlyOneShape(inputShape)];\n }\n inputShape = inputShape;\n if (inputShape.length < 2) {\n throw new ValueError('A merge layer should be called on an Array of at least 2 inputs.' +\n ` Got ${inputShape.length} input(s).`);\n }\n // Make sure that there is at most one unique batch size among the input\n // shapes.\n let batchSizes = [];\n for (const shape of inputShape) {\n if (shape != null && shape[0] !== null) {\n batchSizes.push(shape[0]);\n }\n }\n batchSizes = generic_utils.unique(batchSizes);\n if (batchSizes.length > 1) {\n throw new ValueError(`Can not merge tensors with different batch sizes. 
` +\n `Got tensors with shapes: ${JSON.stringify(inputShape)}.`);\n }\n let outputShape = inputShape[0] == null ? null : inputShape[0].slice(1);\n for (let i = 1; i < inputShape.length; ++i) {\n const shape = inputShape[i] == null ? null : inputShape[i].slice(1);\n outputShape = this.computeElementwiseOpOutputShape(outputShape, shape);\n }\n // If the inputs have different ranks, we have to reshape them to make them\n // broadcastable.\n const allRanks = inputShape.map(shape => shape.length);\n if (inputShape.indexOf(null) === -1 &&\n generic_utils.unique(allRanks).length === 1) {\n this.reshapeRequired = false;\n }\n else {\n this.reshapeRequired = true;\n }\n }\n call(inputs, kwargs) {\n return tidy(() => {\n inputs = inputs;\n if (this.reshapeRequired) {\n const reshapedInputs = [];\n const inputDims = inputs.map(input => input.rank);\n if (inputDims.indexOf(null) === -1) {\n // If ranks of all inputs are available, we simply expand each of them\n // at axis=1 until all of them have the same rank.\n const maxNDim = mathUtils.max(inputDims);\n for (let x of inputs) {\n const xNDim = x.rank;\n for (let k = 0; k < maxNDim - xNDim; ++k) {\n x = K.expandDims(x, 1);\n }\n reshapedInputs.push(x);\n }\n return this.mergeFunction(reshapedInputs);\n }\n else {\n // Transpose all inputs so that batch size is the last dimension.\n // [batchSize, dim1, dim2, ...] -> [dim1, dim2, ..., batchSize]\n let transposed = false;\n for (const x of inputs) {\n const xNDim = x.rank;\n if (xNDim == null) {\n const xShape = x.shape;\n const batchSize = xShape[0];\n const newShape = xShape.slice(1).concat([batchSize]);\n let xTransposed = x.reshape([batchSize].concat(mathUtils.arrayProd(xShape.slice(1))));\n xTransposed = tfc.transpose(xTransposed, [1, 0]);\n xTransposed = xTransposed.reshape(newShape);\n reshapedInputs.push(xTransposed);\n transposed = true;\n }\n else if (xNDim > 1) {\n const dims = mathUtils.range(1, xNDim).concat([0]);\n reshapedInputs.push(tfc.transpose(x, dims));\n transposed = true;\n }\n else {\n // We don't transpose inputs if they are 1D vectors or scalars.\n reshapedInputs.push(x);\n }\n }\n let y = this.mergeFunction(reshapedInputs);\n const yNDim = y.rank;\n if (transposed) {\n // If inputs have been transposed, we have to transpose the output\n // too.\n if (yNDim == null) {\n const yShape = y.shape;\n const yNDim = yShape.length;\n const batchSize = yShape[yNDim - 1];\n const newShape = [batchSize].concat(yShape.slice(0, yShape.length - 1));\n y = tfc.transpose(y.reshape([-1, batchSize]), [1, 0])\n .reshape(newShape);\n }\n else if (yNDim > 1) {\n const dims = [yNDim - 1].concat(mathUtils.range(0, yNDim - 1));\n y = tfc.transpose(y, dims);\n }\n }\n return y;\n }\n }\n else {\n return this.mergeFunction(inputs);\n }\n });\n }\n computeOutputShape(inputShape) {\n inputShape = inputShape;\n let outputShape;\n if (inputShape[0] == null) {\n outputShape = null;\n }\n else {\n outputShape = inputShape[0].slice(1);\n }\n for (let i = 1; i < inputShape.length; ++i) {\n const shape = inputShape[i] == null ? 
null : inputShape[i].slice(1);\n outputShape = this.computeElementwiseOpOutputShape(outputShape, shape);\n }\n let batchSizes = [];\n for (const shape of inputShape) {\n if (shape != null && shape[0] !== null) {\n batchSizes.push(shape[0]);\n }\n }\n batchSizes = generic_utils.unique(batchSizes);\n if (batchSizes.length === 1) {\n outputShape = batchSizes.concat(outputShape);\n }\n else {\n outputShape = [null].concat(outputShape);\n }\n return outputShape;\n }\n computeMask(inputs, mask) {\n return tfc.tidy(() => {\n if (mask == null) {\n return null;\n }\n if (!Array.isArray(mask)) {\n throw new ValueError('`mask` should be an Array');\n }\n if (!Array.isArray(inputs)) {\n throw new ValueError('`inputs` should be an Array');\n }\n if (mask.length !== inputs.length) {\n throw new ValueError(`The Array 'inputs' and 'mask' are expected to have the same ` +\n `length, but have different lengths ` +\n `(${inputs.length} vs ${mask.length})`);\n }\n if (mask.every(m => m == null)) {\n return null;\n }\n mask = mask.map(m => m == null ? m : tfc.expandDims(m, 0));\n let output = mask[0];\n for (let i = 1; i < mask.length - 1; ++i) {\n output = tfc.logicalAnd(output, mask[i]);\n }\n return output;\n });\n }\n}\nexport class Add extends Merge {\n constructor(args) {\n super(args);\n }\n mergeFunction(inputs) {\n return tidy(() => {\n let output = inputs[0].clone();\n for (let i = 1; i < inputs.length; ++i) {\n output = tfc.add(output, inputs[i]);\n }\n return output;\n });\n }\n}\n/** @nocollapse */\nAdd.className = 'Add';\nserialization.registerClass(Add);\n/**\n * Calculate the element-wise sum of inputs, which all have the same shape.\n *\n * This function can be invoked in three ways.\n *\n * 1. Construct an instance of `Add` layer, by using no input argument\n * or a single configuration argument. The resultant `Add` layer can then\n * be used on `tf.SymbolicTensor`s or `tf.Tensor`s. For example:\n *\n * ```js\n * const addLayer = tf.layers.add();\n *\n * // The layer can be applied to inputs.\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = addLayer.apply([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 2. Invoke directly on an `Array` of `tf.SymbolicTensor`s. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.SymbolicTensor`. For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = tf.layers.add([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 3. Invoke directly on `tf.Tensor`s, i.e., concrete values. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.Tensor` as the result of the computation. 
For\n * example:\n *\n * ```js\n * const input1 = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n * const input2 = tf.tensor2d([10, 20, 30, 40], [2, 2]);\n * tf.layers.add([input1, input2]).print();\n * // Gives [[11, 22], [33, 44]].\n *\n */\nexport function add(config) {\n if (Array.isArray(config)) {\n const layer = new Add({});\n return layer.apply(config);\n }\n else {\n return new Add(config);\n }\n}\nexport class Multiply extends Merge {\n constructor(args) {\n super(args);\n }\n mergeFunction(inputs) {\n return tidy(() => {\n let output = inputs[0].clone();\n for (let i = 1; i < inputs.length; ++i) {\n output = tfc.mul(output, inputs[i]);\n }\n return output;\n });\n }\n}\n/** @nocollapse */\nMultiply.className = 'Multiply';\nserialization.registerClass(Multiply);\n/**\n * Calculate the element-wise product of inputs, which all have the same shape.\n *\n * This function can be invoked in three ways.\n *\n * 1. Construct an instance of `Multiply` layer, by using no input argument\n * or a single configuration argument. The resultant `Multiply` layer can\n * then be used on `tf.SymbolicTensor`s or `tf.Tensor`s. For example:\n *\n * ```js\n * const multiplyLayer = tf.layers.multiply();\n *\n * // The layer can be applied to inputs.\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = multiplyLayer.apply([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 2. Invoke directly on an `Array` of `tf.SymbolicTensor`s. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.SymbolicTensor`. For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = tf.layers.multiply([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 3. Invoke directly on `tf.Tensor`s, i.e., concrete values. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.Tensor` as the result of the computation. For\n * example:\n *\n * ```js\n * const input1 = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n * const input2 = tf.tensor2d([10, 20, 30, 40], [2, 2]);\n * tf.layers.multiply([input1, input2]).print();\n * // Gives [[10, 40], [90, 160]].\n *\n */\nexport function multiply(config) {\n if (Array.isArray(config)) {\n const layer = new Multiply({});\n return layer.apply(config);\n }\n else {\n return new Multiply(config);\n }\n}\nexport class Average extends Merge {\n constructor(args) {\n super(args);\n }\n mergeFunction(inputs) {\n return tidy(() => {\n let output = inputs[0].clone();\n for (let i = 1; i < inputs.length; ++i) {\n output = tfc.add(output, inputs[i]);\n }\n return tfc.mul(1 / inputs.length, output);\n });\n }\n}\n/** @nocollapse */\nAverage.className = 'Average';\nserialization.registerClass(Average);\n/**\n * Calculate the element-wise arithmetic mean of inputs, which all have the same\n * shape.\n *\n * This function can be invoked in three ways.\n *\n * 1. Construct an instance of `Average` layer, by using no input argument\n * or a single configuration argument. The resultant `Average` layer can then\n * be used on `tf.SymbolicTensor`s or `tf.Tensor`s. 
For example:\n *\n * ```js\n * const averageLayer = tf.layers.average();\n *\n * // The layer can be applied to inputs.\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = averageLayer.apply([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 2. Invoke directly on an `Array` of `tf.SymbolicTensor`s. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.SymbolicTensor`. For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = tf.layers.average([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 3. Invoke directly on `tf.Tensor`s, i.e., concrete values. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.Tensor` as the result of the computation. For\n * example:\n *\n * ```js\n * const input1 = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n * const input2 = tf.tensor2d([10, 20, 30, 40], [2, 2]);\n * tf.layers.average([input1, input2]).print();\n * // Gives [[5.5, 11], [16.5, 22]].\n *\n */\nexport function average(config) {\n if (Array.isArray(config)) {\n const layer = new Average({});\n return layer.apply(config);\n }\n else {\n return new Average(config);\n }\n}\nexport class Maximum extends Merge {\n constructor(args) {\n super(args);\n }\n mergeFunction(inputs) {\n return tidy(() => {\n let output = inputs[0];\n for (let i = 1; i < inputs.length; ++i) {\n output = tfc.maximum(output, inputs[i]);\n }\n return output;\n });\n }\n}\n/** @nocollapse */\nMaximum.className = 'Maximum';\nserialization.registerClass(Maximum);\n/**\n * Calculate the element-wise maximum of inputs, which all have the same shape.\n *\n * This function can be invoked in three ways.\n *\n * 1. Construct an instance of `Maximum` layer, by using no input argument\n * or a single configuration argument. The resultant `Maximum` layer can then\n * be used on `tf.SymbolicTensor`s or `tf.Tensor`s. For example:\n *\n * ```js\n * const maximumLayer = tf.layers.maximum();\n *\n * // The layer can be applied to inputs.\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = maximumLayer.apply([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 2. Invoke directly on an `Array` of `tf.SymbolicTensor`s. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.SymbolicTensor`. For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = tf.layers.maximum([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 3. Invoke directly on `tf.Tensor`s, i.e., concrete values. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.Tensor` as the result of the computation. 
For\n * example:\n *\n * ```js\n * const input1 = tf.tensor2d([1, 20, 3, 40], [2, 2]);\n * const input2 = tf.tensor2d([10, 2, 30, 4], [2, 2]);\n * tf.layers.maximum([input1, input2]).print();\n * // Gives [[10, 20], [30, 40]].\n *\n */\nexport function maximum(config) {\n if (Array.isArray(config)) {\n const layer = new Maximum({});\n return layer.apply(config);\n }\n else {\n return new Maximum(config);\n }\n}\nexport class Minimum extends Merge {\n constructor(args) {\n super(args);\n }\n mergeFunction(inputs) {\n return tidy(() => {\n let output = inputs[0];\n for (let i = 1; i < inputs.length; ++i) {\n output = tfc.minimum(output, inputs[i]);\n }\n return output;\n });\n }\n}\n/** @nocollapse */\nMinimum.className = 'Minimum';\nserialization.registerClass(Minimum);\n/**\n * Calculate the element-wise minimum of inputs, which all have the same shape.\n *\n * This function can be invoked in three ways.\n *\n * 1. Construct an instance of `Minimum` layer, by using no input argument\n * or a single configuration argument. The resultant `Minimum` layer can then\n * be used on `tf.SymbolicTensor`s or `tf.Tensor`s. For example:\n *\n * ```js\n * const minimumLayer = tf.layers.minimum();\n *\n * // The layer can be applied to inputs.\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = minimumLayer.apply([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 2. Invoke directly on an `Array` of `tf.SymbolicTensor`s. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.SymbolicTensor`. For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = tf.layers.minimum([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 3. Invoke directly on `tf.Tensor`s, i.e., concrete values. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.Tensor` as the result of the computation. For\n * example:\n *\n * ```js\n * const input1 = tf.tensor2d([1, 20, 3, 40], [2, 2]);\n * const input2 = tf.tensor2d([10, 2, 30, 4], [2, 2]);\n * tf.layers.minimum([input1, input2]).print();\n * // Gives [[1, 2], [3, 4]].\n *\n */\nexport function minimum(config) {\n if (Array.isArray(config)) {\n const layer = new Minimum({});\n return layer.apply(config);\n }\n else {\n return new Minimum(config);\n }\n}\nexport class Concatenate extends Merge {\n constructor(args) {\n super(args);\n this.DEFAULT_AXIS = -1;\n if (args == null) {\n args = {};\n }\n this.axis = args.axis == null ? 
this.DEFAULT_AXIS : args.axis;\n this.supportsMasking = true;\n this.reshapeRequired = false;\n }\n build(inputShape) {\n // Used purely for shape validation.\n if (!(Array.isArray(inputShape) && Array.isArray(inputShape[0])) ||\n inputShape.length === 1) {\n throw new ValueError('A `Concatenate` layer should be called on a list of at least 2 ' +\n 'inputs');\n }\n inputShape = inputShape;\n let allNoneShape = true;\n for (const shape of inputShape) {\n if (shape != null) {\n allNoneShape = false;\n break;\n }\n }\n if (allNoneShape) {\n return;\n }\n const shapeSet = [];\n for (let i = 0; i < inputShape.length; ++i) {\n const shapeWithoutConcatAxis = inputShape[i].slice();\n shapeWithoutConcatAxis.splice(this.axis, 1);\n let exists = false;\n for (const shape of shapeSet) {\n if (util.arraysEqual(shape, shapeWithoutConcatAxis)) {\n exists = true;\n break;\n }\n }\n if (!exists) {\n shapeSet.push(shapeWithoutConcatAxis);\n }\n }\n if (shapeSet.length > 1) {\n throw new ValueError('A `Concatenate` layer requires inputs with matching shapes ' +\n 'except for the concat axis. Got input shapes: ' +\n JSON.stringify(inputShape));\n }\n }\n mergeFunction(inputs) {\n return tidy(() => {\n return K.concatenate(inputs, this.axis);\n });\n }\n computeOutputShape(inputShape) {\n if (!(Array.isArray(inputShape) && Array.isArray(inputShape[0]))) {\n throw new ValueError('A `Concatenate` layer should be called on a list of inputs.');\n }\n const inputShapes = inputShape;\n const outputShape = inputShapes[0].slice();\n const axis = this.axis < 0 ? outputShape.length + this.axis : this.axis;\n // Porting Note: the line above is because TypeScript doesn't support\n // negative indices.\n for (const shape of inputShapes.slice(1)) {\n if (outputShape[axis] == null || shape[axis] == null) {\n outputShape[axis] = null;\n break;\n }\n outputShape[axis] += shape[axis];\n }\n return outputShape;\n }\n computeMask(inputs, mask) {\n if (mask == null) {\n return null;\n }\n if (!Array.isArray(mask)) {\n throw new ValueError('`mask` should be an array for Concatenate');\n }\n if (!Array.isArray(inputs)) {\n throw new ValueError('`inputs` should be an array for Concatenate');\n }\n if (mask.length !== inputs.length) {\n throw new ValueError(`Mismatch in the length of mask (${mask.length}) ` +\n `and the length of inputs (${inputs.length})`);\n }\n return tfc.tidy(() => {\n let allNullMasks = true;\n mask.forEach(m => {\n if (m != null) {\n allNullMasks = false;\n return;\n }\n });\n if (allNullMasks) {\n return null;\n }\n const outputMasks = [];\n for (let i = 0; i < inputs.length; ++i) {\n if (mask[i] == null) {\n // Input is unmasked. Append all 1's to masks.\n outputMasks.push(tfc.onesLike(inputs[i]).asType('bool'));\n }\n else if (mask[i].rank < inputs[i].rank) {\n // Mask is smaller than the input, expand it.\n outputMasks.push(tfc.expandDims(mask[i], -1));\n }\n else {\n outputMasks.push(mask[i]);\n }\n }\n const concatenatedMasks = tfc.concat(outputMasks, this.axis);\n return tfc.all(concatenatedMasks, -1, false);\n });\n }\n getConfig() {\n const config = {\n 'axis': this.axis,\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nConcatenate.className = 'Concatenate';\nserialization.registerClass(Concatenate);\n/**\n * Concatenate an `Array` of inputs.\n *\n * This function can be invoked in three ways.\n *\n * 1. Construct an instance of `Concatenate` layer, by using no input argument\n * or a single configuration argument. 
The resultant `Concatenate` layer can\n * then be used on `tf.SymbolicTensor`s or `tf.Tensor`s. For example:\n *\n * ```js\n * const concatLayer = tf.layers.concatenate();\n *\n * // The layer can be applied to inputs.\n * const input1 = tf.input({shape: [2, 3]});\n * const input2 = tf.input({shape: [2, 4]});\n * const output = concatLayer.apply([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 7], with the first dimension as the undetermined batch\n * // dimension and the last dimension as the result of concatenating the\n * // last dimensions of the two inputs.\n * ```\n *\n * 2. Invoke directly on an `Array` of `tf.SymbolicTensor`s. This constructs\n * a `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.SymbolicTensor`. For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 3]});\n * const input2 = tf.input({shape: [2, 4]});\n * const output = tf.layers.concatenate([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 7], with the first dimension as the undetermined batch\n * // dimension and the last dimension as the result of concatenating the\n * // last dimensions of the two inputs.\n * ```\n *\n * 3. Invoke directly on `tf.Tensor`s, i.e., concrete values. This constructs\n * a `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.Tensor` as the result of the computation. For\n * example:\n *\n * ```js\n * const input1 = tf.tensor2d([[1, 2], [3, 4]], [2, 2]);\n * const input2 = tf.tensor2d([[10, 20], [30, 40]], [2, 2]);\n * tf.layers.concatenate([input1, input2]).print();\n * // Gives [[1, 2, 10, 20], [3, 4, 30, 40]].\n * ```\n */\nexport function concatenate(config) {\n if (Array.isArray(config)) {\n const layer = new Concatenate({});\n return layer.apply(config);\n }\n else {\n return new Concatenate(config);\n }\n}\n/**\n * Interpret a potentially negative axis index.\n *\n * For example, given axis = -1, and dim = 3, this function will return 2.\n *\n * @param axis The axis index, may be a positive, zero or negative integer.\n * @param dim Total number of dimensions, a positive integer.\n * @returns A non-negative axis index equivalent to the input `axis`.\n */\nfunction interpretAxis(axis, dim) {\n while (axis < 0) {\n axis += dim;\n }\n return axis;\n}\nfunction batchDot(x, y, axes) {\n if (x.shape.length > 3 || y.shape.length > 3) {\n throw new NotImplementedError('batchDot is not implemented for tensors of 4D or higher rank yet');\n }\n tfc.util.assert(x.shape.length >= 2, () => `batchDot requires the rank of x to be >= 2, ` +\n `but got ${x.shape.length}`);\n tfc.util.assert(y.shape.length >= 2, () => `batchDot requires the rank of y to be >= 2, ` +\n `but got ${y.shape.length}`);\n if (typeof axes === 'number') {\n axes = [axes, axes];\n }\n if (x.dtype === 'complex64' || y.dtype === 'complex64') {\n throw new NotImplementedError('batchDot is not implemented for complex64-type Tensors yet.');\n }\n const xNDim = x.shape.length;\n const yNDim = y.shape.length;\n if (axes == null) {\n // Behave like batchMatmul by default.\n axes = [xNDim - 1, yNDim - 2];\n }\n const axesArray = axes;\n return tfc.tidy(() => {\n let diff;\n if (xNDim > yNDim) {\n diff = xNDim - yNDim;\n const diffShape = [];\n for (let i = 0; i < diff; ++i) {\n diffShape.push(1);\n }\n y = y.reshape(y.shape.concat(diffShape));\n }\n else if (yNDim > xNDim) {\n diff = yNDim - xNDim;\n const diffShape = [];\n for (let i = 0; i < diff; ++i) {\n diffShape.push(1);\n }\n 
x = x.reshape(x.shape.concat(diffShape));\n }\n else {\n diff = 0;\n }\n let out;\n if (x.shape.length === 2 && y.shape.length === 2) {\n if (axesArray[0] === axesArray[1]) {\n out = x.mul(y).sum(axesArray[0]);\n }\n else {\n out = x.transpose([1, 0]).mul(y).sum(axesArray[1]);\n }\n }\n else {\n const adjX = axesArray[0] !== x.shape.length - 1;\n const adjY = axesArray[1] === y.shape.length - 1;\n out = x.matMul(y, adjX, adjY);\n }\n if (diff > 0) {\n let idx;\n if (xNDim > yNDim) {\n idx = xNDim + yNDim - 3;\n }\n else {\n idx = xNDim - 1;\n }\n const squeezeAxes = [];\n for (let i = idx; i < idx + diff; ++i) {\n squeezeAxes.push(i);\n }\n out = out.squeeze(squeezeAxes);\n }\n if (out.shape.length === 1) {\n out = out.expandDims(1);\n }\n return out;\n });\n}\nexport class Dot extends Merge {\n constructor(args) {\n super(args);\n this.axes = args.axes;\n this.normalize = args.normalize == null ? false : args.normalize;\n this.supportsMasking = true;\n this.reshapeRequired = false;\n }\n build(inputShape) {\n tfc.util.assert(Array.isArray(inputShape) && inputShape.length === 2 &&\n Array.isArray(inputShape[0]) && Array.isArray(inputShape[1]), () => 'A `Dot` layer should be called on a list of exactly 2 inputs.');\n const shape1 = inputShape[0];\n const shape2 = inputShape[1];\n if (shape1.length > 3 || shape2.length > 3) {\n throw new NotImplementedError('Dot layer does not support tensors of 4D or higher rank yet.');\n }\n const axes = this.interpretAxes(shape1, shape2);\n if (shape1[axes[0]] !== shape2[axes[1]]) {\n throw new ValueError(`Dimension incompatibility: ` +\n `${shape1[axes[0]]} !== ${shape2[axes[1]]}`);\n }\n }\n mergeFunction(inputs) {\n if (inputs.length !== 2) {\n throw new ValueError('A `Dot` layer must be called on exactly 2 inputs, ' +\n `but received ${inputs.length} input(s).`);\n }\n let x1 = inputs[0];\n let x2 = inputs[1];\n let axes;\n if (!Array.isArray(this.axes)) {\n axes = [\n interpretAxis(this.axes, x1.shape.length),\n interpretAxis(this.axes, x2.shape.length)\n ];\n }\n else {\n axes = this.axes.map((axis, i) => interpretAxis(axis, inputs[i].shape.length));\n }\n if (this.normalize) {\n x1 = l2Normalize(x1, axes[0]);\n x2 = l2Normalize(x2, axes[1]);\n }\n return batchDot(x1, x2, axes);\n }\n interpretAxes(shape1, shape2) {\n let axes;\n if (!Array.isArray(this.axes)) {\n // `this.axes` is a single integer.\n axes = [\n interpretAxis(this.axes, shape1.length),\n interpretAxis(this.axes, shape2.length)\n ];\n }\n else {\n // `this.axes` is an Array of integers.\n axes = this.axes;\n }\n return axes;\n }\n computeOutputShape(inputShape) {\n tfc.util.assert(Array.isArray(inputShape) && inputShape.length === 2 &&\n Array.isArray(inputShape[0]) && Array.isArray(inputShape[1]), () => 'A `Dot` layer should be called on a list of exactly 2 inputs.');\n const shape1 = inputShape[0].slice();\n const shape2 = inputShape[1].slice();\n if (shape1.length > 3 || shape2.length > 3) {\n throw new NotImplementedError('Dot layer does not support tensors of 4D or higher rank yet.');\n }\n const axes = this.interpretAxes(shape1, shape2);\n shape1.splice(axes[0], 1);\n shape2.splice(axes[1], 1);\n shape2.splice(0, 1);\n const outputShape = shape1.concat(shape2);\n if (outputShape.length === 1) {\n outputShape.push(1);\n }\n return outputShape;\n }\n computeMask(inputs, mask) {\n return null;\n }\n getConfig() {\n const config = {\n 'axes': this.axes,\n 'normalize': this.normalize\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n 
}\n}\n/** @nocollapse */\nDot.className = 'Dot';\nserialization.registerClass(Dot);\n// TODO(cais): Add functional interfaces for the merge layers.\n//# sourceMappingURL=merge.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * TensorFlow.js Layers: Noise Layers.\n */\nimport { greaterEqual, randomUniform, serialization, tidy } from '@tensorflow/tfjs-core';\nimport * as K from '../backend/tfjs_backend';\nimport { Layer } from '../engine/topology';\nimport { getExactlyOneTensor } from '../utils/types_utils';\nexport class GaussianNoise extends Layer {\n constructor(args) {\n super(args);\n this.supportsMasking = true;\n this.stddev = args.stddev;\n }\n computeOutputShape(inputShape) {\n return inputShape;\n }\n getConfig() {\n const baseConfig = super.getConfig();\n const config = { stddev: this.stddev };\n Object.assign(config, baseConfig);\n return config;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n const input = getExactlyOneTensor(inputs);\n const noised = () => K.randomNormal(input.shape, 0, this.stddev).add(input);\n const output = K.inTrainPhase(noised, () => input, kwargs['training'] || false);\n return output;\n });\n }\n}\n/** @nocollapse */\nGaussianNoise.className = 'GaussianNoise';\nserialization.registerClass(GaussianNoise);\nexport class GaussianDropout extends Layer {\n constructor(args) {\n super(args);\n this.supportsMasking = true;\n this.rate = args.rate;\n }\n computeOutputShape(inputShape) {\n return inputShape;\n }\n getConfig() {\n const baseConfig = super.getConfig();\n const config = { rate: this.rate };\n Object.assign(config, baseConfig);\n return config;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n const input = getExactlyOneTensor(inputs);\n if (this.rate > 0 && this.rate < 1) {\n const noised = () => {\n const stddev = Math.sqrt(this.rate / (1 - this.rate));\n return input.mul(K.randomNormal(input.shape, 1, stddev));\n };\n return K.inTrainPhase(noised, () => input, kwargs['training'] || false);\n }\n return input;\n });\n }\n}\n/** @nocollapse */\nGaussianDropout.className = 'GaussianDropout';\nserialization.registerClass(GaussianDropout);\n/**\n * Applies Alpha Dropout to the input.\n *\n * As it is a regularization layer, it is only active at training time.\n *\n * Alpha Dropout is a `Dropout` that keeps mean and variance of inputs\n * to their original values, in order to ensure the self-normalizing property\n * even after this dropout.\n * Alpha Dropout fits well to Scaled Exponential Linear Units\n * by randomly setting activations to the negative saturation value.\n *\n * Arguments:\n * - `rate`: float, drop probability (as with `Dropout`).\n * The multiplicative noise will have\n * standard deviation `sqrt(rate / (1 - rate))`.\n * - `noise_shape`: A 1-D `Tensor` of type `int32`, representing the\n * shape for randomly generated keep/drop flags.\n *\n * Input shape:\n * Arbitrary. 
Use the keyword argument `inputShape`\n * (tuple of integers, does not include the samples axis)\n * when using this layer as the first layer in a model.\n *\n * Output shape:\n * Same shape as input.\n *\n * References:\n * - [Self-Normalizing Neural Networks](https://arxiv.org/abs/1706.02515)\n */\nexport class AlphaDropout extends Layer {\n constructor(args) {\n super(args);\n this.supportsMasking = true;\n this.rate = args.rate;\n this.noiseShape = args.noiseShape;\n }\n _getNoiseShape(inputs) {\n return this.noiseShape || getExactlyOneTensor(inputs).shape;\n }\n computeOutputShape(inputShape) {\n return inputShape;\n }\n getConfig() {\n const baseConfig = super.getConfig();\n const config = { rate: this.rate };\n Object.assign(config, baseConfig);\n return config;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n if (this.rate < 1 && this.rate > 0) {\n const noiseShape = this._getNoiseShape(inputs);\n const droppedInputs = () => {\n const input = getExactlyOneTensor(inputs);\n const alpha = 1.6732632423543772848170429916717;\n const scale = 1.0507009873554804934193349852946;\n const alphaP = -alpha * scale;\n let keptIdx = greaterEqual(randomUniform(noiseShape), this.rate);\n keptIdx = K.cast(keptIdx, 'float32'); // get default dtype.\n // Get affine transformation params.\n const a = ((1 - this.rate) * (1 + this.rate * alphaP ** 2)) ** -0.5;\n const b = -a * alphaP * this.rate;\n // Apply mask.\n const x = input.mul(keptIdx).add(keptIdx.add(-1).mul(alphaP));\n return x.mul(a).add(b);\n };\n return K.inTrainPhase(droppedInputs, () => getExactlyOneTensor(inputs), kwargs['training'] || false);\n }\n return inputs;\n });\n }\n}\n/** @nocollapse */\nAlphaDropout.className = 'AlphaDropout';\nserialization.registerClass(AlphaDropout);\n//# sourceMappingURL=noise.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Normalization layers.\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { moments, serialization, tidy, util } from '@tensorflow/tfjs-core';\nimport { getConstraint, serializeConstraint } from '../constraints';\nimport { InputSpec, Layer } from '../engine/topology';\nimport { NotImplementedError, ValueError } from '../errors';\nimport { getInitializer, serializeInitializer } from '../initializers';\nimport { getRegularizer, serializeRegularizer } from '../regularizers';\nimport * as generic_utils from '../utils/generic_utils';\nimport * as math_utils from '../utils/math_utils';\nimport { getExactlyOneShape, getExactlyOneTensor } from '../utils/types_utils';\n/**\n * Applies batch normalization on x given mean, var, beta and gamma.\n *\n * I.e. 
returns:\n * `output = (x - mean) / (sqrt(var) + epsilon) * gamma + beta`\n *\n * @param x Input tensor.\n * @param mean Mean of batch.\n * @param variance Variance of batch.\n * @param beta Tensor with which to center the input.\n * @param gamma Tensor by which to scale the input.\n * @param epsilon Fuzz factor.\n * @returns The result of the batch normalization.\n */\nexport function batchNormalization(x, mean, variance, beta, gamma, epsilon = 1e-3) {\n let out;\n if (x.rank === 2) {\n out = tfc.batchNorm2d(x, mean, variance, beta, gamma, epsilon);\n }\n else if (x.rank === 3) {\n // TODO(cais): Check rank; give proper error message.\n out = tfc.batchNorm3d(x, mean, variance, beta, gamma, epsilon);\n }\n else if (x.rank === 4) {\n out = tfc.batchNorm4d(x, mean, variance, beta, gamma, epsilon);\n }\n else {\n throw new NotImplementedError(`batchNormalization is not implemented for array of rank ${x.rank} ` +\n `yet`);\n }\n return out;\n}\n/**\n * Non-broadcasting batch normalization for use in training (not inference).\n *\n * The input is normalized to zero mean and unit variance along the\n * `reductionAxes`, followed by scaling with `gamma` and shifted by `beta`.\n * The result of that is returned as the first element\n * of the returned `Array`. The other two elements are the mean and variance,\n * respectively.\n *\n * @param x Input tensor to be normalized.\n * @param gamma Tensor by which to scale the input.\n * @param beta Tensor by which to center the input.\n * @param reductionAxes Axes over which to normalize.\n * @param epsilon Fuzz factor.\n * @returns An `Array` of three `Tensors`:\n * [normalized tensor, mean of input, variance of input].\n */\nfunction regularNormalizeBatchInTraining(x, gamma, beta, reductionAxes, epsilon = 1e-3) {\n return tidy(() => {\n const meanAndVariance = tfc.moments(x, reductionAxes);\n const mean = meanAndVariance.mean;\n const variance = meanAndVariance.variance;\n const normed = batchNormalization(x, mean, variance, beta, gamma, epsilon);\n return [normed, mean, variance];\n });\n}\n/**\n * Broadcasting batch normalization for use in training (not inference).\n *\n * The input is normalized to zero mean and unit variance along the\n * `reductionAxes`, followed by scaling with `gamma` and shifted by `beta`.\n * The result of that is returned as the first element\n * of the returned `Array`. The other two elements are the mean and variance,\n * respectively.\n *\n * @param x Input tensor to be normalized.\n * @param gamma Tensor by which to scale the input.\n * @param beta Tensor by which to center the input.\n * @param reductionAxes Axes over which to normalize.\n * @param epsilon Fuzz factor.\n * @returns An `Array` of three `Tensors`:\n * [normalized tensor, mean of input, variance of input].\n */\nfunction broadcastNormalizeBatchInTraining(x, gamma, beta, reductionAxes, epsilon = 1e-3) {\n return tidy(() => {\n const meanAndVariance = tfc.moments(x, reductionAxes);\n const mean = meanAndVariance.mean;\n const variance = meanAndVariance.variance;\n const targetShape = [];\n for (const axis of math_utils.range(0, x.rank)) {\n if (reductionAxes.indexOf(axis) !== -1) {\n targetShape.push(1);\n }\n else {\n targetShape.push(x.shape[axis]);\n }\n }\n const broadcastMean = mean.reshape(targetShape);\n const broadcastVariance = variance.reshape(targetShape);\n const broadcastGamma = gamma == null ? null : gamma.reshape(targetShape);\n const broadcastBeta = beta == null ? 
null : beta.reshape(targetShape);\n const normed = batchNormalization(x, broadcastMean, broadcastVariance, broadcastBeta, broadcastGamma, epsilon);\n return [normed, mean, variance];\n });\n}\n/**\n * Batch normalization for use in training (not inference).\n *\n * @param x Input tensor to be normalized.\n * @param gamma Tensor by which to scale the input.\n * @param beta Tensor by which to center the input.\n * @param reductionAxes Axes over which to normalize.\n * @param epsilon Fuzz factor.\n * @returns An `Array` of three `Tensors`:\n * [normalized tensor, mean of input, variance of input].\n */\nexport function normalizeBatchInTraining(x, gamma, beta, reductionAxes, epsilon = 1e-3) {\n if (util.arraysEqual(reductionAxes.slice().sort(), math_utils.range(0, x.rank - 1))) {\n return regularNormalizeBatchInTraining(x, gamma, beta, reductionAxes, epsilon);\n }\n else {\n return broadcastNormalizeBatchInTraining(x, gamma, beta, reductionAxes, epsilon);\n }\n}\nexport class BatchNormalization extends Layer {\n constructor(args) {\n if (args == null) {\n args = {};\n }\n super(args);\n this.supportsMasking = true;\n this.axis = args.axis == null ? -1 : args.axis;\n this.momentum = args.momentum == null ? 0.99 : args.momentum;\n this.epsilon = args.epsilon == null ? 1e-3 : args.epsilon;\n this.center = args.center == null ? true : args.center;\n this.scale = args.scale == null ? true : args.scale;\n this.betaInitializer = getInitializer(args.betaInitializer || 'zeros');\n this.gammaInitializer = getInitializer(args.gammaInitializer || 'ones');\n this.movingMeanInitializer =\n getInitializer(args.movingMeanInitializer || 'zeros');\n this.movingVarianceInitializer =\n getInitializer(args.movingVarianceInitializer || 'ones');\n this.betaConstraint = getConstraint(args.betaConstraint);\n this.gammaConstraint = getConstraint(args.gammaConstraint);\n this.betaRegularizer = getRegularizer(args.betaRegularizer);\n this.gammaRegularizer = getRegularizer(args.gammaRegularizer);\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const axis = this.axis >= 0 ? this.axis : (this.axis + inputShape.length);\n const dim = inputShape[axis];\n if (dim == null) {\n throw new ValueError(`Axis ${axis} of input tensor should have a defined dimension but ` +\n `the layer received an input with shape ` +\n `${JSON.stringify(inputShape)}.`);\n }\n this.inputSpec =\n [new InputSpec({ ndim: inputShape.length, axes: { [axis]: dim } })];\n const shape = [dim];\n if (this.scale) {\n this.gamma = this.addWeight('gamma', shape, null, this.gammaInitializer, this.gammaRegularizer, true, this.gammaConstraint);\n }\n if (this.center) {\n this.beta = this.addWeight('beta', shape, null, this.betaInitializer, this.betaRegularizer, true, this.betaConstraint);\n }\n this.movingMean = this.addWeight('moving_mean', shape, null, this.movingMeanInitializer, null, false);\n this.movingVariance = this.addWeight('moving_variance', shape, null, this.movingVarianceInitializer, null, false);\n this.built = true;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n const training = kwargs['training'] == null ? false : kwargs['training'];\n const input = getExactlyOneTensor(inputs);\n const inputShape = input.shape;\n const ndim = inputShape.length;\n const reductionAxes = math_utils.range(0, ndim);\n const axis = this.axis >= 0 ? 
this.axis : (this.axis + ndim);\n reductionAxes.splice(axis, 1);\n const broadcastShape = generic_utils.pyListRepeat(1, ndim);\n broadcastShape[axis] = inputShape[axis];\n const sortedReductionAxes = reductionAxes.slice();\n sortedReductionAxes.sort();\n const needsBroadcasting = !util.arraysEqual(sortedReductionAxes, math_utils.range(0, ndim).slice(0, ndim - 1));\n const normalizeInference = () => {\n if (needsBroadcasting) {\n const broadcastMovingMean = this.movingMean.read().reshape(broadcastShape);\n const broadcastMovingVariance = this.movingVariance.read().reshape(broadcastShape);\n const broadcastBeta = this.center ? this.beta.read().reshape(broadcastShape) : null;\n const broadcastGamma = this.scale ? this.gamma.read().reshape(broadcastShape) : null;\n return batchNormalization(input, broadcastMovingMean, broadcastMovingVariance, broadcastBeta, broadcastGamma, this.epsilon);\n }\n else {\n return batchNormalization(input, this.movingMean.read(), this.movingVariance.read(), this.beta == null ? null : this.beta.read(), this.gamma == null ? null : this.gamma.read(), this.epsilon);\n }\n };\n if (!training) {\n return normalizeInference();\n }\n const [normedTraining, mean, variance] = normalizeBatchInTraining(input, this.gamma.read(), this.beta.read(), reductionAxes, this.epsilon);\n const doMovingAverage = (variable, value, momentum) => {\n tfc.tidy(() => {\n const decay = 1 - momentum;\n const origValue = variable.read();\n const updateDelta = origValue.sub(value).mul(decay);\n variable.write(origValue.sub(updateDelta));\n });\n };\n // Perform updates to moving mean and moving variance for training.\n // Porting Note: In PyKeras, these updates to `movingMean` and\n // `movingAverage` are done as a deferred Graph, added to the `Layer`'s\n // `update`s using the `add_update()` method. Here we do it imperatively\n // and encapsulate the updates in a function that is invoked\n // immediately.\n const updateMovingMeanAndVariance = () => {\n doMovingAverage(this.movingMean, mean, this.momentum);\n doMovingAverage(this.movingVariance, variance, this.momentum);\n };\n updateMovingMeanAndVariance();\n return normedTraining;\n });\n }\n getConfig() {\n const config = {\n axis: this.axis,\n momentum: this.momentum,\n epsilon: this.epsilon,\n center: this.center,\n scale: this.scale,\n betaInitializer: serializeInitializer(this.betaInitializer),\n gammaInitializer: serializeInitializer(this.gammaInitializer),\n movingMeanInitializer: serializeInitializer(this.movingMeanInitializer),\n movingVarianceInitializer: serializeInitializer(this.movingVarianceInitializer),\n betaRegularizer: serializeRegularizer(this.betaRegularizer),\n gammaRegularizer: serializeRegularizer(this.gammaRegularizer),\n betaConstraint: serializeConstraint(this.betaConstraint),\n gammaConstraint: serializeConstraint(this.gammaConstraint)\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nBatchNormalization.className = 'BatchNormalization';\nserialization.registerClass(BatchNormalization);\nexport class LayerNormalization extends Layer {\n constructor(args) {\n if (args == null) {\n args = {};\n }\n super(args);\n this.axis = args.axis == null ? 
-1 : args.axis;\n if (typeof this.axis === 'number') {\n if (!Number.isInteger(this.axis)) {\n throw new Error(`Expected axis to be an integer, but received ${this.axis}`);\n }\n }\n else if (Array.isArray(this.axis)) {\n for (const axis of this.axis) {\n if (!Number.isInteger(axis)) {\n throw new Error(`Expected axis to be an array of integers, ` +\n `but received ${JSON.stringify(this.axis)}`);\n }\n }\n }\n else {\n throw new Error(`Expected axis to be an integer or an array of integers, ` +\n `but received ${JSON.stringify(this.axis)}`);\n }\n this.epsilon = args.epsilon == null ? 1e-3 : args.epsilon;\n this.center = args.center == null ? true : args.center;\n this.scale = args.scale == null ? true : args.scale;\n this.betaInitializer = getInitializer(args.betaInitializer || 'zeros');\n this.gammaInitializer = getInitializer(args.gammaInitializer || 'ones');\n this.betaRegularizer = getRegularizer(args.betaRegularizer);\n this.gammaRegularizer = getRegularizer(args.gammaRegularizer);\n this.supportsMasking = true;\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const nDims = inputShape.length;\n // Convert axis to array and resolve negatives.\n if (typeof this.axis === 'number') {\n this.axis = [this.axis];\n }\n for (let i = 0; i < this.axis.length; ++i) {\n if (this.axis[i] < 0) {\n this.axis[i] += nDims;\n }\n }\n // Further validate axes.\n for (const axis of this.axis) {\n if (axis < 0 || axis >= nDims) {\n throw new Error(`Invalid axis: ${axis}`);\n }\n }\n if (this.axis.length !== generic_utils.unique(this.axis).length) {\n throw new Error(`Found duplicate axes in: ${this.axis}`);\n }\n const paramShape = this.axis.map(axis => inputShape[axis]);\n const trainable = true;\n if (this.scale) {\n this.gamma = this.addWeight('gamma', paramShape, 'float32', this.gammaInitializer, this.gammaRegularizer, trainable);\n }\n else {\n this.gamma = null;\n }\n if (this.center) {\n this.beta = this.addWeight('beta', paramShape, 'float32', this.betaInitializer, this.betaRegularizer, trainable);\n }\n else {\n this.beta = null;\n }\n this.built = true;\n }\n call(inputs, kwargs) {\n const input = getExactlyOneTensor(inputs);\n const inputShape = input.shape;\n const nDims = inputShape.length;\n return tidy(() => {\n const keepDims = true;\n let { mean, variance } = moments(input, this.axis, keepDims);\n const broadcastShape = generic_utils.pyListRepeat(1, nDims);\n for (const dim of this.axis) {\n broadcastShape[dim] = inputShape[dim];\n }\n const broadcast = (v) => {\n if (v != null && v.shape.length !== nDims &&\n this.axis !== [nDims - 1]) {\n return v.reshape(broadcastShape);\n }\n else {\n return v;\n }\n };\n let scale = broadcast(this.gamma.read());\n let offset = broadcast(this.beta.read());\n // TODO(https://github.com/tensorflow/tfjs/issues/2120): The tiling below\n // is a workaround for the limitation of core's batchNormalization kernels, which don't\n // support broadcasting in their gradients. In addition, the tiling is\n // necessary to ensure correctness on the browser CPU backend regardless\n // of forward or backward computation. Remove this workaround once the\n // limitation is addressed. 
See .\n const momentsTiling = [];\n const scaleOffsetTiling = [];\n for (let i = 0; i < nDims; ++i) {\n if (this.axis.indexOf(i) !== -1) {\n momentsTiling.push(inputShape[i]);\n scaleOffsetTiling.push(1);\n }\n else {\n momentsTiling.push(1);\n scaleOffsetTiling.push(inputShape[i]);\n }\n }\n mean = mean.tile(momentsTiling);\n variance = variance.tile(momentsTiling);\n scale = scale.tile(scaleOffsetTiling);\n offset = offset.tile(scaleOffsetTiling);\n return batchNormalization(input, mean, variance, offset, scale, this.epsilon);\n });\n }\n getConfig() {\n const config = {\n axis: this.axis,\n epsilon: this.epsilon,\n center: this.center,\n scale: this.scale,\n betaInitializer: serializeInitializer(this.betaInitializer),\n gammaInitializer: serializeInitializer(this.gammaInitializer),\n betaRegularizer: serializeRegularizer(this.betaRegularizer),\n gammaRegularizer: serializeRegularizer(this.gammaRegularizer)\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nLayerNormalization.className = 'LayerNormalization';\nserialization.registerClass(LayerNormalization);\n//# sourceMappingURL=normalization.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Padding Layers.\n */\n// Porting Note: In Python Keras, the padding layers are in convolutional.py,\n// but we decided to put them in a separate file (padding.ts) for clarity.\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { serialization, tidy } from '@tensorflow/tfjs-core';\nimport { imageDataFormat } from '../backend/common';\nimport { InputSpec, Layer } from '../engine/topology';\nimport { ValueError } from '../errors';\nimport { getExactlyOneShape, getExactlyOneTensor } from '../utils/types_utils';\n/**\n * Pads the middle dimension of a 3D tensor.\n *\n * @param x Input `tf.Tensor` to be padded.\n * @param padding `Array` of 2 integers, how many zeros to add at the start and\n * end of the middle dimension (i.e., dimension 1).\n * @return A padded 3D `tf.Tensor`.\n */\nexport function temporalPadding(x, padding) {\n return tidy(() => {\n if (x.rank !== 3) {\n throw new ValueError(`temporalPadding expects input tensor to be 3-D, but received a ` +\n `${x.rank}-D tensor.`);\n }\n if (padding == null) {\n padding = [1, 1];\n }\n if (padding.length !== 2) {\n throw new ValueError(`temporalPadding expects input padding pattern to be a length-2 ` +\n `array, but received a length-${padding.length} array.`);\n }\n const pattern = [[0, 0], padding, [0, 0]];\n return tfc.pad(x, pattern);\n });\n}\n/**\n * Pads the 2nd and 3rd dimensions of a 4D tensor.\n *\n * @param x Input `tf.Tensor` to be padded.\n * @param padding `Array` of two `Array`s, each of which is an `Array` of two\n * integers. 
The amount of padding at the beginning and end of the 2nd and 3rd\n * dimensions, respectively.\n * @param dataFormat 'channelsLast' (default) or 'channelsFirst'.\n * @return Padded 4D `tf.Tensor`.\n */\nexport function spatial2dPadding(x, padding, dataFormat) {\n return tidy(() => {\n if (x.rank !== 4) {\n throw new ValueError(`spatial2dPadding expects input tensor to be 4-D, but received a ` +\n `${x.rank}-D tensor.`);\n }\n if (padding == null) {\n padding = [[1, 1], [1, 1]];\n }\n if (padding.length !== 2 || padding[0].length !== 2 ||\n padding[1].length !== 2) {\n throw new ValueError('spatial2dPadding expects `padding` to be an Array of two Arrays, ' +\n 'each of which is an Array of two integers.');\n }\n if (dataFormat == null) {\n dataFormat = imageDataFormat();\n }\n if (dataFormat !== 'channelsLast' && dataFormat !== 'channelsFirst') {\n throw new ValueError(`Unknown data format: ${dataFormat}. ` +\n `Supported data formats are 'channelsLast' and 'channelsFirst'.`);\n }\n let pattern;\n if (dataFormat === 'channelsFirst') {\n pattern = [[0, 0], [0, 0], padding[0], padding[1]];\n }\n else {\n pattern = [[0, 0], padding[0], padding[1], [0, 0]];\n }\n return tfc.pad(x, pattern);\n });\n}\nexport class ZeroPadding2D extends Layer {\n constructor(args) {\n if (args == null) {\n args = {};\n }\n super(args);\n this.dataFormat =\n args.dataFormat == null ? imageDataFormat() : args.dataFormat;\n // TODO(cais): Maybe refactor the following logic surrounding `padding`\n // into a helper method.\n if (args.padding == null) {\n this.padding = [[1, 1], [1, 1]];\n }\n else if (typeof args.padding === 'number') {\n this.padding =\n [[args.padding, args.padding], [args.padding, args.padding]];\n }\n else {\n args.padding = args.padding;\n if (args.padding.length !== 2) {\n throw new ValueError(`ZeroPadding2D expects padding to be a length-2 array, but ` +\n `received a length-${args.padding.length} array.`);\n }\n let heightPadding;\n let widthPadding;\n if (typeof args.padding[0] === 'number') {\n heightPadding = [args.padding[0], args.padding[0]];\n widthPadding = [args.padding[1], args.padding[1]];\n }\n else {\n args.padding = args.padding;\n if (args.padding[0].length !== 2) {\n throw new ValueError(`ZeroPadding2D expects height padding to be a length-2 array, ` +\n `but received a length-${args.padding[0].length} array.`);\n }\n heightPadding = args.padding[0];\n if (args.padding[1].length !== 2) {\n throw new ValueError(`ZeroPadding2D expects width padding to be a length-2 array, ` +\n `but received a length-${args.padding[1].length} array.`);\n }\n widthPadding = args.padding[1];\n }\n this.padding = [heightPadding, widthPadding];\n }\n this.inputSpec = [new InputSpec({ ndim: 4 })];\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n let rows;\n let cols;\n if (this.dataFormat === 'channelsFirst') {\n if (inputShape[2] != null && inputShape[2] >= 0) {\n rows = inputShape[2] + this.padding[0][0] + this.padding[0][1];\n }\n else {\n rows = null;\n }\n if (inputShape[3] != null && inputShape[3] >= 0) {\n cols = inputShape[3] + this.padding[1][0] + this.padding[1][1];\n }\n else {\n cols = null;\n }\n return [inputShape[0], inputShape[1], rows, cols];\n }\n else {\n if (inputShape[1] != null && inputShape[1] >= 0) {\n rows = inputShape[1] + this.padding[0][0] + this.padding[0][1];\n }\n else {\n rows = null;\n }\n if (inputShape[2] != null && inputShape[2] >= 0) {\n cols = inputShape[2] + this.padding[1][0] + this.padding[1][1];\n }\n else {\n cols = 
null;\n }\n return [inputShape[0], rows, cols, inputShape[3]];\n }\n }\n call(inputs, kwargs) {\n return tidy(() => spatial2dPadding(getExactlyOneTensor(inputs), this.padding, this.dataFormat));\n }\n getConfig() {\n const config = {\n padding: this.padding,\n dataFormat: this.dataFormat,\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nZeroPadding2D.className = 'ZeroPadding2D';\nserialization.registerClass(ZeroPadding2D);\n//# sourceMappingURL=padding.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * TensorFlow.js Layers: Pooling Layers.\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { serialization, tidy } from '@tensorflow/tfjs-core';\nimport { imageDataFormat } from '../backend/common';\nimport * as K from '../backend/tfjs_backend';\nimport { checkDataFormat, checkPaddingMode, checkPoolMode } from '../common';\nimport { InputSpec } from '../engine/topology';\nimport { Layer } from '../engine/topology';\nimport { NotImplementedError, ValueError } from '../errors';\nimport { convOutputLength } from '../utils/conv_utils';\nimport { assertPositiveInteger } from '../utils/generic_utils';\nimport { getExactlyOneShape, getExactlyOneTensor } from '../utils/types_utils';\nimport { preprocessConv2DInput, preprocessConv3DInput } from './convolutional';\n/**\n * 2D pooling.\n * @param x\n * @param poolSize\n * @param stridesdes strides. Defaults to [1, 1].\n * @param padding padding. Defaults to 'valid'.\n * @param dataFormat data format. Defaults to 'channelsLast'.\n * @param poolMode Mode of pooling. Defaults to 'max'.\n * @returns Result of the 2D pooling.\n */\nexport function pool2d(x, poolSize, strides, padding, dataFormat, poolMode) {\n return tidy(() => {\n checkDataFormat(dataFormat);\n checkPoolMode(poolMode);\n checkPaddingMode(padding);\n if (strides == null) {\n strides = [1, 1];\n }\n if (padding == null) {\n padding = 'valid';\n }\n if (dataFormat == null) {\n dataFormat = imageDataFormat();\n }\n if (poolMode == null) {\n poolMode = 'max';\n }\n // TODO(cais): Remove the preprocessing step once deeplearn.js supports\n // dataFormat as an input argument.\n x = preprocessConv2DInput(x, dataFormat); // x is NHWC after preprocessing.\n let y;\n const paddingString = (padding === 'same') ? 'same' : 'valid';\n if (poolMode === 'max') {\n // TODO(cais): Rank check?\n y = tfc.maxPool(x, poolSize, strides, paddingString);\n }\n else { // 'avg'\n // TODO(cais): Check the dtype and rank of x and give clear error message\n // if those are incorrect.\n y = tfc.avgPool(\n // TODO(cais): Rank check?\n x, poolSize, strides, paddingString);\n }\n if (dataFormat === 'channelsFirst') {\n y = tfc.transpose(y, [0, 3, 1, 2]); // NHWC -> NCHW.\n }\n return y;\n });\n}\n/**\n * 3D pooling.\n * @param x\n * @param poolSize. Default to [1, 1, 1].\n * @param strides strides. Defaults to [1, 1, 1].\n * @param padding padding. Defaults to 'valid'.\n * @param dataFormat data format. Defaults to 'channelsLast'.\n * @param poolMode Mode of pooling. 
Defaults to 'max'.\n * @returns Result of the 3D pooling.\n */\nexport function pool3d(x, poolSize, strides, padding, dataFormat, poolMode) {\n return tidy(() => {\n checkDataFormat(dataFormat);\n checkPoolMode(poolMode);\n checkPaddingMode(padding);\n if (strides == null) {\n strides = [1, 1, 1];\n }\n if (padding == null) {\n padding = 'valid';\n }\n if (dataFormat == null) {\n dataFormat = imageDataFormat();\n }\n if (poolMode == null) {\n poolMode = 'max';\n }\n // x is NDHWC after preprocessing.\n x = preprocessConv3DInput(x, dataFormat);\n let y;\n const paddingString = (padding === 'same') ? 'same' : 'valid';\n if (poolMode === 'max') {\n y = tfc.maxPool3d(x, poolSize, strides, paddingString);\n }\n else { // 'avg'\n y = tfc.avgPool3d(x, poolSize, strides, paddingString);\n }\n if (dataFormat === 'channelsFirst') {\n y = tfc.transpose(y, [0, 4, 1, 2, 3]); // NDHWC -> NCDHW.\n }\n return y;\n });\n}\n/**\n * Abstract class for different pooling 1D layers.\n */\nexport class Pooling1D extends Layer {\n /**\n *\n * @param args Parameters for the Pooling layer.\n *\n * config.poolSize defaults to 2.\n */\n constructor(args) {\n if (args.poolSize == null) {\n args.poolSize = 2;\n }\n super(args);\n if (typeof args.poolSize === 'number') {\n this.poolSize = [args.poolSize];\n }\n else if (Array.isArray(args.poolSize) &&\n args.poolSize.length === 1 &&\n typeof args.poolSize[0] === 'number') {\n this.poolSize = args.poolSize;\n }\n else {\n throw new ValueError(`poolSize for 1D convolutional layer must be a number or an ` +\n `Array of a single number, but received ` +\n `${JSON.stringify(args.poolSize)}`);\n }\n assertPositiveInteger(this.poolSize, 'poolSize');\n if (args.strides == null) {\n this.strides = this.poolSize;\n }\n else {\n if (typeof args.strides === 'number') {\n this.strides = [args.strides];\n }\n else if (Array.isArray(args.strides) &&\n args.strides.length === 1 &&\n typeof args.strides[0] === 'number') {\n this.strides = args.strides;\n }\n else {\n throw new ValueError(`strides for 1D convolutional layer must be a number or an ` +\n `Array of a single number, but received ` +\n `${JSON.stringify(args.strides)}`);\n }\n }\n assertPositiveInteger(this.strides, 'strides');\n this.padding = args.padding == null ? 
'valid' : args.padding;\n checkPaddingMode(this.padding);\n this.inputSpec = [new InputSpec({ ndim: 3 })];\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const length = convOutputLength(inputShape[1], this.poolSize[0], this.padding, this.strides[0]);\n return [inputShape[0], length, inputShape[2]];\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n // Add dummy last dimension.\n inputs = K.expandDims(getExactlyOneTensor(inputs), 2);\n const output = this.poolingFunction(getExactlyOneTensor(inputs), [this.poolSize[0], 1], [this.strides[0], 1], this.padding, 'channelsLast');\n // Remove dummy last dimension.\n return tfc.squeeze(output, [2]);\n });\n }\n getConfig() {\n const config = {\n poolSize: this.poolSize,\n padding: this.padding,\n strides: this.strides,\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\nexport class MaxPooling1D extends Pooling1D {\n constructor(args) {\n super(args);\n }\n poolingFunction(inputs, poolSize, strides, padding, dataFormat) {\n checkDataFormat(dataFormat);\n checkPaddingMode(padding);\n return pool2d(inputs, poolSize, strides, padding, dataFormat, 'max');\n }\n}\n/** @nocollapse */\nMaxPooling1D.className = 'MaxPooling1D';\nserialization.registerClass(MaxPooling1D);\nexport class AveragePooling1D extends Pooling1D {\n constructor(args) {\n super(args);\n }\n poolingFunction(inputs, poolSize, strides, padding, dataFormat) {\n checkDataFormat(dataFormat);\n checkPaddingMode(padding);\n return pool2d(inputs, poolSize, strides, padding, dataFormat, 'avg');\n }\n}\n/** @nocollapse */\nAveragePooling1D.className = 'AveragePooling1D';\nserialization.registerClass(AveragePooling1D);\n/**\n * Abstract class for different pooling 2D layers.\n */\nexport class Pooling2D extends Layer {\n constructor(args) {\n if (args.poolSize == null) {\n args.poolSize = [2, 2];\n }\n super(args);\n this.poolSize = Array.isArray(args.poolSize) ?\n args.poolSize :\n [args.poolSize, args.poolSize];\n if (args.strides == null) {\n this.strides = this.poolSize;\n }\n else if (Array.isArray(args.strides)) {\n if (args.strides.length !== 2) {\n throw new ValueError(`If the strides property of a 2D pooling layer is an Array, ` +\n `it is expected to have a length of 2, but received length ` +\n `${args.strides.length}.`);\n }\n this.strides = args.strides;\n }\n else {\n // `config.strides` is a number.\n this.strides = [args.strides, args.strides];\n }\n assertPositiveInteger(this.poolSize, 'poolSize');\n assertPositiveInteger(this.strides, 'strides');\n this.padding = args.padding == null ? 'valid' : args.padding;\n this.dataFormat =\n args.dataFormat == null ? 'channelsLast' : args.dataFormat;\n checkDataFormat(this.dataFormat);\n checkPaddingMode(this.padding);\n this.inputSpec = [new InputSpec({ ndim: 4 })];\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n let rows = this.dataFormat === 'channelsFirst' ? inputShape[2] : inputShape[1];\n let cols = this.dataFormat === 'channelsFirst' ? 
inputShape[3] : inputShape[2];\n rows =\n convOutputLength(rows, this.poolSize[0], this.padding, this.strides[0]);\n cols =\n convOutputLength(cols, this.poolSize[1], this.padding, this.strides[1]);\n if (this.dataFormat === 'channelsFirst') {\n return [inputShape[0], inputShape[1], rows, cols];\n }\n else {\n return [inputShape[0], rows, cols, inputShape[3]];\n }\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n return this.poolingFunction(getExactlyOneTensor(inputs), this.poolSize, this.strides, this.padding, this.dataFormat);\n });\n }\n getConfig() {\n const config = {\n poolSize: this.poolSize,\n padding: this.padding,\n strides: this.strides,\n dataFormat: this.dataFormat\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\nexport class MaxPooling2D extends Pooling2D {\n constructor(args) {\n super(args);\n }\n poolingFunction(inputs, poolSize, strides, padding, dataFormat) {\n checkDataFormat(dataFormat);\n checkPaddingMode(padding);\n return pool2d(inputs, poolSize, strides, padding, dataFormat, 'max');\n }\n}\n/** @nocollapse */\nMaxPooling2D.className = 'MaxPooling2D';\nserialization.registerClass(MaxPooling2D);\nexport class AveragePooling2D extends Pooling2D {\n constructor(args) {\n super(args);\n }\n poolingFunction(inputs, poolSize, strides, padding, dataFormat) {\n checkDataFormat(dataFormat);\n checkPaddingMode(padding);\n return pool2d(inputs, poolSize, strides, padding, dataFormat, 'avg');\n }\n}\n/** @nocollapse */\nAveragePooling2D.className = 'AveragePooling2D';\nserialization.registerClass(AveragePooling2D);\n/**\n * Abstract class for different pooling 3D layers.\n */\nexport class Pooling3D extends Layer {\n constructor(args) {\n if (args.poolSize == null) {\n args.poolSize = [2, 2, 2];\n }\n super(args);\n this.poolSize = Array.isArray(args.poolSize) ?\n args.poolSize :\n [args.poolSize, args.poolSize, args.poolSize];\n if (args.strides == null) {\n this.strides = this.poolSize;\n }\n else if (Array.isArray(args.strides)) {\n if (args.strides.length !== 3) {\n throw new ValueError(`If the strides property of a 3D pooling layer is an Array, ` +\n `it is expected to have a length of 3, but received length ` +\n `${args.strides.length}.`);\n }\n this.strides = args.strides;\n }\n else {\n // `config.strides` is a number.\n this.strides = [args.strides, args.strides, args.strides];\n }\n assertPositiveInteger(this.poolSize, 'poolSize');\n assertPositiveInteger(this.strides, 'strides');\n this.padding = args.padding == null ? 'valid' : args.padding;\n this.dataFormat =\n args.dataFormat == null ? 'channelsLast' : args.dataFormat;\n checkDataFormat(this.dataFormat);\n checkPaddingMode(this.padding);\n this.inputSpec = [new InputSpec({ ndim: 5 })];\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n let depths = this.dataFormat === 'channelsFirst' ? inputShape[2] : inputShape[1];\n let rows = this.dataFormat === 'channelsFirst' ? inputShape[3] : inputShape[2];\n let cols = this.dataFormat === 'channelsFirst' ? 
inputShape[4] : inputShape[3];\n depths = convOutputLength(depths, this.poolSize[0], this.padding, this.strides[0]);\n rows =\n convOutputLength(rows, this.poolSize[1], this.padding, this.strides[1]);\n cols =\n convOutputLength(cols, this.poolSize[2], this.padding, this.strides[2]);\n if (this.dataFormat === 'channelsFirst') {\n return [inputShape[0], inputShape[1], depths, rows, cols];\n }\n else {\n return [inputShape[0], depths, rows, cols, inputShape[4]];\n }\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n return this.poolingFunction(getExactlyOneTensor(inputs), this.poolSize, this.strides, this.padding, this.dataFormat);\n });\n }\n getConfig() {\n const config = {\n poolSize: this.poolSize,\n padding: this.padding,\n strides: this.strides,\n dataFormat: this.dataFormat\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\nexport class MaxPooling3D extends Pooling3D {\n constructor(args) {\n super(args);\n }\n poolingFunction(inputs, poolSize, strides, padding, dataFormat) {\n checkDataFormat(dataFormat);\n checkPaddingMode(padding);\n return pool3d(inputs, poolSize, strides, padding, dataFormat, 'max');\n }\n}\n/** @nocollapse */\nMaxPooling3D.className = 'MaxPooling3D';\nserialization.registerClass(MaxPooling3D);\nexport class AveragePooling3D extends Pooling3D {\n constructor(args) {\n super(args);\n }\n poolingFunction(inputs, poolSize, strides, padding, dataFormat) {\n checkDataFormat(dataFormat);\n checkPaddingMode(padding);\n return pool3d(inputs, poolSize, strides, padding, dataFormat, 'avg');\n }\n}\n/** @nocollapse */\nAveragePooling3D.className = 'AveragePooling3D';\nserialization.registerClass(AveragePooling3D);\n/**\n * Abstract class for different global pooling 1D layers.\n */\nexport class GlobalPooling1D extends Layer {\n constructor(args) {\n super(args);\n this.inputSpec = [new InputSpec({ ndim: 3 })];\n }\n computeOutputShape(inputShape) {\n return [inputShape[0], inputShape[2]];\n }\n call(inputs, kwargs) {\n throw new NotImplementedError();\n }\n}\nexport class GlobalAveragePooling1D extends GlobalPooling1D {\n constructor(args) {\n super(args || {});\n }\n call(inputs, kwargs) {\n return tidy(() => {\n const input = getExactlyOneTensor(inputs);\n return tfc.mean(input, 1);\n });\n }\n}\n/** @nocollapse */\nGlobalAveragePooling1D.className = 'GlobalAveragePooling1D';\nserialization.registerClass(GlobalAveragePooling1D);\nexport class GlobalMaxPooling1D extends GlobalPooling1D {\n constructor(args) {\n super(args || {});\n }\n call(inputs, kwargs) {\n return tidy(() => {\n const input = getExactlyOneTensor(inputs);\n return tfc.max(input, 1);\n });\n }\n}\n/** @nocollapse */\nGlobalMaxPooling1D.className = 'GlobalMaxPooling1D';\nserialization.registerClass(GlobalMaxPooling1D);\n/**\n * Abstract class for different global pooling 2D layers.\n */\nexport class GlobalPooling2D extends Layer {\n constructor(args) {\n super(args);\n this.dataFormat =\n args.dataFormat == null ? 
'channelsLast' : args.dataFormat;\n checkDataFormat(this.dataFormat);\n this.inputSpec = [new InputSpec({ ndim: 4 })];\n }\n computeOutputShape(inputShape) {\n inputShape = inputShape;\n if (this.dataFormat === 'channelsLast') {\n return [inputShape[0], inputShape[3]];\n }\n else {\n return [inputShape[0], inputShape[1]];\n }\n }\n call(inputs, kwargs) {\n throw new NotImplementedError();\n }\n getConfig() {\n const config = { dataFormat: this.dataFormat };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\nexport class GlobalAveragePooling2D extends GlobalPooling2D {\n call(inputs, kwargs) {\n return tidy(() => {\n const input = getExactlyOneTensor(inputs);\n if (this.dataFormat === 'channelsLast') {\n return tfc.mean(input, [1, 2]);\n }\n else {\n return tfc.mean(input, [2, 3]);\n }\n });\n }\n}\n/** @nocollapse */\nGlobalAveragePooling2D.className = 'GlobalAveragePooling2D';\nserialization.registerClass(GlobalAveragePooling2D);\nexport class GlobalMaxPooling2D extends GlobalPooling2D {\n call(inputs, kwargs) {\n return tidy(() => {\n const input = getExactlyOneTensor(inputs);\n if (this.dataFormat === 'channelsLast') {\n return tfc.max(input, [1, 2]);\n }\n else {\n return tfc.max(input, [2, 3]);\n }\n });\n }\n}\n/** @nocollapse */\nGlobalMaxPooling2D.className = 'GlobalMaxPooling2D';\nserialization.registerClass(GlobalMaxPooling2D);\n//# sourceMappingURL=pooling.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Layers that augment the functionality of a base layer.\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { serialization, tidy } from '@tensorflow/tfjs-core';\nimport * as K from '../backend/tfjs_backend';\nimport { nameScope } from '../common';\nimport { InputSpec, Layer, SymbolicTensor } from '../engine/topology';\nimport { NotImplementedError, ValueError } from '../errors';\nimport { VALID_BIDIRECTIONAL_MERGE_MODES } from '../keras_format/common';\nimport * as generic_utils from '../utils/generic_utils';\nimport { getExactlyOneShape, getExactlyOneTensor } from '../utils/types_utils';\nimport { rnn, standardizeArgs } from './recurrent';\nimport { deserialize } from './serialization';\n/**\n * Abstract wrapper base class.\n *\n * Wrappers take another layer and augment it in various ways.\n * Do not use this class as a layer, it is only an abstract base class.\n * Two usable wrappers are the `TimeDistributed` and `Bidirectional` wrappers.\n */\nexport class Wrapper extends Layer {\n constructor(args) {\n // Porting Note: In PyKeras, `self.layer` is set prior to the calling\n // `super()`. But we can't do that here due to TypeScript's restriction.\n // See: https://github.com/Microsoft/TypeScript/issues/8277\n // As a result, we have to add checks in `get trainable()` and\n // `set trainable()` below in order to prevent using `this.layer` when\n // its value is `undefined`. 
The super constructor does use the getter\n // and the setter of `this.layer`.\n super(args);\n this.layer = args.layer;\n }\n build(inputShape) {\n this.built = true;\n }\n // TODO(cais): Implement activityRegularizer getter.\n get trainable() {\n // Porting Note: the check of `this.layer` here is necessary due to the\n // way the `constructor` of this class is written (see Porting Note\n // above).\n if (this.layer != null) {\n return this.layer.trainable;\n }\n else {\n return false;\n }\n }\n set trainable(value) {\n // Porting Note: the check of `this.layer` here is necessary due to the\n // way the `constructor` of this class is written (see Porting Note\n // above).\n if (this.layer != null) {\n this.layer.trainable = value;\n }\n }\n get trainableWeights() {\n return this.layer.trainableWeights;\n }\n // TODO(cais): Implement setter for trainableWeights.\n get nonTrainableWeights() {\n return this.layer.nonTrainableWeights;\n }\n // TODO(cais): Implement setter for nonTrainableWeights.\n get updates() {\n // tslint:disable-next-line:no-any\n return this.layer._updates;\n }\n // TODO(cais): Implement getUpdatesFor().\n get losses() {\n return this.layer.losses;\n }\n // TODO(cais): Implement getLossesFor().\n getWeights() {\n return this.layer.getWeights();\n }\n setWeights(weights) {\n this.layer.setWeights(weights);\n }\n getConfig() {\n const config = {\n 'layer': {\n 'className': this.layer.getClassName(),\n 'config': this.layer.getConfig(),\n }\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n setFastWeightInitDuringBuild(value) {\n super.setFastWeightInitDuringBuild(value);\n if (this.layer != null) {\n this.layer.setFastWeightInitDuringBuild(value);\n }\n }\n /** @nocollapse */\n static fromConfig(cls, config, customObjects = {}) {\n const layerConfig = config['layer'];\n const layer = deserialize(layerConfig, customObjects);\n delete config['layer'];\n const newConfig = { layer };\n Object.assign(newConfig, config);\n return new cls(newConfig);\n }\n}\nexport class TimeDistributed extends Wrapper {\n constructor(args) {\n super(args);\n this.supportsMasking = true;\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n if (inputShape.length < 3) {\n throw new ValueError(`TimeDistributed layer expects an input shape >= 3D, but received ` +\n `input shape ${JSON.stringify(inputShape)}`);\n }\n this.inputSpec = [{ shape: inputShape }];\n const childInputShape = [inputShape[0]].concat(inputShape.slice(2));\n if (!this.layer.built) {\n this.layer.build(childInputShape);\n this.layer.built = true;\n }\n super.build(inputShape);\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const childInputShape = [inputShape[0]].concat(inputShape.slice(2));\n const childOutputShape = this.layer.computeOutputShape(childInputShape);\n const timesteps = inputShape[1];\n return [childOutputShape[0], timesteps].concat(childOutputShape.slice(1));\n }\n call(inputs, kwargs) {\n return tidy(() => {\n // TODO(cais): Add 'training' and 'useLearningPhase' to kwargs.\n inputs = getExactlyOneTensor(inputs);\n // Porting Note: In tfjs-layers, `inputs` are always concrete tensor\n // values. 
Hence the inputs can't have an undetermined first (batch)\n // dimension, which is why we always use the K.rnn approach here.\n const step = (inputs, states) => {\n // TODO(cais): Add useLearningPhase.\n // NOTE(cais): `layer.call` may return a length-1 array of Tensor in\n // some cases (e.g., `layer` is a `Sequential` instance), which is\n // why `getExactlyOneTensor` is used below.\n const output = getExactlyOneTensor(this.layer.call(inputs, kwargs));\n return [output, []];\n };\n const rnnOutputs = rnn(step, inputs, [], false /* goBackwards */, null /* mask */, null /* constants */, false /* unroll */, true /* needPerStepOutputs */);\n const y = rnnOutputs[1];\n // TODO(cais): Add activity regularization.\n // TODO(cais): Add useLearningPhase.\n return y;\n });\n }\n}\n/** @nocollapse */\nTimeDistributed.className = 'TimeDistributed';\nserialization.registerClass(TimeDistributed);\nexport function checkBidirectionalMergeMode(value) {\n generic_utils.checkStringTypeUnionValue(VALID_BIDIRECTIONAL_MERGE_MODES, 'BidirectionalMergeMode', value);\n}\nconst DEFAULT_BIDIRECTIONAL_MERGE_MODE = 'concat';\nexport class Bidirectional extends Wrapper {\n constructor(args) {\n super(args);\n // Note: When creating `this.forwardLayer`, the original Layer object\n // (`config.layer`) ought to be cloned. This is why we call\n // `getConfig()` followed by `deserialize()`. Without this cloning,\n // the layer names saved during serialization will incorrectly contain\n // the 'forward_' prefix. In Python Keras, this is done using\n // `copy.copy` (shallow copy), which does not have a simple equivalent\n // in JavaScript. JavaScript's `Object.assign()` does not copy\n // methods.\n const layerConfig = args.layer.getConfig();\n const forwDict = {};\n forwDict['className'] = args.layer.getClassName();\n forwDict['config'] = layerConfig;\n this.forwardLayer = deserialize(forwDict);\n layerConfig['goBackwards'] =\n layerConfig['goBackwards'] === true ? 
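/* the cloned config flips `goBackwards` so the backward layer processes the sequence in reverse */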
false : true;\n const backDict = {};\n backDict['className'] = args.layer.getClassName();\n backDict['config'] = layerConfig;\n this.backwardLayer = deserialize(backDict);\n this.forwardLayer.name = 'forward_' + this.forwardLayer.name;\n this.backwardLayer.name = 'backward_' + this.backwardLayer.name;\n this.mergeMode = args.mergeMode === undefined ?\n DEFAULT_BIDIRECTIONAL_MERGE_MODE :\n args.mergeMode;\n checkBidirectionalMergeMode(this.mergeMode);\n if (args.weights) {\n throw new NotImplementedError('weights support is not implemented for Bidirectional layer yet.');\n }\n this._stateful = args.layer.stateful;\n this.returnSequences = args.layer.returnSequences;\n this.returnState = args.layer.returnState;\n this.supportsMasking = true;\n this._trainable = true;\n this.inputSpec = args.layer.inputSpec;\n this.numConstants = null;\n }\n get trainable() {\n return this._trainable;\n }\n set trainable(value) {\n // Porting Note: the check of `this.layer` here is necessary due to the\n // way the `constructor` of this class is written (see Porting Note\n // above).\n this._trainable = value;\n if (this.forwardLayer != null) {\n this.forwardLayer.trainable = value;\n }\n if (this.backwardLayer != null) {\n this.backwardLayer.trainable = value;\n }\n }\n getWeights() {\n return this.forwardLayer.getWeights().concat(this.backwardLayer.getWeights());\n }\n setWeights(weights) {\n const numWeights = weights.length;\n const numeightsOver2 = Math.floor(numWeights / 2);\n this.forwardLayer.setWeights(weights.slice(0, numeightsOver2));\n this.backwardLayer.setWeights(weights.slice(numeightsOver2));\n }\n computeOutputShape(inputShape) {\n let layerShapes = this.forwardLayer.computeOutputShape(inputShape);\n if (!(Array.isArray(layerShapes) && Array.isArray(layerShapes[0]))) {\n layerShapes = [layerShapes];\n }\n layerShapes = layerShapes;\n let outputShape;\n let outputShapes;\n let stateShape;\n if (this.returnState) {\n stateShape = layerShapes.slice(1);\n outputShape = layerShapes[0];\n }\n else {\n outputShape = layerShapes[0];\n }\n outputShape = outputShape;\n if (this.mergeMode === 'concat') {\n outputShape[outputShape.length - 1] *= 2;\n outputShapes = [outputShape];\n }\n else if (this.mergeMode == null) {\n outputShapes = [outputShape, outputShape.slice()];\n }\n else {\n outputShapes = [outputShape];\n }\n if (this.returnState) {\n if (this.mergeMode == null) {\n return outputShapes.concat(stateShape).concat(stateShape.slice());\n }\n return [outputShape].concat(stateShape).concat(stateShape.slice());\n }\n return generic_utils.singletonOrArray(outputShapes);\n }\n apply(inputs, kwargs) {\n let initialState = kwargs == null ? null : kwargs['initialState'];\n let constants = kwargs == null ? 
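/* optional external RNN constants passed via kwargs; Bidirectional rejects them further below with a NotImplementedError */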
null : kwargs['constants'];\n if (kwargs == null) {\n kwargs = {};\n }\n const standardized = standardizeArgs(inputs, initialState, constants, this.numConstants);\n inputs = standardized.inputs;\n initialState = standardized.initialState;\n constants = standardized.constants;\n if (Array.isArray(inputs)) {\n initialState = inputs.slice(1);\n inputs = inputs[0];\n }\n if ((initialState == null || initialState.length === 0) &&\n constants == null) {\n return super.apply(inputs, kwargs);\n }\n const additionalInputs = [];\n const additionalSpecs = [];\n if (initialState != null) {\n const numStates = initialState.length;\n if (numStates % 2 > 0) {\n throw new ValueError('When passing `initialState` to a Bidrectional RNN, ' +\n 'the state should be an Array containing the states of ' +\n 'the underlying RNNs.');\n }\n kwargs['initialState'] = initialState;\n additionalInputs.push(...initialState);\n const stateSpecs = initialState\n .map(state => new InputSpec({ shape: state.shape }));\n this.forwardLayer.stateSpec = stateSpecs.slice(0, numStates / 2);\n this.backwardLayer.stateSpec = stateSpecs.slice(numStates / 2);\n additionalSpecs.push(...stateSpecs);\n }\n if (constants != null) {\n throw new NotImplementedError('Support for constants in Bidirectional layers is not ' +\n 'implemented yet.');\n }\n const isSymbolicTensor = additionalInputs[0] instanceof SymbolicTensor;\n for (const tensor of additionalInputs) {\n if (tensor instanceof SymbolicTensor !== isSymbolicTensor) {\n throw new ValueError('The initial state of a Bidirectional layer cannot be ' +\n 'specified as a mix of symbolic and non-symbolic tensors');\n }\n }\n if (isSymbolicTensor) {\n // Compute the full input and specs, including the states.\n const fullInput = [inputs].concat(additionalInputs);\n const fullInputSpec = this.inputSpec.concat(additionalSpecs);\n // Perform the call temporarily and replace inputSpec.\n // Note: with initial states symbolic calls and non-symbolic calls to\n // this method differ in how the initial states are passed. For\n // symbolic calls, the initial states are passed in the first arg, as\n // an Array of SymbolicTensors; for non-symbolic calls, they are\n // passed in the second arg as a part of the kwargs. 
Hence the need to\n // temporarily modify inputSpec here.\n // TODO(cais): Make refactoring so that this hacky code below is no\n // longer needed.\n const originalInputSpec = this.inputSpec;\n this.inputSpec = fullInputSpec;\n const output = super.apply(fullInput, kwargs);\n this.inputSpec = originalInputSpec;\n return output;\n }\n else {\n return super.apply(inputs, kwargs);\n }\n }\n call(inputs, kwargs) {\n return tidy(() => {\n const initialState = kwargs['initialState'];\n let y;\n let yRev;\n if (initialState == null) {\n y = this.forwardLayer.call(inputs, kwargs);\n yRev = this.backwardLayer.call(inputs, kwargs);\n }\n else {\n const forwardState = initialState.slice(0, initialState.length / 2);\n const backwardState = initialState.slice(initialState.length / 2);\n y = this.forwardLayer.call(inputs, Object.assign(kwargs, { initialState: forwardState }));\n yRev = this.backwardLayer.call(inputs, Object.assign(kwargs, { initialState: backwardState }));\n }\n let states;\n if (this.returnState) {\n if (Array.isArray(y)) {\n states = y.slice(1).concat(yRev.slice(1));\n }\n else {\n }\n y = y[0];\n yRev = yRev[0];\n }\n if (this.returnSequences) {\n yRev = tfc.reverse(yRev, 1);\n }\n let output;\n if (this.mergeMode === 'concat') {\n output = K.concatenate([y, yRev]);\n }\n else if (this.mergeMode === 'sum') {\n output = tfc.add(y, yRev);\n }\n else if (this.mergeMode === 'ave') {\n output = tfc.mul(.5, tfc.add(y, yRev));\n }\n else if (this.mergeMode === 'mul') {\n output = tfc.mul(y, yRev);\n }\n else if (this.mergeMode == null) {\n output = [y, yRev];\n }\n // TODO(cais): Properly set learning phase.\n if (this.returnState) {\n if (this.mergeMode == null) {\n return output.concat(states);\n }\n return [output].concat(states);\n }\n return output;\n });\n }\n resetStates(states) {\n this.forwardLayer.resetStates();\n this.backwardLayer.resetStates();\n }\n build(inputShape) {\n nameScope(this.forwardLayer.name, () => {\n this.forwardLayer.build(inputShape);\n });\n nameScope(this.backwardLayer.name, () => {\n this.backwardLayer.build(inputShape);\n });\n this.built = true;\n }\n computeMask(inputs, mask) {\n if (Array.isArray(mask)) {\n mask = mask[0];\n }\n let outputMask;\n if (this.returnSequences) {\n if (this.mergeMode == null) {\n outputMask = [mask, mask];\n }\n else {\n outputMask = mask;\n }\n }\n else {\n if (this.mergeMode == null) {\n outputMask = [null, null];\n }\n else {\n outputMask = null;\n }\n }\n if (this.returnState) {\n const states = this.forwardLayer.states;\n const stateMask = states.map(state => null);\n if (Array.isArray(outputMask)) {\n return outputMask.concat(stateMask).concat(stateMask);\n }\n else {\n return [outputMask].concat(stateMask).concat(stateMask);\n }\n }\n else {\n return outputMask;\n }\n }\n get trainableWeights() {\n return this.forwardLayer.trainableWeights.concat(this.backwardLayer.trainableWeights);\n }\n get nonTrainableWeights() {\n return this.forwardLayer.nonTrainableWeights.concat(this.backwardLayer.nonTrainableWeights);\n }\n // TODO(cais): Implement constraints().\n setFastWeightInitDuringBuild(value) {\n super.setFastWeightInitDuringBuild(value);\n if (this.forwardLayer != null) {\n this.forwardLayer.setFastWeightInitDuringBuild(value);\n }\n if (this.backwardLayer != null) {\n this.backwardLayer.setFastWeightInitDuringBuild(value);\n }\n }\n getConfig() {\n const config = {\n 'mergeMode': this.mergeMode,\n };\n // TODO(cais): Add logic for `numConstants` once the property is added.\n const baseConfig = super.getConfig();\n 
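// merge in the fields serialized by the Wrapper base class (the wrapped layer's className/config plus common Layer fields such as `name`)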
Object.assign(config, baseConfig);\n return config;\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n const rnnLayer = deserialize(config['layer']);\n delete config['layer'];\n // TODO(cais): Add logic for `numConstants` once the property is added.\n if (config['numConstants'] != null) {\n throw new NotImplementedError(`Deserialization of a Bidirectional layer with numConstants ` +\n `present is not supported yet.`);\n }\n // tslint:disable-next-line:no-any\n const newConfig = config;\n newConfig['layer'] = rnnLayer;\n return new cls(newConfig);\n }\n}\n/** @nocollapse */\nBidirectional.className = 'Bidirectional';\nserialization.registerClass(Bidirectional);\n//# sourceMappingURL=wrappers.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport { InputLayer } from './engine/input_layer';\nimport { Layer } from './engine/topology';\nimport { input } from './exports';\nimport { ELU, LeakyReLU, PReLU, ReLU, Softmax, ThresholdedReLU } from './layers/advanced_activations';\nimport { Conv1D, Conv2D, Conv2DTranspose, Conv3D, Cropping2D, SeparableConv2D, UpSampling2D } from './layers/convolutional';\nimport { DepthwiseConv2D } from './layers/convolutional_depthwise';\nimport { ConvLSTM2D, ConvLSTM2DCell } from './layers/convolutional_recurrent';\nimport { Activation, Dense, Dropout, Flatten, Masking, Permute, RepeatVector, Reshape, SpatialDropout1D } from './layers/core';\nimport { Embedding } from './layers/embeddings';\nimport { Add, Average, Concatenate, Dot, Maximum, Minimum, Multiply } from './layers/merge';\nimport { AlphaDropout, GaussianDropout, GaussianNoise } from './layers/noise';\nimport { BatchNormalization, LayerNormalization } from './layers/normalization';\nimport { ZeroPadding2D } from './layers/padding';\nimport { AveragePooling1D, AveragePooling2D, AveragePooling3D, GlobalAveragePooling1D, GlobalAveragePooling2D, GlobalMaxPooling1D, GlobalMaxPooling2D, MaxPooling1D, MaxPooling2D, MaxPooling3D } from './layers/pooling';\nimport { GRU, GRUCell, LSTM, LSTMCell, RNN, RNNCell, SimpleRNN, SimpleRNNCell, StackedRNNCells } from './layers/recurrent';\nimport { Bidirectional, TimeDistributed } from './layers/wrappers';\n// TODO(cais): Add doc string to all the public static functions in this\n// class; include exectuable JavaScript code snippets where applicable\n// (b/74074458).\n// Input Layer.\n/**\n * An input layer is an entry point into a `tf.LayersModel`.\n *\n * `InputLayer` is generated automatically for `tf.Sequential`` models by\n * specifying the `inputshape` or `batchInputShape` for the first layer. It\n * should not be specified explicitly. However, it can be useful sometimes,\n * e.g., when constructing a sequential model from a subset of another\n * sequential model's layers. Like the code snippet below shows.\n *\n * ```js\n * // Define a model which simply adds two inputs.\n * const model1 = tf.sequential();\n * model1.add(tf.layers.dense({inputShape: [4], units: 3, activation: 'relu'}));\n * model1.add(tf.layers.dense({units: 1, activation: 'sigmoid'}));\n * model1.summary();\n * model1.predict(tf.zeros([1, 4])).print();\n *\n * // Construct another model, reusing the second layer of `model1` while\n * // not using the first layer of `model1`. 
Note that you cannot add the second\n * // layer of `model` directly as the first layer of the new sequential model,\n * // because doing so will lead to an error related to the fact that the layer\n * // is not an input layer. Instead, you need to create an `inputLayer` and add\n * // it to the new sequential model before adding the reused layer.\n * const model2 = tf.sequential();\n * // Use an inputShape that matches the input shape of `model1`'s second\n * // layer.\n * model2.add(tf.layers.inputLayer({inputShape: [3]}));\n * model2.add(model1.layers[1]);\n * model2.summary();\n * model2.predict(tf.zeros([1, 3])).print();\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Inputs', namespace: 'layers'}\n */\nexport function inputLayer(args) {\n return new InputLayer(args);\n}\n// Advanced Activation Layers.\n/**\n * Exponetial Linear Unit (ELU).\n *\n * It follows:\n * `f(x) = alpha * (exp(x) - 1.) for x < 0`,\n * `f(x) = x for x >= 0`.\n *\n * Input shape:\n * Arbitrary. Use the configuration `inputShape` when using this layer as the\n * first layer in a model.\n *\n * Output shape:\n * Same shape as the input.\n *\n * References:\n * - [Fast and Accurate Deep Network Learning by Exponential Linear Units\n * (ELUs)](https://arxiv.org/abs/1511.07289v1)\n *\n * @doc {\n * heading: 'Layers',\n * subheading: 'Advanced Activation',\n * namespace: 'layers'\n * }\n */\nexport function elu(args) {\n return new ELU(args);\n}\n/**\n * Rectified Linear Unit activation function.\n *\n * Input shape:\n * Arbitrary. Use the config field `inputShape` (Array of integers, does\n * not include the sample axis) when using this layer as the first layer\n * in a model.\n *\n * Output shape:\n * Same shape as the input.\n *\n * @doc {\n * heading: 'Layers',\n * subheading: 'Advanced Activation',\n * namespace: 'layers'\n * }\n */\nexport function reLU(args) {\n return new ReLU(args);\n}\n/**\n * Leaky version of a rectified linear unit.\n *\n * It allows a small gradient when the unit is not active:\n * `f(x) = alpha * x for x < 0.`\n * `f(x) = x for x >= 0.`\n *\n * Input shape:\n * Arbitrary. Use the configuration `inputShape` when using this layer as the\n * first layer in a model.\n *\n * Output shape:\n * Same shape as the input.\n *\n * @doc {\n * heading: 'Layers',\n * subheading: 'Advanced Activation',\n * namespace: 'layers'\n * }\n */\nexport function leakyReLU(args) {\n return new LeakyReLU(args);\n}\n/**\n * Parameterized version of a leaky rectified linear unit.\n *\n * It follows\n * `f(x) = alpha * x for x < 0.`\n * `f(x) = x for x >= 0.`\n * wherein `alpha` is a trainable weight.\n *\n * Input shape:\n * Arbitrary. Use the configuration `inputShape` when using this layer as the\n * first layer in a model.\n *\n * Output shape:\n * Same shape as the input.\n *\n * @doc {\n * heading: 'Layers',\n * subheading: 'Advanced Activation',\n * namespace: 'layers'\n * }\n */\nexport function prelu(args) {\n return new PReLU(args);\n}\n/**\n * Softmax activation layer.\n *\n * Input shape:\n * Arbitrary. Use the configuration `inputShape` when using this layer as the\n * first layer in a model.\n *\n * Output shape:\n * Same shape as the input.\n *\n * @doc {\n * heading: 'Layers',\n * subheading: 'Advanced Activation',\n * namespace: 'layers'\n * }\n */\nexport function softmax(args) {\n return new Softmax(args);\n}\n/**\n * Thresholded Rectified Linear Unit.\n *\n * It follows:\n * `f(x) = x for x > theta`,\n * `f(x) = 0 otherwise`.\n *\n * Input shape:\n * Arbitrary. 
Use the configuration `inputShape` when using this layer as the\n * first layer in a model.\n *\n * Output shape:\n * Same shape as the input.\n *\n * References:\n * - [Zero-Bias Autoencoders and the Benefits of Co-Adapting\n * Features](http://arxiv.org/abs/1402.3337)\n *\n * @doc {\n * heading: 'Layers',\n * subheading: 'Advanced Activation',\n * namespace: 'layers'\n * }\n */\nexport function thresholdedReLU(args) {\n return new ThresholdedReLU(args);\n}\n// Convolutional Layers.\n/**\n * 1D convolution layer (e.g., temporal convolution).\n *\n * This layer creates a convolution kernel that is convolved\n * with the layer input over a single spatial (or temporal) dimension\n * to produce a tensor of outputs.\n *\n * If `use_bias` is True, a bias vector is created and added to the outputs.\n *\n * If `activation` is not `null`, it is applied to the outputs as well.\n *\n * When using this layer as the first layer in a model, provide an\n * `inputShape` argument `Array` or `null`.\n *\n * For example, `inputShape` would be:\n * - `[10, 128]` for sequences of 10 vectors of 128-dimensional vectors\n * - `[null, 128]` for variable-length sequences of 128-dimensional vectors.\n *\n * @doc {heading: 'Layers', subheading: 'Convolutional', namespace: 'layers'}\n */\nexport function conv1d(args) {\n return new Conv1D(args);\n}\n/**\n * 2D convolution layer (e.g. spatial convolution over images).\n *\n * This layer creates a convolution kernel that is convolved\n * with the layer input to produce a tensor of outputs.\n *\n * If `useBias` is True, a bias vector is created and added to the outputs.\n *\n * If `activation` is not `null`, it is applied to the outputs as well.\n *\n * When using this layer as the first layer in a model,\n * provide the keyword argument `inputShape`\n * (Array of integers, does not include the sample axis),\n * e.g. `inputShape=[128, 128, 3]` for 128x128 RGB pictures\n * in `dataFormat='channelsLast'`.\n *\n * @doc {heading: 'Layers', subheading: 'Convolutional', namespace: 'layers'}\n */\nexport function conv2d(args) {\n return new Conv2D(args);\n}\n/**\n * Transposed convolutional layer (sometimes called Deconvolution).\n *\n * The need for transposed convolutions generally arises\n * from the desire to use a transformation going in the opposite direction of\n * a normal convolution, i.e., from something that has the shape of the output\n * of some convolution to something that has the shape of its input while\n * maintaining a connectivity pattern that is compatible with said\n * convolution.\n *\n * When using this layer as the first layer in a model, provide the\n * configuration `inputShape` (`Array` of integers, does not include the\n * sample axis), e.g., `inputShape: [128, 128, 3]` for 128x128 RGB pictures in\n * `dataFormat: 'channelsLast'`.\n *\n * Input shape:\n * 4D tensor with shape:\n * `[batch, channels, rows, cols]` if `dataFormat` is `'channelsFirst'`.\n * or 4D tensor with shape\n * `[batch, rows, cols, channels]` if `dataFormat` is `'channelsLast`.\n *\n * Output shape:\n * 4D tensor with shape:\n * `[batch, filters, newRows, newCols]` if `dataFormat` is\n * `'channelsFirst'`. 
or 4D tensor with shape:\n * `[batch, newRows, newCols, filters]` if `dataFormat` is `'channelsLast'`.\n *\n * References:\n * - [A guide to convolution arithmetic for deep\n * learning](https://arxiv.org/abs/1603.07285v1)\n * - [Deconvolutional\n * Networks](http://www.matthewzeiler.com/pubs/cvpr2010/cvpr2010.pdf)\n *\n * @doc {heading: 'Layers', subheading: 'Convolutional', namespace: 'layers'}\n */\nexport function conv2dTranspose(args) {\n return new Conv2DTranspose(args);\n}\n/**\n * 3D convolution layer (e.g. spatial convolution over volumes).\n *\n * This layer creates a convolution kernel that is convolved\n * with the layer input to produce a tensor of outputs.\n *\n * If `useBias` is True, a bias vector is created and added to the outputs.\n *\n * If `activation` is not `null`, it is applied to the outputs as well.\n *\n * When using this layer as the first layer in a model,\n * provide the keyword argument `inputShape`\n * (Array of integers, does not include the sample axis),\n * e.g. `inputShape=[128, 128, 128, 1]` for 128x128x128 grayscale volumes\n * in `dataFormat='channelsLast'`.\n *\n * @doc {heading: 'Layers', subheading: 'Convolutional', namespace: 'layers'}\n */\nexport function conv3d(args) {\n return new Conv3D(args);\n}\n/**\n * Depthwise separable 2D convolution.\n *\n * Separable convolution consists of first performing\n * a depthwise spatial convolution\n * (which acts on each input channel separately)\n * followed by a pointwise convolution which mixes together the resulting\n * output channels. The `depthMultiplier` argument controls how many\n * output channels are generated per input channel in the depthwise step.\n *\n * Intuitively, separable convolutions can be understood as\n * a way to factorize a convolution kernel into two smaller kernels,\n * or as an extreme version of an Inception block.\n *\n * Input shape:\n * 4D tensor with shape:\n * `[batch, channels, rows, cols]` if data_format='channelsFirst'\n * or 4D tensor with shape:\n * `[batch, rows, cols, channels]` if data_format='channelsLast'.\n *\n * Output shape:\n * 4D tensor with shape:\n * `[batch, filters, newRows, newCols]` if data_format='channelsFirst'\n * or 4D tensor with shape:\n * `[batch, newRows, newCols, filters]` if data_format='channelsLast'.\n * `rows` and `cols` values might have changed due to padding.\n *\n * @doc {heading: 'Layers', subheading: 'Convolutional', namespace: 'layers'}\n */\nexport function separableConv2d(args) {\n return new SeparableConv2D(args);\n}\n/**\n * Cropping layer for 2D input (e.g., image).\n *\n * This layer can crop an input\n * at the top, bottom, left and right side of an image tensor.\n *\n * Input shape:\n * 4D tensor with shape:\n * - If `dataFormat` is `\"channelsLast\"`:\n * `[batch, rows, cols, channels]`\n * - If `data_format` is `\"channels_first\"`:\n * `[batch, channels, rows, cols]`.\n *\n * Output shape:\n * 4D with shape:\n * - If `dataFormat` is `\"channelsLast\"`:\n * `[batch, croppedRows, croppedCols, channels]`\n * - If `dataFormat` is `\"channelsFirst\"`:\n * `[batch, channels, croppedRows, croppedCols]`.\n *\n * Examples\n * ```js\n *\n * const model = tf.sequential();\n * model.add(tf.layers.cropping2D({cropping:[[2, 2], [2, 2]],\n * inputShape: [128, 128, 3]}));\n * //now output shape is [batch, 124, 124, 3]\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Convolutional', namespace: 'layers'}\n */\nexport function cropping2D(args) {\n return new Cropping2D(args);\n}\n/**\n * Upsampling layer for 2D inputs.\n *\n * 
Repeats the rows and columns of the data\n * by size[0] and size[1] respectively.\n *\n *\n * Input shape:\n * 4D tensor with shape:\n * - If `dataFormat` is `\"channelsLast\"`:\n * `[batch, rows, cols, channels]`\n * - If `dataFormat` is `\"channelsFirst\"`:\n * `[batch, channels, rows, cols]`\n *\n * Output shape:\n * 4D tensor with shape:\n * - If `dataFormat` is `\"channelsLast\"`:\n * `[batch, upsampledRows, upsampledCols, channels]`\n * - If `dataFormat` is `\"channelsFirst\"`:\n * `[batch, channels, upsampledRows, upsampledCols]`\n *\n *\n * @doc {heading: 'Layers', subheading: 'Convolutional', namespace: 'layers'}\n */\nexport function upSampling2d(args) {\n return new UpSampling2D(args);\n}\n// Convolutional(depthwise) Layers.\n/**\n * Depthwise separable 2D convolution.\n *\n * Depthwise Separable convolutions consists in performing just the first step\n * in a depthwise spatial convolution (which acts on each input channel\n * separately). The `depthMultplier` argument controls how many output channels\n * are generated per input channel in the depthwise step.\n *\n * @doc {heading: 'Layers', subheading: 'Convolutional', namespace: 'layers'}\n */\nexport function depthwiseConv2d(args) {\n return new DepthwiseConv2D(args);\n}\n// Basic Layers.\n/**\n * Applies an activation function to an output.\n *\n * This layer applies element-wise activation function. Other layers, notably\n * `dense` can also apply activation functions. Use this isolated activation\n * function to extract the values before and after the\n * activation. For instance:\n *\n * ```js\n * const input = tf.input({shape: [5]});\n * const denseLayer = tf.layers.dense({units: 1});\n * const activationLayer = tf.layers.activation({activation: 'relu6'});\n *\n * // Obtain the output symbolic tensors by applying the layers in order.\n * const denseOutput = denseLayer.apply(input);\n * const activationOutput = activationLayer.apply(denseOutput);\n *\n * // Create the model based on the inputs.\n * const model = tf.model({\n * inputs: input,\n * outputs: [denseOutput, activationOutput]\n * });\n *\n * // Collect both outputs and print separately.\n * const [denseOut, activationOut] = model.predict(tf.randomNormal([6, 5]));\n * denseOut.print();\n * activationOut.print();\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Basic', namespace: 'layers'}\n */\nexport function activation(args) {\n return new Activation(args);\n}\n/**\n * Creates a dense (fully connected) layer.\n *\n * This layer implements the operation:\n * `output = activation(dot(input, kernel) + bias)`\n *\n * `activation` is the element-wise activation function\n * passed as the `activation` argument.\n *\n * `kernel` is a weights matrix created by the layer.\n *\n * `bias` is a bias vector created by the layer (only applicable if `useBias`\n * is `true`).\n *\n * **Input shape:**\n *\n * nD `tf.Tensor` with shape: `(batchSize, ..., inputDim)`.\n *\n * The most common situation would be\n * a 2D input with shape `(batchSize, inputDim)`.\n *\n * **Output shape:**\n *\n * nD tensor with shape: `(batchSize, ..., units)`.\n *\n * For instance, for a 2D input with shape `(batchSize, inputDim)`,\n * the output would have shape `(batchSize, units)`.\n *\n * Note: if the input to the layer has a rank greater than 2, then it is\n * flattened prior to the initial dot product with the kernel.\n *\n * @doc {heading: 'Layers', subheading: 'Basic', namespace: 'layers'}\n */\nexport function dense(args) {\n return new Dense(args);\n}\n/**\n * Applies\n * 
[dropout](http://www.cs.toronto.edu/~rsalakhu/papers/srivastava14a.pdf) to\n * the input.\n *\n * Dropout consists in randomly setting a fraction `rate` of input units to 0 at\n * each update during training time, which helps prevent overfitting.\n *\n * @doc {heading: 'Layers', subheading: 'Basic', namespace: 'layers'}\n */\nexport function dropout(args) {\n return new Dropout(args);\n}\n/**\n * Spatial 1D version of Dropout.\n *\n * This Layer type performs the same function as the Dropout layer, but it drops\n * entire 1D feature maps instead of individual elements. For example, if an\n * input example consists of 3 timesteps and the feature map for each timestep\n * has a size of 4, a `spatialDropout1d` layer may zero out the feature maps\n * of the 1st timesteps and 2nd timesteps completely while sparing all feature\n * elements of the 3rd timestep.\n *\n * If adjacent frames (timesteps) are strongly correlated (as is normally the\n * case in early convolution layers), regular dropout will not regularize the\n * activation and will otherwise just result in merely an effective learning\n * rate decrease. In this case, `spatialDropout1d` will help promote\n * independence among feature maps and should be used instead.\n *\n * **Arguments:**\n * rate: A floating-point number >=0 and <=1. Fraction of the input elements\n * to drop.\n *\n * **Input shape:**\n * 3D tensor with shape `(samples, timesteps, channels)`.\n *\n * **Output shape:**\n * Same as the input shape.\n *\n * References:\n * - [Efficient Object Localization Using Convolutional\n * Networks](https://arxiv.org/abs/1411.4280)\n *\n * @doc {heading: 'Layers', subheading: 'Basic', namespace: 'layers'}\n */\nexport function spatialDropout1d(args) {\n return new SpatialDropout1D(args);\n}\n/**\n * Flattens the input. Does not affect the batch size.\n *\n * A `Flatten` layer flattens each batch in its inputs to 1D (making the output\n * 2D).\n *\n * For example:\n *\n * ```js\n * const input = tf.input({shape: [4, 3]});\n * const flattenLayer = tf.layers.flatten();\n * // Inspect the inferred output shape of the flatten layer, which\n * // equals `[null, 12]`. The 2nd dimension is 4 * 3, i.e., the result of the\n * // flattening. (The 1st dimension is the undermined batch size.)\n * console.log(JSON.stringify(flattenLayer.apply(input).shape));\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Basic', namespace: 'layers'}\n */\nexport function flatten(args) {\n return new Flatten(args);\n}\n/**\n * Repeats the input n times in a new dimension.\n *\n * ```js\n * const model = tf.sequential();\n * model.add(tf.layers.repeatVector({n: 4, inputShape: [2]}));\n * const x = tf.tensor2d([[10, 20]]);\n * // Use the model to do inference on a data point the model hasn't see\n * model.predict(x).print();\n * // output shape is now [batch, 2, 4]\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Basic', namespace: 'layers'}\n */\nexport function repeatVector(args) {\n return new RepeatVector(args);\n}\n/**\n * Reshapes an input to a certain shape.\n *\n * ```js\n * const input = tf.input({shape: [4, 3]});\n * const reshapeLayer = tf.layers.reshape({targetShape: [2, 6]});\n * // Inspect the inferred output shape of the Reshape layer, which\n * // equals `[null, 2, 6]`. 
(The 1st dimension is the undermined batch size.)\n * console.log(JSON.stringify(reshapeLayer.apply(input).shape));\n * ```\n *\n * Input shape:\n * Arbitrary, although all dimensions in the input shape must be fixed.\n * Use the configuration `inputShape` when using this layer as the\n * first layer in a model.\n *\n *\n * Output shape:\n * [batchSize, targetShape[0], targetShape[1], ...,\n * targetShape[targetShape.length - 1]].\n *\n * @doc {heading: 'Layers', subheading: 'Basic', namespace: 'layers'}\n */\nexport function reshape(args) {\n return new Reshape(args);\n}\n/**\n * Permutes the dimensions of the input according to a given pattern.\n *\n * Useful for, e.g., connecting RNNs and convnets together.\n *\n * Example:\n *\n * ```js\n * const model = tf.sequential();\n * model.add(tf.layers.permute({\n * dims: [2, 1],\n * inputShape: [10, 64]\n * }));\n * console.log(model.outputShape);\n * // Now model's output shape is [null, 64, 10], where null is the\n * // unpermuted sample (batch) dimension.\n * ```\n *\n * Input shape:\n * Arbitrary. Use the configuration field `inputShape` when using this\n * layer as the first layer in a model.\n *\n * Output shape:\n * Same rank as the input shape, but with the dimensions re-ordered (i.e.,\n * permuted) according to the `dims` configuration of this layer.\n *\n * @doc {heading: 'Layers', subheading: 'Basic', namespace: 'layers'}\n */\nexport function permute(args) {\n return new Permute(args);\n}\n/**\n * Maps positive integers (indices) into dense vectors of fixed size.\n * eg. [[4], [20]] -> [[0.25, 0.1], [0.6, -0.2]]\n *\n * **Input shape:** 2D tensor with shape: `[batchSize, sequenceLength]`.\n *\n * **Output shape:** 3D tensor with shape: `[batchSize, sequenceLength,\n * outputDim]`.\n *\n * @doc {heading: 'Layers', subheading: 'Basic', namespace: 'layers'}\n */\nexport function embedding(args) {\n return new Embedding(args);\n}\n// Merge Layers.\n/**\n * Layer that performs element-wise addition on an `Array` of inputs.\n *\n * It takes as input a list of tensors, all of the same shape, and returns a\n * single tensor (also of the same shape). The inputs are specified as an\n * `Array` when the `apply` method of the `Add` layer instance is called. For\n * example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const addLayer = tf.layers.add();\n * const sum = addLayer.apply([input1, input2]);\n * console.log(JSON.stringify(sum.shape));\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Merge', namespace: 'layers'}\n */\nexport function add(args) {\n return new Add(args);\n}\n/**\n * Layer that performs element-wise averaging on an `Array` of inputs.\n *\n * It takes as input a list of tensors, all of the same shape, and returns a\n * single tensor (also of the same shape). 
For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const averageLayer = tf.layers.average();\n * const average = averageLayer.apply([input1, input2]);\n * console.log(JSON.stringify(average.shape));\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Merge', namespace: 'layers'}\n */\nexport function average(args) {\n return new Average(args);\n}\n/**\n * Layer that concatenates an `Array` of inputs.\n *\n * It takes a list of tensors, all of the same shape except for the\n * concatenation axis, and returns a single tensor, the concatenation\n * of all inputs. For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 3]});\n * const concatLayer = tf.layers.concatenate();\n * const output = concatLayer.apply([input1, input2]);\n * console.log(JSON.stringify(output.shape));\n * // You get [null, 2, 5], with the first dimension as the undetermined batch\n * // dimension. The last dimension (5) is the result of concatenating the\n * // last dimensions of the inputs (2 and 3).\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Merge', namespace: 'layers'}\n */\nexport function concatenate(args) {\n return new Concatenate(args);\n}\n/**\n * Layer that computes the element-wise maximum an `Array` of inputs.\n *\n * It takes as input a list of tensors, all of the same shape and returns a\n * single tensor (also of the same shape). For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const maxLayer = tf.layers.maximum();\n * const max = maxLayer.apply([input1, input2]);\n * console.log(JSON.stringify(max.shape));\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Merge', namespace: 'layers'}\n */\nexport function maximum(args) {\n return new Maximum(args);\n}\n/**\n * Layer that computes the element-wise minimum of an `Array` of inputs.\n *\n * It takes as input a list of tensors, all of the same shape and returns a\n * single tensor (also of the same shape). 
For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const minLayer = tf.layers.minimum();\n * const min = minLayer.apply([input1, input2]);\n * console.log(JSON.stringify(min.shape));\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Merge', namespace: 'layers'}\n */\nexport function minimum(args) {\n return new Minimum(args);\n}\n/**\n * Layer that multiplies (element-wise) an `Array` of inputs.\n *\n * It takes as input an Array of tensors, all of the same\n * shape, and returns a single tensor (also of the same shape).\n * For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const input3 = tf.input({shape: [2, 2]});\n * const multiplyLayer = tf.layers.multiply();\n * const product = multiplyLayer.apply([input1, input2, input3]);\n * console.log(product.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n *\n * @doc {heading: 'Layers', subheading: 'Merge', namespace: 'layers'}\n */\nexport function multiply(args) {\n return new Multiply(args);\n}\n/**\n * Layer that computes a dot product between samples in two tensors.\n *\n * E.g., if applied to a list of two tensors `a` and `b` both of shape\n * `[batchSize, n]`, the output will be a tensor of shape `[batchSize, 1]`,\n * where each entry at index `[i, 0]` will be the dot product between\n * `a[i, :]` and `b[i, :]`.\n *\n * Example:\n *\n * ```js\n * const dotLayer = tf.layers.dot({axes: -1});\n * const x1 = tf.tensor2d([[10, 20], [30, 40]]);\n * const x2 = tf.tensor2d([[-1, -2], [-3, -4]]);\n *\n * // Invoke the layer's apply() method in eager (imperative) mode.\n * const y = dotLayer.apply([x1, x2]);\n * y.print();\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Merge', namespace: 'layers'}\n */\nexport function dot(args) {\n return new Dot(args);\n}\n// Normalization Layers.\n/**\n * Batch normalization layer (Ioffe and Szegedy, 2014).\n *\n * Normalize the activations of the previous layer at each batch,\n * i.e. applies a transformation that maintains the mean activation\n * close to 0 and the activation standard deviation close to 1.\n *\n * Input shape:\n * Arbitrary. Use the keyword argument `inputShape` (Array of integers, does\n * not include the sample axis) when calling the constructor of this class,\n * if this layer is used as a first layer in a model.\n *\n * Output shape:\n * Same shape as input.\n *\n * References:\n * - [Batch Normalization: Accelerating Deep Network Training by Reducing\n * Internal Covariate Shift](https://arxiv.org/abs/1502.03167)\n *\n * @doc {heading: 'Layers', subheading: 'Normalization', namespace: 'layers'}\n */\nexport function batchNormalization(args) {\n return new BatchNormalization(args);\n}\n/**\n * Layer-normalization layer (Ba et al., 2016).\n *\n * Normalizes the activations of the previous layer for each given example in a\n * batch independently, instead of across a batch like in `batchNormalization`.\n * In other words, this layer applies a transformation that maintanis the mean\n * activation within each example close to0 and activation variance close to 1.\n *\n * Input shape:\n * Arbitrary. 
Use the argument `inputShape` when using this layer as the first\n * layer in a model.\n *\n * Output shape:\n * Same as input.\n *\n * References:\n * - [Layer Normalization](https://arxiv.org/abs/1607.06450)\n *\n * @doc {heading: 'Layers', subheading: 'Normalization', namespace: 'layers'}\n */\nexport function layerNormalization(args) {\n return new LayerNormalization(args);\n}\n// Padding Layers.\n/**\n * Zero-padding layer for 2D input (e.g., image).\n *\n * This layer can add rows and columns of zeros\n * at the top, bottom, left and right side of an image tensor.\n *\n * Input shape:\n * 4D tensor with shape:\n * - If `dataFormat` is `\"channelsLast\"`:\n * `[batch, rows, cols, channels]`\n * - If `data_format` is `\"channels_first\"`:\n * `[batch, channels, rows, cols]`.\n *\n * Output shape:\n * 4D with shape:\n * - If `dataFormat` is `\"channelsLast\"`:\n * `[batch, paddedRows, paddedCols, channels]`\n * - If `dataFormat` is `\"channelsFirst\"`:\n * `[batch, channels, paddedRows, paddedCols]`.\n *\n * @doc {heading: 'Layers', subheading: 'Padding', namespace: 'layers'}\n */\nexport function zeroPadding2d(args) {\n return new ZeroPadding2D(args);\n}\n// Pooling Layers.\n/**\n * Average pooling operation for spatial data.\n *\n * Input shape: `[batchSize, inLength, channels]`\n *\n * Output shape: `[batchSize, pooledLength, channels]`\n *\n * `tf.avgPool1d` is an alias.\n *\n * @doc {heading: 'Layers', subheading: 'Pooling', namespace: 'layers'}\n */\nexport function averagePooling1d(args) {\n return new AveragePooling1D(args);\n}\nexport function avgPool1d(args) {\n return averagePooling1d(args);\n}\n// For backwards compatibility.\n// See https://github.com/tensorflow/tfjs/issues/152\nexport function avgPooling1d(args) {\n return averagePooling1d(args);\n}\n/**\n * Average pooling operation for spatial data.\n *\n * Input shape:\n * - If `dataFormat === CHANNEL_LAST`:\n * 4D tensor with shape:\n * `[batchSize, rows, cols, channels]`\n * - If `dataFormat === CHANNEL_FIRST`:\n * 4D tensor with shape:\n * `[batchSize, channels, rows, cols]`\n *\n * Output shape\n * - If `dataFormat === CHANNEL_LAST`:\n * 4D tensor with shape:\n * `[batchSize, pooleRows, pooledCols, channels]`\n * - If `dataFormat === CHANNEL_FIRST`:\n * 4D tensor with shape:\n * `[batchSize, channels, pooleRows, pooledCols]`\n *\n * `tf.avgPool2d` is an alias.\n *\n * @doc {heading: 'Layers', subheading: 'Pooling', namespace: 'layers'}\n */\nexport function averagePooling2d(args) {\n return new AveragePooling2D(args);\n}\nexport function avgPool2d(args) {\n return averagePooling2d(args);\n}\n// For backwards compatibility.\n// See https://github.com/tensorflow/tfjs/issues/152\nexport function avgPooling2d(args) {\n return averagePooling2d(args);\n}\n/**\n * Average pooling operation for 3D data.\n *\n * Input shape\n * - If `dataFormat === channelsLast`:\n * 5D tensor with shape:\n * `[batchSize, depths, rows, cols, channels]`\n * - If `dataFormat === channelsFirst`:\n * 4D tensor with shape:\n * `[batchSize, channels, depths, rows, cols]`\n *\n * Output shape\n * - If `dataFormat=channelsLast`:\n * 5D tensor with shape:\n * `[batchSize, pooledDepths, pooledRows, pooledCols, channels]`\n * - If `dataFormat=channelsFirst`:\n * 5D tensor with shape:\n * `[batchSize, channels, pooledDepths, pooledRows, pooledCols]`\n *\n * @doc {heading: 'Layers', subheading: 'Pooling', namespace: 'layers'}\n */\nexport function averagePooling3d(args) {\n return new AveragePooling3D(args);\n}\nexport function avgPool3d(args) {\n return 
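/* alias for `averagePooling3d` */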
averagePooling3d(args);\n}\n// For backwards compatibility.\n// See https://github.com/tensorflow/tfjs/issues/152\nexport function avgPooling3d(args) {\n return averagePooling3d(args);\n}\n/**\n * Global average pooling operation for temporal data.\n *\n * Input Shape: 3D tensor with shape: `[batchSize, steps, features]`.\n *\n * Output Shape:2D tensor with shape: `[batchSize, features]`.\n *\n * @doc {heading: 'Layers', subheading: 'Pooling', namespace: 'layers'}\n */\nexport function globalAveragePooling1d(args) {\n return new GlobalAveragePooling1D(args);\n}\n/**\n * Global average pooling operation for spatial data.\n *\n * Input shape:\n * - If `dataFormat` is `CHANNEL_LAST`:\n * 4D tensor with shape: `[batchSize, rows, cols, channels]`.\n * - If `dataFormat` is `CHANNEL_FIRST`:\n * 4D tensor with shape: `[batchSize, channels, rows, cols]`.\n *\n * Output shape:\n * 2D tensor with shape: `[batchSize, channels]`.\n *\n * @doc {heading: 'Layers', subheading: 'Pooling', namespace: 'layers'}\n */\nexport function globalAveragePooling2d(args) {\n return new GlobalAveragePooling2D(args);\n}\n/**\n * Global max pooling operation for temporal data.\n *\n * Input Shape: 3D tensor with shape: `[batchSize, steps, features]`.\n *\n * Output Shape:2D tensor with shape: `[batchSize, features]`.\n *\n * @doc {heading: 'Layers', subheading: 'Pooling', namespace: 'layers'}\n */\nexport function globalMaxPooling1d(args) {\n return new GlobalMaxPooling1D(args);\n}\n/**\n * Global max pooling operation for spatial data.\n *\n * Input shape:\n * - If `dataFormat` is `CHANNEL_LAST`:\n * 4D tensor with shape: `[batchSize, rows, cols, channels]`.\n * - If `dataFormat` is `CHANNEL_FIRST`:\n * 4D tensor with shape: `[batchSize, channels, rows, cols]`.\n *\n * Output shape:\n * 2D tensor with shape: `[batchSize, channels]`.\n *\n * @doc {heading: 'Layers', subheading: 'Pooling', namespace: 'layers'}\n */\nexport function globalMaxPooling2d(args) {\n return new GlobalMaxPooling2D(args);\n}\n/**\n * Max pooling operation for temporal data.\n *\n * Input shape: `[batchSize, inLength, channels]`\n *\n * Output shape: `[batchSize, pooledLength, channels]`\n *\n * @doc {heading: 'Layers', subheading: 'Pooling', namespace: 'layers'}\n */\nexport function maxPooling1d(args) {\n return new MaxPooling1D(args);\n}\n/**\n * Max pooling operation for spatial data.\n *\n * Input shape\n * - If `dataFormat === CHANNEL_LAST`:\n * 4D tensor with shape:\n * `[batchSize, rows, cols, channels]`\n * - If `dataFormat === CHANNEL_FIRST`:\n * 4D tensor with shape:\n * `[batchSize, channels, rows, cols]`\n *\n * Output shape\n * - If `dataFormat=CHANNEL_LAST`:\n * 4D tensor with shape:\n * `[batchSize, pooleRows, pooledCols, channels]`\n * - If `dataFormat=CHANNEL_FIRST`:\n * 4D tensor with shape:\n * `[batchSize, channels, pooleRows, pooledCols]`\n *\n * @doc {heading: 'Layers', subheading: 'Pooling', namespace: 'layers'}\n */\nexport function maxPooling2d(args) {\n return new MaxPooling2D(args);\n}\n/**\n * Max pooling operation for 3D data.\n *\n * Input shape\n * - If `dataFormat === channelsLast`:\n * 5D tensor with shape:\n * `[batchSize, depths, rows, cols, channels]`\n * - If `dataFormat === channelsFirst`:\n * 5D tensor with shape:\n * `[batchSize, channels, depths, rows, cols]`\n *\n * Output shape\n * - If `dataFormat=channelsLast`:\n * 5D tensor with shape:\n * `[batchSize, pooledDepths, pooledRows, pooledCols, channels]`\n * - If `dataFormat=channelsFirst`:\n * 5D tensor with shape:\n * `[batchSize, channels, pooledDepths, 
pooledRows, pooledCols]`\n *\n * @doc {heading: 'Layers', subheading: 'Pooling', namespace: 'layers'}\n */\nexport function maxPooling3d(args) {\n return new MaxPooling3D(args);\n}\n// Recurrent Layers.\n/**\n * Gated Recurrent Unit - Cho et al. 2014.\n *\n * This is an `RNN` layer consisting of one `GRUCell`. However, unlike\n * the underlying `GRUCell`, the `apply` method of `SimpleRNN` operates\n * on a sequence of inputs. The shape of the input (not including the first,\n * batch dimension) needs to be at least 2-D, with the first dimension being\n * time steps. For example:\n *\n * ```js\n * const rnn = tf.layers.gru({units: 8, returnSequences: true});\n *\n * // Create an input with 10 time steps.\n * const input = tf.input({shape: [10, 20]});\n * const output = rnn.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10, 8]: 1st dimension is unknown batch size; 2nd dimension is the\n * // same as the sequence length of `input`, due to `returnSequences`: `true`;\n * // 3rd dimension is the `GRUCell`'s number of units.\n *\n * @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'}\n */\nexport function gru(args) {\n return new GRU(args);\n}\n/**\n * Cell class for `GRU`.\n *\n * `GRUCell` is distinct from the `RNN` subclass `GRU` in that its\n * `apply` method takes the input data of only a single time step and returns\n * the cell's output at the time step, while `GRU` takes the input data\n * over a number of time steps. For example:\n *\n * ```js\n * const cell = tf.layers.gruCell({units: 2});\n * const input = tf.input({shape: [10]});\n * const output = cell.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10]: This is the cell's output at a single time step. The 1st\n * // dimension is the unknown batch size.\n * ```\n *\n * Instance(s) of `GRUCell` can be used to construct `RNN` layers. The\n * most typical use of this workflow is to combine a number of cells into a\n * stacked RNN cell (i.e., `StackedRNNCell` internally) and use it to create an\n * RNN. For example:\n *\n * ```js\n * const cells = [\n * tf.layers.gruCell({units: 4}),\n * tf.layers.gruCell({units: 8}),\n * ];\n * const rnn = tf.layers.rnn({cell: cells, returnSequences: true});\n *\n * // Create an input with 10 time steps and a length-20 vector at each step.\n * const input = tf.input({shape: [10, 20]});\n * const output = rnn.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10, 8]: 1st dimension is unknown batch size; 2nd dimension is the\n * // same as the sequence length of `input`, due to `returnSequences`: `true`;\n * // 3rd dimension is the last `gruCell`'s number of units.\n * ```\n *\n * To create an `RNN` consisting of only *one* `GRUCell`, use the\n * `tf.layers.gru`.\n *\n * @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'}\n */\nexport function gruCell(args) {\n return new GRUCell(args);\n}\n/**\n * Long-Short Term Memory layer - Hochreiter 1997.\n *\n * This is an `RNN` layer consisting of one `LSTMCell`. However, unlike\n * the underlying `LSTMCell`, the `apply` method of `LSTM` operates\n * on a sequence of inputs. The shape of the input (not including the first,\n * batch dimension) needs to be at least 2-D, with the first dimension being\n * time steps. 
For example:\n *\n * ```js\n * const lstm = tf.layers.lstm({units: 8, returnSequences: true});\n *\n * // Create an input with 10 time steps.\n * const input = tf.input({shape: [10, 20]});\n * const output = lstm.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10, 8]: 1st dimension is unknown batch size; 2nd dimension is the\n * // same as the sequence length of `input`, due to `returnSequences`: `true`;\n * // 3rd dimension is the `LSTMCell`'s number of units.\n *\n * @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'}\n */\nexport function lstm(args) {\n return new LSTM(args);\n}\n/**\n * Cell class for `LSTM`.\n *\n * `LSTMCell` is distinct from the `RNN` subclass `LSTM` in that its\n * `apply` method takes the input data of only a single time step and returns\n * the cell's output at the time step, while `LSTM` takes the input data\n * over a number of time steps. For example:\n *\n * ```js\n * const cell = tf.layers.lstmCell({units: 2});\n * const input = tf.input({shape: [10]});\n * const output = cell.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10]: This is the cell's output at a single time step. The 1st\n * // dimension is the unknown batch size.\n * ```\n *\n * Instance(s) of `LSTMCell` can be used to construct `RNN` layers. The\n * most typical use of this workflow is to combine a number of cells into a\n * stacked RNN cell (i.e., `StackedRNNCell` internally) and use it to create an\n * RNN. For example:\n *\n * ```js\n * const cells = [\n * tf.layers.lstmCell({units: 4}),\n * tf.layers.lstmCell({units: 8}),\n * ];\n * const rnn = tf.layers.rnn({cell: cells, returnSequences: true});\n *\n * // Create an input with 10 time steps and a length-20 vector at each step.\n * const input = tf.input({shape: [10, 20]});\n * const output = rnn.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10, 8]: 1st dimension is unknown batch size; 2nd dimension is the\n * // same as the sequence length of `input`, due to `returnSequences`: `true`;\n * // 3rd dimension is the last `lstmCell`'s number of units.\n * ```\n *\n * To create an `RNN` consisting of only *one* `LSTMCell`, use the\n * `tf.layers.lstm`.\n *\n * @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'}\n */\nexport function lstmCell(args) {\n return new LSTMCell(args);\n}\n/**\n * Fully-connected RNN where the output is to be fed back to input.\n *\n * This is an `RNN` layer consisting of one `SimpleRNNCell`. However, unlike\n * the underlying `SimpleRNNCell`, the `apply` method of `SimpleRNN` operates\n * on a sequence of inputs. The shape of the input (not including the first,\n * batch dimension) needs to be at least 2-D, with the first dimension being\n * time steps. 
For example:\n *\n * ```js\n * const rnn = tf.layers.simpleRNN({units: 8, returnSequences: true});\n *\n * // Create an input with 10 time steps.\n * const input = tf.input({shape: [10, 20]});\n * const output = rnn.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10, 8]: 1st dimension is unknown batch size; 2nd dimension is the\n * // same as the sequence length of `input`, due to `returnSequences`: `true`;\n * // 3rd dimension is the `SimpleRNNCell`'s number of units.\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'}\n */\nexport function simpleRNN(args) {\n return new SimpleRNN(args);\n}\n/**\n * Cell class for `SimpleRNN`.\n *\n * `SimpleRNNCell` is distinct from the `RNN` subclass `SimpleRNN` in that its\n * `apply` method takes the input data of only a single time step and returns\n * the cell's output at the time step, while `SimpleRNN` takes the input data\n * over a number of time steps. For example:\n *\n * ```js\n * const cell = tf.layers.simpleRNNCell({units: 2});\n * const input = tf.input({shape: [10]});\n * const output = cell.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10]: This is the cell's output at a single time step. The 1st\n * // dimension is the unknown batch size.\n * ```\n *\n * Instance(s) of `SimpleRNNCell` can be used to construct `RNN` layers. The\n * most typical use of this workflow is to combine a number of cells into a\n * stacked RNN cell (i.e., `StackedRNNCell` internally) and use it to create an\n * RNN. For example:\n *\n * ```js\n * const cells = [\n * tf.layers.simpleRNNCell({units: 4}),\n * tf.layers.simpleRNNCell({units: 8}),\n * ];\n * const rnn = tf.layers.rnn({cell: cells, returnSequences: true});\n *\n * // Create an input with 10 time steps and a length-20 vector at each step.\n * const input = tf.input({shape: [10, 20]});\n * const output = rnn.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10, 8]: 1st dimension is unknown batch size; 2nd dimension is the\n * // same as the sequence length of `input`, due to `returnSequences`: `true`;\n * // 3rd dimension is the last `SimpleRNNCell`'s number of units.\n * ```\n *\n * To create an `RNN` consisting of only *one* `SimpleRNNCell`, use the\n * `tf.layers.simpleRNN`.\n *\n * @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'}\n */\nexport function simpleRNNCell(args) {\n return new SimpleRNNCell(args);\n}\n/**\n * Convolutional LSTM layer - Xingjian Shi 2015.\n *\n * This is an `ConvRNN2D` layer consisting of one `ConvLSTM2DCell`. However,\n * unlike the underlying `ConvLSTM2DCell`, the `apply` method of `ConvLSTM2D`\n * operates on a sequence of inputs. The shape of the input (not including the\n * first, batch dimension) needs to be 4-D, with the first dimension being time\n * steps. 
For example:\n *\n * ```js\n * const filters = 3;\n * const kernelSize = 3;\n *\n * const batchSize = 4;\n * const sequenceLength = 2;\n * const size = 5;\n * const channels = 3;\n *\n * const inputShape = [batchSize, sequenceLength, size, size, channels];\n * const input = tf.ones(inputShape);\n *\n * const layer = tf.layers.convLstm2d({filters, kernelSize});\n *\n * const output = layer.apply(input);\n * ```\n */\n/** @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'} */\nexport function convLstm2d(args) {\n return new ConvLSTM2D(args);\n}\n/**\n * Cell class for `ConvLSTM2D`.\n *\n * `ConvLSTM2DCell` is distinct from the `ConvRNN2D` subclass `ConvLSTM2D` in\n * that its `call` method takes the input data of only a single time step and\n * returns the cell's output at the time step, while `ConvLSTM2D` takes the\n * input data over a number of time steps. For example:\n *\n * ```js\n * const filters = 3;\n * const kernelSize = 3;\n *\n * const sequenceLength = 1;\n * const size = 5;\n * const channels = 3;\n *\n * const inputShape = [sequenceLength, size, size, channels];\n * const input = tf.ones(inputShape);\n *\n * const cell = tf.layers.convLstm2dCell({filters, kernelSize});\n *\n * cell.build(input.shape);\n *\n * const outputSize = size - kernelSize + 1;\n * const outShape = [sequenceLength, outputSize, outputSize, filters];\n *\n * const initialH = tf.zeros(outShape);\n * const initialC = tf.zeros(outShape);\n *\n * const [o, h, c] = cell.call([input, initialH, initialC], {});\n * ```\n */\n/** @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'} */\nexport function convLstm2dCell(args) {\n return new ConvLSTM2DCell(args);\n}\n/**\n * Base class for recurrent layers.\n *\n * Input shape:\n * 3D tensor with shape `[batchSize, timeSteps, inputDim]`.\n *\n * Output shape:\n * - if `returnState`, an Array of tensors (i.e., `tf.Tensor`s). The first\n * tensor is the output. The remaining tensors are the states at the\n * last time step, each with shape `[batchSize, units]`.\n * - if `returnSequences`, the output will have shape\n * `[batchSize, timeSteps, units]`.\n * - else, the output will have shape `[batchSize, units]`.\n *\n * Masking:\n * This layer supports masking for input data with a variable number\n * of timesteps. To introduce masks to your data,\n * use an embedding layer with the `mask_zero` parameter\n * set to `True`.\n *\n * Notes on using statefulness in RNNs:\n * You can set RNN layers to be 'stateful', which means that the states\n * computed for the samples in one batch will be reused as initial states\n * for the samples in the next batch. This assumes a one-to-one mapping\n * between samples in different successive batches.\n *\n * To enable statefulness:\n * - specify `stateful: true` in the layer constructor.\n * - specify a fixed batch size for your model, by passing\n * if sequential model:\n * `batchInputShape=[...]` to the first layer in your model.\n * else for functional model with 1 or more Input layers:\n * `batchShape=[...]` to all the first layers in your model.\n * This is the expected shape of your inputs *including the batch size*.\n * It should be a tuple of integers, e.g. 
`(32, 10, 100)`.\n * - specify `shuffle=False` when calling fit().\n *\n * To reset the states of your model, call `.resetStates()` on either\n * a specific layer, or on your entire model.\n *\n * Note on specifying the initial state of RNNs\n * You can specify the initial state of RNN layers symbolically by\n * calling them with the option `initialState`. The value of\n * `initialState` should be a tensor or list of tensors representing\n * the initial state of the RNN layer.\n *\n * You can specify the initial state of RNN layers numerically by\n * calling `resetStates` with the keyword argument `states`. The value of\n * `states` should be a numpy array or list of numpy arrays representing\n * the initial state of the RNN layer.\n *\n * Note on passing external constants to RNNs\n * You can pass \"external\" constants to the cell using the `constants`\n * keyword argument of `RNN.call` method. This requires that the `cell.call`\n * method accepts the same keyword argument `constants`. Such constants\n * can be used to conditon the cell transformation on additional static inputs\n * (not changing over time), a.k.a an attention mechanism.\n *\n * @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'}\n */\nexport function rnn(args) {\n return new RNN(args);\n}\n/**\n * Wrapper allowing a stack of RNN cells to behave as a single cell.\n *\n * Used to implement efficient stacked RNNs.\n *\n * @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'}\n */\nexport function stackedRNNCells(args) {\n return new StackedRNNCells(args);\n}\n// Wrapper Layers.\n/** @doc {heading: 'Layers', subheading: 'Wrapper', namespace: 'layers'} */\nexport function bidirectional(args) {\n return new Bidirectional(args);\n}\n/**\n * This wrapper applies a layer to every temporal slice of an input.\n *\n * The input should be at least 3D, and the dimension of the index `1` will be\n * considered to be the temporal dimension.\n *\n * Consider a batch of 32 samples, where each sample is a sequence of 10 vectors\n * of 16 dimensions. 
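// --- Editor's addition (illustrative sketch, not part of the bundled tfjs source) ---
// The `bidirectional` wrapper exported above ships with no usage example. A minimal
// hedged sketch, assuming the standard `tf.layers` API; shapes are illustrative only:
import * as tf from '@tensorflow/tfjs';

const biModel = tf.sequential();
biModel.add(tf.layers.bidirectional({
  layer: tf.layers.simpleRNN({units: 4, returnSequences: true}),
  inputShape: [8, 12],  // 8 time steps, 12 features per step
}));
console.log(JSON.stringify(biModel.outputs[0].shape));
// [null, 8, 8]: forward and backward outputs (4 units each) are concatenated
// by default (mergeMode: 'concat').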
The batch input shape of the layer is then `[32, 10,\n * 16]`, and the `inputShape`, not including the sample dimension, is\n * `[10, 16]`.\n *\n * You can then use `TimeDistributed` to apply a `Dense` layer to each of the 10\n * timesteps, independently:\n *\n * ```js\n * const model = tf.sequential();\n * model.add(tf.layers.timeDistributed({\n * layer: tf.layers.dense({units: 8}),\n * inputShape: [10, 16],\n * }));\n *\n * // Now model.outputShape = [null, 10, 8].\n * // The output will then have shape `[32, 10, 8]`.\n *\n * // In subsequent layers, there is no need for `inputShape`:\n * model.add(tf.layers.timeDistributed({layer: tf.layers.dense({units: 32})}));\n * console.log(JSON.stringify(model.outputs[0].shape));\n * // Now model.outputShape = [null, 10, 32].\n * ```\n *\n * The output will then have shape `[32, 10, 32]`.\n *\n * `TimeDistributed` can be used with arbitrary layers, not just `Dense`, for\n * instance a `Conv2D` layer.\n *\n * ```js\n * const model = tf.sequential();\n * model.add(tf.layers.timeDistributed({\n * layer: tf.layers.conv2d({filters: 64, kernelSize: [3, 3]}),\n * inputShape: [10, 299, 299, 3],\n * }));\n * console.log(JSON.stringify(model.outputs[0].shape));\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Wrapper', namespace: 'layers'}\n */\nexport function timeDistributed(args) {\n return new TimeDistributed(args);\n}\n// Aliases for pooling.\nexport const globalMaxPool1d = globalMaxPooling1d;\nexport const globalMaxPool2d = globalMaxPooling2d;\nexport const maxPool1d = maxPooling1d;\nexport const maxPool2d = maxPooling2d;\nexport { Layer, RNN, RNNCell, input /* alias for tf.input */ };\n/**\n * Apply additive zero-centered Gaussian noise.\n *\n * As it is a regularization layer, it is only active at training time.\n *\n * This is useful to mitigate overfitting\n * (you could see it as a form of random data augmentation).\n * Gaussian Noise (GS) is a natural choice as corruption process\n * for real valued inputs.\n *\n * # Arguments\n * stddev: float, standard deviation of the noise distribution.\n *\n * # Input shape\n * Arbitrary. Use the keyword argument `input_shape`\n * (tuple of integers, does not include the samples axis)\n * when using this layer as the first layer in a model.\n *\n * # Output shape\n * Same shape as input.\n *\n * @doc {heading: 'Layers', subheading: 'Noise', namespace: 'layers'}\n */\nexport function gaussianNoise(args) {\n return new GaussianNoise(args);\n}\n/**\n * Apply multiplicative 1-centered Gaussian noise.\n *\n * As it is a regularization layer, it is only active at training time.\n *\n * Arguments:\n * - `rate`: float, drop probability (as with `Dropout`).\n * The multiplicative noise will have\n * standard deviation `sqrt(rate / (1 - rate))`.\n *\n * Input shape:\n * Arbitrary. 
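// --- Editor's addition (illustrative sketch, not part of the bundled tfjs source) ---
// The noise layers exported in this file (`gaussianNoise`, `gaussianDropout`) carry
// no code sample. A minimal hedged sketch, assuming the standard `tf.layers` API:
import * as tf from '@tensorflow/tfjs';

const noisyModel = tf.sequential();
// Additive zero-centered noise on the inputs; active only while training.
noisyModel.add(tf.layers.gaussianNoise({stddev: 0.1, inputShape: [20]}));
noisyModel.add(tf.layers.dense({units: 4, activation: 'relu'}));
// Multiplicative 1-centered noise, comparable to dropout with rate 0.2.
noisyModel.add(tf.layers.gaussianDropout({rate: 0.2}));
noisyModel.add(tf.layers.dense({units: 1}));
// During predict() both noise layers act as identity mappings.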
Use the keyword argument `inputShape`\n * (tuple of integers, does not include the samples axis)\n * when using this layer as the first layer in a model.\n *\n * Output shape:\n * Same shape as input.\n *\n * References:\n * - [Dropout: A Simple Way to Prevent Neural Networks from Overfitting](\n * http://www.cs.toronto.edu/~rsalakhu/papers/srivastava14a.pdf)\n *\n * @doc {heading: 'Layers', subheading: 'Noise', namespace: 'layers'}\n */\nexport function gaussianDropout(args) {\n return new GaussianDropout(args);\n}\n/**\n * Applies Alpha Dropout to the input.\n *\n * As it is a regularization layer, it is only active at training time.\n *\n * Alpha Dropout is a `Dropout` that keeps mean and variance of inputs\n * to their original values, in order to ensure the self-normalizing property\n * even after this dropout.\n * Alpha Dropout fits well to Scaled Exponential Linear Units\n * by randomly setting activations to the negative saturation value.\n *\n * Arguments:\n * - `rate`: float, drop probability (as with `Dropout`).\n * The multiplicative noise will have\n * standard deviation `sqrt(rate / (1 - rate))`.\n * - `noise_shape`: A 1-D `Tensor` of type `int32`, representing the\n * shape for randomly generated keep/drop flags.\n *\n * Input shape:\n * Arbitrary. Use the keyword argument `inputShape`\n * (tuple of integers, does not include the samples axis)\n * when using this layer as the first layer in a model.\n *\n * Output shape:\n * Same shape as input.\n *\n * References:\n * - [Self-Normalizing Neural Networks](https://arxiv.org/abs/1706.02515)\n *\n * @doc {heading: 'Layers', subheading: 'Noise', namespace: 'layers'}\n */\nexport function alphaDropout(args) {\n return new AlphaDropout(args);\n}\n/**\n * Masks a sequence by using a mask value to skip timesteps.\n *\n * If all features for a given sample timestep are equal to `mask_value`,\n * then the sample timestep will be masked (skipped) in all downstream layers\n * (as long as they support masking).\n *\n * If any downstream layer does not support masking yet receives such\n * an input mask, an exception will be raised.\n *\n * Arguments:\n * - `maskValue`: Either None or mask value to skip.\n *\n * Input shape:\n * Arbitrary. Use the keyword argument `inputShape`\n * (tuple of integers, does not include the samples axis)\n * when using this layer as the first layer in a model.\n *\n * Output shape:\n * Same shape as input.\n *\n * @doc {heading: 'Layers', subheading: 'Mask', namespace: 'layers'}\n */\nexport function masking(args) {\n return new Masking(args);\n}\n//# sourceMappingURL=exports_layers.js.map", "import * as losses from './losses';\nimport * as metrics from './metrics';\n/**\n * Binary accuracy metric function.\n *\n * `yTrue` and `yPred` can have 0-1 values. 
Example:\n * ```js\n * const x = tf.tensor2d([[1, 1, 1, 1], [0, 0, 0, 0]], [2, 4]);\n * const y = tf.tensor2d([[1, 0, 1, 0], [0, 0, 0, 1]], [2, 4]);\n * const accuracy = tf.metrics.binaryAccuracy(x, y);\n * accuracy.print();\n * ```\n *\n * `yTrue` and `yPred` can also have floating-number values between 0 and 1, in\n * which case the values will be thresholded at 0.5 to yield 0-1 values (i.e.,\n * a value >= 0.5 and <= 1.0 is interpreted as 1.\n * )\n * Example:\n * ```js\n * const x = tf.tensor1d([1, 1, 1, 1, 0, 0, 0, 0]);\n * const y = tf.tensor1d([0.2, 0.4, 0.6, 0.8, 0.2, 0.3, 0.4, 0.7]);\n * const accuracy = tf.metrics.binaryAccuracy(x, y);\n * accuracy.print();\n * ```\n *\n * @param yTrue Binary Tensor of truth.\n * @param yPred Binary Tensor of prediction.\n * @return Accuracy Tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function binaryAccuracy(yTrue, yPred) {\n return metrics.binaryAccuracy(yTrue, yPred);\n}\n/**\n * Binary crossentropy metric function.\n *\n * Example:\n * ```js\n * const x = tf.tensor2d([[0], [1], [1], [1]]);\n * const y = tf.tensor2d([[0], [0], [0.5], [1]]);\n * const crossentropy = tf.metrics.binaryCrossentropy(x, y);\n * crossentropy.print();\n * ```\n *\n * @param yTrue Binary Tensor of truth.\n * @param yPred Binary Tensor of prediction, probabilities for the `1` case.\n * @return Accuracy Tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function binaryCrossentropy(yTrue, yPred) {\n return metrics.binaryCrossentropy(yTrue, yPred);\n}\n/**\n * Sparse categorical accuracy metric function.\n *\n * Example:\n * ```js\n *\n * const yTrue = tf.tensor1d([1, 1, 2, 2, 0]);\n * const yPred = tf.tensor2d(\n * [[0, 1, 0], [1, 0, 0], [0, 0.4, 0.6], [0, 0.6, 0.4], [0.7, 0.3, 0]]);\n * const crossentropy = tf.metrics.sparseCategoricalAccuracy(yTrue, yPred);\n * crossentropy.print();\n * ```\n *\n * @param yTrue True labels: indices.\n * @param yPred Predicted probabilities or logits.\n * @returns Accuracy tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function sparseCategoricalAccuracy(yTrue, yPred) {\n return metrics.sparseCategoricalAccuracy(yTrue, yPred);\n}\n/**\n * Categorical accuracy metric function.\n *\n * Example:\n * ```js\n * const x = tf.tensor2d([[0, 0, 0, 1], [0, 0, 0, 1]]);\n * const y = tf.tensor2d([[0.1, 0.8, 0.05, 0.05], [0.1, 0.05, 0.05, 0.8]]);\n * const accuracy = tf.metrics.categoricalAccuracy(x, y);\n * accuracy.print();\n * ```\n *\n * @param yTrue Binary Tensor of truth: one-hot encoding of categories.\n * @param yPred Binary Tensor of prediction: probabilities or logits for the\n * same categories as in `yTrue`.\n * @return Accuracy Tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function categoricalAccuracy(yTrue, yPred) {\n return metrics.categoricalAccuracy(yTrue, yPred);\n}\n/**\n * Categorical crossentropy between an output tensor and a target tensor.\n *\n * @param target A tensor of the same shape as `output`.\n * @param output A tensor resulting from a softmax (unless `fromLogits` is\n * `true`, in which case `output` is expected to be the logits).\n * @param fromLogits Boolean, whether `output` is the result of a softmax, or is\n * a tensor of logits.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function categoricalCrossentropy(yTrue, yPred) {\n return metrics.categoricalCrossentropy(yTrue, yPred);\n}\n/**\n * Computes the precision of the predictions with respect to the labels.\n *\n * 
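// --- Editor's addition (illustrative, not part of the bundled tfjs source) ---
// Unlike the surrounding metrics, `categoricalCrossentropy` is documented above
// without an example. A minimal hedged sketch, assuming one-hot labels and softmax
// outputs:
import * as tf from '@tensorflow/tfjs';

const yTrueOneHot = tf.tensor2d([[0, 1, 0], [1, 0, 0]]);
const yPredProbs = tf.tensor2d([[0.1, 0.8, 0.1], [0.6, 0.3, 0.1]]);
// Per-example cross-entropy, i.e. [-log(0.8), -log(0.6)].
tf.metrics.categoricalCrossentropy(yTrueOneHot, yPredProbs).print();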
Example:\n * ```js\n * const x = tf.tensor2d(\n * [\n * [0, 0, 0, 1],\n * [0, 1, 0, 0],\n * [0, 0, 0, 1],\n * [1, 0, 0, 0],\n * [0, 0, 1, 0]\n * ]\n * );\n *\n * const y = tf.tensor2d(\n * [\n * [0, 0, 1, 0],\n * [0, 1, 0, 0],\n * [0, 0, 0, 1],\n * [0, 1, 0, 0],\n * [0, 1, 0, 0]\n * ]\n * );\n *\n * const precision = tf.metrics.precision(x, y);\n * precision.print();\n * ```\n *\n * @param yTrue The ground truth values. Expected to be contain only 0-1 values.\n * @param yPred The predicted values. Expected to be contain only 0-1 values.\n * @return Precision Tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function precision(yTrue, yPred) {\n return metrics.precision(yTrue, yPred);\n}\n/**\n * Computes the recall of the predictions with respect to the labels.\n *\n * Example:\n * ```js\n * const x = tf.tensor2d(\n * [\n * [0, 0, 0, 1],\n * [0, 1, 0, 0],\n * [0, 0, 0, 1],\n * [1, 0, 0, 0],\n * [0, 0, 1, 0]\n * ]\n * );\n *\n * const y = tf.tensor2d(\n * [\n * [0, 0, 1, 0],\n * [0, 1, 0, 0],\n * [0, 0, 0, 1],\n * [0, 1, 0, 0],\n * [0, 1, 0, 0]\n * ]\n * );\n *\n * const recall = tf.metrics.recall(x, y);\n * recall.print();\n * ```\n *\n * @param yTrue The ground truth values. Expected to be contain only 0-1 values.\n * @param yPred The predicted values. Expected to be contain only 0-1 values.\n * @return Recall Tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function recall(yTrue, yPred) {\n return metrics.recall(yTrue, yPred);\n}\n/**\n * Loss or metric function: Cosine proximity.\n *\n * Mathematically, cosine proximity is defined as:\n * `-sum(l2Normalize(yTrue) * l2Normalize(yPred))`,\n * wherein `l2Normalize()` normalizes the L2 norm of the input to 1 and `*`\n * represents element-wise multiplication.\n *\n * ```js\n * const yTrue = tf.tensor2d([[1, 0], [1, 0]]);\n * const yPred = tf.tensor2d([[1 / Math.sqrt(2), 1 / Math.sqrt(2)], [0, 1]]);\n * const proximity = tf.metrics.cosineProximity(yTrue, yPred);\n * proximity.print();\n * ```\n *\n * @param yTrue Truth Tensor.\n * @param yPred Prediction Tensor.\n * @return Cosine proximity Tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function cosineProximity(yTrue, yPred) {\n return losses.cosineProximity(yTrue, yPred);\n}\n/**\n * Loss or metric function: Mean absolute error.\n *\n * Mathematically, mean absolute error is defined as:\n * `mean(abs(yPred - yTrue))`,\n * wherein the `mean` is applied over feature dimensions.\n *\n * ```js\n * const yTrue = tf.tensor2d([[0, 1], [0, 0], [2, 3]]);\n * const yPred = tf.tensor2d([[0, 1], [0, 1], [-2, -3]]);\n * const mse = tf.metrics.meanAbsoluteError(yTrue, yPred);\n * mse.print();\n * ```\n *\n * @param yTrue Truth Tensor.\n * @param yPred Prediction Tensor.\n * @return Mean absolute error Tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function meanAbsoluteError(yTrue, yPred) {\n return losses.meanAbsoluteError(yTrue, yPred);\n}\n/**\n * Loss or metric function: Mean absolute percentage error.\n *\n * ```js\n * const yTrue = tf.tensor2d([[0, 1], [10, 20]]);\n * const yPred = tf.tensor2d([[0, 1], [11, 24]]);\n * const mse = tf.metrics.meanAbsolutePercentageError(yTrue, yPred);\n * mse.print();\n * ```\n *\n * Aliases: `tf.metrics.MAPE`, `tf.metrics.mape`.\n *\n * @param yTrue Truth Tensor.\n * @param yPred Prediction Tensor.\n * @return Mean absolute percentage error Tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function 
meanAbsolutePercentageError(yTrue, yPred) {\n return losses.meanAbsolutePercentageError(yTrue, yPred);\n}\nexport function MAPE(yTrue, yPred) {\n return losses.meanAbsolutePercentageError(yTrue, yPred);\n}\nexport function mape(yTrue, yPred) {\n return losses.meanAbsolutePercentageError(yTrue, yPred);\n}\n/**\n * Loss or metric function: Mean squared error.\n *\n * ```js\n * const yTrue = tf.tensor2d([[0, 1], [3, 4]]);\n * const yPred = tf.tensor2d([[0, 1], [-3, -4]]);\n * const mse = tf.metrics.meanSquaredError(yTrue, yPred);\n * mse.print();\n * ```\n *\n * Aliases: `tf.metrics.MSE`, `tf.metrics.mse`.\n *\n * @param yTrue Truth Tensor.\n * @param yPred Prediction Tensor.\n * @return Mean squared error Tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function meanSquaredError(yTrue, yPred) {\n return losses.meanSquaredError(yTrue, yPred);\n}\nexport function MSE(yTrue, yPred) {\n return losses.meanSquaredError(yTrue, yPred);\n}\nexport function mse(yTrue, yPred) {\n return losses.meanSquaredError(yTrue, yPred);\n}\n//# sourceMappingURL=exports_metrics.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nexport { modelFromJSON } from './models';\n//# sourceMappingURL=exports_models.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport * as regularizers from './regularizers';\n// tslint:disable-next-line:max-line-length\nimport { L1L2 } from './regularizers';\n/**\n * Regularizer for L1 and L2 regularization.\n *\n * Adds a term to the loss to penalize large weights:\n * loss += sum(l1 * abs(x)) + sum(l2 * x^2)\n *\n * @doc {heading: 'Regularizers', namespace: 'regularizers'}\n */\nexport function l1l2(config) {\n return new L1L2(config);\n}\n/**\n * Regularizer for L1 regularization.\n *\n * Adds a term to the loss to penalize large weights:\n * loss += sum(l1 * abs(x))\n * @param args l1 config.\n *\n * @doc {heading: 'Regularizers', namespace: 'regularizers'}\n */\nexport function l1(config) {\n return regularizers.l1(config);\n}\n/**\n * Regularizer for L2 regularization.\n *\n * Adds a term to the loss to penalize large weights:\n * loss += sum(l2 * x^2)\n * @param args l2 config.\n *\n * @doc {heading: 'Regularizers', namespace: 'regularizers'}\n */\nexport function l2(config) {\n return regularizers.l2(config);\n}\n//# sourceMappingURL=exports_regularizers.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* Original source: keras/callbacks.py */\nimport { BaseCallback } from './base_callbacks';\nimport { LayersModel } from './engine/training';\nimport { NotImplementedError } from './errors';\nimport { resolveScalarsInLogs } from './logs';\nexport class Callback extends BaseCallback {\n constructor() {\n super(...arguments);\n /** Instance of `keras.models.Model`. Reference of the model being trained. 
*/\n this.model = null;\n }\n setModel(model) {\n if (!(model instanceof LayersModel)) {\n throw new Error('model must be a LayersModel, not some other Container');\n }\n this.model = model;\n }\n}\nfunction less(currVal, prevVal) {\n return currVal < prevVal;\n}\nfunction greater(currVal, prevVal) {\n return currVal > prevVal;\n}\n/**\n * A Callback that stops training when a monitored quantity has stopped\n * improving.\n */\nexport class EarlyStopping extends Callback {\n constructor(args) {\n super();\n if (args == null) {\n args = {};\n }\n if (args.restoreBestWeights) {\n throw new NotImplementedError('restoreBestWeights = True is not implemented in EarlyStopping yet.');\n }\n this.monitor = args.monitor || 'val_loss';\n this.minDelta = Math.abs(args.minDelta || 0);\n this.patience = args.patience || 0;\n this.verbose = args.verbose || 0;\n this.mode = args.mode || 'auto';\n this.baseline = args.baseline;\n if (['auto', 'min', 'max'].indexOf(this.mode) === -1) {\n console.warn(`EarlyStopping mode '${this.mode}' is invalid. ` +\n `Falling back to mode 'auto'.`);\n this.mode = 'auto';\n }\n if (this.mode === 'min') {\n this.monitorFunc = less;\n }\n else if (this.mode === 'max') {\n this.monitorFunc = greater;\n }\n else {\n // For mode === 'auto'.\n if (this.monitor.indexOf('acc') !== -1) {\n this.monitorFunc = greater;\n }\n else {\n this.monitorFunc = less;\n }\n }\n if (this.monitorFunc === less) {\n this.minDelta *= -1;\n }\n }\n async onTrainBegin(logs) {\n this.wait = 0;\n this.stoppedEpoch = 0;\n if (this.baseline != null) {\n this.best = this.baseline;\n }\n else {\n this.best = this.monitorFunc === less ? Infinity : -Infinity;\n }\n }\n async onEpochEnd(epoch, logs) {\n await resolveScalarsInLogs(logs);\n const current = this.getMonitorValue(logs);\n if (current == null) {\n return;\n }\n if (this.monitorFunc(current - this.minDelta, this.best)) {\n this.best = current;\n this.wait = 0;\n // TODO(cais): Logic for restoreBestWeights.\n }\n else {\n this.wait++;\n if (this.wait >= this.patience) {\n this.stoppedEpoch = epoch;\n this.model.stopTraining = true;\n }\n // TODO(cais): Logic for restoreBestWeights.\n }\n }\n async onTrainEnd(logs) {\n if (this.stoppedEpoch > 0 && this.verbose) {\n console.log(`Epoch ${this.stoppedEpoch}: early stopping.`);\n }\n }\n getMonitorValue(logs) {\n if (logs == null) {\n logs = {};\n }\n const monitorValue = logs[this.monitor];\n if (monitorValue == null) {\n console.warn(`Metric for EarlyStopping ${this.monitor} is not available. 
` +\n `Available metrics are: ${Object.keys(logs)}`);\n }\n return monitorValue;\n }\n}\n/**\n * Factory function for a Callback that stops training when a monitored\n * quantity has stopped improving.\n *\n * Early stopping is a type of regularization, and protects model against\n * overfitting.\n *\n * The following example based on fake data illustrates how this callback\n * can be used during `tf.LayersModel.fit()`:\n *\n * ```js\n * const model = tf.sequential();\n * model.add(tf.layers.dense({\n * units: 3,\n * activation: 'softmax',\n * kernelInitializer: 'ones',\n * inputShape: [2]\n * }));\n * const xs = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n * const ys = tf.tensor2d([[1, 0, 0], [0, 1, 0]], [2, 3]);\n * const xsVal = tf.tensor2d([4, 3, 2, 1], [2, 2]);\n * const ysVal = tf.tensor2d([[0, 0, 1], [0, 1, 0]], [2, 3]);\n * model.compile(\n * {loss: 'categoricalCrossentropy', optimizer: 'sgd', metrics: ['acc']});\n *\n * // Without the EarlyStopping callback, the val_acc value would be:\n * // 0.5, 0.5, 0.5, 0.5, ...\n * // With val_acc being monitored, training should stop after the 2nd epoch.\n * const history = await model.fit(xs, ys, {\n * epochs: 10,\n * validationData: [xsVal, ysVal],\n * callbacks: tf.callbacks.earlyStopping({monitor: 'val_acc'})\n * });\n *\n * // Expect to see a length-2 array.\n * console.log(history.history.val_acc);\n * ```\n *\n * @doc {\n * heading: 'Callbacks',\n * namespace: 'callbacks'\n * }\n */\nexport function earlyStopping(args) {\n return new EarlyStopping(args);\n}\nexport const callbacks = { earlyStopping };\n//# sourceMappingURL=callbacks.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n// This file lists all exports of TensorFlow.js Layers\nimport * as constraints from './exports_constraints';\nimport * as initializers from './exports_initializers';\nimport * as layers from './exports_layers';\nimport * as metrics from './exports_metrics';\nimport * as models from './exports_models';\nimport * as regularizers from './exports_regularizers';\nexport { CallbackList, CustomCallback, History } from './base_callbacks';\nexport { Callback, callbacks, EarlyStopping } from './callbacks';\nexport { InputSpec, SymbolicTensor } from './engine/topology';\nexport { LayersModel } from './engine/training';\nexport { input, loadLayersModel, model, registerCallbackConstructor, sequential } from './exports';\nexport { RNN } from './layers/recurrent';\nexport { Sequential } from './models';\nexport { LayerVariable } from './variables';\nexport { version as version_layers } from './version';\nexport { constraints, initializers, layers, metrics, models, regularizers };\n//# sourceMappingURL=index.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
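// --- Editor's addition (illustrative sketch, not part of the bundled tfjs source) ---
// `earlyStopping` above can be combined with the `CustomCallback` export listed in
// index.js; a hedged sketch (the model and tensors are assumed to be defined elsewhere):
import * as tf from '@tensorflow/tfjs';

async function fitWithEarlyStopping(model, xs, ys, xsVal, ysVal) {
  return model.fit(xs, ys, {
    epochs: 50,
    validationData: [xsVal, ysVal],
    callbacks: [
      tf.callbacks.earlyStopping({monitor: 'val_loss', patience: 3}),
      new tf.CustomCallback({
        onEpochEnd: async (epoch, logs) => console.log(`epoch ${epoch}:`, logs.val_loss),
      }),
    ],
  });
}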
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\n/** DataType enum. */\nexport var DataType;\n(function (DataType) {\n DataType[DataType[\"DT_INVALID\"] = 0] = \"DT_INVALID\";\n DataType[DataType[\"DT_FLOAT\"] = 1] = \"DT_FLOAT\";\n DataType[DataType[\"DT_DOUBLE\"] = 2] = \"DT_DOUBLE\";\n DataType[DataType[\"DT_INT32\"] = 3] = \"DT_INT32\";\n DataType[DataType[\"DT_UINT8\"] = 4] = \"DT_UINT8\";\n DataType[DataType[\"DT_INT16\"] = 5] = \"DT_INT16\";\n DataType[DataType[\"DT_INT8\"] = 6] = \"DT_INT8\";\n DataType[DataType[\"DT_STRING\"] = 7] = \"DT_STRING\";\n DataType[DataType[\"DT_COMPLEX64\"] = 8] = \"DT_COMPLEX64\";\n DataType[DataType[\"DT_INT64\"] = 9] = \"DT_INT64\";\n DataType[DataType[\"DT_BOOL\"] = 10] = \"DT_BOOL\";\n DataType[DataType[\"DT_QINT8\"] = 11] = \"DT_QINT8\";\n DataType[DataType[\"DT_QUINT8\"] = 12] = \"DT_QUINT8\";\n DataType[DataType[\"DT_QINT32\"] = 13] = \"DT_QINT32\";\n DataType[DataType[\"DT_BFLOAT16\"] = 14] = \"DT_BFLOAT16\";\n DataType[DataType[\"DT_FLOAT_REF\"] = 101] = \"DT_FLOAT_REF\";\n DataType[DataType[\"DT_DOUBLE_REF\"] = 102] = \"DT_DOUBLE_REF\";\n DataType[DataType[\"DT_INT32_REF\"] = 103] = \"DT_INT32_REF\";\n DataType[DataType[\"DT_UINT8_REF\"] = 104] = \"DT_UINT8_REF\";\n DataType[DataType[\"DT_INT16_REF\"] = 105] = \"DT_INT16_REF\";\n DataType[DataType[\"DT_INT8_REF\"] = 106] = \"DT_INT8_REF\";\n DataType[DataType[\"DT_STRING_REF\"] = 107] = \"DT_STRING_REF\";\n DataType[DataType[\"DT_COMPLEX64_REF\"] = 108] = \"DT_COMPLEX64_REF\";\n DataType[DataType[\"DT_INT64_REF\"] = 109] = \"DT_INT64_REF\";\n DataType[DataType[\"DT_BOOL_REF\"] = 110] = \"DT_BOOL_REF\";\n DataType[DataType[\"DT_QINT8_REF\"] = 111] = \"DT_QINT8_REF\";\n DataType[DataType[\"DT_QUINT8_REF\"] = 112] = \"DT_QUINT8_REF\";\n DataType[DataType[\"DT_QINT32_REF\"] = 113] = \"DT_QINT32_REF\";\n DataType[DataType[\"DT_BFLOAT16_REF\"] = 114] = \"DT_BFLOAT16_REF\";\n})(DataType || (DataType = {}));\nexport var SaverDef;\n(function (SaverDef) {\n /** CheckpointFormatVersion enum. */\n let CheckpointFormatVersion;\n (function (CheckpointFormatVersion) {\n CheckpointFormatVersion[CheckpointFormatVersion[\"LEGACY\"] = 0] = \"LEGACY\";\n CheckpointFormatVersion[CheckpointFormatVersion[\"V1\"] = 1] = \"V1\";\n CheckpointFormatVersion[CheckpointFormatVersion[\"V2\"] = 2] = \"V2\";\n })(CheckpointFormatVersion = SaverDef.CheckpointFormatVersion || (SaverDef.CheckpointFormatVersion = {}));\n})(SaverDef || (SaverDef = {}));\n//# sourceMappingURL=compiled_api.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nconst CUSTOM_OPS = {};\n/**\n * Register an Op for graph model executor. This allow you to register\n * TensorFlow custom op or override existing op.\n *\n * Here is an example of registering a new MatMul Op.\n * ```js\n * const customMatmul = (node) =>\n * tf.matMul(\n * node.inputs[0], node.inputs[1],\n * node.attrs['transpose_a'], node.attrs['transpose_b']);\n *\n * tf.registerOp('MatMul', customMatmul);\n * ```\n * The inputs and attrs of the node object is based on the TensorFlow op\n * registry.\n *\n * @param name The Tensorflow Op name.\n * @param opFunc An op function which is called with the current graph node\n * during execution and needs to return a tensor or a list of tensors. The node\n * has the following attributes:\n * - attr: A map from attribute name to its value\n * - inputs: A list of input tensors\n *\n * @doc {heading: 'Models', subheading: 'Op Registry'}\n */\nexport function registerOp(name, opFunc) {\n const opMapper = {\n tfOpName: name,\n category: 'custom',\n inputs: [],\n attrs: [],\n customExecutor: opFunc\n };\n CUSTOM_OPS[name] = opMapper;\n}\n/**\n * Retrieve the OpMapper object for the registered op.\n *\n * @param name The Tensorflow Op name.\n *\n * @doc {heading: 'Models', subheading: 'Op Registry'}\n */\nexport function getRegisteredOp(name) {\n return CUSTOM_OPS[name];\n}\n/**\n * Deregister the Op for graph model executor.\n *\n * @param name The Tensorflow Op name.\n *\n * @doc {heading: 'Models', subheading: 'Op Registry'}\n */\nexport function deregisterOp(name) {\n delete CUSTOM_OPS[name];\n}\n//# sourceMappingURL=register.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { clone, util } from '@tensorflow/tfjs-core';\nexport function getParamValue(paramName, node, tensorMap, context, resourceManager) {\n const inputParam = node.inputParams[paramName];\n if (inputParam && inputParam.inputIndexStart !== undefined) {\n const start = inputParam.inputIndexStart;\n const end = inputParam.inputIndexEnd === 0 ?\n undefined :\n (inputParam.inputIndexEnd === undefined ? 
start + 1 :\n inputParam.inputIndexEnd);\n if (inputParam.type === 'tensor') {\n return getTensor(node.inputNames[inputParam.inputIndexStart], tensorMap, context, resourceManager);\n }\n if (inputParam.type === 'tensors') {\n const inputs = node.inputNames.slice(start, end);\n return inputs.map(name => getTensor(name, tensorMap, context, resourceManager));\n }\n const tensor = getTensor(node.inputNames.slice(start)[0], tensorMap, context, resourceManager);\n const data = tensor.dataSync();\n return inputParam.type === 'number' ?\n data[0] :\n util.toNestedArray(tensor.shape, data);\n }\n const attrParam = node.attrParams[paramName];\n return attrParam && attrParam.value;\n}\n/**\n * Retrieve the tensor from tensorsMap based on input name.\n * @param name Node input name\n * @param tensorsMap Tensors map keyed by the node\n * @param context contains tensors and information for running the current node.\n * @param resourceManager Optional. Contains global resources of the model.\n */\nexport function getTensor(name, tensorsMap, context, resourceManager) {\n const [nodeName, index] = parseNodeName(name);\n if (resourceManager != null) {\n const tensor = resourceManager.getHashTableHandleByName(nodeName);\n if (tensor != null) {\n return tensor;\n }\n }\n const contextId = context.currentContextIds.find(contextId => {\n return !!tensorsMap[getNodeNameWithContextId(nodeName, contextId)];\n });\n return contextId !== undefined ?\n tensorsMap[getNodeNameWithContextId(nodeName, contextId)][index] :\n undefined;\n}\n/**\n * Retrieve the tensors based on input name for current context.\n * @param name Node input name\n * @param tensorsMap Tensors map keyed by the node\n */\nexport function getTensorsForCurrentContenxt(name, tensorsMap, context) {\n return tensorsMap[getNodeNameWithContextId(name, context.currentContextId)];\n}\n/**\n * Returns the node name and index from the Node input name.\n * @param inputName The input name of the node, in format of\n * node_name:output_index, i.e. MatMul:0, if the output_index is not set, it is\n * default to 0.\n */\nexport function getNodeNameAndIndex(inputName, context) {\n const [nodeName, index] = parseNodeName(inputName);\n return [\n getNodeNameWithContextId(nodeName, context && context.currentContextId),\n index\n ];\n}\nfunction getNodeNameWithContextId(name, contextId) {\n return !!contextId ? `${name}-${contextId}` : name;\n}\nexport function parseNodeName(name) {\n const parts = name.split(':');\n if (parts.length === 1) {\n return [name, 0];\n }\n const nodeName = parts[0];\n return [nodeName, Number(parts[parts.length - 1])];\n}\nexport function split(arr, size) {\n const res = [];\n for (let i = 0; i < arr.length; i += size) {\n res.push(arr.slice(i, i + size));\n }\n return res;\n}\nexport function getPadding(node, tensorMap, context) {\n let pad = getParamValue('pad', node, tensorMap, context);\n if (pad === 'explicit') {\n // This is 1d array, we need to convert it to 2d array\n pad = getParamValue('explicitPaddings', node, tensorMap, context);\n const explicitPadding = [[0, 0], [0, 0], [0, 0], [0, 0]];\n for (let i = 0; i < 4; i++) {\n explicitPadding[i][0] = pad[i * 2];\n explicitPadding[i][1] = pad[i * 2 + 1];\n }\n return explicitPadding;\n }\n return pad;\n}\n/**\n * Reuse the tensor if it is marked as keep, otherwise clone the tensor to\n * avoid disposal. 
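// --- Editor's note (illustrative, based on the helpers defined just above) ---
// `parseNodeName` splits a "nodeName:outputIndex" input name (the index defaults
// to 0), and `split` chunks a flat array, e.g. the explicit-padding pairs used by
// `getPadding`:
//   parseNodeName('MatMul');            // ['MatMul', 0]
//   parseNodeName('while/Exit_1:2');    // ['while/Exit_1', 2]
//   split([0, 0, 1, 1, 2, 2, 3, 3], 2); // [[0, 0], [1, 1], [2, 2], [3, 3]]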
This is important for TensorArray and TensorList ops, since\n * internally they use a tensor as the id for TensorArray and TensorList, and\n * to simplify lookup, they also use Tensor.id as the key to the internal map.\n * These id tensors have been marked as kept in the backend, we need avoid clone\n * them in order to create new Tensor.id.\n * @param tensor\n */\nexport function cloneTensor(tensor) {\n return tensor.kept ? tensor : clone(tensor);\n}\n//# sourceMappingURL=utils.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'Add',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'AddV2',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'AddN',\n 'category': 'arithmetic',\n 'inputs': [{ 'start': 0, 'end': 0, 'name': 'tensors', 'type': 'tensors' }]\n },\n {\n 'tfOpName': 'BiasAdd',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Sub',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'RealDiv',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Div',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'DivNoNan',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'FloorDiv',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 
'tfOpName': 'Mul',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Maximum',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' }\n ]\n },\n {\n 'tfOpName': 'Minimum',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' }\n ]\n },\n {\n 'tfOpName': 'Pow',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'SquaredDifference',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Mod',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'FloorMod',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n }\n];\n//# sourceMappingURL=arithmetic.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
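// --- Editor's addition (illustrative sketch, not part of the bundled tfjs source) ---
// Each mapper entry above (e.g. 'Mul') tells the graph executor how a TensorFlow
// node's inputs and attrs map onto a tfjs kernel. Ops with no built-in mapper can be
// supplied at runtime via `registerOp` from register.js; the op name here is
// hypothetical:
import * as tf from '@tensorflow/tfjs';

tf.registerOp('MyMul', (node) => tf.mul(node.inputs[0], node.inputs[1]));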
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'Abs',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Acos',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Asin',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Atan',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Atan2',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'y', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Ceil',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'ClipByValue',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'clip_value_min', 'name': 'clipValueMin', 'type': 'number' },\n { 'tfName': 'clip_value_max', 'name': 'clipValueMax', 'type': 'number' }\n ]\n },\n {\n 'tfOpName': 'Complex',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'real', 'type': 'tensor' },\n { 'start': 1, 'name': 'imag', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'ComplexAbs',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Cos',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Cosh',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Elu',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 
'Exp',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Floor',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Log',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Imag',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }, {\n 'tfName': 'Tout',\n 'name': 'outputType',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Neg',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Real',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }, {\n 'tfName': 'Tout',\n 'name': 'outputType',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Prelu',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'alpha', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Relu',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Relu6',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }, {\n 'tfName': 'clipValueMin',\n 'name': 'clipValueMin',\n 'type': 'number',\n 'defaultValue': 0\n },\n {\n 'tfName': 'clipValueMax',\n 'name': 'clipValueMax',\n 'type': 'number',\n 'defaultValue': 6\n }\n ]\n },\n {\n 'tfOpName': 'Selu',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Sigmoid',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Sin',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Sinh',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Sqrt',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Rsqrt',\n 'category': 
'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Square',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Tan',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Tanh',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Sign',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Round',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Expm1',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Log1p',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Reciprocal',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Softplus',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Asinh',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Acosh',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Atanh',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Erf',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Prod',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axes', 'type': 'number[]' },\n ],\n 'attrs': [\n {\n 'tfName': 'keep_dims',\n 'name': 'keepDims',\n 'type': 'bool',\n 'notSupported': true\n },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'LeakyRelu',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n 
],\n 'attrs': [\n {\n 'tfName': 'alpha',\n 'name': 'alpha',\n 'type': 'number',\n 'defaultValue': 0.2\n },\n {\n 'tfName': 'T',\n 'name': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n }\n];\n//# sourceMappingURL=basic_math.js.map", "export const json = [\n {\n 'tfOpName': 'LoopCond',\n 'category': 'control',\n 'inputs': [{ 'start': 0, 'name': 'pred', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'Switch',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'data', 'type': 'tensor' },\n { 'start': 1, 'name': 'pred', 'type': 'tensor' }\n ]\n },\n {\n 'tfOpName': 'Merge',\n 'category': 'control',\n 'inputs': [{ 'start': 0, 'end': 0, 'name': 'tensors', 'type': 'tensors' }]\n },\n {\n 'tfOpName': 'Enter',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensor', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true },\n { 'tfName': 'frame_name', 'name': 'frameName', 'type': 'string' },\n { 'tfName': 'is_constant', 'name': 'isConstant', 'type': 'bool' }\n ]\n },\n {\n 'tfOpName': 'Exit',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensor', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'NextIteration',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensor', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'TensorArrayV3',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'size', 'type': 'number' },\n ],\n 'attrs': [\n { 'tfName': 'dtype', 'name': 'dtype', 'type': 'dtype' },\n { 'tfName': 'element_shape', 'name': 'elementShape', 'type': 'shape' },\n { 'tfName': 'dynamic_size', 'name': 'dynamicSize', 'type': 'bool' },\n { 'tfName': 'clear_after_read', 'name': 'clearAfterRead', 'type': 'bool' },\n {\n 'tfName': 'identical_element_shapes',\n 'name': 'identicalElementShapes',\n 'type': 'bool'\n },\n { 'tfName': 'tensor_array_name', 'name': 'name', 'type': 'string' }\n ]\n },\n {\n 'tfOpName': 'TensorArrayWriteV3',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorArrayId', 'type': 'tensor' },\n { 'start': 1, 'name': 'index', 'type': 'number' },\n { 'start': 2, 'name': 'tensor', 'type': 'tensor' },\n { 'start': 3, 'name': 'flowIn', 'type': 'number' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'TensorArrayReadV3',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorArrayId', 'type': 'tensor' },\n { 'start': 1, 'name': 'index', 'type': 'number' },\n { 'start': 2, 'name': 'flowIn', 'type': 'number' },\n ],\n 'attrs': [{\n 'tfName': 'dtype',\n 'name': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }]\n },\n {\n 'tfOpName': 'TensorArrayGatherV3',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorArrayId', 'type': 'tensor' },\n { 'start': 1, 'name': 'indices', 'type': 'number[]' },\n { 'start': 2, 'name': 'flowIn', 'type': 'number' },\n ],\n 'attrs': [\n { 'tfName': 'dtype', 'name': 'dtype', 'type': 'dtype' },\n { 'tfName': 'element_shape', 'name': 'elementShape', 'type': 'shape' }\n ]\n },\n {\n 'tfOpName': 'TensorArrayScatterV3',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorArrayId', 'type': 'tensor' },\n { 'start': 1, 'name': 'indices', 'type': 'number[]' },\n { 'start': 2, 'name': 'tensor', 'type': 'tensor' },\n 
{ 'start': 3, 'name': 'flowIn', 'type': 'number' },\n ],\n 'attrs': [{ 'tfName': 'T', 'name': 'dtype', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorArrayConcatV3',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorArrayId', 'type': 'tensor' },\n { 'start': 1, 'name': 'flowIn', 'type': 'number' },\n ],\n 'attrs': [\n { 'tfName': 'dtype', 'name': 'dtype', 'type': 'dtype' }, {\n 'tfName': 'element_shape_except0',\n 'name': 'elementShapeExcept0',\n 'type': 'shape',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'TensorArraySplitV3',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorArrayId', 'type': 'tensor' },\n { 'start': 1, 'name': 'tensor', 'type': 'tensor' },\n { 'start': 2, 'name': 'lengths', 'type': 'number[]' },\n { 'start': 3, 'name': 'flowIn', 'type': 'number' },\n ],\n 'attrs': [{ 'tfName': 'T', 'name': 'dtype', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorArraySizeV3',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorArrayId', 'type': 'tensor' },\n { 'start': 1, 'name': 'flowIn', 'type': 'number' }\n ]\n },\n {\n 'tfOpName': 'TensorArrayCloseV3',\n 'category': 'control',\n 'inputs': [{ 'start': 0, 'name': 'tensorArrayId', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'StatelessIf',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'cond', 'type': 'tensor' },\n { 'start': 1, 'end': 0, 'name': 'args', 'type': 'tensors' }\n ],\n 'attrs': [\n { 'tfName': 'then_branch', 'name': 'thenBranch', 'type': 'func' },\n { 'tfName': 'else_branch', 'name': 'elseBranch', 'type': 'func' }\n ]\n },\n {\n 'tfOpName': 'If',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'cond', 'type': 'tensor' },\n { 'start': 1, 'end': 0, 'name': 'args', 'type': 'tensors' }\n ],\n 'attrs': [\n { 'tfName': 'then_branch', 'name': 'thenBranch', 'type': 'func' },\n { 'tfName': 'else_branch', 'name': 'elseBranch', 'type': 'func' }\n ]\n },\n {\n 'tfOpName': 'StatelessWhile',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'end': 0, 'name': 'args', 'type': 'tensors' },\n ],\n 'attrs': [\n { 'tfName': 'cond', 'name': 'cond', 'type': 'func' },\n { 'tfName': 'body', 'name': 'body', 'type': 'func' }\n ]\n },\n {\n 'tfOpName': 'While',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'end': 0, 'name': 'args', 'type': 'tensors' },\n ],\n 'attrs': [\n { 'tfName': 'cond', 'name': 'cond', 'type': 'func' },\n { 'tfName': 'body', 'name': 'body', 'type': 'func' }\n ]\n },\n {\n 'tfOpName': 'TensorListScatter',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensor', 'type': 'tensor' },\n { 'start': 1, 'name': 'indices', 'type': 'number[]' },\n { 'start': 2, 'name': 'elementShape', 'type': 'shape' }\n ],\n 'attrs': [{ 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorListScatterV2',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensor', 'type': 'tensor' },\n { 'start': 1, 'name': 'indices', 'type': 'number[]' },\n { 'start': 2, 'name': 'elementShape', 'type': 'shape' },\n { 'start': 3, 'name': 'numElements', 'type': 'number' },\n ],\n 'attrs': [{ 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorListGather',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorListId', 'type': 'tensor' },\n { 'start': 1, 'name': 'indices', 'type': 'number[]' },\n { 'start': 2, 'name': 'elementShape', 'type': 'shape' },\n ],\n 'attrs': [{ 'tfName': 'element_dtype', 'name': 'elementDType', 
'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorListGetItem',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorListId', 'type': 'tensor' },\n { 'start': 1, 'name': 'index', 'type': 'number' },\n { 'start': 2, 'name': 'elementShape', 'type': 'shape' },\n ],\n 'attrs': [{ 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorListSetItem',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorListId', 'type': 'tensor' },\n { 'start': 1, 'name': 'index', 'type': 'number' },\n { 'start': 2, 'name': 'tensor', 'type': 'tensor' },\n ],\n 'attrs': [{ 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorListReserve',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'elementShape', 'type': 'shape' },\n { 'start': 1, 'name': 'numElements', 'type': 'number' },\n ],\n 'attrs': [{ 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorListFromTensor',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensor', 'type': 'tensor' },\n { 'start': 1, 'name': 'elementShape', 'type': 'shape' }\n ],\n 'attrs': [{ 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorListStack',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorListId', 'type': 'tensor' },\n { 'start': 1, 'name': 'elementShape', 'type': 'shape' },\n ],\n 'attrs': [\n { 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' },\n { 'tfName': 'num_elements', 'name': 'numElements', 'type': 'dtype' }\n ]\n },\n {\n 'tfOpName': 'TensorListSplit',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensor', 'type': 'tensor' },\n { 'start': 1, 'name': 'elementShape', 'type': 'shape' },\n { 'start': 2, 'name': 'lengths', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorListConcat',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorListId', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'element_shape', 'name': 'elementShape', 'type': 'shape' },\n { 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' }\n ]\n },\n {\n 'tfOpName': 'TensorListPopBack',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorListId', 'type': 'tensor' },\n { 'start': 1, 'name': 'elementShape', 'type': 'shape' },\n ],\n 'attrs': [{ 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorListPushBack',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorListId', 'type': 'tensor' },\n { 'start': 1, 'name': 'tensor', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' }\n ]\n }\n];\n//# sourceMappingURL=control.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'AvgPool',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' }, {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'notSupported': true\n },\n { 'tfName': 'ksize', 'name': 'kernelSize', 'type': 'number[]' },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'MaxPool',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' }, {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'notSupported': true\n },\n { 'tfName': 'ksize', 'name': 'kernelSize', 'type': 'number[]' },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'MaxPoolWithArgmax',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' },\n { 'tfName': 'ksize', 'name': 'kernelSize', 'type': 'number[]' }, {\n 'tfName': 'include_batch_in_index',\n 'name': 'includeBatchInIndex',\n 'type': 'bool'\n },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'AvgPool3D',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' }, {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'notSupported': true\n },\n { 'tfName': 'ksize', 'name': 'kernelSize', 'type': 'number[]' },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'MaxPool3D',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' }, {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'notSupported': true\n },\n { 'tfName': 'ksize', 'name': 'kernelSize', 'type': 'number[]' },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Conv1D',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'filter', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'stride', 'name': 'stride', 'type': 'number' },\n { 
'tfName': 'padding', 'name': 'pad', 'type': 'string' }, {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'defaultValue': 'NWC'\n },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }, {\n 'tfName': 'dilation',\n 'name': 'dilation',\n 'type': 'number',\n 'defaultValue': 1\n }\n ]\n },\n {\n 'tfOpName': 'Conv2D',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'filter', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true },\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' },\n { 'tfName': 'useCudnnOnGpu', 'name': 'useCudnnOnGpu', 'type': 'bool' }, {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'defaultValue': 'NHWC'\n },\n {\n 'tfName': 'explicit_paddings',\n 'name': 'explicitPaddings',\n 'type': 'number[]',\n 'defaultValue': []\n },\n { 'tfName': 'dilations', 'name': 'dilations', 'type': 'number[]' }\n ]\n },\n {\n 'tfOpName': '_FusedConv2D',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'filter', 'type': 'tensor' },\n { 'start': 2, end: 0, 'name': 'args', 'type': 'tensors' },\n ],\n 'attrs': [\n { 'tfName': 'num_args', 'name': 'numArgs', 'type': 'number' },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true },\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' },\n {\n 'tfName': 'explicit_paddings',\n 'name': 'explicitPaddings',\n 'type': 'number[]',\n 'defaultValue': []\n },\n {\n 'tfName': 'use_cudnn_on_gpu',\n 'name': 'useCudnnOnGpu',\n 'type': 'bool',\n 'defaultValue': true\n },\n {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'defaultValue': 'NHWC'\n },\n {\n 'tfName': 'dilations',\n 'name': 'dilations',\n 'type': 'number[]',\n 'defaultValue': [1, 1, 1, 1]\n },\n {\n 'tfName': 'fused_ops',\n 'name': 'fusedOps',\n 'type': 'string[]',\n 'defaultValue': []\n },\n {\n 'tfName': 'epsilon',\n 'name': 'epsilon',\n 'type': 'number',\n 'defaultValue': 0.0001\n },\n ]\n },\n {\n 'tfOpName': 'Conv2DBackpropInput',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 2, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'filter', 'type': 'tensor' },\n { 'start': 0, 'name': 'outputShape', 'type': 'number[]' },\n ],\n 'attrs': [\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' },\n {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'notSupported': true\n },\n {\n 'tfName': 'explicit_paddings',\n 'name': 'explicitPaddings',\n 'type': 'number[]',\n 'defaultValue': []\n },\n ]\n },\n {\n 'tfOpName': 'DepthwiseConv2d',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'input', 'type': 'tensor' },\n { 'start': 1, 'name': 'filter', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' }, {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'defaultValue': 'NHWC'\n },\n {\n 'tfName': 'explicit_paddings',\n 'name': 'explicitPaddings',\n 'type': 'number[]',\n 'defaultValue': []\n },\n { 'tfName': 'dilations', 'name': 'dilations', 'type': 'number[]' }\n ]\n },\n {\n 'tfOpName': 
'DepthwiseConv2dNative',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'input', 'type': 'tensor' },\n { 'start': 1, 'name': 'filter', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' }, {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'defaultValue': 'NHWC'\n },\n {\n 'tfName': 'explicit_paddings',\n 'name': 'explicitPaddings',\n 'type': 'number[]',\n 'defaultValue': []\n },\n { 'tfName': 'dilations', 'name': 'dilations', 'type': 'number[]' }\n ]\n },\n {\n 'tfOpName': 'FusedDepthwiseConv2dNative',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'filter', 'type': 'tensor' },\n { 'start': 2, end: 0, 'name': 'args', 'type': 'tensors' },\n ],\n 'attrs': [\n { 'tfName': 'num_args', 'name': 'numArgs', 'type': 'number' },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true },\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' }, {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'defaultValue': 'NHWC'\n },\n {\n 'tfName': 'dilations',\n 'name': 'dilations',\n 'type': 'number[]',\n 'defaultValue': [1, 1, 1, 1]\n },\n {\n 'tfName': 'fused_ops',\n 'name': 'fusedOps',\n 'type': 'string[]',\n 'defaultValue': []\n }\n ]\n },\n {\n 'tfOpName': 'Conv3D',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'filter', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' }, {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'defaultValue': 'NHWC'\n },\n { 'tfName': 'dilations', 'name': 'dilations', 'type': 'number[]' }\n ],\n },\n {\n 'tfOpName': 'Dilation2D',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'filter', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'rates', 'name': 'dilations', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' }\n ]\n }\n];\n//# sourceMappingURL=convolution.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'Fill',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'shape', 'type': 'number[]' },\n { 'start': 1, 'name': 'value', 'type': 'number' },\n ],\n 'attrs': [{ 'tfName': 'T', 'name': 'dtype', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'LinSpace',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'start', 'type': 'number' },\n { 'start': 1, 'name': 'stop', 'type': 'number' },\n { 'start': 2, 'name': 'num', 'type': 'number' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'OneHot',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'indices', 'type': 'tensor' },\n { 'start': 1, 'name': 'depth', 'type': 'number' },\n { 'start': 2, 'name': 'onValue', 'type': 'number', 'defaultValue': 1 },\n { 'start': 3, 'name': 'offValue', 'type': 'number', 'defaultValue': 0 },\n ],\n 'attrs': [\n {\n 'tfName': 'axis',\n 'name': 'axis',\n 'type': 'number',\n 'notSupported': true\n },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Ones',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'shape', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'T', 'name': 'dtype', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'OnesLike',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [{ 'tfName': 'dtype', 'name': 'dtype', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'RandomUniform',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'shape', 'type': 'number[]' },\n ],\n 'attrs': [\n {\n 'tfName': 'minval',\n 'name': 'minval',\n 'type': 'number',\n 'defaultValue': 0\n },\n {\n 'tfName': 'maxval',\n 'name': 'maxval',\n 'type': 'number',\n 'defaultValue': 1\n },\n { 'tfName': 'dtype', 'name': 'dtype', 'type': 'dtype' },\n { 'tfName': 'seed', 'name': 'seed', 'type': 'number', 'defaultValue': 0 }, {\n 'tfName': 'seed2',\n 'name': 'seed2',\n 'type': 'number',\n 'defaultValue': 0,\n 'notSupported': true\n },\n { 'tfName': 'T', 'name': 'T', 'type': 'number', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Range',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'start', 'type': 'number' },\n { 'start': 1, 'name': 'stop', 'type': 'number' },\n { 'start': 2, 'name': 'step', 'type': 'number', 'defaultValue': 0 },\n ],\n 'attrs': [{ 'tfName': 'Tidx', 'name': 'dtype', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TruncatedNormal',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'shape', 'type': 'number[]' },\n ],\n 'attrs': [\n {\n 'tfName': 'means',\n 'name': 'mean',\n 'type': 'number',\n 'defaultValue': 0.0\n },\n {\n 'tfName': 'stddev',\n 'name': 'stdDev',\n 'type': 'number',\n 'defaultValue': 1.0\n },\n { 'tfName': 'seed', 'name': 'seed', 'type': 'number' 
}, {\n 'tfName': 'seed2',\n 'name': 'seed2',\n 'type': 'number',\n 'defaultValue': 0,\n 'notSupported': true\n },\n { 'tfName': 'dtype', 'name': 'dtype', 'type': 'dtype' },\n { 'tfName': 'T', 'name': 'T', 'type': 'number', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Zeros',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'shape', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'T', 'name': 'dtype', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'ZerosLike',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [{ 'tfName': 'T', 'name': 'dtype', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'Multinomial',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'logits', 'type': 'tensor' },\n { 'start': 1, 'name': 'numSamples', 'type': 'number' },\n ],\n 'attrs': [\n { 'tfName': 'seed', 'name': 'seed', 'type': 'number' },\n { 'tfName': 'seed2', 'name': 'seed2', 'type': 'number' },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype' },\n { 'tfName': 'output_dtype', 'name': 'output_dtype', 'type': 'dtype' }\n ]\n }\n];\n//# sourceMappingURL=creation.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'NonMaxSuppressionV2',\n 'category': 'dynamic',\n 'inputs': [\n { 'start': 0, 'name': 'boxes', 'type': 'tensor' },\n { 'start': 1, 'name': 'scores', 'type': 'tensor' },\n { 'start': 2, 'name': 'maxOutputSize', 'type': 'number' },\n { 'start': 3, 'name': 'iouThreshold', 'type': 'number' }\n ]\n },\n {\n 'tfOpName': 'NonMaxSuppressionV3',\n 'category': 'dynamic',\n 'inputs': [\n { 'start': 0, 'name': 'boxes', 'type': 'tensor' },\n { 'start': 1, 'name': 'scores', 'type': 'tensor' },\n { 'start': 2, 'name': 'maxOutputSize', 'type': 'number' },\n { 'start': 3, 'name': 'iouThreshold', 'type': 'number' },\n { 'start': 4, 'name': 'scoreThreshold', 'type': 'number' }\n ]\n },\n {\n 'tfOpName': 'NonMaxSuppressionV4',\n 'category': 'dynamic',\n 'inputs': [\n { 'start': 0, 'name': 'boxes', 'type': 'tensor' },\n { 'start': 1, 'name': 'scores', 'type': 'tensor' },\n { 'start': 2, 'name': 'maxOutputSize', 'type': 'number' },\n { 'start': 3, 'name': 'iouThreshold', 'type': 'number' },\n { 'start': 4, 'name': 'scoreThreshold', 'type': 'number' }\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }, {\n 'tfName': 'T_threshold',\n 'name': 'threshold',\n 'type': 'dtype',\n 'notSupported': true\n },\n {\n 'tfName': 'pad_to_max_output_size',\n 'name': 'padToMaxOutputSize',\n 'type': 'bool'\n }\n ]\n },\n {\n 'tfOpName': 'NonMaxSuppressionV5',\n 'category': 'dynamic',\n 'inputs': [\n { 'start': 0, 'name': 'boxes', 'type': 'tensor' },\n { 'start': 1, 'name': 'scores', 'type': 'tensor' },\n { 'start': 2, 'name': 'maxOutputSize', 'type': 'number' },\n { 'start': 3, 'name': 'iouThreshold', 'type': 
'number' },\n { 'start': 4, 'name': 'scoreThreshold', 'type': 'number' },\n { 'start': 5, 'name': 'softNmsSigma', 'type': 'number' }\n ]\n },\n {\n 'tfOpName': 'Where',\n 'category': 'dynamic',\n 'inputs': [\n { 'start': 0, 'name': 'condition', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'ListDiff',\n 'category': 'dynamic',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'y', 'type': 'tensor' },\n ],\n 'attrs': [{\n 'tfName': 'T',\n 'name': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }]\n }\n];\n//# sourceMappingURL=dynamic.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'TopKV2',\n 'category': 'evaluation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'k', 'type': 'number' },\n ],\n 'attrs': [{ 'tfName': 'sorted', 'name': 'sorted', 'type': 'bool' }]\n },\n {\n 'tfOpName': 'Unique',\n 'category': 'evaluation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n },\n {\n 'tfOpName': 'UniqueV2',\n 'category': 'evaluation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number' },\n ],\n },\n];\n//# sourceMappingURL=evaluation.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'PlaceholderWithDefault',\n 'category': 'graph',\n 'inputs': [\n { 'start': 0, 'name': 'default', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'shape', 'name': 'shape', 'type': 'shape' },\n { 'tfName': 'dtype', 'name': 'dtype', 'type': 'dtype' }\n ]\n },\n {\n 'tfOpName': 'Placeholder',\n 'category': 'graph',\n 'attrs': [\n { 'tfName': 'shape', 'name': 'shape', 'type': 'shape' },\n { 'tfName': 'dtype', 'name': 'dtype', 'type': 'dtype' }\n ]\n },\n { 'tfOpName': 'Const', 'category': 'graph' }, {\n 'tfOpName': 'Identity',\n 'category': 'graph',\n 'inputs': [{ 'start': 0, 'name': 'x', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'IdentityN',\n 'category': 'graph',\n 'inputs': [{ 'start': 0, 'end': 0, 'name': 'x', 'type': 'tensors' }]\n },\n {\n 'tfOpName': 'Snapshot',\n 'category': 'graph',\n 'inputs': [{ 'start': 0, 'name': 'x', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'Rank',\n 'category': 'graph',\n 'inputs': [{ 'start': 0, 'name': 'x', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'Size',\n 'category': 'graph',\n 'inputs': [{ 'start': 0, 'name': 'x', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'Shape',\n 'category': 'graph',\n 'inputs': [{ 'start': 0, 'name': 'x', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'ShapeN',\n 'category': 'graph',\n 'inputs': [{ 'start': 0, 'end': 0, 'name': 'x', 'type': 'tensors' }]\n },\n {\n 'tfOpName': 'Print',\n 'category': 'graph',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'data', 'type': 'tensors' },\n ],\n 'attrs': [\n { 'tfName': 'message', 'name': 'message', 'type': 'string' }, {\n 'tfName': 'first_n',\n 'name': 'firstN',\n 'type': 'number',\n 'notSupported': true\n },\n {\n 'tfName': 'summarize',\n 'name': 'summarize',\n 'type': 'number',\n 'defaultValue': 3\n }\n ]\n },\n { 'tfOpName': 'NoOp', 'category': 'graph', 'inputs': [] }, {\n 'tfOpName': 'StopGradient',\n 'category': 'graph',\n 'inputs': [{ 'start': 0, 'name': 'x', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'FakeQuantWithMinMaxVars',\n 'category': 'graph',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'min', 'name': 'min', 'type': 'number' },\n { 'tfName': 'max', 'name': 'max', 'type': 'number' }\n ]\n }\n];\n//# sourceMappingURL=graph.js.map", "export const json = [\n {\n 'tfOpName': 'HashTable',\n 'category': 'hash_table',\n 'inputs': [],\n 'attrs': [\n { 'tfName': 'shared_name', 'name': 'sharedName', 'type': 'string' },\n {\n 'tfName': 'use_node_name_sharing',\n 'name': 'useNodeNameSharing',\n 'type': 'bool'\n },\n { 'tfName': 'key_dtype', 'name': 'keyDType', 'type': 'dtype' },\n { 'tfName': 'value_dtype', 'name': 'valueDType', 'type': 'dtype' },\n ]\n },\n {\n 'tfOpName': 'HashTableV2',\n 'category': 'hash_table',\n 'inputs': [],\n 'attrs': [\n { 'tfName': 'shared_name', 'name': 'sharedName', 
'type': 'string' },\n {\n 'tfName': 'use_node_name_sharing',\n 'name': 'useNodeNameSharing',\n 'type': 'bool'\n },\n { 'tfName': 'key_dtype', 'name': 'keyDType', 'type': 'dtype' },\n { 'tfName': 'value_dtype', 'name': 'valueDType', 'type': 'dtype' },\n ]\n },\n {\n 'tfOpName': 'LookupTableImport',\n 'category': 'hash_table',\n 'inputs': [\n { 'start': 0, 'name': 'tableHandle', 'type': 'tensor' },\n { 'start': 1, 'name': 'keys', 'type': 'tensor' },\n { 'start': 2, 'name': 'values', 'type': 'tensor' }\n ],\n 'attrs': [\n { 'tfName': 'Tin', 'name': 'tIn', 'type': 'dtype', 'notSupported': true }, {\n 'tfName': 'Tout',\n 'name': 'tOut',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'LookupTableImportV2',\n 'category': 'hash_table',\n 'inputs': [\n { 'start': 0, 'name': 'tableHandle', 'type': 'tensor' },\n { 'start': 1, 'name': 'keys', 'type': 'tensor' },\n { 'start': 2, 'name': 'values', 'type': 'tensor' }\n ],\n 'attrs': [\n { 'tfName': 'Tin', 'name': 'tIn', 'type': 'dtype', 'notSupported': true }, {\n 'tfName': 'Tout',\n 'name': 'tOut',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'LookupTableFind',\n 'category': 'hash_table',\n 'inputs': [\n { 'start': 0, 'name': 'tableHandle', 'type': 'tensor' },\n { 'start': 1, 'name': 'keys', 'type': 'tensor' },\n { 'start': 2, 'name': 'defaultValue', 'type': 'tensor' }\n ],\n 'attrs': [\n { 'tfName': 'Tin', 'name': 'tIn', 'type': 'dtype', 'notSupported': true }, {\n 'tfName': 'Tout',\n 'name': 'tOut',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'LookupTableFindV2',\n 'category': 'hash_table',\n 'inputs': [\n { 'start': 0, 'name': 'tableHandle', 'type': 'tensor' },\n { 'start': 1, 'name': 'keys', 'type': 'tensor' },\n { 'start': 2, 'name': 'defaultValue', 'type': 'tensor' }\n ],\n 'attrs': [\n { 'tfName': 'Tin', 'name': 'tIn', 'type': 'dtype', 'notSupported': true }, {\n 'tfName': 'Tout',\n 'name': 'tOut',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n }\n];\n//# sourceMappingURL=hash_table.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'ResizeBilinear',\n 'category': 'image',\n 'inputs': [\n { 'start': 0, 'name': 'images', 'type': 'tensor' },\n { 'start': 1, 'name': 'size', 'type': 'number[]' },\n ],\n 'attrs': [\n { 'tfName': 'align_corners', 'name': 'alignCorners', 'type': 'bool' },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'ResizeNearestNeighbor',\n 'category': 'image',\n 'inputs': [\n { 'start': 0, 'name': 'images', 'type': 'tensor' },\n { 'start': 1, 'name': 'size', 'type': 'number[]' },\n ],\n 'attrs': [\n { 'tfName': 'align_corners', 'name': 'alignCorners', 'type': 'bool' },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'CropAndResize',\n 'category': 'image',\n 'inputs': [\n { 'start': 0, 'name': 'image', 'type': 'tensor' },\n { 'start': 1, 'name': 'boxes', 'type': 'tensor' },\n { 'start': 2, 'name': 'boxInd', 'type': 'tensor' },\n { 'start': 3, 'name': 'cropSize', 'type': 'number[]' },\n ],\n 'attrs': [\n { 'tfName': 'method', 'name': 'method', 'type': 'string' }, {\n 'tfName': 'extrapolation_value',\n 'name': 'extrapolationValue',\n 'type': 'number'\n }\n ]\n }\n];\n//# sourceMappingURL=image.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'Equal',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'NotEqual',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Greater',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'GreaterEqual',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Less',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'LessEqual',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'LogicalAnd',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'LogicalNot',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'LogicalOr',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Select',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'condition', 'type': 'tensor' },\n { 'start': 1, 'name': 'a', 'type': 'tensor' },\n { 'start': 2, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'SelectV2',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'condition', 'type': 'tensor' },\n { 'start': 1, 'name': 
'a', 'type': 'tensor' },\n { 'start': 2, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [{\n 'tfName': 'T',\n 'name': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }]\n }\n];\n//# sourceMappingURL=logical.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': '_FusedMatMul',\n 'category': 'matrices',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n { 'start': 2, end: 0, 'name': 'args', 'type': 'tensors' },\n ],\n 'attrs': [\n { 'tfName': 'num_args', 'name': 'numArgs', 'type': 'number' }, {\n 'tfName': 'fused_ops',\n 'name': 'fusedOps',\n 'type': 'string[]',\n 'defaultValue': []\n },\n {\n 'tfName': 'epsilon',\n 'name': 'epsilon',\n 'type': 'number',\n 'defaultValue': 0.0001\n },\n {\n 'tfName': 'transpose_a',\n 'name': 'transposeA',\n 'type': 'bool',\n 'defaultValue': false\n },\n {\n 'tfName': 'transpose_b',\n 'name': 'transposeB',\n 'type': 'bool',\n 'defaultValue': false\n },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'MatMul',\n 'category': 'matrices',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n {\n 'tfName': 'transpose_a',\n 'name': 'transposeA',\n 'type': 'bool',\n 'defaultValue': false\n },\n {\n 'tfName': 'transpose_b',\n 'name': 'transposeB',\n 'type': 'bool',\n 'defaultValue': false\n },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'BatchMatMul',\n 'category': 'matrices',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n {\n 'tfName': 'adj_x',\n 'name': 'transposeA',\n 'type': 'bool',\n 'defaultValue': false\n },\n {\n 'tfName': 'adj_y',\n 'name': 'transposeB',\n 'type': 'bool',\n 'defaultValue': false\n },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'BatchMatMulV2',\n 'category': 'matrices',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n {\n 'tfName': 'adj_x',\n 'name': 'transposeA',\n 'type': 'bool',\n 'defaultValue': false\n },\n {\n 'tfName': 'adj_y',\n 'name': 'transposeB',\n 'type': 'bool',\n 'defaultValue': false\n },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Transpose',\n 'category': 'matrices',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'perm', 'type': 'number[]' },\n ],\n 'attrs': [{\n 'tfName': 'T',\n 'name': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }]\n }\n];\n//# sourceMappingURL=matrices.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'FusedBatchNorm',\n 'category': 'normalization',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'scale', 'type': 'tensor' },\n { 'start': 2, 'name': 'offset', 'type': 'tensor' },\n { 'start': 3, 'name': 'mean', 'type': 'tensor' },\n { 'start': 4, 'name': 'variance', 'type': 'tensor' },\n ],\n 'attrs': [\n {\n 'tfName': 'epsilon',\n 'name': 'epsilon',\n 'type': 'number',\n 'defaultValue': 0.001\n },\n {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'FusedBatchNormV2',\n 'category': 'normalization',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'scale', 'type': 'tensor' },\n { 'start': 2, 'name': 'offset', 'type': 'tensor' },\n { 'start': 3, 'name': 'mean', 'type': 'tensor' },\n { 'start': 4, 'name': 'variance', 'type': 'tensor' },\n ],\n 'attrs': [\n {\n 'tfName': 'epsilon',\n 'name': 'epsilon',\n 'type': 'number',\n 'defaultValue': 0.001\n },\n {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'FusedBatchNormV3',\n 'category': 'normalization',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'scale', 'type': 'tensor' },\n { 'start': 2, 'name': 'offset', 'type': 'tensor' },\n { 'start': 3, 'name': 'mean', 'type': 'tensor' },\n { 'start': 4, 'name': 'variance', 'type': 'tensor' },\n ],\n 'attrs': [\n {\n 'tfName': 'epsilon',\n 'name': 'epsilon',\n 'type': 'number',\n 'defaultValue': 0.001\n },\n {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'LRN',\n 'category': 'normalization',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n {\n 'tfName': 'depth_radius',\n 'name': 'radius',\n 'type': 'number',\n 'defaultValue': 5\n },\n { 'tfName': 'bias', 'name': 'bias', 'type': 'number', 'defaultValue': 1.0 },\n {\n 'tfName': 'alpha',\n 'name': 'alpha',\n 'type': 'number',\n 'defaultValue': 1.0\n },\n {\n 'tfName': 'beta',\n 'name': 'beta',\n 'type': 'number',\n 'defaultValue': 0.5\n }\n ]\n },\n {\n 'tfOpName': 'Softmax',\n 'category': 'normalization',\n 'inputs': [{ 'start': 0, 'name': 'x', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'LogSoftmax',\n 'category': 'normalization',\n 'inputs': [{ 'start': 0, 'name': 'x', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'SparseToDense',\n 'category': 'normalization',\n 'inputs': [\n { 'start': 0, 'name': 'sparseIndices', 'type': 'tensor' },\n { 'start': 1, 'name': 'outputShape', 'type': 'number[]' },\n { 'start': 2, 'name': 'sparseValues', 'type': 'tensor' },\n { 'start': 3, 'name': 'defaultValue', 'type': 'tensor' },\n ],\n 'attrs': [{\n 'tfName': 'validate_indices',\n 'name': 
'validateIndices',\n 'type': 'bool',\n 'defaultValue': true,\n 'notSupported': true\n }]\n }\n];\n//# sourceMappingURL=normalization.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'Max',\n 'category': 'reduction',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'keep_dims', 'name': 'keepDims', 'type': 'bool' }]\n },\n {\n 'tfOpName': 'Mean',\n 'category': 'reduction',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'keep_dims', 'name': 'keepDims', 'type': 'bool' }]\n },\n {\n 'tfOpName': 'Min',\n 'category': 'reduction',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'keep_dims', 'name': 'keepDims', 'type': 'bool' }]\n },\n {\n 'tfOpName': 'Sum',\n 'category': 'reduction',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'keep_dims', 'name': 'keepDims', 'type': 'bool' }]\n },\n {\n 'tfOpName': 'All',\n 'category': 'reduction',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'keep_dims', 'name': 'keepDims', 'type': 'bool' }]\n },\n {\n 'tfOpName': 'Any',\n 'category': 'reduction',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'keep_dims', 'name': 'keepDims', 'type': 'bool' }]\n },\n {\n 'tfOpName': 'ArgMax',\n 'category': 'reduction',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number' }\n ]\n },\n {\n 'tfOpName': 'ArgMin',\n 'category': 'reduction',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number' }\n ]\n },\n {\n 'tfOpName': 'Prod',\n 'category': 'reduction',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'keep_dims', 'name': 'keepDims', 'type': 'bool' }]\n },\n {\n 'tfOpName': 'Cumsum',\n 'category': 'reduction',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number' },\n ],\n 'attrs': [\n { 'tfName': 'exclusive', 'name': 'exclusive', 'type': 'bool' },\n { 'tfName': 'reverse', 'name': 'reverse', 'type': 'bool' }\n ]\n }\n];\n//# sourceMappingURL=reduction.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'ConcatV2',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'end': -1, 'name': 'tensors', 'type': 'tensors' },\n { 'start': -1, 'name': 'axis', 'type': 'number' }\n ],\n 'attrs': [{ 'tfName': 'N', 'name': 'n', 'type': 'number', 'defaultValue': 2 }]\n },\n {\n 'tfOpName': 'Concat',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 1, 'end': 0, 'name': 'tensors', 'type': 'tensors' },\n { 'start': 0, 'name': 'axis', 'type': 'number' }\n ],\n 'attrs': [{ 'tfName': 'N', 'name': 'n', 'type': 'number', 'defaultValue': 2 }]\n },\n {\n 'tfOpName': 'GatherV2',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'indices', 'type': 'tensor' },\n { 'start': 2, 'name': 'axis', 'type': 'number', 'defaultValue': 0 }\n ]\n },\n {\n 'tfOpName': 'Gather',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'indices', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'axis', 'name': 'axis', 'type': 'number', 'defaultValue': 0 }, {\n 'tfName': 'validate_indices',\n 'name': 'validateIndices',\n 'type': 'bool',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Reverse',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'dims', 'type': 'bool', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'ReverseV2',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number[]' }\n ]\n },\n {\n 'tfOpName': 'Slice',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'begin', 'type': 'number[]' },\n { 'start': 2, 'name': 'size', 'type': 'number[]' }\n ]\n },\n {\n 'tfOpName': 'StridedSlice',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'begin', 'type': 'number[]' },\n { 'start': 2, 'name': 'end', 'type': 'number[]' },\n { 'start': 3, 'name': 'strides', 'type': 'number[]' },\n ],\n 'attrs': [\n {\n 'tfName': 'begin_mask',\n 'name': 'beginMask',\n 'type': 'number',\n 'defaultValue': 0\n },\n {\n 'tfName': 'end_mask',\n 'name': 'endMask',\n 'type': 'number',\n 'defaultValue': 0\n },\n {\n 'tfName': 'new_axis_mask',\n 'name': 'newAxisMask',\n 'type': 'number',\n 'defaultValue': 0\n },\n {\n 'tfName': 'ellipsis_mask',\n 'name': 'ellipsisMask',\n 'type': 'number',\n 'defaultValue': 0\n },\n {\n 'tfName': 'shrink_axis_mask',\n 'name': 'shrinkAxisMask',\n 'type': 'number',\n 'defaultValue': 0\n }\n ]\n },\n {\n 'tfOpName': 'Pack',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'end': 0, 'name': 'tensors', 'type': 'tensors' },\n ],\n 'attrs': [\n { 'tfName': 'axis', 'name': 'axis', 'type': 'number', 
'defaultValue': 0 }\n ]\n },\n {\n 'tfOpName': 'Unpack',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'tensor', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'axis', 'name': 'axis', 'type': 'number', 'defaultValue': 0 }, {\n 'tfName': 'num',\n 'name': 'num',\n 'type': 'number',\n 'defaultValue': 0,\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Tile',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'reps', 'type': 'number[]' }\n ]\n },\n {\n 'tfOpName': 'Split',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'axis', 'type': 'number', 'defaultValue': 0 },\n { 'start': 1, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [{\n 'tfName': 'num_split',\n 'name': 'numOrSizeSplits',\n 'type': 'number',\n 'defaultValue': 1\n }]\n },\n {\n 'tfOpName': 'SplitV',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'numOrSizeSplits', 'type': 'number[]' },\n { 'start': 2, 'name': 'axis', 'type': 'number', 'defaultValue': 0 }\n ]\n },\n {\n 'tfOpName': 'ScatterNd',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'indices', 'type': 'tensor' },\n { 'start': 1, 'name': 'values', 'type': 'tensor' },\n { 'start': 2, 'name': 'shape', 'type': 'number[]' }\n ]\n },\n {\n 'tfOpName': 'GatherNd',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'indices', 'type': 'tensor' }\n ]\n },\n {\n 'tfOpName': 'SparseToDense',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'sparseIndices', 'type': 'tensor' },\n { 'start': 1, 'name': 'outputShape', 'type': 'number[]' },\n { 'start': 2, 'name': 'sparseValues', 'type': 'tensor' },\n { 'start': 3, 'name': 'defaultValue', 'type': 'tensor' },\n ],\n 'attrs': [{\n 'tfName': 'validate_indices',\n 'name': 'validateIndices',\n 'type': 'bool',\n 'defaultValue': false,\n 'notSupported': true\n }]\n }\n];\n//# sourceMappingURL=slice_join.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'FFT',\n 'category': 'spectral',\n 'inputs': [{ 'start': 0, 'name': 'x', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'IFFT',\n 'category': 'spectral',\n 'inputs': [{ 'start': 0, 'name': 'x', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'RFFT',\n 'category': 'spectral',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' }, {\n 'start': 1,\n 'name': 'fft_length',\n 'type': 'number',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'IRFFT',\n 'category': 'spectral',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' }, {\n 'start': 1,\n 'name': 'fft_length',\n 'type': 'number',\n 'notSupported': true\n }\n ]\n }\n];\n//# sourceMappingURL=spectral.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'Cast',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n {\n 'tfName': 'SrcT',\n 'name': 'sdtype',\n 'type': 'dtype',\n 'notSupported': true\n },\n { 'tfName': 'DstT', 'name': 'dtype', 'type': 'dtype' }\n ]\n },\n {\n 'tfOpName': 'ExpandDims',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number' }\n ]\n },\n {\n 'tfOpName': 'MirrorPad',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'padding', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'mode', 'name': 'mode', 'type': 'string' }]\n },\n {\n 'tfOpName': 'Pad',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'padding', 'type': 'number[]' },\n ],\n 'attrs': [{\n 'tfName': 'constant_value',\n 'name': 'constantValue',\n 'type': 'number',\n 'defaultValue': 0\n }]\n },\n {\n 'tfOpName': 'PadV2',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'padding', 'type': 'number[]' }, {\n 'start': 2,\n 'name': 'constantValue',\n 'type': 'number',\n 'defaultValue': 0\n }\n ]\n },\n {\n 'tfOpName': 'Reshape',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 
'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'shape', 'type': 'number[]' }\n ]\n },\n {\n 'tfOpName': 'Squeeze',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [{\n 'tfName': 'axis',\n 'tfDeprecatedName': 'squeeze_dims',\n 'name': 'axis',\n 'type': 'number[]'\n }]\n },\n {\n 'tfOpName': 'SpaceToBatchND',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'blockShape', 'type': 'number[]' },\n { 'start': 2, 'name': 'paddings', 'type': 'number[]' }\n ]\n },\n {\n 'tfOpName': 'BatchToSpaceND',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'blockShape', 'type': 'number[]' },\n { 'start': 2, 'name': 'crops', 'type': 'number[]' }\n ]\n },\n {\n 'tfOpName': 'DepthToSpace',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'block_size', 'name': 'blockSize', 'type': 'number' },\n { 'tfName': 'data_format', 'name': 'dataFormat', 'type': 'string' }\n ]\n },\n {\n 'tfOpName': 'BroadcastTo',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'shape', 'type': 'number[]' },\n ],\n 'attrs': []\n }\n];\n//# sourceMappingURL=transformation.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from '@tensorflow/tfjs-core';\nimport * as tensorflow from '../data/compiled_api';\nimport { getRegisteredOp } from './custom_op/register';\nimport { getNodeNameAndIndex } from './executors/utils';\nimport * as arithmetic from './op_list/arithmetic';\nimport * as basicMath from './op_list/basic_math';\nimport * as control from './op_list/control';\nimport * as convolution from './op_list/convolution';\nimport * as creation from './op_list/creation';\nimport * as dynamic from './op_list/dynamic';\nimport * as evaluation from './op_list/evaluation';\nimport * as graph from './op_list/graph';\nimport * as hashTable from './op_list/hash_table';\nimport * as image from './op_list/image';\nimport * as logical from './op_list/logical';\nimport * as matrices from './op_list/matrices';\nimport * as normalization from './op_list/normalization';\nimport * as reduction from './op_list/reduction';\nimport * as sliceJoin from './op_list/slice_join';\nimport * as spectral from './op_list/spectral';\nimport * as transformation from './op_list/transformation';\nexport class OperationMapper {\n // Singleton instance for the mapper\n static get Instance() {\n return this._instance || (this._instance = new this());\n }\n // Loads the op mapping from the JSON file.\n constructor() {\n const ops = [\n arithmetic, basicMath, control, convolution, creation, dynamic,\n evaluation, logical, image, graph, 
matrices, normalization, reduction,\n sliceJoin, spectral, transformation, hashTable\n ];\n const mappersJson = [].concat(...ops.map(op => op.json));\n this.opMappers = mappersJson.reduce((map, mapper) => {\n map[mapper.tfOpName] = mapper;\n return map;\n }, {});\n }\n // Converts the model inference graph from Tensorflow GraphDef to local\n // representation for TensorFlow.js API\n transformGraph(graph, signature = {}) {\n const tfNodes = graph.node;\n const placeholders = [];\n const weights = [];\n const initNodes = [];\n const nodes = tfNodes.reduce((map, node) => {\n map[node.name] = this.mapNode(node);\n if (node.op.startsWith('Placeholder')) {\n placeholders.push(map[node.name]);\n }\n else if (node.op === 'Const') {\n weights.push(map[node.name]);\n }\n else if (node.input == null || node.input.length === 0) {\n initNodes.push(map[node.name]);\n }\n return map;\n }, {});\n let inputs = [];\n const outputs = [];\n let inputNodeNameToKey = {};\n let outputNodeNameToKey = {};\n if (signature != null) {\n inputNodeNameToKey = this.mapSignatureEntries(signature.inputs);\n outputNodeNameToKey = this.mapSignatureEntries(signature.outputs);\n }\n const allNodes = Object.keys(nodes);\n allNodes.forEach(key => {\n const node = nodes[key];\n node.inputNames.forEach(name => {\n const [nodeName,] = getNodeNameAndIndex(name);\n node.inputs.push(nodes[nodeName]);\n nodes[nodeName].children.push(node);\n });\n });\n // if signature has not outputs set, add any node that does not have\n // outputs.\n if (Object.keys(outputNodeNameToKey).length === 0) {\n allNodes.forEach(key => {\n const node = nodes[key];\n if (node.children.length === 0) {\n outputs.push(node);\n }\n });\n }\n else {\n Object.keys(outputNodeNameToKey).forEach(name => {\n const [nodeName,] = getNodeNameAndIndex(name);\n const node = nodes[nodeName];\n if (node != null) {\n node.signatureKey = outputNodeNameToKey[name];\n outputs.push(node);\n }\n });\n }\n if (Object.keys(inputNodeNameToKey).length > 0) {\n Object.keys(inputNodeNameToKey).forEach(name => {\n const [nodeName,] = getNodeNameAndIndex(name);\n const node = nodes[nodeName];\n if (node) {\n node.signatureKey = inputNodeNameToKey[name];\n inputs.push(node);\n }\n });\n }\n else {\n inputs = placeholders;\n }\n let functions = {};\n if (graph.library != null && graph.library.function != null) {\n functions = graph.library.function.reduce((functions, func) => {\n functions[func.signature.name] = this.mapFunction(func);\n return functions;\n }, {});\n }\n const result = { nodes, inputs, outputs, weights, placeholders, signature, functions };\n if (initNodes.length > 0) {\n result.initNodes = initNodes;\n }\n return result;\n }\n mapSignatureEntries(entries) {\n return Object.keys(entries || {})\n .reduce((prev, curr) => {\n prev[entries[curr].name] = curr;\n return prev;\n }, {});\n }\n mapNode(node) {\n // Unsupported ops will cause an error at run-time (not parse time), since\n // they may not be used by the actual execution subgraph.\n const mapper = getRegisteredOp(node.op) || this.opMappers[node.op] || {};\n if (node.attr == null) {\n node.attr = {};\n }\n const newNode = {\n name: node.name,\n op: node.op,\n category: mapper.category,\n inputNames: (node.input ||\n []).map(input => input.startsWith('^') ? 
input.substr(1) : input),\n inputs: [],\n children: [],\n inputParams: {},\n attrParams: {},\n rawAttrs: node.attr\n };\n if (mapper.inputs != null) {\n newNode.inputParams =\n mapper.inputs.reduce((map, param) => {\n map[param.name] = {\n type: param.type,\n inputIndexStart: param.start,\n inputIndexEnd: param.end\n };\n return map;\n }, {});\n }\n if (mapper.attrs != null) {\n newNode.attrParams =\n mapper.attrs.reduce((map, param) => {\n const type = param.type;\n let value = undefined;\n switch (param.type) {\n case 'string':\n value = getStringParam(node.attr, param.tfName, param.defaultValue);\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getStringParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'string[]':\n value = getStringArrayParam(node.attr, param.tfName, param.defaultValue);\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getStringArrayParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'number':\n value = getNumberParam(node.attr, param.tfName, (param.defaultValue || 0));\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getNumberParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'number[]':\n value = getNumericArrayParam(node.attr, param.tfName, param.defaultValue);\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getNumericArrayParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'bool':\n value = getBoolParam(node.attr, param.tfName, param.defaultValue);\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getBoolParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'bool[]':\n value = getBoolArrayParam(node.attr, param.tfName, param.defaultValue);\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getBoolArrayParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'shape':\n value = getTensorShapeParam(node.attr, param.tfName, param.defaultValue);\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getTensorShapeParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'shape[]':\n value = getTensorShapeArrayParam(node.attr, param.tfName, param.defaultValue);\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getTensorShapeArrayParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'dtype':\n value = getDtypeParam(node.attr, param.tfName, param.defaultValue);\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getDtypeParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'dtype[]':\n value = getDtypeArrayParam(node.attr, param.tfName, param.defaultValue);\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getDtypeArrayParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'func':\n value = getFuncParam(node.attr, param.tfName, param.defaultValue);\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getFuncParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'tensor':\n case 'tensors':\n break;\n default:\n throw new Error(`Unsupported param type: ${param.type} for op: ${node.op}`);\n }\n map[param.name] = { value, type };\n return map;\n }, {});\n }\n return newNode;\n }\n // map the TFunctionDef to TFJS graph object\n mapFunction(functionDef) {\n const tfNodes = functionDef.nodeDef;\n 
const placeholders = [];\n const weights = [];\n let nodes = {};\n if (tfNodes != null) {\n nodes = tfNodes.reduce((map, node) => {\n map[node.name] = this.mapNode(node);\n if (node.op === 'Const') {\n weights.push(map[node.name]);\n }\n return map;\n }, {});\n }\n const inputs = [];\n const outputs = [];\n functionDef.signature.inputArg.forEach(arg => {\n const [nodeName,] = getNodeNameAndIndex(arg.name);\n const node = {\n name: nodeName,\n op: 'Placeholder',\n inputs: [],\n inputNames: [],\n category: 'graph',\n inputParams: {},\n attrParams: { dtype: { value: parseDtypeParam(arg.type), type: 'dtype' } },\n children: []\n };\n node.signatureKey = arg.name;\n inputs.push(node);\n nodes[nodeName] = node;\n });\n const allNodes = Object.keys(nodes);\n allNodes.forEach(key => {\n const node = nodes[key];\n node.inputNames.forEach(name => {\n const [nodeName,] = getNodeNameAndIndex(name);\n node.inputs.push(nodes[nodeName]);\n nodes[nodeName].children.push(node);\n });\n });\n const returnNodeMap = functionDef.ret;\n functionDef.signature.outputArg.forEach(output => {\n const [nodeName, index] = getNodeNameAndIndex(returnNodeMap[output.name]);\n const node = nodes[nodeName];\n if (node != null) {\n node.defaultOutput = index;\n outputs.push(node);\n }\n });\n const signature = this.mapArgsToSignature(functionDef);\n return { nodes, inputs, outputs, weights, placeholders, signature };\n }\n mapArgsToSignature(functionDef) {\n return {\n methodName: functionDef.signature.name,\n inputs: functionDef.signature.inputArg.reduce((map, arg) => {\n map[arg.name] = this.mapArgToTensorInfo(arg);\n return map;\n }, {}),\n outputs: functionDef.signature.outputArg.reduce((map, arg) => {\n map[arg.name] = this.mapArgToTensorInfo(arg, functionDef.ret);\n return map;\n }, {}),\n };\n }\n mapArgToTensorInfo(arg, nameMap) {\n let name = arg.name;\n if (nameMap != null) {\n name = nameMap[name];\n }\n return { name, dtype: arg.type };\n }\n}\nexport function decodeBase64(text) {\n const global = env().global;\n if (typeof global.atob !== 'undefined') {\n return global.atob(text);\n }\n else if (typeof Buffer !== 'undefined') {\n return new Buffer(text, 'base64').toString();\n }\n else {\n throw new Error('Unable to decode base64 in this environment. ' +\n 'Missing built-in atob() or Buffer()');\n }\n}\nexport function parseStringParam(s, keepCase) {\n const value = Array.isArray(s) ? String.fromCharCode.apply(null, s) : decodeBase64(s);\n return keepCase ? value : value.toLowerCase();\n}\nexport function getStringParam(attrs, name, def, keepCase = false) {\n const param = attrs[name];\n if (param != null) {\n return parseStringParam(param.s, keepCase);\n }\n return def;\n}\nexport function getBoolParam(attrs, name, def) {\n const param = attrs[name];\n return param ? param.b : def;\n}\nexport function getNumberParam(attrs, name, def) {\n const param = attrs[name] || {};\n const value = param['i'] != null ? param['i'] : (param['f'] != null ? param['f'] : def);\n return (typeof value === 'number') ? 
value : parseInt(value, 10);\n}\nexport function parseDtypeParam(value) {\n if (typeof (value) === 'string') {\n // tslint:disable-next-line:no-any\n value = tensorflow.DataType[value];\n }\n switch (value) {\n case tensorflow.DataType.DT_FLOAT:\n return 'float32';\n case tensorflow.DataType.DT_INT32:\n case tensorflow.DataType.DT_INT64:\n case tensorflow.DataType.DT_INT8:\n case tensorflow.DataType.DT_UINT8:\n return 'int32';\n case tensorflow.DataType.DT_BOOL:\n return 'bool';\n case tensorflow.DataType.DT_DOUBLE:\n return 'float32';\n case tensorflow.DataType.DT_STRING:\n return 'string';\n default:\n // Unknown dtype error will happen at runtime (instead of parse time),\n // since these nodes might not be used by the actual subgraph execution.\n return null;\n }\n}\nexport function getFuncParam(attrs, name, def) {\n const param = attrs[name];\n if (param && param.func) {\n return param.func.name;\n }\n return def;\n}\nexport function getDtypeParam(attrs, name, def) {\n const param = attrs[name];\n if (param && param.type) {\n return parseDtypeParam(param.type);\n }\n return def;\n}\nexport function getDtypeArrayParam(attrs, name, def) {\n const param = attrs[name];\n if (param && param.list && param.list.type) {\n return param.list.type.map(v => parseDtypeParam(v));\n }\n return def;\n}\nexport function parseTensorShapeParam(shape) {\n if (shape.unknownRank) {\n return undefined;\n }\n if (shape.dim != null) {\n return shape.dim.map(dim => (typeof dim.size === 'number') ? dim.size : parseInt(dim.size, 10));\n }\n return [];\n}\nexport function getTensorShapeParam(attrs, name, def) {\n const param = attrs[name];\n if (param && param.shape) {\n return parseTensorShapeParam(param.shape);\n }\n return def;\n}\nexport function getNumericArrayParam(attrs, name, def) {\n const param = attrs[name];\n if (param) {\n return ((param.list.f && param.list.f.length ? param.list.f :\n param.list.i) ||\n [])\n .map(v => (typeof v === 'number') ? v : parseInt(v, 10));\n }\n return def;\n}\nexport function getStringArrayParam(attrs, name, def, keepCase = false) {\n const param = attrs[name];\n if (param && param.list && param.list.s) {\n return param.list.s.map((v) => {\n return parseStringParam(v, keepCase);\n });\n }\n return def;\n}\nexport function getTensorShapeArrayParam(attrs, name, def) {\n const param = attrs[name];\n if (param && param.list && param.list.shape) {\n return param.list.shape.map((v) => {\n return parseTensorShapeParam(v);\n });\n }\n return def;\n}\nexport function getBoolArrayParam(attrs, name, def) {\n const param = attrs[name];\n if (param && param.list && param.list.b) {\n return param.list.b;\n }\n return def;\n}\n//# sourceMappingURL=operation_mapper.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getTensor } from '../executors/utils';\nimport { getBoolArrayParam, getBoolParam, getDtypeArrayParam, getDtypeParam, getNumberParam, getNumericArrayParam, getStringArrayParam, getStringParam, getTensorShapeArrayParam, getTensorShapeParam } from '../operation_mapper';\n/**\n * Helper class for lookup inputs and params for nodes in the model graph.\n */\nexport class NodeValueImpl {\n constructor(node, tensorMap, context) {\n this.node = node;\n this.tensorMap = tensorMap;\n this.context = context;\n this.inputs = [];\n this.attrs = {};\n this.inputs = node.inputNames.map(name => this.getInput(name));\n if (node.rawAttrs != null) {\n this.attrs = Object.keys(node.rawAttrs)\n .reduce((attrs, key) => {\n attrs[key] = this.getAttr(key);\n return attrs;\n }, {});\n }\n }\n /**\n * Return the value of the attribute or input param.\n * @param name String: name of attribute or input param.\n */\n getInput(name) {\n return getTensor(name, this.tensorMap, this.context);\n }\n /**\n * Return the value of the attribute or input param.\n * @param name String: name of attribute or input param.\n */\n getAttr(name, defaultValue) {\n const value = this.node.rawAttrs[name];\n if (value.tensor != null) {\n return getTensor(name, this.tensorMap, this.context);\n }\n if (value.i != null || value.f != null) {\n return getNumberParam(this.node.rawAttrs, name, defaultValue);\n }\n if (value.s != null) {\n return getStringParam(this.node.rawAttrs, name, defaultValue);\n }\n if (value.b != null) {\n return getBoolParam(this.node.rawAttrs, name, defaultValue);\n }\n if (value.shape != null) {\n return getTensorShapeParam(this.node.rawAttrs, name, defaultValue);\n }\n if (value.type != null) {\n return getDtypeParam(this.node.rawAttrs, name, defaultValue);\n }\n if (value.list != null) {\n if (value.list.i != null || value.list.f != null) {\n return getNumericArrayParam(this.node.rawAttrs, name, defaultValue);\n }\n if (value.list.s != null) {\n return getStringArrayParam(this.node.rawAttrs, name, defaultValue);\n }\n if (value.list.shape != null) {\n return getTensorShapeArrayParam(this.node.rawAttrs, name, defaultValue);\n }\n if (value.list.b != null) {\n return getBoolArrayParam(this.node.rawAttrs, name, defaultValue);\n }\n if (value.list.type != null) {\n return getDtypeArrayParam(this.node.rawAttrs, name, defaultValue);\n }\n }\n return defaultValue;\n }\n}\n//# sourceMappingURL=node_value_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * This file exports ops used by the converters executors. By default it\n * re-exports all ops. In a custom build this is aliased to a file that will\n * only exports ops for a given model.json.\n */\nexport * from './ops';\n//# sourceMappingURL=ops_for_converter.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// tslint:disable-next-line: no-imports-from-dist\nimport * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter';\nimport { getParamValue } from './utils';\nexport const executeOp = (node, tensorMap, context) => {\n switch (node.op) {\n case 'BiasAdd':\n case 'AddV2':\n case 'Add': {\n return [tfOps.add(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'AddN': {\n return [tfOps.addN(getParamValue('tensors', node, tensorMap, context))];\n }\n case 'FloorMod':\n case 'Mod':\n return [tfOps.mod(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n case 'Mul':\n return [tfOps.mul(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n case 'RealDiv':\n case 'Div': {\n return [tfOps.div(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'DivNoNan': {\n return [tfOps.divNoNan(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'FloorDiv': {\n return [tfOps.floorDiv(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'Sub': {\n return [tfOps.sub(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'Minimum': {\n return [tfOps.minimum(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'Maximum': {\n return [tfOps.maximum(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'Pow': {\n return [tfOps.pow(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'SquaredDifference': {\n return [tfOps.squaredDifference(getParamValue('a', node, 
tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'arithmetic';\n//# sourceMappingURL=arithmetic_executor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// tslint:disable-next-line: no-imports-from-dist\nimport * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter';\nimport { getParamValue, getTensor } from './utils';\nexport const executeOp = (node, tensorMap, context) => {\n switch (node.op) {\n case 'Abs':\n case 'ComplexAbs':\n return [tfOps.abs(getParamValue('x', node, tensorMap, context))];\n case 'Acos':\n return [tfOps.acos(getParamValue('x', node, tensorMap, context))];\n case 'Acosh':\n return [tfOps.acosh(getParamValue('x', node, tensorMap, context))];\n case 'Asin':\n return [tfOps.asin(getParamValue('x', node, tensorMap, context))];\n case 'Asinh':\n return [tfOps.asinh(getParamValue('x', node, tensorMap, context))];\n case 'Atan':\n return [tfOps.atan(getParamValue('x', node, tensorMap, context))];\n case 'Atan2':\n return [tfOps.atan2(getParamValue('x', node, tensorMap, context), getParamValue('y', node, tensorMap, context))];\n case 'Atanh':\n return [tfOps.atanh(getParamValue('x', node, tensorMap, context))];\n case 'Ceil':\n return [tfOps.ceil(getParamValue('x', node, tensorMap, context))];\n case 'Complex':\n return [tfOps.complex(getParamValue('real', node, tensorMap, context), getParamValue('imag', node, tensorMap, context))];\n case 'Cos':\n return [tfOps.cos(getParamValue('x', node, tensorMap, context))];\n case 'Cosh':\n return [tfOps.cosh(getParamValue('x', node, tensorMap, context))];\n case 'Elu':\n return [tfOps.elu(getParamValue('x', node, tensorMap, context))];\n case 'Erf':\n return [tfOps.erf(getParamValue('x', node, tensorMap, context))];\n case 'Exp':\n return [tfOps.exp(getParamValue('x', node, tensorMap, context))];\n case 'Expm1': {\n return [tfOps.expm1(getParamValue('x', node, tensorMap, context))];\n }\n case 'Floor':\n return [tfOps.floor(getParamValue('x', node, tensorMap, context))];\n case 'Log':\n return [tfOps.log(getParamValue('x', node, tensorMap, context))];\n case 'Log1p': {\n return [tfOps.log1p(getParamValue('x', node, tensorMap, context))];\n }\n case 'Imag':\n return [tfOps.imag(getParamValue('x', node, tensorMap, context))];\n case 'Neg':\n return [tfOps.neg(getParamValue('x', node, tensorMap, context))];\n case 'Reciprocal': {\n return [tfOps.reciprocal(getParamValue('x', node, tensorMap, context))];\n }\n case 'Real':\n return [tfOps.real(getParamValue('x', node, tensorMap, context))];\n case 'Relu':\n return [tfOps.relu(getParamValue('x', node, tensorMap, context))];\n case 'Round': {\n return [tfOps.round(getParamValue('x', node, tensorMap, context))];\n }\n case 'Selu':\n return [tfOps.selu(getParamValue('x', node, 
tensorMap, context))];\n case 'Sigmoid':\n return [tfOps.sigmoid(getParamValue('x', node, tensorMap, context))];\n case 'Sin':\n return [tfOps.sin(getParamValue('x', node, tensorMap, context))];\n case 'Sign': {\n return [tfOps.sign(getParamValue('x', node, tensorMap, context))];\n }\n case 'Sinh': {\n return [tfOps.sinh(getParamValue('x', node, tensorMap, context))];\n }\n case 'Softplus': {\n return [tfOps.softplus(getParamValue('x', node, tensorMap, context))];\n }\n case 'Sqrt': {\n return [tfOps.sqrt(getParamValue('x', node, tensorMap, context))];\n }\n case 'Square': {\n return [tfOps.square(getParamValue('x', node, tensorMap, context))];\n }\n case 'Tanh': {\n return [tfOps.tanh(getParamValue('x', node, tensorMap, context))];\n }\n case 'Tan':\n return [tfOps.tan(getParamValue('x', node, tensorMap, context))];\n case 'Relu6':\n case 'ClipByValue':\n return [tfOps.clipByValue(getParamValue('x', node, tensorMap, context), getParamValue('clipValueMin', node, tensorMap, context), getParamValue('clipValueMax', node, tensorMap, context))];\n case 'Rsqrt':\n return [tfOps.rsqrt(getTensor(node.inputNames[0], tensorMap, context))];\n case 'Prod':\n return [tfOps.prod(getParamValue('x', node, tensorMap, context), getParamValue('axes', node, tensorMap, context))];\n case 'LeakyRelu':\n return [tfOps.leakyRelu(getParamValue('x', node, tensorMap, context), getParamValue('alpha', node, tensorMap, context))];\n case 'Prelu':\n return [tfOps.prelu(getParamValue('x', node, tensorMap, context), getParamValue('alpha', node, tensorMap, context))];\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'basic_math';\n//# sourceMappingURL=basic_math_executor.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * This differs from util.assertShapesMatch in that it allows values of\n * negative one, an undefined size of a dimensinon, in a shape to match\n * anything.\n */\nimport { util } from '@tensorflow/tfjs-core';\nexport function assertShapesMatchAllowUndefinedSize(shapeA, shapeB, errorMessagePrefix = '') {\n util.assert(shapesEqualAllowUndefinedSize(shapeA, shapeB), () => errorMessagePrefix + ` Shapes ${shapeA} and ${shapeB} must match`);\n}\nexport function shapesEqualAllowUndefinedSize(n1, n2) {\n if (n1.length !== n2.length) {\n return false;\n }\n for (let i = 0; i < n1.length; i++) {\n if (n1[i] !== -1 && n2[i] !== -1 && n1[i] !== n2[i]) {\n return false;\n }\n }\n return true;\n}\n//# sourceMappingURL=tensor_utils.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { concat, keep, reshape, scalar, slice, stack, tensor, tidy, unstack } from '@tensorflow/tfjs-core';\nimport { assertShapesMatchAllowUndefinedSize } from './tensor_utils';\n/**\n * The TensorArray object keeps an array of Tensors. It\n * allows reading from the array and writing to the array.\n */\nexport class TensorArray {\n constructor(name, dtype, maxSize, elementShape, identicalElementShapes, dynamicSize, clearAfterRead) {\n this.name = name;\n this.dtype = dtype;\n this.maxSize = maxSize;\n this.elementShape = elementShape;\n this.identicalElementShapes = identicalElementShapes;\n this.dynamicSize = dynamicSize;\n this.clearAfterRead = clearAfterRead;\n this.tensors = [];\n this.closed_ = false;\n this.idTensor = scalar(0);\n keep(this.idTensor);\n }\n get id() {\n return this.idTensor.id;\n }\n get closed() {\n return this.closed_;\n }\n /**\n * Dispose the tensors and idTensor and mark the TensoryArray as closed.\n */\n clearAndClose(keepIds) {\n this.tensors.forEach(tensor => {\n if (keepIds == null || !keepIds.has(tensor.tensor.id)) {\n tensor.tensor.dispose();\n }\n });\n this.tensors = [];\n this.closed_ = true;\n this.idTensor.dispose();\n }\n size() {\n return this.tensors.length;\n }\n /**\n * Read the value at location index in the TensorArray.\n * @param index Number the index to read from.\n */\n read(index) {\n if (this.closed_) {\n throw new Error(`TensorArray ${this.name} has already been closed.`);\n }\n if (index < 0 || index >= this.size()) {\n throw new Error(`Tried to read from index ${index}, but array size is: ${this.size()}`);\n }\n const tensorWithState = this.tensors[index];\n if (tensorWithState.cleared) {\n throw new Error(`TensorArray ${this.name}: Could not read index ${index} twice because it was cleared after a previous read ` +\n `(perhaps try setting clear_after_read = false?).`);\n }\n if (this.clearAfterRead) {\n tensorWithState.cleared = true;\n }\n tensorWithState.read = true;\n return tensorWithState.tensor;\n }\n /**\n * Helper method to read multiple tensors from the specified indices.\n */\n readMany(indices) {\n return indices.map(index => this.read(index));\n }\n /**\n * Write value into the index of the TensorArray.\n * @param index number the index to write to.\n * @param tensor\n */\n write(index, tensor) {\n if (this.closed_) {\n throw new Error(`TensorArray ${this.name} has already been closed.`);\n }\n if (index < 0 || !this.dynamicSize && index >= this.maxSize) {\n throw new Error(`Tried to write to index ${index}, but array is not resizeable and size is: ${this.maxSize}`);\n }\n const t = this.tensors[index] || {};\n if (tensor.dtype !== this.dtype) {\n throw new Error(`TensorArray ${this.name}: Could not write to TensorArray index ${index},\n because the value dtype is ${tensor.dtype}, but TensorArray dtype is ${this.dtype}.`);\n }\n // Set the shape for the first 
time write to unknow shape tensor array\n if (this.size() === 0 &&\n (this.elementShape == null || this.elementShape.length === 0)) {\n this.elementShape = tensor.shape;\n }\n assertShapesMatchAllowUndefinedSize(this.elementShape, tensor.shape, `TensorArray ${this.name}: Could not write to TensorArray index ${index}.`);\n if (t.read) {\n throw new Error(`TensorArray ${this.name}: Could not write to TensorArray index ${index}, because it has already been read.`);\n }\n if (t.written) {\n throw new Error(`TensorArray ${this.name}: Could not write to TensorArray index ${index}, because it has already been written.`);\n }\n t.tensor = tensor;\n keep(tensor);\n t.written = true;\n this.tensors[index] = t;\n }\n /**\n * Helper method to write multiple tensors to the specified indices.\n */\n writeMany(indices, tensors) {\n if (indices.length !== tensors.length) {\n throw new Error(`TensorArray ${this.name}: could not write multiple tensors,` +\n `because the index size: ${indices.length} is not the same as tensors size: ${tensors.length}.`);\n }\n indices.forEach((i, index) => this.write(i, tensors[index]));\n }\n /**\n * Return selected values in the TensorArray as a packed Tensor. All of\n * selected values must have been written and their shapes must all match.\n * @param [indices] number[] Optional. Taking values in [0, max_value). If the\n * TensorArray is not dynamic, max_value=size(). If not specified returns\n * all tensors in the original order.\n * @param [dtype]\n */\n gather(indices, dtype) {\n if (!!dtype && dtype !== this.dtype) {\n throw new Error(`TensorArray dtype is ${this.dtype} but gather requested dtype ${dtype}`);\n }\n if (!indices) {\n indices = [];\n for (let i = 0; i < this.size(); i++) {\n indices.push(i);\n }\n }\n else {\n indices = indices.slice(0, this.size());\n }\n if (indices.length === 0) {\n return tensor([], [0].concat(this.elementShape));\n }\n // Read all the PersistentTensors into a vector to keep track of\n // their memory.\n const tensors = this.readMany(indices);\n assertShapesMatchAllowUndefinedSize(this.elementShape, tensors[0].shape, 'TensorArray shape mismatch: ');\n return stack(tensors, 0);\n }\n /**\n * Return the values in the TensorArray as a concatenated Tensor.\n */\n concat(dtype) {\n if (!!dtype && dtype !== this.dtype) {\n throw new Error(`TensorArray dtype is ${this.dtype} but concat requested dtype ${dtype}`);\n }\n if (this.size() === 0) {\n return tensor([], [0].concat(this.elementShape));\n }\n const indices = [];\n for (let i = 0; i < this.size(); i++) {\n indices.push(i);\n }\n // Collect all the tensors from the tensors array.\n const tensors = this.readMany(indices);\n assertShapesMatchAllowUndefinedSize(this.elementShape, tensors[0].shape, `TensorArray shape mismatch: tensor array shape (${this.elementShape}) vs first tensor shape (${tensors[0].shape})`);\n return concat(tensors, 0);\n }\n /**\n * Scatter the values of a Tensor in specific indices of a TensorArray.\n * @param indices nummber[] values in [0, max_value). If the\n * TensorArray is not dynamic, max_value=size().\n * @param tensor Tensor input tensor.\n */\n scatter(indices, tensor) {\n if (tensor.dtype !== this.dtype) {\n throw new Error(`TensorArray dtype is ${this.dtype} but tensor has dtype ${tensor.dtype}`);\n }\n if (indices.length !== tensor.shape[0]) {\n throw new Error(`Expected len(indices) == tensor.shape[0], but saw: ${indices.length} vs. 
${tensor.shape[0]}`);\n }\n const maxIndex = Math.max(...indices);\n if (!this.dynamicSize && maxIndex >= this.maxSize) {\n throw new Error(`Max index must be < array size (${maxIndex} vs. ${this.maxSize})`);\n }\n this.writeMany(indices, unstack(tensor, 0));\n }\n /**\n * Split the values of a Tensor into the TensorArray.\n * @param length number[] with the lengths to use when splitting value along\n * its first dimension.\n * @param tensor Tensor, the tensor to split.\n */\n split(length, tensor) {\n if (tensor.dtype !== this.dtype) {\n throw new Error(`TensorArray dtype is ${this.dtype} but tensor has dtype ${tensor.dtype}`);\n }\n let totalLength = 0;\n const cumulativeLengths = length.map(len => {\n totalLength += len;\n return totalLength;\n });\n if (totalLength !== tensor.shape[0]) {\n throw new Error(`Expected sum of lengths to be equal to\n tensor.shape[0], but sum of lengths is\n ${totalLength}, and tensor's shape is: ${tensor.shape}`);\n }\n if (!this.dynamicSize && length.length !== this.maxSize) {\n throw new Error(`TensorArray's size is not equal to the size of lengths (${this.maxSize} vs. ${length.length}), ` +\n 'and the TensorArray is not marked as dynamically resizeable');\n }\n const elementPerRow = totalLength === 0 ? 0 : tensor.size / totalLength;\n const tensors = [];\n tidy(() => {\n tensor = reshape(tensor, [1, totalLength, elementPerRow]);\n for (let i = 0; i < length.length; ++i) {\n const previousLength = (i === 0) ? 0 : cumulativeLengths[i - 1];\n const indices = [0, previousLength, 0];\n const sizes = [1, length[i], elementPerRow];\n tensors[i] = reshape(slice(tensor, indices, sizes), this.elementShape);\n }\n return tensors;\n });\n const indices = [];\n for (let i = 0; i < length.length; i++) {\n indices[i] = i;\n }\n this.writeMany(indices, tensors);\n }\n}\n//# sourceMappingURL=tensor_array.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { concat, keep, reshape, scalar, slice, stack, tensor, tidy, unstack } from '@tensorflow/tfjs-core';\nimport { assertShapesMatchAllowUndefinedSize } from './tensor_utils';\n/**\n * TensorList stores a container of `tf.Tensor` objects, which are accessible\n * via tensors field.\n *\n * In order to get a copy of the underlying list, use the copy method:\n * ```\n * TensorList b = a.copy();\n * b.tensors().pushBack(t); // This does not modify a.tensors().\n * ```\n *\n * Note that this is not a deep copy: the memory locations of the underlying\n * tensors will still point to the same locations of the corresponding tensors\n * in the original.\n */\nexport class TensorList {\n /**\n *\n * @param tensors list of tensors\n * @param elementShape shape of each tensor\n * @param elementDtype data type of each tensor\n * @param maxNumElements The maximum allowed size of `tensors`. 
Defaults to -1\n * meaning that the size of `tensors` is unbounded.\n */\n constructor(tensors, elementShape, elementDtype, maxNumElements = -1) {\n this.tensors = tensors;\n this.elementShape = elementShape;\n this.elementDtype = elementDtype;\n if (tensors != null) {\n tensors.forEach(tensor => {\n if (elementDtype !== tensor.dtype) {\n throw new Error(`Invalid data types; op elements ${elementDtype}, but list elements ${tensor.dtype}`);\n }\n assertShapesMatchAllowUndefinedSize(elementShape, tensor.shape, 'TensorList shape mismatch: ');\n keep(tensor);\n });\n }\n this.idTensor = scalar(0);\n this.maxNumElements = maxNumElements;\n keep(this.idTensor);\n }\n get id() {\n return this.idTensor.id;\n }\n /**\n * Get a new TensorList containing a copy of the underlying tensor container.\n */\n copy() {\n return new TensorList([...this.tensors], this.elementShape, this.elementDtype);\n }\n /**\n * Dispose the tensors and idTensor and clear the tensor list.\n */\n clearAndClose(keepIds) {\n this.tensors.forEach(tensor => {\n if (keepIds == null || !keepIds.has(tensor.id)) {\n tensor.dispose();\n }\n });\n this.tensors.length = 0;\n this.idTensor.dispose();\n }\n /**\n * The size of the tensors in the tensor list.\n */\n size() {\n return this.tensors.length;\n }\n /**\n * Return a tensor that stacks a list of rank-R tf.Tensors into one rank-(R+1)\n * tf.Tensor.\n * @param elementShape shape of each tensor\n * @param elementDtype data type of each tensor\n * @param numElements the number of elements to stack\n */\n stack(elementShape, elementDtype, numElements = -1) {\n if (elementDtype !== this.elementDtype) {\n throw new Error(`Invalid data types; op elements ${elementDtype}, but list elements ${this.elementDtype}`);\n }\n if (numElements !== -1 && this.tensors.length !== numElements) {\n throw new Error(`Operation expected a list with ${numElements} elements but got a list with ${this.tensors.length} elements.`);\n }\n assertShapesMatchAllowUndefinedSize(elementShape, this.elementShape, 'TensorList shape mismatch: ');\n return tidy(() => {\n const reshapedTensors = this.tensors.map(tensor => reshape(tensor, elementShape));\n return stack(reshapedTensors, 0);\n });\n }\n /**\n * Pop a tensor from the end of the list.\n * @param elementShape shape of the tensor\n * @param elementDtype data type of the tensor\n */\n popBack(elementShape, elementDtype) {\n if (elementDtype !== this.elementDtype) {\n throw new Error(`Invalid data types; op elements ${elementDtype}, but list elements ${this.elementDtype}`);\n }\n if (this.size() === 0) {\n throw new Error('Trying to pop from an empty list.');\n }\n const tensor = this.tensors.pop();\n assertShapesMatchAllowUndefinedSize(tensor.shape, elementShape, 'TensorList shape mismatch: ');\n return reshape(tensor, elementShape);\n }\n /**\n * Push a tensor to the end of the list.\n * @param tensor Tensor to be pushed.\n */\n pushBack(tensor) {\n if (tensor.dtype !== this.elementDtype) {\n throw new Error(`Invalid data types; op elements ${tensor.dtype}, but list elements ${this.elementDtype}`);\n }\n assertShapesMatchAllowUndefinedSize(tensor.shape, this.elementShape, 'TensorList shape mismatch: ');\n if (this.maxNumElements === this.size()) {\n throw new Error(`Trying to push element into a full list.`);\n }\n keep(tensor);\n this.tensors.push(tensor);\n }\n /**\n * Update the size of the list.\n * @param size the new size of the list.\n */\n resize(size) {\n if (size < 0) {\n throw new Error(`TensorListResize expects size to be non-negative. 
Got: ${size}`);\n }\n if (this.maxNumElements !== -1 && size > this.maxNumElements) {\n throw new Error(`TensorListResize input size ${size} is greater maxNumElement ${this.maxNumElements}.`);\n }\n this.tensors.length = size;\n }\n /**\n * Retrieve the element at the provided index\n * @param elementShape shape of the tensor\n * @param elementDtype dtype of the tensor\n * @param elementIndex index of the tensor\n */\n getItem(elementIndex, elementShape, elementDtype) {\n if (elementDtype !== this.elementDtype) {\n throw new Error(`Invalid data types; op elements ${elementDtype}, but list elements ${this.elementDtype}`);\n }\n if (elementIndex < 0 || elementIndex > this.tensors.length) {\n throw new Error(`Trying to access element ${elementIndex} in a list with ${this.tensors.length} elements.`);\n }\n if (this.tensors[elementIndex] == null) {\n throw new Error(`element at index ${elementIndex} is null.`);\n }\n assertShapesMatchAllowUndefinedSize(this.tensors[elementIndex].shape, elementShape, 'TensorList shape mismatch: ');\n return this.tensors[elementIndex];\n }\n /**\n * Set the tensor at the index\n * @param elementIndex index of the tensor\n * @param tensor the tensor to be inserted into the list\n */\n setItem(elementIndex, tensor) {\n if (tensor.dtype !== this.elementDtype) {\n throw new Error(`Invalid data types; op elements ${tensor.dtype}, but list elements ${this.elementDtype}`);\n }\n if (elementIndex < 0 ||\n this.maxNumElements !== -1 && elementIndex >= this.maxNumElements) {\n throw new Error(`Trying to set element ${elementIndex} in a list with max ${this.maxNumElements} elements.`);\n }\n assertShapesMatchAllowUndefinedSize(this.elementShape, tensor.shape, 'TensorList shape mismatch: ');\n keep(tensor);\n this.tensors[elementIndex] = tensor;\n }\n /**\n * Return selected values in the TensorList as a stacked Tensor. 
All of\n * selected values must have been written and their shapes must all match.\n * @param indices indices of tensors to gather\n * @param elementDtype output tensor dtype\n * @param elementShape output tensor element shape\n */\n gather(indices, elementDtype, elementShape) {\n if (elementDtype !== this.elementDtype) {\n throw new Error(`Invalid data types; op elements ${elementDtype}, but list elements ${this.elementDtype}`);\n }\n assertShapesMatchAllowUndefinedSize(this.elementShape, elementShape, 'TensorList shape mismatch: ');\n // When indices is greater than the size of the list, indices beyond the\n // size of the list are ignored.\n indices = indices.slice(0, this.size());\n if (indices.length === 0) {\n return tensor([], [0].concat(this.elementShape));\n }\n return tidy(() => {\n const tensors = indices.map(i => reshape(this.tensors[i], elementShape));\n return stack(tensors, 0);\n });\n }\n /**\n * Return the values in the TensorList as a concatenated Tensor.\n * @param elementDtype output tensor dtype\n * @param elementShape output tensor element shape\n */\n concat(elementDtype, elementShape) {\n if (!!elementDtype && elementDtype !== this.elementDtype) {\n throw new Error(`TensorList dtype is ${this.elementDtype} but concat requested dtype ${elementDtype}`);\n }\n assertShapesMatchAllowUndefinedSize(this.elementShape, elementShape, 'TensorList shape mismatch: ');\n if (this.size() === 0) {\n return tensor([], [0].concat(this.elementShape));\n }\n return tidy(() => {\n const tensors = this.tensors.map(t => reshape(t, elementShape));\n return concat(tensors, 0);\n });\n }\n}\n/**\n * Creates a TensorList which, when stacked, has the value of tensor.\n * @param tensor from tensor\n * @param elementShape output tensor element shape\n */\nexport function fromTensor(tensor, elementShape, elementDtype) {\n const dtype = tensor.dtype;\n if (tensor.shape.length < 1) {\n throw new Error(`Tensor must be at least a vector, but saw shape: ${tensor.shape}`);\n }\n if (tensor.dtype !== elementDtype) {\n throw new Error(`Invalid data types; op elements ${tensor.dtype}, but list elements ${elementDtype}`);\n }\n const outputShape = tensor.shape.slice(1);\n assertShapesMatchAllowUndefinedSize(outputShape, elementShape, 'TensorList shape mismatch: ');\n const tensorList = unstack(tensor);\n return new TensorList(tensorList, elementShape, dtype);\n}\n/**\n * Return a TensorList of the given size with empty elements.\n * @param elementShape the shape of the future elements of the list\n * @param elementDtype the desired type of elements in the list\n * @param numElements the number of elements to reserve\n */\nexport function reserve(elementShape, elementDtype, numElements) {\n return new TensorList([], elementShape, elementDtype, numElements);\n}\n/**\n * Put tensors at specific indices of a stacked tensor into a TensorList.\n * @param indices list of indices on how to scatter the tensor.\n * @param tensor input tensor.\n * @param elementShape the shape of the future elements of the list\n * @param numElements the number of elements to scatter\n */\nexport function scatter(tensor, indices, elementShape, numElements) {\n if (indices.length !== tensor.shape[0]) {\n throw new Error(`Expected len(indices) == tensor.shape[0], but saw: ${indices.length} vs. ${tensor.shape[0]}`);\n }\n const maxIndex = Math.max(...indices);\n if (numElements != null && numElements !== -1 && maxIndex >= numElements) {\n throw new Error(`Max index must be < array size (${maxIndex} vs. 
${numElements})`);\n }\n const list = new TensorList([], elementShape, tensor.dtype, numElements);\n const tensors = unstack(tensor, 0);\n indices.forEach((value, index) => {\n list.setItem(value, tensors[index]);\n });\n return list;\n}\n/**\n * Split the values of a Tensor into a TensorList.\n * @param length the lengths to use when splitting value along\n * its first dimension.\n * @param tensor the tensor to split.\n * @param elementShape the shape of the future elements of the list\n */\nexport function split(tensor, length, elementShape) {\n let totalLength = 0;\n const cumulativeLengths = length.map(len => {\n totalLength += len;\n return totalLength;\n });\n if (totalLength !== tensor.shape[0]) {\n throw new Error(`Expected sum of lengths to be equal to\n tensor.shape[0], but sum of lengths is\n ${totalLength}, and tensor's shape is: ${tensor.shape}`);\n }\n const elementPerRow = totalLength === 0 ? 0 : tensor.size / totalLength;\n const tensors = tidy(() => {\n const tensors = [];\n tensor = reshape(tensor, [1, totalLength, elementPerRow]);\n for (let i = 0; i < length.length; ++i) {\n const previousLength = (i === 0) ? 0 : cumulativeLengths[i - 1];\n const indices = [0, previousLength, 0];\n const sizes = [1, length[i], elementPerRow];\n tensors[i] = reshape(slice(tensor, indices, sizes), elementShape);\n }\n tensor.dispose();\n return tensors;\n });\n const list = new TensorList([], elementShape, tensor.dtype, length.length);\n for (let i = 0; i < tensors.length; i++) {\n list.setItem(i, tensors[i]);\n }\n return list;\n}\n//# sourceMappingURL=tensor_list.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { scalar } from '@tensorflow/tfjs-core';\nimport { TensorArray } from '../../executor/tensor_array';\nimport { fromTensor, reserve, scatter, split } from '../../executor/tensor_list';\nimport { cloneTensor, getParamValue, getTensor } from './utils';\nexport const executeOp = async (node, tensorMap, context) => {\n switch (node.op) {\n case 'If':\n case 'StatelessIf': {\n const thenFunc = getParamValue('thenBranch', node, tensorMap, context);\n const elseFunc = getParamValue('elseBranch', node, tensorMap, context);\n const cond = getParamValue('cond', node, tensorMap, context);\n const args = getParamValue('args', node, tensorMap, context);\n const condValue = await cond.data();\n if (condValue[0]) {\n return context.functionMap[thenFunc].executeFunctionAsync(args, context.tensorArrayMap, context.tensorListMap);\n }\n else {\n return context.functionMap[elseFunc].executeFunctionAsync(args, context.tensorArrayMap, context.tensorListMap);\n }\n }\n case 'While':\n case 'StatelessWhile': {\n const bodyFunc = getParamValue('body', node, tensorMap, context);\n const condFunc = getParamValue('cond', node, tensorMap, context);\n const args = getParamValue('args', node, tensorMap, context);\n // Calculate the 
condition of the loop\n const condResult = (await context.functionMap[condFunc].executeFunctionAsync(args, context.tensorArrayMap, context.tensorListMap));\n const argIds = args.map(tensor => tensor.id);\n let condValue = await condResult[0].data();\n // Dispose the intermediate tensors for condition function\n condResult.forEach(tensor => {\n if (!tensor.kept && argIds.indexOf(tensor.id) === -1) {\n tensor.dispose();\n }\n });\n let result = args;\n while (condValue[0]) {\n // Record the previous result for intermediate tensor tracking\n const origResult = result;\n // Execution the body of the loop\n result = await context.functionMap[bodyFunc].executeFunctionAsync(result, context.tensorArrayMap, context.tensorListMap);\n const resultIds = result.map(tensor => tensor.id);\n // Dispose the intermediate tensor for body function that is not global\n // kept, not input/output of the body function\n origResult.forEach(tensor => {\n if (!tensor.kept && argIds.indexOf(tensor.id) === -1 &&\n resultIds.indexOf(tensor.id) === -1) {\n tensor.dispose();\n }\n });\n // Recalcuate the condition of the loop using the latest results.\n const condResult = (await context.functionMap[condFunc].executeFunctionAsync(result, context.tensorArrayMap, context.tensorListMap));\n condValue = await condResult[0].data();\n // Dispose the intermediate tensors for condition function\n condResult.forEach(tensor => {\n if (!tensor.kept && argIds.indexOf(tensor.id) === -1 &&\n resultIds.indexOf(tensor.id) === -1) {\n tensor.dispose();\n }\n });\n }\n return result;\n }\n case 'LoopCond': {\n const pred = getParamValue('pred', node, tensorMap, context);\n return [cloneTensor(pred)];\n }\n case 'Switch': {\n const pred = getParamValue('pred', node, tensorMap, context);\n let data = getParamValue('data', node, tensorMap, context);\n if (!data.kept) {\n data = cloneTensor(data);\n }\n // Outputs nodes :0 => false, :1 => true\n return (await pred.data())[0] ? 
[undefined, data] : [data, undefined];\n }\n case 'Merge': {\n const inputName = node.inputNames.find(name => getTensor(name, tensorMap, context) !== undefined);\n if (inputName) {\n const data = getTensor(inputName, tensorMap, context);\n return [cloneTensor(data)];\n }\n return undefined;\n }\n case 'Enter': {\n const frameId = getParamValue('frameName', node, tensorMap, context);\n const data = getParamValue('tensor', node, tensorMap, context);\n context.enterFrame(frameId);\n return [cloneTensor(data)];\n }\n case 'Exit': {\n const data = getParamValue('tensor', node, tensorMap, context);\n context.exitFrame();\n return [cloneTensor(data)];\n }\n case 'NextIteration': {\n const data = getParamValue('tensor', node, tensorMap, context);\n context.nextIteration();\n return [cloneTensor(data)];\n }\n case 'TensorArrayV3': {\n const size = getParamValue('size', node, tensorMap, context);\n const dtype = getParamValue('dtype', node, tensorMap, context);\n const elementShape = getParamValue('elementShape', node, tensorMap, context);\n const dynamicSize = getParamValue('dynamicSize', node, tensorMap, context);\n const clearAfterRead = getParamValue('clearAfterRead', node, tensorMap, context);\n const identicalElementShapes = getParamValue('identicalElementShapes', node, tensorMap, context);\n const name = getParamValue('name', node, tensorMap, context);\n const tensorArray = new TensorArray(name, dtype, size, elementShape, identicalElementShapes, dynamicSize, clearAfterRead);\n context.addTensorArray(tensorArray);\n return [tensorArray.idTensor, scalar(1.0)];\n }\n case 'TensorArrayWriteV3': {\n const id = getParamValue('tensorArrayId', node, tensorMap, context);\n const index = getParamValue('index', node, tensorMap, context);\n const writeTensor = getParamValue('tensor', node, tensorMap, context);\n const writeTensorArray = context.getTensorArray(id.id);\n writeTensorArray.write(index, writeTensor);\n return [writeTensorArray.idTensor];\n }\n case 'TensorArrayReadV3': {\n const readId = getParamValue('tensorArrayId', node, tensorMap, context);\n const readIndex = getParamValue('index', node, tensorMap, context);\n const readTensorArray = context.getTensorArray(readId.id);\n return [readTensorArray.read(readIndex)];\n }\n case 'TensorArrayGatherV3': {\n const gatherId = getParamValue('tensorArrayId', node, tensorMap, context);\n const gatherIndices = getParamValue('indices', node, tensorMap, context);\n const gatherDtype = getParamValue('dtype', node, tensorMap, context);\n const gatherTensorArray = context.getTensorArray(gatherId.id);\n return [gatherTensorArray.gather(gatherIndices, gatherDtype)];\n }\n case 'TensorArrayScatterV3': {\n const scatterId = getParamValue('tensorArrayId', node, tensorMap, context);\n const scatterIndices = getParamValue('indices', node, tensorMap, context);\n const scatterTensor = getParamValue('tensor', node, tensorMap, context);\n const scatterTensorArray = context.getTensorArray(scatterId.id);\n scatterTensorArray.scatter(scatterIndices, scatterTensor);\n return [scatterTensorArray.idTensor];\n }\n case 'TensorArrayConcatV3': {\n const concatId = getParamValue('tensorArrayId', node, tensorMap, context);\n const concatTensorArray = context.getTensorArray(concatId.id);\n const concatDtype = getParamValue('dtype', node, tensorMap, context);\n return [concatTensorArray.concat(concatDtype)];\n }\n case 'TensorArraySplitV3': {\n const splitId = getParamValue('tensorArrayId', node, tensorMap, context);\n const splitTensor = getParamValue('tensor', node, tensorMap, 
context);\n const lengths = getParamValue('lengths', node, tensorMap, context);\n const splitTensorArray = context.getTensorArray(splitId.id);\n splitTensorArray.split(lengths, splitTensor);\n return [splitTensorArray.idTensor];\n }\n case 'TensorArraySizeV3': {\n const sizeId = getParamValue('tensorArrayId', node, tensorMap, context);\n const sizeTensorArray = context.getTensorArray(sizeId.id);\n return [scalar(sizeTensorArray.size(), 'int32')];\n }\n case 'TensorArrayCloseV3': {\n const closeId = getParamValue('tensorArrayId', node, tensorMap, context);\n const closeTensorArray = context.getTensorArray(closeId.id);\n closeTensorArray.clearAndClose();\n return [closeTensorArray.idTensor];\n }\n case 'TensorListSetItem': {\n const idTensor = getParamValue('tensorListId', node, tensorMap, context);\n const index = getParamValue('index', node, tensorMap, context);\n const writeTensor = getParamValue('tensor', node, tensorMap, context);\n const tensorList = context.getTensorList(idTensor.id);\n tensorList.setItem(index, writeTensor);\n return [tensorList.idTensor];\n }\n case 'TensorListGetItem': {\n const idTensor = getParamValue('tensorListId', node, tensorMap, context);\n const readIndex = getParamValue('index', node, tensorMap, context);\n const elementShape = getParamValue('elementShape', node, tensorMap, context);\n const elementDType = getParamValue('elementDType', node, tensorMap, context);\n const tensorList = context.getTensorList(idTensor.id);\n return [tensorList.getItem(readIndex, elementShape, elementDType)];\n }\n case 'TensorListScatterV2':\n case 'TensorListScatter': {\n const scatterIndices = getParamValue('indices', node, tensorMap, context);\n const scatterTensor = getParamValue('tensor', node, tensorMap, context);\n const elementShape = getParamValue('elementShape', node, tensorMap, context);\n const numElements = getParamValue('numElements', node, tensorMap, context);\n const tensorList = scatter(scatterTensor, scatterIndices, elementShape, numElements);\n context.addTensorList(tensorList);\n return [tensorList.idTensor];\n }\n case 'TensorListReserve': {\n const elementShape = getParamValue('elementShape', node, tensorMap, context);\n const elementDtype = getParamValue('elementDType', node, tensorMap, context);\n const numElements = getParamValue('numElements', node, tensorMap, context);\n const tensorList = reserve(elementShape, elementDtype, numElements);\n context.addTensorList(tensorList);\n return [tensorList.idTensor];\n }\n case 'TensorListGather': {\n const gatherId = getParamValue('tensorListId', node, tensorMap, context);\n const gatherIndices = getParamValue('indices', node, tensorMap, context);\n const elementShape = getParamValue('elementShape', node, tensorMap, context);\n const elementDtype = getParamValue('elementDType', node, tensorMap, context);\n const tensorList = context.getTensorList(gatherId.id);\n return [tensorList.gather(gatherIndices, elementDtype, elementShape)];\n }\n case 'TensorListStack': {\n const idTensor = getParamValue('tensorListId', node, tensorMap, context);\n const elementShape = getParamValue('elementShape', node, tensorMap, context);\n const elementDtype = getParamValue('elementDType', node, tensorMap, context);\n const numElements = getParamValue('numElements', node, tensorMap, context);\n const tensorList = context.getTensorList(idTensor.id);\n return [tensorList.stack(elementShape, elementDtype, numElements)];\n }\n case 'TensorListFromTensor': {\n const tensor = getParamValue('tensor', node, tensorMap, context);\n const 
elementShape = getParamValue('elementShape', node, tensorMap, context);\n const elementDtype = getParamValue('elementDType', node, tensorMap, context);\n const tensorList = fromTensor(tensor, elementShape, elementDtype);\n context.addTensorList(tensorList);\n return [tensorList.idTensor];\n }\n case 'TensorListConcat': {\n const concatId = getParamValue('tensorListId', node, tensorMap, context);\n const tensorList = context.getTensorList(concatId.id);\n const concatDtype = getParamValue('dtype', node, tensorMap, context);\n const elementShape = getParamValue('elementShape', node, tensorMap, context);\n return [tensorList.concat(concatDtype, elementShape)];\n }\n case 'TensorListPushBack': {\n const idTensor = getParamValue('tensorListId', node, tensorMap, context);\n const writeTensor = getParamValue('tensor', node, tensorMap, context);\n const tensorList = context.getTensorList(idTensor.id);\n tensorList.pushBack(writeTensor);\n return [tensorList.idTensor];\n }\n case 'TensorListPopBack': {\n const idTensor = getParamValue('tensorListId', node, tensorMap, context);\n const elementShape = getParamValue('elementShape', node, tensorMap, context);\n const elementDType = getParamValue('elementDType', node, tensorMap, context);\n const tensorList = context.getTensorList(idTensor.id);\n return [tensorList.popBack(elementShape, elementDType)];\n }\n case 'TensorListSplit': {\n const splitTensor = getParamValue('tensor', node, tensorMap, context);\n const elementShape = getParamValue('elementShape', node, tensorMap, context);\n const lengths = getParamValue('lengths', node, tensorMap, context);\n const tensorList = split(splitTensor, lengths, elementShape);\n context.addTensorList(tensorList);\n return [tensorList.idTensor];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'control';\n//# sourceMappingURL=control_executor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// tslint:disable-next-line: no-imports-from-dist\nimport * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter';\nimport { getPadding, getParamValue } from './utils';\nfunction fusedConvAndDepthWiseParams(node, tensorMap, context) {\n const [extraOp, activationFunc] = getParamValue('fusedOps', node, tensorMap, context);\n const isBiasAdd = extraOp === 'biasadd';\n const isPrelu = activationFunc === 'prelu';\n const isBatchNorm = extraOp === 'fusedbatchnorm';\n const numArgs = getParamValue('numArgs', node, tensorMap, context);\n if (isBiasAdd) {\n if (isPrelu && numArgs !== 2) {\n throw new Error('FusedConv2d and DepthwiseConv2d with BiasAdd and Prelu ' +\n 'must have two extra arguments: bias and alpha.');\n }\n if (!isPrelu && numArgs !== 1) {\n throw new Error('FusedConv2d and DepthwiseConv2d with BiasAdd must have ' +\n 'one extra argument: bias.');\n }\n }\n if (isBatchNorm) {\n throw new Error('FusedConv2d and DepthwiseConv2d with FusedBatchNorm is not supported.');\n }\n const stride = getParamValue('strides', node, tensorMap, context);\n const pad = getPadding(node, tensorMap, context);\n const dataFormat = getParamValue('dataFormat', node, tensorMap, context)\n .toUpperCase();\n const dilations = getParamValue('dilations', node, tensorMap, context);\n const [biasArg, preluArg] = getParamValue('args', node, tensorMap, context);\n return {\n stride,\n pad,\n dataFormat,\n dilations,\n biasArg,\n preluArg,\n activationFunc\n };\n}\nexport const executeOp = (node, tensorMap, context) => {\n switch (node.op) {\n case 'Conv1D': {\n const stride = getParamValue('stride', node, tensorMap, context);\n const pad = getParamValue('pad', node, tensorMap, context);\n const dataFormat = getParamValue('dataFormat', node, tensorMap, context)\n .toUpperCase();\n const dilation = getParamValue('dilation', node, tensorMap, context);\n return [tfOps.conv1d(getParamValue('x', node, tensorMap, context), getParamValue('filter', node, tensorMap, context), stride, pad, dataFormat, dilation)];\n }\n case 'Conv2D': {\n const stride = getParamValue('strides', node, tensorMap, context);\n const pad = getPadding(node, tensorMap, context);\n const dataFormat = getParamValue('dataFormat', node, tensorMap, context)\n .toUpperCase();\n const dilations = getParamValue('dilations', node, tensorMap, context);\n return [tfOps.conv2d(getParamValue('x', node, tensorMap, context), getParamValue('filter', node, tensorMap, context), [stride[1], stride[2]], pad, dataFormat, [dilations[1], dilations[2]])];\n }\n case '_FusedConv2D': {\n const { stride, pad, dataFormat, dilations, biasArg, preluArg, activationFunc } = fusedConvAndDepthWiseParams(node, tensorMap, context);\n return [tfOps.fused.conv2d({\n x: getParamValue('x', node, tensorMap, context),\n filter: getParamValue('filter', node, tensorMap, context),\n strides: [stride[1], stride[2]],\n 
pad: pad,\n dataFormat: dataFormat,\n dilations: [dilations[1], dilations[2]],\n bias: biasArg,\n activation: activationFunc,\n preluActivationWeights: preluArg\n })];\n }\n case 'FusedDepthwiseConv2dNative': {\n const { stride, pad, dataFormat, dilations, biasArg, preluArg, activationFunc } = fusedConvAndDepthWiseParams(node, tensorMap, context);\n return [tfOps.fused.depthwiseConv2d({\n x: getParamValue('x', node, tensorMap, context),\n filter: getParamValue('filter', node, tensorMap, context),\n strides: [stride[1], stride[2]],\n pad: pad,\n dataFormat: dataFormat,\n dilations: [dilations[1], dilations[2]],\n bias: biasArg,\n activation: activationFunc,\n preluActivationWeights: preluArg\n })];\n }\n case 'Conv2DBackpropInput':\n case 'Conv2dTranspose': {\n const shape = getParamValue('outputShape', node, tensorMap, context);\n const stride = getParamValue('strides', node, tensorMap, context);\n const pad = getPadding(node, tensorMap, context);\n return [tfOps.conv2dTranspose(getParamValue('x', node, tensorMap, context), getParamValue('filter', node, tensorMap, context), shape, [stride[1], stride[2]], pad)];\n }\n case 'DepthwiseConv2dNative':\n case 'DepthwiseConv2d': {\n const stride = getParamValue('strides', node, tensorMap, context);\n const pad = getPadding(node, tensorMap, context);\n const dilations = getParamValue('dilations', node, tensorMap, context);\n const dataFormat = getParamValue('dataFormat', node, tensorMap, context)\n .toUpperCase();\n return [tfOps.depthwiseConv2d(getParamValue('input', node, tensorMap, context), getParamValue('filter', node, tensorMap, context), [stride[1], stride[2]], pad, dataFormat, [dilations[1], dilations[2]])];\n }\n case 'Conv3D': {\n const stride = getParamValue('strides', node, tensorMap, context);\n const pad = getParamValue('pad', node, tensorMap, context);\n const dataFormat = getParamValue('dataFormat', node, tensorMap, context)\n .toUpperCase();\n const dilations = getParamValue('dilations', node, tensorMap, context);\n return [tfOps.conv3d(getParamValue('x', node, tensorMap, context), getParamValue('filter', node, tensorMap, context), [stride[1], stride[2], stride[3]], pad, dataFormat, [dilations[1], dilations[2], dilations[3]])];\n }\n case 'AvgPool': {\n const stride = getParamValue('strides', node, tensorMap, context);\n const pad = getParamValue('pad', node, tensorMap, context);\n const kernelSize = getParamValue('kernelSize', node, tensorMap, context);\n return [tfOps.avgPool(getParamValue('x', node, tensorMap, context), [kernelSize[1], kernelSize[2]], [stride[1], stride[2]], pad)];\n }\n case 'MaxPool': {\n const stride = getParamValue('strides', node, tensorMap, context);\n const pad = getParamValue('pad', node, tensorMap, context);\n const kernelSize = getParamValue('kernelSize', node, tensorMap, context);\n return [tfOps.maxPool(getParamValue('x', node, tensorMap, context), [kernelSize[1], kernelSize[2]], [stride[1], stride[2]], pad)];\n }\n case 'MaxPoolWithArgmax': {\n const stride = getParamValue('strides', node, tensorMap, context);\n const pad = getParamValue('pad', node, tensorMap, context);\n const kernelSize = getParamValue('kernelSize', node, tensorMap, context);\n const includeBatchInIndex = getParamValue('includeBatchInIndex', node, tensorMap, context);\n const { result, indexes } = tfOps.maxPoolWithArgmax(getParamValue('x', node, tensorMap, context), [kernelSize[1], kernelSize[2]], [stride[1], stride[2]], pad, includeBatchInIndex);\n return [result, indexes];\n }\n case 'AvgPool3D': {\n const stride = 
getParamValue('strides', node, tensorMap, context);\n const pad = getParamValue('pad', node, tensorMap, context);\n const kernelSize = getParamValue('kernelSize', node, tensorMap, context);\n return [tfOps.avgPool3d(getParamValue('x', node, tensorMap, context), [kernelSize[1], kernelSize[2], kernelSize[3]], [stride[1], stride[2], stride[3]], pad)];\n }\n case 'MaxPool3D': {\n const stride = getParamValue('strides', node, tensorMap, context);\n const pad = getParamValue('pad', node, tensorMap, context);\n const kernelSize = getParamValue('kernelSize', node, tensorMap, context);\n return [tfOps.maxPool3d(getParamValue('x', node, tensorMap, context), [kernelSize[1], kernelSize[2], kernelSize[3]], [stride[1], stride[2], stride[3]], pad)];\n }\n case 'Dilation2D': {\n const strides = getParamValue('strides', node, tensorMap, context);\n const pad = getParamValue('pad', node, tensorMap, context);\n const dilations = getParamValue('dilations', node, tensorMap, context);\n // strides: [1, stride_height, stride_width, 1].\n const strideHeight = strides[1];\n const strideWidth = strides[2];\n // dilations: [1, dilation_height, dilation_width, 1].\n const dilationHeight = dilations[1];\n const dilationWidth = dilations[2];\n return [tfOps.dilation2d(getParamValue('x', node, tensorMap, context), getParamValue('filter', node, tensorMap, context), [strideHeight, strideWidth], pad, [dilationHeight, dilationWidth], 'NHWC' /* dataFormat */)];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'convolution';\n//# sourceMappingURL=convolution_executor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// tslint:disable-next-line: no-imports-from-dist\nimport * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter';\nimport { getParamValue } from './utils';\nexport const executeOp = (node, tensorMap, context) => {\n switch (node.op) {\n case 'Fill': {\n const shape = getParamValue('shape', node, tensorMap, context);\n const dtype = getParamValue('dtype', node, tensorMap, context);\n const value = getParamValue('value', node, tensorMap, context);\n return [tfOps.fill(shape, value, dtype)];\n }\n case 'LinSpace': {\n const start = getParamValue('start', node, tensorMap, context);\n const stop = getParamValue('stop', node, tensorMap, context);\n const num = getParamValue('num', node, tensorMap, context);\n return [tfOps.linspace(start, stop, num)];\n }\n case 'Multinomial': {\n const logits = getParamValue('logits', node, tensorMap, context);\n const numSamples = getParamValue('numSamples', node, tensorMap, context);\n const seed = getParamValue('seed', node, tensorMap, context);\n return [tfOps.multinomial(logits, numSamples, seed)];\n }\n case 'OneHot': {\n const indices = getParamValue('indices', node, tensorMap, context);\n const depth = getParamValue('depth', node, 
tensorMap, context);\n const onValue = getParamValue('onValue', node, tensorMap, context);\n const offValue = getParamValue('offValue', node, tensorMap, context);\n return [tfOps.oneHot(indices, depth, onValue, offValue)];\n }\n case 'Ones': {\n return [tfOps.ones(getParamValue('shape', node, tensorMap, context), getParamValue('dtype', node, tensorMap, context))];\n }\n case 'OnesLike': {\n return [tfOps.onesLike(getParamValue('x', node, tensorMap, context))];\n }\n case 'RandomUniform': {\n return [tfOps.randomUniform(\n // tslint:disable-next-line:no-any\n getParamValue('shape', node, tensorMap, context), getParamValue('minval', node, tensorMap, context), getParamValue('maxval', node, tensorMap, context), getParamValue('dtype', node, tensorMap, context))];\n }\n case 'Range': {\n const start = getParamValue('start', node, tensorMap, context);\n const stop = getParamValue('stop', node, tensorMap, context);\n const step = getParamValue('step', node, tensorMap, context);\n return [tfOps.range(start, stop, step, getParamValue('dtype', node, tensorMap, context))];\n }\n case 'TruncatedNormal': {\n const shape = getParamValue('shape', node, tensorMap, context);\n const mean = getParamValue('mean', node, tensorMap, context);\n const stdDev = getParamValue('stdDev', node, tensorMap, context);\n const seed = getParamValue('seed', node, tensorMap, context);\n return [tfOps.truncatedNormal(shape, mean, stdDev, getParamValue('dtype', node, tensorMap, context), seed)];\n }\n case 'Zeros': {\n return [tfOps.zeros(getParamValue('shape', node, tensorMap, context), getParamValue('dtype', node, tensorMap, context))];\n }\n case 'ZerosLike': {\n return [tfOps.zerosLike(getParamValue('x', node, tensorMap, context))];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'creation';\n//# sourceMappingURL=creation_executor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// tslint:disable-next-line: no-imports-from-dist\nimport * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter';\nimport { getParamValue } from './utils';\nfunction nmsParams(node, tensorMap, context) {\n const boxes = getParamValue('boxes', node, tensorMap, context);\n const scores = getParamValue('scores', node, tensorMap, context);\n const maxOutputSize = getParamValue('maxOutputSize', node, tensorMap, context);\n const iouThreshold = getParamValue('iouThreshold', node, tensorMap, context);\n const scoreThreshold = getParamValue('scoreThreshold', node, tensorMap, context);\n const softNmsSigma = getParamValue('softNmsSigma', node, tensorMap, context);\n return {\n boxes,\n scores,\n maxOutputSize,\n iouThreshold,\n scoreThreshold,\n softNmsSigma\n };\n}\nexport const executeOp = async (node, tensorMap, context) => {\n switch (node.op) {\n case 'NonMaxSuppressionV5': {\n const { boxes, scores, maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma } = nmsParams(node, tensorMap, context);\n const result = await tfOps.image.nonMaxSuppressionWithScoreAsync(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma);\n return [result.selectedIndices, result.selectedScores];\n }\n case 'NonMaxSuppressionV4': {\n const { boxes, scores, maxOutputSize, iouThreshold, scoreThreshold } = nmsParams(node, tensorMap, context);\n const padToMaxOutputSize = getParamValue('padToMaxOutputSize', node, tensorMap, context);\n const result = await tfOps.image.nonMaxSuppressionPaddedAsync(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold, padToMaxOutputSize);\n return [result.selectedIndices, result.validOutputs];\n }\n case 'NonMaxSuppressionV3':\n case 'NonMaxSuppressionV2': {\n const { boxes, scores, maxOutputSize, iouThreshold, scoreThreshold } = nmsParams(node, tensorMap, context);\n return [await tfOps.image.nonMaxSuppressionAsync(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold)];\n }\n case 'Where': {\n const condition = tfOps.cast(getParamValue('condition', node, tensorMap, context), 'bool');\n const result = [await tfOps.whereAsync(condition)];\n condition.dispose();\n return result;\n }\n case 'ListDiff': {\n return tfOps.setdiff1dAsync(getParamValue('x', node, tensorMap, context), getParamValue('y', node, tensorMap, context));\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'dynamic';\n//# sourceMappingURL=dynamic_executor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// tslint:disable-next-line: no-imports-from-dist\nimport * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter';\nimport { getParamValue } from './utils';\nexport const executeOp = (node, tensorMap, context) => {\n switch (node.op) {\n case 'TopKV2': {\n const x = getParamValue('x', node, tensorMap, context);\n const k = getParamValue('k', node, tensorMap, context);\n const sorted = getParamValue('sorted', node, tensorMap, context);\n const result = tfOps.topk(x, k, sorted);\n return [result.values, result.indices];\n }\n case 'Unique': {\n const x = getParamValue('x', node, tensorMap, context);\n const result = tfOps.unique(x);\n return [result.values, result.indices];\n }\n case 'UniqueV2': {\n const x = getParamValue('x', node, tensorMap, context);\n const axis = getParamValue('axis', node, tensorMap, context);\n const result = tfOps.unique(x, axis);\n return [result.values, result.indices];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'evaluation';\n//# sourceMappingURL=evaluation_executor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// tslint:disable-next-line: no-imports-from-dist\nimport * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter';\nimport { cloneTensor, getParamValue, getTensor } from './utils';\nexport const executeOp = (node, tensorMap, context) => {\n switch (node.op) {\n case 'Const': {\n return tensorMap[node.name];\n }\n case 'PlaceholderWithDefault':\n const def = getParamValue('default', node, tensorMap, context);\n return [getTensor(node.name, tensorMap, context) || def];\n case 'Placeholder':\n return [getTensor(node.name, tensorMap, context)];\n case 'Identity':\n case 'StopGradient':\n case 'FakeQuantWithMinMaxVars': { // This op is currently ignored.\n const data = getParamValue('x', node, tensorMap, context);\n return [cloneTensor(data)];\n }\n case 'IdentityN':\n return getParamValue('x', node, tensorMap, context)\n .map((t) => cloneTensor(t));\n case 'Snapshot':\n const snapshot = getParamValue('x', node, tensorMap, context);\n return [cloneTensor(snapshot)];\n case 'Shape':\n return [tfOps.tensor1d(getParamValue('x', node, tensorMap, context).shape, 'int32')];\n case 'ShapeN':\n return getParamValue('x', node, tensorMap, context)\n .map((t) => tfOps.tensor1d(t.shape));\n case 'Size':\n return [tfOps.scalar(getParamValue('x', node, tensorMap, context).size, 'int32')];\n case 'Rank':\n return [tfOps.scalar(getParamValue('x', node, tensorMap, context).rank, 'int32')];\n case 'NoOp':\n return [tfOps.scalar(1)];\n case 'Print':\n const input = getParamValue('x', node, tensorMap, context);\n const data = getParamValue('data', node, tensorMap, context);\n const message = getParamValue('message', node, tensorMap, context);\n const summarize = getParamValue('summarize', node, tensorMap, context);\n console.warn('The graph has a tf.print() operation,' +\n 'usually used for debugging, which slows down performance.');\n console.log(message);\n for (let i = 0; i < data.length; i++) {\n console.log(Array.prototype.slice.call(data[i].dataSync())\n .slice(0, summarize));\n }\n return [input];\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'graph';\n//# sourceMappingURL=graph_executor.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { keep, scalar, stack, tidy, unstack, util } from '@tensorflow/tfjs-core';\n/**\n * Hashtable contains a set of tensors, which can be accessed by key.\n */\nexport class HashTable {\n /**\n * Constructor of HashTable. Creates a hash table.\n *\n * @param keyDType `dtype` of the table keys.\n * @param valueDType `dtype` of the table values.\n */\n constructor(keyDType, valueDType) {\n this.keyDType = keyDType;\n this.valueDType = valueDType;\n this.handle = scalar(0);\n // tslint:disable-next-line: no-any\n this.tensorMap = new Map();\n keep(this.handle);\n }\n get id() {\n return this.handle.id;\n }\n /**\n * Dispose the tensors and handle and clear the hashtable.\n */\n clearAndClose() {\n this.tensorMap.forEach(value => value.dispose());\n this.tensorMap.clear();\n this.handle.dispose();\n }\n /**\n * The number of items in the hash table.\n */\n size() {\n return this.tensorMap.size;\n }\n /**\n * Replaces the contents of the table with the specified keys and values.\n * @param keys Keys to store in the hashtable.\n * @param values Values to store in the hashtable.\n */\n async import(keys, values) {\n this.checkKeyAndValueTensor(keys, values);\n // We only store the primitive values of the keys, this allows lookup\n // to be O(1).\n const $keys = await keys.data();\n // Clear the hashTable before inserting new values.\n this.tensorMap.forEach(value => value.dispose());\n this.tensorMap.clear();\n return tidy(() => {\n const $values = unstack(values);\n const keysLength = $keys.length;\n const valuesLength = $values.length;\n util.assert(keysLength === valuesLength, () => `The number of elements doesn't match, keys has ` +\n `${keysLength} elements, the values has ${valuesLength} ` +\n `elements.`);\n for (let i = 0; i < keysLength; i++) {\n const key = $keys[i];\n const value = $values[i];\n keep(value);\n this.tensorMap.set(key, value);\n }\n return this.handle;\n });\n }\n /**\n * Looks up keys in a hash table, outputs the corresponding values.\n *\n * Performs batch lookups, for every element in the key tensor, `find`\n * stacks the corresponding value into the return tensor.\n *\n * If an element is not present in the table, the given `defaultValue` is\n * used.\n *\n * @param keys Keys to look up. Must have the same type as the keys of the\n * table.\n * @param defaultValue The scalar `defaultValue` is the value output for keys\n * not present in the table. 
It must also be of the same type as the\n * table values.\n */\n async find(keys, defaultValue) {\n this.checkKeyAndValueTensor(keys, defaultValue);\n const $keys = await keys.data();\n return tidy(() => {\n const result = [];\n for (let i = 0; i < $keys.length; i++) {\n const key = $keys[i];\n const value = this.findWithDefault(key, defaultValue);\n result.push(value);\n }\n return stack(result);\n });\n }\n // tslint:disable-next-line: no-any\n findWithDefault(key, defaultValue) {\n const result = this.tensorMap.get(key);\n return result != null ? result : defaultValue;\n }\n checkKeyAndValueTensor(key, value) {\n if (key.dtype !== this.keyDType) {\n throw new Error(`Expect key dtype ${this.keyDType}, but got ` +\n `${key.dtype}`);\n }\n if (value.dtype !== this.valueDType) {\n throw new Error(`Expect value dtype ${this.valueDType}, but got ` +\n `${value.dtype}`);\n }\n }\n}\n//# sourceMappingURL=hash_table.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { HashTable } from '../../executor/hash_table';\nimport { getParamValue } from './utils';\nexport const executeOp = async (node, tensorMap, context, resourceManager) => {\n switch (node.op) {\n case 'HashTable':\n case 'HashTableV2': {\n const keyDType = getParamValue('keyDType', node, tensorMap, context);\n const valueDType = getParamValue('valueDType', node, tensorMap, context);\n const hashTable = new HashTable(keyDType, valueDType);\n resourceManager.addHashTable(node.name, hashTable);\n return [hashTable.handle];\n }\n case 'LookupTableImport':\n case 'LookupTableImportV2': {\n const handle = getParamValue('tableHandle', node, tensorMap, context, resourceManager);\n const keys = getParamValue('keys', node, tensorMap, context);\n const values = getParamValue('values', node, tensorMap, context);\n const hashTable = resourceManager.getHashTableById(handle.id);\n return [await hashTable.import(keys, values)];\n }\n case 'LookupTableFind':\n case 'LookupTableFindV2': {\n const handle = getParamValue('tableHandle', node, tensorMap, context, resourceManager);\n const keys = getParamValue('keys', node, tensorMap, context);\n const defaultValue = getParamValue('defaultValue', node, tensorMap, context);\n const hashTable = resourceManager.getHashTableById(handle.id);\n return [await hashTable.find(keys, defaultValue)];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'hash_table';\n//# sourceMappingURL=hash_table_executor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// tslint:disable-next-line: no-imports-from-dist\nimport * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter';\nimport { getParamValue } from './utils';\nexport const executeOp = (node, tensorMap, context) => {\n switch (node.op) {\n case 'ResizeBilinear': {\n const images = getParamValue('images', node, tensorMap, context);\n const size = getParamValue('size', node, tensorMap, context);\n const alignCorners = getParamValue('alignCorners', node, tensorMap, context);\n return [tfOps.image.resizeBilinear(images, [size[0], size[1]], alignCorners)];\n }\n case 'ResizeNearestNeighbor': {\n const images = getParamValue('images', node, tensorMap, context);\n const size = getParamValue('size', node, tensorMap, context);\n const alignCorners = getParamValue('alignCorners', node, tensorMap, context);\n return [tfOps.image.resizeNearestNeighbor(images, [size[0], size[1]], alignCorners)];\n }\n case 'CropAndResize': {\n const image = getParamValue('image', node, tensorMap, context);\n const boxes = getParamValue('boxes', node, tensorMap, context);\n const boxInd = getParamValue('boxInd', node, tensorMap, context);\n const cropSize = getParamValue('cropSize', node, tensorMap, context);\n const method = getParamValue('method', node, tensorMap, context);\n const extrapolationValue = getParamValue('extrapolationValue', node, tensorMap, context);\n return [tfOps.image.cropAndResize(image, boxes, boxInd, cropSize, method, extrapolationValue)];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'image';\n//# sourceMappingURL=image_executor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// tslint:disable-next-line: no-imports-from-dist\nimport * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter';\nimport { getParamValue } from './utils';\nexport const executeOp = (node, tensorMap, context) => {\n switch (node.op) {\n case 'Equal': {\n return [tfOps.equal(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'NotEqual': {\n return [tfOps.notEqual(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'Greater': {\n return [tfOps.greater(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'GreaterEqual': {\n return [tfOps.greaterEqual(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'Less': {\n return [tfOps.less(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'LessEqual': {\n return [tfOps.lessEqual(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'LogicalAnd': {\n return [tfOps.logicalAnd(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'LogicalNot': {\n return [tfOps.logicalNot(getParamValue('a', node, tensorMap, context))];\n }\n case 'LogicalOr': {\n return [tfOps.logicalOr(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'Select':\n case 'SelectV2': {\n return [tfOps.where(getParamValue('condition', node, tensorMap, context), getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'logical';\n//# sourceMappingURL=logical_executor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// tslint:disable-next-line: no-imports-from-dist\nimport * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter';\nimport { getParamValue } from './utils';\nexport const executeOp = (node, tensorMap, context) => {\n switch (node.op) {\n case 'BatchMatMul':\n case 'BatchMatMulV2':\n case 'MatMul':\n return [tfOps.matMul(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context), getParamValue('transposeA', node, tensorMap, context), getParamValue('transposeB', node, tensorMap, context))];\n case 'Transpose':\n return [tfOps.transpose(getParamValue('x', node, tensorMap, context), getParamValue('perm', node, tensorMap, context))];\n case '_FusedMatMul':\n const [extraOp, activationFunc] = getParamValue('fusedOps', node, tensorMap, context);\n const isBiasAdd = extraOp === 'biasadd';\n const isPrelu = activationFunc === 'prelu';\n const numArgs = getParamValue('numArgs', node, tensorMap, context);\n if (isBiasAdd) {\n if (isPrelu && numArgs !== 2) {\n throw new Error('Fused MatMul with BiasAdd and Prelu must have two ' +\n 'extra arguments: bias and alpha.');\n }\n if (!isPrelu && numArgs !== 1) {\n throw new Error('Fused MatMul with BiasAdd must have one extra argument: bias.');\n }\n }\n const [biasArg, preluArg] = getParamValue('args', node, tensorMap, context);\n return [tfOps.fused.matMul({\n a: getParamValue('a', node, tensorMap, context),\n b: getParamValue('b', node, tensorMap, context),\n transposeA: getParamValue('transposeA', node, tensorMap, context),\n transposeB: getParamValue('transposeB', node, tensorMap, context),\n bias: biasArg,\n activation: activationFunc,\n preluActivationWeights: preluArg\n })];\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'matrices';\n//# sourceMappingURL=matrices_executor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// tslint:disable-next-line: no-imports-from-dist\nimport * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter';\nimport { getParamValue } from './utils';\nexport const executeOp = (node, tensorMap, context) => {\n switch (node.op) {\n case 'FusedBatchNorm':\n case 'FusedBatchNormV2': {\n return [tfOps.batchNorm(getParamValue('x', node, tensorMap, context), getParamValue('mean', node, tensorMap, context), getParamValue('variance', node, tensorMap, context), getParamValue('offset', node, tensorMap, context), getParamValue('scale', node, tensorMap, context), getParamValue('epsilon', node, tensorMap, context))];\n }\n case 'FusedBatchNormV3': {\n return [tfOps.batchNorm(getParamValue('x', node, tensorMap, context), getParamValue('mean', node, tensorMap, context), getParamValue('variance', node, tensorMap, context), getParamValue('offset', node, tensorMap, context), getParamValue('scale', node, tensorMap, context), getParamValue('epsilon', node, tensorMap, context))];\n }\n case 'LRN': {\n return [tfOps.localResponseNormalization(getParamValue('x', node, tensorMap, context), getParamValue('radius', node, tensorMap, context), getParamValue('bias', node, tensorMap, context), getParamValue('alpha', node, tensorMap, context), getParamValue('beta', node, tensorMap, context))];\n }\n case 'Softmax': {\n return [tfOps.softmax(getParamValue('x', node, tensorMap, context))];\n }\n case 'LogSoftmax': {\n return [tfOps.logSoftmax(getParamValue('x', node, tensorMap, context))];\n }\n case 'SparseToDense': {\n return [tfOps.sparseToDense(getParamValue('sparseIndices', node, tensorMap, context), getParamValue('outputShape', node, tensorMap, context), getParamValue('sparseValues', node, tensorMap, context), getParamValue('defaultValue', node, tensorMap, context))];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'normalization';\n//# sourceMappingURL=normalization_executor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// tslint:disable-next-line: no-imports-from-dist\nimport * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter';\nimport { getParamValue } from './utils';\nexport const executeOp = (node, tensorMap, context) => {\n switch (node.op) {\n case 'Max': {\n const axis = getParamValue('axis', node, tensorMap, context);\n const keepDims = getParamValue('keepDims', node, tensorMap, context);\n return [tfOps.max(getParamValue('x', node, tensorMap, context), axis, keepDims)];\n }\n case 'Mean': {\n const axis = getParamValue('axis', node, tensorMap, context);\n const keepDims = getParamValue('keepDims', node, tensorMap, context);\n return [tfOps.mean(getParamValue('x', node, tensorMap, context), axis, keepDims)];\n }\n case 'Min': {\n const axis = getParamValue('axis', node, tensorMap, context);\n const keepDims = getParamValue('keepDims', node, tensorMap, context);\n return [tfOps.min(getParamValue('x', node, tensorMap, context), axis, keepDims)];\n }\n case 'Sum': {\n const axis = getParamValue('axis', node, tensorMap, context);\n const keepDims = getParamValue('keepDims', node, tensorMap, context);\n return [tfOps.sum(getParamValue('x', node, tensorMap, context), axis, keepDims)];\n }\n case 'All': {\n const axis = getParamValue('axis', node, tensorMap, context);\n const keepDims = getParamValue('keepDims', node, tensorMap, context);\n return [tfOps.all(getParamValue('x', node, tensorMap, context), axis, keepDims)];\n }\n case 'Any': {\n const axis = getParamValue('axis', node, tensorMap, context);\n const keepDims = getParamValue('keepDims', node, tensorMap, context);\n return [tfOps.any(getParamValue('x', node, tensorMap, context), axis, keepDims)];\n }\n case 'ArgMax': {\n const axis = getParamValue('axis', node, tensorMap, context);\n return [tfOps.argMax(getParamValue('x', node, tensorMap, context), axis)];\n }\n case 'ArgMin': {\n const axis = getParamValue('axis', node, tensorMap, context);\n return [tfOps.argMin(getParamValue('x', node, tensorMap, context), axis)];\n }\n case 'Prod': {\n const axis = getParamValue('axis', node, tensorMap, context);\n const keepDims = getParamValue('keepDims', node, tensorMap, context);\n return [tfOps.prod(getParamValue('x', node, tensorMap, context), axis, keepDims)];\n }\n case 'Cumsum': {\n const axis = getParamValue('axis', node, tensorMap, context);\n const exclusive = getParamValue('exclusive', node, tensorMap, context);\n const reverse = getParamValue('reverse', node, tensorMap, context);\n return [tfOps.cumsum(getParamValue('x', node, tensorMap, context), axis, exclusive, reverse)];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'reduction';\n//# sourceMappingURL=reduction_executor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { tidy, util } from '@tensorflow/tfjs-core';\n// tslint:disable-next-line: no-imports-from-dist\nimport * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter';\nimport { getParamValue } from './utils';\nexport const executeOp = (node, tensorMap, context) => {\n switch (node.op) {\n case 'ConcatV2':\n case 'Concat': {\n const n = getParamValue('n', node, tensorMap, context);\n const axis = getParamValue('axis', node, tensorMap, context);\n let inputs = getParamValue('tensors', node, tensorMap, context);\n inputs = inputs.slice(0, n);\n return [tfOps.concat(inputs, axis)];\n }\n case 'GatherV2':\n case 'Gather': {\n const axis = getParamValue('axis', node, tensorMap, context);\n const input = getParamValue('x', node, tensorMap, context);\n const indices = getParamValue('indices', node, tensorMap, context);\n return [tfOps.gather(input, tfOps.cast(indices, 'int32'), axis)];\n }\n case 'ReverseV2':\n case 'Reverse': {\n const axis = getParamValue('axis', node, tensorMap, context);\n const input = getParamValue('x', node, tensorMap, context);\n return [tfOps.reverse(input, axis)];\n }\n case 'Slice': {\n // tslint:disable-next-line:no-any\n const begin = getParamValue('begin', node, tensorMap, context);\n // tslint:disable-next-line:no-any\n const size = getParamValue('size', node, tensorMap, context);\n return [tfOps.slice(getParamValue('x', node, tensorMap, context), begin, size)];\n }\n case 'StridedSlice': {\n const begin = getParamValue('begin', node, tensorMap, context);\n const end = getParamValue('end', node, tensorMap, context);\n const strides = getParamValue('strides', node, tensorMap, context);\n const beginMask = getParamValue('beginMask', node, tensorMap, context);\n const endMask = getParamValue('endMask', node, tensorMap, context);\n const ellipsisMask = getParamValue('ellipsisMask', node, tensorMap, context);\n const newAxisMask = getParamValue('newAxisMask', node, tensorMap, context);\n const shrinkAxisMask = getParamValue('shrinkAxisMask', node, tensorMap, context);\n const tensor = getParamValue('x', node, tensorMap, context);\n return [tfOps.stridedSlice(tensor, begin, end, strides, beginMask, endMask, ellipsisMask, newAxisMask, shrinkAxisMask)];\n }\n case 'Pack': {\n return tidy(() => {\n const axis = getParamValue('axis', node, tensorMap, context);\n const tensors = getParamValue('tensors', node, tensorMap, context);\n // Reshape the tensors to the first tensor's shape if they don't\n // match.\n const shape = tensors[0].shape;\n const squeezedShape = tfOps.squeeze(tensors[0]).shape;\n const mapped = tensors.map(tensor => {\n const sameShape = util.arraysEqual(tensor.shape, shape);\n if (!sameShape &&\n !util.arraysEqual(tfOps.squeeze(tensor).shape, squeezedShape)) {\n throw new Error('the input tensors shape does not match');\n }\n return sameShape ? 
tensor : tfOps.reshape(tensor, shape);\n });\n return [tfOps.stack(mapped, axis)];\n });\n }\n case 'Unpack': {\n const axis = getParamValue('axis', node, tensorMap, context);\n const tensor = getParamValue('tensor', node, tensorMap, context);\n return tfOps.unstack(tensor, axis);\n }\n case 'Tile': {\n const reps = getParamValue('reps', node, tensorMap, context);\n return [tfOps.tile(getParamValue('x', node, tensorMap, context), reps)];\n }\n case 'Split':\n case 'SplitV': {\n const axis = getParamValue('axis', node, tensorMap, context);\n const numOrSizeSplits = getParamValue('numOrSizeSplits', node, tensorMap, context);\n const tensor = getParamValue('x', node, tensorMap, context);\n return tfOps.split(tensor, numOrSizeSplits, axis);\n }\n case 'ScatterNd': {\n const indices = getParamValue('indices', node, tensorMap, context);\n const values = getParamValue('values', node, tensorMap, context);\n const shape = getParamValue('shape', node, tensorMap, context);\n return [tfOps.scatterND(indices, values, shape)];\n }\n case 'GatherNd': {\n const x = getParamValue('x', node, tensorMap, context);\n const indices = getParamValue('indices', node, tensorMap, context);\n return [tfOps.gatherND(x, indices)];\n }\n case 'SparseToDense': {\n const indices = getParamValue('sparseIndices', node, tensorMap, context);\n const shape = getParamValue('outputShape', node, tensorMap, context);\n const sparseValues = getParamValue('sparseValues', node, tensorMap, context);\n const defaultValue = getParamValue('defaultValue', node, tensorMap, context);\n return [tfOps.sparseToDense(indices, sparseValues, shape, sparseValues.dtype === defaultValue.dtype ?\n defaultValue :\n tfOps.cast(defaultValue, sparseValues.dtype))];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'slice_join';\n//# sourceMappingURL=slice_join_executor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// tslint:disable-next-line: no-imports-from-dist\nimport * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter';\nimport { getParamValue } from './utils';\nexport const executeOp = (node, tensorMap, context) => {\n switch (node.op) {\n case 'FFT': {\n return [tfOps.fft(getParamValue('x', node, tensorMap, context))];\n }\n case 'IFFT': {\n return [tfOps.ifft(getParamValue('x', node, tensorMap, context))];\n }\n case 'RFFT': {\n return [tfOps.rfft(getParamValue('x', node, tensorMap, context))];\n }\n case 'IRFFT': {\n return [tfOps.irfft(getParamValue('x', node, tensorMap, context))];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'spectral';\n//# sourceMappingURL=spectral_executor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// tslint:disable-next-line: no-imports-from-dist\nimport * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter';\nimport { getParamValue } from './utils';\nexport const executeOp = (node, tensorMap, context) => {\n switch (node.op) {\n case 'Cast': {\n return [tfOps.cast(getParamValue('x', node, tensorMap, context), getParamValue('dtype', node, tensorMap, context))];\n }\n case 'ExpandDims': {\n const axis = getParamValue('axis', node, tensorMap, context);\n return [tfOps.expandDims(getParamValue('x', node, tensorMap, context), axis)];\n }\n case 'Squeeze': {\n const axis = getParamValue('axis', node, tensorMap, context);\n return [tfOps.squeeze(getParamValue('x', node, tensorMap, context), axis)];\n }\n case 'Reshape': {\n return [tfOps.reshape(getParamValue('x', node, tensorMap, context), getParamValue('shape', node, tensorMap, context))];\n }\n case 'MirrorPad': {\n return [tfOps.mirrorPad(getParamValue('x', node, tensorMap, context), getParamValue('padding', node, tensorMap, context), getParamValue('mode', node, tensorMap, context))];\n }\n case 'PadV2':\n case 'Pad': {\n return [tfOps.pad(getParamValue('x', node, tensorMap, context), getParamValue('padding', node, tensorMap, context), getParamValue('constantValue', node, tensorMap, context))];\n }\n case 'SpaceToBatchND': {\n const blockShape = getParamValue('blockShape', node, tensorMap, context);\n const paddings = getParamValue('paddings', node, tensorMap, context);\n return [tfOps.spaceToBatchND(getParamValue('x', node, tensorMap, context), blockShape, paddings)];\n }\n case 'BatchToSpaceND': {\n const blockShape = getParamValue('blockShape', node, tensorMap, context);\n const crops = getParamValue('crops', node, tensorMap, context);\n return [tfOps.batchToSpaceND(getParamValue('x', node, tensorMap, context), blockShape, crops)];\n }\n case 'DepthToSpace': {\n const blockSize = getParamValue('blockSize', node, tensorMap, context);\n const dataFormat = getParamValue('dataFormat', node, tensorMap, context).toUpperCase();\n return [tfOps.depthToSpace(getParamValue('x', node, tensorMap, context), blockSize, dataFormat)];\n }\n case 'BroadcastTo': {\n return [tfOps.broadcastTo(getParamValue('x', node, tensorMap, context), getParamValue('shape', node, tensorMap, context))];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'transformation';\n//# sourceMappingURL=transformation_executor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { NodeValueImpl } from './custom_op/node_value_impl';\nimport { getRegisteredOp } from './custom_op/register';\nimport * as arithmetic from './executors/arithmetic_executor';\nimport * as basicMath from './executors/basic_math_executor';\nimport * as control from './executors/control_executor';\nimport * as convolution from './executors/convolution_executor';\nimport * as creation from './executors/creation_executor';\nimport * as dynamic from './executors/dynamic_executor';\nimport * as evaluation from './executors/evaluation_executor';\nimport * as graph from './executors/graph_executor';\nimport * as hashTable from './executors/hash_table_executor';\nimport * as image from './executors/image_executor';\nimport * as logical from './executors/logical_executor';\nimport * as matrices from './executors/matrices_executor';\nimport * as normalization from './executors/normalization_executor';\nimport * as reduction from './executors/reduction_executor';\nimport * as sliceJoin from './executors/slice_join_executor';\nimport * as spectral from './executors/spectral_executor';\nimport * as transformation from './executors/transformation_executor';\n/**\n * Executes the op defined by the node object.\n * @param node\n * @param tensorMap contains tensors for executed nodes and weights\n * @param context contains tensors and information for running the current node.\n * @param resourceManager Optional. 
Contains global resources of the model.\n */\nexport function executeOp(node, tensorMap, context, resourceManager) {\n const value = ((node, tensorMap, context) => {\n switch (node.category) {\n case 'arithmetic':\n return tfc.tidy(() => arithmetic.executeOp(node, tensorMap, context));\n case 'basic_math':\n return tfc.tidy(() => basicMath.executeOp(node, tensorMap, context));\n case 'control':\n return control.executeOp(node, tensorMap, context);\n case 'convolution':\n return tfc.tidy(() => convolution.executeOp(node, tensorMap, context));\n case 'creation':\n return tfc.tidy(() => creation.executeOp(node, tensorMap, context));\n case 'dynamic':\n return dynamic.executeOp(node, tensorMap, context);\n case 'evaluation':\n return tfc.tidy(() => evaluation.executeOp(node, tensorMap, context));\n case 'image':\n return tfc.tidy(() => image.executeOp(node, tensorMap, context));\n case 'graph':\n return tfc.tidy(() => graph.executeOp(node, tensorMap, context));\n case 'logical':\n return tfc.tidy(() => logical.executeOp(node, tensorMap, context));\n case 'matrices':\n return tfc.tidy(() => matrices.executeOp(node, tensorMap, context));\n case 'normalization':\n return tfc.tidy(() => normalization.executeOp(node, tensorMap, context));\n case 'reduction':\n return tfc.tidy(() => reduction.executeOp(node, tensorMap, context));\n case 'slice_join':\n return tfc.tidy(() => sliceJoin.executeOp(node, tensorMap, context));\n case 'spectral':\n return tfc.tidy(() => spectral.executeOp(node, tensorMap, context));\n case 'transformation':\n return tfc.tidy(() => transformation.executeOp(node, tensorMap, context));\n case 'hash_table':\n return hashTable.executeOp(node, tensorMap, context, resourceManager);\n case 'custom':\n const opMapper = getRegisteredOp(node.op);\n if (opMapper && opMapper.customExecutor) {\n return opMapper.customExecutor(new NodeValueImpl(node, tensorMap, context));\n }\n else {\n throw TypeError(`Custom op ${node.op} is not registered.`);\n }\n default:\n throw TypeError(`Unknown op '${node.op}'. File an issue at ` +\n `https://github.com/tensorflow/tfjs/issues so we can add it` +\n `, or register a custom execution with tf.registerOp()`);\n }\n })(node, tensorMap, context);\n if (tfc.util.isPromise(value)) {\n return value.then((data) => [].concat(data));\n }\n return [].concat(value);\n}\n//# sourceMappingURL=operation_executor.js.map", "/**\n * ExecutionContext captures the runtime environment of the node. 
It keeps\n * track of the current frame and iteration for the control flow ops.\n *\n * For example, typical Dynamic RNN model may contain loops, for which\n * TensorFlow will generate graphs with Enter/Exit nodes to control the\n * current execution frame, and NextIteration Nodes for iteration id increment.\n * For model with branch logic, TensorFLow will generate Switch/Merge ops.\n */\nexport class ExecutionContext {\n constructor(weightMap = {}, tensorArrayMap = {}, tensorListMap = {}, functionMap = {}) {\n this.weightMap = weightMap;\n this.tensorArrayMap = tensorArrayMap;\n this.tensorListMap = tensorListMap;\n this.functionMap = functionMap;\n this.rootContext = { id: 0, frameName: '', iterationId: 0 };\n this.contexts = [this.rootContext];\n this.lastId = 0;\n this.generateCurrentContextIds();\n }\n newFrame(id, frameName) {\n return { id, frameName, iterationId: 0 };\n }\n /**\n * Set the current context\n * @param contexts: ExecutionContextInfo[] the current path of execution\n * frames\n */\n set currentContext(contexts) {\n if (this.contexts !== contexts) {\n this.contexts = contexts;\n this.generateCurrentContextIds();\n }\n }\n get currentContext() {\n return this.contexts;\n }\n /**\n * Returns the current context in string format.\n */\n get currentContextId() {\n return this._currentContextIds[0];\n }\n /**\n * Returns the current context and all parent contexts in string format.\n * This allow access to the nodes in the current and parent frames.\n */\n get currentContextIds() {\n return this._currentContextIds;\n }\n generateCurrentContextIds() {\n const names = [];\n for (let i = 0; i < this.contexts.length - 1; i++) {\n const contexts = this.contexts.slice(0, this.contexts.length - i);\n names.push(this.contextIdforContexts(contexts));\n }\n names.push('');\n this._currentContextIds = names;\n }\n contextIdforContexts(contexts) {\n return contexts ?\n contexts\n .map(context => (context.id === 0 && context.iterationId === 0) ?\n '' :\n `${context.frameName}-${context.iterationId}`)\n .join('/') :\n '';\n }\n /**\n * Enter a new frame, a new context is pushed on the current context list.\n * @param frameId new frame id\n */\n enterFrame(frameId) {\n if (this.contexts) {\n this.lastId++;\n this.contexts = this.contexts.slice();\n this.contexts.push(this.newFrame(this.lastId, frameId));\n this._currentContextIds.unshift(this.contextIdforContexts(this.contexts));\n }\n }\n /**\n * Exit the current frame, the last context is removed from the current\n * context list.\n */\n exitFrame() {\n if (this.contexts && this.contexts.length > 1) {\n this.contexts = this.contexts.slice();\n this.contexts.splice(-1);\n this.currentContextIds.shift();\n }\n else {\n throw new Error('Cannot exit frame, the context is empty');\n }\n }\n /**\n * Enter the next iteration of a loop, the iteration id of last context is\n * increased.\n */\n nextIteration() {\n if (this.contexts && this.contexts.length > 0) {\n this.contexts = this.contexts.slice();\n this.lastId++;\n const context = Object.assign({}, this.contexts[this.contexts.length - 1]);\n context.iterationId += 1;\n context.id = this.lastId;\n this.contexts.splice(-1, 1, context);\n this._currentContextIds.splice(0, 1, this.contextIdforContexts(this.contexts));\n }\n else {\n throw new Error('Cannot increase frame iteration, the context is empty');\n }\n }\n getWeight(name) {\n return this.weightMap[name];\n }\n addTensorArray(tensorArray) {\n this.tensorArrayMap[tensorArray.id] = tensorArray;\n }\n getTensorArray(id) {\n return 
this.tensorArrayMap[id];\n }\n addTensorList(tensorList) {\n this.tensorListMap[tensorList.id] = tensorList;\n }\n getTensorList(id) {\n return this.tensorListMap[id];\n }\n dispose(keepIds) {\n for (const key in this.tensorArrayMap) {\n this.tensorArrayMap[key].clearAndClose(keepIds);\n }\n for (const key in this.tensorListMap) {\n this.tensorListMap[key].clearAndClose(keepIds);\n }\n }\n}\n//# sourceMappingURL=execution_context.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { parseNodeName } from '../operations/executors/utils';\n/**\n * Given graph inputs and desired outputs, find the minimal set of nodes\n * to execute in order to compute the outputs. In addition return other useful\n * info such:\n * - Missing inputs needed to compute the output.\n * - Whether the subgraph contains dynamic ops (control flow, dynamic shape).\n * - Alternative inputs in order to avoid async (dynamic op) execution.\n */\nexport function getExecutionSubgraph(inputs, outputs, weightMap, initNodes) {\n const usedNodes = new Set();\n const missingInputs = [];\n let dynamicNode = null;\n let syncInputs = null;\n // Start with the outputs, going backwards and find all the nodes that are\n // needed to compute those outputs.\n const seen = new Set();\n const inputNodeNames = Object.keys(inputs).map(name => parseNodeName(name)[0]);\n let initNodeNames = [];\n if (initNodes != null) {\n initNodeNames = initNodes.map(node => parseNodeName(node.name)[0]);\n }\n const frontier = [...outputs];\n while (frontier.length > 0) {\n const node = frontier.pop();\n if (isControlFlow(node) || isDynamicShape(node) || isHashTable(node)) {\n if (dynamicNode == null) {\n dynamicNode = node;\n syncInputs = dynamicNode.children.map(child => child.name)\n .filter(name => usedNodes.has(name));\n }\n }\n usedNodes.add(node.name);\n // Weights are dead end since we already have their values.\n if (weightMap[node.name] != null) {\n continue;\n }\n // This node is a dead end since it's one of the user-provided inputs.\n if (inputNodeNames.indexOf(node.name) !== -1) {\n continue;\n }\n // This node is a dead end since it doesn't have any inputs.\n if (initNodeNames.indexOf(node.name) !== -1) {\n continue;\n }\n if (node.inputs.length === 0) {\n missingInputs.push(node.name);\n continue;\n }\n node.inputs.forEach(input => {\n // Don't add to the frontier if it is already there.\n if (seen.has(input.name)) {\n return;\n }\n seen.add(input.name);\n frontier.push(input);\n });\n }\n return { inputs, outputs, usedNodes, missingInputs, dynamicNode, syncInputs };\n}\n/**\n * Given the execution info, return a list of nodes in topological order that\n * need to be executed to compute the output.\n */\nexport function getNodesInTopologicalOrder(graph, weightMap, executionInfo) {\n const { usedNodes, inputs } = executionInfo;\n const frontier = [];\n const inputNodes = 
Object.keys(inputs)\n .map(name => parseNodeName(name)[0])\n .map(name => graph.nodes[name]);\n const initNodes = graph.initNodes;\n inputNodes.forEach(input => {\n if (usedNodes.has(input.name)) {\n frontier.push(input);\n }\n });\n graph.weights.forEach(weight => {\n if (usedNodes.has(weight.name)) {\n frontier.push(weight);\n }\n });\n if (initNodes != null) {\n initNodes.forEach(node => {\n if (usedNodes.has(node.name)) {\n frontier.push(node);\n }\n });\n }\n const seen = new Set();\n const orderedNodes = [];\n while (frontier.length > 0) {\n const node = frontier.pop();\n seen.add(node.name);\n if (!weightMap[node.name]) {\n orderedNodes.push(node);\n }\n node.children.forEach(child => {\n if (!seen.has(child.name) && usedNodes.has(child.name) &&\n child.inputs.every(input => seen.has(input.name))) {\n frontier.push(child);\n }\n });\n }\n return orderedNodes;\n}\nconst CONTROL_FLOW_OPS = [\n 'Switch', 'Merge', 'Enter', 'Exit', 'NextIteration', 'StatelessIf',\n 'StatelessWhile', 'if', 'While'\n];\nconst DYNAMIC_SHAPE_OPS = [\n 'NonMaxSuppressionV2', 'NonMaxSuppressionV3', 'NonMaxSuppressionV5', 'Where'\n];\nconst HASH_TABLE_OPS = [\n 'HashTable', 'HashTableV2', 'LookupTableImport', 'LookupTableImportV2',\n 'LookupTableFind', 'LookupTableFindV2'\n];\nexport function isControlFlow(node) {\n return CONTROL_FLOW_OPS.indexOf(node.op) >= 0;\n}\nexport function isDynamicShape(node) {\n return DYNAMIC_SHAPE_OPS.indexOf(node.op) >= 0;\n}\nexport function isHashTable(node) {\n return HASH_TABLE_OPS.indexOf(node.op) >= 0;\n}\n//# sourceMappingURL=model_analysis.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { tidy, util } from '@tensorflow/tfjs-core';\nimport { getNodeNameAndIndex, getParamValue, getTensor, getTensorsForCurrentContenxt, parseNodeName } from '../operations/executors/utils';\nimport { executeOp } from '../operations/operation_executor';\nimport { ExecutionContext } from './execution_context';\nimport { getExecutionSubgraph, getNodesInTopologicalOrder, isControlFlow } from './model_analysis';\nexport class GraphExecutor {\n /**\n *\n * @param graph Graph the model or function graph to be executed.\n * @param parent When building function exector you need to set the parent\n * executor. 
Since the weights and function executor maps are set at parant\n * level, that function executor can access the function maps and weight maps\n * through the parent.\n */\n constructor(graph, parent) {\n this.graph = graph;\n this.parent = parent;\n this.compiledMap = new Map();\n this._weightMap = {};\n this.SEPERATOR = ',';\n this._functions = {};\n this._functionExecutorMap = {};\n this._outputs = graph.outputs;\n this._inputs = graph.inputs;\n this._initNodes = graph.initNodes;\n this._signature = graph.signature;\n this._functions = graph.functions;\n // create sub-graph executors\n if (graph.functions != null) {\n Object.keys(graph.functions).forEach(name => {\n this._functionExecutorMap[name] =\n new GraphExecutor(graph.functions[name], this);\n });\n }\n }\n get weightIds() {\n return this.parent ? this.parent.weightIds : this._weightIds;\n }\n get functionExecutorMap() {\n return this.parent ? this.parent.functionExecutorMap :\n this._functionExecutorMap;\n }\n get weightMap() {\n return this.parent ? this.parent.weightMap : this._weightMap;\n }\n set weightMap(weightMap) {\n const weightIds = Object.keys(weightMap).map(key => weightMap[key].map(tensor => tensor.id));\n this._weightIds = [].concat(...weightIds);\n this._weightMap = weightMap;\n }\n /**\n * Set `ResourceManager` shared by executors of a model.\n * @param resourceManager: `ResourceManager` of the `GraphModel`.\n */\n set resourceManager(resourceManager) {\n this._resourceManager = resourceManager;\n }\n get inputs() {\n return this._inputs.map(node => {\n return {\n name: node.name,\n shape: node.attrParams['shape'] ?\n node.attrParams['shape'].value :\n undefined,\n dtype: node.attrParams['dtype'] ?\n node.attrParams['dtype'].value :\n undefined\n };\n });\n }\n get outputs() {\n return this._outputs.map(node => {\n return {\n name: node.name,\n shape: node.attrParams['shape'] ?\n node.attrParams['shape'].value :\n undefined,\n dtype: node.attrParams['dtype'] ?\n node.attrParams['dtype'].value :\n undefined\n };\n });\n }\n get inputNodes() {\n return this._inputs.map(node => node.signatureKey || node.name);\n }\n get outputNodes() {\n return this._outputs.map((node) => {\n const name = node.signatureKey || node.name;\n return node.defaultOutput ? (`${name}:${node.defaultOutput}`) : name;\n });\n }\n get functions() {\n return Object.keys(this._functions).reduce((map, key) => {\n map[key] = this._functions[key].signature;\n return map;\n }, {});\n }\n getCompilationKey(inputs, outputs) {\n const sortedInputs = inputs.map(node => node.name).sort();\n const sortedOutputs = outputs.map(node => node.name).sort();\n return sortedInputs.join(this.SEPERATOR) + '--' +\n sortedOutputs.join(this.SEPERATOR);\n }\n /**\n * Compiles the inference graph and returns the minimal set of nodes that are\n * required for execution, in the correct execution order.\n */\n compile(inputs, outputs) {\n const executionInfo = getExecutionSubgraph(inputs, outputs, this.weightMap, this._initNodes);\n const { missingInputs, dynamicNode, syncInputs } = executionInfo;\n if (dynamicNode != null) {\n throw new Error(`This execution contains the node '${dynamicNode.name}', which has ` +\n `the dynamic op '${dynamicNode.op}'. Please use ` +\n `model.executeAsync() instead. 
Alternatively, to avoid the ` +\n `dynamic ops, specify the inputs [${syncInputs}]`);\n }\n if (missingInputs.length > 0) {\n const outNames = outputs.map(n => n.name);\n const inNames = Object.keys(inputs);\n throw new Error(`Cannot compute the outputs [${outNames}] from the provided inputs ` +\n `[${inNames}]. Missing the following inputs: [${missingInputs}]`);\n }\n return getNodesInTopologicalOrder(this.graph, this.weightMap, executionInfo);\n }\n /**\n * Executes the inference for given input tensors.\n * @param inputs Tensor map for the model inputs, keyed by the input node\n * names.\n * @param outputs Optional. output node name from the Tensorflow model, if\n * no outputs are specified, the default outputs of the model would be used.\n * You can inspect intermediate nodes of the model by adding them to the\n * outputs array.\n */\n execute(inputs, outputs) {\n inputs = this.mapInputs(inputs);\n const names = Object.keys(inputs).sort();\n this.checkInputs(inputs);\n this.checkInputShapeAndType(inputs);\n outputs = this.mapOutputs(outputs);\n this.checkOutputs(outputs);\n const inputNodes = names.map(name => this.graph.nodes[parseNodeName(name)[0]]);\n const outputNodeNames = outputs.map(name => parseNodeName(name)[0]);\n let outputNodes = outputNodeNames.map(name => this.graph.nodes[name]);\n // If no outputs are specified, then use the default outputs of the model.\n if (outputNodes.length === 0) {\n outputNodes = this._outputs;\n }\n const compilationKey = this.getCompilationKey(inputNodes, outputNodes);\n // Do nothing if the compiled graph cache contains the input.\n let orderedNodes = this.compiledMap.get(compilationKey);\n if (orderedNodes == null) {\n orderedNodes = this.compile(inputs, outputNodes);\n this.compiledMap.set(compilationKey, orderedNodes);\n }\n const tensorArrayMap = {};\n const tensorListMap = {};\n return tidy(() => {\n const context = new ExecutionContext(this.weightMap, tensorArrayMap, tensorListMap, this.functionExecutorMap);\n const tensorsMap = Object.assign({}, this.weightMap);\n Object.keys(inputs).forEach(name => {\n const [nodeName, index] = parseNodeName(name);\n const tensors = [];\n tensors[index] = inputs[name];\n tensorsMap[nodeName] = tensors;\n });\n const tensorsToKeep = this.getFrozenTensorIds(tensorsMap);\n const intermediateTensorConsumerCount = {};\n for (let i = 0; i < orderedNodes.length; i++) {\n const node = orderedNodes[i];\n if (!tensorsMap[node.name]) {\n const tensors = executeOp(node, tensorsMap, context, this._resourceManager);\n if (util.isPromise(tensors)) {\n throw new Error(`The execution of the op '${node.op}' returned a promise. 
` +\n `Please use model.executeAsync() instead.`);\n }\n tensorsMap[node.name] = tensors;\n this.checkTensorForDisposal(node.name, node, tensorsMap, context, tensorsToKeep, outputNodeNames, intermediateTensorConsumerCount);\n }\n }\n // dispose the context for the root executor\n if (this.parent == null) {\n context.dispose(tensorsToKeep);\n }\n return outputs.map(name => getTensor(name, tensorsMap, context));\n });\n }\n getFrozenTensorIds(tensorMap) {\n const ids = [].concat.apply([], Object.keys(tensorMap)\n .map(key => tensorMap[key])\n .map(tensors => tensors.map(tensor => tensor.id)));\n return new Set(ids);\n }\n checkTensorForDisposal(nodeName, node, tensorMap, context, tensorsToKeep, outputNames, intermediateTensorConsumerCount) {\n // Skip output nodes and any control flow nodes, since its dependency is\n // tricky to track correctly.\n if (node.category === 'control' || outputNames.indexOf(nodeName) !== -1) {\n return;\n }\n tensorMap[nodeName].forEach(tensor => {\n if (tensor != null) {\n intermediateTensorConsumerCount[tensor.id] =\n (intermediateTensorConsumerCount[tensor.id] || 0) +\n node.children.length;\n }\n });\n node.inputs.forEach(input => {\n // Skip any control flow nodes, since its dependency is tricky to track\n // correctly.\n if (input.category !== 'control') {\n const tensors = getTensorsForCurrentContenxt(input.name, tensorMap, context);\n if (tensors != null) {\n tensors.forEach(tensor => {\n if (tensor && !tensorsToKeep.has(tensor.id)) {\n const count = intermediateTensorConsumerCount[tensor.id];\n if (count === 1) {\n tensor.dispose();\n delete intermediateTensorConsumerCount[tensor.id];\n }\n else if (count != null) {\n // only intermediate nodes has count set, inputs and weights are\n // not.\n intermediateTensorConsumerCount[tensor.id]--;\n }\n }\n });\n }\n }\n });\n }\n /**\n * Executes the inference for given input tensors in Async fashion.\n * @param inputs Tensor map for the model inputs, keyed by the input node\n * names.\n * @param outputs output node name from the Tensorflow model, if no outputs\n * are specified, the default outputs of the model would be used. You can\n * inspect intermediate nodes of the model by adding them to the outputs\n * array.\n */\n async executeAsync(inputs, outputs) {\n return this._executeAsync(inputs, outputs);\n }\n /**\n * Executes the inference for given input tensors in Async fashion.\n * @param inputs Tensor map for the model inputs, keyed by the input node\n * names.\n * @param outputs Optional. output node name from the Tensorflow model,\n * if no outputs are specified, the default outputs of the model would be\n * used. You can inspect intermediate nodes of the model by adding them to the\n * outputs array.\n * @param isFunctionExecution Optional. Flag for executing a function.\n * @param tensorArrayMap Optional, global TensorArray map by id. Used for\n * function execution.\n * @param tensorArrayMap Optinal global TensorList map by id. 
Used for\n * function execution.\n */\n async _executeAsync(inputs, outputs, isFunctionExecution = false, tensorArrayMap = {}, tensorListMap = {}) {\n if (!isFunctionExecution) {\n inputs = this.mapInputs(inputs);\n this.checkInputs(inputs);\n this.checkInputShapeAndType(inputs);\n outputs = this.mapOutputs(outputs);\n this.checkOutputs(outputs);\n }\n const context = new ExecutionContext(this.weightMap, tensorArrayMap, tensorListMap, this.functionExecutorMap);\n // Graph with control flow op requires runtime evaluation of the execution\n // order, while without control flow the execution order is pre-determined\n // in the compile method.\n const tensorMap = await this.executeWithControlFlow(inputs, context, outputs, isFunctionExecution);\n const results = outputs.map(name => getTensor(name, tensorMap, context));\n // dispose all the intermediate tensors\n const outputIds = results.map(t => t.id);\n const inputIds = Object.keys(inputs).map(name => inputs[name].id);\n const keepIds = new Set([...outputIds, ...inputIds, ...this.weightIds]);\n Object.keys(tensorMap).forEach(key => {\n const tensorArray = tensorMap[key];\n tensorArray.forEach(tensor => {\n if (tensor && !tensor.isDisposed && !keepIds.has(tensor.id)) {\n tensor.dispose();\n }\n });\n });\n // dispose the context for the root executor\n if (this.parent == null) {\n context.dispose(keepIds);\n }\n return results;\n }\n async executeFunctionAsync(inputs, tensorArrayMap, tensorListMap) {\n const mappedInputs = inputs.reduce((map, tensor, index) => {\n map[this.inputs[index].name] = tensor;\n return map;\n }, {});\n return this._executeAsync(mappedInputs, this.outputNodes, true, tensorArrayMap, tensorListMap);\n }\n /**\n * When there are control flow nodes in the graph, the graph execution use\n * ExecutionContext to keep track of the frames and loop iterators.\n * @param inputs placeholder tensors for the graph.\n * @param context the execution context object for current execution.\n * @param outputNames Optional. output node name from the Tensorflow model,\n * if no outputs are specified, the default outputs of the model would be\n * used. 
You can inspect intermediate nodes of the model by adding them to the\n * outputs array.\n * @param isFunctionExecution Flag for executing a function.\n */\n async executeWithControlFlow(inputs, context, outputNames, isFunctionExecution) {\n const names = Object.keys(inputs);\n const inputNodes = names.map(name => this.graph.nodes[parseNodeName(name)[0]]);\n const outputNodeNames = outputNames.map(name => parseNodeName(name)[0]);\n let outputNodes = outputNodeNames.map(name => this.graph.nodes[name]);\n // If no outputs are specified, then use the default outputs of the model.\n if (outputNodes.length === 0) {\n outputNodes = this._outputs;\n }\n const { usedNodes, missingInputs, dynamicNode, syncInputs } = getExecutionSubgraph(inputs, outputNodes, this.weightMap, this._initNodes);\n // First nodes to execute include inputNodes, weights, and initNodes.\n const stack = [\n ...inputNodes, ...this.graph.weights, ...(this._initNodes || [])\n ].map(node => {\n return { node, contexts: context.currentContext };\n });\n const tensorsMap = Object.assign({}, this.weightMap);\n Object.keys(inputs).forEach(name => {\n const [nodeName, index] = parseNodeName(name);\n const tensors = [];\n tensors[index] = inputs[name];\n tensorsMap[nodeName] = tensors;\n });\n const intermediateTensorConsumerCount = {};\n const tensorsToKeep = this.getFrozenTensorIds(tensorsMap);\n const added = {};\n while (stack.length > 0) {\n const promises = this.processStack(inputNodes, stack, context, tensorsMap, added, tensorsToKeep, outputNodeNames, intermediateTensorConsumerCount, usedNodes);\n await Promise.all(promises);\n }\n if (dynamicNode == null && !isFunctionExecution) {\n console.warn(`This model execution did not contain any nodes with control flow ` +\n `or dynamic output shapes. You can use model.execute() instead.`);\n }\n const missingOutputs = outputNodes\n .filter(node => !isControlFlow(node) &&\n !getTensor(node.name, tensorsMap, context))\n .map(node => node.name);\n if (missingOutputs.length > 0) {\n let alternativeMsg = '';\n if (dynamicNode != null) {\n alternativeMsg =\n `Alternatively, to avoid the dynamic ops, use model.execute() ` +\n `and specify the inputs [${syncInputs}]`;\n }\n throw new Error(`Cannot compute the outputs [${missingOutputs}] from the provided ` +\n `inputs [${names}]. Consider providing the following inputs: ` +\n `[${missingInputs}]. 
${alternativeMsg}`);\n }\n return tensorsMap;\n }\n processStack(inputNodes, stack, context, tensorMap, added, tensorsToKeep, outputNames, intermediateTensorConsumerCount, usedNodes) {\n const promises = [];\n while (stack.length > 0) {\n const item = stack.pop();\n context.currentContext = item.contexts;\n let nodeName = '';\n // The tensor of the Enter op with isConstant set should be set\n // in the parent scope, so it will be available as constant for the\n // whole loop.\n if (item.node.op === 'Enter' &&\n getParamValue('isConstant', item.node, tensorMap, context)) {\n [nodeName] = getNodeNameAndIndex(item.node.name, context);\n }\n // only process nodes that are not in the tensorMap yet, this include\n // inputNodes and internal initNodes.\n if (tensorMap[item.node.name] == null) {\n const tensors = executeOp(item.node, tensorMap, context, this._resourceManager);\n if (!nodeName) {\n [nodeName] = getNodeNameAndIndex(item.node.name, context);\n }\n const currentContext = context.currentContext;\n if (util.isPromise(tensors)) {\n promises.push(tensors.then(t => {\n tensorMap[nodeName] = t;\n context.currentContext = currentContext;\n this.checkTensorForDisposal(nodeName, item.node, tensorMap, context, tensorsToKeep, outputNames, intermediateTensorConsumerCount);\n this.processChildNodes(item.node, stack, context, tensorMap, added, usedNodes);\n return t;\n }));\n }\n else {\n tensorMap[nodeName] = tensors;\n this.checkTensorForDisposal(nodeName, item.node, tensorMap, context, tensorsToKeep, outputNames, intermediateTensorConsumerCount);\n this.processChildNodes(item.node, stack, context, tensorMap, added, usedNodes);\n }\n }\n else {\n this.processChildNodes(item.node, stack, context, tensorMap, added, usedNodes);\n }\n }\n return promises;\n }\n processChildNodes(node, stack, context, tensorMap, added, usedNodes) {\n node.children.forEach((childNode) => {\n const [nodeName,] = getNodeNameAndIndex(childNode.name, context);\n if (added[nodeName] || !usedNodes.has(childNode.name)) {\n return;\n }\n // Merge op can be pushed if any of its inputs has value.\n if (childNode.op === 'Merge') {\n if (childNode.inputNames.some(name => {\n return !!getTensor(name, tensorMap, context);\n })) {\n added[nodeName] = true;\n stack.push({ contexts: context.currentContext, node: childNode });\n }\n }\n else // Otherwise all inputs must to have value.\n if (childNode.inputNames.every(name => {\n return !!getTensor(name, tensorMap, context);\n })) {\n added[nodeName] = true;\n stack.push({ contexts: context.currentContext, node: childNode });\n }\n });\n }\n /**\n * Releases the memory used by the weight tensors.\n */\n dispose() {\n Object.keys(this.weightMap)\n .forEach(key => this.weightMap[key].forEach(tensor => tensor.dispose()));\n }\n checkInputShapeAndType(inputs) {\n Object.keys(inputs).forEach(name => {\n const input = inputs[name];\n const [nodeName,] = parseNodeName(name);\n const node = this.graph.nodes[nodeName];\n if (node.attrParams['shape'] && node.attrParams['shape'].value) {\n const shape = node.attrParams['shape'].value;\n const match = shape.length === input.shape.length &&\n input.shape.every((dim, index) => shape[index] === -1 || shape[index] === dim);\n util.assert(match, () => `The shape of dict['${node.name}'] provided in ` +\n `model.execute(dict) must be [${shape}], but was ` +\n `[${input.shape}]`);\n }\n if (node.attrParams['dtype'] && node.attrParams['dtype'].value) {\n util.assert(input.dtype === node.attrParams['dtype'].value, () => `The dtype of dict['${node.name}'] 
provided in ` +\n `model.execute(dict) must be ` +\n `${node.attrParams['dtype'].value}, but was ${input.dtype}`);\n }\n });\n }\n mapInputs(inputs) {\n const result = {};\n for (const inputName in inputs) {\n if (this._signature != null && this._signature.inputs != null &&\n this._signature.inputs[inputName] != null) {\n const tensor = this._signature.inputs[inputName];\n result[tensor.name] = inputs[inputName];\n }\n else {\n result[inputName] = inputs[inputName];\n }\n }\n return result;\n }\n checkInputs(inputs) {\n const notInGraph = Object.keys(inputs).filter(name => {\n const [nodeName] = parseNodeName(name);\n return this.graph.nodes[nodeName] == null;\n });\n if (notInGraph.length > 0) {\n throw new Error(`The dict provided in model.execute(dict) has ` +\n `keys: [${notInGraph}] that are not part of graph`);\n }\n }\n mapOutputs(outputs) {\n return outputs.map(name => {\n if (this._signature != null && this._signature.outputs != null &&\n this._signature.outputs[name] != null) {\n const tensor = this._signature.outputs[name];\n return tensor.name;\n }\n return name;\n }, {});\n }\n checkOutputs(outputs) {\n outputs.forEach(name => {\n const [normalizedName] = parseNodeName(name);\n if (!this.graph.nodes[normalizedName]) {\n throw new Error(`The output '${name}' is not found in the graph`);\n }\n });\n }\n}\n//# sourceMappingURL=graph_executor.js.map", "/**\n * Contains global resources of a model.\n */\nexport class ResourceManager {\n constructor(hashTableNameToHandle = {}, hashTableMap = {}) {\n this.hashTableNameToHandle = hashTableNameToHandle;\n this.hashTableMap = hashTableMap;\n }\n /**\n * Register a `HashTable` in the resource manager.\n *\n * The `HashTable` can be retrieved by `resourceManager.getHashTableById`,\n * where id is the table handle tensor's id.\n *\n * @param name Op node name that creates the `HashTable`.\n * @param hashTable The `HashTable` to be added to resource manager.\n */\n addHashTable(name, hashTable) {\n this.hashTableNameToHandle[name] = hashTable.handle;\n this.hashTableMap[hashTable.id] = hashTable;\n }\n /**\n * Get the table handle by node name.\n * @param name Op node name that creates the `HashTable`. This name is also\n * used in the inputs list of lookup and import `HashTable` ops.\n */\n getHashTableHandleByName(name) {\n return this.hashTableNameToHandle[name];\n }\n /**\n * Get the actual `HashTable` by its handle tensor's id.\n * @param id The id of the handle tensor.\n */\n getHashTableById(id) {\n return this.hashTableMap[id];\n }\n /**\n * Dispose `ResourceManager`, including its hashTables and tensors in them.\n */\n dispose() {\n for (const key in this.hashTableMap) {\n this.hashTableMap[key].clearAndClose();\n delete this.hashTableMap[key];\n }\n for (const name in this.hashTableNameToHandle) {\n this.hashTableNameToHandle[name].dispose();\n delete this.hashTableNameToHandle[name];\n }\n }\n}\n//# sourceMappingURL=resource_manager.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { io, Tensor } from '@tensorflow/tfjs-core';\nimport { OperationMapper } from '../operations/operation_mapper';\nimport { GraphExecutor } from './graph_executor';\nimport { ResourceManager } from './resource_manager';\nexport const TFHUB_SEARCH_PARAM = '?tfjs-format=file';\nexport const DEFAULT_MODEL_NAME = 'model.json';\n/**\n * A `tf.GraphModel` is a directed, acyclic graph built from a\n * SavedModel GraphDef and allows inference execution.\n *\n * A `tf.GraphModel` can only be created by loading from a model converted from\n * a [TensorFlow SavedModel](https://www.tensorflow.org/guide/saved_model) using\n * the command line converter tool and loaded via `tf.loadGraphModel`.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\nexport class GraphModel {\n /**\n * @param modelUrl url for the model, or an `io.IOHandler`.\n * @param weightManifestUrl url for the weight file generated by\n * scripts/convert.py script.\n * @param requestOption options for Request, which allows to send credentials\n * and custom headers.\n * @param onProgress Optional, progress callback function, fired periodically\n * before the load is completed.\n */\n constructor(modelUrl, loadOptions = {}) {\n this.modelUrl = modelUrl;\n this.loadOptions = loadOptions;\n this.version = 'n/a';\n if (loadOptions == null) {\n this.loadOptions = {};\n }\n this.resourceManager = new ResourceManager();\n }\n // Returns the version information for the tensorflow model GraphDef.\n get modelVersion() {\n return this.version;\n }\n get inputNodes() {\n return this.executor.inputNodes;\n }\n get outputNodes() {\n return this.executor.outputNodes;\n }\n get inputs() {\n return this.executor.inputs;\n }\n get outputs() {\n return this.executor.outputs;\n }\n get weights() {\n return this.executor.weightMap;\n }\n findIOHandler() {\n const path = this.modelUrl;\n if (path.load != null) {\n // Path is an IO Handler.\n this.handler = path;\n }\n else if (this.loadOptions.requestInit != null) {\n this.handler = io.browserHTTPRequest(path, this.loadOptions);\n }\n else {\n const handlers = io.getLoadHandlers(path, this.loadOptions);\n if (handlers.length === 0) {\n // For backward compatibility: if no load handler can be found,\n // assume it is a relative http path.\n handlers.push(io.browserHTTPRequest(path, this.loadOptions));\n }\n else if (handlers.length > 1) {\n throw new Error(`Found more than one (${handlers.length}) load handlers for ` +\n `URL '${[path]}'`);\n }\n this.handler = handlers[0];\n }\n }\n /**\n * Loads the model and weight files, construct the in memory weight map and\n * compile the inference graph.\n */\n async load() {\n this.findIOHandler();\n if (this.handler.load == null) {\n throw new Error('Cannot proceed with model loading because the IOHandler provided ' +\n 'does not have the `load` method implemented.');\n }\n const artifacts 
= await this.handler.load();\n return this.loadSync(artifacts);\n }\n /**\n * Synchronously construct the in memory weight map and\n * compile the inference graph. Also initialize hashtable if any.\n *\n * @doc {heading: 'Models', subheading: 'Classes', ignoreCI: true}\n */\n loadSync(artifacts) {\n this.artifacts = artifacts;\n const graph = this.artifacts.modelTopology;\n let signature = {};\n if (this.artifacts.userDefinedMetadata != null) {\n signature = // tslint:disable-next-line:no-any\n this.artifacts.userDefinedMetadata.signature;\n }\n this.version = `${graph.versions.producer}.${graph.versions.minConsumer}`;\n const weightMap = io.decodeWeights(this.artifacts.weightData, this.artifacts.weightSpecs);\n this.executor = new GraphExecutor(OperationMapper.Instance.transformGraph(graph, signature));\n this.executor.weightMap = this.convertTensorMapToTensorsMap(weightMap);\n // Attach a model-level resourceManager to each executor to share resources,\n // such as `HashTable`.\n this.executor.resourceManager = this.resourceManager;\n if (artifacts.modelInitializer != null) {\n const initializer = OperationMapper.Instance.transformGraph(artifacts.modelInitializer);\n this.initializer = new GraphExecutor(initializer);\n this.initializer.weightMap = this.executor.weightMap;\n // Attach a model-level resourceManager to the initializer, the\n // hashTables created from when executing the initializer will be stored\n // in the resourceManager.\n this.initializer.resourceManager = this.resourceManager;\n this.initializer.executeAsync({}, []);\n }\n return true;\n }\n /**\n * Save the configuration and/or weights of the GraphModel.\n *\n * An `IOHandler` is an object that has a `save` method of the proper\n * signature defined. The `save` method manages the storing or\n * transmission of serialized data (\"artifacts\") that represent the\n * model's topology and weights onto or via a specific medium, such as\n * file downloads, local storage, IndexedDB in the web browser and HTTP\n * requests to a server. TensorFlow.js provides `IOHandler`\n * implementations for a number of frequently used saving mediums, such as\n * `tf.io.browserDownloads` and `tf.io.browserLocalStorage`. 
See `tf.io`\n * for more details.\n *\n * This method also allows you to refer to certain types of `IOHandler`s\n * as URL-like string shortcuts, such as 'localstorage://' and\n * 'indexeddb://'.\n *\n * Example 1: Save `model`'s topology and weights to browser [local\n * storage](https://developer.mozilla.org/en-US/docs/Web/API/Window/localStorage);\n * then load it back.\n *\n * ```js\n * const modelUrl =\n * 'https://storage.googleapis.com/tfjs-models/savedmodel/mobilenet_v2_1.0_224/model.json';\n * const model = await tf.loadGraphModel(modelUrl);\n * const zeros = tf.zeros([1, 224, 224, 3]);\n * model.predict(zeros).print();\n *\n * const saveResults = await model.save('localstorage://my-model-1');\n *\n * const loadedModel = await tf.loadGraphModel('localstorage://my-model-1');\n * console.log('Prediction from loaded model:');\n * model.predict(zeros).print();\n * ```\n *\n * @param handlerOrURL An instance of `IOHandler` or a URL-like,\n * scheme-based string shortcut for `IOHandler`.\n * @param config Options for saving the model.\n * @returns A `Promise` of `SaveResult`, which summarizes the result of\n * the saving, such as byte sizes of the saved artifacts for the model's\n * topology and weight values.\n *\n * @doc {heading: 'Models', subheading: 'Classes', ignoreCI: true}\n */\n async save(handlerOrURL, config) {\n if (typeof handlerOrURL === 'string') {\n const handlers = io.getSaveHandlers(handlerOrURL);\n if (handlers.length === 0) {\n throw new Error(`Cannot find any save handlers for URL '${handlerOrURL}'`);\n }\n else if (handlers.length > 1) {\n throw new Error(`Found more than one (${handlers.length}) save handlers for ` +\n `URL '${handlerOrURL}'`);\n }\n handlerOrURL = handlers[0];\n }\n if (handlerOrURL.save == null) {\n throw new Error('GraphModel.save() cannot proceed because the IOHandler ' +\n 'provided does not have the `save` attribute defined.');\n }\n return handlerOrURL.save(this.artifacts);\n }\n /**\n * Execute the inference for the input tensors.\n *\n * @param input The input tensors, when there is single input for the model,\n * inputs param should be a `tf.Tensor`. For models with mutliple inputs,\n * inputs params should be in either `tf.Tensor`[] if the input order is\n * fixed, or otherwise NamedTensorMap format.\n *\n * For model with multiple inputs, we recommend you use NamedTensorMap as the\n * input type, if you use `tf.Tensor`[], the order of the array needs to\n * follow the\n * order of inputNodes array. @see {@link GraphModel.inputNodes}\n *\n * You can also feed any intermediate nodes using the NamedTensorMap as the\n * input type. For example, given the graph\n * InputNode => Intermediate => OutputNode,\n * you can execute the subgraph Intermediate => OutputNode by calling\n * model.execute('IntermediateNode' : tf.tensor(...));\n *\n * This is useful for models that uses tf.dynamic_rnn, where the intermediate\n * state needs to be fed manually.\n *\n * For batch inference execution, the tensors for each input need to be\n * concatenated together. For example with mobilenet, the required input shape\n * is [1, 244, 244, 3], which represents the [batch, height, width, channel].\n * If we are provide a batched data of 100 images, the input tensor should be\n * in the shape of [100, 244, 244, 3].\n *\n * @param config Prediction configuration for specifying the batch size and\n * output node names. Currently the batch size option is ignored for graph\n * model.\n *\n * @returns Inference result tensors. 
The output would be single `tf.Tensor`\n * if model has single output node, otherwise Tensor[] or NamedTensorMap[]\n * will be returned for model with multiple outputs.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n predict(inputs, config) {\n return this.execute(inputs, this.outputNodes);\n }\n normalizeInputs(inputs) {\n if (!(inputs instanceof Tensor) && !Array.isArray(inputs)) {\n // The input is already a NamedTensorMap.\n return inputs;\n }\n inputs = Array.isArray(inputs) ? inputs : [inputs];\n if (inputs.length !== this.inputNodes.length) {\n throw new Error('Input tensor count mismatch,' +\n `the graph model has ${this.inputNodes.length} placeholders, ` +\n `while there are ${inputs.length} input tensors.`);\n }\n return this.inputNodes.reduce((map, inputName, i) => {\n map[inputName] = inputs[i];\n return map;\n }, {});\n }\n normalizeOutputs(outputs) {\n outputs = outputs || this.outputNodes;\n return !Array.isArray(outputs) ? [outputs] : outputs;\n }\n /**\n * Executes inference for the model for given input tensors.\n * @param inputs tensor, tensor array or tensor map of the inputs for the\n * model, keyed by the input node names.\n * @param outputs output node name from the Tensorflow model, if no\n * outputs are specified, the default outputs of the model would be used.\n * You can inspect intermediate nodes of the model by adding them to the\n * outputs array.\n *\n * @returns A single tensor if provided with a single output or no outputs\n * are provided and there is only one default output, otherwise return a\n * tensor array. The order of the tensor array is the same as the outputs\n * if provided, otherwise the order of outputNodes attribute of the model.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n execute(inputs, outputs) {\n inputs = this.normalizeInputs(inputs);\n outputs = this.normalizeOutputs(outputs);\n const result = this.executor.execute(inputs, outputs);\n return result.length > 1 ? result : result[0];\n }\n /**\n * Executes inference for the model for given input tensors in async\n * fashion, use this method when your model contains control flow ops.\n * @param inputs tensor, tensor array or tensor map of the inputs for the\n * model, keyed by the input node names.\n * @param outputs output node name from the Tensorflow model, if no outputs\n * are specified, the default outputs of the model would be used. You can\n * inspect intermediate nodes of the model by adding them to the outputs\n * array.\n *\n * @returns A Promise of single tensor if provided with a single output or\n * no outputs are provided and there is only one default output, otherwise\n * return a tensor map.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n async executeAsync(inputs, outputs) {\n inputs = this.normalizeInputs(inputs);\n outputs = this.normalizeOutputs(outputs);\n const result = await this.executor.executeAsync(inputs, outputs);\n return result.length > 1 ? 
result : result[0];\n }\n convertTensorMapToTensorsMap(map) {\n return Object.keys(map).reduce((newMap, key) => {\n newMap[key] = [map[key]];\n return newMap;\n }, {});\n }\n /**\n * Releases the memory used by the weight tensors and resourceManager.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n dispose() {\n this.executor.dispose();\n if (this.initializer) {\n this.initializer.dispose();\n }\n this.resourceManager.dispose();\n }\n}\n/**\n * Load a graph model given a URL to the model definition.\n *\n * Example of loading MobileNetV2 from a URL and making a prediction with a\n * zeros input:\n *\n * ```js\n * const modelUrl =\n * 'https://storage.googleapis.com/tfjs-models/savedmodel/mobilenet_v2_1.0_224/model.json';\n * const model = await tf.loadGraphModel(modelUrl);\n * const zeros = tf.zeros([1, 224, 224, 3]);\n * model.predict(zeros).print();\n * ```\n *\n * Example of loading MobileNetV2 from a TF Hub URL and making a prediction with\n * a zeros input:\n *\n * ```js\n * const modelUrl =\n * 'https://tfhub.dev/google/imagenet/mobilenet_v2_140_224/classification/2';\n * const model = await tf.loadGraphModel(modelUrl, {fromTFHub: true});\n * const zeros = tf.zeros([1, 224, 224, 3]);\n * model.predict(zeros).print();\n * ```\n * @param modelUrl The url or an `io.IOHandler` that loads the model.\n * @param options Options for the HTTP request, which allows to send credentials\n * and custom headers.\n *\n * @doc {heading: 'Models', subheading: 'Loading'}\n */\nexport async function loadGraphModel(modelUrl, options = {}) {\n if (modelUrl == null) {\n throw new Error('modelUrl in loadGraphModel() cannot be null. Please provide a url ' +\n 'or an IOHandler that loads the model');\n }\n if (options == null) {\n options = {};\n }\n if (options.fromTFHub) {\n if (modelUrl.load == null) {\n if (!modelUrl.endsWith('/')) {\n modelUrl = modelUrl + '/';\n }\n modelUrl = `${modelUrl}${DEFAULT_MODEL_NAME}${TFHUB_SEARCH_PARAM}`;\n }\n }\n const model = new GraphModel(modelUrl, options);\n await model.load();\n return model;\n}\n//# sourceMappingURL=graph_model.js.map", "/** @license See the LICENSE file. */\n// This code is auto-generated, do not modify this file!\nconst version = '2.7.0';\nexport { version };\n//# sourceMappingURL=version.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport { GraphModel, loadGraphModel } from './executor/graph_model';\nexport { deregisterOp, registerOp } from './operations/custom_op/register';\nexport { version as version_converter } from './version';\n//# sourceMappingURL=index.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport * as tf from '@tensorflow/tfjs-core';\n/**\n * Apply a mapping function to a nested structure in a recursive manner.\n *\n * The result of the mapping is an object with the same nested structure (i.e.,\n * of arrays and dicts) as the input, except that some subtrees are replaced,\n * according to the results of the mapping function.\n *\n * Mappings are memoized. Thus, if the nested structure contains the same\n * object in multiple positions, the output will contain the same mapped object\n * in those positions. Cycles are not supported, however.\n *\n * @param input: The object to which to apply the mapping function.\n * @param mapFn: A function that expects a single node of the object tree, and\n * returns a `DeepMapResult`. The `DeepMapResult` either provides a\n * replacement value for that node (i.e., replacing the subtree), or indicates\n * that the node should be processed recursively.\n */\nexport function deepMap(input, mapFn) {\n return deepMapInternal(input, mapFn);\n}\n/**\n * @param seen: A Map of known object mappings (i.e., memoized results of\n * `mapFn()`)\n * @param containedIn: An set containing objects on the reference path currently\n * being processed (used to detect cycles).\n */\nfunction deepMapInternal(input, mapFn, seen = new Map(), containedIn = new Set()) {\n if (input == null) {\n return null;\n }\n if (containedIn.has(input)) {\n throw new Error('Circular references are not supported.');\n }\n if (seen.has(input)) {\n return seen.get(input);\n }\n const result = mapFn(input);\n if (result.recurse && result.value !== null) {\n throw new Error('A deep map function may not return both a value and recurse=true.');\n }\n if (!result.recurse) {\n seen.set(input, result.value);\n return result.value;\n }\n else if (isIterable(input)) {\n // tslint:disable-next-line:no-any\n const mappedIterable = Array.isArray(input) ? [] : {};\n containedIn.add(input);\n for (const k in input) {\n const child = input[k];\n const childResult = deepMapInternal(child, mapFn, seen, containedIn);\n mappedIterable[k] = childResult;\n }\n containedIn.delete(input);\n return mappedIterable;\n }\n else {\n throw new Error(`Can't recurse into non-iterable type: ${input}`);\n }\n}\n// TODO(soergel, kangyizhang) Reconsider naming of deepZip() to avoid confusion\n// with zip()\n/**\n * Zip nested structures together in a recursive manner.\n *\n * This has the effect of transposing or pivoting data, e.g. converting it from\n * a row-major representation to a column-major representation.\n *\n * For example, `deepZip([{a: 1, b: 2}, {a: 3, b: 4}])` returns\n * `{a: [1, 3], b: [2, 4]}`.\n *\n * The inputs should all have the same nested structure (i.e., of arrays and\n * dicts). 
The result is a single object with the same nested structure, where\n * the leaves are arrays collecting the values of the inputs at that location\n * (or, optionally, the result of a custom function applied to those arrays).\n *\n * @param inputs: An array of the objects to zip together.\n * @param zipFn: (optional) A function that expects an array of elements at a\n * single node of the object tree, and returns a `DeepMapResult`. The\n * `DeepMapResult` either provides a result value for that node (i.e.,\n * representing the subtree), or indicates that the node should be processed\n * recursively. The default zipFn recurses as far as possible and places\n * arrays at the leaves.\n */\nexport function deepZip(inputs, zipFn = zipToList) {\n return deepZipInternal(inputs, zipFn);\n}\n/**\n * @param containedIn: An set containing objects on the reference path currently\n * being processed (used to detect cycles).\n */\nfunction deepZipInternal(inputs, zipFn, containedIn = new Set()) {\n // The recursion follows the structure of input 0; it's assumed that all the\n // other inputs have the same structure.\n const input = inputs[0];\n if (containedIn.has(input)) {\n throw new Error('Circular references are not supported.');\n }\n const result = zipFn(inputs);\n if (result.recurse && result.value !== null) {\n throw new Error('A deep zip function may not return both a value and recurse=true.');\n }\n if (!result.recurse) {\n return result.value;\n }\n else if (isIterable(input)) {\n // tslint:disable-next-line:no-any\n const mappedIterable = Array.isArray(input) ? [] : {};\n containedIn.add(input);\n for (const k in input) {\n const children = inputs.map(x => x[k]);\n const childResult = deepZipInternal(children, zipFn, containedIn);\n mappedIterable[k] = childResult;\n }\n containedIn.delete(input);\n return mappedIterable;\n }\n else {\n throw new Error(`Can't recurse into non-iterable type: ${input}`);\n }\n}\n// tslint:disable-next-line:no-any\nexport function zipToList(x) {\n if (x === null) {\n return null;\n }\n // TODO(soergel): validate array type?\n if (isIterable(x[0])) {\n return { value: null, recurse: true };\n }\n else {\n return { value: x, recurse: false };\n }\n}\n/**\n * Apply an async mapping function to a nested structure in a recursive manner.\n *\n * This first creates a nested structure of Promises, and then awaits all of\n * those, resulting in a single Promise for a resolved nested structure.\n *\n * The result of the mapping is an object with the same nested structure (i.e.,\n * of arrays and dicts) as the input, except that some subtrees are replaced,\n * according to the results of the mapping function.\n *\n * Mappings are memoized. Thus, if the nested structure contains the same\n * object in multiple positions, the output will contain the same mapped object\n * in those positions. Cycles are not supported, however.\n *\n * @param input: The object to which to apply the mapping function.\n * @param mapFn: A function that expects a single node of the object tree, and\n * returns a `DeepMapAsyncResult`. The `DeepMapAsyncResult` either provides\n * a `Promise` for a replacement value for that node (i.e., replacing the\n * subtree), or indicates that the node should be processed recursively. 
Note\n * that the decision whether or not to recurse must be made immediately; only\n * the mapped value may be promised.\n */\nexport async function deepMapAndAwaitAll(input, mapFn) {\n const seen = new Map();\n // First do a normal deepMap, collecting Promises in 'seen' as a side effect.\n deepMapInternal(input, mapFn, seen);\n // Replace the Promises in 'seen' in place.\n // Note TypeScript provides no async map iteration, and regular map iteration\n // is broken too, so sadly we have to do Array.from() to make it work.\n // (There's no advantage to Promise.all(), and that would be tricky anyway.)\n for (const key of Array.from(seen.keys())) {\n const value = seen.get(key);\n if (tf.util.isPromise(value)) {\n const mappedValue = await value;\n seen.set(key, mappedValue);\n }\n }\n // Normal deepMap again, this time filling in the resolved values.\n // It's unfortunate that we have to do two passes.\n // TODO(soergel): test performance and think harder about a fast solution.\n const result = deepMapInternal(input, mapFn, seen);\n return result;\n}\n/**\n * Determine whether the argument is iterable.\n *\n * @returns true if the argument is an array or any non-Tensor object.\n */\n// tslint:disable-next-line:no-any\nexport function isIterable(obj) {\n return obj != null && (!ArrayBuffer.isView(obj)) &&\n (Array.isArray(obj) ||\n (typeof obj === 'object' && !(obj instanceof tf.Tensor)));\n}\n/**\n * Determine whether the argument can be converted to Tensor.\n *\n * Tensors, primitives, arrays, and TypedArrays all qualify; anything else does\n * not.\n *\n * @returns true if the argument can be converted to Tensor.\n */\n// tslint:disable-next-line:no-any\nexport function canTensorify(obj) {\n return obj == null || isPrimitive(obj) || Array.isArray(obj) ||\n (typeof obj === 'object' && (obj instanceof tf.Tensor)) ||\n tf.util.isTypedArray(obj);\n}\n/**\n * Returns true if the given `value` is a primitive type. Otherwise returns\n * false. This is equivalant to node util.isPrimitive\n */\nfunction isPrimitive(value) {\n return (value === null ||\n (typeof value !== 'object' && typeof value !== 'function'));\n}\n//# sourceMappingURL=deep_map.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport * as tf from '@tensorflow/tfjs-core';\nimport { deepMap, isIterable } from './deep_map';\nexport function deepClone(container) {\n return deepMap(container, cloneIfTensor);\n}\n// tslint:disable-next-line: no-any\nfunction cloneIfTensor(item) {\n if (item instanceof tf.Tensor) {\n return ({ value: item.clone(), recurse: false });\n }\n else if (isIterable(item)) {\n return { value: null, recurse: true };\n }\n else {\n return { value: item, recurse: false };\n }\n}\n//# sourceMappingURL=deep_clone.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\n/**\n * A ring buffer, providing O(1) FIFO, LIFO, and related operations.\n */\nexport class RingBuffer {\n /**\n * Constructs a `RingBuffer`.\n * @param capacity The number of items that the buffer can accomodate.\n */\n constructor(capacity) {\n this.capacity = capacity;\n // Note we store the indices in the range 0 <= index < 2*capacity.\n // This allows us to distinguish the full from the empty case.\n // See https://www.snellman.net/blog/archive/2016-12-13-ring-buffers/\n this.begin = 0; // inclusive\n this.end = 0; // exclusive\n if (capacity == null) {\n throw new RangeError('Can\\'t create a ring buffer of unknown capacity.');\n }\n if (capacity < 1) {\n throw new RangeError('Can\\'t create ring buffer of capacity < 1.');\n }\n this.data = new Array(capacity);\n this.doubledCapacity = 2 * capacity;\n }\n /**\n * Map any index into the range 0 <= index < 2*capacity.\n */\n wrap(index) {\n // don't trust % on negative numbers\n while (index < 0) {\n index += this.doubledCapacity;\n }\n return index % this.doubledCapacity;\n }\n get(index) {\n if (index < 0) {\n throw new RangeError('Can\\'t get item at a negative index.');\n }\n return this.data[index % this.capacity];\n }\n set(index, value) {\n if (index < 0) {\n throw new RangeError('Can\\'t set item at a negative index.');\n }\n this.data[index % this.capacity] = value;\n }\n /**\n * Returns the current number of items in the buffer.\n */\n length() {\n let length = this.end - this.begin;\n if (length < 0) {\n length = this.doubledCapacity + length;\n }\n return length;\n }\n /**\n * Reports whether the buffer is full.\n * @returns true if the number of items in the buffer equals its capacity, and\n * false otherwise.\n */\n isFull() {\n return this.length() === this.capacity;\n }\n /**\n * Reports whether the buffer is empty.\n * @returns true if the number of items in the buffer equals zero, and\n * false otherwise.\n */\n isEmpty() {\n return this.length() === 0;\n }\n /**\n * Adds an item to the end of the buffer.\n */\n push(value) {\n if (this.isFull()) {\n throw new RangeError('Ring buffer is full.');\n }\n this.set(this.end, value);\n this.end = this.wrap(this.end + 1);\n }\n /**\n * Adds many items to the end of the buffer, in order.\n */\n pushAll(values) {\n for (const value of values) {\n this.push(value);\n }\n }\n /**\n * Removes and returns the last item in the buffer.\n */\n pop() {\n if (this.isEmpty()) {\n throw new RangeError('Ring buffer is empty.');\n }\n this.end = this.wrap(this.end - 1);\n const result = this.get(this.end);\n this.set(this.end, undefined);\n return result;\n }\n /**\n * Adds an item to the beginning of the buffer.\n */\n unshift(value) {\n if (this.isFull()) {\n throw new RangeError('Ring buffer is full.');\n }\n this.begin = this.wrap(this.begin - 1);\n this.set(this.begin, value);\n }\n /**\n * Removes and returns the 
first item in the buffer.\n */\n shift() {\n if (this.isEmpty()) {\n throw new RangeError('Ring buffer is empty.');\n }\n const result = this.get(this.begin);\n this.set(this.begin, undefined);\n this.begin = this.wrap(this.begin + 1);\n return result;\n }\n /**\n * Removes and returns a specific item in the buffer, and moves the last item\n * to the vacated slot. This is useful for implementing a shuffling stream.\n * Note that this operation necessarily scrambles the original order.\n *\n * @param relativeIndex: the index of the item to remove, relative to the\n * first item in the buffer (e.g., hiding the ring nature of the underlying\n * storage).\n */\n shuffleExcise(relativeIndex) {\n if (this.isEmpty()) {\n throw new RangeError('Ring buffer is empty.');\n }\n const index = this.wrap(this.begin + relativeIndex);\n const result = this.get(index);\n this.set(index, this.pop());\n return result;\n }\n}\n//# sourceMappingURL=ring_buffer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { RingBuffer } from './ring_buffer';\nexport class GrowingRingBuffer extends RingBuffer {\n /**\n * Constructs a `GrowingRingBuffer`.\n */\n constructor() {\n super(GrowingRingBuffer.INITIAL_CAPACITY);\n }\n isFull() {\n return false;\n }\n push(value) {\n if (super.isFull()) {\n this.expand();\n }\n super.push(value);\n }\n unshift(value) {\n if (super.isFull()) {\n this.expand();\n }\n super.unshift(value);\n }\n /**\n * Doubles the capacity of the buffer.\n */\n expand() {\n const newCapacity = this.capacity * 2;\n const newData = new Array(newCapacity);\n const len = this.length();\n // Rotate the buffer to start at index 0 again, since we can't just\n // allocate more space at the end.\n for (let i = 0; i < len; i++) {\n newData[i] = this.get(this.wrap(this.begin + i));\n }\n this.data = newData;\n this.capacity = newCapacity;\n this.doubledCapacity = 2 * this.capacity;\n this.begin = 0;\n this.end = len;\n }\n}\nGrowingRingBuffer.INITIAL_CAPACITY = 32;\n//# sourceMappingURL=growing_ring_buffer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport * as tf from '@tensorflow/tfjs-core';\nimport * as seedrandom from 'seedrandom';\nimport { deepClone } from '../util/deep_clone';\nimport { deepMapAndAwaitAll, deepZip, zipToList } from '../util/deep_map';\nimport { GrowingRingBuffer } from '../util/growing_ring_buffer';\nimport { RingBuffer } from '../util/ring_buffer';\n// Here we implement a simple asynchronous iterator.\n// This lets us avoid using either third-party stream libraries or\n// recent TypeScript language support requiring polyfills.\n/**\n * Create a `LazyIterator` from an array of items.\n */\nexport function iteratorFromItems(items) {\n return new ArrayIterator(items);\n}\n/**\n * Create a `LazyIterator` of incrementing integers.\n */\nexport function iteratorFromIncrementing(start) {\n let i = start;\n return iteratorFromFunction(() => ({ value: i++, done: false }));\n}\n/**\n * Create a `LazyIterator` from a function.\n *\n * ```js\n * let i = -1;\n * const func = () =>\n * ++i < 5 ? {value: i, done: false} : {value: null, done: true};\n * const iter = tf.data.iteratorFromFunction(func);\n * await iter.forEachAsync(e => console.log(e));\n * ```\n *\n * @param func A function that produces data on each call.\n */\nexport function iteratorFromFunction(func) {\n return new FunctionCallIterator(func);\n}\n/**\n * Create a `LazyIterator` by concatenating underlying streams, which are\n * themselves provided as a stream.\n *\n * This can also be thought of as a \"stream flatten\" operation.\n *\n * @param baseIterators A stream of streams to be concatenated.\n * @param baseErrorHandler An optional function that can intercept `Error`s\n * raised during a `next()` call on the base stream. This function can decide\n * whether the error should be propagated, whether the error should be\n * ignored, or whether the base stream should be terminated.\n */\nexport function iteratorFromConcatenated(baseIterators, baseErrorHandler) {\n return new ChainedIterator(baseIterators, baseErrorHandler);\n}\n/**\n * Create a `LazyIterator` by concatenating streams produced by calling a\n * stream-generating function a given number of times.\n *\n * Since a `LazyIterator` is read-once, it cannot be repeated, but this\n * function can be used to achieve a similar effect:\n *\n * LazyIterator.ofConcatenatedFunction(() => new MyIterator(), 6);\n *\n * @param iteratorFunc: A function that produces a new stream on each call.\n * @param count: The number of times to call the function.\n * @param baseErrorHandler An optional function that can intercept `Error`s\n * raised during a `next()` call on the base stream. 
This function can decide\n * whether the error should be propagated, whether the error should be\n * ignored, or whether the base stream should be terminated.\n */\nexport function iteratorFromConcatenatedFunction(iteratorFunc, count, baseErrorHandler) {\n return iteratorFromConcatenated(iteratorFromFunction(iteratorFunc).take(count), baseErrorHandler);\n}\n/**\n * Create a `LazyIterator` by zipping together an array, dict, or nested\n * structure of `LazyIterator`s (and perhaps additional constants).\n *\n * The underlying streams must provide elements in a consistent order such\n * that they correspond.\n *\n * Typically, the underlying streams should have the same number of\n * elements. If they do not, the behavior is determined by the\n * `mismatchMode` argument.\n *\n * The nested structure of the `iterators` argument determines the\n * structure of elements in the resulting iterator.\n *\n * @param iterators: An array or object containing LazyIterators at the\n * leaves.\n * @param mismatchMode: Determines what to do when one underlying iterator\n * is exhausted before the others. `ZipMismatchMode.FAIL` (the default)\n * causes an error to be thrown in this case. `ZipMismatchMode.SHORTEST`\n * causes the zipped iterator to terminate with the furst underlying\n * streams, so elements remaining on the longer streams are ignored.\n * `ZipMismatchMode.LONGEST` causes the zipped stream to continue, filling\n * in nulls for the exhausted streams, until all streams are exhausted.\n */\nexport function iteratorFromZipped(iterators, mismatchMode = ZipMismatchMode.FAIL) {\n return new ZipIterator(iterators, mismatchMode);\n}\n/**\n * An asynchronous iterator, providing lazy access to a potentially\n * unbounded stream of elements.\n *\n * Iterator can be obtained from a dataset:\n * `const iter = await dataset.iterator();`\n */\nexport class LazyIterator {\n /**\n * Collect all remaining elements of a bounded stream into an array.\n * Obviously this will succeed only for small streams that fit in memory.\n * Useful for testing.\n *\n * @returns A Promise for an array of stream elements, which will resolve\n * when the stream is exhausted.\n */\n async toArray() {\n const result = [];\n let x = await this.next();\n while (!x.done) {\n result.push(x.value);\n x = await this.next();\n }\n return result;\n }\n /**\n * Collect all elements of this dataset into an array with prefetching 100\n * elements. This is useful for testing, because the prefetch changes the\n * order in which the Promises are resolved along the processing pipeline.\n * This may help expose bugs where results are dependent on the order of\n * Promise resolution rather than on the logical order of the stream (i.e.,\n * due to hidden mutable state).\n *\n * @returns A Promise for an array of stream elements, which will resolve\n * when the stream is exhausted.\n */\n async toArrayForTest() {\n const stream = this.prefetch(100);\n const result = [];\n let x = await stream.next();\n while (!x.done) {\n result.push(x.value);\n x = await stream.next();\n }\n return result;\n }\n /**\n * Draw items from the stream until it is exhausted.\n *\n * This can be useful when the stream has side effects but no output. 
In\n * that case, calling this function guarantees that the stream will be\n * fully processed.\n */\n async resolveFully() {\n let x = await this.next();\n while (!x.done) {\n x = await this.next();\n }\n }\n /**\n * Draw items from the stream until it is exhausted, or a predicate fails.\n *\n * This can be useful when the stream has side effects but no output. In\n * that case, calling this function guarantees that the stream will be\n * fully processed.\n */\n async resolveWhile(predicate) {\n let x = await this.next();\n let shouldContinue = predicate(x.value);\n while ((!x.done) && shouldContinue) {\n x = await this.next();\n shouldContinue = predicate(x.value);\n }\n }\n /**\n * Handles errors thrown on this stream using a provided handler function.\n *\n * @param handler A function that handles any `Error` thrown during a `next()`\n * call and returns true if the stream should continue (dropping the failed\n * call) or false if the stream should quietly terminate. If the handler\n * itself throws (or rethrows) an `Error`, that will be propagated.\n *\n * @returns A `LazyIterator` of elements passed through from upstream,\n * possibly filtering or terminating on upstream `next()` calls that\n * throw an `Error`.\n */\n handleErrors(handler) {\n return new ErrorHandlingLazyIterator(this, handler);\n }\n // TODO(soergel): Implement reduce() etc.\n /**\n * Filters this stream according to `predicate`.\n *\n * @param predicate A function mapping a stream element to a boolean or a\n * `Promise` for one.\n *\n * @returns A `LazyIterator` of elements for which the predicate was true.\n */\n filter(predicate) {\n return new FilterIterator(this, predicate);\n }\n /**\n * Maps this stream through a 1-to-1 transform.\n *\n * @param transform A function mapping a stream element to a transformed\n * element.\n *\n * @returns A `LazyIterator` of transformed elements.\n */\n map(transform) {\n return new MapIterator(this, transform);\n }\n /**\n * Maps this stream through an async 1-to-1 transform.\n *\n * @param transform A function mapping a stream element to a `Promise` for a\n * transformed stream element.\n *\n * @returns A `LazyIterator` of transformed elements.\n */\n mapAsync(transform) {\n return new AsyncMapIterator(this, transform);\n }\n /**\n * Maps this stream through a 1-to-1 transform, forcing serial execution.\n *\n * @param transform A function mapping a stream element to a transformed\n * element.\n *\n * @returns A `LazyIterator` of transformed elements.\n */\n serialMapAsync(transform) {\n return new AsyncMapIterator(this, transform).serial();\n }\n /**\n * Maps this stream through a 1-to-many transform.\n *\n * @param transform A function mapping a stream element to an array of\n * transformed elements.\n *\n * @returns A `DataStream` of transformed elements.\n */\n flatmap(transform) {\n return new FlatmapIterator(this, transform);\n }\n /**\n * Apply a function to every element of the stream.\n *\n * @param f A function to apply to each stream element.\n */\n async forEachAsync(f) {\n return this.map(f).resolveFully();\n }\n /**\n * Apply a function to every element of the stream, forcing serial execution.\n *\n * @param f A function to apply to each stream element. 
Should return 'true'\n * to indicate that the stream should continue, or 'false' to cause it to\n * terminate.\n */\n async serialForEach(f) {\n return this.serialMapAsync(f).resolveWhile(x => (x === true));\n }\n /**\n * Groups elements into batches, represented as arrays of elements.\n *\n * We can think of the elements of this iterator as 'rows' (even if they are\n * nested structures). By the same token, consecutive values for a given\n * key within the elements form a 'column'. This matches the usual sense of\n * 'row' and 'column' when processing tabular data (e.g., parsing a CSV).\n *\n * Thus, \"Row-major\" means that the resulting batch is simply a collection of\n * rows: `[row1, row2, row3, ...]`. This is contrast to the column-major\n * form, which is needed for vectorized computation.\n *\n * @param batchSize The number of elements desired per batch.\n * @param smallLastBatch Whether to emit the final batch when it has fewer\n * than batchSize elements. Default true.\n * @returns A `LazyIterator` of batches of elements, represented as arrays\n * of the original element type.\n */\n rowMajorBatch(batchSize, smallLastBatch = true) {\n return new RowMajorBatchIterator(this, batchSize, smallLastBatch);\n }\n /**\n * Groups elements into batches, represented in column-major form.\n *\n * We can think of the elements of this iterator as 'rows' (even if they are\n * nested structures). By the same token, consecutive values for a given\n * key within the elements form a 'column'. This matches the usual sense of\n * 'row' and 'column' when processing tabular data (e.g., parsing a CSV).\n *\n * Thus, \"column-major\" means that the resulting batch is a (potentially\n * nested) structure representing the columns. Each column entry, then,\n * contains a collection of the values found in that column for a range of\n * input elements. This representation allows for vectorized computation, in\n * contrast to the row-major form.\n *\n * The inputs should all have the same nested structure (i.e., of arrays and\n * dicts). The result is a single object with the same nested structure,\n * where the leaves are arrays collecting the values of the inputs at that\n * location (or, optionally, the result of a custom function applied to those\n * arrays).\n *\n * @param batchSize The number of elements desired per batch.\n * @param smallLastBatch Whether to emit the final batch when it has fewer\n * than batchSize elements. Default true.\n * @param zipFn: (optional) A function that expects an array of elements at a\n * single node of the object tree, and returns a `DeepMapResult`. The\n * `DeepMapResult` either provides a result value for that node (i.e.,\n * representing the subtree), or indicates that the node should be processed\n * recursively. 
The default zipFn recurses as far as possible and places\n * arrays at the leaves.\n * @returns A `LazyIterator` of batches of elements, represented as an object\n * with collections at the leaves.\n */\n columnMajorBatch(batchSize, smallLastBatch = true, \n // tslint:disable-next-line:no-any\n zipFn = zipToList) {\n // First collect the desired number of input elements as a row-major batch.\n const rowBatches = this.rowMajorBatch(batchSize, smallLastBatch);\n // Now 'rotate' or 'pivot' the data, collecting all values from each column\n // in the batch (i.e., for each key within the elements) into an array.\n return rowBatches.map(x => deepZip(x, zipFn));\n }\n /**\n * Concatenate this `LazyIterator` with another.\n *\n * @param iterator A `LazyIterator` to be concatenated onto this one.\n * @param baseErrorHandler An optional function that can intercept `Error`s\n * raised during a `next()` call on the base stream. This function can\n * decide whether the error should be propagated, whether the error should\n * be ignored, or whether the base stream should be terminated.\n * @returns A `LazyIterator`.\n */\n concatenate(iterator, baseErrorHandler) {\n return new ChainedIterator(iteratorFromItems([this, iterator]), baseErrorHandler);\n }\n /**\n * Limits this stream to return at most `count` items.\n *\n * @param count The maximum number of items to provide from the stream. If\n * a negative or undefined value is given, the entire stream is returned\n * unaltered.\n */\n take(count) {\n if (count < 0 || count == null) {\n return this;\n }\n return new TakeIterator(this, count);\n }\n /**\n * Skips the first `count` items in this stream.\n *\n * @param count The number of items to skip. If a negative or undefined\n * value is given, the entire stream is returned unaltered.\n */\n skip(count) {\n if (count < 0 || count == null) {\n return this;\n }\n return new SkipIterator(this, count);\n }\n /**\n * Prefetch the first `bufferSize` items in this stream.\n *\n * Note this prefetches Promises, but makes no guarantees about when those\n * Promises resolve.\n *\n * @param bufferSize: An integer specifying the number of elements to be\n * prefetched.\n */\n prefetch(bufferSize) {\n return new PrefetchIterator(this, bufferSize);\n }\n // TODO(soergel): deep sharded shuffle, where supported\n /**\n * Randomly shuffles the elements of this stream.\n *\n * @param bufferSize: An integer specifying the number of elements from\n * this stream from which the new stream will sample.\n * @param seed: (Optional.) An integer specifying the random seed that\n * will be used to create the distribution.\n */\n shuffle(windowSize, seed) {\n return new ShuffleIterator(this, windowSize, seed);\n }\n /**\n * Force an iterator to execute serially: each next() call will await the\n * prior one, so that they cannot execute concurrently.\n */\n serial() {\n return new SerialIterator(this);\n }\n}\n// ============================================================================\n// The following private classes serve to implement the chainable methods\n// on LazyIterator. 
Unfortunately they can't be placed in separate files,\n// due to resulting trouble with circular imports.\n// ============================================================================\n// Iterators that just extend LazyIterator directly\n// ============================================================================\nclass ArrayIterator extends LazyIterator {\n constructor(items) {\n super();\n this.items = items;\n this.trav = 0;\n }\n summary() {\n return `Array of ${this.items.length} items`;\n }\n async next() {\n if (this.trav >= this.items.length) {\n return { value: null, done: true };\n }\n const item = this.items[this.trav];\n this.trav++;\n return { value: deepClone(item), done: false };\n }\n}\nclass FunctionCallIterator extends LazyIterator {\n constructor(nextFn) {\n super();\n this.nextFn = nextFn;\n }\n summary() {\n return `Function call`;\n }\n async next() {\n try {\n return this.nextFn();\n }\n catch (e) {\n // Modify the error message but leave the stack trace intact\n e.message =\n `Error thrown while iterating through a dataset: ${e.message}`;\n throw e;\n }\n }\n}\nclass SerialIterator extends LazyIterator {\n constructor(upstream) {\n super();\n this.upstream = upstream;\n this.lastRead = Promise.resolve({ value: null, done: false });\n }\n summary() {\n return `${this.upstream.summary()} -> Serial`;\n }\n async next() {\n // This sets this.lastRead to a new Promise right away, as opposed to\n // saying `await this.lastRead; this.lastRead = this.serialNext();` which\n // would not work because this.nextRead would be updated only after the\n // promise resolves.\n this.lastRead = this.lastRead.then(() => this.serialNext());\n return this.lastRead;\n }\n async serialNext() {\n return this.upstream.next();\n }\n}\nclass SkipIterator extends LazyIterator {\n constructor(upstream, maxCount) {\n super();\n this.upstream = upstream;\n this.maxCount = maxCount;\n // Local state that should not be clobbered by out-of-order execution.\n this.count = 0;\n this.lastRead = Promise.resolve({ value: null, done: false });\n }\n summary() {\n return `${this.upstream.summary()} -> Skip`;\n }\n async next() {\n // This sets this.lastRead to a new Promise right away, as opposed to\n // saying `await this.lastRead; this.lastRead = this.serialNext();` which\n // would not work because this.nextRead would be updated only after the\n // promise resolves.\n this.lastRead = this.lastRead.then(() => this.serialNext());\n return this.lastRead;\n }\n async serialNext() {\n // TODO(soergel): consider tradeoffs of reading in parallel, eg.\n // collecting next() promises in an Array and then waiting for\n // Promise.all() of those. Benefit: pseudo-parallel execution. 
Drawback:\n // maybe delayed GC.\n while (this.count++ < this.maxCount) {\n const skipped = await this.upstream.next();\n // short-circuit if upstream is already empty\n if (skipped.done) {\n return skipped;\n }\n tf.dispose(skipped.value);\n }\n return this.upstream.next();\n }\n}\nclass TakeIterator extends LazyIterator {\n constructor(upstream, maxCount) {\n super();\n this.upstream = upstream;\n this.maxCount = maxCount;\n this.count = 0;\n }\n summary() {\n return `${this.upstream.summary()} -> Take`;\n }\n async next() {\n if (this.count++ >= this.maxCount) {\n return { value: null, done: true };\n }\n return this.upstream.next();\n }\n}\n// Note this batch just groups items into row-wise element arrays.\n// Rotating these to a column-wise representation happens only at the dataset\n// level.\nclass RowMajorBatchIterator extends LazyIterator {\n constructor(upstream, batchSize, enableSmallLastBatch = true) {\n super();\n this.upstream = upstream;\n this.batchSize = batchSize;\n this.enableSmallLastBatch = enableSmallLastBatch;\n this.lastRead = Promise.resolve({ value: null, done: false });\n }\n summary() {\n return `${this.upstream.summary()} -> RowMajorBatch`;\n }\n async next() {\n // This sets this.lastRead to a new Promise right away, as opposed to\n // saying `await this.lastRead; this.lastRead = this.serialNext();` which\n // would not work because this.nextRead would be updated only after the\n // promise resolves.\n this.lastRead = this.lastRead.then(() => this.serialNext());\n return this.lastRead;\n }\n async serialNext() {\n const batch = [];\n while (batch.length < this.batchSize) {\n const item = await this.upstream.next();\n if (item.done) {\n if (this.enableSmallLastBatch && batch.length > 0) {\n return { value: batch, done: false };\n }\n return { value: null, done: true };\n }\n batch.push(item.value);\n }\n return { value: batch, done: false };\n }\n}\nclass FilterIterator extends LazyIterator {\n constructor(upstream, predicate) {\n super();\n this.upstream = upstream;\n this.predicate = predicate;\n this.lastRead = Promise.resolve({ value: null, done: false });\n }\n summary() {\n return `${this.upstream.summary()} -> Filter`;\n }\n async next() {\n // This sets this.lastRead to a new Promise right away, as opposed to\n // saying `await this.lastRead; this.lastRead = this.serialNext();` which\n // would not work because this.nextRead would be updated only after the\n // promise resolves.\n this.lastRead = this.lastRead.then(() => this.serialNext());\n return this.lastRead;\n }\n async serialNext() {\n while (true) {\n const item = await this.upstream.next();\n if (item.done || this.predicate(item.value)) {\n return item;\n }\n tf.dispose(item.value);\n }\n }\n}\nclass MapIterator extends LazyIterator {\n constructor(upstream, transform) {\n super();\n this.upstream = upstream;\n this.transform = transform;\n }\n summary() {\n return `${this.upstream.summary()} -> Map`;\n }\n async next() {\n const item = await this.upstream.next();\n if (item.done) {\n return { value: null, done: true };\n }\n const inputTensors = tf.tensor_util.getTensorsInContainer(item.value);\n // Careful: the transform may mutate the item in place.\n // That's why we have to remember the input Tensors above, and then\n // below dispose only those that were not passed through to the output.\n // Note too that the transform function is responsible for tidying\n // any intermediate Tensors. 
Here we are concerned only about the\n // inputs.\n const mapped = this.transform(item.value);\n const outputTensors = tf.tensor_util.getTensorsInContainer(mapped);\n // TODO(soergel) faster intersection\n // TODO(soergel) move to tf.disposeExcept(in, out)?\n for (const t of inputTensors) {\n if (!tf.tensor_util.isTensorInList(t, outputTensors)) {\n t.dispose();\n }\n }\n return { value: mapped, done: false };\n }\n}\nclass ErrorHandlingLazyIterator extends LazyIterator {\n constructor(upstream, handler) {\n super();\n this.upstream = upstream;\n this.handler = handler;\n this.count = 0;\n this.lastRead = Promise.resolve({ value: null, done: false });\n }\n summary() {\n return `${this.upstream.summary()} -> handleErrors`;\n }\n async next() {\n // This sets this.lastRead to a new Promise right away, as opposed to\n // saying `await this.lastRead; this.lastRead = this.serialNext();` which\n // would not work because this.nextRead would be updated only after the\n // promise resolves.\n this.lastRead = this.lastRead.then(() => this.serialNext());\n return this.lastRead;\n }\n async serialNext() {\n while (true) {\n try {\n return await this.upstream.next();\n }\n catch (e) {\n if (!this.handler(e)) {\n return { value: null, done: true };\n }\n // If the handler returns true, loop and fetch the next upstream item.\n // If the upstream iterator throws an endless stream of errors, and if\n // the handler says to ignore them, then we loop forever here. That is\n // the correct behavior-- it's up to the handler to decide when to stop.\n }\n }\n }\n}\nclass AsyncMapIterator extends LazyIterator {\n constructor(upstream, transform) {\n super();\n this.upstream = upstream;\n this.transform = transform;\n }\n summary() {\n return `${this.upstream.summary()} -> AsyncMap`;\n }\n async next() {\n const item = await this.upstream.next();\n if (item.done) {\n return { value: null, done: true };\n }\n const inputTensors = tf.tensor_util.getTensorsInContainer(item.value);\n // Careful: the transform may mutate the item in place.\n // That's why we have to remember the input Tensors above, and then\n // below dispose only those that were not passed through to the output.\n // Note too that the transform function is responsible for tidying\n // any intermediate Tensors. Here we are concerned only about the\n // inputs.\n const mapped = await this.transform(item.value);\n const outputTensors = tf.tensor_util.getTensorsInContainer(mapped);\n // TODO(soergel) faster intersection\n // TODO(soergel) move to tf.disposeExcept(in, out)?\n for (const t of inputTensors) {\n if (!tf.tensor_util.isTensorInList(t, outputTensors)) {\n t.dispose();\n }\n }\n return { value: mapped, done: false };\n }\n}\n// Iterators that maintain a queue of pending items\n// ============================================================================\n/**\n * A base class for transforming streams that operate by maintaining an\n * output queue of elements that are ready to return via next(). 
This is\n * commonly required when the transformation is 1-to-many: A call to next()\n * may trigger a call to the underlying stream, which will produce many\n * mapped elements of this stream-- of which we need to return only one, so\n * we have to queue the rest.\n */\nexport class OneToManyIterator extends LazyIterator {\n constructor() {\n super();\n this.outputQueue = new GrowingRingBuffer();\n this.lastRead = Promise.resolve({ value: null, done: false });\n }\n async next() {\n // This sets this.lastRead to a new Promise right away, as opposed to\n // saying `await this.lastRead; this.lastRead = this.serialNext();` which\n // would not work because this.nextRead would be updated only after the\n // promise resolves.\n this.lastRead = this.lastRead.then(() => this.serialNext());\n return this.lastRead;\n }\n async serialNext() {\n // Fetch so that the queue contains at least one item if possible.\n // If the upstream source is exhausted, AND there are no items left in\n // the output queue, then this stream is also exhausted.\n while (this.outputQueue.length() === 0) {\n // TODO(soergel): consider parallel reads.\n if (!await this.pump()) {\n return { value: null, done: true };\n }\n }\n return { value: this.outputQueue.shift(), done: false };\n }\n}\nclass FlatmapIterator extends OneToManyIterator {\n constructor(upstream, transform) {\n super();\n this.upstream = upstream;\n this.transform = transform;\n }\n summary() {\n return `${this.upstream.summary()} -> Flatmap`;\n }\n async pump() {\n const item = await this.upstream.next();\n if (item.done) {\n return false;\n }\n const inputTensors = tf.tensor_util.getTensorsInContainer(item.value);\n // Careful: the transform may mutate the item in place.\n // that's why we have to remember the input Tensors above, and then\n // below dispose only those that were not passed through to the output.\n // Note too that the transform function is responsible for tidying any\n // intermediate Tensors. Here we are concerned only about the inputs.\n const mappedArray = this.transform(item.value);\n const outputTensors = tf.tensor_util.getTensorsInContainer(mappedArray);\n this.outputQueue.pushAll(mappedArray);\n // TODO(soergel) faster intersection, and deduplicate outputTensors\n // TODO(soergel) move to tf.disposeExcept(in, out)?\n for (const t of inputTensors) {\n if (!tf.tensor_util.isTensorInList(t, outputTensors)) {\n t.dispose();\n }\n }\n return true;\n }\n}\n/**\n * Provides a `LazyIterator` that concatenates a stream of underlying\n * streams.\n *\n * Doing this in a concurrency-safe way requires some trickery. 
In\n * particular, we want this stream to return the elements from the\n * underlying streams in the correct order according to when next() was\n * called, even if the resulting Promises resolve in a different order.\n */\nexport class ChainedIterator extends LazyIterator {\n constructor(iterators, baseErrorHandler) {\n super();\n this.baseErrorHandler = baseErrorHandler;\n // Strict Promise execution order:\n // a next() call may not even begin until the previous one completes.\n this.lastRead = null;\n // Local state that should not be clobbered by out-of-order execution.\n this.iterator = null;\n this.moreIterators = iterators;\n }\n summary() {\n const upstreamSummaries = 'TODO: fill in upstream of chained summaries';\n return `${upstreamSummaries} -> Chained`;\n }\n async next() {\n this.lastRead = this.readFromChain(this.lastRead);\n return this.lastRead;\n }\n async readFromChain(lastRead) {\n // Must await on the previous read since the previous read may have advanced\n // the stream of streams, from which we need to read.\n // This is unfortunate since we can't parallelize reads. Which means\n // prefetching of chained streams is a no-op.\n // One solution is to prefetch immediately upstream of this.\n await lastRead;\n if (this.iterator == null) {\n const iteratorResult = await this.moreIterators.next();\n if (iteratorResult.done) {\n // No more streams to stream from.\n return { value: null, done: true };\n }\n this.iterator = iteratorResult.value;\n if (this.baseErrorHandler != null) {\n this.iterator = this.iterator.handleErrors(this.baseErrorHandler);\n }\n }\n const itemResult = await this.iterator.next();\n if (itemResult.done) {\n this.iterator = null;\n return this.readFromChain(lastRead);\n }\n return itemResult;\n }\n}\nexport var ZipMismatchMode;\n(function (ZipMismatchMode) {\n ZipMismatchMode[ZipMismatchMode[\"FAIL\"] = 0] = \"FAIL\";\n ZipMismatchMode[ZipMismatchMode[\"SHORTEST\"] = 1] = \"SHORTEST\";\n ZipMismatchMode[ZipMismatchMode[\"LONGEST\"] = 2] = \"LONGEST\"; // use nulls for exhausted streams; use up the longest stream.\n})(ZipMismatchMode || (ZipMismatchMode = {}));\n/**\n * Provides a `LazyIterator` that zips together an array, dict, or nested\n * structure of `LazyIterator`s (and perhaps additional constants).\n *\n * The underlying streams must provide elements in a consistent order such\n * that they correspond.\n *\n * Typically, the underlying streams should have the same number of\n * elements. If they do not, the behavior is determined by the\n * `mismatchMode` argument.\n *\n * The nested structure of the `iterators` argument determines the\n * structure of elements in the resulting iterator.\n *\n * Doing this in a concurrency-safe way requires some trickery. In\n * particular, we want this stream to return the elements from the\n * underlying streams in the correct order according to when next() was\n * called, even if the resulting Promises resolve in a different order.\n *\n * @param iterators: An array or object containing LazyIterators at the\n * leaves.\n * @param mismatchMode: Determines what to do when one underlying iterator\n * is exhausted before the others. `ZipMismatchMode.FAIL` (the default)\n * causes an error to be thrown in this case. 
`ZipMismatchMode.SHORTEST`\n * causes the zipped iterator to terminate with the furst underlying\n * streams, so elements remaining on the longer streams are ignored.\n * `ZipMismatchMode.LONGEST` causes the zipped stream to continue, filling\n * in nulls for the exhausted streams, until all streams are exhausted.\n */\nclass ZipIterator extends LazyIterator {\n constructor(iterators, mismatchMode = ZipMismatchMode.FAIL) {\n super();\n this.iterators = iterators;\n this.mismatchMode = mismatchMode;\n this.count = 0;\n this.currentPromise = null;\n }\n summary() {\n const upstreamSummaries = 'TODO: fill in upstream of zip summaries';\n return `{${upstreamSummaries}} -> Zip`;\n }\n async nextState(afterState) {\n // This chaining ensures that the underlying next() are not even called\n // before the previous ones have resolved.\n await afterState;\n // Collect underlying iterator \"done\" signals as a side effect in\n // getNext()\n let numIterators = 0;\n let iteratorsDone = 0;\n function getNext(container) {\n if (container instanceof LazyIterator) {\n const result = container.next();\n return {\n value: result.then(x => {\n numIterators++;\n if (x.done) {\n iteratorsDone++;\n }\n return x.value;\n }),\n recurse: false\n };\n }\n else {\n return { value: null, recurse: true };\n }\n }\n const mapped = await deepMapAndAwaitAll(this.iterators, getNext);\n if (numIterators === iteratorsDone) {\n // The streams have all ended.\n return { value: null, done: true };\n }\n if (iteratorsDone > 0) {\n switch (this.mismatchMode) {\n case ZipMismatchMode.FAIL:\n throw new Error('Zipped streams should have the same length. ' +\n `Mismatched at element ${this.count}.`);\n case ZipMismatchMode.SHORTEST:\n return { value: null, done: true };\n case ZipMismatchMode.LONGEST:\n default:\n // Continue. The exhausted streams already produced value: null.\n }\n }\n this.count++;\n return { value: mapped, done: false };\n }\n async next() {\n this.currentPromise = this.nextState(this.currentPromise);\n return this.currentPromise;\n }\n}\n// Iterators that maintain a ring buffer of pending promises\n// ============================================================================\n/**\n * A stream that prefetches a given number of items from an upstream source,\n * returning them in FIFO order.\n *\n * Note this prefetches Promises, but makes no guarantees about when those\n * Promises resolve.\n */\nexport class PrefetchIterator extends LazyIterator {\n constructor(upstream, bufferSize) {\n super();\n this.upstream = upstream;\n this.bufferSize = bufferSize;\n this.buffer = new RingBuffer(bufferSize);\n }\n summary() {\n return `${this.upstream.summary()} -> Prefetch`;\n }\n /**\n * Refill the prefetch buffer. Returns only after the buffer is full, or\n * the upstream source is exhausted.\n */\n refill() {\n while (!this.buffer.isFull()) {\n const v = this.upstream.next();\n this.buffer.push(v);\n }\n }\n next() {\n this.refill();\n // This shift will never throw an error because the buffer is always\n // full after a refill. If the stream is exhausted, the buffer will be\n // full of Promises that will resolve to the end-of-stream signal.\n return this.buffer.shift();\n }\n}\n/**\n * A stream that performs a sliding-window random shuffle on an upstream\n * source. This is like a `PrefetchIterator` except that the items are\n * returned in randomized order. 
Mixing naturally improves as the buffer\n * size increases.\n */\nexport class ShuffleIterator extends PrefetchIterator {\n constructor(upstream, windowSize, seed) {\n super(upstream, windowSize);\n this.upstream = upstream;\n this.windowSize = windowSize;\n // Local state that should not be clobbered by out-of-order execution.\n this.upstreamExhausted = false;\n this.random = seedrandom.alea(seed || tf.util.now().toString());\n this.lastRead = Promise.resolve({ value: null, done: false });\n }\n async next() {\n // This sets this.lastRead to a new Promise right away, as opposed to\n // saying `await this.lastRead; this.lastRead = this.serialNext();` which\n // would not work because this.nextRead would be updated only after the\n // promise resolves.\n this.lastRead = this.lastRead.then(() => this.serialNext());\n return this.lastRead;\n }\n randomInt(max) {\n return Math.floor(this.random() * max);\n }\n chooseIndex() {\n return this.randomInt(this.buffer.length());\n }\n async serialNext() {\n // TODO(soergel): consider performance\n if (!this.upstreamExhausted) {\n this.refill();\n }\n while (!this.buffer.isEmpty()) {\n const chosenIndex = this.chooseIndex();\n const result = await this.buffer.shuffleExcise(chosenIndex);\n if (result.done) {\n this.upstreamExhausted = true;\n }\n else {\n this.refill();\n return result;\n }\n }\n return { value: null, done: true };\n }\n}\n//# sourceMappingURL=lazy_iterator.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport * as tf from '@tensorflow/tfjs-core';\nimport * as seedrandom from 'seedrandom';\nimport { iteratorFromConcatenated, iteratorFromFunction, iteratorFromItems, iteratorFromZipped, ZipMismatchMode } from './iterators/lazy_iterator';\nimport { canTensorify, deepMapAndAwaitAll, isIterable } from './util/deep_map';\n// TODO(soergel): consider vectorized operations within the pipeline.\n/**\n * Represents a potentially large list of independent data elements (typically\n * 'samples' or 'examples').\n *\n * A 'data example' may be a primitive, an array, a map from string keys to\n * values, or any nested structure of these.\n *\n * A `Dataset` represents an ordered collection of elements, together with a\n * chain of transformations to be performed on those elements. Each\n * transformation is a method of `Dataset` that returns another `Dataset`, so\n * these may be chained, e.g.\n * `const processedDataset = rawDataset.filter(...).map(...).batch(...)`.\n *\n * Data loading and transformation is done in a lazy, streaming fashion. The\n * dataset may be iterated over multiple times; each iteration starts the data\n * loading anew and recapitulates the transformations.\n *\n * A `Dataset` is typically processed as a stream of unbatched examples --i.e.,\n * its transformations are applied one example at a time. 
Batching produces a\n * new `Dataset` where each element is a batch. Batching should usually come\n * last in a pipeline, because data transformations are easier to express on a\n * per-example basis than on a per-batch basis.\n *\n * The following code examples are calling `await dataset.forEachAsync(...)` to\n * iterate once over the entire dataset in order to print out the data.\n *\n * @doc {heading: 'Data', subheading: 'Classes', namespace: 'data'}\n */\nexport class Dataset {\n constructor() {\n this.size = null;\n }\n // TODO(soergel): Make Datasets report whether repeated iterator() calls\n // produce the same result (e.g., reading from a file) or different results\n // (e.g., from the webcam). Currently we don't make this distinction but it\n // could be important for the user to know.\n // abstract isDeterministic(): boolean;\n /**\n * Groups elements into batches.\n *\n * It is assumed that each of the incoming dataset elements has the same\n * structure-- i.e. the same set of keys at each location in an object\n * hierarchy. For each key, the resulting `Dataset` provides a batched\n * element collecting all of the incoming values for that key.\n *\n * * Incoming primitives are grouped into a 1-D Tensor.\n * * Incoming Tensors are grouped into a new Tensor where the 0'th axis is\n * the batch dimension.\n * * Incoming arrays are converted to Tensor and then batched.\n * * A nested array is interpreted as an n-D Tensor, so the batched result\n * has n+1 dimensions.\n * * An array that cannot be converted to Tensor produces an error.\n *\n * If an array should not be batched as a unit, it should first be converted\n * to an object with integer keys.\n *\n * Here are a few examples:\n *\n * Batch a dataset of numbers:\n * ```js\n * const a = tf.data.array([1, 2, 3, 4, 5, 6, 7, 8]).batch(4);\n * await a.forEachAsync(e => e.print());\n * ```\n *\n * Batch a dataset of arrays:\n * ```js\n * const b = tf.data.array([[1], [2], [3], [4], [5], [6], [7], [8]]).batch(4);\n * await b.forEachAsync(e => e.print());\n * ```\n *\n * Batch a dataset of objects:\n * ```js\n * const c = tf.data.array([{a: 1, b: 11}, {a: 2, b: 12}, {a: 3, b: 13},\n * {a: 4, b: 14}, {a: 5, b: 15}, {a: 6, b: 16}, {a: 7, b: 17},\n * {a: 8, b: 18}]).batch(4);\n * await c.forEachAsync(e => {\n * console.log('{');\n * for(var key in e) {\n * console.log(key+':');\n * e[key].print();\n * }\n * console.log('}');\n * })\n * ```\n *\n * @param batchSize The number of elements desired per batch.\n * @param smallLastBatch Whether to emit the final batch when it has fewer\n * than batchSize elements. 
Default true.\n * @returns A `Dataset`, from which a stream of batches can be obtained.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n batch(batchSize, smallLastBatch = true) {\n const base = this;\n tf.util.assert(batchSize > 0, () => `batchSize needs to be positive, but it is\n ${batchSize}`);\n let size;\n if (this.size === Infinity || this.size == null) {\n // If the size of this dataset is infinity or null, the new size keeps the\n // same.\n size = this.size;\n }\n else if (smallLastBatch) {\n // If the size of this dataset is known and include small last batch, the\n // new size is full batch count plus last batch.\n size = Math.ceil(this.size / batchSize);\n }\n else {\n // If the size of this dataset is known and not include small last batch,\n // the new size is full batch count.\n size = Math.floor(this.size / batchSize);\n }\n return datasetFromIteratorFn(async () => {\n return (await base.iterator())\n .columnMajorBatch(batchSize, smallLastBatch, deepBatchConcat);\n }, size);\n }\n /**\n * Concatenates this `Dataset` with another.\n *\n * ```js\n * const a = tf.data.array([1, 2, 3]);\n * const b = tf.data.array([4, 5, 6]);\n * const c = a.concatenate(b);\n * await c.forEachAsync(e => console.log(e));\n * ```\n *\n * @param dataset A `Dataset` to be concatenated onto this one.\n * @returns A `Dataset`.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n concatenate(dataset) {\n const base = this;\n let size;\n if (this.size === Infinity || dataset.size === Infinity) {\n // If the size of any of these two dataset is infinity, new size is\n // infinity.\n size = Infinity;\n }\n else if (this.size != null && dataset.size != null) {\n // If the size of both datasets are known and not infinity, new size is\n // sum the size of these two datasets.\n size = this.size + dataset.size;\n }\n else {\n // If neither of these two datasets has infinite size and any of these two\n // datasets' size is null, the new size is null.\n size = null;\n }\n return datasetFromIteratorFn(async () => (await base.iterator()).concatenate(await dataset.iterator()), size);\n }\n /**\n * Filters this dataset according to `predicate`.\n *\n * ```js\n * const a = tf.data.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10])\n * .filter(x => x%2 === 0);\n * await a.forEachAsync(e => console.log(e));\n * ```\n *\n * @param predicate A function mapping a dataset element to a boolean or a\n * `Promise` for one.\n *\n * @returns A `Dataset` of elements for which the predicate was true.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n filter(predicate) {\n const base = this;\n let size;\n if (this.size === Infinity) {\n // If the size of this dataset is infinity, new size is infinity\n size = Infinity;\n }\n else {\n // If this dataset has limited elements, new size is null because it might\n // exhausted randomly.\n size = null;\n }\n return datasetFromIteratorFn(async () => {\n return (await base.iterator()).filter(x => tf.tidy(() => predicate(x)));\n }, size);\n }\n /**\n * Apply a function to every element of the dataset.\n *\n * After the function is applied to a dataset element, any Tensors contained\n * within that element are disposed.\n *\n * ```js\n * const a = tf.data.array([1, 2, 3]);\n * await a.forEachAsync(e => console.log(e));\n * ```\n *\n * @param f A function to apply to each dataset element.\n * @returns A `Promise` that resolves after all elements have been processed.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n async forEachAsync(f) {\n return (await 
this.iterator()).forEachAsync(f);\n }\n /**\n * Maps this dataset through a 1-to-1 transform.\n *\n * ```js\n * const a = tf.data.array([1, 2, 3]).map(x => x*x);\n * await a.forEachAsync(e => console.log(e));\n * ```\n *\n * @param transform A function mapping a dataset element to a transformed\n * dataset element.\n *\n * @returns A `Dataset` of transformed elements.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n map(transform) {\n const base = this;\n return datasetFromIteratorFn(async () => {\n return (await base.iterator()).map(x => tf.tidy(() => transform(x)));\n }, this.size);\n }\n /**\n * Maps this dataset through an async 1-to-1 transform.\n *\n * ```js\n * const a =\n * tf.data.array([1, 2, 3]).mapAsync(x => new Promise(function(resolve){\n * setTimeout(() => {\n * resolve(x * x);\n * }, Math.random()*1000 + 500);\n * }));\n * console.log(await a.toArray());\n * ```\n *\n * @param transform A function mapping a dataset element to a `Promise` for a\n * transformed dataset element. This transform is responsible for disposing\n * any intermediate `Tensor`s, i.e. by wrapping its computation in\n * `tf.tidy()`; that cannot be automated here (as it is in the synchronous\n * `map()` case).\n *\n * @returns A `Dataset` of transformed elements.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n mapAsync(transform) {\n const base = this;\n return datasetFromIteratorFn(async () => {\n return (await base.iterator()).mapAsync(transform);\n }, this.size);\n }\n /**\n * Creates a `Dataset` that prefetches elements from this dataset.\n *\n * @param bufferSize: An integer specifying the number of elements to be\n * prefetched.\n * @returns A `Dataset`.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n prefetch(bufferSize) {\n if (bufferSize == null) {\n throw new RangeError('`Dataset.prefetch()` requires bufferSize to be specified.');\n }\n const base = this;\n return datasetFromIteratorFn(async () => (await base.iterator()).prefetch(bufferSize), this.size);\n }\n /**\n * Repeats this dataset `count` times.\n *\n * NOTE: If this dataset is a function of global state (e.g. a random number\n * generator), then different repetitions may produce different elements.\n *\n * ```js\n * const a = tf.data.array([1, 2, 3]).repeat(3);\n * await a.forEachAsync(e => console.log(e));\n * ```\n *\n * @param count: (Optional) An integer, representing the number of times\n * the dataset should be repeated. The default behavior (if `count` is\n * `undefined` or negative) is for the dataset be repeated indefinitely.\n * @returns A `Dataset`.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n repeat(count) {\n const base = this;\n let size;\n if (this.size != null && count > 0) {\n // If this dataset has size and count is positive, new size is current\n // size multiply count. 
This also covers the case that current size is\n // infinity.\n size = this.size * count;\n }\n else if (count === 0) {\n // If count is 0, new size is 0.\n size = 0;\n }\n else if (this.size != null && (count === undefined || count < 0)) {\n // If this dataset has size and count is undefined or negative, the\n // dataset will be repeated indefinitely and new size is infinity.\n size = Infinity;\n }\n else {\n // If the size of this dataset is null, the new dataset's size is null.\n size = null;\n }\n return datasetFromIteratorFn(async () => {\n const iteratorIterator = iteratorFromFunction(async () => ({ value: await base.iterator(), done: false }));\n return iteratorFromConcatenated(iteratorIterator.take(count));\n }, size);\n }\n /**\n * Creates a `Dataset` that skips `count` initial elements from this dataset.\n *\n * ```js\n * const a = tf.data.array([1, 2, 3, 4, 5, 6]).skip(3);\n * await a.forEachAsync(e => console.log(e));\n * ```\n *\n * @param count: The number of elements of this dataset that should be skipped\n * to form the new dataset. If `count` is greater than the size of this\n * dataset, the new dataset will contain no elements. If `count`\n * is `undefined` or negative, skips the entire dataset.\n *\n * @returns A `Dataset`.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n skip(count) {\n const base = this;\n let size;\n if (this.size != null && count >= 0 && this.size >= count) {\n // If the size of this dataset is greater than count, the new dataset's\n // size is current size minus skipped size.This also covers the case that\n // current size is infinity.\n size = this.size - count;\n }\n else if (this.size != null &&\n (this.size < count || count === undefined || count < 0)) {\n // If the size of this dataset is smaller than count, or count is\n // undefined or negative, skips the entire dataset and the new size is 0.\n size = 0;\n }\n else {\n // If the size of this dataset is null, the new dataset's size is null.\n size = null;\n }\n return datasetFromIteratorFn(async () => (await base.iterator()).skip(count), size);\n }\n /**\n * Pseudorandomly shuffles the elements of this dataset. This is done in a\n * streaming manner, by sampling from a given number of prefetched elements.\n *\n * ```js\n * const a = tf.data.array([1, 2, 3, 4, 5, 6]).shuffle(3);\n * await a.forEachAsync(e => console.log(e));\n * ```\n *\n * @param bufferSize: An integer specifying the number of elements from this\n * dataset from which the new dataset will sample.\n * @param seed: (Optional) An integer specifying the random seed that will\n * be used to create the distribution.\n * @param reshuffleEachIteration: (Optional) A boolean, which if true\n * indicates that the dataset should be pseudorandomly reshuffled each time\n * it is iterated over. If false, elements will be returned in the same\n * shuffled order on each iteration. (Defaults to `true`.)\n * @returns A `Dataset`.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n shuffle(bufferSize, seed, reshuffleEachIteration = true) {\n if (bufferSize == null || bufferSize < 0) {\n if (this.size == null) {\n throw new RangeError('`Dataset.shuffle()` requires bufferSize to be specified.');\n }\n else {\n throw new RangeError('`Dataset.shuffle()` requires bufferSize to be specified. 
' +\n 'If your data fits in main memory (for regular JS objects), ' +\n 'and/or GPU memory (for `tf.Tensor`s), consider setting ' +\n `bufferSize to the dataset size (${this.size} elements)`);\n }\n }\n const base = this;\n const random = seedrandom.alea(seed || tf.util.now().toString());\n return datasetFromIteratorFn(async () => {\n let seed2 = random.int32();\n if (reshuffleEachIteration) {\n seed2 += random.int32();\n }\n return (await base.iterator()).shuffle(bufferSize, seed2.toString());\n }, this.size);\n }\n /**\n * Creates a `Dataset` with at most `count` initial elements from this\n * dataset.\n *\n * ```js\n * const a = tf.data.array([1, 2, 3, 4, 5, 6]).take(3);\n * await a.forEachAsync(e => console.log(e));\n * ```\n *\n * @param count: The number of elements of this dataset that should be taken\n * to form the new dataset. If `count` is `undefined` or negative, or if\n * `count` is greater than the size of this dataset, the new dataset will\n * contain all elements of this dataset.\n * @returns A `Dataset`.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n take(count) {\n const base = this;\n let size;\n if (this.size != null && this.size > count) {\n // If the size of this dataset is greater than count, the new dataset's\n // size is count.\n size = count;\n }\n else if (this.size != null && this.size <= count) {\n // If the size of this dataset is equal or smaller than count, the new\n // dataset's size is the size of this dataset.\n size = this.size;\n }\n else {\n // If the size of this dataset is null, the new dataset's size is null.\n size = null;\n }\n return datasetFromIteratorFn(async () => (await base.iterator()).take(count), size);\n }\n /**\n * Collect all elements of this dataset into an array.\n *\n * Obviously this will succeed only for small datasets that fit in memory.\n * Useful for testing and generally should be avoided if possible.\n *\n * ```js\n * const a = tf.data.array([1, 2, 3, 4, 5, 6]);\n * console.log(await a.toArray());\n * ```\n *\n * @returns A Promise for an array of elements, which will resolve\n * when a new stream has been obtained and fully consumed.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n async toArray() {\n if (this.size === Infinity) {\n throw new Error('Can not convert infinite data stream to array.');\n }\n return (await this.iterator()).toArray();\n }\n /**\n * Collect all elements of this dataset into an array with prefetching 100\n * elements. This is useful for testing, because the prefetch changes the\n * order in which the Promises are resolved along the processing pipeline.\n * This may help expose bugs where results are dependent on the order of\n * Promise resolution rather than on the logical order of the stream (i.e.,\n * due to hidden mutable state).\n *\n * @returns A Promise for an array of elements, which will resolve\n * when a new stream has been obtained and fully consumed.\n */\n async toArrayForTest() {\n if (this.size === Infinity) {\n throw new Error('Can not convert infinite data stream to array.');\n }\n return (await this.iterator()).toArrayForTest();\n }\n}\n// TODO(soergel): deep sharded shuffle, where supported\nDataset.MAX_BUFFER_SIZE = 10000;\n/**\n * Create a `Dataset` defined by a provided iterator() function.\n *\n * ```js\n * let i = -1;\n * const func = () =>\n * ++i < 5 ? 
{value: i, done: false} : {value: null, done: true};\n * const iter = tf.data.iteratorFromFunction(func);\n * const ds = tf.data.datasetFromIteratorFn(iter);\n * await ds.forEachAsync(e => console.log(e));\n * ```\n */\nexport function datasetFromIteratorFn(iteratorFn, size = null) {\n return new class extends Dataset {\n constructor() {\n super(...arguments);\n this.size = size;\n }\n /*\n * Provide a new stream of elements. Note this will also start new streams\n * from any underlying `Dataset`s.\n */\n async iterator() {\n return iteratorFn();\n }\n }();\n}\n/**\n * Create a `Dataset` from an array of elements.\n *\n * Create a Dataset from an array of objects:\n * ```js\n * const a = tf.data.array([{'item': 1}, {'item': 2}, {'item': 3}]);\n * await a.forEachAsync(e => console.log(e));\n * ```\n *\n * Create a Dataset from an array of numbers:\n * ```js\n * const a = tf.data.array([4, 5, 6]);\n * await a.forEachAsync(e => console.log(e));\n * ```\n * @param items An array of elements that will be parsed as items in a dataset.\n *\n * @doc {heading: 'Data', subheading: 'Creation', namespace: 'data'}\n */\nexport function array(items) {\n return datasetFromIteratorFn(async () => iteratorFromItems(items), items.length);\n}\n/**\n * Create a `Dataset` by zipping together an array, dict, or nested\n * structure of `Dataset`s (and perhaps additional constants).\n * The underlying datasets must provide elements in a consistent order such that\n * they correspond.\n *\n * The number of elements in the resulting dataset is the same as the size of\n * the smallest dataset in datasets.\n *\n * The nested structure of the `datasets` argument determines the\n * structure of elements in the resulting iterator.\n *\n * Note this means that, given an array of two datasets that produce dict\n * elements, the result is a dataset that produces elements that are arrays\n * of two dicts:\n *\n * Zip an array of datasets:\n * ```js\n * console.log('Zip two datasets of objects:');\n * const ds1 = tf.data.array([{a: 1}, {a: 2}, {a: 3}]);\n * const ds2 = tf.data.array([{b: 4}, {b: 5}, {b: 6}]);\n * const ds3 = tf.data.zip([ds1, ds2]);\n * await ds3.forEachAsync(e => console.log(JSON.stringify(e)));\n *\n * // If the goal is to merge the dicts in order to produce elements like\n * // {a: ..., b: ...}, this requires a second step such as:\n * console.log('Merge the objects:');\n * const ds4 = ds3.map(x => {return {a: x[0].a, b: x[1].b}});\n * await ds4.forEachAsync(e => console.log(e));\n * ```\n *\n * Zip a dict of datasets:\n * ```js\n * const a = tf.data.array([{a: 1}, {a: 2}, {a: 3}]);\n * const b = tf.data.array([{b: 4}, {b: 5}, {b: 6}]);\n * const c = tf.data.zip({c: a, d: b});\n * await c.forEachAsync(e => console.log(JSON.stringify(e)));\n * ```\n *\n * @doc {heading: 'Data', subheading: 'Operations', namespace: 'data'}\n */\nexport function zip(datasets) {\n // manually type-check the argument for JS users\n if (!isIterable(datasets)) {\n throw new Error('The argument to zip() must be an object or array.');\n }\n let size;\n if (Array.isArray(datasets)) {\n for (let i = 0; i < datasets.length; i++) {\n size = size == null ? datasets[i].size :\n Math.min(size, datasets[i].size);\n }\n }\n else if (datasets instanceof Object) {\n for (const ds in datasets) {\n size = size == null ? 
datasets[ds].size :\n Math.min(size, datasets[ds].size);\n }\n }\n return datasetFromIteratorFn(async () => {\n const streams = await deepMapAndAwaitAll(datasets, d => {\n if (d instanceof Dataset) {\n return { value: d.iterator(), recurse: false };\n }\n else if (isIterable(d)) {\n return { value: null, recurse: true };\n }\n else {\n throw new Error('Leaves of the structure passed to zip() must be Datasets, ' +\n 'not primitives.');\n }\n });\n return iteratorFromZipped(streams, ZipMismatchMode.SHORTEST);\n }, size);\n}\n/**\n * A zip function for use with deepZip, passed via the columnMajorBatch call.\n *\n * Accepts an array of identically-structured nested elements and either batches\n * them (if they are primitives, numeric arrays, or Tensors) or requests\n * recursion (if not).\n */\n// tslint:disable-next-line:no-any\nfunction deepBatchConcat(rows) {\n if (rows === null) {\n return null;\n }\n // use the first item to decide whether to recurse or batch here.\n const exampleRow = rows[0];\n if (canTensorify(exampleRow)) {\n // rows is an array of primitives, Tensors, or arrays. Batch them.\n const value = batchConcat(rows);\n return { value, recurse: false };\n }\n // the example row is an object, so recurse into it.\n return { value: null, recurse: true };\n}\n/**\n * Assembles a list of same-shaped numbers, number arrays, or Tensors\n * into a single new Tensor where axis 0 is the batch dimension.\n */\nfunction batchConcat(arrays) {\n if (arrays.length === 0) {\n // We can't return an empty Tensor because we don't know the element shape.\n throw new Error('Can\\'t make a batch of zero elements.');\n }\n if (arrays[0] instanceof tf.Tensor) {\n // Input is an array of Tensors\n return tf.stack(arrays);\n }\n else {\n // Input is a possibly-nested array of numbers.\n return tf.tensor(arrays);\n }\n}\n//# sourceMappingURL=dataset.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { Dataset } from '../dataset';\n/**\n * Represents a potentially large collection of text lines.\n *\n * The results are not batched.\n */\nexport class TextLineDataset extends Dataset {\n /**\n * Create a `TextLineDataset`.\n *\n * @param input A `DataSource` providing a chunked, UTF8-encoded byte stream.\n */\n constructor(input) {\n super();\n this.input = input;\n }\n async iterator() {\n const inputIterator = await this.input.iterator();\n const utf8Iterator = inputIterator.decodeUTF8();\n const lineIterator = utf8Iterator.split('\\n').map(line => {\n // Windows/DOS format text file has extra line breaker at the end of line.\n if (line.endsWith('\\r')) {\n line = line.slice(0, -1);\n }\n return line;\n });\n return lineIterator;\n }\n}\n//# sourceMappingURL=text_line_dataset.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nimport { Dataset } from '../dataset';\nimport { TextLineDataset } from './text_line_dataset';\nconst CODE_QUOTE = '\"';\nconst STATE_OUT = Symbol('out');\nconst STATE_FIELD = Symbol('field');\nconst STATE_QUOTE = Symbol('quote');\nconst STATE_QUOTE_AFTER_QUOTE = Symbol('quoteafterquote');\nconst STATE_WITHIN_QUOTE_IN_QUOTE = Symbol('quoteinquote');\n/**\n * Represents a potentially large collection of delimited text records.\n *\n * The produced `TensorContainer`s each contain one key-value pair for\n * every column of the table. When a field is empty in the incoming data, the\n * resulting value is `undefined`, or throw error if it is required. Values\n * that can be parsed as numbers are emitted as type `number`, other values\n * are parsed as `string`.\n *\n * The results are not batched.\n *\n * @doc {heading: 'Data', subheading: 'Classes', namespace: 'data'}\n */\nexport class CSVDataset extends Dataset {\n /**\n * Create a `CSVDataset`.\n *\n * @param input A `DataSource` providing a chunked, UTF8-encoded byte stream.\n * @param csvConfig (Optional) A CSVConfig object that contains configurations\n * of reading and decoding from CSV file(s).\n *\n * hasHeader: (Optional) A boolean value that indicates whether the first\n * row of provided CSV file is a header line with column names, and should\n * not be included in the data. Defaults to `true`.\n *\n * columnNames: (Optional) A list of strings that corresponds to\n * the CSV column names, in order. If provided, it ignores the column\n * names inferred from the header row. If not provided, infers the column\n * names from the first row of the records. If hasHeader is false and\n * columnNames is not provided, this method throws an error.\n *\n * columnConfigs: (Optional) A dictionary whose key is column names, value\n * is an object stating if this column is required, column's data type,\n * default value, and if this column is label. If provided, keys must\n * correspond to names provided in columnNames or inferred from the file\n * header lines. If isLabel is true any column, returns an array of two\n * items: the first item is a dict of features key/value pairs, the second\n * item is a dict of labels key/value pairs. If no feature is marked as\n * label, returns a dict of features only.\n *\n * configuredColumnsOnly (Optional) If true, only columns provided in\n * columnConfigs will be parsed and provided during iteration.\n *\n * delimiter (Optional) The string used to parse each line of the input\n * file. 
Defaults to `,`.\n */\n constructor(input, csvConfig) {\n super();\n this.input = input;\n this.hasHeader = true;\n this.fullColumnNames = null;\n this.columnNamesValidated = false;\n this.columnConfigs = null;\n this.configuredColumnsOnly = false;\n this.delimiter = ',';\n this.delimWhitespace = false;\n this.base = new TextLineDataset(input);\n if (!csvConfig) {\n csvConfig = {};\n }\n this.hasHeader = csvConfig.hasHeader === false ? false : true;\n this.fullColumnNames = csvConfig.columnNames;\n this.columnConfigs = csvConfig.columnConfigs;\n this.configuredColumnsOnly = csvConfig.configuredColumnsOnly;\n if (csvConfig.delimWhitespace) {\n util.assert(csvConfig.delimiter == null, () => 'Delimiter should not be provided when delimWhitespace is true.');\n this.delimWhitespace = true;\n this.delimiter = ' ';\n }\n else {\n this.delimiter = csvConfig.delimiter ? csvConfig.delimiter : ',';\n }\n }\n /**\n * Returns column names of the csv dataset. If `configuredColumnsOnly` is\n * true, return column names in `columnConfigs`. If `configuredColumnsOnly` is\n * false and `columnNames` is provided, `columnNames`. If\n * `configuredColumnsOnly` is false and `columnNames` is not provided, return\n * all column names parsed from the csv file. For example usage please go to\n * `tf.data.csv`.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n async columnNames() {\n if (!this.columnNamesValidated) {\n await this.setColumnNames();\n }\n return this.configuredColumnsOnly ? Object.keys(this.columnConfigs) :\n this.fullColumnNames;\n }\n /* 1) If `columnNames` is provided as string[], use this string[] as output\n * keys in corresponding order. The length must match the number of inferred\n * columns if `hasHeader` is true .\n * 2) If `columnNames` is not provided, parse header line as `columnNames` if\n * hasHeader is true. 
If `hasHeader` is false, throw an error.\n * 3) If `columnConfigs` is provided, all the keys in `columnConfigs` must\n * exist in parsed `columnNames`.\n */\n async setColumnNames() {\n const columnNamesFromFile = await this.maybeReadHeaderLine();\n if (!this.fullColumnNames && !columnNamesFromFile) {\n // Throw an error if columnNames is not provided and no header line.\n throw new Error('Column names must be provided if there is no header line.');\n }\n else if (this.fullColumnNames && columnNamesFromFile) {\n // Check provided columnNames match header line.\n util.assert(columnNamesFromFile.length === this.fullColumnNames.length, () => 'The length of provided columnNames (' +\n this.fullColumnNames.length.toString() +\n ') does not match the length of the header line read from ' +\n 'file (' + columnNamesFromFile.length.toString() + ').');\n }\n if (!this.fullColumnNames) {\n this.fullColumnNames = columnNamesFromFile;\n }\n // Check if there are duplicate column names.\n const counts = this.fullColumnNames.reduce((countAcc, name) => {\n countAcc[name] = (countAcc[name] + 1) || 1;\n return countAcc;\n }, {});\n const duplicateNames = Object.keys(counts).filter((name) => (counts[name] > 1));\n util.assert(duplicateNames.length === 0, () => 'Duplicate column names found: ' + duplicateNames.toString());\n // Check if keys in columnConfigs match columnNames.\n if (this.columnConfigs) {\n for (const key of Object.keys(this.columnConfigs)) {\n const index = this.fullColumnNames.indexOf(key);\n if (index === -1) {\n throw new Error('The key \"' + key +\n '\" provided in columnConfigs does not match any of the column ' +\n 'names (' + this.fullColumnNames.toString() + ').');\n }\n }\n }\n this.columnNamesValidated = true;\n }\n async maybeReadHeaderLine() {\n if (this.hasHeader) {\n const iter = await this.base.iterator();\n const firstElement = await iter.next();\n if (firstElement.done) {\n throw new Error('No data was found for CSV parsing.');\n }\n const firstLine = firstElement.value;\n const headers = this.parseRow(firstLine, false);\n return headers;\n }\n else {\n return null;\n }\n }\n async iterator() {\n if (!this.columnNamesValidated) {\n await this.setColumnNames();\n }\n let lines = await this.base.iterator();\n if (this.hasHeader) {\n // We previously read the first line to get the columnNames.\n // Now that we're providing data, skip it.\n lines = lines.skip(1);\n }\n return lines.map(x => this.makeDataElement(x));\n }\n makeDataElement(line) {\n const values = this.parseRow(line);\n const features = {};\n const labels = {};\n for (let i = 0; i < this.fullColumnNames.length; i++) {\n const key = this.fullColumnNames[i];\n const config = this.columnConfigs ? this.columnConfigs[key] : null;\n if (this.configuredColumnsOnly && !config) {\n // This column is not selected.\n continue;\n }\n else {\n const value = values[i];\n let parsedValue = null;\n if (value === '') {\n // If default value is provided, use it. 
If default value is not\n // provided, set as undefined.\n if (config && config.default !== undefined) {\n parsedValue = config.default;\n }\n else if (config && (config.required || config.isLabel)) {\n throw new Error(`Required column ${key} is empty in this line: ${line}`);\n }\n else {\n parsedValue = undefined;\n }\n }\n else {\n // A value is present, so parse it based on type\n const valueAsNum = Number(value);\n if (isNaN(valueAsNum)) {\n // The value is a string and this column is declared as boolean\n // in config, parse it as boolean.\n if (config && config.dtype === 'bool') {\n parsedValue = this.getBoolean(value);\n }\n else {\n // Set value as string\n parsedValue = value;\n }\n }\n else if (!config || !config.dtype) {\n // If this value is a number and no type config is provided, return\n // it as number.\n parsedValue = valueAsNum;\n }\n else {\n // If this value is a number and data type is provided, parse it\n // according to provided data type.\n switch (config.dtype) {\n case 'float32':\n parsedValue = valueAsNum;\n break;\n case 'int32':\n parsedValue = Math.floor(valueAsNum);\n break;\n case 'bool':\n parsedValue = this.getBoolean(value);\n break;\n default:\n parsedValue = valueAsNum;\n }\n }\n }\n // Check if this column is label.\n (config && config.isLabel) ? labels[key] = parsedValue :\n features[key] = parsedValue;\n }\n }\n // If label exists, return an object of features and labels as {xs:features,\n // ys:labels}, otherwise return features only.\n if (Object.keys(labels).length === 0) {\n return features;\n }\n else {\n return { xs: features, ys: labels };\n }\n }\n getBoolean(value) {\n if (value === '1' || value.toLowerCase() === 'true') {\n return 1;\n }\n else {\n return 0;\n }\n }\n // adapted from https://beta.observablehq.com/@mbostock/streaming-csv\n parseRow(line, validateElementCount = true) {\n const result = [];\n let readOffset = 0;\n const readLength = line.length;\n let currentState = STATE_OUT;\n // Goes through the line to parse quote.\n for (let i = 0; i < readLength; i++) {\n switch (currentState) {\n // Before enter a new field\n case STATE_OUT:\n switch (line.charAt(i)) {\n // Enter a quoted field\n case CODE_QUOTE:\n readOffset = i + 1;\n currentState = STATE_QUOTE;\n break;\n // Read an empty field\n case this.delimiter:\n readOffset = i + 1;\n // If delimiter is white space and configured to collapse\n // multiple white spaces, ignore this white space.\n if (this.delimiter === ' ' && this.delimWhitespace) {\n break;\n }\n result.push('');\n currentState = STATE_OUT;\n break;\n // Enter an unquoted field\n default:\n currentState = STATE_FIELD;\n readOffset = i;\n break;\n }\n break;\n // In an unquoted field\n case STATE_FIELD:\n switch (line.charAt(i)) {\n // Exit an unquoted field, add it to result\n case this.delimiter:\n result.push(line.substring(readOffset, i));\n currentState = STATE_OUT;\n readOffset = i + 1;\n break;\n default:\n }\n break;\n // In a quoted field\n case STATE_QUOTE:\n switch (line.charAt(i)) {\n // Read a quote after a quote\n case CODE_QUOTE:\n currentState = STATE_QUOTE_AFTER_QUOTE;\n break;\n default:\n }\n break;\n // This state means it's right after a second quote in a field\n case STATE_QUOTE_AFTER_QUOTE:\n switch (line.charAt(i)) {\n // Finished a quoted field\n case this.delimiter:\n result.push(line.substring(readOffset, i - 1));\n currentState = STATE_OUT;\n readOffset = i + 1;\n break;\n // Finished a quoted part in a quoted field\n case CODE_QUOTE:\n currentState = STATE_QUOTE;\n break;\n // In a 
quoted part in a quoted field\n default:\n currentState = STATE_WITHIN_QUOTE_IN_QUOTE;\n break;\n }\n break;\n case STATE_WITHIN_QUOTE_IN_QUOTE:\n switch (line.charAt(i)) {\n // Exit a quoted part in a quoted field\n case CODE_QUOTE:\n currentState = STATE_QUOTE;\n break;\n default:\n }\n break;\n default:\n }\n }\n // Adds last item based on if it is quoted.\n if (currentState === STATE_QUOTE_AFTER_QUOTE) {\n result.push(line.substring(readOffset, readLength - 1));\n }\n else {\n result.push(line.substring(readOffset));\n }\n // Check if each row has the same number of elements as column names.\n if (validateElementCount && result.length !== this.fullColumnNames.length) {\n throw new Error(`Invalid row in csv file. Should have ${this.fullColumnNames.length} elements in a row, but got ${result}`);\n }\n return result;\n }\n}\n// TODO(soergel): add more basic datasets for parity with tf.data\n// tf.data.FixedLengthRecordDataset()\n// tf.data.TFRecordDataset()\n//# sourceMappingURL=csv_dataset.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { env, tensor, util } from '@tensorflow/tfjs-core';\nimport { LazyIterator } from './lazy_iterator';\n/**\n * Provide a stream of tensors from microphone audio stream. The tensors are\n * representing audio data as frequency-domain spectrogram generated with\n * browser's native FFT. Tensors representing time-domain waveform is available\n * based on configuration. Only works in browser environment.\n */\nexport class MicrophoneIterator extends LazyIterator {\n constructor(microphoneConfig) {\n super();\n this.microphoneConfig = microphoneConfig;\n this.isClosed = false;\n this.fftSize = microphoneConfig.fftSize || 1024;\n const fftSizeLog2 = Math.log2(this.fftSize);\n if (this.fftSize < 0 || fftSizeLog2 < 4 || fftSizeLog2 > 14 ||\n !Number.isInteger(fftSizeLog2)) {\n throw new Error(`Invalid fftSize: it must be a power of 2 between ` +\n `2 to 4 and 2 to 14, but got ${this.fftSize}`);\n }\n this.numFrames = microphoneConfig.numFramesPerSpectrogram || 43;\n this.sampleRateHz = microphoneConfig.sampleRateHz;\n this.columnTruncateLength =\n microphoneConfig.columnTruncateLength || this.fftSize;\n this.audioTrackConstraints = microphoneConfig.audioTrackConstraints;\n this.smoothingTimeConstant = microphoneConfig.smoothingTimeConstant || 0;\n this.includeSpectrogram =\n microphoneConfig.includeSpectrogram === false ? false : true;\n this.includeWaveform =\n microphoneConfig.includeWaveform === true ? true : false;\n if (!this.includeSpectrogram && !this.includeWaveform) {\n throw new Error('Both includeSpectrogram and includeWaveform are false. 
' +\n 'At least one type of data should be returned.');\n }\n }\n summary() {\n return `microphone`;\n }\n // Construct a MicrophoneIterator and start the audio stream.\n static async create(microphoneConfig = {}) {\n if (env().get('IS_NODE')) {\n throw new Error('microphone API is only supported in browser environment.');\n }\n const microphoneIterator = new MicrophoneIterator(microphoneConfig);\n // Call async function start() to initialize the audio stream.\n await microphoneIterator.start();\n return microphoneIterator;\n }\n // Start the audio stream and FFT.\n async start() {\n try {\n this.stream = await navigator.mediaDevices.getUserMedia({\n audio: this.audioTrackConstraints == null ? true :\n this.audioTrackConstraints,\n video: false\n });\n }\n catch (e) {\n throw new Error(`Error thrown while initializing video stream: ${e.message}`);\n }\n if (!this.stream) {\n throw new Error('Could not obtain audio from microphone.');\n }\n const ctxConstructor = \n // tslint:disable-next-line:no-any\n window.AudioContext || window.webkitAudioContext;\n this.audioContext = new ctxConstructor();\n if (!this.sampleRateHz) {\n // If sample rate is not provided, use the available sample rate on\n // device.\n this.sampleRateHz = this.audioContext.sampleRate;\n }\n else if (this.audioContext.sampleRate !== this.sampleRateHz) {\n throw new Error(`Mismatch in sampling rate: ` +\n `Expected: ${this.sampleRateHz}; ` +\n `Actual: ${this.audioContext.sampleRate}`);\n }\n const streamSource = this.audioContext.createMediaStreamSource(this.stream);\n this.analyser = this.audioContext.createAnalyser();\n this.analyser.fftSize = this.fftSize * 2;\n this.analyser.smoothingTimeConstant = this.smoothingTimeConstant;\n streamSource.connect(this.analyser);\n this.freqData = new Float32Array(this.fftSize);\n this.timeData = new Float32Array(this.fftSize);\n return;\n }\n async next() {\n if (this.isClosed) {\n return { value: null, done: true };\n }\n let spectrogramTensor;\n let waveformTensor;\n const audioDataQueue = await this.getAudioData();\n if (this.includeSpectrogram) {\n const freqData = this.flattenQueue(audioDataQueue.freqDataQueue);\n spectrogramTensor = this.getTensorFromAudioDataArray(freqData, [this.numFrames, this.columnTruncateLength, 1]);\n }\n if (this.includeWaveform) {\n const timeData = this.flattenQueue(audioDataQueue.timeDataQueue);\n waveformTensor = this.getTensorFromAudioDataArray(timeData, [this.numFrames * this.fftSize, 1]);\n }\n return {\n value: { 'spectrogram': spectrogramTensor, 'waveform': waveformTensor },\n done: false\n };\n }\n // Capture one result from the audio stream, and extract the value from\n // iterator.next() result.\n async capture() {\n return (await this.next()).value;\n }\n async getAudioData() {\n const freqDataQueue = [];\n const timeDataQueue = [];\n let currentFrames = 0;\n return new Promise(resolve => {\n const intervalID = setInterval(() => {\n if (this.includeSpectrogram) {\n this.analyser.getFloatFrequencyData(this.freqData);\n // If the audio stream is initializing, return empty queue.\n if (this.freqData[0] === -Infinity) {\n resolve({ freqDataQueue, timeDataQueue });\n }\n freqDataQueue.push(this.freqData.slice(0, this.columnTruncateLength));\n }\n if (this.includeWaveform) {\n this.analyser.getFloatTimeDomainData(this.timeData);\n timeDataQueue.push(this.timeData.slice());\n }\n // Clean interval and return when all frames have been collected\n if (++currentFrames === this.numFrames) {\n clearInterval(intervalID);\n resolve({ freqDataQueue, 
timeDataQueue });\n }\n }, this.fftSize / this.sampleRateHz * 1e3);\n });\n }\n // Stop the audio stream and pause the iterator.\n stop() {\n if (!this.isClosed) {\n this.isClosed = true;\n this.analyser.disconnect();\n this.audioContext.close();\n if (this.stream != null && this.stream.getTracks().length > 0) {\n this.stream.getTracks()[0].stop();\n }\n }\n }\n // Override toArray() function to prevent collecting.\n toArray() {\n throw new Error('Can not convert infinite audio stream to array.');\n }\n // Return audio sampling rate in Hz\n getSampleRate() {\n return this.sampleRateHz;\n }\n flattenQueue(queue) {\n const frameSize = queue[0].length;\n const freqData = new Float32Array(queue.length * frameSize);\n queue.forEach((data, i) => freqData.set(data, i * frameSize));\n return freqData;\n }\n getTensorFromAudioDataArray(freqData, shape) {\n const vals = new Float32Array(util.sizeFromShape(shape));\n // If the data is less than the output shape, the rest is padded with zeros.\n vals.set(freqData, vals.length - freqData.length);\n return tensor(vals, shape);\n }\n}\n//# sourceMappingURL=microphone_iterator.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { browser, env, image, tensor1d, tensor2d, tidy, util } from '@tensorflow/tfjs-core';\nimport { LazyIterator } from './lazy_iterator';\n/**\n * Provide a stream of image tensors from webcam video stream. 
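A quick sanity check of the timing implemented above, as a standalone arithmetic sketch; the 44.1 kHz sample rate and 232-column truncation are assumptions borrowed from the `tf.data.microphone` example that appears later in this bundle:

```js
// Back-of-the-envelope check of the spectrogram timing above, assuming the
// defaults used elsewhere in this bundle (fftSize 1024, 43 frames) and a
// 44.1 kHz device sample rate.
const fftSize = 1024;
const numFrames = 43;
const sampleRateHz = 44100;
const columnTruncateLength = 232;   // as in the tf.data.microphone example

// One analyser snapshot is taken every fftSize / sampleRateHz seconds.
const framePeriodMs = (fftSize / sampleRateHz) * 1e3;   // ~23.2 ms
const spectrogramMs = framePeriodMs * numFrames;        // ~998 ms, roughly one second

// Each capture() therefore yields a spectrogram tensor of shape
// [numFrames, columnTruncateLength, 1] = [43, 232, 1].
console.log({framePeriodMs, spectrogramMs, shape: [numFrames, columnTruncateLength, 1]});
```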
Only works in\n * browser environment.\n */\nexport class WebcamIterator extends LazyIterator {\n constructor(webcamVideoElement, webcamConfig) {\n super();\n this.webcamVideoElement = webcamVideoElement;\n this.webcamConfig = webcamConfig;\n this.isClosed = true;\n this.resize = false;\n if (this.needToResize()) {\n this.resize = true;\n this.cropSize =\n [this.webcamConfig.resizeHeight, this.webcamConfig.resizeWidth];\n this.cropBoxInd = tensor1d([0], 'int32');\n if (this.webcamConfig.centerCrop) {\n // Calculate the box based on resizing shape.\n const widthCroppingRatio = this.webcamConfig.resizeWidth * 1.0 / this.webcamVideoElement.width;\n const heightCroppingRatio = this.webcamConfig.resizeHeight * 1.0 /\n this.webcamVideoElement.height;\n const widthCropStart = (1 - widthCroppingRatio) / 2;\n const heightCropStart = (1 - heightCroppingRatio) / 2;\n const widthCropEnd = widthCropStart + widthCroppingRatio;\n const heightCropEnd = heightCroppingRatio + heightCropStart;\n this.cropBox = tensor2d([heightCropStart, widthCropStart, heightCropEnd, widthCropEnd], [1, 4]);\n }\n else {\n this.cropBox = tensor2d([0, 0, 1, 1], [1, 4]);\n }\n }\n }\n summary() {\n return `webcam`;\n }\n // Construct a WebcamIterator and start it's video stream.\n static async create(webcamVideoElement, webcamConfig = {}) {\n if (env().get('IS_NODE')) {\n throw new Error('tf.data.webcam is only supported in browser environment.');\n }\n if (!webcamVideoElement) {\n // If webcam video element is not provided, create a hidden video element\n // with provided width and height.\n webcamVideoElement = document.createElement('video');\n if (!webcamConfig.resizeWidth || !webcamConfig.resizeHeight) {\n throw new Error('Please provide webcam video element, or resizeWidth and ' +\n 'resizeHeight to create a hidden video element.');\n }\n webcamVideoElement.width = webcamConfig.resizeWidth;\n webcamVideoElement.height = webcamConfig.resizeHeight;\n }\n const webcamIterator = new WebcamIterator(webcamVideoElement, webcamConfig);\n // Call async function to initialize the video stream.\n await webcamIterator.start();\n return webcamIterator;\n }\n // Async function to start video stream.\n async start() {\n if (this.webcamConfig.facingMode) {\n util.assert((this.webcamConfig.facingMode === 'user') ||\n (this.webcamConfig.facingMode === 'environment'), () => `Invalid webcam facing mode: ${this.webcamConfig.facingMode}. 
` +\n `Please provide 'user' or 'environment'`);\n }\n try {\n this.stream = await navigator.mediaDevices.getUserMedia({\n video: {\n deviceId: this.webcamConfig.deviceId,\n facingMode: this.webcamConfig.facingMode ?\n this.webcamConfig.facingMode :\n 'user',\n width: this.webcamVideoElement.width,\n height: this.webcamVideoElement.height\n }\n });\n }\n catch (e) {\n // Modify the error message but leave the stack trace intact\n e.message = `Error thrown while initializing video stream: ${e.message}`;\n throw e;\n }\n if (!this.stream) {\n throw new Error('Could not obtain video from webcam.');\n }\n // Older browsers may not have srcObject\n try {\n this.webcamVideoElement.srcObject = this.stream;\n }\n catch (error) {\n console.log(error);\n this.webcamVideoElement.src = window.URL.createObjectURL(this.stream);\n }\n // Start the webcam video stream\n this.webcamVideoElement.play();\n this.isClosed = false;\n return new Promise(resolve => {\n // Add event listener to make sure the webcam has been fully initialized.\n this.webcamVideoElement.onloadedmetadata = () => {\n resolve();\n };\n });\n }\n async next() {\n if (this.isClosed) {\n return { value: null, done: true };\n }\n let img;\n try {\n img = browser.fromPixels(this.webcamVideoElement);\n }\n catch (e) {\n throw new Error(`Error thrown converting video to pixels: ${JSON.stringify(e)}`);\n }\n if (this.resize) {\n try {\n return { value: this.cropAndResizeFrame(img), done: false };\n }\n catch (e) {\n throw new Error(`Error thrown cropping the video: ${e.message}`);\n }\n finally {\n img.dispose();\n }\n }\n else {\n return { value: img, done: false };\n }\n }\n needToResize() {\n // If resizeWidth and resizeHeight are provided, and different from the\n // width and height of original HTMLVideoElement, then resizing and cropping\n // is required.\n if (this.webcamConfig.resizeWidth && this.webcamConfig.resizeHeight &&\n (this.webcamVideoElement.width !== this.webcamConfig.resizeWidth ||\n this.webcamVideoElement.height !== this.webcamConfig.resizeHeight)) {\n return true;\n }\n return false;\n }\n // Cropping and resizing each frame based on config\n cropAndResizeFrame(img) {\n return tidy(() => {\n const expandedImage = img.toFloat().expandDims(0);\n let resizedImage;\n resizedImage = image.cropAndResize(expandedImage, this.cropBox, this.cropBoxInd, this.cropSize, 'bilinear');\n // Extract image from batch cropping.\n const shape = resizedImage.shape;\n return resizedImage.reshape(shape.slice(1));\n });\n }\n // Capture one frame from the video stream, and extract the value from\n // iterator.next() result.\n async capture() {\n return (await this.next()).value;\n }\n // Stop the video stream and pause webcam iterator.\n stop() {\n const tracks = this.stream.getTracks();\n tracks.forEach(track => track.stop());\n try {\n this.webcamVideoElement.srcObject = null;\n }\n catch (error) {\n console.log(error);\n this.webcamVideoElement.src = null;\n }\n this.isClosed = true;\n }\n // Override toArray() function to prevent collecting.\n toArray() {\n throw new Error('Can not convert infinite video stream to array.');\n }\n}\n//# sourceMappingURL=webcam_iterator.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
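A browser-only usage sketch of the resize/crop path implemented above, going through the public `tf.data.webcam` wrapper defined later in this bundle; it assumes camera permission is granted and the sizes are placeholders:

```js
import * as tf from '@tensorflow/tfjs';

async function captureResizedFrame() {
  const video = document.createElement('video');
  video.width = 640;
  video.height = 480;

  // resizeWidth/resizeHeight/centerCrop feed the cropAndResize path above:
  // frames are center-cropped and bilinearly resized to 224x224.
  const cam = await tf.data.webcam(video, {
    resizeWidth: 224,
    resizeHeight: 224,
    centerCrop: true,
    facingMode: 'user',   // must be 'user' or 'environment'
  });

  const frame = await cam.capture();   // float32 tensor of shape [224, 224, 3]
  frame.print();
  frame.dispose();
  cam.stop();
}
```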
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\n/**\n * Represents a data source readable as a stream of binary data chunks.\n *\n * Because `Dataset`s can be read repeatedly (via `Dataset.iterator()`), this\n * provides a means to repeatedly create streams from the underlying data\n * sources.\n */\nexport class DataSource {\n}\n// TODO(soergel): consider convenience factory functions here\n// in combination with chainable source->dataset above, e.g.:\n// tf.data.url(...).asCsvDataset().shuffle().batch()\n//# sourceMappingURL=datasource.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { LazyIterator, OneToManyIterator } from './lazy_iterator';\nexport class StringIterator extends LazyIterator {\n /**\n * Splits a string stream on a given separator.\n *\n * It is assumed that the incoming chunk boundaries have no semantic meaning,\n * so conceptually the incoming stream is treated simply as the concatenation\n * of its elements.\n *\n * The outgoing stream provides chunks corresponding to the results of the\n * standard string split() operation (even if such a chunk spanned incoming\n * chunks). The separators are not included.\n *\n * A typical usage is to split a text file (represented as a stream with\n * arbitrary chunk boundaries) into lines.\n *\n * @param upstream A readable stream of strings that can be treated as\n * concatenated.\n * @param separator A character to split on.\n */\n split(separator) {\n return new SplitIterator(this, separator);\n }\n}\n// ============================================================================\n// The following private classes serve to implement the chainable methods\n// on StringIterator. 
Unfortunately they can't be placed in separate files, due\n// to resulting trouble with circular imports.\n// ============================================================================\n// We wanted multiple inheritance, e.g.\n// class SplitIterator extends QueueIterator, StringIterator\n// but the TypeScript mixin approach is a bit hacky, so we take this adapter\n// approach instead.\nclass SplitIterator extends StringIterator {\n constructor(upstream, separator) {\n super();\n this.upstream = upstream;\n this.impl = new SplitIteratorImpl(upstream, separator);\n }\n summary() {\n return this.impl.summary();\n }\n async next() {\n return this.impl.next();\n }\n}\nclass SplitIteratorImpl extends OneToManyIterator {\n constructor(upstream, separator) {\n super();\n this.upstream = upstream;\n this.separator = separator;\n // A partial string at the end of an upstream chunk\n this.carryover = '';\n }\n summary() {\n return `${this.upstream.summary()} -> Split('${this.separator}')`;\n }\n async pump() {\n const chunkResult = await this.upstream.next();\n if (chunkResult.done) {\n if (this.carryover === '') {\n return false;\n }\n // Pretend that the pump succeeded in order to emit the small last batch.\n // The next pump() call will actually fail.\n this.outputQueue.push(this.carryover);\n this.carryover = '';\n return true;\n }\n const lines = chunkResult.value.split(this.separator);\n // Note the behavior: \" ab \".split(' ') === ['', 'ab', '']\n // Thus the carryover may be '' if the separator falls on a chunk\n // boundary; this produces the correct result.\n lines[0] = this.carryover + lines[0];\n for (const line of lines.slice(0, -1)) {\n this.outputQueue.push(line);\n }\n this.carryover = lines[lines.length - 1];\n return true;\n }\n}\n//# sourceMappingURL=string_iterator.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { env } from '@tensorflow/tfjs-core';\nimport { LazyIterator, OneToManyIterator } from './lazy_iterator';\nimport { StringIterator } from './string_iterator';\nexport class ByteChunkIterator extends LazyIterator {\n /**\n * Decode a stream of UTF8-encoded byte arrays to a stream of strings.\n *\n * The byte arrays producetd from the ByteChunkIterator on which this is\n * called will be interpreted as concatenated. No assumptions are made about\n * the boundaries of the incoming chunks, so a multi-byte UTF8 encoding of a\n * character may span the boundary between chunks. This naturally happens,\n * for instance, when reading fixed-size byte arrays from a file.\n */\n decodeUTF8() {\n return new Utf8Iterator(this);\n }\n}\n// ============================================================================\n// The following private classes serve to implement the chainable methods\n// on ByteChunkIterator. 
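The carryover trick in `SplitIteratorImpl.pump()` above can be shown in isolation; the following is an illustrative standalone generator, not code from the bundle:

```js
// Standalone sketch of the carryover logic above: split a stream of text
// chunks on '\n' even when a line straddles a chunk boundary.
function* splitChunks(chunks, separator = '\n') {
  let carryover = '';
  for (const chunk of chunks) {
    const pieces = chunk.split(separator);
    pieces[0] = carryover + pieces[0];       // finish the line left over from the previous chunk
    yield* pieces.slice(0, -1);              // emit every complete line
    carryover = pieces[pieces.length - 1];   // possibly partial last line
  }
  if (carryover !== '') yield carryover;     // emit the final unterminated line
}

console.log([...splitChunks(['alpha\nbe', 'ta\ngam', 'ma'])]);
// -> ['alpha', 'beta', 'gamma']
```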
Unfortunately they can't be placed in separate files,\n// due to resulting trouble with circular imports.\n// ============================================================================\n// We wanted multiple inheritance, e.g.\n// class Utf8Iterator extends QueueIterator, StringIterator\n// but the TypeScript mixin approach is a bit hacky, so we take this adapter\n// approach instead.\nclass Utf8Iterator extends StringIterator {\n constructor(upstream) {\n super();\n this.upstream = upstream;\n this.impl = new Utf8IteratorImpl(upstream);\n }\n summary() {\n return this.impl.summary();\n }\n async next() {\n return this.impl.next();\n }\n}\n/**\n * Decode a stream of UTF8-encoded byte arrays to a stream of strings.\n *\n * This is tricky because the incoming byte array boundaries may disrupt a\n * multi-byte UTF8 character. Thus any incomplete character data at the end of\n * a chunk must be carried over and prepended to the next chunk before\n * decoding. Luckily with native decoder, TextDecoder in browser and\n * string_decoder in node, byte array boundaries are handled automatically.\n *\n * In the context of an input pipeline for machine learning, UTF8 decoding is\n * needed to parse text files containing training examples or prediction\n * requests (e.g., formatted as CSV or JSON). We cannot use the built-in\n * decoding provided by FileReader.readAsText() because here we are in a\n * streaming context, which FileReader does not support.\n *\n * @param upstream A `LazyIterator` of `Uint8Arrays` containing UTF8-encoded\n * text, which should be interpreted as concatenated. No assumptions are\n * made about the boundaries of the incoming chunks, so a multi-byte UTF8\n * encoding of a character may span the boundary between chunks. This\n * naturally happens, for instance, when reading fixed-size byte arrays from a\n * file.\n */\nclass Utf8IteratorImpl extends OneToManyIterator {\n constructor(upstream) {\n super();\n this.upstream = upstream;\n if (env().get('IS_BROWSER')) {\n this.decoder = new TextDecoder('utf-8');\n }\n else {\n // tslint:disable-next-line:no-require-imports\n const { StringDecoder } = require('string_decoder');\n this.decoder = new StringDecoder('utf8');\n }\n }\n summary() {\n return `${this.upstream.summary()} -> Utf8`;\n }\n async pump() {\n const chunkResult = await this.upstream.next();\n let chunk;\n if (chunkResult.done) {\n return false;\n }\n else {\n chunk = chunkResult.value;\n }\n let text;\n if (env().get('IS_BROWSER')) {\n text = this.decoder.decode(chunk, { stream: true });\n }\n else {\n text = this.decoder.write(Buffer.from(chunk.buffer));\n }\n this.outputQueue.push(text);\n return true;\n }\n}\n//# sourceMappingURL=byte_chunk_iterator.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
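The `{stream: true}` flag used above matters because a multi-byte character can straddle a chunk boundary; here is a standalone sketch using the standard `TextDecoder` API (nothing in it is defined by this bundle):

```js
// 'é' is 0xC3 0xA9 in UTF-8; feed the two bytes in separate chunks and let
// the streaming decoder hold back the incomplete sequence.
const decoder = new TextDecoder('utf-8');

const chunk1 = new Uint8Array([0x63, 0x61, 0x66, 0xC3]);  // 'caf' + first byte of 'é'
const chunk2 = new Uint8Array([0xA9]);                    // second byte of 'é'

const text = decoder.decode(chunk1, {stream: true}) +     // -> 'caf' (é is held back)
             decoder.decode(chunk2, {stream: true});      // -> 'é'
console.log(text);                                        // 'café'
```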
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\n// inspired by https://github.com/maxogden/filereader-stream\nimport { env, util } from '@tensorflow/tfjs-core';\nimport { ByteChunkIterator } from './byte_chunk_iterator';\n/**\n * Provide a stream of chunks from a File, Blob, or Uint8Array.\n * @param file The source File, Blob or Uint8Array.\n * @param options Optional settings controlling file reading.\n * @returns a lazy Iterator of Uint8Arrays containing sequential chunks of the\n * input File, Blob or Uint8Array.\n */\nexport class FileChunkIterator extends ByteChunkIterator {\n constructor(file, options = {}) {\n super();\n this.file = file;\n this.options = options;\n util.assert((file instanceof Uint8Array) ||\n (env().get('IS_BROWSER') ?\n (file instanceof File || file instanceof Blob) :\n false), () => 'FileChunkIterator only supports File, Blob and Uint8Array ' +\n 'right now.');\n this.offset = options.offset || 0;\n // default 1MB chunk has tolerable perf on large files\n this.chunkSize = options.chunkSize || 1024 * 1024;\n }\n summary() {\n return `FileChunks ${this.file}`;\n }\n async next() {\n if (this.offset >= ((this.file instanceof Uint8Array) ?\n this.file.byteLength :\n this.file.size)) {\n return { value: null, done: true };\n }\n const chunk = new Promise((resolve, reject) => {\n const end = this.offset + this.chunkSize;\n if (this.file instanceof Uint8Array) {\n // Note if end > this.uint8Array.byteLength, we just get a small last\n // chunk.\n resolve(new Uint8Array(this.file.slice(this.offset, end)));\n }\n else {\n // This branch assumes that this.file type is File or Blob, which\n // means it is in the browser environment.\n // TODO(soergel): is this a performance issue?\n const fileReader = new FileReader();\n fileReader.onload = (event) => {\n let data = fileReader.result;\n // Not sure we can trust the return type of\n // FileReader.readAsArrayBuffer See e.g.\n // https://github.com/node-file-api/FileReader/issues/2\n if (data instanceof ArrayBuffer) {\n data = new Uint8Array(data);\n }\n if (!(data instanceof Uint8Array)) {\n return reject(new TypeError('FileReader returned unknown type.'));\n }\n resolve(data);\n };\n fileReader.onabort = (event) => {\n return reject(new Error('Aborted'));\n };\n fileReader.onerror = (event) => {\n return reject(new Error(event.type));\n };\n // TODO(soergel): better handle onabort, onerror\n // Note if end > this.file.size, we just get a small last chunk.\n const slice = this.file.slice(this.offset, end);\n // We can't use readAsText here (even if we know the file is text)\n // because the slice boundary may fall within a multi-byte character.\n fileReader.readAsArrayBuffer(slice);\n }\n this.offset = end;\n });\n return { value: (await chunk), done: false };\n }\n}\n//# sourceMappingURL=file_chunk_iterator.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
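The offset/chunkSize walk in `FileChunkIterator.next()` above, restated as a standalone sketch over a plain `Uint8Array` (the browser `FileReader` branch is omitted); the names are illustrative only:

```js
// Yield successive slices of `bytes`; like the iterator above, the final
// chunk may simply be shorter than chunkSize.
function* fileChunks(bytes, chunkSize = 1024 * 1024, offset = 0) {
  while (offset < bytes.byteLength) {
    yield bytes.slice(offset, offset + chunkSize);
    offset += chunkSize;
  }
}

const data = new Uint8Array(2500);
const sizes = [...fileChunks(data, 1024)].map(c => c.byteLength);
console.log(sizes);   // [1024, 1024, 452]
```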
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nimport { FileChunkIterator } from './file_chunk_iterator';\n/**\n * Provide a stream of chunks from a URL.\n *\n * Note this class first downloads the entire file into memory before providing\n * the first element from the stream. This is because the Fetch API does not\n * yet reliably provide a reader stream for the response body.\n */\nexport async function urlChunkIterator(url, options = {}) {\n let urlString;\n let requestInit;\n if ((typeof url) === 'string') {\n urlString = url;\n }\n else {\n urlString = url.url;\n requestInit = getRequestInitFromRequest(url);\n }\n const response = await util.fetch(urlString, requestInit);\n if (response.ok) {\n const uint8Array = new Uint8Array(await response.arrayBuffer());\n return new FileChunkIterator(uint8Array, options);\n }\n else {\n throw new Error(response.statusText);\n }\n}\n// Generate RequestInit from Request to match tf.util.fetch signature.\nconst getRequestInitFromRequest = (request) => {\n const init = {\n method: request.method,\n headers: request.headers,\n body: request.body,\n mode: request.mode,\n credentials: request.credentials,\n cache: request.cache,\n redirect: request.redirect,\n referrer: request.referrer,\n integrity: request.integrity,\n };\n return init;\n};\n//# sourceMappingURL=url_chunk_iterator.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\n// Skip tslint any type check cause this method is aiming to check type of\n// input.\n// tslint:disable-next-line:no-any\nexport function isLocalPath(source) {\n return (typeof source === 'string') && source.substr(0, 7) === 'file://';\n}\n//# sourceMappingURL=source_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
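This prefix check is what routes a source either to the `FileDataSource` defined just below (Node, `file://` paths, read with `fs.readFileSync`) or to `urlChunkIterator` (remote URLs, fetched fully into memory first). A hedged usage sketch with placeholder paths:

```js
import * as tf from '@tensorflow/tfjs';

async function preview() {
  // Remote CSV: urlChunkIterator fetches the whole response into memory first.
  const remote = tf.data.csv('https://example.com/train.csv');
  await remote.take(1).forEachAsync(row => console.log(row));

  // Local CSV (Node only): the 'file://' prefix is detected by isLocalPath,
  // stripped, and the file is read from disk before chunking.
  const local = tf.data.csv('file://./data/train.csv');
  await local.take(1).forEachAsync(row => console.log(row));
}
```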
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { env } from '@tensorflow/tfjs-core';\nimport { DataSource } from '../datasource';\nimport { FileChunkIterator } from '../iterators/file_chunk_iterator';\nimport { isLocalPath } from '../util/source_util';\n/**\n * Represents a file, blob, or Uint8Array readable as a stream of binary data\n * chunks.\n */\nexport class FileDataSource extends DataSource {\n /**\n * Create a `FileDataSource`.\n *\n * @param input Local file path, or `File`/`Blob`/`Uint8Array` object to\n * read. Local file only works in node environment.\n * @param options Options passed to the underlying `FileChunkIterator`s,\n * such as {chunksize: 1024}.\n */\n constructor(input, options = {}) {\n super();\n this.input = input;\n this.options = options;\n }\n async iterator() {\n if (isLocalPath(this.input) && env().get('IS_NODE')) {\n // tslint:disable-next-line:no-require-imports\n const fs = require('fs');\n this.input = fs.readFileSync(this.input.substr(7));\n }\n // TODO(kangyizhang): Add LocalFileChunkIterator to split local streaming\n // with file in browser.\n return new FileChunkIterator(this.input, this.options);\n }\n}\n//# sourceMappingURL=file_data_source.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { DataSource } from '../datasource';\nimport { urlChunkIterator } from '../iterators/url_chunk_iterator';\nimport { isLocalPath } from '../util/source_util';\nimport { FileDataSource } from './file_data_source';\n/*\n * Represents a URL readable as a stream of binary data chunks.\n */\nexport class URLDataSource extends DataSource {\n /**\n * Create a `URLDataSource`.\n *\n * @param url A source URL string, or a `Request` object.\n * @param options Options passed to the underlying `FileChunkIterator`s,\n * such as {chunksize: 1024}.\n */\n constructor(url, fileOptions = {}) {\n super();\n this.url = url;\n this.fileOptions = fileOptions;\n }\n // TODO(soergel): provide appropriate caching options. Currently this\n // will download the URL anew for each call to iterator(). Since we have\n // to treat the downloaded file as a blob/buffer anyway, we may as well retain\n // it-- but that raises GC issues. 
Also we may want a persistent disk cache.\n async iterator() {\n if (isLocalPath(this.url)) {\n return (new FileDataSource(this.url, this.fileOptions))\n .iterator();\n }\n else {\n return urlChunkIterator(this.url, this.fileOptions);\n }\n }\n}\n//# sourceMappingURL=url_data_source.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { datasetFromIteratorFn } from './dataset';\nimport { CSVDataset } from './datasets/csv_dataset';\nimport { iteratorFromFunction } from './iterators/lazy_iterator';\nimport { MicrophoneIterator } from './iterators/microphone_iterator';\nimport { WebcamIterator } from './iterators/webcam_iterator';\nimport { URLDataSource } from './sources/url_data_source';\n/**\n * Create a `CSVDataset` by reading and decoding CSV file(s) from provided URL\n * or local path if it's in Node environment.\n *\n * Note: If isLabel in columnConfigs is `true` for at least one column, the\n * element in returned `CSVDataset` will be an object of\n * `{xs:features, ys:labels}`: xs is a dict of features key/value pairs, ys\n * is a dict of labels key/value pairs. If no column is marked as label,\n * returns a dict of features only.\n *\n * ```js\n * const csvUrl =\n * 'https://storage.googleapis.com/tfjs-examples/multivariate-linear-regression/data/boston-housing-train.csv';\n *\n * async function run() {\n * // We want to predict the column \"medv\", which represents a median value of\n * // a home (in $1000s), so we mark it as a label.\n * const csvDataset = tf.data.csv(\n * csvUrl, {\n * columnConfigs: {\n * medv: {\n * isLabel: true\n * }\n * }\n * });\n *\n * // Number of features is the number of column names minus one for the label\n * // column.\n * const numOfFeatures = (await csvDataset.columnNames()).length - 1;\n *\n * // Prepare the Dataset for training.\n * const flattenedDataset =\n * csvDataset\n * .map(({xs, ys}) =>\n * {\n * // Convert xs(features) and ys(labels) from object form (keyed by\n * // column name) to array form.\n * return {xs:Object.values(xs), ys:Object.values(ys)};\n * })\n * .batch(10);\n *\n * // Define the model.\n * const model = tf.sequential();\n * model.add(tf.layers.dense({\n * inputShape: [numOfFeatures],\n * units: 1\n * }));\n * model.compile({\n * optimizer: tf.train.sgd(0.000001),\n * loss: 'meanSquaredError'\n * });\n *\n * // Fit the model using the prepared Dataset\n * return model.fitDataset(flattenedDataset, {\n * epochs: 10,\n * callbacks: {\n * onEpochEnd: async (epoch, logs) => {\n * console.log(epoch + ':' + logs.loss);\n * }\n * }\n * });\n * }\n *\n * await run();\n * ```\n *\n * @param source URL or local path to get CSV file. 
If it's a local path, it\n * must have prefix `file://` and it only works in node environment.\n * @param csvConfig (Optional) A CSVConfig object that contains configurations\n * of reading and decoding from CSV file(s).\n *\n * @doc {\n * heading: 'Data',\n * subheading: 'Creation',\n * namespace: 'data',\n * configParamIndices: [1]\n * }\n */\nexport function csv(source, csvConfig = {}) {\n return new CSVDataset(new URLDataSource(source), csvConfig);\n}\n/**\n * Create a `Dataset` that produces each element by calling a provided function.\n *\n * Note that repeated iterations over this `Dataset` may produce different\n * results, because the function will be called anew for each element of each\n * iteration.\n *\n * Also, beware that the sequence of calls to this function may be out of order\n * in time with respect to the logical order of the Dataset. This is due to the\n * asynchronous lazy nature of stream processing, and depends on downstream\n * transformations (e.g. .shuffle()). If the provided function is pure, this is\n * no problem, but if it is a closure over a mutable state (e.g., a traversal\n * pointer), then the order of the produced elements may be scrambled.\n *\n * ```js\n * let i = -1;\n * const func = () =>\n * ++i < 5 ? {value: i, done: false} : {value: null, done: true};\n * const ds = tf.data.func(func);\n * await ds.forEachAsync(e => console.log(e));\n * ```\n *\n * @param f A function that produces one data element on each call.\n */\nexport function func(f) {\n const iter = iteratorFromFunction(f);\n return datasetFromIteratorFn(async () => iter);\n}\n/**\n * Create a `Dataset` that produces each element from provided JavaScript\n * generator, which is a function*\n * (https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Iterators_and_Generators#Generator_functions),\n * or a function that returns an\n * iterator\n * (https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Iterators_and_Generators#Generator_functions).\n *\n * The returned iterator should have `.next()` function that returns element in\n * format of `{value: TensorContainer, done:boolean}`.\n *\n * Example of creating a dataset from an iterator factory:\n * ```js\n * function makeIterator() {\n * const numElements = 10;\n * let index = 0;\n *\n * const iterator = {\n * next: () => {\n * let result;\n * if (index < numElements) {\n * result = {value: index, done: false};\n * index++;\n * return result;\n * }\n * return {value: index, done: true};\n * }\n * };\n * return iterator;\n * }\n * const ds = tf.data.generator(makeIterator);\n * await ds.forEachAsync(e => console.log(e));\n * ```\n *\n * Example of creating a dataset from a generator:\n * ```js\n * function* dataGenerator() {\n * const numElements = 10;\n * let index = 0;\n * while (index < numElements) {\n * const x = index;\n * index++;\n * yield x;\n * }\n * }\n *\n * const ds = tf.data.generator(dataGenerator);\n * await ds.forEachAsync(e => console.log(e));\n * ```\n *\n * @param generator A Javascript generator function that returns a JavaScript\n * iterator.\n *\n * @doc {\n * heading: 'Data',\n * subheading: 'Creation',\n * namespace: 'data',\n * configParamIndices: [1]\n * }\n */\nexport function generator(generator) {\n return datasetFromIteratorFn(async () => {\n const gen = await generator();\n return iteratorFromFunction(() => gen.next());\n });\n}\n/**\n * Create an iterator that generate `Tensor`s from webcam video stream. 
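One consequence of the two implementations above: `tf.data.generator` builds a fresh iterator for each pass, while `tf.data.func` reuses a single shared iterator. An illustrative sketch:

```js
import * as tf from '@tensorflow/tfjs';

// tf.data.generator calls the generator function anew per iteration, so the
// dataset can be traversed repeatedly (e.g. across training epochs).
function* counter() {
  for (let i = 0; i < 3; i++) yield i;
}
const repeatable = tf.data.generator(counter);
await repeatable.forEachAsync(x => console.log('pass 1:', x));  // 0 1 2
await repeatable.forEachAsync(x => console.log('pass 2:', x));  // 0 1 2 again

// tf.data.func wraps one shared iterator, so a second pass picks up where the
// first one stopped; here it is already exhausted.
let i = -1;
const once = tf.data.func(() =>
  ++i < 3 ? {value: i, done: false} : {value: null, done: true});
await once.forEachAsync(x => console.log('single pass:', x));   // 0 1 2
await once.forEachAsync(x => console.log('nothing left:', x));  // (no output)
```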
This API\n * only works in Browser environment when the device has webcam.\n *\n * Note: this code snippet only works when the device has a webcam. It will\n * request permission to open the webcam when running.\n * ```js\n * const videoElement = document.createElement('video');\n * videoElement.width = 100;\n * videoElement.height = 100;\n * const cam = await tf.data.webcam(videoElement);\n * const img = await cam.capture();\n * img.print();\n * cam.stop();\n * ```\n *\n * @param webcamVideoElement A `HTMLVideoElement` used to play video from\n * webcam. If this element is not provided, a hidden `HTMLVideoElement` will\n * be created. In that case, `resizeWidth` and `resizeHeight` must be\n * provided to set the generated tensor shape.\n * @param webcamConfig A `WebcamConfig` object that contains configurations of\n * reading and manipulating data from webcam video stream.\n *\n * @doc {\n * heading: 'Data',\n * subheading: 'Creation',\n * namespace: 'data',\n * ignoreCI: true\n * }\n */\nexport async function webcam(webcamVideoElement, webcamConfig) {\n return WebcamIterator.create(webcamVideoElement, webcamConfig);\n}\n/**\n * Create an iterator that generate frequency-domain spectrogram `Tensor`s from\n * microphone audio stream with browser's native FFT. This API only works in\n * browser environment when the device has microphone.\n *\n * Note: this code snippet only works when the device has a microphone. It will\n * request permission to open the microphone when running.\n * ```js\n * const mic = await tf.data.microphone({\n * fftSize: 1024,\n * columnTruncateLength: 232,\n * numFramesPerSpectrogram: 43,\n * sampleRateHz:44100,\n * includeSpectrogram: true,\n * includeWaveform: true\n * });\n * const audioData = await mic.capture();\n * const spectrogramTensor = audioData.spectrogram;\n * spectrogramTensor.print();\n * const waveformTensor = audioData.waveform;\n * waveformTensor.print();\n * mic.stop();\n * ```\n *\n * @param microphoneConfig A `MicrophoneConfig` object that contains\n * configurations of reading audio data from microphone.\n *\n * @doc {\n * heading: 'Data',\n * subheading: 'Creation',\n * namespace: 'data',\n * ignoreCI: true\n * }\n */\nexport async function microphone(microphoneConfig) {\n return MicrophoneIterator.create(microphoneConfig);\n}\n//# sourceMappingURL=readers.js.map", "/** @license See the LICENSE file. */\n// This code is auto-generated, do not modify this file!\nconst version = '2.7.0';\nexport { version };\n//# sourceMappingURL=version.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport { array, Dataset, zip } from './dataset';\nexport { CSVDataset } from './datasets/csv_dataset';\nexport { TextLineDataset } from './datasets/text_line_dataset';\nexport { csv, func, generator, microphone, webcam } from './readers';\nexport { FileDataSource } from './sources/file_data_source';\nexport { URLDataSource } from './sources/url_data_source';\nexport { version as version_data } from './version';\n//# sourceMappingURL=index.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nexport function assertNotComplex(tensor, opName) {\n if (!Array.isArray(tensor)) {\n tensor = [tensor];\n }\n tensor.forEach(t => {\n if (t != null) {\n util.assert(t.dtype !== 'complex64', () => `${opName} does not support complex64 tensors in the CPU backend.`);\n }\n });\n}\n//# sourceMappingURL=cpu_util.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as tf from '@tensorflow/tfjs-core';\nimport { backend_util, DataStorage, engine, env, kernel_impls, KernelBackend, max, slice_util, TensorBuffer, upcastType, util } from '@tensorflow/tfjs-core';\nconst nonMaxSuppressionV3Impl = kernel_impls.nonMaxSuppressionV3Impl;\nconst split = kernel_impls.split;\nconst tile = kernel_impls.tile;\nconst topkImpl = kernel_impls.topkImpl;\nconst whereImpl = kernel_impls.whereImpl;\nimport * as seedrandom from 'seedrandom';\nimport { assertNotComplex } from './cpu_util';\nexport class MathBackendCPU extends KernelBackend {\n constructor() {\n super();\n this.blockSize = 48;\n this.firstUse = true;\n this.data = new DataStorage(this, engine());\n }\n write(values, shape, dtype) {\n if (this.firstUse) {\n this.firstUse = false;\n if (env().get('IS_NODE')) {\n backend_util.warn('\\n============================\\n' +\n 'Hi there \uD83D\uDC4B. Looks like you are running TensorFlow.js in ' +\n 'Node.js. To speed things up dramatically, install our node ' +\n 'backend, which binds to TensorFlow C++, by running ' +\n 'npm i @tensorflow/tfjs-node, ' +\n 'or npm i @tensorflow/tfjs-node-gpu if you have CUDA. ' +\n 'Then call require(\\'@tensorflow/tfjs-node\\'); (-gpu ' +\n 'suffix for CUDA) at the start of your program. ' +\n 'Visit https://github.com/tensorflow/tfjs-node for more details.' +\n '\\n============================');\n }\n }\n const dataId = {};\n this.data.set(dataId, { values, dtype, refCount: 1 });\n return dataId;\n }\n /**\n * Create a data bucket in cpu backend.\n * @param shape Shape of the `TensorInfo`.\n * @param dtype DType of the `TensorInfo`.\n * @param values The value of the `TensorInfo` stored as a flattened array.\n */\n makeTensorInfo(shape, dtype, values) {\n let outId;\n if (dtype === 'string' && values != null && values.length > 0 &&\n util.isString(values[0])) {\n const encodedValues = values.map(d => util.encodeString(d));\n outId = this.write(encodedValues, shape, dtype);\n }\n else {\n outId = this.write(values, shape, dtype);\n }\n return { dataId: outId, shape, dtype };\n }\n /** Increase refCount of a `TensorData`. */\n incRef(dataId) {\n const tensorData = this.data.get(dataId);\n tensorData.refCount++;\n }\n /** Decrease refCount of a `TensorData`. 
*/\n decRef(dataId) {\n if (this.data.has(dataId)) {\n const tensorData = this.data.get(dataId);\n tensorData.refCount--;\n }\n }\n move(dataId, values, shape, dtype) {\n this.data.set(dataId, { values, dtype, refCount: 1 });\n }\n numDataIds() {\n return this.data.numDataIds();\n }\n async read(dataId) {\n return this.readSync(dataId);\n }\n readSync(dataId) {\n const { dtype, complexTensorInfos } = this.data.get(dataId);\n if (dtype === 'complex64') {\n const realValues = this.readSync(complexTensorInfos.real.dataId);\n const imagValues = this.readSync(complexTensorInfos.imag.dataId);\n return backend_util.mergeRealAndImagArrays(realValues, imagValues);\n }\n return this.data.get(dataId).values;\n }\n bufferSync(t) {\n const data = this.readSync(t.dataId);\n let decodedData = data;\n if (t.dtype === 'string') {\n try {\n // Decode the bytes into string.\n decodedData = data.map(d => util.decodeString(d));\n }\n catch (_a) {\n throw new Error('Failed to decode encoded string bytes into utf-8');\n }\n }\n return tf.buffer(t.shape, t.dtype, decodedData);\n }\n makeOutput(values, shape, dtype) {\n const dataId = this.write(values, shape, dtype);\n return engine().makeTensorFromDataId(dataId, shape, dtype, this);\n }\n disposeData(dataId) {\n if (this.data.has(dataId)) {\n const { complexTensorInfos } = this.data.get(dataId);\n if (complexTensorInfos != null) {\n this.disposeData(complexTensorInfos.real.dataId);\n this.disposeData(complexTensorInfos.imag.dataId);\n }\n this.data.delete(dataId);\n }\n }\n disposeIntermediateTensorInfo(tensorInfo) {\n const dataId = tensorInfo.dataId;\n if (this.data.has(dataId)) {\n const tensorData = this.data.get(dataId);\n tensorData.refCount--;\n if (tensorData.refCount < 1) {\n this.disposeData(dataId);\n }\n }\n }\n async time(f) {\n const start = util.now();\n f();\n const kernelMs = util.now() - start;\n return { kernelMs };\n }\n memory() {\n return {\n // Unreliable due to automatic gc. The numbers above are cumulative.\n unreliable: true,\n reasons: ['The reported memory is an upper bound. 
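The reference counting above (write sets refCount to 1, incRef/decRef adjust it, and the bucket is released once the count drops below 1) follows a common ownership pattern. The sketch below is generic and folds decRef and disposeIntermediateTensorInfo into one step for brevity; none of these names exist in the bundle:

```js
// Generic reference-counting store, illustrative only.
class DataStore {
  constructor() { this.data = new Map(); }
  write(values) {
    const dataId = {};                         // identity object, like the backend's dataId
    this.data.set(dataId, {values, refCount: 1});
    return dataId;
  }
  incRef(dataId) { this.data.get(dataId).refCount++; }
  decRef(dataId) {
    const entry = this.data.get(dataId);
    entry.refCount--;
    if (entry.refCount < 1) this.data.delete(dataId);  // free once unreferenced
  }
}

const store = new DataStore();
const id = store.write(new Float32Array([1, 2, 3]));
store.incRef(id);                  // two logical owners
store.decRef(id);                  // still alive
store.decRef(id);                  // freed
console.log(store.data.has(id));   // false
```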
Due to automatic garbage ' +\n 'collection, the true allocated memory may be less.']\n };\n }\n stridedSlice(x, begin, end, strides) {\n assertNotComplex(x, 'stridedSlice');\n const outShape = slice_util.computeOutShape(begin, end, strides);\n if (outShape.some(axis => axis === 0)) {\n return tf.tensor([], outShape);\n }\n const buffer = tf.buffer(outShape, x.dtype);\n const xBuf = this.bufferSync(x);\n for (let i = 0; i < buffer.size; i++) {\n const loc = buffer.indexToLoc(i);\n const newLoc = new Array(loc.length);\n for (let j = 0; j < newLoc.length; j++) {\n newLoc[j] = loc[j] * strides[j] + begin[j];\n }\n buffer.set(xBuf.get(...newLoc), ...loc);\n }\n return buffer.toTensor();\n }\n diag(x) {\n const xVals = this.readSync(x.dataId);\n const buffer = tf.buffer([x.size, x.size], x.dtype);\n const vals = buffer.values;\n for (let i = 0; i < xVals.length; i++) {\n vals[i * x.size + i] = xVals[i];\n }\n return buffer.toTensor();\n }\n unstack(x, axis) {\n const num = x.shape[axis];\n const outShape = new Array(x.rank - 1);\n let outIndex = 0;\n for (let i = 0; i < x.rank; i++) {\n if (i !== axis) {\n outShape[outIndex++] = x.shape[i];\n }\n }\n const begin = new Array(x.rank).fill(0);\n const size = x.shape.slice();\n size[axis] = 1;\n const res = new Array(num);\n for (let i = 0; i < res.length; i++) {\n begin[axis] = i;\n res[i] = tf.slice(x, begin, size).reshape(outShape);\n }\n return res;\n }\n reverse(x, axis) {\n assertNotComplex(x, 'reverse');\n const buffer = tf.buffer(x.shape, x.dtype);\n const xBuf = this.bufferSync(x);\n for (let i = 0; i < buffer.size; i++) {\n const outLoc = buffer.indexToLoc(i);\n const inLoc = outLoc.slice();\n axis.forEach(ax => inLoc[ax] = x.shape[ax] - 1 - inLoc[ax]);\n buffer.set(xBuf.get(...inLoc), ...outLoc);\n }\n return buffer.toTensor();\n }\n neg(x) {\n assertNotComplex(x, 'neg');\n // TODO(lina128): Use mul directly once neg is modularized.\n return tf.mul(tf.scalar(-1), x);\n }\n addN(tensors) {\n assertNotComplex(tensors, 'addN');\n const vals = tensors.map(t => this.readSync(t.dataId));\n const result = tf.buffer(tensors[0].shape, tensors[0].dtype);\n const resultVals = result.values;\n for (let i = 0; i < tensors.length; i++) {\n const currVals = vals[i];\n for (let j = 0; j < resultVals.length; j++) {\n resultVals[j] += currVals[j];\n }\n }\n return result.toTensor();\n }\n softmax(logits, dim) {\n const axes = util.parseAxisParam([dim], logits.shape);\n // TODO(annxingyuan): Call maxImpl rather than op as part of softmax kernel\n // modularization.\n const maxLogit = max(logits, axes);\n const expandedShape = backend_util.expandShapeToKeepDim(maxLogit.shape, axes);\n // TODO(lina128): Use sub directly once softmax is modularized.\n const a = tf.sub(logits, maxLogit.reshape(expandedShape));\n const b = tf.exp(a);\n const sumExp = this.sum(b, axes).reshape(expandedShape);\n // TODO(annxingyuan): Call divImpl rather than op as part of softmax\n // kernel modularization.\n return tf.div(b, sumExp);\n }\n pow(a, b) {\n assertNotComplex([a, b], 'pow');\n return this.broadcastedBinaryOp(a, b, a.dtype, (aValue, bValue) => Math.pow(aValue, bValue));\n }\n floorDiv(a, b) {\n assertNotComplex([a, b], 'floorDiv');\n const op = (a, b) => Math.floor(a / b);\n const outputDtype = 'int32';\n return this.broadcastedBinaryOp(a, b, outputDtype, op);\n }\n sum(x, axes) {\n assertNotComplex(x, 'sum');\n backend_util.assertAxesAreInnerMostDims('sum', axes, x.rank);\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const 
resultDtype = upcastType(x.dtype, 'int32');\n const result = tf.zeros(outShape, resultDtype);\n const reduceSize = util.sizeFromShape(reduceShape);\n const vals = this.readSync(result.dataId);\n const aVals = this.readSync(x.dataId);\n for (let i = 0; i < vals.length; ++i) {\n const offset = i * reduceSize;\n let sum = 0;\n for (let j = 0; j < reduceSize; ++j) {\n sum += aVals[offset + j];\n }\n vals[i] = sum;\n }\n return result;\n }\n prod(x, axes) {\n assertNotComplex(x, 'sum');\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const resultDtype = upcastType(x.dtype, 'int32');\n const result = tf.zeros(outShape, resultDtype);\n const reduceSize = util.sizeFromShape(reduceShape);\n const vals = this.readSync(result.dataId);\n const aVals = this.readSync(x.dataId);\n for (let i = 0; i < vals.length; ++i) {\n const offset = i * reduceSize;\n let prod = 1;\n for (let j = 0; j < reduceSize; ++j) {\n prod *= aVals[offset + j];\n }\n vals[i] = prod;\n }\n return result;\n }\n unsortedSegmentSum(x, segmentIds, numSegments) {\n assertNotComplex(x, 'unsortedSegmentSum');\n const res = [];\n // Reshape the segment id's so that they can be broadcast with\n // x. The new shape should be [segmentIds.shape, 1, ..., 1]\n const numIters = x.rank - segmentIds.rank;\n for (let i = 0; i < numIters; ++i) {\n segmentIds = segmentIds.expandDims(i + 1);\n }\n for (let i = 0; i < numSegments; ++i) {\n const segmentId = tf.scalar(i, 'int32');\n const mask = tf.equal(segmentId, segmentIds).asType('float32');\n const sum = mask.mul(x).sum(0);\n res.push(sum);\n }\n return tf.stack(res);\n }\n argMin(x, axis) {\n assertNotComplex(x, 'argMin');\n const axes = [axis];\n backend_util.assertAxesAreInnerMostDims('argMin', axes, x.rank);\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const result = tf.zeros(outShape, 'int32');\n const reduceSize = util.sizeFromShape(reduceShape);\n const vals = this.readSync(result.dataId);\n const aVals = this.readSync(x.dataId);\n for (let i = 0; i < vals.length; ++i) {\n const offset = i * reduceSize;\n let min = aVals[offset];\n let minIndex = 0;\n for (let j = 0; j < reduceSize; ++j) {\n const value = aVals[offset + j];\n if (value < min) {\n min = value;\n minIndex = j;\n }\n }\n vals[i] = minIndex;\n }\n return result;\n }\n argMax(x, axis) {\n assertNotComplex(x, 'argMax');\n const axes = [axis];\n backend_util.assertAxesAreInnerMostDims('argMax', axes, x.rank);\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const result = tf.zeros(outShape, 'int32');\n const reduceSize = util.sizeFromShape(reduceShape);\n const vals = this.readSync(result.dataId);\n const aVals = this.readSync(x.dataId);\n for (let i = 0; i < vals.length; ++i) {\n const offset = i * reduceSize;\n let max = aVals[offset];\n let maxIndex = 0;\n for (let j = 0; j < reduceSize; ++j) {\n const value = aVals[offset + j];\n if (value > max) {\n max = value;\n maxIndex = j;\n }\n }\n vals[i] = maxIndex;\n }\n return result;\n }\n cumsum(x, axis, exclusive, reverse) {\n assertNotComplex(x, 'cumsum');\n if (axis !== x.rank - 1) {\n throw new Error(`backend.cumsum in CPU expects an inner-most axis=${x.rank - 1} ` +\n `but got axis=${axis}`);\n }\n const resultDtype = upcastType(x.dtype, 'int32');\n const result = tf.zeros(x.shape, resultDtype);\n const vals = this.readSync(result.dataId);\n const aVals = this.readSync(x.dataId);\n const finalDim = x.shape[x.rank - 1];\n const indexAdjuster = 
reverse ?\n (i, j) => i + finalDim - j - 1 :\n (i, j) => i + j;\n for (let i = 0; i < aVals.length; i += finalDim) {\n for (let j = 0; j < finalDim; j++) {\n const idx = indexAdjuster(i, j);\n if (j === 0) {\n vals[idx] = exclusive ? 0 : aVals[idx];\n }\n else {\n const prevIdx = indexAdjuster(i, j - 1);\n vals[idx] = exclusive ? aVals[prevIdx] + vals[prevIdx] :\n aVals[idx] + vals[prevIdx];\n }\n }\n }\n return result;\n }\n equal(a, b) {\n assertNotComplex([a, b], 'equal');\n return this.broadcastedBinaryOp(a, b, 'bool', (aVal, bVal) => {\n return (aVal === bVal) ? 1 : 0;\n });\n }\n notEqual(a, b) {\n assertNotComplex([a, b], 'notEqual');\n return this.broadcastedBinaryOp(a, b, 'bool', (aVal, bVal) => {\n return (aVal !== bVal) ? 1 : 0;\n });\n }\n less(a, b) {\n assertNotComplex([a, b], 'less');\n return this.broadcastedBinaryOp(a, b, 'bool', (aVal, bVal) => {\n return (aVal < bVal) ? 1 : 0;\n });\n }\n lessEqual(a, b) {\n assertNotComplex([a, b], 'lessEqual');\n return this.broadcastedBinaryOp(a, b, 'bool', (aVal, bVal) => {\n return (aVal <= bVal) ? 1 : 0;\n });\n }\n greater(a, b) {\n assertNotComplex([a, b], 'greater');\n return this.broadcastedBinaryOp(a, b, 'bool', (aVal, bVal) => {\n return (aVal > bVal) ? 1 : 0;\n });\n }\n greaterEqual(a, b) {\n assertNotComplex([a, b], 'greaterEqual');\n return this.broadcastedBinaryOp(a, b, 'bool', (aVal, bVal) => {\n return (aVal >= bVal) ? 1 : 0;\n });\n }\n logicalAnd(a, b) {\n assertNotComplex([a, b], 'logicalAnd');\n return this.broadcastedBinaryOp(a, b, 'bool', (aVal, bVal) => {\n return aVal && bVal;\n });\n }\n logicalOr(a, b) {\n assertNotComplex([a, b], 'logicalOr');\n return this.broadcastedBinaryOp(a, b, 'bool', (aVal, bVal) => {\n return aVal || bVal;\n });\n }\n select(condition, a, b) {\n assertNotComplex([condition, a, b], 'select');\n const values = this.readSync(condition.dataId);\n const aValues = this.readSync(a.dataId);\n const bValues = this.readSync(b.dataId);\n const result = tf.zeros(a.shape, upcastType(a.dtype, b.dtype));\n const newValues = this.readSync(result.dataId);\n let index = 0;\n const offset = condition.rank === 0 || condition.rank > 1 || a.rank === 1 ?\n 1 :\n util.sizeFromShape(a.shape.slice(1));\n for (let i = 0; i < values.length; i++) {\n for (let j = 0; j < offset; j++) {\n if (values[i] === 1) {\n newValues[index++] = aValues[i];\n }\n else {\n newValues[index++] = bValues[i];\n }\n }\n }\n return result;\n }\n where(condition) {\n assertNotComplex([condition], 'where');\n const condVals = this.readSync(condition.dataId);\n return whereImpl(condition.shape, condVals);\n }\n topk(x, k, sorted) {\n assertNotComplex(x, 'topk');\n const xVals = this.readSync(x.dataId);\n return topkImpl(xVals, x.shape, x.dtype, k, sorted);\n }\n min(x, axes) {\n assertNotComplex(x, 'min');\n backend_util.assertAxesAreInnerMostDims('min', axes, x.rank);\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const result = tf.zeros(outShape, x.dtype);\n const reduceSize = util.sizeFromShape(reduceShape);\n const vals = this.readSync(result.dataId);\n const aVals = this.readSync(x.dataId);\n for (let i = 0; i < vals.length; ++i) {\n const offset = i * reduceSize;\n let min = aVals[offset];\n for (let j = 0; j < reduceSize; ++j) {\n const value = aVals[offset + j];\n if (value < min) {\n min = value;\n }\n }\n vals[i] = min;\n }\n return result;\n }\n minimum(a, b) {\n assertNotComplex([a, b], 'minimum');\n return this.broadcastedBinaryOp(a, b, a.dtype, (aVal, bVal) => Math.min(aVal, 
bVal));\n }\n mod(a, b) {\n assertNotComplex([a, b], 'mod');\n return this.broadcastedBinaryOp(a, b, a.dtype, (aVal, bVal) => {\n const rem = aVal % bVal;\n if ((aVal < 0 && bVal < 0) || (aVal >= 0 && bVal >= 0)) {\n return rem;\n }\n else {\n return (rem + bVal) % bVal;\n }\n });\n }\n maximum(a, b) {\n assertNotComplex([a, b], 'maximum');\n return this.broadcastedBinaryOp(a, b, a.dtype, (aVal, bVal) => Math.max(aVal, bVal));\n }\n all(x, axes) {\n assertNotComplex(x, 'all');\n backend_util.assertAxesAreInnerMostDims('all', axes, x.rank);\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const result = tf.zeros(outShape, x.dtype);\n const reduceSize = util.sizeFromShape(reduceShape);\n const vals = this.readSync(result.dataId);\n const aVals = this.readSync(x.dataId);\n for (let i = 0; i < vals.length; ++i) {\n const offset = i * reduceSize;\n let all = aVals[offset];\n for (let j = 0; j < reduceSize; ++j) {\n const value = aVals[offset + j];\n all = all && value;\n }\n vals[i] = all;\n }\n return result;\n }\n any(x, axes) {\n assertNotComplex(x, 'any');\n backend_util.assertAxesAreInnerMostDims('any', axes, x.rank);\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const result = tf.zeros(outShape, x.dtype);\n const reduceSize = util.sizeFromShape(reduceShape);\n const vals = this.readSync(result.dataId);\n const aVals = this.readSync(x.dataId);\n for (let i = 0; i < vals.length; ++i) {\n const offset = i * reduceSize;\n let anyVal = aVals[offset];\n for (let j = 0; j < reduceSize; ++j) {\n const value = aVals[offset + j];\n anyVal = anyVal || value;\n }\n vals[i] = anyVal;\n }\n return result;\n }\n squaredDifference(a, b) {\n assertNotComplex([a, b], 'squaredDifference');\n return this.broadcastedBinaryOp(a, b, a.dtype, (aVal, bVal) => {\n const diff = aVal - bVal;\n return diff * diff;\n });\n }\n eluDer(dy, y) {\n assertNotComplex([dy, y], 'eluDer');\n const resultValues = new Float32Array(y.size);\n const values = this.readSync(y.dataId);\n const dyValues = this.readSync(dy.dataId);\n for (let i = 0; i < values.length; ++i) {\n const v = values[i];\n if (v >= 1) {\n resultValues[i] = dyValues[i];\n }\n else {\n resultValues[i] = dyValues[i] * (v + 1);\n }\n }\n return this.makeOutput(resultValues, y.shape, 'float32');\n }\n atan2(a, b) {\n assertNotComplex([a, b], 'atan2');\n return this.broadcastedBinaryOp(a, b, a.dtype, (aValue, bValue) => Math.atan2(aValue, bValue));\n }\n tile(x, reps) {\n assertNotComplex(x, 'tile');\n return tile(this.bufferSync(x), reps);\n }\n gather(x, indices, axis) {\n assertNotComplex([x, indices], 'gather');\n const newShape = x.shape.slice();\n const indicesValues = this.readSync(indices.dataId);\n newShape[axis] = indicesValues.length;\n const result = tf.buffer(newShape, x.dtype);\n const xBuf = this.bufferSync(x);\n for (let i = 0; i < result.size; ++i) {\n const newLoc = result.indexToLoc(i);\n const originalLoc = newLoc.slice();\n originalLoc[axis] = indicesValues[newLoc[axis]];\n const originalIndex = xBuf.locToIndex(originalLoc);\n result.values[i] = xBuf.values[originalIndex];\n }\n return result.toTensor();\n }\n batchToSpaceND(x, blockShape, crops) {\n assertNotComplex([x], 'batchToSpaceND');\n const prod = blockShape.reduce((a, b) => a * b);\n const reshaped = backend_util.getReshaped(x.shape, blockShape, prod);\n const permuted = backend_util.getPermuted(reshaped.length, blockShape.length);\n const reshapedPermuted = backend_util.getReshapedPermuted(x.shape, 
blockShape, prod);\n const sliceBeginCoords = backend_util.getSliceBeginCoords(crops, blockShape.length);\n const sliceSize = backend_util.getSliceSize(reshapedPermuted, crops, blockShape.length);\n return tf.transpose(x.reshape(reshaped), permuted)\n .reshape(reshapedPermuted)\n .slice(sliceBeginCoords, sliceSize);\n }\n pool3d(x, convInfo, poolType) {\n assertNotComplex(x, 'pool3d');\n const strideDepth = convInfo.strideDepth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationDepth = convInfo.dilationDepth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterDepth = convInfo.effectiveFilterDepth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padFront = convInfo.padInfo.front;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n const initialValue = (poolType === 'max' ? Number.NEGATIVE_INFINITY :\n Number.POSITIVE_INFINITY);\n const xValues = this.readSync(x.dataId);\n const output = tf.buffer(convInfo.outShape, x.dtype);\n const outputVals = output.values;\n const outputBatchStrides = convInfo.outShape[1] * convInfo.outShape[2] *\n convInfo.outShape[3] * convInfo.outShape[4];\n const outputDepthStrides = convInfo.outShape[2] * convInfo.outShape[3] * convInfo.outShape[4];\n const outputRowStrides = convInfo.outShape[3] * convInfo.outShape[4];\n const outputColStrides = convInfo.outShape[4];\n for (let batch = 0; batch < convInfo.batchSize; ++batch) {\n const outputBatchOffset = batch * outputBatchStrides;\n const inputBatchOffset = batch * x.strides[0];\n for (let channel = 0; channel < convInfo.inChannels; ++channel) {\n for (let yDepth = 0; yDepth < convInfo.outDepth; ++yDepth) {\n const xDepthCorner = yDepth * strideDepth - padFront;\n let xDepthMin = xDepthCorner;\n while (xDepthMin < 0) {\n xDepthMin += dilationDepth;\n }\n const xDepthMax = Math.min(convInfo.inDepth, effectiveFilterDepth + xDepthCorner);\n const outputDepthOffset = outputBatchOffset + yDepth * outputDepthStrides;\n for (let yRow = 0; yRow < convInfo.outHeight; ++yRow) {\n const xRowCorner = yRow * strideHeight - padTop;\n let xRowMin = xRowCorner;\n while (xRowMin < 0) {\n xRowMin += dilationHeight;\n }\n const xRowMax = Math.min(convInfo.inHeight, effectiveFilterHeight + xRowCorner);\n const outputRowOffset = outputDepthOffset + yRow * outputRowStrides;\n for (let yCol = 0; yCol < convInfo.outWidth; ++yCol) {\n const xColCorner = yCol * strideWidth - padLeft;\n let xColMin = xColCorner;\n while (xColMin < 0) {\n xColMin += dilationWidth;\n }\n const xColMax = Math.min(convInfo.inWidth, effectiveFilterWidth + xColCorner);\n // Shader code begins\n const outputColOffset = outputRowOffset + yCol * outputColStrides;\n let minMaxValue = initialValue;\n let avgValue = 0;\n let count = 0;\n for (let xDepth = xDepthMin; xDepth < xDepthMax; xDepth += dilationDepth) {\n const xDepthOffset = inputBatchOffset + xDepth * x.strides[1];\n for (let xRow = xRowMin; xRow < xRowMax; xRow += dilationHeight) {\n const xRowOffset = xDepthOffset + xRow * x.strides[2];\n for (let xCol = xColMin; xCol < xColMax; xCol += dilationWidth) {\n const xColOffset = xRowOffset + xCol * x.strides[3];\n const pixel = xValues[xColOffset + channel];\n if ((poolType === 'max' && pixel > minMaxValue)) {\n minMaxValue = pixel;\n }\n else if (poolType === 'avg') {\n avgValue += pixel;\n count++;\n }\n if 
(isNaN(minMaxValue)) {\n break;\n }\n }\n if (isNaN(minMaxValue)) {\n break;\n }\n }\n if (isNaN(minMaxValue)) {\n break;\n }\n }\n const outputOffset = outputColOffset + channel;\n outputVals[outputOffset] =\n poolType === 'avg' ? avgValue / count : minMaxValue;\n }\n }\n }\n }\n }\n return output.toTensor();\n }\n avgPool3d(x, convInfo) {\n assertNotComplex(x, 'avgPool3d');\n return this.pool3d(x, convInfo, 'avg').toFloat();\n }\n avgPool3dBackprop(dy, x, convInfo) {\n assertNotComplex([dy, x], 'avgPool3dBackprop');\n const strideDepth = convInfo.strideDepth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const filterDepth = convInfo.filterDepth;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const dilationDepth = convInfo.dilationDepth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterDepth = convInfo.effectiveFilterDepth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padFront = effectiveFilterDepth - 1 - convInfo.padInfo.front;\n const padLeft = effectiveFilterWidth - 1 - convInfo.padInfo.left;\n const padTop = effectiveFilterHeight - 1 - convInfo.padInfo.top;\n const dx = tf.buffer(x.shape, 'float32');\n const avgMultiplier = 1 / (filterDepth * filterHeight * filterWidth);\n const dyBuf = this.bufferSync(dy);\n for (let batch = 0; batch < convInfo.batchSize; ++batch) {\n for (let channel = 0; channel < convInfo.inChannels; ++channel) {\n for (let dxDepth = 0; dxDepth < convInfo.inDepth; ++dxDepth) {\n for (let dxRow = 0; dxRow < convInfo.inHeight; ++dxRow) {\n for (let dxCol = 0; dxCol < convInfo.inWidth; ++dxCol) {\n // Shader code begins.\n const dyDepthCorner = dxDepth - padFront;\n const dyRowCorner = dxRow - padTop;\n const dyColCorner = dxCol - padLeft;\n let dotProd = 0;\n for (let wDepth = 0; wDepth < effectiveFilterDepth; wDepth += dilationDepth) {\n const dyDepth = (dyDepthCorner + wDepth) / strideDepth;\n if (dyDepth < 0 || dyDepth >= convInfo.outDepth ||\n Math.floor(dyDepth) !== dyDepth) {\n continue;\n }\n for (let wRow = 0; wRow < effectiveFilterHeight; wRow += dilationHeight) {\n const dyRow = (dyRowCorner + wRow) / strideHeight;\n if (dyRow < 0 || dyRow >= convInfo.outHeight ||\n Math.floor(dyRow) !== dyRow) {\n continue;\n }\n for (let wCol = 0; wCol < effectiveFilterWidth; wCol += dilationWidth) {\n const dyCol = (dyColCorner + wCol) / strideWidth;\n if (dyCol < 0 || dyCol >= convInfo.outWidth ||\n Math.floor(dyCol) !== dyCol) {\n continue;\n }\n const pixel = dyBuf.get(batch, dyDepth, dyRow, dyCol, channel);\n dotProd += pixel;\n }\n }\n }\n dx.set(dotProd * avgMultiplier, batch, dxDepth, dxRow, dxCol, channel);\n }\n }\n }\n }\n }\n return dx.toTensor();\n }\n maxPool3d(x, convInfo) {\n assertNotComplex(x, 'maxPool3d');\n return this.pool3d(x, convInfo, 'max').toFloat();\n }\n maxPool3dPositions(x, convInfo) {\n const maxPositions = tf.buffer(convInfo.outShape, 'int32');\n const strideDepth = convInfo.strideDepth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationDepth = convInfo.dilationDepth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterDepth = convInfo.effectiveFilterDepth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = 
convInfo.effectiveFilterWidth;\n const padFront = convInfo.padInfo.front;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n const xBuf = this.bufferSync(x);\n for (let batch = 0; batch < convInfo.batchSize; ++batch) {\n for (let channel = 0; channel < convInfo.inChannels; ++channel) {\n for (let yDepth = 0; yDepth < convInfo.outDepth; ++yDepth) {\n const xDepthCorner = yDepth * strideDepth - padFront;\n let xDepthMin = xDepthCorner;\n while (xDepthMin < 0) {\n xDepthMin += dilationDepth;\n }\n const xDepthMax = Math.min(convInfo.inDepth, effectiveFilterDepth + xDepthCorner);\n for (let yRow = 0; yRow < convInfo.outHeight; ++yRow) {\n const xRowCorner = yRow * strideHeight - padTop;\n let xRowMin = xRowCorner;\n while (xRowMin < 0) {\n xRowMin += dilationHeight;\n }\n const xRowMax = Math.min(convInfo.inHeight, effectiveFilterHeight + xRowCorner);\n for (let yCol = 0; yCol < convInfo.outWidth; ++yCol) {\n const xColCorner = yCol * strideWidth - padLeft;\n let xColMin = xColCorner;\n while (xColMin < 0) {\n xColMin += dilationWidth;\n }\n const xColMax = Math.min(convInfo.inWidth, effectiveFilterWidth + xColCorner);\n // Shader code begins\n let maxValue = Number.NEGATIVE_INFINITY;\n let maxPosition = -1;\n for (let xDepth = xDepthMin; xDepth < xDepthMax; xDepth += dilationDepth) {\n const wDepth = xDepth - xDepthCorner;\n for (let xRow = xRowMin; xRow < xRowMax; xRow += dilationHeight) {\n const wRow = xRow - xRowCorner;\n for (let xCol = xColMin; xCol < xColMax; xCol += dilationWidth) {\n const wCol = xCol - xColCorner;\n const pixel = xBuf.get(batch, xDepth, xRow, xCol, channel);\n if (pixel >= maxValue) {\n maxValue = pixel;\n maxPosition = wDepth * effectiveFilterHeight *\n effectiveFilterWidth +\n wRow * effectiveFilterHeight + wCol;\n }\n }\n }\n }\n maxPositions.set(maxPosition, batch, yDepth, yRow, yCol, channel);\n }\n }\n }\n }\n }\n return maxPositions.toTensor();\n }\n maxPool3dBackprop(dy, x, y, convInfo) {\n assertNotComplex([x, y], 'maxPool3dBackprop');\n const maxPositions = this.maxPool3dPositions(x, convInfo);\n const strideDepth = convInfo.strideDepth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationDepth = convInfo.dilationDepth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterDepth = convInfo.effectiveFilterDepth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padFront = effectiveFilterDepth - 1 - convInfo.padInfo.front;\n const padLeft = effectiveFilterWidth - 1 - convInfo.padInfo.left;\n const padTop = effectiveFilterHeight - 1 - convInfo.padInfo.top;\n const dx = tf.buffer(x.shape, 'float32');\n const maxPosBuf = this.bufferSync(maxPositions);\n const dyBuf = this.bufferSync(dy);\n for (let batch = 0; batch < convInfo.batchSize; ++batch) {\n for (let channel = 0; channel < convInfo.inChannels; ++channel) {\n for (let dxDepth = 0; dxDepth < convInfo.inDepth; ++dxDepth) {\n for (let dxRow = 0; dxRow < convInfo.inHeight; ++dxRow) {\n for (let dxCol = 0; dxCol < convInfo.inWidth; ++dxCol) {\n // Shader code begins\n const dyDepthCorner = dxDepth - padFront;\n const dyRowCorner = dxRow - padTop;\n const dyColCorner = dxCol - padLeft;\n let dotProd = 0;\n for (let wDepth = 0; wDepth < effectiveFilterDepth; wDepth += dilationDepth) {\n const dyDepth = (dyDepthCorner + wDepth) / strideDepth;\n if (dyDepth < 0 || dyDepth >= 
convInfo.outDepth ||\n Math.floor(dyDepth) !== dyDepth) {\n continue;\n }\n for (let wRow = 0; wRow < effectiveFilterHeight; wRow += dilationHeight) {\n const dyRow = (dyRowCorner + wRow) / strideHeight;\n if (dyRow < 0 || dyRow >= convInfo.outHeight ||\n Math.floor(dyRow) !== dyRow) {\n continue;\n }\n for (let wCol = 0; wCol < effectiveFilterWidth; wCol += dilationWidth) {\n const dyCol = (dyColCorner + wCol) / strideWidth;\n if (dyCol < 0 || dyCol >= convInfo.outWidth ||\n Math.floor(dyCol) !== dyCol) {\n continue;\n }\n const maxPos = effectiveFilterDepth *\n effectiveFilterHeight * effectiveFilterWidth -\n 1 -\n maxPosBuf.get(batch, dyDepth, dyRow, dyCol, channel);\n const curPos = wDepth * effectiveFilterHeight * effectiveFilterWidth +\n wRow * effectiveFilterWidth + wCol;\n const mask = maxPos === curPos ? 1 : 0;\n if (mask === 0) {\n continue;\n }\n const pixel = dyBuf.get(batch, dyDepth, dyRow, dyCol, channel);\n dotProd += pixel * mask;\n }\n }\n }\n dx.set(dotProd, batch, dxDepth, dxRow, dxCol, channel);\n }\n }\n }\n }\n }\n return dx.toTensor();\n }\n resizeBilinear(x, newHeight, newWidth, alignCorners) {\n assertNotComplex(x, 'resizeBilinear');\n const [batch, oldHeight, oldWidth, numChannels] = x.shape;\n const xValues = this.readSync(x.dataId);\n const result = new Float32Array(util.sizeFromShape([batch, newHeight, newWidth, numChannels]));\n const effectiveInputSize = [\n (alignCorners && newHeight > 1) ? oldHeight - 1 : oldHeight,\n (alignCorners && newWidth > 1) ? oldWidth - 1 : oldWidth\n ];\n const effectiveOutputSize = [\n (alignCorners && newHeight > 1) ? newHeight - 1 : newHeight,\n (alignCorners && newWidth > 1) ? newWidth - 1 : newWidth\n ];\n let outputIdx = 0;\n const effectiveRowSizeRatio = effectiveInputSize[0] / effectiveOutputSize[0];\n const effectiveColSizeRatio = effectiveInputSize[1] / effectiveOutputSize[1];\n for (let b = 0; b < batch; b++) {\n for (let r = 0; r < newHeight; r++) {\n const sourceFracRow = effectiveRowSizeRatio * r;\n const sourceRowFloor = Math.floor(sourceFracRow);\n const rowFrac = sourceFracRow - sourceRowFloor;\n const sourceRowCeil = Math.min(oldHeight - 1, Math.ceil(sourceFracRow));\n const topRowOffset = b * x.strides[0] + sourceRowFloor * x.strides[1];\n const botRowOffset = b * x.strides[0] + sourceRowCeil * x.strides[1];\n for (let c = 0; c < newWidth; c++) {\n const sourceFracCol = effectiveColSizeRatio * c;\n const sourceColFloor = Math.floor(sourceFracCol);\n const colFrac = sourceFracCol - sourceColFloor;\n const sourceColCeil = Math.min(oldWidth - 1, Math.ceil(sourceFracCol));\n const topLeftOffest = topRowOffset + sourceColFloor * x.strides[2];\n const botLeftOffset = botRowOffset + sourceColFloor * x.strides[2];\n const topRightOffset = topRowOffset + sourceColCeil * x.strides[2];\n const botRightOffest = botRowOffset + sourceColCeil * x.strides[2];\n for (let d = 0; d < numChannels; d++) {\n // Begin shader.\n // Compute the fractional index of the source.\n const topLeft = xValues[topLeftOffest + d];\n const bottomLeft = xValues[botLeftOffset + d];\n const topRight = xValues[topRightOffset + d];\n const bottomRight = xValues[botRightOffest + d];\n const top = topLeft + (topRight - topLeft) * colFrac;\n const bottom = bottomLeft + (bottomRight - bottomLeft) * colFrac;\n const newValue = top + (bottom - top) * rowFrac;\n result[outputIdx++] = newValue;\n }\n }\n }\n }\n return tf.tensor(result, [batch, newHeight, newWidth, numChannels]);\n }\n resizeBilinearBackprop(dy, x, alignCorners) {\n assertNotComplex([dy, x], 
'resizeBilinearBackprop');\n const [batch, xHeight, xWidth, depth] = x.shape;\n const [, yHeight, yWidth] = dy.shape;\n const output = new Float32Array(batch * xHeight * xWidth * depth);\n // In the backwards pass, we want to find the pixels that were generated\n // for each pixel in the input image the forward pass and add the\n // corresponding coefficient from dy to the gradient (with some\n // interpolation).\n const effectiveXSize = [\n (alignCorners && yHeight > 1) ? xHeight - 1 : xHeight,\n (alignCorners && yWidth > 1) ? xWidth - 1 : xWidth\n ];\n const effectiveYSize = [\n (alignCorners && yHeight > 1) ? yHeight - 1 : yHeight,\n (alignCorners && yWidth > 1) ? yWidth - 1 : yWidth\n ];\n const heightScale = effectiveXSize[0] / effectiveYSize[0];\n const widthScale = effectiveXSize[1] / effectiveYSize[1];\n // Reference implementation\n // tslint:disable-next-line:max-line-length\n // https://github.com/tensorflow/tensorflow/blob/3039375c86a5bbc9610c7725dcaa95d635f87ba2/tensorflow/core/kernels/resize_bilinear_op.cc#L275\n const dyValues = this.readSync(dy.dataId);\n let offset = 0;\n for (let b = 0; b < batch; b++) {\n const bOffset = b * x.strides[0];\n for (let r = 0; r < yHeight; r++) {\n const dxR = r * heightScale;\n const topDxRIndex = Math.floor(dxR);\n const bottomDxRIndex = Math.min(Math.ceil(dxR), xHeight - 1);\n const topDxROffset = bOffset + topDxRIndex * x.strides[1];\n const bottomDxROffset = bOffset + bottomDxRIndex * x.strides[1];\n const dxRLerp = dxR - topDxRIndex;\n const inverseDxRLerp = 1.0 - dxRLerp;\n for (let c = 0; c < yWidth; c++) {\n const dxC = c * widthScale;\n const leftDxCIndex = Math.floor(dxC);\n const rightDxCIndex = Math.min(Math.ceil(dxC), xWidth - 1);\n const dxCLerp = dxC - leftDxCIndex;\n const inverseDxCLerp = 1.0 - dxCLerp;\n const topLeftRCOffset = topDxROffset + leftDxCIndex * x.strides[2];\n const topRightRCOffset = topDxROffset + rightDxCIndex * x.strides[2];\n const bottomLeftRCOffset = bottomDxROffset + leftDxCIndex * x.strides[2];\n const bottomRightRCOffset = bottomDxROffset + rightDxCIndex * x.strides[2];\n const inverseDxRLerpTimesInverseDxCLerp = inverseDxRLerp * inverseDxCLerp;\n const inverseDxRLerpTimesDxCLerp = inverseDxRLerp * dxCLerp;\n const dxRLerpTimesInverseDxCLerp = dxRLerp * inverseDxCLerp;\n const dxRLerpTimesDxCLerp = dxRLerp * dxCLerp;\n for (let d = 0; d < depth; d++) {\n const dyVal = dyValues[offset++];\n output[topLeftRCOffset + d] +=\n dyVal * inverseDxRLerpTimesInverseDxCLerp;\n output[topRightRCOffset + d] += dyVal * inverseDxRLerpTimesDxCLerp;\n output[bottomLeftRCOffset + d] +=\n dyVal * dxRLerpTimesInverseDxCLerp;\n output[bottomRightRCOffset + d] += dyVal * dxRLerpTimesDxCLerp;\n }\n }\n }\n }\n return tf.tensor4d(output, [batch, xWidth, xHeight, depth], x.dtype);\n }\n resizeNearestNeighbor(x, newHeight, newWidth, alignCorners) {\n assertNotComplex(x, 'resizeNearestNeighbor');\n const [batch, oldHeight, oldWidth, numChannels] = x.shape;\n const xValues = this.readSync(x.dataId);\n const output = new Float32Array(batch * newHeight * newWidth * numChannels);\n const effectiveInputSize = [\n (alignCorners && newHeight > 1) ? oldHeight - 1 : oldHeight,\n (alignCorners && newWidth > 1) ? oldWidth - 1 : oldWidth\n ];\n const effectiveOutputSize = [\n (alignCorners && newHeight > 1) ? newHeight - 1 : newHeight,\n (alignCorners && newWidth > 1) ? 
newWidth - 1 : newWidth\n ];\n const effectiveRowSizeRatio = effectiveInputSize[0] / effectiveOutputSize[0];\n const effectiveColSizeRatio = effectiveInputSize[1] / effectiveOutputSize[1];\n let outputOffset = 0;\n for (let b = 0; b < batch; b++) {\n const batchOffset = b * x.strides[0];\n for (let r = 0; r < newHeight; r++) {\n const sourceFracRow = effectiveRowSizeRatio * r;\n const sourceNearestRow = Math.min(oldHeight - 1, alignCorners ? Math.round(sourceFracRow) :\n Math.floor(sourceFracRow));\n const rowOffset = batchOffset + sourceNearestRow * x.strides[1];\n for (let c = 0; c < newWidth; c++) {\n const sourceFracCol = effectiveColSizeRatio * c;\n const sourceNearestCol = Math.min(oldWidth - 1, alignCorners ? Math.round(sourceFracCol) :\n Math.floor(sourceFracCol));\n const colOffset = rowOffset + sourceNearestCol * x.strides[2];\n for (let d = 0; d < numChannels; d++) {\n // Begin shader.\n // Compute the fractional index of the source.\n const newVal = xValues[colOffset + d];\n output[outputOffset++] = newVal;\n }\n }\n }\n }\n return tf.tensor(output, [batch, newHeight, newWidth, numChannels], x.dtype);\n }\n resizeNearestNeighborBackprop(dy, x, alignCorners) {\n assertNotComplex([dy, x], 'resizeNearestNeighborBackprop');\n const [batch, xHeight, xWidth, depth] = x.shape;\n const [, yHeight, yWidth] = dy.shape;\n const output = new Float32Array(batch * xHeight * xWidth * depth);\n const dyValues = this.readSync(dy.dataId);\n // In the backwards pass, we want to find the pixels that were generated\n // for each pixel in the input image the forward pass\n const effectiveXSize = [\n (alignCorners && yHeight > 1) ? xHeight - 1 : xHeight,\n (alignCorners && yWidth > 1) ? xWidth - 1 : xWidth\n ];\n const effectiveYSize = [\n (alignCorners && yHeight > 1) ? yHeight - 1 : yHeight,\n (alignCorners && yWidth > 1) ? yWidth - 1 : yWidth\n ];\n const heightScale = effectiveXSize[0] / effectiveYSize[0];\n const widthScale = effectiveXSize[1] / effectiveYSize[1];\n const invHeightScale = 1 / heightScale;\n const invWidthScale = 1 / widthScale;\n // This defines the size of the window of values around a particular\n // index in dy that we want to search for contributions to dx.\n const winHeight = (Math.ceil(invHeightScale) * 2) + 2;\n const winWidth = (Math.ceil(invWidthScale) * 2) + 2;\n // Loop over the output space.\n for (let b = 0; b < batch; b++) {\n const batchOffset = b * x.strides[0];\n for (let r = 0; r < xHeight; r++) {\n const rowOffset = batchOffset + r * x.strides[1];\n // Compute bounds for where in dy we will look\n const startRLerp = Math.floor(r * invHeightScale);\n const startDyR = Math.floor(startRLerp - (winHeight / 2));\n for (let c = 0; c < xWidth; c++) {\n const colOffset = rowOffset + c * x.strides[2];\n // Compute bounds for where in dy we will look\n const startCLerp = Math.floor(c * invWidthScale);\n const startDyC = Math.floor(startCLerp - (winWidth / 2));\n for (let d = 0; d < depth; d++) {\n let accum = 0;\n // loop over dy\n for (let dyRIndex = 0; dyRIndex < winHeight; dyRIndex++) {\n const dyR = dyRIndex + startDyR;\n // Guard against the window exceeding the bounds of dy\n if (dyR < 0 || dyR >= yHeight) {\n continue;\n }\n const dyROffset = batchOffset + dyR * dy.strides[1];\n const sourceFracRow = dyR * heightScale;\n const sourceNearestRow = Math.min(xHeight - 1, alignCorners ? 
Math.round(sourceFracRow) :\n Math.floor(sourceFracRow));\n if (r !== sourceNearestRow) {\n continue;\n }\n for (let dyCIndex = 0; dyCIndex < winWidth; dyCIndex++) {\n const dyC = dyCIndex + startDyC;\n // Guard against the window exceeding the bounds of dy\n if (dyC < 0 || dyC >= yWidth) {\n continue;\n }\n const dyCOffset = dyROffset + dyC * dy.strides[2];\n const sourceFracCol = dyC * widthScale;\n const sourceNearestCol = Math.min(xWidth - 1, alignCorners ? Math.round(sourceFracCol) :\n Math.floor(sourceFracCol));\n if (c === sourceNearestCol) {\n accum += dyValues[dyCOffset + d];\n }\n }\n }\n output[colOffset + d] = accum;\n }\n }\n }\n }\n return tf.tensor4d(output, x.shape, x.dtype);\n }\n localResponseNormalization4D(x, depthRadius, bias, alpha, beta) {\n assertNotComplex(x, 'localResponseNormalization4D');\n const channels = x.shape[3];\n const maxD = channels - 1;\n const xValues = this.readSync(x.dataId);\n const size = x.size;\n const result = new Float32Array(size);\n function sumAcrossChannels(offset) {\n const currentChannel = offset % channels;\n let beginSumOffset = offset - currentChannel + Math.max(0, currentChannel - depthRadius);\n const endSumOffset = offset - currentChannel +\n Math.min(currentChannel + depthRadius, maxD);\n let sum = 0.0;\n for (; beginSumOffset <= endSumOffset; beginSumOffset++) {\n const z = xValues[beginSumOffset];\n sum += z * z;\n }\n return sum;\n }\n for (let offset = 0; offset < size; offset++) {\n const sum = sumAcrossChannels(offset);\n const val = xValues[offset] * Math.pow(bias + alpha * sum, -beta);\n result[offset] = val;\n }\n return tf.tensor4d(result, x.shape);\n }\n LRNGrad(dy, inputImage, outputImage, depthRadius, bias, alpha, beta) {\n assertNotComplex(dy, 'LRNGrad');\n const channels = dy.shape[3];\n const dyValues = this.readSync(dy.dataId);\n const inputImageValues = this.readSync(inputImage.dataId);\n const outputImageValues = this.readSync(outputImage.dataId);\n const result = new Float32Array(dy.size);\n const size = dy.size;\n for (let offset = 0; offset < size; offset++) {\n const currentChannel = offset % channels;\n const depthBegin = (offset - currentChannel) + Math.max(0, currentChannel - depthRadius);\n const depthEnd = (offset - currentChannel) +\n Math.min(channels, currentChannel + depthRadius + 1);\n let norm = 0;\n for (let k = depthBegin; k < depthEnd; k++) {\n norm += Math.pow(inputImageValues[k], 2);\n }\n norm = alpha * norm + bias;\n for (let k = depthBegin; k < depthEnd; k++) {\n let dyi = -2 * alpha * beta * inputImageValues[k] *\n outputImageValues[offset] / norm;\n if (offset === k) {\n dyi += Math.pow(norm, -beta);\n }\n dyi *= dyValues[offset];\n result[k] += dyi;\n }\n }\n return tf.tensor4d(result, dy.shape);\n }\n multinomial(logits, normalized, numSamples, seed) {\n assertNotComplex(logits, 'multinomial');\n const probabilities = normalized ? logits : tf.softmax(logits);\n const batchSize = probabilities.shape[0];\n const numEvents = probabilities.shape[1];\n const res = tf.zeros([batchSize, numSamples], 'int32');\n const resVals = this.readSync(res.dataId);\n const probVals = this.readSync(probabilities.dataId);\n for (let b = 0; b < batchSize; ++b) {\n const offset = b * numEvents;\n // The cdf won't include the last event. 
It will be implicit if no other\n // event happened.\n const cdf = new Float32Array(numEvents - 1);\n cdf[0] = probVals[offset];\n for (let event = 1; event < cdf.length; ++event) {\n cdf[event] = cdf[event - 1] + probVals[offset + event];\n }\n const random = seedrandom.alea(seed.toString());\n const outOffset = b * numSamples;\n for (let sampleId = 0; sampleId < numSamples; ++sampleId) {\n const r = random();\n // Assume last event happened by default.\n resVals[outOffset + sampleId] = cdf.length;\n for (let event = 0; event < cdf.length; event++) {\n if (r < cdf[event]) {\n resVals[outOffset + sampleId] = event;\n break;\n }\n }\n }\n }\n return res;\n }\n oneHot(indices, depth, onValue, offValue) {\n assertNotComplex(indices, 'oneHot');\n const res = new Float32Array(indices.size * depth);\n res.fill(offValue);\n const indicesVal = this.readSync(indices.dataId);\n for (let event = 0; event < indices.size; ++event) {\n if (indicesVal[event] >= 0 && indicesVal[event] < depth) {\n res[event * depth + indicesVal[event]] = onValue;\n }\n }\n return tf.tensor2d(res, [indices.size, depth], 'int32');\n }\n nonMaxSuppression(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold) {\n assertNotComplex(boxes, 'nonMaxSuppression');\n const boxesVals = this.readSync(boxes.dataId);\n const scoresVals = this.readSync(scores.dataId);\n return nonMaxSuppressionV3Impl(boxesVals, scoresVals, maxOutputSize, iouThreshold, scoreThreshold);\n }\n depthToSpace(x, blockSize, dataFormat) {\n util.assert(dataFormat === 'NHWC', () => `Only NHWC dataFormat supported on CPU for depthToSpace. Got ${dataFormat}`);\n util.assert(blockSize > 1, () => `blockSize should be > 1 for depthToSpace, but was: ${blockSize}`);\n const batchSize = x.shape[0];\n const inputHeight = x.shape[1];\n const inputWidth = x.shape[2];\n const inputDepth = x.shape[3];\n const outputHeight = inputHeight * blockSize;\n const outputWidth = inputWidth * blockSize;\n const outputDepth = inputDepth / (blockSize * blockSize);\n const xValues = this.readSync(x.dataId);\n const result = new Float32Array(batchSize * outputHeight * outputWidth * outputDepth);\n let outputIdx = 0;\n for (let b = 0; b < batchSize; ++b) {\n for (let h = 0; h < outputHeight; ++h) {\n const inH = Math.floor(h / blockSize);\n const offsetH = (h % blockSize);\n for (let w = 0; w < outputWidth; ++w) {\n const inW = Math.floor(w / blockSize);\n const offsetW = (w % blockSize);\n const offsetD = (offsetH * blockSize + offsetW) * outputDepth;\n for (let d = 0; d < outputDepth; ++d) {\n const inD = d + offsetD;\n const inputIdx = inD + inputDepth * (inW + inputWidth * (inH + inputHeight * b));\n result[outputIdx++] = xValues[inputIdx];\n }\n }\n }\n }\n return tf.tensor4d(result, [batchSize, outputHeight, outputWidth, outputDepth]);\n }\n broadcastedBinaryOp(a, b, dtype, op) {\n const newShape = backend_util.assertAndGetBroadcastShape(a.shape, b.shape);\n const result = tf.buffer(newShape, dtype);\n const aVals = this.readSync(a.dataId);\n const bVals = this.readSync(b.dataId);\n const aBroadcastDims = backend_util.getBroadcastDims(a.shape, newShape);\n const bBroadcastDims = backend_util.getBroadcastDims(b.shape, newShape);\n const resVals = result.values;\n if (aBroadcastDims.length + bBroadcastDims.length === 0) {\n for (let i = 0; i < resVals.length; ++i) {\n resVals[i] = op(aVals[i % aVals.length], bVals[i % bVals.length]);\n }\n }\n else {\n const aBuf = this.bufferSync(a);\n const bBuf = this.bufferSync(b);\n for (let i = 0; i < resVals.length; ++i) {\n const loc = 
result.indexToLoc(i);\n const aLoc = loc.slice(-a.rank);\n aBroadcastDims.forEach(d => aLoc[d] = 0);\n const aIndex = aBuf.locToIndex(aLoc);\n const bLoc = loc.slice(-b.rank);\n bBroadcastDims.forEach(d => bLoc[d] = 0);\n const bIndex = bBuf.locToIndex(bLoc);\n resVals[i] = op(aVals[aIndex], bVals[bIndex]);\n }\n }\n return result.toTensor();\n }\n split(x, sizeSplits, axis) {\n return split(x, sizeSplits, axis);\n }\n dispose() { }\n floatPrecision() {\n return 32;\n }\n /** Returns the smallest representable number. */\n epsilon() {\n return super.epsilon();\n }\n cropAndResize(images, boxes, boxIndex, cropSize, method, extrapolationValue) {\n const [batch, imageHeight, imageWidth, numChannels] = images.shape;\n const numBoxes = boxes.shape[0];\n const [cropHeight, cropWidth] = cropSize;\n const output = tf.buffer([numBoxes, cropHeight, cropWidth, numChannels], 'float32');\n const boxVals = this.readSync(boxes.dataId);\n const boxIndVals = this.readSync(boxIndex.dataId);\n const imageVals = this.readSync(images.dataId);\n const inStride = images.strides; // to calculate flat indexes into image\n const outStride = output.strides; // to calculate flat indexes into output\n // Reference implementation\n // tslint:disable-next-line:max-line-length\n // https://github.com/tensorflow/tensorflow/blob/master/tensorflow/core/kernels/crop_and_resize_op.cc\n for (let b = 0; b < numBoxes; b++) {\n const startInd = b * 4;\n const y1 = boxVals[startInd];\n const x1 = boxVals[startInd + 1];\n const y2 = boxVals[startInd + 2];\n const x2 = boxVals[startInd + 3];\n const bInd = boxIndVals[b];\n if (bInd >= batch) {\n continue;\n }\n const heightScale = (cropHeight > 1) ?\n (y2 - y1) * (imageHeight - 1) / (cropHeight - 1) :\n 0;\n const widthScale = (cropWidth > 1) ? 
(x2 - x1) * (imageWidth - 1) / (cropWidth - 1) : 0;\n for (let y = 0; y < cropHeight; y++) {\n const yInd = (cropHeight > 1) ?\n y1 * (imageHeight - 1) + y * (heightScale) :\n 0.5 * (y1 + y2) * (imageHeight - 1);\n if (yInd < 0 || yInd > imageHeight - 1) {\n for (let x = 0; x < cropWidth; x++) {\n for (let c = 0; c < numChannels; c++) {\n const ind = c + x * outStride[2] + y * outStride[1] + b * outStride[0];\n output.values[ind] = extrapolationValue;\n }\n }\n continue;\n }\n if (method === 'bilinear') {\n const topInd = Math.floor(yInd);\n const bottomInd = Math.ceil(yInd);\n const yLerp = yInd - topInd;\n for (let x = 0; x < cropWidth; x++) {\n const xInd = (cropWidth > 1) ?\n x1 * (imageWidth - 1) + x * widthScale :\n 0.5 * (x1 + x2) * (imageWidth - 1);\n if (xInd < 0 || xInd > imageWidth - 1) {\n for (let c = 0; c < numChannels; c++) {\n const ind = c + x * outStride[2] + y * outStride[1] + b * outStride[0];\n output.values[ind] = extrapolationValue;\n }\n continue;\n }\n const leftInd = Math.floor(xInd);\n const rightInd = Math.ceil(xInd);\n const xLerp = xInd - leftInd;\n for (let c = 0; c < numChannels; c++) {\n let ind = c + leftInd * inStride[2] + topInd * inStride[1] +\n bInd * inStride[0];\n const topLeft = imageVals[ind];\n ind = c + rightInd * inStride[2] + topInd * inStride[1] +\n bInd * inStride[0];\n const topRight = imageVals[ind];\n ind = c + leftInd * inStride[2] + bottomInd * inStride[1] +\n bInd * inStride[0];\n const bottomLeft = imageVals[ind];\n ind = c + rightInd * inStride[2] + bottomInd * inStride[1] +\n bInd * inStride[0];\n const bottomRight = imageVals[ind];\n const top = topLeft + (topRight - topLeft) * xLerp;\n const bottom = bottomLeft + (bottomRight - bottomLeft) * xLerp;\n ind = c + x * outStride[2] + y * outStride[1] + b * outStride[0];\n output.values[ind] = top + ((bottom - top) * yLerp);\n }\n }\n }\n else { // method == \"nearest\"\n for (let x = 0; x < cropWidth; ++x) {\n const xInd = (cropWidth > 1) ?\n x1 * (imageWidth - 1) + x * widthScale :\n 0.5 * (x1 + x2) * (imageWidth - 1);\n if (xInd < 0 || xInd > imageWidth - 1) {\n for (let c = 0; c < numChannels; c++) {\n const ind = c + x * outStride[2] + y * outStride[1] + b * outStride[0];\n output.values[ind] = extrapolationValue;\n }\n continue;\n }\n const closestX = Math.round(xInd);\n const closestY = Math.round(yInd);\n for (let c = 0; c < numChannels; c++) {\n const inInd = c + closestX * inStride[2] +\n closestY * inStride[1] + bInd * inStride[0];\n const outInd = c + x * outStride[2] + y * outStride[1] + b * outStride[0];\n output.values[outInd] = imageVals[inInd];\n }\n }\n }\n }\n }\n return output.toTensor();\n }\n sparseToDense(sparseIndices, sparseValues, outputShape, defaultValue) {\n const { sliceRank, numUpdates, sliceSize, strides, outputSize } = backend_util.calculateShapes(sparseValues, sparseIndices, outputShape);\n const sumDupeIndices = false;\n return this.scatter(sparseIndices, sparseValues, outputShape, outputSize, sliceSize, numUpdates, sliceRank, strides, defaultValue, sumDupeIndices);\n }\n gatherND(x, indices) {\n const indicesShape = indices.shape;\n const sliceRank = indicesShape[indicesShape.length - 1];\n const [resultShape, numSlices, sliceSize, strides] = backend_util.prepareAndValidate(x, indices);\n if (numSlices === 0) {\n return tf.tensor([], resultShape, x.dtype);\n }\n const buffer = new TensorBuffer([numSlices, sliceSize], x.dtype);\n const indicesData = this.readSync(indices.dataId);\n const xData = this.readSync(x.dataId);\n for (let i = 0; i < numSlices; 
i++) {\n const index = [];\n let flattenIndex = 0;\n for (let j = 0; j < sliceRank; j++) {\n const dim = indicesData[i * sliceRank + j];\n flattenIndex += dim * strides[j];\n index.push(dim);\n }\n if (flattenIndex < 0 || flattenIndex >= x.size / sliceSize) {\n throw new Error(`Invalid indices: ${index} does not index into ${x.shape}`);\n }\n for (let k = 0; k < sliceSize; k++) {\n buffer.values[i * sliceSize + k] = xData[flattenIndex * sliceSize + k];\n }\n }\n return buffer.toTensor().reshape(resultShape);\n }\n scatterND(indices, updates, shape) {\n const { sliceRank, numUpdates, sliceSize, strides, outputSize } = backend_util.calculateShapes(updates, indices, shape);\n const defaultValue = tf.scalar(0);\n const sumDupeIndices = true;\n return this.scatter(indices, updates, shape, outputSize, sliceSize, numUpdates, sliceRank, strides, defaultValue, sumDupeIndices);\n }\n onesLike(x) {\n if (x.dtype === 'string') {\n throw new Error('onesLike is not supported for string tensors');\n }\n else {\n // TODO(lina128): Use fill kernel directly once this kernel is\n // modularized.\n return tf.fill(x.shape, 1, x.dtype);\n }\n }\n zerosLike(x) {\n const values = util.getArrayFromDType(x.dtype, util.sizeFromShape(x.shape));\n return this.makeOutput(values, x.shape, x.dtype);\n }\n linspace(start, stop, num) {\n return backend_util.linspaceImpl(start, stop, num);\n }\n scatter(indices, updates, shape, outputSize, sliceSize, numUpdates, sliceRank, strides, defaultValue, sumDupeIndices) {\n const flattenShape = [outputSize / sliceSize, sliceSize];\n const indicesData = this.readSync(indices.dataId);\n const updatesData = this.readSync(updates.dataId);\n if (outputSize === 0) {\n return tf.tensor([], shape, updates.dtype);\n }\n const buffer = new TensorBuffer(flattenShape, updates.dtype);\n buffer.values.fill(this.readSync(defaultValue.dataId)[0]);\n for (let i = 0; i < numUpdates; i++) {\n const index = [];\n let flattenIndex = 0;\n for (let j = 0; j < sliceRank; j++) {\n const dim = indicesData[i * sliceRank + j];\n index.push(dim);\n flattenIndex += dim * strides[j];\n }\n if (flattenIndex < 0 || flattenIndex >= outputSize / sliceSize) {\n throw new Error(`Invalid indices: ${index} does not index into ${shape}`);\n }\n for (let k = 0; k < sliceSize; k++) {\n if (sumDupeIndices) {\n buffer.values[flattenIndex * sliceSize + k] +=\n updatesData[i * sliceSize + k];\n }\n else {\n buffer.values[flattenIndex * sliceSize + k] = updates.rank === 0 ?\n updatesData[0] :\n updatesData[i * sliceSize + k];\n }\n }\n }\n return buffer.toTensor().reshape(shape);\n }\n}\n//# sourceMappingURL=backend_cpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Abs, util } from '@tensorflow/tfjs-core';\nexport function simpleAbsImpl(vals) {\n const resultValues = new Float32Array(vals.length);\n for (let i = 0; i < vals.length; ++i) {\n resultValues[i] = Math.abs(vals[i]);\n }\n return resultValues;\n}\nexport const abs = (args) => {\n const { x } = args.inputs;\n const cpuBackend = args.backend;\n let resultValues = new Float32Array(util.sizeFromShape(x.shape));\n if (x.dtype !== 'complex64') {\n const values = cpuBackend.data.get(x.dataId).values;\n resultValues = simpleAbsImpl(values);\n }\n else {\n const complexVals = cpuBackend.data.get(x.dataId);\n const real = complexVals.complexTensorInfos.real;\n const imag = complexVals.complexTensorInfos.imag;\n const realVals = cpuBackend.data.get(real.dataId).values;\n const imagVals = cpuBackend.data.get(imag.dataId).values;\n for (let i = 0; i < realVals.length; i++) {\n const real = realVals[i];\n const imag = imagVals[i];\n resultValues[i] = Math.hypot(real, imag);\n }\n }\n return cpuBackend.makeOutput(resultValues, x.shape, 'float32');\n};\nexport const absConfig = {\n kernelName: Abs,\n backendName: 'cpu',\n kernelFunc: abs,\n};\n//# sourceMappingURL=Abs.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, util } from '@tensorflow/tfjs-core';\n/**\n * Template that creates implementation for binary ops. 
Supports broadcast.\n */\nexport function createSimpleBinaryKernelImpl(op) {\n return (aShape, bShape, aVals, bVals, dtype) => {\n const newShape = backend_util.assertAndGetBroadcastShape(aShape, bShape);\n const resultRank = newShape.length;\n const resultStrides = util.computeStrides(newShape);\n const resultSize = util.sizeFromShape(newShape);\n const result = util.getTypedArrayFromDType(dtype, resultSize);\n const aRank = aShape.length;\n const bRank = bShape.length;\n const aStrides = util.computeStrides(aShape);\n const bStrides = util.computeStrides(bShape);\n const aBroadcastDims = backend_util.getBroadcastDims(aShape, newShape);\n const bBroadcastDims = backend_util.getBroadcastDims(bShape, newShape);\n if (aBroadcastDims.length + bBroadcastDims.length === 0) {\n for (let i = 0; i < result.length; ++i) {\n result[i] = op(aVals[i % aVals.length], bVals[i % bVals.length]);\n }\n }\n else {\n for (let i = 0; i < result.length; ++i) {\n const loc = util.indexToLoc(i, resultRank, resultStrides);\n const aLoc = loc.slice(-aRank);\n aBroadcastDims.forEach(d => aLoc[d] = 0);\n const aIndex = util.locToIndex(aLoc, aRank, aStrides);\n const bLoc = loc.slice(-bRank);\n bBroadcastDims.forEach(d => bLoc[d] = 0);\n const bIndex = util.locToIndex(bLoc, bRank, bStrides);\n result[i] = op(aVals[aIndex], bVals[bIndex]);\n }\n }\n return [result, newShape];\n };\n}\n//# sourceMappingURL=binary_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Complex } from '@tensorflow/tfjs-core';\nexport function complex(args) {\n const { inputs, backend } = args;\n const { real, imag } = inputs;\n const realVals = backend.data.get(real.dataId).values;\n const imagVals = backend.data.get(imag.dataId).values;\n const complexInfo = backend.makeTensorInfo(real.shape, 'complex64');\n const complex = backend.data.get(complexInfo.dataId);\n // The complex tensor owns the underlying real and imag tensorInfos, only the\n // complex tensor tracks refCount, when complexData is disposed the\n // underlying tensorData will be disposed.\n complex.complexTensorInfos = {\n real: backend.makeTensorInfo(real.shape, 'float32', realVals),\n imag: backend.makeTensorInfo(imag.shape, 'float32', imagVals)\n };\n return complexInfo;\n}\nexport const complexConfig = {\n kernelName: Complex,\n backendName: 'cpu',\n kernelFunc: complex\n};\n//# sourceMappingURL=Complex.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Identity } from '@tensorflow/tfjs-core';\nexport function identity(args) {\n const { inputs, backend } = args;\n const { x } = inputs;\n backend.incRef(x.dataId);\n return { dataId: x.dataId, shape: x.shape, dtype: x.dtype };\n}\nexport const identityConfig = {\n kernelName: Identity,\n backendName: 'cpu',\n kernelFunc: identity\n};\n//# sourceMappingURL=Identity.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Real } from '@tensorflow/tfjs-core';\nexport function real(args) {\n const { inputs, backend } = args;\n const { input } = inputs;\n const real = backend.data.get(input.dataId).complexTensorInfos.real;\n const realVal = backend.data.get(real.dataId).values;\n // When complex tensor is disposed, its underlying parts will be disposed too.\n // Make new tensor out of the real value of the complex. This makes sure the\n // value is still accessible even if complex tensor is disposed.\n return backend.makeTensorInfo(real.shape, real.dtype, realVal);\n}\nexport const realConfig = {\n kernelName: Real,\n backendName: 'cpu',\n kernelFunc: real\n};\n//# sourceMappingURL=Real.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as tf from '@tensorflow/tfjs-core';\nimport { Cast, util } from '@tensorflow/tfjs-core';\nimport { createSimpleBinaryKernelImpl } from '../utils/binary_impl';\nimport { complex } from './Complex';\nimport { identity } from './Identity';\nimport { real } from './Real';\nexport function cast(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n const { dtype } = attrs;\n // Casting to complex64.\n if (dtype === 'complex64') {\n if (x.dtype === 'complex64') {\n return identity({ inputs: { x }, backend });\n }\n // TODO(lina128): Import kernel function once zeros is modularized.\n const zerosTensor = tf.zeros(x.shape);\n const floatX = cast({ inputs: { x }, backend, attrs: { dtype: 'float32' } });\n const result = complex({ inputs: { real: floatX, imag: zerosTensor }, backend });\n zerosTensor.dispose();\n backend.disposeIntermediateTensorInfo(floatX);\n return result;\n }\n // Casting from complex64\n if (x.dtype === 'complex64') {\n const realPart = real({ inputs: { input: x }, backend });\n const result = cast({ inputs: { x: realPart }, backend, attrs: { dtype } });\n backend.disposeIntermediateTensorInfo(realPart);\n return result;\n }\n if (!util.hasEncodingLoss(x.dtype, dtype)) {\n // We don't change the underlying data, since we cast to higher\n // precision.\n const result = identity({ inputs: { x }, backend });\n return { dataId: result.dataId, shape: result.shape, dtype };\n }\n if (dtype === 'int32') {\n const values = backend.data.get(x.dataId).values;\n const resultValues = Int32Array.from(values);\n return backend.makeTensorInfo(x.shape, 'int32', resultValues);\n }\n if (dtype === 'bool') {\n // This is essentially the result of notEqual(x, 0). We avoid using\n // kernel notEqual to avoid circular dependency, i.e. binary_utils ->\n // cast -> notEqual -> binary_utils.\n const xVals = backend.data.get(x.dataId).values;\n const zero = util.toTypedArray([0], x.dtype);\n const [resultData, resultShape] = createSimpleBinaryKernelImpl((a, b) => (a !== b) ? 1 : 0)(x.shape, [], xVals, zero, 'bool');\n return backend.makeTensorInfo(resultShape, 'bool', resultData);\n }\n throw new Error(`Error in Cast: failed to cast ${x.dtype} to ${dtype}`);\n}\nexport const castConfig = {\n kernelName: Cast,\n backendName: 'cpu',\n kernelFunc: cast\n};\n//# sourceMappingURL=Cast.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { cast } from '../kernels/Cast';\nimport { complex } from '../kernels/Complex';\n/**\n * Template that creates a `KernelFunc` for binary ops.\n * @param name Kernel name.\n * @param binaryKernelImpl A `SimpleBinaryKernelImpl` for the kernel.\n * @param binaryKernelComplexImpl Optional. If exists, represents a\n * `ComplexBinaryKernelImpl` for the kernel, will be used when input dtype\n * is `complex64`.\n * @param dtype Optional. If set, the result has this dtype. Otherwise, the\n * result has the same dtype as the first input. This is mainly used in\n * comparison kernels, such as Equal, Less, Greater, etc.\n */\nexport function binaryKernelFunc(name, simpleImpl, complexImpl, dtype) {\n if (complexImpl == null) {\n return ({ inputs, backend }) => {\n const { a, b } = inputs;\n const cpuBackend = backend;\n assertNotComplex([a, b], name);\n const aVals = cpuBackend.data.get(a.dataId).values;\n const bVals = cpuBackend.data.get(b.dataId).values;\n const $dtype = dtype || a.dtype;\n const [resultData, resultShape] = simpleImpl(a.shape, b.shape, aVals, bVals, $dtype);\n return cpuBackend.makeTensorInfo(resultShape, $dtype, resultData);\n };\n }\n return ({ inputs, backend }) => {\n const { a, b } = inputs;\n const cpuBackend = backend;\n if (a.dtype === 'complex64' || b.dtype === 'complex64') {\n const $aComplex = cast({ inputs: { x: a }, backend: cpuBackend, attrs: { dtype: 'complex64' } });\n const $aComplexVals = cpuBackend.data.get($aComplex.dataId);\n const aReal = $aComplexVals.complexTensorInfos.real;\n const aImag = $aComplexVals.complexTensorInfos.imag;\n const aRealVals = cpuBackend.data.get(aReal.dataId).values;\n const aImagVals = cpuBackend.data.get(aImag.dataId).values;\n const $bComplex = cast({ inputs: { x: b }, backend: cpuBackend, attrs: { dtype: 'complex64' } });\n const $bComplexVals = cpuBackend.data.get($bComplex.dataId);\n const bReal = $bComplexVals.complexTensorInfos.real;\n const bImag = $bComplexVals.complexTensorInfos.imag;\n const bRealVals = cpuBackend.data.get(bReal.dataId).values;\n const bImagVals = cpuBackend.data.get(bImag.dataId).values;\n const [resultRealData, resultImagData, resultShape] = complexImpl(a.shape, b.shape, aRealVals, aImagVals, bRealVals, bImagVals);\n const resultReal = cpuBackend.makeTensorInfo(resultShape, 'float32', resultRealData);\n const resultImag = cpuBackend.makeTensorInfo(resultShape, 'float32', resultImagData);\n const result = complex({ inputs: { real: resultReal, imag: resultImag }, backend: cpuBackend });\n cpuBackend.disposeIntermediateTensorInfo($aComplex);\n cpuBackend.disposeIntermediateTensorInfo($bComplex);\n cpuBackend.disposeIntermediateTensorInfo(resultReal);\n cpuBackend.disposeIntermediateTensorInfo(resultImag);\n return result;\n }\n else 
{\n const aVals = cpuBackend.data.get(a.dataId).values;\n const bVals = cpuBackend.data.get(b.dataId).values;\n const $dtype = dtype || a.dtype;\n const [resultData, resultShape] = simpleImpl(a.shape, b.shape, aVals, bVals, $dtype);\n return cpuBackend.makeTensorInfo(resultShape, $dtype, resultData);\n }\n };\n}\n/**\n * Template that creates the complex type implementation for binary ops.\n * Supports broadcast.\n */\nexport function createComplexBinaryKernelImpl(op) {\n return (aShape, bShape, aRealVals, aImagVals, bRealVals, bImagVals) => {\n const resultShape = backend_util.assertAndGetBroadcastShape(aShape, bShape);\n const resultSize = util.sizeFromShape(resultShape);\n const resultRank = resultShape.length;\n const resultStrides = util.computeStrides(resultShape);\n const resultRealVals = util.getTypedArrayFromDType('float32', resultSize);\n const resultImagVals = util.getTypedArrayFromDType('float32', resultSize);\n const aBroadcastDims = backend_util.getBroadcastDims(aShape, resultShape);\n const bBroadcastDims = backend_util.getBroadcastDims(bShape, resultShape);\n const aVals = backend_util.mergeRealAndImagArrays(aRealVals, aImagVals);\n const bVals = backend_util.mergeRealAndImagArrays(bRealVals, bImagVals);\n const aRank = aShape.length;\n const aStrides = util.computeStrides(aShape);\n const bRank = bShape.length;\n const bStrides = util.computeStrides(bShape);\n if (aBroadcastDims.length + bBroadcastDims.length === 0) {\n for (let i = 0; i < resultRealVals.length; i++) {\n const aIdx = i % aVals.length;\n const bIdx = i % bVals.length;\n const result = op(aVals[aIdx * 2], aVals[aIdx * 2 + 1], bVals[bIdx * 2], bVals[bIdx * 2 + 1]);\n resultRealVals[i] = result.real;\n resultImagVals[i] = result.imag;\n }\n }\n else {\n for (let i = 0; i < resultRealVals.length; i++) {\n const loc = util.indexToLoc(i, resultRank, resultStrides);\n const aLoc = loc.slice(-aRank);\n aBroadcastDims.forEach(d => aLoc[d] = 0);\n const aIndex = util.locToIndex(aLoc, aRank, aStrides);\n const bLoc = loc.slice(-bRank);\n bBroadcastDims.forEach(d => bLoc[d] = 0);\n const bIndex = util.locToIndex(bLoc, bRank, bStrides);\n const opResult = op(aVals[aIndex * 2], aVals[aIndex * 2 + 1], bVals[bIndex * 2], bVals[bIndex * 2 + 1]);\n resultRealVals[i] = opResult.real;\n resultImagVals[i] = opResult.imag;\n }\n }\n return [resultRealVals, resultImagVals, resultShape];\n };\n}\n//# sourceMappingURL=kernel_utils.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Add } from '@tensorflow/tfjs-core';\nimport { createSimpleBinaryKernelImpl } from '../utils/binary_impl';\nimport { binaryKernelFunc, createComplexBinaryKernelImpl } from '../utils/kernel_utils';\nexport const addImpl = createSimpleBinaryKernelImpl(((a, b) => a + b));\nexport const addComplexImpl = createComplexBinaryKernelImpl(((aReal, aImag, bReal, bImag) => {\n return { real: aReal + bReal, imag: aImag + bImag };\n}));\nexport const add = binaryKernelFunc(Add, addImpl, addComplexImpl);\nexport const addConfig = {\n kernelName: Add,\n backendName: 'cpu',\n kernelFunc: add\n};\n//# sourceMappingURL=Add.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\n/**\n * Template that creates implementation for unary op.\n */\nexport function createSimpleUnaryImpl(op) {\n return (values, dtype, attrs) => {\n const newValues = util.getTypedArrayFromDType(dtype, values.length);\n for (let i = 0; i < values.length; ++i) {\n newValues[i] = op(values[i], attrs);\n }\n return newValues;\n };\n}\n//# sourceMappingURL=unary_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\n/**\n * Template that creates a `KernelFunc` for unary ops.\n * @param name Kernel name.\n * @param op A `SimpleUnaryOperation` for the kernel.\n * @param dtype Optional. If set, the result has this dtype. Otherwise, the\n * result has the same dtype as the input. 
This is mainly used in certain\n * kernels that return bool type, such as isFinite, isInf, etc.\n */\nexport function unaryKernelFunc(name, op, dtype) {\n return ({ inputs, attrs, backend }) => {\n const { x } = inputs;\n assertNotComplex(x, name);\n if (x.dtype === 'string' || dtype === 'string') {\n throw new Error('unaryKernelFunc does not support string input/output');\n }\n const cpuBackend = backend;\n const values = cpuBackend.data.get(x.dataId).values;\n const xSize = util.sizeFromShape(x.shape);\n const $dtype = dtype || x.dtype;\n const newValues = util.getArrayFromDType($dtype, xSize);\n for (let i = 0; i < xSize; ++i) {\n newValues[i] = op(values[i], attrs);\n }\n return cpuBackend.makeTensorInfo(x.shape, $dtype, newValues);\n };\n}\n/**\n * Template that creates a `KernelFunc` for unary ops from the given\n * `SimpleUnaryImpl`..\n * @param name Kernel name.\n * @param unaryImpl A `SimpleUnaryImpl` that implements the op.\n * @param dtype Optional. If set, the result has this dtype. Otherwise, the\n * result has the same dtype as the input. This is mainly used in certain\n * kernels that return bool type, such as isFinite, isInf, etc.\n */\nexport function unaryKernelFuncFromImpl(name, unaryImpl, dtype) {\n return ({ inputs, attrs, backend }) => {\n const { x } = inputs;\n assertNotComplex(x, name);\n if (x.dtype === 'string' || dtype === 'string') {\n throw new Error('unaryKernelFunc does not support string input/output');\n }\n const cpuBackend = backend;\n const values = cpuBackend.data.get(x.dataId).values;\n const $dtype = dtype || x.dtype;\n const newValues = unaryImpl(values, $dtype, attrs);\n return cpuBackend.makeTensorInfo(x.shape, $dtype, newValues);\n };\n}\n//# sourceMappingURL=unary_utils.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Ceil } from '@tensorflow/tfjs-core';\nimport { createSimpleUnaryImpl } from '../utils/unary_impl';\nimport { unaryKernelFuncFromImpl } from '../utils/unary_utils';\nexport const ceilImpl = createSimpleUnaryImpl((xi) => Math.ceil(xi));\nexport const ceil = unaryKernelFuncFromImpl(Ceil, ceilImpl);\nexport const ceilConfig = {\n kernelName: Ceil,\n backendName: 'cpu',\n kernelFunc: ceil,\n};\n//# sourceMappingURL=Ceil.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Exp } from '@tensorflow/tfjs-core';\nimport { createSimpleUnaryImpl } from '../utils/unary_impl';\nimport { unaryKernelFuncFromImpl } from '../utils/unary_utils';\nexport const expImpl = createSimpleUnaryImpl((xi) => Math.exp(xi));\nexport const exp = unaryKernelFuncFromImpl(Exp, expImpl);\nexport const expConfig = {\n kernelName: Exp,\n backendName: 'cpu',\n kernelFunc: exp,\n};\n//# sourceMappingURL=Exp.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Expm1 } from '@tensorflow/tfjs-core';\nimport { createSimpleUnaryImpl } from '../utils/unary_impl';\nimport { unaryKernelFuncFromImpl } from '../utils/unary_utils';\nexport const expm1Impl = createSimpleUnaryImpl((xi) => Math.expm1(xi));\nexport const expm1 = unaryKernelFuncFromImpl(Expm1, expm1Impl);\nexport const expm1Config = {\n kernelName: Expm1,\n backendName: 'cpu',\n kernelFunc: expm1,\n};\n//# sourceMappingURL=Expm1.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Floor } from '@tensorflow/tfjs-core';\nimport { createSimpleUnaryImpl } from '../utils/unary_impl';\nimport { unaryKernelFuncFromImpl } from '../utils/unary_utils';\nexport const floorImpl = createSimpleUnaryImpl((xi) => Math.floor(xi));\nexport const floor = unaryKernelFuncFromImpl(Floor, floorImpl);\nexport const floorConfig = {\n kernelName: Floor,\n backendName: 'cpu',\n kernelFunc: floor,\n};\n//# sourceMappingURL=Floor.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Log } from '@tensorflow/tfjs-core';\nimport { createSimpleUnaryImpl } from '../utils/unary_impl';\nimport { unaryKernelFuncFromImpl } from '../utils/unary_utils';\nexport const logImpl = createSimpleUnaryImpl((xi) => Math.log(xi));\nexport const log = unaryKernelFuncFromImpl(Log, logImpl);\nexport const logConfig = {\n kernelName: Log,\n backendName: 'cpu',\n kernelFunc: log,\n};\n//# sourceMappingURL=Log.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nexport function maxImpl(aVals, reduceSize, outShape, dtype) {\n const vals = util.getTypedArrayFromDType(dtype, util.sizeFromShape(outShape));\n for (let i = 0; i < vals.length; ++i) {\n const offset = i * reduceSize;\n let max = aVals[offset];\n for (let j = 0; j < reduceSize; ++j) {\n const value = aVals[offset + j];\n if (value > max) {\n max = value;\n }\n }\n vals[i] = max;\n }\n return vals;\n}\n//# sourceMappingURL=Max_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Multiply } from '@tensorflow/tfjs-core';\nimport { createSimpleBinaryKernelImpl } from '../utils/binary_impl';\nimport { binaryKernelFunc, createComplexBinaryKernelImpl } from '../utils/kernel_utils';\nexport const multiplyImpl = createSimpleBinaryKernelImpl(((aValue, bValue) => aValue * bValue));\nexport const multiplyComplexImpl = createComplexBinaryKernelImpl(((aReal, aImag, bReal, bImag) => {\n return {\n real: aReal * bReal - aImag * bImag,\n imag: aReal * bImag + aImag * bReal\n };\n}));\nexport const multiply = binaryKernelFunc(Multiply, multiplyImpl, multiplyComplexImpl);\nexport const multiplyConfig = {\n kernelName: Multiply,\n backendName: 'cpu',\n kernelFunc: multiply\n};\n//# sourceMappingURL=Multiply.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { NotEqual } from '@tensorflow/tfjs-core';\nimport { createSimpleBinaryKernelImpl } from '../utils/binary_impl';\nimport { binaryKernelFunc } from '../utils/kernel_utils';\nexport const notEqualImpl = createSimpleBinaryKernelImpl(((a, b) => (a !== b) ? 1 : 0));\nexport const notEqual = binaryKernelFunc(NotEqual, notEqualImpl, null /* complexOp */, 'bool');\nexport const notEqualConfig = {\n kernelName: NotEqual,\n backendName: 'cpu',\n kernelFunc: notEqual\n};\n//# sourceMappingURL=NotEqual.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Rsqrt } from '@tensorflow/tfjs-core';\nimport { createSimpleUnaryImpl } from '../utils/unary_impl';\nimport { unaryKernelFuncFromImpl } from '../utils/unary_utils';\nexport const rsqrtImpl = createSimpleUnaryImpl((xi) => 1 / Math.sqrt(xi));\nexport const rsqrt = unaryKernelFuncFromImpl(Rsqrt, rsqrtImpl);\nexport const rsqrtConfig = {\n kernelName: Rsqrt,\n backendName: 'cpu',\n kernelFunc: rsqrt,\n};\n//# sourceMappingURL=Rsqrt.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Slice, slice_util, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function sliceImpl(vals, begin, size, shape, dtype) {\n const isContinous = slice_util.isSliceContinous(shape, begin, size);\n const length = util.sizeFromShape(size);\n const xStrides = util.computeStrides(shape);\n if (isContinous) {\n const flatOffset = slice_util.computeFlatOffset(begin, xStrides);\n return vals.subarray(flatOffset, flatOffset + length);\n }\n const outVals = util.getTypedArrayFromDType(dtype, length);\n for (let i = 0; i < length; ++i) {\n const rank = size.length;\n const strides = util.computeStrides(size);\n const loc = util.indexToLoc(i, rank, strides);\n const xLoc = loc.map((idx, j) => idx + begin[j]);\n const xIndex = util.locToIndex(xLoc, shape.length, xStrides);\n outVals[i] = vals[xIndex];\n }\n return outVals;\n}\nexport function slice(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n const { begin, size } = attrs;\n assertNotComplex(x, 'slice');\n const [$begin, $size] = slice_util.parseSliceParams(x, begin, size);\n slice_util.assertParamsValid(x, $begin, $size);\n const vals = backend.data.get(x.dataId).values;\n const outVals = sliceImpl(vals, $begin, $size, x.shape, x.dtype);\n return backend.makeTensorInfo($size, x.dtype, outVals);\n}\nexport const sliceConfig = {\n kernelName: Slice,\n backendName: 'cpu',\n kernelFunc: slice\n};\n//# sourceMappingURL=Slice.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { SquaredDifference } from '@tensorflow/tfjs-core';\nimport { createSimpleBinaryKernelImpl } from '../utils/binary_impl';\nimport { binaryKernelFunc } from '../utils/kernel_utils';\nexport const squaredDifferenceImpl = createSimpleBinaryKernelImpl(((a, b) => {\n const diff = a - b;\n return diff * diff;\n}));\nexport const squaredDifference = binaryKernelFunc(SquaredDifference, squaredDifferenceImpl);\nexport const squaredDifferenceConfig = {\n kernelName: SquaredDifference,\n backendName: 'cpu',\n kernelFunc: squaredDifference\n};\n//# sourceMappingURL=SquaredDifference.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sub } from '@tensorflow/tfjs-core';\nimport { createSimpleBinaryKernelImpl } from '../utils/binary_impl';\nimport { binaryKernelFunc, createComplexBinaryKernelImpl } from '../utils/kernel_utils';\nexport const subImpl = createSimpleBinaryKernelImpl(((aValue, bValue) => aValue - bValue));\nexport const subComplexImpl = createComplexBinaryKernelImpl(((aReal, aImag, bReal, bImag) => {\n return { real: aReal - bReal, imag: aImag - bImag };\n}));\nexport const sub = binaryKernelFunc(Sub, subImpl, subComplexImpl);\nexport const subConfig = {\n kernelName: Sub,\n backendName: 'cpu',\n kernelFunc: sub\n};\n//# sourceMappingURL=Sub.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nexport function transposeImpl(xVals, xShape, dtype, perm, newShape) {\n const xRank = xShape.length;\n const xSize = util.sizeFromShape(xShape);\n const xStrides = util.computeStrides(xShape);\n const newStrides = util.computeStrides(newShape);\n const result = util.getTypedArrayFromDType(dtype, util.sizeFromShape(newShape));\n for (let i = 0; i < xSize; ++i) {\n const loc = util.indexToLoc(i, xRank, xStrides);\n // Permute location.\n const newLoc = new Array(loc.length);\n for (let i = 0; i < newLoc.length; i++) {\n newLoc[i] = loc[perm[i]];\n }\n const newIndex = util.locToIndex(newLoc, xRank, newStrides);\n result[newIndex] = xVals[i];\n }\n return result;\n}\n//# sourceMappingURL=Transpose_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { TensorBuffer, util } from '@tensorflow/tfjs-core';\nexport function uniqueImpl(values, axis, shape, dtype) {\n // Normalize and validate axis.\n const $axis = util.parseAxisParam(axis, shape)[0];\n // Calculate the new shape that is suitable for extracting data along the\n // given axis.\n //\n // The rank is 3.\n // The size of the 1st dimension is the size of all the axes < the given axis.\n // The size of the 2nd dimension is the same as the size of the given axis.\n // The size of the 3rd dimension is the size of all the axes > the given axis.\n //\n // For example, for a 4D tensor with shape=[2, 3, 5, 4] and axis=2, the\n // newShape would be: [2*3, 5, 4].\n //\n // Note that this is not the final output shape. This will be the shape for an\n // intermediate TensorBuffer (see inputBuffer below) to allow us to extract\n // values along the given axis. To demonstrate how it works, consider the\n // following example:\n //\n // Input: a 3D tensor, with shape [1, 2, 3]\n // [\n // [\n // [1,2,3],\n // [4,5,6]\n // ]\n // ]\n // Axis: 2 (the last axis).\n // Along axis 2, we expect to extract 3 tensors: [1,4], [2,5], [3,6].\n //\n // For this example, newShape would be: [2, 3, 1], where 2 is calculated from\n // 1*2. 
The re-shaped data would look like:\n //\n // [\n // [\n // [1], [2], [3]\n // ],\n // [\n // [4], [5], [6]\n // ]\n // ]\n //\n // Then, we can construct a 3-level nested loop by the following dimension\n // order to extract the values along the axis (dimension1):\n // i: dimension1 // 0,1,2 (newShape[1])\n // m: dimension0 // 0,1 (newShape[0])\n // n: dimension2 // 0 (newShape[2])\n //\n // m, i, n\n // ---------\n // Iteration 0: data at [0, 0, 0] => \"1\"\n // Iteration 1: data at [1, 0, 0] => \"4\"\n // We got [1,4].\n // Iteration 2: data at [0, 1, 0] => \"2\"\n // Iteration 3: data at [1, 1, 0] => \"5\"\n // We got [2,5].\n // Iteration 4: data at [0, 2, 0] => \"3\"\n // Iteration 5: data at [1, 2, 0] => \"6\"\n // We got [3,6].\n const newShape = [1, shape[0], 1];\n for (let i = 0; i < $axis; i++) {\n newShape[0] *= shape[i];\n }\n newShape[1] = shape[$axis];\n for (let i = $axis + 1; i < shape.length; i++) {\n newShape[2] *= shape[i];\n }\n // A map from unique elements (their string representations) to their values\n // in \"indices\" (below).\n const uniqueElements = {};\n // The indices of each unique element in the original tensor along the given\n // axis. It is 1D and has the same size as the given axis.\n const indices = new Int32Array(shape[$axis]);\n // Create a buffer so we can easily extract value at a given location.\n const inputBuffer = new TensorBuffer(newShape, dtype, values);\n // The indices along the given axis that have unique elements. This is a\n // de-duped version of \"indices\" above.\n const uniqueIndices = [];\n const is1DTensor = newShape[0] === 1 && newShape[2] === 1;\n for (let i = 0; i < shape[$axis]; i++) {\n // Extract values along the axis.\n let element;\n if (is1DTensor) {\n // Fast path for 1D tensor input.\n element = values[i].toString();\n }\n else {\n const axisValues = [];\n for (let m = 0; m < newShape[0]; m++) {\n for (let n = 0; n < newShape[2]; n++) {\n axisValues.push(inputBuffer.get(m, i, n));\n }\n }\n element = axisValues.join(',');\n }\n // Dedup and update various indices.\n if (uniqueElements[element] !== undefined) {\n indices[i] = uniqueElements[element];\n }\n else {\n const uniqueIndex = Object.keys(uniqueElements).length;\n uniqueElements[element] = uniqueIndex;\n indices[i] = uniqueIndex;\n uniqueIndices.push(i);\n }\n }\n // Now we know where each of the unique elements are located along the axis\n // (uniqueIndices). Extract them from input buffer and store them in the\n // output buffer.\n const outputTmpShape = newShape.slice();\n outputTmpShape[1] = Object.keys(uniqueElements).length;\n const outputBuffer = new TensorBuffer(outputTmpShape, dtype);\n uniqueIndices.forEach((uniqueElementIndex, i) => {\n for (let m = 0; m < newShape[0]; m++) {\n for (let n = 0; n < newShape[2]; n++) {\n outputBuffer.set(inputBuffer.get(m, uniqueElementIndex, n), m, i, n);\n }\n }\n });\n // The output shape can be calculated from the input shape with the size of\n // the given axis replaced by the number of unique elements along that axis.\n const outputShape = shape.slice();\n outputShape[$axis] = outputTmpShape[1];\n return {\n outputValues: outputBuffer.values,\n outputShape,\n indices,\n };\n}\n//# sourceMappingURL=Unique_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// Shared functionality among backends.\nexport { simpleAbsImpl } from './kernels/Abs';\nexport { addImpl } from './kernels/Add';\nexport { ceilImpl } from './kernels/Ceil';\nexport { expImpl } from './kernels/Exp';\nexport { expm1Impl } from './kernels/Expm1';\nexport { floorImpl } from './kernels/Floor';\nexport { logImpl } from './kernels/Log';\nexport { maxImpl } from './kernels/Max_impl';\nexport { multiplyImpl } from './kernels/Multiply';\nexport { notEqualImpl } from './kernels/NotEqual';\nexport { rsqrtImpl } from './kernels/Rsqrt';\nexport { sliceImpl } from './kernels/Slice';\nexport { squaredDifferenceImpl } from './kernels/SquaredDifference';\nexport { subImpl } from './kernels/Sub';\nexport { transposeImpl } from './kernels/Transpose_impl';\nexport { uniqueImpl } from './kernels/Unique_impl';\n//# sourceMappingURL=shared.js.map", "/** @license See the LICENSE file. */\n// This code is auto-generated, do not modify this file!\nconst version = '2.7.0';\nexport { version };\n//# sourceMappingURL=version.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/*\n * base.ts contains all the exports from tfjs-backend-cpu\n * without auto-kernel registration\n */\nimport { registerBackend } from '@tensorflow/tfjs-core';\nimport { MathBackendCPU } from './backend_cpu';\nimport * as shared from './shared';\nexport { MathBackendCPU } from './backend_cpu';\nexport { version as version_cpu } from './version';\nexport { shared };\n// Side effects for default initialization of MathBackendCPU\nregisterBackend('cpu', () => new MathBackendCPU(), 1 /* priority */);\n//# sourceMappingURL=base.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Elu } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const elu = unaryKernelFunc(Elu, (xi) => xi >= 0 ? xi : (Math.exp(xi) - 1));\nexport const eluConfig = {\n kernelName: Elu,\n backendName: 'cpu',\n kernelFunc: elu,\n};\n//# sourceMappingURL=Elu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Prelu } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { createSimpleBinaryKernelImpl } from '../utils/binary_impl';\nconst preluImpl = createSimpleBinaryKernelImpl((xValue, aValue) => xValue < 0 ? aValue * xValue : xValue);\nexport function prelu(args) {\n const { inputs, backend } = args;\n const { x, alpha } = inputs;\n assertNotComplex([x, alpha], 'prelu');\n const aVals = backend.data.get(x.dataId).values;\n const bVals = backend.data.get(alpha.dataId).values;\n const [resultData, resultShape] = preluImpl(x.shape, alpha.shape, aVals, bVals, x.dtype);\n return backend.makeTensorInfo(resultShape, x.dtype, resultData);\n}\nexport const preluConfig = {\n kernelName: Prelu,\n backendName: 'cpu',\n kernelFunc: prelu,\n};\n//# sourceMappingURL=Prelu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Relu } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const relu = unaryKernelFunc(Relu, (xi) => Math.max(0, xi));\nexport const reluConfig = {\n kernelName: Relu,\n backendName: 'cpu',\n kernelFunc: relu,\n};\n//# sourceMappingURL=Relu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Relu6 } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const relu6 = unaryKernelFunc(Relu6, (xi) => Math.min(Math.max(0, xi), 6));\nexport const relu6Config = {\n kernelName: Relu6,\n backendName: 'cpu',\n kernelFunc: relu6,\n};\n//# sourceMappingURL=Relu6.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { elu } from '../kernels/Elu';\nimport { identity } from '../kernels/Identity';\nimport { prelu } from '../kernels/Prelu';\nimport { relu } from '../kernels/Relu';\nimport { relu6 } from '../kernels/Relu6';\nexport function applyActivation(backend, x, activation, preluActivationWeights) {\n if (activation === 'linear') {\n return identity({ inputs: { x }, backend });\n }\n else if (activation === 'relu') {\n return relu({ inputs: { x }, backend });\n }\n else if (activation === 'elu') {\n return elu({ inputs: { x }, backend });\n }\n else if (activation === 'relu6') {\n return relu6({ inputs: { x }, backend });\n }\n else if (activation === 'prelu') {\n return prelu({ inputs: { x, alpha: preluActivationWeights }, backend });\n }\n throw new Error(`Activation ${activation} has not been implemented for the CPU backend.`);\n}\n//# sourceMappingURL=fused_utils.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Reshape, util } from '@tensorflow/tfjs-core';\nexport function reshape(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n const { shape } = attrs;\n const xSize = util.sizeFromShape(x.shape);\n const $shape = util.inferFromImplicitShape(shape, xSize);\n const $xSize = util.sizeFromShape($shape);\n util.assert(xSize === $xSize, () => `The new shape (${$shape}) has ${$xSize} elements and the old ` +\n `shape (${x.shape}) has ${xSize} elements. The new shape and old ` +\n `shape must have the same number of elements.`);\n backend.incRef(x.dataId);\n const xData = backend.data.get(x.dataId);\n if (xData.complexTensorInfos != null) {\n const real = xData.complexTensorInfos.real;\n const imag = xData.complexTensorInfos.imag;\n real.shape = $shape;\n imag.shape = $shape;\n }\n return { dataId: x.dataId, shape: $shape, dtype: x.dtype };\n}\nexport const reshapeConfig = {\n kernelName: Reshape,\n backendName: 'cpu',\n kernelFunc: reshape\n};\n//# sourceMappingURL=Reshape.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { BatchMatMul, buffer, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { reshape } from './Reshape';\nexport function batchMatMul(args) {\n const { inputs, backend, attrs } = args;\n const { a, b } = inputs;\n const { transposeA, transposeB } = attrs;\n assertNotComplex([a, b], 'matMul');\n const aRank = a.shape.length;\n const bRank = b.shape.length;\n const innerShapeA = transposeA ? a.shape[aRank - 2] : a.shape[aRank - 1];\n const innerShapeB = transposeB ? b.shape[bRank - 1] : b.shape[bRank - 2];\n const outerShapeA = transposeA ? a.shape[aRank - 1] : a.shape[aRank - 2];\n const outerShapeB = transposeB ? b.shape[bRank - 2] : b.shape[bRank - 1];\n const outerDimsA = a.shape.slice(0, -2);\n const outerDimsB = b.shape.slice(0, -2);\n const batchDimA = util.sizeFromShape(outerDimsA);\n const batchDimB = util.sizeFromShape(outerDimsB);\n const batchDimsCompatible = batchDimA === batchDimB || batchDimA === 1 || batchDimB === 1;\n util.assert(aRank >= 2 && bRank >= 2 && batchDimsCompatible, () => `Error in matMul: the input batch dimensions must either be the ` +\n `same or at least one input batch dimension must be 1. Got input ` +\n `batch dimensions of (${outerDimsA}) and (${outerDimsB}).`);\n const outShapeOuterDims = batchDimA > batchDimB ? a.shape.slice(0, -2) : b.shape.slice(0, -2);\n const outShape = outShapeOuterDims.concat([outerShapeA, outerShapeB]);\n util.assert(innerShapeA === innerShapeB, () => `Error in matMul: inner shapes (${innerShapeA}) and (` +\n `${innerShapeB}) of Tensors with shapes ${a.shape} and ` +\n `${b.shape} and transposeA=${transposeA}` +\n ` and transposeB=${transposeB} must match.`);\n const a3dShape = transposeA ? [batchDimA, innerShapeA, outerShapeA] :\n [batchDimA, outerShapeA, innerShapeA];\n const b3dShape = transposeB ? [batchDimB, outerShapeB, innerShapeB] :\n [batchDimB, innerShapeB, outerShapeB];\n // The rest of the implementation is designed to operate on rank-3 tensors\n const a3d = reshape({ inputs: { x: a }, backend, attrs: { shape: a3dShape } });\n const b3d = reshape({ inputs: { x: b }, backend, attrs: { shape: b3dShape } });\n const sharedDim = transposeA ? a3d.shape[1] : a3d.shape[2];\n const leftDim = transposeA ? a3d.shape[2] : a3d.shape[1];\n const rightDim = transposeB ? 
b3d.shape[1] : b3d.shape[2];\n const batchDim = Math.max(batchDimA, batchDimB);\n const a3dValues = backend.data.get(a3d.dataId).values;\n const b3dValues = backend.data.get(b3d.dataId).values;\n const a3dStrides = util.computeStrides(a3d.shape);\n const b3dStrides = util.computeStrides(b3d.shape);\n const [aBatch, aOuterStep, aInnerStep] = transposeA ?\n [a3dStrides[0], 1, a3dStrides[1]] :\n [a3dStrides[0], a3dStrides[1], 1];\n const [bInnerStep, bOuterStep, bBatch] = transposeB ?\n [1, b3dStrides[1], b3dStrides[0]] :\n [b3dStrides[1], 1, b3dStrides[0]];\n const size = leftDim * rightDim;\n const result = buffer([batchDim, leftDim, rightDim], a3d.dtype);\n const resVals = result.values;\n const blockSize = backend.blockSize;\n for (let bi = 0; bi < batchDim; bi++) {\n for (let i0 = 0; i0 < leftDim; i0 += blockSize) {\n for (let j0 = 0; j0 < rightDim; j0 += blockSize) {\n for (let k0 = 0; k0 < sharedDim; k0 += blockSize) {\n // for when blockSize doesn't evenly divide the input\n const iBlock = Math.min(i0 + blockSize, leftDim);\n const jBlock = Math.min(j0 + blockSize, rightDim);\n const kBlock = Math.min(k0 + blockSize, sharedDim);\n for (let i = i0; i < iBlock; i++) {\n for (let j = j0; j < jBlock; j++) {\n let sum = 0.0;\n for (let k = k0; k < kBlock; k++) {\n const batchOffsetA = Math.min(bi, batchDimA - 1) * aBatch;\n const batchOffsetB = Math.min(bi, batchDimB - 1) * bBatch;\n const aVal = a3dValues[batchOffsetA + i * aOuterStep + k * aInnerStep];\n const bVal = b3dValues[k * bInnerStep + j * bOuterStep + batchOffsetB];\n sum += aVal * bVal;\n }\n resVals[bi * size + (i * rightDim + j)] += sum;\n }\n }\n }\n }\n }\n }\n backend.disposeIntermediateTensorInfo(a3d);\n backend.disposeIntermediateTensorInfo(b3d);\n // set correct shape on output.\n return backend.makeTensorInfo(outShape, result.dtype, result.values);\n}\nexport const batchMatMulConfig = {\n kernelName: BatchMatMul,\n backendName: 'cpu',\n kernelFunc: batchMatMul,\n};\n//# sourceMappingURL=BatchMatMul.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { _FusedMatMul } from '@tensorflow/tfjs-core';\nimport { applyActivation } from '../utils/fused_utils';\nimport { add } from './Add';\nimport { batchMatMul } from './BatchMatMul';\nexport function _fusedMatMul(args) {\n const { inputs, backend, attrs } = args;\n const { a, b, bias, preluActivationWeights } = inputs;\n const { transposeA, transposeB, activation } = attrs;\n let current;\n let addRes;\n let activationRes;\n const intermediates = [];\n const matMulRes = batchMatMul({ inputs: { a, b }, attrs: { transposeA, transposeB }, backend });\n current = matMulRes;\n if (bias) {\n addRes = add({ inputs: { a: current, b: bias }, backend });\n intermediates.push(current);\n current = addRes;\n }\n if (activation) {\n activationRes =\n applyActivation(backend, current, activation, preluActivationWeights);\n intermediates.push(current);\n current = activationRes;\n }\n for (const i of intermediates) {\n backend.disposeIntermediateTensorInfo(i);\n }\n return current;\n}\nexport const _fusedMatMulConfig = {\n kernelName: _FusedMatMul,\n backendName: 'cpu',\n kernelFunc: _fusedMatMul,\n};\n//# sourceMappingURL=_FusedMatMul.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Acos } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const acos = unaryKernelFunc(Acos, (xi) => Math.acos(xi));\nexport const acosConfig = {\n kernelName: Acos,\n backendName: 'cpu',\n kernelFunc: acos,\n};\n//# sourceMappingURL=Acos.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Acosh } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const acosh = unaryKernelFunc(Acosh, (xi) => Math.acosh(xi));\nexport const acoshConfig = {\n kernelName: Acosh,\n backendName: 'cpu',\n kernelFunc: acosh,\n};\n//# sourceMappingURL=Acosh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Asin } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const asin = unaryKernelFunc(Asin, (xi) => Math.asin(xi));\nexport const asinConfig = {\n kernelName: Asin,\n backendName: 'cpu',\n kernelFunc: asin,\n};\n//# sourceMappingURL=Asin.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Asinh } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const asinh = unaryKernelFunc(Asinh, (xi) => Math.asinh(xi));\nexport const asinhConfig = {\n kernelName: Asinh,\n backendName: 'cpu',\n kernelFunc: asinh,\n};\n//# sourceMappingURL=Asinh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Atan } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const atan = unaryKernelFunc(Atan, (xi) => Math.atan(xi));\nexport const atanConfig = {\n kernelName: Atan,\n backendName: 'cpu',\n kernelFunc: atan,\n};\n//# sourceMappingURL=Atan.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Atanh } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const atanh = unaryKernelFunc(Atanh, (xi) => Math.atanh(xi));\nexport const atanhConfig = {\n kernelName: Atanh,\n backendName: 'cpu',\n kernelFunc: atanh,\n};\n//# sourceMappingURL=Atanh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { buffer } from '@tensorflow/tfjs-core';\nexport function pool(xValues, xShape, dtype, strides, convInfo, poolType) {\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n const initialValue = (poolType === 'max' ? 
Number.NEGATIVE_INFINITY :\n Number.POSITIVE_INFINITY);\n const output = buffer(convInfo.outShape, dtype);\n const outputVals = output.values;\n const outputBatchStrides = convInfo.outShape[1] * convInfo.outShape[2] * convInfo.outShape[3];\n const outputRowStrides = convInfo.outShape[2] * convInfo.outShape[3];\n const outputColStrides = convInfo.outShape[3];\n for (let b = 0; b < convInfo.batchSize; ++b) {\n const outputBatchOffset = b * outputBatchStrides;\n const inputBatchOffset = b * strides[0];\n for (let d = 0; d < convInfo.inChannels; ++d) {\n for (let yR = 0; yR < convInfo.outHeight; ++yR) {\n const xRCorner = yR * strideHeight - padTop;\n const xRMin = Math.max(0, xRCorner);\n const xRMax = Math.min(convInfo.inHeight, effectiveFilterHeight + xRCorner);\n const outputRowOffset = outputBatchOffset + yR * outputRowStrides;\n for (let yC = 0; yC < convInfo.outWidth; ++yC) {\n const xCCorner = yC * strideWidth - padLeft;\n const xCMin = Math.max(0, xCCorner);\n const xCMax = Math.min(convInfo.inWidth, effectiveFilterWidth + xCCorner);\n let minMaxValue = initialValue;\n let avgValue = 0;\n let count = 0;\n for (let xR = xRMin; xR < xRMax; xR += dilationHeight) {\n const xROffset = inputBatchOffset + xR * strides[1];\n for (let xC = xCMin; xC < xCMax; xC += dilationWidth) {\n const xCOffset = xROffset + xC * strides[2];\n const pixel = xValues[xCOffset + d];\n if ((poolType === 'max' && pixel > minMaxValue)) {\n minMaxValue = pixel;\n }\n else if (poolType === 'avg') {\n avgValue += pixel;\n count++;\n }\n }\n if (isNaN(minMaxValue)) {\n break;\n }\n }\n const outputOffset = outputRowOffset + yC * outputColStrides + d;\n outputVals[outputOffset] =\n poolType === 'avg' ? avgValue / count : minMaxValue;\n }\n }\n }\n }\n return output;\n}\nexport function maxPoolPositions(xValues, xShape, dtype, convInfo, flattenPositions = false, includeBatchInIndex = false) {\n const maxPositions = buffer(convInfo.outShape, 'int32');\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n const xBuf = buffer(xShape, dtype, xValues);\n for (let b = 0; b < convInfo.batchSize; ++b) {\n for (let d = 0; d < convInfo.inChannels; ++d) {\n for (let yR = 0; yR < convInfo.outHeight; ++yR) {\n const xRCorner = yR * strideHeight - padTop;\n let xRMin = xRCorner;\n while (xRMin < 0) {\n xRMin += dilationHeight;\n }\n // const xRMin = Math.max(0, xRCorner);\n const xRMax = Math.min(convInfo.inHeight, effectiveFilterHeight + xRCorner);\n for (let yC = 0; yC < convInfo.outWidth; ++yC) {\n const xCCorner = yC * strideWidth - padLeft;\n let xCMin = xCCorner;\n while (xCMin < 0) {\n xCMin += dilationWidth;\n }\n const xCMax = Math.min(convInfo.inWidth, effectiveFilterWidth + xCCorner);\n let maxValue = Number.NEGATIVE_INFINITY;\n let maxPosition = -1;\n for (let xR = xRMin; xR < xRMax; xR += dilationHeight) {\n const wR = xR - xRCorner;\n for (let xC = xCMin; xC < xCMax; xC += dilationWidth) {\n const wC = xC - xCCorner;\n const pixel = xBuf.get(b, xR, xC, d);\n if (pixel > maxValue) {\n maxValue = pixel;\n if (flattenPositions) {\n maxPosition = includeBatchInIndex ?\n ((b * convInfo.inHeight + xR) * convInfo.inWidth + xC) *\n convInfo.inChannels +\n d :\n (xR * convInfo.inWidth + xC) * 
convInfo.inChannels + d;\n }\n else {\n maxPosition = wR * effectiveFilterWidth + wC;\n }\n }\n }\n }\n maxPositions.set(maxPosition, b, yR, yC, d);\n }\n }\n }\n }\n return maxPositions;\n}\n//# sourceMappingURL=pool_utils.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { AvgPool, backend_util, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { pool } from '../utils/pool_utils';\nimport { identity } from './Identity';\nexport function avgPool(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n assertNotComplex(x, 'avgPool');\n const { filterSize, strides, pad, dimRoundingMode } = attrs;\n const dilations = 1;\n util.assert(backend_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in avgPool: Either strides or dilations must be 1. ' +\n `Got strides ${strides} and dilations '${dilations}'`);\n const convInfo = backend_util.computePool2DInfo(x.shape, filterSize, strides, dilations, pad, dimRoundingMode);\n let res;\n if (convInfo.filterWidth === 1 && convInfo.filterHeight === 1 &&\n util.arraysEqual(convInfo.inShape, convInfo.outShape)) {\n res = identity({ inputs: { x }, backend });\n }\n else {\n const xValues = backend.data.get(x.dataId).values;\n const strides = util.computeStrides(x.shape);\n const buffer = pool(xValues, x.shape, x.dtype, strides, convInfo, 'avg');\n res = backend.makeTensorInfo(convInfo.outShape, x.dtype, buffer.values);\n }\n return res;\n}\nexport const avgPoolConfig = {\n kernelName: AvgPool,\n backendName: 'cpu',\n kernelFunc: avgPool\n};\n//# sourceMappingURL=AvgPool.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { AvgPoolBackprop, backend_util, buffer } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function avgPoolBackprop(args) {\n const { inputs, backend, attrs } = args;\n const { dy, input } = inputs;\n const x = input;\n assertNotComplex([dy, input], 'avgPoolBackprop');\n const { filterSize, strides, pad } = attrs;\n const convInfo = backend_util.computePool2DInfo(x.shape, filterSize, strides, 1 /* dilations */, pad);\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padLeft = effectiveFilterWidth - 1 - convInfo.padInfo.left;\n const padTop = effectiveFilterHeight - 1 - convInfo.padInfo.top;\n const dx = buffer(x.shape, 'float32');\n const avgMultiplier = 1 / (filterHeight * filterWidth);\n const dyData = backend.data.get(dy.dataId).values;\n const dyBuf = buffer(dy.shape, 'float32', dyData);\n for (let b = 0; b < convInfo.batchSize; ++b) {\n for (let d = 0; d < convInfo.inChannels; ++d) {\n for (let dxR = 0; dxR < convInfo.inHeight; ++dxR) {\n for (let dxC = 0; dxC < convInfo.inWidth; ++dxC) {\n // Shader code begins.\n const dyRCorner = dxR - padTop;\n const dyCCorner = dxC - padLeft;\n let dotProd = 0;\n for (let wR = 0; wR < effectiveFilterHeight; wR += dilationHeight) {\n const dyR = (dyRCorner + wR) / strideHeight;\n if (dyR < 0 || dyR >= convInfo.outHeight ||\n Math.floor(dyR) !== dyR) {\n continue;\n }\n for (let wC = 0; wC < effectiveFilterWidth; wC += dilationWidth) {\n const dyC = (dyCCorner + wC) / strideWidth;\n if (dyC < 0 || dyC >= convInfo.outWidth ||\n Math.floor(dyC) !== dyC) {\n continue;\n }\n const pixel = dyBuf.get(b, dyR, dyC, d);\n dotProd += pixel;\n }\n }\n dx.set(dotProd * avgMultiplier, b, dxR, dxC, d);\n }\n }\n }\n }\n return backend.makeTensorInfo(dx.shape, dx.dtype, dx.values);\n}\nexport const avgPoolBackpropConfig = {\n kernelName: AvgPoolBackprop,\n backendName: 'cpu',\n kernelFunc: avgPoolBackprop\n};\n//# sourceMappingURL=AvgPoolBackprop.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { FusedBatchNorm, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function batchNorm(args) {\n const { inputs, backend, attrs } = args;\n const { x, scale, offset, mean, variance } = inputs;\n util.assert(mean.shape.length === variance.shape.length, () => 'Batch normalization gradient requires mean and variance to have ' +\n 'equal ranks.');\n util.assert(offset == null || mean.shape.length === offset.shape.length, () => 'Batch normalization gradient requires mean and offset to have ' +\n 'equal ranks.');\n util.assert(scale == null || mean.shape.length === scale.shape.length, () => 'Batch normalization gradient requires mean and scale to have ' +\n 'equal ranks.');\n assertNotComplex([x, mean, variance, scale, offset], 'batchNorm');\n let { varianceEpsilon } = attrs;\n if (varianceEpsilon == null) {\n varianceEpsilon = 0.001;\n }\n const xVals = backend.data.get(x.dataId).values;\n const mVals = backend.data.get(mean.dataId).values;\n const varVals = backend.data.get(variance.dataId).values;\n const sVals = scale ? backend.data.get(scale.dataId).values :\n new Float32Array([1]);\n const offVals = offset ?\n backend.data.get(offset.dataId).values :\n new Float32Array([0]);\n const outVals = new Float32Array(xVals.length);\n const offValsLength = offVals.length;\n const sValsLength = sVals.length;\n const varValsLength = varVals.length;\n const mValsLength = mVals.length;\n let offi = 0;\n let mi = 0;\n let si = 0;\n let vi = 0;\n for (let i = 0; i < xVals.length; ++i) {\n outVals[i] = offVals[offi++] +\n (xVals[i] - mVals[mi++]) * sVals[si++] /\n Math.sqrt(varVals[vi++] + varianceEpsilon);\n if (offi >= offValsLength) {\n offi = 0;\n }\n if (mi >= mValsLength) {\n mi = 0;\n }\n if (si >= sValsLength) {\n si = 0;\n }\n if (vi >= varValsLength) {\n vi = 0;\n }\n }\n return backend.makeTensorInfo(x.shape, x.dtype, outVals);\n}\nexport const batchNormConfig = {\n kernelName: FusedBatchNorm,\n backendName: 'cpu',\n kernelFunc: batchNorm,\n};\n//# sourceMappingURL=BatchNorm.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ClipByValue } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const clip = unaryKernelFunc(ClipByValue, (xi, attrs) => {\n const clipAttrs = attrs;\n if (xi > clipAttrs.clipValueMax) {\n return clipAttrs.clipValueMax;\n }\n return xi < clipAttrs.clipValueMin ? clipAttrs.clipValueMin : xi;\n});\nexport const clipConfig = {\n kernelName: ClipByValue,\n backendName: 'cpu',\n kernelFunc: clip,\n};\n//# sourceMappingURL=Clip.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Imag } from '@tensorflow/tfjs-core';\nexport function imag(args) {\n const { inputs, backend } = args;\n const { input } = inputs;\n const imag = backend.data.get(input.dataId).complexTensorInfos.imag;\n const imagVal = backend.data.get(imag.dataId).values;\n // When complex tensor is disposed, its underlying parts will be disposed too.\n // Make new tensor out of the imag value of the complex. This makes sure the\n // value is still accessible even if complex tensor is disposed.\n return backend.makeTensorInfo(imag.shape, imag.dtype, imagVal);\n}\nexport const imagConfig = {\n kernelName: Imag,\n backendName: 'cpu',\n kernelFunc: imag\n};\n//# sourceMappingURL=Imag.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Concat, util } from '@tensorflow/tfjs-core';\nimport { complex } from './Complex';\nimport { imag } from './Imag';\nimport { real } from './Real';\nimport { reshape } from './Reshape';\nexport function concat(args) {\n const { inputs, backend, attrs } = args;\n const { axis } = attrs;\n const $axis = util.parseAxisParam(axis, inputs[0].shape)[0];\n let outShape = backend_util.computeOutShape(inputs.map(t => t.shape), $axis);\n if (util.sizeFromShape(outShape) === 0) {\n return backend.makeTensorInfo(outShape, inputs[0].dtype, []);\n }\n // Keep only non-empty tensors (ignore tensors with 0 in their shape).\n const $inputs = inputs.filter(t => util.sizeFromShape(t.shape) > 0);\n if ($inputs.length === 1) {\n return $inputs[0];\n }\n const shapes = $inputs.map(t => t.shape);\n backend_util.assertParamsConsistent(shapes, $axis);\n if ($inputs[0].dtype === 'complex64') {\n const reals = $inputs.map((t) => real({ inputs: { input: t }, backend }));\n const imags = $inputs.map((t) => imag({ inputs: { input: t }, backend }));\n const realConcated = concat({ inputs: reals, backend, attrs: { axis: $axis } });\n const imagConcated = concat({ inputs: imags, backend, attrs: { axis: $axis } });\n const result = complex({ inputs: { real: realConcated, imag: imagConcated }, backend });\n reals.forEach(r => backend.disposeIntermediateTensorInfo(r));\n imags.forEach(i => backend.disposeIntermediateTensorInfo(i));\n backend.disposeIntermediateTensorInfo(realConcated);\n backend.disposeIntermediateTensorInfo(imagConcated);\n return result;\n }\n // Any concat of n-dimensional tensors across any axis can be reduced to\n // a concatenation of two-dimensional tensors across the axis 1 by first\n // partitioning the axes of the original tensors into those less than the\n // axis to be concatenated and the rest. 
Then reshape the tensors\n // into a two-dimensional tensor by collapsing these two sets of axes and\n // concatenate the resulting matrices across the axis 1, finally reshaping\n // the result to have the proper shape.\n const inputs2D = $inputs.map(t => {\n const innerSize = util.sizeFromShape(t.shape.slice($axis));\n const shape = [-1, innerSize];\n return reshape({ inputs: { x: t }, backend, attrs: { shape } });\n });\n // Concats 2d tensors along axis=1.\n outShape =\n backend_util.computeOutShape(inputs2D.map(t => t.shape), 1 /* axis */);\n const outVals = util.getTypedArrayFromDType($inputs[0].dtype, util.sizeFromShape(outShape));\n if (inputs2D[0].shape[0] === 1) {\n // Use built-in TypedArray.set() method for speed.\n let offset = 0;\n inputs2D.forEach(t => {\n const val = backend.data.get(t.dataId).values;\n const size = util.sizeFromShape(t.shape);\n outVals.set(val, offset);\n offset += size;\n });\n }\n else {\n let colOffset = 0;\n inputs2D.forEach(t => {\n const tVals = backend.data.get(t.dataId).values;\n let tIdx = 0;\n for (let row = 0; row < t.shape[0]; ++row) {\n const resIdx = row * outShape[1] + colOffset;\n for (let col = 0; col < t.shape[1]; ++col) {\n outVals[resIdx + col] = tVals[tIdx++];\n }\n }\n colOffset += t.shape[1];\n });\n }\n const finalOutShape = backend_util.computeOutShape($inputs.map(t => t.shape), $axis);\n const outInfo = backend.makeTensorInfo(finalOutShape, inputs[0].dtype, outVals);\n inputs2D.forEach(t => backend.disposeIntermediateTensorInfo(t));\n return outInfo;\n}\nexport const concatConfig = {\n kernelName: Concat,\n backendName: 'cpu',\n kernelFunc: concat\n};\n//# sourceMappingURL=Concat.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Conv2D, TensorBuffer, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function conv2D(args) {\n const { inputs, backend, attrs } = args;\n const { x, filter } = inputs;\n const { strides, pad, dataFormat, dilations, dimRoundingMode } = attrs;\n assertNotComplex([x, filter], 'conv2d');\n const $dataFormat = backend_util.convertConv2DDataFormat(dataFormat);\n const convInfo = backend_util.computeConv2DInfo(x.shape, filter.shape, strides, dilations, pad, dimRoundingMode, false /* depthwise */, $dataFormat);\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const padLeft = convInfo.padInfo.left;\n const padTop = convInfo.padInfo.top;\n const isChannelsLast = convInfo.dataFormat === 'channelsLast';\n const y = new TensorBuffer(convInfo.outShape, x.dtype);\n const xStrides = util.computeStrides(x.shape);\n const filterStrides = util.computeStrides(filter.shape);\n const xBatchStride = xStrides[0];\n const xRowStride = isChannelsLast ? 
xStrides[1] : xStrides[2];\n const xColStride = isChannelsLast ? xStrides[2] : 1;\n const xChannelStride = isChannelsLast ? 1 : xStrides[1];\n const yBatchStride = y.strides[0];\n const yRowStride = isChannelsLast ? y.strides[1] : y.strides[2];\n const yColStride = isChannelsLast ? y.strides[2] : 1;\n const yChannelStride = isChannelsLast ? 1 : y.strides[1];\n const xVals = backend.data.get(x.dataId).values;\n const wVals = backend.data.get(filter.dataId).values;\n const yVals = y.values;\n for (let b = 0; b < convInfo.batchSize; ++b) {\n const xOffset1 = b * xBatchStride;\n const yOffset1 = b * yBatchStride;\n for (let yR = 0; yR < convInfo.outHeight; ++yR) {\n const yOffset2 = yOffset1 + yR * yRowStride;\n const xRCorner = yR * convInfo.strideHeight - padTop;\n for (let wR = 0; wR < filterHeight; ++wR) {\n const xR = xRCorner + wR * dilationHeight;\n if (xR < 0 || xR >= convInfo.inHeight) {\n continue;\n }\n const wOffset1 = wR * filterStrides[0];\n const xOffset2 = xOffset1 + xR * xRowStride;\n for (let yC = 0; yC < convInfo.outWidth; ++yC) {\n const yOffset3 = yOffset2 + yC * yColStride;\n const xCCorner = yC * convInfo.strideWidth - padLeft;\n for (let wC = 0; wC < filterWidth; ++wC) {\n const xC = xCCorner + wC * dilationWidth;\n if (xC < 0 || xC >= convInfo.inWidth) {\n continue;\n }\n const wOffset2 = wOffset1 + wC * filterStrides[1];\n const xOffset3 = xOffset2 + xC * xColStride;\n let wOffset3 = wOffset2;\n for (let d1 = 0; d1 < convInfo.inChannels; ++d1) {\n const xVal = xVals[xOffset3 + d1 * xChannelStride];\n for (let d2 = 0; d2 < convInfo.outChannels; ++d2) {\n yVals[yOffset3 + d2 * yChannelStride] +=\n xVal * wVals[wOffset3 + d2];\n }\n wOffset3 += convInfo.outChannels;\n }\n }\n }\n }\n }\n }\n return backend.makeTensorInfo(y.shape, y.dtype, yVals);\n}\nexport const conv2DConfig = {\n kernelName: Conv2D,\n backendName: 'cpu',\n kernelFunc: conv2D\n};\n//# sourceMappingURL=Conv2D.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Conv2DBackpropFilter, TensorBuffer } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function conv2DBackpropFilter(args) {\n const { inputs, backend, attrs } = args;\n const { x, dy } = inputs;\n const { strides, pad, dataFormat, dimRoundingMode, filterShape } = attrs;\n assertNotComplex([x, dy], 'conv2dBackpropFilter');\n const $dataFormat = backend_util.convertConv2DDataFormat(dataFormat);\n const convInfo = backend_util.computeConv2DInfo(x.shape, filterShape, strides, 1 /* dilations */, pad, dimRoundingMode, false /* depthwise */, $dataFormat);\n const { strideHeight, strideWidth, filterHeight, filterWidth } = convInfo;\n const isChannelsLast = convInfo.dataFormat === 'channelsLast';\n const dW = new TensorBuffer(convInfo.filterShape, 'float32');\n const leftPad = convInfo.padInfo.left;\n const topPad = convInfo.padInfo.top;\n const xVals = backend.data.get(x.dataId).values;\n const dyVals = backend.data.get(dy.dataId).values;\n const xBuf = new TensorBuffer(x.shape, x.dtype, xVals);\n const dyBuf = new TensorBuffer(dy.shape, dy.dtype, dyVals);\n for (let wR = 0; wR < filterHeight; ++wR) {\n const yRMin = Math.max(0, Math.ceil((topPad - wR) / strideHeight));\n const yRMax = Math.min(convInfo.outHeight, (convInfo.inHeight + topPad - wR) / strideHeight);\n for (let wC = 0; wC < filterWidth; ++wC) {\n const yCMin = Math.max(0, Math.ceil((leftPad - wC) / strideWidth));\n const yCMax = Math.min(convInfo.outWidth, (convInfo.inWidth + leftPad - wC) / strideWidth);\n for (let d1 = 0; d1 < convInfo.inChannels; ++d1) {\n for (let d2 = 0; d2 < convInfo.outChannels; ++d2) {\n let dotProd = 0;\n for (let b = 0; b < convInfo.batchSize; ++b) {\n for (let yR = yRMin; yR < yRMax; ++yR) {\n const xR = wR + yR * strideHeight - topPad;\n for (let yC = yCMin; yC < yCMax; ++yC) {\n const xC = wC + yC * strideWidth - leftPad;\n if (isChannelsLast) {\n dotProd += xBuf.get(b, xR, xC, d1) *\n dyBuf.get(b, yR, yC, d2);\n }\n else {\n dotProd += xBuf.get(b, d1, xR, xC) *\n dyBuf.get(b, d2, yR, yC);\n }\n }\n }\n }\n dW.set(dotProd, wR, wC, d1, d2);\n }\n }\n }\n }\n return backend.makeTensorInfo(dW.shape, dW.dtype, dW.values);\n}\nexport const conv2DBackpropFilterConfig = {\n kernelName: Conv2DBackpropFilter,\n backendName: 'cpu',\n kernelFunc: conv2DBackpropFilter\n};\n//# sourceMappingURL=Conv2DBackpropFilter.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Conv2DBackpropInput, TensorBuffer, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function conv2DBackpropInput(args) {\n const { inputs, backend, attrs } = args;\n const { dy, filter } = inputs;\n const { inputShape, strides, pad, dataFormat, dimRoundingMode } = attrs;\n assertNotComplex([dy, filter], 'conv2dBackpropInput');\n const filterStrides = util.computeStrides(filter.shape);\n const dyStrides = util.computeStrides(dy.shape);\n let $dataFormat = backend_util.convertConv2DDataFormat(dataFormat);\n const convInfo = backend_util.computeConv2DInfo(inputShape, filter.shape, strides, 1 /* dilations */, pad, dimRoundingMode, false, $dataFormat);\n const dx = new TensorBuffer(convInfo.inShape, 'float32');\n const dxValues = dx.values;\n const dyValues = backend.data.get(dy.dataId).values;\n const fltValues = backend.data.get(filter.dataId).values;\n const [fltS0, fltS1, fltS2] = filterStrides;\n const { batchSize, filterHeight, filterWidth, inChannels, inHeight, inWidth, outChannels, outHeight, outWidth, strideHeight, strideWidth } = convInfo;\n $dataFormat = convInfo.dataFormat;\n const topPad = filterHeight - 1 - convInfo.padInfo.top;\n const leftPad = filterWidth - 1 - convInfo.padInfo.left;\n const isChannelsLast = $dataFormat === 'channelsLast';\n const xBatchStride = dx.strides[0];\n const xRowStride = isChannelsLast ? dx.strides[1] : dx.strides[2];\n const xColStride = isChannelsLast ? dx.strides[2] : 1;\n const xChannelStride = isChannelsLast ? 1 : dx.strides[1];\n const yBatchStride = dyStrides[0];\n const yRowStride = isChannelsLast ? dyStrides[1] : dyStrides[2];\n const yColStride = isChannelsLast ? dyStrides[2] : 1;\n const yChannelStride = isChannelsLast ? 
1 : dyStrides[1];\n for (let b = 0; b < batchSize; ++b) {\n for (let d1 = 0; d1 < inChannels; ++d1) {\n for (let xR = 0; xR < inHeight; ++xR) {\n const xRCorner = xR - topPad;\n const xRMin = Math.max(0, Math.ceil(xRCorner / strideHeight));\n const yRMax = Math.min(outHeight, (filterHeight + xRCorner) / strideHeight);\n for (let xC = 0; xC < inWidth; ++xC) {\n const xCCorner = xC - leftPad;\n const xCMin = Math.max(0, Math.ceil(xCCorner / strideWidth));\n const yCMax = Math.min(outWidth, (filterWidth + xCCorner) / strideWidth);\n let dotProd = 0;\n for (let yR = xRMin; yR < yRMax; ++yR) {\n const wR = yR * strideHeight - xRCorner;\n for (let yC = xCMin; yC < yCMax; ++yC) {\n const wC = yC * strideWidth - xCCorner;\n const dyOffset = yBatchStride * b + yRowStride * yR + yColStride * yC;\n const fltOffset = fltS0 * (filterHeight - 1 - wR) +\n fltS1 * (filterWidth - 1 - wC) + fltS2 * d1;\n for (let d2 = 0; d2 < outChannels; ++d2) {\n const pixel = dyValues[dyOffset + yChannelStride * d2];\n const weight = fltValues[fltOffset + d2];\n dotProd += pixel * weight;\n }\n }\n }\n const dxOffset = xBatchStride * b + xRowStride * xR +\n xColStride * xC + xChannelStride * d1;\n dxValues[dxOffset] = dotProd;\n }\n }\n }\n }\n return backend.makeTensorInfo(dx.shape, dx.dtype, dx.values);\n}\nexport const conv2DBackpropInputConfig = {\n kernelName: Conv2DBackpropInput,\n backendName: 'cpu',\n kernelFunc: conv2DBackpropInput\n};\n//# sourceMappingURL=Conv2DBackpropInput.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Conv3D, TensorBuffer, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function conv3D(args) {\n const { inputs, backend, attrs } = args;\n const { x, filter } = inputs;\n const { strides, pad, dilations } = attrs;\n assertNotComplex([x, filter], 'conv3d');\n const convInfo = backend_util.computeConv3DInfo(x.shape, filter.shape, strides, dilations, pad);\n const { filterDepth, filterHeight, filterWidth, dilationDepth, dilationHeight, dilationWidth, padInfo } = convInfo;\n const padFront = padInfo.front;\n const padLeft = padInfo.left;\n const padTop = padInfo.top;\n const y = new TensorBuffer(convInfo.outShape, x.dtype);\n const xVals = backend.data.get(x.dataId).values;\n const wVals = backend.data.get(filter.dataId).values;\n const yVals = y.values;\n const xStrides = util.computeStrides(x.shape);\n const filterStrides = util.computeStrides(filter.shape);\n for (let b = 0; b < convInfo.batchSize; ++b) {\n const xOffset1 = b * xStrides[0];\n const yOffset1 = b * y.strides[0];\n for (let yF = 0; yF < convInfo.outDepth; ++yF) {\n const yOffset2 = yOffset1 + yF * y.strides[1];\n const xFCorner = yF * convInfo.strideDepth - padFront;\n for (let wF = 0; wF < filterDepth; ++wF) {\n const xF = xFCorner + wF * dilationDepth;\n if (xF < 0 || xF >= 
convInfo.inDepth) {\n continue;\n }\n const wOffset1 = wF * filterStrides[0];\n const xOffset2 = xOffset1 + xF * xStrides[1];\n for (let yR = 0; yR < convInfo.outHeight; ++yR) {\n const yOffset3 = yOffset2 + yR * y.strides[2];\n const xRCorner = yR * convInfo.strideHeight - padTop;\n for (let wR = 0; wR < filterHeight; ++wR) {\n const xR = xRCorner + wR * dilationHeight;\n if (xR < 0 || xR >= convInfo.inHeight) {\n continue;\n }\n const wOffset2 = wOffset1 + wR * filterStrides[1];\n const xOffset3 = xOffset2 + xR * xStrides[2];\n for (let yC = 0; yC < convInfo.outWidth; ++yC) {\n const yOffset4 = yOffset3 + yC * convInfo.outChannels;\n const xCCorner = yC * convInfo.strideWidth - padLeft;\n for (let wC = 0; wC < filterWidth; ++wC) {\n const xC = xCCorner + wC * dilationWidth;\n if (xC < 0 || xC >= convInfo.inWidth) {\n continue;\n }\n const wOffset3 = wOffset2 + wC * filterStrides[2];\n const xOffset4 = xOffset3 + xC * convInfo.inChannels;\n let wOffset4 = wOffset3;\n for (let d1 = 0; d1 < convInfo.inChannels; ++d1) {\n const xVal = xVals[xOffset4 + d1];\n for (let d2 = 0; d2 < convInfo.outChannels; ++d2) {\n yVals[yOffset4 + d2] += xVal * wVals[wOffset4 + d2];\n }\n wOffset4 += convInfo.outChannels;\n }\n }\n }\n }\n }\n }\n }\n }\n return backend.makeTensorInfo(y.shape, y.dtype, y.values);\n}\nexport const conv3DConfig = {\n kernelName: Conv3D,\n backendName: 'cpu',\n kernelFunc: conv3D\n};\n//# sourceMappingURL=Conv3D.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Conv3DBackpropFilterV2, TensorBuffer, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function conv3DBackpropFilterV2(args) {\n const { inputs, backend, attrs } = args;\n const { x, dy } = inputs;\n const { strides, pad, filterShape } = attrs;\n assertNotComplex([x, dy], 'conv3dBackpropFilterV2');\n const xStrides = util.computeStrides(x.shape);\n const dyStrides = util.computeStrides(dy.shape);\n const convInfo = backend_util.computeConv3DInfo(x.shape, filterShape, strides, 1 /* dilations */, pad);\n const strideDepth = convInfo.strideDepth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const filterDepth = convInfo.filterDepth;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const dw = new TensorBuffer(convInfo.filterShape, 'float32');\n const dwValues = dw.values;\n const [dwS0, dwS1, dwS2, dwS3] = dw.strides;\n const dyValues = backend.data.get(dy.dataId).values;\n const [dyS0, dyS1, dyS2, dyS3] = dyStrides;\n const xValues = backend.data.get(x.dataId).values;\n const [xS0, xS1, xS2, xS3] = xStrides;\n const frontPad = convInfo.padInfo.front;\n const leftPad = convInfo.padInfo.left;\n const topPad = convInfo.padInfo.top;\n for (let wF = 0; wF < filterDepth; ++wF) {\n const yFMin = Math.max(0, 
Math.ceil((frontPad - wF) / strideDepth));\n const yFMax = Math.min(convInfo.outDepth, (convInfo.inDepth + frontPad - wF) / strideDepth);\n const wOffset1 = wF * dwS0;\n for (let wR = 0; wR < filterHeight; ++wR) {\n const yRMin = Math.max(0, Math.ceil((topPad - wR) / strideHeight));\n const yRMax = Math.min(convInfo.outHeight, (convInfo.inHeight + topPad - wR) / strideHeight);\n const wOffset2 = wR * dwS1 + wOffset1;\n for (let wC = 0; wC < filterWidth; ++wC) {\n const yCMin = Math.max(0, Math.ceil((leftPad - wC) / strideWidth));\n const yCMax = Math.min(convInfo.outWidth, (convInfo.inWidth + leftPad - wC) / strideWidth);\n const wOffset3 = wC * dwS2 + wOffset2;\n for (let d1 = 0; d1 < convInfo.inChannels; ++d1) {\n const wOffset4 = d1 * dwS3 + wOffset3;\n for (let d2 = 0; d2 < convInfo.outChannels; ++d2) {\n let dotProd = 0;\n for (let b = 0; b < convInfo.batchSize; ++b) {\n const xOffset1 = b * xS0;\n const yOffset1 = b * dyS0;\n for (let yF = yFMin; yF < yFMax; ++yF) {\n const xF = wF + yF * strideDepth - frontPad;\n const xOffset2 = xF * xS1 + xOffset1;\n const yOffset2 = yF * dyS1 + yOffset1;\n for (let yR = yRMin; yR < yRMax; ++yR) {\n const xR = wR + yR * strideHeight - topPad;\n const xOffset3 = xR * xS2 + xOffset2;\n const yOffset3 = yR * dyS2 + yOffset2;\n for (let yC = yCMin; yC < yCMax; ++yC) {\n const xC = wC + yC * strideWidth - leftPad;\n const xOffset4 = xC * xS3 + xOffset3;\n const yOffset4 = yC * dyS3 + yOffset3;\n dotProd += xValues[xOffset4 + d1] * dyValues[yOffset4 + d2];\n }\n }\n }\n }\n dwValues[wOffset4 + d2] = dotProd;\n }\n }\n }\n }\n }\n return backend.makeTensorInfo(dw.shape, dw.dtype, dw.values);\n}\nexport const conv3DBackpropFilterV2Config = {\n kernelName: Conv3DBackpropFilterV2,\n backendName: 'cpu',\n kernelFunc: conv3DBackpropFilterV2\n};\n//# sourceMappingURL=Conv3DBackpropFilterV2.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Conv3DBackpropInputV2, TensorBuffer, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function conv3DBackpropInputV2(args) {\n const { inputs, backend, attrs } = args;\n const { dy, filter } = inputs;\n const { pad, strides, inputShape } = attrs;\n assertNotComplex([dy], 'conv3dBackpropInputV2');\n const dyStrides = util.computeStrides(dy.shape);\n const filterStrides = util.computeStrides(filter.shape);\n const convInfo = backend_util.computeConv3DInfo(inputShape, filter.shape, strides, 1 /* dilations */, pad);\n const dx = new TensorBuffer(convInfo.inShape, 'float32');\n const dxValues = dx.values;\n const [dxS0, dxS1, dxS2, dxS3] = dx.strides;\n const dyValues = backend.data.get(dy.dataId).values;\n const [dyS0, dyS1, dyS2, dyS3] = dyStrides;\n const fltValues = backend.data.get(filter.dataId).values;\n const [fltS0, fltS1, fltS2, fltS3] = filterStrides;\n const { batchSize, filterDepth, filterHeight, filterWidth, inChannels, inDepth, inHeight, inWidth, outChannels, outDepth, outHeight, outWidth, strideDepth, strideHeight, strideWidth } = convInfo;\n const frontPad = filterDepth - 1 - convInfo.padInfo.front;\n const topPad = filterHeight - 1 - convInfo.padInfo.top;\n const leftPad = filterWidth - 1 - convInfo.padInfo.left;\n for (let b = 0; b < batchSize; ++b) {\n for (let d1 = 0; d1 < inChannels; ++d1) {\n // Frames of depth\n for (let xF = 0; xF < inDepth; ++xF) {\n const xFCorner = xF - frontPad;\n const xFMin = Math.max(0, Math.ceil(xFCorner / strideDepth));\n const yFMax = Math.min(outDepth, (filterDepth + xFCorner) / strideDepth);\n // Rows as per standard 2d matrix notation\n for (let xR = 0; xR < inHeight; ++xR) {\n const xRCorner = xR - topPad;\n const xRMin = Math.max(0, Math.ceil(xRCorner / strideHeight));\n const yRMax = Math.min(outHeight, (filterHeight + xRCorner) / strideHeight);\n // Columns as per standard 2d matrix notation\n for (let xC = 0; xC < inWidth; ++xC) {\n const xCCorner = xC - leftPad;\n const xCMin = Math.max(0, Math.ceil(xCCorner / strideWidth));\n const yCMax = Math.min(outWidth, (filterWidth + xCCorner) / strideWidth);\n let dotProd = 0;\n for (let yF = xFMin; yF < yFMax; ++yF) {\n const wF = yF * strideDepth - xFCorner;\n for (let yR = xRMin; yR < yRMax; ++yR) {\n const wR = yR * strideHeight - xRCorner;\n for (let yC = xCMin; yC < yCMax; ++yC) {\n const wC = yC * strideWidth - xCCorner;\n const dyOffset = dyS0 * b + dyS1 * yF + dyS2 * yR + dyS3 * yC;\n const fltOffset = fltS0 * (filterDepth - 1 - wF) +\n fltS1 * (filterHeight - 1 - wR) +\n fltS2 * (filterWidth - 1 - wC) + fltS3 * d1;\n for (let d2 = 0; d2 < outChannels; ++d2) {\n const pixel = dyValues[dyOffset + d2];\n const weight = fltValues[fltOffset + d2];\n dotProd += pixel * weight;\n }\n }\n }\n }\n dxValues[dxS0 * b + dxS1 * xF + dxS2 * xR + dxS3 * xC + d1] =\n 
dotProd;\n }\n }\n }\n }\n }\n return backend.makeTensorInfo(dx.shape, dx.dtype, dx.values);\n}\nexport const conv3DBackpropInputV2Config = {\n kernelName: Conv3DBackpropInputV2,\n backendName: 'cpu',\n kernelFunc: conv3DBackpropInputV2\n};\n//# sourceMappingURL=Conv3DBackpropInputV2.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Cos } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const cos = unaryKernelFunc(Cos, (xi) => Math.cos(xi));\nexport const cosConfig = {\n kernelName: Cos,\n backendName: 'cpu',\n kernelFunc: cos,\n};\n//# sourceMappingURL=Cos.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Cosh } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const cosh = unaryKernelFunc(Cosh, (xi) => Math.cosh(xi));\nexport const coshConfig = {\n kernelName: Cosh,\n backendName: 'cpu',\n kernelFunc: cosh,\n};\n//# sourceMappingURL=Cosh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, DepthwiseConv2dNative, TensorBuffer, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function depthwiseConv2dNative(args) {\n const { inputs, backend, attrs } = args;\n const { x, filter } = inputs;\n const { strides, pad, dilations, dimRoundingMode } = attrs;\n assertNotComplex([x, filter], 'depthwiseConv2DNative');\n const xStrides = util.computeStrides(x.shape);\n const filterStrides = util.computeStrides(filter.shape);\n let $dilations = dilations;\n if ($dilations == null) {\n $dilations = [1, 1];\n }\n util.assert(backend_util.eitherStridesOrDilationsAreOne(strides, $dilations), () => 'Error in depthwiseConv2d: Either strides or dilations must be ' +\n `1. Got strides ${strides} and dilations '${$dilations}'`);\n const convInfo = backend_util.computeConv2DInfo(x.shape, filter.shape, strides, $dilations, pad, dimRoundingMode, true /* depthwise */);\n const { filterHeight, filterWidth, dilationHeight, dilationWidth, padInfo } = convInfo;\n const padLeft = padInfo.left;\n const padTop = padInfo.top;\n const chMul = convInfo.outChannels / convInfo.inChannels;\n const y = new TensorBuffer(convInfo.outShape, x.dtype);\n const xVals = backend.data.get(x.dataId).values;\n const wVals = backend.data.get(filter.dataId).values;\n const yVals = y.values;\n for (let b = 0; b < convInfo.batchSize; ++b) {\n const xOffset1 = b * xStrides[0];\n const yOffset1 = b * y.strides[0];\n for (let yR = 0; yR < convInfo.outHeight; ++yR) {\n const yOffset2 = yOffset1 + yR * y.strides[1];\n const xRCorner = yR * convInfo.strideHeight - padLeft;\n for (let wR = 0; wR < filterHeight; ++wR) {\n const xR = xRCorner + wR * dilationHeight;\n if (xR < 0 || xR >= convInfo.inHeight) {\n continue;\n }\n const wOffset1 = wR * filterStrides[0];\n const xOffset2 = xOffset1 + xR * xStrides[1];\n for (let yC = 0; yC < convInfo.outWidth; ++yC) {\n const yOffset3 = yOffset2 + yC * y.strides[2];\n const xCCorner = yC * convInfo.strideWidth - padTop;\n for (let wC = 0; wC < filterWidth; ++wC) {\n const xC = xCCorner + wC * dilationWidth;\n if (xC < 0 || xC >= convInfo.inWidth) {\n continue;\n }\n const wOffset2 = wOffset1 + wC * filterStrides[1];\n const xOffset3 = xOffset2 + xC * convInfo.inChannels;\n let yOffset4 = yOffset3;\n let wOffset3 = wOffset2;\n for (let d1 = 0; d1 < convInfo.inChannels; ++d1) {\n const xVal = xVals[xOffset3 + d1];\n for (let q = 0; q < chMul; ++q) {\n yVals[yOffset4 + q] += xVal * wVals[wOffset3 + q];\n }\n yOffset4 += chMul;\n wOffset3 += chMul;\n }\n }\n }\n }\n }\n }\n return backend.makeTensorInfo(y.shape, y.dtype, y.values);\n}\nexport const depthwiseConv2dNativeConfig = {\n kernelName: DepthwiseConv2dNative,\n backendName: 'cpu',\n kernelFunc: depthwiseConv2dNative\n};\n//# sourceMappingURL=DepthwiseConv2dNative.js.map", "/**\n * @license\n * Copyright 2020 Google 
LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, DepthwiseConv2dNativeBackpropFilter, TensorBuffer } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function depthwiseConv2dNativeBackpropFilter(args) {\n const { inputs, backend, attrs } = args;\n const { x, dy } = inputs;\n const { strides, dilations, pad, dimRoundingMode, filterShape } = attrs;\n assertNotComplex([x, dy], 'depthwiseConv2dNativeBackpropFilter');\n const convInfo = backend_util.computeConv2DInfo(x.shape, filterShape, strides, dilations, pad, dimRoundingMode, true /* depthwise */);\n const { strideHeight, strideWidth, filterHeight, filterWidth } = convInfo;\n const dW = new TensorBuffer(convInfo.filterShape, 'float32');\n const leftPad = convInfo.padInfo.left;\n const topPad = convInfo.padInfo.top;\n const chMul = convInfo.outChannels / convInfo.inChannels;\n const xVals = backend.data.get(x.dataId).values;\n const xBuf = new TensorBuffer(x.shape, x.dtype, xVals);\n const dyVals = backend.data.get(dy.dataId).values;\n const dyBuf = new TensorBuffer(dy.shape, dy.dtype, dyVals);\n for (let wR = 0; wR < filterHeight; ++wR) {\n const yRMin = Math.max(0, Math.ceil((topPad - wR) / strideHeight));\n const yRMax = Math.min(convInfo.outHeight, (convInfo.inHeight + topPad - wR) / strideHeight);\n for (let wC = 0; wC < filterWidth; ++wC) {\n const yCMin = Math.max(0, Math.ceil((leftPad - wC) / strideWidth));\n const yCMax = Math.min(convInfo.outWidth, (convInfo.inWidth + leftPad - wC) / strideWidth);\n for (let d2 = 0; d2 < convInfo.outChannels; ++d2) {\n const d1 = Math.trunc(d2 / chMul);\n const dm = d2 % chMul;\n let dotProd = 0;\n for (let b = 0; b < convInfo.batchSize; ++b) {\n for (let yR = yRMin; yR < yRMax; ++yR) {\n const xR = wR + yR * strideHeight - topPad;\n for (let yC = yCMin; yC < yCMax; ++yC) {\n const xC = wC + yC * strideWidth - leftPad;\n dotProd += xBuf.get(b, xR, xC, d1) *\n dyBuf.get(b, yR, yC, d2);\n }\n }\n }\n dW.set(dotProd, wR, wC, d1, dm);\n }\n }\n }\n return backend.makeTensorInfo(dW.shape, dW.dtype, dW.values);\n}\nexport const depthwiseConv2dNativeBackpropFilterConfig = {\n kernelName: DepthwiseConv2dNativeBackpropFilter,\n backendName: 'cpu',\n kernelFunc: depthwiseConv2dNativeBackpropFilter\n};\n//# sourceMappingURL=DepthwiseConv2dNativeBackpropFilter.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, DepthwiseConv2dNativeBackpropInput, TensorBuffer, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function depthwiseConv2dNativeBackpropInput(args) {\n const { inputs, backend, attrs } = args;\n const { dy, filter } = inputs;\n const { strides, dilations, pad, dimRoundingMode, inputShape } = attrs;\n assertNotComplex([dy, filter], 'depthwiseConv2DNativeBackpropInput');\n const dyStrides = util.computeStrides(dy.shape);\n const filterStrides = util.computeStrides(filter.shape);\n const convInfo = backend_util.computeConv2DInfo(inputShape, filter.shape, strides, dilations, pad, dimRoundingMode, true /* depthwise */);\n const dx = new TensorBuffer(convInfo.inShape, 'float32');\n const dxValues = dx.values;\n const [dxS0, dxS1, dxS2] = dx.strides;\n const dyValues = backend.data.get(dy.dataId).values;\n const [dyS0, dyS1, dyS2] = dyStrides;\n const fltValues = backend.data.get(filter.dataId).values;\n const [fltS0, fltS1, fltS2] = filterStrides;\n const { batchSize, filterHeight, filterWidth, inChannels, inHeight, inWidth, outChannels, outHeight, outWidth, strideHeight, strideWidth } = convInfo;\n const topPad = filterHeight - 1 - convInfo.padInfo.top;\n const leftPad = filterWidth - 1 - convInfo.padInfo.left;\n const chMul = outChannels / inChannels;\n for (let b = 0; b < batchSize; ++b) {\n for (let d1 = 0; d1 < inChannels; ++d1) {\n for (let xR = 0; xR < inHeight; ++xR) {\n const xRCorner = xR - topPad;\n const xRMin = Math.max(0, Math.ceil(xRCorner / strideHeight));\n const yRMax = Math.min(outHeight, (filterHeight + xRCorner) / strideHeight);\n for (let xC = 0; xC < inWidth; ++xC) {\n const xCCorner = xC - leftPad;\n const xCMin = Math.max(0, Math.ceil(xCCorner / strideWidth));\n const yCMax = Math.min(outWidth, (filterWidth + xCCorner) / strideWidth);\n let dotProd = 0;\n for (let yR = xRMin; yR < yRMax; ++yR) {\n const wR = yR * strideHeight - xRCorner;\n for (let yC = xCMin; yC < yCMax; ++yC) {\n const wC = yC * strideWidth - xCCorner;\n const dyOffset = dyS0 * b + dyS1 * yR + dyS2 * yC;\n const fltOffset = fltS0 * (filterHeight - 1 - wR) +\n fltS1 * (filterWidth - 1 - wC) + fltS2 * d1;\n for (let dm = 0; dm < chMul; ++dm) {\n const d2 = d1 * chMul + dm;\n const pixel = dyValues[dyOffset + d2];\n const weight = fltValues[fltOffset + dm];\n dotProd += pixel * weight;\n }\n }\n }\n dxValues[dxS0 * b + dxS1 * xR + dxS2 * xC + d1] = dotProd;\n }\n }\n }\n }\n return backend.makeTensorInfo(dx.shape, dx.dtype, dx.values);\n}\nexport const depthwiseConv2dNativeBackpropInputConfig = {\n kernelName: DepthwiseConv2dNativeBackpropInput,\n backendName: 'cpu',\n kernelFunc: depthwiseConv2dNativeBackpropInput\n};\n//# sourceMappingURL=DepthwiseConv2dNativeBackpropInput.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Dilation2D, util } from '@tensorflow/tfjs-core';\nexport const dilation2dConfig = {\n kernelName: Dilation2D,\n backendName: 'cpu',\n kernelFunc: ({ inputs, backend, attrs }) => {\n const { x, filter } = inputs;\n const { strides, pad, dilations } = attrs;\n const cpuBackend = backend;\n const xVals = cpuBackend.data.get(x.dataId).values;\n const xRank = x.shape.length;\n const filterVals = cpuBackend.data.get(filter.dataId).values;\n const filterRank = filter.shape.length;\n const { batchSize, inHeight, inWidth, inChannels, outHeight, outWidth, padInfo, strideHeight, strideWidth, filterHeight, filterWidth, dilationHeight, dilationWidth, outShape } = backend_util.computeDilation2DInfo(x.shape, filter.shape, strides, pad, 'NHWC' /* dataFormat */, dilations);\n const outSize = util.sizeFromShape(outShape);\n const outRank = outShape.length;\n const outputVals = util.getArrayFromDType(x.dtype, outSize);\n // Upsampling the input by fill in `dilation size - 1` values between each\n // input value.\n // This implementation follows the TF c++ implementation:\n // https://github.com/tensorflow/tensorflow/blob/d9a3a849edc198e90172bc58eb293de457f9d986/tensorflow/core/kernels/dilation_ops.cc\n for (let b = 0; b < batchSize; ++b) {\n for (let hOut = 0; hOut < outHeight; ++hOut) {\n const hBeg = hOut * strideHeight - padInfo.top;\n for (let wOut = 0; wOut < outWidth; ++wOut) {\n const wBeg = wOut * strideWidth - padInfo.left;\n for (let d = 0; d < inChannels; ++d) {\n let curVal = Number.MIN_SAFE_INTEGER;\n for (let h = 0; h < filterHeight; ++h) {\n const hIn = hBeg + h * dilationHeight;\n if (hIn >= 0 && hIn < inHeight) {\n for (let w = 0; w < filterWidth; ++w) {\n const wIn = wBeg + w * dilationWidth;\n if (wIn >= 0 && wIn < inWidth) {\n const xIndex = util.locToIndex([b, hIn, wIn, d], xRank, util.computeStrides(x.shape));\n const filterIndex = util.locToIndex([h, w, d], filterRank, util.computeStrides(filter.shape));\n const val = xVals[xIndex] + filterVals[filterIndex];\n if (val > curVal) {\n curVal = val;\n }\n }\n }\n }\n }\n const outputIndex = util.locToIndex([b, hOut, wOut, d], outRank, util.computeStrides(outShape));\n outputVals[outputIndex] = curVal;\n }\n }\n }\n }\n const dataId = cpuBackend.write(util.toTypedArray(outputVals, x.dtype), outShape, x.dtype);\n return { dataId, shape: outShape, dtype: x.dtype };\n }\n};\n//# sourceMappingURL=Dilation2D.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Dilation2DBackpropFilter, util } from '@tensorflow/tfjs-core';\nexport const dilation2dBackpropFilterConfig = {\n kernelName: Dilation2DBackpropFilter,\n backendName: 'cpu',\n kernelFunc: ({ inputs, backend, attrs }) => {\n const { x, filter, dy } = inputs;\n const { strides, pad, dilations } = attrs;\n const cpuBackend = backend;\n const $x = util.toNestedArray(x.shape, cpuBackend.data.get(x.dataId).values);\n const $filter = util.toNestedArray(filter.shape, cpuBackend.data.get(filter.dataId).values);\n const { batchSize, inHeight, inWidth, inChannels, outHeight, outWidth, padInfo, strideHeight, strideWidth, filterHeight, filterWidth, dilationHeight, dilationWidth, outShape } = backend_util.computeDilation2DInfo(x.shape, filter.shape, strides, pad, 'NHWC' /* dataFormat */, dilations);\n util.assert(dy.rank === outShape.length, () => `Error in ${Dilation2DBackpropFilter}, dy ` +\n `must have the same rank as output ${outShape.length}, but got ` +\n `${dy.rank}`);\n const $dy = util.toNestedArray(outShape, cpuBackend.data.get(dy.dataId).values);\n // The computed filter gradients has the same dimensions as the filter:\n // [filterHeight, filterWidth, depth]\n const gradients = util.makeZerosNestedTypedArray(filter.shape, filter.dtype);\n // In the case of multiple argmax branches, we only back-propagate along the\n // last branch, i.e., the one with largest value of `h * filter_cols + w`,\n // similarly to the max-pooling backward routines.\n // This implementation follows the TF c++ implementation:\n // https://github.com/tensorflow/tensorflow/blob/d9a3a849edc198e90172bc58eb293de457f9d986/tensorflow/core/kernels/dilation_ops.cc\n for (let b = 0; b < batchSize; ++b) {\n for (let hOut = 0; hOut < outHeight; ++hOut) {\n const hBeg = hOut * strideHeight - padInfo.top;\n for (let wOut = 0; wOut < outWidth; ++wOut) {\n const wBeg = wOut * strideWidth - padInfo.left;\n for (let d = 0; d < inChannels; ++d) {\n let curVal = Number.MIN_SAFE_INTEGER;\n let hMax = 0;\n let wMax = 0;\n for (let h = 0; h < filterHeight; ++h) {\n const hIn = hBeg + h * dilationHeight;\n if (hIn >= 0 && hIn < inHeight) {\n for (let w = 0; w < filterWidth; ++w) {\n const wIn = wBeg + w * dilationWidth;\n if (wIn >= 0 && wIn < inWidth) {\n const val = $x[b][hIn][wIn][d] + $filter[h][w][d];\n if (val > curVal) {\n curVal = val;\n hMax = h;\n wMax = w;\n }\n }\n }\n }\n }\n gradients[hMax][wMax][d] += $dy[b][hOut][wOut][d];\n }\n }\n }\n }\n const dataId = cpuBackend.write(util.toTypedArray(gradients, x.dtype), filter.shape, filter.dtype);\n return { dataId, shape: filter.shape, dtype: filter.dtype };\n }\n};\n//# sourceMappingURL=Dilation2DBackpropFilter.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Dilation2DBackpropInput, util } from '@tensorflow/tfjs-core';\nexport const dilation2dBackpropInputConfig = {\n kernelName: Dilation2DBackpropInput,\n backendName: 'cpu',\n kernelFunc: ({ inputs, backend, attrs }) => {\n const { x, filter, dy } = inputs;\n const { strides, pad, dilations } = attrs;\n const cpuBackend = backend;\n const $x = util.toNestedArray(x.shape, cpuBackend.data.get(x.dataId).values);\n const $filter = util.toNestedArray(filter.shape, cpuBackend.data.get(filter.dataId).values);\n const { batchSize, inHeight, inWidth, inChannels, outHeight, outWidth, padInfo, strideHeight, strideWidth, filterHeight, filterWidth, dilationHeight, dilationWidth, outShape } = backend_util.computeDilation2DInfo(x.shape, filter.shape, strides, pad, 'NHWC' /* dataFormat */, dilations);\n util.assert(dy.rank === outShape.length, () => `Error in ${Dilation2DBackpropInput}, dy ` +\n `must have the same rank as output ${outShape.length}, but got ` +\n `${dy.rank}`);\n const $dy = util.toNestedArray(outShape, cpuBackend.data.get(dy.dataId).values);\n // The computed gradients has the same dimensions as the input:\n // [batch, inputHeight, inputCols, inChannel]\n const gradients = util.makeZerosNestedTypedArray(x.shape, x.dtype);\n // In the case of multiple argmax branches, we only back-propagate along the\n // last branch, i.e., the one with largest value of `h * filter_cols + w`,\n // similarly to the max-pooling backward routines.\n // This implementation follows the TF c++ implementation:\n // https://github.com/tensorflow/tensorflow/blob/d9a3a849edc198e90172bc58eb293de457f9d986/tensorflow/core/kernels/dilation_ops.cc\n for (let b = 0; b < batchSize; ++b) {\n for (let hOut = 0; hOut < outHeight; ++hOut) {\n const hBeg = hOut * strideHeight - padInfo.top;\n for (let wOut = 0; wOut < outWidth; ++wOut) {\n const wBeg = wOut * strideWidth - padInfo.left;\n for (let d = 0; d < inChannels; ++d) {\n let curVal = Number.MIN_SAFE_INTEGER;\n let hInMax = (hBeg < 0) ? 0 : hBeg;\n let wInMax = (wBeg < 0) ? 0 : wBeg;\n for (let h = 0; h < filterHeight; ++h) {\n const hIn = hBeg + h * dilationHeight;\n if (hIn >= 0 && hIn < inHeight) {\n for (let w = 0; w < filterWidth; ++w) {\n const wIn = wBeg + w * dilationWidth;\n if (wIn >= 0 && wIn < inWidth) {\n const val = $x[b][hIn][wIn][d] + $filter[h][w][d];\n if (val > curVal) {\n curVal = val;\n hInMax = hIn;\n wInMax = wIn;\n }\n }\n }\n }\n }\n gradients[b][hInMax][wInMax][d] += $dy[b][hOut][wOut][d];\n }\n }\n }\n }\n const dataId = cpuBackend.write(util.toTypedArray(gradients, x.dtype), x.shape, x.dtype);\n return { dataId, shape: x.shape, dtype: x.dtype };\n }\n};\n//# sourceMappingURL=Dilation2DBackpropInput.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Div } from '@tensorflow/tfjs-core';\nimport { createSimpleBinaryKernelImpl } from '../utils/binary_impl';\nimport { binaryKernelFunc } from '../utils/kernel_utils';\nexport const divImpl = createSimpleBinaryKernelImpl((a, b) => a / b);\nexport const div = binaryKernelFunc(Div, divImpl);\nexport const divConfig = {\n kernelName: Div,\n backendName: 'cpu',\n kernelFunc: div\n};\n//# sourceMappingURL=Div.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Erf } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nconst p = backend_util.ERF_P;\nconst a1 = backend_util.ERF_A1;\nconst a2 = backend_util.ERF_A2;\nconst a3 = backend_util.ERF_A3;\nconst a4 = backend_util.ERF_A4;\nconst a5 = backend_util.ERF_A5;\nexport const erf = unaryKernelFunc(Erf, (xi) => {\n const sign = Math.sign(xi);\n const v = Math.abs(xi);\n const t = 1.0 / (1.0 + p * v);\n return sign *\n (1.0 -\n (((((a5 * t + a4) * t) + a3) * t + a2) * t + a1) * t *\n Math.exp(-v * v));\n});\nexport const erfConfig = {\n kernelName: Erf,\n backendName: 'cpu',\n kernelFunc: erf,\n};\n//# sourceMappingURL=Erf.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, util } from '@tensorflow/tfjs-core';\nimport { add } from '../kernels/Add';\nimport { complex } from '../kernels/Complex';\nimport { concat } from '../kernels/Concat';\nimport { divConfig } from '../kernels/Div';\nimport { identity } from '../kernels/Identity';\nimport { imag } from '../kernels/Imag';\nimport { multiply } from '../kernels/Multiply';\nimport { real } from '../kernels/Real';\nimport { slice } from '../kernels/Slice';\nimport { sub } from '../kernels/Sub';\n/**\n * Calculate FFT of inner most elements of batch tensor.\n */\nexport function fftBatch(input, inverse, cpuBackend) {\n const inputShape = input.shape;\n const batch = inputShape[0];\n const innerDim = inputShape[1];\n const inputVals = cpuBackend.data.get(input.dataId);\n const real2D = inputVals.complexTensorInfos.real;\n const imag2D = inputVals.complexTensorInfos.imag;\n // Collects real and imaginary values separately.\n const resultShape = [batch, innerDim];\n const resultSize = util.sizeFromShape(resultShape);\n const resultReal = util.getTypedArrayFromDType('float32', resultSize);\n const resultImag = util.getTypedArrayFromDType('float32', resultSize);\n for (let b = 0; b < batch; b++) {\n // TODO: Support slice ops for complex type.\n const r = slice({\n inputs: { x: real2D },\n backend: cpuBackend,\n attrs: { begin: [b, 0], size: [1, innerDim] }\n });\n const i = slice({\n inputs: { x: imag2D },\n backend: cpuBackend,\n attrs: { begin: [b, 0], size: [1, innerDim] }\n });\n const input = complex({ inputs: { real: r, imag: i }, backend: cpuBackend });\n // Run FFT by batch element.\n const { real, imag } = fftImpl(input, inverse, cpuBackend);\n const res = backend_util.mergeRealAndImagArrays(real, imag);\n for (let d = 0; d < innerDim; d++) {\n const c = backend_util.getComplexWithIndex(res, d);\n resultReal[b * innerDim + d] = c.real;\n resultImag[b * innerDim + d] = c.imag;\n }\n cpuBackend.disposeIntermediateTensorInfo(r);\n cpuBackend.disposeIntermediateTensorInfo(i);\n cpuBackend.disposeIntermediateTensorInfo(input);\n }\n const $realInfo = cpuBackend.makeTensorInfo(resultShape, 'float32', resultReal);\n const $imagInfo = cpuBackend.makeTensorInfo(resultShape, 'float32', resultImag);\n const result = complex({ inputs: { real: $realInfo, imag: $imagInfo }, backend: cpuBackend });\n cpuBackend.disposeIntermediateTensorInfo($realInfo);\n cpuBackend.disposeIntermediateTensorInfo($imagInfo);\n return result;\n}\nexport function fftImpl(input, inverse, cpuBackend) {\n const inputSize = util.sizeFromShape(input.shape);\n const inputVals = cpuBackend.data.get(input.dataId);\n const realVals = cpuBackend.data.get(inputVals.complexTensorInfos.real.dataId).values;\n const imagVals = cpuBackend.data.get(inputVals.complexTensorInfos.imag.dataId).values;\n if (isExponentOf2(inputSize)) {\n const result = fftRadix2(realVals, imagVals, 
inputSize, inverse, cpuBackend);\n const resultShape = [input.shape[0], input.shape[1]];\n if (inverse) {\n const realInfo = cpuBackend.makeTensorInfo(resultShape, 'float32', result.real);\n const imagInfo = cpuBackend.makeTensorInfo(resultShape, 'float32', result.imag);\n const sizeInfo = cpuBackend.makeTensorInfo([], 'float32', util.createScalarValue(inputSize, 'float32'));\n const sizeInfoCopy = identity({ inputs: { x: sizeInfo }, backend: cpuBackend });\n const divRealInfo = divConfig.kernelFunc({ inputs: { a: realInfo, b: sizeInfo }, backend: cpuBackend });\n const divImagInfo = divConfig.kernelFunc({ inputs: { a: imagInfo, b: sizeInfoCopy }, backend: cpuBackend });\n const divRealVals = cpuBackend.data.get(divRealInfo.dataId).values;\n const divImagVals = cpuBackend.data.get(divImagInfo.dataId).values;\n cpuBackend.disposeIntermediateTensorInfo(realInfo);\n cpuBackend.disposeIntermediateTensorInfo(imagInfo);\n cpuBackend.disposeIntermediateTensorInfo(sizeInfo);\n cpuBackend.disposeIntermediateTensorInfo(sizeInfoCopy);\n cpuBackend.disposeIntermediateTensorInfo(divRealInfo);\n cpuBackend.disposeIntermediateTensorInfo(divImagInfo);\n return { real: divRealVals, imag: divImagVals };\n }\n return result;\n }\n else {\n const data = backend_util.mergeRealAndImagArrays(realVals, imagVals);\n const rawOutput = fourierTransformByMatmul(data, inputSize, inverse);\n return backend_util.splitRealAndImagArrays(rawOutput);\n }\n}\nfunction isExponentOf2(size) {\n return (size & size - 1) === 0;\n}\n// FFT using Cooley-Tukey algorithm on radix 2 dimensional input.\nfunction fftRadix2(realVals, imagVals, size, inverse, cpuBackend) {\n if (size === 1) {\n return { real: realVals, imag: imagVals };\n }\n const data = backend_util.mergeRealAndImagArrays(realVals, imagVals);\n const half = size / 2;\n const evenComplex = backend_util.complexWithEvenIndex(data);\n const evenRealVals = evenComplex.real;\n const evenImagVals = evenComplex.imag;\n const evenShape = [evenRealVals.length];\n const evenRealInfo = cpuBackend.makeTensorInfo(evenShape, 'float32', evenRealVals);\n const evenImagInfo = cpuBackend.makeTensorInfo(evenShape, 'float32', evenImagVals);\n const evenTensorInfo = complex({ inputs: { real: evenRealInfo, imag: evenImagInfo }, backend: cpuBackend });\n const oddComplex = backend_util.complexWithOddIndex(data);\n const oddRealVals = oddComplex.real;\n const oddImagVals = oddComplex.imag;\n const oddShape = [oddRealVals.length];\n const oddRealInfo = cpuBackend.makeTensorInfo(oddShape, 'float32', oddRealVals);\n const oddImagInfo = cpuBackend.makeTensorInfo(oddShape, 'float32', oddImagVals);\n const oddTensorInfo = complex({ inputs: { real: oddRealInfo, imag: oddImagInfo }, backend: cpuBackend });\n // Recursive call for half part of original input.\n const $evenComplex = fftRadix2(evenRealVals, evenImagVals, half, inverse, cpuBackend);\n const $evenRealVals = $evenComplex.real;\n const $evenImagVals = $evenComplex.imag;\n const $evenShape = [$evenRealVals.length];\n const $evenRealInfo = cpuBackend.makeTensorInfo($evenShape, 'float32', $evenRealVals);\n const $evenImagInfo = cpuBackend.makeTensorInfo($evenShape, 'float32', $evenImagVals);\n const $evenTensorInfo = complex({\n inputs: { real: $evenRealInfo, imag: $evenImagInfo },\n backend: cpuBackend\n });\n const $oddComplex = fftRadix2(oddRealVals, oddImagVals, half, inverse, cpuBackend);\n const $oddRealVals = $oddComplex.real;\n const $oddImagVals = $oddComplex.imag;\n const $oddShape = [$oddRealVals.length];\n const $oddRealInfo = 
cpuBackend.makeTensorInfo($oddShape, 'float32', $oddRealVals);\n const $oddImagInfo = cpuBackend.makeTensorInfo($oddShape, 'float32', $oddImagVals);\n const $oddTensorInfo = complex({ inputs: { real: $oddRealInfo, imag: $oddImagInfo }, backend: cpuBackend });\n const e = backend_util.exponents(size, inverse);\n const eShape = [e.real.length];\n const eRealInfo = cpuBackend.makeTensorInfo(eShape, 'float32', e.real);\n const eImagInfo = cpuBackend.makeTensorInfo(eShape, 'float32', e.imag);\n const complexInfo = complex({ inputs: { real: eRealInfo, imag: eImagInfo }, backend: cpuBackend });\n const exponentInfo = multiply({ inputs: { a: complexInfo, b: $oddTensorInfo }, backend: cpuBackend });\n const addPart = add({\n inputs: { a: $evenTensorInfo, b: exponentInfo },\n backend: cpuBackend\n });\n const subPart = sub({\n inputs: { a: $evenTensorInfo, b: exponentInfo },\n backend: cpuBackend\n });\n const addPartReal = real({ inputs: { input: addPart }, backend: cpuBackend });\n const subPartReal = real({ inputs: { input: subPart }, backend: cpuBackend });\n const addPartImag = imag({ inputs: { input: addPart }, backend: cpuBackend });\n const subPartImag = imag({ inputs: { input: subPart }, backend: cpuBackend });\n const $real = concat({\n inputs: [addPartReal, subPartReal],\n backend: cpuBackend,\n attrs: { axis: 0 }\n });\n const $imag = concat({\n inputs: [addPartImag, subPartImag],\n backend: cpuBackend,\n attrs: { axis: 0 }\n });\n const $realVals = cpuBackend.data.get($real.dataId).values;\n const $imagVals = cpuBackend.data.get($imag.dataId).values;\n cpuBackend.disposeIntermediateTensorInfo(evenRealInfo);\n cpuBackend.disposeIntermediateTensorInfo(evenImagInfo);\n cpuBackend.disposeIntermediateTensorInfo(evenTensorInfo);\n cpuBackend.disposeIntermediateTensorInfo(oddRealInfo);\n cpuBackend.disposeIntermediateTensorInfo(oddImagInfo);\n cpuBackend.disposeIntermediateTensorInfo(oddTensorInfo);\n cpuBackend.disposeIntermediateTensorInfo($evenRealInfo);\n cpuBackend.disposeIntermediateTensorInfo($evenImagInfo);\n cpuBackend.disposeIntermediateTensorInfo($evenTensorInfo);\n cpuBackend.disposeIntermediateTensorInfo($oddRealInfo);\n cpuBackend.disposeIntermediateTensorInfo($oddImagInfo);\n cpuBackend.disposeIntermediateTensorInfo($oddTensorInfo);\n cpuBackend.disposeIntermediateTensorInfo(eRealInfo);\n cpuBackend.disposeIntermediateTensorInfo(eImagInfo);\n cpuBackend.disposeIntermediateTensorInfo(complexInfo);\n cpuBackend.disposeIntermediateTensorInfo(exponentInfo);\n cpuBackend.disposeIntermediateTensorInfo(addPart);\n cpuBackend.disposeIntermediateTensorInfo(subPart);\n cpuBackend.disposeIntermediateTensorInfo(addPartReal);\n cpuBackend.disposeIntermediateTensorInfo(addPartImag);\n cpuBackend.disposeIntermediateTensorInfo(subPartReal);\n cpuBackend.disposeIntermediateTensorInfo(subPartImag);\n cpuBackend.disposeIntermediateTensorInfo($real);\n cpuBackend.disposeIntermediateTensorInfo($imag);\n return { real: $realVals, imag: $imagVals };\n}\n// Calculate fourier transform by multplying sinusoid matrix.\nfunction fourierTransformByMatmul(data, size, inverse) {\n const ret = new Float32Array(size * 2);\n // TODO: Use matmul instead once it supports complex64 type.\n for (let r = 0; r < size; r++) {\n let real = 0.0;\n let imag = 0.0;\n for (let c = 0; c < size; c++) {\n const e = backend_util.exponent(r * c, size, inverse);\n const term = backend_util.getComplexWithIndex(data, c);\n real += term.real * e.real - term.imag * e.imag;\n imag += term.real * e.imag + term.imag * e.real;\n }\n if 
(inverse) {\n real /= size;\n imag /= size;\n }\n backend_util.assignToTypedArray(ret, real, imag, r);\n }\n return ret;\n}\n//# sourceMappingURL=fft_utils.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { FFT, util } from '@tensorflow/tfjs-core';\nimport { fftBatch } from '../utils/fft_utils';\nimport { reshape } from './Reshape';\nexport function fft(args) {\n const { inputs, backend } = args;\n const { input } = inputs;\n const inputSize = util.sizeFromShape(input.shape);\n // Collapse all outer dimensions to a single batch dimension.\n const innerDimensionSize = input.shape[input.shape.length - 1];\n const batch = inputSize / innerDimensionSize;\n const input2D = reshape({\n inputs: { x: input },\n backend,\n attrs: { shape: [batch, innerDimensionSize] }\n });\n const result = fftBatch(input2D, false, backend);\n const resultReshaped = reshape({ inputs: { x: result }, backend, attrs: { shape: input.shape } });\n backend.disposeIntermediateTensorInfo(input2D);\n backend.disposeIntermediateTensorInfo(result);\n return resultReshaped;\n}\nexport const fftConfig = {\n kernelName: FFT,\n backendName: 'cpu',\n kernelFunc: fft\n};\n//# sourceMappingURL=FFT.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Fill, util } from '@tensorflow/tfjs-core';\nexport function fill(args) {\n const { backend, attrs } = args;\n const { shape, value, dtype } = attrs;\n const $dtype = dtype || util.inferDtype(value);\n const values = util.getArrayFromDType($dtype, util.sizeFromShape(shape));\n fillValues(values, value, $dtype);\n return backend.makeTensorInfo(shape, $dtype, values);\n}\nexport const fillConfig = {\n kernelName: Fill,\n backendName: 'cpu',\n kernelFunc: fill\n};\nfunction fillValues(values, value, dtype) {\n if (dtype === 'string') {\n values.fill(value);\n }\n else {\n values.fill(value);\n }\n}\n//# sourceMappingURL=Fill.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { FlipLeftRight, util } from '@tensorflow/tfjs-core';\nexport const flipLeftRightConfig = {\n kernelName: FlipLeftRight,\n backendName: 'cpu',\n kernelFunc: ({ inputs, attrs, backend }) => {\n const { image } = inputs;\n const cpuBackend = backend;\n const output = util.getTypedArrayFromDType(image.dtype, util.sizeFromShape(image.shape));\n const [batch, imageHeight, imageWidth, numChannels] = image.shape;\n const imageVals = cpuBackend.data.get(image.dataId).values;\n for (let batchIdx = 0; batchIdx < batch; batchIdx++) {\n const batchOffset = batchIdx * imageWidth * imageHeight * numChannels;\n for (let row = 0; row < imageHeight; row++) {\n const rowOffset = row * (imageWidth * numChannels);\n for (let col = 0; col < imageWidth; col++) {\n const colOffset = col * numChannels;\n for (let channel = 0; channel < numChannels; channel++) {\n const coords = [batch, row, col, channel];\n const x = coords[2];\n const coordX = Math.round(imageWidth - x);\n const outIdx = batchOffset + rowOffset + colOffset + channel;\n let outputValue = imageVals[outIdx];\n // If the coordinate position falls within the image boundaries...\n if (coordX >= 0 && coordX < imageWidth) {\n // set the output to the image value at the coordinate position.\n const rotatedColOffset = coordX * numChannels;\n const imageIdx = batchOffset + rowOffset + rotatedColOffset + channel;\n outputValue = imageVals[imageIdx];\n }\n output[outIdx] = outputValue;\n }\n }\n }\n }\n const dataId = cpuBackend.write(output, image.shape, image.dtype);\n return { dataId, shape: image.shape, dtype: image.dtype };\n }\n};\n//# sourceMappingURL=FlipLeftRight.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { FusedConv2D } from '@tensorflow/tfjs-core';\nimport { applyActivation } from '../utils/fused_utils';\nimport { add } from './Add';\nimport { conv2D } from './Conv2D';\nexport function fusedConv2D(args) {\n const { inputs, backend, attrs } = args;\n const { x, filter, bias, preluActivationWeights } = inputs;\n const { strides, pad, dataFormat, dilations, dimRoundingMode, activation } = attrs;\n let result = conv2D({\n inputs: { x, filter },\n backend,\n attrs: { strides, pad, dataFormat, dilations, dimRoundingMode }\n });\n if (bias) {\n const resultOld = result;\n result = add({ inputs: { a: result, b: bias }, backend });\n backend.disposeIntermediateTensorInfo(resultOld);\n }\n if (activation) {\n const resultOld = result;\n result =\n applyActivation(backend, result, activation, preluActivationWeights);\n backend.disposeIntermediateTensorInfo(resultOld);\n }\n return result;\n}\nexport const fusedConv2DConfig = {\n kernelName: FusedConv2D,\n backendName: 'cpu',\n kernelFunc: fusedConv2D\n};\n//# sourceMappingURL=FusedConv2D.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { FusedDepthwiseConv2D } from '@tensorflow/tfjs-core';\nimport { applyActivation } from '../utils/fused_utils';\nimport { add } from './Add';\nimport { depthwiseConv2dNative } from './DepthwiseConv2dNative';\nexport function fusedDepthwiseConv2D(args) {\n const { inputs, backend, attrs } = args;\n const { x, filter, bias, preluActivationWeights } = inputs;\n const { strides, pad, dataFormat, dilations, dimRoundingMode, activation } = attrs;\n let result = depthwiseConv2dNative({\n inputs: { x, filter },\n backend,\n attrs: { strides, pad, dataFormat, dilations, dimRoundingMode }\n });\n if (bias) {\n const oldResult = result;\n result = add({ inputs: { a: result, b: bias }, backend });\n backend.disposeIntermediateTensorInfo(oldResult);\n }\n if (activation) {\n const oldResult = result;\n result =\n applyActivation(backend, result, activation, preluActivationWeights);\n backend.disposeIntermediateTensorInfo(oldResult);\n }\n return result;\n}\nexport const fusedDepthwiseConv2DConfig = {\n kernelName: FusedDepthwiseConv2D,\n backendName: 'cpu',\n kernelFunc: fusedDepthwiseConv2D\n};\n//# sourceMappingURL=FusedDepthwiseConv2D.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { IFFT, util } from '@tensorflow/tfjs-core';\nimport { fftBatch } from '../utils/fft_utils';\nimport { reshape } from './Reshape';\nexport function ifft(args) {\n const { inputs, backend } = args;\n const { input } = inputs;\n const inputSize = util.sizeFromShape(input.shape);\n // Collapse all outer dimensions to a single batch dimension.\n const innerDimensionSize = input.shape[input.shape.length - 1];\n const batch = inputSize / innerDimensionSize;\n const input2D = reshape({\n inputs: { x: input },\n backend,\n attrs: { shape: [batch, innerDimensionSize] }\n });\n const result = fftBatch(input2D, true, backend);\n const resultReshaped = reshape({ inputs: { x: result }, backend, attrs: { shape: input.shape } });\n backend.disposeIntermediateTensorInfo(input2D);\n backend.disposeIntermediateTensorInfo(result);\n return resultReshaped;\n}\nexport const ifftConfig = {\n kernelName: IFFT,\n backendName: 'cpu',\n kernelFunc: ifft\n};\n//# sourceMappingURL=IFFT.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { IsFinite } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const isFinite = unaryKernelFunc(IsFinite, (xi) => Number.isFinite(xi) ? 1 : 0, 'bool');\nexport const isFiniteConfig = {\n kernelName: IsFinite,\n backendName: 'cpu',\n kernelFunc: isFinite,\n};\n//# sourceMappingURL=IsFinite.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { IsInf } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const isInf = unaryKernelFunc(IsInf, (xi) => Math.abs(xi) === Infinity ? 1 : 0, 'bool');\nexport const isInfConfig = {\n kernelName: IsInf,\n backendName: 'cpu',\n kernelFunc: isInf,\n};\n//# sourceMappingURL=IsInf.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { IsNan } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const isNaN = unaryKernelFunc(IsNan, (xi) => Number.isNaN(xi) ? 1 : 0, 'bool');\nexport const isNaNConfig = {\n kernelName: IsNan,\n backendName: 'cpu',\n kernelFunc: isNaN,\n};\n//# sourceMappingURL=IsNaN.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Log1p } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const log1p = unaryKernelFunc(Log1p, (xi) => Math.log1p(xi));\nexport const log1pConfig = {\n kernelName: Log1p,\n backendName: 'cpu',\n kernelFunc: log1p,\n};\n//# sourceMappingURL=Log1p.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { LogicalNot } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const logicalNot = unaryKernelFunc(LogicalNot, (xi) => xi ? 0 : 1, 'bool');\nexport const logicalNotConfig = {\n kernelName: LogicalNot,\n backendName: 'cpu',\n kernelFunc: logicalNot,\n};\n//# sourceMappingURL=LogicalNot.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Max } from '@tensorflow/tfjs-core';\nimport { backend_util } from '@tensorflow/tfjs-core';\nimport { util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { maxImpl } from './Max_impl';\nimport { transposeImpl } from './Transpose_impl';\nexport const maxConfig = {\n kernelName: Max,\n backendName: 'cpu',\n kernelFunc: ({ inputs, attrs, backend }) => {\n const { x } = inputs;\n const { reductionIndices, keepDims } = attrs;\n const cpuBackend = backend;\n let xShape = x.shape;\n const xRank = xShape.length;\n const origAxes = util.parseAxisParam(reductionIndices, xShape);\n let axes = origAxes;\n const permutedAxes = backend_util.getAxesPermutation(axes, xRank);\n let xVals = cpuBackend.data.get(x.dataId).values;\n if (permutedAxes != null) {\n const newShape = new Array(xRank);\n for (let i = 0; i < newShape.length; i++) {\n newShape[i] = xShape[permutedAxes[i]];\n }\n xVals = transposeImpl(xVals, xShape, x.dtype, permutedAxes, newShape);\n axes = backend_util.getInnerMostAxes(axes.length, xRank);\n xShape = newShape;\n }\n assertNotComplex(x, 'max');\n backend_util.assertAxesAreInnerMostDims('max', axes, xRank);\n const [maxOutShape, reduceShape] = backend_util.computeOutAndReduceShapes(xShape, axes);\n const reduceSize = util.sizeFromShape(reduceShape);\n const result = maxImpl(xVals, reduceSize, maxOutShape, x.dtype);\n const dataId = cpuBackend.write(result, maxOutShape, x.dtype);\n let outShape = maxOutShape;\n if (keepDims) {\n // reshape\n const newShape = backend_util.expandShapeToKeepDim(maxOutShape, origAxes);\n outShape = newShape;\n }\n return { dataId, shape: outShape, dtype: x.dtype };\n }\n};\n//# sourceMappingURL=Max.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, MaxPool, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { pool } from '../utils/pool_utils';\nimport { identity } from './Identity';\nexport function maxPool(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n assertNotComplex(x, 'maxPool');\n const { filterSize, strides, pad, dimRoundingMode } = attrs;\n const dilations = 1;\n util.assert(backend_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in maxPool: Either strides or dilations must be 1. ' +\n `Got strides ${strides} and dilations '${dilations}'`);\n const convInfo = backend_util.computePool2DInfo(x.shape, filterSize, strides, dilations, pad, dimRoundingMode);\n let res;\n if (convInfo.filterWidth === 1 && convInfo.filterHeight === 1 &&\n util.arraysEqual(convInfo.inShape, convInfo.outShape)) {\n res = identity({ inputs: { x }, backend });\n }\n else {\n const xValues = backend.data.get(x.dataId).values;\n const strides = util.computeStrides(x.shape);\n const buffer = pool(xValues, x.shape, x.dtype, strides, convInfo, 'max');\n res = backend.makeTensorInfo(convInfo.outShape, x.dtype, buffer.values);\n }\n return res;\n}\nexport const maxPoolConfig = {\n kernelName: MaxPool,\n backendName: 'cpu',\n kernelFunc: maxPool\n};\n//# sourceMappingURL=MaxPool.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, buffer, MaxPoolBackprop } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { maxPoolPositions } from '../utils/pool_utils';\nexport function maxPoolBackprop(args) {\n const { inputs, backend, attrs } = args;\n const { dy, input, output } = inputs;\n const x = input;\n assertNotComplex([input, output], 'maxPoolBackprop');\n const { filterSize, strides, pad, dimRoundingMode } = attrs;\n const convInfo = backend_util.computePool2DInfo(x.shape, filterSize, strides, 1 /* dilations */, pad, dimRoundingMode);\n const xValues = backend.data.get(x.dataId).values;\n const maxPosBuf = buffer(convInfo.outShape, x.dtype, maxPoolPositions(xValues, x.shape, x.dtype, convInfo).values);\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padLeft = effectiveFilterWidth - 1 - convInfo.padInfo.left;\n const padTop = effectiveFilterHeight - 1 - convInfo.padInfo.top;\n const dx = buffer(x.shape, 'float32');\n const dyData = backend.data.get(dy.dataId).values;\n const dyBuf = buffer(dy.shape, 'float32', dyData);\n for (let b = 0; b < convInfo.batchSize; ++b) {\n for (let d = 0; d < convInfo.inChannels; ++d) {\n for (let dxR = 0; dxR < convInfo.inHeight; ++dxR) {\n for (let dxC = 0; dxC < convInfo.inWidth; ++dxC) {\n // Shader code begins.\n const dyRCorner = dxR - padTop;\n const dyCCorner = dxC - padLeft;\n let dotProd = 0;\n for (let wR = 0; wR < effectiveFilterHeight; wR += dilationHeight) {\n const dyR = (dyRCorner + wR) / strideHeight;\n if (dyR < 0 || dyR >= convInfo.outHeight ||\n Math.floor(dyR) !== dyR) {\n continue;\n }\n for (let wC = 0; wC < effectiveFilterWidth; wC += dilationWidth) {\n const dyC = (dyCCorner + wC) / strideWidth;\n if (dyC < 0 || dyC >= convInfo.outWidth ||\n Math.floor(dyC) !== dyC) {\n continue;\n }\n const maxPos = effectiveFilterHeight * effectiveFilterWidth - 1 -\n maxPosBuf.get(b, dyR, dyC, d);\n const curPos = wR * effectiveFilterWidth + wC;\n const mask = maxPos === curPos ? 1 : 0;\n if (mask === 0) {\n continue;\n }\n const pixel = dyBuf.get(b, dyR, dyC, d);\n dotProd += pixel * mask;\n }\n }\n dx.set(dotProd, b, dxR, dxC, d);\n }\n }\n }\n }\n return backend.makeTensorInfo(dx.shape, dx.dtype, dx.values);\n}\nexport const maxPoolBackpropConfig = {\n kernelName: MaxPoolBackprop,\n backendName: 'cpu',\n kernelFunc: maxPoolBackprop\n};\n//# sourceMappingURL=MaxPoolBackprop.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nimport { maxPoolPositions, pool } from '../utils/pool_utils';\nexport function maxPoolWithArgmaxImpl(xValues, xShape, dtype, includeBatchInIndex, convInfo) {\n const strides = util.computeStrides(xShape);\n const maxPools = pool(xValues, xShape, dtype, strides, convInfo, 'max');\n const maxPositions = maxPoolPositions(xValues, xShape, dtype, convInfo, true, includeBatchInIndex);\n return [maxPools.values, maxPositions.values];\n}\n//# sourceMappingURL=MaxPoolWithArgmax_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { MaxPoolWithArgmax } from '@tensorflow/tfjs-core';\nimport { backend_util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { maxPoolWithArgmaxImpl } from './MaxPoolWithArgmax_impl';\nexport const maxPoolWithArgmaxConfig = {\n kernelName: MaxPoolWithArgmax,\n backendName: 'cpu',\n kernelFunc: ({ inputs, attrs, backend }) => {\n const { x } = inputs;\n const { filterSize, strides, pad, includeBatchInIndex } = attrs;\n const cpuBackend = backend;\n assertNotComplex(x, 'MaxPoolWithArgmax');\n const values = cpuBackend.data.get(x.dataId).values;\n const convInfo = backend_util.computePool2DInfo(x.shape, filterSize, strides, [1, 1], pad);\n const [pooled, indexes] = maxPoolWithArgmaxImpl(values, x.shape, x.dtype, includeBatchInIndex, convInfo);\n const pooledDataId = cpuBackend.write(pooled, convInfo.outShape, x.dtype);\n const indexesDataId = cpuBackend.write(indexes, convInfo.outShape, x.dtype);\n return [\n { dataId: pooledDataId, shape: convInfo.outShape, dtype: x.dtype },\n { dataId: indexesDataId, shape: convInfo.outShape, dtype: 'int32' }\n ];\n }\n};\n//# sourceMappingURL=MaxPoolWithArgmax.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { MirrorPad, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function mirrorPad(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n const { paddings, mode } = attrs;\n assertNotComplex(x, 'mirrorPad');\n const outShape = paddings.map((p, i) => p[0] /* beforePad */ + x.shape[i] + p[1] /* afterPad */);\n const start = paddings.map(p => p[0]);\n const end = paddings.map((p, i) => p[0] + x.shape[i]);\n const offset = mode === 'reflect' ? 0 : 1;\n const xVals = backend.data.get(x.dataId).values;\n const xRank = x.shape.length;\n const xStrides = util.computeStrides(x.shape);\n const resultSize = util.sizeFromShape(outShape);\n const resultRank = outShape.length;\n const resultStrides = util.computeStrides(outShape);\n const resVals = util.getTypedArrayFromDType(x.dtype, resultSize);\n for (let i = 0; i < resultSize; i++) {\n let coords = util.indexToLoc(i, resultRank, resultStrides);\n for (let i = 0; i < resultRank; i++) {\n if (coords[i] < start[i]) {\n coords[i] = start[i] * 2 - coords[i] - offset;\n }\n else if (coords[i] >= end[i]) {\n coords[i] = (end[i] - 1) * 2 - coords[i] + offset;\n }\n }\n coords = coords.map((c, i) => c - start[i]);\n const inIndex = util.locToIndex(coords, xRank, xStrides);\n resVals[i] = xVals[inIndex];\n }\n const outId = backend.write(resVals, outShape, x.dtype);\n return { dataId: outId, shape: outShape, dtype: x.dtype };\n}\nexport const mirrorPadConfig = {\n kernelName: MirrorPad,\n backendName: 'cpu',\n kernelFunc: mirrorPad\n};\n//# sourceMappingURL=MirrorPad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { NonMaxSuppressionV4 } from '@tensorflow/tfjs-core';\nimport { kernel_impls } from '@tensorflow/tfjs-core';\nconst nonMaxSuppressionV4Impl = kernel_impls.nonMaxSuppressionV4Impl;\nimport { assertNotComplex } from '../cpu_util';\nexport const nonMaxSuppressionV4Config = {\n kernelName: NonMaxSuppressionV4,\n backendName: 'cpu',\n kernelFunc: ({ inputs, backend, attrs }) => {\n const { boxes, scores } = inputs;\n const { maxOutputSize, iouThreshold, scoreThreshold, padToMaxOutputSize } = attrs;\n const cpuBackend = backend;\n assertNotComplex(boxes, 'NonMaxSuppressionPadded');\n const boxesVals = cpuBackend.data.get(boxes.dataId).values;\n const scoresVals = cpuBackend.data.get(scores.dataId).values;\n const { selectedIndices, validOutputs } = nonMaxSuppressionV4Impl(boxesVals, scoresVals, maxOutputSize, iouThreshold, scoreThreshold, padToMaxOutputSize);\n return [selectedIndices, validOutputs];\n }\n};\n//# sourceMappingURL=NonMaxSuppressionV4.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { NonMaxSuppressionV5 } from '@tensorflow/tfjs-core';\nimport { kernel_impls } from '@tensorflow/tfjs-core';\nconst nonMaxSuppressionV5Impl = kernel_impls.nonMaxSuppressionV5Impl;\nimport { assertNotComplex } from '../cpu_util';\nexport const nonMaxSuppressionV5Config = {\n kernelName: NonMaxSuppressionV5,\n backendName: 'cpu',\n kernelFunc: ({ inputs, backend, attrs }) => {\n const { boxes, scores } = inputs;\n const { maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma } = attrs;\n const cpuBackend = backend;\n assertNotComplex(boxes, 'NonMaxSuppressionWithScore');\n const boxesVals = cpuBackend.data.get(boxes.dataId).values;\n const scoresVals = cpuBackend.data.get(scores.dataId).values;\n const maxOutputSizeVal = maxOutputSize;\n const iouThresholdVal = iouThreshold;\n const scoreThresholdVal = scoreThreshold;\n const softNmsSigmaVal = softNmsSigma;\n const { selectedIndices, selectedScores } = nonMaxSuppressionV5Impl(boxesVals, scoresVals, maxOutputSizeVal, iouThresholdVal, scoreThresholdVal, softNmsSigmaVal);\n return [selectedIndices, selectedScores];\n }\n};\n//# sourceMappingURL=NonMaxSuppressionV5.js.map", "/**\n * @license\n * Copyright 
2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { PadV2, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function padV2(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n const { paddings, constantValue } = attrs;\n assertNotComplex(x, 'pad');\n const outShape = paddings.map((p, i) => p[0] /* beforePad */ + x.shape[i] + p[1] /* afterPad */);\n const start = paddings.map(p => p[0]);\n const xVals = backend.data.get(x.dataId).values;\n const xSize = util.sizeFromShape(x.shape);\n const xRank = x.shape.length;\n const xStrides = util.computeStrides(x.shape);\n const resultSize = util.sizeFromShape(outShape);\n const resultRank = outShape.length;\n const resultStrides = util.computeStrides(outShape);\n const resVals = util.getTypedArrayFromDType(x.dtype, resultSize);\n if (constantValue !== 0) {\n resVals.fill(constantValue);\n }\n for (let i = 0; i < xSize; i++) {\n const coords = util.indexToLoc(i, xRank, xStrides);\n const outCoords = coords.map((c, i) => c + start[i]);\n const outIndex = util.locToIndex(outCoords, resultRank, resultStrides);\n resVals[outIndex] = xVals[i];\n }\n const outId = backend.write(resVals, outShape, x.dtype);\n return { dataId: outId, shape: outShape, dtype: x.dtype };\n}\nexport const padV2Config = {\n kernelName: PadV2,\n backendName: 'cpu',\n kernelFunc: padV2\n};\n//# sourceMappingURL=PadV2.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Reciprocal } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const reciprocal = unaryKernelFunc(Reciprocal, (xi) => 1 / xi);\nexport const reciprocalConfig = {\n kernelName: Reciprocal,\n backendName: 'cpu',\n kernelFunc: reciprocal,\n};\n//# sourceMappingURL=Reciprocal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, RotateWithOffset, util } from '@tensorflow/tfjs-core';\nexport const rotateWithOffsetConfig = {\n kernelName: RotateWithOffset,\n backendName: 'cpu',\n kernelFunc: ({ inputs, attrs, backend }) => {\n const { image } = inputs;\n const { radians, fillValue, center } = attrs;\n const cpuBackend = backend;\n const output = util.getTypedArrayFromDType(image.dtype, util.sizeFromShape(image.shape));\n const [batch, imageHeight, imageWidth, numChannels] = image.shape;\n const [centerX, centerY] = backend_util.getImageCenter(center, imageHeight, imageWidth);\n const fullOpacityValue = 255;\n const sinFactor = Math.sin(radians);\n const cosFactor = Math.cos(radians);\n const imageVals = cpuBackend.data.get(image.dataId).values;\n for (let batchIdx = 0; batchIdx < batch; batchIdx++) {\n const batchOffset = batchIdx * imageWidth * imageHeight * numChannels;\n for (let row = 0; row < imageHeight; row++) {\n const rowOffset = row * (imageWidth * numChannels);\n for (let col = 0; col < imageWidth; col++) {\n const colOffset = col * numChannels;\n for (let channel = 0; channel < numChannels; channel++) {\n const coords = [batch, row, col, channel];\n const x = coords[2];\n const y = coords[1];\n // coordX/coordY are the result of rotating and translating x/y.\n let coordX = (x - centerX) * cosFactor - (y - centerY) * sinFactor;\n let coordY = (x - centerX) * sinFactor + (y - centerY) * cosFactor;\n coordX = Math.round(coordX + centerX);\n coordY = Math.round(coordY + centerY);\n let outputValue = fillValue;\n if (typeof fillValue !== 'number') {\n if (channel === 3) {\n outputValue = fullOpacityValue;\n }\n else {\n outputValue = fillValue[channel];\n }\n }\n // If the coordinate position falls within the image boundaries...\n if (coordX >= 0 && coordX < imageWidth && coordY >= 0 &&\n coordY < imageHeight) {\n // set the output to the image value at the coordinate position.\n const rotatedRowOffset = coordY * (imageWidth * numChannels);\n const rotatedColOffset = coordX * numChannels;\n const imageIdx = batchOffset + rotatedRowOffset + rotatedColOffset + channel;\n outputValue = imageVals[imageIdx];\n }\n const outIdx = batchOffset + rowOffset + colOffset + channel;\n output[outIdx] = outputValue;\n }\n }\n }\n }\n const dataId = cpuBackend.write(output, image.shape, image.dtype);\n return { dataId, shape: image.shape, dtype: image.dtype };\n }\n};\n//# sourceMappingURL=RotateWithOffset.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Round } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const round = unaryKernelFunc(Round, (xi) => {\n // The algorithm is based on banker's rounding.\n const base = Math.floor(xi);\n if (xi - base < 0.5) {\n return Math.floor(xi);\n }\n else if (xi - base > 0.5) {\n return Math.ceil(xi);\n }\n else {\n if (base % 2.0 === 0.0) {\n return base;\n }\n else {\n return base + 1.0;\n }\n }\n});\nexport const roundConfig = {\n kernelName: Round,\n backendName: 'cpu',\n kernelFunc: round,\n};\n//# sourceMappingURL=Round.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Selu } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nconst scaleAlpha = backend_util.SELU_SCALEALPHA;\nconst scale = backend_util.SELU_SCALE;\nexport const selu = unaryKernelFunc(Selu, (xi) => {\n if (xi >= 0) {\n return scale * xi;\n }\n else {\n return scaleAlpha * (Math.exp(xi) - 1);\n }\n});\nexport const seluConfig = {\n kernelName: Selu,\n backendName: 'cpu',\n kernelFunc: selu,\n};\n//# sourceMappingURL=Selu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sigmoid } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const sigmoid = unaryKernelFunc(Sigmoid, (xi) => 1 / (1 + Math.exp(-xi)));\nexport const sigmoidConfig = {\n kernelName: Sigmoid,\n backendName: 'cpu',\n kernelFunc: sigmoid,\n};\n//# sourceMappingURL=Sigmoid.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sign } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const sign = unaryKernelFunc(Sign, (xi) => {\n if (xi < 0) {\n return -1;\n }\n else if (xi > 0) {\n return 1;\n }\n else {\n return 0;\n }\n});\nexport const signConfig = {\n kernelName: Sign,\n backendName: 'cpu',\n kernelFunc: sign,\n};\n//# sourceMappingURL=Sign.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sin } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const sin = unaryKernelFunc(Sin, (xi) => Math.sin(xi));\nexport const sinConfig = {\n kernelName: Sin,\n backendName: 'cpu',\n kernelFunc: sin,\n};\n//# sourceMappingURL=Sin.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sinh } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const sinh = unaryKernelFunc(Sinh, (xi) => Math.sinh(xi));\nexport const sinhConfig = {\n kernelName: Sinh,\n backendName: 'cpu',\n kernelFunc: sinh,\n};\n//# sourceMappingURL=Sinh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Softplus } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\n// mirrors the implementation of tf.nn.softplus: https://goo.gl/vkcvwX\n// epsilon is the difference between 1.0 and the next representable float.\n// For a single precision 32 bit float this should be 2^-23, see:\n// https://math.byu.edu/~schow/work/IEEEFloatingPoint.htm\nconst epsilon = 1.1920928955078125e-7;\nconst threshold = Math.log(epsilon) + 2.0;\nexport const softplus = unaryKernelFunc(Softplus, (xi) => {\n // Value above which exp(x) may overflow, but softplus(x) == x\n // is within machine epsilon.\n const tooLarge = xi > -threshold;\n // Value below which exp(x) may underflow, but softplus(x) == exp(x)\n // is within machine epsilon.\n const tooSmall = xi < threshold;\n const expX = Math.exp(xi);\n let result;\n if (tooSmall) {\n result = expX;\n }\n else if (tooLarge) {\n result = xi;\n }\n else {\n result = Math.log(1.0 + expX);\n }\n return result;\n});\nexport const softplusConfig = {\n kernelName: Softplus,\n backendName: 'cpu',\n kernelFunc: softplus,\n};\n//# sourceMappingURL=Softplus.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Transpose } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { transposeImpl } from './Transpose_impl';\nexport function transpose(args) {\n const { inputs, attrs, backend } = args;\n const { x } = inputs;\n const { perm } = attrs;\n assertNotComplex(x, 'transpose');\n const xRank = x.shape.length;\n const newShape = new Array(xRank);\n for (let i = 0; i < newShape.length; i++) {\n newShape[i] = x.shape[perm[i]];\n }\n const values = backend.data.get(x.dataId).values;\n const result = transposeImpl(values, x.shape, x.dtype, perm, newShape);\n const dataId = backend.write(result, newShape, x.dtype);\n return { dataId, shape: newShape, dtype: x.dtype };\n}\nexport const transposeConfig = {\n kernelName: Transpose,\n backendName: 'cpu',\n kernelFunc: transpose\n};\n//# sourceMappingURL=Transpose.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, SpaceToBatchND, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { padV2Config } from './PadV2';\nimport { reshape } from './Reshape';\nimport { transpose } from './Transpose';\nexport function spaceToBatchND(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n const { blockShape, paddings } = attrs;\n assertNotComplex([x], 'spaceToBatchND');\n const prod = util.sizeFromShape(blockShape);\n const completePaddings = [[0, 0]];\n completePaddings.push(...paddings);\n for (let i = 1 + blockShape.length; i < x.shape.length; ++i) {\n completePaddings.push([0, 0]);\n }\n const paddedX = padV2Config.kernelFunc({\n inputs: { x },\n backend,\n attrs: { paddings: completePaddings, constantValue: 0 }\n });\n const reshapedPaddedShape = backend_util.getReshaped(paddedX.shape, blockShape, prod, false);\n const permutedReshapedPaddedPermutation = backend_util.getPermuted(reshapedPaddedShape.length, blockShape.length, false);\n const flattenShape = backend_util.getReshapedPermuted(paddedX.shape, blockShape, prod, false);\n const reshapeInputs = { x: paddedX };\n const reshapeAttrs = { shape: reshapedPaddedShape };\n const paddedXReshaped = reshape({ inputs: reshapeInputs, backend, attrs: reshapeAttrs 
});\n const transposeInputs = { x: paddedXReshaped };\n const transposeAttrs = { perm: permutedReshapedPaddedPermutation };\n const paddedXT = transpose({ inputs: transposeInputs, backend, attrs: transposeAttrs });\n const resultReshapeInputs = { x: paddedXT };\n const resultReshapeAttrs = { shape: flattenShape };\n const result = reshape({ inputs: resultReshapeInputs, backend, attrs: resultReshapeAttrs });\n backend.disposeIntermediateTensorInfo(paddedX);\n backend.disposeIntermediateTensorInfo(paddedXReshaped);\n backend.disposeIntermediateTensorInfo(paddedXT);\n return result;\n}\nexport const spaceToBatchNDConfig = {\n kernelName: SpaceToBatchND,\n backendName: 'cpu',\n kernelFunc: spaceToBatchND\n};\n//# sourceMappingURL=SpaceToBatchND.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sqrt } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const sqrt = unaryKernelFunc(Sqrt, (xi) => Math.sqrt(xi));\nexport const sqrtConfig = {\n kernelName: Sqrt,\n backendName: 'cpu',\n kernelFunc: sqrt,\n};\n//# sourceMappingURL=Sqrt.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Square } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport const squareConfig = {\n kernelName: Square,\n backendName: 'cpu',\n kernelFunc: ({ inputs, backend }) => {\n const { x } = inputs;\n const cpuBackend = backend;\n assertNotComplex(x, 'square');\n const values = cpuBackend.data.get(x.dataId).values;\n const newValues = new Float32Array(values.length);\n for (let i = 0; i < values.length; ++i) {\n const value = values[i];\n newValues[i] = value * value;\n }\n const dataId = cpuBackend.write(newValues, x.shape, x.dtype);\n return { dataId, shape: x.shape, dtype: x.dtype };\n }\n};\n//# sourceMappingURL=Square.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Step } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const step = unaryKernelFunc(Step, (xi, attrs) => {\n const stepAttrs = attrs;\n if (isNaN(xi)) {\n return NaN;\n }\n else {\n return xi > 0 ? 1 : stepAttrs.alpha;\n }\n});\nexport const stepConfig = {\n kernelName: Step,\n backendName: 'cpu',\n kernelFunc: step,\n};\n//# sourceMappingURL=Step.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Tan } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const tan = unaryKernelFunc(Tan, (xi) => Math.tan(xi));\nexport const tanConfig = {\n kernelName: Tan,\n backendName: 'cpu',\n kernelFunc: tan,\n};\n//# sourceMappingURL=Tan.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Tanh } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const tanh = unaryKernelFunc(Tanh, (xi) => Math.tanh(xi));\nexport const tanhConfig = {\n kernelName: Tanh,\n backendName: 'cpu',\n kernelFunc: tanh,\n};\n//# sourceMappingURL=Tanh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Unique } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { uniqueImpl } from './Unique_impl';\nexport function unique(args) {\n const { inputs, attrs, backend } = args;\n const { axis } = attrs;\n const { x } = inputs;\n assertNotComplex(x, 'unique');\n const values = backend.data.get(x.dataId).values;\n const { outputValues, outputShape, indices } = uniqueImpl(values, axis, x.shape, x.dtype);\n return [\n backend.makeTensorInfo(outputShape, x.dtype, outputValues),\n backend.makeTensorInfo([indices.length], 'int32', indices),\n ];\n}\nexport const uniqueConfig = {\n kernelName: Unique,\n backendName: 'cpu',\n kernelFunc: unique,\n};\n//# sourceMappingURL=Unique.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// We explicitly import the modular kernels so they get registered in the\n// global registry when we compile the library. 
A modular build would replace\n// the contents of this file and import only the kernels that are needed.\nimport { registerKernel } from '@tensorflow/tfjs-core';\nimport { _fusedMatMulConfig } from './kernels/_FusedMatMul';\nimport { absConfig } from './kernels/Abs';\nimport { acosConfig } from './kernels/Acos';\nimport { acoshConfig } from './kernels/Acosh';\nimport { addConfig } from './kernels/Add';\nimport { asinConfig } from './kernels/Asin';\nimport { asinhConfig } from './kernels/Asinh';\nimport { atanConfig } from './kernels/Atan';\nimport { atanhConfig } from './kernels/Atanh';\nimport { avgPoolConfig } from './kernels/AvgPool';\nimport { avgPoolBackpropConfig } from './kernels/AvgPoolBackprop';\nimport { batchMatMulConfig } from './kernels/BatchMatMul';\nimport { batchNormConfig } from './kernels/BatchNorm';\nimport { castConfig } from './kernels/Cast';\nimport { ceilConfig } from './kernels/Ceil';\nimport { clipConfig } from './kernels/Clip';\nimport { complexConfig } from './kernels/Complex';\nimport { concatConfig } from './kernels/Concat';\nimport { conv2DConfig } from './kernels/Conv2D';\nimport { conv2DBackpropFilterConfig } from './kernels/Conv2DBackpropFilter';\nimport { conv2DBackpropInputConfig } from './kernels/Conv2DBackpropInput';\nimport { conv3DConfig } from './kernels/Conv3D';\nimport { conv3DBackpropFilterV2Config } from './kernels/Conv3DBackpropFilterV2';\nimport { conv3DBackpropInputV2Config } from './kernels/Conv3DBackpropInputV2';\nimport { cosConfig } from './kernels/Cos';\nimport { coshConfig } from './kernels/Cosh';\nimport { depthwiseConv2dNativeConfig } from './kernels/DepthwiseConv2dNative';\nimport { depthwiseConv2dNativeBackpropFilterConfig } from './kernels/DepthwiseConv2dNativeBackpropFilter';\nimport { depthwiseConv2dNativeBackpropInputConfig } from './kernels/DepthwiseConv2dNativeBackpropInput';\nimport { dilation2dConfig } from './kernels/Dilation2D';\nimport { dilation2dBackpropFilterConfig } from './kernels/Dilation2DBackpropFilter';\nimport { dilation2dBackpropInputConfig } from './kernels/Dilation2DBackpropInput';\nimport { divConfig } from './kernels/Div';\nimport { eluConfig } from './kernels/Elu';\nimport { erfConfig } from './kernels/Erf';\nimport { expConfig } from './kernels/Exp';\nimport { expm1Config } from './kernels/Expm1';\nimport { fftConfig } from './kernels/FFT';\nimport { fillConfig } from './kernels/Fill';\nimport { flipLeftRightConfig } from './kernels/FlipLeftRight';\nimport { floorConfig } from './kernels/Floor';\nimport { fusedConv2DConfig } from './kernels/FusedConv2D';\nimport { fusedDepthwiseConv2DConfig } from './kernels/FusedDepthwiseConv2D';\nimport { identityConfig } from './kernels/Identity';\nimport { ifftConfig } from './kernels/IFFT';\nimport { imagConfig } from './kernels/Imag';\nimport { isFiniteConfig } from './kernels/IsFinite';\nimport { isInfConfig } from './kernels/IsInf';\nimport { isNaNConfig } from './kernels/IsNaN';\nimport { logConfig } from './kernels/Log';\nimport { log1pConfig } from './kernels/Log1p';\nimport { logicalNotConfig } from './kernels/LogicalNot';\nimport { maxConfig } from './kernels/Max';\nimport { maxPoolConfig } from './kernels/MaxPool';\nimport { maxPoolBackpropConfig } from './kernels/MaxPoolBackprop';\nimport { maxPoolWithArgmaxConfig } from './kernels/MaxPoolWithArgmax';\nimport { mirrorPadConfig } from './kernels/MirrorPad';\nimport { multiplyConfig } from './kernels/Multiply';\nimport { nonMaxSuppressionV4Config } from './kernels/NonMaxSuppressionV4';\nimport { 
nonMaxSuppressionV5Config } from './kernels/NonMaxSuppressionV5';\nimport { notEqualConfig } from './kernels/NotEqual';\nimport { padV2Config } from './kernels/PadV2';\nimport { preluConfig } from './kernels/Prelu';\nimport { realConfig } from './kernels/Real';\nimport { reciprocalConfig } from './kernels/Reciprocal';\nimport { reluConfig } from './kernels/Relu';\nimport { relu6Config } from './kernels/Relu6';\nimport { reshapeConfig } from './kernels/Reshape';\nimport { rotateWithOffsetConfig } from './kernels/RotateWithOffset';\nimport { roundConfig } from './kernels/Round';\nimport { rsqrtConfig } from './kernels/Rsqrt';\nimport { seluConfig } from './kernels/Selu';\nimport { sigmoidConfig } from './kernels/Sigmoid';\nimport { signConfig } from './kernels/Sign';\nimport { sinConfig } from './kernels/Sin';\nimport { sinhConfig } from './kernels/Sinh';\nimport { sliceConfig } from './kernels/Slice';\nimport { softplusConfig } from './kernels/Softplus';\nimport { spaceToBatchNDConfig } from './kernels/SpaceToBatchND';\nimport { sqrtConfig } from './kernels/Sqrt';\nimport { squareConfig } from './kernels/Square';\nimport { squaredDifferenceConfig } from './kernels/SquaredDifference';\nimport { stepConfig } from './kernels/Step';\nimport { subConfig } from './kernels/Sub';\nimport { tanConfig } from './kernels/Tan';\nimport { tanhConfig } from './kernels/Tanh';\nimport { transposeConfig } from './kernels/Transpose';\nimport { uniqueConfig } from './kernels/Unique';\n// List all kernel configs here\nconst kernelConfigs = [\n _fusedMatMulConfig,\n absConfig,\n acosConfig,\n acoshConfig,\n addConfig,\n asinConfig,\n asinhConfig,\n atanConfig,\n atanhConfig,\n avgPoolConfig,\n avgPoolBackpropConfig,\n batchMatMulConfig,\n batchNormConfig,\n castConfig,\n ceilConfig,\n clipConfig,\n complexConfig,\n concatConfig,\n conv2DBackpropFilterConfig,\n conv2DBackpropInputConfig,\n conv2DConfig,\n conv3DBackpropFilterV2Config,\n conv3DBackpropInputV2Config,\n conv3DConfig,\n cosConfig,\n coshConfig,\n depthwiseConv2dNativeConfig,\n depthwiseConv2dNativeBackpropFilterConfig,\n depthwiseConv2dNativeBackpropInputConfig,\n dilation2dConfig,\n dilation2dBackpropInputConfig,\n dilation2dBackpropFilterConfig,\n divConfig,\n eluConfig,\n erfConfig,\n expConfig,\n expm1Config,\n fftConfig,\n fillConfig,\n flipLeftRightConfig,\n floorConfig,\n fusedConv2DConfig,\n fusedDepthwiseConv2DConfig,\n identityConfig,\n ifftConfig,\n imagConfig,\n isFiniteConfig,\n isInfConfig,\n isNaNConfig,\n logConfig,\n log1pConfig,\n logicalNotConfig,\n maxPoolConfig,\n maxPoolBackpropConfig,\n maxPoolWithArgmaxConfig,\n maxConfig,\n mirrorPadConfig,\n multiplyConfig,\n nonMaxSuppressionV4Config,\n nonMaxSuppressionV5Config,\n notEqualConfig,\n padV2Config,\n preluConfig,\n realConfig,\n reciprocalConfig,\n reluConfig,\n relu6Config,\n reshapeConfig,\n rotateWithOffsetConfig,\n roundConfig,\n rsqrtConfig,\n seluConfig,\n sigmoidConfig,\n signConfig,\n sinConfig,\n sinhConfig,\n sliceConfig,\n softplusConfig,\n spaceToBatchNDConfig,\n sqrtConfig,\n squareConfig,\n squaredDifferenceConfig,\n stepConfig,\n subConfig,\n tanConfig,\n tanhConfig,\n transposeConfig,\n uniqueConfig,\n];\nfor (const kernelConfig of kernelConfigs) {\n registerKernel(kernelConfig);\n}\n//# sourceMappingURL=register_all_kernels.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// All exports from this package should be in base.\nexport * from './base';\nimport './register_all_kernels';\n//# sourceMappingURL=index.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nconst contexts = {};\nconst WEBGL_ATTRIBUTES = {\n alpha: false,\n antialias: false,\n premultipliedAlpha: false,\n preserveDrawingBuffer: false,\n depth: false,\n stencil: false,\n failIfMajorPerformanceCaveat: true\n};\nexport function clearWebGLContext(webGLVersion) {\n delete contexts[webGLVersion];\n}\nexport function setWebGLContext(webGLVersion, gl) {\n contexts[webGLVersion] = gl;\n}\nexport function getWebGLContext(webGLVersion) {\n if (!(webGLVersion in contexts)) {\n const newCtx = getWebGLRenderingContext(webGLVersion);\n if (newCtx !== null) {\n contexts[webGLVersion] = newCtx;\n }\n else {\n console.log('Could not get context for WebGL version', webGLVersion);\n return null;\n }\n }\n const gl = contexts[webGLVersion];\n if (gl.isContextLost()) {\n delete contexts[webGLVersion];\n return getWebGLContext(webGLVersion);\n }\n gl.disable(gl.DEPTH_TEST);\n gl.disable(gl.STENCIL_TEST);\n gl.disable(gl.BLEND);\n gl.disable(gl.DITHER);\n gl.disable(gl.POLYGON_OFFSET_FILL);\n gl.disable(gl.SAMPLE_COVERAGE);\n gl.enable(gl.SCISSOR_TEST);\n gl.enable(gl.CULL_FACE);\n gl.cullFace(gl.BACK);\n return contexts[webGLVersion];\n}\nfunction createCanvas(webGLVersion) {\n if (typeof OffscreenCanvas !== 'undefined' && webGLVersion === 2) {\n return new OffscreenCanvas(300, 150);\n }\n else if (typeof document !== 'undefined') {\n return document.createElement('canvas');\n }\n else {\n throw new Error('Cannot create a canvas in this context');\n }\n}\nfunction getWebGLRenderingContext(webGLVersion) {\n if (webGLVersion !== 1 && webGLVersion !== 2) {\n throw new Error('Cannot get WebGL rendering context, WebGL is disabled.');\n }\n const canvas = createCanvas(webGLVersion);\n canvas.addEventListener('webglcontextlost', (ev) => {\n ev.preventDefault();\n delete contexts[webGLVersion];\n }, false);\n if (webGLVersion === 1) {\n return (canvas.getContext('webgl', WEBGL_ATTRIBUTES) ||\n canvas.getContext('experimental-webgl', WEBGL_ATTRIBUTES));\n }\n return 
canvas.getContext('webgl2', WEBGL_ATTRIBUTES);\n}\n//# sourceMappingURL=canvas_util.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env, util } from '@tensorflow/tfjs-core';\nexport var PackingScheme;\n(function (PackingScheme) {\n /**\n * All values in a single texel are densely packed without any constraints.\n *\n * This is how the shader encodes a tensor with shape = [2, 3, 4]\n * (indices are [batch, row, col]).\n *\n * 000|001 010|011 020|021\n * ------- ------- -------\n * 002|003 012|013 022|023\n *\n * 100|101 110|111 120|121\n * ------- ------- -------\n * 102|103 112|113 122|123\n *\n */\n PackingScheme[PackingScheme[\"DENSE\"] = 0] = \"DENSE\";\n /**\n * Single texels contain only values from the same batch, and from adjacent\n * rows and columns.\n *\n * This is how the shader encodes a tensor with shape = [2, 3, 5]\n * (indices are [batch, row, col]).\n *\n * 000|001 002|003 004|xxx 020|021 022|023 024|xxx\n * ------- ------- ------- ------- ------- -------\n * 010|011 012|013 014|xxx xxx|xxx xxx|xxx xxx|xxx\n *\n * 100|101 102|103 104|xxx 120|121 122|123 124|xxx\n * ------- ------- ------- ------- ------- -------\n * 110|111 112|113 114|xxx xxx|xxx xxx|xxx xxx|xxx\n *\n */\n PackingScheme[PackingScheme[\"SHARED_BATCH\"] = 1] = \"SHARED_BATCH\";\n})(PackingScheme || (PackingScheme = {}));\nexport var TextureUsage;\n(function (TextureUsage) {\n TextureUsage[TextureUsage[\"RENDER\"] = 0] = \"RENDER\";\n TextureUsage[TextureUsage[\"UPLOAD\"] = 1] = \"UPLOAD\";\n TextureUsage[TextureUsage[\"PIXELS\"] = 2] = \"PIXELS\";\n TextureUsage[TextureUsage[\"DOWNLOAD\"] = 3] = \"DOWNLOAD\";\n})(TextureUsage || (TextureUsage = {}));\nexport var PhysicalTextureType;\n(function (PhysicalTextureType) {\n PhysicalTextureType[PhysicalTextureType[\"UNPACKED_FLOAT16\"] = 0] = \"UNPACKED_FLOAT16\";\n PhysicalTextureType[PhysicalTextureType[\"UNPACKED_FLOAT32\"] = 1] = \"UNPACKED_FLOAT32\";\n PhysicalTextureType[PhysicalTextureType[\"PACKED_4X1_UNSIGNED_BYTE\"] = 2] = \"PACKED_4X1_UNSIGNED_BYTE\";\n PhysicalTextureType[PhysicalTextureType[\"PACKED_2X2_FLOAT32\"] = 3] = \"PACKED_2X2_FLOAT32\";\n PhysicalTextureType[PhysicalTextureType[\"PACKED_2X2_FLOAT16\"] = 4] = \"PACKED_2X2_FLOAT16\";\n})(PhysicalTextureType || (PhysicalTextureType = {}));\nexport function getUnpackedMatrixTextureShapeWidthHeight(rows, columns) {\n return [columns, rows];\n}\nexport function getUnpackedArraySizeFromMatrixSize(matrixSize, channelsPerTexture) {\n return matrixSize * channelsPerTexture;\n}\nexport function getColorMatrixTextureShapeWidthHeight(rows, columns) {\n return [columns * 4, rows];\n}\n/**\n * Get shape for densely packed RGBA texture.\n */\nexport function getDenseTexShape(shape) {\n const size = util.sizeFromShape(shape);\n const texelsNeeded = Math.ceil(size / 4);\n return util.sizeToSquarishShape(texelsNeeded);\n}\nexport 
function getMatrixSizeFromUnpackedArraySize(unpackedSize, channelsPerTexture) {\n if (unpackedSize % channelsPerTexture !== 0) {\n throw new Error(`unpackedSize (${unpackedSize}) must be a multiple of ` +\n `${channelsPerTexture}`);\n }\n return unpackedSize / channelsPerTexture;\n}\nexport function decodeMatrixFromUnpackedColorRGBAArray(unpackedArray, matrix, channels) {\n const requiredSize = unpackedArray.length * channels / 4;\n if (matrix.length < requiredSize) {\n throw new Error(`matrix length (${matrix.length}) must be >= ${requiredSize}`);\n }\n let dst = 0;\n for (let src = 0; src < unpackedArray.length; src += 4) {\n for (let c = 0; c < channels; c++) {\n matrix[dst++] = unpackedArray[src + c];\n }\n }\n}\nexport function getPackedMatrixTextureShapeWidthHeight(rows, columns) {\n return [\n Math.max(1, Math.ceil(columns / 2)), Math.max(1, Math.ceil(rows / 2))\n ];\n}\nexport function getPackedRGBAArraySizeFromMatrixShape(rows, columns) {\n const [w, h] = getPackedMatrixTextureShapeWidthHeight(rows, columns);\n return w * h * 4;\n}\nexport function getTextureConfig(\n// tslint:disable-next-line:no-any\ngl, textureHalfFloatExtension) {\n // tslint:disable-next-line:no-any\n const glany = gl;\n let internalFormatFloat;\n let internalFormatHalfFloat;\n let internalFormatPackedHalfFloat;\n let internalFormatPackedFloat;\n let textureFormatFloat;\n let downloadTextureFormat;\n let downloadUnpackNumChannels;\n let defaultNumChannels;\n let textureTypeHalfFloat;\n let textureTypeFloat;\n if (env().getNumber('WEBGL_VERSION') === 2) {\n internalFormatFloat = glany.R32F;\n internalFormatHalfFloat = glany.R16F;\n internalFormatPackedHalfFloat = glany.RGBA16F;\n internalFormatPackedFloat = glany.RGBA32F;\n textureFormatFloat = glany.RED;\n downloadUnpackNumChannels = 4;\n defaultNumChannels = 1;\n textureTypeHalfFloat = glany.HALF_FLOAT;\n textureTypeFloat = glany.FLOAT;\n }\n else {\n internalFormatFloat = gl.RGBA;\n internalFormatHalfFloat = gl.RGBA;\n internalFormatPackedHalfFloat = gl.RGBA;\n internalFormatPackedFloat = glany.RGBA;\n textureFormatFloat = gl.RGBA;\n downloadUnpackNumChannels = 4;\n defaultNumChannels = 4;\n textureTypeHalfFloat = textureHalfFloatExtension != null ?\n textureHalfFloatExtension.HALF_FLOAT_OES :\n null;\n textureTypeFloat = gl.FLOAT;\n }\n downloadTextureFormat = gl.RGBA;\n return {\n internalFormatFloat,\n internalFormatHalfFloat,\n internalFormatPackedHalfFloat,\n internalFormatPackedFloat,\n textureFormatFloat,\n downloadTextureFormat,\n downloadUnpackNumChannels,\n defaultNumChannels,\n textureTypeHalfFloat,\n textureTypeFloat\n };\n}\n//# sourceMappingURL=tex_util.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env, util } from '@tensorflow/tfjs-core';\nimport { getWebGLContext } from './canvas_util';\nimport { getTextureConfig } from './tex_util';\nexport function callAndCheck(gl, func) {\n const returnValue = func();\n if (env().getBool('DEBUG')) {\n checkWebGLError(gl);\n }\n return returnValue;\n}\nfunction checkWebGLError(gl) {\n const error = gl.getError();\n if (error !== gl.NO_ERROR) {\n throw new Error('WebGL Error: ' + getWebGLErrorMessage(gl, error));\n }\n}\n// https://en.wikipedia.org/wiki/Half-precision_floating-point_format\nconst MIN_FLOAT16 = 5.96e-8;\nconst MAX_FLOAT16 = 65504;\nexport function canBeRepresented(num) {\n if (env().getBool('WEBGL_RENDER_FLOAT32_ENABLED') || num === 0 ||\n (MIN_FLOAT16 < Math.abs(num) && Math.abs(num) < MAX_FLOAT16)) {\n return true;\n }\n return false;\n}\nexport function getWebGLErrorMessage(gl, status) {\n switch (status) {\n case gl.NO_ERROR:\n return 'NO_ERROR';\n case gl.INVALID_ENUM:\n return 'INVALID_ENUM';\n case gl.INVALID_VALUE:\n return 'INVALID_VALUE';\n case gl.INVALID_OPERATION:\n return 'INVALID_OPERATION';\n case gl.INVALID_FRAMEBUFFER_OPERATION:\n return 'INVALID_FRAMEBUFFER_OPERATION';\n case gl.OUT_OF_MEMORY:\n return 'OUT_OF_MEMORY';\n case gl.CONTEXT_LOST_WEBGL:\n return 'CONTEXT_LOST_WEBGL';\n default:\n return `Unknown error code ${status}`;\n }\n}\nexport function getExtensionOrThrow(gl, extensionName) {\n return throwIfNull(gl, () => gl.getExtension(extensionName), 'Extension \"' + extensionName + '\" not supported on this browser.');\n}\nexport function createVertexShader(gl, vertexShaderSource) {\n const vertexShader = throwIfNull(gl, () => gl.createShader(gl.VERTEX_SHADER), 'Unable to create vertex WebGLShader.');\n callAndCheck(gl, () => gl.shaderSource(vertexShader, vertexShaderSource));\n callAndCheck(gl, () => gl.compileShader(vertexShader));\n if (gl.getShaderParameter(vertexShader, gl.COMPILE_STATUS) === false) {\n console.log(gl.getShaderInfoLog(vertexShader));\n throw new Error('Failed to compile vertex shader.');\n }\n return vertexShader;\n}\nexport function createFragmentShader(gl, fragmentShaderSource) {\n const fragmentShader = throwIfNull(gl, () => gl.createShader(gl.FRAGMENT_SHADER), 'Unable to create fragment WebGLShader.');\n callAndCheck(gl, () => gl.shaderSource(fragmentShader, fragmentShaderSource));\n callAndCheck(gl, () => gl.compileShader(fragmentShader));\n if (gl.getShaderParameter(fragmentShader, gl.COMPILE_STATUS) === false) {\n logShaderSourceAndInfoLog(fragmentShaderSource, gl.getShaderInfoLog(fragmentShader));\n throw new Error('Failed to compile fragment shader.');\n }\n return fragmentShader;\n}\nconst lineNumberRegex = /ERROR: [0-9]+:([0-9]+):/g;\nfunction logShaderSourceAndInfoLog(shaderSource, shaderInfoLog) {\n const lineNumberRegexResult = lineNumberRegex.exec(shaderInfoLog);\n if (lineNumberRegexResult == null) 
{\n console.log(`Couldn't parse line number in error: ${shaderInfoLog}`);\n console.log(shaderSource);\n return;\n }\n const lineNumber = +lineNumberRegexResult[1];\n const shaderLines = shaderSource.split('\\n');\n const pad = shaderLines.length.toString().length + 2;\n const linesWithLineNumbers = shaderLines.map((line, lineNumber) => util.rightPad((lineNumber + 1).toString(), pad) + line);\n let maxLineLength = 0;\n for (let i = 0; i < linesWithLineNumbers.length; i++) {\n maxLineLength = Math.max(linesWithLineNumbers[i].length, maxLineLength);\n }\n const beforeErrorLines = linesWithLineNumbers.slice(0, lineNumber - 1);\n const errorLine = linesWithLineNumbers.slice(lineNumber - 1, lineNumber);\n const afterErrorLines = linesWithLineNumbers.slice(lineNumber);\n console.log(beforeErrorLines.join('\\n'));\n console.log(shaderInfoLog.split('\\n')[0]);\n console.log(`%c ${util.rightPad(errorLine[0], maxLineLength)}`, 'border:1px solid red; background-color:#e3d2d2; color:#a61717');\n console.log(afterErrorLines.join('\\n'));\n}\nexport function createProgram(gl) {\n return throwIfNull(gl, () => gl.createProgram(), 'Unable to create WebGLProgram.');\n}\nexport function linkProgram(gl, program) {\n callAndCheck(gl, () => gl.linkProgram(program));\n if (gl.getProgramParameter(program, gl.LINK_STATUS) === false) {\n console.log(gl.getProgramInfoLog(program));\n throw new Error('Failed to link vertex and fragment shaders.');\n }\n}\nexport function validateProgram(gl, program) {\n callAndCheck(gl, () => gl.validateProgram(program));\n if (gl.getProgramParameter(program, gl.VALIDATE_STATUS) === false) {\n console.log(gl.getProgramInfoLog(program));\n throw new Error('Shader program validation failed.');\n }\n}\nexport function createStaticVertexBuffer(gl, data) {\n const buffer = throwIfNull(gl, () => gl.createBuffer(), 'Unable to create WebGLBuffer');\n callAndCheck(gl, () => gl.bindBuffer(gl.ARRAY_BUFFER, buffer));\n callAndCheck(gl, () => gl.bufferData(gl.ARRAY_BUFFER, data, gl.STATIC_DRAW));\n return buffer;\n}\nexport function createStaticIndexBuffer(gl, data) {\n const buffer = throwIfNull(gl, () => gl.createBuffer(), 'Unable to create WebGLBuffer');\n callAndCheck(gl, () => gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, buffer));\n callAndCheck(gl, () => gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, data, gl.STATIC_DRAW));\n return buffer;\n}\nexport function getNumChannels() {\n if (env().getNumber('WEBGL_VERSION') === 2) {\n return 1;\n }\n return 4;\n}\nexport function createTexture(gl) {\n return throwIfNull(gl, () => gl.createTexture(), 'Unable to create WebGLTexture.');\n}\nexport function validateTextureSize(width, height) {\n const maxTextureSize = env().getNumber('WEBGL_MAX_TEXTURE_SIZE');\n if ((width <= 0) || (height <= 0)) {\n const requested = `[${width}x${height}]`;\n throw new Error('Requested texture size ' + requested + ' is invalid.');\n }\n if ((width > maxTextureSize) || (height > maxTextureSize)) {\n const requested = `[${width}x${height}]`;\n const max = `[${maxTextureSize}x${maxTextureSize}]`;\n throw new Error('Requested texture size ' + requested +\n ' greater than WebGL maximum on this browser / GPU ' + max + '.');\n }\n}\nexport function createFramebuffer(gl) {\n return throwIfNull(gl, () => gl.createFramebuffer(), 'Unable to create WebGLFramebuffer.');\n}\nexport function bindVertexBufferToProgramAttribute(gl, program, attribute, buffer, arrayEntriesPerItem, itemStrideInBytes, itemOffsetInBytes) {\n const loc = gl.getAttribLocation(program, attribute);\n if (loc === -1) {\n 
// The GPU compiler decided to strip out this attribute because it's unused,\n // thus no need to bind.\n return false;\n }\n callAndCheck(gl, () => gl.bindBuffer(gl.ARRAY_BUFFER, buffer));\n callAndCheck(gl, () => gl.vertexAttribPointer(loc, arrayEntriesPerItem, gl.FLOAT, false, itemStrideInBytes, itemOffsetInBytes));\n callAndCheck(gl, () => gl.enableVertexAttribArray(loc));\n return true;\n}\nexport function bindTextureUnit(gl, texture, textureUnit) {\n validateTextureUnit(gl, textureUnit);\n callAndCheck(gl, () => gl.activeTexture(gl.TEXTURE0 + textureUnit));\n callAndCheck(gl, () => gl.bindTexture(gl.TEXTURE_2D, texture));\n}\nexport function unbindTextureUnit(gl, textureUnit) {\n validateTextureUnit(gl, textureUnit);\n callAndCheck(gl, () => gl.activeTexture(gl.TEXTURE0 + textureUnit));\n callAndCheck(gl, () => gl.bindTexture(gl.TEXTURE_2D, null));\n}\nexport function getProgramUniformLocationOrThrow(gl, program, uniformName) {\n return throwIfNull(gl, () => gl.getUniformLocation(program, uniformName), 'uniform \"' + uniformName + '\" not present in program.');\n}\nexport function getProgramUniformLocation(gl, program, uniformName) {\n return gl.getUniformLocation(program, uniformName);\n}\nexport function bindTextureToProgramUniformSampler(gl, texture, uniformSamplerLocation, textureUnit) {\n callAndCheck(gl, () => bindTextureUnit(gl, texture, textureUnit));\n callAndCheck(gl, () => gl.uniform1i(uniformSamplerLocation, textureUnit));\n}\nexport function bindCanvasToFramebuffer(gl) {\n callAndCheck(gl, () => gl.bindFramebuffer(gl.FRAMEBUFFER, null));\n callAndCheck(gl, () => gl.viewport(0, 0, gl.canvas.width, gl.canvas.height));\n callAndCheck(gl, () => gl.scissor(0, 0, gl.canvas.width, gl.canvas.height));\n}\nexport function bindColorTextureToFramebuffer(gl, texture, framebuffer) {\n callAndCheck(gl, () => gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer));\n callAndCheck(gl, () => gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0));\n}\nexport function unbindColorTextureFromFramebuffer(gl, framebuffer) {\n callAndCheck(gl, () => gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer));\n callAndCheck(gl, () => gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, null, 0));\n}\nexport function validateFramebuffer(gl) {\n const status = gl.checkFramebufferStatus(gl.FRAMEBUFFER);\n if (status !== gl.FRAMEBUFFER_COMPLETE) {\n throw new Error('Error binding framebuffer: ' + getFramebufferErrorMessage(gl, status));\n }\n}\nexport function getFramebufferErrorMessage(gl, status) {\n switch (status) {\n case gl.FRAMEBUFFER_INCOMPLETE_ATTACHMENT:\n return 'FRAMEBUFFER_INCOMPLETE_ATTACHMENT';\n case gl.FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT:\n return 'FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT';\n case gl.FRAMEBUFFER_INCOMPLETE_DIMENSIONS:\n return 'FRAMEBUFFER_INCOMPLETE_DIMENSIONS';\n case gl.FRAMEBUFFER_UNSUPPORTED:\n return 'FRAMEBUFFER_UNSUPPORTED';\n default:\n return `unknown error ${status}`;\n }\n}\nfunction throwIfNull(gl, returnTOrNull, failureMessage) {\n const tOrNull = callAndCheck(gl, () => returnTOrNull());\n if (tOrNull == null) {\n throw new Error(failureMessage);\n }\n return tOrNull;\n}\nfunction validateTextureUnit(gl, textureUnit) {\n const maxTextureUnit = gl.MAX_COMBINED_TEXTURE_IMAGE_UNITS - 1;\n const glTextureUnit = textureUnit + gl.TEXTURE0;\n if (glTextureUnit < gl.TEXTURE0 || glTextureUnit > maxTextureUnit) {\n const textureUnitRange = `[gl.TEXTURE0, gl.TEXTURE${maxTextureUnit}]`;\n throw new 
Error(`textureUnit must be in ${textureUnitRange}.`);\n }\n}\nexport function getBatchDim(shape, dimsToSkip = 2) {\n return util.sizeFromShape(shape.slice(0, shape.length - dimsToSkip));\n}\nexport function getRowsCols(shape) {\n if (shape.length === 0) {\n throw Error('Cannot get rows and columns of an empty shape array.');\n }\n return [\n shape.length > 1 ? shape[shape.length - 2] : 1, shape[shape.length - 1]\n ];\n}\nexport function getShapeAs3D(shape) {\n let shapeAs3D = [1, 1, 1];\n const isScalar = shape.length === 0 || (shape.length === 1 && shape[0] === 1);\n if (!isScalar) {\n shapeAs3D =\n [getBatchDim(shape), ...getRowsCols(shape)];\n }\n return shapeAs3D;\n}\nexport function getTextureShapeFromLogicalShape(logShape, isPacked = false) {\n let maxTexSize = env().getNumber('WEBGL_MAX_TEXTURE_SIZE');\n if (isPacked) {\n maxTexSize = maxTexSize * 2;\n // This logic ensures we accurately count the number of packed texels needed\n // to accommodate the tensor. We can only pack values in the same texel if\n // they are from adjacent pairs of rows/cols within the same batch. So if a\n // tensor has 3 rows, we pretend it has 4 rows in order to account for the\n // fact that the texels containing the third row are half empty.\n logShape = logShape.map((d, i) => i >= logShape.length - 2 ?\n util.nearestLargerEven(logShape[i]) :\n logShape[i]);\n // Packed texture height is at least 2 (the channel height of a single\n // texel).\n if (logShape.length === 1) {\n logShape = [2, logShape[0]];\n }\n }\n // If logical shape is 2, we don't squeeze, since we want to match physical.\n if (logShape.length !== 2) {\n const squeezeResult = util.squeezeShape(logShape);\n logShape = squeezeResult.newShape;\n }\n let size = util.sizeFromShape(logShape);\n if (logShape.length <= 1 && size <= maxTexSize) {\n return [1, size];\n }\n else if (logShape.length === 2 && logShape[0] <= maxTexSize &&\n logShape[1] <= maxTexSize) {\n return logShape;\n }\n else if (logShape.length === 3 && logShape[0] * logShape[1] <= maxTexSize &&\n logShape[2] <= maxTexSize) {\n return [logShape[0] * logShape[1], logShape[2]];\n }\n else if (logShape.length === 3 && logShape[0] <= maxTexSize &&\n logShape[1] * logShape[2] <= maxTexSize) {\n return [logShape[0], logShape[1] * logShape[2]];\n }\n else if (logShape.length === 4 &&\n logShape[0] * logShape[1] * logShape[2] <= maxTexSize &&\n logShape[3] <= maxTexSize) {\n return [logShape[0] * logShape[1] * logShape[2], logShape[3]];\n }\n else if (logShape.length === 4 && logShape[0] <= maxTexSize &&\n logShape[1] * logShape[2] * logShape[3] <= maxTexSize) {\n return [logShape[0], logShape[1] * logShape[2] * logShape[3]];\n }\n else {\n if (isPacked) {\n // For packed textures size equals the number of channels required to\n // accommodate the texture data. However in order to squarify such that\n // inner dimensions stay even, we rewrite size to equal the number of\n // texels. 
Then in the return statement we rehydrate the squarified\n // dimensions to channel units.\n const batchDim = getBatchDim(logShape);\n let rows = 2, cols = 2;\n if (logShape.length) {\n [rows, cols] = getRowsCols(logShape);\n }\n size = batchDim * (rows / 2) * (cols / 2);\n return util.sizeToSquarishShape(size).map(d => d * 2);\n }\n return util.sizeToSquarishShape(size);\n }\n}\nfunction isEven(n) {\n return n % 2 === 0;\n}\n/**\n * This determines whether reshaping a packed texture requires rearranging\n * the data within the texture, assuming 2x2 packing.\n */\nexport function isReshapeFree(shape1, shape2) {\n shape1 = shape1.slice(-2);\n shape2 = shape2.slice(-2);\n if (util.arraysEqual(shape1, shape2)) {\n return true;\n }\n if (!shape1.length || !shape2.length) { // One of the shapes is a scalar.\n return true;\n }\n if (shape1[0] === 0 || shape1[1] === 0 || shape2[0] === 0 ||\n shape2[1] === 0) {\n return true;\n }\n if (shape1.length !== shape2.length) { // One of the shapes is a vector.\n const shape1Cols = shape1.slice(-1)[0];\n const shape2Cols = shape2.slice(-1)[0];\n if (shape1Cols === shape2Cols) {\n return true;\n }\n if (isEven(shape1Cols) && isEven(shape2Cols) &&\n (shape1[0] === 1 || shape2[0] === 1)) {\n return true;\n }\n }\n return shape1[1] === shape2[1] && isEven(shape1[0]) && isEven(shape2[0]);\n}\n// We cache webgl params because the environment gets reset between\n// unit tests and we don't want to constantly query the WebGLContext for\n// MAX_TEXTURE_SIZE.\nlet MAX_TEXTURE_SIZE;\nlet MAX_TEXTURES_IN_SHADER;\nexport function getWebGLMaxTextureSize(webGLVersion) {\n if (MAX_TEXTURE_SIZE == null) {\n const gl = getWebGLContext(webGLVersion);\n MAX_TEXTURE_SIZE = gl.getParameter(gl.MAX_TEXTURE_SIZE);\n }\n return MAX_TEXTURE_SIZE;\n}\nexport function resetMaxTextureSize() {\n MAX_TEXTURE_SIZE = null;\n}\nexport function resetMaxTexturesInShader() {\n MAX_TEXTURES_IN_SHADER = null;\n}\nexport function getMaxTexturesInShader(webGLVersion) {\n if (MAX_TEXTURES_IN_SHADER == null) {\n const gl = getWebGLContext(webGLVersion);\n MAX_TEXTURES_IN_SHADER = gl.getParameter(gl.MAX_TEXTURE_IMAGE_UNITS);\n }\n // We cap at 16 to avoid spurious runtime \"memory exhausted\" error.\n return Math.min(16, MAX_TEXTURES_IN_SHADER);\n}\nexport function getWebGLDisjointQueryTimerVersion(webGLVersion) {\n if (webGLVersion === 0) {\n return 0;\n }\n let queryTimerVersion;\n const gl = getWebGLContext(webGLVersion);\n if (hasExtension(gl, 'EXT_disjoint_timer_query_webgl2') &&\n webGLVersion === 2) {\n queryTimerVersion = 2;\n }\n else if (hasExtension(gl, 'EXT_disjoint_timer_query')) {\n queryTimerVersion = 1;\n }\n else {\n queryTimerVersion = 0;\n }\n return queryTimerVersion;\n}\nexport function hasExtension(gl, extensionName) {\n const ext = gl.getExtension(extensionName);\n return ext != null;\n}\nexport function isWebGLVersionEnabled(webGLVersion) {\n try {\n const gl = getWebGLContext(webGLVersion);\n if (gl != null) {\n return true;\n }\n }\n catch (e) {\n console.log('Error when getting WebGL context: ', e);\n return false;\n }\n return false;\n}\nexport function isCapableOfRenderingToFloatTexture(webGLVersion) {\n if (webGLVersion === 0) {\n return false;\n }\n const gl = getWebGLContext(webGLVersion);\n if (webGLVersion === 1) {\n if (!hasExtension(gl, 'OES_texture_float')) {\n return false;\n }\n }\n else {\n if (!hasExtension(gl, 'EXT_color_buffer_float')) {\n return false;\n }\n }\n const isFrameBufferComplete = createFloatTextureAndBindToFramebuffer(gl);\n return 
isFrameBufferComplete;\n}\n/**\n * Check if we can download values from a float/half-float texture.\n *\n * Note that for performance reasons we use binding a texture to a framebuffer\n * as a proxy for ability to download float values later using readPixels. The\n * texture params of this texture will not match those in readPixels exactly\n * but if we are unable to bind some kind of float texture to the frameBuffer\n * then we definitely will not be able to read float values from it.\n */\nexport function isDownloadFloatTextureEnabled(webGLVersion) {\n if (webGLVersion === 0) {\n return false;\n }\n const gl = getWebGLContext(webGLVersion);\n if (webGLVersion === 1) {\n if (!hasExtension(gl, 'OES_texture_float')) {\n return false;\n }\n if (!hasExtension(gl, 'WEBGL_color_buffer_float')) {\n return false;\n }\n }\n else {\n if (hasExtension(gl, 'EXT_color_buffer_float')) {\n return createFloatTextureAndBindToFramebuffer(gl);\n }\n const COLOR_BUFFER_HALF_FLOAT = 'EXT_color_buffer_half_float';\n if (hasExtension(gl, COLOR_BUFFER_HALF_FLOAT)) {\n const textureHalfFloatExtension = gl.getExtension(COLOR_BUFFER_HALF_FLOAT);\n return createHalfFloatTextureAndBindToFramebuffer(gl, textureHalfFloatExtension);\n }\n return false;\n }\n const isFrameBufferComplete = createFloatTextureAndBindToFramebuffer(gl);\n return isFrameBufferComplete;\n}\nfunction createFloatTextureAndBindToFramebuffer(gl) {\n const texConfig = getTextureConfig(gl);\n const texture = gl.createTexture();\n gl.bindTexture(gl.TEXTURE_2D, texture);\n const width = 1;\n const height = 1;\n gl.texImage2D(gl.TEXTURE_2D, 0, texConfig.internalFormatFloat, width, height, 0, texConfig.textureFormatFloat, texConfig.textureTypeFloat, null);\n const frameBuffer = gl.createFramebuffer();\n gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer);\n gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);\n const isFrameBufferComplete = gl.checkFramebufferStatus(gl.FRAMEBUFFER) === gl.FRAMEBUFFER_COMPLETE;\n gl.bindTexture(gl.TEXTURE_2D, null);\n gl.bindFramebuffer(gl.FRAMEBUFFER, null);\n gl.deleteTexture(texture);\n gl.deleteFramebuffer(frameBuffer);\n return isFrameBufferComplete;\n}\nfunction createHalfFloatTextureAndBindToFramebuffer(\n// tslint:disable-next-line:no-any\ngl, textureHalfFloatExtension) {\n const texConfig = getTextureConfig(gl, textureHalfFloatExtension);\n const texture = gl.createTexture();\n gl.bindTexture(gl.TEXTURE_2D, texture);\n const width = 1;\n const height = 1;\n gl.texImage2D(gl.TEXTURE_2D, 0, texConfig.internalFormatHalfFloat, width, height, 0, texConfig.textureFormatFloat, texConfig.textureTypeHalfFloat, null);\n const frameBuffer = gl.createFramebuffer();\n gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer);\n gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);\n const isFrameBufferComplete = gl.checkFramebufferStatus(gl.FRAMEBUFFER) === gl.FRAMEBUFFER_COMPLETE;\n gl.bindTexture(gl.TEXTURE_2D, null);\n gl.bindFramebuffer(gl.FRAMEBUFFER, null);\n gl.deleteTexture(texture);\n gl.deleteFramebuffer(frameBuffer);\n return isFrameBufferComplete;\n}\nexport function isWebGLFenceEnabled(webGLVersion) {\n if (webGLVersion !== 2) {\n return false;\n }\n const gl = getWebGLContext(webGLVersion);\n // tslint:disable-next-line:no-any\n const isEnabled = gl.fenceSync != null;\n return isEnabled;\n}\nexport function assertNotComplex(tensor, opName) {\n if (!Array.isArray(tensor)) {\n tensor = [tensor];\n }\n tensor.forEach(t => {\n if (t != null) {\n 
util.assert(t.dtype !== 'complex64', () => `${opName} does not support complex64 tensors ` +\n 'in the WebGL backend.');\n }\n });\n}\n//# sourceMappingURL=webgl_util.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { device_util, env } from '@tensorflow/tfjs-core';\nimport { getMaxTexturesInShader, getWebGLDisjointQueryTimerVersion, getWebGLMaxTextureSize, isCapableOfRenderingToFloatTexture, isDownloadFloatTextureEnabled, isWebGLFenceEnabled, isWebGLVersionEnabled } from './webgl_util';\nconst ENV = env();\n/**\n * This file contains WebGL-specific flag registrations.\n */\n/**\n * True if WebGL is supported.\n */\nENV.registerFlag('HAS_WEBGL', () => ENV.getNumber('WEBGL_VERSION') > 0);\n/** 0: No WebGL, 1: WebGL 1.0, 2: WebGL 2.0. */\nENV.registerFlag('WEBGL_VERSION', () => {\n if (isWebGLVersionEnabled(2)) {\n return 2;\n }\n else if (isWebGLVersionEnabled(1)) {\n return 1;\n }\n return 0;\n});\n/** Whether to check for numerical representation problems. */\nENV.registerFlag('WEBGL_CHECK_NUMERICAL_PROBLEMS', () => false);\nENV.registerFlag('WEBGL_BUFFER_SUPPORTED', () => ENV.get('WEBGL_VERSION') === 2);\n/** Whether the WebGL backend will sometimes forward ops to the CPU. */\nENV.registerFlag('WEBGL_CPU_FORWARD', () => true);\n/** Whether the WebGL backend will always use f16 textures for rendering. */\nENV.registerFlag('WEBGL_FORCE_F16_TEXTURES', () => false);\n/** Whether to turn all packing related flags on. */\nENV.registerFlag('WEBGL_PACK', () => ENV.getBool('HAS_WEBGL'));\n/** Whether we will pack the batchnormalization op. */\nENV.registerFlag('WEBGL_PACK_NORMALIZATION', () => ENV.getBool('WEBGL_PACK'));\n/** Whether we will pack the clip op. */\nENV.registerFlag('WEBGL_PACK_CLIP', () => ENV.getBool('WEBGL_PACK'));\n/** Whether we will pack the depthwise conv op. */\n// TODO: https://github.com/tensorflow/tfjs/issues/1679\nENV.registerFlag('WEBGL_PACK_DEPTHWISECONV', () => false);\n/** Whether we will pack binary ops. */\nENV.registerFlag('WEBGL_PACK_BINARY_OPERATIONS', () => ENV.getBool('WEBGL_PACK'));\n/** Whether we will pack unary ops. */\nENV.registerFlag('WEBGL_PACK_UNARY_OPERATIONS', () => ENV.getBool('WEBGL_PACK'));\n/** Whether we will pack array ops. */\nENV.registerFlag('WEBGL_PACK_ARRAY_OPERATIONS', () => ENV.getBool('WEBGL_PACK'));\n/** Whether we will pack image ops. */\nENV.registerFlag('WEBGL_PACK_IMAGE_OPERATIONS', () => ENV.getBool('WEBGL_PACK'));\n/** Whether we will pack reduce ops. */\nENV.registerFlag('WEBGL_PACK_REDUCE', () => ENV.getBool('WEBGL_PACK'));\n/** Whether packed WebGL kernels lazily unpack their outputs. */\nENV.registerFlag('WEBGL_LAZILY_UNPACK', () => ENV.getBool('WEBGL_PACK'));\n/** Whether we will use the im2col algorithm to speed up convolutions. */\nENV.registerFlag('WEBGL_CONV_IM2COL', () => ENV.getBool('WEBGL_PACK'));\n/** The maximum texture dimension. 
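// Illustrative sketch (not part of the bundled tfjs source): the registerFlag()
// calls above only declare lazily evaluated defaults. An application can inspect
// or override them through the public tf.env() API before running inference;
// whether a given flag still takes effect depends on the bundled tfjs version,
// so treat this as a sketch rather than a recommendation:
import * as tf from '@tensorflow/tfjs';

async function configureWebGLFlags() {
  await tf.setBackend('webgl');
  // Read the lazily computed defaults.
  console.log('WEBGL_VERSION:', tf.env().getNumber('WEBGL_VERSION'));
  console.log('WEBGL_PACK:', tf.env().getBool('WEBGL_PACK'));
  // Override a default, e.g. prefer 16-bit textures on weak mobile GPUs.
  tf.env().set('WEBGL_FORCE_F16_TEXTURES', true);
  await tf.ready();
}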
*/\nENV.registerFlag('WEBGL_MAX_TEXTURE_SIZE', () => getWebGLMaxTextureSize(ENV.getNumber('WEBGL_VERSION')));\n/** The maximum texture dimension. */\nENV.registerFlag('WEBGL_MAX_TEXTURES_IN_SHADER', () => getMaxTexturesInShader(ENV.getNumber('WEBGL_VERSION')));\n/**\n * The disjoint_query_timer extension version.\n * 0: disabled, 1: EXT_disjoint_timer_query, 2:\n * EXT_disjoint_timer_query_webgl2.\n * In Firefox with WebGL 2.0,\n * EXT_disjoint_timer_query_webgl2 is not available, so we must use the\n * WebGL 1.0 extension.\n */\nENV.registerFlag('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION', () => {\n const webGLVersion = ENV.getNumber('WEBGL_VERSION');\n if (webGLVersion === 0) {\n return 0;\n }\n return getWebGLDisjointQueryTimerVersion(webGLVersion);\n});\n/**\n * Whether the timer object from the disjoint_query_timer extension gives\n * timing information that is reliable.\n */\nENV.registerFlag('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_RELIABLE', () => ENV.getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION') > 0 &&\n !device_util.isMobile());\n/**\n * Whether the device is physically capable of rendering to float32 textures.\n */\nENV.registerFlag('WEBGL_RENDER_FLOAT32_CAPABLE', () => isCapableOfRenderingToFloatTexture(ENV.getNumber('WEBGL_VERSION')));\n/**\n * Whether rendering to float32 textures is enabled. If disabled, renders to\n * float16 textures.\n */\nENV.registerFlag('WEBGL_RENDER_FLOAT32_ENABLED', () => {\n return ENV.getBool('WEBGL_FORCE_F16_TEXTURES') ?\n false :\n ENV.getBool('WEBGL_RENDER_FLOAT32_CAPABLE');\n});\n/**\n * Whether downloading float textures is enabled (16 or 32 bit). If disabled,\n * uses IEEE 754 encoding of the float32 values to 4 uint8 when downloading.\n */\nENV.registerFlag('WEBGL_DOWNLOAD_FLOAT_ENABLED', () => isDownloadFloatTextureEnabled(ENV.getNumber('WEBGL_VERSION')));\n/** Whether the fence API is available. */\nENV.registerFlag('WEBGL_FENCE_API_ENABLED', () => isWebGLFenceEnabled(ENV.getNumber('WEBGL_VERSION')));\n/**\n * Tensors with size <= than this will be uploaded as uniforms, not textures.\n */\nENV.registerFlag('WEBGL_SIZE_UPLOAD_UNIFORM', () => {\n // Use uniform uploads only when 32bit floats are supported. In\n // 16bit\n // environments there are problems with comparing a 16bit texture value\n // with a 32bit uniform value.\n const useUniforms = ENV.getBool('WEBGL_RENDER_FLOAT32_ENABLED');\n return useUniforms ? 4 : 0;\n});\n/**\n * If the total number of bytes allocated on the GPU is greater than this\n * number, we will aggressively delete textures upon disposal with\n * gl.deleteMatrixTexture, rather than making them available for reuse.\n *\n * Default value -1 indicates that we will never aggressively delete textures.\n */\nENV.registerFlag('WEBGL_DELETE_TEXTURE_THRESHOLD', () => {\n return -1;\n}, threshold => {\n if (threshold < 0 && threshold !== -1) {\n throw new Error(`WEBGL_DELETE_TEXTURE_THRESHOLD must be -1 (indicating never ` +\n `delete) or at least 0, but got ${threshold}.`);\n }\n});\n//# sourceMappingURL=flags_webgl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
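// Illustrative sketch (not part of the bundled tfjs source):
// WEBGL_DELETE_TEXTURE_THRESHOLD above is registered with two callbacks: an
// evaluation function that supplies the default (-1, never delete aggressively)
// and a set-hook that validates values passed to env().set(). MiniEnv below is
// an invented, stripped-down environment showing just that contract:
class MiniEnv {
  constructor() { this.flags = {}; this.registry = {}; }
  registerFlag(name, evaluationFn, setHook) {
    this.registry[name] = { evaluationFn, setHook };
  }
  get(name) {
    // Defaults are computed lazily, the first time a flag is read.
    if (!(name in this.flags)) this.flags[name] = this.registry[name].evaluationFn();
    return this.flags[name];
  }
  set(name, value) {
    const { setHook } = this.registry[name];
    if (setHook != null) setHook(value); // may throw on invalid values
    this.flags[name] = value;
  }
}
const miniEnv = new MiniEnv();
miniEnv.registerFlag('WEBGL_DELETE_TEXTURE_THRESHOLD', () => -1, (t) => {
  if (t < 0 && t !== -1) throw new Error(`threshold must be -1 or >= 0, got ${t}`);
});
console.log(miniEnv.get('WEBGL_DELETE_TEXTURE_THRESHOLD')); // -1 (lazy default)
miniEnv.set('WEBGL_DELETE_TEXTURE_THRESHOLD', 256 * 1024 * 1024); // passes validation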
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// Import shared functionality from tfjs-backend-cpu without triggering\n// side effects.\n// tslint:disable-next-line: no-imports-from-dist\nimport * as shared from '@tensorflow/tfjs-backend-cpu/dist/shared';\nconst { simpleAbsImpl: simpleAbsImplCPU, addImpl: addImplCPU, ceilImpl: ceilImplCPU, expImpl: expImplCPU, expm1Impl: expm1ImplCPU, floorImpl: floorImplCPU, logImpl: logImplCPU, maxImpl: maxImplCPU, multiplyImpl: multiplyImplCPU, rsqrtImpl: rsqrtImplCPU, sliceImpl: sliceImplCPU, subImpl: subImplCPU, transposeImpl: transposeImplCPU, uniqueImpl: uniqueImplCPU, } = shared;\nexport { simpleAbsImplCPU, addImplCPU, ceilImplCPU, expImplCPU, expm1ImplCPU, logImplCPU, multiplyImplCPU, sliceImplCPU, subImplCPU, floorImplCPU, maxImplCPU, rsqrtImplCPU, transposeImplCPU, uniqueImplCPU, };\n//# sourceMappingURL=shared.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class AddNProgram {\n constructor(outputShape, shapes) {\n this.outputShape = [];\n this.outputShape = outputShape;\n this.variableNames = shapes.map((_, i) => `T${i}`);\n const snippets = [];\n // Get target elements from every input tensor.\n this.variableNames.forEach(variable => {\n snippets.push(`float v${variable} = get${variable}AtOutCoords();`);\n });\n // Calculate the sum of all elements.\n const operation = this.variableNames\n .map(variable => {\n return `v${variable}`;\n })\n .join(' + ');\n this.userCode = `\n void main() {\n ${snippets.join('\\n ')}\n\n float result = ${operation};\n setOutput(result);\n }\n `;\n }\n}\n//# sourceMappingURL=addn_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
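// Illustrative sketch (not part of the bundled tfjs source): AddNProgram above
// builds its fragment-shader body by emitting one get<T_i>AtOutCoords() read per
// input and summing the results. This small standalone helper mirrors that string
// assembly so the generated GLSL can be inspected without a WebGL context:
function buildAddNUserCode(numInputs) {
  const variableNames = Array.from({ length: numInputs }, (_, i) => `T${i}`);
  const snippets = variableNames.map(v => `float v${v} = get${v}AtOutCoords();`);
  const operation = variableNames.map(v => `v${v}`).join(' + ');
  return `
    void main() {
      ${snippets.join('\n      ')}
      float result = ${operation};
      setOutput(result);
    }
  `;
}
console.log(buildAddNUserCode(3));
// -> reads vT0, vT1, vT2 at the output coordinates and writes their sum.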
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class AddNPackedProgram {\n constructor(outputShape, shapes) {\n this.outputShape = [];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = outputShape;\n this.variableNames = shapes.map((_, i) => `T${i}`);\n const snippets = [];\n // Get target elements from every input tensor.\n this.variableNames.forEach(variable => {\n snippets.push(`vec4 v${variable} = get${variable}AtOutCoords();`);\n });\n // Calculate the sum of all elements.\n const operation = this.variableNames\n .map(variable => {\n return `v${variable}`;\n })\n .join(' + ');\n this.userCode = `\n void main() {\n ${snippets.join('\\n ')}\n\n vec4 result = ${operation};\n setOutput(result);\n }\n `;\n }\n}\n//# sourceMappingURL=addn_packed_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class ArgMinMaxProgram {\n constructor(reduceInfo, op, firstPass) {\n this.variableNames = ['A'];\n const { windowSize, batchSize, outSize } = reduceInfo;\n if (!firstPass) {\n this.variableNames.push('bestIndicesA');\n }\n this.outputShape = [batchSize, outSize];\n const compOp = (op === 'max') ? '>' : '<';\n const indexSnippet = firstPass ?\n 'inOffset + i;' :\n 'round(getBestIndicesA(batch, inOffset + i));';\n this.userCode = `\n void main() {\n ivec2 coords = getOutputCoords();\n int batch = coords[0];\n int outIdx = coords[1];\n int inOffset = outIdx * ${windowSize};\n\n int bestIndex = inOffset;\n float bestValue = getA(batch, bestIndex);\n\n for (int i = 0; i < ${windowSize}; i++) {\n int inIdx = ${indexSnippet};\n float candidate = getA(batch, inIdx);\n if (candidate ${compOp} bestValue) {\n bestValue = candidate;\n bestIndex = inIdx;\n }\n }\n setOutput(float(bestIndex));\n }\n `;\n }\n}\n//# sourceMappingURL=argminmax_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
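// Illustrative sketch (not part of the bundled tfjs source): ArgMinMaxProgram
// above performs a windowed reduction: each output element scans windowSize
// inputs and keeps the index of the best value; on later passes the candidate
// index is looked up in bestIndicesA so indices always refer to the original
// tensor. A CPU re-statement of one such pass over a flat array:
function argMaxPass(A, windowSize, bestIndicesA /* null on the first pass */) {
  const inSize = bestIndicesA ? bestIndicesA.length : A.length;
  const outSize = Math.ceil(inSize / windowSize);
  const out = new Array(outSize);
  for (let outIdx = 0; outIdx < outSize; outIdx++) {
    const inOffset = outIdx * windowSize;
    let bestIndex = bestIndicesA ? bestIndicesA[inOffset] : inOffset;
    let bestValue = A[bestIndex];
    for (let i = 0; i < windowSize && inOffset + i < inSize; i++) {
      const inIdx = bestIndicesA ? bestIndicesA[inOffset + i] : inOffset + i;
      const candidate = A[inIdx];
      if (candidate > bestValue) { bestValue = candidate; bestIndex = inIdx; }
    }
    out[outIdx] = bestIndex;
  }
  return out;
}
const A = [3, 9, 1, 4, 7, 2, 8, 5];
const pass1 = argMaxPass(A, 4, null);  // [1, 6]: per-window winners in A
const pass2 = argMaxPass(A, 2, pass1); // [1]:   overall argmax of A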
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport function getVecChannels(name, rank) {\n return ['x', 'y', 'z', 'w', 'u', 'v'].slice(0, rank).map(d => `${name}.${d}`);\n}\nexport function getChannels(name, rank) {\n if (rank === 1) {\n return [name];\n }\n return getVecChannels(name, rank);\n}\nexport function getSourceCoords(rank, dims) {\n if (rank === 1) {\n return 'rc';\n }\n let coords = '';\n for (let i = 0; i < rank; i++) {\n coords += dims[i];\n if (i < rank - 1) {\n coords += ',';\n }\n }\n return coords;\n}\n//# sourceMappingURL=packing_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from '@tensorflow/tfjs-core';\nexport function getGlslDifferences() {\n let version;\n let attribute;\n let varyingVs;\n let varyingFs;\n let texture2D;\n let output;\n let defineOutput;\n let defineSpecialNaN;\n let defineSpecialInf;\n let defineRound;\n if (env().getNumber('WEBGL_VERSION') === 2) {\n version = '#version 300 es';\n attribute = 'in';\n varyingVs = 'out';\n varyingFs = 'in';\n texture2D = 'texture';\n output = 'outputColor';\n defineOutput = 'out vec4 outputColor;';\n // Use custom isnan definition to work across differences between\n // implementations on various platforms. While this should happen in ANGLE\n // we still see differences between android and windows (on chrome) when\n // using isnan directly.\n defineSpecialNaN = `\n bool isnan_custom(float val) {\n return (val > 0.0 || val < 0.0) ? 
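// Illustrative sketch (not part of the bundled tfjs source): packing_util above
// turns a coordinate variable and a rank into GLSL swizzle accessors; ranks 5
// and 6 use the extra 'u'/'v' fields of the ivec5/ivec6 structs declared later
// in the shader prefix. A standalone copy of the mapping, for inspection:
function getVecChannelsSketch(name, rank) {
  return ['x', 'y', 'z', 'w', 'u', 'v'].slice(0, rank).map(d => `${name}.${d}`);
}
console.log(getVecChannelsSketch('coords', 4)); // ['coords.x','coords.y','coords.z','coords.w']
console.log(getVecChannelsSketch('coords', 6)); // adds 'coords.u' and 'coords.v'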
false : val != 0.0;\n }\n\n bvec4 isnan_custom(vec4 val) {\n return bvec4(isnan_custom(val.x),\n isnan_custom(val.y), isnan_custom(val.z), isnan_custom(val.w));\n }\n\n #define isnan(value) isnan_custom(value)\n `;\n // In webgl 2 we do not need to specify a custom isinf so there is no\n // need for a special INFINITY constant.\n defineSpecialInf = ``;\n defineRound = `\n #define round(value) newRound(value)\n int newRound(float value) {\n return int(floor(value + 0.5));\n }\n\n ivec4 newRound(vec4 value) {\n return ivec4(floor(value + vec4(0.5)));\n }\n `;\n }\n else {\n version = '';\n attribute = 'attribute';\n varyingVs = 'varying';\n varyingFs = 'varying';\n texture2D = 'texture2D';\n output = 'gl_FragColor';\n defineOutput = '';\n // WebGL1 has no built in isnan so we define one here.\n defineSpecialNaN = `\n #define isnan(value) isnan_custom(value)\n bool isnan_custom(float val) {\n return (val > 0. || val < 1. || val == 0.) ? false : true;\n }\n bvec4 isnan_custom(vec4 val) {\n return bvec4(isnan(val.x), isnan(val.y), isnan(val.z), isnan(val.w));\n }\n `;\n defineSpecialInf = `\n uniform float INFINITY;\n\n bool isinf(float val) {\n return abs(val) == INFINITY;\n }\n bvec4 isinf(vec4 val) {\n return equal(abs(val), vec4(INFINITY));\n }\n `;\n defineRound = `\n int round(float value) {\n return int(floor(value + 0.5));\n }\n\n ivec4 round(vec4 value) {\n return ivec4(floor(value + vec4(0.5)));\n }\n `;\n }\n return {\n version,\n attribute,\n varyingVs,\n varyingFs,\n texture2D,\n output,\n defineOutput,\n defineSpecialNaN,\n defineSpecialInf,\n defineRound\n };\n}\n//# sourceMappingURL=glsl_version.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\n/**\n * Produces GLSL code that derives logical coordinates from a flat\n * index. The code performs integer division with each stride and decrements\n * the index until the index equals the final dimension coordinate.\n */\nexport function getLogicalCoordinatesFromFlatIndex(coords, shape, index = 'index') {\n const strides = util.computeStrides(shape);\n return strides\n .map((stride, i) => {\n const line1 = `int ${coords[i]} = ${index} / ${stride}`;\n const line2 = i === strides.length - 1 ?\n `int ${coords[i + 1]} = ${index} - ${coords[i]} * ${stride}` :\n `index -= ${coords[i]} * ${stride}`;\n return `${line1}; ${line2};`;\n })\n .join('');\n}\nfunction buildVec(x) {\n if (x.length === 1) {\n return `${x[0]}`;\n }\n return `vec${x.length}(${x.join(',')})`;\n}\n/**\n * Produces GLSL code that computes the dot product of the input x and y\n * vectors. 
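// Illustrative sketch (not part of the bundled tfjs source):
// getLogicalCoordinatesFromFlatIndex above emits GLSL that recovers logical
// coordinates from a flat index by repeatedly dividing by the row-major strides.
// The same arithmetic in plain JS, with a hand-checked example:
function computeStridesSketch(shape) {
  // Row-major strides, e.g. shape [2, 3, 4] -> strides [12, 4].
  const strides = new Array(shape.length - 1);
  let s = 1;
  for (let i = shape.length - 1; i >= 1; i--) {
    s *= shape[i];
    strides[i - 1] = s;
  }
  return strides;
}
function coordsFromFlatIndexSketch(shape, index) {
  const strides = computeStridesSketch(shape);
  const coords = [];
  for (let i = 0; i < strides.length; i++) {
    const c = Math.floor(index / strides[i]);
    coords.push(c);
    index -= c * strides[i];
  }
  coords.push(index); // the last coordinate is whatever remains
  return coords;
}
console.log(coordsFromFlatIndexSketch([2, 3, 4], 17)); // [1, 1, 1]: 1*12 + 1*4 + 1 = 17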
Handles splitting inputs into increments of vec4s when necessary.\n */\nexport function dotify(x, y) {\n if (x.length !== y.length) {\n throw new Error(`Vectors to be dotted must be of the same length -` +\n `got ${x.length} and ${y.length}`);\n }\n const slices = [];\n const nearestVec4 = Math.floor(x.length / 4);\n const nearestVec4Remainder = x.length % 4;\n for (let i = 0; i < nearestVec4; i++) {\n const xSlice = x.slice(i * 4, i * 4 + 4);\n const ySlice = y.slice(i * 4, i * 4 + 4);\n slices.push(`${buildVec(xSlice)}, ${buildVec(ySlice)}`);\n }\n if (nearestVec4Remainder !== 0) {\n let xSlice = x.slice(nearestVec4 * 4);\n let ySlice = y.slice(nearestVec4 * 4);\n if (xSlice.length === 1) {\n xSlice = xSlice.map(d => `float(${d})`);\n ySlice = ySlice.map(d => `float(${d})`);\n }\n slices.push(`${buildVec(xSlice)}, ${buildVec(ySlice)}`);\n }\n return slices.map((d, i) => `dot(${d})`).join('+');\n}\n/**\n * Produces GLSL that computes the flat index from 3D coordinates.\n */\nexport function getFlatIndexFrom3D(shape) {\n const strides = util.computeStrides(shape).map(d => d.toString());\n return `\n int getFlatIndex(ivec3 coords) {\n return coords.x * ${strides[0]} + coords.y * ${strides[1]} + coords.z;\n }\n`;\n}\nexport const ENCODE_FLOAT_SNIPPET = `\n const float FLOAT_MAX = 1.70141184e38;\n const float FLOAT_MIN = 1.17549435e-38;\n\n lowp vec4 encode_float(highp float v) {\n if (isnan(v)) {\n return vec4(255, 255, 255, 255);\n }\n\n highp float av = abs(v);\n\n if(av < FLOAT_MIN) {\n return vec4(0.0, 0.0, 0.0, 0.0);\n } else if(v > FLOAT_MAX) {\n return vec4(0.0, 0.0, 128.0, 127.0) / 255.0;\n } else if(v < -FLOAT_MAX) {\n return vec4(0.0, 0.0, 128.0, 255.0) / 255.0;\n }\n\n highp vec4 c = vec4(0,0,0,0);\n\n highp float e = floor(log2(av));\n highp float m = exp2(fract(log2(av))) - 1.0;\n\n c[2] = floor(128.0 * m);\n m -= c[2] / 128.0;\n c[1] = floor(32768.0 * m);\n m -= c[1] / 32768.0;\n c[0] = floor(8388608.0 * m);\n\n highp float ebias = e + 127.0;\n c[3] = floor(ebias / 2.0);\n ebias -= c[3] * 2.0;\n c[2] += floor(ebias) * 128.0;\n\n c[3] += 128.0 * step(0.0, -v);\n\n return c / 255.0;\n }\n`;\n//# sourceMappingURL=shader_compiler_util.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, util } from '@tensorflow/tfjs-core';\nconst { getBroadcastDims } = backend_util;\nimport { getGlslDifferences } from './glsl_version';\nimport * as shader_util from './shader_compiler_util';\nexport function makeShader(inputsInfo, outputShape, userCode, usesPackedTextures) {\n const prefixSnippets = [];\n inputsInfo.forEach(x => {\n const size = util.sizeFromShape(x.shapeInfo.logicalShape);\n // Snippet when we decided to upload the values as uniform.\n if (x.shapeInfo.isUniform) {\n prefixSnippets.push(`uniform float ${x.name}${size > 1 ? 
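// Illustrative sketch (not part of the bundled tfjs source): ENCODE_FLOAT_SNIPPET
// above packs a float32 into four 0..255 channel values (mantissa bytes, then
// exponent/sign) so results can be read back through an RGBA/UNSIGNED_BYTE
// readPixels even when float downloads are unavailable. On the JS side the four
// bytes are simply reinterpreted as a float32 (assuming the usual little-endian
// host, which lines up with c[0]..c[3] in the shader):
function bytesToFloat32(r, g, b, a) {
  // r carries the least-significant mantissa byte, a the sign/high-exponent byte.
  return new Float32Array(new Uint8Array([r, g, b, a]).buffer)[0];
}
// Round-trip check using the inverse direction on the CPU:
const floatBytes = new Uint8Array(new Float32Array([3.5]).buffer);
console.log(bytesToFloat32(floatBytes[0], floatBytes[1], floatBytes[2], floatBytes[3])); // 3.5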
`[${size}]` : ''};`);\n }\n else {\n prefixSnippets.push(`uniform sampler2D ${x.name};`);\n prefixSnippets.push(`uniform int offset${x.name};`);\n }\n });\n const inputPrefixSnippet = prefixSnippets.join('\\n');\n const inputSamplingSnippet = inputsInfo\n .map(x => getInputSamplingSnippet(x, outputShape, usesPackedTextures))\n .join('\\n');\n const outTexShape = outputShape.texShape;\n const glsl = getGlslDifferences();\n const floatTextureSampleSnippet = getFloatTextureSampleSnippet(glsl);\n let outputSamplingSnippet;\n let floatTextureSetOutputSnippet;\n let shaderPrefix = getShaderPrefix(glsl);\n if (outputShape.isPacked) {\n outputSamplingSnippet =\n getPackedOutputSamplingSnippet(outputShape.logicalShape, outTexShape);\n floatTextureSetOutputSnippet = getFloatTextureSetRGBASnippet(glsl);\n }\n else {\n outputSamplingSnippet =\n getOutputSamplingSnippet(outputShape.logicalShape, outTexShape);\n floatTextureSetOutputSnippet = getFloatTextureSetRSnippet(glsl);\n }\n if (usesPackedTextures) {\n shaderPrefix += SHADER_PACKED_PREFIX;\n }\n const source = [\n shaderPrefix, floatTextureSampleSnippet, floatTextureSetOutputSnippet,\n inputPrefixSnippet, outputSamplingSnippet, inputSamplingSnippet, userCode\n ].join('\\n');\n return source;\n}\nfunction getSamplerFromInInfo(inInfo) {\n const shape = inInfo.shapeInfo.logicalShape;\n switch (shape.length) {\n case 0:\n return getSamplerScalar(inInfo);\n case 1:\n return getSampler1D(inInfo);\n case 2:\n return getSampler2D(inInfo);\n case 3:\n return getSampler3D(inInfo);\n case 4:\n return getSampler4D(inInfo);\n case 5:\n return getSampler5D(inInfo);\n case 6:\n return getSampler6D(inInfo);\n default:\n throw new Error(`${shape.length}-D input sampling` +\n ` is not yet supported`);\n }\n}\nfunction getPackedSamplerFromInInfo(inInfo) {\n const shape = inInfo.shapeInfo.logicalShape;\n switch (shape.length) {\n case 0:\n return getPackedSamplerScalar(inInfo);\n case 1:\n return getPackedSampler1D(inInfo);\n case 2:\n return getPackedSampler2D(inInfo);\n case 3:\n return getPackedSampler3D(inInfo);\n default:\n return getPackedSamplerND(inInfo);\n }\n}\nfunction getInputSamplingSnippet(inInfo, outShapeInfo, usesPackedTextures = false) {\n let res = '';\n if (usesPackedTextures) {\n res += getPackedSamplerFromInInfo(inInfo);\n }\n else {\n res += getSamplerFromInInfo(inInfo);\n }\n const inShape = inInfo.shapeInfo.logicalShape;\n const outShape = outShapeInfo.logicalShape;\n if (inShape.length <= outShape.length) {\n if (usesPackedTextures) {\n res += getPackedSamplerAtOutputCoords(inInfo, outShapeInfo);\n }\n else {\n res += getSamplerAtOutputCoords(inInfo, outShapeInfo);\n }\n }\n return res;\n}\nfunction getPackedOutputSamplingSnippet(outShape, outTexShape) {\n switch (outShape.length) {\n case 0:\n return getOutputScalarCoords();\n case 1:\n return getOutputPacked1DCoords(outShape, outTexShape);\n case 2:\n return getOutputPacked2DCoords(outShape, outTexShape);\n case 3:\n return getOutputPacked3DCoords(outShape, outTexShape);\n default:\n return getOutputPackedNDCoords(outShape, outTexShape);\n }\n}\nfunction getOutputSamplingSnippet(outShape, outTexShape) {\n switch (outShape.length) {\n case 0:\n return getOutputScalarCoords();\n case 1:\n return getOutput1DCoords(outShape, outTexShape);\n case 2:\n return getOutput2DCoords(outShape, outTexShape);\n case 3:\n return getOutput3DCoords(outShape, outTexShape);\n case 4:\n return getOutput4DCoords(outShape, outTexShape);\n case 5:\n return getOutput5DCoords(outShape, outTexShape);\n case 6:\n 
return getOutput6DCoords(outShape, outTexShape);\n default:\n throw new Error(`${outShape.length}-D output sampling is not yet supported`);\n }\n}\nfunction getFloatTextureSampleSnippet(glsl) {\n return `\n float sampleTexture(sampler2D textureSampler, vec2 uv) {\n return ${glsl.texture2D}(textureSampler, uv).r;\n }\n `;\n}\nfunction getFloatTextureSetRSnippet(glsl) {\n return `\n void setOutput(float val) {\n ${glsl.output} = vec4(val, 0, 0, 0);\n }\n `;\n}\nfunction getFloatTextureSetRGBASnippet(glsl) {\n return `\n void setOutput(vec4 val) {\n ${glsl.output} = val;\n }\n `;\n}\nfunction getShaderPrefix(glsl) {\n const SHADER_PREFIX = `${glsl.version}\n precision highp float;\n precision highp int;\n precision highp sampler2D;\n ${glsl.varyingFs} vec2 resultUV;\n ${glsl.defineOutput}\n const vec2 halfCR = vec2(0.5, 0.5);\n\n struct ivec5\n {\n int x;\n int y;\n int z;\n int w;\n int u;\n };\n\n struct ivec6\n {\n int x;\n int y;\n int z;\n int w;\n int u;\n int v;\n };\n\n uniform float NAN;\n ${glsl.defineSpecialNaN}\n ${glsl.defineSpecialInf}\n ${glsl.defineRound}\n\n int imod(int x, int y) {\n return x - y * (x / y);\n }\n\n int idiv(int a, int b, float sign) {\n int res = a / b;\n int mod = imod(a, b);\n if (sign < 0. && mod != 0) {\n res -= 1;\n }\n return res;\n }\n\n //Based on the work of Dave Hoskins\n //https://www.shadertoy.com/view/4djSRW\n #define HASHSCALE1 443.8975\n float random(float seed){\n vec2 p = resultUV * seed;\n vec3 p3 = fract(vec3(p.xyx) * HASHSCALE1);\n p3 += dot(p3, p3.yzx + 19.19);\n return fract((p3.x + p3.y) * p3.z);\n }\n\n ${SAMPLE_1D_SNIPPET}\n ${SAMPLE_2D_SNIPPET}\n ${SAMPLE_3D_SNIPPET}\n `;\n return SHADER_PREFIX;\n}\nconst SAMPLE_1D_SNIPPET = `\nvec2 uvFromFlat(int texNumR, int texNumC, int index) {\n int texR = index / texNumC;\n int texC = index - texR * texNumC;\n return (vec2(texC, texR) + halfCR) / vec2(texNumC, texNumR);\n}\nvec2 packedUVfrom1D(int texNumR, int texNumC, int index) {\n int texelIndex = index / 2;\n int texR = texelIndex / texNumC;\n int texC = texelIndex - texR * texNumC;\n return (vec2(texC, texR) + halfCR) / vec2(texNumC, texNumR);\n}\n`;\nconst SAMPLE_2D_SNIPPET = `\nvec2 packedUVfrom2D(int texelsInLogicalRow, int texNumR,\n int texNumC, int row, int col) {\n int texelIndex = (row / 2) * texelsInLogicalRow + (col / 2);\n int texR = texelIndex / texNumC;\n int texC = texelIndex - texR * texNumC;\n return (vec2(texC, texR) + halfCR) / vec2(texNumC, texNumR);\n}\n`;\nconst SAMPLE_3D_SNIPPET = `\nvec2 packedUVfrom3D(int texNumR, int texNumC,\n int texelsInBatch, int texelsInLogicalRow, int b,\n int row, int col) {\n int index = b * texelsInBatch + (row / 2) * texelsInLogicalRow + (col / 2);\n int texR = index / texNumC;\n int texC = index - texR * texNumC;\n return (vec2(texC, texR) + halfCR) / vec2(texNumC, texNumR);\n}\n`;\nconst SHADER_PACKED_PREFIX = `\n float getChannel(vec4 frag, vec2 innerDims) {\n vec2 modCoord = mod(innerDims, 2.);\n return modCoord.x == 0. ?\n (modCoord.y == 0. ? frag.r : frag.g) :\n (modCoord.y == 0. ? frag.b : frag.a);\n }\n float getChannel(vec4 frag, int dim) {\n float modCoord = mod(float(dim), 2.);\n return modCoord == 0. ? 
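// Illustrative sketch (not part of the bundled tfjs source): the SAMPLE_* snippets
// above convert a flat element index into the UV of the texel center that stores
// it: row = index / texNumC, col = remainder, then add half a texel and normalize.
// The same arithmetic in JS, with a hand-checked example:
function uvFromFlatSketch(texNumR, texNumC, index) {
  const texR = Math.floor(index / texNumC);
  const texC = index - texR * texNumC;
  return [(texC + 0.5) / texNumC, (texR + 0.5) / texNumR]; // [u, v]
}
// A 3x4 texture: element 7 lives at row 1, col 3 -> u = 3.5/4, v = 1.5/3.
console.log(uvFromFlatSketch(3, 4, 7)); // [0.875, 0.5]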
frag.r : frag.g;\n }\n`;\nfunction getOutputScalarCoords() {\n return `\n int getOutputCoords() {\n return 0;\n }\n `;\n}\nfunction getOutputPacked1DCoords(shape, texShape) {\n const packedTexShape = [Math.ceil(texShape[0] / 2), Math.ceil(texShape[1] / 2)];\n if (packedTexShape[0] === 1) {\n return `\n int getOutputCoords() {\n return 2 * int(resultUV.x * ${packedTexShape[1]}.0);\n }\n `;\n }\n if (packedTexShape[1] === 1) {\n return `\n int getOutputCoords() {\n return 2 * int(resultUV.y * ${packedTexShape[0]}.0);\n }\n `;\n }\n return `\n int getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${packedTexShape[0]}, ${packedTexShape[1]}));\n return 2 * (resTexRC.x * ${packedTexShape[1]} + resTexRC.y);\n }\n `;\n}\nfunction getOutput1DCoords(shape, texShape) {\n if (texShape[0] === 1) {\n return `\n int getOutputCoords() {\n return int(resultUV.x * ${texShape[1]}.0);\n }\n `;\n }\n if (texShape[1] === 1) {\n return `\n int getOutputCoords() {\n return int(resultUV.y * ${texShape[0]}.0);\n }\n `;\n }\n return `\n int getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${texShape[0]}, ${texShape[1]}));\n return resTexRC.x * ${texShape[1]} + resTexRC.y;\n }\n `;\n}\nfunction getOutputPacked3DCoords(shape, texShape) {\n const packedTexShape = [Math.ceil(texShape[0] / 2), Math.ceil(texShape[1] / 2)];\n const texelsInLogicalRow = Math.ceil(shape[2] / 2);\n const texelsInBatch = texelsInLogicalRow * Math.ceil(shape[1] / 2);\n return `\n ivec3 getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${packedTexShape[0]}, ${packedTexShape[1]}));\n int index = resTexRC.x * ${packedTexShape[1]} + resTexRC.y;\n\n int b = index / ${texelsInBatch};\n index -= b * ${texelsInBatch};\n\n int r = 2 * (index / ${texelsInLogicalRow});\n int c = imod(index, ${texelsInLogicalRow}) * 2;\n\n return ivec3(b, r, c);\n }\n `;\n}\nfunction getOutput3DCoords(shape, texShape) {\n const coordsFromIndexSnippet = shader_util.getLogicalCoordinatesFromFlatIndex(['r', 'c', 'd'], shape);\n return `\n ivec3 getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = resTexRC.x * ${texShape[1]} + resTexRC.y;\n ${coordsFromIndexSnippet}\n return ivec3(r, c, d);\n }\n `;\n}\nfunction getOutputPackedNDCoords(shape, texShape) {\n const packedTexShape = [Math.ceil(texShape[0] / 2), Math.ceil(texShape[1] / 2)];\n const texelsInLogicalRow = Math.ceil(shape[shape.length - 1] / 2);\n const texelsInBatch = texelsInLogicalRow * Math.ceil(shape[shape.length - 2] / 2);\n let texelsInBatchN = texelsInBatch;\n let batches = ``;\n let coords = 'b, r, c';\n for (let b = 2; b < shape.length - 1; b++) {\n texelsInBatchN *= shape[shape.length - b - 1];\n batches = `\n int b${b} = index / ${texelsInBatchN};\n index -= b${b} * ${texelsInBatchN};\n ` + batches;\n coords = `b${b}, ` + coords;\n }\n return `\n ivec${shape.length} getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${packedTexShape[0]}, ${packedTexShape[1]}));\n int index = resTexRC.x * ${packedTexShape[1]} + resTexRC.y;\n\n ${batches}\n\n int b = index / ${texelsInBatch};\n index -= b * ${texelsInBatch};\n\n int r = 2 * (index / ${texelsInLogicalRow});\n int c = imod(index, ${texelsInLogicalRow}) * 2;\n\n return ivec${shape.length}(${coords});\n }\n `;\n}\nfunction getOutput4DCoords(shape, texShape) {\n const coordsFromIndexSnippet = shader_util.getLogicalCoordinatesFromFlatIndex(['r', 'c', 'd', 'd2'], shape);\n return `\n ivec4 getOutputCoords() {\n ivec2 resTexRC = 
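// Illustrative sketch (not part of the bundled tfjs source):
// getOutputPacked3DCoords above maps a packed texel index back to logical
// (batch, row, col), where each texel covers a 2x2 block of the last two
// dimensions. The same arithmetic in JS, hand-checked for shape [2, 3, 5]:
function packed3DCoordsSketch(shape, texelIndex) {
  const texelsInLogicalRow = Math.ceil(shape[2] / 2);                 // 3 when cols = 5
  const texelsInBatch = texelsInLogicalRow * Math.ceil(shape[1] / 2); // 6 when rows = 3
  const b = Math.floor(texelIndex / texelsInBatch);
  const index = texelIndex - b * texelsInBatch;
  const r = 2 * Math.floor(index / texelsInLogicalRow);
  const c = (index % texelsInLogicalRow) * 2;
  return [b, r, c]; // top-left corner of the 2x2 block this texel holds
}
console.log(packed3DCoordsSketch([2, 3, 5], 8)); // [1, 0, 4]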
ivec2(resultUV.yx *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = resTexRC.x * ${texShape[1]} + resTexRC.y;\n ${coordsFromIndexSnippet}\n return ivec4(r, c, d, d2);\n }\n `;\n}\nfunction getOutput5DCoords(shape, texShape) {\n const coordsFromIndexSnippet = shader_util.getLogicalCoordinatesFromFlatIndex(['r', 'c', 'd', 'd2', 'd3'], shape);\n return `\n ivec5 getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx * vec2(${texShape[0]},\n ${texShape[1]}));\n\n int index = resTexRC.x * ${texShape[1]} + resTexRC.y;\n\n ${coordsFromIndexSnippet}\n\n ivec5 outShape = ivec5(r, c, d, d2, d3);\n return outShape;\n }\n `;\n}\nfunction getOutput6DCoords(shape, texShape) {\n const coordsFromIndexSnippet = shader_util.getLogicalCoordinatesFromFlatIndex(['r', 'c', 'd', 'd2', 'd3', 'd4'], shape);\n return `\n ivec6 getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = resTexRC.x * ${texShape[1]} + resTexRC.y;\n\n ${coordsFromIndexSnippet}\n\n ivec6 result = ivec6(r, c, d, d2, d3, d4);\n return result;\n }\n `;\n}\nfunction getOutputPacked2DCoords(shape, texShape) {\n const packedTexShape = [Math.ceil(texShape[0] / 2), Math.ceil(texShape[1] / 2)];\n if (util.arraysEqual(shape, texShape)) {\n return `\n ivec2 getOutputCoords() {\n return 2 * ivec2(resultUV.yx * vec2(${packedTexShape[0]}, ${packedTexShape[1]}));\n }\n `;\n }\n // texels needed to accommodate a logical row\n const texelsInLogicalRow = Math.ceil(shape[1] / 2);\n /**\n * getOutputCoords\n *\n * resTexRC: The rows and columns of the texels. If you move over one\n * texel to the right in the packed texture, you are moving over one column\n * (not two).\n *\n * index: The texel index\n */\n return `\n ivec2 getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${packedTexShape[0]}, ${packedTexShape[1]}));\n\n int index = resTexRC.x * ${packedTexShape[1]} + resTexRC.y;\n int r = 2 * (index / ${texelsInLogicalRow});\n int c = imod(index, ${texelsInLogicalRow}) * 2;\n\n return ivec2(r, c);\n }\n `;\n}\nfunction getOutput2DCoords(shape, texShape) {\n if (util.arraysEqual(shape, texShape)) {\n return `\n ivec2 getOutputCoords() {\n return ivec2(resultUV.yx * vec2(${texShape[0]}, ${texShape[1]}));\n }\n `;\n }\n if (shape[1] === 1) {\n return `\n ivec2 getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = resTexRC.x * ${texShape[1]} + resTexRC.y;\n return ivec2(index, 0);\n }\n `;\n }\n if (shape[0] === 1) {\n return `\n ivec2 getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = resTexRC.x * ${texShape[1]} + resTexRC.y;\n return ivec2(0, index);\n }\n `;\n }\n return `\n ivec2 getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = resTexRC.x * ${texShape[1]} + resTexRC.y;\n int r = index / ${shape[1]};\n int c = index - r * ${shape[1]};\n return ivec2(r, c);\n }\n `;\n}\nfunction getFlatOffsetUniformName(texName) {\n return `offset${texName}`;\n}\nfunction getPackedSamplerScalar(inputInfo) {\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n const glsl = getGlslDifferences();\n return `\n vec4 ${funcName}() {\n return ${glsl.texture2D}(${texName}, halfCR);\n }\n `;\n}\nfunction getSamplerScalar(inputInfo) {\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n if 
(inputInfo.shapeInfo.isUniform) {\n return `float ${funcName}() {return ${texName};}`;\n }\n const [texNumR, texNumC] = inputInfo.shapeInfo.texShape;\n if (texNumR === 1 && texNumC === 1) {\n return `\n float ${funcName}() {\n return sampleTexture(${texName}, halfCR);\n }\n `;\n }\n const [tNumR, tNumC] = inputInfo.shapeInfo.texShape;\n const offset = getFlatOffsetUniformName(texName);\n return `\n float ${funcName}() {\n vec2 uv = uvFromFlat(${tNumR}, ${tNumC}, ${offset});\n return sampleTexture(${texName}, uv);\n }\n `;\n}\nfunction getPackedSampler1D(inputInfo) {\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n const texShape = inputInfo.shapeInfo.texShape;\n const packedTexShape = [Math.ceil(texShape[0] / 2), Math.ceil(texShape[1] / 2)];\n const glsl = getGlslDifferences();\n return `\n vec4 ${funcName}(int index) {\n vec2 uv = packedUVfrom1D(\n ${packedTexShape[0]}, ${packedTexShape[1]}, index);\n return ${glsl.texture2D}(${texName}, uv);\n }\n `;\n}\nfunction getSampler1D(inputInfo) {\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n if (inputInfo.shapeInfo.isUniform) {\n // Uniform arrays will be less than 65505 (no risk of float16 overflow).\n return `\n float ${funcName}(int index) {\n ${getUniformSampler(inputInfo)}\n }\n `;\n }\n const texShape = inputInfo.shapeInfo.texShape;\n const tNumR = texShape[0];\n const tNumC = texShape[1];\n if (tNumC === 1 && tNumR === 1) {\n return `\n float ${funcName}(int index) {\n return sampleTexture(${texName}, halfCR);\n }\n `;\n }\n const offset = getFlatOffsetUniformName(texName);\n if (tNumC === 1) {\n return `\n float ${funcName}(int index) {\n vec2 uv = vec2(0.5, (float(index + ${offset}) + 0.5) / ${tNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n if (tNumR === 1) {\n return `\n float ${funcName}(int index) {\n vec2 uv = vec2((float(index + ${offset}) + 0.5) / ${tNumC}.0, 0.5);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n return `\n float ${funcName}(int index) {\n vec2 uv = uvFromFlat(${tNumR}, ${tNumC}, index + ${offset});\n return sampleTexture(${texName}, uv);\n }\n `;\n}\nfunction getPackedSampler2D(inputInfo) {\n const shape = inputInfo.shapeInfo.logicalShape;\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n const texShape = inputInfo.shapeInfo.texShape;\n const texNumR = texShape[0];\n const texNumC = texShape[1];\n const glsl = getGlslDifferences();\n if (texShape != null && util.arraysEqual(shape, texShape)) {\n return `\n vec4 ${funcName}(int row, int col) {\n vec2 uv = (vec2(col, row) + halfCR) / vec2(${texNumC}.0, ${texNumR}.0);\n\n return ${glsl.texture2D}(${texName}, uv);\n }\n `;\n }\n const packedTexShape = [Math.ceil(texShape[0] / 2), Math.ceil(texShape[1] / 2)];\n const valuesPerRow = Math.ceil(shape[1] / 2);\n return `\n vec4 ${funcName}(int row, int col) {\n vec2 uv = packedUVfrom2D(${valuesPerRow}, ${packedTexShape[0]}, ${packedTexShape[1]}, row, col);\n return ${glsl.texture2D}(${texName}, uv);\n }\n `;\n}\nfunction getSampler2D(inputInfo) {\n const shape = inputInfo.shapeInfo.logicalShape;\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n const texShape = inputInfo.shapeInfo.texShape;\n if (texShape != null && util.arraysEqual(shape, texShape)) {\n const texNumR = texShape[0];\n const texNumC = texShape[1];\n return `\n float 
${funcName}(int row, int col) {\n vec2 uv = (vec2(col, row) + halfCR) / vec2(${texNumC}.0, ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n const { newShape, keptDims } = util.squeezeShape(shape);\n const squeezedShape = newShape;\n if (squeezedShape.length < shape.length) {\n const newInputInfo = squeezeInputInfo(inputInfo, squeezedShape);\n const params = ['row', 'col'];\n return `\n ${getSamplerFromInInfo(newInputInfo)}\n float ${funcName}(int row, int col) {\n return ${funcName}(${getSqueezedParams(params, keptDims)});\n }\n `;\n }\n if (inputInfo.shapeInfo.isUniform) {\n // Uniform arrays will be less than 65505 (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col) {\n int index = round(dot(vec2(row, col), vec2(${shape[1]}, 1)));\n ${getUniformSampler(inputInfo)}\n }\n `;\n }\n const texNumR = texShape[0];\n const texNumC = texShape[1];\n const offset = getFlatOffsetUniformName(texName);\n if (texNumC === 1) {\n // index is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col) {\n float index = dot(vec3(row, col, ${offset}), vec3(${shape[1]}, 1, 1));\n vec2 uv = vec2(0.5, (index + 0.5) / ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n if (texNumR === 1) {\n // index is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col) {\n float index = dot(vec3(row, col, ${offset}), vec3(${shape[1]}, 1, 1));\n vec2 uv = vec2((index + 0.5) / ${texNumC}.0, 0.5);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n return `\n float ${funcName}(int row, int col) {\n // Explicitly use integer operations as dot() only works on floats.\n int index = row * ${shape[1]} + col + ${offset};\n vec2 uv = uvFromFlat(${texNumR}, ${texNumC}, index);\n return sampleTexture(${texName}, uv);\n }\n`;\n}\nfunction getPackedSampler3D(inputInfo) {\n const shape = inputInfo.shapeInfo.logicalShape;\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n const texShape = inputInfo.shapeInfo.texShape;\n const packedTexShape = [Math.ceil(texShape[0] / 2), Math.ceil(texShape[1] / 2)];\n if (shape[0] === 1) {\n const squeezedShape = shape.slice(1);\n const keptDims = [1, 2];\n const newInputInfo = squeezeInputInfo(inputInfo, squeezedShape);\n const params = ['b', 'row', 'col'];\n return `\n ${getPackedSamplerFromInInfo(newInputInfo)}\n vec4 ${funcName}(int b, int row, int col) {\n return ${funcName}(${getSqueezedParams(params, keptDims)});\n }\n `;\n }\n const texNumR = packedTexShape[0];\n const texNumC = packedTexShape[1];\n const valuesPerRow = Math.ceil(shape[2] / 2);\n const texelsInBatch = valuesPerRow * Math.ceil(shape[1] / 2);\n const glsl = getGlslDifferences();\n return `\n vec4 ${funcName}(int b, int row, int col) {\n vec2 uv = packedUVfrom3D(\n ${texNumR}, ${texNumC}, ${texelsInBatch}, ${valuesPerRow}, b, row, col);\n return ${glsl.texture2D}(${texName}, uv);\n }\n `;\n}\nfunction getSampler3D(inputInfo) {\n const shape = inputInfo.shapeInfo.logicalShape;\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n const stride0 = shape[1] * shape[2];\n const stride1 = shape[2];\n const { newShape, keptDims } = util.squeezeShape(shape);\n const squeezedShape = newShape;\n if (squeezedShape.length < shape.length) {\n const newInputInfo = squeezeInputInfo(inputInfo, squeezedShape);\n const params = ['row', 'col', 'depth'];\n return `\n 
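// Illustrative sketch (not part of the bundled tfjs source): the samplers above
// first drop size-1 dimensions (util.squeezeShape) and delegate to a lower-rank
// sampler, forwarding only the coordinates of the kept dimensions through
// getSqueezedParams. A simplified stand-in for that remapping:
function squeezeShapeSketch(shape) {
  const newShape = [];
  const keptDims = [];
  shape.forEach((d, i) => { if (d !== 1) { newShape.push(d); keptDims.push(i); } });
  return { newShape, keptDims };
}
function getSqueezedParamsSketch(params, keptDims) {
  return keptDims.map(d => params[d]).join(', ');
}
const { newShape, keptDims } = squeezeShapeSketch([1, 3, 1, 4]); // newShape [3, 4], keptDims [1, 3]
console.log(getSqueezedParamsSketch(['row', 'col', 'depth', 'depth2'], keptDims));
// -> 'col, depth2': the rank-4 getter simply forwards to the generated rank-2 one.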
${getSamplerFromInInfo(newInputInfo)}\n float ${funcName}(int row, int col, int depth) {\n return ${funcName}(${getSqueezedParams(params, keptDims)});\n }\n `;\n }\n if (inputInfo.shapeInfo.isUniform) {\n // Uniform arrays will be less than 65505 (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth) {\n int index = round(dot(vec3(row, col, depth),\n vec3(${stride0}, ${stride1}, 1)));\n ${getUniformSampler(inputInfo)}\n }\n `;\n }\n const texShape = inputInfo.shapeInfo.texShape;\n const texNumR = texShape[0];\n const texNumC = texShape[1];\n const flatOffset = inputInfo.shapeInfo.flatOffset;\n if (texNumC === stride0 && flatOffset == null) {\n // texC is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth) {\n float texR = float(row);\n float texC = dot(vec2(col, depth), vec2(${stride1}, 1));\n vec2 uv = (vec2(texC, texR) + halfCR) /\n vec2(${texNumC}.0, ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n if (texNumC === stride1 && flatOffset == null) {\n // texR is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth) {\n float texR = dot(vec2(row, col), vec2(${shape[1]}, 1));\n float texC = float(depth);\n vec2 uv = (vec2(texC, texR) + halfCR) / vec2(${texNumC}.0, ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n const offset = getFlatOffsetUniformName(texName);\n return `\n float ${funcName}(int row, int col, int depth) {\n // Explicitly use integer operations as dot() only works on floats.\n int index = row * ${stride0} + col * ${stride1} + depth + ${offset};\n vec2 uv = uvFromFlat(${texNumR}, ${texNumC}, index);\n return sampleTexture(${texName}, uv);\n }\n `;\n}\nfunction getPackedSamplerND(inputInfo) {\n const shape = inputInfo.shapeInfo.logicalShape;\n const rank = shape.length;\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n const texShape = inputInfo.shapeInfo.texShape;\n const packedTexShape = [Math.ceil(texShape[0] / 2), Math.ceil(texShape[1] / 2)];\n const texNumR = packedTexShape[0];\n const texNumC = packedTexShape[1];\n const valuesPerRow = Math.ceil(shape[rank - 1] / 2);\n let texelsInBatch = valuesPerRow * Math.ceil(shape[rank - 2] / 2);\n let params = `int b, int row, int col`;\n let index = `b * ${texelsInBatch} + (row / 2) * ${valuesPerRow} + (col / 2)`;\n for (let b = 2; b < rank - 1; b++) {\n params = `int b${b}, ` + params;\n texelsInBatch *= shape[rank - b - 1];\n index = `b${b} * ${texelsInBatch} + ` + index;\n }\n const glsl = getGlslDifferences();\n return `\n vec4 ${funcName}(${params}) {\n int index = ${index};\n int texR = index / ${texNumC};\n int texC = index - texR * ${texNumC};\n vec2 uv = (vec2(texC, texR) + halfCR) / vec2(${texNumC}, ${texNumR});\n return ${glsl.texture2D}(${texName}, uv);\n }\n `;\n}\nfunction getSampler4D(inputInfo) {\n const shape = inputInfo.shapeInfo.logicalShape;\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n const stride2 = shape[3];\n const stride1 = shape[2] * stride2;\n const stride0 = shape[1] * stride1;\n const { newShape, keptDims } = util.squeezeShape(shape);\n if (newShape.length < shape.length) {\n const newInputInfo = squeezeInputInfo(inputInfo, newShape);\n const params = ['row', 'col', 'depth', 'depth2'];\n return `\n ${getSamplerFromInInfo(newInputInfo)}\n float ${funcName}(int row, int 
col, int depth, int depth2) {\n return ${funcName}(${getSqueezedParams(params, keptDims)});\n }\n `;\n }\n if (inputInfo.shapeInfo.isUniform) {\n // Uniform arrays will be less than 65505 (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth, int depth2) {\n int index = round(dot(vec4(row, col, depth, depth2),\n vec4(${stride0}, ${stride1}, ${stride2}, 1)));\n ${getUniformSampler(inputInfo)}\n }\n `;\n }\n const flatOffset = inputInfo.shapeInfo.flatOffset;\n const texShape = inputInfo.shapeInfo.texShape;\n const texNumR = texShape[0];\n const texNumC = texShape[1];\n if (texNumC === stride0 && flatOffset == null) {\n // texC is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth, int depth2) {\n float texR = float(row);\n float texC =\n dot(vec3(col, depth, depth2),\n vec3(${stride1}, ${stride2}, 1));\n vec2 uv = (vec2(texC, texR) + halfCR) /\n vec2(${texNumC}.0, ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n if (texNumC === stride2 && flatOffset == null) {\n // texR is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth, int depth2) {\n float texR = dot(vec3(row, col, depth),\n vec3(${shape[1] * shape[2]}, ${shape[2]}, 1));\n float texC = float(depth2);\n vec2 uv = (vec2(texC, texR) + halfCR) /\n vec2(${texNumC}.0, ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n const offset = getFlatOffsetUniformName(texName);\n return `\n float ${funcName}(int row, int col, int depth, int depth2) {\n // Explicitly use integer operations as dot() only works on floats.\n int index = row * ${stride0} + col * ${stride1} +\n depth * ${stride2} + depth2;\n vec2 uv = uvFromFlat(${texNumR}, ${texNumC}, index + ${offset});\n return sampleTexture(${texName}, uv);\n }\n `;\n}\nfunction getSampler5D(inputInfo) {\n const shape = inputInfo.shapeInfo.logicalShape;\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n const stride3 = shape[4];\n const stride2 = shape[3] * stride3;\n const stride1 = shape[2] * stride2;\n const stride0 = shape[1] * stride1;\n const { newShape, keptDims } = util.squeezeShape(shape);\n if (newShape.length < shape.length) {\n const newInputInfo = squeezeInputInfo(inputInfo, newShape);\n const params = ['row', 'col', 'depth', 'depth2', 'depth3'];\n return `\n ${getSamplerFromInInfo(newInputInfo)}\n float ${funcName}(int row, int col, int depth, int depth2, int depth3) {\n return ${funcName}(${getSqueezedParams(params, keptDims)});\n }\n `;\n }\n if (inputInfo.shapeInfo.isUniform) {\n // Uniform arrays will be less than 65505 (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth, int depth2, int depth3) {\n float index = dot(\n vec4(row, col, depth, depth2),\n vec4(${stride0}, ${stride1}, ${stride2}, ${stride3})) +\n depth3;\n ${getUniformSampler(inputInfo)}\n }\n `;\n }\n const flatOffset = inputInfo.shapeInfo.flatOffset;\n const texShape = inputInfo.shapeInfo.texShape;\n const texNumR = texShape[0];\n const texNumC = texShape[1];\n if (texNumC === stride0 && flatOffset == null) {\n // texC is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth, int depth2, int depth3) {\n int texR = row;\n float texC = dot(vec4(col, depth, depth2, depth3),\n vec4(${stride1}, ${stride2}, ${stride3}, 1));\n vec2 uv = (vec2(texC, texR) + halfCR) 
/\n vec2(${texNumC}.0, ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n if (texNumC === stride3 && flatOffset == null) {\n // texR is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth, int depth2, int depth3) {\n float texR = dot(\n vec4(row, col, depth, depth2),\n vec4(${shape[1] * shape[2] * shape[3]},\n ${shape[2] * shape[3]}, ${shape[3]}, 1));\n int texC = depth3;\n vec2 uv = (vec2(texC, texR) + halfCR) /\n vec2(${texNumC}.0, ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n const offset = getFlatOffsetUniformName(texName);\n return `\n float ${funcName}(int row, int col, int depth, int depth2, int depth3) {\n // Explicitly use integer operations as dot() only works on floats.\n int index = row * ${stride0} + col * ${stride1} + depth * ${stride2} +\n depth2 * ${stride3} + depth3 + ${offset};\n vec2 uv = uvFromFlat(${texNumR}, ${texNumC}, index);\n return sampleTexture(${texName}, uv);\n }\n `;\n}\nfunction getSampler6D(inputInfo) {\n const shape = inputInfo.shapeInfo.logicalShape;\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n const { newShape, keptDims } = util.squeezeShape(shape);\n if (newShape.length < shape.length) {\n const newInputInfo = squeezeInputInfo(inputInfo, newShape);\n const params = ['row', 'col', 'depth', 'depth2', 'depth3', 'depth4'];\n return `\n ${getSamplerFromInInfo(newInputInfo)}\n float ${funcName}(int row, int col, int depth,\n int depth2, int depth3, int depth4) {\n return ${funcName}(${getSqueezedParams(params, keptDims)});\n }\n `;\n }\n const stride4 = shape[5];\n const stride3 = shape[4] * stride4;\n const stride2 = shape[3] * stride3;\n const stride1 = shape[2] * stride2;\n const stride0 = shape[1] * stride1;\n if (inputInfo.shapeInfo.isUniform) {\n // Uniform arrays will be less than 65505 (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth,\n int depth2, int depth3, int depth4) {\n int index = round(dot(\n vec4(row, col, depth, depth2),\n vec4(${stride0}, ${stride1}, ${stride2}, ${stride3})) +\n dot(\n vec2(depth3, depth4),\n vec2(${stride4}, 1)));\n ${getUniformSampler(inputInfo)}\n }\n `;\n }\n const flatOffset = inputInfo.shapeInfo.flatOffset;\n const texShape = inputInfo.shapeInfo.texShape;\n const texNumR = texShape[0];\n const texNumC = texShape[1];\n if (texNumC === stride0 && flatOffset == null) {\n // texC is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth,\n int depth2, int depth3, int depth4) {\n int texR = row;\n float texC = dot(vec4(col, depth, depth2, depth3),\n vec4(${stride1}, ${stride2}, ${stride3}, ${stride4})) +\n float(depth4);\n vec2 uv = (vec2(texC, texR) + halfCR) /\n vec2(${texNumC}.0, ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n if (texNumC === stride4 && flatOffset == null) {\n // texR is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth,\n int depth2, int depth3, int depth4) {\n float texR = dot(vec4(row, col, depth, depth2),\n vec4(${shape[1] * shape[2] * shape[3] * shape[4]},\n ${shape[2] * shape[3] * shape[4]},\n ${shape[3] * shape[4]},\n ${shape[4]})) + float(depth3);\n int texC = depth4;\n vec2 uv = (vec2(texC, texR) + halfCR) /\n vec2(${texNumC}.0, ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n const offset = 
getFlatOffsetUniformName(texName);\n return `\n float ${funcName}(int row, int col, int depth,\n int depth2, int depth3, int depth4) {\n // Explicitly use integer operations as dot() only works on floats.\n int index = row * ${stride0} + col * ${stride1} + depth * ${stride2} +\n depth2 * ${stride3} + depth3 * ${stride4} + depth4 + ${offset};\n vec2 uv = uvFromFlat(${texNumR}, ${texNumC}, index);\n return sampleTexture(${texName}, uv);\n }\n `;\n}\nfunction getUniformSampler(inputInfo) {\n const texName = inputInfo.name;\n const inSize = util.sizeFromShape(inputInfo.shapeInfo.logicalShape);\n if (inSize < 2) {\n return `return ${texName};`;\n }\n return `\n for (int i = 0; i < ${inSize}; i++) {\n if (i == index) {\n return ${texName}[i];\n }\n }\n `;\n}\nfunction getPackedSamplerAtOutputCoords(inputInfo, outShapeInfo) {\n const texName = inputInfo.name;\n const texFuncSnippet = texName.charAt(0).toUpperCase() + texName.slice(1);\n const funcName = 'get' + texFuncSnippet + 'AtOutCoords';\n const inRank = inputInfo.shapeInfo.logicalShape.length;\n const outRank = outShapeInfo.logicalShape.length;\n const broadcastDims = getBroadcastDims(inputInfo.shapeInfo.logicalShape, outShapeInfo.logicalShape);\n const type = getCoordsDataType(outRank);\n const rankDiff = outRank - inRank;\n let coordsSnippet;\n const fields = ['x', 'y', 'z', 'w', 'u', 'v'];\n if (inRank === 0) {\n coordsSnippet = '';\n }\n else if (outRank < 2 && broadcastDims.length >= 1) {\n coordsSnippet = 'coords = 0;';\n }\n else {\n coordsSnippet =\n broadcastDims.map(d => `coords.${fields[d + rankDiff]} = 0;`)\n .join('\\n');\n }\n let unpackedCoordsSnippet = '';\n if (outRank < 2 && inRank > 0) {\n unpackedCoordsSnippet = 'coords';\n }\n else {\n unpackedCoordsSnippet = inputInfo.shapeInfo.logicalShape\n .map((s, i) => `coords.${fields[i + rankDiff]}`)\n .join(', ');\n }\n let output = `return outputValue;`;\n const inSize = util.sizeFromShape(inputInfo.shapeInfo.logicalShape);\n const isInputScalar = inSize === 1;\n const outSize = util.sizeFromShape(outShapeInfo.logicalShape);\n const isOutputScalar = outSize === 1;\n if (inRank === 1 && !isInputScalar && !isOutputScalar) {\n output = `\n return vec4(outputValue.xy, outputValue.xy);\n `;\n }\n else if (isInputScalar && !isOutputScalar) {\n if (outRank === 1) {\n output = `\n return vec4(outputValue.x, outputValue.x, 0., 0.);\n `;\n }\n else {\n output = `\n return vec4(outputValue.x);\n `;\n }\n }\n else if (broadcastDims.length) {\n const rows = inRank - 2;\n const cols = inRank - 1;\n if (broadcastDims.indexOf(rows) > -1 && broadcastDims.indexOf(cols) > -1) {\n output = `return vec4(outputValue.x);`;\n }\n else if (broadcastDims.indexOf(rows) > -1) {\n output = `return vec4(outputValue.x, outputValue.y, ` +\n `outputValue.x, outputValue.y);`;\n }\n else if (broadcastDims.indexOf(cols) > -1) {\n output = `return vec4(outputValue.xx, outputValue.zz);`;\n }\n }\n return `\n vec4 ${funcName}() {\n ${type} coords = getOutputCoords();\n ${coordsSnippet}\n vec4 outputValue = get${texFuncSnippet}(${unpackedCoordsSnippet});\n ${output}\n }\n `;\n}\nfunction getSamplerAtOutputCoords(inputInfo, outShapeInfo) {\n const texName = inputInfo.name;\n const texFuncSnippet = texName.charAt(0).toUpperCase() + texName.slice(1);\n const funcName = 'get' + texFuncSnippet + 'AtOutCoords';\n const outTexShape = outShapeInfo.texShape;\n const inTexShape = inputInfo.shapeInfo.texShape;\n const inRank = inputInfo.shapeInfo.logicalShape.length;\n const outRank = outShapeInfo.logicalShape.length;\n if 
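// Illustrative sketch (not part of the bundled tfjs source): the *AtOutCoords
// samplers above handle broadcasting by zeroing the output coordinates that
// correspond to size-1 input dimensions before sampling the input. A simplified
// stand-in for backend_util.getBroadcastDims plus that zeroing step:
function broadcastDimsSketch(inShape, outShape) {
  // Indices into inShape (right-aligned against outShape) that broadcast.
  const dims = [];
  for (let i = 0; i < inShape.length; i++) {
    const inDim = inShape[inShape.length - 1 - i];
    const outDim = outShape[outShape.length - 1 - i];
    if (inDim === 1 && outDim > 1) dims.unshift(inShape.length - 1 - i);
  }
  return dims;
}
function inputCoordsForOutput(outCoords, inShape, outShape) {
  const rankDiff = outShape.length - inShape.length;
  const coords = outCoords.slice(rankDiff);                              // drop leading output dims
  for (const d of broadcastDimsSketch(inShape, outShape)) coords[d] = 0; // broadcast dims read index 0
  return coords;
}
console.log(inputCoordsForOutput([2, 1, 3], [1, 4], [5, 2, 4])); // [0, 3]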
(!inputInfo.shapeInfo.isUniform && inRank === outRank &&\n inputInfo.shapeInfo.flatOffset == null &&\n util.arraysEqual(inTexShape, outTexShape)) {\n return `\n float ${funcName}() {\n return sampleTexture(${texName}, resultUV);\n }\n `;\n }\n const type = getCoordsDataType(outRank);\n const broadcastDims = getBroadcastDims(inputInfo.shapeInfo.logicalShape, outShapeInfo.logicalShape);\n const rankDiff = outRank - inRank;\n let coordsSnippet;\n const fields = ['x', 'y', 'z', 'w', 'u', 'v'];\n if (inRank === 0) {\n coordsSnippet = '';\n }\n else if (outRank < 2 && broadcastDims.length >= 1) {\n coordsSnippet = 'coords = 0;';\n }\n else {\n coordsSnippet =\n broadcastDims.map(d => `coords.${fields[d + rankDiff]} = 0;`)\n .join('\\n');\n }\n let unpackedCoordsSnippet = '';\n if (outRank < 2 && inRank > 0) {\n unpackedCoordsSnippet = 'coords';\n }\n else {\n unpackedCoordsSnippet = inputInfo.shapeInfo.logicalShape\n .map((s, i) => `coords.${fields[i + rankDiff]}`)\n .join(', ');\n }\n return `\n float ${funcName}() {\n ${type} coords = getOutputCoords();\n ${coordsSnippet}\n return get${texFuncSnippet}(${unpackedCoordsSnippet});\n }\n `;\n}\nexport function getCoordsDataType(rank) {\n if (rank <= 1) {\n return 'int';\n }\n else if (rank === 2) {\n return 'ivec2';\n }\n else if (rank === 3) {\n return 'ivec3';\n }\n else if (rank === 4) {\n return 'ivec4';\n }\n else if (rank === 5) {\n return 'ivec5';\n }\n else if (rank === 6) {\n return 'ivec6';\n }\n else {\n throw Error(`GPU for rank ${rank} is not yet supported`);\n }\n}\n/** Returns a new input info (a copy) that has a squeezed logical shape. */\nfunction squeezeInputInfo(inInfo, squeezedShape) {\n // Deep copy.\n const newInputInfo = JSON.parse(JSON.stringify(inInfo));\n newInputInfo.shapeInfo.logicalShape = squeezedShape;\n return newInputInfo;\n}\nfunction getSqueezedParams(params, keptDims) {\n return keptDims.map(d => params[d]).join(', ');\n}\n//# sourceMappingURL=shader_compiler.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nimport { getChannels } from './packing_util';\nimport { getCoordsDataType } from './shader_compiler';\nexport class ArgMinMaxPackedProgram {\n constructor(shape, windowSize, op, firstPass) {\n this.variableNames = ['A'];\n this.packedInputs = true;\n this.packedOutput = true;\n util.assert(shape.length > 2, () => `Packed arg${op.charAt(0).toUpperCase() +\n op.slice(1)} supports only inputs with rank above 2.`);\n const inSize = shape[shape.length - 1];\n const outSize = Math.ceil(inSize / windowSize);\n this.outputShape = shape.slice(0, -1);\n if (outSize > 1) {\n this.outputShape.push(outSize);\n }\n if (!firstPass) {\n this.variableNames.push('bestIndicesA');\n }\n const outShape = this.outputShape;\n const rank = outShape.length;\n const dtype = getCoordsDataType(rank);\n const coords = getChannels('coords', rank);\n let sourceLocSetup;\n let sourceRank;\n if (outSize === 1) {\n sourceRank = rank + 1;\n const sourceLocDType = getCoordsDataType(sourceRank);\n sourceLocSetup = `\n ${sourceLocDType} sourceLocR = ${sourceLocDType}(${coords.join()}, 0);\n ++${coords[rank - 1]};\n ${sourceLocDType} sourceLocG = ${sourceLocDType}(${coords.join()}, 0);\n ++${coords[rank - 2]};\n ${sourceLocDType} sourceLocA = ${sourceLocDType}(${coords.join()}, 0);\n --${coords[rank - 1]};\n ${sourceLocDType} sourceLocB = ${sourceLocDType}(${coords.join()}, 0);\n --${coords[rank - 2]};`;\n }\n else {\n sourceRank = rank;\n sourceLocSetup = `\n ${dtype} sourceLocR = coords;\n ++${coords[rank - 1]};\n ${dtype} sourceLocG = coords;\n ++${coords[rank - 2]};\n ${dtype} sourceLocA = coords;\n --${coords[rank - 1]};\n ${dtype} sourceLocB = coords;\n --${coords[rank - 2]};`;\n }\n const channels = ['x', 'y', 'z', 'w', 'u', 'v'].slice(0, sourceRank);\n const inChannel = '.' + channels[sourceRank - 1]; // e.g. \".b\" for rank 3.\n const intChannels = channels.map(x => 'int ' + x);\n const srcRCoords = getChannels('sourceLocR', sourceRank - 1).concat('inIdx.r');\n const srcGCoords = getChannels('sourceLocG', sourceRank - 1).concat('inIdx.g');\n const srcBCoords = getChannels('sourceLocB', sourceRank - 1).concat('inIdx.b');\n const srcACoords = getChannels('sourceLocA', sourceRank - 1).concat('inIdx.a');\n const compOp = (op === 'max') ? 'greaterThan' : 'lessThan';\n const fetchCandidateIdx = firstPass ? '' : `\n inIdx = round(vec4(getBestIndicesAChannel(${srcRCoords.join()}),\n getBestIndicesAChannel(${srcGCoords.join()}),\n getBestIndicesAChannel(${srcBCoords.join()}),\n getBestIndicesAChannel(${srcACoords.join()})));`;\n const fetchValue = `vec4(\n getAChannel(${srcRCoords.join()}),\n hasNextCol ? getAChannel(${srcGCoords.join()}) : 0.,\n hasNextRow ? getAChannel(${srcBCoords.join()}) : 0.,\n hasNextRow && hasNextCol ? getAChannel(${srcACoords.join()}) : 0.)`;\n const getBestIndicesAChannelSnippet = firstPass ? 
'' : `\n float getBestIndicesAChannel(${intChannels.join()}) {\n return getChannel(getBestIndicesA(${channels.join()}),\n vec2(${channels.slice(-2).join()}));\n }`;\n this.userCode = `\n float getAChannel(${intChannels.join()}) {\n return getChannel(getA(${channels.join()}),\n vec2(${channels.slice(-2).join()}));\n }\n ${getBestIndicesAChannelSnippet}\n void main() {\n ${dtype} coords = getOutputCoords();\n bool hasNextCol = ${coords[rank - 1]} < ${outShape[rank - 1] - 1};\n bool hasNextRow = ${coords[rank - 2]} < ${outShape[rank - 2] - 1};\n ${sourceLocSetup}\n ivec4 srcIdx = ivec4(sourceLocR${inChannel}, sourceLocG${inChannel},\n sourceLocB${inChannel}, sourceLocA${inChannel}) * ${windowSize};\n ivec4 inIdx = srcIdx;\n vec4 bestIndex = vec4(inIdx);\n vec4 bestValue = ${fetchValue};\n\n for (int i = 0; i < ${windowSize}; i++) {\n inIdx = srcIdx;\n ${fetchCandidateIdx}\n vec4 candidate = ${fetchValue};\n bvec4 nan = isnan(candidate);\n bvec4 replace = bvec4(\n vec4(${compOp}(candidate, bestValue)) * (vec4(1.0) - vec4(nan)));\n\n bestValue = vec4(replace.x ? candidate.x : bestValue.x,\n replace.y ? candidate.y : bestValue.y,\n replace.z ? candidate.z : bestValue.z,\n replace.w ? candidate.w : bestValue.w);\n bestIndex = mix(bestIndex, vec4(inIdx), vec4(replace));\n srcIdx++;\n }\n setOutput(bestIndex);\n }\n `;\n }\n}\n//# sourceMappingURL=argminmax_packed_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class AvgPool2DBackpropProgram {\n constructor(convInfo) {\n this.variableNames = ['dy'];\n this.outputShape = convInfo.inShape;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padTop = effectiveFilterHeight - 1 - convInfo.padInfo.top;\n const padLeft = effectiveFilterWidth - 1 - convInfo.padInfo.left;\n const avgMultiplier = 1 / (filterHeight * filterWidth);\n this.userCode = `\n const ivec2 pads = ivec2(${padTop}, ${padLeft});\n const float avgMultiplier = float(${avgMultiplier});\n\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int d = coords[3];\n\n ivec2 dyRCCorner = coords.yz - pads;\n int dyRCorner = dyRCCorner.x;\n int dyCCorner = dyRCCorner.y;\n\n // Convolve dy(?, ?, d) with pos mask(:, :, d) to get dx(xR, xC, d).\n // ? = to be determined. 
: = across all values in that axis.\n float dotProd = 0.0;\n for (int wR = 0; wR < ${effectiveFilterHeight};\n wR += ${dilationHeight}) {\n float dyR = float(dyRCorner + wR) / ${strideHeight}.0;\n\n if (dyR < 0.0 || dyR >= ${convInfo.outHeight}.0 || fract(dyR) > 0.0) {\n continue;\n }\n int idyR = int(dyR);\n\n for (int wC = 0; wC < ${effectiveFilterWidth};\n wC+= ${dilationWidth}) {\n float dyC = float(dyCCorner + wC) / ${strideWidth}.0;\n\n if (dyC < 0.0 || dyC >= ${convInfo.outWidth}.0 ||\n fract(dyC) > 0.0) {\n continue;\n }\n int idyC = int(dyC);\n\n float dyValue = getDy(b, idyR, idyC, d);\n\n dotProd += dyValue * avgMultiplier;\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\nexport class AvgPool3DBackpropProgram {\n constructor(convInfo) {\n this.variableNames = ['dy'];\n this.outputShape = convInfo.inShape;\n const filterDepth = convInfo.filterDepth;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const strideDepth = convInfo.strideDepth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationDepth = convInfo.dilationDepth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterDepth = convInfo.effectiveFilterDepth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padFront = effectiveFilterDepth - 1 - convInfo.padInfo.front;\n const padTop = effectiveFilterHeight - 1 - convInfo.padInfo.top;\n const padLeft = effectiveFilterWidth - 1 - convInfo.padInfo.left;\n const avgMultiplier = 1 / (filterDepth * filterHeight * filterWidth);\n this.userCode = `\n const ivec3 pads = ivec3(${padFront}, ${padTop}, ${padLeft});\n const float avgMultiplier = float(${avgMultiplier});\n\n void main() {\n ivec5 coords = getOutputCoords();\n int batch = coords.x;\n int ch = coords.u;\n\n ivec3 dyCorner = ivec3(coords.y, coords.z, coords.w) - pads;\n int dyDCorner = dyCorner.x;\n int dyRCorner = dyCorner.y;\n int dyCCorner = dyCorner.z;\n\n // Convolve dy(?, ?, ?, d) with pos mask(:, :, :, ch) to get\n // dx(xD, xR, xC, ch).\n // ? = to be determined. : = across all values in that axis.\n float dotProd = 0.0;\n\n for (int wD = 0; wD < ${effectiveFilterDepth};\n wD += ${dilationDepth}) {\n float dyD = float(dyDCorner + wD) / ${strideDepth}.0;\n\n if (dyD < 0.0 || dyD >= ${convInfo.outDepth}.0 || fract(dyD) > 0.0) {\n continue;\n }\n int idyD = int(dyD);\n\n for (int wR = 0; wR < ${effectiveFilterHeight};\n wR += ${dilationHeight}) {\n float dyR = float(dyRCorner + wR) / ${strideHeight}.0;\n\n if (dyR < 0.0 || dyR >= ${convInfo.outHeight}.0 ||\n fract(dyR) > 0.0) {\n continue;\n }\n int idyR = int(dyR);\n\n for (int wC = 0; wC < ${effectiveFilterWidth};\n wC += ${dilationWidth}) {\n float dyC = float(dyCCorner + wC) / ${strideWidth}.0;\n\n if (dyC < 0.0 || dyC >= ${convInfo.outWidth}.0 ||\n fract(dyC) > 0.0) {\n continue;\n }\n int idyC = int(dyC);\n\n float dyValue = getDy(batch, idyD, idyR, idyC, ch);\n\n dotProd += dyValue * avgMultiplier;\n }\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\n//# sourceMappingURL=avg_pool_backprop_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util } from '@tensorflow/tfjs-core';\nconst CHECK_NAN_SNIPPET = `\n if (isnan(a)) return a;\n if (isnan(b)) return b;\n`;\n// We use native integer division to deal with floating point imprecision. Since\n// we implement floor division and glsl implements truncated division, we\n// correct for this by subtracting 1 from result when the result is negative and\n// there is a remainder.\nexport const INT_DIV = `\n float s = sign(a) * sign(b);\n int ia = round(a);\n int ib = round(b);\n if (ib != 0) {\n // Windows (D3D) wants guaranteed non-zero int division at compile-time.\n return float(idiv(ia, ib, s));\n } else {\n return NAN;\n }\n`;\nexport const POW = `\nif(a < 0.0 && floor(b) < b){\n return NAN;\n}\nif (b == 0.0) {\n return 1.0;\n}\nreturn (round(mod(b, 2.0)) != 1) ?\n pow(abs(a), b) : sign(a) * pow(abs(a), b);\n`;\nexport const SQUARED_DIFFERENCE = 'return (a - b) * (a - b);';\nexport const EQUAL = `return float(a == b);`;\nexport const LESS = `return float(a < b);`;\nexport const LESS_EQUAL = `return float(a <= b);`;\nexport const GREATER = `return float(a > b);`;\nexport const GREATER_EQUAL = `return float(a >= b);`;\nexport const LOGICAL_AND = `return float(a >= 1.0 && b >= 1.0);`;\nexport const LOGICAL_OR = `return float(a >= 1.0 || b >= 1.0);`;\nexport const MAX = CHECK_NAN_SNIPPET + `\n return max(a, b);\n`;\nexport const MIN = CHECK_NAN_SNIPPET + `\n return min(a, b);\n`;\nexport const MOD = `if (b == 0.0) return NAN;\n return mod(a, b);`;\nexport const ELU_DER = `return (b >= 1.0) ? a : a * (b + 1.0);`;\nexport const PRELU = `return (a < 0.) ? b * a : a;`;\nexport class BinaryOpProgram {\n constructor(op, aShape, bShape) {\n this.variableNames = ['A', 'B'];\n this.outputShape = backend_util.assertAndGetBroadcastShape(aShape, bShape);\n this.userCode = `\n float binaryOperation(float a, float b) {\n ${op}\n }\n\n void main() {\n float a = getAAtOutCoords();\n float b = getBAtOutCoords();\n setOutput(binaryOperation(a, b));\n }\n `;\n }\n}\n//# sourceMappingURL=binaryop_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, util } from '@tensorflow/tfjs-core';\nimport { getChannels } from './packing_util';\nimport { getCoordsDataType } from './shader_compiler';\nconst CHECK_NAN_SNIPPET = `\n result.r = isNaN.r > 0. ? NAN : result.r;\n result.g = isNaN.g > 0. ? NAN : result.g;\n result.b = isNaN.b > 0. ? NAN : result.b;\n result.a = isNaN.a > 0. ? NAN : result.a;\n`;\nexport const INT_DIV = `\n ivec4 ia = round(a);\n ivec4 ib = round(b);\n bvec4 cond = notEqual(ib, ivec4(0));\n ivec4 result = ivec4(0);\n vec4 s = sign(a) * sign(b);\n\n // Windows (D3D) wants guaranteed non-zero int division at compile-time.\n if (cond[0]) {\n result[0] = idiv(ia[0], ib[0], s[0]);\n }\n if (cond[1]) {\n result[1] = idiv(ia[1], ib[1], s[1]);\n }\n if (cond[2]) {\n result[2] = idiv(ia[2], ib[2], s[2]);\n }\n if (cond[3]) {\n result[3] = idiv(ia[3], ib[3], s[3]);\n }\n return vec4(result);\n`;\nexport const POW = `\n // isModRound1 has 1 for components with round(mod(b, 2.0)) == 1, 0 otherwise.\n vec4 isModRound1 = vec4(equal(round(mod(b, 2.0)), ivec4(1)));\n vec4 multiplier = sign(a) * isModRound1 + (vec4(1.0) - isModRound1);\n vec4 result = multiplier * pow(abs(a), b);\n\n // Ensure that a^0 = 1, including 0^0 = 1 as this correspond to TF and JS\n bvec4 isExpZero = equal(b, vec4(0.0));\n result.r = isExpZero.r ? 1.0 : result.r;\n result.g = isExpZero.g ? 1.0 : result.g;\n result.b = isExpZero.b ? 1.0 : result.b;\n result.a = isExpZero.a ? 
1.0 : result.a;\n\n vec4 isNaN = vec4(lessThan(a, vec4(0.0))) * vec4(lessThan(floor(b), b));\n ` +\n CHECK_NAN_SNIPPET + `\n return result;\n`;\nexport const PRELU = `\n vec4 aLessThanZero = vec4(lessThan(a, vec4(0.)));\n return (aLessThanZero * (b * a)) + ((vec4(1.0) - aLessThanZero) * a);\n`;\nexport const ELU_DER = `\n vec4 bGTEZero = vec4(greaterThanEqual(b, vec4(0.)));\n return (bGTEZero * a) + ((vec4(1.0) - bGTEZero) * (a * (b + vec4(1.0))));\n`;\nexport const EQUAL = `\n return vec4(equal(a, b));\n`;\nexport const NOT_EQUAL = `\n return vec4(notEqual(a, b));\n`;\nexport const LESS = `\n return vec4(lessThan(a, b));\n`;\nexport const LESS_EQUAL = `\n return vec4(lessThanEqual(a, b));\n`;\nexport const GREATER = `\n return vec4(greaterThan(a, b));\n`;\nexport const GREATER_EQUAL = `\n return vec4(greaterThanEqual(a, b));\n`;\nexport const LOGICAL_AND = `\n return vec4(\n vec4(greaterThanEqual(a, vec4(1.0))) *\n vec4(greaterThanEqual(b, vec4(1.0))));\n`;\nexport const LOGICAL_OR = `\n return min(\n vec4(greaterThanEqual(a, vec4(1.0))) +\n vec4(greaterThanEqual(b, vec4(1.0))),\n vec4(1.0));\n`;\nexport const MAX = `\n vec4 result = vec4(max(a, b));\n vec4 isNaN = min(vec4(isnan(a)) + vec4(isnan(b)), vec4(1.0));\n ` +\n CHECK_NAN_SNIPPET + `\n return result;\n`;\nexport const MIN = `\n vec4 result = vec4(min(a, b));\n vec4 isNaN = min(vec4(isnan(a)) + vec4(isnan(b)), vec4(1.0));\n ` +\n CHECK_NAN_SNIPPET + `\n return result;\n`;\nexport const MOD = `\n vec4 result = mod(a, b);\n vec4 isNaN = vec4(equal(b, vec4(0.0)));\n ` +\n CHECK_NAN_SNIPPET + `\n return result;\n`;\nexport class BinaryOpPackedProgram {\n constructor(op, aShape, bShape, checkOutOfBounds = false) {\n this.variableNames = ['A', 'B'];\n this.supportsBroadcasting = true;\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = backend_util.assertAndGetBroadcastShape(aShape, bShape);\n const rank = this.outputShape.length;\n let checkOutOfBoundsString = '';\n if (checkOutOfBounds) {\n if (rank === 0 || util.sizeFromShape(this.outputShape) === 1) {\n checkOutOfBoundsString = `\n result.y = 0.;\n result.z = 0.;\n result.w = 0.;\n `;\n }\n else {\n const dtype = getCoordsDataType(rank);\n checkOutOfBoundsString = `\n ${dtype} coords = getOutputCoords();\n `;\n if (rank === 1) {\n checkOutOfBoundsString += `\n result.y = (coords + 1) >= ${this.outputShape[0]} ? 0. : result.y;\n result.z = 0.;\n result.w = 0.;\n `;\n }\n else {\n const channels = getChannels('coords', rank);\n checkOutOfBoundsString += `\n bool nextRowOutOfBounds =\n (${channels[rank - 2]} + 1) >= ${this.outputShape[rank - 2]};\n bool nextColOutOfBounds =\n (${channels[rank - 1]} + 1) >= ${this.outputShape[rank - 1]};\n result.y = nextColOutOfBounds ? 0. : result.y;\n result.z = nextRowOutOfBounds ? 0. : result.z;\n result.w = nextColOutOfBounds || nextRowOutOfBounds ? 0. : result.w;\n `;\n }\n }\n }\n this.userCode = `\n vec4 binaryOperation(vec4 a, vec4 b) {\n ${op}\n }\n\n void main() {\n vec4 a = getAAtOutCoords();\n vec4 b = getBAtOutCoords();\n\n vec4 result = binaryOperation(a, b);\n ${checkOutOfBoundsString}\n\n setOutput(result);\n }\n `;\n }\n}\n//# sourceMappingURL=binaryop_packed_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class ClipProgram {\n constructor(aShape) {\n this.variableNames = ['A'];\n this.outputShape = aShape;\n this.userCode = `\n uniform float minVal;\n uniform float maxVal;\n\n void main() {\n float value = getAAtOutCoords();\n if (isnan(value)) {\n setOutput(value);\n return;\n }\n\n setOutput(clamp(value, minVal, maxVal));\n }\n `;\n }\n getCustomSetupFunc(min, max) {\n return (gpgpu, webGLProgram) => {\n if (this.minLoc == null) {\n this.minLoc = gpgpu.getUniformLocationNoThrow(webGLProgram, 'minVal');\n this.maxLoc = gpgpu.getUniformLocationNoThrow(webGLProgram, 'maxVal');\n }\n gpgpu.gl.uniform1f(this.minLoc, min);\n gpgpu.gl.uniform1f(this.maxLoc, max);\n };\n }\n}\n//# sourceMappingURL=clip_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class ClipPackedProgram {\n constructor(aShape) {\n this.variableNames = ['A'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = aShape;\n this.userCode = `\n uniform float minVal;\n uniform float maxVal;\n\n void main() {\n vec4 value = getAAtOutCoords();\n\n if (any(isnan(value))) {\n setOutput(value);\n return;\n }\n\n setOutput(clamp(value, vec4(minVal), vec4(maxVal)));\n }\n `;\n }\n getCustomSetupFunc(min, max) {\n return (gpgpu, webGLProgram) => {\n if (this.minLoc == null) {\n this.minLoc = gpgpu.getUniformLocationNoThrow(webGLProgram, 'minVal');\n this.maxLoc = gpgpu.getUniformLocationNoThrow(webGLProgram, 'maxVal');\n }\n gpgpu.gl.uniform1f(this.minLoc, min);\n gpgpu.gl.uniform1f(this.maxLoc, max);\n };\n }\n}\n//# sourceMappingURL=clip_packed_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class ComplexAbsProgram {\n constructor(shape) {\n this.variableNames = ['real', 'imag'];\n this.outputShape = shape;\n this.userCode = `\n void main() {\n float re = abs(getRealAtOutCoords());\n float im = abs(getImagAtOutCoords());\n float mx = max(re, im);\n\n // sadly the length function in glsl is not underflow-safe\n // (at least not on Intel GPUs). So the safe solution is\n // to ensure underflow-safety in all cases.\n setOutput(\n mx == 0.0 ? 0.0 : mx * length(vec2(1, min(re, im)/mx))\n );\n }\n `;\n }\n}\n//# sourceMappingURL=complex_abs_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class Conv2DDerFilterProgram {\n constructor(convInfo) {\n this.variableNames = ['x', 'dy'];\n this.outputShape = convInfo.filterShape;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n const isChannelsLast = convInfo.dataFormat === 'channelsLast';\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int wR = coords.x;\n int wC = coords.y;\n int d1 = coords.z;\n int d2 = coords.w;\n\n // Convolve x(?, ?, d1) with dy(:, :, d2) to get dw(wR, wC, d1, d2).\n // ? = to be determined. 
: = across all values in that axis.\n float dotProd = 0.0;\n\n for (int b = 0; b < ${convInfo.batchSize}; b++) {\n for (int yR = 0; yR < ${convInfo.outHeight}; yR++) {\n int xR = wR + yR * ${strideHeight} - ${padTop};\n\n if (xR < 0 || xR >= ${convInfo.inHeight}) {\n continue;\n }\n\n for (int yC = 0; yC < ${convInfo.outWidth}; yC++) {\n int xC = wC + yC * ${strideWidth} - ${padLeft};\n\n if (xC < 0 || xC >= ${convInfo.inWidth}) {\n continue;\n }\n\n if (${isChannelsLast}) {\n float dyValue = getDy(b, yR, yC, d2);\n float xValue = getX(b, xR, xC, d1);\n dotProd += (xValue * dyValue);\n } else {\n float dyValue = getDy(b, d2, yR, yC);\n float xValue = getX(b, d1, xR, xC);\n dotProd += (xValue * dyValue);\n }\n\n }\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\nexport class Conv2DDerInputProgram {\n constructor(convInfo) {\n this.variableNames = ['dy', 'W'];\n this.outputShape = convInfo.inShape;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const isChannelsLast = convInfo.dataFormat === 'channelsLast';\n const padTop = filterHeight - 1 - convInfo.padInfo.top;\n const padLeft = filterWidth - 1 - convInfo.padInfo.left;\n const rowDim = isChannelsLast ? 1 : 2;\n const colDim = isChannelsLast ? 2 : 3;\n const channelDim = isChannelsLast ? 3 : 1;\n this.userCode = `\n const ivec2 pads = ivec2(${padTop}, ${padLeft});\n\n void main() {\n ivec4 coords = getOutputCoords();\n int batch = coords[0];\n int d1 = coords[${channelDim}];\n\n ivec2 dyCorner = ivec2(coords[${rowDim}], coords[${colDim}]) - pads;\n int dyRCorner = dyCorner.x;\n int dyCCorner = dyCorner.y;\n\n // Convolve dy(?, ?, d2) with w(:, :, d1, d2) to compute dx(xR, xC, d1).\n // ? = to be determined. 
: = across all values in that axis.\n float dotProd = 0.0;\n for (int wR = 0; wR < ${filterHeight}; wR++) {\n float dyR = float(dyRCorner + wR) / ${strideHeight}.0;\n\n if (dyR < 0.0 || dyR >= ${convInfo.outHeight}.0 || fract(dyR) > 0.0) {\n continue;\n }\n int idyR = int(dyR);\n\n int wRPerm = ${filterHeight} - 1 - wR;\n\n for (int wC = 0; wC < ${filterWidth}; wC++) {\n float dyC = float(dyCCorner + wC) / ${strideWidth}.0;\n\n if (dyC < 0.0 || dyC >= ${convInfo.outWidth}.0 ||\n fract(dyC) > 0.0) {\n continue;\n }\n int idyC = int(dyC);\n\n int wCPerm = ${filterWidth} - 1 - wC;\n\n for (int d2 = 0; d2 < ${convInfo.outChannels}; d2++) {\n\n if (${isChannelsLast}) {\n float xValue = getDy(batch, idyR, idyC, d2);\n float wValue = getW(wRPerm, wCPerm, d1, d2);\n dotProd += xValue * wValue;\n } else {\n float xValue = getDy(batch, d2, idyR, idyC);\n float wValue = getW(wRPerm, wCPerm, d1, d2);\n dotProd += xValue * wValue;\n }\n\n }\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\nexport class Conv3DDerFilterProgram {\n constructor(convInfo) {\n this.variableNames = ['x', 'dy'];\n this.outputShape = convInfo.filterShape;\n const strideDepth = convInfo.strideDepth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const padFront = convInfo.padInfo.front;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n this.userCode = `\n void main() {\n ivec5 coords = getOutputCoords();\n int wF = coords.x;\n int wR = coords.y;\n int wC = coords.z;\n int d1 = coords.w;\n int d2 = coords.u;\n\n float dotProd = 0.0;\n\n for (int b = 0; b < ${convInfo.batchSize}; b++) {\n for (int yF = 0; yF < ${convInfo.outDepth}; yF++) {\n int xF = wF + yF * ${strideDepth} - ${padFront};\n\n if (xF < 0 || xF >= ${convInfo.inDepth}) {\n continue;\n }\n\n for (int yR = 0; yR < ${convInfo.outHeight}; yR++) {\n int xR = wR + yR * ${strideHeight} - ${padTop};\n\n if (xR < 0 || xR >= ${convInfo.inHeight}) {\n continue;\n }\n\n for (int yC = 0; yC < ${convInfo.outWidth}; yC++) {\n int xC = wC + yC * ${strideWidth} - ${padLeft};\n\n if (xC < 0 || xC >= ${convInfo.inWidth}) {\n continue;\n }\n\n float dyValue = getDy(b, yF, yR, yC, d2);\n float xValue = getX(b, xF, xR, xC, d1);\n dotProd += (xValue * dyValue);\n }\n }\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\nexport class Conv3DDerInputProgram {\n constructor(convInfo) {\n this.variableNames = ['dy', 'W'];\n this.outputShape = convInfo.inShape;\n const filterDepth = convInfo.filterDepth;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const strideDepth = convInfo.strideDepth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const padFront = filterDepth - 1 - convInfo.padInfo.front;\n const padTop = filterHeight - 1 - convInfo.padInfo.top;\n const padLeft = filterWidth - 1 - convInfo.padInfo.left;\n this.userCode = `\n const ivec3 pads = ivec3(${padFront}, ${padTop}, ${padLeft});\n\n void main() {\n ivec5 coords = getOutputCoords();\n int batch = coords.x;\n int d1 = coords.u;\n\n\n ivec3 dyCorner = ivec3(coords.y, coords.z, coords.w) - pads;\n int dyFCorner = dyCorner.x;\n int dyRCorner = dyCorner.y;\n int dyCCorner = dyCorner.z;\n\n float dotProd = 0.0;\n for (int wF = 0; wF < ${filterDepth}; wF++) {\n float dyF = float(dyFCorner + wF) / ${strideDepth}.0;\n\n if (dyF < 0.0 || dyF >= ${convInfo.outDepth}.0 || fract(dyF) > 0.0) {\n continue;\n }\n int idyF = int(dyF);\n\n int wFPerm = ${filterDepth} - 1 - wF;\n\n for (int wR = 
0; wR < ${filterHeight}; wR++) {\n float dyR = float(dyRCorner + wR) / ${strideHeight}.0;\n\n if (dyR < 0.0 || dyR >= ${convInfo.outHeight}.0 ||\n fract(dyR) > 0.0) {\n continue;\n }\n int idyR = int(dyR);\n\n int wRPerm = ${filterHeight} - 1 - wR;\n\n for (int wC = 0; wC < ${filterWidth}; wC++) {\n float dyC = float(dyCCorner + wC) / ${strideWidth}.0;\n\n if (dyC < 0.0 || dyC >= ${convInfo.outWidth}.0 ||\n fract(dyC) > 0.0) {\n continue;\n }\n int idyC = int(dyC);\n\n int wCPerm = ${filterWidth} - 1 - wC;\n\n for (int d2 = 0; d2 < ${convInfo.outChannels}; d2++) {\n float xValue = getDy(batch, idyF, idyR, idyC, d2);\n float wValue = getW(wFPerm, wRPerm, wCPerm, d1, d2);\n dotProd += xValue * wValue;\n }\n }\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\n//# sourceMappingURL=conv_backprop_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class DepthwiseConv2DDerFilterProgram {\n constructor(convInfo) {\n this.variableNames = ['x', 'dy'];\n this.outputShape = convInfo.filterShape;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n const channelMul = convInfo.outChannels / convInfo.inChannels;\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int wR = coords.x;\n int wC = coords.y;\n int d1 = coords.z;\n int dm = coords.w;\n int d2 = d1 * ${channelMul} + dm;\n\n float dotProd = 0.0;\n\n // TO DO: Vec4 over the batch size\n for (int b = 0; b < ${convInfo.batchSize}; b++) {\n for (int yR = 0; yR < ${convInfo.outHeight}; yR++) {\n int xR = wR + yR * ${strideHeight} - ${padTop};\n\n if (xR < 0 || xR >= ${convInfo.inHeight}) {\n continue;\n }\n\n for (int yC = 0; yC < ${convInfo.outWidth}; yC++) {\n int xC = wC + yC * ${strideWidth} - ${padLeft};\n\n if (xC < 0 || xC >= ${convInfo.inWidth}) {\n continue;\n }\n\n float dyValue = getDy(b, yR, yC, d2);\n float xValue = getX(b, xR, xC, d1);\n dotProd += (xValue * dyValue);\n }\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\nexport class DepthwiseConv2DDerInputProgram {\n constructor(convInfo) {\n this.variableNames = ['dy', 'W'];\n this.outputShape = convInfo.inShape;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const padTop = filterHeight - 1 - convInfo.padInfo.top;\n const padLeft = filterWidth - 1 - convInfo.padInfo.left;\n const channelMul = convInfo.outChannels / convInfo.inChannels;\n this.userCode = `\n const ivec2 pads = ivec2(${padTop}, ${padLeft});\n\n void main() {\n ivec4 coords = getOutputCoords();\n int batch = coords[0];\n int d1 = coords[3];\n ivec2 dyCorner = coords.yz - pads;\n int dyRCorner = dyCorner.x;\n int dyCCorner = dyCorner.y;\n\n float dotProd = 0.0;\n\n for (int wR = 
0; wR < ${filterHeight}; wR++) {\n float dyR = float(dyRCorner + wR) / ${strideHeight}.0;\n\n if (dyR < 0.0 || dyR >= ${convInfo.outHeight}.0 || fract(dyR) > 0.0) {\n continue;\n }\n int idyR = int(dyR);\n\n int wRPerm = ${filterHeight} - 1 - wR;\n\n for (int wC = 0; wC < ${filterWidth}; wC++) {\n float dyC = float(dyCCorner + wC) / ${strideWidth}.0;\n\n if (dyC < 0.0 || dyC >= ${convInfo.outWidth}.0 ||\n fract(dyC) > 0.0) {\n continue;\n }\n int idyC = int(dyC);\n\n int wCPerm = ${filterWidth} - 1 - wC;\n\n // TO DO: Vec4 over the channelMul\n for (int dm = 0; dm < ${channelMul}; dm++) {\n int d2 = d1 * ${channelMul} + dm;\n float xValue = getDy(batch, idyR, idyC, d2);\n float wValue = getW(wRPerm, wCPerm, d1, dm);\n dotProd += xValue * wValue;\n }\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\n//# sourceMappingURL=conv_backprop_gpu_depthwise.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class Conv2DProgram {\n constructor(convInfo, addBias = false, activation = null, hasPreluActivationWeights = false) {\n this.variableNames = ['x', 'W'];\n this.outputShape = convInfo.outShape;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const inputDepthNearestVec4 = Math.floor(convInfo.inChannels / 4) * 4;\n const inputDepthVec4Remainder = convInfo.inChannels % 4;\n const isChannelsLast = convInfo.dataFormat === 'channelsLast';\n const rowDim = isChannelsLast ? 1 : 2;\n const colDim = isChannelsLast ? 2 : 3;\n const channelDim = isChannelsLast ? 3 : 1;\n let activationSnippet = '', applyActivationSnippet = '';\n if (activation) {\n if (hasPreluActivationWeights) {\n activationSnippet = `float activation(float a) {\n float b = getPreluActivationWeightsAtOutCoords();\n ${activation}\n }`;\n }\n else {\n activationSnippet = `\n float activation(float x) {\n ${activation}\n }\n `;\n }\n applyActivationSnippet = `result = activation(result);`;\n }\n const addBiasSnippet = addBias ? 
'result += getBiasAtOutCoords();' : '';\n if (addBias) {\n this.variableNames.push('bias');\n }\n if (hasPreluActivationWeights) {\n this.variableNames.push('preluActivationWeights');\n }\n this.userCode = `\n ${activationSnippet}\n\n const ivec2 strides = ivec2(${strideHeight}, ${strideWidth});\n const ivec2 pads = ivec2(${padTop}, ${padLeft});\n\n void main() {\n ivec4 coords = getOutputCoords();\n int batch = coords[0];\n int d2 = coords[${channelDim}];\n\n ivec2 xRCCorner =\n ivec2(coords[${rowDim}], coords[${colDim}]) * strides - pads;\n int xRCorner = xRCCorner.x;\n int xCCorner = xRCCorner.y;\n\n // Convolve x(?, ?, d1) with w(:, :, d1, d2) to get y(yR, yC, d2).\n // ? = to be determined. : = across all values in that axis.\n float dotProd = 0.0;\n for (int wR = 0; wR < ${filterHeight}; wR++) {\n int xR = xRCorner + wR * ${dilationHeight};\n\n if (xR < 0 || xR >= ${convInfo.inHeight}) {\n continue;\n }\n\n for (int wC = 0; wC < ${filterWidth}; wC++) {\n int xC = xCCorner + wC * ${dilationWidth};\n\n if (xC < 0 || xC >= ${convInfo.inWidth}) {\n continue;\n }\n\n for (int d1 = 0; d1 < ${inputDepthNearestVec4}; d1 += 4) {\n vec4 wValues = vec4(\n getW(wR, wC, d1, d2),\n getW(wR, wC, d1 + 1, d2),\n getW(wR, wC, d1 + 2, d2),\n getW(wR, wC, d1 + 3, d2)\n );\n\n if (${isChannelsLast}) {\n vec4 xValues = vec4(\n getX(batch, xR, xC, d1),\n getX(batch, xR, xC, d1 + 1),\n getX(batch, xR, xC, d1 + 2),\n getX(batch, xR, xC, d1 + 3)\n );\n dotProd += dot(xValues, wValues);\n } else {\n vec4 xValues = vec4(\n getX(batch, d1, xR, xC),\n getX(batch, d1 + 1, xR, xC),\n getX(batch, d1 + 2, xR, xC),\n getX(batch, d1 + 3, xR, xC)\n );\n dotProd += dot(xValues, wValues);\n }\n }\n\n if (${inputDepthVec4Remainder === 1}) {\n\n if (${isChannelsLast}) {\n dotProd +=\n getX(batch, xR, xC, ${inputDepthNearestVec4}) *\n getW(wR, wC, ${inputDepthNearestVec4}, d2);\n } else {\n dotProd +=\n getX(batch, ${inputDepthNearestVec4}, xR, xC) *\n getW(wR, wC, ${inputDepthNearestVec4}, d2);\n }\n\n } else if (${inputDepthVec4Remainder === 2}) {\n vec2 wValues = vec2(\n getW(wR, wC, ${inputDepthNearestVec4}, d2),\n getW(wR, wC, ${inputDepthNearestVec4} + 1, d2)\n );\n\n if (${isChannelsLast}) {\n vec2 xValues = vec2(\n getX(batch, xR, xC, ${inputDepthNearestVec4}),\n getX(batch, xR, xC, ${inputDepthNearestVec4} + 1)\n );\n dotProd += dot(xValues, wValues);\n } else {\n vec2 xValues = vec2(\n getX(batch, ${inputDepthNearestVec4}, xR, xC),\n getX(batch, ${inputDepthNearestVec4} + 1, xR, xC)\n );\n dotProd += dot(xValues, wValues);\n }\n\n } else if (${inputDepthVec4Remainder === 3}) {\n vec3 wValues = vec3(\n getW(wR, wC, ${inputDepthNearestVec4}, d2),\n getW(wR, wC, ${inputDepthNearestVec4} + 1, d2),\n getW(wR, wC, ${inputDepthNearestVec4} + 2, d2)\n );\n\n if (${isChannelsLast}) {\n vec3 xValues = vec3(\n getX(batch, xR, xC, ${inputDepthNearestVec4}),\n getX(batch, xR, xC, ${inputDepthNearestVec4} + 1),\n getX(batch, xR, xC, ${inputDepthNearestVec4} + 2)\n );\n dotProd += dot(xValues, wValues);\n } else {\n vec3 xValues = vec3(\n getX(batch, ${inputDepthNearestVec4}, xR, xC),\n getX(batch, ${inputDepthNearestVec4} + 1, xR, xC),\n getX(batch, ${inputDepthNearestVec4} + 2, xR, xC)\n );\n dotProd += dot(xValues, wValues);\n }\n\n }\n }\n }\n\n float result = dotProd;\n ${addBiasSnippet}\n ${applyActivationSnippet}\n setOutput(result);\n }\n `;\n }\n}\nexport class Conv3DProgram {\n constructor(convInfo) {\n this.variableNames = ['x', 'W'];\n this.outputShape = convInfo.outShape;\n const padFront = convInfo.padInfo.front;\n 
const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n const strideDepth = convInfo.strideDepth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationDepth = convInfo.dilationDepth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const filterDepth = convInfo.filterDepth;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const inputDepthNearestVec4 = Math.floor(convInfo.inChannels / 4) * 4;\n const inputDepthVec4Remainder = convInfo.inChannels % 4;\n this.userCode = `\n const ivec3 strides = ivec3(${strideDepth}, ${strideHeight}, ${strideWidth});\n const ivec3 pads = ivec3(${padFront}, ${padTop}, ${padLeft});\n\n void main() {\n ivec5 coords = getOutputCoords();\n int batch = coords.x;\n int d2 = coords.u;\n\n ivec3 xFRCCorner = ivec3(coords.y, coords.z, coords.w) * strides - pads;\n int xFCorner = xFRCCorner.x;\n int xRCorner = xFRCCorner.y;\n int xCCorner = xFRCCorner.z;\n\n // Convolve x(?, ?, ?, d1) with w(:, :, :, d1, d2) to get\n // y(yF, yR, yC, d2). ? = to be determined. : = across all\n // values in that axis.\n float dotProd = 0.0;\n for (int wF = 0; wF < ${filterDepth}; wF++) {\n int xF = xFCorner + wF * ${dilationDepth};\n\n if (xF < 0 || xF >= ${convInfo.inDepth}) {\n continue;\n }\n\n for (int wR = 0; wR < ${filterHeight}; wR++) {\n int xR = xRCorner + wR * ${dilationHeight};\n\n if (xR < 0 || xR >= ${convInfo.inHeight}) {\n continue;\n }\n\n for (int wC = 0; wC < ${filterWidth}; wC++) {\n int xC = xCCorner + wC * ${dilationWidth};\n\n if (xC < 0 || xC >= ${convInfo.inWidth}) {\n continue;\n }\n\n for (int d1 = 0; d1 < ${inputDepthNearestVec4}; d1 += 4) {\n vec4 xValues = vec4(\n getX(batch, xF, xR, xC, d1),\n getX(batch, xF, xR, xC, d1 + 1),\n getX(batch, xF, xR, xC, d1 + 2),\n getX(batch, xF, xR, xC, d1 + 3)\n );\n vec4 wValues = vec4(\n getW(wF, wR, wC, d1, d2),\n getW(wF, wR, wC, d1 + 1, d2),\n getW(wF, wR, wC, d1 + 2, d2),\n getW(wF, wR, wC, d1 + 3, d2)\n );\n\n dotProd += dot(xValues, wValues);\n }\n\n if (${inputDepthVec4Remainder === 1}) {\n dotProd +=\n getX(batch, xF, xR, xC, ${inputDepthNearestVec4}) *\n getW(wF, wR, wC, ${inputDepthNearestVec4}, d2);\n } else if (${inputDepthVec4Remainder === 2}) {\n vec2 xValues = vec2(\n getX(batch, xF, xR, xC, ${inputDepthNearestVec4}),\n getX(batch, xF, xR, xC, ${inputDepthNearestVec4} + 1)\n );\n vec2 wValues = vec2(\n getW(wF, wR, wC, ${inputDepthNearestVec4}, d2),\n getW(wF, wR, wC, ${inputDepthNearestVec4} + 1, d2)\n );\n dotProd += dot(xValues, wValues);\n } else if (${inputDepthVec4Remainder === 3}) {\n vec3 xValues = vec3(\n getX(batch, xF, xR, xC, ${inputDepthNearestVec4}),\n getX(batch, xF, xR, xC, ${inputDepthNearestVec4} + 1),\n getX(batch, xF, xR, xC, ${inputDepthNearestVec4} + 2)\n );\n vec3 wValues = vec3(\n getW(wF, wR, wC, ${inputDepthNearestVec4}, d2),\n getW(wF, wR, wC, ${inputDepthNearestVec4} + 1, d2),\n getW(wF, wR, wC, ${inputDepthNearestVec4} + 2, d2)\n );\n dotProd += dot(xValues, wValues);\n }\n }\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\n//# sourceMappingURL=conv_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class DepthwiseConv2DProgram {\n constructor(convInfo, addBias = false, activation = null, hasPreluActivation = false) {\n this.variableNames = ['x', 'W'];\n this.outputShape = convInfo.outShape;\n const xNumRows = convInfo.inHeight;\n const xNumCols = convInfo.inWidth;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const channelMul = convInfo.outChannels / convInfo.inChannels;\n let activationSnippet = '', applyActivationSnippet = '';\n if (activation) {\n if (hasPreluActivation) {\n activationSnippet = `float activation(float a) {\n float b = getPreluActivationWeightsAtOutCoords();\n ${activation}\n }`;\n }\n else {\n activationSnippet = `\n float activation(float x) {\n ${activation}\n }\n `;\n }\n applyActivationSnippet = `result = activation(result);`;\n }\n const addBiasSnippet = addBias ? 'result += getBiasAtOutCoords();' : '';\n if (addBias) {\n this.variableNames.push('bias');\n }\n if (hasPreluActivation) {\n this.variableNames.push('preluActivationWeights');\n }\n this.userCode = `\n ${activationSnippet}\n\n const ivec2 strides = ivec2(${strideHeight}, ${strideWidth});\n const ivec2 pads = ivec2(${padTop}, ${padLeft});\n\n void main() {\n ivec4 coords = getOutputCoords();\n int batch = coords.x;\n ivec2 xRCCorner = coords.yz * strides - pads;\n int d2 = coords.w;\n int d1 = d2 / ${channelMul};\n int q = d2 - d1 * ${channelMul};\n\n int xRCorner = xRCCorner.x;\n int xCCorner = xRCCorner.y;\n\n // Convolve x(?, ?, d1) with w(:, :, d1, q) to get y(yR, yC, d2).\n // ? = to be determined. : = across all values in that axis.\n float dotProd = 0.0;\n // TO DO(dsmilkov): Flatten the two for loops and vec4 the operations.\n for (int wR = 0; wR < ${filterHeight}; wR++) {\n int xR = xRCorner + wR * ${dilationHeight};\n\n if (xR < 0 || xR >= ${xNumRows}) {\n continue;\n }\n\n for (int wC = 0; wC < ${filterWidth}; wC++) {\n int xC = xCCorner + wC * ${dilationWidth};\n\n if (xC < 0 || xC >= ${xNumCols}) {\n continue;\n }\n\n float xVal = getX(batch, xR, xC, d1);\n float wVal = getW(wR, wC, d1, q);\n dotProd += xVal * wVal;\n }\n }\n\n float result = dotProd;\n ${addBiasSnippet}\n ${applyActivationSnippet}\n setOutput(result);\n }\n `;\n }\n}\n//# sourceMappingURL=conv_gpu_depthwise.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nexport class DepthwiseConvPacked2DProgram {\n constructor(convInfo, addBias = false, activation = null, hasPreluActivation = false) {\n this.variableNames = ['x', 'W'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = convInfo.outShape;\n const xNumRows = convInfo.inHeight;\n const xNumCols = convInfo.inWidth;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const texelsAcross = filterWidth;\n let mainLoop = `int xR; int xC; int xCOffset;`;\n for (let r = 0; r < filterHeight; r++) {\n for (let c = 0; c < filterWidth; c++) {\n mainLoop += `\n vec4 xTexelR${r}C${c * 2} = vec4(0.);\n vec4 wR${r}C${c} = vec4(0.);\n vec4 xR${r}C${c} = vec4(0.);`;\n }\n }\n /**\n * This vectorized implementation works by gathering the values needed for\n * each output channel's dot product into vec4's and then multiplying them\n * all together (this happens in the final double for-loop below). 
Most of\n * the main loop consists of constructing these vec4's with the minimum\n * number of texture2D calls, which means making use of all four returned\n * values from a texture2D call at once.\n */\n for (let r = 0; r < filterHeight; r++) {\n for (let texelC = 0; texelC < texelsAcross; texelC++) {\n const c = texelC * 2;\n mainLoop += `\n xR = xRCorner + ${r * dilationHeight};\n xC = xCCorner + ${c * dilationWidth};\n `;\n if (strideWidth === 1) {\n if (c < filterWidth) {\n // If padding is odd, the outer texels have to be composed.\n if (padLeft % 2 === 1) {\n // TODO: Ensure vec4 previous does not result in redundant sample,\n // and avoid setting xTexelRC's that exceed the boundary in the\n // first place rather than resetting them to vec4(0)).\n // To compute xCOffset:\n // - If padding is odd, we must add 1 to ensure we ask for an\n // even-numbered row.\n // - We subtract 2 to access the previous texel.\n mainLoop += `\n xCOffset = xC + 1;\n if(xR >= 0 && xR < ${xNumRows} && xCOffset >= 0 && xCOffset < ${xNumCols}) {\n xTexelR${r}C${c} = getX(batch, xR, xCOffset, d1);\n\n // Need to manually clear unused channels in case\n // we're reading from recycled texture.\n if(xCOffset + 1 >= ${xNumCols}) {\n xTexelR${r}C${c}.zw = vec2(0.);\n }\n } else {\n xTexelR${r}C${c} = vec4(0.);\n }\n\n xCOffset = xC + 1 - 2;\n if(xR >= 0 && xR < ${xNumRows} && xCOffset >= 0 && xCOffset < ${xNumCols}) {\n vec4 previous = getX(batch, xR, xCOffset, d1);\n\n // Need to manually clear unused channels in case\n // we're reading from recycled texture.\n if(xCOffset + 1 >= ${xNumCols}) {\n previous.zw = vec2(0.);\n }\n\n xR${r}C${c} = vec4(previous.zw, xTexelR${r}C${c}.xy);\n } else {\n xR${r}C${c} = vec4(0, 0, xTexelR${r}C${c}.xy);\n }\n `;\n }\n else {\n // Padding is even, so xRC corresponds to a single texel.\n mainLoop += `\n if(xR >= 0 && xR < ${xNumRows} && xC >= 0 && xC < ${xNumCols}) {\n xTexelR${r}C${c} = getX(batch, xR, xC, d1);\n } else {\n xTexelR${r}C${c} = vec4(0.);\n }\n\n xR${r}C${c} = xTexelR${r}C${c};\n `;\n }\n if (c + 1 < filterWidth) {\n // If dilation is even, the second entry should match the first\n // (either both are composed or both are single samples). 
But if\n // dilation is odd, then the second entry should be the opposite\n // of the first (if the first is composed, the second is a single\n // sample, and vice versa.)\n const nextTexelOffset = padLeft % 2 === 0 ?\n util.nearestLargerEven(dilationWidth) :\n dilationWidth;\n if ((dilationWidth % 2 === 0 && padLeft % 2 === 1) ||\n (dilationWidth % 2 !== 0 && padLeft % 2 !== 1)) {\n mainLoop += `\n xCOffset = xC + ${padLeft % 2} + ${nextTexelOffset};\n\n if(xR >= 0 && xR < ${xNumRows} &&\n xCOffset >= 0 && xCOffset < ${xNumCols}) {\n xTexelR${r}C${c + 2} = getX(batch, xR, xCOffset, d1);\n }\n `;\n // If dilation > 1 then the xRC's will not be able to share any\n // values, so each xRC will require two unique calls to getX.\n if (dilationWidth > 1) {\n mainLoop += `\n xCOffset -= 2;\n if(xR >= 0 && xR < ${xNumRows} &&\n xCOffset >= 0 && xCOffset < ${xNumCols}) {\n xTexelR${r}C${c} = getX(batch, xR, xCOffset, d1);\n } else {\n xTexelR${r}C${c} = vec4(0.);\n }\n `;\n }\n mainLoop += `\n xR${r}C${c + 1} = vec4(\n xTexelR${r}C${c}.zw, xTexelR${r}C${c + 2}.xy);\n `;\n }\n else {\n mainLoop += `\n xCOffset = xC + ${nextTexelOffset};\n\n if(xR >= 0 && xR < ${xNumRows} &&\n xCOffset >= 0 && xCOffset < ${xNumCols}) {\n xTexelR${r}C${c + 2} = getX(batch, xR, xCOffset, d1);\n }\n\n xR${r}C${c + 1} = xTexelR${r}C${c + 2};\n `;\n }\n }\n }\n }\n else { // stride > 1\n if (c < filterWidth) {\n mainLoop += `\n if(xR >= 0 && xR < ${xNumRows}) {\n `;\n // Depending on whether padLeft is even or odd, we want either the\n // xy or zw channels from X texels for xR${r}C${c}. If padLeft is\n // even, xR${r}C${c + 1} is simply the zw channels of texels we've\n // already sampled. But if padLeft is odd, xR${r}C{$c + 1}.zw will\n // need to come from the xy channels of a new texel, hence the `vec4\n // final` initialized below.\n if (padLeft % 2 === 1) {\n mainLoop += `\n xCOffset = xC + 1 - ${strideWidth};\n if(xCOffset >= 0 && xCOffset < ${xNumCols}) {\n xTexelR${r}C${c} = getX(batch, xR, xCOffset, d1);\n } else {\n xTexelR${r}C${c} = vec4(0.);\n }\n\n if(xC + 1 >= 0 && xC + 1 < ${xNumCols}) {\n xTexelR${r}C${c + 2} = getX(batch, xR, xC + 1, d1);\n } else {\n xTexelR${r}C${c + 2} = vec4(0.);\n }\n\n xR${r}C${c} = vec4(\n xTexelR${r}C${c}.zw, xTexelR${r}C${c + 2}.zw);\n `;\n if (c + 1 < filterWidth) {\n mainLoop += `\n vec4 final = vec4(0.);\n xCOffset = xC + 1 + ${strideWidth};\n if(xCOffset >= 0 && xCOffset < ${xNumCols}) {\n final = getX(batch, xR, xCOffset, d1);\n }\n xR${r}C${c + 1} = vec4(xTexelR${r}C${c + 2}.xy, final.xy);\n `;\n }\n }\n else {\n mainLoop += `\n if(xC >= 0 && xC < ${xNumCols}) {\n xTexelR${r}C${c} = getX(batch, xR, xC, d1);\n } else {\n xTexelR${r}C${c} = vec4(0.);\n }\n\n xCOffset = xC + ${strideWidth};\n if(xCOffset >= 0 && xCOffset < ${xNumCols}) {\n xTexelR${r}C${c + 2} = getX(batch, xR, xCOffset, d1);\n } else {\n xTexelR${r}C${c + 2} = vec4(0.);\n }\n\n xR${r}C${c} = vec4(\n xTexelR${r}C${c}.xy, xTexelR${r}C${c + 2}.xy);\n `;\n if (c + 1 < filterWidth) {\n mainLoop += `\n xR${r}C${c + 1} = vec4(\n xTexelR${r}C${c}.zw, xTexelR${r}C${c + 2}.zw);\n `;\n }\n }\n mainLoop += `}`;\n }\n }\n if (c < filterWidth) {\n mainLoop += `\n vec4 wTexelR${r}C${c} = getW(${r}, ${c}, d1, q);\n wR${r}C${c} = vec4(wTexelR${r}C${c}.xz, wTexelR${r}C${c}.xz);\n `;\n if (c + 1 < filterWidth) {\n mainLoop += `\n vec4 wTexelR${r}C${c + 1} = getW(${r}, ${c + 1}, d1, q);\n wR${r}C${c + 1} =\n vec4(wTexelR${r}C${c + 1}.xz, wTexelR${r}C${c + 1}.xz);`;\n }\n }\n }\n }\n for (let r = 0; r < filterHeight; r++) {\n 
for (let c = 0; c < filterWidth; c++) {\n mainLoop += `dotProd += xR${r}C${c} * wR${r}C${c};`;\n }\n }\n let activationSnippet = '', applyActivationSnippet = '';\n if (activation) {\n if (hasPreluActivation) {\n activationSnippet = `vec4 activation(vec4 a) {\n vec4 b = getPreluActivationWeightsAtOutCoords();\n ${activation}\n }`;\n }\n else {\n activationSnippet = `vec4 activation(vec4 x) {\n ${activation}\n }`;\n }\n applyActivationSnippet = `result = activation(result);`;\n }\n const addBiasSnippet = addBias ? 'result += getBiasAtOutCoords();' : '';\n if (addBias) {\n this.variableNames.push('bias');\n }\n if (hasPreluActivation) {\n this.variableNames.push('preluActivationWeights');\n }\n this.userCode = `\n ${activationSnippet}\n\n const ivec2 strides = ivec2(${strideHeight}, ${strideWidth});\n const ivec2 pads = ivec2(${padTop}, ${padLeft});\n\n void main() {\n\n ivec4 coords = getOutputCoords();\n int batch = coords.x;\n ivec2 xRCCorner = coords.yz * strides - pads;\n int d2 = coords.w;\n int d1 = d2;\n int q = 0;\n int xRCorner = xRCCorner.x;\n int xCCorner = xRCCorner.y;\n\n vec4 dotProd = vec4(0.);\n\n ${mainLoop}\n\n vec4 result = dotProd;\n ${addBiasSnippet}\n ${applyActivationSnippet}\n setOutput(result);\n }\n `;\n }\n}\n//# sourceMappingURL=conv_packed_gpu_depthwise.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class CropAndResizeProgram {\n constructor(imageShape, boxShape, cropSize, method, extrapolationValue) {\n this.variableNames = ['Image', 'Boxes', 'BoxInd'];\n this.outputShape = [];\n const [batch, imageHeight, imageWidth, depth] = imageShape;\n const [numBoxes,] = boxShape;\n const [cropHeight, cropWidth] = cropSize;\n this.outputShape = [numBoxes, cropHeight, cropWidth, depth];\n const methodId = method === 'bilinear' ? 
1 : 0;\n const [inputHeightFloat, inputWidthFloat] = [`${imageHeight - 1}.0`, `${imageWidth - 1}.0`];\n const [heightRatio, heightScale, inY] = cropHeight > 1 ?\n [\n `${(imageHeight - 1) / (cropHeight - 1)}`,\n '(y2-y1) * height_ratio',\n `y1*${inputHeightFloat} + float(y)*(height_scale)`,\n ] :\n [\n '0.0',\n '0.0',\n `0.5 * (y1+y2) * ${inputHeightFloat}`,\n ];\n const [widthRatio, widthScale, inX] = cropWidth > 1 ?\n [\n `${(imageWidth - 1) / (cropWidth - 1)}`,\n '(x2-x1) * width_ratio',\n `x1*${inputWidthFloat} + float(x)*(width_scale)`,\n ] :\n [\n '0.0',\n '0.0',\n `0.5 * (x1+x2) * ${inputWidthFloat}`,\n ];\n // Reference implementation\n // tslint:disable-next-line:max-line-length\n // https://github.com/tensorflow/tensorflow/blob/master/tensorflow/core/kernels/crop_and_resize_op_gpu.cu.cc\n this.userCode = `\n const float height_ratio = float(${heightRatio});\n const float width_ratio = float(${widthRatio});\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int y = coords[1];\n int x = coords[2];\n int d = coords[3];\n\n // get box vals\n float y1 = getBoxes(b,0);\n float x1 = getBoxes(b,1);\n float y2 = getBoxes(b,2);\n float x2 = getBoxes(b,3);\n\n // get image in batch index\n int bInd = round(getBoxInd(b));\n if(bInd < 0 || bInd >= ${batch}) {\n return;\n }\n\n float height_scale = ${heightScale};\n float width_scale = ${widthScale};\n\n float in_y = ${inY};\n if( in_y < 0.0 || in_y > ${inputHeightFloat} ) {\n setOutput(float(${extrapolationValue}));\n return;\n }\n float in_x = ${inX};\n if( in_x < 0.0 || in_x > ${inputWidthFloat} ) {\n setOutput(float(${extrapolationValue}));\n return;\n }\n\n vec2 sourceFracIndexCR = vec2(in_x,in_y);\n if(${methodId} == 1) {\n // Compute the four integer indices.\n ivec2 sourceFloorCR = ivec2(sourceFracIndexCR);\n ivec2 sourceCeilCR = ivec2(ceil(sourceFracIndexCR));\n\n float topLeft = getImage(b, sourceFloorCR.y, sourceFloorCR.x, d);\n float bottomLeft = getImage(b, sourceCeilCR.y, sourceFloorCR.x, d);\n float topRight = getImage(b, sourceFloorCR.y, sourceCeilCR.x, d);\n float bottomRight = getImage(b, sourceCeilCR.y, sourceCeilCR.x, d);\n\n vec2 fracCR = sourceFracIndexCR - vec2(sourceFloorCR);\n\n float top = topLeft + (topRight - topLeft) * fracCR.x;\n float bottom = bottomLeft + (bottomRight - bottomLeft) * fracCR.x;\n float newValue = top + (bottom - top) * fracCR.y;\n setOutput(newValue);\n } else {\n // Compute the coordinators of nearest neighbor point.\n ivec2 sourceNearestCR = ivec2(floor(\n sourceFracIndexCR + vec2(0.5,0.5)));\n float newValue = getImage(b, sourceNearestCR.y, sourceNearestCR.x, d);\n setOutput(newValue);\n }\n }\n `;\n }\n}\n//# sourceMappingURL=crop_and_resize_gpu.js.map", "import { getCoordsDataType } from './shader_compiler';\nexport class CumSumProgram {\n constructor(shape, exclusive, reverse) {\n this.variableNames = ['x'];\n this.outputShape = shape;\n const rank = shape.length;\n const val = exclusive ? '0.0' : `getX(${getCoords(rank, 'coords')})`;\n const length = shape[shape.length - 1];\n let condition = '';\n let idxString = '';\n // When exclusive is set, the cumsum op becomes roll op that copies the\n // value from the previous index based on the direction specified by the\n // reverse flag.\n if (exclusive) {\n condition = reverse ? `end != ${length - 1}` : 'end != 0';\n idxString = reverse ? 'end + 1' : 'end - 1';\n }\n else {\n condition = reverse ? `end + pow2 < ${length}` : 'end >= pow2';\n idxString = (reverse ? 
'end + pow2' : 'end - pow2');\n }\n this.userCode = `\n uniform float index;\n void main() {\n ${getCoordsDataType(rank)} coords = getOutputCoords();\n int end = ${getFinalCoord(rank, 'coords')};\n float val = ${val};\n int pow2 = int(pow(2.0, index));\n if (${condition}) {\n int idx = ${idxString};\n ${getFinalCoord(rank, 'coords')} = idx;\n val += getX(${getCoords(rank, 'coords')});\n }\n setOutput(val);\n }\n `;\n }\n getCustomSetupFunc(index) {\n return (gpgpu, webGLProgram) => {\n if (this.index == null) {\n this.index = gpgpu.getUniformLocation(webGLProgram, 'index');\n }\n gpgpu.gl.uniform1f(this.index, index);\n };\n }\n}\nfunction getCoords(rank, name) {\n if (rank === 1) {\n return `${name}`;\n }\n else if (rank === 2) {\n return `${name}.x, ${name}.y`;\n }\n else if (rank === 3) {\n return `${name}.x, ${name}.y, ${name}.z`;\n }\n else if (rank === 4) {\n return `${name}.x, ${name}.y, ${name}.z, ${name}.w`;\n }\n else {\n throw Error(`Cumulative sum for rank ${rank} is not yet supported`);\n }\n}\nfunction getFinalCoord(rank, name) {\n if (rank === 1) {\n return `${name}`;\n }\n else if (rank === 2) {\n return `${name}.y`;\n }\n else if (rank === 3) {\n return `${name}.z`;\n }\n else if (rank === 4) {\n return `${name}.w`;\n }\n else {\n throw Error(`Cumulative sum for rank ${rank} is not yet supported`);\n }\n}\n//# sourceMappingURL=cumsum_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getGlslDifferences } from './glsl_version';\nimport * as shader_util from './shader_compiler_util';\nimport { getDenseTexShape, PackingScheme } from './tex_util';\nexport class DecodeMatrixProgram {\n constructor(outputShape) {\n this.variableNames = ['A'];\n this.packedInputs = false;\n this.packedOutput = true;\n this.outPackingScheme = PackingScheme.DENSE;\n const texShape = getDenseTexShape(outputShape);\n const glsl = getGlslDifferences();\n this.outputShape = outputShape;\n this.userCode = `\n ivec3 outCoordsFromFlatIndex(int index) {\n ${shader_util.getLogicalCoordinatesFromFlatIndex(['r', 'c', 'd'], outputShape)}\n return ivec3(r, c, d);\n }\n\n void main() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = 4 * (resTexRC.x * ${texShape[1]} + resTexRC.y);\n\n vec4 result = vec4(0.);\n\n for (int i=0; i<4; i++) {\n int flatIndex = index + i;\n ivec3 rc = outCoordsFromFlatIndex(flatIndex);\n result[i] = getA(rc.x, rc.y, rc.z);\n }\n\n ${glsl.output} = result;\n }\n `;\n }\n}\n//# sourceMappingURL=decode_matrix_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getGlslDifferences } from './glsl_version';\nimport * as shader_util from './shader_compiler_util';\nimport { getDenseTexShape, PackingScheme } from './tex_util';\nexport class DecodeMatrixPackedProgram {\n constructor(outputShape) {\n this.variableNames = ['A'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outPackingScheme = PackingScheme.DENSE;\n const texShape = getDenseTexShape(outputShape);\n const glsl = getGlslDifferences();\n this.outputShape = outputShape;\n this.userCode = `\n ivec3 outCoordsFromFlatIndex(int index) {\n ${shader_util.getLogicalCoordinatesFromFlatIndex(['r', 'c', 'd'], outputShape)}\n return ivec3(r, c, d);\n }\n\n void main() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = 4 * (resTexRC.x * ${texShape[1]} + resTexRC.y);\n\n vec4 result = vec4(0.);\n\n for (int i=0; i<4; i++) {\n int flatIndex = index + i;\n ivec3 rc = outCoordsFromFlatIndex(flatIndex);\n result[i] = getChannel(getA(rc.x, rc.y, rc.z), vec2(rc.y, rc.z));\n }\n\n ${glsl.output} = result;\n }\n `;\n }\n}\n//# sourceMappingURL=decode_matrix_packed_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class DepthToSpaceProgram {\n constructor(outputShape, blockSize, dataFormat) {\n this.variableNames = ['x'];\n this.outputShape = [];\n this.outputShape = outputShape;\n this.blockSize = blockSize;\n this.dataFormat = dataFormat;\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int h = ${this.getHeightCoordString()};\n int w = ${this.getWidthCoordString()};\n int d = ${this.getDepthCoordString()};\n\n int in_h = h / ${blockSize};\n int offset_h = imod(h, ${blockSize});\n int in_w = w / ${blockSize};\n int offset_w = imod(w, ${blockSize});\n int offset_d = (offset_h * ${blockSize} + offset_w) *\n ${this.getOutputDepthSize()};\n int in_d = d + offset_d;\n\n float result = ${this.getInputSamplingString()};\n setOutput(result);\n }\n `;\n }\n getHeightCoordString() {\n if (this.dataFormat === 'NHWC') {\n return `coords[1]`;\n }\n else {\n return `coords[2]`;\n }\n }\n getWidthCoordString() {\n if (this.dataFormat === 'NHWC') {\n return `coords[2]`;\n }\n else {\n return `coords[3]`;\n }\n }\n getDepthCoordString() {\n if (this.dataFormat === 'NHWC') {\n return `coords[3]`;\n }\n else {\n return `coords[1]`;\n }\n }\n getOutputDepthSize() {\n if (this.dataFormat === 'NHWC') {\n return this.outputShape[3];\n }\n else {\n return this.outputShape[1];\n }\n }\n getInputSamplingString() {\n if (this.dataFormat === 'NHWC') {\n return `getX(b, in_h, in_w, in_d)`;\n }\n else {\n return `getX(b, in_d, in_h, in_w)`;\n }\n }\n}\n//# sourceMappingURL=depth_to_space_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class DiagProgram {\n constructor(size) {\n this.variableNames = ['X'];\n this.outputShape = [size, size];\n this.userCode = `\n void main() {\n ivec2 coords = getOutputCoords();\n float val = coords[0] == coords[1] ? getX(coords[0]) : 0.0;\n setOutput(val);\n }\n `;\n }\n}\n//# sourceMappingURL=diag_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getGlslDifferences } from './glsl_version';\nimport { ENCODE_FLOAT_SNIPPET } from './shader_compiler_util';\nimport { TextureUsage } from './tex_util';\nexport class EncodeFloatProgram {\n constructor(outputShape) {\n this.variableNames = ['A'];\n this.outTexUsage = TextureUsage.DOWNLOAD;\n const glsl = getGlslDifferences();\n this.outputShape = outputShape;\n this.userCode = `\n ${ENCODE_FLOAT_SNIPPET}\n\n void main() {\n float x = getAAtOutCoords();\n ${glsl.output} = encode_float(x);\n }\n `;\n }\n}\n//# sourceMappingURL=encode_float_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getGlslDifferences } from './glsl_version';\nimport { ENCODE_FLOAT_SNIPPET } from './shader_compiler_util';\nimport { TextureUsage } from './tex_util';\nexport class EncodeFloatPackedProgram {\n constructor(outputShape) {\n this.variableNames = ['A'];\n this.packedInputs = true;\n this.packedOutput = false;\n this.outTexUsage = TextureUsage.DOWNLOAD;\n const glsl = getGlslDifferences();\n this.outputShape = outputShape;\n this.userCode = `\n ${ENCODE_FLOAT_SNIPPET}\n\n void main() {\n ivec3 coords = getOutputCoords();\n float x = getChannel(getAAtOutCoords(), vec2(coords.y, coords.z));\n ${glsl.output} = encode_float(x);\n }\n `;\n }\n}\n//# sourceMappingURL=encode_float_packed_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getGlslDifferences } from './glsl_version';\nimport * as shader_util from './shader_compiler_util';\nexport class EncodeMatrixProgram {\n constructor(outputShape, texShape, inputIsUnsignedByte = false) {\n this.variableNames = ['A'];\n const glsl = getGlslDifferences();\n const [height, width] = texShape;\n this.outputShape = outputShape;\n let output = `result`;\n if (inputIsUnsignedByte) {\n output = `floor(result * 255. + 0.5)`;\n }\n this.userCode = `\n ${shader_util.getFlatIndexFrom3D(outputShape)}\n\n void main() {\n ivec3 coords = getOutputCoords();\n\n int flatIndex = getFlatIndex(coords);\n int offset = imod(flatIndex, 4);\n\n flatIndex = idiv(flatIndex, 4, 1.);\n\n int r = flatIndex / ${width};\n int c = imod(flatIndex, ${width});\n vec2 uv = (vec2(c, r) + halfCR) / vec2(${width}.0, ${height}.0);\n vec4 values = ${glsl.texture2D}(A, uv);\n\n float result;\n\n if(offset == 0) {\n result = values[0];\n } else if(offset == 1) {\n result = values[1];\n } else if(offset == 2) {\n result = values[2];\n } else {\n result = values[3];\n }\n\n ${glsl.output} = vec4(${output}, 0., 0., 0.);\n }\n `;\n }\n}\n//# sourceMappingURL=encode_matrix_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getGlslDifferences } from './glsl_version';\nimport * as shader_util from './shader_compiler_util';\n/*\nThis is how the shader encodes a tensor with shape = [2, 3, 5]\n(indices are [batch, row, col]).\n\n000|001 002|003 004|xxx 020|021 022|023 024|xxx\n------- ------- ------- ------- ------- -------\n010|011 012|013 014|xxx xxx|xxx xxx|xxx xxx|xxx\n\n100|101 102|103 104|xxx 120|121 122|123 124|xxx\n------- ------- ------- ------- ------- -------\n110|111 112|113 114|xxx xxx|xxx xxx|xxx xxx|xxx\n\nSingle texels contain only values from the same batch, and from adjacent rows\nand columns.\n */\nexport class EncodeMatrixPackedProgram {\n constructor(outputShape, texShape, inputIsUnsignedByte = false) {\n this.variableNames = ['A'];\n this.packedInputs = false;\n this.packedOutput = true;\n const glsl = getGlslDifferences();\n const [height, width] = texShape;\n this.outputShape = outputShape;\n let mainLoop = '';\n let output = 'result';\n if (inputIsUnsignedByte) {\n output = 'floor(result * 255. + 0.5)';\n }\n for (let row = 0; row <= 1; row++) {\n for (let col = 0; col <= 1; col++) {\n const channel = row * 2 + col;\n mainLoop += `\n localCoords = coords;\n if(localCoords[2] + ${col} < ${outputShape[2]}) {\n localCoords[2] += ${col};\n if(localCoords[1] + ${row} < ${outputShape[1]}) {\n localCoords[1] += ${row};\n\n flatIndex = getFlatIndex(localCoords);\n offset = imod(flatIndex, 4);\n\n flatIndex = idiv(flatIndex, 4, 1.);\n\n r = flatIndex / ${width};\n c = imod(flatIndex, ${width});\n uv = (vec2(c, r) + halfCR) / vec2(${width}.0, ${height}.0);\n values = ${glsl.texture2D}(A, uv);\n\n if(offset == 0) {\n result[${channel}] = values[0];\n } else if(offset == 1) {\n result[${channel}] = values[1];\n } else if(offset == 2) {\n result[${channel}] = values[2];\n } else {\n result[${channel}] = values[3];\n }\n }\n }\n `;\n }\n }\n this.userCode = `\n ${shader_util.getFlatIndexFrom3D(outputShape)}\n\n void main() {\n ivec3 coords = getOutputCoords();\n\n vec4 result = vec4(0.);\n int flatIndex, r, c, offset;\n ivec3 localCoords;\n vec2 uv;\n vec4 values;\n\n ${mainLoop}\n\n ${glsl.output} = ${output};\n }\n `;\n }\n}\n//# sourceMappingURL=encode_matrix_packed_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class FillProgram {\n constructor(shape, value) {\n this.outputShape = [];\n this.variableNames = ['x'];\n this.outputShape = shape;\n this.userCode = `\n uniform float value;\n void main() {\n // Input can be obtained from uniform value.\n setOutput(value);\n }\n `;\n }\n getCustomSetupFunc(value) {\n return (gpgpu, webGLProgram) => {\n if (this.valueLoc == null) {\n this.valueLoc = gpgpu.getUniformLocationNoThrow(webGLProgram, 'value');\n }\n gpgpu.gl.uniform1f(this.valueLoc, value);\n };\n }\n}\n//# sourceMappingURL=fill_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getCoordsDataType } from './shader_compiler';\nexport class GatherProgram {\n constructor(aShape, indicesLength, axis) {\n this.variableNames = ['A', 'indices'];\n const outputShape = aShape.slice();\n outputShape[axis] = indicesLength;\n this.outputShape = outputShape;\n this.rank = outputShape.length;\n const dtype = getCoordsDataType(this.rank);\n const sourceCoords = getSourceCoords(aShape, axis);\n this.userCode = `\n void main() {\n ${dtype} resRC = getOutputCoords();\n setOutput(getA(${sourceCoords}));\n }\n `;\n }\n}\nfunction getSourceCoords(aShape, axis) {\n const rank = aShape.length;\n if (rank > 4) {\n throw Error(`Gather for rank ${rank} is not yet supported`);\n }\n if (rank === 1) {\n return `int(getIndices(resRC))`;\n }\n const currentCoords = ['resRC.x', 'resRC.y', 'resRC.z', 'resRC.w'];\n const sourceCoords = [];\n for (let i = 0; i < aShape.length; i++) {\n if (i === axis) {\n sourceCoords.push(`int(getIndices(${currentCoords[i]}))`);\n }\n else {\n sourceCoords.push(`${currentCoords[i]}`);\n }\n }\n return sourceCoords.join();\n}\n//# sourceMappingURL=gather_gpu.js.map", "import { getCoordsDataType } from './shader_compiler';\nexport class GatherNDProgram {\n constructor(sliceDim, strides, shape) {\n this.sliceDim = sliceDim;\n this.strides = strides;\n this.variableNames = ['x', 'indices'];\n this.outputShape = shape;\n const stridesType = getCoordsDataType(strides.length);\n const dtype = getCoordsDataType(shape.length);\n const strideString = this.sliceDim > 1 ? 
'strides[j]' : 'strides';\n this.userCode = `\n ${stridesType} strides = ${stridesType}(${this.strides});\n void main() {\n ${dtype} coords = getOutputCoords();\n int flattenIndex = 0;\n for (int j = 0; j < ${this.sliceDim}; j++) {\n int index = round(getIndices(coords[0], j));\n flattenIndex += index * ${strideString};\n }\n setOutput(getX(flattenIndex, coords[1]));\n }\n `;\n }\n}\n//# sourceMappingURL=gather_nd_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getGlslDifferences } from './glsl_version';\nimport * as tex_util from './tex_util';\nimport * as webgl_util from './webgl_util';\nexport function createVertexShader(gl) {\n const glsl = getGlslDifferences();\n const vertexShaderSource = `${glsl.version}\n precision highp float;\n ${glsl.attribute} vec3 clipSpacePos;\n ${glsl.attribute} vec2 uv;\n ${glsl.varyingVs} vec2 resultUV;\n\n void main() {\n gl_Position = vec4(clipSpacePos, 1);\n resultUV = uv;\n }`;\n return webgl_util.createVertexShader(gl, vertexShaderSource);\n}\nexport function createVertexBuffer(gl) {\n // [x y z u v] * [upper-left, lower-left, upper-right, lower-right]\n const vertexArray = new Float32Array([-1, 1, 0, 0, 1, -1, -1, 0, 0, 0, 1, 1, 0, 1, 1, 1, -1, 0, 1, 0]);\n return webgl_util.createStaticVertexBuffer(gl, vertexArray);\n}\nexport function createIndexBuffer(gl) {\n // OpenGL (and WebGL) have \"CCW == front\" winding\n const triangleVertexIndices = new Uint16Array([0, 1, 2, 2, 1, 3]);\n return webgl_util.createStaticIndexBuffer(gl, triangleVertexIndices);\n}\nfunction createAndConfigureTexture(gl, width, height, internalFormat, textureFormat, textureType) {\n webgl_util.validateTextureSize(width, height);\n const texture = webgl_util.createTexture(gl);\n const tex2d = gl.TEXTURE_2D;\n webgl_util.callAndCheck(gl, () => gl.bindTexture(tex2d, texture));\n webgl_util.callAndCheck(gl, () => gl.texParameteri(tex2d, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE));\n webgl_util.callAndCheck(gl, () => gl.texParameteri(tex2d, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE));\n webgl_util.callAndCheck(gl, () => gl.texParameteri(tex2d, gl.TEXTURE_MIN_FILTER, gl.NEAREST));\n webgl_util.callAndCheck(gl, () => gl.texParameteri(tex2d, gl.TEXTURE_MAG_FILTER, gl.NEAREST));\n webgl_util.callAndCheck(gl, () => gl.texImage2D(tex2d, 0, internalFormat, width, height, 0, textureFormat, textureType, null));\n webgl_util.callAndCheck(gl, () => gl.bindTexture(gl.TEXTURE_2D, null));\n return texture;\n}\nexport function getInternalFormatForFloat32MatrixTexture(textureConfig) {\n return textureConfig.internalFormatFloat;\n}\nexport function createFloat32MatrixTexture(gl, rows, columns, textureConfig) {\n const [width, height] = tex_util.getUnpackedMatrixTextureShapeWidthHeight(rows, columns);\n return createAndConfigureTexture(gl, width, height, getInternalFormatForFloat32MatrixTexture(textureConfig), 
textureConfig.textureFormatFloat, gl.FLOAT);\n}\nexport function getInternalFormatForFloat16MatrixTexture(textureConfig) {\n return textureConfig.internalFormatHalfFloat;\n}\nexport function createFloat16MatrixTexture(gl, rows, columns, textureConfig) {\n const [width, height] = tex_util.getUnpackedMatrixTextureShapeWidthHeight(rows, columns);\n return createAndConfigureTexture(gl, width, height, getInternalFormatForFloat16MatrixTexture(textureConfig), textureConfig.textureFormatFloat, textureConfig.textureTypeHalfFloat);\n}\nexport function getInternalFormatForUnsignedBytesMatrixTexture(textureConfig) {\n return textureConfig.downloadTextureFormat;\n}\nexport function createUnsignedBytesMatrixTexture(gl, rows, columns, textureConfig) {\n const [width, height] = tex_util.getUnpackedMatrixTextureShapeWidthHeight(rows, columns);\n return createAndConfigureTexture(gl, width, height, getInternalFormatForUnsignedBytesMatrixTexture(textureConfig), gl.RGBA, gl.UNSIGNED_BYTE);\n}\nexport function getInternalFormatForPackedMatrixTexture(textureConfig) {\n return textureConfig.internalFormatPackedFloat;\n}\nexport function createPackedMatrixTexture(gl, rows, columns, textureConfig) {\n const [width, height] = tex_util.getPackedMatrixTextureShapeWidthHeight(rows, columns);\n return createAndConfigureTexture(gl, width, height, getInternalFormatForPackedMatrixTexture(textureConfig), gl.RGBA, gl.FLOAT);\n}\nexport function getInternalFormatForFloat16PackedMatrixTexture(textureConfig) {\n return textureConfig.internalFormatPackedHalfFloat;\n}\nexport function createFloat16PackedMatrixTexture(gl, rows, columns, textureConfig) {\n const [width, height] = tex_util.getPackedMatrixTextureShapeWidthHeight(rows, columns);\n return createAndConfigureTexture(gl, width, height, getInternalFormatForFloat16PackedMatrixTexture(textureConfig), gl.RGBA, textureConfig.textureTypeHalfFloat);\n}\nexport function bindVertexProgramAttributeStreams(gl, program, vertexBuffer) {\n const posOffset = 0; // x is the first buffer element\n const uvOffset = 3 * 4; // uv comes after [x y z]\n const stride = (3 * 4) + (2 * 4); // xyz + uv, each entry is 4-byte float.\n webgl_util.callAndCheck(gl, () => gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer));\n const success = webgl_util.bindVertexBufferToProgramAttribute(gl, program, 'clipSpacePos', vertexBuffer, 3, stride, posOffset);\n return success &&\n webgl_util.bindVertexBufferToProgramAttribute(gl, program, 'uv', vertexBuffer, 2, stride, uvOffset);\n}\nexport function uploadDenseMatrixToTexture(gl, texture, width, height, data, textureConfig) {\n webgl_util.callAndCheck(gl, () => gl.bindTexture(gl.TEXTURE_2D, texture));\n let dataForUpload, texelDataType, internalFormat;\n if (data instanceof Uint8Array) {\n dataForUpload = new Uint8Array(width * height * 4);\n texelDataType = gl.UNSIGNED_BYTE;\n internalFormat = gl.RGBA;\n }\n else {\n dataForUpload = new Float32Array(width * height * 4);\n texelDataType = gl.FLOAT;\n internalFormat = textureConfig.internalFormatPackedFloat;\n }\n dataForUpload.set(data);\n webgl_util.callAndCheck(gl, () => gl.texImage2D(gl.TEXTURE_2D, 0, internalFormat, width, height, 0, gl.RGBA, texelDataType, dataForUpload));\n webgl_util.callAndCheck(gl, () => gl.bindTexture(gl.TEXTURE_2D, null));\n}\nexport function uploadPixelDataToTexture(gl, texture, pixels) {\n webgl_util.callAndCheck(gl, () => gl.bindTexture(gl.TEXTURE_2D, texture));\n if (pixels.data instanceof Uint8Array) {\n webgl_util.callAndCheck(gl, () => gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 
pixels.width, pixels.height, 0, gl.RGBA, gl.UNSIGNED_BYTE, pixels.data));\n }\n else {\n webgl_util.callAndCheck(gl, () => gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, pixels));\n }\n webgl_util.callAndCheck(gl, () => gl.bindTexture(gl.TEXTURE_2D, null));\n}\nexport function createBufferFromOutputTexture(gl2, rows, columns, textureConfig) {\n // Create and bind the buffer.\n const buffer = gl2.createBuffer();\n webgl_util.callAndCheck(gl2, () => gl2.bindBuffer(gl2.PIXEL_PACK_BUFFER, buffer));\n // Initialize the buffer to the size of the texture in bytes.\n const bytesPerFloat = 4;\n const valuesPerTexel = 4;\n const bufferSizeBytes = bytesPerFloat * valuesPerTexel * rows * columns;\n webgl_util.callAndCheck(gl2, () => gl2.bufferData(gl2.PIXEL_PACK_BUFFER, bufferSizeBytes, gl2.STREAM_READ));\n // Enqueue a command on the GPU command queue to copy of texture into the\n // buffer.\n webgl_util.callAndCheck(gl2, () => gl2.readPixels(0, 0, columns, rows, gl2.RGBA, gl2.FLOAT, 0));\n webgl_util.callAndCheck(gl2, () => gl2.bindBuffer(gl2.PIXEL_PACK_BUFFER, null));\n return buffer;\n}\nexport function downloadFloat32MatrixFromBuffer(gl, buffer, size) {\n const gl2 = gl;\n const downloadTarget = new Float32Array(size);\n gl2.bindBuffer(gl2.PIXEL_PACK_BUFFER, buffer);\n gl2.getBufferSubData(gl2.PIXEL_PACK_BUFFER, 0, downloadTarget);\n gl2.bindBuffer(gl2.PIXEL_PACK_BUFFER, null);\n return downloadTarget;\n}\nexport function downloadByteEncodedFloatMatrixFromOutputTexture(gl, rows, columns, textureConfig) {\n const [w, h] = tex_util.getUnpackedMatrixTextureShapeWidthHeight(rows, columns);\n const numChannels = 4;\n const downloadTarget = new Uint8Array(tex_util.getUnpackedArraySizeFromMatrixSize(rows * columns, numChannels));\n webgl_util.callAndCheck(gl, () => gl.readPixels(0, 0, w, h, textureConfig.downloadTextureFormat, gl.UNSIGNED_BYTE, downloadTarget));\n // By wrapping the buffer in a Float32Array, we use native browser IEEE 754\n // decoding of the 4 bytes that back each 32 bit float.\n return new Float32Array(downloadTarget.buffer);\n}\nexport function downloadPackedMatrixFromBuffer(gl, buffer, batch, rows, cols, physicalRows, physicalCols, textureConfig) {\n const gl2 = gl;\n const downloadTarget = new Float32Array(tex_util.getPackedRGBAArraySizeFromMatrixShape(physicalRows, physicalCols));\n gl2.bindBuffer(gl2.PIXEL_PACK_BUFFER, buffer);\n gl2.getBufferSubData(gl2.PIXEL_PACK_BUFFER, 0, downloadTarget);\n gl2.bindBuffer(gl2.PIXEL_PACK_BUFFER, null);\n return downloadTarget;\n}\nexport function downloadMatrixFromPackedOutputTexture(gl, physicalRows, physicalCols) {\n const packedRGBA = new Float32Array(physicalRows * physicalCols * 4);\n webgl_util.callAndCheck(gl, () => gl.readPixels(0, 0, physicalCols, physicalRows, gl.RGBA, gl.FLOAT, packedRGBA));\n return packedRGBA;\n}\n//# sourceMappingURL=gpgpu_util.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env, util } from '@tensorflow/tfjs-core';\nimport { getWebGLContext, setWebGLContext } from './canvas_util';\nimport * as gpgpu_util from './gpgpu_util';\nimport * as tex_util from './tex_util';\nimport * as webgl_util from './webgl_util';\nexport class GPGPUContext {\n constructor(gl) {\n this.outputTexture = null;\n this.program = null;\n this.disposed = false;\n this.vertexAttrsAreBound = false;\n this.itemsToPoll = [];\n const glVersion = env().getNumber('WEBGL_VERSION');\n if (gl != null) {\n this.gl = gl;\n setWebGLContext(glVersion, gl);\n }\n else {\n this.gl = getWebGLContext(glVersion);\n }\n // WebGL 2.0 enables texture floats without an extension.\n let COLOR_BUFFER_FLOAT = 'WEBGL_color_buffer_float';\n const COLOR_BUFFER_HALF_FLOAT = 'EXT_color_buffer_half_float';\n if (env().getNumber('WEBGL_VERSION') === 1) {\n const TEXTURE_FLOAT = 'OES_texture_float';\n const TEXTURE_HALF_FLOAT = 'OES_texture_half_float';\n this.textureFloatExtension =\n webgl_util.getExtensionOrThrow(this.gl, TEXTURE_FLOAT);\n if (webgl_util.hasExtension(this.gl, TEXTURE_HALF_FLOAT)) {\n this.textureHalfFloatExtension =\n webgl_util.getExtensionOrThrow(this.gl, TEXTURE_HALF_FLOAT);\n }\n else if (env().get('WEBGL_FORCE_F16_TEXTURES')) {\n throw new Error('GL context does not support half float textures, yet the ' +\n 'environment flag WEBGL_FORCE_F16_TEXTURES is set to true.');\n }\n this.colorBufferFloatExtension = this.gl.getExtension(COLOR_BUFFER_FLOAT);\n if (webgl_util.hasExtension(this.gl, COLOR_BUFFER_HALF_FLOAT)) {\n this.colorBufferHalfFloatExtension =\n webgl_util.getExtensionOrThrow(this.gl, COLOR_BUFFER_HALF_FLOAT);\n }\n else if (env().get('WEBGL_FORCE_F16_TEXTURES')) {\n throw new Error('GL context does not support color renderable half floats, yet ' +\n 'the environment flag WEBGL_FORCE_F16_TEXTURES is set to true.');\n }\n }\n else {\n COLOR_BUFFER_FLOAT = 'EXT_color_buffer_float';\n if (webgl_util.hasExtension(this.gl, COLOR_BUFFER_FLOAT)) {\n this.colorBufferFloatExtension =\n this.gl.getExtension(COLOR_BUFFER_FLOAT);\n }\n else if (webgl_util.hasExtension(this.gl, COLOR_BUFFER_HALF_FLOAT)) {\n this.colorBufferHalfFloatExtension =\n this.gl.getExtension(COLOR_BUFFER_HALF_FLOAT);\n }\n else {\n throw new Error('GL context does not support color renderable floats');\n }\n }\n this.vertexBuffer = gpgpu_util.createVertexBuffer(this.gl);\n this.indexBuffer = gpgpu_util.createIndexBuffer(this.gl);\n this.framebuffer = webgl_util.createFramebuffer(this.gl);\n this.textureConfig =\n tex_util.getTextureConfig(this.gl, this.textureHalfFloatExtension);\n }\n get debug() {\n return env().getBool('DEBUG');\n }\n dispose() {\n if (this.disposed) {\n return;\n }\n if (this.program != null) {\n console.warn('Disposing a GPGPUContext that still has a bound WebGLProgram.' 
+\n ' This is probably a resource leak, delete the program with ' +\n 'GPGPUContext.deleteProgram before disposing.');\n }\n if (this.outputTexture != null) {\n console.warn('Disposing a GPGPUContext that still has a bound output matrix ' +\n 'texture. This is probably a resource leak, delete the output ' +\n 'matrix texture with GPGPUContext.deleteMatrixTexture before ' +\n 'disposing.');\n }\n const gl = this.gl;\n webgl_util.callAndCheck(gl, () => gl.finish());\n webgl_util.callAndCheck(gl, () => gl.bindFramebuffer(gl.FRAMEBUFFER, null));\n webgl_util.callAndCheck(gl, () => gl.deleteFramebuffer(this.framebuffer));\n webgl_util.callAndCheck(gl, () => gl.bindBuffer(gl.ARRAY_BUFFER, null));\n webgl_util.callAndCheck(gl, () => gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null));\n webgl_util.callAndCheck(gl, () => gl.deleteBuffer(this.indexBuffer));\n this.disposed = true;\n }\n createFloat32MatrixTexture(rows, columns) {\n this.throwIfDisposed();\n return gpgpu_util.createFloat32MatrixTexture(this.gl, rows, columns, this.textureConfig);\n }\n createFloat16MatrixTexture(rows, columns) {\n this.throwIfDisposed();\n return gpgpu_util.createFloat16MatrixTexture(this.gl, rows, columns, this.textureConfig);\n }\n createUnsignedBytesMatrixTexture(rows, columns) {\n this.throwIfDisposed();\n return gpgpu_util.createUnsignedBytesMatrixTexture(this.gl, rows, columns, this.textureConfig);\n }\n uploadPixelDataToTexture(texture, pixels) {\n this.throwIfDisposed();\n gpgpu_util.uploadPixelDataToTexture(this.gl, texture, pixels);\n }\n uploadDenseMatrixToTexture(texture, width, height, data) {\n this.throwIfDisposed();\n gpgpu_util.uploadDenseMatrixToTexture(this.gl, texture, width, height, data, this.textureConfig);\n }\n createFloat16PackedMatrixTexture(rows, columns) {\n this.throwIfDisposed();\n return gpgpu_util.createFloat16PackedMatrixTexture(this.gl, rows, columns, this.textureConfig);\n }\n createPackedMatrixTexture(rows, columns) {\n this.throwIfDisposed();\n return gpgpu_util.createPackedMatrixTexture(this.gl, rows, columns, this.textureConfig);\n }\n deleteMatrixTexture(texture) {\n this.throwIfDisposed();\n if (this.outputTexture === texture) {\n webgl_util.unbindColorTextureFromFramebuffer(this.gl, this.framebuffer);\n this.outputTexture = null;\n }\n webgl_util.callAndCheck(this.gl, () => this.gl.deleteTexture(texture));\n }\n downloadByteEncodedFloatMatrixFromOutputTexture(texture, rows, columns) {\n return this.downloadMatrixDriver(texture, () => gpgpu_util.downloadByteEncodedFloatMatrixFromOutputTexture(this.gl, rows, columns, this.textureConfig));\n }\n downloadPackedMatrixFromBuffer(buffer, batch, rows, columns, physicalRows, physicalCols) {\n return gpgpu_util.downloadPackedMatrixFromBuffer(this.gl, buffer, batch, rows, columns, physicalRows, physicalCols, this.textureConfig);\n }\n downloadFloat32MatrixFromBuffer(buffer, size) {\n return gpgpu_util.downloadFloat32MatrixFromBuffer(this.gl, buffer, size);\n }\n createBufferFromTexture(texture, rows, columns) {\n this.bindTextureToFrameBuffer(texture);\n const result = gpgpu_util.createBufferFromOutputTexture(this.gl, rows, columns, this.textureConfig);\n this.unbindTextureToFrameBuffer();\n return result;\n }\n createAndWaitForFence() {\n const fenceContext = this.createFence(this.gl);\n return this.pollFence(fenceContext);\n }\n createFence(gl) {\n let query;\n let isFencePassed;\n if (env().getBool('WEBGL_FENCE_API_ENABLED')) {\n const gl2 = gl;\n const sync = gl2.fenceSync(gl2.SYNC_GPU_COMMANDS_COMPLETE, 0);\n gl.flush();\n 
isFencePassed = () => {\n const status = gl2.clientWaitSync(sync, 0, 0);\n return status === gl2.ALREADY_SIGNALED ||\n status === gl2.CONDITION_SATISFIED;\n };\n query = sync;\n }\n else if (env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION') > 0) {\n query = this.beginQuery();\n this.endQuery();\n isFencePassed = () => this.isQueryAvailable(query, env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION'));\n }\n else {\n // If we have no way to fence, return true immediately. This will fire in\n // WebGL 1.0 when there is no disjoint query timer. In this case, because\n // the fence passes immediately, we'll immediately ask for a download of\n // the texture, which will cause the UI thread to hang.\n isFencePassed = () => true;\n }\n return { query, isFencePassed };\n }\n downloadMatrixFromPackedTexture(texture, physicalRows, physicalCols) {\n return this.downloadMatrixDriver(texture, () => gpgpu_util.downloadMatrixFromPackedOutputTexture(this.gl, physicalRows, physicalCols));\n }\n createProgram(fragmentShaderSource) {\n this.throwIfDisposed();\n const gl = this.gl;\n const fragmentShader = webgl_util.createFragmentShader(gl, fragmentShaderSource);\n const vertexShader = gpgpu_util.createVertexShader(gl);\n const program = webgl_util.createProgram(gl);\n webgl_util.callAndCheck(gl, () => gl.attachShader(program, vertexShader));\n webgl_util.callAndCheck(gl, () => gl.attachShader(program, fragmentShader));\n webgl_util.linkProgram(gl, program);\n if (this.debug) {\n webgl_util.validateProgram(gl, program);\n }\n if (!this.vertexAttrsAreBound) {\n this.setProgram(program);\n this.vertexAttrsAreBound = gpgpu_util.bindVertexProgramAttributeStreams(gl, this.program, this.vertexBuffer);\n }\n return program;\n }\n deleteProgram(program) {\n this.throwIfDisposed();\n if (program === this.program) {\n this.program = null;\n }\n if (program != null) {\n webgl_util.callAndCheck(this.gl, () => this.gl.deleteProgram(program));\n }\n }\n setProgram(program) {\n this.throwIfDisposed();\n this.program = program;\n if ((this.program != null) && this.debug) {\n webgl_util.validateProgram(this.gl, this.program);\n }\n webgl_util.callAndCheck(this.gl, () => this.gl.useProgram(program));\n }\n getUniformLocation(program, uniformName, shouldThrow = true) {\n this.throwIfDisposed();\n if (shouldThrow) {\n return webgl_util.getProgramUniformLocationOrThrow(this.gl, program, uniformName);\n }\n else {\n return webgl_util.getProgramUniformLocation(this.gl, program, uniformName);\n }\n }\n getAttributeLocation(program, attribute) {\n this.throwIfDisposed();\n return webgl_util.callAndCheck(this.gl, () => this.gl.getAttribLocation(program, attribute));\n }\n getUniformLocationNoThrow(program, uniformName) {\n this.throwIfDisposed();\n return this.gl.getUniformLocation(program, uniformName);\n }\n setInputMatrixTexture(inputMatrixTexture, uniformLocation, textureUnit) {\n this.throwIfDisposed();\n this.throwIfNoProgram();\n webgl_util.bindTextureToProgramUniformSampler(this.gl, inputMatrixTexture, uniformLocation, textureUnit);\n }\n setOutputMatrixTexture(outputMatrixTexture, rows, columns) {\n this.setOutputMatrixTextureDriver(outputMatrixTexture, columns, rows);\n }\n setOutputPackedMatrixTexture(outputPackedMatrixTexture, rows, columns) {\n this.throwIfDisposed();\n const [width, height] = tex_util.getPackedMatrixTextureShapeWidthHeight(rows, columns);\n this.setOutputMatrixTextureDriver(outputPackedMatrixTexture, width, height);\n }\n setOutputMatrixWriteRegion(startRow, numRows, startColumn, 
numColumns) {\n this.setOutputMatrixWriteRegionDriver(startColumn, startRow, numColumns, numRows);\n }\n setOutputPackedMatrixWriteRegion(startRow, numRows, startColumn, numColumns) {\n throw new Error('setOutputPackedMatrixWriteRegion not implemented.');\n }\n debugValidate() {\n if (this.program != null) {\n webgl_util.validateProgram(this.gl, this.program);\n }\n webgl_util.validateFramebuffer(this.gl);\n }\n executeProgram() {\n this.throwIfDisposed();\n this.throwIfNoProgram();\n const gl = this.gl;\n if (this.debug) {\n this.debugValidate();\n }\n webgl_util.callAndCheck(gl, () => gl.drawElements(gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0));\n }\n blockUntilAllProgramsCompleted() {\n this.throwIfDisposed();\n webgl_util.callAndCheck(this.gl, () => this.gl.finish());\n }\n getQueryTimerExtension() {\n if (this.disjointQueryTimerExtension == null) {\n this.disjointQueryTimerExtension =\n webgl_util.getExtensionOrThrow(this.gl, env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION') === 2 ?\n 'EXT_disjoint_timer_query_webgl2' :\n 'EXT_disjoint_timer_query');\n }\n return this.disjointQueryTimerExtension;\n }\n getQueryTimerExtensionWebGL2() {\n return this.getQueryTimerExtension();\n }\n getQueryTimerExtensionWebGL1() {\n return this.getQueryTimerExtension();\n }\n beginQuery() {\n if (env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION') === 2) {\n const gl2 = this.gl;\n const ext = this.getQueryTimerExtensionWebGL2();\n const query = gl2.createQuery();\n gl2.beginQuery(ext.TIME_ELAPSED_EXT, query);\n return query;\n }\n const ext = this.getQueryTimerExtensionWebGL1();\n const query = ext.createQueryEXT();\n ext.beginQueryEXT(ext.TIME_ELAPSED_EXT, query);\n return query;\n }\n endQuery() {\n if (env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION') === 2) {\n const gl2 = this.gl;\n const ext = this.getQueryTimerExtensionWebGL2();\n gl2.endQuery(ext.TIME_ELAPSED_EXT);\n return;\n }\n const ext = this.getQueryTimerExtensionWebGL1();\n ext.endQueryEXT(ext.TIME_ELAPSED_EXT);\n }\n async waitForQueryAndGetTime(query) {\n await util.repeatedTry(() => this.disposed || // while testing contexts are created / disposed\n // in rapid succession, so without this check we\n // may poll for the query timer indefinitely\n this.isQueryAvailable(query, env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION')));\n return this.getQueryTime(query, env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION'));\n }\n getQueryTime(query, queryTimerVersion) {\n if (queryTimerVersion === 0) {\n return null;\n }\n if (queryTimerVersion === 2) {\n const gl2 = this.gl;\n const timeElapsedNanos = gl2.getQueryParameter(query, gl2.QUERY_RESULT);\n // Return milliseconds.\n return timeElapsedNanos / 1000000;\n }\n else {\n const ext = this.getQueryTimerExtensionWebGL1();\n const timeElapsedNanos = ext.getQueryObjectEXT(query, ext.QUERY_RESULT_EXT);\n // Return milliseconds.\n return timeElapsedNanos / 1000000;\n }\n }\n isQueryAvailable(query, queryTimerVersion) {\n if (queryTimerVersion === 0) {\n return true;\n }\n if (queryTimerVersion === 2) {\n const gl2 = this.gl;\n const ext = this.getQueryTimerExtensionWebGL2();\n const available = gl2.getQueryParameter(query, gl2.QUERY_RESULT_AVAILABLE);\n if (this.disjoint == null) {\n this.disjoint = this.gl.getParameter(ext.GPU_DISJOINT_EXT);\n }\n return available && !this.disjoint;\n }\n else {\n const ext = this.getQueryTimerExtensionWebGL1();\n const available = ext.getQueryObjectEXT(query, ext.QUERY_RESULT_AVAILABLE_EXT);\n if 
(this.disjoint == null) {\n this.disjoint = this.gl.getParameter(ext.GPU_DISJOINT_EXT);\n }\n return available && !this.disjoint;\n }\n }\n pollFence(fenceContext) {\n return new Promise(resolve => {\n this.addItemToPoll(() => fenceContext.isFencePassed(), () => resolve());\n });\n }\n pollItems() {\n // Find the last query that has finished.\n const index = linearSearchLastTrue(this.itemsToPoll.map(x => x.isDoneFn));\n for (let i = 0; i <= index; ++i) {\n const { resolveFn } = this.itemsToPoll[i];\n resolveFn();\n }\n this.itemsToPoll = this.itemsToPoll.slice(index + 1);\n }\n addItemToPoll(isDoneFn, resolveFn) {\n this.itemsToPoll.push({ isDoneFn, resolveFn });\n if (this.itemsToPoll.length > 1) {\n // We already have a running loop that polls.\n return;\n }\n // Start a new loop that polls.\n util.repeatedTry(() => {\n this.pollItems();\n // End the loop if no more items to poll.\n return this.itemsToPoll.length === 0;\n });\n }\n bindTextureToFrameBuffer(texture) {\n this.throwIfDisposed();\n webgl_util.bindColorTextureToFramebuffer(this.gl, texture, this.framebuffer);\n if (this.debug) {\n webgl_util.validateFramebuffer(this.gl);\n }\n }\n unbindTextureToFrameBuffer() {\n if (this.outputTexture != null) {\n webgl_util.bindColorTextureToFramebuffer(this.gl, this.outputTexture, this.framebuffer);\n if (this.debug) {\n webgl_util.validateFramebuffer(this.gl);\n }\n }\n else {\n webgl_util.unbindColorTextureFromFramebuffer(this.gl, this.framebuffer);\n }\n }\n downloadMatrixDriver(texture, downloadAndDecode) {\n this.bindTextureToFrameBuffer(texture);\n const result = downloadAndDecode();\n this.unbindTextureToFrameBuffer();\n return result;\n }\n setOutputMatrixTextureDriver(outputMatrixTextureMaybePacked, width, height) {\n this.throwIfDisposed();\n const gl = this.gl;\n webgl_util.bindColorTextureToFramebuffer(gl, outputMatrixTextureMaybePacked, this.framebuffer);\n if (this.debug) {\n webgl_util.validateFramebuffer(gl);\n }\n this.outputTexture = outputMatrixTextureMaybePacked;\n webgl_util.callAndCheck(gl, () => gl.viewport(0, 0, width, height));\n webgl_util.callAndCheck(gl, () => gl.scissor(0, 0, width, height));\n }\n setOutputMatrixWriteRegionDriver(x, y, width, height) {\n this.throwIfDisposed();\n webgl_util.callAndCheck(this.gl, () => this.gl.scissor(x, y, width, height));\n }\n throwIfDisposed() {\n if (this.disposed) {\n throw new Error('Attempted to use disposed GPGPUContext.');\n }\n }\n throwIfNoProgram() {\n if (this.program == null) {\n throw new Error('No GPU program is currently set.');\n }\n }\n}\n/**\n * Finds the index of the last true element using linear search.\n * Note: We can't do binary search because Chrome expects us to explicitly\n * test all fences before download:\n * https://github.com/tensorflow/tfjs/issues/1145\n */\nexport function linearSearchLastTrue(arr) {\n let i = 0;\n for (; i < arr.length; ++i) {\n const isDone = arr[i]();\n if (!isDone) {\n break;\n }\n }\n return i - 1;\n}\n//# sourceMappingURL=gpgpu_context.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env, util } from '@tensorflow/tfjs-core';\nimport * as shader_compiler from './shader_compiler';\nexport function compileProgram(gpgpu, program, inputs, output) {\n const userCode = program.userCode;\n const inputInfos = inputs.map((input, i) => {\n const shapeInfo = {\n logicalShape: input.shape,\n texShape: input.isUniform ? null : input.texData.texShape,\n isUniform: input.isUniform,\n isPacked: input.isUniform ? false : input.texData.isPacked,\n flatOffset: null\n };\n if (input.texData != null && input.texData.slice != null &&\n input.texData.slice.flatOffset > 0) {\n shapeInfo.flatOffset = input.texData.slice.flatOffset;\n }\n return { name: program.variableNames[i], shapeInfo };\n });\n const inShapeInfos = inputInfos.map(x => x.shapeInfo);\n const outShapeInfo = {\n logicalShape: output.shape,\n texShape: output.texData.texShape,\n isUniform: false,\n isPacked: output.texData.isPacked,\n flatOffset: null\n };\n const source = shader_compiler.makeShader(inputInfos, outShapeInfo, userCode, program.packedInputs);\n const webGLProgram = gpgpu.createProgram(source);\n // Add special uniforms (NAN, INFINITY)\n let infLoc = null;\n const nanLoc = gpgpu.getUniformLocation(webGLProgram, 'NAN', false);\n if (env().getNumber('WEBGL_VERSION') === 1) {\n infLoc = gpgpu.getUniformLocation(webGLProgram, 'INFINITY', false);\n }\n // Add user-defined uniforms\n const uniformLocations = {};\n for (let i = 0; i < program.variableNames.length; i++) {\n const varName = program.variableNames[i];\n const shouldThrow = false;\n uniformLocations[varName] =\n gpgpu.getUniformLocation(webGLProgram, varName, shouldThrow);\n uniformLocations[`offset${varName}`] =\n gpgpu.getUniformLocation(webGLProgram, `offset${varName}`, shouldThrow);\n }\n return {\n program,\n source,\n webGLProgram,\n uniformLocations,\n inShapeInfos,\n outShapeInfo,\n infLoc,\n nanLoc,\n };\n}\nfunction validateBinaryAndProgram(shapeInfos, inputs) {\n if (shapeInfos.length !== inputs.length) {\n throw Error(`Binary was compiled with ${shapeInfos.length} inputs, but ` +\n `was executed with ${inputs.length} inputs`);\n }\n shapeInfos.forEach((s, i) => {\n const shapeA = s.logicalShape;\n const input = inputs[i];\n const shapeB = input.shape;\n if (!util.arraysEqual(shapeA, shapeB)) {\n throw Error(`Binary was compiled with different shapes than ` +\n `the current args. Shapes ${shapeA} and ${shapeB} must match`);\n }\n // The input is uploaded as uniform.\n if (s.isUniform && input.isUniform) {\n return;\n }\n const texShapeA = s.texShape;\n const texShapeB = input.isUniform ? null : input.texData.texShape;\n if (!util.arraysEqual(texShapeA, texShapeB)) {\n throw Error(`Binary was compiled with different texture shapes than the` +\n ` current args. 
Shape ${texShapeA} and ${texShapeB} must match`);\n }\n });\n}\nexport function runProgram(gpgpu, binary, inputs, output, customSetup) {\n validateBinaryAndProgram(binary.inShapeInfos, inputs);\n validateBinaryAndProgram([binary.outShapeInfo], [output]);\n const outTex = output.texData.texture;\n const outTexShape = output.texData.texShape;\n if (output.texData.isPacked) {\n gpgpu.setOutputPackedMatrixTexture(outTex, outTexShape[0], outTexShape[1]);\n }\n else {\n gpgpu.setOutputMatrixTexture(outTex, outTexShape[0], outTexShape[1]);\n }\n gpgpu.setProgram(binary.webGLProgram);\n // Set special uniforms (NAN, INFINITY)\n if (env().getNumber('WEBGL_VERSION') === 1) {\n if (binary.infLoc !== null) {\n gpgpu.gl.uniform1f(binary.infLoc, Infinity);\n }\n }\n if (binary.nanLoc !== null) {\n gpgpu.gl.uniform1f(binary.nanLoc, NaN);\n }\n // Set user-defined inputs\n inputs.forEach((input, i) => {\n const varName = binary.program.variableNames[i];\n const varLoc = binary.uniformLocations[varName];\n const varOffsetLoc = binary.uniformLocations[`offset${varName}`];\n if (varLoc == null) {\n // The compiler inferred that this variable is not used in this shader.\n return;\n }\n if (input.isUniform) {\n // Upload the values of the tensor as uniform.\n if (util.sizeFromShape(input.shape) < 2) {\n gpgpu.gl.uniform1f(varLoc, input.uniformValues[0]);\n }\n else {\n let vals = input.uniformValues;\n if (!(vals instanceof Float32Array)) {\n vals = new Float32Array(vals);\n }\n gpgpu.gl.uniform1fv(varLoc, vals);\n }\n return;\n }\n // If the input was sliced, upload the flat offset index.\n if (input.texData.slice != null && varOffsetLoc != null) {\n gpgpu.gl.uniform1i(varOffsetLoc, input.texData.slice.flatOffset);\n }\n gpgpu.setInputMatrixTexture(input.texData.texture, varLoc, i);\n });\n if (customSetup != null) {\n customSetup(gpgpu, binary.webGLProgram);\n }\n gpgpu.executeProgram();\n}\nexport function makeShaderKey(program, inputs, output) {\n let keyInputs = '';\n inputs.concat(output).forEach(x => {\n const hasOffset = x.texData != null && x.texData.slice != null &&\n x.texData.slice.flatOffset > 0;\n const texShape = x.isUniform ? 'uniform' : x.texData.texShape;\n keyInputs += `${x.shape}_${texShape}_${hasOffset}`;\n });\n const keyUserCode = program.userCode;\n let key = program.constructor.name;\n // Fast string concat. See https://jsperf.com/string-concatenation/14.\n key += '_' + keyInputs + '_' + keyUserCode;\n return key;\n}\n//# sourceMappingURL=gpgpu_math.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getGlslDifferences } from './glsl_version';\nexport class Im2ColPackedProgram {\n constructor(outputShape, inputShape, convInfo) {\n this.variableNames = ['A'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = outputShape;\n const { filterWidth, inChannels, strideWidth, strideHeight, padInfo, outWidth, dilationWidth, dilationHeight, dataFormat } = convInfo;\n const { left, top } = padInfo;\n const itemsPerBlockRow = inChannels * filterWidth;\n const glsl = getGlslDifferences();\n const isChannelsLast = dataFormat === 'channelsLast';\n const rowDim = isChannelsLast ? 0 : 1;\n const colDim = isChannelsLast ? 1 : 2;\n let unrolled = ``;\n for (let row = 0; row <= 1; row++) {\n for (let col = 0; col <= 1; col++) {\n unrolled += `\n blockIndex = rc.y + ${col};\n pos = rc.x + ${row};\n\n if(blockIndex < ${outputShape[1]} && pos < ${outputShape[0]}) {\n offsetY = int(blockIndex / (${outWidth})) * ${strideHeight} - ${top};\n d0 = offsetY + ${dilationHeight} * (pos / ${itemsPerBlockRow});\n\n if(d0 < ${inputShape[rowDim]} && d0 >= 0) {\n\n offsetX = int(mod(float(blockIndex), ${outWidth}.) * ${strideWidth}. - ${left}.);\n d1 = offsetX + ${dilationWidth} * (int(mod(float(pos), ${itemsPerBlockRow}.) / ${inChannels}.));\n\n if(d1 < ${inputShape[colDim]} && d1 >= 0) {\n\n ch = int(mod(float(pos), ${inChannels}.));\n\n if (${isChannelsLast}) {\n innerDims = vec2(d1, ch);\n result[${row * 2 + col}] = getChannel(\n getA(d0, int(innerDims.x),\n int(innerDims.y)), innerDims);\n } else {\n innerDims = vec2(d0, d1);\n result[${row * 2 + col}] = getChannel(\n getA(ch, int(innerDims.x),\n int(innerDims.y)), innerDims);\n }\n }\n }\n }\n `;\n }\n }\n this.userCode = `\n void main() {\n ivec2 rc = getOutputCoords();\n\n vec4 result = vec4(0);\n\n int blockIndex, pos, offsetY, d0, offsetX, d1, ch;\n vec2 innerDims;\n\n ${unrolled}\n\n ${glsl.output} = result;\n }\n `;\n }\n}\n//# sourceMappingURL=im2col_packed_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class LRNProgram {\n constructor(xShape, radius, bias, alpha, beta) {\n this.variableNames = ['x'];\n this.outputShape = [];\n const rad = radius;\n const maxD = xShape[3] - 1;\n this.outputShape = xShape;\n // optimize pow(bias + alpha * sum, -beta)\n // src: https://github.com/tensorflow/tensorflow/..\n // blob/26033a1644a9c4a5fbe3170ab2e864b6a4ccd4ca/..\n // tensorflow/core/kernels/mkl_lrn_op.cc#L320\n let powOperator;\n const basis = `float(${bias}) + float(${alpha}) * sum`;\n if (beta === 0.5) {\n powOperator = `inversesqrt(${basis})`;\n }\n else if (beta === 1.0) {\n powOperator = `1.0/(${basis})`;\n }\n else {\n powOperator = `exp(log(${basis}) * float(-${beta}));`;\n }\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int r = coords[1];\n int c = coords[2];\n int d = coords[3];\n float x = getX(b, r, c, d);\n float sum = 0.0;\n for (int j = -${rad}; j <= ${rad}; j++) {\n int idx = d + j;\n if (idx >= 0 && idx <= ${maxD}) {\n float z = getX(b, r, c, idx);\n sum += z * z;\n }\n }\n float val = x * ${powOperator};\n setOutput(val);\n }\n `;\n }\n}\n//# sourceMappingURL=lrn_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class LRNGradProgram {\n constructor(inputShape, depthRadius, bias, alpha, beta) {\n this.variableNames = ['inputImage', 'outputImage', 'dy'];\n this.outputShape = [];\n this.outputShape = inputShape;\n this.depth = inputShape[3];\n this.depthRadius = depthRadius;\n this.bias = bias;\n this.alpha = alpha;\n this.beta = beta;\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int r = coords[1];\n int c = coords[2];\n\n float result = 0.0;\n for (int d = 0; d < ${this.depth}; ++d) {\n int depthBegin = int(max(0.0, float(d - ${depthRadius})));\n int depthEnd = int(min(float(${this.depth}),\n float(d + ${depthRadius} + 1)));\n\n const int MIN_DEPTH_BEGIN = 0;\n const int MAX_DEPTH_END = ${this.depth};\n\n float norm = 0.0;\n for (int k = MIN_DEPTH_BEGIN; k < MAX_DEPTH_END; ++k) {\n if (k < depthBegin){\n continue;\n }\n else if (k >= depthBegin && k < depthEnd) {\n norm += getInputImage(b, r, c, k) * getInputImage(b, r, c, k);\n }\n else {\n break;\n }\n }\n\n norm = float(${alpha}) * norm + float(${bias});\n\n for(int k = MIN_DEPTH_BEGIN; k < MAX_DEPTH_END; ++k){\n if (k < depthBegin){\n continue;\n }\n else if (k >= depthBegin && k < depthEnd){\n float dyi = -2.0 * float(${alpha})\n * float(${beta})\n * getInputImage(b ,r ,c, k) * getOutputImage(b, r, c, d)\n / norm;\n if (k == d) {\n dyi += pow(norm, -1.0 * ${beta});\n }\n if (k == coords[3]) {\n dyi *= getDy(b, r, c, d);\n result += dyi;\n }\n }\n else {\n break;\n }\n }\n }\n setOutput(result);\n }\n `;\n }\n}\n//# sourceMappingURL=lrn_grad_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class LRNPackedProgram {\n constructor(xShape, radius, bias, alpha, beta) {\n this.variableNames = ['x'];\n this.outputShape = [];\n this.packedInputs = true;\n this.packedOutput = true;\n const rad = radius;\n const maxD = xShape[3] - 1;\n this.outputShape = xShape;\n // optimize pow(bias + alpha * sum, -beta)\n // src: https://github.com/tensorflow/tensorflow/..\n // blob/26033a1644a9c4a5fbe3170ab2e864b6a4ccd4ca/..\n // tensorflow/core/kernels/mkl_lrn_op.cc#L320\n let powOperator;\n const basis = `float(${bias}) + float(${alpha}) * sum`;\n if (beta === 0.5) {\n powOperator = `inversesqrt(${basis})`;\n }\n else if (beta === 1.0) {\n powOperator = `1.0/(${basis})`;\n }\n else {\n powOperator = `exp(log(${basis}) * float(-${beta}));`;\n }\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords.x;\n int r = coords.y;\n int c = coords.z;\n int d = coords.w;\n\n bool hasNextCol = d < ${this.outputShape[3]};\n bool hasNextRow = c < ${this.outputShape[2]};\n\n vec4 sum = vec4(0.);\n vec4 xFragAtOutputCoords = getX(b, r, c, d);\n\n vec4 xAtOutputCoords = vec4(\n getChannel(xFragAtOutputCoords, vec2(c, d)),\n hasNextCol ?\n getChannel(xFragAtOutputCoords, vec2(c, d + 1)) : 0.0,\n hasNextRow ?\n getChannel(xFragAtOutputCoords , vec2(c + 1, d)) : 0.0,\n (hasNextRow && hasNextCol) ?\n getChannel(xFragAtOutputCoords, vec2(c + 1, d + 1)) : 0.0\n );\n\n int firstChannel = d - ${rad};\n vec2 cache = vec2(0.);\n if(firstChannel >= 0){\n vec4 firstChannelFrag = getX(b, r, c, firstChannel);\n cache.x = getChannel(firstChannelFrag, vec2(c, firstChannel));\n if(hasNextRow){\n cache.y = getChannel(firstChannelFrag, vec2(c + 1, firstChannel));\n }\n }\n\n ivec2 depth = ivec2(d, d + 1);\n for (int j = - ${rad}; j <= ${rad}; j++) {\n ivec2 idx = depth + j;\n bvec2 aboveLowerBound = greaterThanEqual(idx, ivec2(0));\n bvec2 belowUpperBound = lessThanEqual(idx, ivec2(${maxD}));\n\n bool depthInRange = aboveLowerBound.x && belowUpperBound.x;\n bool depthPlusOneInRange = aboveLowerBound.y && belowUpperBound.y;\n\n if(depthInRange || depthPlusOneInRange){\n vec4 z = vec4(0.);\n vec4 xFragAtCurrentDepth;\n z.xz = cache.xy;\n if(depthPlusOneInRange && hasNextCol){\n xFragAtCurrentDepth = idx.y != d ?\n getX(b, r, c, idx.y) : xFragAtOutputCoords;\n z.y = getChannel(xFragAtCurrentDepth, vec2(c, idx.y));\n if(hasNextRow){\n z.w = getChannel(xFragAtCurrentDepth, vec2(c + 1, idx.y));\n }\n }\n cache.xy = z.yw;\n sum += z * z;\n }\n }\n vec4 result = xAtOutputCoords * ${powOperator};\n setOutput(result);\n }\n `;\n }\n}\n//# sourceMappingURL=lrn_packed_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class MaxPool2DBackpropProgram {\n constructor(convInfo) {\n this.variableNames = ['dy', 'maxPos'];\n this.outputShape = convInfo.inShape;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationHeight = convInfo.dilationHeight;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padTop = effectiveFilterHeight - 1 - convInfo.padInfo.top;\n const padLeft = effectiveFilterWidth - 1 - convInfo.padInfo.left;\n const lastIndex = effectiveFilterHeight * effectiveFilterWidth - 1;\n this.userCode = `\n const ivec2 pads = ivec2(${padTop}, ${padLeft});\n\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int d = coords[3];\n\n ivec2 dyRCCorner = coords.yz - pads;\n int dyRCorner = dyRCCorner.x;\n int dyCCorner = dyRCCorner.y;\n\n // Convolve dy(?, ?, d) with pos mask(:, :, d) to get dx(xR, xC, d).\n // ? = to be determined. : = across all values in that axis.\n float dotProd = 0.0;\n for (int wR = 0; wR < ${effectiveFilterHeight};\n wR += ${dilationHeight}) {\n float dyR = float(dyRCorner + wR) / ${strideHeight}.0;\n\n if (dyR < 0.0 || dyR >= ${convInfo.outHeight}.0 || fract(dyR) > 0.0) {\n continue;\n }\n int idyR = int(dyR);\n\n for (int wC = 0; wC < ${effectiveFilterWidth}; wC++) {\n float dyC = float(dyCCorner + wC) / ${strideWidth}.0;\n\n if (dyC < 0.0 || dyC >= ${convInfo.outWidth}.0 ||\n fract(dyC) > 0.0) {\n continue;\n }\n int idyC = int(dyC);\n\n float dyValue = getDy(b, idyR, idyC, d);\n int maxPosValue = ${lastIndex} - int(getMaxPos(b, idyR, idyC, d));\n\n // Get the current value, check it against the value from the\n // position matrix.\n int curPosValue = wR * ${effectiveFilterWidth} + wC;\n float mask = float(maxPosValue == curPosValue ? 
1.0 : 0.0);\n\n dotProd += dyValue * mask;\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\nexport class MaxPool3DBackpropProgram {\n constructor(convInfo) {\n this.variableNames = ['dy', 'maxPos'];\n this.outputShape = convInfo.inShape;\n const strideDepth = convInfo.strideDepth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationDepth = convInfo.dilationDepth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterDepth = convInfo.effectiveFilterDepth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padFront = effectiveFilterDepth - 1 - convInfo.padInfo.front;\n const padTop = effectiveFilterHeight - 1 - convInfo.padInfo.top;\n const padLeft = effectiveFilterWidth - 1 - convInfo.padInfo.left;\n const lastIndex = effectiveFilterDepth * effectiveFilterHeight * effectiveFilterWidth - 1;\n this.userCode = `\n const ivec3 pads = ivec3(${padFront}, ${padTop}, ${padLeft});\n\n void main() {\n ivec5 coords = getOutputCoords();\n int batch = coords.x;\n int ch = coords.u;\n\n ivec3 dyCorner = ivec3(coords.y, coords.z, coords.w) - pads;\n int dyDCorner = dyCorner.x;\n int dyRCorner = dyCorner.y;\n int dyCCorner = dyCorner.z;\n\n // Convolve dy(?, ?, ?, ch) with pos mask(:, :, :, d) to get\n // dx(xD, xR, xC, ch).\n // ? = to be determined. : = across all values in that axis.\n float dotProd = 0.0;\n\n for (int wD = 0; wD < ${effectiveFilterDepth};\n wD += ${dilationDepth}) {\n float dyD = float(dyDCorner + wD) / ${strideDepth}.0;\n\n if (dyD < 0.0 || dyD >= ${convInfo.outDepth}.0 || fract(dyD) > 0.0) {\n continue;\n }\n int idyD = int(dyD);\n\n for (int wR = 0; wR < ${effectiveFilterHeight};\n wR += ${dilationHeight}) {\n float dyR = float(dyRCorner + wR) / ${strideHeight}.0;\n\n if (dyR < 0.0 || dyR >= ${convInfo.outHeight}.0 ||\n fract(dyR) > 0.0) {\n continue;\n }\n int idyR = int(dyR);\n\n for (int wC = 0; wC < ${effectiveFilterWidth};\n wC += ${dilationWidth}) {\n float dyC = float(dyCCorner + wC) / ${strideWidth}.0;\n\n if (dyC < 0.0 || dyC >= ${convInfo.outWidth}.0 ||\n fract(dyC) > 0.0) {\n continue;\n }\n int idyC = int(dyC);\n\n float dyValue = getDy(batch, idyD, idyR, idyC, ch);\n int maxPosValue = ${lastIndex} -\n int(getMaxPos(batch, idyD, idyR, idyC, ch));\n\n // Get the current value, check it against the value from the\n // position matrix.\n int curPosValue =\n wD * ${effectiveFilterHeight} * ${effectiveFilterWidth} +\n wR * ${effectiveFilterWidth} + wC;\n float mask = float(maxPosValue == curPosValue ? 1.0 : 0.0);\n\n dotProd += dyValue * mask;\n }\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\n//# sourceMappingURL=max_pool_backprop_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class MatMulPackedProgram {\n constructor(aShape, bShape, outputShape, transposeA = false, transposeB = false, addBias = false, activation = null, hasPreluActivation = false) {\n this.variableNames = ['matrixA', 'matrixB'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = outputShape;\n const sharedDim = transposeA ? aShape[1] : aShape[2];\n const sharedDimensionPacked = Math.ceil(sharedDim / 2);\n const aSample = transposeA ? 'i * 2, rc.y' : 'rc.y, i * 2';\n const bSample = transposeB ? 'rc.z, i * 2' : 'i * 2, rc.z';\n const aSwizzle = transposeA ? ['a.xxyy', 'a.zzww'] : ['a.xxzz', 'a.yyww'];\n const bSwizzle = transposeB ? ['b.xzxz', 'b.ywyw'] : ['b.xyxy', 'b.zwzw'];\n let activationSnippet = '', applyActivationSnippet = '';\n if (activation) {\n if (hasPreluActivation) {\n activationSnippet = `vec4 activation(vec4 a) {\n vec4 b = getPreluActivationWeightsAtOutCoords();\n ${activation}\n }`;\n }\n else {\n activationSnippet = `vec4 activation(vec4 x) {\n ${activation}\n }`;\n }\n applyActivationSnippet = `result = activation(result);`;\n }\n const addBiasSnippet = addBias ? 'result += getBiasAtOutCoords();' : '';\n if (addBias) {\n this.variableNames.push('bias');\n }\n if (hasPreluActivation) {\n this.variableNames.push('preluActivationWeights');\n }\n let batchASnippet = 'rc.x';\n let batchBSnippet = 'rc.x';\n if (aShape[0] < bShape[0]) {\n batchASnippet = `int(min(float(rc.x), ${aShape[0] - 1}.))`;\n }\n else if (bShape[0] < aShape[0]) {\n batchBSnippet = `int(min(float(rc.x), ${bShape[0] - 1}.))`;\n }\n this.userCode = `\n ${activationSnippet}\n\n const float sharedDimension = ${sharedDimensionPacked}.0;\n\n vec4 dot2x2ARowBCol(ivec3 rc) {\n vec4 result = vec4(0);\n for (int i = 0; i < ${sharedDimensionPacked}; i++) {\n int batchA = ${batchASnippet};\n int batchB = ${batchBSnippet};\n vec4 a = getMatrixA(batchA, ${aSample});\n vec4 b = getMatrixB(batchB, ${bSample});\n\n // These swizzled products need to be separately added.\n // See: https://github.com/tensorflow/tfjs/issues/1735\n result += (${aSwizzle[0]} * ${bSwizzle[0]});\n result += (${aSwizzle[1]} * ${bSwizzle[1]});\n }\n return result;\n }\n\n void main() {\n ivec3 rc = getOutputCoords();\n vec4 result = dot2x2ARowBCol(rc);\n\n ${addBiasSnippet}\n\n ${applyActivationSnippet}\n\n setOutput(result);\n }\n `;\n }\n}\n//# sourceMappingURL=mulmat_packed_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class MultinomialProgram {\n constructor(batchSize, numOutcomes, numSamples) {\n this.variableNames = ['probs'];\n this.outputShape = [batchSize, numSamples];\n this.userCode = `\n uniform float seed;\n\n void main() {\n ivec2 coords = getOutputCoords();\n int batch = coords[0];\n\n float r = random(seed);\n float cdf = 0.0;\n\n for (int i = 0; i < ${numOutcomes - 1}; i++) {\n cdf += getProbs(batch, i);\n\n if (r < cdf) {\n setOutput(float(i));\n return;\n }\n }\n\n // If no other event happened, last event happened.\n setOutput(float(${numOutcomes - 1}));\n }\n `;\n }\n getCustomSetupFunc(seed) {\n return (gpgpu, webGLProgram) => {\n if (this.seedLoc == null) {\n this.seedLoc = gpgpu.getUniformLocation(webGLProgram, 'seed');\n }\n gpgpu.gl.uniform1f(this.seedLoc, seed);\n };\n }\n}\n//# sourceMappingURL=multinomial_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class OneHotProgram {\n constructor(numIndices, depth, onValue, offValue) {\n this.variableNames = ['indices'];\n this.outputShape = [numIndices, depth];\n this.userCode = `\n void main() {\n ivec2 coords = getOutputCoords();\n int index = round(getIndices(coords.x));\n setOutput(mix(float(${offValue}), float(${onValue}),\n float(index == coords.y)));\n }\n `;\n }\n}\n//# sourceMappingURL=onehot_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getChannels } from './packing_util';\nimport { getCoordsDataType } from './shader_compiler';\nexport class PackProgram {\n constructor(outputShape) {\n this.variableNames = ['A'];\n this.packedInputs = false;\n this.packedOutput = true;\n // Only input / output 3D tensors.\n this.outputShape = outputShape;\n const rank = outputShape.length;\n if (rank === 0) {\n this.userCode = `\n void main() {\n setOutput(vec4(getA(), 0., 0., 0.));\n }\n `;\n }\n else {\n const channels = getChannels('rc', rank);\n const dtype = getCoordsDataType(rank);\n const outOfBoundsCondition = getOutOfBoundsCondition(rank, outputShape, channels);\n const setup = getSetup(rank, outputShape[outputShape.length - 1], outputShape[outputShape.length - 2], channels);\n const output = getOutput(outputShape, channels);\n this.userCode = `\n void main() {\n ${dtype} rc = getOutputCoords();\n\n if(${outOfBoundsCondition}) {\n setOutput(vec4(0));\n } else {\n ${setup}\n\n setOutput(vec4(${output}));\n }\n }\n `;\n }\n }\n}\nfunction getSourceCoordsArr(rank, dims) {\n const coords = [];\n for (let row = 0; row <= 1; row++) {\n for (let col = 0; col <= 1; col++) {\n let coord = `${row === 0 ? 'r' : 'rp1'}, ${col === 0 ? 'c' : 'cp1'}`;\n for (let d = 2; d < rank; d++) {\n coord = `${dims[dims.length - 1 - d]},` + coord;\n }\n coords.push(coord);\n }\n }\n return coords;\n}\nfunction getOutOfBoundsCondition(rank, shape, dims) {\n if (rank === 1) {\n return `rc > ${shape[0]}`;\n }\n let cond = '';\n for (let i = rank - 2; i < rank; i++) {\n cond += `${dims[i]} >= ${shape[i]}`;\n if (i < rank - 1) {\n cond += '||';\n }\n }\n return cond;\n}\nfunction getSetup(rank, cols, rows, dims) {\n if (rank === 1) {\n return '';\n }\n const innerDims = dims.slice(-2);\n return `\n int r = ${innerDims[0]};\n int c = ${innerDims[1]};\n int rp1 = r + 1;\n int cp1 = c + 1;\n\n bool cEdge = cp1 >= ${cols};\n bool rEdge = rp1 >= ${rows};\n `;\n}\nfunction getOutput(shape, dims) {\n const rank = shape.length;\n const sourceCoords = getSourceCoordsArr(rank, dims);\n if (rank === 1) {\n return `getA(rc),\n rc + 1 >= ${shape[0]} ? 0. : getA(rc + 1),\n 0, 0`;\n }\n return `getA(${sourceCoords[0]}),\n cEdge ? 0. : getA(${sourceCoords[1]}),\n rEdge ? 0. : getA(${sourceCoords[2]}),\n rEdge || cEdge ? 0. : getA(${sourceCoords[3]})`;\n}\n//# sourceMappingURL=pack_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getCoordsDataType } from './shader_compiler';\nexport class PadProgram {\n constructor(xShape, paddings, constantValue) {\n this.variableNames = ['x'];\n this.outputShape = paddings.map((p, i) => p[0] /* beforePad */ + xShape[i] + p[1] /* afterPad */);\n const rank = xShape.length;\n const type = getCoordsDataType(rank);\n const start = paddings.map(p => p[0]).join(',');\n const end = paddings.map((p, i) => p[0] + xShape[i]).join(',');\n const unpackedCoords = ['coords[0]', 'coords[1]', 'coords[2]', 'coords[3]'].slice(0, rank);\n if (rank === 1) {\n this.userCode = `\n int start = ${start};\n int end = ${end};\n\n void main() {\n int outC = getOutputCoords();\n if (outC < start || outC >= end) {\n setOutput(float(${constantValue}));\n } else {\n setOutput(getX(outC - start));\n }\n }\n `;\n return;\n }\n this.userCode = `\n ${type} start = ${type}(${start});\n ${type} end = ${type}(${end});\n\n void main() {\n ${type} outC = getOutputCoords();\n if (any(lessThan(outC, start)) || any(greaterThanEqual(outC, end))) {\n setOutput(float(${constantValue}));\n } else {\n ${type} coords = outC - start;\n setOutput(getX(${unpackedCoords}));\n }\n }\n `;\n }\n}\n//# sourceMappingURL=pad_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getChannels } from './packing_util';\nimport { getCoordsDataType } from './shader_compiler';\nexport class PadPackedProgram {\n constructor(xShape, paddings, constantValue) {\n this.variableNames = ['x'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = paddings.map((p, i) => p[0] /* beforePad */ + xShape[i] + p[1] /* afterPad */);\n const rank = xShape.length;\n const dtype = getCoordsDataType(rank);\n const start = paddings.map(p => p[0]).join(',');\n const end = paddings.map((p, i) => p[0] + xShape[i]).join(',');\n const coords = getChannels('rc', rank);\n const source = getChannels('source', rank);\n const cLimit = `${coords[rank - 1]} < ${this.outputShape[rank - 1]}`;\n const innerDims = rank === 1 ? 'source' : `vec2(${source.slice(-2).join()})`;\n const componentSetup = [\n `${dtype} rc = outputLoc;`, `${coords[rank - 1]} += 1;\n if(${cLimit}) {\n `,\n rank === 1 ? 
'' : `}\n rc = outputLoc;\n ${coords[rank - 2]} += 1;\n if(${coords[rank - 2]} < ${this.outputShape[rank - 2]}) {`,\n rank === 1 ? '' : ` ${coords[rank - 1]} += 1;\n if(${cLimit}) {`\n ];\n const paddingArea = rank === 1 ?\n 'rc < start || rc >= end' :\n 'any(lessThan(rc, start)) || any(greaterThanEqual(rc, end))';\n let mainLoop = '';\n for (let i = 0, j = rank === 1 ? 2 : 4; i < j; i++) {\n mainLoop += `\n ${componentSetup[i]}\n if (${paddingArea}) {\n result[${i}] = float(${constantValue});\n } else {\n ${dtype} source = rc - start;\n result[${i}] = getChannel(getX(${source.join()}), ${innerDims});\n }\n `;\n }\n mainLoop += (rank === 1 ? `} ` : `}}`);\n this.userCode = `\n const ${dtype} start = ${dtype}(${start});\n const ${dtype} end = ${dtype}(${end});\n\n void main() {\n ${dtype} outputLoc = getOutputCoords();\n vec4 result = vec4(0.);\n ${mainLoop}\n setOutput(result);\n }\n `;\n }\n}\n//# sourceMappingURL=pad_packed_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class Pool2DProgram {\n constructor(convInfo, poolType, computePositions, flattenPositions = false, includeBatchInIndex = false) {\n this.variableNames = ['x'];\n if (poolType === 'avg' && computePositions) {\n throw new Error('Cannot compute positions for average pool.');\n }\n const filterWidth = convInfo.filterWidth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n this.outputShape = convInfo.outShape;\n const isAvgPool = poolType === 'avg';\n const batchFlattenPositionStr = `((batch * ${convInfo.inHeight} + xR) * ${convInfo.inWidth} + xC) * ${convInfo.inChannels} + d`;\n const flattenPositionStr = `(xR * ${convInfo.inWidth} + xC) * ${convInfo.inChannels} + d`;\n let initializationValue = '0.0';\n if (!isAvgPool) {\n // WebGL on Firefox Linux can't compile 1/0 so we do 1/eps.\n initializationValue = '-1.0 / 1e-20';\n }\n if (computePositions) {\n const compareOp = '>=';\n this.userCode = `\n const ivec2 strides = ivec2(${strideHeight}, ${strideWidth});\n const ivec2 pads = ivec2(${padTop}, ${padLeft});\n\n void main() {\n ivec4 coords = getOutputCoords();\n int batch = coords[0];\n int d = coords[3];\n\n ivec2 xRCCorner = coords.yz * strides - pads;\n int xRCorner = xRCCorner.x;\n int xCCorner = xRCCorner.y;\n\n // max/min x(?, ?, d) to get y(yR, yC, d).\n // ? 
= to be determined\n float minMaxValue = 0.0;\n float minMaxValueFound = 0.0;\n int minMaxPosition = 0;\n float avgValue = 0.0;\n\n for (int wR = 0; wR < ${effectiveFilterHeight};\n wR += ${dilationHeight}) {\n int xR = xRCorner + wR;\n\n if (xR < 0 || xR >= ${convInfo.inHeight}) {\n continue;\n }\n\n for (int wC = 0; wC < ${effectiveFilterWidth};\n wC += ${dilationWidth}) {\n int xC = xCCorner + wC;\n\n if (xC < 0 || xC >= ${convInfo.inWidth}) {\n continue;\n }\n\n float value = getX(batch, xR, xC, d);\n\n // If a min / max value has already been found, use it. If not,\n // use the current value.\n float currMinMaxValue = mix(\n value, minMaxValue, minMaxValueFound);\n if (value ${compareOp} currMinMaxValue) {\n minMaxValue = value;\n minMaxValueFound = 1.0;\n minMaxPosition = ${flattenPositions ? (includeBatchInIndex ? batchFlattenPositionStr :\n flattenPositionStr) :\n `wR * ${effectiveFilterWidth} + wC`};\n }\n }\n }\n setOutput(float(minMaxPosition));\n }\n `;\n return;\n }\n const compareOp = 'max';\n let returnValue = `${poolType}(${poolType}(${poolType}(` +\n 'minMaxValue[0], minMaxValue[1]), minMaxValue[2]), minMaxValue[3])';\n if (poolType === 'avg') {\n returnValue = `avgValue / count`;\n }\n const filterWidthNearestVec4 = Math.floor(filterWidth / 4) * 4;\n const filterWidthVec4Remainder = filterWidth % 4;\n const updateSnippet = `\n if (${isAvgPool}) {\n avgValue += dot(values, ones);\n } else {\n minMaxValue = ${compareOp}(values, minMaxValue);\n }\n `;\n this.userCode = `\n const ivec2 strides = ivec2(${strideHeight}, ${strideWidth});\n const ivec2 pads = ivec2(${padTop}, ${padLeft});\n const float initializationValue = ${initializationValue};\n const vec4 ones = vec4(1.0, 1.0, 1.0, 1.0);\n\n float count = 0.0;\n\n float getValue(int batch, int xR, int xC, int d) {\n if (xC < 0 || xC >= ${convInfo.inWidth}) {\n return initializationValue;\n }\n count += 1.0;\n return getX(batch, xR, xC, d);\n }\n\n void main() {\n ivec4 coords = getOutputCoords();\n int batch = coords[0];\n int d = coords[3];\n\n ivec2 xRCCorner = coords.yz * strides - pads;\n int xRCorner = xRCCorner.x;\n int xCCorner = xRCCorner.y;\n\n // max/min x(?, ?, d) to get y(yR, yC, d).\n // ? 
= to be determined\n vec4 minMaxValue = vec4(${initializationValue});\n float avgValue = 0.0;\n count = 0.0;\n\n for (int wR = 0; wR < ${effectiveFilterHeight};\n wR += ${dilationHeight}) {\n int xR = xRCorner + wR;\n\n if (xR < 0 || xR >= ${convInfo.inHeight}) {\n continue;\n }\n\n for (int wC = 0; wC < ${filterWidthNearestVec4}; wC += 4) {\n int xC = xCCorner + wC * ${dilationWidth};\n\n vec4 values = vec4(\n getValue(batch, xR, xC, d),\n getValue(batch, xR, xC + ${dilationWidth}, d),\n getValue(batch, xR, xC + 2 * ${dilationWidth}, d),\n getValue(batch, xR, xC + 3 * ${dilationWidth}, d)\n );\n\n ${updateSnippet}\n }\n\n int xC = xCCorner + ${filterWidthNearestVec4};\n if (${filterWidthVec4Remainder === 1}) {\n vec4 values = vec4(\n getValue(batch, xR, xC, d),\n initializationValue,\n initializationValue,\n initializationValue\n );\n\n ${updateSnippet}\n } else if (${filterWidthVec4Remainder === 2}) {\n vec4 values = vec4(\n getValue(batch, xR, xC, d),\n getValue(batch, xR, xC + ${dilationWidth}, d),\n initializationValue,\n initializationValue\n );\n\n ${updateSnippet}\n } else if (${filterWidthVec4Remainder === 3}) {\n vec4 values = vec4(\n getValue(batch, xR, xC, d),\n getValue(batch, xR, xC + ${dilationWidth}, d),\n getValue(batch, xR, xC + 2 * ${dilationWidth}, d),\n initializationValue\n );\n\n ${updateSnippet}\n }\n }\n setOutput(${returnValue});\n }\n `;\n }\n}\nexport class Pool3DProgram {\n constructor(convInfo, poolType, computePositions, flattenPositions = false, includeBatchInIndex = false) {\n this.variableNames = ['x'];\n if (poolType === 'avg' && computePositions) {\n throw new Error('Cannot compute positions for average pool.');\n }\n const filterWidth = convInfo.filterWidth;\n const strideDepth = convInfo.strideDepth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationDepth = convInfo.dilationDepth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterDepth = convInfo.effectiveFilterDepth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padFront = convInfo.padInfo.front;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n this.outputShape = convInfo.outShape;\n const isAvgPool = poolType === 'avg';\n let initializationValue = '0.0';\n if (!isAvgPool) {\n // WebGL on Firefox Linux can't compile 1/0 so we do 1/eps.\n initializationValue = '-1.0 / 1e-20';\n }\n if (computePositions) {\n const compareOp = '>=';\n this.userCode = `\n const ivec3 strides =\n ivec3(${strideDepth}, ${strideHeight}, ${strideWidth});\n const ivec3 pads = ivec3(${padFront}, ${padTop}, ${padLeft});\n\n void main() {\n ivec5 coords = getOutputCoords();\n int batch = coords.x;\n int ch = coords.u;\n\n ivec3 xCorner = ivec3(coords.y, coords.z, coords.w) * strides - pads;\n int xDCorner = xCorner.x;\n int xRCorner = xCorner.y;\n int xCCorner = xCorner.z;\n\n // max/min x(?, ?, ?, ch) to get y(yD, yR, yC, ch).\n // ? 
= to be determined\n float minMaxValue = 0.0;\n float minMaxValueFound = 0.0;\n int minMaxPosition = 0;\n\n for (int wD = 0; wD < ${effectiveFilterDepth};\n wD += ${dilationDepth}) {\n int xD = xDCorner + wD;\n\n if (xD < 0 || xD >= ${convInfo.inDepth}) {\n continue;\n }\n\n for (int wR = 0; wR < ${effectiveFilterHeight};\n wR += ${dilationHeight}) {\n int xR = xRCorner + wR;\n\n if (xR < 0 || xR >= ${convInfo.inHeight}) {\n continue;\n }\n\n for (int wC = 0; wC < ${effectiveFilterWidth};\n wC += ${dilationWidth}) {\n int xC = xCCorner + wC;\n\n if (xC < 0 || xC >= ${convInfo.inWidth}) {\n continue;\n }\n\n float value = getX(batch, xD, xR, xC, ch);\n\n // If a min / max value has already been found, use it. If not,\n // use the current value.\n float currMinMaxValue = mix(\n value, minMaxValue, minMaxValueFound);\n if (value ${compareOp} currMinMaxValue) {\n minMaxValue = value;\n minMaxValueFound = 1.0;\n minMaxPosition = ${flattenPositions ?\n (includeBatchInIndex ?\n `(((batch * ${convInfo.inDepth} + xD) * ${convInfo.inHeight} + xR) * ${convInfo.inWidth} + xC) * ${convInfo.inChannels} + ch` :\n `((xD * ${convInfo.inHeight} + xR) * ${convInfo.inWidth} + xC) * ${convInfo.inChannels} + ch`) :\n `wD * ${effectiveFilterHeight} * ${effectiveFilterWidth} +\n wR * ${effectiveFilterWidth} + wC`};\n }\n }\n }\n }\n setOutput(float(minMaxPosition));\n }\n `;\n return;\n }\n const compareOp = 'max';\n let returnValue = `${poolType}(${poolType}(${poolType}(` +\n 'minMaxValue[0], minMaxValue[1]), minMaxValue[2]), minMaxValue[3])';\n if (poolType === 'avg') {\n returnValue = `avgValue / count`;\n }\n const filterWidthNearestVec4 = Math.floor(filterWidth / 4) * 4;\n const filterWidthVec4Remainder = filterWidth % 4;\n const updateSnippet = `\n if (${isAvgPool}) {\n avgValue += dot(values, ones);\n } else {\n minMaxValue = ${compareOp}(values, minMaxValue);\n }\n `;\n this.userCode = `\n const ivec3 strides =\n ivec3(${strideDepth}, ${strideHeight}, ${strideWidth});\n const ivec3 pads = ivec3(${padFront}, ${padTop}, ${padLeft});\n const float initializationValue = ${initializationValue};\n const vec4 ones = vec4(1.0, 1.0, 1.0, 1.0);\n\n float count = 0.0;\n\n float getValue(int batch, int xD, int xR, int xC, int ch) {\n if (xC < 0 || xC >= ${convInfo.inWidth}) {\n return initializationValue;\n }\n count += 1.0;\n return getX(batch, xD, xR, xC, ch);\n }\n\n void main() {\n ivec5 coords = getOutputCoords();\n int batch = coords.x;\n int ch = coords.u;\n\n ivec3 xCorner = ivec3(coords.y, coords.z, coords.w) * strides - pads;\n int xDCorner = xCorner.x;\n int xRCorner = xCorner.y;\n int xCCorner = xCorner.z;\n\n // max/min x(?, ?, ?, d) to get y(yD, yR, yC, ch).\n // ? 
= to be determined\n vec4 minMaxValue = vec4(${initializationValue});\n float avgValue = 0.0;\n count = 0.0;\n\n for (int wD = 0; wD < ${effectiveFilterDepth};\n wD += ${dilationDepth}) {\n int xD = xDCorner + wD;\n\n if (xD < 0 || xD >= ${convInfo.inDepth}) {\n continue;\n }\n\n for (int wR = 0; wR < ${effectiveFilterHeight};\n wR += ${dilationHeight}) {\n int xR = xRCorner + wR;\n\n if (xR < 0 || xR >= ${convInfo.inHeight}) {\n continue;\n }\n\n for (int wC = 0; wC < ${filterWidthNearestVec4}; wC += 4) {\n int xC = xCCorner + wC * ${dilationWidth};\n\n vec4 values = vec4(\n getValue(batch, xD, xR, xC, ch),\n getValue(batch, xD, xR, xC + ${dilationWidth}, ch),\n getValue(batch, xD, xR, xC + 2 * ${dilationWidth}, ch),\n getValue(batch, xD, xR, xC + 3 * ${dilationWidth}, ch)\n );\n\n ${updateSnippet}\n }\n\n int xC = xCCorner + ${filterWidthNearestVec4};\n if (${filterWidthVec4Remainder === 1}) {\n vec4 values = vec4(\n getValue(batch, xD, xR, xC, ch),\n initializationValue,\n initializationValue,\n initializationValue\n );\n\n ${updateSnippet}\n } else if (${filterWidthVec4Remainder === 2}) {\n vec4 values = vec4(\n getValue(batch, xD, xR, xC, ch),\n getValue(batch, xD, xR, xC + ${dilationWidth}, ch),\n initializationValue,\n initializationValue\n );\n\n ${updateSnippet}\n } else if (${filterWidthVec4Remainder === 3}) {\n vec4 values = vec4(\n getValue(batch, xD, xR, xC, ch),\n getValue(batch, xD, xR, xC + ${dilationWidth}, ch),\n getValue(batch, xD, xR, xC + 2 * ${dilationWidth}, ch),\n initializationValue\n );\n\n ${updateSnippet}\n }\n }\n setOutput(${returnValue});\n }\n }\n `;\n }\n}\n//# sourceMappingURL=pool_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class ReduceProgram {\n constructor(reduceInfo, reduceType) {\n this.variableNames = ['x'];\n const { windowSize, batchSize, inSize, outSize } = reduceInfo;\n this.outputShape = [batchSize, outSize];\n let initializationValue = '0.0';\n let compareOp = ``;\n if (reduceType === 'prod') {\n initializationValue = '1.0';\n }\n else if (reduceType === 'min') {\n // WebGL on Firefox Linux can't compile 1/0 so we do 1/eps.\n initializationValue = '1.0 / 1e-20';\n compareOp = `min`;\n }\n else if (reduceType === 'max') {\n // WebGL on Firefox Linux can't compile 1/0 so we do 1/eps.\n initializationValue = '-1.0 / 1e-20';\n compareOp = `max`;\n }\n let returnValue = `${reduceType}(${reduceType}(${reduceType}(` +\n 'minMaxValue[0], minMaxValue[1]), minMaxValue[2]), minMaxValue[3])';\n if (reduceType === 'sum') {\n returnValue = `sumValue`;\n }\n else if (reduceType === 'prod') {\n returnValue = `prodValue`;\n }\n else if (reduceType === 'all') {\n returnValue = `allValue`;\n }\n else if (reduceType === 'any') {\n returnValue = `anyValue`;\n }\n const windowSizeNearestVec4 = Math.floor(windowSize / 4) * 4;\n const windowSizeVec4Remainder = windowSize % 4;\n 
let updateSnippet = `\n if (${reduceType === 'sum'}) {\n sumValue += dot(values, ones);\n } else if (${reduceType === 'prod'}) {\n vec2 tmp = vec2(values[0], values[1]) * vec2(values[2], values[3]);\n prodValue *= tmp[0] * tmp[1];\n } else {\n minMaxValue = ${compareOp}(values, minMaxValue);\n }\n `;\n let vecType = `vec4`;\n if (reduceType === 'all') {\n initializationValue = '1.0';\n updateSnippet = `\n bool reducedAllValue = all(values);\n float floatedReducedAllValue = float(reducedAllValue);\n allValue = float(allValue >= 1.0 && floatedReducedAllValue >= 1.0);\n `;\n vecType = `bvec4`;\n }\n else if (reduceType === 'any') {\n initializationValue = '0.0';\n updateSnippet = `\n bool reducedAnyValue = any(values);\n float floatedReducedAnyValue = float(reducedAnyValue);\n anyValue = float(anyValue >= 1.0 || floatedReducedAnyValue >= 1.0);\n `;\n vecType = `bvec4`;\n }\n let checkOutOfBounds = '';\n if (inSize % windowSize > 0) {\n checkOutOfBounds = `\n if (inIdx < 0 || inIdx >= ${inSize}) {\n return initializationValue;\n }\n `;\n }\n this.userCode = `\n const float initializationValue = ${initializationValue};\n const vec4 ones = vec4(1.0, 1.0, 1.0, 1.0);\n\n float getValue(int batch, int inIdx) {\n ${checkOutOfBounds}\n return getX(batch, inIdx);\n }\n\n void main() {\n ivec2 coords = getOutputCoords();\n int batch = coords[0];\n int outIdx = coords[1];\n int inOffset = outIdx * ${windowSize};\n\n vec4 minMaxValue = vec4(${initializationValue});\n float prodValue = 1.0;\n float sumValue = 0.0;\n float allValue = 1.0;\n float anyValue = 0.0;\n\n for (int i = 0; i < ${windowSizeNearestVec4}; i += 4) {\n int inIdx = inOffset + i;\n ${vecType} values = ${vecType}(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1),\n getValue(batch, inIdx + 2),\n getValue(batch, inIdx + 3)\n );\n\n ${updateSnippet}\n }\n\n int inIdx = inOffset + ${windowSizeNearestVec4};\n if (${windowSizeVec4Remainder === 1}) {\n ${vecType} values = ${vecType}(\n getValue(batch, inIdx),\n initializationValue,\n initializationValue,\n initializationValue\n );\n\n ${updateSnippet}\n } else if (${windowSizeVec4Remainder === 2}) {\n ${vecType} values = ${vecType}(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1),\n initializationValue,\n initializationValue\n );\n\n ${updateSnippet}\n } else if (${windowSizeVec4Remainder === 3}) {\n ${vecType} values = ${vecType}(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1),\n getValue(batch, inIdx + 2),\n initializationValue\n );\n\n ${updateSnippet}\n }\n setOutput(${returnValue});\n }\n `;\n }\n}\n//# sourceMappingURL=reduce_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as shader_util from './shader_compiler_util';\nexport class ReshapePackedProgram {\n constructor(outputShape, inputShape) {\n this.variableNames = ['A'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = outputShape;\n let mainLoop = ``;\n for (let i = 0; i < 4; i++) {\n let thisRC = `thisRC = rc;`;\n if (i % 2 === 1) {\n thisRC += `thisRC.z += 1;`;\n }\n if (i > 1) {\n thisRC += `thisRC.y += 1;`;\n }\n mainLoop += `\n ${thisRC}\n ${i > 0 ? `if(thisRC.y < rows && thisRC.z < cols){` : ''}\n int flatIndex = getFlatIndex(thisRC);\n\n ivec3 inputRC = inputCoordsFromReshapedOutCoords(flatIndex);\n vec2 inputRCInnerDims = vec2(float(inputRC.y),float(inputRC.z));\n\n result[${i}] =\n getChannel(getA(inputRC.x, inputRC.y, inputRC.z), inputRCInnerDims);\n ${i > 0 ? '}' : ''}\n `;\n }\n this.userCode = `\n ${getReshapedInputCoords(inputShape)}\n ${shader_util.getFlatIndexFrom3D(outputShape)}\n\n void main() {\n ivec3 rc = getOutputCoords();\n\n vec4 result = vec4(0.);\n\n ivec3 thisRC;\n int rows = ${outputShape[1]};\n int cols = ${outputShape[2]};\n\n ${mainLoop}\n\n setOutput(result);\n }\n `;\n }\n}\nfunction getReshapedInputCoords(shape) {\n const coordsFromIndexSnippet = shader_util.getLogicalCoordinatesFromFlatIndex(['r', 'c', 'd'], shape);\n return `\n ivec3 inputCoordsFromReshapedOutCoords(int index) {\n ${coordsFromIndexSnippet}\n return ivec3(r, c, d);\n }\n `;\n}\n//# sourceMappingURL=reshape_packed_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class ResizeBilinearBackpropProgram {\n constructor(dy, x, alignCorners) {\n this.variableNames = ['dy'];\n this.outputShape = [];\n this.outputShape = x.shape;\n const [, xHeight, xWidth,] = x.shape;\n const [, yHeight, yWidth] = dy.shape;\n // In the backwards pass, we want to find the pixels that were generated for\n // each pixel in the input image the forward pass and add the corresponding\n // coefficient from dy to the gradient (with some interpolation).\n const effectiveXSize = [\n (alignCorners && yHeight > 1) ? xHeight - 1 : xHeight,\n (alignCorners && yWidth > 1) ? xWidth - 1 : xWidth\n ];\n const effectiveYSize = [\n (alignCorners && yHeight > 1) ? 
yHeight - 1 : yHeight,\n (alignCorners && yWidth > 1) ? yWidth - 1 : yWidth\n ];\n const heightScale = effectiveXSize[0] / effectiveYSize[0];\n const widthScale = effectiveXSize[1] / effectiveYSize[1];\n const invHeightScale = 1 / heightScale;\n const invWidthScale = 1 / widthScale;\n // This defines the size of the window of values around a particular\n // index in dy that we want to search for contributions to dx.\n const winHeight = (Math.ceil(invHeightScale) * 2) + 2;\n const winWidth = (Math.ceil(invWidthScale) * 2) + 2;\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int d = coords[3];\n int r = coords[1];\n int c = coords[2];\n\n float accumulator = 0.0;\n\n const float heightScale = float(${heightScale});\n const float widthScale = float(${widthScale});\n\n const float invHeightScale = float(${invHeightScale});\n const float invWidthScale = float(${invWidthScale});\n\n const int winHeight = int(${winHeight});\n const int winWidth = int(${winWidth});\n\n // Compute bounds for where in dy we will look\n float startRLerp = floor(float(r) * invHeightScale);\n int startDyR = int(startRLerp - float(winHeight / 2));\n\n float startCLerp = floor(float(c) * invWidthScale);\n int startDyC = int(startCLerp - float(winWidth / 2));\n\n // Loop over dy\n for (int dyROffset = 0; dyROffset < winHeight; dyROffset++) {\n int dyR = dyROffset + startDyR;\n\n // Guard against the window exceeding the bounds of dy\n if (dyR < 0 || dyR >= ${yHeight}) {\n continue;\n }\n\n for (int dyCOffset = 0; dyCOffset < winWidth; dyCOffset++) {\n int dyC = dyCOffset + startDyC;\n\n // Guard against the window exceeding the bounds of dy\n if (dyC < 0 || dyC >= ${yWidth}) {\n continue;\n }\n\n float dxR = float(dyR) * heightScale;\n int topDxRIndex = int(floor(dxR));\n int bottomDxRIndex = int(min(ceil(dxR), ${xHeight - 1}.0));\n float dxRLerp = dxR - float(topDxRIndex);\n float inverseDxRLerp = 1.0 - dxRLerp;\n\n float dxC = float(dyC) * widthScale;\n int leftDxCIndex = int(floor(dxC));\n int rightDxCIndex = int(min(ceil(dxC), ${xWidth - 1}.0));\n float dxCLerp = dxC - float(leftDxCIndex);\n float inverseDxCLerp = 1.0 - dxCLerp;\n\n if (r == topDxRIndex && c == leftDxCIndex) {\n // topLeft\n accumulator +=\n getDy(b, dyR, dyC, d) * inverseDxRLerp * inverseDxCLerp;\n }\n\n if (r == topDxRIndex && c == rightDxCIndex) {\n // topRight\n accumulator += getDy(b, dyR, dyC, d) * inverseDxRLerp * dxCLerp;\n }\n\n if (r == bottomDxRIndex && c == leftDxCIndex) {\n // bottomLeft\n accumulator += getDy(b, dyR, dyC, d) * dxRLerp * inverseDxCLerp;\n }\n\n if (r == bottomDxRIndex && c == rightDxCIndex) {\n // bottomRight\n accumulator += getDy(b, dyR, dyC, d) * dxRLerp * dxCLerp;\n }\n }\n }\n // End loop over dy\n\n setOutput(accumulator);\n }\n `;\n }\n}\n//# sourceMappingURL=resize_bilinear_backprop_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class ResizeBilinearProgram {\n constructor(inputShape, newHeight, newWidth, alignCorners) {\n this.variableNames = ['A'];\n this.outputShape = [];\n const [batch, oldHeight, oldWidth, depth] = inputShape;\n this.outputShape = [batch, newHeight, newWidth, depth];\n const effectiveInSize = [\n (alignCorners && newHeight > 1) ? oldHeight - 1 : oldHeight,\n (alignCorners && newWidth > 1) ? oldWidth - 1 : oldWidth\n ];\n const effectiveOutSize = [\n (alignCorners && newHeight > 1) ? newHeight - 1 : newHeight,\n (alignCorners && newWidth > 1) ? newWidth - 1 : newWidth\n ];\n this.userCode = `\n const vec2 effectiveInputOverOutputRatioRC = vec2(\n ${effectiveInSize[0] / effectiveOutSize[0]},\n ${effectiveInSize[1] / effectiveOutSize[1]});\n const vec2 inputShapeRC = vec2(${oldHeight}.0, ${oldWidth}.0);\n\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int d = coords[3];\n ivec2 yRC = coords.yz;\n\n // Fractional source index.\n vec2 sourceFracIndexRC = vec2(yRC) * effectiveInputOverOutputRatioRC;\n\n // Compute the four integer indices.\n ivec2 sourceFloorRC = ivec2(sourceFracIndexRC);\n ivec2 sourceCeilRC = ivec2(\n min(inputShapeRC - 1.0, ceil(sourceFracIndexRC)));\n\n float topLeft = getA(b, sourceFloorRC.x, sourceFloorRC.y, d);\n float bottomLeft = getA(b, sourceCeilRC.x, sourceFloorRC.y, d);\n float topRight = getA(b, sourceFloorRC.x, sourceCeilRC.y, d);\n float bottomRight = getA(b, sourceCeilRC.x, sourceCeilRC.y, d);\n\n vec2 fracRC = sourceFracIndexRC - vec2(sourceFloorRC);\n\n float top = topLeft + (topRight - topLeft) * fracRC.y;\n float bottom = bottomLeft + (bottomRight - bottomLeft) * fracRC.y;\n float newValue = top + (bottom - top) * fracRC.x;\n\n setOutput(newValue);\n }\n `;\n }\n}\n//# sourceMappingURL=resize_bilinear_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class ResizeBilinearPackedProgram {\n constructor(inputShape, newHeight, newWidth, alignCorners) {\n this.variableNames = ['A'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = [];\n const [batch, oldHeight, oldWidth, depth] = inputShape;\n this.outputShape = [batch, newHeight, newWidth, depth];\n const effectiveInSize = [\n (alignCorners && newHeight > 1) ? oldHeight - 1 : oldHeight,\n (alignCorners && newWidth > 1) ? oldWidth - 1 : oldWidth\n ];\n const effectiveOutSize = [\n (alignCorners && newHeight > 1) ? newHeight - 1 : newHeight,\n (alignCorners && newWidth > 1) ? newWidth - 1 : newWidth\n ];\n this.userCode = `\n const vec3 effectiveInputOverOutputRatioRC = vec3(\n ${effectiveInSize[0] / effectiveOutSize[0]},\n ${effectiveInSize[1] / effectiveOutSize[1]},\n ${effectiveInSize[1] / effectiveOutSize[1]});\n const vec3 inputShapeRC = vec3(${oldHeight}.0, ${oldWidth}.0,\n ${oldWidth}.0);\n\n float getAValue(int b, int r, int c, int d) {\n return getChannel(getA(b, r, c, d), vec2(c, d));\n }\n\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int d = coords[3];\n // Calculate values for next column in yRC.z.\n ivec3 yRC = coords.yzz + ivec3(0, 0, 1);\n\n // Fractional source index.\n vec3 sourceFracIndexRC = vec3(yRC) * effectiveInputOverOutputRatioRC;\n\n // Compute the four integer indices.\n ivec3 sourceFloorRC = ivec3(sourceFracIndexRC);\n ivec3 sourceCeilRC = ivec3(\n min(inputShapeRC - 1.0, ceil(sourceFracIndexRC)));\n\n // Should we calculate next column and row elements in 2x2 packed cell.\n bool hasNextCol = d < ${depth - 1};\n bool hasNextRow = coords.z < ${newWidth - 1};\n\n // In parallel, construct four corners for all four components in\n // packed 2x2 cell.\n vec4 topLeft = vec4(\n getAValue(b, sourceFloorRC.x, sourceFloorRC.y, d),\n hasNextCol ? getAValue(b, sourceFloorRC.x, sourceFloorRC.y, d + 1)\n : 0.0,\n hasNextRow ? getAValue(b, sourceFloorRC.x, sourceFloorRC.z, d)\n : 0.0,\n (hasNextRow && hasNextCol) ?\n getAValue(b, sourceFloorRC.x, sourceFloorRC.z, d + 1) : 0.0);\n\n vec4 bottomLeft = vec4(\n getAValue(b, sourceCeilRC.x, sourceFloorRC.y, d),\n hasNextCol ? getAValue(b, sourceCeilRC.x, sourceFloorRC.y, d + 1)\n : 0.0,\n hasNextRow ? getAValue(b, sourceCeilRC.x, sourceFloorRC.z, d)\n : 0.0,\n (hasNextRow && hasNextCol) ?\n getAValue(b, sourceCeilRC.x, sourceFloorRC.z, d + 1) : 0.0);\n\n vec4 topRight = vec4(\n getAValue(b, sourceFloorRC.x, sourceCeilRC.y, d),\n hasNextCol ? getAValue(b, sourceFloorRC.x, sourceCeilRC.y, d + 1)\n : 0.0,\n hasNextRow ? getAValue(b, sourceFloorRC.x, sourceCeilRC.z, d)\n : 0.0,\n (hasNextRow && hasNextCol) ?\n getAValue(b, sourceFloorRC.x, sourceCeilRC.z, d + 1) : 0.0);\n\n vec4 bottomRight = vec4(\n getAValue(b, sourceCeilRC.x, sourceCeilRC.y, d),\n hasNextCol ? 
getAValue(b, sourceCeilRC.x, sourceCeilRC.y, d + 1)\n : 0.0,\n hasNextRow ? getAValue(b, sourceCeilRC.x, sourceCeilRC.z, d)\n : 0.0,\n (hasNextRow && hasNextCol) ?\n getAValue(b, sourceCeilRC.x, sourceCeilRC.z, d + 1) : 0.0);\n\n vec3 fracRC = sourceFracIndexRC - vec3(sourceFloorRC);\n\n vec4 top = mix(topLeft, topRight, fracRC.yyzz);\n vec4 bottom = mix(bottomLeft, bottomRight, fracRC.yyzz);\n vec4 newValue = mix(top, bottom, fracRC.x);\n\n setOutput(newValue);\n }\n `;\n }\n}\n//# sourceMappingURL=resize_bilinear_packed_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class ResizeNearestNeigborBackpropProgram {\n constructor(dy, x, alignCorners) {\n this.variableNames = ['dy'];\n this.outputShape = [];\n this.outputShape = x.shape;\n const [, xHeight, xWidth,] = x.shape;\n const [, yHeight, yWidth] = dy.shape;\n // In the backwards pass, we want to find the pixels that were generated for\n // each pixel in the input image the forward pass and add the corresponding\n // coefficient from dy to the gradient (with some interpolation).\n const effectiveXSize = [\n (alignCorners && yHeight > 1) ? xHeight - 1 : xHeight,\n (alignCorners && yWidth > 1) ? xWidth - 1 : xWidth\n ];\n const effectiveYSize = [\n (alignCorners && yHeight > 1) ? yHeight - 1 : yHeight,\n (alignCorners && yWidth > 1) ? 
yWidth - 1 : yWidth\n ];\n const heightScale = effectiveXSize[0] / effectiveYSize[0];\n const widthScale = effectiveXSize[1] / effectiveYSize[1];\n const invHeightScale = 1 / heightScale;\n const invWidthScale = 1 / widthScale;\n // This defines the size of the window of values around a particular\n // index in dy that we want to search for contributions to dx.\n const winHeight = (Math.ceil(invHeightScale) * 2) + 2;\n const winWidth = (Math.ceil(invWidthScale) * 2) + 2;\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int d = coords[3];\n int r = coords[1];\n int c = coords[2];\n\n float accumulator = 0.0;\n\n const float heightScale = float(${heightScale});\n const float widthScale = float(${widthScale});\n\n const float invHeightScale = float(${invHeightScale});\n const float invWidthScale = float(${invWidthScale});\n\n const int winHeight = int(${winHeight});\n const int winWidth = int(${winWidth});\n\n // Compute bounds for where in dy we will look\n float startRLerp = floor(float(r) * invHeightScale);\n int startDyR = int(floor(startRLerp - float(winHeight / 2)));\n\n float startCLerp = floor(float(c) * invWidthScale);\n int startDyC = int(floor(startCLerp - float(winWidth / 2)));\n\n // Loop over dy\n for (int dyROffset = 0; dyROffset < winHeight; dyROffset++) {\n int dyR = dyROffset + startDyR;\n\n // Guard against the window exceeding the bounds of dy\n if (dyR < 0 || dyR >= ${yHeight}) {\n continue;\n }\n\n for (int dyCOffset = 0; dyCOffset < winWidth; dyCOffset++) {\n int dyC = dyCOffset + startDyC;\n\n // Guard against the window exceeding the bounds of dy\n if (dyC < 0 || dyC >= ${yWidth}) {\n continue;\n }\n\n float sourceFracRow =\n float(${effectiveXSize[0]}) *\n (float(dyR) / float(${effectiveYSize[0]}));\n\n float sourceFracCol =\n float(${effectiveXSize[1]}) *\n (float(dyC) / float(${effectiveYSize[1]}));\n\n int sourceNearestRow = int(min(\n float(int(${xHeight}) - 1),\n ${alignCorners} ? float(round(sourceFracRow)) :\n float(floor(sourceFracRow))));\n\n int sourceNearestCol = int(min(\n float(int(${xWidth}) - 1),\n ${alignCorners} ? float(round(sourceFracCol)) :\n float(floor(sourceFracCol))));\n\n if (r == sourceNearestRow && c == sourceNearestCol) {\n accumulator += getDy(b, dyR, dyC, d);\n }\n }\n }\n // End loop over dy\n\n setOutput(accumulator);\n }\n `;\n }\n}\n//# sourceMappingURL=resize_nearest_neighbor_backprop_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class ResizeNearestNeighborProgram {\n constructor(inputShape, newHeight, newWidth, alignCorners) {\n this.variableNames = ['A'];\n this.outputShape = [];\n const [batch, oldHeight, oldWidth, depth] = inputShape;\n this.outputShape = [batch, newHeight, newWidth, depth];\n const effectiveInSize = [\n (alignCorners && newHeight > 1) ? 
oldHeight - 1 : oldHeight,\n (alignCorners && newWidth > 1) ? oldWidth - 1 : oldWidth\n ];\n const effectiveOutSize = [\n (alignCorners && newHeight > 1) ? newHeight - 1 : newHeight,\n (alignCorners && newWidth > 1) ? newWidth - 1 : newWidth\n ];\n // When align corners is false, we rounds the value with floor.\n const roundBase = alignCorners ? '0.5' : '0.0';\n this.userCode = `\n const vec2 effectiveInputOverOutputRatioRC = vec2(\n ${effectiveInSize[0] / effectiveOutSize[0]},\n ${effectiveInSize[1] / effectiveOutSize[1]});\n const vec2 inputShapeRC = vec2(${oldHeight}.0, ${oldWidth}.0);\n\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int d = coords[3];\n ivec2 yRC = coords.yz;\n\n // Fractional source index.\n vec2 sourceFracIndexRC = vec2(yRC) * effectiveInputOverOutputRatioRC;\n\n // Compute the coordinators of nearest neighbor point.\n ivec2 sourceNearestRC = ivec2(\n min(inputShapeRC - 1.0, floor(sourceFracIndexRC + ${roundBase})));\n\n float newValue = getA(b, sourceNearestRC.x, sourceNearestRC.y, d);\n\n setOutput(newValue);\n }\n `;\n }\n}\n//# sourceMappingURL=resize_nearest_neighbor_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getCoordsDataType } from './shader_compiler';\nexport class ReverseProgram {\n constructor(xShape, axis) {\n this.variableNames = ['x'];\n const rank = xShape.length;\n if (rank > 4) {\n throw new Error(`WebGL backend: Reverse of rank-${rank} tensor is not yet supported`);\n }\n this.outputShape = xShape;\n if (rank === 1) {\n this.userCode = `\n void main() {\n int coord = getOutputCoords();\n setOutput(getX(${xShape[0]} - coord - 1));\n }\n `;\n return;\n }\n const getInCoord = (i) => {\n if (axis.indexOf(i) !== -1 && xShape[i] !== 1) {\n return `${xShape[i]} - coords[${i}] - 1`;\n }\n return `coords[${i}]`;\n };\n const inCoords = xShape.map((_, i) => getInCoord(i)).join(',');\n const type = getCoordsDataType(rank);\n this.userCode = `\n void main() {\n ${type} coords = getOutputCoords();\n setOutput(getX(${inCoords}));\n }\n `;\n }\n}\n//# sourceMappingURL=reverse_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getChannels } from './packing_util';\nimport { getCoordsDataType } from './shader_compiler';\nexport class ReversePackedProgram {\n constructor(xShape, axis) {\n this.variableNames = ['x'];\n this.packedInputs = true;\n this.packedOutput = true;\n const rank = xShape.length;\n if (rank > 4) {\n throw new Error(`WebGL backend: Reverse of rank-${rank} tensor is not yet supported`);\n }\n this.outputShape = xShape;\n const channels = getChannels('rc', rank);\n const nextColumn = `${channels[rank - 1]} + 1 < ${this.outputShape[rank - 1]}`;\n const nextRow = `${channels[rank - 2]} + 1 < ${this.outputShape[rank - 2]}`;\n const type = getCoordsDataType(rank);\n if (rank === 1) {\n this.userCode = `\n void main(){\n int rc = getOutputCoords();\n vec4 result = vec4(0.);\n result.r = getChannel(getX(${xShape[0]} - rc - 1),\n ${xShape[0]} - rc - 1);\n if(${nextColumn}){\n result.g = getChannel(getX(${xShape[0]} - (rc + 1) - 1),\n ${xShape[0]} - (rc + 1) - 1);\n }\n setOutput(result);\n }\n `;\n }\n else {\n this.userCode = `\n void main() {\n ${type} rc = getOutputCoords();\n vec4 result = vec4(0.);\n result.r = ${getR(channels.slice())};\n if(${nextColumn}){\n result.g = ${getG(channels.slice())};\n }\n if(${nextRow}) {\n result.b = ${getB(channels.slice())};\n if(${nextColumn}) {\n result.a = ${getA(channels.slice())};\n }\n }\n setOutput(result);\n }\n `;\n }\n function getR(channels) {\n return getChannel(channels);\n }\n function getG(channels) {\n channels[rank - 1] = '(' + channels[rank - 1] + ` + 1)`;\n return getChannel(channels);\n }\n function getB(channels) {\n channels[rank - 2] = '(' + channels[rank - 2] + ` + 1)`;\n return getChannel(channels);\n }\n function getA(channels) {\n channels[rank - 1] = '(' + channels[rank - 1] + ` + 1)`;\n channels[rank - 2] = '(' + channels[rank - 2] + ` + 1)`;\n return getChannel(channels);\n }\n function getChannel(channels) {\n const inCoordsArray = xShape.map((_, i) => getInCoord(i, channels));\n const inCoords = inCoordsArray.join(',');\n const innerDims = inCoordsArray.slice(-2).join(',');\n return `getChannel(getX(${inCoords}), vec2(${innerDims}))`;\n }\n function getInCoord(i, channels1) {\n if (axis.indexOf(i) !== -1 && xShape[i] !== 1) {\n return `${xShape[i]} - ${channels1[i]} - 1`;\n }\n else {\n return `${channels1[i]}`;\n }\n }\n }\n}\n//# sourceMappingURL=reverse_packed_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getCoordsDataType } from './shader_compiler';\nexport class ScatterProgram {\n constructor(updateSize, sliceDim, indicesRank, updatesRank, strides, shape, summingDupeIndex = true) {\n this.variableNames = ['updates', 'indices', 'defaultValue'];\n this.outputShape = shape;\n const stridesType = getCoordsDataType(strides.length);\n const dtype = getCoordsDataType(shape.length);\n let indicesString = '';\n if (indicesRank === 1) {\n indicesString = 'i';\n }\n else if (indicesRank === 2) {\n indicesString = 'i, j';\n }\n const indicesSnippet = `getIndices(${indicesString})`;\n let updatesString = '';\n if (updatesRank === 1) {\n updatesString = 'i';\n }\n else if (updatesRank === 2) {\n updatesString = 'i, coords[1]';\n }\n const updatesSnippet = `getUpdates(${updatesString})`;\n const strideString = sliceDim > 1 ? 'strides[j]' : 'strides';\n this.userCode = `\n ${stridesType} strides = ${stridesType}(${strides});\n\n void main() {\n ${dtype} coords = getOutputCoords();\n float sum = 0.0;\n bool found = false;\n for (int i = 0; i < ${updateSize}; i++) {\n int flattenedIndex = 0;\n for (int j = 0; j < ${sliceDim}; j++) {\n int index = round(${indicesSnippet});\n flattenedIndex += index * ${strideString};\n }\n if (flattenedIndex == coords[0]) {\n sum += ${updatesSnippet};\n found = true;\n }\n }\n setOutput(mix(getDefaultValue(), sum, float(found)));\n }\n `;\n }\n}\n//# sourceMappingURL=scatter_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class SegmentOpProgram {\n constructor(segOpInfo, segOpType) {\n this.variableNames = ['x', 'segmentIds'];\n const windowSize = segOpInfo.windowSize;\n const batchSize = segOpInfo.batchSize;\n const inSize = segOpInfo.inSize;\n const numSegments = segOpInfo.numSegments;\n const outSize = numSegments * Math.ceil(inSize / windowSize);\n this.outputShape = [batchSize, outSize];\n const initializationValue = '0.0';\n const returnValue = `sumValue`;\n const windowSizeNearestVec4 = Math.floor(windowSize / 4) * 4;\n const windowSizeVec4Remainder = windowSize % 4;\n const updateSnippet = `\n sumValue += dot(values, segFilter);\n `;\n let checkValueOutOfBounds = '';\n if (inSize % windowSize > 0) {\n checkValueOutOfBounds = `\n if (inIdx < 0 || inIdx >= ${inSize}) {\n return initializationValue;\n }\n `;\n }\n let checkSegmentIdOutOfBounds = '';\n if (inSize % windowSize > 0) {\n checkSegmentIdOutOfBounds = `\n if (inIdx < 0 || inIdx >= ${inSize}) {\n return -1.0;\n }\n `;\n }\n this.userCode = `\n const float initializationValue = ${initializationValue};\n\n float getValue(int batch, int inIdx) {\n ${checkValueOutOfBounds}\n return getX(batch, inIdx);\n }\n\n float getSegmentIdAtIndex(int inIdx) {\n ${checkSegmentIdOutOfBounds}\n return getSegmentIds(inIdx);\n }\n\n void main() {\n ivec2 coords = getOutputCoords();\n int batch = coords[0];\n int outIdx = coords[1];\n int inOffset = int(floor(float(outIdx) / float(\n ${numSegments})) * float(${windowSize}));\n int currentSeg = int(mod(float(outIdx), float(${numSegments})));\n\n float sumValue = 0.0;\n\n for (int i = 0; i < ${windowSizeNearestVec4}; i += 4) {\n int inIdx = inOffset + i;\n vec4 values = vec4(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1),\n getValue(batch, inIdx + 2),\n getValue(batch, inIdx + 3)\n );\n\n vec4 segFilter = vec4(\n int(getSegmentIdAtIndex(inIdx)) == currentSeg ? 1 : 0,\n int(getSegmentIdAtIndex(inIdx + 1)) == currentSeg ? 1 : 0,\n int(getSegmentIdAtIndex(inIdx + 2)) == currentSeg ? 1 : 0,\n int(getSegmentIdAtIndex(inIdx + 3)) == currentSeg ? 1 : 0\n );\n\n ${updateSnippet}\n }\n\n int inIdx = inOffset + ${windowSizeNearestVec4};\n if (${windowSizeVec4Remainder === 1}) {\n vec4 values = vec4(\n getValue(batch, inIdx),\n initializationValue,\n initializationValue,\n initializationValue\n );\n\n int inIdxSeg = int(getSegmentIdAtIndex(inIdx));\n\n vec4 segFilter = vec4(\n int(getSegmentIdAtIndex(inIdx)) == currentSeg ? 1 : 0,\n 0,\n 0,\n 0\n );\n\n ${updateSnippet}\n } else if (${windowSizeVec4Remainder === 2}) {\n vec4 values = vec4(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1),\n initializationValue,\n initializationValue\n );\n\n vec4 segFilter = vec4(\n int(getSegmentIdAtIndex(inIdx)) == currentSeg ? 1 : 0,\n int(getSegmentIdAtIndex(inIdx + 1)) == currentSeg ? 
1 : 0,\n 0,\n 0\n );\n\n ${updateSnippet}\n } else if (${windowSizeVec4Remainder === 3}) {\n vec4 values = vec4(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1),\n getValue(batch, inIdx + 2),\n initializationValue\n );\n\n vec4 segFilter = vec4(\n int(getSegmentIdAtIndex(inIdx)) == currentSeg ? 1 : 0,\n int(getSegmentIdAtIndex(inIdx + 1)) == currentSeg ? 1 : 0,\n int(getSegmentIdAtIndex(inIdx + 2)) == currentSeg ? 1 : 0,\n 0\n );\n\n ${updateSnippet}\n }\n setOutput(${returnValue});\n }\n `;\n }\n}\n//# sourceMappingURL=segment_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getCoordsDataType } from './shader_compiler';\nexport class SelectProgram {\n constructor(cRank, shape, rank) {\n this.variableNames = ['c', 'a', 'b'];\n this.outputShape = shape;\n let cCoords;\n let abCoords;\n if (rank > 4) {\n throw Error(`Where for rank ${rank} is not yet supported`);\n }\n if (rank === 1) {\n abCoords = `resRC`;\n cCoords = `resRC`;\n }\n else {\n const currentCoords = ['resRC.x', 'resRC.y', 'resRC.z', 'resRC.w'];\n const cCoordVars = [];\n const abCoordVars = [];\n for (let i = 0; i < shape.length; i++) {\n abCoordVars.push(`${currentCoords[i]}`);\n if (i < cRank) {\n cCoordVars.push(`${currentCoords[i]}`);\n }\n }\n cCoords = cCoordVars.join();\n abCoords = abCoordVars.join();\n }\n const dtype = getCoordsDataType(rank);\n this.userCode = `\n void main() {\n ${dtype} resRC = getOutputCoords();\n float cVal = getC(${cCoords});\n if (cVal >= 1.0) {\n setOutput(getA(${abCoords}));\n } else {\n setOutput(getB(${abCoords}));\n }\n }\n `;\n }\n}\n//# sourceMappingURL=select_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getCoordsDataType } from './shader_compiler';\nexport class SliceProgram {\n constructor(destSize) {\n this.variableNames = ['source'];\n this.outputShape = destSize;\n this.rank = destSize.length;\n const dtype = getCoordsDataType(this.rank);\n const uniformPart = `uniform int start[${this.rank}];`;\n const sourceCoords = getCoords(this.rank);\n let body;\n const coordSum = destSize.map((_, i) => {\n return `sourceLoc.${coords[i]} = start[${i}] + coords.${coords[i]};`;\n });\n body = `\n ${dtype} sourceLoc;\n ${dtype} coords = getOutputCoords();\n ${coordSum.join('\\n')}\n `;\n this.userCode = `\n ${uniformPart}\n void main() {\n ${body}\n setOutput(getSource(${sourceCoords}));\n }\n `;\n }\n getCustomSetupFunc(start) {\n if (start.length !== this.rank) {\n throw Error(`The rank (${this.rank}) of the program must match the ` +\n `length of start (${start.length})`);\n }\n return (gpgpu, webGLProgram) => {\n if (this.startLoc == null) {\n this.startLoc = gpgpu.getUniformLocationNoThrow(webGLProgram, 'start');\n if (this.startLoc == null) {\n // This means the compiler has optimized and realized it doesn't need\n // the uniform.\n return;\n }\n }\n gpgpu.gl.uniform1iv(this.startLoc, start);\n };\n }\n}\nconst coords = ['x', 'y', 'z', 'w', 'u', 'v'];\nfunction getCoords(rank) {\n if (rank === 1) {\n return 'sourceLoc';\n }\n else if (rank <= 6) {\n return coords.slice(0, rank).map(x => 'sourceLoc.' + x).join(',');\n }\n else {\n throw Error(`Slicing for rank ${rank} is not yet supported`);\n }\n}\n//# sourceMappingURL=slice_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getChannels } from './packing_util';\nimport { getCoordsDataType } from './shader_compiler';\nexport class SlicePackedProgram {\n constructor(destSize) {\n this.variableNames = ['source'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = destSize;\n this.rank = destSize.length;\n const dtype = getCoordsDataType(this.rank);\n const coords = getChannels('coords', this.rank);\n const sourceLoc = getChannels('sourceLoc', this.rank);\n const innerDims = this.rank === 1 ? 
'sourceLoc' : `vec2(${sourceLoc.slice(-2).join()})`;\n const getChannel = `getChannel(getSource(${sourceLoc.join()}), ${innerDims})`;\n const upperRow = `\n result.x = ${getChannel};\n if (++${coords[this.rank - 1]} < ${destSize[this.rank - 1]}) {\n ++${sourceLoc[this.rank - 1]};\n result.y = ${getChannel};\n --${sourceLoc[this.rank - 1]};\n }\n `;\n const lowerRow = this.rank === 1 ? '' : `\n --${coords[this.rank - 1]};\n if (++${coords[this.rank - 2]} < ${destSize[this.rank - 2]}) {\n ++${sourceLoc[this.rank - 2]};\n result.z = ${getChannel};\n if (++${coords[this.rank - 1]} < ${destSize[this.rank - 1]}) {\n ++${sourceLoc[this.rank - 1]};\n result.w = ${getChannel};\n }\n }\n `;\n const sourceLocSetup = this.rank <= 4 ?\n `sourceLoc = coords +\n ${dtype}(${destSize.map((_, i) => `start[${i}]`).join()});` :\n destSize.map((_, i) => `${sourceLoc[i]} = ${coords[i]} + start[${i}];`)\n .join('\\n');\n this.userCode = `\n uniform int start[${this.rank}];\n void main() {\n ${dtype} coords = getOutputCoords();\n ${dtype} sourceLoc;\n ${sourceLocSetup}\n vec4 result = vec4(0.);\n ${upperRow}\n ${lowerRow}\n setOutput(result);\n }\n `;\n }\n getCustomSetupFunc(start) {\n if (start.length !== this.rank) {\n throw Error(`The rank (${this.rank}) of the program must match the ` +\n `length of start (${start.length})`);\n }\n return (gpgpu, webGLProgram) => {\n if (this.startLoc == null) {\n this.startLoc = gpgpu.getUniformLocationNoThrow(webGLProgram, 'start');\n if (this.startLoc == null) {\n // This means the compiler has optimized and realized it doesn't need\n // the uniform.\n return;\n }\n }\n gpgpu.gl.uniform1iv(this.startLoc, start);\n };\n }\n}\n//# sourceMappingURL=slice_packed_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getCoordsDataType } from './shader_compiler';\nexport class StridedSliceProgram {\n constructor(begin, strides, size) {\n this.variableNames = ['x'];\n this.outputShape = size;\n const rank = size.length;\n const inputDtype = getCoordsDataType(size.length);\n const dtype = getCoordsDataType(size.length);\n let newCoords = '';\n if (rank === 1) {\n newCoords = 'coords * strides + begin';\n }\n else {\n let outputAxis = 0;\n newCoords =\n size.map((_, i) => {\n outputAxis++;\n return size.length === 1 ?\n `coords * strides[${i}] + begin[${i}]` :\n `coords[${outputAxis - 1}] * strides[${i}] + begin[${i}]`;\n })\n .join(',');\n }\n this.userCode = `\n ${inputDtype} begin = ${inputDtype}(${begin});\n ${inputDtype} strides = ${inputDtype}(${strides});\n\n void main() {\n ${dtype} coords = getOutputCoords();\n setOutput(getX(${newCoords}));\n }\n `;\n }\n}\n//# sourceMappingURL=strided_slice_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from '@tensorflow/tfjs-core';\nimport { getInternalFormatForFloat16MatrixTexture, getInternalFormatForFloat16PackedMatrixTexture, getInternalFormatForFloat32MatrixTexture, getInternalFormatForPackedMatrixTexture, getInternalFormatForUnsignedBytesMatrixTexture } from './gpgpu_util';\nimport { getPackedMatrixTextureShapeWidthHeight, getUnpackedMatrixTextureShapeWidthHeight, PhysicalTextureType, TextureUsage } from './tex_util';\nexport class TextureManager {\n constructor(gpgpu) {\n this.gpgpu = gpgpu;\n this.numUsedTextures = 0;\n this.numFreeTextures = 0;\n this._numBytesAllocated = 0;\n this._numBytesFree = 0; // How many bytes that have been allocated\n // are available for reuse.\n this.freeTextures = {};\n this.logEnabled = false;\n this.usedTextures = {};\n }\n acquireTexture(shapeRC, usage, isPacked) {\n const physicalTexType = getPhysicalFromLogicalTextureType(usage, isPacked);\n const shapeKey = getKeyFromTextureShape(shapeRC, physicalTexType, isPacked);\n if (!(shapeKey in this.freeTextures)) {\n this.freeTextures[shapeKey] = [];\n }\n if (!(shapeKey in this.usedTextures)) {\n this.usedTextures[shapeKey] = [];\n }\n const texBytes = computeBytes(shapeRC, physicalTexType, this.gpgpu.gl, this.gpgpu.textureConfig, isPacked);\n if (this.freeTextures[shapeKey].length > 0) {\n this.numFreeTextures--;\n this.numUsedTextures++;\n this._numBytesFree -= texBytes;\n this.log();\n const newTexture = this.freeTextures[shapeKey].shift();\n this.usedTextures[shapeKey].push(newTexture);\n return newTexture;\n }\n let newTexture;\n if (physicalTexType === PhysicalTextureType.PACKED_2X2_FLOAT32) {\n newTexture = this.gpgpu.createPackedMatrixTexture(shapeRC[0], shapeRC[1]);\n }\n else if (physicalTexType === PhysicalTextureType.PACKED_2X2_FLOAT16) {\n newTexture =\n this.gpgpu.createFloat16PackedMatrixTexture(shapeRC[0], shapeRC[1]);\n }\n else if (physicalTexType === PhysicalTextureType.UNPACKED_FLOAT32) {\n newTexture =\n this.gpgpu.createFloat32MatrixTexture(shapeRC[0], shapeRC[1]);\n }\n else if (physicalTexType === PhysicalTextureType.UNPACKED_FLOAT16) {\n newTexture =\n this.gpgpu.createFloat16MatrixTexture(shapeRC[0], shapeRC[1]);\n }\n else if (physicalTexType === PhysicalTextureType.PACKED_4X1_UNSIGNED_BYTE) {\n newTexture =\n this.gpgpu.createUnsignedBytesMatrixTexture(shapeRC[0], shapeRC[1]);\n }\n this.usedTextures[shapeKey].push(newTexture);\n this.numUsedTextures++;\n this._numBytesAllocated += texBytes;\n this.log();\n return newTexture;\n }\n releaseTexture(texture, shape, logicalTexType, isPacked) {\n if (this.freeTextures == null) {\n // Already disposed.\n return;\n }\n const physicalTexType = getPhysicalFromLogicalTextureType(logicalTexType, isPacked);\n const shapeKey = getKeyFromTextureShape(shape, physicalTexType, isPacked);\n if (!(shapeKey in this.freeTextures)) {\n this.freeTextures[shapeKey] = 
[];\n }\n const texBytes = computeBytes(shape, physicalTexType, this.gpgpu.gl, this.gpgpu.textureConfig, isPacked);\n const deleteTexThreshold = env().get('WEBGL_DELETE_TEXTURE_THRESHOLD');\n if (deleteTexThreshold !== -1 &&\n this._numBytesAllocated > deleteTexThreshold) {\n this.gpgpu.deleteMatrixTexture(texture);\n this._numBytesAllocated -= texBytes;\n }\n else {\n this.freeTextures[shapeKey].push(texture);\n this.numFreeTextures++;\n this._numBytesFree += texBytes;\n }\n this.numUsedTextures--;\n const texList = this.usedTextures[shapeKey];\n const texIndex = texList.indexOf(texture);\n if (texIndex < 0) {\n throw new Error('Cannot release a texture that was never provided by this ' +\n 'texture manager');\n }\n texList.splice(texIndex, 1);\n this.log();\n }\n log() {\n if (!this.logEnabled) {\n return;\n }\n const total = this.numFreeTextures + this.numUsedTextures;\n console.log('Free/Used', `${this.numFreeTextures} / ${this.numUsedTextures}`, `(${total})`);\n const freeRatio = this._numBytesFree / this._numBytesAllocated;\n console.log(`Bytes allocated: ${this._numBytesAllocated}`);\n console.log(`Bytes unused: ${this._numBytesFree} (${Math.round(100 * freeRatio)}%)`);\n }\n get numBytesAllocated() {\n return this._numBytesAllocated;\n }\n get numBytesFree() {\n return this._numBytesFree;\n }\n getNumUsedTextures() {\n return this.numUsedTextures;\n }\n getNumFreeTextures() {\n return this.numFreeTextures;\n }\n dispose() {\n if (this.freeTextures == null) {\n // Already disposed.\n return;\n }\n for (const texShape in this.freeTextures) {\n this.freeTextures[texShape].forEach(tex => {\n this.gpgpu.deleteMatrixTexture(tex);\n });\n }\n for (const texShape in this.usedTextures) {\n this.usedTextures[texShape].forEach(tex => {\n this.gpgpu.deleteMatrixTexture(tex);\n });\n }\n this.freeTextures = null;\n this.usedTextures = null;\n this.numUsedTextures = 0;\n this.numFreeTextures = 0;\n this._numBytesAllocated = 0;\n this._numBytesFree = 0;\n }\n}\nfunction numBytesForInternalFormat(gl, internalFormat) {\n // tslint:disable-next-line:no-any\n const glany = gl;\n if (internalFormat === glany.R32F) {\n return 4;\n }\n else if (internalFormat === glany.R16F) {\n return 2;\n }\n else if (internalFormat === glany.RGBA32F) {\n return 16;\n }\n else if (internalFormat === gl.RGBA) {\n return 16;\n }\n else if (internalFormat === glany.RGBA16F) {\n return 8;\n }\n throw new Error(`Unknown internal format ${internalFormat}`);\n}\nexport function computeBytes(shape, physicalTexType, gl, textureConfig, isPacked) {\n // It is not possible to infer packed status from the texture type because\n // depending on the textureConfig, different texture types may resolve to the\n // same internal format (e.g. in WebGL1, the internal format for\n // UNPACKED_FLOAT16 textures is gl.RGBA). 
Therefore we pass in `isPacked`\n // explicitly.\n const internalFormat = internalFormatForPhysicalTexType(physicalTexType, textureConfig);\n let numElements;\n if (isPacked) {\n const [packedWidth, packedHeight] = getPackedMatrixTextureShapeWidthHeight(shape[0], shape[1]);\n numElements = packedWidth * packedHeight;\n }\n else {\n const [width, height] = getUnpackedMatrixTextureShapeWidthHeight(shape[0], shape[1]);\n numElements = width * height;\n }\n const bytesPerElement = numBytesForInternalFormat(gl, internalFormat);\n return numElements * bytesPerElement;\n}\nfunction internalFormatForPhysicalTexType(physicalTexType, textureConfig) {\n switch (physicalTexType) {\n case PhysicalTextureType.PACKED_2X2_FLOAT32:\n return getInternalFormatForPackedMatrixTexture(textureConfig);\n case PhysicalTextureType.PACKED_2X2_FLOAT16:\n return getInternalFormatForFloat16PackedMatrixTexture(textureConfig);\n case PhysicalTextureType.UNPACKED_FLOAT32:\n return getInternalFormatForFloat32MatrixTexture(textureConfig);\n case PhysicalTextureType.UNPACKED_FLOAT16:\n return getInternalFormatForFloat16MatrixTexture(textureConfig);\n case PhysicalTextureType.PACKED_4X1_UNSIGNED_BYTE:\n return getInternalFormatForUnsignedBytesMatrixTexture(textureConfig);\n default:\n throw new Error(`Unknown physical texture type ${physicalTexType}`);\n }\n}\nfunction getPhysicalTextureForRendering(isPacked) {\n if (env().getBool('WEBGL_RENDER_FLOAT32_ENABLED')) {\n if (isPacked) {\n return PhysicalTextureType.PACKED_2X2_FLOAT32;\n }\n return PhysicalTextureType.UNPACKED_FLOAT32;\n }\n if (isPacked) {\n return PhysicalTextureType.PACKED_2X2_FLOAT16;\n }\n return PhysicalTextureType.UNPACKED_FLOAT16;\n}\nfunction getPhysicalFromLogicalTextureType(logicalTexType, isPacked) {\n if (logicalTexType === TextureUsage.UPLOAD) {\n return PhysicalTextureType.PACKED_2X2_FLOAT32;\n }\n else if (logicalTexType === TextureUsage.RENDER || logicalTexType == null) {\n return getPhysicalTextureForRendering(isPacked);\n }\n else if (logicalTexType === TextureUsage.DOWNLOAD ||\n logicalTexType === TextureUsage.PIXELS) {\n return PhysicalTextureType.PACKED_4X1_UNSIGNED_BYTE;\n }\n throw new Error(`Unknown logical texture type ${logicalTexType}`);\n}\nfunction getKeyFromTextureShape(shapeRowsCol, physicalTexType, isPacked) {\n return `${shapeRowsCol[0]}_${shapeRowsCol[1]}_${physicalTexType}_${isPacked}`;\n}\n//# sourceMappingURL=texture_manager.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getCoordsDataType } from './shader_compiler';\nexport class TileProgram {\n constructor(aShape, reps) {\n this.variableNames = ['A'];\n const outputShape = new Array(aShape.length);\n for (let i = 0; i < outputShape.length; i++) {\n outputShape[i] = aShape[i] * reps[i];\n }\n this.outputShape = outputShape;\n this.rank = outputShape.length;\n const dtype = getCoordsDataType(this.rank);\n const sourceCoords = getSourceCoords(aShape);\n this.userCode = `\n void main() {\n ${dtype} resRC = getOutputCoords();\n setOutput(getA(${sourceCoords}));\n }\n `;\n }\n}\nfunction getSourceCoords(aShape) {\n const rank = aShape.length;\n if (rank > 5) {\n throw Error(`Tile for rank ${rank} is not yet supported`);\n }\n if (rank === 1) {\n return `imod(resRC, ${aShape[0]})`;\n }\n const currentCoords = ['resRC.x', 'resRC.y', 'resRC.z', 'resRC.w', 'resRC.u'];\n const sourceCoords = [];\n for (let i = 0; i < aShape.length; i++) {\n sourceCoords.push(`imod(${currentCoords[i]}, ${aShape[i]})`);\n }\n return sourceCoords.join();\n}\n//# sourceMappingURL=tile_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util } from '@tensorflow/tfjs-core';\nexport class UnaryOpProgram {\n constructor(aShape, opSnippet) {\n this.variableNames = ['A'];\n this.outputShape = aShape;\n this.userCode = `\n float unaryOperation(float x) {\n ${opSnippet}\n }\n\n void main() {\n float x = getAAtOutCoords();\n float y = unaryOperation(x);\n\n setOutput(y);\n }\n `;\n }\n}\nconst CHECK_NAN_SNIPPET = `if (isnan(x)) return x;`;\nexport const LINEAR = `return x;`;\nexport const ABS = `return abs(x);`;\nexport const RELU = CHECK_NAN_SNIPPET + `\n return (x < 0.0) ? 0.0 : x;\n`;\nexport const RELU6 = CHECK_NAN_SNIPPET + `\n return (x < 0.0) ? 0.0 : min(6.0, x);\n`;\nexport const ELU = `return (x >= 0.0) ? x : (exp(x) - 1.0);`;\nexport const SELU = `\n // Stable and Attracting Fixed Point (0, 1) for Normalized Weights.\n // see: https://arxiv.org/abs/1706.02515\n float scaleAlpha = ${backend_util.SELU_SCALEALPHA};\n float scale = ${backend_util.SELU_SCALE};\n return (x >= 0.0) ? 
scale * x : scaleAlpha * (exp(x) - 1.0);\n`;\nexport function STEP(alpha = 0.0) {\n return CHECK_NAN_SNIPPET + `\n return x > 0.0 ? 1.0 : float(${alpha});\n `;\n}\nexport const NEG = `return -x;`;\nexport const CEIL = `return ceil(x);`;\nexport const FLOOR = `return floor(x);`;\nexport const SIGN = `\n if (isnan(x)) { return 0.0; }\n return sign(x);\n`;\nexport const IS_NAN = `return float(isnan(x));`;\nexport const IS_INF = `return float(isinf(x));`;\nexport const IS_FINITE = `return float(!isnan(x) && !isinf(x));`;\nexport const ROUND = `\n // OpenGL ES does not support round function.\n // The algorithm is based on banker's rounding.\n float base = floor(x);\n if ((x - base) < 0.5) {\n return floor(x);\n } else if ((x - base) > 0.5) {\n return ceil(x);\n } else {\n if (mod(base, 2.0) == 0.0) {\n return base;\n } else {\n return base + 1.0;\n }\n }\n`;\nexport const EXP = `return exp(x);`;\nexport const EXPM1 = `return exp(x) - 1.0;`;\nexport const LOG = `if (x < 0.0) return NAN;\n return log(x);`;\nexport const LOG1P = `return log(1.0 + x);`;\nexport const SQRT = `return sqrt(x);`;\nexport const RSQRT = `return inversesqrt(x);`;\nexport const SIGMOID = `return 1.0 / (1.0 + exp(-1.0 * x));`;\n/**\n * mirrors the implementation of tf.nn.softplus: https://goo.gl/vkcvwX\n *\n * epsilon is the difference between 1.0 and the next representable\n * float. For a single precision 32 bit float this should be 2^-23, see:\n * https://math.byu.edu/~schow/work/IEEEFloatingPoint.htm\n *\n * too_large = (x > -threshold) is value above which exp(x) may overflow\n * but softplus(x) == x is within machine epsilon\n *\n * too_small = (x < threshold) is value below which exp(x) may underflow,\n * but softplus(x) == exp(x) is within machine epsilon.\n */\nexport const SOFTPLUS = `\n float epsilon = 1.1920928955078125e-7;\n float threshold = log(epsilon) + 2.0;\n\n bool too_large = x > -threshold;\n bool too_small = x < threshold;\n\n float result;\n float exp_x = exp(x);\n\n if (too_large){\n result = x;\n }\n else if (too_small){\n result = exp_x;\n }\n else{\n result = log(exp_x + 1.0);\n }\n return result;\n`;\nexport const ASIN = CHECK_NAN_SNIPPET + `\n if (abs(x) > 1.) {\n return NAN;\n }\n return asin(x);\n`;\nexport const ACOS = CHECK_NAN_SNIPPET + `\n if (abs(x) > 1.) 
{\n return NAN;\n }\n return acos(x);\n`;\nexport const ATAN = CHECK_NAN_SNIPPET + `\n return atan(x);\n`;\nexport const SINH = `\n float e2x = exp(x);\n return (e2x - 1.0 / e2x) / 2.0;\n`;\nexport const COSH = `\n float e2x = exp(-x);\n return (e2x + 1.0 / e2x) / 2.0;\n`;\nexport const TANH = `\n float e2x = exp(-2.0 * abs(x));\n return sign(x) * (1.0 - e2x) / (1.0 + e2x);\n`;\nexport const ASINH = CHECK_NAN_SNIPPET + `return log(x + sqrt(x * x + 1.0));`;\nexport const ACOSH = CHECK_NAN_SNIPPET + `\n if (x < 1.0) return NAN;\n return log(x + sqrt(x * x - 1.0));`;\nexport const ATANH = CHECK_NAN_SNIPPET + `\n if ((x < -1.0) || (x > 1.0)) return NAN;\n return (log(1.0 + x) - log(1.0 - x)) / 2.0;`;\nexport const ERF = `\n // Error function is calculated approximately with elementary function.\n // See \"Handbook of Mathematical Functions with Formulas,\n // Graphs, and Mathematical Tables\", Abramowitz and Stegun.\n float p = ${backend_util.ERF_P};\n float a1 = ${backend_util.ERF_A1};\n float a2 = ${backend_util.ERF_A2};\n float a3 = ${backend_util.ERF_A3};\n float a4 = ${backend_util.ERF_A4};\n float a5 = ${backend_util.ERF_A5};\n\n float sign = sign(x);\n x = abs(x);\n float t = 1.0 / (1.0 + p * x);\n return sign * (1.0 - (((((a5*t + a4)*t) + a3)*t + a2)*t + a1)*t*exp(-x*x));\n`;\nexport const RECIPROCAL = `return 1.0 / x;`;\nexport const LOGICAL_NOT = `return float(!(x >= 1.0));`;\nexport const CLONE = 'return x;';\n//# sourceMappingURL=unaryop_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const LINEAR = `return x;`;\nexport const LOG = `\n vec4 result = log(x);\n vec4 isNaN = vec4(lessThan(x, vec4(0.0)));\n result.r = isNaN.r == 1.0 ? NAN : result.r;\n result.g = isNaN.g == 1.0 ? NAN : result.g;\n result.b = isNaN.b == 1.0 ? NAN : result.b;\n result.a = isNaN.a == 1.0 ? NAN : result.a;\n\n return result;\n`;\nexport const RELU = `\n vec4 result = x * vec4(greaterThanEqual(x, vec4(0.0)));\n bvec4 isNaN = isnan(x);\n\n result.r = isNaN.r ? x.r : result.r;\n result.g = isNaN.g ? x.g : result.g;\n result.b = isNaN.b ? x.b : result.b;\n result.a = isNaN.a ? x.a : result.a;\n\n return result;\n`;\nexport const RELU6 = `\n vec4 result = min(x, vec4(6.)) * vec4(greaterThanEqual(x, vec4(0.0)));\n bvec4 isNaN = isnan(x);\n\n result.r = isNaN.r ? x.r : result.r;\n result.g = isNaN.g ? x.g : result.g;\n result.b = isNaN.b ? x.b : result.b;\n result.a = isNaN.a ? x.a : result.a;\n\n return result;\n`;\nexport const ELU = `\n vec4 result;\n\n result.r = (x.r >= 0.0) ? x.r : (exp(x.r) - 1.0);\n result.g = (x.g >= 0.0) ? x.g : (exp(x.g) - 1.0);\n result.b = (x.b >= 0.0) ? x.b : (exp(x.b) - 1.0);\n result.a = (x.a >= 0.0) ? 
x.a : (exp(x.a) - 1.0);\n\n return result;\n`;\nexport class UnaryOpPackedProgram {\n constructor(aShape, opSnippet) {\n this.variableNames = ['A'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = aShape;\n this.userCode = `\n vec4 unaryOperation(vec4 x) {\n ${opSnippet}\n }\n\n void main() {\n vec4 x = getAAtOutCoords();\n vec4 y = unaryOperation(x);\n\n setOutput(y);\n }\n `;\n }\n}\n//# sourceMappingURL=unaryop_packed_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getChannels, getSourceCoords } from './packing_util';\nimport { getCoordsDataType } from './shader_compiler';\nexport class UnpackProgram {\n constructor(outputShape) {\n this.variableNames = ['A'];\n this.packedInputs = true;\n this.packedOutput = false;\n this.outputShape = outputShape;\n const rank = outputShape.length;\n const channels = getChannels('rc', rank);\n const dtype = getCoordsDataType(rank);\n const sourceCoords = getSourceCoords(rank, channels);\n const innerDims = channels.slice(-2);\n const coords = rank <= 1 ? 'rc' : `vec2(${innerDims.join(',')})`;\n this.userCode = `\n void main() {\n ${dtype} rc = getOutputCoords();\n vec4 packedInput = getA(${sourceCoords});\n\n setOutput(getChannel(packedInput, ${coords}));\n }\n `;\n }\n}\n//# sourceMappingURL=unpack_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// Import webgl flags.\nimport './flags_webgl';\nimport * as tf from '@tensorflow/tfjs-core';\nimport { div, engine, env, max, range, reshape, scalar, softmax, tensor, tidy, transpose } from '@tensorflow/tfjs-core';\nimport { backend_util, buffer, kernel_impls, slice_util, util } from '@tensorflow/tfjs-core';\nimport { DataStorage, KernelBackend, upcastType } from '@tensorflow/tfjs-core';\nimport { ceilImplCPU, expImplCPU, expm1ImplCPU, floorImplCPU, logImplCPU, rsqrtImplCPU, simpleAbsImplCPU, sliceImplCPU } from './kernel_utils/shared';\nconst { segment_util } = backend_util;\nconst split = kernel_impls.split;\nconst tile = kernel_impls.tile;\nconst topkImpl = kernel_impls.topkImpl;\nconst whereImpl = kernel_impls.whereImpl;\nimport { AddNProgram } from './addn_gpu';\nimport { AddNPackedProgram } from './addn_packed_gpu';\nimport { ArgMinMaxProgram } from './argminmax_gpu';\nimport { ArgMinMaxPackedProgram } from './argminmax_packed_gpu';\nimport { AvgPool3DBackpropProgram } from './avg_pool_backprop_gpu';\nimport * as binaryop_gpu from './binaryop_gpu';\nimport { BinaryOpProgram } from './binaryop_gpu';\nimport * as binaryop_packed_gpu from './binaryop_packed_gpu';\nimport { BinaryOpPackedProgram } from './binaryop_packed_gpu';\nimport { getWebGLContext } from './canvas_util';\nimport { ClipProgram } from './clip_gpu';\nimport { ClipPackedProgram } from './clip_packed_gpu';\nimport { ComplexAbsProgram } from './complex_abs_gpu';\nimport { Conv2DDerFilterProgram, Conv2DDerInputProgram, Conv3DDerFilterProgram, Conv3DDerInputProgram } from './conv_backprop_gpu';\nimport { DepthwiseConv2DDerFilterProgram, DepthwiseConv2DDerInputProgram } from './conv_backprop_gpu_depthwise';\nimport { Conv2DProgram, Conv3DProgram } from './conv_gpu';\nimport { DepthwiseConv2DProgram } from './conv_gpu_depthwise';\nimport { DepthwiseConvPacked2DProgram } from './conv_packed_gpu_depthwise';\nimport { CropAndResizeProgram } from './crop_and_resize_gpu';\nimport { CumSumProgram } from './cumsum_gpu';\nimport { DecodeMatrixProgram } from './decode_matrix_gpu';\nimport { DecodeMatrixPackedProgram } from './decode_matrix_packed_gpu';\nimport { DepthToSpaceProgram } from './depth_to_space_gpu';\nimport { DiagProgram } from './diag_gpu';\nimport { EncodeFloatProgram } from './encode_float_gpu';\nimport { EncodeFloatPackedProgram } from './encode_float_packed_gpu';\nimport { EncodeMatrixProgram } from './encode_matrix_gpu';\nimport { EncodeMatrixPackedProgram } from './encode_matrix_packed_gpu';\nimport { FillProgram } from './fill_gpu';\nimport { GatherProgram } from './gather_gpu';\nimport { GatherNDProgram } from './gather_nd_gpu';\nimport { GPGPUContext } from './gpgpu_context';\nimport * as gpgpu_math from './gpgpu_math';\nimport { Im2ColPackedProgram } from './im2col_packed_gpu';\nimport { LRNProgram } from './lrn_gpu';\nimport { LRNGradProgram } 
from './lrn_grad_gpu';\nimport { LRNPackedProgram } from './lrn_packed_gpu';\nimport { MaxPool3DBackpropProgram } from './max_pool_backprop_gpu';\nimport { MatMulPackedProgram } from './mulmat_packed_gpu';\nimport { MultinomialProgram } from './multinomial_gpu';\nimport { OneHotProgram } from './onehot_gpu';\nimport { PackProgram } from './pack_gpu';\nimport { PadProgram } from './pad_gpu';\nimport { PadPackedProgram } from './pad_packed_gpu';\nimport { Pool3DProgram } from './pool_gpu';\nimport { ReduceProgram } from './reduce_gpu';\nimport { ReshapePackedProgram } from './reshape_packed_gpu';\nimport { ResizeBilinearBackpropProgram } from './resize_bilinear_backprop_gpu';\nimport { ResizeBilinearProgram } from './resize_bilinear_gpu';\nimport { ResizeBilinearPackedProgram } from './resize_bilinear_packed_gpu';\nimport { ResizeNearestNeigborBackpropProgram } from './resize_nearest_neighbor_backprop_gpu';\nimport { ResizeNearestNeighborProgram } from './resize_nearest_neighbor_gpu';\nimport { ReverseProgram } from './reverse_gpu';\nimport { ReversePackedProgram } from './reverse_packed_gpu';\nimport { ScatterProgram } from './scatter_gpu';\nimport { SegmentOpProgram } from './segment_gpu';\nimport { SelectProgram } from './select_gpu';\nimport { SliceProgram } from './slice_gpu';\nimport { SlicePackedProgram } from './slice_packed_gpu';\nimport { StridedSliceProgram } from './strided_slice_gpu';\nimport * as tex_util from './tex_util';\nimport { TextureUsage } from './tex_util';\nimport { TextureManager } from './texture_manager';\nimport { TileProgram } from './tile_gpu';\nimport * as unary_op from './unaryop_gpu';\nimport { UnaryOpProgram } from './unaryop_gpu';\nimport * as unary_packed_op from './unaryop_packed_gpu';\nimport { UnaryOpPackedProgram } from './unaryop_packed_gpu';\nimport { UnpackProgram } from './unpack_gpu';\nimport * as webgl_util from './webgl_util';\nexport const EPSILON_FLOAT32 = 1e-7;\nexport const EPSILON_FLOAT16 = 1e-4;\nconst binaryCaches = {};\nexport function getBinaryCache(webGLVersion) {\n if (webGLVersion in binaryCaches) {\n return binaryCaches[webGLVersion];\n }\n binaryCaches[webGLVersion] = {};\n return binaryCaches[webGLVersion];\n}\nfunction mapActivationToShaderProgram(activation, packed = false) {\n if (activation === 'linear') {\n if (packed) {\n return unary_packed_op.LINEAR;\n }\n return unary_op.LINEAR;\n }\n else if (activation === 'relu') {\n if (packed) {\n return unary_packed_op.RELU;\n }\n return unary_op.RELU;\n }\n else if (activation === 'elu') {\n if (packed) {\n return unary_packed_op.ELU;\n }\n return unary_op.ELU;\n }\n else if (activation === 'relu6') {\n if (packed) {\n return unary_packed_op.RELU6;\n }\n return unary_op.RELU6;\n }\n else if (activation === 'prelu') {\n if (packed) {\n return binaryop_packed_gpu.PRELU;\n }\n return binaryop_gpu.PRELU;\n }\n throw new Error(`Activation ${activation} has not been implemented for the WebGL backend.`);\n}\n// Empirically determined constant used to determine size threshold for handing\n// off execution to the CPU.\nconst CPU_HANDOFF_SIZE_THRESHOLD = 128;\n// Empirically determined constant used to decide the number of MB on GPU\n// before we warn about high memory use. 
The MB are this constant * screen area\n// * dpi / 1024 / 1024.\nconst BEFORE_PAGING_CONSTANT = 600;\nfunction numMBBeforeWarning() {\n if (env().global.screen == null) {\n return 1024; // 1 GB.\n }\n return (env().global.screen.height * env().global.screen.width *\n window.devicePixelRatio) *\n BEFORE_PAGING_CONSTANT / 1024 / 1024;\n}\n// Empirically determined minimal shared dimension in matmul before we forward\n// to a.mul(b).sum() in order to take advantage of GPU parallelism. See\n// https://github.com/tensorflow/tfjs-core/pull/1379 for benchmarks.\nexport const MATMUL_SHARED_DIM_THRESHOLD = 1000;\nexport class MathBackendWebGL extends KernelBackend {\n constructor(gpgpu) {\n super();\n // Maps data ids that have a pending read operation, to list of subscribers.\n this.pendingRead = new WeakMap();\n // List of data ids that are scheduled for disposal, but are waiting on a\n // pending read operation.\n this.pendingDisposal = new WeakSet();\n // Used to count the number of 'shallow' sliced tensors that point to the\n // same data id.\n this.dataRefCount = new WeakMap();\n this.numBytesInGPU = 0;\n // Accumulated time spent (including blocking) in uploading data to webgl.\n this.uploadWaitMs = 0;\n // Accumulated time spent (including blocking in downloading data from webgl.\n this.downloadWaitMs = 0;\n this.warnedAboutMemory = false;\n this.warnedAboutCPUBackend = false;\n this.pendingDeletes = 0;\n this.disposed = false;\n if (!env().getBool('HAS_WEBGL')) {\n throw new Error('WebGL is not supported on this device');\n }\n if (gpgpu == null) {\n const gl = getWebGLContext(env().getNumber('WEBGL_VERSION'));\n this.binaryCache = getBinaryCache(env().getNumber('WEBGL_VERSION'));\n this.gpgpu = new GPGPUContext(gl);\n this.canvas = gl.canvas;\n this.gpgpuCreatedLocally = true;\n }\n else {\n this.gpgpu = gpgpu;\n this.binaryCache = {};\n this.gpgpuCreatedLocally = false;\n this.canvas = gpgpu.gl.canvas;\n }\n this.textureManager = new TextureManager(this.gpgpu);\n this.numMBBeforeWarning = numMBBeforeWarning();\n this.texData = new DataStorage(this, engine());\n }\n numDataIds() {\n return this.texData.numDataIds() +\n (this.cpuBackend ? this.cpuBackend.numDataIds() : 0) -\n this.pendingDeletes;\n }\n write(values, shape, dtype) {\n if (env().getBool('WEBGL_CHECK_NUMERICAL_PROBLEMS') ||\n env().getBool('DEBUG')) {\n this.checkNumericalProblems(values);\n }\n if (dtype === 'complex64' && values != null) {\n throw new Error(`Cannot write to a complex64 dtype. ` +\n `Please use tf.complex(real, imag).`);\n }\n const dataId = {};\n this.texData.set(dataId, {\n shape,\n dtype,\n values,\n usage: TextureUsage.UPLOAD,\n refCount: 1,\n complexParentRefCount: 0\n });\n return dataId;\n }\n /** Increase refCount of a `TextureData`. */\n incRef(dataId) {\n const texData = this.texData.get(dataId);\n texData.refCount++;\n }\n /** Decrease refCount of a `TextureData`. */\n decRef(dataId) {\n if (this.texData.has(dataId)) {\n const texData = this.texData.get(dataId);\n texData.refCount--;\n }\n }\n move(dataId, values, shape, dtype) {\n if (env().getBool('DEBUG')) {\n this.checkNumericalProblems(values);\n }\n if (dtype === 'complex64') {\n throw new Error(`Cannot write to a complex64 dtype. 
` +\n `Please use tf.complex(real, imag).`);\n }\n this.texData.set(dataId, {\n shape,\n dtype,\n values,\n usage: TextureUsage.UPLOAD,\n refCount: 1,\n complexParentRefCount: 0\n });\n }\n disposeIntermediateTensorInfo(tensorInfo) {\n const dataId = tensorInfo.dataId;\n if (this.texData.has(dataId)) {\n const textureData = this.texData.get(dataId);\n textureData.refCount--;\n if (textureData.refCount < 1) {\n this.disposeData(dataId);\n }\n }\n }\n readSync(dataId) {\n const texData = this.texData.get(dataId);\n const { values, dtype, complexTensorInfos, slice, shape, isPacked } = texData;\n // The presence of `slice` indicates this tensor is a shallow slice of a\n // different tensor, and is using that original tensor's texture. Run\n // `clone` in order to copy that texture and read from it.\n if (slice != null) {\n let program;\n if (isPacked) {\n program = new UnaryOpPackedProgram(shape, unary_op.CLONE);\n }\n else {\n program = new UnaryOpProgram(shape, unary_op.CLONE);\n }\n const res = this.runWebGLProgram(program, [{ dataId, shape, dtype }], dtype);\n const data = this.readSync(res.dataId);\n this.disposeIntermediateTensorInfo(res);\n return data;\n }\n if (values != null) {\n return this.convertAndCacheOnCPU(dataId);\n }\n if (dtype === 'string') {\n return values;\n }\n const shouldTimeProgram = this.activeTimers != null;\n let start;\n if (shouldTimeProgram) {\n start = util.now();\n }\n let result;\n if (dtype === 'complex64') {\n const realValues = this.readSync(complexTensorInfos.real.dataId);\n const imagValues = this.readSync(complexTensorInfos.imag.dataId);\n result = backend_util.mergeRealAndImagArrays(realValues, imagValues);\n }\n else {\n result = this.getValuesFromTexture(dataId);\n }\n if (shouldTimeProgram) {\n this.downloadWaitMs += util.now() - start;\n }\n return this.convertAndCacheOnCPU(dataId, result);\n }\n async read(dataId) {\n if (this.pendingRead.has(dataId)) {\n const subscribers = this.pendingRead.get(dataId);\n return new Promise(resolve => subscribers.push(resolve));\n }\n const texData = this.texData.get(dataId);\n const { values, shape, slice, dtype, complexTensorInfos, isPacked } = texData;\n // The presence of `slice` indicates this tensor is a shallow slice of a\n // different tensor, and is using that original tensor's texture. 
Run\n // `clone` in order to copy that texture and read from it.\n if (slice != null) {\n let program;\n if (isPacked) {\n program = new UnaryOpPackedProgram(shape, unary_op.CLONE);\n }\n else {\n program = new UnaryOpProgram(shape, unary_op.CLONE);\n }\n const res = this.runWebGLProgram(program, [{ dataId, shape, dtype }], dtype);\n const data = this.read(res.dataId);\n this.disposeIntermediateTensorInfo(res);\n return data;\n }\n if (values != null) {\n return this.convertAndCacheOnCPU(dataId);\n }\n if (!env().getBool('WEBGL_DOWNLOAD_FLOAT_ENABLED') &&\n env().getNumber('WEBGL_VERSION') === 2) {\n throw new Error(`tensor.data() with WEBGL_DOWNLOAD_FLOAT_ENABLED=false and ` +\n `WEBGL_VERSION=2 not yet supported.`);\n }\n let buffer = null;\n let tmpDownloadTarget;\n if (dtype !== 'complex64' && env().get('WEBGL_BUFFER_SUPPORTED')) {\n // Possibly copy the texture into a buffer before inserting a fence.\n tmpDownloadTarget = this.decode(dataId);\n const tmpData = this.texData.get(tmpDownloadTarget.dataId);\n buffer = this.gpgpu.createBufferFromTexture(tmpData.texture, ...tex_util.getDenseTexShape(shape));\n }\n this.pendingRead.set(dataId, []);\n if (dtype !== 'complex64') {\n // Create a fence and wait for it to resolve.\n await this.gpgpu.createAndWaitForFence();\n }\n // Download the values from the GPU.\n let vals;\n if (dtype === 'complex64') {\n const ps = await Promise.all([\n this.read(complexTensorInfos.real.dataId),\n this.read(complexTensorInfos.imag.dataId)\n ]);\n const realValues = ps[0];\n const imagValues = ps[1];\n vals = backend_util.mergeRealAndImagArrays(realValues, imagValues);\n }\n else if (buffer == null) {\n vals = this.getValuesFromTexture(dataId);\n }\n else {\n const size = util.sizeFromShape(shape);\n vals = this.gpgpu.downloadFloat32MatrixFromBuffer(buffer, size);\n }\n if (tmpDownloadTarget != null) {\n this.disposeIntermediateTensorInfo(tmpDownloadTarget);\n }\n const dTypeVals = this.convertAndCacheOnCPU(dataId, vals);\n const subscribers = this.pendingRead.get(dataId);\n this.pendingRead.delete(dataId);\n // Notify all pending reads.\n subscribers.forEach(resolve => resolve(dTypeVals));\n if (this.pendingDisposal.has(dataId)) {\n this.pendingDisposal.delete(dataId);\n this.disposeData(dataId);\n this.pendingDeletes--;\n }\n return dTypeVals;\n }\n checkNumericalProblems(values) {\n if (values == null) {\n return;\n }\n for (let i = 0; i < values.length; i++) {\n const num = values[i];\n if (!webgl_util.canBeRepresented(num)) {\n if (env().getBool('WEBGL_RENDER_FLOAT32_CAPABLE')) {\n throw Error(`The value ${num} cannot be represented with your ` +\n `current settings. Consider enabling float32 rendering: ` +\n `'tf.env().set('WEBGL_RENDER_FLOAT32_ENABLED', true);'`);\n }\n throw Error(`The value ${num} cannot be represented on this device.`);\n }\n }\n }\n getValuesFromTexture(dataId) {\n const { shape, dtype, isPacked } = this.texData.get(dataId);\n const size = util.sizeFromShape(shape);\n if (env().getBool('WEBGL_DOWNLOAD_FLOAT_ENABLED')) {\n const tmpTarget = this.decode(dataId);\n const tmpData = this.texData.get(tmpTarget.dataId);\n const vals = this.gpgpu\n .downloadMatrixFromPackedTexture(tmpData.texture, ...tex_util.getDenseTexShape(shape))\n .subarray(0, size);\n this.disposeIntermediateTensorInfo(tmpTarget);\n return vals;\n }\n const shouldUsePackedProgram = env().getBool('WEBGL_PACK') && isPacked === true;\n const outputShape = shouldUsePackedProgram ? 
webgl_util.getShapeAs3D(shape) : shape;\n const program = shouldUsePackedProgram ?\n new EncodeFloatPackedProgram(outputShape) :\n new EncodeFloatProgram(outputShape);\n const output = this.runWebGLProgram(program, [{ shape: outputShape, dtype, dataId }], 'float32');\n const tmpData = this.texData.get(output.dataId);\n const vals = this.gpgpu\n .downloadByteEncodedFloatMatrixFromOutputTexture(tmpData.texture, tmpData.texShape[0], tmpData.texShape[1])\n .subarray(0, size);\n this.disposeIntermediateTensorInfo(output);\n return vals;\n }\n async time(f) {\n const oldActiveTimers = this.activeTimers;\n const newActiveTimers = [];\n let outerMostTime = false;\n if (this.programTimersStack == null) {\n this.programTimersStack = newActiveTimers;\n outerMostTime = true;\n }\n else {\n this.activeTimers.push(newActiveTimers);\n }\n this.activeTimers = newActiveTimers;\n f();\n // needing to split these up because util.flatten only accepts certain types\n const flattenedActiveTimerQueries = util.flatten(this.activeTimers.map((d) => d.query))\n .filter(d => d != null);\n const flattenedActiveTimerNames = util.flatten(this.activeTimers.map((d) => d.name))\n .filter(d => d != null);\n this.activeTimers = oldActiveTimers;\n if (outerMostTime) {\n this.programTimersStack = null;\n }\n const res = {\n uploadWaitMs: this.uploadWaitMs,\n downloadWaitMs: this.downloadWaitMs,\n kernelMs: null,\n wallMs: null // will be filled by the engine\n };\n if (env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_RELIABLE') > 0) {\n const kernelMs = await Promise.all(flattenedActiveTimerQueries);\n res['kernelMs'] = util.sum(kernelMs);\n res['getExtraProfileInfo'] = () => kernelMs.map((d, i) => ({ name: flattenedActiveTimerNames[i], ms: d }))\n .map(d => `${d.name}: ${d.ms}`)\n .join(', ');\n }\n else {\n res['kernelMs'] = {\n error: 'WebGL query timers are not supported in this environment.'\n };\n }\n this.uploadWaitMs = 0;\n this.downloadWaitMs = 0;\n return res;\n }\n memory() {\n return {\n unreliable: false,\n numBytesInGPU: this.numBytesInGPU,\n numBytesInGPUAllocated: this.textureManager.numBytesAllocated,\n numBytesInGPUFree: this.textureManager.numBytesFree\n };\n }\n startTimer() {\n if (env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_RELIABLE') > 0) {\n return this.gpgpu.beginQuery();\n }\n return { startMs: util.now(), endMs: null };\n }\n endTimer(query) {\n if (env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_RELIABLE') > 0) {\n this.gpgpu.endQuery();\n return query;\n }\n query.endMs = util.now();\n return query;\n }\n async getQueryTime(query) {\n if (env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_RELIABLE') > 0) {\n return this.gpgpu.waitForQueryAndGetTime(query);\n }\n const timerQuery = query;\n return timerQuery.endMs - timerQuery.startMs;\n }\n disposeData(dataId) {\n if (this.pendingDisposal.has(dataId)) {\n return;\n }\n if (this.pendingRead.has(dataId)) {\n this.pendingDisposal.add(dataId);\n this.pendingDeletes++;\n return;\n }\n // No-op if already disposed.\n if (!this.texData.has(dataId)) {\n return;\n }\n // Trying to dispose a textureData that has a 'kept' refCount, e.g. trying\n // to dispose a tensor whose data bucket is shared with a complex tensor. 
In\n // this case we are removing a reference to the textureData, but we\n // shouldn't actually dispose the texture.\n if (this.texData.get(dataId).complexParentRefCount > 0) {\n this.texData.get(dataId).refCount--;\n return;\n }\n this.releaseGPUData(dataId);\n const { complexTensorInfos } = this.texData.get(dataId);\n if (complexTensorInfos != null) {\n this.texData.get(complexTensorInfos.real.dataId).complexParentRefCount--;\n this.disposeIntermediateTensorInfo(complexTensorInfos.real);\n this.texData.get(complexTensorInfos.imag.dataId).complexParentRefCount--;\n this.disposeIntermediateTensorInfo(complexTensorInfos.imag);\n }\n this.texData.delete(dataId);\n }\n releaseGPUData(dataId) {\n const { texture, dtype, texShape, usage, isPacked, slice } = this.texData.get(dataId);\n const key = slice && slice.origDataId || dataId;\n const refCount = this.dataRefCount.get(key);\n if (refCount > 1) {\n this.dataRefCount.set(key, refCount - 1);\n }\n else {\n this.dataRefCount.delete(key);\n if (texture != null) {\n this.numBytesInGPU -= this.computeBytes(texShape, dtype);\n this.textureManager.releaseTexture(texture, texShape, usage, isPacked);\n }\n }\n const texData = this.texData.get(dataId);\n texData.texture = null;\n texData.texShape = null;\n texData.isPacked = false;\n texData.slice = null;\n }\n getTexture(dataId) {\n this.uploadToGPU(dataId);\n return this.texData.get(dataId).texture;\n }\n /**\n * Returns internal information for the specific data bucket. Used in unit\n * tests.\n */\n getDataInfo(dataId) {\n return this.texData.get(dataId);\n }\n getCPUBackend() {\n if (!env().getBool('WEBGL_CPU_FORWARD')) {\n return null;\n }\n if (this.cpuBackend == null) {\n this.cpuBackend = engine().findBackend('cpu');\n }\n return this.cpuBackend;\n }\n /*\n Tests whether all the inputs to an op are small and on the CPU. This heuristic\n determines when it would be faster to execute a kernel on the CPU. WebGL\n kernels opt into running this check and forwarding when appropriate.\n TODO(https://github.com/tensorflow/tfjs/issues/872): Develop a more\n sustainable strategy for optimizing backend execution of ops.\n */\n shouldExecuteOnCPU(inputs, sizeThreshold = CPU_HANDOFF_SIZE_THRESHOLD) {\n const cpuBackend = this.getCPUBackend();\n if (!this.warnedAboutCPUBackend && cpuBackend == null) {\n console.warn('Your application contains ops that are small enough to be ' +\n 'executed on the CPU backend, however the CPU backend cannot ' +\n 'be found. 
Consider importing the CPU backend ' +\n '(@tensorflow/tfjs-backend-cpu) for better performance.');\n this.warnedAboutCPUBackend = true;\n }\n return cpuBackend != null &&\n inputs.every(input => this.texData.get(input.dataId).texture == null &&\n util.sizeFromShape(input.shape) < sizeThreshold);\n }\n getGPGPUContext() {\n return this.gpgpu;\n }\n slice(x, begin, size) {\n if (this.shouldExecuteOnCPU([x])) {\n const outValues = sliceImplCPU(this.texData.get(x.dataId).values, begin, size, x.shape, x.dtype);\n return this.makeOutput(size, x.dtype, outValues);\n }\n // Short-circuit computation if the slice is zero-sized.\n if (util.sizeFromShape(size) === 0) {\n return tensor([], size, x.dtype);\n }\n const { isPacked } = this.texData.get(x.dataId);\n const isContinous = slice_util.isSliceContinous(x.shape, begin, size);\n if (isPacked || !isContinous) {\n const program = env().getBool('WEBGL_PACK_ARRAY_OPERATIONS') ?\n new SlicePackedProgram(size) :\n new SliceProgram(size);\n const customSetup = program.getCustomSetupFunc(begin);\n return this.compileAndRun(program, [x], null, customSetup);\n }\n this.uploadToGPU(x.dataId);\n return this.shallowSlice(x, begin, size);\n }\n shallowSlice(x, begin, size) {\n const xTexData = this.texData.get(x.dataId);\n const t = this.makeOutput(size, x.dtype);\n const newTexData = this.texData.get(t.dataId);\n // Copy texture data from the original tensor.\n Object.assign(newTexData, xTexData);\n newTexData.shape = size;\n newTexData.dtype = x.dtype;\n let flatOffset = slice_util.computeFlatOffset(begin, x.strides);\n if (xTexData.slice) {\n // We are slicing an already sliced tensor, so we have to accumulate\n // the offset.\n flatOffset += xTexData.slice.flatOffset;\n }\n newTexData.slice = {\n flatOffset,\n // Point to the original dataId, which is used to do ref counting.\n origDataId: xTexData.slice && xTexData.slice.origDataId || x.dataId\n };\n // Increase the ref count for that data bucket.\n const refCount = this.dataRefCount.get(newTexData.slice.origDataId) || 1;\n this.dataRefCount.set(newTexData.slice.origDataId, refCount + 1);\n return t;\n }\n stridedSlice(x, begin, end, strides) {\n const cpuRes = this.tryRunOnCpuOrThrow([x], () => this.cpuBackend.stridedSlice(x, begin, end, strides));\n if (cpuRes) {\n return cpuRes;\n }\n const outShape = slice_util.computeOutShape(begin, end, strides);\n if (outShape.some(axis => axis === 0)) {\n return tensor([], outShape);\n }\n const program = new StridedSliceProgram(begin, strides, outShape);\n return this.compileAndRun(program, [x]);\n }\n reverse(x, axis) {\n const program = env().getBool('WEBGL_PACK_ARRAY_OPERATIONS') ?\n new ReversePackedProgram(x.shape, axis) :\n new ReverseProgram(x.shape, axis);\n return this.compileAndRun(program, [x]);\n }\n neg(x) {\n const cpuRes = this.tryRunOnCpuOrThrow([x], () => this.cpuBackend.neg(x));\n if (cpuRes) {\n return cpuRes;\n }\n if (env().getBool('WEBGL_PACK_UNARY_OPERATIONS')) {\n return this.packedUnaryOp(x, unary_op.NEG, x.dtype);\n }\n const program = new UnaryOpProgram(x.shape, unary_op.NEG);\n return this.compileAndRun(program, [x]);\n }\n batchMatMul(a, b, transposeA, transposeB) {\n const outerShapeA = transposeA ? a.shape[2] : a.shape[1];\n const outerShapeB = transposeB ? b.shape[1] : b.shape[2];\n const sharedDim = transposeA ? 
a.shape[1] : a.shape[2];\n const batch = Math.max(a.shape[0], b.shape[0]);\n // Since the matrices are vectors, it is faster to call mul().sum()\n // because sum() is O(sqrt(N)) due to divide-and-conquer.\n if ((outerShapeA === 1 || outerShapeB === 1) &&\n sharedDim > MATMUL_SHARED_DIM_THRESHOLD) {\n if (transposeA) {\n a = transpose(a, [0, 2, 1]);\n }\n if (transposeB) {\n b = transpose(b, [0, 2, 1]);\n }\n const a3D = outerShapeB === 1 ? a : a.as3D(batch, sharedDim, 1);\n const axis = outerShapeB === 1 ? 2 : 1;\n const b3D = outerShapeB === 1 ? b.as3D(batch, 1, sharedDim) : b;\n // TODO(annxingyuan): Call multiply directly as part of batchMatMul\n // modularization.\n const product = tf.mul(a3D, b3D);\n return product.sum(axis, true /* keepDims */);\n }\n const dtype = upcastType(a.dtype, b.dtype);\n const program = new MatMulPackedProgram(a.shape, b.shape, [batch, outerShapeA, outerShapeB], transposeA, transposeB);\n return this.compileAndRun(program, [a, b], dtype);\n }\n fusedBatchMatMul({ a, b, transposeA, transposeB, bias, activation, preluActivationWeights }) {\n const outerShapeA = transposeA ? a.shape[2] : a.shape[1];\n const outerShapeB = transposeB ? b.shape[1] : b.shape[2];\n const batch = Math.max(a.shape[0], b.shape[0]);\n const dtype = upcastType(a.dtype, b.dtype);\n const hasBias = bias != null;\n const hasPreluActivationWeights = preluActivationWeights != null;\n const fusedActivation = activation ? mapActivationToShaderProgram(activation, true) : null;\n const program = new MatMulPackedProgram(a.shape, b.shape, [batch, outerShapeA, outerShapeB], transposeA, transposeB, hasBias, fusedActivation, hasPreluActivationWeights);\n const inputs = [a, b];\n if (bias) {\n inputs.push(bias);\n }\n if (preluActivationWeights) {\n inputs.push(preluActivationWeights);\n }\n return this.compileAndRun(program, inputs, dtype);\n }\n localResponseNormalization4D(x, radius, bias, alpha, beta) {\n const program = env().getBool('WEBGL_PACK_NORMALIZATION') ?\n new LRNPackedProgram(x.shape, radius, bias, alpha, beta) :\n new LRNProgram(x.shape, radius, bias, alpha, beta);\n return this.compileAndRun(program, [x]);\n }\n LRNGrad(dy, inputImage, outputImage, depthRadius, bias, alpha, beta) {\n const program = new LRNGradProgram(inputImage.shape, depthRadius, bias, alpha, beta);\n return this.compileAndRun(program, [inputImage, outputImage, dy]);\n }\n tile(x, reps) {\n if (x.dtype === 'string') {\n const data = this.readSync(x.dataId);\n const decodedData = data.map(d => util.decodeString(d));\n const buf = buffer(x.shape, x.dtype, decodedData);\n return tile(buf, reps);\n }\n const program = new TileProgram(x.shape, reps);\n return this.compileAndRun(program, [x]);\n }\n pad(x, paddings, constantValue) {\n const program = env().getBool('WEBGL_PACK_ARRAY_OPERATIONS') ?\n new PadPackedProgram(x.shape, paddings, constantValue) :\n new PadProgram(x.shape, paddings, constantValue);\n return this.compileAndRun(program, [x]);\n }\n gather(x, indices, axis) {\n const cpuRes = this.tryRunOnCpuOrThrow([x, indices], () => this.cpuBackend.gather(x, indices, axis));\n if (cpuRes) {\n return cpuRes;\n }\n const program = new GatherProgram(x.shape, indices.size, axis);\n return this.compileAndRun(program, [x, indices]);\n }\n batchToSpaceND(x, blockShape, crops) {\n util.assert(x.rank <= 4, () => 'batchToSpaceND for rank > 4 with a WebGL backend not ' +\n 'implemented yet');\n const prod = blockShape.reduce((a, b) => a * b);\n const reshaped = backend_util.getReshaped(x.shape, blockShape, prod);\n const 
permuted = backend_util.getPermuted(reshaped.length, blockShape.length);\n const reshapedPermuted = backend_util.getReshapedPermuted(x.shape, blockShape, prod);\n const sliceBeginCoords = backend_util.getSliceBeginCoords(crops, blockShape.length);\n const sliceSize = backend_util.getSliceSize(reshapedPermuted, crops, blockShape.length);\n return transpose(x.reshape(reshaped), permuted)\n .reshape(reshapedPermuted)\n .slice(sliceBeginCoords, sliceSize);\n }\n spaceToBatchND(x, blockShape, paddings) {\n util.assert(x.rank <= 4, () => 'spaceToBatchND for rank > 4 with a WebGL backend not ' +\n 'implemented yet');\n const prod = blockShape.reduce((a, b) => a * b);\n const completePaddings = [[0, 0]];\n completePaddings.push(...paddings);\n for (let i = 1 + blockShape.length; i < x.shape.length; ++i) {\n completePaddings.push([0, 0]);\n }\n const paddedX = x.pad(completePaddings);\n const reshapedPaddedShape = backend_util.getReshaped(paddedX.shape, blockShape, prod, false);\n const permutedReshapedPaddedPermutation = backend_util.getPermuted(reshapedPaddedShape.length, blockShape.length, false);\n const flattenShape = backend_util.getReshapedPermuted(paddedX.shape, blockShape, prod, false);\n const paddedXT = transpose(paddedX.reshape(reshapedPaddedShape), permutedReshapedPaddedPermutation);\n return reshape(paddedXT, flattenShape);\n }\n reduce(x, reduceType, dtype) {\n const batchSize = x.shape[0];\n const inSize = x.shape[1];\n const windowSize = backend_util.computeOptimalWindowSize(inSize);\n const outSize = Math.ceil(inSize / windowSize);\n const reduceInfo = { windowSize, inSize, batchSize, outSize };\n const program = new ReduceProgram(reduceInfo, reduceType);\n const output = this.compileAndRun(program, [x], dtype);\n // No need to run another GPGPU program.\n if (output.shape[1] === 1) {\n return output;\n }\n return this.reduce(output, reduceType, dtype);\n }\n argReduce(x, reduceType, bestIndicesA = null) {\n let batchSize = x.shape[0];\n let inSize = x.shape[1];\n if (bestIndicesA != null) {\n batchSize = bestIndicesA.shape[0];\n inSize = bestIndicesA.shape[1];\n }\n const windowSize = backend_util.computeOptimalWindowSize(inSize);\n const reduceInfo = {\n windowSize,\n inSize,\n batchSize,\n outSize: Math.ceil(inSize / windowSize)\n };\n const program = new ArgMinMaxProgram(reduceInfo, reduceType, bestIndicesA == null);\n const inputs = [x];\n if (bestIndicesA != null) {\n inputs.push(bestIndicesA);\n }\n const output = this.compileAndRun(program, inputs, 'int32');\n // No need to run another GPGPU program.\n if (output.shape[1] === 1) {\n return output;\n }\n return this.argReduce(x, reduceType, output);\n }\n argReducePacked(x, reduceType, bestIndicesA = null) {\n const inShape = bestIndicesA != null ? bestIndicesA.shape : x.shape;\n const inSize = inShape[inShape.length - 1];\n const windowSize = backend_util.computeOptimalWindowSize(inSize);\n const program = new ArgMinMaxPackedProgram(inShape, windowSize, reduceType, bestIndicesA == null);\n const inputs = bestIndicesA == null ? 
[x] : [x, bestIndicesA];\n const output = this.compileAndRun(program, inputs, 'int32');\n if (output.rank === x.rank) {\n return this.argReducePacked(x, reduceType, output);\n }\n return output;\n }\n sum(x, axes) {\n backend_util.assertAxesAreInnerMostDims('sum', axes, x.rank);\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const inSize = util.sizeFromShape(reduceShape);\n const a2D = x.as2D(-1, inSize);\n const outputDType = tf.sumOutType(x.dtype);\n return this.reduce(a2D, 'sum', outputDType).reshape(outShape);\n }\n prod(x, axes) {\n const cpuRes = this.tryRunOnCpuOrThrow([x], () => this.cpuBackend.prod(x, axes));\n if (cpuRes) {\n return cpuRes;\n }\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const inSize = util.sizeFromShape(reduceShape);\n const a2D = x.as2D(-1, inSize);\n const outputDType = tf.sumOutType(x.dtype);\n return this.reduce(a2D, 'prod', outputDType).reshape(outShape);\n }\n unsortedSegmentSum(x, segmentIds, numSegments) {\n let axis = 0;\n const permutation = backend_util.getAxesPermutation([axis], x.rank);\n let permutedX = x;\n if (permutation != null) {\n permutedX = transpose(x, permutation);\n axis = backend_util.getInnerMostAxes(1, x.rank)[0];\n }\n const outShape = segment_util.computeOutShape(permutedX.shape, axis, numSegments);\n const inSize = util.sizeFromShape([permutedX.shape[axis]]);\n const a2D = permutedX.as2D(-1, inSize);\n const outputDType = tf.sumOutType(x.dtype);\n let result = this.segOpCompute(a2D, 'unsortedSegmentSum', segmentIds, outputDType, numSegments)\n .reshape(outShape);\n if (permutation != null) {\n result =\n transpose(result, backend_util.getUndoAxesPermutation(permutation));\n }\n return result;\n }\n segOpCompute(x, segOpType, segmentIds, dtype, numSegments) {\n const batchSize = x.shape[0];\n const inSize = x.shape[1];\n const windowSize = segment_util.segOpComputeOptimalWindowSize(inSize, numSegments);\n const segOpInfo = { windowSize, inSize, batchSize, numSegments };\n const program = new SegmentOpProgram(segOpInfo, segOpType);\n const output = this.compileAndRun(program, [x, segmentIds], dtype);\n // No need to run another GPGPU program.\n if (output.shape[1] === numSegments) {\n return output;\n }\n segmentIds = range(0, numSegments).tile([inSize / windowSize]);\n return this.segOpCompute(output, segOpType, segmentIds, dtype, numSegments);\n }\n argMinMaxReduce(x, axis, reduceType) {\n const axes = [axis];\n backend_util.assertAxesAreInnerMostDims('arg' + reduceType.charAt(0).toUpperCase() + reduceType.slice(1), axes, x.rank);\n if (!env().getBool('WEBGL_PACK_REDUCE') || x.rank <= 2) {\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const inSize = util.sizeFromShape(reduceShape);\n const a2D = x.as2D(-1, inSize);\n return this.argReduce(a2D, reduceType).reshape(outShape);\n }\n return this.argReducePacked(x, reduceType);\n }\n argMin(x, axis) {\n return this.argMinMaxReduce(x, axis, 'min');\n }\n argMax(x, axis) {\n return this.argMinMaxReduce(x, axis, 'max');\n }\n cumsum(x, axis, exclusive, reverse) {\n if (axis !== x.rank - 1) {\n throw new Error(`WebGL cumsum shader expects an inner-most axis=${x.rank - 1} ` +\n `but got axis=${axis}`);\n }\n const size = x.shape[axis];\n let result = x;\n // Use cumsum parallel algorithm, ref:\n // https://developer.nvidia.com/gpugems/gpugems3/part-vi-gpu-computing/chapter-39-parallel-prefix-sum-scan-cuda\n for (let i = 0; i <= Math.ceil(Math.log2(size)) - 
1; i++) {\n const program = new CumSumProgram(x.shape, false, reverse);\n const customSetup = program.getCustomSetupFunc(i);\n const prevResult = result;\n result = this.compileAndRun(program, [result], result.dtype, customSetup);\n prevResult.dispose();\n }\n // For exclusive cumsum, shift the end result in the direction of sum and\n // add 0 to the front index.\n if (exclusive) {\n const program = new CumSumProgram(x.shape, exclusive, reverse);\n const prevResult = result;\n result = this.compileAndRun(program, [result]);\n prevResult.dispose();\n }\n return result;\n }\n equal(a, b) {\n if (env().getBool('WEBGL_PACK_BINARY_OPERATIONS')) {\n return this.packedBinaryOp(a, b, binaryop_packed_gpu.EQUAL, 'bool');\n }\n const program = new BinaryOpProgram(binaryop_gpu.EQUAL, a.shape, b.shape);\n return this.compileAndRun(program, [a, b], 'bool');\n }\n less(a, b) {\n const cpuRes = this.tryRunOnCpuOrThrow([a, b], () => this.cpuBackend.less(a, b));\n if (cpuRes) {\n return cpuRes;\n }\n if (env().getBool('WEBGL_PACK_BINARY_OPERATIONS')) {\n return this.packedBinaryOp(a, b, binaryop_packed_gpu.LESS, 'bool');\n }\n const program = new BinaryOpProgram(binaryop_gpu.LESS, a.shape, b.shape);\n return this.compileAndRun(program, [a, b], 'bool');\n }\n lessEqual(a, b) {\n if (env().getBool('WEBGL_PACK_BINARY_OPERATIONS')) {\n return this.packedBinaryOp(a, b, binaryop_packed_gpu.LESS_EQUAL, 'bool');\n }\n const program = new BinaryOpProgram(binaryop_gpu.LESS_EQUAL, a.shape, b.shape);\n return this.compileAndRun(program, [a, b], 'bool');\n }\n greater(a, b) {\n const cpuRes = this.tryRunOnCpuOrThrow([a, b], () => this.cpuBackend.greater(a, b));\n if (cpuRes) {\n return cpuRes;\n }\n if (env().getBool('WEBGL_PACK_BINARY_OPERATIONS')) {\n return this.packedBinaryOp(a, b, binaryop_packed_gpu.GREATER, 'bool');\n }\n const program = new BinaryOpProgram(binaryop_gpu.GREATER, a.shape, b.shape);\n return this.compileAndRun(program, [a, b], 'bool');\n }\n greaterEqual(a, b) {\n if (env().getBool('WEBGL_PACK_BINARY_OPERATIONS')) {\n return this.packedBinaryOp(a, b, binaryop_packed_gpu.GREATER_EQUAL, 'bool');\n }\n const program = new BinaryOpProgram(binaryop_gpu.GREATER_EQUAL, a.shape, b.shape);\n return this.compileAndRun(program, [a, b], 'bool');\n }\n logicalNot(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.LOGICAL_NOT);\n return this.compileAndRun(program, [x]);\n }\n logicalAnd(a, b) {\n if (env().getBool('WEBGL_PACK_BINARY_OPERATIONS')) {\n return this.packedBinaryOp(a, b, binaryop_packed_gpu.LOGICAL_AND, 'bool');\n }\n const program = new BinaryOpProgram(binaryop_gpu.LOGICAL_AND, a.shape, b.shape);\n return this.compileAndRun(program, [a, b], 'bool');\n }\n logicalOr(a, b) {\n if (env().getBool('WEBGL_PACK_BINARY_OPERATIONS')) {\n return this.packedBinaryOp(a, b, binaryop_packed_gpu.LOGICAL_OR, 'bool');\n }\n const program = new BinaryOpProgram(binaryop_gpu.LOGICAL_OR, a.shape, b.shape);\n return this.compileAndRun(program, [a, b], 'bool');\n }\n select(condition, a, b) {\n const program = new SelectProgram(condition.rank, a.shape, a.rank);\n return this.compileAndRun(program, [condition, a, b], upcastType(a.dtype, b.dtype));\n }\n where(condition) {\n backend_util.warn('tf.where() in webgl locks the UI thread. 
' +\n 'Call tf.whereAsync() instead');\n const condVals = condition.dataSync();\n return whereImpl(condition.shape, condVals);\n }\n topk(x, k, sorted) {\n const xVals = x.dataSync();\n return topkImpl(xVals, x.shape, x.dtype, k, sorted);\n }\n min(x, axes) {\n backend_util.assertAxesAreInnerMostDims('min', axes, x.rank);\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const inSize = util.sizeFromShape(reduceShape);\n const a2D = x.as2D(-1, inSize);\n return this.reduce(a2D, 'min', a2D.dtype).reshape(outShape);\n }\n minimum(a, b) {\n const cpuRes = this.tryRunOnCpuOrThrow([a, b], () => this.cpuBackend.minimum(a, b));\n if (cpuRes) {\n return cpuRes;\n }\n const program = env().getBool('WEBGL_PACK_BINARY_OPERATIONS') ?\n new BinaryOpPackedProgram(binaryop_packed_gpu.MIN, a.shape, b.shape) :\n new BinaryOpProgram(binaryop_gpu.MIN, a.shape, b.shape);\n return this.compileAndRun(program, [a, b]);\n }\n mod(a, b) {\n const program = env().getBool('WEBGL_PACK_BINARY_OPERATIONS') ?\n new BinaryOpPackedProgram(binaryop_packed_gpu.MOD, a.shape, b.shape) :\n new BinaryOpProgram(binaryop_gpu.MOD, a.shape, b.shape);\n return this.compileAndRun(program, [a, b]);\n }\n maximum(a, b) {\n const cpuRes = this.tryRunOnCpuOrThrow([a, b], () => this.cpuBackend.maximum(a, b));\n if (cpuRes) {\n return cpuRes;\n }\n const program = env().getBool('WEBGL_PACK_BINARY_OPERATIONS') ?\n new BinaryOpPackedProgram(binaryop_packed_gpu.MAX, a.shape, b.shape) :\n new BinaryOpProgram(binaryop_gpu.MAX, a.shape, b.shape);\n return this.compileAndRun(program, [a, b]);\n }\n all(x, axes) {\n backend_util.assertAxesAreInnerMostDims('all', axes, x.rank);\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const inSize = util.sizeFromShape(reduceShape);\n const a2D = x.as2D(-1, inSize);\n return this.reduce(a2D, 'all', a2D.dtype).reshape(outShape);\n }\n any(x, axes) {\n backend_util.assertAxesAreInnerMostDims('any', axes, x.rank);\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const inSize = util.sizeFromShape(reduceShape);\n const a2D = x.as2D(-1, inSize);\n return this.reduce(a2D, 'any', a2D.dtype).reshape(outShape);\n }\n floorDiv(a, b) {\n const op = binaryop_gpu.INT_DIV;\n const outputDtype = 'int32';\n if (env().getBool('WEBGL_PACK_BINARY_OPERATIONS')) {\n return this.packedBinaryOp(a, b, binaryop_packed_gpu.INT_DIV, outputDtype);\n }\n const program = new BinaryOpProgram(op, a.shape, b.shape);\n return this.compileAndRun(program, [a, b], outputDtype);\n }\n packedUnaryOp(x, op, dtype) {\n const program = new UnaryOpPackedProgram(x.shape, op);\n return this.compileAndRun(program, [x], dtype);\n }\n packedBinaryOp(a, b, op, dtype, checkOutOfBounds = false) {\n const program = new BinaryOpPackedProgram(op, a.shape, b.shape, checkOutOfBounds);\n return this.compileAndRun(program, [a, b], dtype);\n }\n // Returns a TensorInfo with the complex shape and the dataId of the\n // underlying part. 
We need to do this because a reshaped complex tensor is\n // not reflected in its parts.\n makeComplexComponentTensorInfo(complexTensor, complexPart) {\n return {\n dataId: complexPart.dataId,\n dtype: complexPart.dtype,\n shape: complexTensor.shape\n };\n }\n addN(tensors) {\n if (tensors.length === 1) {\n return tensors[0];\n }\n // Limit the number of uploaded textures for optimization.\n if (tensors.length > env().get('WEBGL_MAX_TEXTURES_IN_SHADER')) {\n const midIndex = Math.floor(tensors.length / 2);\n const leftSide = this.addN(tensors.slice(0, midIndex));\n const rightSide = this.addN(tensors.slice(midIndex));\n return this.addN([leftSide, rightSide]);\n }\n const dtype = tensors.map(t => t.dtype).reduce((d1, d2) => upcastType(d1, d2));\n const shapes = tensors.map(t => t.shape);\n // We can make sure shapes are identical in op level.\n const usePackedOp = env().getBool('WEBGL_PACK');\n const program = usePackedOp ?\n new AddNPackedProgram(tensors[0].shape, shapes) :\n new AddNProgram(tensors[0].shape, shapes);\n return this.compileAndRun(program, tensors, dtype);\n }\n pow(a, b) {\n const usePackedOp = env().getBool('WEBGL_PACK_BINARY_OPERATIONS');\n const program = usePackedOp ?\n new BinaryOpPackedProgram(binaryop_packed_gpu.POW, a.shape, b.shape) :\n new BinaryOpProgram(binaryop_gpu.POW, a.shape, b.shape);\n const dtype = upcastType(a.dtype, b.dtype);\n return this.compileAndRun(program, [a, b], dtype);\n }\n ceil(x) {\n if (this.shouldExecuteOnCPU([x])) {\n const outValues = ceilImplCPU(this.texData.get(x.dataId).values, x.dtype);\n return this.makeOutput(x.shape, x.dtype, outValues);\n }\n if (env().getBool('WEBGL_PACK_UNARY_OPERATIONS')) {\n return this.packedUnaryOp(x, unary_op.CEIL, x.dtype);\n }\n const program = new UnaryOpProgram(x.shape, unary_op.CEIL);\n return this.compileAndRun(program, [x]);\n }\n floor(x) {\n if (this.shouldExecuteOnCPU([x])) {\n const outValues = floorImplCPU(this.texData.get(x.dataId).values, x.dtype);\n return this.makeOutput(x.shape, x.dtype, outValues);\n }\n if (env().getBool('WEBGL_PACK_UNARY_OPERATIONS')) {\n return this.packedUnaryOp(x, unary_op.FLOOR, x.dtype);\n }\n const program = new UnaryOpProgram(x.shape, unary_op.FLOOR);\n return this.compileAndRun(program, [x]);\n }\n sign(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.SIGN);\n return this.compileAndRun(program, [x]);\n }\n isNaN(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.IS_NAN);\n return this.compileAndRun(program, [x], 'bool');\n }\n isInf(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.IS_INF);\n return this.compileAndRun(program, [x], 'bool');\n }\n isFinite(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.IS_FINITE);\n return this.compileAndRun(program, [x], 'bool');\n }\n round(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.ROUND);\n return this.compileAndRun(program, [x]);\n }\n exp(x) {\n if (this.shouldExecuteOnCPU([x])) {\n const outValues = expImplCPU(this.texData.get(x.dataId).values, x.dtype);\n return this.makeOutput(x.shape, x.dtype, outValues);\n }\n if (env().getBool('WEBGL_PACK_UNARY_OPERATIONS')) {\n return this.packedUnaryOp(x, unary_op.EXP, x.dtype);\n }\n const program = new UnaryOpProgram(x.shape, unary_op.EXP);\n return this.compileAndRun(program, [x]);\n }\n expm1(x) {\n if (this.shouldExecuteOnCPU([x])) {\n const outValues = expm1ImplCPU(this.texData.get(x.dataId).values, x.dtype);\n return this.makeOutput(x.shape, x.dtype, outValues);\n }\n if 
(env().getBool('WEBGL_PACK_UNARY_OPERATIONS')) {\n return this.packedUnaryOp(x, unary_op.EXPM1, x.dtype);\n }\n const program = new UnaryOpProgram(x.shape, unary_op.EXPM1);\n return this.compileAndRun(program, [x]);\n }\n softmax(logits, dim) {\n const axes = util.parseAxisParam([dim], logits.shape);\n // TODO(annxingyuan): Call maxImpl rather than op as part of softmax kernel\n // modularization.\n const maxLogit = max(logits, axes);\n const expandedShape = backend_util.expandShapeToKeepDim(maxLogit.shape, axes);\n // TODO(annxingyuan): Call sub directly as part of softmax kernel\n // modularization.\n const a = tf.sub(logits, maxLogit.reshape(expandedShape));\n const b = this.exp(a);\n const sumExp = this.sum(b, axes).reshape(expandedShape);\n // TODO(annxingyuan): Call divImpl rather than op as part of softmax kernel\n // modularization.\n return div(b, sumExp);\n }\n log(x) {\n if (this.shouldExecuteOnCPU([x])) {\n const outValues = logImplCPU(this.texData.get(x.dataId).values, x.dtype);\n return this.makeOutput(x.shape, x.dtype, outValues);\n }\n if (env().getBool('WEBGL_PACK_UNARY_OPERATIONS')) {\n return this.packedUnaryOp(x, unary_packed_op.LOG, x.dtype);\n }\n const program = new UnaryOpProgram(x.shape, unary_op.LOG);\n return this.compileAndRun(program, [x]);\n }\n log1p(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.LOG1P);\n return this.compileAndRun(program, [x]);\n }\n sqrt(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.SQRT);\n return this.compileAndRun(program, [x]);\n }\n rsqrt(x) {\n if (this.shouldExecuteOnCPU([x])) {\n const outValues = rsqrtImplCPU(this.texData.get(x.dataId).values, x.dtype);\n return this.makeOutput(x.shape, x.dtype, outValues);\n }\n const program = new UnaryOpProgram(x.shape, unary_op.RSQRT);\n return this.compileAndRun(program, [x]);\n }\n reciprocal(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.RECIPROCAL);\n return this.compileAndRun(program, [x]);\n }\n relu(x) {\n let program;\n if (env().getBool('WEBGL_PACK')) {\n program = new UnaryOpPackedProgram(x.shape, unary_packed_op.RELU);\n }\n else {\n program = new UnaryOpProgram(x.shape, unary_op.RELU);\n }\n return this.compileAndRun(program, [x]);\n }\n relu6(x) {\n let program;\n if (env().getBool('WEBGL_PACK')) {\n program = new UnaryOpPackedProgram(x.shape, unary_packed_op.RELU6);\n }\n else {\n program = new UnaryOpProgram(x.shape, unary_op.RELU6);\n }\n return this.compileAndRun(program, [x]);\n }\n prelu(x, alpha) {\n const program = env().getBool('WEBGL_PACK_BINARY_OPERATIONS') ?\n new BinaryOpPackedProgram(binaryop_packed_gpu.PRELU, x.shape, alpha.shape) :\n new BinaryOpProgram(binaryop_gpu.PRELU, x.shape, alpha.shape);\n return this.compileAndRun(program, [x, alpha]);\n }\n elu(x) {\n if (env().getBool('WEBGL_PACK_UNARY_OPERATIONS')) {\n return this.packedUnaryOp(x, unary_packed_op.ELU, x.dtype);\n }\n const program = new UnaryOpProgram(x.shape, unary_op.ELU);\n return this.compileAndRun(program, [x]);\n }\n eluDer(dy, y) {\n const program = env().getBool('WEBGL_PACK_BINARY_OPERATIONS') ?\n new BinaryOpPackedProgram(binaryop_packed_gpu.ELU_DER, dy.shape, y.shape) :\n new BinaryOpProgram(binaryop_gpu.ELU_DER, dy.shape, y.shape);\n return this.compileAndRun(program, [dy, y]);\n }\n selu(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.SELU);\n return this.compileAndRun(program, [x]);\n }\n clip(x, min, max) {\n let program;\n if (env().getBool('WEBGL_PACK_CLIP')) {\n program = new ClipPackedProgram(x.shape);\n }\n else {\n program = new 
ClipProgram(x.shape);\n }\n const customSetup = program.getCustomSetupFunc(min, max);\n return this.compileAndRun(program, [x], null, customSetup);\n }\n abs(x) {\n // TODO: handle cases when x is complex.\n if (this.shouldExecuteOnCPU([x]) && x.dtype !== 'complex64') {\n const outValues = simpleAbsImplCPU(this.texData.get(x.dataId).values);\n return this.makeOutput(x.shape, x.dtype, outValues);\n }\n if (env().getBool('WEBGL_PACK_UNARY_OPERATIONS')) {\n return this.packedUnaryOp(x, unary_op.ABS, x.dtype);\n }\n const program = new UnaryOpProgram(x.shape, unary_op.ABS);\n return this.compileAndRun(program, [x]);\n }\n complexAbs(x) {\n const xData = this.texData.get(x.dataId);\n const program = new ComplexAbsProgram(x.shape);\n const inputs = [\n this.makeComplexComponentTensorInfo(x, xData.complexTensorInfos.real),\n this.makeComplexComponentTensorInfo(x, xData.complexTensorInfos.imag),\n ];\n return this.compileAndRun(program, inputs);\n }\n sigmoid(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.SIGMOID);\n return this.compileAndRun(program, [x]);\n }\n softplus(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.SOFTPLUS);\n return this.compileAndRun(program, [x]);\n }\n asin(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.ASIN);\n return this.compileAndRun(program, [x]);\n }\n acos(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.ACOS);\n return this.compileAndRun(program, [x]);\n }\n atan(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.ATAN);\n return this.compileAndRun(program, [x]);\n }\n sinh(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.SINH);\n return this.compileAndRun(program, [x]);\n }\n cosh(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.COSH);\n return this.compileAndRun(program, [x]);\n }\n tanh(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.TANH);\n return this.compileAndRun(program, [x]);\n }\n asinh(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.ASINH);\n return this.compileAndRun(program, [x]);\n }\n acosh(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.ACOSH);\n return this.compileAndRun(program, [x]);\n }\n atanh(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.ATANH);\n return this.compileAndRun(program, [x]);\n }\n erf(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.ERF);\n return this.compileAndRun(program, [x]);\n }\n step(x, alpha) {\n const program = new UnaryOpProgram(x.shape, unary_op.STEP(alpha));\n return this.compileAndRun(program, [x]);\n }\n conv2dByMatMul(x, filter, convInfo, bias, activation, preluActivationWeights) {\n // Reshapes conv2D input to 2D tensors, uses matMul and then reshape the\n // result from 2D to 4D.\n const xShape = x.shape;\n const xTexData = this.texData.get(x.dataId);\n const sharedMatMulDim = convInfo.inChannels;\n const outerShapeX = xShape[0] * xShape[1] * xShape[2];\n const outerShapeFilter = convInfo.outChannels;\n const isChannelsLast = convInfo.dataFormat === 'channelsLast';\n const transposeA = false;\n const transposeB = false;\n // TODO: Once reduction ops are packed, batchMatMul will always be packed\n // and we can remove this condition.\n const batchMatMulWillBeUnpacked = (outerShapeX === 1 || outerShapeFilter === 1) &&\n sharedMatMulDim > MATMUL_SHARED_DIM_THRESHOLD;\n const reshapeWillBeExpensive = xShape[2] % 2 !== 0 && !!xTexData.isPacked;\n if (batchMatMulWillBeUnpacked || !env().getBool('WEBGL_LAZILY_UNPACK') ||\n !env().getBool('WEBGL_PACK_BINARY_OPERATIONS') ||\n 
!reshapeWillBeExpensive) {\n const targetShape = isChannelsLast ? xShape[0] * xShape[1] * xShape[2] :\n xShape[0] * xShape[2] * xShape[3];\n const xReshaped = reshape(x, [1, targetShape, convInfo.inChannels]);\n const filterReshaped = reshape(filter, [1, convInfo.inChannels, convInfo.outChannels]);\n const result = this.fusedBatchMatMul({\n a: xReshaped,\n b: filterReshaped,\n transposeA,\n transposeB,\n bias,\n activation,\n preluActivationWeights\n });\n return reshape(result, convInfo.outShape);\n }\n // Following optimization is specific to packed |x| with odd row count\n // (For example, in channelLast mode, 'row count' refers to x.shape[2]):\n // we avoid expensive packed 2x2 reshape by padding row count to next,\n // even number. When x.shape[2] is odd, the result of packed batchMatMul is\n // the same (has the same texture layout and and values in the texture) as\n // it is for even x.shape[2] + 1. We make the odd-rows tensor to look like\n // even-rows tensor before the operation and, after the batchMatMul,\n // fix the even-rows result to have odd number of rows.\n const targetShape = isChannelsLast ?\n xShape[0] * xShape[1] * (xShape[2] + 1) :\n xShape[0] * xShape[2] * (xShape[3] + 1);\n const xReshaped = {\n dataId: x.dataId,\n shape: [1, targetShape, convInfo.inChannels],\n dtype: x.dtype\n };\n // xTexData.shape gets referenced from GPGPUBinary.inShapeInfos.\n // Decrementing row count, after batchMatMul->...->compileProgram leads to\n // invalid row count within the reference in GPGPUBinary.inShapeInfos.\n // Alternative fix would be to provide a copy to GPGPUBinary.inShapeInfos\n // in compileProgram method, but that would affect compilation of all\n // programs - instead, provide a copy here, with even row count, before\n // calling batchMatMul->...->compileProgram and after that, the original\n // xTexData.shape is restored.\n const originalXTexDataShape = xTexData.shape;\n xTexData.shape = xTexData.shape.slice();\n xTexData.shape[xTexData.shape.length - 2]++;\n util.assert(webgl_util.isReshapeFree(xTexData.shape, xReshaped.shape), () => `packed reshape ${xTexData.shape} to ${xReshaped.shape} isn't free`);\n const filterReshaped = reshape(filter, [1, convInfo.inChannels, convInfo.outChannels]);\n const pointwiseConv = this.fusedBatchMatMul({\n a: xReshaped,\n b: filterReshaped,\n transposeA,\n transposeB,\n bias,\n activation,\n preluActivationWeights\n });\n const pointwiseConvTexData = this.texData.get(pointwiseConv.dataId);\n util.assert(pointwiseConvTexData.isPacked, () => 'batchMatMul result is expected to be packed');\n // Restore the input shape to original.\n xTexData.shape = originalXTexDataShape;\n // Set the output shape - there is no need for expensive reshape as data\n // layout is already correct.\n pointwiseConvTexData.shape = convInfo.outShape;\n return engine().makeTensorFromDataId(pointwiseConv.dataId, convInfo.outShape, pointwiseConv.dtype);\n }\n conv2dWithIm2Row(x, filter, convInfo, bias, activation, preluActivationWeights) {\n // Rearranges conv2d input so each block to be convolved over forms the\n // column of a new matrix with shape [filterWidth * filterHeight *\n // inChannels, outHeight * outWidth]. The filter is also rearranged so each\n // output channel forms a row of a new matrix with shape [outChannels,\n // filterWidth * filterHeight * inChannels]. 
The convolution is then\n // computed by multiplying these matrices and reshaping the result.\n const { filterWidth, filterHeight, inChannels, outWidth, outHeight, dataFormat } = convInfo;\n const isChannelsLast = dataFormat === 'channelsLast';\n const sharedDim = filterWidth * filterHeight * inChannels;\n const numCols = outHeight * outWidth;\n const x2ColShape = [sharedDim, numCols];\n const transposeA = true;\n const transposeB = false;\n const xSqueezed = x.squeeze([0]);\n const w2Row = filter.reshape([1, sharedDim, -1]);\n const im2ColProgram = new Im2ColPackedProgram(x2ColShape, xSqueezed.shape, convInfo);\n const im2Col = this.compileAndRun(im2ColProgram, [xSqueezed]).reshape([\n 1, x2ColShape[0], x2ColShape[1]\n ]);\n const hasBias = bias != null;\n const hasPreluActivationWeights = preluActivationWeights != null;\n const fusedActivation = activation ? mapActivationToShaderProgram(activation, true) : null;\n const matmulProgram = new MatMulPackedProgram(im2Col.shape, w2Row.shape, [1, numCols, convInfo.outChannels], transposeA, transposeB, hasBias, fusedActivation, hasPreluActivationWeights);\n const inputs = [im2Col, w2Row];\n if (bias) {\n inputs.push(bias);\n }\n if (hasPreluActivationWeights) {\n inputs.push(preluActivationWeights);\n }\n const product = this.compileAndRun(matmulProgram, inputs);\n if (isChannelsLast) {\n return product.reshape([1, outHeight, outWidth, convInfo.outChannels]);\n }\n else {\n return product.reshape([1, convInfo.outChannels, outHeight, outWidth]);\n }\n }\n fusedConv2d({ input, filter, convInfo, bias, activation, preluActivationWeights }) {\n if (convInfo.filterHeight === 1 && convInfo.filterWidth === 1 &&\n convInfo.dilationHeight === 1 && convInfo.dilationWidth === 1 &&\n convInfo.strideHeight === 1 && convInfo.strideWidth === 1 &&\n (convInfo.padInfo.type === 'SAME' ||\n convInfo.padInfo.type === 'VALID')) {\n return this.conv2dByMatMul(input, filter, convInfo, bias, activation, preluActivationWeights);\n }\n if (env().getBool('WEBGL_CONV_IM2COL') && input.shape[0] === 1) {\n return this.conv2dWithIm2Row(input, filter, convInfo, bias, activation, preluActivationWeights);\n }\n const hasBias = bias != null;\n const hasPreluActivationWeights = preluActivationWeights != null;\n const fusedActivation = activation ? 
mapActivationToShaderProgram(activation, false) : null;\n const program = new Conv2DProgram(convInfo, hasBias, fusedActivation, hasPreluActivationWeights);\n const inputs = [input, filter];\n if (bias) {\n inputs.push(bias);\n }\n if (preluActivationWeights) {\n inputs.push(preluActivationWeights);\n }\n return this.compileAndRun(program, inputs);\n }\n conv2d(x, filter, convInfo) {\n if (convInfo.filterHeight === 1 && convInfo.filterWidth === 1 &&\n convInfo.dilationHeight === 1 && convInfo.dilationWidth === 1 &&\n convInfo.strideHeight === 1 && convInfo.strideWidth === 1 &&\n (convInfo.padInfo.type === 'SAME' ||\n convInfo.padInfo.type === 'VALID')) {\n return this.conv2dByMatMul(x, filter, convInfo);\n }\n if (env().getBool('WEBGL_CONV_IM2COL') && x.shape[0] === 1) {\n return this.conv2dWithIm2Row(x, filter, convInfo);\n }\n const program = new Conv2DProgram(convInfo);\n return this.compileAndRun(program, [x, filter]);\n }\n conv2dDerInput(dy, filter, convInfo) {\n const program = new Conv2DDerInputProgram(convInfo);\n return this.compileAndRun(program, [dy, filter]);\n }\n conv2dDerFilter(x, dy, convInfo) {\n const program = new Conv2DDerFilterProgram(convInfo);\n return this.compileAndRun(program, [x, dy]);\n }\n fusedDepthwiseConv2D({ input, filter, convInfo, bias, activation, preluActivationWeights }) {\n const shouldPackDepthwiseConv = env().getBool('WEBGL_PACK_DEPTHWISECONV') &&\n convInfo.strideWidth <= 2 &&\n convInfo.outChannels / convInfo.inChannels === 1;\n const fusedActivation = activation ?\n mapActivationToShaderProgram(activation, shouldPackDepthwiseConv) :\n null;\n const inputs = [input, filter];\n const hasBias = bias != null;\n const hasPreluActivationWeights = preluActivationWeights != null;\n if (hasBias) {\n inputs.push(bias);\n }\n if (hasPreluActivationWeights) {\n inputs.push(preluActivationWeights);\n }\n let program;\n if (shouldPackDepthwiseConv) {\n program = new DepthwiseConvPacked2DProgram(convInfo, hasBias, fusedActivation, hasPreluActivationWeights);\n return this.compileAndRun(program, inputs);\n }\n program = new DepthwiseConv2DProgram(convInfo, hasBias, fusedActivation, hasPreluActivationWeights);\n return this.compileAndRun(program, inputs);\n }\n depthwiseConv2D(x, filter, convInfo) {\n let program;\n if (env().getBool('WEBGL_PACK_DEPTHWISECONV') &&\n convInfo.strideWidth <= 2 &&\n convInfo.outChannels / convInfo.inChannels === 1) {\n program = new DepthwiseConvPacked2DProgram(convInfo);\n return this.compileAndRun(program, [x, filter]);\n }\n program = new DepthwiseConv2DProgram(convInfo);\n return this.compileAndRun(program, [x, filter]);\n }\n depthwiseConv2DDerInput(dy, filter, convInfo) {\n const program = new DepthwiseConv2DDerInputProgram(convInfo);\n return this.compileAndRun(program, [dy, filter]);\n }\n depthwiseConv2DDerFilter(x, dy, convInfo) {\n const program = new DepthwiseConv2DDerFilterProgram(convInfo);\n return this.compileAndRun(program, [x, dy]);\n }\n conv3d(x, filter, convInfo) {\n const program = new Conv3DProgram(convInfo);\n return this.compileAndRun(program, [x, filter]);\n }\n conv3dDerInput(dy, filter, convInfo) {\n const program = new Conv3DDerInputProgram(convInfo);\n return this.compileAndRun(program, [dy, filter]);\n }\n conv3dDerFilter(x, dy, convInfo) {\n const program = new Conv3DDerFilterProgram(convInfo);\n return this.compileAndRun(program, [x, dy]);\n }\n unstack(x, axis) {\n const num = x.shape[axis];\n const outShape = new Array(x.rank - 1);\n let outIndex = 0;\n for (let i = 0; i < x.rank; i++) {\n if (i 
!== axis) {\n outShape[outIndex++] = x.shape[i];\n }\n }\n const begin = new Array(x.rank).fill(0);\n const size = x.shape.slice();\n size[axis] = 1;\n const res = new Array(num);\n for (let i = 0; i < res.length; i++) {\n begin[axis] = i;\n res[i] = this.slice(x, begin, size).reshape(outShape);\n }\n return res;\n }\n avgPool3d(x, convInfo) {\n const program = new Pool3DProgram(convInfo, 'avg', false);\n return this.compileAndRun(program, [x], 'float32');\n }\n avgPool3dBackprop(dy, x, convInfo) {\n const avgPool3dBackpropProgram = new AvgPool3DBackpropProgram(convInfo);\n return this.compileAndRun(avgPool3dBackpropProgram, [dy], x.dtype);\n }\n maxPool3d(x, convInfo) {\n const program = new Pool3DProgram(convInfo, 'max', false);\n return this.compileAndRun(program, [x], 'float32');\n }\n maxPool3dBackprop(dy, x, y, convInfo) {\n const getPositions = true;\n const maxPool3dPositionsProgram = new Pool3DProgram(convInfo, 'max', getPositions);\n const maxPool3dPositions = this.compileAndRun(maxPool3dPositionsProgram, [x]);\n const maxPool3dBackPropProgram = new MaxPool3DBackpropProgram(convInfo);\n const result = this.compileAndRun(maxPool3dBackPropProgram, [dy, maxPool3dPositions], x.dtype);\n maxPool3dPositions.dispose();\n return result;\n }\n resizeBilinear(x, newHeight, newWidth, alignCorners) {\n const program = env().getBool('WEBGL_PACK_IMAGE_OPERATIONS') ?\n new ResizeBilinearPackedProgram(x.shape, newHeight, newWidth, alignCorners) :\n new ResizeBilinearProgram(x.shape, newHeight, newWidth, alignCorners);\n return this.compileAndRun(program, [x], 'float32');\n }\n resizeBilinearBackprop(dy, x, alignCorners) {\n const program = new ResizeBilinearBackpropProgram(dy, x, alignCorners);\n return this.compileAndRun(program, [dy]);\n }\n resizeNearestNeighbor(x, newHeight, newWidth, alignCorners) {\n const program = new ResizeNearestNeighborProgram(x.shape, newHeight, newWidth, alignCorners);\n return this.compileAndRun(program, [x]);\n }\n resizeNearestNeighborBackprop(dy, x, alignCorners) {\n const program = new ResizeNearestNeigborBackpropProgram(dy, x, alignCorners);\n return this.compileAndRun(program, [dy]);\n }\n multinomial(logits, normalized, numSamples, seed) {\n const probs = normalized ? logits : softmax(logits);\n const batchSize = probs.shape[0];\n const numOutcomes = probs.shape[1];\n const program = new MultinomialProgram(batchSize, numOutcomes, numSamples);\n const customSetup = program.getCustomSetupFunc(seed);\n return this.compileAndRun(program, [probs], 'int32', customSetup);\n }\n oneHot(indices, depth, onValue, offValue) {\n const program = new OneHotProgram(indices.size, depth, onValue, offValue);\n return this.compileAndRun(program, [indices]);\n }\n diag(x) {\n const program = new DiagProgram(x.size);\n return this.compileAndRun(program, [x]);\n }\n cropAndResize(image, boxes, boxIndex, cropSize, method, extrapolationValue) {\n const program = new CropAndResizeProgram(image.shape, boxes.shape, cropSize, method, extrapolationValue);\n return this.compileAndRun(program, [image, boxes, boxIndex], 'float32');\n }\n depthToSpace(x, blockSize, dataFormat) {\n util.assert(blockSize > 1, () => `blockSize should be > 1 for depthToSpace, but was: ${blockSize}`);\n const batchSize = x.shape[0];\n const inputHeight = (dataFormat === 'NHWC') ? x.shape[1] : x.shape[2];\n const inputWidth = (dataFormat === 'NHWC') ? x.shape[2] : x.shape[3];\n const inputDepth = (dataFormat === 'NHWC') ? 
x.shape[3] : x.shape[1];\n const outputHeight = inputHeight * blockSize;\n const outputWidth = inputWidth * blockSize;\n const outputDepth = inputDepth / (blockSize * blockSize);\n const outputShape = (dataFormat === 'NHWC') ?\n [batchSize, outputHeight, outputWidth, outputDepth] :\n [batchSize, outputDepth, outputHeight, outputWidth];\n const program = new DepthToSpaceProgram(outputShape, blockSize, dataFormat);\n return this.compileAndRun(program, [x]);\n }\n split(x, sizeSplits, axis) {\n return split(x, sizeSplits, axis);\n }\n scatterND(indices, updates, shape) {\n const { sliceRank, numUpdates, sliceSize, strides, outputSize } = backend_util.calculateShapes(updates, indices, shape);\n const flattenShape = [outputSize / sliceSize, sliceSize];\n const flattenIndices = indices.reshape([numUpdates, sliceRank]);\n const flattenX = updates.reshape([numUpdates, sliceSize]);\n if (outputSize === 0) {\n return backend_util.reshapeTensor(tensor([]), shape);\n }\n const defaultValue = scalar(0);\n const program = new ScatterProgram(numUpdates, sliceRank, flattenIndices.rank, flattenX.rank, strides, flattenShape);\n const res = this.compileAndRun(program, [flattenX, flattenIndices, defaultValue]);\n return res.reshape(shape);\n }\n sparseToDense(sparseIndices, sparseValues, outputShape, defaultValue) {\n const { sliceRank, numUpdates, strides, outputSize } = backend_util.calculateShapes(sparseValues, sparseIndices, outputShape);\n const sumDupeIndices = false;\n const program = new ScatterProgram(numUpdates, sliceRank, sparseIndices.rank, sparseValues.rank, strides, [outputSize, 1], sumDupeIndices);\n const res = this.compileAndRun(program, [sparseValues, sparseIndices, defaultValue]);\n return res.reshape(outputShape);\n }\n gatherND(x, indices) {\n const indicesShape = indices.shape;\n const sliceRank = indicesShape[indicesShape.length - 1];\n const [resultShape, numSlices, sliceSize, strides] = backend_util.prepareAndValidate(x, indices);\n const flattenIndices = indices.reshape([numSlices, sliceRank]);\n const flattenX = x.reshape([x.size / sliceSize, sliceSize]);\n const program = new GatherNDProgram(sliceRank, strides, [numSlices, sliceSize]);\n const res = this.compileAndRun(program, [flattenX, flattenIndices]);\n return res.reshape(resultShape);\n }\n fill(shape, value, dtype) {\n dtype = dtype || util.inferDtype(value);\n if (dtype === 'string') {\n // String type should be handled in CPU memory.\n const values = util.getArrayFromDType(dtype, util.sizeFromShape(shape));\n values.fill(value);\n return engine().makeTensor(values, shape, dtype, this);\n }\n else {\n const program = new FillProgram(shape, value);\n const customSetup = program.getCustomSetupFunc(value);\n return this.compileAndRun(program, [], dtype, customSetup);\n }\n }\n onesLike(x) {\n if (x.dtype === 'string') {\n throw new Error('onesLike is not supported under string dtype');\n }\n else {\n // TODO(cais, smilkov): Add WebGL shader for onesLike:\n // https://github.com/tensorflow/tfjs/issues/1293\n return this.fill(x.shape, 1, x.dtype);\n }\n }\n zerosLike(x) {\n return this.fill(x.shape, x.dtype === 'string' ? 
'' : 0, x.dtype);\n }\n linspace(start, stop, num) {\n // TODO: Use CPU implementation due to the precision problem in Safari.\n return backend_util.linspaceImpl(start, stop, num);\n }\n makeTensorInfo(shape, dtype, values) {\n const dataId = this.write(values, shape, dtype);\n this.texData.get(dataId).usage = null;\n return { dataId, shape, dtype };\n }\n makeOutput(shape, dtype, values) {\n const { dataId } = this.makeTensorInfo(shape, dtype, values);\n return engine().makeTensorFromDataId(dataId, shape, dtype, this);\n }\n unpackTensor(input) {\n const program = new UnpackProgram(input.shape);\n return this.runWebGLProgram(program, [input], input.dtype);\n }\n packTensor(input) {\n const program = new PackProgram(input.shape);\n const preventEagerUnpackingOutput = true;\n return this.runWebGLProgram(program, [input], input.dtype, null /* customSetup */, preventEagerUnpackingOutput);\n }\n packedReshape(input, afterShape) {\n const input3DShape = [\n webgl_util.getBatchDim(input.shape),\n ...webgl_util.getRowsCols(input.shape)\n ];\n const input3D = {\n dtype: input.dtype,\n shape: input3DShape,\n dataId: input.dataId\n };\n const afterShapeAs3D = [\n webgl_util.getBatchDim(afterShape), ...webgl_util.getRowsCols(afterShape)\n ];\n const program = new ReshapePackedProgram(afterShapeAs3D, input3DShape);\n const preventEagerUnpackingOfOutput = true;\n const output = this.runWebGLProgram(program, [input3D], input.dtype, null /* customSetup */, preventEagerUnpackingOfOutput);\n return { dataId: output.dataId, shape: afterShape, dtype: output.dtype };\n }\n decode(dataId) {\n const texData = this.texData.get(dataId);\n const { isPacked, shape, dtype } = texData;\n const shapeAs3D = webgl_util.getShapeAs3D(shape);\n let program;\n if (isPacked) {\n program = new DecodeMatrixPackedProgram(shapeAs3D);\n }\n else {\n program = new DecodeMatrixProgram(shapeAs3D);\n }\n const preventEagerUnpackingOfOutput = true;\n const out = this.runWebGLProgram(program, [{ shape: shapeAs3D, dtype, dataId }], dtype, null /* customSetup */, preventEagerUnpackingOfOutput);\n return { dtype, shape, dataId: out.dataId };\n }\n runWebGLProgram(program, inputs, outputDtype, customSetup, preventEagerUnpackingOfOutput = false) {\n const output = this.makeTensorInfo(program.outputShape, outputDtype);\n const outData = this.texData.get(output.dataId);\n if (program.packedOutput) {\n outData.isPacked = true;\n }\n if (program.outPackingScheme === tex_util.PackingScheme.DENSE) {\n const texelShape = tex_util.getDenseTexShape(program.outputShape);\n // For a densely packed output, we explicitly set texShape\n // so it doesn't get assigned later according to our typical packing\n // scheme wherein a single texel can only contain values from adjacent\n // rows/cols.\n outData.texShape = texelShape.map(d => d * 2);\n }\n if (program.outTexUsage != null) {\n outData.usage = program.outTexUsage;\n }\n if (util.sizeFromShape(output.shape) === 0) {\n // Short-circuit the computation since the result is empty (has 0 in its\n // shape).\n outData.values =\n util.getTypedArrayFromDType(output.dtype, 0);\n return output;\n }\n const dataToDispose = [];\n const inputsData = inputs.map(input => {\n if (input.dtype === 'complex64') {\n throw new Error(`GPGPUProgram does not support complex64 input. 
For complex64 ` +\n `dtypes, please separate the program into real and imaginary ` +\n `parts.`);\n }\n let texData = this.texData.get(input.dataId);\n if (texData.texture == null) {\n if (!program.packedInputs &&\n util.sizeFromShape(input.shape) <=\n env().getNumber('WEBGL_SIZE_UPLOAD_UNIFORM')) {\n // Upload small tensors that live on the CPU as uniforms, not as\n // textures. Do this only when the environment supports 32bit floats\n // due to problems when comparing 16bit floats with 32bit floats.\n // TODO(https://github.com/tensorflow/tfjs/issues/821): Make it\n // possible for packed shaders to sample from uniforms.\n return {\n shape: input.shape,\n texData: null,\n isUniform: true,\n uniformValues: texData.values\n };\n }\n // This ensures that if a packed program's inputs have not yet been\n // uploaded to the GPU, they get uploaded as packed right off the bat.\n if (program.packedInputs) {\n texData.isPacked = true;\n texData.shape = input.shape;\n }\n }\n else if (!!texData.isPacked !== !!program.packedInputs) {\n input = texData.isPacked ? this.unpackTensor(input) :\n this.packTensor(input);\n dataToDispose.push(input);\n texData = this.texData.get(input.dataId);\n }\n else if (texData.isPacked &&\n !webgl_util.isReshapeFree(texData.shape, input.shape)) {\n // This is a special case where a texture exists for a tensor\n // but the shapes are incompatible (due to packing constraints) because\n // the tensor did not have a chance to go through the packed reshape\n // shader. This only happens when we reshape the *same* tensor to form\n // *distinct* inputs to an op, e.g. dotting a vector with itself. This\n // case will disappear once packed uploading is the default.\n const savedInput = input;\n const targetShape = input.shape;\n input.shape = texData.shape;\n input = this.packedReshape(input, targetShape);\n dataToDispose.push(input);\n texData = this.texData.get(input.dataId);\n savedInput.shape = targetShape;\n }\n this.uploadToGPU(input.dataId);\n return { shape: input.shape, texData, isUniform: false };\n });\n this.uploadToGPU(output.dataId);\n const outputData = { shape: output.shape, texData: outData, isUniform: false };\n const key = gpgpu_math.makeShaderKey(program, inputsData, outputData);\n const binary = this.getAndSaveBinary(key, () => {\n return gpgpu_math.compileProgram(this.gpgpu, program, inputsData, outputData);\n });\n const shouldTimeProgram = this.activeTimers != null;\n let query;\n if (shouldTimeProgram) {\n query = this.startTimer();\n }\n gpgpu_math.runProgram(this.gpgpu, binary, inputsData, outputData, customSetup);\n dataToDispose.forEach(info => this.disposeIntermediateTensorInfo(info));\n if (shouldTimeProgram) {\n query = this.endTimer(query);\n this.activeTimers.push({ name: program.constructor.name, query: this.getQueryTime(query) });\n }\n if (!env().getBool('WEBGL_LAZILY_UNPACK') && outData.isPacked &&\n preventEagerUnpackingOfOutput === false) {\n const unpacked = this.unpackTensor(output);\n this.disposeIntermediateTensorInfo(output);\n return unpacked;\n }\n return output;\n }\n compileAndRun(program, inputs, outputDtype, customSetup, preventEagerUnpackingOfOutput = false) {\n outputDtype = outputDtype || inputs[0].dtype;\n const outInfo = this.runWebGLProgram(program, inputs, outputDtype, customSetup, preventEagerUnpackingOfOutput);\n return engine().makeTensorFromDataId(outInfo.dataId, outInfo.shape, outInfo.dtype);\n }\n getAndSaveBinary(key, getBinary) {\n if (!(key in this.binaryCache)) {\n this.binaryCache[key] = getBinary();\n }\n 
return this.binaryCache[key];\n }\n getTextureManager() {\n return this.textureManager;\n }\n dispose() {\n if (this.disposed) {\n return;\n }\n // Avoid disposing the compiled webgl programs during unit testing because\n // it slows down test execution.\n if (!env().getBool('IS_TEST')) {\n const allKeys = Object.keys(this.binaryCache);\n allKeys.forEach(key => {\n this.gpgpu.deleteProgram(this.binaryCache[key].webGLProgram);\n delete this.binaryCache[key];\n });\n }\n this.textureManager.dispose();\n if (this.canvas != null &&\n (typeof (HTMLCanvasElement) !== 'undefined' &&\n this.canvas instanceof HTMLCanvasElement)) {\n this.canvas.remove();\n }\n else {\n this.canvas = null;\n }\n if (this.gpgpuCreatedLocally) {\n this.gpgpu.program = null;\n this.gpgpu.dispose();\n }\n this.disposed = true;\n }\n floatPrecision() {\n if (this.floatPrecisionValue == null) {\n this.floatPrecisionValue = tidy(() => {\n if (!env().get('WEBGL_RENDER_FLOAT32_ENABLED')) {\n // Momentarily switching DEBUG flag to false so we don't throw an\n // error trying to upload a small value.\n const debugFlag = env().getBool('DEBUG');\n env().set('DEBUG', false);\n const underflowCheckValue = this.abs(scalar(1e-8)).dataSync()[0];\n env().set('DEBUG', debugFlag);\n if (underflowCheckValue > 0) {\n return 32;\n }\n }\n return 16;\n });\n }\n return this.floatPrecisionValue;\n }\n /** Returns the smallest representable number. */\n epsilon() {\n return this.floatPrecision() === 32 ? EPSILON_FLOAT32 : EPSILON_FLOAT16;\n }\n uploadToGPU(dataId) {\n const texData = this.texData.get(dataId);\n const { shape, dtype, values, texture, usage, isPacked } = texData;\n if (texture != null) {\n // Array is already on GPU. No-op.\n return;\n }\n const shouldTimeProgram = this.activeTimers != null;\n let start;\n if (shouldTimeProgram) {\n start = util.now();\n }\n let texShape = texData.texShape;\n if (texShape == null) {\n texShape = webgl_util.getTextureShapeFromLogicalShape(shape, isPacked);\n texData.texShape = texShape;\n }\n if (values != null) {\n const shapeAs3D = webgl_util.getShapeAs3D(shape);\n let program;\n let width = texShape[1], height = texShape[0];\n const isByteArray = values instanceof Uint8Array;\n if (isPacked) {\n [width, height] = tex_util.getPackedMatrixTextureShapeWidthHeight(texShape[0], texShape[1]);\n program = new EncodeMatrixPackedProgram(shapeAs3D, [height, width], isByteArray);\n }\n else {\n program =\n new EncodeMatrixProgram(shapeAs3D, [height, width], isByteArray);\n }\n const tempDenseInputHandle = this.makeTensorInfo([height, width], dtype);\n if (isByteArray) {\n this.texData.get(tempDenseInputHandle.dataId).usage =\n TextureUsage.PIXELS;\n }\n else {\n this.texData.get(tempDenseInputHandle.dataId).usage =\n TextureUsage.UPLOAD;\n }\n this.gpgpu.uploadDenseMatrixToTexture(this.getTexture(tempDenseInputHandle.dataId), width, height, values);\n // We want the output to remain packed regardless of the value of\n // WEBGL_PACK.\n const preventEagerUnpacking = true;\n const encodedOutputTarget = this.runWebGLProgram(program, [tempDenseInputHandle], dtype, null, preventEagerUnpacking);\n // Have the original texture assume the identity of the encoded output.\n const outputTexData = this.texData.get(encodedOutputTarget.dataId);\n texData.texture = outputTexData.texture;\n texData.texShape = outputTexData.texShape;\n texData.isPacked = outputTexData.isPacked;\n texData.usage = outputTexData.usage;\n this.disposeIntermediateTensorInfo(tempDenseInputHandle);\n 
this.texData.delete(encodedOutputTarget.dataId);\n // Once uploaded, don't store the values on cpu.\n texData.values = null;\n if (shouldTimeProgram) {\n this.uploadWaitMs += util.now() - start;\n }\n }\n else {\n const newTexture = this.acquireTexture(texShape, usage, dtype, isPacked);\n texData.texture = newTexture;\n }\n }\n convertAndCacheOnCPU(dataId, float32Values) {\n const texData = this.texData.get(dataId);\n const { dtype } = texData;\n this.releaseGPUData(dataId);\n if (float32Values != null) {\n texData.values = float32ToTypedArray(float32Values, dtype);\n }\n return texData.values;\n }\n acquireTexture(texShape, texType, dtype, isPacked) {\n this.numBytesInGPU += this.computeBytes(texShape, dtype);\n if (!this.warnedAboutMemory &&\n this.numBytesInGPU > this.numMBBeforeWarning * 1024 * 1024) {\n const mb = (this.numBytesInGPU / 1024 / 1024).toFixed(2);\n this.warnedAboutMemory = true;\n console.warn(`High memory usage in GPU: ${mb} MB, ` +\n `most likely due to a memory leak`);\n }\n return this.textureManager.acquireTexture(texShape, texType, isPacked);\n }\n computeBytes(shape, dtype) {\n return shape[0] * shape[1] * util.bytesPerElement(dtype);\n }\n tryRunOnCpuOrThrow(inputs, fn) {\n if (this.shouldExecuteOnCPU(inputs)) {\n try {\n return fn();\n }\n catch (e) {\n if (env().getBool('IS_TEST')) {\n throw new Error('CPU forwarding failed');\n }\n }\n }\n return null;\n }\n}\nfunction float32ToTypedArray(a, dtype) {\n if (dtype === 'float32' || dtype === 'complex64') {\n return a;\n }\n else if (dtype === 'int32' || dtype === 'bool') {\n const result = (dtype === 'int32') ? new Int32Array(a.length) :\n new Uint8Array(a.length);\n for (let i = 0; i < result.length; ++i) {\n result[i] = Math.round(a[i]);\n }\n return result;\n }\n else {\n throw new Error(`Unknown dtype ${dtype}`);\n }\n}\n//# sourceMappingURL=backend_webgl.js.map", "/** @license See the LICENSE file. */\n// This code is auto-generated, do not modify this file!\nconst version = '2.7.0';\nexport { version };\n//# sourceMappingURL=version.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from '@tensorflow/tfjs-core';\nimport * as gpgpu_util from './gpgpu_util';\nimport * as webgl_util from './webgl_util';\nexport { MathBackendWebGL } from './backend_webgl';\nexport { setWebGLContext } from './canvas_util';\nexport { GPGPUContext } from './gpgpu_context';\n// WebGL specific utils.\nexport { gpgpu_util, webgl_util };\n/**\n * Enforce use of half precision textures if available on the platform.\n *\n * @doc {heading: 'Environment', namespace: 'webgl'}\n */\nexport function forceHalfFloat() {\n env().set('WEBGL_FORCE_F16_TEXTURES', true);\n}\n//# sourceMappingURL=webgl.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// base.ts is the webgl backend without auto kernel registration.\nimport { device_util, registerBackend } from '@tensorflow/tfjs-core';\nimport { MathBackendWebGL } from './backend_webgl';\nexport { version as version_webgl } from './version';\nif (device_util.isBrowser()) {\n registerBackend('webgl', () => new MathBackendWebGL(), 2 /* priority */);\n}\n// Export webgl utilities\nexport * from './webgl';\n// Export forceHalfFlost under webgl namespace for the union bundle.\nimport { forceHalfFloat } from './webgl';\nexport const webgl = { forceHalfFloat };\n//# sourceMappingURL=base.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Identity } from '@tensorflow/tfjs-core';\nexport function identity(args) {\n const { inputs, backend } = args;\n const { x } = inputs;\n backend.incRef(x.dataId);\n return { dataId: x.dataId, shape: x.shape, dtype: x.dtype };\n}\nexport const identityConfig = {\n kernelName: Identity,\n backendName: 'webgl',\n kernelFunc: identity\n};\n//# sourceMappingURL=Identity.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Complex } from '@tensorflow/tfjs-core';\nimport { identity } from './Identity';\n/**\n * In WebGL data is stored in GPU textures which can't be efficiently copied, so\n * complex tensors share data with their real and imaginary components. 
Complex\n * tensors increment the `complexParentRefCount` properties of the underlying\n * data buckets to prevent them from being disposed, as the engine's disposal\n * logic does not account for data sharing by complex tensors.\n *\n * When a complex tensor is disposed, it will explicitly decrease the\n * `complexParentRefCount` properties of its underlying components.\n */\nexport function complex(args) {\n const { inputs, backend } = args;\n const { real, imag } = inputs;\n const complexInfo = backend.makeTensorInfo(real.shape, 'complex64');\n const complex = backend.texData.get(complexInfo.dataId);\n const realTensorInfo = identity({ inputs: { x: real }, backend });\n const realData = backend.texData.get(realTensorInfo.dataId);\n realData.complexParentRefCount++;\n const imagTensorInfo = identity({ inputs: { x: imag }, backend });\n const imagData = backend.texData.get(imagTensorInfo.dataId);\n imagData.complexParentRefCount++;\n complex.complexTensorInfos = { real: realTensorInfo, imag: imagTensorInfo };\n return complexInfo;\n}\nexport const complexConfig = {\n kernelName: Complex,\n backendName: 'webgl',\n kernelFunc: complex\n};\n//# sourceMappingURL=Complex.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env, upcastType } from '@tensorflow/tfjs-core';\nimport { BinaryOpProgram } from '../binaryop_gpu';\nimport { BinaryOpPackedProgram } from '../binaryop_packed_gpu';\nimport { complex } from '../kernels/Complex';\nimport { UnaryOpProgram } from '../unaryop_gpu';\nexport const CHECK_NAN_SNIPPET_UNARY = `if (isnan(x)) return x;`;\nexport const CHECK_NAN_SNIPPET_BINARY = `\n if (isnan(a)) return a;\n if (isnan(b)) return b;\n`;\nexport const CHECK_NAN_SNIPPET_BINARY_PACKED = `\n result.r = isNaN.r > 0. ? NAN : result.r;\n result.g = isNaN.g > 0. ? NAN : result.g;\n result.b = isNaN.b > 0. ? NAN : result.b;\n result.a = isNaN.a > 0. ? NAN : result.a;\n`;\n/**\n * Template that creates a `KernelFunc` for unary ops.\n * @param opSnippets Op snippet to create `UnaryOpProgram`.\n */\nexport function unaryKernelFunc(opSnippet) {\n return ({ inputs, backend }) => {\n const { x } = inputs;\n const webglBackend = backend;\n const program = new UnaryOpProgram(x.shape, opSnippet);\n return webglBackend.runWebGLProgram(program, [x], x.dtype);\n };\n}\n/**\n * Template that creates a `KernelFunc` for binary ops.\n * @param opSnippet Op snippet to create `BinaryOpProgram`.\n * @param packedOpSnippet Op snippet to create `BinaryOpPackedProgram`.\n * @param checkOutOfBoundsForPackedProgram Whether to set checkOutOfBounds=true\n * when creating BinaryOpPackedProgram.\n * @param dtype Optional. If set, the result has this dtype. Otherwise, the\n * result has the same dtype as the first input. 
This is mainly used in\n * comparison kernels, such as Equal, Less, Greater, etc.\n */\nexport function binaryKernelFunc({ opSnippet, packedOpSnippet, checkOutOfBounds = false, supportsComplex = false, cpuKernelImpl, dtype }) {\n return ({ inputs, backend }) => {\n const { a, b } = inputs;\n const webglBackend = backend;\n if (supportsComplex && a.dtype === 'complex64') {\n const aData = webglBackend.texData.get(a.dataId);\n const bData = webglBackend.texData.get(b.dataId);\n const [real, imag] = [\n [aData.complexTensorInfos.real, bData.complexTensorInfos.real],\n [aData.complexTensorInfos.imag, bData.complexTensorInfos.imag]\n ].map(complexParts => {\n const [aPart, bPart] = complexParts;\n const aHandle = {\n dataId: aPart.dataId,\n dtype: aPart.dtype,\n shape: a.shape\n };\n const bHandle = {\n dataId: bPart.dataId,\n dtype: bPart.dtype,\n shape: b.shape\n };\n const program = new BinaryOpProgram(opSnippet, a.shape, b.shape);\n return webglBackend.runWebGLProgram(program, [aHandle, bHandle], upcastType(aPart.dtype, bPart.dtype));\n });\n const complexOutput = complex({ inputs: { real, imag }, backend: webglBackend });\n webglBackend.disposeIntermediateTensorInfo(real);\n webglBackend.disposeIntermediateTensorInfo(imag);\n // TODO(annxingyuan): Implement CPU forwarding for complex inputs.\n return complexOutput;\n }\n const $dtype = dtype || upcastType(a.dtype, b.dtype);\n if (webglBackend.shouldExecuteOnCPU([a, b]) && cpuKernelImpl != null) {\n const aData = webglBackend.texData.get(a.dataId);\n const bData = webglBackend.texData.get(b.dataId);\n const [outValues, outShape] = cpuKernelImpl(a.shape, b.shape, aData.values, bData.values, $dtype);\n const out = webglBackend.makeTensorInfo(outShape, $dtype);\n const outData = webglBackend.texData.get(out.dataId);\n outData.values = outValues;\n return out;\n }\n const shouldUsePackedProgram = env().getBool('WEBGL_PACK_BINARY_OPERATIONS') &&\n packedOpSnippet != null;\n let program;\n if (shouldUsePackedProgram) {\n program = new BinaryOpPackedProgram(packedOpSnippet, a.shape, b.shape, checkOutOfBounds);\n }\n else {\n program = new BinaryOpProgram(opSnippet, a.shape, b.shape);\n }\n return webglBackend.runWebGLProgram(program, [a, b], $dtype);\n };\n}\n//# sourceMappingURL=kernel_funcs_utils.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Add } from '@tensorflow/tfjs-core';\nimport { binaryKernelFunc } from '../kernel_utils/kernel_funcs_utils';\nimport { addImplCPU as cpuAdd } from '../kernel_utils/shared';\nconst ADD = 'return a + b;';\nexport const addKernelFunc = binaryKernelFunc({\n opSnippet: ADD,\n packedOpSnippet: ADD,\n supportsComplex: true,\n cpuKernelImpl: cpuAdd\n});\nexport const addConfig = {\n kernelName: Add,\n backendName: 'webgl',\n kernelFunc: addKernelFunc\n};\n//# sourceMappingURL=Add.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Atan2 } from '@tensorflow/tfjs-core';\nimport { binaryKernelFunc, CHECK_NAN_SNIPPET_BINARY, CHECK_NAN_SNIPPET_BINARY_PACKED } from '../kernel_utils/kernel_funcs_utils';\nconst ATAN2 = CHECK_NAN_SNIPPET_BINARY + `\n return atan(a, b);\n`;\nconst ATAN2_PACKED = `\n vec4 result = atan(a, b);\n vec4 isNaN = min(vec4(isnan(a)) + vec4(isnan(b)), vec4(1.0));\n ` +\n CHECK_NAN_SNIPPET_BINARY_PACKED + `\n return result;\n`;\nexport const atan2 = binaryKernelFunc({ opSnippet: ATAN2, packedOpSnippet: ATAN2_PACKED });\nexport const atan2Config = {\n kernelName: Atan2,\n backendName: 'webgl',\n kernelFunc: atan2,\n};\n//# sourceMappingURL=Atan2.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { AvgPool, backend_util, util } from '@tensorflow/tfjs-core';\nimport { Pool2DProgram } from '../pool_gpu';\nimport { assertNotComplex } from '../webgl_util';\nimport { identity } from './Identity';\nexport function avgPool(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n assertNotComplex(x, 'avgPool');\n const { filterSize, strides, pad, dimRoundingMode } = attrs;\n const dilations = 1;\n util.assert(backend_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in avgPool: Either strides or dilations must be 1. ' +\n `Got strides ${strides} and dilations '${dilations}'`);\n const convInfo = backend_util.computePool2DInfo(x.shape, filterSize, strides, dilations, pad, dimRoundingMode);\n if (convInfo.filterWidth === 1 && convInfo.filterHeight === 1 &&\n util.arraysEqual(convInfo.inShape, convInfo.outShape)) {\n return identity({ inputs: { x }, backend });\n }\n const avgPoolProgram = new Pool2DProgram(convInfo, 'avg', false);\n return backend.runWebGLProgram(avgPoolProgram, [x], 'float32');\n}\nexport const avgPoolConfig = {\n kernelName: AvgPool,\n backendName: 'webgl',\n kernelFunc: avgPool\n};\n//# sourceMappingURL=AvgPool.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { AvgPoolBackprop, backend_util } from '@tensorflow/tfjs-core';\nimport { AvgPool2DBackpropProgram } from '../avg_pool_backprop_gpu';\nimport { assertNotComplex } from '../webgl_util';\nexport function avgPoolBackprop(args) {\n const { inputs, backend, attrs } = args;\n const { dy, input } = inputs;\n const x = input;\n assertNotComplex([dy, input], 'avgPoolBackprop');\n const { filterSize, strides, pad } = attrs;\n const convInfo = backend_util.computePool2DInfo(x.shape, filterSize, strides, 1 /* dilations */, pad);\n const avgPoolBackpropProgram = new AvgPool2DBackpropProgram(convInfo);\n return backend.runWebGLProgram(avgPoolBackpropProgram, [dy], x.dtype);\n}\nexport const avgPoolBackpropConfig = {\n kernelName: AvgPoolBackprop,\n backendName: 'webgl',\n kernelFunc: avgPoolBackprop\n};\n//# sourceMappingURL=AvgPoolBackprop.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util } from '@tensorflow/tfjs-core';\nexport class BatchNormProgram {\n constructor(xShape, meanShape, varianceShape, offsetShape, scaleShape, varianceEpsilon) {\n this.outputShape = [];\n this.variableNames = ['x', 'mean', 'variance'];\n backend_util.assertAndGetBroadcastShape(xShape, meanShape);\n backend_util.assertAndGetBroadcastShape(xShape, varianceShape);\n let offsetSnippet = '0.0';\n if (offsetShape != null) {\n backend_util.assertAndGetBroadcastShape(xShape, offsetShape);\n this.variableNames.push('offset');\n offsetSnippet = 'getOffsetAtOutCoords()';\n }\n let scaleSnippet = '1.0';\n if (scaleShape != null) {\n backend_util.assertAndGetBroadcastShape(xShape, scaleShape);\n this.variableNames.push('scale');\n scaleSnippet = 'getScaleAtOutCoords()';\n }\n this.outputShape = xShape;\n this.userCode = `\n void main() {\n float x = getXAtOutCoords();\n float mean = getMeanAtOutCoords();\n float variance = getVarianceAtOutCoords();\n float offset = ${offsetSnippet};\n float scale = ${scaleSnippet};\n float inv = scale * inversesqrt(variance + float(${varianceEpsilon}));\n setOutput(dot(vec3(x, -mean, offset), vec3(inv, inv, 1)));\n }\n `;\n }\n}\n//# sourceMappingURL=batchnorm_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util } from '@tensorflow/tfjs-core';\nexport class BatchNormPackedProgram {\n constructor(xShape, meanShape, varianceShape, offsetShape, scaleShape, varianceEpsilon) {\n this.packedInputs = true;\n this.packedOutput = true;\n this.variableNames = ['x', 'mean', 'variance'];\n backend_util.assertAndGetBroadcastShape(xShape, meanShape);\n backend_util.assertAndGetBroadcastShape(xShape, varianceShape);\n let offsetSnippet = 'vec4(0.0)';\n if (offsetShape != null) {\n backend_util.assertAndGetBroadcastShape(xShape, offsetShape);\n this.variableNames.push('offset');\n offsetSnippet = 'getOffsetAtOutCoords()';\n }\n let scaleSnippet = 'vec4(1.0)';\n if (scaleShape != null) {\n backend_util.assertAndGetBroadcastShape(xShape, scaleShape);\n this.variableNames.push('scale');\n scaleSnippet = 'getScaleAtOutCoords()';\n }\n this.outputShape = xShape;\n this.userCode = `\n void main() {\n vec4 offset = ${offsetSnippet};\n vec4 scale = ${scaleSnippet};\n\n vec4 x = getXAtOutCoords();\n vec4 mean = getMeanAtOutCoords();\n vec4 variance = getVarianceAtOutCoords();\n\n vec4 inv = scale * inversesqrt(variance + vec4(${varianceEpsilon}));\n\n setOutput((x - mean) * inv + offset);\n }\n `;\n }\n}\n//# sourceMappingURL=batchnorm_packed_gpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env, FusedBatchNorm, util } from '@tensorflow/tfjs-core';\nimport { BatchNormProgram } from '../batchnorm_gpu';\nimport { BatchNormPackedProgram } from '../batchnorm_packed_gpu';\nexport const batchNorm = ({ inputs, backend, attrs }) => {\n const { x, mean, variance, offset, scale } = inputs;\n util.assert(mean.shape.length === variance.shape.length, () => 'Batch normalization gradient requires mean and variance to have ' +\n 'equal ranks.');\n util.assert(offset == null || mean.shape.length === offset.shape.length, () => 'Batch normalization gradient requires mean and offset to have ' +\n 'equal ranks.');\n util.assert(scale == null || mean.shape.length === scale.shape.length, () => 'Batch normalization gradient requires mean and scale to have ' +\n 'equal ranks.');\n let { varianceEpsilon } = attrs;\n if (varianceEpsilon == null) {\n varianceEpsilon = 0.001;\n }\n const finalInputs = [x, mean, variance];\n let offsetShape = null;\n if (offset != null) {\n offsetShape = offset.shape;\n finalInputs.push(offset);\n }\n let scaleShape = null;\n if (scale != null) {\n scaleShape = scale.shape;\n finalInputs.push(scale);\n }\n const program = env().getBool('WEBGL_PACK_NORMALIZATION') ?\n new BatchNormPackedProgram(x.shape, mean.shape, variance.shape, offsetShape, scaleShape, varianceEpsilon) :\n new BatchNormProgram(x.shape, mean.shape, variance.shape, offsetShape, scaleShape, varianceEpsilon);\n const output = backend.runWebGLProgram(program, finalInputs, finalInputs[0].dtype);\n return output;\n};\nexport const batchNormConfig = {\n kernelName: FusedBatchNorm,\n backendName: 'webgl',\n kernelFunc: batchNorm,\n};\n//# sourceMappingURL=BatchNorm.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { NotEqual } from '@tensorflow/tfjs-core';\nimport { binaryKernelFunc } from '../kernel_utils/kernel_funcs_utils';\nconst NOT_EQUAL = `return float(a != b);`;\nexport const notEqual = binaryKernelFunc({ opSnippet: NOT_EQUAL, dtype: 'bool' });\nexport const notEqualConfig = {\n kernelName: NotEqual,\n backendName: 'webgl',\n kernelFunc: notEqual,\n};\n//# sourceMappingURL=NotEqual.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Real } from '@tensorflow/tfjs-core';\nimport { identity } from './Identity';\nexport function real(args) {\n const { inputs, backend } = args;\n const { input } = inputs;\n const inputData = backend.texData.get(input.dataId);\n return identity({ inputs: { x: inputData.complexTensorInfos.real }, backend });\n}\nexport const realConfig = {\n kernelName: Real,\n backendName: 'webgl',\n kernelFunc: real\n};\n//# sourceMappingURL=Real.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { UnaryOpProgram } from '../unaryop_gpu';\nconst TO_INT = `return float(int(x));`;\nexport function int(input, backend) {\n const program = new UnaryOpProgram(input.shape, TO_INT);\n const output = backend.runWebGLProgram(program, [input], 'int32');\n return { dataId: output.dataId, shape: output.shape, dtype: output.dtype };\n}\n//# sourceMappingURL=int.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as tf from '@tensorflow/tfjs-core';\nimport { Cast, util } from '@tensorflow/tfjs-core';\nimport { complex } from './Complex';\nimport { identity } from './Identity';\nimport { notEqual } from './NotEqual';\nimport { real } from './Real';\nimport { int } from '../kernel_utils/int';\nexport function cast(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n const { dtype } = attrs;\n // Casting to complex64.\n if (dtype === 'complex64') {\n if (x.dtype === 'complex64') {\n return identity({ inputs: { x }, backend });\n }\n // TODO(annxingyuan): Import kernel function once zeros is modularized.\n const zerosTensor = tf.zeros(x.shape);\n const floatX = cast({ inputs: { x }, backend, attrs: { dtype: 'float32' } });\n const result = complex({ inputs: { real: floatX, imag: zerosTensor }, backend });\n zerosTensor.dispose();\n backend.disposeIntermediateTensorInfo(floatX);\n return result;\n }\n // Casting from complex64\n if (x.dtype === 'complex64') {\n const realPart = real({ inputs: { input: x }, backend });\n const result = cast({ inputs: { x: realPart }, backend, attrs: { dtype } });\n backend.disposeIntermediateTensorInfo(realPart);\n return result;\n }\n if (!util.hasEncodingLoss(x.dtype, dtype)) {\n // We don't change the underlying data, since we cast to higher\n // precision.\n const result = identity({ inputs: { x }, backend });\n return { dataId: result.dataId, shape: result.shape, dtype };\n }\n if (dtype === 'int32') {\n return int(x, backend);\n }\n if (dtype === 'bool') {\n const zerosTensorInfo = backend.makeTensorInfo([], 'bool', util.getTypedArrayFromDType('bool', 1));\n const binaryInputs = { a: x, b: zerosTensorInfo };\n const result = notEqual({ inputs: binaryInputs, backend });\n backend.disposeIntermediateTensorInfo(zerosTensorInfo);\n return result;\n }\n throw new Error(`Error in Cast: failed to cast ${x.dtype} to ${dtype}`);\n}\nexport const castConfig = {\n kernelName: Cast,\n backendName: 'webgl',\n kernelFunc: cast\n};\n//# sourceMappingURL=Cast.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util } from '@tensorflow/tfjs-core';\nexport class ConcatProgram {\n // Concats 2d tensors along axis=1. See comments in MathBackendWebGL.concat().\n constructor(shapes) {\n this.outputShape = [];\n this.outputShape = backend_util.computeOutShape(shapes, 1 /* axis */);\n this.variableNames = shapes.map((_, i) => `T${i}`);\n const offsets = new Array(shapes.length - 1);\n offsets[0] = shapes[0][1];\n for (let i = 1; i < offsets.length; i++) {\n offsets[i] = offsets[i - 1] + shapes[i][1];\n }\n const snippets = [`if (yC < ${offsets[0]}) setOutput(getT0(yR, yC));`];\n for (let i = 1; i < offsets.length; i++) {\n const shift = offsets[i - 1];\n snippets.push(`else if (yC < ${offsets[i]}) ` +\n `setOutput(getT${i}(yR, yC-${shift}));`);\n }\n const lastIndex = offsets.length;\n const lastShift = offsets[offsets.length - 1];\n snippets.push(`else setOutput(getT${lastIndex}(yR, yC-${lastShift}));`);\n this.userCode = `\n void main() {\n ivec2 coords = getOutputCoords();\n int yR = coords.x;\n int yC = coords.y;\n\n ${snippets.join('\\n ')}\n }\n `;\n }\n}\n//# sourceMappingURL=concat_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util } from '@tensorflow/tfjs-core';\nimport { getChannels } from './packing_util';\nimport { getCoordsDataType } from './shader_compiler';\nexport class ConcatPackedProgram {\n constructor(shapes, axis) {\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = [];\n this.outputShape = backend_util.computeOutShape(shapes, axis);\n const shape = this.outputShape;\n const rank = shape.length;\n const dtype = getCoordsDataType(rank);\n const coords = getChannels('coords', rank);\n const channels = ['x', 'y', 'z', 'w', 'u', 'v'].slice(0, rank);\n this.variableNames = shapes.map((_, i) => `T${i}`);\n const offsets = new Array(shapes.length - 1);\n offsets[0] = shapes[0][axis];\n for (let i = 1; i < offsets.length; i++) {\n offsets[i] = offsets[i - 1] + shapes[i][axis];\n }\n const channel = channels[axis];\n const lastChannels = channels.slice(-2);\n const allChannels = channels.join();\n let getValueSnippet = `if (${channel} < ${offsets[0]}) {\n return getChannel(\n getT0(${allChannels}), vec2(${lastChannels.join()}));\n }`;\n for (let i = 1; i < offsets.length; i++) {\n const shift = offsets[i - 1];\n // Note: the >= comparison below may seem unnecessary given the check\n // above but is needed to workaround branch execution issues on some\n // devices. 
It makes all the conditions exclusive without relying on\n // execution order.\n getValueSnippet += `\n if (${channel} < ${offsets[i]} && ${channel} >= ${offsets[i - 1]}) {\n return getChannel(\n getT${i}(${shiftedChannels(channels, channel, shift)}),\n vec2(${shiftedChannels(lastChannels, channel, shift)}));\n }`;\n }\n const lastIndex = offsets.length;\n const shift = offsets[offsets.length - 1];\n getValueSnippet += `\n return getChannel(\n getT${lastIndex}(${shiftedChannels(channels, channel, shift)}),\n vec2(${shiftedChannels(lastChannels, channel, shift)}));`;\n this.userCode = `\n float getValue(${channels.map(x => 'int ' + x)}) {\n ${getValueSnippet}\n }\n\n void main() {\n ${dtype} coords = getOutputCoords();\n vec4 result = vec4(getValue(${coords}), 0., 0., 0.);\n\n ${coords[rank - 1]} = ${coords[rank - 1]} + 1;\n if (${coords[rank - 1]} < ${shape[rank - 1]}) {\n result.g = getValue(${coords});\n }\n\n ${coords[rank - 2]} = ${coords[rank - 2]} + 1;\n if (${coords[rank - 2]} < ${shape[rank - 2]}) {\n result.a = getValue(${coords});\n }\n\n ${coords[rank - 1]} = ${coords[rank - 1]} - 1;\n if (${coords[rank - 2]} < ${shape[rank - 2]} &&\n ${coords[rank - 1]} < ${shape[rank - 1]}) {\n result.b = getValue(${coords});\n }\n setOutput(result);\n }\n `;\n }\n}\n/**\n * Return an expression for coordinates into a vector where a given channel\n * will be offset by [shift].\n *\n * @param channels the channels to consider\n * @param channel the channel we want shifted\n * @param shift the amount to subtract from the channel.\n *\n * @returns a string of the form 'x, y-[shift], z' where any one channel can\n * have the shift applied.\n */\nfunction shiftedChannels(channels, channel, shift) {\n const channelIdx = channels.indexOf(channel);\n const res = channels.map((c, idx) => {\n if (idx === channelIdx) {\n return `${c} - ${shift}`;\n }\n else {\n return c;\n }\n });\n return res.join();\n}\n//# sourceMappingURL=concat_packed_gpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Imag } from '@tensorflow/tfjs-core';\nimport { identity } from './Identity';\nexport function imag(args) {\n const { inputs, backend } = args;\n const { input } = inputs;\n const inputData = backend.texData.get(input.dataId);\n return identity({ inputs: { x: inputData.complexTensorInfos.imag }, backend });\n}\nexport const imagConfig = {\n kernelName: Imag,\n backendName: 'webgl',\n kernelFunc: imag\n};\n//# sourceMappingURL=Imag.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ReshapePackedProgram } from '../reshape_packed_gpu';\nimport { getBatchDim, getRowsCols } from '../webgl_util';\nexport function packedReshape(input, afterShape, backend) {\n const input3DShape = [getBatchDim(input.shape),\n ...getRowsCols(input.shape)];\n const input3D = {\n dtype: input.dtype,\n shape: input3DShape,\n dataId: input.dataId\n };\n const afterShapeAs3D = [getBatchDim(afterShape),\n ...getRowsCols(afterShape)];\n const program = new ReshapePackedProgram(afterShapeAs3D, input3DShape);\n const preventEagerUnpackingOfOutput = true;\n const output = backend.runWebGLProgram(program, [input3D], input.dtype, null /* customSetup */, preventEagerUnpackingOfOutput);\n return { dataId: output.dataId, shape: afterShape, dtype: output.dtype };\n}\n//# sourceMappingURL=reshape.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Reshape, util } from '@tensorflow/tfjs-core';\nimport { packedReshape } from '../kernel_utils/reshape';\nimport { isReshapeFree } from '../webgl_util';\nexport function reshape(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n const { shape } = attrs;\n const webglBackend = backend;\n const xSize = util.sizeFromShape(x.shape);\n const $shape = util.inferFromImplicitShape(shape, xSize);\n const $xSize = util.sizeFromShape($shape);\n util.assert(xSize === $xSize, () => `The new shape (${$shape}) has ${$xSize} elements and the old ` +\n `shape (${x.shape}) has ${xSize} elements. The new shape and old ` +\n `shape must have the same number of elements.`);\n const xTexData = webglBackend.texData.get(x.dataId);\n if (xTexData.isPacked && !isReshapeFree(x.shape, $shape) &&\n !(xTexData.texture !== null && isReshapeFree(xTexData.shape, $shape))) {\n return packedReshape(x, $shape, webglBackend);\n }\n webglBackend.incRef(x.dataId);\n return { dataId: x.dataId, shape: $shape, dtype: x.dtype };\n}\nexport const reshapeConfig = {\n kernelName: Reshape,\n backendName: 'webgl',\n kernelFunc: reshape\n};\n//# sourceMappingURL=Reshape.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, env, util } from '@tensorflow/tfjs-core';\nimport { ConcatProgram } from '../concat_gpu';\nimport { ConcatPackedProgram } from '../concat_packed_gpu';\nimport { complex } from './Complex';\nimport { imag } from './Imag';\nimport { real } from './Real';\nimport { reshape } from './Reshape';\nexport function concatImpl(inputs, axis, backend) {\n const dtype = inputs[0].dtype;\n if (dtype === 'complex64') {\n const reals = inputs.map((t) => real({ inputs: { input: t }, backend }));\n const imags = inputs.map((t) => imag({ inputs: { input: t }, backend }));\n const realConcated = concatImpl(reals, axis, backend);\n const imagConcated = concatImpl(imags, axis, backend);\n const result = complex({ inputs: { real: realConcated, imag: imagConcated }, backend });\n reals.forEach(r => backend.disposeIntermediateTensorInfo(r));\n imags.forEach(i => backend.disposeIntermediateTensorInfo(i));\n backend.disposeIntermediateTensorInfo(realConcated);\n backend.disposeIntermediateTensorInfo(imagConcated);\n return result;\n }\n if (inputs.length > env().getNumber('WEBGL_MAX_TEXTURES_IN_SHADER')) {\n const midIndex = Math.floor(inputs.length / 2);\n const leftSide = concatImpl(inputs.slice(0, midIndex), axis, backend);\n const rightSide = concatImpl(inputs.slice(midIndex), axis, backend);\n const result = concatImpl([leftSide, rightSide], axis, backend);\n backend.disposeIntermediateTensorInfo(leftSide);\n backend.disposeIntermediateTensorInfo(rightSide);\n return result;\n }\n if (env().getBool('WEBGL_PACK_ARRAY_OPERATIONS') &&\n inputs[0].shape.length > 1) {\n const program = new ConcatPackedProgram(inputs.map(t => t.shape), axis);\n return backend.runWebGLProgram(program, inputs, dtype);\n }\n // Any concat of n-dimensional tensors across any axis can be reduced to\n // a concatenation of two-dimensional tensors across the axis 1 by first\n // partitioning the axes of the original tensors into those less than the\n // axis to be concatenated and the rest. 
Then reshape the tensors\n // into a two-dimensional tensor by collapsing these two sets of axes and\n // concatenate the resulting matrices across the axis 1, finally reshaping\n // the result to have the proper shape.\n const outShape = backend_util.computeOutShape(inputs.map(t => t.shape), axis);\n const tensors2D = inputs.map(x => reshape({\n inputs: { x },\n attrs: { shape: [-1, util.sizeFromShape(x.shape.slice(axis))] },\n backend\n }));\n const program = new ConcatProgram(tensors2D.map(t => t.shape));\n const result = backend.runWebGLProgram(program, tensors2D, dtype);\n tensors2D.forEach(r => backend.disposeIntermediateTensorInfo(r));\n const reshapedResult = reshape({ inputs: { x: result }, attrs: { shape: outShape }, backend });\n backend.disposeIntermediateTensorInfo(result);\n return reshapedResult;\n}\n//# sourceMappingURL=Concat_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Concat, util } from '@tensorflow/tfjs-core';\nimport { concatImpl } from './Concat_impl';\nexport function concat(args) {\n const { inputs, backend, attrs } = args;\n const { axis } = attrs;\n const $axis = util.parseAxisParam(axis, inputs[0].shape)[0];\n const outShape = backend_util.computeOutShape(inputs.map(t => t.shape), $axis);\n if (util.sizeFromShape(outShape) === 0) {\n return backend.makeTensorInfo(outShape, inputs[0].dtype, []);\n }\n // Keep only non-empty tensors (ignore tensors with 0 in their shape).\n const $inputs = inputs.filter(t => util.sizeFromShape(t.shape) > 0);\n if ($inputs.length === 1) {\n return $inputs[0];\n }\n const shapes = $inputs.map(t => t.shape);\n backend_util.assertParamsConsistent(shapes, $axis);\n return concatImpl($inputs, $axis, backend);\n}\nexport const concatConfig = {\n kernelName: Concat,\n backendName: 'webgl',\n kernelFunc: concat\n};\n//# sourceMappingURL=Concat.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Cos } from '@tensorflow/tfjs-core';\nimport { CHECK_NAN_SNIPPET_UNARY, unaryKernelFunc } from '../kernel_utils/kernel_funcs_utils';\nconst COS = CHECK_NAN_SNIPPET_UNARY + `\n return cos(x);\n`;\nexport const cos = unaryKernelFunc(COS);\nexport const cosConfig = {\n kernelName: Cos,\n backendName: 'webgl',\n kernelFunc: cos,\n};\n//# sourceMappingURL=Cos.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Div } from '@tensorflow/tfjs-core';\nimport { binaryKernelFunc } from '../kernel_utils/kernel_funcs_utils';\n// Without the equality check div produces 0.9999 for a = b, which when\n// floored can cause errors.\nconst DIV = `\nif (a == b) {\n return 1.0;\n};\nreturn a / b;`;\n// We do the same as in ./binaryop_gpu, with vec4 and ivec4.\n// On Linux, the vectorized implementation produces NaNs when a and b are 0.\nconst DIV_PACKED = `\n // vec4 one = vec4(equal(a, b));\n // return one + (vec4(1.0) - one) * a / b;\n vec4 result = a / b;\n if(a.x == b.x) {\n result.x = 1.;\n }\n if(a.y == b.y) {\n result.y = 1.;\n }\n if(a.z == b.z) {\n result.z = 1.;\n }\n if(a.w == b.w) {\n result.w = 1.;\n }\n\n return result;\n`;\nexport const div = binaryKernelFunc({ opSnippet: DIV, packedOpSnippet: DIV_PACKED, checkOutOfBounds: true });\nexport const divConfig = {\n kernelName: Div,\n backendName: 'webgl',\n kernelFunc: div,\n};\n//# sourceMappingURL=Div.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class FFTProgram {\n constructor(component, inputShape, inverse) {\n this.variableNames = ['real', 'imag'];\n const innerDim = inputShape[1];\n this.outputShape = inputShape;\n const exponentMultiplierSnippet = inverse ? `2.0 * ${Math.PI}` : `-2.0 * ${Math.PI}`;\n const resultDenominator = inverse ? `${innerDim}.0` : '1.0';\n let opString;\n if (component === 'real') {\n opString = 'return real * expR - imag * expI;';\n }\n else if (component === 'imag') {\n opString = 'return real * expI + imag * expR;';\n }\n else {\n throw new Error(`FFT component must be either \"real\" or \"imag\", got ${component}.`);\n }\n this.userCode = `\n const float exponentMultiplier = ${exponentMultiplierSnippet};\n\n float unaryOpComplex(float real, float expR, float imag, float expI) {\n ${opString}\n }\n\n float mulMatDFT(int batch, int index) {\n float indexRatio = float(index) / float(${innerDim});\n float exponentMultiplierTimesIndexRatio =\n exponentMultiplier * indexRatio;\n\n float result = 0.0;\n\n for (int i = 0; i < ${innerDim}; i++) {\n // x = (-2|2 * PI / N) * index * i;\n float x = exponentMultiplierTimesIndexRatio * float(i);\n float expR = cos(x);\n float expI = sin(x);\n float real = getReal(batch, i);\n float imag = getImag(batch, i);\n\n result +=\n unaryOpComplex(real, expR, imag, expI) / ${resultDenominator};\n }\n\n return result;\n }\n\n void main() {\n ivec2 coords = getOutputCoords();\n setOutput(mulMatDFT(coords[0], coords[1]));\n }\n `;\n }\n}\n//# sourceMappingURL=fft_gpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nimport { FFTProgram } from '../fft_gpu';\nimport { complex } from './Complex';\nimport { reshape } from './Reshape';\nexport function fftImpl(x, inverse, backend) {\n const xData = backend.texData.get(x.dataId);\n const inputSize = util.sizeFromShape(x.shape);\n // Collapse all outer dimensions to a single batch dimension.\n const innerDimensionSize = x.shape[x.shape.length - 1];\n const batch = inputSize / innerDimensionSize;\n const input2D = reshape({ inputs: { x }, backend, attrs: { shape: [batch, innerDimensionSize] } });\n const xShape = input2D.shape;\n const realProgram = new FFTProgram('real', xShape, inverse);\n const imagProgram = new FFTProgram('imag', xShape, inverse);\n const inputs = [\n {\n dataId: xData.complexTensorInfos.real.dataId,\n dtype: xData.complexTensorInfos.real.dtype,\n shape: xShape\n },\n {\n dataId: xData.complexTensorInfos.imag.dataId,\n dtype: xData.complexTensorInfos.imag.dtype,\n shape: xShape\n }\n ];\n const realPart = backend.runWebGLProgram(realProgram, inputs, 'float32');\n const imagPart = backend.runWebGLProgram(imagProgram, inputs, 'float32');\n const complexOutput = complex({ inputs: { real: realPart, imag: imagPart }, backend });\n backend.disposeIntermediateTensorInfo(realPart);\n backend.disposeIntermediateTensorInfo(imagPart);\n const complexOutputReshaped = reshape({ inputs: { x: complexOutput }, backend, attrs: { shape: x.shape } });\n backend.disposeIntermediateTensorInfo(complexOutputReshaped);\n return complexOutputReshaped;\n}\n//# sourceMappingURL=FFT_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { FFT } from '@tensorflow/tfjs-core';\nimport { fftImpl } from './FFT_impl';\nexport function fft(args) {\n const { inputs, backend } = args;\n const { input } = inputs;\n return fftImpl(input, false /* inverse */, backend);\n}\nexport const fftConfig = {\n kernelName: FFT,\n backendName: 'webgl',\n kernelFunc: fft\n};\n//# sourceMappingURL=FFT.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class FlipLeftRightProgram {\n constructor(imageShape) {\n this.variableNames = ['Image'];\n this.outputShape = [];\n const imageWidth = imageShape[2];\n this.outputShape = imageShape;\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int x = coords[2];\n\n int coordX = ${imageWidth} - x;\n float outputValue;\n if(coordX >= 0 && coordX < ${imageWidth}) {\n outputValue = getImage(coords[0], coords[1], coordX, coords[3]);\n } else {\n outputValue = getImage(coords[0], coords[1], coords[2], coords[3]);\n }\n setOutput(outputValue);\n }\n `;\n }\n}\n//# sourceMappingURL=flip_left_right_gpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { FlipLeftRight } from '@tensorflow/tfjs-core';\nimport { FlipLeftRightProgram } from '../flip_left_right_gpu';\nexport const flipLeftRightConfig = {\n kernelName: FlipLeftRight,\n backendName: 'webgl',\n kernelFunc: ({ inputs, backend }) => {\n const { image } = inputs;\n const webglBackend = backend;\n const program = new FlipLeftRightProgram(image.shape);\n const output = webglBackend.runWebGLProgram(program, [image], image.dtype);\n return output;\n }\n};\n//# sourceMappingURL=FlipLeftRight.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getGlslDifferences } from '../../glsl_version';\nexport class FromPixelsProgram {\n constructor(outputShape) {\n this.variableNames = ['A'];\n const glsl = getGlslDifferences();\n const [height, width,] = outputShape;\n this.outputShape = outputShape;\n this.userCode = `\n void main() {\n ivec3 coords = getOutputCoords();\n int texR = coords[0];\n int texC = coords[1];\n int depth = coords[2];\n vec2 uv = (vec2(texC, texR) + halfCR) / vec2(${width}.0, ${height}.0);\n\n vec4 values = ${glsl.texture2D}(A, uv);\n float value;\n if (depth == 0) {\n value = values.r;\n } else if (depth == 1) {\n value = values.g;\n } else if (depth == 2) {\n value = values.b;\n } else if (depth == 3) {\n value = values.a;\n }\n\n setOutput(floor(value * 255.0 + 0.5));\n }\n `;\n }\n}\n//# sourceMappingURL=from_pixels_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getGlslDifferences } from '../../glsl_version';\nexport class FromPixelsPackedProgram {\n constructor(outputShape) {\n this.variableNames = ['A'];\n this.packedInputs = false;\n this.packedOutput = true;\n const glsl = getGlslDifferences();\n const [height, width,] = outputShape;\n this.outputShape = outputShape;\n this.userCode = `\n void main() {\n ivec3 coords = getOutputCoords();\n int texR = coords[0];\n int texC = coords[1];\n int depth = coords[2];\n\n vec4 result = vec4(0.);\n\n for(int row=0; row<=1; row++) {\n for(int col=0; col<=1; col++) {\n texC = coords[1] + row;\n depth = coords[2] + col;\n\n vec2 uv = (vec2(texC, texR) + halfCR) /\n vec2(${width}.0, ${height}.0);\n vec4 values = ${glsl.texture2D}(A, uv);\n float value;\n if (depth == 0) {\n value = values.r;\n } else if (depth == 1) {\n value = values.g;\n } else if (depth == 2) {\n value = values.b;\n } else if (depth == 3) {\n value = values.a;\n }\n\n result[row * 2 + col] = floor(value * 255.0 + 0.5);\n }\n }\n\n ${glsl.output} = result;\n }\n `;\n }\n}\n//# sourceMappingURL=from_pixels_packed_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from '@tensorflow/tfjs-core';\nimport { FromPixels } from '@tensorflow/tfjs-core';\nimport { TextureUsage } from '../tex_util';\nimport { FromPixelsProgram } from './FromPixels_utils/from_pixels_gpu';\nimport { FromPixelsPackedProgram } from './FromPixels_utils/from_pixels_packed_gpu';\nexport const fromPixelsConfig = {\n kernelName: FromPixels,\n backendName: 'webgl',\n kernelFunc: fromPixels,\n};\nlet fromPixels2DContext;\nfunction fromPixels(args) {\n const { inputs, backend, attrs } = args;\n let { pixels } = inputs;\n const { numChannels } = attrs;\n const isVideo = typeof (HTMLVideoElement) !== 'undefined' &&\n pixels instanceof HTMLVideoElement;\n const isImage = typeof (HTMLImageElement) !== 'undefined' &&\n pixels instanceof HTMLImageElement;\n const [width, height] = isVideo ?\n [\n pixels.videoWidth,\n pixels.videoHeight\n ] :\n [pixels.width, pixels.height];\n const texShape = [height, width];\n const outShape = [height, width, numChannels];\n if (isImage || isVideo) {\n if (fromPixels2DContext == null) {\n fromPixels2DContext = document.createElement('canvas').getContext('2d');\n }\n fromPixels2DContext.canvas.width = width;\n fromPixels2DContext.canvas.height = height;\n fromPixels2DContext.drawImage(pixels, 0, 0, width, height);\n pixels = fromPixels2DContext.canvas;\n }\n const tempPixelHandle = backend.makeTensorInfo(texShape, 'int32');\n // This is a byte texture with pixels.\n backend.texData.get(tempPixelHandle.dataId).usage = TextureUsage.PIXELS;\n backend.gpgpu.uploadPixelDataToTexture(backend.getTexture(tempPixelHandle.dataId), pixels);\n const program = env().getBool('WEBGL_PACK') ?\n new FromPixelsPackedProgram(outShape) :\n new FromPixelsProgram(outShape);\n const res = backend.runWebGLProgram(program, [tempPixelHandle], 'int32');\n backend.disposeData(tempPixelHandle.dataId);\n return res;\n}\n//# sourceMappingURL=FromPixels.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { IFFT } from '@tensorflow/tfjs-core';\nimport { fftImpl } from './FFT_impl';\nexport function ifft(args) {\n const { inputs, backend } = args;\n const { input } = inputs;\n return fftImpl(input, true /* inverse */, backend);\n}\nexport const ifftConfig = {\n kernelName: IFFT,\n backendName: 'webgl',\n kernelFunc: ifft\n};\n//# sourceMappingURL=IFFT.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nexport class MeanProgram {\n constructor(reduceInfo, divisor) {\n this.variableNames = ['x'];\n const { windowSize, batchSize, inSize, outSize } = reduceInfo;\n this.outputShape = [batchSize, outSize];\n const windowSizeNearestVec4 = Math.floor(windowSize / 4) * 4;\n const windowSizeVec4Remainder = windowSize % 4;\n let updateSnippet = `sumValue += dot(values, ones);`;\n if (divisor != null) {\n const denominator = 1 / divisor;\n updateSnippet = `sumValue += dot(values * ${util.isInt(denominator) ? 
denominator.toPrecision(2) :\n denominator}, ones);`;\n }\n let checkOutOfBounds = '';\n if (inSize % windowSize > 0) {\n checkOutOfBounds = `\n if (inIdx < 0 || inIdx >= ${inSize}) {\n return 0.0;\n }\n `;\n }\n this.userCode = `\n const vec4 ones = vec4(1.0, 1.0, 1.0, 1.0);\n\n float getValue(int batch, int inIdx) {\n ${checkOutOfBounds}\n return getX(batch, inIdx);\n }\n\n void main() {\n ivec2 coords = getOutputCoords();\n int batch = coords[0];\n int outIdx = coords[1];\n int inOffset = outIdx * ${windowSize};\n\n float sumValue = 0.0;\n\n for (int i = 0; i < ${windowSizeNearestVec4}; i += 4) {\n int inIdx = inOffset + i;\n vec4 values = vec4(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1),\n getValue(batch, inIdx + 2),\n getValue(batch, inIdx + 3)\n );\n\n ${updateSnippet}\n }\n\n int inIdx = inOffset + ${windowSizeNearestVec4};\n if (${windowSizeVec4Remainder === 1}) {\n vec4 values = vec4(getValue(batch, inIdx), 0.0, 0.0, 0.0);\n\n ${updateSnippet}\n } else if (${windowSizeVec4Remainder === 2}) {\n vec4 values = vec4(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1), 0.0, 0.0);\n\n ${updateSnippet}\n } else if (${windowSizeVec4Remainder === 3}) {\n vec4 values = vec4(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1),\n getValue(batch, inIdx + 2), 0.0);\n\n ${updateSnippet}\n }\n setOutput(sumValue);\n }\n `;\n }\n}\n//# sourceMappingURL=mean_gpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util } from '@tensorflow/tfjs-core';\nimport { MeanProgram } from '../mean_gpu';\nimport { ReduceProgram } from '../reduce_gpu';\n// Returns an array of configuration objects that describe each stage of the\n// reduction.\nfunction getReductionStages(inShape) {\n const stages = [];\n while (stages.length === 0 || stages[stages.length - 1].outSize !== 1) {\n const outSize = stages.length ? 
stages[stages.length - 1].outSize : inShape[1];\n const windowSize = backend_util.computeOptimalWindowSize(outSize);\n stages.push({\n inSize: outSize,\n windowSize,\n outSize: Math.ceil(outSize / windowSize)\n });\n }\n return stages;\n}\nexport function reduce(x, dtype, reductionType, backend) {\n const reductionStages = getReductionStages(x.shape);\n let result = x;\n for (let i = 0; i < reductionStages.length; i++) {\n const { inSize, windowSize, outSize } = reductionStages[i];\n let program;\n let previousResult;\n if (reductionType === 'mean') {\n program = i === 0 ?\n new MeanProgram({ windowSize, inSize, batchSize: x.shape[0], outSize }, inSize) :\n new MeanProgram({ windowSize, inSize, batchSize: x.shape[0], outSize });\n }\n else {\n program = new ReduceProgram({ windowSize, inSize, batchSize: x.shape[0], outSize }, reductionType);\n }\n previousResult = result;\n result = backend.runWebGLProgram(program, [result], dtype);\n if (previousResult.dataId !== x.dataId) {\n backend.disposeIntermediateTensorInfo(previousResult);\n }\n }\n return result;\n}\n//# sourceMappingURL=reduce.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nimport { reduce } from '../kernel_utils/reduce';\nimport { reshape } from '../kernels/Reshape';\nexport function maxImpl(x, reduceShape, outShape, backend) {\n const inSize = util.sizeFromShape(reduceShape);\n const xSize = util.sizeFromShape(x.shape);\n const batchSize = xSize / inSize;\n const reshapedInput = reshape({ inputs: { x }, attrs: { shape: [batchSize, inSize] }, backend });\n const reduced = reduce(reshapedInput, x.dtype, 'max', backend);\n const reshapedOutput = reshape({ inputs: { x: reduced }, attrs: { shape: outShape }, backend });\n backend.disposeIntermediateTensorInfo(reshapedInput);\n backend.disposeIntermediateTensorInfo(reduced);\n return reshapedOutput;\n}\n//# sourceMappingURL=Max_impl.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getCoordsDataType } from './shader_compiler';\nexport class TransposeProgram {\n constructor(aShape, newDim) {\n this.variableNames = ['A'];\n const outputShape = new Array(aShape.length);\n for (let i = 0; i < outputShape.length; i++) {\n outputShape[i] = aShape[newDim[i]];\n }\n this.outputShape = outputShape;\n this.rank = outputShape.length;\n const dtype = getCoordsDataType(this.rank);\n const switched = getSwitchedCoords(newDim);\n this.userCode = `\n void main() {\n ${dtype} resRC = getOutputCoords();\n setOutput(getA(${switched}));\n }\n `;\n }\n}\nfunction getSwitchedCoords(newDim) {\n const rank = newDim.length;\n if (rank > 6) {\n throw Error(`Transpose for rank ${rank} is not yet supported`);\n }\n const originalOrder = ['resRC.x', 'resRC.y', 'resRC.z', 'resRC.w', 'resRC.u', 'resRC.v'];\n const switchedCoords = new Array(rank);\n for (let i = 0; i < newDim.length; i++) {\n switchedCoords[newDim[i]] = originalOrder[i];\n }\n return switchedCoords.join();\n}\n//# sourceMappingURL=transpose_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getVecChannels } from './packing_util';\nimport { getCoordsDataType } from './shader_compiler';\nexport class TransposePackedProgram {\n constructor(aShape, newDim) {\n this.variableNames = ['A'];\n this.packedInputs = true;\n this.packedOutput = true;\n const outputShape = new Array(aShape.length);\n for (let i = 0; i < outputShape.length; i++) {\n outputShape[i] = aShape[newDim[i]];\n }\n this.outputShape = outputShape;\n this.rank = outputShape.length;\n if (this.rank > 6) {\n throw Error(`Packed transpose for rank ${this.rank} is not yet supported.`);\n }\n const dtype = getCoordsDataType(this.rank);\n const outputOrder = getVecChannels('rc', this.rank);\n const switchedOrder = new Array(this.rank);\n for (let i = 0; i < newDim.length; i++) {\n switchedOrder[newDim[i]] = outputOrder[i];\n }\n const innerDims = `vec2(${switchedOrder.slice(-2).join()})`;\n const nextColumn = `++${outputOrder[this.rank - 1]} < ${outputShape[this.rank - 1]}`;\n const getc = `getChannel(getA(${switchedOrder.join()}), ${innerDims})`;\n this.userCode = `\n void main() {\n ${dtype} rc = getOutputCoords();\n vec4 
result = vec4(0.);\n result[0] = ${getc};\n if(${nextColumn}) {\n result[1] = ${getc};\n }\n --${outputOrder[this.rank - 1]};\n if(++${outputOrder[this.rank - 2]} < ${outputShape[this.rank - 2]}) {\n result[2] = ${getc};\n if(${nextColumn}) {\n result[3] = ${getc};\n }\n }\n setOutput(result);\n }\n `;\n }\n}\n//# sourceMappingURL=transpose_packed_gpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from '@tensorflow/tfjs-core';\nimport { transposeImplCPU } from '../kernel_utils/shared';\nimport { TransposeProgram } from '../transpose_gpu';\nimport { TransposePackedProgram } from '../transpose_packed_gpu';\nexport function transposeImpl(x, perm, backend) {\n const program = env().getBool('WEBGL_PACK_ARRAY_OPERATIONS') ?\n new TransposePackedProgram(x.shape, perm) :\n new TransposeProgram(x.shape, perm);\n return backend.runWebGLProgram(program, [x], x.dtype);\n}\nexport { transposeImplCPU };\n//# sourceMappingURL=Transpose_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Max } from '@tensorflow/tfjs-core';\nimport { backend_util, util } from '@tensorflow/tfjs-core';\nimport { maxImplCPU } from '../kernel_utils/shared';\nimport { maxImpl } from './Max_impl';\nimport { transposeImpl, transposeImplCPU } from './Transpose_impl';\nexport const maxConfig = {\n kernelName: Max,\n backendName: 'webgl',\n kernelFunc: ({ inputs, attrs, backend }) => {\n const { x } = inputs;\n const { reductionIndices, keepDims } = attrs;\n const webglBackend = backend;\n const xRank = x.shape.length;\n const origAxes = util.parseAxisParam(reductionIndices, x.shape);\n let axes = origAxes;\n const permutedAxes = backend_util.getAxesPermutation(axes, xRank);\n const maxInputIsTransposed = permutedAxes != null;\n const shouldExecuteOnCPU = webglBackend.shouldExecuteOnCPU([x]);\n let maxInput = x;\n if (maxInputIsTransposed) {\n if (shouldExecuteOnCPU) {\n const xTexData = webglBackend.texData.get(maxInput.dataId);\n const values = xTexData.values;\n const newShape = new Array(xRank);\n for (let i = 0; i < newShape.length; i++) {\n newShape[i] = x.shape[permutedAxes[i]];\n }\n const maxInputValues = transposeImplCPU(values, x.shape, x.dtype, 
permutedAxes, newShape);\n maxInput = webglBackend.makeTensorInfo(newShape, x.dtype);\n const maxInputData = webglBackend.texData.get(maxInput.dataId);\n maxInputData.values = maxInputValues;\n }\n else {\n maxInput = transposeImpl(x, permutedAxes, webglBackend);\n }\n axes = backend_util.getInnerMostAxes(axes.length, xRank);\n }\n backend_util.assertAxesAreInnerMostDims('max', axes, xRank);\n const [maxOutShape, reduceShape] = backend_util.computeOutAndReduceShapes(maxInput.shape, axes);\n let outShape = maxOutShape;\n if (keepDims) {\n // rather than reshape at the end, set the target shape here.\n outShape = backend_util.expandShapeToKeepDim(maxOutShape, origAxes);\n }\n let out;\n if (shouldExecuteOnCPU) {\n const xTexData = webglBackend.texData.get(maxInput.dataId);\n const values = xTexData.values;\n const outValues = maxImplCPU(values, util.sizeFromShape(reduceShape), outShape, x.dtype);\n out = webglBackend.makeTensorInfo(outShape, x.dtype);\n const outData = webglBackend.texData.get(out.dataId);\n outData.values = outValues;\n }\n else {\n out = maxImpl(maxInput, reduceShape, outShape, webglBackend);\n }\n if (maxInputIsTransposed) {\n webglBackend.disposeIntermediateTensorInfo(maxInput);\n }\n return out;\n }\n};\n//# sourceMappingURL=Max.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, MaxPool, util } from '@tensorflow/tfjs-core';\nimport { Pool2DProgram } from '../pool_gpu';\nimport { assertNotComplex } from '../webgl_util';\nimport { identity } from './Identity';\nexport function maxPool(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n assertNotComplex(x, 'maxPool');\n const { filterSize, strides, pad, dimRoundingMode } = attrs;\n const dilations = 1;\n util.assert(backend_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in maxPool: Either strides or dilations must be 1. ' +\n `Got strides ${strides} and dilations '${dilations}'`);\n const convInfo = backend_util.computePool2DInfo(x.shape, filterSize, strides, dilations, pad, dimRoundingMode);\n if (convInfo.filterWidth === 1 && convInfo.filterHeight === 1 &&\n util.arraysEqual(convInfo.inShape, convInfo.outShape)) {\n return identity({ inputs: { x }, backend });\n }\n const maxPoolProgram = new Pool2DProgram(convInfo, 'max', false);\n return backend.runWebGLProgram(maxPoolProgram, [x], x.dtype);\n}\nexport const maxPoolConfig = {\n kernelName: MaxPool,\n backendName: 'webgl',\n kernelFunc: maxPool\n};\n//# sourceMappingURL=MaxPool.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, MaxPoolBackprop } from '@tensorflow/tfjs-core';\nimport { MaxPool2DBackpropProgram } from '../max_pool_backprop_gpu';\nimport { Pool2DProgram } from '../pool_gpu';\nimport { assertNotComplex } from '../webgl_util';\nexport function maxPoolBackprop(args) {\n const { inputs, backend, attrs } = args;\n const { dy, input, output } = inputs;\n const x = input;\n assertNotComplex([input, output], 'maxPoolBackprop');\n const { filterSize, strides, pad, dimRoundingMode } = attrs;\n const convInfo = backend_util.computePool2DInfo(x.shape, filterSize, strides, 1 /* dilations */, pad, dimRoundingMode);\n const getPositions = true;\n const maxPoolPositionsProgram = new Pool2DProgram(convInfo, 'max', getPositions);\n const maxPoolPositions = backend.runWebGLProgram(maxPoolPositionsProgram, [x], x.dtype);\n const maxPoolBackPropProgram = new MaxPool2DBackpropProgram(convInfo);\n const result = backend.runWebGLProgram(maxPoolBackPropProgram, [dy, maxPoolPositions], x.dtype);\n backend.disposeIntermediateTensorInfo(maxPoolPositions);\n return result;\n}\nexport const maxPoolBackpropConfig = {\n kernelName: MaxPoolBackprop,\n backendName: 'webgl',\n kernelFunc: maxPoolBackprop\n};\n//# sourceMappingURL=MaxPoolBackprop.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Pool2DProgram } from '../pool_gpu';\nexport function maxPoolWithArgmaxImpl(x, includeBatchInIndex, convInfo, backend) {\n let program = new Pool2DProgram(convInfo, 'max', false);\n const poolOutput = backend.runWebGLProgram(program, [x], 'float32');\n program = new Pool2DProgram(convInfo, 'max', true, true, includeBatchInIndex);\n const indexOutput = backend.runWebGLProgram(program, [x], 'float32');\n return [poolOutput, indexOutput];\n}\n//# sourceMappingURL=MaxPoolWithArgmax_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { MaxPoolWithArgmax } from '@tensorflow/tfjs-core';\nimport { backend_util, util } from '@tensorflow/tfjs-core';\nimport { maxPoolWithArgmaxImpl } from './MaxPoolWithArgmax_impl';\nexport const maxPoolWithArgmaxConfig = {\n kernelName: MaxPoolWithArgmax,\n backendName: 'webgl',\n kernelFunc: ({ inputs, attrs, backend }) => {\n const { x } = inputs;\n const { filterSize, strides, pad, includeBatchInIndex } = attrs;\n const webglBackend = backend;\n util.assert(x.shape.length === 4, () => `Error in maxPool: input must be rank 4 but got rank ${x.shape.length}.`);\n const dilations = [1, 1];\n util.assert(backend_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in maxPool: Either strides or dilations must be 1. ' +\n `Got strides ${strides} and dilations '${dilations}'`);\n const convInfo = backend_util.computePool2DInfo(x.shape, filterSize, strides, dilations, pad);\n const [result, indexes] = maxPoolWithArgmaxImpl(x, includeBatchInIndex, convInfo, webglBackend);\n return [result, indexes];\n }\n};\n//# sourceMappingURL=MaxPoolWithArgmax.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nimport { reduce } from '../kernel_utils/reduce';\nimport { reshape } from '../kernels/Reshape';\nexport function meanImpl(x, reduceShape, outShape, backend) {\n const inSize = util.sizeFromShape(reduceShape);\n const xSize = util.sizeFromShape(x.shape);\n const batchSize = xSize / inSize;\n const reshapedInput = reshape({ inputs: { x }, attrs: { shape: [batchSize, inSize] }, backend });\n const reduced = reduce(reshapedInput, 'float32', 'mean', backend);\n const reshapedOutput = reshape({ inputs: { x: reduced }, attrs: { shape: outShape }, backend });\n backend.disposeIntermediateTensorInfo(reshapedInput);\n backend.disposeIntermediateTensorInfo(reduced);\n return reshapedOutput;\n}\n//# sourceMappingURL=Mean_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Mean, util } from '@tensorflow/tfjs-core';\nimport { meanImpl } from './Mean_impl';\nimport { transposeImpl, transposeImplCPU } from './Transpose_impl';\nexport const meanConfig = {\n kernelName: Mean,\n backendName: 'webgl',\n kernelFunc: ({ inputs, attrs, backend }) => {\n const { x } = inputs;\n const { keepDims, axis } = attrs;\n const webglBackend = backend;\n const xRank = x.shape.length;\n const origAxes = util.parseAxisParam(axis, x.shape);\n let axes = origAxes;\n const permutedAxes = backend_util.getAxesPermutation(axes, xRank);\n const meanInputIsTransposed = permutedAxes != null;\n const shouldExecuteOnCPU = webglBackend.shouldExecuteOnCPU([x]);\n const intermediates = [];\n let meanInput = x;\n if (meanInputIsTransposed) {\n if (shouldExecuteOnCPU) {\n const xTexData = webglBackend.texData.get(meanInput.dataId);\n const values = xTexData.values;\n const newShape = new Array(xRank);\n for (let i = 0; i < newShape.length; i++) {\n newShape[i] = x.shape[permutedAxes[i]];\n }\n const meanInputValues = transposeImplCPU(values, x.shape, x.dtype, permutedAxes, newShape);\n meanInput = webglBackend.makeTensorInfo(newShape, x.dtype);\n const meanInputData = webglBackend.texData.get(meanInput.dataId);\n meanInputData.values = meanInputValues;\n }\n else {\n meanInput = transposeImpl(x, permutedAxes, webglBackend);\n }\n intermediates.push(meanInput);\n axes = backend_util.getInnerMostAxes(axes.length, xRank);\n }\n backend_util.assertAxesAreInnerMostDims('sum', axes, xRank);\n const [meanOutShape, reduceShape] = backend_util.computeOutAndReduceShapes(meanInput.shape, axes);\n let outShape = meanOutShape;\n if (keepDims) {\n // rather than reshape at the end, set the target shape here.\n outShape = backend_util.expandShapeToKeepDim(meanOutShape, origAxes);\n }\n const out = meanImpl(meanInput, reduceShape, outShape, webglBackend);\n for (const i of intermediates) {\n webglBackend.disposeIntermediateTensorInfo(i);\n }\n return out;\n }\n};\n//# sourceMappingURL=Mean.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getCoordsDataType } from './shader_compiler';\nexport class MirrorPadProgram {\n constructor(xShape, paddings, mode) {\n this.variableNames = ['x'];\n this.outputShape = paddings.map((p, i) => p[0] /* beforePad */ + xShape[i] + p[1] /* afterPad */);\n const rank = xShape.length;\n const dtype = getCoordsDataType(rank);\n const start = paddings.map(p => p[0]).join(',');\n const end = paddings.map((p, i) => p[0] + xShape[i]).join(',');\n const unpackedCoords = ['coords[0]', 'coords[1]', 'coords[2]', 'coords[3]'].slice(0, rank);\n const offset = mode === 'reflect' ? 0 : 1;\n if (rank === 1) {\n this.userCode = `\n int start = ${start};\n int end = ${end};\n\n void main() {\n int outC = getOutputCoords();\n if (outC < start) {\n outC = start * 2 - outC - ${offset};\n } else if(outC >= end) {\n outC = (end - 1) * 2 - outC + ${offset};\n }\n setOutput(getX(outC - start));\n }\n `;\n return;\n }\n this.userCode = `\n ${dtype} start = ${dtype}(${start});\n ${dtype} end = ${dtype}(${end});\n\n void main() {\n ${dtype} outC = getOutputCoords();\n for (int i = 0; i < ${rank}; i++) {\n if (outC[i] < start[i]) {\n outC[i] = start[i] * 2 - outC[i] - ${offset};\n } else if(outC[i] >= end[i]) {\n outC[i] = (end[i] - 1) * 2 - outC[i] + ${offset};\n }\n }\n ${dtype} coords = outC - start;\n setOutput(getX(${unpackedCoords}));\n }\n `;\n }\n}\n//# sourceMappingURL=mirror_pad_gpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getChannels } from './packing_util';\nimport { getCoordsDataType } from './shader_compiler';\n/**\n * Example shader code for\n * `mirrorPad(tf.tensor1d([1, 2, 3], 'int32'), [[2, 2]], 'reflect')`\n * ```\n * const int start = int(2);\n * const int end = int(5);\n *\n * void main() {\n * int outputLoc = getOutputCoords();\n * vec4 result = vec4(0.);\n *\n * int rc = outputLoc;\n *\n * int source = rc;\n * if (source < start) {\n * source = start * 2 - source - 0;\n * } else if (source >= end) {\n * source = (end - 1) * 2 - source + 0;\n * }\n * source -= start;\n *\n * result[0] = getChannel(getX(source), source);\n * rc += 1;\n * if(rc < 6) {\n * int source = rc;\n * if (source < start) {\n * source = start * 2 - source - 0;\n * } else if (source >= end) {\n * source = (end - 1) * 2 - source + 0;\n * }\n * source -= start;\n *\n * result[1] = getChannel(getX(source), source);\n * }\n *\n * setOutput(result);\n * }\n * ```\n */\nexport class MirrorPadPackedProgram {\n constructor(xShape, paddings, mode) {\n this.variableNames = ['x'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = paddings.map((p, i) => p[0] /* beforePad */ + xShape[i] + p[1] /* afterPad */);\n const rank = xShape.length;\n const dtype = getCoordsDataType(rank);\n const start = paddings.map(p => p[0]).join(',');\n const end = paddings.map((p, i) => p[0] + xShape[i]).join(',');\n const coords = getChannels('rc', rank);\n const source = getChannels('source', rank);\n const cLimit = `${coords[rank - 1]} < ${this.outputShape[rank - 1]}`;\n const innerDims = rank === 1 ? 'source' : `vec2(${source.slice(-2).join()})`;\n const offset = mode === 'reflect' ? 
0 : 1;\n let mainLoop = '';\n if (rank === 1) {\n const padSetup = `\n ${dtype} source = rc;\n if (source < start) {\n source = start * 2 - source - ${offset};\n } else if (source >= end) {\n source = (end - 1) * 2 - source + ${offset};\n }\n source -= start;\n `;\n mainLoop = `\n ${dtype} rc = outputLoc;\n ${padSetup}\n result[0] = getChannel(getX(${source.join()}), ${innerDims});\n ${coords[rank - 1]} += 1;\n if(${cLimit}) {\n ${padSetup}\n result[1] = getChannel(getX(${source.join()}), ${innerDims});\n }\n `;\n }\n else {\n const padSetup = `\n ${dtype} source = rc;\n ${dtype} lt = ${dtype}(lessThan(source, start));\n ${dtype} gte = ${dtype}(greaterThanEqual(source, end));\n ${dtype} orig = 1 - (lt + gte);\n source = orig * source +\n lt * (start * 2 - source - ${offset}) +\n gte * ((end - 1) * 2 - source + ${offset});\n source -= start;\n `;\n mainLoop = `\n ${dtype} rc = outputLoc;\n ${padSetup}\n result[0] = getChannel(getX(${source.join()}), ${innerDims});\n ${coords[rank - 1]} += 1;\n if(${cLimit}) {\n ${padSetup}\n result[1] = getChannel(getX(${source.join()}), ${innerDims});\n }\n rc = outputLoc;\n ${coords[rank - 2]} += 1;\n if(${coords[rank - 2]} < ${this.outputShape[rank - 2]}) {\n ${padSetup}\n result[2] = getChannel(getX(${source.join()}), ${innerDims});\n ${coords[rank - 1]} += 1;\n if(${cLimit}) {\n ${padSetup}\n result[3] = getChannel(getX(${source.join()}), ${innerDims});\n }\n }\n `;\n }\n this.userCode = `\n const ${dtype} start = ${dtype}(${start});\n const ${dtype} end = ${dtype}(${end});\n\n void main() {\n ${dtype} outputLoc = getOutputCoords();\n vec4 result = vec4(0.);\n ${mainLoop}\n setOutput(result);\n }\n `;\n }\n}\n//# sourceMappingURL=mirror_pad_packed_gpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env, MirrorPad } from '@tensorflow/tfjs-core';\nimport { MirrorPadProgram } from '../mirror_pad_gpu';\nimport { MirrorPadPackedProgram } from '../mirror_pad_packed_gpu';\nexport const mirrorPadKernelFunc = ({ inputs, backend, attrs }) => {\n const { x } = inputs;\n const { paddings, mode } = attrs;\n const program = env().getBool('WEBGL_PACK_ARRAY_OPERATIONS') ?\n new MirrorPadPackedProgram(x.shape, paddings, mode) :\n new MirrorPadProgram(x.shape, paddings, mode);\n const output = backend.runWebGLProgram(program, [x], x.dtype);\n return output;\n};\nexport const mirrorPadConfig = {\n kernelName: MirrorPad,\n backendName: 'webgl',\n kernelFunc: mirrorPadKernelFunc,\n};\n//# sourceMappingURL=MirrorPad.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util } from '@tensorflow/tfjs-core';\n// (Ar + Ai)(Br + Bi) =\n// ArBr + ArBi + AiBr + AiBi = ArBr - AB + ArBi + AiBr\n// Yr = ArBr - AB\n// Yi = ArBi + AiBr\nexport const COMPLEX_MULTIPLY = {\n REAL: 'return areal * breal - aimag * bimag;',\n IMAG: 'return areal * bimag + aimag * breal;'\n};\nexport class BinaryOpComplexProgram {\n constructor(op, aShape, bShape) {\n this.variableNames = ['AReal', 'AImag', 'BReal', 'BImag'];\n this.outputShape = backend_util.assertAndGetBroadcastShape(aShape, bShape);\n this.userCode = `\n float binaryOpComplex(\n float areal, float aimag, float breal, float bimag) {\n ${op}\n }\n\n void main() {\n float areal = getARealAtOutCoords();\n float aimag = getAImagAtOutCoords();\n float breal = getBRealAtOutCoords();\n float bimag = getBImagAtOutCoords();\n setOutput(binaryOpComplex(areal, aimag, breal, bimag));\n }\n `;\n }\n}\n//# sourceMappingURL=binaryop_complex_gpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, env, Multiply } from '@tensorflow/tfjs-core';\nimport * as binaryop_complex_gpu from '../binaryop_complex_gpu';\nimport { BinaryOpComplexProgram } from '../binaryop_complex_gpu';\nimport { BinaryOpProgram } from '../binaryop_gpu';\nimport { BinaryOpPackedProgram } from '../binaryop_packed_gpu';\nimport { multiplyImplCPU as cpuMultiply } from '../kernel_utils/shared';\nimport { complex } from './Complex';\nconst MUL = 'return a * b;';\nexport function multiply(args) {\n const { inputs, backend } = args;\n const { a, b } = inputs;\n const dtype = backend_util.upcastType(a.dtype, b.dtype);\n if (a.dtype === 'complex64') {\n const aData = backend.texData.get(a.dataId);\n const bData = backend.texData.get(b.dataId);\n const realProgram = new BinaryOpComplexProgram(binaryop_complex_gpu.COMPLEX_MULTIPLY.REAL, a.shape, b.shape);\n const imagProgram = new BinaryOpComplexProgram(binaryop_complex_gpu.COMPLEX_MULTIPLY.IMAG, a.shape, b.shape);\n const inputs = [\n {\n dataId: aData.complexTensorInfos.real.dataId,\n dtype: aData.complexTensorInfos.real.dtype,\n shape: a.shape\n },\n {\n dataId: aData.complexTensorInfos.imag.dataId,\n dtype: aData.complexTensorInfos.imag.dtype,\n shape: 
a.shape\n },\n {\n dataId: bData.complexTensorInfos.real.dataId,\n dtype: bData.complexTensorInfos.real.dtype,\n shape: b.shape\n },\n {\n dataId: bData.complexTensorInfos.imag.dataId,\n dtype: bData.complexTensorInfos.imag.dtype,\n shape: b.shape\n }\n ];\n const realPart = backend.runWebGLProgram(realProgram, inputs, 'float32');\n const imagPart = backend.runWebGLProgram(imagProgram, inputs, 'float32');\n const complexOutput = complex({ inputs: { real: realPart, imag: imagPart }, backend });\n backend.disposeIntermediateTensorInfo(realPart);\n backend.disposeIntermediateTensorInfo(imagPart);\n // TODO(annxingyuan): CPU forwarding for complex inputs.\n return complexOutput;\n }\n if (backend.shouldExecuteOnCPU([a, b])) {\n const aData = backend.texData.get(a.dataId);\n const bData = backend.texData.get(b.dataId);\n const [outValues, outShape] = cpuMultiply(a.shape, b.shape, aData.values, bData.values, dtype);\n const out = backend.makeTensorInfo(outShape, dtype);\n const outData = backend.texData.get(out.dataId);\n outData.values = outValues;\n return out;\n }\n let program;\n if (env().getBool('WEBGL_PACK_BINARY_OPERATIONS')) {\n program = new BinaryOpPackedProgram(MUL, a.shape, b.shape);\n }\n else {\n program = new BinaryOpProgram(MUL, a.shape, b.shape);\n }\n return backend.runWebGLProgram(program, [a, b], dtype);\n}\nexport const multiplyConfig = {\n kernelName: Multiply,\n backendName: 'webgl',\n kernelFunc: multiply\n};\n//# sourceMappingURL=Multiply.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, kernel_impls, NonMaxSuppressionV3 } from '@tensorflow/tfjs-core';\nexport const nonMaxSuppressionV3Config = {\n kernelName: NonMaxSuppressionV3,\n backendName: 'webgl',\n kernelFunc: ({ inputs, backend, attrs }) => {\n backend_util.warn('tf.nonMaxSuppression() in webgl locks the UI thread. ' +\n 'Call tf.nonMaxSuppressionAsync() instead');\n const { boxes, scores } = inputs;\n const { maxOutputSize, iouThreshold, scoreThreshold } = attrs;\n const gpuBackend = backend;\n const boxesVals = gpuBackend.readSync(boxes.dataId);\n const scoresVals = gpuBackend.readSync(scores.dataId);\n const maxOutputSizeVal = maxOutputSize;\n const iouThresholdVal = iouThreshold;\n const scoreThresholdVal = scoreThreshold;\n return kernel_impls.nonMaxSuppressionV3Impl(boxesVals, scoresVals, maxOutputSizeVal, iouThresholdVal, scoreThresholdVal);\n }\n};\n//# sourceMappingURL=NonMaxSuppressionV3.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, kernel_impls, NonMaxSuppressionV4 } from '@tensorflow/tfjs-core';\nconst nonMaxSuppressionV4Impl = kernel_impls.nonMaxSuppressionV4Impl;\nexport const nonMaxSuppressionV4Config = {\n kernelName: NonMaxSuppressionV4,\n backendName: 'webgl',\n kernelFunc: ({ inputs, backend, attrs }) => {\n backend_util.warn('tf.nonMaxSuppression() in webgl locks the UI thread. ' +\n 'Call tf.nonMaxSuppressionAsync() instead');\n const { boxes, scores } = inputs;\n const { maxOutputSize, iouThreshold, scoreThreshold, padToMaxOutputSize } = attrs;\n const gpuBackend = backend;\n const boxesVals = gpuBackend.readSync(boxes.dataId);\n const scoresVals = gpuBackend.readSync(scores.dataId);\n const { selectedIndices, validOutputs } = nonMaxSuppressionV4Impl(boxesVals, scoresVals, maxOutputSize, iouThreshold, scoreThreshold, padToMaxOutputSize);\n return [selectedIndices, validOutputs];\n }\n};\n//# sourceMappingURL=NonMaxSuppressionV4.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, kernel_impls } from '@tensorflow/tfjs-core';\nimport { NonMaxSuppressionV5 } from '@tensorflow/tfjs-core';\nconst nonMaxSuppressionV5Impl = kernel_impls.nonMaxSuppressionV5Impl;\nexport const nonMaxSuppressionV5Config = {\n kernelName: NonMaxSuppressionV5,\n backendName: 'webgl',\n kernelFunc: ({ inputs, backend, attrs }) => {\n backend_util.warn('tf.nonMaxSuppression() in webgl locks the UI thread. 
' +\n 'Call tf.nonMaxSuppressionAsync() instead');\n const { boxes, scores } = inputs;\n const { maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma } = attrs;\n const gpuBackend = backend;\n const boxesVals = gpuBackend.readSync(boxes.dataId);\n const scoresVals = gpuBackend.readSync(scores.dataId);\n const maxOutputSizeVal = maxOutputSize;\n const iouThresholdVal = iouThreshold;\n const scoreThresholdVal = scoreThreshold;\n const softNmsSigmaVal = softNmsSigma;\n const { selectedIndices, selectedScores } = nonMaxSuppressionV5Impl(boxesVals, scoresVals, maxOutputSizeVal, iouThresholdVal, scoreThresholdVal, softNmsSigmaVal);\n return [selectedIndices, selectedScores];\n }\n};\n//# sourceMappingURL=NonMaxSuppressionV5.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util } from '@tensorflow/tfjs-core';\nexport class RotateProgram {\n constructor(imageShape, radians, fillValue, center) {\n this.variableNames = ['Image'];\n this.outputShape = [];\n const imageHeight = imageShape[1];\n const imageWidth = imageShape[2];\n const sinFactor = Math.sin(radians).toFixed(3);\n const cosFactor = Math.cos(radians).toFixed(3);\n this.outputShape = imageShape;\n const [centerX, centerY] = backend_util.getImageCenter(center, imageHeight, imageWidth);\n const centerXString = centerX.toFixed(3);\n const centerYString = centerY.toFixed(3);\n let fillSnippet = '';\n if (typeof fillValue === 'number') {\n fillSnippet = `float outputValue = ${fillValue.toFixed(2)};`;\n }\n else {\n fillSnippet = `\n vec3 fill = vec3(${fillValue.join(',')});\n float outputValue = fill[coords[3]];`;\n }\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int x = coords[2];\n int y = coords[1];\n float coordXFloat = (float(x) - ${centerXString}) * ${cosFactor} - (float(y) - ${centerYString}) * ${sinFactor};\n float coordYFloat = (float(x) - ${centerXString}) * ${sinFactor} + (float(y) - ${centerYString}) * ${cosFactor};\n int coordX = int(round(coordXFloat + ${centerXString}));\n int coordY = int(round(coordYFloat + ${centerYString}));\n ${fillSnippet}\n if(coordX >= 0 && coordX < ${imageWidth} && coordY >= 0 && coordY < ${imageHeight}) {\n outputValue = getImage(coords[0], coordY, coordX, coords[3]);\n }\n setOutput(outputValue);\n }\n `;\n }\n}\n//# sourceMappingURL=rotate_gpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { RotateWithOffset } from '@tensorflow/tfjs-core';\nimport { RotateProgram } from '../rotate_gpu';\nexport const rotateWithOffsetConfig = {\n kernelName: RotateWithOffset,\n backendName: 'webgl',\n kernelFunc: ({ inputs, attrs, backend }) => {\n const { image } = inputs;\n const { radians, fillValue, center } = attrs;\n const webglBackend = backend;\n const program = new RotateProgram(image.shape, radians, fillValue, center);\n const output = webglBackend.runWebGLProgram(program, [image], image.dtype);\n return output;\n }\n};\n//# sourceMappingURL=RotateWithOffset.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sin } from '@tensorflow/tfjs-core';\nimport { CHECK_NAN_SNIPPET_UNARY, unaryKernelFunc } from '../kernel_utils/kernel_funcs_utils';\nconst SIN = CHECK_NAN_SNIPPET_UNARY + `\n return sin(x);\n`;\nexport const sin = unaryKernelFunc(SIN);\nexport const sinConfig = {\n kernelName: Sin,\n backendName: 'webgl',\n kernelFunc: sin,\n};\n//# sourceMappingURL=Sin.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Square } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../kernel_utils/kernel_funcs_utils';\nconst SQUARE = `return x * x;`;\nexport const square = unaryKernelFunc(SQUARE);\nexport const squareConfig = {\n kernelName: Square,\n backendName: 'webgl',\n kernelFunc: square,\n};\n//# sourceMappingURL=Square.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { SquaredDifference } from '@tensorflow/tfjs-core';\nimport { binaryKernelFunc } from '../kernel_utils/kernel_funcs_utils';\nconst SQUARED_DIFFERENCE = 'return (a - b) * (a - b);';\nexport const squaredDifference = binaryKernelFunc({ opSnippet: SQUARED_DIFFERENCE, packedOpSnippet: SQUARED_DIFFERENCE });\nexport const squaredDifferenceConfig = {\n kernelName: SquaredDifference,\n backendName: 'webgl',\n kernelFunc: squaredDifference,\n};\n//# sourceMappingURL=SquaredDifference.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sub } from '@tensorflow/tfjs-core';\nimport { binaryKernelFunc } from '../kernel_utils/kernel_funcs_utils';\nimport { subImplCPU as cpuSub } from '../kernel_utils/shared';\nconst SUB = 'return a - b;';\nexport const subKernelFunc = binaryKernelFunc({\n opSnippet: SUB,\n packedOpSnippet: SUB,\n supportsComplex: true,\n cpuKernelImpl: cpuSub\n});\nexport const subConfig = {\n kernelName: Sub,\n backendName: 'webgl',\n kernelFunc: subKernelFunc\n};\n//# sourceMappingURL=Sub.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Tan } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../kernel_utils/kernel_funcs_utils';\nconst TAN = `return tan(x);`;\nexport const tan = unaryKernelFunc(TAN);\nexport const tanConfig = {\n kernelName: Tan,\n backendName: 'webgl',\n kernelFunc: tan,\n};\n//# sourceMappingURL=Tan.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Transpose } from '@tensorflow/tfjs-core';\nimport { transposeImpl } from './Transpose_impl';\nimport { transposeImplCPU as cpuTranspose } from './Transpose_impl';\nexport const transposeConfig = {\n kernelName: Transpose,\n backendName: 'webgl',\n kernelFunc: ({ inputs, attrs, backend }) => {\n const { x } = inputs;\n const { perm } = attrs;\n const webglBackend = backend;\n const xRank = x.shape.length;\n const newShape = new Array(xRank);\n for (let i = 0; i < newShape.length; i++) {\n newShape[i] = x.shape[perm[i]];\n }\n let out;\n if (webglBackend.shouldExecuteOnCPU([x])) {\n const xTexData = webglBackend.texData.get(x.dataId);\n const values = xTexData.values;\n const outValues = cpuTranspose(values, x.shape, x.dtype, perm, newShape);\n out = webglBackend.makeTensorInfo(newShape, x.dtype);\n const outData = webglBackend.texData.get(out.dataId);\n outData.values = outValues;\n }\n else {\n out = transposeImpl(x, perm, webglBackend);\n }\n return out;\n }\n};\n//# sourceMappingURL=Transpose.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Unique } from '@tensorflow/tfjs-core';\nimport { uniqueImplCPU } from '../kernel_utils/shared';\nimport { assertNotComplex } from '../webgl_util';\nexport function unique(args) {\n const { inputs, attrs, backend } = args;\n const { axis } = attrs;\n const { x } = inputs;\n assertNotComplex(x, 'unique');\n // For now, always forward calculation to the CPU backend.\n console.warn('WARNING: ', 'UI might be locked temporarily as data is being downloaded');\n const values = backend.readSync(x.dataId);\n const { outputValues, outputShape, indices } = uniqueImplCPU(values, axis, x.shape, x.dtype);\n return [\n backend.makeTensorInfo(outputShape, x.dtype, outputValues),\n backend.makeTensorInfo([indices.length], 'int32', indices),\n ];\n}\nexport const uniqueConfig = {\n kernelName: Unique,\n backendName: 'webgl',\n kernelFunc: unique,\n};\n//# sourceMappingURL=Unique.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { registerKernel } from '@tensorflow/tfjs-core';\nimport { addConfig } from './kernels/Add';\nimport { atan2Config } from './kernels/Atan2';\nimport { avgPoolConfig } from './kernels/AvgPool';\nimport { avgPoolBackpropConfig } from './kernels/AvgPoolBackprop';\nimport { batchNormConfig } from './kernels/BatchNorm';\nimport { castConfig } from './kernels/Cast';\nimport { complexConfig } from './kernels/Complex';\nimport { concatConfig } from './kernels/Concat';\nimport { cosConfig } from './kernels/Cos';\nimport { divConfig } from './kernels/Div';\nimport { fftConfig } from './kernels/FFT';\nimport { flipLeftRightConfig } from './kernels/FlipLeftRight';\nimport { fromPixelsConfig } from './kernels/FromPixels';\nimport { identityConfig } from './kernels/Identity';\nimport { ifftConfig } from './kernels/IFFT';\nimport { imagConfig } from './kernels/Imag';\nimport { maxConfig } from './kernels/Max';\nimport { maxPoolConfig } from './kernels/MaxPool';\nimport { maxPoolBackpropConfig } from './kernels/MaxPoolBackprop';\nimport { maxPoolWithArgmaxConfig } from './kernels/MaxPoolWithArgmax';\nimport { meanConfig } from './kernels/Mean';\nimport { mirrorPadConfig } from './kernels/MirrorPad';\nimport { multiplyConfig } from './kernels/Multiply';\nimport { nonMaxSuppressionV3Config } from './kernels/NonMaxSuppressionV3';\nimport { nonMaxSuppressionV4Config } from './kernels/NonMaxSuppressionV4';\nimport { nonMaxSuppressionV5Config } from './kernels/NonMaxSuppressionV5';\nimport { notEqualConfig } from './kernels/NotEqual';\nimport { realConfig } from './kernels/Real';\nimport { reshapeConfig } from './kernels/Reshape';\nimport { rotateWithOffsetConfig } from './kernels/RotateWithOffset';\nimport { sinConfig } from './kernels/Sin';\nimport { squareConfig } from './kernels/Square';\nimport { squaredDifferenceConfig } from './kernels/SquaredDifference';\nimport { subConfig } from './kernels/Sub';\nimport { tanConfig } from './kernels/Tan';\nimport { transposeConfig } from './kernels/Transpose';\nimport { uniqueConfig } from './kernels/Unique';\n// List all kernel configs here\nconst kernelConfigs = [\n addConfig,\n atan2Config,\n avgPoolConfig,\n avgPoolBackpropConfig,\n batchNormConfig,\n castConfig,\n complexConfig,\n concatConfig,\n cosConfig,\n divConfig,\n fftConfig,\n flipLeftRightConfig,\n fromPixelsConfig,\n identityConfig,\n ifftConfig,\n imagConfig,\n maxConfig,\n maxPoolConfig,\n maxPoolBackpropConfig,\n maxPoolWithArgmaxConfig,\n meanConfig,\n mirrorPadConfig,\n multiplyConfig,\n nonMaxSuppressionV3Config,\n nonMaxSuppressionV4Config,\n nonMaxSuppressionV5Config,\n notEqualConfig,\n realConfig,\n reshapeConfig,\n rotateWithOffsetConfig,\n sinConfig,\n squareConfig,\n subConfig,\n squaredDifferenceConfig,\n tanConfig,\n transposeConfig,\n uniqueConfig,\n];\nfor (const kernelConfig of kernelConfigs) {\n 
registerKernel(kernelConfig);\n}\n//# sourceMappingURL=register_all_kernels.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// All exports from this package should be in base.\nexport * from './base';\nimport './register_all_kernels';\n//# sourceMappingURL=index.js.map", "/** @license See the LICENSE file. */\n\n// This code is auto-generated, do not modify this file!\nconst version = '2.7.0';\nexport {version};\n", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nexport * from '@tensorflow/tfjs-core';\nexport * from '@tensorflow/tfjs-layers';\nexport * from '@tensorflow/tfjs-converter';\n\n// Export data api as tf.data\nimport * as data from '@tensorflow/tfjs-data';\nexport {data};\n\n// Import and register backends.\nimport '@tensorflow/tfjs-backend-cpu';\nimport '@tensorflow/tfjs-backend-webgl';\n\n// Import versions of all sub-packages.\nimport {version_core} from '@tensorflow/tfjs-core';\nimport {version_cpu} from '@tensorflow/tfjs-backend-cpu';\nimport {version_webgl} from '@tensorflow/tfjs-backend-webgl';\nimport {version_data} from '@tensorflow/tfjs-data';\nimport {version_layers} from '@tensorflow/tfjs-layers';\nimport {version_converter} from '@tensorflow/tfjs-converter';\nimport {version as version_union} from './version';\n\nexport const version = {\n 'tfjs-core': version_core,\n 'tfjs-backend-cpu': version_cpu,\n 'tfjs-backend-webgl': version_webgl,\n 'tfjs-data': version_data,\n 'tfjs-layers': version_layers,\n 'tfjs-converter': version_converter,\n 'tfjs': version_union\n};\n", "", "", "", "\nvar WasmBackendModuleThreadedSimd = (function() {\n var _scriptDir = typeof document !== 'undefined' && document.currentScript ? 
document.currentScript.src : undefined;\n if (typeof __filename !== 'undefined') _scriptDir = _scriptDir || __filename;\n return (\nfunction(WasmBackendModuleThreadedSimd) {\n WasmBackendModuleThreadedSimd = WasmBackendModuleThreadedSimd || {};\n\nfunction GROWABLE_HEAP_I8(){if(wasmMemory.buffer!=buffer){updateGlobalBufferAndViews(wasmMemory.buffer)}return HEAP8}function GROWABLE_HEAP_U8(){if(wasmMemory.buffer!=buffer){updateGlobalBufferAndViews(wasmMemory.buffer)}return HEAPU8}function GROWABLE_HEAP_I32(){if(wasmMemory.buffer!=buffer){updateGlobalBufferAndViews(wasmMemory.buffer)}return HEAP32}function GROWABLE_HEAP_U32(){if(wasmMemory.buffer!=buffer){updateGlobalBufferAndViews(wasmMemory.buffer)}return HEAPU32}function GROWABLE_HEAP_F64(){if(wasmMemory.buffer!=buffer){updateGlobalBufferAndViews(wasmMemory.buffer)}return HEAPF64}var Module=typeof WasmBackendModuleThreadedSimd!==\"undefined\"?WasmBackendModuleThreadedSimd:{};var moduleOverrides={};var key;for(key in Module){if(Module.hasOwnProperty(key)){moduleOverrides[key]=Module[key]}}var arguments_=[];var thisProgram=\"./this.program\";var quit_=function(status,toThrow){throw toThrow};var ENVIRONMENT_IS_WEB=false;var ENVIRONMENT_IS_WORKER=false;var ENVIRONMENT_IS_NODE=false;var ENVIRONMENT_IS_SHELL=false;ENVIRONMENT_IS_WEB=typeof window===\"object\";ENVIRONMENT_IS_WORKER=typeof importScripts===\"function\";ENVIRONMENT_IS_NODE=typeof process===\"object\"&&typeof process.versions===\"object\"&&typeof process.versions.node===\"string\";ENVIRONMENT_IS_SHELL=!ENVIRONMENT_IS_WEB&&!ENVIRONMENT_IS_NODE&&!ENVIRONMENT_IS_WORKER;var ENVIRONMENT_IS_PTHREAD=Module[\"ENVIRONMENT_IS_PTHREAD\"]||false;if(ENVIRONMENT_IS_PTHREAD){buffer=Module[\"buffer\"];DYNAMIC_BASE=Module[\"DYNAMIC_BASE\"];DYNAMICTOP_PTR=Module[\"DYNAMICTOP_PTR\"]}var scriptDirectory=\"\";function locateFile(path){if(Module[\"locateFile\"]){return Module[\"locateFile\"](path,scriptDirectory)}return scriptDirectory+path}var read_,readAsync,readBinary,setWindowTitle;var nodeFS;var nodePath;if(ENVIRONMENT_IS_NODE){if(ENVIRONMENT_IS_WORKER){scriptDirectory=require(\"path\").dirname(scriptDirectory)+\"/\"}else{scriptDirectory=__dirname+\"/\"}read_=function shell_read(filename,binary){if(!nodeFS)nodeFS=require(\"fs\");if(!nodePath)nodePath=require(\"path\");filename=nodePath[\"normalize\"](filename);return nodeFS[\"readFileSync\"](filename,binary?null:\"utf8\")};readBinary=function readBinary(filename){var ret=read_(filename,true);if(!ret.buffer){ret=new Uint8Array(ret)}assert(ret.buffer);return ret};if(process[\"argv\"].length>1){thisProgram=process[\"argv\"][1].replace(/\\\\/g,\"/\")}arguments_=process[\"argv\"].slice(2);process[\"on\"](\"uncaughtException\",function(ex){if(!(ex instanceof ExitStatus)){throw ex}});process[\"on\"](\"unhandledRejection\",abort);quit_=function(status){process[\"exit\"](status)};Module[\"inspect\"]=function(){return\"[Emscripten Module object]\"};var nodeWorkerThreads;try{nodeWorkerThreads=require(\"worker_threads\")}catch(e){console.error('The \"worker_threads\" module is not supported in this node.js build - perhaps a newer version is needed?');throw e}Worker=nodeWorkerThreads.Worker}else if(ENVIRONMENT_IS_SHELL){if(typeof read!=\"undefined\"){read_=function shell_read(f){return read(f)}}readBinary=function readBinary(f){var data;if(typeof readbuffer===\"function\"){return new Uint8Array(readbuffer(f))}data=read(f,\"binary\");assert(typeof data===\"object\");return data};if(typeof scriptArgs!=\"undefined\"){arguments_=scriptArgs}else if(typeof 
arguments!=\"undefined\"){arguments_=arguments}if(typeof quit===\"function\"){quit_=function(status){quit(status)}}if(typeof print!==\"undefined\"){if(typeof console===\"undefined\")console={};console.log=print;console.warn=console.error=typeof printErr!==\"undefined\"?printErr:print}}else if(ENVIRONMENT_IS_WEB||ENVIRONMENT_IS_WORKER){if(ENVIRONMENT_IS_WORKER){scriptDirectory=self.location.href}else if(document.currentScript){scriptDirectory=document.currentScript.src}if(_scriptDir){scriptDirectory=_scriptDir}if(scriptDirectory.indexOf(\"blob:\")!==0){scriptDirectory=scriptDirectory.substr(0,scriptDirectory.lastIndexOf(\"/\")+1)}else{scriptDirectory=\"\"}if(ENVIRONMENT_IS_NODE){read_=function shell_read(filename,binary){if(!nodeFS)nodeFS=require(\"fs\");if(!nodePath)nodePath=require(\"path\");filename=nodePath[\"normalize\"](filename);return nodeFS[\"readFileSync\"](filename,binary?null:\"utf8\")};readBinary=function readBinary(filename){var ret=read_(filename,true);if(!ret.buffer){ret=new Uint8Array(ret)}assert(ret.buffer);return ret}}else{read_=function shell_read(url){var xhr=new XMLHttpRequest;xhr.open(\"GET\",url,false);xhr.send(null);return xhr.responseText};if(ENVIRONMENT_IS_WORKER){readBinary=function readBinary(url){var xhr=new XMLHttpRequest;xhr.open(\"GET\",url,false);xhr.responseType=\"arraybuffer\";xhr.send(null);return new Uint8Array(xhr.response)}}readAsync=function readAsync(url,onload,onerror){var xhr=new XMLHttpRequest;xhr.open(\"GET\",url,true);xhr.responseType=\"arraybuffer\";xhr.onload=function xhr_onload(){if(xhr.status==200||xhr.status==0&&xhr.response){onload(xhr.response);return}onerror()};xhr.onerror=onerror;xhr.send(null)}}setWindowTitle=function(title){document.title=title}}else{}if(ENVIRONMENT_IS_NODE){if(typeof performance===\"undefined\"){performance=require(\"perf_hooks\").performance}}var out=Module[\"print\"]||console.log.bind(console);var err=Module[\"printErr\"]||console.warn.bind(console);for(key in moduleOverrides){if(moduleOverrides.hasOwnProperty(key)){Module[key]=moduleOverrides[key]}}moduleOverrides=null;if(Module[\"arguments\"])arguments_=Module[\"arguments\"];if(Module[\"thisProgram\"])thisProgram=Module[\"thisProgram\"];if(Module[\"quit\"])quit_=Module[\"quit\"];var Atomics_load=Atomics.load;var Atomics_store=Atomics.store;var Atomics_compareExchange=Atomics.compareExchange;var wasmBinary;if(Module[\"wasmBinary\"])wasmBinary=Module[\"wasmBinary\"];var noExitRuntime;if(Module[\"noExitRuntime\"])noExitRuntime=Module[\"noExitRuntime\"];if(typeof WebAssembly!==\"object\"){err(\"no native wasm support detected\")}var wasmMemory;var wasmTable=new WebAssembly.Table({\"initial\":165,\"maximum\":165+0,\"element\":\"anyfunc\"});var wasmModule;var threadInfoStruct=0;var selfThreadId=0;var ABORT=false;var EXITSTATUS=0;function assert(condition,text){if(!condition){abort(\"Assertion failed: \"+text)}}function getCFunc(ident){var func=Module[\"_\"+ident];assert(func,\"Cannot call unknown function \"+ident+\", make sure it is exported\");return func}function ccall(ident,returnType,argTypes,args,opts){var toC={\"string\":function(str){var ret=0;if(str!==null&&str!==undefined&&str!==0){var len=(str.length<<2)+1;ret=stackAlloc(len);stringToUTF8(str,ret,len)}return ret},\"array\":function(arr){var ret=stackAlloc(arr.length);writeArrayToMemory(arr,ret);return ret}};function convertReturnValue(ret){if(returnType===\"string\")return UTF8ToString(ret);if(returnType===\"boolean\")return Boolean(ret);return ret}var func=getCFunc(ident);var cArgs=[];var 
stack=0;if(args){for(var i=0;i=endIdx)){var u0=heap[idx++];if(!u0)return str;if(!(u0&128)){str+=String.fromCharCode(u0);continue}var u1=heap[idx++]&63;if((u0&224)==192){str+=String.fromCharCode((u0&31)<<6|u1);continue}var u2=heap[idx++]&63;if((u0&240)==224){u0=(u0&15)<<12|u1<<6|u2}else{u0=(u0&7)<<18|u1<<12|u2<<6|heap[idx++]&63}if(u0<65536){str+=String.fromCharCode(u0)}else{var ch=u0-65536;str+=String.fromCharCode(55296|ch>>10,56320|ch&1023)}}return str}function UTF8ToString(ptr,maxBytesToRead){return ptr?UTF8ArrayToString(GROWABLE_HEAP_U8(),ptr,maxBytesToRead):\"\"}function stringToUTF8Array(str,heap,outIdx,maxBytesToWrite){if(!(maxBytesToWrite>0))return 0;var startIdx=outIdx;var endIdx=outIdx+maxBytesToWrite-1;for(var i=0;i=55296&&u<=57343){var u1=str.charCodeAt(++i);u=65536+((u&1023)<<10)|u1&1023}if(u<=127){if(outIdx>=endIdx)break;heap[outIdx++]=u}else if(u<=2047){if(outIdx+1>=endIdx)break;heap[outIdx++]=192|u>>6;heap[outIdx++]=128|u&63}else if(u<=65535){if(outIdx+2>=endIdx)break;heap[outIdx++]=224|u>>12;heap[outIdx++]=128|u>>6&63;heap[outIdx++]=128|u&63}else{if(outIdx+3>=endIdx)break;heap[outIdx++]=240|u>>18;heap[outIdx++]=128|u>>12&63;heap[outIdx++]=128|u>>6&63;heap[outIdx++]=128|u&63}}heap[outIdx]=0;return outIdx-startIdx}function stringToUTF8(str,outPtr,maxBytesToWrite){return stringToUTF8Array(str,GROWABLE_HEAP_U8(),outPtr,maxBytesToWrite)}function lengthBytesUTF8(str){var len=0;for(var i=0;i=55296&&u<=57343)u=65536+((u&1023)<<10)|str.charCodeAt(++i)&1023;if(u<=127)++len;else if(u<=2047)len+=2;else if(u<=65535)len+=3;else len+=4}return len}function writeArrayToMemory(array,buffer){GROWABLE_HEAP_I8().set(array,buffer)}var WASM_PAGE_SIZE=65536;function alignUp(x,multiple){if(x%multiple>0){x+=multiple-x%multiple}return x}var buffer,HEAP8,HEAPU8,HEAP16,HEAPU16,HEAP32,HEAPU32,HEAPF32,HEAPF64;function updateGlobalBufferAndViews(buf){buffer=buf;Module[\"HEAP8\"]=HEAP8=new Int8Array(buf);Module[\"HEAP16\"]=HEAP16=new Int16Array(buf);Module[\"HEAP32\"]=HEAP32=new Int32Array(buf);Module[\"HEAPU8\"]=HEAPU8=new Uint8Array(buf);Module[\"HEAPU16\"]=HEAPU16=new Uint16Array(buf);Module[\"HEAPU32\"]=HEAPU32=new Uint32Array(buf);Module[\"HEAPF32\"]=HEAPF32=new Float32Array(buf);Module[\"HEAPF64\"]=HEAPF64=new Float64Array(buf)}var STACK_BASE=5256384,STACKTOP=STACK_BASE,STACK_MAX=13504,DYNAMIC_BASE=5256384,DYNAMICTOP_PTR=12576;if(ENVIRONMENT_IS_PTHREAD){}var INITIAL_INITIAL_MEMORY=Module[\"INITIAL_MEMORY\"]||16777216;if(ENVIRONMENT_IS_PTHREAD){wasmMemory=Module[\"wasmMemory\"];buffer=Module[\"buffer\"]}else{if(Module[\"wasmMemory\"]){wasmMemory=Module[\"wasmMemory\"]}else{wasmMemory=new WebAssembly.Memory({\"initial\":INITIAL_INITIAL_MEMORY/WASM_PAGE_SIZE,\"maximum\":2147483648/WASM_PAGE_SIZE,\"shared\":true});if(!(wasmMemory.buffer instanceof SharedArrayBuffer)){err(\"requested a shared WebAssembly.Memory but the returned buffer is not a SharedArrayBuffer, indicating that while the browser has SharedArrayBuffer it does not have WebAssembly threads support - you may need to set a flag\");if(ENVIRONMENT_IS_NODE){console.log(\"(on node you may need: --experimental-wasm-threads --experimental-wasm-bulk-memory and also use a recent version)\")}throw Error(\"bad memory\")}}}if(wasmMemory){buffer=wasmMemory.buffer}INITIAL_INITIAL_MEMORY=buffer.byteLength;updateGlobalBufferAndViews(buffer);if(!ENVIRONMENT_IS_PTHREAD){GROWABLE_HEAP_I32()[DYNAMICTOP_PTR>>2]=DYNAMIC_BASE}function callRuntimeCallbacks(callbacks){while(callbacks.length>0){var callback=callbacks.shift();if(typeof 
callback==\"function\"){callback(Module);continue}var func=callback.func;if(typeof func===\"number\"){if(callback.arg===undefined){Module[\"dynCall_v\"](func)}else{Module[\"dynCall_vi\"](func,callback.arg)}}else{func(callback.arg===undefined?null:callback.arg)}}}var __ATPRERUN__=[];var __ATINIT__=[];var __ATMAIN__=[];var __ATEXIT__=[];var __ATPOSTRUN__=[];var runtimeInitialized=false;if(ENVIRONMENT_IS_PTHREAD)runtimeInitialized=true;function preRun(){if(ENVIRONMENT_IS_PTHREAD)return;if(Module[\"preRun\"]){if(typeof Module[\"preRun\"]==\"function\")Module[\"preRun\"]=[Module[\"preRun\"]];while(Module[\"preRun\"].length){addOnPreRun(Module[\"preRun\"].shift())}}callRuntimeCallbacks(__ATPRERUN__)}function initRuntime(){runtimeInitialized=true;callRuntimeCallbacks(__ATINIT__)}function preMain(){if(ENVIRONMENT_IS_PTHREAD)return;callRuntimeCallbacks(__ATMAIN__)}function postRun(){if(ENVIRONMENT_IS_PTHREAD)return;if(Module[\"postRun\"]){if(typeof Module[\"postRun\"]==\"function\")Module[\"postRun\"]=[Module[\"postRun\"]];while(Module[\"postRun\"].length){addOnPostRun(Module[\"postRun\"].shift())}}callRuntimeCallbacks(__ATPOSTRUN__)}function addOnPreRun(cb){__ATPRERUN__.unshift(cb)}function addOnPostRun(cb){__ATPOSTRUN__.unshift(cb)}var Math_ceil=Math.ceil;var Math_floor=Math.floor;var runDependencies=0;var runDependencyWatcher=null;var dependenciesFulfilled=null;function addRunDependency(id){assert(!ENVIRONMENT_IS_PTHREAD,\"addRunDependency cannot be used in a pthread worker\");runDependencies++;if(Module[\"monitorRunDependencies\"]){Module[\"monitorRunDependencies\"](runDependencies)}}function removeRunDependency(id){runDependencies--;if(Module[\"monitorRunDependencies\"]){Module[\"monitorRunDependencies\"](runDependencies)}if(runDependencies==0){if(runDependencyWatcher!==null){clearInterval(runDependencyWatcher);runDependencyWatcher=null}if(dependenciesFulfilled){var callback=dependenciesFulfilled;dependenciesFulfilled=null;callback()}}}Module[\"preloadedImages\"]={};Module[\"preloadedAudios\"]={};function abort(what){if(Module[\"onAbort\"]){Module[\"onAbort\"](what)}if(ENVIRONMENT_IS_PTHREAD)console.error(\"Pthread aborting at \"+(new Error).stack);what+=\"\";out(what);err(what);ABORT=true;EXITSTATUS=1;what=\"abort(\"+what+\"). 
Build with -s ASSERTIONS=1 for more info.\";throw new WebAssembly.RuntimeError(what)}function hasPrefix(str,prefix){return String.prototype.startsWith?str.startsWith(prefix):str.indexOf(prefix)===0}var dataURIPrefix=\"data:application/octet-stream;base64,\";function isDataURI(filename){return hasPrefix(filename,dataURIPrefix)}var fileURIPrefix=\"file://\";function isFileURI(filename){return hasPrefix(filename,fileURIPrefix)}var wasmBinaryFile=\"tfjs-backend-wasm-threaded-simd.wasm\";if(!isDataURI(wasmBinaryFile)){wasmBinaryFile=locateFile(wasmBinaryFile)}function getBinary(){try{if(wasmBinary){return new Uint8Array(wasmBinary)}if(readBinary){return readBinary(wasmBinaryFile)}else{throw\"both async and sync fetching of the wasm failed\"}}catch(err){abort(err)}}function getBinaryPromise(){if(!wasmBinary&&(ENVIRONMENT_IS_WEB||ENVIRONMENT_IS_WORKER)&&typeof fetch===\"function\"&&!isFileURI(wasmBinaryFile)){return fetch(wasmBinaryFile,{credentials:\"same-origin\"}).then(function(response){if(!response[\"ok\"]){throw\"failed to load wasm binary file at '\"+wasmBinaryFile+\"'\"}return response[\"arrayBuffer\"]()}).catch(function(){return getBinary()})}return new Promise(function(resolve,reject){resolve(getBinary())})}function createWasm(){var info={\"a\":asmLibraryArg};function receiveInstance(instance,module){var exports=instance.exports;Module[\"asm\"]=exports;wasmModule=module;if(!ENVIRONMENT_IS_PTHREAD){var numWorkersToLoad=PThread.unusedWorkers.length;PThread.unusedWorkers.forEach(function(w){PThread.loadWasmModuleToWorker(w,function(){if(!--numWorkersToLoad)removeRunDependency(\"wasm-instantiate\")})})}}if(!ENVIRONMENT_IS_PTHREAD){addRunDependency(\"wasm-instantiate\")}function receiveInstantiatedSource(output){receiveInstance(output[\"instance\"],output[\"module\"])}function instantiateArrayBuffer(receiver){return getBinaryPromise().then(function(binary){return WebAssembly.instantiate(binary,info)}).then(receiver,function(reason){err(\"failed to asynchronously prepare wasm: \"+reason);abort(reason)})}function instantiateAsync(){if(!wasmBinary&&typeof WebAssembly.instantiateStreaming===\"function\"&&!isDataURI(wasmBinaryFile)&&!isFileURI(wasmBinaryFile)&&typeof fetch===\"function\"){fetch(wasmBinaryFile,{credentials:\"same-origin\"}).then(function(response){var result=WebAssembly.instantiateStreaming(response,info);return result.then(receiveInstantiatedSource,function(reason){err(\"wasm streaming compile failed: \"+reason);err(\"falling back to ArrayBuffer instantiation\");instantiateArrayBuffer(receiveInstantiatedSource)})})}else{return instantiateArrayBuffer(receiveInstantiatedSource)}}if(Module[\"instantiateWasm\"]){try{var exports=Module[\"instantiateWasm\"](info,receiveInstance);return exports}catch(e){err(\"Module.instantiateWasm callback failed with error: \"+e);return false}}instantiateAsync();return{}}var ASM_CONSTS={};function initPthreadsJS(){PThread.initRuntime()}if(!ENVIRONMENT_IS_PTHREAD)__ATINIT__.push({func:function(){___wasm_call_ctors()}});var __pthread_ptr=0;var __pthread_is_main_runtime_thread=0;var __pthread_is_main_browser_thread=0;function __register_pthread_ptr(pthreadPtr,isMainBrowserThread,isMainRuntimeThread){pthreadPtr=pthreadPtr|0;isMainBrowserThread=isMainBrowserThread|0;isMainRuntimeThread=isMainRuntimeThread|0;__pthread_ptr=pthreadPtr;__pthread_is_main_browser_thread=isMainBrowserThread;__pthread_is_main_runtime_thread=isMainRuntimeThread}Module[\"__register_pthread_ptr\"]=__register_pthread_ptr;var 
ERRNO_CODES={EPERM:63,ENOENT:44,ESRCH:71,EINTR:27,EIO:29,ENXIO:60,E2BIG:1,ENOEXEC:45,EBADF:8,ECHILD:12,EAGAIN:6,EWOULDBLOCK:6,ENOMEM:48,EACCES:2,EFAULT:21,ENOTBLK:105,EBUSY:10,EEXIST:20,EXDEV:75,ENODEV:43,ENOTDIR:54,EISDIR:31,EINVAL:28,ENFILE:41,EMFILE:33,ENOTTY:59,ETXTBSY:74,EFBIG:22,ENOSPC:51,ESPIPE:70,EROFS:69,EMLINK:34,EPIPE:64,EDOM:18,ERANGE:68,ENOMSG:49,EIDRM:24,ECHRNG:106,EL2NSYNC:156,EL3HLT:107,EL3RST:108,ELNRNG:109,EUNATCH:110,ENOCSI:111,EL2HLT:112,EDEADLK:16,ENOLCK:46,EBADE:113,EBADR:114,EXFULL:115,ENOANO:104,EBADRQC:103,EBADSLT:102,EDEADLOCK:16,EBFONT:101,ENOSTR:100,ENODATA:116,ETIME:117,ENOSR:118,ENONET:119,ENOPKG:120,EREMOTE:121,ENOLINK:47,EADV:122,ESRMNT:123,ECOMM:124,EPROTO:65,EMULTIHOP:36,EDOTDOT:125,EBADMSG:9,ENOTUNIQ:126,EBADFD:127,EREMCHG:128,ELIBACC:129,ELIBBAD:130,ELIBSCN:131,ELIBMAX:132,ELIBEXEC:133,ENOSYS:52,ENOTEMPTY:55,ENAMETOOLONG:37,ELOOP:32,EOPNOTSUPP:138,EPFNOSUPPORT:139,ECONNRESET:15,ENOBUFS:42,EAFNOSUPPORT:5,EPROTOTYPE:67,ENOTSOCK:57,ENOPROTOOPT:50,ESHUTDOWN:140,ECONNREFUSED:14,EADDRINUSE:3,ECONNABORTED:13,ENETUNREACH:40,ENETDOWN:38,ETIMEDOUT:73,EHOSTDOWN:142,EHOSTUNREACH:23,EINPROGRESS:26,EALREADY:7,EDESTADDRREQ:17,EMSGSIZE:35,EPROTONOSUPPORT:66,ESOCKTNOSUPPORT:137,EADDRNOTAVAIL:4,ENETRESET:39,EISCONN:30,ENOTCONN:53,ETOOMANYREFS:141,EUSERS:136,EDQUOT:19,ESTALE:72,ENOTSUP:138,ENOMEDIUM:148,EILSEQ:25,EOVERFLOW:61,ECANCELED:11,ENOTRECOVERABLE:56,EOWNERDEAD:62,ESTRPIPE:135};var __main_thread_futex_wait_address=13488;function _emscripten_futex_wake(addr,count){if(addr<=0||addr>GROWABLE_HEAP_I8().length||addr&3!=0||count<0)return-28;if(count==0)return 0;if(count>=2147483647)count=Infinity;var mainThreadWaitAddress=Atomics.load(GROWABLE_HEAP_I32(),__main_thread_futex_wait_address>>2);var mainThreadWoken=0;if(mainThreadWaitAddress==addr){var loadedAddr=Atomics.compareExchange(GROWABLE_HEAP_I32(),__main_thread_futex_wait_address>>2,mainThreadWaitAddress,0);if(loadedAddr==mainThreadWaitAddress){--count;mainThreadWoken=1;if(count<=0)return 1}}var ret=Atomics.notify(GROWABLE_HEAP_I32(),addr>>2,count);if(ret>=0)return ret+mainThreadWoken;throw\"Atomics.notify returned an unexpected value \"+ret}Module[\"_emscripten_futex_wake\"]=_emscripten_futex_wake;function __kill_thread(pthread_ptr){if(ENVIRONMENT_IS_PTHREAD)throw\"Internal Error! _kill_thread() can only ever be called from main application thread!\";if(!pthread_ptr)throw\"Internal Error! Null pthread_ptr in _kill_thread!\";GROWABLE_HEAP_I32()[pthread_ptr+12>>2]=0;var pthread=PThread.pthreads[pthread_ptr];pthread.worker.terminate();PThread.freeThreadData(pthread);PThread.runningWorkers.splice(PThread.runningWorkers.indexOf(pthread.worker),1);pthread.worker.pthread=undefined}function __cancel_thread(pthread_ptr){if(ENVIRONMENT_IS_PTHREAD)throw\"Internal Error! _cancel_thread() can only ever be called from main application thread!\";if(!pthread_ptr)throw\"Internal Error! Null pthread_ptr in _cancel_thread!\";var pthread=PThread.pthreads[pthread_ptr];pthread.worker.postMessage({\"cmd\":\"cancel\"})}function __cleanup_thread(pthread_ptr){if(ENVIRONMENT_IS_PTHREAD)throw\"Internal Error! _cleanup_thread() can only ever be called from main application thread!\";if(!pthread_ptr)throw\"Internal Error! 
Null pthread_ptr in _cleanup_thread!\";GROWABLE_HEAP_I32()[pthread_ptr+12>>2]=0;var pthread=PThread.pthreads[pthread_ptr];if(pthread){var worker=pthread.worker;PThread.returnWorkerToPool(worker)}}var PThread={MAIN_THREAD_ID:1,mainThreadInfo:{schedPolicy:0,schedPrio:0},unusedWorkers:[],runningWorkers:[],initRuntime:function(){__register_pthread_ptr(PThread.mainThreadBlock,!ENVIRONMENT_IS_WORKER,1);_emscripten_register_main_browser_thread_id(PThread.mainThreadBlock)},initMainThreadBlock:function(){var pthreadPoolSize=8;for(var i=0;i>2]=PThread.mainThreadBlock;var headPtr=PThread.mainThreadBlock+156;GROWABLE_HEAP_I32()[headPtr>>2]=headPtr;var tlsMemory=12976;for(var i=0;i<128;++i)GROWABLE_HEAP_U32()[tlsMemory/4+i]=0;Atomics.store(GROWABLE_HEAP_U32(),PThread.mainThreadBlock+104>>2,tlsMemory);Atomics.store(GROWABLE_HEAP_U32(),PThread.mainThreadBlock+40>>2,PThread.mainThreadBlock);Atomics.store(GROWABLE_HEAP_U32(),PThread.mainThreadBlock+44>>2,42)},initWorker:function(){},pthreads:{},exitHandlers:null,setThreadStatus:function(){},runExitHandlers:function(){if(PThread.exitHandlers!==null){while(PThread.exitHandlers.length>0){PThread.exitHandlers.pop()()}PThread.exitHandlers=null}if(ENVIRONMENT_IS_PTHREAD&&threadInfoStruct)___pthread_tsd_run_dtors()},threadExit:function(exitCode){var tb=_pthread_self();if(tb){Atomics.store(GROWABLE_HEAP_U32(),tb+4>>2,exitCode);Atomics.store(GROWABLE_HEAP_U32(),tb+0>>2,1);Atomics.store(GROWABLE_HEAP_U32(),tb+60>>2,1);Atomics.store(GROWABLE_HEAP_U32(),tb+64>>2,0);PThread.runExitHandlers();_emscripten_futex_wake(tb+0,2147483647);__register_pthread_ptr(0,0,0);threadInfoStruct=0;if(ENVIRONMENT_IS_PTHREAD){postMessage({\"cmd\":\"exit\"})}}},threadCancel:function(){PThread.runExitHandlers();Atomics.store(GROWABLE_HEAP_U32(),threadInfoStruct+4>>2,-1);Atomics.store(GROWABLE_HEAP_U32(),threadInfoStruct+0>>2,1);_emscripten_futex_wake(threadInfoStruct+0,2147483647);threadInfoStruct=selfThreadId=0;__register_pthread_ptr(0,0,0);postMessage({\"cmd\":\"cancelDone\"})},terminateAllThreads:function(){for(var t in PThread.pthreads){var pthread=PThread.pthreads[t];if(pthread&&pthread.worker){PThread.returnWorkerToPool(pthread.worker)}}PThread.pthreads={};for(var i=0;i>2];GROWABLE_HEAP_I32()[pthread.threadInfoStruct+104>>2]=0;_free(tlsMemory);_free(pthread.threadInfoStruct)}pthread.threadInfoStruct=0;if(pthread.allocatedOwnStack&&pthread.stackBase)_free(pthread.stackBase);pthread.stackBase=0;if(pthread.worker)pthread.worker.pthread=null},returnWorkerToPool:function(worker){delete PThread.pthreads[worker.pthread.thread];PThread.unusedWorkers.push(worker);PThread.runningWorkers.splice(PThread.runningWorkers.indexOf(worker),1);PThread.freeThreadData(worker.pthread);worker.pthread=undefined},receiveObjectTransfer:function(data){},loadWasmModuleToWorker:function(worker,onFinishedLoading){worker.onmessage=function(e){var d=e[\"data\"];var cmd=d[\"cmd\"];if(worker.pthread)PThread.currentProxiedOperationCallerThread=worker.pthread.threadInfoStruct;if(d[\"targetThread\"]&&d[\"targetThread\"]!=_pthread_self()){var thread=PThread.pthreads[d.targetThread];if(thread){thread.worker.postMessage(e.data,d[\"transferList\"])}else{console.error('Internal error! 
Worker sent a message \"'+cmd+'\" to target pthread '+d[\"targetThread\"]+\", but that thread no longer exists!\")}PThread.currentProxiedOperationCallerThread=undefined;return}if(cmd===\"processQueuedMainThreadWork\"){_emscripten_main_thread_process_queued_calls()}else if(cmd===\"spawnThread\"){__spawn_thread(e.data)}else if(cmd===\"cleanupThread\"){__cleanup_thread(d[\"thread\"])}else if(cmd===\"killThread\"){__kill_thread(d[\"thread\"])}else if(cmd===\"cancelThread\"){__cancel_thread(d[\"thread\"])}else if(cmd===\"loaded\"){worker.loaded=true;if(onFinishedLoading)onFinishedLoading(worker);if(worker.runPthread){worker.runPthread();delete worker.runPthread}}else if(cmd===\"print\"){out(\"Thread \"+d[\"threadId\"]+\": \"+d[\"text\"])}else if(cmd===\"printErr\"){err(\"Thread \"+d[\"threadId\"]+\": \"+d[\"text\"])}else if(cmd===\"alert\"){alert(\"Thread \"+d[\"threadId\"]+\": \"+d[\"text\"])}else if(cmd===\"exit\"){var detached=worker.pthread&&Atomics.load(GROWABLE_HEAP_U32(),worker.pthread.thread+68>>2);if(detached){PThread.returnWorkerToPool(worker)}}else if(cmd===\"cancelDone\"){PThread.returnWorkerToPool(worker)}else if(cmd===\"objectTransfer\"){PThread.receiveObjectTransfer(e.data)}else if(e.data.target===\"setimmediate\"){worker.postMessage(e.data)}else{err(\"worker sent an unknown command \"+cmd)}PThread.currentProxiedOperationCallerThread=undefined};worker.onerror=function(e){err(\"pthread sent an error! \"+e.filename+\":\"+e.lineno+\": \"+e.message)};if(ENVIRONMENT_IS_NODE){worker.on(\"message\",function(data){worker.onmessage({data:data})});worker.on(\"error\",function(data){worker.onerror(data)});worker.on(\"exit\",function(data){console.log(\"worker exited - TODO: update the worker queue?\")})}worker.postMessage({\"cmd\":\"load\",\"urlOrBlob\":Module[\"mainScriptUrlOrBlob\"]||_scriptDir,\"wasmMemory\":wasmMemory,\"wasmModule\":wasmModule,\"DYNAMIC_BASE\":DYNAMIC_BASE,\"DYNAMICTOP_PTR\":DYNAMICTOP_PTR})},allocateUnusedWorker:function(){var pthreadMainJs=locateFile(\"tfjs-backend-wasm-threaded-simd.worker.js\");PThread.unusedWorkers.push(new Worker(pthreadMainJs))},getNewWorker:function(){if(PThread.unusedWorkers.length==0){PThread.allocateUnusedWorker();PThread.loadWasmModuleToWorker(PThread.unusedWorkers[0])}if(PThread.unusedWorkers.length>0)return PThread.unusedWorkers.pop();else return null},busySpinWait:function(msecs){var t=performance.now()+msecs;while(performance.now()>2]=value;return value}function _atexit(func,arg){if(ENVIRONMENT_IS_PTHREAD)return _emscripten_proxy_to_main_thread_js(1,1,func,arg);__ATEXIT__.unshift({func:func,arg:arg})}function __emscripten_notify_thread_queue(targetThreadId,mainThreadId){if(targetThreadId==mainThreadId){postMessage({\"cmd\":\"processQueuedMainThreadWork\"})}else if(ENVIRONMENT_IS_PTHREAD){postMessage({\"targetThread\":targetThreadId,\"cmd\":\"processThreadQueue\"})}else{var pthread=PThread.pthreads[targetThreadId];var worker=pthread&&pthread.worker;if(!worker){return}worker.postMessage({\"cmd\":\"processThreadQueue\"})}return 1}function _abort(){abort()}function _emscripten_conditional_set_current_thread_status(expectedStatus,newStatus){expectedStatus=expectedStatus|0;newStatus=newStatus|0}function _emscripten_futex_wait(addr,val,timeout){if(addr<=0||addr>GROWABLE_HEAP_I8().length||addr&3!=0)return-28;if(ENVIRONMENT_IS_WORKER){var ret=Atomics.wait(GROWABLE_HEAP_I32(),addr>>2,val,timeout);if(ret===\"timed-out\")return-73;if(ret===\"not-equal\")return-6;if(ret===\"ok\")return 0;throw\"Atomics.wait returned an unexpected value 
\"+ret}else{var loadedVal=Atomics.load(GROWABLE_HEAP_I32(),addr>>2);if(val!=loadedVal)return-6;var tNow=performance.now();var tEnd=tNow+timeout;Atomics.store(GROWABLE_HEAP_I32(),__main_thread_futex_wait_address>>2,addr);var ourWaitAddress=addr;while(addr==ourWaitAddress){tNow=performance.now();if(tNow>tEnd){return-73}_emscripten_main_thread_process_queued_calls();addr=Atomics.load(GROWABLE_HEAP_I32(),__main_thread_futex_wait_address>>2)}return 0}}function _emscripten_is_main_browser_thread(){return __pthread_is_main_browser_thread|0}function _emscripten_is_main_runtime_thread(){return __pthread_is_main_runtime_thread|0}function _emscripten_memcpy_big(dest,src,num){GROWABLE_HEAP_U8().copyWithin(dest,src,src+num)}function _emscripten_num_logical_cores(){return navigator[\"hardwareConcurrency\"]}function _emscripten_proxy_to_main_thread_js(index,sync){var numCallArgs=arguments.length-2;var stack=stackSave();var args=stackAlloc(numCallArgs*8);var b=args>>3;for(var i=0;i>3]);buf+=8}else{buf=buf+3&~3;args.push(GROWABLE_HEAP_I32()[buf>>2]);buf+=4}}return args}function _emscripten_receive_on_main_thread_js(index,numCallArgs,args){_emscripten_receive_on_main_thread_js_callArgs.length=numCallArgs;var b=args>>3;for(var i=0;i>>16);updateGlobalBufferAndViews(wasmMemory.buffer);return 1}catch(e){}}function _emscripten_resize_heap(requestedSize){requestedSize=requestedSize>>>0;var oldSize=_emscripten_get_heap_size();if(requestedSize<=oldSize){return false}var PAGE_MULTIPLE=65536;var maxHeapSize=2147483648;if(requestedSize>maxHeapSize){return false}var minHeapSize=16777216;for(var cutDown=1;cutDown<=4;cutDown*=2){var overGrownHeapSize=oldSize*(1+.2/cutDown);overGrownHeapSize=Math.min(overGrownHeapSize,requestedSize+100663296);var newSize=Math.min(maxHeapSize,alignUp(Math.max(minHeapSize,requestedSize,overGrownHeapSize),PAGE_MULTIPLE));var replacement=emscripten_realloc_buffer(newSize);if(replacement){return true}}return false}var JSEvents={keyEvent:0,mouseEvent:0,wheelEvent:0,uiEvent:0,focusEvent:0,deviceOrientationEvent:0,deviceMotionEvent:0,fullscreenChangeEvent:0,pointerlockChangeEvent:0,visibilityChangeEvent:0,touchEvent:0,previousFullscreenElement:null,previousScreenX:null,previousScreenY:null,removeEventListenersRegistered:false,removeAllEventListeners:function(){for(var i=JSEvents.eventHandlers.length-1;i>=0;--i){JSEvents._removeHandler(i)}JSEvents.eventHandlers=[];JSEvents.deferredCalls=[]},registerRemoveEventListeners:function(){if(!JSEvents.removeEventListenersRegistered){__ATEXIT__.push(JSEvents.removeAllEventListeners);JSEvents.removeEventListenersRegistered=true}},deferredCalls:[],deferCall:function(targetFunction,precedence,argsList){function arraysHaveEqualContent(arrA,arrB){if(arrA.length!=arrB.length)return false;for(var i in arrA){if(arrA[i]!=arrB[i])return false}return true}for(var i in JSEvents.deferredCalls){var call=JSEvents.deferredCalls[i];if(call.targetFunction==targetFunction&&arraysHaveEqualContent(call.argsList,argsList)){return}}JSEvents.deferredCalls.push({targetFunction:targetFunction,precedence:precedence,argsList:argsList});JSEvents.deferredCalls.sort(function(x,y){return x.precedence>2]=eventTypeId;GROWABLE_HEAP_I32()[varargs+4>>2]=eventData;GROWABLE_HEAP_I32()[varargs+8>>2]=userData;_emscripten_async_queue_on_thread_(targetThread,637534208,eventHandlerFunc,eventData,varargs);stackRestore(stackTop)},getTargetThreadForEventCallback:function(targetThread){switch(targetThread){case 1:return 0;case 2:return PThread.currentProxiedOperationCallerThread;default:return 
targetThread}},getNodeNameForTarget:function(target){if(!target)return\"\";if(target==window)return\"#window\";if(target==screen)return\"#screen\";return target&&target.nodeName?target.nodeName:\"\"},fullscreenEnabled:function(){return document.fullscreenEnabled||document.webkitFullscreenEnabled}};function stringToNewUTF8(jsString){var length=lengthBytesUTF8(jsString)+1;var cString=_malloc(length);stringToUTF8(jsString,cString,length);return cString}function _emscripten_set_offscreencanvas_size_on_target_thread_js(targetThread,targetCanvas,width,height){var stackTop=stackSave();var varargs=stackAlloc(12);var targetCanvasPtr=0;if(targetCanvas){targetCanvasPtr=stringToNewUTF8(targetCanvas)}GROWABLE_HEAP_I32()[varargs>>2]=targetCanvasPtr;GROWABLE_HEAP_I32()[varargs+4>>2]=width;GROWABLE_HEAP_I32()[varargs+8>>2]=height;_emscripten_async_queue_on_thread_(targetThread,657457152,0,targetCanvasPtr,varargs);stackRestore(stackTop)}function _emscripten_set_offscreencanvas_size_on_target_thread(targetThread,targetCanvas,width,height){targetCanvas=targetCanvas?UTF8ToString(targetCanvas):\"\";_emscripten_set_offscreencanvas_size_on_target_thread_js(targetThread,targetCanvas,width,height)}function __maybeCStringToJsString(cString){return cString>2?UTF8ToString(cString):cString}var specialHTMLTargets=[0,typeof document!==\"undefined\"?document:0,typeof window!==\"undefined\"?window:0];function __findEventTarget(target){target=__maybeCStringToJsString(target);var domElement=specialHTMLTargets[target]||(typeof document!==\"undefined\"?document.querySelector(target):undefined);return domElement}function __findCanvasEventTarget(target){return __findEventTarget(target)}function _emscripten_set_canvas_element_size_calling_thread(target,width,height){var canvas=__findCanvasEventTarget(target);if(!canvas)return-4;if(canvas.canvasSharedPtr){GROWABLE_HEAP_I32()[canvas.canvasSharedPtr>>2]=width;GROWABLE_HEAP_I32()[canvas.canvasSharedPtr+4>>2]=height}if(canvas.offscreenCanvas||!canvas.controlTransferredOffscreen){if(canvas.offscreenCanvas)canvas=canvas.offscreenCanvas;var autoResizeViewport=false;if(canvas.GLctxObject&&canvas.GLctxObject.GLctx){var prevViewport=canvas.GLctxObject.GLctx.getParameter(2978);autoResizeViewport=prevViewport[0]===0&&prevViewport[1]===0&&prevViewport[2]===canvas.width&&prevViewport[3]===canvas.height}canvas.width=width;canvas.height=height;if(autoResizeViewport){canvas.GLctxObject.GLctx.viewport(0,0,width,height)}}else if(canvas.canvasSharedPtr){var targetThread=GROWABLE_HEAP_I32()[canvas.canvasSharedPtr+8>>2];_emscripten_set_offscreencanvas_size_on_target_thread(targetThread,target,width,height);return 1}else{return-4}return 0}function _emscripten_set_canvas_element_size_main_thread(target,width,height){if(ENVIRONMENT_IS_PTHREAD)return _emscripten_proxy_to_main_thread_js(2,1,target,width,height);return _emscripten_set_canvas_element_size_calling_thread(target,width,height)}function _emscripten_set_canvas_element_size(target,width,height){var canvas=__findCanvasEventTarget(target);if(canvas){return _emscripten_set_canvas_element_size_calling_thread(target,width,height)}else{return _emscripten_set_canvas_element_size_main_thread(target,width,height)}}function _emscripten_set_current_thread_status(newStatus){newStatus=newStatus|0}function _emscripten_set_thread_name(threadId,name){threadId=threadId|0;name=name|0}function __webgl_enable_ANGLE_instanced_arrays(ctx){var 
ext=ctx.getExtension(\"ANGLE_instanced_arrays\");if(ext){ctx[\"vertexAttribDivisor\"]=function(index,divisor){ext[\"vertexAttribDivisorANGLE\"](index,divisor)};ctx[\"drawArraysInstanced\"]=function(mode,first,count,primcount){ext[\"drawArraysInstancedANGLE\"](mode,first,count,primcount)};ctx[\"drawElementsInstanced\"]=function(mode,count,type,indices,primcount){ext[\"drawElementsInstancedANGLE\"](mode,count,type,indices,primcount)};return 1}}function __webgl_enable_OES_vertex_array_object(ctx){var ext=ctx.getExtension(\"OES_vertex_array_object\");if(ext){ctx[\"createVertexArray\"]=function(){return ext[\"createVertexArrayOES\"]()};ctx[\"deleteVertexArray\"]=function(vao){ext[\"deleteVertexArrayOES\"](vao)};ctx[\"bindVertexArray\"]=function(vao){ext[\"bindVertexArrayOES\"](vao)};ctx[\"isVertexArray\"]=function(vao){return ext[\"isVertexArrayOES\"](vao)};return 1}}function __webgl_enable_WEBGL_draw_buffers(ctx){var ext=ctx.getExtension(\"WEBGL_draw_buffers\");if(ext){ctx[\"drawBuffers\"]=function(n,bufs){ext[\"drawBuffersWEBGL\"](n,bufs)};return 1}}var GL={counter:1,lastError:0,buffers:[],mappedBuffers:{},programs:[],framebuffers:[],renderbuffers:[],textures:[],uniforms:[],shaders:[],vaos:[],contexts:{},currentContext:null,offscreenCanvases:{},timerQueriesEXT:[],programInfos:{},stringCache:{},unpackAlignment:4,init:function(){var miniTempFloatBuffer=new Float32Array(GL.MINI_TEMP_BUFFER_SIZE);for(var i=0;i>2]:-1;source+=UTF8ToString(GROWABLE_HEAP_I32()[string+i*4>>2],len<0?undefined:len)}return source},createContext:function(canvas,webGLContextAttributes){var ctx=canvas.getContext(\"webgl\",webGLContextAttributes);if(!ctx)return 0;var handle=GL.registerContext(ctx,webGLContextAttributes);return handle},registerContext:function(ctx,webGLContextAttributes){var handle=_malloc(8);GROWABLE_HEAP_I32()[handle+4>>2]=_pthread_self();var context={handle:handle,attributes:webGLContextAttributes,version:webGLContextAttributes.majorVersion,GLctx:ctx};if(ctx.canvas)ctx.canvas.GLctxObject=context;GL.contexts[handle]=context;if(typeof webGLContextAttributes.enableExtensionsByDefault===\"undefined\"||webGLContextAttributes.enableExtensionsByDefault){GL.initExtensions(context)}return handle},makeContextCurrent:function(contextHandle){GL.currentContext=GL.contexts[contextHandle];Module.ctx=GLctx=GL.currentContext&&GL.currentContext.GLctx;return!(contextHandle&&!GLctx)},getContext:function(contextHandle){return GL.contexts[contextHandle]},deleteContext:function(contextHandle){if(GL.currentContext===GL.contexts[contextHandle])GL.currentContext=null;if(typeof JSEvents===\"object\")JSEvents.removeAllHandlersOnTarget(GL.contexts[contextHandle].GLctx.canvas);if(GL.contexts[contextHandle]&&GL.contexts[contextHandle].GLctx.canvas)GL.contexts[contextHandle].GLctx.canvas.GLctxObject=undefined;_free(GL.contexts[contextHandle].handle);GL.contexts[contextHandle]=null},initExtensions:function(context){if(!context)context=GL.currentContext;if(context.initExtensionsDone)return;context.initExtensionsDone=true;var GLctx=context.GLctx;__webgl_enable_ANGLE_instanced_arrays(GLctx);__webgl_enable_OES_vertex_array_object(GLctx);__webgl_enable_WEBGL_draw_buffers(GLctx);GLctx.disjointTimerQueryExt=GLctx.getExtension(\"EXT_disjoint_timer_query\");var 
automaticallyEnabledExtensions=[\"OES_texture_float\",\"OES_texture_half_float\",\"OES_standard_derivatives\",\"OES_vertex_array_object\",\"WEBGL_compressed_texture_s3tc\",\"WEBGL_depth_texture\",\"OES_element_index_uint\",\"EXT_texture_filter_anisotropic\",\"EXT_frag_depth\",\"WEBGL_draw_buffers\",\"ANGLE_instanced_arrays\",\"OES_texture_float_linear\",\"OES_texture_half_float_linear\",\"EXT_blend_minmax\",\"EXT_shader_texture_lod\",\"EXT_texture_norm16\",\"WEBGL_compressed_texture_pvrtc\",\"EXT_color_buffer_half_float\",\"WEBGL_color_buffer_float\",\"EXT_sRGB\",\"WEBGL_compressed_texture_etc1\",\"EXT_disjoint_timer_query\",\"WEBGL_compressed_texture_etc\",\"WEBGL_compressed_texture_astc\",\"EXT_color_buffer_float\",\"WEBGL_compressed_texture_s3tc_srgb\",\"EXT_disjoint_timer_query_webgl2\",\"WEBKIT_WEBGL_compressed_texture_pvrtc\"];var exts=GLctx.getSupportedExtensions()||[];exts.forEach(function(ext){if(automaticallyEnabledExtensions.indexOf(ext)!=-1){GLctx.getExtension(ext)}})},populateUniformTable:function(program){var p=GL.programs[program];var ptable=GL.programInfos[program]={uniforms:{},maxUniformLength:0,maxAttributeLength:-1,maxUniformBlockNameLength:-1};var utable=ptable.uniforms;var numUniforms=GLctx.getProgramParameter(p,35718);for(var i=0;i>2;contextAttributes[\"alpha\"]=!!GROWABLE_HEAP_I32()[a+(0>>2)];contextAttributes[\"depth\"]=!!GROWABLE_HEAP_I32()[a+(4>>2)];contextAttributes[\"stencil\"]=!!GROWABLE_HEAP_I32()[a+(8>>2)];contextAttributes[\"antialias\"]=!!GROWABLE_HEAP_I32()[a+(12>>2)];contextAttributes[\"premultipliedAlpha\"]=!!GROWABLE_HEAP_I32()[a+(16>>2)];contextAttributes[\"preserveDrawingBuffer\"]=!!GROWABLE_HEAP_I32()[a+(20>>2)];var powerPreference=GROWABLE_HEAP_I32()[a+(24>>2)];contextAttributes[\"powerPreference\"]=__emscripten_webgl_power_preferences[powerPreference];contextAttributes[\"failIfMajorPerformanceCaveat\"]=!!GROWABLE_HEAP_I32()[a+(28>>2)];contextAttributes.majorVersion=GROWABLE_HEAP_I32()[a+(32>>2)];contextAttributes.minorVersion=GROWABLE_HEAP_I32()[a+(36>>2)];contextAttributes.enableExtensionsByDefault=GROWABLE_HEAP_I32()[a+(40>>2)];contextAttributes.explicitSwapControl=GROWABLE_HEAP_I32()[a+(44>>2)];contextAttributes.proxyContextToMainThread=GROWABLE_HEAP_I32()[a+(48>>2)];contextAttributes.renderViaOffscreenBackBuffer=GROWABLE_HEAP_I32()[a+(52>>2)];var canvas=__findCanvasEventTarget(target);if(!canvas){return-4}if(contextAttributes.explicitSwapControl){return-1}var contextHandle=GL.createContext(canvas,contextAttributes);return contextHandle}function _emscripten_webgl_create_context(a0,a1){return _emscripten_webgl_do_create_context(a0,a1)}var PATH={splitPath:function(filename){var splitPathRe=/^(\\/?|)([\\s\\S]*?)((?:\\.{1,2}|[^\\/]+?|)(\\.[^.\\/]*|))(?:[\\/]*)$/;return splitPathRe.exec(filename).slice(1)},normalizeArray:function(parts,allowAboveRoot){var up=0;for(var i=parts.length-1;i>=0;i--){var last=parts[i];if(last===\".\"){parts.splice(i,1)}else if(last===\"..\"){parts.splice(i,1);up++}else if(up){parts.splice(i,1);up--}}if(allowAboveRoot){for(;up;up--){parts.unshift(\"..\")}}return parts},normalize:function(path){var isAbsolute=path.charAt(0)===\"/\",trailingSlash=path.substr(-1)===\"/\";path=PATH.normalizeArray(path.split(\"/\").filter(function(p){return!!p}),!isAbsolute).join(\"/\");if(!path&&!isAbsolute){path=\".\"}if(path&&trailingSlash){path+=\"/\"}return(isAbsolute?\"/\":\"\")+path},dirname:function(path){var result=PATH.splitPath(path),root=result[0],dir=result[1];if(!root&&!dir){return\".\"}if(dir){dir=dir.substr(0,dir.length-1)}return 
root+dir},basename:function(path){if(path===\"/\")return\"/\";var lastSlash=path.lastIndexOf(\"/\");if(lastSlash===-1)return path;return path.substr(lastSlash+1)},extname:function(path){return PATH.splitPath(path)[3]},join:function(){var paths=Array.prototype.slice.call(arguments,0);return PATH.normalize(paths.join(\"/\"))},join2:function(l,r){return PATH.normalize(l+\"/\"+r)}};var SYSCALLS={mappings:{},buffers:[null,[],[]],printChar:function(stream,curr){var buffer=SYSCALLS.buffers[stream];if(curr===0||curr===10){(stream===1?out:err)(UTF8ArrayToString(buffer,0));buffer.length=0}else{buffer.push(curr)}},varargs:undefined,get:function(){SYSCALLS.varargs+=4;var ret=GROWABLE_HEAP_I32()[SYSCALLS.varargs-4>>2];return ret},getStr:function(ptr){var ret=UTF8ToString(ptr);return ret},get64:function(low,high){return low}};function _fd_close(fd){if(ENVIRONMENT_IS_PTHREAD)return _emscripten_proxy_to_main_thread_js(3,1,fd);return 0}function _fd_seek(fd,offset_low,offset_high,whence,newOffset){if(ENVIRONMENT_IS_PTHREAD)return _emscripten_proxy_to_main_thread_js(4,1,fd,offset_low,offset_high,whence,newOffset)}function _fd_write(fd,iov,iovcnt,pnum){if(ENVIRONMENT_IS_PTHREAD)return _emscripten_proxy_to_main_thread_js(5,1,fd,iov,iovcnt,pnum);var num=0;for(var i=0;i>2];var len=GROWABLE_HEAP_I32()[iov+(i*8+4)>>2];for(var j=0;j>2]=num;return 0}function _pthread_cleanup_pop(execute){var routine=PThread.exitHandlers.pop();if(execute)routine()}function _pthread_cleanup_push(routine,arg){if(PThread.exitHandlers===null){PThread.exitHandlers=[]}PThread.exitHandlers.push(function(){dynCall_vi(routine,arg)})}function __spawn_thread(threadParams){if(ENVIRONMENT_IS_PTHREAD)throw\"Internal Error! _spawn_thread() can only ever be called from main application thread!\";var worker=PThread.getNewWorker();if(worker.pthread!==undefined)throw\"Internal error!\";if(!threadParams.pthread_ptr)throw\"Internal error, no pthread ptr!\";PThread.runningWorkers.push(worker);var tlsMemory=_malloc(128*4);for(var i=0;i<128;++i){GROWABLE_HEAP_I32()[tlsMemory+i*4>>2]=0}var stackHigh=threadParams.stackBase+threadParams.stackSize;var pthread=PThread.pthreads[threadParams.pthread_ptr]={worker:worker,stackBase:threadParams.stackBase,stackSize:threadParams.stackSize,allocatedOwnStack:threadParams.allocatedOwnStack,thread:threadParams.pthread_ptr,threadInfoStruct:threadParams.pthread_ptr};var tis=pthread.threadInfoStruct>>2;Atomics.store(GROWABLE_HEAP_U32(),tis+(0>>2),0);Atomics.store(GROWABLE_HEAP_U32(),tis+(4>>2),0);Atomics.store(GROWABLE_HEAP_U32(),tis+(8>>2),0);Atomics.store(GROWABLE_HEAP_U32(),tis+(68>>2),threadParams.detached);Atomics.store(GROWABLE_HEAP_U32(),tis+(104>>2),tlsMemory);Atomics.store(GROWABLE_HEAP_U32(),tis+(48>>2),0);Atomics.store(GROWABLE_HEAP_U32(),tis+(40>>2),pthread.threadInfoStruct);Atomics.store(GROWABLE_HEAP_U32(),tis+(44>>2),42);Atomics.store(GROWABLE_HEAP_U32(),tis+(108>>2),threadParams.stackSize);Atomics.store(GROWABLE_HEAP_U32(),tis+(84>>2),threadParams.stackSize);Atomics.store(GROWABLE_HEAP_U32(),tis+(80>>2),stackHigh);Atomics.store(GROWABLE_HEAP_U32(),tis+(108+8>>2),stackHigh);Atomics.store(GROWABLE_HEAP_U32(),tis+(108+12>>2),threadParams.detached);Atomics.store(GROWABLE_HEAP_U32(),tis+(108+20>>2),threadParams.schedPolicy);Atomics.store(GROWABLE_HEAP_U32(),tis+(108+24>>2),threadParams.schedPrio);var global_libc=_emscripten_get_global_libc();var global_locale=global_libc+40;Atomics.store(GROWABLE_HEAP_U32(),tis+(176>>2),global_locale);worker.pthread=pthread;var 
msg={\"cmd\":\"run\",\"start_routine\":threadParams.startRoutine,\"arg\":threadParams.arg,\"threadInfoStruct\":threadParams.pthread_ptr,\"selfThreadId\":threadParams.pthread_ptr,\"parentThreadId\":threadParams.parent_pthread_ptr,\"stackBase\":threadParams.stackBase,\"stackSize\":threadParams.stackSize};worker.runPthread=function(){msg.time=performance.now();worker.postMessage(msg,threadParams.transferList)};if(worker.loaded){worker.runPthread();delete worker.runPthread}}function _pthread_getschedparam(thread,policy,schedparam){if(!policy&&!schedparam)return ERRNO_CODES.EINVAL;if(!thread){err(\"pthread_getschedparam called with a null thread pointer!\");return ERRNO_CODES.ESRCH}var self=GROWABLE_HEAP_I32()[thread+12>>2];if(self!==thread){err(\"pthread_getschedparam attempted on thread \"+thread+\", which does not point to a valid thread, or does not exist anymore!\");return ERRNO_CODES.ESRCH}var schedPolicy=Atomics.load(GROWABLE_HEAP_U32(),thread+108+20>>2);var schedPrio=Atomics.load(GROWABLE_HEAP_U32(),thread+108+24>>2);if(policy)GROWABLE_HEAP_I32()[policy>>2]=schedPolicy;if(schedparam)GROWABLE_HEAP_I32()[schedparam>>2]=schedPrio;return 0}function _pthread_self(){return __pthread_ptr|0}Module[\"_pthread_self\"]=_pthread_self;function _pthread_create(pthread_ptr,attr,start_routine,arg){if(typeof SharedArrayBuffer===\"undefined\"){err(\"Current environment does not support SharedArrayBuffer, pthreads are not available!\");return 6}if(!pthread_ptr){err(\"pthread_create called with a null thread pointer!\");return 28}var transferList=[];var error=0;if(ENVIRONMENT_IS_PTHREAD&&(transferList.length===0||error)){return _emscripten_sync_run_in_main_thread_4(687865856,pthread_ptr,attr,start_routine,arg)}if(error)return error;var stackSize=0;var stackBase=0;var detached=0;var schedPolicy=0;var schedPrio=0;if(attr){stackSize=GROWABLE_HEAP_I32()[attr>>2];stackSize+=81920;stackBase=GROWABLE_HEAP_I32()[attr+8>>2];detached=GROWABLE_HEAP_I32()[attr+12>>2]!==0;var inheritSched=GROWABLE_HEAP_I32()[attr+16>>2]===0;if(inheritSched){var prevSchedPolicy=GROWABLE_HEAP_I32()[attr+20>>2];var prevSchedPrio=GROWABLE_HEAP_I32()[attr+24>>2];var parentThreadPtr=PThread.currentProxiedOperationCallerThread?PThread.currentProxiedOperationCallerThread:_pthread_self();_pthread_getschedparam(parentThreadPtr,attr+20,attr+24);schedPolicy=GROWABLE_HEAP_I32()[attr+20>>2];schedPrio=GROWABLE_HEAP_I32()[attr+24>>2];GROWABLE_HEAP_I32()[attr+20>>2]=prevSchedPolicy;GROWABLE_HEAP_I32()[attr+24>>2]=prevSchedPrio}else{schedPolicy=GROWABLE_HEAP_I32()[attr+20>>2];schedPrio=GROWABLE_HEAP_I32()[attr+24>>2]}}else{stackSize=2097152}var allocatedOwnStack=stackBase==0;if(allocatedOwnStack){stackBase=_memalign(16,stackSize)}else{stackBase-=stackSize;assert(stackBase>0)}var threadInfoStruct=_malloc(232);for(var i=0;i<232>>2;++i)GROWABLE_HEAP_U32()[(threadInfoStruct>>2)+i]=0;GROWABLE_HEAP_I32()[pthread_ptr>>2]=threadInfoStruct;GROWABLE_HEAP_I32()[threadInfoStruct+12>>2]=threadInfoStruct;var headPtr=threadInfoStruct+156;GROWABLE_HEAP_I32()[headPtr>>2]=headPtr;var threadParams={stackBase:stackBase,stackSize:stackSize,allocatedOwnStack:allocatedOwnStack,schedPolicy:schedPolicy,schedPrio:schedPrio,detached:detached,startRoutine:start_routine,pthread_ptr:threadInfoStruct,parent_pthread_ptr:_pthread_self(),arg:arg,transferList:transferList};if(ENVIRONMENT_IS_PTHREAD){threadParams.cmd=\"spawnThread\";postMessage(threadParams,transferList)}else{__spawn_thread(threadParams)}return 0}function _roundf(d){d=+d;return d>=+0?+Math_floor(d+ 
+.5):+Math_ceil(d-+.5)}function _sysconf(name){if(ENVIRONMENT_IS_PTHREAD)return _emscripten_proxy_to_main_thread_js(6,1,name);switch(name){case 30:return 16384;case 85:var maxHeapSize=2147483648;return maxHeapSize/16384;case 132:case 133:case 12:case 137:case 138:case 15:case 235:case 16:case 17:case 18:case 19:case 20:case 149:case 13:case 10:case 236:case 153:case 9:case 21:case 22:case 159:case 154:case 14:case 77:case 78:case 139:case 80:case 81:case 82:case 68:case 67:case 164:case 11:case 29:case 47:case 48:case 95:case 52:case 51:case 46:case 79:return 200809;case 27:case 246:case 127:case 128:case 23:case 24:case 160:case 161:case 181:case 182:case 242:case 183:case 184:case 243:case 244:case 245:case 165:case 178:case 179:case 49:case 50:case 168:case 169:case 175:case 170:case 171:case 172:case 97:case 76:case 32:case 173:case 35:return-1;case 176:case 177:case 7:case 155:case 8:case 157:case 125:case 126:case 92:case 93:case 129:case 130:case 131:case 94:case 91:return 1;case 74:case 60:case 69:case 70:case 4:return 1024;case 31:case 42:case 72:return 32;case 87:case 26:case 33:return 2147483647;case 34:case 1:return 47839;case 38:case 36:return 99;case 43:case 37:return 2048;case 0:return 2097152;case 3:return 65536;case 28:return 32768;case 44:return 32767;case 75:return 16384;case 39:return 1e3;case 89:return 700;case 71:return 256;case 40:return 255;case 2:return 100;case 180:return 64;case 25:return 20;case 5:return 16;case 6:return 6;case 73:return 4;case 84:{if(typeof navigator===\"object\")return navigator[\"hardwareConcurrency\"]||1;return 1}}setErrNo(28);return-1}if(!ENVIRONMENT_IS_PTHREAD)PThread.initMainThreadBlock();else PThread.initWorker();var GLctx;GL.init();var proxiedFunctionTable=[null,_atexit,_emscripten_set_canvas_element_size_main_thread,_fd_close,_fd_seek,_fd_write,_sysconf];var asmLibraryArg={\"e\":___assert_fail,\"r\":___call_main,\"w\":__emscripten_notify_thread_queue,\"a\":_abort,\"l\":_emscripten_conditional_set_current_thread_status,\"d\":_emscripten_futex_wait,\"c\":_emscripten_futex_wake,\"h\":_emscripten_get_now,\"g\":_emscripten_is_main_browser_thread,\"x\":_emscripten_is_main_runtime_thread,\"q\":_emscripten_memcpy_big,\"B\":_emscripten_num_logical_cores,\"t\":_emscripten_receive_on_main_thread_js,\"A\":_emscripten_resize_heap,\"u\":_emscripten_set_canvas_element_size,\"k\":_emscripten_set_current_thread_status,\"s\":_emscripten_set_thread_name,\"v\":_emscripten_webgl_create_context,\"m\":_fd_close,\"o\":_fd_seek,\"i\":_fd_write,\"p\":initPthreadsJS,\"memory\":wasmMemory||Module[\"wasmMemory\"],\"y\":_pthread_cleanup_pop,\"z\":_pthread_cleanup_push,\"j\":_pthread_create,\"b\":_pthread_self,\"f\":_roundf,\"n\":_sysconf,\"table\":wasmTable};var asm=createWasm();Module[\"asm\"]=asm;var ___wasm_call_ctors=Module[\"___wasm_call_ctors\"]=function(){return(___wasm_call_ctors=Module[\"___wasm_call_ctors\"]=Module[\"asm\"][\"C\"]).apply(null,arguments)};var _init=Module[\"_init\"]=function(){return(_init=Module[\"_init\"]=Module[\"asm\"][\"D\"]).apply(null,arguments)};var _register_tensor=Module[\"_register_tensor\"]=function(){return(_register_tensor=Module[\"_register_tensor\"]=Module[\"asm\"][\"E\"]).apply(null,arguments)};var _dispose_data=Module[\"_dispose_data\"]=function(){return(_dispose_data=Module[\"_dispose_data\"]=Module[\"asm\"][\"F\"]).apply(null,arguments)};var _dispose=Module[\"_dispose\"]=function(){return(_dispose=Module[\"_dispose\"]=Module[\"asm\"][\"G\"]).apply(null,arguments)};var 
_Abs=Module[\"_Abs\"]=function(){return(_Abs=Module[\"_Abs\"]=Module[\"asm\"][\"H\"]).apply(null,arguments)};var _Add=Module[\"_Add\"]=function(){return(_Add=Module[\"_Add\"]=Module[\"asm\"][\"I\"]).apply(null,arguments)};var _AddN=Module[\"_AddN\"]=function(){return(_AddN=Module[\"_AddN\"]=Module[\"asm\"][\"J\"]).apply(null,arguments)};var _ArgMax=Module[\"_ArgMax\"]=function(){return(_ArgMax=Module[\"_ArgMax\"]=Module[\"asm\"][\"K\"]).apply(null,arguments)};var _AvgPool=Module[\"_AvgPool\"]=function(){return(_AvgPool=Module[\"_AvgPool\"]=Module[\"asm\"][\"L\"]).apply(null,arguments)};var _BatchMatMul=Module[\"_BatchMatMul\"]=function(){return(_BatchMatMul=Module[\"_BatchMatMul\"]=Module[\"asm\"][\"M\"]).apply(null,arguments)};var _ClipByValue=Module[\"_ClipByValue\"]=function(){return(_ClipByValue=Module[\"_ClipByValue\"]=Module[\"asm\"][\"N\"]).apply(null,arguments)};var _Conv2D=Module[\"_Conv2D\"]=function(){return(_Conv2D=Module[\"_Conv2D\"]=Module[\"asm\"][\"O\"]).apply(null,arguments)};var _Conv2DBackpropInput=Module[\"_Conv2DBackpropInput\"]=function(){return(_Conv2DBackpropInput=Module[\"_Conv2DBackpropInput\"]=Module[\"asm\"][\"P\"]).apply(null,arguments)};var _Cos=Module[\"_Cos\"]=function(){return(_Cos=Module[\"_Cos\"]=Module[\"asm\"][\"Q\"]).apply(null,arguments)};var _CropAndResize=Module[\"_CropAndResize\"]=function(){return(_CropAndResize=Module[\"_CropAndResize\"]=Module[\"asm\"][\"R\"]).apply(null,arguments)};var _Cumsum=Module[\"_Cumsum\"]=function(){return(_Cumsum=Module[\"_Cumsum\"]=Module[\"asm\"][\"S\"]).apply(null,arguments)};var _DepthToSpace=Module[\"_DepthToSpace\"]=function(){return(_DepthToSpace=Module[\"_DepthToSpace\"]=Module[\"asm\"][\"T\"]).apply(null,arguments)};var _DepthwiseConv2dNative=Module[\"_DepthwiseConv2dNative\"]=function(){return(_DepthwiseConv2dNative=Module[\"_DepthwiseConv2dNative\"]=Module[\"asm\"][\"U\"]).apply(null,arguments)};var _Div=Module[\"_Div\"]=function(){return(_Div=Module[\"_Div\"]=Module[\"asm\"][\"V\"]).apply(null,arguments)};var _Equal=Module[\"_Equal\"]=function(){return(_Equal=Module[\"_Equal\"]=Module[\"asm\"][\"W\"]).apply(null,arguments)};var _Exp=Module[\"_Exp\"]=function(){return(_Exp=Module[\"_Exp\"]=Module[\"asm\"][\"X\"]).apply(null,arguments)};var _FlipLeftRight=Module[\"_FlipLeftRight\"]=function(){return(_FlipLeftRight=Module[\"_FlipLeftRight\"]=Module[\"asm\"][\"Y\"]).apply(null,arguments)};var _FloorDiv=Module[\"_FloorDiv\"]=function(){return(_FloorDiv=Module[\"_FloorDiv\"]=Module[\"asm\"][\"Z\"]).apply(null,arguments)};var _FusedBatchNorm=Module[\"_FusedBatchNorm\"]=function(){return(_FusedBatchNorm=Module[\"_FusedBatchNorm\"]=Module[\"asm\"][\"_\"]).apply(null,arguments)};var _FusedConv2D=Module[\"_FusedConv2D\"]=function(){return(_FusedConv2D=Module[\"_FusedConv2D\"]=Module[\"asm\"][\"$\"]).apply(null,arguments)};var _FusedDepthwiseConv2D=Module[\"_FusedDepthwiseConv2D\"]=function(){return(_FusedDepthwiseConv2D=Module[\"_FusedDepthwiseConv2D\"]=Module[\"asm\"][\"aa\"]).apply(null,arguments)};var _Gather=Module[\"_Gather\"]=function(){return(_Gather=Module[\"_Gather\"]=Module[\"asm\"][\"ba\"]).apply(null,arguments)};var _GatherNd=Module[\"_GatherNd\"]=function(){return(_GatherNd=Module[\"_GatherNd\"]=Module[\"asm\"][\"ca\"]).apply(null,arguments)};var _Greater=Module[\"_Greater\"]=function(){return(_Greater=Module[\"_Greater\"]=Module[\"asm\"][\"da\"]).apply(null,arguments)};var 
_GreaterEqual=Module[\"_GreaterEqual\"]=function(){return(_GreaterEqual=Module[\"_GreaterEqual\"]=Module[\"asm\"][\"ea\"]).apply(null,arguments)};var _Less=Module[\"_Less\"]=function(){return(_Less=Module[\"_Less\"]=Module[\"asm\"][\"fa\"]).apply(null,arguments)};var _LessEqual=Module[\"_LessEqual\"]=function(){return(_LessEqual=Module[\"_LessEqual\"]=Module[\"asm\"][\"ga\"]).apply(null,arguments)};var _Log=Module[\"_Log\"]=function(){return(_Log=Module[\"_Log\"]=Module[\"asm\"][\"ha\"]).apply(null,arguments)};var _LogicalAnd=Module[\"_LogicalAnd\"]=function(){return(_LogicalAnd=Module[\"_LogicalAnd\"]=Module[\"asm\"][\"ia\"]).apply(null,arguments)};var _Max=Module[\"_Max\"]=function(){return(_Max=Module[\"_Max\"]=Module[\"asm\"][\"ja\"]).apply(null,arguments)};var _MaxPool=Module[\"_MaxPool\"]=function(){return(_MaxPool=Module[\"_MaxPool\"]=Module[\"asm\"][\"ka\"]).apply(null,arguments)};var _Maximum=Module[\"_Maximum\"]=function(){return(_Maximum=Module[\"_Maximum\"]=Module[\"asm\"][\"la\"]).apply(null,arguments)};var _Min=Module[\"_Min\"]=function(){return(_Min=Module[\"_Min\"]=Module[\"asm\"][\"ma\"]).apply(null,arguments)};var _Minimum=Module[\"_Minimum\"]=function(){return(_Minimum=Module[\"_Minimum\"]=Module[\"asm\"][\"na\"]).apply(null,arguments)};var _Multiply=Module[\"_Multiply\"]=function(){return(_Multiply=Module[\"_Multiply\"]=Module[\"asm\"][\"oa\"]).apply(null,arguments)};var _Negate=Module[\"_Negate\"]=function(){return(_Negate=Module[\"_Negate\"]=Module[\"asm\"][\"pa\"]).apply(null,arguments)};var _NonMaxSuppressionV3=Module[\"_NonMaxSuppressionV3\"]=function(){return(_NonMaxSuppressionV3=Module[\"_NonMaxSuppressionV3\"]=Module[\"asm\"][\"qa\"]).apply(null,arguments)};var _NonMaxSuppressionV4=Module[\"_NonMaxSuppressionV4\"]=function(){return(_NonMaxSuppressionV4=Module[\"_NonMaxSuppressionV4\"]=Module[\"asm\"][\"ra\"]).apply(null,arguments)};var _NonMaxSuppressionV5=Module[\"_NonMaxSuppressionV5\"]=function(){return(_NonMaxSuppressionV5=Module[\"_NonMaxSuppressionV5\"]=Module[\"asm\"][\"sa\"]).apply(null,arguments)};var _NotEqual=Module[\"_NotEqual\"]=function(){return(_NotEqual=Module[\"_NotEqual\"]=Module[\"asm\"][\"ta\"]).apply(null,arguments)};var _OneHot=Module[\"_OneHot\"]=function(){return(_OneHot=Module[\"_OneHot\"]=Module[\"asm\"][\"ua\"]).apply(null,arguments)};var _PadV2=Module[\"_PadV2\"]=function(){return(_PadV2=Module[\"_PadV2\"]=Module[\"asm\"][\"va\"]).apply(null,arguments)};var _Pow=Module[\"_Pow\"]=function(){return(_Pow=Module[\"_Pow\"]=Module[\"asm\"][\"wa\"]).apply(null,arguments)};var _Prelu=Module[\"_Prelu\"]=function(){return(_Prelu=Module[\"_Prelu\"]=Module[\"asm\"][\"xa\"]).apply(null,arguments)};var _Relu=Module[\"_Relu\"]=function(){return(_Relu=Module[\"_Relu\"]=Module[\"asm\"][\"ya\"]).apply(null,arguments)};var _Relu6=Module[\"_Relu6\"]=function(){return(_Relu6=Module[\"_Relu6\"]=Module[\"asm\"][\"za\"]).apply(null,arguments)};var _ResizeBilinear=Module[\"_ResizeBilinear\"]=function(){return(_ResizeBilinear=Module[\"_ResizeBilinear\"]=Module[\"asm\"][\"Aa\"]).apply(null,arguments)};var _Reverse=Module[\"_Reverse\"]=function(){return(_Reverse=Module[\"_Reverse\"]=Module[\"asm\"][\"Ba\"]).apply(null,arguments)};var _RotateWithOffset=Module[\"_RotateWithOffset\"]=function(){return(_RotateWithOffset=Module[\"_RotateWithOffset\"]=Module[\"asm\"][\"Ca\"]).apply(null,arguments)};var _Rsqrt=Module[\"_Rsqrt\"]=function(){return(_Rsqrt=Module[\"_Rsqrt\"]=Module[\"asm\"][\"Da\"]).apply(null,arguments)};var 
_ScatterNd=Module[\"_ScatterNd\"]=function(){return(_ScatterNd=Module[\"_ScatterNd\"]=Module[\"asm\"][\"Ea\"]).apply(null,arguments)};var _SelectV2=Module[\"_SelectV2\"]=function(){return(_SelectV2=Module[\"_SelectV2\"]=Module[\"asm\"][\"Fa\"]).apply(null,arguments)};var _Sigmoid=Module[\"_Sigmoid\"]=function(){return(_Sigmoid=Module[\"_Sigmoid\"]=Module[\"asm\"][\"Ga\"]).apply(null,arguments)};var _Sin=Module[\"_Sin\"]=function(){return(_Sin=Module[\"_Sin\"]=Module[\"asm\"][\"Ha\"]).apply(null,arguments)};var _Softmax=Module[\"_Softmax\"]=function(){return(_Softmax=Module[\"_Softmax\"]=Module[\"asm\"][\"Ia\"]).apply(null,arguments)};var _Sqrt=Module[\"_Sqrt\"]=function(){return(_Sqrt=Module[\"_Sqrt\"]=Module[\"asm\"][\"Ja\"]).apply(null,arguments)};var _Square=Module[\"_Square\"]=function(){return(_Square=Module[\"_Square\"]=Module[\"asm\"][\"Ka\"]).apply(null,arguments)};var _SquaredDifference=Module[\"_SquaredDifference\"]=function(){return(_SquaredDifference=Module[\"_SquaredDifference\"]=Module[\"asm\"][\"La\"]).apply(null,arguments)};var _StridedSlice=Module[\"_StridedSlice\"]=function(){return(_StridedSlice=Module[\"_StridedSlice\"]=Module[\"asm\"][\"Ma\"]).apply(null,arguments)};var _Sub=Module[\"_Sub\"]=function(){return(_Sub=Module[\"_Sub\"]=Module[\"asm\"][\"Na\"]).apply(null,arguments)};var _Sum=Module[\"_Sum\"]=function(){return(_Sum=Module[\"_Sum\"]=Module[\"asm\"][\"Oa\"]).apply(null,arguments)};var _Tanh=Module[\"_Tanh\"]=function(){return(_Tanh=Module[\"_Tanh\"]=Module[\"asm\"][\"Pa\"]).apply(null,arguments)};var _Tile=Module[\"_Tile\"]=function(){return(_Tile=Module[\"_Tile\"]=Module[\"asm\"][\"Qa\"]).apply(null,arguments)};var _Transpose=Module[\"_Transpose\"]=function(){return(_Transpose=Module[\"_Transpose\"]=Module[\"asm\"][\"Ra\"]).apply(null,arguments)};var __FusedMatMul=Module[\"__FusedMatMul\"]=function(){return(__FusedMatMul=Module[\"__FusedMatMul\"]=Module[\"asm\"][\"Sa\"]).apply(null,arguments)};var _malloc=Module[\"_malloc\"]=function(){return(_malloc=Module[\"_malloc\"]=Module[\"asm\"][\"Ta\"]).apply(null,arguments)};var _free=Module[\"_free\"]=function(){return(_free=Module[\"_free\"]=Module[\"asm\"][\"Ua\"]).apply(null,arguments)};var _emscripten_get_global_libc=Module[\"_emscripten_get_global_libc\"]=function(){return(_emscripten_get_global_libc=Module[\"_emscripten_get_global_libc\"]=Module[\"asm\"][\"Va\"]).apply(null,arguments)};var ___errno_location=Module[\"___errno_location\"]=function(){return(___errno_location=Module[\"___errno_location\"]=Module[\"asm\"][\"Wa\"]).apply(null,arguments)};var ___em_js__initPthreadsJS=Module[\"___em_js__initPthreadsJS\"]=function(){return(___em_js__initPthreadsJS=Module[\"___em_js__initPthreadsJS\"]=Module[\"asm\"][\"Xa\"]).apply(null,arguments)};var _memalign=Module[\"_memalign\"]=function(){return(_memalign=Module[\"_memalign\"]=Module[\"asm\"][\"Ya\"]).apply(null,arguments)};var ___pthread_tsd_run_dtors=Module[\"___pthread_tsd_run_dtors\"]=function(){return(___pthread_tsd_run_dtors=Module[\"___pthread_tsd_run_dtors\"]=Module[\"asm\"][\"Za\"]).apply(null,arguments)};var _emscripten_main_thread_process_queued_calls=Module[\"_emscripten_main_thread_process_queued_calls\"]=function(){return(_emscripten_main_thread_process_queued_calls=Module[\"_emscripten_main_thread_process_queued_calls\"]=Module[\"asm\"][\"_a\"]).apply(null,arguments)};var 
_emscripten_current_thread_process_queued_calls=Module[\"_emscripten_current_thread_process_queued_calls\"]=function(){return(_emscripten_current_thread_process_queued_calls=Module[\"_emscripten_current_thread_process_queued_calls\"]=Module[\"asm\"][\"$a\"]).apply(null,arguments)};var _emscripten_register_main_browser_thread_id=Module[\"_emscripten_register_main_browser_thread_id\"]=function(){return(_emscripten_register_main_browser_thread_id=Module[\"_emscripten_register_main_browser_thread_id\"]=Module[\"asm\"][\"ab\"]).apply(null,arguments)};var _emscripten_main_browser_thread_id=Module[\"_emscripten_main_browser_thread_id\"]=function(){return(_emscripten_main_browser_thread_id=Module[\"_emscripten_main_browser_thread_id\"]=Module[\"asm\"][\"bb\"]).apply(null,arguments)};var _emscripten_async_run_in_main_thread=Module[\"_emscripten_async_run_in_main_thread\"]=function(){return(_emscripten_async_run_in_main_thread=Module[\"_emscripten_async_run_in_main_thread\"]=Module[\"asm\"][\"cb\"]).apply(null,arguments)};var _emscripten_sync_run_in_main_thread=Module[\"_emscripten_sync_run_in_main_thread\"]=function(){return(_emscripten_sync_run_in_main_thread=Module[\"_emscripten_sync_run_in_main_thread\"]=Module[\"asm\"][\"db\"]).apply(null,arguments)};var _emscripten_sync_run_in_main_thread_0=Module[\"_emscripten_sync_run_in_main_thread_0\"]=function(){return(_emscripten_sync_run_in_main_thread_0=Module[\"_emscripten_sync_run_in_main_thread_0\"]=Module[\"asm\"][\"eb\"]).apply(null,arguments)};var _emscripten_sync_run_in_main_thread_1=Module[\"_emscripten_sync_run_in_main_thread_1\"]=function(){return(_emscripten_sync_run_in_main_thread_1=Module[\"_emscripten_sync_run_in_main_thread_1\"]=Module[\"asm\"][\"fb\"]).apply(null,arguments)};var _emscripten_sync_run_in_main_thread_2=Module[\"_emscripten_sync_run_in_main_thread_2\"]=function(){return(_emscripten_sync_run_in_main_thread_2=Module[\"_emscripten_sync_run_in_main_thread_2\"]=Module[\"asm\"][\"gb\"]).apply(null,arguments)};var _emscripten_sync_run_in_main_thread_xprintf_varargs=Module[\"_emscripten_sync_run_in_main_thread_xprintf_varargs\"]=function(){return(_emscripten_sync_run_in_main_thread_xprintf_varargs=Module[\"_emscripten_sync_run_in_main_thread_xprintf_varargs\"]=Module[\"asm\"][\"hb\"]).apply(null,arguments)};var _emscripten_sync_run_in_main_thread_3=Module[\"_emscripten_sync_run_in_main_thread_3\"]=function(){return(_emscripten_sync_run_in_main_thread_3=Module[\"_emscripten_sync_run_in_main_thread_3\"]=Module[\"asm\"][\"ib\"]).apply(null,arguments)};var _emscripten_sync_run_in_main_thread_4=Module[\"_emscripten_sync_run_in_main_thread_4\"]=function(){return(_emscripten_sync_run_in_main_thread_4=Module[\"_emscripten_sync_run_in_main_thread_4\"]=Module[\"asm\"][\"jb\"]).apply(null,arguments)};var _emscripten_sync_run_in_main_thread_5=Module[\"_emscripten_sync_run_in_main_thread_5\"]=function(){return(_emscripten_sync_run_in_main_thread_5=Module[\"_emscripten_sync_run_in_main_thread_5\"]=Module[\"asm\"][\"kb\"]).apply(null,arguments)};var _emscripten_sync_run_in_main_thread_6=Module[\"_emscripten_sync_run_in_main_thread_6\"]=function(){return(_emscripten_sync_run_in_main_thread_6=Module[\"_emscripten_sync_run_in_main_thread_6\"]=Module[\"asm\"][\"lb\"]).apply(null,arguments)};var _emscripten_sync_run_in_main_thread_7=Module[\"_emscripten_sync_run_in_main_thread_7\"]=function(){return(_emscripten_sync_run_in_main_thread_7=Module[\"_emscripten_sync_run_in_main_thread_7\"]=Module[\"asm\"][\"mb\"]).apply(null,arguments)};var 
_emscripten_run_in_main_runtime_thread_js=Module[\"_emscripten_run_in_main_runtime_thread_js\"]=function(){return(_emscripten_run_in_main_runtime_thread_js=Module[\"_emscripten_run_in_main_runtime_thread_js\"]=Module[\"asm\"][\"nb\"]).apply(null,arguments)};var _emscripten_async_queue_on_thread_=Module[\"_emscripten_async_queue_on_thread_\"]=function(){return(_emscripten_async_queue_on_thread_=Module[\"_emscripten_async_queue_on_thread_\"]=Module[\"asm\"][\"ob\"]).apply(null,arguments)};var _emscripten_tls_init=Module[\"_emscripten_tls_init\"]=function(){return(_emscripten_tls_init=Module[\"_emscripten_tls_init\"]=Module[\"asm\"][\"pb\"]).apply(null,arguments)};var stackSave=Module[\"stackSave\"]=function(){return(stackSave=Module[\"stackSave\"]=Module[\"asm\"][\"qb\"]).apply(null,arguments)};var stackAlloc=Module[\"stackAlloc\"]=function(){return(stackAlloc=Module[\"stackAlloc\"]=Module[\"asm\"][\"rb\"]).apply(null,arguments)};var stackRestore=Module[\"stackRestore\"]=function(){return(stackRestore=Module[\"stackRestore\"]=Module[\"asm\"][\"sb\"]).apply(null,arguments)};var dynCall_vi=Module[\"dynCall_vi\"]=function(){return(dynCall_vi=Module[\"dynCall_vi\"]=Module[\"asm\"][\"tb\"]).apply(null,arguments)};var dynCall_v=Module[\"dynCall_v\"]=function(){return(dynCall_v=Module[\"dynCall_v\"]=Module[\"asm\"][\"ub\"]).apply(null,arguments)};var dynCall_ii=Module[\"dynCall_ii\"]=function(){return(dynCall_ii=Module[\"dynCall_ii\"]=Module[\"asm\"][\"vb\"]).apply(null,arguments)};Module[\"asm\"]=asm;Module[\"cwrap\"]=cwrap;Module[\"PThread\"]=PThread;Module[\"PThread\"]=PThread;Module[\"_pthread_self\"]=_pthread_self;Module[\"wasmMemory\"]=wasmMemory;Module[\"ExitStatus\"]=ExitStatus;var calledRun;Module[\"then\"]=function(func){if(calledRun){func(Module)}else{var old=Module[\"onRuntimeInitialized\"];Module[\"onRuntimeInitialized\"]=function(){if(old)old();func(Module)}}return Module};function ExitStatus(status){this.name=\"ExitStatus\";this.message=\"Program terminated with exit(\"+status+\")\";this.status=status}dependenciesFulfilled=function runCaller(){if(!calledRun)run();if(!calledRun)dependenciesFulfilled=runCaller};function run(args){args=args||arguments_;if(runDependencies>0){return}preRun();if(runDependencies>0)return;function doRun(){if(calledRun)return;calledRun=true;Module[\"calledRun\"]=true;if(ABORT)return;initRuntime();preMain();if(Module[\"onRuntimeInitialized\"])Module[\"onRuntimeInitialized\"]();postRun()}if(Module[\"setStatus\"]){Module[\"setStatus\"](\"Running...\");setTimeout(function(){setTimeout(function(){Module[\"setStatus\"](\"\")},1);doRun()},1)}else{doRun()}}Module[\"run\"]=run;if(Module[\"preInit\"]){if(typeof Module[\"preInit\"]==\"function\")Module[\"preInit\"]=[Module[\"preInit\"]];while(Module[\"preInit\"].length>0){Module[\"preInit\"].pop()()}}if(!ENVIRONMENT_IS_PTHREAD)noExitRuntime=true;if(!ENVIRONMENT_IS_PTHREAD)run();\n\n\n return WasmBackendModuleThreadedSimd\n}\n);\n})();\nif (typeof exports === 'object' && typeof module === 'object')\n module.exports = WasmBackendModuleThreadedSimd;\n else if (typeof define === 'function' && define['amd'])\n define([], function() { return WasmBackendModuleThreadedSimd; });\n else if (typeof exports === 'object')\n exports[\"WasmBackendModuleThreadedSimd\"] = WasmBackendModuleThreadedSimd;\n ", "\nvar WasmBackendModule = (function() {\n var _scriptDir = typeof document !== 'undefined' && document.currentScript ? 
document.currentScript.src : undefined;\n if (typeof __filename !== 'undefined') _scriptDir = _scriptDir || __filename;\n return (\nfunction(WasmBackendModule) {\n WasmBackendModule = WasmBackendModule || {};\n\nvar Module=typeof WasmBackendModule!==\"undefined\"?WasmBackendModule:{};var moduleOverrides={};var key;for(key in Module){if(Module.hasOwnProperty(key)){moduleOverrides[key]=Module[key]}}var arguments_=[];var thisProgram=\"./this.program\";var quit_=function(status,toThrow){throw toThrow};var ENVIRONMENT_IS_WEB=false;var ENVIRONMENT_IS_WORKER=false;var ENVIRONMENT_IS_NODE=false;var ENVIRONMENT_IS_SHELL=false;ENVIRONMENT_IS_WEB=typeof window===\"object\";ENVIRONMENT_IS_WORKER=typeof importScripts===\"function\";ENVIRONMENT_IS_NODE=typeof process===\"object\"&&typeof process.versions===\"object\"&&typeof process.versions.node===\"string\";ENVIRONMENT_IS_SHELL=!ENVIRONMENT_IS_WEB&&!ENVIRONMENT_IS_NODE&&!ENVIRONMENT_IS_WORKER;var scriptDirectory=\"\";function locateFile(path){if(Module[\"locateFile\"]){return Module[\"locateFile\"](path,scriptDirectory)}return scriptDirectory+path}var read_,readAsync,readBinary,setWindowTitle;var nodeFS;var nodePath;if(ENVIRONMENT_IS_NODE){if(ENVIRONMENT_IS_WORKER){scriptDirectory=require(\"path\").dirname(scriptDirectory)+\"/\"}else{scriptDirectory=__dirname+\"/\"}read_=function shell_read(filename,binary){if(!nodeFS)nodeFS=require(\"fs\");if(!nodePath)nodePath=require(\"path\");filename=nodePath[\"normalize\"](filename);return nodeFS[\"readFileSync\"](filename,binary?null:\"utf8\")};readBinary=function readBinary(filename){var ret=read_(filename,true);if(!ret.buffer){ret=new Uint8Array(ret)}assert(ret.buffer);return ret};if(process[\"argv\"].length>1){thisProgram=process[\"argv\"][1].replace(/\\\\/g,\"/\")}arguments_=process[\"argv\"].slice(2);process[\"on\"](\"uncaughtException\",function(ex){if(!(ex instanceof ExitStatus)){throw ex}});process[\"on\"](\"unhandledRejection\",abort);quit_=function(status){process[\"exit\"](status)};Module[\"inspect\"]=function(){return\"[Emscripten Module object]\"}}else if(ENVIRONMENT_IS_SHELL){if(typeof read!=\"undefined\"){read_=function shell_read(f){return read(f)}}readBinary=function readBinary(f){var data;if(typeof readbuffer===\"function\"){return new Uint8Array(readbuffer(f))}data=read(f,\"binary\");assert(typeof data===\"object\");return data};if(typeof scriptArgs!=\"undefined\"){arguments_=scriptArgs}else if(typeof arguments!=\"undefined\"){arguments_=arguments}if(typeof quit===\"function\"){quit_=function(status){quit(status)}}if(typeof print!==\"undefined\"){if(typeof console===\"undefined\")console={};console.log=print;console.warn=console.error=typeof printErr!==\"undefined\"?printErr:print}}else if(ENVIRONMENT_IS_WEB||ENVIRONMENT_IS_WORKER){if(ENVIRONMENT_IS_WORKER){scriptDirectory=self.location.href}else if(document.currentScript){scriptDirectory=document.currentScript.src}if(_scriptDir){scriptDirectory=_scriptDir}if(scriptDirectory.indexOf(\"blob:\")!==0){scriptDirectory=scriptDirectory.substr(0,scriptDirectory.lastIndexOf(\"/\")+1)}else{scriptDirectory=\"\"}{read_=function shell_read(url){var xhr=new XMLHttpRequest;xhr.open(\"GET\",url,false);xhr.send(null);return xhr.responseText};if(ENVIRONMENT_IS_WORKER){readBinary=function readBinary(url){var xhr=new XMLHttpRequest;xhr.open(\"GET\",url,false);xhr.responseType=\"arraybuffer\";xhr.send(null);return new Uint8Array(xhr.response)}}readAsync=function readAsync(url,onload,onerror){var xhr=new 
XMLHttpRequest;xhr.open(\"GET\",url,true);xhr.responseType=\"arraybuffer\";xhr.onload=function xhr_onload(){if(xhr.status==200||xhr.status==0&&xhr.response){onload(xhr.response);return}onerror()};xhr.onerror=onerror;xhr.send(null)}}setWindowTitle=function(title){document.title=title}}else{}var out=Module[\"print\"]||console.log.bind(console);var err=Module[\"printErr\"]||console.warn.bind(console);for(key in moduleOverrides){if(moduleOverrides.hasOwnProperty(key)){Module[key]=moduleOverrides[key]}}moduleOverrides=null;if(Module[\"arguments\"])arguments_=Module[\"arguments\"];if(Module[\"thisProgram\"])thisProgram=Module[\"thisProgram\"];if(Module[\"quit\"])quit_=Module[\"quit\"];var wasmBinary;if(Module[\"wasmBinary\"])wasmBinary=Module[\"wasmBinary\"];var noExitRuntime;if(Module[\"noExitRuntime\"])noExitRuntime=Module[\"noExitRuntime\"];if(typeof WebAssembly!==\"object\"){err(\"no native wasm support detected\")}var wasmMemory;var wasmTable=new WebAssembly.Table({\"initial\":147,\"maximum\":147+0,\"element\":\"anyfunc\"});var ABORT=false;var EXITSTATUS=0;function assert(condition,text){if(!condition){abort(\"Assertion failed: \"+text)}}function getCFunc(ident){var func=Module[\"_\"+ident];assert(func,\"Cannot call unknown function \"+ident+\", make sure it is exported\");return func}function ccall(ident,returnType,argTypes,args,opts){var toC={\"string\":function(str){var ret=0;if(str!==null&&str!==undefined&&str!==0){var len=(str.length<<2)+1;ret=stackAlloc(len);stringToUTF8(str,ret,len)}return ret},\"array\":function(arr){var ret=stackAlloc(arr.length);writeArrayToMemory(arr,ret);return ret}};function convertReturnValue(ret){if(returnType===\"string\")return UTF8ToString(ret);if(returnType===\"boolean\")return Boolean(ret);return ret}var func=getCFunc(ident);var cArgs=[];var stack=0;if(args){for(var i=0;i=endIdx))++endPtr;if(endPtr-idx>16&&heap.subarray&&UTF8Decoder){return UTF8Decoder.decode(heap.subarray(idx,endPtr))}else{var str=\"\";while(idx>10,56320|ch&1023)}}}return str}function UTF8ToString(ptr,maxBytesToRead){return ptr?UTF8ArrayToString(HEAPU8,ptr,maxBytesToRead):\"\"}function stringToUTF8Array(str,heap,outIdx,maxBytesToWrite){if(!(maxBytesToWrite>0))return 0;var startIdx=outIdx;var endIdx=outIdx+maxBytesToWrite-1;for(var i=0;i=55296&&u<=57343){var u1=str.charCodeAt(++i);u=65536+((u&1023)<<10)|u1&1023}if(u<=127){if(outIdx>=endIdx)break;heap[outIdx++]=u}else if(u<=2047){if(outIdx+1>=endIdx)break;heap[outIdx++]=192|u>>6;heap[outIdx++]=128|u&63}else if(u<=65535){if(outIdx+2>=endIdx)break;heap[outIdx++]=224|u>>12;heap[outIdx++]=128|u>>6&63;heap[outIdx++]=128|u&63}else{if(outIdx+3>=endIdx)break;heap[outIdx++]=240|u>>18;heap[outIdx++]=128|u>>12&63;heap[outIdx++]=128|u>>6&63;heap[outIdx++]=128|u&63}}heap[outIdx]=0;return outIdx-startIdx}function stringToUTF8(str,outPtr,maxBytesToWrite){return stringToUTF8Array(str,HEAPU8,outPtr,maxBytesToWrite)}function writeArrayToMemory(array,buffer){HEAP8.set(array,buffer)}var buffer,HEAP8,HEAPU8,HEAP16,HEAPU16,HEAP32,HEAPU32,HEAPF32,HEAPF64;function updateGlobalBufferAndViews(buf){buffer=buf;Module[\"HEAP8\"]=HEAP8=new Int8Array(buf);Module[\"HEAP16\"]=HEAP16=new Int16Array(buf);Module[\"HEAP32\"]=HEAP32=new Int32Array(buf);Module[\"HEAPU8\"]=HEAPU8=new Uint8Array(buf);Module[\"HEAPU16\"]=HEAPU16=new Uint16Array(buf);Module[\"HEAPU32\"]=HEAPU32=new Uint32Array(buf);Module[\"HEAPF32\"]=HEAPF32=new Float32Array(buf);Module[\"HEAPF64\"]=HEAPF64=new Float64Array(buf)}var INITIAL_INITIAL_MEMORY=Module[\"INITIAL_MEMORY\"]||16777216;function 
callRuntimeCallbacks(callbacks){while(callbacks.length>0){var callback=callbacks.shift();if(typeof callback==\"function\"){callback(Module);continue}var func=callback.func;if(typeof func===\"number\"){if(callback.arg===undefined){Module[\"dynCall_v\"](func)}else{Module[\"dynCall_vi\"](func,callback.arg)}}else{func(callback.arg===undefined?null:callback.arg)}}}var __ATPRERUN__=[];var __ATINIT__=[];var __ATMAIN__=[];var __ATPOSTRUN__=[];var runtimeInitialized=false;var runtimeExited=false;function preRun(){if(Module[\"preRun\"]){if(typeof Module[\"preRun\"]==\"function\")Module[\"preRun\"]=[Module[\"preRun\"]];while(Module[\"preRun\"].length){addOnPreRun(Module[\"preRun\"].shift())}}callRuntimeCallbacks(__ATPRERUN__)}function initRuntime(){runtimeInitialized=true;callRuntimeCallbacks(__ATINIT__)}function preMain(){callRuntimeCallbacks(__ATMAIN__)}function exitRuntime(){runtimeExited=true}function postRun(){if(Module[\"postRun\"]){if(typeof Module[\"postRun\"]==\"function\")Module[\"postRun\"]=[Module[\"postRun\"]];while(Module[\"postRun\"].length){addOnPostRun(Module[\"postRun\"].shift())}}callRuntimeCallbacks(__ATPOSTRUN__)}function addOnPreRun(cb){__ATPRERUN__.unshift(cb)}function addOnPostRun(cb){__ATPOSTRUN__.unshift(cb)}var Math_ceil=Math.ceil;var Math_floor=Math.floor;var runDependencies=0;var runDependencyWatcher=null;var dependenciesFulfilled=null;function addRunDependency(id){runDependencies++;if(Module[\"monitorRunDependencies\"]){Module[\"monitorRunDependencies\"](runDependencies)}}function removeRunDependency(id){runDependencies--;if(Module[\"monitorRunDependencies\"]){Module[\"monitorRunDependencies\"](runDependencies)}if(runDependencies==0){if(runDependencyWatcher!==null){clearInterval(runDependencyWatcher);runDependencyWatcher=null}if(dependenciesFulfilled){var callback=dependenciesFulfilled;dependenciesFulfilled=null;callback()}}}Module[\"preloadedImages\"]={};Module[\"preloadedAudios\"]={};function abort(what){if(Module[\"onAbort\"]){Module[\"onAbort\"](what)}what+=\"\";out(what);err(what);ABORT=true;EXITSTATUS=1;what=\"abort(\"+what+\"). 
Build with -s ASSERTIONS=1 for more info.\";throw new WebAssembly.RuntimeError(what)}function hasPrefix(str,prefix){return String.prototype.startsWith?str.startsWith(prefix):str.indexOf(prefix)===0}var dataURIPrefix=\"data:application/octet-stream;base64,\";function isDataURI(filename){return hasPrefix(filename,dataURIPrefix)}var fileURIPrefix=\"file://\";function isFileURI(filename){return hasPrefix(filename,fileURIPrefix)}var wasmBinaryFile=\"tfjs-backend-wasm.wasm\";if(!isDataURI(wasmBinaryFile)){wasmBinaryFile=locateFile(wasmBinaryFile)}function getBinary(){try{if(wasmBinary){return new Uint8Array(wasmBinary)}if(readBinary){return readBinary(wasmBinaryFile)}else{throw\"both async and sync fetching of the wasm failed\"}}catch(err){abort(err)}}function getBinaryPromise(){if(!wasmBinary&&(ENVIRONMENT_IS_WEB||ENVIRONMENT_IS_WORKER)&&typeof fetch===\"function\"&&!isFileURI(wasmBinaryFile)){return fetch(wasmBinaryFile,{credentials:\"same-origin\"}).then(function(response){if(!response[\"ok\"]){throw\"failed to load wasm binary file at '\"+wasmBinaryFile+\"'\"}return response[\"arrayBuffer\"]()}).catch(function(){return getBinary()})}return new Promise(function(resolve,reject){resolve(getBinary())})}function createWasm(){var info={\"env\":asmLibraryArg,\"wasi_snapshot_preview1\":asmLibraryArg};function receiveInstance(instance,module){var exports=instance.exports;Module[\"asm\"]=exports;wasmMemory=exports[\"memory\"];updateGlobalBufferAndViews(wasmMemory.buffer);removeRunDependency(\"wasm-instantiate\")}addRunDependency(\"wasm-instantiate\");function receiveInstantiatedSource(output){receiveInstance(output[\"instance\"])}function instantiateArrayBuffer(receiver){return getBinaryPromise().then(function(binary){return WebAssembly.instantiate(binary,info)}).then(receiver,function(reason){err(\"failed to asynchronously prepare wasm: \"+reason);abort(reason)})}function instantiateAsync(){if(!wasmBinary&&typeof WebAssembly.instantiateStreaming===\"function\"&&!isDataURI(wasmBinaryFile)&&!isFileURI(wasmBinaryFile)&&typeof fetch===\"function\"){fetch(wasmBinaryFile,{credentials:\"same-origin\"}).then(function(response){var result=WebAssembly.instantiateStreaming(response,info);return result.then(receiveInstantiatedSource,function(reason){err(\"wasm streaming compile failed: \"+reason);err(\"falling back to ArrayBuffer instantiation\");instantiateArrayBuffer(receiveInstantiatedSource)})})}else{return instantiateArrayBuffer(receiveInstantiatedSource)}}if(Module[\"instantiateWasm\"]){try{var exports=Module[\"instantiateWasm\"](info,receiveInstance);return exports}catch(e){err(\"Module.instantiateWasm callback failed with error: \"+e);return false}}instantiateAsync();return{}}__ATINIT__.push();function _emscripten_notify_memory_growth(memoryIndex){updateGlobalBufferAndViews(wasmMemory.buffer)}var PATH={splitPath:function(filename){var splitPathRe=/^(\\/?|)([\\s\\S]*?)((?:\\.{1,2}|[^\\/]+?|)(\\.[^.\\/]*|))(?:[\\/]*)$/;return splitPathRe.exec(filename).slice(1)},normalizeArray:function(parts,allowAboveRoot){var up=0;for(var i=parts.length-1;i>=0;i--){var last=parts[i];if(last===\".\"){parts.splice(i,1)}else if(last===\"..\"){parts.splice(i,1);up++}else if(up){parts.splice(i,1);up--}}if(allowAboveRoot){for(;up;up--){parts.unshift(\"..\")}}return parts},normalize:function(path){var 
isAbsolute=path.charAt(0)===\"/\",trailingSlash=path.substr(-1)===\"/\";path=PATH.normalizeArray(path.split(\"/\").filter(function(p){return!!p}),!isAbsolute).join(\"/\");if(!path&&!isAbsolute){path=\".\"}if(path&&trailingSlash){path+=\"/\"}return(isAbsolute?\"/\":\"\")+path},dirname:function(path){var result=PATH.splitPath(path),root=result[0],dir=result[1];if(!root&&!dir){return\".\"}if(dir){dir=dir.substr(0,dir.length-1)}return root+dir},basename:function(path){if(path===\"/\")return\"/\";var lastSlash=path.lastIndexOf(\"/\");if(lastSlash===-1)return path;return path.substr(lastSlash+1)},extname:function(path){return PATH.splitPath(path)[3]},join:function(){var paths=Array.prototype.slice.call(arguments,0);return PATH.normalize(paths.join(\"/\"))},join2:function(l,r){return PATH.normalize(l+\"/\"+r)}};var SYSCALLS={mappings:{},buffers:[null,[],[]],printChar:function(stream,curr){var buffer=SYSCALLS.buffers[stream];if(curr===0||curr===10){(stream===1?out:err)(UTF8ArrayToString(buffer,0));buffer.length=0}else{buffer.push(curr)}},varargs:undefined,get:function(){SYSCALLS.varargs+=4;var ret=HEAP32[SYSCALLS.varargs-4>>2];return ret},getStr:function(ptr){var ret=UTF8ToString(ptr);return ret},get64:function(low,high){return low}};function _fd_close(fd){return 0}function _fd_seek(fd,offset_low,offset_high,whence,newOffset){}function _fd_write(fd,iov,iovcnt,pnum){var num=0;for(var i=0;i>2];var len=HEAP32[iov+(i*8+4)>>2];for(var j=0;j>2]=num;return 0}function _exit(status){exit(status)}function _proc_exit(code){_exit(code)}function _roundf(d){d=+d;return d>=+0?+Math_floor(d+ +.5):+Math_ceil(d-+.5)}var asmLibraryArg={\"emscripten_notify_memory_growth\":_emscripten_notify_memory_growth,\"fd_close\":_fd_close,\"fd_seek\":_fd_seek,\"fd_write\":_fd_write,\"proc_exit\":_proc_exit,\"roundf\":_roundf};var asm=createWasm();Module[\"asm\"]=asm;var _init=Module[\"_init\"]=function(){return(_init=Module[\"_init\"]=Module[\"asm\"][\"init\"]).apply(null,arguments)};var _register_tensor=Module[\"_register_tensor\"]=function(){return(_register_tensor=Module[\"_register_tensor\"]=Module[\"asm\"][\"register_tensor\"]).apply(null,arguments)};var _dispose_data=Module[\"_dispose_data\"]=function(){return(_dispose_data=Module[\"_dispose_data\"]=Module[\"asm\"][\"dispose_data\"]).apply(null,arguments)};var _dispose=Module[\"_dispose\"]=function(){return(_dispose=Module[\"_dispose\"]=Module[\"asm\"][\"dispose\"]).apply(null,arguments)};var _Abs=Module[\"_Abs\"]=function(){return(_Abs=Module[\"_Abs\"]=Module[\"asm\"][\"Abs\"]).apply(null,arguments)};var _Add=Module[\"_Add\"]=function(){return(_Add=Module[\"_Add\"]=Module[\"asm\"][\"Add\"]).apply(null,arguments)};var _AddN=Module[\"_AddN\"]=function(){return(_AddN=Module[\"_AddN\"]=Module[\"asm\"][\"AddN\"]).apply(null,arguments)};var _ArgMax=Module[\"_ArgMax\"]=function(){return(_ArgMax=Module[\"_ArgMax\"]=Module[\"asm\"][\"ArgMax\"]).apply(null,arguments)};var _AvgPool=Module[\"_AvgPool\"]=function(){return(_AvgPool=Module[\"_AvgPool\"]=Module[\"asm\"][\"AvgPool\"]).apply(null,arguments)};var _BatchMatMul=Module[\"_BatchMatMul\"]=function(){return(_BatchMatMul=Module[\"_BatchMatMul\"]=Module[\"asm\"][\"BatchMatMul\"]).apply(null,arguments)};var _ClipByValue=Module[\"_ClipByValue\"]=function(){return(_ClipByValue=Module[\"_ClipByValue\"]=Module[\"asm\"][\"ClipByValue\"]).apply(null,arguments)};var _Conv2D=Module[\"_Conv2D\"]=function(){return(_Conv2D=Module[\"_Conv2D\"]=Module[\"asm\"][\"Conv2D\"]).apply(null,arguments)};var 
_Conv2DBackpropInput=Module[\"_Conv2DBackpropInput\"]=function(){return(_Conv2DBackpropInput=Module[\"_Conv2DBackpropInput\"]=Module[\"asm\"][\"Conv2DBackpropInput\"]).apply(null,arguments)};var _Cos=Module[\"_Cos\"]=function(){return(_Cos=Module[\"_Cos\"]=Module[\"asm\"][\"Cos\"]).apply(null,arguments)};var _CropAndResize=Module[\"_CropAndResize\"]=function(){return(_CropAndResize=Module[\"_CropAndResize\"]=Module[\"asm\"][\"CropAndResize\"]).apply(null,arguments)};var _Cumsum=Module[\"_Cumsum\"]=function(){return(_Cumsum=Module[\"_Cumsum\"]=Module[\"asm\"][\"Cumsum\"]).apply(null,arguments)};var _DepthToSpace=Module[\"_DepthToSpace\"]=function(){return(_DepthToSpace=Module[\"_DepthToSpace\"]=Module[\"asm\"][\"DepthToSpace\"]).apply(null,arguments)};var _DepthwiseConv2dNative=Module[\"_DepthwiseConv2dNative\"]=function(){return(_DepthwiseConv2dNative=Module[\"_DepthwiseConv2dNative\"]=Module[\"asm\"][\"DepthwiseConv2dNative\"]).apply(null,arguments)};var _Div=Module[\"_Div\"]=function(){return(_Div=Module[\"_Div\"]=Module[\"asm\"][\"Div\"]).apply(null,arguments)};var _Equal=Module[\"_Equal\"]=function(){return(_Equal=Module[\"_Equal\"]=Module[\"asm\"][\"Equal\"]).apply(null,arguments)};var _Exp=Module[\"_Exp\"]=function(){return(_Exp=Module[\"_Exp\"]=Module[\"asm\"][\"Exp\"]).apply(null,arguments)};var _FlipLeftRight=Module[\"_FlipLeftRight\"]=function(){return(_FlipLeftRight=Module[\"_FlipLeftRight\"]=Module[\"asm\"][\"FlipLeftRight\"]).apply(null,arguments)};var _FloorDiv=Module[\"_FloorDiv\"]=function(){return(_FloorDiv=Module[\"_FloorDiv\"]=Module[\"asm\"][\"FloorDiv\"]).apply(null,arguments)};var _FusedBatchNorm=Module[\"_FusedBatchNorm\"]=function(){return(_FusedBatchNorm=Module[\"_FusedBatchNorm\"]=Module[\"asm\"][\"FusedBatchNorm\"]).apply(null,arguments)};var _FusedConv2D=Module[\"_FusedConv2D\"]=function(){return(_FusedConv2D=Module[\"_FusedConv2D\"]=Module[\"asm\"][\"FusedConv2D\"]).apply(null,arguments)};var _FusedDepthwiseConv2D=Module[\"_FusedDepthwiseConv2D\"]=function(){return(_FusedDepthwiseConv2D=Module[\"_FusedDepthwiseConv2D\"]=Module[\"asm\"][\"FusedDepthwiseConv2D\"]).apply(null,arguments)};var _Gather=Module[\"_Gather\"]=function(){return(_Gather=Module[\"_Gather\"]=Module[\"asm\"][\"Gather\"]).apply(null,arguments)};var _GatherNd=Module[\"_GatherNd\"]=function(){return(_GatherNd=Module[\"_GatherNd\"]=Module[\"asm\"][\"GatherNd\"]).apply(null,arguments)};var _Greater=Module[\"_Greater\"]=function(){return(_Greater=Module[\"_Greater\"]=Module[\"asm\"][\"Greater\"]).apply(null,arguments)};var _GreaterEqual=Module[\"_GreaterEqual\"]=function(){return(_GreaterEqual=Module[\"_GreaterEqual\"]=Module[\"asm\"][\"GreaterEqual\"]).apply(null,arguments)};var _Less=Module[\"_Less\"]=function(){return(_Less=Module[\"_Less\"]=Module[\"asm\"][\"Less\"]).apply(null,arguments)};var _LessEqual=Module[\"_LessEqual\"]=function(){return(_LessEqual=Module[\"_LessEqual\"]=Module[\"asm\"][\"LessEqual\"]).apply(null,arguments)};var _Log=Module[\"_Log\"]=function(){return(_Log=Module[\"_Log\"]=Module[\"asm\"][\"Log\"]).apply(null,arguments)};var _LogicalAnd=Module[\"_LogicalAnd\"]=function(){return(_LogicalAnd=Module[\"_LogicalAnd\"]=Module[\"asm\"][\"LogicalAnd\"]).apply(null,arguments)};var _Max=Module[\"_Max\"]=function(){return(_Max=Module[\"_Max\"]=Module[\"asm\"][\"Max\"]).apply(null,arguments)};var _MaxPool=Module[\"_MaxPool\"]=function(){return(_MaxPool=Module[\"_MaxPool\"]=Module[\"asm\"][\"MaxPool\"]).apply(null,arguments)};var 
_Maximum=Module[\"_Maximum\"]=function(){return(_Maximum=Module[\"_Maximum\"]=Module[\"asm\"][\"Maximum\"]).apply(null,arguments)};var _Min=Module[\"_Min\"]=function(){return(_Min=Module[\"_Min\"]=Module[\"asm\"][\"Min\"]).apply(null,arguments)};var _Minimum=Module[\"_Minimum\"]=function(){return(_Minimum=Module[\"_Minimum\"]=Module[\"asm\"][\"Minimum\"]).apply(null,arguments)};var _Multiply=Module[\"_Multiply\"]=function(){return(_Multiply=Module[\"_Multiply\"]=Module[\"asm\"][\"Multiply\"]).apply(null,arguments)};var _Negate=Module[\"_Negate\"]=function(){return(_Negate=Module[\"_Negate\"]=Module[\"asm\"][\"Negate\"]).apply(null,arguments)};var _NonMaxSuppressionV3=Module[\"_NonMaxSuppressionV3\"]=function(){return(_NonMaxSuppressionV3=Module[\"_NonMaxSuppressionV3\"]=Module[\"asm\"][\"NonMaxSuppressionV3\"]).apply(null,arguments)};var _NonMaxSuppressionV4=Module[\"_NonMaxSuppressionV4\"]=function(){return(_NonMaxSuppressionV4=Module[\"_NonMaxSuppressionV4\"]=Module[\"asm\"][\"NonMaxSuppressionV4\"]).apply(null,arguments)};var _NonMaxSuppressionV5=Module[\"_NonMaxSuppressionV5\"]=function(){return(_NonMaxSuppressionV5=Module[\"_NonMaxSuppressionV5\"]=Module[\"asm\"][\"NonMaxSuppressionV5\"]).apply(null,arguments)};var _NotEqual=Module[\"_NotEqual\"]=function(){return(_NotEqual=Module[\"_NotEqual\"]=Module[\"asm\"][\"NotEqual\"]).apply(null,arguments)};var _OneHot=Module[\"_OneHot\"]=function(){return(_OneHot=Module[\"_OneHot\"]=Module[\"asm\"][\"OneHot\"]).apply(null,arguments)};var _PadV2=Module[\"_PadV2\"]=function(){return(_PadV2=Module[\"_PadV2\"]=Module[\"asm\"][\"PadV2\"]).apply(null,arguments)};var _Pow=Module[\"_Pow\"]=function(){return(_Pow=Module[\"_Pow\"]=Module[\"asm\"][\"Pow\"]).apply(null,arguments)};var _Prelu=Module[\"_Prelu\"]=function(){return(_Prelu=Module[\"_Prelu\"]=Module[\"asm\"][\"Prelu\"]).apply(null,arguments)};var _Relu=Module[\"_Relu\"]=function(){return(_Relu=Module[\"_Relu\"]=Module[\"asm\"][\"Relu\"]).apply(null,arguments)};var _Relu6=Module[\"_Relu6\"]=function(){return(_Relu6=Module[\"_Relu6\"]=Module[\"asm\"][\"Relu6\"]).apply(null,arguments)};var _ResizeBilinear=Module[\"_ResizeBilinear\"]=function(){return(_ResizeBilinear=Module[\"_ResizeBilinear\"]=Module[\"asm\"][\"ResizeBilinear\"]).apply(null,arguments)};var _Reverse=Module[\"_Reverse\"]=function(){return(_Reverse=Module[\"_Reverse\"]=Module[\"asm\"][\"Reverse\"]).apply(null,arguments)};var _RotateWithOffset=Module[\"_RotateWithOffset\"]=function(){return(_RotateWithOffset=Module[\"_RotateWithOffset\"]=Module[\"asm\"][\"RotateWithOffset\"]).apply(null,arguments)};var _Rsqrt=Module[\"_Rsqrt\"]=function(){return(_Rsqrt=Module[\"_Rsqrt\"]=Module[\"asm\"][\"Rsqrt\"]).apply(null,arguments)};var _ScatterNd=Module[\"_ScatterNd\"]=function(){return(_ScatterNd=Module[\"_ScatterNd\"]=Module[\"asm\"][\"ScatterNd\"]).apply(null,arguments)};var _SelectV2=Module[\"_SelectV2\"]=function(){return(_SelectV2=Module[\"_SelectV2\"]=Module[\"asm\"][\"SelectV2\"]).apply(null,arguments)};var _Sigmoid=Module[\"_Sigmoid\"]=function(){return(_Sigmoid=Module[\"_Sigmoid\"]=Module[\"asm\"][\"Sigmoid\"]).apply(null,arguments)};var _Sin=Module[\"_Sin\"]=function(){return(_Sin=Module[\"_Sin\"]=Module[\"asm\"][\"Sin\"]).apply(null,arguments)};var _Softmax=Module[\"_Softmax\"]=function(){return(_Softmax=Module[\"_Softmax\"]=Module[\"asm\"][\"Softmax\"]).apply(null,arguments)};var _Sqrt=Module[\"_Sqrt\"]=function(){return(_Sqrt=Module[\"_Sqrt\"]=Module[\"asm\"][\"Sqrt\"]).apply(null,arguments)};var 
_Square=Module[\"_Square\"]=function(){return(_Square=Module[\"_Square\"]=Module[\"asm\"][\"Square\"]).apply(null,arguments)};var _SquaredDifference=Module[\"_SquaredDifference\"]=function(){return(_SquaredDifference=Module[\"_SquaredDifference\"]=Module[\"asm\"][\"SquaredDifference\"]).apply(null,arguments)};var _StridedSlice=Module[\"_StridedSlice\"]=function(){return(_StridedSlice=Module[\"_StridedSlice\"]=Module[\"asm\"][\"StridedSlice\"]).apply(null,arguments)};var _Sub=Module[\"_Sub\"]=function(){return(_Sub=Module[\"_Sub\"]=Module[\"asm\"][\"Sub\"]).apply(null,arguments)};var _Sum=Module[\"_Sum\"]=function(){return(_Sum=Module[\"_Sum\"]=Module[\"asm\"][\"Sum\"]).apply(null,arguments)};var _Tanh=Module[\"_Tanh\"]=function(){return(_Tanh=Module[\"_Tanh\"]=Module[\"asm\"][\"Tanh\"]).apply(null,arguments)};var _Tile=Module[\"_Tile\"]=function(){return(_Tile=Module[\"_Tile\"]=Module[\"asm\"][\"Tile\"]).apply(null,arguments)};var _Transpose=Module[\"_Transpose\"]=function(){return(_Transpose=Module[\"_Transpose\"]=Module[\"asm\"][\"Transpose\"]).apply(null,arguments)};var __FusedMatMul=Module[\"__FusedMatMul\"]=function(){return(__FusedMatMul=Module[\"__FusedMatMul\"]=Module[\"asm\"][\"_FusedMatMul\"]).apply(null,arguments)};var _malloc=Module[\"_malloc\"]=function(){return(_malloc=Module[\"_malloc\"]=Module[\"asm\"][\"malloc\"]).apply(null,arguments)};var _free=Module[\"_free\"]=function(){return(_free=Module[\"_free\"]=Module[\"asm\"][\"free\"]).apply(null,arguments)};var __start=Module[\"__start\"]=function(){return(__start=Module[\"__start\"]=Module[\"asm\"][\"_start\"]).apply(null,arguments)};var stackSave=Module[\"stackSave\"]=function(){return(stackSave=Module[\"stackSave\"]=Module[\"asm\"][\"stackSave\"]).apply(null,arguments)};var stackAlloc=Module[\"stackAlloc\"]=function(){return(stackAlloc=Module[\"stackAlloc\"]=Module[\"asm\"][\"stackAlloc\"]).apply(null,arguments)};var stackRestore=Module[\"stackRestore\"]=function(){return(stackRestore=Module[\"stackRestore\"]=Module[\"asm\"][\"stackRestore\"]).apply(null,arguments)};Module[\"asm\"]=asm;Module[\"cwrap\"]=cwrap;var calledRun;Module[\"then\"]=function(func){if(calledRun){func(Module)}else{var old=Module[\"onRuntimeInitialized\"];Module[\"onRuntimeInitialized\"]=function(){if(old)old();func(Module)}}return Module};function ExitStatus(status){this.name=\"ExitStatus\";this.message=\"Program terminated with exit(\"+status+\")\";this.status=status}var calledMain=false;dependenciesFulfilled=function runCaller(){if(!calledRun)run();if(!calledRun)dependenciesFulfilled=runCaller};function callMain(args){var entryFunction=Module[\"__start\"];try{entryFunction();var ret=0;exit(ret,true)}catch(e){if(e instanceof ExitStatus){return}else if(e==\"unwind\"){noExitRuntime=true;return}else{var toLog=e;if(e&&typeof e===\"object\"&&e.stack){toLog=[e,e.stack]}err(\"exception thrown: \"+toLog);quit_(1,e)}}finally{calledMain=true}}function run(args){args=args||arguments_;if(runDependencies>0){return}preRun();if(runDependencies>0)return;function doRun(){if(calledRun)return;calledRun=true;Module[\"calledRun\"]=true;if(ABORT)return;initRuntime();preMain();if(Module[\"onRuntimeInitialized\"])Module[\"onRuntimeInitialized\"]();if(shouldRunNow)callMain(args);postRun()}if(Module[\"setStatus\"]){Module[\"setStatus\"](\"Running...\");setTimeout(function(){setTimeout(function(){Module[\"setStatus\"](\"\")},1);doRun()},1)}else{doRun()}}Module[\"run\"]=run;function 
exit(status,implicit){if(implicit&&noExitRuntime&&status===0){return}if(noExitRuntime){}else{ABORT=true;EXITSTATUS=status;exitRuntime();if(Module[\"onExit\"])Module[\"onExit\"](status)}quit_(status,new ExitStatus(status))}if(Module[\"preInit\"]){if(typeof Module[\"preInit\"]==\"function\")Module[\"preInit\"]=[Module[\"preInit\"]];while(Module[\"preInit\"].length>0){Module[\"preInit\"].pop()()}}var shouldRunNow=true;if(Module[\"noInitialRun\"])shouldRunNow=false;noExitRuntime=true;run();\n\n\n return WasmBackendModule\n}\n);\n})();\nif (typeof exports === 'object' && typeof module === 'object')\n module.exports = WasmBackendModule;\n else if (typeof define === 'function' && define['amd'])\n define([], function() { return WasmBackendModule; });\n else if (typeof exports === 'object')\n exports[\"WasmBackendModule\"] = WasmBackendModule;\n ", "import { tf, loadGraphModel } from '../tf.js';\n\nconst NUM_LANDMARKS = 6;\n\nfunction generateAnchors(inputSize) {\n const spec = { strides: [inputSize / 16, inputSize / 8], anchors: [2, 6] };\n const anchors = [];\n for (let i = 0; i < spec.strides.length; i++) {\n const stride = spec.strides[i];\n const gridRows = Math.floor((inputSize + stride - 1) / stride);\n const gridCols = Math.floor((inputSize + stride - 1) / stride);\n const anchorsNum = spec.anchors[i];\n for (let gridY = 0; gridY < gridRows; gridY++) {\n const anchorY = stride * (gridY + 0.5);\n for (let gridX = 0; gridX < gridCols; gridX++) {\n const anchorX = stride * (gridX + 0.5);\n for (let n = 0; n < anchorsNum; n++) {\n anchors.push([anchorX, anchorY]);\n }\n }\n }\n }\n return anchors;\n}\n\nconst disposeBox = (box) => {\n box.startEndTensor.dispose();\n box.startPoint.dispose();\n box.endPoint.dispose();\n};\n\nconst createBox = (startEndTensor) => ({\n startEndTensor,\n startPoint: tf.slice(startEndTensor, [0, 0], [-1, 2]),\n endPoint: tf.slice(startEndTensor, [0, 2], [-1, 2]),\n});\n\nconst scaleBox = (box, factors) => {\n const starts = tf.mul(box.startPoint, factors);\n const ends = tf.mul(box.endPoint, factors);\n const newCoordinates = tf.concat2d([starts, ends], 1);\n return createBox(newCoordinates);\n};\n\nfunction decodeBounds(boxOutputs, anchors, inputSize) {\n const boxStarts = tf.slice(boxOutputs, [0, 1], [-1, 2]);\n const centers = tf.add(boxStarts, anchors);\n const boxSizes = tf.slice(boxOutputs, [0, 3], [-1, 2]);\n const boxSizesNormalized = tf.div(boxSizes, inputSize);\n const centersNormalized = tf.div(centers, inputSize);\n const halfBoxSize = tf.div(boxSizesNormalized, 2);\n const starts = tf.sub(centersNormalized, halfBoxSize);\n const ends = tf.add(centersNormalized, halfBoxSize);\n const startNormalized = tf.mul(starts, inputSize);\n const endNormalized = tf.mul(ends, inputSize);\n const concatAxis = 1;\n return tf.concat2d([startNormalized, endNormalized], concatAxis);\n}\n\nfunction scaleBoxFromPrediction(face, scaleFactor) {\n return tf.tidy(() => {\n const box = face['box'] ? 
face['box'] : face;\n return scaleBox(box, scaleFactor).startEndTensor.squeeze();\n });\n}\n\nclass BlazeFaceModel {\n constructor(model, config) {\n this.blazeFaceModel = model;\n this.width = config.detector.inputSize;\n this.height = config.detector.inputSize;\n this.anchorsData = generateAnchors(config.detector.inputSize);\n this.anchors = tf.tensor2d(this.anchorsData);\n this.inputSize = tf.tensor1d([this.width, this.height]);\n this.config = config;\n this.scaleFaces = 0.8;\n }\n\n async getBoundingBoxes(inputImage) {\n // sanity check on input\n if ((!inputImage) || (inputImage.isDisposedInternal) || (inputImage.shape.length !== 4) || (inputImage.shape[1] < 1) || (inputImage.shape[2] < 1)) return null;\n const [detectedOutputs, boxes, scores] = tf.tidy(() => {\n const resizedImage = inputImage.resizeBilinear([this.width, this.height]);\n // const normalizedImage = tf.mul(tf.sub(resizedImage.div(255), 0.5), 2);\n const normalizedImage = tf.sub(resizedImage.div(127.5), 1);\n const batchedPrediction = this.blazeFaceModel.predict(normalizedImage);\n let prediction;\n // are we using tfhub or pinto converted model?\n if (Array.isArray(batchedPrediction)) {\n const sorted = batchedPrediction.sort((a, b) => a.size - b.size);\n const concat384 = tf.concat([sorted[0], sorted[2]], 2); // dim: 384, 1 + 16\n const concat512 = tf.concat([sorted[1], sorted[3]], 2); // dim: 512, 1 + 16\n const concat = tf.concat([concat512, concat384], 1);\n prediction = concat.squeeze(0);\n } else {\n prediction = batchedPrediction.squeeze(); // when using tfhub model\n }\n const decodedBounds = decodeBounds(prediction, this.anchors, this.inputSize);\n const logits = tf.slice(prediction, [0, 0], [-1, 1]);\n const scoresOut = tf.sigmoid(logits).squeeze();\n return [prediction, decodedBounds, scoresOut];\n });\n const boxIndicesTensor = await tf.image.nonMaxSuppressionAsync(boxes, scores, this.config.detector.maxFaces, this.config.detector.iouThreshold, this.config.detector.scoreThreshold);\n const boxIndices = boxIndicesTensor.arraySync();\n boxIndicesTensor.dispose();\n const boundingBoxesMap = boxIndices.map((boxIndex) => tf.slice(boxes, [boxIndex, 0], [1, -1]));\n const boundingBoxes = boundingBoxesMap.map((boundingBox) => {\n const vals = boundingBox.arraySync();\n boundingBox.dispose();\n return vals;\n });\n\n const scoresVal = scores.dataSync();\n const annotatedBoxes = [];\n for (const i in boundingBoxes) {\n const boxIndex = boxIndices[i];\n const confidence = scoresVal[boxIndex];\n if (confidence > this.config.detector.minConfidence) {\n const box = createBox(boundingBoxes[i]);\n const anchor = this.anchorsData[boxIndex];\n const landmarks = tf.tidy(() => tf.slice(detectedOutputs, [boxIndex, NUM_LANDMARKS - 1], [1, -1]).squeeze().reshape([NUM_LANDMARKS, -1]));\n annotatedBoxes.push({ box, landmarks, anchor, confidence });\n }\n }\n detectedOutputs.dispose();\n boxes.dispose();\n scores.dispose();\n detectedOutputs.dispose();\n return {\n boxes: annotatedBoxes,\n scaleFactor: [inputImage.shape[2] / this.width, inputImage.shape[1] / this.height],\n };\n }\n\n async estimateFaces(input) {\n const { boxes, scaleFactor } = await this.getBoundingBoxes(input);\n const faces = [];\n for (const face of boxes) {\n const landmarkData = face.landmarks.arraySync();\n const scaledBox = scaleBoxFromPrediction(face, scaleFactor);\n const boxData = scaleBox.arraySync();\n const probabilityData = face.probability.arraySync();\n const anchor = face.anchor;\n const [scaleFactorX, scaleFactorY] = scaleFactor;\n const 
scaledLandmarks = landmarkData\n .map((landmark) => ([\n (landmark[0] + anchor[0]) * scaleFactorX,\n (landmark[1] + anchor[1]) * scaleFactorY,\n ]));\n const normalizedFace = {\n topLeft: boxData.slice(0, 2),\n bottomRight: boxData.slice(2),\n landmarks: scaledLandmarks,\n probability: probabilityData,\n };\n disposeBox(face.box);\n face.landmarks.dispose();\n face.probability.dispose();\n scaledBox.dispose();\n faces.push(normalizedFace);\n }\n return faces;\n }\n}\n\nasync function load(config) {\n const blazeface = await loadGraphModel(config.detector.modelPath, { fromTFHub: config.detector.modelPath.includes('tfhub.dev') });\n const model = new BlazeFaceModel(blazeface, config);\n // eslint-disable-next-line no-console\n console.log(`Human: load model: ${config.detector.modelPath.match(/\\/(.*)\\./)[1]}`);\n return model;\n}\n\nexports.load = load;\nexports.BlazeFaceModel = BlazeFaceModel;\nexports.disposeBox = disposeBox;\n", "exports.MESH_ANNOTATIONS = {\n silhouette: [\n 10, 338, 297, 332, 284, 251, 389, 356, 454, 323, 361, 288,\n 397, 365, 379, 378, 400, 377, 152, 148, 176, 149, 150, 136,\n 172, 58, 132, 93, 234, 127, 162, 21, 54, 103, 67, 109,\n ],\n lipsUpperOuter: [61, 185, 40, 39, 37, 0, 267, 269, 270, 409, 291],\n lipsLowerOuter: [146, 91, 181, 84, 17, 314, 405, 321, 375, 291],\n lipsUpperInner: [78, 191, 80, 81, 82, 13, 312, 311, 310, 415, 308],\n lipsLowerInner: [78, 95, 88, 178, 87, 14, 317, 402, 318, 324, 308],\n rightEyeUpper0: [246, 161, 160, 159, 158, 157, 173],\n rightEyeLower0: [33, 7, 163, 144, 145, 153, 154, 155, 133],\n rightEyeUpper1: [247, 30, 29, 27, 28, 56, 190],\n rightEyeLower1: [130, 25, 110, 24, 23, 22, 26, 112, 243],\n rightEyeUpper2: [113, 225, 224, 223, 222, 221, 189],\n rightEyeLower2: [226, 31, 228, 229, 230, 231, 232, 233, 244],\n rightEyeLower3: [143, 111, 117, 118, 119, 120, 121, 128, 245],\n rightEyebrowUpper: [156, 70, 63, 105, 66, 107, 55, 193],\n rightEyebrowLower: [35, 124, 46, 53, 52, 65],\n rightEyeIris: [473, 474, 475, 476, 477],\n leftEyeUpper0: [466, 388, 387, 386, 385, 384, 398],\n leftEyeLower0: [263, 249, 390, 373, 374, 380, 381, 382, 362],\n leftEyeUpper1: [467, 260, 259, 257, 258, 286, 414],\n leftEyeLower1: [359, 255, 339, 254, 253, 252, 256, 341, 463],\n leftEyeUpper2: [342, 445, 444, 443, 442, 441, 413],\n leftEyeLower2: [446, 261, 448, 449, 450, 451, 452, 453, 464],\n leftEyeLower3: [372, 340, 346, 347, 348, 349, 350, 357, 465],\n leftEyebrowUpper: [383, 300, 293, 334, 296, 336, 285, 417],\n leftEyebrowLower: [265, 353, 276, 283, 282, 295],\n leftEyeIris: [468, 469, 470, 471, 472],\n midwayBetweenEyes: [168],\n noseTip: [1],\n noseBottom: [2],\n noseRightCorner: [98],\n noseLeftCorner: [327],\n rightCheek: [205],\n leftCheek: [425],\n};\nexports.MESH_TO_IRIS_INDICES_MAP = [ // A mapping from facemesh model keypoints to iris model keypoints.\n { key: 'EyeUpper0', indices: [9, 10, 11, 12, 13, 14, 15] },\n { key: 'EyeUpper1', indices: [25, 26, 27, 28, 29, 30, 31] },\n { key: 'EyeUpper2', indices: [41, 42, 43, 44, 45, 46, 47] },\n { key: 'EyeLower0', indices: [0, 1, 2, 3, 4, 5, 6, 7, 8] },\n { key: 'EyeLower1', indices: [16, 17, 18, 19, 20, 21, 22, 23, 24] },\n { key: 'EyeLower2', indices: [32, 33, 34, 35, 36, 37, 38, 39, 40] },\n { key: 'EyeLower3', indices: [54, 55, 56, 57, 58, 59, 60, 61, 62] },\n { key: 'EyebrowUpper', indices: [63, 64, 65, 66, 67, 68, 69, 70] },\n { key: 'EyebrowLower', indices: [48, 49, 50, 51, 52, 53] },\n];\n", "import { tf } from '../tf.js';\n\nfunction scaleBoxCoordinates(box, factor) {\n const startPoint = 
[box.startPoint[0] * factor[0], box.startPoint[1] * factor[1]];\n const endPoint = [box.endPoint[0] * factor[0], box.endPoint[1] * factor[1]];\n return { startPoint, endPoint };\n}\nexports.scaleBoxCoordinates = scaleBoxCoordinates;\n\nfunction getBoxSize(box) {\n return [\n Math.abs(box.endPoint[0] - box.startPoint[0]),\n Math.abs(box.endPoint[1] - box.startPoint[1]),\n ];\n}\nexports.getBoxSize = getBoxSize;\n\nfunction getBoxCenter(box) {\n return [\n box.startPoint[0] + (box.endPoint[0] - box.startPoint[0]) / 2,\n box.startPoint[1] + (box.endPoint[1] - box.startPoint[1]) / 2,\n ];\n}\nexports.getBoxCenter = getBoxCenter;\n\nfunction cutBoxFromImageAndResize(box, image, cropSize) {\n const h = image.shape[1];\n const w = image.shape[2];\n const boxes = [[\n box.startPoint[1] / h, box.startPoint[0] / w, box.endPoint[1] / h,\n box.endPoint[0] / w,\n ]];\n return tf.image.cropAndResize(image, boxes, [0], cropSize);\n}\nexports.cutBoxFromImageAndResize = cutBoxFromImageAndResize;\n\nfunction enlargeBox(box, factor = 1.5) {\n const center = getBoxCenter(box);\n const size = getBoxSize(box);\n const newHalfSize = [factor * size[0] / 2, factor * size[1] / 2];\n const startPoint = [center[0] - newHalfSize[0], center[1] - newHalfSize[1]];\n const endPoint = [center[0] + newHalfSize[0], center[1] + newHalfSize[1]];\n return { startPoint, endPoint, landmarks: box.landmarks };\n}\nexports.enlargeBox = enlargeBox;\n\nfunction squarifyBox(box) {\n const centers = getBoxCenter(box);\n const size = getBoxSize(box);\n const maxEdge = Math.max(...size);\n const halfSize = maxEdge / 2;\n const startPoint = [centers[0] - halfSize, centers[1] - halfSize];\n const endPoint = [centers[0] + halfSize, centers[1] + halfSize];\n return { startPoint, endPoint, landmarks: box.landmarks };\n}\nexports.squarifyBox = squarifyBox;\n", "exports.IDENTITY_MATRIX = [[1, 0, 0], [0, 1, 0], [0, 0, 1]];\n/**\n * Normalizes the provided angle to the range -pi to pi.\n * @param angle The angle in radians to be normalized.\n */\nfunction normalizeRadians(angle) {\n return angle - 2 * Math.PI * Math.floor((angle + Math.PI) / (2 * Math.PI));\n}\nexports.normalizeRadians = normalizeRadians;\n/**\n * Computes the angle of rotation between two anchor points.\n * @param point1 First anchor point\n * @param point2 Second anchor point\n */\nfunction computeRotation(point1, point2) {\n const radians = Math.PI / 2 - Math.atan2(-(point2[1] - point1[1]), point2[0] - point1[0]);\n return normalizeRadians(radians);\n}\nexports.computeRotation = computeRotation;\nfunction radToDegrees(rad) {\n return rad * 180 / Math.PI;\n}\nexports.radToDegrees = radToDegrees;\nfunction buildTranslationMatrix(x, y) {\n return [[1, 0, x], [0, 1, y], [0, 0, 1]];\n}\nfunction dot(v1, v2) {\n let product = 0;\n for (let i = 0; i < v1.length; i++) {\n product += v1[i] * v2[i];\n }\n return product;\n}\nexports.dot = dot;\nfunction getColumnFrom2DArr(arr, columnIndex) {\n const column = [];\n for (let i = 0; i < arr.length; i++) {\n column.push(arr[i][columnIndex]);\n }\n return column;\n}\nexports.getColumnFrom2DArr = getColumnFrom2DArr;\nfunction multiplyTransformMatrices(mat1, mat2) {\n const product = [];\n const size = mat1.length;\n for (let row = 0; row < size; row++) {\n product.push([]);\n for (let col = 0; col < size; col++) {\n product[row].push(dot(mat1[row], getColumnFrom2DArr(mat2, col)));\n }\n }\n return product;\n}\nfunction buildRotationMatrix(rotation, center) {\n const cosA = Math.cos(rotation);\n const sinA = Math.sin(rotation);\n const 
rotationMatrix = [[cosA, -sinA, 0], [sinA, cosA, 0], [0, 0, 1]];\n const translationMatrix = buildTranslationMatrix(center[0], center[1]);\n const translationTimesRotation = multiplyTransformMatrices(translationMatrix, rotationMatrix);\n const negativeTranslationMatrix = buildTranslationMatrix(-center[0], -center[1]);\n return multiplyTransformMatrices(translationTimesRotation, negativeTranslationMatrix);\n}\nexports.buildRotationMatrix = buildRotationMatrix;\nfunction invertTransformMatrix(matrix) {\n const rotationComponent = [[matrix[0][0], matrix[1][0]], [matrix[0][1], matrix[1][1]]];\n const translationComponent = [matrix[0][2], matrix[1][2]];\n const invertedTranslation = [\n -dot(rotationComponent[0], translationComponent),\n -dot(rotationComponent[1], translationComponent),\n ];\n return [\n rotationComponent[0].concat(invertedTranslation[0]),\n rotationComponent[1].concat(invertedTranslation[1]),\n [0, 0, 1],\n ];\n}\nexports.invertTransformMatrix = invertTransformMatrix;\nfunction rotatePoint(homogeneousCoordinate, rotationMatrix) {\n return [\n dot(homogeneousCoordinate, rotationMatrix[0]),\n dot(homogeneousCoordinate, rotationMatrix[1]),\n ];\n}\nexports.rotatePoint = rotatePoint;\nfunction xyDistanceBetweenPoints(a, b) {\n return Math.sqrt(((a[0] - b[0]) ** 2) + ((a[1] - b[1]) ** 2));\n}\nexports.xyDistanceBetweenPoints = xyDistanceBetweenPoints;\n", "/* eslint-disable class-methods-use-this */\nimport { tf } from '../tf.js';\nimport * as bounding from './box';\nimport * as keypoints from './keypoints';\nimport * as util from './util';\n\nconst LANDMARKS_COUNT = 468;\nconst MESH_MOUTH_INDEX = 13;\nconst MESH_KEYPOINTS_LINE_OF_SYMMETRY_INDICES = [MESH_MOUTH_INDEX, keypoints.MESH_ANNOTATIONS['midwayBetweenEyes'][0]];\nconst BLAZEFACE_MOUTH_INDEX = 3;\nconst BLAZEFACE_NOSE_INDEX = 2;\nconst BLAZEFACE_KEYPOINTS_LINE_OF_SYMMETRY_INDICES = [BLAZEFACE_MOUTH_INDEX, BLAZEFACE_NOSE_INDEX];\nconst LEFT_EYE_OUTLINE = keypoints.MESH_ANNOTATIONS['leftEyeLower0'];\nconst LEFT_EYE_BOUNDS = [LEFT_EYE_OUTLINE[0], LEFT_EYE_OUTLINE[LEFT_EYE_OUTLINE.length - 1]];\nconst RIGHT_EYE_OUTLINE = keypoints.MESH_ANNOTATIONS['rightEyeLower0'];\nconst RIGHT_EYE_BOUNDS = [RIGHT_EYE_OUTLINE[0], RIGHT_EYE_OUTLINE[RIGHT_EYE_OUTLINE.length - 1]];\nconst IRIS_UPPER_CENTER_INDEX = 3;\nconst IRIS_LOWER_CENTER_INDEX = 4;\nconst IRIS_IRIS_INDEX = 71;\nconst IRIS_NUM_COORDINATES = 76;\n\n// Replace the raw coordinates returned by facemesh with refined iris model coordinates. Update the z coordinate to be an average of the original and the new. 
This produces the best visual effect.\nfunction replaceRawCoordinates(rawCoords, newCoords, prefix, keys) {\n for (let i = 0; i < keypoints.MESH_TO_IRIS_INDICES_MAP.length; i++) {\n const { key, indices } = keypoints.MESH_TO_IRIS_INDICES_MAP[i];\n const originalIndices = keypoints.MESH_ANNOTATIONS[`${prefix}${key}`];\n const shouldReplaceAllKeys = keys == null;\n if (shouldReplaceAllKeys || keys.includes(key)) {\n for (let j = 0; j < indices.length; j++) {\n const index = indices[j];\n rawCoords[originalIndices[j]] = [\n newCoords[index][0], newCoords[index][1],\n (newCoords[index][2] + rawCoords[originalIndices[j]][2]) / 2,\n ];\n }\n }\n }\n}\n// The Pipeline coordinates between the bounding box and skeleton models.\nclass Pipeline {\n constructor(boundingBoxDetector, meshDetector, irisModel, config) {\n // An array of facial bounding boxes.\n this.storedBoxes = [];\n this.runsWithoutFaceDetector = 0;\n this.boundingBoxDetector = boundingBoxDetector;\n this.meshDetector = meshDetector;\n this.irisModel = irisModel;\n this.meshWidth = config.mesh.inputSize;\n this.meshHeight = config.mesh.inputSize;\n this.irisSize = config.iris.inputSize;\n this.irisEnlarge = 2.3;\n this.skipped = 1000;\n this.detectedFaces = 0;\n }\n\n transformRawCoords(rawCoords, box, angle, rotationMatrix) {\n const boxSize = bounding.getBoxSize({ startPoint: box.startPoint, endPoint: box.endPoint });\n const scaleFactor = [boxSize[0] / this.meshWidth, boxSize[1] / this.meshHeight];\n const coordsScaled = rawCoords.map((coord) => ([\n scaleFactor[0] * (coord[0] - this.meshWidth / 2),\n scaleFactor[1] * (coord[1] - this.meshHeight / 2), coord[2],\n ]));\n const coordsRotationMatrix = util.buildRotationMatrix(angle, [0, 0]);\n const coordsRotated = coordsScaled.map((coord) => ([...util.rotatePoint(coord, coordsRotationMatrix), coord[2]]));\n const inverseRotationMatrix = util.invertTransformMatrix(rotationMatrix);\n const boxCenter = [...bounding.getBoxCenter({ startPoint: box.startPoint, endPoint: box.endPoint }), 1];\n const originalBoxCenter = [\n util.dot(boxCenter, inverseRotationMatrix[0]),\n util.dot(boxCenter, inverseRotationMatrix[1]),\n ];\n return coordsRotated.map((coord) => ([\n coord[0] + originalBoxCenter[0],\n coord[1] + originalBoxCenter[1], coord[2],\n ]));\n }\n\n getLeftToRightEyeDepthDifference(rawCoords) {\n const leftEyeZ = rawCoords[LEFT_EYE_BOUNDS[0]][2];\n const rightEyeZ = rawCoords[RIGHT_EYE_BOUNDS[0]][2];\n return leftEyeZ - rightEyeZ;\n }\n\n // Returns a box describing a cropped region around the eye fit for passing to the iris model.\n getEyeBox(rawCoords, face, eyeInnerCornerIndex, eyeOuterCornerIndex, flip = false) {\n const box = bounding.squarifyBox(bounding.enlargeBox(this.calculateLandmarksBoundingBox([rawCoords[eyeInnerCornerIndex], rawCoords[eyeOuterCornerIndex]]), this.irisEnlarge));\n const boxSize = bounding.getBoxSize(box);\n let crop = tf.image.cropAndResize(face, [[\n box.startPoint[1] / this.meshHeight,\n box.startPoint[0] / this.meshWidth, box.endPoint[1] / this.meshHeight,\n box.endPoint[0] / this.meshWidth,\n ]], [0], [this.irisSize, this.irisSize]);\n if (flip) {\n crop = tf.image.flipLeftRight(crop);\n }\n return { box, boxSize, crop };\n }\n\n // Given a cropped image of an eye, returns the coordinates of the contours surrounding the eye and the iris.\n getEyeCoords(eyeData, eyeBox, eyeBoxSize, flip = false) {\n const eyeRawCoords = [];\n for (let i = 0; i < IRIS_NUM_COORDINATES; i++) {\n const x = eyeData[i * 3];\n const y = eyeData[i * 3 + 1];\n const z = eyeData[i 
* 3 + 2];\n eyeRawCoords.push([\n (flip\n ? (1 - (x / this.irisSize))\n : (x / this.irisSize)) * eyeBoxSize[0] + eyeBox.startPoint[0],\n (y / this.irisSize) * eyeBoxSize[1] + eyeBox.startPoint[1], z,\n ]);\n }\n return { rawCoords: eyeRawCoords, iris: eyeRawCoords.slice(IRIS_IRIS_INDEX) };\n }\n\n // The z-coordinates returned for the iris are unreliable, so we take the z values from the surrounding keypoints.\n getAdjustedIrisCoords(rawCoords, irisCoords, direction) {\n const upperCenterZ = rawCoords[keypoints.MESH_ANNOTATIONS[`${direction}EyeUpper0`][IRIS_UPPER_CENTER_INDEX]][2];\n const lowerCenterZ = rawCoords[keypoints.MESH_ANNOTATIONS[`${direction}EyeLower0`][IRIS_LOWER_CENTER_INDEX]][2];\n const averageZ = (upperCenterZ + lowerCenterZ) / 2;\n // Iris indices: 0: center | 1: right | 2: above | 3: left | 4: below\n return irisCoords.map((coord, i) => {\n let z = averageZ;\n if (i === 2) {\n z = upperCenterZ;\n } else if (i === 4) {\n z = lowerCenterZ;\n }\n return [coord[0], coord[1], z];\n });\n }\n\n async predict(input, config) {\n this.skipped++;\n let useFreshBox = false;\n // run new detector every skipFrames unless we only want box to start with\n let detector;\n if ((this.skipped > config.detector.skipFrames) || !config.mesh.enabled) {\n detector = await this.boundingBoxDetector.getBoundingBoxes(input);\n // don't reset on test image\n if ((input.shape[1] !== 255) && (input.shape[2] !== 255)) this.skipped = 0;\n }\n\n // if detector result count doesn't match current working set, use it to reset current working set\n if (detector && detector.boxes && (detector.boxes.length > 0) && (!config.mesh.enabled || (detector.boxes.length !== this.detectedFaces) && (this.detectedFaces !== config.detector.maxFaces))) {\n this.storedBoxes = [];\n this.detectedFaces = 0;\n for (const possible of detector.boxes) {\n this.storedBoxes.push({ startPoint: possible.box.startPoint.dataSync(), endPoint: possible.box.endPoint.dataSync(), landmarks: possible.landmarks, confidence: possible.confidence });\n }\n if (this.storedBoxes.length > 0) useFreshBox = true;\n }\n\n if (useFreshBox) {\n if (!detector || !detector.boxes || (detector.boxes.length === 0)) {\n this.storedBoxes = [];\n this.detectedFaces = 0;\n return null;\n }\n for (const i in this.storedBoxes) {\n const scaledBox = bounding.scaleBoxCoordinates({ startPoint: this.storedBoxes[i].startPoint, endPoint: this.storedBoxes[i].endPoint }, detector.scaleFactor);\n const enlargedBox = bounding.enlargeBox(scaledBox);\n const landmarks = this.storedBoxes[i].landmarks.arraySync();\n const confidence = this.storedBoxes[i].confidence;\n this.storedBoxes[i] = { ...enlargedBox, confidence, landmarks };\n }\n this.runsWithoutFaceDetector = 0;\n }\n if (detector && detector.boxes) {\n detector.boxes.forEach((prediction) => {\n prediction.box.startPoint.dispose();\n prediction.box.endPoint.dispose();\n prediction.landmarks.dispose();\n });\n }\n\n // console.log(this.skipped, config.detector.skipFrames, this.detectedFaces, config.detector.maxFaces, detector?.boxes.length, this.storedBoxes.length);\n\n let results = tf.tidy(() => this.storedBoxes.map((box, i) => {\n let angle = 0;\n // The facial bounding box landmarks could come either from blazeface (if we are using a fresh box), or from the mesh model (if we are reusing an old box).\n const boxLandmarksFromMeshModel = box.landmarks.length >= LANDMARKS_COUNT;\n let [indexOfMouth, indexOfForehead] = MESH_KEYPOINTS_LINE_OF_SYMMETRY_INDICES;\n if (boxLandmarksFromMeshModel === false) {\n [indexOfMouth, 
indexOfForehead] = BLAZEFACE_KEYPOINTS_LINE_OF_SYMMETRY_INDICES;\n }\n angle = util.computeRotation(box.landmarks[indexOfMouth], box.landmarks[indexOfForehead]);\n const faceCenter = bounding.getBoxCenter({ startPoint: box.startPoint, endPoint: box.endPoint });\n const faceCenterNormalized = [faceCenter[0] / input.shape[2], faceCenter[1] / input.shape[1]];\n let rotatedImage = input;\n let rotationMatrix = util.IDENTITY_MATRIX;\n if (angle !== 0) {\n rotatedImage = tf.image.rotateWithOffset(input, angle, 0, faceCenterNormalized);\n rotationMatrix = util.buildRotationMatrix(-angle, faceCenter);\n }\n const boxCPU = { startPoint: box.startPoint, endPoint: box.endPoint };\n const face = bounding.cutBoxFromImageAndResize(boxCPU, rotatedImage, [this.meshHeight, this.meshWidth]).div(255);\n\n // if we're not going to produce mesh, don't spend time with further processing\n if (!config.mesh.enabled) {\n const prediction = {\n coords: null,\n box,\n faceConfidence: null,\n confidence: box.confidence,\n image: face,\n };\n return prediction;\n }\n\n // The first returned tensor represents facial contours, which are included in the coordinates.\n const [, confidence, coords] = this.meshDetector.predict(face);\n const confidenceVal = confidence.dataSync()[0];\n confidence.dispose();\n if (confidenceVal < config.detector.minConfidence) {\n coords.dispose();\n return null;\n }\n const coordsReshaped = tf.reshape(coords, [-1, 3]);\n let rawCoords = coordsReshaped.arraySync();\n if (config.iris.enabled) {\n const { box: leftEyeBox, boxSize: leftEyeBoxSize, crop: leftEyeCrop } = this.getEyeBox(rawCoords, face, LEFT_EYE_BOUNDS[0], LEFT_EYE_BOUNDS[1], true);\n const { box: rightEyeBox, boxSize: rightEyeBoxSize, crop: rightEyeCrop } = this.getEyeBox(rawCoords, face, RIGHT_EYE_BOUNDS[0], RIGHT_EYE_BOUNDS[1]);\n const eyePredictions = (this.irisModel.predict(tf.concat([leftEyeCrop, rightEyeCrop])));\n const eyePredictionsData = eyePredictions.dataSync();\n eyePredictions.dispose();\n const leftEyeData = eyePredictionsData.slice(0, IRIS_NUM_COORDINATES * 3);\n const { rawCoords: leftEyeRawCoords, iris: leftIrisRawCoords } = this.getEyeCoords(leftEyeData, leftEyeBox, leftEyeBoxSize, true);\n const rightEyeData = eyePredictionsData.slice(IRIS_NUM_COORDINATES * 3);\n const { rawCoords: rightEyeRawCoords, iris: rightIrisRawCoords } = this.getEyeCoords(rightEyeData, rightEyeBox, rightEyeBoxSize);\n const leftToRightEyeDepthDifference = this.getLeftToRightEyeDepthDifference(rawCoords);\n if (Math.abs(leftToRightEyeDepthDifference) < 30) { // User is looking straight ahead.\n replaceRawCoordinates(rawCoords, leftEyeRawCoords, 'left');\n replaceRawCoordinates(rawCoords, rightEyeRawCoords, 'right');\n // If the user is looking to the left or to the right, the iris coordinates tend to diverge too much from the mesh coordinates for them to be merged. 
So we only update a single contour line above and below the eye.\n } else if (leftToRightEyeDepthDifference < 1) { // User is looking towards the right.\n replaceRawCoordinates(rawCoords, leftEyeRawCoords, 'left', ['EyeUpper0', 'EyeLower0']);\n } else { // User is looking towards the left.\n replaceRawCoordinates(rawCoords, rightEyeRawCoords, 'right', ['EyeUpper0', 'EyeLower0']);\n }\n const adjustedLeftIrisCoords = this.getAdjustedIrisCoords(rawCoords, leftIrisRawCoords, 'left');\n const adjustedRightIrisCoords = this.getAdjustedIrisCoords(rawCoords, rightIrisRawCoords, 'right');\n rawCoords = rawCoords.concat(adjustedLeftIrisCoords).concat(adjustedRightIrisCoords);\n }\n const transformedCoordsData = this.transformRawCoords(rawCoords, box, angle, rotationMatrix);\n tf.dispose(rawCoords);\n const landmarksBox = bounding.enlargeBox(this.calculateLandmarksBoundingBox(transformedCoordsData));\n const transformedCoords = tf.tensor2d(transformedCoordsData);\n const prediction = {\n coords: transformedCoords,\n box: landmarksBox,\n faceConfidence: confidenceVal,\n confidence: box.confidence,\n image: face,\n };\n this.storedBoxes[i] = { ...landmarksBox, landmarks: transformedCoords.arraySync(), confidence: box.confidence, faceConfidence: confidenceVal };\n return prediction;\n }));\n results = results.filter((a) => a !== null);\n this.detectedFaces = results.length;\n return results;\n }\n\n calculateLandmarksBoundingBox(landmarks) {\n const xs = landmarks.map((d) => d[0]);\n const ys = landmarks.map((d) => d[1]);\n const startPoint = [Math.min(...xs), Math.min(...ys)];\n const endPoint = [Math.max(...xs), Math.max(...ys)];\n return { startPoint, endPoint, landmarks };\n }\n}\nexports.Pipeline = Pipeline;\n", "exports.UV_COORDS = [\n [0.499976992607117, 0.652534008026123],\n [0.500025987625122, 0.547487020492554],\n [0.499974012374878, 0.602371990680695],\n [0.482113003730774, 0.471979022026062],\n [0.500150978565216, 0.527155995368958],\n [0.499909996986389, 0.498252987861633],\n [0.499523013830185, 0.40106201171875],\n [0.289712011814117, 0.380764007568359],\n [0.499954998493195, 0.312398016452789],\n [0.499987006187439, 0.269918978214264],\n [0.500023007392883, 0.107050001621246],\n [0.500023007392883, 0.666234016418457],\n [0.5000159740448, 0.679224014282227],\n [0.500023007392883, 0.692348003387451],\n [0.499976992607117, 0.695277988910675],\n [0.499976992607117, 0.70593398809433],\n [0.499976992607117, 0.719385027885437],\n [0.499976992607117, 0.737019002437592],\n [0.499967992305756, 0.781370997428894],\n [0.499816000461578, 0.562981009483337],\n [0.473773002624512, 0.573909997940063],\n [0.104906998574734, 0.254140973091125],\n [0.365929991006851, 0.409575998783112],\n [0.338757991790771, 0.41302502155304],\n [0.311120003461838, 0.409460008144379],\n [0.274657994508743, 0.389131009578705],\n [0.393361985683441, 0.403706014156342],\n [0.345234006643295, 0.344011008739471],\n [0.370094001293182, 0.346076011657715],\n [0.319321990013123, 0.347265005111694],\n [0.297903001308441, 0.353591024875641],\n [0.24779200553894, 0.410809993743896],\n [0.396889001131058, 0.842755019664764],\n [0.280097991228104, 0.375599980354309],\n [0.106310002505779, 0.399955987930298],\n [0.2099249958992, 0.391353011131287],\n [0.355807989835739, 0.534406006336212],\n [0.471751004457474, 0.65040397644043],\n [0.474155008792877, 0.680191993713379],\n [0.439785003662109, 0.657229006290436],\n [0.414617002010345, 0.66654098033905],\n [0.450374007225037, 0.680860996246338],\n [0.428770989179611, 0.682690978050232],\n 
[0.374971002340317, 0.727805018424988],\n [0.486716985702515, 0.547628998756409],\n [0.485300987958908, 0.527395009994507],\n [0.257764995098114, 0.314490020275116],\n [0.401223003864288, 0.455172002315521],\n [0.429818987846375, 0.548614978790283],\n [0.421351999044418, 0.533740997314453],\n [0.276895999908447, 0.532056987285614],\n [0.483370006084442, 0.499586999416351],\n [0.33721199631691, 0.282882988452911],\n [0.296391993761063, 0.293242990970612],\n [0.169294998049736, 0.193813979625702],\n [0.447580009698868, 0.302609980106354],\n [0.392390012741089, 0.353887975215912],\n [0.354490011930466, 0.696784019470215],\n [0.067304998636246, 0.730105042457581],\n [0.442739009857178, 0.572826027870178],\n [0.457098007202148, 0.584792017936707],\n [0.381974011659622, 0.694710969924927],\n [0.392388999462128, 0.694203019142151],\n [0.277076005935669, 0.271932005882263],\n [0.422551989555359, 0.563233017921448],\n [0.385919004678726, 0.281364023685455],\n [0.383103013038635, 0.255840003490448],\n [0.331431001424789, 0.119714021682739],\n [0.229923993349075, 0.232002973556519],\n [0.364500999450684, 0.189113974571228],\n [0.229622006416321, 0.299540996551514],\n [0.173287004232407, 0.278747975826263],\n [0.472878992557526, 0.666198015213013],\n [0.446828007698059, 0.668527007102966],\n [0.422762006521225, 0.673889994621277],\n [0.445307999849319, 0.580065965652466],\n [0.388103008270264, 0.693961024284363],\n [0.403039008378983, 0.706539988517761],\n [0.403629004955292, 0.693953037261963],\n [0.460041999816895, 0.557139039039612],\n [0.431158006191254, 0.692366003990173],\n [0.452181994915009, 0.692366003990173],\n [0.475387006998062, 0.692366003990173],\n [0.465828001499176, 0.779190003871918],\n [0.472328990697861, 0.736225962638855],\n [0.473087012767792, 0.717857003211975],\n [0.473122000694275, 0.704625964164734],\n [0.473033010959625, 0.695277988910675],\n [0.427942007780075, 0.695277988910675],\n [0.426479011774063, 0.703539967536926],\n [0.423162013292313, 0.711845993995667],\n [0.4183090031147, 0.720062971115112],\n [0.390094995498657, 0.639572978019714],\n [0.013953999616206, 0.560034036636353],\n [0.499913990497589, 0.58014702796936],\n [0.413199990987778, 0.69539999961853],\n [0.409626007080078, 0.701822996139526],\n [0.468080013990402, 0.601534962654114],\n [0.422728985548019, 0.585985004901886],\n [0.463079988956451, 0.593783974647522],\n [0.37211999297142, 0.47341400384903],\n [0.334562003612518, 0.496073007583618],\n [0.411671012639999, 0.546965003013611],\n [0.242175996303558, 0.14767599105835],\n [0.290776997804642, 0.201445996761322],\n [0.327338010072708, 0.256527006626129],\n [0.399509996175766, 0.748921036720276],\n [0.441727995872498, 0.261676013469696],\n [0.429764986038208, 0.187834024429321],\n [0.412198007106781, 0.108901023864746],\n [0.288955003023148, 0.398952007293701],\n [0.218936994671822, 0.435410976409912],\n [0.41278201341629, 0.398970007896423],\n [0.257135003805161, 0.355440020561218],\n [0.427684992551804, 0.437960982322693],\n [0.448339998722076, 0.536936044692993],\n [0.178560003638268, 0.45755398273468],\n [0.247308000922203, 0.457193970680237],\n [0.286267012357712, 0.467674970626831],\n [0.332827985286713, 0.460712015628815],\n [0.368755996227264, 0.447206974029541],\n [0.398963987827301, 0.432654976844788],\n [0.476410001516342, 0.405806005001068],\n [0.189241006970406, 0.523923993110657],\n [0.228962004184723, 0.348950982093811],\n [0.490725994110107, 0.562400996685028],\n [0.404670000076294, 0.485132992267609],\n [0.019469000399113, 
0.401564002037048],\n [0.426243007183075, 0.420431017875671],\n [0.396993011236191, 0.548797011375427],\n [0.266469985246658, 0.376977026462555],\n [0.439121007919312, 0.51895797252655],\n [0.032313998788595, 0.644356966018677],\n [0.419054001569748, 0.387154996395111],\n [0.462783008813858, 0.505746960639954],\n [0.238978996872902, 0.779744982719421],\n [0.198220998048782, 0.831938028335571],\n [0.107550002634525, 0.540755033493042],\n [0.183610007166862, 0.740257024765015],\n [0.134409993886948, 0.333683013916016],\n [0.385764002799988, 0.883153975009918],\n [0.490967005491257, 0.579378008842468],\n [0.382384985685349, 0.508572995662689],\n [0.174399003386497, 0.397670984268188],\n [0.318785011768341, 0.39623498916626],\n [0.343364000320435, 0.400596976280212],\n [0.396100014448166, 0.710216999053955],\n [0.187885001301765, 0.588537991046906],\n [0.430987000465393, 0.944064974784851],\n [0.318993002176285, 0.898285031318665],\n [0.266247987747192, 0.869701027870178],\n [0.500023007392883, 0.190576016902924],\n [0.499976992607117, 0.954452991485596],\n [0.366169989109039, 0.398822009563446],\n [0.393207013607025, 0.39553701877594],\n [0.410373002290726, 0.391080021858215],\n [0.194993004202843, 0.342101991176605],\n [0.388664990663528, 0.362284004688263],\n [0.365961998701096, 0.355970978736877],\n [0.343364000320435, 0.355356991291046],\n [0.318785011768341, 0.35834002494812],\n [0.301414996385574, 0.363156020641327],\n [0.058132998645306, 0.319076001644135],\n [0.301414996385574, 0.387449026107788],\n [0.499987989664078, 0.618434011936188],\n [0.415838003158569, 0.624195992946625],\n [0.445681989192963, 0.566076993942261],\n [0.465844005346298, 0.620640993118286],\n [0.49992299079895, 0.351523995399475],\n [0.288718998432159, 0.819945991039276],\n [0.335278987884521, 0.852819979190826],\n [0.440512001514435, 0.902418971061707],\n [0.128294005990028, 0.791940987110138],\n [0.408771991729736, 0.373893976211548],\n [0.455606997013092, 0.451801002025604],\n [0.499877005815506, 0.908990025520325],\n [0.375436991453171, 0.924192011356354],\n [0.11421000212431, 0.615022003650665],\n [0.448662012815475, 0.695277988910675],\n [0.4480200111866, 0.704632043838501],\n [0.447111994028091, 0.715808033943176],\n [0.444831997156143, 0.730794012546539],\n [0.430011987686157, 0.766808986663818],\n [0.406787008047104, 0.685672998428345],\n [0.400738000869751, 0.681069016456604],\n [0.392399996519089, 0.677703022956848],\n [0.367855995893478, 0.663918972015381],\n [0.247923001646996, 0.601333022117615],\n [0.452769994735718, 0.420849978923798],\n [0.43639200925827, 0.359887003898621],\n [0.416164010763168, 0.368713974952698],\n [0.413385987281799, 0.692366003990173],\n [0.228018000721931, 0.683571994304657],\n [0.468268007040024, 0.352671027183533],\n [0.411361992359161, 0.804327011108398],\n [0.499989002943039, 0.469825029373169],\n [0.479153990745544, 0.442654013633728],\n [0.499974012374878, 0.439637005329132],\n [0.432112008333206, 0.493588984012604],\n [0.499886006116867, 0.866917014122009],\n [0.49991300702095, 0.821729004383087],\n [0.456548988819122, 0.819200992584229],\n [0.344549000263214, 0.745438992977142],\n [0.37890899181366, 0.574010014533997],\n [0.374292999505997, 0.780184984207153],\n [0.319687992334366, 0.570737957954407],\n [0.357154995203018, 0.604269981384277],\n [0.295284003019333, 0.621580958366394],\n [0.447750002145767, 0.862477004528046],\n [0.410986006259918, 0.508723020553589],\n [0.31395098567009, 0.775308012962341],\n [0.354128003120422, 0.812552988529205],\n 
[0.324548006057739, 0.703992962837219],\n [0.189096003770828, 0.646299958229065],\n [0.279776990413666, 0.71465802192688],\n [0.1338230073452, 0.682700991630554],\n [0.336768001317978, 0.644733011722565],\n [0.429883986711502, 0.466521978378296],\n [0.455527991056442, 0.548622965812683],\n [0.437114000320435, 0.558896005153656],\n [0.467287987470627, 0.529924988746643],\n [0.414712011814117, 0.335219979286194],\n [0.37704598903656, 0.322777986526489],\n [0.344107985496521, 0.320150971412659],\n [0.312875986099243, 0.32233202457428],\n [0.283526003360748, 0.333190023899078],\n [0.241245999932289, 0.382785975933075],\n [0.102986000478268, 0.468762993812561],\n [0.267612010240555, 0.424560010433197],\n [0.297879010438919, 0.433175981044769],\n [0.333433985710144, 0.433878004550934],\n [0.366427004337311, 0.426115989685059],\n [0.396012008190155, 0.416696012020111],\n [0.420121014118195, 0.41022801399231],\n [0.007561000064015, 0.480777025222778],\n [0.432949006557465, 0.569517970085144],\n [0.458638995885849, 0.479089021682739],\n [0.473466008901596, 0.545744001865387],\n [0.476087987422943, 0.563830018043518],\n [0.468472003936768, 0.555056989192963],\n [0.433990985155106, 0.582361996173859],\n [0.483518004417419, 0.562983989715576],\n [0.482482999563217, 0.57784903049469],\n [0.42645001411438, 0.389798998832703],\n [0.438998997211456, 0.39649498462677],\n [0.450067013502121, 0.400434017181396],\n [0.289712011814117, 0.368252992630005],\n [0.276670008897781, 0.363372981548309],\n [0.517862021923065, 0.471948027610779],\n [0.710287988185883, 0.380764007568359],\n [0.526226997375488, 0.573909997940063],\n [0.895093023777008, 0.254140973091125],\n [0.634069979190826, 0.409575998783112],\n [0.661242008209229, 0.41302502155304],\n [0.688880026340485, 0.409460008144379],\n [0.725341975688934, 0.389131009578705],\n [0.606630027294159, 0.40370500087738],\n [0.654766023159027, 0.344011008739471],\n [0.629905998706818, 0.346076011657715],\n [0.680678009986877, 0.347265005111694],\n [0.702096998691559, 0.353591024875641],\n [0.75221198797226, 0.410804986953735],\n [0.602918028831482, 0.842862963676453],\n [0.719901978969574, 0.375599980354309],\n [0.893692970275879, 0.399959981441498],\n [0.790081977844238, 0.391354024410248],\n [0.643998026847839, 0.534487962722778],\n [0.528249025344849, 0.65040397644043],\n [0.525849997997284, 0.680191040039062],\n [0.560214996337891, 0.657229006290436],\n [0.585384011268616, 0.66654098033905],\n [0.549625992774963, 0.680860996246338],\n [0.57122802734375, 0.682691991329193],\n [0.624852001667023, 0.72809898853302],\n [0.513050019741058, 0.547281980514526],\n [0.51509702205658, 0.527251958847046],\n [0.742246985435486, 0.314507007598877],\n [0.598631024360657, 0.454979002475739],\n [0.570338010787964, 0.548575043678284],\n [0.578631997108459, 0.533622980117798],\n [0.723087012767792, 0.532054007053375],\n [0.516445994377136, 0.499638974666595],\n [0.662801027297974, 0.282917976379395],\n [0.70362401008606, 0.293271005153656],\n [0.830704987049103, 0.193813979625702],\n [0.552385985851288, 0.302568018436432],\n [0.607609987258911, 0.353887975215912],\n [0.645429015159607, 0.696707010269165],\n [0.932694971561432, 0.730105042457581],\n [0.557260990142822, 0.572826027870178],\n [0.542901992797852, 0.584792017936707],\n [0.6180260181427, 0.694710969924927],\n [0.607590973377228, 0.694203019142151],\n [0.722943007946014, 0.271963000297546],\n [0.577413976192474, 0.563166975975037],\n [0.614082992076874, 0.281386971473694],\n [0.616907000541687, 0.255886018276215],\n 
[0.668509006500244, 0.119913995265961],\n [0.770092010498047, 0.232020974159241],\n [0.635536015033722, 0.189248979091644],\n [0.77039098739624, 0.299556016921997],\n [0.826722025871277, 0.278755009174347],\n [0.527121007442474, 0.666198015213013],\n [0.553171992301941, 0.668527007102966],\n [0.577238023281097, 0.673889994621277],\n [0.554691970348358, 0.580065965652466],\n [0.611896991729736, 0.693961024284363],\n [0.59696102142334, 0.706539988517761],\n [0.596370995044708, 0.693953037261963],\n [0.539958000183105, 0.557139039039612],\n [0.568841993808746, 0.692366003990173],\n [0.547818005084991, 0.692366003990173],\n [0.52461302280426, 0.692366003990173],\n [0.534089982509613, 0.779141008853912],\n [0.527670979499817, 0.736225962638855],\n [0.526912987232208, 0.717857003211975],\n [0.526877999305725, 0.704625964164734],\n [0.526966989040375, 0.695277988910675],\n [0.572058022022247, 0.695277988910675],\n [0.573521018028259, 0.703539967536926],\n [0.57683801651001, 0.711845993995667],\n [0.581691026687622, 0.720062971115112],\n [0.609944999217987, 0.639909982681274],\n [0.986046016216278, 0.560034036636353],\n [0.5867999792099, 0.69539999961853],\n [0.590372025966644, 0.701822996139526],\n [0.531915009021759, 0.601536989212036],\n [0.577268004417419, 0.585934996604919],\n [0.536915004253387, 0.593786001205444],\n [0.627542972564697, 0.473352015018463],\n [0.665585994720459, 0.495950996875763],\n [0.588353991508484, 0.546862006187439],\n [0.757824003696442, 0.14767599105835],\n [0.709249973297119, 0.201507985591888],\n [0.672684013843536, 0.256581008434296],\n [0.600408971309662, 0.74900496006012],\n [0.55826598405838, 0.261672019958496],\n [0.570303976535797, 0.187870979309082],\n [0.588165998458862, 0.109044015407562],\n [0.711045026779175, 0.398952007293701],\n [0.781069993972778, 0.435405015945435],\n [0.587247014045715, 0.398931980133057],\n [0.742869973182678, 0.355445981025696],\n [0.572156012058258, 0.437651991844177],\n [0.55186802148819, 0.536570012569427],\n [0.821442008018494, 0.457556009292603],\n [0.752701997756958, 0.457181990146637],\n [0.71375697851181, 0.467626988887787],\n [0.66711300611496, 0.460672974586487],\n [0.631101012229919, 0.447153985500336],\n [0.6008620262146, 0.432473003864288],\n [0.523481011390686, 0.405627012252808],\n [0.810747981071472, 0.523926019668579],\n [0.771045982837677, 0.348959028720856],\n [0.509127020835876, 0.562718033790588],\n [0.595292985439301, 0.485023975372314],\n [0.980530977249146, 0.401564002037048],\n [0.573499977588654, 0.420000016689301],\n [0.602994978427887, 0.548687994480133],\n [0.733529984951019, 0.376977026462555],\n [0.560611009597778, 0.519016981124878],\n [0.967685997486115, 0.644356966018677],\n [0.580985009670258, 0.387160003185272],\n [0.537728011608124, 0.505385041236877],\n [0.760966002941132, 0.779752969741821],\n [0.801778972148895, 0.831938028335571],\n [0.892440974712372, 0.54076099395752],\n [0.816350996494293, 0.740260004997253],\n [0.865594983100891, 0.333687007427216],\n [0.614073991775513, 0.883246004581451],\n [0.508952975273132, 0.579437971115112],\n [0.617941975593567, 0.508316040039062],\n [0.825608015060425, 0.397674977779388],\n [0.681214988231659, 0.39623498916626],\n [0.656635999679565, 0.400596976280212],\n [0.603900015354156, 0.710216999053955],\n [0.81208598613739, 0.588539004325867],\n [0.56801301240921, 0.944564998149872],\n [0.681007981300354, 0.898285031318665],\n [0.733752012252808, 0.869701027870178],\n [0.633830010890961, 0.398822009563446],\n [0.606792986392975, 0.39553701877594],\n 
[0.589659988880157, 0.391062021255493],\n [0.805015981197357, 0.342108011245728],\n [0.611334979534149, 0.362284004688263],\n [0.634037971496582, 0.355970978736877],\n [0.656635999679565, 0.355356991291046],\n [0.681214988231659, 0.35834002494812],\n [0.698584973812103, 0.363156020641327],\n [0.941866993904114, 0.319076001644135],\n [0.698584973812103, 0.387449026107788],\n [0.584177017211914, 0.624107003211975],\n [0.554318010807037, 0.566076993942261],\n [0.534153997898102, 0.62064003944397],\n [0.711217999458313, 0.819975018501282],\n [0.664629995822906, 0.852871000766754],\n [0.559099972248077, 0.902631998062134],\n [0.871706008911133, 0.791940987110138],\n [0.591234028339386, 0.373893976211548],\n [0.544341027736664, 0.451583981513977],\n [0.624562978744507, 0.924192011356354],\n [0.88577002286911, 0.615028977394104],\n [0.551338016986847, 0.695277988910675],\n [0.551980018615723, 0.704632043838501],\n [0.552887976169586, 0.715808033943176],\n [0.555167973041534, 0.730794012546539],\n [0.569944024085999, 0.767035007476807],\n [0.593203008174896, 0.685675978660583],\n [0.599261999130249, 0.681069016456604],\n [0.607599973678589, 0.677703022956848],\n [0.631937980651855, 0.663500010967255],\n [0.752032995223999, 0.601315021514893],\n [0.547226011753082, 0.420395016670227],\n [0.563543975353241, 0.359827995300293],\n [0.583841025829315, 0.368713974952698],\n [0.586614012718201, 0.692366003990173],\n [0.771915018558502, 0.683578014373779],\n [0.531597018241882, 0.352482974529266],\n [0.588370978832245, 0.804440975189209],\n [0.52079701423645, 0.442565023899078],\n [0.567984998226166, 0.493479013442993],\n [0.543282985687256, 0.819254994392395],\n [0.655317008495331, 0.745514988899231],\n [0.621008992195129, 0.574018001556396],\n [0.625559985637665, 0.78031200170517],\n [0.680198013782501, 0.570719003677368],\n [0.64276397228241, 0.604337990283966],\n [0.704662978649139, 0.621529996395111],\n [0.552012026309967, 0.862591981887817],\n [0.589071989059448, 0.508637011051178],\n [0.685944974422455, 0.775357007980347],\n [0.645735025405884, 0.812640011310577],\n [0.675342977046967, 0.703978002071381],\n [0.810858011245728, 0.646304965019226],\n [0.72012197971344, 0.714666962623596],\n [0.866151988506317, 0.682704985141754],\n [0.663187026977539, 0.644596993923187],\n [0.570082008838654, 0.466325998306274],\n [0.544561982154846, 0.548375964164734],\n [0.562758982181549, 0.558784961700439],\n [0.531987011432648, 0.530140042304993],\n [0.585271000862122, 0.335177004337311],\n [0.622952997684479, 0.32277899980545],\n [0.655896008014679, 0.320163011550903],\n [0.687132000923157, 0.322345972061157],\n [0.716481983661652, 0.333200991153717],\n [0.758756995201111, 0.382786989212036],\n [0.897013008594513, 0.468769013881683],\n [0.732392013072968, 0.424547016620636],\n [0.70211398601532, 0.433162987232208],\n [0.66652500629425, 0.433866024017334],\n [0.633504986763, 0.426087975502014],\n [0.603875994682312, 0.416586995124817],\n [0.579657971858978, 0.409945011138916],\n [0.992439985275269, 0.480777025222778],\n [0.567192018032074, 0.569419980049133],\n [0.54136598110199, 0.478899002075195],\n [0.526564002037048, 0.546118021011353],\n [0.523913025856018, 0.563830018043518],\n [0.531529009342194, 0.555056989192963],\n [0.566035985946655, 0.582329034805298],\n [0.51631098985672, 0.563053965568542],\n [0.5174720287323, 0.577877044677734],\n [0.573594987392426, 0.389806985855103],\n [0.560697972774506, 0.395331978797913],\n [0.549755990505219, 0.399751007556915],\n [0.710287988185883, 0.368252992630005],\n 
[0.723330020904541, 0.363372981548309],\n];\n", "import { tf, loadGraphModel } from '../tf.js';\nimport * as blazeface from './blazeface';\nimport * as keypoints from './keypoints';\nimport * as pipe from './facepipeline';\nimport * as uv_coords from './uvcoords';\nimport * as triangulation from './triangulation';\n\nclass MediaPipeFaceMesh {\n constructor(blazeFace, blazeMeshModel, irisModel, config) {\n this.pipeline = new pipe.Pipeline(blazeFace, blazeMeshModel, irisModel, config);\n if (config) this.config = config;\n }\n\n async estimateFaces(input, config) {\n if (config) this.config = config;\n const predictions = await this.pipeline.predict(input, config);\n const results = [];\n for (const prediction of (predictions || [])) {\n // guard against disposed tensors on long running operations such as pause in middle of processing\n if (prediction.isDisposedInternal) continue;\n const mesh = prediction.coords ? prediction.coords.arraySync() : null;\n const annotations = {};\n if (mesh && mesh.length > 0) {\n for (const key in keypoints.MESH_ANNOTATIONS) {\n if (this.config.iris.enabled || key.includes('Iris') === false) {\n annotations[key] = keypoints.MESH_ANNOTATIONS[key].map((index) => mesh[index]);\n }\n }\n }\n results.push({\n confidence: prediction.confidence || 0,\n box: prediction.box ? [prediction.box.startPoint[0], prediction.box.startPoint[1], prediction.box.endPoint[0] - prediction.box.startPoint[0], prediction.box.endPoint[1] - prediction.box.startPoint[1]] : 0,\n mesh,\n annotations,\n image: prediction.image ? tf.clone(prediction.image) : null,\n });\n if (prediction.coords) prediction.coords.dispose();\n if (prediction.image) prediction.image.dispose();\n }\n return results;\n }\n}\n\nasync function load(config) {\n const models = await Promise.all([\n blazeface.load(config),\n loadGraphModel(config.mesh.modelPath, { fromTFHub: config.mesh.modelPath.includes('tfhub.dev') }),\n loadGraphModel(config.iris.modelPath, { fromTFHub: config.iris.modelPath.includes('tfhub.dev') }),\n ]);\n const faceMesh = new MediaPipeFaceMesh(models[0], models[1], models[2], config);\n // eslint-disable-next-line no-console\n console.log(`Human: load model: ${config.mesh.modelPath.match(/\\/(.*)\\./)[1]}`);\n // eslint-disable-next-line no-console\n console.log(`Human: load model: ${config.iris.modelPath.match(/\\/(.*)\\./)[1]}`);\n return faceMesh;\n}\n\nexports.load = load;\nexports.MediaPipeFaceMesh = MediaPipeFaceMesh;\nexports.uv_coords = uv_coords;\nexports.triangulation = triangulation.default;\n", "const profileData = {};\n\nfunction profile(name, data) {\n if (!data || !data.kernels) return;\n const maxResults = 5;\n const time = data.kernels\n .filter((a) => a.kernelTimeMs > 0)\n .reduce((a, b) => a += b.kernelTimeMs, 0);\n const slowest = data.kernels\n .map((a, i) => { a.id = i; return a; })\n .filter((a) => a.kernelTimeMs > 0)\n .sort((a, b) => b.kernelTimeMs - a.kernelTimeMs);\n const largest = data.kernels\n .map((a, i) => { a.id = i; return a; })\n .filter((a) => a.totalBytesSnapshot > 0)\n .sort((a, b) => b.totalBytesSnapshot - a.totalBytesSnapshot);\n if (slowest.length > maxResults) slowest.length = maxResults;\n if (largest.length > maxResults) largest.length = maxResults;\n const res = { newBytes: data.newBytes, newTensors: data.newTensors, peakBytes: data.peakBytes, numKernelOps: data.kernels.length, timeKernelOps: time, slowestKernelOps: slowest, largestKernelOps: largest };\n profileData[name] = res;\n // eslint-disable-next-line no-console\n console.log('Human 
profiler', name, res);\n}\n\nexports.run = profile;\n", "import { tf, loadGraphModel } from '../tf.js';\nimport * as profile from '../profile.js';\n\nconst models = {};\nlet last = { age: 0 };\nlet frame = Number.MAX_SAFE_INTEGER;\n\n// tuning values\nconst zoom = [0, 0]; // 0..1 meaning 0%..100%\n\nasync function load(config) {\n if (!models.age) {\n models.age = await loadGraphModel(config.face.age.modelPath);\n // eslint-disable-next-line no-console\n console.log(`Human: load model: ${config.face.age.modelPath.match(/\\/(.*)\\./)[1]}`);\n }\n return models.age;\n}\n\nasync function predict(image, config) {\n if ((frame < config.face.age.skipFrames) && last.age && (last.age > 0)) {\n frame += 1;\n return last;\n }\n frame = 0;\n return new Promise(async (resolve) => {\n const box = [[\n (image.shape[1] * zoom[0]) / image.shape[1],\n (image.shape[2] * zoom[1]) / image.shape[2],\n (image.shape[1] - (image.shape[1] * zoom[0])) / image.shape[1],\n (image.shape[2] - (image.shape[2] * zoom[1])) / image.shape[2],\n ]];\n const resize = tf.image.cropAndResize(image, box, [0], [config.face.age.inputSize, config.face.age.inputSize]);\n // const resize = tf.image.resizeBilinear(image, [config.face.age.inputSize, config.face.age.inputSize], false);\n const enhance = tf.mul(resize, [255.0]);\n tf.dispose(resize);\n\n let ageT;\n const obj = {};\n\n if (!config.profile) {\n if (config.face.age.enabled) ageT = await models.age.predict(enhance);\n } else {\n const profileAge = config.face.age.enabled ? await tf.profile(() => models.age.predict(enhance)) : {};\n ageT = profileAge.result.clone();\n profileAge.result.dispose();\n profile.run('age', profileAge);\n }\n enhance.dispose();\n\n if (ageT) {\n const data = ageT.dataSync();\n obj.age = Math.trunc(10 * data[0]) / 10;\n }\n ageT.dispose();\n\n last = obj;\n resolve(obj);\n });\n}\n\nexports.predict = predict;\nexports.load = load;\n", "import { tf, loadGraphModel } from '../tf.js';\nimport * as profile from '../profile.js';\n\nconst models = {};\nlet last = { gender: '' };\nlet frame = Number.MAX_SAFE_INTEGER;\nlet alternative = false;\n\n// tuning values\nconst zoom = [0, 0]; // 0..1 meaning 0%..100%\nconst rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when converting to grayscale\n\nasync function load(config) {\n if (!models.gender) {\n models.gender = await loadGraphModel(config.face.gender.modelPath);\n alternative = models.gender.inputs[0].shape[3] === 1;\n // eslint-disable-next-line no-console\n console.log(`Human: load model: ${config.face.gender.modelPath.match(/\\/(.*)\\./)[1]}`);\n }\n return models.gender;\n}\n\nasync function predict(image, config) {\n if ((frame < config.face.gender.skipFrames) && last.gender !== '') {\n frame += 1;\n return last;\n }\n frame = 0;\n return new Promise(async (resolve) => {\n const box = [[\n (image.shape[1] * zoom[0]) / image.shape[1],\n (image.shape[2] * zoom[1]) / image.shape[2],\n (image.shape[1] - (image.shape[1] * zoom[0])) / image.shape[1],\n (image.shape[2] - (image.shape[2] * zoom[1])) / image.shape[2],\n ]];\n const resize = tf.image.cropAndResize(image, box, [0], [config.face.gender.inputSize, config.face.gender.inputSize]);\n let enhance;\n if (alternative) {\n enhance = tf.tidy(() => {\n const [red, green, blue] = tf.split(resize, 3, 3);\n const redNorm = tf.mul(red, rgb[0]);\n const greenNorm = tf.mul(green, rgb[1]);\n const blueNorm = tf.mul(blue, rgb[2]);\n const grayscale = tf.addN([redNorm, greenNorm, blueNorm]);\n return grayscale.sub(0.5).mul(2);\n });\n } else 
{\n enhance = tf.mul(resize, [255.0]);\n }\n // const resize = tf.image.resizeBilinear(image, [config.face.age.inputSize, config.face.age.inputSize], false);\n tf.dispose(resize);\n\n let genderT;\n const obj = {};\n\n if (!config.profile) {\n if (config.face.gender.enabled) genderT = await models.gender.predict(enhance);\n } else {\n const profileGender = config.face.gender.enabled ? await tf.profile(() => models.gender.predict(enhance)) : {};\n genderT = profileGender.result.clone();\n profileGender.result.dispose();\n profile.run('gender', profileGender);\n }\n enhance.dispose();\n\n if (genderT) {\n const data = genderT.dataSync();\n if (alternative) {\n // returns two values 0..1, bigger one is prediction\n const confidence = Math.trunc(100 * Math.abs(data[0] - data[1])) / 100;\n if (confidence > config.face.gender.minConfidence) {\n obj.gender = data[0] > data[1] ? 'female' : 'male';\n obj.confidence = confidence;\n }\n } else {\n // returns one value 0..1, .5 is prediction threshold\n const confidence = Math.trunc(200 * Math.abs((data[0] - 0.5))) / 100;\n if (confidence > config.face.gender.minConfidence) {\n obj.gender = data[0] <= 0.5 ? 'female' : 'male';\n obj.confidence = Math.min(0.99, confidence);\n }\n }\n }\n genderT.dispose();\n\n last = obj;\n resolve(obj);\n });\n}\n\nexports.predict = predict;\nexports.load = load;\n", "import { tf, loadGraphModel } from '../tf.js';\nimport * as profile from '../profile.js';\n\nconst annotations = ['angry', 'disgust', 'fear', 'happy', 'sad', 'surpise', 'neutral'];\nconst models = {};\nlet last = [];\nlet frame = Number.MAX_SAFE_INTEGER;\n\n// tuning values\nconst zoom = [0, 0]; // 0..1 meaning 0%..100%\nconst rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when converting to grayscale\nconst scale = 1; // score multiplication factor\n\nasync function load(config) {\n if (!models.emotion) {\n models.emotion = await loadGraphModel(config.face.emotion.modelPath);\n // eslint-disable-next-line no-console\n console.log(`Human: load model: ${config.face.emotion.modelPath.match(/\\/(.*)\\./)[1]}`);\n }\n return models.emotion;\n}\n\nasync function predict(image, config) {\n if ((frame < config.face.emotion.skipFrames) && (last.length > 0)) {\n frame += 1;\n return last;\n }\n frame = 0;\n return new Promise(async (resolve) => {\n const box = [[\n (image.shape[1] * zoom[0]) / image.shape[1],\n (image.shape[2] * zoom[1]) / image.shape[2],\n (image.shape[1] - (image.shape[1] * zoom[0])) / image.shape[1],\n (image.shape[2] - (image.shape[2] * zoom[1])) / image.shape[2],\n ]];\n const resize = tf.image.cropAndResize(image, box, [0], [config.face.emotion.inputSize, config.face.emotion.inputSize]);\n // const resize = tf.image.resizeBilinear(image, [config.face.emotion.inputSize, config.face.emotion.inputSize], false);\n const [red, green, blue] = tf.split(resize, 3, 3);\n resize.dispose();\n // weighted rgb to grayscale: https://www.mathworks.com/help/matlab/ref/rgb2gray.html\n const redNorm = tf.mul(red, rgb[0]);\n const greenNorm = tf.mul(green, rgb[1]);\n const blueNorm = tf.mul(blue, rgb[2]);\n red.dispose();\n green.dispose();\n blue.dispose();\n const grayscale = tf.addN([redNorm, greenNorm, blueNorm]);\n redNorm.dispose();\n greenNorm.dispose();\n blueNorm.dispose();\n const normalize = tf.tidy(() => grayscale.sub(0.5).mul(2));\n grayscale.dispose();\n const obj = [];\n if (config.face.emotion.enabled) {\n let data;\n if (!config.profile) {\n const emotionT = await models.emotion.predict(normalize);\n data = 
emotionT.dataSync();\n tf.dispose(emotionT);\n } else {\n const profileData = await tf.profile(() => models.emotion.predict(normalize));\n data = profileData.result.dataSync();\n profileData.result.dispose();\n profile.run('emotion', profileData);\n }\n for (let i = 0; i < data.length; i++) {\n if (scale * data[i] > config.face.emotion.minConfidence) obj.push({ score: Math.min(0.99, Math.trunc(100 * scale * data[i]) / 100), emotion: annotations[i] });\n }\n obj.sort((a, b) => b.score - a.score);\n }\n normalize.dispose();\n last = obj;\n resolve(obj);\n });\n}\n\nexports.predict = predict;\nexports.load = load;\n", "import { tf } from '../tf.js';\n\nclass BaseModel {\n constructor(model, outputStride) {\n this.model = model;\n this.outputStride = outputStride;\n }\n\n predict(input) {\n return tf.tidy(() => {\n const asFloat = this.preprocessInput(input.toFloat());\n const asBatch = asFloat.expandDims(0);\n const results = this.model.predict(asBatch);\n const results3d = results.map((y) => y.squeeze([0]));\n const namedResults = this.nameOutputResults(results3d);\n return {\n heatmapScores: namedResults.heatmap.sigmoid(),\n offsets: namedResults.offsets,\n displacementFwd: namedResults.displacementFwd,\n displacementBwd: namedResults.displacementBwd,\n };\n });\n }\n\n /**\n * Releases the CPU and GPU memory allocated by the model.\n */\n dispose() {\n this.model.dispose();\n }\n}\nexports.BaseModel = BaseModel;\n", "import { tf } from '../tf.js';\nimport * as modelBase from './modelBase';\n\nclass MobileNet extends modelBase.BaseModel {\n // eslint-disable-next-line class-methods-use-this\n preprocessInput(input) {\n // Normalize the pixels [0, 255] to be between [-1, 1].\n return tf.tidy(() => tf.div(input, 127.5).sub(1.0));\n }\n\n // eslint-disable-next-line class-methods-use-this\n nameOutputResults(results) {\n const [offsets, heatmap, displacementFwd, displacementBwd] = results;\n return { offsets, heatmap, displacementFwd, displacementBwd };\n }\n}\nexports.MobileNet = MobileNet;\n", "// algorithm based on Coursera Lecture from Algorithms, Part 1: https://www.coursera.org/learn/algorithms-part1/lecture/ZjoSM/heapsort\nfunction half(k) {\n return Math.floor(k / 2);\n}\nclass MaxHeap {\n constructor(maxSize, getElementValue) {\n this.priorityQueue = new Array(maxSize);\n this.numberOfElements = -1;\n this.getElementValue = getElementValue;\n }\n\n enqueue(x) {\n this.priorityQueue[++this.numberOfElements] = x;\n this.swim(this.numberOfElements);\n }\n\n dequeue() {\n const max = this.priorityQueue[0];\n this.exchange(0, this.numberOfElements--);\n this.sink(0);\n this.priorityQueue[this.numberOfElements + 1] = null;\n return max;\n }\n\n empty() {\n return this.numberOfElements === -1;\n }\n\n size() {\n return this.numberOfElements + 1;\n }\n\n all() {\n return this.priorityQueue.slice(0, this.numberOfElements + 1);\n }\n\n max() {\n return this.priorityQueue[0];\n }\n\n swim(k) {\n while (k > 0 && this.less(half(k), k)) {\n this.exchange(k, half(k));\n k = half(k);\n }\n }\n\n sink(k) {\n while (2 * k <= this.numberOfElements) {\n let j = 2 * k;\n if (j < this.numberOfElements && this.less(j, j + 1)) j++;\n if (!this.less(k, j)) break;\n this.exchange(k, j);\n k = j;\n }\n }\n\n getValueAt(i) {\n return this.getElementValue(this.priorityQueue[i]);\n }\n\n less(i, j) {\n return this.getValueAt(i) < this.getValueAt(j);\n }\n\n exchange(i, j) {\n const t = this.priorityQueue[i];\n this.priorityQueue[i] = this.priorityQueue[j];\n this.priorityQueue[j] = t;\n }\n}\nexports.MaxHeap = 
MaxHeap;\n", "import * as heapSort from './heapSort';\n\nfunction scoreIsMaximumInLocalWindow(keypointId, score, heatmapY, heatmapX, localMaximumRadius, scores) {\n const [height, width] = scores.shape;\n let localMaximum = true;\n const yStart = Math.max(heatmapY - localMaximumRadius, 0);\n const yEnd = Math.min(heatmapY + localMaximumRadius + 1, height);\n for (let yCurrent = yStart; yCurrent < yEnd; ++yCurrent) {\n const xStart = Math.max(heatmapX - localMaximumRadius, 0);\n const xEnd = Math.min(heatmapX + localMaximumRadius + 1, width);\n for (let xCurrent = xStart; xCurrent < xEnd; ++xCurrent) {\n if (scores.get(yCurrent, xCurrent, keypointId) > score) {\n localMaximum = false;\n break;\n }\n }\n if (!localMaximum) {\n break;\n }\n }\n return localMaximum;\n}\n/**\n * Builds a priority queue with part candidate positions for a specific image in\n * the batch. For this we find all local maxima in the score maps with score\n * values above a threshold. We create a single priority queue across all parts.\n */\nfunction buildPartWithScoreQueue(scoreThreshold, localMaximumRadius, scores) {\n const [height, width, numKeypoints] = scores.shape;\n const queue = new heapSort.MaxHeap(height * width * numKeypoints, ({ score }) => score);\n for (let heatmapY = 0; heatmapY < height; ++heatmapY) {\n for (let heatmapX = 0; heatmapX < width; ++heatmapX) {\n for (let keypointId = 0; keypointId < numKeypoints; ++keypointId) {\n const score = scores.get(heatmapY, heatmapX, keypointId);\n // Only consider parts with score greater or equal to threshold as root candidates.\n if (score < scoreThreshold) continue;\n // Only consider keypoints whose score is maximum in a local window.\n if (scoreIsMaximumInLocalWindow(keypointId, score, heatmapY, heatmapX, localMaximumRadius, scores)) {\n queue.enqueue({ score, part: { heatmapY, heatmapX, id: keypointId } });\n }\n }\n }\n }\n return queue;\n}\nexports.buildPartWithScoreQueue = buildPartWithScoreQueue;\n", "exports.partNames = [\n 'nose', 'leftEye', 'rightEye', 'leftEar', 'rightEar', 'leftShoulder',\n 'rightShoulder', 'leftElbow', 'rightElbow', 'leftWrist', 'rightWrist',\n 'leftHip', 'rightHip', 'leftKnee', 'rightKnee', 'leftAnkle', 'rightAnkle',\n];\nexports.NUM_KEYPOINTS = exports.partNames.length;\nexports.partIds = exports.partNames.reduce((result, jointName, i) => {\n result[jointName] = i;\n return result;\n}, {});\nconst connectedPartNames = [\n ['leftHip', 'leftShoulder'], ['leftElbow', 'leftShoulder'],\n ['leftElbow', 'leftWrist'], ['leftHip', 'leftKnee'],\n ['leftKnee', 'leftAnkle'], ['rightHip', 'rightShoulder'],\n ['rightElbow', 'rightShoulder'], ['rightElbow', 'rightWrist'],\n ['rightHip', 'rightKnee'], ['rightKnee', 'rightAnkle'],\n ['leftShoulder', 'rightShoulder'], ['leftHip', 'rightHip'],\n];\n/*\n * Define the skeleton. This defines the parent->child relationships of our\n * tree. 
Arbitrarily this defines the nose as the root of the tree, however\n * since we will infer the displacement for both parent->child and\n * child->parent, we can define the tree root as any node.\n */\nexports.poseChain = [\n ['nose', 'leftEye'], ['leftEye', 'leftEar'], ['nose', 'rightEye'],\n ['rightEye', 'rightEar'], ['nose', 'leftShoulder'],\n ['leftShoulder', 'leftElbow'], ['leftElbow', 'leftWrist'],\n ['leftShoulder', 'leftHip'], ['leftHip', 'leftKnee'],\n ['leftKnee', 'leftAnkle'], ['nose', 'rightShoulder'],\n ['rightShoulder', 'rightElbow'], ['rightElbow', 'rightWrist'],\n ['rightShoulder', 'rightHip'], ['rightHip', 'rightKnee'],\n ['rightKnee', 'rightAnkle'],\n];\nexports.connectedPartIndices = connectedPartNames.map(([jointNameA, jointNameB]) => ([exports.partIds[jointNameA], exports.partIds[jointNameB]]));\nexports.partChannels = [\n 'left_face',\n 'right_face',\n 'right_upper_leg_front',\n 'right_lower_leg_back',\n 'right_upper_leg_back',\n 'left_lower_leg_front',\n 'left_upper_leg_front',\n 'left_upper_leg_back',\n 'left_lower_leg_back',\n 'right_feet',\n 'right_lower_leg_front',\n 'left_feet',\n 'torso_front',\n 'torso_back',\n 'right_upper_arm_front',\n 'right_upper_arm_back',\n 'right_lower_arm_back',\n 'left_lower_arm_front',\n 'left_upper_arm_front',\n 'left_upper_arm_back',\n 'left_lower_arm_back',\n 'right_hand',\n 'right_lower_arm_front',\n 'left_hand',\n];\n", "import * as kpt from './keypoints';\n\nfunction getOffsetPoint(y, x, keypoint, offsets) {\n return {\n y: offsets.get(y, x, keypoint),\n x: offsets.get(y, x, keypoint + kpt.NUM_KEYPOINTS),\n };\n}\nexports.getOffsetPoint = getOffsetPoint;\n\nfunction getImageCoords(part, outputStride, offsets) {\n const { heatmapY, heatmapX, id: keypoint } = part;\n const { y, x } = getOffsetPoint(heatmapY, heatmapX, keypoint, offsets);\n return {\n x: part.heatmapX * outputStride + x,\n y: part.heatmapY * outputStride + y,\n };\n}\nexports.getImageCoords = getImageCoords;\n\nfunction fillArray(element, size) {\n const result = new Array(size);\n for (let i = 0; i < size; i++) {\n result[i] = element;\n }\n return result;\n}\nexports.fillArray = fillArray;\n\nfunction clamp(a, min, max) {\n if (a < min) return min;\n if (a > max) return max;\n return a;\n}\nexports.clamp = clamp;\n\nfunction squaredDistance(y1, x1, y2, x2) {\n const dy = y2 - y1;\n const dx = x2 - x1;\n return dy * dy + dx * dx;\n}\nexports.squaredDistance = squaredDistance;\n\nfunction addVectors(a, b) {\n return { x: a.x + b.x, y: a.y + b.y };\n}\nexports.addVectors = addVectors;\n\nfunction clampVector(a, min, max) {\n return { y: clamp(a.y, min, max), x: clamp(a.x, min, max) };\n}\nexports.clampVector = clampVector;\n", "import * as keypoints from './keypoints';\nimport * as vectors from './vectors';\n\nconst parentChildrenTuples = keypoints.poseChain.map(([parentJoinName, childJoinName]) => ([keypoints.partIds[parentJoinName], keypoints.partIds[childJoinName]]));\nconst parentToChildEdges = parentChildrenTuples.map(([, childJointId]) => childJointId);\nconst childToParentEdges = parentChildrenTuples.map(([parentJointId]) => parentJointId);\nfunction getDisplacement(edgeId, point, displacements) {\n const numEdges = displacements.shape[2] / 2;\n return {\n y: displacements.get(point.y, point.x, edgeId),\n x: displacements.get(point.y, point.x, numEdges + edgeId),\n };\n}\nfunction getStridedIndexNearPoint(point, outputStride, height, width) {\n return {\n y: vectors.clamp(Math.round(point.y / outputStride), 0, height - 1),\n x: 
vectors.clamp(Math.round(point.x / outputStride), 0, width - 1),\n };\n}\n/**\n * We get a new keypoint along the `edgeId` for the pose instance, assuming\n * that the position of the `idSource` part is already known. For this, we\n * follow the displacement vector from the source to target part (stored in\n * the `i`-t channel of the displacement tensor). The displaced keypoint\n * vector is refined using the offset vector by `offsetRefineStep` times.\n */\nfunction traverseToTargetKeypoint(edgeId, sourceKeypoint, targetKeypointId, scoresBuffer, offsets, outputStride, displacements, offsetRefineStep = 2) {\n const [height, width] = scoresBuffer.shape;\n // Nearest neighbor interpolation for the source->target displacements.\n const sourceKeypointIndices = getStridedIndexNearPoint(sourceKeypoint.position, outputStride, height, width);\n const displacement = getDisplacement(edgeId, sourceKeypointIndices, displacements);\n const displacedPoint = vectors.addVectors(sourceKeypoint.position, displacement);\n let targetKeypoint = displacedPoint;\n for (let i = 0; i < offsetRefineStep; i++) {\n const targetKeypointIndices = getStridedIndexNearPoint(targetKeypoint, outputStride, height, width);\n const offsetPoint = vectors.getOffsetPoint(targetKeypointIndices.y, targetKeypointIndices.x, targetKeypointId, offsets);\n targetKeypoint = vectors.addVectors({\n x: targetKeypointIndices.x * outputStride,\n y: targetKeypointIndices.y * outputStride,\n }, { x: offsetPoint.x, y: offsetPoint.y });\n }\n const targetKeyPointIndices = getStridedIndexNearPoint(targetKeypoint, outputStride, height, width);\n const score = scoresBuffer.get(targetKeyPointIndices.y, targetKeyPointIndices.x, targetKeypointId);\n return { position: targetKeypoint, part: keypoints.partNames[targetKeypointId], score };\n}\n/**\n * Follows the displacement fields to decode the full pose of the object\n * instance given the position of a part that acts as root.\n *\n * @return An array of decoded keypoints and their scores for a single pose\n */\nfunction decodePose(root, scores, offsets, outputStride, displacementsFwd, displacementsBwd) {\n const numParts = scores.shape[2];\n const numEdges = parentToChildEdges.length;\n const instanceKeypoints = new Array(numParts);\n // Start a new detection instance at the position of the root.\n const { part: rootPart, score: rootScore } = root;\n const rootPoint = vectors.getImageCoords(rootPart, outputStride, offsets);\n instanceKeypoints[rootPart.id] = {\n score: rootScore,\n part: keypoints.partNames[rootPart.id],\n position: rootPoint,\n };\n // Decode the part positions upwards in the tree, following the backward\n // displacements.\n for (let edge = numEdges - 1; edge >= 0; --edge) {\n const sourceKeypointId = parentToChildEdges[edge];\n const targetKeypointId = childToParentEdges[edge];\n if (instanceKeypoints[sourceKeypointId] && !instanceKeypoints[targetKeypointId]) {\n instanceKeypoints[targetKeypointId] = traverseToTargetKeypoint(edge, instanceKeypoints[sourceKeypointId], targetKeypointId, scores, offsets, outputStride, displacementsBwd);\n }\n }\n // Decode the part positions downwards in the tree, following the forward\n // displacements.\n for (let edge = 0; edge < numEdges; ++edge) {\n const sourceKeypointId = childToParentEdges[edge];\n const targetKeypointId = parentToChildEdges[edge];\n if (instanceKeypoints[sourceKeypointId] && !instanceKeypoints[targetKeypointId]) {\n instanceKeypoints[targetKeypointId] = traverseToTargetKeypoint(edge, instanceKeypoints[sourceKeypointId], 
targetKeypointId, scores, offsets, outputStride, displacementsFwd);\n }\n }\n return instanceKeypoints;\n}\nexports.decodePose = decodePose;\n", "import * as buildParts from './buildParts';\nimport * as decodePose from './decodePose';\nimport * as vectors from './vectors';\n\nfunction withinNmsRadiusOfCorrespondingPoint(poses, squaredNmsRadius, { x, y }, keypointId) {\n return poses.some(({ keypoints }) => {\n const correspondingKeypoint = keypoints[keypointId].position;\n return vectors.squaredDistance(y, x, correspondingKeypoint.y, correspondingKeypoint.x) <= squaredNmsRadius;\n });\n}\n/* Score the newly proposed object instance without taking into account\n * the scores of the parts that overlap with any previously detected\n * instance.\n */\nfunction getInstanceScore(existingPoses, squaredNmsRadius, instanceKeypoints) {\n const notOverlappedKeypointScores = instanceKeypoints.reduce((result, { position, score }, keypointId) => {\n if (!withinNmsRadiusOfCorrespondingPoint(existingPoses, squaredNmsRadius, position, keypointId)) {\n result += score;\n }\n return result;\n }, 0.0);\n return notOverlappedKeypointScores / instanceKeypoints.length;\n}\n// A point (y, x) is considered as root part candidate if its score is a\n// maximum in a window |y - y'| <= kLocalMaximumRadius, |x - x'| <=\n// kLocalMaximumRadius.\nconst kLocalMaximumRadius = 1;\n/**\n * Detects multiple poses and finds their parts from part scores and\n * displacement vectors. It returns up to `maxDetections` object instance\n * detections in decreasing root score order. It works as follows: We first\n * create a priority queue with local part score maxima above\n * `scoreThreshold`, considering all parts at the same time. Then we\n * iteratively pull the top element of the queue (in decreasing score order)\n * and treat it as a root candidate for a new object instance. To avoid\n * duplicate detections, we reject the root candidate if it is within a disk\n * of `nmsRadius` pixels from the corresponding part of a previously detected\n * instance, which is a form of part-based non-maximum suppression (NMS). If\n * the root candidate passes the NMS check, we start a new object instance\n * detection, treating the corresponding part as root and finding the\n * positions of the remaining parts by following the displacement vectors\n * along the tree-structured part graph. We assign to the newly detected\n * instance a score equal to the sum of scores of its parts which have not\n * been claimed by a previous instance (i.e., those at least `nmsRadius`\n * pixels away from the corresponding part of all previously detected\n * instances), divided by the total number of parts `numParts`.\n *\n * @param heatmapScores 3-D tensor with shape `[height, width, numParts]`.\n * The value of heatmapScores[y, x, k]` is the score of placing the `k`-th\n * object part at position `(y, x)`.\n *\n * @param offsets 3-D tensor with shape `[height, width, numParts * 2]`.\n * The value of [offsets[y, x, k], offsets[y, x, k + numParts]]` is the\n * short range offset vector of the `k`-th object part at heatmap\n * position `(y, x)`.\n *\n * @param displacementsFwd 3-D tensor of shape\n * `[height, width, 2 * num_edges]`, where `num_edges = num_parts - 1` is the\n * number of edges (parent-child pairs) in the tree. 
It contains the forward\n * displacements between consecutive part from the root towards the leaves.\n *\n * @param displacementsBwd 3-D tensor of shape\n * `[height, width, 2 * num_edges]`, where `num_edges = num_parts - 1` is the\n * number of edges (parent-child pairs) in the tree. It contains the backward\n * displacements between consecutive part from the root towards the leaves.\n *\n * @param outputStride The output stride that was used when feed-forwarding\n * through the PoseNet model. Must be 32, 16, or 8.\n *\n * @param maxPoseDetections Maximum number of returned instance detections per\n * image.\n *\n * @param scoreThreshold Only return instance detections that have root part\n * score greater or equal to this value. Defaults to 0.5.\n *\n * @param nmsRadius Non-maximum suppression part distance. It needs to be\n * strictly positive. Two parts suppress each other if they are less than\n * `nmsRadius` pixels away. Defaults to 20.\n *\n * @return An array of poses and their scores, each containing keypoints and\n * the corresponding keypoint scores.\n */\nfunction decodeMultiplePoses(scoresBuffer, offsetsBuffer, displacementsFwdBuffer, displacementsBwdBuffer, outputStride, maxPoseDetections, scoreThreshold = 0.5, nmsRadius = 20) {\n const poses = [];\n const queue = buildParts.buildPartWithScoreQueue(scoreThreshold, kLocalMaximumRadius, scoresBuffer);\n const squaredNmsRadius = nmsRadius * nmsRadius;\n // Generate at most maxDetections object instances per image in\n // decreasing root part score order.\n while (poses.length < maxPoseDetections && !queue.empty()) {\n // The top element in the queue is the next root candidate.\n const root = queue.dequeue();\n // Part-based non-maximum suppression: We reject a root candidate if it\n // is within a disk of `nmsRadius` pixels from the corresponding part of\n // a previously detected instance.\n const rootImageCoords = vectors.getImageCoords(root.part, outputStride, offsetsBuffer);\n if (withinNmsRadiusOfCorrespondingPoint(poses, squaredNmsRadius, rootImageCoords, root.part.id)) continue;\n // Start a new detection instance at the position of the root.\n const keypoints = decodePose.decodePose(root, scoresBuffer, offsetsBuffer, outputStride, displacementsFwdBuffer, displacementsBwdBuffer);\n const score = getInstanceScore(poses, squaredNmsRadius, keypoints);\n poses.push({ keypoints, score });\n }\n return poses;\n}\nexports.decodeMultiplePoses = decodeMultiplePoses;\n", "import * as kpt from './keypoints';\n\nfunction eitherPointDoesntMeetConfidence(a, b, minConfidence) {\n return (a < minConfidence || b < minConfidence);\n}\n\nfunction getAdjacentKeyPoints(keypoints, minConfidence) {\n return kpt.connectedPartIndices.reduce((result, [leftJoint, rightJoint]) => {\n if (eitherPointDoesntMeetConfidence(keypoints[leftJoint].score, keypoints[rightJoint].score, minConfidence)) {\n return result;\n }\n result.push([keypoints[leftJoint], keypoints[rightJoint]]);\n return result;\n }, []);\n}\nexports.getAdjacentKeyPoints = getAdjacentKeyPoints;\n\nconst { NEGATIVE_INFINITY, POSITIVE_INFINITY } = Number;\nfunction getBoundingBox(keypoints) {\n return keypoints.reduce(({ maxX, maxY, minX, minY }, { position: { x, y } }) => ({\n maxX: Math.max(maxX, x),\n maxY: Math.max(maxY, y),\n minX: Math.min(minX, x),\n minY: Math.min(minY, y),\n }), {\n maxX: NEGATIVE_INFINITY,\n maxY: NEGATIVE_INFINITY,\n minX: POSITIVE_INFINITY,\n minY: POSITIVE_INFINITY,\n });\n}\nexports.getBoundingBox = getBoundingBox;\n\nfunction getBoundingBoxPoints(keypoints) 
{\n const { minX, minY, maxX, maxY } = getBoundingBox(keypoints);\n return [{ x: minX, y: minY }, { x: maxX, y: minY }, { x: maxX, y: maxY }, { x: minX, y: maxY }];\n}\nexports.getBoundingBoxPoints = getBoundingBoxPoints;\n\nasync function toTensorBuffers3D(tensors) {\n return Promise.all(tensors.map((tensor) => tensor.buffer()));\n}\nexports.toTensorBuffers3D = toTensorBuffers3D;\n\nfunction scalePose(pose, scaleY, scaleX) {\n return {\n score: pose.score,\n keypoints: pose.keypoints.map(({ score, part, position }) => ({\n score,\n part,\n position: { x: position.x * scaleX, y: position.y * scaleY },\n })),\n };\n}\nexports.scalePose = scalePose;\n\nfunction resizeTo(image, [targetH, targetW]) {\n const input = image.squeeze(0);\n const resized = input.resizeBilinear([targetH, targetW]);\n input.dispose();\n return resized;\n}\nexports.resizeTo = resizeTo;\n\nfunction scaleAndFlipPoses(poses, [height, width], [inputResolutionHeight, inputResolutionWidth]) {\n const scaledPoses = poses.map((pose) => scalePose(pose, height / inputResolutionHeight, width / inputResolutionWidth));\n return scaledPoses;\n}\nexports.scaleAndFlipPoses = scaleAndFlipPoses;\n", "import { loadGraphModel } from '../tf.js';\nimport * as modelMobileNet from './modelMobileNet';\nimport * as decodeMultiple from './decodeMultiple';\nimport * as util from './util';\n\nclass PoseNet {\n constructor(net) {\n this.baseModel = net;\n this.outputStride = 16;\n }\n\n async estimatePoses(input, config) {\n return new Promise(async (resolve) => {\n const height = input.shape[1];\n const width = input.shape[2];\n const resized = util.resizeTo(input, [config.body.inputSize, config.body.inputSize]);\n const res = this.baseModel.predict(resized);\n const allTensorBuffers = await util.toTensorBuffers3D([res.heatmapScores, res.offsets, res.displacementFwd, res.displacementBwd]);\n const scoresBuffer = allTensorBuffers[0];\n const offsetsBuffer = allTensorBuffers[1];\n const displacementsFwdBuffer = allTensorBuffers[2];\n const displacementsBwdBuffer = allTensorBuffers[3];\n const poses = await decodeMultiple.decodeMultiplePoses(scoresBuffer, offsetsBuffer, displacementsFwdBuffer, displacementsBwdBuffer, this.outputStride, config.body.maxDetections, config.body.scoreThreshold, config.body.nmsRadius);\n const resultPoses = util.scaleAndFlipPoses(poses, [height, width], [config.body.inputSize, config.body.inputSize]);\n res.heatmapScores.dispose();\n res.offsets.dispose();\n res.displacementFwd.dispose();\n res.displacementBwd.dispose();\n resized.dispose();\n resolve(resultPoses);\n });\n }\n\n dispose() {\n this.baseModel.dispose();\n }\n}\nexports.PoseNet = PoseNet;\n\nasync function load(config) {\n const graphModel = await loadGraphModel(config.body.modelPath);\n const mobilenet = new modelMobileNet.MobileNet(graphModel, this.outputStride);\n // eslint-disable-next-line no-console\n console.log(`Human: load model: ${config.body.modelPath.match(/\\/(.*)\\./)[1]}`);\n return new PoseNet(mobilenet);\n}\nexports.load = load;\n", "import * as modelMobileNet from './modelMobileNet';\nimport * as modelPoseNet from './modelPoseNet';\nimport * as decodeMultiple from './decodeMultiple';\nimport * as keypoints from './keypoints';\nimport * as util from './util';\n\nexports.load = modelPoseNet.load;\nexports.PoseNet = modelPoseNet.PoseNet;\n\nexports.MobileNet = modelMobileNet.MobileNet;\nexports.decodeMultiplePoses = decodeMultiple.decodeMultiplePoses;\nexports.partChannels = keypoints.partChannels;\nexports.partIds = 
keypoints.partIds;\nexports.partNames = keypoints.partNames;\nexports.poseChain = keypoints.poseChain;\nexports.getAdjacentKeyPoints = util.getAdjacentKeyPoints;\nexports.getBoundingBox = util.getBoundingBox;\nexports.getBoundingBoxPoints = util.getBoundingBoxPoints;\nexports.scaleAndFlipPoses = util.scaleAndFlipPoses;\nexports.scalePose = util.scalePose;\n", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport { tf } from '../tf.js';\nimport * as box from './box';\n\nclass HandDetector {\n constructor(model, inputSize, anchorsAnnotated) {\n this.model = model;\n this.anchors = anchorsAnnotated.map((anchor) => [anchor.x_center, anchor.y_center]);\n this.anchorsTensor = tf.tensor2d(this.anchors);\n this.inputSizeTensor = tf.tensor1d([inputSize, inputSize]);\n this.doubleInputSizeTensor = tf.tensor1d([inputSize * 2, inputSize * 2]);\n }\n\n normalizeBoxes(boxes) {\n return tf.tidy(() => {\n const boxOffsets = tf.slice(boxes, [0, 0], [-1, 2]);\n const boxSizes = tf.slice(boxes, [0, 2], [-1, 2]);\n const boxCenterPoints = tf.add(tf.div(boxOffsets, this.inputSizeTensor), this.anchorsTensor);\n const halfBoxSizes = tf.div(boxSizes, this.doubleInputSizeTensor);\n const startPoints = tf.mul(tf.sub(boxCenterPoints, halfBoxSizes), this.inputSizeTensor);\n const endPoints = tf.mul(tf.add(boxCenterPoints, halfBoxSizes), this.inputSizeTensor);\n return tf.concat2d([startPoints, endPoints], 1);\n });\n }\n\n normalizeLandmarks(rawPalmLandmarks, index) {\n return tf.tidy(() => {\n const landmarks = tf.add(tf.div(rawPalmLandmarks.reshape([-1, 7, 2]), this.inputSizeTensor), this.anchors[index]);\n return tf.mul(landmarks, this.inputSizeTensor);\n });\n }\n\n async getBoxes(input, config) {\n const batched = this.model.predict(input);\n const predictions = batched.squeeze();\n batched.dispose();\n const scores = tf.tidy(() => tf.sigmoid(tf.slice(predictions, [0, 0], [-1, 1])).squeeze());\n const scoresVal = scores.dataSync();\n const rawBoxes = tf.slice(predictions, [0, 1], [-1, 4]);\n const boxes = this.normalizeBoxes(rawBoxes);\n rawBoxes.dispose();\n const filteredT = await tf.image.nonMaxSuppressionAsync(boxes, scores, config.maxHands, config.iouThreshold, config.scoreThreshold);\n const filtered = filteredT.arraySync();\n\n scores.dispose();\n filteredT.dispose();\n const hands = [];\n for (const boxIndex of filtered) {\n if (scoresVal[boxIndex] >= config.minConfidence) {\n const matchingBox = tf.slice(boxes, [boxIndex, 0], [1, -1]);\n const rawPalmLandmarks = tf.slice(predictions, [boxIndex, 5], [1, 14]);\n const palmLandmarks = tf.tidy(() => this.normalizeLandmarks(rawPalmLandmarks, boxIndex).reshape([-1, 2]));\n rawPalmLandmarks.dispose();\n hands.push({ box: matchingBox, palmLandmarks, confidence: scoresVal[boxIndex] });\n }\n }\n predictions.dispose();\n boxes.dispose();\n return hands;\n }\n\n async estimateHandBounds(input, 
config) {\n const inputHeight = input.shape[1];\n const inputWidth = input.shape[2];\n const image = tf.tidy(() => input.resizeBilinear([config.inputSize, config.inputSize]).div(127.5).sub(1));\n const predictions = await this.getBoxes(image, config);\n image.dispose();\n if (!predictions || predictions.length === 0) return null;\n const hands = [];\n for (const prediction of predictions) {\n const boxes = prediction.box.dataSync();\n const startPoint = boxes.slice(0, 2);\n const endPoint = boxes.slice(2, 4);\n const palmLandmarks = prediction.palmLandmarks.arraySync();\n prediction.box.dispose();\n prediction.palmLandmarks.dispose();\n hands.push(box.scaleBoxCoordinates({ startPoint, endPoint, palmLandmarks, confidence: prediction.confidence }, [inputWidth / config.inputSize, inputHeight / config.inputSize]));\n }\n return hands;\n }\n}\nexports.HandDetector = HandDetector;\n", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport { tf } from '../tf.js';\nimport * as box from './box';\nimport * as util from './util';\n\nconst PALM_BOX_SHIFT_VECTOR = [0, -0.4];\nconst PALM_BOX_ENLARGE_FACTOR = 3;\nconst HAND_BOX_SHIFT_VECTOR = [0, -0.1]; // move detected hand box by x,y to ease landmark detection\nconst HAND_BOX_ENLARGE_FACTOR = 1.65; // increased from model default 1.65;\nconst PALM_LANDMARK_IDS = [0, 5, 9, 13, 17, 1, 2];\nconst PALM_LANDMARKS_INDEX_OF_PALM_BASE = 0;\nconst PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE = 2;\n\nclass HandPipeline {\n constructor(boundingBoxDetector, meshDetector, inputSize) {\n this.boxDetector = boundingBoxDetector;\n this.meshDetector = meshDetector;\n this.inputSize = inputSize;\n this.storedBoxes = [];\n this.skipped = 1000;\n this.detectedHands = 0;\n }\n\n getBoxForPalmLandmarks(palmLandmarks, rotationMatrix) {\n const rotatedPalmLandmarks = palmLandmarks.map((coord) => {\n const homogeneousCoordinate = [...coord, 1];\n return util.rotatePoint(homogeneousCoordinate, rotationMatrix);\n });\n const boxAroundPalm = this.calculateLandmarksBoundingBox(rotatedPalmLandmarks);\n return box.enlargeBox(box.squarifyBox(box.shiftBox(boxAroundPalm, PALM_BOX_SHIFT_VECTOR)), PALM_BOX_ENLARGE_FACTOR);\n }\n\n getBoxForHandLandmarks(landmarks) {\n const boundingBox = this.calculateLandmarksBoundingBox(landmarks);\n const boxAroundHand = box.enlargeBox(box.squarifyBox(box.shiftBox(boundingBox, HAND_BOX_SHIFT_VECTOR)), HAND_BOX_ENLARGE_FACTOR);\n const palmLandmarks = [];\n for (let i = 0; i < PALM_LANDMARK_IDS.length; i++) {\n palmLandmarks.push(landmarks[PALM_LANDMARK_IDS[i]].slice(0, 2));\n }\n boxAroundHand.palmLandmarks = palmLandmarks;\n return boxAroundHand;\n }\n\n transformRawCoords(rawCoords, box2, angle, rotationMatrix) {\n const boxSize = box.getBoxSize(box2);\n const scaleFactor = [boxSize[0] / this.inputSize, boxSize[1] / this.inputSize];\n const coordsScaled = rawCoords.map((coord) => [\n 
scaleFactor[0] * (coord[0] - this.inputSize / 2),\n scaleFactor[1] * (coord[1] - this.inputSize / 2),\n coord[2],\n ]);\n const coordsRotationMatrix = util.buildRotationMatrix(angle, [0, 0]);\n const coordsRotated = coordsScaled.map((coord) => {\n const rotated = util.rotatePoint(coord, coordsRotationMatrix);\n return [...rotated, coord[2]];\n });\n const inverseRotationMatrix = util.invertTransformMatrix(rotationMatrix);\n const boxCenter = [...box.getBoxCenter(box2), 1];\n const originalBoxCenter = [\n util.dot(boxCenter, inverseRotationMatrix[0]),\n util.dot(boxCenter, inverseRotationMatrix[1]),\n ];\n return coordsRotated.map((coord) => [\n coord[0] + originalBoxCenter[0],\n coord[1] + originalBoxCenter[1],\n coord[2],\n ]);\n }\n\n async estimateHands(image, config) {\n this.skipped++;\n let useFreshBox = false;\n\n // run new detector every skipFrames unless we only want box to start with\n let boxes;\n if ((this.skipped > config.skipFrames) || !config.landmarks) {\n boxes = await this.boxDetector.estimateHandBounds(image, config);\n // don't reset on test image\n if ((image.shape[1] !== 255) && (image.shape[2] !== 255)) this.skipped = 0;\n }\n\n // if detector result count doesn't match current working set, use it to reset current working set\n if (boxes && (boxes.length > 0) && ((boxes.length !== this.detectedHands) && (this.detectedHands !== config.maxHands) || !config.landmarks)) {\n this.storedBoxes = [];\n this.detectedHands = 0;\n for (const possible of boxes) this.storedBoxes.push(possible);\n if (this.storedBoxes.length > 0) useFreshBox = true;\n }\n const hands = [];\n // console.log(`skipped: ${this.skipped} max: ${config.maxHands} detected: ${this.detectedHands} stored: ${this.storedBoxes.length} new: ${boxes?.length}`);\n\n // go through working set of boxes\n for (const i in this.storedBoxes) {\n const currentBox = this.storedBoxes[i];\n if (!currentBox) continue;\n if (config.landmarks) {\n const angle = util.computeRotation(currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_PALM_BASE], currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE]);\n const palmCenter = box.getBoxCenter(currentBox);\n const palmCenterNormalized = [palmCenter[0] / image.shape[2], palmCenter[1] / image.shape[1]];\n const rotatedImage = tf.image.rotateWithOffset(image, angle, 0, palmCenterNormalized);\n const rotationMatrix = util.buildRotationMatrix(-angle, palmCenter);\n const newBox = useFreshBox ? 
this.getBoxForPalmLandmarks(currentBox.palmLandmarks, rotationMatrix) : currentBox;\n const croppedInput = box.cutBoxFromImageAndResize(newBox, rotatedImage, [this.inputSize, this.inputSize]);\n const handImage = croppedInput.div(255);\n croppedInput.dispose();\n rotatedImage.dispose();\n const [confidence, keypoints] = await this.meshDetector.predict(handImage);\n handImage.dispose();\n const confidenceValue = confidence.dataSync()[0];\n confidence.dispose();\n if (confidenceValue >= config.minConfidence) {\n const keypointsReshaped = tf.reshape(keypoints, [-1, 3]);\n const rawCoords = keypointsReshaped.arraySync();\n keypoints.dispose();\n keypointsReshaped.dispose();\n const coords = this.transformRawCoords(rawCoords, newBox, angle, rotationMatrix);\n const nextBoundingBox = this.getBoxForHandLandmarks(coords);\n this.storedBoxes[i] = nextBoundingBox;\n const result = {\n landmarks: coords,\n confidence: confidenceValue,\n box: {\n topLeft: nextBoundingBox.startPoint,\n bottomRight: nextBoundingBox.endPoint,\n },\n };\n hands.push(result);\n } else {\n this.storedBoxes[i] = null;\n }\n keypoints.dispose();\n } else {\n const enlarged = box.enlargeBox(box.squarifyBox(box.shiftBox(currentBox, HAND_BOX_SHIFT_VECTOR)), HAND_BOX_ENLARGE_FACTOR);\n const result = {\n confidence: currentBox.confidence,\n box: {\n topLeft: enlarged.startPoint,\n bottomRight: enlarged.endPoint,\n },\n };\n hands.push(result);\n }\n }\n this.storedBoxes = this.storedBoxes.filter((a) => a !== null);\n this.detectedHands = hands.length;\n return hands;\n }\n\n // eslint-disable-next-line class-methods-use-this\n calculateLandmarksBoundingBox(landmarks) {\n const xs = landmarks.map((d) => d[0]);\n const ys = landmarks.map((d) => d[1]);\n const startPoint = [Math.min(...xs), Math.min(...ys)];\n const endPoint = [Math.max(...xs), Math.max(...ys)];\n return { startPoint, endPoint };\n }\n}\n\nexports.HandPipeline = HandPipeline;\n", "exports.anchors = [\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.015625,\n },\n {\n w: 
1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n 
y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.046875,\n },\n {\n w: 
1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n 
y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.109375,\n },\n {\n w: 
1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n 
y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.171875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.171875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.171875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.171875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.171875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.171875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.171875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.171875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.171875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.171875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.171875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.171875,\n },\n {\n w: 
1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.171875,\n },\n [anchor entries continue in the same regular pattern through this span: every anchor has w: 1 and h: 1; each grid cell is listed twice; x_center steps through 0.015625, 0.046875, …, 0.984375 (32 columns, spacing 0.03125); y_center advances row by row with the same spacing, covering rows y_center = 0.171875 through 0.703125 here]\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n 
y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.703125,\n },\n {\n w: 
1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n 
y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.765625,\n },\n {\n w: 
1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n 
y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.828125,\n },\n {\n w: 
1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n 
y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.859375,\n },\n {\n w: 
1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n 
y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.921875,\n },\n {\n w: 
1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n 
y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.953125,\n },\n {\n w: 
1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n 
y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.09375,\n },\n {\n w: 1,\n 
h: 1,\n x_center: 0.15625,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 
1,\n x_center: 0.78125,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 
1,\n x_center: 0.46875,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 
1,\n x_center: 0.09375,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 
1,\n x_center: 0.78125,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 
1,\n x_center: 0.40625,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 
1,\n x_center: 0.09375,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 
1,\n x_center: 0.71875,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 
1,\n x_center: 0.40625,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n 
x_center: 0.0625,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.1875,\n },\n 
{\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n 
y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n 
x_center: 0.4375,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.5625,\n },\n 
{\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n 
y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n 
x_center: 0.8125,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.9375,\n 
},\n];\n", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// https://storage.googleapis.com/tfjs-models/demos/handpose/index.html\n\nimport { loadGraphModel } from '../tf.js';\nimport * as handdetector from './handdetector';\nimport * as pipeline from './handpipeline';\nimport * as anchors from './anchors';\n\nconst MESH_ANNOTATIONS = {\n thumb: [1, 2, 3, 4],\n indexFinger: [5, 6, 7, 8],\n middleFinger: [9, 10, 11, 12],\n ringFinger: [13, 14, 15, 16],\n pinky: [17, 18, 19, 20],\n palmBase: [0],\n};\n\nclass HandPose {\n constructor(pipe) {\n this.pipeline = pipe;\n }\n\n static getAnnotations() {\n return MESH_ANNOTATIONS;\n }\n\n async estimateHands(input, config) {\n const predictions = await this.pipeline.estimateHands(input, config);\n if (!predictions) return [];\n const hands = [];\n for (const prediction of predictions) {\n const annotations = {};\n if (prediction.landmarks) {\n for (const key of Object.keys(MESH_ANNOTATIONS)) {\n annotations[key] = MESH_ANNOTATIONS[key].map((index) => prediction.landmarks[index]);\n }\n }\n hands.push({\n confidence: prediction.confidence,\n box: prediction.box ? 
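// A minimal generator sketch for the trailing anchor layers visible above, assuming only the
// pattern in the data itself: a 16x16 grid of cell centers ((2k+1)/32) with 2 identical anchors
// per cell, followed by an 8x8 grid of centers ((2k+1)/16) with 6 anchors per cell, all w = h = 1.
// Function and parameter names here are illustrative, not the library's own code.
function generateAnchorLayer(gridSize, anchorsPerCell) {
  const layer = [];
  for (let y = 0; y < gridSize; y++) {           // y_center sweeps slowest
    for (let x = 0; x < gridSize; x++) {         // x_center sweeps next
      for (let a = 0; a < anchorsPerCell; a++) { // repeated anchors per cell come last
        layer.push({ w: 1, h: 1, x_center: (x + 0.5) / gridSize, y_center: (y + 0.5) / gridSize });
      }
    }
  }
  return layer;
}
// [...generateAnchorLayer(16, 2), ...generateAnchorLayer(8, 6)] reproduces the layers shown above.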
[\n prediction.box.topLeft[0],\n prediction.box.topLeft[1],\n prediction.box.bottomRight[0] - prediction.box.topLeft[0],\n prediction.box.bottomRight[1] - prediction.box.topLeft[1],\n ] : 0,\n landmarks: prediction.landmarks,\n annotations,\n });\n }\n return hands;\n }\n}\nexports.HandPose = HandPose;\n\nasync function load(config) {\n const [handDetectorModel, handPoseModel] = await Promise.all([\n loadGraphModel(config.detector.modelPath, { fromTFHub: config.detector.modelPath.includes('tfhub.dev') }),\n loadGraphModel(config.skeleton.modelPath, { fromTFHub: config.skeleton.modelPath.includes('tfhub.dev') }),\n ]);\n const detector = new handdetector.HandDetector(handDetectorModel, config.inputSize, anchors.anchors);\n const pipe = new pipeline.HandPipeline(detector, handPoseModel, config.inputSize);\n const handpose = new HandPose(pipe);\n // eslint-disable-next-line no-console\n console.log(`Human: load model: ${config.detector.modelPath.match(/\\/(.*)\\./)[1]}`);\n // eslint-disable-next-line no-console\n console.log(`Human: load model: ${config.skeleton.modelPath.match(/\\/(.*)\\./)[1]}`);\n return handpose;\n}\nexports.load = load;\n", "exports.body = (res) => {\n if (!res) return [];\n const gestures = [];\n for (const pose of res) {\n // raising hands\n const leftWrist = pose.keypoints.find((a) => (a.part === 'leftWrist'));\n const rightWrist = pose.keypoints.find((a) => (a.part === 'rightWrist'));\n const nose = pose.keypoints.find((a) => (a.part === 'nose'));\n if (nose && leftWrist && rightWrist && (leftWrist.position.y < nose.position.y) && (rightWrist.position.y < nose.position.y)) gestures.push('i give up');\n else if (nose && leftWrist && (leftWrist.position.y < nose.position.y)) gestures.push('raise left hand');\n else if (nose && rightWrist && (rightWrist.position.y < nose.position.y)) gestures.push('raise right hand');\n\n // leaning\n const leftShoulder = pose.keypoints.find((a) => (a.part === 'leftShoulder'));\n const rightShoulder = pose.keypoints.find((a) => (a.part === 'rightShoulder'));\n if (leftShoulder && rightShoulder) gestures.push(`leaning ${(leftShoulder.position.y > rightShoulder.position.y) ? 'left' : 'right'}`);\n }\n return gestures;\n};\n\nexports.face = (res) => {\n if (!res) return [];\n const gestures = [];\n for (const face of res) {\n // if (face.annotations['rightCheek'] && face.annotations['leftCheek'] && (face.annotations['rightCheek'].length > 0) && (face.annotations['leftCheek'].length > 0)) {\n // gestures.push(`facing ${((face.annotations['rightCheek'][0][2] > 0) || (face.annotations['leftCheek'][0][2] < 0)) ? 'right' : 'left'}`);\n // }\n if (face.mesh && face.mesh.length > 0) {\n const eyeFacing = face.mesh[35][2] - face.mesh[263][2];\n if (Math.abs(eyeFacing) < 10) gestures.push('facing camera');\n else gestures.push(`facing ${eyeFacing < 0 ? 
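// A minimal usage sketch for the hand module above. The model paths and input size are
// illustrative assumptions; only the load() config fields and the estimateHands() result shape
// (confidence, box as [x, y, width, height], landmarks, named finger annotations) come from the code.
async function detectHands(videoElement) {
  const config = {
    inputSize: 256,                                         // assumed detector input resolution
    detector: { modelPath: '../models/handdetect.json' },   // hypothetical path
    skeleton: { modelPath: '../models/handskeleton.json' }, // hypothetical path
  };
  const handpose = await load(config);                      // returns a HandPose instance
  const hands = await handpose.estimateHands(videoElement, config);
  for (const hand of hands) {
    // annotations maps finger names (thumb, indexFinger, ...) to their landmark points
    console.log(hand.confidence, hand.box, hand.annotations.indexFinger);
  }
  return hands;
}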
'right' : 'left'}`);\n const openLeft = Math.abs(face.mesh[374][1] - face.mesh[386][1]) / Math.abs(face.mesh[443][1] - face.mesh[450][1]); // center of eye inner lid y coord div center of wider eye border y coord\n if (openLeft < 0.2) gestures.push('blink left eye');\n const openRight = Math.abs(face.mesh[145][1] - face.mesh[159][1]) / Math.abs(face.mesh[223][1] - face.mesh[230][1]); // center of eye inner lid y coord div center of wider eye border y coord\n if (openRight < 0.2) gestures.push('blink right eye');\n const mouthOpen = Math.min(100, 500 * Math.abs(face.mesh[13][1] - face.mesh[14][1]) / Math.abs(face.mesh[10][1] - face.mesh[152][1]));\n if (mouthOpen > 10) gestures.push(`mouth ${Math.trunc(mouthOpen)}% open`);\n const chinDepth = face.mesh[152][2];\n if (Math.abs(chinDepth) > 10) gestures.push(`head ${chinDepth < 0 ? 'up' : 'down'}`);\n }\n }\n return gestures;\n};\n\nexports.hand = (res) => {\n if (!res) return [];\n const gestures = [];\n for (const hand of res) {\n const fingers = [];\n for (const [finger, pos] of Object.entries(hand['annotations'])) {\n if (finger !== 'palmBase') fingers.push({ name: finger.toLowerCase(), position: pos[0] }); // get tip of each finger\n }\n if (fingers && fingers.length > 0) {\n const closest = fingers.reduce((best, a) => (best.position[2] < a.position[2] ? best : a));\n const highest = fingers.reduce((best, a) => (best.position[1] < a.position[1] ? best : a));\n gestures.push(`${closest.name} forward ${highest.name} up`);\n }\n }\n return gestures;\n};\n", "/* eslint-disable no-use-before-define */\n/*\nWebGLImageFilter - MIT Licensed\n2013, Dominic Szablewski - phoboslab.org\n\n*/\n\nconst WebGLProgram = function (gl, vertexSource, fragmentSource) {\n const _collect = function (source, prefix, collection) {\n const r = new RegExp('\\\\b' + prefix + ' \\\\w+ (\\\\w+)', 'ig');\n source.replace(r, (match, name) => {\n collection[name] = 0;\n return match;\n });\n };\n\n const _compile = function (source, type) {\n const shader = gl.createShader(type);\n gl.shaderSource(shader, source);\n gl.compileShader(shader);\n\n if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {\n throw new Error('Filter: GL compile failed', gl.getShaderInfoLog(shader));\n }\n return shader;\n };\n\n this.uniform = {};\n this.attribute = {};\n\n const _vsh = _compile(vertexSource, gl.VERTEX_SHADER);\n const _fsh = _compile(fragmentSource, gl.FRAGMENT_SHADER);\n\n this.id = gl.createProgram();\n gl.attachShader(this.id, _vsh);\n gl.attachShader(this.id, _fsh);\n gl.linkProgram(this.id);\n\n if (!gl.getProgramParameter(this.id, gl.LINK_STATUS)) {\n throw new Error('Filter: GL link failed', gl.getProgramInfoLog(this.id));\n }\n\n gl.useProgram(this.id);\n\n // Collect attributes\n _collect(vertexSource, 'attribute', this.attribute);\n for (const a in this.attribute) {\n this.attribute[a] = gl.getAttribLocation(this.id, a);\n }\n\n // Collect uniforms\n _collect(vertexSource, 'uniform', this.uniform);\n _collect(fragmentSource, 'uniform', this.uniform);\n for (const u in this.uniform) {\n this.uniform[u] = gl.getUniformLocation(this.id, u);\n }\n};\n\nconst WebGLImageFilter = function (params) {\n if (!params) params = { };\n let _drawCount = 0;\n let _sourceTexture = null;\n let _lastInChain = false;\n let _currentFramebufferIndex = -1;\n let _tempFramebuffers = [null, null];\n let _filterChain = [];\n let _width = -1;\n let _height = -1;\n let _vertexBuffer = null;\n let _currentProgram = null;\n const _canvas = params.canvas || document.createElement('canvas');\n\n 
// key is the shader program source, value is the compiled program\n const _shaderProgramCache = { };\n\n const gl = _canvas.getContext('webgl');\n if (!gl) throw new Error('Filter: getContext() failed');\n\n this.addFilter = function (name) {\n // eslint-disable-next-line prefer-rest-params\n const args = Array.prototype.slice.call(arguments, 1);\n const filter = _filter[name];\n\n _filterChain.push({ func: filter, args });\n };\n\n this.reset = function () {\n _filterChain = [];\n };\n\n this.apply = function (image) {\n _resize(image.width, image.height);\n _drawCount = 0;\n\n // Create the texture for the input image if we haven't yet\n if (!_sourceTexture) _sourceTexture = gl.createTexture();\n gl.bindTexture(gl.TEXTURE_2D, _sourceTexture);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);\n gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);\n\n // No filters? Just draw\n if (_filterChain.length === 0) {\n // const program = _compileShader(SHADER.FRAGMENT_IDENTITY);\n _draw();\n return _canvas;\n }\n\n for (let i = 0; i < _filterChain.length; i++) {\n _lastInChain = (i === _filterChain.length - 1);\n const f = _filterChain[i];\n f.func.apply(this, f.args || []);\n }\n\n return _canvas;\n };\n\n const _resize = function (width, height) {\n // Same width/height? Nothing to do here\n if (width === _width && height === _height) { return; }\n\n _canvas.width = width;\n _width = width;\n _canvas.height = height;\n _height = height;\n\n // Create the context if we don't have it yet\n if (!_vertexBuffer) {\n // Create the vertex buffer for the two triangles [x, y, u, v] * 6\n const vertices = new Float32Array([\n -1, -1, 0, 1, 1, -1, 1, 1, -1, 1, 0, 0,\n -1, 1, 0, 0, 1, -1, 1, 1, 1, 1, 1, 0,\n ]);\n // eslint-disable-next-line no-unused-expressions\n (_vertexBuffer = gl.createBuffer(), gl.bindBuffer(gl.ARRAY_BUFFER, _vertexBuffer));\n gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW);\n\n // Note sure if this is a good idea; at least it makes texture loading\n // in Ejecta instant.\n gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, true);\n }\n\n gl.viewport(0, 0, _width, _height);\n\n // Delete old temp framebuffers\n _tempFramebuffers = [null, null];\n };\n\n const _getTempFramebuffer = function (index) {\n _tempFramebuffers[index] = _tempFramebuffers[index]\n || _createFramebufferTexture(_width, _height);\n\n return _tempFramebuffers[index];\n };\n\n const _createFramebufferTexture = function (width, height) {\n const fbo = gl.createFramebuffer();\n gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);\n\n const renderbuffer = gl.createRenderbuffer();\n gl.bindRenderbuffer(gl.RENDERBUFFER, renderbuffer);\n\n const texture = gl.createTexture();\n gl.bindTexture(gl.TEXTURE_2D, texture);\n gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);\n\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);\n\n gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);\n\n gl.bindTexture(gl.TEXTURE_2D, null);\n gl.bindFramebuffer(gl.FRAMEBUFFER, null);\n\n 
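The addFilter/apply pair above is how the image pipeline later in this bundle (image.js) drives the filter chain: filters are queued by name, then apply runs one shader pass per queued filter and returns the target canvas. A minimal usage sketch under those assumptions, for a browser context with WebGL available; the wrapper function applyFilters and the concrete filter values are illustrative only.

import * as fxImage from './imagefx.js';

// hedged sketch: apply a small filter chain to any drawable input (image, canvas or video frame)
function applyFilters(input) {
  const fx = new fxImage.Canvas(); // WebGLImageFilter is exported as Canvas; creates its own target canvas when none is passed in
  fx.reset();                      // drop any previously queued filters
  fx.addFilter('brightness', 0.1); // addFilter(name, ...args) queues one pass
  fx.addFilter('contrast', 0.2);
  fx.addFilter('blur', 5);
  return fx.apply(input);          // resizes the target to the input, runs the chain, returns the canvas
}

In the library itself this path is only taken in the browser (image.js guards on tf.ENV.flags.IS_BROWSER) and always queues at least the brightness filter before applying the rest of the configured chain.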
return { fbo, texture };\n };\n\n const _draw = function (flags) {\n let source = null;\n let target = null;\n let flipY = false;\n\n // Set up the source\n if (_drawCount === 0) {\n // First draw call - use the source texture\n source = _sourceTexture;\n } else {\n // All following draw calls use the temp buffer last drawn to\n source = _getTempFramebuffer(_currentFramebufferIndex).texture;\n }\n _drawCount++;\n\n // Set up the target\n if (_lastInChain && !(flags & DRAW.INTERMEDIATE)) {\n // Last filter in our chain - draw directly to the WebGL Canvas. We may\n // also have to flip the image vertically now\n target = null;\n flipY = _drawCount % 2 === 0;\n } else {\n // Intermediate draw call - get a temp buffer to draw to\n _currentFramebufferIndex = (_currentFramebufferIndex + 1) % 2;\n target = _getTempFramebuffer(_currentFramebufferIndex).fbo;\n }\n\n // Bind the source and target and draw the two triangles\n gl.bindTexture(gl.TEXTURE_2D, source);\n gl.bindFramebuffer(gl.FRAMEBUFFER, target);\n\n gl.uniform1f(_currentProgram.uniform.flipY, (flipY ? -1 : 1));\n gl.drawArrays(gl.TRIANGLES, 0, 6);\n };\n\n const _compileShader = function (fragmentSource) {\n if (_shaderProgramCache[fragmentSource]) {\n _currentProgram = _shaderProgramCache[fragmentSource];\n gl.useProgram(_currentProgram.id);\n return _currentProgram;\n }\n\n // Compile shaders\n _currentProgram = new WebGLProgram(gl, SHADER.VERTEX_IDENTITY, fragmentSource);\n\n const floatSize = Float32Array.BYTES_PER_ELEMENT;\n const vertSize = 4 * floatSize;\n gl.enableVertexAttribArray(_currentProgram.attribute.pos);\n gl.vertexAttribPointer(_currentProgram.attribute.pos, 2, gl.FLOAT, false, vertSize, 0 * floatSize);\n gl.enableVertexAttribArray(_currentProgram.attribute.uv);\n gl.vertexAttribPointer(_currentProgram.attribute.uv, 2, gl.FLOAT, false, vertSize, 2 * floatSize);\n\n _shaderProgramCache[fragmentSource] = _currentProgram;\n return _currentProgram;\n };\n\n let DRAW = { INTERMEDIATE: 1 };\n\n let SHADER = {};\n SHADER.VERTEX_IDENTITY = [\n 'precision highp float;',\n 'attribute vec2 pos;',\n 'attribute vec2 uv;',\n 'varying vec2 vUv;',\n 'uniform float flipY;',\n\n 'void main(void) {',\n 'vUv = uv;',\n 'gl_Position = vec4(pos.x, pos.y*flipY, 0.0, 1.);',\n '}',\n ].join('\\n');\n\n SHADER.FRAGMENT_IDENTITY = [\n 'precision highp float;',\n 'varying vec2 vUv;',\n 'uniform sampler2D texture;',\n\n 'void main(void) {',\n 'gl_FragColor = texture2D(texture, vUv);',\n '}',\n ].join('\\n');\n\n let _filter = {};\n\n // -------------------------------------------------------------------------\n // Color Matrix Filter\n\n _filter.colorMatrix = function (matrix) {\n // Create a Float32 Array and normalize the offset component to 0-1\n const m = new Float32Array(matrix);\n m[4] /= 255;\n m[9] /= 255;\n m[14] /= 255;\n m[19] /= 255;\n\n // Can we ignore the alpha value? Makes things a bit faster.\n const shader = (m[18] === 1 && m[3] === 0 && m[8] === 0 && m[13] === 0 && m[15] === 0 && m[16] === 0 && m[17] === 0 && m[19] === 0)\n ? 
_filter.colorMatrix.SHADER.WITHOUT_ALPHA\n : _filter.colorMatrix.SHADER.WITH_ALPHA;\n\n const program = _compileShader(shader);\n gl.uniform1fv(program.uniform.m, m);\n _draw();\n };\n\n _filter.colorMatrix.SHADER = {};\n _filter.colorMatrix.SHADER.WITH_ALPHA = [\n 'precision highp float;',\n 'varying vec2 vUv;',\n 'uniform sampler2D texture;',\n 'uniform float m[20];',\n\n 'void main(void) {',\n 'vec4 c = texture2D(texture, vUv);',\n 'gl_FragColor.r = m[0] * c.r + m[1] * c.g + m[2] * c.b + m[3] * c.a + m[4];',\n 'gl_FragColor.g = m[5] * c.r + m[6] * c.g + m[7] * c.b + m[8] * c.a + m[9];',\n 'gl_FragColor.b = m[10] * c.r + m[11] * c.g + m[12] * c.b + m[13] * c.a + m[14];',\n 'gl_FragColor.a = m[15] * c.r + m[16] * c.g + m[17] * c.b + m[18] * c.a + m[19];',\n '}',\n ].join('\\n');\n _filter.colorMatrix.SHADER.WITHOUT_ALPHA = [\n 'precision highp float;',\n 'varying vec2 vUv;',\n 'uniform sampler2D texture;',\n 'uniform float m[20];',\n\n 'void main(void) {',\n 'vec4 c = texture2D(texture, vUv);',\n 'gl_FragColor.r = m[0] * c.r + m[1] * c.g + m[2] * c.b + m[4];',\n 'gl_FragColor.g = m[5] * c.r + m[6] * c.g + m[7] * c.b + m[9];',\n 'gl_FragColor.b = m[10] * c.r + m[11] * c.g + m[12] * c.b + m[14];',\n 'gl_FragColor.a = c.a;',\n '}',\n ].join('\\n');\n\n _filter.brightness = function (brightness) {\n const b = (brightness || 0) + 1;\n _filter.colorMatrix([\n b, 0, 0, 0, 0,\n 0, b, 0, 0, 0,\n 0, 0, b, 0, 0,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.saturation = function (amount) {\n const x = (amount || 0) * 2 / 3 + 1;\n const y = ((x - 1) * -0.5);\n _filter.colorMatrix([\n x, y, y, 0, 0,\n y, x, y, 0, 0,\n y, y, x, 0, 0,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.desaturate = function () {\n _filter.saturation(-1);\n };\n\n _filter.contrast = function (amount) {\n const v = (amount || 0) + 1;\n const o = -128 * (v - 1);\n\n _filter.colorMatrix([\n v, 0, 0, 0, o,\n 0, v, 0, 0, o,\n 0, 0, v, 0, o,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.negative = function () {\n _filter.contrast(-2);\n };\n\n _filter.hue = function (rotation) {\n rotation = (rotation || 0) / 180 * Math.PI;\n const cos = Math.cos(rotation);\n const sin = Math.sin(rotation);\n const lumR = 0.213;\n const lumG = 0.715;\n const lumB = 0.072;\n\n _filter.colorMatrix([\n lumR + cos * (1 - lumR) + sin * (-lumR), lumG + cos * (-lumG) + sin * (-lumG), lumB + cos * (-lumB) + sin * (1 - lumB), 0, 0,\n lumR + cos * (-lumR) + sin * (0.143), lumG + cos * (1 - lumG) + sin * (0.140), lumB + cos * (-lumB) + sin * (-0.283), 0, 0,\n lumR + cos * (-lumR) + sin * (-(1 - lumR)), lumG + cos * (-lumG) + sin * (lumG), lumB + cos * (1 - lumB) + sin * (lumB), 0, 0,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.desaturateLuminance = function () {\n _filter.colorMatrix([\n 0.2764723, 0.9297080, 0.0938197, 0, -37.1,\n 0.2764723, 0.9297080, 0.0938197, 0, -37.1,\n 0.2764723, 0.9297080, 0.0938197, 0, -37.1,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.sepia = function () {\n _filter.colorMatrix([\n 0.393, 0.7689999, 0.18899999, 0, 0,\n 0.349, 0.6859999, 0.16799999, 0, 0,\n 0.272, 0.5339999, 0.13099999, 0, 0,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.brownie = function () {\n _filter.colorMatrix([\n 0.5997023498159715, 0.34553243048391263, -0.2708298674538042, 0, 47.43192855600873,\n -0.037703249837783157, 0.8609577587992641, 0.15059552388459913, 0, -36.96841498319127,\n 0.24113635128153335, -0.07441037908422492, 0.44972182064877153, 0, -7.562075277591283,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.vintagePinhole = function () {\n _filter.colorMatrix([\n 0.6279345635605994, 
0.3202183420819367, -0.03965408211312453, 0, 9.651285835294123,\n 0.02578397704808868, 0.6441188644374771, 0.03259127616149294, 0, 7.462829176470591,\n 0.0466055556782719, -0.0851232987247891, 0.5241648018700465, 0, 5.159190588235296,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.kodachrome = function () {\n _filter.colorMatrix([\n 1.1285582396593525, -0.3967382283601348, -0.03992559172921793, 0, 63.72958762196502,\n -0.16404339962244616, 1.0835251566291304, -0.05498805115633132, 0, 24.732407896706203,\n -0.16786010706155763, -0.5603416277695248, 1.6014850761964943, 0, 35.62982807460946,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.technicolor = function () {\n _filter.colorMatrix([\n 1.9125277891456083, -0.8545344976951645, -0.09155508482755585, 0, 11.793603434377337,\n -0.3087833385928097, 1.7658908555458428, -0.10601743074722245, 0, -70.35205161461398,\n -0.231103377548616, -0.7501899197440212, 1.847597816108189, 0, 30.950940869491138,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.polaroid = function () {\n _filter.colorMatrix([\n 1.438, -0.062, -0.062, 0, 0,\n -0.122, 1.378, -0.122, 0, 0,\n -0.016, -0.016, 1.483, 0, 0,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.shiftToBGR = function () {\n _filter.colorMatrix([\n 0, 0, 1, 0, 0,\n 0, 1, 0, 0, 0,\n 1, 0, 0, 0, 0,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n // -------------------------------------------------------------------------\n // Convolution Filter\n\n _filter.convolution = function (matrix) {\n const m = new Float32Array(matrix);\n const pixelSizeX = 1 / _width;\n const pixelSizeY = 1 / _height;\n\n const program = _compileShader(_filter.convolution.SHADER);\n gl.uniform1fv(program.uniform.m, m);\n gl.uniform2f(program.uniform.px, pixelSizeX, pixelSizeY);\n _draw();\n };\n\n _filter.convolution.SHADER = [\n 'precision highp float;',\n 'varying vec2 vUv;',\n 'uniform sampler2D texture;',\n 'uniform vec2 px;',\n 'uniform float m[9];',\n\n 'void main(void) {',\n 'vec4 c11 = texture2D(texture, vUv - px);', // top left\n 'vec4 c12 = texture2D(texture, vec2(vUv.x, vUv.y - px.y));', // top center\n 'vec4 c13 = texture2D(texture, vec2(vUv.x + px.x, vUv.y - px.y));', // top right\n\n 'vec4 c21 = texture2D(texture, vec2(vUv.x - px.x, vUv.y) );', // mid left\n 'vec4 c22 = texture2D(texture, vUv);', // mid center\n 'vec4 c23 = texture2D(texture, vec2(vUv.x + px.x, vUv.y) );', // mid right\n\n 'vec4 c31 = texture2D(texture, vec2(vUv.x - px.x, vUv.y + px.y) );', // bottom left\n 'vec4 c32 = texture2D(texture, vec2(vUv.x, vUv.y + px.y) );', // bottom center\n 'vec4 c33 = texture2D(texture, vUv + px );', // bottom right\n\n 'gl_FragColor = ',\n 'c11 * m[0] + c12 * m[1] + c22 * m[2] +',\n 'c21 * m[3] + c22 * m[4] + c23 * m[5] +',\n 'c31 * m[6] + c32 * m[7] + c33 * m[8];',\n 'gl_FragColor.a = c22.a;',\n '}',\n ].join('\\n');\n\n _filter.detectEdges = function () {\n _filter.convolution.call(this, [\n 0, 1, 0,\n 1, -4, 1,\n 0, 1, 0,\n ]);\n };\n\n _filter.sobelX = function () {\n _filter.convolution.call(this, [\n -1, 0, 1,\n -2, 0, 2,\n -1, 0, 1,\n ]);\n };\n\n _filter.sobelY = function () {\n _filter.convolution.call(this, [\n -1, -2, -1,\n 0, 0, 0,\n 1, 2, 1,\n ]);\n };\n\n _filter.sharpen = function (amount) {\n const a = amount || 1;\n _filter.convolution.call(this, [\n 0, -1 * a, 0,\n -1 * a, 1 + 4 * a, -1 * a,\n 0, -1 * a, 0,\n ]);\n };\n\n _filter.emboss = function (size) {\n const s = size || 1;\n _filter.convolution.call(this, [\n -2 * s, -1 * s, 0,\n -1 * s, 1, 1 * s,\n 0, 1 * s, 2 * s,\n ]);\n };\n\n // 
-------------------------------------------------------------------------\n // Blur Filter\n\n _filter.blur = function (size) {\n const blurSizeX = (size / 7) / _width;\n const blurSizeY = (size / 7) / _height;\n\n const program = _compileShader(_filter.blur.SHADER);\n\n // Vertical\n gl.uniform2f(program.uniform.px, 0, blurSizeY);\n _draw(DRAW.INTERMEDIATE);\n\n // Horizontal\n gl.uniform2f(program.uniform.px, blurSizeX, 0);\n _draw();\n };\n\n _filter.blur.SHADER = [\n 'precision highp float;',\n 'varying vec2 vUv;',\n 'uniform sampler2D texture;',\n 'uniform vec2 px;',\n\n 'void main(void) {',\n 'gl_FragColor = vec4(0.0);',\n 'gl_FragColor += texture2D(texture, vUv + vec2(-7.0*px.x, -7.0*px.y))*0.0044299121055113265;',\n 'gl_FragColor += texture2D(texture, vUv + vec2(-6.0*px.x, -6.0*px.y))*0.00895781211794;',\n 'gl_FragColor += texture2D(texture, vUv + vec2(-5.0*px.x, -5.0*px.y))*0.0215963866053;',\n 'gl_FragColor += texture2D(texture, vUv + vec2(-4.0*px.x, -4.0*px.y))*0.0443683338718;',\n 'gl_FragColor += texture2D(texture, vUv + vec2(-3.0*px.x, -3.0*px.y))*0.0776744219933;',\n 'gl_FragColor += texture2D(texture, vUv + vec2(-2.0*px.x, -2.0*px.y))*0.115876621105;',\n 'gl_FragColor += texture2D(texture, vUv + vec2(-1.0*px.x, -1.0*px.y))*0.147308056121;',\n 'gl_FragColor += texture2D(texture, vUv )*0.159576912161;',\n 'gl_FragColor += texture2D(texture, vUv + vec2( 1.0*px.x, 1.0*px.y))*0.147308056121;',\n 'gl_FragColor += texture2D(texture, vUv + vec2( 2.0*px.x, 2.0*px.y))*0.115876621105;',\n 'gl_FragColor += texture2D(texture, vUv + vec2( 3.0*px.x, 3.0*px.y))*0.0776744219933;',\n 'gl_FragColor += texture2D(texture, vUv + vec2( 4.0*px.x, 4.0*px.y))*0.0443683338718;',\n 'gl_FragColor += texture2D(texture, vUv + vec2( 5.0*px.x, 5.0*px.y))*0.0215963866053;',\n 'gl_FragColor += texture2D(texture, vUv + vec2( 6.0*px.x, 6.0*px.y))*0.00895781211794;',\n 'gl_FragColor += texture2D(texture, vUv + vec2( 7.0*px.x, 7.0*px.y))*0.0044299121055113265;',\n '}',\n ].join('\\n');\n\n // -------------------------------------------------------------------------\n // Pixelate Filter\n\n _filter.pixelate = function (size) {\n const blurSizeX = (size) / _width;\n const blurSizeY = (size) / _height;\n\n const program = _compileShader(_filter.pixelate.SHADER);\n\n // Horizontal\n gl.uniform2f(program.uniform.size, blurSizeX, blurSizeY);\n _draw();\n };\n\n _filter.pixelate.SHADER = [\n 'precision highp float;',\n 'varying vec2 vUv;',\n 'uniform vec2 size;',\n 'uniform sampler2D texture;',\n\n 'vec2 pixelate(vec2 coord, vec2 size) {',\n 'return floor( coord / size ) * size;',\n '}',\n\n 'void main(void) {',\n 'gl_FragColor = vec4(0.0);',\n 'vec2 coord = pixelate(vUv, size);',\n 'gl_FragColor += texture2D(texture, coord);',\n '}',\n ].join('\\n');\n};\n\nexports.Canvas = WebGLImageFilter;\n", "import { tf } from './tf.js';\nimport * as fxImage from './imagefx.js';\n\n// internal temp canvases\nlet inCanvas = null;\nlet outCanvas = null;\n\n// process input image and return tensor\n// input can be tensor, imagedata, htmlimageelement, htmlvideoelement\n// input is resized and run through imagefx filter\nfunction process(input, config) {\n let tensor;\n if (input instanceof tf.Tensor) {\n tensor = tf.clone(input);\n } else {\n const originalWidth = input.naturalWidth || input.videoWidth || input.width || (input.shape && (input.shape[1] > 0));\n const originalHeight = input.naturalHeight || input.videoHeight || input.height || (input.shape && (input.shape[2] > 0));\n let targetWidth = originalWidth;\n let targetHeight 
= originalHeight;\n if (config.filter.width > 0) targetWidth = config.filter.width;\n else if (config.filter.height > 0) targetWidth = originalWidth * (config.filter.height / originalHeight);\n if (config.filter.height > 0) targetHeight = config.filter.height;\n else if (config.filter.width > 0) targetHeight = originalHeight * (config.filter.width / originalWidth);\n if (!inCanvas || (inCanvas.width !== targetWidth) || (inCanvas.height !== targetHeight)) {\n inCanvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement('canvas');\n if (inCanvas.width !== targetWidth) inCanvas.width = targetWidth;\n if (inCanvas.height !== targetHeight) inCanvas.height = targetHeight;\n }\n const ctx = inCanvas.getContext('2d');\n if (input instanceof ImageData) ctx.putImageData(input, 0, 0);\n else ctx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas.width, inCanvas.height);\n if (config.filter.enabled) {\n if (!this.fx || !outCanvas || (inCanvas.width !== outCanvas.width) || (inCanvas.height !== outCanvas.height)) {\n outCanvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(inCanvas.width, inCanvas.height) : document.createElement('canvas');\n if (outCanvas.width !== inCanvas.width) outCanvas.width = inCanvas.width;\n if (outCanvas.height !== inCanvas.height) outCanvas.height = inCanvas.height;\n this.fx = tf.ENV.flags.IS_BROWSER ? new fxImage.Canvas({ canvas: outCanvas }) : null; // && (typeof document !== 'undefined')\n }\n this.fx.reset();\n this.fx.addFilter('brightness', config.filter.brightness); // must have at least one filter enabled\n if (config.filter.contrast !== 0) this.fx.addFilter('contrast', config.filter.contrast);\n if (config.filter.sharpness !== 0) this.fx.addFilter('sharpen', config.filter.sharpness);\n if (config.filter.blur !== 0) this.fx.addFilter('blur', config.filter.blur);\n if (config.filter.saturation !== 0) this.fx.addFilter('saturation', config.filter.saturation);\n if (config.filter.hue !== 0) this.fx.addFilter('hue', config.filter.hue);\n if (config.filter.negative) this.fx.addFilter('negative');\n if (config.filter.sepia) this.fx.addFilter('sepia');\n if (config.filter.vintage) this.fx.addFilter('brownie');\n if (config.filter.sepia) this.fx.addFilter('sepia');\n if (config.filter.kodachrome) this.fx.addFilter('kodachrome');\n if (config.filter.technicolor) this.fx.addFilter('technicolor');\n if (config.filter.polaroid) this.fx.addFilter('polaroid');\n if (config.filter.pixelate !== 0) this.fx.addFilter('pixelate', config.filter.pixelate);\n this.fx.apply(inCanvas);\n // read pixel data\n // const gl = outCanvas.getContext('webgl');\n const gl = false;\n if (gl) {\n const glBuffer = new Uint8Array(outCanvas.width * outCanvas.height * 4);\n const pixBuffer = new Uint8Array(outCanvas.width * outCanvas.height * 3);\n gl.readPixels(0, 0, outCanvas.width, outCanvas.height, gl.RGBA, gl.UNSIGNED_BYTE, glBuffer);\n // gl returns rbga while we only need rgb, so discarding alpha channel\n // gl returns starting point as lower left, so need to invert vertical\n let i = 0;\n for (let y = outCanvas.height - 1; y >= 0; y--) {\n for (let x = 0; x < outCanvas.width; x++) {\n const index = (x + y * outCanvas.width) * 4;\n pixBuffer[i++] = glBuffer[index + 0];\n pixBuffer[i++] = glBuffer[index + 1];\n pixBuffer[i++] = glBuffer[index + 2];\n }\n }\n outCanvas.data = pixBuffer;\n }\n } else {\n outCanvas = inCanvas;\n }\n let pixels;\n if (outCanvas.data) {\n const shape = [outCanvas.height, 
outCanvas.width, 3];\n pixels = tf.tensor3d(outCanvas.data, shape, 'int32');\n } else if ((config.backend === 'webgl') || (outCanvas instanceof ImageData)) {\n // tf kernel-optimized method to get imagedata, also if input is imagedata, just use it\n pixels = tf.browser.fromPixels(outCanvas);\n } else {\n // cpu and wasm kernel does not implement efficient fromPixels method nor we can use canvas as-is, so we do a silly one more canvas\n const tempCanvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement('canvas');\n tempCanvas.width = targetWidth;\n tempCanvas.height = targetHeight;\n const tempCtx = tempCanvas.getContext('2d');\n tempCtx.drawImage(outCanvas, 0, 0);\n const data = tempCtx.getImageData(0, 0, targetWidth, targetHeight);\n pixels = tf.browser.fromPixels(data);\n }\n const casted = pixels.toFloat();\n tensor = casted.expandDims(0);\n pixels.dispose();\n casted.dispose();\n }\n return { tensor, canvas: config.filter.return ? outCanvas : null };\n}\n\nexports.process = process;\n", "// custom: bundle 3.4M\n/*\nimport * as tf from '../../../dev-clone/tfjs/tfjs/dist/tf.esnext.js';\nimport { setWasmPaths } from '@tensorflow/tfjs-backend-wasm/dist/index.js';\n\nconst loadGraphModel = tf.loadGraphModel;\nexport { tf, setWasmPaths, loadGraphModel };\n*/\n\n// monolithic: bundle 3.4M\nimport * as tf from '@tensorflow/tfjs/dist/tf.es2017.js';\nimport { setWasmPaths } from '@tensorflow/tfjs-backend-wasm/dist/index.js';\n\nconst loadGraphModel = tf.loadGraphModel;\nexport { tf, setWasmPaths, loadGraphModel };\n\n// modular: bundle 4.2M\n/*\nimport * as tf from '@tensorflow/tfjs-core/dist/tf-core.es2017.js';\nimport { loadGraphModel } from '@tensorflow/tfjs-converter/dist/tf-converter.es2017.js';\nimport * as tfCPU from '@tensorflow/tfjs-backend-cpu/dist/tf-backend-cpu.es2017.js';\nimport * as tfWebGL from '@tensorflow/tfjs-backend-webgl/dist/tf-backend-webgl.es2017.js';\nimport { setWasmPaths, version_wasm } from '@tensorflow/tfjs-backend-wasm/dist/index.js';\n\nconst version = { core: tf.version, cpu: tfCPU.version_cpu, webgl: tfWebGL.version_webgl, wasm: version_wasm };\n\nexport { tf, setWasmPaths, loadGraphModel, version };\n*/\n", null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, "export const wasmWorkerContents = 'var threadInfoStruct=0;var selfThreadId=0;var parentThreadId=0;var Module={};function threadPrintErr(){var text=Array.prototype.slice.call(arguments).join(\" 
\");console.error(text)}function threadAlert(){var text=Array.prototype.slice.call(arguments).join(\" \");postMessage({cmd:\"alert\",text:text,threadId:selfThreadId})}var err=threadPrintErr;this.alert=threadAlert;Module[\"instantiateWasm\"]=function(info,receiveInstance){var instance=new WebAssembly.Instance(Module[\"wasmModule\"],info);Module[\"wasmModule\"]=null;receiveInstance(instance);return instance.exports};this.onmessage=function(e){try{if(e.data.cmd===\"load\"){Module[\"DYNAMIC_BASE\"]=e.data.DYNAMIC_BASE;Module[\"DYNAMICTOP_PTR\"]=e.data.DYNAMICTOP_PTR;Module[\"wasmModule\"]=e.data.wasmModule;Module[\"wasmMemory\"]=e.data.wasmMemory;Module[\"buffer\"]=Module[\"wasmMemory\"].buffer;Module[\"ENVIRONMENT_IS_PTHREAD\"]=true;if(typeof e.data.urlOrBlob===\"string\"){importScripts(e.data.urlOrBlob)}else{var objectUrl=URL.createObjectURL(e.data.urlOrBlob);importScripts(objectUrl);URL.revokeObjectURL(objectUrl)}Module=WasmBackendModuleThreadedSimd(Module);postMessage({\"cmd\":\"loaded\"})}else if(e.data.cmd===\"objectTransfer\"){Module[\"PThread\"].receiveObjectTransfer(e.data)}else if(e.data.cmd===\"run\"){Module[\"__performance_now_clock_drift\"]=performance.now()-e.data.time;threadInfoStruct=e.data.threadInfoStruct;Module[\"__register_pthread_ptr\"](threadInfoStruct,0,0);selfThreadId=e.data.selfThreadId;parentThreadId=e.data.parentThreadId;var max=e.data.stackBase;var top=e.data.stackBase+e.data.stackSize;Module[\"establishStackSpace\"](top,max);Module[\"_emscripten_tls_init\"]();Module[\"PThread\"].receiveObjectTransfer(e.data);Module[\"PThread\"].setThreadStatus(Module[\"_pthread_self\"](),1);try{var result=Module[\"dynCall_ii\"](e.data.start_routine,e.data.arg);if(!Module[\"getNoExitRuntime\"]())Module[\"PThread\"].threadExit(result)}catch(ex){if(ex===\"Canceled!\"){Module[\"PThread\"].threadCancel()}else if(ex!=\"unwind\"){Atomics.store(Module[\"HEAPU32\"],threadInfoStruct+4>>2,ex instanceof Module[\"ExitStatus\"]?ex.status:-2);Atomics.store(Module[\"HEAPU32\"],threadInfoStruct+0>>2,1);Module[\"_emscripten_futex_wake\"](threadInfoStruct+0,2147483647);if(!(ex instanceof Module[\"ExitStatus\"]))throw ex}}}else if(e.data.cmd===\"cancel\"){if(threadInfoStruct){Module[\"PThread\"].threadCancel()}}else if(e.data.target===\"setimmediate\"){}else if(e.data.cmd===\"processThreadQueue\"){if(threadInfoStruct){Module[\"_emscripten_current_thread_process_queued_calls\"]()}}else{err(\"worker.js received unknown command \"+e.data.cmd);err(e.data)}}catch(ex){err(\"worker.js onmessage() captured an uncaught exception: \"+ex);if(ex.stack)err(ex.stack);throw ex}};if(typeof process===\"object\"&&typeof process.versions===\"object\"&&typeof process.versions.node===\"string\"){self={location:{href:__filename}};var onmessage=this.onmessage;var nodeWorkerThreads=require(\"worker_threads\");Worker=nodeWorkerThreads.Worker;var parentPort=nodeWorkerThreads.parentPort;parentPort.on(\"message\",function(data){onmessage({data:data})});var nodeFS=require(\"fs\");var nodeRead=function(filename){return nodeFS.readFileSync(filename,\"utf8\")};function globalEval(x){global.require=require;global.Module=Module;eval.call(null,x)}importScripts=function(f){globalEval(nodeRead(f))};postMessage=function(msg){parentPort.postMessage(msg)};if(typeof performance===\"undefined\"){performance={now:function(){return Date.now()}}}}';", null, null, null, null, "// custom: bundle 3.4M\n/*\nimport * as tf from '../../../dev-clone/tfjs/tfjs/dist/tf.esnext.js';\nimport { setWasmPaths } from 
'@tensorflow/tfjs-backend-wasm/dist/index.js';\n\nconst loadGraphModel = tf.loadGraphModel;\nexport { tf, setWasmPaths, loadGraphModel };\n*/\n\n// monolithic: bundle 3.4M\nimport * as tf from '@tensorflow/tfjs/dist/tf.es2017.js';\nimport { setWasmPaths } from '@tensorflow/tfjs-backend-wasm/dist/index.js';\n\nconst loadGraphModel = tf.loadGraphModel;\nexport { tf, setWasmPaths, loadGraphModel };\n\n// modular: bundle 4.2M\n/*\nimport * as tf from '@tensorflow/tfjs-core/dist/tf-core.es2017.js';\nimport { loadGraphModel } from '@tensorflow/tfjs-converter/dist/tf-converter.es2017.js';\nimport * as tfCPU from '@tensorflow/tfjs-backend-cpu/dist/tf-backend-cpu.es2017.js';\nimport * as tfWebGL from '@tensorflow/tfjs-backend-webgl/dist/tf-backend-webgl.es2017.js';\nimport { setWasmPaths, version_wasm } from '@tensorflow/tfjs-backend-wasm/dist/index.js';\n\nconst version = { core: tf.version, cpu: tfCPU.version_cpu, webgl: tfWebGL.version_webgl, wasm: version_wasm };\n\nexport { tf, setWasmPaths, loadGraphModel, version };\n*/\n", "export default [\n 127, 34, 139, 11, 0, 37, 232, 231, 120, 72, 37, 39, 128, 121, 47, 232, 121,\n 128, 104, 69, 67, 175, 171, 148, 157, 154, 155, 118, 50, 101, 73, 39, 40, 9,\n 151, 108, 48, 115, 131, 194, 204, 211, 74, 40, 185, 80, 42, 183, 40, 92,\n 186, 230, 229, 118, 202, 212, 214, 83, 18, 17, 76, 61, 146, 160, 29, 30, 56,\n 157, 173, 106, 204, 194, 135, 214, 192, 203, 165, 98, 21, 71, 68, 51, 45, 4,\n 144, 24, 23, 77, 146, 91, 205, 50, 187, 201, 200, 18, 91, 106, 182, 90, 91,\n 181, 85, 84, 17, 206, 203, 36, 148, 171, 140, 92, 40, 39, 193, 189, 244,\n 159, 158, 28, 247, 246, 161, 236, 3, 196, 54, 68, 104, 193, 168, 8, 117,\n 228, 31, 189, 193, 55, 98, 97, 99, 126, 47, 100, 166, 79, 218, 155, 154, 26,\n 209, 49, 131, 135, 136, 150, 47, 126, 217, 223, 52, 53, 45, 51, 134, 211,\n 170, 140, 67, 69, 108, 43, 106, 91, 230, 119, 120, 226, 130, 247, 63, 53,\n 52, 238, 20, 242, 46, 70, 156, 78, 62, 96, 46, 53, 63, 143, 34, 227, 173,\n 155, 133, 123, 117, 111, 44, 125, 19, 236, 134, 51, 216, 206, 205, 154, 153,\n 22, 39, 37, 167, 200, 201, 208, 36, 142, 100, 57, 212, 202, 20, 60, 99, 28,\n 158, 157, 35, 226, 113, 160, 159, 27, 204, 202, 210, 113, 225, 46, 43, 202,\n 204, 62, 76, 77, 137, 123, 116, 41, 38, 72, 203, 129, 142, 64, 98, 240, 49,\n 102, 64, 41, 73, 74, 212, 216, 207, 42, 74, 184, 169, 170, 211, 170, 149,\n 176, 105, 66, 69, 122, 6, 168, 123, 147, 187, 96, 77, 90, 65, 55, 107, 89,\n 90, 180, 101, 100, 120, 63, 105, 104, 93, 137, 227, 15, 86, 85, 129, 102,\n 49, 14, 87, 86, 55, 8, 9, 100, 47, 121, 145, 23, 22, 88, 89, 179, 6, 122,\n 196, 88, 95, 96, 138, 172, 136, 215, 58, 172, 115, 48, 219, 42, 80, 81, 195,\n 3, 51, 43, 146, 61, 171, 175, 199, 81, 82, 38, 53, 46, 225, 144, 163, 110,\n 246, 33, 7, 52, 65, 66, 229, 228, 117, 34, 127, 234, 107, 108, 69, 109, 108,\n 151, 48, 64, 235, 62, 78, 191, 129, 209, 126, 111, 35, 143, 163, 161, 246,\n 117, 123, 50, 222, 65, 52, 19, 125, 141, 221, 55, 65, 3, 195, 197, 25, 7,\n 33, 220, 237, 44, 70, 71, 139, 122, 193, 245, 247, 130, 33, 71, 21, 162,\n 153, 158, 159, 170, 169, 150, 188, 174, 196, 216, 186, 92, 144, 160, 161, 2,\n 97, 167, 141, 125, 241, 164, 167, 37, 72, 38, 12, 145, 159, 160, 38, 82, 13,\n 63, 68, 71, 226, 35, 111, 158, 153, 154, 101, 50, 205, 206, 92, 165, 209,\n 198, 217, 165, 167, 97, 220, 115, 218, 133, 112, 243, 239, 238, 241, 214,\n 135, 169, 190, 173, 133, 171, 208, 32, 125, 44, 237, 86, 87, 178, 85, 86,\n 179, 84, 85, 180, 83, 84, 181, 201, 83, 182, 137, 93, 132, 76, 62, 183, 61,\n 76, 184, 57, 61, 185, 212, 
57, 186, 214, 207, 187, 34, 143, 156, 79, 239,\n 237, 123, 137, 177, 44, 1, 4, 201, 194, 32, 64, 102, 129, 213, 215, 138, 59,\n 166, 219, 242, 99, 97, 2, 94, 141, 75, 59, 235, 24, 110, 228, 25, 130, 226,\n 23, 24, 229, 22, 23, 230, 26, 22, 231, 112, 26, 232, 189, 190, 243, 221, 56,\n 190, 28, 56, 221, 27, 28, 222, 29, 27, 223, 30, 29, 224, 247, 30, 225, 238,\n 79, 20, 166, 59, 75, 60, 75, 240, 147, 177, 215, 20, 79, 166, 187, 147, 213,\n 112, 233, 244, 233, 128, 245, 128, 114, 188, 114, 217, 174, 131, 115, 220,\n 217, 198, 236, 198, 131, 134, 177, 132, 58, 143, 35, 124, 110, 163, 7, 228,\n 110, 25, 356, 389, 368, 11, 302, 267, 452, 350, 349, 302, 303, 269, 357,\n 343, 277, 452, 453, 357, 333, 332, 297, 175, 152, 377, 384, 398, 382, 347,\n 348, 330, 303, 304, 270, 9, 336, 337, 278, 279, 360, 418, 262, 431, 304,\n 408, 409, 310, 415, 407, 270, 409, 410, 450, 348, 347, 422, 430, 434, 313,\n 314, 17, 306, 307, 375, 387, 388, 260, 286, 414, 398, 335, 406, 418, 364,\n 367, 416, 423, 358, 327, 251, 284, 298, 281, 5, 4, 373, 374, 253, 307, 320,\n 321, 425, 427, 411, 421, 313, 18, 321, 405, 406, 320, 404, 405, 315, 16, 17,\n 426, 425, 266, 377, 400, 369, 322, 391, 269, 417, 465, 464, 386, 257, 258,\n 466, 260, 388, 456, 399, 419, 284, 332, 333, 417, 285, 8, 346, 340, 261,\n 413, 441, 285, 327, 460, 328, 355, 371, 329, 392, 439, 438, 382, 341, 256,\n 429, 420, 360, 364, 394, 379, 277, 343, 437, 443, 444, 283, 275, 440, 363,\n 431, 262, 369, 297, 338, 337, 273, 375, 321, 450, 451, 349, 446, 342, 467,\n 293, 334, 282, 458, 461, 462, 276, 353, 383, 308, 324, 325, 276, 300, 293,\n 372, 345, 447, 382, 398, 362, 352, 345, 340, 274, 1, 19, 456, 248, 281, 436,\n 427, 425, 381, 256, 252, 269, 391, 393, 200, 199, 428, 266, 330, 329, 287,\n 273, 422, 250, 462, 328, 258, 286, 384, 265, 353, 342, 387, 259, 257, 424,\n 431, 430, 342, 353, 276, 273, 335, 424, 292, 325, 307, 366, 447, 345, 271,\n 303, 302, 423, 266, 371, 294, 455, 460, 279, 278, 294, 271, 272, 304, 432,\n 434, 427, 272, 407, 408, 394, 430, 431, 395, 369, 400, 334, 333, 299, 351,\n 417, 168, 352, 280, 411, 325, 319, 320, 295, 296, 336, 319, 403, 404, 330,\n 348, 349, 293, 298, 333, 323, 454, 447, 15, 16, 315, 358, 429, 279, 14, 15,\n 316, 285, 336, 9, 329, 349, 350, 374, 380, 252, 318, 402, 403, 6, 197, 419,\n 318, 319, 325, 367, 364, 365, 435, 367, 397, 344, 438, 439, 272, 271, 311,\n 195, 5, 281, 273, 287, 291, 396, 428, 199, 311, 271, 268, 283, 444, 445,\n 373, 254, 339, 263, 466, 249, 282, 334, 296, 449, 347, 346, 264, 447, 454,\n 336, 296, 299, 338, 10, 151, 278, 439, 455, 292, 407, 415, 358, 371, 355,\n 340, 345, 372, 390, 249, 466, 346, 347, 280, 442, 443, 282, 19, 94, 370,\n 441, 442, 295, 248, 419, 197, 263, 255, 359, 440, 275, 274, 300, 383, 368,\n 351, 412, 465, 263, 467, 466, 301, 368, 389, 380, 374, 386, 395, 378, 379,\n 412, 351, 419, 436, 426, 322, 373, 390, 388, 2, 164, 393, 370, 462, 461,\n 164, 0, 267, 302, 11, 12, 374, 373, 387, 268, 12, 13, 293, 300, 301, 446,\n 261, 340, 385, 384, 381, 330, 266, 425, 426, 423, 391, 429, 355, 437, 391,\n 327, 326, 440, 457, 438, 341, 382, 362, 459, 457, 461, 434, 430, 394, 414,\n 463, 362, 396, 369, 262, 354, 461, 457, 316, 403, 402, 315, 404, 403, 314,\n 405, 404, 313, 406, 405, 421, 418, 406, 366, 401, 361, 306, 408, 407, 291,\n 409, 408, 287, 410, 409, 432, 436, 410, 434, 416, 411, 264, 368, 383, 309,\n 438, 457, 352, 376, 401, 274, 275, 4, 421, 428, 262, 294, 327, 358, 433,\n 416, 367, 289, 455, 439, 462, 370, 326, 2, 326, 370, 305, 460, 455, 254,\n 449, 448, 255, 261, 446, 253, 450, 449, 
252, 451, 450, 256, 452, 451, 341,\n 453, 452, 413, 464, 463, 441, 413, 414, 258, 442, 441, 257, 443, 442, 259,\n 444, 443, 260, 445, 444, 467, 342, 445, 459, 458, 250, 289, 392, 290, 290,\n 328, 460, 376, 433, 435, 250, 290, 392, 411, 416, 433, 341, 463, 464, 453,\n 464, 465, 357, 465, 412, 343, 412, 399, 360, 363, 440, 437, 399, 456, 420,\n 456, 363, 401, 435, 288, 372, 383, 353, 339, 255, 249, 448, 261, 255, 133,\n 243, 190, 133, 155, 112, 33, 246, 247, 33, 130, 25, 398, 384, 286, 362, 398,\n 414, 362, 463, 341, 263, 359, 467, 263, 249, 255, 466, 467, 260, 75, 60,\n 166, 238, 239, 79, 162, 127, 139, 72, 11, 37, 121, 232, 120, 73, 72, 39,\n 114, 128, 47, 233, 232, 128, 103, 104, 67, 152, 175, 148, 173, 157, 155,\n 119, 118, 101, 74, 73, 40, 107, 9, 108, 49, 48, 131, 32, 194, 211, 184, 74,\n 185, 191, 80, 183, 185, 40, 186, 119, 230, 118, 210, 202, 214, 84, 83, 17,\n 77, 76, 146, 161, 160, 30, 190, 56, 173, 182, 106, 194, 138, 135, 192, 129,\n 203, 98, 54, 21, 68, 5, 51, 4, 145, 144, 23, 90, 77, 91, 207, 205, 187, 83,\n 201, 18, 181, 91, 182, 180, 90, 181, 16, 85, 17, 205, 206, 36, 176, 148,\n 140, 165, 92, 39, 245, 193, 244, 27, 159, 28, 30, 247, 161, 174, 236, 196,\n 103, 54, 104, 55, 193, 8, 111, 117, 31, 221, 189, 55, 240, 98, 99, 142, 126,\n 100, 219, 166, 218, 112, 155, 26, 198, 209, 131, 169, 135, 150, 114, 47,\n 217, 224, 223, 53, 220, 45, 134, 32, 211, 140, 109, 67, 108, 146, 43, 91,\n 231, 230, 120, 113, 226, 247, 105, 63, 52, 241, 238, 242, 124, 46, 156, 95,\n 78, 96, 70, 46, 63, 116, 143, 227, 116, 123, 111, 1, 44, 19, 3, 236, 51,\n 207, 216, 205, 26, 154, 22, 165, 39, 167, 199, 200, 208, 101, 36, 100, 43,\n 57, 202, 242, 20, 99, 56, 28, 157, 124, 35, 113, 29, 160, 27, 211, 204, 210,\n 124, 113, 46, 106, 43, 204, 96, 62, 77, 227, 137, 116, 73, 41, 72, 36, 203,\n 142, 235, 64, 240, 48, 49, 64, 42, 41, 74, 214, 212, 207, 183, 42, 184, 210,\n 169, 211, 140, 170, 176, 104, 105, 69, 193, 122, 168, 50, 123, 187, 89, 96,\n 90, 66, 65, 107, 179, 89, 180, 119, 101, 120, 68, 63, 104, 234, 93, 227, 16,\n 15, 85, 209, 129, 49, 15, 14, 86, 107, 55, 9, 120, 100, 121, 153, 145, 22,\n 178, 88, 179, 197, 6, 196, 89, 88, 96, 135, 138, 136, 138, 215, 172, 218,\n 115, 219, 41, 42, 81, 5, 195, 51, 57, 43, 61, 208, 171, 199, 41, 81, 38,\n 224, 53, 225, 24, 144, 110, 105, 52, 66, 118, 229, 117, 227, 34, 234, 66,\n 107, 69, 10, 109, 151, 219, 48, 235, 183, 62, 191, 142, 129, 126, 116, 111,\n 143, 7, 163, 246, 118, 117, 50, 223, 222, 52, 94, 19, 141, 222, 221, 65,\n 196, 3, 197, 45, 220, 44, 156, 70, 139, 188, 122, 245, 139, 71, 162, 145,\n 153, 159, 149, 170, 150, 122, 188, 196, 206, 216, 92, 163, 144, 161, 164, 2,\n 167, 242, 141, 241, 0, 164, 37, 11, 72, 12, 144, 145, 160, 12, 38, 13, 70,\n 63, 71, 31, 226, 111, 157, 158, 154, 36, 101, 205, 203, 206, 165, 126, 209,\n 217, 98, 165, 97, 237, 220, 218, 237, 239, 241, 210, 214, 169, 140, 171, 32,\n 241, 125, 237, 179, 86, 178, 180, 85, 179, 181, 84, 180, 182, 83, 181, 194,\n 201, 182, 177, 137, 132, 184, 76, 183, 185, 61, 184, 186, 57, 185, 216, 212,\n 186, 192, 214, 187, 139, 34, 156, 218, 79, 237, 147, 123, 177, 45, 44, 4,\n 208, 201, 32, 98, 64, 129, 192, 213, 138, 235, 59, 219, 141, 242, 97, 97, 2,\n 141, 240, 75, 235, 229, 24, 228, 31, 25, 226, 230, 23, 229, 231, 22, 230,\n 232, 26, 231, 233, 112, 232, 244, 189, 243, 189, 221, 190, 222, 28, 221,\n 223, 27, 222, 224, 29, 223, 225, 30, 224, 113, 247, 225, 99, 60, 240, 213,\n 147, 215, 60, 20, 166, 192, 187, 213, 243, 112, 244, 244, 233, 245, 245,\n 128, 188, 188, 114, 174, 134, 131, 220, 174, 217, 
236, 236, 198, 134, 215,\n 177, 58, 156, 143, 124, 25, 110, 7, 31, 228, 25, 264, 356, 368, 0, 11, 267,\n 451, 452, 349, 267, 302, 269, 350, 357, 277, 350, 452, 357, 299, 333, 297,\n 396, 175, 377, 381, 384, 382, 280, 347, 330, 269, 303, 270, 151, 9, 337,\n 344, 278, 360, 424, 418, 431, 270, 304, 409, 272, 310, 407, 322, 270, 410,\n 449, 450, 347, 432, 422, 434, 18, 313, 17, 291, 306, 375, 259, 387, 260,\n 424, 335, 418, 434, 364, 416, 391, 423, 327, 301, 251, 298, 275, 281, 4,\n 254, 373, 253, 375, 307, 321, 280, 425, 411, 200, 421, 18, 335, 321, 406,\n 321, 320, 405, 314, 315, 17, 423, 426, 266, 396, 377, 369, 270, 322, 269,\n 413, 417, 464, 385, 386, 258, 248, 456, 419, 298, 284, 333, 168, 417, 8,\n 448, 346, 261, 417, 413, 285, 326, 327, 328, 277, 355, 329, 309, 392, 438,\n 381, 382, 256, 279, 429, 360, 365, 364, 379, 355, 277, 437, 282, 443, 283,\n 281, 275, 363, 395, 431, 369, 299, 297, 337, 335, 273, 321, 348, 450, 349,\n 359, 446, 467, 283, 293, 282, 250, 458, 462, 300, 276, 383, 292, 308, 325,\n 283, 276, 293, 264, 372, 447, 346, 352, 340, 354, 274, 19, 363, 456, 281,\n 426, 436, 425, 380, 381, 252, 267, 269, 393, 421, 200, 428, 371, 266, 329,\n 432, 287, 422, 290, 250, 328, 385, 258, 384, 446, 265, 342, 386, 387, 257,\n 422, 424, 430, 445, 342, 276, 422, 273, 424, 306, 292, 307, 352, 366, 345,\n 268, 271, 302, 358, 423, 371, 327, 294, 460, 331, 279, 294, 303, 271, 304,\n 436, 432, 427, 304, 272, 408, 395, 394, 431, 378, 395, 400, 296, 334, 299,\n 6, 351, 168, 376, 352, 411, 307, 325, 320, 285, 295, 336, 320, 319, 404,\n 329, 330, 349, 334, 293, 333, 366, 323, 447, 316, 15, 315, 331, 358, 279,\n 317, 14, 316, 8, 285, 9, 277, 329, 350, 253, 374, 252, 319, 318, 403, 351,\n 6, 419, 324, 318, 325, 397, 367, 365, 288, 435, 397, 278, 344, 439, 310,\n 272, 311, 248, 195, 281, 375, 273, 291, 175, 396, 199, 312, 311, 268, 276,\n 283, 445, 390, 373, 339, 295, 282, 296, 448, 449, 346, 356, 264, 454, 337,\n 336, 299, 337, 338, 151, 294, 278, 455, 308, 292, 415, 429, 358, 355, 265,\n 340, 372, 388, 390, 466, 352, 346, 280, 295, 442, 282, 354, 19, 370, 285,\n 441, 295, 195, 248, 197, 457, 440, 274, 301, 300, 368, 417, 351, 465, 251,\n 301, 389, 385, 380, 386, 394, 395, 379, 399, 412, 419, 410, 436, 322, 387,\n 373, 388, 326, 2, 393, 354, 370, 461, 393, 164, 267, 268, 302, 12, 386, 374,\n 387, 312, 268, 13, 298, 293, 301, 265, 446, 340, 380, 385, 381, 280, 330,\n 425, 322, 426, 391, 420, 429, 437, 393, 391, 326, 344, 440, 438, 458, 459,\n 461, 364, 434, 394, 428, 396, 262, 274, 354, 457, 317, 316, 402, 316, 315,\n 403, 315, 314, 404, 314, 313, 405, 313, 421, 406, 323, 366, 361, 292, 306,\n 407, 306, 291, 408, 291, 287, 409, 287, 432, 410, 427, 434, 411, 372, 264,\n 383, 459, 309, 457, 366, 352, 401, 1, 274, 4, 418, 421, 262, 331, 294, 358,\n 435, 433, 367, 392, 289, 439, 328, 462, 326, 94, 2, 370, 289, 305, 455, 339,\n 254, 448, 359, 255, 446, 254, 253, 449, 253, 252, 450, 252, 256, 451, 256,\n 341, 452, 414, 413, 463, 286, 441, 414, 286, 258, 441, 258, 257, 442, 257,\n 259, 443, 259, 260, 444, 260, 467, 445, 309, 459, 250, 305, 289, 290, 305,\n 290, 460, 401, 376, 435, 309, 250, 392, 376, 411, 433, 453, 341, 464, 357,\n 453, 465, 343, 357, 412, 437, 343, 399, 344, 360, 440, 420, 437, 456, 360,\n 420, 363, 361, 401, 288, 265, 372, 353, 390, 339, 249, 339, 448, 255];\n", "import { tf, setWasmPaths } from './tf.js';\nimport * as facemesh from './face/facemesh.js';\nimport * as age from './age/age.js';\nimport * as gender from './gender/gender.js';\nimport * as emotion from 
'./emotion/emotion.js';\nimport * as posenet from './body/posenet.js';\nimport * as handpose from './hand/handpose.js';\nimport * as gesture from './gesture.js';\nimport * as image from './image.js';\nimport * as profile from './profile.js';\nimport * as config from '../config.js';\nimport * as app from '../package.json';\n\n// static config override for non-video detection\nconst disableSkipFrames = {\n face: { detector: { skipFrames: 0 }, age: { skipFrames: 0 }, gender: { skipFrames: 0 }, emotion: { skipFrames: 0 } }, hand: { skipFrames: 0 },\n};\n\n// helper function: gets elapsed time on both browser and nodejs\nconst now = () => {\n if (typeof performance !== 'undefined') return performance.now();\n return parseInt(Number(process.hrtime.bigint()) / 1000 / 1000);\n};\n\n// helper function: perform deep merge of multiple objects so it allows full inheriance with overrides\nfunction mergeDeep(...objects) {\n const isObject = (obj) => obj && typeof obj === 'object';\n return objects.reduce((prev, obj) => {\n Object.keys(obj || {}).forEach((key) => {\n const pVal = prev[key];\n const oVal = obj[key];\n if (Array.isArray(pVal) && Array.isArray(oVal)) {\n prev[key] = pVal.concat(...oVal);\n } else if (isObject(pVal) && isObject(oVal)) {\n prev[key] = mergeDeep(pVal, oVal);\n } else {\n prev[key] = oVal;\n }\n });\n return prev;\n }, {});\n}\n\nclass Human {\n constructor(userConfig = {}) {\n this.tf = tf;\n this.version = app.version;\n this.config = mergeDeep(config.default, userConfig);\n this.fx = null;\n this.state = 'idle';\n this.numTensors = 0;\n this.analyzeMemoryLeaks = false;\n this.checkSanity = false;\n this.firstRun = true;\n this.perf = {};\n // object that contains all initialized models\n this.models = {\n facemesh: null,\n posenet: null,\n handpose: null,\n iris: null,\n age: null,\n gender: null,\n emotion: null,\n };\n // export raw access to underlying models\n this.facemesh = facemesh;\n this.age = age;\n this.gender = gender;\n this.emotion = emotion;\n this.body = posenet;\n this.hand = handpose;\n }\n\n // helper function: wrapper around console output\n log(...msg) {\n // eslint-disable-next-line no-console\n if (msg && this.config.console) console.log('Human:', ...msg);\n }\n\n profile() {\n if (this.config.profile) return profile.data;\n return {};\n }\n\n // helper function: measure tensor leak\n analyze(...msg) {\n if (!this.analyzeMemoryLeaks) return;\n const current = tf.engine().state.numTensors;\n const previous = this.numTensors;\n this.numTensors = current;\n const leaked = current - previous;\n if (leaked !== 0) this.log(...msg, leaked);\n }\n\n // quick sanity check on inputs\n sanity(input) {\n if (!this.checkSanity) return null;\n if (!input) return 'input is not defined';\n if (tf.ENV.flags.IS_NODE && !(input instanceof tf.Tensor)) {\n return 'input must be a tensor';\n }\n try {\n tf.getBackend();\n } catch {\n return 'backend not loaded';\n }\n return null;\n }\n\n // preload models, not explicitly required as it's done automatically on first use\n async load(userConfig) {\n this.state = 'load';\n const timeStamp = now();\n if (userConfig) this.config = mergeDeep(this.config, userConfig);\n\n if (this.firstRun) {\n this.checkBackend(true);\n this.log(`version: ${this.version} TensorFlow/JS version: ${tf.version_core}`);\n this.log('configuration:', this.config);\n this.log('flags:', tf.ENV.flags);\n this.firstRun = false;\n }\n if (this.config.async) {\n [\n this.models.facemesh,\n this.models.age,\n this.models.gender,\n this.models.emotion,\n 
this.models.posenet,\n this.models.handpose,\n ] = await Promise.all([\n this.models.facemesh || (this.config.face.enabled ? facemesh.load(this.config.face) : null),\n this.models.age || ((this.config.face.enabled && this.config.face.age.enabled) ? age.load(this.config) : null),\n this.models.gender || ((this.config.face.enabled && this.config.face.gender.enabled) ? gender.load(this.config) : null),\n this.models.emotion || ((this.config.face.enabled && this.config.face.emotion.enabled) ? emotion.load(this.config) : null),\n this.models.posenet || (this.config.body.enabled ? posenet.load(this.config) : null),\n this.models.handpose || (this.config.hand.enabled ? handpose.load(this.config.hand) : null),\n ]);\n } else {\n if (this.config.face.enabled && !this.models.facemesh) this.models.facemesh = await facemesh.load(this.config.face);\n if (this.config.face.enabled && this.config.face.age.enabled && !this.models.age) this.models.age = await age.load(this.config);\n if (this.config.face.enabled && this.config.face.gender.enabled && !this.models.gender) this.models.gender = await gender.load(this.config);\n if (this.config.face.enabled && this.config.face.emotion.enabled && !this.models.emotion) this.models.emotion = await emotion.load(this.config);\n if (this.config.body.enabled && !this.models.posenet) this.models.posenet = await posenet.load(this.config);\n if (this.config.hand.enabled && !this.models.handpose) this.models.handpose = await handpose.load(this.config.hand);\n }\n const current = Math.trunc(now() - timeStamp);\n if (current > (this.perf.load || 0)) this.perf.load = current;\n }\n\n // check if backend needs initialization if it changed\n async checkBackend(force) {\n const timeStamp = now();\n if (this.config.backend && (this.config.backend !== '') && force || (tf.getBackend() !== this.config.backend)) {\n this.state = 'backend';\n /* force backend reload\n if (this.config.backend in tf.engine().registry) {\n const backendFactory = tf.findBackendFactory(this.config.backend);\n tf.removeBackend(this.config.backend);\n tf.registerBackend(this.config.backend, backendFactory);\n } else {\n this.log('Backend not registred:', this.config.backend);\n }\n */\n\n this.log('setting backend:', this.config.backend);\n\n if (this.config.backend === 'wasm') {\n this.log('settings wasm path:', this.config.wasmPath);\n setWasmPaths(this.config.wasmPath);\n const simd = await tf.env().getAsync('WASM_HAS_SIMD_SUPPORT');\n if (!simd) this.log('warning: wasm simd support is not enabled');\n }\n\n await tf.setBackend(this.config.backend);\n tf.enableProdMode();\n /* debug mode is really too mcuh\n tf.enableDebugMode();\n */\n if (this.config.backend === 'webgl') {\n if (this.config.deallocate) {\n this.log('changing webgl: WEBGL_DELETE_TEXTURE_THRESHOLD:', this.config.deallocate);\n tf.ENV.set('WEBGL_DELETE_TEXTURE_THRESHOLD', this.config.deallocate ? 
0 : -1);\n }\n // tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true);\n tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', true);\n }\n await tf.ready();\n }\n const current = Math.trunc(now() - timeStamp);\n if (current > (this.perf.backend || 0)) this.perf.backend = current;\n }\n\n async detectFace(input) {\n // run facemesh, includes blazeface and iris\n // eslint-disable-next-line no-async-promise-executor\n let timeStamp;\n let ageRes;\n let genderRes;\n let emotionRes;\n const faceRes = [];\n this.state = 'run:face';\n timeStamp = now();\n const faces = await this.models.facemesh.estimateFaces(input, this.config.face);\n this.perf.face = Math.trunc(now() - timeStamp);\n for (const face of faces) {\n this.analyze('Get Face');\n // is something went wrong, skip the face\n if (!face.image || face.image.isDisposedInternal) {\n this.log('Face object is disposed:', face.image);\n continue;\n }\n // run age, inherits face from blazeface\n this.analyze('Start Age:');\n if (this.config.async) {\n ageRes = this.config.face.age.enabled ? age.predict(face.image, this.config) : {};\n } else {\n this.state = 'run:age';\n timeStamp = now();\n ageRes = this.config.face.age.enabled ? await age.predict(face.image, this.config) : {};\n this.perf.age = Math.trunc(now() - timeStamp);\n }\n\n // run gender, inherits face from blazeface\n this.analyze('Start Gender:');\n if (this.config.async) {\n genderRes = this.config.face.gender.enabled ? gender.predict(face.image, this.config) : {};\n } else {\n this.state = 'run:gender';\n timeStamp = now();\n genderRes = this.config.face.gender.enabled ? await gender.predict(face.image, this.config) : {};\n this.perf.gender = Math.trunc(now() - timeStamp);\n }\n // run emotion, inherits face from blazeface\n this.analyze('Start Emotion:');\n if (this.config.async) {\n emotionRes = this.config.face.emotion.enabled ? emotion.predict(face.image, this.config) : {};\n } else {\n this.state = 'run:emotion';\n timeStamp = now();\n emotionRes = this.config.face.emotion.enabled ? await emotion.predict(face.image, this.config) : {};\n this.perf.emotion = Math.trunc(now() - timeStamp);\n }\n this.analyze('End Emotion:');\n\n // if async wait for results\n if (this.config.async) {\n [ageRes, genderRes, emotionRes] = await Promise.all([ageRes, genderRes, emotionRes]);\n }\n\n this.analyze('Finish Face:');\n // dont need face anymore\n face.image.dispose();\n\n // calculate iris distance\n // iris: array[ center, left, top, right, bottom]\n const irisSize = (face.annotations.leftEyeIris && face.annotations.rightEyeIris)\n /* average human iris size is 11.7mm */\n ? 11.7 * Math.max(Math.abs(face.annotations.leftEyeIris[3][0] - face.annotations.leftEyeIris[1][0]), Math.abs(face.annotations.rightEyeIris[4][1] - face.annotations.rightEyeIris[2][1]))\n : 0;\n\n // combine results\n faceRes.push({\n confidence: face.confidence,\n box: face.box,\n mesh: face.mesh,\n annotations: face.annotations,\n age: ageRes.age,\n gender: genderRes.gender,\n genderConfidence: genderRes.confidence,\n emotion: emotionRes,\n iris: (irisSize !== 0) ? 
Math.trunc(irisSize) / 100 : 0,\n });\n this.analyze('End Face');\n }\n this.analyze('End FaceMesh:');\n if (this.config.async) {\n if (this.perf.face) delete this.perf.face;\n if (this.perf.age) delete this.perf.age;\n if (this.perf.gender) delete this.perf.gender;\n if (this.perf.emotion) delete this.perf.emotion;\n }\n return faceRes;\n }\n\n // main detect function\n async detect(input, userConfig = {}) {\n this.state = 'config';\n let timeStamp;\n\n // update configuration\n this.config = mergeDeep(this.config, userConfig);\n if (!this.config.videoOptimized) this.config = mergeDeep(this.config, disableSkipFrames);\n\n // sanity checks\n this.state = 'check';\n const error = this.sanity(input);\n if (error) {\n this.log(error, input);\n return { error };\n }\n\n // detection happens inside a promise\n return new Promise(async (resolve) => {\n let poseRes;\n let handRes;\n let faceRes;\n\n const timeStart = now();\n\n // configure backend\n await this.checkBackend();\n\n // load models if enabled\n await this.load();\n\n if (this.config.scoped) tf.engine().startScope();\n this.analyze('Start Scope:');\n\n timeStamp = now();\n const process = image.process(input, this.config);\n this.perf.image = Math.trunc(now() - timeStamp);\n this.analyze('Get Image:');\n\n // run face detection followed by all models that rely on face bounding box: face mesh, age, gender, emotion\n if (this.config.async) {\n faceRes = this.config.face.enabled ? this.detectFace(process.tensor) : [];\n if (this.perf.face) delete this.perf.face;\n } else {\n this.state = 'run:face';\n timeStamp = now();\n faceRes = this.config.face.enabled ? await this.detectFace(process.tensor) : [];\n this.perf.face = Math.trunc(now() - timeStamp);\n }\n\n // run posenet\n this.analyze('Start Body:');\n if (this.config.async) {\n poseRes = this.config.body.enabled ? this.models.posenet.estimatePoses(process.tensor, this.config) : [];\n if (this.perf.body) delete this.perf.body;\n } else {\n this.state = 'run:body';\n timeStamp = now();\n poseRes = this.config.body.enabled ? await this.models.posenet.estimatePoses(process.tensor, this.config) : [];\n this.perf.body = Math.trunc(now() - timeStamp);\n }\n this.analyze('End Body:');\n\n // run handpose\n this.analyze('Start Hand:');\n if (this.config.async) {\n handRes = this.config.hand.enabled ? this.models.handpose.estimateHands(process.tensor, this.config.hand) : [];\n if (this.perf.hand) delete this.perf.hand;\n } else {\n this.state = 'run:hand';\n timeStamp = now();\n handRes = this.config.hand.enabled ? 
await this.models.handpose.estimateHands(process.tensor, this.config.hand) : [];\n this.perf.hand = Math.trunc(now() - timeStamp);\n }\n // this.analyze('End Hand:');\n\n // if async wait for results\n if (this.config.async) {\n [faceRes, poseRes, handRes] = await Promise.all([faceRes, poseRes, handRes]);\n }\n process.tensor.dispose();\n\n if (this.config.scoped) tf.engine().endScope();\n this.analyze('End Scope:');\n\n let gestureRes = [];\n if (this.config.gesture.enabled) {\n timeStamp = now();\n gestureRes = { face: gesture.face(faceRes), body: gesture.body(poseRes), hand: gesture.hand(handRes) };\n if (!this.config.async) this.perf.gesture = Math.trunc(now() - timeStamp);\n else if (this.perf.gesture) delete this.perf.gesture;\n }\n\n this.perf.total = Math.trunc(now() - timeStart);\n this.state = 'idle';\n resolve({ face: faceRes, body: poseRes, hand: handRes, gesture: gestureRes, performance: this.perf, canvas: process.canvas });\n });\n }\n\n async warmup(userConfig) {\n const warmup = new ImageData(255, 255);\n await this.detect(warmup, userConfig);\n this.log('warmed up');\n }\n}\n\nexport { Human as default };\n", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { tf } from '../tf.js';\n\nfunction getBoxSize(box) {\n return [\n Math.abs(box.endPoint[0] - box.startPoint[0]),\n Math.abs(box.endPoint[1] - box.startPoint[1]),\n ];\n}\nfunction getBoxCenter(box) {\n return [\n box.startPoint[0] + (box.endPoint[0] - box.startPoint[0]) / 2,\n box.startPoint[1] + (box.endPoint[1] - box.startPoint[1]) / 2,\n ];\n}\nfunction cutBoxFromImageAndResize(box, image, cropSize) {\n const h = image.shape[1];\n const w = image.shape[2];\n const boxes = [[\n box.startPoint[1] / h,\n box.startPoint[0] / w,\n box.endPoint[1] / h,\n box.endPoint[0] / w,\n ]];\n return tf.image.cropAndResize(image, boxes, [0], cropSize);\n}\nfunction scaleBoxCoordinates(box, factor) {\n const startPoint = [box.startPoint[0] * factor[0], box.startPoint[1] * factor[1]];\n const endPoint = [box.endPoint[0] * factor[0], box.endPoint[1] * factor[1]];\n const palmLandmarks = box.palmLandmarks.map((coord) => {\n const scaledCoord = [coord[0] * factor[0], coord[1] * factor[1]];\n return scaledCoord;\n });\n return { startPoint, endPoint, palmLandmarks, confidence: box.confidence };\n}\nfunction enlargeBox(box, factor = 1.5) {\n const center = getBoxCenter(box);\n const size = getBoxSize(box);\n const newHalfSize = [factor * size[0] / 2, factor * size[1] / 2];\n const startPoint = [center[0] - newHalfSize[0], center[1] - newHalfSize[1]];\n const endPoint = [center[0] + newHalfSize[0], center[1] + newHalfSize[1]];\n return { startPoint, endPoint, palmLandmarks: box.palmLandmarks };\n}\nfunction squarifyBox(box) {\n const centers = getBoxCenter(box);\n const size = getBoxSize(box);\n const maxEdge = Math.max(...size);\n const halfSize = maxEdge / 2;\n const 
startPoint = [centers[0] - halfSize, centers[1] - halfSize];\n const endPoint = [centers[0] + halfSize, centers[1] + halfSize];\n return { startPoint, endPoint, palmLandmarks: box.palmLandmarks };\n}\nfunction shiftBox(box, shiftFactor) {\n const boxSize = [\n box.endPoint[0] - box.startPoint[0],\n box.endPoint[1] - box.startPoint[1],\n ];\n const shiftVector = [boxSize[0] * shiftFactor[0], boxSize[1] * shiftFactor[1]];\n const startPoint = [box.startPoint[0] + shiftVector[0], box.startPoint[1] + shiftVector[1]];\n const endPoint = [box.endPoint[0] + shiftVector[0], box.endPoint[1] + shiftVector[1]];\n return { startPoint, endPoint, palmLandmarks: box.palmLandmarks };\n}\nexport {\n cutBoxFromImageAndResize,\n enlargeBox,\n getBoxCenter,\n getBoxSize,\n scaleBoxCoordinates,\n shiftBox,\n squarifyBox,\n};\n", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nfunction normalizeRadians(angle) {\n return angle - 2 * Math.PI * Math.floor((angle + Math.PI) / (2 * Math.PI));\n}\nfunction computeRotation(point1, point2) {\n const radians = Math.PI / 2 - Math.atan2(-(point2[1] - point1[1]), point2[0] - point1[0]);\n return normalizeRadians(radians);\n}\nconst buildTranslationMatrix = (x, y) => [[1, 0, x], [0, 1, y], [0, 0, 1]];\nfunction dot(v1, v2) {\n let product = 0;\n for (let i = 0; i < v1.length; i++) {\n product += v1[i] * v2[i];\n }\n return product;\n}\nfunction getColumnFrom2DArr(arr, columnIndex) {\n const column = [];\n for (let i = 0; i < arr.length; i++) {\n column.push(arr[i][columnIndex]);\n }\n return column;\n}\nfunction multiplyTransformMatrices(mat1, mat2) {\n const product = [];\n const size = mat1.length;\n for (let row = 0; row < size; row++) {\n product.push([]);\n for (let col = 0; col < size; col++) {\n product[row].push(dot(mat1[row], getColumnFrom2DArr(mat2, col)));\n }\n }\n return product;\n}\nfunction buildRotationMatrix(rotation, center) {\n const cosA = Math.cos(rotation);\n const sinA = Math.sin(rotation);\n const rotationMatrix = [[cosA, -sinA, 0], [sinA, cosA, 0], [0, 0, 1]];\n const translationMatrix = buildTranslationMatrix(center[0], center[1]);\n const translationTimesRotation = multiplyTransformMatrices(translationMatrix, rotationMatrix);\n const negativeTranslationMatrix = buildTranslationMatrix(-center[0], -center[1]);\n return multiplyTransformMatrices(translationTimesRotation, negativeTranslationMatrix);\n}\nfunction invertTransformMatrix(matrix) {\n const rotationComponent = [[matrix[0][0], matrix[1][0]], [matrix[0][1], matrix[1][1]]];\n const translationComponent = [matrix[0][2], matrix[1][2]];\n const invertedTranslation = [\n -dot(rotationComponent[0], translationComponent),\n -dot(rotationComponent[1], translationComponent),\n ];\n return [\n rotationComponent[0].concat(invertedTranslation[0]),\n rotationComponent[1].concat(invertedTranslation[1]),\n [0, 0, 1],\n ];\n}\nfunction 
rotatePoint(homogeneousCoordinate, rotationMatrix) {\n return [\n dot(homogeneousCoordinate, rotationMatrix[0]),\n dot(homogeneousCoordinate, rotationMatrix[1]),\n ];\n}\nexport {\n buildRotationMatrix,\n computeRotation,\n dot,\n getColumnFrom2DArr,\n invertTransformMatrix,\n normalizeRadians,\n rotatePoint,\n};\n", "
/* eslint-disable indent */\n/* eslint-disable no-multi-spaces */\n\nexport default {\n backend: 'webgl', // select tfjs backend to use\n wasmPath: '../assets/', // path for wasm binaries\n // only used for backend: wasm\n console: true, // enable debugging output to console\n async: true, // execute enabled models in parallel\n // this disables per-model performance data but\n // slightly increases performance\n // cannot be used if profiling is enabled\n profile: false, // enable tfjs profiling\n // this has significant performance impact\n // only enable for debugging purposes\n // currently only implemented for age,gender,emotion models\n deallocate: false, // aggresively deallocate gpu memory after each usage\n // only valid for webgl backend and only during first call\n // cannot be changed unless library is reloaded\n // this has significant performance impact\n // only enable on low-memory devices\n scoped: false, // enable scoped runs\n // some models *may* have memory leaks,\n // this wrapps everything in a local scope at a cost of performance\n // typically not needed\n videoOptimized: true, // perform additional optimizations when input is video,\n // must be disabled for images\n // basically this skips object box boundary detection for every n frames\n // while maintaining in-box detection since objects cannot move that fast\n\n filter: {\n enabled: true, // enable image pre-processing filters\n width: 0, // resize input width\n height: 0, // resize input height\n // if both width and height are set to 0, there is no resizing\n // if just one is set, second one is scaled automatically\n // if both are set, values are used as-is\n return: true, // return processed canvas imagedata in result\n brightness: 0, // range: -1 (darken) to 1 (lighten)\n contrast: 0, // range: -1 (reduce contrast) to 1 (increase contrast)\n sharpness: 0, // range: 0 (no sharpening) to 1 (maximum sharpening)\n blur: 0, // range: 0 (no blur) to N (blur radius in pixels)\n saturation: 0, // range: -1 (reduce saturation) to 1 (increase saturation)\n hue: 0, // range: 0 (no change) to 360 (hue rotation in degrees)\n negative: false, // image negative\n sepia: false, // image sepia colors\n vintage: false, // image vintage colors\n kodachrome: false, // image kodachrome colors\n technicolor: false, // image technicolor colors\n polaroid: false, // image polaroid camera effect\n 
pixelate: 0, // range: 0 (no pixelate) to N (number of pixels to pixelate)\n },\n\n gesture: {\n enabled: true, // enable simple gesture recognition\n },\n\n face: {\n enabled: true, // controls if specified modul is enabled\n // face.enabled is required for all face models:\n // detector, mesh, iris, age, gender, emotion\n // (note: module is not loaded until it is required)\n detector: {\n modelPath: '../models/blazeface-back.json', // can be 'front' or 'back'.\n // 'front' is optimized for large faces\n // such as front-facing camera and\n // 'back' is optimized for distanct faces.\n inputSize: 256, // fixed value: 128 for front and 256 for 'back'\n maxFaces: 10, // maximum number of faces detected in the input\n // should be set to the minimum number for performance\n skipFrames: 15, // how many frames to go without re-running the face bounding box detector\n // only used for video inputs\n // e.g., if model is running st 25 FPS, we can re-use existing bounding\n // box for updated face analysis as the head probably hasn't moved much\n // in short time (10 * 1/25 = 0.25 sec)\n minConfidence: 0.5, // threshold for discarding a prediction\n iouThreshold: 0.2, // threshold for deciding whether boxes overlap too much in\n // non-maximum suppression (0.1 means drop if overlap 10%)\n scoreThreshold: 0.5, // threshold for deciding when to remove boxes based on score\n // in non-maximum suppression,\n // this is applied on detection objects only and before minConfidence\n },\n\n mesh: {\n enabled: true,\n modelPath: '../models/facemesh.json',\n inputSize: 192, // fixed value\n },\n\n iris: {\n enabled: true,\n modelPath: '../models/iris.json',\n inputSize: 64, // fixed value\n },\n\n age: {\n enabled: true,\n modelPath: '../models/age-ssrnet-imdb.json', // can be 'age-ssrnet-imdb' or 'age-ssrnet-wiki'\n // which determines training set for model\n inputSize: 64, // fixed value\n skipFrames: 15, // how many frames to go without re-running the detector\n // only used for video inputs\n },\n\n gender: {\n enabled: true,\n minConfidence: 0.1, // threshold for discarding a prediction\n modelPath: '../models/gender-ssrnet-imdb.json', // can be 'gender', 'gender-ssrnet-imdb' or 'gender-ssrnet-wiki'\n inputSize: 64, // fixed value\n skipFrames: 15, // how many frames to go without re-running the detector\n // only used for video inputs\n },\n\n emotion: {\n enabled: true,\n inputSize: 64, // fixed value\n minConfidence: 0.2, // threshold for discarding a prediction\n skipFrames: 15, // how many frames to go without re-running the detector\n modelPath: '../models/emotion-large.json', // can be 'mini', 'large'\n },\n },\n\n body: {\n enabled: true,\n modelPath: '../models/posenet.json',\n inputSize: 257, // fixed value\n maxDetections: 10, // maximum number of people detected in the input\n // should be set to the minimum number for performance\n scoreThreshold: 0.8, // threshold for deciding when to remove boxes based on score\n // in non-maximum suppression\n nmsRadius: 20, // radius for deciding points are too close in non-maximum suppression\n },\n\n hand: {\n enabled: true,\n inputSize: 256, // fixed value\n skipFrames: 15, // how many frames to go without re-running the hand bounding box detector\n // only used for video inputs\n // e.g., if model is running st 25 FPS, we can re-use existing bounding\n // box for updated hand skeleton analysis as the hand probably\n // hasn't moved much in short time (10 * 1/25 = 0.25 sec)\n minConfidence: 0.5, // threshold for discarding a prediction\n 
iouThreshold: 0.1, // threshold for deciding whether boxes overlap too much\n // in non-maximum suppression\n scoreThreshold: 0.8, // threshold for deciding when to remove boxes based on\n // score in non-maximum suppression\n maxHands: 1, // maximum number of hands detected in the input\n // should be set to the minimum number for performance\n landmarks: true, // detect hand landmarks or just hand boundary box\n detector: {\n modelPath: '../models/handdetect.json',\n },\n skeleton: {\n modelPath: '../models/handskeleton.json',\n },\n },\n};\n", "
async function drawGesture(result, canvas, ui) {\n if (!result) return;\n const ctx = canvas.getContext('2d');\n ctx.font = ui.baseFont;\n ctx.fillStyle = ui.baseLabel;\n let i = 1;\n for (const [key, val] of Object.entries(result)) {\n if (val.length > 0) {\n const label = `${key}: ${val.join(', ')}`;\n ctx.fillText(label, 6, i * (ui.baseLineHeight + 24));\n i += 1;\n }\n }\n}\n\nasync function drawFace(result, canvas, ui, triangulation) {\n if (!result) return;\n const ctx = canvas.getContext('2d');\n for (const face of result) {\n ctx.font = ui.baseFont;\n ctx.strokeStyle = ui.baseColor;\n ctx.fillStyle = ui.baseColor;\n ctx.lineWidth = ui.baseLineWidth;\n ctx.beginPath();\n if (ui.drawBoxes) {\n ctx.rect(face.box[0], face.box[1], face.box[2], face.box[3]);\n }\n // silly hack since fillText does not suport new line\n const labels = [];\n // labels.push(`${Math.trunc(100 * face.confidence)}% face`);\n if (face.genderConfidence) labels.push(`${Math.trunc(100 * face.genderConfidence)}% ${face.gender || ''}`);\n if (face.age) labels.push(`age: ${face.age || ''}`);\n if (face.iris) labels.push(`iris: ${face.iris}`);\n if (face.emotion && face.emotion.length > 0) {\n const emotion = face.emotion.map((a) => `${Math.trunc(100 * a.score)}% ${a.emotion}`);\n labels.push(emotion.join(' '));\n }\n ctx.fillStyle = ui.baseLabel;\n for (const i in labels) ctx.fillText(labels[i], face.box[0] + 8, face.box[1] + 24 + ((i + 1) * ui.baseLineHeight));\n ctx.stroke();\n ctx.lineWidth = 1;\n if (face.mesh) {\n if (ui.drawPoints) {\n for (const point of face.mesh) {\n ctx.fillStyle = ui.useDepth ? `rgba(${127.5 + (2 * point[2])}, ${127.5 - (2 * point[2])}, 255, 0.5)` : ui.baseColor;\n ctx.beginPath();\n ctx.arc(point[0], point[1], 2, 0, 2 * Math.PI);\n ctx.fill();\n }\n }\n if (ui.drawPolygons) {\n for (let i = 0; i < triangulation.length / 3; i++) {\n const points = [\n triangulation[i * 3 + 0],\n triangulation[i * 3 + 1],\n triangulation[i * 3 + 2],\n ].map((index) => face.mesh[index]);\n const path = new Path2D();\n path.moveTo(points[0][0], points[0][1]);\n for (const point of points) {\n path.lineTo(point[0], point[1]);\n }\n path.closePath();\n ctx.strokeStyle = ui.useDepth ? `rgba(${127.5 + (2 * points[0][2])}, ${127.5 - (2 * points[0][2])}, 255, 0.3)` : ui.baseColor;\n ctx.stroke(path);\n if (ui.fillPolygons) {\n ctx.fillStyle = ui.useDepth ? 
`rgba(${127.5 + (2 * points[0][2])}, ${127.5 - (2 * points[0][2])}, 255, 0.3)` : ui.baseColor;\n ctx.fill(path);\n }\n }\n // iris: array[center, left, top, right, bottom]\n if (face.annotations && face.annotations.leftEyeIris) {\n ctx.strokeStyle = ui.useDepth ? 'rgba(255, 200, 255, 0.3)' : ui.baseColor;\n ctx.beginPath();\n const sizeX = Math.abs(face.annotations.leftEyeIris[3][0] - face.annotations.leftEyeIris[1][0]) / 2;\n const sizeY = Math.abs(face.annotations.leftEyeIris[4][1] - face.annotations.leftEyeIris[2][1]) / 2;\n ctx.ellipse(face.annotations.leftEyeIris[0][0], face.annotations.leftEyeIris[0][1], sizeX, sizeY, 0, 0, 2 * Math.PI);\n ctx.stroke();\n if (ui.fillPolygons) {\n ctx.fillStyle = ui.useDepth ? 'rgba(255, 255, 200, 0.3)' : ui.baseColor;\n ctx.fill();\n }\n }\n if (face.annotations && face.annotations.rightEyeIris) {\n ctx.strokeStyle = ui.useDepth ? 'rgba(255, 200, 255, 0.3)' : ui.baseColor;\n ctx.beginPath();\n const sizeX = Math.abs(face.annotations.rightEyeIris[3][0] - face.annotations.rightEyeIris[1][0]) / 2;\n const sizeY = Math.abs(face.annotations.rightEyeIris[4][1] - face.annotations.rightEyeIris[2][1]) / 2;\n ctx.ellipse(face.annotations.rightEyeIris[0][0], face.annotations.rightEyeIris[0][1], sizeX, sizeY, 0, 0, 2 * Math.PI);\n ctx.stroke();\n if (ui.fillPolygons) {\n ctx.fillStyle = ui.useDepth ? 'rgba(255, 255, 200, 0.3)' : ui.baseColor;\n ctx.fill();\n }\n }\n }\n }\n }\n}\n\nasync function drawBody(result, canvas, ui) {\n if (!result) return;\n const ctx = canvas.getContext('2d');\n ctx.lineJoin = 'round';\n for (const pose of result) {\n ctx.fillStyle = ui.baseColor;\n ctx.strokeStyle = ui.baseColor;\n ctx.font = ui.baseFont;\n ctx.lineWidth = ui.baseLineWidth;\n if (ui.drawPoints) {\n for (const point of pose.keypoints) {\n ctx.beginPath();\n ctx.arc(point.position.x, point.position.y, 2, 0, 2 * Math.PI);\n ctx.fill();\n }\n }\n if (ui.drawPolygons) {\n const path = new Path2D();\n let part;\n // torso\n part = pose.keypoints.find((a) => a.part === 'leftShoulder');\n path.moveTo(part.position.x, part.position.y);\n part = pose.keypoints.find((a) => a.part === 'rightShoulder');\n path.lineTo(part.position.x, part.position.y);\n part = pose.keypoints.find((a) => a.part === 'rightHip');\n path.lineTo(part.position.x, part.position.y);\n part = pose.keypoints.find((a) => a.part === 'leftHip');\n path.lineTo(part.position.x, part.position.y);\n part = pose.keypoints.find((a) => a.part === 'leftShoulder');\n path.lineTo(part.position.x, part.position.y);\n // legs\n part = pose.keypoints.find((a) => a.part === 'leftHip');\n path.moveTo(part.position.x, part.position.y);\n part = pose.keypoints.find((a) => a.part === 'leftKnee');\n path.lineTo(part.position.x, part.position.y);\n part = pose.keypoints.find((a) => a.part === 'leftAnkle');\n path.lineTo(part.position.x, part.position.y);\n part = pose.keypoints.find((a) => a.part === 'rightHip');\n path.moveTo(part.position.x, part.position.y);\n part = pose.keypoints.find((a) => a.part === 'rightKnee');\n path.lineTo(part.position.x, part.position.y);\n part = pose.keypoints.find((a) => a.part === 'rightAnkle');\n path.lineTo(part.position.x, part.position.y);\n // arms\n part = pose.keypoints.find((a) => a.part === 'rightShoulder');\n path.moveTo(part.position.x, part.position.y);\n part = pose.keypoints.find((a) => a.part === 'leftShoulder');\n path.lineTo(part.position.x, part.position.y);\n part = pose.keypoints.find((a) => a.part === 'leftElbow');\n path.lineTo(part.position.x, part.position.y);\n part = 
pose.keypoints.find((a) => a.part === 'leftWrist');\n path.lineTo(part.position.x, part.position.y);\n // arms\n part = pose.keypoints.find((a) => a.part === 'leftShoulder');\n path.moveTo(part.position.x, part.position.y);\n part = pose.keypoints.find((a) => a.part === 'rightShoulder');\n path.lineTo(part.position.x, part.position.y);\n part = pose.keypoints.find((a) => a.part === 'rightElbow');\n path.lineTo(part.position.x, part.position.y);\n part = pose.keypoints.find((a) => a.part === 'rightWrist');\n path.lineTo(part.position.x, part.position.y);\n // draw all\n ctx.stroke(path);\n }\n }\n}\n\nasync function drawHand(result, canvas, ui) {\n if (!result) return;\n const ctx = canvas.getContext('2d');\n ctx.lineJoin = 'round';\n for (const hand of result) {\n ctx.font = ui.baseFont;\n ctx.lineWidth = ui.baseLineWidth;\n if (ui.drawBoxes) {\n ctx.lineWidth = ui.baseLineWidth;\n ctx.beginPath();\n ctx.strokeStyle = ui.baseColor;\n ctx.fillStyle = ui.baseColor;\n ctx.rect(hand.box[0], hand.box[1], hand.box[2], hand.box[3]);\n ctx.fillStyle = ui.baseLabel;\n ctx.fillText('hand', hand.box[0] + 2, hand.box[1] + 22, hand.box[2]);\n ctx.stroke();\n }\n if (ui.drawPoints) {\n if (hand.landmarks && hand.landmarks.length > 0) {\n for (const point of hand.landmarks) {\n ctx.fillStyle = ui.useDepth ? `rgba(${127.5 + (2 * point[2])}, ${127.5 - (2 * point[2])}, 255, 0.5)` : ui.baseColor;\n ctx.beginPath();\n ctx.arc(point[0], point[1], 2, 0, 2 * Math.PI);\n ctx.fill();\n }\n }\n }\n if (ui.drawPolygons) {\n const addPart = (part) => {\n if (!part) return;\n for (let i = 0; i < part.length; i++) {\n ctx.lineWidth = ui.baseLineWidth;\n ctx.beginPath();\n ctx.strokeStyle = ui.useDepth ? `rgba(${127.5 + (2 * part[i][2])}, ${127.5 - (2 * part[i][2])}, 255, 0.5)` : ui.baseColor;\n ctx.moveTo(part[i > 0 ? i - 1 : 0][0], part[i > 0 ? 
i - 1 : 0][1]);\n ctx.lineTo(part[i][0], part[i][1]);\n ctx.stroke();\n }\n };\n addPart(hand.annotations.indexFinger);\n addPart(hand.annotations.middleFinger);\n addPart(hand.annotations.ringFinger);\n addPart(hand.annotations.pinky);\n addPart(hand.annotations.thumb);\n // addPart(hand.annotations.palmBase);\n }\n }\n}\n\nconst draw = {\n face: drawFace,\n body: drawBody,\n hand: drawHand,\n gesture: drawGesture,\n};\n\nexport default draw;\n", "let instance = 0;\nlet CSScreated = false;\n\nlet theme = {\n background: 'darkslategray',\n hover: 'lightgray',\n itemBackground: 'black',\n itemColor: 'white',\n buttonBackground: 'lightblue',\n buttonHover: 'lightgreen',\n checkboxOn: 'lightgreen',\n checkboxOff: 'lightcoral',\n rangeBackground: 'lightblue',\n rangeLabel: 'white',\n chartColor: 'lightblue',\n};\n\nfunction createCSS() {\n if (CSScreated) return;\n const css = `\n :root { --rounded: 0.2rem; }\n .menu { position: absolute; top: 0rem; right: 0; width: fit-content; padding: 0 0.8rem 0 0.8rem; line-height: 1.8rem; z-index: 10; max-height: calc(100% - 4rem);\n box-shadow: 0 0 8px dimgrey; background: ${theme.background}; border-radius: var(--rounded); border-color: black; border-style: solid; border-width: thin; }\n\n .menu:hover { box-shadow: 0 0 8px ${theme.hover}; }\n .menu-container { display: block; max-height: 100vh; }\n .menu-container-fadeout { max-height: 0; overflow: hidden; transition: max-height, 0.5s ease; }\n .menu-container-fadein { max-height: 100vh; overflow: hidden; transition: max-height, 0.5s ease; }\n .menu-item { display: flex; white-space: nowrap; padding: 0.2rem; width: max-content; cursor: default; }\n .menu-title { text-align: right; cursor: pointer; }\n .menu-hr { margin: 0.2rem; border: 1px solid rgba(0, 0, 0, 0.5) }\n .menu-label { padding: 0; font-weight: 800; }\n\n .menu-list { margin-right: 0.8rem; }\n select:focus { outline: none; }\n .menu-list-item { background: ${theme.itemBackground}; color: ${theme.itemColor}; border: none; padding: 0.2rem; font-family: inherit;\n font-variant: inherit; border-radius: var(--rounded); font-weight: 800; }\n\n .menu-chart-title { padding: 0; font-size: 0.8rem; font-weight: 800; align-items: center}\n .menu-chart-canvas { background: transparent; margin: 0.2rem 0 0.2rem 0.6rem; }\n \n .menu-button { border: 0; background: ${theme.buttonBackground}; width: 100%; padding: 8px; margin: 8px 0 8px 0; cursor: pointer; box-shadow: 4px 4px 4px 0 dimgrey;\n border-radius: var(--rounded); justify-content: center; font-family: inherit; font-variant: inherit; font-size: 1rem; font-weight: 800; }\n .menu-button:hover { background: ${theme.buttonHover}; box-shadow: 4px 4px 4px 0 black; }\n .menu-button:focus { outline: none; }\n\n .menu-checkbox { width: 2.8rem; height: 1rem; background: ${theme.itemBackground}; margin: 0.5rem 0.8rem 0 0; position: relative; border-radius: var(--rounded); }\n .menu-checkbox:after { content: 'OFF'; color: ${theme.checkboxOff}; position: absolute; right: 0.2rem; top: -0.4rem; font-weight: 800; font-size: 0.5rem; }\n .menu-checkbox:before { content: 'ON'; color: ${theme.checkboxOn}; position: absolute; left: 0.3rem; top: -0.4rem; font-weight: 800; font-size: 0.5rem; }\n .menu-checkbox-label { width: 1.3rem; height: 0.8rem; cursor: pointer; position: absolute; top: 0.1rem; left: 0.1rem; z-index: 1; background: ${theme.checkboxOff};\n border-radius: var(--rounded); transition: left 0.6s ease; }\n\n input[type=checkbox] { visibility: hidden; }\n input[type=checkbox]:checked + label { left: 1.4rem; 
background: ${theme.checkboxOn}; }\n\n .menu-range { margin: 0 0.8rem 0 0; width: 5rem; background: transparent; color: ${theme.rangeBackground}; }\n .menu-range:before { color: ${theme.rangeLabel}; margin: 0 0.4rem 0 0; font-weight: 800; font-size: 0.6rem; position: relative; top: 0.3rem; content: attr(value); }\n\n input[type=range] { -webkit-appearance: none; }\n input[type=range]::-webkit-slider-runnable-track { width: 100%; height: 1rem; cursor: pointer; background: ${theme.itemBackground}; border-radius: var(--rounded); border: 1px; }\n input[type=range]::-moz-range-track { width: 100%; height: 1rem; cursor: pointer; background: ${theme.itemBackground}; border-radius: var(--rounded); border: 1px; }\n input[type=range]::-webkit-slider-thumb { border: 1px solid #000000; margin-top: 0.05rem; height: 0.9rem; width: 1.5rem; border-radius: var(--rounded); background: ${theme.rangeBackground}; cursor: pointer; -webkit-appearance: none; }\n input[type=range]::-moz-range-thumb { border: 1px solid #000000; margin-top: 0.05rem; height: 0.9rem; width: 1.5rem; border-radius: var(--rounded); background: ${theme.rangeBackground}; cursor: pointer; -webkit-appearance: none; }\n\n .svg-background { fill:darkslategrey; cursor:pointer; opacity: 0.6; }\n .svg-foreground { fill:white; cursor:pointer; opacity: 0.8; }\n `;\n const el = document.createElement('style');\n el.innerHTML = css;\n document.getElementsByTagName('head')[0].appendChild(el);\n CSScreated = true;\n}\n\nclass Menu {\n constructor(parent, title, position, userTheme) {\n if (userTheme) theme = { ...theme, ...userTheme };\n createCSS();\n this.createMenu(parent, title, position);\n this.id = 0;\n this.instance = instance;\n instance++;\n this._maxFPS = 0;\n this.hidden = 0;\n }\n\n createMenu(parent, title = '', position = { top: null, left: null, bottom: null, right: null }) {\n this.menu = document.createElement('div');\n this.menu.id = `menu-${instance}`;\n this.menu.className = 'menu';\n if (position) {\n if (position.top) this.menu.style.top = position.top;\n if (position.bottom) this.menu.style.bottom = position.bottom;\n if (position.left) this.menu.style.left = position.left;\n if (position.right) this.menu.style.right = position.right;\n }\n\n this.container = document.createElement('div');\n this.container.id = `menu-container-${instance}`;\n this.container.className = 'menu-container menu-container-fadein';\n\n // set menu title with pulldown arrow\n const elTitle = document.createElement('div');\n elTitle.className = 'menu-title';\n elTitle.id = `menu-title-${instance}`;\n const svg = `\n \n \n `;\n elTitle.innerHTML = `${title}${svg}`;\n this.menu.appendChild(elTitle);\n elTitle.addEventListener('click', () => {\n this.container.classList.toggle('menu-container-fadeout');\n this.container.classList.toggle('menu-container-fadein');\n this.menu.style.borderStyle = this.container.classList.contains('menu-container-fadeout') ? 
'none' : 'solid';\n });\n\n this.menu.appendChild(this.container);\n if (typeof parent === 'object') parent.appendChild(this.menu);\n else document.getElementById(parent).appendChild(this.menu);\n }\n\n get newID() {\n this.id++;\n return `menu-${this.instance}-${this.id}`;\n }\n\n get ID() {\n return `menu-${this.instance}-${this.id}`;\n }\n\n get width() {\n return this.menu.offsetWidth;\n }\n\n get height() {\n return this.menu.offsetHeight;\n }\n\n hide() {\n if (this.container.classList.contains('menu-container-fadein')) {\n this.container.classList.toggle('menu-container-fadeout');\n this.container.classList.toggle('menu-container-fadein');\n }\n }\n\n visible() {\n return (this.container.classList.contains('menu-container-fadein'));\n }\n\n toggle(evt) {\n this.container.classList.toggle('menu-container-fadeout');\n this.container.classList.toggle('menu-container-fadein');\n if (this.container.classList.contains('menu-container-fadein') && evt) {\n const x = evt.x || (evt.touches && evt.touches[0] ? evt.touches[0].pageX : null);\n const y = evt.y || (evt.touches && evt.touches[0] ? evt.touches[0].pageY : null);\n if (x) this.menu.style.left = `${x - 105}px`;\n if (y) this.menu.style.top = '5.5rem'; // `${evt.y + 55}px`;\n if (this.menu.offsetLeft < 0) this.menu.style.left = 0;\n if ((this.menu.offsetLeft + this.menu.offsetWidth) > window.innerWidth) {\n this.menu.style.left = null;\n this.menu.style.right = 0;\n }\n this.menu.style.borderStyle = 'solid';\n } else {\n this.menu.style.borderStyle = 'none';\n }\n }\n\n addTitle(title) {\n const el = document.createElement('div');\n el.className = 'menu-title';\n el.id = this.newID;\n el.innerHTML = title;\n this.menu.appendChild(el);\n el.addEventListener('click', () => {\n this.hidden = !this.hidden;\n const all = document.getElementsByClassName('menu');\n for (const item of all) {\n item.style.display = this.hidden ? 'none' : 'block';\n }\n });\n return el;\n }\n\n addLabel(title) {\n const el = document.createElement('div');\n el.className = 'menu-item menu-label';\n el.id = this.newID;\n el.innerHTML = title;\n this.container.appendChild(el);\n return el;\n }\n\n addBool(title, object, variable, callback) {\n const el = document.createElement('div');\n el.className = 'menu-item';\n el.innerHTML = `${title}`;\n this.container.appendChild(el);\n el.addEventListener('change', (evt) => {\n object[variable] = evt.target.checked;\n if (callback) callback(evt.target.checked);\n });\n return el;\n }\n\n async addList(title, items, selected, callback) {\n const el = document.createElement('div');\n el.className = 'menu-item';\n let options = '';\n for (const item of items) {\n const def = item === selected ? 'selected' : '';\n options += `${item} `;\n }\n el.innerHTML = `${title}`;\n el.style.fontFamily = document.body.style.fontFamily;\n el.style.fontSize = document.body.style.fontSize;\n el.style.fontVariant = document.body.style.fontVariant;\n this.container.appendChild(el);\n el.addEventListener('change', (evt) => {\n if (callback) callback(items[evt.target.selectedIndex]);\n });\n return el;\n }\n\n addRange(title, object, variable, min, max, step, callback) {\n const el = document.createElement('div');\n el.className = 'menu-item';\n el.innerHTML = `${title}`;\n this.container.appendChild(el);\n el.addEventListener('change', (evt) => {\n object[variable] = parseInt(evt.target.value) === parseFloat(evt.target.value) ? 
parseInt(evt.target.value) : parseFloat(evt.target.value);\n evt.target.setAttribute('value', evt.target.value);\n if (callback) callback(evt.target.value);\n });\n el.input = el.children[0];\n return el;\n }\n\n addHTML(html) {\n const el = document.createElement('div');\n el.className = 'menu-item';\n el.id = this.newID;\n if (html) el.innerHTML = html;\n this.container.appendChild(el);\n return el;\n }\n\n addButton(titleOn, titleOff, callback) {\n const el = document.createElement('button');\n el.className = 'menu-item menu-button';\n el.style.fontFamily = document.body.style.fontFamily;\n el.style.fontSize = document.body.style.fontSize;\n el.style.fontVariant = document.body.style.fontVariant;\n el.type = 'button';\n el.id = this.newID;\n el.innerText = titleOn;\n this.container.appendChild(el);\n el.addEventListener('click', () => {\n if (el.innerText === titleOn) el.innerText = titleOff;\n else el.innerText = titleOn;\n if (callback) callback(el.innerText !== titleOn);\n });\n return el;\n }\n\n addValue(title, val, suffix = '') {\n const el = document.createElement('div');\n el.className = 'menu-item';\n el.id = `menu-val-${title}`;\n el.innerText = `${title}: ${val}${suffix}`;\n this.container.appendChild(el);\n return el;\n }\n\n // eslint-disable-next-line class-methods-use-this\n updateValue(title, val, suffix = '') {\n const el = document.getElementById(`menu-val-${title}`);\n if (el) el.innerText = `${title}: ${val}${suffix}`;\n else this.addValue(title, val);\n }\n\n addChart(title, id, width = 200, height = 40, color) {\n if (color) theme.chartColor = color;\n const el = document.createElement('div');\n el.className = 'menu-item menu-chart-title';\n el.id = this.newID;\n el.innerHTML = `${title} `;\n this.container.appendChild(el);\n return el;\n }\n\n // eslint-disable-next-line class-methods-use-this\n async updateChart(id, values) {\n if (!values || (values.length === 0)) return;\n const canvas = document.getElementById(`menu-canvas-${id}`);\n if (!canvas) return;\n const ctx = canvas.getContext('2d');\n ctx.fillStyle = theme.background;\n ctx.fillRect(0, 0, canvas.width, canvas.height);\n const width = canvas.width / values.length;\n const max = 1 + Math.max(...values);\n const height = canvas.height / max;\n for (const i in values) {\n const gradient = ctx.createLinearGradient(0, (max - values[i]) * height, 0, 0);\n gradient.addColorStop(0.1, theme.chartColor);\n gradient.addColorStop(0.4, theme.background);\n ctx.fillStyle = gradient;\n ctx.fillRect(i * width, 0, width - 4, canvas.height);\n ctx.fillStyle = theme.background;\n ctx.font = `${width / 1.5}px \"Segoe UI\"`;\n ctx.fillText(Math.round(values[i]), i * width + 1, canvas.height - 1, width - 1);\n }\n }\n}\n\nexport default Menu;\n", "import Human from '../dist/human.esm.js';\nimport draw from './draw.js';\nimport Menu from './menu.js';\n\nconst userConfig = {}; // add any user configuration overrides\n\nconst human = new Human(userConfig);\n\n// ui options\nconst ui = {\n baseColor: 'rgba(173, 216, 230, 0.3)', // 'lightblue' with light alpha channel\n baseBackground: 'rgba(50, 50, 50, 1)', // 'grey'\n baseLabel: 'rgba(173, 216, 230, 0.9)', // 'lightblue' with dark alpha channel\n baseFontProto: 'small-caps {size} \"Segoe UI\"',\n baseLineWidth: 12,\n baseLineHeightProto: 2,\n crop: true,\n columns: 2,\n busy: false,\n facing: true,\n useWorker: false,\n worker: 'demo/worker.js',\n samples: ['../assets/sample6.jpg', '../assets/sample1.jpg', '../assets/sample4.jpg', '../assets/sample5.jpg', 
'../assets/sample3.jpg', '../assets/sample2.jpg'],\n drawBoxes: true,\n drawPoints: false,\n drawPolygons: true,\n fillPolygons: false,\n useDepth: true,\n console: true,\n maxFrames: 10,\n modelsPreload: true,\n modelsWarmup: true,\n menuWidth: 0,\n menuHeight: 0,\n camera: {},\n fps: [],\n};\n\n// global variables\nlet menu;\nlet menuFX;\nlet worker;\nlet timeStamp;\n\n// helper function: translates json to human readable string\nfunction str(...msg) {\n if (!Array.isArray(msg)) return msg;\n let line = '';\n for (const entry of msg) {\n if (typeof entry === 'object') line += JSON.stringify(entry).replace(/{|}|\"|\\[|\\]/g, '').replace(/,/g, ', ');\n else line += entry;\n }\n return line;\n}\n\n// helper function: wrapper around console output\nconst log = (...msg) => {\n // eslint-disable-next-line no-console\n if (ui.console) console.log(...msg);\n};\n\nconst status = (msg) => {\n // eslint-disable-next-line no-console\n document.getElementById('status').innerText = msg;\n};\n\n// draws processed results and starts processing of a next frame\nfunction drawResults(input, result, canvas) {\n // update fps data\n const elapsed = performance.now() - timeStamp;\n ui.fps.push(1000 / elapsed);\n if (ui.fps.length > ui.maxFrames) ui.fps.shift();\n\n // enable for continous performance monitoring\n // console.log(result.performance);\n\n // immediate loop before we even draw results, but limit frame rate to 30\n if (input.srcObject) {\n // eslint-disable-next-line no-use-before-define\n if (elapsed > 33) requestAnimationFrame(() => runHumanDetect(input, canvas));\n // eslint-disable-next-line no-use-before-define\n else setTimeout(() => runHumanDetect(input, canvas), 33 - elapsed);\n }\n // draw fps chart\n menu.updateChart('FPS', ui.fps);\n // draw image from video\n const ctx = canvas.getContext('2d');\n ctx.fillStyle = ui.baseBackground;\n ctx.fillRect(0, 0, canvas.width, canvas.height);\n if (result.canvas) {\n if (result.canvas.width !== canvas.width) canvas.width = result.canvas.width;\n if (result.canvas.height !== canvas.height) canvas.height = result.canvas.height;\n ctx.drawImage(result.canvas, 0, 0, result.canvas.width, result.canvas.height, 0, 0, result.canvas.width, result.canvas.height);\n } else {\n ctx.drawImage(input, 0, 0, input.width, input.height, 0, 0, canvas.width, canvas.height);\n }\n // draw all results\n draw.face(result.face, canvas, ui, human.facemesh.triangulation);\n draw.body(result.body, canvas, ui);\n draw.hand(result.hand, canvas, ui);\n draw.gesture(result.gesture, canvas, ui);\n // update log\n const engine = human.tf.engine();\n const gpu = engine.backendInstance ? `gpu: ${(engine.backendInstance.numBytesInGPU ? engine.backendInstance.numBytesInGPU : 0).toLocaleString()} bytes` : '';\n const memory = `system: ${engine.state.numBytes.toLocaleString()} bytes ${gpu} | tensors: ${engine.state.numTensors.toLocaleString()}`;\n const processing = result.canvas ? `processing: ${result.canvas.width} x ${result.canvas.height}` : '';\n const avg = Math.trunc(10 * ui.fps.reduce((a, b) => a + b) / ui.fps.length) / 10;\n const warning = (ui.fps.length > 5) && (avg < 5) ? 
'warning: your performance is low: try switching to higher performance backend, lowering resolution or disabling some models ' : '';\n document.getElementById('log').innerHTML = `\n video: ${ui.camera.name} | facing: ${ui.camera.facing} | resolution: ${ui.camera.width} x ${ui.camera.height} ${processing} \n backend: ${human.tf.getBackend()} | ${memory} \n performance: ${str(result.performance)} FPS:${avg} \n ${warning}\n `;\n}\n\n// setup webcam\nasync function setupCamera() {\n if (ui.busy) return null;\n ui.busy = true;\n const video = document.getElementById('video');\n const canvas = document.getElementById('canvas');\n const output = document.getElementById('log');\n const live = video.srcObject ? ((video.srcObject.getVideoTracks()[0].readyState === 'live') && (video.readyState > 2) && (!video.paused)) : false;\n let msg = '';\n status('setting up camera');\n // setup webcam. note that navigator.mediaDevices requires that page is accessed via https\n if (!navigator.mediaDevices) {\n msg = 'camera access not supported';\n output.innerText += `\\n${msg}`;\n log(msg);\n status(msg);\n return null;\n }\n let stream;\n const constraints = {\n audio: false,\n video: {\n facingMode: ui.facing ? 'user' : 'environment',\n resizeMode: ui.crop ? 'crop-and-scale' : 'none',\n width: { ideal: window.innerWidth },\n height: { ideal: window.innerHeight },\n },\n };\n try {\n // if (window.innerWidth > window.innerHeight) constraints.video.width = { ideal: window.innerWidth };\n // else constraints.video.height = { ideal: window.innerHeight };\n stream = await navigator.mediaDevices.getUserMedia(constraints);\n } catch (err) {\n if (err.name === 'PermissionDeniedError') msg = 'camera permission denied';\n else if (err.name === 'SourceUnavailableError') msg = 'camera not available';\n else msg = 'camera error';\n output.innerText += `\\n${msg}`;\n status(msg);\n log(err);\n }\n if (stream) video.srcObject = stream;\n else return null;\n const track = stream.getVideoTracks()[0];\n const settings = track.getSettings();\n // log('camera constraints:', constraints, 'window:', { width: window.innerWidth, height: window.innerHeight }, 'settings:', settings, 'track:', track);\n ui.camera = { name: track.label?.toLowerCase(), width: settings.width, height: settings.height, facing: settings.facingMode === 'user' ? 'front' : 'back' };\n return new Promise((resolve) => {\n video.onloadeddata = async () => {\n video.width = video.videoWidth;\n video.height = video.videoHeight;\n canvas.width = video.width;\n canvas.height = video.height;\n canvas.style.width = canvas.width > canvas.height ? '100vw' : '';\n canvas.style.height = canvas.width > canvas.height ? 
'' : '100vh';\n ui.menuWidth.input.setAttribute('value', video.width);\n ui.menuHeight.input.setAttribute('value', video.height);\n // silly font resizing for paint-on-canvas since viewport can be zoomed\n const size = 14 + (6 * canvas.width / window.innerWidth);\n ui.baseFont = ui.baseFontProto.replace(/{size}/, `${size}px`);\n if (live) video.play();\n ui.busy = false;\n // do once more because onresize events can be delayed or skipped\n // if (video.width > window.innerWidth) await setupCamera();\n status('');\n resolve(video);\n };\n });\n}\n\n// wrapper for worker.postmessage that creates worker if one does not exist\nfunction webWorker(input, image, canvas) {\n if (!worker) {\n // create new webworker and add event handler only once\n log('creating worker thread');\n worker = new Worker(ui.worker, { type: 'module' });\n worker.warned = false;\n // after receiving message from webworker, parse&draw results and send new frame for processing\n worker.addEventListener('message', (msg) => {\n if (!worker.warned) {\n log('warning: cannot transfer canvas from worked thread');\n log('warning: image will not show filter effects');\n worker.warned = true;\n }\n drawResults(input, msg.data.result, canvas);\n });\n }\n // pass image data as arraybuffer to worker by reference to avoid copy\n worker.postMessage({ image: image.data.buffer, width: canvas.width, height: canvas.height }, [image.data.buffer]);\n}\n\n// main processing function when input is webcam, can use direct invocation or web worker\nfunction runHumanDetect(input, canvas) {\n timeStamp = performance.now();\n // if live video\n const live = input.srcObject && (input.srcObject.getVideoTracks()[0].readyState === 'live') && (input.readyState > 2) && (!input.paused);\n if (!live && input.srcObject) {\n // if we want to continue and camera not ready, retry in 0.5sec, else just give up\n if (input.paused) log('camera paused');\n else if ((input.srcObject.getVideoTracks()[0].readyState === 'live') && (input.readyState <= 2)) setTimeout(() => runHumanDetect(input, canvas), 500);\n else log(`camera not ready: track state: ${input.srcObject?.getVideoTracks()[0].readyState} stream state: ${input.readyState}`);\n return;\n }\n status('');\n if (ui.useWorker) {\n // get image data from video as we cannot send html objects to webworker\n const offscreen = new OffscreenCanvas(canvas.width, canvas.height);\n const ctx = offscreen.getContext('2d');\n ctx.drawImage(input, 0, 0, input.width, input.height, 0, 0, canvas.width, canvas.height);\n const data = ctx.getImageData(0, 0, canvas.width, canvas.height);\n // perform detection in worker\n webWorker(input, data, canvas, userConfig);\n } else {\n human.detect(input, userConfig).then((result) => {\n if (result.error) log(result.error);\n else drawResults(input, result, canvas);\n });\n }\n}\n\n// main processing function when input is image, can use direct invocation or web worker\nasync function processImage(input) {\n timeStamp = performance.now();\n return new Promise((resolve) => {\n const image = new Image();\n image.onload = async () => {\n log('Processing image:', image.src);\n const canvas = document.getElementById('canvas');\n image.width = image.naturalWidth;\n image.height = image.naturalHeight;\n canvas.width = human.config.filter.width && human.config.filter.width > 0 ? human.config.filter.width : image.naturalWidth;\n canvas.height = human.config.filter.height && human.config.filter.height > 0 ? 
human.config.filter.height : image.naturalHeight;\n const result = await human.detect(image, userConfig);\n drawResults(image, result, canvas);\n const thumb = document.createElement('canvas');\n thumb.className = 'thumbnail';\n thumb.width = window.innerWidth / (ui.columns + 0.1);\n thumb.height = canvas.height / (window.innerWidth / thumb.width);\n const ctx = thumb.getContext('2d');\n ctx.drawImage(canvas, 0, 0, canvas.width, canvas.height, 0, 0, thumb.width, thumb.height);\n document.getElementById('samples-container').appendChild(thumb);\n image.src = '';\n resolve(true);\n };\n image.src = input;\n });\n}\n\n// just initialize everything and call main function\nasync function detectVideo() {\n human.config.videoOptimized = true;\n document.getElementById('samples-container').style.display = 'none';\n document.getElementById('canvas').style.display = 'block';\n const video = document.getElementById('video');\n const canvas = document.getElementById('canvas');\n ui.baseLineHeight = ui.baseLineHeightProto;\n if ((video.srcObject !== null) && !video.paused) {\n document.getElementById('play').style.display = 'block';\n status('paused');\n video.pause();\n } else {\n await setupCamera();\n document.getElementById('play').style.display = 'none';\n status('');\n video.play();\n }\n runHumanDetect(video, canvas);\n}\n\n// just initialize everything and call main function\nasync function detectSampleImages() {\n document.getElementById('play').style.display = 'none';\n human.config.videoOptimized = false;\n const size = 12 + Math.trunc(12 * ui.columns * window.innerWidth / document.body.clientWidth);\n ui.baseFont = ui.baseFontProto.replace(/{size}/, `${size}px`);\n ui.baseLineHeight = ui.baseLineHeightProto * ui.columns;\n document.getElementById('canvas').style.display = 'none';\n document.getElementById('samples-container').style.display = 'block';\n log('Running detection of sample images');\n status('processing images');\n document.getElementById('samples-container').innerHTML = '';\n for (const sample of ui.samples) await processImage(sample);\n status('');\n}\n\nfunction setupMenu() {\n menu = new Menu(document.body, '', { top: '1rem', right: '1rem' });\n const btn = menu.addButton('start video', 'pause video', () => detectVideo());\n menu.addButton('process images', 'process images', () => detectSampleImages());\n document.getElementById('play').addEventListener('click', () => btn.click());\n\n menu.addHTML(' ');\n menu.addList('backend', ['cpu', 'webgl', 'wasm'], human.config.backend, (val) => human.config.backend = val);\n menu.addBool('async operations', human.config, 'async', (val) => human.config.async = val);\n menu.addBool('enable profiler', human.config, 'profile', (val) => human.config.profile = val);\n menu.addBool('memory shield', human.config, 'deallocate', (val) => human.config.deallocate = val);\n menu.addBool('use web worker', ui, 'useWorker');\n menu.addHTML(' ');\n menu.addLabel('enabled models');\n menu.addBool('face detect', human.config.face, 'enabled');\n menu.addBool('face mesh', human.config.face.mesh, 'enabled');\n menu.addBool('face iris', human.config.face.iris, 'enabled');\n menu.addBool('face age', human.config.face.age, 'enabled');\n menu.addBool('face gender', human.config.face.gender, 'enabled');\n menu.addBool('face emotion', human.config.face.emotion, 'enabled');\n menu.addBool('body pose', human.config.body, 'enabled');\n menu.addBool('hand pose', human.config.hand, 'enabled');\n menu.addBool('gesture analysis', human.config.gesture, 'enabled');\n\n 
menu.addHTML(' ');\n menu.addLabel('model parameters');\n menu.addRange('max objects', human.config.face.detector, 'maxFaces', 1, 50, 1, (val) => {\n human.config.face.detector.maxFaces = parseInt(val);\n human.config.body.maxDetections = parseInt(val);\n human.config.hand.maxHands = parseInt(val);\n });\n menu.addRange('skip frames', human.config.face.detector, 'skipFrames', 0, 50, 1, (val) => {\n human.config.face.detector.skipFrames = parseInt(val);\n human.config.face.emotion.skipFrames = parseInt(val);\n human.config.face.age.skipFrames = parseInt(val);\n human.config.hand.skipFrames = parseInt(val);\n });\n menu.addRange('min confidence', human.config.face.detector, 'minConfidence', 0.0, 1.0, 0.05, (val) => {\n human.config.face.detector.minConfidence = parseFloat(val);\n human.config.face.gender.minConfidence = parseFloat(val);\n human.config.face.emotion.minConfidence = parseFloat(val);\n human.config.hand.minConfidence = parseFloat(val);\n });\n menu.addRange('score threshold', human.config.face.detector, 'scoreThreshold', 0.1, 1.0, 0.05, (val) => {\n human.config.face.detector.scoreThreshold = parseFloat(val);\n human.config.hand.scoreThreshold = parseFloat(val);\n human.config.body.scoreThreshold = parseFloat(val);\n });\n menu.addRange('overlap', human.config.face.detector, 'iouThreshold', 0.1, 1.0, 0.05, (val) => {\n human.config.face.detector.iouThreshold = parseFloat(val);\n human.config.hand.iouThreshold = parseFloat(val);\n });\n\n menu.addHTML(' ');\n menu.addChart('FPS', 'FPS');\n\n menuFX = new Menu(document.body, '', { top: '1rem', right: '18rem' });\n menuFX.addLabel('ui options');\n menuFX.addBool('crop & scale', ui, 'crop', () => setupCamera());\n menuFX.addBool('camera front/back', ui, 'facing', () => setupCamera());\n menuFX.addBool('use 3D depth', ui, 'useDepth');\n menuFX.addBool('draw boxes', ui, 'drawBoxes');\n menuFX.addBool('draw polygons', ui, 'drawPolygons');\n menuFX.addBool('Fill Polygons', ui, 'fillPolygons');\n menuFX.addBool('draw points', ui, 'drawPoints');\n menuFX.addHTML(' ');\n menuFX.addLabel('image processing');\n menuFX.addBool('enabled', human.config.filter, 'enabled');\n ui.menuWidth = menuFX.addRange('image width', human.config.filter, 'width', 0, 3840, 10, (val) => human.config.filter.width = parseInt(val));\n ui.menuHeight = menuFX.addRange('image height', human.config.filter, 'height', 0, 2160, 10, (val) => human.config.filter.height = parseInt(val));\n menuFX.addRange('brightness', human.config.filter, 'brightness', -1.0, 1.0, 0.05, (val) => human.config.filter.brightness = parseFloat(val));\n menuFX.addRange('contrast', human.config.filter, 'contrast', -1.0, 1.0, 0.05, (val) => human.config.filter.contrast = parseFloat(val));\n menuFX.addRange('sharpness', human.config.filter, 'sharpness', 0, 1.0, 0.05, (val) => human.config.filter.sharpness = parseFloat(val));\n menuFX.addRange('blur', human.config.filter, 'blur', 0, 20, 1, (val) => human.config.filter.blur = parseInt(val));\n menuFX.addRange('saturation', human.config.filter, 'saturation', -1.0, 1.0, 0.05, (val) => human.config.filter.saturation = parseFloat(val));\n menuFX.addRange('hue', human.config.filter, 'hue', 0, 360, 5, (val) => human.config.filter.hue = parseInt(val));\n menuFX.addRange('pixelate', human.config.filter, 'pixelate', 0, 32, 1, (val) => human.config.filter.pixelate = parseInt(val));\n menuFX.addBool('negative', human.config.filter, 'negative');\n menuFX.addBool('sepia', human.config.filter, 'sepia');\n menuFX.addBool('vintage', human.config.filter, 'vintage');\n 
menuFX.addBool('kodachrome', human.config.filter, 'kodachrome');\n menuFX.addBool('technicolor', human.config.filter, 'technicolor');\n menuFX.addBool('polaroid', human.config.filter, 'polaroid');\n}\n\nasync function main() {\n log('Human: demo starting ...');\n setupMenu();\n document.getElementById('log').innerText = `Human: version ${human.version} TensorFlow/JS: version ${human.tf.version_core}`;\n human.tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true);\n // this is not required, just pre-loads all models\n if (ui.modelsPreload) {\n status('loading');\n await human.load(userConfig);\n }\n // this is not required, just pre-warms all models for faster initial inference\n if (ui.modelsWarmup) {\n status('initializing');\n await human.warmup(userConfig);\n }\n status('human: ready');\n document.getElementById('loader').style.display = 'none';\n document.getElementById('play').style.display = 'block';\n}\n\nwindow.onload = main;\nwindow.onresize = setupCamera;\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,sBAAA,WAAA;AAAA;AAGA,mBAAgB;AAIf,QAAI,OAAO,SAAS;AAAe,aAAO;;AAC1C,QAAI,OAAO,WAAW;AAAe,aAAO;;AAC5C,QAAI,OAAO,YAAW;AAAe,aAAO;;AAC5C,UAAM,IAAI,MAAM;;AAGjB,gBAAa;AAEb,SAAO,UAAU,UAAU,QAAO;AAGlC,MAAI,QAAO;AACV,YAAQ,UAAU,QAAO,MAAM,KAAK;;AAGrC,UAAQ,UAAU,QAAO;AACzB,UAAQ,UAAU,QAAO;AACzB,UAAQ,WAAW,QAAO;;ACxB1B,0BAAA,WAAA;AACA,gBAAa;AACb,gBAAa,QAAO;AAGpB;AACE,oBAAgB;AACd,UAAI,OAAO,IAAI;;;AAGnB,MAAI,QAAO,QAAQ,QAAO,SAAS,QAAO,eAAe,QAAO;AAC9D,WAAO,UAAU;;AAGjB,cAAU,SAAQ;AAClB,YAAQ,SAAS;;AAGnB;AACE,WAAO,QAAO,KAAK,kBAAkB;;AAIvC,YAAU,SAAQ;AAElB,aAAW,OAAO;AAChB,QAAI,OAAO,QAAQ;AACjB,YAAM,IAAI,UAAU;;AAEtB,WAAO,QAAO,KAAK,kBAAkB;;AAGvC,aAAW,QAAQ;AACjB,QAAI,OAAO,SAAS;AAClB,YAAM,IAAI,UAAU;;AAEtB,cAAU,QAAO;AACjB,QAAI,UAAS;AACX,UAAI,OAAO,aAAa;AACtB,YAAI,KAAK,OAAM;;AAEf,YAAI,KAAK;;;AAGX,UAAI,KAAK;;AAEX,WAAO;;AAGT,aAAW,cAAc;AACvB,QAAI,OAAO,SAAS;AAClB,YAAM,IAAI,UAAU;;AAEtB,WAAO,QAAO;;AAGhB,aAAW,kBAAkB;AAC3B,QAAI,OAAO,SAAS;AAClB,YAAM,IAAI,UAAU;;AAEtB,WAAO,QAAO,WAAW;;;AC5D3B,6BAAA,WAAA;AAqBA;AAIA,gBAAa,sBAAuB;AAGpC,mBAAiB,QAAO,cAAc;AACpC,eAAW,KAAK;AAChB,YAAQ,YAAY,SAAS;WACtB;WAAW;WAAY;WAAa;WAAa;WAAc;WAAc;WAAY;WAAa;WAAe;WAAgB;AACxI,eAAO;;AAEP,eAAO;;;AAIb;AACE,QAAI,CAAC;AAAK,aAAO;AACjB;AACA,WAAO;AACL,cAAQ;aACD;aACA;AACH,iBAAO;aACJ;aACA;aACA;aACA;AACH,iBAAO;aACJ;aACA;AACH,iBAAO;aACJ;aACA;aACA;AACH,iBAAO;;AAEP,cAAI;AAAS;AACb,gBAAO,MAAK,KAAK;AACjB,oBAAU;;;;AAOlB;AACE,eAAW,mBAAmB;AAC9B,QAAI,OAAO,SAAS,YAAa,SAAO,eAAe,cAAc,CAAC,WAAW;AAAO,YAAM,IAAI,MAAM,uBAAuB;AAC/H,WAAO,QAAQ;;AAMjB,UAAQ,gBAAgB;AACxB;AACE,SAAK,WAAW,kBAAkB;AAClC;AACA,YAAQ,KAAK;WACN;AACH,aAAK,OAAO;AACZ,aAAK,MAAM;AACX,aAAK;AACL;WACG;AACH,aAAK,WAAW;AAChB,aAAK;AACL;WACG;AACH,aAAK,OAAO;AACZ,aAAK,MAAM;AACX,aAAK;AACL;;AAEA,aAAK,QAAQ;AACb,aAAK,MAAM;AACX;;AAEJ,SAAK,WAAW;AAChB,SAAK,YAAY;AACjB,SAAK,WAAW,QAAO,YAAY;;AAGrC,gBAAc,UAAU,QAAQ;AAC9B,QAAI,IAAI,WAAW;AAAG,aAAO;AAC7B;AACA;AACA,QAAI,KAAK;AACP,UAAI,KAAK,SAAS;AAClB,UAAI,MAAM;AAAW,eAAO;AAC5B,UAAI,KAAK;AACT,WAAK,WAAW;;AAEhB,UAAI;;AAEN,QAAI,IAAI,IAAI;AAAQ,aAAO,IAAI,IAAI,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK;AACtE,WAAO,KAAK;;AAGd,gBAAc,UAAU,MAAM;AAG9B,gBAAc,UAAU,OAAO;AAG/B,gBAAc,UAAU,WAAW;AACjC,QAAI,KAAK,YAAY,IAAI;AACvB,UAAI,KAAK,KAAK,UAAU,KAAK,YAAY,KAAK,UAAU,GAAG,KAAK;AAChE,aAAO,KAAK,SAAS,SAAS,KAAK,UAAU,GAAG,KAAK;;AAEvD,QAAI,KAAK,KAAK,UAAU,KAAK,YAAY,KAAK,UAAU,GAAG,IAAI;AAC/D,SAAK,YAAY,IAAI;;AAKvB;AACE,QAAI,QAAQ;AAAM,aAAO;aAAW,QAAQ,MAAM;AAAM,aAAO;aAAW,QAAQ,MAAM;AAAM,aAAO;aAAW,QAAQ,MAAM;AAAM,aAAO;AAC3I,WAAO,QAAQ,MAAM,IAAO,KAAK;;AAMnC;AACE,YAAQ,IAAI,SAAS;AACrB,QAAI,IAAI;AAAG,aAAO;AAClB,aAAS,cAAc,IAAI;AAC3B,QAAI,MAAM;AACR,UAAI,KAAK;AAAG,cAAK,WAAW,KAAK;AACjC,aAAO;;AAET,QAAI,EAAE,IAAI,KAAK,OAAO;AAAI,aAAO;AACjC,SAAK,cAAc,IAAI;AACvB,QAAI,MAAM;AACR,UAAI,KAAK;AAAG,cAAK,WAAW,KAAK;AACjC,aAAO;;AAET,QAAI,EAAE,IAAI,KAAK,OAAO;AAAI,aAAO;AACjC,SAAK,cAAc,IAAI;AACvB,QAAI,MAAM;AACR,UAAI,KAAK;AACP,YAAI,OAAO;AAAG,eAAK;;AAAO,gBAAK,WAAW,KAAK;;AAEjD,aAAO;;AAET,WAAO;;AAWT;AACE,QAAK,KAAI,KAAK,SAAU;AACtB,YAAK,WAAW;AAChB,aAAO;;AAET,QAAI,MAAK,WAAW,KAAK,IAAI,SAAS;AACpC,UAAK,KAAI,KAAK,SAAU;AACtB,cAAK,WAAW;AAChB,eAAO;;AAET,UAAI,MAAK,WAAW,KAAK,IAAI,SAAS;AACpC,YAAK,KAAI,KAAK,SAAU;AACtB,gBAAK,WAAW;AAChB,iBAAO;;;;;AAOf;AACE,YAAQ,KAAK,YAAY,KAAK;AAC9B,YAAQ,oBAAoB,MAAM,KAAK;AACvC,QAAI,MAAM;AAAW,aAAO;AAC5B,QAAI,KAAK,YAAY,IAAI;AACvB,UAAI,KAAK,KAAK,UAAU,GAAG,GAAG,KAAK;AACnC,aAAO,KAAK,SAAS,SAAS,KAAK,UAAU,GAAG,KAAK;;AAEvD,QAAI,KAAK,KAAK,UAAU,GAAG,GAAG,IAAI;AAClC,SAAK,YAAY,IAAI;;AAMvB;AACE,gBAAY,oBAAoB,MAAM,KAAK;AAC3C,QAAI,CAAC,KAAK;AAAU,aAAO,IAAI,SAAS,QAAQ;AAChD,SAAK,YAAY;AACjB,cAAU,IAAI,SAAU,SAAQ,KAAK;AACrC,QAAI,KAAK,KAAK,UAA
U,GAAG;AAC3B,WAAO,IAAI,SAAS,QAAQ,GAAG;;AAKjC;AACE,YAAQ,OAAO,IAAI,SAAS,KAAK,MAAM,OAAO;AAC9C,QAAI,KAAK;AAAU,aAAO,IAAI;AAC9B,WAAO;;AAOT;AACE,QAAK,KAAI,SAAS,KAAK,MAAM;AAC3B,cAAQ,IAAI,SAAS,WAAW;AAChC,UAAI;AACF,gBAAQ,EAAE,WAAW,EAAE,SAAS;AAChC,YAAI,KAAK,SAAU,KAAK;AACtB,eAAK,WAAW;AAChB,eAAK,YAAY;AACjB,eAAK,SAAS,KAAK,IAAI,IAAI,SAAS;AACpC,eAAK,SAAS,KAAK,IAAI,IAAI,SAAS;AACpC,iBAAO,EAAE,MAAM,GAAG;;;AAGtB,aAAO;;AAET,SAAK,WAAW;AAChB,SAAK,YAAY;AACjB,SAAK,SAAS,KAAK,IAAI,IAAI,SAAS;AACpC,WAAO,IAAI,SAAS,WAAW,GAAG,IAAI,SAAS;;AAKjD;AACE,YAAQ,OAAO,IAAI,SAAS,KAAK,MAAM,OAAO;AAC9C,QAAI,KAAK;AACP,gBAAU,KAAK,YAAY,KAAK;AAChC,aAAO,IAAI,KAAK,SAAS,SAAS,WAAW,GAAG;;AAElD,WAAO;;AAGT;AACE,YAAS,KAAI,SAAS,KAAK;AAC3B,QAAI,MAAM;AAAG,aAAO,IAAI,SAAS,UAAU;AAC3C,SAAK,WAAW,IAAI;AACpB,SAAK,YAAY;AACjB,QAAI,MAAM;AACR,WAAK,SAAS,KAAK,IAAI,IAAI,SAAS;;AAEpC,WAAK,SAAS,KAAK,IAAI,IAAI,SAAS;AACpC,WAAK,SAAS,KAAK,IAAI,IAAI,SAAS;;AAEtC,WAAO,IAAI,SAAS,UAAU,GAAG,IAAI,SAAS;;AAGhD;AACE,YAAQ,OAAO,IAAI,SAAS,KAAK,MAAM,OAAO;AAC9C,QAAI,KAAK;AAAU,aAAO,IAAI,KAAK,SAAS,SAAS,UAAU,GAAG,IAAI,KAAK;AAC3E,WAAO;;AAIT;AACE,WAAO,IAAI,SAAS,KAAK;;AAG3B;AACE,WAAO,OAAO,IAAI,SAAS,KAAK,MAAM,OAAO;;;;;;;;;;;;;;;;;;;;;;;;ACtS/C;;;;;;;;;;;;;;;;AAgBO,6BAAwB;AACxB,6BAAwB;;MAG3B;AACI,aAAK,UAAU;AACf,aAAK,YAAY;AACjB,aAAK,OAAO,IAAI;AAChB,aAAK,eAAe;;MAExB;AACI,YAAI,CAAC,KAAK,KAAK,IAAI;AACf,eAAK,UAAU,SAAS,KAAK,SAAS;;AAE1C,eAAO,KAAK,KAAK,IAAI;;MAEzB;AACI,aAAK;AACL,aAAK,KAAK,IAAI,QAAQ;;MAE1B;AACI,eAAO,KAAK,KAAK,IAAI;;MAEzB;AACI,aAAK;AACL,eAAO,KAAK,KAAK,OAAO;;MAE5B;AACI,eAAO,KAAK;;;;MAUhB;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAG7B;AACI,eAAO,mBAAkB;;MAG7B;AACI,eAAO,KAAK,qBAAqB,KAAK,mBAAkB;;MAE5D;AACI,eAAO,mBAAkB;;MAE7B,kBAAmB,GAAG,GAAG,YAAY,YAAY,MAAM,yBAAY;AAC/D,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB
;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B,aAAc,eAAO,QAAQ,UAAU,MAAM,yBAAY;AACrD,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B,sBAAuB,eAAO,QAAQ,UAAU,MAAM,yBAAY;AAC9D,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAG7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;;AAGjC;AACI,YAAM,IAAI,MAAM,IAAI;;ACvfxB;;;;;;;;;;;;;;;;AA8BO;AACH,oBAAc,OAAM;AACpB,iBAAW;AACX,mBAAY;AAEZ,aAAO,UAAU;AAEb,iBAAS,KAAK,WAAW,UAAW;AAEpC;AAEA,eAAO,OAAM;AACb,eAAM,WAAW,OAAM;AACvB,eAAM,UAAS;;;AAIhB;AACH,aAAO,KAAK,IAAI,MAAK,KAAK,IAAI,GAAG;;AAE9B;AACH,aAAO,MAAM,MAAM,IAAI,MAAM,MAAM;;AAEhC;AACH,iBAAU;AACV,mBAAa,GAAG,IAAI,IAAI,QAAQ;AAC5B,gBAAO,IAAI;;AAEf,aAAO;;AASJ;AACH,gBAAU,KAAK;AACf,aAAQ,IAAI,IAAM,KAAI,KAAK;;AAGxB;AACH,mBAAa;AACb,mBAAa,GAAG,IAAI,EAAE,QAAQ;AAC1B,qBAAa,OAAO,EAAE,MAAM,OAAO,EAAE;AACrC,kBAAU,OAAO;;AAErB,aAAO;;AAiBJ;AACH,UAAI,CAAC;AACD,cAAM,IAAI,MAAM,OAAO,QAAQ,WAAW,MAAM;;;AAGjD,qEAAgE;AACnE,cAAO,aAAY,QAAQ,SAAS,MAAM,qBAAqB,WAAW,cAAc;;AAErF;AACH,cAAO,KAAK,MAAM,MAAM;;AAqBrB,oCAA+B,qBAAqB;AACvD,UAAI,UAAU;AACV,iBAAS;;AAEb,UAAI,MAAM,QAAQ,QAAQ,cAAa,QAAQ,CAAC;AAC5C,qBAAa,GAAG,IAAI,IAAI,QAAQ,EAAE;AAC9B,mBAAQ,IAAI,IAAI,QAAQ;;;AAI5B,eAAO,KAAK;;AAEhB,aAAO;;AAaJ;AACH,UAAI,MAAM,WAAW;AAEjB,eAAO;;AAEX,iBAAW,MAAM;AACjB,mBAAa,GAAG,IAAI,MAAM,QAAQ;AAC9B,gBAAQ,MAAM;;AAElB,aAAO;;AAEJ;AACH,aAAO,MAAM,WAAW;;AAErB;AACH,UAAI,OAAO;AACP,eAAO;;AAEX,UAAI,MAAM,QAAQ,MAAM;AACpB,eAAO;;AAEX,UAAI,GAAG,WAAW,GAAG;AACjB,eAAO;;AAEX,mBAAa,GAAG,IAAI,GAAG,QAAQ;AAC3B,YAAI,GAAG,OAAO,GAAG;AACb,iBAAO;;;AAGf,aAAO;;AAEJ;AACH,aAAO,IAAI,MAAM;;AAEd;AAEH,UAAI,KAAK,QAAQ;AAEb,eAAO,KAAK,KAAK;;AAErB,UAAI,MAAM;AACN,eAAO;iBAEF,MAAM;AACX,eAAO;;AAGP,oBAAY,KAAK,IAAI,IAAI;AACzB,eAAQ,OAAM,KAAM,OAAM;;;AAG3B;AACH,oBAAc,KAAK,KAAK,KAAK,KAAK;AAClC,aAAO,CAAC,OAAO,KAAK,KAAK,OAAO;;AAc7B;AACH,8BAAwB,IAAI,YAAY;AACxC,mBAAa,GAAG,IAAI,GAAG,EAAE;AACrB,wBAAgB,KAAK;;AAEzB,eAAQ;AACR,aAAO;;AAEJ;AACH,UAAI,QAAQ,EAAE;AACV,eAAO;;AAEX,aAAO,IAAI,IAAI,OAAO,OAAO,EAAE;;AAE5B,6CAAwC,aAAa;AACxD,aAAO,IAAI,QAAQ;AACf,uBAAe;AACf,sBAAc;AACV,cAAI;AACA;AACA;;AAEJ;AACA,8BAAoB,QAAQ;AAC5B,cAAI,cAAc,QAAQ,YAAY;AAClC;AACA;;AAEJ,qBAAW,OAAO;;AAEtB;;;AAYD;AACH,sBAAgB;AAChB,wBAAkB;AAClB,mBAAa,GAAG,IAAI,MAAM,QAAQ,EAAE;AAChC,YAAI,MAAM,MAAM;AACZ,uBAAa,MAAM;mBAEd,MAAM,OAAO;AAClB,cAAI,gBAAgB
;AAChB,kBAAM,MAAM,yDACW,uBAAuB;;AAElD,wBAAc;mBAET,MAAM,KAAK;AAChB,gBAAM,MAAM,gCAAgC,MAAM,aAAa;;;AAGvE,UAAI,gBAAgB;AAChB,YAAI,OAAO,KAAK,SAAS;AACrB,gBAAM,MAAM,QAAQ,yCAAyC;;AAEjE,eAAO;;AAEX,UAAI,cAAc;AACd,cAAM,MAAM,qCAAqC;;AAGrD,UAAI,OAAO,cAAc;AACrB,cAAM,MAAM,wDACD,UAAU;;AAEzB,uBAAiB,MAAM;AACvB,eAAS,eAAe,OAAO;AAC/B,aAAO;;AAEJ;AACH,mBAAa,MAAM;AAEnB,aAAO,QAAQ,OAAO,MAAM,IAAI,UAAU,KAAK,GAAG,OAAO;AAEzD,cAAO,KAAK,MAAM,QAAM,MAAM,CAAC,QAAQ,KAAK,OAAO,MAAM,+CAA+C,SAAS,sBACjG;AAEhB,cAAO,KAAK,MAAM,QAAM,OAAM,MAAM,MAAM,0DAC1B;AAEhB,aAAO,KAAK,IAAI,OAAK,IAAI,IAAI,OAAO,IAAI;;AAGrC;AACH,uBAAiB;AACjB,uBAAiB;AACjB,2BAAqB,QAAQ,QAAQ,MAAM,QAAQ,SAAS,KAAK,WAAW;AAC5E,mBAAc,QAAQ,QAAQ,eAC1B,OACA,gBAAe,MAAM,OAAO;AAChC,cAAQ;AACR,mBAAa,GAAG,IAAI,MAAM,QAAQ,EAAE;AAChC,YAAI,QAAQ;AACR,cAAI,KAAK,OAAO,KAAK,MAAM,OAAO;AAC9B,kBAAM,IAAI,MAAM,sBAAsB,oBAAoB,MAAM;;AAEpE,cAAK,MAAK,MAAM,QAAQ,KAAK,KAAK,MAAM,MAAM,OAAO;AACjD,qBAAS,KAAK,MAAM;AACpB,qBAAS,KAAK;;AAElB,cAAI,KAAK,MAAM;AACX;;;AAGR,YAAI,MAAM,OAAO;AACb,mBAAS,KAAK,MAAM;AACpB,mBAAS,KAAK;;;AAGtB,aAAO,CAAE,UAAU;;AAEhB;AACH,mBAAa;AACb,UAAI,SAAS,QAAQ,UAAU;AAC3B,iBAAS,IAAI,aAAa;iBAErB,UAAU;AACf,iBAAS,IAAI,WAAW;iBAEnB,UAAU;AACf,iBAAS,IAAI,WAAW;;AAGxB,cAAM,IAAI,MAAM,qBAAqB;;AAEzC,aAAO;;AAEJ;AACH,mBAAa;AACb,UAAI,SAAS,QAAQ,UAAU;AAC3B,iBAAS,IAAI,aAAa;iBAErB,UAAU;AACf,iBAAS,IAAI,WAAW;iBAEnB,UAAU;AACf,iBAAS,IAAI,WAAW;iBAEnB,UAAU;AACf,iBAAS,IAAI,MAAM;;AAGnB,cAAM,IAAI,MAAM,qBAAqB;;AAEzC,aAAO;;AAEJ;AACH,mBAAa,GAAG,IAAI,KAAK,QAAQ;AAC7B,oBAAY,KAAK;AACjB,YAAI,MAAM,QAAQ,CAAC,SAAS;AACxB,gBAAM,MAAM,oBAAoB,iCAAiC;;;;AAKtE;AACH,aAAO,UAAU,UAAU,UAAU,eAAe,UAAU,aAC1D,UAAU,WAAW,UAAU;;AAMhC;AACH,UAAI,YAAY;AACZ,eAAO;;AAEX,UAAI,YAAY,aAAa,YAAY;AACrC,eAAO;;AAEX,UAAI,YAAY,WAAW,YAAY,aAAa,YAAY;AAC5D,eAAO;;AAEX,UAAI,YAAY,UAAU,YAAY;AAClC,eAAO;;AAEX,aAAO;;AAEJ;AACH,aAAO,aAAa,gBAAgB,aAAa,cAC7C,aAAa;;AAEd;AACH,UAAI,UAAU,aAAa,UAAU;AACjC,eAAO;iBAEF,UAAU;AACf,eAAO;iBAEF,UAAU;AACf,eAAO;;AAGP,cAAM,IAAI,MAAM,iBAAiB;;;AASlC;AACH,UAAI,OAAO;AACP,eAAO;;AAEX,kBAAY;AACZ,UAAI,QAAQ,OAAK,SAAS,EAAE;AAC5B,aAAO;;AAGJ;AACH,aAAO,OAAO,UAAU,YAAY,iBAAiB;;AAElD;AACH,aAAO,OAAO,UAAU;;AAErB;AACH,aAAO,OAAO,UAAU;;AAErB;AACH,UAAI,MAAM,QAAQ;AACd,eAAO,YAAW,OAAO;;AAE7B,UAAI,kBAAkB;AAClB,eAAO;iBAEF,kBAAkB,cAAc,kBAAkB;AACvD,eAAO;iBAEF,UAAS;AACd,eAAO;iBAEF,UAAS;AACd,eAAO;iBAEF,WAAU;AACf,eAAO;;AAEX,aAAO;;AAEJ;AACH,aAAO,CAAC,CAAE,MAAK,EAAE,eAAe,EAAE,QAAQ,EAAE;;AAEzC;AACH,mBAAa,OAAO,IAAI,MAAM,EAAE;AAC5B,YAAI,OAAO,MAAM;AACb,iBAAO;;;AAGf,aAAO;;AAEJ;AACH,mBAAa,MAAM;AACnB,UAAI,OAAO;AACP,eAAO;;AAIX,sBAAgB,IAAI,MAAM,OAAO;AACjC,cAAQ,OAAO,KAAK,MAAM,OAAO;AACjC,mBAAa,OAAO,GAAG,KAAK,GAAG,EAAE;AAC7B,gBAAQ,KAAK,QAAQ,IAAI,KAAK,MAAM,IAAI;;AAE5C,aAAO;;AAEX;AACI,kBAAY,IAAI;AAChB,UAAI,MAAM,WAAW;AACjB,kBAAU,MAAM;AAChB,qBAAa,GAAG,IAAI,GAAG;AACnB,cAAI,KAAK,EAAE,SAAS;;;AAIxB,kBAAU,MAAM;AAChB,qBAAa,MAAM,MAAM;AACzB,oBAAY,KAAK,OAAO,YAAY,MAAM;AAC1C,qBAAa,GAAG,IAAI,GAAG;AACnB,cAAI,KAAK,mBAAkB,SAAS,IAAI,KAAK,MAAM;;;AAG3D,aAAO;;AAGJ;AACH,UAAI,MAAM,WAAW;AAEjB,eAAO,EAAE;;AAEb,mBAAa,MAAM,OAAO,YAAY,MAAM;AAC5C,UAAI,SAAS;AAET,eAAO;;AAEX,UAAI,SAAS,EAAE;AACX,cAAM,IAAI,MAAM,IAAI,wCAAwC,EAAE;;AAElE,aAAO,mBAAkB,GAAG,OAAO;;AAEhC;AACH,qBAAc,qBAAoB,MAAM;AACxC,mBAAa,GAAG,IAAI,OAAM,QAAQ;AAC9B,eAAM,KAAK;;AAEf,aAAO;;AAEJ;AACH,UAAI,SAAS,QAAQ,UAAU,aAAa,UAAU;AAClD,eAAO,IAAI,aAAa;iBAEnB,UAAU;AACf,eAAO,IAAI,WAAW;iBAEjB,UAAU;AACf,eAAO,IAAI,WAAW;;AAGtB,cAAM,IAAI,MAAM,qBAAqB;;;AAQtC;AACH,mBAAa,MAAM,OAAO,gBAAgB,OAAO,MAAM;AACvD,UAAI,SAAS,QAAQ,UAAU;AAC3B,eAAO,eAAc,OAAO,IAAI,aAAa;iBAExC,UAAU;AACf,eAAO,eAAc,OAAO,IAAI,WAAW;iBAEtC,UAAU;AACf,eAAO,eAAc,OAAO,IAAI,WAAW;;AAG3C,cAAM,IAAI,MAAM,qBAAqB;;;AAGtC;AACH,YAAM,QAAQ;AACV,gBA
AO,OAAO,UAAU,YAAY,WAAW,GAAG,MAAM,0EAC1C;;;AAWf;AACH,UAAI,SAAS;AACT,eAAO;iBAEF,SAAS;AACd,eAAO,KAAK;;AAEhB,mBAAY,KAAK,KAAK,SAAS;AAC/B,mBAAa,GAAG,IAAI,KAAK,SAAS,GAAG,EAAE;AACnC,kBAAS,QAAQ,KAAK,KAAK;;AAE/B,aAAO;;AAUJ;AACH,UAAI,SAAS;AACT,eAAO;iBAEF,SAAS;AACd,eAAO,CAAC;;AAEZ,mBAAa,IAAI,MAAM;AACvB,mBAAa,GAAG,IAAI,KAAK,SAAS,GAAG,EAAE;AACnC,aAAK,KAAK,KAAK,MAAM,SAAQ,QAAQ;AACrC,kBAAS,KAAK,KAAK,QAAQ;;AAE/B,WAAK,KAAK,SAAS,KAAK;AACxB,aAAO;;AAOJ;AAOH,aAAO,UAAU,OAAO,QAAQ,OAAO,OAAO,SAAS;;ACrnB3D;;;;;;;;;;;;;;;;AAkBA,uCAAkC;;MAU9B;AACI,aAAK,SAAS;AACd,aAAK,QAAQ;AACb,aAAK,eAAe;AACpB,aAAK,WAAW;AAChB,aAAK;;MAET;AACI,YAAI,KAAK,YAAY;AACjB,kBAAQ,KAAK,YAAY,KAAK,oEACO;;AAEzC,aAAK,eAAe;AACpB,aAAK,WAAW;;MAEpB;AACI,aAAK,aAAa,YAAY,CAAE,cAAc;AAG9C,YAAI,KAAK,SAAS,aAAa;AAC3B,4BAAkB,KAAK,SAAS;AAChC,kBAAQ,KAAK,qCAAqC,aAAa;AAC/D,eAAK,IAAI,UAAU;;;YAGrB;AACF,YAAI,YAAY,KAAK;AACjB,iBAAO,KAAK,MAAM;;AAEtB,aAAK,MAAM,YAAY,MAAM,KAAK,aAAa;AAC/C,eAAO,KAAK,MAAM;;MAEtB;AACI,YAAI,YAAY,KAAK;AACjB,iBAAO,KAAK,MAAM;;AAEtB,0BAAkB,KAAK,aAAa;AACpC,YAAI,WAAU;AACV,gBAAM,IAAI,MAAM,QAAQ;;AAG5B,aAAK,MAAM,YAAY;AACvB,eAAO,KAAK,MAAM;;MAEtB;AACI,eAAO,KAAK,IAAI;;MAEpB;AACI,eAAO,KAAK,IAAI;;MAEpB;AACI,eAAO,KAAK;;UAGZ;AACA,eAAO,KAAK;;MAEhB;AACI,YAAI,KAAK,aAAa,aAAa;AAC/B,gBAAM,IAAI,MAAM,mBAAmB;;AAEvC,aAAK,MAAM,YAAY;AACvB,YAAI,KAAK,aAAa,UAAU,WAAW;AACvC,eAAK,aAAa,UAAU,QAAQ;;;MAG5C;AACI,YAAI,KAAK,aAAa,aAAa;AAC/B,gBAAM,IAAI,MAAM,yBAAyB;;AAE7C,eAAO,KAAK,aAAa,UAAU;;MAEvC;AACI,aAAK,QAAQ,OAAO,OAAO,IAAI;;MAEnC;AACI,aAAK,QAAQ;AACb,aAAK,WAAW;AAChB,aAAK;;MAET;AACI,YAAI,OAAO,KAAK,WAAW,eACvB,OAAO,KAAK,OAAO,aAAa,eAChC,OAAO,KAAK,OAAO,SAAS,WAAW;AACvC;;AAEJ,0BAAkB,gBAAe,KAAK,OAAO,SAAS;AACtD,YAAI,8BAA6B;AAC7B,4BAAkB,UAAU,4BAA2B,MAAM;AAC7D,oBAAU,QAAQ;AACd,iCAAqB,SAAS,MAAM;AACpC,iBAAK,SAAS,OAAO,YAAW,KAAK;;;;;AAK9C;AACH,qBAAe;AACf,kBAAY,QAAQ,+BAA+B;AAC/C,qBAAY,QAAQ,EAAE,IAAI,EAAE;AAC5B,eAAO,EAAE,KAAK;;AAElB,aAAO;;AAEX;AACI,aAAO,mBAAmB,SAAS,mBAAmB,SAAS;;AAEnE;AACI,cAAQ,MAAM;AACd,UAAI,UAAU,UAAU,UAAU;AAC9B,eAAO,UAAU;iBAEZ,GAAG,CAAC,YAAY;AACrB,eAAO,CAAC;;AAEZ,YAAM,IAAI,MAAM,oCAAoC,kBAAkB;;AAUnE;AACH,aAAO,SAAA;;AAEA,aAAA,MAAM;AACV;AACH,eAAA,MAAM;;AC9JV;;;;;;;;;;;;;;;;AAoBA;AAEO;AACH,UAAI,oBAAmB;AAEnB;AACA,YAAI,OAAQ,WAAY;AACpB,eAAK;mBAEA,OAAQ,WAAY;AACzB,eAAK;mBAEA,OAAQ,YAAa;AAC1B,eAAK;mBAEA,OAAQ,SAAU;AACvB,eAAK;;AAGL,gBAAM,IAAI,MAAM;;AAEpB,2BAAkB;;AAEtB,aAAO;;AAGX;AACI,iBAAW;AACX,UAAI,GAAG,cAAc;AACjB,WAAG,aAAa,IAAI;;AAExB,aAAO,GAAG;;AASP;AACH,wBAAkB;AAClB,UAAI,UAAU,IAAI;AACd,eAAO,UAAU,IAAI;;AAGrB,0BAAkB;AAClB,kBAAU,IAAI,KAAK;AACnB,eAAO,UAAU,IAAI;;;ACpEjB,iBAAO;AACP,iBAAQ;AACR,kBAAS;AACT,iBAAO;AACP,kBAAQ;AACR,gBAAO;AACP,gBAAO;AACP,oBAAU;AACV,mBAAU;AACV,iBAAQ;AACR,kBAAS;AACT,iBAAQ;AACR,kBAAS;AACT,kBAAS;AACT,qBAAW;AACX,4BAAmB;AACnB,sBAAa;AACb,8BAAqB;AACrB,yBAAe;AACf,2BAAkB;AAClB,wBAAe;AACf,kBAAQ;AACR,iBAAQ;AACR,yBAAe;AACf,qBAAW;AACX,oBAAU;AACV,oBAAU;AACV,iCAAwB;AACxB,iCAAuB;AACvB,mBAAU;AACV,mCAA0B;AAC1B,kCAAyB;AACzB,iBAAO;AACP,iBAAQ;AACR,oBAAU;AACV,2BAAiB;AACjB,0BAAgB;AAChB,mCAAyB;AACzB,gDAAuC;AACvC,+CAAsC;AACtC,iBAAQ;AACR,uBAAc;AACd,oCAA2B;AAC3B,qCAA4B;AAC5B,iBAAO;AACP,iBAAO;AACP,oBAAW;AACX,gBAAO;AACP,mBAAS;AACT,iBAAO;AACP,kBAAS;AACT,gBAAO;AACP,kBAAQ;AACR,2BAAiB;AACjB,kBAAS;AACT,sBAAY;AACZ,4BAAkB;AAClB,sBAAY;AACZ,sBAAY;AACZ,qBAAW;AACX,0BAAgB;AAChB,sBAAY;AACZ,iBAAQ;AACR,iBAAQ;AACR,qBAAY;AACZ,kBAAS;AACT,kBAAS;AACT,kBAAQ;AACR,uBAAa;AACb,qBAAY;AACZ,iBAAO;AACP,kBAAS;AACT,wBAAc;AACd,uBAAc;AACd,sBAAa;AACb,uBAAc;AACd,gBAAO;AACP,wBAAe;AACf,iBAAO;AACP,qBAAW;AACX,qBAAW;AACX,4BAAmB;AACnB,sBAAa;AACb,8BAAqB;AACrB,8BAAqB;AACrB,iBAAQ;AACR,iBAAO;AACP,qBAAW;AACX,sBAAa;AACb,gBAAO;AACP,sBAAY;AACZ,oBAAU;AACV,sB
AAY;AACZ,iCAAuB;AACvB,iCAAuB;AACvB,iCAAuB;AACvB,sBAAY;AACZ,oBAAU;AACV,mBAAS;AACT,iBAAQ;AACR,iBAAO;AACP,mBAAS;AACT,iBAAQ;AACR,kBAAS;AACT,iBAAQ;AACR,uBAAc;AACd,kBAAQ;AACR,qBAAW;AACX,kCAAyB;AACzB,sCAA6B;AAC7B,4BAAkB;AAClB,+BAAsB;AACtB,mBAAS;AACT,qBAAW;AACX,kBAAS;AACT,mBAAS;AACT,uBAAa;AACb,sBAAY;AACZ,iBAAQ;AACR,mBAAS;AACT,iBAAO;AACP,iBAAQ;AACR,iBAAQ;AACR,qBAAW;AACX,qBAAY;AACZ,kBAAQ;AACR,iBAAO;AACP,2BAAkB;AAClB,oBAAU;AACV,qBAAW;AACX,+BAAqB;AACrB,oBAAU;AACV,iBAAO;AACP,0BAAiB;AACjB,0BAAgB;AAChB,gBAAO;AACP,kBAAQ;AACR,kBAAQ;AACR,iBAAQ;AACR,uBAAa;AACb,mBAAU;AACV,oBAAU;AACV,+BAAsB;AACtB,uBAAa;AAIb,kBAAQ;AACR,uBAAc;AACd,8BAAoB;AACpB,0BAAgB;AAChB,yBAAe;AACf,kCAAwB;ACxJpC;;;;;;;;;;;;;;;;AAkBA,4BAAuB,WAAU,kBAAkB,MAAM,IAAI;AAC7D,0BAAqB,WAAU,gBAAgB,MAAM,IAAI;AAOlD;AACH,kBAAY,SAAQ,YAAY;AAChC,aAAO,gBAAe,IAAI;;AAMvB;AACH,aAAO,cAAa,IAAI;;AAErB;AACH,iBAAW,gBAAe;AAC1B,qBAAe;AACf,aAAO;AACH,eAAQ,MAAM,SAAU,GAAG;AAC3B,YAAI;AACA;;AAEJ,+BAAsB;AACtB,2BAAmB,IAAI,MAAM;AAC7B,YAAI,aAAY;AACZ,iBAAO,KAAK;;;AAGpB,aAAO;;AAaJ;AACH,aAAQ,YAAY,eAAgB;AACpC,kBAAY,SAAQ,YAAY;AAChC,UAAI,gBAAe,IAAI;AACnB,gBAAQ,KAAK,eAAe,4BACpB;;AAEZ,sBAAe,IAAI,KAAK;;AAUrB;AACH,aAAQ,cAAe;AACvB,UAAI,cAAa,IAAI;AAGjB,YAAI,OAAM,QAAQ;AACd,kBAAQ,KAAK,gCAAgC;;;AAGrD,oBAAa,IAAI,YAAY;;AAS1B;AACH,kBAAY,SAAQ,YAAY;AAChC,UAAI,CAAC,gBAAe,IAAI;AACpB,cAAM,IAAI,MAAM,eAAe,4BACvB;;AAEZ,sBAAe,OAAO;;AAGnB;AACH,UAAI,CAAC,cAAa,IAAI;AAClB,cAAM,IAAI,MAAM,iBAAiB;;AAErC,oBAAa,OAAO;;AAQjB;AACH,sBAAgB,sBAAqB;AACrC,cAAQ,QAAQ;AACZ,gCAAwB,OAAO,OAAO,IAAI,cAAc,CAAE,aAAa;AACvE,wBAAe;;;AAGvB;AACI,aAAO,GAAG,eAAe;;AChI7B;;;;;;;;;;;;;;;;AAsBO;AACH,UAAI,UAAU;AACV,eAAO,cAAa;;AAExB,aAAO,cAAa,CAAC,QAAQ;;AAEjC;AACI,aAAQ,aAAa,gBAAgB,UAAU,aAC1C,aAAa,cAAc,UAAU,WACrC,aAAa,cAAc,UAAU;;AAEvC;AACH,UAAI,UAAU;AACV,cAAM,IAAI,MAAM;;AAEpB,UAAI,MAAM,QAAQ;AACd,YAAI,SAAa;;AAErB,UAAI,OAAM,QAAQ;AACd,kCAA8B,GAAG;;AAErC,UAAI,oBAAmB,GAAG;AACtB,eAAO;;AAEX,UAAI,SAAS,QAAQ,UAAU,aAAa,UAAU;AAClD,eAAO,IAAI,aAAa;iBAEnB,UAAU;AACf,eAAO,IAAI,WAAW;iBAEjB,UAAU;AACf,qBAAa,IAAI,WAAW,EAAE;AAC9B,qBAAa,GAAG,IAAI,KAAK,QAAQ,EAAE;AAC/B,cAAI,KAAK,MAAM,EAAE,QAAQ;AACrB,iBAAK,KAAK;;;AAGlB,eAAO;;AAGP,cAAM,IAAI,MAAM,qBAAqB;;;AActC;AACH,aAAO,OAAM,SAAS;;AAkBnB;AACH,aAAO,OAAM,SAAS,MAAM,MAAM;;AAU/B,yCAAoC;AACvC,iBAAW,YAAY;AACvB,aAAO,OAAM,SAAS,OAAO,GAAG;;AAU7B,6CAAwC;AAC3C,iBAAW,YAAY;AACvB,aAAO,OAAM,SAAS,OAAO,OAAO;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACxHxC;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,eAAe;AACpB,aAAK,SAAS;AACd,YAAI,UAAU;AACV,eAAK,SAAS,IAAI;;;MAG1B;AACI;AACA,oCAA4B;AACxB,oBAAU;;AAEd,sBAAc,KAAK,aAAa,KAAK;AACrC,qBAAa,GAAG,IAAI,QAAQ,QAAQ;AAChC,yBAAe,QAAQ;AAGvB,iBAAO,OAAO,KAAK;AACf,uCAA0B,YAAY,OAAO,OAAO;;;AAG5D,8BAAsB;UAClB;UACA;UACA;UACA,QAAQ,MAAM,KAAK,YAAU,OAAO;UACpC,WAAW,MAAM,KAAK,YAAU,OAAO,uBAAuB,OAC1D,OAAO,wBACP;;AAER,eAAO;;MAEX;AACI,eAAQ,YAAY,SAAS,QAAQ,QAAQ,aAAc;AAC3D,gBAAQ,QAAQ;AACZ,kBAAQ,IAAI,CAAC,OAAO,QAAQ,QAAQ,YAAY,KAAK;AACjD,iBAAK,OAAO,iBAAiB,YAAY,QAAQ,eAAe,IAAI,eAAe,IAAI,QAAQ,eAAe;;;;;AAKvH;AACH,UAAI,UAAU;AAEV,eAAO;;AAEX,mBAAa,GAAG,IAAI,KAAK,QAAQ;AAC7B,oBAAY,KAAK;AACjB,YAAI,MAAM,QAAQ,CAAC,SAAS;AAExB,kBAAQ,KAAK,SAAS,yBAAyB;AAC/C,iBAAO;;;AAGf,aAAO;;;MAGP;AACI,sBAAa,OAAO,WAAW,WAAW,UAAc,GAAG,YAAY,KACnE,OAAO;AACX,2BAAmB,UAAc,MAAM;AACvC,qBAAa,OAAO;AACpB,qBAAa,OAAO;AACpB,sBAAc,UAAc,OAAO,MAAM,YAAY;AACrD,qCAA6B;AAC7B,4BAAmB;AACf,yBAAc,OAAO;AACrB,cAAI,UAAS;AAGT,+BAAmB,OAAM,SAAS,OAAO;AACzC,8BAAkB,WAAW;AAC7B,sCACI,GAAG,UAAS,cAAc,YAAY,IAAI,aAAa;;;AAGnE,gBAAQ,IAAI,KAAK,gBAAiB,WAAW,SAAS,WAAY,UAAW,4BAA6B,aAAa,oBAAoB,aAAa,cAAc,iBAAiB,gBAAgB;;;AC9F/M;;;;;;;;;;;;;;;;AAyBO;AAGH,2BAAqB;AACrB,yBAAmB;AACnB,mBAAa,GAAG,IAAI,GAAG,QAAQ;AAC3B,qBAAa,G
AAG,GAAG,MAAM;;AAE7B,mBAAa,GAAG,IAAI,MAAK,QAAQ;AAC7B,qBAAa,MAAK;AAClB,2BAAmB,KAAK;AACxB,gCAAwB;AACpB,yBAAc,WAAW;AACzB,8BAAoB;AACpB,uBAAa,GAAG,IAAI,GAAG,QAAQ;AAC3B,gBAAI,aAAa,OAAM;AACnB,mBAAK,QAAQ,QAAQ,YAAU,aAAa,OAAO,MAAM;AACzD,8BAAgB;AAChB,yBAAW,KAAK,MAAM;AACtB;;;AAGR,cAAI;AACA;;;;AAKZ,6BAAuB;AACvB,qBAAe,EAAE,MAAM;AACvB,uBAAiB;AACjB,mBAAa,MAAK,SAAS,GAAG,KAAK,GAAG;AAClC,qBAAa,MAAK;AAClB,2BAAmB,KAAK;AAExB,qBAAa,GAAG,IAAI,KAAK,QAAQ,QAAQ;AACrC,cAAI,eAAe,KAAK,QAAQ,GAAG;AAC/B,oCAAwB;AACpB,6BAAe,WAAW,WAAW,MAAM;AAC3C,uBAAS,KAAK,MAAM;;AAExB;;;;AAKZ,2BAAqB;AACrB,mBAAa,GAAG,IAAI,MAAK,QAAQ;AAC7B,qBAAa,MAAK;AAClB,YAAI,WAAW,KAAK,OAAO,SAAS,KAAK;AAErC,+BAAqB;AACrB,kCAAwB,KAAK;AACzB,8BAAkB,KAAK,OAAO;AAC9B,gBAAI,aAAa,UAAU;AACvB,2BAAa,aAAa;;;AAIlC,6BAAmB,OAAO,OAAO,IAAI;AACrC,qBAAW,SAAS;AACpB,qBAAW,UAAU,KAAK;AAC1B,uBAAa,KAAK;;;AAG1B,aAAO;;AASJ;AAEH,mBAAa,aAAa,SAAS,GAAG,KAAK,GAAG;AAC1C,qBAAa,aAAa;AAC1B,oBAAY;AACZ,aAAK,QAAQ,QAAQ;AACjB,6BAAmB,6BAA6B,EAAE;AAClD,cAAI,cAAc;AACd,gBAAI,KAAK;;AAKT,gBAAI,KAAK;;;AAGjB,YAAI,KAAK,YAAY;AACjB,gBAAM,IAAI,MAAM,4DACL,KAAK;;AAGpB,+BAAuB,KAAK,SAAS;AACrC,gCAAwB,KAAK;AACzB,cAAI,CAAE,cAAa;AACf,kBAAM,IAAI,MAAM,iCAAiC,yCACf,OAAO,KAAK;;AAGlD,qBAAW,MAAK,MAAM,eAAe;AACrC,cAAI,GAAG,UAAU;AACb,kBAAM,IAAI,MAAM,4BAA4B,KAAK,qCAC1C,iDAAiD,GAAG;;AAE/D,oBAAU,KAAK,OAAO;AACtB,cAAI,CAAC,aAAiB,GAAG,OAAO,EAAE;AAC9B,kBAAM,IAAI,MAAM,4BAA4B,KAAK,sCACzC,yBAAyB,GAAG,wDACL,EAAE;;AAErC,cAAI,6BAA6B,EAAE,OAAO;AACtC,yCAA6B,EAAE,MAAM;;AAGrC,gCAAoB,6BAA6B,EAAE;AACnD,yCAA6B,EAAE,MAAM,KAAI,aAAa;AACtD,wBAAY;;;;;AChJ5B;;;;;;;;;;;;;;;;AAkBA,mCAA8B;AAE9B,wCAAmC;AAEnC,mCAA8B;AACvB;AACH,sBAAgB,gBAAe;AAC/B,wBAAkB,yBAAwB,MAAM,OAAO,OAAO;AAC9D,mBAAa,MAAM;AACnB,wBAAkB,mBAAkB,MAAM,OAAO,OAAO,SAAS;AACjE,oBAAc,CAAC;AACf,UAAI;AACA,cAAM,KAAK,YAAY;AACvB,cAAM,KAAK,WAAW;AACtB,cAAM,KAAK,aAAa;AACxB,cAAM,KAAK;;AAEf,YAAM,KAAK,UAAU,IAAI,OAAK,SAAS,GAAG,KAAK;AAC/C,aAAO,MAAM,KAAK;;AAEtB;AACI,gBAAU,eAAc;AACxB,sBAAgB,QAAQ,QAAQ,SAAS;AACzC,wBAAkB,IAAI,MAAM,SAAS,KAAK;AAC1C,mBAAa,MAAM;AACnB,6BAAuB,UAAU,cAAc,qBAAoB,QAAQ;AAC3E,UAAI,OAAO;AACP,uBAAe,GAAG,MAAM,IAAI,SAAS;AACjC,yBAAe,MAAM;AACrB,uBAAa,GAAG,IAAI,SAAS;AACzB,sBAAU,KAAK,KAAK,IAAI,UAAU,IAAI,aAAY,eAAe,SAAS,IAAI,GAAG,OAAO;;;;AAIpG,aAAO;;AAEX;AACI;AACA,UAAI,MAAM,QAAQ;AACd,iBAAS,GAAG,WAAW,IAAI,GAAG,QAAQ,8BAC/B,WAAW,IAAI,GAAG,QAAQ;iBAE5B,UAAS;AACd,iBAAS,IAAI;iBAER,UAAU;AACf,iBAAS,iBAAgB;;AAGzB,iBAAS,WAAW,IAAI,QAAQ,yBAAwB;;AAE5D,aAAO,UAAS,QAAQ;;AAE5B;AACI,aAAO,MAAM,IAAI,UAAU;;AAE/B,iFAA4E;AACxE,gCAA0B,UAAU,cAAc,IAAI;AACtD,mBAAa,MAAM;AACnB,mBAAa,MAAM;AACnB,UAAI,SAAS;AACT,YAAI,UAAU;AACV,+BAAqB,qBAAoB;AACzC,iBAAO,CAAC,aAAY,aAAa,IAAI,GAAG;;AAE5C,YAAI,UAAU;AACV,iBAAO,CAAC,iBAAgB,KAAK;;AAEjC,eAAO,CAAC,KAAK,GAAG;;AAEpB,UAAI,SAAS;AACT,YAAI,OAAO;AACP,gCAAsB,8BAA6B;AACnD,0BAAgB,MAAM,KAAK,KAAK,MAAM,GAAG;AACzC,yBAAe,MAAM,KAAK,KAAK,MAAO,QAAO,+BAA8B,mBAAmB,OAAO;AACrG,cAAI,UAAU;AACV,wBAAY,qBAAoB;AAChC,uBAAW,qBAAoB;;AAEnC,iBAAO;YACH,MACI,UAAU,IAAI,UAAU,aAAY,GAAG,UAAU,IAAI,QAChD,KAAK,QACV,YACA,SACK,IAAI,UAAU,aAAY,GAAG,UAAU,OAAO,8BAA6B,IAAI,QAC/E,KAAK,QACV;;;AAGZ,4BAAoB,UAAU,cAAc,qBAAoB,QAC5D,MAAM,KAAK;AACf,eAAO;UACH,MACI,YAAY,IAAI,UAAU,aAAY,GAAG,UAAU,IAAI,QAClD,KAAK,QACV;;;AAIZ,uBAAiB,MAAM,MAAM;AAC7B,yBAAmB,QAAQ,MAAM;AACjC,qBAAe,QAAQ,KAAK;AAC5B,oBAAc;AACd,UAAI,OAAO;AACP,qBAAa,GAAG,IAAI,6BAA4B;AAC5C,wBAAc,IAAI;AAClB,sBAAY,QAAQ;AACpB,gBAAM,KAAK,GAAG,mBAAkB,KAAK,MAAM,OAAO,MAAM,UAAU,OAAO,YAAY,WAAW;;AAEpG,cAAM,KAAK;AACX,qBAAa,OAAO,6BAA4B,IAAI,MAAM;AACtD,wBAAc,IAAI;AAClB,sBAAY,QAAQ;AACpB,gBAAM,KAAK,GAAG,mBAAkB,KAAK,MAAM,OAAO,MAAM,UAAU,OAAO,YAAY,WAAW,MAAM,OAAO;;;AAIjH,qBAAa,GAAG,IAAI,MAAM;AACtB,wBAAc,IAAI;AAClB,sBAAY,QAAQ;AA
CpB,gBAAM,KAAK,GAAG,mBAAkB,KAAK,MAAM,OAAO,MAAM,UAAU,OAAO,YAAY,WAAW,MAAM,OAAO;;;AAGrH,kBAAY,SAAS,IAAI,MAAM;AAC/B,YAAM,KAAK,MAAM,MAAM,KAAK;AAC5B,mBAAa,GAAG,IAAI,MAAM,SAAS,GAAG;AAClC,cAAM,KAAK,MAAM,MAAM,KAAK;;AAEhC,uBAAiB;AACjB,mBAAa,GAAG,IAAI,MAAM;AACtB,sBAAc;;AAElB,YAAM,MAAM,SAAS,KACjB,MAAM,MAAM,MAAM,SAAS,KAAK,MAAO,UAAS,KAAK;AACzD,aAAO;;AAEX;AACI,4BAAsB;AACtB,mBAAa,GAAG,IAAI,KAAK,QAAQ,KAAK;AAClC,sBAAc,KAAK,CAAC,KAAK,IAAI,KAAK,IAAI;;AAE1C,aAAO;;AChKX;;;;;;;;;;;;;;;;;MA4BI;AACI,aAAK,QAAQ;AACb,aAAK,QAAQ,MAAM;AACnB,aAAK,OAAO,eAAmB;AAC/B,YAAI,UAAU;AACV,oBAAU,OAAO;AACjB,kBAAY,MAAM,KAAK,MAAM,MAAM,qBAAqB,qDAC1B,KAAK;;AAEvC,YAAI,UAAU;AACV,gBAAM,IAAI,MAAM;;AAIpB,aAAK,SAAS,UAAU,mBAAuB,OAAO,KAAK;AAC3D,aAAK,UAAU,gBAAe;;MAUlC;AACI,YAAI,KAAK,WAAW;AAChB,iBAAO,CAAC;;AAEZ,gBAAY,KAAK,WAAW,KAAK,MAAM,MAAM,uCAAuC,KAAK,gCAClE,KAAK;AAC5B,uBAAc,KAAK,WAAW;AAC9B,aAAK,OAAO,UAAS;;MASzB;AACI,YAAI,KAAK,WAAW;AAChB,iBAAO,CAAC;;AAEZ,gBAAQ;AACR,0BAAkB;AACd,cAAI,MAAM,KAAK,OAAO,KAAK,MAAM;AAC7B,wBAAY,qCAAqC,wBAC3B,KAAK;AAC3B,kBAAM,IAAI,MAAM;;AAEpB;;AAEJ,qBAAY,KAAK,KAAK,SAAS;AAC/B,sBAAa,GAAG,KAAI,KAAK,SAAS,GAAG,EAAE;AACnC,oBAAS,KAAK,QAAQ,MAAK,KAAK;;AAEpC,eAAO,KAAK,OAAO;;MAEvB;AACI,YAAI,KAAK,SAAS;AACd,iBAAO;mBAEF,KAAK,SAAS;AACnB,iBAAO,KAAK;;AAEhB,qBAAY,KAAK,KAAK,SAAS;AAC/B,qBAAa,GAAG,IAAI,KAAK,SAAS,GAAG,EAAE;AACnC,oBAAS,KAAK,QAAQ,KAAK,KAAK;;AAEpC,eAAO;;MAEX;AACI,YAAI,KAAK,SAAS;AACd,iBAAO;mBAEF,KAAK,SAAS;AACnB,iBAAO,CAAC;;AAEZ,qBAAa,IAAI,MAAM,KAAK,MAAM;AAClC,qBAAa,GAAG,IAAI,KAAK,SAAS,GAAG,EAAE;AACnC,eAAK,KAAK,KAAK,MAAM,SAAQ,KAAK,QAAQ;AAC1C,oBAAS,KAAK,KAAK,KAAK,QAAQ;;AAEpC,aAAK,KAAK,SAAS,KAAK;AACxB,eAAO;;UAEP;AACA,eAAO,KAAK,MAAM;;MAOtB;AACI,eAAO,aAAY,WAAW,KAAK,QAAQ,KAAK,OAAO,KAAK;;;AAIpE,qBAAgB;AAEhB,qBAAgB;AAEhB,gCAA2B;AAI3B,KAAC;AAMM;AACH,mBAAY;;AAOT;AACH,mBAAY;;AAMT;AACH,8BAAuB;;;MAWvB;AAEI,aAAK,OAAO;AACZ,aAAK,qBAAqB;AAC1B,aAAK,QAAQ,MAAM;AACnB,aAAK,QAAQ,SAAS;AACtB,aAAK,OAAO,eAAmB;AAC/B,aAAK,UAAU,gBAAe;AAC9B,aAAK,SAAS;AACd,aAAK,KAAK;AACV,aAAK,WAAY,KAAK,OAAO,IAAI,KAAK,KAAK,aAAa;;UAExD;AACA,eAAO,KAAK,MAAM;;YAOhB;AACF,qBAAa,MAAM,KAAK;AACxB,eAAO,WAAU,OAAO,KAAK,OAAO,KAAK,OAAO;;MAMpD;AACI,eAAO,WAAU,OAAO,KAAK,OAAO,KAAK,OAAO,KAAK;;YAQnD;AACF,qBAAa,MAAM,KAAK;AACxB,eAAO,eAAc,KAAK,OAAO;;MAQrC;AACI,eAAO,eAAc,KAAK,OAAO,KAAK;;YAQpC;AACF,aAAK;AACL,sBAAa,aAAY,KAAK,KAAK;AACnC,YAAI,KAAK,UAAU;AACf,wBAAc,MAAM;AACpB;AACI,mBAAO,MAAM,IAAI,OAAK,cAAkB;;AAGxC,kBAAM,IAAI,MAAM;;;AAIxB,eAAO;;MAQX;AACI,aAAK;AACL,sBAAa,aAAY,SAAS,KAAK;AACvC,YAAI,KAAK,UAAU;AACf;AACI,mBAAO,MAAK,IAAI,OAAK,cAAkB;;AAGvC,kBAAM,IAAI,MAAM;;;AAIxB,eAAO;;YAGL;AACF,aAAK;AACL,sBAAa,MAAM,aAAY,KAAK,KAAK;AACzC,YAAI,KAAK,UAAU;AACf,iBAAO;;AAGP,iBAAO,IAAI,WAAW,MAAK;;;MAQnC;AACI,YAAI,KAAK;AACL;;AAEJ,qBAAY,cAAc;AAC1B,aAAK,qBAAqB;;UAE1B;AACA,eAAO,KAAK;;MAEhB;AACI,YAAI,KAAK;AACL,gBAAM,IAAI,MAAM;;;MAWxB,gBAAgB;AACZ,eAAO,WAAU,MAAM,MAAM;;MAMjC;AACI,aAAK;AACL,eAAO,WAAU,MAAM;;MAO3B,mBAAmB;AACf,qBAAa,KAAK;AAClB,eAAO,gBAAe,MAAM,KAAK,OAAO,KAAK,OAAO;;MAExD;AACI,aAAK;AACL,eAAO,WAAU,KAAK,MAAM;;MAEhC,qBAAqB;AACjB,aAAK;AACL,eAAO,aAAY,aAAa,MAAM,WAAW,MAAM;;;AAG/D,WAAO,eAAe,SAAQ,OAAO,aAAa;MAC9C,OAAO;AAMH,eAAO,CAAC,CAAC,aAAY,UAAS,QAAQ,QAAQ,UAAS,YAAY,QAC/D,UAAS,mBAAmB;;;4BAQV;MAC1B;AACI,cAAM,aAAa,OAAO,aAAa,OAAO,aAAa,QAAQ;AACnE,aAAK,YAAY;AACjB,aAAK,OAAO;;MAUhB;AACI,YAAI,SAAS,UAAU,KAAK;AACxB,gBAAM,IAAI,MAAM,2BAA2B,SAAS,8BAC7B,KAAK;;AAEhC,YAAI,CAAC,aAAiB,SAAS,OAAO,KAAK;AACvC,gBAAM,IAAI,MAAM,2BAA2B,SAAS,8BAC7B,KAAK;;AAEhC,qBAAY,cAAc;AAC1B,aAAK,SAAS,SAAS;AACvB,qBAAY,OAAO,MAAM;;MAE7B;AACI,qBAAY,gBAAgB;AAC5B,aAAK,qBAAqB;;;AAGlC,WAAO,eAAe,WAAU,OAAO,aAAa;MAChD,OAAO;AACH,eAAO,qBAAoB,WAAU,UAAS,UAAU,QACpD,UAAS,kBAAkB;;;AC5XvC;;;;
;;;;;;;;;;;;AAiBC,IAAA;AACG,YAAK,QAAQ;AACb,YAAK,QAAQ;AACb,YAAK,QAAQ;AACb,YAAK,QAAQ;AACb,YAAK,QAAQ;AACb,YAAK,QAAQ;AACb,YAAK,QAAQ;OACd,SAAA,QAAS,UAAA,OAAO;AAGnB;AACC,IAAA;AACG,yBAAkB,aAAa;AAC/B,yBAAkB,WAAW;AAC7B,yBAAkB,UAAU;AAC5B,yBAAkB,eAAe;OAClC,sBAAsB,sBAAoB;AAC7C;AACC,IAAA;AACG,wBAAiB,aAAa;AAC9B,wBAAiB,WAAW;AAC5B,wBAAiB,UAAU;AAC3B,wBAAiB,eAAe;OACjC,qBAAqB,qBAAmB;AAC3C;AACC,IAAA;AACG,2BAAoB,aAAa;AACjC,2BAAoB,WAAW;AAC/B,2BAAoB,UAAU;AAC9B,2BAAoB,eAAe;OACpC,wBAAwB,wBAAsB;AACjD;AACC,IAAA;AACG,6BAAsB,aAAa;AACnC,6BAAsB,WAAW;AACjC,6BAAsB,UAAU;AAChC,6BAAsB,eAAe;OACtC,0BAA0B,0BAAwB;AACrD,2BAAsB;MAClB,SAAW;MACX,OAAS;MACT,MAAQ;MACR,WAAa;;AAEV;AACH,UAAI,UAAU,YAAY,UAAU;AAChC,YAAI,UAAU,YAAY,UAAU;AAChC,iBAAO;;AAEX,cAAM,IAAI,MAAM,kBAAkB,cAAc;;AAEpD,aAAO,eAAc,OAAO;;AAGzB;AACH,aAAO,YAAW,MAAM;;ACzE5B;;;;;;;;;;;;;;;;AAmBO;AACH,UAAI,EAAE,UAAU,EAAE;AACd,eAAO,CAAC,GAAG;;AAEf,oBAAc,YAAW,EAAE,OAAO,EAAE;AACpC,aAAO,CAAC,EAAE,KAAK,QAAQ,EAAE,KAAK;;AAE3B;AACH,cAAO,EAAE,UAAU,EAAE,OAAO,MAAM,2BAA2B,EAAE,qBAChD,EAAE;;AAEd;AACH,aAAO,WAAW,KAAK,OAAK,EAAE,OAAO,QAAO;;AAczC;AACH,mBAAa;AACb,mBAAa,IAAI;AACjB,2BAAoB,QAAQ,MAAM;AAClC,aAAO;;AAEX;AACI,UAAI,aAAa;AACb;;AAEJ,UAAI,qBAAqB;AACrB,aAAK,KAAK;AACV;;AAEJ,UAAI,CAAC,YAAW;AACZ;;AAGJ,uBAAiB;AACjB,sBAAgB;AACZ,oBAAY,SAAS;AACrB,YAAI,CAAC,KAAK,IAAI;AACV,eAAK,IAAI;AACT,+BAAoB,KAAK,MAAM;;;;AAK3C;AACI,aAAO,MAAM,QAAQ,QAAQ,OAAO,QAAQ;;;;;;;;;AC1EhD;;;;;;;;;;;;;;;;;MA4BI;AAEI,aAAK,sBAAsB;AAC3B,aAAK,iBAAiB;AACtB,aAAK,WAAW;AAChB,aAAK,aAAa;AAClB,aAAK,mBAAmB;AACxB,aAAK,iBAAiB;AAItB,aAAK,gBAAgB;AAGrB,aAAK,cAAc;AACnB,aAAK,aAAa;AAKlB,aAAK,oBAAoB;AACzB,aAAK,cAAc;AACnB,aAAK,aAAa,IAAI;AACtB,aAAK,YAAY;AACjB,aAAK,gBAAgB,CAAE,UAAU,GAAG,YAAY,GAAG,WAAW,GAAG,SAAS,IAAI,QAAQ;;MAE1F;AACI,mCAA2B,KAAK;AAC5B,eAAK,oBAAoB,cAAc;;;;;MAK/C;AACI,aAAK,MAAM;AACX,aAAK,WAAW;AAChB,aAAK,kBAAkB;AACvB,aAAK,uBAAuB;AAC5B,aAAK,QAAQ,IAAI;;YAEf;AACF,YAAI,KAAK,sBAAsB;AAC3B,iBAAO,KAAK,mBAAmB,KAAK;;;AAExC,YAAI,KAAK,mBAAmB;AACxB;;AAEJ,+BAAuB,KAAK;AAC5B,qBAAa,GAAG,IAAI,eAAe,QAAQ;AACvC,8BAAoB,eAAe;AACnC,0BAAgB,MAAM,KAAK,kBAAkB,aAAa;AAC1D,cAAI;AACA,kBAAM,KAAK,WAAW;AACtB;;;AAGR,cAAM,IAAI,MAAM;;UAGhB;AACA,YAAI,KAAK,sBAAsB;AAC3B,gBAAM,IAAI,MAAM,YAAY,KAAK;;AAIrC,YAAI,KAAK,mBAAmB;AACxB,iBAAQ,MAAM,aAAc,KAAK;AACjC,cAAI;AACA,kBAAM,IAAI,MAAM,iCAAiC;;AAIrD,eAAK,WAAW;;AAEpB,eAAO,KAAK;;MAEhB;AACI,eAAO,OAAO,KAAK,KAAK;;MAE5B;AACI,YAAI,CAAE,gBAAe,KAAK;AAGtB,cAAI,eAAe,KAAK;AACpB,mBAAQ,aAAc,KAAK,kBAAkB;AAC7C,gBAAI;AAEA,qBAAO;;;AAIX,mBAAO;;;AAGf,eAAO,KAAK,SAAS;;MAEzB;AACI,YAAI,CAAE,gBAAe,KAAK;AACtB,iBAAO;;AAEX,eAAO,KAAK,gBAAgB,aAAa;;MAE7C,iDAAiD;AAC7C,YAAI,eAAe,KAAK;AACpB,kBAAQ,KAAK,GAAG;AAEhB,iBAAO;;AAEX,aAAK,gBAAgB,eAAe,CAAE,SAAS;AAC/C,eAAO;;YAEL;AACF,YAAI,KAAK,gBAAgB,gBAAgB;AACrC,gBAAM,IAAI,MAAM,iBAAiB;;AAErC,aAAK,cAAc;AACnB,YAAI,KAAK,SAAS,gBAAgB;AAC9B,eAAK,kBAAkB;AACvB,iBAAQ,SAAS,aAAc,KAAK,kBAAkB;AACtD,yBAAe,YAAY,MAAM,UAAU;AAC3C,cAAI,CAAC;AACD,mBAAO;;;AAGf,aAAK,kBAAkB,KAAK,SAAS;AACrC,aAAK;AAEL,aAAK,WAAW,IAAI,UAAS,KAAK;AAClC,eAAO;;MAEX;AACI,wBAAgB,sBAAqB,KAAK;AAC1C,gBAAQ,QAAQ;AACZ,cAAI,OAAO,aAAa;AACpB,mBAAO,UAAU,KAAK;;;;MAIlC;AACI,wBAAgB,sBAAqB;AACrC,gBAAQ,QAAQ;AACZ,cAAI,OAAO,eAAe;AACtB,mBAAO,YAAY,KAAK,SAAS;;;;MAU7C;AACI,qCAA6B,KAAK,gBAAgB;AAClD,YAAI,wBAAwB;AACxB,gBAAM,IAAI,MAAM,6BAA6B;;AAEjD;AACI,2BAAgB,qBAAqB;AAMrC,cAAI,YAAW,CAAE,qBAAmB,mBAC7B,OAAO,SAAQ,SAAS;AAC3B,8BAAkB,EAAE,KAAK;AACzB,4BAAgB,SACX,KAAK;AAEN,kBAAI,YAAY,KAAK;AACjB,uBAAO;;AAEX,mBAAK,SAAS,eAAe;AAC7B,mBAAK,qBAAqB;AAC1B,qBAAO;eAEN,MAAM;AAEP,kBAAI,YAAY,KAAK;AACjB,uBAAO;;AAEX,mBAAK,qBAAqB;AAC1B,sBAAQ,KAAK,6BAA6B;AAC1C,sBAAQ,KAAK,IAAI,SAAS,IAAI;AAC9B,qBAAO;;AAEX,iBAAK,qBAAqB;AAC1B,mBAAO,CAAE,SAA
S,WAAW;;AAG7B,iBAAK,SAAS,eAAe;AAC7B,mBAAO,CAAE,SAAS,MAAM,WAAW;;;AAIvC,kBAAQ,KAAK,6BAA6B;AAC1C,kBAAQ,KAAK,IAAI,SAAS,IAAI;AAC9B,iBAAO,CAAE,SAAS,OAAO,WAAW;;;MAG5C;AACI,YAAI,CAAE,gBAAe,KAAK;AACtB,gBAAM,IAAI,MAAM,GAAG;;AAEvB,YAAI,KAAK,gBAAgB,eAAe,KAAK,sBAAsB;AAG/D,eAAK;;AAET,YAAI,eAAe,KAAK;AACpB,eAAK,yBAAyB;AAC9B,eAAK,SAAS,aAAa;AAC3B,iBAAO,KAAK,SAAS;;AAEzB,eAAO,KAAK,gBAAgB;AAE5B,YAAI,KAAK,gBAAgB;AACrB,eAAK,qBAAqB;AAC1B,eAAK,cAAc;AACnB,eAAK,kBAAkB;;;MAG/B;AACI,YAAI,OAAO,KAAK,KAAK,iBAAiB,WAAW;AAC7C,gBAAM,IAAI,MAAM;;AAEpB,eAAO,OAAO,KAAK,KAAK,iBAAiB,KAAK;AAE1C,iBAAO,KAAK,gBAAgB,GAAG,WAC3B,KAAK,gBAAgB,GAAG;;;MAGpC;AACI,+BAAuB,KAAK;AAC5B,qBAAa,GAAG,IAAI,eAAe,QAAQ;AACvC,8BAAoB,eAAe;AACnC,iBAAQ,SAAS,aAAc,KAAK,kBAAkB;AACtD,cAAI,aAAa;AACb,mBAAO,CAAE,MAAM,aAAa;;;AAGpC,cAAM,IAAI,MAAM;;MAGpB;AACI,qBAAa,KAAK,MAAM,WAAW,IAAI;AACvC,2BAAmB,KAAK;AACxB,uBAAe,KAAK,SAAS;AAG7B,mBAAW,YAAY;AACvB,aAAK,UAAU;AACf,iBAAQ,KAAK,QAAQ,QAAQ,KAAK,OAAO,KAAK;AAC9C,YAAI,KAAK;AAGL,eAAK,MAAM,kBAAkB,KAAK,MAAM,kBAAkB,SAAS;;;MAG3E;AACI,mBAAW;AACX,YAAI,MAAM;AAEN,cAAI,OAAO,aAAa;AACpB,kBAAM,IAAI,MAAM;;AAEpB,eAAK;;AAIL,cAAI,OAAO,aAAa,YAAY,CAAE,qBAAoB;AACtD,kBAAM,IAAI,MAAM;;AAGpB,cAAI,OAAO,OAAO;AACd,kBAAM,IAAI,MAAM;;AAGpB,iBAAO;;AAIX;AACA,eAAO,KAAK,UAAU,MAAM,KAAK,WAAW,OAAO,MAAM,KAAK,SAAS,SAAS;AAC5E,mBAAS;AACT,cAAI,kBAAkB;AAClB,oBAAQ,MAAM;;AAElB,iBAAO;;;MAGf;AACI;AACA;AACI,sBAAY;AACZ;AACA,iBAAO;;AAGP;AACA,gBAAM;;;MAGd;AACI,eAAO,QAAO;;MAElB;AACI,eAAO,QAAO;;MAWlB;AACI,kBAAU,KAAK,qBAAqB,EAAE,QAAQ,EAAE,OAAO,EAAE;AACzD,uBAAe,CAAE;AACjB,sBAAa,QAAS;UAClB,GAAG;AACC,0BAAc;AACd,+BAAmB,CAAE,GAAG;AACxB,0BAAc,CAAE;AAChB,mBAAO,QAAO,cAAc,cAAW,SAAQ,KAAK,IAAI,QAAQ,YAAY,MAAiB,OAAM;;;AAG3G,sBAAc;AACd,aAAK,YAAY,KAAK,MAAM,YAAY,MAAM,QAAQ,CAAC,IAAI,OAAM,OAAO;AACxE,eAAO;;MAeX;AACI,4BAAoB;AACpB,8BAAsB;AAItB,eAAO,KAAK,cAAc,aAAa,QAAQ,eAAe,YAAY,OAAO,cAAc;;MAEnG;AACI,eAAO,KAAK,IAAI,QAAQ;;MAE5B;AACI,gCAAwB,KAAK,QAAQ;AAErC,+BAAuB;AACvB,iBAAS,QAAQ;AAGb,8BAAqB,KAAK,UAAU,cAAc,IAAI;;AAO1D,yBAAiB,KAAK,MAAM,kBAAkB,KAAK,MAAM,kBAAkB,SAAS;AACpF,8BAAsB,kBAAkB,mBAAmB,mBAAmB;AAC9E,YAAI,gBAAgB;AAChB,gBAAM,IAAI,MAAM,YAAY,KAAK,6CACzB,0CAA0C;;;MAO1D;AACI;AACA,oBAAY;AACZ,yBAAiB,KAAK;AACtB,YAAI,cAAc;AACd,uBACI,KAAK,MAAM,eAAe,OAAO,KAAK,MAAM,YAAY,OAAO;;AAEvE,kCAA0B,KAAK,MAAM;AACrC,mCAA2B,KAAK,MAAM;AACtC,YAAI,KAAK;AACL,eAAK,MAAM,kBAAkB,KAAK;;AAEtC;AACA,uBAAe,WAAU,YAAY,KAAK;AAC1C;AACA,YAAI,UAAU;AACV,wBAAa;AACT,qCAAyB,KAAK,QAAQ;AACtC,kBAAM,OAAO,WAAW,CAAE,QAAQ,OAAO,SAAS,KAAK;AACvD,6BAAiB,MAAM,QAAQ,OAAO,MAAM,CAAC;AAC7C,gBAAI,KAAK;AACL,mBAAK,sBAAsB,YAAY,kBAAkB;;AAE7D,+BAAmB,SAAS,IAAI,EAAG,QAAQ,OAAO,WAAY,KAAK,qBAAqB,QAAQ,OAAO;AAKvG,gBAAI;AACA,kCAAoB,KAAK,sBAAsB,YAAY,QAAQ;AACnE,kBAAI,iBAAiB;AAKjB,oBAAI,iBAAiB;AACjB,kCAAgB;;AAEpB,mCAAmB,WAAW,OAAO,UAAU,cAAc;AAC7D,gCAAiB,iBAAgB,IAAI,QAAQ,OAAO;;AAExD,sBAAQ,KAAK,2BAA2B;;AAE5C,mBAAO;;;AAIX,2BAAiB;AAIb,gBAAI,CAAC;AACD;;AAEJ,oBAAQ,QAAQ,IAAI,aAAU,KAAK,KAAK,KAAK,MAAM;;AAEvD,wBAAa;AACT,qCAAyB,KAAK,QAAQ;AACtC,kBAAM,KAAK,KAAK,MAAM,YAAY,KAAK,SAAS;AAChD,yBAAc,MAAM,QAAQ,OAAO,MAAM,CAAC;AAC1C,gBAAI,KAAK;AACL,mBAAK,sBAAsB,YAAY,kBAAkB;;AAE7D,mBAAO;;;AAIf;AACA,aAAK,UAAU,MAAM,KAAK,MAAM,eAAe,MAAM,KAAK,MAAM,eAAe;AAC3E,cAAI,CAAC,KAAK,IAAI,QAAQ,YAAY,CAAC,KAAK,MAAM;AAC1C,sBAAU;;AAGV,4BAAgB,KAAK,SAAS,cAAc,YAAY,QAAQ,MAAM;AACtE,gBAAI,KAAK,IAAI,QAAQ;AACjB,mBAAK,SAAS,iBAAiB;;AAEnC,sBAAU,cAAc;;;AAGhC,YAAI;AACA,eAAK,YAAY,YAAY,QAAQ,SAAS,eAAe,OAAO;;AAExE,YAAI,KAAK,MAAM;AACX,eAAK,MAAM,cAAc,QAAQ,KAAK;YAClC,MAAM;YACN,YAAY,KAAK,MAAM,WAAW;YAClC,oBAAoB,KAAK,MAAM;YAC/B,cAAc,KAAK,MAAM,aAAa;YACtC,sBAAsB,KAAK,MAAM;YACjC,aAAa,OAAO,KAAK,QAAQ,IAAI,SAAO,OAAO,QAAQ,OAAO,OAAO,KAAK,QAAQ;YACtF,cAAc,QAAQ,IA
AI,UAAQ,KAAK;YACvC,cAAc,cAAc;YAC5B,WAAW,cAAc;;;AAGjC,eAAQ,MAAM,QAAQ,OAAO,UAAU,QAAQ;;MAOnD;AACI,sBAAc,QAAQ,IAAI,aAAU,KAAK,KAAK,KAAK,MAAM;AACzD,eAAO;;MAYX;AACI,2BAAmB,aAAY;AAC/B,YAAI,cAAc;AACd,+BAAqB,WAAW,gBAAgB;AAChD,gCAAsB,WAAW,iBAAiB;AAGlD;AACA,cAAI,WAAW;AACX,oBAAY,MAAM,QAAQ,SAAS,MAAM;AACzC,iCAAqB,OAAO,KAAK,QAAQ,IAAI,SAAS,OAAO;;AAG7D,iCAAqB,aAAa,IAAI,eAAe,OAAO;;AAEhE,sCAA4B,QAAQ,OAAO,UAAU,cAAc;AACnE,iBAAO,mBAAmB,OAAO;;AAIrC,eAAO;;MAOX;AACI,YAAI,UAAU;AACV,gBAAM,IAAI,MAAM;;AAEpB,gBAAQ,SAAS;AACjB,mBAAU,YAAW,KAAK;AAC1B,0BAAkB;AAClB,YAAI,UAAU,YAAY,UAAc,OAAO;AAC3C,wBAAc,OAAO,IAAI,OAAK,cAAkB;;AAEpD,uBAAe,SAAQ,MAAM,aAAa,OAAO;AACjD,kBAAU,IAAI,QAAO,OAAO,OAAO,QAAQ,KAAK;AAChD,aAAK,OAAO,GAAG;AAEf,YAAI,UAAU;AACV,uBAAa,KAAK,MAAM,WAAW,IAAI;AACvC,2BAAiB,sBAAqB;AACtC,eAAK,MAAM,YAAY,WAAW,KAAK;AACvC,eAAK,QAAQ;;AAEjB,eAAO;;MAOX;AACI,gBAAQ,SAAS;AACjB,kBAAU,IAAI,QAAO,OAAO,OAAO,QAAQ,KAAK;AAChD,aAAK,OAAO,GAAG;AACf,eAAO;;MAEX,uCAAuC;AACnC,eAAO,QAAQ,KAAK,iBAAiB;AACrC,YAAI,SAAS,QAAQ,UAAU,aAAa;AACxC,yBAAe,aAAa,KAAK;;AAErC,kBAAU,IAAI,UAAS,cAAc,WAAW,MAAM,KAAK;AAC3D,YAAI,KAAK,MAAM,oBAAoB,EAAE,SAAS;AAC1C,gBAAM,IAAI,MAAM,sBAAsB,EAAE;;AAE5C,aAAK,MAAM,oBAAoB,EAAE,QAAQ;AACzC,aAAK,OAAO,GAAG,KAAK;AACpB,eAAO;;MAEX;AACI,yBAAiB,KAAK,MAAM,WAAW,IAAI,EAAE,UACzC,KAAK,MAAM,WAAW,IAAI,EAAE,QAAQ,WACpC;AACJ,aAAK,MAAM;AACX,YAAI,EAAE,UAAU;AACZ,eAAK,MAAM;;AAEf,YAAI,aAAa;AACb,eAAK,MAAM;AAGX,sBAAY;AACZ,cAAI,EAAE,UAAU,eAAe,EAAE,UAAU;AACvC,oBAAQ,EAAE,OAAO,iBAAqB,EAAE;;AAE5C,eAAK,MAAM,WAAW,IAAI,EAAE,QAAQ;YAChC,SAAS,YAAW,KAAK;YACzB,OAAO,EAAE;YACT,OAAO,EAAE;YACT;YACA,UAAU;;AAEd,eAAK,MAAM,YAAY;;AAE3B,aAAK,MAAM,WAAW,IAAI,EAAE,QAAQ;AACpC,YAAI,CAAE,cAAa;AACf,eAAK,MAAM;;;MAGnB;AACI,YAAI,CAAC,KAAK,MAAM,WAAW,IAAI,EAAE;AAC7B;;AAEJ,aAAK,MAAM;AACX,YAAI,EAAE,UAAU;AACZ,eAAK,MAAM;;AAEf,qBAAa,KAAK,MAAM,WAAW,IAAI,EAAE;AACzC,yBAAiB,KAAK;AACtB,YAAI,YAAY;AAGZ,cAAI,EAAE,UAAU;AACZ,iBAAK,MAAM,YAAY,KAAK;;AAEhC,eAAK,MAAM;AACX,eAAK,QAAQ,YAAY,EAAE;AAC3B,eAAK,MAAM,WAAW,OAAO,EAAE;;AAG/B,eAAK,MAAM,WAAW,IAAI,EAAE,QAAQ;;;MAM5C;AACI,8BAAsB,KAAK,MAAM;AAC7B,oBAAU,KAAK,MAAM,oBAAoB;AACzC,eAAK,gBAAgB;;;MAG7B;AACI,aAAK,cAAc;AACnB,YAAI,KAAK,MAAM,oBAAoB,EAAE,SAAS;AAC1C,iBAAO,KAAK,MAAM,oBAAoB,EAAE;;;MAGhD;AACI,qBAAa,KAAK,QAAQ;AAC1B,aAAK,aAAa,KAAK,MAAM;AAC7B,aAAK,iBAAiB,KAAK,MAAM;AACjC,aAAK,WAAW,KAAK,MAAM;AAC3B,YAAI,KAAK,MAAM,mBAAmB;AAC9B,eAAK,aAAa;AAClB,cAAI,KAAK,WAAW;AAChB,iBAAK,UAAU;;AAEnB,eAAK,QAAQ,KAAK;;AAGtB,eAAO;;YAEL;AACF,aAAK,MAAM,YAAY;AACvB,2BAAmB,KAAK,MAAM;AAC9B,gCAAwB,KAAK,MAAM;AACnC,aAAK,MAAM,cAAc,UAAU;AACnC,aAAK,MAAM,cAAc,SAAS,MAAM;AACxC,aAAK,MAAM,YAAY;AACvB,aAAK,MAAM,cAAc,YAAY,KAAK,IAAI,GAAG,KAAK,MAAM,cAAc,QAAQ,IAAI,OAAK,EAAE;AAC7F,aAAK,MAAM,cAAc,WAAW,KAAK,MAAM,WAAW;AAC1D,aAAK,MAAM,cAAc,aACrB,KAAK,MAAM,aAAa;AAC5B,6BAAqB,KAAK,MAAM,cAAc;AAC1C,iBAAO,eAAe,MAAM,OAAO;AACnC,iBAAO,YAAY,MAAM,OAAO;;AAEpC,eAAO,KAAK,MAAM;;MAEtB;AACI,eAAO,KAAK,MAAM,gBAAgB,KAAK,KAAK,MAAM,gBAAgB;;MAEtE;AACI,yBAAiB,CAAE,IAAI,KAAK,MAAM,kBAAkB,YAAY,QAAQ,SAAS;AACjF,2BAAmB,aAAY;AAC/B,YAAI,cAAc;AACd,0BAAgB,WAAW;;AAE/B,YAAI,iBAAiB;AACjB,mBAAS,WAAW;AAGhB,kBAAM,IAAI,IAAI;AACV,kBAAI,MAAM;AACN,+BAAe,QAAQ;AACvB,6BAAa,qBAAyB,OAAO,MAAM,OAAO;AAC1D,uBAAO,KAAK,WAAW,MAAM,OAAO,OAAO,OAAO;;AAEtD,qBAAO;;AAIX,mBAAO,cAAc,IAAI,SAAS,IAAI,MAAM,IAAI,IAAI,OAAO;;;AAGnE,aAAK,MAAM,WAAW,KAAK;;MAE/B;AACI,eAAO,OAAO;AACd,eAAO;;MAEX;AACI,YAAI,KAAK,MAAM,kBAAkB;AAC7B,eAAK,MAAM,aAAa;;AAE5B,aAAK,MAAM;;MAEf;AACI,aAAK,MAAM;;MAMf;AACI,0BAAkB;UACd,OAAO;UACP,MAAM;UACN,IAAI,KAAK,MAAM;;AAEnB,YAAI;AACA,oBAAU,OAAO;;AAErB,aAAK,MAAM,WAAW,KAAK;AAC3B,aAAK,MAAM,cAAc;;MAM7B;AACI,uCAA+B,uBAAsB;AACrD,0CAAkC,IAAI,IAAI,uBAAuB,IAAI,OAAK,EAAE;AAE5E,qBAAa,GAAG,IAAI,KA
AK,MAAM,YAAY,MAAM,QAAQ;AACrD,0BAAe,KAAK,MAAM,YAAY,MAAM;AAC5C,cAAI,CAAC,QAAO,QAAQ,CAAC,0BAA0B,IAAI,QAAO;AACtD,oBAAO;;;AAGf,yBAAiB,KAAK,MAAM,WAAW;AACvC,aAAK,MAAM,cAAc,KAAK,MAAM,WAAW,WAAW,IACtD,OACA,KAAK,MAAM,WAAW,KAAK,MAAM,WAAW,SAAS;AAEzD,+BAAuB,QAAQ;AAG3B,cAAI,CAAC,QAAO,QAAQ,QAAO,YAAY,SAAS;AAC5C,iBAAK,MAAM;;;;MAUvB,wCAAwC;AACpC,gBAAY,GAAG,SAAS,GAAG,MAAM;AACjC,YAAI,MAAM,QAAQ,GAAG,UAAU;AAC3B,gBAAM,IAAI,MAAM,0CAA0C,GAAG;;AAEjE,kBAAU,KAAK,UAAU,MAAM,KAAK,aAAa,MAAM,KAAK,WAAW,MAAM,KAAK,KAAK,WAAW;AAClG,gBAAY,aAAa,SAAQ,MAAM;AAEvC,6BAAqB,sBAAqB,KAAK,MAAM,YAAY,IAAI;AACrE,YAAI,CAAC,oBAAoB,aAAa,WAAW,KAAK,GAAG,SAAS;AAC9D,gBAAM,IAAI,MAAM;;AAIpB,eAAO,KAAK,KAAK,YAAY;AACzB,yCAA+B;AAC/B,iCAAuB,EAAE,MAAO,MAAM,OAAQ,MAAK,EAAE,SAAS;AAE9D,kCAAuB,wBAAwB,cAE/C,QAAK,KAAK,KAAK,KAEf;AACA,yBAAc,GAAG,IAAI,OAAK,uBAAuB,EAAE;AACnD,cAAI,KAAK,MAAM,kBAAkB;AAG7B,iBAAK,MAAM,WAAW,QAAQ;AAC1B,oCAAqB,KAAK;AACtB,wBAAO;;;AAGf,iBAAK,MAAM,aAAa;;AAE5B,iBAAO,CAAE,OAAO,GAAG,OAAA;;;MAG3B;AACI,gBAAY,YAAgB,IAAI,MAAM;AACtC,eAAO;AACH,kBAAY,OAAO,MAAM,OAAK,aAAa,UAAS,MAAM;AAE1D;AACA,2BAAiB;AACjB,iBAAO,QAAQ;AACX,qBAAS,KAAK;;AAElB,iBAAO,KAAK,cAAc;AACtB,kBAAM,EAAE,GAAG,CAAC,GAAG,QAAQ;AACvB,oBAAY,IAAI,iBAAiB,SAAQ,MAAM;AAE/C,oBAAY,YAAgB,IAAI,WAAW,MAAM;AAEjD,mBAAO,IAAI;aACZ,UAAU;AACT,4BAAgB,IAAI,SAAS,IAAI;AACjC,2BAAc,MAAM,QAAQ,WAAW,UAAU,CAAC;AAClD,oBAAY,OAAM,WAAW,OAAO,QAAQ,MAAM;AAGlD,oBAAY,OAAM,MAAM,OAAK,aAAa,UAAS,MAAM;AAGzD,4BAAgB;AAChB,mBAAM,QAAQ;AACV,sBAAQ,KAAK,MAAM;;AAEvB,mBAAO;;;;MAInB;AAEI,qBAAa,KAAK,MAAM,WAAW,IAAI;AACvC,eAAO,KAAK,QAAQ,SAAS;;MAEjC;AAEI,qBAAa,KAAK,MAAM,WAAW,IAAI;AACvC,eAAO,KAAK,QAAQ,KAAK;;YAEvB;AACF,sBAAc;AACd,2BAAmB,MAAM,KAAK,QAAQ,KAAK;AAC3C,mBAAW,SAAS,SAAQ;AAC5B,eAAO;;MAQX;AACI,YAAI,KAAK,MAAM,eAAe;AAC1B,iBAAO,UAAU,KAAK,MAAM,YAAY;AACxC,eAAK,MAAM,YAAY,MAAM,KAAK;;AAEtC,eAAO;;UAEP;AACA,eAAO,KAAK,MAAM;;MAMtB;AAEI,aAAK;AACL,aAAK,MAAM;AACX,aAAK,IAAI;AACT,aAAK,QAAQ,IAAI;AACjB,kCAA0B,KAAK;AAC3B,eAAK,yBAAyB;AAC9B,eAAK,SAAS,aAAa;AAC3B,iBAAO,KAAK,SAAS;;AAEzB,aAAK,cAAc;AACnB,aAAK,kBAAkB;AACvB,aAAK,qBAAqB;;;AAGlC,YAAO,eAAe;AACtB,YAAO,iBAAiB;AACxB;AACI,qBAAe,oBAAmB,eAAc,QAAQ;AACxD,aAAO,QAAO,WAAW,QAAQ,OAAO;;AAErC;AACH,iBAAW;AACX,UAAI,GAAG,aAAa;AAChB,6BAAoB,IAAI,aAAY;AACpC,WAAG,YAAY,IAAI,QAAO;;AAE9B,4BAAqB,GAAG,UAAU;AAGlC,wBAAiB,MAAM,GAAG;AAC1B,aAAO,GAAG;;AAEP,oBAAe;AAOf;AAEH,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,oBAAY,SAAQ,IAAI,GAAG;AAC3B,aAAK,CAAC,GAAG;AACT,eAAO;SACR,QAAQ,MAAqB;;ACp7BpC;;;;;;;;;;;;;;;;AAiBA;AACI,aAAO,OAAO,cAAc,eAAe,aAAa;;AAErD;AACH,UAAI;AAEA,kBAAU,UAAU,aAAa,UAAU,UAAU,OAAO;AAE5D,eAAO,2TACF,KAAK,MAEN,0kDACK,KAAK,EAAE,OAAO,GAAG;;AAE9B,aAAO;;AAEJ;AACH,aAAQ,OAAO,WAAW,eAAe,OAAO,YAAY,QAEvD,OAAO,sBAAsB;;;;;;;ACpCtC;;;;;;;;;;;;;;;;AAmBA,iBAAY;AAKZ,SAAI,aAAa,SAAS,MAAM,OAAO;AACnC,UAAI;AACA,gBAAQ,KAAK;;;AAMrB,SAAI,aAAa,cAAc,MAAM;AAErC,SAAI,aAAa,WAAW,MAAO,OAAO,YAAY,eACjD,OAAO,QAAQ,aAAa,eAC5B,OAAO,QAAQ,SAAS,SAAS;AAEtC,SAAI,aAAa,aAAa,MAAM,OAAO,cAAc,eAAe,aAAa,QACjF,UAAU,aAAa,QAAQ,SAAS,KAAK,UAAU,cACvD,aAAa,KAAK,UAAU;AAKhC,SAAI,aAAa,QAAQ,MAAM;AAK/B,SAAI,aAAa,sCAAsC,MAAM,KAAI,QAAQ;AAEzE,SAAI,aAAa,gCAAgC,MAAM;AAEvD,SAAI,aAAa,WAAW,MAAM;ACtDlC;;;;;;;;;;;;;;;;AAoBO;AACH,sBAAgB;AAChB,UAAI,cAAa;AACb,eAAO,UAAU,WAAW,KAAK,CAAC,IAAI;;AAE1C,UAAI,CAAC,MAAM,QAAQ;AACf,eAAO;;AAEX,oBAAc;AACd,aAAO,MAAM,QAAQ,cACjB,cAAa,cAAc,UAAU;AACrC,cAAM,KAAK,UAAU;AACrB,oBAAY,UAAU;;AAE1B,UAAI,MAAM,QAAQ,QACd,OAAM,QAAQ;AACd,oCAA2B,KAAK,OAAO;;AAE3C,aAAO;;AAEX;AACI,gBAAU,WAAW;AACrB,UAAI,CAAE,MAAM,QAAQ,QAAS,CAAC,cAAa;AACvC,gBAAO,MAAM,WAAW,GAAG,MAAM,eAAe,QAAQ,KAAK,+DACjB,MAAM;AAClD;;AAEJ,cAAO,MAAM,SAAS,GAAG,MAAM,eAAe,QAAQ,KAAK,oDACjC,IAAI;AAC9B,cAAO,IAAI,WAAW,MAAM,IAAI,MAAM,eAAe,Q
AAQ,KAAK,sBAAsB,MAAM,wBACrE,IAAI;AAC7B,uBAAiB,MAAM,MAAM;AAC7B,mBAAa,GAAG,IAAI,IAAI,QAAQ,EAAE;AAC9B,oCAA2B,IAAI,IAAI,UAAU,QAAQ,OAAO;;;AAGpE;AACI,UAAI,iBAAiB;AACjB;;AAEJ,UAAI,kBAAkB,aAAa,kBAAkB,eACjD,kBAAkB,aAAa,gBAAgB;AAC/C,cAAM,IAAI,MAAM,aAAa,uBAAuB,yBAC1C,iCAAiC;;;AAG5C,uEAAkE;AACrE,UAAI,aAAa;AACb,qBAAY,cAAc,EAAE,OAAO,SAAS;AAC5C,eAAO;;AAEX,0BAAoB,YAAW;AAG/B,UAAI,kBAAkB,YAClB,CAAC,QAAQ,SAAS,WAAW,QAAQ,iBAAiB;AACtD,wBAAgB;;AAEpB,mBAAY,cAAc,eAAe,SAAS;AAClD,UAAK,KAAK,QACL,CAAC,cAAa,MAAM,CAAC,MAAM,QAAQ,MAAM,OAAO,MAAM,YACnD,OAAO,MAAM,aAAa,OAAO,MAAM;AAC3C,qBAAa,KAAK,OAAO,SAAS,EAAE,YAAY;AAChD,cAAM,IAAI,MAAM,aAAa,uBAAuB,0DACd;;AAE1C,4BAAsB,YAAW,GAAG;AACpC,UAAI,CAAC,cAAa,MAAM,CAAC,MAAM,QAAQ;AACnC,YAAI,CAAC;;AAET,6BAAuB;AACvB,qBAAe,kBAAkB,WAC7B,cAAa,GAAG,iBAChB,SAAQ,GAAG,IAAI;AACnB,aAAO,QAAO,WAAW,QAAQ,eAAe;;AAE7C,6EAAyE;AAC5E,UAAI,CAAC,MAAM,QAAQ;AACf,cAAM,IAAI,MAAM,YAAY,qBAAqB;;AAGrD,sBAAgB;AAChB,aAAO,QAAQ,IAAI,UAAU,iBAAgB,GAAG,GAAG,WAAW,MAAM,eAAe;;ACtGvF;;;;;;;;;;;;;;;;AAkBY,6BAAmB;AAMxB;AACH,mBAAa,OAAO,KAAK;AACzB,UAAI,KAAK,WAAW;AAChB,cAAM,IAAI,MAAM,yGAET,KAAK;;AAEhB,mBAAa,KAAK;AAClB,iBAAW,EAAE;AAEb,UAAI,OAAO,SAAS;AAChB,iBAAS,OAAO,UAAU,GAAG,OAAO,SAAS;;AAGjD,eAAS,SAAS;AAElB,iBAAW;AACP,gBAAO,WAAW;AAClB;AACI,yBAAe,GAAG,GAAG;AACrB,cAAI,WAAU;AACV,oBAAQ,MAAM;;AAElB,kBAAO,SAAS;AAChB,iBAAO;;AAGP,kBAAO,SAAS;AAChB,gBAAM;;;AAGd,aAAO,eAAe,IAAI,QAAQ,CAAE,OAAO,QAAQ,cAAc;AAEjE,aAAO;;ACzDX;;;;;;;;;;;;;;;;AAyCA;AACI,oBAAc,iBAAgB,OAAM,QAAQ;AAC5C,oBAAc,iBAAgB,OAAM,QAAQ;AAC5C,yBAAuB,MAAM,OAAO,MAAM,OAAO,yBAAyB,MAAM,aAAa,MAAM;AAEnG,sBAAgB;AACZ,eAAO,SAAQ,QAAQ,OAAO;;AAElC,qBAAe,CAAE,MAAM,OAAO,MAAM;AACpC,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAqB;;AAE1D,qBAAW,IAAG,CAAE,UAAA;ACpD5B;;;;;;;;;;;;;;;;AAmBO;AACH,UAAI,SAAS;AACT,gBAAQ,YAAW;;AAEvB,UAAI,UAAU;AACV,cAAM,IAAI,MAAM;;AAGpB,UAAI,CAAC,cAAa,WAAW,CAAC,MAAM,QAAQ,WACxC,OAAO,WAAW,YAAY,OAAO,WAAW,aAChD,OAAO,WAAW;AAClB,cAAM,IAAI,MAAM;;AAGpB,UAAI,SAAS;AACT,4CAAmC;AACnC,6BAAqB,eAAc;AACnC,6BAAqB,eAAc;AACnC,gBAAO,iBAAiB,cAAc,MAAM,iCAAiC,kCACtE,+BAA+B;AACtC,qBAAa,GAAG,IAAI,cAAc,QAAQ,EAAE;AACxC,2BAAiB,cAAc;AAC/B,oCAA0B,MAAM,cAAc,SAAS,IACnD,aAAa,eAAc,MAAM,MAAM,MACvC;AACJ,kBAAO,cAAc,OAAO,MAAM,MAAM,CAAC,mBAAmB,MAAM,gDAC1D,qDACM;;;AAGtB,UAAI,CAAC,cAAa,WAAW,CAAC,MAAM,QAAQ;AACxC,iBAAS,CAAC;;AAEd,cAAQ,SAAS;AACjB,eAAS,UAAU,WACf,cAAa,QAAQ,SACrB,SAAQ,QAAQ,IAAI;AACxB,aAAO,QAAO,WAAW,QAAQ,OAAO;;ACxD5C;;;;;;;;;;;;;;;;AA8CO;AACH,4BAAsB,YAAW,QAAQ;AACzC,aAAO,YAAW,QAAQ,OAAO,eAAe;;AChDpD;;;;;;;;;;;;;;;;AAoBO,iCAA6B;MAChC,SAAW;MACX,SAAW;MACX,OAAS;MACT,QAAU;MACV,OAAS;MACT,MAAQ;MACR,WAAa;;AC3BjB;;;;;;;;;;;;;;;;AAqBA,oCAAgC;AAkBzB;AAEH,oBAAc;AACd,2BAAqB;AACrB,oBAAc,MAAM,QAAQ,WACxB,QAAQ,IAAI,aAAU,QAAO,QAC7B,OAAO,KAAK;AAChB,mBAAa,GAAG,IAAI,MAAM,QAAQ,EAAE;AAChC,qBAAa,MAAM;AACnB,kBAAU,MAAM,QAAQ,WAAW,QAAQ,GAAG,SAAS,QAAQ;AAC/D,YAAI,EAAE,UAAU,aAAa,EAAE,UAAU,WAAW,EAAE,UAAU,UAC5D,EAAE,UAAU,YAAY,EAAE,UAAU;AACpC,gBAAM,IAAI,MAAM,gCAAgC,UAAU,EAAE;;AAEhE,qBAAa,CAAE,MAAM,OAAO,EAAE,OAAO,OAAO,EAAE;AAC9C,YAAI,EAAE,UAAU;AACZ,4BAAkB,IAAI,QAAQ;AAC1B,yBAAa,MAAM,EAAE;AACrB,kCAAsB,KAAK,OAAO,WAAU,KAAI,EAAE,QAAQ,KACtD,0BAA0B,KAAK;AACnC,0BAAc,IAAI,WAAW;AAC7B,yBAAa;AACb,0BAAa,GAAG,KAAI,KAAK,QAAQ;AAC7B,0BAAY,KAAK;AACjB,oCAAsB,IAAI,WAAW,IAAI,YAAY,CAAC,IAAI,SAAS;AACnE,oBAAM,IAAI,eAAe;AACzB,wBAAU;AACV,oBAAM,IAAI,KAAK;AACf,wBAAU,IAAI;;AAElB,oBAAQ;;AAEZ,uBAAa,KAAK;;AAGlB,uBAAa,KAAK,EAAE;;AAExB,YAAI,SAAS;AACT,eAAK,QAAQ;;AAEjB,cAAM,KAAK;;AAEf,2BAAqB,MAAM,QAAQ,IAAI;AACvC,aAAO,CAAE,MAAM,uBAAuB,eAAe;;AAiBlD;AAEH,kBAAY;AACZ;AACA,mBAAa;AACb,yBAAmB;AACf,qBAAa,KAAK;AAClB,sBAAc,KAAK;AACnB,sBAAc,KAAK;AACnB,qBAAa,eAAc;AAC3B;AACA,YAAI,kBAAkB;AAClB,+BAAqB,KAAK;AAC1B,cAA
I,aAAa,UAAU,WAAW,aAAa,UAAU;AACzD,gBAAI,CAAE,UAAS,gBAAgB,WAAW;AACtC,oBAAM,IAAI,MAAM,UAAU,KAAK,0BAA0B,aAAa;;qBAIrE,aAAa,UAAU;AAC5B,gBAAI,UAAU;AACV,oBAAM,IAAI,MAAM,UAAU,KAAK,0BAA0B,aAAa,yDACf;;;AAI3D,kBAAM,IAAI,MAAM,UAAU,KAAK,uCACL,aAAa;;AAI3C,yCAA+B,qBAAqB,aAAa;AACjE,6BAAmB,QAAO,MAAM,QAAQ,SAAS,OAAO;AACxD,iCAAwB,aAAa,UAAU,UAC3C,IAAI,WAAW,cACf,IAAI,YAAY;AACpB,cAAI,UAAU;AACV,gBAAI,aAAa,UAAU,WAAW,aAAa,UAAU;AACzD,uBAAS,IAAI,aAAa,eAAe;AACzC,2BAAa,GAAG,IAAI,eAAe,QAAQ;AACvC,0BAAU,eAAe;AACzB,uBAAO,KAAK,IAAI,aAAa,QAAQ,aAAa;;uBAGjD,aAAa,UAAU;AAC5B,kBAAI,kBAAkB;AAClB,gCAAgB;;AAEpB,uBAAS,cAAc;;AAGvB,oBAAM,IAAI,MAAM,iCAAiC,aAAa;;qBAI7D,UAAU;AACf,gBAAI,aAAa,UAAU,WAAW,aAAa,UAAU;AACzD,oBAAM,IAAI,MAAM,iCAAiC,aAAa;;AAGlE,qBAAS,IAAI,WAAW,eAAe;AACvC,yBAAa,GAAG,IAAI,eAAe,QAAQ;AACvC,wBAAU,eAAe;AACzB,qBAAO,KAAK,KAAK,MAAM,IAAI,aAAa,QAAQ,aAAa;;;AAIjE,kBAAM,IAAI,MAAM,gCAAgC,UAAU;;AAE9D,oBAAU,OAAO;mBAEZ,UAAU;AACf,wBAAa,eAAc,KAAK;AAChC,mBAAS;AACT,uBAAa,GAAG,IAAI,OAAM;AACtB,+BAAmB,IAAI,YAAY,QAAO,MAAM,QAAQ,SAAS,0BAA0B;AAC3F,sBAAU;AACV,0BAAc,IAAI,WAAW,QAAO,MAAM,QAAQ,SAAS;AAC3D,mBAAO,KAAK;AACZ,sBAAU;;;AAId,8BAAoB,qBAAqB;AACzC,6BAAmB,QAAO,MAAM,QAAQ,SAAS,OAAO;AACxD,cAAI,UAAU;AACV,qBAAS,IAAI,aAAa;qBAErB,UAAU;AACf,qBAAS,IAAI,WAAW;qBAEnB,UAAU;AACf,qBAAS,IAAI,WAAW;qBAEnB,UAAU;AACf,qBAAS,IAAI,aAAa;AAC1B,0BAAa,IAAI,aAAa,OAAO,SAAS;AAC9C,2BAAc,IAAI,aAAa,OAAO,SAAS;AAC/C,yBAAa,GAAG,IAAI,MAAK,QAAQ;AAC7B,oBAAK,KAAK,OAAO,IAAI;AACrB,qBAAM,KAAK,OAAO,IAAI,IAAI;;AAE9B,+BAAmB,QAAO,OAAM,OAAO;AACvC,gCAAoB,QAAO,QAAO,OAAO;AACzC,gBAAI,QAAQ,SAAQ,YAAY;AAChC,uBAAW;AACX,wBAAY;;AAGZ,kBAAM,IAAI,MAAM,gCAAgC,UAAU;;AAE9D,oBAAU,OAAO;;AAErB,YAAI,UAAU;AACV,cAAI,QAAQ,QAAO,QAAQ,OAAO;;;AAG1C,aAAO;;AAKJ;AAEH,UAAI,OAAO;AACP,cAAM,IAAI,MAAM,wBAAwB,KAAK,UAAU;;AAE3D,4BAAsB;AAQtB,2BAAqB;AACrB,SAAG,QAAQ;AACP,2BAAmB,EAAE;AAErB,qBAAa,KAAK,EAAE,eAAe,EAAE,OAAO,aAAa,IACrD,IAAI,EAAE,YAAY;AACtB,YAAI,CAAE,cAAa,gBAAgB,aAAa,cAC5C,aAAa;AACb,gBAAM,IAAI,MAAM,mCAAmC,EAAE,YAAY;;;AAIzE,gBAAU,IAAI,WAAW;AACzB,mBAAa;AACb,mBAAa,QAAQ;AACjB,UAAE,IAAI,IAAI,WAAW,EAAE,SAAS;AAChC,kBAAU,EAAE;;AAEhB,aAAO,EAAE;;AAGb,0BAAsB,OAAO,WAAW,eACnC,QAAO,SAAS,eAAe,OAAO,SAAS,eAC5C,OAAO,SAAS;AAUjB;AACH,UAAI;AACA,eAAO,OAAO,WAAW;;AAE7B,aAAO,IAAI,KAAK,CAAC,OAAM;;AAQpB;AACH,UAAI;AACA,eAAO,OAAO,KAAK,SAAQ,SAAS;;AAExC,kBAAY,IAAI,WAAW;AAC3B,cAAQ;AACR,mBAAa,OAAO,IAAI,QAAQ,IAAI,GAAG;AACnC,aAAK,OAAO,aAAa,IAAI;;AAEjC,aAAO,KAAK;;AAQT;AACH,UAAI;AACA,oBAAY,OAAO,KAAK,MAAK;AAC7B,eAAO,IAAI,OAAO,MAAM,IAAI,YAAY,IAAI,aAAa,IAAI;;AAEjE,gBAAU,KAAK;AACf,sBAAe,IAAI,WAAW,EAAE;AAChC,mBAAa,GAAG,IAAI,EAAE,QAAQ,EAAE;AAC5B,gBAAO,IAAI,CAAC,EAAE,WAAW,KAAK;;AAElC,aAAO,QAAO;;AAQX;AACH,UAAI,QAAQ,WAAW;AACnB,eAAO,QAAQ;;AAEnB,4BAAsB;AACtB,cAAQ,QAAQ;AACZ,2BAAmB,QAAO;;AAE9B,mBAAa,IAAI,WAAW;AAC5B,mBAAa;AACb,cAAQ,QAAQ;AACZ,aAAK,IAAI,IAAI,WAAW,UAAS;AACjC,kBAAU,QAAO;;AAErB,aAAO,KAAK;;AAST;AACH,wBAAkB;AAClB,aAAO,KAAK;AACZ,aAAO,KAAK,SAAS;AACjB,eAAO,KAAK,MAAM,GAAG,KAAK,SAAS;;AAEvC,oBAAc,KAAK,MAAM;AACzB,aAAO,MAAM,MAAM,SAAS;;AAOzB;AACH,UAAI,eAAe,yBAAyB;AACxC,cAAM,IAAI,MAAM;;AAEpB,aAAO;QACH,WAAW,IAAI;QACf,mBAAmB;QACnB,oBAAoB,eAAe,iBAAiB,OAChD,IACA,iBAAiB,KAAK,UAAU,eAAe;QACnD,kBAAkB,eAAe,eAAe,OAC5C,IACA,iBAAiB,KAAK,UAAU,eAAe;QACnD,iBAAiB,eAAe,cAAc,OAC1C,IACA,eAAe,WAAW;;;AAStC;AACI,8BAAwB;AACpB,gBAAQ,KAAK;AACb,gBAAQ;AACR,eAAQ,KAAI,aAAgB;AACxB,eAAK;AACL,gBAAM;;AAEV,aAAK,CAAC;AACN,aAAK;AACL,eAAO,IAAI;;AAEf,2BAAqB,IAAI,YAAY;AACrC,mBAAa,KAAK;AAClB,mBAAa,GAAG,IAAI,MAAM;AACtB,qBAAa,KAAK,gBAAgB;;AAEtC,mBAAa,MAAM,IAAI,MAAM;AACzB,qBAAa,KAAK,YAAe,KAAI,QAAS;;AAElD,aAAO;;AAQX;AACI,4BAAsB,IAAI,YAAY;AACtC,oBAAc,KAAK;AACnB,oBAAc,MAAM;AACpB,oBAAc,MAAM;AACpB,oBAAc,MAAM;AACpB,mBAAa,G
AAG,IAAI,IAAI;AACpB,sBAAc,KAAK,KAAK;;AAE5B,mBAAa,IAAI,IAAI,IAAI;AACrB,sBAAc,KAAK,aAAe,KAAI,MAAO;;AAEjD,aAAO;;AAQX;AACI,0BAAoB,IAAI,YAAY;AACpC,mBAAa,GAAG,IAAI,IAAI;AACpB,oBAAY,KAAK;;AAErB,kBAAY,KAAK,YAAY,MAAM;AACnC,aAAO;;AASJ;AAIH,2BAAqB;AACrB,4BAAsB;AACtB,0BAAoB;AACpB,aAAO;AACH,wBAAe,IAAI,YAAY,IAAI,eAAe;AAClD,iCAAyB,IAAI,YAAY;AACzC,0BAAiB,GAAG,SAAQ,eAAe,QAAQ;AAC/C,8BAAoB,eAAe;AACnC,8BAAoB,aAAa,YAAY,eAAe,MAAO,eAAc,SAC7E,cAAc,eAAe;AACjC,2BAAiB,UAAS;;AAE9B,eAAO,IAAI,aAAa;;;ACtchC;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,cAAc;AACnB,aAAK,cAAc;;aAEhB;AACH,YAAI,iBAAiB,YAAY;AAC7B,2BAAiB,WAAW,IAAI;;AAEpC,eAAO,iBAAiB;;aAQrB;AACH,yBAAiB,cAAc,YAAY,KAAK;;aAQ7C;AACH,yBAAiB,cAAc,YAAY,KAAK;;aAU7C;AACH,eAAO,iBAAiB,YAAY,KAAK;;aAUtC;AACH,eAAO,iBAAiB,YAAY,KAAK,QAAQ;;aAE9C;AACH,8BAAsB;AACtB,wBAAgB,gBAAgB,SAC5B,iBAAiB,cAAc,cAC/B,iBAAiB,cAAc;AACnC,gBAAQ,QAAQ;AACZ,0BAAgB,OAAO,KAAK;AAC5B,cAAI,YAAY;AACZ,0BAAc,KAAK;;;AAG3B,eAAO;;;AAGR,+BAA2B,gBAAgB,iBAAiB,mBAAmB;AAC/E,+BAA2B,gBAAgB,iBAAiB,mBAAmB;AAC/E,4BAAwB,SAAS,iBAAiB,gBAAgB;AAClE,4BAAwB,sBAAsB,iBAAiB,gBAAgB,KAAK;ACpF3F;;;;;;;;;;;;;;;;AAoBA,0BAAsB;AACtB,6BAAyB;AAIzB,6BAAyB;AAIzB,4BAAwB;AAIjB;AACH,yBAAmB;AACnB,aAAO,IAAI,QAAQ;AACf,8BAAsB,WAAW,eAAe;AAChD,sBAAc,YAAY,MAAM;AAChC,sBAAc,UAAU,WAAS,OAAO;;;AAGhD;AACI,UAAI,CAAC,OAAM,QAAQ;AAIf,cAAM,IAAI,MAAM;;AAIpB,wBAAkB,OAAO,WAAW,cAAc,OAAO;AACzD,sBAAgB,UAAU,aAAa,UAAU,gBAC7C,UAAU,mBAAmB,UAAU,eACvC,UAAU;AACd,UAAI,WAAW;AACX,cAAM,IAAI,MAAM;;AAEpB,aAAO;;AAEX;AACI,iBAAW,YAAY;AACvB,SAAG,kBAAkB,kBAAkB,CAAE,SAAS;AAClD,SAAG,kBAAkB,iBAAiB,CAAE,SAAS;;;MAQjD;AACI,aAAK,YAAY;AACjB,YAAI,aAAa,QAAQ,CAAC;AACtB,gBAAM,IAAI,MAAM;;AAEpB,aAAK,YAAY;;YAEf;AAEF,YAAI,eAAe,yBAAyB;AACxC,gBAAM,IAAI,MAAM;;AAGpB,eAAO,KAAK,eAAe,KAAK,WAAW;;YAEzC;AACF,eAAO,KAAK,eAAe,KAAK;;MAgBpC;AACI,eAAO,IAAI,QAAQ;AACf,8BAAoB,KAAK,UAAU,KAAK,eAAe;AACvD,sBAAY,kBAAkB,MAAM,cAAc;AAClD,sBAAY,YAAY;AACpB,uBAAW,YAAY;AACvB,gBAAI,kBAAkB;AAElB,8BAAgB,GAAG,YAAY,kBAAkB;AACjD,iCAAmB,QAAQ,YAAY;AACvC,iCAAmB,WAAW,IAAI,KAAK;AACvC,yBAAW,YAAY;AACnB,oBAAI,WAAW,UAAU;AACrB,qBAAG;AACH,yBAAO,OAAO,IAAI,MAAM,gCAAgC,KAAK;;AAI7D,0BAAQ,WAAW,OAAO;;;AAGlC,yBAAW,UAAU;AACjB,mBAAG;AACH,uBAAO,OAAO,WAAW;;AAE7B,sBAAQ,aAAa,MAAM,GAAG;;AAI9B,yCAA2B,6BAA6B;AAExD,6BAAe,GAAG,YAAY,iBAAiB;AAC/C,8BAAgB,OAAO,YAAY;AACnC,qCAAuB,UAAU,IAAI,CAAE,WAAW,KAAK,WAAW;AAClE;AACA,6BAAe,YAAY;AAEvB,0BAAU,GAAG,YAAY,kBAAkB;AAC3C,mCAAmB,QAAQ,YAAY;AACvC,wCAAwB,WAAW,IAAI;kBACnC,WAAW,KAAK;kBAChB;kBACA;;AAEJ,gCAAgB,YAAY,MAAM,QAAQ,CAAE;AAC5C,gCAAgB,UAAU;AAGtB,8BAAY,OAAO,YAAY;AAC/B,4CAA0B,UAAU,OAAO,KAAK;AAChD,oCAAkB,YAAY;AAC1B,uBAAG;AACH,2BAAO,OAAO,gBAAgB;;AAElC,oCAAkB,UAAU;AACxB,uBAAG;AACH,2BAAO,OAAO,gBAAgB;;;;AAI1C,6BAAe,UAAU;AACrB,mBAAG;AACH,uBAAO,OAAO,eAAe;;AAEjC,qBAAO,aAAa;AAChB,oBAAI,WAAW;AACX,qBAAG;;AAGH,0BAAQ,aAAa,MAAM,GAAG;;;;;AAK9C,sBAAY,UAAU,WAAS,OAAO,YAAY;;;;AAI9D,qBAAiB,aAAa;AACvB,4BAAwB;AAC3B,UAAI,CAAC,OAAM,QAAQ;AACf,eAAO;;AAGP,YAAI,CAAC,MAAM,QAAQ,QAAQ,IAAI,WAAW,iBAAiB;AACvD,iBAAO,iBAAiB,IAAI,MAAM,iBAAiB,WAAW;;AAG9D,iBAAO;;;;AAInB,qBAAiB,mBAAmB;AACpC,qBAAiB,mBAAmB;AAkB7B;AACH,aAAO,IAAI,iBAAiB;;AAEhC;AACI,aAAO,IAAI,WAAW,iBAAiB,cACnC,IAAI,MAAM,iBAAiB,WAAW,UACtC;;;MAGJ;AACI,aAAK,YAAY;;YAEf;AACF,eAAO,IAAI,QAAQ;AACf,8BAAoB,KAAK,UAAU,KAAK,eAAe;AACvD,sBAAY,kBAAkB,MAAM,cAAc;AAClD,sBAAY,YAAY;AACpB,uBAAW,YAAY;AACvB,uBAAW,GAAG,YAAY,iBAAiB;AAC3C,0BAAc,GAAG,YAAY;AAS7B,sCAA0B,MAAM;AAChC,8BAAkB,YAAY;AAC1B,0BAAY;AACZ,iCAAmB,kBAAkB;AACjC,oBAAI,KAAK,aAAa,KAAK;;AAE/B,sBAAQ;;AAEZ,8BAAkB,UAAU;AACxB,iBAAG;AACH,qBAAO,OAAO,kBAAkB;;AAEpC,eAAG,aAAa,MAAM,GAAG;;AAE7B,sBAAY,UAAU,WAAS,OAAO,YAAY;;;YAGpD;AACF,eAAO,iBAAiB;AACxB,eAAO,IAAI,QAAQ;AACf,8BAAoB,KAAK,UAAU,KAAK,eAAe;AA
CvD,sBAAY,kBAAkB,MAAM,cAAc;AAClD,sBAAY,YAAY;AACpB,uBAAW,YAAY;AACvB,2BAAe,GAAG,YAAY,iBAAiB;AAC/C,8BAAkB,OAAO,YAAY;AACrC,mCAAuB,UAAU,IAAI;AACrC;AACA,2BAAe,YAAY;AACvB,kBAAI,eAAe,UAAU;AACzB,mBAAG;AACH,uBAAO,OAAO,IAAI,MAAM,gCAAgC;;AAKxD,0CAA0B,UAAU,OAAO;AAC3C,wCAAwB;AAEpB,4BAAU,GAAG,YAAY,kBAAkB;AAC3C,qCAAmB,QAAQ,YAAY;AACvC,6CAA2B,WAAW,OAAO;AAC7C,qCAAmB,YAAY,MAAM,QAAQ,eAAe,OAAO;AACnE,qCAAmB,UAAU,WAAS,OAAO,eAAe;;AAIhE,kCAAkB,YAAY;AAC9B,kCAAkB,UAAU;AACxB;AACA,qBAAG;AACH,yBAAO,OAAO,eAAe;;;;AAIzC,2BAAe,UAAU;AACrB,iBAAG;AACH,qBAAO,OAAO,eAAe;;AAEjC,mBAAO,aAAa;AAChB,kBAAI,WAAW;AACX,mBAAG;;AAGH,wBAAQ,aAAa,MAAM,GAAG;;;;AAI1C,sBAAY,UAAU,WAAS,OAAO,YAAY;;;;ACrT9D;;;;;;;;;;;;;;;;AAqBA,2BAAuB;AACvB,wBAAoB;AACpB,wBAAoB;AACpB,kCAA8B;AAC9B,gCAA4B;AAC5B,+BAA2B;AAC3B,kCAA8B;AAMvB;AACH,UAAI,CAAC,OAAM,QAAQ,iBAAiB,OAAO,WAAW,eAClD,OAAO,OAAO,iBAAiB;AAC/B,cAAM,IAAI,MAAM;;AAGpB,iBAAW,OAAO;AAClB,+BAAyB;AACzB,mBAAa,GAAG,IAAI,GAAG,QAAQ,EAAE;AAC7B,oBAAY,GAAG,IAAI;AACnB,uBAAe,cAAc;AAC7B,YAAI,IAAI,WAAW,WAAW,IAAI,SAAS,OAAO;AAC9C,aAAG,WAAW;AACd,4BAAkB,oBAAoB;AACtC,cAAI,iBAAiB,QAAQ,eAAe;AACxC,6BAAiB,KAAK;;;;AAIlC,aAAO;;AAEX;AACI,aAAO;QACH,MAAM,CAAC,aAAa,MAAM,aAAa,KAAK;QAC5C,UAAU,CAAC,aAAa,MAAM,uBAAuB,KAAK;QAC1D,aAAa,CAAC,aAAa,MAAM,qBAAqB,KAAK;QAC3D,YAAY,CAAC,aAAa,MAAM,oBAAoB,KAAK;QACzD,eAAe,CAAC,aAAa,MAAM,uBAAuB,KAAK;;;AAUvE;AACI,oBAAc,IAAI,MAAM;AACxB,UAAI,MAAM,SAAS;AACf,cAAM,IAAI,MAAM,uBAAuB;;AAE3C,aAAO,MAAM,MAAM,GAAG,MAAM,SAAS,GAAG,KAAK;;AAEjD;AACI,aAAO,IAAI,WAAW,oBAAoB,cACtC,IAAI,MAAM,oBAAoB,WAAW,UACzC;;;MAQJ;AACI,YAAI,CAAC,OAAM,QAAQ,iBAAiB,OAAO,WAAW,eAClD,OAAO,OAAO,iBAAiB;AAK/B,gBAAM,IAAI,MAAM;;AAEpB,aAAK,KAAK,OAAO;AACjB,YAAI,aAAa,QAAQ,CAAC;AACtB,gBAAM,IAAI,MAAM;;AAEpB,aAAK,YAAY;AACjB,aAAK,OAAO,aAAa,KAAK;;YAW5B;AACF,YAAI,eAAe,yBAAyB;AACxC,gBAAM,IAAI,MAAM;;AAIhB,2BAAiB,KAAK,UAAU,eAAe;AAC/C,8BAAoB,KAAK,UAAU,eAAe;AAClD,qCAA2B,6BAA6B;AACxD;AACI,iBAAK,GAAG,QAAQ,KAAK,KAAK,MAAM,KAAK,UAAU;AAC/C,iBAAK,GAAG,QAAQ,KAAK,KAAK,UAAU;AACpC,iBAAK,GAAG,QAAQ,KAAK,KAAK,aAAa;AACvC,iBAAK,GAAG,QAAQ,KAAK,KAAK,YAAY,0BAA0B,eAAe;AAC/E,iBAAK,GAAG,QAAQ,KAAK,KAAK,eAAe,KAAK,UAAU;cACpD,QAAQ,eAAe;cACvB,aAAa,eAAe;cAC5B,aAAa,eAAe;cAC5B,qBAAqB,eAAe;;AAExC,mBAAO,CAAE;;AAIT,iBAAK,GAAG,WAAW,KAAK,KAAK;AAC7B,iBAAK,GAAG,WAAW,KAAK,KAAK;AAC7B,iBAAK,GAAG,WAAW,KAAK,KAAK;AAC7B,iBAAK,GAAG,WAAW,KAAK,KAAK;AAC7B,iBAAK,GAAG,WAAW,KAAK,KAAK;AAC7B,kBAAM,IAAI,MAAM,yBAAyB,KAAK,kHAEpB,mBAAmB,wCACrB,mBAAmB,qCACpB,mBAAmB;;;;YAYhD;AACF,qBAAa,KAAK,MAAM,KAAK,GAAG,QAAQ,KAAK,KAAK;AAClD,YAAI,QAAQ;AACR,gBAAM,IAAI,MAAM,kDAAkD,KAAK;;AAE3E,YAAI,KAAK,sBAAsB;AAC3B,gBAAM,IAAI,MAAM;;AAGpB,oBAAY;AAEZ,yBAAiB,KAAK,MAAM,KAAK,GAAG,QAAQ,KAAK,KAAK;AACtD,YAAI,YAAY;AACZ,gBAAM,IAAI,MAAM,4CAA4C,KAAK;;AAGrE,YAAI,gBAAgB;AAEpB,4BAAoB,KAAK,MAAM,KAAK,GAAG,QAAQ,KAAK,KAAK;AACzD,YAAI,eAAe;AACf,gBAAM,IAAI,MAAM,gDAAgD,KAAK;;AAGzE,YAAI,cAAc;AAElB,+BAAuB,KAAK,GAAG,QAAQ,KAAK,KAAK;AACjD,YAAI,kBAAkB;AAClB,2BAAiB,KAAK,MAAM;AAC5B,cAAI,SAAS,SAAS;AACtB,cAAI,cAAc,SAAS;AAC3B,cAAI,cAAc,SAAS;AAC3B,cAAI,sBAAsB,SAAS;;AAGvC,iCAAyB,KAAK,GAAG,QAAQ,KAAK,KAAK;AACnD,YAAI,oBAAoB;AACpB,gBAAM,IAAI,MAAM,wDACR,KAAK;;AAEjB,YAAI,aAAa,0BAA0B;AAC3C,eAAO;;;AAGf,wBAAoB,aAAa;AAC1B,+BAA2B;AAC9B,UAAI,CAAC,OAAM,QAAQ;AACf,eAAO;;AAGP,YAAI,CAAC,MAAM,QAAQ,QAAQ,IAAI,WAAW,oBAAoB;AAC1D,iBAAO,oBAAoB,IAAI,MAAM,oBAAoB,WAAW;;AAGpE,iBAAO;;;;AAInB,qBAAiB,mBAAmB;AACpC,qBAAiB,mBAAmB;AAyB7B;AACH,aAAO,IAAI,oBAAoB;;;MAG/B;AACI,gBAAO,OAAM,QAAQ,eAAe,MAAM;AAC1C,gBAAO,OAAO,WAAW,eACrB,OAAO,OAAO,iBAAiB,aAAa,MAAM;AACtD,aAAK,KAAK,OAAO;;YAEf;AACF,oBAAY;AACZ,uBAAe,cAAc;AAC7B,uBAAe,iBAAiB;AAChC,qBAAa,GAAG,IAAI,KAAK,GAAG,QAAQ,EAAE;AAClC,sBAAY,KAAK,GAAG,IAAI;AACxB,cAAI,IAAI,WAAW,WAAW,IA
AI,SAAS;AACvC,8BAAkB,oBAAoB;AACtC,gBAAI,aAAa,KAAK,MAAM,KAAK,GAAG,QAAQ;;;AAGpD,eAAO;;YAEL;AACF,eAAO,mBAAiB;AACxB,qBAAa,aAAa;AAC1B,YAAI,KAAK,GAAG,QAAQ,KAAK,SAAS;AAC9B,gBAAM,IAAI,MAAM,8BAA8B;;AAElD,qBAAa,KAAK,MAAM,KAAK,GAAG,QAAQ,KAAK;AAC7C,aAAK,GAAG,WAAW,KAAK;AACxB,aAAK,GAAG,WAAW,KAAK;AACxB,aAAK,GAAG,WAAW,KAAK;AACxB,aAAK,GAAG,WAAW,KAAK;AACxB,eAAO;;;ACnRf;;;;;;;;;;;;;;;;AA4BA,8BAA0B;;MAEtB;AACI,aAAK,WAAW;;aAEb;AACH,YAAI,0BAA0B,YAAY;AACtC,oCAA0B,WAAW,IAAI;;AAE7C,eAAO,0BAA0B;;aAQ9B;AACH,gBAAO,UAAU,MAAM,MAAM;AAC7B,YAAI,OAAO,SAAS;AAChB,mBAAS,OAAO,MAAM,GAAG,OAAO,QAAQ;;AAE5C,gBAAO,OAAO,SAAS,GAAG,MAAM;AAChC,yBAAiB,0BAA0B;AAC3C,gBAAO,SAAS,SAAS,WAAW,MAAM,MAAM,2DAA2D;AAC3G,iBAAS,SAAS,UAAU;;aAEzB;AACH,wBAAgB,KAAK,cAAc,SAAS;AAC5C,YAAI,WAAW;AACX,gBAAM,IAAI,MAAM,yCAAyC;;AAE7D,eAAO;;aAEJ;AACH,eAAO,OAAO,KAAK,KAAK,cAAc;;;AAW9C;AACI,UAAI,IAAI,QAAQ,uBAAuB;AACnC,cAAM,IAAI,MAAM,6EAET,0BAA0B,aAAa,KAAK;;AAEvD,aAAO;QACH,QAAQ,IAAI,MAAM,mBAAmB;QACrC,MAAM,IAAI,MAAM,mBAAmB;;;AAG3C,yEAAqE;AACjE,cAAO,cAAc,SAAS,MAAM,wCAAwC;AAC5E,2BAAqB,iBAAiB,gBAAgB;AACtD,cAAO,aAAa,SAAS,GAAG,MAAM,kEAAkE;AACxG,cAAO,aAAa,SAAS,GAAG,MAAM,yCAAyC,aAAa,wCACxD;AACpC,0BAAoB,aAAa;AACjC,2BAAqB,iBAAiB,gBAAgB;AACtD,cAAO,aAAa,SAAS,GAAG,MAAM,uEAC3B;AACX,cAAO,aAAa,SAAS,GAAG,MAAM,yCAAyC,aAAa,6CACnD;AACzC,0BAAoB,aAAa;AACjC,2BAAqB,SAAS,WAAW;AACzC,yBAAmB,SAAS,WAAW;AACvC,yBAAmB,iBAAiB,SAAS,WAAW;AACxD,6BAAuB,MAAM,YAAY;AAIzC,UAAI,gBAAgB;AAChB,cAAM,0BAA0B,WAAW,cACtC,YAAY;;AAErB,yBAAmB,MAAM,YAAY,KAAK;AAI1C,UAAI,gBAAgB,CAAC;AACjB,cAAM,0BAA0B,WAAW,cACtC,YAAY;;AAErB,aAAO,WAAW;;AAqCtB;AACI,sBAAgB,0BAA0B;AAC1C,kBAAY;AACZ,2BAAqB;AACjB,0BAAkB,MAAM,0BAA0B,WAAW,QAAQ;AACrE,2BAAmB;AACf,sBAAY,SAAS,oBAAoB;AACzC,cAAI,OAAO,UAAU;;;AAG7B,aAAO;;AAmCX;AACI,4BAAsB,SAAS;AAC/B,sBAAgB,0BAA0B,WAAW,cAAc;AACnE,aAAO,QAAQ,YAAY,cAAc;;AAiD7C;AACI,2BAAqB;AACrB,aAAO,mBAAmB,WAAW,SAAS;;AAgDlD;AACI,2BAAqB;AACrB,aAAO,mBAAmB,WAAW,SAAS;;AC/SlD;;;;;;;;;;;;;;;;;MAsBI;AACI,eAAO,MAAM,MAAM;;MAEvB;AACI,eAAO,YAAY;;MAEvB;AACI,YAAI,aAAa,WAAW,aAAa;AACrC,gBAAM,IAAI,MAAM,kDAAkD;;AAEtE,YAAI,KAAK,eAAe;AACpB,eAAK,cAAc,IAAI;;AAE3B,eAAO,KAAK,YAAY,OAAO;;MAEnC;AACI,eAAO,IAAI,YAAY,UAAU,OAAO;;;AAGhD,QAAI,OAAM,IAAI;AACV,aAAM,YAAY,WAAW,IAAI;AAEjC;AACI,kCAA0B,gBAAgB,oBAAoB,YAAY,IAAI;;;AAKlF;AACI,kCAA0B,gBAAgB,iBAAiB,YAAY,IAAI;;;;ACnDnF;;;;;;;;;;;;;;;;AAkBO,yBAAqB;MAExB,aAAa,MAAM;;AAEvB;AAGO;AACH,oBAAc;;AAEX;AACH,oBAAc;;AAEX;AACH,aAAO;;;MAGP;AAEI,aAAK,OAAO;AAGZ,aAAK,cAAc,IAAI,KAAK,KAAK;;MAErC;AACI,YAAI,OAAM,OAAO,SAAS;AACtB,iBAAO,OAAM,OAAO,MAAM,MAAM;;AAEpC,YAAI,eAAe;AACf,wBAAc,aAAa;;AAE/B,eAAO,YAAY,MAAM;;MAE7B;AACI,sBAAa,QAAQ;AACrB,eAAO,MAAK,KAAK,MAAO,MAAK,KAAK;;MAEtC;AACI,YAAI,aAAa,WAAW,aAAa;AACrC,gBAAM,IAAI,MAAM,sDAAsD;;AAE1E,eAAO,KAAK,YAAY,OAAO;;MAEnC;AACI,YAAI,MAAM,WAAW;AACjB,iBAAO;;AAEX,eAAO,IAAI,KAAK,KAAK,YAAY,UAAU,OAAO;;;AAG1D,QAAI,OAAM,IAAI;AACV,aAAM,YAAY,QAAQ,IAAI;;ACrElC;;;;;;;;;;;;;;;;AA4CO,oCAA+B;AAClC,cAAQ,SAAS;AACjB,0CAAwC;AACxC,aAAO,IAAI,cAAa,OAAO,OAAO;;AC/C1C;;;;;;;;;;;;;;;;AAiCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AAEnC,UAAI,CAAC,cAAkB;AACnB,cAAM,IAAI,MAAM,mCAAmC;;AAEvD,UAAI,UAAU,YAAY,GAAG,UAAU,YACnC,UAAU,YAAY,GAAG,UAAU;AACnC,cAAM,IAAI,MAAM;;AAEpB,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE;AAChB,aAAO,QAAO,cAAc,cAAW,SAAQ,KAAK,IAAI,QAAQ,QAAQ,MAAiB,OAAM;;AAEvF,kBAAQ,IAAG,CAAE,OAAA;AC/CzB;;;;;;;;;;;;;;;;AAkCA;AACI,iBAAW,iBAAgB,GAAG,KAAK,SAAS;AAC5C,sBAAgB,MAAM,QAAO,qBAAqB,GAAG,QAAQ,GAAG,OAAO,GAAG;AAC1E,qBAAe,CAAE,GAAG;AAGpB,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB;;AAEtD,kBAAS,IAAG,CAAE;AC1C1B;;;;;;;;;;;;;;;;AA6BO,iCAA4B;AAC/B,cAAQ,IAAI,EAAE,SAAS;;AC9B3B;;;;;;;;;;;;;;;;AAmBA;AAYA,wBAAkB;MACd,QAAA;MACA,MAAA;MACA;MACA,OAAA;;AAEJ,iBAAa;ACrCb;;;;;;
;;;;;;;;;;AAwBA,qCAAiC;AACjC,wCAAoC;AACpC,+CAA2C;AAC3C;AACI,aAAO,IAAI,QAAQ,aAAW,WAAW,UAAU,KAAK;;;MAGxD;AACI,YAAI,CAAC,OAAM,QAAQ;AAGf,gBAAM,IAAI,MAAM;;AAGpB,YAAI,eAAe,WAAW,iBAAiB;AAC3C,2BAAiB,eAAe,MAAM,iBAAiB,WAAW;;AAEtE,YAAI,kBAAkB,QAAQ,eAAe,WAAW;AACpD,2BAAiB;;AAErB,aAAK,wBAAwB,iBAAiB;AAC9C,aAAK,qBACD,iBAAiB;;YAEnB;AACF,YAAI,OAAQ,aAAc;AACtB,gBAAM,IAAI,MAAM;;AAGpB,2BAAmB,OAAO,IAAI,gBAAgB,IAAI,KAAK,CAAC,eAAe,aAAa,CAAE,MAAM;AAC5F,YAAI,eAAe,yBAAyB;AACxC,gBAAM,IAAI,MAAM;;AAIhB,kCAAwB,CAAC;YACjB,OAAO,CAAC,OAAO,KAAK;YACpB,SAAS,eAAe;;AAEhC,iDAAuC;YACnC,eAAe,eAAe;YAC9B,QAAQ,eAAe;YACvB,aAAa,eAAe;YAC5B,aAAa,eAAe;YAC5B;;AAEJ,oDAA0C,OAAO,IAAI,gBAAgB,IAAI,KAAK,CAAC,KAAK,UAAU,kCAAkC,CAAE,MAAM;AAGxI,6BAAmB,KAAK,cAAc,OAAO,SAAS,cAAc,OAChE,KAAK;AACT,qBAAW,WAAW,KAAK;AAC3B,qBAAW,OAAO;AAIlB,gBAAM,MAAM,MAAM,WAAW,cAAc,IAAI,WAAW;AAC1D,cAAI,eAAe,cAAc;AAC7B,qCAAyB,KAAK,oBAAoB,OAC9C,SAAS,cAAc,OACvB,KAAK;AACT,6BAAiB,WAAW,KAAK;AACjC,6BAAiB,OAAO;AACxB,kBAAM,MAAM,MAAM,iBAAiB,cAAc,IAAI,WAAW;;AAEpE,iBAAO,CAAE,oBAAoB,6BAA6B;;;;AAItE,qBAAiB,aAAa;;MAE1B;AACI,YAAI,SAAS,QAAQ,MAAM,SAAS;AAChC,gBAAM,IAAI,MAAM,wEACI;;AAExB,aAAK,QAAQ;;YAEX;AACF,yBAAiB,KAAK,MAAM;AAC5B,4BAAoB,KAAK,MAAM,MAAM;AACrC,eAAO,IAAI,QAAQ;AACf,6BAAmB,IAAI;AACvB,qBAAW,SAAS;AAEhB,8BAAkB,KAAK,MAAM,MAAM,OAAO;AAC1C,kCAAsB,UAAU;AAChC,gBAAI,iBAAiB;AACjB,qBAAO,IAAI,MAAM,4CAA4C,SAAS;AACtE;;AAEJ,gBAAI,YAAY,WAAW;AACvB,sBAAQ,CAAE;;AAEd,oCAAwB,UAAU;AAClC,gBAAI,mBAAmB;AACnB,qBAAO,IAAI,MAAM,6CAA6C,SAAS;AACvE;;AAEJ;AACA;AACI,2BACI,KAAK,4BAA4B,iBAAiB;;AAGtD,qBAAO;AACP;;AAEJ,gCAAoB;AACpB,0BAAc;AACd,mCAAuB;AACvB,4BAAgB,QAAQ;AACpB,2BAAa,MAAM,QAAQ;AACvB,sBAAM,KAAK;AACX,+BAAe,KAAK;;AAExB,0BAAY,KAAK,GAAG,aAAa;;AAErC,4BAAgB,QAAQ;AACpB,2BAAa,MAAM,QAAQ;AACvB,yCAAyB,IAAI;AAC7B,iCAAiB,SAAS;AAEtB,qCAAmB,OAAM,OAAO;AAChC,iCAAc,MAAM,QAAQ;AAC5B,iCAAe,UAAS;AACxB,sBAAI,eAAe,QAAQ,UAAU;AACjC,4BAAQ;sBACJ;sBACA;sBACA,YAAY,wBAAwB;sBACpC,QAAQ,UAAU;sBAClB,aAAa,UAAU;sBACvB,aAAa,UAAU;sBACvB,qBAAqB,UAAU;;;;AAI3C,iCAAiB,UAAU,WAAS,OAAO,6CAA6C;AACxF,iCAAiB,kBAAkB,WAAW;;;;AAI1D,qBAAW,UAAU,WAAS,OAAO,sEACnB,SAAS;AAE3B,qBAAW,WAAW;;;MAM9B;AACI,0BAAkB;AAClB,0BAAkB,MAAM,IAAI,UAAQ,SAAS,KAAK;AAClD,2BAAmB;AACnB,4BAAoB;AAChB,gBAAM,MAAM,QAAQ;AAChB,iCAAqB,SAAS;AAC9B,gBAAI,UAAU,QAAQ,kBAAkB;AACpC,oBAAM,IAAI,MAAM,uDACR;;AAEZ,sBAAU,KAAK;AACf,gBAAI,UAAU,QAAQ,kBAAkB;AACpC,oBAAM,IAAI,MAAM,8BAA8B;;AAG9C,yBAAW,QAAQ,MAAM,UAAU,QAAQ;;;;AAIvD,YAAI,UAAU,WAAW,MAAM;AAC3B,gBAAM,IAAI,MAAM,wDACR,UAAU,oDACV,MAAM;;AAElB,eAAO;;;AAGR,mCAA+B;AAClC,UAAI,CAAC,OAAM,QAAQ;AACf,eAAO;;AAGP,YAAI,CAAC,MAAM,QAAQ,QAAQ,IAAI,WAAW,iBAAiB;AACvD,iBAAO,iBAAiB,IAAI,MAAM,iBAAiB,WAAW;;AAG9D,iBAAO;;;;AAInB,qBAAiB,mBAAmB;AAwC7B,+CAA2C;AAC9C,aAAO,IAAI,iBAAiB;;AA0CzB;AACH,aAAO,IAAI,aAAa;;AC7S5B;;;;;;;;;;;;;;;;AAyBO;AACH,oBAAc;AACd,sBAAgB,iBAAiB,OAAO,IAAI;AAC5C,oBAAc,eAAe,OAAO,IAAI;AACxC,oBAAc,eAAe;AAC7B,4BAAsB;AACtB,8BAAwB;AACpB,gBAAQ,KAAK;AACT,2BAAiB,gBACb,EAAE,kBAAkB,SAAS,SAAU,eAAc;AAEzD,qBAAW;AACX,iBAAO;;AAEX,eAAO;;AAEX;AACI,gBAAO,aAAY,QAAQ,MAAM,QAAQ,cAAa,UAAS,SAAS,GAAG,MAAM;;AAErF;AACI,gBAAO,kBAAiB,KAAK,kBAAiB,GAAG,MAAM,oEAC9B;AACzB,gBAAO,gBAAe,KAAK,gBAAe,GAAG,MAAM,kEAC5B;AACvB,gBAAO,gBAAe,gBAAe,MAAM,yEAClB,kCAClB;;AAEX,aAAO,QAAQ,IAAI,SAAS,IAAI;;ACrDpC;;;;;;;;;;;;;;;;AAgCO;AACH,UAAI,eAAe;AACf,sBAAc;;AAElB,wBAAkB,YAAY,aAAa,OAAO,OAAM,SAAS,QAC7D,YAAY;AAEhB,uBAAiB,UAAU,IAAI,cAAY,UAAU,UAAU,YAAY,aAAa,CAAE,UAAU;AACpG,iCAA2B;AAC3B,+BAAyB;AACzB,wBAAkB,YAAY,cAAc,OACxC,MAAM,QAAQ,IAAI,YAClB,MAAM,wBAAwB,UAAU,YAAY,YAAY,oBAAoB;AACxF,6BAAuB,UAAU,IAAI,cAAY,SAAS;AAC1D,kCAA4B;AAC5B,gCAA0B;AAC1B,sBAAgB,YAAY,cAAc,OACtC,MAAM,QAAQ,IAAI,kBAClB,MAAM,wBAAwB,gBAAgB,YAAY,YAAY,qBAAqB;AAC/F,aAAO
;;AAWJ,0DAAsD;AAMzD,2BAAqB,eAAe,yBAAyB,WAAW,CAAE;AAC1E,2BAAoB,qBAAqB;AACzC,aAAO,aAAY,UAAU,gBAAgB;;AA0B1C;AACH,aAAO,kCAAkC;AAGrC,uCAA+B,SAAS,IAAI,MAAM;AAClD,oCAA4B;AAC5B,6BAAqB,eAAe,OAAO,YAAY,IAAI,MAAM,SAAS;AAC1E,uCAA+B;AAC/B,iBAAS,QAAQ;AACb,4BAAkB;AAClB,8BAAoB,QAAQ,QAAQ;AAChC,6BAAkB,kBAAkB,eAChC,aAAa,aAAa,QAC1B,aAAa;AACjB,iCAAqB,qBAAqB,YACtC,eAAmB,aAAa;AACpC,gDAAoC;AAChC,qCAAuB,cAAc;AACrC,kBAAI,oBAAoB,eAAe;AACnC,oCAAoB,cAAc;;AAEtC,kCAAoB,YAAY,KAAK;gBACjC,eAAe;gBACf;gBACA,WAAW;;;AAGnB,gBAAI,eAAe;AACf,0BAAY,QAAQ;AAChB,oBAAI,eAAe,aAAa;AAC5B;AACA,+BAAa,eAAe;;;;AAKpC;;AAEJ,mCAAuB,KAAK,aAAa;AACzC,2BAAe;;;AAGvB,YAAI,CAAC,aAAa,MAAM,WAAS;AAC7B,kCAAwB,YAAY,OAAO,UAAU,CAAC,aAAa;AACnE,gBAAM,IAAI,MAAM,kDACT,gBAAgB,KAAK;wCAErB,uBAAuB,KAAK;;AAIvC,oCAA4B,uBAAuB,OAAO;AACtD,cAAI;AACA,wBAAY,KAAK;;AAErB,iBAAO;WACR;AACH,0BAAkB;AAClB,4BAAoB,QAAQ;AACxB,mBAAS,GAAG,MAAM,QAAQ;AACtB,6BAAiB,iBACZ,EAAC,eAAe,SAAS,OAAO,MAAM,MAAM;AACjD,sBAAU,KAAK;;;AAGvB,wBAAgB,MAAM,qBAAqB;AAC3C,iCAAyB;AACzB,gCAAwB;AACxB,4BAAoB,QAAQ;AACxB,6BAAmB,SAAS,GAAG,MAAM;AACrC,2BAAiB;AACjB,wBAAa,GAAG,KAAI,YAAY;AAC5B,0BAAc,QAAQ,oBAAoB,IAAG;;AAGjD,8BAAoB,IAAI,YAAY;AACpC,kCAAwB,IAAI,WAAW;AACvC,kCAAwB;AACxB,wBAAa,GAAG,KAAI,YAAY;AAC5B,4BAAe,IAAI,WAAW,QAAQ,oBAAoB;AAC1D,4BAAgB,IAAI,SAAQ;AAC5B,iCAAqB,QAAO;;AAEhC,iCAAuB,oBAAoB;AAC3C,yBAAe,QAAQ;AACnB,+BAAmB,YAAY,MAAM,aAAa,aAAa,aAAa,cAAc,aAAa;AACvG,oCAAwB,cAAc,YAAY,CAAC,aAAa;AAChE,+BAAmB;AACf,+BAAiB,QAAQ,gBAAgB;;;AAGjD,+BAAqB;;AAEzB,eAAO;;;AC7Lf;;;;;;;;;;;;;;;;AA0BA,mCAA+B;AAC/B,sBAAkB;;MAEd;AACI,aAAK,iBAAiB;AACtB,YAAI,eAAe;AACf,wBAAc;;AAElB,aAAK,mBAAmB,YAAY;AACpC,aAAK,aAAa,YAAY;AAC9B,aAAK,qBAAqB,YAAY;AACtC,YAAI,YAAY,aAAa;AACzB,kBAAO,OAAO,YAAY,cAAc,YAAY,MAAM;AAG1D,eAAK,QAAQ,YAAY;;AAGzB,eAAK,QAAQ,OAAM,SAAS;;AAEhC,gBAAO,QAAQ,QAAQ,KAAK,SAAS,GAAG,MAAM;AAE9C,YAAI,MAAM,QAAQ;AACd,kBAAO,KAAK,WAAW,GAAG,MAAM,iEACP,KAAK;;AAElC,aAAK,OAAO;AACZ,YAAI,YAAY,eAAe,QAC3B,YAAY,YAAY,QAAQ;AAChC,gBAAM,IAAI,MAAM;;AAEpB,aAAK,cAAc,YAAY,eAAe;;YAE5C;AACF,YAAI,eAAe,yBAAyB;AACxC,gBAAM,IAAI,MAAM;;AAGpB,sBAAa,OAAO,OAAO,CAAE,QAAQ,KAAK,iBAAkB,KAAK;AACjE,cAAK,OAAO,IAAI;AAChB,gCAAwB,CAAC;UACjB,OAAO,CAAC;UACR,SAAS,eAAe;;AAEhC,+CAAuC;UACnC,eAAe,eAAe;UAC9B,QAAQ,eAAe;UACvB,aAAa,eAAe;UAC5B,aAAa,eAAe;UAC5B,qBAAqB,eAAe;UACpC;;AAEJ,cAAK,KAAK,OAAO,cAAc,IAAI,KAAK,CAAC,KAAK,UAAU,kCAAkC,CAAE,MAAM,aAAc;AAChH,YAAI,eAAe,cAAc;AAC7B,gBAAK,KAAK,OAAO,qBAAqB,IAAI,KAAK,CAAC,eAAe,aAAa,CAAE,MAAM,0BAA2B;;AAEnH,yBAAiB,MAAM,KAAK,MAAM,KAAK,MAAM;AAC7C,YAAI,SAAS;AACT,iBAAO;YACH,oBAAoB,6BAA6B;YACjD,WAAW,CAAC;;;AAIhB,gBAAM,IAAI,MAAM,gEACT,SAAS;;;YAWlB;AACF,mCAA2B,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK;AAC5D,YAAI,CAAC,mBAAmB;AACpB,gBAAM,IAAI,MAAM,cAAc,KAAK,gCAC5B,mBAAmB;;AAG9B;AACA;AACI,wBAAc,MAAM,mBAAmB;;AAGvC,wBAAc,+CAA+C,KAAK;AAGlE,cAAI,KAAK,KAAK,SAAS;AACnB,uBAAW;;AAQX,uBAAW;;AAGf,gBAAM,IAAI,MAAM;;AAEpB,8BAAsB,YAAY;AAClC,gCAAwB,YAAY;AACpC,4BAAoB,YAAY;AAChC,4BAAoB,YAAY;AAChC,uBAAe,YAAY;AAC3B,oCAA4B,YAAY;AAExC,YAAI,iBAAiB,QAAQ,mBAAmB;AAC5C,gBAAM,IAAI,MAAM,2BAA2B,KAAK;;AAGpD;AACA;AACA,YAAI,mBAAmB;AACnB,0BAAgB,MAAM,KAAK,YAAY;AACvC,WAAC,aAAa,cAAc;;AAEhC,0BAAkB;UACd;UACA;UACA;UACA;UACA;UACA;UACA;;AAEJ,4BAAoB,YAAY;AAChC,YAAI;AACA,oBAAU,mBAAmB;;AAEjC,eAAO;;YAEL;AACF,2BAAmB,MAAM,QAAQ,KAAK,QAAQ,KAAK,KAAK,KAAK,KAAK;AAClE,iCAAyB,SAAS;AAClC,2BAAmB,KAAK,oBAAoB;AAC5C,4BAAoB;AACpB,4BAAoB;AAChB,sBAAY,KAAK,GAAG,MAAM;;AAE9B,0BAAkB;AAClB,4BAAoB;AACpB,mCAA2B;AACvB,6BAAmB,aAAa;AAC5B,gBAAI,KAAK,sBAAsB;AAC3B,0BAAY,KAAK,KAAK,mBAAmB;;AAGzC,wBAAU,KAAK,aAAa,OAAO;;;;AAI/C,YAAI,KAAK;AACL,oBAAU,KAAK,GAAG,MAAM,QAAQ,IAAI;;AAExC,wBAAgB,MAAM,yBAAyB,WAAW;UACtD,aAAa,KAAK;UAClB,WAAW,KAAK;UAChB,YAAY,KAAK;;AAErB,eAAO,CAAC,a
AAa,wBAAwB;;;AAGrD,gBAAY,mBAAmB;AAYxB;AACH,wBAAkB,IAAI,YAAY;AAClC,8BAAwB,IAAI,YAAY;AACxC,qBAAe,IAAI,UAAU,GAAG;AAChC,qBAAe,kBAAkB,YAAY,IAAI,UAAU,mBAAmB;AAC9E,aAAO,CAAC,SAAS,KAAK;;AAEnB;AACH,aAAO,IAAI,MAAM,YAAY,qBAAqB;;AAE/C,uBAAmB;AACtB,UAAI,OAAO,UAAU,eAChB,gBAAe,QAAQ,YAAY,aAAa;AAIjD,eAAO;;AAGP,qBAAa;AACb,YAAI,MAAM,QAAQ;AACd,mBAAS,IAAI,MAAM,aAAW,aAAa;;AAG3C,mBAAS,aAAa;;AAE1B,YAAI;AACA,iBAAO,KAAK,KAAK;;;AAGzB,aAAO;;AAEX,qBAAiB,mBAAmB;AACpC,qBAAiB,mBAAmB;AAuE7B;AACH,aAAO,IAAI,YAAY,MAAM;;AAO1B;AACH,aAAO,KAAK,MAAM;;AC/TtB;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,iBAAiB;;YAEpB;AACF,eAAO,KAAK;;;;MAIhB;AACI,aAAK,cAAc;;YAEjB;AACF,eAAO,KAAK,YAAY;;;AAwBzB;AACH,UAAI,UAAU,WAAW;AACrB,iCAAyB,eAAe,iBAAiB,QACrD,eAAe,eAAe;AAClC,YAAI;AACA,iBAAO,IAAI,kBAAkB;;AAK7B,kBAAQ,KAAK;AAIb,iBAAO,IAAI,kBAAkB,CAAE,eAAe;;;AAMlD,gBAAQ,KAAK;AAIb,eAAO,IAAI,kBAAkB;UACzB,eAAe;UACf;UACA;UACA;;;;AAmBL;AACH,aAAO,IAAI,iBAAiB;;ACrGhC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACAA;;;;;;;;;;;;;;;;AA+CA;AACI,iBAAW,iBAAgB,GAAG,KAAK,WAAW;AAC9C,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE;AAChB,sBAAgB;AACZ,gBAAQ,wBAA4B,OAAO,GAAG;AAC9C,gBAAY,GAAG,SAAS,eAAmB,QAAQ,MAAM;AACzD,aAAK,CAAC;AACN,eAAO,SAAQ,QAAQ,IAAI;;AAE/B,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,UAAS;;AAE/D,qBAAW,IAAG,CAAE,UAAA;AC3D5B;;;;;;;;;;;;;;;;AAuCA,wCAAoC,oBAAoB;AACpD,eAAS,iBAAgB,GAAG,KAAK;AACjC,eAAS,iBAAgB,GAAG,KAAK;AACjC,OAAC,IAAI,MAAM,gBAAe,IAAI;AAC9B,sBAAgB;AACZ,aAAK,CAAC,IAAI;AACV,4BAAoB,aAAa,GAAG,MAAM,GAAG,OAAO,KAAK,GAAG,MAAM,GAAG,OAAO;AAC5E,4BAAoB,aAAa,GAAG,MAAM,GAAG,OAAO,KAAK,GAAG,MAAM,GAAG,OAAO;AAC5E,4BAAoB,aAAa,GAAG,MAAM,GAAG,OAAO,KAAK,GAAG,MAAM,GAAG,OAAO;AAC5E,4BAAoB,aAAa,GAAG,MAAM,GAAG,OAAO,KAAK,GAAG,MAAM,GAAG,OAAO;AAC5E,2BAAmB,GAAG,MAAM,MAAM,GAAG;AACrC,2BAAmB,GAAG,MAAM,MAAM,GAAG;AACrC,0BAAkB,eAAmB;AACrC,0BAAkB,eAAmB;AACrC,oCAA4B,cAAc,aAAa,cAAc,KAAK,cAAc;AACxF,gBAAY,GAAG,QAAQ,KAAK,GAAG,QAAQ,KAAK,qBAAqB,MAAM,uJAE3C,oBAAoB;AAChD,gBAAY,gBAAgB,aAAa,MAAM,kCAAkC,qBAC1E,uCAAuC,GAAG,aAC1C,GAAG,wBAAwB,6BACX;AACvB,kCAA0B,YAAY,YAAY,aAAa;AAC/D,yBAAiB,kBAAkB,OAAO,CAAC,aAAa;AACxD,oBAAY,aACR,SAAQ,IAAI,CAAC,WAAW,aAAa,gBACrC,SAAQ,IAAI,CAAC,WAAW,aAAa;AACzC,oBAAY,aACR,SAAQ,IAAI,CAAC,WAAW,aAAa,gBACrC,SAAQ,IAAI,CAAC,WAAW,aAAa;AACzC,sBAAc,SAAQ,YAAY,KAAK,KAAK,YAAY;AACxD,eAAO,SAAQ,OAAO;;AAE1B,qBAAe,CAAE,GAAG,IAAI,GAAG;AAC3B,oBAAc,CAAE,YAAY;AAC5B,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,cAAa;;AAEnE,mBAAU,IAAG,CAAE;AC5E3B;;;;;;;;;;;;;;;;AAwCA,+CAA2C,cAAc;AACrD,UAAI,QAAQ;AACR,cAAM,IAAI,MAAM,iDAAiD;;AAErE,uBAAiB,iBAAgB,SAAS,WAAW,UAAU;AAC/D,uBAAiB,CAAC,GAAG,SAAS,OAAO;AACrC,sBAAgB;AACZ,aAAK,CAAC;AACN,eAAO,SAAQ,SAAQ,OAAO,SAAQ,UAAU,CAAC,SAAS,QAAQ,OAAO,SAAS,WAAW;;AAEjG,qBAAe,CAAE,SAAS;AAC1B,oBAAc,CAAE,OAAO,SAAS;AAChC,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,SAAQ;;AAE9D,oBAAU,IAAG,CAAE;ACtD3B;;;;;;;;;;;;;;;;AAwCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,UAAI,QAAQ;AACR,eAAO,GAAG,MAAM,IAAI,UAAU,GAAG;;AAErC,cAAY,GAAG,SAAS,KAAK,QAAQ,MAAM,qCAAqC,GAAG,kCAClD;AACjC,WAAK,QAAQ;AACT,gBAAY,QAAQ,KAAK,OAAO,GAAG,MAAM,MAAM,+CAA+C,GAAG,OAAO,aACxF;;AAEpB,UAAI,GAAG,QAAQ;AACX,eAAO,GAAG;;AAEd,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE;AAChB,aAAO,QAAO,cAAc,cAAW,SAAQ,UAAU,IAAI,OAAO,QAAQ,MAAqB,YAAW;;AAEpG,uBAAa,IAAG,CAAE,YAAA;AC1D9B;;;;;;;;;;;;;;;;AAoDO;AACH,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,2BAAqB,iBAAgB,aAAa,eAAe;AACjE,cAAY,cAAc,QAAQ,aAAa,KAAK,OAAO,UAAU,aAAa,MAAM,+DACzE;AACf,cAAY,QAAQ,SAAS,GAAG,MAAM,gDAAgD,QAAQ;AAC9F,cAAY,aAAa,SAAS,GAAG,MAAM,qDAC5B,aAAa;AAC5B,cAAY,QAAQ,MAAM,OAAO,aAAa,MAAM,IAAI,MAAM,uCACvD,QAAQ,MAAM,UAAU,aAAa,MAAM;AAElD,cAAY,aAAa,KAAK,OAAO,UAAU,aAAa,MAAM,4DAC3D;AAGP,2BAAqB,QAAO,MAAK,SAAS,UAAU;AACpD,gCAA0B,QAAO,MAAK,cAAc,UAAU;AAC9D,4BAAsB,WAAU;AAChC,sBAAgB,OAAO,eAAe;AACtC,aAA
O,MAAK,SAAS;;AAElB,4BAAwB,IAAG,CAAE;ACzEpC;;;;;;;;;;;;;;;;;;;;ACAA;;;;;;;;;;;;;;;;AA0CO;AACH,qBAAc;AACd,UAAI,SAAS,QAAQ,MAAM,WAAW;AAClC,cAAM,IAAI,MAAM;;AAEpB,4BAAsB,YAAW,QAAQ;AACzC,UAAI,cAAc,WAAW,KAAK,cAAc,WAAW;AACvD,cAAM,IAAI,MAAM;;AAEpB,UAAI,cAAc,WAAW,KAAK,SAAS;AACvC,cAAM,IAAI,MAAM;;AAGpB,aAAO,YAAW,QAAQ,OAAO,eAAe;;ACvDpD;;;;;;;;;;;;;;;;AAwBA;AAwBA,+CAA2C;AAEvC,UAAI,cAAc;AACd,cAAM,IAAI,MAAM;;AAEpB,UAAI,UAAU;AACV,cAAM,IAAI,MAAM;;AAEpB,wBAAkB;AAClB,wBAAkB;AAClB,oBAAc;AACd,oBAAc;AACd,yBAAmB;AACnB,UAAI,OAAO,gBAAgB;AACvB,sBAAc;iBAET,OAAQ,cAAe,eAAe,kBAAkB;AAC7D,sBAAc;iBAET,OAAQ,qBAAsB,eACnC,kBAAkB;AAClB,kBAAU;iBAEL,OAAQ,qBAAsB,eACnC,kBAAkB;AAClB,kBAAU;iBAGL,OAAO,cAAc;AAC1B,uBAAe;;AAGf,cAAM,IAAI,MAAM,qPAID,OAAO,YAAY;;AAEtC,UAAI;AACA,8CAAsC;AACtC,YAAI,WACA,OAAO,aACH;AACJ,gBAAM,IAAI,MAAM;;;AAMxB,qBAAe,WAAU,YAAY,QAAO;AAC5C,UAAI,UAAU;AACV,uBAAe,CAAE;AACjB,sBAAc,CAAE;AAChB,eAAO,QAAO,UAAU,YAAY,QAAQ;;AAEhD,8BAAwB,UACpB;QACI,OAAO;QACP,OAAO;UAEX,CAAC,OAAO,OAAO,OAAO;AAC1B;AACA,UAAI;AACA,eAEI,OAAO,WAAW,MAAM,aAAa,GAAG,GAAG,OAAO,QAAQ;iBAEzD,eAAe;AACpB,eAAO,OAAO;iBAET,WAAW;AAChB,YAAI,uBAAuB;AACvB,gCAAsB,SAAS,cAAc,UAAU,WAAW;;AAEtE,4BAAoB,OAAO,QAAQ;AACnC,4BAAoB,OAAO,SAAS;AACpC,4BAAoB,UAAU,QAAQ,GAAG,GAAG,OAAO;AACnD,eAAO,oBAAoB,aAAa,GAAG,GAAG,OAAO,QAAQ;;AAEjE;AACA,UAAI,gBAAgB;AAChB,iBAAS,IAAI,WAAW;;AAGxB,0BAAkB,QAAQ;AAC1B,iBAAS,IAAI,WAAW,YAAY;AACpC,qBAAa,GAAG,IAAI,WAAW;AAC3B,6BAAmB,GAAG,UAAU,aAAa,EAAE;AAC3C,mBAAO,IAAI,cAAc,WAAW,KAAK,IAAI,IAAI;;;;AAI7D,uBAAiB,CAAC,QAAQ,OAAO;AACjC,aAAO,SAAS,QAAQ,UAAU;;AAqB/B;AACH,iBAAW,iBAAgB,KAAK,OAAO;AACvC,UAAI,CAAE,gBAAe;AAEjB,kCAA0B;AAC1B,eAAO,MAAK,mBAAmB;AAC/B,0BAAkB;;AAEtB,UAAI,KAAK,SAAS,KAAK,KAAK,SAAS;AACjC,cAAM,IAAI,MAAM,wDAAwD,KAAK;;AAEjF,8BAAwB,KAAK,MAAM,MAAM,GAAG;AAC5C,oBAAc,KAAK,SAAS,IAAI,IAAI,KAAK,MAAM;AAC/C,UAAI,QAAQ,KAAK,UAAU;AACvB,cAAM,IAAI,MAAM,0DACS;;AAE7B,UAAI,KAAK,UAAU,aAAa,KAAK,UAAU;AAC3C,cAAM,IAAI,MAAM,kCAAkC,KAAK;;AAG3D,oBAAa,MAAM,KAAK;AACxB,yBAAmB,KAAK,UAAU,YAAY,MAAM;AACpD,oBAAc,IAAI,kBAAkB,QAAQ,SAAS;AACrD,mBAAa,GAAG,IAAI,SAAS,OAAO,EAAE;AAClC,qBAAa,CAAC,GAAG,GAAG,GAAG;AACvB,qBAAa,GAAG,IAAI,OAAO;AACvB,wBAAc,MAAK,IAAI,QAAQ;AAC/B,cAAI,KAAK,UAAU;AACf,gBAAI,QAAQ,KAAK,QAAQ;AACrB,oBAAM,IAAI,MAAM,mFACqB;;qBAGpC,KAAK,UAAU;AACpB,gBAAI,QAAQ,KAAK,QAAQ;AACrB,oBAAM,IAAI,MAAM,mFACuB;;;AAG/C,cAAI,UAAU;AACV,iBAAK,KAAK,QAAQ;AAClB,iBAAK,KAAK,QAAQ;AAClB,iBAAK,KAAK,QAAQ;;AAGlB,iBAAK,KAAK,QAAQ;;;AAG1B,kBAAU,IAAI;AACd,cAAM,IAAI,KAAK,KAAK,MAAM,KAAK;AAC/B,cAAM,IAAI,KAAK,KAAK,MAAM,KAAK;AAC/B,cAAM,IAAI,KAAK,KAAK,MAAM,KAAK;AAC/B,cAAM,IAAI,KAAK,KAAK,MAAM,KAAK;;AAEnC,UAAI,UAAU;AACV,eAAO,QAAQ;AACf,eAAO,SAAS;AAChB,oBAAY,OAAO,WAAW;AAC9B,0BAAkB,IAAI,UAAU,OAAO,OAAO;AAC9C,YAAI,aAAa,WAAW,GAAG;;AAEnC,UAAI,SAAS;AACT,aAAK;;AAET,aAAO;;AAEJ,uBAAmB,IAAG,CAAE;;;;;;AC5NxB;AACH,UAAI,QAAO,OAAO;AACd,cAAM,IAAI,MAAM,4EACS,QAAO;;AAEpC,UAAI,QAAQ,OAAO;AACf,cAAM,IAAI,MAAM,8EACS,QAAQ;;AAErC,UAAI,QAAQ,UAAU;AAClB,cAAM,IAAI,MAAM,yEACU,QAAQ;;AAEtC,UAAI,QAAQ,MAAM,QAAQ,OAAO,KAAK,QAAO;AACzC,cAAM,IAAI,MAAM,iEACT,QAAQ,MAAM,QAAQ,OAAO,UAAU,QAAO;;AAEzD,UAAI,QAAO,SAAS;AAChB,cAAM,IAAI,MAAM,mEACK,QAAO;;AAEhC,2BAAqB,QAAQ;AAC7B,wBAAkB,aAAa,aAAa,SAAS;AAGrD,oBAAc;AACd,mBAAa,GAAG,IAAI,aAAa,SAAS,GAAG,EAAE;AAC3C,mBAAW,aAAa;;AAE5B,yBAAmB,QAAO;AAC1B,0BAAoB,aAAa;AACjC,kBAAY;AACZ,sBAAgB;AAChB,mBAAa,WAAW,IAAI,QAAO,MAAM,EAAE;AACvC,qBAAa,WAAW;AACxB,oBAAY,KAAK,WAAW;;AAEhC,sBAAgB;QAAC,GAAG,gBAAe,QAAO,OAAO,IAAI,YAAU,SAAS;QACpE;QAAG,MAAM,GAAG;AAChB,aAAO,CAAC,aAAa,SAAS,WAAW;;;;;;ACzCtC;AACH,uBAAkB,QAAQ,OAAO,IAAK,QAAQ,MAAM,QAAQ,OAAO,KAAK;AACxE,uBAAkB,QAAQ,OAAO,IAAK,QAAQ,OAAO,IAAI;AACzD,yBAAmB,6FACyB,QAAQ,yBAC5B,QAAQ,iBAAiB,oBAC9B,2BAA2B;AAC9C,UAAI,
QAAQ,OAAO;AACf,cAAM,IAAI,MAAM,aAAa,kBAAkB;;AAEnD,UAAI,MAAM,SAAS,WAAY,SAAQ,OAAO;AAC1C,cAAM,IAAI,MAAM,aACZ,0BAA0B,WAAY,SAAQ,OAAO;;AAE7D,UAAI,QAAQ,SAAS,WAAW,MAAM,SAAS;AAC3C,cAAM,IAAI,MAAM,aAAa,mBAAmB,WAAW,MAAM,SAAS;;AAE9E,mBAAa,GAAG,IAAI,UAAU,EAAE;AAC5B,YAAI,QAAQ,MAAM,OAAO,QAAQ,MAAM;AACnC,gBAAM,IAAI,MAAM,aACZ,kBAAkB,OAAO,QAAQ,MAAM,wBAAwB,OAAO,QAAQ,MAAM;;;AAGhG,mBAAa,GAAG,IAAI,QAAQ,OAAO,UAAU,EAAE;AAC3C,YAAI,QAAQ,MAAM,IAAI,cAAc,MAAM,IAAI;AAC1C,gBAAM,IAAI,MAAM,aACZ,kBAAkB,IAAI,cAAc,QAAQ,MAAM,IAAI,uBAAuB,IAAI,cAAc,MAAM,IAAI;;;;AAWlH;AACH,UAAI,QAAQ,OAAO;AACf,cAAM,IAAI,MAAM,+EACS,QAAQ;;AAErC,UAAI,QAAQ,OAAO;AACf,cAAM,IAAI,MAAM,+EACS,QAAQ;;AAErC,UAAI,QAAQ,UAAU;AAClB,cAAM,IAAI,MAAM,0DAA0D,QAAQ;;AAEtF,UAAI,MAAM,SAAS;AACf,cAAM,IAAI,MAAM,6DAA6D;;AAEjF,UAAI,MAAM,WAAW;AACjB,YAAI,QAAQ,SAAS;AACjB,gBAAM,IAAI,MAAM,sDAAsD,QAAQ;;AAElF,YAAI,QAAQ,SAAS;AACjB,gBAAM,IAAI,MAAM,sDAAsD,QAAQ;;;AAGtF,2BAAoB,OAAO,SAAS;;AAWjC;AAEH,0BAAoB,QAAQ,MAAM;AAClC,wBAAmB,cAAc,IAAK,QAAQ,MAAM,cAAc,KAAK;AAIvE,sBAAgB,MAAM;AACtB,sBAAgB;AAChB,mBAAa,WAAW,IAAI,SAAS,EAAE;AACnC,qBAAa,MAAM;;AAEvB,2BAAsB,YAAY,IAAK,IAAI;AAC3C,yBAAmB,eAAc,QAAQ,SAAS;AAClD,sBAAgB,CAAC,GAAG,gBAAe,MAAM,MAAM,GAAG,aAAa;AAC/D,yBAAmB,eAAc;AACjC,aAAO,CAAE,WAAW,YAAY,WAAW,SAAS;;;;;;;;AC9FxD;;;;;;;;;;;;;;;;AAiBO;AACH,wBAAkB,OAAM,MAAM;AAC9B,cAAY,cAAc,MAAM,QAAQ,MAAM,iBAAiB,+BAA+B,2CAC1D;AACpC,cAAY,cAAc,KAAK,QAAQ,MAAM,iBAAiB,8BAA8B,0CACxD;AACpC,mBAAa,GAAG,IAAI,WAAW,EAAE;AAC7B,gBAAY,MAAM,KAAK,KAAK,MAAM,OAAM,MAAM,IAAI,MAAM,iBAAiB,qBAAqB,aAAa,OACnG,MAAM,KAAK,KAAK,kCAAkC,OAAO,OAAM,MAAM;;;AAI9E;AACH,mBAAa;AACb,iBAAW;AACX,aAAO,OAAO;AACV,YAAI,OAAO;AACP,eAAK,KAAK;;AAEd,gBAAQ;AACR;;AAEJ,aAAO;;AAGJ;AACH,mBAAa;AACb,sBAAgB,GAAG,OAAO,MAAM,QAAQ;AACpC,aAAK,QAAQ,KAAK,KAAM,KAAI,QAAQ,MAAM,SAAS,QAAQ;;AAE/D,aAAO;;AAIJ;AACH,yBAAmB,CAAC,GAAG;AACvB,mBAAa,WAAW,QAAQ,IAAI,WAAW,QAAQ;AACnD,mBAAW,KAAK;;AAEpB,mBAAa,GAAG,IAAI,eAAe;AAC/B,YAAI,MAAM;AACN,qBAAW,0BAA0B;;AAGrC,qBAAW,OAAO,wBAAwB,GAAgC;AAC1E,qBAAW;;;AAGnB,aAAO;;AAEX;AACI,UAAI,kBAAkB;AAClB,eAAO;;AAEX,aAAO,iBAAkB,iBAAgB;;AAE7C;AACI,yBAAmB;AACnB,mBAAa,GAAG,IAAI,eAAe;AAC/B,mBAAW,KAAK,yBAAyB;;AAE7C,aAAO;;AAGJ;AACH,wBAAkB,WAAW;AAC7B,4BAAsB,IAAI,MAAM,4BAA4B,IAAI,MAAM,gCAAgC,IAAI,MAAM;AAChH,UAAI,aAAa,UAAU,sBAAsB;AAC7C,0BAAkB,aAAa;AAG/B,8BAAsB,sBAAsB;AAC5C,0BAAkB,4BAA2B,WAAW,WAAW,eAAe,OAAO;AACzF,wBAAgB,2BAA0B,SAAS,WAAW,eAAe,KAAK;AAClF,4BACI,uBAAsB,SAAS,WAAW,eAAe;;AAG7D,wBAAgB,GAAG,OAAO,WAAW;AACjC,0BAAgB,QAAQ,cAAa,WAAW,OAAO,SAAS,YAAY,MAAM;AAClF,wBAAc,QACV,aAAY,SAAS,KAAK,SAAS,YAAY,MAAM;AACzD,4BAAkB,QAAQ,gBAAe,SAAS,MAAM;;;AAGhE,aAAO;QACH,OAAO;QACP,KAAK;QACL,SAAS;;;AAKV;AACH,yBAAmB,CAAC,GAAG;AACvB,yBAAmB,eAAc,eAAe;AAChD,sBAAgB,GAAG,OAAO,WAAW,QAAQ;AACzC,YAAI,WAAW,QAAQ,QAAQ;AAC3B,qBAAW,QAAQ;;AAGnB,+BAAqB,iBAAgB,wBAAwB,eAAe;AAC5E,8BAAoB,cAAc;AAClC,cAAI,YAAY,KAAK;AACjB,4BAAgB;;AAEpB,qBAAW,QAAQ;;;AAG3B,aAAO;;AAIJ;AACH,yBAAmB,CAAC,GAAG;AACvB,yBAAmB,eAAc,eAAe;AAChD,sBAAgB,GAAG,OAAO,WAAW,QAAQ;AACzC,YAAI,WAAW,QAAQ,QAAQ;AAC3B,qBAAW,QAAQ,OAAO;;AAG1B,+BAAqB,iBAAgB,wBAAwB,eAAe;AAC5E,8BAAoB,YAAY;AAChC,cAAI,UAAU,KAAK;AACf,4BAAgB,OAAO;;AAE3B,qBAAW,QAAQ;;;AAG3B,mBAAa,GAAG,IAAI,WAAW,QAAQ;AAEnC,yBAAiB,WAAW;AAC5B,YAAI,WAAW,KAAK;AAChB,qBAAW,MAAM;;AAErB,mBAAW,KAAK,OAAW,GAAG,WAAW,IAAI,WAAW;;AAE5D,aAAO;;AAEJ;AACH,mBAAa,QAAQ;AACrB,UAAI,eAAgB,KAAK,QAAS,UAAU;AACxC,iBAAS;;AAEb,aAAO;;AAEJ;AAEH,kBAAY,aAAa;AACzB,qBAAe,QAAQ,SAAS;AAGhC,UAAI,YAAY,KAAK,QAAQ,eAAe,KAAK,QAAQ,SAAS;AAC9D,YAAI,SAAS;AAIT,kBAAQ,OAAO;;AAIf,kBAAQ,OAAO;;;AAIvB,uBAAiB,WAAW;AAC5B,UAAI,QAAQ;AACR,iBAAS;;AAGb,cAAQ,OAAW,GAAG,OAAO,WAAW;AACxC,aAAO;;AAEJ;AAEH,iBAAW,YAAY;AACvB,qBAAe,QAAQ,SAAS;AAGhC,UAAI,UAAW,KAAK,QAAS,eAAgB,KAAK,QAAS,QA
AQ;AAC/D,YAAI,SAAS;AAGT,iBAAO,OAAO;;AAId,iBAAO,OAAO;;;AAItB,uBAAiB,WAAW;AAC5B,UAAI,OAAO;AACP,gBAAQ;;AAKZ,UAAI,SAAS;AAET,eAAO,OAAW,GAAG,MAAM;;AAI3B,eAAO,OAAW,IAAI,MAAM,WAAW;;AAE3C,aAAO;;AAMJ;AAEH,4BAAsB,KAAK;AAC3B,mBAAa,GAAG,IAAI,KAAK,QAAQ;AAC7B,YAAI,KAAK,KAAK;AACV,4BAAkB;AAClB;;;AAGR,mBAAa,kBAAkB,GAAG,IAAI,KAAK,QAAQ;AAC/C,YAAI,MAAM,KAAK,KAAK,KAAK,OAAO,MAAM;AAClC,iBAAO;;;AAGf,aAAO;;AAEJ;AACH,uBAAiB,MAAM,SAAS,IAAI,MAAM,MAAM,SAAS,KAAK;AAC9D,mBAAa,GAAG,IAAI,MAAM,SAAS,GAAG;AAClC,sBAAc,MAAM,KAAK,QAAQ;;AAErC,aAAO;;AAEJ;AAEH;AACA,oBAAc,EAAE,MAAM;AACtB,UAAI,OAAO,UAAU;AACjB,iBAAS,CAAC,OAAO,GAAG,IAAI,MAAM,QAAQ,GAAG,KAAK;iBAEzC,MAAM,SAAS;AACpB,iBAAS,MAAM,OAAO,IAAI,MAAM,QAAQ,MAAM,QAAQ,KAAK;;AAG3D,iBAAS,MAAM;;AAEnB,aAAO,QAAQ;AACX,gBAAY,MAAM,IAAI,MAAM;;AAEhC;AACA,UAAI,QAAQ;AACR,gBAAQ,IAAI,MAAM,OAAO,KAAK;iBAEzB,OAAO,SAAS;AACrB,gBAAQ,CAAC,MAAM,GAAG,IAAI,MAAM,QAAQ,GAAG,KAAK;iBAEvC,KAAK,SAAS;AACnB,gBAAQ,KAAK,OAAO,IAAI,MAAM,QAAQ,KAAK,QAAQ,KAAK;;AAGxD,gBAAQ;;AAEZ,cAAQ,MAAM,IAAI;AACd,YAAI,KAAK;AACL,iBAAO;;AAGP,kBAAY,MAAM,IAAI,MAAM,qDACrB,mCAAmC;AAC1C,iBAAO,EAAE,MAAM,KAAK,OAAO;;;AAGnC,aAAO,CAAC,QAAQ;;;;;;;;;;;;;;;;;;ACnSpB;;;;;;;;;;;;;;;;;MAmCI;AACI,eAAO,KAAK,YACP;;aAWF;AACH,eAAO,IAAI,IAAI;;;;MAWnB;AACI,aAAK,eAAe;;aAKjB;AACH,YAAI,iBAAiB,YAAY;AAC7B,2BAAiB,WAAW,IAAI;;AAEpC,eAAO,iBAAiB;;aAKrB;AACH,yBAAiB,SAAS,aAAa,IAAI,aACvC,CAAC,KAAK,IAAI;;;AA2Bf;AACH,cAAO,IAAI,aAAa,MAAM,MAAM;AAEpC,cAAO,OAAO,IAAI,cAAc,UAAU,MAAM,wDAC5C,OAAO,IAAI;AACf,cAAO,IAAI,UAAU,SAAS,GAAG,MAAM;AAEvC,uBAAiB,SAAS;;;;;;;;AC/G9B;;;;;;;;;;;;;;;;AAmBA,iCAA6B;AACtB,iCAA6B;AAC7B;AACH,UAAI,YAAW;AACX,mBAAU;;AAEd,aAAO,sBAAsB,QAAQ,UAAU,UAAU,SAAS,GAAG,GAAG;;AAErE;AACH,aAAO,QAAO,QAAQ,qBAAqB,KAAK,uBAC5C;;AAER;AACI,2BAAqB;AACrB,UAAI,cAAa,WAAW,cAAa;AACrC,yBAAiB;;AAErB,UAAI,cAAa,WAAW,cAAa;AACrC,yBAAiB;;AAErB,UAAI;AACA,sBAAc,OAAO,YAAY;AACjC,sBAAc,SAAS,YAAY;AACnC,YAAI,UAAU;AACV,gBAAM,IAAI,MAAM,yCAAyC,oBACxC;;;AAGzB,UAAI,MAAM,QAAQ,WAAW,MAAM,QAAQ;AACvC,4BAAoB,YAAW;AAC/B,8BAAsB,YAAW;AACjC,YAAI,CAAC,aAAY,aAAa;AAC1B,gBAAM,IAAI,MAAM,0CACA,4BAA4B;;;AAGpD,yBAAmB,cAAa,UAAU,SAAS,SAAQ;AAC3D,2BAAqB,cAAa,YAC9B,WACA,SAAQ;AACZ,UAAI,WAAW,WAAW,aAAa;AACnC,cAAM,IAAI,MAAM,yCAAyC,WAAW,uBACnD,aAAa;YACb;YACA;;AAErB,mBAAa,GAAG,IAAI,aAAa,QAAQ,EAAE;AACvC,kBAAU,WAAW;AACrB,kBAAU,aAAa;AACvB,YAAI,CAAC,UAAU,GAAG;AACd,gBAAM,IAAI,MAAM,yBAAyB,QAAQ,eAAe,QAAQ;YACvD;YACA;;;;AAItB;AACH,WAAK,KAAK,MAAM,KAAK,QAAQ,MAAM;;AAEhC;AACH,mBAAY,OAAO,aAAa,YAAY,OAAO,aAAa,YAC5D,OAAO,aAAa,YACpB,CAAC,YACD;AACJ,UAAI,UAAS,WAAW,UAAS,OAAO,OACpC,UAAS,aAAa,UAAS,SAAS;AAExC,eAAO,sBAAsB,QAAQ,MAAK,UAAU,KAAK;;AAE7D,aAAO,sBAAsB,QAAQ,UAAU,UAAU,SAAS,GAAG,GAAG;;AAErE;AACH,UAAI,YAAW;AACX,mBAAU;;AAEd,UAAI,CAAC,SAAS,GAAG,GAAG;AAChB,cAAM,IAAI,MAAM,8BAA8B,mBAAmB;;;AAGzE;AACI,UAAI,CAAC,SAAS,MAAM,CAAC,SAAS;AAC1B,eAAO;;AAEX,UAAI,MAAM,MAAM,MAAM,MAAM,KAAK,IAAI,IAAI,KAAK;AAC1C,eAAO;;AAEX,aAAO;;AAEJ;AACH,mBAAa,GAAG,IAAI,OAAO,QAAQ;AAC/B,YAAI,OAAO,KAAK,OAAO,OAAO,KAAK;AAC/B,gBAAM,IAAI,MAAM,sBAAsB,OAAO,WAAW,cAAc;;;;AAI3E;AAGH,aAAO,IAAI,aAAa,SAAS,QAAQ,IAAI,aAAa;;;;;;;;;;;;;ACrH9D;AAEK,qBAAW;ACFhB;;;;;;;;;;;;;;;;AA0BO;AACH,aAAM,IAAI,QAAQ;;AAgBf;AACH,aAAM,IAAI,SAAS;;AAGhB;AACH,aAAM,IAAI,gCAAgC;AAC1C,cAAQ,KAAK;;AAGV;AACH,UAAI,OAAM,QAAQ;AACd,gBAAQ,KAAK,MAAM;;;AAI3B,6BAAwB;AAMjB;AACH,cAAO;;AAOJ;AACH,aAAO;;AAuBJ;AACH,aAAO,QAAO;;AA+BX;AACH,aAAO,QAAO,QAAQ;;AA0CnB;AACH,aAAO,QAAO,KAAK,UAAU;;AAa1B;AACH,sBAAgB,uBAAsB;AACtC,cAAQ,QAAQ,aAAU,QAAO;;AAkC9B;AACH,aAAO,QAAO,KAAK;;AA2BhB;AACH,aAAO,QAAO,KAAK;;AAiBhB;AACH,aAAO,QAAO,WAAW;;AAStB;AACH,aAAO,QAAO;;AAQX;AACH,aAAO,QAAO;;AAOX;AACH,cAAO,cAAc;;AAMlB;AACH,aAAO,QAAO,YAAY;;AAOvB;AACH,aAAO,QAAO,mBAAmB;;AAiB9B,wDAAmD;AACtD
,aAAO,QAAO,gBAAgB,MAAM,SAAS;;AAU1C;AACH,aAAO,QAAO;;AAQX;AACH,aAAM,YAAY,cAAc;;AC5VpC;;;;;;;;;;;;;;;;AA4CA;AACI,eAAS,iBAAgB,GAAG,KAAK;AACjC,eAAS,iBAAgB,GAAG,KAAK;AACjC,OAAC,IAAI,MAAM,gBAAe,IAAI;AAC9B,sBAAgB;AACZ,oBAAY,SAAQ,IAAI,IAAI;AAC5B,aAAK,CAAC,IAAI;AACV,eAAO;;AAEX,qBAAe,CAAE,GAAG,IAAI,GAAG;AAC3B,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAqB;;AAE1D,kBAAO,IAAG,CAAE;ACxDxB;;;;;;;;;;;;;;;;AA+CA;AACI,eAAS,iBAAgB,GAAG,KAAK;AACjC,eAAS,iBAAgB,GAAG,KAAK;AACjC,OAAC,IAAI,MAAM,gBAAe,IAAI;AAC9B,sBAAgB;AACZ,oBAAY,SAAQ,SAAS,IAAI;AACjC,aAAK,CAAC,IAAI;AACV,eAAO;;AAEX,qBAAe,CAAE,GAAG,IAAI,GAAG;AAC3B,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAqB;;AAE1D,qBAAY,IAAG,CAAE;AC3D7B;;;;;;;;;;;;;;;;AA8CA;AACI,eAAS,iBAAgB,GAAG,KAAK;AACjC,eAAS,iBAAgB,GAAG,KAAK;AACjC,OAAC,IAAI,MAAM,gBAAe,IAAI;AAC9B,UAAI,GAAG,UAAU,WAAW,GAAG,UAAU;AACrC,eAAO,SAAS,IAAI;;AAExB,sBAAgB;AACZ,oBAAY,SAAQ,WAAW,IAAI;AACnC,aAAK,CAAC,IAAI;AACV,eAAO;;AAEX,qBAAe,CAAE,GAAG,IAAI,GAAG;AAC3B,oBAAc;AACd,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAqB,MAAK;;AAE/D,gBAAO,IAAG,CAAE;AC9DxB;;;;;;;;;;;;;;;;AA8CA;AACI,eAAS,iBAAgB,GAAG,KAAK;AACjC,eAAS,iBAAgB,GAAG,KAAK;AACjC,OAAC,IAAI,MAAM,gBAAe,IAAI;AAC9B,sBAAgB;AACZ,oBAAY,SAAQ,SAAS,IAAI;AACjC,aAAK,CAAC,IAAI;AACV,eAAO;;AAEX,qBAAe,CAAE,GAAG,IAAI,GAAG;AAC3B,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAqB;;AAE1D,iBAAO,IAAG,CAAE,MAAA;AC1DxB;;;;;;;;;;;;;;;;AAgCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,aAAK,CAAC;AACN,YAAI,GAAG,UAAU;AACb,iBAAO,SAAQ,WAAW;;AAE9B,eAAO,SAAQ,IAAI;SACpB,QAAQ,MAAiB;;AAEpB,gBAAO,IAAG,CAAE;AC3CxB;;;;;;;;;;;;;;;;AA+BA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,oBAAY,SAAQ,KAAK;AACzB,aAAK,CAAC;AACN,eAAO;SACR,QAAQ,MAAiB;;AAEpB,iBAAQ,IAAG,CAAE;ACxCzB;;;;;;;;;;;;;;;;AAiCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,oBAAY,SAAQ,MAAM;AAC1B,aAAK,CAAC;AACN,eAAO;SACR,QAAQ,MAAiB;;AAEpB,kBAAS,IAAG,CAAE;AC1C1B;;;;;;;;;;;;;;;;AAkCA;AACI,cAAY,MAAM,QAAQ,UAAU,MAAM;AAC1C,cAAY,QAAQ,UAAU,GAAG,MAAM,uDAChC,QAAQ;AACf,uBAAiB,QAAQ,IAAI,UAAU,iBAAgB,GAAG,UAAU,KAAK;AACzE,0BAAoB,SAAS;AAC7B,eAAS,QAAQ;AACb,YAAI,EAAE,UAAU,YAAY;AACxB,gBAAM,IAAI,MAAM;;;AAGxB,eAAS,QAAQ;AACb,YAAI,CAAC,aAAiB,EAAE,OAAO,YAAY;AACvC,gBAAM,IAAI,MAAM;;;AAGxB,sBAAgB;AACZ,oBAAY,SAAQ,KAAK;AACzB,aAAK;AACL,eAAO;;AAEX,qBAAe;AACf,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB;;AAEtD,iBAAQ,IAAG,CAAE;AC1DzB;;;;;;;;;;;;;;;;AAqBO;AACH,mBAAa,GAAG,IAAI,KAAK,QAAQ,EAAE;AAC/B,YAAI,KAAK,KAAK,SAAS,IAAI,OAAO,OAAO,IAAI;AACzC,iBAAO;;;AAGf,aAAO;;AAEJ;AACH,mBAAa,UAAU,SAAS,UAAU;AAC1C,kBAAY;AACZ,mBAAa;AACb,sBAAgB;AAChB,qBAAe,GAAG,MAAM,MAAM;AAC1B,YAAI,KAAK,QAAQ,SAAS;AACtB,cAAI,KAAK,UAAU;;AAGnB,cAAI,KAAK,UAAU;;;AAG3B,aAAO;;AAEJ;AACH,uBAAiB;AACjB,mBAAa,OAAO;AACpB,qBAAe,GAAG,MAAM,MAAM;AAC1B,YAAI,KAAK,QAAQ,SAAS;AACtB,mBAAS,KAAK,OAAO;;;AAG7B,0BAAoB,KAAK,IAAI,SAAO,OAAO;AAC3C,aAAO,CAAC,UAAU;;AAEf;AACH,6BAAuB,KAAK,IAAI,OAAK;AACrC,aAAO,kBAAiB,OAAO,gBAAgB;;AAE5C;AACH,cAAY,sBAAqB,MAAM,OAAO,MAAM,GAAG,uDACvC,iBAAiB;;AAO9B;AACH,UAAI,sBAAqB,MAAM;AAC3B,eAAO;;AAEX,qBAAe;AACf,mBAAa,GAAG,IAAI,MAAM,EAAE;AACxB,YAAI,KAAK,QAAQ,OAAO;AACpB,iBAAO,KAAK;;;AAGpB,WAAK,QAAQ,UAAQ,OAAO,KAAK;AACjC,aAAO;;AAGJ;AACH,aAAO,KAAK,IAAI,aAAa,CAAC,GAAG,OAC5B,KAAK,UAAU,EAAE,KAAK,EAAE,IACxB,IAAI,OAAK,EAAE;;AAEb;AACH,kBAAY;AACZ,mBAAa,OAAO,SAAS,IAAI,MAAM,EAAE;AACrC,YAAI,KAAK;;AAEb,aAAO;;AC5FX;;;;;;;;;;;;;;;;AAqDA,4BAAwB,iBAAiB;AACrC,eAAS,iBAAgB,GAAG,KAAK,OAAO;AACxC,sBAAgB;AACZ,yBAAiB,gBAAe,MAAM,GAAG;AACzC,mBAAW;AACX,6BAAqB,oBAAmB,MAAM,GAAG;AACjD,YAAI,gBAAgB;AAChB,eAAK,WAAU,IAAI;AACnB,iBAAO,kBAAiB,KAAK,QAAQ,GAAG;;AAE5C,oBAAY,SAAQ,IAAI,IAAI;AAC5B,YAAI;AACA,2BAAiB,sBAAqB,IAAI,OAAO;AACjD,iBAAO,SAAQ,KAAK;;AAExB
,eAAO;;AAEX,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,MAAM;AACtB,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,KAAK;;AAE3D,gBAAO,IAAG,CAAE;AC1ExB;;;;;;;;;;;;;;;;AAqDA,4BAAwB,iBAAiB;AACrC,eAAS,iBAAgB,GAAG,KAAK,OAAO;AACxC,sBAAgB;AACZ,yBAAiB,gBAAe,MAAM,GAAG;AACzC,mBAAW;AACX,6BAAqB,oBAAmB,MAAM,GAAG;AACjD,YAAI,gBAAgB;AAChB,eAAK,WAAU,IAAI;AACnB,iBAAO,kBAAiB,KAAK,QAAQ,GAAG;;AAE5C,oBAAY,SAAQ,IAAI,IAAI;AAC5B,YAAI;AACA,2BAAiB,sBAAqB,IAAI,OAAO;AACjD,iBAAO,SAAQ,KAAK;;AAExB,eAAO;;AAEX,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,MAAM;AACtB,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,KAAK;;AAG3D,gBAAO,IAAG,CAAE;AC3ExB;;;;;;;;;;;;;;;;AA+CA,+BAA2B;AACvB,eAAS,iBAAgB,GAAG,KAAK;AACjC,sBAAgB;AACZ,aAAK,CAAC;AACN,mBAAW,gBAAoB,MAAM,GAAG;AACxC,6BAAqB,oBAA6B,MAAM,GAAG;AAC3D,YAAI,gBAAgB;AAChB,eAAK,WAAU,IAAI;AACnB,iBAAO,kBAA2B,KAAK,QAAQ,GAAG;;AAEtD,eAAO,SAAQ,OAAO,IAAI,KAAK;;AAEnC,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE;AAChB,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,SAAQ;;AAE9D,mBAAU,IAAG,CAAE;AC/D3B;;;;;;;;;;;;;;;;AA+CA,+BAA2B;AACvB,eAAS,iBAAgB,GAAG,KAAK;AACjC,sBAAgB;AACZ,aAAK,CAAC;AACN,YAAI,QAAQ;AACR,iBAAO;;AAEX,mBAAW,gBAAoB,MAAM,GAAG;AACxC,6BAAqB,oBAA6B,MAAM,GAAG;AAC3D,YAAI,gBAAgB;AAChB,eAAK,WAAU,IAAI;AACnB,iBAAO,kBAA2B,KAAK,QAAQ,GAAG;;AAEtD,eAAO,SAAQ,OAAO,IAAI,KAAK;;AAEnC,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE;AAChB,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,QAAQ;;AAE9D,mBAAU,IAAG,CAAE;AClE3B;;;;;;;;;;;;;;;;AA+BA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,oBAAY,SAAQ,KAAK;AACzB,aAAK,CAAC;AACN,eAAO;SACR,QAAQ,MAAiB;;AAEpB,iBAAQ,IAAG,CAAE;ACxCzB;;;;;;;;;;;;;;;;AAiCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,oBAAY,SAAQ,MAAM;AAC1B,aAAK,CAAC;AACN,eAAO;SACR,QAAQ,MAAiB;;AAEpB,kBAAS,IAAG,CAAE;AC1C1B;;;;;;;;;;;;;;;;AAgCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,oBAAY,SAAQ,KAAK;AACzB,aAAK,CAAC;AACN,eAAO;SACR,QAAQ,MAAiB;;AAEpB,iBAAQ,IAAG,CAAE;ACzCzB;;;;;;;;;;;;;;;;AAqCA;AACI,eAAS,iBAAgB,GAAG,KAAK;AACjC,eAAS,iBAAgB,GAAG,KAAK;AACjC,OAAC,IAAI,MAAM,gBAAe,IAAI;AAC9B,sBAAgB;AACZ,oBAAY,SAAQ,MAAM,IAAI;AAC9B,aAAK,CAAC,IAAI;AACV,eAAO;;AAEX,qBAAe,CAAE,GAAG,IAAI,GAAG;AAC3B,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAqB;;AAE1D,kBAAS,IAAG,CAAE;ACjD1B;;;;;;;;;;;;;;;;AAiCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,oBAAY,SAAQ,MAAM;AAC1B,aAAK,CAAC;AACN,eAAO;SACR,QAAQ,MAAiB;;AAEpB,kBAAS,IAAG,CAAE;AC1C1B;;;;;;;;;;;;;;;;AAyCO,yFAAmF;AAKtF,4BAAsB,WAAW;AACjC,2BAAqB,CAAC,GAAG,aAAa;AACtC,0BAAoB,yBAAwB;AAC5C,aAAO,mBAAkB,YAAY,cAAc,SAAS,WAAW,MAAK,MAAyB,MAAsB;;AAExH,0GAAoG;AACvG,0CAAoC,iBAAgB;AACpD;AACA,UAAI,eAAe;AACf,sBAAc,CAAC,cAAc,aAAa,QAAQ,IAAI,QAAQ;iBAEzD,eAAe;AACpB,sBAAc,CAAC,cAAc,aAAa,QAAQ,IAAI,QAAQ;;AAG9D,cAAM,IAAI,MAAM,sBAAsB;;AAE1C,aAAO,mBAAkB,SAAS,aAAa,SAAS,WAAW,MAAK,cAAc,OAAO;;AAK1F,0GAAoG;AACvG,uDAAiD,kBAAiB;AAClE;AACA;AACA,UAAI,eAAe;AACf,sBAAc;AACd,sBACI,CAAC,aAAa,cAAc,aAAa,QAAQ,IAAI,QAAQ;iBAE5D,eAAe;AACpB,sBAAc;AACd,sBACI,CAAC,aAAa,cAAc,aAAa,QAAQ,IAAI,QAAQ;;AAGjE,cAAM,IAAI,MAAM,sBAAsB;;AAE1C,aAAO,mBAAkB,SAAS,aAAa,SAAS,WAAW,MAAK,OAAO,aAAa;;AAMzF,0GAAoG,oBAAoB;AAC3H,uDAAiD,CAAC,IAAI,IAAI,IAAI;AAC9D,UAAI,eAAe;AACf,SAAC,WAAW,UAAU,SAAS,cAAc;iBAExC,eAAe;AACpB,SAAC,WAAW,YAAY,UAAU,WAAW;;AAG7C,cAAM,IAAI,MAAM,sBAAsB;;AAE1C,4DAAsD;AACtD,0CAAoC,iBAAgB;AACpD,8CAAwC,iBAAgB;AACxD,oCAA8B,wBAAuB,cAAc;AACnE,mCAA6B,wBAAuB,aAAa;AACjE,aAAQ,SAAS,WAAW,YAAa,kBAAiB,MAAK,UAAU,SAAS,cAAc,aAAa,uBAAuB,sBAAsB,cAAc;AACxK,0BAAoB,YAAY,iBAAiB,aAAa;AAC9D;AACA,UAAI,eAAe;AACf,mBAAW,CAAC,WAAW,aAAa,WAAW;iBAE1C,eAAe;AACpB,mBAAW,CAAC,WAAW,WAAW,UAAU;;AAEhD,aAAO;QACH;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QAC
A;QACA;QACA;QACA;;;AAOD,4FAAsF,oBAAoB;AAC7G,gEAA0D,CAAC,IAAI,IAAI,IAAI,IAAI;AAC3E,UAAI,eAAe;AACf,SAAC,WAAW,SAAS,UAAU,SAAS,cAAc;iBAEjD,eAAe;AACpB,SAAC,WAAW,YAAY,SAAS,UAAU,WAAW;;AAGtD,cAAM,IAAI,MAAM,sBAAsB;;AAE1C,yEAAmE;AACnE,uDAAiD,kBAAiB;AAClE,6DAAuD,kBAAiB;AACxE,mCAA6B,wBAAuB,aAAa;AACjE,oCAA8B,wBAAuB,cAAc;AACnE,mCAA6B,wBAAuB,aAAa;AACjE,aAAQ,SAAS,UAAU,WAAW,YAAa,oBAAmB,MAAK,SAAS,UAAU,SAAS,aAAa,cAAc,aAAa,sBAAsB,uBAAuB,sBAAsB;AAClN,0BAAoB,YAAY,iBAAiB,aAAa;AAC9D;AACA,UAAI,eAAe;AACf,mBAAW,CAAC,WAAW,aAAa,UAAU,WAAW;iBAEpD,eAAe;AACpB,mBAAW,CAAC,WAAW,UAAU,WAAW,UAAU;;AAE1D,aAAO;QACH;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;;;AAGR;AACI,UAAI,WAAW;AACX,kBAAU,mBAAkB,SAAS,WAAW;;AAEpD,wBAAkB,QAAQ;AAC1B,wBAAkB,QAAQ;AAC1B,yBAAmB,kBAAkB,aAAY,YAAY,IAAI,WAAW,SAAS,GAAG;AACxF,cAAY,OAAW,aAAa,MAAM,yBAAyB;AAEnE,yBAAmB,kBAAkB,aAAY,YAAY,IAAI,WAAW,SAAS,GAAG;AACxF,cAAY,OAAW,aAAa,MAAM,4BAA4B;AAEtE,aAAO,CAAC,YAAY;;AAExB;AACI,UAAI,WAAW;AACX,kBAAU,mBAAkB,SAAS,WAAW;;AAEpD,yBAAmB,QAAQ;AAC3B,wBAAkB,QAAQ;AAC1B,wBAAkB,QAAQ;AAC1B,2BAAqB,kBAAkB,cAAa,YAAY,IAAI,WAAW,SAAS,GAAG;AAC3F,cAAY,OAAW,eAAe,MAAM,2BAA2B;AAEvE,yBAAmB,kBAAkB,aAAY,YAAY,IAAI,WAAW,SAAS,GAAG;AACxF,cAAY,OAAW,aAAa,MAAM,yBAAyB;AAEnE,yBAAmB,kBAAkB,aAAY,YAAY,IAAI,WAAW,SAAS,GAAG;AACxF,cAAY,OAAW,aAAa,MAAM,4BAA4B;AAEtE,aAAO,CAAC,cAAc,YAAY,YAAY;;AAE3C,0EAAqE;AACxE,iCAA2B,wBAAuB,WAAW;AAC7D,aAAO,KAAK,MAAO,YAAW,KAAM,UAAS,KAAK,SAAS,sBAAsB;;AAErF;AACI,UAAI,OAAO,UAAU;AACjB,eAAO,CAAC,OAAO,OAAO;;AAE1B,UAAI,MAAM,WAAW;AACjB,eAAO,CAAC,MAAM,IAAI,MAAM,IAAI;;AAEhC,aAAO;;AAEX;AACI,aAAO,OAAO,UAAU,WAAW,CAAC,OAAO,OAAO,SAAS;;AAa/D;AACI,UAAI,YAAY;AACZ,eAAO;;AAEX,aAAO,aAAc,cAAa,KAAM,YAAW;;AAEvD;AACI;AACA;AACA;AACA,UAAI,OAAO,SAAQ;AACf,wBAAiB,SAAQ,IAAK,UAAU;AACxC,kBAAU,CAAE,KAAK,MAAK,QAAQ,MAAK,MAAM,MAAK,OAAO,MAAK,MAAM;AAChE,yBAAiB,sBAAqB,CAAC,UAAU,UAAU,cAAc,cAAc,MAAK;AAC5F,oBAAY,SAAS;AACrB,mBAAW,SAAS;iBAEf,SAAQ;AACb,oBAAY,KAAK,KAAK,WAAW;AACjC,mBAAW,KAAK,KAAK,UAAU;AAC/B,+BAAuB,KAAK,IAAI,GAAI,aAAY,KAAK,eAAe,eAAe;AACnF,8BAAsB,KAAK,IAAI,GAAI,YAAW,KAAK,cAAc,cAAc;AAC/E,oBAAY,KAAK,MAAM,iBAAiB;AACxC,uBAAe,iBAAiB;AAChC,qBAAa,KAAK,MAAM,gBAAgB;AACxC,sBAAc,gBAAgB;AAC9B,kBAAU,CAAE,KAAK,QAAQ,MAAM,OAAO,MAAM;iBAEvC,SAAQ;AACb,kBAAU,CAAE,KAAK,GAAG,QAAQ,GAAG,MAAM,GAAG,OAAO,GAAG,MAAM;AACxD,oBAAY,KAAK,KAAM,YAAW,eAAe,KAAK;AACtD,mBAAW,KAAK,KAAM,WAAU,cAAc,KAAK;iBAE9C,OAAO,SAAQ;AACpB,oBAAY,eAAe,iBAAiB,KAAI,GAAG,KAAK,KAAI,GAAG;AAC/D,uBAAe,eAAe,iBAAiB,KAAI,GAAG,KAAK,KAAI,GAAG;AAClE,qBAAa,eAAe,iBAAiB,KAAI,GAAG,KAAK,KAAI,GAAG;AAChE,sBAAc,eAAe,iBAAiB,KAAI,GAAG,KAAK,KAAI,GAAG;AACjE,wBAAiB,QAAQ,KAAK,WAAW,KAAK,SAAS,KAAK,UAAU,IAClE,UACA;AACJ,kBAAU,CAAE,KAAK,QAAQ,MAAM,OAAO,MAAM;AAC5C,oBAAY,kBAAkB,YAAW,eAAe,MAAM,UAAU,eAAe,GAAG;AAC1F,mBAAW,kBAAkB,WAAU,cAAc,OAAO,SAAS,cAAc,GAAG;;AAGtF,cAAM,MAAM,8BAA8B;;AAE9C,aAAO,CAAE,SAAS,WAAW;;AAEjC;AACI;AACA;AACA;AACA;AACA,UAAI,OAAO,SAAQ;AACf,wBAAiB,SAAQ,IAAK,UAAU;AACxC,kBAAU;UACN,KAAK;UACL,QAAQ;UACR,MAAM;UACN,OAAO;UACP,OAAO;UACP,MAAM;UACN,MAAM;;AAEV,yBAAiB,sBAAqB,CAAC,SAAS,UAAU,SAAS,IAAI,aAAa,GAAG,aAAa,MAAK;AACzG,mBAAW,SAAS;AACpB,oBAAY,SAAS;AACrB,mBAAW,SAAS;iBAEf,SAAQ;AACb,mBAAW,KAAK,KAAK,UAAU;AAC/B,oBAAY,KAAK,KAAK,WAAW;AACjC,mBAAW,KAAK,KAAK,UAAU;AAC/B,8BAAuB,YAAW,KAAK,cAAc,cAAc;AACnE,+BAAwB,aAAY,KAAK,eAAe,eAAe;AACvE,8BAAuB,YAAW,KAAK,cAAc,cAAc;AACnE,sBAAc,KAAK,MAAM,gBAAgB;AACzC,qBAAa,gBAAgB;AAC7B,oBAAY,KAAK,MAAM,iBAAiB;AACxC,uBAAe,iBAAiB;AAChC,qBAAa,KAAK,MAAM,gBAAgB;AACxC,sBAAc,gBAAgB;AAC9B,kBAAU,CAAE,KAAK,QAAQ,MAAM,OAAO,OAAO,MAAM,MAAM;iBAEpD,SAAQ;AACb,kBAAU;UACN,KAAK;UACL,QAAQ;UACR,MAAM;UACN,OAAO;UACP,OAAO;UACP,MAAM
;UACN,MAAM;;AAEV,mBAAW,KAAK,KAAM,WAAU,cAAc,KAAK;AACnD,oBAAY,KAAK,KAAM,YAAW,eAAe,KAAK;AACtD,mBAAW,KAAK,KAAM,WAAU,cAAc,KAAK;;AAGnD,cAAM,MAAM,8BAA8B;;AAE9C,aAAO,CAAE,SAAS,UAAU,WAAW;;AAO3C;AACI,UAAI,CAAC;AACD,eAAO;;AAEX,cAAQ;aACC;AAED,iBAAO,KAAK,MAAM;aACjB;AAED,iBAAO,KAAK,KAAK;aAChB;AACD,iBAAO,KAAK,MAAM;;AAElB,gBAAM,IAAI,MAAM,wBAAwB;;;AAG7C;AACH,iCAA2B,iBAAgB;AAC3C,aAAO,SAAS,KAAK,SAAS,KAAK,SAAS;;AAEzC;AACH,aAAO,mBAAkB,YAAY,mBAAkB;;AASpD;AACH,UAAI,eAAe;AACf,eAAO;iBAEF,eAAe;AACpB,eAAO;;AAGP,cAAM,IAAI,MAAM,sBAAsB;;;ACtZ9C;;;;;;;;;;;;;;;;AA6CA;AACI,iBAAW,iBAAgB,GAAG,KAAK,WAAW;AAC9C,wBAAkB;AAClB,cAAY,gCAAyC,SAAS,YAAY,MAAM,wEAC7D,0BAA0B;AAC7C,gBAAU;AACV,yBAAmB;AACnB,UAAI,GAAG,SAAS;AACZ,uBAAe;AACf,cAAM,SAAQ,IAAI,CAAC,GAAG,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM;;AAE7D,cAAY,IAAI,SAAS,GAAG,MAAM,mDAAmD,IAAI;AACzF,UAAI,mBAAmB;AACnB,gBAAY,OAAW,OAAM,MAAM,wEACZ,+BAA+B;;AAE1D,sBAAgB;AACZ,yBAAiB,mBAA4B,IAAI,OAAO,YAAY,SAAS,GAAmB,MAAK;AACrG,aAAK,CAAC;AACN,YAAI,SAAS,gBAAgB,KAAK,SAAS,iBAAiB,KACxD,aAAiB,SAAS,SAAS,SAAS;AAC5C,iBAAO,IAAI;;AAEf,eAAO,SAAQ,QAAQ,KAAK;;AAEhC,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,YAAY,SAAS,KAAA,MAAK;AAC1C,gBAAU,QAAO,cAAc,SAAS,QAAQ,MAAiB,UAAS;AAC1E,YAAM,MAAK,KAAK,GAAG;AACnB,UAAI;AACA,eAAO,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAE/D,aAAO;;AAEC,qBAAW,IAAG,CAAE;AC/E5B;;;;;;;;;;;;;;;;AAsEA,oFAA+E;AAC3E,UAAI,aAAa;AACb,oBAAY,CAAC,GAAG,GAAG;;AAGnB,yBAAgB;;AAGpB,iBAAW,iBAAgB,GAAG,KAAK,aAAa;AAChD,gBAAU;AACV,yBAAmB;AACnB,UAAI,GAAG,SAAS;AACZ,uBAAe;AACf,cAAM,SAAQ,IAAI,CAAC,GAAG,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM;;AAE1E,cAAY,IAAI,SAAS,GAAG,MAAM,qDAAqD,IAAI;AAC3F,cAAY,eAAe,SAAS,MAAM,gFACb;AAC7B,cAAY,gCAAyC,SAAS,YAAY,MAAM,0EAC7D,0BAA0B;AAC7C,UAAI,mBAAmB;AACnB,gBAAY,OAAW,OAAM,MAAM,0EACZ,+BAA+B;;AAE1D,sBAAgB;AACZ,YAAI,aAAa;AACb,sBAAY,CAAC,GAAG,GAAG;;AAEvB,yBAAiB,mBAA4B,IAAI,OAAO,YAAY,SAAS,WAAW,MAAK,iBAAiB;AAC9G,aAAK,CAAC;AACN,eAAO,SAAQ,UAAU,KAAK;;AAElC,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,YAAY,SAAS,KAAA,MAAK,iBAAiB,YAAY;AACvE,gBAAU,QAAO,cAAc,SAAS,QAAQ,MAAiB,WAAW;AAC5E,YAAM,MAAK,KAAK,IAAI;AACpB,UAAI;AACA,eAAO,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAE7E,aAAO;;AAEC,sBAAa,IAAG,CAAE;AC/G9B;;;;;;;;;;;;;;;;AAiBO;AACH,mBAAa,OAAO,GAAG;AACvB,aAAO,QAAQ;AACX,gBAAY,MAAM,WAAW,MAAM,MAAM,kBAAkB,0BAA0B,gDACrD;;AAEpC,cAAY,QAAQ,KAAK,OAAO,MAAM,MAAM,kBAAkB,qCAAqC,OAAO;AAC1G,yBAAmB,OAAO;AAC1B,aAAO,QAAQ;AACX,qBAAa,GAAG,IAAI,MAAM;AACtB,kBAAa,MAAM,QAAU,MAAM,OAAO,WAAW,IAAK,MAAM,kBAAkB,2BAA2B,OAAO,gDACvE,+CACN;;;;AAI5C;AACH,0BAAoB,OAAO,GAAG;AAC9B,mBAAa,GAAG,IAAI,OAAO,QAAQ;AAC/B,oBAAY,SAAS,OAAO,GAAG;;AAEnC,aAAO;;ACtCX;;;;;;;;;;;;;;;;AA+DA,qCAAiC;AAC7B,cAAO,QAAQ,UAAU,GAAG,MAAM;AAClC,qBAAe,qBAAqB,SAAS,WAAW;AACxD,UAAI,SAAS,GAAG,UAAU;AACtB,iBAAS,QAAQ;AACb,cAAI,QAAO,UAAU;AACjB,kBAAM,IAAI,MAAM;uBACT,QAAO;;;;AAI1B,sBAAgB;AACZ,sBAAc,gBAAe,MAAM,SAAS,GAAG,OAAO;AACtD,yBAAiB,kBAAgB,SAAS,IAAI,OAAK,EAAE,QAAQ;AAC7D,YAAI,eAAc,cAAc;AAC5B,iBAAO,QAAO,IAAI;;AAGtB,mBAAW,SAAS,OAAO,OAAK,EAAE,OAAO;AACzC,YAAI,SAAS,WAAW;AACpB,iBAAO,SAAS;;AAEpB,uBAAe,SAAS,IAAI,OAAK,EAAE;AACnC,gCAAuB,QAAQ;AAC/B,oBAAY,SAAQ,OAAO,UAAU;AACrC,aAAK;AACL,eAAO;;AAEX,qBAAe;AACf,mBAAa,CAAE;AACf,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,SAAQ;;AAE9D,oBAAU,IAAG,CAAE;AC/F3B;;;;;;;;;;;;;;;;AAgCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,oBAAY,SAAQ,QAAQ;AAC5B,aAAK,CAAC;AACN,eAAO;SACR,QAAQ,MAAiB;;AAEpB,qBAAW,IAAG,CAAE;ACzC5B;;;;;;;;;;;;;;;;AAuDA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,UAAI,GAAG,SAAS;AACZ,cAAM,IAAI,MAAM;;AAEpB,sBAAgB;AACZ,gCAAwB,kBAA4B,IAAI,OAAO;AAC/D,2BAA6B,IAAI,QAAQ;AACzC,aAAK,CAAC;AACN,eAAO,SAAQ,MAA
c;AACd,UAAI,SAAS,QAAQ,MAAM,WAAW;AAClC,cAAM,IAAI,MAAM;;AAEpB,4BAAsB,YAAW,QAAQ;AACzC,UAAI,cAAc,WAAW,KAAK,cAAc,WAAW;AACvD,cAAM,IAAI,MAAM;;AAEpB,UAAI,cAAc,WAAW,KAAK,SAAS;AACvC,cAAM,IAAI,MAAM;;AAGpB,aAAO,YAAW,QAAQ,OAAO,eAAe;;ACvDpD;;;;;;;;;;;;;;;;AA0CO;AACH,qBAAc;AACd,UAAI,SAAS,QAAQ,MAAM,WAAW;AAClC,cAAM,IAAI,MAAM;;AAEpB,4BAAsB,YAAW,QAAQ;AACzC,UAAI,cAAc,WAAW,KAAK,cAAc,WAAW;AACvD,cAAM,IAAI,MAAM;;AAEpB,UAAI,cAAc,WAAW,KAAK,SAAS;AACvC,cAAM,IAAI,MAAM;;AAGpB,aAAO,YAAW,QAAQ,OAAO,eAAe;;ACvDpD;;;;;;;;;;;;;;;;AA0CO;AACH,qBAAc;AACd,UAAI,SAAS,QAAQ,MAAM,WAAW;AAClC,cAAM,IAAI,MAAM;;AAEpB,4BAAsB,YAAW,QAAQ;AACzC,UAAI,cAAc,WAAW,KAAK,cAAc,WAAW;AACvD,cAAM,IAAI,MAAM;;AAGpB,UAAI,cAAc,WAAW,KAAK,SAAS;AACvC,cAAM,IAAI,MAAM;;AAGpB,aAAO,YAAW,QAAQ,OAAO,eAAe;;ACxDpD;;;;;;;;;;;;;;;;AA0CO;AACH,qBAAc;AACd,UAAI,SAAS,QAAQ,MAAM,WAAW;AAClC,cAAM,IAAI,MAAM;;AAEpB,4BAAsB,YAAW,QAAQ;AACzC,UAAI,cAAc,WAAW,KAAK,cAAc,WAAW;AACvD,cAAM,IAAI,MAAM;;AAGpB,UAAI,cAAc,WAAW,KAAK,SAAS;AACvC,cAAM,IAAI,MAAM;;AAGpB,cAAQ,SACJ;AACJ,aAAO,YAAW,QAAQ,OAAO,eAAe;;AC1DpD;;;;;;;;;;;;;;;;AA4CA,0BAAsB,YAAY;AAC9B,iBAAW,iBAAgB,GAAG,KAAK;AACnC,UAAI,GAAG,SAAS;AACZ,cAAM,IAAI,MAAM;;AAEpB,sBAAgB,GAAG,MAAM,GAAG,MAAM,SAAS;AAC3C,UAAI,IAAI;AACJ,cAAM,IAAI,MAAM,uDAAuD,oBACxD;;AAEnB,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,GAAG;AACnB,gCAA0B,QAAO,cAAc,OAAK,EAAE,KAAK,IAAI,GAAG,SAAS,QAAQ,MAAiB,MAAM;AAC1G,aAAO,CAAE,QAAQ;;AAET,iBAAQ,IAAG,CAAE;AC3DzB;;;;;;;;;;;;;;;;AAuCA,6CAAwC,YAAY;AAChD,UAAI,SAAS,QAAQ,UAAU;AAC3B,cAAM,IAAI,MAAM;;AAEpB,wBAAkB,IAAI,YAAY,OAAM,QAAQ,OAAO,MAAsB;AAC7E,kBAAY,QAAO,OAAO;AAC1B,mBAAa,GAAG,IAAI,IAAI,OAAO,QAAQ;AACnC,YAAI,OAAO,KAAK,UAAU;;AAE9B,aAAO,IAAI;;AAEH,4BAAmB,IAAG,CAAE;AClDpC;;;;;;;;;;;;;;;;AAsEA,+BAA2B;AAEvB,iBAAW,iBAAgB,GAAG,KAAK,UAAU;AAC7C,cAAO,GAAG,OAAO,GAAG,MAAM;AAC1B,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE;AAChB,gCAA0B,QAAO,UAAU,QAAQ,QAAQ;AAC3D,aAAO,CAAE,QAAQ;;AAET,mBAAU,IAAG,CAAE;AC/E3B;;;;;;;;;;;;;;;;AAuCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,0BAAoB,iBAAgB,YAAY,cAAc,sBAAsB;AACpF,cAAO,OAAM,cAAc,MAAM;AACjC,qBAAe,CAAE,GAAG,IAAI,YAAY;AACpC,oBAAc,CAAE;AAChB,sBAAgB;AACZ,oBAAY,SAAQ,mBAAmB,IAAI,aAAa;AACxD,aAAK,CAAC;AACN,eAAO;;AAEX,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,oBAAoB;;AAE1E,+BAAsB,IAAG,CAAE;ACpDvC;;;;;;;;;;;;;;;;AAmCA,gCAA4B;AACxB,iBAAW,iBAAgB,GAAG,KAAK;AACnC,cAAY,QAAQ,CAAC,GAAG,MAAM,UAAU,OAAO,GAAG,MAAM,QAAQ,MAAM,UAAU,oBAAoB,GAAG,MAAM,WAAW,GAAG,MAAM;AACjI,UAAI,OAAO;AACP,gBAAQ,GAAG,MAAM;;AAErB,qBAAe,CAAE,OAAO;AACxB,oBAAc,CAAE;AAChB,sBAAgB,cAAa,SAAQ,QAAQ,IAAI;AACjD,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,SAAQ;;AAE9D,oBAAW,IAAG,CAAE;AC9C5B;;;;;;;;;;;;;;;;AAiCO,gDAA4C;AAC/C,aAAO,QAAO,aAAa,cAAc,WAAW,MAAM;;AClC9D;;;;;;;;;;;;;;;;AAkBO;AACH,sBAAgB;AAChB,mBAAa,GAAG,IAAI,SAAS,QAAQ;AACjC,YAAI,SAAS;AACT,kBAAQ,KAAK;;;AAGrB,uBAAiB,QAAO,WAAW;AACnC,kBAAY,QAAO,CAAC,QAAQ,QAAQ,UAAU,SAAS;AACvD,mBAAa,GAAG,IAAI,QAAQ,QAAQ;AAChC,oBAAY,SAAS,WAAW,QAAQ;AACxC,uBAAe,IAAI,UAAU;AAC7B,YAAI,OAAO,IAAI,KAAK;;AAExB,aAAO,IAAI;;AChCf;;;;;;;;;;;;;;;;AAsCA;AACI,yBAAmB,iBAAgB,WAAW,aAAa,cAAc;AACzE,mBAAa,MAAM,WAAW;AAC9B,kBAAY,UAAU,WAAW,OAAO;AACxC,UAAI,cAAc;AACd,mBAAW;;AAEf,aAAO;;AAEC,uBAAc;AC/C1B;;;;;;;;;;;;;;;;AAwCA;AACI,sBAAgB,iBAAgB,SAAQ,UAAU;AAClD,oBAAc,iBAAgB,MAAM,QAAQ,YAAY;AACxD,uBAAiB,QAAQ,OAAO,IAAI;AACpC,sBAAgB,MAAM;AACtB,0BAAoB,QAAQ;AAC5B,cAAY,UAAU,GAAG,MAAM;AAC/B,yBAAuB,YAAY,MAAM,UAAU,WAAW,UAAU,MAAM,OAAO;AACrF,wBAAkB;AAClB,mBAAa,UAAU,IAAI,WAAW,SAAS;AAC3C,uBAAe,YAAY;;AAE/B,gCAA0B,YAAY,MAAM,GAAG,UAC1C,OAAO,CAAC,cAAc,YAAY,MAAM,WAAW;AACxD,6BAAuB,SAAQ,SAAS;AACxC,2BAAqB,SAAQ,OAAO,CAAC;AACrC,gCAA0B,MAAM,WAAW;AAC3C,sBAAgB,QAAQ,mBAAmB,CAAC;AAC5C,kBAAY,OAAO,gBAAgB,SAAS;AAE5C,UAAI,YAAW;AACX,gBAAQ;;AAEZ,UAAI,SAAS;AACT,cAAM;;AAEV,
cAAQ;AACR,qBAAe;AACf,mBAAa;AACb,wBAAkB;AAClB,aAAO;;AAEC,6BAAoB;ACxEhC;;;;;;;;;;;;;;;;AAmCA;AACI,uBAAgB;AAEhB,iBAAW,iBAAgB,GAAG,KAAK;AACnC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,yBAAkB,GAAG,OAAO,GAAG,OAAO;AACtC,aAAO,SAAS,IAAI;;AAWxB;AACI,uBAAgB;AAEhB,iBAAW,iBAAgB,GAAG,KAAK;AACnC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,yBAAkB,GAAG,OAAO,GAAG,OAAO;AACtC,aAAO,KAAK,IAAI;;AAEpB;AACI,uBAAgB;AAEhB,iBAAW,iBAAgB,GAAG,KAAK;AACnC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,yBAAkB,GAAG,OAAO,GAAG,OAAO;AACtC,aAAO,MAAM,IAAI;;AAErB;AACI,uBAAgB;AAEhB,iBAAW,iBAAgB,GAAG,KAAK;AACnC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,yBAAkB,GAAG,OAAO,GAAG,OAAO;AACtC,aAAO,UAAU,IAAI;;AAEzB;AACI,uBAAgB;AAEhB,iBAAW,iBAAgB,GAAG,KAAK;AACnC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,yBAAkB,GAAG,OAAO,GAAG,OAAO;AACtC,aAAO,QAAQ,IAAI;;AAEvB;AACI,uBAAgB;AAEhB,iBAAW,iBAAgB,GAAG,KAAK;AACnC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,yBAAkB,GAAG,OAAO,GAAG,OAAO;AACtC,aAAO,aAAa,IAAI;;AAEhB,wBAAe,IAAG,CAAE;AACpB,+BAAsB,IAAG,CAAE;AAC3B,0BAAiB,IAAG,CAAE;AACtB,4BAAmB,IAAG,CAAE;AACxB,uBAAc,IAAG,CAAE;AACnB,2BAAkB,IAAG,CAAE;ACjGnC;;;;;;;;;;;;;;;;AAsCA;AACI,uBAAgB;AAEhB,iBAAW,iBAAgB,GAAG,KAAK;AACnC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,yBAAuB,GAAG,OAAO,GAAG,OAAO;AAC3C,aAAO,MAAI,IAAI;;AAYnB;AACI,uBAAgB;AAEhB,iBAAW,iBAAgB,GAAG,KAAK;AACnC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,yBAAuB,GAAG,OAAO,GAAG,OAAO;AAC3C,aAAO,IAAI,IAAI;;AAYnB;AACI,uBAAgB;AAEhB,yBAAuB,MAAK,OAAO,KAAI,OAAO;AAC9C,aAAO,IAAI,OAAM;;AAYrB;AACI,uBAAgB;AAEhB,iBAAW,iBAAgB,GAAG,KAAK;AACnC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,yBAAuB,GAAG,OAAO,GAAG,OAAO;AAC3C,aAAO,KAAI,IAAI;;AAUnB;AACI,uBAAgB;AAEhB,iBAAW,iBAAgB,GAAG,KAAK;AACnC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,yBAAuB,GAAG,OAAO,GAAG,OAAO;AAC3C,aAAO,IAAI,IAAI;;AAUnB;AACI,uBAAgB;AAEhB,iBAAW,iBAAgB,GAAG,KAAK;AACnC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,yBAAuB,GAAG,OAAO,GAAG,OAAO;AAC3C,aAAO,IAAI,IAAI;;AAUnB;AACI,uBAAgB;AAEhB,iBAAW,iBAAgB,GAAG,KAAK;AACnC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,yBAAuB,GAAG,OAAO,GAAG,OAAO;AAC3C,aAAO,QAAQ,IAAI;;AAUvB;AACI,uBAAgB;AAEhB,iBAAW,iBAAgB,GAAG,KAAK;AACnC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,yBAAuB,GAAG,OAAO,GAAG,OAAO;AAC3C,aAAO,QAAQ,IAAI;;AAYvB;AACI,uBAAgB;AAEhB,iBAAW,iBAAgB,GAAG,KAAK;AACnC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,yBAAuB,GAAG,OAAO,GAAG,OAAO;AAC3C,aAAO,kBAAkB,IAAI;;AAErB,sBAAa,IAAG,CAAE;AAClB,sBAAa,IAAG,CAAE;AAClB,0BAAiB,IAAG,CAAE;AACtB,0BAAiB,IAAG,CAAE;AACtB,sBAAa,IAAG,CAAE;AAClB,sBAAa,IAAG,CAAE;AAClB,sBAAa,IAAG,CAAE;AAClB,oCAA2B,IAAG,CAAE;AAChC,sBAAa,IAAG,CAAE;AC5L9B;;;;;;;;;;;;;;;;AAmEA,4BAAwB,oBAAoB,iBAAiB;AACzD,UAAI,iBAAgB,GAAG,KAAK;AAC5B,oBAAa,SAAS,GAAG,KAAK;AAC9B,0BAAoB,MAAK;AACzB,UAAI;AACA,qBAAa,gBAAe,MAAM,EAAE;AACpC,wBAAgB,sBAA+B,MAAK,OAAO;;AAE/D,aAAO,SAAQ,OAAM;;AAEzB,oCAA+B;AAC3B,UAAI,EAAE,SAAS;AACX,eAAO,IAAI;;AAGf,UAAI,EAAE,SAAS,KAAK,SAAS;AACzB,eAAO,SAAS,SAAQ,GAAG,CAAC,MAAM,IAAG;;AAGzC,UAAI,EAAE,SAAS,KAAK,OAAO,SAAS,YAChC,MAAM,QAAQ,SAAS,KAAK,WAAW;AACvC,YAAI,OAAM;AACN,iBAAO,MAAI,IAAI,IAAI;;AAEvB,YAAI,OAAM;AACN,iBAAO,KAAI,IAAI,IAAI;;AAEvB,YAAI,OAAM;AACN,iBAAO,KAAI,IAAI,IAAI;;AAEvB,YAAI,OAAM,eAAe,OAAM;AAE3B,iBAAO,KAAK,MAAI,IAAI,IAAI,IAAI,QAAO,GAAG,WAAW;;AAErD,cAAM,IAAI,MAAM,qCAAqC;;AAGzD,UAAI,MAAM,QAAQ,SAAS,KAAK,WAAW;AACvC,YAAI,OAAM;AACN,iBAAO,KAAI,MAAI,IAAI,IAAI,KAAK,KAAK,KAAK,KAAK;;AAE/C,YAAI,OAAM;AACN,iBAAO,KAAI,MAAI,IAAI,IAAI,KAAK,KAAK,KAAK;;AAE1C,YAAI,OAAM;AACN,iBAAO,KAAI,MAAI,IAAI,IAAI,KAAK,KAAK,KAAK;;AAE1C,YAAI,OAAM,SAAS,OAAM;AAErB,iBAAO,KAAK,MAAI,OAAO,IAAI;;AAE/B,cAAM,IAAI,MAAM,qCAAqC;;AAEzD,YAAM,IAAI,MAAM,gCAAgC;;AAExC,iBAAQ,IAAG,CAAE;AC1HzB;;;;;;;;;;;;;;;;AAsDA,6DAAwD;AACpD,iBAAW,iBAAgB,GAAG,KAAK;AACnC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,iBAAgB,OAAO,SAAS;AAC/C,uBAAiB,IAAI;AACrB,cAAY,aAAiB,GAAG,OAAO,GAAG,QAAQ,MAAM;AACxD,kBAAY,
QAAO;AACnB,4BAAsB,IAAI,KAAK;AAC/B,oBAAa,KAAI,IAAI,IAAI,KAAK;AAC9B,UAAI;AACA,gBAAY,SAAQ,MAAM,MAAM;AAChC,sBAAc,iBAAgB,OAAM,QAAQ;AAC5C,kBAAS,IAAI,SAAQ,IAAI,KAAK,IAAI,QAAQ;;AAE9C,aAAO,MAAI,IAAI;;AAEP,0BAAiB,IAAG,CAAE;ACtElC;;;;;;;;;;;;;;;;AAwCA;AACI,uBAAiB,iBAAgB,SAAS,WAAW,aAAa;AAClE,uBAAiB,iBAAgB,SAAS,WAAW;AACrD,qBAA8B,UAAU,UAAU;AAClD,sBAAgB;AACZ,eAAO,SAAQ,UAAU,UAAU,UAAU;;AAEjD,qBAAe,CAAE,SAAS,UAAU,SAAS;AAC7C,oBAAc,CAAE;AAChB,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,YAAW;;AAEjE,sBAAa,IAAG,CAAE;ACtCvB;AACH,UAAI,cAAc,UAAU;AACxB,cAAM,IAAI,MAAM,8EACU,cAAc;;AAE5C,UAAI,cAAc,OAAO;AACrB,cAAM,IAAI,MAAM,sEACM,cAAc;;AAExC,uBAAiB,cAAc,OAAO,IAAI,cAAc,MAAM,KAAK;AACnE,sBAAgB,cAAc,OAAO,IAAI,cAAc,MAAM,KAAK;AAClE,UAAI,YAAY,WAAW;AACvB,cAAM,IAAI,MAAM,kDACR,YAAY,sBAAsB;;AAE9C,wBAAkB,aAAa;AAC/B,UAAI,CAAE,cAAa,SAAS,KACxB,aAAa,SAAS,KAAK,cAAc;AACzC,cAAM,IAAI,MAAM,oCACT,aAAa,2BAA2B;;AAEnD,UAAI,aAAa,UAAU,cAAc;AACrC,cAAM,IAAI,MAAM;;;ACnCxB;;;;;;;;;;;;;;;;AA2DA,qFAAiF;AAC7E,6BAAuB,iBAAgB,eAAe,iBAAiB,iBAAiB;AACxF,4BAAsB,iBAAgB,cAAc,gBAAgB;AACpE,4BAAsB,iBAAgB,cAAc,gBAAgB,iBAAiB,cAAc;AACnG,sBAA8B,gBAAgB,eAAe,aAAa;AAC1E,qBAAe;QACX,eAAe;QACf,cAAc;QACd,cAAc;;AAElB,oBAAc,CAAE;AAChB,aAAO,QAAO,cAAc,cAAW,SAAQ,cAAc,gBAAgB,eAAe,aAAa,gBAAgB,QAAQ,MAAiB,eAAe;;AAEzJ,0BAAiB,IAAG,CAAE;ACxElC;;;;;;;;;;;;;;;;AA0DA;AACI,uBAAiB,iBAAgB,SAAS,WAAW,YAAY;AACjE,iBAAW,iBAAgB,GAAG,KAAK;AACnC,sBAAgB;AACZ,eAAO,SAAQ,SAAS,IAAI;;AAEhC,qBAAe,CAAE,QAAQ,IAAI,SAAS;AACtC,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAqB;;AAE1D,qBAAY,IAAG,CAAE;ACnE7B;;;;;;;;;;;;;;;;AAyBO;AACH,UAAI,cAAc;AACd,eAAO,EAAE,MAAM;;AAEnB,UAAI,aAAiB,EAAE,OAAO;AAC1B,eAAO;;AAEX,UAAI,EAAE,MAAM,WAAW,WAAW;AAC9B,6BAAqB;AACrB,qBAAa,GAAG,IAAI,EAAE,MAAM,QAAQ;AAChC,cAAI,WAAW,MAAM,QAAQ,EAAE,MAAM,MAAM;AACvC,yBAAa,KAAK,EAAE,MAAM;;AAG1B,yBAAa,KAAK,WAAW;;;AAGrC,eAAO;;AAEX,aAAO;;AC5CX;;;;;;;;;;;;;;;;AAgDA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,cAAY,GAAG,UAAU,WAAW,MAAM,gFACjB,GAAG;AAC5B,cAAY,QAAQ,KAAK,OAAO,GAAG,MAAM,qDAAqD;AAC9F,UAAI,SAAS;AACT,eAAO,aAAa,UAAS,GAAG,UAAU;;AAE9C,0BAAoB,cAAc,IAAI;AACtC,uBAAiB,IAAI;AACrB,yBAAmB,IAAI,MAAM,MAAI,cAAc,aAAa,GAAG,GAAG,WAAW,OAAO,YAAY;AAChG,aAAO,KAAI,IAAI;;AAEP,oBAAW,IAAG,CAAE;AC7D5B;;;;;;;;;;;;;;;;AAiBO;AAEH,aAAO,KAAK,MAAM,KAAK,IAAI,GAAG,KAAK,KAAK,KAAK,IAAI,SAAS,KAAK,IAAI;;AAEhE;AACH,mBAAa,IAAI,eAAe;AAChC,wBAAkB,IAAI,aAAa;AACnC,mBAAa,GAAG,IAAI,cAAc,EAAE;AAChC,uBAAgB,IAAM,KAAK,KAAK,IAAM,gBAAe,OAAO;AAC5D,kBAAU,KAAK,IAAI,IAAI,KAAK,IAAI;;AAEpC,aAAO,UAAS,WAAW;;AC5B/B;;;;;;;;;;;;;;;;AAoCA,0DAAsD;AAClD,2BAAqB,iBAAgB,aAAa,eAAe;AACjE,uBAAiB,iBAAgB,SAAS,WAAW;AACrD,cAAO,aAAa,OAAO,GAAG,MAAM,uEACrB,aAAa;AAC5B,cAAO,aAAa,OAAO,MAAM,SAAS,MAAM,MAAM,mFAE/C,aAAa,yBAAyB,SAAS;AACtD,yBAAkB,aAAa,MAAM,MAAM,GAAG,aAAa,MAAM,SAAS,IAAI,SAAS,OAAO;AAE9F,sBAAgB,aAAa,MAAM,aAAa,MAAM,SAAS;AAC/D,cAAO,IAAI,KAAK,KAAK,SAAS,MAAM,4EAClB,qBAAqB;AACvC,8BAAwB,MAAM,aAAa;AAC3C,0BAAoB,MAAM,SAAS;AAGnC,4BAAsB,CAAC,gBAAgB,SAAS,SAAS;AACzD,yBAAkB,wBAAuB,QAAQ;AACjD,mBAAa,GAAG,IAAI,OAAO;AACvB,uBAAe,IAAI;AACnB,qBAAa,gBAAgB,SAAS,QAAQ,SAAS;AACvD,0BAAkB;AAClB,qBAAa,GAAG,IAAI,KAAK,QAAQ;AAC7B,oBAAU,KAAK,CAAE,OAAO,KAAK,IAAI,OAAO;;AAE5C,kBAAU,KAAK,WAAU,GAAE,QAAQ,EAAE;AACrC,mBAAU,KAAK;AACf,qBAAa,GAAG,IAAI,GAAG;AACnB,cAAI,UAAU,GAAG,UAAU,YAAY;AACnC,uBAAU,KAAK;AACf;;;;AAIZ,UAAI,gBAAgB;AAChB,qBAAa;;AAEjB,UAAI,YAAY;AACZ,iBAAS;;AAGb,aAAO,QAAO,YAAW,SAAS,OAAO;;AAEjC,wBAAe;AChF3B;;;;;;;;;;;;;;;;AA4CA,mFAA8E;AAC1E,gBAAU;AACV,UAAI,EAAE,SAAS;AACX,cAAM,SAAQ,GAAG,CAAC,GAAG,EAAE,MAAM,IAAI,EAAE,MAAM,IAAI,EAAE,MAAM;;AAEzD,iBAAW;AACX,UAAI,KAAK,SAAS;AACd,eAAO,SAAQ,IAAI,CAAC,GAAG,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM;;AAE9D,cAAY,IAAI,SAAS,GAAG,MAAM,iEAC3B,IAAI;AACX,cAAY,KAAK,SAAS,G
AAG,MAAM,8DAC5B,KAAK;AACZ,cAAY,YAAY,WAAW,GAAG,MAAM,mEACrC;AACP,sBAAgB,eAAe,SAAS,IAAI,MAAM,KAAK,IAAI,MAAM;AACjE,uBAAiB,eAAe,SAAS,KAAK,MAAM,KAAK,KAAK,MAAM;AACpE,cAAY,YAAY,YAAY,IAAI,MAAM,4CAA4C,8CACtD,YAAY;AAChD,cAAY,aAAa,YAAY,IAAI,MAAM,0CAA0C,iDACnD,YAAY;AAClD,UAAI,mBAAmB;AACnB,gBAAY,OAAW,OAAM,MAAM,gFACZ,+BAA+B;;AAE1D,sBAAgB;AACZ,0BAAkB;AAClB,4BAAoB,yBAAkC;AACtD,yBAAiB,mBAA4B,IAAI,OAAO,aAAa,SAAS,WAAW,MAAK,iBAAiB,OAAO;AACtH,eAAO,SAAQ,gBAAgB,KAAK,MAAM;;AAE9C,qBAAe,CAAE,GAAG,KAAK,IAAI;AAC7B,oBAAc,CAAE,SAAS,KAAA,MAAK,YAAY,iBAAiB;AAC3D,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAM,sBAAsB;;AAEtE,iCAA6B,IAAG,CAAE;AC/EzC;;;;;;;;;;;;;;;;AA0BO;AACH,UAAI,eAAc,QAAQ,gBAAe;AACrC,eAAO;;AAEX,UAAI,gBAAe;AACf,eAAO,KAAI,IAAI,MAAK;;AAExB,YAAM,IAAI,MAAM,gDAAgD;;AAG7D;AACH,gBAAU;AACV,yBAAmB,kBAAgC,KAAK,OAAO,aAAa;AAC5E,UAAI,WAAW,SAAS;AACpB,cAAM,MAAI,KAAK;;AAEnB,aAAO,SAAQ,KAAK,KAAK;;AAEtB;AACH,UAAI,gBAAe;AACf,eAAO;iBAEF,gBAAe;AACpB,eAAO,MAAK;iBAEP,gBAAe;AACpB,eAAO,KAAI;iBAEN,gBAAe;AACpB,eAAO,OAAM;iBAER,gBAAe;AACpB,eAAO,OAAM,GAAG;;AAEpB,YAAM,IAAI,MAAM,4BAA4B;;AAGzC,wBAAmB;AACtB,2BAAqB,gBAAgB;AACrC,aAAO,CAAC,gBAAgB,gBAAe;;ACjE3C;;;;;;;;;;;;;;;;AAuFA,2BAAwB,GAAG,QAAQ,SAAS,WAAK,aAAa,QAAQ,YAAY,CAAC,GAAG,IAAI,iBAAiB,MAAM,0BAAa,UAAU;AACpI,oBAAa,eAAc;AAC3B,UAAI,YAAW,QAAO,MAAM,eAAe,iBAAgB;AACvD,qBAAa,QAAc,GAAG,QAAQ,SAAS,MAAK,YAAY,WAAW;AAC3E,YAAI,QAAQ;AACR,mBAAS,MAAI,QAAQ;;AAEzB,eAAO,iBAAgB,QAAQ,aAAY;;AAE/C,iBAAW,iBAAgB,GAAG,KAAK;AACnC,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,gBAAU;AACV,yBAAmB;AACnB,UAAI,GAAG,SAAS;AACZ,uBAAe;AACf,cAAM,SAAQ,IAAI,CAAC,GAAG,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM;;AAE7D,cAAY,IAAI,SAAS,GAAG,MAAM,6DAC3B,IAAI;AACX,cAAY,QAAQ,SAAS,GAAG,MAAM,8DAC/B,QAAQ;AACf,UAAI,mBAAmB;AACnB,gBAAY,OAAW,OAAM,MAAM,6EACZ,+BAA+B;;AAE1D,cAAY,IAAI,MAAM,OAAO,QAAQ,MAAM,IAAI,MAAM,oCAAoC,IAAI,MAAM,yCACrE,QAAQ,MAAM;AAC5C,cAAY,gCAAyC,SAAS,YAAY,MAAM,uEAC7D,0BAA0B;AAC7C,cAAY,eAAe,QAAQ,MAAM,sCAAsC;AAC/E,uBAAiB,mBAA4B,IAAI,OAAO,QAAQ,OAAO,SAAS,WAAW,MAAK;AAChG;AACA,UAAI,QAAQ;AACR,gBAAQ,iBAAgB,MAAM,QAAQ;AACtC,SAAC,SAAS,gBAAe,OAAO;AAChC,oCAA0C,SAAS,UAAU,MAAM;;AAEvE;AACA,UAAI,0BAA0B;AAC1B,kCAA0B,iBAAgB,wBAAwB,iBAAiB;;AAEvF,oBAAa;AACT,4CAAiC;AACjC,6BAAqB,sBAAqB,IAAI,GAAG;AACjD,gBAAY,mBAA4B,YAAY,MAAM,uHAEA;AAC1D,qBAAa,qBAAoB,KAAI,OAAO,cAAc,UAAS,SAAS;AAC5E,0BAAkB,qBAAqB,MAAK,cAAc,SAAQ,OAAO,SAAS;AAClF,oBAAY,CAAC,MAAM;AACnB,YAAI,UAAS;AACT,0BAAgB,sBAAqB,QAAO;AAC5C,cAAI,KAAK;;AAEb,eAAO;;AAEX,sBAAgB;AACZ,oBAAY,SAAQ,YAAY;UAC5B,OAAO;UACP,QAAQ;UACR;UACA,MAAM;UACN,YAAA;UACA,wBAAwB;;AAE5B,eAAO;;AAEX,qBAAe;QACX,GAAG;QACH,QAAQ;QACR,MAAM;QACN,wBAAwB;;AAE5B,oBAAc,CAAE,SAAS,KAAA,MAAK,YAAY,WAAW,iBAAiB,YAAA;AAGtE,UAAI,QAAQ;AACR,yBAAiB,WAAW;AACxB,oBAAU,QAAO,cAAc,SAAS,QAAQ,MAAiB,cAAa;AAC9E,eAAK,CAAC,SAAQ,MAAK;AACnB,cAAI;AACA,kBAAM,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAE9D,iBAAO,CAAE,OAAO,KAAK,UAAU;;AAEnC,eAAO,SAAS,KAAK;;AAGrB,iCAAyB,WAAW;AAChC,oBAAU,QAAO,cAAc,SAAS,QAAQ,MAAiB,cAAa;AAC9E,eAAK,CAAC,SAAQ,MAAK,KAAK;AACxB,cAAI;AACA,kBAAM,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAE9D,iBAAO,CAAE,OAAO,KAAK,UAAU;;AAEnC,eAAO,iBAAiB,KAAK,SAAS;;;AAGvC,qBAAe,IAAG,CAAE;AC1L3B;;;;;;;;;;;;;;;;AAqBA,iGAA4F,CAAC,GAAG;AAC5F,gBAAU;AACV,UAAI,EAAE,SAAS;AACX,cAAM,SAAQ,GAAG,CAAC,GAAG,EAAE,MAAM,IAAI,EAAE,MAAM,IAAI,EAAE,MAAM;;AAEzD,iBAAW;AACX,UAAI,KAAK,SAAS;AACd,eAAO,SAAQ,IAAI,CAAC,GAAG,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM;;AAE9D,sBAAgB;AACZ,yBAAiB,mBAA4B,EAAE,OAAO,aAAa,SAAS,WAAW,MAAK,iBAAiB;AAC7G,eAAO,SAAQ,yBAAyB,KAAK,MAAM;;AAEvD,qBAAe,CAAE,GAAG,KAAK,IAAI;AAC7B,oBAAc,CAAE,SAAS,KAAA,MAAK,iBAAiB,WAAW;AAC1D,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAM,qCAAqC;;AAErF,gDAA4C,IAAG,CAAE;ACtCxD;;
;;;;;;;;;;;;;;AAqBA,gGAA2F,CAAC,GAAG;AAC3F,iBAAW;AACX,yBAAmB;AACnB,UAAI,GAAG,SAAS;AACZ,uBAAe;AACf,eAAO,SAAQ,IAAI,CAAC,GAAG,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM;;AAE9D,sBAAgB;AACZ,yBAAiB,mBAA4B,QAAQ,OAAO,OAAO,SAAS,WAAW,MAAK,iBAAiB;AAC7G,eAAO,SAAQ,wBAAwB,MAAM,QAAQ;;AAEzD,qBAAe,CAAE,IAAI,MAAM;AAC3B,oBAAc,CAAE,SAAS,KAAA,MAAK,iBAAiB,WAAW,YAAY;AACtE,kBAAY,QAAO,cAAc,SAAS,QAAQ,MAAM,oCAAoC;AAC5F,UAAI;AACA,eAAO,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAE/D,aAAO;;AAEJ,+CAA2C,IAAG,CAAE;ACxCvD;;;;;;;;;;;;;;;;AAiFA,oCAAiC,GAAG,QAAQ,SAAS,WAAK,aAAa,QAAQ,YAAY,CAAC,GAAG,IAAI,iBAAiB,MAAM,0BAAa,UAAU;AAC7I,UAAI,YAAW,QAAO,MAAM,eAAe,iBAAgB;AACvD,qBAAa,iBAAuB,GAAG,QAAQ,SAAS,MAAK,YAAY,WAAW;AACpF,YAAI,QAAQ;AACR,mBAAS,MAAI,QAAQ;;AAEzB,eAAO,iBAAgB,QAAQ,aAAY;;AAE/C,iBAAW,iBAAgB,GAAG,KAAK;AACnC,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,gBAAU;AACV,yBAAmB;AACnB,UAAI,GAAG,SAAS;AACZ,uBAAe;AACf,cAAM,SAAQ,IAAI,CAAC,GAAG,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM;;AAE7D,cAAY,IAAI,SAAS,GAAG,MAAM,sEACtB,IAAI;AAChB,cAAY,QAAQ,SAAS,GAAG,MAAM,uEAClB,QAAQ;AAC5B,cAAY,IAAI,MAAM,OAAO,QAAQ,MAAM,IAAI,MAAM,6DAC7C,IAAI,MAAM,qDACJ,QAAQ,MAAM;AAC5B,UAAI,aAAa;AACb,oBAAY,CAAC,GAAG;;AAEpB,cAAY,gCAAyC,SAAS,YAAY,MAAM,sFACvD,0BAA0B;AACnD,UAAI,mBAAmB;AACnB,gBAAY,OAAW,OAAM,MAAM,qFACN,+BAA+B;;AAEhE,uBAAiB,mBAA4B,IAAI,OAAO,QAAQ,OAAO,SAAS,WAAW,MAAK,iBAAiB;AACjH;AACA,UAAI,QAAQ;AACR,gBAAQ,iBAAgB,MAAM,QAAQ;AACtC,SAAC,SAAS,gBAAe,OAAO;AAChC,oCAA0C,SAAS,UAAU,MAAM;;AAEvE;AACA,UAAI,0BAA0B;AAC1B,kCAA0B,iBAAgB,wBAAwB,iBAAiB;;AAEvF,oBAAa;AACT,gBAAY,mBAA4B,YAAY,MAAM,mHAElD;AACR,2CAAgC;AAChC,6BAAqB,sBAAqB,IAAI,GAAG;AACjD,qBAAa,mCAAmC,KAAI,OAAO,cAAc,UAAS,SAAS,MAAK,WAAW;AAC3G,0BAAkB,oCAAoC,MAAK,cAAc,SAAQ,OAAO,SAAS,MAAK,WAAW;AACjH,YAAI,SAAQ;AACR,0BAAgB,sBAAqB,OAAO;AAC5C,iBAAO,CAAC,MAAM,WAAW;;AAE7B,eAAO,CAAC,MAAM;;AAElB,sBAAgB;AACZ,oBAAY,SAAQ,qBAAqB;UACrC,OAAO;UACP,QAAQ;UACR;UACA,MAAM;UACN,YAAA;UACA,wBAAwB;;AAE5B,eAAO;;AAEX,qBAAe;QACX,GAAG;QACH,QAAQ;QACR,MAAM;QACN,wBAAwB;;AAE5B,oBAAc,CAAE,SAAS,KAAA,MAAK,YAAY,WAAW,iBAAiB,YAAA;AAGtE,UAAI,QAAQ;AACR,yBAAiB,WAAW;AACxB,oBAAU,QAAO,cAAc,SAAS,QAAQ,MAAiB,uBAAsB;AACvF,eAAK,CAAC,SAAQ,MAAK;AACnB,cAAI;AACA,kBAAM,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAE9D,iBAAO,CAAE,OAAO,KAAK,UAAU;;AAEnC,eAAO,SAAS,KAAK;;AAGrB,iCAAyB,WAAW;AAChC,oBAAU,QAAO,cAAc,SAAS,QAAQ,MAAiB,uBAAsB;AACvF,eAAK,CAAC,SAAQ,MAAK,KAAK;AACxB,cAAI;AACA,kBAAM,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAE9D,iBAAO,CAAE,OAAO,KAAK,UAAU;;AAEnC,eAAO,iBAAiB,KAAK,SAAS;;;AAGvC,8BAAwB,IAAG,CAAE;ACrLpC;;;;;;;;;;;;;;;;AAgDA,2BAAwB,GAAG,GAAG,aAAa,OAAO,aAAa,OAAO,MAAM,0BAAa,UAAU;AAC/F,UAAI,YAAW,QAAO,MAAM,eAAe,iBAAgB;AACvD,qBAAa,OAAc,GAAG,GAAG,YAAY;AAC7C,YAAI,QAAQ;AACR,mBAAS,MAAI,QAAQ;;AAEzB,eAAO,iBAAgB,QAAQ,aAAY;;AAE/C,eAAS,iBAAgB,GAAG,KAAK;AACjC,eAAS,iBAAgB,GAAG,KAAK;AACjC,OAAC,IAAI,MAAM,gBAAe,IAAI;AAC9B,0BAAoB,aAAa,GAAG,MAAM,GAAG,OAAO,KAAK,GAAG,MAAM,GAAG,OAAO;AAC5E,0BAAoB,aAAa,GAAG,MAAM,GAAG,OAAO,KAAK,GAAG,MAAM,GAAG,OAAO;AAC5E,0BAAoB,aAAa,GAAG,MAAM,GAAG,OAAO,KAAK,GAAG,MAAM,GAAG,OAAO;AAC5E,0BAAoB,aAAa,GAAG,MAAM,GAAG,OAAO,KAAK,GAAG,MAAM,GAAG,OAAO;AAC5E,yBAAmB,GAAG,MAAM,MAAM,GAAG;AACrC,yBAAmB,GAAG,MAAM,MAAM,GAAG;AACrC,wBAAkB,eAAmB;AACrC,wBAAkB,eAAmB;AACrC,cAAY,GAAG,QAAQ,KAAK,GAAG,QAAQ,KAAK,GAAG,SAAS,GAAG,MAAM,MAAM,kFACnD,GAAG,YAAY,GAAG;AACtC,cAAY,aAAiB,YAAY,aAAa,MAAM,4CAA4C,oBACjG,sCAAsC,GAAG,aACzC,GAAG;AACV,cAAY,gBAAgB,aAAa,MAAM,wCAAwC,qBAChF,uCAAuC,GAAG,aAC1C,GAAG,wBAAwB,6BACX;AACvB,uBAAiB,GAAG,MAAM,MAAM,GAAG,IAAI,OAAO,CAAC,aAAa;AAC5D,kBAAY,aACR,SAAQ,IAAI,CAAC,WAAW,aAAa,gBACrC,SAAQ,IAAI,CAAC,WAAW,aAAa;AACzC,kBAAY,aACR,SAAQ,IAAI,CAAC,WAAW,aAAa,gBACrC,SAAQ,
IAAI,CAAC,WAAW,aAAa;AACzC;AACA,UAAI,QAAQ;AACR,gBAAQ,iBAAgB,MAAM,QAAQ;AACtC,SAAC,SAAS,gBAAe,OAAO;AAChC,oCAA0C,UAAU,MAAM;;AAE9D;AACA,UAAI,0BAA0B;AAC1B,kCAA0B,iBAAgB,wBAAwB,iBAAiB;;AAEvF,oBAAa;AACT,wCAA6B;AAI7B,6BAAqB,sBAAqB,SAAQ,IAAI,EAAE,QAAQ,GAAG;AACnE;AACA;AACA,YAAI,CAAC,cAAc,CAAC;AAChB,iBAAO,OAAc,cAAc,MAAK,OAAO;AAC/C,iBAAO,OAAc,MAAK,cAAc,MAAM;mBAEzC,CAAC,cAAc;AACpB,iBAAO,OAAc,cAAc,MAAK,OAAO;AAC/C,iBAAO,OAAc,cAAc,MAAK,MAAM;mBAEzC,cAAc,CAAC;AACpB,iBAAO,OAAc,MAAK,cAAc,OAAO;AAC/C,iBAAO,OAAc,MAAK,cAAc,OAAO;;AAG/C,iBAAO,OAAc,MAAK,cAAc,MAAM;AAC9C,iBAAO,OAAc,cAAc,MAAK,MAAM;;AAElD,YAAI,QAAQ;AACR,0BAAgB,sBAAqB,QAAO;AAC5C,iBAAO,CAAC,MAAM,MAAM;;AAGpB,iBAAO,CAAC,MAAM;;;AAGtB,sBAAgB;AACZ,kBAAU,SAAQ,iBAAiB;UAC/B,GAAG;UACH,GAAG;UACH;UACA;UACA,MAAM;UACN,YAAA;UACA,wBAAwB;;AAE5B,eAAO;;AAEX,qBAAe;QACX,GAAG;QACH,GAAG;QACH,MAAM;QACN,wBAAwB;;AAE5B,oBAAc,CAAE,YAAY,YAAY,YAAA;AAGxC,UAAI,QAAQ;AACR,yBAAiB,WAAW;AACxB,sBAAY,QAAO,cAAc,SAAS,QAAQ,MAAiB,eAAc;AACjF,eAAK,CAAC,MAAK,MAAK;AAChB,iBAAO,CAAE,OAAO,SAAQ,KAAK,WAAW,UAAU;;AAEtD,eAAO,SAAS,KAAK;;AAGrB,iCAAyB,WAAW;AAChC,sBAAY,QAAO,cAAc,SAAS,QAAQ,MAAiB,eAAc;AACjF,eAAK,CAAC,MAAK,MAAK,KAAK;AACrB,iBAAO,CAAE,OAAO,SAAQ,KAAK,WAAW,UAAU;;AAEtD,eAAO,iBAAiB,KAAK,KAAK;;;AAGnC,qBAAe,IAAG,CAAE;ACnK3B;;;;;;;;;;;;;;;;;;;;;;ACAA;;;;;;;;;;;;;;;;AA8BA;AACI,aAAO,aAAa,cAAc,MAAM;;AAErC,0BAAsB,IAAG,CAAE;ACjClC;;;;;;;;;;;;;;;;AA8BA;AACI,aAAO,aAAa,cAAc,KAAK;;AAEpC,uBAAmB,IAAG,CAAE;ACjC/B;;;;;;;;;;;;;;;;AAsCA,8DAAyD,kBAAkB;AACvE,kBAAY;AACZ,qBAAe;AACf,aAAO,QAAQ,eAAe,QAAO;AACjC,eAAO,KAAK,OAAM,SAAQ,OAAO;AACjC,iBAAS;;AAEb,UAAI;AACA,eAAO,QAAQ,QAAO;AAClB,yBAAgB,QAAQ,cAAe,QAAO;AAC9C,uBAAY,QAAO;YACf,OAAM,SAAQ,OAAO,cAAc;YAAS,MAAK,CAAC,SAAS;;AAE/D,iBAAO,KAAK;AACZ,mBAAS;;;AAGjB,UAAI,OAAO,WAAW;AAClB,eAAO,SAAS,IAAI,CAAC,GAAG;;AAE5B,aAAO,SAAQ,QAAO,SAAS,CAAC,OAAO,QAAQ;;AAE5C,kBAAc,IAAG,CAAE;AC5D1B;;;;;;;;;;;;;;;;AAwCA,0EAAqE;AACjE,UAAI,aAAa;AACb,oBAAY,oBAAoB;;AAEpC,2BAAqB,MAAM,SAAQ,aAAa;AAChD,6BAAuB,KAAI,cAAc,SAAS;AAClD,qBAAe;AACf,mBAAa,GAAG,IAAI,aAAa,MAAM,IAAI;AACvC,eAAO,KAAK,KAAK,OAAM,gBAAgB,CAAC,GAAG,IAAI,CAAC,GAAG,eAAe;;AAEtE,aAAO,QAAO;;AAEX,iBAAa,IAAG,CAAE;ACpDzB;;;;;;;;;;;;;;;;AA4CA;AACI,qBAAe,iBAAgB,QAAO,SAAS;AAC/C,qBAAe,iBAAgB,OAAO,SAAS,iBAAiB;AAChE,sBAAgB,iBAAgB,QAAQ,UAAU,iBAAiB;AACnE,eAAS,UAAU;AACnB,2BAAqB,sBAAsB;AAC3C,uBAAiB,OAAO,MAAM;AAC9B,cAAY,OAAO,SAAS,GAAG,MAAM,6DACjB,OAAO;AAC3B,cAAY,OAAO,SAAS,KAAK,OAAO,MAAM,OAAO,GAAG,MAAM,oDAAoD,6BAC7F,OAAO;AAC5B,cAAY,QAAQ,SAAS,KAAK,QAAQ,MAAM,OAAO,UAAU,MAAM,qDAAqD,2BACvG,OAAO;AAC5B,cAAY,SAAS,WAAW,GAAG,MAAM,wEAC3B,SAAS;AACvB,cAAY,SAAS,MAAM,KAAK,SAAS,MAAM,GAAG,MAAM,2CAA2C;AACnG,cAAY,WAAW,cAAc,WAAW,WAAW,MAAM,+CAA+C;AAChH,sBAAgB,cAAa,SAAQ,cAAc,QAAQ,QAAQ,SAAS,UAAU,QAAQ;AAC9F,qBAAe,CAAE,OAAO,QAAQ,OAAO,QAAQ,QAAQ;AACvD,oBAAc,CAAE,QAAQ,oBAAoB;AAC5C,kBAAY,QAAO,cAAc,SAAS,QAAQ,MAAiB,gBAAe;AAClF,aAAO;;AAEJ,2BAAsB,IAAG,CAAE;ACnElC;;;;;;;;;;;;;;;;AA4BA;AACI,qBAAe,iBAAgB,QAAO,SAAS,iBAAiB;AAChE,cAAY,OAAO,SAAS,GAAG,MAAM,6DACjB,OAAO;AAC3B,qBAAe,CAAE,OAAO;AACxB,kBAAY,QAAO,UAAU,gBAAe,QAAQ;AACpD,aAAO;;AAEJ,2BAAsB,IAAG,CAAE;ACpClC;;;;;;;;;;;;;;;;AAqCA,4DAAuD,YAAY;AAC/D,qBAAe,iBAAgB,QAAO,SAAS,oBAAoB;AACnE,cAAY,OAAO,SAAS,GAAG,MAAM,gEACjB,OAAO;AAC3B,qBAAe,CAAE,OAAO;AACxB,oBAAc,CAAE,SAAS,WAAW;AACpC,kBAAY,QAAO,UAAU,mBAAkB,QAAQ;AACvD,aAAO;;AAEJ,8BAAyB,IAAG,CAAE;AC9CrC;;;;;;;;;;;;;;;;AAiBA;AACI,UAAI,gBAAgB;AAChB,uBAAe;;AAEnB,UAAI,kBAAkB;AAClB,yBAAiB,OAAO;;AAE5B,UAAI,gBAAgB;AAChB,uBAAe;;AAEnB,uBAAiB,MAAM,MAAM;AAC7B,sBAAgB,KAAK,IAAI,eAAe;AACxC,cAAY,KAAK,gBAAgB,gBAAgB,GAAG,MAAM,4CAA4C;AACtG,cAAY,MAAM,SAAS,GAAG,MAAM,+CAA+C,MAAM;AACzF,cAAY,MAAM,MAAM,OAAO,GAAG,MAAM,oDAAoD,MAAM,MAAM;AACxG,cAAY,OAAO
,SAAS,GAAG,MAAM;AACrC,cAAY,OAAO,MAAM,OAAO,UAAU,MAAM,sDAAsD,qBACvF,OAAO,MAAM;AAC5B,cAAY,KAAK,gBAAgB,gBAAgB,GAAG,MAAM,4CAA4C;AACtG,aAAO,CAAE,eAAe,cAAc,gBAAgB;;ACpC1D;;;;;;;;;;;;;;;;AAqBA,6EAAyE,sBAAsB,OAAO;AAClG,qBAAe,iBAAgB,OAAO,SAAS;AAC/C,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,qBAAe,sBAAsB,QAAQ,SAAS,eAAe,cAAc;AACnF,sBAAgB,OAAO;AACvB,qBAAe,OAAO;AACtB,uBAAiB,OAAO;AACxB,oBAAc,CAAE,eAAe,cAAc;AAC7C,aAAO,QAAO,cAAc,OAAK,EAAE,kBAAkB,QAAQ,SAAS,eAAe,cAAc,iBAAiB,CAAE,OAAO,QAAQ,QAAQ,UAAW,MAAiB,sBAAqB;;AAE3L,8BAA0B,IAAG,CAAE;AC/BtC;;;;;;;;;;;;;;;;AA6BO;AACH,qBAAc,aAAa,KAAK,SAAS;AACzC,6BAAuB,SAAQ,IAAI,CAAE,UAAQ,KAAK;AAClD,UAAI,OAAO,gBAAgB,GAAG;;AAiB3B;AACH,aAAO,cAAc,KAAK,QAAQ,cAAc;;AASpD;AACI,aAAO,IAAI,IAAI,IAAI,IAAI,IAAI,KAAK;;AAEpC;AACI,iBAAW;AACX,kBAAY,IAAI;AAChB,mBAAa;AACb,kBAAY;AACZ,aAAO,OAAO;AACV,iBAAS,OAAS,SAAQ,SAAU;AACpC,8BAAsB,WAAW,QAAQ,IAAI;AAC7C,YAAI,gBAAgB;AAChB,iBAAO,SAAS;;AAGhB,kBAAQ;AAGR,kBAAQ,CAAC;;;AAGjB,aAAO,QAAQ,OAAO,CAAC,OAAO;;AChFlC;;;;;;;;;;;;;;;;AAsBO;AACH,aAAO,uBAAuB,OAAO,QAAQ,eAAe,cAAc,gBAAgB,GACrF;;AAEF;AACH,aAAO,uBAAuB,OAAO,QAAQ,eAAe,cAAc,gBAAgB,GAAsB,OAAgC,oBAA6C;;AAG1L;AACH,aAAO,uBAAuB,OAAO,QAAQ,eAAe,cAAc,gBAAgB,cAAc;;AAE5G,mIAA+H,4BAA4B,4BAA4B;AAGnL,yBAAmB;AACnB,mBAAa,GAAG,IAAI,OAAO,QAAQ;AAC/B,YAAI,OAAO,KAAK;AACZ,qBAAW,KAAK,CAAE,OAAO,OAAO,IAAI,UAAU,GAAG,oBAAoB;;;AAG7E,iBAAW,KAAK;AAGhB,qBAAc,eAAe,IAAK,OAAO,eAAgB;AACzD,8BAAwB;AACxB,6BAAuB;AACvB,aAAO,gBAAgB,SAAS,iBAAiB,WAAW,SAAS;AACjE,0BAAkB,WAAW;AAC7B,eAAQ,sBAAsB,UAAU,sBAAuB;AAC/D,YAAI,gBAAgB;AAChB;;AAQJ,8BAAsB;AACtB,qBAAa,gBAAgB,SAAS,GAAG,KAAK,oBAAoB,EAAE;AAChE,sBAAY,sBAAsB,OAAO,UAAU,gBAAgB;AACnE,cAAI,OAAO;AACP,8BAAkB;AAClB;;AAEJ,oBAAU,QACN,UAAU,QAAQ,eAAe,cAAc,QAAO;AAC1D,cAAI,UAAU,SAAS;AACnB;;;AAUR,kBAAU,qBAAqB,gBAAgB;AAC/C,YAAI,CAAC;AAGD,cAAI,UAAU,UAAU;AACpB,4BAAgB,KAAK;AACrB,2BAAe,KAAK,UAAU;qBAEzB,UAAU,QAAQ;AAGvB,yBAAa,YAAY,WAAW;;;;AAKhD,2BAAqB,gBAAgB;AACrC,yBAAmB,gBAAgB;AACnC,UAAI,sBAAsB,aAAa;AACnC,wBAAgB,KAAK,GAAG,IAAI,MAAM,YAAY,KAAK;AACnD,uBAAe,KAAK,GAAG,IAAI,MAAM,YAAY,KAAK;;AAEtD,qBAAe,CAAE,iBAAiB,UAAS,iBAAiB;AAC5D,UAAI;AACA,eAAO,oBAAoB,UAAS,gBAAgB;;AAExD,UAAI;AACA,eAAO,kBAAkB,QAAO,cAAc;;AAElD,aAAO;;AAEX;AACI,qBAAe,MAAM,SAAS,IAAI,GAAG,IAAI,IAAI;AAC7C,qBAAe,MAAM,SAAS,IAAI,GAAG,IAAI,IAAI;AAC7C,oBAAc,KAAK,IAAI,OAAO,IAAI,OAAO;AACzC,oBAAc,KAAK,IAAI,OAAO,IAAI,OAAO;AACzC,oBAAc,KAAK,IAAI,OAAO,IAAI,OAAO;AACzC,oBAAc,KAAK,IAAI,OAAO,IAAI,OAAO;AACzC,oBAAc,KAAK,IAAI,OAAO,IAAI,OAAO;AACzC,oBAAc,KAAK,IAAI,OAAO,IAAI,OAAO;AACzC,oBAAc,KAAK,IAAI,OAAO,IAAI,OAAO;AACzC,oBAAc,KAAK,IAAI,OAAO,IAAI,OAAO;AACzC,oBAAe,SAAQ,SAAU,SAAQ;AACzC,oBAAe,SAAQ,SAAU,SAAQ;AACzC,UAAI,SAAS,KAAK,SAAS;AACvB,eAAO;;AAEX,+BAAyB,KAAK,IAAI,OAAO;AACzC,+BAAyB,KAAK,IAAI,OAAO;AACzC,+BAAyB,KAAK,IAAI,OAAO;AACzC,+BAAyB,KAAK,IAAI,OAAO;AACzC,+BAAyB,KAAK,IAAI,mBAAmB,kBAAkB,KACnE,KAAK,IAAI,mBAAmB,kBAAkB;AAClD,aAAO,mBAAoB,SAAQ,QAAQ;;AAM/C;AACI,qBAAe,KAAK,IAAI,SAAQ,MAAM;AACtC,aAAO,OAAO,eAAe,SAAS;;AAE1C;AAKI,aAAQ,GAAG,QAAQ,GAAG,SAChB,GAAG,UAAU,GAAG,SAAW,GAAG,WAAW,GAAG;;ACrJtD;;;;;;;;;;;;;;;;AAuCA,wFAAoF,sBAAsB,OAAO;AAC7G,qBAAe,iBAAgB,OAAO,SAAS;AAC/C,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,qBAAe,sBAAsB,QAAQ,SAAS,eAAe,cAAc;AACnF,sBAAgB,OAAO;AACvB,qBAAe,OAAO;AACtB,uBAAiB,OAAO;AACxB,6BAAuB,MAAM,QAAQ,IAAI,CAAC,OAAO,QAAQ,QAAQ;AACjE,wBAAkB,eAAe;AACjC,yBAAmB,eAAe;AAIlC,kBAAY,wBAAwB,WAAW,YAAY,eAAe,cAAc;AACxF,UAAI,WAAW;AACX,eAAO;;AAEX,UAAI,YAAY;AACZ,gBAAQ;;AAEZ,aAAO;;AAEJ,mCAA+B;AC7DtC;;;;;;;;;;;;;;;;AAkDA,sFAAkF,sBAAsB,OAAO,kCAAkC;AAC7I,qBAAe,iBAAgB,OAAO,SAAS;AAC/C,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,qBAAe,sBAAsB,QAAQ,SAAS,eAAe,cAAc,gBAAgB;AACnG,sBAAgB,OAAO;AACvB,qBAAe,OAAO;AACtB,uBAAiB,OAAO;AACxB,qBAAe,OAAO;AACtB,qBAA
e,CAAE,OAAO,QAAQ,QAAQ;AACxC,oBAAc,CAAE,eAAe,cAAc,gBAAgB;AAC7D,qBAAe,QAAO,UAAU,sBAAqB,QAAQ;AAC7D,aAAO,CAAE,iBAAiB,OAAO,IAAI,gBAAgB,OAAO;;AAEzD,uCAAmC,IAAG,CAAE;AC/D/C;;;;;;;;;;;;;;;;AAgDA,iGAA6F,sBAAsB,OAAO,kCAAkC;AACxJ,qBAAe,iBAAgB,OAAO,SAAS;AAC/C,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,qBAAe,sBAAsB,QAAQ,SAAS,eAAe,cAAc,gBAAgB;AACnG,sBAAgB,OAAO;AACvB,qBAAe,OAAO;AACtB,uBAAiB,OAAO;AACxB,qBAAe,OAAO;AACtB,6BAAuB,MAAM,QAAQ,IAAI,CAAC,OAAO,QAAQ,QAAQ;AACjE,wBAAkB,eAAe;AACjC,yBAAmB,eAAe;AAIlC,kBAAY,wBAAwB,WAAW,YAAY,eAAe,cAAc,gBAAgB;AACxG,UAAI,WAAW;AACX,eAAO;;AAEX,UAAI,YAAY;AACZ,gBAAQ;;AAEZ,aAAO;;AAEJ,4CAAwC;ACvE/C;;;;;;;;;;;;;;;;AA4CA,mFAA+E,sBAAsB,OAAO,wCAAwC;AAChJ,qBAAe,iBAAgB,OAAO,SAAS;AAC/C,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,qBAAe,sBAAsB,QAAQ,SAAS,eAAe,cAAc,gBAAgB;AACnG,6BAAuB,OAAO;AAC9B,4BAAsB,OAAO;AAC7B,8BAAwB,OAAO;AAC/B,qBAAe,CAAE,OAAO,QAAQ,QAAQ;AACxC,oBAAc;QACV,eAAe;QACf,cAAc;QACd,gBAAgB;QAChB;;AAEJ,qBAAe,QAAO,UAAU,sBAAqB,QAAQ;AAC7D,aAAO,CAAE,iBAAiB,OAAO,IAAI,cAAc,OAAO;;AAEvD,oCAAgC,IAAG,CAAE;AC7D5C;;;;;;;;;;;;;;;;AA0CA,8FAA0F,sBAAsB,OAAO,wCAAwC;AAC3J,qBAAe,iBAAgB,OAAO,SAAS;AAC/C,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,qBAAe,sBAAsB,QAAQ,SAAS,eAAe,cAAc,gBAAgB;AACnG,6BAAuB,OAAO;AAC9B,4BAAsB,OAAO;AAC7B,8BAAwB,OAAO;AAC/B,sCAAgC,MAAM,QAAQ,IAAI,CAAC,OAAO,QAAQ,QAAQ;AAI1E,kBAAY,wBAAwB,WAAW,YAAY,gBAAgB,eAAe,iBAAiB;AAC3G,UAAI,WAAW;AACX,eAAO;;AAEX,UAAI,YAAY;AACZ,gBAAQ;;AAEZ,aAAO;;AAEJ,yCAAqC;AC9D5C;;;;;;;;;;;;;;;;AAoCA,0DAAsD;AAClD,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,cAAY,QAAQ,SAAS,KAAK,QAAQ,SAAS,GAAG,MAAM,gEAChD,QAAQ;AACpB,cAAY,KAAK,WAAW,GAAG,MAAM,6DAC9B;AACP,wBAAkB;AAClB,yBAAmB;AACnB,UAAI,QAAQ,SAAS;AACjB,uBAAe;AACf,sBAAc,SAAQ,SAAS,CAAC,GAAG,QAAQ,MAAM,IAAI,QAAQ,MAAM,IAAI,QAAQ,MAAM;;AAEzF,oCAA8B;AAC9B,sBAAgB;AACZ,aAAK,CAAC;AACN,eAAO,SAAQ,eAAe,aAAa,WAAW,UAAU;;AAEpE,qBAAe,CAAE,QAAQ;AACzB,oBAAc,CAAE,cAAc;AAC9B,kBAAY,QAAO,cAAc,SAAS,QAAQ,MAAqB,iBAAgB;AACvF,UAAI;AACA,eAAO,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAE/D,aAAO;;AAEJ,4BAAuB,IAAG,CAAE;AC7DnC;;;;;;;;;;;;;;;;AAoCA,iEAA6D;AACzD,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,cAAY,QAAQ,SAAS,KAAK,QAAQ,SAAS,GAAG,MAAM,uEAChD,QAAQ;AACpB,cAAY,KAAK,WAAW,GAAG,MAAM,oEAC9B;AACP,cAAY,QAAQ,UAAU,aAAa,QAAQ,UAAU,SAAS,MAAM;AAC5E,wBAAkB;AAClB,yBAAmB;AACnB,UAAI,QAAQ,SAAS;AACjB,uBAAe;AACf,sBAAc,SAAQ,SAAS,CAAC,GAAG,QAAQ,MAAM,IAAI,QAAQ,MAAM,IAAI,QAAQ,MAAM;;AAEzF,oCAA8B;AAC9B,qBAAe,CAAE,QAAQ;AACzB,oBAAc,CAAE,cAAc;AAC9B,sBAAgB;AACZ,aAAK,CAAC;AACN,eAAO,SAAQ,sBAAsB,aAAa,WAAW,UAAU;;AAE3E,kBAAY,QAAO,cAAc,SAAS,QAAQ,MAAqB,uBAAuB;AAC9F,UAAI;AACA,eAAO,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAE/D,aAAO;;AAEJ,kCAA8B,IAAG,CAAE;AC9D1C;;;;;;;;;;;;;;;;AAoEA;AACI,cAAO,WAAW,MAAM,GAAG,MAAM,gDAAgD;AACjF,cAAO,WAAW,MAAM,GAAG,MAAM,gDAAgD;AACjF,iBAAW,iBAAgB,GAAG,KAAK;AACnC,cAAO,GAAG,QAAQ,GAAG,MAAM,4CAA4C,GAAG;AAC1E,oBAAc,GAAG;AACjB,qBAAe,GAAG,MAAM,MAAM;AAC9B,UAAI,CAAE,aAAY;AACd,cAAM,IAAI,MAAM,yBAAyB,0DACY;;AAEzD,UAAI,CAAE,aAAY;AACd,cAAM,IAAI,MAAM,yBAAyB,6DACe;;AAE5D,UAAI,WAAW;AACX,mBAAW;;AAEf,UAAI,WAAW;AACX,mBAAW;;AAEf,gBAAU,SAAQ,MAAM,GAAG,GAAG,GAAG,UAAU,CAAC,IAAI;AAChD,gBAAU,MAAM,GAAG,GAAG,GAAG;AACzB,iBAAW,IAAI,GAAG;AAClB,qBAAe,WAAW,UAAU,IAAI,QAAO,CAAC,UAAU,WAAW,aAAa,IAAI,QAAO,CAAC,UAAU;AACxG,mBAAa,OAAM,CAAC,GAAG,IAAI,GAAG;AAC9B,aAAO,SAAQ,MAAM,QAAQ,SAAQ,IAAI,CAAC,IAAI,GAAG,KAC5C,IAAI,SAAO,MAAM,QAAQ,KAAK,SAAS;;AAEzC,qBAAiB,IAAG,CAAE;ACjG7B;;;;;;;;;;;;;;;;AAuDA;AACI;AACA,UAAI,MAAM,QAAQ;AACd,0BAAkB;AAClB,gBAAO,MAAM,QAAQ,GAAG,SAAS,GAAG,MAAM;AAE1C,oBAAY,GAAG,GAAG,MAAM;AACxB,qBAAa,GAAG,IAAI,GAAG,QAAQ,EAAE;AAC7B,kBAAO,GAAG,GAAG,MAAM,OAAO,KAAK,MAAM,iEAC7B,GAAG,GAAG,MAAM,UAAU;;;AAIlC,0BAAkB;AAClB,aAAK,OAAM,IAAI,GAAG,MAAM
,IAAI,GAAG,IAAI,OAAK,QAAQ,GAAG,CAAC;;AAExD,cAAO,GAAG,UAAU,GAAG,GAAG,MAAM,IAAI,MAAM,oCAAoC,GAAG,yCACpD,GAAG,GAAG,MAAM;AACzC,iBAAW;AACX,mBAAa;AACb,mBAAa,GAAG,IAAI,GAAG,QAAQ,EAAE;AAC7B,WAAG,KAAK,QAAO,KAAK;AAChB,kBAAQ,KAAK;AACb,cAAI,IAAI;AACJ,yBAAa,GAAG,IAAI,GAAG,EAAE;AACrB,2BAAa,KAAI,MAAI,KAAI,GAAG,IAAI,KAAK,GAAG;AACxC,kBAAI,IAAI,GAAG;;;AAGnB,iBAAO,IAAI,GAAG,KAAK,GAAG;;;AAG9B,UAAI;AACA,eAAO,MAAM,IAAI;;AAGjB,eAAO;;;AAGR,wBAAoB,IAAG,CAAE;AC9FhC;;;;;;;;;;;;;;;;AAgFA,mCAA+B;AAC3B,cAAO,EAAE,QAAQ,GAAG,MAAM,gEAAgE,EAAE;AAC5F,UAAI,EAAE,SAAS;AACX,eAAO,KAAK,GAAG;;AAOf,8BAAsB,EAAE,MAAM,MAAM,GAAG,EAAE,MAAM,SAAS,GACnD,OAAO,iBAAiB,QAAQ;AACrC,qBAAa,QAAQ,SAAQ,GAAG;UAC5B;UAAe,EAAE,MAAM,EAAE,MAAM,SAAS;UACxC,EAAE,MAAM,EAAE,MAAM,SAAS;YACzB;AACJ,qBAAa;AACb,qBAAa;AACb,aAAK,QAAQ;AACT,6BAAmB,KAAK,KAAK;AAC7B,eAAK,KAAK;AACV,eAAK,KAAK;;AAEd,kBAAU,SAAQ,MAAM,MAAM,IAAI,EAAE;AACpC,kBAAU,SAAQ,MAAM,MAAM,IAAI,EAAE;AACpC,eAAO,CAAC,GAAG;;;AAGnB,oCAAgC;AAC5B,aAAO,QAAO,KAAK;AACf,gBAAO,EAAE,MAAM,WAAW,GAAG,MAAM,0CAA0C,EAAE,MAAM;AACrF,kBAAU,EAAE,MAAM;AAClB,kBAAU,EAAE,MAAM;AAClB,gBAAQ,IAAI;AACZ,gBAAQ,MAAM;AACd,sBAAc,SAAS,CAAC,CAAC,KAAK,CAAC,GAAG;AAClC,gBAAQ,MAAM;AACd,sBAAc,KAAK,IAAI,IAAI;AAC3B,qBAAa,GAAG,IAAI,OAAO,EAAE;AAGzB,wBAAc;AACd,wBAAc;AACd,wBAAc;AACd,WAAC,GAAG,GAAG,KAAK,QAAO,KAAK;AAEpB,2BAAe,OAAM,GAAG,CAAC,GAAG,IAAI,CAAC,IAAI,GAAG;AACxC,0BAAc,KAAK;AACnB,wBAAY,OAAM,GAAG,CAAC,GAAG,IAAI,CAAC,GAAG;AAEjC,sBAAU,MAAM,QAAQ,KAAK,IAAI,SAAS,CAAC,CAAC,OAAO,SAAS,CAAC,CAAC;AAC9D,uBAAW,IAAI,KAAK,KAAI,GAAG;AAC3B,yBAAa,IAAI,QAAQ;AACzB,gBAAI,KAAK,MAAM,OAAO;AAClB,kBAAI,MAAM;;AAGV,kBAAI,QAAO;gBACP;gBACA,OAAM,MAAM,CAAC,GAAG,IAAI,CAAC,KAAK,MAAM,KAAK,GAAG,KAAK,MAAM;iBACpD;;AAEP,wBAAY,IAAI,IAAI,OAAO,GAAG,KAAK;AAEnC,6BAAiB,OAAM,GAAG,CAAC,GAAG,IAAI,CAAC,IAAI,GAAG;AAC1C,8BAAkB,KAAI,KAAK;AAC3B,uBAAW,WAAU;AACrB,gBAAI,MAAM;AACN,kBAAI,IAAI,UAAU,OAAO,WAAW,OAAO,IAAI;;AAG/C,gCAAkB,IAAI,UAAU,OAAO,WAAW,OAAO,IAAI;AAC7D,kBAAI,QAAO,CAAC,OAAM,GAAG,CAAC,GAAG,IAAI,CAAC,GAAG,KAAK,YAAY;;AAEtD,+BAAmB,WAAU;AAC7B,6BAAiB,OAAM,GAAG,CAAC,GAAG,IAAI,CAAC,GAAG,EAAE,MAAM,KAAK;AACnD,gBAAI,MAAM;AACN,kBAAI,IAAI,UAAU,OAAO,OAAO,UAAU,IAAI;;AAG9C,gCAAkB,IAAI,UAAU,OAAO,OAAO,UAAU,IAAI;AAC5D,kBAAI,QAAO,CAAC,OAAM,GAAG,CAAC,GAAG,IAAI,CAAC,GAAG,KAAK,YAAY;;AAEtD,mBAAO,CAAC,GAAG,GAAG;;AAElB,kBAAQ,CAAC,OAAO,OAAO;;AAE3B,YAAI,CAAC,gBAAgB,IAAI;AACrB,cAAI,OAAM,GAAG,CAAC,GAAG,IAAI,CAAC,GAAG;AACzB,cAAI,OAAM,GAAG,CAAC,GAAG,IAAI,CAAC,GAAG;;AAE7B,eAAO,CAAC,GAAG;;;AAGZ,eAAW,IAAG,CAAE;AC9KvB;;;;;;;;;;;;;;;;AAiBC,IAAA;AACG,gBAAU,UAAU,UAAU,KAAK;AACnC,gBAAU,UAAU,UAAU,KAAK;AACnC,gBAAU,UAAU,SAAS,KAAK;AAClC,gBAAU,UAAU,4BAA4B,KAAK;OACtD,SAAA,aAAc,UAAA,YAAY;ACA7B,iEAA2D,SAAA,UAAU;AACjE,sBAAgB,iBAAgB,SAAQ,UAAU;AAClD,qBAAe;AACf,UAAI,WAAW;AACX,mBAAW,iBAAgB,SAAS,WAAW;;AAEnD,2BAAsB,YAAY,OAAQ,UAAU,KAAI,SAAS;AACjE,UAAI,eAAc,SAAA,UAAU;AACxB,eAAO;;AAEX,UAAI,eAAc,SAAA,UAAU;AACxB,eAAO,MAAI;;AAEf,UAAI,eAAc,SAAA,UAAU;AACxB,YAAI,YAAY;AACZ,iBAAO,KAAK;;AAGZ,kCAAwB,QAAQ,OAAO,SAAS;AAChD,yBAAe,IAAI,MAAI,eAAe,MAAI;AAC1C,iBAAO,kBAAkB,IAAI,IAAI,QAAQ,QAAO,oBAC5C;;;AAGZ,UAAI,eAAc,SAAA,UAAU;AACxB,YAAI,YAAY;AACZ,iBAAO,IAAI,MAAI,eAAe,QAAO,QAAQ;;AAG7C,qCAA2B,KAAI,UAAU,OAAK,QAAQ;AACtD,8BAAoB,MAAK,MAAI,SAAS,oBAAoB,QAAO,MAAM;AACvE,iBAAO,IAAI,MAAI,eAAe;;;AAGtC,YAAM,MAAM,sBAAsB;;AAE/B,gCAA4B,IAAG,CAAE;AC1DxC;;;;;;;;;;;;;;;;AAsCA,4EAAuE,SAAA,UAAU;AAC7E,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,2BAAqB,iBAAgB,aAAa,eAAe;AACjE,qBAAe;AACf,UAAI,WAAW;AACX,mBAAW,iBAAgB,SAAS,WAAW;;AAEnD,yBAAkB,QAAQ,OAAO,aAAa,OAAO;AACrD,sBAAe,IAAI,IAAI,SAAS;AAChC,aAAO,oBAAoB,SAAQ,UAAU;;AAE1C,+BAA2B,IAAG,CAAE;ACxBvC,8EAAyE,SAAA,UAAU;AAC/E,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,2BAAqB,iBAAgB,aAAa,
eAAe;AACjE,qBAAe;AACf,UAAI,WAAW;AACX,mBAAW,iBAAgB,SAAS,WAAW;;AAEnD,yBAAkB,QAAQ,OAAO,aAAa,OAAO;AACrD,kBAAY,QAAO;AACnB,sBAAe,IAAI,KAAK,MAAI,KAAI,SAAS,eAAe,MAAM;AAC9D,aAAO,oBAAoB,SAAQ,UAAU;;AAE1C,2BAAuB,IAAG,CAAE;ACbnC,mEAA8D,SAAA,UAAU;AACpE,oBAAc,iBAAgB,QAAQ,UAAU;AAChD,2BAAqB,iBAAgB,aAAa,eAAe;AACjE,qBAAe;AACf,UAAI,WAAW;AACX,mBAAW,iBAAgB,SAAS,WAAW;;AAEnD,yBAAkB,QAAQ,OAAO,aAAa,OAAO;AACrD,kBAAY,QAAO;AAEnB,gBAAU,IAAI,KAAI,QAAO,IAAI,UAAU;AACvC,sBAAe,MAAK,IAAI,KAAK,KAAI,SAAS;AAC1C,aAAO,oBAAoB,SAAQ,UAAU;;AAE1C,sBAAkB,IAAG,CAAE;ACtC9B;;;;;;;;;;;;;;;;AA4CA,8DAA0D,gBAAiB,SAAA,UAAU;AACjF,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,2BAAqB,iBAAgB,aAAa,eAAe;AACjE,qBAAe;AACf,UAAI,WAAW;AACX,mBAAW,iBAAgB,SAAS,WAAW;;AAEnD,yBAAkB,QAAQ,OAAO,aAAa,OAAO;AACrD,0BAAoB,QAAO;AAC3B,oBAAc,IAAI,IAAI,cAAc;AACpC,wBAAkB,QAAQ,OAAO;AACjC,qBAAe,IAAI,OAAO;AAC1B,sBAAe,MAAI,KAAI,QAAO,MAAM,OAAO,aAAa,KAAI,aAAa;AACzE,aAAO,oBAAoB,SAAQ,UAAU;;AAE1C,sBAAkB,IAAG,CAAE;AC3D9B;;;;;;;;;;;;;;;;AA2CA,+DAA0D,mBAAkB,SAAA,UAAU;AAClF,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,2BAAqB,iBAAgB,aAAa,eAAe;AACjE,qBAAe;AACf,UAAI,WAAW;AACX,mBAAW,iBAAgB,SAAS,WAAW;;AAEnD,yBAAkB,QAAQ,OAAO,aAAa,OAAO;AACrD,kBAAY,QAAO;AACnB,4BAAsB,QAAO;AAC7B,kBAAW,IAAI,KAAI,SAAS,MAAI,MAAI,cAAc;AAClD,kBAAW,KAAI,IAAI,KAAK,UAAU,MAAI,MAAI,IAAI,KAAK,eAAe;AAClE,sBAAe,IAAI,KAAI;AACvB,aAAO,oBAAoB,SAAQ,UAAU;;AAE1C,oBAAgB,IAAG,CAAE;AC1D5B;;;;;;;;;;;;;;;;AAqCA,0EAAqE,SAAA,UAAU;AAC3E,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,2BAAqB,iBAAgB,aAAa,eAAe;AACjE,qBAAe;AACf,UAAI,WAAW;AACX,mBAAW,iBAAgB,SAAS,WAAW;;AAEnD,yBAAkB,QAAQ,OAAO,aAAa,OAAO;AACrD,sBAAe,kBAAkB,SAAS;AAC1C,aAAO,oBAAoB,SAAQ,UAAU;;AAE1C,6BAAyB,IAAG,CAAE;AChDrC;;;;;;;;;;;;;;;;AA8BA;AACI,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,yBAAkB,QAAQ,OAAO,QAAQ,OAAO;AAqBhD,wBAAkB,MAAK;AACvB,4BAAsB,KAAI,SAAS;AACnC,4BAAsB,MAAM,IAAI,IAAI,IAAI;AACxC,aAAO,MAAI,IAAI,WAAW,gBAAgB;;AAuB9C,sFAAkF,gBAAe,SAAA,UAAU;AACvG,8BAAwB,iBAAgB,kBAAkB,oBAAoB;AAC9E,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,qBAAe;AACf,UAAI,WAAW;AACX,mBAAW,iBAAgB,SAAS,WAAW;;AAEnD,yBAAkB,kBAAkB,OAAO,QAAQ,OAAO;AAC1D,UAAI,iBAAiB;AACjB,qCAA6B,QAAO;AACpC,oBAAY,QAAO;AACnB,qBAAa,QAAO;AACpB,4BACI,MAAI,KAAI,mBAAmB,IAAI,KAAK,wBAAwB,KAAI,MAAM;;AAE9E,sBAAe,+BAA+B,mBAAmB;AACjE,aAAO,oBAAoB,SAAQ,UAAU;;AAE1C,gCAA4B,IAAG,CAAE;AClGxC;;;;;;;;;;;;;;;;AA0DA,kEAA8D;AAC1D,UAAI,QAAQ;AACR,cAAM,OAAO,OAAO;;AAExB,UAAI,QAAQ,OAAO,OAAO;AACtB,cAAM,MAAM,mGAC+B,OAAO,oBAC/B;;AAGvB,uBAAiB,WAAW;AAIxB,yBAAiB;AACjB,oBAAY,UAAU,SAAQ,CAAC,MAAM;AACrC,0BAAkB,IAAI,MAAK,SAAQ,YAAY;AAC/C,aAAK,CAAC,SAAQ;AACd,2BAAmB,IAAI,KAAI,WAAW;AACtC,sBAAc,MAAI,YAAY,CAAC;AAC/B,yBAAiB;AACb,wCAA4B;AAC5B,0BAAgB,sBAAqB,GAAG,OAAO,CAAC;AAChD,iBAAO;YACH,KAAI,SAAQ,IAAI,UAAU,IAAI,MAAK,SAAQ,YAAY,IAAI;YAC3D,KAAI,SAAQ,IAAI,UAAU,IAAI,IAAI,aAAY,MAAK,SAAQ;;;AAGnE,eAAO,CAAE,OAAO;;AAEpB,aAAO,SAAS,QAAQ;;AAqB5B,kFAA8E,gBAAe,SAAA,UAAU;AACnG,0BAAoB,iBAAgB,cAAc,gBAAgB;AAClE,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,qBAAe;AACf,UAAI,WAAW;AACX,mBAAW,iBAAgB,SAAS,WAAW;;AAEnD,yBAAkB,cAAc,OAAO,QAAQ,OAAO;AACtD,UAAI,iBAAiB;AACjB,qCAA6B,QAAO;AACpC,oBAAY,QAAO;AACnB,2BAAmB,QAAO,cAAc,MAAM;AAC9C,wBACI,MAAI,KAAI,eAAe,IAAI,KAAK,wBAAwB,IAAI,sBAAsB;;AAE1F,sBAAe,+BAA+B,eAAe;AAC7D,aAAO,oBAAoB,SAAQ,UAAU;;AAE1C,gCAA4B,IAAG,CAAE;AC/HxC;;;;;;;;;;;;;;;;AAoNK,qBAAY;MACb;MACA;MACA;MACA;;AAOC,mBAAU;MACX;MACA;MACA;MACA;;AAcC,mBAAS;MACV,eAAA;MACA;MACA,gBAAA;MACA,kBAAA;MACA,eAAA;MACA;MACA;MACA;MACA;MACA;MACA;;AAMC,mBAAU;MACX;MACA;MACA;;AAYC,mBAAU;MACX;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;;AC1RJ;;;;;;;;;;;;;;;;4BAqB+B;MAe3B,yBAAyB;AACrB,eAAQ,OAAO,iBAAU,KAAK,iBAAiB,GAAG;AAClD,YAAI,WAAW;AACX,4BAAkB,QAAQ,IAAI,OAAM,EAAE,MAAM,EAAE,MAAM,QAAQ,OAAM,EAAE;
AACpE,eAAK,eAAe;;AAGpB,eAAK,eAAe;;AAGxB,gBAAQ;AACR,YAAI;AACA,iBAAO;;AAGP,gBAAM;AACN,iBAAO;;;UAMX;AACA,YAAI,KAAK,eAAe;AACpB,eAAK,cAAc;;AAEvB,eAAO,KAAK;;MAEhB;AACI,aAAK,cAAc,KAAK,aAAa;;MAezC;AACI,eAAO,cAAc,GAAG;;MAK5B;AACI,YAAI,KAAK,eAAe;AACpB,kBAAQ,KAAK;;;YAGf;AACF,YAAI,KAAK,eAAe;AACpB,eAAK,cAAc;;AAEvB,eAAO;UACH,MAAM;UAEN,QAAQ,QAAO,KAAK,aAAa;;;YAGnC;AACF,cAAM,IAAI,MAAM;;YAEd;AACF,cAAM,IAAI,MAAM,4DACT,KAAK;;YASV;AACF,aAAK,cAAe,OAAM,aAAa,GAAG,OAAO,QAAQ;AACzD,eAAO,aAAa,MAAM;;;AAGlC,WAAO,eAAe,WAAW,OAAO,aAAa;MACjD,OAAO;AACH,eAAO,UAAS,YAAY,QAAQ,UAAS,oBAAoB,QAC7D,UAAS,kBAAkB;;;AC3HvC;;;;;;;;;;;;;;;;oCA2BuC;MACnC,0CAAyC;AACrC;AACA,aAAK,eAAe;AACpB,aAAK,MAAM;AACX,aAAK,UAAU;AACf,aAAK,mBAAmB;AACxB,aAAK,qBAAqB;AAC1B,YAAI,YAAW;AACX,eAAK,UAAU,QAAO,QAAQ;;;MAGtC;AACI,8BAAsB,MAAM,QAAQ,qBAChC,kBAAkB,IAAI,UAAQ,KAAK,QACnC,OAAO,KAAK;AAChB,sBAAc,QAAQ;AAClB,wBAAc,QAAO,oBAAoB;AACzC,4BAAkB;AAClB,cAAI,KAAK,iBAAiB,MAAM;AAC5B,iBAAK,iBAAiB,KAAK;cACvB,cAAc,GAAG;cACjB,UAAU,KAAK,MAAM,WAAU,OAAO,SAAS;;;AAGvD,cAAI,KAAK,mBAAmB,MAAM;AAC9B,iBAAK,mBAAmB,KAAK;cACzB,cAAc,GAAG;cACjB,UAAU,KAAK,MAAM,WAAU,OAAO,SAAS;;;AAGvD,2BAAiB,MAAM,QAAQ,qBAC3B,kBAAkB,GAAG,SACrB,kBAAkB;AACtB,cAAI,YAAY;AACZ;;AAEJ,kCAAwB,KAAK,iBAAiB,GAAG;AACjD,oCAA0B,KAAK,mBAAmB,GAAG;AACrD,eAAK;AACD,uCAA2B,MAAI,KAAI,iBAAiB,KAAK,MAAM,KAAI,OAAO,WAAW,IAAI,KAAK;AAC9F,4BAAgB,KAAI,IAAI,KAAK,MAAI,mBAAmB,KAAK,WAAW,KAAK,MAAI,iBAAiB,KAAK,YAAY;AAC/G,yCAA6B,MAAI,KAAI,mBAAmB,KAAK,MAAM,KAAI,OAAO,UAAU,IAAI,KAAK;AACjG,4BAAgB,OAAO;AACvB,8BAAkB,OAAO;AACzB,6BAAiB,MAAI,KAAI,SAAS,CAAC,KAAK,eAAe;AACvD,kBAAM,OAAO;;;AAGrB,aAAK;;MAET;AACI,YAAI,KAAK,sBAAsB;AAC3B,kBAAQ,KAAK,iBAAiB,IAAI,OAAK,EAAE;AACzC,kBAAQ,KAAK,mBAAmB,IAAI,OAAK,EAAE;;;YAG7C;AAEF,0BAAkB,CAAC,GAAG,KAAK,kBAAkB,GAAG,KAAK;AACrD,eAAO,CAAC,MAAM,KAAK,kBAAkB,OAAO,UAAU,IAAI,OAAM,EAAE,MAAM,EAAE,cAAc,QAAQ,EAAE;;YAEhG;AACF,uBAAe,MAAM,KAAK,kBAAkB;AAC5C,8BAAsB,aAAa,SAAS;AAC5C,0BAAkB;AAClB,aAAK,mBACD,aAAa,MAAM,GAAG,eAAe,IAAI,OAAM;UAC3C,cAAc,EAAE;UAChB,UAAU,EAAE,OAAO,SAAS;;AAEpC,aAAK,qBACD,aAAa,MAAM,eAAe,gBAAgB,GAC7C,IAAI,OAAM;UACX,cAAc,EAAE;UAChB,UAAU,EAAE,OAAO,SAAS;;;MAGxC;AACI,eAAO;UACH,cAAgB,KAAK;UACrB,KAAO,KAAK;UACZ,SAAW,KAAK;;;aAIjB;AACH,eAAO,IAAI,IAAI,QAAO,iBAAiB,QAAO,QAAQ,QAAO;;;AAIrE,sBAAkB,YAAY;AAC9B,kBAAc;ACvHd;;;;;;;;;;;;;;;;mCA2BsC;MAClC,oDAAoD;AAChD;AACA,aAAK,eAAe;AACpB,aAAK,0BAA0B;AAC/B,aAAK,mBAAmB;;MAE5B;AACI,8BAAsB,MAAM,QAAQ,qBAChC,kBAAkB,IAAI,UAAQ,KAAK,QACnC,OAAO,KAAK;AAChB,sBAAc,QAAQ;AAClB,wBAAc,QAAO,oBAAoB;AACzC,cAAI,KAAK,iBAAiB,MAAM;AAC5B,8BAAkB;AAClB,iBAAK,iBAAiB,KAAK;cACvB,cAAc,GAAG;cACjB,UAAU,KAAK,MAAM,MAAK,MAAM,OAAO,KAAK,yBACvC,SAAS;;;AAGtB,2BAAiB,MAAM,QAAQ,qBAC3B,kBAAkB,GAAG,SACrB,kBAAkB;AACtB,cAAI,YAAY;AACZ;;AAEJ,kCAAwB,KAAK,iBAAiB,GAAG;AACjD,eAAK;AACD,uCAA2B,MAAI,iBAAiB,OAAO;AACvD,4BAAgB,OAAO;AACvB,6BAAiB,MAAI,KAAI,IAAI,UAAU,KAAK,MAAI,oBAAoB,QAAO,QAAQ,cAAc,CAAC,KAAK,eAAe;AACtH,kBAAM,OAAO;;;AAGrB,aAAK;;MAET;AACI,YAAI,KAAK,oBAAoB;AACzB,kBAAQ,KAAK,iBAAiB,IAAI,OAAK,EAAE;;;YAG3C;AAEF,eAAO,CAAC,MAAM,KAAK,kBAAkB,OAAO,KAAK,iBAAiB,IAAI,OAAM,EAAE,MAAM,EAAE,cAAc,QAAQ,EAAE;;YAE5G;AACF,uBAAe,MAAM,KAAK,kBAAkB;AAC5C,0BAAkB;AAClB,aAAK,mBAAmB,aAAa,IAAI,OAAM,EAAE,cAAc,EAAE,MAAM,UAAU,EAAE,OAAO,SAAS;;MAEvG;AACI,eAAO;UACH,cAAgB,KAAK;UACrB,yBAA2B,KAAK;;;aAIjC;AACH,eAAO,IAAI,IAAI,QAAO,iBAAiB,QAAO;;;AAItD,qBAAiB,YAAY;AAC7B,kBAAc;AC3Fd;;;;;;;;;;;;;;;;gCA6BmC;MAC/B,mDAAkD;AAC9C;AACA,aAAK,eAAe;AACpB,aAAK,QAAQ;AACb,aAAK,QAAQ;AACb,aAAK,UAAU;AACf,aAAK,yBAAyB;AAC9B,aAAK,0BAA0B;AAC/B,aAAK;AAED,eAAK,WAAW,QAAO,OAAO;AAC9B,eAAK,WAAW,QAAO,OAAO;;AAElC,YAAI,YAAW;AACX,eAAK,UAAU,QAAO,QAAQ;;;MAGtC;AACI,yBAAiB,MAAM,QAAQ,qBAC3B,kBAAkB,IAAI,OAAK,EAAE,QAC7B,OAAO,
KAAK;AAChB,aAAK;AACD,mCAAyB,IAAI,GAAG,KAAK;AACrC,mCAAyB,IAAI,GAAG,KAAK;AACrC,mBAAS,QAAQ;AACb,0BAAc,QAAO,oBAAoB;AACzC,8BAAkB;AAClB,gBAAI,KAAK,uBAAuB,MAAM;AAClC,mBAAK,uBAAuB,KAAK;gBAC7B,cAAc,GAAG;gBACjB,UAAU,KAAK,MAAM,WAAU,OAAO,SAAS;;;AAGvD,gBAAI,KAAK,wBAAwB,MAAM;AACnC,mBAAK,wBAAwB,KAAK;gBAC9B,cAAc,GAAG;gBACjB,UAAU,KAAK,MAAM,WAAU,OAAO,SAAS;;;AAGvD,6BAAiB,MAAM,QAAQ,qBAC3B,kBAAkB,GAAG,SACrB,kBAAkB;AACtB,gBAAI,YAAY;AACZ;;AAEJ,gCAAoB,KAAK,uBAAuB,GAAG;AACnD,iCAAqB,KAAK,wBAAwB,GAAG;AACrD,mCAAuB,MAAI,KAAI,aAAa,KAAK,QAAQ,KAAI,UAAU,IAAI,KAAK;AAChF,oCAAwB,MAAI,KAAI,cAAc,KAAK,QAAQ,KAAI,OAAO,WAAW,IAAI,KAAK;AAC1F,6CAAiC,IAAI,gBAAgB;AACrD,8CAAkC,IAAI,iBAAiB;AACvD,wBAAY,OAAO;AACnB,yBAAa,OAAO;AACpB,6BAAiB,MAAI,KAAI,IAAI,0BAA0B,MAAI,KAAK,4BAA4B,KAAK,WAAW,CAAC,KAAK,eAAe;AACjI,kBAAM,OAAO;;AAEjB,eAAK,SAAS,OAAO,KAAI,KAAK,UAAU,KAAK;AAC7C,eAAK,SAAS,OAAO,KAAI,KAAK,UAAU,KAAK;;AAEjD,aAAK;;MAET;AACI,aAAK,SAAS;AACd,aAAK,SAAS;AACd,YAAI,KAAK,0BAA0B;AAC/B,kBAAQ,KAAK,uBAAuB,IAAI,OAAK,EAAE;;AAEnD,YAAI,KAAK,2BAA2B;AAChC,kBAAQ,KAAK,wBAAwB,IAAI,OAAK,EAAE;;;YAGlD;AAEF,0BAAkB,CAAC,GAAG,KAAK,wBAAwB,GAAG,KAAK;AAC3D,eAAO,CAAC,MAAM,KAAK,kBAAkB,OAAO,UAAU,IAAI,OAAM,EAAE,MAAM,EAAE,cAAc,QAAQ,EAAE;;YAEhG;AACF,uBAAe,MAAM,KAAK,kBAAkB;AAC5C,aAAK;AACD,eAAK,SAAS,OAAO,IAAI,KAAK,OAAO,KAAK,cAAc;AACxD,eAAK,SAAS,OAAO,IAAI,KAAK,OAAO,KAAK,cAAc;;AAE5D,8BAAsB,aAAa,SAAS;AAC5C,0BAAkB;AAClB,aAAK,yBACD,aAAa,MAAM,GAAG,eAAe,IAAI,OAAM;UAC3C,cAAc,EAAE;UAChB,UAAU,EAAE,OAAO,SAAS;;AAEpC,aAAK,0BACD,aAAa,MAAM,eAAe,gBAAgB,GAC7C,IAAI,OAAM;UACX,cAAc,EAAE;UAChB,UAAU,EAAE,OAAO,SAAS;;;MAGxC;AACI,eAAO;UACH,cAAgB,KAAK;UACrB,OAAS,KAAK;UACd,OAAS,KAAK;UACd,SAAW,KAAK;;;aAIjB;AACH,eAAO,IAAI,IAAI,QAAO,iBAAiB,QAAO,UAAU,QAAO,UAAU,QAAO;;;AAIxF,kBAAc,YAAY;AAC1B,kBAAc;AC7Id;;;;;;;;;;;;;;;;kCA4BqC;MACjC,mDAAkD,cAAc;AAC5D;AACA,aAAK,eAAe;AACpB,aAAK,QAAQ;AACb,aAAK,QAAQ;AACb,aAAK,UAAU;AACf,aAAK,QAAQ;AACb,aAAK,yBAAyB;AAC9B,aAAK,6BAA6B;AAClC,aAAK;AACD,eAAK,YAAY,QAAO,GAAG;AAC3B,eAAK,WAAW,QAAO,OAAO;;AAElC,YAAI,YAAW;AACX,eAAK,UAAU,QAAO,QAAQ;;;MAGtC;AACI,8BAAsB,MAAM,QAAQ,qBAChC,kBAAkB,IAAI,UAAQ,KAAK,QACnC,OAAO,KAAK;AAChB,aAAK;AACD,mCAAyB,IAAI,GAAG,KAAK;AACrC,qBAAW,IAAI,CAAC,KAAK,cAAc,MAAI,KAAI,KAAK,WAAW,KAAK,QAAQ;AACxE,wBAAc,QAAQ;AAClB,0BAAc,QAAO,oBAAoB;AACzC,8BAAkB;AAClB,gBAAI,KAAK,uBAAuB,MAAM;AAClC,mBAAK,uBAAuB,KAAK;gBAC7B,cAAc,GAAG;gBACjB,UAAU,WAAU,OAAO,SAAS;;;AAG5C,gBAAI,KAAK,2BAA2B,MAAM;AACtC,mBAAK,2BAA2B,KAAK;gBACjC,cAAc,GAAG;gBACjB,UAAU,WAAU,OAAO,SAAS;;;AAG5C,6BAAiB,MAAM,QAAQ,qBAC3B,kBAAkB,GAAG,SACrB,kBAAkB;AACtB,gBAAI,YAAY;AACZ;;AAEJ,gCAAoB,KAAK,uBAAuB,GAAG;AACnD,oCAAwB,KAAK,2BAA2B,GAAG;AAC3D,mCAAuB,MAAI,KAAI,aAAa,KAAK,QAAQ,KAAI,UAAU,IAAI,KAAK;AAChF,wBAAY,KAAI,iBAAiB,KAAK;AACtC,wBAAY,IAAI;AAChB,uCAA2B,QAAQ,KAAK;AACxC,wBAAY,OAAO;AACnB,4BAAgB,OAAO;AACvB,6BAAiB,MAAI,KAAI,IAAI,IAAI,mBAAmB,IAAI,gBAAgB,MAAI,oBAAoB,KAAK,YAAY;AACjH,kBAAM,OAAO;;AAEjB,eAAK,UAAU,OAAO,MAAI,KAAK,WAAW;AAC1C,eAAK,SAAS,OAAO,KAAI,KAAK,UAAU,KAAK;;AAEjD,aAAK;;MAET;AACI,aAAK,SAAS;AACd,aAAK,UAAU;AACf,YAAI,KAAK,0BAA0B;AAC/B,kBAAQ,KAAK,uBAAuB,IAAI,OAAK,EAAE;;AAEnD,YAAI,KAAK,8BAA8B;AACnC,kBAAQ,KAAK,2BAA2B,IAAI,OAAK,EAAE;;;YAGrD;AACF,cAAM,IAAI,MAAM;;YAEd;AACF,cAAM,IAAI,MAAM;;MAEpB;AACI,eAAO;UACH,cAAgB,KAAK;UACrB,OAAS,KAAK;UACd,OAAS,KAAK;UACd,SAAW,KAAK;UAChB,OAAS,KAAK;;;aAIf;AACH,eAAO,IAAI,IAAI,QAAO,iBAAiB,QAAO,UAAU,QAAO,UAAU,QAAO,YAAY,QAAO;;;AAI3G,oBAAgB,YAAY;AAC5B,kBAAc;AC1Hd;;;;;;;;;;;;;;;;+BAwBkC;MAC9B;AACI;AACA,aAAK,eAAe;AACpB,aAAK,gBAAgB;;MAEzB;AACI,yBAAiB,MAAM,QAAQ,qBAC3B,kBAAkB,IAAI,OAAK,EAAE,QAC7B,OAAO,KAAK;AAChB,iBAAS,QAAQ;AACb,2BAAiB,MAAM,QAAQ,qBAC3B,kBAAkB,GAAG,SACrB,kBAAkB;AACtB,cAAI,YAAY;AACZ;;AAEJ,wBAAc,QAAO,oBAAo
B;AACzC,eAAK;AACD,6BAAiB,MAAI,KAAI,KAAK,GAAG,WAAW;AAC5C,kBAAM,OAAO;;;AAGrB,aAAK;;MAKT;AACI,aAAK,eAAe;AACpB,YAAI,KAAK,KAAK;AACV,eAAK,EAAE;;AAEX,aAAK,IAAI,KAAK,QAAO,CAAC;;MAE1B;AACI,aAAK,EAAE;;YAEL;AACF,eAAO,CAAC,MAAM,KAAK;;YAEjB;AACF,uBAAe,MAAM,KAAK,kBAAkB;AAC5C,YAAI,aAAa,WAAW;AACxB,gBAAM,IAAI,MAAM;;;MAGxB;AACI,eAAO,CAAE,cAAgB,KAAK;;aAG3B;AACH,eAAO,IAAI,IAAI,QAAO;;;AAI9B,iBAAa,YAAY;AACzB,kBAAc;ACjFd;;;;;;;;;;;;;;;;oCAyBuC;MACnC,kDAAkD;AAC9C,cAAM;AACN,aAAK,eAAe;AACpB,aAAK,WAAW;AAChB,aAAK,cAAc;AACnB,aAAK,gBAAgB;AACrB,aAAK,IAAI,QAAO,KAAK;;MAEzB;AACI,8BAAsB,MAAM,QAAQ,qBAChC,kBAAkB,IAAI,UAAQ,KAAK,QACnC,OAAO,KAAK;AAChB,sBAAc,QAAQ;AAClB,wBAAc,QAAO,oBAAoB;AACzC,cAAI,KAAK,cAAc,MAAM;AACzB,8BAAkB;AAClB,iBAAK,cAAc,KAAK;cACpB,cAAc,GAAG;cACjB,UAAU,KAAK,MAAM,WAAU,OAAO,SAAS;;;AAGvD,+BAAqB,KAAK,cAAc,GAAG;AAC3C,2BAAiB,MAAM,QAAQ,qBAC3B,kBAAkB,GAAG,SACrB,kBAAkB;AACtB,cAAI,YAAY;AACZ;;AAEJ,eAAK;AACD;AACA,oCAAwB,MAAI,KAAI,KAAK,GAAG,eAAe;AACvD,gBAAI,KAAK;AACL,yBAAW,MAAI,KAAI,KAAK,GAAG,MAAI,UAAU,KAAI,iBAAiB,KAAK,MAAM;;AAGzE,yBAAW,MAAI,KAAI,KAAK,GAAG,kBAAkB;;AAEjD,yBAAa,OAAO;AACpB,kBAAM,OAAO;;;AAGrB,aAAK;;MAET;AACI,aAAK,EAAE;AACP,YAAI,KAAK,iBAAiB;AACtB,kBAAQ,KAAK,cAAc,IAAI,OAAK,EAAE;;;MAQ9C;AACI,aAAK,WAAW;;YAEd;AAEF,eAAO,CAAC,MAAM,KAAK,kBAAkB,OAAO,KAAK,cAAc,IAAI,OAAM,EAAE,MAAM,EAAE,cAAc,QAAQ,EAAE;;YAEzG;AACF,uBAAe,MAAM,KAAK,kBAAkB;AAC5C,0BAAkB;AAClB,aAAK,gBAAgB,aAAa,IAAI,OAAM,EAAE,cAAc,EAAE,MAAM,UAAU,EAAE,OAAO,SAAS;;MAEpG;AACI,eAAO;UACH,cAAgB,KAAK;UACrB,UAAY,KAAK;UACjB,aAAe,KAAK;;;aAIrB;AACH,eAAO,IAAI,IAAI,QAAO,iBAAiB,QAAO,aAAa,QAAO;;;AAI1E,sBAAkB,YAAY;AAC9B,kBAAc;AC1Gd;;;;;;;;;;;;;;;;mCA4BsC;MAClC,kCAAkC,gBAAgB,cAAe,iBAAiB;AAC9E;AACA,aAAK,eAAe;AACpB,aAAK,QAAQ;AACb,aAAK,WAAW;AAChB,aAAK,UAAU;AACf,aAAK,yBAAyB;AAC9B,aAAK,qBAAqB;AAC1B,aAAK,uBAAuB;AAC5B,aAAK,WAAW;AAChB,YAAI,YAAW;AACX,eAAK,UAAU,QAAO,QAAQ;;AAElC,YAAI,gBAAgB;AAChB,gBAAM,IAAI,MAAM;;;MAGxB;AACI,8BAAsB,MAAM,QAAQ,qBAChC,kBAAkB,IAAI,UAAQ,KAAK,QACnC,OAAO,KAAK;AAChB,sBAAc,QAAQ;AAClB,wBAAc,QAAO,oBAAoB;AACzC,4BAAkB;AAClB,cAAI,KAAK,uBAAuB,MAAM;AAClC,iBAAK,uBAAuB,KAAK;cAC7B,cAAc,GAAG;cACjB,UAAU,KAAK,MAAM,WAAU,OAAO,SAAS;;;AAGvD,cAAI,KAAK,mBAAmB,MAAM;AAC9B,iBAAK,mBAAmB,KAAK;cACzB,cAAc,GAAG;cACjB,UAAU,KAAK,MAAM,WAAU,OAAO,SAAS;;;AAGvD,cAAI,KAAK,qBAAqB,MAAM,QAAQ,KAAK;AAC7C,iBAAK,qBAAqB,KAAK;cAC3B,cAAc,GAAG;cACjB,UAAU,KAAK,MAAM,WAAU,OAAO,SAAS;;;AAGvD,2BAAiB,MAAM,QAAQ,qBAC3B,kBAAkB,GAAG,SACrB,kBAAkB;AACtB,cAAI,YAAY;AACZ;;AAEJ,wCAA8B,KAAK,uBAAuB,GAAG;AAC7D,qCAA2B,KAAK,mBAAmB,GAAG;AACtD,eAAK;AACD,6CAAiC,MAAI,KAAI,uBAAuB,KAAK,QAAQ,KAAI,OAAO,WAAW,IAAI,KAAK;AAC5G,gBAAI,KAAK;AACL,0CAA4B,KAAK,qBAAqB,GAAG;AAEzD,6CAA+B,MAAI,KAAI,qBAAqB,KAAK,QAAQ,KAAI,UAAU,IAAI,KAAK;AAChG,uCAAyB,IAAI,KAAI,UAAU,KAAK,eAAe,KAAK,IAAI,0BAA0B,MAAI,OAAO,yBAAyB,KAAK;AAC3I,4CAA8B,MAAI,KAAI,oBAAoB,KAAK,WAAW;AAC1E,oCAAsB,OAAO;AAC7B,kCAAoB,OAAO;AAC3B,iCAAmB,OAAO;AAC1B,+BAAiB,IAAI,OAAO;AAC5B,oBAAM,OAAO;;AAIb,gDAAiC,MAAI,KAAI,uBAAuB,KAAK,QAAQ,KAAI,OAAO,WAAW,IAAI,KAAK;AAC5G,4CAA8B,MAAI,KAAI,oBAAoB,KAAK,WAAW,IAAI,KAAI,UAAU,KAAK,eAAe,KAAK,MAAI,2BAA0B,KAAK;AACxJ,oCAAsB,OAAO;AAC7B,iCAAmB,OAAO;AAC1B,+BAAiB,IAAI,OAAO;AAC5B,oBAAM,OAAO;;;;AAIzB,aAAK;;MAET;AACI,YAAI,KAAK,0BAA0B;AAC/B,kBAAQ,KAAK,uBAAuB,IAAI,OAAK,EAAE;;AAEnD,YAAI,KAAK,wBAAwB,QAAQ,KAAK;AAC1C,kBAAQ,KAAK,qBAAqB,IAAI,OAAK,EAAE;;AAEjD,YAAI,KAAK,sBAAsB;AAC3B,kBAAQ,KAAK,mBAAmB,IAAI,OAAK,EAAE;;;YAG7C;AAEF,0BAAkB,CAAC,GAAG,KAAK,wBAAwB,GAAG,KAAK;AAC3D,YAAI,KAAK;AACL,oBAAU,KAAK,GAAG,KAAK;;AAE3B,eAAO,CAAC,MAAM,KAAK,kBAAkB,OAAO,UAAU,IAAI,OAAM,EAAE,MAAM,EAAE,cAAc,QAAQ,EAAE;;YAEhG;AACF,uBAAe,MAAM,KAAK,kBAAkB;AAC5C,8BAAsB,KAAK,WAAW,aAAa,SAAS,IAAI,aAAa,SAAS;AACtF,0BAAkB;AAClB,a
AAK,yBACD,aAAa,MAAM,GAAG,eAAe,IAAI,OAAM;UAC3C,cAAc,EAAE;UAChB,UAAU,EAAE,OAAO,SAAS;;AAEpC,aAAK,qBACD,aAAa,MAAM,eAAe,gBAAgB,GAC7C,IAAI,OAAM;UACX,cAAc,EAAE;UAChB,UAAU,EAAE,OAAO,SAAS;;AAEpC,YAAI,KAAK;AACL,eAAK,uBACD,aAAa,MAAM,gBAAgB,GAAG,gBAAgB,GACjD,IAAI,OAAM;YACX,cAAc,EAAE;YAChB,UAAU,EAAE,OAAO,SAAS;;;;MAI5C;AACI,eAAO;UACH,cAAgB,KAAK;UACrB,OAAS,KAAK;UACd,UAAY,KAAK;UACjB,SAAW,KAAK;UAChB,UAAY,KAAK;;;aAIlB;AACH,eAAO,IAAI,IAAI,QAAO,iBAAiB,QAAO,UAAU,QAAO,aAAa,QAAO,YAAY,QAAO;;;AAI9G,qBAAiB,YAAY;AAC7B,kBAAc;ACrKd;;;;;;;;;;;;;;;;;aA6DW;AACH,eAAO,IAAI,aAAa;;aAiBrB,+CAA+C;AAClD,eAAO,IAAI,kBAAkB,cAAc,UAAU;;aAsBlD,8BAA8B,gBAAe,cAAe,iBAAiB;AAChF,eAAO,IAAI,iBAAiB,cAAc,OAAO,UAAU,UAAS;;aAcjE,oBAAoB,cAAe,aAAa,kBAAiB;AACpE,eAAO,IAAI,cAAc,cAAc,OAAO,OAAO;;aAclD,wBAAwB,YAAY,iBAAe;AACtD,eAAO,IAAI,kBAAkB,cAAc,KAAK;;aAe7C,sBAAsB,cAAe,aAAa,kBAAiB,cAAc;AACpF,eAAO,IAAI,gBAAgB,cAAc,OAAO,OAAO,UAAS;;aAkB7D,gDAAgD;AACnD,eAAO,IAAI,iBAAiB,cAAc;;;ACxKlD;;;;;;;;;;;;;;;;AA0BA;MAAC;MAAmB;MAAc;MAAmB;MACjD;MAAkB;MAAiB;;AAC3B,kBAAS;MACjB,KAAK,sBAAsB;MAC3B,UAAU,sBAAsB;MAChC,UAAU,sBAAsB;MAChC,SAAS,sBAAsB;MAC/B,SAAS,sBAAsB;MAC/B,QAAQ,sBAAsB;MAC9B,MAAM,sBAAsB;;ACnChC;;;;;;;;;;;;;;;;AAgBA,0BAAuB;AACnB,UAAI,OAAO,0BAA0B;AACjC,eAAO;iBAEF,OAAO,iBAAiB;AAC7B,eAAO;;AAEX,aAAO,OAAO;;AAYlB;AACI,aAAO,IAAI,QAAQ,aAAW,cAAc,MAAM;;ACpCtD;;;;;;;;;;;;;;;;AAiBO;AACH,sBAAgB,aAAc,QAAO,WAAW,WAAW,SAAS,OAAO;AAC3E,sBAAgB,cAAe,QAAO,WAAW,WAAW,SAAS,OAAO;AAC5E,aAAO,CAAC,SAAS;;ACpBrB;;;;;;;;;;;;;;;;AAwBO,wEAAkE;AACrE,qBAAe;AACf,UAAI;AACA,mBAAW,SAAS,OAAO,WAAW,MAAM;AAC5C,iBAAS,KAAK,WAAW,KAAK;AAC9B,mBAAW,SAAS,OAAO,WAAW,MAAM;;AAG5C,mBAAW,SAAS,OAAO,WAAW;AACtC,8BAAsB,WAAW;AACjC,qBAAa,GAAG,IAAI,eAAe,EAAE;AACjC,qBACI,SAAS,OAAO,CAAC,WAAW,IAAI,KAAK,WAAW,IAAI,WAAW;;AAEvE,mBAAW,SAAS,OAAO,WAAW,MAAM,gBAAgB;;AAEhE,aAAO;;AAWJ,uEAAkE;AACrE,uBAAiB;AACjB,UAAI;AACA,iBAAS,KAAK;AACd,qBAAa,iBAAiB,GAAG,IAAI,cAAc,EAAE;AACjD,cAAI,KAAK,IAAI;AACT,qBAAS,KAAK;AACd,qBAAS,KAAK,IAAK,kBAAiB;;AAGpC,qBAAS,KAAK;;;;AAKtB,oCAA4B;AAC5B,mCAA2B;AAC3B,qBAAa,GAAG,IAAI,cAAc,EAAE;AAChC,cAAI,KAAK,iBAAiB,IAAI,KAAK,IAAI,MAAM;AACzC,+BAAmB,KAAK;;AAGxB,gCAAoB,KAAK;;;AAGjC,iBAAS,KAAK,GAAG;AACjB,iBAAS,KAAK;AACd,iBAAS,KAAK,GAAG;;AAErB,aAAO;;AAWJ,gFAA0E;AAC7E,+BAAyB;AACzB,UAAI;AACA,yBAAiB,KAAK,WAAW,KAAK;;AAGtC,yBAAiB,KAAK,WAAW,KAAK;;AAE1C,mBAAa,GAAG,IAAI,WAAW,QAAQ,EAAE;AACrC,YAAI,KAAK,WAAW;AAChB,cAAI;AACA,6BAAiB,KAAK,WAAW,IAAI,KAAK,WAAW;;AAGrD,6BAAiB,KAAK,WAAW,KAAK,WAAW,IAAI;;;AAIzD,2BAAiB,KAAK,WAAW;;;AAGzC,aAAO;;AAMJ;AACH,+BAAyB,CAAC;AAC1B,mBAAa,GAAG,IAAI,YAAY,EAAE;AAC9B,yBAAiB,KAAK,MAAM,GAAG;;AAEnC,aAAO;;AAaJ;AACH,wBAAkB,eAAe,MAAM,GAAG;AAC1C,mBAAa,GAAG,IAAI,YAAY,EAAE;AAC9B,kBAAU,KAAK,eAAe,IAAI,KAAK,MAAM,GAAG,KAAK,MAAM,GAAG;;AAElE,aAAO;;AC7IX;;;;;;;;;;;;;;;;AAgBO,6BAAwB;AACxB,wBAAmB;ACjB1B;;;;;;;;;;;;;;;;AAgBO,mBAAc;AACd,oBAAe;AACf,oBAAe;AACf,oBAAe;AACf,oBAAe;AACf,oBAAe;ACrBtB;;;;;;;;;;;;;;;;AAiBO;AACH,UAAI,CAAC,OAAM,QAAQ;AACf,gBAAQ,KAAK,GAAG;;;AAGjB;AACH,UAAI,CAAC,OAAM,QAAQ;AACf,gBAAQ,IAAI,GAAG;;;ACxBvB;;;;;;;;;;;;;;;;AA8BO;AACH,UAAI,MAAK,WAAW,MAAK;AACrB,cAAM,IAAI,MAAM,gEACT,MAAK,iBAAiB,MAAK;;AAEtC,qBAAe,IAAI,aAAa,MAAK,SAAS;AAC9C,mBAAa,GAAG,IAAI,OAAO,QAAQ,KAAK;AACpC,eAAO,KAAK,MAAK,IAAI;AACrB,eAAO,IAAI,KAAK,MAAK,IAAI;;AAE7B,aAAO;;AAgBJ;AACH,oBAAa,IAAI,aAAa,SAAQ,SAAS;AAC/C,oBAAa,IAAI,aAAa,SAAQ,SAAS;AAC/C,mBAAa,GAAG,IAAI,SAAQ,QAAQ,KAAK;AACrC,cAAK,IAAI,KAAK,SAAQ;AACtB,cAAK,IAAI,KAAK,SAAQ,IAAI;;AAE9B,aAAO,CAAE,MAAA,OAAM,MAAA;;AAMZ;AACH,kBAAY,KAAK,KAAK,SAAQ,SAAS;AACvC,oBAAa,IAAI,aAAa;AAC9B,oBAAa,IAAI,aAAa;AAC9B,mBAAa,GAAG,IAAI,SAAQ,QAAQ,KAAK;AACrC,cAAK,KAAK,MAAM,IAAI,MAAM,SAAQ;AAClC,cAAK,KAAK,MAAM,IAAI,MAAM,SAAQ,IAAI;;AAE1C,a
;AAKd,aAAK,eAAe;AACpB,aAAK,gBAAgB;AACrB,mBAAW,KAAK;AAChB,YAAI,CAAC;AACD,yBAAe,KAAK;AACpB,iBAAO,YAA0B,UAAU,MAAM,OAAO;;AAE5D,aAAK,OAAO;AACZ,aAAK,aAAa,KAAK,aAAa,OAAO,OAAO,KAAK;AACvD,YAAI,KAAK,cAAc,QAAQ,KAAK,mBAAmB;AAKnD;AACA,cAAI,KAAK,mBAAmB;AACxB,8BAAkB,KAAK;qBAElB,KAAK,cAAc;AACxB,4BAAgB;AAChB,gBAAI,KAAK,aAAa;AAClB,0BAAY,KAAK;;AAErB,8BAAkB,CAAC,WAAW,OAAO,KAAK;;AAE9C,eAAK,kBAAkB;AAEvB,sBAAY,KAAK;AACjB,cAAI,SAAS;AACT,oBAAQ,KAAK;;AAEjB,cAAI,SAAS;AACT,oBAAQ;;AAEZ,eAAK,QAAQ;;AAEjB,YAAI,KAAK,WAAW;AAChB,eAAK,iBAAiB,KAAK;;AAG3B,eAAK,iBAAiB;;AAI1B,aAAK,YAAY;AACjB,aAAK,4BAA4B;;aAW9B;AACH,eAAO,MAAM,OAAO,SAAS,UAAU;;MAS3C;AACI,YAAI,KAAK,aAAa,WAAW;AAC7B,gBAAM,IAAI,aAAa,2DACQ;;AAEnC,YAAI,KAAK,aAAa,UAAU;AAC5B,gBAAM,IAAI,WAAW,gBAAgB,oBAAoB,qCAC3B,KAAK,aAAa;;AAEpD,eAAO,KAAK,aAAa;;MAW7B;AACI,eAAO,iBAA+B,KAAK,eAAe,WAAW,SAAS;;MAWlF;AACI,eAAO,iBAA+B,KAAK,eAAe,WAAW,UAAU;;UAc/E;AACA,YAAI,KAAK,aAAa,SAAS;AAC3B,gBAAM,IAAI,eAAe,SAAS,KAAK;mBAMlC,KAAK,aAAa,WAAW;AAClC,gBAAM,IAAI,eAAe,SAAS,KAAK;;AAG3C,eAAO,iBAA+B,KAAK,eAAe,GAAG,SAAS;;UAatE;AACA,YAAI,KAAK,aAAa,WAAW;AAC7B,gBAAM,IAAI,eAAe,SAAS,KAAK;;AAG3C,YAAI,KAAK,aAAa,SAAS;AAC3B,gBAAM,IAAI,eAAe,SAAS,KAAK;;AAM3C,eAAO,iBAA+B,KAAK,eAAe,GAAG,UAAU;;UAEvE;AACA,eAAO,KAAK;;MAOhB;AAKI,eAAO,KAAK,OAAO,IAAI,YAAU;;UAEjC;AACA,eAAO,KAAK;;UAEZ;AACA,eAAO,KAAK;;UAEZ;AACA,aAAK,SAAS;;UAEd;AACA,eAAO,KAAK;;UAEZ;AACA,aAAK,kBAAkB,QAAQ,OAAK,EAAE,YAAY;AAClD,aAAK,aAAa;;UAElB;AACA,YAAI,KAAK;AACL,iBAAO,KAAK,kBAAkB,OAAO,OAAK,EAAE;;AAG5C,iBAAO;;;UAGX;AACA,aAAK,oBAAoB;;UAEzB;AACA,YAAI,KAAK;AACL,iBAAO,KAAK,kBAAkB,OAAO,OAAK,CAAC,EAAE,WACxC,OAAO,KAAK;;AAGjB,iBAAO,KAAK,kBAAkB,OAAO,KAAK;;;UAG9C;AACA,aAAK,uBAAuB;;UAM5B;AACA,eAAO,KAAK,iBAAiB,OAAO,KAAK;;UAEzC;AACA,eAAO,KAAK;;MAShB;AACI,YAAI,CAAC,KAAK;AACN,gBAAM,IAAI,MAAM;;;MAgBxB;AACI,iBAAS,OAAqB;AAC9B,YAAI,KAAK,aAAa,QAAQ,KAAK,UAAU,WAAW;AACpD;;AAEJ,0BAAkB,OAAqB,KAAK;AAC5C,YAAI,OAAO,WAAW,UAAU;AAC5B,gBAAM,IAAI,WAAW,SAAS,KAAK,gBAAgB,UAAU,kCACtC,OAAO,yCACP;;AAE3B,8BAAsB,GAAG,aAAa,OAAO,QAAQ;AACjD,oBAAU,OAAO;AACjB,uBAAa,UAAU;AACvB,cAAI,QAAQ;AACR;;AAGJ,uBAAa,EAAE;AACf,cAAI,KAAK,QAAQ;AACb,gBAAI,SAAS,KAAK;AACd,oBAAM,IAAI,WAAW,SAAS,yCAAyC,KAAK,uBACvD,KAAK,oBAAoB;;;AAGtD,cAAI,KAAK,WAAW;AAChB,gBAAI,OAAO,KAAK;AACZ,oBAAM,IAAI,WAAW,SAAS,yCAAyC,KAAK,2BACjD,KAAK,uBAAuB;;;AAG/D,cAAI,KAAK,WAAW;AAChB,gBAAI,OAAO,KAAK;AACZ,oBAAM,IAAI,WAAW,SAAS,yCAAyC,KAAK,2BACjD,KAAK,uBAAuB;;;AAI/D,cAAI,KAAK,SAAS;AACd,gBAAI,EAAE,UAAU,KAAK;AACjB,oBAAM,IAAI,WAAW,SAAS,yCAAyC,KAAK,yBACpD,KAAK,sBAAsB,EAAE;;;AAI7D,cAAI,KAAK;AACL,2BAAe,EAAE;AACjB,8BAAkB,KAAK;AACnB,2BAAa,OAAO;AACpB,4BAAc,KAAK,KAAK;AAIxB,mCAAqB,QAAQ,IAAI,OAAO,QAAQ,OAAO,OAAO,SAAS;AACvE,kBAAI,SAAS,QAAQ,CAAC,OAAO,MAAM,QAAQ,kBAAkB;AACzD,sBAAM,IAAI,WAAW,SAAS,yCACvB,KAAK,uBAAuB,qCACjB,uBAAuB;;;;AAKrD,cAAI,KAAK,SAAS;AACd,yBAAa,GAAG,IAAI,KAAK,MAAM,QAAQ,EAAE;AACrC,8BAAgB,KAAK,MAAM;AAC3B,0BAAY,EAAE,MAAM;AACpB,kBAAI,WAAW,QAAQ,OAAO;AAC1B,oBAAI,YAAY;AACZ,wBAAM,IAAI,WAAW,SAAS,yCACvB,KAAK,wBAAwB,KAAK,sBACtB,EAAE;;;;;;;MAe7C;AACI,eAAO;;MAEX;AACI,YAAI,KAAK,aAAa;AAClB,eAAK,UAAU,QAAQ;;;MAQ/B;AACI,aAAK,YAAY;;MAMrB;AACI,aAAK,YAAY;;MAuErB;AACI,iBAAS,UAAU;AACnB,aAAK;AAEL,2BAAmB,OAAqB;AACxC,6BAAqB;AACrB,6BAAoB;AAChB,cAAI,CAAE,mBAAiB;AACnB,6BAAiB;AACjB;;;AAGR,8BAAsB;AACtB,6BAAoB;AAChB,cAAI,kBAAiB;AACjB,8BAAkB;AAClB;;;AAGR,YAAI,mBAAmB;AACnB,gBAAM,IAAI,WAAW;;AAIzB,eAAO,UAAU,KAAK,MAAM;AAExB,cAAI,CAAC,KAAK;AAKN,iBAAK,yBAAyB;AAE9B,gCAAoB;AACpB,gCAAoB,OAAqB;AACrC,0BAAY,KAAK,MAAM;;AAE3B,iBAAK,MAAM,iBAA+B;AAC1C,iBAAK,QAAQ;AAEb,gBAAI,KAAK;AACL,mBAAK,WAAW,KAAK;;AAEzB,gBAAI,KAAK,cAAc,QAAQ;AAI3B,mBAAK,YAAY;;;AAOzB,eAAK,yBAAyB;AAI9B,cAAI;AACA,yBAAa,KAAK,KAAK,QAAQ;AAI/B,+BAAmB,OAAqB
;AACxC,mCAAuB;AAGvB,0BAAc;AACV,kBAAI,WAAW,QAAQ,OAAO;AAC1B,oBAAI,EAAE;;AAEV,6BAAe,KAAK;;AAExB,qBAAS,iBAA+B;AACxC,gBAAI,KAAK,uBAAuB;AAC5B,oBAAM,IAAI,oBAAoB;;AAIlC,mBAAO;;AAGP,+BAAmB,kBAAkB;AACrC,gCAAoB,KAAK,mBAAmB;AAC5C;AACA,gCAAoB,iBAAiB;AACrC,iBAAK,6BAA6B,MAAM,QAAQ,UAAU,WAAW,KACjE;AACJ,gBAAI,eAAe,QAAQ,YAAY,SAAS,KAC5C,MAAM,QAAQ,YAAY;AAE1B,uBAAS,YACJ,IAAI,mBAAkB,IAAI,eAAe,aAAa,OAAO,MAAM,OAAqB,SAAS,QAAQ,KAAK,MAAM;;AAGzH,uBAAS,IAAI,eAAe,aAAa,aAAa,MAAM,OAAqB,SAAS,QAAQ,KAAK;;AAS3G,iBAAK,eAAe,QAAQ,QAAQ,MAAM,MAAM,YAAY,aAAa;AACzE,iBAAK;AACL,gBAAI,KAAK,uBAAuB;AAC5B,oBAAM,IAAI,oBAAoB;;AAGlC,mBAAO;;;;MAWnB;AACI,YAAI,KAAK,mBAAmB;AACxB;mBAEK,WAAW,WAAW,KAAK,gBAAgB;AAChD,kBAAQ,KAAK,iDACN,KAAK,UAAU,4DACE,KAAK,UAAU,KAAK,kCACxB,KAAK;;AAGzB,4BAAkB;AAClB,eAAK,gBAAgB,QAAQ;AACzB,gBAAI,aAAa,QAAQ,WAAW,MAAM,QACtC,WAAW,OAAO;AAClB,4BAAc;;;AAGtB,cAAI;AACA,oBAAQ,KAAK,kCACL,KAAK,UAAU,wDACe,KAAK,SACpC,KAAK,UAAU,KAAK;;;;UAgBnC;AACA,YAAI,KAAK,gBAAgB,QAAQ,KAAK,aAAa,WAAW;AAC1D,gBAAM,IAAI,eAAe,aAAa,KAAK;;AAG/C,gCAAwB;AACxB,2BAAmB,KAAK;AACpB,8BAAoB,KAAK,UAAU,KAAK;AACxC,cAAI,gBAAgB,QAAQ,iBAAiB;AACzC,4BAAgB,KAAK;;;AAG7B,YAAI,gBAAgB,WAAW;AAC3B,+BAAqB,KAAK,aAAa,GAAG;AAC1C,cAAI,MAAM,QAAQ,iBAAiB,MAAM,QAAQ,aAAa,OAC1D,aAAa,WAAW;AACxB,mBAAO,aAAa;;AAGpB,mBAAO;;;AAIX,gBAAM,IAAI,eAAe,aAAa,KAAK;;;MAgBnD;AACI,YAAI,CAAC,KAAK;AACN,gBAAM,IAAI,aAAa,sCAAsC,KAAK;;AAItE,eAAO,qBAAoC,KAAK;;MAapD;AACI,aAAK,QAAQ;;MAUjB,2BAA2B;AACvB,eAAO,cAAc,gBAAgB,KAAK,mBAAmB,KAAK;;MActE;AACI,aAAK;AACD,yBAAe,KAAK;AACpB,cAAI,OAAO,WAAW,QAAQ;AAK1B,kBAAM,IAAI,WAAW,4CAA4C,KAAK,sCAClC,QAAQ,uCACT,OAAO,qCACjB;;AAE7B,cAAI,OAAO,WAAW;AAClB;;AAEJ,oCAA0B;AAC1B,8BAAoB,cAAc;AAClC,uBAAa,GAAG,IAAI,YAAY,QAAQ,EAAE;AACtC,uBAAW,YAAY;AACvB,uBAAU,OAAO;AACjB,sBAAU,QAAQ;AAClB,gBAAI,CAAC,aAAiB,GAAG,OAAO,EAAE;AAC9B,oBAAM,IAAI,WAAW,sBAAsB,GAAG,mDACG,EAAE;;AAEvD,8BAAkB,KAAK,CAAC,IAAG;;AAE/B,wBAAc;;;MAkBtB;AAEI,YAAI,KAAK,kBAAkB,QAAQ,UAAU;AACzC,gBAAM,IAAI,WAAW,yBAAyB,kBAAkB,KAAK;;AAEzE,aAAK,kBAAkB,KAAK;AAC5B,YAAI,SAAS;AACT,kBAAQ;;AAEZ,YAAI,KAAK;AACL,wBAAc,eAAe;;AAEjC,0BAAkB,YAAY,MAAM,OAAO;AAC3C,uBAAe,IAAI,cAAc,WAAW,OAAO,MAAM,WAAW;AACpE,kBAAU;AAEV,YAAI,eAAe;AACf,eAAK,QAAQ,MAAM,YAAY,MAAM,OAAO;;AAEhD,YAAI,aAAa;AACb,sBAAY;;AAEhB,YAAI;AACA,eAAK,kBAAkB,KAAK;;AAG5B,eAAK,qBAAqB,KAAK;;AAEnC,eAAO;;MAYX;AACI,aAAK,4BAA4B;;MAUrC;AACI,YAAI,WAAU,QAAQ,MAAM,QAAQ,YAAW,QAAO,WAAW;AAC7D;;AAGJ,kBAAS,OAAqB;AAC9B,YAAI,KAAK,YAAY,UAAa,KAAK,YAAY;AAC/C,eAAK,OAAO,KAAK,GAAG;;;MAc5B;AACI,eAAO;;MAWX;AACI,YAAI,CAAC,KAAK;AACN,cAAI,QAAQ;AACR,gBAAI,MAAM,QAAQ;AACd,mBAAK,QAAQ;AACT,oBAAI,eAAe;AACf,wBAAM,IAAI,UAAU,SAAS,KAAK;;;;AAM1C,oBAAM,IAAI,UAAU,SAAS,KAAK;;;AAK1C,iBAAO;;AAIX,eAAO;;MAcX,yGAAyG;AACrG,gCAAwB,OAAqB;AAC7C,wBAAgB,OAAqB;AACrC,qBAAa,OAAqB;AAClC,sBAAc,OAAqB;AACnC,sBAAc,mBAA+B;AAC7C,uBAAe,mBAA+B;AAE9C,8BAAsB;AACtB,4BAAoB;AACpB,8BAAsB;AACtB,wBAAgB;AAKZ,wBAAc,KAAK,EAAE;AACrB,sBAAY,KAAK,EAAE;AACnB,wBAAc,KAAK,EAAE;;AAKzB,YAAI,KAAK;UACL,eAAe;UACf;UACA;UACA;UACA,cAAc;UACd;UACA;UACA;UACA;UACA;WACD;AAEH,qBAAa,GAAG,IAAI,cAAc,QAAQ;AAEtC,wBAAc,GAAG,cAAc;AAC/B,wBAAc,GAAG,YAAY,KAAK,aAAa,SAAS;AACxD,wBAAc,GAAG,cAAc;;;MAwBvC;AACI,wBAAe,CAAE,MAAM,KAAK,MAAM,WAAW,KAAK;AAClD,YAAI,KAAK,mBAAmB;AACxB,kBAAO,qBAAqB,KAAK;;AAErC,YAAI,KAAK,SAAS;AACd,kBAAO,WAAW,KAAK;;AAE3B,eAAO;;MAOX;AACI,aAAK,QAAQ,QAAQ,YAAU,OAAO;AACtC,eAAO,KAAK,QAAQ;;MAExB;AACI,YAAI,KAAK,cAAc;AACnB,gBAAM,IAAI,MAAM,UAAU,KAAK;;;MAiCvC;AACI,YAAI,CAAC,KAAK;AACN,gBAAM,IAAI,MAAM,wBAAwB,KAAK;;AAGjD,YAAI,KAAK,cAAc;AACnB,gBAAM,IAAI,MAAM,wBAAwB,KAAK;;AAGjD,aAAK;AACL,mCAA2B;AAC3B,YAAI,EAAE,KAAK,cAAc;AACrB,iCAAuB,KAAK;;AAEhC,eAAO,CAAE,sBAAsB,KAAK,WAAW;;;AAavD;AACI,qBACI,OAAqB;AACzB,qBAAe;AACf,sBAAgB;
AACZ,eAAO,KAAK,EAAE;;AAElB,aAAO,iBAA+B;;AAW1C;AACI,aAAO;;AAaJ;AACH,UAAI,SAAS,QAAS,aAAa,QAAQ,YAAY;AACnD,gBAAQ,QAAO;AACf,oBAAY,QAAO;;AAEvB,UAAI,MAAM,aAAa,WAAW;AAC9B,eAAO,CAAC;;AAGR,qBAAa,MAAM,aAAa;AAChC,YAAI,KAAK,cAAc,WAAW;AAC9B,iBAAO,KAAK;;AAGZ,gCAAsB;AACtB,uBAAa,GAAG,IAAI,KAAK,cAAc,QAAQ;AAC3C,sBAAU,KAAK,aAAa;AAC5B,2BAAc,KAAK,cAAc;AACjC,+BAAkB,KAAK,YAAY;AACnC,oCAAwB,gBAAgB,GAAG,QAAO;AAElD,6BAAgB;AACZ,kBAAI,cAAc,QAAQ,QAAO;AAC7B,8BAAc,KAAK;;;;AAI/B,iBAAO;;;;ACztCnB;;;;;;;;;6BAagC;MAC5B;AACI,cAAM;UACF,OAAO,KAAK;UACZ,MAAM,KAAK,QAAQ,OAAO,KAAK,OAAO,OAAO,SAAS;;AAG1D,YAAI,KAAK,aAAa;AAClB,eAAK,YAAY;;AAErB,YAAI,KAAK,UAAU;AACf,eAAK,SAAS;;AAElB,aAAK,YAAY;AACjB,aAAK,QAAQ;AACb,aAAK,SAAS,KAAK;AACnB,YAAI,KAAK,cAAc,QAAQ,KAAK,mBAAmB;AACnD,gBAAM,IAAI,WAAW;;AAGzB,8BAAsB,KAAK;AAC3B,YAAI,mBAAmB;AACnB,cAAI,KAAK,cAAc;AACnB,kBAAM,IAAI,WAAW;;AAIrB,8BAAkB,CAAC,KAAK,WAAW,OAAO,KAAK;;;AAKnD,cAAI,KAAK,aAAa;AAClB,kBAAM,IAAI,WAAW;;;AAI7B,sBAAc,KAAK,SAAS;AAC5B,aAAK,kBAAkB;AACvB,aAAK,QAAQ;AAEb,aAAK,YAAY,CAAC,CAAE,OAAO;AAC3B,4BAAoB,IAAI,eAAe,KAAK,OAAO,KAAK,iBAAiB,MAAM,IAAI,IAAI,KAAK;AAC5F,oBAAY,YAAY;AACxB,oBAAY,cAAc;AAI1B,YAAI,KAAK;UACL,eAAe;UACf,eAAe;UACf,aAAa;UACb,eAAe;UACf,cAAc,CAAC;UACf,eAAe,CAAC;UAChB,YAAY,CAAC;UACb,aAAa,CAAC;UACd,aAAa,CAAC;UACd,cAAc,CAAC;;;MAGvB;AACI,cAAM,IAAI,WAAW,6EACgC,KAAK;;MAE9D;AAEI,eAAO,CAAE,sBAAsB,KAAK,WAAW,sBAAsB;;MAEzE;AACI,eAAO;UACH,iBAAiB,KAAK;UACtB,OAAO,KAAK;UACZ,QAAQ,KAAK;UACb,MAAM,KAAK;;;;AAKvB,eAAW,YAAY;AACvB,kBAA4B;AACrB;AACH,UAAI,QAAO,cAAc,QAAQ,QAAO,SAAS;AAC7C,cAAM,IAAI,MAAM;;AAKpB,UAAI,QAAO,cAAc,QAAQ,QAAO,SAAS;AAE7C,cAAM,IAAI,WAAW;;AAGzB,uBAAiB,QAAO;AACxB,UAAI,QAAO,SAAS,QAAQ,cAAc;AACtC,qBAAa,CAAC,MAAM,OAAO,QAAO;;AAEtC,kBAAY,QAAO;AACnB,UAAI,SAAS;AACT,gBAAQ;;AAEZ,0BAAmB,IAAI,WAAW;QAC9B,iBAAiB;QACjB,MAAM,QAAO;QACb;QACA,QAAQ,QAAO;;AAEnB,sBAAgB,YAAW,aAAa,GAAG;AAC3C,aAAO,QAAQ;;ACzHnB;;;;;;;;;AAeO;AACH,UAAI,QAAQ;AACR;;AAEJ,uBAAiB;AACjB,mBAAa;AACb,+BAAyB;AACzB,wBAAkB;AACd,sBAAc,KAAK;AACnB,YAAI,OAAO,UAAU;AACjB,8BAAoB;AACpB,mBAAS,KAAK,YAAY;AAC1B,eAAK,KAAK;AACV,2BAAiB,KAAK;;;AAG9B,UAAI,SAAS,SAAS;AAClB,uBAAe,MAAM,QAAQ,IAAI;AACjC,qBAAa,GAAG,IAAI,OAAO,QAAQ,EAAE;AACjC,eAAK,KAAK,MAAM,OAAO,GAAG;;AAG9B,gBAAQ;;;AAST;AACH,UAAI,QAAQ;AACR;;AAEJ,wBAAkB;AACd,sBAAc,KAAK;AACnB,YAAI,OAAO,UAAU;AACjB,gBAAM;;;;ACrDlB;;;;;;;;;AAeO;AACN,IAAA;AACG,6BAAsB,uBAAsB,YAAY,KAAK;AAC7D,6BAAsB,uBAAsB,aAAa,KAAK;OAC/D,yBAA0B,yBAAwB;AAE9C,mCAA+B;;MAoBlC;AAEI,aAAK,iBAAiB;;MAE1B;AACI,aAAK,SAAS;;YAEZ;;YACA;;YACA;;YACA;;YACA;;YACA;;MAQN;;;;MAmBA,sCAAqC;AAGjC,YAAI,cAAa;AACb,uBAAY;;AAEhB,aAAK,YAAY;AACjB,aAAK,cAAc;;MAEvB;AACI,aAAK,UAAU,KAAK;;MAExB;AACI,+BAAuB,KAAK;AACxB,mBAAS,UAAU;;;MAG3B;AACI,+BAAuB,KAAK;AACxB,mBAAS,SAAS;;;YAQpB;AACF,YAAI,QAAQ;AACR,iBAAO;;AAEX,+BAAuB,KAAK;AACxB,gBAAM,SAAS,aAAa,OAAO;;;YAQrC;AACF,YAAI,QAAQ;AACR,iBAAO;;AAEX,+BAAuB,KAAK;AACxB,gBAAM,SAAS,WAAW,OAAO;;;YAQnC;AACF,YAAI,QAAQ;AACR,iBAAO;;AAEX,+BAAuB,KAAK;AACxB,gBAAM,SAAS,aAAa,OAAO;;;YAQrC;AACF,YAAI,QAAQ;AACR,iBAAO;;AAEX,+BAAuB,KAAK;AACxB,gBAAM,SAAS,WAAW,OAAO;;;YAOnC;AACF,YAAI,QAAQ;AACR,iBAAO;;AAEX,+BAAuB,KAAK;AACxB,gBAAM,SAAS,aAAa;;;YAO9B;AACF,YAAI,QAAQ;AACR,iBAAO;;AAEX,+BAAuB,KAAK;AACxB,gBAAM,SAAS,WAAW;;;;6BASN;MAC5B;AACI;;YAEE;AACF,aAAK,OAAO;AACZ,aAAK,SAAS;;YAEZ;AACF,YAAI,QAAQ;AACR,iBAAO;;AAEX,0BAAkB,KAAK,WAAW,OAAO,IAAI,KAAK;AAClD,aAAK,QAAQ;AACb,0BAAkB;AACd,wBAAc,KAAK;AACnB,cAAI,OAAO,UAAU;AACjB,gBAAI,CAAC,KAAK,OAAO,eAAe;AAC5B,mBAAK,OAAO,OAAO;;AAEvB,iBAAK,OAAO,OAAO,KAAK,OAAO,OAAO,QAAQ;;AAG9C;AACA,gBAAI,OAAO,KAAK;AACZ,mCAAqB,KAAK,OAAO;;AAGjC,mBAAK,OAAO,OAAO;;AAEvB,0BAAc,KAAK,MAAM,MAAK,KAAK,OAAO,MAAO,KAAI,OAAO;AAC5D,iBAAK,OAAO,OAAO;AACnB,gBAAI,sBAAsB;AACtB,iCAAmB;;
;;;YAK7B;AACF,YAAI,QAAQ;AACR,4BAAkB,KAAK,OAAO;AAC1B,gBAAI,KAAK,OAAO,QAAQ;AACpB;;AAEJ,gBAAI,OAAO,KAAK,OAAO,SAAS;AAC5B,mBAAK,OAAO,KAAK,OAAO,OAAO,KAAK;;AAGpC,mBAAK;AACD,6BAAY,KAAI,IAAI,GAAG,KAAK,OAAO,KAAK,OAAO;AAC/C,qBAAK,OAAO;AACZ,qBAAK,OAAO,KAAK;AACjB,qBAAK,KAAK;;;;;;;0BAYL;YACnB;AACF,aAAK,QAAQ;AACb,aAAK,UAAU;;YAEb;AACF,YAAI,QAAQ;AACR,iBAAO;;AAEX,aAAK,MAAM,KAAK;AAChB,0BAAkB;AACd,cAAI,KAAK,QAAQ,QAAQ;AACrB,iBAAK,QAAQ,OAAO;;AAExB,eAAK,QAAQ,KAAK,KAAK,KAAK;;;YAM9B;AACF,yBAAiB;AACjB,qBAAa;AACb,wBAAgB;AAChB,0BAAkB,KAAK;AACnB,6BAAmB,KAAK,QAAQ;AAChC,uBAAa,GAAG,IAAI,WAAW,QAAQ,EAAE;AACrC,gBAAI,OAAO,WAAW,OAAO;AACzB,kCAAoB,WAAW;AAC/B,uBAAS,KAAK,YAAY;AAC1B,mBAAK,KAAK;AACV,sBAAQ,KAAK;;;;AAIzB,uBAAe,MAAM,QAAQ,IAAI;AACjC,qBAAa,GAAG,IAAI,OAAO,QAAQ,EAAE;AACjC,kCAAwB,KAAK,QAAQ,KAAK,IAAI,QAAQ;AACtD,0BAAgB;AAChB,eAAK,QAAQ,KAAK,IAAI,QAAQ,MAAM,OAAO,GAAG;;;;iCAOtB;MAChC;AACI;AACA,aAAK,eAAe;AACpB,aAAK,aAAa,cAAc;AAChC,YAAI,KAAK,eAAe;AACpB,eAAK,aAAa;;AAEtB,YAAI,KAAK,eAAe,WAAW,KAAK,WAAW;AAC/C,gBAAM,IAAI,MAAM;;AAGpB,YAAI,UAAc,KAAK;AAGnB,eAAK,YAAY,SAAuB,KAAK,UAAU,KAAK,OAAO,KAAK;;AAE5E,aAAK,aAAa,KAAK;AACvB,aAAK,WAAW,KAAK;AACrB,aAAK,aAAa,KAAK;AACvB,aAAK,WAAW,KAAK;AACrB,aAAK,aAAa,KAAK;AACvB,aAAK,WAAW,KAAK;AACrB,aAAK,QAAQ,KAAK;;YAEhB;AACF,mBAAW;AACX,YAAI,KAAK,SAAS;AACd,gBAAM,qBAAqB;AAC3B,aAAG,KAAK,KAAK,MAAM,OAAO,OAAO;;AAErC,WAAG,KAAK;AACR,cAAM,QAAQ,IAAI;;YAEhB;AACF,aAAK,eAAe;AACpB,YAAI,KAAK,cAAc;AACnB,gBAAM,qBAAqB;AAC3B,gBAAM,KAAK,WAAW,OAAO;;;YAG/B;AACF,mBAAW;AACX,YAAI,KAAK,YAAY;AACjB,gBAAM,qBAAqB;AAC3B,aAAG,KAAK,KAAK,SAAS,OAAO;;AAEjC,YAAI,KAAK,eAAe;AACpB,aAAG,KAAK;;AAEZ,cAAM,QAAQ,IAAI;;YAEhB;AACF,YAAI,KAAK,cAAc;AACnB,gBAAM,qBAAqB;AAC3B,gBAAM,KAAK,WAAW,OAAO;;;YAG/B;AACF,mBAAW;AACX,YAAI,KAAK,YAAY;AACjB,gBAAM,qBAAqB;AAC3B,aAAG,KAAK,KAAK,SAAS,OAAO;;AAEjC,YAAI,KAAK,eAAe;AACpB,aAAG,KAAK;mBAEH,UAAc,KAAK;AACxB,aAAG,KAAK,KAAK,UAAU,KAAK,cAAc,OAAO;;AAErD,cAAM,QAAQ,IAAI;;YAEhB;AACF,YAAI,KAAK,cAAc;AACnB,gBAAM,qBAAqB;AAC3B,gBAAM,KAAK,WAAW;;;YAGxB;AACF,YAAI,KAAK,YAAY;AACjB,gBAAM,qBAAqB;AAC3B,gBAAM,KAAK,SAAS;;;;AAOzB;AACH,UAAI,cAAa;AACb,qBAAY;;AAEhB,UAAI,sBAAqB;AACrB,eAAO,CAAC;;AAEZ,UAAI,MAAM,QAAQ,eAAc,WAAU,cAAc;AACpD,eAAO;;AAGX,8BAAwB,OAAqB;AAC7C,aAAO,gBAAgB,IAAI,oBAAkB,IAAI,eAAe,gBAAgB;;;MAUhF;;aAaO;AACH,gBAAY,kBAAkB,KAAK,OAAO,UAAU,iBAAiB,MAAM,8DAC5D;AACf,oCAA4B,kBAAkB;AAC9C,YAAI,4BAA4B,aAAa,mBAAmB;AAC5D,sCAA4B,aAAa,kBAAkB;;AAE/D,oCAA4B,aAAa,gBAAgB,KAAK;;aAE3D;AACH,gCAAwB,4BAA4B;AAChD,+BAAqB,4BAA4B,aAAa,CAAC;AAC/D,uBAAa,QAAQ;AACjB,gBAAI,SAAS;AACT,oBAAM,IAAI,WAAW;;;;;aAQ9B;AACH,oCAA4B,eAAe;;aAUxC;AACH,6BAAqB;AACrB,gCAAwB,4BAA4B;AAChD,wBAAc,CAAC;AACf,cAAI,kBAAkB;AAClB,yBAAa,KAAK,GAAG,4BAA4B,aAAa;;;AAGtE,eAAO,aAAa,IAAI,UAAQ,IAAI;;;AAG5C,gCAA4B,eAAe;AACpC;AACH,sBAAgB,IAAI;AACpB,8BAAwB;QACpB,IAAI;QAAc,GAAG,4BAA4B,gBAAgB;;AAErE,UAAI,cAAa;AACb,wBAAgB,KAAK,GAAG;;AAE5B,sBAAgB,KAAK;AACrB,2BAAqB,IAAI,aAAa;AAItC,mBAAa,UAAU;QACnB;QACA;QACA,SAAS;QACT,OAAO;QACP;QACA;QACA;QACA,SAAS;;AAEb,aAAO,CAAE,cAAc;;ACre3B;;;;;;;;;AAuBO,kDAA6C,qBAAqB;AACrE,aAAO,uBAAuB,SAAQ,iBAA+B,SAAS,cAAc,eAAe,SAAS;;ACxBxH;;;;;;;;;AAoBO;AACH,aAAO,KAAK;AACR,YAAI,EAAE,UAAU;AACZ,cAAI,EAAE,OAAO;;AAEjB,0BAAkB,MAAQ,SAAS,IAAI,MAAM;AAC7C,8BAAsB,MAAS,UAAU,OAAO;AAChD,sBAAa,KAAS,QAAY,WAAW;AAC7C,eAAO,IAAQ,GAAG;;;AAGnB;AACH,aAAO,KAAK,MAAM,KAAS,SAAS,IAAQ,OAAO,SAAS;;AAEzD;AACH,aAAO,KAAK,MAAM,KAAS,IAAQ,IAAQ,OAAO,SAAS;;AAExD;AACH,aAAO,KAAK;AACR,qBAAa,IAAQ,OAAO;AAC5B,4BAAoB,YAAgB,IAAQ,QAAQ,WAAW,OAAO;AACtE,0BAAkB,IAAQ,IAAQ,MAAM;AACxC,eAAO,KAAQ,KAAK,KAAS,WAAW;;;AAGzC;AACH,aAAO,KAAK;AACR,4BAAoB,YAAgB,OAAO,WAAW,OAAO;AAC7D,yBAAiB,MAAQ,MAAQ,GAAG;AACpC,4BAAoB,YAAgB,OAAO,WAAW,OAAO;AAC7D,0BAAkB,MAAQ,MAAQ,GAAG
;AACrC,eAAO,KAAS,SAAS,IAAQ,UAAU,aAAa;;;AAGzD;AACH,aAAO,KAAK;AACR,0BAAkB,QAAY,GAAG,IAAQ,GAAG,KAAQ,OAAO;AAC3D,eAAO,KAAS,SAAS,YAAY;;;AAGtC;AACH,aAAO,KAAK;AACR,0BAAkB,QAAY,GAAG,IAAQ,GAAG,KAAQ,OAAO;AAC3D,eAAO,KAAS,WAAW;;;AAG5B;AACH,aAAO,KAAK;AACR,oBAAY,MAAQ,KAAQ,OAAO,QAAQ;AAC3C,qBAAY,KAAQ,KAAQ,IAAQ,GAAG,QAAQ,QAAQ;AACvD,eAAO,QAAY,GAAG,MAAQ,GAAG,IAAQ,MAAK;;;AAW/C;AACH,aAAO,KAAK;AACR,uBAAa,KAAK,IAAI;AACtB,+BAAuB,IAAQ,OAAO;AACtC,8BAAsB,IAAQ,MAAQ,gBAAgB,SAAa,KAAQ,IAAI,mBAAmB;AAClG,eAAO,KAAS,eAAe;;;AAGhC,kEAA8D;AACjE,aAAO,KAAK;AACR,YAAI;AACA,mBAAS,SAAY;;AAIrB,4BAAkB,MAAQ,QAAQ,OAAO,MAAM,SAAS,GAAG;AAC3D,mBAAS,IAAQ,QAAQ;;AAE7B,iBAAS,YAAgB,QAAQ,WAAW,IAAI;AAChD,eAAO,IAAQ,MAAQ,KAAQ,OAAO,WAAW,MAAQ,UAAU,OAAO,MAAM,SAAS;;;AAY1F,wEAAoE;AACvE,aAAO,KAAK;AACR,2BAAmB,MAAU,UAAU,SAAS;AAChD,iBAAS,YAAgB,QAAQ,WAAW,IAAI;AAChD,4BAAoB,OAAO;AAC3B,6BAAqB,QAAW,YAAY,YAAY,YAAY,SAAS,IACxE,QAAQ;AACb,eAAO,wBAAwB,cAAc,QAAQ;;;AAwBtD;AACH,UAAI,CAAC,aAAiB,OAAO,OAAO,OAAO;AACvC,cAAM,IAAI,WAAW,8DACd,KAAK,UAAU,OAAO,cAAc,KAAK,UAAU,OAAO;;AAErE,aAAO,KAAK;AAOR,2BAAmB,OAAO;AAC1B,6BAAqB,OAAO,MAAM;AAClC,eAAO,WAAW,IAAI,OAAO,IAAI,SAAS,IAAI,aAAa,MAAM;;;AAGlE;AACH,aAAO,KAAK;AACR;AACA,YAAI,YAAgB,OAAO,WAAW,IAAI;AAC1C,YAAI,MAAQ,IAAQ,GAAG,IAAQ,GAAG;AAClC,eAAO,KAAS,8BAA8B,OAAO,IAAI;;;AAG1D;AACH,aAAO,KAAK;AACR,4BAAoB,YAAgB,OAAO,WAAW;AACtD,4BAAoB,YAAgB,OAAO,WAAW;AACtD,eAAO,MAAQ,KAAQ,OAAO,MAAQ,IAAQ,aAAa,gBAAgB;;;AAG5E;AACH,aAAO,KAAK;AACR,wBAAgB,MAAQ,MAAQ,WAAW;AAC3C,eAAO,KAAS,IAAQ,OAAO,KAAQ,OAAO,WAAW;;;AAG1D;AACH,aAAO,KAAK;AACR,+BAAuB,YAAY,OAAO;AAC1C,+BAAuB,YAAY,OAAO;AAC1C,0BAAkB,KAAQ,gBAAgB;AAC1C,eAAO,IAAQ,MAAQ,WAAW;;;AAGnC,gBAAY;AACZ,gBAAY;AACZ,gBAAY;AACZ,gBAAY;AACZ,iBAAa;AACb,iBAAa;AACb,iBAAa;AACb,iBAAa;AACb,gBAAY;AACZ,gBAAY;AACZ,mBAAe;AAEf,sBAAkB;MACzB,kBAAI;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;;AAIG;AACH,UAAI,OAAO,mBAAmB;AAC1B,YAAI,kBAAkB;AAClB,iBAAO,UAAU;;AAErB,qBAAa,gBAAgB;AAC7B,YAAI,eAAe,cAAc,SAAS;AACtC,mBAAS,gBAAgB;;AAI7B,cAAM,IAAI,WAAW;;AAGrB,eAAO;;;ACzOf;;;;;;;;;AAoBO;AACH,aAAO,KAAK;AACR,2BAAkB,KAAQ,KAAI,UAAa;AAC3C,iCAAyB,OAAO,QAAY,OAAO,aAAY,MAAM;AACrE,eAAO,KAAS,MAAU,OAAO,mBAAmB;;;AAGrD;AACH,aAAO,KAAK,MAAM,OAAO,MAAU,OAAW,OAAO,KAAK,OAAW,OAAO,MAAM;;AAEtF;AACI,aAAO,KAAK;AACR,eAAO,WAAe,MAAM,MAAM,IAAI,MAAM,MAAM,IAAI,MAAM,KAAK;;;AAGzE;AACI,aAAO,KAAK;AACR,eAAO,WAAe,MAAM,MAAM,IAAI,MAAM,MAAM,IAAI,MAAM,KAAK;;;AAGzE;AACI,aAAO,KAAK;AACR,eAAO,WAAe,MAAM,MAAM,IAAI,MAAM,MAAM,IAAI,MAAM,KAAK;;;AAGlE;AACH,aAAO,KAAK;AACR,mBAAW,cAAc,OAAO;AAChC,mBAAW,eAAe,OAAO;AACjC,4BAAoB,GAAG,IAAI;AAC3B,eAAO,MAAU,QAAY,aAAa,IAAI,GAAG,IAAI,cAAc,GAC9D,KAAK;;;AAGX;AACH,aAAO,KAAK;AACR,mBAAW,cAAc,OAAO;AAChC,mBAAW,eAAe,OAAO;AACjC,4BAAoB,GAAG,IAAI;AAC3B,eAAO,MAAU,QAAY,aAAa,IAAI,GAAG,IAAI,cAAc,GAC9D,KAAK;;;AAGX;AACH,aAAO,mBAAuB,OAAO;;AAElC;AACH,UAAI,MAAM,SAAS,MAAM;AACrB,gBAAQ,MAAM,QAAQ,CAAC,MAAM,OAAO;;AAExC,cAAQ,MAAM,OAAO;AACrB,UAAI,MAAM,UAAU,MAAM;AACtB,gBAAQ,MAAM,OAAO,MAAM;;AAE/B,aAAO,MAAU,OAAO,OAAO,OAAO;;AAEnC;AACH,YAAM,IAAI;;AAEP;AACH,YAAM,IAAI;;AAGP,kBAAY;AACZ,kBAAY;AACZ,kBAAY;AACZ,kBAAY;AACZ,mBAAa;AACb,mBAAa;AACb,sCAAgC;AAChC,qBAAe;AACf,4CAAsC;AAEtC,uBAAmB;MACtB;MACA;MACA;MACJ,yBAAI;MACJ,+BAAI;MACJ,KAAI;MACJ,KAAI;MACJ,KAAI;MACJ,KAAI;MACJ,MAAI;MACJ,MAAI;MACJ,QAAI;;AAEG;AACH,UAAI,OAAO,eAAe,YAAY,cAAc;AAChD,eAAO,WAAW;iBAEb,OAAO,eAAe,YAAY,cAAc;AACrD,eAAO;;AAGP,cAAM,IAAI,WAAW,kBAAkB;;;AAoBxC;AACH,eAAY,OAAO,MAAM,0BAA0B;AACnD,UAAI,OAAO,OAAO;AACd,eAAO;;AAGP;AACA,0BAAkB,OAAO,KAAK;AAC1B,cAAI,UAAU,SAAS;AACnB,qBAAS;AACT;;;AAGR,YAAI,WAAW;AACX,iBAAO;;AAEX,0BAAkB,OAAO,KAAK;AAC1B,cAAI,WAAW,SAAS;AACpB,qBAAS;AACT;;;AAGR,YAAI,WAAW;AACX,iBAAO;;AAEX,eAAO,GAAG;;;AChKlB;;;;;;;;;AAkBO;
AACH,2BAAqB;QACjB,SAAW,MAAM,MAAM,QAAQ;QAC/B,UAAY,MAAM,MAAM,SAAS,GAAG,MAAM;QAC1C,MAAQ,MAAM,MAAM,KAAK,MAAO,KAAK,OAAO;QAC5C,QAAU,MAAM,MAAM,OAAO,MAAO,KAAK,OAAO,WAAW;QAC3D,SAAW,MAAM,MAAM,QAAQ,MAAO,KAAK,GAAG;QAC9C,KAAO,MAAM,MAAM,IAAI;;AAE3B,mBAAa,aAAa,aAAa;AACvC,mBAAa,cAAc,aAAa;AACxC,mBAAa,UAAU,aAAa;AACpC,mBAAa,YAAY,aAAa;AACtC,mBAAa,aAAa,aAAa;AACvC,mBAAa,SAAS,aAAa;AACnC,UAAI,cAAc;AACd,eAAO,aAAa;;AAExB,YAAM,IAAI,WAAW,qBAAqB;;ACpC9C;;;;;;;;;AAaO,wDAAoD,IAAI,OAAO;AAa/D,kFAA8E;AACjF,UAAI,uBAAuB,QACvB,OAAO,wBAAwB,YAC/B,OAAO,eAAe,yBAAyB,OAAO,aACtD,CAAC,iBAAiB;AAClB,cAAM,IAAI,MAAM;;AAEpB,UAAI;AACA,oBAAY,KAAK,UAAU;AAC3B,YAAI,IAAI,SAAS;AACb,kBAAQ,KAAK,mCAAmC,2CAC5B,IAAI,qJAGjB;;;;AAeZ;AACH,UAAI,MAAM;AAEN,eAAO;iBAEF,OAAO,MAAM;AAClB,YAAI,OAAO,eAAe,OAAO,OAAO;AAEpC,uBAAa,OAAO,KAAK;AACzB,4BAAkB;AACd,gBAAI,OAAO,QAAQ;AAEf,qBAAO;;AAEX,gBAAI,CAAC,iBAAiB,EAAE;AACpB,qBAAO;;;AAGf,iBAAO;;AAIP,cAAI,MAAM,QAAQ;AAEd,+BAAmB;AACf,kBAAI,CAAC,iBAAiB;AAClB,uBAAO;;;AAGf,mBAAO;;AAMP,mBAAO;;;;AAMf,sBAAc,OAAO;AACrB,eAAO,UAAU,YAAY,UAAU,YAAY,UAAU;;;ACjGrE;;;;;;;;;AAyBO,mEAEG,QAAQ;AACd,6BAAuB,sBAAsB;AAE7C,wBAAkB,CAAC,gBAAgB,gBAAgB;AACnD,UAAI;AACA,qBAAa,cAAc;AAC3B,oBAAY,aAAa,CAAC,MAAM,MAAM;;AAGtC,qBAAa,cAAc;AAC3B,oBAAY,aAAa,CAAC,MAAM,MAAM,MAAM;;AAGhD,UAAI,UAAU,UAAU,SAAS,MAAM;AAEnC,oBAAY,UAAU,IAAI,QAAK,KAAK,MAAM,aAAa;;AAE3D;AACA,UAAI,CAAC;AACD,kBAAU,KAAK;AACf,wBAAgB;AAChB,4BAAoB,OAAM;AACtB,wBAAc,KAAK,GAAG,OAAM,aAAa;;;AAGjD,cAAQ,IAAI,OAAO;AACnB,eAAS,WAAW,WAAW;AAC/B,cAAQ,IAAI,OAAO;AACnB,qBAAe,OAAM;AACrB,mBAAa,GAAG,IAAI,OAAO,QAAQ,EAAE;AACjC,YAAI;AACA,4BAAkB,OAAO,IAAI,WAAW;;AAGxC,2CAAiC,OAAO,IAAI,WAAW,eAAe;;AAE1E,gBAAS,OAAM,OAAO,SAAS,IAAI,MAAM,KAAK,OAAO;;AAGzD,aAAM;AACN,6BAAuB,qBAAqB;AAC5C,gCAA0B,qBAAqB,OAAM;AACrD,cAAQ,iBAAiB,iBAAiB;AAC1C,cAAQ,qBAAqB;AAC7B,cAAQ,yBAAyB;AACjC,cAAQ,IAAI,OAAO;;AAEvB;AACI;AAEA,UAAI,OAAM,6BAA6B;AACnC,yBACI,qBAAqB,OAAM;;AAG/B,yBAAiB,qBAAqB,OAAM;;AAGhD,aAAO;;AAEX;AACI,2BAAqB;AACrB,2BAAqB;AACrB,oBAAc;AACd,0BAAoB,OAAM;AACtB,qBAAa,KAAK,OAAM,aAAa;;AAEzC,+BAAyB;AACrB,YAAI,WAAW,SAAS,KACpB,WAAW,WAAW,KAAK,WAAW,GAAG,cAAc,SAAS;AAChE,2BAAiB;AACjB;;AAEJ,cAAM,KAAK,GAAG;;AAElB,UAAI;AAEA,4BAAoB,OAAM;AACtB,qBAAW;AACX,6BAAmB,MAAM;AACrB,gBAAI,MAAM,QAAQ,UAAU;AACxB,kBAAI;AACA,iCAAiB;AACjB;;AAGA,uBAAO;;;;AAInB,cAAI,CAAC;AACD;;;;AAIZ,aAAO;;AAEX,mDAEU,QAAQ;AACd,iBAAW;AACX,mBAAa,GAAG,IAAI,OAAO,QAAQ,EAAE;AACjC,YAAI,IAAI;AACJ,iBAAO,KAAK,MAAM,GAAG,KAAK,SAAS,KAAK;;AAE5C,gBAAQ,OAAO;AACf,eAAO,KAAK,MAAM,GAAG,UAAU;AAC/B,gBAAQ,IAAI,OAAO,UAAU,KAAK,KAAK;;AAE3C,cAAQ;;AAOZ;AAGI;AACA;AACI,sBAAc,KAAK,UAAU,MAAM;;AAGnC,sBAAc;;AAElB,mBAAa,MAAM;AACnB,wBAAkB,MAAM;AACxB,qBAAe,CAAC,GAAG,SAAS,cAAc,aAAa,MAAM,cAAc;AAC3E,eAAS,QAAQ,WAAW;;AAKhC;AAGI;AACA;AACI,sBAAc,KAAK,UAAU,MAAM;;AAGnC,sBAAc;;AAElB,0BAAoB;AACpB,yBAAmB,MAAM;AACrB,YAAI,iBAAiB,QAAQ,cAAc,SAAS,KAChD,cAAc,QAAQ,UAAU;AAChC;;AAEJ,qBAAa,GAAG,IAAI,KAAK,cAAc,QAAQ,EAAE;AAC7C,+BAAqB,KAAK,cAAc,GAAG;AAC3C,oCAA0B,KAAK,YAAY;AAC3C,qCAA2B,KAAK,cAAc;AAC9C,sBAAY,KAAK,GAAG,gBAAgB,sBAAsB;;;AAGlE,mBAAa,MAAM;AACnB,wBAAkB,MAAM;AACxB,8BAAwB,YAAY,WAAW,IAAI,KAAK,YAAY;AACpE,qBAAe;QACX,GAAG,SAAS;QAAc;QAAa,MAAM,cAAc;QAC3D;;AAEJ,eAAS,QAAQ,WAAW;AAC5B,mBAAa,GAAG,IAAI,YAAY,QAAQ,EAAE;AACtC,iBAAS,CAAC,IAAI,IAAI,IAAI,YAAY,KAAK,WAAW;;;ACjM1D;;;;;;;;;AAoBA;AACI,aAAQ,SAAQ,kBAAkB,QAAQ,kBACtC,QAAQ,kBACR,WAAU,KAAK,OAAO,UAAU;;AAQjC;AACH,UAAI,mBAAmB;AACnB,eAAO;iBAEF,OAAO,mBAAmB;AAC/B,eAAO,YAA0B;iBAE3B,OAAO,mBAAmB,YAC/B,OAAO,mBAAmB;AAC3B,eAAO;iBAEF,0BAA0B;AAC/B,wBAAgB;AAChB,4BAAoB,eAAe;AACnC,qBAAa,GAAG,IAAI,aAAa,EAAE;AAC/B,uBAAa,eAAe;AAC5B,cAAI,6BAA6B,KAAK,GAAG;AACrC,oBAAQ,KAAK;;AAGb,oBAAQ,KAAK,oBAAoB,MAAM;;;AAG/C,eAAO;;AAGP,uBAAe;AACf,kCAA0B,OA
AO,KAAK;AAClC,gCAAsB,eAAe;AACrC,cAAI,gBAAgB,UAAU,OAAO,kBAAkB;AAInD,mBAAO,eAAe;;AAGtB,0BAAc,YAA0B;AACxC,mBAAO,SAAS,oBAAoB,eAAe;;;AAG3D,eAAO;;;AASR;AACH,UAAI,aAAa,QAAQ,aAAa;AAClC,eAAO;iBAEF,OAAO,aAAa;AACzB,eAAO,YAA0B;iBAE3B,OAAO,aAAa,YAAc,OAAO,aAAa;AAC5D,eAAO;iBAEF,oBAAoB;AACzB,wBAAgB;AAChB,4BAAoB,SAAS;AAC7B,qBAAa,GAAG,IAAI,aAAa,EAAE;AAC/B,uBAAa,SAAS;AACtB,cAAI,6BAA6B,KAAK,GAAG;AACrC,oBAAQ,KAAK;;AAGb,oBAAQ,KAAK,oBAAoB,MAAM;;;AAG/C,eAAO;;AAGP,uBAAe;AACf,4BAAoB,OAAO,KAAK;AAC5B,0BAAgB,SAAS;AACzB,wBAAc,YAA0B;AACxC,cAAK,WAAU,UAAU,UAAU,gBAC/B,OAAO,YAAY;AAInB,mBAAO,SAAS;;AAGhB,mBAAO,SAAS,oBAAoB,SAAS;;;AAGrD,eAAO;;;ACxHf;AAEK,sBAAW;ACFhB;;;;;;;;;AAoBA;AAEI,UAAI,IAAI,SAAS,QAAQ,IAAI,UAAU,IAAI;AAEvC,eAAO;;AAEX;AAEI,eAAO,MAAK,KAAK,IAAI;;AAIrB,cAAM,IAAI,WAAW,0BAA0B,IAAI,mDAChC,IAAI,UAAU,IAAI;;;;MAazC;AACI,aAAK,WAAW;AAChB,aAAK,UAAU;AACf,aAAK,UAAU;AACf,YAAI,iBAAiB;AACjB,2BAAiB,MAAM;AACnB,iBAAK,SAAS,MAAM,MAAM,SAAS;AACnC,gBAAI,MAAM,MAAM;AACZ,mBAAK,QAAQ,MAAM,MAAM,QAAQ;;;;AAKzC,cAAI,SAAS;AACT;;AAEJ,6BAAmB;AACf,iBAAK,IAAI,KAAK,KAAK,KAAK;;;;MAcpC;AACI,YAAI,KAAK,SAAS,IAAI,OAAO;AACzB,eAAK,SAAS,IAAI,MAAM,wBAAwB,KAAK;AACrD,eAAK,QAAQ,IAAI,QAAQ,IAAI;AAC7B,cAAI,QAAQ;AACR,iBAAK,QAAQ,IAAI,MAAM;;;AAI3B,gBAAM,IAAI,WAAW,uBAAuB,IAAI,YAAY,IAAI;;AAEpE,eAAO;;MAOX;AACI,aAAK,IAAI,KAAK,KAAK,KAAK;;MAM5B;AACI,eAAO,KAAK,SAAS,IAAI,OAAO;;MAKpC;AACI,eAAO,OAAO,KAAK,KAAK;;MAS5B;AACI,YAAI,eAAe;AACf,cAAI,KAAK,SAAS,IAAI,OAAO;AACzB,kBAAM,IAAI,WAAW,oBAAoB,IAAI;;AAG7C,mBAAO,KAAK,SAAS,IAAI;;;AAI7B,qBAAW,KAAK,QAAQ;AACxB,cAAI,MAAM;AACN,kBAAM,IAAI,WAAW,yCAAyC;;AAElE,iBAAO,KAAK,SAAS;;;MAU7B;AACI,YAAI,eAAe;AACf,cAAI,KAAK,SAAS,IAAI,OAAO;AACzB,kBAAM,IAAI,WAAW,oBAAoB,IAAI;;AAG7C,mBAAO,KAAK,QAAQ,IAAI;;;AAI5B,qBAAW,KAAK,QAAQ;AACxB,cAAI,MAAM;AACN,kBAAM,IAAI,WAAW,yCAAyC;;AAElE,iBAAO,KAAK,QAAQ;;;MAI5B;AACI,YAAI,KAAK,WAAW;AAChB,kBAAQ,KAAK;;;;AAMzB,yBAAqB;AAErB,kCAA8B;AAsBvB;AACH,uBAAiB,UAAU,OAAO,QAAQ,OAAO;AACjD,2BAAqB,MAAM,QAAQ;AACnC,yBAAmB,eAAe,UAAU,CAAC;AAC7C,0BAAoB,WAAW,IAAI,OAAK,EAAE;AAC1C,2BAAqB;AACrB,wBAAkB,SAAS;AAC3B,+BAAyB;AACrB,YAAI,UAAU,QAAQ,gBAAgB;AAClC,uBAAa,KAAK,SAAS,SAAS;;AAGpC,uBAAa,KAAK;;;AAG1B,UAAI,SAAS;AAET,cAAM,gBAAgB;AACtB,cAAM,gBAAgB;;AAG1B,8BAAwB,YAAY,KAAK,OAAO,MAAM,SAAS,QAAQ,KAAK;AAC5E;AACA;AACA,UAAI,aAAa,oBAAoB;AAGjC,oBAAY,qCAAqC,YAAY;AAC7D,iBAAS,IAAI;AACb,0BAAkB,IAAI;AAEtB,qBAAa,mBAAmB;AAChC,8BAAsB,mBAAmB;;AAE7C,eAAS,aAAa;AACtB,wBAAkB;AAClB,UAAI,CAAC;AACD,eAAO,OAAO,iBAAiB,sBAAsB;;AAEzD,+BAAyB,IAAI,SAAS;AAEtC,mBAAa,GAAG,IAAI,OAAO,QAAQ,EAAE;AACjC,YAAI,SAAS;AAET,6BAAmB,SAAS;AAC5B,cAAI,aAAa,MAAM;AACnB,kBAAM,gBAAgB;;AAE1B,cAAI,aAAa,MAAM;AACnB,kBAAM,gBAAgB;;;AAG9B,yBAAiB,OAAO;AACxB,yBAAiB,SAAS;AAC1B,YAAI,oBAAoB;AACpB;;AAEJ,4BAAoB;AACpB,2BAAmB;AACnB,iCAAyB;AACzB,yBAAiB;AACjB,6BAAoB,SAAS;AACzB,wBAAc,iBAAiB,SAAS;AACxC,uBAAa,iBAAiB,QAAQ;AACtC,sBAAY,KAAK;AACjB,qBAAW,KAAK;AAChB,cAAI,QAAQ;AACR,yBAAa;;AAEjB,cAAI,CAAC;AACD,4BAAgB,OAAM;AACtB,gBAAI,gBAAgB,OAAM,UAAU,KAAK,CAAC,SAAS,OAAO,WACtD,YAAY,QAAQ,OAAM,UAAU,MAAM,CAAC,MAAM,cACjD,OAAM,YAAY,aAAa;AAC/B,+BAAiB,KAAK;;;;AAIlC,YAAI;AACA,mBAAS,UAAU;AACnB,iBAAO,UAAU,WAAW;;AAEhC,8BAAsB,OAAO,SAAS,MAAM,aAAa;AACzD,yBAAiB;AACjB,YAAI,SAAS;AACT,uBAAa,SAAS,YAAY,aAAa;;AAEnD,6BAAqB,eAAe;AACpC,sCAA8B,MAAM,QAAQ,gBAAgB,eAAe,CAAC;AAC5E,sBAAa,GAAG,KAAI,sBAAsB,QAAQ,EAAE;AAChD,cAAI,CAAC,iBAAiB,OAAO,sBAAsB;AAC/C,6BAAiB,IAAI,sBAAsB,KAAI,cAAc,KAAI,MAAM,QAAQ,cAAc,WAAW,KAAK;;AAEjH,yBAAc,YAAY,QAAQ,sBAAsB,IAAG;AAC3D,cAAI,WAAU;AACV,yBAAa,UAAS,cAAc;;;AAG5C,YAAI,CAAC;AAED,kBAAQ;;;AAQhB,uBAAiB;AACjB,aAAO,eAAe,eAAe,aAAa;;AAatD;AACI,cAAY,WAAW,QAAQ,QAAQ,SAAS,GAAG,MAAM;AACzD,wBAAkB;AAClB,8BAAwB;AACxB,UAAI,QAAQ,WAAW;AAEnB,oBAAY,gDAAgD,QAAQ,I
AAI;AACxE,sBAAc,IAAI;AAClB,4BAAoB,IAAI;;AAGxB,wBAAgB,IAAI;AACpB,6BAAoB;AAChB,iBAAQ,QAAQ,gBAAiB,gDAAgD,QAAO;AAExF,uCAA6B;AACzB,gBAAI,CAAC,QAAQ,IAAI,eAAe;AAC5B,0BAAY,KAAK;AACjB,sBAAQ,IAAI,eAAe;;;AAInC,6BAAmB;AACf,gBAAI,kBAAkB,SAAS;AAC3B,gCAAkB,QAAQ,IAAI;;AAElC,yBAAa,MAAM,QAAQ,eAAa,kBAAkB,MAAM,IAAI;;;;AAIhF,aAAO;QACH,QAAQ;QACR,iBAAiB,oBAAoB;;;AAG7C;AACI,8BAAwB;AACxB,yBAAmB;AACf,wBAAgB,QAAQ,aAAa,MAAM;;AAE/C,aAAO;;AAaJ;AACH,sBAAgB,IAAI;AACpB,qBAAe;AACf,2BAAqB;AAIrB,wBAAkB,SAAS;AACvB,gBAAQ,IAAI;;AAEhB,qBAAc;AACd,oBAAc;AAEd,aAAM,KAAK;AACX,aAAO,OAAM,SAAS;AAClB,oBAAY,OAAM,OAAM,SAAS;AACjC,YAAI,QAAQ,IAAI,IAAI;AAChB,iBAAM;AACN;;AAEJ,4BAAoB,MAAM,MAAM,SAAS,OAAO,OAAM,SAAS;AAC/D,YAAI,IAAI,OAAO,WAAW,KAAK;AAE3B,iBAAM;AACN,iBAAO,KAAK;AACZ,kBAAQ,IAAI,IAAI;AAChB,cAAI;AACA,kBAAM;;;AAMV,gBAAM,KAAK,OAAM,SAAS;AAC1B,+BAAoB,IAAI;AAGpB,gBAAI,aAAa,OAAM,SAAS;AAC5B,2BAAa,OAAM,QAAQ,IAAI;;AAEnC,yBAAa,OAAM,MAAM,IAAI,IAAI;AACjC,gBAAI,QAAQ,IAAI,OAAM;AAClB;;AAEJ,mBAAM,KAAK;;;;AAIvB,aAAO,CAAE,QAAQ;;AAQrB;AACI;AACA,UAAI,OAAM,YAAY,aAAa,WAAW;AAC1C,uBAAe,OAAM,YAAY;;AAGjC,wBAAgB;AAChB,qBAAa,GAAG,IAAI,OAAM,YAAY,aAAa,QAAQ,EAAE;AACzD,qCAA2B,OAAM,YAAY,aAAa,GACrD;AACD,gBAAI,aAAa,OAAO,OAAM;AAC1B,0BAAY;AACZ;;;;AAIZ,uBAAe,OAAM,YAAY,YAAY;;AAEjD,aAAO;;ACzbX;;;;;;;;;4BA6B+B;MAC3B;AAEI,cAAM;AACN,aAAK,iBAAiB,IAAI;AAC1B,aAAK,OAAO,KAAK;AACjB,YAAI,KAAK,QAAQ;AACb,yBAAe,KAAK,eAAe;AACnC,eAAK,OAAO,OAAO;;AAEvB,aAAK,kBAAkB;AACvB,aAAK,aAAa;AAGlB,YAAI,MAAM,QAAQ,KAAK;AACnB,eAAK,SAAS,KAAK,OAAO;;AAG1B,eAAK,SAAS,CAAC,KAAK;;AAExB,YAAI,MAAM,QAAQ,KAAK;AACnB,eAAK,UAAU,KAAK,QAAQ;;AAG5B,eAAK,UAAU,CAAC,KAAK;;AAGzB,YAAI,SAAqB,KAAK,QAAQ,WAAW,KAAK,OAAO;AACzD,gBAAM,IAAI,WAAW,mGAEd,KAAK,OAAO,IAAI,OAAK,EAAE;;AAGlC,YAAI,SAAqB,KAAK,SAAS,WAAW,KAAK,QAAQ;AAC3D,kBAAQ,KAAK,qGAEN,KAAK,QAAQ,IAAI,OAAK,EAAE;;AAMnC,aAAK,cAAc;AACnB,aAAK,yBAAyB;AAC9B,aAAK,2BAA2B;AAKhC,aAAK,eAAe;AACpB,aAAK,0BAA0B;AAC/B,aAAK,4BAA4B;AAKjC,aAAK,SAAS;AAKd,aAAK,wBAAwB;AAa7B,wBAAgB,KAAK;AACjB,wBAAc,EAAE;AAChB,4BAAkB,EAAE;AACpB,8BAAoB,EAAE;AACtB,eAAK,aAAa,KAAK;AACvB,eAAK,wBAAwB,KAAK;AAClC,eAAK,0BAA0B,KAAK;;AAIxC,wBAAgB,KAAK;AACjB,wBAAc,EAAE;AAChB,4BAAkB,EAAE;AACpB,8BAAoB,EAAE;AAKtB,mBAAqB,cAAc,GAAG;AACtC,mBAAqB,gBAAgB,GAAG;AACxC,eAAK,YAAY,KAAK;AACtB,eAAK,uBAAuB,KAAK;AACjC,eAAK,yBAAyB,KAAK;;AAGvC,aAAK,aAAa;AAClB,aAAK,cAAc;AACnB,aAAK,kBAAkB;AACvB,aAAK,iBAAiB;AACtB,aAAK,kBAAkB;AACvB,qBAAa,GAAG,IAAI,KAAK,YAAY,QAAQ;AACzC,wBAAc,KAAK,YAAY;AAE/B,cAAI,CAAE,kBAAiB;AACnB,kBAAM,IAAI,UAAU,8EACI,KAAK,iBAChB,0CACU,MAAM;;AAEjC,eAAK,WAAW,KAAK,MAAM;AAC3B,eAAK,gBAAgB,KAAK,MAAM;AAChC,eAAK,eAAe,KAAK,MAAM;;AAEnC,4BAAoB,KAAK;AACrB,eAAK,YAAY,KAAK,MAAM;;AAEhC,aAAK,sBAAsB,KAAK,OAAO,IAAI,OAAK,EAAE;AAClD,aAAK,uBAAuB,KAAK,QAAQ,IAAI,OAAK,EAAE;AAMpD,4BAAoB;AAEpB,6BAAqB;AACrB,6BAAqB;AAErB,+BAAuB;AACvB,6BAAqB;AACrB,uCAA+B;AAmB/B,gCAAwB;AACpB,cAAI,SAAS,QAAQ,aAAa,QAAQ,eAAe;AACrD,oBAAQ,QAAO;AACf,wBAAY,QAAO;AACnB,0BAAc,QAAO;;AAEzB,uBAAa,MAAM,aAAa;AAEhC,cAAI,iBAAgB,QAAQ,UAAU;AAClC,kBAAM,IAAI,aAAa,cAAc,QAAO,kBAAkB,MAAM;;AAIxE,cAAI,eAAc,QAAQ,UAAU;AAChC;;AAGJ,eAAK,eAAe,IAAI,UAAU,QAAQ,OAAO;AAEjD,cAAI,CAAE,OAAM,MAAM;AACd,yBAAa,MAAM,MAAM,OAAO,KAAK,cAAc;;AAEvD,cAAI,iBAAgB,QAAQ,UAAU;AAClC,6BAAgB,KAAK;;AAGzB,mCAAyB,KAAK,cAAc;AAC5C,uBAAa,GAAG,IAAI,kBAAkB;AAClC,sBAAU,KAAK,aAAa;AAC5B,2BAAc,KAAK,cAAc;AACjC,+BAAkB,KAAK,YAAY;AACnC,iCAAoB,KAAK,cAAc;AACvC,4BAAgB,GAAG,gBAAe,kBAAiB,QAAO,YAAW;;AAEzE,yBAAc,KAAK;AACnB,iBAAO,iBAAgB,QAAQ,SAAS;AACpC,6BAAgB,OAAO,iBAAgB,QAAQ,OAAO;;AAE1D,iCAAuB,KAAK;;AAEhC,8BAAsB;AACtB,gCAAwB;AACxB,wBAAgB,KAAK;AACjB,0BAAgB,GAAG,eAAe;;AAEtC,+CAAuC,uBAAuB,QAAQ;AACtE,2BAAmB;AACf,uBAAa,KAAK,MAAM;AAExB,cAAI,CAAE,MAAK,MAAM;AACb,wBAAY,KAAK,MAAM;;AAE
3B,sBAAY,YAAY,KAAK;AAE7B,gCAAuB,aAAa,KAAK,cAAc,OAAO,OAC1D,IACA,aAAa,KAAK,cAAc;AAMpC,kBAAQ,KAAK,IAAI,OAAO;AACxB,uBAAa,KAAK,cAAc,MAAM;AACtC,yBAAe,KAAK,cAAc,MAAM,KAAK;AAC7C,sBAAY,KAAK,MAAM;AAEvB,uBAAa,GAAG,IAAI,KAAK,cAAc,QAAQ;AAC3C,iCAAqB,KAAK,cAAc;AACxC,8BAAkB,KAAK,YAAY;AACnC,gCAAoB,aAAa,aAAa;AAC9C,mCAAuB,YAAY,YAAY,OAAO,OAAO,IACzD,YAAY,YAAY;AAC5B,wBAAY,YAAY,MAAM,KAAK,IAAI,QAAQ,GAAG;AAClD,yBAAa,YAAY,MAAM;;;AAIvC,6BAAqB;AACrB,6BAAqB;AACjB,wBAAc,YAAY;AAC1B,cAAI,CAAE,UAAS;AACX,yBAAa,SAAS;;AAE1B,uBAAa,OAAO,KAAK,aAAa;;AAG1C,8BAAsB;AACtB,8BAAsB;AAClB,wBAAc,aAAa;AAC3B,cAAI,CAAE,UAAS;AACX,0BAAc,SAAS;;AAE3B,wBAAc,OAAO,KAAK,eAAe;;AAG7C,wBAAgB,OAAO,KAAK,eACvB,IAAI,OAAK,SAAS,GAAG,KACrB,KAAK;AAEV,aAAK,SAAS;AACd,4BAAoB;AAChB,iCAAuB,cAAc;AAGrC,yBAAe,KAAK;AAChB,2BAAe,aAAa,EAAE;AAC9B,2BAAe,aAAa,EAAE;AAC9B,gBAAI,SAAS;AACT,qBAAO;;AAEX,gBAAI,SAAS;AACT,qBAAO;;AAEX,mBAAO;;AAEX,8BAAoB;AAChB,gBAAI,iBAAiB;AACjB,mBAAK,sBAAsB,KAAK;;AAEpC,iBAAK,OAAO,KAAK;;;AAGzB,aAAK,gBAAgB;AAErB,oBAAY,OAAO,KAAK,cACnB,IAAI,OAAK,SAAS,GAAG,KACrB,KAAK;AAIV,kCAA0B,KAAK,OAAO;AAEtC,wCAAgC;AAChC,4BAAoB;AAChB,6BAAmB,aAAa;AAC5B,0BAAc,KAAK;AACnB,gBAAI,SAAS;AACT,8BAAgB,KAAK;AACjB,oBAAI,kBAAkB,QAAQ,OAAO;AACjC,wBAAM,IAAI,aAAa,sDAAsD,eAC3D,MAAM,qEAEV;;;AAGtB,8BAAgB,KAAK;AACjB,kCAAkB,KAAK;;AAE3B,sCAAwB,KAAK,MAAM;;;;AAK/C,aAAK,eAAe;AAGpB,yBAAiB,KAAK,OAAO,IAAI,OAAK,EAAE;AACxC,2BAAmB;AACf,iCAAuB,SAAS,OAAO,OAAK,MAAM,MAAM;AACxD,cAAI,mBAAmB;AACnB,kBAAM,IAAI,aAAa,aAAa,iBAAiB,uFAEjD,KAAK,UAAU;;;AAO3B,aAAK,gBAAgB;AAErB,aAAK,eAAe;AAIpB,YAAI,KAAK;UACL,eAAe;UACf,eAAe;UACf,aAAa;UACb,eAAe;UACf,cAAc,KAAK;UACnB,eAAe,KAAK;UACpB,YAAY,KAAK,OAAO,IAAI,OAAK;UACjC,aAAa,KAAK,QAAQ,IAAI,OAAK;UACnC,aAAa,KAAK,OAAO,IAAI,OAAK,EAAE;UACpC,cAAc,KAAK,QAAQ,IAAI,OAAK,EAAE;;AAE1C,aAAK,QAAQ;AACb,aAAK,YAAY;;MAErB;AACI,YAAI,KAAK,cAAc;AACnB,gBAAM,IAAI,MAAM,cAAc,KAAK;;;MA6B3C;AACI,aAAK;AACL,uBAAe,CAAE,sBAAsB,MAAM,sBAAsB;AACnE,YAAI,EAAE,KAAK,cAAc;AACrB,8BAAoB,KAAK;AACrB,mBAAO,wBAAwB,MAAM,UAAU;;AAInD,kCAAwB,KAAK;AACzB,mBAAO,wBAAwB,UAAU,UAAU;;;AAG3D,eAAO,uBAAuB,KAAK;AACnC,eAAO;;UAEP;AACA,eAAO,KAAK;;UAEZ;AACA,aAAK,OAAO,QAAQ;AAEhB,gBAAM,kBACD,QAAQ,OAAK,EAAE,YAAY;;AAEpC,aAAK,aAAa;;UAElB;AAIA,YAAI,KAAK,kBAAkB,SAAS;AAChC,gBAAM,IAAI,WAAW;;AAKzB,YAAI,CAAC,KAAK;AACN,iBAAO;;AAEX,sBAAc;AACd,4BAAoB,KAAK;AACrB,oBAAU,QAAQ,OAAO,MAAM;;AAEnC,eAAO;;UAEP;AACA,wBAAgB;AAChB,4BAAoB,KAAK;AACrB,kBAAQ,KAAK,GAAG,MAAM;;AAE1B,YAAI,CAAC,KAAK;AACN,mCAAyB;AACzB,8BAAoB,KAAK;AACrB,6BAAiB,KAAK,GAAG,MAAM;;AAEnC,iBAAO,iBAAiB,OAAO;;AAEnC,eAAO;;UAEP;AACA,eAAO,KAAK,iBAAiB,OAAO,KAAK;;MAiB7C,8BAA8B;AAC1B,6BAAqB;AACrB,gCAAwB;AACxB,4BAAoB,KAAK;AACrB,+BAAqB,MAAM;AACvB,gBAAI,aAAa,OAAO,iBAAiB;AACrC,oBAAM,IAAI,WAAW,0BAA0B,OAAO;;AAE1D,yBAAa,OAAO,gBAAgB;AACpC;;;AAGR,kCAA0B;AAC1B,2BAAmB;AAIf,8BAAoB;AACpB,cAAI,aAAa,SAAS;AACtB,2BAAe,KAAK,MAAM;AAC1B,qCAAyB,OAAO,MAAM,GAAG,IAAI,OAAO,CAAC,OAAO,OAAO,SAAS;AAC5E,4BAAgB,iBAAiB,KAAK;;AAE1C,cAAI,aAAa,kBAAkB;AAC/B,8BAAkB,KAAK,CAAC,aAAa,gBAAgB,QAAQ;qBAExD;AACL,kBAAM,IAAI,WAAW,gDAAgD;;AAEzE,iBAAO,aAAa;;AAExB,YAAI;AAEA,6BAAmB;AACnB,6BAAmB;AACf,uBAAW,KAAK;;AAEpB,cAAI,WAAW,SAAS;AACpB,kBAAM,IAAI,WAAW,GAAG,WAAW,aAAa,0CACzC;;;AAGf,sBAAc;;MAMlB;AACI,0BAAkB,KAAK;AACvB,4BAAoB;AACpB,oBAAY,eAAe,KAAK;AAChC,oBAAY,YAAY;AACxB,oBAAY,kBAAkB,eAAe;AAG7C,oBAAY,aAAa;AACzB,eAAO;;MAcX,8BAA8B;AAC1B,4BAAoB,oBAAoB,KAAK;AAC7C,eAAO,eAAe,KAAK,UAAU,eAAe;;MAexD;AACI,eAAO,KAAK;AACR,mBAAS,OAAqB;AAC9B,2BAAiB,IAAI;AACrB,uBAAa,GAAG,IAAI,KAAK,OAAO,QAAQ,EAAE;AACtC,qBAAS,IAAI,KAAK,OAAO,IAAI,OAAO;;AAExC,iBAAO,QAAQ,KAAK,SAAS,UAAU;;;MAY/C;AACI,eAAO,KAAK;AACR,mBAAS,OAAqB;AAC9B;AACA,cAAI,QAAQ;AACR,oBAAQ,aAA2B,MAAM,OAAO;;AAGhD,oBAAQ,OAAqB;;AAGjC,iBAAO,K
AAK,iBAAiB,QAAQ,OAAO;;;MAYpD;AACI,4BAAoB,mBAA+B;AACnD,YAAI,YAAY,WAAW,KAAK,YAAY;AACxC,gBAAM,IAAI,WAAW,+BAA+B,yBACnC,KAAK,YAAY;;AAGtC,qCAA6B;AAC7B,qBAAa,GAAG,IAAI,YAAY,QAAQ;AACpC,wBAAc,KAAK,YAAY;AAC/B,8BAAmB,YAAY;AAG/B,2BAAiB,MAAM,OAAO;AAC9B,+BAAqB,YAAY;;AAErC,0BAAkB,OAAO,KAAK,KAAK,cAC9B,IAAI,OAAK,SAAS,GAAG,KACrB,KAAK;AAEV,YAAI,UAAU,SAAS;AACnB,8BAAoB;AAChB,0BAAc,KAAK,aAAa;AAChC,+BAAmB;AAEf,4BAAc,KAAK;AACnB,kBAAI,KAAK,YAAY,IAAI,OAAK,EAAE,IAAI,QAAQ,MAAM,QAAQ;AAEtD;;AAGJ,mCAAoB;AACpB,2BAAa,GAAG,IAAI,KAAK,cAAc,QAAQ;AAC3C,qCAAqB,KAAK,cAAc;AACxC,mCAAkB,KAAK,YAAY;AACnC,oCAAoB,KAAK,cAAc;AACvC,iCAAiB,GAAG,aAAa,QAAQ,cAAa;AACtD,oCAAmB,qBAAqB;AACxC,6BAAY,KAAK;;AAErB,kCAAoB,MAAM,mBAAmB,iBAA+B;AAC5E,oCAAqB,mBAA+B;AACpD,gCAAkB,MAAM,aAAa,QAAQ;AAC7C,2BAAa,GAAG,IAAI,cAAa,QAAQ;AACrC,iCAAiB,GAAG,MAAM,QAAQ,aAAa;AAC/C,qCAAqB,YAAY,cAAa;;;;;AAM9D,6BAAqB;AACrB,gCAAwB;AACxB,qBAAa,GAAG,IAAI,KAAK,aAAa,QAAQ;AAC1C,wBAAc,KAAK,aAAa;AAChC,4BAAkB,KAAK,wBAAwB;AAC/C,8BAAoB,KAAK,0BAA0B;AACnD,2BAAiB,GAAG,MAAM,QAAQ,aAAa;AAC/C,0BAAgB,KAAK;;AAEzB,qBAAa,GAAG,IAAI,gBAAgB,QAAQ;AACxC,sBAAY,gBAAgB;AAC5B,mBAAqB,OAAO;AAC5B,uBAAa,KAAK,qBAAqB;;AAG3C,eAAO,iBAA+B;;MAY1C;AACI,YAAI,SAAS;AACT,kBAAQ,aAA2B,MAAM,OAAO;;AAOpD,0BAAkB;AAClB,qBAAa,GAAG,IAAI,KAAK,OAAO,QAAQ,EAAE;AACtC,oBAAU,KAAK,OAAO;AACtB,oBAAU,OAAO;AACjB,uBAAa,MAAM;AACnB,oBAAU,EAAE,MAAM,CAAC,GAAG;;AAE1B,0BAAkB,OAAO,KAAK,KAAK,cAC9B,IAAI,OAAK,SAAS,GAAG,KACrB,KAAK;AACV,4BAAoB;AAChB,wBAAc,KAAK,aAAa;AAChC,6BAAmB;AAEf,0BAAc,KAAK;AACnB,0CAA8B,KAAK;AACnC,2CAA+B,KAAK;AAIpC,iCAAqB,IAAI;AACzB,4BAAgB;AACZ,kBAAI,EAAE,MAAM;AACR,6BAAa,KAAK,UAAU,EAAE;;;AAGtC,gBAAI,aAAa,WAAW,sBAAsB;AAE9C,2BAAa;AACb;AACA;AACA;AACA;AAEA,kBAAI,KAAK,YAAY;AACjB,yBAAS,KAAK;;AAElB,kBAAI,aAAa,WAAW;AACxB,uDAAuC,aAAa;AACpD,oBAAI,OAAO,WAAW;AAClB,yBAAO,UAAU;;AAErB,iCACI,OAAqB,MAAM,KAAK,gBAAgB;AACpD,+BAAc,OAAqB,MAAM,YAAY,gBAAgB;AACrE,kCAAkB,CAAC;AACnB,gCAAgB,CAAC;;AAGjB,kCAAkB,aAAa,IAAI,OAAK,EAAE;AAC1C,gCAAgB,aAAa,IAAI,OAAK,EAAE;AACxC,oBAAI,OAAO,WAAW;AAClB,yBAAO,UAAU;;AAErB,iCACI,OAAqB,MAAM,KAAK,iBAAiB;AACrD,+BAAc,OAAqB,MAAM,YAAY,iBAAiB;;AAE1E,kBAAI,MAAM;AACN,sBAAM,IAAI,oBAAoB;;AAKlC,2BAAa,GAAG,IAAI,uBAAuB,QAAQ,EAAE;AACjD,0BAAU,uBAAuB;AACjC,0BAAU,eAAc;AACxB,6BAAa,aAAY;AACzB,0BAAU,EAAE,MAAM,CAAC,GAAG;;;;;AAKtC,8BAAsB;AACtB,4BAAoB;AACpB,6BAAqB;AACrB,wBAAgB,KAAK;AACjB,mBAAqB,EAAE,MAAM,WAAW,4BAA4B,EAAE,UAAU,EAAE;AAClF,kCAAuB,UAAU,EAAE;AACnC,uBAAa,KAAK,QAAO;AACzB,wBAAc,KAAK;AACnB,sBAAY,KAAK;;AAGrB,eAAO,CAAC,eAAe,aAAa;;MAUxC;AACI,kCAA0B;AAC1B;AACA,4BAAoB,KAAK;AACrB,sBAAY,iBAAiB,YAAY,IAAI;AAC7C,uCAA6B,GAAG,oBAAoB,MAAM,aAAa,QAAQ;AAC3E,4BAAgB,UAAU,QAAQ,OAAO;AACzC,gBAAI,KAAK,eAAe,IAAI;AAExB,gCAAkB,WAAW;AAC7B,2BAAa;;;;AAIzB,eAAO;;MAqBX;AACI,YAAI,UAAS;AACT,cAAI,KAAK,OAAO,UAAU;AACtB,kBAAM,IAAI,WAAW,wCAAwC,8BAClD,KAAK,OAAO;;AAGvB,mBAAO,KAAK,OAAO;;;AAIvB,cAAI,QAAQ;AACR,kBAAM,IAAI,WAAW;;;AAG7B,4BAAoB,KAAK;AACrB,cAAI,MAAM,SAAS;AACf,mBAAO;;;AAGf,cAAM,IAAI,WAAW,kBAAkB;;MAO3C;AAKI,eAAO,KAAK;AACR,0BAAe;AACf,8BAAoB,KAAK;AACrB,iCAAqB,GAAG,YAAY,MAAM,aAAa,QAAQ,EAAE;AAC7D,8BAAgB,UAAU,QAAQ,OAAO;AACzC,kBAAI,KAAK,eAAe,IAAI;AACxB,wBAAO,KAAK,GAAG,MAAM;;;;AAKjC,iBAAO;;;MAGf;AACI,wBAAe,CAAE,MAAM,KAAK;AAI5B,kCAA0B,KAAK,uBAAuB,KAAK;AAE3D,6BAAqB;AACrB,4BAAoB,KAAK;AACrB,iCAAuB,MAAM;AAC7B,8BAAoB,MAAM;AAC1B,uCAA6B;AAC7B,uCAA6B,GAAG,oBAAoB,MAAM,aAAa,QAAQ;AAC3E,yBAAa,MAAM,aAAa;AAChC,4BAAgB,UAAU,QAAQ,OAAO;AACzC,yBAAa;AACb,gBAAI,KAAK,eAAe,IAAI;AAGxB,kBAAI,KAAK;AACL;AACI,uBAAK,UAAU,KAAK;AACpB,2BAAS,KAAK;;AAGd,0BAAQ,KAAK,SAAS,MAAM,uDAErB,KAAK;AAGZ,2BAAS;;;AAGjB,kBAAI,KAAK,cAAc,SAAS;AAC5B,iCAAiB;AACjB,6BAAa,GAAG,IAAI,KAAK,cAAc,QAAQ;AAC3C,uCAAqB,KAAK,cAAc;AACxC,oCAAkB,KAAK,YAAY;A
ACnC,sCAAoB,KAAK,cAAc;AACvC,mCAAgB,UAAU,QAAQ,cAAc;AAChD,qCAAmB,kBAAkB;AACrC,sBAAI,gBAAgB;AAChB,mCAAe;;AAEnB,2BAAS,KAAK,CAAC,aAAa,MAAM,cAAc,aAAa;;AAEjE,qCAAqB,KAAK;;;;AAItC,uBAAa;AACb,eAAK,UAAU,MAAM;AACrB,eAAK,eAAe;AACpB,eAAK,YAAY;AACjB,eAAK,kBAAkB;AACvB,uBAAa,KAAK;;AAEtB,gBAAO,YAAY;AAEnB,4BAAoB;AACpB,qBAAa,GAAG,IAAI,KAAK,YAAY,QAAQ;AACzC,wBAAc,KAAK,YAAY;AAC/B,4BAAkB,KAAK,uBAAuB;AAC9C,0BAAgB,UAAU,QAAQ,OAAO;AACzC,cAAI,CAAC,KAAK,eAAe,IAAI;AACzB;;AAEJ,6BAAmB,kBAAkB;AACrC,cAAI,iBAAiB,QAAQ,iBAAiB;AAC1C,2BAAe;;AAEnB,8BAAoB,KAAK,yBAAyB;AAClD,sBAAY,KAAK,CAAC,MAAM,MAAM,cAAc;;AAEhD,gBAAO,iBAAiB;AACxB,6BAAqB;AACrB,qBAAa,GAAG,IAAI,KAAK,aAAa,QAAQ;AAC1C,wBAAc,KAAK,aAAa;AAChC,4BAAkB,KAAK,wBAAwB;AAC/C,0BAAgB,UAAU,QAAQ,OAAO;AACzC,cAAI,CAAC,KAAK,eAAe,IAAI;AACzB;;AAEJ,6BAAmB,kBAAkB;AACrC,cAAI,iBAAiB,QAAQ,iBAAiB;AAC1C,2BAAe;;AAEnB,8BAAoB,KAAK,0BAA0B;AACnD,uBAAa,KAAK,CAAC,MAAM,MAAM,cAAc;;AAEjD,gBAAO,kBAAkB;AACzB,eAAO;;aAeJ,yCAAwC,qBAAqB;AAGhE,8BAAsB;AAMtB,iCAAyB;AACzB;AACI,cAAI,CAAE,OAAM,QAAQ;AAChB,6BAAiB,MAAM,QAAQ,CAAC;;AAGhC,6BAAiB,MAAM,MAAM,KAAK;;;AAG1C;AACI,gCAAqB;AACrB;AACA,kCAAwB;AACpB,qCAAyB,UAAU;AACnC,qCAAyB,UAAU;AACnC,uCAA2B,UAAU;AACrC,qBAAS,UAAU,MAAM,OACrB,KACA,UAAU;AACd,gBAAI,CAAE,qBAAoB;AACtB,iCAAmB,OAAO;AAC1B;;AAEJ,iCAAqB,cAAc;AACnC,gBAAI,aAAa,aAAa,UAAU;AACpC,iCAAmB,OAAO;AAC1B;;AAEJ,gCAAoB,aAAa,aAAa;AAC9C,0BAAa,KAAK,YAAY,cAAc;;AAKhD,cAAI,cAAa,SAAS;AACtB,kBAAM,MAAM,iBAA+B,gBAAe;;;AASlE;AACI,4BAAkB,UAAU;AAE5B,wBAAc,YAAiB,WAAW,QAAO,oBAAoB,OACjE,QAAO,mBACP;AACJ,gBAAM,6BAA6B;AACnC,wBAAc,aAAa;AAE3B,mCAAyB,UAAU;AACnC,2BAAiB,QAAQ;AACrB,gBAAI,CAAE,qBAAoB;AACtB,oBAAM,IAAI,WAAW,yDAAyD;;AAMlF,+BAAmB,OAAO;;;AAIlC,qBAAa,QAAO;AACpB,iCAAyB,QAAO;AAChC,gCAAwB;AACpB,uBAAa;;AAMjB,eAAO,CAAC,cAA4B;AAChC,kCAAwB;AACpB,0BAAc,cAAc,UAAU;AACtC,gBAAI,MAAM,QAAQ;AACd,sDAAwC,iBAAiB,MAAM;AAC/D,qBAAO,iBAAiB,MAAM;AAC9B,qCAAuB;AACnB,4BAAY,OAAO;;;;;AAKnC,6BAAqB;AACrB,8BAAsB;AACtB,sCAA8B,QAAO;AACrC,gCAAwB;AACpB,4BAAkB,UAAU;AAC5B,4BAAkB,UAAU;AAC5B,8BAAoB,UAAU;AAC9B,mBAAqB,aAAa;AAClC,wBAAc,cAAc;AAC5B,qCAA2B,MAAM,aAAa,WAAW;AACzD,uBAAa,KAAK,mBAAmB;;AAEzC,uCAA+B,QAAO;AACtC,gCAAwB;AACpB,4BAAkB,UAAU;AAC5B,4BAAkB,UAAU;AAC5B,8BAAoB,UAAU;AAC9B,mBAAqB,aAAa;AAClC,wBAAc,cAAc;AAC5B,qCAA2B,MAAM,aAAa,WAAW;AACzD,wBAAc,KAAK,mBAAmB;;AAE1C,eAAO,IAAI,IAAI,CAAE,QAAQ,cAAc,SAAS,eAAe;;UAQ/D;AAGA,YAAI,KAAK;AACL,gBAAM,IAAI,WAAW;;AAIzB,4BAAoB,KAAK;AACrB,cAAI,MAAM;AACN,mBAAO;;;AAGf,eAAO;;MAQX;AACI,aAAK;AACD,eAAK,OAAO,QAAQ;AAEhB,gBAAI,MAAM;AACN,oBAAM;;;;;;AC5mC1B;;;;;;;;;AAUA;AACI,yBAAmB,YAAY;AAC/B,UAAI,WAAW,QAAS,MAAM,QAAQ,YAAY,QAAQ,WAAW;AACjE,eAAO,YAAY,IAAI,UAAQ;;AAEnC,UAAI,eAAe;AACf,YAAI,MAAM,QAAQ,YAAY,QAAQ,WAAW;AAC7C,iBAAO;mBAEF,OAAO,YAAY,YAAY,YAAY,MAAM;AACtD,iBAAO,CAAC,QAAQ,YAAY;;AAG5B,iBAAO,CAAC;;;AAGhB,UAAI,MAAM,QAAQ;AACd,YAAI,QAAQ,WAAW;AACnB,gBAAM,IAAI,MAAM,YAAY,6BAA6B,QAAQ,wCAC5B;;AAGzC,eAAO;iBAEF,OAAO,YAAY,YAAY,OAAO,KAAK,SAAS,SAAS,KAClE,OAAO,QAAQ,OAAO,KAAK,SAAS,QAChC;AACJ,uBAAe;AACf,oBAAY,QAAQ;AAChB,cAAI,cAAc;AACd,mBAAO,KAAK,QAAQ;;AAGpB,mBAAO,KAAK;;;AAGpB,eAAO;;AAGP,cAAM,IAAI,MAAM,2BAA2B,2BACjC,2CACH,yCAAyC,8BAChC,8BAA8B,KAAK,UAAU;;;AAgB9D;AACH,aAAO,gCAAgC,aAAa,aAAa;;AAE9D;AACH,aAAO,gCAAgC,aAAa,aAAa;;AAoB9D;AACH,UAAI,gBAAgB,QAAQ,oBAAoB;AAG5C,cAAM,IAAI,MAAM;;AAEpB,UAAI,eAAe;AAEf,yBAAiB,KAAK;AAClB,cAAI,EAAE,MAAM,WAAW;AAEnB,mBAAO,EAAE;qBAEJ,EAAE,MAAM,WAAW;AACxB,gBAAI,EAAE,MAAM,KAAK;AAEb,2BAAa;AACb,qBAAO,EAAE,OAAO;uBAEX,EAAE,MAAM,OAAO;AAEpB,qBAAO,EAAE,QAAQ,CAAC,EAAE,MAAM;;AAG1B,oBAAM,IAAI,MAAM,+CAA+C,EAAE,MAAM;;;AAM3E,kBAAM,IAAI,MAAM,yCAAyC,EAAE;;;AAInE,8BAAsB,MAAM,KAAK,MAAM,SAAS;AAChD,gBAAQ;AACR,kCAA0B;AAC1B,sBAAc,QAAQ;AAClB,cAAI,YAAY,eAAe;AAC3B,kBAAM,IAAI,MAAM
,wEACC;;AAIjB,8BAAkB,KAAK,YAAY;;;AAG3C,eAAO,UAAS,mBAAmB;;AAGnC,eAAO;;;AAUR;AACH,aAAO,KAAI,SAAQ;;ACzJvB;;;;;;;;;AAoBA,0CAAsC;AAetC;AAKI;AACA;AACA,6BAAuB;AACvB,WAAK,eAAe;AACpB,WAAK,eAAe;AACpB,cAAgB,MAAM,QAAQ,MAAM,MAAM,MAAM,mPAIzC;AACP,0BAAoB,0BAA0B,SAAS,OAAM,YAAY;AACzE,0BAAoB,0BAA0B,UAAU,OAAM,aAAa;AAC3E,wBAAkB,YAAY,GAAG,MAAM;AACvC,cAAgB,YAAY,WAAW,OAAM,OAAO,QAAQ,MAAM,mBAAmB,OAAM,OAAO,2CAClF,YAAY,yCACrB,KAAK,UAAU,OAAM;AAC5B,cAAgB,YAAY,WAAW,OAAM,QAAQ,QAAQ,MAAM,mBAAmB,OAAM,QAAQ,4CACpF,YAAY,2CACrB,KAAK,UAAU,OAAM;AAC5B,wBAAkB,GAAG,SAAS,YAAY,QAAQ;AAC9C,gBAAgB,YAAY,QAAQ,MAAM,OAAO,WAAW,MAAM,8BAC3D,OAAM,WAAW,eAAe,YAAY,QAAQ,MAAM,iBAChD,4BAA4B,OAAM,WAAW;;AAElE,wBAAkB,GAAG,SAAS,YAAY,QAAQ;AAC9C,gBAAgB,YAAY,QAAQ,MAAM,OAAO,WAAW,MAAM,+BAC3D,OAAM,YAAY,eAAe,YAAY,QAAQ,MAAM,iBACjD,4BAA4B,OAAM,WAAW;;AAElE,aAAO,CAAE,IAAI,aAAa,IAAI;;AAElC;AACI,UAAI,kBAAkB;AAClB,eAAO,CAAC;iBAEH,MAAM,QAAQ;AACnB,gBAAgB,OAAO,WAAW,MAAM,QAAQ,MAAM,wBAAwB,OAAO,gCAAgC,MAAM,uBAAuB,sBAAsB;AACxK,eAAO;;AAGP,uBAAe;AAEf,2BAAmB;AACf,cAAI,OAAO,SAAS;AAChB,kBAAM,IAAI,WAAW,gEACd,sBAAsB;;AAEjC,iBAAO,KAAK,OAAO;;AAEvB,eAAO;;;AAGf;AACI,UAAI,MAAK,WAAW;AAChB,cAAM,IAAI,oBAAoB;;AAElC,aAAO,CAAE,IAAI,MAAK,IAAI,IAAI,MAAK;;AAE5B;AAKH,iCAA2B,KAAK,mBAAmB;AACnD,cAAgB,OAAM,aAAa,MAAM,MAAM;AAE/C,cAAgB,QAAQ,MAAM,MAAM;AAEpC,cAAgB,KAAK,UAAU,QAAQ,KAAK,SAAS,KAAK,OAAO,UAAU,KAAK,SAAS,MAAM,iFACvE,KAAK;AAC7B,cAAgB,CAAC,sBACZ,KAAK,kBAAkB,KAAK,OAAO,UAAU,KAAK,kBAAmB,MAAM,uGAClC,KAAK;AACnD,cAEA,KAAK,sBAAsB,MAAM,MAAM;AAEvC,UAAI,OAAM;AACN,cAAM,IAAI,MAAM;;AAEpB,aAAM,aAAa;AACnB;AACI,6BAAqB,KAAK,kBAAkB;AAC5C;AACA;AACA,YAAI;AACA,cAAI,gBAAgB,KAAK;AACrB,oBAAgB,KAAK,qBAAqB,QACrC,KAAK,oBAAoB,KACtB,OAAO,UAAU,KAAK,oBAAqB,MAAM,iJAG1C,KAAK;;AAGpB,mCAAuB,gCAAgC,KAAK;AAC5D,oBAAQ,eAAe;AACvB,oBAAQ,eAAe;;;AAG/B,8BAAsB,OAAM;AAC5B,0BAAkB,OAAM;AACxB;AACA,YAAI;AACA,4BACI,UAAU,QAAQ,OAAO,UAAU,IAAI,OAAK,SAAS;;AAGzD,4BAAkB,UAAU;;AAEhC,2BAAkB,qBAAqB,KAAK,WAAW,KAAK;AAC5D,wBAAgB,KAAK,WAAW,OAAO,IAAI,KAAK;AAChD,eAAQ,cAAc,WAAY,mBAAmB,YAAW,SAAS,KAAK,QAAQ,MAAM,MAAM,iBAAiB,SAAS,OAAO,MACnI,cAAc;AACd,qBAAa,SAAS;AACtB,eAAM,UAAU;AAChB,cAAM,aAAa;AACnB,eAAM,gBAAgB;AACtB,oBAAY,KAAK,gBAAgB,OAAO,IAAI,KAAK;AACjD,2BAAmB,MAAM,QAAQ;AACjC,eAAO,QAAQ,KAAK;AAChB,4BAAkB;AAClB,gBAAM,aAAa,aAAa;AAChC,0BAAgB;AAChB,2BAAiB;AACjB,cAAI,CAAC;AACD,2BAAe,MAAM,QAAQ;;AAEjC,iBAAO,qBAAqB,YAAY,KAAK,kBAAkB;AAC3D,gCAAoB,MAAM,aAAa;AAGvC,gBAAI,sBAAsB,YAAY;AAClC,sBAAQ,KAAK,uCACN,KAAK,oEAEL,mJAIA,KAAK,kBAAkB,KAAK;AAGnC;;AAEJ,gBAAI,YAAY,SAAS;AACrB,qBAAQ,IAAI,MAAO,8BAA8B,QAAO,YAAY;AACpE,gCAAkB;AAClB,wBAAU,WAAW;AACrB,wBAAU,UAAU,GAAG,GAAG,MAAM;AAChC,oBAAM,aAAa,aAAa,YAAY;AAC5C,oCAAsB;AACtB,kBAAI,KAAK,eAAe;AACpB,6CAA6B,wBAAwB,KAAK,aAAa,OAAM;AAC7E,6BAAa,GAAG,IAAI,qBAAqB,QAAQ,EAAE;AAC/C,gCAAc,KAAK,MAAM,mBAAmB,GAAG,IAAI,MAAM,qBAAqB;;;AAItF,0BAAY,GAAG,OAAO,IAAI,OAAO;AACjC,2BAAa,cAAc;AAC3B,sBAAY;AACZ,2BAAa,GAAG,IAAI,UAAU,QAAQ,EAAE;AACpC,8BAAc,UAAU;AACxB,4BAAY,KAAK;AACjB,0BAAU,SAAS;AACnB,qBAAS;;AAEb,oBAAM,aAAa,WAAW,YAAY;AAC1C,mCAAqB;AACrB;AACA;;AAEJ,gBAAI,qBAAqB,aAAa,KAAK,kBACvC,YAAY;AAEZ,kBAAI;AACA;AACA,oBAAI,gBAAgB,KAAK;AACrB,4BAAU,OAAO,MAAM,OAAM,gBAAgB,KAAK,gBAAgB,CAAE,SAAS,KAAK;;AAGlF,4BAAU,OAAO,OAAM,SAAS,OAAO,OAAO;oBAC1C,WAAW,KAAK,uBAAuB,OACnC,gCACA,KAAK;oBACT,SAAS;;;AAGjB,6BAAa,GAAG,IAAI,OAAM,aAAa,QAAQ,EAAE;AAC7C,4BAAU,OAAO,OAAM,aAAa,QAAQ,QAAQ;;;AAQ5D;;AAEJ,gBAAI,OAAM;AACN;;;AAGR,gBAAM,aAAa,WAAW,OAAO;AACrC;AACA,cAAI,OAAM;AACN;;;AAGR,cAAM,aAAa;AACnB,cAAM,OAAM,QAAQ;AACpB,eAAO,OAAM;;AAGb,eAAM,aAAa;;;AAI3B;AAEI,0BAAoB;AACpB,UAAI,KAAK,mBAAmB;AACxB,wBAAgB,KAAK;iBAEhB,OAAO,SAAS,QAAQ;AAC7B,wBAAgB,QAAQ;;AAE5B,aAAO;;AAIX;AACI,aAAQ,OAAO,QAAQ,aAAa;;AAIxC;AACI,aAAQ,OAAO,SAAS,
SAAS;;AAE9B;AAKH,aAAO,QAAQ;AACf,yBAAmB,KAAK,WAAW;AACnC,gBAAU,OAAM;AAChB,iBAAW;AACX,UAAI,KAAK,UAAU;AACf,cAAM,IAAI,oBAAoB;;AAElC,cAAgB,CAAC,cAAe,KAAK,UAAU,KAAK,OAAO,UAAU,KAAK,UAAW,MAAM,wEAC3E,KAAK,UAAU,KAAK;AACpC,2BAAqB,qBAAqB,WACtC,UACA,MAAM,QAAQ;AAElB,wBAAkB;AAClB,kBAAY;AACZ,aAAO,aAAa,QAAQ,KAAK,UAAU;AACvC,4BAAoB,MAAM,aAAa;AACvC,eAAO,KAAS;AACZ,cAAI,YAAY;AAGZ,mBAAQ,IAAI,MAAO,8BAA8B,QAAO,YAAY;AACpE,4BAAgB,GAAG,OAAO;AAC1B,8BAAkB,KAAS,MAAM,EAAE;AACnC,oBAAY;AACZ,gBAAI,UAAU;AACV,2BAAa,GAAG,IAAI,UAAU,QAAQ,EAAE;AACpC,qBAAK,KAAK,QAAO;;;AAGzB,8BAAkB,QAAQ,GAAG,MAAM;AACnC,yBAAa,GAAG,IAAI,UAAU,QAAQ,EAAE;AACpC,+BAAiB,UAAU;AAC3B,gCAAkB,KAAK;AACvB,mBAAK,KACD,KAAS,MAAM,MAAQ,KAAK,IAAI,KAAQ,WAAW;AACvD,kBAAI,QAAQ;AACR,wBAAY;;;AAGpB,oBAAY;AACZ,2BAAe;AACf,cAAE;;AAEN,iBAAO;;AAEX,YAAI,YAAY;AACZ,cAAI;AACA,oBAAQ,KAAK,gLAGiB,KAAK;;AAIvC;;;AAGR,mBAAa,GAAG,IAAI,KAAK,QAAQ,EAAE;AAC/B,0BAAkB,KAAK;AACvB,aAAK,KAAK,IAAQ,KAAK,IAAI;AAC3B,gBAAY;;AAEhB,aAAO,iBAAiB;;AC5V5B;;;;;;;;;AAmBO;AACH,cAAgB,YAAY,KAAK,OAAO,UAAU,YAAY,MAAM,2DAA2D;;AAe5H;AACH,UAAI,UAAU;AACV,eAAO,CAAC;iBAEH,MAAM,QAAQ;AACnB,eAAO,OAAO,IAAI,YAAS,oBAAoB,QAAO,OAAO,OAAO;;AAGpE,eAAO,oBAAoB,QAAQ,OAAO,OAAO;;;AAgBlD;AACH,aAAO,KAAS;AACZ,YAAI,UAAU;AACV,iBAAO;mBAEF,MAAM,QAAQ;AACnB,iBAAO,OAAO,IAAI,YAAS,qBAAqB,QAAO;;AAKvD,iBAAO,SAAO,QAAQ,QAAQ,UAAU,UAAU,UAAU,QAAQ;;;;AAYzE;AACH,qBAAe;AACf,uBAAiB;AACjB,qBAAe;AACf,aAAO,aAAa;AAChB,mBAAW,aAAa;AACxB,YAAI,YAAY;AACZ,qBAAW;;AAEf,eAAO,KAAK,CAAC,YAAY;AACzB,qBAAa;;AAEjB,aAAO;;AA6BX;AAII,UAAI,aAAa;AACb,oBAAY;;AAEhB,UAAI,UAAU;AACV,iBAAS;;AAEb,UAAI,aAAW;AACX,oBAAU;;AAEd,UAAI,gBAAgB;AAChB,uBAAe;;AAGnB,yBAAmB;AACnB,UAAI,QAAQ,QAAQ,UAAU;AAC1B,uBAAe;;AAGnB,UAAI,mBAAmB;AACnB,uBAAe;AACf,YAAI,iBAAiB;AACjB,gBAAM,IAAI,WAAW;;;AAI7B,8BAAwB,OAAM,gBAAgB,KAAK,WAAW,eAAe;AAC7E;AACA,UAAI,mBAAmB;AACnB,qBAAa,QAAM,GAAG;;AAE1B,UAAI,WAAW;AACX,kBAAU;;AAEd,aAAQ,cAAc,WAAY,mBAAmB,YAAW,SAAS,QAAQ,cAAc,iBAAiB,eAAe,WAAW,cAAc;AACxJ,mBAAa,SAAS;AACtB,aAAM,UAAU;AAChB,YAAM,aAAa;AACnB,aAAM,gBAAgB;AAGtB,uBAAiB,cAAc,QAAQ,QAAQ,EAAE;AAC7C,cAAM,aAAa,aAAa;AAChC,0BAAkB;AAClB,YAAI,iBAAiB;AACjB,gBAAM,IAAI,oBAAoB;;AAG9B,cAAI,cAAY;AACZ,kBAAM,IAAI,oBAAoB;qBAEzB;AACL,qBAAa;;AAIjB,oCAA0B,UAAS;AACnC,0BAAgB,YAAY,iBAAiB;AAC7C,gCAAsB,GAAG,aAAa,QAAQ,QAAQ,EAAE;AACpD,8BAAkB;AAClB,kBAAM,aAAa,aAAa,YAAY;AAC5C,iBAAS;AACL,iCAAmB,QAAQ,YAAY;AACvC,+BAAiB,QAAQ,YAAY;AACrC,+BAAiB,oBAAoB,mBAAmB,YAAY,WAAW;AAC/E,wBAAU,WAAW;AACrB,wBAAU,UAAU,WAAW;AAG/B,+BAAiB,qBAAqB,KAAK;AAC3C,2BAAa,EAAE;AACf,2BAAa,GAAG,IAAI,UAAU,QAAQ,EAAE;AACpC,8BAAc,UAAU;AACxB,4BAAY,KAAK;AACjB,0BAAU,SAAS;AACnB,qBAAS;;AAGb,kBAAI,eAAe,QAAQ,SAAS;AAChC,oBAAI;AACA,kCAAgB,OAAM,SAAS,MAAM,QAAQ;AAE7C,+BAAa,GAAG,IAAI,UAAU,QAAQ,EAAE;AACpC,kCAAc,UAAU;AACxB,gCAAY,QAAQ;AACpB,yBAAS;AAET,8BAAU,SAAS,SAAS;;;;;AAK5C,kBAAM,aAAa,WAAW,YAAY;AAC1C,iCAAqB;AACrB,gBAAI,OAAM;AACN;;;AAIR,4BAAkB;;AAGtB,cAAM,aAAa,WAAW,OAAO;AACrC,YAAI,OAAM;AACN;;;AAGR,YAAM,aAAa;AACnB,YAAM,OAAM,QAAQ;AACpB,aAAO,OAAM;;AAEV,mDAGa;AAChB,UAAI,OAAM;AACN,cAAM,IAAI,MAAM;;AAEpB,aAAM,aAAa;AACnB;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACI,0BAAkB,KAAK,aAAa,OAAO,KAAK,KAAK;AACrD,uBAAe;AAGf,+BAAuB;AACvB,iCAAyB,MAAM,OAAM,oBAAoB,GAAG,GAAG,KAAK,cAAc,KAAK,aAAa,gBAAgB;AACpH,iBAAS,iBAAiB;AAC1B,kBAAU,iBAAiB;AAC3B,wBAAgB,iBAAiB;AAEjC,2BAAmB;AACnB;AACA,YAAI,KAAK,kBAAkB,QAAQ,KAAK,eAAe,SAAS;AAC5D,yBAAe;AACf,cAAI,KAAK,eAAe,WAAW;AAE/B,wBAAY,KAAK,eAAe;AAChC,wBAAY,KAAK,eAAe;qBAE3B,KAAK,eAAe,WAAW;AACpC,kBAAM,IAAI,oBAAoB;;AAG9B,kBAAM,IAAI,WAAW,0GAEd,KAAK;;AAEhB,kCAAuB;AACvB,kCAAwB,MAAM,OAAM,oBAAoB,WAAW,WAAW,MAAoC,MAAmC,iBAAgB;AACrK,iBAAO,gBAAgB;AACvB,iBAAO,gBAAgB;AACvB,mBAAS,KAAK,OAAO;mBAGhB,KAAK,mBAAmB,QAAQ,KAAK,kBAAkB,KAC5D,KAAK
,kBAAkB;AACvB,yBAAe;AAEf,0BAAgB,KAAK,MAAM,OAAO,GAAG,MAAM,KAAM,KAAI,KAAK;AAC1D,oCAA0B,OAAO,GAAG,MAAM;AAC1C,iBAAO,YAAY,QAAQ,SAAS;AACpC,mBAAS,YAAY,QAAQ,GAAG;AAChC,iBAAO,YAAY,SAAS,SAAS;AACrC,oBAAU,YAAY,SAAS,GAAG;AAGlC,mBAAS,KAAK,OAAO;mBAGhB,KAAK,mBAAmB;AAC7B,yBAAe;;AAGnB,oBAAY,OAAO,OAAO,SAAS,OAAO;AAC1C,eAAM;AAYN,8BAAsB,OAAM;AAC5B,0BAAkB,OAAM;AACxB;AACA;AACA,YAAI;AACA,iBAAM;AACN,wBAAc,OAAM;AACpB,4BACI,UAAU,QAAQ,OAAO,UAAU,IAAI,OAAK,SAAS;;AAGzD,wBAAc;AACd,mBAAS;AACT,4BAAkB,UAAU;;AAEhC,2BAAkB,qBAAqB,KAAK,WAAW,KAAK;AAC5D,oBAAY,MAAM,QAAQ,QAAO,eAAe,KAAK,WAAW,WAAW,KAAK,QAAQ,KAAK,SAAS,YAAW,aAAa,QAAQ,KAAK,SAAS,iBAAiB,KAAK,cAAc,MAAM;AAC9L,eAAO;;AAGP,eAAM,aAAa;AAEnB,0BAAkB,QAAQ;AAC1B,0BAAkB,SAAS;AAC3B,0BAAkB,MAAM;AACxB,0BAAkB,MAAM;AACxB,YAAI,iBAAiB;AACjB,kBAAY;;;;AAWjB;AACH,mBAAa;AACb,UAAI,mBAAmB;AACnB,kBAAU,CAAC;;AAGf,mBAAa,GAAG,IAAI,QAAQ,QAAQ,EAAE;AAClC,wBAAe,QAAQ;AACvB,YAAI,QAAO,SAAS;AAChB,eAAK,KAAK,aAAW,SAAQ;mBAExB,QAAO,SAAS;AACrB,gBAAM,IAAI,MAAM;;AAIhB,eAAK,KAAK;;;AAGlB,aAAO;;AAcJ;AACH,UAAI,WAAW;AACX;;AAEJ,2BAAqB;AACrB,UAAI,sBAAsB;AACtB,qBAAa,KAAK,WAAW;iBAExB,MAAM,QAAQ;AACnB,mBAAW,QAAQ,OAAK,aAAa,KAAK,EAAE;iBAEvC,cAAc;AAEnB,2BAAmB;AACf,4BAAkB,WAAW;AAC7B,uBAAa,KAAK,UAAU;;;AAGpC,+BAAyB;AACzB,UAAI,mBAAmB;AACnB,YAAI,aAAa,QAAQ,QAAQ,QAAQ;AACrC,2BAAiB,KAAK;;iBAGrB,MAAM,QAAQ;AACnB,gBAAQ,QAAQ;AACZ,cAAI,aAAa,QAAQ,EAAE,QAAQ;AAC/B,6BAAiB,KAAK;;;iBAIzB,WAAW;AAEhB,2BAAmB;AACf,0BAAe,QAAQ;AACvB,cAAI,aAAa,QAAQ,QAAO,QAAQ;AACpC,6BAAiB,KAAK;;;;AAIlC,uBAAiB,QAAQ;AACrB,YAAI,CAAC,EAAE;AACH,YAAE;;;;AChbd;;;;;;;;;AAiCO;AACH,aAAO,aAAa;;AAKjB;AACH,aAAO,MAAM,QAAQ;;AAKlB;AACH,aAAO,CAAC,aAAa,MAAM,CAAC,YAAY;;AAarC,yEAAoE,wBAAwB;AAC/F,UAAI,SAAS,QAAQ,MAAM,WAAW;AAGlC,YAAI,SAAQ;AACR,kCAAwB;AACxB,cAAI,YAAY,UAAS,MAAK,SAAS;AACnC,gCAAoB;qBAEf,WAAW;AAChB,8BAAkB;AACd,kBAAI,MAAK,eAAe;AACpB,oCAAoB;AACpB;;;;AAMR,gCAAoB;;AAExB,cAAI;AACA,kBAAM,IAAI,WAAW,6BAA6B,6CACnC;;;AAGvB,eAAO;;AAEX,UAAI,SAAQ;AACR,eAAO,MAAM,IAAI,UAAQ;;AAE7B;AACA,UAAI,WAAW;AACX,gBAAO;AACP,iBAAS;AACT,2BAAmB;AACf,cAAI,MAAK,SAAS;AACd,kBAAM,IAAI,WAAW,yBAAyB,qCACvC;;AAEX,iBAAO,KAAK,MAAK;;iBAGhB,YAAY;AACjB,gBAAO;AACP,YAAI,MAAK,WAAW,MAAM;AACtB,gBAAM,IAAI,WAAW,6BAA6B,gIAEX,MAAM,sEACO;;AAExD,iBAAS;;AAGT,gBAAO;AACP,YAAI,MAAM,SAAS;AACf,gBAAM,IAAI,WAAW,aAAa,2BAA2B,MAAM,4EACL,MAAK;;AAEvE,iBAAS,CAAC;;AAEd,eAAS,2BAA2B;AAEpC,UAAI,UAAU;AACV,qBAAa,GAAG,IAAI,MAAM,QAAQ,EAAE;AAChC,cAAI,OAAO,MAAM;AACb;;AAEJ,yBAAc,OAAO;AACrB,cAAI,OAAM,MAAM,WAAW,OAAO,GAAG;AACjC,kBAAM,IAAI,WAAW,uBAAuB,6BAA6B,MAAM,cAChE,OAAO,GAAG,iDACZ,OAAM;;AAEvB,uBAAa,GAAG,IAAI,OAAO,GAAG,QAAQ,EAAE;AACpC,gBAAI,MAAM,KAAK,CAAC;AAEZ;;AAEJ,wBAAY,OAAM,MAAM;AACxB,2BAAe,OAAO,GAAG;AACzB,gBAAI,UAAU,QAAQ,UAAU,KAAK,QAAQ;AACzC,oBAAM,IAAI,WAAW,uBAAuB,6BAA6B,MAAM,qBACzD,OAAO,kCACrB,OAAM;;;;;AAK9B,aAAO;;AASJ;AACH,mBAAa,SAAO,OAAO,IAAI,YAAS,OAAM,MAAM;AACpD,WAAK;AACL,mBAAa,SAAO,QAAQ,IAAI,YAAU,OAAO,MAAM;AACvD,WAAK;AAEL,UAAI,KAAK,SAAS;AACd,cAAM,IAAI,WAAW,mFAEd,KAAK,UAAU,OAAO,IAAI,YAAS,OAAM;;AAEpD,UAAI,KAAK,SAAS;AACd,cAAM,IAAI,WAAW,oFAEd,KAAK,UAAU,QAAQ,IAAI,YAAU,OAAO;;AAEvD,UAAI,KAAK,SAAS,KAAK,KAAK,SAAS,KAAK,CAAC,aAAiB,MAAM;AAC9D,cAAM,IAAI,WAAW,iFACC,KAAK,0BAA0B,KAAK;;;AAalE;AAEI,wBAAkB;QACd;QAAyB;QACzB;;AAEJ,mBAAa,GAAG,IAAI,QAAQ,QAAQ,EAAE;AAClC,kBAAU,QAAQ;AAClB,qBAAa,QAAQ;AACrB,sBAAc,aAAa;AAC3B,YAAI,QAAQ;AACR;;AAEJ,YAAI,SAAS;AACT,cAAI,EAAE,MAAM,EAAE,MAAM,SAAS,OAAO;AAChC,kBAAM,IAAI,WAAW,2CAA2C,EAAE;;;AAO1E,YAAI,UAAU,QAAQ,UAAU;AAC5B,+BAAqB,EAAE,MAAM,MAAM;AACnC,8BAAoB,MAAM,MAAM;AAChC,uBAAa,GAAG,IAAI,aAAa,QAAQ,EAAE;AACvC,8BAAkB,aAAa;AAC/B,2BAAe,YAAY;AAC3B,gBAAI,UAAU,QAAQ,cAAc;AAChC,oBAAM,IAAI,WAAW,8BAA8B,EAAE,2CAC9B;;;;;;AAiC3C,mEAA8D,wBAAwB;AAClF;AACA,UAAI,MA
[remainder of diff: regenerated dist bundle and source-map "mappings" data (machine-generated base64 VLQ), not human-readable]
;AAC3C,eAAO;;MAEX;AACI,eAAO,KAAK;AACR,eAAK,eAAe,QAAQ;AAE5B,yBAAc,oBAAoB;AAClC,sCAA4B,2BAA2B,KAAK,WAAW;AACvE;AACA,cAAI,uBAAuB;AACvB,qBAAS,MAAM,QAAO,KAAK,OAAO,QAAQ,qBAAqB,KAAK,OAAO,KAAK,KAAK,SAAS;;AAG9F,qBAAS,MAAM,QAAO,KAAK,OAAO;AAClC,gBAAI,KAAK,QAAQ;AACb,uBAAS,QAAU,QAAQ,KAAK,KAAK;;AAEzC,gBAAI,KAAK,cAAc;AACnB,uBAAS,KAAK,WAAW,MAAM;;;AAGvC,iBAAO;;;MAGf;AACI,wBAAe;UACX,OAAO,KAAK;UACZ,YAAY,oBAAoB,KAAK;UACrC,SAAS,KAAK;UACd,mBAAmB,qBAAqB,KAAK;UAC7C,iBAAiB,qBAAqB,KAAK;UAC3C,mBAAmB,qBAAqB,KAAK;UAC7C,iBAAiB,qBAAqB,KAAK;UAC3C,qBAAqB,qBAAqB,KAAK;UAC/C,kBAAkB,oBAAoB,KAAK;UAC3C,gBAAgB,oBAAoB,KAAK;;AAE7C,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;AAIf,UAAM,YAAY;AAClB,kBAA4B;0BACC;MACzB;AACI,eAAO,QAAQ;AACf,cAAM;AACN,aAAK,YAAY,CAAC,CAAE,SAAS;AAC7B,aAAK,aAAa,KAAK;;MAE3B;AACI,qBAAa,mBAAmB;AAChC,0BAAkB,WAAW,MAAM;AAC/B,cAAI,OAAO;AACP,kBAAM,IAAI,WAAW,iEACT,WAAW,MAAM;;;AAKrC,eAAO,CAAC,WAAW,IAAI,UAAU,YAAY;;MAEjD;AACI,eAAO,KAAK;AACR,eAAK,eAAe,QAAQ;AAC5B,uBAAY,oBAAoB;AAChC,cAAI,KAAK,eAAe,mBAAmB,OAAM,OAAO;AACpD,gCAAoB,CAAC;AACrB,yBAAa,GAAG,IAAI,OAAM,MAAM,EAAE;AAC9B,0BAAY,KAAK;;AAErB,wBAAY,KAAK;AACjB,qBAAQ,OAAM,UAAU;;AAE5B,iBAAO,aAAe;;;MAG9B;AACI,wBAAe;AACf,YAAI,KAAK,cAAc;AACnB,kBAAO,gBAAgB,KAAK;;AAEhC,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;AAIf,YAAQ,YAAY;AACpB,kBAA4B;+BACI;MAC5B;AACI,cAAM;AACN,aAAK,kBAAkB;AACvB,aAAK,aAAa,cAAc,KAAK;;MAEzC;AACI,eAAO,KAAK;AACR,eAAK,eAAe,QAAQ;AAC5B,yBAAc,oBAAoB;AAClC,iBAAO,KAAK,WAAW,MAAM;;;MAGrC;AACI,wBAAe,CAAE,YAAY,oBAAoB,KAAK;AACtD,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;AAIf,iBAAW,YAAY;AACvB,kBAA4B;+BACM;MAC9B;AACI,cAAM;AACN,aAAK,IAAI,KAAK;AACd,aAAK,YAAY,CAAC,CAAE,MAAM;;MAE9B;AACI,eAAO,CAAC,WAAW,IAAI,KAAK,GAAG,WAAW;;MAE9C;AACI,eAAO,KAAK;AACR,mBAAS,oBAAoB;AAC7B,iBAAO,OAAS,QAAQ,KAAK;;;MAGrC;AACI,wBAAe;UACX,GAAG,KAAK;;AAEZ,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;AAIf,iBAAa,YAAY;AACzB,kBAA4B;4BACC;MACzB;AACI,cAAM;AACN,aAAK,cAAc,KAAK;AAExB,qBAAa,GAAG,IAAI,KAAK,YAAY,QAAQ,EAAE;AAC3C,cAAI,KAAK,UAAU,KAAK,YAAY;AAChC,iBAAK,YAAY,KAAK;;;;MAIlC;AACI,eAAO,MAAM,KAAK,OAAO;;MAgB7B;AACI,yBAAiB;AACjB,2BAAmB,YAAY;AAC/B,oBAAY;AACZ,sBAAc;AACd,qBAAa,GAAG,IAAI,WAAW,QAAQ,EAAE;AACrC,sBAAY,WAAW;AACvB,cAAI,KAAK,UAAU;AACf,gBAAI,YAAY;AACZ,wBAAU;;AAGV,oBAAM,IAAI,WAAW;;;AAIzB,qBAAS;;;AAGjB,6BAAqB,UAAU;AAC/B,YAAI,YAAY;AACZ,cAAI,UAAU,KAAK,eAAe,UAAU;AACxC,kBAAM,IAAI,WAAW;;AAEzB,qBAAW,WAAW,eAAe;mBAEhC,iBAAiB;AACtB,gBAAM,IAAI,WAAW;;AAEzB,eAAO;;MAEX;AACI,6BAAqB;AACrB,qBAAa,GAAG,IAAI,WAAW,QAAQ,EAAE;AACrC,cAAI,KAAK,UAAU,WAAW;AAC1B,6BAAiB;AACjB;;;AAGR,YAAI;AACA,iBAAO,WAAW,MAAM,GAAG,GAAG,OAAO,KAAK;;AAG1C,iBAAO,WAAW,MAAM,GAAG,GAAG,OAAO,KAAK,oBAAoB,WAAW,MAAM,IAAI,KAAK;;;MAGhG;AACI,eAAO,KAAK;AACR,eAAK,eAAe,QAAQ;AAC5B,yBAAc,oBAAoB;AAClC,6BAAmB,OAAM;AACzB,8BAAoB,WAAW,MAAM,GAAG,GAAG,OAAO,KAAK,oBAAoB,WAAW,MAAM,IAAI,KAAK;AACrG,iBAAO,OAAM,QAAQ;;;MAG7B;AACI,wBAAe;UACX,aAAa,KAAK;;AAEtB,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;AAIf,cAAQ,YAAY;AACpB,kBAA4B;0BACC;MACzB;AACI,cAAM;AACN,YAAI,KAAK,QAAQ;AACb,gBAAM,IAAI,MAAM;;AAGpB,YAAI,CAAC,MAAM,QAAQ,KAAK;AACpB,gBAAM,IAAI,MAAM,sEACT,KAAK;;AAGhB,sCAA8B,QAAM,GAAG,KAAK,KAAK,SAAS;AAC1D,YAAI,CAAC,aAAiB,KAAK,KAAK,QAAQ,QAAQ;AAC5C,gBAAM,IAAI,MAAM,iCAAiC,KAAK,UAAU,KAAK,QACjE;;AAER,aAAK,OAAO,KAAK;AACjB,aAAK,qBAAqB,CAAC,GAAG,OAAO,KAAK;AAC1C,aAAK,YAAY,CAAC,IAAI,UAAU,CAAE,MAAM,KAAK,KAAK,SAAS;;MAE/D;AACI,qBAAa,mBAAmB;AAChC,4BAAoB,WAAW;AAC/B,aAAK,KAAK,QAAQ;AACd,sBAAY,IAAI,KAAK,WAAW;;AAEpC,eAAO;;MAEX;AACI,eAAO,WAAU,oBAAoB,SAAS,KAAK;;MAEvD;AACI,wBAAe;UACX,MAAM,KAAK;;AAEf,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;AAIf,YAAQ,YAAY;AACpB,kBAA4B;0BACC;MACzB;AACI,cAAM,QAAQ,OAAO,KAAK;AAC1B,a
AAK,kBAAkB;AACvB,YAAI,QAAQ;AACR,eAAK,YAAY,KAAK,aAAa,OAAO,IAAI,KAAK;;AAGnD,eAAK,YAAY;;;MAGzB;AACI,eAAO;;MAEX;AACI,2BAAmB,MAAM;AACzB,wBAAe,CAAE,WAAW,KAAK;AACjC,eAAO,OAAO,SAAQ;AACtB,eAAO;;MAEX;AACI,uBAAc,oBAAoB;AAClC,qBAAa;AACb,eAAO,IAAI,SAAS,QAAO,KAAK,YAAY;;MAEhD;AACI,eAAO,KAAK;AACR,eAAK,eAAe,QAAQ;AAC5B,yBAAc,oBAAoB;AAClC,uBAAa;AACb,2BAAiB;AACjB,8BAAoB,IAAI,SAAS,QAAO,KAAK,YAAY,MAAM;AAC/D,yBAAe,OAAM,IAAI,YAAY,OAAO,OAAM;AAClD,iBAAO;;;;AAKnB,YAAQ,YAAY;AACpB,kBAA4B;AC3c5B;;;;;;;;;4BAuB+B;MAC3B;AACI,cAAM;AACN,aAAK,aAAa;AAClB,aAAK,iCAAiC;AACtC,YAAI,KAAK,mBAAmB,QAAQ,KAAK,cAAc;AAKnD,0BAAgB;AAChB,cAAI,KAAK,aAAa;AAClB,wBAAY,KAAK;;AAErB,cAAI,KAAK,eAAe;AAGpB,iBAAK,kBAAkB,CAAC,WAAW;;AAKnC,iBAAK,kBACD,CAAC,WAAW,OAAO,OAAqB,KAAK;;;AAGzD,aAAK,WAAW,KAAK;AACrB,8BAAoC,KAAK,UAAU;AACnD,aAAK,YAAY,KAAK;AACtB,8BAAoC,KAAK,WAAW;AACpD,aAAK,wBAAwB,eAAe,KAAK,yBAAyB,KAAK;AAC/E,aAAK,wBAAwB,eAAe,KAAK;AACjD,aAAK,sBAAsB,eAAe,KAAK;AAC/C,aAAK,uBAAuB,cAAc,KAAK;AAC/C,aAAK,WAAW,KAAK;AACrB,aAAK,kBAAkB,KAAK;AAC5B,aAAK,cAAc,KAAK;;MAE5B;AACI,aAAK,aAAa,KAAK,UAAU,cAAc,CAAC,KAAK,UAAU,KAAK,YAAY,KAAK,OAAO,KAAK,uBAAuB,KAAK,uBAAuB,MAAM,KAAK;AAC/J,aAAK,QAAQ;;MAIjB;;MACA;AACI,eAAO,KAAK;AACR,cAAI,CAAC,KAAK;AACN,mBAAO;;AAGP,qBAAS,oBAAoB;AAC7B,mBAAO,SAAS,QAAQ,WAAU;;;;MAI9C;AACI,qBAAa,mBAAmB;AAChC,YAAI,KAAK,eAAe;AACpB,iBAAO,CAAC,GAAG,YAAY,KAAK;;AAGhC,uBAAe,OAAqB,KAAK;AACzC,YAAI,OAAO,WAAW,WAAW,SAAS;AACtC,gBAAM,IAAI,WAAW,oBAAoB,KAAK,mDACjB;;AAG7B,kBAAQ;AACR,uBAAa,GAAG,IAAI,OAAO,QAAQ,EAAE;AACjC,uBAAW,OAAO;AAClB,uBAAW,WAAW,IAAI;AAC1B,gBAAK,MAAM,QAAU,MAAM,QAAU,OAAO;AACxC,oBAAM,IAAI,WAAW,oBAAoB,KAAK,mDACjB;uBAExB,MAAM;AACX,qBAAO,KAAK;;AAEhB;;;AAGR,eAAO,CAAC,WAAW,IAAI,GAAG,QAAQ,KAAK;;MAE3C;AACI,eAAO,KAAK;AACR,eAAK,eAAe,QAAQ;AAE5B,uBAAY,oBAAoB;AAChC,cAAI,OAAM,UAAU;AAChB,qBAAQ,OAAO,QAAO;;AAE1B,yBAAe,SAAS,KAAK,WAAW,QAAQ,OAAM;AACtD,iBAAO,OAAO,QAAQ,mBAAmB,KAAK,mBAAmB,OAAM;;;MAG/E;AACI,wBAAe;UACX,UAAU,KAAK;UACf,WAAW,KAAK;UAChB,uBAAuB,qBAAqB,KAAK;UACjD,uBAAuB,qBAAqB,KAAK;UACjD,qBAAqB,qBAAqB,KAAK;UAC/C,sBAAsB,oBAAoB,KAAK;UAC/C,UAAU,KAAK;UACf,aAAa,KAAK;;AAEtB,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;AAIf,cAAU,YAAY;AACtB,kBAA4B;ACzI5B;;;;;;;;;wBA0B2B;MACvB;AACI,cAAM,QAAQ;AACd,aAAK,kBAAkB;;MAM3B;AACI,cAAM,IAAI;;MAYd;AACI,YAAI,UAAU,QAAQ,UAAU;AAC5B,iBAAO;mBAEF,OAAO,SAAS,OAAO;AAC5B,iBAAO,KAAK,gCAAgC,QAAQ;mBAE/C,OAAO,WAAW;AACvB,iBAAO;;AAEX,4BAAoB,OAAO,MAAM,GAAG,OAAO,SAAS,OAAO;AAC3D,qBAAa,GAAG,IAAI,OAAO,QAAQ,EAAE;AACjC,oBAAU,OAAO,OAAO,SAAS,OAAO,SAAS;AACjD,oBAAU,OAAO;AACjB,cAAI,KAAK,QAAQ,KAAK,QAAQ,IAAI,KAAK,IAAI;AACvC,wBAAY,KAAK;qBAEZ,MAAM;AACX,wBAAY,KAAK;qBAEZ,MAAM;AACX,wBAAY,KAAK;;AAGjB,gBAAI,MAAM;AACN,oBAAM,IAAI,WAAW,0DACjB,KAAK,UAAU,UAAU,MAAM,KAAK,UAAU;;AAEtD,wBAAY,KAAK;;;AAGzB,eAAO;;MAEX;AAEI,YAAI,MAAM,QAAQ,eAAe,CAAC,MAAM,QAAQ,WAAW;AAEvD,uBAAa,CAAC,mBAAmB;;AAErC,qBAAa;AACb,YAAI,WAAW,SAAS;AACpB,gBAAM,IAAI,WAAW,wEACT,WAAW;;AAI3B,yBAAiB;AACjB,4BAAoB;AAChB,cAAI,SAAS,QAAQ,MAAM,OAAO;AAC9B,uBAAW,KAAK,MAAM;;;AAG9B,qBAAa,SAAqB;AAClC,YAAI,WAAW,SAAS;AACpB,gBAAM,IAAI,WAAW,8EACW,KAAK,UAAU;;AAEnD,0BAAkB,WAAW,MAAM,OAAO,OAAO,WAAW,GAAG,MAAM;AACrE,qBAAa,GAAG,IAAI,WAAW,QAAQ,EAAE;AACrC,wBAAc,WAAW,MAAM,OAAO,OAAO,WAAW,GAAG,MAAM;AACjE,wBAAc,KAAK,gCAAgC,aAAa;;AAIpE,yBAAiB,WAAW,IAAI,WAAS,MAAM;AAC/C,YAAI,WAAW,QAAQ,UAAU,MAC7B,SAAqB,UAAU,WAAW;AAC1C,eAAK,kBAAkB;;AAGvB,eAAK,kBAAkB;;;MAG/B;AACI,eAAO,KAAK;AACR,mBAAS;AACT,cAAI,KAAK;AACL,mCAAuB;AACvB,8BAAkB,OAAO,IAAI,YAAS,OAAM;AAC5C,gBAAI,UAAU,QAAQ,UAAU;AAG5B,8BAAgB,MAAc;AAC9B,4BAAc;AACV,8BAAc,EAAE;AAChB,6BAAa,GAAG,IAAI,UAAU,OAAO,EAAE;AACnC,sBAAI,aAAa,GAAG;;AAExB,+BAAe,KAAK;;AAExB,qBAAO,KAAK,cAAc;;AAK1B,+BAAiB;AACjB,8BAAgB;AACZ,8BAAc,EAAE;AAChB,
oBAAI,SAAS;AACT,iCAAe,EAAE;AACjB,oCAAkB,OAAO;AACzB,mCAAiB,OAAO,MAAM,GAAG,OAAO,CAAC;AACzC,oCAAkB,EAAE,QAAQ,CAAC,WAAW,OAAO,UAAoB,OAAO,MAAM;AAChF,gCAAc,WAAc,aAAa,CAAC,GAAG;AAC7C,gCAAc,YAAY,QAAQ;AAClC,iCAAe,KAAK;AACpB,+BAAa;2BAER,QAAQ;AACb,+BAAa,QAAgB,GAAG,OAAO,OAAO,CAAC;AAC/C,iCAAe,KAAK,WAAc,GAAG;AACrC,+BAAa;;AAIb,iCAAe,KAAK;;;AAG5B,sBAAQ,KAAK,cAAc;AAC3B,4BAAc,EAAE;AAChB,kBAAI;AAGA,oBAAI,SAAS;AACT,iCAAe,EAAE;AACjB,iCAAc,OAAO;AACrB,oCAAkB,OAAO,SAAQ;AACjC,mCAAiB,CAAC,WAAW,OAAO,OAAO,MAAM,GAAG,OAAO,SAAS;AACpE,sBAAI,WAAc,EAAE,QAAQ,CAAC,IAAI,aAAa,CAAC,GAAG,IAC7C,QAAQ;2BAER,QAAQ;AACb,+BAAa,CAAC,QAAQ,GAAG,OAAO,QAAgB,GAAG,QAAQ;AAC3D,sBAAI,WAAc,GAAG;;;AAG7B,qBAAO;;;AAIX,mBAAO,KAAK,cAAc;;;;MAItC;AACI,qBAAa;AACb;AACA,YAAI,WAAW,MAAM;AACjB,wBAAc;;AAGd,wBAAc,WAAW,GAAG,MAAM;;AAEtC,qBAAa,GAAG,IAAI,WAAW,QAAQ,EAAE;AACrC,wBAAc,WAAW,MAAM,OAAO,OAAO,WAAW,GAAG,MAAM;AACjE,wBAAc,KAAK,gCAAgC,aAAa;;AAEpE,yBAAiB;AACjB,4BAAoB;AAChB,cAAI,SAAS,QAAQ,MAAM,OAAO;AAC9B,uBAAW,KAAK,MAAM;;;AAG9B,qBAAa,SAAqB;AAClC,YAAI,WAAW,WAAW;AACtB,wBAAc,WAAW,OAAO;;AAGhC,wBAAc,CAAC,MAAM,OAAO;;AAEhC,eAAO;;MAEX;AACI,eAAO,KAAS;AACZ,cAAI,QAAQ;AACR,mBAAO;;AAEX,cAAI,CAAC,MAAM,QAAQ;AACf,kBAAM,IAAI,WAAW;;AAEzB,cAAI,CAAC,MAAM,QAAQ;AACf,kBAAM,IAAI,WAAW;;AAEzB,cAAI,KAAK,WAAW,OAAO;AACvB,kBAAM,IAAI,WAAW,mGAEb,OAAO,aAAa,KAAK;;AAErC,cAAI,KAAK,MAAM,OAAK,KAAK;AACrB,mBAAO;;AAEX,iBAAO,KAAK,IAAI,OAAK,KAAK,OAAO,IAAI,WAAe,GAAG;AACvD,uBAAa,KAAK;AAClB,uBAAa,GAAG,IAAI,KAAK,SAAS,GAAG,EAAE;AACnC,qBAAS,WAAe,QAAQ,KAAK;;AAEzC,iBAAO;;;;wBAIM;MACrB;AACI,cAAM;;MAEV;AACI,eAAO,KAAK;AACR,uBAAa,OAAO,GAAG;AACvB,uBAAa,GAAG,IAAI,OAAO,QAAQ,EAAE;AACjC,qBAAS,MAAQ,QAAQ,OAAO;;AAEpC,iBAAO;;;;AAKnB,UAAI,YAAY;AAChB,kBAA4B;AA+CrB;AACH,UAAI,MAAM,QAAQ;AACd,sBAAc,IAAI,MAAI;AACtB,eAAO,MAAM,MAAM;;AAGnB,eAAO,IAAI,MAAI;;;6BAGO;MAC1B;AACI,cAAM;;MAEV;AACI,eAAO,KAAK;AACR,uBAAa,OAAO,GAAG;AACvB,uBAAa,GAAG,IAAI,OAAO,QAAQ,EAAE;AACjC,qBAAS,KAAQ,QAAQ,OAAO;;AAEpC,iBAAO;;;;AAKnB,eAAS,YAAY;AACrB,kBAA4B;AA+CrB;AACH,UAAI,MAAM,QAAQ;AACd,sBAAc,IAAI,WAAS;AAC3B,eAAO,MAAM,MAAM;;AAGnB,eAAO,IAAI,WAAS;;;0BAGC;MACzB;AACI,cAAM;;MAEV;AACI,eAAO,KAAK;AACR,uBAAa,OAAO,GAAG;AACvB,uBAAa,GAAG,IAAI,OAAO,QAAQ,EAAE;AACjC,qBAAS,MAAQ,QAAQ,OAAO;;AAEpC,iBAAO,KAAQ,IAAI,OAAO,QAAQ;;;;AAK9C,YAAQ,YAAY;AACpB,kBAA4B;AAgDrB;AACH,UAAI,MAAM,QAAQ;AACd,sBAAc,IAAI,QAAQ;AAC1B,eAAO,MAAM,MAAM;;AAGnB,eAAO,IAAI,QAAQ;;;4BAGE;MACzB;AACI,cAAM;;MAEV;AACI,eAAO,KAAK;AACR,uBAAa,OAAO;AACpB,uBAAa,GAAG,IAAI,OAAO,QAAQ,EAAE;AACjC,qBAAS,QAAY,QAAQ,OAAO;;AAExC,iBAAO;;;;AAKnB,cAAQ,YAAY;AACpB,kBAA4B;AA+CrB;AACH,UAAI,MAAM,QAAQ;AACd,sBAAc,IAAI,UAAQ;AAC1B,eAAO,MAAM,MAAM;;AAGnB,eAAO,IAAI,UAAQ;;;4BAGE;MACzB;AACI,cAAM;;MAEV;AACI,eAAO,KAAK;AACR,uBAAa,OAAO;AACpB,uBAAa,GAAG,IAAI,OAAO,QAAQ,EAAE;AACjC,qBAAS,QAAY,QAAQ,OAAO;;AAExC,iBAAO;;;;AAKnB,cAAQ,YAAY;AACpB,kBAA4B;AA+CrB;AACH,UAAI,MAAM,QAAQ;AACd,sBAAc,IAAI,UAAQ;AAC1B,eAAO,MAAM,MAAM;;AAGnB,eAAO,IAAI,UAAQ;;;8BAGM;MAC7B;AACI,cAAM;AACN,aAAK,eAAe;AACpB,YAAI,QAAQ;AACR,iBAAO;;AAEX,aAAK,OAAO,KAAK,QAAQ,OAAO,KAAK,eAAe,KAAK;AACzD,aAAK,kBAAkB;AACvB,aAAK,kBAAkB;;MAE3B;AAEI,YAAI,CAAE,OAAM,QAAQ,eAAe,MAAM,QAAQ,WAAW,QACxD,WAAW,WAAW;AACtB,gBAAM,IAAI,WAAW;;AAGzB,qBAAa;AACb,2BAAmB;AACnB,4BAAoB;AAChB,cAAI,SAAS;AACT,2BAAe;AACf;;;AAGR,YAAI;AACA;;AAEJ,yBAAiB;AACjB,qBAAa,GAAG,IAAI,WAAW,QAAQ,EAAE;AACrC,yCAA+B,WAAW,GAAG;AAC7C,iCAAuB,OAAO,KAAK,MAAM;AACzC,uBAAa;AACb,8BAAoB;AAChB,gBAAI,aAAiB,OAAO;AACxB,uBAAS;AACT;;;AAGR,cAAI,CAAC;AACD,qBAAS,KAAK;;;AAGtB,YAAI,SAAS,SAAS;AAClB,gBAAM,IAAI,WAAW,8GAEjB,KAAK,UAAU;;;MAG3B;AACI,eAAO,KAAK;AACR,iBAAO,YAAc,QAAQ,KAAK;;;MAG1C;AACI,YAAI,CAAE,OAAM,QAAQ,eAAe,MAAM,QAAQ,WAAW;AACxD,gBAAM,IAAI,WAAW;;AAEzB,4BAAoB;AACpB,4BAAoB,YAAY,GAA
G;AACnC,qBAAa,KAAK,OAAO,IAAI,YAAY,SAAS,KAAK,OAAO,KAAK;AAGnE,4BAAoB,YAAY,MAAM;AAClC,cAAI,YAAY,SAAS,QAAQ,MAAM,SAAS;AAC5C,wBAAY,QAAQ;AACpB;;AAEJ,sBAAY,SAAS,MAAM;;AAE/B,eAAO;;MAEX;AACI,YAAI,QAAQ;AACR,iBAAO;;AAEX,YAAI,CAAC,MAAM,QAAQ;AACf,gBAAM,IAAI,WAAW;;AAEzB,YAAI,CAAC,MAAM,QAAQ;AACf,gBAAM,IAAI,WAAW;;AAEzB,YAAI,KAAK,WAAW,OAAO;AACvB,gBAAM,IAAI,WAAW,mCAAmC,KAAK,qCAC5B,OAAO;;AAE5C,eAAO,KAAS;AACZ,6BAAmB;AACnB,eAAK,QAAQ;AACT,gBAAI,KAAK;AACL,6BAAe;AACf;;;AAGR,cAAI;AACA,mBAAO;;AAEX,8BAAoB;AACpB,uBAAa,GAAG,IAAI,OAAO,QAAQ,EAAE;AACjC,gBAAI,KAAK,MAAM;AAEX,0BAAY,KAAK,UAAa,OAAO,IAAI,OAAO;uBAE3C,KAAK,GAAG,OAAO,OAAO,GAAG;AAE9B,0BAAY,KAAK,WAAe,KAAK,IAAI;;AAGzC,0BAAY,KAAK,KAAK;;;AAG9B,oCAA0B,QAAW,aAAa,KAAK;AACvD,iBAAO,IAAQ,mBAAmB,IAAI;;;MAG9C;AACI,wBAAe;UACX,MAAQ,KAAK;;AAEjB,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;AAIf,gBAAY,YAAY;AACxB,kBAA4B;AAiDrB;AACH,UAAI,MAAM,QAAQ;AACd,sBAAc,IAAI,YAAY;AAC9B,eAAO,MAAM,MAAM;;AAGnB,eAAO,IAAI,YAAY;;;AAY/B;AACI,aAAO,OAAO;AACV,gBAAQ;;AAEZ,aAAO;;AAEX;AACI,UAAI,EAAE,MAAM,SAAS,KAAK,EAAE,MAAM,SAAS;AACvC,cAAM,IAAI,oBAAoB;;AAElC,cAAgB,EAAE,MAAM,UAAU,GAAG,MAAM,uDAC5B,EAAE,MAAM;AACvB,cAAgB,EAAE,MAAM,UAAU,GAAG,MAAM,uDAC5B,EAAE,MAAM;AACvB,UAAI,OAAO,SAAS;AAChB,eAAO,CAAC,MAAM;;AAElB,UAAI,EAAE,UAAU,eAAe,EAAE,UAAU;AACvC,cAAM,IAAI,oBAAoB;;AAElC,oBAAc,EAAE,MAAM;AACtB,oBAAc,EAAE,MAAM;AACtB,UAAI,QAAQ;AAER,eAAO,CAAC,QAAQ,GAAG,QAAQ;;AAE/B,wBAAkB;AAClB,aAAO,KAAS;AACZ;AACA,YAAI,QAAQ;AACR,iBAAO,QAAQ;AACf,4BAAkB;AAClB,uBAAa,GAAG,IAAI,MAAM,EAAE;AACxB,sBAAU,KAAK;;AAEnB,cAAI,EAAE,QAAQ,EAAE,MAAM,OAAO;mBAExB,QAAQ;AACb,iBAAO,QAAQ;AACf,4BAAkB;AAClB,uBAAa,GAAG,IAAI,MAAM,EAAE;AACxB,sBAAU,KAAK;;AAEnB,cAAI,EAAE,QAAQ,EAAE,MAAM,OAAO;;AAG7B,iBAAO;;AAEX;AACA,YAAI,EAAE,MAAM,WAAW,KAAK,EAAE,MAAM,WAAW;AAC3C,cAAI,UAAU,OAAO,UAAU;AAC3B,kBAAM,EAAE,IAAI,GAAG,IAAI,UAAU;;AAG7B,kBAAM,EAAE,UAAU,CAAC,GAAG,IAAI,IAAI,GAAG,IAAI,UAAU;;;AAInD,uBAAa,UAAU,OAAO,EAAE,MAAM,SAAS;AAC/C,uBAAa,UAAU,OAAO,EAAE,MAAM,SAAS;AAC/C,gBAAM,EAAE,OAAO,GAAG,MAAM;;AAE5B,YAAI,OAAO;AACP;AACA,cAAI,QAAQ;AACR,kBAAM,QAAQ,QAAQ;;AAGtB,kBAAM,QAAQ;;AAElB,8BAAoB;AACpB,uBAAa,KAAK,IAAI,MAAM,MAAM,EAAE;AAChC,wBAAY,KAAK;;AAErB,gBAAM,IAAI,QAAQ;;AAEtB,YAAI,IAAI,MAAM,WAAW;AACrB,gBAAM,IAAI,WAAW;;AAEzB,eAAO;;;sBAGU;MACrB;AACI,cAAM;AACN,aAAK,OAAO,KAAK;AACjB,aAAK,YAAY,KAAK,aAAa,OAAO,QAAQ,KAAK;AACvD,aAAK,kBAAkB;AACvB,aAAK,kBAAkB;;MAE3B;AACI,gBAAgB,MAAM,QAAQ,eAAe,WAAW,WAAW,KAC/D,MAAM,QAAQ,WAAW,OAAO,MAAM,QAAQ,WAAW,KAAK,MAAM;AACxE,uBAAe,WAAW;AAC1B,uBAAe,WAAW;AAC1B,YAAI,OAAO,SAAS,KAAK,OAAO,SAAS;AACrC,gBAAM,IAAI,oBAAoB;;AAElC,qBAAa,KAAK,cAAc,QAAQ;AACxC,YAAI,OAAO,KAAK,QAAQ,OAAO,KAAK;AAChC,gBAAM,IAAI,WAAW,8BACd,OAAO,KAAK,WAAW,OAAO,KAAK;;;MAGlD;AACI,YAAI,OAAO,WAAW;AAClB,gBAAM,IAAI,WAAW,oEACD,OAAO;;AAE/B,iBAAS,OAAO;AAChB,iBAAS,OAAO;AAChB;AACA,YAAI,CAAC,MAAM,QAAQ,KAAK;AACpB,iBAAO;YACH,cAAc,KAAK,MAAM,GAAG,MAAM;YAClC,cAAc,KAAK,MAAM,GAAG,MAAM;;;AAItC,iBAAO,KAAK,KAAK,IAAI,aAAa,cAAc,MAAM,OAAO,GAAG,MAAM;;AAE1E,YAAI,KAAK;AACL,eAAK,YAAY,IAAI,KAAK;AAC1B,eAAK,YAAY,IAAI,KAAK;;AAE9B,eAAO,SAAS,IAAI,IAAI;;MAE5B;AACI;AACA,YAAI,CAAC,MAAM,QAAQ,KAAK;AAEpB,iBAAO;YACH,cAAc,KAAK,MAAM,OAAO;YAChC,cAAc,KAAK,MAAM,OAAO;;;AAKpC,iBAAO,KAAK;;AAEhB,eAAO;;MAEX;AACI,gBAAgB,MAAM,QAAQ,eAAe,WAAW,WAAW,KAC/D,MAAM,QAAQ,WAAW,OAAO,MAAM,QAAQ,WAAW,KAAK,MAAM;AACxE,uBAAe,WAAW,GAAG;AAC7B,uBAAe,WAAW,GAAG;AAC7B,YAAI,OAAO,SAAS,KAAK,OAAO,SAAS;AACrC,gBAAM,IAAI,oBAAoB;;AAElC,qBAAa,KAAK,cAAc,QAAQ;AACxC,eAAO,OAAO,KAAK,IAAI;AACvB,eAAO,OAAO,KAAK,IAAI;AACvB,eAAO,OAAO,GAAG;AACjB,4BAAoB,OAAO,OAAO;AAClC,YAAI,YAAY,WAAW;AACvB,sBAAY,KAAK;;AAErB,eAAO;;MAEX;AACI,eAAO;;MAEX;AACI,wBAAe;UACX,MAAQ,KAAK;UACb,WAAa,KAAK;;AAEtB,2BAAmB,MAAM;AACzB,eAAO,OAAO,
SAAQ;AACtB,eAAO;;;AAIf,QAAI,YAAY;AAChB,kBAA4B;ACl9B5B;;;;;;;;;gCAgBmC;MAC/B;AACI,cAAM;AACN,aAAK,kBAAkB;AACvB,aAAK,SAAS,KAAK;;MAEvB;AACI,eAAO;;MAEX;AACI,2BAAmB,MAAM;AACzB,wBAAe,CAAE,QAAQ,KAAK;AAC9B,eAAO,OAAO,SAAQ;AACtB,eAAO;;MAEX;AACI,eAAO,KAAK;AACR,eAAK,eAAe,QAAQ;AAC5B,yBAAc,oBAAoB;AAClC,yBAAe,MAAM,eAAe,OAAM,OAAO,GAAG,KAAK,QAAQ,IAAI;AACrE,yBAAe,aAAe,QAAQ,MAAM,QAAO,OAAO,eAAe;AACzE,iBAAO;;;;AAKnB,kBAAc,YAAY;AAC1B,kBAA4B;kCACS;MACjC;AACI,cAAM;AACN,aAAK,kBAAkB;AACvB,aAAK,OAAO,KAAK;;MAErB;AACI,eAAO;;MAEX;AACI,2BAAmB,MAAM;AACzB,wBAAe,CAAE,MAAM,KAAK;AAC5B,eAAO,OAAO,SAAQ;AACtB,eAAO;;MAEX;AACI,eAAO,KAAK;AACR,eAAK,eAAe,QAAQ;AAC5B,yBAAc,oBAAoB;AAClC,cAAI,KAAK,OAAO,KAAK,KAAK,OAAO;AAC7B,2BAAe;AACX,6BAAe,KAAK,KAAK,KAAK,OAAQ,KAAI,KAAK;AAC/C,qBAAO,OAAM,IAAI,eAAe,OAAM,OAAO,GAAG;;AAEpD,mBAAO,aAAe,QAAQ,MAAM,QAAO,OAAO,eAAe;;AAErE,iBAAO;;;;AAKnB,oBAAgB,YAAY;AAC5B,kBAA4B;+BA8BM;MAC9B;AACI,cAAM;AACN,aAAK,kBAAkB;AACvB,aAAK,OAAO,KAAK;AACjB,aAAK,aAAa,KAAK;;MAE3B;AACI,eAAO,KAAK,cAAc,oBAAoB,QAAQ;;MAE1D;AACI,eAAO;;MAEX;AACI,2BAAmB,MAAM;AACzB,wBAAe,CAAE,MAAM,KAAK;AAC5B,eAAO,OAAO,SAAQ;AACtB,eAAO;;MAEX;AACI,eAAO,KAAK;AACR,cAAI,KAAK,OAAO,KAAK,KAAK,OAAO;AAC7B,+BAAmB,KAAK,eAAe;AACvC,kCAAsB;AAClB,6BAAc,oBAAoB;AAClC,4BAAc;AACd,6BAAc;AACd,6BAAe,CAAC,QAAQ;AACxB,4BAAc,aAAa,cAAc,aAAa,KAAK;AAC3D,wBAAU,OAAO,SAAS;AAE1B,wBAAY,MAAI,KAAK,QAAS,KAAI,KAAK,OAAO,UAAU,OAAO;AAC/D,wBAAU,CAAC,IAAI,SAAS,KAAK;AAE7B,wBAAU,OAAM,IAAI,SAAS,IAAI,QAAQ,IAAI,IAAI,IAAI;AACrD,qBAAO,EAAE,IAAI,GAAG,IAAI;;AAExB,mBAAO,aAAe,eAAe,MAAM,oBAAoB,SAAS,OAAO,eAAe;;AAElG,iBAAO;;;;AAKnB,iBAAa,YAAY;AACzB,kBAA4B;ACvJ5B;;;;;;;;;AAoCO,6EAAsE;AACzE;AACA,UAAI,EAAE,SAAS;AACX,cAAM,YAAgB,GAAG,OAAM,WAAU,MAAM,OAAO;iBAEjD,EAAE,SAAS;AAEhB,cAAM,YAAgB,GAAG,OAAM,WAAU,MAAM,OAAO;iBAEjD,EAAE,SAAS;AAChB,cAAM,YAAgB,GAAG,OAAM,WAAU,MAAM,OAAO;;AAGtD,cAAM,IAAI,oBAAoB,2DAA2D,EAAE;;AAG/F,aAAO;;AAmBX,uFAAkF;AAC9E,aAAO,KAAK;AACR,gCAAwB,QAAY,GAAG;AACvC,sBAAa,gBAAgB;AAC7B,0BAAiB,gBAAgB;AACjC,uBAAe,mBAAmB,GAAG,OAAM,WAAU,MAAM,OAAO;AAClE,eAAO,CAAC,QAAQ,OAAM;;;AAoB9B,yFAAoF;AAChF,aAAO,KAAK;AACR,gCAAwB,QAAY,GAAG;AACvC,sBAAa,gBAAgB;AAC7B,0BAAiB,gBAAgB;AACjC,4BAAoB;AACpB,2BAAmB,QAAiB,GAAG,EAAE;AACrC,cAAI,cAAc,QAAQ,UAAU;AAChC,wBAAY,KAAK;;AAGjB,wBAAY,KAAK,EAAE,MAAM;;;AAGjC,8BAAsB,MAAK,QAAQ;AACnC,kCAA0B,UAAS,QAAQ;AAC3C,+BAAuB,SAAS,OAAO,OAAO,MAAM,QAAQ;AAC5D,8BAAsB,QAAQ,OAAO,OAAO,KAAK,QAAQ;AACzD,uBAAe,mBAAmB,GAAG,eAAe,mBAAmB,eAAe,gBAAgB;AACtG,eAAO,CAAC,QAAQ,OAAM;;;AAcvB,gFAA2E;AAC9E,UAAI,aAAiB,cAAc,QAAQ,QAAQ,QAAiB,GAAG,EAAE,OAAO;AAC5E,eAAO,gCAAgC,GAAG,OAAO,MAAM,eAAe;;AAGtE,eAAO,kCAAkC,GAAG,OAAO,MAAM,eAAe;;;qCAGxC;MACpC;AACI,YAAI,QAAQ;AACR,iBAAO;;AAEX,cAAM;AACN,aAAK,kBAAkB;AACvB,aAAK,OAAO,KAAK,QAAQ,OAAO,KAAK,KAAK;AAC1C,aAAK,WAAW,KAAK,YAAY,OAAO,OAAO,KAAK;AACpD,aAAK,UAAU,KAAK,WAAW,OAAO,OAAO,KAAK;AAClD,aAAK,SAAS,KAAK,UAAU,OAAO,OAAO,KAAK;AAChD,aAAK,QAAQ,KAAK,SAAS,OAAO,OAAO,KAAK;AAC9C,aAAK,kBAAkB,eAAe,KAAK,mBAAmB;AAC9D,aAAK,mBAAmB,eAAe,KAAK,oBAAoB;AAChE,aAAK,wBACD,eAAe,KAAK,yBAAyB;AACjD,aAAK,4BACD,eAAe,KAAK,6BAA6B;AACrD,aAAK,iBAAiB,cAAc,KAAK;AACzC,aAAK,kBAAkB,cAAc,KAAK;AAC1C,aAAK,kBAAkB,eAAe,KAAK;AAC3C,aAAK,mBAAmB,eAAe,KAAK;;MAEhD;AACI,qBAAa,mBAAmB;AAChC,qBAAa,KAAK,QAAQ,IAAI,KAAK,OAAQ,KAAK,OAAO,WAAW;AAClE,oBAAY,WAAW;AACvB,YAAI,OAAO;AACP,gBAAM,IAAI,WAAW,QAAQ,mGAEtB,KAAK,UAAU;;AAE1B,aAAK,YACD,CAAC,IAAI,UAAU,CAAE,MAAM,WAAW,QAAQ,MAAM,EAAG,OAAO;AAC9D,sBAAc,CAAC;AACf,YAAI,KAAK;AACL,eAAK,QAAQ,KAAK,UAAU,SAAS,OAAO,MAAM,KAAK,kBAAkB,KAAK,kBAAkB,MAAM,KAAK;;AAE/G,YAAI,KAAK;AACL,eAAK,OAAO,KAAK,UAAU,QAAQ,OAAO,MAAM,KAAK,iBAAiB,KAAK,iBAAiB,MAAM,KAAK;;AAE3G,aAAK,aAAa,KAAK,UAAU,eAAe,OAAO,MAAM,KAAK,uBAAuB,MAAM;AAC/F,aAAK,iBAAiB,KAAK,UAAU,m
BAAmB,OAAO,MAAM,KAAK,2BAA2B,MAAM;AAC3G,aAAK,QAAQ;;MAEjB;AACI,eAAO,KAAK;AACR,2BAAiB,OAAO,eAAe,OAAO,QAAQ,OAAO;AAC7D,yBAAc,oBAAoB;AAClC,6BAAmB,OAAM;AACzB,uBAAa,WAAW;AACxB,gCAAsB,QAAiB,GAAG;AAC1C,uBAAa,KAAK,QAAQ,IAAI,KAAK,OAAQ,KAAK,OAAO;AACvD,wBAAc,OAAO,MAAM;AAC3B,iCAAuB,aAA2B,GAAG;AACrD,yBAAe,QAAQ,WAAW;AAClC,sCAA4B,cAAc;AAC1C,8BAAoB;AACpB,oCAA0B,CAAC,aAAiB,qBAAqB,QAAiB,GAAG,MAAM,MAAM,GAAG,OAAO;AAC3G,qCAA2B;AACvB,gBAAI;AACA,0CAA4B,KAAK,WAAW,OAAO,QAAQ;AAC3D,8CAAgC,KAAK,eAAe,OAAO,QAAQ;AACnE,oCAAsB,KAAK,SAAS,KAAK,KAAK,OAAO,QAAQ,kBAAkB;AAC/E,qCAAuB,KAAK,QAAQ,KAAK,MAAM,OAAO,QAAQ,kBAAkB;AAChF,qBAAO,mBAAmB,QAAO,qBAAqB,yBAAyB,eAAe,gBAAgB,KAAK;;AAGnH,qBAAO,mBAAmB,QAAO,KAAK,WAAW,QAAQ,KAAK,eAAe,QAAQ,KAAK,QAAQ,OAAO,OAAO,KAAK,KAAK,QAAQ,KAAK,SAAS,OAAO,OAAO,KAAK,MAAM,QAAQ,KAAK;;;AAG9L,cAAI,CAAC;AACD,mBAAO;;AAEX,qDAAyC,yBAAyB,QAAO,KAAK,MAAM,QAAQ,KAAK,KAAK,QAAQ,eAAe,KAAK;AAClI,kCAAwB;AACpB,iBAAS;AACL,4BAAc,IAAI;AAClB,gCAAkB,UAAS;AAC3B,kCAAoB,UAAU,IAAI,OAAO,IAAI;AAC7C,wBAAS,MAAM,UAAU,IAAI;;;AASrC,8CAAoC;AAChC,4BAAgB,KAAK,YAAY,OAAM,KAAK;AAC5C,4BAAgB,KAAK,gBAAgB,WAAU,KAAK;;AAExD;AACA,iBAAO;;;MAGf;AACI,wBAAe;UACX,MAAM,KAAK;UACX,UAAU,KAAK;UACf,SAAS,KAAK;UACd,QAAQ,KAAK;UACb,OAAO,KAAK;UACZ,iBAAiB,qBAAqB,KAAK;UAC3C,kBAAkB,qBAAqB,KAAK;UAC5C,uBAAuB,qBAAqB,KAAK;UACjD,2BAA2B,qBAAqB,KAAK;UACrD,iBAAiB,qBAAqB,KAAK;UAC3C,kBAAkB,qBAAqB,KAAK;UAC5C,gBAAgB,oBAAoB,KAAK;UACzC,iBAAiB,oBAAoB,KAAK;;AAE9C,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;AAIf,uBAAmB,YAAY;AAC/B,kBAA4B;qCACY;MACpC;AACI,YAAI,QAAQ;AACR,iBAAO;;AAEX,cAAM;AACN,aAAK,OAAO,KAAK,QAAQ,OAAO,KAAK,KAAK;AAC1C,YAAI,OAAO,KAAK,SAAS;AACrB,cAAI,CAAC,OAAO,UAAU,KAAK;AACvB,kBAAM,IAAI,MAAM,gDAAgD,KAAK;;mBAGpE,MAAM,QAAQ,KAAK;AACxB,6BAAmB,KAAK;AACpB,gBAAI,CAAC,OAAO,UAAU;AAClB,oBAAM,IAAI,MAAM,0DACI,KAAK,UAAU,KAAK;;;;AAKhD,gBAAM,IAAI,MAAM,wEACI,KAAK,UAAU,KAAK;;AAE5C,aAAK,UAAU,KAAK,WAAW,OAAO,OAAO,KAAK;AAClD,aAAK,SAAS,KAAK,UAAU,OAAO,OAAO,KAAK;AAChD,aAAK,QAAQ,KAAK,SAAS,OAAO,OAAO,KAAK;AAC9C,aAAK,kBAAkB,eAAe,KAAK,mBAAmB;AAC9D,aAAK,mBAAmB,eAAe,KAAK,oBAAoB;AAChE,aAAK,kBAAkB,eAAe,KAAK;AAC3C,aAAK,mBAAmB,eAAe,KAAK;AAC5C,aAAK,kBAAkB;;MAE3B;AACI,qBAAa,mBAAmB;AAChC,sBAAc,WAAW;AAEzB,YAAI,OAAO,KAAK,SAAS;AACrB,eAAK,OAAO,CAAC,KAAK;;AAEtB,qBAAa,GAAG,IAAI,KAAK,KAAK,QAAQ,EAAE;AACpC,cAAI,KAAK,KAAK,KAAK;AACf,iBAAK,KAAK,MAAM;;;AAIxB,2BAAmB,KAAK;AACpB,cAAI,OAAO,KAAK,QAAQ;AACpB,kBAAM,IAAI,MAAM,iBAAiB;;;AAGzC,YAAI,KAAK,KAAK,WAAW,SAAqB,KAAK,MAAM;AACrD,gBAAM,IAAI,MAAM,4BAA4B,KAAK;;AAErD,2BAAmB,KAAK,KAAK,IAAI,UAAQ,WAAW;AACpD,0BAAkB;AAClB,YAAI,KAAK;AACL,eAAK,QAAQ,KAAK,UAAU,SAAS,YAAY,WAAW,KAAK,kBAAkB,KAAK,kBAAkB;;AAG1G,eAAK,QAAQ;;AAEjB,YAAI,KAAK;AACL,eAAK,OAAO,KAAK,UAAU,QAAQ,YAAY,WAAW,KAAK,iBAAiB,KAAK,iBAAiB;;AAGtG,eAAK,OAAO;;AAEhB,aAAK,QAAQ;;MAEjB;AACI,uBAAc,oBAAoB;AAClC,2BAAmB,OAAM;AACzB,sBAAc,WAAW;AACzB,eAAO,KAAK;AACR,2BAAiB;AACjB,eAAM,aAAM,uBAAa,QAAQ,QAAO,KAAK,MAAM;AACnD,iCAAuB,aAA2B,GAAG;AACrD,4BAAkB,KAAK;AACnB,2BAAe,OAAO,WAAW;;AAErC,4BAAkB;AACd,gBAAI,KAAK,QAAQ,EAAE,MAAM,WAAW,SAChC,KAAK,SAAS,CAAC,QAAQ;AACvB,qBAAO,EAAE,QAAQ;;AAGjB,qBAAO;;;AAGf,uBAAY,UAAU,KAAK,MAAM;AACjC,uBAAa,UAAU,KAAK,KAAK;AAOjC,gCAAsB;AACtB,oCAA0B;AAC1B,uBAAa,GAAG,IAAI,OAAO,EAAE;AACzB,gBAAI,KAAK,KAAK,QAAQ,OAAO;AACzB,4BAAc,KAAK,WAAW;AAC9B,gCAAkB,KAAK;;AAGvB,4BAAc,KAAK;AACnB,gCAAkB,KAAK,WAAW;;;AAG1C,kBAAO,MAAK,KAAK;AACjB,sBAAW,UAAS,KAAK;AACzB,mBAAQ,OAAM,KAAK;AACnB,mBAAS,OAAO,KAAK;AACrB,iBAAO,mBAAmB,QAAO,OAAM,WAAU,QAAQ,QAAO,KAAK;;;MAG7E;AACI,wBAAe;UACX,MAAM,KAAK;UACX,SAAS,KAAK;UACd,QAAQ,KAAK;UACb,OAAO,KAAK;UACZ,iBAAiB,qBAAqB,KAAK;UAC3C,kBAAkB,qBAAqB,KAAK;UAC5C,iBAAiB,qBAAqB,KAAK;UAC3C,kBAAkB,qBAAqB,KAAK;;AAEhD,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AAC
tB,eAAO;;;AAIf,uBAAmB,YAAY;AAC/B,kBAA4B;AC1Y5B;;;;;;;;;AA4BO;AACH,aAAO,KAAK;AACR,YAAI,EAAE,SAAS;AACX,gBAAM,IAAI,WAAW,kEACd,EAAE;;AAEb,YAAI,WAAW;AACX,oBAAU,CAAC,GAAG;;AAElB,YAAI,QAAQ,WAAW;AACnB,gBAAM,IAAI,WAAW,+FACe,QAAQ;;AAEhD,wBAAgB,CAAC,CAAC,GAAG,IAAI,SAAS,CAAC,GAAG;AACtC,eAAO,KAAQ,GAAG;;;AAanB;AACH,aAAO,KAAK;AACR,YAAI,EAAE,SAAS;AACX,gBAAM,IAAI,WAAW,kEACd,EAAE;;AAEb,YAAI,WAAW;AACX,oBAAU,CAAC,CAAC,GAAG,IAAI,CAAC,GAAG;;AAE3B,YAAI,QAAQ,WAAW,KAAK,QAAQ,GAAG,WAAW,KAC9C,QAAQ,GAAG,WAAW;AACtB,gBAAM,IAAI,WAAW;;AAGzB,YAAI,cAAc;AACd,uBAAa;;AAEjB,YAAI,eAAe,kBAAkB,eAAe;AAChD,gBAAM,IAAI,WAAW,wBAAwB;;AAGjD;AACA,YAAI,eAAe;AACf,oBAAU,CAAC,CAAC,GAAG,IAAI,CAAC,GAAG,IAAI,QAAQ,IAAI,QAAQ;;AAG/C,oBAAU,CAAC,CAAC,GAAG,IAAI,QAAQ,IAAI,QAAQ,IAAI,CAAC,GAAG;;AAEnD,eAAO,KAAQ,GAAG;;;gCAGS;MAC/B;AACI,YAAI,QAAQ;AACR,iBAAO;;AAEX,cAAM;AACN,aAAK,aACD,KAAK,cAAc,OAAO,oBAAoB,KAAK;AAGvD,YAAI,KAAK,WAAW;AAChB,eAAK,UAAU,CAAC,CAAC,GAAG,IAAI,CAAC,GAAG;mBAEvB,OAAO,KAAK,YAAY;AAC7B,eAAK,UACD,CAAC,CAAC,KAAK,SAAS,KAAK,UAAU,CAAC,KAAK,SAAS,KAAK;;AAGvD,eAAK,UAAU,KAAK;AACpB,cAAI,KAAK,QAAQ,WAAW;AACxB,kBAAM,IAAI,WAAW,+EACI,KAAK,QAAQ;;AAE1C;AACA;AACA,cAAI,OAAO,KAAK,QAAQ,OAAO;AAC3B,4BAAgB,CAAC,KAAK,QAAQ,IAAI,KAAK,QAAQ;AAC/C,2BAAe,CAAC,KAAK,QAAQ,IAAI,KAAK,QAAQ;;AAG9C,iBAAK,UAAU,KAAK;AACpB,gBAAI,KAAK,QAAQ,GAAG,WAAW;AAC3B,oBAAM,IAAI,WAAW,sFACQ,KAAK,QAAQ,GAAG;;AAEjD,4BAAgB,KAAK,QAAQ;AAC7B,gBAAI,KAAK,QAAQ,GAAG,WAAW;AAC3B,oBAAM,IAAI,WAAW,qFACQ,KAAK,QAAQ,GAAG;;AAEjD,2BAAe,KAAK,QAAQ;;AAEhC,eAAK,UAAU,CAAC,eAAe;;AAEnC,aAAK,YAAY,CAAC,IAAI,UAAU,CAAE,MAAM;;MAE5C;AACI,qBAAa,mBAAmB;AAChC;AACA;AACA,YAAI,KAAK,eAAe;AACpB,cAAI,WAAW,MAAM,QAAQ,WAAW,MAAM;AAC1C,mBAAO,WAAW,KAAK,KAAK,QAAQ,GAAG,KAAK,KAAK,QAAQ,GAAG;;AAG5D,mBAAO;;AAEX,cAAI,WAAW,MAAM,QAAQ,WAAW,MAAM;AAC1C,mBAAO,WAAW,KAAK,KAAK,QAAQ,GAAG,KAAK,KAAK,QAAQ,GAAG;;AAG5D,mBAAO;;AAEX,iBAAO,CAAC,WAAW,IAAI,WAAW,IAAI,MAAM;;AAG5C,cAAI,WAAW,MAAM,QAAQ,WAAW,MAAM;AAC1C,mBAAO,WAAW,KAAK,KAAK,QAAQ,GAAG,KAAK,KAAK,QAAQ,GAAG;;AAG5D,mBAAO;;AAEX,cAAI,WAAW,MAAM,QAAQ,WAAW,MAAM;AAC1C,mBAAO,WAAW,KAAK,KAAK,QAAQ,GAAG,KAAK,KAAK,QAAQ,GAAG;;AAG5D,mBAAO;;AAEX,iBAAO,CAAC,WAAW,IAAI,MAAM,MAAM,WAAW;;;MAGtD;AACI,eAAO,KAAK,MAAM,iBAAiB,oBAAoB,SAAS,KAAK,SAAS,KAAK;;MAEvF;AACI,wBAAe;UACX,SAAS,KAAK;UACd,YAAY,KAAK;;AAErB,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;AAIf,kBAAc,YAAY;AAC1B,kBAA4B;ACtL5B;;;;;;;;;AAkCO;AACH,aAAO,KAAK;AACR,wBAAgB;AAChB,sBAAc;AACd,yBAAiB;AACjB,YAAI,WAAW;AACX,oBAAU,CAAC,GAAG;;AAElB,YAAI,WAAW;AACX,oBAAU;;AAEd,YAAI,cAAc;AACd,uBAAa;;AAEjB,YAAI,YAAY;AACZ,qBAAW;;AAIf,YAAI,sBAAsB,GAAG;AAC7B;AACA,8BAAuB,YAAY,SAAU,SAAS;AACtD,YAAI,aAAa;AAEb,cAAI,SAAY,GAAG,UAAU,SAAS;;AAKtC,cAAI,SAEJ,GAAG,UAAU,SAAS;;AAE1B,YAAI,eAAe;AACf,cAAI,WAAc,GAAG,CAAC,GAAG,GAAG,GAAG;;AAEnC,eAAO;;;AAaR;AACH,aAAO,KAAK;AACR,wBAAgB;AAChB,sBAAc;AACd,yBAAiB;AACjB,YAAI,WAAW;AACX,oBAAU,CAAC,GAAG,GAAG;;AAErB,YAAI,WAAW;AACX,oBAAU;;AAEd,YAAI,cAAc;AACd,uBAAa;;AAEjB,YAAI,YAAY;AACZ,qBAAW;;AAGf,YAAI,sBAAsB,GAAG;AAC7B;AACA,8BAAuB,YAAY,SAAU,SAAS;AACtD,YAAI,aAAa;AACb,cAAI,UAAc,GAAG,UAAU,SAAS;;AAGxC,cAAI,UAAc,GAAG,UAAU,SAAS;;AAE5C,YAAI,eAAe;AACf,cAAI,WAAc,GAAG,CAAC,GAAG,GAAG,GAAG,GAAG;;AAEtC,eAAO;;;4BAMgB;MAO3B;AACI,YAAI,KAAK,YAAY;AACjB,eAAK,WAAW;;AAEpB,cAAM;AACN,YAAI,OAAO,KAAK,aAAa;AACzB,eAAK,WAAW,CAAC,KAAK;mBAEjB,MAAM,QAAQ,KAAK,aACxB,KAAK,SAAS,WAAW,KACzB,OAAO,KAAK,SAAS,OAAO;AAC5B,eAAK,WAAW,KAAK;;AAGrB,gBAAM,IAAI,WAAW,qGAEd,KAAK,UAAU,KAAK;;AAE/B,8BAAsB,KAAK,UAAU;AACrC,YAAI,KAAK,WAAW;AAChB,eAAK,UAAU,KAAK;;AAGpB,cAAI,OAAO,KAAK,YAAY;AACxB,iBAAK,UAAU,CAAC,KAAK;qBAEhB,MAAM,QAAQ,KAAK,YACxB,KAAK,QAAQ,WAAW,KACxB,OAAO,KAAK,QAAQ,OAAO;AAC3B,iBAAK,UAAU,KAAK;;AAGpB,kBAAM,IAAI,WAAW,oGAEd,KAAK,UAAU,KAA
K;;;AAGnC,8BAAsB,KAAK,SAAS;AACpC,aAAK,UAAU,KAAK,WAAW,OAAO,UAAU,KAAK;AACrD,yBAAiB,KAAK;AACtB,aAAK,YAAY,CAAC,IAAI,UAAU,CAAE,MAAM;;MAE5C;AACI,qBAAa,mBAAmB;AAChC,uBAAe,iBAAiB,WAAW,IAAI,KAAK,SAAS,IAAI,KAAK,SAAS,KAAK,QAAQ;AAC5F,eAAO,CAAC,WAAW,IAAI,QAAQ,WAAW;;MAE9C;AACI,eAAO,KAAK;AACR,eAAK,eAAe,QAAQ;AAE5B,mBAAS,aAAa,oBAAoB,SAAS;AACnD,yBAAe,KAAK,gBAAgB,oBAAoB,SAAS,CAAC,KAAK,SAAS,IAAI,IAAI,CAAC,KAAK,QAAQ,IAAI,IAAI,KAAK,SAAS;AAE5H,iBAAO,QAAY,QAAQ,CAAC;;;MAGpC;AACI,wBAAe;UACX,UAAU,KAAK;UACf,SAAS,KAAK;UACd,SAAS,KAAK;;AAElB,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;+BAGmB;MAC9B;AACI,cAAM;;MAEV;AACI,wBAAgB;AAChB,yBAAiB;AACjB,eAAO,OAAO,QAAQ,UAAU,SAAS,SAAS,YAAY;;;AAItE,iBAAa,YAAY;AACzB,kBAA4B;mCACU;MAClC;AACI,cAAM;;MAEV;AACI,wBAAgB;AAChB,yBAAiB;AACjB,eAAO,OAAO,QAAQ,UAAU,SAAS,SAAS,YAAY;;;AAItE,qBAAiB,YAAY;AAC7B,kBAA4B;4BAIG;MAC3B;AACI,YAAI,KAAK,YAAY;AACjB,eAAK,WAAW,CAAC,GAAG;;AAExB,cAAM;AACN,aAAK,WAAW,MAAM,QAAQ,KAAK,YAC/B,KAAK,WACL,CAAC,KAAK,UAAU,KAAK;AACzB,YAAI,KAAK,WAAW;AAChB,eAAK,UAAU,KAAK;mBAEf,MAAM,QAAQ,KAAK;AACxB,cAAI,KAAK,QAAQ,WAAW;AACxB,kBAAM,IAAI,WAAW,wHAEd,KAAK,QAAQ;;AAExB,eAAK,UAAU,KAAK;;AAIpB,eAAK,UAAU,CAAC,KAAK,SAAS,KAAK;;AAEvC,8BAAsB,KAAK,UAAU;AACrC,8BAAsB,KAAK,SAAS;AACpC,aAAK,UAAU,KAAK,WAAW,OAAO,UAAU,KAAK;AACrD,aAAK,aACD,KAAK,cAAc,OAAO,iBAAiB,KAAK;AACpD,wBAAgB,KAAK;AACrB,yBAAiB,KAAK;AACtB,aAAK,YAAY,CAAC,IAAI,UAAU,CAAE,MAAM;;MAE5C;AACI,qBAAa,mBAAmB;AAChC,mBAAW,KAAK,eAAe,kBAAkB,WAAW,KAAK,WAAW;AAC5E,mBAAW,KAAK,eAAe,kBAAkB,WAAW,KAAK,WAAW;AAC5E,eACI,iBAAiB,MAAM,KAAK,SAAS,IAAI,KAAK,SAAS,KAAK,QAAQ;AACxE,eACI,iBAAiB,MAAM,KAAK,SAAS,IAAI,KAAK,SAAS,KAAK,QAAQ;AACxE,YAAI,KAAK,eAAe;AACpB,iBAAO,CAAC,WAAW,IAAI,WAAW,IAAI,MAAM;;AAG5C,iBAAO,CAAC,WAAW,IAAI,MAAM,MAAM,WAAW;;;MAGtD;AACI,eAAO,KAAK;AACR,eAAK,eAAe,QAAQ;AAC5B,iBAAO,KAAK,gBAAgB,oBAAoB,SAAS,KAAK,UAAU,KAAK,SAAS,KAAK,SAAS,KAAK;;;MAGjH;AACI,wBAAe;UACX,UAAU,KAAK;UACf,SAAS,KAAK;UACd,SAAS,KAAK;UACd,YAAY,KAAK;;AAErB,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;+BAGmB;MAC9B;AACI,cAAM;;MAEV;AACI,wBAAgB;AAChB,yBAAiB;AACjB,eAAO,OAAO,QAAQ,UAAU,SAAS,SAAS,YAAY;;;AAItE,iBAAa,YAAY;AACzB,kBAA4B;mCACU;MAClC;AACI,cAAM;;MAEV;AACI,wBAAgB;AAChB,yBAAiB;AACjB,eAAO,OAAO,QAAQ,UAAU,SAAS,SAAS,YAAY;;;AAItE,qBAAiB,YAAY;AAC7B,kBAA4B;4BAIG;MAC3B;AACI,YAAI,KAAK,YAAY;AACjB,eAAK,WAAW,CAAC,GAAG,GAAG;;AAE3B,cAAM;AACN,aAAK,WAAW,MAAM,QAAQ,KAAK,YAC/B,KAAK,WACL,CAAC,KAAK,UAAU,KAAK,UAAU,KAAK;AACxC,YAAI,KAAK,WAAW;AAChB,eAAK,UAAU,KAAK;mBAEf,MAAM,QAAQ,KAAK;AACxB,cAAI,KAAK,QAAQ,WAAW;AACxB,kBAAM,IAAI,WAAW,wHAEd,KAAK,QAAQ;;AAExB,eAAK,UAAU,KAAK;;AAIpB,eAAK,UAAU,CAAC,KAAK,SAAS,KAAK,SAAS,KAAK;;AAErD,8BAAsB,KAAK,UAAU;AACrC,8BAAsB,KAAK,SAAS;AACpC,aAAK,UAAU,KAAK,WAAW,OAAO,UAAU,KAAK;AACrD,aAAK,aACD,KAAK,cAAc,OAAO,iBAAiB,KAAK;AACpD,wBAAgB,KAAK;AACrB,yBAAiB,KAAK;AACtB,aAAK,YAAY,CAAC,IAAI,UAAU,CAAE,MAAM;;MAE5C;AACI,qBAAa,mBAAmB;AAChC,qBAAa,KAAK,eAAe,kBAAkB,WAAW,KAAK,WAAW;AAC9E,mBAAW,KAAK,eAAe,kBAAkB,WAAW,KAAK,WAAW;AAC5E,mBAAW,KAAK,eAAe,kBAAkB,WAAW,KAAK,WAAW;AAC5E,iBAAS,iBAAiB,QAAQ,KAAK,SAAS,IAAI,KAAK,SAAS,KAAK,QAAQ;AAC/E,eACI,iBAAiB,MAAM,KAAK,SAAS,IAAI,KAAK,SAAS,KAAK,QAAQ;AACxE,eACI,iBAAiB,MAAM,KAAK,SAAS,IAAI,KAAK,SAAS,KAAK,QAAQ;AACxE,YAAI,KAAK,eAAe;AACpB,iBAAO,CAAC,WAAW,IAAI,WAAW,IAAI,QAAQ,MAAM;;AAGpD,iBAAO,CAAC,WAAW,IAAI,QAAQ,MAAM,MAAM,WAAW;;;MAG9D;AACI,eAAO,KAAK;AACR,eAAK,eAAe,QAAQ;AAC5B,iBAAO,KAAK,gBAAgB,oBAAoB,SAAS,KAAK,UAAU,KAAK,SAAS,KAAK,SAAS,KAAK;;;MAGjH;AACI,wBAAe;UACX,UAAU,KAAK;UACf,SAAS,KAAK;UACd,SAAS,KAAK;UACd,YAAY,KAAK;;AAErB,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;+BAGmB;MAC9B;AACI,cAAM;;MAEV;AACI,wBAAgB;AAChB,yBAAiB;AACjB,eAAO,OAAO,QAAQ,UAAU,SAAS,SAAS,YAAY;;;AAItE,iBAAa,YAAY;AACzB,kBAA
4B;mCACU;MAClC;AACI,cAAM;;MAEV;AACI,wBAAgB;AAChB,yBAAiB;AACjB,eAAO,OAAO,QAAQ,UAAU,SAAS,SAAS,YAAY;;;AAItE,qBAAiB,YAAY;AAC7B,kBAA4B;kCAIS;MACjC;AACI,cAAM;AACN,aAAK,YAAY,CAAC,IAAI,UAAU,CAAE,MAAM;;MAE5C;AACI,eAAO,CAAC,WAAW,IAAI,WAAW;;MAEtC;AACI,cAAM,IAAI;;;yCAG0B;MACxC;AACI,cAAM,QAAQ;;MAElB;AACI,eAAO,KAAK;AACR,yBAAc,oBAAoB;AAClC,iBAAO,KAAS,QAAO;;;;AAKnC,2BAAuB,YAAY;AACnC,kBAA4B;qCACY;MACpC;AACI,cAAM,QAAQ;;MAElB;AACI,eAAO,KAAK;AACR,yBAAc,oBAAoB;AAClC,iBAAO,KAAQ,QAAO;;;;AAKlC,uBAAmB,YAAY;AAC/B,kBAA4B;kCAIS;MACjC;AACI,cAAM;AACN,aAAK,aACD,KAAK,cAAc,OAAO,iBAAiB,KAAK;AACpD,wBAAgB,KAAK;AACrB,aAAK,YAAY,CAAC,IAAI,UAAU,CAAE,MAAM;;MAE5C;AACI,qBAAa;AACb,YAAI,KAAK,eAAe;AACpB,iBAAO,CAAC,WAAW,IAAI,WAAW;;AAGlC,iBAAO,CAAC,WAAW,IAAI,WAAW;;;MAG1C;AACI,cAAM,IAAI;;MAEd;AACI,wBAAe,CAAE,YAAY,KAAK;AAClC,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;yCAG6B;MACxC;AACI,eAAO,KAAK;AACR,yBAAc,oBAAoB;AAClC,cAAI,KAAK,eAAe;AACpB,mBAAO,KAAS,QAAO,CAAC,GAAG;;AAG3B,mBAAO,KAAS,QAAO,CAAC,GAAG;;;;;AAM3C,2BAAuB,YAAY;AACnC,kBAA4B;qCACY;MACpC;AACI,eAAO,KAAK;AACR,yBAAc,oBAAoB;AAClC,cAAI,KAAK,eAAe;AACpB,mBAAO,KAAQ,QAAO,CAAC,GAAG;;AAG1B,mBAAO,KAAQ,QAAO,CAAC,GAAG;;;;;AAM1C,uBAAmB,YAAY;AAC/B,kBAA4B;ACpgB5B;;;;;;;;;0BA8B6B;MACzB;AAQI,cAAM;AACN,aAAK,QAAQ,KAAK;;MAEtB;AACI,aAAK,QAAQ;;UAGb;AAIA,YAAI,KAAK,SAAS;AACd,iBAAO,KAAK,MAAM;;AAGlB,iBAAO;;;UAGX;AAIA,YAAI,KAAK,SAAS;AACd,eAAK,MAAM,YAAY;;;UAG3B;AACA,eAAO,KAAK,MAAM;;UAGlB;AACA,eAAO,KAAK,MAAM;;UAGlB;AAEA,eAAO,KAAK,MAAM;;UAGlB;AACA,eAAO,KAAK,MAAM;;MAGtB;AACI,eAAO,KAAK,MAAM;;MAEtB;AACI,aAAK,MAAM,WAAW;;MAE1B;AACI,wBAAe;UACX,OAAS;YACL,WAAa,KAAK,MAAM;YACxB,QAAU,KAAK,MAAM;;;AAG7B,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;MAEX;AACI,cAAM,6BAA6B;AACnC,YAAI,KAAK,SAAS;AACd,eAAK,MAAM,6BAA6B;;;aAIzC,yCAAwC;AAC3C,4BAAoB,QAAO;AAC3B,sBAAc,YAAY,aAAa;AACvC,eAAO,QAAO;AACd,0BAAkB,CAAE;AACpB,eAAO,OAAO,WAAW;AACzB,eAAO,IAAI,IAAI;;;kCAGc;MACjC;AACI,cAAM;AACN,aAAK,kBAAkB;;MAE3B;AACI,qBAAa,mBAAmB;AAChC,YAAI,WAAW,SAAS;AACpB,gBAAM,IAAI,WAAW,gFACF,KAAK,UAAU;;AAEtC,aAAK,YAAY,CAAC,CAAE,OAAO;AAC3B,gCAAwB,CAAC,WAAW,IAAI,OAAO,WAAW,MAAM;AAChE,YAAI,CAAC,KAAK,MAAM;AACZ,eAAK,MAAM,MAAM;AACjB,eAAK,MAAM,QAAQ;;AAEvB,cAAM,MAAM;;MAEhB;AACI,qBAAa,mBAAmB;AAChC,gCAAwB,CAAC,WAAW,IAAI,OAAO,WAAW,MAAM;AAChE,iCAAyB,KAAK,MAAM,mBAAmB;AACvD,0BAAkB,WAAW;AAC7B,eAAO,CAAC,iBAAiB,IAAI,WAAW,OAAO,iBAAiB,MAAM;;MAE1E;AACI,eAAO,KAAK;AAER,mBAAS,oBAAoB;AAI7B,wBAAa;AAKT,2BAAe,oBAAoB,KAAK,MAAM,KAAK,SAAQ;AAC3D,mBAAO,CAAC,QAAQ;;AAEpB,6BAAmB,IAAI,OAAM,QAAQ,IAAI,OAAyB,MAAiB,MAAsB,OAAoB;AAC7H,oBAAU,WAAW;AAGrB,iBAAO;;;;AAKnB,oBAAgB,YAAY;AAC5B,kBAA4B;AACrB;AACH,gCAAwC,iCAAiC,0BAA0B;;AAEvG,6CAAyC;gCACN;MAC/B;AACI,cAAM;AASN,4BAAoB,KAAK,MAAM;AAC/B,yBAAiB;AACjB,iBAAS,eAAe,KAAK,MAAM;AACnC,iBAAS,YAAY;AACrB,aAAK,eAAe,YAAY;AAChC,oBAAY,iBACR,YAAY,mBAAmB,OAAO,QAAQ;AAClD,yBAAiB;AACjB,iBAAS,eAAe,KAAK,MAAM;AACnC,iBAAS,YAAY;AACrB,aAAK,gBAAgB,YAAY;AACjC,aAAK,aAAa,OAAO,aAAa,KAAK,aAAa;AACxD,aAAK,cAAc,OAAO,cAAc,KAAK,cAAc;AAC3D,aAAK,YAAY,KAAK,cAAc,SAChC,mCACA,KAAK;AACT,oCAA4B,KAAK;AACjC,YAAI,KAAK;AACL,gBAAM,IAAI,oBAAoB;;AAElC,aAAK,YAAY,KAAK,MAAM;AAC5B,aAAK,kBAAkB,KAAK,MAAM;AAClC,aAAK,cAAc,KAAK,MAAM;AAC9B,aAAK,kBAAkB;AACvB,aAAK,aAAa;AAClB,aAAK,YAAY,KAAK,MAAM;AAC5B,aAAK,eAAe;;UAEpB;AACA,eAAO,KAAK;;UAEZ;AAIA,aAAK,aAAa;AAClB,YAAI,KAAK,gBAAgB;AACrB,eAAK,aAAa,YAAY;;AAElC,YAAI,KAAK,iBAAiB;AACtB,eAAK,cAAc,YAAY;;;MAGvC;AACI,eAAO,KAAK,aAAa,aAAa,OAAO,KAAK,cAAc;;MAEpE;AACI,2BAAmB,QAAQ;AAC3B,+BAAuB,KAAK,MAAM,aAAa;AAC/C,aAAK,aAAa,WAAW,QAAQ,MAAM,GAAG;AAC9C,aAAK,cAAc,WAAW,QAAQ,MAAM;;MAEhD;AACI,0BAAkB,KAAK,aAAa,mBAAmB;AACvD,YAAI,CAAE,OAAM,QAAQ,gBAAgB,MAAM,QAAQ,YAAY;AAC1D,wBAAc,CAAC;;AAEnB,sBAAc;AACd;AAC
A;AACA;AACA,YAAI,KAAK;AACL,uBAAa,YAAY,MAAM;AAC/B,wBAAc,YAAY;;AAG1B,wBAAc,YAAY;;AAE9B,sBAAc;AACd,YAAI,KAAK,cAAc;AACnB,sBAAY,YAAY,SAAS,MAAM;AACvC,yBAAe,CAAC;mBAEX,KAAK,aAAa;AACvB,yBAAe,CAAC,aAAa,YAAY;;AAGzC,yBAAe,CAAC;;AAEpB,YAAI,KAAK;AACL,cAAI,KAAK,aAAa;AAClB,mBAAO,aAAa,OAAO,YAAY,OAAO,WAAW;;AAE7D,iBAAO,CAAC,aAAa,OAAO,YAAY,OAAO,WAAW;;AAE9D,eAAO,iBAA+B;;MAE1C;AACI,2BAAmB,UAAU,OAAO,OAAO,OAAO;AAClD,wBAAgB,UAAU,OAAO,OAAO,OAAO;AAC/C,YAAI,UAAU;AACV,mBAAS;;AAEb,6BAAqB,gBAAgB,QAAQ,cAAc,WAAW,KAAK;AAC3E,iBAAS,aAAa;AACtB,uBAAe,aAAa;AAC5B,oBAAY,aAAa;AACzB,YAAI,MAAM,QAAQ;AACd,yBAAe,OAAO,MAAM;AAC5B,mBAAS,OAAO;;AAEpB,YAAK,iBAAgB,QAAQ,aAAa,WAAW,MACjD,aAAa;AACb,iBAAO,MAAM,MAAM,QAAQ;;AAE/B,iCAAyB;AACzB,gCAAwB;AACxB,YAAI,gBAAgB;AAChB,4BAAkB,aAAa;AAC/B,cAAI,YAAY,IAAI;AAChB,kBAAM,IAAI,WAAW;;AAIzB,iBAAO,kBAAkB;AACzB,2BAAiB,KAAK,GAAG;AACzB,6BAAmB,aACd,IAAI,WAAS,IAAI,UAAU,CAAE,OAAO,MAAM;AAC/C,eAAK,aAAa,YAAY,WAAW,MAAM,GAAG,YAAY;AAC9D,eAAK,cAAc,YAAY,WAAW,MAAM,YAAY;AAC5D,0BAAgB,KAAK,GAAG;;AAE5B,YAAI,aAAa;AACb,gBAAM,IAAI,oBAAoB;;AAGlC,iCAAyB,iBAAiB,cAAc;AACxD,8BAAqB;AACjB,cAAI,mBAAkB,mBAAmB;AACrC,kBAAM,IAAI,WAAW;;;AAI7B,YAAI;AAEA,4BAAkB,CAAC,QAAQ,OAAO;AAClC,gCAAsB,KAAK,UAAU,OAAO;AAU5C,oCAA0B,KAAK;AAC/B,eAAK,YAAY;AACjB,yBAAe,MAAM,MAAM,WAAW;AACtC,eAAK,YAAY;AACjB,iBAAO;;AAGP,iBAAO,MAAM,MAAM,QAAQ;;;MAGnC;AACI,eAAO,KAAK;AACR,+BAAqB,OAAO;AAC5B;AACA;AACA,cAAI,gBAAgB;AAChB,gBAAI,KAAK,aAAa,KAAK,QAAQ;AACnC,mBAAO,KAAK,cAAc,KAAK,QAAQ;;AAGvC,iCAAqB,aAAa,MAAM,GAAG,aAAa,SAAS;AACjE,kCAAsB,aAAa,MAAM,aAAa,SAAS;AAC/D,gBAAI,KAAK,aAAa,KAAK,QAAQ,OAAO,OAAO,QAAQ,CAAE,cAAc;AACzE,mBAAO,KAAK,cAAc,KAAK,QAAQ,OAAO,OAAO,QAAQ,CAAE,cAAc;;AAEjF;AACA,cAAI,KAAK;AACL,gBAAI,MAAM,QAAQ;AACd,uBAAS,EAAE,MAAM,GAAG,OAAO,KAAK,MAAM;;;AAI1C,gBAAI,EAAE;AACN,mBAAO,KAAK;;AAEhB,cAAI,KAAK;AACL,mBAAO,SAAY,MAAM;;AAE7B;AACA,cAAI,KAAK,cAAc;AACnB,qBAAS,YAAc,CAAC,GAAG;qBAEtB,KAAK,cAAc;AACxB,qBAAS,MAAQ,GAAG;qBAEf,KAAK,cAAc;AACxB,qBAAS,KAAQ,KAAI,MAAQ,GAAG;qBAE3B,KAAK,cAAc;AACxB,qBAAS,KAAQ,GAAG;qBAEf,KAAK,aAAa;AACvB,qBAAS,CAAC,GAAG;;AAGjB,cAAI,KAAK;AACL,gBAAI,KAAK,aAAa;AAClB,qBAAO,OAAO,OAAO;;AAEzB,mBAAO,CAAC,QAAQ,OAAO;;AAE3B,iBAAO;;;MAGf;AACI,aAAK,aAAa;AAClB,aAAK,cAAc;;MAEvB;AACI,kBAAU,KAAK,aAAa,MAAM;AAC9B,eAAK,aAAa,MAAM;;AAE5B,kBAAU,KAAK,cAAc,MAAM;AAC/B,eAAK,cAAc,MAAM;;AAE7B,aAAK,QAAQ;;MAEjB;AACI,YAAI,MAAM,QAAQ;AACd,iBAAO,KAAK;;AAEhB;AACA,YAAI,KAAK;AACL,cAAI,KAAK,aAAa;AAClB,yBAAa,CAAC,MAAM;;AAGpB,yBAAa;;;AAIjB,cAAI,KAAK,aAAa;AAClB,yBAAa,CAAC,MAAM;;AAGpB,yBAAa;;;AAGrB,YAAI,KAAK;AACL,yBAAe,KAAK,aAAa;AACjC,4BAAkB,OAAO,IAAI,WAAS;AACtC,cAAI,MAAM,QAAQ;AACd,mBAAO,WAAW,OAAO,WAAW,OAAO;;AAG3C,mBAAO,CAAC,YAAY,OAAO,WAAW,OAAO;;;AAIjD,iBAAO;;;UAGX;AACA,eAAO,KAAK,aAAa,iBAAiB,OAAO,KAAK,cAAc;;UAEpE;AACA,eAAO,KAAK,aAAa,oBAAoB,OAAO,KAAK,cAAc;;MAG3E;AACI,cAAM,6BAA6B;AACnC,YAAI,KAAK,gBAAgB;AACrB,eAAK,aAAa,6BAA6B;;AAEnD,YAAI,KAAK,iBAAiB;AACtB,eAAK,cAAc,6BAA6B;;;MAGxD;AACI,wBAAe;UACX,WAAa,KAAK;;AAGtB,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;aAGJ;AACH,yBAAiB,YAAY,QAAO;AACpC,eAAO,QAAO;AAEd,YAAI,QAAO,mBAAmB;AAC1B,gBAAM,IAAI,oBAAoB;;AAIlC,0BAAkB;AAClB,kBAAU,WAAW;AACrB,eAAO,IAAI,IAAI;;;AAIvB,kBAAc,YAAY;AAC1B,kBAA4B;ACle5B;;;;;;;;;AA+DO;AACH,aAAO,IAAI,WAAW;;AA2BnB;AACH,aAAO,IAAI,IAAI;;AAmBZ;AACH,aAAO,IAAI,KAAK;;AAsBb;AACH,aAAO,IAAI,UAAU;;AAuBlB;AACH,aAAO,IAAI,MAAM;;AAkBd;AACH,aAAO,IAAI,UAAQ;;AA0BhB;AACH,aAAO,IAAI,gBAAgB;;AAuBxB;AACH,aAAO,IAAI,OAAO;;AAoBf;AACH,aAAO,IAAI,SAAO;;AAqCf;AACH,aAAO,IAAI,gBAAgB;;AAoBxB;AACH,aAAO,IAAI,SAAO;;AA+Bf;AACH,aAAO,IAAI,gBAAgB;;AAiCxB;AACH,aAAO,IAAI,WAAW;;AA0BnB;AACH,aAAO,IAAI,aAAa;;AAarB;AACH,aAAO,IAAI,gBAAgB;;AAkCxB;AACH,aAAO,IAAI,aAAW;;AAmCnB;AACH,aAAO,IAAI,MAAM;;AAYd;
AACH,aAAO,IAAI,QAAQ;;AAkChB;AACH,aAAO,IAAI,iBAAiB;;AAqBzB;AACH,aAAO,IAAI,QAAQ;;AAgBhB;AACH,aAAO,IAAI,aAAa;;AAyBrB;AACH,aAAO,IAAI,UAAQ;;AA8BhB;AACH,aAAO,IAAI,QAAQ;;AAahB;AACH,aAAO,IAAI,UAAU;;AAuBlB;AACH,aAAO,IAAI,MAAI;;AAoBZ;AACH,aAAO,IAAI,QAAQ;;AAsBhB;AACH,aAAO,IAAI,YAAY;;AAoBpB;AACH,aAAO,IAAI,UAAQ;;AAoBhB;AACH,aAAO,IAAI,UAAQ;;AAqBhB;AACH,aAAO,IAAI,WAAS;;AAwBjB;AACH,aAAO,IAAI,IAAI;;AAwBZ;AACH,aAAO,IAAI,mBAAmB;;AAsB3B;AACH,aAAO,IAAI,mBAAmB;;AAyB3B;AACH,aAAO,IAAI,cAAc;;AActB;AACH,aAAO,IAAI,iBAAiB;;AAEzB;AACH,aAAO,iBAAiB;;AAIrB;AACH,aAAO,iBAAiB;;AAyBrB;AACH,aAAO,IAAI,iBAAiB;;AAEzB;AACH,aAAO,iBAAiB;;AAIrB;AACH,aAAO,iBAAiB;;AAuBrB;AACH,aAAO,IAAI,iBAAiB;;AAEzB;AACH,aAAO,iBAAiB;;AAIrB;AACH,aAAO,iBAAiB;;AAWrB;AACH,aAAO,IAAI,uBAAuB;;AAgB/B;AACH,aAAO,IAAI,uBAAuB;;AAW/B;AACH,aAAO,IAAI,mBAAmB;;AAgB3B;AACH,aAAO,IAAI,mBAAmB;;AAW3B;AACH,aAAO,IAAI,aAAa;;AAuBrB;AACH,aAAO,IAAI,aAAa;;AAuBrB;AACH,aAAO,IAAI,aAAa;;AA0BrB;AACH,aAAO,IAAI,IAAI;;AA+CZ;AACH,aAAO,IAAI,QAAQ;;AAyBhB;AACH,aAAO,IAAI,KAAK;;AA+Cb;AACH,aAAO,IAAI,SAAS;;AA0BjB;AACH,aAAO,IAAI,UAAU;;AA+ClB;AACH,aAAO,IAAI,cAAc;;AA6BtB;AACH,aAAO,IAAI,WAAW;;AAmCnB;AACH,aAAO,IAAI,eAAe;;AA8DvB;AACH,aAAO,IAAI,IAAI;;AASZ;AACH,aAAO,IAAI,gBAAgB;;AAIxB;AACH,aAAO,IAAI,cAAc;;AAgDtB;AACH,aAAO,IAAI,gBAAgB;;AAGxB,4BAAwB;AACxB,4BAAwB;AACxB,sBAAkB;AAClB,sBAAkB;AAyBlB;AACH,aAAO,IAAI,cAAc;;AA0BtB;AACH,aAAO,IAAI,gBAAgB;;AAiCxB;AACH,aAAO,IAAI,aAAa;;AAyBrB;AACH,aAAO,IAAI,QAAQ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACpjDhB;AACH,aAAO,eAAuB,OAAO;;AAmBlC;AACH,aAAO,qBAA2B,OAAO;;AAqBtC;AACH,aAAO,0BAAkC,OAAO;;AAoB7C;AACH,aAAO,oBAA4B,OAAO;;AAavC;AACH,aAAO,0BAAgC,OAAO;;AAqC3C;AACH,aAAO,UAAkB,OAAO;;AAqC7B;AACH,aAAO,OAAe,OAAO;;AAuB1B;AACH,aAAO,gBAAuB,OAAO;;AAsBlC;AACH,aAAO,kBAAyB,OAAO;;AAoBpC;AACH,aAAO,4BAAmC,OAAO;;AAE9C;AACH,aAAO,4BAAmC,OAAO;;AAE9C;AACH,aAAO,4BAAmC,OAAO;;AAoB9C;AACH,aAAO,mBAAwB,OAAO;;AAEnC;AACH,aAAO,mBAAwB,OAAO;;AAEnC;AACH,aAAO,mBAAwB,OAAO;;;;;;;;;;;;;;;;;;;;AC9R1C;;;;;;;;;;;;;ACAA;;;;;;;;;AAoBO;AACH,aAAO,IAAI,KAAK;;AAWb;AACH,aAAO,GAAgB;;AAWpB;AACH,aAAO,GAAgB;;;;;;;;AC7C3B;;;;;;;;;2BAc8B;MAC1B;AACI,cAAM,GAAG;AAET,aAAK,QAAQ;;MAEjB;AACI,YAAI,CAAE,mBAAiB;AACnB,gBAAM,IAAI,MAAM;;AAEpB,aAAK,QAAQ;;;AAGrB;AACI,aAAO,UAAU;;AAErB;AACI,aAAO,UAAU;;gCAMc;MAC/B;AACI;AACA,YAAI,QAAQ;AACR,iBAAO;;AAEX,YAAI,KAAK;AACL,gBAAM,IAAI,oBAAoB;;AAElC,aAAK,UAAU,KAAK,WAAW;AAC/B,aAAK,WAAW,KAAK,IAAI,KAAK,YAAY;AAC1C,aAAK,WAAW,KAAK,YAAY;AACjC,aAAK,UAAU,KAAK,WAAW;AAC/B,aAAK,OAAO,KAAK,QAAQ;AACzB,aAAK,WAAW,KAAK;AACrB,YAAI,CAAC,QAAQ,OAAO,OAAO,QAAQ,KAAK,UAAU;AAC9C,kBAAQ,KAAK,uBAAuB,KAAK;AAEzC,eAAK,OAAO;;AAEhB,YAAI,KAAK,SAAS;AACd,eAAK,cAAc;mBAEd,KAAK,SAAS;AACnB,eAAK,cAAc;;AAInB,cAAI,KAAK,QAAQ,QAAQ,WAAW;AAChC,iBAAK,cAAc;;AAGnB,iBAAK,cAAc;;;AAG3B,YAAI,KAAK,gBAAgB;AACrB,eAAK,YAAY;;;YAGnB;AACF,aAAK,OAAO;AACZ,aAAK,eAAe;AACpB,YAAI,KAAK,YAAY;AACjB,eAAK,OAAO,KAAK;;AAGjB,eAAK,OAAO,KAAK,gBAAgB,SAAO,WAAW;;;YAGrD;AACF,cAAM,qBAAqB;AAC3B,wBAAgB,KAAK,gBAAgB;AACrC,YAAI,WAAW;AACX;;AAEJ,YAAI,KAAK,YAAY,UAAU,KAAK,UAAU,KAAK;AAC/C,eAAK,OAAO;AACZ,eAAK,OAAO;;AAIZ,eAAK;AACL,cAAI,KAAK,QAAQ,KAAK;AAClB,iBAAK,eAAe;AACpB,iBAAK,MAAM,eAAe;;;;YAKhC;AACF,YAAI,KAAK,eAAe,KAAK,KAAK;AAC9B,kBAAQ,IAAI,SAAS,KAAK;;;MAGlC;AACI,YAAI,QAAQ;AACR,iBAAO;;AAEX,6BAAqB,KAAK,KAAK;AAC/B,YAAI,gBAAgB;AAChB,kBAAQ,KAAK,4BAA4B,KAAK,oDAChB,OAAO,KAAK;;AAE9C,eAAO;;;AA8CR;AACH,aAAO,IAAI,cAAc;;AAEjB,sBAAa,CAAE;ACzK3B;;;;;;;;;ACAA;;;;;;;;;;;;;;;;;AAkBO;AACN,IAAA;AACG,gBAAS,UAAS,gBAAgB,KAAK;AACvC,gBAAS,UAAS,cAAc,KAAK;AACrC,gBAAS,UAAS,eAAe,KAAK;AACtC,gBAAS,UAAS,cAAc,KAAK;AACrC,gBAAS,UAAS,cAAc,KAAK;AACrC,gBAAS,UAAS,cAAc,KAAK;AACrC,gBAAS,UAAS,aAA
a,KAAK;AACpC,gBAAS,UAAS,eAAe,KAAK;AACtC,gBAAS,UAAS,kBAAkB,KAAK;AACzC,gBAAS,UAAS,cAAc,KAAK;AACrC,gBAAS,UAAS,aAAa,MAAM;AACrC,gBAAS,UAAS,cAAc,MAAM;AACtC,gBAAS,UAAS,eAAe,MAAM;AACvC,gBAAS,UAAS,eAAe,MAAM;AACvC,gBAAS,UAAS,iBAAiB,MAAM;AACzC,gBAAS,UAAS,kBAAkB,OAAO;AAC3C,gBAAS,UAAS,mBAAmB,OAAO;AAC5C,gBAAS,UAAS,kBAAkB,OAAO;AAC3C,gBAAS,UAAS,kBAAkB,OAAO;AAC3C,gBAAS,UAAS,kBAAkB,OAAO;AAC3C,gBAAS,UAAS,iBAAiB,OAAO;AAC1C,gBAAS,UAAS,mBAAmB,OAAO;AAC5C,gBAAS,UAAS,sBAAsB,OAAO;AAC/C,gBAAS,UAAS,kBAAkB,OAAO;AAC3C,gBAAS,UAAS,iBAAiB,OAAO;AAC1C,gBAAS,UAAS,kBAAkB,OAAO;AAC3C,gBAAS,UAAS,mBAAmB,OAAO;AAC5C,gBAAS,UAAS,mBAAmB,OAAO;AAC5C,gBAAS,UAAS,qBAAqB,OAAO;OAC/C,YAAa,YAAW;AACpB;AACN,IAAA;AAEG;AACC,MAAA;AACG,iCAAwB,yBAAwB,YAAY,KAAK;AACjE,iCAAwB,yBAAwB,QAAQ,KAAK;AAC7D,iCAAwB,yBAAwB,QAAQ,KAAK;SAC9D,0BAA0B,UAAS,2BAA4B,WAAS,0BAA0B;OACtG,YAAa,YAAW;AC3D3B;;;;;;;;;;;;;;;;AAgBA,uBAAmB;AA0BZ;AACH,uBAAiB;QACb,UAAU;QACV,UAAU;QACV,QAAQ;QACR,OAAO;QACP,gBAAgB;;AAEpB,iBAAW,QAAQ;;AAShB;AACH,aAAO,WAAW;;AASf;AACH,aAAO,WAAW;;ACtEtB;;;;;;;;;;;;;;;;AAiBO;AACH,yBAAmB,KAAK,YAAY;AACpC,UAAI,cAAc,WAAW,oBAAoB;AAC7C,sBAAc,WAAW;AACzB,oBAAY,WAAW,kBAAkB,IACrC,SACC,WAAW,kBAAkB,SAAY,QAAQ,IAC9C,WAAW;AACnB,YAAI,WAAW,SAAS;AACpB,iBAAO,UAAU,KAAK,WAAW,WAAW,kBAAkB,WAAW,SAAS;;AAEtF,YAAI,WAAW,SAAS;AACpB,yBAAe,KAAK,WAAW,MAAM,OAAO;AAC5C,iBAAO,OAAO,IAAI,UAAQ,UAAU,MAAM,WAAW,SAAS;;AAElE,wBAAe,UAAU,KAAK,WAAW,MAAM,OAAO,IAAI,WAAW,SAAS;AAC9E,sBAAa,QAAO;AACpB,eAAO,WAAW,SAAS,WACvB,MAAK,KACL,eAAmB,QAAO,OAAO;;AAEzC,wBAAkB,KAAK,WAAW;AAClC,aAAO,aAAa,UAAU;;AAS3B;AACH,iCAA0B,cAAc;AACxC,UAAI,mBAAmB;AACnB,wBAAe,gBAAgB,yBAAyB;AACxD,YAAI,WAAU;AACV,iBAAO;;;AAGf,wBAAkB,QAAQ,kBAAkB,KAAK;AAC7C,eAAO,CAAC,CAAC,WAAW,yBAAyB,UAAU;;AAE3D,aAAO,cAAc,SACjB,WAAW,yBAAyB,UAAU,YAAY,UAC1D;;AAOD;AACH,aAAO,WAAW,yBAAyB,MAAM,QAAQ;;AAQtD;AACH,iCAA0B,cAAc;AACxC,aAAO;QACH,yBAAyB,UAAU,WAAW,QAAQ;QACtD;;;AAGR;AACI,aAAO,CAAC,CAAC,YAAY,GAAG,QAAQ,cAAc;;AAE3C;AACH,oBAAc,KAAK,MAAM;AACzB,UAAI,MAAM,WAAW;AACjB,eAAO,CAAC,MAAM;;AAElB,uBAAiB,MAAM;AACvB,aAAO,CAAC,UAAU,OAAO,MAAM,MAAM,SAAS;;AAE3C;AACH,kBAAY;AACZ,mBAAa,GAAG,IAAI,IAAI,QAAQ,KAAK;AACjC,YAAI,KAAK,IAAI,MAAM,GAAG,IAAI;;AAE9B,aAAO;;AAEJ;AACH,iBAAU,cAAc,OAAO,MAAM,WAAW;AAChD,UAAI,SAAQ;AAER,eAAM,cAAc,oBAAoB,MAAM,WAAW;AACzD,gCAAwB,CAAC,CAAC,GAAG,IAAI,CAAC,GAAG,IAAI,CAAC,GAAG,IAAI,CAAC,GAAG;AACrD,qBAAa,GAAG,IAAI,GAAG;AACnB,0BAAgB,GAAG,KAAK,KAAI,IAAI;AAChC,0BAAgB,GAAG,KAAK,KAAI,IAAI,IAAI;;AAExC,eAAO;;AAEX,aAAO;;AAWJ;AACH,aAAO,QAAO,OAAO,UAAS,MAAM;;AC9HxC;;;;;;;;;;;;;;;;AAgBO,iBAAa;MAChB;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU,CAAC,CAAE,OAAS,GAAG,KAAO,GAAG,MAAQ,WAAW,MAAQ;;MAElE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ
,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;;MAG3C;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;;MAG3C;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;;;;;;AClL/E;;;;;;;;;;;;;;;;AAgBO,mBAAa;MAChB;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,kBAAkB,MAAQ,gBAAgB,MAAQ;UAC9D,CAAE,QAAU,kBAAkB,MAAQ,gBAAgB,MAAQ;;;MAGtE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;UACtC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;;QAE1C,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,C
AAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;UAAQ;YACvE,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;MAI5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;UAAQ;YACvE,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;MAI5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;;QAE3C,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;UAAQ;YACvE,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;MAI5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;;QAE1C,OAAS;UACL;YACI,QAAU;YACV,MA
[unreadable generated content: this span consisted of minified bundle output and base64-VLQ source-map mappings from the compiled dist build artifacts; nothing human-readable can be reconstructed from it, so only this placeholder is kept]
AAa,KAAK;;;AAGvC,uBAAiB;ACvDxB;;;;;;;;;;;;;;;;AAmBO,wBAAkB;AACrB,cAAQ,KAAK;aACJ;aACA;AACD,iBAAO,CAAC,UAAgB,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,QAAQ,MAAM,WAAW,UAAU,cAAc,YAAY,MAAM,WAAW,UAAU,cAAc,UAAU,MAAM,WAAW,UAAU,cAAc,SAAS,MAAM,WAAW,UAAU,cAAc,WAAW,MAAM,WAAW;;aAE1T;AACD,iBAAO,CAAC,UAAgB,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,QAAQ,MAAM,WAAW,UAAU,cAAc,YAAY,MAAM,WAAW,UAAU,cAAc,UAAU,MAAM,WAAW,UAAU,cAAc,SAAS,MAAM,WAAW,UAAU,cAAc,WAAW,MAAM,WAAW;;aAE1T;AACD,iBAAO,CAAC,2BAAiC,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,UAAU,MAAM,WAAW,UAAU,cAAc,QAAQ,MAAM,WAAW,UAAU,cAAc,SAAS,MAAM,WAAW,UAAU,cAAc,QAAQ,MAAM,WAAW;;aAEnR;AACD,iBAAO,CAAC,SAAc,cAAc,KAAK,MAAM,WAAW;;aAEzD;AACD,iBAAO,CAAC,WAAiB,cAAc,KAAK,MAAM,WAAW;;aAE5D;AACD,iBAAO,CAAC,cAAoB,cAAc,iBAAiB,MAAM,WAAW,UAAU,cAAc,eAAe,MAAM,WAAW,UAAU,cAAc,gBAAgB,MAAM,WAAW,UAAU,cAAc,gBAAgB,MAAM,WAAW;;;AAGtP,gBAAM,UAAU,aAAa,KAAK;;;AAGvC,uBAAiB;AC5CxB;;;;;;;;;;;;;;;;AAmBO,wBAAkB;AACrB,cAAQ,KAAK;aACJ;AACD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,2BAAiB,cAAc,YAAY,MAAM,WAAW;AAC5D,iBAAO,CAAC,KAAU,cAAc,KAAK,MAAM,WAAW,UAAU,MAAM;;aAErE;AACD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,2BAAiB,cAAc,YAAY,MAAM,WAAW;AAC5D,iBAAO,CAAC,KAAW,cAAc,KAAK,MAAM,WAAW,UAAU,MAAM;;aAEtE;AACD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,2BAAiB,cAAc,YAAY,MAAM,WAAW;AAC5D,iBAAO,CAAC,KAAU,cAAc,KAAK,MAAM,WAAW,UAAU,MAAM;;aAErE;AACD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,2BAAiB,cAAc,YAAY,MAAM,WAAW;AAC5D,iBAAO,CAAC,MAAU,cAAc,KAAK,MAAM,WAAW,UAAU,MAAM;;aAErE;AACD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,2BAAiB,cAAc,YAAY,MAAM,WAAW;AAC5D,iBAAO,CAAC,IAAU,cAAc,KAAK,MAAM,WAAW,UAAU,MAAM;;aAErE;AACD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,2BAAiB,cAAc,YAAY,MAAM,WAAW;AAC5D,iBAAO,CAAC,IAAU,cAAc,KAAK,MAAM,WAAW,UAAU,MAAM;;aAErE;AACD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,iBAAO,CAAC,OAAa,cAAc,KAAK,MAAM,WAAW,UAAU;;aAElE;AACD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,iBAAO,CAAC,OAAa,cAAc,KAAK,MAAM,WAAW,UAAU;;aAElE;AACD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,2BAAiB,cAAc,YAAY,MAAM,WAAW;AAC5D,iBAAO,CAAC,KAAW,cAAc,KAAK,MAAM,WAAW,UAAU,MAAM;;aAEtE;AACD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,4BAAkB,cAAc,aAAa,MAAM,WAAW;AAC9D,2BAAgB,cAAc,WAAW,MAAM,WAAW;AAC1D,iBAAO,CAAC,QAAa,cAAc,KAAK,MAAM,WAAW,UAAU,MAAM,WAAW;;;AAGpF,gBAAM,UAAU,aAAa,KAAK;;;AAGvC,uBAAiB;AC1ExB;;;;;;;;;;;;;;;;AAoBO,wBAAkB;AACrB,cAAQ,KAAK;aACJ;aACA;AACD,oBAAU,cAAc,KAAK,MAAM,WAAW;AAC9C,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,uBAAa,cAAc,WAAW,MAAM,WAAW;AACvD,mBAAS,OAAO,MAAM,GAAG;AACzB,iBAAO,CAAC,QAAa,QAAQ;;aAE5B;aACA;AACD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,yBAAc,cAAc,KAAK,MAAM,WAAW;AAClD,0BAAgB,cAAc,WAAW,MAAM,WAAW;AAC1D,iBAAO,CAAC,OAAa,QAAO,MAAW,SAAS,UAAU;;aAEzD;aACA;AACD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,yBAAc,cAAc,KAAK,MAAM,WAAW;AAClD,iBAAO,CAAC,SAAc,QAAO;;aAE5B;AAED,wBAAc,cAAc,SAAS,MAAM,WAAW;AAEtD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,iBAAO,CAAC,OAAY,cAAc,KAAK,MAAM,WAAW,UAAU,OAAO;;aAExE;AACD,wBAAc,cAAc,SAAS,MAAM,WAAW;AACtD,sBAAY,cAAc,OAAO,MAAM,WAAW;AAClD,0BAAgB,cAAc,WAAW,MAAM,WAAW;AAC1D,4BAAkB,cAAc,aAAa,MAAM,WAAW;AAC9D,0BAAgB,cAAc,WAAW,MAAM,WAAW;AAC1D,+BAAqB,cAAc,gBAAgB,MAAM,WAAW;AACpE,8BAAoB,cAAc,eAAe,MAAM,WAAW;AAClE,iCAAuB,cAAc,kBAAkB,MAAM,WAAW;AACxE,0BAAe,cAAc,KAAK,MAAM,WAAW;AACnD,iBAAO,CAAC,cAAmB,SAAQ,OAAO,KAAK,SAAS,WAAW,SAAS,cAAc,aAAa;;aAEtG;AACD,iBAAO,KAAK;AACR,yBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,4BAAgB,cAAc,WAAW,MAAM,WAAW;AAG1D,0BAAc,QAAQ,GAAG;AACzB,kCAAsB,QAAc,QAAQ,IAAI;AAChD,2BAAe,QAAQ,IAAI;AACvB,gCAAkB,aAAiB,QAAO,OAAO;AACjD,kBAAI,CAAC,aACD,CAAC,aAAiB,QAAc,SAAQ,OAAO;AAC/C,sBAAM,IAAI,MAAM;;AAEpB,qBAAO,YAAY,UAAS,SAAc,SAAQ;;AAEtD,mBAAO,CAAC,MAAY,QAAQ;;;aAG/B;AACD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,0BAAe,cAAc,UAAU,MAAM,WAAW;AACxD,iBAAO,QAAc,SAAQ;;aAE5B;AACD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,iBAAO,CAAC,MAAW,
cAAc,KAAK,MAAM,WAAW,UAAU;;aAEhE;aACA;AACD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,kCAAwB,cAAc,mBAAmB,MAAM,WAAW;AAC1E,0BAAe,cAAc,KAAK,MAAM,WAAW;AACnD,iBAAO,OAAY,SAAQ,iBAAiB;;aAE3C;AACD,0BAAgB,cAAc,WAAW,MAAM,WAAW;AAC1D,yBAAe,cAAc,UAAU,MAAM,WAAW;AACxD,wBAAc,cAAc,SAAS,MAAM,WAAW;AACtD,iBAAO,CAAC,UAAgB,SAAS,QAAQ;;aAExC;AACD,oBAAU,cAAc,KAAK,MAAM,WAAW;AAC9C,0BAAgB,cAAc,WAAW,MAAM,WAAW;AAC1D,iBAAO,CAAC,SAAe,GAAG;;aAEzB;AACD,0BAAgB,cAAc,iBAAiB,MAAM,WAAW;AAChE,wBAAc,cAAc,eAAe,MAAM,WAAW;AAC5D,+BAAqB,cAAc,gBAAgB,MAAM,WAAW;AACpE,+BAAqB,cAAc,gBAAgB,MAAM,WAAW;AACpE,iBAAO,CAAC,cAAoB,SAAS,cAAc,OAAO,aAAa,UAAU,aAAa,QACtF,eACA,MAAW,cAAc,aAAa;;;AAG9C,gBAAM,UAAU,aAAa,KAAK;;;AAGvC,uBAAiB;ACzHxB;;;;;;;;;;;;;;;;AAmBO,wBAAkB;AACrB,cAAQ,KAAK;aACJ;AACD,iBAAO,CAAC,IAAU,cAAc,KAAK,MAAM,WAAW;;aAErD;AACD,iBAAO,CAAC,KAAW,cAAc,KAAK,MAAM,WAAW;;aAEtD;AACD,iBAAO,CAAC,KAAW,cAAc,KAAK,MAAM,WAAW;;aAEtD;AACD,iBAAO,CAAC,MAAY,cAAc,KAAK,MAAM,WAAW;;;AAGxD,gBAAM,UAAU,aAAa,KAAK;;;AAGvC,uBAAiB;ACrCxB;;;;;;;;;;;;;;;;AAmBO,wBAAkB;AACrB,cAAQ,KAAK;aACJ;AACD,iBAAO,CAAC,MAAW,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,SAAS,MAAM,WAAW;;aAExG;AACD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,iBAAO,CAAC,WAAiB,cAAc,KAAK,MAAM,WAAW,UAAU;;aAEtE;AACD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,iBAAO,CAAC,QAAc,cAAc,KAAK,MAAM,WAAW,UAAU;;aAEnE;AACD,iBAAO,CAAC,SAAc,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,SAAS,MAAM,WAAW;;aAE3G;AACD,iBAAO,CAAC,UAAgB,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,WAAW,MAAM,WAAW,UAAU,cAAc,QAAQ,MAAM,WAAW;;aAEhK;aACA;AACD,iBAAO,CAAC,KAAU,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,WAAW,MAAM,WAAW,UAAU,cAAc,iBAAiB,MAAM,WAAW;;aAEnK;AACD,6BAAmB,cAAc,cAAc,MAAM,WAAW;AAChE,2BAAiB,cAAc,YAAY,MAAM,WAAW;AAC5D,iBAAO,CAAC,eAAqB,cAAc,KAAK,MAAM,WAAW,UAAU,YAAY;;aAEtF;AACD,6BAAmB,cAAc,cAAc,MAAM,WAAW;AAChE,wBAAc,cAAc,SAAS,MAAM,WAAW;AACtD,iBAAO,CAAC,eAAqB,cAAc,KAAK,MAAM,WAAW,UAAU,YAAY;;aAEtF;AACD,4BAAkB,cAAc,aAAa,MAAM,WAAW;AAC9D,6BAAmB,cAAc,cAAc,MAAM,WAAW,SAAS;AACzE,iBAAO,CAAC,cAAmB,cAAc,KAAK,MAAM,WAAW,UAAU,WAAW;;aAEnF;AACD,iBAAO,CAAC,YAAkB,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,SAAS,MAAM,WAAW;;;AAGhH,gBAAM,UAAU,aAAa,KAAK;;;AAGvC,uBAAiB;AChExB;;;;;;;;;;;;;;;;AA2CO;AACH,oBAAe;AACX,gBAAQ,MAAK;eACJ;AACD,mBAAO,KAAS,MAAM,UAAqB,OAAM,YAAW;eAC3D;AACD,mBAAO,KAAS,MAAM,YAAoB,OAAM,YAAW;eAC1D;AACD,mBAAO,YAAkB,OAAM,YAAW;eACzC;AACD,mBAAO,KAAS,MAAM,YAAsB,OAAM,YAAW;eAC5D;AACD,mBAAO,KAAS,MAAM,YAAmB,OAAM,YAAW;eACzD;AACD,mBAAO,YAAkB,OAAM,YAAW;eACzC;AACD,mBAAO,KAAS,MAAM,YAAqB,OAAM,YAAW;eAC3D;AACD,mBAAO,KAAS,MAAM,YAAgB,OAAM,YAAW;eACtD;AACD,mBAAO,KAAS,MAAM,YAAgB,OAAM,YAAW;eACtD;AACD,mBAAO,KAAS,MAAM,YAAkB,OAAM,YAAW;eACxD;AACD,mBAAO,KAAS,MAAM,YAAmB,OAAM,YAAW;eACzD;AACD,mBAAO,KAAS,MAAM,YAAwB,OAAM,YAAW;eAC9D;AACD,mBAAO,KAAS,MAAM,YAAoB,OAAM,YAAW;eAC1D;AACD,mBAAO,KAAS,MAAM,YAAoB,OAAM,YAAW;eAC1D;AACD,mBAAO,KAAS,MAAM,YAAmB,OAAM,YAAW;eACzD;AACD,mBAAO,KAAS,MAAM,YAAyB,OAAM,YAAW;eAC/D;AACD,mBAAO,YAAoB,OAAM,YAAW,UAAS;eACpD;AACD,6BAAiB,gBAAgB,MAAK;AACtC,gBAAI,YAAY,SAAS;AACrB,qBAAO,SAAS,eAAe,IAAI,cAAc,OAAM,YAAW;;AAGlE,oBAAM,UAAU,aAAa,MAAK;;;AAGtC,kBAAM,UAAU,eAAe,MAAK;;SAI7C,MAAM,WAAW;AACpB,UAAI,WAAmB;AACnB,eAAO,MAAM,KAAK,WAAU,GAAG,OAAO;;AAE1C,aAAO,GAAG,OAAO;;;MCvFjB,wBAAwB,qBAAqB,oBAAoB,kBAAkB;AAC/E,aAAK,YAAY;AACjB,aAAK,iBAAiB;AACtB,aAAK,gBAAgB;AACrB,aAAK,cAAc;AACnB,aAAK,cAAc,CAAE,IAAI,GAAG,WAAW,IAAI,aAAa;AACxD,aAAK,WAAW,CAAC,KAAK;AACtB,aAAK,SAAS;AACd,aAAK;;MAET;AACI,eAAO,CAAE,IAAI,WAAW,aAAa;;UAOrC;AACA,YAAI,KAAK,aAAa;AAClB,eAAK,WAAW;AAChB,eAAK;;;UAGT;AACA,eAAO,KAAK;;UAKZ;AACA,eAAO,KAAK,mBAAmB;;UAM/B;AACA,eAAO,KAAK;;MAEhB;AACI,sBAAc;AACd,qBAAa,GAAG,IAAI,KAAK,SAAS,SAAS,GAAG;AAC1C,4BAAiB,KAAK,SAAS,MAAM,GAAG,KAAK,SAAS,SAAS;AAC/D,gBAAM,KAAK,KAAK,qBAAqB;;AAEzC,cAAM,KAAK;AACX,aAAK,qBAAqB;;
MAE9B;AACI,eAAO,YACH,UACK,IAAI,aAAY,QAAQ,OAAO,KAAK,QAAQ,gBAAgB,IAC7D,KACA,GAAG,QAAQ,aAAa,QAAQ,eAC/B,KAAK,OACV;;MAMR;AACI,YAAI,KAAK;AACL,eAAK;AACL,eAAK,WAAW,KAAK,SAAS;AAC9B,eAAK,SAAS,KAAK,KAAK,SAAS,KAAK,QAAQ;AAC9C,eAAK,mBAAmB,QAAQ,KAAK,qBAAqB,KAAK;;;MAOvE;AACI,YAAI,KAAK,YAAY,KAAK,SAAS,SAAS;AACxC,eAAK,WAAW,KAAK,SAAS;AAC9B,eAAK,SAAS,OAAO;AACrB,eAAK,kBAAkB;;AAGvB,gBAAM,IAAI,MAAM;;;MAOxB;AACI,YAAI,KAAK,YAAY,KAAK,SAAS,SAAS;AACxC,eAAK,WAAW,KAAK,SAAS;AAC9B,eAAK;AACL,0BAAgB,OAAO,OAAO,IAAI,KAAK,SAAS,KAAK,SAAS,SAAS;AACvE,kBAAQ,eAAe;AACvB,kBAAQ,KAAK,KAAK;AAClB,eAAK,SAAS,OAAO,IAAI,GAAG;AAC5B,eAAK,mBAAmB,OAAO,GAAG,GAAG,KAAK,qBAAqB,KAAK;;AAGpE,gBAAM,IAAI,MAAM;;;MAGxB;AACI,eAAO,KAAK,UAAU;;MAE1B;AACI,aAAK,eAAe,YAAY,MAAM;;MAE1C;AACI,eAAO,KAAK,eAAe;;MAE/B;AACI,aAAK,cAAc,WAAW,MAAM;;MAExC;AACI,eAAO,KAAK,cAAc;;MAE9B;AACI,0BAAkB,KAAK;AACnB,eAAK,eAAe,KAAK,cAAc;;AAE3C,0BAAkB,KAAK;AACnB,eAAK,cAAc,KAAK,cAAc;;;;ACpIlD;;;;;;;;;;;;;;;;AAyBO;AACH,wBAAkB,IAAI;AACtB,4BAAsB;AACtB,wBAAkB;AAClB,uBAAiB;AAGjB,mBAAa,IAAI;AACjB,6BAAuB,OAAO,KAAK,QAAQ,IAAI,UAAQ,cAAc,MAAM;AAC3E,0BAAoB;AACpB,UAAI,aAAa;AACb,wBAAgB,UAAU,IAAI,UAAQ,cAAc,KAAK,MAAM;;AAEnE,uBAAiB,CAAC,GAAG;AACrB,aAAO,SAAS,SAAS;AACrB,qBAAa,SAAS;AACtB,YAAI,cAAc,SAAS,eAAe,SAAS,YAAY;AAC3D,cAAI,eAAe;AACf,0BAAc;AACd,yBAAa,YAAY,SAAS,IAAI,WAAS,MAAM,MAChD,OAAO,UAAQ,UAAU,IAAI;;;AAG1C,kBAAU,IAAI,KAAK;AAEnB,YAAI,UAAU,KAAK,SAAS;AACxB;;AAGJ,YAAI,eAAe,QAAQ,KAAK,UAAU;AACtC;;AAGJ,YAAI,cAAc,QAAQ,KAAK,UAAU;AACrC;;AAEJ,YAAI,KAAK,OAAO,WAAW;AACvB,wBAAc,KAAK,KAAK;AACxB;;AAEJ,aAAK,OAAO,QAAQ;AAEhB,cAAI,KAAK,IAAI,OAAM;AACf;;AAEJ,eAAK,IAAI,OAAM;AACf,mBAAS,KAAK;;;AAGtB,aAAO,CAAE,QAAQ,SAAS,WAAW,eAAe,aAAa;;AAM9D;AACH,aAAQ,WAAW,UAAW;AAC9B,uBAAiB;AACjB,yBAAmB,OAAO,KAAK,QAC1B,IAAI,UAAQ,cAAc,MAAM,IAChC,IAAI,UAAQ,OAAM,MAAM;AAC7B,wBAAkB,OAAM;AACxB,iBAAW,QAAQ;AACf,YAAI,UAAU,IAAI,OAAM;AACpB,mBAAS,KAAK;;;AAGtB,aAAM,QAAQ,QAAQ;AAClB,YAAI,UAAU,IAAI,OAAO;AACrB,mBAAS,KAAK;;;AAGtB,UAAI,aAAa;AACb,kBAAU,QAAQ;AACd,cAAI,UAAU,IAAI,KAAK;AACnB,qBAAS,KAAK;;;;AAI1B,mBAAa,IAAI;AACjB,2BAAqB;AACrB,aAAO,SAAS,SAAS;AACrB,qBAAa,SAAS;AACtB,aAAK,IAAI,KAAK;AACd,YAAI,CAAC,UAAU,KAAK;AAChB,uBAAa,KAAK;;AAEtB,aAAK,SAAS,QAAQ;AAClB,cAAI,CAAC,KAAK,IAAI,MAAM,SAAS,UAAU,IAAI,MAAM,SAC7C,MAAM,OAAO,MAAM,YAAS,KAAK,IAAI,OAAM;AAC3C,qBAAS,KAAK;;;;AAI1B,aAAO;;AAEX,6BAAyB;MACrB;MAAU;MAAS;MAAS;MAAQ;MAAiB;MACrD;MAAkB;MAAM;;AAE5B,8BAA0B;MACtB;MAAuB;MAAuB;MAAuB;;AAEzE,2BAAuB;MACnB;MAAa;MAAe;MAAqB;MACjD;MAAmB;;AAEhB;AACH,aAAO,iBAAiB,QAAQ,KAAK,OAAO;;AAEzC;AACH,aAAO,kBAAkB,QAAQ,KAAK,OAAO;;AAE1C;AACH,aAAO,eAAe,QAAQ,KAAK,OAAO;;AC3I9C;;;;;;;;;;;;;;;;;MA8BI;AACI,aAAK,QAAQ;AACb,aAAK,SAAS;AACd,aAAK,cAAc,IAAI;AACvB,aAAK,aAAa;AAClB,aAAK,YAAY;AACjB,aAAK,aAAa;AAClB,aAAK,uBAAuB;AAC5B,aAAK,WAAW,OAAM;AACtB,aAAK,UAAU,OAAM;AACrB,aAAK,aAAa,OAAM;AACxB,aAAK,aAAa,OAAM;AACxB,aAAK,aAAa,OAAM;AAExB,YAAI,OAAM,aAAa;AACnB,iBAAO,KAAK,OAAM,WAAW,QAAQ;AACjC,iBAAK,qBAAqB,QACtB,IAAI,cAAc,OAAM,UAAU,OAAO;;;;UAIrD;AACA,eAAO,KAAK,SAAS,KAAK,OAAO,YAAY,KAAK;;UAElD;AACA,eAAO,KAAK,SAAS,KAAK,OAAO,sBAC7B,KAAK;;UAET;AACA,eAAO,KAAK,SAAS,KAAK,OAAO,YAAY,KAAK;;UAElD;AACA,0BAAkB,OAAO,KAAK,WAAW,IAAI,SAAO,UAAU,KAAK,IAAI,aAAU,QAAO;AACxF,aAAK,aAAa,GAAG,OAAO,GAAG;AAC/B,aAAK,aAAa;;UAMlB;AACA,aAAK,mBAAmB;;UAExB;AACA,eAAO,KAAK,QAAQ,IAAI;AACpB,iBAAO;YACH,MAAM,KAAK;YACX,OAAO,KAAK,WAAW,WACnB,KAAK,WAAW,SAAS,QACzB;YACJ,OAAO,KAAK,WAAW,WACnB,KAAK,WAAW,SAAS,QACzB;;;;UAIZ;AACA,eAAO,KAAK,SAAS,IAAI;AACrB,iBAAO;YACH,MAAM,KAAK;YACX,OAAO,KAAK,WAAW,WACnB,KAAK,WAAW,SAAS,QACzB;YACJ,OAAO,KAAK,WAAW,WACnB,KAAK,WAAW,SAAS,QACzB;;;;UAIZ;AACA,eAAO,KAAK,QAAQ,IAAI,UAAQ,KAAK,gBAAgB,KAAK;;UAE1D;AACA,eAAO,KAAK,SAAS,IAAI;AACrB,uBAAa,KAAK,gBAAgB,KAA
K;AACvC,iBAAO,KAAK,gBAAiB,GAAG,QAAQ,KAAK,kBAAmB;;;UAGpE;AACA,eAAO,OAAO,KAAK,KAAK,YAAY,OAAO;AACvC,cAAI,OAAO,KAAK,WAAW,KAAK;AAChC,iBAAO;WACR;;MAEP;AACI,6BAAqB,OAAO,IAAI,UAAQ,KAAK,MAAM;AACnD,8BAAsB,QAAQ,IAAI,UAAQ,KAAK,MAAM;AACrD,eAAO,aAAa,KAAK,KAAK,aAAa,OACvC,cAAc,KAAK,KAAK;;MAMhC;AACI,8BAAsB,qBAAqB,QAAQ,SAAS,KAAK,WAAW,KAAK;AACjF,eAAQ,eAAe,aAAa,cAAe;AACnD,YAAI,eAAe;AACf,gBAAM,IAAI,MAAM,qCAAqC,YAAY,oCAC1C,YAAY,8GAEK;;AAE5C,YAAI,cAAc,SAAS;AACvB,2BAAiB,QAAQ,IAAI,OAAK,EAAE;AACpC,0BAAgB,OAAO,KAAK;AAC5B,gBAAM,IAAI,MAAM,+BAA+B,uCACvC,4CAA4C;;AAExD,eAAO,2BAA2B,KAAK,OAAO,KAAK,WAAW;;MAWlE;AACI,iBAAS,KAAK,UAAU;AACxB,sBAAc,OAAO,KAAK,QAAQ;AAClC,aAAK,YAAY;AACjB,aAAK,uBAAuB;AAC5B,kBAAU,KAAK,WAAW;AAC1B,aAAK,aAAa;AAClB,2BAAmB,MAAM,IAAI,UAAQ,KAAK,MAAM,MAAM,cAAc,MAAM;AAC1E,gCAAwB,QAAQ,IAAI,UAAQ,cAAc,MAAM;AAChE,0BAAkB,gBAAgB,IAAI,UAAQ,KAAK,MAAM,MAAM;AAE/D,YAAI,YAAY,WAAW;AACvB,wBAAc,KAAK;;AAEvB,+BAAuB,KAAK,kBAAkB,YAAY;AAE1D,2BAAmB,KAAK,YAAY,IAAI;AACxC,YAAI,gBAAgB;AAChB,yBAAe,KAAK,QAAQ,QAAQ;AACpC,eAAK,YAAY,IAAI,gBAAgB;;AAEzC,+BAAuB;AACvB,8BAAsB;AACtB,eAAO,KAAK;AACR,0BAAgB,IAAI,iBAAiB,KAAK,WAAW,gBAAgB,eAAe,KAAK;AACzF,6BAAmB,OAAO,OAAO,IAAI,KAAK;AAC1C,iBAAO,KAAK,QAAQ,QAAQ;AACxB,uCAA0B,cAAc;AACxC,4BAAgB;AAChB,oBAAQ,UAAS,OAAO;AACxB,uBAAW,YAAY;;AAE3B,gCAAsB,KAAK,mBAAmB;AAC9C,kDAAwC;AACxC,uBAAa,GAAG,IAAI,aAAa,QAAQ;AACrC,yBAAa,aAAa;AAC1B,gBAAI,CAAC,WAAW,KAAK;AACjB,8BAAgB,YAAU,MAAM,YAAY,SAAS,KAAK;AAC1D,kBAAI,WAAe;AACf,sBAAM,IAAI,MAAM,4BAA4B,KAAK;;AAGrD,yBAAW,KAAK,QAAQ;AACxB,mBAAK,uBAAuB,KAAK,MAAM,MAAM,YAAY,SAAS,eAAe,iBAAiB;;;AAI1G,cAAI,KAAK,UAAU;AACf,oBAAQ,QAAQ;;AAEpB,iBAAO,QAAQ,IAAI,UAAQ,UAAU,MAAM,YAAY;;;MAG/D;AACI,oBAAY,GAAG,OAAO,MAAM,IAAI,OAAO,KAAK,WACvC,IAAI,SAAO,UAAU,MACrB,IAAI,aAAW,QAAQ,IAAI,aAAU,QAAO;AACjD,eAAO,IAAI,IAAI;;MAEnB;AAGI,YAAI,KAAK,aAAa,aAAa,YAAY,QAAQ,cAAc;AACjE;;AAEJ,kBAAU,UAAU,QAAQ;AACxB,cAAI,WAAU;AACV,4CAAgC,QAAO,MAClC,iCAAgC,QAAO,OAAO,KAC3C,KAAK,SAAS;;;AAG9B,aAAK,OAAO,QAAQ;AAGhB,cAAI,OAAM,aAAa;AACnB,4BAAgB,6BAA6B,OAAM,MAAM,WAAW;AACpE,gBAAI,WAAW;AACX,sBAAQ,QAAQ;AACZ,oBAAI,WAAU,CAAC,cAAc,IAAI,QAAO;AACpC,iCAAc,gCAAgC,QAAO;AACrD,sBAAI,WAAU;AACV,4BAAO;AACP,2BAAO,gCAAgC,QAAO;6BAEzC,UAAS;AAGd,oDAAgC,QAAO;;;;;;;;YAiB7D;AACF,eAAO,KAAK,cAAc,QAAQ;;YAgBhC,qDAAqD,wBAAwB,oBAAoB;AACnG,YAAI,CAAC;AACD,mBAAS,KAAK,UAAU;AACxB,eAAK,YAAY;AACjB,eAAK,uBAAuB;AAC5B,oBAAU,KAAK,WAAW;AAC1B,eAAK,aAAa;;AAEtB,wBAAgB,IAAI,iBAAiB,KAAK,WAAW,gBAAgB,eAAe,KAAK;AAIzF,0BAAkB,MAAM,KAAK,uBAAuB,QAAQ,SAAS,SAAS;AAC9E,wBAAgB,QAAQ,IAAI,UAAQ,UAAU,MAAM,WAAW;AAE/D,0BAAkB,QAAQ,IAAI,OAAK,EAAE;AACrC,yBAAiB,OAAO,KAAK,QAAQ,IAAI,UAAQ,OAAO,MAAM;AAC9D,wBAAgB,IAAI,IAAI,CAAC,GAAG,WAAW,GAAG,UAAU,GAAG,KAAK;AAC5D,eAAO,KAAK,WAAW,QAAQ;AAC3B,8BAAoB,UAAU;AAC9B,sBAAY,QAAQ;AAChB,gBAAI,WAAU,CAAC,QAAO,cAAc,CAAC,QAAQ,IAAI,QAAO;AACpD,sBAAO;;;;AAKnB,YAAI,KAAK,UAAU;AACf,kBAAQ,QAAQ;;AAEpB,eAAO;;YAEL;AACF,6BAAqB,OAAO,OAAO;AAC/B,cAAI,KAAK,OAAO,QAAO,QAAQ;AAC/B,iBAAO;WACR;AACH,eAAO,KAAK,cAAc,cAAc,KAAK,aAAa,MAAM,gBAAgB;;YAa9E;AACF,sBAAc,OAAO,KAAK;AAC1B,2BAAmB,MAAM,IAAI,UAAQ,KAAK,MAAM,MAAM,cAAc,MAAM;AAC1E,gCAAwB,YAAY,IAAI,UAAQ,cAAc,MAAM;AACpE,0BAAkB,gBAAgB,IAAI,UAAQ,KAAK,MAAM,MAAM;AAE/D,YAAI,YAAY,WAAW;AACvB,wBAAc,KAAK;;AAEvB,eAAQ,WAAW,eAAe,aAAa,cAAe,qBAAqB,QAAQ,aAAa,KAAK,WAAW,KAAK;AAE7H,uBAAc;UACV,GAAG;UAAY,GAAG,KAAK,MAAM;UAAS,GAAI,KAAK,cAAc;UAC/D,IAAI;AACF,iBAAO,CAAE,MAAM,UAAU,QAAQ;;AAErC,2BAAmB,OAAO,OAAO,IAAI,KAAK;AAC1C,eAAO,KAAK,QAAQ,QAAQ;AACxB,qCAA0B,cAAc;AACxC,0BAAgB;AAChB,kBAAQ,UAAS,OAAO;AACxB,qBAAW,YAAY;;AAE3B,gDAAwC;AACxC,8BAAsB,KAAK,mBAAmB;AAC9C,sBAAc;AACd,eAAO,OAAM,SAAS;AAClB,2BAAiB,KAAK,aAAa,YAAY,QAAO,SAAS,YAAY,OAAO,eAAe,iBAAiB,iCAAiC;AACnJ,gBAAM,QAAQ,IAAI;;AAEtB,YAAI,eAA
e,QAAQ,CAAC;AACxB,kBAAQ,KAAK;;AAGjB,+BAAuB,YAClB,OAAO,UAAQ,CAAC,cAAc,SAC/B,CAAC,UAAU,KAAK,MAAM,YAAY,UACjC,IAAI,UAAQ,KAAK;AACtB,YAAI,eAAe,SAAS;AACxB,+BAAqB;AACrB,cAAI,eAAe;AACf,6BACI,wFAC+B;;AAEvC,gBAAM,IAAI,MAAM,+BAA+B,6CAChC,qDACP,mBAAmB;;AAE/B,eAAO;;MAEX;AACI,yBAAiB;AACjB,eAAO,OAAM,SAAS;AAClB,uBAAa,OAAM;AACnB,kBAAQ,iBAAiB,KAAK;AAC9B,yBAAe;AAIf,cAAI,KAAK,KAAK,OAAO,WACjB,cAAc,cAAc,KAAK,MAAM,WAAW;AAClD,aAAC,YAAY,oBAAoB,KAAK,KAAK,MAAM;;AAIrD,cAAI,UAAU,KAAK,KAAK,SAAS;AAC7B,4BAAgB,YAAU,KAAK,MAAM,WAAW,SAAS,KAAK;AAC9D,gBAAI,CAAC;AACD,eAAC,YAAY,oBAAoB,KAAK,KAAK,MAAM;;AAErD,mCAAuB,QAAQ;AAC/B,gBAAI,WAAe;AACf,uBAAS,KAAK,QAAQ,KAAK;AACvB,0BAAU,YAAY;AACtB,wBAAQ,iBAAiB;AACzB,qBAAK,uBAAuB,UAAU,KAAK,MAAM,WAAW,SAAS,eAAe,aAAa;AACjG,qBAAK,kBAAkB,KAAK,MAAM,QAAO,SAAS,WAAW,OAAO;AACpE,uBAAO;;;AAIX,wBAAU,YAAY;AACtB,mBAAK,uBAAuB,UAAU,KAAK,MAAM,WAAW,SAAS,eAAe,aAAa;AACjG,mBAAK,kBAAkB,KAAK,MAAM,QAAO,SAAS,WAAW,OAAO;;;AAIxE,iBAAK,kBAAkB,KAAK,MAAM,QAAO,SAAS,WAAW,OAAO;;;AAG5E,eAAO;;MAEX;AACI,aAAK,SAAS,QAAQ;AAClB,6BAAoB,oBAAoB,UAAU,MAAM;AACxD,cAAI,MAAM,aAAa,CAAC,UAAU,IAAI,UAAU;AAC5C;;AAGJ,cAAI,UAAU,OAAO;AACjB,gBAAI,UAAU,WAAW,KAAK;AAC1B,qBAAO,CAAC,CAAC,UAAU,MAAM,WAAW;;AAEpC,oBAAM,YAAY;AAClB,qBAAM,KAAK,CAAE,UAAU,QAAQ,gBAAgB,MAAM;;qBAIxD,UAAU,WAAW,MAAM;AAC5B,mBAAO,CAAC,CAAC,UAAU,MAAM,WAAW;;AAEpC,kBAAM,YAAY;AAClB,mBAAM,KAAK,CAAE,UAAU,QAAQ,gBAAgB,MAAM;;;;MAOjE;AACI,eAAO,KAAK,KAAK,WACZ,QAAQ,SAAO,KAAK,UAAU,KAAK,QAAQ,aAAU,QAAO;;MAErE;AACI,eAAO,KAAK,QAAQ,QAAQ;AACxB,yBAAc,OAAO;AACrB,6BAAoB,cAAc;AAClC,uBAAa,KAAK,MAAM,MAAM;AAC9B,cAAI,KAAK,WAAW,YAAY,KAAK,WAAW,SAAS;AACrD,0BAAc,KAAK,WAAW,SAAS;AACvC,0BAAc,MAAM,WAAW,OAAM,MAAM,UACvC,OAAM,MAAM,MAAM,iBAAgB,MAAM,YAAW,MAAM,MAAM,YAAW;AAC9E,oBAAY,OAAO,MAAM,sBAAsB,KAAK,mDAChB,oBAC5B,OAAM;;AAElB,cAAI,KAAK,WAAW,YAAY,KAAK,WAAW,SAAS;AACrD,oBAAY,OAAM,UAAU,KAAK,WAAW,SAAS,OAAO,MAAM,sBAAsB,KAAK,kDAEtF,KAAK,WAAW,SAAS,kBAAkB,OAAM;;;;MAIpE;AACI,uBAAe;AACf,gCAAwB;AACpB,cAAI,KAAK,cAAc,QAAQ,KAAK,WAAW,UAAU,QACrD,KAAK,WAAW,OAAO,cAAc;AACrC,4BAAe,KAAK,WAAW,OAAO;AACtC,mBAAO,QAAO,QAAQ,OAAO;;AAG7B,mBAAO,aAAa,OAAO;;;AAGnC,eAAO;;MAEX;AACI,2BAAmB,OAAO,KAAK,QAAQ,OAAO;AAC1C,6BAAmB,cAAc;AACjC,iBAAO,KAAK,MAAM,MAAM,aAAa;;AAEzC,YAAI,WAAW,SAAS;AACpB,gBAAM,IAAI,MAAM,uDACF;;;MAGtB;AACI,eAAO,QAAQ,IAAI;AACf,cAAI,KAAK,cAAc,QAAQ,KAAK,WAAW,WAAW,QACtD,KAAK,WAAW,QAAQ,SAAS;AACjC,4BAAe,KAAK,WAAW,QAAQ;AACvC,mBAAO,QAAO;;AAElB,iBAAO;WACR;;MAEP;AACI,gBAAQ,QAAQ;AACZ,mCAAyB,cAAc;AACvC,cAAI,CAAC,KAAK,MAAM,MAAM;AAClB,kBAAM,IAAI,MAAM,eAAe;;;;;;MCpf3C,oCAAoC,mBAAmB;AACnD,aAAK,wBAAwB;AAC7B,aAAK,eAAe;;MAWxB;AACI,aAAK,sBAAsB,QAAQ,WAAU;AAC7C,aAAK,aAAa,WAAU,MAAM;;MAOtC;AACI,eAAO,KAAK,sBAAsB;;MAMtC;AACI,eAAO,KAAK,aAAa;;MAK7B;AACI,0BAAkB,KAAK;AACnB,eAAK,aAAa,KAAK;AACvB,iBAAO,KAAK,aAAa;;AAE7B,2BAAmB,KAAK;AACpB,eAAK,sBAAsB,MAAM;AACjC,iBAAO,KAAK,sBAAsB;;;;AC9C9C;;;;;;;;;;;;;;;;AAoBO,+BAA2B;AAC3B,+BAA2B;;MAqB9B,oCAAoC;AAChC,aAAK,WAAW;AAChB,aAAK,cAAc;AACnB,aAAK,UAAU;AACf,YAAI,eAAe;AACf,eAAK,cAAc;;AAEvB,aAAK,kBAAkB,IAAI;;UAG3B;AACA,eAAO,KAAK;;UAEZ;AACA,eAAO,KAAK,SAAS;;UAErB;AACA,eAAO,KAAK,SAAS;;UAErB;AACA,eAAO,KAAK,SAAS;;UAErB;AACA,eAAO,KAAK,SAAS;;UAErB;AACA,eAAO,KAAK,SAAS;;MAEzB;AACI,qBAAa,KAAK;AAClB,YAAI,KAAK,QAAQ;AAEb,eAAK,UAAU;mBAEV,KAAK,YAAY,eAAe;AACrC,eAAK,UAAU,mBAAsB,MAAM,KAAK;;AAGhD,2BAAiB,gBAAmB,MAAM,KAAK;AAC/C,cAAI,SAAS,WAAW;AAGpB,qBAAS,KAAK,mBAAsB,MAAM,KAAK;qBAE1C,SAAS,SAAS;AACvB,kBAAM,IAAI,MAAM,wBAAwB,SAAS,kCACrC,CAAC;;AAEjB,eAAK,UAAU,SAAS;;;YAO1B;AACF,aAAK;AACL,YAAI,KAAK,QAAQ,QAAQ;AACrB,gBAAM,IAAI,MAAM;;AAGpB,0BAAkB,MAAM,KAAK,QAAQ;AACrC,eAAO,KAAK,SAAS;;MAQzB;AACI,aAAK,YAAY;AACjB,uBAAc,KAAK,UAAU;AAC7B,wBAAgB;AAChB,YAAI,KAAK,UAAU,uBAAuB;AACtC,sBACI,K
AAK,UAAU,oBAAoB;;AAE3C,aAAK,UAAU,GAAG,OAAM,SAAS,YAAY,OAAM,SAAS;AAC5D,0BAAkB,cAAiB,KAAK,UAAU,YAAY,KAAK,UAAU;AAC7E,aAAK,WAAW,IAAI,cAAc,gBAAgB,SAAS,eAAe,QAAO;AACjF,aAAK,SAAS,YAAY,KAAK,6BAA6B;AAG5D,aAAK,SAAS,kBAAkB,KAAK;AACrC,YAAI,UAAU,oBAAoB;AAC9B,8BAAoB,gBAAgB,SAAS,eAAe,UAAU;AACtE,eAAK,cAAc,IAAI,cAAc;AACrC,eAAK,YAAY,YAAY,KAAK,SAAS;AAI3C,eAAK,YAAY,kBAAkB,KAAK;AACxC,eAAK,YAAY,aAAa,IAAI;;AAEtC,eAAO;;YA8CL;AACF,YAAI,OAAO,iBAAiB;AACxB,2BAAiB,gBAAmB;AACpC,cAAI,SAAS,WAAW;AACpB,kBAAM,IAAI,MAAM,0CAA0C;qBAErD,SAAS,SAAS;AACvB,kBAAM,IAAI,MAAM,wBAAwB,SAAS,kCACrC;;AAEhB,yBAAe,SAAS;;AAE5B,YAAI,aAAa,QAAQ;AACrB,gBAAM,IAAI,MAAM;;AAGpB,eAAO,aAAa,KAAK,KAAK;;MAwClC;AACI,eAAO,KAAK,QAAQ,QAAQ,KAAK;;MAErC;AACI,YAAI,CAAE,mBAAkB,YAAW,CAAC,MAAM,QAAQ;AAE9C,iBAAO;;AAEX,iBAAS,MAAM,QAAQ,UAAU,SAAS,CAAC;AAC3C,YAAI,OAAO,WAAW,KAAK,WAAW;AAClC,gBAAM,IAAI,MAAM,mDACW,KAAK,WAAW,wCACpB,OAAO;;AAElC,eAAO,KAAK,WAAW,OAAO;AAC1B,cAAI,aAAa,OAAO;AACxB,iBAAO;WACR;;MAEP;AACI,kBAAU,WAAW,KAAK;AAC1B,eAAO,CAAC,MAAM,QAAQ,WAAW,CAAC,WAAW;;MAkBjD;AACI,iBAAS,KAAK,gBAAgB;AAC9B,kBAAU,KAAK,iBAAiB;AAChC,uBAAe,KAAK,SAAS,QAAQ,QAAQ;AAC7C,eAAO,OAAO,SAAS,IAAI,SAAS,OAAO;;YAkBzC;AACF,iBAAS,KAAK,gBAAgB;AAC9B,kBAAU,KAAK,iBAAiB;AAChC,uBAAe,MAAM,KAAK,SAAS,aAAa,QAAQ;AACxD,eAAO,OAAO,SAAS,IAAI,SAAS,OAAO;;MAE/C;AACI,eAAO,OAAO,KAAK,KAAK,OAAO;AAC3B,iBAAO,OAAO,CAAC,IAAI;AACnB,iBAAO;WACR;;MAOP;AACI,aAAK,SAAS;AACd,YAAI,KAAK;AACL,eAAK,YAAY;;AAErB,aAAK,gBAAgB;;;AAiCtB,uDAAkD;AACrD,UAAI,YAAY;AACZ,cAAM,IAAI,MAAM;;AAGpB,UAAI,WAAW;AACX,kBAAU;;AAEd,UAAI,QAAQ;AACR,YAAI,SAAS,QAAQ;AACjB,cAAI,CAAC,SAAS,SAAS;AACnB,uBAAW,WAAW;;AAE1B,qBAAW,GAAG,WAAW,qBAAqB;;;AAGtD,qBAAc,IAAI,WAAW,UAAU;AACvC,YAAM,OAAM;AACZ,aAAO;;ACrXX;AAEK,sBAAW;ACFhB;;;;;;;;;;;;;;;;ACAA;;;;;;;;;;;;;;;;;AAmCO;AACH,aAAO,gBAAgB,QAAO;;AAQlC,mDAA8C,IAAI,qBAAqB,IAAI;AACvE,UAAI,UAAS;AACT,eAAO;;AAEX,UAAI,YAAY,IAAI;AAChB,cAAM,IAAI,MAAM;;AAEpB,UAAI,KAAK,IAAI;AACT,eAAO,KAAK,IAAI;;AAEpB,qBAAe,MAAM;AACrB,UAAI,OAAO,WAAW,OAAO,UAAU;AACnC,cAAM,IAAI,MAAM;;AAEpB,UAAI,CAAC,OAAO;AACR,aAAK,IAAI,QAAO,OAAO;AACvB,eAAO,OAAO;iBAET,aAAW;AAEhB,+BAAuB,MAAM,QAAQ,UAAS,KAAK;AACnD,oBAAY,IAAI;AAChB,wBAAgB;AACZ,wBAAc,OAAM;AACpB,8BAAoB,gBAAgB,OAAO,OAAO,MAAM;AACxD,yBAAe,KAAK;;AAExB,oBAAY,OAAO;AACnB,eAAO;;AAGP,cAAM,IAAI,MAAM,yCAAyC;;;AA2B1D,qCAAiC;AACpC,aAAO,gBAAgB,QAAQ;;AAMnC,0DAAsD,IAAI;AAGtD,qBAAc,OAAO;AACrB,UAAI,YAAY,IAAI;AAChB,cAAM,IAAI,MAAM;;AAEpB,qBAAe,MAAM;AACrB,UAAI,OAAO,WAAW,OAAO,UAAU;AACnC,cAAM,IAAI,MAAM;;AAEpB,UAAI,CAAC,OAAO;AACR,eAAO,OAAO;iBAET,aAAW;AAEhB,+BAAuB,MAAM,QAAQ,UAAS,KAAK;AACnD,oBAAY,IAAI;AAChB,wBAAgB;AACZ,2BAAiB,OAAO,IAAI,OAAK,EAAE;AACnC,8BAAoB,gBAAgB,UAAU,OAAO;AACrD,yBAAe,KAAK;;AAExB,oBAAY,OAAO;AACnB,eAAO;;AAGP,cAAM,IAAI,MAAM,yCAAyC;;;AAI1D;AACH,UAAI,MAAM;AACN,eAAO;;AAGX,UAAI,aAAW,EAAE;AACb,eAAO,CAAE,OAAO,MAAM,SAAS;;AAG/B,eAAO,CAAE,OAAO,GAAG,SAAS;;;AAyB7B;AACH,mBAAa,IAAI;AAEjB,sBAAgB,QAAO,OAAO;AAK9B,wBAAkB,MAAM,KAAK,KAAK;AAC9B,sBAAc,KAAK,IAAI;AACvB,YAAI,WAAkB;AAClB,8BAAoB,MAAM;AAC1B,eAAK,IAAI,KAAK;;;AAMtB,qBAAe,gBAAgB,QAAO,OAAO;AAC7C,aAAO;;AAQJ;AACH,aAAO,OAAO,QAAS,CAAC,YAAY,OAAO,QACtC,OAAM,QAAQ,QACV,OAAO,QAAQ,YAAY,CAAE,gBAAe;;AAWlD;AACH,aAAO,OAAO,QAAQ,YAAY,QAAQ,MAAM,QAAQ,QACnD,OAAO,QAAQ,YAAa,eAAe,WAC5C,cAAqB;;AAM7B;AACI,aAAQ,UAAU,QACb,OAAO,UAAU,YAAY,OAAO,UAAU;;AClOvD;;;;;;;;;;;;;;;;;AAmBO;AACH,aAAO,QAAQ,WAAW;;AAG9B;AACI,UAAI,gBAAgB;AAChB,eAAQ,CAAE,OAAO,KAAK,SAAS,SAAS;iBAEnC,aAAW;AAChB,eAAO,CAAE,OAAO,MAAM,SAAS;;AAG/B,eAAO,CAAE,OAAO,MAAM,SAAS;;;AC/BvC;;;;;;;;;;;;;;;;;;MAyBI;AACI,aAAK,WAAW;AAIhB,aAAK,QAAQ;AACb,aAAK,MAAM;AACX,YAAI,YAAY;AACZ,gBAAM,IAAI,WAAW;;AAEzB,YAAI,WAAW;AACX,gBAAM,IAAI,WAAW;;AAEzB,aAAK,OAAO,IAAI,MAAM;AACtB,aAAK,kBAAkB,IA
AI;;MAK/B;AAEI,eAAO,SAAQ;AACX,oBAAS,KAAK;;AAElB,eAAO,SAAQ,KAAK;;MAExB;AACI,YAAI,SAAQ;AACR,gBAAM,IAAI,WAAW;;AAEzB,eAAO,KAAK,KAAK,SAAQ,KAAK;;MAElC;AACI,YAAI,SAAQ;AACR,gBAAM,IAAI,WAAW;;AAEzB,aAAK,KAAK,SAAQ,KAAK,YAAY;;MAKvC;AACI,qBAAa,KAAK,MAAM,KAAK;AAC7B,YAAI,SAAS;AACT,mBAAS,KAAK,kBAAkB;;AAEpC,eAAO;;MAOX;AACI,eAAO,KAAK,aAAa,KAAK;;MAOlC;AACI,eAAO,KAAK,aAAa;;MAK7B;AACI,YAAI,KAAK;AACL,gBAAM,IAAI,WAAW;;AAEzB,aAAK,IAAI,KAAK,KAAK;AACnB,aAAK,MAAM,KAAK,KAAK,KAAK,MAAM;;MAKpC;AACI,4BAAoB;AAChB,eAAK,KAAK;;;MAMlB;AACI,YAAI,KAAK;AACL,gBAAM,IAAI,WAAW;;AAEzB,aAAK,MAAM,KAAK,KAAK,KAAK,MAAM;AAChC,uBAAe,KAAK,IAAI,KAAK;AAC7B,aAAK,IAAI,KAAK,KAAK;AACnB,eAAO;;MAKX;AACI,YAAI,KAAK;AACL,gBAAM,IAAI,WAAW;;AAEzB,aAAK,QAAQ,KAAK,KAAK,KAAK,QAAQ;AACpC,aAAK,IAAI,KAAK,OAAO;;MAKzB;AACI,YAAI,KAAK;AACL,gBAAM,IAAI,WAAW;;AAEzB,uBAAe,KAAK,IAAI,KAAK;AAC7B,aAAK,IAAI,KAAK,OAAO;AACrB,aAAK,QAAQ,KAAK,KAAK,KAAK,QAAQ;AACpC,eAAO;;MAWX;AACI,YAAI,KAAK;AACL,gBAAM,IAAI,WAAW;;AAEzB,uBAAc,KAAK,KAAK,KAAK,QAAQ;AACrC,uBAAe,KAAK,IAAI;AACxB,aAAK,IAAI,QAAO,KAAK;AACrB,eAAO;;;AC7Jf;;;;;;;;;;;;;;;;;oCAkBuC;MAInC;AACI,cAAM,kBAAkB;;MAE5B;AACI,eAAO;;MAEX;AACI,YAAI,MAAM;AACN,eAAK;;AAET,cAAM,KAAK;;MAEf;AACI,YAAI,MAAM;AACN,eAAK;;AAET,cAAM,QAAQ;;MAKlB;AACI,4BAAoB,KAAK,WAAW;AACpC,wBAAgB,IAAI,MAAM;AAC1B,oBAAY,KAAK;AAGjB,qBAAa,GAAG,IAAI,KAAK;AACrB,kBAAQ,KAAK,KAAK,IAAI,KAAK,KAAK,KAAK,QAAQ;;AAEjD,aAAK,OAAO;AACZ,aAAK,WAAW;AAChB,aAAK,kBAAkB,IAAI,KAAK;AAChC,aAAK,QAAQ;AACb,aAAK,MAAM;;;AAGnB,sBAAkB,mBAAmB;AC3DrC;;;;;;;;;;;;;;;;;AA6BO;AACH,aAAO,IAAI,cAAc;;AAKtB;AACH,cAAQ;AACR,aAAO,qBAAqB,MAAO,EAAE,OAAO,KAAK,MAAM;;AAepD;AACH,aAAO,IAAI,qBAAqB;;AAc7B;AACH,aAAO,IAAI,gBAAgB,eAAe;;AAkBvC;AACH,aAAO,yBAAyB,qBAAqB,cAAc,KAAK,SAAQ;;AA0B7E,0DAAsD,gBAAgB;AACzE,aAAO,IAAI,YAAY,WAAW;;;YAkB5B;AACF,uBAAe;AACf,gBAAQ,MAAM,KAAK;AACnB,eAAO,CAAC,EAAE;AACN,iBAAO,KAAK,EAAE;AACd,cAAI,MAAM,KAAK;;AAEnB,eAAO;;YAaL;AACF,uBAAe,KAAK,SAAS;AAC7B,uBAAe;AACf,gBAAQ,MAAM,OAAO;AACrB,eAAO,CAAC,EAAE;AACN,iBAAO,KAAK,EAAE;AACd,cAAI,MAAM,OAAO;;AAErB,eAAO;;YASL;AACF,gBAAQ,MAAM,KAAK;AACnB,eAAO,CAAC,EAAE;AACN,cAAI,MAAM,KAAK;;;YAUjB;AACF,gBAAQ,MAAM,KAAK;AACnB,6BAAqB,UAAU,EAAE;AACjC,eAAQ,CAAC,EAAE,QAAS;AAChB,cAAI,MAAM,KAAK;AACf,2BAAiB,UAAU,EAAE;;;MAerC;AACI,eAAO,IAAI,0BAA0B,MAAM;;MAW/C;AACI,eAAO,IAAI,eAAe,MAAM;;MAUpC;AACI,eAAO,IAAI,YAAY,MAAM;;MAUjC;AACI,eAAO,IAAI,iBAAiB,MAAM;;MAUtC;AACI,eAAO,IAAI,iBAAiB,MAAM,WAAW;;MAUjD;AACI,eAAO,IAAI,gBAAgB,MAAM;;YAO/B;AACF,eAAO,KAAK,IAAI,GAAG;;YASjB;AACF,eAAO,KAAK,eAAe,GAAG,aAAa,OAAM,MAAM;;MAoB3D,0CAA0C;AACtC,eAAO,IAAI,sBAAsB,MAAM,WAAW;;MAkCtD,6CAA6C,cAErC;AAEJ,2BAAmB,KAAK,cAAc,WAAW;AAGjD,eAAO,WAAW,IAAI,OAAK,QAAQ,GAAG;;MAY1C;AACI,eAAO,IAAI,gBAAgB,kBAAkB,CAAC,MAAM,YAAY;;MASpE;AACI,YAAI,SAAQ,KAAK,UAAS;AACtB,iBAAO;;AAEX,eAAO,IAAI,aAAa,MAAM;;MAQlC;AACI,YAAI,SAAQ,KAAK,UAAS;AACtB,iBAAO;;AAEX,eAAO,IAAI,aAAa,MAAM;;MAWlC;AACI,eAAO,IAAI,iBAAiB,MAAM;;MAWtC;AACI,eAAO,IAAI,gBAAgB,MAAM,YAAY;;MAMjD;AACI,eAAO,IAAI,eAAe;;;gCAUN;MACxB;AACI;AACA,aAAK,QAAQ;AACb,aAAK,OAAO;;MAEhB;AACI,eAAO,YAAY,KAAK,MAAM;;YAE5B;AACF,YAAI,KAAK,QAAQ,KAAK,MAAM;AACxB,iBAAO,CAAE,OAAO,MAAM,MAAM;;AAEhC,qBAAa,KAAK,MAAM,KAAK;AAC7B,aAAK;AACL,eAAO,CAAE,OAAO,UAAU,OAAO,MAAM;;;uCAGZ;MAC/B;AACI;AACA,aAAK,SAAS;;MAElB;AACI,eAAO;;YAEL;AACF;AACI,iBAAO,KAAK;;AAIZ,YAAE,UACE,mDAAmD,EAAE;AACzD,gBAAM;;;;iCAIW;MACzB;AACI;AACA,aAAK,WAAW;AAChB,aAAK,WAAW,QAAQ,QAAQ,CAAE,OAAO,MAAM,MAAM;;MAEzD;AACI,eAAO,GAAG,KAAK,SAAS;;YAEtB;AAKF,aAAK,WAAW,KAAK,SAAS,KAAK,MAAM,KAAK;AAC9C,eAAO,KAAK;;YAEV;AACF,eAAO,KAAK,SAAS;;;+BAGF;MACvB;AACI;AACA,aAAK,WAAW;AAChB,aAAK,WAAW;AAEhB,aAAK,QAAQ;AACb,aAAK,WAAW,QAAQ,QAAQ,CAAE,OAAO,MAAM,MAAM;;MAEzD;AACI,eAAO,GAAG,KAAK,SAAS;;YAEtB;AAK
F,aAAK,WAAW,KAAK,SAAS,KAAK,MAAM,KAAK;AAC9C,eAAO,KAAK;;YAEV;AAKF,eAAO,KAAK,UAAU,KAAK;AACvB,0BAAgB,MAAM,KAAK,SAAS;AAEpC,cAAI,QAAQ;AACR,mBAAO;;AAEX,kBAAW,QAAQ;;AAEvB,eAAO,KAAK,SAAS;;;+BAGF;MACvB;AACI;AACA,aAAK,WAAW;AAChB,aAAK,WAAW;AAChB,aAAK,QAAQ;;MAEjB;AACI,eAAO,GAAG,KAAK,SAAS;;YAEtB;AACF,YAAI,KAAK,WAAW,KAAK;AACrB,iBAAO,CAAE,OAAO,MAAM,MAAM;;AAEhC,eAAO,KAAK,SAAS;;;wCAMO;MAChC,wDAAwD;AACpD;AACA,aAAK,WAAW;AAChB,aAAK,YAAY;AACjB,aAAK,uBAAuB;AAC5B,aAAK,WAAW,QAAQ,QAAQ,CAAE,OAAO,MAAM,MAAM;;MAEzD;AACI,eAAO,GAAG,KAAK,SAAS;;YAEtB;AAKF,aAAK,WAAW,KAAK,SAAS,KAAK,MAAM,KAAK;AAC9C,eAAO,KAAK;;YAEV;AACF,sBAAc;AACd,eAAO,MAAM,SAAS,KAAK;AACvB,uBAAa,MAAM,KAAK,SAAS;AACjC,cAAI,KAAK;AACL,gBAAI,KAAK,wBAAwB,MAAM,SAAS;AAC5C,qBAAO,CAAE,OAAO,OAAO,MAAM;;AAEjC,mBAAO,CAAE,OAAO,MAAM,MAAM;;AAEhC,gBAAM,KAAK,KAAK;;AAEpB,eAAO,CAAE,OAAO,OAAO,MAAM;;;iCAGR;MACzB;AACI;AACA,aAAK,WAAW;AAChB,aAAK,YAAY;AACjB,aAAK,WAAW,QAAQ,QAAQ,CAAE,OAAO,MAAM,MAAM;;MAEzD;AACI,eAAO,GAAG,KAAK,SAAS;;YAEtB;AAKF,aAAK,WAAW,KAAK,SAAS,KAAK,MAAM,KAAK;AAC9C,eAAO,KAAK;;YAEV;AACF,eAAO;AACH,uBAAa,MAAM,KAAK,SAAS;AACjC,cAAI,KAAK,QAAQ,KAAK,UAAU,KAAK;AACjC,mBAAO;;AAEX,kBAAW,KAAK;;;;8BAIF;MACtB;AACI;AACA,aAAK,WAAW;AAChB,aAAK,YAAY;;MAErB;AACI,eAAO,GAAG,KAAK,SAAS;;YAEtB;AACF,qBAAa,MAAM,KAAK,SAAS;AACjC,YAAI,KAAK;AACL,iBAAO,CAAE,OAAO,MAAM,MAAM;;AAEhC,6BAAqB,uBAAqC,KAAK;AAO/D,uBAAe,KAAK,UAAU,KAAK;AACnC,8BAAsB,uBAAqC;AAG3D,wBAAgB;AACZ,cAAI,CAAC,eAA8B,GAAG;AAClC,cAAE;;;AAGV,eAAO,CAAE,OAAO,QAAQ,MAAM;;;4CAGE;MACpC;AACI;AACA,aAAK,WAAW;AAChB,aAAK,UAAU;AACf,aAAK,QAAQ;AACb,aAAK,WAAW,QAAQ,QAAQ,CAAE,OAAO,MAAM,MAAM;;MAEzD;AACI,eAAO,GAAG,KAAK,SAAS;;YAEtB;AAKF,aAAK,WAAW,KAAK,SAAS,KAAK,MAAM,KAAK;AAC9C,eAAO,KAAK;;YAEV;AACF,eAAO;AACH;AACI,mBAAO,MAAM,KAAK,SAAS;;AAG3B,gBAAI,CAAC,KAAK,QAAQ;AACd,qBAAO,CAAE,OAAO,MAAM,MAAM;;;;;;mCAUjB;MAC3B;AACI;AACA,aAAK,WAAW;AAChB,aAAK,YAAY;;MAErB;AACI,eAAO,GAAG,KAAK,SAAS;;YAEtB;AACF,qBAAa,MAAM,KAAK,SAAS;AACjC,YAAI,KAAK;AACL,iBAAO,CAAE,OAAO,MAAM,MAAM;;AAEhC,6BAAqB,uBAAqC,KAAK;AAO/D,uBAAe,MAAM,KAAK,UAAU,KAAK;AACzC,8BAAsB,uBAAqC;AAG3D,wBAAgB;AACZ,cAAI,CAAC,eAA8B,GAAG;AAClC,cAAE;;;AAGV,eAAO,CAAE,OAAO,QAAQ,MAAM;;;oCAaC;MACnC;AACI;AACA,aAAK,cAAc,IAAI;AACvB,aAAK,WAAW,QAAQ,QAAQ,CAAE,OAAO,MAAM,MAAM;;YAEnD;AAKF,aAAK,WAAW,KAAK,SAAS,KAAK,MAAM,KAAK;AAC9C,eAAO,KAAK;;YAEV;AAIF,eAAO,KAAK,YAAY,aAAa;AAEjC,cAAI,CAAC,MAAM,KAAK;AACZ,mBAAO,CAAE,OAAO,MAAM,MAAM;;;AAGpC,eAAO,CAAE,OAAO,KAAK,YAAY,SAAS,MAAM;;;kCAG1B;MAC1B;AACI;AACA,aAAK,WAAW;AAChB,aAAK,YAAY;;MAErB;AACI,eAAO,GAAG,KAAK,SAAS;;YAEtB;AACF,qBAAa,MAAM,KAAK,SAAS;AACjC,YAAI,KAAK;AACL,iBAAO;;AAEX,6BAAqB,uBAAqC,KAAK;AAM/D,4BAAoB,KAAK,UAAU,KAAK;AACxC,8BAAsB,uBAAqC;AAC3D,aAAK,YAAY,QAAQ;AAGzB,wBAAgB;AACZ,cAAI,CAAC,eAA8B,GAAG;AAClC,cAAE;;;AAGV,eAAO;;;kCAYsB;MACjC;AACI;AACA,aAAK,mBAAmB;AAGxB,aAAK,WAAW;AAEhB,aAAK,WAAW;AAChB,aAAK,gBAAgB;;MAEzB;AACI,kCAA0B;AAC1B,eAAO,GAAG;;YAER;AACF,aAAK,WAAW,KAAK,cAAc,KAAK;AACxC,eAAO,KAAK;;YAEV;AAMF,cAAM;AACN,YAAI,KAAK,YAAY;AACjB,iCAAuB,MAAM,KAAK,cAAc;AAChD,cAAI,eAAe;AAEf,mBAAO,CAAE,OAAO,MAAM,MAAM;;AAEhC,eAAK,WAAW,eAAe;AAC/B,cAAI,KAAK,oBAAoB;AACzB,iBAAK,WAAW,KAAK,SAAS,aAAa,KAAK;;;AAGxD,2BAAmB,MAAM,KAAK,SAAS;AACvC,YAAI,WAAW;AACX,eAAK,WAAW;AAChB,iBAAO,KAAK,cAAc;;AAE9B,eAAO;;;AAGR;AACN,IAAA;AACG,uBAAgB,iBAAgB,UAAU,KAAK;AAC/C,uBAAgB,iBAAgB,cAAc,KAAK;AACnD,uBAAgB,iBAAgB,aAAa,KAAK;OACnD,mBAAoB,mBAAkB;8BA8Bf;MACtB,sCAAsC,gBAAgB;AAClD;AACA,aAAK,YAAY;AACjB,aAAK,eAAe;AACpB,aAAK,QAAQ;AACb,aAAK,iBAAiB;;MAE1B;AACI,kCAA0B;AAC1B,eAAO,IAAI;;YAET;AAGF,cAAM;AAGN,2BAAmB;AACnB,4BAAoB;AACpB;AACI,cAAI,qBAAqB;AACrB,2BAAe,UAAU;AACzB,mBAAO;cACH,OAAO,OAAO,KAAK;AACf;AACA,oBAAI,EAAE;AACF;;AAEJ,uBAAO,EAAE;;cAEb,SAAS;;;AAIb,mBAAO,CAAE,OA
AO,MAAM,SAAS;;;AAGvC,uBAAe,MAAM,mBAAmB,KAAK,WAAW;AACxD,YAAI,iBAAiB;AAEjB,iBAAO,CAAE,OAAO,MAAM,MAAM;;AAEhC,YAAI,gBAAgB;AAChB,kBAAQ,KAAK;iBACJ,gBAAgB;AACjB,oBAAM,IAAI,MAAM,qEACa,KAAK;iBACjC,gBAAgB;AACjB,qBAAO,CAAE,OAAO,MAAM,MAAM;iBAC3B,gBAAgB;;;;AAK7B,aAAK;AACL,eAAO,CAAE,OAAO,QAAQ,MAAM;;YAE5B;AACF,aAAK,iBAAiB,KAAK,UAAU,KAAK;AAC1C,eAAO,KAAK;;;mCAYkB;MAClC;AACI;AACA,aAAK,WAAW;AAChB,aAAK,aAAa;AAClB,aAAK,SAAS,IAAI,WAAW;;MAEjC;AACI,eAAO,GAAG,KAAK,SAAS;;MAM5B;AACI,eAAO,CAAC,KAAK,OAAO;AAChB,oBAAU,KAAK,SAAS;AACxB,eAAK,OAAO,KAAK;;;MAGzB;AACI,aAAK;AAIL,eAAO,KAAK,OAAO;;;kCASU;MACjC;AACI,cAAM,UAAU;AAChB,aAAK,WAAW;AAChB,aAAK,aAAa;AAElB,aAAK,oBAAoB;AACzB,aAAK,SAAS,aAAgB,QAAQ,OAAc;AACpD,aAAK,WAAW,QAAQ,QAAQ,CAAE,OAAO,MAAM,MAAM;;YAEnD;AAKF,aAAK,WAAW,KAAK,SAAS,KAAK,MAAM,KAAK;AAC9C,eAAO,KAAK;;MAEhB;AACI,eAAO,KAAK,MAAM,KAAK,WAAW;;MAEtC;AACI,eAAO,KAAK,UAAU,KAAK,OAAO;;YAEhC;AAEF,YAAI,CAAC,KAAK;AACN,eAAK;;AAET,eAAO,CAAC,KAAK,OAAO;AAChB,8BAAoB,KAAK;AACzB,yBAAe,MAAM,KAAK,OAAO,cAAc;AAC/C,cAAI,OAAO;AACP,iBAAK,oBAAoB;;AAGzB,iBAAK;AACL,mBAAO;;;AAGf,eAAO,CAAE,OAAO,MAAM,MAAM;;;AC1+BpC;;;;;;;;;;;;;;;;;;MAmDI;AACI,aAAK,OAAO;;MA8DhB,kCAAkC;AAC9B,sBAAa;AACb,gBAAe,YAAY,GAAG,MAAM;QACpC;AACA;AACA,YAAI,KAAK,SAAS,YAAY,KAAK,QAAQ;AAGvC,iBAAO,KAAK;mBAEP;AAGL,iBAAO,KAAK,KAAK,KAAK,OAAO;;AAK7B,iBAAO,KAAK,MAAM,KAAK,OAAO;;AAElC,eAAO,sBAAsB;AACzB,iBAAQ,OAAM,MAAK,YACd,iBAAiB,WAAW,gBAAgB;WAClD;;MAiBP;AACI,sBAAa;AACb;AACA,YAAI,KAAK,SAAS,YAAY,QAAQ,SAAS;AAG3C,iBAAO;mBAEF,KAAK,QAAQ,QAAQ,QAAQ,QAAQ;AAG1C,iBAAO,KAAK,OAAO,QAAQ;;AAK3B,iBAAO;;AAEX,eAAO,sBAAsB,YAAa,OAAM,MAAK,YAAY,YAAY,MAAM,QAAQ,aAAa;;MAkB5G;AACI,sBAAa;AACb;AACA,YAAI,KAAK,SAAS;AAEd,iBAAO;;AAKP,iBAAO;;AAEX,eAAO,sBAAsB;AACzB,iBAAQ,OAAM,MAAK,YAAY,OAAO,OAAK,KAAQ,MAAM,UAAU;WACpE;;YAkBD;AACF,eAAQ,OAAM,KAAK,YAAY,aAAa;;MAiBhD;AACI,sBAAa;AACb,eAAO,sBAAsB;AACzB,iBAAQ,OAAM,MAAK,YAAY,IAAI,OAAK,KAAQ,MAAM,UAAU;WACjE,KAAK;;MAyBZ;AACI,sBAAa;AACb,eAAO,sBAAsB;AACzB,iBAAQ,OAAM,MAAK,YAAY,SAAS;WACzC,KAAK;;MAWZ;AACI,YAAI,cAAc;AACd,gBAAM,IAAI,WAAW;;AAEzB,sBAAa;AACb,eAAO,sBAAsB,YAAa,OAAM,MAAK,YAAY,SAAS,aAAa,KAAK;;MAoBhG;AACI,sBAAa;AACb;AACA,YAAI,KAAK,QAAQ,QAAQ,SAAQ;AAI7B,iBAAO,KAAK,OAAO;mBAEd,WAAU;AAEf,iBAAO;mBAEF,KAAK,QAAQ,QAAS,YAAU,UAAa,SAAQ;AAG1D,iBAAO;;AAIP,iBAAO;;AAEX,eAAO,sBAAsB;AACzB,mCAAyB,qBAAqB,YAAa,EAAE,OAAO,MAAM,MAAK,YAAY,MAAM;AACjG,iBAAO,yBAAyB,iBAAiB,KAAK;WACvD;;MAmBP;AACI,sBAAa;AACb;AACA,YAAI,KAAK,QAAQ,QAAQ,UAAS,KAAK,KAAK,QAAQ;AAIhD,iBAAO,KAAK,OAAO;mBAEd,KAAK,QAAQ,QACjB,MAAK,OAAO,UAAS,WAAU,UAAa,SAAQ;AAGrD,iBAAO;;AAIP,iBAAO;;AAEX,eAAO,sBAAsB,YAAa,OAAM,MAAK,YAAY,KAAK,SAAQ;;MAuBlF,mDAAmD;AAC/C,YAAI,cAAc,QAAQ,aAAa;AACnC,cAAI,KAAK,QAAQ;AACb,kBAAM,IAAI,WAAW;;AAGrB,kBAAM,IAAI,WAAW,mNAGkB,KAAK;;;AAGpD,sBAAa;AACb,uBAAe,aAAgB,QAAQ,OAAc;AACrD,eAAO,sBAAsB;AACzB,sBAAY,OAAO;AACnB,cAAI;AACA,qBAAS,OAAO;;AAEpB,iBAAQ,OAAM,MAAK,YAAY,QAAQ,YAAY,MAAM;WAC1D,KAAK;;MAmBZ;AACI,sBAAa;AACb;AACA,YAAI,KAAK,QAAQ,QAAQ,KAAK,OAAO;AAGjC,iBAAO;mBAEF,KAAK,QAAQ,QAAQ,KAAK,QAAQ;AAGvC,iBAAO,KAAK;;AAIZ,iBAAO;;AAEX,eAAO,sBAAsB,YAAa,OAAM,MAAK,YAAY,KAAK,SAAQ;;YAkB5E;AACF,YAAI,KAAK,SAAS;AACd,gBAAM,IAAI,MAAM;;AAEpB,eAAQ,OAAM,KAAK,YAAY;;YAa7B;AACF,YAAI,KAAK,SAAS;AACd,gBAAM,IAAI,MAAM;;AAEpB,eAAQ,OAAM,KAAK,YAAY;;;AAIvC,YAAQ,kBAAkB;AAanB,sDAAkD;AACrD,aAAO,IAAI,cAAc;QACrB;AACI,gBAAM,GAAG;AACT,eAAK,OAAO;;cAMV;AACF,iBAAO;;;;AAsBZ;AACH,aAAO,sBAAsB,YAAY,kBAAkB,QAAQ,MAAM;;AA2CtE;AAEH,UAAI,CAAC,aAAW;AACZ,cAAM,IAAI,MAAM;;AAEpB;AACA,UAAI,MAAM,QAAQ;AACd,qBAAa,GAAG,IAAI,SAAS,QAAQ;AACjC,iBAAO,QAAQ,OAAO,SAAS,GAAG,OAC9B,KAAK,IAAI,MAAM,SAAS,GAAG;;iBAG9B,oBAAoB;AACzB,yBAAiB;AACb,iBAAO,QAAQ,OAAO,SAAS,IAAI,OAC/B,KAAK,IAAI,MAAM,SAAS,IAAI;;;AAGxC,aAAO,sBAAsB;AACzB,w
BAAgB,MAAM,mBAAmB,UAAU;AAC/C,cAAI,aAAa;AACb,mBAAO,CAAE,OAAO,EAAE,YAAY,SAAS;qBAElC,aAAW;AAChB,mBAAO,CAAE,OAAO,MAAM,SAAS;;AAG/B,kBAAM,IAAI,MAAM;;;AAIxB,eAAO,mBAAmB,SAAS,gBAAgB;SACpD;;AAUP;AACI,UAAI,SAAS;AACT,eAAO;;AAGX,yBAAmB,KAAK;AACxB,UAAI,aAAa;AAEb,sBAAc,YAAY;AAC1B,eAAO,CAAE,OAAO,SAAS;;AAG7B,aAAO,CAAE,OAAO,MAAM,SAAS;;AAMnC;AACI,UAAI,OAAO,WAAW;AAElB,cAAM,IAAI,MAAM;;AAEpB,UAAI,OAAO,cAAc;AAErB,eAAO,MAAS;;AAIhB,eAAO,QAAU;;;AC/oBzB;;;;;;;;;;;;;;;;;kCAuBqC;MAMjC;AACI;AACA,aAAK,QAAQ;;YAEX;AACF,8BAAsB,MAAM,KAAK,MAAM;AACvC,6BAAqB,cAAc;AACnC,6BAAqB,aAAa,MAAM,MAAM,IAAI;AAE9C,cAAI,KAAK,SAAS;AACd,mBAAO,KAAK,MAAM,GAAG;;AAEzB,iBAAO;;AAEX,eAAO;;;AC3Cf;;;;;;;;;;;;;;;;;AAoBA,uBAAmB;AACnB,sBAAkB,OAAO;AACzB,wBAAoB,OAAO;AAC3B,wBAAoB,OAAO;AAC3B,oCAAgC,OAAO;AACvC,wCAAoC,OAAO;6BAcX;MAiC5B;AACI;AACA,aAAK,QAAQ;AACb,aAAK,YAAY;AACjB,aAAK,kBAAkB;AACvB,aAAK,uBAAuB;AAC5B,aAAK,gBAAgB;AACrB,aAAK,wBAAwB;AAC7B,aAAK,YAAY;AACjB,aAAK,kBAAkB;AACvB,aAAK,OAAO,IAAI,gBAAgB;AAChC,YAAI,CAAC;AACD,sBAAY;;AAEhB,aAAK,YAAY,UAAU,cAAc,QAAQ,QAAQ;AACzD,aAAK,kBAAkB,UAAU;AACjC,aAAK,gBAAgB,UAAU;AAC/B,aAAK,wBAAwB,UAAU;AACvC,YAAI,UAAU;AACV,kBAAY,UAAU,aAAa,MAAM,MAAM;AAC/C,eAAK,kBAAkB;AACvB,eAAK,YAAY;;AAGjB,eAAK,YAAY,UAAU,YAAY,UAAU,YAAY;;;YAa/D;AACF,YAAI,CAAC,KAAK;AACN,gBAAM,KAAK;;AAEf,eAAO,KAAK,wBAAwB,OAAO,KAAK,KAAK,iBACjD,KAAK;;YAUP;AACF,oCAA4B,MAAM,KAAK;AACvC,YAAI,CAAC,KAAK,mBAAmB,CAAC;AAE1B,gBAAM,IAAI,MAAM;mBAEX,KAAK,mBAAmB;AAE7B,kBAAY,oBAAoB,WAAW,KAAK,gBAAgB,QAAQ,MAAM,yCAC1E,KAAK,gBAAgB,OAAO,aAC5B,oEACW,oBAAoB,OAAO,aAAa;;AAE3D,YAAI,CAAC,KAAK;AACN,eAAK,kBAAkB;;AAG3B,uBAAe,KAAK,gBAAgB,OAAO;AACvC,mBAAS,QAAS,SAAS,QAAQ,KAAM;AACzC,iBAAO;WACR;AACH,+BAAuB,OAAO,KAAK,QAAQ,OAAO,UAAW,OAAO,QAAQ;AAC5E,gBAAY,eAAe,WAAW,GAAG,MAAM,mCAAmC,eAAe;AAEjG,YAAI,KAAK;AACL,4BAAkB,OAAO,KAAK,KAAK;AAC/B,2BAAc,KAAK,gBAAgB,QAAQ;AAC3C,gBAAI,WAAU;AACV,oBAAM,IAAI,MAAM,cAAc,MAC1B,yEACY,KAAK,gBAAgB,aAAa;;;;AAI9D,aAAK,uBAAuB;;YAE1B;AACF,YAAI,KAAK;AACL,uBAAa,MAAM,KAAK,KAAK;AAC7B,+BAAqB,MAAM,KAAK;AAChC,cAAI,aAAa;AACb,kBAAM,IAAI,MAAM;;AAEpB,4BAAkB,aAAa;AAC/B,0BAAgB,KAAK,SAAS,WAAW;AACzC,iBAAO;;AAGP,iBAAO;;;YAGT;AACF,YAAI,CAAC,KAAK;AACN,gBAAM,KAAK;;AAEf,oBAAY,MAAM,KAAK,KAAK;AAC5B,YAAI,KAAK;AAGL,kBAAQ,MAAM,KAAK;;AAEvB,eAAO,MAAM,IAAI,OAAK,KAAK,gBAAgB;;MAE/C;AACI,uBAAe,KAAK,SAAS;AAC7B,yBAAiB;AACjB,uBAAe;AACf,qBAAa,GAAG,IAAI,KAAK,gBAAgB,QAAQ;AAC7C,sBAAY,KAAK,gBAAgB;AACjC,0BAAe,KAAK,gBAAgB,KAAK,cAAc,OAAO;AAC9D,cAAI,KAAK,yBAAyB,CAAC;AAE/B;;AAGA,0BAAc,OAAO;AACrB,8BAAkB;AAClB,gBAAI,UAAU;AAGV,kBAAI,WAAU,QAAO,YAAY;AAC7B,8BAAc,QAAO;yBAEhB,WAAW,SAAO,YAAY,QAAO;AAC1C,sBAAM,IAAI,MAAM,mBAAmB,8BAA8B;;AAGjE,8BAAc;;;AAKlB,iCAAmB,OAAO;AAC1B,kBAAI,MAAM;AAGN,oBAAI,WAAU,QAAO,UAAU;AAC3B,gCAAc,KAAK,WAAW;;AAI9B,gCAAc;;yBAGb,CAAC,WAAU,CAAC,QAAO;AAGxB,8BAAc;;AAKd,wBAAQ,QAAO;uBACN;AACD,kCAAc;AACd;uBACC;AACD,kCAAc,KAAK,MAAM;AACzB;uBACC;AACD,kCAAc,KAAK,WAAW;AAC9B;;AAEA,kCAAc;;;;AAK7B,uBAAU,QAAO,UAAW,OAAO,OAAO,cACvC,SAAS,OAAO;;;AAK5B,YAAI,OAAO,KAAK,QAAQ,WAAW;AAC/B,iBAAO;;AAGP,iBAAO,CAAE,IAAI,UAAU,IAAI;;;MAGnC;AACI,YAAI,UAAU,OAAO,MAAM,kBAAkB;AACzC,iBAAO;;AAGP,iBAAO;;;MAIf,sCAAsC;AAClC,uBAAe;AACf,yBAAiB;AACjB,2BAAmB,KAAK;AACxB,2BAAmB;AAEnB,qBAAa,GAAG,IAAI,YAAY;AAC5B,kBAAQ;iBAEC;AACD,sBAAQ,KAAK,OAAO;qBAEX;AACD,+BAAa,IAAI;AACjB,iCAAe;AACf;qBAEC,KAAK;AACN,+BAAa,IAAI;AAGjB,sBAAI,KAAK,cAAc,OAAO,KAAK;AAC/B;;AAEJ,yBAAO,KAAK;AACZ,iCAAe;AACf;;AAGA,iCAAe;AACf,+BAAa;AACb;;AAER;iBAEC;AACD,sBAAQ,KAAK,OAAO;qBAEX,KAAK;AACN,yBAAO,KAAK,KAAK,UAAU,YAAY;AACvC,iCAAe;AACf,+BAAa,IAAI;AACjB;;;AAGR;iBAEC;AACD,sBAAQ,KAAK,OAAO;qBAEX;AACD,iCAAe;AACf;;;AAGR;iBAEC;AACD,sBAAQ,KAAK,OAAO;qBAEX,KAAK;AACN,yBAAO,KAAK,KAAK,UAAU,YAAY,IAAI;AAC3C,iCAAe;AACf,+B
AAa,IAAI;AACjB;qBAEC;AACD,iCAAe;AACf;;AAGA,iCAAe;AACf;;AAER;iBACC;AACD,sBAAQ,KAAK,OAAO;qBAEX;AACD,iCAAe;AACf;;;AAGR;;;;AAKZ,YAAI,iBAAiB;AACjB,iBAAO,KAAK,KAAK,UAAU,YAAY,aAAa;;AAGpD,iBAAO,KAAK,KAAK,UAAU;;AAG/B,YAAI,wBAAwB,OAAO,WAAW,KAAK,gBAAgB;AAC/D,gBAAM,IAAI,MAAM,wCAAwC,KAAK,gBAAgB,qCAAqC;;AAEtH,eAAO;;;ACpXf;;;;;;;;;;;;;;;;;qCAyBwC;MACpC;AACI;AACA,aAAK,mBAAmB;AACxB,aAAK,WAAW;AAChB,aAAK,UAAU,iBAAiB,WAAW;AAC3C,4BAAoB,KAAK,KAAK,KAAK;AACnC,YAAI,KAAK,UAAU,KAAK,cAAc,KAAK,cAAc,MACrD,CAAC,OAAO,UAAU;AAClB,gBAAM,IAAI,MAAM,gFACmB,KAAK;;AAE5C,aAAK,YAAY,iBAAiB,2BAA2B;AAC7D,aAAK,eAAe,iBAAiB;AACrC,aAAK,uBACD,iBAAiB,wBAAwB,KAAK;AAClD,aAAK,wBAAwB,iBAAiB;AAC9C,aAAK,wBAAwB,iBAAiB,yBAAyB;AACvE,aAAK,qBACD,iBAAiB,uBAAuB,QAAQ,QAAQ;AAC5D,aAAK,kBACD,iBAAiB,oBAAoB,OAAO,OAAO;AACvD,YAAI,CAAC,KAAK,sBAAsB,CAAC,KAAK;AAClC,gBAAM,IAAI,MAAM;;;MAIxB;AACI,eAAO;;mBAGE,0BAA0B;AACnC,YAAI,OAAM,IAAI;AACV,gBAAM,IAAI,MAAM;;AAEpB,mCAA2B,IAAI,mBAAmB;AAElD,cAAM,mBAAmB;AACzB,eAAO;;YAGL;AACF;AACI,eAAK,SAAS,MAAM,UAAU,aAAa,aAAa;YACpD,OAAO,KAAK,yBAAyB,OAAO,OACxC,KAAK;YACT,OAAO;;;AAIX,gBAAM,IAAI,MAAM,iDAAiD,EAAE;;AAEvE,YAAI,CAAC,KAAK;AACN,gBAAM,IAAI,MAAM;;AAEpB,+BAEA,OAAO,gBAAgB,OAAO;AAC9B,aAAK,eAAe,IAAI;AACxB,YAAI,CAAC,KAAK;AAGN,eAAK,eAAe,KAAK,aAAa;mBAEjC,KAAK,aAAa,eAAe,KAAK;AAC3C,gBAAM,IAAI,MAAM,wCACC,KAAK,yBACP,KAAK,aAAa;;AAErC,6BAAqB,KAAK,aAAa,wBAAwB,KAAK;AACpE,aAAK,WAAW,KAAK,aAAa;AAClC,aAAK,SAAS,UAAU,KAAK,UAAU;AACvC,aAAK,SAAS,wBAAwB,KAAK;AAC3C,qBAAa,QAAQ,KAAK;AAC1B,aAAK,WAAW,IAAI,aAAa,KAAK;AACtC,aAAK,WAAW,IAAI,aAAa,KAAK;AACtC;;YAEE;AACF,YAAI,KAAK;AACL,iBAAO,CAAE,OAAO,MAAM,MAAM;;AAEhC;AACA;AACA,+BAAuB,MAAM,KAAK;AAClC,YAAI,KAAK;AACL,2BAAiB,KAAK,aAAa,eAAe;AAClD,8BAAoB,KAAK,4BAA4B,UAAU,CAAC,KAAK,WAAW,KAAK,sBAAsB;;AAE/G,YAAI,KAAK;AACL,2BAAiB,KAAK,aAAa,eAAe;AAClD,2BAAiB,KAAK,4BAA4B,UAAU,CAAC,KAAK,YAAY,KAAK,SAAS;;AAEhG,eAAO;UACH,OAAO,CAAE,aAAe,mBAAmB,UAAY;UACvD,MAAM;;;YAKR;AACF,eAAQ,OAAM,KAAK,QAAQ;;YAEzB;AACF,8BAAsB;AACtB,8BAAsB;AACtB,4BAAoB;AACpB,eAAO,IAAI,QAAQ;AACf,6BAAmB,YAAY;AAC3B,gBAAI,KAAK;AACL,mBAAK,SAAS,sBAAsB,KAAK;AAEzC,kBAAI,KAAK,SAAS,OAAO;AACrB,wBAAQ,CAAE,eAAe;;AAE7B,4BAAc,KAAK,KAAK,SAAS,MAAM,GAAG,KAAK;;AAEnD,gBAAI,KAAK;AACL,mBAAK,SAAS,uBAAuB,KAAK;AAC1C,4BAAc,KAAK,KAAK,SAAS;;AAGrC,gBAAI,EAAE,kBAAkB,KAAK;AACzB,4BAAc;AACd,sBAAQ,CAAE,eAAe;;aAE9B,KAAK,UAAU,KAAK,eAAe;;;MAI9C;AACI,YAAI,CAAC,KAAK;AACN,eAAK,WAAW;AAChB,eAAK,SAAS;AACd,eAAK,aAAa;AAClB,cAAI,KAAK,UAAU,QAAQ,KAAK,OAAO,YAAY,SAAS;AACxD,iBAAK,OAAO,YAAY,GAAG;;;;MAKvC;AACI,cAAM,IAAI,MAAM;;MAGpB;AACI,eAAO,KAAK;;MAEhB;AACI,0BAAkB,MAAM,GAAG;AAC3B,yBAAiB,IAAI,aAAa,MAAM,SAAS;AACjD,cAAM,QAAQ,cAAa,SAAS,IAAI,OAAM,IAAI;AAClD,eAAO;;MAEX;AACI,qBAAa,IAAI,aAAa,eAAmB;AAEjD,aAAK,IAAI,UAAU,KAAK,SAAS,SAAS;AAC1C,eAAO,QAAO,MAAM;;;ACvL5B;;;;;;;;;;;;;;;;;iCAuBoC;MAChC;AACI;AACA,aAAK,qBAAqB;AAC1B,aAAK,eAAe;AACpB,aAAK,WAAW;AAChB,aAAK,SAAS;AACd,YAAI,KAAK;AACL,eAAK,SAAS;AACd,eAAK,WACD,CAAC,KAAK,aAAa,cAAc,KAAK,aAAa;AACvD,eAAK,aAAa,UAAS,CAAC,IAAI;AAChC,cAAI,KAAK,aAAa;AAElB,uCAA2B,KAAK,aAAa,cAAc,IAAM,KAAK,mBAAmB;AACzF,wCAA4B,KAAK,aAAa,eAAe,IACzD,KAAK,mBAAmB;AAC5B,mCAAwB,KAAI,sBAAsB;AAClD,oCAAyB,KAAI,uBAAuB;AACpD,iCAAqB,iBAAiB;AACtC,kCAAsB,sBAAsB;AAC5C,iBAAK,UAAU,SAAS,CAAC,iBAAiB,gBAAgB,eAAe,eAAe,CAAC,GAAG;;AAG5F,iBAAK,UAAU,SAAS,CAAC,GAAG,GAAG,GAAG,IAAI,CAAC,GAAG;;;;MAItD;AACI,eAAO;;mBAGE,0CAA0C;AACnD,YAAI,OAAM,IAAI;AACV,gBAAM,IAAI,MAAM;;AAEpB,YAAI,CAAC;AAGD,+BAAqB,SAAS,cAAc;AAC5C,cAAI,CAAC,aAAa,eAAe,CAAC,aAAa;AAC3C,kBAAM,IAAI,MAAM;;AAGpB,6BAAmB,QAAQ,aAAa;AACxC,6BAAmB,SAAS,aAAa;;AAE7C,+BAAuB,IAAI,eAAe,oBAAoB;AAE9D,cAAM,eAAe;AACrB,eAAO;;YAGL;AACF,YAAI,KAAK,aAAa;AAClB,kBAAa,KAAK,aAAa,eAAe,UACzC,KAAK,aAAa,eAAe,eAAgB,
MAAM,+BAA+B,KAAK,aAAa;;AAGjH;AACI,eAAK,SAAS,MAAM,UAAU,aAAa,aAAa;YACpD,OAAO;cACH,UAAU,KAAK,aAAa;cAC5B,YAAY,KAAK,aAAa,aAC1B,KAAK,aAAa,aAClB;cACJ,OAAO,KAAK,mBAAmB;cAC/B,QAAQ,KAAK,mBAAmB;;;;AAMxC,YAAE,UAAU,iDAAiD,EAAE;AAC/D,gBAAM;;AAEV,YAAI,CAAC,KAAK;AACN,gBAAM,IAAI,MAAM;;AAGpB;AACI,eAAK,mBAAmB,YAAY,KAAK;;AAGzC,kBAAQ,IAAI;AACZ,eAAK,mBAAmB,MAAM,OAAO,IAAI,gBAAgB,KAAK;;AAGlE,aAAK,mBAAmB;AACxB,aAAK,WAAW;AAChB,eAAO,IAAI,QAAQ;AAEf,eAAK,mBAAmB,mBAAmB;AACvC;;;;YAIN;AACF,YAAI,KAAK;AACL,iBAAO,CAAE,OAAO,MAAM,MAAM;;AAEhC;AACA;AACI,gBAAM,WAAmB,KAAK;;AAG9B,gBAAM,IAAI,MAAM,4CAA4C,KAAK,UAAU;;AAE/E,YAAI,KAAK;AACL;AACI,mBAAO,CAAE,OAAO,KAAK,mBAAmB,MAAM,MAAM;;AAGpD,kBAAM,IAAI,MAAM,oCAAoC,EAAE;;AAGtD,gBAAI;;;AAIR,iBAAO,CAAE,OAAO,KAAK,MAAM;;;MAGnC;AAII,YAAI,KAAK,aAAa,eAAe,KAAK,aAAa,gBAClD,MAAK,mBAAmB,UAAU,KAAK,aAAa,eACjD,KAAK,mBAAmB,WAAW,KAAK,aAAa;AACzD,iBAAO;;AAEX,eAAO;;MAGX;AACI,eAAO,KAAK;AACR,gCAAsB,IAAI,UAAU,WAAW;AAC/C;AACA,yBAAe,OAAM,cAAc,eAAe,KAAK,SAAS,KAAK,YAAY,KAAK,UAAU;AAEhG,wBAAc,aAAa;AAC3B,iBAAO,aAAa,QAAQ,MAAM,MAAM;;;YAK1C;AACF,eAAQ,OAAM,KAAK,QAAQ;;MAG/B;AACI,uBAAe,KAAK,OAAO;AAC3B,eAAO,QAAQ,WAAS,MAAM;AAC9B;AACI,eAAK,mBAAmB,YAAY;;AAGpC,kBAAQ,IAAI;AACZ,eAAK,mBAAmB,MAAM;;AAElC,aAAK,WAAW;;MAGpB;AACI,cAAM,IAAI,MAAM;;;AC5LxB;;;;;;;;;;;;;;;;;;;ACAA;;;;;;;;;;;;;;;;;iCAkBoC;MAmBhC;AACI,eAAO,IAAI,cAAc,MAAM;;;gCAYX;MACxB;AACI;AACA,aAAK,WAAW;AAChB,aAAK,OAAO,IAAI,kBAAkB,UAAU;;MAEhD;AACI,eAAO,KAAK,KAAK;;YAEf;AACF,eAAO,KAAK,KAAK;;;oCAGO;MAC5B;AACI;AACA,aAAK,WAAW;AAChB,aAAK,YAAY;AAEjB,aAAK,YAAY;;MAErB;AACI,eAAO,GAAG,KAAK,SAAS,uBAAuB,KAAK;;YAElD;AACF,4BAAoB,MAAM,KAAK,SAAS;AACxC,YAAI,YAAY;AACZ,cAAI,KAAK,cAAc;AACnB,mBAAO;;AAIX,eAAK,YAAY,KAAK,KAAK;AAC3B,eAAK,YAAY;AACjB,iBAAO;;AAEX,sBAAc,YAAY,MAAM,MAAM,KAAK;AAI3C,cAAM,KAAK,KAAK,YAAY,MAAM;AAClC,2BAAmB,MAAM,MAAM,GAAG;AAC9B,eAAK,YAAY,KAAK;;AAE1B,aAAK,YAAY,MAAM,MAAM,SAAS;AACtC,eAAO;;;AC/Ff;;;;;;;;;;;;;;;;;oCAoBuC;MAUnC;AACI,eAAO,IAAI,aAAa;;;+BAYL;MACvB;AACI;AACA,aAAK,WAAW;AAChB,aAAK,OAAO,IAAI,iBAAiB;;MAErC;AACI,eAAO,KAAK,KAAK;;YAEf;AACF,eAAO,KAAK,KAAK;;;mCAyBM;MAC3B;AACI;AACA,aAAK,WAAW;AAChB,YAAI,OAAM,IAAI;AACV,eAAK,UAAU,IAAI,YAAY;;AAI/B,iBAAQ,iBAAkB;AAC1B,eAAK,UAAU,IAAI,cAAc;;;MAGzC;AACI,eAAO,GAAG,KAAK,SAAS;;YAEtB;AACF,4BAAoB,MAAM,KAAK,SAAS;AACxC;AACA,YAAI,YAAY;AACZ,iBAAO;;AAGP,kBAAQ,YAAY;;AAExB;AACA,YAAI,OAAM,IAAI;AACV,iBAAO,KAAK,QAAQ,OAAO,OAAO,CAAE,QAAQ;;AAG5C,iBAAO,KAAK,QAAQ,MAAM,OAAO,KAAK,MAAM;;AAEhD,aAAK,YAAY,KAAK;AACtB,eAAO;;;AC/Gf;;;;;;;;;;;;;;;;;oCA2BuC;MACnC,4BAA4B;AACxB;AACA,aAAK,OAAO;AACZ,aAAK,UAAU;AACf,gBAAa,gBAAgB,cACxB,QAAM,IAAI,gBACN,gBAAgB,QAAQ,gBAAgB,OACzC,QAAQ,MAAM;AAEtB,aAAK,SAAS,QAAQ,UAAU;AAEhC,aAAK,YAAY,QAAQ,aAAa,OAAO;;MAEjD;AACI,eAAO,cAAc,KAAK;;YAExB;AACF,YAAI,KAAK,UAAY,MAAK,gBAAgB,aACtC,KAAK,KAAK,aACV,KAAK,KAAK;AACV,iBAAO,CAAE,OAAO,MAAM,MAAM;;AAEhC,sBAAc,IAAI,QAAQ;AACtB,sBAAY,KAAK,SAAS,KAAK;AAC/B,cAAI,KAAK,gBAAgB;AAGrB,oBAAQ,IAAI,WAAW,KAAK,KAAK,MAAM,KAAK,QAAQ;;AAMpD,+BAAmB,IAAI;AACvB,uBAAW,SAAS;AAChB,0BAAW,WAAW;AAItB,kBAAI,iBAAgB;AAChB,wBAAO,IAAI,WAAW;;AAE1B,kBAAI,CAAE,kBAAgB;AAClB,uBAAO,OAAO,IAAI,UAAU;;AAEhC,sBAAQ;;AAEZ,uBAAW,UAAU;AACjB,qBAAO,OAAO,IAAI,MAAM;;AAE5B,uBAAW,UAAU;AACjB,qBAAO,OAAO,IAAI,MAAM,MAAM;;AAIlC,2BAAc,KAAK,KAAK,MAAM,KAAK,QAAQ;AAG3C,uBAAW,kBAAkB;;AAEjC,eAAK,SAAS;;AAElB,eAAO,CAAE,OAAQ,MAAM,OAAQ,MAAM;;;AC1F7C;;;;;;;;;;;;;;;;;AA0BO,mDAA+C;AAClD;AACA;AACA,UAAK,OAAO,QAAS;AACjB,oBAAY;;AAGZ,oBAAY,IAAI;AAChB,sBAAc,0BAA0B;;AAE5C,uBAAiB,MAAM,QAAW,WAAW;AAC7C,UAAI,SAAS;AACT,2BAAmB,IAAI,WAAW,MAAM,SAAS;AACjD,eAAO,IAAI,kBAAkB,YAAY;;AAGzC,cAAM,IAAI,MAAM,SAAS;;;AAIjC,sCAAkC;AAC9B,oBAAa;QACT,QAAQ,QAAQ;QAChB,SAAS,QAAQ;QACjB,MAAM,QAAQ;QACd,MAAM,QAAQ;QACd,aAA
a,QAAQ;QACrB,OAAO,QAAQ;QACf,UAAU,QAAQ;QAClB,UAAU,QAAQ;QAClB,WAAW,QAAQ;;AAEvB,aAAO;;AC1DX;;;;;;;;;;;;;;;;;AAoBO;AACH,aAAQ,OAAO,WAAW,YAAa,OAAO,OAAO,GAAG,OAAO;;ACrBnE;;;;;;;;;;;;;;;;;iCAyBoC;MAShC,8BAA6B;AACzB;AACA,aAAK,QAAQ;AACb,aAAK,UAAU;;YAEb;AACF,YAAI,YAAY,KAAK,UAAU,OAAM,IAAI;AAErC,qBAAW;AACX,eAAK,QAAQ,GAAG,aAAa,KAAK,MAAM,OAAO;;AAInD,eAAO,IAAI,kBAAkB,KAAK,OAAO,KAAK;;;AC/CtD;;;;;;;;;;;;;;;;;gCAwBmC;MAQ/B,+BAA+B;AAC3B;AACA,aAAK,MAAM;AACX,aAAK,cAAc;;YAMjB;AACF,YAAI,YAAY,KAAK;AACjB,iBAAQ,IAAI,eAAe,KAAK,KAAK,KAAK,aACrC;;AAGL,iBAAO,iBAAiB,KAAK,KAAK,KAAK;;;;AC/CnD;;;;;;;;;;;;;;;;;AAqGO,qCAAiC;AACpC,aAAO,IAAI,WAAW,IAAI,cAAc,SAAS;;AA0B9C;AACH,mBAAa,qBAAqB;AAClC,aAAO,sBAAsB,YAAY;;AA8DtC;AACH,aAAO,sBAAsB;AACzB,oBAAY,MAAM;AAClB,eAAO,qBAAqB,MAAM,IAAI;;;AAiCvC;AACH,aAAO,eAAe,OAAO,oBAAoB;;AAoC9C;AACH,aAAO,mBAAmB,OAAO;;AC1QrC;AAEA,sBAAgB;ACFhB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACAA;;;;;;;;;;;;;;;;AAiBO;AACH,UAAI,CAAC,MAAM,QAAQ;AACf,kBAAS,CAAC;;AAEd,cAAO,QAAQ;AACX,YAAI,KAAK;AACL,kBAAY,EAAE,UAAU,aAAa,MAAM,GAAG;;;;ACvB1D;;;;;;;;;;;;;;;;AAkBA,sCAAgC;AAChC,oBAAc;AACd,mBAAa;AACb,uBAAiB;AACjB,wBAAkB;iCAGkB;MAChC;AACI;AACA,aAAK,YAAY;AACjB,aAAK,WAAW;AAChB,aAAK,OAAO,IAAI,aAAY,MAAM;;MAEtC;AACI,YAAI,KAAK;AACL,eAAK,WAAW;AAChB,cAAI,OAAM,IAAI;AACV,kBAAkB;;;AAY1B,uBAAe;AACf,aAAK,KAAK,IAAI,QAAQ,CAAE,QAAQ,OAAO,UAAU;AACjD,eAAO;;MAQX;AACI;AACA,YAAI,UAAU,YAAY,UAAU,QAAQ,OAAO,SAAS,KACxD,UAAc,OAAO;AACrB,gCAAsB,OAAO,IAAI,OAAK,cAAkB;AACxD,kBAAQ,KAAK,MAAM,eAAe,OAAO;;AAGzC,kBAAQ,KAAK,MAAM,QAAQ,OAAO;;AAEtC,eAAO,CAAE,QAAQ,OAAO,OAAO;;MAGnC;AACI,2BAAmB,KAAK,KAAK,IAAI;AACjC,mBAAW;;MAGf;AACI,YAAI,KAAK,KAAK,IAAI;AACd,6BAAmB,KAAK,KAAK,IAAI;AACjC,qBAAW;;;MAGnB;AACI,aAAK,KAAK,IAAI,QAAQ,CAAE,QAAQ,OAAO,UAAU;;MAErD;AACI,eAAO,KAAK,KAAK;;YAEf;AACF,eAAO,KAAK,SAAS;;MAEzB;AACI,eAAQ,OAAO,sBAAuB,KAAK,KAAK,IAAI;AACpD,YAAI,UAAU;AACV,6BAAmB,KAAK,SAAS,mBAAmB,KAAK;AACzD,6BAAmB,KAAK,SAAS,mBAAmB,KAAK;AACzD,iBAAO,wBAAoC,YAAY;;AAE3D,eAAO,KAAK,KAAK,IAAI,QAAQ;;MAEjC;AACI,sBAAa,KAAK,SAAS,EAAE;AAC7B,0BAAkB;AAClB,YAAI,EAAE,UAAU;AACZ;AAEI,0BAAc,MAAK,IAAI,OAAK,cAAkB;;AAG9C,kBAAM,IAAI,MAAM;;;AAGxB,eAAO,QAAU,EAAE,OAAO,EAAE,OAAO;;MAEvC;AACI,uBAAe,KAAK,MAAM,QAAQ,OAAO;AACzC,eAAO,WAAS,qBAAqB,QAAQ,OAAO,OAAO;;MAE/D;AACI,YAAI,KAAK,KAAK,IAAI;AACd,iBAAQ,sBAAuB,KAAK,KAAK,IAAI;AAC7C,cAAI,sBAAsB;AACtB,iBAAK,YAAY,mBAAmB,KAAK;AACzC,iBAAK,YAAY,mBAAmB,KAAK;;AAE7C,eAAK,KAAK,OAAO;;;MAGzB;AACI,uBAAe,WAAW;AAC1B,YAAI,KAAK,KAAK,IAAI;AACd,6BAAmB,KAAK,KAAK,IAAI;AACjC,qBAAW;AACX,cAAI,WAAW,WAAW;AACtB,iBAAK,YAAY;;;;YAIvB;AACF,sBAAc;AACd;AACA,yBAAiB,SAAa;AAC9B,eAAO,CAAE;;MAEb;AACI,eAAO;UAEH,YAAY;UACZ,SAAS,CAAC;;;MAIlB;AACI,yBAAiB,GAAG;AACpB,yBAAiB,iBAA2B,OAAO,KAAK;AACxD,YAAI,SAAS,KAAK,UAAQ,SAAS;AAC/B,iBAAO,QAAU,IAAI;;AAEzB,yBAAe,QAAU,UAAU,EAAE;AACrC,qBAAa,KAAK,WAAW;AAC7B,qBAAa,GAAG,IAAI,SAAO,MAAM;AAC7B,sBAAY,SAAO,WAAW;AAC9B,yBAAe,IAAI,MAAM,IAAI;AAC7B,uBAAa,GAAG,IAAI,OAAO,QAAQ;AAC/B,mBAAO,KAAK,IAAI,KAAK,QAAQ,KAAK,MAAM;;AAE5C,mBAAO,IAAI,KAAK,IAAI,GAAG,SAAS,GAAG;;AAEvC,eAAO,SAAO;;MAElB;AACI,sBAAc,KAAK,SAAS,EAAE;AAC9B,yBAAe,QAAU,CAAC,EAAE,MAAM,EAAE,OAAO,EAAE;AAC7C,qBAAa,SAAO;AACpB,qBAAa,GAAG,IAAI,MAAM,QAAQ;AAC9B,eAAK,IAAI,EAAE,OAAO,KAAK,MAAM;;AAEjC,eAAO,SAAO;;MAElB;AACI,oBAAY,EAAE,MAAM;AACpB,yBAAiB,IAAI,MAAM,EAAE,OAAO;AACpC,uBAAe;AACf,qBAAa,GAAG,IAAI,EAAE,MAAM;AACxB,cAAI,MAAM;AACN,qBAAS,cAAc,EAAE,MAAM;;;AAGvC,sBAAc,IAAI,MAAM,EAAE,MAAM,KAAK;AACrC,qBAAa,EAAE,MAAM;AACrB,aAAK,QAAQ;AACb,oBAAY,IAAI,MAAM;AACtB,qBAAa,GAAG,IAAI,IAAI,QAAQ;AAC5B,gBAAM,QAAQ;AACd,cAAI,KAAK,OAAS,GAAG,OAAO,MAAM,QAAQ;;AAE9C,eAAO;;MAEX;AACI,yBAAiB,GAAG;AACpB,yBAAe,QAAU,EAAE,OAAO,EAAE;AACpC,qBAAa,KAAK,WAAW;AAC7B,qBAAa,GAAG,IAAI,SAAO,MAAM;
AAC7B,yBAAe,SAAO,WAAW;AACjC,wBAAc,OAAO;AACrB,eAAK,QAAQ,QAAM,MAAM,MAAM,EAAE,MAAM,MAAM,IAAI,MAAM;AACvD,mBAAO,IAAI,KAAK,IAAI,GAAG,QAAQ,GAAG;;AAEtC,eAAO,SAAO;;MAElB;AACI,yBAAiB,GAAG;AAEpB,eAAO,KAAO,QAAU,KAAK;;MAEjC;AACI,yBAAiB,SAAS;AAC1B,qBAAa,QAAQ,IAAI,OAAK,KAAK,SAAS,EAAE;AAC9C,uBAAe,QAAU,QAAQ,GAAG,OAAO,QAAQ,GAAG;AACtD,2BAAmB,OAAO;AAC1B,qBAAa,GAAG,IAAI,QAAQ,QAAQ;AAChC,2BAAiB,KAAK;AACtB,uBAAa,GAAG,IAAI,WAAW,QAAQ;AACnC,uBAAW,MAAM,SAAS;;;AAGlC,eAAO,OAAO;;MAElB;AACI,qBAAa,gBAAoB,CAAC,MAAM,OAAO;AAG/C,yBAAiB,KAAI,QAAQ;AAC7B,8BAAsB,sBAAkC,SAAS,OAAO;AAExE,kBAAU,IAAO,QAAQ,SAAS,QAAQ;AAC1C,kBAAU,IAAO;AACjB,uBAAe,KAAK,IAAI,GAAG,MAAM,QAAQ;AAGzC,eAAO,IAAO,GAAG;;MAErB;AACI,yBAAiB,CAAC,GAAG,IAAI;AACzB,eAAO,KAAK,oBAAoB,GAAG,GAAG,EAAE,OAAO,oBAAoB,KAAK,IAAI,QAAQ;;MAExF;AACI,yBAAiB,CAAC,GAAG,IAAI;AACzB,oBAAW,YAAU,KAAK,MAAM,KAAI;AACpC,4BAAoB;AACpB,eAAO,KAAK,oBAAoB,GAAG,GAAG,aAAa;;MAEvD;AACI,yBAAiB,GAAG;AACpB,oCAAwC,OAAO,MAAM,EAAE;AACvD,wCAAgC,2BAAuC,EAAE,OAAO;AAChF,4BAAoB,YAAW,EAAE,OAAO;AACxC,uBAAe,OAAS,UAAU;AAClC,2BAAmB,eAAmB;AACtC,qBAAa,KAAK,SAAS,OAAO;AAClC,sBAAc,KAAK,SAAS,EAAE;AAC9B,qBAAa,GAAG,IAAI,KAAK,QAAQ,EAAE;AAC/B,yBAAe,IAAI;AACnB,qBAAU;AACV,uBAAa,GAAG,IAAI,YAAY,EAAE;AAC9B,oBAAO,MAAM,SAAS;;AAE1B,eAAK,KAAK;;AAEd,eAAO;;MAEX;AACI,yBAAiB,GAAG;AACpB,wCAAgC,2BAAuC,EAAE,OAAO;AAChF,4BAAoB,YAAW,EAAE,OAAO;AACxC,uBAAe,OAAS,UAAU;AAClC,2BAAmB,eAAmB;AACtC,qBAAa,KAAK,SAAS,OAAO;AAClC,sBAAc,KAAK,SAAS,EAAE;AAC9B,qBAAa,GAAG,IAAI,KAAK,QAAQ,EAAE;AAC/B,yBAAe,IAAI;AACnB,sBAAW;AACX,uBAAa,GAAG,IAAI,YAAY,EAAE;AAC9B,qBAAQ,MAAM,SAAS;;AAE3B,eAAK,KAAK;;AAEd,eAAO;;MAEX;AACI,yBAAiB,GAAG;AACpB,oBAAY;AAGZ,yBAAiB,EAAE,OAAO,WAAW;AACrC,qBAAa,GAAG,IAAI,UAAU,EAAE;AAC5B,uBAAa,WAAW,WAAW,IAAI;;AAE3C,qBAAa,GAAG,IAAI,aAAa,EAAE;AAC/B,4BAAkB,QAAU,GAAG;AAC/B,uBAAa,MAAS,WAAW,YAAY,OAAO;AACpD,uBAAY,KAAK,IAAI,GAAG,IAAI;AAC5B,cAAI,KAAK;;AAEb,eAAO,MAAS;;MAEpB;AACI,yBAAiB,GAAG;AACpB,qBAAa,CAAC;AACd,oCAAwC,UAAU,MAAM,EAAE;AAC1D,wCAAgC,2BAAuC,EAAE,OAAO;AAChF,uBAAe,OAAS,UAAU;AAClC,2BAAmB,eAAmB;AACtC,qBAAa,KAAK,SAAS,OAAO;AAClC,sBAAc,KAAK,SAAS,EAAE;AAC9B,qBAAa,GAAG,IAAI,KAAK,QAAQ,EAAE;AAC/B,yBAAe,IAAI;AACnB,qBAAU,MAAM;AAChB,yBAAe;AACf,uBAAa,GAAG,IAAI,YAAY,EAAE;AAC9B,0BAAc,MAAM,SAAS;AAC7B,gBAAI,QAAQ;AACR,qBAAM;AACN,yBAAW;;;AAGnB,eAAK,KAAK;;AAEd,eAAO;;MAEX;AACI,yBAAiB,GAAG;AACpB,qBAAa,CAAC;AACd,oCAAwC,UAAU,MAAM,EAAE;AAC1D,wCAAgC,2BAAuC,EAAE,OAAO;AAChF,uBAAe,OAAS,UAAU;AAClC,2BAAmB,eAAmB;AACtC,qBAAa,KAAK,SAAS,OAAO;AAClC,sBAAc,KAAK,SAAS,EAAE;AAC9B,qBAAa,GAAG,IAAI,KAAK,QAAQ,EAAE;AAC/B,yBAAe,IAAI;AACnB,qBAAU,MAAM;AAChB,yBAAe;AACf,uBAAa,GAAG,IAAI,YAAY,EAAE;AAC9B,0BAAc,MAAM,SAAS;AAC7B,gBAAI,QAAQ;AACR,qBAAM;AACN,yBAAW;;;AAGnB,eAAK,KAAK;;AAEd,eAAO;;MAEX;AACI,yBAAiB,GAAG;AACpB,YAAI,SAAS,EAAE,OAAO;AAClB,gBAAM,IAAI,MAAM,oDAAoD,EAAE,OAAO,kBACzD;;AAExB,4BAAoB,YAAW,EAAE,OAAO;AACxC,uBAAe,OAAS,EAAE,OAAO;AACjC,qBAAa,KAAK,SAAS,OAAO;AAClC,sBAAc,KAAK,SAAS,EAAE;AAC9B,yBAAiB,EAAE,MAAM,EAAE,OAAO;AAClC,8BAAsB,WAClB,UAAU,IAAI,WAAW,IAAI,IAC7B,UAAU,IAAI;AAClB,qBAAa,GAAG,IAAI,MAAM,QAAQ,KAAK;AACnC,uBAAa,GAAG,IAAI,UAAU;AAC1B,wBAAY,cAAc,GAAG;AAC7B,gBAAI,MAAM;AACN,mBAAK,OAAO,YAAY,IAAI,MAAM;;AAGlC,8BAAgB,cAAc,GAAG,IAAI;AACrC,mBAAK,OAAO,YAAY,MAAM,WAAW,KAAK,WAC1C,MAAM,OAAO,KAAK;;;;AAIlC,eAAO;;MAEX;AACI,yBAAiB,CAAC,GAAG,IAAI;AACzB,eAAO,KAAK,oBAAoB,GAAG,GAAG,QAAQ;AAC1C,iBAAQ,SAAS,OAAQ,IAAI;;;MAGrC;AACI,yBAAiB,CAAC,GAAG,IAAI;AACzB,eAAO,KAAK,oBAAoB,GAAG,GAAG,QAAQ;AAC1C,iBAAQ,SAAS,OAAQ,IAAI;;;MAGrC;AACI,yBAAiB,CAAC,GAAG,IAAI;AACzB,eAAO,KAAK,oBAAoB,GAAG,GAAG,QAAQ;AAC1C,iBAAQ,OAAO,OAAQ,IAAI;;;MAGnC;AACI,yBAAiB,CAAC,GAAG,IAAI;AACzB,eAAO,KAAK,oBAAoB,GAAG,GAAG,QAAQ;AAC1C,iBAAQ,QAAQ,OAAQ,IAAI;;;MAGpC;AACI,yBAAiB,
C,mBAAa,GAAG,IAAI,MAAM;AACtB,oBAAW;AACX,oBAAW;AACX,qBAAa,GAAG,IAAI,MAAM;AACtB,oBAAU,UAAsB,IAAI,GAAG,MAAM;AAC7C,uBAAa,qBAAiC,OAAM;AACpD,mBAAQ,KAAK,OAAO,EAAE,OAAO,KAAK,OAAO,EAAE;AAC3C,mBAAQ,KAAK,OAAO,EAAE,OAAO,KAAK,OAAO,EAAE;;AAE/C,YAAI;AACA,mBAAQ;AACR,mBAAQ;;AAEZ,4BAAgC,KAAK,OAAM,OAAM;;AAErD,aAAO;;AChOX;;;;;;;;;;;;;;;;AAmBO;AACH,aAAQ,QAAQ,qBAAY;AAC5B,aAAQ,iBAAU;AAClB,wBAAkB,eAAmB,OAAM;AAE3C,iCAA2B,OAAM,MAAM,OAAM,MAAM,SAAS;AAC5D,oBAAc,YAAY;AAC1B,sBAAgB,UAAQ;QACpB,QAAQ,CAAE,GAAG;QACb,SAAA;QACA,OAAO,CAAE,OAAO,CAAC,OAAO;;AAE5B,qBAAe,SAAS,SAAS,OAAO;AACxC,6BAAuB,UAAQ,CAAE,QAAQ,CAAE,GAAG,SAAU,SAAA,UAAS,OAAO,CAAE,OAAO,OAAM;AACvF,eAAQ,8BAA8B;AACtC,eAAQ,8BAA8B;AACtC,aAAO;;AAEJ,sBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY;;ACxChB;;;;;;;;;;;;;;;;AAiBO;AACH,aAAQ,mBAAS,SAAU;AAC3B,aAAQ,OAAO,OAAO,SAAU;AAChC,qBAAe,SAAS,YAAgB;AACxC,qBAAe,mBAAuB,QAAQ,eAAmB;AACjE,iBAAW,QAAQ,OAAO;AAC1B,aAAO,SAAQ,eAAe,OAAO,QAAQ;;AAE1C,wBAAmB;MACtB,YAAY;MACZ,aAAa;MACb,YAAY;;AAEhB;AACI,UAAI,UAAU;AACV,eAAO,KAAK;;AAGZ,eAAO,KAAK;;;ACnCpB;;;;;;;;;;;;;;;;AAiBO,iCAA4B;MAC/B,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,OAAO;AAC1B,eAAQ,iBAAU;AAClB,2BAAmB;AACnB,uBAAe,wBAA4B,OAAM,OAAO,eAAmB,OAAM;AACjF,8DAAsD,OAAM;AAC5D,0BAAkB,WAAW,KAAK,IAAI,OAAM,QAAQ;AACpD,4BAAoB,GAAG,WAAW,OAAO;AACrC,8BAAoB,WAAW,aAAa,cAAc;AAC1D,yBAAe,GAAG,MAAM,aAAa;AACjC,8BAAkB,MAAO,cAAa;AACtC,2BAAe,GAAG,MAAM,YAAY;AAChC,gCAAkB,MAAM;AACxB,iCAAmB,GAAG,UAAU,aAAa;AACzC,gCAAe,CAAC,OAAO,KAAK,KAAK;AACjC,0BAAU,QAAO;AACjB,+BAAe,KAAK,MAAM,aAAa;AACvC,+BAAe,cAAc,YAAY,YAAY;AACrD,kCAAkB,UAAU;AAE5B,oBAAI,UAAU,KAAK,SAAS;AAExB,2CAAyB,SAAS;AAClC,mCAAiB,cAAc,YAAY,mBAAmB;AAC9D,gCAAc,UAAU;;AAE5B,uBAAO,UAAU;;;;;AAKjC,uBAAe,WAAW,MAAM,QAAQ,OAAM,OAAO,OAAM;AAC3D,eAAO,CAAE,QAAQ,OAAO,OAAM,OAAO,OAAO,OAAM;;;ACnD1D;;;;;;;;;;;;;;;;AAoBO;AACH,aAAQ,QAAQ,mBAAS,SAAU;AACnC,aAAQ,GAAG,QAAQ,MAAM,0BAA2B;AACpD,aAAQ,SAAS,WAAK,YAAY,WAAW,iBAAiB,2BAAe;AAC7E,mBAAa,OAAO;QAChB,QAAQ,CAAE,GAAG;QACb,SAAA;QACA,OAAO,CAAE,SAAS,KAAA,MAAK,YAAY,WAAW;;AAElD,UAAI;AACA,0BAAkB;AAClB,iBAAS,MAAI,CAAE,QAAQ,CAAE,GAAG,QAAQ,GAAG,OAAQ,SAAA;AAC/C,iBAAQ,8BAA8B;;AAE1C,UAAI;AACA,0BAAkB;AAClB,iBACI,kBAAgB,UAAS,QAAQ,aAAY;AACjD,iBAAQ,8BAA8B;;AAE1C,aAAO;;AAEJ,+BAA0B;MAC7B,YAAY;MACZ,aAAa;MACb,YAAY;;AC7ChB;;;;;;;;;;;;;;;;AAoBO;AACH,aAAQ,QAAQ,mBAAS,SAAU;AACnC,aAAQ,GAAG,QAAQ,MAAM,0BAA2B;AACpD,aAAQ,SAAS,WAAK,YAAY,WAAW,iBAAiB,2BAAe;AAC7E,mBAAa,sBAAsB;QAC/B,QAAQ,CAAE,GAAG;QACb,SAAA;QACA,OAAO,CAAE,SAAS,KAAA,MAAK,YAAY,WAAW;;AAElD,UAAI;AACA,0BAAkB;AAClB,iBAAS,MAAI,CAAE,QAAQ,CAAE,GAAG,QAAQ,GAAG,OAAQ,SAAA;AAC/C,iBAAQ,8BAA8B;;AAE1C,UAAI;AACA,0BAAkB;AAClB,iBACI,kBAAgB,UAAS,QAAQ,aAAY;AACjD,iBAAQ,8BAA8B;;AAE1C,aAAO;;AAEJ,wCAAmC;MACtC,YAAY;MACZ,aAAa;MACb,YAAY;;AC7ChB;;;;;;;;;;;;;;;;AAmBO;AACH,aAAQ,QAAQ,qBAAY;AAC5B,aAAQ,iBAAU;AAClB,wBAAkB,eAAmB,OAAM;AAE3C,iCAA2B,OAAM,MAAM,OAAM,MAAM,SAAS;AAC5D,oBAAc,YAAY;AAC1B,sBAAgB,UAAQ;QACpB,QAAQ,CAAE,GAAG;QACb,SAAA;QACA,OAAO,CAAE,OAAO,CAAC,OAAO;;AAE5B,qBAAe,SAAS,SAAS,MAAM;AACvC,6BAAuB,UAAQ,CAAE,QAAQ,CAAE,GAAG,SAAU,SAAA,UAAS,OAAO,CAAE,OAAO,OAAM;AACvF,eAAQ,8BAA8B;AACtC,eAAQ,8BAA8B;AACtC,aAAO;;AAEJ,uBAAmB;MACtB,YAAY;MACZ,aAAa;MACb,YAAY;;ACxChB;;;;;;;;;;;;;;;;AAkBO,uBAAiB,gBAAgB,UAAU,QAAQ,OAAO,SAAS,MAAM,IAAI,GAAG;AAChF,2BAAuB;MAC1B,YAAY;MACZ,aAAa;MACb,YAAY;;ACtBhB;;;;;;;;;;;;;;;;AAkBO,oBAAc,gBAAgB,OAAO,QAAQ,KAAK,IAAI,QAAQ,WAAW,IAAI,GAAG;AAChF,wBAAoB;MACvB,YAAY;MACZ,aAAa;MACb,YAAY;;ACtBhB;;;;;;;;;;;;;;;;AAkBO,oBAAc,gBAAgB,OAAO,QAAQ,OAAO,MAAM,MAAM,IAAI,GAAG;AACvE,wBAAoB;MACvB,YAAY;MACZ,aAAa;MACb,YAAY;;ACtBhB;;;;;;;;;;;;;;;;AAkBO,oBAAc,gBAAgB,OAAO,QAAQ,KAAK,MAAM;AACxD,wBAAoB;MACvB,YAAY;MACZ,aAAa;MACb,YAAY;;ACtBhB;;;;;;;;;;;;;;;;AAkBO,yBAAmB,gBAAgB,YAAY,QA
AQ,KAAK,IAAI,GAAG;AACnE,6BAAyB;MAC5B,YAAY;MACZ,aAAa;MACb,YAAY;;ACtBhB;;;;;;;;;;;;;;;;AAsBO,uBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,OAAO;AAC1B,eAAQ,KAAM;AACd,eAAQ,kBAAkB,YAAa;AACvC,2BAAmB;AACnB,qBAAa,EAAE;AACf,sBAAc,OAAO;AACrB,yBAAiB,gBAAoB,kBAAkB;AACvD,mBAAW;AACX,6BAAqB,oBAAgC,MAAM;AAC3D,oBAAY,WAAW,KAAK,IAAI,EAAE,QAAQ;AAC1C,YAAI,gBAAgB;AAChB,2BAAiB,IAAI,MAAM;AAC3B,uBAAa,GAAG,IAAI,SAAS,QAAQ;AACjC,qBAAS,KAAK,OAAO,aAAa;;AAEtC,kBAAQ,cAAc,OAAO,QAAQ,EAAE,OAAO,cAAc;AAC5D,iBAAO,kBAA8B,KAAK,QAAQ;AAClD,mBAAS;;AAEb,yBAAiB,GAAG;AACpB,oCAAwC,OAAO,MAAM;AACrD,2CAAmC,2BAAuC,QAAQ;AAClF,2BAAmB,eAAmB;AACtC,uBAAe,QAAQ,OAAO,YAAY,aAAa,EAAE;AACzD,uBAAe,WAAW,MAAM,QAAQ,aAAa,EAAE;AACvD,uBAAe;AACf,YAAI;AAEA,2BAAiB,sBAAkC,aAAa;AAChE,qBAAW;;AAEf,eAAO,CAAE,QAAQ,OAAO,UAAU,OAAO,EAAE;;;ACxDnD;;;;;;;;;;;;;;;;AAoBO;AACH,aAAQ,QAAQ,mBAAS,SAAU;AACnC,aAAQ,KAAM;AACd,uBAAiB,GAAG;AACpB,aAAQ,YAAY,SAAS,WAAK,mBAAoB;AACtD,wBAAkB;AAClB,cAAY,gCAA4C,SAAS,YAAY,MAAM,wEAChE,0BAA0B;AAC7C,uBAAiB,mBAA+B,EAAE,OAAO,YAAY,SAAS,WAAW,MAAK;AAC9F;AACA,UAAI,SAAS,gBAAgB,KAAK,SAAS,iBAAiB,KACxD,aAAiB,SAAS,SAAS,SAAS;AAC5C,cAAM,WAAS,CAAE,QAAQ,CAAE,IAAK,SAAA;;AAGhC,wBAAgB,SAAQ,KAAK,IAAI,EAAE,QAAQ;AAC3C,yBAAgB,gBAAoB,EAAE;AACtC,wBAAe,OAAK,SAAS,EAAE,OAAO,EAAE,OAAO,UAAS,UAAU;AAClE,cAAM,SAAQ,eAAe,SAAS,UAAU,EAAE,OAAO,QAAO;;AAEpE,aAAO;;AAEJ,2BAAsB;MACzB,YAAY;MACZ,aAAa;MACb,YAAY;;AC7ChB;;;;;;;;;;;;;;;;AAmBO;AACH,aAAQ,QAAQ,mBAAS,SAAU;AACnC,aAAQ,IAAI,eAAO,UAAW;AAC9B,gBAAU;AACV,uBAAiB,CAAC,QAAO,SAAS;AAClC,aAAQ,YAAY,SAAS,WAAK,mBAAoB;AACtD,uBAAiB,mBAA+B,EAAE,OAAO,YAAY,SAAS,GAAmB,MAAK;AACtG,sBAAgB,SAAQ,KAAK,IAAI,EAAE,QAAQ;AAC3C,wBAAkB,QAAO,SAAS,UAAU,EAAE,OAAO,iBAAiB,SAAS,EAAE,OAAO,EAAE,OAAO,UAAU;AAC3G,2BAAqB,SAAS;AAC9B,0BAAoB,SAAS;AAC7B,6BAAuB,SAAS;AAChC,4BAAsB,SAAS;AAC/B,oCAA8B,SAAS;AACvC,mCAA6B,SAAS;AACtC,sBAAgB,uBAAuB,IAAI,SAAS,QAAQ;AAC5D,qBAAe,wBAAwB,IAAI,SAAS,QAAQ;AAC5D,iBAAW,QAAO,EAAE,OAAO;AAC3B,qBAAe,SAAQ,KAAK,IAAI,GAAG,QAAQ;AAC3C,oBAAc,QAAO,GAAG,OAAO,WAAW;AAC1C,mBAAa,GAAG,IAAI,SAAS,WAAW,EAAE;AACtC,qBAAa,GAAG,IAAI,SAAS,YAAY,EAAE;AACvC,yBAAe,GAAG,MAAM,SAAS,UAAU,EAAE;AACzC,2BAAe,GAAG,MAAM,SAAS,SAAS,EAAE;AAExC,gCAAkB,MAAM;AACxB,gCAAkB,MAAM;AACxB,4BAAc;AACd,4BAAc,GAAG,KAAK,uBAAuB,MAAM;AAC/C,4BAAa,aAAY,MAAM;AAC/B,oBAAI,MAAM,KAAK,OAAO,SAAS,aAC3B,KAAK,MAAM,SAAS;AACpB;;AAEJ,8BAAc,GAAG,KAAK,sBAAsB,MAAM;AAC9C,8BAAa,aAAY,MAAM;AAC/B,sBAAI,MAAM,KAAK,OAAO,SAAS,YAC3B,KAAK,MAAM,SAAS;AACpB;;AAEJ,iCAAe,wBAAwB,uBAAuB,IAC1D,UAAU,IAAI,GAAG,KAAK,KAAK;AAC/B,iCAAe,KAAK,uBAAuB;AAC3C,+BAAa,WAAW,SAAS,IAAI;AACrC,sBAAI,SAAS;AACT;;AAEJ,gCAAc,MAAM,IAAI,GAAG,KAAK,KAAK;AACrC,6BAAW,QAAQ;;;AAG3B,iBAAG,IAAI,SAAS,GAAG,KAAK,KAAK;;;;;AAK7C,aAAO,SAAQ,eAAe,GAAG,OAAO,GAAG,OAAO,GAAG;;AAElD,kCAA8B;MACjC,YAAY;MACZ,aAAa;MACb,YAAY;;AChFhB;;;;;;;;;;;;;;;;AAkBO;AACH,sBAAgB,gBAAoB;AACpC,uBAAiB,OAAK,SAAS,QAAQ,OAAO,SAAS,UAAU;AACjE,2BAAqB,iBAAiB,SAAS,QAAQ,OAAO,UAAU,MAAM;AAC9E,aAAO,CAAC,SAAS,QAAQ,aAAa;;ACtB1C;;;;;;;;;;;;;;;;AAoBO,oCAAgC;MACnC,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,OAAO;AAC1B,eAAQ,KAAM;AACd,eAAQ,YAAY,SAAS,WAAK,uBAAwB;AAC1D,2BAAmB;AACnB,yBAAiB,GAAG;AACpB,uBAAe,WAAW,KAAK,IAAI,EAAE,QAAQ;AAC7C,yBAAiB,mBAA+B,EAAE,OAAO,YAAY,SAAS,CAAC,GAAG,IAAI;AACtF,kCAA0B,sBAAsB,QAAQ,EAAE,OAAO,EAAE,OAAO,qBAAqB;AAC/F,6BAAqB,WAAW,MAAM,QAAQ,SAAS,UAAU,EAAE;AACnE,8BAAsB,WAAW,MAAM,SAAS,SAAS,UAAU,EAAE;AACrE,eAAO;UACH,CAAE,QAAQ,cAAc,OAAO,SAAS,UAAU,OAAO,EAAE;UAC3D,CAAE,QAAQ,eAAe,OAAO,SAAS,UAAU,OAAO;;;;ACnCtE;;;;;;;;;;;;;;;;AAkBO;AACH,aAAQ,QAAQ,mBAAS,SAAU;AACnC,aAAQ,KAAM;AACd,aAAQ,UAAU,QAAS;AAC3B,uBAAiB,GAAG;AACpB,uBAAiB,SAAS,IAAI,WAAU,GAAE,KAAqB,EAAE,MAAM,KAAK,GAAE;AAC9E,oBAAc,SAAS,IAAI,QAAK,GAAE;AAClC,kBAAY,SAAS,IAAI,WAAU,GAAE,
KAAK,EAAE,MAAM;AAClD,qBAAe,SAAS,YAAY,IAAI;AACxC,oBAAc,SAAQ,KAAK,IAAI,EAAE,QAAQ;AACzC,oBAAc,EAAE,MAAM;AACtB,uBAAiB,gBAAoB,EAAE;AACvC,yBAAmB,eAAmB;AACtC,yBAAmB,SAAS;AAC5B,4BAAsB,gBAAoB;AAC1C,sBAAgB,wBAA4B,EAAE,OAAO;AACrD,mBAAa,GAAG,IAAI,YAAY;AAC5B,sBAAa,YAAgB,GAAG,YAAY;AAC5C,sBAAa,GAAG,KAAI,YAAY;AAC5B,cAAI,QAAO,MAAK,MAAM;AAClB,oBAAO,MAAK,MAAM,MAAK,IAAI,QAAO,MAAK;qBAElC,QAAO,OAAM,IAAI;AACtB,oBAAO,MAAM,KAAI,MAAK,KAAK,IAAI,QAAO,MAAK;;;AAGnD,kBAAS,QAAO,IAAI,WAAU,IAAI,MAAM;AACxC,wBAAgB,YAAgB,SAAQ,OAAO;AAC/C,gBAAQ,KAAK,MAAM;;AAEvB,oBAAc,SAAQ,MAAM,SAAS,UAAU,EAAE;AACjD,aAAO,CAAE,QAAQ,OAAO,OAAO,UAAU,OAAO,EAAE;;AAE/C,4BAAwB;MAC3B,YAAY;MACZ,aAAa;MACb,YAAY;;ACtDhB;;;;;;;;;;;;;;;;AAkBA,sCAAgC;AAEzB,uCAAkC;MACrC,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,mBAAS;AAC5B,eAAQ,OAAO,UAAW;AAC1B,eAAQ,eAAe,cAAc,gBAAgB,sBAAuB;AAC5E,2BAAmB;AACnB,yBAAiB,OAAO;AACxB,0BAAkB,WAAW,KAAK,IAAI,MAAM,QAAQ;AACpD,2BAAmB,WAAW,KAAK,IAAI,OAAO,QAAQ;AACtD,eAAQ,iBAAiB,gBAAiB,0BAAwB,WAAW,YAAY,eAAe,cAAc,gBAAgB;AACtI,eAAO,CAAC,iBAAiB;;;AC/BjC;;;;;;;;;;;;;;;;AAkBA,sCAAgC;AAEzB,uCAAkC;MACrC,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,mBAAS;AAC5B,eAAQ,OAAO,UAAW;AAC1B,eAAQ,eAAe,cAAc,gBAAgB,gBAAiB;AACtE,2BAAmB;AACnB,yBAAiB,OAAO;AACxB,0BAAkB,WAAW,KAAK,IAAI,MAAM,QAAQ;AACpD,2BAAmB,WAAW,KAAK,IAAI,OAAO,QAAQ;AACtD,iCAAyB;AACzB,gCAAwB;AACxB,kCAA0B;AAC1B,gCAAwB;AACxB,eAAQ,iBAAiB,kBAAmB,0BAAwB,WAAW,YAAY,kBAAkB,iBAAiB,mBAAmB;AACjJ,eAAO,CAAC,iBAAiB;;;ACnCjC;;;;;;;;;;;;;;;;AAkBO;AACH,aAAQ,QAAQ,mBAAS,SAAU;AACnC,aAAQ,KAAM;AACd,aAAQ,UAAU,iBAAkB;AACpC,uBAAiB,GAAG;AACpB,uBAAiB,SAAS,IAAI,WAAU,GAAE,KAAqB,EAAE,MAAM,KAAK,GAAE;AAC9E,oBAAc,SAAS,IAAI,QAAK,GAAE;AAClC,oBAAc,SAAQ,KAAK,IAAI,EAAE,QAAQ;AACzC,oBAAc,eAAmB,EAAE;AACnC,oBAAc,EAAE,MAAM;AACtB,uBAAiB,gBAAoB,EAAE;AACvC,yBAAmB,eAAmB;AACtC,yBAAmB,SAAS;AAC5B,4BAAsB,gBAAoB;AAC1C,sBAAgB,wBAA4B,EAAE,OAAO;AACrD,UAAI,kBAAkB;AAClB,gBAAQ,KAAK;;AAEjB,mBAAa,GAAG,IAAI,OAAO;AACvB,wBAAe,YAAgB,GAAG,OAAO;AACzC,0BAAkB,QAAO,IAAI,WAAU,IAAI,MAAM;AACjD,yBAAiB,YAAgB,WAAW,YAAY;AACxD,gBAAQ,YAAY,MAAM;;AAE9B,oBAAc,SAAQ,MAAM,SAAS,UAAU,EAAE;AACjD,aAAO,CAAE,QAAQ,OAAO,OAAO,UAAU,OAAO,EAAE;;AAE/C,yBAAoB;MACvB,YAAY;MACZ,aAAa;MACb,YAAY;;AChDhB;;;;;;;;;;;;;;;;AAkBO,yBAAmB,gBAAgB,YAAY,QAAQ,IAAI;AAC3D,6BAAyB;MAC5B,YAAY;MACZ,aAAa;MACb,YAAY;;ACtBhB;;;;;;;;;;;;;;;;AAiBO,oCAA+B;MAClC,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,OAAO;AAC1B,eAAQ,iBAAU;AAClB,eAAQ,SAAS,WAAW,UAAW;AACvC,2BAAmB;AACnB,uBAAe,wBAA4B,OAAM,OAAO,eAAmB,OAAM;AACjF,8DAAsD,OAAM;AAC5D,mCAA2B,gBAA4B,QAAQ,aAAa;AAC5E,iCAAyB;AACzB,0BAAkB,KAAK,IAAI;AAC3B,0BAAkB,KAAK,IAAI;AAC3B,0BAAkB,WAAW,KAAK,IAAI,OAAM,QAAQ;AACpD,4BAAoB,GAAG,WAAW,OAAO;AACrC,8BAAoB,WAAW,aAAa,cAAc;AAC1D,yBAAe,GAAG,MAAM,aAAa;AACjC,8BAAkB,MAAO,cAAa;AACtC,2BAAe,GAAG,MAAM,YAAY;AAChC,gCAAkB,MAAM;AACxB,iCAAmB,GAAG,UAAU,aAAa;AACzC,gCAAe,CAAC,OAAO,KAAK,KAAK;AACjC,0BAAU,QAAO;AACjB,0BAAU,QAAO;AAEjB,6BAAc,KAAI,WAAW,YAAa,KAAI,WAAW;AACzD,6BAAc,KAAI,WAAW,YAAa,KAAI,WAAW;AACzD,yBAAS,KAAK,MAAM,SAAS;AAC7B,yBAAS,KAAK,MAAM,SAAS;AAC7B,kCAAkB;AAClB,oBAAI,OAAO,cAAc;AACrB,sBAAI,YAAY;AACZ,kCAAc;;AAGd,kCAAc,UAAU;;;AAIhC,oBAAI,UAAU,KAAK,SAAS,cAAc,UAAU,KAChD,SAAS;AAET,2CAAyB,SAAU,cAAa;AAChD,2CAAyB,SAAS;AAClC,mCAAiB,cAAc,mBAAmB,mBAAmB;AACrE,gCAAc,UAAU;;AAE5B,+BAAe,cAAc,YAAY,YAAY;AACrD,uBAAO,UAAU;;;;;AAKjC,uBAAe,WAAW,MAAM,QAAQ,OAAM,OAAO,OAAM;AAC3D,eAAO,CAAE,QAAQ,OAAO,OAAM,OAAO,OAAO,OAAM;;;ACvE1D;;;;;;;;;;;;;;;;AAkBO,oBAAc,gBAAgB,OAAO;AAExC,oBAAa,KAAK,MAAM;AACxB,UAAI,KAAK,QAAO;AACZ,eAAO,KAAK,MAAM;iBAEb,KAAK,QAAO;AACjB,eAAO,KAAK,KAAK;;AAGjB,YAAI,QAAO,MAAQ;AACf,iBAAO;;AAGP,iBAAO,QAAO;;;;AAInB,wBAAoB;MACvB,YAAY;MACZ,aAAa;MACb,YAAY;;ACvChB;;;;;;;;;;;;;;;;AAkBA,uBAAmB;AACnB,kBAAc;AACP,mBAAa,g
BAAgB,MAAM;AACtC,UAAI,MAAM;AACN,eAAO,QAAQ;;AAGf,eAAO,aAAc,MAAK,IAAI,MAAM;;;AAGrC,uBAAmB;MACtB,YAAY;MACZ,aAAa;MACb,YAAY;;AC/BhB;;;;;;;;;;;;;;;;AAkBO,sBAAgB,gBAAgB,UAAS,QAAQ,IAAK,KAAI,KAAK,IAAI,CAAC;AACpE,2BAAsB;MACzB,YAAY;MACZ,aAAa;MACb,YAAY;;ACtBhB;;;;;;;;;;;;;;;;AAkBO,mBAAa,gBAAgB,MAAM;AACtC,UAAI,KAAK;AACL,eAAO;iBAEF,KAAK;AACV,eAAO;;AAGP,eAAO;;;AAGR,uBAAmB;MACtB,YAAY;MACZ,aAAa;MACb,YAAY;;AChChB;;;;;;;;;;;;;;;;AAkBO,kBAAY,gBAAgB,MAAK,QAAQ,KAAK,IAAI;AAClD,uBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY;;ACtBhB;;;;;;;;;;;;;;;;AAkBO,mBAAa,gBAAgB,MAAM,QAAQ,KAAK,KAAK;AACrD,uBAAmB;MACtB,YAAY;MACZ,aAAa;MACb,YAAY;;ACtBhB;;;;;;;;;;;;;;;;AAsBA,sBAAgB;AAChB,sBAAkB,KAAK,IAAI,aAAW;AAC/B,uBAAiB,gBAAgB,UAAU;AAG9C,uBAAiB,KAAK,CAAC;AAGvB,uBAAiB,KAAK;AACtB,mBAAa,KAAK,IAAI;AACtB;AACA,UAAI;AACA,iBAAS;iBAEJ;AACL,iBAAS;;AAGT,iBAAS,KAAK,IAAI,IAAM;;AAE5B,aAAO;;AAEJ,2BAAuB;MAC1B,YAAY;MACZ,aAAa;MACb,YAAY;;AC/ChB;;;;;;;;;;;;;;;;AAmBO;AACH,aAAQ,QAAQ,OAAO,qBAAY;AACnC,aAAQ,KAAM;AACd,aAAQ,QAAS;AACjB,uBAAiB,GAAG;AACpB,oBAAc,EAAE,MAAM;AACtB,uBAAiB,IAAI,MAAM;AAC3B,mBAAa,GAAG,IAAI,SAAS,QAAQ;AACjC,iBAAS,KAAK,EAAE,MAAM,KAAK;;AAE/B,qBAAe,SAAQ,KAAK,IAAI,EAAE,QAAQ;AAC1C,qBAAe,cAAc,QAAQ,EAAE,OAAO,EAAE,OAAO,MAAM;AAC7D,qBAAe,SAAQ,MAAM,QAAQ,UAAU,EAAE;AACjD,aAAO,CAAE,QAAQ,OAAO,UAAU,OAAO,EAAE;;AAExC,6BAAwB;MAC3B,YAAY;MACZ,aAAa;MACb,YAAY;;ACrChB;;;;;;;;;;;;;;;;AAqBO;AACH,aAAQ,QAAQ,mBAAS,SAAU;AACnC,aAAQ,KAAM;AACd,aAAQ,YAAY,YAAa;AACjC,uBAAiB,CAAC,IAAI;AACtB,oBAAa,eAAmB;AAChC,+BAAyB,CAAC,CAAC,GAAG;AAC9B,uBAAiB,KAAK,GAAG;AACzB,mBAAa,IAAI,WAAW,QAAQ,IAAI,EAAE,MAAM,QAAQ,EAAE;AACtD,yBAAiB,KAAK,CAAC,GAAG;;AAE9B,sBAAgB,aAAY,WAAW;QACnC,QAAQ,CAAE;QACV,SAAA;QACA,OAAO,CAAE,UAAU,kBAAkB,eAAe;;AAExD,kCAA4B,aAAyB,QAAQ,OAAO,YAAY,OAAM;AACtF,gDAA0C,aAAyB,oBAAoB,QAAQ,WAAW,QAAQ;AAClH,2BAAqB,qBAAiC,QAAQ,OAAO,YAAY,OAAM;AACvF,4BAAsB,CAAE,GAAG;AAC3B,2BAAqB,CAAE,OAAO;AAC9B,8BAAwB,UAAQ,CAAE,QAAQ,eAAe,SAAA,UAAS,OAAO;AACzE,8BAAwB,CAAE,GAAG;AAC7B,6BAAuB,CAAE,MAAM;AAC/B,uBAAiB,YAAU,CAAE,QAAQ,iBAAiB,SAAA,UAAS,OAAO;AACtE,kCAA4B,CAAE,GAAG;AACjC,iCAA2B,CAAE,OAAO;AACpC,qBAAe,UAAQ,CAAE,QAAQ,qBAAqB,SAAA,UAAS,OAAO;AACtE,eAAQ,8BAA8B;AACtC,eAAQ,8BAA8B;AACtC,eAAQ,8BAA8B;AACtC,aAAO;;AAEJ,iCAA6B;MAChC,YAAY;MACZ,aAAa;MACb,YAAY;;ACzDhB;;;;;;;;;;;;;;;;AAkBO,mBAAa,gBAAgB,OAAM,QAAQ,KAAK,KAAK;AACrD,wBAAmB;MACtB,YAAY;MACZ,aAAa;MACb,YAAY;;ACtBhB;;;;;;;;;;;;;;;;AAkBO,0BAAqB;MACxB,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ;AACnB,eAAQ,KAAM;AACd,2BAAmB;AACnB,yBAAiB,GAAG;AACpB,uBAAe,WAAW,KAAK,IAAI,EAAE,QAAQ;AAC7C,0BAAkB,IAAI,aAAa,OAAO;AAC1C,qBAAa,GAAG,IAAI,OAAO,QAAQ,EAAE;AACjC,wBAAc,OAAO;AACrB,oBAAU,KAAK,QAAQ;;AAE3B,uBAAe,WAAW,MAAM,WAAW,EAAE,OAAO,EAAE;AACtD,eAAO,CAAE,QAAQ,OAAO,EAAE,OAAO,OAAO,EAAE;;;AChClD;;;;;;;;;;;;;;;;AAkBO,mBAAa,gBAAgB,OAAM;AACtC,wBAAkB;AAClB,UAAI,MAAM;AACN,eAAO;;AAGP,eAAO,KAAK,IAAI,IAAI,UAAU;;;AAG/B,uBAAmB;MACtB,YAAY;MACZ,aAAa;MACb,YAAY;;AC9BhB;;;;;;;;;;;;;;;;AAkBO,kBAAY,gBAAgB,KAAK,QAAQ,KAAK,IAAI;AAClD,sBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY;;ACtBhB;;;;;;;;;;;;;;;;AAkBO,mBAAa,gBAAgB,OAAM,QAAQ,KAAK,KAAK;AACrD,wBAAmB;MACtB,YAAY;MACZ,aAAa;MACb,YAAY;;ACtBhB;;;;;;;;;;;;;;;;AAmBO;AACH,aAAQ,QAAQ,OAAO,qBAAY;AACnC,aAAQ,QAAS;AACjB,aAAQ,KAAM;AACd,uBAAiB,GAAG;AACpB,qBAAe,SAAQ,KAAK,IAAI,EAAE,QAAQ;AAC1C,aAAQ,cAAc,aAAa,WAAY,WAAW,QAAQ,MAAM,EAAE,OAAO,EAAE;AACnF,aAAO;QACH,SAAQ,eAAe,aAAa,EAAE,OAAO;QAC7C,SAAQ,eAAe,CAAC,QAAQ,SAAS,SAAS;;;AAGnD,yBAAqB;MACxB,YAAY;MACZ,aAAa;MACb,YAAY;;AClChB;;;;;;;;;;;;;;;;AA6GA,2BAAsB;MAClB;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;
MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;;AAEJ,+BAA2B;AACvB,sBAAe;;ACxMnB;;;;;;;;;;;;;;;;ACAA;;;;;;;;;;;;;;;;AAgBA,qBAAiB;AACjB,6BAAyB;MACrB,OAAO;MACP,WAAW;MACX,oBAAoB;MACpB,uBAAuB;MACvB,OAAO;MACP,SAAS;MACT,8BAA8B;;AAE3B;AACH,aAAO,SAAS;;AAEb;AACH,eAAS,gBAAgB;;AAEtB;AACH,UAAI,CAAE,iBAAgB;AAClB,uBAAe,yBAAyB;AACxC,YAAI,WAAW;AACX,mBAAS,gBAAgB;;AAGzB,kBAAQ,IAAI,2CAA2C;AACvD,iBAAO;;;AAGf,iBAAW,SAAS;AACpB,UAAI,GAAG;AACH,eAAO,SAAS;AAChB,eAAO,gBAAgB;;AAE3B,SAAG,QAAQ,GAAG;AACd,SAAG,QAAQ,GAAG;AACd,SAAG,QAAQ,GAAG;AACd,SAAG,QAAQ,GAAG;AACd,SAAG,QAAQ,GAAG;AACd,SAAG,QAAQ,GAAG;AACd,SAAG,OAAO,GAAG;AACb,SAAG,OAAO,GAAG;AACb,SAAG,SAAS,GAAG;AACf,aAAO,SAAS;;AAEpB;AACI,UAAI,OAAO,oBAAoB,eAAe,iBAAiB;AAC3D,eAAO,IAAI,gBAAgB,KAAK;iBAE3B,OAAO,aAAa;AACzB,eAAO,SAAS,cAAc;;AAG9B,cAAM,IAAI,MAAM;;;AAGxB;AACI,UAAI,iBAAiB,KAAK,iBAAiB;AACvC,cAAM,IAAI,MAAM;;AAEpB,qBAAe,aAAa;AAC5B,aAAO,iBAAiB,oBAAoB;AACxC,WAAG;AACH,eAAO,SAAS;SACjB;AACH,UAAI,iBAAiB;AACjB,eAAQ,OAAO,WAAW,SAAS,qBAC/B,OAAO,WAAW,sBAAsB;;AAEhD,aAAO,OAAO,WAAW,UAAU;;ACnFvC;;;;;;;;;;;;;;;;AAiBO;AACN,IAAA;AAgBG,qBAAc,eAAc,WAAW,KAAK;AAiB5C,qBAAc,eAAc,kBAAkB,KAAK;OACpD,iBAAkB,iBAAgB;AAC9B;AACN,IAAA;AACG,oBAAa,cAAa,YAAY,KAAK;AAC3C,oBAAa,cAAa,YAAY,KAAK;AAC3C,oBAAa,cAAa,YAAY,KAAK;AAC3C,oBAAa,cAAa,cAAc,KAAK;OAC9C,gBAAiB,gBAAe;AAC5B;AACN,IAAA;AACG,2BAAoB,qBAAoB,sBAAsB,KAAK;AACnE,2BAAoB,qBAAoB,sBAAsB,KAAK;AACnE,2BAAoB,qBAAoB,8BAA8B,KAAK;AAC3E,2BAAoB,qBAAoB,wBAAwB,KAAK;AACrE,2BAAoB,qBAAoB,wBAAwB,KAAK;OACtE,uBAAwB,uBAAsB;AAC1C;AACH,aAAO,CAAC,SAAS;;AAEd;AACH,aAAO,aAAa;;AAEjB;AACH,aAAO,CAAC,UAAU,GAAG;;AAKlB;AACH,mBAAa,eAAmB;AAChC,2BAAqB,KAAK,KAAK,OAAO;AACtC,aAAO,qBAAyB;;AAE7B;AACH,UAAI,eAAe,uBAAuB;AACtC,cAAM,IAAI,MAAM,iBAAiB,uCAC1B;;AAEX,aAAO,eAAe;;AAEnB;AACH,2BAAqB,cAAc,SAAS,WAAW;AACvD,UAAI,OAAO,SAAS;AAChB,cAAM,IAAI,MAAM,kBAAkB,OAAO,sBAAsB;;AAEnE,gBAAU;AACV,qBAAe,GAAG,MAAM,cAAc,QAAQ,OAAO;AACjD,qBAAa,GAAG,IAAI,UAAU;AAC1B,iBAAO,SAAS,cAAc,MAAM;;;;AAIzC;AACH,aAAO;QACH,KAAK,IAAI,GAAG,KAAK,KAAK,UAAU;QAAK,KAAK,IAAI,GAAG,KAAK,KAAK,OAAO;;;AAGnE;AACH,qBAAe,uCAAuC,MAAM;AAC5D,aAAO,IAAI,IAAI;;AAEZ;AAIH,oBAAc;AACd;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,UAAI,OAAM,UAAU,qBAAqB;AACrC,8BAAsB,MAAM;AAC5B,kCAA0B,MAAM;AAChC,wCAAgC,MAAM;AACtC,oCAA4B,MAAM;AAClC,6BAAqB,MAAM;AAC3B,oCAA4B;AAC5B,6BAAqB;AACrB,+BAAuB,MAAM;AAC7B,2BAAmB,MAAM;;AAGzB,8BAAsB,GAAG;AACzB,kCAA0B,GAAG;AAC7B,wCAAgC,GAAG;AACnC,oCAA4B,MAAM;AAClC,6BAAqB,GAAG;AACxB,oCAA4B;AAC5B,6BAAqB;AACrB,+BAAuB,6BAA6B,OAChD,0BAA0B,iBAC1B;AACJ,2BAAmB,GAAG;;AAE1B,8BAAwB,GAAG;AAC3B,aAAO;QACH;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;;;ACnKR;;;;;;;;;;;;;;;;AAmBO;AACH,0BAAoB;AACpB,UAAI,OAAM,QAAQ;AACd,wBAAgB;;AAEpB,aAAO;;AAEX;AACI,oBAAc,GAAG;AACjB,UAAI,UAAU,GAAG;AACb,cAAM,IAAI,MAAM,kBAAkB,qBAAqB,IAAI;;;AAInE,wBAAoB;AACpB,wBAAoB;AACb;AACH,UAAI,OAAM,QAAQ,mCAAmC,QAAQ,KACxD,cAAc,KAAK,IAAI,QAAQ,KAAK,IAAI,OAAO;AAChD,eAAO;;AAEX,aAAO;;AAEJ;AACH,cAAQ;aACC,GAAG;AACJ,iBAAO;aACN,GAAG;AACJ,iBAAO;aACN,GAAG;AACJ,iBAAO;aACN,GAAG;AACJ,iBAAO;aACN,GAAG;AACJ,iBAAO;aACN,GAAG;AACJ,iBAAO;aACN,GAAG;AACJ,iBAAO;;AAEP,iBAAO,sBAAsB;;;AAGlC;AACH,aAAO,YAAY,IAAI,MAAM,GAAG,aAAa,gBAAgB,gBAAgB,gBAAgB;;AAE1F;AACH,2BAAqB,YAAY,IAAI,MAAM,GAAG,aAAa,GAAG,gBAAgB;AAC9E,mBAAa,IAAI,MAAM,GAAG,aAAa,cAAc;AACrD,mBAAa,IAAI,MAAM,GAAG,cAAc;AACxC,UAAI,GAAG,mBAAmB,cAAc,GAAG,oBAAoB;AAC3D,gBAAQ,IAAI,GAAG,iBAAiB;AAChC,cAAM,IAAI,MAAM;;AAEpB,aAAO;;AAEJ;AACH,6BAAuB,YAAY,IAAI,MAAM,GAAG,aAAa,GAAG,kBAAkB;AAClF,mBAAa,I
AAI,MAAM,GAAG,aAAa,gBAAgB;AACvD,mBAAa,IAAI,MAAM,GAAG,cAAc;AACxC,UAAI,GAAG,mBAAmB,gBAAgB,GAAG,oBAAoB;AAC7D,kCAA0B,sBAAsB,GAAG,iBAAiB;AACpE,cAAM,IAAI,MAAM;;AAEpB,aAAO;;AAEX,4BAAwB;AACxB;AACI,oCAA8B,gBAAgB,KAAK;AACnD,UAAI,yBAAyB;AACzB,gBAAQ,IAAI,wCAAwC;AACpD,gBAAQ,IAAI;AACZ;;AAEJ,yBAAmB,CAAC,sBAAsB;AAC1C,0BAAoB,aAAa,MAAM;AACvC,mBAAY,YAAY,OAAO,WAAW,SAAS;AACnD,mCAA6B,YAAY,IAAI,uBAAsB,UAAe,eAAa,GAAG,YAAY,QAAO;AACrH,0BAAoB;AACpB,mBAAa,GAAG,IAAI,qBAAqB,QAAQ;AAC7C,wBAAgB,KAAK,IAAI,qBAAqB,GAAG,QAAQ;;AAE7D,+BAAyB,qBAAqB,MAAM,GAAG,aAAa;AACpE,wBAAkB,qBAAqB,MAAM,aAAa,GAAG;AAC7D,8BAAwB,qBAAqB,MAAM;AACnD,cAAQ,IAAI,iBAAiB,KAAK;AAClC,cAAQ,IAAI,cAAc,MAAM,MAAM;AACtC,cAAQ,IAAI,MAAM,UAAc,UAAU,IAAI,kBAAkB;AAChE,cAAQ,IAAI,gBAAgB,KAAK;;AAE9B;AACH,aAAO,YAAY,IAAI,MAAM,GAAG,iBAAiB;;AAE9C;AACH,mBAAa,IAAI,MAAM,GAAG,YAAY;AACtC,UAAI,GAAG,oBAAoB,SAAS,GAAG,iBAAiB;AACpD,gBAAQ,IAAI,GAAG,kBAAkB;AACjC,cAAM,IAAI,MAAM;;;AAGjB;AACH,mBAAa,IAAI,MAAM,GAAG,gBAAgB;AAC1C,UAAI,GAAG,oBAAoB,SAAS,GAAG,qBAAqB;AACxD,gBAAQ,IAAI,GAAG,kBAAkB;AACjC,cAAM,IAAI,MAAM;;;AAGjB;AACH,sBAAe,YAAY,IAAI,MAAM,GAAG,gBAAgB;AACxD,mBAAa,IAAI,MAAM,GAAG,WAAW,GAAG,cAAc;AACtD,mBAAa,IAAI,MAAM,GAAG,WAAW,GAAG,cAAc,OAAM,GAAG;AAC/D,aAAO;;AAEJ;AACH,sBAAe,YAAY,IAAI,MAAM,GAAG,gBAAgB;AACxD,mBAAa,IAAI,MAAM,GAAG,WAAW,GAAG,sBAAsB;AAC9D,mBAAa,IAAI,MAAM,GAAG,WAAW,GAAG,sBAAsB,OAAM,GAAG;AACvE,aAAO;;AAEJ;AACH,UAAI,OAAM,UAAU,qBAAqB;AACrC,eAAO;;AAEX,aAAO;;AAEJ;AACH,aAAO,YAAY,IAAI,MAAM,GAAG,iBAAiB;;AAE9C;AACH,6BAAuB,OAAM,UAAU;AACvC,UAAK,SAAS,KAAO,UAAU;AAC3B,0BAAkB,IAAI,SAAS;AAC/B,cAAM,IAAI,MAAM,4BAA4B,YAAY;;AAE5D,UAAK,QAAQ,kBAAoB,SAAS;AACtC,0BAAkB,IAAI,SAAS;AAC/B,qBAAY,IAAI,kBAAkB;AAClC,cAAM,IAAI,MAAM,4BAA4B,YACxC,uDAAuD,OAAM;;;AAGlE;AACH,aAAO,YAAY,IAAI,MAAM,GAAG,qBAAqB;;AAElD;AACH,kBAAY,GAAG,kBAAkB,SAAS;AAC1C,UAAI,QAAQ;AAGR,eAAO;;AAEX,mBAAa,IAAI,MAAM,GAAG,WAAW,GAAG,cAAc;AACtD,mBAAa,IAAI,MAAM,GAAG,oBAAoB,KAAK,qBAAqB,GAAG,OAAO,OAAO,mBAAmB;AAC5G,mBAAa,IAAI,MAAM,GAAG,wBAAwB;AAClD,aAAO;;AAEJ;AACH,0BAAoB,IAAI;AACxB,mBAAa,IAAI,MAAM,GAAG,cAAc,GAAG,WAAW;AACtD,mBAAa,IAAI,MAAM,GAAG,YAAY,GAAG,YAAY;;AAElD;AACH,0BAAoB,IAAI;AACxB,mBAAa,IAAI,MAAM,GAAG,cAAc,GAAG,WAAW;AACtD,mBAAa,IAAI,MAAM,GAAG,YAAY,GAAG,YAAY;;AAElD;AACH,aAAO,YAAY,IAAI,MAAM,GAAG,mBAAmB,SAAS,cAAc,cAAc,cAAc;;AAEnG;AACH,aAAO,GAAG,mBAAmB,SAAS;;AAEnC;AACH,mBAAa,IAAI,MAAM,gBAAgB,IAAI,SAAS;AACpD,mBAAa,IAAI,MAAM,GAAG,UAAU,wBAAwB;;AAEzD;AACH,mBAAa,IAAI,MAAM,GAAG,gBAAgB,GAAG,aAAa;AAC1D,mBAAa,IAAI,MAAM,GAAG,SAAS,GAAG,GAAG,GAAG,OAAO,OAAO,GAAG,OAAO;AACpE,mBAAa,IAAI,MAAM,GAAG,QAAQ,GAAG,GAAG,GAAG,OAAO,OAAO,GAAG,OAAO;;AAEhE;AACH,mBAAa,IAAI,MAAM,GAAG,gBAAgB,GAAG,aAAa;AAC1D,mBAAa,IAAI,MAAM,GAAG,qBAAqB,GAAG,aAAa,GAAG,mBAAmB,GAAG,YAAY,SAAS;;AAE1G;AACH,mBAAa,IAAI,MAAM,GAAG,gBAAgB,GAAG,aAAa;AAC1D,mBAAa,IAAI,MAAM,GAAG,qBAAqB,GAAG,aAAa,GAAG,mBAAmB,GAAG,YAAY,MAAM;;AAEvG;AACH,sBAAe,GAAG,uBAAuB,GAAG;AAC5C,UAAI,YAAW,GAAG;AACd,cAAM,IAAI,MAAM,gCAAgC,2BAA2B,IAAI;;;AAGhF;AACH,cAAQ;aACC,GAAG;AACJ,iBAAO;aACN,GAAG;AACJ,iBAAO;aACN,GAAG;AACJ,iBAAO;aACN,GAAG;AACJ,iBAAO;;AAEP,iBAAO,iBAAiB;;;AAGpC;AACI,sBAAgB,aAAa,IAAI,MAAM;AACvC,UAAI,WAAW;AACX,cAAM,IAAI,MAAM;;AAEpB,aAAO;;AAEX;AACI,6BAAuB,GAAG,mCAAmC;AAC7D,4BAAsB,cAAc,GAAG;AACvC,UAAI,gBAAgB,GAAG,YAAY,gBAAgB;AAC/C,iCAAyB,2BAA2B;AACpD,cAAM,IAAI,MAAM,0BAA0B;;;AAG3C,6CAAyC;AAC5C,aAAO,eAAmB,MAAM,MAAM,GAAG,MAAM,SAAS;;AAErD;AACH,UAAI,MAAM,WAAW;AACjB,cAAM,MAAM;;AAEhB,aAAO;QACH,MAAM,SAAS,IAAI,MAAM,MAAM,SAAS,KAAK;QAAG,MAAM,MAAM,SAAS;;;AAGtE;AACH,sBAAgB,CAAC,GAAG,GAAG;AACvB,uBAAiB,MAAM,WAAW,KAAM,MAAM,WAAW,KAAK,MAAM,OAAO;AAC3E,UAAI,CAAC;AACD,oBACI,CAAC,YAAY,QAAQ,GAAG,YAAY;;AAE5C,aAAO;;AAEJ,kEAA8D;AACjE,uBAAiB,OAAM,UAAU;AACjC,UAAI;AACA,qB
AAa,aAAa;AAM1B,mBAAW,SAAS,IAAI,UAAU,KAAK,SAAS,SAAS,IACrD,mBAAuB,SAAS,MAChC,SAAS;AAGb,YAAI,SAAS,WAAW;AACpB,qBAAW,CAAC,GAAG,SAAS;;;AAIhC,UAAI,SAAS,WAAW;AACpB,8BAAsB,cAAkB;AACxC,mBAAW,cAAc;;AAE7B,iBAAW,eAAmB;AAC9B,UAAI,SAAS,UAAU,KAAK,QAAQ;AAChC,eAAO,CAAC,GAAG;iBAEN,SAAS,WAAW,KAAK,SAAS,MAAM,cAC7C,SAAS,MAAM;AACf,eAAO;iBAEF,SAAS,WAAW,KAAK,SAAS,KAAK,SAAS,MAAM,cAC3D,SAAS,MAAM;AACf,eAAO,CAAC,SAAS,KAAK,SAAS,IAAI,SAAS;iBAEvC,SAAS,WAAW,KAAK,SAAS,MAAM,cAC7C,SAAS,KAAK,SAAS,MAAM;AAC7B,eAAO,CAAC,SAAS,IAAI,SAAS,KAAK,SAAS;iBAEvC,SAAS,WAAW,KACzB,SAAS,KAAK,SAAS,KAAK,SAAS,MAAM,cAC3C,SAAS,MAAM;AACf,eAAO,CAAC,SAAS,KAAK,SAAS,KAAK,SAAS,IAAI,SAAS;iBAErD,SAAS,WAAW,KAAK,SAAS,MAAM,cAC7C,SAAS,KAAK,SAAS,KAAK,SAAS,MAAM;AAC3C,eAAO,CAAC,SAAS,IAAI,SAAS,KAAK,SAAS,KAAK,SAAS;;AAG1D,YAAI;AAMA,2BAAiB,YAAY;AAC7B,qBAAW,UAAU;AACrB,cAAI,SAAS;AACT,aAAC,MAAM,QAAQ,YAAY;;AAE/B,iBAAO,WAAY,QAAO,KAAM,QAAO;AACvC,iBAAO,qBAAyB,MAAM,IAAI,OAAK,IAAI;;AAEvD,eAAO,qBAAyB;;;AAGxC;AACI,aAAO,IAAI,MAAM;;AAMd;AACH,eAAS,OAAO,MAAM;AACtB,eAAS,OAAO,MAAM;AACtB,UAAI,aAAiB,QAAQ;AACzB,eAAO;;AAEX,UAAI,CAAC,OAAO,UAAU,CAAC,OAAO;AAC1B,eAAO;;AAEX,UAAI,OAAO,OAAO,KAAK,OAAO,OAAO,KAAK,OAAO,OAAO,KACpD,OAAO,OAAO;AACd,eAAO;;AAEX,UAAI,OAAO,WAAW,OAAO;AACzB,2BAAmB,OAAO,MAAM,IAAI;AACpC,2BAAmB,OAAO,MAAM,IAAI;AACpC,YAAI,eAAe;AACf,iBAAO;;AAEX,YAAI,OAAO,eAAe,OAAO,eAC5B,QAAO,OAAO,KAAK,OAAO,OAAO;AAClC,iBAAO;;;AAGf,aAAO,OAAO,OAAO,OAAO,MAAM,OAAO,OAAO,OAAO,OAAO,OAAO;;AAKzE;AACA;AACO;AACH,UAAI,oBAAoB;AACpB,mBAAW,gBAAgB;AAC3B,2BAAmB,GAAG,aAAa,GAAG;;AAE1C,aAAO;;AAEJ;AACH,yBAAmB;;AAEhB;AACH,+BAAyB;;AAEtB;AACH,UAAI,0BAA0B;AAC1B,mBAAW,gBAAgB;AAC3B,iCAAyB,GAAG,aAAa,GAAG;;AAGhD,aAAO,KAAK,IAAI,IAAI;;AAEjB;AACH,UAAI,iBAAiB;AACjB,eAAO;;AAEX;AACA,iBAAW,gBAAgB;AAC3B,UAAI,aAAa,IAAI,sCACjB,iBAAiB;AACjB,4BAAoB;iBAEf,aAAa,IAAI;AACtB,4BAAoB;;AAGpB,4BAAoB;;AAExB,aAAO;;AAEJ;AACH,kBAAY,GAAG,aAAa;AAC5B,aAAO,OAAO;;AAEX;AACH;AACI,mBAAW,gBAAgB;AAC3B,YAAI,MAAM;AACN,iBAAO;;;AAIX,gBAAQ,IAAI,sCAAsC;AAClD,eAAO;;AAEX,aAAO;;AAEJ;AACH,UAAI,iBAAiB;AACjB,eAAO;;AAEX,iBAAW,gBAAgB;AAC3B,UAAI,iBAAiB;AACjB,YAAI,CAAC,aAAa,IAAI;AAClB,iBAAO;;;AAIX,YAAI,CAAC,aAAa,IAAI;AAClB,iBAAO;;;AAGf,oCAA8B,uCAAuC;AACrE,aAAO;;AAWJ;AACH,UAAI,iBAAiB;AACjB,eAAO;;AAEX,iBAAW,gBAAgB;AAC3B,UAAI,iBAAiB;AACjB,YAAI,CAAC,aAAa,IAAI;AAClB,iBAAO;;AAEX,YAAI,CAAC,aAAa,IAAI;AAClB,iBAAO;;;AAIX,YAAI,aAAa,IAAI;AACjB,iBAAO,uCAAuC;;AAElD,wCAAgC;AAChC,YAAI,aAAa,IAAI;AACjB,4CAAkC,GAAG,aAAa;AAClD,iBAAO,2CAA2C,IAAI;;AAE1D,eAAO;;AAEX,oCAA8B,uCAAuC;AACrE,aAAO;;AAEX;AACI,wBAAkB,iBAAiB;AACnC,sBAAgB,GAAG;AACnB,SAAG,YAAY,GAAG,YAAY;AAC9B,oBAAc;AACd,qBAAe;AACf,SAAG,WAAW,GAAG,YAAY,GAAG,UAAU,qBAAqB,OAAO,QAAQ,GAAG,UAAU,oBAAoB,UAAU,kBAAkB;AAC3I,0BAAoB,GAAG;AACvB,SAAG,gBAAgB,GAAG,aAAa;AACnC,SAAG,qBAAqB,GAAG,aAAa,GAAG,mBAAmB,GAAG,YAAY,SAAS;AACtF,oCAA8B,GAAG,uBAAuB,GAAG,iBAAiB,GAAG;AAC/E,SAAG,YAAY,GAAG,YAAY;AAC9B,SAAG,gBAAgB,GAAG,aAAa;AACnC,SAAG,cAAc;AACjB,SAAG,kBAAkB;AACrB,aAAO;;AAEX;AAGI,wBAAkB,iBAAiB,IAAI;AACvC,sBAAgB,GAAG;AACnB,SAAG,YAAY,GAAG,YAAY;AAC9B,oBAAc;AACd,qBAAe;AACf,SAAG,WAAW,GAAG,YAAY,GAAG,UAAU,yBAAyB,OAAO,QAAQ,GAAG,UAAU,oBAAoB,UAAU,sBAAsB;AACnJ,0BAAoB,GAAG;AACvB,SAAG,gBAAgB,GAAG,aAAa;AACnC,SAAG,qBAAqB,GAAG,aAAa,GAAG,mBAAmB,GAAG,YAAY,SAAS;AACtF,oCAA8B,GAAG,uBAAuB,GAAG,iBAAiB,GAAG;AAC/E,SAAG,YAAY,GAAG,YAAY;AAC9B,SAAG,gBAAgB,GAAG,aAAa;AACnC,SAAG,cAAc;AACjB,SAAG,kBAAkB;AACrB,aAAO;;AAEJ;AACH,UAAI,iBAAiB;AACjB,eAAO;;AAEX,iBAAW,gBAAgB;AAE3B,wBAAkB,GAAG,aAAa;AAClC,aAAO;;AAEJ;AACH,UAAI,CAAC,MAAM,QAAQ;AACf,kBAAS,CAAC;;AAEd,cAAO,QAAQ;AACX,YAAI,KAAK;AACL,kBAAY,EAAE,UAAU,aAAa,MAAM,GAAG;;;;AChhB1D;;;;;;;;;;;;;;;;AAkBA,kBAAY;AAOZ,UAAI,aAAa,aAAa,MAAM,MAAI,UAAU,mBAAmB;AAErE,UAAI,aAAa,i
BAAiB;AAC9B,UAAI,sBAAsB;AACtB,eAAO;iBAEF,sBAAsB;AAC3B,eAAO;;AAEX,aAAO;;AAGX,UAAI,aAAa,kCAAkC,MAAM;AACzD,UAAI,aAAa,0BAA0B,MAAM,MAAI,IAAI,qBAAqB;AAE9E,UAAI,aAAa,qBAAqB,MAAM;AAE5C,UAAI,aAAa,4BAA4B,MAAM;AAEnD,UAAI,aAAa,cAAc,MAAM,MAAI,QAAQ;AAEjD,UAAI,aAAa,4BAA4B,MAAM,MAAI,QAAQ;AAE/D,UAAI,aAAa,mBAAmB,MAAM,MAAI,QAAQ;AAGtD,UAAI,aAAa,4BAA4B,MAAM;AAEnD,UAAI,aAAa,gCAAgC,MAAM,MAAI,QAAQ;AAEnE,UAAI,aAAa,+BAA+B,MAAM,MAAI,QAAQ;AAElE,UAAI,aAAa,+BAA+B,MAAM,MAAI,QAAQ;AAElE,UAAI,aAAa,+BAA+B,MAAM,MAAI,QAAQ;AAElE,UAAI,aAAa,qBAAqB,MAAM,MAAI,QAAQ;AAExD,UAAI,aAAa,uBAAuB,MAAM,MAAI,QAAQ;AAE1D,UAAI,aAAa,qBAAqB,MAAM,MAAI,QAAQ;AAExD,UAAI,aAAa,0BAA0B,MAAM,uBAAuB,MAAI,UAAU;AAEtF,UAAI,aAAa,gCAAgC,MAAM,uBAAuB,MAAI,UAAU;AAS5F,UAAI,aAAa,gDAAgD;AAC7D,2BAAqB,MAAI,UAAU;AACnC,UAAI,iBAAiB;AACjB,eAAO;;AAEX,aAAO,kCAAkC;;AAM7C,UAAI,aAAa,iDAAiD,MAAM,MAAI,UAAU,kDAAkD,KACpI,CAAC;AAIL,UAAI,aAAa,gCAAgC,MAAM,mCAAmC,MAAI,UAAU;AAKxG,UAAI,aAAa,gCAAgC;AAC7C,aAAO,MAAI,QAAQ,8BACf,QACA,MAAI,QAAQ;;AAMpB,UAAI,aAAa,gCAAgC,MAAM,8BAA8B,MAAI,UAAU;AAEnG,UAAI,aAAa,2BAA2B,MAAM,oBAAoB,MAAI,UAAU;AAIpF,UAAI,aAAa,6BAA6B;AAK1C,0BAAoB,MAAI,QAAQ;AAChC,aAAO,cAAc,IAAI;;AAS7B,UAAI,aAAa,kCAAkC;AAC/C,aAAO;OACR;AACC,UAAI,aAAY,KAAK,eAAc;AAC/B,cAAM,IAAI,MAAM,8FACsB;;;ACtI9C;;;;;;;;;;;;;;;;AAoBA,WAAQ,iCAAiC,qBAAqB,uBAAuB,qBAAqB,yBAAyB,yBAAyB,qBAAqB,qBAAqB,+BAA+B,yBAAyB,yBAAyB,qBAAqB,iCAAiC,6BAA+B;ACpB5W;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,cAAc;AACnB,aAAK,cAAc;AACnB,aAAK,gBAAgB,OAAO,IAAI,UAAU,IAAI;AAC9C,yBAAiB;AAEjB,aAAK,cAAc,QAAQ;AACvB,mBAAS,KAAK,UAAU,kBAAiB;;AAG7C,4BAAkB,KAAK,cAClB,IAAI;AACL,iBAAO,IAAI;WAEV,KAAK;AACV,aAAK,WAAW;;UAEd,SAAS,KAAK;;yBAEC;;;;;;ACpCzB;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,cAAc;AACnB,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,aAAK,cAAc;AACnB,aAAK,gBAAgB,OAAO,IAAI,UAAU,IAAI;AAC9C,yBAAiB;AAEjB,aAAK,cAAc,QAAQ;AACvB,mBAAS,KAAK,SAAS,kBAAiB;;AAG5C,4BAAkB,KAAK,cAClB,IAAI;AACL,iBAAO,IAAI;WAEV,KAAK;AACV,aAAK,WAAW;;UAEd,SAAS,KAAK;;wBAEA;;;;;;ACtCxB;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC;AACtB,eAAQ,YAAY,WAAW,WAAY;AAC3C,YAAI,CAAC;AACD,eAAK,cAAc,KAAK;;AAE5B,aAAK,cAAc,CAAC,WAAW;AAC/B,uBAAgB,QAAO,QAAS,MAAM;AACtC,6BAAqB,YACjB,kBACA;AACJ,aAAK,WAAW;;;;;kCAKU;;;;;8BAKJ;wBACN;;0BAEE;;;;;;;;;;ACzC1B;;;;;;;;;;;;;;;;AAgBO;AACH,aAAO,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK,MAAM,GAAG,MAAM,IAAI,OAAK,GAAG,QAAQ;;AAEtE;AACH,UAAI,SAAS;AACT,eAAO,CAAC;;AAEZ,aAAO,eAAe,MAAM;;AAEzB;AACH,UAAI,SAAS;AACT,eAAO;;AAEX,oBAAa;AACb,mBAAa,GAAG,IAAI,MAAM;AACtB,mBAAU,KAAK;AACf,YAAI,IAAI,OAAO;AACX,qBAAU;;;AAGlB,aAAO;;ACpCX;;;;;;;;;;;;;;;;AAiBO;AACH;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,UAAI,OAAM,UAAU,qBAAqB;AACrC,mBAAU;AACV,oBAAY;AACZ,oBAAY;AACZ,oBAAY;AACZ,oBAAY;AACZ,iBAAS;AACT,uBAAe;AAKf,2BAAmB;;;;;;;;;;;;AAcnB,2BAAmB;AACnB,sBAAc;;;;;;;;;;;AAYd,mBAAU;AACV,oBAAY;AACZ,oBAAY;AACZ,oBAAY;AACZ,oBAAY;AACZ,iBAAS;AACT,uBAAe;AAEf,2BAAmB;;;;;;;;;AASnB,2BAAmB;;;;;;;;;;AAUnB,sBAAc;;;;;;;;;;AAUlB,aAAO;QACH,SAAA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;;;AClHR;;;;;;;;;;;;;;;;AAsBO,yEAAmE;AACtE,sBAAgB,gBAAoB;AACpC,aAAO,QACF,IAAI;AACL,sBAAc,OAAO,QAAO,QAAQ,YAAW;AAC/C,sBAAc,MAAM,QAAQ,SAAS,IACjC,OAAO,QAAO,IAAI,QAAQ,YAAW,QAAO,QAAQ,WACpD,YAAY,QAAO,QAAQ;AAC/B,eAAO,GAAG,UAAU;SAEnB,KAAK;;AAEd;AACI,UAAI,EAAE,WAAW;AACb,eAAO,GAAG,EAAE;;AAEhB,aAAO,MAAM,EAAE,UAAU,EAAE,KAAK;;AAM7B;AACH,UAAI,EAAE,WAAW,EAAE;AACf,cAAM,IAAI,MAAM,wDACL,EAAE,cAAc,EAAE;;AAEjC,qBAAe;AACf,0BAAoB,KAAK,MAAM,EAAE,SAAS;AAC1C,mCAA6B,EAAE,SAAS;AACxC,mBAAa,GAAG,IAAI,aAAa;AAC7B,uBAAe,EAAE,MAAM,IAAI,GAAG,IAAI,IAAI;AACtC,uBAAe,EAAE,MAAM,IAAI,GAAG,IAAI,IAAI;AACtC,eAAO,KAAK,GAAG,SAAS,YAAY,SAAS;;AAEjD,UAAI,yBAAyB;AACzB,qBAAa,EAAE,MAAM,cAAc;AACnC,qBAAa,EAAE,MAAM,cAA
c;AACnC,YAAI,OAAO,WAAW;AAClB,mBAAS,OAAO,IAAI,OAAK,SAAS;AAClC,mBAAS,OAAO,IAAI,OAAK,SAAS;;AAEtC,eAAO,KAAK,GAAG,SAAS,YAAY,SAAS;;AAEjD,aAAO,OAAO,IAAI,UAAU,OAAO,MAAM,KAAK;;AAK3C;AACH,sBAAgB,gBAAoB,OAAO,IAAI,OAAK,EAAE;AACtD,aAAO;;wBAEa,QAAQ,mBAAmB,QAAQ;;;;AAIpD,iCAA6B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AC/EpC;;;;;;;;;;;;;;;;AAiBA,WAAM,wCAAuB;AAGtB;AACH,6BAAuB;AACvB,iBAAW,QAAQ;AACf,qBAAa,eAAmB,EAAE,UAAU;AAE5C,YAAI,EAAE,UAAU;AACZ,yBAAe,KAAK,iBAAiB,EAAE,OAAO,OAAO,IAAI,IAAI,UAAU;;AAGvE,yBAAe,KAAK,qBAAqB,EAAE;AAC3C,yBAAe,KAAK,qBAAqB,EAAE;;;AAGnD,iCAA2B,eAAe,KAAK;AAC/C,mCAA6B,WACxB,IAAI,OAAK,wBAAwB,GAAG,aAAa,qBACjD,KAAK;AACV,0BAAoB,YAAY;AAChC,mBAAa;AACb,wCAAkC,6BAA6B;AAC/D;AACA;AACA,yBAAmB,gBAAgB;AACnC,UAAI,YAAY;AACZ,gCACI,+BAA+B,YAAY,cAAc;AAC7D,uCAA+B,8BAA8B;;AAG7D,gCACI,yBAAyB,YAAY,cAAc;AACvD,uCAA+B,2BAA2B;;AAE9D,UAAI;AACA,wBAAgB;;AAEpB,qBAAe;QACX;QAAc;QAA2B;QACzC;QAAoB;QAAuB;QAAsB;QACnE,KAAK;AACP,aAAO;;AAEX;AACI,oBAAc,OAAO,UAAU;AAC/B,cAAQ,MAAM;aACL;AACD,iBAAO,iBAAiB;aACvB;AACD,iBAAO,aAAa;aACnB;AACD,iBAAO,aAAa;aACnB;AACD,iBAAO,aAAa;aACnB;AACD,iBAAO,aAAa;aACnB;AACD,iBAAO,aAAa;aACnB;AACD,iBAAO,aAAa;;AAEpB,gBAAM,IAAI,MAAM,GAAG,MAAM;;;AAIrC;AACI,oBAAc,OAAO,UAAU;AAC/B,cAAQ,MAAM;aACL;AACD,iBAAO,uBAAuB;aAC7B;AACD,iBAAO,mBAAmB;aACzB;AACD,iBAAO,mBAAmB;aACzB;AACD,iBAAO,mBAAmB;;AAE1B,iBAAO,mBAAmB;;;AAGtC,gFAA4E;AACxE,gBAAU;AACV,UAAI;AACA,eAAO,2BAA2B;;AAGlC,eAAO,qBAAqB;;AAEhC,sBAAgB,OAAO,UAAU;AACjC,uBAAiB,aAAa;AAC9B,UAAI,QAAQ,UAAU,SAAS;AAC3B,YAAI;AACA,iBAAO,+BAA+B,QAAQ;;AAG9C,iBAAO,yBAAyB,QAAQ;;;AAGhD,aAAO;;AAEX;AACI,cAAQ,SAAS;aACR;AACD,iBAAO;aACN;AACD,iBAAO,wBAAwB,UAAU;aACxC;AACD,iBAAO,wBAAwB,UAAU;aACxC;AACD,iBAAO,wBAAwB,UAAU;;AAEzC,iBAAO,wBAAwB,UAAU;;;AAGrD;AACI,cAAQ,SAAS;aACR;AACD,iBAAO;aACN;AACD,iBAAO,kBAAkB,UAAU;aAClC;AACD,iBAAO,kBAAkB,UAAU;aAClC;AACD,iBAAO,kBAAkB,UAAU;aAClC;AACD,iBAAO,kBAAkB,UAAU;aAClC;AACD,iBAAO,kBAAkB,UAAU;aAClC;AACD,iBAAO,kBAAkB,UAAU;;AAEnC,gBAAM,IAAI,MAAM,GAAG,SAAS;;;AAGxC;AACI,aAAO;;eAEI,KAAK;;;;AAIpB;AACI,aAAO;;QAEH,KAAK;;;;AAIb;AACI,aAAO;;QAEH,KAAK;;;;AAIb;AACI,4BAAsB,GAAG,KAAK;;;;MAI5B,KAAK;MACL,KAAK;;;;;;;;;;;;;;;;;;;;;;;MAuBL,KAAK;MACL,KAAK;MACL,KAAK;;;;;;;;;;;;;;;;;;;;;;;;;MAyBL;MACA;MACA;;AAEF,aAAO;;AAEX,8BAA0B;;;;;;;;;;;;;AAa1B,8BAA0B;;;;;;;;;AAS1B,8BAA0B;;;;;;;;;;AAU1B,iCAA6B;;;;;;;;;;;;AAY7B;AACI,aAAO;;;;;;AAMX;AACI,6BAAuB,CAAC,KAAK,KAAK,SAAS,KAAK,IAAI,KAAK,KAAK,SAAS,KAAK;AAC5E,UAAI,eAAe,OAAO;AACtB,eAAO;;sCAEuB,eAAe;;;;AAIjD,UAAI,eAAe,OAAO;AACtB,eAAO;;sCAEuB,eAAe;;;;AAIjD,aAAO;;;oCAGyB,eAAe,OAAO,eAAe;iCACxC,eAAe;;;;AAIhD;AACI,UAAI,SAAS,OAAO;AAChB,eAAO;;kCAEmB,SAAS;;;;AAIvC,UAAI,SAAS,OAAO;AAChB,eAAO;;kCAEmB,SAAS;;;;AAIvC,aAAO;;;oCAGyB,SAAS,OAAO,SAAS;4BACjC,SAAS;;;;AAIrC;AACI,6BAAuB,CAAC,KAAK,KAAK,SAAS,KAAK,IAAI,KAAK,KAAK,SAAS,KAAK;AAC5E,iCAA2B,KAAK,KAAK,MAAM,KAAK;AAChD,4BAAsB,qBAAqB,KAAK,KAAK,MAAM,KAAK;AAChE,aAAO;;;oCAGyB,eAAe,OAAO,eAAe;iCACxC,eAAe;;wBAExB;qBACH;;6BAEQ;4BACD;;;;;;AAM5B;AACI,qCAA+B,mCAA+C,CAAC,KAAK,KAAK,MAAM;AAC/F,aAAO;;;oCAGyB,SAAS,OAAO,SAAS;iCAC5B,SAAS;QAClC;;;;;AAKR;AACI,6BAAuB,CAAC,KAAK,KAAK,SAAS,KAAK,IAAI,KAAK,KAAK,SAAS,KAAK;AAC5E,iCAA2B,KAAK,KAAK,MAAM,MAAM,SAAS,KAAK;AAC/D,4BAAsB,qBAAqB,KAAK,KAAK,MAAM,MAAM,SAAS,KAAK;AAC/E,2BAAqB;AACrB,oBAAc;AACd,oBAAa;AACb,mBAAa,GAAG,IAAI,MAAM,SAAS,GAAG;AAClC,0BAAkB,MAAM,MAAM,SAAS,IAAI;AAC3C,kBAAU;aACL,eAAe;kBACV,OAAO;QACjB;AACA,kBAAS,IAAI,QAAQ;;AAEzB,aAAO;UACD,MAAM;;oCAEoB,eAAe,OAAO,eAAe;iCACxC,eAAe;;QAExC;;wBAEgB;qBACH;;6BAEQ;4BACD;;mBAET,MAAM,UAAU;;;;AAInC;AACI,qCAA+B,mCAA+C,CAAC,KAAK,KAAK,KAAK,OAAO;AACrG,aAAO;;;eAGI,SAAS,OAAO,SAAS;iCACP,SAAS;QAClC;;;;;AAKR;AACI,qCAA+B,mCAA+C,CAAC,KAAK,KAAK,KAAK,MAAM,OAAO;AAC3G,aAA
O;;kDAEuC,SAAS;+BAC5B,SAAS;;iCAEP,SAAS;;QAElC;;;;;;;AAOR;AACI,qCAA+B,mCAA+C,CAAC,KAAK,KAAK,KAAK,MAAM,MAAM,OAAO;AACjH,aAAO;;;eAGI,SAAS,OAAO,SAAS;iCACP,SAAS;;QAElC;;;;;;;AAOR;AACI,6BAAuB,CAAC,KAAK,KAAK,SAAS,KAAK,IAAI,KAAK,KAAK,SAAS,KAAK;AAC5E,UAAI,aAAiB,OAAO;AACxB,eAAO;;8CAE+B,eAAe,OAAO,eAAe;;;;AAK/E,iCAA2B,KAAK,KAAK,MAAM,KAAK;AAUhD,aAAO;;;oCAGyB,eAAe,OAAO,eAAe;;iCAExC,eAAe;6BACnB;4BACD;;;;;;AAM5B;AACI,UAAI,aAAiB,OAAO;AACxB,eAAO;;0CAE2B,SAAS,OAAO,SAAS;;;;AAI/D,UAAI,MAAM,OAAO;AACb,eAAO;;;sCAGuB,SAAS,OAAO,SAAS;mCAC5B,SAAS;;;;;AAKxC,UAAI,MAAM,OAAO;AACb,eAAO;;;sCAGuB,SAAS,OAAO,SAAS;mCAC5B,SAAS;;;;;AAKxC,aAAO;;;oCAGyB,SAAS,OAAO,SAAS;iCAC5B,SAAS;wBAClB,MAAM;4BACF,MAAM;;;;;AAKlC;AACI,aAAO,SAAS;;AAEpB;AACI,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,mBAAa;AACb,aAAO;WACA;eACI,KAAK,aAAa;;;;AAIjC;AACI,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,UAAI,UAAU,UAAU;AACpB,eAAO,SAAS,sBAAsB;;AAE1C,iCAA2B,UAAU,UAAU;AAC/C,UAAI,YAAY,KAAK,YAAY;AAC7B,eAAO;cACD;+BACiB;;;;AAI3B,6BAAuB,UAAU,UAAU;AAC3C,qBAAe,yBAAyB;AACxC,aAAO;YACC;6BACiB,UAAU,UAAU;6BACpB;;;;AAI7B;AACI,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,uBAAiB,UAAU,UAAU;AACrC,6BAAuB,CAAC,KAAK,KAAK,SAAS,KAAK,IAAI,KAAK,KAAK,SAAS,KAAK;AAC5E,mBAAa;AACb,aAAO;WACA;;UAED,eAAe,OAAO,eAAe;eAChC,KAAK,aAAa;;;;AAIjC;AACI,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,UAAI,UAAU,UAAU;AAEpB,eAAO;cACD;UACJ,kBAAkB;;;;AAIxB,uBAAiB,UAAU,UAAU;AACrC,oBAAc,SAAS;AACvB,oBAAc,SAAS;AACvB,UAAI,UAAU,KAAK,UAAU;AACzB,eAAO;cACD;+BACiB;;;;AAI3B,qBAAe,yBAAyB;AACxC,UAAI,UAAU;AACV,eAAO;cACD;6CAC+B,oBAAoB;+BAClC;;;;AAI3B,UAAI,UAAU;AACV,eAAO;cACD;wCAC0B,oBAAoB;+BAC7B;;;;AAI3B,aAAO;YACC;6BACiB,UAAU,kBAAkB;6BAC5B;;;;AAI7B;AACI,oBAAc,UAAU,UAAU;AAClC,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,uBAAiB,UAAU,UAAU;AACrC,sBAAgB,SAAS;AACzB,sBAAgB,SAAS;AACzB,mBAAa;AACb,UAAI,YAAY,QAAQ,aAAiB,OAAO;AAC5C,eAAO;aACF;qDACwC,cAAc;;iBAElD,KAAK,aAAa;;;;AAI/B,6BAAuB,CAAC,KAAK,KAAK,SAAS,KAAK,IAAI,KAAK,KAAK,SAAS,KAAK;AAC5E,2BAAqB,KAAK,KAAK,MAAM,KAAK;AAC1C,aAAO;WACA;iCACsB,iBAAiB,eAAe,OAAO,eAAe;eACxE,KAAK,aAAa;;;;AAIjC;AACI,oBAAc,UAAU,UAAU;AAClC,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,uBAAiB,UAAU,UAAU;AACrC,UAAI,YAAY,QAAQ,aAAiB,OAAO;AAC5C,yBAAgB,SAAS;AACzB,yBAAgB,SAAS;AACzB,eAAO;YACH;mDACuC,eAAc;6BACpC;;;;AAIzB,aAAQ,UAAU,YAAa,cAAkB;AACjD,4BAAsB;AACtB,UAAI,cAAc,SAAS,MAAM;AAC7B,6BAAqB,iBAAiB,WAAW;AACjD,uBAAe,CAAC,OAAO;AACvB,eAAO;QACP,qBAAqB;cACf;iBACG,YAAY,kBAAkB,QAAQ;;;;AAInD,UAAI,UAAU,UAAU;AAEpB,eAAO;cACD;qDACuC,MAAM;UACjD,kBAAkB;;;;AAIxB,sBAAgB,SAAS;AACzB,sBAAgB,SAAS;AACzB,qBAAe,yBAAyB;AACxC,UAAI,YAAY;AAEZ,eAAO;YACH;yCAC6B,iBAAiB,MAAM;4CACpB;6BACf;;;;AAIzB,UAAI,YAAY;AAEZ,eAAO;YACH;yCAC6B,iBAAiB,MAAM;uCACzB;6BACV;;;;AAIzB,aAAO;UACD;;wBAEc,MAAM,cAAc;2BACjB,YAAY;2BACZ;;;;AAI3B;AACI,oBAAc,UAAU,UAAU;AAClC,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,uBAAiB,UAAU,UAAU;AACrC,6BAAuB,CAAC,KAAK,KAAK,SAAS,KAAK,IAAI,KAAK,KAAK,SAAS,KAAK;AAC5E,UAAI,MAAM,OAAO;AACb,8BAAsB,MAAM,MAAM;AAClC,yBAAiB,CAAC,GAAG;AACrB,6BAAqB,iBAAiB,WAAW;AACjD,uBAAe,CAAC,KAAK,OAAO;AAC5B,eAAO;UACL,2BAA2B;eACtB;mBACI,YAAY,kBAAkB,QAAQ;;;;AAIrD,sBAAgB,eAAe;AAC/B,sBAAgB,eAAe;AAC/B,2BAAqB,KAAK,KAAK,MAAM,KAAK;AAC1C,4BAAsB,eAAe,KAAK,KAAK,MAAM,KAAK;AAC1D,mBAAa;AACb,aAAO;WACA;;UAED,YAAY,YAAY,kBAAkB;eACrC,KAAK,aAAa;;;;AAIjC;AACI,oBAAc,UAAU,UAAU;AAClC,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,sBAAgB,MAAM,KAAK,MAAM;AACjC,sBAAgB,MAAM;AACtB,aAAQ,UAAU,YAAa,cAAkB;AACjD,4BAAsB;AACtB,UAAI,cAAc,
SAAS,MAAM;AAC7B,6BAAqB,iBAAiB,WAAW;AACjD,uBAAe,CAAC,OAAO,OAAO;AAC9B,eAAO;UACL,qBAAqB;gBACf;mBACG,YAAY,kBAAkB,QAAQ;;;;AAIrD,UAAI,UAAU,UAAU;AAEpB,eAAO;cACD;;iCAEmB,YAAY;UACnC,kBAAkB;;;;AAIxB,uBAAiB,UAAU,UAAU;AACrC,sBAAgB,SAAS;AACzB,sBAAgB,SAAS;AACzB,yBAAmB,UAAU,UAAU;AACvC,UAAI,YAAY,WAAW,cAAc;AAErC,eAAO;gBACC;;oDAEoC;;4BAExB,cAAc;iCACT;;;;AAI7B,UAAI,YAAY,WAAW,cAAc;AAErC,eAAO;YACH;8CACkC,MAAM;;qDAEC,cAAc;6BACtC;;;;AAIzB,qBAAe,yBAAyB;AACxC,aAAO;cACG;;4BAEc,mBAAmB,qBAAqB;+BACrC,YAAY;+BACZ;;;;AAI/B;AACI,oBAAc,UAAU,UAAU;AAClC,mBAAa,MAAM;AACnB,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,uBAAiB,UAAU,UAAU;AACrC,6BAAuB,CAAC,KAAK,KAAK,SAAS,KAAK,IAAI,KAAK,KAAK,SAAS,KAAK;AAC5E,sBAAgB,eAAe;AAC/B,sBAAgB,eAAe;AAC/B,2BAAqB,KAAK,KAAK,MAAM,OAAO,KAAK;AACjD,0BAAoB,eAAe,KAAK,KAAK,MAAM,OAAO,KAAK;AAC/D,mBAAa;AACb,mBAAY,OAAO,+BAA+B;AAClD,mBAAa,GAAG,IAAI,OAAO,GAAG;AAC1B,iBAAS,QAAQ,QAAQ;AACzB,yBAAiB,MAAM,OAAO,IAAI;AAClC,iBAAQ,IAAI,OAAO,qBAAqB;;AAE5C,mBAAa;AACb,aAAO;WACA,YAAY;oBACH;2BACO;kCACO;qDACmB,YAAY;eAClD,KAAK,aAAa;;;;AAIjC;AACI,oBAAc,UAAU,UAAU;AAClC,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,sBAAgB,MAAM;AACtB,sBAAgB,MAAM,KAAK;AAC3B,sBAAgB,MAAM,KAAK;AAC3B,aAAQ,UAAU,YAAa,cAAkB;AACjD,UAAI,SAAS,SAAS,MAAM;AACxB,6BAAqB,iBAAiB,WAAW;AACjD,uBAAe,CAAC,OAAO,OAAO,SAAS;AACvC,eAAO;QACP,qBAAqB;cACf;iBACG,YAAY,kBAAkB,QAAQ;;;;AAInD,UAAI,UAAU,UAAU;AAEpB,eAAO;cACD;;iCAEmB,YAAY,YAAY;UAC/C,kBAAkB;;;;AAIxB,yBAAmB,UAAU,UAAU;AACvC,uBAAiB,UAAU,UAAU;AACrC,sBAAgB,SAAS;AACzB,sBAAgB,SAAS;AACzB,UAAI,YAAY,WAAW,cAAc;AAErC,eAAO;cACD;;;;uBAIS,YAAY;;0BAET,cAAc;+BACT;;;;AAI3B,UAAI,YAAY,WAAW,cAAc;AAErC,eAAO;cACD;;gCAEkB,MAAM,KAAK,MAAM,OAAO,MAAM;;;yBAGrC,cAAc;+BACR;;;;AAI3B,qBAAe,yBAAyB;AACxC,aAAO;YACC;;0BAEc,mBAAmB;oBACzB;6BACS,YAAY,oBAAoB;6BAChC;;;;AAI7B;AACI,oBAAc,UAAU,UAAU;AAClC,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,sBAAgB,MAAM;AACtB,sBAAgB,MAAM,KAAK;AAC3B,sBAAgB,MAAM,KAAK;AAC3B,sBAAgB,MAAM,KAAK;AAC3B,aAAQ,UAAU,YAAa,cAAkB;AACjD,UAAI,SAAS,SAAS,MAAM;AACxB,6BAAqB,iBAAiB,WAAW;AACjD,uBAAe,CAAC,OAAO,OAAO,SAAS,UAAU;AACjD,eAAO;QACP,qBAAqB;cACf;iBACG,YAAY,kBAAkB,QAAQ;;;;AAInD,UAAI,UAAU,UAAU;AAEpB,eAAO;cACD;;;iBAGG,YAAY,YAAY,YAAY;;UAE3C,kBAAkB;;;;AAIxB,yBAAmB,UAAU,UAAU;AACvC,uBAAiB,UAAU,UAAU;AACrC,sBAAgB,SAAS;AACzB,sBAAgB,SAAS;AACzB,UAAI,YAAY,WAAW,cAAc;AAErC,eAAO;cACD;;;gCAGkB,YAAY,YAAY;;0BAE9B,cAAc;+BACT;;;;AAI3B,UAAI,YAAY,WAAW,cAAc;AAErC,eAAO;cACD;;;iBAGG,MAAM,KAAK,MAAM,KAAK,MAAM;iBAC5B,MAAM,KAAK,MAAM,OAAO,MAAM;;;yBAGtB,cAAc;+BACR;;;;AAI3B,qBAAe,yBAAyB;AACxC,aAAO;YACC;;0BAEc,mBAAmB,qBAAqB;qBAC7C,sBAAsB;6BACd,YAAY;6BACZ;;;;AAI7B;AACI,oBAAc,UAAU,UAAU;AAClC,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,aAAQ,UAAU,YAAa,cAAkB;AACjD,UAAI,SAAS,SAAS,MAAM;AACxB,6BAAqB,iBAAiB,WAAW;AACjD,uBAAe,CAAC,OAAO,OAAO,SAAS,UAAU,UAAU;AAC3D,eAAO;QACP,qBAAqB;cACf;;iBAEG,YAAY,kBAAkB,QAAQ;;;;AAInD,sBAAgB,MAAM;AACtB,sBAAgB,MAAM,KAAK;AAC3B,sBAAgB,MAAM,KAAK;AAC3B,sBAAgB,MAAM,KAAK;AAC3B,sBAAgB,MAAM,KAAK;AAC3B,UAAI,UAAU,UAAU;AAEpB,eAAO;cACD;;;;iBAIG,YAAY,YAAY,YAAY;;;mBAGlC;UACT,kBAAkB;;;;AAIxB,yBAAmB,UAAU,UAAU;AACvC,uBAAiB,UAAU,UAAU;AACrC,sBAAgB,SAAS;AACzB,sBAAgB,SAAS;AACzB,UAAI,YAAY,WAAW,cAAc;AAErC,eAAO;cACD;;;;iBAIG,YAAY,YAAY,YAAY;;;0BAG3B,cAAc;+BACT;;;;AAI3B,UAAI,YAAY,WAAW,cAAc;AAErC,eAAO;cACD;;;iBAGG,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM;iBACvC,MAAM,KAAK,MAAM,KAAK,MAAM;iBAC5B,MAAM,KAAK,MAAM;iBACjB,MAAM;;;yBAGE,cAAc;+BACR;;;;AAI3B,qBAAe,yBAAyB;AACxC,aAAO;YACC;;;0BAGc,mBAAmB,qBAAqB;qBAC7C,sBAAsB,sBAAsB;6BACpC,YAAY;6BACZ;;;;AAI7B;AACI,sBAAgB,UAAU;AAC1B,qBAAe,eAAmB,UAAU,UAAU;AACtD,UAAI,SAA
S;AACT,eAAO,UAAU;;AAErB,aAAO;0BACe;;iBAET;;;;;AAKjB;AACI,sBAAgB,UAAU;AAC1B,6BAAuB,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACvE,uBAAiB,QAAQ,iBAAiB;AAC1C,qBAAe,UAAU,UAAU,aAAa;AAChD,sBAAgB,aAAa,aAAa;AAC1C,4BAAsB,mBAAiB,UAAU,UAAU,cAAc,aAAa;AACtF,mBAAa,kBAAkB;AAC/B,uBAAiB,UAAU;AAC3B;AACA,qBAAe,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK;AACzC,UAAI,WAAW;AACX,wBAAgB;iBAEX,UAAU,KAAK,cAAc,UAAU;AAC5C,wBAAgB;;AAGhB,wBACI,cAAc,IAAI,OAAK,UAAU,OAAO,IAAI,kBACvC,KAAK;;AAElB,kCAA4B;AAC5B,UAAI,UAAU,KAAK,SAAS;AACxB,gCAAwB;;AAGxB,gCAAwB,UAAU,UAAU,aACvC,IAAI,UAAU,UAAU,OAAO,IAAI,aACnC,KAAK;;AAEd,mBAAa;AACb,qBAAe,eAAmB,UAAU,UAAU;AACtD,4BAAsB,WAAW;AACjC,sBAAgB,eAAmB,aAAa;AAChD,6BAAuB,YAAY;AACnC,UAAI,WAAW,KAAK,CAAC,iBAAiB,CAAC;AACnC,iBAAS;;;iBAIJ,iBAAiB,CAAC;AACvB,YAAI,YAAY;AACZ,mBAAS;;;;AAKT,mBAAS;;;;iBAKR,cAAc;AACnB,qBAAa,SAAS;AACtB,qBAAa,SAAS;AACtB,YAAI,cAAc,QAAQ,QAAQ,MAAM,cAAc,QAAQ,QAAQ;AAClE,mBAAS;mBAEJ,cAAc,QAAQ,QAAQ;AACnC,mBAAS;mBAGJ,cAAc,QAAQ,QAAQ;AACnC,mBAAS;;;AAGjB,aAAO;WACA;QACH;QACA;8BACsB,kBAAkB;QACxC;;;;AAIR;AACI,sBAAgB,UAAU;AAC1B,6BAAuB,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACvE,uBAAiB,QAAQ,iBAAiB;AAC1C,0BAAoB,aAAa;AACjC,yBAAmB,UAAU,UAAU;AACvC,qBAAe,UAAU,UAAU,aAAa;AAChD,sBAAgB,aAAa,aAAa;AAC1C,UAAI,CAAC,UAAU,UAAU,aAAa,WAAW,WAC7C,UAAU,UAAU,cAAc,QAClC,aAAiB,YAAY;AAC7B,eAAO;cACD;+BACiB;;;;AAI3B,mBAAa,kBAAkB;AAC/B,4BAAsB,mBAAiB,UAAU,UAAU,cAAc,aAAa;AACtF,uBAAiB,UAAU;AAC3B;AACA,qBAAe,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK;AACzC,UAAI,WAAW;AACX,wBAAgB;iBAEX,UAAU,KAAK,cAAc,UAAU;AAC5C,wBAAgB;;AAGhB,wBACI,cAAc,IAAI,OAAK,UAAU,OAAO,IAAI,kBACvC,KAAK;;AAElB,kCAA4B;AAC5B,UAAI,UAAU,KAAK,SAAS;AACxB,gCAAwB;;AAGxB,gCAAwB,UAAU,UAAU,aACvC,IAAI,UAAU,UAAU,OAAO,IAAI,aACnC,KAAK;;AAEd,aAAO;YACC;QACJ;QACA;kBACU,kBAAkB;;;;AAI7B;AACH,UAAI,QAAQ;AACR,eAAO;iBAEF,SAAS;AACd,eAAO;iBAEF,SAAS;AACd,eAAO;iBAEF,SAAS;AACd,eAAO;iBAEF,SAAS;AACd,eAAO;iBAEF,SAAS;AACd,eAAO;;AAGP,cAAM,MAAM,gBAAgB;;;AAIpC;AAEI,2BAAqB,KAAK,MAAM,KAAK,UAAU;AAC/C,mBAAa,UAAU,eAAe;AACtC,aAAO;;AAEX;AACI,aAAO,SAAS,IAAI,OAAK,OAAO,IAAI,KAAK;;AC3sC7C;;;;;;;;;;;;;;;;;MAoBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,gBAAY,MAAM,SAAS,GAAG,MAAM,aAAa,IAAG,OAAO,GAAG,gBAC1D,IAAG,MAAM;AACb,uBAAe,MAAM,MAAM,SAAS;AACpC,wBAAgB,KAAK,KAAK,SAAS;AACnC,aAAK,cAAc,MAAM,MAAM,GAAG;AAClC,YAAI,UAAU;AACV,eAAK,YAAY,KAAK;;AAE1B,YAAI,CAAC;AACD,eAAK,cAAc,KAAK;;AAE5B,yBAAiB,KAAK;AACtB,qBAAa,SAAS;AACtB,sBAAc,kBAAkB;AAChC,wBAAe,YAAY,UAAU;AACrC;AACA;AACA,YAAI,YAAY;AACZ,uBAAa,OAAO;AACpB,iCAAuB,kBAAkB;AACzC,2BAAiB;UACnB,+BAA+B,kBAAkB,QAAO;YACtD,QAAO,OAAO;UAChB,+BAA+B,kBAAkB,QAAO;YACtD,QAAO,OAAO;UAChB,+BAA+B,kBAAkB,QAAO;YACtD,QAAO,OAAO;UAChB,+BAA+B,kBAAkB,QAAO;YACtD,QAAO,OAAO;;AAGd,uBAAa;AACb,2BAAiB;UACnB;YACE,QAAO,OAAO;UAChB;YACE,QAAO,OAAO;UAChB;YACE,QAAO,OAAO;UAChB;YACE,QAAO,OAAO;;AAElB,yBAAiB,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK,MAAM,GAAG;AACzD,0BAAkB,MAAM,SAAS,aAAa;AAC9C,4BAAoB,SAAS,IAAI,OAAK,SAAS;AAC/C,2BAAmB,YAAY,cAAc,aAAa,GAAG,OAAO;AACpE,2BAAmB,YAAY,cAAc,aAAa,GAAG,OAAO;AACpE,2BAAmB,YAAY,cAAc,aAAa,GAAG,OAAO;AACpE,2BAAmB,YAAY,cAAc,aAAa,GAAG,OAAO;AACpE,uBAAgB,QAAO,QAAS,gBAAgB;AAChD,kCAA0B,YAAY,KAAK;sDACG,WAAW;sDACX,WAAW;sDACX,WAAW;sDACX,WAAW;AACzD,2BAAmB;0BACD,WAAW;uCACE,WAAW;uCACX,WAAW;qDACG,WAAW;AACxD,8CAAsC,YAAY,KAAK;qCAC1B,YAAY;4CACL,SAAS;iDACJ,SAAS,MAAM,IAAI;;AAE5D,aAAK,WAAW;0BACE,YAAY;iCACL,SAAS;sCACJ,SAAS,MAAM,IAAI;;QAEjD;;UAEE;4BACkB,QAAO,OAAO,QAAQ,SAAS,OAAO,KAAK;4BAC3C,QAAO,OAAO,QAAQ,SAAS,OAAO,KAAK;UAC7D;yCAC+B,wBAAwB;sBAC3C,wBAAwB,gBAAgB;;;2BAGnC;;8BAEG;;YAElB;6BACiB;;;mBAGV;;;;;;;;;;;;;;AChHnB;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc,SAAS;AAC5B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,
4BAAoB,SAAS;AAC7B,+BAAuB,SAAS;AAChC,8BAAsB,SAAS;AAC/B,sCAA8B,SAAS;AACvC,qCAA6B,SAAS;AACtC,uBAAe,wBAAwB,IAAI,SAAS,QAAQ;AAC5D,wBAAgB,uBAAuB,IAAI,SAAS,QAAQ;AAC5D,8BAAsB,IAAK,gBAAe;AAC1C,aAAK,WAAW;iCACS,WAAW;0CACF;;;;;;;;;;;;;;gCAcV;oBACZ;gDAC4B;;oCAEZ,SAAS;;;;;kCAKX;mBACf;kDAC+B;;sCAEZ,SAAS;;;;;;;;;;;;;;;;;MAiB3C;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc,SAAS;AAC5B,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,8BAAsB,SAAS;AAC/B,+BAAuB,SAAS;AAChC,8BAAsB,SAAS;AAC/B,qCAA6B,SAAS;AACtC,sCAA8B,SAAS;AACvC,qCAA6B,SAAS;AACtC,yBAAiB,uBAAuB,IAAI,SAAS,QAAQ;AAC7D,uBAAe,wBAAwB,IAAI,SAAS,QAAQ;AAC5D,wBAAgB,uBAAuB,IAAI,SAAS,QAAQ;AAC5D,8BAAsB,IAAK,eAAc,eAAe;AACxD,aAAK,WAAW;iCACS,aAAa,WAAW;0CACf;;;;;;;;;;;;;;;;;gCAiBV;oBACZ;gDAC4B;;oCAEZ,SAAS;;;;;kCAKX;sBACZ;kDAC4B;;sCAEZ,SAAS;;;;;;oCAMX;wBACZ;oDAC4B;;wCAEZ,SAAS;;;;;;;;;;;;;;;;;AC1IjD;;;;;;;;;;;;;;;;AAiBA,8BAA0B;;;;AAQnB,oBAAgB;;;;;;;;;;;AAWhB,gBAAY;;;;;;;;;;AAUZ,+BAA2B;AAC3B,kBAAc;AACd,iBAAa;AACb,uBAAmB;AACnB,oBAAgB;AAChB,0BAAsB;AACtB,wBAAoB;AACpB,uBAAmB;AACnB,gBAAY,oBAAoB;;;AAGhC,gBAAY,oBAAoB;;;AAGhC,gBAAY;;AAEZ,oBAAgB;AAChB,kBAAc;;MAEjB;AACI,aAAK,gBAAgB,CAAC,KAAK;AAC3B,aAAK,cAAc,4BAAwC,QAAQ;AACnE,aAAK,WAAW;;UAEd;;;;;;;;;;;ACtEV;;;;;;;;;;;;;;;;AAmBA,gCAA0B;;;;;;AAMnB,sBAAgB;;;;;;;;;;;;;;;;;;;;;;AAsBhB,kBAAY;;;;;;;;;;;;;;MAef,sBAAoB;;;AAGjB,oBAAc;;;;AAId,sBAAgB;;;;AAIhB,oBAAc;;;AAGd,sBAAkB;;;AAGlB,mBAAa;;;AAGb,yBAAmB;;;AAGnB,sBAAgB;;;AAGhB,4BAAsB;;;AAGtB,0BAAoB;;;;;AAKpB,yBAAmB;;;;;;AAMnB,kBAAY;;;MAIf,sBAAoB;;;AAGjB,kBAAY;;;MAIf,sBAAoB;;;AAGjB,kBAAY;;;MAIf,sBAAoB;;;;MAIpB,oDAAmD;AAC/C,aAAK,gBAAgB,CAAC,KAAK;AAC3B,aAAK,uBAAuB;AAC5B,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,aAAK,cAAc,4BAAwC,QAAQ;AACnE,qBAAa,KAAK,YAAY;AAC9B,qCAA6B;AAC7B,YAAI;AACA,cAAI,SAAS,KAAK,eAAmB,KAAK,iBAAiB;AACvD,qCAAyB;;;;;;AAOzB,0BAAc,kBAAkB;AAChC,qCAAyB;YAC7B;;AAEI,gBAAI,SAAS;AACT,wCAA0B;yCACL,KAAK,YAAY;;;;;AAMtC,+BAAiB,YAAY,UAAU;AACvC,wCAA0B;;iBAE7B,SAAS,OAAO,cAAc,KAAK,YAAY,OAAO;;iBAEtD,SAAS,OAAO,cAAc,KAAK,YAAY,OAAO;;;;;;;;AAQ/D,aAAK,WAAW;;UAEd;;;;;;;;UAQA;;;;;;;AChLV;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc;AACnB,aAAK,WAAW;;;;;;;;;;;;;;;MAepB;AACI,eAAO;AACH,cAAI,KAAK,UAAU;AACf,iBAAK,SAAS,MAAM,0BAA0B,cAAc;AAC5D,iBAAK,SAAS,MAAM,0BAA0B,cAAc;;AAEhE,gBAAM,GAAG,UAAU,KAAK,QAAQ;AAChC,gBAAM,GAAG,UAAU,KAAK,QAAQ;;;;AC1C5C;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,aAAK,cAAc;AACnB,aAAK,WAAW;;;;;;;;;;;;;;;;MAgBpB;AACI,eAAO;AACH,cAAI,KAAK,UAAU;AACf,iBAAK,SAAS,MAAM,0BAA0B,cAAc;AAC5D,iBAAK,SAAS,MAAM,0BAA0B,cAAc;;AAEhE,gBAAM,GAAG,UAAU,KAAK,QAAQ;AAChC,gBAAM,GAAG,UAAU,KAAK,QAAQ;;;;AC7C5C;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC,QAAQ;AAC9B,aAAK,cAAc;AACnB,aAAK,WAAW;;;;;;;;;;;;;;;;ACpBxB;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC,KAAK;AAC3B,aAAK,cAAc,SAAS;AAC5B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,uBAAe,SAAS,QAAQ;AAChC,wBAAgB,SAAS,QAAQ;AACjC,+BAAuB,SAAS,eAAe;AAC/C,aAAK,WAAW;;;;;;;;;;;;8BAYM,SAAS;kCACL,SAAS;iCACV,kBAAkB;;kCAEjB,SAAS;;;;oCAIP,SAAS;mCACV,iBAAiB;;oCAEhB,SAAS;;;;oBAIzB;;;;;;;;;;;;;;;;;;;MAmBhB;AACI,aAAK,gBAAgB,CAAC,MAAM;AAC5B,aAAK,cAAc,SAAS;AAC5B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,+BAAuB,SAAS,eAAe;AAC/C,uBAAe,eAAe,IAAI,SAAS,QAAQ;AACnD,wBAAgB,cAAc,IAAI,SAAS,QAAQ;AACnD,uBAAe,iBAAiB,IAAI;AACpC,uBAAe,iBAAiB,IAAI;AACpC,2BAAmB,iBAAiB,IAAI;AACxC,aAAK,WAAW;iCACS,WAAW;;;;;0BAKlB;;wCAEc,mBAAmB;;;;;;;gCAO3B;gDACgB;;oCAEZ,SAAS;;;;;yBAKpB;;kCAES;kDACgB;;sCAEZ,SAAS;;;;;;2BAMpB;;oCAES,SAAS;;oBAEzB;;;;;;;;;;;;;;;;;;;MAmBhB;AACI,aAAK,gBAAgB,CAAC,KAAK;AAC3B,aAAK,cAAc,SAAS
;AAC5B,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,yBAAiB,SAAS,QAAQ;AAClC,uBAAe,SAAS,QAAQ;AAChC,wBAAgB,SAAS,QAAQ;AACjC,aAAK,WAAW;;;;;;;;;;;8BAWM,SAAS;kCACL,SAAS;iCACV,iBAAiB;;kCAEhB,SAAS;;;;oCAIP,SAAS;mCACV,kBAAkB;;oCAEjB,SAAS;;;;sCAIP,SAAS;qCACV,iBAAiB;;sCAEhB,SAAS;;;;;;;;;;;;;;;;;MAiB3C;AACI,aAAK,gBAAgB,CAAC,MAAM;AAC5B,aAAK,cAAc,SAAS;AAC5B,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,yBAAiB,cAAc,IAAI,SAAS,QAAQ;AACpD,uBAAe,eAAe,IAAI,SAAS,QAAQ;AACnD,wBAAgB,cAAc,IAAI,SAAS,QAAQ;AACnD,aAAK,WAAW;iCACS,aAAa,WAAW;;;;;;;;;;;;;;gCAczB;gDACgB;;oCAEZ,SAAS;;;;;yBAKpB;;kCAES;kDACgB;;sCAEZ,SAAS;;;;;;2BAMpB;;oCAES;oDACgB;;wCAEZ,SAAS;;;;;;6BAMpB;;sCAES,SAAS;;;;;;;;;;;;;AC/P/C;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC,KAAK;AAC3B,aAAK,cAAc,SAAS;AAC5B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,uBAAe,SAAS,QAAQ;AAChC,wBAAgB,SAAS,QAAQ;AACjC,2BAAmB,SAAS,cAAc,SAAS;AACnD,aAAK,WAAW;;;;;;;wBAOA;;;;;8BAKM,SAAS;kCACL,SAAS;iCACV,kBAAkB;;kCAEjB,SAAS;;;;oCAIP,SAAS;mCACV,iBAAiB;;oCAEhB,SAAS;;;;;;;;;;;;;;;;MAgBzC;AACI,aAAK,gBAAgB,CAAC,MAAM;AAC5B,aAAK,cAAc,SAAS;AAC5B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,uBAAe,eAAe,IAAI,SAAS,QAAQ;AACnD,wBAAgB,cAAc,IAAI,SAAS,QAAQ;AACnD,2BAAmB,SAAS,cAAc,SAAS;AACnD,aAAK,WAAW;iCACS,WAAW;;;;;;;;;;;;gCAYZ;gDACgB;;oCAEZ,SAAS;;;;;yBAKpB;;kCAES;kDACgB;;sCAEZ,SAAS;;;;;;2BAMpB;;;oCAGS;8BACN;;;;;;;;;;;;AC9G9B;;;;;;;;;;;;;;;;;MAiBI,gCAAgC,qBAAoB,kCAAkC;AAClF,aAAK,gBAAgB,CAAC,KAAK;AAC3B,aAAK,cAAc,SAAS;AAC5B,uBAAe,SAAS,QAAQ;AAChC,wBAAgB,SAAS,QAAQ;AACjC,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,+BAAuB,SAAS;AAChC,8BAAsB,SAAS;AAC/B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,sCAA8B,KAAK,MAAM,SAAS,aAAa,KAAK;AACpE,wCAAgC,SAAS,aAAa;AACtD,+BAAuB,SAAS,eAAe;AAC/C,uBAAe,iBAAiB,IAAI;AACpC,uBAAe,iBAAiB,IAAI;AACpC,2BAAmB,iBAAiB,IAAI;AACxC,gCAAwB,6BAA6B;AACrD,YAAI;AACA,cAAI;AACA,gCAAoB;;YAExB;;;AAII,gCAAoB;;cAEtB;;;;AAIF,mCAAyB;;AAE7B,+BAAuB,UAAU,oCAAoC;AACrE,YAAI;AACA,eAAK,cAAc,KAAK;;AAE5B,YAAI;AACA,eAAK,cAAc,KAAK;;AAE5B,aAAK,WAAW;QAChB;;oCAE4B,iBAAiB;iCACpB,WAAW;;;;;0BAKlB;;;2BAGC,mBAAmB;;;;;;;gCAOd;qCACK;;gCAEL,SAAS;;;;kCAIP;uCACK;;kCAEL,SAAS;;;;oCAIP;;;;;;;;oBAQhB;;;;;;;;;;;;;;;;;;;kBAmBF,4BAA4B;;oBAE1B;;0CAEsB;mCACP;;;kCAGD;mCACC;;;yBAGV,4BAA4B;;+BAEtB;+BACA;;;oBAGX;;wCAEoB;wCACA;;;;;gCAKR;gCACA;;;;;yBAKP,4BAA4B;;+BAEtB;+BACA;+BACA;;;oBAGX;;wCAEoB;wCACA;wCACA;;;;;gCAKR;gCACA;gCACA;;;;;;;;;;UAUtB;UACA;;;;;;;MAON;AACI,aAAK,gBAAgB,CAAC,KAAK;AAC3B,aAAK,cAAc,SAAS;AAC5B,yBAAiB,SAAS,QAAQ;AAClC,uBAAe,SAAS,QAAQ;AAChC,wBAAgB,SAAS,QAAQ;AACjC,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,8BAAsB,SAAS;AAC/B,+BAAuB,SAAS;AAChC,8BAAsB,SAAS;AAC/B,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,sCAA8B,KAAK,MAAM,SAAS,aAAa,KAAK;AACpE,wCAAgC,SAAS,aAAa;AACtD,aAAK,WAAW;oCACY,gBAAgB,iBAAiB;iCACpC,aAAa,WAAW;;;;;;;;;;;;;;;;gCAgBzB;qCACK;;gCAEL,SAAS;;;;kCAIP;uCACK;;kCAEL,SAAS;;;;oCAIP;yCACK;;oCAEL,SAAS;;;;sCAIP;;;;;;;;;;;;;;;;;oBAiBlB,4BAA4B;;4CAEJ;qCACP;2BACV,4BAA4B;;4CAEX;4CACA;;;qCAGP;qCACA;;;2BAGV,4BAA4B;;4CAEX;4CACA;4CACA;;;qCAGP;qCACA;qCACA;;;;;;;;;;;;AC1RrC;;;;;;;;;;;;;;;;;MAiBI,gCAAgC,qBAAoB,2BAA2B;AAC3E,aAAK,gBAAgB,CAAC,KAAK;AAC3B,aAAK,cAAc,SAAS;AAC5B,yBAAiB,SAAS;AAC1B,yBAAiB,SAAS;AAC1B,uBAAe,SAAS,QAAQ;AAChC,wBAAgB,SAAS,QAAQ;AACjC,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,+BAAuB,SAAS;AAChC,8BAAsB,SAAS;AAC/B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,2BAAmB,SAAS,cAAc,SAAS;AACnD,gCAAwB,6BAA6B;AACrD,YAAI;AACA,cAAI;AACA,gCAAoB;;YAExB;;;AAII,gCAAoB;;cAEtB;;;;AAIF,mCAAyB;;AAE7B,+BAAuB,UAAU,oCAAoC;AACrE,YAAI;AACA,eAAK,cAAc,KAAK;;AAE5B,YAAI;AACA,eAAK,cA
[Remainder omitted: regenerated minified distribution bundle and source-map mappings (machine-generated build output, not human-readable).]
,QAAQ,SAAS;AAC9B,iBAAO,KAAK,eAAe,GAAG,QAAQ;;AAE1C,YAAI,OAAM,QAAQ,wBAAwB,EAAE,MAAM,OAAO;AACrD,iBAAO,KAAK,iBAAiB,GAAG,QAAQ;;AAE5C,wBAAgB,IAAI,cAAc;AAClC,eAAO,KAAK,cAAc,SAAS,CAAC,GAAG;;MAE3C;AACI,wBAAgB,IAAI,sBAAsB;AAC1C,eAAO,KAAK,cAAc,SAAS,CAAC,IAAI;;MAE5C;AACI,wBAAgB,IAAI,uBAAuB;AAC3C,eAAO,KAAK,cAAc,SAAS,CAAC,GAAG;;MAE3C,sBAAuB,eAAO,QAAQ,UAAU,MAAM,yBAAY;AAC9D,wCAAgC,OAAM,QAAQ,+BAC1C,SAAS,eAAe,KACxB,SAAS,cAAc,SAAS,eAAe;AACnD,gCAAwB,cACpB,6BAA6B,aAAY,2BACzC;AACJ,uBAAe,CAAC,QAAO;AACvB,wBAAgB,QAAQ;AACxB,0CAAkC,0BAA0B;AAC5D,YAAI;AACA,iBAAO,KAAK;;AAEhB,YAAI;AACA,iBAAO,KAAK;;AAEhB;AACA,YAAI;AACA,oBAAU,IAAI,6BAA6B,UAAU,SAAS,iBAAiB;AAC/E,iBAAO,KAAK,cAAc,SAAS;;AAEvC,kBAAU,IAAI,uBAAuB,UAAU,SAAS,iBAAiB;AACzE,eAAO,KAAK,cAAc,SAAS;;MAEvC;AACI;AACA,YAAI,OAAM,QAAQ,+BACd,SAAS,eAAe,KACxB,SAAS,cAAc,SAAS,eAAe;AAC/C,oBAAU,IAAI,6BAA6B;AAC3C,iBAAO,KAAK,cAAc,SAAS,CAAC,GAAG;;AAE3C,kBAAU,IAAI,uBAAuB;AACrC,eAAO,KAAK,cAAc,SAAS,CAAC,GAAG;;MAE3C;AACI,wBAAgB,IAAI,+BAA+B;AACnD,eAAO,KAAK,cAAc,SAAS,CAAC,IAAI;;MAE5C;AACI,wBAAgB,IAAI,gCAAgC;AACpD,eAAO,KAAK,cAAc,SAAS,CAAC,GAAG;;MAE3C;AACI,wBAAgB,IAAI,cAAc;AAClC,eAAO,KAAK,cAAc,SAAS,CAAC,GAAG;;MAE3C;AACI,wBAAgB,IAAI,sBAAsB;AAC1C,eAAO,KAAK,cAAc,SAAS,CAAC,IAAI;;MAE5C;AACI,wBAAgB,IAAI,uBAAuB;AAC3C,eAAO,KAAK,cAAc,SAAS,CAAC,GAAG;;MAE3C;AACI,oBAAY,EAAE,MAAM;AACpB,yBAAiB,IAAI,MAAM,EAAE,OAAO;AACpC,uBAAe;AACf,qBAAa,GAAG,IAAI,EAAE,MAAM;AACxB,cAAI,MAAM;AACN,qBAAS,cAAc,EAAE,MAAM;;;AAGvC,sBAAc,IAAI,MAAM,EAAE,MAAM,KAAK;AACrC,qBAAa,EAAE,MAAM;AACrB,aAAK,QAAQ;AACb,oBAAY,IAAI,MAAM;AACtB,qBAAa,GAAG,IAAI,IAAI,QAAQ;AAC5B,gBAAM,QAAQ;AACd,cAAI,KAAK,KAAK,MAAM,GAAG,OAAO,MAAM,QAAQ;;AAEhD,eAAO;;MAEX;AACI,wBAAgB,IAAI,cAAc,UAAU,OAAO;AACnD,eAAO,KAAK,cAAc,SAAS,CAAC,IAAI;;MAE5C;AACI,yCAAiC,IAAI,yBAAyB;AAC9D,eAAO,KAAK,cAAc,0BAA0B,CAAC,KAAK,EAAE;;MAEhE;AACI,wBAAgB,IAAI,cAAc,UAAU,OAAO;AACnD,eAAO,KAAK,cAAc,SAAS,CAAC,IAAI;;MAE5C;AACI,6BAAqB;AACrB,0CAAkC,IAAI,cAAc,UAAU,OAAO;AACrE,mCAA2B,KAAK,cAAc,2BAA2B,CAAC;AAC1E,yCAAiC,IAAI,yBAAyB;AAC9D,uBAAe,KAAK,cAAc,0BAA0B,CAAC,IAAI,qBAAqB,EAAE;AACxF,2BAAmB;AACnB,eAAO;;MAEX;AACI,wBAAgB,OAAM,QAAQ,iCAC1B,IAAI,4BAA4B,EAAE,OAAO,WAAW,UAAU,gBAC9D,IAAI,sBAAsB,EAAE,OAAO,WAAW,UAAU;AAC5D,eAAO,KAAK,cAAc,SAAS,CAAC,IAAI;;MAE5C;AACI,wBAAgB,IAAI,8BAA8B,IAAI,GAAG;AACzD,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,6BAA6B,EAAE,OAAO,WAAW,UAAU;AAC/E,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,oCAAoC,IAAI,GAAG;AAC/D,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,sBAAc,aAAa,SAAS,SAAQ;AAC5C,0BAAkB,MAAM,MAAM;AAC9B,4BAAoB,MAAM,MAAM;AAChC,wBAAgB,IAAI,mBAAmB,WAAW,aAAa;AAC/D,4BAAoB,QAAQ,mBAAmB;AAC/C,eAAO,KAAK,cAAc,SAAS,CAAC,QAAQ,SAAS;;MAEzD;AACI,wBAAgB,IAAI,cAAc,QAAQ,MAAM,OAAO,SAAS;AAChE,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,YAAY,EAAE;AAClC,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,qBAAqB,OAAM,OAAO,MAAM,OAAO,UAAU,QAAQ;AACrF,eAAO,KAAK,cAAc,SAAS,CAAC,QAAO,OAAO,WAAW;;MAEjE;AACI,gBAAY,YAAY,GAAG,MAAM,sDAAsD;AACvF,0BAAkB,EAAE,MAAM;AAC1B,4BAAqB,eAAe,SAAU,EAAE,MAAM,KAAK,EAAE,MAAM;AACnE,2BAAoB,eAAe,SAAU,EAAE,MAAM,KAAK,EAAE,MAAM;AAClE,2BAAoB,eAAe,SAAU,EAAE,MAAM,KAAK,EAAE,MAAM;AAClE,6BAAqB,cAAc;AACnC,4BAAoB,aAAa;AACjC,4BAAoB,aAAc,aAAY;AAC9C,4BAAqB,eAAe,SAChC,CAAC,WAAW,cAAc,aAAa,eACvC,CAAC,WAAW,aAAa,cAAc;AAC3C,wBAAgB,IAAI,oBAAoB,aAAa,WAAW;AAChE,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,eAAO,QAAM,GAAG,YAAY;;MAEhC;AACI,eAAQ,WAAW,YAAY,WAAW,SAAS,cAAe,iBAA6B,SAAS,SAAS;AACjH,6BAAqB,CAAC,aAAa,WAAW;AAC9C,+BAAuB,QAAQ,QAAQ,CAAC,YAAY;AACpD,yBAAiB,QAAQ,QAAQ,CAAC,YAAY;AAC9C,YAAI,eAAe;AACf,iBAAO,eAA2B,QAAO,KAAK;;AAElD,6BAAqB,QAAO;AAC5B,wBAAgB,IAAI,eAAe,YAAY,WAAW,eAAe,MAAM,SAAS,MAAM,SAAS;AACvG,oBAAY,KAAK,cAAc,SAAS,CAAC,UA
AU,gBAAgB;AACnE,eAAO,IAAI,QAAQ;;MAEvB;AACI,eAAQ,WAAW,YAAY,SAAS,cAAe,iBAA6B,cAAc,eAAe;AACjH,+BAAuB;AACvB,wBAAgB,IAAI,eAAe,YAAY,WAAW,cAAc,MAAM,aAAa,MAAM,SAAS,CAAC,YAAY,IAAI;AAC3H,oBAAY,KAAK,cAAc,SAAS,CAAC,cAAc,eAAe;AACtE,eAAO,IAAI,QAAQ;;MAEvB;AACI,6BAAqB,QAAQ;AAC7B,0BAAkB,aAAa,aAAa,SAAS;AACrD,6DAAqD,oBAAgC,GAAG;AACxF,+BAAuB,QAAQ,QAAQ,CAAC,WAAW;AACnD,yBAAiB,EAAE,QAAQ,CAAC,EAAE,OAAO,WAAW;AAChD,wBAAgB,IAAI,gBAAgB,WAAW,SAAS,CAAC,WAAW;AACpE,oBAAY,KAAK,cAAc,SAAS,CAAC,UAAU;AACnD,eAAO,IAAI,QAAQ;;MAEvB;AACI,gBAAQ,SAAS,YAAgB;AACjC,YAAI,UAAU;AAEV,yBAAe,mBAAuB,OAAO,eAAmB;AAChE,iBAAO,KAAK;AACZ,iBAAO,WAAS,WAAW,QAAQ,OAAO,OAAO;;AAGjD,0BAAgB,IAAI,YAAY,OAAO;AACvC,8BAAoB,QAAQ,mBAAmB;AAC/C,iBAAO,KAAK,cAAc,SAAS,IAAI,OAAO;;;MAGtD;AACI,YAAI,EAAE,UAAU;AACZ,gBAAM,IAAI,MAAM;;AAKhB,iBAAO,KAAK,KAAK,EAAE,OAAO,GAAG,EAAE;;;MAGvC;AACI,eAAO,KAAK,KAAK,EAAE,OAAO,EAAE,UAAU,WAAW,KAAK,GAAG,EAAE;;MAE/D;AAEI,eAAO,cAA0B,OAAO,MAAM;;MAElD;AACI,uBAAe,KAAK,MAAM,QAAQ,OAAO;AACzC,aAAK,QAAQ,IAAI,QAAQ,QAAQ;AACjC,eAAO,CAAE,QAAQ,OAAO;;MAE5B;AACI,eAAQ,UAAW,KAAK,eAAe,OAAO,OAAO;AACrD,eAAO,WAAS,qBAAqB,QAAQ,OAAO,OAAO;;MAE/D;AACI,wBAAgB,IAAI,cAAc,OAAM;AACxC,eAAO,KAAK,gBAAgB,SAAS,CAAC,SAAQ,OAAM;;MAExD;AACI,wBAAgB,IAAI,YAAY,OAAM;AACtC,4CAAoC;AACpC,eAAO,KAAK,gBAAgB,SAAS,CAAC,SAAQ,OAAM,OAAO,MAAwB;;MAEvF;AACI,6BAAqB;UACjB,YAAuB,OAAM;UAC7B,GAAG,YAAuB,OAAM;;AAEpC,wBAAgB;UACZ,OAAO,OAAM;UACb,OAAO;UACP,QAAQ,OAAM;;AAElB,+BAAuB;UACnB,YAAuB;UAAa,GAAG,YAAuB;;AAElE,wBAAgB,IAAI,qBAAqB,gBAAgB;AACzD,8CAAsC;AACtC,uBAAe,KAAK,gBAAgB,SAAS,CAAC,UAAU,OAAM,OAAO,MAAwB;AAC7F,eAAO,CAAE,QAAQ,OAAO,QAAQ,OAAO,YAAY,OAAO,OAAO;;MAErE;AACI,wBAAgB,KAAK,QAAQ,IAAI;AACjC,eAAQ,UAAU,OAAO,SAAU;AACnC,0BAAkB,aAAwB;AAC1C;AACA,YAAI;AACA,oBAAU,IAAI,0BAA0B;;AAGxC,oBAAU,IAAI,oBAAoB;;AAEtC,8CAAsC;AACtC,oBAAY,KAAK,gBAAgB,SAAS,CAAC,CAAE,OAAO,WAAW,OAAO,UAAW,OAAO,MAAwB;AAChH,eAAO,CAAE,OAAO,OAAO,QAAQ,IAAI;;MAEvC,2FAA2F;AACvF,uBAAe,KAAK,eAAe,QAAQ,aAAa;AACxD,wBAAgB,KAAK,QAAQ,IAAI,OAAO;AACxC,YAAI,QAAQ;AACR,kBAAQ,WAAW;;AAEvB,YAAI,QAAQ,qBAAqB,cAAuB;AACpD,6BAAmB,iBAA0B,QAAQ;AAKrD,kBAAQ,WAAW,WAAW,IAAI,OAAK,IAAI;;AAE/C,YAAI,QAAQ,eAAe;AACvB,kBAAQ,QAAQ,QAAQ;;AAE5B,YAAI,eAAmB,OAAO,WAAW;AAGrC,kBAAQ,SACJ,wBAA4B,OAAO,OAAO;AAC9C,iBAAO;;AAEX,8BAAsB;AACtB,2BAAmB,OAAO,IAAI;AAC1B,cAAI,OAAM,UAAU;AAChB,kBAAM,IAAI,MAAM;;AAIpB,wBAAc,KAAK,QAAQ,IAAI,OAAM;AACrC,cAAI,QAAQ,WAAW;AACnB,gBAAI,CAAC,QAAQ,gBACT,eAAmB,OAAM,UACrB,OAAM,UAAU;AAMpB,qBAAO;gBACH,OAAO,OAAM;gBACb,SAAS;gBACT,WAAW;gBACX,eAAe,QAAQ;;;AAK/B,gBAAI,QAAQ;AACR,sBAAQ,WAAW;AACnB,sBAAQ,QAAQ,OAAM;;qBAGrB,CAAC,CAAC,QAAQ,aAAa,CAAC,CAAC,QAAQ;AACtC,qBAAQ,QAAQ,WAAW,KAAK,aAAa,UACzC,KAAK,WAAW;AACpB,0BAAc,KAAK;AACnB,sBAAU,KAAK,QAAQ,IAAI,OAAM;qBAE5B,QAAQ,YACb,CAAC,cAAyB,QAAQ,OAAO,OAAM;AAO/C,+BAAmB;AACnB,gCAAoB,OAAM;AAC1B,mBAAM,QAAQ,QAAQ;AACtB,qBAAQ,KAAK,cAAc,QAAO;AAClC,0BAAc,KAAK;AACnB,sBAAU,KAAK,QAAQ,IAAI,OAAM;AACjC,uBAAW,QAAQ;;AAEvB,eAAK,YAAY,OAAM;AACvB,iBAAO,CAAE,OAAO,OAAM,OAAO,SAAS,WAAW;;AAErD,aAAK,YAAY,OAAO;AACxB,2BAAmB,CAAE,OAAO,OAAO,OAAO,SAAS,SAAS,WAAW;AACvE,oBAAY,cAAyB,SAAS,YAAY;AAC1D,uBAAe,KAAK,iBAAiB,KAAK;AACtC,iBAAO,eAA0B,KAAK,OAAO,SAAS,YAAY;;AAEtE,kCAA0B,KAAK,gBAAgB;AAC/C;AACA,YAAI;AACA,kBAAQ,KAAK;;AAEjB,mBAAsB,KAAK,OAAO,QAAQ,YAAY,YAAY;AAClE,sBAAc,QAAQ,UAAQ,KAAK,8BAA8B;AACjE,YAAI;AACA,kBAAQ,KAAK,SAAS;AACtB,eAAK,aAAa,KAAK,CAAE,MAAM,QAAQ,YAAY,MAAM,OAAO,KAAK,aAAa;;AAEtF,YAAI,CAAC,OAAM,QAAQ,0BAA0B,QAAQ,YACjD,kCAAkC;AAClC,2BAAiB,KAAK,aAAa;AACnC,eAAK,8BAA8B;AACnC,iBAAO;;AAEX,eAAO;;MAEX,yFAAyF;AACrF,sBAAc,eAAe,OAAO,GAAG;AACvC,wBAAgB,KAAK,gBAAgB,SAAS,QAAQ,aAAa,aAAa;AAChF,eAAO,WAAS,qBAAqB,QAAQ,QAAQ,QAAQ,OAAO,QAAQ;;MAEhF;AACI,YAAI,CAAE,QAAO,KAAK;AACd,eAAK,YAAY,OAAO;;AAE5B,eAAO,KAAK,
YAAY;;MAE5B;AACI,eAAO,KAAK;;MAEhB;AACI,YAAI,KAAK;AACL;;AAIJ,YAAI,CAAC,OAAM,QAAQ;AACf,0BAAgB,OAAO,KAAK,KAAK;AACjC,kBAAQ,QAAQ;AACZ,iBAAK,MAAM,cAAc,KAAK,YAAY,KAAK;AAC/C,mBAAO,KAAK,YAAY;;;AAGhC,aAAK,eAAe;AACpB,YAAI,KAAK,UAAU,QACd,QAAQ,sBAAuB,eAC5B,KAAK,kBAAkB;AAC3B,eAAK,OAAO;;AAGZ,eAAK,SAAS;;AAElB,YAAI,KAAK;AACL,eAAK,MAAM,UAAU;AACrB,eAAK,MAAM;;AAEf,aAAK,WAAW;;MAEpB;AACI,YAAI,KAAK,uBAAuB;AAC5B,eAAK,sBAAsB,KAAK;AAC5B,gBAAI,CAAC,OAAM,IAAI;AAGX,gCAAkB,OAAM,QAAQ;AAChC,qBAAM,IAAI,SAAS;AACnB,0CAA4B,KAAK,IAAI,QAAO,OAAO,WAAW;AAC9D,qBAAM,IAAI,SAAS;AACnB,kBAAI,sBAAsB;AACtB,uBAAO;;;AAGf,mBAAO;;;AAGf,eAAO,KAAK;;MAGhB;AACI,eAAO,KAAK,qBAAqB,KAAK,oBAAkB;;MAE5D;AACI,wBAAgB,KAAK,QAAQ,IAAI;AACjC,eAAQ,OAAO,OAAO,QAAQ,SAAS,OAAO,YAAa;AAC3D,YAAI,WAAW;AAEX;;AAEJ,kCAA0B,KAAK,gBAAgB;AAC/C;AACA,YAAI;AACA,kBAAQ;;AAEZ,uBAAe,QAAQ;AACvB,YAAI,YAAY;AACZ,qBAAW,gCAA2C,OAAO;AAC7D,kBAAQ,WAAW;;AAEvB,YAAI,UAAU;AACV,4BAAkB,aAAwB;AAC1C;AACA,sBAAY,SAAS,aAAa,SAAS;AAC3C,8BAAoB,kBAAkB;AACtC,cAAI;AACA,aAAC,OAAO,UAAU,uCAAgD,SAAS,IAAI,SAAS;AACxF,sBAAU,IAAI,0BAA0B,WAAW,CAAC,QAAQ,QAAQ;;AAGpE,sBACI,IAAI,oBAAoB,WAAW,CAAC,QAAQ,QAAQ;;AAE5D,uCAA6B,KAAK,eAAe,CAAC,QAAQ,QAAQ;AAClE,cAAI;AACA,iBAAK,QAAQ,IAAI,qBAAqB,QAAQ,QAC1C,aAAa;;AAGjB,iBAAK,QAAQ,IAAI,qBAAqB,QAAQ,QAC1C,aAAa;;AAErB,eAAK,MAAM,2BAA2B,KAAK,WAAW,qBAAqB,SAAS,OAAO,QAAQ;AAGnG,wCAA8B;AAC9B,sCAA4B,KAAK,gBAAgB,SAAS,CAAC,uBAAuB,OAAO,MAAM;AAE/F,gCAAsB,KAAK,QAAQ,IAAI,oBAAoB;AAC3D,kBAAQ,UAAU,cAAc;AAChC,kBAAQ,WAAW,cAAc;AACjC,kBAAQ,WAAW,cAAc;AACjC,kBAAQ,QAAQ,cAAc;AAC9B,eAAK,8BAA8B;AACnC,eAAK,QAAQ,OAAO,oBAAoB;AAExC,kBAAQ,SAAS;AACjB,cAAI;AACA,iBAAK,gBAAgB,SAAa;;;AAItC,6BAAmB,KAAK,eAAe,UAAU,OAAO,OAAO;AAC/D,kBAAQ,UAAU;;;MAG1B;AACI,wBAAgB,KAAK,QAAQ,IAAI;AACjC,eAAQ,SAAU;AAClB,aAAK,eAAe;AACpB,YAAI,iBAAiB;AACjB,kBAAQ,SAAS,oBAAoB,eAAe;;AAExD,eAAO,QAAQ;;MAEnB;AACI,aAAK,iBAAiB,KAAK,aAAa,UAAU;AAClD,YAAI,CAAC,KAAK,qBACN,KAAK,gBAAgB,KAAK,qBAAqB,OAAO;AACtD,qBAAY,MAAK,gBAAgB,OAAO,MAAM,QAAQ;AACtD,eAAK,oBAAoB;AACzB,kBAAQ,KAAK,6BAA6B;;AAG9C,eAAO,KAAK,eAAe,eAAe,UAAU,SAAS;;MAEjE;AACI,eAAO,MAAM,KAAK,MAAM,KAAK,iBAAqB;;MAEtD;AACI,YAAI,KAAK,mBAAmB;AACxB;AACI,mBAAO;;AAGP,gBAAI,OAAM,QAAQ;AACd,oBAAM,IAAI,MAAM;;;;AAI5B,eAAO;;;AAGf;AACI,UAAI,UAAU,aAAa,UAAU;AACjC,eAAO;iBAEF,UAAU,WAAW,UAAU;AACpC,uBAAgB,UAAU,UAAW,IAAI,WAAW,EAAE,UAClD,IAAI,WAAW,EAAE;AACrB,qBAAa,GAAG,IAAI,OAAO,QAAQ,EAAE;AACjC,iBAAO,KAAK,KAAK,MAAM,EAAE;;AAE7B,eAAO;;AAGP,cAAM,IAAI,MAAM,iBAAiB;;;ACjgEzC;AAEA,sBAAgB;ACFhB;;;;;;;;;;;;;;;;AA6BO;AACH,aAAM,IAAI,4BAA4B;;AC9B1C;;;;;;;;;;;;;;;;AAoBA,QAAI;AACA,uBAAgB,SAAS,MAAM,IAAI,oBAAoB;;AAMpD,kBAAc,CAAE;AC3BvB;;;;;;;;;;;;;;;;AAiBO;AACH,aAAQ,QAAQ,qBAAY;AAC5B,aAAQ,KAAM;AACd,eAAQ,OAAO,EAAE;AACjB,aAAO,CAAE,QAAQ,EAAE,QAAQ,OAAO,EAAE,OAAO,OAAO,EAAE;;AAEjD,6BAAuB;MAC1B,YAAY;MACZ,aAAa;MACb,YAAY;;AC1BhB;;;;;;;;;;;;;;;;AA4BO;AACH,aAAQ,QAAQ,qBAAY;AAC5B,aAAQ,aAAM,eAAS;AACvB,0BAAoB,SAAQ,eAAe,MAAK,OAAO;AACvD,uBAAgB,SAAQ,QAAQ,IAAI,YAAY;AAChD,6BAAuB,WAAS,CAAE,QAAQ,CAAE,GAAG,QAAQ,SAAA;AACvD,uBAAiB,SAAQ,QAAQ,IAAI,eAAe;AACpD,eAAS;AACT,6BAAuB,WAAS,CAAE,QAAQ,CAAE,GAAG,QAAQ,SAAA;AACvD,uBAAiB,SAAQ,QAAQ,IAAI,eAAe;AACpD,eAAS;AACT,eAAQ,qBAAqB,CAAE,MAAM,gBAAgB,MAAM;AAC3D,aAAO;;AAEJ,4BAAsB;MACzB,YAAY;MACZ,aAAa;MACb,YAAY;;AC7ChB;;;;;;;;;;;;;;;;AAqBO,oCAAgC;AAChC,qCAAiC;;;;AAIjC,4CAAwC;;;;;;AAUxC;AACH,aAAO,EAAG,QAAQ;AACd,eAAQ,KAAM;AACd,6BAAqB;AACrB,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,aAAa,gBAAgB,SAAS,CAAC,IAAI,EAAE;;;AAarD,iCAA4B,WAAW,iBAAiB,mBAAmB,OAAO,kBAAkB,OAAO,eAAe;AAC7H,aAAO,EAAG,QAAQ;AACd,eAAQ,GAAG,KAAM;AACjB,6BAAqB;AACrB,YAAI,mBAAmB,EAAE,UAAU;AAC/B,wBAAc,aAAa,QAAQ,IAAI,EAAE;AACzC,wBAAc,aAAa,QAAQ,IAAI,EAAE;AACzC,iCAAqB;YACjB,CAAC,MAAM,mBAAmB,MAAM,MAA
M,mBAAmB;YACzD,CAAC,MAAM,mBAAmB,MAAM,MAAM,mBAAmB;YAC3D,IAAI;AACF,mCAAuB;AACvB,4BAAgB;cACZ,QAAQ,MAAM;cACd,OAAO,MAAM;cACb,OAAO,EAAE;;AAEb,4BAAgB;cACZ,QAAQ,MAAM;cACd,OAAO,MAAM;cACb,OAAO,EAAE;;AAEb,6BAAgB,IAAI,gBAAgB,WAAW,EAAE,OAAO,EAAE;AAC1D,mBAAO,aAAa,gBAAgB,UAAS,CAAC,SAAS,UAAU,YAAW,MAAM,OAAO,MAAM;;AAEnG,gCAAsB,UAAQ,CAAE,QAAQ,CAAE,MAAA,OAAM,MAAA,QAAQ,SAAS;AACjE,uBAAa,8BAA8B;AAC3C,uBAAa,8BAA8B;AAE3C,iBAAO;;AAEX,uBAAe,SAAS,YAAW,EAAE,OAAO,EAAE;AAC9C,YAAI,aAAa,mBAAmB,CAAC,GAAG,OAAO,iBAAiB;AAC5D,wBAAc,aAAa,QAAQ,IAAI,EAAE;AACzC,wBAAc,aAAa,QAAQ,IAAI,EAAE;AACzC,wCAA8B,cAAc,EAAE,OAAO,EAAE,OAAO,MAAM,QAAQ,MAAM,QAAQ;AAC1F,sBAAY,aAAa,eAAe,UAAU;AAClD,0BAAgB,aAAa,QAAQ,IAAI,IAAI;AAC7C,kBAAQ,SAAS;AACjB,iBAAO;;AAEX,uCAA+B,OAAM,QAAQ,mCACzC,mBAAmB;AACvB;AACA,YAAI;AACA,oBAAU,IAAI,sBAAsB,iBAAiB,EAAE,OAAO,EAAE,OAAO;;AAGvE,oBAAU,IAAI,gBAAgB,WAAW,EAAE,OAAO,EAAE;;AAExD,eAAO,aAAa,gBAAgB,SAAS,CAAC,GAAG,IAAI;;;ACxG7D;;;;;;;;;;;;;;;;AAmBA,gBAAY;AACL,0BAAsB,mBAAiB;MAC1C,WAAW;MACX,iBAAiB;MACjB,iBAAiB;MACjB,eAAe;;AAEZ,wBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY;;AC7BhB;;;;;;;;;;;;;;;;AAkBA,kBAAc,2BAA2B;;;AAGzC,yBAAqB;;;MAIjB,kCAAkC;;;AAG/B,oBAAc,mBAAiB,CAAE,WAAW,OAAO,iBAAiB;AACpE,wBAAoB;MACvB,YAAY;MACZ,aAAa;MACb,YAAY;;AChChB;;;;;;;;;;;;;;;;AAoBO;AACH,aAAQ,QAAQ,mBAAS,SAAU;AACnC,aAAQ,KAAM;AACd,yBAAiB,GAAG;AACpB,aAAQ,YAAY,SAAS,WAAK,mBAAoB;AACtD,wBAAkB;AAClB,cAAY,gCAA4C,SAAS,YAAY,MAAM,wEAChE,0BAA0B;AAC7C,uBAAiB,mBAA+B,EAAE,OAAO,YAAY,SAAS,WAAW,MAAK;AAC9F,UAAI,SAAS,gBAAgB,KAAK,SAAS,iBAAiB,KACxD,aAAiB,SAAS,SAAS,SAAS;AAC5C,eAAO,WAAS,CAAE,QAAQ,CAAE,IAAK,SAAA;;AAErC,6BAAuB,IAAI,cAAc,UAAU,OAAO;AAC1D,aAAO,SAAQ,gBAAgB,gBAAgB,CAAC,IAAI;;AAEjD,4BAAsB;MACzB,YAAY;MACZ,aAAa;MACb,YAAY;;ACvChB;;;;;;;;;;;;;;;;AAmBO;AACH,aAAQ,QAAQ,mBAAS,SAAU;AACnC,aAAQ,IAAI,iBAAU;AACtB,gBAAU;AACV,yBAAiB,CAAC,IAAI,SAAQ;AAC9B,aAAQ,YAAY,SAAS,aAAQ;AACrC,uBAAiB,mBAA+B,EAAE,OAAO,YAAY,SAAS,GAAmB;AACjG,qCAA+B,IAAI,yBAAyB;AAC5D,aAAO,SAAQ,gBAAgB,wBAAwB,CAAC,KAAK,EAAE;;AAE5D,oCAA8B;MACjC,YAAY;MACZ,aAAa;MACb,YAAY;;AChChB;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,cAAc;AACnB,aAAK,gBAAgB,CAAC,KAAK,QAAQ;AACnC,oCAAwC,QAAQ;AAChD,oCAAwC,QAAQ;AAChD,4BAAoB;AACpB,YAAI,eAAe;AACf,sCAAwC,QAAQ;AAChD,eAAK,cAAc,KAAK;AACxB,0BAAgB;;AAEpB,2BAAmB;AACnB,YAAI,cAAc;AACd,sCAAwC,QAAQ;AAChD,eAAK,cAAc,KAAK;AACxB,yBAAe;;AAEnB,aAAK,cAAc;AACnB,aAAK,WAAW;;;;;yBAKC;wBACD;2DACmC;;;;;;AC3C3D;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,aAAK,gBAAgB,CAAC,KAAK,QAAQ;AACnC,oCAAwC,QAAQ;AAChD,oCAAwC,QAAQ;AAChD,4BAAoB;AACpB,YAAI,eAAe;AACf,sCAAwC,QAAQ;AAChD,eAAK,cAAc,KAAK;AACxB,0BAAgB;;AAEpB,2BAAmB;AACnB,YAAI,cAAc;AACd,sCAAwC,QAAQ;AAChD,eAAK,cAAc,KAAK;AACxB,yBAAe;;AAEnB,aAAK,cAAc;AACnB,aAAK,WAAW;;wBAEA;uBACD;;;;;;yDAMkC;;;;;;;AC9CzD;;;;;;;;;;;;;;;;AAmBO,wBAAkB,EAAG,QAAQ,mBAAS;AACzC,aAAQ,GAAG,aAAM,qBAAU,QAAQ,iBAAU;AAC7C,cAAY,MAAK,MAAM,WAAW,UAAS,MAAM,QAAQ,MAAM;AAE/D,cAAY,UAAU,QAAQ,MAAK,MAAM,WAAW,OAAO,MAAM,QAAQ,MAAM;AAE/E,cAAY,UAAS,QAAQ,MAAK,MAAM,WAAW,OAAM,MAAM,QAAQ,MAAM;AAE7E,WAAM,mBAAoB;AAC1B,UAAI,mBAAmB;AACnB,0BAAkB;;AAEtB,0BAAoB,CAAC,GAAG,OAAM;AAC9B,wBAAkB;AAClB,UAAI,UAAU;AACV,sBAAc,OAAO;AACrB,oBAAY,KAAK;;AAErB,uBAAiB;AACjB,UAAI,UAAS;AACT,qBAAa,OAAM;AACnB,oBAAY,KAAK;;AAErB,sBAAgB,OAAM,QAAQ,8BAC1B,IAAI,uBAAuB,EAAE,OAAO,MAAK,OAAO,UAAS,OAAO,aAAa,YAAY,mBACzF,IAAI,iBAAiB,EAAE,OAAO,MAAK,OAAO,UAAS,OAAO,aAAa,YAAY;AACvF,qBAAe,SAAQ,gBAAgB,SAAS,aAAa,YAAY,GAAG;AAC5E,aAAO;;AAEJ,8BAAwB;MAC3B,YAAY;MACZ,aAAa;MACb,YAAY;;ACnDhB;;;;;;;;;;;;;;;;AAkBA,wBAAkB;AACX,uBAAiB,mBAAiB,CAAE,WAAW,aAAW,OAAO;AACjE,6BAAuB;MAC1B,YAAY;MACZ,aAAa;MACb,YAAY;;ACvBhB;;;;;;;;;;;;;;;;AAkBO;AACH,aAAQ,QAAQ,qBAAY;AAC5B,aAAQ,iBAAU;AAClB,wBAAkB,SAAQ,QAAQ,IAAI,OAAM;
AAC5C,aAAO,WAAS,CAAE,QAAQ,CAAE,GAAG,UAAU,mBAAmB,OAAQ,SAAA;;AAEjE,yBAAmB;MACtB,YAAY;MACZ,aAAa;MACb,YAAY;;AC3BhB;;;;;;;;;;;;;;;;AAiBA,mBAAe;AACR;AACH,sBAAgB,IAAI,eAAe,OAAM,OAAO;AAChD,qBAAe,SAAQ,gBAAgB,SAAS,CAAC,SAAQ;AACzD,aAAO,CAAE,QAAQ,OAAO,QAAQ,OAAO,OAAO,OAAO,OAAO,OAAO;;ACrBvE;;;;;;;;;;;;;;;;AAuBO;AACH,aAAQ,QAAQ,mBAAS,SAAU;AACnC,aAAQ,KAAM;AACd,aAAQ,SAAU;AAElB,UAAI,UAAU;AACV,YAAI,EAAE,UAAU;AACZ,iBAAO,WAAS,CAAE,QAAQ,CAAE,IAAK,SAAA;;AAGrC,4BAAoB,OAAS,EAAE;AAC/B,uBAAe,OAAK,CAAE,QAAQ,CAAE,IAAK,SAAA,UAAS,OAAO,CAAE,OAAO;AAC9D,uBAAe,UAAQ,CAAE,QAAQ,CAAE,MAAM,QAAQ,MAAM,cAAe,SAAA;AACtE,oBAAY;AACZ,iBAAQ,8BAA8B;AACtC,eAAO;;AAGX,UAAI,EAAE,UAAU;AACZ,yBAAiB,OAAK,CAAE,QAAQ,CAAE,OAAO,IAAK,SAAA;AAC9C,uBAAe,OAAK,CAAE,QAAQ,CAAE,GAAG,WAAY,SAAA,UAAS,OAAO,CAAE;AACjE,iBAAQ,8BAA8B;AACtC,eAAO;;AAEX,UAAI,CAAC,iBAAqB,EAAE,OAAO;AAG/B,uBAAe,WAAS,CAAE,QAAQ,CAAE,IAAK,SAAA;AACzC,eAAO,CAAE,QAAQ,OAAO,QAAQ,OAAO,OAAO,OAAO;;AAEzD,UAAI,UAAU;AACV,eAAO,IAAI,GAAG;;AAElB,UAAI,UAAU;AACV,gCAAwB,SAAQ,eAAe,IAAI,QAAQ,wBAA4B,QAAQ;AAC/F,6BAAqB,CAAE,GAAG,GAAG,GAAG;AAChC,uBAAe,WAAS,CAAE,QAAQ,cAAc,SAAA;AAChD,iBAAQ,8BAA8B;AACtC,eAAO;;AAEX,YAAM,IAAI,MAAM,iCAAiC,EAAE,YAAY;;AAE5D,yBAAmB;MACtB,YAAY;MACZ,aAAa;MACb,YAAY;;ACpEhB;;;;;;;;;;;;;;;;;MAmBI;AACI,aAAK,cAAc;AACnB,aAAK,cAAc,kBAA6B,QAAQ;AACxD,aAAK,gBAAgB,OAAO,IAAI,UAAU,IAAI;AAC9C,wBAAgB,IAAI,MAAM,OAAO,SAAS;AAC1C,gBAAQ,KAAK,OAAO,GAAG;AACvB,qBAAa,GAAG,IAAI,QAAQ,QAAQ;AAChC,kBAAQ,KAAK,QAAQ,IAAI,KAAK,OAAO,GAAG;;AAE5C,yBAAiB,CAAC,YAAY,QAAQ;AACtC,qBAAa,GAAG,IAAI,QAAQ,QAAQ;AAChC,wBAAc,QAAQ,IAAI;AAC1B,mBAAS,KAAK,iBAAiB,QAAQ,qBAClB,YAAY;;AAErC,0BAAkB,QAAQ;AAC1B,0BAAkB,QAAQ,QAAQ,SAAS;AAC3C,iBAAS,KAAK,sBAAsB,oBAAoB;AACxD,aAAK,WAAW;;;;;;UAMd,SAAS,KAAK;;;;;AC3CxB;;;;;;;;;;;;;;;;;MAoBI;AACI,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,aAAK,cAAc;AACnB,aAAK,cAAc,kBAA6B,QAAQ;AACxD,sBAAc,KAAK;AACnB,qBAAa,MAAM;AACnB,sBAAc,kBAAkB;AAChC,wBAAe,YAAY,UAAU;AACrC,yBAAiB,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK,MAAM,GAAG;AACzD,aAAK,gBAAgB,OAAO,IAAI,UAAU,IAAI;AAC9C,wBAAgB,IAAI,MAAM,OAAO,SAAS;AAC1C,gBAAQ,KAAK,OAAO,GAAG;AACvB,qBAAa,GAAG,IAAI,QAAQ,QAAQ;AAChC,kBAAQ,KAAK,QAAQ,IAAI,KAAK,OAAO,GAAG;;AAE5C,wBAAgB,SAAS;AACzB,6BAAqB,SAAS,MAAM;AACpC,4BAAoB,SAAS;AAC7B,8BAAsB,OAAO,aAAa,QAAQ;;oBAEtC,sBAAsB,aAAa;;AAE/C,qBAAa,GAAG,IAAI,QAAQ,QAAQ;AAChC,yBAAc,QAAQ,IAAI;AAK1B,6BAAmB;cACjB,aAAa,QAAQ,UAAU,cAAc,QAAQ,IAAI;;kBAErD,KAAK,gBAAgB,UAAU,SAAS;mBACvC,gBAAgB,cAAc,SAAS;;;AAGlD,0BAAkB,QAAQ;AAC1B,sBAAc,QAAQ,QAAQ,SAAS;AACvC,2BAAmB;;gBAEX,aAAa,gBAAgB,UAAU,SAAS;iBAC/C,gBAAgB,cAAc,SAAS;AAChD,aAAK,WAAW;uBACD,SAAS,IAAI,OAAK,SAAS;UACxC;;;;UAIA;sCAC4B;;UAE5B,QAAO,OAAO,QAAQ,QAAO,OAAO;cAChC,QAAO,OAAO,QAAQ,MAAM,OAAO;gCACjB;;;UAGtB,QAAO,OAAO,QAAQ,QAAO,OAAO;cAChC,QAAO,OAAO,QAAQ,MAAM,OAAO;gCACjB;;;UAGtB,QAAO,OAAO,QAAQ,QAAO,OAAO;cAChC,QAAO,OAAO,QAAQ,MAAM,OAAO;cACnC,QAAO,OAAO,QAAQ,MAAM,OAAO;gCACjB;;;;;;;AAkBhC;AACI,yBAAmB,SAAS,QAAQ;AACpC,kBAAY,SAAS,IAAI;AACrB,YAAI,QAAQ;AACR,iBAAO,GAAG,OAAO;;AAGjB,iBAAO;;;AAGf,aAAO,IAAI;;AChHf;;;;;;;;;;;;;;;;AAkBO;AACH,aAAQ,QAAQ,qBAAY;AAC5B,aAAQ,iBAAU;AAClB,wBAAkB,SAAQ,QAAQ,IAAI,OAAM;AAC5C,aAAO,WAAS,CAAE,QAAQ,CAAE,GAAG,UAAU,mBAAmB,OAAQ,SAAA;;AAEjE,yBAAmB;MACtB,YAAY;MACZ,aAAa;MACb,YAAY;;AC3BhB;;;;;;;;;;;;;;;;AAkBO;AACH,2BAAqB;QAAC,YAAY,OAAM;QACpC,GAAG,YAAY,OAAM;;AACzB,sBAAgB;QACZ,OAAO,OAAM;QACb,OAAO;QACP,QAAQ,OAAM;;AAElB,6BAAuB;QAAC,YAAY;QAChC,GAAG,YAAY;;AACnB,sBAAgB,IAAI,qBAAqB,gBAAgB;AACzD,4CAAsC;AACtC,qBAAe,SAAQ,gBAAgB,SAAS,CAAC,UAAU,OAAM,OAAO,MAAwB;AAChG,aAAO,CAAE,QAAQ,OAAO,QAAQ,OAAO,YAAY,OAAO,OAAO;;AC/BrE;;;;;;;;;;;;;;;;AAmBO;AACH,aAAQ,QAAQ,mBAAS,SAAU;AACnC,aAAQ,KAAM;AACd,aAAQ,SAAU;AAClB,2BAAqB;AACrB,oBAAc,eAAmB,EAAE;AACnC,qBAAe,wBAA4B,OAAO;
AAClD,qBAAe,eAAmB;AAClC,cAAY,UAAU,QAAQ,MAAM,kBAAkB,eAAe,sCACvD,EAAE,cAAc;AAE9B,uBAAiB,aAAa,QAAQ,IAAI,EAAE;AAC5C,UAAI,SAAS,YAAY,CAAC,cAAc,EAAE,OAAO,WAC7C,CAAE,UAAS,YAAY,QAAQ,cAAc,SAAS,OAAO;AAC7D,eAAO,cAAc,GAAG,QAAQ;;AAEpC,mBAAa,OAAO,EAAE;AACtB,aAAO,CAAE,QAAQ,EAAE,QAAQ,OAAO,QAAQ,OAAO,EAAE;;AAEhD,4BAAsB;MACzB,YAAY;MACZ,aAAa;MACb,YAAY;;ACzChB;;;;;;;;;;;;;;;;AAuBO;AACH,oBAAc,OAAO,GAAG;AACxB,UAAI,UAAU;AACV,sBAAc,OAAO,IAAI,OAAO,OAAK,CAAE,QAAQ,CAAE,OAAO,IAAK,SAAA;AAC7D,sBAAc,OAAO,IAAI,OAAO,OAAK,CAAE,QAAQ,CAAE,OAAO,IAAK,SAAA;AAC7D,6BAAqB,WAAW,OAAO,MAAM;AAC7C,6BAAqB,WAAW,OAAO,MAAM;AAC7C,wBAAe,UAAQ,CAAE,QAAQ,CAAE,MAAM,cAAc,MAAM,eAAgB,SAAA;AAC7E,cAAM,QAAQ,OAAK,SAAQ,8BAA8B;AACzD,cAAM,QAAQ,OAAK,SAAQ,8BAA8B;AACzD,iBAAQ,8BAA8B;AACtC,iBAAQ,8BAA8B;AACtC,eAAO;;AAEX,UAAI,OAAO,SAAS,OAAM,UAAU;AAChC,yBAAiB,KAAK,MAAM,OAAO,SAAS;AAC5C,yBAAiB,WAAW,OAAO,MAAM,GAAG,WAAW,MAAM;AAC7D,0BAAkB,WAAW,OAAO,MAAM,WAAW,MAAM;AAC3D,wBAAe,WAAW,CAAC,UAAU,YAAY,MAAM;AACvD,iBAAQ,8BAA8B;AACtC,iBAAQ,8BAA8B;AACtC,eAAO;;AAEX,UAAI,OAAM,QAAQ,kCACd,OAAO,GAAG,MAAM,SAAS;AACzB,yBAAgB,IAAI,oBAAoB,OAAO,IAAI,OAAK,EAAE,QAAQ;AAClE,eAAO,SAAQ,gBAAgB,UAAS,QAAQ;;AASpD,uBAAiB,kBAA6B,OAAO,IAAI,OAAK,EAAE,QAAQ;AACxE,wBAAkB,OAAO,IAAI,OAAK,UAAQ;QACtC,QAAQ,CAAE;QACV,OAAO,CAAE,OAAO,CAAC,IAAI,eAAmB,EAAE,MAAM,MAAM;QACtD,SAAA;;AAEJ,sBAAgB,IAAI,cAAc,UAAU,IAAI,OAAK,EAAE;AACvD,qBAAe,SAAQ,gBAAgB,SAAS,WAAW;AAC3D,gBAAU,QAAQ,OAAK,SAAQ,8BAA8B;AAC7D,6BAAuB,UAAQ,CAAE,QAAQ,CAAE,GAAG,SAAU,OAAO,CAAE,OAAO,WAAY,SAAA;AACpF,eAAQ,8BAA8B;AACtC,aAAO;;ACrEX;;;;;;;;;;;;;;;;AAkBO;AACH,aAAQ,QAAQ,mBAAS,SAAU;AACnC,aAAQ,QAAS;AACjB,oBAAc,gBAAoB,MAAM,OAAO,GAAG,OAAO;AACzD,uBAAiB,kBAA6B,OAAO,IAAI,OAAK,EAAE,QAAQ;AACxE,UAAI,eAAmB,cAAc;AACjC,eAAO,SAAQ,eAAe,UAAU,OAAO,GAAG,OAAO;;AAG7D,sBAAgB,OAAO,OAAO,OAAK,eAAmB,EAAE,SAAS;AACjE,UAAI,QAAQ,WAAW;AACnB,eAAO,QAAQ;;AAEnB,qBAAe,QAAQ,IAAI,OAAK,EAAE;AAClC,8BAAoC,QAAQ;AAC5C,aAAO,WAAW,SAAS,OAAO;;AAE/B,2BAAqB;MACxB,YAAY;MACZ,aAAa;MACb,YAAY;;ACtChB;;;;;;;;;;;;;;;;AAkBA,gBAAY,0BAA0B;;;AAG/B,kBAAY,kBAAgB;AAC5B,wBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY;;ACzBhB;;;;;;;;;;;;;;;;AAoBA,gBAAY;;;;;AAOZ,uBAAmB;;;;;;;;;;;;;;;;;;;AAmBZ,kBAAY,mBAAiB,CAAE,WAAW,KAAK,iBAAiB,YAAY,kBAAkB;AAC9F,wBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY;;AClDhB;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC,QAAQ;AAC9B,yBAAiB,WAAW;AAC5B,aAAK,cAAc;AACnB,0CAAkC,UAAU,SAAS,KAAK,OAAO,UAAU,KAAK;AAChF,kCAA0B,UAAU,GAAG,eAAe;AACtD;AACA,YAAI,cAAc;AACd,qBAAW;mBAEN,cAAc;AACnB,qBAAW;;AAGX,gBAAM,IAAI,MAAM,sDAAsD;;AAE1E,aAAK,WAAW;yCACiB;;;UAG/B;;;;kDAIwC;;;;;;8BAMpB;;;;;;;;;yDAS2B;;;;;;;;;;;;;ACxDzD;;;;;;;;;;;;;;;;AAoBO;AACH,oBAAc,SAAQ,QAAQ,IAAI,EAAE;AACpC,wBAAkB,eAAmB,EAAE;AAEvC,iCAA2B,EAAE,MAAM,EAAE,MAAM,SAAS;AACpD,oBAAc,YAAY;AAC1B,sBAAgB,UAAQ,CAAE,QAAQ,CAAE,IAAK,SAAA,UAAS,OAAO,CAAE,OAAO,CAAC,OAAO;AAC1E,qBAAe,QAAQ;AACvB,0BAAoB,IAAI,WAAW,QAAQ,QAAQ;AACnD,0BAAoB,IAAI,WAAW,QAAQ,QAAQ;AACnD,qBAAe;QACX;UACI,QAAQ,MAAM,mBAAmB,KAAK;UACtC,OAAO,MAAM,mBAAmB,KAAK;UACrC,OAAO;;QAEX;UACI,QAAQ,MAAM,mBAAmB,KAAK;UACtC,OAAO,MAAM,mBAAmB,KAAK;UACrC,OAAO;;;AAGf,uBAAiB,SAAQ,gBAAgB,aAAa,QAAQ;AAC9D,uBAAiB,SAAQ,gBAAgB,aAAa,QAAQ;AAC9D,4BAAsB,UAAQ,CAAE,QAAQ,CAAE,MAAM,UAAU,MAAM,WAAY,SAAA;AAC5E,eAAQ,8BAA8B;AACtC,eAAQ,8BAA8B;AACtC,oCAA8B,UAAQ,CAAE,QAAQ,CAAE,GAAG,gBAAiB,SAAA,UAAS,OAAO,CAAE,OAAO,EAAE;AACjG,eAAQ,8BAA8B;AACtC,aAAO;;ACjDX;;;;;;;;;;;;;;;;AAkBO;AACH,aAAQ,QAAQ,qBAAY;AAC5B,aAAQ,iBAAU;AAClB,aAAO,UAAQ,QAAO,OAAqB;;AAExC,wBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY;;AC1BhB;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc;AACnB,2BAAmB,WAAW;AAC9B,aAAK,cAAc;AACnB,aAAK,WAAW;;;;;yBAKC;;uCAEc;;;;;;;;;;AC7BvC;;;;;;;;;;;;;;;;AAkBO,kCAA4B;MAC/B,YAAY;MACZ,aAAa;M
ACb,YAAY,EAAG,QAAQ;AACnB,eAAQ,iBAAU;AAClB,6BAAqB;AACrB,wBAAgB,IAAI,qBAAqB,OAAM;AAC/C,uBAAe,aAAa,gBAAgB,SAAS,CAAC,SAAQ,OAAM;AACpE,eAAO;;;AC1Bf;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,gBAAgB,CAAC;AACtB,qBAAa;AACb,gCAAyB;AACzB,aAAK,cAAc;AACnB,aAAK,WAAW;;;;;;uDAM+B,YAAY;;wBAE3C,KAAK;;;;;;;;;;;;;;;;;AC/B7B;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,qBAAa;AACb,gCAAyB;AACzB,aAAK,cAAc;AACnB,aAAK,WAAW;;;;;;;;;;;;;;;8BAeM,YAAY;4BACd,KAAK;;;;;;;;;;;;;;;;UAgBvB,KAAK;;;;;ACzDf;;;;;;;;;;;;;;;;AAqBO,6BAAyB;MAC5B,YAAY;MACZ,aAAa;MACb,YAAY;;AAEhB;AACA;AACI,aAAQ,QAAQ,mBAAS,SAAU;AACnC,WAAM,UAAW;AACjB,aAAQ,eAAgB;AACxB,sBAAgB,OAAQ,qBAAsB,eAC1C,kBAAkB;AACtB,sBAAgB,OAAQ,qBAAsB,eAC1C,kBAAkB;AACtB,8BAAwB,UACpB;QACI,OAAO;QACP,OAAO;UAEX,CAAC,OAAO,OAAO,OAAO;AAC1B,uBAAiB,CAAC,QAAQ;AAC1B,uBAAiB,CAAC,QAAQ,OAAO;AACjC,UAAI,WAAW;AACX,YAAI,yBAAuB;AACvB,kCAAsB,SAAS,cAAc,UAAU,WAAW;;AAEtE,8BAAoB,OAAO,QAAQ;AACnC,8BAAoB,OAAO,SAAS;AACpC,8BAAoB,UAAU,QAAQ,GAAG,GAAG,OAAO;AACnD,iBAAS,sBAAoB;;AAEjC,8BAAwB,SAAQ,eAAe,UAAU;AAEzD,eAAQ,QAAQ,IAAI,gBAAgB,QAAQ,QAAQ,aAAa;AACjE,eAAQ,MAAM,yBAAyB,SAAQ,WAAW,gBAAgB,SAAS;AACnF,sBAAgB,OAAM,QAAQ,gBAC1B,IAAI,wBAAwB,YAC5B,IAAI,kBAAkB;AAC1B,kBAAY,SAAQ,gBAAgB,SAAS,CAAC,kBAAkB;AAChE,eAAQ,YAAY,gBAAgB;AACpC,aAAO;;AC7DX;;;;;;;;;;;;;;;;AAkBO;AACH,aAAQ,QAAQ,qBAAY;AAC5B,aAAQ,iBAAU;AAClB,aAAO,UAAQ,QAAO,MAAoB;;AAEvC,yBAAmB;MACtB,YAAY;MACZ,aAAa;MACb,YAAY;;AC1BhB;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,gBAAgB,CAAC;AACtB,eAAQ,YAAY,WAAW,QAAQ,WAAY;AACnD,aAAK,cAAc,CAAC,WAAW;AAC/B,sCAA8B,KAAK,MAAM,aAAa,KAAK;AAC3D,wCAAgC,aAAa;AAC7C,4BAAoB;AACpB,YAAI,WAAW;AACX,8BAAoB,IAAI;AACxB,0BAAgB,4BAA4B,OAAW,eAAe,YAAY,YAAY,KAC1F;;AAER,+BAAuB;AACvB,YAAI,SAAS,aAAa;AACtB,6BAAmB;oCACK;;;;;AAK5B,aAAK,WAAW;;;;UAId;;;;;;;;kCAQwB;;;;8BAIJ;;;;;;;;;YASlB;;;iCAGqB;cACnB,4BAA4B;;;YAG9B;qBACS,4BAA4B;;;;;YAKrC;qBACS,4BAA4B;;;;;;YAMrC;;;;;;;ACnFZ;;;;;;;;;;;;;;;;AAqBA;AACI,qBAAe;AACf,aAAO,OAAO,WAAW,KAAK,OAAO,OAAO,SAAS,GAAG,YAAY;AAChE,wBAAgB,OAAO,SAAS,OAAO,OAAO,SAAS,GAAG,UAAU,QAAQ;AAC5E,2BAAmB,0BAAsC;AACzD,eAAO,KAAK;UACR,QAAQ;UACR;UACA,SAAS,KAAK,KAAK,UAAU;;;AAGrC,aAAO;;AAEJ;AACH,8BAAwB,mBAAmB,EAAE;AAC7C,mBAAa;AACb,mBAAa,GAAG,IAAI,gBAAgB,QAAQ;AACxC,eAAQ,QAAQ,YAAY,WAAY,gBAAgB;AACxD;AACA;AACA,YAAI,kBAAkB;AAClB,oBAAU,MAAM,IACZ,IAAI,YAAY,CAAE,YAAY,QAAQ,WAAW,EAAE,MAAM,IAAI,UAAW,UACxE,IAAI,YAAY,CAAE,YAAY,QAAQ,WAAW,EAAE,MAAM,IAAI;;AAGjE,oBAAU,IAAI,cAAc,CAAE,YAAY,QAAQ,WAAW,EAAE,MAAM,IAAI,UAAW;;AAExF,yBAAiB;AACjB,iBAAS,SAAQ,gBAAgB,SAAS,CAAC,SAAS;AACpD,YAAI,eAAe,WAAW,EAAE;AAC5B,mBAAQ,8BAA8B;;;AAG9C,aAAO;;ACvDX;;;;;;;;;;;;;;;;AAmBO;AACH,qBAAe,eAAmB;AAClC,oBAAc,eAAmB,EAAE;AACnC,wBAAkB,QAAQ;AAC1B,4BAAsB,UAAQ,CAAE,QAAQ,CAAE,IAAK,OAAO,CAAE,OAAO,CAAC,WAAW,UAAW,SAAA;AACtF,sBAAgB,OAAO,eAAe,EAAE,OAAO,OAAO;AACtD,6BAAuB,UAAQ,CAAE,QAAQ,CAAE,GAAG,UAAW,OAAO,CAAE,OAAO,WAAY,SAAA;AACrF,eAAQ,8BAA8B;AACtC,eAAQ,8BAA8B;AACtC,aAAO;;AC5BX;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,gBAAgB,CAAC;AACtB,4BAAoB,IAAI,MAAM,OAAO;AACrC,qBAAa,GAAG,IAAI,YAAY,QAAQ;AACpC,sBAAY,KAAK,OAAO,OAAO;;AAEnC,aAAK,cAAc;AACnB,aAAK,OAAO,YAAY;AACxB,sBAAc,kBAAkB,KAAK;AACrC,yBAAiB,kBAAkB;AACnC,aAAK,WAAW;;QAEhB;uBACe;;;;;AAKvB;AACI,mBAAa,OAAO;AACpB,UAAI,OAAO;AACP,cAAM,MAAM,sBAAsB;;AAEtC,4BAAsB,CAAC,WAAW,WAAW,WAAW,WAAW,WAAW;AAC9E,6BAAuB,IAAI,MAAM;AACjC,mBAAa,GAAG,IAAI,OAAO,QAAQ;AAC/B,uBAAe,OAAO,MAAM,cAAc;;AAE9C,aAAO,eAAe;;AC9C1B;;;;;;;;;;;;;;;;;MAmBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,4BAAoB,IAAI,MAAM,OAAO;AACrC,qBAAa,GAAG,IAAI,YAAY,QAAQ;AACpC,sBAAY,KAAK,OAAO,OAAO;;AAEnC,aAAK,cAAc;AACnB,aAAK,OAAO,YAAY;AACxB,YAAI,KAAK,OAAO;AACZ,gBAAM,MAAM,6BAA6B,KAAK;;AAElD,sBAAc,kBAAkB,KAAK;AACrC,4
BAAoB,eAAe,MAAM,KAAK;AAC9C,8BAAsB,IAAI,MAAM,KAAK;AACrC,qBAAa,GAAG,IAAI,OAAO,QAAQ;AAC/B,wBAAc,OAAO,MAAM,YAAY;;AAE3C,0BAAkB,QAAQ,cAAc,MAAM,IAAI;AAClD,2BAAmB,KAAK,YAAY,KAAK,OAAO,QAAQ,YAAY,KAAK,OAAO;AAChF,qBAAa,mBAAmB,cAAc,YAAY;AAC1D,aAAK,WAAW;;QAEhB;;oBAEY;WACT;sBACW;;UAEZ,YAAY,KAAK,OAAO;aACrB,YAAY,KAAK,OAAO,QAAQ,YAAY,KAAK,OAAO;sBAC/C;aACT;wBACW;;;;;;;;ACrDxB;;;;;;;;;;;;;;;;AAoBO;AACH,sBAAgB,OAAM,QAAQ,iCAC1B,IAAI,uBAAuB,EAAE,OAAO,QACpC,IAAI,iBAAiB,EAAE,OAAO;AAClC,aAAO,SAAQ,gBAAgB,SAAS,CAAC,IAAI,EAAE;;ACxBnD;;;;;;;;;;;;;;;;AAqBO,wBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,OAAO;AAC1B,eAAQ,KAAM;AACd,eAAQ,kBAAkB,YAAa;AACvC,6BAAqB;AACrB,sBAAc,EAAE,MAAM;AACtB,yBAAiB,gBAAoB,kBAAkB,EAAE;AACzD,mBAAW;AACX,6BAAqB,oBAAgC,MAAM;AAC3D,qCAA6B,gBAAgB;AAC7C,mCAA2B,aAAa,mBAAmB,CAAC;AAC5D,uBAAe;AACf,YAAI;AACA,cAAI;AACA,6BAAiB,aAAa,QAAQ,IAAI,SAAS;AACnD,2BAAe,SAAS;AACxB,6BAAiB,IAAI,MAAM;AAC3B,yBAAa,GAAG,IAAI,SAAS,QAAQ;AACjC,uBAAS,KAAK,EAAE,MAAM,aAAa;;AAEvC,mCAAuB,iBAAiB,QAAQ,EAAE,OAAO,EAAE,OAAO,cAAc;AAChF,uBAAW,aAAa,eAAe,UAAU,EAAE;AACnD,iCAAqB,aAAa,QAAQ,IAAI,SAAS;AACvD,yBAAa,SAAS;;AAGtB,uBAAW,gBAAc,GAAG,cAAc;;AAE9C,iBAAO,kBAA8B,KAAK,QAAQ;;AAEtD,oCAAwC,OAAO,MAAM;AACrD,2CAAmC,2BAAuC,SAAS,OAAO;AAC1F,uBAAe;AACf,YAAI;AAEA,qBAAW,sBAAkC,aAAa;;AAE9D;AACA,YAAI;AACA,2BAAiB,aAAa,QAAQ,IAAI,SAAS;AACnD,yBAAe,SAAS;AACxB,4BAAkB,WAAW,QAAQ,eAAmB,cAAc,UAAU,EAAE;AAClF,gBAAM,aAAa,eAAe,UAAU,EAAE;AAC9C,0BAAgB,aAAa,QAAQ,IAAI,IAAI;AAC7C,kBAAQ,SAAS;;AAGjB,gBAAM,UAAQ,UAAU,aAAa,UAAU;;AAEnD,YAAI;AACA,uBAAa,8BAA8B;;AAE/C,eAAO;;;AC3Ef;;;;;;;;;;;;;;;;AAoBO;AACH,aAAQ,QAAQ,mBAAS,SAAU;AACnC,aAAQ,KAAM;AACd,yBAAiB,GAAG;AACpB,aAAQ,YAAY,SAAS,WAAK,mBAAoB;AACtD,wBAAkB;AAClB,cAAY,gCAA4C,SAAS,YAAY,MAAM,wEAChE,0BAA0B;AAC7C,uBAAiB,mBAA+B,EAAE,OAAO,YAAY,SAAS,WAAW,MAAK;AAC9F,UAAI,SAAS,gBAAgB,KAAK,SAAS,iBAAiB,KACxD,aAAiB,SAAS,SAAS,SAAS;AAC5C,eAAO,WAAS,CAAE,QAAQ,CAAE,IAAK,SAAA;;AAErC,6BAAuB,IAAI,cAAc,UAAU,OAAO;AAC1D,aAAO,SAAQ,gBAAgB,gBAAgB,CAAC,IAAI,EAAE;;AAEnD,4BAAsB;MACzB,YAAY;MACZ,aAAa;MACb,YAAY;;ACvChB;;;;;;;;;;;;;;;;AAoBO;AACH,aAAQ,QAAQ,mBAAS,SAAU;AACnC,aAAQ,IAAI,eAAO,UAAW;AAC9B,gBAAU;AACV,yBAAiB,CAAC,QAAO,SAAS;AAClC,aAAQ,YAAY,SAAS,WAAK,mBAAoB;AACtD,uBAAiB,mBAA+B,EAAE,OAAO,YAAY,SAAS,GAAmB,MAAK;AACtG,2BAAqB;AACrB,sCAAgC,IAAI,cAAc,UAAU,OAAO;AACnE,gCAAyB,SAAQ,gBAAgB,yBAAyB,CAAC,IAAI,EAAE;AACjF,qCAA+B,IAAI,yBAAyB;AAC5D,qBAAe,SAAQ,gBAAgB,wBAAwB,CAAC,IAAI,oBAAmB,EAAE;AACzF,eAAQ,8BAA8B;AACtC,aAAO;;AAEJ,oCAA8B;MACjC,YAAY;MACZ,aAAa;MACb,YAAY;;ACtChB;;;;;;;;;;;;;;;;AAiBO;AACH,oBAAc,IAAI,cAAc,UAAU,OAAO;AACjD,yBAAmB,SAAQ,gBAAgB,SAAS,CAAC,IAAI;AACzD,gBAAU,IAAI,cAAc,UAAU,OAAO,MAAM,MAAM;AACzD,0BAAoB,SAAQ,gBAAgB,SAAS,CAAC,IAAI;AAC1D,aAAO,CAAC,YAAY;;ACtBxB;;;;;;;;;;;;;;;;AAmBO,sCAAgC;MACnC,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,OAAO;AAC1B,eAAQ,KAAM;AACd,eAAQ,YAAY,SAAS,WAAK,uBAAwB;AAC1D,6BAAqB;AACrB,gBAAY,EAAE,MAAM,WAAW,GAAG,MAAM,uDAAuD,EAAE,MAAM;AACvG,0BAAkB,CAAC,GAAG;AACtB,gBAAY,gCAA4C,SAAS,YAAY,MAAM,wEAChE,0BAA0B;AAC7C,yBAAiB,mBAA+B,EAAE,OAAO,YAAY,SAAS,WAAW;AACzF,kCAA0B,wBAAsB,GAAG,qBAAqB,UAAU;AAClF,eAAO,CAAC,QAAQ;;;AChCxB;;;;;;;;;;;;;;;;AAmBO;AACH,qBAAe,eAAmB;AAClC,oBAAc,eAAmB,EAAE;AACnC,wBAAkB,QAAQ;AAC1B,4BAAsB,UAAQ,CAAE,QAAQ,CAAE,IAAK,OAAO,CAAE,OAAO,CAAC,WAAW,UAAW,SAAA;AACtF,sBAAgB,OAAO,eAAe,WAAW,QAAQ;AACzD,6BAAuB,UAAQ,CAAE,QAAQ,CAAE,GAAG,UAAW,OAAO,CAAE,OAAO,WAAY,SAAA;AACrF,eAAQ,8BAA8B;AACtC,eAAQ,8BAA8B;AACtC,aAAO;;AC5BX;;;;;;;;;;;;;;;;AAmBO,uBAAmB;MACtB,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,OAAO;AAC1B,eAAQ,KAAM;AACd,eAAQ,UAAU,QAAS;AAC3B,6BAAqB;AACrB,sBAAc,EAAE,MAAM;AACtB,yBAAiB,gBAAoB,MAAM,EAAE;AAC7C,mBAAW;AACX,6BAAqB,oBAAgC,MAAM;AAC3D,sCAA8B,gBAAgB;AAC9C,mCAA2B,aAAa,mBAA
mB,CAAC;AAC5D,8BAAsB;AACtB,wBAAgB;AAChB,YAAI;AACA,cAAI;AACA,6BAAiB,aAAa,QAAQ,IAAI,UAAU;AACpD,2BAAe,SAAS;AACxB,6BAAiB,IAAI,MAAM;AAC3B,yBAAa,GAAG,IAAI,SAAS,QAAQ;AACjC,uBAAS,KAAK,EAAE,MAAM,aAAa;;AAEvC,oCAAwB,iBAAiB,QAAQ,EAAE,OAAO,EAAE,OAAO,cAAc;AACjF,wBAAY,aAAa,eAAe,UAAU,EAAE;AACpD,kCAAsB,aAAa,QAAQ,IAAI,UAAU;AACzD,0BAAc,SAAS;;AAGvB,wBAAY,gBAAc,GAAG,cAAc;;AAE/C,wBAAc,KAAK;AACnB,iBAAO,kBAA8B,KAAK,QAAQ;;AAEtD,oCAAwC,OAAO,MAAM;AACrD,4CAAoC,2BAAuC,UAAU,OAAO;AAC5F,uBAAe;AACf,YAAI;AAEA,qBAAW,sBAAkC,cAAc;;AAE/D,oBAAY,SAAS,WAAW,aAAa,UAAU;AACvD,wBAAgB;AACZ,uBAAa,8BAA8B;;AAE/C,eAAO;;;AChEf;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc,SAAS,IAAI,WAAU,GAAE,KAAqB,OAAO,KAAK,GAAE;AAC/E,qBAAa,OAAO;AACpB,sBAAc,kBAAkB;AAChC,sBAAc,SAAS,IAAI,QAAK,GAAE,IAAI,KAAK;AAC3C,oBAAY,SAAS,IAAI,WAAU,GAAE,KAAK,OAAO,IAAI,KAAK;AAC1D,+BAAuB,CAAC,aAAa,aAAa,aAAa,aAAa,MAAM,GAAG;AACrF,uBAAe,SAAS,YAAY,IAAI;AACxC,YAAI,SAAS;AACT,eAAK,WAAW;sBACN;oBACF;;;;;wCAKoB;;4CAEI;;;;;AAKhC;;AAEJ,aAAK,WAAW;QAChB,iBAAiB,SAAS;QAC1B,eAAe,SAAS;;;UAGtB;8BACoB;;iDAEmB;;qDAEI;;;UAG3C;yBACe;;;;;AC1DzB;;;;;;;;;;;;;;;;;MA0DI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,aAAK,cAAc,SAAS,IAAI,WAAU,GAAE,KAAqB,OAAO,KAAK,GAAE;AAC/E,qBAAa,OAAO;AACpB,sBAAc,kBAAkB;AAChC,sBAAc,SAAS,IAAI,QAAK,GAAE,IAAI,KAAK;AAC3C,oBAAY,SAAS,IAAI,WAAU,GAAE,KAAK,OAAO,IAAI,KAAK;AAC1D,wBAAe,YAAY,MAAM;AACjC,uBAAe,YAAY,UAAU;AACrC,uBAAe,GAAG,QAAO,OAAO,QAAQ,KAAK,YAAY,OAAO;AAChE,0BAAkB,SAAS,IAAI,WAAW,QAAQ,OAAO,MAAM,IAAI;AACnE,uBAAe,SAAS,YAAY,IAAI;AACxC,uBAAe;AACf,YAAI,SAAS;AACT,2BAAiB;UACnB;;0CAEgC;;8CAEI;;;;AAIlC,qBAAW;UACb;UACA;sCAC4B,OAAO,YAAY;UAC/C,QAAO,OAAO;aACX;YACD;wCAC4B,OAAO,YAAY;;;;AAK/C,2BAAiB;UACnB;UACA,cAAc;UACd,eAAe;UACf;;6CAEmC;kDACK;;;AAGtC,qBAAW;UACb;UACA;sCAC4B,OAAO,YAAY;UAC/C,QAAO,OAAO;aACX;YACD;wCAC4B,OAAO,YAAY;;;UAGjD,QAAO,OAAO;aACX,QAAO,OAAO,QAAQ,KAAK,YAAY,OAAO;YAC/C;wCAC4B,OAAO,YAAY;YAC/C,QAAO,OAAO;eACX;cACD;0CAC4B,OAAO,YAAY;;;;;AAKrD,aAAK,WAAW;cACV,iBAAiB,SAAS;cAC1B,eAAe,SAAS;;;UAG5B;;UAEA;;;;;;ACtIV;;;;;;;;;;;;;;;;AAmBO,gCAA4B,EAAG,QAAQ,mBAAS;AACnD,aAAQ,KAAM;AACd,aAAQ,UAAU,QAAS;AAC3B,sBAAgB,OAAM,QAAQ,iCAC1B,IAAI,uBAAuB,EAAE,OAAO,UAAU,QAC9C,IAAI,iBAAiB,EAAE,OAAO,UAAU;AAC5C,qBAAe,SAAQ,gBAAgB,SAAS,CAAC,IAAI,EAAE;AACvD,aAAO;;AAEJ,8BAAwB;MAC3B,YAAY;MACZ,aAAa;MACb,YAAY;;AC/BhB;;;;;;;;;;;;;;;;AAqBO,6BAAyB;MAC5B,MAAM;MACN,MAAM;;;MAGN;AACI,aAAK,gBAAgB,CAAC,SAAS,SAAS,SAAS;AACjD,aAAK,cAAc,4BAAwC,QAAQ;AACnE,aAAK,WAAW;;;UAGd;;;;;;;;;;;;;AChCV;;;;;;;;;;;;;;;;AAuBA,gBAAY;AACL;AACH,aAAQ,QAAQ,qBAAY;AAC5B,aAAQ,GAAG,KAAM;AACjB,oBAAc,YAAwB,EAAE,OAAO,EAAE;AACjD,UAAI,EAAE,UAAU;AACZ,sBAAc,SAAQ,QAAQ,IAAI,EAAE;AACpC,sBAAc,SAAQ,QAAQ,IAAI,EAAE;AACpC,4BAAoB,IAAI,uBAAuB,iBAAsC,MAAM,EAAE,OAAO,EAAE;AACtG,4BAAoB,IAAI,uBAAuB,iBAAsC,MAAM,EAAE,OAAO,EAAE;AACtG,wBAAe;UACX;YACI,QAAQ,MAAM,mBAAmB,KAAK;YACtC,OAAO,MAAM,mBAAmB,KAAK;YACrC,OAAO,EAAE;;UAEb;YACI,QAAQ,MAAM,mBAAmB,KAAK;YACtC,OAAO,MAAM,mBAAmB,KAAK;YACrC,OAAO,EAAE;;UAEb;YACI,QAAQ,MAAM,mBAAmB,KAAK;YACtC,OAAO,MAAM,mBAAmB,KAAK;YACrC,OAAO,EAAE;;UAEb;YACI,QAAQ,MAAM,mBAAmB,KAAK;YACtC,OAAO,MAAM,mBAAmB,KAAK;YACrC,OAAO,EAAE;;;AAGjB,yBAAiB,SAAQ,gBAAgB,aAAa,SAAQ;AAC9D,yBAAiB,SAAQ,gBAAgB,aAAa,SAAQ;AAC9D,8BAAsB,UAAQ,CAAE,QAAQ,CAAE,MAAM,UAAU,MAAM,WAAY,SAAA;AAC5E,iBAAQ,8BAA8B;AACtC,iBAAQ,8BAA8B;AAEtC,eAAO;;AAEX,UAAI,SAAQ,mBAAmB,CAAC,GAAG;AAC/B,sBAAc,SAAQ,QAAQ,IAAI,EAAE;AACpC,sBAAc,SAAQ,QAAQ,IAAI,EAAE;AACpC,sCAA8B,gBAAY,EAAE,OAAO,EAAE,OAAO,MAAM,QAAQ,MAAM,QAAQ;AACxF,oBAAY,SAAQ,eAAe,UAAU;AAC7C,wBAAgB,SAAQ,QAAQ,IAAI,IAAI;AACxC,gBAAQ,SAAS;AACjB,eAAO;;AAEX;AACA,UAAI,OAAM,QAAQ;AACd,kBAAU,IAAI,sBAAsB,KAAK,EAAE,OAAO,EAAE;;AAGpD,kBAAU,IAAI,gBAAgB,KAAK
,EAAE,OAAO,EAAE;;AAElD,aAAO,SAAQ,gBAAgB,SAAS,CAAC,GAAG,IAAI;;AAE7C,6BAAuB;MAC1B,YAAY;MACZ,aAAa;MACb,YAAY;;ACpFhB;;;;;;;;;;;;;;;;AAiBO,uCAAkC;MACrC,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,mBAAS;AAC5B,cAAkB;AAElB,eAAQ,OAAO,UAAW;AAC1B,eAAQ,eAAe,cAAc,kBAAmB;AACxD,2BAAmB;AACnB,0BAAkB,WAAW,SAAS,MAAM;AAC5C,2BAAmB,WAAW,SAAS,OAAO;AAC9C,iCAAyB;AACzB,gCAAwB;AACxB,kCAA0B;AAC1B,eAAO,wBAAqC,WAAW,YAAY,kBAAkB,iBAAiB;;;AC/B9G;;;;;;;;;;;;;;;;AAiBA,sCAAgC;AACzB,wCAAkC;MACrC,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,mBAAS;AAC5B,cAAkB;AAElB,eAAQ,OAAO,UAAW;AAC1B,eAAQ,eAAe,cAAc,gBAAgB,sBAAuB;AAC5E,2BAAmB;AACnB,0BAAkB,WAAW,SAAS,MAAM;AAC5C,2BAAmB,WAAW,SAAS,OAAO;AAC9C,eAAQ,iBAAiB,gBAAiB,0BAAwB,WAAW,YAAY,eAAe,cAAc,gBAAgB;AACtI,eAAO,CAAC,iBAAiB;;;AC9BjC;;;;;;;;;;;;;;;;AAkBA,sCAAgC;AACzB,wCAAkC;MACrC,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,mBAAS;AAC5B,cAAkB;AAElB,eAAQ,OAAO,UAAW;AAC1B,eAAQ,eAAe,cAAc,gBAAgB,gBAAiB;AACtE,2BAAmB;AACnB,0BAAkB,WAAW,SAAS,MAAM;AAC5C,2BAAmB,WAAW,SAAS,OAAO;AAC9C,iCAAyB;AACzB,gCAAwB;AACxB,kCAA0B;AAC1B,gCAAwB;AACxB,eAAQ,iBAAiB,kBAAmB,0BAAwB,WAAW,YAAY,kBAAkB,iBAAiB,mBAAmB;AACjJ,eAAO,CAAC,iBAAiB;;;ACnCjC;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc;AACnB,4BAAoB,WAAW;AAC/B,2BAAmB,WAAW;AAC9B,0BAAkB,KAAK,IAAI,SAAS,QAAQ;AAC5C,0BAAkB,KAAK,IAAI,SAAS,QAAQ;AAC5C,aAAK,cAAc;AACnB,mCAA2B,gBAA4B,QAAQ,aAAa;AAC5E,8BAAsB,QAAQ,QAAQ;AACtC,8BAAsB,QAAQ,QAAQ;AACtC,0BAAkB;AAClB,YAAI,OAAO,cAAc;AACrB,wBAAc,uBAAuB,UAAU,QAAQ;;AAGvD,wBAAc;2BACC,UAAU,KAAK;;;AAGlC,aAAK,WAAW;;;;;4CAKoB,oBAAoB,2BAA2B,oBAAoB;4CACnE,oBAAoB,2BAA2B,oBAAoB;iDAC9D;iDACA;YACrC;uCAC2B,yCAAyC;;;;;;;;AChDhF;;;;;;;;;;;;;;;;AAkBO,qCAA+B;MAClC,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,OAAO;AAC1B,eAAQ,iBAAU;AAClB,eAAQ,SAAS,WAAW,UAAW;AACvC,6BAAqB;AACrB,wBAAgB,IAAI,cAAc,OAAM,OAAO,SAAS,WAAW;AACnE,uBAAe,aAAa,gBAAgB,SAAS,CAAC,SAAQ,OAAM;AACpE,eAAO;;;AC3Bf;;;;;;;;;;;;;;;;AAkBA,gBAAY,0BAA0B;;;AAG/B,kBAAY,kBAAgB;AAC5B,wBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY;;ACzBhB;;;;;;;;;;;;;;;;AAkBA,mBAAe;AACR,qBAAe,kBAAgB;AAC/B,2BAAqB;MACxB,YAAY;MACZ,aAAa;MACb,YAAY;;ACvBhB;;;;;;;;;;;;;;;;AAkBA,iCAA2B;AACpB,gCAA0B,mBAAiB,CAAE,WAAW,sBAAoB,iBAAiB;AAC7F,sCAAgC;MACnC,YAAY;MACZ,aAAa;MACb,YAAY;;ACvBhB;;;;;;;;;;;;;;;;AAmBA,gBAAY;AACL,0BAAsB,mBAAiB;MAC1C,WAAW;MACX,iBAAiB;MACjB,iBAAiB;MACjB,eAAe;;AAEZ,wBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY;;AC7BhB;;;;;;;;;;;;;;;;AAkBA,gBAAY;AACL,kBAAY,kBAAgB;AAC5B,wBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY;;ACvBhB;;;;;;;;;;;;;;;;AAmBO,8BAAwB;MAC3B,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,OAAO;AAC1B,eAAQ,KAAM;AACd,eAAQ,QAAS;AACjB,6BAAqB;AACrB,sBAAc,EAAE,MAAM;AACtB,yBAAiB,IAAI,MAAM;AAC3B,qBAAa,GAAG,IAAI,SAAS,QAAQ;AACjC,mBAAS,KAAK,EAAE,MAAM,KAAK;;AAE/B;AACA,YAAI,aAAa,mBAAmB,CAAC;AACjC,2BAAiB,aAAa,QAAQ,IAAI,EAAE;AAC5C,yBAAe,SAAS;AACxB,4BAAkB,iBAAa,QAAQ,EAAE,OAAO,EAAE,OAAO,MAAM;AAC/D,gBAAM,aAAa,eAAe,UAAU,EAAE;AAC9C,0BAAgB,aAAa,QAAQ,IAAI,IAAI;AAC7C,kBAAQ,SAAS;;AAGjB,gBAAM,gBAAc,GAAG,MAAM;;AAEjC,eAAO;;;AC3Cf;;;;;;;;;;;;;;;;AAmBO;AACH,aAAQ,QAAQ,OAAO,qBAAY;AACnC,aAAQ,QAAS;AACjB,aAAQ,KAAM;AACd,yBAAiB,GAAG;AAEpB,cAAQ,KAAK,aAAa;AAC1B,qBAAe,SAAQ,SAAS,EAAE;AAClC,aAAQ,cAAc,aAAa,WAAY,cAAc,QAAQ,MAAM,EAAE,OAAO,EAAE;AACtF,aAAO;QACH,SAAQ,eAAe,aAAa,EAAE,OAAO;QAC7C,SAAQ,eAAe,CAAC,QAAQ,SAAS,SAAS;;;AAGnD,2BAAqB;MACxB,YAAY;MACZ,aAAa;MACb,YAAY;;ACpChB;;;;;;;;;;;;;;;;AAuDA,4BAAsB;MAClB;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;;AAEJ,+BAA2B;AACvB,sBAAe;;AC/FnB;;;;;;;;;;;;;;;;ACAA;AAGA,sBAAgB;ACHhB;;;;;;;;;;;;;;;;sBAsCuB;MACrB,aAAa;MACb,oBAAoB;MACpB,sBAAsB;MACtB,aAAa;MACb,
eAAe;MACf,kBAAkB;MAClB,MAAQ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AC7CV,mBAAA,WAAA;;ACAA,6BAAA,WAAA;;ACAA,yBAAA,WAAA;;ACAA,8CAAA,WAAA;AACA,sCAAqC;AACnC,qBAAiB,OAAO,aAAa,eAAe,SAAS,gBAAgB,SAAS,cAAc,MAAM;AAC1G,QAAI,OAAO,eAAe;AAAa,mBAAa,cAAc;AAClE,WACF;AACE,uCAAgC,kCAAiC;AAEnE;AAA4B,YAAG,WAAW,UAAQ;AAAQ,qCAA2B,WAAW;;AAAQ,eAAO;;AAAM;AAA4B,YAAG,WAAW,UAAQ;AAAQ,qCAA2B,WAAW;;AAAQ,eAAO;;AAAO;AAA6B,YAAG,WAAW,UAAQ;AAAQ,qCAA2B,WAAW;;AAAQ,eAAO;;AAAO;AAA6B,YAAG,WAAW,UAAQ;AAAQ,qCAA2B,WAAW;;AAAQ,eAAO;;AAAQ;AAA6B,YAAG,WAAW,UAAQ;AAAQ,qCAA2B,WAAW;;AAAQ,eAAO;;AAAQ,mBAAW,OAAO,mCAAgC,cAAY,iCAA8B;AAAG,4BAAoB;AAAG;AAAQ,WAAI,OAAO;AAAQ,YAAG,OAAO,eAAe;AAAM,0BAAgB,OAAK,OAAO;;;AAAM,uBAAe;AAAG,wBAAgB;AAAiB,kBAAU;AAAyB,cAAM;;AAAS,+BAAuB;AAAM,kCAA0B;AAAM,gCAAwB;AAAM,iCAAyB;AAAM,2BAAmB,OAAO,WAAS;AAAS,8BAAsB,OAAO,kBAAgB;AAAW,4BAAoB,OAAO,YAAU,YAAU,OAAO,QAAQ,aAAW,YAAU,OAAO,QAAQ,SAAS,SAAO;AAAS,6BAAqB,CAAC,sBAAoB,CAAC,uBAAqB,CAAC;AAAsB,mCAA2B,OAAO,6BAA2B;AAAM,UAAG;AAAwB,kBAAO,OAAO;AAAU,uBAAa,OAAO;AAAgB,yBAAe,OAAO;;AAAkB,4BAAoB;AAAG;AAA0B,YAAG,OAAO;AAAe,iBAAO,OAAO,cAAc,MAAK;;AAAiB,eAAO,kBAAgB;;AAAK;AAA8C;AAAW;AAAa,UAAG;AAAqB,YAAG;AAAuB,4BAAgB,eAAgB,QAAQ,mBAAiB;;AAAS,4BAAgB,YAAU;;AAAI,gBAAM;AAAqC,cAAG,CAAC;AAAO,qBAAO;AAAc,cAAG,CAAC;AAAS,uBAAS;AAAgB,qBAAS,SAAS,aAAa;AAAU,iBAAO,OAAO,gBAAgB,UAAS,SAAO,OAAK;;AAAS,qBAAW;AAA8B,oBAAQ,MAAM,UAAS;AAAM,cAAG,CAAC,IAAI;AAAQ,kBAAI,IAAI,WAAW;;AAAK,kBAAO,IAAI;AAAQ,iBAAO;;AAAK,YAAG,QAAQ,QAAQ,SAAO;AAAG,wBAAY,QAAQ,QAAQ,GAAG,QAAQ,OAAM;;AAAK,qBAAW,QAAQ,QAAQ,MAAM;AAAG,gBAAQ,MAAM,qBAAoB;AAAa,cAAG,CAAE,eAAc;AAAa,kBAAM;;;AAAM,gBAAQ,MAAM,sBAAqB;AAAO,gBAAM;AAAiB,kBAAQ,QAAQ;;AAAS,eAAO,aAAW;AAAW,iBAAM;;AAA8B;AAAsB;AAAI,8BAAkB;;AAAmC,kBAAQ,MAAM;AAA2G,gBAAM;;AAAE,iBAAO,kBAAkB;iBAAe;AAAsB,YAAG,OAAO,QAAM;AAAa,kBAAM;AAAuB,mBAAO,KAAK;;;AAAI,qBAAW;AAAuB;AAAS,cAAG,OAAO,eAAa;AAAY,mBAAO,IAAI,WAAW,WAAW;;AAAI,kBAAK,KAAK,GAAE;AAAU,kBAAO,OAAO,UAAO;AAAU,iBAAO;;AAAM,YAAG,OAAO,cAAY;AAAa,uBAAW;mBAAmB,OAAO,aAAW;AAAa,uBAAW;;AAAU,YAAG,OAAO,SAAO;AAAY,kBAAM;AAAiB,iBAAK;;;AAAS,YAAG,OAAO,UAAQ;AAAa,cAAG,OAAO,YAAU;AAAY,sBAAQ;AAAG,kBAAQ,MAAI;AAAM,kBAAQ,OAAK,QAAQ,QAAM,OAAO,aAAW,cAAY,WAAS;;iBAAe,sBAAoB;AAAuB,YAAG;AAAuB,4BAAgB,KAAK,SAAS;mBAAa,SAAS;AAAe,4BAAgB,SAAS,cAAc;;AAAI,YAAG;AAAY,4BAAgB;;AAAW,YAAG,gBAAgB,QAAQ,aAAW;AAAG,4BAAgB,gBAAgB,OAAO,GAAE,gBAAgB,YAAY,OAAK;;AAAQ,4BAAgB;;AAAG,YAAG;AAAqB,kBAAM;AAAqC,gBAAG,CAAC;AAAO,uBAAO;AAAc,gBAAG,CAAC;AAAS,yBAAS;AAAgB,uBAAS,SAAS,aAAa;AAAU,mBAAO,OAAO,gBAAgB,UAAS,SAAO,OAAK;;AAAS,uBAAW;AAA8B,sBAAQ,MAAM,UAAS;AAAM,gBAAG,CAAC,IAAI;AAAQ,oBAAI,IAAI,WAAW;;AAAK,oBAAO,IAAI;AAAQ,mBAAO;;;AAAU,kBAAM;AAAyB,sBAAQ,IAAI;AAAe,gBAAI,KAAK,OAAM,KAAI;AAAO,gBAAI,KAAK;AAAM,mBAAO,IAAI;;AAAc,cAAG;AAAuB,yBAAW;AAAyB,wBAAQ,IAAI;AAAe,kBAAI,KAAK,OAAM,KAAI;AAAO,kBAAI,eAAa;AAAc,kBAAI,KAAK;AAAM,qBAAO,IAAI,WAAW,IAAI;;;AAAW,sBAAU;AAAuC,sBAAQ,IAAI;AAAe,gBAAI,KAAK,OAAM,KAAI;AAAM,gBAAI,eAAa;AAAc,gBAAI,SAAO;AAAsB,kBAAG,IAAI,UAAQ,OAAK,IAAI,UAAQ,KAAG,IAAI;AAAU,uBAAO,IAAI;AAAU;;AAAO;;AAAW,gBAAI,UAAQ;AAAQ,gBAAI,KAAK;;;AAAO,yBAAe;AAAgB,mBAAS,QAAM;;;;AAAa,UAAG;AAAqB,YAAG,OAAO,gBAAc;AAAa,wBAAY,qBAAsB;;;AAAa,gBAAQ,OAAO,YAAU,QAAQ,IAAI,KAAK;AAAS,gBAAQ,OAAO,eAAa,QAAQ,KAAK,KAAK;AAAS,WAAI,OAAO;AAAiB,YAAG,gBAAgB,eAAe;AAAM,iBAAO,OAAK,gBA
AgB;;;AAAM,wBAAgB;AAAK,UAAG,OAAO;AAAa,qBAAW,OAAO;AAAa,UAAG,OAAO;AAAe,sBAAY,OAAO;AAAe,UAAG,OAAO;AAAQ,gBAAM,OAAO;AAAQ,yBAAiB,QAAQ;AAAK,0BAAkB,QAAQ;AAAM,oCAA4B,QAAQ;AAAgB;AAAe,UAAG,OAAO;AAAc,qBAAW,OAAO;AAAc;AAAkB,UAAG,OAAO;AAAiB,wBAAc,OAAO;AAAiB,UAAG,OAAO,gBAAc;AAAU,YAAI;;AAAmC;AAAe,sBAAc,IAAI,YAAY,MAAM,CAAC,SAAU,KAAI,SAAU,MAAI,GAAE,SAAU;AAAY;AAAe,6BAAqB;AAAE,yBAAiB;AAAE,kBAAU;AAAM,uBAAe;AAAE;AAAgC,YAAG,CAAC;AAAW,gBAAM,uBAAqB;;;AAAO;AAAyB,mBAAS,OAAO,MAAI;AAAO,gBAAO,MAAK,kCAAgC,QAAM;AAA8B,eAAO;;AAAK;AAAoD,kBAAQ,CAAC,QAAS;AAAc,qBAAQ;AAAE,cAAG,SAAM,QAAM,SAAM,UAAW,SAAM;AAAG,sBAAS,MAAI,UAAQ,KAAG;AAAE,mBAAI,WAAW;AAAK,yBAAa,MAAI,MAAI;;AAAK,iBAAO;WAAK,OAAQ;AAAc,qBAAQ,WAAW,IAAI;AAAQ,6BAAmB,KAAI;AAAK,iBAAO;;AAAM;AAAiC,cAAG,eAAa;AAAS,mBAAO,aAAa;AAAK,cAAG,eAAa;AAAU,mBAAO,QAAQ;AAAK,iBAAO;;AAAI,mBAAS,SAAS;AAAO,oBAAU;AAAG,oBAAU;AAAE,YAAG;AAAM,uBAAU,GAAE,IAAE,KAAK,QAAO;AAAK,4BAAc,IAAI,SAAS;AAAI,gBAAG;AAAW,kBAAG,UAAQ;AAAE,wBAAM;AAAY,oBAAM,KAAG,UAAU,KAAK;;AAAS,oBAAM,KAAG,KAAK;;;;AAAK,kBAAQ,KAAK,MAAM,MAAK;AAAO,cAAI,mBAAmB;AAAK,YAAG,UAAQ;AAAE,uBAAa;AAAO,eAAO;;AAAI;AAA+C,mBAAS,YAAU;AAAG,0BAAgB,SAAS,MAAM;AAAe,iBAAO,SAAO;;AAAW,yBAAe,eAAa;AAAS,YAAG,cAAY,eAAa,CAAC;AAAM,iBAAO,SAAS;;AAAO,eAAO;AAAW,iBAAO,MAAM,OAAM,YAAW,UAAS,WAAU;;;AAAO;AAAoD,qBAAW,MAAI;AAAe,mBAAQ;AAAG,eAAM,CAAE,QAAK;AAAS,mBAAO,KAAK;AAAO,cAAG,CAAC;AAAG,mBAAO;AAAI,cAAG,CAAE,MAAG;AAAM,oBAAK,OAAO,aAAa;AAAI;;AAAS,mBAAO,KAAK,SAAO;AAAG,cAAI,MAAG,QAAM;AAAK,oBAAK,OAAO,aAAc,MAAG,OAAK,IAAE;AAAI;;AAAS,mBAAO,KAAK,SAAO;AAAG,cAAI,MAAG,QAAM;AAAK,iBAAI,MAAG,OAAK,KAAG,MAAI,IAAE;;AAAQ,iBAAI,MAAG,MAAI,KAAG,MAAI,KAAG,MAAI,IAAE,KAAK,SAAO;;AAAG,cAAG,KAAG;AAAO,oBAAK,OAAO,aAAa;;AAAS,qBAAO,KAAG;AAAM,oBAAK,OAAO,aAAa,QAAM,MAAI,IAAG,QAAM,KAAG;;;AAAO,eAAO;;AAAI;AAA0C,eAAO,MAAI,kBAAkB,oBAAmB,KAAI,kBAAgB;;AAAG;AAA4D,YAAG,CAAE,mBAAgB;AAAG,iBAAO;AAAE,uBAAa;AAAO,qBAAW,SAAO,kBAAgB;AAAE,qBAAU,GAAE,IAAE,KAAI,QAAO,EAAE;AAAG,kBAAM,KAAI,WAAW;AAAG,cAAG,KAAG,SAAO,KAAG;AAAO,qBAAO,KAAI,WAAW,EAAE;AAAG,gBAAE,QAAQ,MAAE,SAAO,MAAI,KAAG;;AAAK,cAAG,KAAG;AAAK,gBAAG,UAAQ;AAAO;AAAM,iBAAK,YAAU;qBAAU,KAAG;AAAM,gBAAG,SAAO,KAAG;AAAO;AAAM,iBAAK,YAAU,MAAI,KAAG;AAAE,iBAAK,YAAU,MAAI,IAAE;qBAAW,KAAG;AAAO,gBAAG,SAAO,KAAG;AAAO;AAAM,iBAAK,YAAU,MAAI,KAAG;AAAG,iBAAK,YAAU,MAAI,KAAG,IAAE;AAAG,iBAAK,YAAU,MAAI,IAAE;;AAAQ,gBAAG,SAAO,KAAG;AAAO;AAAM,iBAAK,YAAU,MAAI,KAAG;AAAG,iBAAK,YAAU,MAAI,KAAG,KAAG;AAAG,iBAAK,YAAU,MAAI,KAAG,IAAE;AAAG,iBAAK,YAAU,MAAI,IAAE;;;AAAI,aAAK,UAAQ;AAAE,eAAO,SAAO;;AAAS;AAAkD,eAAO,kBAAkB,MAAI,oBAAmB,QAAO;;AAAiB;AAA8B,kBAAQ;AAAE,qBAAU,GAAE,IAAE,KAAI,QAAO,EAAE;AAAG,kBAAM,KAAI,WAAW;AAAG,cAAG,KAAG,SAAO,KAAG;AAAM,gBAAE,QAAQ,MAAE,SAAO,MAAI,KAAI,WAAW,EAAE,KAAG;AAAK,cAAG,KAAG;AAAI,cAAE;mBAAY,KAAG;AAAK,mBAAK;mBAAU,KAAG;AAAM,mBAAK;;AAAO,mBAAK;;AAAE,eAAO;;AAAI;AAA0C,2BAAmB,IAAI,OAAM;;AAAQ,2BAAmB;AAAM;AAA6B,YAAG,IAAE,WAAS;AAAG,eAAG,WAAS,IAAE;;AAAS,eAAO;;AAAE;AAAsE;AAAyC,kBAAO;AAAI,eAAO,WAAS,QAAM,IAAI,UAAU;AAAK,eAAO,YAAU,SAAO,IAAI,WAAW;AAAK,eAAO,YAAU,SAAO,IAAI,WAAW;AAAK,eAAO,YAAU,SAAO,IAAI,WAAW;AAAK,eAAO,aAAW,UAAQ,IAAI,YAAY;AAAK,eAAO,aAAW,UAAQ,IAAI,YAAY;AAAK,eAAO,aAAW,UAAQ,IAAI,aAAa;AAAK,eAAO,aAAW,UAAQ,IAAI,aAAa;;AAAK,uBAAe,oBAAiB,wBAAqB,sBAAmB,0BAAuB;AAAM,UAAG;;AAAyB,mCAA2B,OAAO,qBAAmB;AAAS,UAAG;AAAwB,qBAAW,OAAO;AAAc,kBAAO,OAAO;;AAAe,YAAG,OAAO;AAAe,uBAAW,OAAO;;AAAmB,uBAAW,IAAI,YAAY,OAAO,CAAC,SAAU,yBAAuB,gBAAe,SAAU,aAAW,gBAAe,QAAS;AAAO,cAAG,CAAE,YAAW,kBAAkB;AAAoB,gBAAI;AAA+N,gBAAG;AAAqB,sBAAQ,IAAI;;AAAqH,kBAAM,MAAM;;;;AAAgB,UAAG;AAAY,kBAAO,WAAW;;AAAO,+BAAuB,QAAO;AAAW,iCAA2B;AAAQ,UAAG,CAAC;AAAwB,4BAAoB,kBAAgB,KAAG;;AAAa;AAAyC,eAAM,UAAU,SAAO;AAAG,yBAAa,UAAU;AAAQ,cAAG,OAAO,YAAU;AAAY,qBAAS;AAAQ;;AAAS,qBAAS,SAAS;AAAK,cAAG,
OAAO,SAAO;AAAU,gBAAG,SAAS,QAAM;AAAW,qBAAO,aAAa;;AAAW,qBAAO,cAAc,MAAK,SAAS;;;AAAW,iBAAK,SAAS,QAAM,SAAU,OAAK,SAAS;;;;AAAO,yBAAiB;AAAG,uBAAe;AAAG,uBAAe;AAAG,uBAAe;AAAG,0BAAkB;AAAG,+BAAuB;AAAM,UAAG;AAAuB,6BAAmB;AAAK;AAAkB,YAAG;AAAuB;AAAO,YAAG,OAAO;AAAW,cAAG,OAAO,OAAO,aAAW;AAAW,mBAAO,YAAU,CAAC,OAAO;AAAW,iBAAM,OAAO,UAAU;AAAQ,wBAAY,OAAO,UAAU;;;AAAU,6BAAqB;;AAAc;AAAuB,6BAAmB;AAAK,6BAAqB;;AAAY;AAAmB,YAAG;AAAuB;AAAO,6BAAqB;;AAAY;AAAmB,YAAG;AAAuB;AAAO,YAAG,OAAO;AAAY,cAAG,OAAO,OAAO,cAAY;AAAW,mBAAO,aAAW,CAAC,OAAO;AAAY,iBAAM,OAAO,WAAW;AAAQ,yBAAa,OAAO,WAAW;;;AAAU,6BAAqB;;AAAe;AAAyB,qBAAa,QAAQ;;AAAI;AAA0B,sBAAc,QAAQ;;AAAI,sBAAc,KAAK;AAAK,uBAAe,KAAK;AAAM,4BAAoB;AAAE,iCAAyB;AAAK,kCAA0B;AAAK;AAA8B,gBAAO,CAAC,wBAAuB;AAAuD;AAAkB,YAAG,OAAO;AAA2B,iBAAO,0BAA0B;;;AAAkB;AAAiC;AAAkB,YAAG,OAAO;AAA2B,iBAAO,0BAA0B;;AAAiB,YAAG,mBAAiB;AAAG,cAAG,yBAAuB;AAAM,0BAAc;AAAsB,mCAAqB;;AAAK,cAAG;AAAuB,2BAAa;AAAsB,oCAAsB;AAAK;;;;AAAa,aAAO,qBAAmB;AAAG,aAAO,qBAAmB;AAAG;AAAqB,YAAG,OAAO;AAAY,iBAAO,WAAW;;AAAM,YAAG;AAAuB,kBAAQ,MAAM,yBAAwB,IAAI,QAAO;AAAO,gBAAM;AAAG,YAAI;AAAM,YAAI;AAAM,gBAAM;AAAK,qBAAW;AAAE,eAAK,WAAS,OAAK;AAA+C,cAAM,IAAI,YAAY,aAAa;;AAAM;AAA+B,eAAO,OAAO,UAAU,aAAW,KAAI,WAAW,UAAQ,KAAI,QAAQ,YAAU;;AAAE,0BAAkB;AAAwC;AAA6B,eAAO,UAAU,UAAS;;AAAe,0BAAkB;AAAU;AAA6B,eAAO,UAAU,UAAS;;AAAe,2BAAmB;AAAuC,UAAG,CAAC,UAAU;AAAiB,yBAAe,WAAW;;AAAgB;AAAqB;AAAI,cAAG;AAAY,mBAAO,IAAI,WAAW;;AAAY,cAAG;AAAY,mBAAO,WAAW;;AAAqB,kBAAK;;;AAA8D,gBAAM;;;AAAM;AAA4B,YAAG,CAAC,cAAa,uBAAoB,0BAAwB,OAAO,UAAQ,cAAY,CAAC,UAAU;AAAiB,iBAAO,MAAM,gBAAe,CAAC,aAAY,gBAAgB,KAAK;AAAmB,gBAAG,CAAC,SAAS;AAAO,oBAAK,yCAAuC,iBAAe;;AAAI,mBAAO,SAAS;aAAmB,MAAM;AAAW,mBAAO;;;AAAc,eAAO,IAAI,QAAQ;AAAyB,kBAAQ;;;AAAe;AAAsB,mBAAS,CAAC,GAAI;AAAe;AAA0C,yBAAY,UAAS;AAAQ,iBAAO,SAAO;AAAQ,uBAAW;AAAO,cAAG,CAAC;AAAwB,mCAAqB,QAAQ,cAAc;AAAO,oBAAQ,cAAc,QAAQ;AAAY,sBAAQ,uBAAuB,GAAE;AAAW,oBAAG,CAAC,EAAE;AAAiB,sCAAoB;;;;;AAAyB,YAAG,CAAC;AAAwB,2BAAiB;;AAAoB;AAA2C,0BAAgB,OAAO,aAAY,OAAO;;AAAW;AAA0C,iBAAO,mBAAmB,KAAK;AAAiB,mBAAO,YAAY,YAAY,QAAO;aAAQ,KAAK,UAAS;AAAiB,gBAAI,4CAA0C;AAAQ,kBAAM;;;AAAU;AAA4B,cAAG,CAAC,cAAY,OAAO,YAAY,yBAAuB,cAAY,CAAC,UAAU,mBAAiB,CAAC,UAAU,mBAAiB,OAAO,UAAQ;AAAY,kBAAM,gBAAe,CAAC,aAAY,gBAAgB,KAAK;AAAmB,2BAAW,YAAY,qBAAqB,UAAS;AAAM,qBAAO,OAAO,KAAK,2BAA0B;AAAiB,oBAAI,oCAAkC;AAAQ,oBAAI;AAA6C,uCAAuB;;;;AAAoC,mBAAO,uBAAuB;;;AAA4B,YAAG,OAAO;AAAoB;AAAI,2BAAY,OAAO,mBAAmB,MAAK;AAAiB,mBAAO;;AAAiB,gBAAI,wDAAsD;AAAG,mBAAO;;;AAAO;AAAmB,eAAM;;AAAG,uBAAe;AAAG;AAA0B,gBAAQ;;AAAc,UAAG,CAAC;AAAuB,mBAAW,KAAK,CAAC,MAAK;AAAW;;AAAwB,0BAAkB;AAAE,6CAAqC;AAAE,6CAAqC;AAAE;AAAoF,qBAAW,aAAW;AAAE,8BAAoB,sBAAoB;AAAE,8BAAoB,sBAAoB;AAAE,wBAAc;AAAW,2CAAiC;AAAoB,2CAAiC;;AAAoB,aAAO,4BAA0B;AAAuB,wBAAgB,CAAC,OAAM,IAAG,QAAO,IAAG,OAAM,IAAG,OAAM,IAAG,KAAI,IAAG,OAAM,IAAG,OAAM,GAAE,SAAQ,IAAG,OAAM,GAAE,QAAO,IAAG,QAAO,GAAE,aAAY,GAAE,QAAO,IAAG,QAAO,GAAE,QAAO,IAAG,SAAQ,KAAI,OAAM,IAAG,QAAO,IAAG,OAAM,IAAG,QAAO,IAAG,SAAQ,IAAG,QAAO,IAAG,QAAO,IAAG,QAAO,IAAG,QAAO,IAAG,QAAO,IAAG,SAAQ,IAAG,OAAM,IAAG,QAAO,IAAG,QAAO,IAAG,OAAM,IAAG,QAAO,IAAG,OAAM,IAAG,MAAK,IAAG,QAAO,IAAG,QAAO,IAAG,OAAM,IAAG,QAAO,KAAI,UAAS,KAAI,QAAO,KAAI,QAAO,KAAI,QAAO,KAAI,SAAQ,KAAI,QAAO,KAAI,QAAO,KAAI,SAAQ,IAAG,QAAO,IAAG,OAAM,KAAI,OAAM,KAAI,QAAO,KAAI,QAAO,KAAI,SAAQ,KAAI,SAAQ,KAAI,WAAU,IAAG,QAAO,KAAI,QAAO,KAAI,SAAQ,KAAI,OAAM,KAAI,OAAM,KAAI,QAAO,KAAI,QAAO,KAAI,SAAQ,KAAI,SAAQ,IAAG,MAAK,KAAI,QAAO,KAAI,OAAM,KAAI,QAAO,IAAG,WAAU,IAAG,SAAQ,KAAI,SAAQ,GAAE,UAAS,KAAI,QAAO,KAAI,SAAQ,KAAI,SAAQ,KAAI,SAAQ,KAAI,SAAQ,KAAI,SAAQ,KAAI,UAAS,KAAI,QAAO,IAAG,WAAU,IAAG,cAAa,IAAG,OAAM,IAAG,YAAW,KAAI,cAAa,KAAI,YAAW,IAAG,SAAQ,IAAG,cAAa,GAAE,YAAW,IAAG,UAAS,IAAG,aAAY,IAAG,WAAU,KAAI,cAAa,IAAG,YAAW,GAAE,cAAa,IAAG
,aAAY,IAAG,UAAS,IAAG,WAAU,IAAG,WAAU,KAAI,cAAa,IAAG,aAAY,IAAG,UAAS,GAAE,cAAa,IAAG,UAAS,IAAG,iBAAgB,IAAG,iBAAgB,KAAI,eAAc,GAAE,WAAU,IAAG,SAAQ,IAAG,UAAS,IAAG,cAAa,KAAI,QAAO,KAAI,QAAO,IAAG,QAAO,IAAG,SAAQ,KAAI,WAAU,KAAI,QAAO,IAAG,WAAU,IAAG,WAAU,IAAG,iBAAgB,IAAG,YAAW,IAAG,UAAS;AAAK,6CAAqC;AAAM;AAA4C,YAAG,QAAM,KAAG,OAAK,mBAAmB,UAAQ,OAAK,QAAM,QAAM;AAAE,iBAAM;AAAI,YAAG,SAAO;AAAE,iBAAO;AAAE,YAAG,SAAO;AAAW,kBAAM;AAAS,oCAA0B,QAAQ,KAAK,qBAAoB,oCAAkC;AAAG,8BAAoB;AAAE,YAAG,yBAAuB;AAAM,2BAAe,QAAQ,gBAAgB,qBAAoB,oCAAkC,GAAE,uBAAsB;AAAG,cAAG,cAAY;AAAuB,cAAE;AAAM,8BAAgB;AAAE,gBAAG,SAAO;AAAE,qBAAO;;;AAAG,kBAAQ,QAAQ,OAAO,qBAAoB,QAAM,GAAE;AAAO,YAAG,OAAK;AAAE,iBAAO,MAAI;AAAgB,cAAK,iDAA+C;;AAAI,aAAO,4BAA0B;AAAuB;AAAoC,YAAG;AAAuB,gBAAK;AAAuF,YAAG,CAAC;AAAY,gBAAK;AAAoD,4BAAoB,cAAY,MAAI,KAAG;AAAE,sBAAY,QAAQ,SAAS;AAAa,gBAAQ,OAAO;AAAY,gBAAQ,eAAe;AAAS,gBAAQ,eAAe,OAAO,QAAQ,eAAe,QAAQ,QAAQ,SAAQ;AAAG,gBAAQ,OAAO,UAAQ;;AAAU;AAAsC,YAAG;AAAuB,gBAAK;AAAyF,YAAG,CAAC;AAAY,gBAAK;AAAsD,sBAAY,QAAQ,SAAS;AAAa,gBAAQ,OAAO,YAAY,CAAC,KAAM;;AAAW;AAAuC,YAAG;AAAuB,gBAAK;AAA0F,YAAG,CAAC;AAAY,gBAAK;AAAuD,4BAAoB,cAAY,MAAI,KAAG;AAAE,sBAAY,QAAQ,SAAS;AAAa,YAAG;AAAS,wBAAW,QAAQ;AAAO,kBAAQ,mBAAmB;;;AAAS,oBAAY,CAAC,gBAAe,GAAE,gBAAe,CAAC,aAAY,GAAE,WAAU,IAAG,eAAc,IAAG,gBAAe,IAAG,aAAY;AAAW,+BAAuB,QAAQ,iBAAgB,CAAC,uBAAsB;AAAG,oDAA4C,QAAQ;SAAkB,qBAAoB;AAAW,8BAAoB;AAAE,qBAAU,GAAE,IAAE,iBAAgB,EAAE;AAAG,kBAAQ;;AAAuB,gBAAQ,kBAAgB;AAAM,qBAAU,GAAE,IAAE,MAAI,GAAE,EAAE;AAAE,8BAAoB,QAAQ,kBAAgB,IAAE,KAAG;AAAE,4BAAoB,QAAQ,kBAAgB,MAAI,KAAG,QAAQ;AAAgB,sBAAY,QAAQ,kBAAgB;AAAI,4BAAoB,WAAS,KAAG;AAAQ,wBAAc;AAAM,qBAAU,GAAE,IAAE,KAAI,EAAE;AAAE,8BAAoB,YAAU,IAAE,KAAG;AAAE,gBAAQ,MAAM,qBAAoB,QAAQ,kBAAgB,OAAK,GAAE;AAAW,gBAAQ,MAAM,qBAAoB,QAAQ,kBAAgB,MAAI,GAAE,QAAQ;AAAiB,gBAAQ,MAAM,qBAAoB,QAAQ,kBAAgB,MAAI,GAAE;SAAK,YAAW;SAAa,UAAS,IAAG,cAAa,MAAK,iBAAgB;SAAa,iBAAgB;AAAW,YAAG,QAAQ,iBAAe;AAAM,iBAAM,QAAQ,aAAa,SAAO;AAAG,oBAAQ,aAAa;;AAAQ,kBAAQ,eAAa;;AAAK,YAAG,0BAAwB;AAAiB;SAA4B,YAAW;AAAmB,iBAAO;AAAgB,YAAG;AAAI,kBAAQ,MAAM,qBAAoB,KAAG,KAAG,GAAE;AAAU,kBAAQ,MAAM,qBAAoB,KAAG,KAAG,GAAE;AAAG,kBAAQ,MAAM,qBAAoB,KAAG,MAAI,GAAE;AAAG,kBAAQ,MAAM,qBAAoB,KAAG,MAAI,GAAE;AAAG,kBAAQ;AAAkB,iCAAuB,KAAG,GAAE;AAAY,iCAAuB,GAAE,GAAE;AAAG,6BAAiB;AAAE,cAAG;AAAwB,wBAAY,CAAC,KAAM;;;SAAY,cAAa;AAAW,gBAAQ;AAAkB,gBAAQ,MAAM,qBAAoB,mBAAiB,KAAG,GAAE;AAAI,gBAAQ,MAAM,qBAAoB,mBAAiB,KAAG,GAAE;AAAG,+BAAuB,mBAAiB,GAAE;AAAY,2BAAiB,eAAa;AAAE,+BAAuB,GAAE,GAAE;AAAG,oBAAY,CAAC,KAAM;SAAgB,qBAAoB;AAAW,sBAAa,QAAQ;AAAU,wBAAY,QAAQ,SAAS;AAAG,cAAG,WAAS,QAAQ;AAAQ,oBAAQ,mBAAmB,QAAQ;;;AAAS,gBAAQ,WAAS;AAAG,qBAAU,GAAE,IAAE,QAAQ,cAAc,QAAO,EAAE;AAAG,wBAAW,QAAQ,cAAc;AAAG,kBAAO;;AAAY,gBAAQ,gBAAc;AAAG,qBAAU,GAAE,IAAE,QAAQ,eAAe,QAAO,EAAE;AAAG,wBAAW,QAAQ,eAAe;AAAG,wBAAY,QAAO;AAAQ,kBAAQ,eAAe;AAAS,kBAAO;;AAAY,gBAAQ,iBAAe;SAAI,gBAAe;AAAkB,YAAG,CAAC;AAAQ;AAAO,YAAG,QAAQ;AAAkB,0BAAc,oBAAoB,QAAQ,mBAAiB,OAAK;AAAG,8BAAoB,QAAQ,mBAAiB,OAAK,KAAG;AAAE,gBAAM;AAAW,gBAAM,QAAQ;;AAAkB,gBAAQ,mBAAiB;AAAE,YAAG,QAAQ,qBAAmB,QAAQ;AAAU,gBAAM,QAAQ;AAAW,gBAAQ,YAAU;AAAE,YAAG,QAAQ;AAAO,kBAAQ,OAAO,UAAQ;SAAM,oBAAmB;AAAiB,eAAO,QAAQ,SAAS,QAAO,QAAQ;AAAQ,gBAAQ,cAAc,KAAK;AAAQ,gBAAQ,eAAe,OAAO,QAAQ,eAAe,QAAQ,UAAQ;AAAG,gBAAQ,eAAe,QAAO;AAAS,gBAAO,UAAQ;SAAW,uBAAsB;SAAiB,wBAAuB;AAAmC,gBAAO,YAAU;AAAY,kBAAM,EAAE;AAAQ,oBAAQ,EAAE;AAAO,cAAG,QAAO;AAAQ,oBAAQ,sCAAoC,QAAO,QAAQ;AAAiB,cAAG,EAAE,mBAAiB,EAAE,mBAAiB;AAAiB,yBAAW,QAAQ,SAAS,EAAE;AAAc,gBAAG;AAAQ,qBAAO,OAAO,YAAY,EAAE,MAAK,EAAE;;AAAsB,sBAAQ,MAAM,4CAA0C,MAAI,yBAAuB,EAAE,kBAAgB;;AAAuC,oBAAQ,sCAAoC;AAAU;;AAAO,cAAG,QAAM;AAA+B;qBAAuD,QAAM;AAAe,2BAAe,EAAE;qBAAc,QAAM;AAAiB,6BAAiB,EAAE;qBAAmB,QAAM;AAAc,0BAAc,EAAE;qBAAmB,QAAM;AAAgB,4BAAgB,EAAE;qBAAmB,QAAM;AA
AU,oBAAO,SAAO;AAAK,gBAAG;AAAkB,gCAAkB;AAAQ,gBAAG,QAAO;AAAY,sBAAO;AAAa,qBAAO,QAAO;;qBAAoB,QAAM;AAAS,gBAAI,YAAU,EAAE,cAAY,OAAK,EAAE;qBAAiB,QAAM;AAAY,gBAAI,YAAU,EAAE,cAAY,OAAK,EAAE;qBAAiB,QAAM;AAAS,kBAAM,YAAU,EAAE,cAAY,OAAK,EAAE;qBAAiB,QAAM;AAAQ,2BAAa,QAAO,WAAS,QAAQ,KAAK,qBAAoB,QAAO,QAAQ,SAAO,MAAI;AAAG,gBAAG;AAAU,sBAAQ,mBAAmB;;qBAAiB,QAAM;AAAc,oBAAQ,mBAAmB;qBAAgB,QAAM;AAAkB,oBAAQ,sBAAsB,EAAE;qBAAc,EAAE,KAAK,WAAS;AAAgB,oBAAO,YAAY,EAAE;;AAAW,gBAAI,oCAAkC;;AAAK,kBAAQ,sCAAoC;;AAAW,gBAAO,UAAQ;AAAY,cAAI,4BAA0B,EAAE,WAAS,MAAI,EAAE,SAAO,OAAK,EAAE;;AAAU,YAAG;AAAqB,kBAAO,GAAG,WAAU;AAAe,oBAAO,UAAU,CAAC,MAAK;;AAAS,kBAAO,GAAG,SAAQ;AAAe,oBAAO,QAAQ;;AAAQ,kBAAO,GAAG,QAAO;AAAe,oBAAQ,IAAI;;;AAAoD,gBAAO,YAAY,CAAC,KAAM,QAAO,WAAY,OAAO,0BAAwB,YAAW,YAAwB,YAAwB,cAA4B;SAAmC,sBAAqB;AAAW,4BAAkB,WAAW;AAA6C,gBAAQ,cAAc,KAAK,IAAI,OAAO;SAAiB,cAAa;AAAW,YAAG,QAAQ,cAAc,UAAQ;AAAG,kBAAQ;AAAuB,kBAAQ,uBAAuB,QAAQ,cAAc;;AAAI,YAAG,QAAQ,cAAc,SAAO;AAAE,iBAAO,QAAQ,cAAc;;AAAW,iBAAO;SAAM,cAAa;AAAgB,gBAAM,YAAY,QAAM;AAAM,eAAM,YAAY,QAAM;;;AAAO;AAAgD,qBAAW,WAAS;AAAS,oBAAU;AAAS,qBAAa;;AAAU,aAAO,yBAAuB;AAAoB;AAA4B,eAAO;;AAAc,aAAO,sBAAoB;AAAiB;AAAsD,cAAM,uBAAqB,aAAa,aAAW,WAAS,CAAC,WAAS,aAAa,YAAU,oBAAmB,MAAK,OAAK,aAAa,QAAM;;AAAqB;AAAiC,yBAAe,MAAM,MAAK;;AAAM;AAAwB,UAAG;AAAqB,8BAAoB;AAAW,kBAAM,QAAQ;AAAY,iBAAO,EAAE,KAAG,MAAI,EAAE,KAAG;;iBAAa;AAAwB,8BAAoB;AAAW,iBAAO,YAAY,QAAM,OAAO;;iBAA0C,OAAO,YAAU;AAAa,8BAAoB;;AAAa,8BAAoB;AAAW,iBAAO,YAAY;;AAAO;AAAyB,4BAAoB,uBAAqB,KAAG;AAAM,eAAO;;AAAM;AAA2B,YAAG;AAAuB,iBAAO,oCAAoC,GAAE,GAAE,MAAK;AAAK,mBAAW,QAAQ,CAAC,MAAU;;AAAU;AAAuE,YAAG,kBAAgB;AAAc,sBAAY,CAAC,KAAM;mBAAwC;AAAwB,sBAAY,CAAC,cAAe,gBAAe,KAAM;;AAA4B,wBAAY,QAAQ,SAAS;AAAgB,wBAAW,WAAS,QAAQ;AAAO,cAAG,CAAC;AAAQ;;AAAO,kBAAO,YAAY,CAAC,KAAM;;AAAuB,eAAO;;AAAE;AAAkB;;AAAQ;AAAqF,yBAAe,iBAAe;AAAE,oBAAU,YAAU;;AAAE;AAAkD,YAAG,QAAM,KAAG,OAAK,mBAAmB,UAAQ,OAAK;AAAK,iBAAM;AAAI,YAAG;AAAuB,oBAAQ,QAAQ,KAAK,qBAAoB,QAAM,GAAE,KAAI;AAAS,cAAG,QAAM;AAAY,mBAAM;AAAI,cAAG,QAAM;AAAY,mBAAM;AAAG,cAAG,QAAM;AAAK,mBAAO;AAAE,gBAAK,+CAA6C;;AAAS,0BAAc,QAAQ,KAAK,qBAAoB,QAAM;AAAG,cAAG,OAAK;AAAU,mBAAM;AAAG,qBAAS,YAAY;AAAM,qBAAS,OAAK;AAAQ,kBAAQ,MAAM,qBAAoB,oCAAkC,GAAE;AAAM,+BAAmB;AAAK,iBAAM,QAAM;AAAgB,mBAAK,YAAY;AAAM,gBAAG,OAAK;AAAM,qBAAM;;AAAI;AAA+C,mBAAK,QAAQ,KAAK,qBAAoB,oCAAkC;;AAAG,iBAAO;;;AAAG;AAA8C,eAAO,mCAAiC;;AAAE;AAA8C,eAAO,mCAAiC;;AAAE;AAA8C,2BAAmB,WAAW,MAAK,KAAI,MAAI;;AAAK;AAAyC,eAAO,UAAU;;AAAuB;AAAyD,0BAAgB,UAAU,SAAO;AAAE,oBAAU;AAAY,mBAAS,WAAW,cAAY;AAAG,gBAAM,QAAM;AAAE,qBAAU,GAAE,IAAE,aAAY;AAAK,8BAAoB,IAAE,KAAG,UAAU,IAAE;;AAAG,kBAAQ,0CAA0C,OAAM,aAAY,MAAK;AAAM,qBAAa;AAAO,eAAO;;AAAI,2DAAmD;AAAG;AAAsC,YAAG,CAAC,iBAAiB;AAAO,2BAAiB,QAAM;;AAAG,mBAAS,iBAAiB;AAAM,aAAK,SAAO;AAAE;AAAO,eAAM,KAAG,mBAAmB;AAAW,cAAG,OAAK,OAAK,OAAK;AAAK,kBAAI,MAAI,IAAE,CAAC;AAAE,iBAAK,KAAK,oBAAoB,OAAK;AAAI,mBAAK;;AAAO,kBAAI,MAAI,IAAE,CAAC;AAAE,iBAAK,KAAK,oBAAoB,OAAK;AAAI,mBAAK;;;AAAG,eAAO;;AAAK;AAAuE,uDAA+C,SAAO;AAAY,gBAAM,QAAM;AAAE,qBAAU,GAAE,IAAE,aAAY;AAAK,yDAA+C,KAAG,oBAAoB,IAAE;;AAAG,2BAAiB,QAAM;AAAE,mBAAS,CAAC,eAAa,qBAAqB,SAAO,WAAW,CAAC,QAAM;AAAG,YAAG;AAAc,uBAAW,+CAA+C;AAAG,0BAAc,+CAA+C;AAAG,0BAAc,iBAAiB,QAAO;AAAW,iBAAO,KAAK,MAAM,MAAK;;AAAW,eAAO,KAAK,MAAM,MAAK;;AAAgD;AAAqC,eAAO,mBAAmB;;AAAO;AAAyC;AAAI,qBAAW,KAAK,OAAK,QAAO,aAAW,UAAQ;AAAI,qCAA2B,WAAW;AAAQ,iBAAO;;;;AAAa;AAAgD,wBAAc,kBAAgB;AAAE,sBAAY;AAA4B,YAAG,iBAAe;AAAS,iBAAO;;AAAM,4BAAkB;AAAM,0BAAgB;AAAW,YAAG,gBAAc;AAAa,iBAAO;;AAAM,0BAAgB;AAAS,2BAAgB,GAAE,WAAS,GAAE,WAAS;AAAG,kCAAsB,UAAS,KAAE,MAAG;AAAS,8BAAkB,KAAK,IAAI,mBAAkB,gBAAc;AAAW,wBAAY,KAAK,IAAI,aAAY,QAAQ,KAAK,IAAI,aAAY,eAAc,oBAAmB;AAAgB,4BAAgB,0BAA0B;AAAS,cAAG;AAAa,mBAAO;;;AAAM,eAAO;;AAAM,qBAAa,CAAC,UAAS,GAA
E,YAAW,GAAE,YAAW,GAAE,SAAQ,GAAE,YAAW,GAAE,wBAAuB,GAAE,mBAAkB,GAAE,uBAAsB,GAAE,wBAAuB,GAAE,uBAAsB,GAAE,YAAW,GAAE,2BAA0B,MAAK,iBAAgB,MAAK,iBAAgB,MAAK,gCAA+B,OAAM,yBAAwB;AAAW,qBAAU,SAAS,cAAc,SAAO,GAAE,KAAG,GAAE,EAAE;AAAG,mBAAS,eAAe;;AAAG,iBAAS,gBAAc;AAAG,iBAAS,gBAAc;SAAI,8BAA6B;AAAW,YAAG,CAAC,SAAS;AAAgC,qBAAW,KAAK,SAAS;AAAyB,mBAAS,iCAA+B;;SAAO,eAAc,IAAG,WAAU;AAA6C;AAA2C,cAAG,KAAK,UAAQ,KAAK;AAAO,mBAAO;AAAM,yBAAa;AAAM,gBAAG,KAAK,OAAI,KAAK;AAAG,qBAAO;;AAAM,iBAAO;;AAAK,sBAAa,SAAS;AAAe,qBAAS,SAAS,cAAc;AAAG,cAAG,KAAK,kBAAgB,kBAAgB,uBAAuB,KAAK,UAAS;AAAW;;;AAAQ,iBAAS,cAAc,KAAK,CAAC,gBAA8B,YAAsB;AAAoB,iBAAS,cAAc,KAAK;AAAc,iBAAO,EAAE,aAAW,EAAE;;SAAc,qBAAoB;AAAyB,qBAAU,GAAE,IAAE,SAAS,cAAc,QAAO,EAAE;AAAG,cAAG,SAAS,cAAc,GAAG,kBAAgB;AAAgB,qBAAS,cAAc,OAAO,GAAE;AAAG,cAAE;;;SAAK,gCAA+B;AAAW,eAAO,SAAS,kBAAgB,SAAS,oBAAoB;SAAqB,kBAAiB;AAAW,YAAG,CAAC,SAAS;AAAkC;;AAAO,qBAAU,GAAE,IAAE,SAAS,cAAc,QAAO,EAAE;AAAG,qBAAS,SAAS,cAAc;AAAG,mBAAS,cAAc,OAAO,GAAE;AAAG,YAAE;AAAE,eAAK,eAAe,MAAM,MAAK,KAAK;;SAAY,gBAAe,GAAE,qBAAoB,MAAK,eAAc,IAAG,2BAA0B;AAAiC,qBAAU,GAAE,IAAE,SAAS,cAAc,QAAO,EAAE;AAAG,cAAG,SAAS,cAAc,GAAG,UAAQ,UAAS,EAAC,mBAAiB,mBAAiB,SAAS,cAAc,GAAG;AAAkB,qBAAS,eAAe;;;SAAQ,gBAAe;AAAY,gBAAM,SAAS,cAAc;AAAG,UAAE,OAAO,oBAAoB,EAAE,iBAAgB,EAAE,mBAAkB,EAAE;AAAY,iBAAS,cAAc,OAAO,GAAE;SAAI,yBAAwB;AAAuB,6BAAmB;AAA+B,YAAE,SAAS;AAAe,mBAAS,sBAAoB;AAAa,mBAAS;AAAmB,uBAAa,YAAY;AAAO,mBAAS;AAAmB,YAAE,SAAS;;AAAgB,YAAG,aAAa;AAAc,uBAAa,oBAAkB;AAAe,uBAAa,OAAO,iBAAiB,aAAa,iBAAgB,gBAAe,aAAa;AAAY,mBAAS,cAAc,KAAK;AAAc,mBAAS;;AAAoC,uBAAU,GAAE,IAAE,SAAS,cAAc,QAAO,EAAE;AAAG,gBAAG,SAAS,cAAc,GAAG,UAAQ,aAAa,UAAQ,SAAS,cAAc,GAAG,mBAAiB,aAAa;AAAiB,uBAAS,eAAe;;;;SAAS,gCAA+B;AAAuE,uBAAa;AAAY,sBAAY,WAAW;AAAI,4BAAoB,WAAS,KAAG;AAAY,4BAAoB,UAAQ,KAAG,KAAG;AAAU,4BAAoB,UAAQ,KAAG,KAAG;AAAS,2CAAmC,cAAa,WAAU,kBAAiB,WAAU;AAAS,qBAAa;SAAW,iCAAgC;AAAuB,gBAAO;eAAmB;AAAE,mBAAO;eAAO;AAAE,mBAAO,QAAQ;;AAA4C,mBAAO;;SAAe,sBAAqB;AAAiB,YAAG,CAAC;AAAO,iBAAM;AAAG,YAAG,UAAQ;AAAO,iBAAM;AAAU,YAAG,UAAQ;AAAO,iBAAM;AAAU,eAAO,UAAQ,OAAO,WAAS,OAAO,WAAS;SAAI,mBAAkB;AAAW,eAAO,SAAS,qBAAmB,SAAS;;AAA0B;AAAmC,qBAAW,gBAAgB,YAAU;AAAE,sBAAY,QAAQ;AAAQ,qBAAa,UAAS,SAAQ;AAAQ,eAAO;;AAAQ;AAA0G,uBAAa;AAAY,sBAAY,WAAW;AAAI,8BAAoB;AAAE,YAAG;AAAc,4BAAgB,gBAAgB;;AAAc,4BAAoB,WAAS,KAAG;AAAgB,4BAAoB,UAAQ,KAAG,KAAG;AAAM,4BAAoB,UAAQ,KAAG,KAAG;AAAO,2CAAmC,cAAa,WAAU,GAAE,iBAAgB;AAAS,qBAAa;;AAAU;AAAuG,uBAAa,eAAa,aAAa,gBAAc;AAAG,iEAAyD,cAAa,cAAa,OAAM;;AAAQ;AAA2C,eAAO,UAAQ,IAAE,aAAa,WAAS;;AAAQ,+BAAuB,CAAC,GAAE,OAAO,aAAW,cAAY,WAAS,GAAE,OAAO,WAAS,cAAY,SAAO;AAAG;AAAmC,iBAAO,yBAAyB;AAAQ,yBAAe,mBAAmB,WAAU,QAAO,aAAW,cAAY,SAAS,cAAc,UAAQ;AAAW,eAAO;;AAAW;AAAyC,eAAO,kBAAkB;;AAAQ;AAAiF,qBAAW,wBAAwB;AAAQ,YAAG,CAAC;AAAO,iBAAM;AAAG,YAAG,OAAO;AAAiB,8BAAoB,OAAO,mBAAiB,KAAG;AAAM,8BAAoB,OAAO,kBAAgB,KAAG,KAAG;;AAAO,YAAG,OAAO,mBAAiB,CAAC,OAAO;AAA6B,cAAG,OAAO;AAAgB,qBAAO,OAAO;AAAgB,mCAAuB;AAAM,cAAG,OAAO,eAAa,OAAO,YAAY;AAAO,+BAAiB,OAAO,YAAY,MAAM,aAAa;AAAM,iCAAmB,aAAa,OAAK,KAAG,aAAa,OAAK,KAAG,aAAa,OAAK,OAAO,SAAO,aAAa,OAAK,OAAO;;AAAO,iBAAO,QAAM;AAAM,iBAAO,SAAO;AAAO,cAAG;AAAoB,mBAAO,YAAY,MAAM,SAAS,GAAE,GAAE,OAAM;;mBAAiB,OAAO;AAAiB,6BAAiB,oBAAoB,OAAO,kBAAgB,KAAG;AAAG,gEAAsD,cAAa,QAAO,OAAM;AAAQ,iBAAO;;AAAO,iBAAM;;AAAG,eAAO;;AAAE;AAA8E,YAAG;AAAuB,iBAAO,oCAAoC,GAAE,GAAE,QAAO,OAAM;AAAQ,eAAO,mDAAmD,QAAO,OAAM;;AAAQ;AAAkE,qBAAW,wBAAwB;AAAQ,YAAG;AAAQ,iBAAO,mDAAmD,QAAO,OAAM;;AAAa,iBAAO,gDAAgD,QAAO,OAAM;;;AAAS;AAA0D,oBAAU,YAAU;;AAAE;AAAoD,mBAAS,WAAS;AAAE,eAAK,OAAK;;AAAE;AAAoD,kBAAQ,IAAI,aAAa;AAA0B,YAAG;AAAK,cAAI,yBAAuB;AAAwB,gBAAI,4BAA4B,OAAM;;AAAU,cAAI,yBAAuB;AAAqC,gBAAI,4BAA4B,MAAK,OAAM,OAAM;;AAAY,cAAI,2BAAyB;AAA4C,gBAAI,8BAA8B,MAAK,OAAM,MAAK,SAAQ;;AAAY,iB
AAO;;;AAAG;AAAqD,kBAAQ,IAAI,aAAa;AAA2B,YAAG;AAAK,cAAI,uBAAqB;AAAW,mBAAO,IAAI;;AAA2B,cAAI,uBAAqB;AAAc,gBAAI,wBAAwB;;AAAM,cAAI,qBAAmB;AAAc,gBAAI,sBAAsB;;AAAM,cAAI,mBAAiB;AAAc,mBAAO,IAAI,oBAAoB;;AAAM,iBAAO;;;AAAG;AAAgD,kBAAQ,IAAI,aAAa;AAAsB,YAAG;AAAK,cAAI,iBAAe;AAAiB,gBAAI,oBAAoB,GAAE;;AAAO,iBAAO;;;AAAG,eAAO,CAAC,SAAQ,GAAE,WAAU,GAAE,SAAQ,IAAG,eAAc,IAAG,UAAS,IAAG,cAAa,IAAG,eAAc,IAAG,UAAS,IAAG,UAAS,IAAG,SAAQ,IAAG,MAAK,IAAG,UAAS,IAAG,gBAAe,MAAK,mBAAkB,IAAG,iBAAgB,IAAG,cAAa,IAAG,aAAY,IAAG,iBAAgB,GAAE,MAAK;AAAW,kCAAwB,IAAI,aAAa,GAAG;AAAuB,qBAAU,GAAE,IAAE,GAAG,uBAAsB;AAAK,aAAG,yBAAyB,KAAG,oBAAoB,SAAS,GAAE,IAAE;;AAAG,gCAAsB,IAAI,WAAW,GAAG;AAAuB,qBAAU,GAAE,IAAE,GAAG,uBAAsB;AAAK,aAAG,uBAAuB,KAAG,kBAAkB,SAAS,GAAE,IAAE;;SAAK,aAAY;AAAgC,YAAG,CAAC,GAAG;AAAW,aAAG,YAAU;;SAAY,UAAS;AAAgB,kBAAQ,GAAG;AAAU,qBAAU,MAAM,QAAO,IAAE,KAAI;AAAK,gBAAM,KAAG;;AAAK,eAAO;SAAK,uBAAsB,KAAI,0BAAyB,CAAC,IAAG,wBAAuB,CAAC,IAAG,WAAU;AAAqC,qBAAW;AAAG,qBAAU,GAAE,IAAE,OAAM,EAAE;AAAG,oBAAQ,SAAO,oBAAoB,SAAO,IAAE,KAAG,KAAG;AAAG,oBAAQ,aAAa,oBAAoB,SAAO,IAAE,KAAG,IAAG,MAAI,IAAE,SAAU;;AAAK,eAAO;SAAQ,eAAc;AAAwC,kBAAQ,OAAO,WAAW,SAAQ;AAAwB,YAAG,CAAC;AAAI,iBAAO;AAAE,qBAAW,GAAG,gBAAgB,KAAI;AAAwB,eAAO;SAAQ,iBAAgB;AAAqC,qBAAW,QAAQ;AAAG,4BAAoB,SAAO,KAAG,KAAG;AAAgB,sBAAY,CAAC,QAAc,YAAW,wBAAuB,SAAQ,uBAAuB,cAAa,OAAM;AAAK,YAAG,IAAI;AAAO,cAAI,OAAO,cAAY;AAAQ,WAAG,SAAS,UAAQ;AAAQ,YAAG,OAAO,uBAAuB,8BAA4B,eAAa,uBAAuB;AAA2B,aAAG,eAAe;;AAAS,eAAO;SAAQ,oBAAmB;AAAwB,WAAG,iBAAe,GAAG,SAAS;AAAe,eAAO,MAAI,QAAM,GAAG,kBAAgB,GAAG,eAAe;AAAM,eAAM,CAAE,kBAAe,CAAC;SAAQ,YAAW;AAAwB,eAAO,GAAG,SAAS;SAAgB,eAAc;AAAwB,YAAG,GAAG,mBAAiB,GAAG,SAAS;AAAe,aAAG,iBAAe;AAAK,YAAG,OAAO,aAAW;AAAS,mBAAS,0BAA0B,GAAG,SAAS,eAAe,MAAM;AAAQ,YAAG,GAAG,SAAS,kBAAgB,GAAG,SAAS,eAAe,MAAM;AAAO,aAAG,SAAS,eAAe,MAAM,OAAO,cAAY;AAAU,cAAM,GAAG,SAAS,eAAe;AAAQ,WAAG,SAAS,iBAAe;SAAM,gBAAe;AAAkB,YAAG,CAAC;AAAQ,oBAAQ,GAAG;AAAe,YAAG,QAAQ;AAAmB;AAAO,gBAAQ,qBAAmB;AAAK,qBAAU,QAAQ;AAAM,8CAAsC;AAAO,+CAAuC;AAAO,0CAAkC;AAAO,eAAM,wBAAsB,OAAM,aAAa;AAA4B,6CAAmC,CAAC,qBAAoB,0BAAyB,4BAA2B,2BAA0B,iCAAgC,uBAAsB,0BAAyB,kCAAiC,kBAAiB,sBAAqB,0BAAyB,4BAA2B,iCAAgC,oBAAmB,0BAAyB,sBAAqB,kCAAiC,+BAA8B,4BAA2B,YAAW,iCAAgC,4BAA2B,gCAA+B,iCAAgC,0BAAyB,sCAAqC,mCAAkC;AAAyC,mBAAS,OAAM,4BAA0B;AAAG,aAAK,QAAQ;AAAc,cAAG,+BAA+B,QAAQ,QAAM;AAAI,mBAAM,aAAa;;;SAAS,sBAAqB;AAAkB,gBAAM,GAAG,SAAS;AAAS,qBAAW,GAAG,aAAa,WAAS,CAAC,UAAS,IAAG,kBAAiB,GAAE,oBAAmB,IAAG,2BAA0B;AAAI,qBAAW,OAAO;AAAS,0BAAgB,MAAM,oBAAoB,GAAE;AAAO,qBAAU,GAAE,IAAE,aAAY,EAAE;AAAG,kBAAM,MAAM,iBAAiB,GAAE;AAAG,qBAAS,EAAE;AAAK,iBAAO,mBAAiB,KAAK,IAAI,OAAO,kBAAiB,KAAK,SAAO;AAAG,cAAG,KAAK,MAAM,OAAK;AAAK,mBAAK,KAAK,MAAM,GAAE,KAAK,YAAY;;AAAM,oBAAQ,MAAM,mBAAmB,GAAE;AAAM,cAAG;AAAK,qBAAO,GAAG,SAAS,GAAG;AAAU,mBAAO,QAAM,CAAC,EAAE,MAAK;AAAI,eAAG,SAAS,MAAI;AAAI,yBAAU,GAAE,IAAE,EAAE,MAAK,EAAE;AAAG,sBAAM,OAAK,MAAI,IAAE;AAAI,oBAAI,MAAM,mBAAmB,GAAE;AAAG,mBAAG,GAAG,SAAS,GAAG;AAAU,iBAAG,SAAS,MAAI;;;;;AAAS,iDAAyC,CAAC,WAAU,aAAY;AAAoB;AAAgE,gCAAsB;AAAG,gBAAM,cAAY;AAAE,0BAAkB,WAAS,CAAC,CAAC,oBAAoB,IAAG,MAAG;AAAI,0BAAkB,WAAS,CAAC,CAAC,oBAAoB,IAAG,MAAG;AAAI,0BAAkB,aAAW,CAAC,CAAC,oBAAoB,IAAG,MAAG;AAAI,0BAAkB,eAAa,CAAC,CAAC,oBAAoB,IAAG,OAAI;AAAI,0BAAkB,wBAAsB,CAAC,CAAC,oBAAoB,IAAG,OAAI;AAAI,0BAAkB,2BAAyB,CAAC,CAAC,oBAAoB,IAAG,OAAI;AAAI,8BAAoB,oBAAoB,IAAG,OAAI;AAAI,0BAAkB,qBAAmB,qCAAqC;AAAiB,0BAAkB,kCAAgC,CAAC,CAAC,oBAAoB,IAAG,OAAI;AAAI,0BAAkB,eAAa,oBAAoB,IAAG,OAAI;AAAI,0BAAkB,eAAa,oBAAoB,IAAG,OAAI;AAAI,0BAAkB,4BAA0B,oBAAoB,IAAG,OAAI;AAAI,0BAAkB,sBAAoB,oBAAoB,IAAG,OAAI;AAAI,0BAAkB,2BAAyB,oBAAoB,IAAG,OAAI;AAAI,0BAAkB,+BAA6B,oBAAoB,IAAG,OAAI;AAAI,qBAAW,wBAAwB;AAAQ,YAAG,CAAC;AAAQ,iBAAM;;AAAG,YAAG,kBAAkB;AAAqB,iBAAM;;AAAG,4BAAkB
,GAAG,cAAc,QAAO;AAAmB,eAAO;;AAAc;AAAiD,eAAO,oCAAoC,IAAG;;AAAI,iBAAS,CAAC,WAAU;AAAmB,0BAAgB;AAAgE,eAAO,YAAY,KAAK,UAAU,MAAM;SAAI,gBAAe;AAA+B,iBAAO;AAAE,qBAAU,MAAM,SAAO,GAAE,KAAG,GAAE;AAAK,qBAAS,MAAM;AAAG,cAAG,SAAO;AAAK,kBAAM,OAAO,GAAE;qBAAW,SAAO;AAAM,kBAAM,OAAO,GAAE;AAAG;qBAAa;AAAI,kBAAM,OAAO,GAAE;AAAG;;;AAAM,YAAG;AAAgB,iBAAK,IAAG;AAAM,kBAAM,QAAQ;;;AAAO,eAAO;SAAO,WAAU;AAAe,yBAAe,KAAK,OAAO,OAAK,qBAAkB,KAAK,OAAO,QAAM;AAAI,eAAK,KAAK,eAAe,KAAK,MAAM,KAAK,OAAO;AAAY,iBAAM,CAAC,CAAC;YAAI,CAAC,YAAY,KAAK;AAAK,YAAG,CAAC,QAAM,CAAC;AAAY,iBAAK;;AAAI,YAAG,QAAM;AAAe,kBAAM;;AAAI,eAAO,cAAW,MAAI,MAAI;SAAM,SAAQ;AAAe,qBAAW,KAAK,UAAU,cAAW,OAAO,UAAO,OAAO;AAAG,YAAG,CAAC,QAAM,CAAC;AAAK,iBAAM;;AAAI,YAAG;AAAK,gBAAI,IAAI,OAAO,GAAE,IAAI,SAAO;;AAAG,eAAO,OAAK;SAAK,UAAS;AAAe,YAAG,SAAO;AAAI,iBAAM;AAAI,wBAAc,KAAK,YAAY;AAAK,YAAG,cAAY;AAAG,iBAAO;AAAK,eAAO,KAAK,OAAO,YAAU;SAAI,SAAQ;AAAe,eAAO,KAAK,UAAU,MAAM;SAAI,MAAK;AAAW,oBAAU,MAAM,UAAU,MAAM,KAAK,WAAU;AAAG,eAAO,KAAK,UAAU,MAAM,KAAK;SAAO,OAAM;AAAc,eAAO,KAAK,UAAU,IAAE,MAAI;;AAAK,qBAAa,CAAC,UAAS,IAAG,SAAQ,CAAC,MAAK,IAAG,KAAI,WAAU;AAAsB,sBAAW,SAAS,QAAQ;AAAQ,YAAG,SAAO,KAAG,SAAO;AAAK,UAAA,YAAS,IAAE,MAAI,KAAK,kBAAkB,SAAO;AAAI,kBAAO,SAAO;;AAAO,kBAAO,KAAK;;SAAQ,SAAQ,QAAU,KAAI;AAAW,iBAAS,WAAS;AAAE,kBAAQ,oBAAoB,SAAS,UAAQ,KAAG;AAAG,eAAO;SAAK,QAAO;AAAc,kBAAQ,aAAa;AAAK,eAAO;SAAK,OAAM;AAAmB,eAAO;;AAAM;AAAuB,YAAG;AAAuB,iBAAO,oCAAoC,GAAE,GAAE;AAAI,eAAO;;AAAE;AAA8D,YAAG;AAAuB,iBAAO,oCAAoC,GAAE,GAAE,IAAG,YAAW,aAAY,QAAO;;AAAW;AAAuC,YAAG;AAAuB,iBAAO,oCAAoC,GAAE,GAAE,IAAG,KAAI,QAAO;AAAM,kBAAQ;AAAE,qBAAU,GAAE,IAAE,QAAO;AAAK,oBAAQ,oBAAoB,MAAI,IAAE,KAAG;AAAG,oBAAQ,oBAAoB,MAAK,KAAE,IAAE,MAAI;AAAG,uBAAU,GAAE,IAAE,KAAI;AAAK,qBAAS,UAAU,IAAG,mBAAmB,MAAI;;AAAI,iBAAK;;AAAI,4BAAoB,QAAM,KAAG;AAAI,eAAO;;AAAE;AAAuC,sBAAY,QAAQ,aAAa;AAAM,YAAG;AAAQ;;AAAU;AAA4C,YAAG,QAAQ,iBAAe;AAAM,kBAAQ,eAAa;;AAAG,gBAAQ,aAAa,KAAK;AAAW,qBAAW,SAAQ;;;AAAO;AAAsC,YAAG;AAAuB,gBAAK;AAAwF,sBAAW,QAAQ;AAAe,YAAG,QAAO,YAAU;AAAU,gBAAK;AAAkB,YAAG,CAAC,aAAa;AAAY,gBAAK;AAAkC,gBAAQ,eAAe,KAAK;AAAQ,wBAAc,QAAQ,MAAI;AAAG,qBAAU,GAAE,IAAE,KAAI,EAAE;AAAG,8BAAoB,YAAU,IAAE,KAAG,KAAG;;AAAE,wBAAc,aAAa,YAAU,aAAa;AAAU,sBAAY,QAAQ,SAAS,aAAa,eAAa,CAAC,iBAAc,WAAU,aAAa,WAAU,WAAU,aAAa,WAAU,mBAAkB,aAAa,mBAAkB,QAAO,aAAa,aAAY,kBAAiB,aAAa;AAAa,kBAAQ,QAAQ,oBAAkB;AAAE,gBAAQ,MAAM,qBAAoB,MAAK,MAAG,IAAG;AAAG,gBAAQ,MAAM,qBAAoB,MAAK,MAAG,IAAG;AAAG,gBAAQ,MAAM,qBAAoB,MAAK,MAAG,IAAG;AAAG,gBAAQ,MAAM,qBAAoB,MAAK,OAAI,IAAG,aAAa;AAAU,gBAAQ,MAAM,qBAAoB,MAAK,QAAK,IAAG;AAAW,gBAAQ,MAAM,qBAAoB,MAAK,OAAI,IAAG;AAAG,gBAAQ,MAAM,qBAAoB,MAAK,OAAI,IAAG,QAAQ;AAAkB,gBAAQ,MAAM,qBAAoB,MAAK,OAAI,IAAG;AAAI,gBAAQ,MAAM,qBAAoB,MAAK,QAAK,IAAG,aAAa;AAAW,gBAAQ,MAAM,qBAAoB,MAAK,OAAI,IAAG,aAAa;AAAW,gBAAQ,MAAM,qBAAoB,MAAK,OAAI,IAAG;AAAW,gBAAQ,MAAM,qBAAoB,MAAK,OAAI,KAAG,IAAG;AAAW,gBAAQ,MAAM,qBAAoB,MAAK,OAAI,MAAI,IAAG,aAAa;AAAU,gBAAQ,MAAM,qBAAoB,MAAK,OAAI,MAAI,IAAG,aAAa;AAAa,gBAAQ,MAAM,qBAAoB,MAAK,OAAI,MAAI,IAAG,aAAa;AAAW,0BAAgB;AAA8B,4BAAkB,cAAY;AAAG,gBAAQ,MAAM,qBAAoB,MAAK,QAAK,IAAG;AAAe,gBAAO,UAAQ;AAAQ,kBAAQ,CAAC,KAAM,OAAM,eAAgB,aAAa,cAAa,KAAM,aAAa,KAAI,kBAAmB,aAAa,aAAY,cAAe,aAAa,aAAY,gBAAiB,aAAa,oBAAmB,WAAY,aAAa,WAAU,WAAY,aAAa;AAAW,gBAAO,aAAW;AAAW,cAAI,OAAK,YAAY;AAAM,kBAAO,YAAY,KAAI,aAAa;;AAAe,YAAG,QAAO;AAAQ,kBAAO;AAAa,iBAAO,QAAO;;;AAAY;AAA0D,YAAG,CAAC,UAAQ,CAAC;AAAW,iBAAO,YAAY;AAAO,YAAG,CAAC;AAAQ,cAAI;AAA4D,iBAAO,YAAY;;AAAM,oBAAS,oBAAoB,SAAO,MAAI;AAAG,YAAG,UAAO;AAAQ,cAAI,+CAA6C,SAAO;AAAwE,iBAAO,YAAY;;AAAM,0BAAgB,QAAQ,KAAK,qBAAoB,SAAO,MAAI,MAAI;AAAG,wBAAc,QAAQ,KAAK,qBAAoB,SAAO,MAAI,MAAI;AAAG,YAAG;AAAO,8BAAoB,UAAQ,KAAG;AAAY,YAAG;AAAW,8BAAoB,cAAY,KAAG;AAAU,eAAO;;AAAE;AAAyB,eAAO,gBAAc;;AAAE,aAAO,mBAAiB;AAAc;AAA6D,YAA
G,OAAO,sBAAoB;AAAa,cAAI;AAAuF,iBAAO;;AAAE,YAAG,CAAC;AAAa,cAAI;AAAqD,iBAAO;;AAAG,2BAAiB;AAAG,oBAAU;AAAE,YAAG,0BAAyB,cAAa,WAAS,KAAG;AAAQ,iBAAO,sCAAsC,WAAU,aAAY,MAAK,eAAc;;AAAK,YAAG;AAAM,iBAAO;AAAM,wBAAc;AAAE,wBAAc;AAAE,uBAAa;AAAE,0BAAgB;AAAE,wBAAc;AAAE,YAAG;AAAM,sBAAU,oBAAoB,QAAM;AAAG,uBAAW;AAAM,sBAAU,oBAAoB,OAAK,KAAG;AAAG,qBAAS,oBAAoB,OAAK,MAAI,OAAK;AAAE,6BAAiB,oBAAoB,OAAK,MAAI,OAAK;AAAE,cAAG;AAAc,kCAAoB,oBAAoB,OAAK,MAAI;AAAG,gCAAkB,oBAAoB,OAAK,MAAI;AAAG,kCAAoB,QAAQ,sCAAoC,QAAQ,sCAAoC;AAAgB,mCAAuB,iBAAgB,OAAK,IAAG,OAAK;AAAI,0BAAY,oBAAoB,OAAK,MAAI;AAAG,wBAAU,oBAAoB,OAAK,MAAI;AAAG,gCAAoB,OAAK,MAAI,KAAG;AAAgB,gCAAoB,OAAK,MAAI,KAAG;;AAAmB,0BAAY,oBAAoB,OAAK,MAAI;AAAG,wBAAU,oBAAoB,OAAK,MAAI;;;AAAS,sBAAU;;AAAQ,gCAAsB,aAAW;AAAE,YAAG;AAAmB,sBAAU,UAAU,IAAG;;AAAgB,uBAAW;AAAU,kBAAO,YAAU;;AAAG,gCAAqB,QAAQ;AAAK,qBAAU,GAAE,IAAE,OAAK,GAAE,EAAE;AAAE,8BAAqB,sBAAkB,KAAG,KAAG;AAAE,4BAAoB,eAAa,KAAG;AAAiB,4BAAoB,oBAAiB,MAAI,KAAG;AAAiB,sBAAY,oBAAiB;AAAI,4BAAoB,WAAS,KAAG;AAAQ,2BAAiB,CAAC,WAAoB,WAAoB,mBAAoC,aAAwB,WAAoB,UAAkB,cAAa,eAAc,aAAY,mBAAiB,oBAAmB,iBAAgB,KAAQ;AAA2B,YAAG;AAAwB,uBAAa,MAAI;AAAc,sBAAY,cAAa;;AAAmB,yBAAe;;AAAc,eAAO;;AAAE;AAAoB,YAAE,CAAC;AAAE,eAAO,KAAG,IAAG,CAAC,WAAW,IAAG,OAAK,CAAC,UAAU,IAAE;;AAAK;AAAwB,YAAG;AAAuB,iBAAO,oCAAoC,GAAE,GAAE;AAAM,gBAAO;eAAW;AAAG,mBAAO;eAAW;AAAG,8BAAgB;AAAW,mBAAO,cAAY;eAAW;eAAS;eAAS;eAAQ;eAAS;eAAS;eAAQ;eAAS;eAAQ;eAAQ;eAAQ;eAAQ;eAAQ;eAAS;eAAQ;eAAQ;eAAS;eAAS;eAAO;eAAQ;eAAQ;eAAS;eAAS;eAAQ;eAAQ;eAAQ;eAAS;eAAQ;eAAQ;eAAQ;eAAQ;eAAQ;eAAS;eAAQ;eAAQ;eAAQ;eAAQ;eAAQ;eAAQ;eAAQ;eAAQ;AAAG,mBAAO;eAAY;eAAQ;eAAS;eAAS;eAAS;eAAQ;eAAQ;eAAS;eAAS;eAAS;eAAS;eAAS;eAAS;eAAS;eAAS;eAAS;eAAS;eAAS;eAAS;eAAS;eAAQ;eAAQ;eAAS;eAAS;eAAS;eAAS;eAAS;eAAS;eAAQ;eAAQ;eAAQ;eAAS;AAAG,mBAAM;eAAQ;eAAS;eAAS;eAAO;eAAS;eAAO;eAAS;eAAS;eAAS;eAAQ;eAAQ;eAAS;eAAS;eAAS;eAAQ;AAAG,mBAAO;eAAO;eAAQ;eAAQ;eAAQ;eAAQ;AAAE,mBAAO;eAAU;eAAQ;eAAQ;AAAG,mBAAO;eAAQ;eAAQ;eAAQ;AAAG,mBAAO;eAAgB;eAAQ;AAAE,mBAAO;eAAW;eAAQ;AAAG,mBAAO;eAAQ;eAAQ;AAAG,mBAAO;eAAU;AAAE,mBAAO;eAAa;AAAE,mBAAO;eAAW;AAAG,mBAAO;eAAW;AAAG,mBAAO;eAAW;AAAG,mBAAO;eAAW;AAAG,mBAAO;eAAS;AAAG,mBAAO;eAAS;AAAG,mBAAO;eAAS;AAAG,mBAAO;eAAS;AAAE,mBAAO;eAAS;AAAI,mBAAO;eAAQ;AAAG,mBAAO;eAAQ;AAAE,mBAAO;eAAQ;AAAE,mBAAO;eAAO;AAAG,mBAAO;eAAO;AAAI,gBAAG,OAAO,cAAY;AAAS,qBAAO,UAAU,0BAAwB;AAAE,mBAAO;;;AAAG,iBAAS;AAAI,eAAM;;AAAG,UAAG,CAAC;AAAuB,gBAAQ;;AAA2B,gBAAQ;AAAa;AAAU,SAAG;AAAO,iCAAyB,CAAC,MAAK,SAAQ,iDAAgD,WAAU,UAAS,WAAU;AAAU,0BAAkB,CAAC,GAAI,gBAAe,GAAI,cAAa,GAAI,kCAAiC,GAAI,QAAO,GAAI,mDAAkD,GAAI,wBAAuB,GAAI,wBAAuB,GAAI,qBAAoB,GAAI,oCAAmC,GAAI,oCAAmC,GAAI,wBAAuB,GAAI,+BAA8B,GAAI,uCAAsC,GAAI,yBAAwB,GAAI,qCAAoC,GAAI,uCAAsC,GAAI,6BAA4B,GAAI,kCAAiC,GAAI,WAAU,GAAI,UAAS,GAAI,WAAU,GAAI,gBAAe,QAAS,cAAY,OAAO,eAAc,GAAI,sBAAqB,GAAI,uBAAsB,GAAI,iBAAgB,GAAI,eAAc,GAAI,SAAQ,GAAI,UAAS,OAAQ;AAAW,gBAAQ;AAAa,aAAO,SAAO;AAAI,+BAAuB,OAAO,wBAAsB;AAAW,eAAO,sBAAmB,OAAO,wBAAsB,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,kBAAU,OAAO,WAAS;AAAW,eAAO,SAAM,OAAO,WAAS,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,6BAAqB,OAAO,sBAAoB;AAAW,eAAO,oBAAiB,OAAO,sBAAoB,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,0BAAkB,OAAO,mBAAiB;AAAW,eAAO,iBAAc,OAAO,mBAAiB,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,qBAAa,OAAO,cAAY;AAAW,eAAO,YAAS,OAAO,cAAY,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,kBAAU,OAAO,WAAS;AAAW,eAAO,SAAM,OAAO,WAAS,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,oBAAY,OAAO,aAAW;AAAW,eAAO,WAAQ,OAAO,aAAW,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,qBAAa,OAAO,cAAY;AAAW,eAAO,YAAS,OAAO,cAAY,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,yBAAiB,OAAO,kBAAgB;AAAW,eAAO,gBAAa,OAAO,kBAAgB,OAAO,OAAO,MAAM,MA
AM,MAAK;;AAAY,yBAAiB,OAAO,kBAAgB;AAAW,eAAO,gBAAa,OAAO,kBAAgB,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,oBAAY,OAAO,aAAW;AAAW,eAAO,WAAQ,OAAO,aAAW,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,iCAAyB,OAAO,0BAAwB;AAAW,eAAO,wBAAqB,OAAO,0BAAwB,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,2BAAmB,OAAO,oBAAkB;AAAW,eAAO,kBAAe,OAAO,oBAAkB,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,oBAAY,OAAO,aAAW;AAAW,eAAO,WAAQ,OAAO,aAAW,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,0BAAkB,OAAO,mBAAiB;AAAW,eAAO,iBAAc,OAAO,mBAAiB,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,mCAA2B,OAAO,4BAA0B;AAAW,eAAO,0BAAuB,OAAO,4BAA0B,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,mBAAW,OAAO,YAAU;AAAW,eAAO,UAAO,OAAO,YAAU,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,2BAAmB,OAAO,oBAAkB;AAAW,eAAO,kBAAe,OAAO,oBAAkB,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,sBAAc,OAAO,eAAa;AAAW,eAAO,aAAU,OAAO,eAAa,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,4BAAoB,OAAO,qBAAmB;AAAW,eAAO,mBAAgB,OAAO,qBAAmB,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,yBAAiB,OAAO,kBAAgB;AAAW,eAAO,gBAAa,OAAO,kBAAgB,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,kCAA0B,OAAO,2BAAyB;AAAW,eAAO,yBAAsB,OAAO,2BAAyB,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,oBAAY,OAAO,aAAW;AAAW,eAAO,WAAQ,OAAO,aAAW,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,sBAAc,OAAO,eAAa;AAAW,eAAO,aAAU,OAAO,eAAa,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,qBAAa,OAAO,cAAY;AAAW,eAAO,YAAS,OAAO,cAAY,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,0BAAkB,OAAO,mBAAiB;AAAW,eAAO,iBAAc,OAAO,mBAAiB,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,kBAAU,OAAO,WAAS;AAAW,eAAO,SAAM,OAAO,WAAS,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,uBAAe,OAAO,gBAAc;AAAW,eAAO,cAAW,OAAO,gBAAc,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,wBAAgB,OAAO,iBAAe;AAAW,eAAO,eAAY,OAAO,iBAAe,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,qBAAa,OAAO,cAAY;AAAW,eAAO,YAAS,OAAO,cAAY,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,qBAAa,OAAO,cAAY;AAAW,eAAO,YAAS,OAAO,cAAY,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,qBAAa,OAAO,cAAY;AAAW,eAAO,YAAS,OAAO,cAAY,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,sBAAc,OAAO,eAAa;AAAW,eAAO,aAAU,OAAO,eAAa,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,oBAAY,OAAO,aAAW;AAAW,eAAO,WAAQ,OAAO,aAAW,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,iCAAyB,OAAO,0BAAwB;AAAW,eAAO,wBAAqB,OAAO,0BAAwB,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,iCAAyB,OAAO,0BAAwB;AAAW,eAAO,wBAAqB,OAAO,0BAAwB,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,iCAAyB,OAAO,0BAAwB;AAAW,eAAO,wBAAqB,OAAO,0BAAwB,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,sBAAc,OAAO,eAAa;AAAW,eAAO,aAAU,OAAO,eAAa,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,oBAAY,OAAO,aAAW;AAAW,eAAO,WAAQ,OAAO,aAAW,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,mBAAW,OAAO,YAAU;AAAW,eAAO,UAAO,OAAO,YAAU,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,mBAAW,OAAO,YAAU;AAAW,eAAO,UAAO,OAAO,YAAU,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,kBAAU,OAAO,WAAS;AAAW,eAAO,SAAM,OAAO,WAAS,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,mBAAW,OAAO,YAAU;AAAW,eAAO,UAAO,OAAO,YAAU,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,4BAAoB,OAAO,qBAAmB;AAAW,eAAO,mBAAgB,OAAO,qBAAmB,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,qBAAa,OAAO,cAAY;AAAW,eAAO,YAAS,OAAO,cAAY,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,8BAAsB,OAAO,uBAAqB;AAAW,eAAO,qBAAkB,OAAO,uBAAqB,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,mBAAW,OAAO,YAAU;AAAW,eAAO,UAAO,OAAO,YAAU,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,uBAAe,OAAO,gBAAc;AAAW,eAAO,cAAW,OAAO,gBAAc,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,sBAAc,OAAO,eAAa;AAAW,eAAO,aAAU,OAAO,eAAa,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,qBAAa,OAAO,cAAY;AAAW,eAAO,YAAS,OAAO,cAAY,OAAO,OAAO,
OAAO,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,qBAAa,OAAO,cAAY;AAAW,eAAO,YAAS,OAAO,cAAY,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,kBAAU,OAAO,WAAS;AAAW,eAAO,SAAM,OAAO,WAAS,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,oBAAY,OAAO,aAAW;AAAW,eAAO,WAAQ,OAAO,aAAW,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,+BAAuB,OAAO,wBAAsB;AAAW,eAAO,sBAAmB,OAAO,wBAAsB,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,0BAAkB,OAAO,mBAAiB;AAAW,eAAO,iBAAc,OAAO,mBAAiB,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,kBAAU,OAAO,WAAS;AAAW,eAAO,SAAM,OAAO,WAAS,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,kBAAU,OAAO,WAAS;AAAW,eAAO,SAAM,OAAO,WAAS,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,uBAAe,OAAO,gBAAc;AAAW,eAAO,cAAW,OAAO,gBAAc,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,0BAAkB,OAAO,mBAAiB;AAAW,eAAO,iBAAc,OAAO,mBAAiB,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,oBAAY,OAAO,aAAW;AAAW,eAAO,WAAQ,OAAO,aAAW,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,kBAAU,OAAO,WAAS;AAAW,eAAO,SAAM,OAAO,WAAS,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,wCAAgC,OAAO,iCAA+B;AAAW,eAAO,+BAA4B,OAAO,iCAA+B,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,8BAAsB,OAAO,uBAAqB;AAAW,eAAO,qBAAkB,OAAO,uBAAqB,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,qCAA6B,OAAO,8BAA4B;AAAW,eAAO,4BAAyB,OAAO,8BAA4B,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,sBAAc,OAAO,eAAa;AAAW,eAAO,aAAU,OAAO,eAAa,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,qCAA6B,OAAO,8BAA4B;AAAW,eAAO,4BAAyB,OAAO,8BAA4B,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,yDAAiD,OAAO,kDAAgD;AAAW,eAAO,gDAA6C,OAAO,kDAAgD,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,4DAAoD,OAAO,qDAAmD;AAAW,eAAO,mDAAgD,OAAO,qDAAmD,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,wDAAgD,OAAO,iDAA+C;AAAW,eAAO,+CAA4C,OAAO,iDAA+C,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,+CAAuC,OAAO,wCAAsC;AAAW,eAAO,sCAAmC,OAAO,wCAAsC,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,iDAAyC,OAAO,0CAAwC;AAAW,eAAO,wCAAqC,OAAO,0CAAwC,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,gDAAwC,OAAO,yCAAuC;AAAW,eAAO,uCAAoC,OAAO,yCAAuC,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,kDAA0C,OAAO,2CAAyC;AAAW,eAAO,yCAAsC,OAAO,2CAAyC,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,kDAA0C,OAAO,2CAAyC;AAAW,eAAO,yCAAsC,OAAO,2CAAyC,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,kDAA0C,OAAO,2CAAyC;AAAW,eAAO,yCAAsC,OAAO,2CAAyC,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,gEAAwD,OAAO,yDAAuD;AAAW,eAAO,uDAAoD,OAAO,yDAAuD,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,kDAA0C,OAAO,2CAAyC;AAAW,eAAO,yCAAsC,OAAO,2CAAyC,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,kDAA0C,OAAO,2CAAyC;AAAW,eAAO,yCAAsC,OAAO,2CAAyC,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,kDAA0C,OAAO,2CAAyC;AAAW,eAAO,yCAAsC,OAAO,2CAAyC,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,kDAA0C,OAAO,2CAAyC;AAAW,eAAO,yCAAsC,OAAO,2CAAyC,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,kDAA0C,OAAO,2CAAyC;AAAW,eAAO,yCAAsC,OAAO,2CAAyC,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,sDAA8C,OAAO,+CAA6C;AAAW,eAAO,6CAA0C,OAAO,+CAA6C,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,+CAAuC,OAAO,wCAAsC;AAAW,eAAO,sCAAmC,OAAO,wCAAsC,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,iCAAyB,OAAO,0BAAwB;AAAW,eAAO,wBAAqB,OAAO,0BAAwB,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,sBAAc,OAAO,eAAa;AAAW,eAAO,aAAU,OAAO,eAAa,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,uBAAe,OAAO,gBAAc;AAAW,eAAO,cAAW,OAAO,gBAAc,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,yBAAiB,OAAO,kBAAgB;AAAW,eAAO,gBAAa,OAAO,kBAAgB,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,uBAAe,OAAO,gBAAc;AAAW,eAAO,cAAW,OAAO,gBAAc,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,sBAAc,OAAO,eAAa;AAAW,eAAO,aAAU,OAAO,eAAa,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,uBAAe,OAAO,gBAAc;AAAW,eAAO,cAAW,OAAO,gBAAc,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,aAAO,SAAO;AAAI,aAAO,WAAS;AAAM,aAAO,aAAW;AAAQ,aAAO,aAAW;AAAQ,aAAO,mBAAiB;AAAc,aAAO,gBAAc;AAAW,aAAO,gBAAc;AAAW;AAAc,aAAO,UAAQ;AAAe,YAAG;AAAW,eAAK;;AAAa,oBAAQ,OAAO;AAAwB,iBAAO,0BAAwB;AAAW,gBAAG;AAAI;AAAM,iBAAK;;;AAAS,eAAO;;AAAQ;AAA4B,aAAK,OAAK;AAAa,aAAK,
UAAQ,kCAAgC,UAAO;AAAI,aAAK,SAAO;;AAAO,8BAAsB;AAAqB,YAAG,CAAC;AAAU;AAAM,YAAG,CAAC;AAAU,kCAAsB;;AAAW;AAAmB,eAAK,QAAM;AAAW,YAAG,kBAAgB;AAAG;;AAAO;AAAS,YAAG,kBAAgB;AAAE;AAAO;AAAiB,cAAG;AAAU;AAAO,sBAAU;AAAK,iBAAO,eAAa;AAAK,cAAG;AAAM;AAAO;AAAc;AAAU,cAAG,OAAO;AAAwB,mBAAO;AAA0B;;AAAU,YAAG,OAAO;AAAc,iBAAO,aAAa;AAAc,qBAAW;AAAW,uBAAW;AAAW,qBAAO,aAAa;eAAK;AAAG;aAAS;;AAAQ;;;AAAS,aAAO,SAAO;AAAI,UAAG,OAAO;AAAY,YAAG,OAAO,OAAO,cAAY;AAAW,iBAAO,aAAW,CAAC,OAAO;AAAY,eAAM,OAAO,WAAW,SAAO;AAAG,iBAAO,WAAW;;;AAAS,UAAG,CAAC;AAAuB,wBAAc;AAAK,UAAG,CAAC;AAAuB;AAGx1tE,aAAO;;;AAIT,MAAI,OAAO,YAAY,YAAY,OAAO,WAAW;AAC/C,WAAO,UAAU;WACV,OAAO,WAAW,cAAc,OAAO;AAC9C,WAAO,IAAI;AAAa,aAAO;;WACxB,OAAO,YAAY;AAC1B,YAAQ,mCAAmC;;ACpBjD,gCAAA,WAAA;AACA,0BAAyB;AACvB,qBAAiB,OAAO,aAAa,eAAe,SAAS,gBAAgB,SAAS,cAAc,MAAM;AAC1G,QAAI,OAAO,eAAe;AAAa,mBAAa,cAAc;AAClE,WACF;AACE,2BAAoB,sBAAqB;AAE3C,mBAAW,OAAO,uBAAoB,cAAY,qBAAkB;AAAG,4BAAoB;AAAG;AAAQ,WAAI,OAAO;AAAQ,YAAG,OAAO,eAAe;AAAM,0BAAgB,OAAK,OAAO;;;AAAM,uBAAe;AAAG,wBAAgB;AAAiB,kBAAU;AAAyB,cAAM;;AAAS,+BAAuB;AAAM,kCAA0B;AAAM,gCAAwB;AAAM,iCAAyB;AAAM,2BAAmB,OAAO,WAAS;AAAS,8BAAsB,OAAO,kBAAgB;AAAW,4BAAoB,OAAO,YAAU,YAAU,OAAO,QAAQ,aAAW,YAAU,OAAO,QAAQ,SAAS,SAAO;AAAS,6BAAqB,CAAC,sBAAoB,CAAC,uBAAqB,CAAC;AAAsB,4BAAoB;AAAG;AAA0B,YAAG,OAAO;AAAe,iBAAO,OAAO,cAAc,MAAK;;AAAiB,eAAO,kBAAgB;;AAAK;AAA8C;AAAW;AAAa,UAAG;AAAqB,YAAG;AAAuB,4BAAgB,eAAgB,QAAQ,mBAAiB;;AAAS,4BAAgB,YAAU;;AAAI,gBAAM;AAAqC,cAAG,CAAC;AAAO,qBAAO;AAAc,cAAG,CAAC;AAAS,uBAAS;AAAgB,qBAAS,SAAS,aAAa;AAAU,iBAAO,OAAO,gBAAgB,UAAS,SAAO,OAAK;;AAAS,qBAAW;AAA8B,oBAAQ,MAAM,UAAS;AAAM,cAAG,CAAC,IAAI;AAAQ,kBAAI,IAAI,WAAW;;AAAK,kBAAO,IAAI;AAAQ,iBAAO;;AAAK,YAAG,QAAQ,QAAQ,SAAO;AAAG,wBAAY,QAAQ,QAAQ,GAAG,QAAQ,OAAM;;AAAK,qBAAW,QAAQ,QAAQ,MAAM;AAAG,gBAAQ,MAAM,qBAAoB;AAAa,cAAG,CAAE,eAAc;AAAa,kBAAM;;;AAAM,gBAAQ,MAAM,sBAAqB;AAAO,gBAAM;AAAiB,kBAAQ,QAAQ;;AAAS,eAAO,aAAW;AAAW,iBAAM;;iBAAsC;AAAsB,YAAG,OAAO,QAAM;AAAa,kBAAM;AAAuB,mBAAO,KAAK;;;AAAI,qBAAW;AAAuB;AAAS,cAAG,OAAO,eAAa;AAAY,mBAAO,IAAI,WAAW,WAAW;;AAAI,kBAAK,KAAK,GAAE;AAAU,kBAAO,OAAO,UAAO;AAAU,iBAAO;;AAAM,YAAG,OAAO,cAAY;AAAa,uBAAW;mBAAmB,OAAO,aAAW;AAAa,uBAAW;;AAAU,YAAG,OAAO,SAAO;AAAY,kBAAM;AAAiB,iBAAK;;;AAAS,YAAG,OAAO,UAAQ;AAAa,cAAG,OAAO,YAAU;AAAY,sBAAQ;AAAG,kBAAQ,MAAI;AAAM,kBAAQ,OAAK,QAAQ,QAAM,OAAO,aAAW,cAAY,WAAS;;iBAAe,sBAAoB;AAAuB,YAAG;AAAuB,4BAAgB,KAAK,SAAS;mBAAa,SAAS;AAAe,4BAAgB,SAAS,cAAc;;AAAI,YAAG;AAAY,4BAAgB;;AAAW,YAAG,gBAAgB,QAAQ,aAAW;AAAG,4BAAgB,gBAAgB,OAAO,GAAE,gBAAgB,YAAY,OAAK;;AAAQ,4BAAgB;;AAAG;AAAC,kBAAM;AAAyB,sBAAQ,IAAI;AAAe,gBAAI,KAAK,OAAM,KAAI;AAAO,gBAAI,KAAK;AAAM,mBAAO,IAAI;;AAAc,cAAG;AAAuB,yBAAW;AAAyB,wBAAQ,IAAI;AAAe,kBAAI,KAAK,OAAM,KAAI;AAAO,kBAAI,eAAa;AAAc,kBAAI,KAAK;AAAM,qBAAO,IAAI,WAAW,IAAI;;;AAAW,sBAAU;AAAuC,sBAAQ,IAAI;AAAe,gBAAI,KAAK,OAAM,KAAI;AAAM,gBAAI,eAAa;AAAc,gBAAI,SAAO;AAAsB,kBAAG,IAAI,UAAQ,OAAK,IAAI,UAAQ,KAAG,IAAI;AAAU,uBAAO,IAAI;AAAU;;AAAO;;AAAW,gBAAI,UAAQ;AAAQ,gBAAI,KAAK;;;AAAO,yBAAe;AAAgB,mBAAS,QAAM;;;;AAAa,gBAAQ,OAAO,YAAU,QAAQ,IAAI,KAAK;AAAS,gBAAQ,OAAO,eAAa,QAAQ,KAAK,KAAK;AAAS,WAAI,OAAO;AAAiB,YAAG,gBAAgB,eAAe;AAAM,iBAAO,OAAK,gBAAgB;;;AAAM,wBAAgB;AAAK,UAAG,OAAO;AAAa,qBAAW,OAAO;AAAa,UAAG,OAAO;AAAe,sBAAY,OAAO;AAAe,UAAG,OAAO;AAAQ,gBAAM,OAAO;AAAQ;AAAe,UAAG,OAAO;AAAc,qBAAW,OAAO;AAAc;AAAkB,UAAG,OAAO;AAAiB,wBAAc,OAAO;AAAiB,UAAG,OAAO,gBAAc;AAAU,YAAI;;AAAmC;AAAe,sBAAc,IAAI,YAAY,MAAM,CAAC,SAAU,KAAI,SAAU,MAAI,GAAE,SAAU;AAAY,kBAAU;AAAM,uBAAe;AAAE;AAAgC,YAAG,CAAC;AAAW,gBAAM,uBAAqB;;;AAAO;AAAyB,mBAAS,OAAO,MAAI;AAAO,gBAAO,MAAK,kCAAgC,QAAM;AAA8B,eAAO;;AAAK;AAAoD,kBAAQ,CAAC,QAAS;AAAc,qBAAQ;AAAE,cAAG,SAAM,QAAM,SAAM,UAAW,SAAM;AAAG,sBAAS,MAAI,UAAQ,KAAG;AAAE,mBAAI,WAAW;AAAK,yBAAa,MAAI,MAAI;;AAAK,iBAAO;WAAK,OAAQ;AAAc
,qBAAQ,WAAW,IAAI;AAAQ,6BAAmB,KAAI;AAAK,iBAAO;;AAAM;AAAiC,cAAG,eAAa;AAAS,mBAAO,aAAa;AAAK,cAAG,eAAa;AAAU,mBAAO,QAAQ;AAAK,iBAAO;;AAAI,mBAAS,SAAS;AAAO,oBAAU;AAAG,oBAAU;AAAE,YAAG;AAAM,uBAAU,GAAE,IAAE,KAAK,QAAO;AAAK,4BAAc,IAAI,SAAS;AAAI,gBAAG;AAAW,kBAAG,UAAQ;AAAE,wBAAM;AAAY,oBAAM,KAAG,UAAU,KAAK;;AAAS,oBAAM,KAAG,KAAK;;;;AAAK,kBAAQ,KAAK,MAAM,MAAK;AAAO,cAAI,mBAAmB;AAAK,YAAG,UAAQ;AAAE,uBAAa;AAAO,eAAO;;AAAI;AAA+C,mBAAS,YAAU;AAAG,0BAAgB,SAAS,MAAM;AAAe,iBAAO,SAAO;;AAAW,yBAAe,eAAa;AAAS,YAAG,cAAY,eAAa,CAAC;AAAM,iBAAO,SAAS;;AAAO,eAAO;AAAW,iBAAO,MAAM,OAAM,YAAW,UAAS,WAAU;;;AAAO,wBAAgB,OAAO,gBAAc,cAAY,IAAI,YAAY,UAAQ;AAAU;AAAoD,qBAAW,MAAI;AAAe,qBAAW;AAAI,eAAM,KAAK,WAAS,CAAE,WAAQ;AAAQ,YAAE;AAAO,YAAG,SAAO,MAAI,MAAI,KAAK,YAAU;AAAa,iBAAO,YAAY,OAAO,KAAK,SAAS,KAAI;;AAAc,qBAAQ;AAAG,iBAAM,MAAI;AAAQ,qBAAO,KAAK;AAAO,gBAAG,CAAE,MAAG;AAAM,sBAAK,OAAO,aAAa;AAAI;;AAAS,qBAAO,KAAK,SAAO;AAAG,gBAAI,MAAG,QAAM;AAAK,sBAAK,OAAO,aAAc,MAAG,OAAK,IAAE;AAAI;;AAAS,qBAAO,KAAK,SAAO;AAAG,gBAAI,MAAG,QAAM;AAAK,mBAAI,MAAG,OAAK,KAAG,MAAI,IAAE;;AAAQ,mBAAI,MAAG,MAAI,KAAG,MAAI,KAAG,MAAI,IAAE,KAAK,SAAO;;AAAG,gBAAG,KAAG;AAAO,sBAAK,OAAO,aAAa;;AAAS,uBAAO,KAAG;AAAM,sBAAK,OAAO,aAAa,QAAM,MAAI,IAAG,QAAM,KAAG;;;;AAAQ,eAAO;;AAAI;AAA0C,eAAO,MAAI,kBAAkB,QAAO,KAAI,kBAAgB;;AAAG;AAA4D,YAAG,CAAE,mBAAgB;AAAG,iBAAO;AAAE,uBAAa;AAAO,qBAAW,SAAO,kBAAgB;AAAE,qBAAU,GAAE,IAAE,KAAI,QAAO,EAAE;AAAG,kBAAM,KAAI,WAAW;AAAG,cAAG,KAAG,SAAO,KAAG;AAAO,qBAAO,KAAI,WAAW,EAAE;AAAG,gBAAE,QAAQ,MAAE,SAAO,MAAI,KAAG;;AAAK,cAAG,KAAG;AAAK,gBAAG,UAAQ;AAAO;AAAM,iBAAK,YAAU;qBAAU,KAAG;AAAM,gBAAG,SAAO,KAAG;AAAO;AAAM,iBAAK,YAAU,MAAI,KAAG;AAAE,iBAAK,YAAU,MAAI,IAAE;qBAAW,KAAG;AAAO,gBAAG,SAAO,KAAG;AAAO;AAAM,iBAAK,YAAU,MAAI,KAAG;AAAG,iBAAK,YAAU,MAAI,KAAG,IAAE;AAAG,iBAAK,YAAU,MAAI,IAAE;;AAAQ,gBAAG,SAAO,KAAG;AAAO;AAAM,iBAAK,YAAU,MAAI,KAAG;AAAG,iBAAK,YAAU,MAAI,KAAG,KAAG;AAAG,iBAAK,YAAU,MAAI,KAAG,IAAE;AAAG,iBAAK,YAAU,MAAI,IAAE;;;AAAI,aAAK,UAAQ;AAAE,eAAO,SAAO;;AAAS;AAAkD,eAAO,kBAAkB,MAAI,QAAO,QAAO;;AAAiB;AAA0C,cAAM,IAAI,OAAM;;AAAQ;AAAsE;AAAyC,kBAAO;AAAI,eAAO,WAAS,QAAM,IAAI,UAAU;AAAK,eAAO,YAAU,SAAO,IAAI,WAAW;AAAK,eAAO,YAAU,SAAO,IAAI,WAAW;AAAK,eAAO,YAAU,SAAO,IAAI,WAAW;AAAK,eAAO,aAAW,UAAQ,IAAI,YAAY;AAAK,eAAO,aAAW,UAAQ,IAAI,YAAY;AAAK,eAAO,aAAW,UAAQ,IAAI,aAAa;AAAK,eAAO,aAAW,UAAQ,IAAI,aAAa;;AAAK,mCAA2B,OAAO,qBAAmB;AAAS;AAAyC,eAAM,UAAU,SAAO;AAAG,yBAAa,UAAU;AAAQ,cAAG,OAAO,YAAU;AAAY,qBAAS;AAAQ;;AAAS,qBAAS,SAAS;AAAK,cAAG,OAAO,SAAO;AAAU,gBAAG,SAAS,QAAM;AAAW,qBAAO,aAAa;;AAAW,qBAAO,cAAc,MAAK,SAAS;;;AAAW,iBAAK,SAAS,QAAM,SAAU,OAAK,SAAS;;;;AAAO,yBAAiB;AAAG,uBAAe;AAAG,uBAAe;AAAG,0BAAkB;AAAG,+BAAuB;AAAM,0BAAkB;AAAM;AAAkB,YAAG,OAAO;AAAW,cAAG,OAAO,OAAO,aAAW;AAAW,mBAAO,YAAU,CAAC,OAAO;AAAW,iBAAM,OAAO,UAAU;AAAQ,wBAAY,OAAO,UAAU;;;AAAU,6BAAqB;;AAAc;AAAuB,6BAAmB;AAAK,6BAAqB;;AAAY;AAAmB,6BAAqB;;AAAY;AAAuB,wBAAc;;AAAK;AAAmB,YAAG,OAAO;AAAY,cAAG,OAAO,OAAO,cAAY;AAAW,mBAAO,aAAW,CAAC,OAAO;AAAY,iBAAM,OAAO,WAAW;AAAQ,yBAAa,OAAO,WAAW;;;AAAU,6BAAqB;;AAAe;AAAyB,qBAAa,QAAQ;;AAAI;AAA0B,sBAAc,QAAQ;;AAAI,sBAAc,KAAK;AAAK,uBAAe,KAAK;AAAM,4BAAoB;AAAE,iCAAyB;AAAK,kCAA0B;AAAK;AAA8B;AAAkB,YAAG,OAAO;AAA2B,iBAAO,0BAA0B;;;AAAkB;AAAiC;AAAkB,YAAG,OAAO;AAA2B,iBAAO,0BAA0B;;AAAiB,YAAG,mBAAiB;AAAG,cAAG,yBAAuB;AAAM,0BAAc;AAAsB,mCAAqB;;AAAK,cAAG;AAAuB,2BAAa;AAAsB,oCAAsB;AAAK;;;;AAAa,aAAO,qBAAmB;AAAG,aAAO,qBAAmB;AAAG;AAAqB,YAAG,OAAO;AAAY,iBAAO,WAAW;;AAAM,gBAAM;AAAG,YAAI;AAAM,YAAI;AAAM,gBAAM;AAAK,qBAAW;AAAE,eAAK,WAAS,OAAK;AAA+C,cAAM,IAAI,YAAY,aAAa;;AAAM;AAA+B,eAAO,OAAO,UAAU,aAAW,KAAI,WAAW,UAAQ,KAAI,QAAQ,YAAU;;AAAE,0BAAkB;AAAwC;AAA6B,eAAO,UAAU,UAAS;;AAAe,0BAAkB;AAAU;AAA6B,eAAO,UAAU,UAAS;;AAAe,2BAAmB;AAAyB,UAAG,CAAC,UAAU;AAAiB,yBAAe,WAAW;;AAAgB;AAAqB;AAAI,cAAG;AAAY,mBAAO,
IAAI,WAAW;;AAAY,cAAG;AAAY,mBAAO,WAAW;;AAAqB,kBAAK;;;AAA8D,gBAAM;;;AAAM;AAA4B,YAAG,CAAC,cAAa,uBAAoB,0BAAwB,OAAO,UAAQ,cAAY,CAAC,UAAU;AAAiB,iBAAO,MAAM,gBAAe,CAAC,aAAY,gBAAgB,KAAK;AAAmB,gBAAG,CAAC,SAAS;AAAO,oBAAK,yCAAuC,iBAAe;;AAAI,mBAAO,SAAS;aAAmB,MAAM;AAAW,mBAAO;;;AAAc,eAAO,IAAI,QAAQ;AAAyB,kBAAQ;;;AAAe;AAAsB,mBAAS,CAAC,KAAM,eAAc,wBAAyB;AAAe;AAA0C,yBAAY,UAAS;AAAQ,iBAAO,SAAO;AAAQ,uBAAW,SAAQ;AAAU,qCAA2B,WAAW;AAAQ,8BAAoB;;AAAoB,yBAAiB;AAAoB;AAA2C,0BAAgB,OAAO;;AAAa;AAA0C,iBAAO,mBAAmB,KAAK;AAAiB,mBAAO,YAAY,YAAY,QAAO;aAAQ,KAAK,UAAS;AAAiB,gBAAI,4CAA0C;AAAQ,kBAAM;;;AAAU;AAA4B,cAAG,CAAC,cAAY,OAAO,YAAY,yBAAuB,cAAY,CAAC,UAAU,mBAAiB,CAAC,UAAU,mBAAiB,OAAO,UAAQ;AAAY,kBAAM,gBAAe,CAAC,aAAY,gBAAgB,KAAK;AAAmB,2BAAW,YAAY,qBAAqB,UAAS;AAAM,qBAAO,OAAO,KAAK,2BAA0B;AAAiB,oBAAI,oCAAkC;AAAQ,oBAAI;AAA6C,uCAAuB;;;;AAAoC,mBAAO,uBAAuB;;;AAA4B,YAAG,OAAO;AAAoB;AAAI,2BAAY,OAAO,mBAAmB,MAAK;AAAiB,mBAAO;;AAAiB,gBAAI,wDAAsD;AAAG,mBAAO;;;AAAO;AAAmB,eAAM;;AAAG,iBAAW;AAAO;AAAuD,mCAA2B,WAAW;;AAAQ,iBAAS,CAAC,WAAU;AAAmB,0BAAgB;AAAgE,eAAO,YAAY,KAAK,UAAU,MAAM;SAAI,gBAAe;AAA+B,iBAAO;AAAE,qBAAU,MAAM,SAAO,GAAE,KAAG,GAAE;AAAK,qBAAS,MAAM;AAAG,cAAG,SAAO;AAAK,kBAAM,OAAO,GAAE;qBAAW,SAAO;AAAM,kBAAM,OAAO,GAAE;AAAG;qBAAa;AAAI,kBAAM,OAAO,GAAE;AAAG;;;AAAM,YAAG;AAAgB,iBAAK,IAAG;AAAM,kBAAM,QAAQ;;;AAAO,eAAO;SAAO,WAAU;AAAe,yBAAe,KAAK,OAAO,OAAK,qBAAkB,KAAK,OAAO,QAAM;AAAI,eAAK,KAAK,eAAe,KAAK,MAAM,KAAK,OAAO;AAAY,iBAAM,CAAC,CAAC;YAAI,CAAC,YAAY,KAAK;AAAK,YAAG,CAAC,QAAM,CAAC;AAAY,iBAAK;;AAAI,YAAG,QAAM;AAAe,kBAAM;;AAAI,eAAO,cAAW,MAAI,MAAI;SAAM,SAAQ;AAAe,qBAAW,KAAK,UAAU,cAAW,OAAO,UAAO,OAAO;AAAG,YAAG,CAAC,QAAM,CAAC;AAAK,iBAAM;;AAAI,YAAG;AAAK,gBAAI,IAAI,OAAO,GAAE,IAAI,SAAO;;AAAG,eAAO,OAAK;SAAK,UAAS;AAAe,YAAG,SAAO;AAAI,iBAAM;AAAI,wBAAc,KAAK,YAAY;AAAK,YAAG,cAAY;AAAG,iBAAO;AAAK,eAAO,KAAK,OAAO,YAAU;SAAI,SAAQ;AAAe,eAAO,KAAK,UAAU,MAAM;SAAI,MAAK;AAAW,oBAAU,MAAM,UAAU,MAAM,KAAK,WAAU;AAAG,eAAO,KAAK,UAAU,MAAM,KAAK;SAAO,OAAM;AAAc,eAAO,KAAK,UAAU,IAAE,MAAI;;AAAK,qBAAa,CAAC,UAAS,IAAG,SAAQ,CAAC,MAAK,IAAG,KAAI,WAAU;AAAsB,sBAAW,SAAS,QAAQ;AAAQ,YAAG,SAAO,KAAG,SAAO;AAAK,UAAA,YAAS,IAAE,MAAI,KAAK,kBAAkB,SAAO;AAAI,kBAAO,SAAO;;AAAO,kBAAO,KAAK;;SAAQ,SAAQ,QAAU,KAAI;AAAW,iBAAS,WAAS;AAAE,kBAAQ,OAAO,SAAS,UAAQ,KAAG;AAAG,eAAO;SAAK,QAAO;AAAc,kBAAQ,aAAa;AAAK,eAAO;SAAK,OAAM;AAAmB,eAAO;;AAAM;AAAuB,eAAO;;AAAE;;AAA+D;AAAuC,kBAAQ;AAAE,qBAAU,GAAE,IAAE,QAAO;AAAK,oBAAQ,OAAO,MAAI,IAAE,KAAG;AAAG,oBAAQ,OAAO,MAAK,KAAE,IAAE,MAAI;AAAG,uBAAU,GAAE,IAAE,KAAI;AAAK,qBAAS,UAAU,IAAG,OAAO,MAAI;;AAAI,iBAAK;;AAAI,eAAO,QAAM,KAAG;AAAI,eAAO;;AAAE;AAAuB,aAAK;;AAAQ;AAA0B,cAAM;;AAAM;AAAoB,YAAE,CAAC;AAAE,eAAO,KAAG,IAAG,CAAC,WAAW,IAAG,OAAK,CAAC,UAAU,IAAE;;AAAK,0BAAkB,CAAC,iCAAkC,kCAAiC,UAAW,WAAU,SAAU,UAAS,UAAW,WAAU,WAAY,YAAW,QAAS;AAAS,gBAAQ;AAAa,aAAO,SAAO;AAAI,kBAAU,OAAO,WAAS;AAAW,eAAO,SAAM,OAAO,WAAS,OAAO,OAAO,SAAS,MAAM,MAAK;;AAAY,6BAAqB,OAAO,sBAAoB;AAAW,eAAO,oBAAiB,OAAO,sBAAoB,OAAO,OAAO,oBAAoB,MAAM,MAAK;;AAAY,0BAAkB,OAAO,mBAAiB;AAAW,eAAO,iBAAc,OAAO,mBAAiB,OAAO,OAAO,iBAAiB,MAAM,MAAK;;AAAY,qBAAa,OAAO,cAAY;AAAW,eAAO,YAAS,OAAO,cAAY,OAAO,OAAO,YAAY,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,QAAQ,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,QAAQ,MAAM,MAAK;;AAAY,kBAAU,OAAO,WAAS;AAAW,eAAO,SAAM,OAAO,WAAS,OAAO,OAAO,SAAS,MAAM,MAAK;;AAAY,oBAAY,OAAO,aAAW;AAAW,eAAO,WAAQ,OAAO,aAAW,OAAO,OAAO,WAAW,MAAM,MAAK;;AAAY,qBAAa,OAAO,cAAY;AAAW,eAAO,YAAS,OAAO,cAAY,OAAO,OAAO,YAAY,MAAM,MAAK;;AAAY,yBAAiB,OAAO,kBAAgB;AAAW,eAAO,gBAAa,OAAO,kBAAgB,OAAO,OAAO,gBAAgB,MAAM,MAAK;;AAAY,yBAAiB,OAAO,kBAAgB;AAAW,eAAO,gBAAa,OAAO,kBAAgB,OAAO,OAAO,gBAAgB,MAAM,MAAK;;AAAY,oBAAY,OAAO,aAAW;AAAW,eAAO,WAAQ,OAAO,aAAW,OAAO,OAAO,WAAW,MAAM
,MAAK;;AAAY,iCAAyB,OAAO,0BAAwB;AAAW,eAAO,wBAAqB,OAAO,0BAAwB,OAAO,OAAO,wBAAwB,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,QAAQ,MAAM,MAAK;;AAAY,2BAAmB,OAAO,oBAAkB;AAAW,eAAO,kBAAe,OAAO,oBAAkB,OAAO,OAAO,kBAAkB,MAAM,MAAK;;AAAY,oBAAY,OAAO,aAAW;AAAW,eAAO,WAAQ,OAAO,aAAW,OAAO,OAAO,WAAW,MAAM,MAAK;;AAAY,0BAAkB,OAAO,mBAAiB;AAAW,eAAO,iBAAc,OAAO,mBAAiB,OAAO,OAAO,iBAAiB,MAAM,MAAK;;AAAY,mCAA2B,OAAO,4BAA0B;AAAW,eAAO,0BAAuB,OAAO,4BAA0B,OAAO,OAAO,0BAA0B,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,QAAQ,MAAM,MAAK;;AAAY,mBAAW,OAAO,YAAU;AAAW,eAAO,UAAO,OAAO,YAAU,OAAO,OAAO,UAAU,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,QAAQ,MAAM,MAAK;;AAAY,2BAAmB,OAAO,oBAAkB;AAAW,eAAO,kBAAe,OAAO,oBAAkB,OAAO,OAAO,kBAAkB,MAAM,MAAK;;AAAY,sBAAc,OAAO,eAAa;AAAW,eAAO,aAAU,OAAO,eAAa,OAAO,OAAO,aAAa,MAAM,MAAK;;AAAY,4BAAoB,OAAO,qBAAmB;AAAW,eAAO,mBAAgB,OAAO,qBAAmB,OAAO,OAAO,mBAAmB,MAAM,MAAK;;AAAY,yBAAiB,OAAO,kBAAgB;AAAW,eAAO,gBAAa,OAAO,kBAAgB,OAAO,OAAO,gBAAgB,MAAM,MAAK;;AAAY,kCAA0B,OAAO,2BAAyB;AAAW,eAAO,yBAAsB,OAAO,2BAAyB,OAAO,OAAO,yBAAyB,MAAM,MAAK;;AAAY,oBAAY,OAAO,aAAW;AAAW,eAAO,WAAQ,OAAO,aAAW,OAAO,OAAO,WAAW,MAAM,MAAK;;AAAY,sBAAc,OAAO,eAAa;AAAW,eAAO,aAAU,OAAO,eAAa,OAAO,OAAO,aAAa,MAAM,MAAK;;AAAY,qBAAa,OAAO,cAAY;AAAW,eAAO,YAAS,OAAO,cAAY,OAAO,OAAO,YAAY,MAAM,MAAK;;AAAY,0BAAkB,OAAO,mBAAiB;AAAW,eAAO,iBAAc,OAAO,mBAAiB,OAAO,OAAO,iBAAiB,MAAM,MAAK;;AAAY,kBAAU,OAAO,WAAS;AAAW,eAAO,SAAM,OAAO,WAAS,OAAO,OAAO,SAAS,MAAM,MAAK;;AAAY,uBAAe,OAAO,gBAAc;AAAW,eAAO,cAAW,OAAO,gBAAc,OAAO,OAAO,cAAc,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,QAAQ,MAAM,MAAK;;AAAY,wBAAgB,OAAO,iBAAe;AAAW,eAAO,eAAY,OAAO,iBAAe,OAAO,OAAO,eAAe,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,QAAQ,MAAM,MAAK;;AAAY,qBAAa,OAAO,cAAY;AAAW,eAAO,YAAS,OAAO,cAAY,OAAO,OAAO,YAAY,MAAM,MAAK;;AAAY,qBAAa,OAAO,cAAY;AAAW,eAAO,YAAS,OAAO,cAAY,OAAO,OAAO,YAAY,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,QAAQ,MAAM,MAAK;;AAAY,qBAAa,OAAO,cAAY;AAAW,eAAO,YAAS,OAAO,cAAY,OAAO,OAAO,YAAY,MAAM,MAAK;;AAAY,sBAAc,OAAO,eAAa;AAAW,eAAO,aAAU,OAAO,eAAa,OAAO,OAAO,aAAa,MAAM,MAAK;;AAAY,oBAAY,OAAO,aAAW;AAAW,eAAO,WAAQ,OAAO,aAAW,OAAO,OAAO,WAAW,MAAM,MAAK;;AAAY,iCAAyB,OAAO,0BAAwB;AAAW,eAAO,wBAAqB,OAAO,0BAAwB,OAAO,OAAO,wBAAwB,MAAM,MAAK;;AAAY,iCAAyB,OAAO,0BAAwB;AAAW,eAAO,wBAAqB,OAAO,0BAAwB,OAAO,OAAO,wBAAwB,MAAM,MAAK;;AAAY,iCAAyB,OAAO,0BAAwB;AAAW,eAAO,wBAAqB,OAAO,0BAAwB,OAAO,OAAO,wBAAwB,MAAM,MAAK;;AAAY,sBAAc,OAAO,eAAa;AAAW,eAAO,aAAU,OAAO,eAAa,OAAO,OAAO,aAAa,MAAM,MAAK;;AAAY,oBAAY,OAAO,aAAW;AAAW,eAAO,WAAQ,OAAO,aAAW,OAAO,OAAO,WAAW,MAAM,MAAK;;AAAY,mBAAW,OAAO,YAAU;AAAW,eAAO,UAAO,OAAO,YAAU,OAAO,OAAO,UAAU,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,QAAQ,MAAM,MAAK;;AAAY,mBAAW,OAAO,YAAU;AAAW,eAAO,UAAO,OAAO,YAAU,OAAO,OAAO,UAAU,MAAM,MAAK;;AAAY,kBAAU,OAAO,WAAS;AAAW,eAAO,SAAM,OAAO,WAAS,OAAO,OAAO,SAAS,MAAM,MAAK;;AAAY,mBAAW,OAAO,YAAU;AAAW,eAAO,UAAO,OAAO,YAAU,OAAO,OAAO,UAAU,MAAM,MAAK;;AAAY,4BAAoB,OAAO,qBAAmB;AAAW,eAAO,mBAAgB,OAAO,qBAAmB,OAAO,OAAO,mBAAmB,MAAM,MAAK;;AAAY,qBAAa,OAAO,cAAY;AAAW,eAAO,YAAS,OAAO,cAAY,OAAO,OAAO,YAAY,MAAM,MAAK;;AAAY,8BAAsB,OAAO,uBAAqB;AAAW,eAAO,qBAAkB,OAAO,uBAAqB,OAAO,OAAO,qBAAqB,MAAM,MAAK;;AAAY,mBAAW,OAAO,YAAU;AAAW,eAAO,UAAO,OAAO,YAAU,OAAO,OAAO,UAAU,MAAM,MAAK;;AAAY,uBAAe,OAAO,gBAAc;AAAW,eAAO,cAAW,OAAO,gBAAc,OAAO,OAAO,cAAc,MAAM,MAAK;;AAAY,sBAAc,OAAO,eAAa;AAAW,eAAO,aAAU,OAAO,eAAa,OAAO,OAAO,aAAa,MAAM,MAAK;;AAAY,qBAAa,OAAO,cAAY;AAAW,eAAO,YAAS,OAAO,cAAY,OAAO,OAAO,YAAY,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,QAAQ,MAAM,MAAK;;AAAY,qBAAa,OAAO,cAAY;AAAW,eAAO,YAAS,
OAAO,cAAY,OAAO,OAAO,YAAY,MAAM,MAAK;;AAAY,kBAAU,OAAO,WAAS;AAAW,eAAO,SAAM,OAAO,WAAS,OAAO,OAAO,SAAS,MAAM,MAAK;;AAAY,oBAAY,OAAO,aAAW;AAAW,eAAO,WAAQ,OAAO,aAAW,OAAO,OAAO,WAAW,MAAM,MAAK;;AAAY,+BAAuB,OAAO,wBAAsB;AAAW,eAAO,sBAAmB,OAAO,wBAAsB,OAAO,OAAO,sBAAsB,MAAM,MAAK;;AAAY,0BAAkB,OAAO,mBAAiB;AAAW,eAAO,iBAAc,OAAO,mBAAiB,OAAO,OAAO,iBAAiB,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,QAAQ,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,QAAQ,MAAM,MAAK;;AAAY,kBAAU,OAAO,WAAS;AAAW,eAAO,SAAM,OAAO,WAAS,OAAO,OAAO,SAAS,MAAM,MAAK;;AAAY,kBAAU,OAAO,WAAS;AAAW,eAAO,SAAM,OAAO,WAAS,OAAO,OAAO,SAAS,MAAM,MAAK;;AAAY,uBAAe,OAAO,gBAAc;AAAW,eAAO,cAAW,OAAO,gBAAc,OAAO,OAAO,cAAc,MAAM,MAAK;;AAAY,0BAAkB,OAAO,mBAAiB;AAAW,eAAO,iBAAc,OAAO,mBAAiB,OAAO,OAAO,iBAAiB,MAAM,MAAK;;AAAY,oBAAY,OAAO,aAAW;AAAW,eAAO,WAAQ,OAAO,aAAW,OAAO,OAAO,WAAW,MAAM,MAAK;;AAAY,kBAAU,OAAO,WAAS;AAAW,eAAO,SAAM,OAAO,WAAS,OAAO,OAAO,SAAS,MAAM,MAAK;;AAAY,oBAAY,OAAO,aAAW;AAAW,eAAO,WAAQ,OAAO,aAAW,OAAO,OAAO,WAAW,MAAM,MAAK;;AAAY,sBAAc,OAAO,eAAa;AAAW,eAAO,aAAU,OAAO,eAAa,OAAO,OAAO,cAAc,MAAM,MAAK;;AAAY,uBAAe,OAAO,gBAAc;AAAW,eAAO,cAAW,OAAO,gBAAc,OAAO,OAAO,eAAe,MAAM,MAAK;;AAAY,yBAAiB,OAAO,kBAAgB;AAAW,eAAO,gBAAa,OAAO,kBAAgB,OAAO,OAAO,iBAAiB,MAAM,MAAK;;AAAY,aAAO,SAAO;AAAI,aAAO,WAAS;AAAM;AAAc,aAAO,UAAQ;AAAe,YAAG;AAAW,eAAK;;AAAa,oBAAQ,OAAO;AAAwB,iBAAO,0BAAwB;AAAW,gBAAG;AAAI;AAAM,iBAAK;;;AAAS,eAAO;;AAAQ;AAA4B,aAAK,OAAK;AAAa,aAAK,UAAQ,kCAAgC,UAAO;AAAI,aAAK,SAAO;;AAAO,uBAAe;AAAM,8BAAsB;AAAqB,YAAG,CAAC;AAAU;AAAM,YAAG,CAAC;AAAU,kCAAsB;;AAAW;AAAwB,4BAAkB,OAAO;AAAW;AAAI;AAAgB,oBAAQ;AAAE,eAAK,KAAI;;AAAe,cAAG,aAAa;AAAY;qBAAe,KAAG;AAAU,4BAAc;AAAK;;AAAY,wBAAU;AAAE,gBAAG,KAAG,OAAO,MAAI,YAAU,EAAE;AAAO,sBAAM,CAAC,GAAE,EAAE;;AAAO,gBAAI,uBAAqB;AAAO,kBAAM,GAAE;;;AAAY,uBAAW;;;AAAM;AAAmB,eAAK,QAAM;AAAW,YAAG,kBAAgB;AAAG;;AAAO;AAAS,YAAG,kBAAgB;AAAE;AAAO;AAAiB,cAAG;AAAU;AAAO,sBAAU;AAAK,iBAAO,eAAa;AAAK,cAAG;AAAM;AAAO;AAAc;AAAU,cAAG,OAAO;AAAwB,mBAAO;AAA0B,cAAG;AAAa,qBAAS;AAAM;;AAAU,YAAG,OAAO;AAAc,iBAAO,aAAa;AAAc,qBAAW;AAAW,uBAAW;AAAW,qBAAO,aAAa;eAAK;AAAG;aAAS;;AAAQ;;;AAAS,aAAO,SAAO;AAAI;AAA+B,YAAG,YAAU,iBAAe,YAAS;AAAG;;AAAO,YAAG;;AAAqB,kBAAM;AAAK,uBAAW;AAAO;AAAc,cAAG,OAAO;AAAU,mBAAO,UAAU;;AAAQ,cAAM,SAAO,IAAI,WAAW;;AAAS,UAAG,OAAO;AAAY,YAAG,OAAO,OAAO,cAAY;AAAW,iBAAO,aAAW,CAAC,OAAO;AAAY,eAAM,OAAO,WAAW,SAAO;AAAG,iBAAO,WAAW;;;AAAS,yBAAiB;AAAK,UAAG,OAAO;AAAgB,uBAAa;AAAM,sBAAc;AAAK;AAGp30B,aAAO;;;AAIT,MAAI,OAAO,YAAY,YAAY,OAAO,WAAW;AAC/C,WAAO,UAAU;WACV,OAAO,WAAW,cAAc,OAAO;AAC9C,WAAO,IAAI;AAAa,aAAO;;WACxB,OAAO,YAAY;AAC1B,YAAQ,uBAAuB;;ACpBrC,wBAAA,WAAA;AAEA,wBAAsB;AAEtB;AACE,iBAAa,CAAE,SAAS,CAAC,YAAY,IAAI,YAAY,IAAI,SAAS,CAAC,GAAG;AACtE,oBAAgB;AAChB,iBAAa,GAAG,IAAI,KAAK,QAAQ,QAAQ;AACvC,qBAAe,KAAK,QAAQ;AAC5B,uBAAiB,KAAK,MAAO,aAAY,SAAS,KAAK;AACvD,uBAAiB,KAAK,MAAO,aAAY,SAAS,KAAK;AACvD,yBAAmB,KAAK,QAAQ;AAChC,uBAAiB,GAAG,QAAQ,UAAU;AACpC,wBAAgB,SAAU,SAAQ;AAClC,yBAAiB,GAAG,QAAQ,UAAU;AACpC,0BAAgB,SAAU,SAAQ;AAClC,uBAAa,GAAG,IAAI,YAAY;AAC9B,oBAAQ,KAAK,CAAC,SAAS;;;;;AAK/B,WAAO;;AAGT,qBAAmB;AACjB,QAAI,eAAe;AACnB,QAAI,WAAW;AACf,QAAI,SAAS;;AAGf,oBAAkB,oBAAqB;IACrC;IACA,YAAY,GAAG,MAAM,gBAAgB,CAAC,GAAG,IAAI,CAAC,IAAI;IAClD,UAAU,GAAG,MAAM,gBAAgB,CAAC,GAAG,IAAI,CAAC,IAAI;;AAGlD,mBAAiB;AACf,mBAAe,GAAG,IAAI,IAAI,YAAY;AACtC,iBAAa,GAAG,IAAI,IAAI,UAAU;AAClC,2BAAuB,GAAG,SAAS,CAAC,QAAQ,OAAO;AACnD,WAAO,UAAU;;AAGnB;AACE,sBAAkB,GAAG,MAAM,YAAY,CAAC,GAAG,IAAI,CAAC,IAAI;AACpD,oBAAgB,GAAG,IAAI,WAAW;AAClC,qBAAiB,GAAG,MAAM,YAAY,CAAC,GAAG,IAAI,CAAC,IAAI;AACnD,+BAA2B,GAAG,IAAI,UAAU;AAC5C,8BAA0B,GAAG,IAAI,SAAS;AAC1C,wBAAoB,GAAG,IAAI,oBAAoB;AAC/C,mBAAe,GAAG,IAAI,mBAAmB;AACzC,iBAAa,GAAG,IAAI,mBAAmB;AACvC,4BAAwB,
GAAG,IAAI,QAAQ;AACvC,0BAAsB,GAAG,IAAI,MAAM;AACnC,uBAAmB;AACnB,WAAO,GAAG,SAAS,CAAC,iBAAiB,gBAAgB;;AAGvD;AACE,WAAO,GAAG,KAAK;AACb,kBAAY,MAAK,SAAS,MAAK,SAAS;AACxC,aAAO,SAAS,KAAK,aAAa,eAAe;;;;IAKnD;AACE,WAAK,iBAAiB;AACtB,WAAK,QAAQ,QAAO,SAAS;AAC7B,WAAK,SAAS,QAAO,SAAS;AAC9B,WAAK,cAAc,gBAAgB,QAAO,SAAS;AACnD,WAAK,UAAU,GAAG,SAAS,KAAK;AAChC,WAAK,YAAY,GAAG,SAAS,CAAC,KAAK,OAAO,KAAK;AAC/C,WAAK,SAAS;AACd,WAAK,aAAa;;UAGd;AAEJ,UAAK,CAAC,cAAgB,WAAW,sBAAwB,WAAW,MAAM,WAAW,KAAO,WAAW,MAAM,KAAK,KAAO,WAAW,MAAM,KAAK;AAAI,eAAO;AAC1J,+CAAyC,GAAG,KAAK;AAC/C,6BAAqB,WAAW,eAAe,CAAC,KAAK,OAAO,KAAK;AAEjE,gCAAwB,GAAG,IAAI,aAAa,IAAI,QAAQ;AACxD,kCAA0B,KAAK,eAAe,QAAQ;AACtD;AAEA,YAAI,MAAM,QAAQ;AAChB,yBAAe,kBAAkB,KAAK,UAAU,EAAE,OAAO,EAAE;AAC3D,4BAAkB,GAAG,OAAO,CAAC,OAAO,IAAI,OAAO,KAAK;AACpD,4BAAkB,GAAG,OAAO,CAAC,OAAO,IAAI,OAAO,KAAK;AACpD,0BAAe,GAAG,OAAO,CAAC,WAAW,YAAY;AACjD,uBAAa,QAAO,QAAQ;;AAE5B,uBAAa,kBAAkB;;AAEjC,8BAAsB,aAAa,YAAY,KAAK,SAAS,KAAK;AAClE,uBAAe,GAAG,MAAM,YAAY,CAAC,GAAG,IAAI,CAAC,IAAI;AACjD,0BAAkB,GAAG,QAAQ,QAAQ;AACrC,eAAO,CAAC,YAAY,eAAe;;AAErC,+BAAyB,MAAM,GAAG,MAAM,uBAAuB,OAAO,QAAQ,KAAK,OAAO,SAAS,UAAU,KAAK,OAAO,SAAS,cAAc,KAAK,OAAO,SAAS;AACrK,yBAAmB,iBAAiB;AACpC,uBAAiB;AACjB,+BAAyB,WAAW,IAAI,cAAc,GAAG,MAAM,OAAO,CAAC,UAAU,IAAI,CAAC,GAAG;AACzF,4BAAsB,iBAAiB,IAAI;AACzC,qBAAa,YAAY;AACzB,oBAAY;AACZ,eAAO;;AAGT,wBAAkB,OAAO;AACzB,6BAAuB;AACvB,sBAAgB;AACd,yBAAiB,WAAW;AAC5B,2BAAmB,UAAU;AAC7B,YAAI,aAAa,KAAK,OAAO,SAAS;AACpC,sBAAY,UAAU,cAAc;AACpC,yBAAe,KAAK,YAAY;AAChC,4BAAkB,GAAG,KAAK,MAAM,GAAG,MAAM,iBAAiB,CAAC,UAAU,gBAAgB,IAAI,CAAC,GAAG,KAAK,UAAU,QAAQ,CAAC,eAAe;AACpI,yBAAe,KAAK,CAAE,KAAK,WAAW,QAAQ;;;AAGlD,sBAAgB;AAChB,YAAM;AACN,aAAO;AACP,sBAAgB;AAChB,aAAO;QACL,OAAO;QACP,aAAa,CAAC,WAAW,MAAM,KAAK,KAAK,OAAO,WAAW,MAAM,KAAK,KAAK;;;UAIzE;AACJ,aAAQ,OAAO,eAAgB,MAAM,KAAK,iBAAiB;AAC3D,oBAAc;AACd,0BAAmB;AACjB,6BAAqB,MAAK,UAAU;AACpC,0BAAkB,uBAAuB,OAAM;AAC/C,wBAAgB,SAAS;AACzB,gCAAwB,MAAK,YAAY;AACzC,uBAAe,MAAK;AACpB,6CAAqC;AACrC,gCAAwB,aACrB,IAAI,cAAe;UACjB,UAAS,KAAK,OAAO,MAAM;UAC3B,UAAS,KAAK,OAAO,MAAM;;AAEhC,+BAAuB;UACrB,SAAS,QAAQ,MAAM,GAAG;UAC1B,aAAa,QAAQ,MAAM;UAC3B,WAAW;UACX,aAAa;;AAEf,mBAAW,MAAK;AAChB,cAAK,UAAU;AACf,cAAK,YAAY;AACjB,kBAAU;AACV,cAAM,KAAK;;AAEb,aAAO;;;AAIX;AACE,sBAAkB,MAAM,eAAe,QAAO,SAAS,WAAW,CAAE,WAAW,QAAO,SAAS,UAAU,SAAS;AAClH,kBAAc,IAAI,eAAe,WAAW;AAE5C,YAAQ,IAAI,sBAAsB,QAAO,SAAS,UAAU,MAAM,YAAY;AAC9E,WAAO;;AAGT,UAAQ,OAAO;AACf,UAAQ,iBAAiB;AACzB,UAAQ,aAAa;;AC/KrB,wBAAA,WAAA;AAAA,UAAQ,mBAAmB;IACzB,YAAY;MACV;MAAI;MAAK;MAAK;MAAK;MAAK;MAAK;MAAK;MAAK;MAAK;MAAK;MAAK;MACtD;MAAK;MAAK;MAAK;MAAK;MAAK;MAAK;MAAK;MAAK;MAAK;MAAK;MAAK;MACvD;MAAK;MAAI;MAAK;MAAI;MAAK;MAAK;MAAK;MAAI;MAAI;MAAK;MAAI;;IAEpD,gBAAgB,CAAC,IAAI,KAAK,IAAI,IAAI,IAAI,GAAG,KAAK,KAAK,KAAK,KAAK;IAC7D,gBAAgB,CAAC,KAAK,IAAI,KAAK,IAAI,IAAI,KAAK,KAAK,KAAK,KAAK;IAC3D,gBAAgB,CAAC,IAAI,KAAK,IAAI,IAAI,IAAI,IAAI,KAAK,KAAK,KAAK,KAAK;IAC9D,gBAAgB,CAAC,IAAI,IAAI,IAAI,KAAK,IAAI,IAAI,KAAK,KAAK,KAAK,KAAK;IAC9D,gBAAgB,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK;IAC/C,gBAAgB,CAAC,IAAI,GAAG,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK;IACtD,gBAAgB,CAAC,KAAK,IAAI,IAAI,IAAI,IAAI,IAAI;IAC1C,gBAAgB,CAAC,KAAK,IAAI,KAAK,IAAI,IAAI,IAAI,IAAI,KAAK;IACpD,gBAAgB,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK;IAC/C,gBAAgB,CAAC,KAAK,IAAI,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK;IACxD,gBAAgB,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK;IACzD,mBAAmB,CAAC,KAAK,IAAI,IAAI,KAAK,IAAI,KAAK,IAAI;IACnD,mBAAmB,CAAC,IAAI,KAAK,IAAI,IAAI,IAAI;IACzC,cAAc,CAAC,KAAK,KAAK,KAAK,KAAK;IACnC,eAAe,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK;IAC9C,eAAe,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK;IACxD,eAAe,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK;IAC9C,eAAe,CAAC,KAAK,KAAK,KAAK,KAAK,K
AAK,KAAK,KAAK,KAAK;IACxD,eAAe,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK;IAC9C,eAAe,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK;IACxD,eAAe,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK;IACxD,kBAAkB,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK;IACtD,kBAAkB,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK;IAC5C,aAAa,CAAC,KAAK,KAAK,KAAK,KAAK;IAClC,mBAAmB,CAAC;IACpB,SAAS,CAAC;IACV,YAAY,CAAC;IACb,iBAAiB,CAAC;IAClB,gBAAgB,CAAC;IACjB,YAAY,CAAC;IACb,WAAW,CAAC;;AAEd,UAAQ,2BAA2B;IACjC,CAAE,KAAK,aAAa,SAAS,CAAC,GAAG,IAAI,IAAI,IAAI,IAAI,IAAI;IACrD,CAAE,KAAK,aAAa,SAAS,CAAC,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI;IACtD,CAAE,KAAK,aAAa,SAAS,CAAC,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI;IACtD,CAAE,KAAK,aAAa,SAAS,CAAC,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG;IACtD,CAAE,KAAK,aAAa,SAAS,CAAC,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI;IAC9D,CAAE,KAAK,aAAa,SAAS,CAAC,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI;IAC9D,CAAE,KAAK,aAAa,SAAS,CAAC,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI;IAC9D,CAAE,KAAK,gBAAgB,SAAS,CAAC,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI;IAC7D,CAAE,KAAK,gBAAgB,SAAS,CAAC,IAAI,IAAI,IAAI,IAAI,IAAI;;;AC/CvD,kBAAA,WAAA;AAEA;AACE,uBAAmB,CAAC,IAAI,WAAW,KAAK,OAAO,IAAI,IAAI,WAAW,KAAK,OAAO;AAC9E,qBAAiB,CAAC,IAAI,SAAS,KAAK,OAAO,IAAI,IAAI,SAAS,KAAK,OAAO;AACxE,WAAO,CAAE,YAAY;;AAEvB,UAAQ,sBAAsB;AAE9B;AACE,WAAO;MACL,KAAK,IAAI,IAAI,SAAS,KAAK,IAAI,WAAW;MAC1C,KAAK,IAAI,IAAI,SAAS,KAAK,IAAI,WAAW;;;AAG9C,UAAQ,aAAa;AAErB;AACE,WAAO;MACL,IAAI,WAAW,KAAM,KAAI,SAAS,KAAK,IAAI,WAAW,MAAM;MAC5D,IAAI,WAAW,KAAM,KAAI,SAAS,KAAK,IAAI,WAAW,MAAM;;;AAGhE,UAAQ,eAAe;AAEvB;AACE,cAAU,OAAM,MAAM;AACtB,cAAU,OAAM,MAAM;AACtB,kBAAc,CAAC;MACb,IAAI,WAAW,KAAK;MAAG,IAAI,WAAW,KAAK;MAAG,IAAI,SAAS,KAAK;MAChE,IAAI,SAAS,KAAK;;AAEpB,WAAO,GAAG,MAAM,cAAc,QAAO,OAAO,CAAC,IAAI;;AAEnD,UAAQ,2BAA2B;AAEnC,qCAAkC;AAChC,mBAAe,cAAa;AAC5B,iBAAa,YAAW;AACxB,wBAAoB,CAAC,SAAS,KAAK,KAAK,GAAG,SAAS,KAAK,KAAK;AAC9D,uBAAmB,CAAC,OAAO,KAAK,YAAY,IAAI,OAAO,KAAK,YAAY;AACxE,qBAAiB,CAAC,OAAO,KAAK,YAAY,IAAI,OAAO,KAAK,YAAY;AACtE,WAAO,CAAE,YAAY,UAAU,WAAW,IAAI;;AAEhD,UAAQ,aAAa;AAErB;AACE,oBAAgB,cAAa;AAC7B,iBAAa,YAAW;AACxB,oBAAgB,KAAK,IAAI,GAAG;AAC5B,qBAAiB,UAAU;AAC3B,uBAAmB,CAAC,QAAQ,KAAK,UAAU,QAAQ,KAAK;AACxD,qBAAiB,CAAC,QAAQ,KAAK,UAAU,QAAQ,KAAK;AACtD,WAAO,CAAE,YAAY,UAAU,WAAW,IAAI;;AAEhD,UAAQ,cAAc;;ACvDtB,mBAAA,WAAA;AAAA,UAAQ,kBAAkB,CAAC,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,GAAG;AAKxD;AACE,WAAO,QAAQ,IAAI,KAAK,KAAK,KAAK,MAAO,SAAQ,KAAK,MAAO,KAAI,KAAK;;AAExE,UAAQ,mBAAmB;AAM3B;AACE,oBAAgB,KAAK,KAAK,IAAI,KAAK,MAAM,CAAE,QAAO,KAAK,OAAO,KAAK,OAAO,KAAK,OAAO;AACtF,WAAO,kBAAiB;;AAE1B,UAAQ,kBAAkB;AAC1B;AACE,WAAO,MAAM,MAAM,KAAK;;AAE1B,UAAQ,eAAe;AACvB;AACE,WAAO,CAAC,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,GAAG;;AAEvC;AACE,kBAAc;AACd,iBAAa,GAAG,IAAI,GAAG,QAAQ;AAC7B,iBAAW,GAAG,KAAK,GAAG;;AAExB,WAAO;;AAET,UAAQ,MAAM;AACd;AACE,mBAAe;AACf,iBAAa,GAAG,IAAI,IAAI,QAAQ;AAC9B,aAAO,KAAK,IAAI,GAAG;;AAErB,WAAO;;AAET,UAAQ,qBAAqB;AAC7B;AACE,oBAAgB;AAChB,iBAAa,KAAK;AAClB,mBAAe,GAAG,MAAM,MAAM;AAC5B,cAAQ,KAAK;AACb,qBAAe,GAAG,MAAM,MAAM;AAC5B,gBAAQ,KAAK,KAAK,KAAI,KAAK,MAAM,oBAAmB,MAAM;;;AAG9D,WAAO;;AAET;AACE,iBAAa,KAAK,IAAI;AACtB,iBAAa,KAAK,IAAI;AACtB,2BAAuB,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,MAAM,MAAM,IAAI,CAAC,GAAG,GAAG;AAClE,8BAA0B,wBAAuB,OAAO,IAAI,OAAO;AACnE,qCAAiC,2BAA0B,mBAAmB;AAC9E,sCAAkC,wBAAuB,CAAC,OAAO,IAAI,CAAC,OAAO;AAC7E,WAAO,2BAA0B,0BAA0B;;AAE7D,UAAQ,sBAAsB;AAC9B;AACE,8BAA0B,CAAC,CAAC,OAAO,GAAG,IAAI,OAAO,GAAG,KAAK,CAAC,OAAO,GAAG,IAAI,OAAO,GAAG;AAClF,iCAA6B,CAAC,OAAO,GAAG,IAAI,OAAO,GAAG;AACtD,gCAA4B;MAC1B,CAAC,KAAI,kBAAkB,IAAI;MAC3B,CAAC,KAAI,kBAAkB,IAAI;;AAE7B,WAAO;MACL,kBAAkB,GAAG,OAAO,oBAAoB;MAChD,kBAAkB,GAAG,OAAO,oBAAoB;MAChD,CAAC,GAAG,GAAG;;;AAGX,UAAQ,
wBAAwB;AAChC;AACE,WAAO;MACL,KAAI,uBAAuB,eAAe;MAC1C,KAAI,uBAAuB,eAAe;;;AAG9C,UAAQ,cAAc;AACtB;AACE,WAAO,KAAK,KAAO,GAAE,KAAK,EAAE,OAAO,IAAO,GAAE,KAAK,EAAE,OAAO;;AAE5D,UAAQ,0BAA0B;;ACvFlC,2BAAA,WAAA;AAEA,mBAA0B,WAAA;AAC1B,oBAA2B,WAAA;AAC3B,iBAAsB,WAAA;AAEtB,0BAAwB;AACxB,2BAAyB;AACzB,kDAAgD,CAAC,kBAA4B,UAAA,iBAAiB,qBAAqB;AACnH,gCAA8B;AAC9B,+BAA6B;AAC7B,uDAAqD,CAAC,uBAAuB;AAC7E,2BAAmC,UAAA,iBAAiB;AACpD,0BAAwB,CAAC,iBAAiB,IAAI,iBAAiB,iBAAiB,SAAS;AACzF,4BAAoC,UAAA,iBAAiB;AACrD,2BAAyB,CAAC,kBAAkB,IAAI,kBAAkB,kBAAkB,SAAS;AAC7F,kCAAgC;AAChC,kCAAgC;AAChC,0BAAwB;AACxB,+BAA6B;AAG7B;AACE,iBAAa,GAAG,IAAc,UAAA,yBAAyB,QAAQ;AAC7D,aAAQ,KAAK,WAAsB,UAAA,yBAAyB;AAC5D,8BAAkC,UAAA,iBAAiB,GAAG,SAAS;AAC/D,mCAA6B,QAAQ;AACrC,UAAI,wBAAwB,KAAK,SAAS;AACxC,qBAAa,GAAG,IAAI,QAAQ,QAAQ;AAClC,wBAAc,QAAQ;AACtB,oBAAU,gBAAgB,MAAM;YAC9B,UAAU,OAAO;YAAI,UAAU,OAAO;YACrC,WAAU,OAAO,KAAK,UAAU,gBAAgB,IAAI,MAAM;;;;;;;IAQnE;AAEE,WAAK,cAAc;AACnB,WAAK,0BAA0B;AAC/B,WAAK,sBAAsB;AAC3B,WAAK,eAAe;AACpB,WAAK,YAAY;AACjB,WAAK,YAAY,QAAO,KAAK;AAC7B,WAAK,aAAa,QAAO,KAAK;AAC9B,WAAK,WAAW,QAAO,KAAK;AAC5B,WAAK,cAAc;AACnB,WAAK,UAAU;AACf,WAAK,gBAAgB;;IAGvB;AACE,sBAAyB,SAAA,WAAW,CAAE,YAAY,IAAI,YAAY,UAAU,IAAI;AAChF,0BAAoB,CAAC,QAAQ,KAAK,KAAK,WAAW,QAAQ,KAAK,KAAK;AACpE,2BAAqB,UAAU,IAAI,WAAY;QAC7C,YAAY,KAAM,OAAM,KAAK,KAAK,YAAY;QAC9C,YAAY,KAAM,OAAM,KAAK,KAAK,aAAa;QAAI,MAAM;;AAE3D,mCAAkC,OAAA,oBAAoB,OAAO,CAAC,GAAG;AACjE,4BAAsB,aAAa,IAAI,WAAY,CAAC,GAAQ,OAAA,YAAY,OAAO,uBAAuB,MAAM;AAC5G,oCAAmC,OAAA,sBAAsB;AACzD,wBAAkB,CAAC,GAAY,SAAA,aAAa,CAAE,YAAY,IAAI,YAAY,UAAU,IAAI,YAAa;AACrG,gCAA0B;QACnB,OAAA,IAAI,WAAW,sBAAsB;QACrC,OAAA,IAAI,WAAW,sBAAsB;;AAE5C,aAAO,cAAc,IAAI,WAAY;QACnC,MAAM,KAAK,kBAAkB;QAC7B,MAAM,KAAK,kBAAkB;QAAI,MAAM;;;IAI3C;AACE,uBAAiB,UAAU,gBAAgB,IAAI;AAC/C,wBAAkB,UAAU,iBAAiB,IAAI;AACjD,aAAO,WAAW;;IAIpB,6EAA4E;AAC1E,kBAAqB,SAAA,YAAqB,SAAA,WAAW,KAAK,8BAA8B,CAAC,UAAU,sBAAsB,UAAU,wBAAwB,KAAK;AAChK,sBAAyB,SAAA,WAAW;AACpC,iBAAW,GAAG,MAAM,cAAc,OAAM,CAAC;QACvC,IAAI,WAAW,KAAK,KAAK;QACzB,IAAI,WAAW,KAAK,KAAK;QAAW,IAAI,SAAS,KAAK,KAAK;QAC3D,IAAI,SAAS,KAAK,KAAK;UACrB,CAAC,IAAI,CAAC,KAAK,UAAU,KAAK;AAC9B,UAAI;AACF,eAAO,GAAG,MAAM,cAAc;;AAEhC,aAAO,CAAE,KAAK,SAAS;;IAIzB,iDAAiD;AAC/C,2BAAqB;AACrB,mBAAa,GAAG,IAAI,sBAAsB;AACxC,kBAAU,QAAQ,IAAI;AACtB,kBAAU,QAAQ,IAAI,IAAI;AAC1B,kBAAU,QAAQ,IAAI,IAAI;AAC1B,qBAAa,KAAK;UACf,QACI,IAAK,IAAI,KAAK,WACd,IAAI,KAAK,YAAa,WAAW,KAAK,OAAO,WAAW;UAC5D,IAAI,KAAK,WAAY,WAAW,KAAK,OAAO,WAAW;UAAI;;;AAGhE,aAAO,CAAE,WAAW,cAAc,MAAM,aAAa,MAAM;;IAI7D;AACE,2BAAqB,UAAoB,UAAA,iBAAiB,GAAG,sBAAsB,0BAA0B;AAC7G,2BAAqB,UAAoB,UAAA,iBAAiB,GAAG,sBAAsB,0BAA0B;AAC7G,uBAAkB,gBAAe,gBAAgB;AAEjD,aAAO,WAAW,IAAI;AACpB,gBAAQ;AACR,YAAI,MAAM;AACR,cAAI;mBACK,MAAM;AACf,cAAI;;AAEN,eAAO,CAAC,MAAM,IAAI,MAAM,IAAI;;;UAI1B;AACJ,WAAK;AACL,wBAAkB;AAElB;AACA,UAAK,KAAK,UAAU,QAAO,SAAS,cAAe,CAAC,QAAO,KAAK;AAC9D,mBAAW,MAAM,KAAK,oBAAoB,iBAAiB;AAE3D,YAAK,MAAM,MAAM,OAAO,OAAS,MAAM,MAAM,OAAO;AAAM,eAAK,UAAU;;AAI3E,UAAI,YAAY,SAAS,SAAU,SAAS,MAAM,SAAS,KAAO,EAAC,QAAO,KAAK,WAAY,SAAS,MAAM,WAAW,KAAK,iBAAmB,KAAK,kBAAkB,QAAO,SAAS;AAClL,aAAK,cAAc;AACnB,aAAK,gBAAgB;AACrB,+BAAuB,SAAS;AAC9B,eAAK,YAAY,KAAK,CAAE,YAAY,SAAS,IAAI,WAAW,YAAY,UAAU,SAAS,IAAI,SAAS,YAAY,WAAW,SAAS,WAAW,YAAY,SAAS;;AAE1K,YAAI,KAAK,YAAY,SAAS;AAAG,wBAAc;;AAGjD,UAAI;AACF,YAAI,CAAC,YAAY,CAAC,SAAS,SAAU,SAAS,MAAM,WAAW;AAC7D,eAAK,cAAc;AACnB,eAAK,gBAAgB;AACrB,iBAAO;;AAET,wBAAgB,KAAK;AACnB,4BAA2B,SAAA,oBAAoB,CAAE,YAAY,KAAK,YAAY,GAAG,YAAY,UAAU,KAAK,YAAY,GAAG,WAAY,SAAS;AAChJ,8BAA6B,SAAA,WAAW;AACxC,4BAAkB,KAAK,YAAY,GAAG,UAAU;AAChD,6BAAmB,KAAK,YAAY,GAAG;AACvC,eAAK,YAAY,KAAK,IAAK,aAAa,YAAY;;AAEtD,aAAK,0BAA0B;;AAEjC,UAAI,YAAY,SAAS;AACvB,iBAAS,MAAM,QAAQ;AACrB,qBAAW,
IAAI,WAAW;AAC1B,qBAAW,IAAI,SAAS;AACxB,qBAAW,UAAU;;;AAMzB,oBAAc,GAAG,KAAK,MAAM,KAAK,YAAY,IAAI;AAC/C,oBAAY;AAEZ,0CAAkC,IAAI,UAAU,UAAU;AAC1D,8CAAsC;AACtC,YAAI,8BAA8B;AAChC,WAAC,cAAc,mBAAmB;;AAEpC,gBAAa,OAAA,gBAAgB,IAAI,UAAU,eAAe,IAAI,UAAU;AACxE,2BAA4B,SAAA,aAAa,CAAE,YAAY,IAAI,YAAY,UAAU,IAAI;AACrF,qCAA6B,CAAC,WAAW,KAAK,MAAM,MAAM,IAAI,WAAW,KAAK,MAAM,MAAM;AAC1F,2BAAmB;AACnB,6BAA0B,OAAA;AAC1B,YAAI,UAAU;AACZ,yBAAe,GAAG,MAAM,iBAAiB,OAAO,OAAO,GAAG;AAC1D,2BAAsB,OAAA,oBAAoB,CAAC,OAAO;;AAEpD,uBAAe,CAAE,YAAY,IAAI,YAAY,UAAU,IAAI;AAC3D,sBAAsB,SAAA,yBAAyB,QAAQ,cAAc,CAAC,KAAK,YAAY,KAAK,YAAY,IAAI;AAG5G,YAAI,CAAC,QAAO,KAAK;AACf,8BAAmB;YACjB,QAAQ;YACR;YACA,gBAAgB;YAChB,YAAY,IAAI;YAChB,OAAO;;AAET,iBAAO;;AAIT,uCAA+B,KAAK,aAAa,QAAQ;AACzD,8BAAsB,WAAW,WAAW;AAC5C,mBAAW;AACX,YAAI,gBAAgB,QAAO,SAAS;AAClC,iBAAO;AACP,iBAAO;;AAET,+BAAuB,GAAG,QAAQ,QAAQ,CAAC,IAAI;AAC/C,wBAAgB,eAAe;AAC/B,YAAI,QAAO,KAAK;AACd,iBAAQ,iBAAiB,yBAAyB,qBAAsB,KAAK,UAAU,WAAW,OAAM,gBAAgB,IAAI,gBAAgB,IAAI;AAChJ,iBAAQ,kBAAkB,0BAA0B,sBAAuB,KAAK,UAAU,WAAW,OAAM,iBAAiB,IAAI,iBAAiB;AACjJ,iCAAwB,KAAK,UAAU,QAAQ,GAAG,OAAO,CAAC,aAAa;AACvE,qCAA2B,eAAe;AAC1C,yBAAe;AACf,8BAAoB,mBAAmB,MAAM,GAAG,uBAAuB;AACvE,iBAAQ,6BAA6B,2BAA4B,KAAK,aAAa,aAAa,YAAY,gBAAgB;AAC5H,+BAAqB,mBAAmB,MAAM,uBAAuB;AACrE,iBAAQ,8BAA8B,4BAA6B,KAAK,aAAa,cAAc,aAAa;AAChH,gDAAsC,KAAK,iCAAiC;AAC5E,cAAI,KAAK,IAAI,iCAAiC;AAC5C,kCAAsB,WAAW,kBAAkB;AACnD,kCAAsB,WAAW,mBAAmB;qBAE3C,gCAAgC;AACzC,kCAAsB,WAAW,kBAAkB,QAAQ,CAAC,aAAa;;AAEzE,kCAAsB,WAAW,mBAAmB,SAAS,CAAC,aAAa;;AAE7E,yCAA+B,KAAK,sBAAsB,WAAW,mBAAmB;AACxF,0CAAgC,KAAK,sBAAsB,WAAW,oBAAoB;AAC1F,sBAAY,UAAU,OAAO,wBAAwB,OAAO;;AAE9D,sCAA8B,KAAK,mBAAmB,WAAW,KAAK,OAAO;AAC7E,WAAG,QAAQ;AACX,6BAA8B,SAAA,WAAW,KAAK,8BAA8B;AAC5E,kCAA0B,GAAG,SAAS;AACtC,2BAAmB;UACjB,QAAQ;UACR,KAAK;UACL,gBAAgB;UAChB,YAAY,IAAI;UAChB,OAAO;;AAET,aAAK,YAAY,KAAK,IAAK,cAAc,WAAW,kBAAkB,aAAa,YAAY,IAAI,YAAY,gBAAgB;AAC/H,eAAO;;AAET,gBAAU,QAAQ,OAAO,OAAO,MAAM;AACtC,WAAK,gBAAgB,QAAQ;AAC7B,aAAO;;IAGT;AACE,iBAAW,UAAU,IAAI,OAAO,EAAE;AAClC,iBAAW,UAAU,IAAI,OAAO,EAAE;AAClC,yBAAmB,CAAC,KAAK,IAAI,GAAG,KAAK,KAAK,IAAI,GAAG;AACjD,uBAAiB,CAAC,KAAK,IAAI,GAAG,KAAK,KAAK,IAAI,GAAG;AAC/C,aAAO,CAAE,YAAY,UAAU;;;AAGnC,UAAQ,WAAW;;AC9QnB,uBAAA,WAAA;AAAA,UAAQ,YAAY;IAClB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,iBAAiB;IAClB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,kBAAkB;IACnB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,iBAAiB;IAClB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,kBAAkB;IACnB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IACpB,CAAC,mBAAmB;IA
CV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ
;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MA
CH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,
UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAA
U;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,
GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAA
G;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;M
ACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAE
Z;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;M
ACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH
,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UA
AU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE
,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GA
AG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;
MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IA
EZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;
MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MAC
H,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,U
AAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MAC
E,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,G
AAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU
;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;I
AEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG
;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MA
CH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,
UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MA
CE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,
GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;;;AC/viBd,uBAAA,WAAA;AAmBA,uBAA8B,WAAA;AAC9B,mBAA0B,WAAA;AAC1B,kBAAyB,WAAA;AArBzB;;;;;;;;;;;;;;;;AAuBA,2BAAyB;IACvB,OAAO,CAAC,GAAG,GAAG,GAAG;IACjB,aAAa,CAAC,GAAG,GAAG,GAAG;IACvB,cAAc,CAAC,GAAG,IAAI,IAAI;IAC1B,YAAY,CAAC,IAAI,IAAI,IAAI;IACzB,OAAO,CAAC,IAAI,IAAI,IAAI;IACpB,UAAU,CAAC;;;IAIX;AACE,WAAK,WAAW;;WAGX;AACL,aAAO;;UAGH;AACJ,0BAAoB,MAAM,KAAK,SAAS,cAAc,OAAO;AAC7D,UAAI,CAAC;AAAa,eAAO;AACzB,oBAAc;AACd,+BAAyB;AACvB,4BAAoB;AACpB,YAAI,WAAW;AACb,4BAAkB,OAAO,KAAK;AAC5B,wBAAY,OAAO,iBAAiB,KAAK,IAAI,WAAW,WAAW,UAAU;;;AAGjF,cAAM,KAAK;UACT,YAAY,WAAW;UACvB,KAAK,WAAW,MAAM;YACpB,WAAW,IAAI,QAAQ;YACvB,WAAW,IAAI,QAAQ;YACvB,WAAW,IAAI,YAAY,KAAK,WAAW,IAAI,QAAQ;YACvD,WAAW,IAAI,YAAY,KAAK,WAAW,IAAI,QAAQ;cACrD;UACJ,WAAW,WAAW;UACtB;;;AAGJ,aAAO;;;AAGX,UAAQ,WAAW;AAEnB;AACE,+CAA2C,MAAM,QAAQ,IAAI;MAC3D,eAAe,QAAO,SAAS,WAAW,CAAE,WAAW,QAAO,SAAS,UAAU,SAAS;MAC1F,eAAe,QAAO,SAAS,WAAW,CAAE,WAAW,QAAO,SAAS,UAAU,SAAS;;AAE5F,qBAAiB,IAAiB,aAAA,aAAa,mBAAmB,QAAO,WAAmB,QAAA;AAC5F,iBAAa,IAAa,SAAA,aAAa,UAAU,eAAe,QAAO;AACvE,sBAAiB,IAAI,SAAS;AAE9B,YAAQ,IAAI,sBAAsB,QAAO,SAAS,UAAU,MAAM,YAAY;AAE9E,YAAQ,IAAI,sBAAsB,QAAO,SAAS,UAAU,MAAM,YAAY;AAC9E,WAAO;;AAET,UAAQ,OAAO;;ACnFf,sBAAA,WAAA;AAAA,UAAQ,OAAO;AACb,QAAI,CAAC;AAAK,aAAO;AACjB,qBAAiB;AACjB,uBAAmB;AAEjB,wBAAkB,KAAK,UAAU,KAAK,OAAQ,EAAE,SAAS;AACzD,yBAAmB,KAAK,UAAU,KAAK,OAAQ,EAAE,SAAS;AAC1D,mBAAa,KAAK,UAAU,KAAK,OAAQ,EAAE,SAAS;AACpD,UAAI,QAAQ,aAAa,cAAe,UAAU,SAAS,IAAI,KAAK,SAAS,KAAO,WAAW,SAAS,IAAI,KAAK,SAAS;AAAI,iBAAS,KAAK;eACnI,QAAQ,aAAc,UAAU,SAAS,IAAI,KAAK,SAAS;AAAI,iBAAS,KAAK;eAC7E,QAAQ,cAAe,WAAW,SAAS,IAAI,KAAK,SAAS;AAAI,iBAAS,KAAK;AAGxF,2BAAqB,KAAK,UAAU,KAAK,OAAQ,EAAE,SAAS;AAC5D,4BAAsB,KAAK,UAAU,KAAK,OAAQ,EAAE,SAAS;AAC7D,UAAI,gBAAgB;AAAe,iBAAS,KAAK,WAAY,aAAa,SAAS,IAAI,cAAc,SAAS,IAAK,SAAS;;AAE9H,WAAO;;AAGT,UAAQ,OAAO;AACb,QAAI,CAAC;AAAK,aAAO;AACjB,qBAAiB;AACjB,wBAAmB;AAIjB,UAAI,MAAK,QAAQ,MAAK,KAAK,SAAS;AAClC,0BAAkB,MAAK,KAAK,IAAI,KAAK,MAAK,KAAK,KAAK;AACpD,YAAI,KAAK,IAAI,aAAa;AAAI,mBAAS,KAAK;;AACvC,mBAAS,KAAK,UAAU,YAAY,IAAI,UAAU;AACvD,y
BAAiB,KAAK,IAAI,MAAK,KAAK,KAAK,KAAK,MAAK,KAAK,KAAK,MAAM,KAAK,IAAI,MAAK,KAAK,KAAK,KAAK,MAAK,KAAK,KAAK;AAC/G,YAAI,WAAW;AAAK,mBAAS,KAAK;AAClC,0BAAkB,KAAK,IAAI,MAAK,KAAK,KAAK,KAAK,MAAK,KAAK,KAAK,MAAM,KAAK,IAAI,MAAK,KAAK,KAAK,KAAK,MAAK,KAAK,KAAK;AAChH,YAAI,YAAY;AAAK,mBAAS,KAAK;AACnC,0BAAkB,KAAK,IAAI,KAAK,MAAM,KAAK,IAAI,MAAK,KAAK,IAAI,KAAK,MAAK,KAAK,IAAI,MAAM,KAAK,IAAI,MAAK,KAAK,IAAI,KAAK,MAAK,KAAK,KAAK;AACjI,YAAI,YAAY;AAAI,mBAAS,KAAK,SAAS,KAAK,MAAM;AACtD,0BAAkB,MAAK,KAAK,KAAK;AACjC,YAAI,KAAK,IAAI,aAAa;AAAI,mBAAS,KAAK,QAAQ,YAAY,IAAI,OAAO;;;AAG/E,WAAO;;AAGT,UAAQ,OAAO;AACb,QAAI,CAAC;AAAK,aAAO;AACjB,qBAAiB;AACjB,wBAAmB;AACjB,sBAAgB;AAChB,kCAA4B,OAAO,QAAQ,MAAK;AAC9C,YAAI,WAAW;AAAY,kBAAQ,KAAK,CAAE,MAAM,OAAO,eAAe,UAAU,IAAI;;AAEtF,UAAI,WAAW,QAAQ,SAAS;AAC9B,wBAAgB,QAAQ,OAAO,aAAc,KAAK,SAAS,KAAK,EAAE,SAAS,KAAK,OAAO;AACvF,wBAAgB,QAAQ,OAAO,aAAc,KAAK,SAAS,KAAK,EAAE,SAAS,KAAK,OAAO;AACvF,iBAAS,KAAK,GAAG,QAAQ,gBAAgB,QAAQ;;;AAGrD,WAAO;;;AC1DT,sBAAA,WAAA;AAOA,uBAAqB;AACnB,qBAAiB;AACf,gBAAU,IAAI,OAAO,QAAQ,SAAS,gBAAgB;AACtD,aAAO,QAAQ,GAAG;AAChB,mBAAW,QAAQ;AACnB,eAAO;;;AAIX,qBAAiB;AACf,qBAAe,GAAG,aAAa;AAC/B,SAAG,aAAa,QAAQ;AACxB,SAAG,cAAc;AAEjB,UAAI,CAAC,GAAG,mBAAmB,QAAQ,GAAG;AACpC,cAAM,IAAI,MAAM,6BAA6B,GAAG,iBAAiB;;AAEnE,aAAO;;AAGT,SAAK,UAAU;AACf,SAAK,YAAY;AAEjB,iBAAa,SAAS,cAAc,GAAG;AACvC,iBAAa,SAAS,gBAAgB,GAAG;AAEzC,SAAK,KAAK,GAAG;AACb,OAAG,aAAa,KAAK,IAAI;AACzB,OAAG,aAAa,KAAK,IAAI;AACzB,OAAG,YAAY,KAAK;AAEpB,QAAI,CAAC,GAAG,oBAAoB,KAAK,IAAI,GAAG;AACtC,YAAM,IAAI,MAAM,0BAA0B,GAAG,kBAAkB,KAAK;;AAGtE,OAAG,WAAW,KAAK;AAGnB,aAAS,cAAc,aAAa,KAAK;AACzC,oBAAgB,KAAK;AACnB,WAAK,UAAU,KAAK,GAAG,kBAAkB,KAAK,IAAI;;AAIpD,aAAS,cAAc,WAAW,KAAK;AACvC,aAAS,gBAAgB,WAAW,KAAK;AACzC,oBAAgB,KAAK;AACnB,WAAK,QAAQ,KAAK,GAAG,mBAAmB,KAAK,IAAI;;;AAIrD,2BAAyB;AACvB,QAAI,CAAC;AAAQ,eAAS;AACtB,qBAAiB;AACjB,yBAAqB;AACrB,uBAAmB;AACnB,mCAA+B;AAC/B,4BAAwB,CAAC,MAAM;AAC/B,uBAAmB;AACnB,iBAAa;AACb,kBAAc;AACd,wBAAoB;AACpB,0BAAsB;AACtB,oBAAgB,OAAO,UAAU,SAAS,cAAc;AAGxD,gCAA4B;AAE5B,eAAW,QAAQ,WAAW;AAC9B,QAAI,CAAC;AAAI,YAAM,IAAI,MAAM;AAEzB,SAAK,YAAY;AAEf,mBAAa,MAAM,UAAU,MAAM,KAAK,WAAW;AACnD,qBAAe,QAAQ;AAEvB,mBAAa,KAAK,CAAE,MAAM,QAAQ;;AAGpC,SAAK,QAAQ;AACX,qBAAe;;AAGjB,SAAK,QAAQ;AACX,cAAQ,OAAM,OAAO,OAAM;AAC3B,mBAAa;AAGb,UAAI,CAAC;AAAgB,yBAAiB,GAAG;AACzC,SAAG,YAAY,GAAG,YAAY;AAC9B,SAAG,cAAc,GAAG,YAAY,GAAG,gBAAgB,GAAG;AACtD,SAAG,cAAc,GAAG,YAAY,GAAG,gBAAgB,GAAG;AACtD,SAAG,cAAc,GAAG,YAAY,GAAG,oBAAoB,GAAG;AAC1D,SAAG,cAAc,GAAG,YAAY,GAAG,oBAAoB,GAAG;AAC1D,SAAG,WAAW,GAAG,YAAY,GAAG,GAAG,MAAM,GAAG,MAAM,GAAG,eAAe;AAGpE,UAAI,aAAa,WAAW;AAE1B;AACA,eAAO;;AAGT,mBAAa,GAAG,IAAI,aAAa,QAAQ;AACvC,uBAAgB,MAAM,aAAa,SAAS;AAC5C,kBAAU,aAAa;AACvB,UAAE,KAAK,MAAM,MAAM,EAAE,QAAQ;;AAG/B,aAAO;;AAGT,oBAAgB;AAEd,UAAI,UAAU,UAAU,WAAW;AAAW;;AAE9C,cAAQ,QAAQ;AAChB,eAAS;AACT,cAAQ,SAAS;AACjB,gBAAU;AAGV,UAAI,CAAC;AAEH,yBAAiB,IAAI,aAAa;UAChC;UAAI;UAAI;UAAG;UAAG;UAAG;UAAI;UAAG;UAAG;UAAI;UAAG;UAAG;UACrC;UAAI;UAAG;UAAG;UAAG;UAAG;UAAI;UAAG;UAAG;UAAG;UAAG;UAAG;;AAGpC,wBAAgB,GAAG,gBAAgB,GAAG,WAAW,GAAG,cAAc;AACnE,WAAG,WAAW,GAAG,cAAc,UAAU,GAAG;AAI5C,WAAG,YAAY,GAAG,gCAAgC;;AAGpD,SAAG,SAAS,GAAG,GAAG,QAAQ;AAG1B,0BAAoB,CAAC,MAAM;;AAG7B,gCAA4B;AAC1B,wBAAkB,SAAS,kBAAkB,UAC1C,0BAA0B,QAAQ;AAErC,aAAO,kBAAkB;;AAG3B,sCAAkC;AAChC,kBAAY,GAAG;AACf,SAAG,gBAAgB,GAAG,aAAa;AAEnC,2BAAqB,GAAG;AACxB,SAAG,iBAAiB,GAAG,cAAc;AAErC,sBAAgB,GAAG;AACnB,SAAG,YAAY,GAAG,YAAY;AAC9B,SAAG,WAAW,GAAG,YAAY,GAAG,GAAG,MAAM,OAAO,QAAQ,GAAG,GAAG,MAAM,GAAG,eAAe;AAEtF,SAAG,cAAc,GAAG,YAAY,GAAG,oBAAoB,GAAG;AAC1D,SAAG,cAAc,GAAG,YAAY,GAAG,oBAAoB,GAAG;AAC1D,SAAG,cAAc,GAAG,YAAY,GAAG,gBAAgB,GAAG;AACtD,SAAG,cAAc,GAAG,YAAY,GAAG,gBAAgB,GAAG;AAEtD,SAAG,qBAAqB,GAAG,aAAa,GAAG,
mBAAmB,GAAG,YAAY,SAAS;AAEtF,SAAG,YAAY,GAAG,YAAY;AAC9B,SAAG,gBAAgB,GAAG,aAAa;AAEnC,aAAO,CAAE,KAAK;;AAGhB,kBAAc;AACZ,mBAAa;AACb,mBAAa;AACb,kBAAY;AAGZ,UAAI,eAAe;AAEjB,iBAAS;;AAGT,iBAAS,oBAAoB,0BAA0B;;AAEzD;AAGA,UAAI,gBAAgB,CAAE,SAAQ,KAAK;AAGjC,iBAAS;AACT,gBAAQ,aAAa,MAAM;;AAG3B,mCAA4B,4BAA2B,KAAK;AAC5D,iBAAS,oBAAoB,0BAA0B;;AAIzD,SAAG,YAAY,GAAG,YAAY;AAC9B,SAAG,gBAAgB,GAAG,aAAa;AAEnC,SAAG,UAAU,gBAAgB,QAAQ,OAAQ,QAAQ,KAAK;AAC1D,SAAG,WAAW,GAAG,WAAW,GAAG;;AAGjC,2BAAuB;AACrB,UAAI,oBAAoB;AACtB,0BAAkB,oBAAoB;AACtC,WAAG,WAAW,gBAAgB;AAC9B,eAAO;;AAIT,wBAAkB,IAAI,aAAa,IAAI,OAAO,iBAAiB;AAE/D,wBAAkB,aAAa;AAC/B,uBAAiB,IAAI;AACrB,SAAG,wBAAwB,gBAAgB,UAAU;AACrD,SAAG,oBAAoB,gBAAgB,UAAU,KAAK,GAAG,GAAG,OAAO,OAAO,UAAU,IAAI;AACxF,SAAG,wBAAwB,gBAAgB,UAAU;AACrD,SAAG,oBAAoB,gBAAgB,UAAU,IAAI,GAAG,GAAG,OAAO,OAAO,UAAU,IAAI;AAEvF,0BAAoB,kBAAkB;AACtC,aAAO;;AAGT,eAAW,CAAE,cAAc;AAE3B,iBAAa;AACb,WAAO,kBAAkB;MACvB;MACA;MACA;MACA;MACA;MAEA;MACA;MACA;MACA;MACA,KAAK;AAEP,WAAO,oBAAoB;MACzB;MACA;MACA;MAEA;MACA;MACA;MACA,KAAK;AAEP,kBAAc;AAKd,YAAQ,cAAc;AAEpB,gBAAU,IAAI,aAAa;AAC3B,QAAE,MAAM;AACR,QAAE,MAAM;AACR,QAAE,OAAO;AACT,QAAE,OAAO;AAGT,qBAAgB,EAAE,QAAQ,KAAK,EAAE,OAAO,KAAK,EAAE,OAAO,KAAK,EAAE,QAAQ,KAAK,EAAE,QAAQ,KAAK,EAAE,QAAQ,KAAK,EAAE,QAAQ,KAAK,EAAE,QAAQ,IAC7H,QAAQ,YAAY,OAAO,gBAC3B,QAAQ,YAAY,OAAO;AAE/B,sBAAgB,eAAe;AAC/B,SAAG,WAAW,QAAQ,QAAQ,GAAG;AACjC;;AAGF,YAAQ,YAAY,SAAS;AAC7B,YAAQ,YAAY,OAAO,aAAa;MACtC;MACA;MACA;MACA;MAEA;MACA;MACA;MACA;MACA;MACA;MACA;MACA,KAAK;AACP,YAAQ,YAAY,OAAO,gBAAgB;MACzC;MACA;MACA;MACA;MAEA;MACA;MACA;MACA;MACA;MACA;MACA;MACA,KAAK;AAEP,YAAQ,aAAa;AACnB,gBAAW,eAAc,KAAK;AAC9B,cAAQ,YAAY;QAClB;QAAG;QAAG;QAAG;QAAG;QACZ;QAAG;QAAG;QAAG;QAAG;QACZ;QAAG;QAAG;QAAG;QAAG;QACZ;QAAG;QAAG;QAAG;QAAG;;;AAIhB,YAAQ,aAAa;AACnB,gBAAW,WAAU,KAAK,IAAI,IAAI;AAClC,gBAAY,KAAI,KAAK;AACrB,cAAQ,YAAY;QAClB;QAAG;QAAG;QAAG;QAAG;QACZ;QAAG;QAAG;QAAG;QAAG;QACZ;QAAG;QAAG;QAAG;QAAG;QACZ;QAAG;QAAG;QAAG;QAAG;;;AAIhB,YAAQ,aAAa;AACnB,cAAQ,WAAW;;AAGrB,YAAQ,WAAW;AACjB,gBAAW,WAAU,KAAK;AAC1B,gBAAU,OAAQ,KAAI;AAEtB,cAAQ,YAAY;QAClB;QAAG;QAAG;QAAG;QAAG;QACZ;QAAG;QAAG;QAAG;QAAG;QACZ;QAAG;QAAG;QAAG;QAAG;QACZ;QAAG;QAAG;QAAG;QAAG;;;AAIhB,YAAQ,WAAW;AACjB,cAAQ,SAAS;;AAGnB,YAAQ,MAAM;AACZ,iBAAY,aAAY,KAAK,MAAM,KAAK;AACxC,kBAAY,KAAK,IAAI;AACrB,kBAAY,KAAK,IAAI;AACrB,mBAAa;AACb,mBAAa;AACb,mBAAa;AAEb,cAAQ,YAAY;QAClB,OAAO,MAAO,KAAI,QAAQ,MAAO,CAAC;QAAO,OAAO,MAAO,CAAC,OAAQ,MAAO,CAAC;QAAO,OAAO,MAAO,CAAC,OAAQ,MAAO,KAAI;QAAO;QAAG;QAC3H,OAAO,MAAO,CAAC,OAAQ,MAAO;QAAQ,OAAO,MAAO,KAAI,QAAQ,MAAO;QAAQ,OAAO,MAAO,CAAC,OAAQ,MAAO;QAAS;QAAG;QACzH,OAAO,MAAO,CAAC,OAAQ,MAAO,CAAE,KAAI;QAAQ,OAAO,MAAO,CAAC,OAAQ,MAAO;QAAO,OAAO,MAAO,KAAI,QAAQ,MAAO;QAAO;QAAG;QAC5H;QAAG;QAAG;QAAG;QAAG;;;AAIhB,YAAQ,sBAAsB;AAC5B,cAAQ,YAAY;QAClB;QAAW;QAAW;QAAW;QAAG;QACpC;QAAW;QAAW;QAAW;QAAG;QACpC;QAAW;QAAW;QAAW;QAAG;QACpC;QAAG;QAAG;QAAG;QAAG;;;AAIhB,YAAQ,QAAQ;AACd,cAAQ,YAAY;QAClB;QAAO;QAAW;QAAY;QAAG;QACjC;QAAO;QAAW;QAAY;QAAG;QACjC;QAAO;QAAW;QAAY;QAAG;QACjC;QAAG;QAAG;QAAG;QAAG;;;AAIhB,YAAQ,UAAU;AAChB,cAAQ,YAAY;QAClB;QAAoB;QAAqB;QAAqB;QAAG;QACjE;QAAuB;QAAoB;QAAqB;QAAG;QACnE;QAAqB;QAAsB;QAAqB;QAAG;QACnE;QAAG;QAAG;QAAG;QAAG;;;AAIhB,YAAQ,iBAAiB;AACvB,cAAQ,YAAY;QAClB;QAAoB;QAAoB;QAAsB;QAAG;QACjE;QAAqB;QAAoB;QAAqB;QAAG;QACjE;QAAoB;QAAqB;QAAoB;QAAG;QAChE;QAAG;QAAG;QAAG;QAAG;;;AAIhB,YAAQ,aAAa;AACnB,cAAQ,YAAY;QAClB;QAAoB;QAAqB;QAAsB;QAAG;QAClE;QAAsB;QAAoB;QAAsB;QAAG;QACnE;QAAsB;QAAqB;QAAoB;QAAG;QAClE;QAAG;QAAG;QAAG;QAAG;;;AAIhB,YAAQ,cAAc;AACpB,cAAQ,YAAY;QAClB;QAAoB;QAAqB;QAAsB;QAAG;QAClE;QAAqB;QAAoB;QAAsB;QAAG;QAClE;QAAoB;QAAqB;QAAmB;QAAG;QAC/D;QAAG;QAAG;QAAG;QAAG;;;AAIhB,YAAQ,WAAW;AACjB,cAAQ,YAAY;QAClB;QAAO;QAAQ;QAAQ;QAAG;QAC1B;QA
AI,OAAO,QAAQ;AACjC,WAAO,KAAK,OAAO,IAAI,KAAK;;AAG9B,SAAO,SAAS,QAAQ;;AC3G1B;;;;;;;;;;;;;;;;AAkBA;AAAA,AAAA;AACE,YAAA,UAAA,aAAA,KAAA;AACA,YAAA,UAAA,WAAA,KAAA;AACA,YAAA,UAAA,UAAA,KAAA;AACA,YAAA,UAAA,YAAA,KAAA;AACA,YAAA,UAAA,eAAA,KAAA;GALU,YAAA,YAAQ;AASpB;AAAA,AAAA;AACE,qBAAA,mBAAA,YAAA,KAAA;AACA,qBAAA,mBAAA,UAAA,KAAA;AACA,qBAAA,mBAAA,WAAA,KAAA;AACA,qBAAA,mBAAA,WAAA,KAAA;GAJU,qBAAA,qBAAiB;AC3B7B;;;;;;;;;;;;;;;;AAuBA;AAMA;AACE,oBAAkB,SAAQ,KAAK,MAAM,cAAc,MAAiB;IAClE;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;;;AAIJ;AAKE,SAAO,QAAQ,mBAAS,SAAS;AACjC,SAAO,GAAG,GAAG,MAAM,0BAA0B;AAE7C,MAAI,EAAE,UAAU,aAAa,EAAE,UAAU;AACvC,UAAM,IAAI,MACN;;AAGN,SAAO,YAAY,YAAY,cAAc;AAC7C,cAAY,SAAQ,UAAU,IAAI,EAAE,QAAQ;AAC5C,cAAY,SAAQ,UAAU,IAAI,EAAE,QAAQ;AAE5C,eAAa;AACb,MAAI,QAAQ;AACV,qBAAiB,SAAQ,UAAU,IAAI,KAAK;AAC5C,QAAI,SAAS,MAAM,WAAW;AAC5B,YAAM,IAAI,MACN,uDACQ,SAAS,MAAM;;AAE7B,aAAS,SAAS;;AAEpB,mCAAiC,0BAA0B,OACvD,IACA,SAAQ,UAAU,IAAI,uBAAuB,QAAQ;AACzD,0BACI,kBAAkB;AACtB,MAAI,mBAAmB;AACrB,UAAM,IAAI,MACN,GAAG;;AAIT,kBAAgB,aAAa,EAAE,MAAM,KAAK,EAAE,MAAM;AAClD,mBAAiB,aAAa,EAAE,MAAM,KAAK,EAAE,MAAM;AACnD,mBAAiB,EAAE,MAAM;AAEzB,cAAY,SAAQ,WAAW,CAAC,UAAU,SAAS,WAAW,EAAE;AAChE,gBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAEhD,sBAAoB,IAAI,WAAW,IAAI,WAAW,EAAE,OAAO;AAC3D,sBAAoB,IAAI,WAAW,IAAI,WAAW,EAAE,OAAO;AAE3D,kBACI,KAAK,aAAa,EAAE,MAAM,QAAQ,KAAK,aAAa,EAAE,MAAM,QAC5D,YAAY,YAAY,iBAAiB,QAAQ,0BACjD;AAEJ,SAAO;;AAGF,0BAAwC;EAC7C,YAAY;EACZ,aAAa;EACb,WAAW;EACX,YAAY;;AC1Gd;;;;;;;;;;;;;;;;AAqBM;AACJ;AAEA;AACE,gBACI,SAAQ,KAAK,MAAM,YAAY,MAAiB,CAAC,UAAU;;AAGjE;AAEE,WAAO,mBAAS,SAAS,MAAM;AAC/B,gBAAY,SAAQ,UAAU,IAAI,EAAE,QAAQ;AAC5C,gBAAY,SAAQ,WAAW,EAAE,OAAO,EAAE;AAC1C,kBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAGhD,QAAI,aAAK,cAAc,IAAI,WAAW;AACpC,aAAO;;AAGT,cAAS,KAAK;AACd,WAAO;;AAGT,SAAO,CAAC,YAAY,aAAa,QAAQ,WAAA,YAAW,YAAA;;AC7CtD;;;;;;;;;;;;;;;;AAoBO,kBAAgC,wBAAwB;ACpB/D;;;;;;;;;;;;;;;;AAuBM;AAGJ;AAKA;AACE,gBAAW,SAAQ,KAAK,MAAM,YAAY,MAAiB;MACzD;MACA;MACA;MACA;MACA;MACA;MACA;MACA;;;AAIJ;AAEE,WAAO,mBAAS,UAAU;AAC1B,WAAO,GAAG,KAAK;AACf,gBAAY,SAAQ,UAAU,IAAI,EAAE,QAAQ;AAC5C,gBAAY,SAAQ,UAAU,IAAI,EAAE,QAAQ;AAE5C,uBAAmB,SAAS,OAAO,QAAQ,EAAE;AAC7C,qBAAiB,qBAAa,2BAA2B,EAAE,OAAO,EAAE;AACpE,gBAAY,SAAQ,WAAW,UAAU;AAGzC,QAAI,aAAK,cAAc,cAAc;AACnC,aAAO;;AAGT,wBAAoB,IAAI,WAAW,IAAI,WAAW,EAAE,OAAO;AAC3D,wBAAoB,IAAI,WAAW,IAAI,WAAW,EAAE,OAAO;AAC3D,kBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAChD,wBAAmB,MAAM,UACrB,KAAK,aAAa,EAAE,MAAM,QAAQ,KAAK,aAAa,EAAE,MAAM,QAC5D,SAAS,EAAE,QAAQ;AAGvB,QAAI,2BAAyB,EAAE,UAAU;AACvC;AACA,aAAO;;AAGT,2BAAuB,qBAAa,iBAAiB,EAAE,OAAO;AAC9D,2BAAuB,qBAAa,iBAAiB,EAAE,OAAO;AAC9D,4BAAwB,eAAe,MAAM,UAAU,MAAM;AAC7D,4BAAwB,eAAe,MAAM,UAAU,MAAM;AAC7D,QAAI,mBAAmB;AACrB;AACA,aAAO;;AAEP,YAAM,IAAI,MACN,0DACiB,EAAE,SAAS;;;AAIpC,SAAO,CAAC,YAAY,aAAa,QAAQ,WAAA,YAAW,YAAA;;ACvFtD;;;;;;;;;;;;;;;;AAqBA,8BAA8B;AAEvB,kBACH,yBAAyB,KAAK;ACxBlC;;;;;;;;;;;;;;;;AAuBA;AAIA;AACE,aAAW,SAAQ,KAAK,MAAM,MAAM,MAAiB;IACnD;IACA;IACA;IACA;;;AAIJ;AACE,SAAO,QAAQ,qBAAW;AAC1B,cAAY,SAAQ,WAAW,OAAO,GAAG,OAAO,OAAO,GAAG;AAG1D,MAAI,aAAK,cAAc,IAAI,WAAW;AACpC,WAAO;;AAGT,mBAAiB,OAAO,IAAI,OAAK,SAAQ,UAAU,IAAI,EAAE,QAAQ;AACjE,wBAAsB,IAAI,WAAW,IAAI,WAAW,UAAU;AAC9D,gBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAChD,WAAS,eAAe,SAAS,QAAQ,SAAS,IAAI,QAAQ;AAE9D,SAAO;;AAGF,mBAAiC;EACtC,YAAY;EACZ,aAAa;EACb;EACA,YAAY;;ACzDd;;;;;;;;;;;;;;;;AAsBM;AAEJ,SAAO,SAAS,IAAI,qBAAW;AAC/B,cAAY,SAAQ,WAAW,EAAE,OAAO,EAAE;AAC1C,iBAAe,SAAQ,mBAAmB;AAC1C,kBAAgB,SAAQ,mBAAmB;AAC3C,UAAQ,IAAI;AACZ,SAAO;;AAGF,uBAAqC;EAC1C,YAAY;EACZ,aAAa;EACb,YAAY;;ACnCd;;;;;;;;;;;;;;;;AAwBA;AAIA;AACE,kBAAgB,SAAQ,KAAK,MAAM,WAAW,MAAiB;IAC7D;IACA;IACA;IACA;IACA;IACA;IACA;;;AAIE;AAIJ,SAAO,QAAQ,mBAAS,SAAS;
AAGjC,+BAA6B,kBAAkB,OAAO,EAAE,OAAO,MAAM;AAErE,mBAAiB;AACjB,eAAa,GAAG,IAAI,KAAK,QAAQ;AAC/B,QAAI,KAAK,OAAO;AACd,mBAAa;;;AAGjB,mBAAiB,iBAAgB,OAAO,EAAE,OAAO,MAAM;AACvD,YAAU;IACR,QAAQ,OAAO,EAAE;IACjB,OAAO;IACP,OAAO,OAAO,EAAE;;AAGlB,MAAI;AACF,mBAAe,SAAS,CAAC,QAAQ,SAAA;AACjC,WAAO,QAAQ;AACf,WAAO;;AAGT,cAAY,SAAQ,WAAW,UAAU,EAAE;AAC3C,cAAY,SAAQ,UAAU,IAAI,EAAE,QAAQ;AAC5C,gBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAChD,oBAAkB,IAAI,WAAW,IAAI,WAAW,MAAM;AACtD,sBAAoB,IAAI,WAAW,IAAI,WAAW,EAAE,OAAO;AAE3D,gBACI,KAAK,aAAa,EAAE,MAAM,QAAQ,SAAS,EAAE,QAAQ,OAAO,WAC5D,KAAK;AACT,SAAO;;AAGT;AACE,mBAAiB,IAAI,MAAM,QAAQ;AACnC,eAAa,GAAG,IAAI,SAAS,QAAQ;AACnC,aAAS,KAAK,QAAQ,KAAK;;AAE7B,SAAO;;AAGT;AAEE,mBAA2B;AAC3B,kBAA0B;AAC1B,eAAa,GAAG,IAAI,MAAM,QAAQ,EAAE;AAClC,QAAI,MAAM,OAAO;AACf,eAAS,KAAK,MAAM;;AAEtB,QAAI,MAAM,KAAK,QAAQ;AACrB,cAAQ,KAAK,KAAK;;;AAGtB,eAAa,GAAG,IAAI,QAAQ,QAAQ,EAAE;AACpC,oBAAgB;AAChB,iBAAa,GAAG,IAAI,QAAQ,QAAQ,EAAE;AACpC,UAAI,QAAQ,MAAM,KACb,eAAc,MAAM,QAAQ,aAAa,QAAQ;AACpD,oBAAY;;;AAGhB,YAAQ,aAAa;;AAEvB,SAAO,CAAC,UAAU;;AAGb,wBAAsC;EAC3C,YAAY;EACZ,aAAa;EACb,YAAY;EACZ,WAAW;;ACrHb;;;;;;;;;;;;;;;;AA6BM;AAOJ,iBAAe,EAAE;AACjB,gBAAc,EAAE,MAAM;AAEtB,uBAAqB,aAAK,eAAe,MAAM;AAC/C,aAAW;AACX,uBAAqB,qBAAa,mBAAmB,MAAM;AAC3D,oBAAkB;AAClB,2BAAyB;AACzB,MAAI,gBAAgB;AAClB,qBAA2B,IAAI,MAAM;AACrC,iBAAa,GAAG,IAAI,SAAS,QAAQ;AACnC,eAAS,KAAK,OAAO,aAAa;;AAGpC,WAAO,qBAAa,iBAAiB,KAAK,QAAQ;AAClD,kBACI,WAAU,CAAC,QAAQ,CAAC,IAAI,OAAO,CAAC,MAAM,eAAe,SAAA;AAEzD,gBAAY,SAAQ,UAAU,IAAI,EAAE,QAAQ;AAC5C,yBAAqB,SAAQ,UAAU,IAAI,YAAY,QAAQ;AAC/D,QAAI,iBAAiB;AACnB,2BAAqB;;;AAIzB,SAAO,CAAC,YAAY,aAAa,cAAc,MAAM;;AC7DvD;;;;;;;;;;;;;;;;AAwBA;AAIA;AACE,cAAW,SAAQ,KAAK,MAAM,QAAQ,MAAiB;IACrD;IACA;IACA;IACA;IACA;;;AAIJ;AAEE,SAAO,mBAAS,QAAQ,SAAS;AACjC,SAAO,QAAQ;AACf,SAAO,KAAK;AACZ,cAAY,SAAQ,UAAU,IAAI,EAAE,QAAQ;AAC5C,gBAAc;AACd,cAAY;AAEZ,SAAO,YAAY,MAAM,sBACrB,wBAAwB,GAAG,MAAM;AAErC,MAAI;AACF,yBAAqB,SAAQ,UAAU,IAAI,WAAW,QAAQ;AAC9D,QAAI,iBAAiB;AAGnB,cAAQ;AACR,gBAAU;;;AAId,mBAAiB,MAAM,MAAM,MAAM,GAAG;AACtC,cAAY,SAAQ,WAAW,UAAU;AACzC,gBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAChD,oBAAkB,aAAK,cAAc,IAAI;AACzC,oBAAkB,MAAM,MAAM,KAAK;AACnC,YAAS,SAAS,SAAS,MAAM,QAAQ,WAAW,WAAW;AAE/D,MAAI;AAEF,aAAQ,YAAY,WAAW;;AAGjC,SAAO;;AAGF,qBAAmC;EACxC,YAAY;EACZ,aAAa;EACb,YAAY;EACZ,WAAW;;AC/Eb;;;;;;;;;;;;;;;;AAqBA;AAMA;AACE,gBAAc,SAAQ,KAAK,MAAM,SAAS,MAAiB;IACzD;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;;;AAIJ;AAEE,SAAO,QAAQ,OAAO,qBAAW;AAEjC,YAAU,OAAO;AACjB,cAAY,SAAQ,UAAU,IAAI,EAAE,QAAQ;AAE5C,SAAO,YAAY,SAAS,WAAK,mBAAmB;AACpD,mBAAiB,qBAAa,kBAC1B,EAAE,OAAO,YAAY,SAAS,GAAmB,MAAK;AAE1D,uBAAqB,SAAS;AAC9B,sBAAoB,SAAS;AAC7B,iBAAe,SAAS,QAAQ;AAChC,mBAAiB,SAAS,QAAQ;AAClC,oBAAkB,SAAS,QAAQ;AACnC,kBAAgB,SAAS,QAAQ;AACjC,uBAAqB,SAAS;AAC9B,sBAAoB,SAAS;AAC7B,mBAAiB,SAAS;AAE1B,MAAI,SAAS,eAAe;AAC1B,UAAM,IAAI,MACN,6CACG,SAAS;;AAGlB,MAAI,SAAS,kBAAkB,KAAK,SAAS,mBAAmB;AAC9D,UAAM,IAAI,MACN,0EACQ,SAAS,mBAAmB,SAAS;;AAGnD,cAAY,SAAQ,WAAW,SAAS,UAAU;AAClD,gBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAEhD,cACI,KAAK,EAAE,MAAM,IAAI,EAAE,MAAM,IAAI,EAAE,MAAM,IAAI,cAAc,aACvD,QAAQ,UAAU,WAAW,SAAS,cAAc,aAAa,UACjE;AACJ,SAAO;;AAGF,sBAAoC;EACzC,YAAY;EACZ,aAAa;EACb,WAAW;EACX,YAAY;;AC7Fd;;;;;;;;;;;;;;;;AAqBM;AAKJ,SAAO,QAAQ,SAAS;AACxB,SAAO,KAAK;AACZ,SAAO,SAAS;AAEhB,gBAAc,aAAK,cAAc,EAAE;AACnC,iBAAe,aAAK,uBAAuB,OAAO;AAElD,eAAK,OACD,UAAU,aAAK,cAAc,SAC7B,MAAM,cAAc,sBAAsB,EAAE;AAGhD,SAAO,CAAC,QAAQ,EAAE,QAAQ,OAAO,QAAQ,OAAO,EAAE;;AAG7C,sBAAoC;EACzC,YAAY;EACZ,aAAa;EACb,YAAY;;AC5Cd;;;;;;;;;;;;;;;;AAuBA;AAKA;AACE,oBAAkB,SAAQ,KAAK,MAAM,aAAa,MAAiB;IACjE;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;;;AAIJ;AAKE,SAAO,QAAQ,mBAAS,SAAS;AACjC,SAAO,GAAG,KAAK;AACf,SAAO,YAAY,cAAc;AA
EjC,MAAI,EAAE,UAAU,aAAa,EAAE,UAAU;AACvC,UAAM,IAAI,MACN;;AAGN,gBAAc,EAAE,MAAM;AACtB,gBAAc,EAAE,MAAM;AAEtB,sBAAoB,aAAa,EAAE,MAAM,QAAQ,KAAK,EAAE,MAAM,QAAQ;AACtE,sBAAoB,aAAa,EAAE,MAAM,QAAQ,KAAK,EAAE,MAAM,QAAQ;AAEtE,sBAAoB,aAAa,EAAE,MAAM,QAAQ,KAAK,EAAE,MAAM,QAAQ;AACtE,sBAAoB,aAAa,EAAE,MAAM,QAAQ,KAAK,EAAE,MAAM,QAAQ;AAEtE,qBAAmB,EAAE,MAAM,MAAM,GAAG;AACpC,qBAAmB,EAAE,MAAM,MAAM,GAAG;AAEpC,oBAAkB,aAAK,cAAc;AACrC,oBAAkB,aAAK,cAAc;AAErC,8BACI,cAAc,aAAa,cAAc,KAAK,cAAc;AAEhE,eAAK,OACD,SAAS,KAAK,SAAS,KAAK,qBAC5B,MAAM,uJAEsB,oBAAoB;AAEpD,4BACI,YAAY,YAAY,EAAE,MAAM,MAAM,GAAG,MAAM,EAAE,MAAM,MAAM,GAAG;AACpE,mBAAiB,kBAAkB,OAAO,CAAC,aAAa;AAExD,eAAK,OACD,gBAAgB,aAChB,MAAM,kCAAkC,qBACjC,uCAAuC,EAAE,aACzC,EAAE,wBAAwB,6BACV;AAE3B,mBAAiB,aAAa,CAAC,WAAW,aAAa,eACzB,CAAC,WAAW,aAAa;AACvD,mBAAiB,aAAa,CAAC,WAAW,aAAa,eACzB,CAAC,WAAW,aAAa;AAGvD,cAAY,SAAQ,CAAC,QAAQ,CAAC,GAAG,IAAI,SAAA,UAAS,OAAO,CAAC,OAAO;AAC7D,cAAY,SAAQ,CAAC,QAAQ,CAAC,GAAG,IAAI,SAAA,UAAS,OAAO,CAAC,OAAO;AAE7D,gBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAChD,gBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAEhD,kBAAgB,aAAa,IAAI,MAAM,KAAK,IAAI,MAAM;AACtD,mBAAiB,aAAa,IAAI,MAAM,KAAK,IAAI,MAAM;AACvD,mBAAiB,KAAK,IAAI,WAAW;AAErC,cAAY,SAAQ,WAAW,CAAC,UAAU,SAAS,WAAW,IAAI;AAClE,gBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAEhD,sBAAoB,IAAI,WAAW,IAAI,WAAW,IAAI,OAAO;AAC7D,sBAAoB,IAAI,WAAW,IAAI,WAAW,IAAI,OAAO;AAE7D,kBACI,OAAO,aAAa,IAAI,MAAM,QAAQ,OAAO,aAC7C,IAAI,MAAM,QAAQ,YAAY,YAAY;AAE9C,MAAI,QAAQ;AACZ,SAAO;;AAGF,0BAAwC;EAC7C,YAAY;EACZ,aAAa;EACb,WAAW;EACX,YAAY;;AC7Hd;;;;;;;;;;;;;;;;AAsBM;AAGJ,SAAO,SAAS,IAAI,QAAQ,QAAQ,qBAAW;AAC/C,cAAY,SAAQ,WAAW,EAAE,OAAO;AACxC,iBAAe,SAAQ,mBAAmB;AAC1C,kBAAgB,SAAQ,mBAAmB;AAC3C,UAAQ,IAAI;AACZ,SAAO;;AAGF,mBAAiC;EACtC,YAAY;EACZ,aAAa;EACb,YAAY;;ACpCd;;;;;;;;;;;;;;;;AAqBA;AAEA;AACE,aAAW,SAAQ,KAAK,MAAM,aAAa,MAAiB;IAC1D;IACA;IACA;IACA;;;AAIJ;AAKE,SAAO,QAAQ,mBAAS,SAAS;AACjC,SAAO,KAAK;AACZ,SAAO,cAAc,gBAAgB;AACrC,cAAY,SAAQ,UAAU,IAAI,EAAE,QAAQ;AAC5C,cAAY,SAAQ,WAAW,EAAE,OAAO,EAAE;AAC1C,gBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAChD,WAAS,KAAK,cAAc,cAAc;AAC1C,SAAO;;AAGF,0BAAwC;EAC7C,YAAY;EACZ,aAAa;EACb,WAAW;EACX,YAAY;;ACnDd;;;;;;;;;;;;;;;;AAqBA;AAEE,SAAO,QAAQ,qBAAW;AAE1B,eAAa,aAAK,eAAe,KAAK,MAAM,MAAM,OAAO,GAAG,OAAO;AAEnE,mBAAiB,qBAAa,gBAAgB,OAAO,IAAI,OAAK,EAAE,QAAQ;AAExE,cAAY,SAAQ,WAAW,UAAU,OAAO,GAAG;AAEnD,MAAI,aAAK,cAAc,cAAc;AACnC,WAAO;;AAIT,kBAAgB,OAAO,OAAO,OAAK,aAAK,cAAc,EAAE,SAAS;AACjE,MAAI,QAAQ,WAAW;AACrB,WAAO,QAAQ;;AAGjB,iBAAe,QAAQ,IAAI,OAAK,EAAE;AAClC,uBAAa,uBAAuB,QAAQ;AAE5C,mBAAiB,aAAK,cAAc,QAAQ,GAAG,MAAM,MAAM,GAAG;AAC9D,qBAAmB;AACnB,oBAAkB,QAAQ,IAAI;AAC5B,qBAAiB,aAAK,cAAc,MAAM,MAAM,MAAM;AACtD,oBAAgB;AAChB,WAAO;;AAET,iBAAe,QAAQ,IAAI,WAAS,SAAQ,mBAAmB;AAC/D,kBAAgB,SAAQ,mBAAmB;AAC3C,eAAa,GAAG,IAAI,UAAU;AAC5B,oBAAgB,IAAI;AACpB,iBAAa,GAAG,IAAI,OAAO,QAAQ;AACjC,uBAAiB,UAAU;AAC3B,uBAAiB,IAAI;AACrB,mBAAa,OAAO,GAAG,SAAS,UAAU,WAAW;AACrD,cAAQ,IAAI,MAAM;AAClB,mBAAa;;;AAGjB,SAAO;;AAGF,qBAAmC;EACxC,YAAY;EACZ,aAAa;EACb,YAAY;;ACrEd;;;;;;;;;;;;;;;;AAqBA;AAQA;AACE,eAAa,SAAQ,KAAK,MAAM,QAAQ,MAAiB;IACvD;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;;;AAIJ;AAEE,SAAO,QAAQ,OAAO,qBAAW;AAEjC,SAAO,GAAG,UAAU;AACpB,cAAY,SAAQ,UAAU,IAAI,EAAE,QAAQ;AAC5C,mBAAiB,SAAQ,UAAU,IAAI,OAAO,QAAQ;AAEtD,SAAO,SAAS,WAAW,WAAK,iBAAiB,cAAc;AAC/D,sBAAoB,qBAAa,wBAAwB;AACzD,mBAAiB,qBAAa,kBACzB,EAAe,OAAQ,OAAoB,OAAO,SAAS,WAC5D,MAAK,iBAAiB,OAAO;AAEjC,uBAAqB,SAAS;AAC9B,sBAAoB,SAAS;AAC7B,iBAAe,SAAS,QAAQ;AAChC,mBAAiB,SAAS,QAAQ;AAClC,oBAAkB,SAAS,QAAQ;AACnC,kBAAgB,SAAS,QAAQ;AACjC,yBAAuB,SAAS;AAChC,wBAAsB,SAAS;AAC/B,uBAAqB,SAAS;AAC9B,sBAAoB,SAAS;AAC7B,wBAAsB,SAAS;AAC/B,yBAAuB,SAAS;AAChC,oBAAkB,SAAS,QAAQ,SAAS,SAAS,IAAI;AA
EzD,MAAI,SAAS,eAAe;AAC1B,UAAM,IAAI,MACN,oDACG,SAAS;;AAGlB,cAAY,SAAQ,WAAW,SAAS,UAAU;AAClD,gBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAChD,aACI,KAAK,EAAE,MAAM,IAAI,EAAE,MAAM,IAAI,EAAE,MAAM,IAAI,UAAU,cACnD,aAAa,QAAQ,UAAU,WAAW,SAAS,WACnD,gBAAgB,eAAe,cAAc,aAAa,eAC1D,gBAAgB;AACpB,SAAO;;AAGF,qBAAmC;EACxC,YAAY;EACZ,aAAa;EACb,WAAW;EACX,YAAY;;ACrGd;;;;;;;;;;;;;;;;AAqBA;AAUA;AACE,4BAA0B,SAAQ,KAAK,MAAM,qBAAqB,MAAM;IACtE;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;;;AAIJ;AAKE,SAAO,mBAAS,QAAQ,SAAS;AACjC,SAAO,IAAI,UAAU;AACrB,SAAO,SAAS,WAAK,YAAY,iBAAiB,cAAc;AAEhE,oBAAkB;AAElB,sBAAoB,qBAAa,wBAAwB;AACzD,mBAAiB,qBAAa,kBAC1B,YAAY,OAAO,OAA2C,SAC9D,WAAW,MAAK,iBAAiB,OAAuB;AAC5D,SACE,WACA,cACA,aACA,YACA,UACA,SACA,aACA,WACA,UACA,cACA,eACE;AAEJ,iBAAe,eAAe,IAAI,SAAS,QAAQ;AACnD,kBAAgB,cAAc,IAAI,SAAS,QAAQ;AAEnD,yBAAuB,SAAS,eAAe;AAC/C,oBAAkB,aAAK,eAAe,SAAS;AAC/C,oBAAkB,aAAK,eAAe,GAAG;AACzC,gCAA8B,aAAK,eAAe,OAAO;AACzD,uBAAqB,UAAU;AAC/B,qBAAmB,iBAAiB,UAAU,KAAK,UAAU;AAC7D,qBAAmB,iBAAiB,UAAU,KAAK;AACnD,yBAAuB,iBAAiB,IAAI,UAAU;AACtD,uBAAqB,UAAU;AAC/B,qBAAmB,iBAAiB,UAAU,KAAK,UAAU;AAC7D,qBAAmB,iBAAiB,UAAU,KAAK;AACnD,yBAAuB,iBAAiB,IAAI,UAAU;AAEtD,cAAY,SAAQ,WAAW,SAAS,SAAS;AACjD,gBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAChD,eAAa,SAAQ,UAAU,IAAI,GAAG,QAAQ;AAC9C,mBAAiB,SAAQ,UAAU,IAAI,OAAO,QAAQ;AAEtD,0BACI,MAAM,UAAU,WAAW,cAAc,aAAa,UAAU,SAChE,YAAY,WAAW,UAAU,aAAa,cAAc,aAC5D,QAAQ,SAAS,OAAO,OAAO,OAAO,cAAc,YACpD,YAAY,gBAAgB,cAAc,YAAY,YACtD,gBAAgB;AACpB,SAAO;;AAGF,kCAAgD;EACrD,YAAY;EACZ,aAAa;EACb,WAAW;EACX,YAAY;;AC9Hd;;;;;;;;;;;;;;;;AAqBO,kBAAgC,wBAAwB;ACrB/D;;;;;;;;;;;;;;;;AAwBA;AAAA,AAAA;AACE,uBAAA,qBAAA,cAAA,KAAA;AACA,uBAAA,qBAAA,aAAA,KAAA;GAFG,uBAAA,uBAAmB;AAKxB;AAKA;AACE,sBAAoB,SAAQ,KAAK,MAAM,eAAe,MAAe;IACnE;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;;;AAIJ;AAKE,SAAO,mBAAS,QAAQ,SAAS;AACjC,SAAO,QAAQ,oBAAoB,YAAY;AAC/C,SAAO,eAAO,OAAO,UAAU;AAE/B,mBAAiB,MAAM,MAAM;AAE7B,kCAAgC;AAChC,mBAAiB,CAAC,UAAU,YAAY,WAAW,OAAM,MAAM;AAE/D,mBAAiB,SAAQ,UAAU,IAAI,OAAM;AAC7C;AACA,MAAI,OAAM,UAAU;AAClB,iBAAa,MAAK,CAAC,SAAA,UAAS,QAAQ,CAAC,GAAG,SAAQ,OAAO,CAAC,OAAO;AAC/D,iBAAa,SAAQ,UAAU,IAAI,WAAW;;AAGhD,mBAAiB,WAAW;AAC5B,kBAAgB,SAAQ,UAAU,IAAI,MAAM,QAAQ;AACpD,mBAAiB,SAAQ,UAAU,IAAI,OAAO,QAAQ;AAEtD,cAAY,SAAQ,WAAW,UAAU;AACzC,gBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAEhD,2BAAyB,IAAI,WAAW,IAAI,WAAW,OAAM,OAAO;AAEpE,oBACI,UAAU,SAAS,UAAU,UAAU,kBAAkB,YACzD,WACA,oBAAoB,SACpB,oBAAoB;AAExB,MAAI,cAAc;AAChB,aAAQ,YAAY,WAAW;;AAGjC,SAAO;;AAGF,4BAA0C;EAC/C,YAAY;EACZ,aAAa;EACb,WAAW;EACX,YAAY;;AChGd;;;;;;;;;;;;;;;;AAyBA;AAGA;AACE,eAAa,SAAQ,KAAK,MAAM,QAAQ,MAAiB;IACvD;IACA;IACA;IACA;IACA;IACA;;;AAIE;AAGJ,SAAO,QAAQ,mBAAS,SAAS;AACjC,SAAO,KAAK;AACZ,SAAO,MAAM,WAAW,qBAAW;AACnC,gBAAc,EAAE,MAAM;AAEtB,eAAK,OAAO,EAAE,UAAU,aAAa,EAAE,UAAU,SAC/C,MAAM,2BAA2B,EAAE;AAErC,sBAAoB,qBAAa,mBAAmB,CAAC,OAAO;AAC5D,kBAAgB;AAChB,MAAI,gBAAgB;AAClB,gBAAY,WAAU,CAAC,QAAQ,CAAC,IAAI,OAAO,CAAC,MAAM,cAAc,SAAA;;AAElE,uBAAqB,qBAAa,iBAAiB,GAAG,OAAO;AAC7D,uBAAa,2BAA2B,UAAU,CAAC,eAAe;AAElE,sBAAoB,SAAQ,WAAW,UAAU,OAAO,UAAU;AAClE,mBAAiB,UAAU,MAAM;AACjC,sBAAoB,SAAQ,UAAU,IAAI,UAAU,QAAQ;AAC5D,wBAAsB,SAAQ,UAAU,IAAI,YAAY,QAAQ;AAChE,aAAW,aAAa,YAAY,IAAI,GAAG,WAAU,IAAI,GAAG,UACjD,eAAe,SAAS,EAAE;AAGrC,YAAU;AACV,MAAI,gBAAgB;AAClB,4BAAwB,qBAAa,uBAAuB;AAC5D,UAAM,WACJ,CAAC,QAAQ,CAAC,GAAG,cAAc,OAAO,CAAC,MAAM,kBAAkB,SAAA;AAC7D,aAAQ,YAAY,UAAU;AAC9B,aAAQ,YAAY,YAAY;;AAElC,SAAO;;AAGF,qBAAmC;EACxC,YAAY;EACZ,aAAa;EACb,WAAW;EACX,YAAY;;ACjFd;;;;;;;;;;;;;;;;AAqBA;AAKA;AACE,qBAAmB,SAAQ,KAAK,MAAM,cAAc,MAAe;IACjE;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;;;AAIE;AAKJ,SAAO,mBAAS,QAAQ,SAAS;AACjC,SAA
O,KAAK;AACZ,SAAO,WAAW,cAAc;AAEhC,eAAK,OACD,YAAY,GACZ,MAAM,sDAAsD;AAEhE,oBAAkB,EAAE,MAAM;AAC1B,sBAAqB,eAAe,SAAU,EAAE,MAAM,KAAK,EAAE,MAAM;AACnE,qBAAoB,eAAe,SAAU,EAAE,MAAM,KAAK,EAAE,MAAM;AAClE,qBAAoB,eAAe,SAAU,EAAE,MAAM,KAAK,EAAE,MAAM;AAElE,uBAAqB,cAAc;AACnC,sBAAoB,aAAa;AACjC,sBAAoB,aAAc,aAAY;AAE9C,sBAAqB,eAAe,SAChC,CAAC,WAAW,cAAc,aAAa,eACvC,CAAC,WAAW,aAAa,cAAc;AAE3C,cAAY,SAAQ,WAAW,aAAa;AAE5C,gBAAc,SAAQ,UAAU,IAAI,EAAE;AACtC,cAAY,MAAM;AAClB,wBACI,IAAI,WAAW,IAAI,WAAW,aAAK,eAAe,EAAE,QAAQ;AAEhE,2BAAyB,IAAI,WAAW,IAAI,WAAW,aAAa;AACpE,0BACI,IAAI,WAAW,IAAI,WAAW,aAAK,eAAe,cAAc;AAEpE,gBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAChD,uBAAqB,eAAe,SAAS,IAAI;AACjD,mBACI,KAAK,WAAW,cAAc,eAAe,EAAE,MAAM,SAAS,GAC9D,kBAAkB,iBAAiB,YAAY,QAAQ;AAE3D,SAAO;;AAGF,2BAAyC;EAC9C,YAAY;EACZ,aAAa;EACb,WAAW;EACX,YAAY;;AC1Fd;;;;;;;;;;;;;;;;AAqBA;AAQA;AACE,wBACI,SAAQ,KAAK,MAAM,uBAAuB,MAAiB;IACzD;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;;;AAIR;AAKE,SAAO,QAAQ,OAAO,qBAAW;AAEjC,SAAO,GAAG,UAAU;AACpB,cAAY,SAAQ,UAAU,IAAI,EAAE,QAAQ;AAC5C,mBAAiB,SAAQ,UAAU,IAAI,OAAO,QAAQ;AAEtD,SAAO,SAAS,WAAW,WAAK,mBAAmB;AAEnD,qBAAmB,aAAa,OAAO,CAAC,GAAG,KAAK;AAEhD,mBAAiB,qBAAa,kBACzB,EAAe,OAAQ,OAAoB,OAAO,SAClD,YAA0C,MAAK,iBAChD;AAEJ,uBAAqB,SAAS;AAC9B,sBAAoB,SAAS;AAC7B,iBAAe,SAAS,QAAQ;AAChC,mBAAiB,SAAS,QAAQ;AAClC,oBAAkB,SAAS,QAAQ;AACnC,kBAAgB,SAAS,QAAQ;AACjC,yBAAuB,SAAS;AAChC,wBAAsB,SAAS;AAC/B,uBAAqB,SAAS;AAC9B,sBAAoB,SAAS;AAC7B,wBAAsB,SAAS;AAC/B,yBAAuB,SAAS;AAChC,oBAAkB,SAAS,QAAQ,SAAS,SAAS,IAAI;AAEzD,MAAI,SAAS,eAAe;AAC1B,UAAM,IAAI,MACN,mEACG,SAAS;;AAGlB,cAAY,SAAQ,WAAW,SAAS,UAAU;AAClD,gBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAChD,sBACI,KAAK,EAAE,MAAM,IAAI,EAAE,MAAM,IAAI,EAAE,MAAM,IAAI,UAAU,cACnD,aAAa,QAAQ,UAAU,WAAW,SAAS,WACnD,gBAAgB,eAAe,cAAc,aAAa,eAC1D,gBAAgB;AACpB,SAAO;;AAGF,oCAAkD;EACvD,YAAY;EACZ,aAAa;EACb,WAAW;EACX,YAAY;;AC5Gd;;;;;;;;;;;;;;;;AAqBA,+BAA8B;AACvB,kBACH,yBAAyB,KAAK;ACvBlC;;;;;;;;;;;;;;;;AAoBA,+BAA8B;AACvB,oBACH,yBAAyB,OAAO,wBAAuB;ACtB3D;;;;;;;;;;;;;;;;AAoBO,kBAAgC,wBAAwB;ACpB/D;;;;;;;;;;;;;;;;AAsBA;AACE,SAAO,QAAQ,OAAO,OAAO,QAAQ,qBAAW;AAChD,cAAY,SAAQ,WAAW,OAAO;AACtC,kBAAgB,SAAQ,mBAAmB;AAC3C,UAAQ,KAAK;AACb,SAAO;;AAGF,mBAAiC;EACtC,YAAY;EACZ,aAAa;EACb,YAAY;;ACjCd;;;;;;;;;;;;;;;;AAqBA;AAIA;AACE,sBAAoB,SAAQ,KAAK,MAAM,eAAe,MAAiB;IACrE;IACA;IACA;IACA;IACA;IACA;;;AAIE;AAEJ,SAAO,QAAQ,qBAAW;AAC1B,SAAO,iBAAS;AAEhB,cAAY,SAAQ,WAAW,OAAM,OAAO,OAAM;AAClD,kBAAgB,SAAQ,UAAU,IAAI,OAAM,QAAQ;AACpD,gBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAEhD,wDAAsD,OAAM;AAE5D,oBACI,SAAS,OAAO,aAAa,YAAY,aAAa;AAC1D,SAAO;;AAGF,4BAA0C;EAC/C,YAAY;EACZ,aAAa;EACb,YAAY;EACZ,WAAW;;ACxDb;;;;;;;;;;;;;;;;AAqBA,+BAA8B;AACvB,uBACH,yBAAyB,UAAU;ACvBvC;;;;;;;;;;;;;;;;AAqBA;AAIA;AACE,kBAAgB,SAAQ,KAAK,MACzB,gBAAgB,MAChB,CAAC,UAAU,UAAU,UAAU,UAAU,UAAU,UAAU;;AAGnE;AAKE,SAAO,mBAAS,QAAQ,SAAS;AACjC,SAAO,mBAAmB;AAC1B,SAAO,GAAG,MAAM,UAAU,QAAQ,SAAS;AAC3C,cAAY,SAAQ,UAAU,IAAI,EAAE,QAAQ;AAC5C,iBAAe,SAAQ,UAAU,IAAI,KAAK,QAAQ;AAClD,qBAAmB,SAAQ,UAAU,IAAI,SAAS,QAAQ;AAC1D,mBAAiB,UAAU,OAAO,SAAQ,UAAU,IAAI,OAAO,QAAQ,KAAK;AAC5E,kBAAgB,SAAS,OAAO,SAAQ,UAAU,IAAI,MAAM,QAAQ,KAAK;AAEzE,cAAY,SAAQ,WAAW,EAAE,OAAO,EAAE;AAE1C,MAAI,aAAK,cAAc,EAAE,WAAW;AAClC,WAAO;;AAGT,gBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAEhD,gBACI,KAAK,QAAQ,YAAY,UAAU,SAAS,iBAAiB;AACjE,SAAO;;AAGF,6BAA2C;EAChD,YAAY;EACZ,aAAa;EACb,WAAW;EACX,YAAY;;AC9Dd;;;;;;;;;;;;;;;;AAuBA;AASA;AACE,oBAAkB,SAAQ,KAAK,MAAM,aAAa,MAAiB;IACjE;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;;;AAIJ;AAKE,SAAO,QAAQ,OAAO,qBAAW;AACjC,SAAO,GAAG,QAAQ,MAAM,0BAA0B;AAClD,SAAO,SAAS,WAAK,WAAW,YAAY,iBAAiB,cACzD;AAEJ,mBAAiB,qBAAa,kBAC
zB,EAAe,OAAQ,OAAoB,OAAO,SAAS,WAC5D,MAAK;AAET,0BACI,kBAAkB;AACtB,MAAI,mBAAmB;AACrB,UAAM,IAAI,MACN,GAAG;;AAIT,cAAY,SAAQ,UAAU,IAAI,EAAE,QAAQ;AAC5C,mBAAiB,SAAQ,UAAU,IAAI,OAAO,QAAQ;AAEtD,yBAAuB,SAAS;AAEhC,eAAa;AACb,MAAI,QAAQ;AACV,qBAAiB,SAAQ,UAAU,IAAI,KAAK;AAC5C,QAAI,SAAS,MAAM,WAAW;AAC5B,YAAM,IAAI,MACN,sDACQ,SAAS,MAAM;;AAE7B,QAAI,SAAS,MAAM,OAAO;AACxB,YAAM,IAAI,MACN,2BAA2B,SAAS,wDACI;;AAE9C,aAAS,SAAS;;AAGpB,uBAAqB,SAAS;AAC9B,sBAAoB,SAAS;AAC7B,iBAAe,SAAS,QAAQ;AAChC,mBAAiB,SAAS,QAAQ;AAClC,oBAAkB,SAAS,QAAQ;AACnC,kBAAgB,SAAS,QAAQ;AACjC,yBAAuB,SAAS;AAChC,wBAAsB,SAAS;AAC/B,uBAAqB,SAAS;AAC9B,sBAAoB,SAAS;AAC7B,wBAAsB,SAAS;AAC/B,oBAAkB,SAAS,QAAQ,SAAS,SAAS,IAAI;AACzD,oBAAkB,SAAS;AAC3B,mBAAiB,SAAS;AAC1B,kBAAgB,SAAS;AAEzB,MAAI,eAAe;AACjB,UAAM,IAAI,MACN,yDACG;;AAGT,cAAY,SAAQ,WAAW,SAAS,UAAU;AAClD,gBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAChD,mCAAiC,0BAA0B,OACvD,IACA,SAAQ,UAAU,IAAI,uBAAuB,QAAQ;AACzD,kBACI,KAAK,WAAW,UAAU,SAAS,UAAU,cAAc,aAC3D,QAAQ,QAAQ,UAAU,WAAW,SAAS,WAAW,gBACzD,eAAe,cAAc,aAAa,eAAe,gBACzD,iBAAiB,0BAA0B;AAC/C,SAAO;;AAGF,0BAAwC;EAC7C,YAAY;EACZ,aAAa;EACb,WAAW;EACX,YAAY;;AC7Id;;;;;;;;;;;;;;;;AAuBA;AASA;AACE,6BACI,SAAQ,KAAK,MAAM,sBAAsB,MAAiB;IACxD;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;;;AAIR;AAKE,SAAO,QAAQ,OAAO,qBAAW;AACjC,SAAO,GAAG,QAAQ,MAAM,0BAA0B;AAClD,SAAO,SAAS,WAAK,WAAW,YAAY,iBAAiB,cACzD;AAEJ,mBAAiB,qBAAa,kBACzB,EAAe,OAAQ,OAAoB,OAAO,SAAS,WAC5D,MAAK,iBAAiB;AAE1B,0BACI,kBAAkB;AACtB,MAAI,mBAAmB;AACrB,UAAM,IAAI,MACN,GAAG;;AAIT,cAAY,SAAQ,UAAU,IAAI,EAAE,QAAQ;AAC5C,mBAAiB,SAAQ,UAAU,IAAI,OAAO,QAAQ;AAEtD,yBAAuB,SAAS;AAEhC,eAAa;AACb,MAAI,QAAQ;AACV,qBAAiB,SAAQ,UAAU,IAAI,KAAK;AAC5C,QAAI,SAAS,MAAM,WAAW;AAC5B,YAAM,IAAI,MACN,+DACQ,SAAS,MAAM;;AAE7B,QAAI,SAAS,MAAM,OAAO;AACxB,YAAM,IAAI,MACN,oCAAoC,SAAS,wDACL;;AAE9C,aAAS,SAAS;;AAGpB,uBAAqB,SAAS;AAC9B,sBAAoB,SAAS;AAC7B,iBAAe,SAAS,QAAQ;AAChC,mBAAiB,SAAS,QAAQ;AAClC,oBAAkB,SAAS,QAAQ;AACnC,kBAAgB,SAAS,QAAQ;AACjC,yBAAuB,SAAS;AAChC,wBAAsB,SAAS;AAC/B,uBAAqB,SAAS;AAC9B,sBAAoB,SAAS;AAC7B,wBAAsB,SAAS;AAC/B,oBAAkB,SAAS,QAAQ,SAAS,SAAS,IAAI;AACzD,oBAAkB,SAAS;AAC3B,mBAAiB,SAAS;AAC1B,kBAAgB,SAAS;AAEzB,MAAI,eAAe;AACjB,UAAM,IAAI,MACN,kEACG;;AAGT,cAAY,SAAQ,WAAW,SAAS,UAAU;AAClD,gBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAChD,mCAAiC,0BAA0B,OACvD,IACA,SAAQ,UAAU,IAAI,uBAAuB,QAAQ;AACzD,2BACI,KAAK,WAAW,UAAU,SAAS,UAAU,cAAc,aAC3D,QAAQ,QAAQ,UAAU,WAAW,SAAS,WAAW,gBACzD,eAAe,cAAc,aAAa,eAAe,gBACzD,iBAAiB,0BAA0B;AAC/C,SAAO;;AAGF,mCAAiD;EACtD,YAAY;EACZ,aAAa;EACb,WAAW;EACX,YAAY;;AC9Id;;;;;;;;;;;;;;;;AAuBA;AAKA;AACE,iBAAe,SAAQ,KAAK,MAAM,UAAU,MAAe;IACzD;IACA;IACA;IACA;IACA;IACA;IACA;IACA;;;AAIJ;AAEE,SAAO,mBAAS,UAAU;AAC1B,SAAO,QAAQ,WAAW;AAE1B,uDACI,uBAAY,mBAAmB,QAAkB;AAErD,cAAY,SAAQ,WAAW,aAAa,OAAO;AACnD,MAAI,cAAc;AAChB,WAAO;;AAGT,uBAAqB,QAAQ;AAC7B,oBAAkB,aAAa,aAAa,SAAS;AAErD,gBAAc,SAAQ,UAAU,IAAI,OAAO;AAC3C,cAAY,MAAM;AAClB,sBAAoB,SAAQ,UAAU,IAAI,QAAQ;AAClD,oBAAkB,YAAY;AAE9B,uBAAqB,IAAI,WAAW,IAAI,WAAW,SAAS;AAE5D,gBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAChD,eACI,KAAK,SAAS,OAAO,QAAQ,WAAW,WAAW,WAAW,WAC9D,cAAc;AAElB,SAAO;;AAGF,uBAAqC;EAC1C,YAAY;EACZ,aAAa;EACb,WAAW;EACX,YAAY;;AC5Ed;;;;;;;;;;;;;;;;AAuBA;AAKA;AACE,eAAa,SAAQ,KAAK,MAAM,UAAU,MAAe;IACvD;IACA;IACA;IACA;IACA;IACA;IACA;IACA;;;AAIJ;AAGE,SAAO,mBAAS,QAAQ,SAAS;AACjC,SAAO,GAAG,WAAW;AACrB,SAAO,QAAQ;AAEf,mBAAiB,EAAE,MAAM;AACzB,WAAS,QAAQ,aAAK,cAAc,QAAQ;AAC5C,sBAAoB,EAAE,MAAM,SAAS;AAErC,cAAY,SAAQ,WAAW,UAAU,EAAE;AAC3C,MAAI,aAAK,cAAc,EAAE,WAAW;AAClC,WAAO;;AAGT,gBAAc,SAAQ,UAAU,IAAI,EAAE;AACtC,cAAY,MAAM;AAElB,sBAAoB,SAAQ,UAAU,IAAI,QAAQ;AAClD,oBAAkB,YAAY;AAE9B,gBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAEhD,wBACI,IAAI,WAAW,IAAI,WAAW,aAAK,
eAAe,EAAE,QAAQ;AAChE,0BACI,IAAI,WAAW,IAAI,WAAW,aAAK,eAAe,WAAW;AAEjE,aACI,KAAK,SAAS,EAAE,QAAQ,eAAe,aAAa,WAAW,MAC/D,iBAAiB;AAGrB,qBAAmB,aAAK,eAAe,MAAM,EAAE,OAAO;AACtD,oBAAkB,qBAAa,aAAa,yBACxC,GAAa,SAAmB;AAEpC,MAAI,QAAQ,UAAU;AACtB,SAAO;;AAGF,uBAAqC;EAC1C,YAAY;EACZ,aAAa;EACb,WAAW;EACX,YAAY;;ACvFd;;;;;;;;;;;;;;;;AAoBA,+BAA8B;AACvB,sBACH,yBAAyB,SAAS,wBAAuB;ACtB7D;;;;;;;;;;;;;;;;AAoBA,+BAA8B;AACvB,2BACH,yBAAyB,cAAc,wBAAuB;ACtBlE;;;;;;;;;;;;;;;;AAmBA,+BAA8B;AACvB,mBACH,yBAAyB,MAAM,wBAAuB;ACrB1D;;;;;;;;;;;;;;;;AAoBA,+BAA8B;AACvB,wBACH,yBAAyB,WAAW,wBAAuB;ACtB/D;;;;;;;;;;;;;;;;AAmBO,kBAAgC,wBAAwB;ACnB/D;;;;;;;;;;;;;;;;AAmBA,+BAA8B;AACvB,yBACH,yBAAyB,YAAY,wBAAuB;ACrBhE;;;;;;;;;;;;;;;;AAwBA;AAEA;AACE,YAAU,SAAQ,KAAK,MAAM,KAAK,MAAe,CAAC;;AAGpD;AAEE,SAAO,mBAAS,QAAQ,SAAS;AACjC,SAAO,wBAAwB,YAAY;AAC3C,SAAO,KAAK;AACZ,cAAY,SAAQ,UAAU,IAAI,EAAE,QAAQ;AAC5C,gBAAc;AACd,cAAY;AAEZ,SAAO,YAAY,MAAM,cAAc,sBACnC,wBAAwB,GAAG,MAAM;AAErC,MAAI;AACF,yBAAqB,SAAQ,UAAU,IAAI,WAAW,QAAQ;AAC9D,YAAQ;AACR,cAAU;;AAGZ,oBAAkB,MAAM,MAAM;AAC9B,uBAAa,2BAA2B,OAAO,MAAM;AACrD,kCACI,qBAAa,0BAA0B,MAAM,OAAO;AACxD,qBAAmB,aAAK,cAAc;AAEtC,cAAY,SAAQ,WAAW,UAAU,EAAE;AAC3C,MAAI,aAAK,cAAc,MAAM,WAAW;AACtC,kBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAChD,YAAQ,SAAS,YAAY;;AAG/B,MAAI;AAEF,aAAQ,YAAY,WAAW;;AAGjC,MAAI;AAEF,qBAAiB,qBAAa,qBAAqB,IAAI,OAAO;AAC9D,QAAI,QAAQ;;AAGd,SAAO;;AAGF,kBAAgC;EACrC,YAAY;EACZ,aAAa;EACb,WAAW;EACX,YAAY;;AC9Ed;;;;;;;;;;;;;;;;AAmBA,gCAA8B;AACvB,sBACH,yBAAyB,SAAS;ACrBtC;;;;;;;;;;;;;;;;AAqBA;AAOA;AACE,gBAAc,SAAQ,KAAK,MAAM,SAAS,MAAiB;IACzD;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;;;AAIJ;AAEE,SAAO,QAAQ,OAAO,qBAAW;AAEjC,YAAU,OAAO;AACjB,cAAY,SAAQ,UAAU,IAAI,EAAE,QAAQ;AAE5C,SAAO,YAAY,SAAS,WAAK,mBAAmB;AACpD,mBAAiB,qBAAa,kBAC1B,EAAE,OAAO,YAAY,SAAS,GAAmB,MAAK;AAE1D,uBAAqB,SAAS;AAC9B,sBAAoB,SAAS;AAC7B,iBAAe,SAAS,QAAQ;AAChC,mBAAiB,SAAS,QAAQ;AAClC,oBAAkB,SAAS,QAAQ;AACnC,kBAAgB,SAAS,QAAQ;AACjC,yBAAuB,SAAS;AAChC,wBAAsB,SAAS;AAC/B,uBAAqB,SAAS;AAC9B,sBAAoB,SAAS;AAC7B,wBAAsB,SAAS;AAC/B,yBAAuB,SAAS;AAEhC,MAAI,SAAS,eAAe;AAC1B,UAAM,IAAI,MACN,6CACG,SAAS;;AAGlB,cAAY,SAAQ,WAAW,SAAS,UAAU;AAClD,gBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAEhD,cACI,KAAK,EAAE,MAAM,IAAI,EAAE,MAAM,IAAI,EAAE,MAAM,IAAI,cAAc,aACvD,QAAQ,UAAU,WAAW,SAAS,gBAAgB,eACtD,cAAc,aAAa,eAAe,gBAAgB;AAC9D,SAAO;;AAGF,sBAAoC;EACzC,YAAY;EACZ,aAAa;EACb,WAAW;EACX,YAAY;;AC9Fd;;;;;;;;;;;;;;;;AAuBA;AAEA;AACE,YAAU,SAAQ,KAAK,MAAM,KAAK,MAAe,CAAC;;AAGpD;AAEE,SAAO,mBAAS,QAAQ,SAAS;AACjC,SAAO,MAAM,YAAY;AACzB,SAAO,KAAK;AACZ,cAAY,SAAQ,UAAU,IAAI,EAAE,QAAQ;AAC5C,gBAAc;AACd,cAAY;AAEZ,SAAO,YAAY,MAAM,cAAc,sBACnC,wBAAwB,GAAG,MAAM;AAErC,MAAI;AACF,yBAAqB,SAAQ,UAAU,IAAI,WAAW,QAAQ;AAC9D,QAAI,iBAAiB;AAGnB,cAAQ;AACR,gBAAU;;;AAId,oBAAkB,MAAM,MAAM;AAE9B,uBAAa,2BAA2B,OAAO,MAAM;AACrD,kCACI,qBAAa,0BAA0B,MAAM,OAAO;AACxD,qBAAmB,aAAK,cAAc;AAEtC,cAAY,SAAQ,WAAW,UAAU,MAAM;AAC/C,MAAI,aAAK,cAAc,MAAM,WAAW;AACtC,kBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAChD,YAAQ,SAAS,YAAY;;AAG/B,MAAI;AAEF,aAAQ,YAAY,WAAW;;AAGjC,MAAI;AAEF,qBAAiB,qBAAa,qBAAqB,IAAI,OAAO;AAC9D,QAAI,QAAQ;;AAGd,SAAO;;AAGF,kBAAgC;EACrC,YAAY;EACZ,aAAa;EACb,WAAW;EACX,YAAY;;AClFd;;;;;;;;;;;;;;;;AAmBA,gCAA8B;AACvB,sBACH,yBAAyB,SAAS;ACrBtC;;;;;;;;;;;;;;;;AAqBA,gCAA8B;AACvB,uBACH,yBAAyB,UAAU;ACvBvC;;;;;;;;;;;;;;;;AAmBO,qBAAmC,wBAAwB;ACnBlE;;;;;;;;;;;;;;;;AA8BM;AAEJ,iBAAe,IAAI,WAAW,SAAQ,KAAK,OAAO,QAAQ,WAAW;AACrE,2BAAyB,OAAO;AAChC,uBAAqB,OAAO;AAC5B,0BAAwB,OAAO;AAC/B,wBAAsB,OAAO;AAE7B,WAAQ,KAAK,MAAM;AACnB,SAAO,CAAC,kBAAkB,cAAc,iBAAiB;;ACvC3D;;;;;;;;;;;;;;;;AAuBA;AAIA;AACE,cAAW,SAAQ,KAAK,MACpB,qBACA,UACA;IACE;IACA;IACA;IACA;IACA;;;AAIR;AAKE,SAAO,mBAAS,QAAQ,SAAS;AACjC,SAAO,cAAc,eAAe,kBAAkB;
AACtD,SAAO,OAAO,UAAU;AAExB,kBAAgB,SAAQ,UAAU,IAAI,MAAM,QAAQ;AACpD,mBAAiB,SAAQ,UAAU,IAAI,OAAO,QAAQ;AAEtD,oBACI,UAAS,SAAS,UAAU,eAAe,cAAc;AAE7D,SAAO,kBAAkB,cAAc,iBAAiB,iBACpD,kBAAkB,UAAS;AAG/B,WAAQ,KAAK,MAAM;AACnB,WAAQ,KAAK,MAAM;AAEnB,gCACI,SAAQ,WAAW,CAAC,eAAe,SAAS;AAEhD,SAAO;;AAGF,kCAAgD;EACrD,YAAY;EACZ,aAAa;EACb,WAAW;EACX;;ACxEF;;;;;;;;;;;;;;;;AAuBA;AAKA;AACE,cAAW,SAAQ,KAAK,MACpB,qBACA,UACA;IACE;IACA;IACA;IACA;IACA;IACA;;;AAIR;AAKE,SAAO,mBAAS,QAAQ,SAAS;AACjC,SAAO,cAAc,eAAe,gBAAgB,sBAChD;AACJ,SAAO,OAAO,UAAU;AAExB,kBAAgB,SAAQ,UAAU,IAAI,MAAM,QAAQ;AACpD,mBAAiB,SAAQ,UAAU,IAAI,OAAO,QAAQ;AAEtD,oBAAkB,UACd,SAAS,UAAU,eAAe,cAAc,gBAChD;AAEJ,SAAO,kBAAkB,cAAc,iBAAiB,iBACpD,kBAAkB,UAAS;AAG/B,WAAQ,KAAK,MAAM;AAEnB,gCACI,SAAQ,WAAW,CAAC,eAAe,SAAS;AAEhD,6BAA2B,SAAQ,WAAW,IAAI,SAAS;AAE3D,SAAO,CAAC,uBAAuB;;AAG1B,kCAAgD;EACrD,YAAY;EACZ,aAAa;EACb,WAAW;EACX,YAAY;;AC7Ed;;;;;;;;;;;;;;;;AAuBA;AAKA;AACE,cAAW,SAAQ,KAAK,MACpB,qBACA,UACA;IACE;IACA;IACA;IACA;IACA;IACA;;;AAIR;AAKE,SAAO,mBAAS,QAAQ,SAAS;AACjC,SAAO,cAAc,eAAe,gBAAgB,gBAAgB;AACpE,SAAO,OAAO,UAAU;AAExB,kBAAgB,SAAQ,UAAU,IAAI,MAAM,QAAQ;AACpD,mBAAiB,SAAQ,UAAU,IAAI,OAAO,QAAQ;AAEtD,oBAAkB,UACd,SAAS,UAAU,eAAe,cAAc,gBAChD;AAEJ,SAAO,kBAAkB,cAAc,iBAAiB,iBACpD,kBAAkB,UAAS;AAI/B,WAAQ,KAAK,MAAM;AAEnB,gCACI,SAAQ,WAAW,CAAC,eAAe,SAAS;AAChD,+BACI,SAAQ,WAAW,CAAC,eAAe,WAAW;AAElD,SAAO,CAAC,uBAAuB;;AAG1B,kCAAgD;EACrD,YAAY;EACZ,aAAa;EACb,WAAW;EACX,YAAY;;AC7Ed;;;;;;;;;;;;;;;;AAmBA,gCAA8B;AACvB,uBACH,yBAAyB,UAAU,yBAAuB;ACrB9D;;;;;;;;;;;;;;;;AAqBA;AAIA;AACE,eAAa,SAAQ,KAAK,MAAM,QAAQ,MAAiB;IACvD;IACA;IACA;IACA;IACA;;;AAIJ;AAEE,SAAO,QAAQ,mBAAS,SAAS;AACjC,SAAO,WAAW;AAClB,SAAO,OAAO,SAAS,YAAY;AAEnC,cAAY,SAAQ,WAAW,CAAC,GAAG,QAAQ,OAAO,QAAQ;AAC1D,gBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAEhD,sBAAoB,SAAQ,UAAU,IAAI,QAAQ;AAClD,oBAAkB,YAAY;AAE9B,aAAW,WAAW,OAAO,SAAS,UAAU;AAEhD,SAAO;;AAGF,qBAAmC;EACxC,YAAY;EACZ,aAAa;EACb,WAAW;EACX,YAAY;;ACxDd;;;;;;;;;;;;;;;;AAqBA;AACE,SAAO,SAAS,IAAI,qBAAW;AAC/B,cAAY,SAAQ,WAAW,EAAE,OAAO,EAAE;AAC1C,kBAAgB,SAAQ,mBAAmB;AAC3C,UAAQ,KAAK;AACb,SAAO;;AAGF,uBAAqC;EAC1C,YAAY;EACZ,aAAa;EACb,YAAY;;AChCd;;;;;;;;;;;;;;;;AAuBA;AAKA;AACE,cAAY,SAAQ,KAAK,MAAM,OAAO,MAAiB;IACrD;IACA;IACA;IACA;IACA;IACA;IACA;IACA;;;AAIJ;AAEE,SAAO,SAAS,IAAI,mBAAS,QAAQ,UAAU,kBAAkB;AAEjE,mBAAiB,SAAS,IACtB,UAAU,EAAE,KAAqB,EAAE,MAAM,KAAK,EAAE;AACpD,cAAY,SAAQ,UAAU,IAAI,EAAE,QAAQ;AAC5C,cAAY,SAAQ,WAAW,UAAU,EAAE;AAC3C,gBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAChD,sBAAoB,IAAI,WAAW,IAAI,WAAW,EAAE,OAAO;AAE3D,0BAAwB,SAAS,IAAI,cAAY,SAAS;AAC1D,2BAAyB,SAAS,IAAI,cAAY,SAAS;AAC3D,2BACI,IAAI,WAAW,IAAI,WAAW,iBAAiB;AACnD,4BACI,IAAI,WAAW,IAAI,WAAW,kBAAkB;AAEpD,YACI,KAAK,aAAa,EAAE,MAAM,QAAQ,SAAS,EAAE,QAAQ,kBACrD,mBAAmB,eAAe;AACtC,SAAO;;AAGF,oBAAkC;EACvC,YAAY;EACZ,aAAa;EACb,YAAY;EACZ,WAAW;;ACrEb;;;;;;;;;;;;;;;;AAmBA,gCAA8B;AACvB,kBACH,yBAAyB,KAAK;ACrBlC;;;;;;;;;;;;;;;;AAqBA;AAEA;AACE,cAAY,SAAQ,KAAK,MAAM,OAAO,MAAiB;IACrD;IACA;IACA;;;AAIJ;AACE,SAAO,QAAQ,qBAAW;AAC1B,SAAO,GAAG,SAAS;AACnB,cAAY,SAAQ,UAAU,IAAI,EAAE,QAAQ;AAC5C,oBAAkB,SAAQ,UAAU,IAAI,MAAM,QAAQ;AAEtD,cAAY,SAAQ,WAAW,EAAE,OAAO;AACxC,gBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAChD,YAAU,KAAK,WAAW;AAC1B,SAAO;;AAGF,oBAAkC;EACvC,YAAY;EACZ,aAAa;EACb,WAAW;EACX,YAAY;;AC/Cd;;;;;;;;;;;;;;;;AAmBO,mBAAiC,wBAAwB;ACnBhE;;;;;;;;;;;;;;;;AAmBO,oBAAkC,wBAAwB;ACnBjE;;;;;;;;;;;;;;;;AAuBA;AAKA;AACE,uBAAqB,SAAQ,KAAK,MAAM,gBAAgB,MAAe;IACrE;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;;;AAIJ;AAKE,SAAO,mBAAS,QAAQ,SAAS;AAEjC,SAAO,UAAU;AACjB,SAAO,cAAc,QAAQ;AAC7B,gCAA8B;AAE9B,oDAAkD,OAAO;AACzD,mBAAiB,CAAC,OAAO,WAAW,UAAU;AAE9C,cAAY,SAAQ,UAAU,IAAI,OAAO;AACzC;AACA,MAAI,MAAM,UAAU;AAClB,iBACI,MAAK,CAAC,SAAA,UAAS,QAAQ,CAAC,GAAG,SAAS,OAAO,CAAC,OAA
O;AACvD,YAAQ,SAAQ,UAAU,IAAI,WAAW;;AAE3C,cAAY,MAAM;AAElB,cAAY,SAAQ,WAAW,UAAU;AACzC,MAAI,aAAK,cAAc,OAAO,WAAW;AACvC,WAAO;;AAET,gBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAEhD,qBACI,KAAK,OAAO,WAAW,UAAU,aAAa,WAAW,UACzD,eAAe,IAAI,GAAG;AAE1B,MAAI,cAAc;AAChB,aAAQ,YAAY,WAAW;;AAGjC,SAAO;;AAGF,6BAA2C;EAChD,YAAY;EACZ,aAAa;EACb,WAAW;EACX,YAAY;;ACtFd;;;;;;;;;;;;;;;;AAwBA;AAIA;AACE,gBAAc,SAAQ,KAAK,MAAM,SAAS,MAAM;IAC9C;IACA;IACA;IACA;IACA;IACA;;;AAIE;AAGJ,SAAO,QAAQ,mBAAS,SAAS;AACjC,SAAO,KAAK;AACZ,SAAO,QAAQ;AAEf,eAAa,aAAK,eAAe,MAAM,EAAE;AAEzC,MAAI,EAAE,MAAM,WAAW;AACrB,WAAO,SAAS,CAAC,QAAQ,CAAC,IAAI,SAAA;;AAGhC,cAAY,SAAQ,WAAW,EAAE,OAAO,EAAE;AAC1C,cAAY,SAAQ,UAAU,IAAI,EAAE,QAAQ;AAC5C,gBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAEhD,oBAAkB,IAAI,WAAW,IAAI,WAAW,MAAM;AACtD,wBAAsB,IAAI,WAAW,IAAI,WAAW,EAAE,OAAO;AAE7D,cACI,KAAK,WAAW,KAAK,QAAQ,eAAe,EAAE,MAAM,QAAQ;AAEhE,SAAO,SAAQ,CAAC,QAAQ,CAAC,GAAG,MAAM,OAAO,CAAC,OAAO,EAAE,QAAQ,SAAA;;AAGtD,sBAAoC;EACzC,YAAY;EACZ,aAAa;EACb,YAAY;EACZ,WAAW;;ACrEb;;;;;;;;;;;;;;;;AAsBA;AAKA;AACE,eAAa,SAAQ,KAAK,MAAM,kBAAkB,MAAiB;IACjE;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;;;AAIE;AAKJ,SAAO,QAAQ,mBAAS,SAAS;AACjC,SAAO,iBAAS;AAChB,SAAO,SAAS,WAAW,UAAU;AAErC,cAAY,SAAQ,WAAW,OAAM,OAAO,OAAM;AAClD,kBAAgB,SAAQ,UAAU,IAAI,OAAM,QAAQ;AACpD,gBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAEhD,wDAAsD,OAAM;AAE5D,6BACI,qBAAa,eAAe,QAAQ,aAAa;AAErD,sBAAoB,cAAc;AAClC,2BAAyB;AAEzB,qBAAmB,OAAO,cAAc,WACpC,CAAC,WAAW,WAAW,WAAW,cAAc,IAAI,oBACpD,CAAC,GAAG,WAAW;AACnB,oBAAkB,IAAI,WAAW,IAAI,WAAW,YAAY;AAE5D,aACI,SAAS,OAAO,aAAa,YAAY,aAAa,SAAS,SAC/D,SAAS,WAAW,WAAW,QAAQ;AAC3C,SAAO;;AAGF,+BAA6C;EAClD,YAAY;EACZ,aAAa;EACb,YAAY;EACZ,WAAW;;AC/Eb;;;;;;;;;;;;;;;;AAmBO,oBAAkC,wBAAwB;ACnBjE;;;;;;;;;;;;;;;;AAuBA;AAKA;AACE,kBAAgB,SAAQ,KAAK,MAAM,WAAW,MAAe;IAC3D;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;;;AAIJ;AAIE,SAAO,mBAAS,QAAQ,SAAS;AACjC,SAAO,SAAS,WAAW;AAC3B,SAAO,SAAS;AAEhB,cAAY,SAAQ,WAAW,OAAO,QAAQ;AAC9C,MAAI,aAAK,cAAc,WAAW;AAChC,WAAO;;AAGT,SAAO,WAAW,YAAY,WAAW,SAAS,cAC9C,wBAAa,gBAAgB,SAAS,SAAS;AAEnD,sBAAoB,SAAQ,UAAU,IAAI,QAAQ;AAClD,oBAAkB,YAAY;AAE9B,sBAAoB,SAAQ,UAAU,IAAI,QAAQ;AAClD,oBAAkB,YAAY;AAE9B,uBAAqB,IAAI,WAAW,IAAI,WAAW,SAAS;AAE5D,gBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAChD,gBACI,WAAW,WAAW,SAAS,QAAQ,QAAQ,WAAW,YAC1D,WAAW,cAAc,YAAY;AAEzC,SAAO;;AAGF,wBAAsC;EAC3C,YAAY;EACZ,aAAa;EACb,WAAW;EACX,YAAY;;AC9Ed;;;;;;;;;;;;;;;;AAqBA;AAIA;AACE,eAAa,SAAQ,KAAK,MAAM,UAAU,MAAM;IAC9C;IACA;IACA;IACA;IACA;;;AAIJ;AACE,SAAO,QAAQ,qBAAW;AAC1B,SAAO,WAAW,GAAG,KAAK;AAE1B,sBAAoB,SAAQ,UAAU,IAAI,UAAU,QAAQ;AAC5D,cAAY,SAAQ,UAAU,IAAI,EAAE,QAAQ;AAC5C,cAAY,SAAQ,UAAU,IAAI,EAAE,QAAQ;AAC5C,cAAY,SAAQ,WAAW,EAAE,OAAO,EAAE;AAC1C,gBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAEhD,gBAAc,UAAU,MAAM;AAC9B,gBAAc,EAAE,MAAM;AAEtB,iBAAe,UAAU,KAAK,QAAQ,KAAK,UAAU,IACjD,IACA,aAAK,cAAc,EAAE,MAAM,MAAM;AAErC,aAAW,aAAa,KAAK,KAAK,QAAQ;AAC1C,SAAO;;AAGF,uBAAqC;EAC1C,YAAY;EACZ,aAAa;EACb,YAAY;EACZ,WAAW;;AC5Db;;;;;;;;;;;;;;;;AAqBA;AAEA;AACE,cAAW,SAAQ,KAAK,MAAM,SAAS,MAAiB,CAAC,UAAU;;AAGrE;AAEE,SAAO,mBAAS,SAAS,MAAM;AAC/B,cAAY,SAAQ,UAAU,IAAI,EAAE,QAAQ;AAC5C,cAAY,SAAQ,WAAW,EAAE,OAAO,EAAE;AAC1C,gBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAGhD,MAAI,aAAK,cAAc,IAAI,WAAW;AACpC,WAAO;;AAGT,YAAS,KAAK;AACd,SAAO;;AAGF,sBAAoC;EACzC,YAAY;EACZ,aAAa;EACb,WAAW;EACX,YAAY;;AC/Cd;;;;;;;;;;;;;;;;AAmBO,kBAAgC,wBAAwB;ACnB/D;;;;;;;;;;;;;;;;AAsBM;AAEJ,SAAO,SAAS,IAAI,QAAQ,OAAO,OAAO,qBAAW;AAErD,0BAAwB,mBAAW,iBAAiB,GAAG,OAAO;AAE9D,sBAAoB,mBAAW,iBAAiB,EAAE,OAAO,QAAQ;AACjE,gBAAc,SAAQ,mBAAmB;AACzC,cAAY,SAAQ,WAAW,OAAO,EAAE;AACxC,kBAAgB,SAAQ,mBAAmB;AAC3C,mBAAiB,aAAK,eAAe,EAAE;AACvC,MAAI;AACF,uBAAmB,mBAAW,kBAAkB,QAAQ;AACxD,YAAQ,IACJ,MAAM,SAAS,YAAY,aAAa,aAAK,cAAc;AAC/D,WAAO;;AAET,eAAa,EAAE,
MAAM;AACrB,MAAI,SAAS;AACX,YACI,OAAO,SAAS,IAAI,SAAS,QAC7B;aACK,SAAS;AAClB,YACI,OAAO,SAAS,IAAI,SAAS,IAAI,SACjC,QAAoC;aAC/B,SAAS;AAClB,YACI,OAAO,SAAS,IAAI,SAAS,IAAI,SAAS,IAAI,SAC9C,QACA;;AAEJ,qBAAiB,OAAO,GAAG,SAAS,QAAQ;;AAE9C,SAAO;;AAGT;AAIE,kBAAgB;AAChB,iBAAe,MAAM;AACrB,iBAAe,MAAM;AACrB,eAAa,SAAS,KAAK;AAC3B,eAAa,QAAQ,IAAI,MAAM;AAC7B,oBAAgB,IAAI,UAAU;AAC9B,YAAQ,IAAI,MAAM,SAAS,SAAS,UAAU,KAAK,KAAK;AACxD,iBAAa,KAAK;;;AAItB;AAIE,kBAAgB;AAChB,iBAAe,MAAM;AACrB,iBAAe,MAAM;AACrB,iBAAe,MAAM;AACrB,eAAa,SAAS,KAAK;AAC3B,eAAa,SAAS,KAAK;AAC3B,eAAa,QAAQ,IAAI,MAAM;AAC7B,iBAAa,QAAQ,IAAI,MAAM;AAC7B,sBAAgB,IAAI,WAAW,IAAI,WAAW;AAC9C,cAAQ,IAAI,MAAM,SAAS,SAAS,UAAU,KAAK,KAAK;AACxD,mBAAa,KAAK;;;;AAKxB;AAKE,kBAAgB;AAChB,iBAAe,MAAM;AACrB,iBAAe,MAAM;AACrB,iBAAe,MAAM;AACrB,eAAa,SAAS,KAAK;AAC3B,eAAa,SAAS,KAAK;AAC3B,eAAa,SAAS,KAAK;AAC3B,iBAAe,MAAM;AAErB,eAAa,QAAQ,IAAI,MAAM;AAC7B,iBAAa,QAAQ,IAAI,MAAM;AAC7B,mBAAa,QAAQ,IAAI,MAAM;AAC7B,wBAAgB,IAAI,WAAW,IAAI,WAAW,IAAI,WAAW;AAC7D,gBAAQ,IAAI,MAAM,SAAS,SAAS,UAAU,KAAK,KAAK;AACxD,qBAAa,KAAK;;;;;AAM1B;AAGE,iBAAe,OAAO,MAAM,MAAM,OAAO;AACzC,eAAa,OAAO,MAAM,OAAO,MAAM,OAAO;AAC9C,eAAa,GAAG,IAAI,OAAO,MAAM,EAAE;AACjC,gBAAY,OAAO,WAAW;AAC9B,iBAAa,IAAI,IAAI,YAAY,MAAM,MAAM;AAC7C,YAAQ,KAAK,KAAK,IAAI,GAAG;;;AAItB,oBAAkC;EACvC,YAAY;EACZ,aAAa;EACb,YAAY;;ACrId;;;;;;;;;;;;;;;;AAqBA;AAGA;AACE,cAAW,SAAQ,KAAK,MAAM,SAAS,MAAiB;IACtD;IACA;IACA;IACA;;;AAIJ;AAGE,SAAO,mBAAS,SAAS,SAAS,QAAQ,QAAQ;AAClD,cAAY,SAAQ,UAAU,IAAI,OAAO,QAAQ;AACjD,cAAY,SAAQ,WAAW,OAAO,OAAO,OAAO;AACpD,gBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAEhD,mBAAiB,OAAO,MAAM;AAC9B,gBAAc,aAAK,cAAc,OAAO,SAAS;AAGjD,MAAI,aAAK,cAAc,IAAI,WAAW;AACpC,WAAO;;AAGT,YAAS,KAAK,OAAO,UAAU;AAC/B,SAAO;;AAGF,sBAAoC;EACzC,YAAY;EACZ,aAAa;EACb,WAAW;EACX,YAAY;;ACzDd;;;;;;;;;;;;;;;;AAwBM;AAEJ,SAAO,QAAQ,OAAO,qBAAW;AACjC,SAAO,KAAK;AACZ,SAAO,iBAAiB,QAAQ;AAEhC,gBAAc,aAAK,eAAe,MAAM,EAAE,OAAO;AAEjD,qBAAmB,qBAAa,iBAAiB,GAAG,iBAAiB;AACrE,gBAAc,IAAI,MAAM,EAAE,MAAM,QAAQ,KAAK;AAC7C,eAAa,EAAE,MAAM;AACrB,SAAO,WAAW,IAAI;AACpB,uBAAmB,CAAC,GAAG;AACvB,eAAW,SAAS;AACpB,mBACI,MAAM,CAAC,QAAQ,CAAC,IAAI,OAAO,CAAC,OAAO,MAAM,aAAa,SAAA;AAC1D,UAAM,UAAU;AAChB,WAAO;;;AAIJ,qBAAmC;EACxC,YAAY;EACZ,aAAa;EACb,YAAY;;AChDd;;;;;;;;;;;;;;;;AAoBO,mBAAiC,wBAAwB;ACpBhE;;;;;;;;;;;;;;;;AAmBO,qBAAmC,wBAAwB;ACnBlE;;;;;;;;;;;;;;;;AAkBA,gCAA8B;AACvB,gCACH,yBAAyB,mBAAmB;ACpBhD;;;;;;;;;;;;;;;;AAuBA;AAMA;AACE,qBAAmB,SAAQ,KAAK,MAAM,cAAc,MAAe;IACjE;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;;;AAIE;AAKJ,SAAO,mBAAS,QAAQ,SAAS;AACjC,SAAO,KAAK;AAEZ,OAAK,OAAO,KAAK,WAAW;AAC5B,MAAI,WAAW;AACb,cAAU,IAAI,MAAM,MAAM;;AAG5B,SAAO,WAAW,SAAS,cAAc,aAAa,kBAAkB;AAExE,uBAAqB,qBAAa,WAAW,WAAW;AACxD,MAAI,aAAa,SAAS;AACxB,UAAM,IAAI,MAAM;;AAGlB,MAAI,iBAAiB,KAAK,gBAAgB;AACxC,UAAM,IAAI,MACN;;AAGN,MAAI,iBAAiB,KAAK,mBAAmB;AAC3C,UAAM,IAAI,MACN;;AAGN,8BAA4B,EAAE,MAAM,SAAS,MAAM;AAGnD,qBAAmB,qBAAa,WAAW,WAAW;AACtD,mBAAiB,EAAE,MAAM;AACzB,aAAW,QAAQ;AACjB,UAAM,QAAQ;AACd,QAAI,QAAQ;AACZ,aAAS,OAAO,MAAM,GAAG;;AAG3B,oBAAkB,SAAQ,CAAC,QAAQ,CAAC,IAAI,OAAO,CAAC,OAAO,WAAW,SAAA;AAElE,SACE,wBACA,oBACA,8BAEE,qBAAa,WAAW,kBACpB,UAAU,OAAO,cAAc,qBAAqB,OAAO,KAC3D,SAAS,WAAW,SAAS;AACrC,UAAQ;AACR,QAAM;AACN,YAAU;AAEV,qBAAmB,qBAAa,WAAW,WAAW;AAEtD,aAAW,QAAQ;AACjB,QAAI,QAAQ,MAAM,QAAQ;AAC1B,YAAQ,QAAQ;;AAIlB,eAAa,qBAAa,WAAW,gBAAgB,OAAO,KAAK;AAEjE,mBAAiB,KAAK,OAAO,aAAa,WAAW,QAAQ,UAAU;AAEvE,qBAAmB,QAAQ,MAAM,OAAK,MAAM;AAC5C,MAAI;AACF,oBAAgB,MAAM,CAAC,QAAQ,CAAC,IAAI,OAAO,CAAC,OAAO,OAAO,SAAA;AAC1D,WAAO,SAAQ,CAAC,QAAQ,CAAC,GAAG,UAAU,OAAO,CAAC,OAAO,WAAW,SAAA;;AAGlE,cAAY,SAAQ,WAAW,UAAU;AACzC,MAAI,CAAC,SAAS,KAAK,UAAQ,SAAS;AAClC,gBAAY,SAAQ,UAAU,IAAI,UAAU,QAAQ;AACpD,0BAAsB,IAAI,WACtB,IAAI,WAAW,aAAK,eAAe,UAAU,QA
AQ;AACzD,uBAAmB,IAAI,WAAW,IAAI,WAAW,OAAO;AACxD,qBAAiB,IAAI,WAAW,IAAI,WAAW,KAAK;AACpD,yBAAqB,IAAI,WAAW,IAAI,WAAW,SAAS;AAE5D,6BAAyB,IAAI,WAAW,IAAI,WAAW,UAAU;AACjE,4BACI,IAAI,WAAW,IAAI,WAAW,aAAK,eAAe,WAAW;AACjE,kBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAEhD,qBACI,KAAK,eAAe,UAAU,MAAM,QAAQ,YAAY,UACxD,cAAc,kBAAkB,iBAAiB,SAAS,QAC1D;;AAGN,SAAO,SAAQ,CAAC,QAAQ,CAAC,GAAG,MAAM,OAAO,CAAC,OAAO,WAAW,SAAA;;AAGvD,2BAAyC;EAC9C,YAAY;EACZ,aAAa;EACb,WAAW;EACX,YAAY;;AChJd;;;;;;;;;;;;;;;;AAkBA,gCAA8B;AACvB,kBACH,yBAAyB,KAAK;ACpBlC;;;;;;;;;;;;;;;;AAuBA;AAEA;AACE,YAAU,SAAQ,KAAK,MAAM,KAAK,MAAe,CAAC;;AAGpD;AAEE,SAAO,mBAAS,QAAQ,SAAS;AACjC,SAAO,MAAM,YAAY;AACzB,SAAO,KAAK;AACZ,cAAY,SAAQ,UAAU,IAAI,EAAE,QAAQ;AAC5C,gBAAc;AACd,cAAY;AAEZ,SAAO,YAAY,MAAM,cAAc,sBACnC,wBAAwB,GAAG,MAAM;AAErC,sBAAoB;AACpB,MAAI;AACF,yBAAqB,SAAQ,UAAU,IAAI,WAAW,QAAQ;AAC9D,QAAI,iBAAiB;AAGnB,cAAQ;AACR,gBAAU;AACV,sBAAgB,qBAAa,iBACzB,cAAc,QAAQ,MAAM,MAAM;;;AAI1C,uBAAa,2BACT,OAAO,eAAe,MAAM,MAAM;AACtC,kCACI,qBAAa,0BAA0B,MAAM,OAAO;AACxD,qBAAmB,aAAK,cAAc;AAEtC,cAAY,SAAQ,WAAW,UAAU,MAAM;AAC/C,MAAI,aAAK,cAAc,MAAM,WAAW;AACtC,kBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAChD,YAAQ,SAAS,YAAY;;AAG/B,MAAI;AAEF,aAAQ,YAAY,WAAW;;AAGjC,MAAI;AAEF,qBAAiB,qBAAa,qBAAqB,IAAI,OAAO;AAC9D,QAAI,QAAQ;;AAGd,SAAO;;AAGF,kBAAgC;EACrC,YAAY;EACZ,aAAa;EACb,WAAW;EACX,YAAY;;ACpFd;;;;;;;;;;;;;;;;AAmBO,mBAAiC,wBAAwB;ACnBhE;;;;;;;;;;;;;;;;AAuBA;AAIA;AACE,aAAW,SAAQ,KAAK,MAAM,MAAM,MAAiB;IACnD;IACA;IACA;IACA;IACA;IACA;;;AAIJ;AAEE,SAAO,QAAQ,mBAAS,SAAS;AACjC,SAAO,KAAK;AACZ,cAAY,SAAQ,UAAU,IAAI,EAAE,QAAQ;AAC5C,SAAO,QAAQ;AAEf,mBAA2B,IAAI,MAAM,EAAE,MAAM;AAC7C,eAAa,GAAG,IAAI,SAAS,QAAQ;AACnC,aAAS,KAAK,EAAE,MAAM,KAAK,KAAK;;AAElC,sBAAoB,IAAI,WAAW,IAAI,WAAW,EAAE,OAAO;AAC3D,wBAAsB,IAAI,WAAW,IAAI,WAAW,UAAU;AAE9D,cAAY,SAAQ,WAAW,UAAU,EAAE;AAC3C,gBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAChD,WACI,KAAK,aAAa,EAAE,MAAM,QAAQ,eAAe,SAAS,QAC1D,SAAS,IAAI,QAAQ;AACzB,SAAO;;AAGF,mBAAiC;EACtC,YAAY;EACZ,aAAa;EACb,WAAW;EACX,YAAY;;AChEd;;;;;;;;;;;;;;;;AAuBA;AAGE,SAAO,QAAQ,mBAAS,SAAS;AACjC,SAAO,SAAS;AAChB,SAAO,QAAQ;AACf,qBAAmB,MAAM,MAAM;AAC/B,eAAa,MAAM,MAAM;AACzB,mBAA2B,IAAI,MAAM,OAAO;AAC5C,iBAAe;AACf,eAAa,GAAG,IAAI,MAAM;AACxB,QAAI,MAAM;AACR,eAAS,cAAc,MAAM,MAAM;;;AAGvC,eAA2B,IAAI,MAAM;AACrC,gBAAc,IAAI,MAAM,MAAM,KAAK;AACnC,eAAa,MAAM,MAAM;AACzB,OAAK,QAAQ;AACb,eAAa,GAAG,IAAI,KAAK,QAAQ;AAC/B,UAAM,QAAQ;AACd,SAAK,KAAK,MAAM,CAAC,QAAQ,CAAC,GAAG,QAAQ,OAAO,CAAC,OAAO,OAAO,SAAA;;AAE7D,SAAO,KAAK,IAAI,EAAE,QAAQ,WAAY,EAAC,QAAQ,OAAO,OAAO;;AAGxD,qBAAmC;EACxC,YAAY;EACZ,aAAa;EACb,YAAY;;ACpDd;;;;;;;;;;;;;;;;AAqBA;AACE,SAAO,SAAS,IAAI,qBAAW;AAC/B,cAAY,SAAQ,WAAW,EAAE,OAAO,EAAE;AAC1C,kBAAgB,SAAQ,mBAAmB;AAC3C,UAAQ,KAAK;AACb,SAAO;;AAGF,wBAAsC;EAC3C,YAAY;EACZ,aAAa;EACb,YAAY;;AChCd;;;;;;;;;;;;;;;;AAmGA,sBAAsC;EACpC;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;;AAGF,2BAA2B;AACzB,iBAAe;;ACnLjB;;;;;;;;;;;;;;;;AAmBA,aAAY;AAMZ,KAAI,aAIA,yBAAyB,YAAY,YAAY,SAAS,IAAI,WAAW;EACvE;EAAG;EAAI;EAAK;EAAK;EAAG;EAAG;EAAG;EAAG;EAAI;EAAG;EAAK;EAAI;EAAI;EAAG;EACpD;EAAG;EAAI;EAAK;EAAK;EAAG;EAAG;EAAG;EAAG;EAAI;EAAG;EAAK;EAAI;EAAI;;AAOvD,KAAI,aAAa,gCAAgC;AAG/C,MAAI,KAAI,IAAI;AACV,WAAO;;AAGT;AAGE,QAAI,iBAAiB,MAAM,YAAY,IAAI,kBAAkB;AAG7D,WAAO,YAAY,SAAS,IAAI,WAAW;MACzC;MAAG;MAAI;MAAK;MAAK;MAAG;MAAI;MAAI;MAAG;MAAG;MAAG;MAAI;MAAI;MAAK;MAAI;MAAG;MAAG;MAAI;MAAG;MACnE;MAAG;MAAI;MAAK;MAAK;MAAG;MAAI
;MAAI;MAAG;MAAG;MAAG;MAAI;MAAI;MAAK;MAAI;MAAG;MAAG;MAAI;;;AAGlE,WAAO;;;ACnCX,wCAAoC,WAAA;ACrB7B,2BAA2B;ACwBlC,0BAAwB,WAAA;AAxBxB;;;;;;;;;;;;;;;;AA0BA,sBAAsB;0BAaW;EAK/B;AACE;AADiB,SAAA,OAAA;AAHX,SAAA,mBAAmB;AAKzB,SAAK,KAAK,KAAK;AACf,SAAK,YAAY,IAAI,YAAY,MAAM;;EAGzC;AAEE,mBAAe;AACf,SAAK,KAAK,QAAQ,QAAQ,OAAO;AACjC,WAAO;;EAGT;AACE,WAAO,KAAK,UAAU;;QAGlB;AACJ,kBAAc,aAAK;AACnB;AACA,qBAAiB,aAAK,QAAQ;AAC9B,WAAO,CAAC;;EAGV;AAGE,eAAW,KAAK;AAChB,QAAI,UAAU;AACZ,0BAAoB;AACpB,WAAK,UAAU,IACX,QAAQ,CAAC,IAAI,aAAa,OAAO,OAAO,cAAc;AAC1D;;AAGF,iBAAa,aAAK,cAAc;AAChC,qBAAiB,OAAO,aAAK,gBAAgB;AAC7C,yBAAqB,KAAK,KAAK,QAAQ;AAEvC,SAAK,UAAU,IAAI,QAAQ,CAAC,IAAI,cAAc,OAAO;AAErD,SAAK,KAAK,KAAK,eAAe,IAAI,MAAM;AAExC,QAAI,UAAU;AACZ,WAAK,KAAK,OAAO,IACb,IAAI,WACC,OAAmC,QACnC,OAAmC,YAAY,WACpD;;;QAIF;AACJ,WAAO,KAAK,SAAS;;EAGvB;AACE,WAAO,cAAc,OAAO,OAAO,eAC/B,KAAK,UAAU,IAAI;AACvB,QAAI,UAAU;AACZ,aAAO;;AAET,kBAAc,KAAK,KAAK,OAAO,MAC3B,cACA,eAAe,aAAK,cAAc,SAAS,aAAK,gBAAgB;AACpE,WAAO,qBAAqB,MAAM,QAAQ;;EAG5C;AACE,kBAAa,KAAK,UAAU,IAAI;AAChC,SAAK,KAAK,MAAM,MAAK;AACrB,SAAK,KAAK,KAAK,YAAY,MAAK;AAChC,SAAK,UAAU,OAAO;;EAGxB;AACE,WAAO;;EAKT;AACE,WAAO,KAAK,UAAU,IAAI,QAAQ;;EAGpC;AACE,SAAK,KAAK,KAAK;AACf,SAAK,OAAO;;EAGd;AACE,WAAO,CAAC,YAAY;;EAStB;AAEE;AACA,QAAI,gBAAgB;AAClB,eAAS,KAAK,MAAM,MAAmB,OAAO;;AAE9C,eAAS;AACT,iBAAW,KAAK;AAChB,WAAK,UAAU,IAAI,QAAQ,CAAC,IAAI,cAAc,OAAO;AACrD,mBAAa,aAAK,cAAc;AAChC,WAAK,KAAK,KAAK,eAAe,IAAI,MAAM;;AAE1C,WAAO,CAAC,QAAQ,OAAO;;EAGzB,oBAAoB,OAAO,OAAO;AAEhC,oBAAe,KAAK,KAAK,OAAO;AAChC,WAAO,gBAAgB,KAAK,UAAU,IAAI;AAC1C,iBAAa,aAAK,cAAc;AAChC,YAAQ;WACD;AACH,eAAO,IAAI,aAAa,SAAQ,cAAc;WAC3C;AACH,eAAO,IAAI,WAAW,SAAQ,cAAc;WACzC;AACH,eAAO,IAAI,WAAW,SAAQ,cAAc;;AAE5C,cAAM,IAAI,MAAM,iBAAiB;;;;AAKzC,gBAAgB,QAAQ;AACtB,SAAO,QAAQ,MAAM;AACrB,SAAO,IAAI,YAAY;GACtB;AAEH;AAEE,SAAO;AACL,iBAAK,MAAM,MAAM,CAAC,aAAa,gBAAgB,KAAK;AAClD,UAAI,CAAC,SAAS;AACZ,gBAAQ,IAAI,EAAE,uCAAuC;;AAEvD,eAAS,cAAc,KAAK;AAC1B,oBAAY,YAAY,QAAQ,SAAS,KAAK;AAC5C,mBAAS,OAAO;;;;AAItB,WAAO;;;AAUX;AAGE,MAAI,YAAY;AAGd,WAAO;;AAGT,aAA2B;AAC3B,MAAI,iBAAiB;AACnB,WAAO;aACE;AACT,WAAO;;AAGT,MAAI,eAAe;AACjB,QAAI,YAAY,SAAS;AACvB,aAAO,YAAY;;;AAIvB,SAAO,mBAAmB;;AAU5B;AACE,4CAA0C,MAAM,QAAQ,IAAI;IAC1D,MAAM,SAAS;IACf,MAAM,SAAS;;AAGjB,SAAO,IAAI,QAAQ;AACjB,0BAAyC;AAOzC,kBAAc,aAAa;AACzB,UAAI,KAAK,SAAS;AAChB,yBAAiB;AACjB,qBAAa,IAAI,KAAK,CAAC,WAAW,CAAC,MAAM;AACzC,eAAO,IAAI,gBAAgB;;AAG7B,UAAI,KAAK,SAAS;AAChB,eAAO,oBACH,eAA0B,kBAC1B,kBAAkB,OAAO,iBAAiB;;AAEhD,aAAO,SAAS;;AAMlB,QAAI;AACF,oBAAc,kBACV,0BAA0B,oBACtB,eAA0B,kBAC1B,kBAAkB,OAAO,iBAAiB;;AAEpD;AAEA,QAAI,oBAAoB,iBAAiB,YAAY;AACnD,aAAO,gCAAA,QAAwB;AAC/B,WAAK,sBAAsB,IAAI,KAC3B,CAAC,qEACA,gCAAA,QAAwB,aACzB,CAAC,MAAM;;AAGX,aAAO,kBAAA,QAAY;;AAGrB,2BAA+B;AAE/B,SAAK,OAAO;MACV,MAAM,KAAK,MAAM,QAAQ,MAAM;MAC/B,gBAAgB,KAAK,MACjB,mBAAmB,MACnB;QACE;QACA;QACA;;MAEN,aAAa,KAAK,MAAM,gBAAgB,gBAAgB,CAAC;MACzD,SAAS,KAAK,MAAM,WAAW,gBAAgB;;AAEjD,sBAAkB;AAClB,SAAK,uBAAuB;AAC1B,oBAAc;AACd,oBAAc;AACd,cAAQ,CAAC;;AAEX,SAAK,UAAU;AACb,UAAI;AAEF;;AAEF,UAAI;AAGF;;AAEF,oBAAc;AACd,wBACI;AAEJ,aAAO,CAAC,SAAS;;;;AAKvB;AAEE,UAAQ;SACD;AACH,aAAO,IAAI,aAAa;SACrB;AACH,aAAO,IAAI,WAAW;SACnB;AACH,aAAO,IAAI,WAAW;;AAEtB,YAAM,IAAI,MAAM,iBAAiB;;;AAIvC,wBAAwB;EACtB;EAA0B;EAC1B;;AAIF,eAAuB;AACvB,qBAA6B;AAC7B,kBAAsD;AACtD,kBAAkB;AAClB,kBAAkB;ACoDZ,0DAEiB;AACrB,MAAI;AACF,UAAM,IAAI,MACN;;AAKN,MAAI,OAAO,oBAAoB;AAC7B,qBAAiB;;AAEjB,kBAAc;AACd,yBACI,gBAAgB,OAAO,UAAQ,YAAY,SAAS;AACxD,QAAI,aAAa,SAAS;AACxB,YAAM,IAAI,MACN,2DACG,aAAa,KAAK;;;AAM7B,gBAAc;;AC5ahB;;;;;;;;;;;;;;;;ACAA;;;;;;;;;;;;;;;;ACaA,uBAA0B,GAAA;ACb1B,4BAAe;EACb;EAAK;EAAI;EAAK;EAAI;EAAG;EAAI;EAAK;EAAK;EAAK;EAAI;EAAI;EAAI;EAAK;EAAK;EAAI;EAAK;EACvE;
EAAK;EAAK;EAAI;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAI;EAAI;EAAI;EAC1E;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAI;EAAK;EAAI;EAAI;EAAK;EAAI;EACrE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAI;EAAI;EAAI;EAAI;EAAK;EAAK;EAAI;EAAI;EACzE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAI;EAAI;EAAI;EAAI;EAAI;EAC1E;EAAK;EAAI;EAAI;EAAI;EAAK;EAAI;EAAK;EAAI;EAAK;EAAK;EAAK;EAAI;EAAI;EAAK;EAAK;EAAI;EACxE;EAAK;EAAI;EAAI;EAAI;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAAI;EAAI;EAAI;EAAK;EAAK;EACpE;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAG;EAAK;EAAI;EAAI;EAAK;EAAK;EAAK;EAAG;EACpE;EAAK;EAAI;EAAK;EAAK;EAAI;EAAI;EAAI;EAAI;EAAK;EAAI;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EACzE;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAAI;EAAI;EAAI;EAAI;EAAK;EACrE;EAAK;EAAK;EAAI;EAAI;EAAK;EAAI;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EACtE;EAAI;EAAK;EAAI;EAAK;EAAI;EAAI;EAAK;EAAI;EAAI;EAAI;EAAI;EAAI;EAAI;EAAK;EAAI;EAAK;EACrE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAI;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EACxE;EAAI;EAAI;EAAI;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAI;EAAK;EAAK;EAAI;EAAI;EAAI;EACxE;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAI;EACvE;EAAK;EAAI;EAAI;EAAI;EAAK;EAAK;EAAK;EAAI;EAAI;EAAI;EAAK;EAAK;EAAK;EAAI;EAAI;EAAK;EACxE;EAAK;EAAI;EAAI;EAAI;EAAI;EAAK;EAAK;EAAK;EAAI;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EACrE;EAAK;EAAK;EAAI;EAAI;EAAK;EAAG;EAAK;EAAK;EAAK;EAAK;EAAI;EAAI;EAAI;EAAI;EAAI;EAAK;EACvE;EAAI;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAI;EAAK;EAAK;EAAI;EAAI;EAAI;EAAK;EACrE;EAAI;EAAI;EAAI;EAAI;EAAI;EAAG;EAAG;EAAK;EAAI;EAAK;EAAK;EAAI;EAAI;EAAI;EAAI;EAAK;EAAG;EACrE;EAAK;EAAI;EAAI;EAAI;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAI;EAAK;EAAI;EAAI;EAAI;EACxE;EAAG;EAAI;EAAI;EAAK;EAAI;EAAK;EAAK;EAAK;EAAI;EAAI;EAAI;EAAI;EAAI;EAAK;EAAK;EAAK;EACtE;EAAK;EAAI;EAAG;EAAI;EAAI;EAAI;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EACxE;EAAK;EAAI;EAAI;EAAK;EAAI;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAI;EAAK;EAAI;EAAI;EAAI;EAAK;EAAK;EAAK;EAAI;EAAI;EAAG;EAAK;EAAK;EAAI;EACvE;EAAI;EAAK;EAAK;EAAI;EAAI;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAI;EAAI;EACpE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAC1E;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAI;EAAI;EAAI;EAAK;EAAK;EAAK;EAAI;EAAI;EACzE;EAAI;EAAI;EAAI;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAI;EAAK;EACrE;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACrE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAI;EAAK;EAAI;EAAI;EAAK;EAAI;EACtE;EAAK;EAAI;EAAI;EAAK;EAAI;EAAI;EAAK;EAAK;EAAI;EAAK;EAAK;EAAI;EAAK;EAAI;EAAI;EAAK;EACxE;EAAI;EAAK;EAAI;EAAI;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAI;EACrE;EAAK;EAAK;EAAK;EAAK;EAAI;EAAG;EAAG;EAAK;EAAK;EAAI;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EACzE;EAAK;EAAK;EAAK;EAAI;EAAI;EAAG;EAAI;EAAK;EAAI;EAAI;EAAK;EAAI;EAAK;EAAK;EAAI;EAAK;EACvE;EAAI;EAAI;EAAK;EAAI;EAAI;EAAK;EAAI;EAAI;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EACzE;EAAK;EAAI;EAAI;EAAK;EAAI;EAAI;EAAK;EAAI;EAAI;EAAK;EAAI;EAAI;EAAK;EAAK;EAAI;EAAK;EACvE;EAAI;EAAI;EAAK;EAAI;EAAI;EAAI;EAAI;EAAK;EAAK;EAAK;EAAK;EAAI;EAAI;EAAK;EAAK;EAAK;EACxE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAI;EAAK;EAAK;EAAK;EAAG;EACvE;EAAK;EAAI;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACpE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAG;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACpE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAA
K;EACtE;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACrE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAG;EAAG;EAAK;EAAK;EAAK;EAAK;EACvE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EACzE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAG;EAAK;EAAK;EACpE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAG;EAAI;EAAK;EAAK;EAAK;EACxE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAI;EAAK;EAAK;EAAK;EAAK;EAAI;EACxE;EAAK;EAAK;EAAK;EAAG;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAG;EAAK;EACvE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAG;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACpE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACrE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAI;EACpE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAG;EAAK;EAAK;EAAK;EAAK;EACpE;EAAK;EAAG;EAAK;EAAK;EAAI;EAAI;EAAK;EAAK;EAAK;EAAK;EAAI;EAAI;EAAK;EAAK;EAAK;EACrE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAG;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACpE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAG;EAAK;EAAK;EAAK;EAAK;EAAK;EACpE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAI;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EACxE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EACrE;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAAI;EAAI;EAAI;EAAK;EAAK;EAAK;EAAI;EAAI;EACrE;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EACpE;EAAK;EAAK;EAAK;EAAI;EAAI;EAAI;EAAK;EAAG;EAAK;EAAI;EAAI;EAAK;EAAI;EAAK;EAAK;EAAK;EACxE;EAAK;EAAK;EAAI;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAI;EA
CvE;EAAI;EAAI;EAAK;EAAK;EAAK;EAAI;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACvE;EAAK;EAAI;EAAI;EAAI;EAAI;EAAG;EAAI;EAAG;EAAK;EAAK;EAAI;EAAI;EAAI;EAAI;EAAK;EAAK;EAAK;EACxE;EAAK;EAAI;EAAK;EAAI;EAAK;EAAK;EAAI;EAAK;EAAI;EAAI;EAAI;EAAK;EAAK;EAAI;EAAK;EACpE;EAAK;EAAK;EAAI;EAAI;EAAK;EAAK;EAAK;EAAI;EAAK;EAAI;EAAI;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAI;EAAK;EAAI;EAAK;EAAG;EAAK;EAAK;EAAI;EAAK;EAAK;EAAI;EAAK;EAAI;EAAI;EAAK;EACxE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACrE;EAAK;EAAK;EAAK;EAAI;EAAK;EAAI;EAAK;EAAI;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAI;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAI;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EACxE;EAAI;EAAI;EAAI;EAAI;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAG;EAAI;EAAI;EAAG;EAAK;EACrE;EAAK;EAAK;EAAK;EAAI;EAAK;EAAI;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EACvE;EAAI;EAAK;EAAK;EAAI;EAAI;EAAI;EAAI;EAAK;EAAK;EAAI;EAAK;EAAI;EAAK;EAAI;EAAK;EAAK;EACxE;EAAK;EAAK;EAAI;EAAK;EAAI;EAAK;EAAI;EAAI;EAAI;EAAK;EAAK;EAAK;EAAI;EAAI;EAAI;EAAI;EACvE;EAAK;EAAK;EAAI;EAAK;EAAI;EAAI;EAAI;EAAI;EAAI;EAAI;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EACxE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAI;EACxE;EAAI;EAAI;EAAI;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAI;EAAI;EAAK;EAAK;EAAI;EAAK;EACzE;EAAI;EAAI;EAAK;EAAK;EAAI;EAAI;EAAI;EAAI;EAAK;EAAI;EAAG;EAAK;EAAK;EAAK;EAAK;EAAK;EACvE;EAAK;EAAI;EAAK;EAAK;EAAG;EAAK;EAAI;EAAI;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACrE;EAAK;EAAK;EAAI;EAAI;EAAI;EAAG;EAAK;EAAI;EAAI;EAAI;EAAI;EAAK;EAAK;EAAK;EAAI;EAAI;EACrE;EAAK;EAAI;EAAK;EAAI;EAAK;EAAK;EAAK;EAAI;EAAI;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EACtE;EAAK;EAAI;EAAI;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EACvE;EAAK;EAAG;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAI;EAAI;EAAI;EAAK;EAAK;EAAK;EACrE;EAAK;EAAG;EAAK;EAAI;EAAK;EAAI;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EACrE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAC1E;EAAK;EAAK;EAAK;EAAK;EAAG;EAAK;EAAI;EAAI;EAAI;EAAI;EAAK;EAAK;EAAK;EAAI;EAAI;EAAI;EACvE;EAAI;EAAI;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACvE;EAAK;EAAI;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACzE;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAI;EAAK;EAAK;EAAI;EAAK;EAAK;EAAI;EAAK;EACvE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAI;EAAK;EAAK;EAAI;EAAK;EAAK;EACxE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAI;EAAI;EACvE;EAAK;EAAK;EAAI;EAAI;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAAI;EAAI;EAC1E;EAAK;EAAK;EAAI;EAAK;EAAK;EAAI;EAAK;EAAI;EAAI;EAAK;EAAK;EAAI;EAAK;EAAK;EAAI;EACrE;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EACpE;EAAK;EAAI;EAAK;EAAK;EAAI;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAI;EAAI;EAAK;EACtE;EAAK;EAAK;EAAI;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACpE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAI;EAAK;EAAK;EAAK;EAAI;EAAK;EAAG;EAAI;EAAK;EAAI;EAAK;EAAK;EAAK;EAAG;EAAI;EACvE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAG;EACpE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EACpE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EACrE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACrE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAA
K;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EACrE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAG;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACpE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EACrE;EAAK;EAAI;EAAK;EAAG;EAAK;EAAG;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAG;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACpE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EACrE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAG;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EACxE;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACrE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAG;EAAK;EAAG;EAAK;EAAK;EAAK;EAAK;EAAK;EACvE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAG;EAAK;EAAK;EAAK;EAAK;EACxE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;;ACvKnE,iBAA0B,WAAA;AAC1B,YAAqB,WAAA;AACrB,eAAwB,WAAA;AACxB,gBAAyB,WAAA;AACzB,gBAAyB,WAAA;ACLzB;;;;;;;;;;;;;;;;AAkBA;AACE,SAAO;IACL,KAAK,IAAI,IAAI,SAAS,KAAK,IAAI,WAAW;IAC1C,KAAK,IAAI,IAAI,SAAS,KAAK,IAAI,WAAW;;;AAG9C;AACE,SAAO;IACL,IAAI,WAAW,KAAM,KAAI,SAAS,KAAK,IAAI,WAAW,MAAM;IAC5D,IAAI,WAAW,KAAM,KAAI,SAAS,KAAK,IAAI,WAAW,MAAM;;;AAGhE;AACE,YAAU,OAAM,MAAM;AACtB,YAAU,OAAM,MAAM;AACtB,gBAAc,CAAC;IACb,IAAI,WAAW,KAAK;IACpB,IAAI,WAAW,KAAK;IACpB,IAAI,SAAS,KAAK;IAClB,IAAI,SAAS,KAAK;;AAEpB,SAAO,GAAG,MAAM,cAAc,QAAO,OAAO,CAAC,IAAI;;AAEnD;AACE,qBAAmB,CAAC,IAAI,WAAW,KAAK,OAAO,IAAI,IAAI,WAAW,KAAK,OAAO;AAC9E,mBAAiB,CAAC,IAAI,SAAS,KAAK,OAAO,IAAI,IAAI,SAAS,KAAK,OAAO;AACxE,wBAAsB,IAAI,cAAc,IAAI;AAC1C,wBAAoB,CAAC,MAAM,KAAK,OAAO,IAAI,MAAM,KAAK,OAAO;AAC7D,WAAO;;AAET,SAAO,CAAE,YAAY,UAAU,eAAe,YAAY,IAAI;;AAEhE,kCAAkC;AAChC,iBAAe,aAAa;AAC5B,eAAa,WAAW;AACxB,sBAAoB,CAAC,SAAS,KAAK,KAAK,GAAG,SAAS,KAAK,KAAK;AAC9D,qBAAmB,CAAC,OAAO,KAAK,YAAY,IAAI,OAAO,KAAK,YAAY;AAC
xE,mBAAiB,CAAC,OAAO,KAAK,YAAY,IAAI,OAAO,KAAK,YAAY;AACtE,SAAO,CAAE,YAAY,UAAU,eAAe,IAAI;;AAEpD;AACE,kBAAgB,aAAa;AAC7B,eAAa,WAAW;AACxB,kBAAgB,KAAK,IAAI,GAAG;AAC5B,mBAAiB,UAAU;AAC3B,qBAAmB,CAAC,QAAQ,KAAK,UAAU,QAAQ,KAAK;AACxD,mBAAiB,CAAC,QAAQ,KAAK,UAAU,QAAQ,KAAK;AACtD,SAAO,CAAE,YAAY,UAAU,eAAe,IAAI;;AAEpD;AACE,kBAAgB;IACd,IAAI,SAAS,KAAK,IAAI,WAAW;IACjC,IAAI,SAAS,KAAK,IAAI,WAAW;;AAEnC,sBAAoB,CAAC,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,YAAY;AAC3E,qBAAmB,CAAC,IAAI,WAAW,KAAK,YAAY,IAAI,IAAI,WAAW,KAAK,YAAY;AACxF,mBAAiB,CAAC,IAAI,SAAS,KAAK,YAAY,IAAI,IAAI,SAAS,KAAK,YAAY;AAClF,SAAO,CAAE,YAAY,UAAU,eAAe,IAAI;;AC3EpD;;;;;;;;;;;;;;;;AAgBA;AACE,SAAO,QAAQ,IAAI,KAAK,KAAK,KAAK,MAAO,SAAQ,KAAK,MAAO,KAAI,KAAK;;AAExE;AACE,kBAAgB,KAAK,KAAK,IAAI,KAAK,MAAM,CAAE,QAAO,KAAK,OAAO,KAAK,OAAO,KAAK,OAAO;AACtF,SAAO,iBAAiB;;AAE1B,+BAA+B,UAAU,CAAC,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,GAAG;AACvE;AACE,gBAAc;AACd,eAAa,GAAG,IAAI,GAAG,QAAQ;AAC7B,eAAW,GAAG,KAAK,GAAG;;AAExB,SAAO;;AAET;AACE,iBAAe;AACf,eAAa,GAAG,IAAI,IAAI,QAAQ;AAC9B,WAAO,KAAK,IAAI,GAAG;;AAErB,SAAO;;AAET;AACE,kBAAgB;AAChB,eAAa,KAAK;AAClB,iBAAe,GAAG,MAAM,MAAM;AAC5B,YAAQ,KAAK;AACb,mBAAe,GAAG,MAAM,MAAM;AAC5B,cAAQ,KAAK,KAAK,IAAI,KAAK,MAAM,mBAAmB,MAAM;;;AAG9D,SAAO;;AAET;AACE,eAAa,KAAK,IAAI;AACtB,eAAa,KAAK,IAAI;AACtB,yBAAuB,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,MAAM,MAAM,IAAI,CAAC,GAAG,GAAG;AAClE,4BAA0B,uBAAuB,OAAO,IAAI,OAAO;AACnE,mCAAiC,0BAA0B,mBAAmB;AAC9E,oCAAkC,uBAAuB,CAAC,OAAO,IAAI,CAAC,OAAO;AAC7E,SAAO,0BAA0B,0BAA0B;;AAE7D;AACE,4BAA0B,CAAC,CAAC,OAAO,GAAG,IAAI,OAAO,GAAG,KAAK,CAAC,OAAO,GAAG,IAAI,OAAO,GAAG;AAClF,+BAA6B,CAAC,OAAO,GAAG,IAAI,OAAO,GAAG;AACtD,8BAA4B;IAC1B,CAAC,IAAI,kBAAkB,IAAI;IAC3B,CAAC,IAAI,kBAAkB,IAAI;;AAE7B,SAAO;IACL,kBAAkB,GAAG,OAAO,oBAAoB;IAChD,kBAAkB,GAAG,OAAO,oBAAoB;IAChD,CAAC,GAAG,GAAG;;;AAGX;AACE,SAAO;IACL,IAAI,uBAAuB,eAAe;IAC1C,IAAI,uBAAuB,eAAe;;;ACpE9C,iBAA0B,WAAA;AAC1B,gBAAyB,WAAA;AACzB,cAAuB,WAAA;AACvB,gBAAyB,WAAA;ACNzB,qBAAe;EACb,SAAS;EACT,UAAU;EAEV,SAAS;EACT,OAAO;EAIP,SAAS;EAIT,YAAY;EAKZ,QAAQ;EAIR,gBAAgB;EAKhB,QAAQ;IACN,SAAS;IACT,OAAO;IACP,QAAQ;IAIR,QAAQ;IACR,YAAY;IACZ,UAAU;IACV,WAAW;IACX,MAAM;IACN,YAAY;IACZ,KAAK;IACL,UAAU;IACV,OAAO;IACP,SAAS;IACT,YAAY;IACZ,aAAa;IACb,UAAU;IACV,UAAU;;EAGZ,SAAS;IACP,SAAS;;EAGX,MAAM;IACJ,SAAS;IAIT,UAAU;MACR,WAAW;MAIX,WAAW;MACX,UAAU;MAEV,YAAY;MAKZ,eAAe;MACf,cAAc;MAEd,gBAAgB;;IAKlB,MAAM;MACJ,SAAS;MACT,WAAW;MACX,WAAW;;IAGb,MAAM;MACJ,SAAS;MACT,WAAW;MACX,WAAW;;IAGb,KAAK;MACH,SAAS;MACT,WAAW;MAEX,WAAW;MACX,YAAY;;IAId,QAAQ;MACN,SAAS;MACT,eAAe;MACf,WAAW;MACX,WAAW;MACX,YAAY;;IAId,SAAS;MACP,SAAS;MACT,WAAW;MACX,eAAe;MACf,YAAY;MACZ,WAAW;;;EAIf,MAAM;IACJ,SAAS;IACT,WAAW;IACX,WAAW;IACX,eAAe;IAEf,gBAAgB;IAEhB,WAAW;;EAGb,MAAM;IACJ,SAAS;IACT,WAAW;IACX,YAAY;IAKZ,eAAe;IACf,cAAc;IAEd,gBAAgB;IAEhB,UAAU;IAEV,WAAW;IACX,UAAU;MACR,WAAW;;IAEb,UAAU;MACR,WAAW;;;;;AC3IjB,0BAA0B;EACxB,MAAM,CAAE,UAAU,CAAE,YAAY,IAAK,KAAK,CAAE,YAAY,IAAK,QAAQ,CAAE,YAAY,IAAK,SAAS,CAAE,YAAY;EAAO,MAAM,CAAE,YAAY;;AAI5I,aAAY;AACV,MAAI,OAAO,gBAAgB;AAAa,WAAO,YAAY;AAC3D,SAAO,SAAS,OAAO,QAAQ,OAAO,YAAY,MAAO;;AAI3D;AACE,mBAAiB,SAAS,OAAO,OAAO,QAAQ;AAChD,SAAO,QAAQ,OAAO;AACpB,WAAO,KAAK,OAAO,IAAI,QAAQ;AAC7B,mBAAa,KAAK;AAClB,mBAAa,IAAI;AACjB,UAAI,MAAM,QAAQ,SAAS,MAAM,QAAQ;AACvC,aAAK,OAAO,KAAK,OAAO,GAAG;iBAClB,SAAS,SAAS,SAAS;AACpC,aAAK,OAAO,UAAU,MAAM;;AAE5B,aAAK,OAAO;;;AAGhB,WAAO;KACN;;;EAIH,0BAAyB;AACvB,SAAK,KAAK;AACV,SAAK,UAAc;AACnB,SAAK,SAAS,UAAiB,gBAAS;AACxC,SAAK,KAAK;AACV,SAAK,QAAQ;AACb,SAAK,aAAa;AAClB,SAAK,qBAAqB;AAC1B,SAAK,cAAc;AACnB,SAAK,WAAW;AAChB,SAAK,OAAO;AAEZ,SAAK,SAAS;MACZ,UAAU;MACV,SAAS;MACT,UAAU;MACV,MAAM;MACN,KAAK;MACL,QAAQ;MACR,SAAS;;AAGX,SAAK,WAAW;AAChB,SAAK,MAAM;AACX,SAAK
,SAAS;AACd,SAAK,UAAU;AACf,SAAK,OAAO;AACZ,SAAK,OAAO;;EAId;AAEE,QAAI,OAAO,KAAK,OAAO;AAAS,cAAQ,IAAI,UAAU,GAAG;;EAG3D;AACE,QAAI,KAAK,OAAO;AAAS,aAAe,QAAA;AACxC,WAAO;;EAIT;AACE,QAAI,CAAC,KAAK;AAAoB;AAC9B,oBAAgB,GAAG,SAAS,MAAM;AAClC,qBAAiB,KAAK;AACtB,SAAK,aAAa;AAClB,mBAAe,UAAU;AACzB,QAAI,WAAW;AAAG,WAAK,IAAI,GAAG,KAAK;;EAIrC;AACE,QAAI,CAAC,KAAK;AAAa,aAAO;AAC9B,QAAI,CAAC;AAAO,aAAO;AACnB,QAAI,GAAG,IAAI,MAAM,WAAW,CAAE,kBAAiB,GAAG;AAChD,aAAO;;AAET;AACE,SAAG;;AAEH,aAAO;;AAET,WAAO;;QAIH;AACJ,SAAK,QAAQ;AACb,uBAAkB;AAClB,QAAI;AAAY,WAAK,SAAS,UAAU,KAAK,QAAQ;AAErD,QAAI,KAAK;AACP,WAAK,aAAa;AAClB,WAAK,IAAI,YAAY,KAAK,kCAAkC,GAAG;AAC/D,WAAK,IAAI,kBAAkB,KAAK;AAChC,WAAK,IAAI,UAAU,GAAG,IAAI;AAC1B,WAAK,WAAW;;AAElB,QAAI,KAAK,OAAO;AACd;QACE,KAAK,OAAO;QACZ,KAAK,OAAO;QACZ,KAAK,OAAO;QACZ,KAAK,OAAO;QACZ,KAAK,OAAO;QACZ,KAAK,OAAO;UACV,MAAM,QAAQ,IAAI;QACpB,KAAK,OAAO,YAAa,MAAK,OAAO,KAAK,UAAmB,SAAA,KAAK,KAAK,OAAO,QAAQ;QACtF,KAAK,OAAO,OAAS,MAAK,OAAO,KAAK,WAAW,KAAK,OAAO,KAAK,IAAI,UAAe,IAAA,KAAK,KAAK,UAAU;QACzG,KAAK,OAAO,UAAY,MAAK,OAAO,KAAK,WAAW,KAAK,OAAO,KAAK,OAAO,UAAkB,OAAA,KAAK,KAAK,UAAU;QAClH,KAAK,OAAO,WAAa,MAAK,OAAO,KAAK,WAAW,KAAK,OAAO,KAAK,QAAQ,UAAmB,QAAA,KAAK,KAAK,UAAU;QACrH,KAAK,OAAO,WAAY,MAAK,OAAO,KAAK,UAAkB,QAAA,KAAK,KAAK,UAAU;QAC/E,KAAK,OAAO,YAAa,MAAK,OAAO,KAAK,UAAmB,SAAA,KAAK,KAAK,OAAO,QAAQ;;;AAGxF,UAAI,KAAK,OAAO,KAAK,WAAW,CAAC,KAAK,OAAO;AAAU,aAAK,OAAO,WAAW,MAAe,SAAA,KAAK,KAAK,OAAO;AAC9G,UAAI,KAAK,OAAO,KAAK,WAAW,KAAK,OAAO,KAAK,IAAI,WAAW,CAAC,KAAK,OAAO;AAAK,aAAK,OAAO,MAAM,MAAU,IAAA,KAAK,KAAK;AACxH,UAAI,KAAK,OAAO,KAAK,WAAW,KAAK,OAAO,KAAK,OAAO,WAAW,CAAC,KAAK,OAAO;AAAQ,aAAK,OAAO,SAAS,MAAa,OAAA,KAAK,KAAK;AACpI,UAAI,KAAK,OAAO,KAAK,WAAW,KAAK,OAAO,KAAK,QAAQ,WAAW,CAAC,KAAK,OAAO;AAAS,aAAK,OAAO,UAAU,MAAc,QAAA,KAAK,KAAK;AACxI,UAAI,KAAK,OAAO,KAAK,WAAW,CAAC,KAAK,OAAO;AAAS,aAAK,OAAO,UAAU,MAAc,QAAA,KAAK,KAAK;AACpG,UAAI,KAAK,OAAO,KAAK,WAAW,CAAC,KAAK,OAAO;AAAU,aAAK,OAAO,WAAW,MAAe,SAAA,KAAK,KAAK,OAAO;;AAEhH,oBAAgB,KAAK,MAAM,SAAQ;AACnC,QAAI,UAAW,MAAK,KAAK,QAAQ;AAAI,WAAK,KAAK,OAAO;;QAIlD;AACJ,uBAAkB;AAClB,QAAI,KAAK,OAAO,WAAY,KAAK,OAAO,YAAY,MAAO,SAAU,GAAG,iBAAiB,KAAK,OAAO;AACnG,WAAK,QAAQ;AAWb,WAAK,IAAI,oBAAoB,KAAK,OAAO;AAEzC,UAAI,KAAK,OAAO,YAAY;AAC1B,aAAK,IAAI,uBAAuB,KAAK,OAAO;AAC5C,qBAAa,KAAK,OAAO;AACzB,qBAAa,MAAM,GAAG,MAAM,SAAS;AACrC,YAAI,CAAC;AAAM,eAAK,IAAI;;AAGtB,YAAM,GAAG,WAAW,KAAK,OAAO;AAChC,SAAG;AAIH,UAAI,KAAK,OAAO,YAAY;AAC1B,YAAI,KAAK,OAAO;AACd,eAAK,IAAI,mDAAmD,KAAK,OAAO;AACxE,aAAG,IAAI,IAAI,kCAAkC,KAAK,OAAO,aAAa,IAAI;;AAG5E,WAAG,IAAI,IAAI,4BAA4B;;AAEzC,YAAM,GAAG;;AAEX,oBAAgB,KAAK,MAAM,SAAQ;AACnC,QAAI,UAAW,MAAK,KAAK,WAAW;AAAI,WAAK,KAAK,UAAU;;QAGxD;AAGJ;AACA;AACA;AACA;AACA,oBAAgB;AAChB,SAAK,QAAQ;AACb,iBAAY;AACZ,kBAAc,MAAM,KAAK,OAAO,SAAS,cAAc,OAAO,KAAK,OAAO;AAC1E,SAAK,KAAK,OAAO,KAAK,MAAM,SAAQ;AACpC,wBAAmB;AACjB,WAAK,QAAQ;AAEb,UAAI,CAAC,MAAK,SAAS,MAAK,MAAM;AAC5B,aAAK,IAAI,4BAA4B,MAAK;AAC1C;;AAGF,WAAK,QAAQ;AACb,UAAI,KAAK,OAAO;AACd,iBAAS,KAAK,OAAO,KAAK,IAAI,UAAc,IAAA,QAAQ,MAAK,OAAO,KAAK,UAAU;;AAE/E,aAAK,QAAQ;AACb,qBAAY;AACZ,iBAAS,KAAK,OAAO,KAAK,IAAI,UAAU,MAAU,IAAA,QAAQ,MAAK,OAAO,KAAK,UAAU;AACrF,aAAK,KAAK,MAAM,KAAK,MAAM,SAAQ;;AAIrC,WAAK,QAAQ;AACb,UAAI,KAAK,OAAO;AACd,oBAAY,KAAK,OAAO,KAAK,OAAO,UAAiB,OAAA,QAAQ,MAAK,OAAO,KAAK,UAAU;;AAExF,aAAK,QAAQ;AACb,qBAAY;AACZ,oBAAY,KAAK,OAAO,KAAK,OAAO,UAAU,MAAa,OAAA,QAAQ,MAAK,OAAO,KAAK,UAAU;AAC9F,aAAK,KAAK,SAAS,KAAK,MAAM,SAAQ;;AAGxC,WAAK,QAAQ;AACb,UAAI,KAAK,OAAO;AACd,qBAAa,KAAK,OAAO,KAAK,QAAQ,UAAkB,QAAA,QAAQ,MAAK,OAAO,KAAK,UAAU;;AAE3F,aAAK,QAAQ;AACb,qBAAY;AACZ,qBAAa,KAAK,OAAO,KAAK,QAAQ,UAAU,MAAc,QAAA,QAAQ,MAAK,OAAO,KAAK,UAAU;AACjG,aAAK,KAAK,UAAU,KAAK,MAAM,SAAQ;;AAEzC,WAAK,QAAQ;AAGb,UAAI,KAAK,OAAO;AACd,SAA
C,QAAQ,WAAW,cAAc,MAAM,QAAQ,IAAI,CAAC,QAAQ,WAAW;;AAG1E,WAAK,QAAQ;AAEb,YAAK,MAAM;AAIX,uBAAkB,MAAK,YAAY,eAAe,MAAK,YAAY,eAE/D,OAAO,KAAK,IAAI,KAAK,IAAI,MAAK,YAAY,YAAY,GAAG,KAAK,MAAK,YAAY,YAAY,GAAG,KAAK,KAAK,IAAI,MAAK,YAAY,aAAa,GAAG,KAAK,MAAK,YAAY,aAAa,GAAG,OACnL;AAGJ,cAAQ,KAAK;QACX,YAAY,MAAK;QACjB,KAAK,MAAK;QACV,MAAM,MAAK;QACX,aAAa,MAAK;QAClB,KAAK,OAAO;QACZ,QAAQ,UAAU;QAClB,kBAAkB,UAAU;QAC5B,SAAS;QACT,MAAO,aAAa,IAAK,KAAK,MAAM,YAAY,MAAM;;AAExD,WAAK,QAAQ;;AAEf,SAAK,QAAQ;AACb,QAAI,KAAK,OAAO;AACd,UAAI,KAAK,KAAK;AAAM,eAAO,KAAK,KAAK;AACrC,UAAI,KAAK,KAAK;AAAK,eAAO,KAAK,KAAK;AACpC,UAAI,KAAK,KAAK;AAAQ,eAAO,KAAK,KAAK;AACvC,UAAI,KAAK,KAAK;AAAS,eAAO,KAAK,KAAK;;AAE1C,WAAO;;QAIH,4BAA2B;AAC/B,SAAK,QAAQ;AACb;AAGA,SAAK,SAAS,UAAU,KAAK,QAAQ;AACrC,QAAI,CAAC,KAAK,OAAO;AAAgB,WAAK,SAAS,UAAU,KAAK,QAAQ;AAGtE,SAAK,QAAQ;AACb,kBAAc,KAAK,OAAO;AAC1B,QAAI;AACF,WAAK,IAAI,OAAO;AAChB,aAAO,CAAE;;AAIX,WAAO,IAAI,QAAQ;AACjB;AACA;AACA;AAEA,wBAAkB;AAGlB,YAAM,KAAK;AAGX,YAAM,KAAK;AAEX,UAAI,KAAK,OAAO;AAAQ,WAAG,SAAS;AACpC,WAAK,QAAQ;AAEb,mBAAY;AACZ,uBAAsB,MAAA,QAAQ,OAAO,KAAK;AAC1C,WAAK,KAAK,QAAQ,KAAK,MAAM,SAAQ;AACrC,WAAK,QAAQ;AAGb,UAAI,KAAK,OAAO;AACd,kBAAU,KAAK,OAAO,KAAK,UAAU,KAAK,WAAW,SAAQ,UAAU;AACvE,YAAI,KAAK,KAAK;AAAM,iBAAO,KAAK,KAAK;;AAErC,aAAK,QAAQ;AACb,qBAAY;AACZ,kBAAU,KAAK,OAAO,KAAK,UAAU,MAAM,KAAK,WAAW,SAAQ,UAAU;AAC7E,aAAK,KAAK,OAAO,KAAK,MAAM,SAAQ;;AAItC,WAAK,QAAQ;AACb,UAAI,KAAK,OAAO;AACd,kBAAU,KAAK,OAAO,KAAK,UAAU,KAAK,OAAO,QAAQ,cAAc,SAAQ,QAAQ,KAAK,UAAU;AACtG,YAAI,KAAK,KAAK;AAAM,iBAAO,KAAK,KAAK;;AAErC,aAAK,QAAQ;AACb,qBAAY;AACZ,kBAAU,KAAK,OAAO,KAAK,UAAU,MAAM,KAAK,OAAO,QAAQ,cAAc,SAAQ,QAAQ,KAAK,UAAU;AAC5G,aAAK,KAAK,OAAO,KAAK,MAAM,SAAQ;;AAEtC,WAAK,QAAQ;AAGb,WAAK,QAAQ;AACb,UAAI,KAAK,OAAO;AACd,kBAAU,KAAK,OAAO,KAAK,UAAU,KAAK,OAAO,SAAS,cAAc,SAAQ,QAAQ,KAAK,OAAO,QAAQ;AAC5G,YAAI,KAAK,KAAK;AAAM,iBAAO,KAAK,KAAK;;AAErC,aAAK,QAAQ;AACb,qBAAY;AACZ,kBAAU,KAAK,OAAO,KAAK,UAAU,MAAM,KAAK,OAAO,SAAS,cAAc,SAAQ,QAAQ,KAAK,OAAO,QAAQ;AAClH,aAAK,KAAK,OAAO,KAAK,MAAM,SAAQ;;AAKtC,UAAI,KAAK,OAAO;AACd,SAAC,SAAS,SAAS,WAAW,MAAM,QAAQ,IAAI,CAAC,SAAS,SAAS;;AAErE,eAAQ,OAAO;AAEf,UAAI,KAAK,OAAO;AAAQ,WAAG,SAAS;AACpC,WAAK,QAAQ;AAEb,uBAAiB;AACjB,UAAI,KAAK,OAAO,QAAQ;AACtB,qBAAY;AACZ,qBAAa,CAAE,MAAc,QAAA,KAAK,UAAU,MAAc,QAAA,KAAK,UAAU,MAAc,QAAA,KAAK;AAC5F,YAAI,CAAC,KAAK,OAAO;AAAO,eAAK,KAAK,UAAU,KAAK,MAAM,SAAQ;iBACtD,KAAK,KAAK;AAAS,iBAAO,KAAK,KAAK;;AAG/C,WAAK,KAAK,QAAQ,KAAK,MAAM,SAAQ;AACrC,WAAK,QAAQ;AACb,cAAQ,CAAE,MAAM,SAAS,MAAM,SAAS,MAAM,SAAS,SAAS,YAAY,aAAa,KAAK,MAAM,QAAQ,SAAQ;;;QAIlH;AACJ,mBAAe,IAAI,UAAU,KAAK;AAClC,UAAM,KAAK,OAAO,QAAQ;AAC1B,SAAK,IAAI;;;;;ACpYb,2BAA2B;AACzB,MAAI,CAAC;AAAQ;AACb,cAAY,OAAO,WAAW;AAC9B,MAAI,OAAO,IAAG;AACd,MAAI,YAAY,IAAG;AACnB,UAAQ;AACR,2BAAyB,OAAO,QAAQ;AACtC,QAAI,IAAI,SAAS;AACf,oBAAc,GAAG,QAAQ,IAAI,KAAK;AAClC,UAAI,SAAS,OAAO,GAAG,IAAK,KAAG,iBAAiB;AAChD,WAAK;AAAA;AAAA;AAAA;AAKX;AACE,MAAI,CAAC;AAAQ;AACb,cAAY,OAAO,WAAW;AAC9B,qBAAmB;AACjB,QAAI,OAAO,IAAG;AACd,QAAI,cAAc,IAAG;AACrB,QAAI,YAAY,IAAG;AACnB,QAAI,YAAY,IAAG;AACnB,QAAI;AACJ,QAAI,IAAG;AACL,UAAI,KAAK,KAAK,IAAI,IAAI,KAAK,IAAI,IAAI,KAAK,IAAI,IAAI,KAAK,IAAI;AAAA;AAG3D,mBAAe;AAEf,QAAI,KAAK;AAAkB,aAAO,KAAK,GAAG,KAAK,MAAM,MAAM,KAAK,sBAAsB,KAAK,UAAU;AACrG,QAAI,KAAK;AAAK,aAAO,KAAK,QAAQ,KAAK,OAAO;AAC9C,QAAI,KAAK;AAAM,aAAO,KAAK,SAAS,KAAK;AACzC,QAAI,KAAK,WAAW,KAAK,QAAQ,SAAS;AACxC,uBAAgB,KAAK,QAAQ,IAAI,OAAO,GAAG,KAAK,MAAM,MAAM,EAAE,WAAW,EAAE;AAC3E,aAAO,KAAK,SAAQ,KAAK;AAAA;AAE3B,QAAI,YAAY,IAAG;AACnB,oBAAgB;AAAQ,UAAI,SAAS,OAAO,IAAI,KAAK,IAAI,KAAK,GAAG,KAAK,IAAI,KAAK,KAAO,KAAI,KAAK,IAAG;AAClG,QAAI;AACJ,QAAI,YAAY;AAChB,QAAI,KAAK;AACP,UAAI,IAAG;AACL,4BAAoB,KAAK;AACvB,cAAI,YAAY,IAAG,WAAW,QAAQ,QAAS,IAAI,MAAM,OAAQ,QAAS,IAAI
,MAAM,kBAAmB,IAAG;AAC1G,cAAI;AACJ,cAAI,IAAI,MAAM,IAAI,MAAM,IAAI,GAAG,GAAG,IAAI,KAAK;AAC3C,cAAI;AAAA;AAAA;AAGR,UAAI,IAAG;AACL,qBAAa,GAAG,IAAI,cAAc,SAAS,GAAG;AAC5C,yBAAe;AAAA,YACb,cAAc,IAAI,IAAI;AAAA,YACtB,cAAc,IAAI,IAAI;AAAA,YACtB,cAAc,IAAI,IAAI;AAAA,YACtB,IAAI,WAAW,KAAK,KAAK;AAC3B,uBAAa,IAAI;AACjB,eAAK,OAAO,OAAO,GAAG,IAAI,OAAO,GAAG;AACpC,8BAAoB;AAClB,iBAAK,OAAO,MAAM,IAAI,MAAM;AAAA;AAE9B,eAAK;AACL,cAAI,cAAc,IAAG,WAAW,QAAQ,QAAS,IAAI,OAAO,GAAG,OAAQ,QAAS,IAAI,OAAO,GAAG,kBAAmB,IAAG;AACpH,cAAI,OAAO;AACX,cAAI,IAAG;AACL,gBAAI,YAAY,IAAG,WAAW,QAAQ,QAAS,IAAI,OAAO,GAAG,OAAQ,QAAS,IAAI,OAAO,GAAG,kBAAmB,IAAG;AAClH,gBAAI,KAAK;AAAA;AAAA;AAIb,YAAI,KAAK,eAAe,KAAK,YAAY;AACvC,cAAI,cAAc,IAAG,WAAW,6BAA6B,IAAG;AAChE,cAAI;AACJ,wBAAc,KAAK,IAAI,KAAK,YAAY,YAAY,GAAG,KAAK,KAAK,YAAY,YAAY,GAAG,MAAM;AAClG,wBAAc,KAAK,IAAI,KAAK,YAAY,YAAY,GAAG,KAAK,KAAK,YAAY,YAAY,GAAG,MAAM;AAClG,cAAI,QAAQ,KAAK,YAAY,YAAY,GAAG,IAAI,KAAK,YAAY,YAAY,GAAG,IAAI,OAAO,OAAO,GAAG,GAAG,IAAI,KAAK;AACjH,cAAI;AACJ,cAAI,IAAG;AACL,gBAAI,YAAY,IAAG,WAAW,6BAA6B,IAAG;AAC9D,gBAAI;AAAA;AAAA;AAGR,YAAI,KAAK,eAAe,KAAK,YAAY;AACvC,cAAI,cAAc,IAAG,WAAW,6BAA6B,IAAG;AAChE,cAAI;AACJ,wBAAc,KAAK,IAAI,KAAK,YAAY,aAAa,GAAG,KAAK,KAAK,YAAY,aAAa,GAAG,MAAM;AACpG,wBAAc,KAAK,IAAI,KAAK,YAAY,aAAa,GAAG,KAAK,KAAK,YAAY,aAAa,GAAG,MAAM;AACpG,cAAI,QAAQ,KAAK,YAAY,aAAa,GAAG,IAAI,KAAK,YAAY,aAAa,GAAG,IAAI,OAAO,OAAO,GAAG,GAAG,IAAI,KAAK;AACnH,cAAI;AACJ,cAAI,IAAG;AACL,gBAAI,YAAY,IAAG,WAAW,6BAA6B,IAAG;AAC9D,gBAAI;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAQhB;AACE,MAAI,CAAC;AAAQ;AACb,cAAY,OAAO,WAAW;AAC9B,MAAI,WAAW;AACf,qBAAmB;AACjB,QAAI,YAAY,IAAG;AACnB,QAAI,cAAc,IAAG;AACrB,QAAI,OAAO,IAAG;AACd,QAAI,YAAY,IAAG;AACnB,QAAI,IAAG;AACL,0BAAoB,KAAK;AACvB,YAAI;AACJ,YAAI,IAAI,MAAM,SAAS,GAAG,MAAM,SAAS,GAAG,GAAG,GAAG,IAAI,KAAK;AAC3D,YAAI;AAAA;AAAA;AAGR,QAAI,IAAG;AACL,mBAAa,IAAI;AACjB;AAEA,aAAO,KAAK,UAAU,KAAK,OAAO,EAAE,SAAS;AAC7C,WAAK,OAAO,KAAK,SAAS,GAAG,KAAK,SAAS;AAC3C,aAAO,KAAK,UAAU,KAAK,OAAO,EAAE,SAAS;AAC7C,WAAK,OAAO,KAAK,SAAS,GAAG,KAAK,SAAS;AAC3C,aAAO,KAAK,UAAU,KAAK,OAAO,EAAE,SAAS;AAC7C,WAAK,OAAO,KAAK,SAAS,GAAG,KAAK,SAAS;AAC3C,aAAO,KAAK,UAAU,KAAK,OAAO,EAAE,SAAS;AAC7C,WAAK,OAAO,KAAK,SAAS,GAAG,KAAK,SAAS;AAC3C,aAAO,KAAK,UAAU,KAAK,OAAO,EAAE,SAAS;AAC7C,WAAK,OAAO,KAAK,SAAS,GAAG,KAAK,SAAS;AAE3C,aAAO,KAAK,UAAU,KAAK,OAAO,EAAE,SAAS;AAC7C,WAAK,OAAO,KAAK,SAAS,GAAG,KAAK,SAAS;AAC3C,aAAO,KAAK,UAAU,KAAK,OAAO,EAAE,SAAS;AAC7C,WAAK,OAAO,KAAK,SAAS,GAAG,KAAK,SAAS;AAC3C,aAAO,KAAK,UAAU,KAAK,OAAO,EAAE,SAAS;AAC7C,WAAK,OAAO,KAAK,SAAS,GAAG,KAAK,SAAS;AAC3C,aAAO,KAAK,UAAU,KAAK,OAAO,EAAE,SAAS;AAC7C,WAAK,OAAO,KAAK,SAAS,GAAG,KAAK,SAAS;AAC3C,aAAO,KAAK,UAAU,KAAK,OAAO,EAAE,SAAS;AAC7C,WAAK,OAAO,KAAK,SAAS,GAAG,KAAK,SAAS;AAC3C,aAAO,KAAK,UAAU,KAAK,OAAO,EAAE,SAAS;AAC7C,WAAK,OAAO,KAAK,SAAS,GAAG,KAAK,SAAS;AAE3C,aAAO,KAAK,UAAU,KAAK,OAAO,EAAE,SAAS;AAC7C,WAAK,OAAO,KAAK,SAAS,GAAG,KAAK,SAAS;AAC3C,aAAO,KAAK,UAAU,KAAK,OAAO,EAAE,SAAS;AAC7C,WAAK,OAAO,KAAK,SAAS,GAAG,KAAK,SAAS;AAC3C,aAAO,KAAK,UAAU,KAAK,OAAO,EAAE,SAAS;AAC7C,WAAK,OAAO,KAAK,SAAS,GAAG,KAAK,SAAS;AAC3C,aAAO,KAAK,UAAU,KAAK,OAAO,EAAE,SAAS;AAC7C,WAAK,OAAO,KAAK,SAAS,GAAG,KAAK,SAAS;AAE3C,aAAO,KAAK,UAAU,KAAK,OAAO,EAAE,SAAS;AAC7C,WAAK,OAAO,KAAK,SAAS,GAAG,KAAK,SAAS;AAC3C,aAAO,KAAK,UAAU,KAAK,OAAO,EAAE,SAAS;AAC7C,WAAK,OAAO,KAAK,SAAS,GAAG,KAAK,SAAS;AAC3C,aAAO,KAAK,UAAU,KAAK,OAAO,EAAE,SAAS;AAC7C,WAAK,OAAO,KAAK,SAAS,GAAG,KAAK,SAAS;AAC3C,aAAO,KAAK,UAAU,KAAK,OAAO,EAAE,SAAS;AAC7C,WAAK,OAAO,KAAK,SAAS,GAAG,KAAK,SAAS;AAE3C,UAAI,OAAO;AAAA;AAAA;AAAA;AAKjB;AACE,MAAI,CAAC;AAAQ;AACb,cAAY,OAAO,WAAW;AAC9B,MAAI,WAAW;AACf,qBAAmB;AACjB,QAAI,OAAO,IAAG;AACd,QAAI,YAAY,IAAG;AACnB,QAAI,IAAG;AACL,UAAI,YAAY,IAAG;AACnB,UAAI;AACJ,UAAI,cAAc,IAAG;AACrB
,UAAI,YAAY,IAAG;AACnB,UAAI,KAAK,KAAK,IAAI,IAAI,KAAK,IAAI,IAAI,KAAK,IAAI,IAAI,KAAK,IAAI;AACzD,UAAI,YAAY,IAAG;AACnB,UAAI,SAAS,QAAQ,KAAK,IAAI,KAAK,GAAG,KAAK,IAAI,KAAK,IAAI,KAAK,IAAI;AACjE,UAAI;AAAA;AAEN,QAAI,IAAG;AACL,UAAI,KAAK,aAAa,KAAK,UAAU,SAAS;AAC5C,4BAAoB,KAAK;AACvB,cAAI,YAAY,IAAG,WAAW,QAAQ,QAAS,IAAI,MAAM,OAAQ,QAAS,IAAI,MAAM,kBAAmB,IAAG;AAC1G,cAAI;AACJ,cAAI,IAAI,MAAM,IAAI,MAAM,IAAI,GAAG,GAAG,IAAI,KAAK;AAC3C,cAAI;AAAA;AAAA;AAAA;AAIV,QAAI,IAAG;AACL,sBAAgB;AACd,YAAI,CAAC;AAAM;AACX,qBAAa,GAAG,IAAI,KAAK,QAAQ;AAC/B,cAAI,YAAY,IAAG;AACnB,cAAI;AACJ,cAAI,cAAc,IAAG,WAAW,QAAQ,QAAS,IAAI,KAAK,GAAG,OAAQ,QAAS,IAAI,KAAK,GAAG,kBAAmB,IAAG;AAChH,cAAI,OAAO,KAAK,IAAI,IAAI,IAAI,IAAI,GAAG,IAAI,KAAK,IAAI,IAAI,IAAI,IAAI,GAAG;AAC/D,cAAI,OAAO,KAAK,GAAG,IAAI,KAAK,GAAG;AAC/B,cAAI;AAAA;AAAA;AAGR,cAAQ,KAAK,YAAY;AACzB,cAAQ,KAAK,YAAY;AACzB,cAAQ,KAAK,YAAY;AACzB,cAAQ,KAAK,YAAY;AACzB,cAAQ,KAAK,YAAY;AAAA;AAAA;AAAA;AAM/B,aAAa;AAAA,EACX,MAAM;AAAA,EACN,MAAM;AAAA,EACN,MAAM;AAAA,EACN,SAAS;AAAA;AAGX,mBAAe;;;AC/Nf,IAAI,WAAW;AACf,iBAAiB;AAEjB,YAAY;AAAA,EACV,YAAY;AAAA,EACZ,OAAO;AAAA,EACP,gBAAgB;AAAA,EAChB,WAAW;AAAA,EACX,kBAAkB;AAAA,EAClB,aAAa;AAAA,EACb,YAAY;AAAA,EACZ,aAAa;AAAA,EACb,iBAAiB;AAAA,EACjB,YAAY;AAAA,EACZ,YAAY;AAAA;AAGd;AACE,MAAI;AAAY;AAChB,cAAY;AAAA;AAAA;AAAA,qDAGuC,MAAM;AAAA;AAAA,sCAErB,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kCAWV,MAAM,0BAA0B,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0CAM9B,MAAM;AAAA;AAAA,qCAEX,MAAM;AAAA;AAAA;AAAA,8DAGmB,MAAM;AAAA,kDAClB,MAAM;AAAA,kDACN,MAAM;AAAA,kJAC0F,MAAM;AAAA;AAAA;AAAA;AAAA,qEAInF,MAAM;AAAA;AAAA,qFAEU,MAAM;AAAA,gCAC3D,MAAM;AAAA;AAAA;AAAA,+GAGyE,MAAM;AAAA,kGACnB,MAAM;AAAA,wKACgE,MAAM;AAAA,oKACV,MAAM;AAAA;AAAA;AAAA;AAAA;AAKxK,aAAW,SAAS,cAAc;AAClC,KAAG,YAAY;AACf,WAAS,qBAAqB,QAAQ,GAAG,YAAY;AACrD,eAAa;AAAA;AAtEf;AAAA,EA0EE;AACE,QAAI;AAAW,cAAQ,IAAK,UAAU;AACtC;AACA,SAAK,WAAW,QAAQ,OAAO;AAC/B,SAAK,KAAK;AACV,SAAK,WAAW;AAChB;AACA,SAAK,UAAU;AACf,SAAK,SAAS;AAAA;AAAA,EAGhB,2BAA2B,eAAe,CAAE,KAAK,MAAM,MAAM,MAAM,QAAQ,MAAM,OAAO;AACtF,SAAK,OAAO,SAAS,cAAc;AACnC,SAAK,KAAK,KAAK,QAAQ;AACvB,SAAK,KAAK,YAAY;AACtB,QAAI;AACF,UAAI,SAAS;AAAK,aAAK,KAAK,MAAM,MAAM,SAAS;AACjD,UAAI,SAAS;AAAQ,aAAK,KAAK,MAAM,SAAS,SAAS;AACvD,UAAI,SAAS;AAAM,aAAK,KAAK,MAAM,OAAO,SAAS;AACnD,UAAI,SAAS;AAAO,aAAK,KAAK,MAAM,QAAQ,SAAS;AAAA;AAGvD,SAAK,YAAY,SAAS,cAAc;AACxC,SAAK,UAAU,KAAK,kBAAkB;AACtC,SAAK,UAAU,YAAY;AAG3B,oBAAgB,SAAS,cAAc;AACvC,YAAQ,YAAY;AACpB,YAAQ,KAAK,cAAc;AAC3B,gBAAY;AAAA;AAAA;AAAA;AAIZ,YAAQ,YAAY,GAAG,QAAQ;AAC/B,SAAK,KAAK,YAAY;AACtB,YAAQ,iBAAiB,SAAS;AAChC,WAAK,UAAU,UAAU,OAAO;AAChC,WAAK,UAAU,UAAU,OAAO;AAChC,WAAK,KAAK,MAAM,cAAc,KAAK,UAAU,UAAU,SAAS,4BAA4B,SAAS;AAAA;AAGvG,SAAK,KAAK,YAAY,KAAK;AAC3B,QAAI,OAAO,WAAW;AAAU,aAAO,YAAY,KAAK;AAAA;AACnD,eAAS,eAAe,QAAQ,YAAY,KAAK;AAAA;AAAA,MAGpD;AACF,SAAK;AACL,WAAO,QAAQ,KAAK,YAAY,KAAK;AAAA;AAAA,MAGnC;AACF,WAAO,QAAQ,KAAK,YAAY,KAAK;AAAA;AAAA,MAGnC;AACF,WAAO,KAAK,KAAK;AAAA;AAAA,MAGf;AACF,WAAO,KAAK,KAAK;AAAA;AAAA,EAGnB;AACE,QAAI,KAAK,UAAU,UAAU,SAAS;AACpC,WAAK,UAAU,UAAU,OAAO;AAChC,WAAK,UAAU,UAAU,OAAO;AAAA;AAAA;AAAA,EAIpC;AACE,WAAQ,KAAK,UAAU,UAAU,SAAS;AAAA;AAAA,EAG5C;AACE,SAAK,UAAU,UAAU,OAAO;AAChC,SAAK,UAAU,UAAU,OAAO;AAChC,QAAI,KAAK,UAAU,UAAU,SAAS,4BAA4B;AAChE,gBAAU,IAAI,KAAM,KAAI,WAAW,IAAI,QAAQ,KAAK,IAAI,QAAQ,GAAG,QAAQ;AAC3E,gBAAU,IAAI,KAAM,KAAI,WAAW,IAAI,QAAQ,KAAK,IAAI,QAAQ,GAAG,QAAQ;AAC3E,UAAI;AAAG,aAAK,KAAK,MAAM,OAAO,GAAG,IAAI;AACrC,UAAI;AAAG,aAAK,KAAK,MAAM,MAAM;AAC7B,UAAI,KAAK,KAAK,aAAa;AAAG,aAAK,KAAK,MAAM,OAAO;AACrD,UAAK,KAAK,KAAK,aAAa,KAAK,KAAK,cAAe,OAAO;AAC1D,aAAK,KAAK,MAAM,OAAO;AACvB,aAAK,KAAK,MAAM,QAAQ;AAAA;AAE1B,WAAK,KAAK,MAAM,cAAc;AAAA;AAE9B,WAAK,KAAK,MAAM,cAAc;AAAA;AAAA;AAAA,EAI
lC;AACE,eAAW,SAAS,cAAc;AAClC,OAAG,YAAY;AACf,OAAG,KAAK,KAAK;AACb,OAAG,YAAY;AACf,SAAK,KAAK,YAAY;AACtB,OAAG,iBAAiB,SAAS;AAC3B,WAAK,SAAS,CAAC,KAAK;AACpB,kBAAY,SAAS,uBAAuB;AAC5C,yBAAmB;AACjB,aAAK,MAAM,UAAU,KAAK,SAAS,SAAS;AAAA;AAAA;AAGhD,WAAO;AAAA;AAAA,EAGT;AACE,eAAW,SAAS,cAAc;AAClC,OAAG,YAAY;AACf,OAAG,KAAK,KAAK;AACb,OAAG,YAAY;AACf,SAAK,UAAU,YAAY;AAC3B,WAAO;AAAA;AAAA,EAGT;AACE,eAAW,SAAS,cAAc;AAClC,OAAG,YAAY;AACf,OAAG,YAAY,+EAA+E,KAAK,UAAU,OAAO,YAAY,YAAY,+CAA+C,KAAK,qBAAqB;AACrN,SAAK,UAAU,YAAY;AAC3B,OAAG,iBAAiB,UAAU;AAC5B,aAAO,YAAY,IAAI,OAAO;AAC9B,UAAI;AAAU,iBAAS,IAAI,OAAO;AAAA;AAEpC,WAAO;AAAA;AAAA,QAGH;AACJ,eAAW,SAAS,cAAc;AAClC,OAAG,YAAY;AACf,kBAAc;AACd,uBAAmB;AACjB,kBAAY,SAAS,WAAW,aAAa;AAC7C,iBAAW,kBAAkB,SAAS,OAAO;AAAA;AAE/C,OAAG,YAAY,wCAAwC,KAAK,8BAA8B,+BAA+B,KAAK,qBAAqB;AACnJ,OAAG,MAAM,aAAa,SAAS,KAAK,MAAM;AAC1C,OAAG,MAAM,WAAW,SAAS,KAAK,MAAM;AACxC,OAAG,MAAM,cAAc,SAAS,KAAK,MAAM;AAC3C,SAAK,UAAU,YAAY;AAC3B,OAAG,iBAAiB,UAAU;AAC5B,UAAI;AAAU,iBAAS,MAAM,IAAI,OAAO;AAAA;AAE1C,WAAO;AAAA;AAAA,EAGT;AACE,eAAW,SAAS,cAAc;AAClC,OAAG,YAAY;AACf,OAAG,YAAY,8CAA8C,KAAK,eAAe,cAAa,eAAc,iBAAgB,OAAO,cAAc;AACjJ,SAAK,UAAU,YAAY;AAC3B,OAAG,iBAAiB,UAAU;AAC5B,aAAO,YAAY,SAAS,IAAI,OAAO,WAAW,WAAW,IAAI,OAAO,SAAS,SAAS,IAAI,OAAO,SAAS,WAAW,IAAI,OAAO;AACpI,UAAI,OAAO,aAAa,SAAS,IAAI,OAAO;AAC5C,UAAI;AAAU,iBAAS,IAAI,OAAO;AAAA;AAEpC,OAAG,QAAQ,GAAG,SAAS;AACvB,WAAO;AAAA;AAAA,EAGT;AACE,eAAW,SAAS,cAAc;AAClC,OAAG,YAAY;AACf,OAAG,KAAK,KAAK;AACb,QAAI;AAAM,SAAG,YAAY;AACzB,SAAK,UAAU,YAAY;AAC3B,WAAO;AAAA;AAAA,EAGT;AACE,eAAW,SAAS,cAAc;AAClC,OAAG,YAAY;AACf,OAAG,MAAM,aAAa,SAAS,KAAK,MAAM;AAC1C,OAAG,MAAM,WAAW,SAAS,KAAK,MAAM;AACxC,OAAG,MAAM,cAAc,SAAS,KAAK,MAAM;AAC3C,OAAG,OAAO;AACV,OAAG,KAAK,KAAK;AACb,OAAG,YAAY;AACf,SAAK,UAAU,YAAY;AAC3B,OAAG,iBAAiB,SAAS;AAC3B,UAAI,GAAG,cAAc;AAAS,WAAG,YAAY;AAAA;AACxC,WAAG,YAAY;AACpB,UAAI;AAAU,iBAAS,GAAG,cAAc;AAAA;AAE1C,WAAO;AAAA;AAAA,EAGT,8BAA8B;AAC5B,eAAW,SAAS,cAAc;AAClC,OAAG,YAAY;AACf,OAAG,KAAK,YAAY;AACpB,OAAG,YAAY,GAAG,UAAU,MAAM;AAClC,SAAK,UAAU,YAAY;AAC3B,WAAO;AAAA;AAAA,EAIT,iCAAiC;AAC/B,eAAW,SAAS,eAAe,YAAY;AAC/C,QAAI;AAAI,SAAG,YAAY,GAAG,UAAU,MAAM;AAAA;AACrC,WAAK,SAAS,OAAO;AAAA;AAAA,EAG5B,4BAA4B,cAAc;AACxC,QAAI;AAAO,YAAM,aAAa;AAC9B,eAAW,SAAS,cAAc;AAClC,OAAG,YAAY;AACf,OAAG,KAAK,KAAK;AACb,OAAG,YAAY,eAAe,MAAM,cAAc,uCAAuC,wCAAwC,oBAAoB;AACrJ,SAAK,UAAU,YAAY;AAC3B,WAAO;AAAA;AAAA,QAIH;AACJ,QAAI,CAAC,UAAW,OAAO,WAAW;AAAI;AACtC,mBAAe,SAAS,eAAe,eAAe;AACtD,QAAI,CAAC;AAAQ;AACb,gBAAY,OAAO,WAAW;AAC9B,QAAI,YAAY,MAAM;AACtB,QAAI,SAAS,GAAG,GAAG,OAAO,OAAO,OAAO;AACxC,kBAAc,OAAO,QAAQ,OAAO;AACpC,iBAAY,IAAI,KAAK,IAAI,GAAG;AAC5B,mBAAe,OAAO,SAAS;AAC/B,oBAAgB;AACd,uBAAiB,IAAI,qBAAqB,GAAI,QAAM,OAAO,MAAM,QAAQ,GAAG;AAC5E,eAAS,aAAa,KAAK,MAAM;AACjC,eAAS,aAAa,KAAK,MAAM;AACjC,UAAI,YAAY;AAChB,UAAI,SAAS,IAAI,OAAO,GAAG,QAAQ,GAAG,OAAO;AAC7C,UAAI,YAAY,MAAM;AACtB,UAAI,OAAO,GAAG,QAAQ;AACtB,UAAI,SAAS,KAAK,MAAM,OAAO,KAAK,IAAI,QAAQ,GAAG,OAAO,SAAS,GAAG,QAAQ;AAAA;AAAA;AAAA;AAKpF,mBAAe;;;ACvTf,MAAM,aAAa;AAEnB,cAAc,IAAI,MAAM;AAGxB,WAAW;AAAA,EACT,WAAW;AAAA,EACX,gBAAgB;AAAA,EAChB,WAAW;AAAA,EACX,eAAe;AAAA,EACf,eAAe;AAAA,EACf,qBAAqB;AAAA,EACrB,MAAM;AAAA,EACN,SAAS;AAAA,EACT,MAAM;AAAA,EACN,QAAQ;AAAA,EACR,WAAW;AAAA,EACX,QAAQ;AAAA,EACR,SAAS,CAAC,yBAAyB,yBAAyB,yBAAyB,yBAAyB,yBAAyB;AAAA,EACvI,WAAW;AAAA,EACX,YAAY;AAAA,EACZ,cAAc;AAAA,EACd,cAAc;AAAA,EACd,UAAU;AAAA,EACV,SAAS;AAAA,EACT,WAAW;AAAA,EACX,eAAe;AAAA,EACf,cAAc;AAAA,EACd,WAAW;AAAA,EACX,YAAY;AAAA,EACZ,QAAQ;AAAA,EACR,KAAK;AAAA;AAIP;AACA;AACA;AACA;AAGA;AACE,MAAI,CAAC,MAAM,QAAQ;AAAM,WAAO;AAChC,aAAW;AACX,sBAAoB;AAClB,QAAI,OAAO,UAAU;AAAU,cAAQ,KAAK,UAAU,OAAO,QAAQ,gBAAgB,IAAI,QAAQ,MAAM;AAAA;AAClG,cAAQ;AAAA;AAEf,SAAO;AAAA;AAIT,aAAY
;AAEV,MAAI,GAAG;AAAS,YAAQ,IAAI,GAAG;AAAA;AAGjC,eAAe;AAEb,WAAS,eAAe,UAAU,YAAY;AAAA;AAIhD;AAEE,kBAAgB,YAAY,QAAQ;AACpC,KAAG,IAAI,KAAK,MAAO;AACnB,MAAI,GAAG,IAAI,SAAS,GAAG;AAAW,OAAG,IAAI;AAMzC,MAAI,MAAM;AAER,QAAI,UAAU;AAAI,4BAAsB,MAAM,eAAe,OAAO;AAAA;AAE/D,iBAAW,MAAM,eAAe,OAAO,SAAS,KAAK;AAAA;AAG5D,QAAK,YAAY,OAAO,GAAG;AAE3B,cAAY,OAAO,WAAW;AAC9B,MAAI,YAAY,GAAG;AACnB,MAAI,SAAS,GAAG,GAAG,OAAO,OAAO,OAAO;AACxC,MAAI,OAAO;AACT,QAAI,OAAO,OAAO,UAAU,OAAO;AAAO,aAAO,QAAQ,OAAO,OAAO;AACvE,QAAI,OAAO,OAAO,WAAW,OAAO;AAAQ,aAAO,SAAS,OAAO,OAAO;AAC1E,QAAI,UAAU,OAAO,QAAQ,GAAG,GAAG,OAAO,OAAO,OAAO,OAAO,OAAO,QAAQ,GAAG,GAAG,OAAO,OAAO,OAAO,OAAO,OAAO;AAAA;AAEvH,QAAI,UAAU,OAAO,GAAG,GAAG,MAAM,OAAO,MAAM,QAAQ,GAAG,GAAG,OAAO,OAAO,OAAO;AAAA;AAGnF,eAAK,KAAK,OAAO,MAAM,QAAQ,IAAI,MAAM,SAAS;AAClD,eAAK,KAAK,OAAO,MAAM,QAAQ;AAC/B,eAAK,KAAK,OAAO,MAAM,QAAQ;AAC/B,eAAK,QAAQ,OAAO,SAAS,QAAQ;AAErC,iBAAe,MAAM,GAAG;AACxB,cAAY,OAAO,kBAAkB,QAAS,QAAO,gBAAgB,gBAAgB,OAAO,gBAAgB,gBAAgB,GAAG,2BAA2B;AAC1J,iBAAe,WAAW,OAAO,MAAM,SAAS,0BAA0B,kBAAkB,OAAO,MAAM,WAAW;AACpH,qBAAmB,OAAO,SAAS,eAAe,OAAO,OAAO,WAAW,OAAO,OAAO,WAAW;AACpG,cAAY,KAAK,MAAM,KAAK,GAAG,IAAI,OAAO,UAAU,IAAI,KAAK,GAAG,IAAI,UAAU;AAC9E,kBAAiB,GAAG,IAAI,SAAS,KAAO,MAAM,IAAK,gKAAgK;AACnN,WAAS,eAAe,OAAO,YAAY;AAAA,aAChC,GAAG,OAAO,kBAAkB,GAAG,OAAO,wBAAwB,GAAG,OAAO,WAAW,GAAG,OAAO,UAAU;AAAA,eACrG,MAAM,GAAG,kBAAkB;AAAA,mBACvB,IAAI,OAAO,oBAAoB;AAAA,MAC5C;AAAA;AAAA;AAKN;AArHA;AAsHE,MAAI,GAAG;AAAM,WAAO;AACpB,KAAG,OAAO;AACV,gBAAc,SAAS,eAAe;AACtC,iBAAe,SAAS,eAAe;AACvC,iBAAe,SAAS,eAAe;AACvC,eAAa,MAAM,YAAc,MAAM,UAAU,iBAAiB,GAAG,eAAe,UAAY,MAAM,aAAa,KAAO,CAAC,MAAM,SAAW;AAC5I,YAAU;AACV,SAAO;AAEP,MAAI,CAAC,UAAU;AACb,UAAM;AACN,WAAO,aAAa;AAAA,EAAK;AACzB,SAAI;AACJ,WAAO;AACP,WAAO;AAAA;AAET;AACA,sBAAoB;AAAA,IAClB,OAAO;AAAA,IACP,OAAO;AAAA,MACL,YAAY,GAAG,SAAS,SAAS;AAAA,MACjC,YAAY,GAAG,OAAO,mBAAmB;AAAA,MACzC,OAAO,CAAE,OAAO,OAAO;AAAA,MACvB,QAAQ,CAAE,OAAO,OAAO;AAAA;AAAA;AAG5B;AAGE,aAAS,MAAM,UAAU,aAAa,aAAa;AAAA;AAEnD,QAAI,IAAI,SAAS;AAAyB,YAAM;AAAA,aACvC,IAAI,SAAS;AAA0B,YAAM;AAAA;AACjD,YAAM;AACX,WAAO,aAAa;AAAA,EAAK;AACzB,WAAO;AACP,SAAI;AAAA;AAEN,MAAI;AAAQ,UAAM,YAAY;AAAA;AACzB,WAAO;AACZ,gBAAc,OAAO,iBAAiB;AACtC,mBAAiB,MAAM;AAEvB,KAAG,SAAS,CAAE,MAAM,YAAM,UAAN,mBAAa,eAAe,OAAO,SAAS,OAAO,QAAQ,SAAS,QAAQ,QAAQ,SAAS,eAAe,SAAS,UAAU;AACnJ,SAAO,IAAI,QAAQ;AACjB,UAAM,eAAe;AACnB,YAAM,QAAQ,MAAM;AACpB,YAAM,SAAS,MAAM;AACrB,aAAO,QAAQ,MAAM;AACrB,aAAO,SAAS,MAAM;AACtB,aAAO,MAAM,QAAQ,OAAO,QAAQ,OAAO,SAAS,UAAU;AAC9D,aAAO,MAAM,SAAS,OAAO,QAAQ,OAAO,SAAS,KAAK;AAC1D,SAAG,UAAU,MAAM,aAAa,SAAS,MAAM;AAC/C,SAAG,WAAW,MAAM,aAAa,SAAS,MAAM;AAEhD,mBAAa,KAAM,IAAI,OAAO,QAAQ,OAAO;AAC7C,SAAG,WAAW,GAAG,cAAc,QAAQ,UAAU,GAAG;AACpD,UAAI;AAAM,cAAM;AAChB,SAAG,OAAO;AAGV,aAAO;AACP,cAAQ;AAAA;AAAA;AAAA;AAMd;AACE,MAAI,CAAC;AAEH,SAAI;AACJ,aAAS,IAAI,OAAO,GAAG,QAAQ,CAAE,MAAM;AACvC,WAAO,SAAS;AAEhB,WAAO,iBAAiB,WAAW;AACjC,UAAI,CAAC,OAAO;AACV,aAAI;AACJ,aAAI;AACJ,eAAO,SAAS;AAAA;AAElB,kBAAY,OAAO,IAAI,KAAK,QAAQ;AAAA;AAAA;AAIxC,SAAO,YAAY,CAAE,OAAO,OAAM,KAAK,QAAQ,OAAO,OAAO,OAAO,QAAQ,OAAO,SAAU,CAAC,OAAM,KAAK;AAAA;AAI3G;AA/MA;AAgNE,cAAY,YAAY;AAExB,eAAa,MAAM,aAAc,MAAM,UAAU,iBAAiB,GAAG,eAAe,UAAY,MAAM,aAAa,KAAO,CAAC,MAAM;AACjI,MAAI,CAAC,QAAQ,MAAM;AAEjB,QAAI,MAAM;AAAQ,WAAI;AAAA,aACZ,MAAM,UAAU,iBAAiB,GAAG,eAAe,UAAY,MAAM,cAAc;AAAI,iBAAW,MAAM,eAAe,OAAO,SAAS;AAAA;AAC5I,WAAI,kCAAkC,YAAM,cAAN,mBAAiB,iBAAiB,GAAG,4BAA4B,MAAM;AAClH;AAAA;AAEF,SAAO;AACP,MAAI,GAAG;AAEL,sBAAkB,IAAI,gBAAgB,OAAO,OAAO,OAAO;AAC3D,gBAAY,UAAU,WAAW;AACjC,QAAI,UAAU,OAAO,GAAG,GAAG,MAAM,OAAO,MAAM,QAAQ,GAAG,GAAG,OAAO,OAAO,OAAO;AACjF,iBAAa,IAAI,aAAa,GAAG,GAAG,OAAO,OAAO,OAAO;AAEzD,cAAU,OAAO,MAAM,QAAQ;AAAA;AAE/B,UAAM,OAAO,OAAO,YAAY,KAAK;AACnC,UAAI
,OAAO;AAAO,aAAI,OAAO;AAAA;AACxB,oBAAY,OAAO,QAAQ;AAAA;AAAA;AAAA;AAMtC;AACE,cAAY,YAAY;AACxB,SAAO,IAAI,QAAQ;AACjB,mBAAc,IAAI;AAClB,WAAM,SAAS;AACb,WAAI,qBAAqB,OAAM;AAC/B,qBAAe,SAAS,eAAe;AACvC,aAAM,QAAQ,OAAM;AACpB,aAAM,SAAS,OAAM;AACrB,aAAO,QAAQ,MAAM,OAAO,OAAO,SAAS,MAAM,OAAO,OAAO,QAAQ,IAAI,MAAM,OAAO,OAAO,QAAQ,OAAM;AAC9G,aAAO,SAAS,MAAM,OAAO,OAAO,UAAU,MAAM,OAAO,OAAO,SAAS,IAAI,MAAM,OAAO,OAAO,SAAS,OAAM;AAClH,qBAAe,MAAM,MAAM,OAAO,QAAO;AACzC,kBAAY,QAAO,QAAQ;AAC3B,oBAAc,SAAS,cAAc;AACrC,YAAM,YAAY;AAClB,YAAM,QAAQ,OAAO,aAAc,IAAG,UAAU;AAChD,YAAM,SAAS,OAAO,SAAU,QAAO,aAAa,MAAM;AAC1D,kBAAY,MAAM,WAAW;AAC7B,UAAI,UAAU,QAAQ,GAAG,GAAG,OAAO,OAAO,OAAO,QAAQ,GAAG,GAAG,MAAM,OAAO,MAAM;AAClF,eAAS,eAAe,qBAAqB,YAAY;AACzD,aAAM,MAAM;AACZ,cAAQ;AAAA;AAEV,WAAM,MAAM;AAAA;AAAA;AAKhB;AACE,QAAM,OAAO,iBAAiB;AAC9B,WAAS,eAAe,qBAAqB,MAAM,UAAU;AAC7D,WAAS,eAAe,UAAU,MAAM,UAAU;AAClD,gBAAc,SAAS,eAAe;AACtC,iBAAe,SAAS,eAAe;AACvC,KAAG,iBAAiB,GAAG;AACvB,MAAK,MAAM,cAAc,QAAS,CAAC,MAAM;AACvC,aAAS,eAAe,QAAQ,MAAM,UAAU;AAChD,WAAO;AACP,UAAM;AAAA;AAEN,UAAM;AACN,aAAS,eAAe,QAAQ,MAAM,UAAU;AAChD,WAAO;AACP,UAAM;AAAA;AAER,iBAAe,OAAO;AAAA;AAIxB;AACE,WAAS,eAAe,QAAQ,MAAM,UAAU;AAChD,QAAM,OAAO,iBAAiB;AAC9B,eAAa,KAAK,KAAK,MAAM,KAAK,GAAG,UAAU,OAAO,aAAa,SAAS,KAAK;AACjF,KAAG,WAAW,GAAG,cAAc,QAAQ,UAAU,GAAG;AACpD,KAAG,iBAAiB,GAAG,sBAAsB,GAAG;AAChD,WAAS,eAAe,UAAU,MAAM,UAAU;AAClD,WAAS,eAAe,qBAAqB,MAAM,UAAU;AAC7D,OAAI;AACJ,SAAO;AACP,WAAS,eAAe,qBAAqB,YAAY;AACzD,uBAAqB,GAAG;AAAS,UAAM,aAAa;AACpD,SAAO;AAAA;AAGT;AACE,UAAO,IAAI,aAAK,SAAS,MAAM,IAAI,CAAE,KAAK,QAAQ,OAAO;AACzD,cAAY,MAAK,UAAU,eAAe,eAAe,MAAM;AAC/D,QAAK,UAAU,kBAAkB,kBAAkB,MAAM;AACzD,WAAS,eAAe,QAAQ,iBAAiB,SAAS,MAAM,IAAI;AAEpE,QAAK,QAAQ;AACb,QAAK,QAAQ,WAAW,CAAC,OAAO,SAAS,SAAS,MAAM,OAAO,SAAS,SAAS,MAAM,OAAO,UAAU;AACxG,QAAK,QAAQ,oBAAoB,MAAM,QAAQ,SAAS,SAAS,MAAM,OAAO,QAAQ;AACtF,QAAK,QAAQ,mBAAmB,MAAM,QAAQ,WAAW,SAAS,MAAM,OAAO,UAAU;AACzF,QAAK,QAAQ,iBAAiB,MAAM,QAAQ,cAAc,SAAS,MAAM,OAAO,aAAa;AAC7F,QAAK,QAAQ,kBAAkB,IAAI;AACnC,QAAK,QAAQ;AACb,QAAK,SAAS;AACd,QAAK,QAAQ,eAAe,MAAM,OAAO,MAAM;AAC/C,QAAK,QAAQ,aAAa,MAAM,OAAO,KAAK,MAAM;AAClD,QAAK,QAAQ,aAAa,MAAM,OAAO,KAAK,MAAM;AAClD,QAAK,QAAQ,YAAY,MAAM,OAAO,KAAK,KAAK;AAChD,QAAK,QAAQ,eAAe,MAAM,OAAO,KAAK,QAAQ;AACtD,QAAK,QAAQ,gBAAgB,MAAM,OAAO,KAAK,SAAS;AACxD,QAAK,QAAQ,aAAa,MAAM,OAAO,MAAM;AAC7C,QAAK,QAAQ,aAAa,MAAM,OAAO,MAAM;AAC7C,QAAK,QAAQ,oBAAoB,MAAM,OAAO,SAAS;AAEvD,QAAK,QAAQ;AACb,QAAK,SAAS;AACd,QAAK,SAAS,eAAe,MAAM,OAAO,KAAK,UAAU,YAAY,GAAG,IAAI,GAAG;AAC7E,UAAM,OAAO,KAAK,SAAS,WAAW,SAAS;AAC/C,UAAM,OAAO,KAAK,gBAAgB,SAAS;AAC3C,UAAM,OAAO,KAAK,WAAW,SAAS;AAAA;AAExC,QAAK,SAAS,eAAe,MAAM,OAAO,KAAK,UAAU,cAAc,GAAG,IAAI,GAAG;AAC/E,UAAM,OAAO,KAAK,SAAS,aAAa,SAAS;AACjD,UAAM,OAAO,KAAK,QAAQ,aAAa,SAAS;AAChD,UAAM,OAAO,KAAK,IAAI,aAAa,SAAS;AAC5C,UAAM,OAAO,KAAK,aAAa,SAAS;AAAA;AAE1C,QAAK,SAAS,kBAAkB,MAAM,OAAO,KAAK,UAAU,iBAAiB,GAAK,GAAK,MAAM;AAC3F,UAAM,OAAO,KAAK,SAAS,gBAAgB,WAAW;AACtD,UAAM,OAAO,KAAK,OAAO,gBAAgB,WAAW;AACpD,UAAM,OAAO,KAAK,QAAQ,gBAAgB,WAAW;AACrD,UAAM,OAAO,KAAK,gBAAgB,WAAW;AAAA;AAE/C,QAAK,SAAS,mBAAmB,MAAM,OAAO,KAAK,UAAU,kBAAkB,KAAK,GAAK,MAAM;AAC7F,UAAM,OAAO,KAAK,SAAS,iBAAiB,WAAW;AACvD,UAAM,OAAO,KAAK,iBAAiB,WAAW;AAC9C,UAAM,OAAO,KAAK,iBAAiB,WAAW;AAAA;AAEhD,QAAK,SAAS,WAAW,MAAM,OAAO,KAAK,UAAU,gBAAgB,KAAK,GAAK,MAAM;AACnF,UAAM,OAAO,KAAK,SAAS,eAAe,WAAW;AACrD,UAAM,OAAO,KAAK,eAAe,WAAW;AAAA;AAG9C,QAAK,QAAQ;AACb,QAAK,SAAS,OAAO;AAErB,WAAS,IAAI,aAAK,SAAS,MAAM,IAAI,CAAE,KAAK,QAAQ,OAAO;AAC3D,SAAO,SAAS;AAChB,SAAO,QAAQ,gBAAgB,IAAI,QAAQ,MAAM;AACjD,SAAO,QAAQ,qBAAqB,IAAI,UAAU,MAAM;AACxD,SAAO,QAAQ,gBAAgB,IAAI;AACnC,SAAO,QAAQ,cAAc,IAAI;AACjC,SAAO,QAAQ,iBAAiB,IAAI;AACpC,SAAO,QAAQ,iBAAiB,IAAI;AACpC,SAAO,QAAQ,eAAe,IAAI;AAClC,SAAO,QAAQ
;AACf,SAAO,SAAS;AAChB,SAAO,QAAQ,WAAW,MAAM,OAAO,QAAQ;AAC/C,KAAG,YAAY,OAAO,SAAS,eAAe,MAAM,OAAO,QAAQ,SAAS,GAAG,MAAM,IAAI,SAAS,MAAM,OAAO,OAAO,QAAQ,SAAS;AACvI,KAAG,aAAa,OAAO,SAAS,gBAAgB,MAAM,OAAO,QAAQ,UAAU,GAAG,MAAM,IAAI,SAAS,MAAM,OAAO,OAAO,SAAS,SAAS;AAC3I,SAAO,SAAS,cAAc,MAAM,OAAO,QAAQ,cAAc,IAAM,GAAK,MAAM,SAAS,MAAM,OAAO,OAAO,aAAa,WAAW;AACvI,SAAO,SAAS,YAAY,MAAM,OAAO,QAAQ,YAAY,IAAM,GAAK,MAAM,SAAS,MAAM,OAAO,OAAO,WAAW,WAAW;AACjI,SAAO,SAAS,aAAa,MAAM,OAAO,QAAQ,aAAa,GAAG,GAAK,MAAM,SAAS,MAAM,OAAO,OAAO,YAAY,WAAW;AACjI,SAAO,SAAS,QAAQ,MAAM,OAAO,QAAQ,QAAQ,GAAG,IAAI,GAAG,SAAS,MAAM,OAAO,OAAO,OAAO,SAAS;AAC5G,SAAO,SAAS,cAAc,MAAM,OAAO,QAAQ,cAAc,IAAM,GAAK,MAAM,SAAS,MAAM,OAAO,OAAO,aAAa,WAAW;AACvI,SAAO,SAAS,OAAO,MAAM,OAAO,QAAQ,OAAO,GAAG,KAAK,GAAG,SAAS,MAAM,OAAO,OAAO,MAAM,SAAS;AAC1G,SAAO,SAAS,YAAY,MAAM,OAAO,QAAQ,YAAY,GAAG,IAAI,GAAG,SAAS,MAAM,OAAO,OAAO,WAAW,SAAS;AACxH,SAAO,QAAQ,YAAY,MAAM,OAAO,QAAQ;AAChD,SAAO,QAAQ,SAAS,MAAM,OAAO,QAAQ;AAC7C,SAAO,QAAQ,WAAW,MAAM,OAAO,QAAQ;AAC/C,SAAO,QAAQ,cAAc,MAAM,OAAO,QAAQ;AAClD,SAAO,QAAQ,eAAe,MAAM,OAAO,QAAQ;AACnD,SAAO,QAAQ,YAAY,MAAM,OAAO,QAAQ;AAAA;AAGlD;AACE,OAAI;AACJ;AACA,WAAS,eAAe,OAAO,YAAY,kBAAkB,MAAM,kCAAkC,MAAM,GAAG;AAC9G,QAAM,GAAG,IAAI,IAAI,4BAA4B;AAE7C,MAAI,GAAG;AACL,WAAO;AACP,UAAM,MAAM,KAAK;AAAA;AAGnB,MAAI,GAAG;AACL,WAAO;AACP,UAAM,MAAM,OAAO;AAAA;AAErB,SAAO;AACP,WAAS,eAAe,UAAU,MAAM,UAAU;AAClD,WAAS,eAAe,QAAQ,MAAM,UAAU;AAAA;AAGlD,OAAO,SAAS;AAChB,OAAO,WAAW;",
+ "sourcesContent": ["\"use strict\";\n\n// ref: https://github.com/tc39/proposal-global\nvar getGlobal = function () {\n\t// the only reliable means to get the global object is\n\t// `Function('return this')()`\n\t// However, this causes CSP violations in Chrome apps.\n\tif (typeof self !== 'undefined') { return self; }\n\tif (typeof window !== 'undefined') { return window; }\n\tif (typeof global !== 'undefined') { return global; }\n\tthrow new Error('unable to locate global object');\n}\n\nvar global = getGlobal();\n\nmodule.exports = exports = global.fetch;\n\n// Needed for TypeScript and Webpack.\nif (global.fetch) {\n\texports.default = global.fetch.bind(global);\n}\n\nexports.Headers = global.Headers;\nexports.Request = global.Request;\nexports.Response = global.Response;", "/* eslint-disable node/no-deprecated-api */\nvar buffer = require('buffer')\nvar Buffer = buffer.Buffer\n\n// alternative to using Object.keys for old browsers\nfunction copyProps (src, dst) {\n for (var key in src) {\n dst[key] = src[key]\n }\n}\nif (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) {\n module.exports = buffer\n} else {\n // Copy properties from require('buffer')\n copyProps(buffer, exports)\n exports.Buffer = SafeBuffer\n}\n\nfunction SafeBuffer (arg, encodingOrOffset, length) {\n return Buffer(arg, encodingOrOffset, length)\n}\n\n// Copy static methods from Buffer\ncopyProps(Buffer, SafeBuffer)\n\nSafeBuffer.from = function (arg, encodingOrOffset, length) {\n if (typeof arg === 'number') {\n throw new TypeError('Argument must not be a number')\n }\n return Buffer(arg, encodingOrOffset, length)\n}\n\nSafeBuffer.alloc = function (size, fill, encoding) {\n if (typeof size !== 'number') {\n throw new TypeError('Argument must be a number')\n }\n var buf = Buffer(size)\n if (fill !== undefined) {\n if (typeof encoding === 'string') {\n buf.fill(fill, encoding)\n } else {\n buf.fill(fill)\n }\n } else {\n buf.fill(0)\n }\n return buf\n}\n\nSafeBuffer.allocUnsafe = function (size) {\n if (typeof size !== 'number') {\n throw new TypeError('Argument must be a number')\n }\n return Buffer(size)\n}\n\nSafeBuffer.allocUnsafeSlow = function (size) {\n if (typeof size !== 'number') {\n throw new TypeError('Argument must be a number')\n }\n return buffer.SlowBuffer(size)\n}\n", "// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n'use strict';\n\n/**/\n\nvar Buffer = require('safe-buffer').Buffer;\n/* */\n\nvar isEncoding = Buffer.isEncoding || function (encoding) {\n encoding = '' + encoding;\n switch (encoding && encoding.toLowerCase()) {\n case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw':\n return true;\n default:\n return false;\n }\n};\n\nfunction _normalizeEncoding(enc) {\n if (!enc) return 'utf8';\n var retried;\n while (true) {\n switch (enc) {\n case 'utf8':\n case 'utf-8':\n return 'utf8';\n case 'ucs2':\n case 'ucs-2':\n case 'utf16le':\n case 'utf-16le':\n return 'utf16le';\n case 'latin1':\n case 'binary':\n return 'latin1';\n case 'base64':\n case 'ascii':\n case 'hex':\n return enc;\n default:\n if (retried) return; // undefined\n enc = ('' + enc).toLowerCase();\n retried = true;\n }\n }\n};\n\n// Do not cache `Buffer.isEncoding` when checking encoding names as some\n// modules monkey-patch it to support additional encodings\nfunction normalizeEncoding(enc) {\n var nenc = _normalizeEncoding(enc);\n if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc);\n return nenc || enc;\n}\n\n// StringDecoder provides an interface for efficiently splitting a series of\n// buffers into a series of JS strings without breaking apart multi-byte\n// characters.\nexports.StringDecoder = StringDecoder;\nfunction StringDecoder(encoding) {\n this.encoding = normalizeEncoding(encoding);\n var nb;\n switch (this.encoding) {\n case 'utf16le':\n this.text = utf16Text;\n this.end = utf16End;\n nb = 4;\n break;\n case 'utf8':\n this.fillLast = utf8FillLast;\n nb = 4;\n break;\n case 'base64':\n this.text = base64Text;\n this.end = base64End;\n nb = 3;\n break;\n default:\n this.write = simpleWrite;\n this.end = simpleEnd;\n return;\n }\n this.lastNeed = 0;\n this.lastTotal = 0;\n this.lastChar = Buffer.allocUnsafe(nb);\n}\n\nStringDecoder.prototype.write = function (buf) {\n if (buf.length === 0) return '';\n var r;\n var i;\n if (this.lastNeed) {\n r = this.fillLast(buf);\n if (r === undefined) return '';\n i = this.lastNeed;\n this.lastNeed = 0;\n } else {\n i = 0;\n }\n if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i);\n return r || '';\n};\n\nStringDecoder.prototype.end = utf8End;\n\n// Returns only complete characters in a Buffer\nStringDecoder.prototype.text = utf8Text;\n\n// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer\nStringDecoder.prototype.fillLast = function (buf) {\n if (this.lastNeed <= buf.length) {\n buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed);\n return this.lastChar.toString(this.encoding, 0, this.lastTotal);\n }\n buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length);\n this.lastNeed -= buf.length;\n};\n\n// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a\n// continuation byte. If an invalid byte is detected, -2 is returned.\nfunction utf8CheckByte(byte) {\n if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4;\n return byte >> 6 === 0x02 ? 
-1 : -2;\n}\n\n// Checks at most 3 bytes at the end of a Buffer in order to detect an\n// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4)\n// needed to complete the UTF-8 character (if applicable) are returned.\nfunction utf8CheckIncomplete(self, buf, i) {\n var j = buf.length - 1;\n if (j < i) return 0;\n var nb = utf8CheckByte(buf[j]);\n if (nb >= 0) {\n if (nb > 0) self.lastNeed = nb - 1;\n return nb;\n }\n if (--j < i || nb === -2) return 0;\n nb = utf8CheckByte(buf[j]);\n if (nb >= 0) {\n if (nb > 0) self.lastNeed = nb - 2;\n return nb;\n }\n if (--j < i || nb === -2) return 0;\n nb = utf8CheckByte(buf[j]);\n if (nb >= 0) {\n if (nb > 0) {\n if (nb === 2) nb = 0;else self.lastNeed = nb - 3;\n }\n return nb;\n }\n return 0;\n}\n\n// Validates as many continuation bytes for a multi-byte UTF-8 character as\n// needed or are available. If we see a non-continuation byte where we expect\n// one, we \"replace\" the validated continuation bytes we've seen so far with\n// a single UTF-8 replacement character ('\\ufffd'), to match v8's UTF-8 decoding\n// behavior. The continuation byte check is included three times in the case\n// where all of the continuation bytes for a character exist in the same buffer.\n// It is also done this way as a slight performance increase instead of using a\n// loop.\nfunction utf8CheckExtraBytes(self, buf, p) {\n if ((buf[0] & 0xC0) !== 0x80) {\n self.lastNeed = 0;\n return '\\ufffd';\n }\n if (self.lastNeed > 1 && buf.length > 1) {\n if ((buf[1] & 0xC0) !== 0x80) {\n self.lastNeed = 1;\n return '\\ufffd';\n }\n if (self.lastNeed > 2 && buf.length > 2) {\n if ((buf[2] & 0xC0) !== 0x80) {\n self.lastNeed = 2;\n return '\\ufffd';\n }\n }\n }\n}\n\n// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer.\nfunction utf8FillLast(buf) {\n var p = this.lastTotal - this.lastNeed;\n var r = utf8CheckExtraBytes(this, buf, p);\n if (r !== undefined) return r;\n if (this.lastNeed <= buf.length) {\n buf.copy(this.lastChar, p, 0, this.lastNeed);\n return this.lastChar.toString(this.encoding, 0, this.lastTotal);\n }\n buf.copy(this.lastChar, p, 0, buf.length);\n this.lastNeed -= buf.length;\n}\n\n// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a\n// partial character, the character's bytes are buffered until the required\n// number of bytes are available.\nfunction utf8Text(buf, i) {\n var total = utf8CheckIncomplete(this, buf, i);\n if (!this.lastNeed) return buf.toString('utf8', i);\n this.lastTotal = total;\n var end = buf.length - (total - this.lastNeed);\n buf.copy(this.lastChar, 0, end);\n return buf.toString('utf8', i, end);\n}\n\n// For UTF-8, a replacement character is added when ending on a partial\n// character.\nfunction utf8End(buf) {\n var r = buf && buf.length ? this.write(buf) : '';\n if (this.lastNeed) return r + '\\ufffd';\n return r;\n}\n\n// UTF-16LE typically needs two bytes per character, but even if we have an even\n// number of bytes available, we need to check if we end on a leading/high\n// surrogate. 
In that case, we need to wait for the next two bytes in order to\n// decode the last character properly.\nfunction utf16Text(buf, i) {\n if ((buf.length - i) % 2 === 0) {\n var r = buf.toString('utf16le', i);\n if (r) {\n var c = r.charCodeAt(r.length - 1);\n if (c >= 0xD800 && c <= 0xDBFF) {\n this.lastNeed = 2;\n this.lastTotal = 4;\n this.lastChar[0] = buf[buf.length - 2];\n this.lastChar[1] = buf[buf.length - 1];\n return r.slice(0, -1);\n }\n }\n return r;\n }\n this.lastNeed = 1;\n this.lastTotal = 2;\n this.lastChar[0] = buf[buf.length - 1];\n return buf.toString('utf16le', i, buf.length - 1);\n}\n\n// For UTF-16LE we do not explicitly append special replacement characters if we\n// end on a partial character, we simply let v8 handle that.\nfunction utf16End(buf) {\n var r = buf && buf.length ? this.write(buf) : '';\n if (this.lastNeed) {\n var end = this.lastTotal - this.lastNeed;\n return r + this.lastChar.toString('utf16le', 0, end);\n }\n return r;\n}\n\nfunction base64Text(buf, i) {\n var n = (buf.length - i) % 3;\n if (n === 0) return buf.toString('base64', i);\n this.lastNeed = 3 - n;\n this.lastTotal = 3;\n if (n === 1) {\n this.lastChar[0] = buf[buf.length - 1];\n } else {\n this.lastChar[0] = buf[buf.length - 2];\n this.lastChar[1] = buf[buf.length - 1];\n }\n return buf.toString('base64', i, buf.length - n);\n}\n\nfunction base64End(buf) {\n var r = buf && buf.length ? this.write(buf) : '';\n if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed);\n return r;\n}\n\n// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex)\nfunction simpleWrite(buf) {\n return buf.toString(this.encoding);\n}\n\nfunction simpleEnd(buf) {\n return buf && buf.length ? this.write(buf) : '';\n}", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const EPSILON_FLOAT32 = 1e-7;\nexport const EPSILON_FLOAT16 = 1e-4;\n/** Convenient class for storing tensor-related data. */\nexport class DataStorage {\n constructor(backend, dataMover) {\n this.backend = backend;\n this.dataMover = dataMover;\n this.data = new WeakMap();\n this.dataIdsCount = 0;\n }\n get(dataId) {\n if (!this.data.has(dataId)) {\n this.dataMover.moveData(this.backend, dataId);\n }\n return this.data.get(dataId);\n }\n set(dataId, value) {\n this.dataIdsCount++;\n this.data.set(dataId, value);\n }\n has(dataId) {\n return this.data.has(dataId);\n }\n delete(dataId) {\n this.dataIdsCount--;\n return this.data.delete(dataId);\n }\n numDataIds() {\n return this.dataIdsCount;\n }\n}\n/**\n * The interface that defines the kernels that should be implemented when\n * adding a new backend. 
New backends don't need to implement every one of the\n * methods, this can be done gradually (throw an error for unimplemented\n * methods).\n */\nexport class KernelBackend {\n time(f) {\n return notYetImplemented('time');\n }\n read(dataId) {\n return notYetImplemented('read');\n }\n readSync(dataId) {\n return notYetImplemented('readSync');\n }\n numDataIds() {\n return notYetImplemented('numDataIds');\n }\n disposeData(dataId) {\n return notYetImplemented('disposeData');\n }\n write(values, shape, dtype) {\n return notYetImplemented('write');\n }\n move(dataId, values, shape, dtype) {\n return notYetImplemented('move');\n }\n memory() {\n return notYetImplemented('memory');\n }\n /** Returns the highest precision for floats in bits (e.g. 16 or 32) */\n floatPrecision() {\n return notYetImplemented('floatPrecision');\n }\n /** Returns the smallest representable number. */\n epsilon() {\n return this.floatPrecision() === 32 ? EPSILON_FLOAT32 : EPSILON_FLOAT16;\n }\n batchMatMul(a, b, transposeA, transposeB) {\n return notYetImplemented('batchMatMul');\n }\n fusedBatchMatMul({ a, b, transposeA, transposeB, bias, activation, preluActivationWeights }) {\n return notYetImplemented('fusedBatchMatMul');\n }\n slice(x, begin, size) {\n return notYetImplemented('slice');\n }\n stridedSlice(x, begin, end, strides) {\n return notYetImplemented('stridedSlice');\n }\n unstack(x, axis) {\n return notYetImplemented('unstack');\n }\n reverse(a, axis) {\n return notYetImplemented('reverse');\n }\n concat(tensors, axis) {\n return notYetImplemented('concat');\n }\n neg(a) {\n return notYetImplemented('neg');\n }\n add(a, b) {\n return notYetImplemented('add');\n }\n addN(tensors) {\n return notYetImplemented('addN');\n }\n subtract(a, b) {\n return notYetImplemented('subtract');\n }\n multiply(a, b) {\n return notYetImplemented('multiply');\n }\n realDivide(a, b) {\n return notYetImplemented('realDivide');\n }\n floorDiv(a, b) {\n return notYetImplemented('floorDiv');\n }\n sum(x, axes) {\n return notYetImplemented('sum');\n }\n prod(x, axes) {\n return notYetImplemented('prod');\n }\n unsortedSegmentSum(x, segmentIds, numSegments) {\n return notYetImplemented('unsortedSegmentSum');\n }\n argMin(x, axis) {\n return notYetImplemented('argMin');\n }\n argMax(x, axis) {\n return notYetImplemented('argMax');\n }\n equal(a, b) {\n return notYetImplemented('equal');\n }\n notEqual(a, b) {\n return notYetImplemented('notEqual');\n }\n less(a, b) {\n return notYetImplemented('less');\n }\n lessEqual(a, b) {\n return notYetImplemented('lessEqual');\n }\n greater(a, b) {\n return notYetImplemented('greater');\n }\n greaterEqual(a, b) {\n return notYetImplemented('greaterEqual');\n }\n logicalNot(a) {\n return notYetImplemented('logicalNot');\n }\n logicalAnd(a, b) {\n return notYetImplemented('logicalAnd');\n }\n logicalOr(a, b) {\n return notYetImplemented('logicalOr');\n }\n where(condition) {\n return notYetImplemented('where');\n }\n select(condition, a, b) {\n return notYetImplemented('select');\n }\n topk(x, k, sorted) {\n return notYetImplemented('topk');\n }\n min(x, axes) {\n return notYetImplemented('min');\n }\n minimum(a, b) {\n return notYetImplemented('minimum');\n }\n mod(a, b) {\n return notYetImplemented('mod');\n }\n max(x, axes) {\n return notYetImplemented('max');\n }\n maximum(a, b) {\n return notYetImplemented('maximum');\n }\n all(x, axes) {\n return notYetImplemented('all');\n }\n any(x, axes) {\n return notYetImplemented('any');\n }\n squaredDifference(a, b) {\n return 
notYetImplemented('squaredDifference');\n }\n ceil(x) {\n return notYetImplemented('ceil');\n }\n floor(x) {\n return notYetImplemented('floor');\n }\n round(x) {\n return notYetImplemented('round');\n }\n sign(x) {\n return notYetImplemented('sign');\n }\n isNaN(x) {\n return notYetImplemented('isNaN');\n }\n isInf(x) {\n return notYetImplemented('isInf');\n }\n isFinite(x) {\n return notYetImplemented('isFinite');\n }\n pow(a, b) {\n return notYetImplemented('pow');\n }\n exp(x) {\n return notYetImplemented('exp');\n }\n expm1(x) {\n return notYetImplemented('expm1');\n }\n softmax(x, dim) {\n return notYetImplemented('softmax');\n }\n log(x) {\n return notYetImplemented('log');\n }\n log1p(x) {\n return notYetImplemented('log1p');\n }\n sqrt(x) {\n return notYetImplemented('sqrt');\n }\n rsqrt(x) {\n return notYetImplemented('rsqrt');\n }\n square(x) {\n return notYetImplemented('square');\n }\n reciprocal(x) {\n return notYetImplemented('reciprocal');\n }\n relu(x) {\n return notYetImplemented('relu');\n }\n relu6(x) {\n return notYetImplemented('relu6');\n }\n prelu(x, a) {\n return notYetImplemented('prelu');\n }\n elu(x) {\n return notYetImplemented('elu');\n }\n eluDer(dy, y) {\n return notYetImplemented('eluDer');\n }\n selu(x) {\n return notYetImplemented('selu');\n }\n int(x) {\n return notYetImplemented('int');\n }\n clip(x, min, max) {\n return notYetImplemented('clip');\n }\n abs(x) {\n return notYetImplemented('abs');\n }\n complexAbs(x) {\n return notYetImplemented('complexAbs');\n }\n sigmoid(x) {\n return notYetImplemented('sigmoid');\n }\n softplus(x) {\n return notYetImplemented('softplus');\n }\n sin(x) {\n return notYetImplemented('sin');\n }\n cos(x) {\n return notYetImplemented('cos');\n }\n tan(x) {\n return notYetImplemented('tan');\n }\n asin(x) {\n return notYetImplemented('asin');\n }\n acos(x) {\n return notYetImplemented('acos');\n }\n atan(x) {\n return notYetImplemented('atan');\n }\n atan2(a, b) {\n return notYetImplemented('atan2');\n }\n sinh(x) {\n return notYetImplemented('sinh');\n }\n cosh(x) {\n return notYetImplemented('cosh');\n }\n tanh(x) {\n return notYetImplemented('tanh');\n }\n asinh(x) {\n return notYetImplemented('asinh');\n }\n acosh(x) {\n return notYetImplemented('acosh');\n }\n atanh(x) {\n return notYetImplemented('atanh');\n }\n erf(x) {\n return notYetImplemented('erf');\n }\n step(x, alpha) {\n return notYetImplemented('step');\n }\n fusedConv2d({ input, filter, convInfo, bias, activation, preluActivationWeights }) {\n return notYetImplemented('fusedConv2d');\n }\n conv2d(x, filter, convInfo) {\n return notYetImplemented('conv2d');\n }\n conv2dDerInput(dy, filter, convInfo) {\n return notYetImplemented('conv2dDerInput');\n }\n conv2dDerFilter(x, dY, convInfo) {\n return notYetImplemented('conv2dDerFilter');\n }\n fusedDepthwiseConv2D({ input, filter, convInfo, bias, activation, preluActivationWeights }) {\n return notYetImplemented('fusedDepthwiseConv2D');\n }\n depthwiseConv2D(input, filter, convInfo) {\n return notYetImplemented('depthwiseConv2D');\n }\n depthwiseConv2DDerInput(dy, filter, convInfo) {\n return notYetImplemented('depthwiseConv2DDerInput');\n }\n depthwiseConv2DDerFilter(x, dY, convInfo) {\n return notYetImplemented('depthwiseConv2DDerFilter');\n }\n conv3d(x, filter, convInfo) {\n return notYetImplemented('conv3d');\n }\n conv3dDerInput(dy, filter, convInfo) {\n return notYetImplemented('conv3dDerInput');\n }\n conv3dDerFilter(x, dY, convInfo) {\n return notYetImplemented('conv3dDerFilter');\n }\n maxPool(x, 
convInfo) {\n return notYetImplemented('maxPool');\n }\n maxPoolBackprop(dy, x, y, convInfo) {\n return notYetImplemented('maxPoolBackprop');\n }\n avgPool(x, convInfo) {\n return notYetImplemented('avgPool');\n }\n avgPoolBackprop(dy, x, convInfo) {\n return notYetImplemented('avgPoolBackprop');\n }\n avgPool3d(x, convInfo) {\n return notYetImplemented('avgPool3d');\n }\n avgPool3dBackprop(dy, x, convInfo) {\n return notYetImplemented('avgPool3dBackprop');\n }\n maxPool3d(x, convInfo) {\n return notYetImplemented('maxPool3d');\n }\n maxPool3dBackprop(dy, x, y, convInfo) {\n return notYetImplemented('maxPool3dBackprop');\n }\n reshape(x, shape) {\n return notYetImplemented('reshape');\n }\n cast(x, dtype) {\n return notYetImplemented('cast');\n }\n tile(x, reps) {\n return notYetImplemented('tile');\n }\n pad(x, paddings, constantValue) {\n return notYetImplemented('pad');\n }\n transpose(x, perm) {\n return notYetImplemented('transpose');\n }\n gather(x, indices, axis) {\n return notYetImplemented('gather');\n }\n gatherND(x, indices) {\n return notYetImplemented('gatherND');\n }\n scatterND(indices, updates, shape) {\n return notYetImplemented('scatterND');\n }\n batchToSpaceND(x, blockShape, crops) {\n return notYetImplemented('batchToSpaceND');\n }\n spaceToBatchND(x, blockShape, paddings) {\n return notYetImplemented('spaceToBatchND');\n }\n resizeBilinear(x, newHeight, newWidth, alignCorners) {\n return notYetImplemented('resizeBilinear');\n }\n resizeBilinearBackprop(dy, x, alignCorners) {\n return notYetImplemented('resizeBilinearBackprop');\n }\n resizeNearestNeighbor(x, newHEight, newWidth, alignCorners) {\n return notYetImplemented('resizeNearestNeighbor');\n }\n resizeNearestNeighborBackprop(dy, x, alignCorners) {\n return notYetImplemented('resizeNearestNeighborBackprop');\n }\n batchNorm(x, mean, variance, offset, scale, varianceEpsilon) {\n return notYetImplemented('batchNorm');\n }\n localResponseNormalization4D(x, radius, bias, alpha, beta) {\n return notYetImplemented('localResponseNormalization4D');\n }\n LRNGrad(dy, inputImage, outputImage, radius, bias, alpha, beta) {\n return notYetImplemented('LRNGrad');\n }\n multinomial(logits, normalized, numSamples, seed) {\n return notYetImplemented('multinomial');\n }\n oneHot(indices, depth, onValue, offValue) {\n return notYetImplemented('oneHot');\n }\n cumsum(x, axis, exclusive, reverse) {\n return notYetImplemented('cumsum');\n }\n nonMaxSuppression(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold) {\n return notYetImplemented('nonMaxSuppression');\n }\n fft(x) {\n return notYetImplemented('fft');\n }\n ifft(x) {\n return notYetImplemented('ifft');\n }\n complex(real, imag) {\n return notYetImplemented('complex');\n }\n real(input) {\n return notYetImplemented('real');\n }\n imag(input) {\n return notYetImplemented('imag');\n }\n cropAndResize(image, boxes, boxIndex, cropSize, method, extrapolationValue) {\n return notYetImplemented('cropAndResize');\n }\n depthToSpace(x, blockSize, dataFormat) {\n return notYetImplemented('depthToSpace');\n }\n // Aligns with the \"SplitV\" kernel in TensorFlow.\n split(value, sizeSplits, axis) {\n return notYetImplemented('split');\n }\n sparseToDense(sparseIndices, sparseValues, outputShape, defaultValue) {\n return notYetImplemented('sparseToDense');\n }\n diag(x) {\n return notYetImplemented('diag');\n }\n fill(shape, value, dtype) {\n return notYetImplemented('fill');\n }\n onesLike(x) {\n return notYetImplemented('onesLike');\n }\n zerosLike(x) {\n return 
notYetImplemented('zerosLike');\n }\n linspace(start, stop, num) {\n return notYetImplemented('linspace');\n }\n dispose() {\n return notYetImplemented('dispose');\n }\n}\nfunction notYetImplemented(kernelName) {\n throw new Error(`'${kernelName}' not yet implemented or not found in the registry. ` +\n `This kernel may not be supported by the tfjs backend you have chosen`);\n}\n//# sourceMappingURL=backend.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * Shuffles the array in-place using Fisher-Yates algorithm.\n *\n * ```js\n * const a = [1, 2, 3, 4, 5];\n * tf.util.shuffle(a);\n * console.log(a);\n * ```\n *\n * @param array The array to shuffle in-place.\n *\n * @doc {heading: 'Util', namespace: 'util'}\n */\n// tslint:disable-next-line:no-any\nexport function shuffle(array) {\n let counter = array.length;\n let temp = 0;\n let index = 0;\n // While there are elements in the array\n while (counter > 0) {\n // Pick a random index\n index = (Math.random() * counter) | 0;\n // Decrease counter by 1\n counter--;\n // And swap the last element with it\n temp = array[counter];\n array[counter] = array[index];\n array[index] = temp;\n }\n}\n/** Clamps a value to a specified range. */\nexport function clamp(min, x, max) {\n return Math.max(min, Math.min(x, max));\n}\nexport function nearestLargerEven(val) {\n return val % 2 === 0 ? val : val + 1;\n}\nexport function sum(arr) {\n let sum = 0;\n for (let i = 0; i < arr.length; i++) {\n sum += arr[i];\n }\n return sum;\n}\n/**\n * Returns a sample from a uniform [a, b) distribution.\n *\n * @param a The minimum support (inclusive).\n * @param b The maximum support (exclusive).\n * @return A pseudorandom number on the half-open interval [a,b).\n */\nexport function randUniform(a, b) {\n const r = Math.random();\n return (b * r) + (1 - r) * a;\n}\n/** Returns the squared Euclidean distance between two vectors. */\nexport function distSquared(a, b) {\n let result = 0;\n for (let i = 0; i < a.length; i++) {\n const diff = Number(a[i]) - Number(b[i]);\n result += diff * diff;\n }\n return result;\n}\n/**\n * Asserts that the expression is true. Otherwise throws an error with the\n * provided message.\n *\n * ```js\n * const x = 2;\n * tf.util.assert(x === 2, 'x is not 2');\n * ```\n *\n * @param expr The expression to assert (as a boolean).\n * @param msg A function that returns the message to report when throwing an\n * error. We use a function for performance reasons.\n *\n * @doc {heading: 'Util', namespace: 'util'}\n */\nexport function assert(expr, msg) {\n if (!expr) {\n throw new Error(typeof msg === 'string' ? 
msg : msg());\n }\n}\nexport function assertShapesMatch(shapeA, shapeB, errorMessagePrefix = '') {\n assert(arraysEqual(shapeA, shapeB), () => errorMessagePrefix + ` Shapes ${shapeA} and ${shapeB} must match`);\n}\nexport function assertNonNull(a) {\n assert(a != null, () => `The input to the tensor constructor must be a non-null value.`);\n}\n// NOTE: We explicitly type out what T extends instead of any so that\n// util.flatten on a nested array of number doesn't try to infer T as a\n// number[][], causing us to explicitly type util.flatten().\n/**\n * Flattens an arbitrarily nested array.\n *\n * ```js\n * const a = [[1, 2], [3, 4], [5, [6, [7]]]];\n * const flat = tf.util.flatten(a);\n * console.log(flat);\n * ```\n *\n * @param arr The nested array to flatten.\n * @param result The destination array which holds the elements.\n * @param skipTypedArray If true, avoids flattening the typed arrays. Defaults\n * to false.\n *\n * @doc {heading: 'Util', namespace: 'util'}\n */\nexport function flatten(arr, result = [], skipTypedArray = false) {\n if (result == null) {\n result = [];\n }\n if (Array.isArray(arr) || isTypedArray(arr) && !skipTypedArray) {\n for (let i = 0; i < arr.length; ++i) {\n flatten(arr[i], result, skipTypedArray);\n }\n }\n else {\n result.push(arr);\n }\n return result;\n}\n/**\n * Returns the size (number of elements) of the tensor given its shape.\n *\n * ```js\n * const shape = [3, 4, 2];\n * const size = tf.util.sizeFromShape(shape);\n * console.log(size);\n * ```\n *\n * @doc {heading: 'Util', namespace: 'util'}\n */\nexport function sizeFromShape(shape) {\n if (shape.length === 0) {\n // Scalar.\n return 1;\n }\n let size = shape[0];\n for (let i = 1; i < shape.length; i++) {\n size *= shape[i];\n }\n return size;\n}\nexport function isScalarShape(shape) {\n return shape.length === 0;\n}\nexport function arraysEqual(n1, n2) {\n if (n1 === n2) {\n return true;\n }\n if (n1 == null || n2 == null) {\n return false;\n }\n if (n1.length !== n2.length) {\n return false;\n }\n for (let i = 0; i < n1.length; i++) {\n if (n1[i] !== n2[i]) {\n return false;\n }\n }\n return true;\n}\nexport function isInt(a) {\n return a % 1 === 0;\n}\nexport function tanh(x) {\n // tslint:disable-next-line:no-any\n if (Math.tanh != null) {\n // tslint:disable-next-line:no-any\n return Math.tanh(x);\n }\n if (x === Infinity) {\n return 1;\n }\n else if (x === -Infinity) {\n return -1;\n }\n else {\n const e2x = Math.exp(2 * x);\n return (e2x - 1) / (e2x + 1);\n }\n}\nexport function sizeToSquarishShape(size) {\n const width = Math.ceil(Math.sqrt(size));\n return [width, Math.ceil(size / width)];\n}\n/**\n * Creates a new array with randomized indicies to a given quantity.\n *\n * ```js\n * const randomTen = tf.util.createShuffledIndices(10);\n * console.log(randomTen);\n * ```\n *\n * @param number Quantity of how many shuffled indicies to create.\n *\n * @doc {heading: 'Util', namespace: 'util'}\n */\nexport function createShuffledIndices(n) {\n const shuffledIndices = new Uint32Array(n);\n for (let i = 0; i < n; ++i) {\n shuffledIndices[i] = i;\n }\n shuffle(shuffledIndices);\n return shuffledIndices;\n}\nexport function rightPad(a, size) {\n if (size <= a.length) {\n return a;\n }\n return a + ' '.repeat(size - a.length);\n}\nexport function repeatedTry(checkFn, delayFn = (counter) => 0, maxCounter) {\n return new Promise((resolve, reject) => {\n let tryCount = 0;\n const tryFn = () => {\n if (checkFn()) {\n resolve();\n return;\n }\n tryCount++;\n const nextBackoff = 
delayFn(tryCount);\n if (maxCounter != null && tryCount >= maxCounter) {\n reject();\n return;\n }\n setTimeout(tryFn, nextBackoff);\n };\n tryFn();\n });\n}\n/**\n * Given the full size of the array and a shape that may contain -1 as the\n * implicit dimension, returns the inferred shape where -1 is replaced.\n * E.g. For shape=[2, -1, 3] and size=24, it will return [2, 4, 3].\n *\n * @param shape The shape, which may contain -1 in some dimension.\n * @param size The full size (number of elements) of the array.\n * @return The inferred shape where -1 is replaced with the inferred size.\n */\nexport function inferFromImplicitShape(shape, size) {\n let shapeProd = 1;\n let implicitIdx = -1;\n for (let i = 0; i < shape.length; ++i) {\n if (shape[i] >= 0) {\n shapeProd *= shape[i];\n }\n else if (shape[i] === -1) {\n if (implicitIdx !== -1) {\n throw Error(`Shapes can only have 1 implicit size. ` +\n `Found -1 at dim ${implicitIdx} and dim ${i}`);\n }\n implicitIdx = i;\n }\n else if (shape[i] < 0) {\n throw Error(`Shapes can not be < 0. Found ${shape[i]} at dim ${i}`);\n }\n }\n if (implicitIdx === -1) {\n if (size > 0 && size !== shapeProd) {\n throw Error(`Size(${size}) must match the product of shape ${shape}`);\n }\n return shape;\n }\n if (shapeProd === 0) {\n throw Error(`Cannot infer the missing size in [${shape}] when ` +\n `there are 0 elements`);\n }\n if (size % shapeProd !== 0) {\n throw Error(`The implicit shape can't be a fractional number. ` +\n `Got ${size} / ${shapeProd}`);\n }\n const newShape = shape.slice();\n newShape[implicitIdx] = size / shapeProd;\n return newShape;\n}\nexport function parseAxisParam(axis, shape) {\n const rank = shape.length;\n // Normalize input\n axis = axis == null ? shape.map((s, i) => i) : [].concat(axis);\n // Check for valid range\n assert(axis.every(ax => ax >= -rank && ax < rank), () => `All values in axis param must be in range [-${rank}, ${rank}) but ` +\n `got axis ${axis}`);\n // Check for only integers\n assert(axis.every(ax => isInt(ax)), () => `All values in axis param must be integers but ` +\n `got axis ${axis}`);\n // Handle negative axis.\n return axis.map(a => a < 0 ? rank + a : a);\n}\n/** Reduces the shape by removing all dimensions of shape 1. 
*/\nexport function squeezeShape(shape, axis) {\n const newShape = [];\n const keptDims = [];\n const isEmptyArray = axis != null && Array.isArray(axis) && axis.length === 0;\n const axes = (axis == null || isEmptyArray) ?\n null :\n parseAxisParam(axis, shape).sort();\n let j = 0;\n for (let i = 0; i < shape.length; ++i) {\n if (axes != null) {\n if (axes[j] === i && shape[i] !== 1) {\n throw new Error(`Can't squeeze axis ${i} since its dim '${shape[i]}' is not 1`);\n }\n if ((axes[j] == null || axes[j] > i) && shape[i] === 1) {\n newShape.push(shape[i]);\n keptDims.push(i);\n }\n if (axes[j] <= i) {\n j++;\n }\n }\n if (shape[i] !== 1) {\n newShape.push(shape[i]);\n keptDims.push(i);\n }\n }\n return { newShape, keptDims };\n}\nexport function getTypedArrayFromDType(dtype, size) {\n let values = null;\n if (dtype == null || dtype === 'float32') {\n values = new Float32Array(size);\n }\n else if (dtype === 'int32') {\n values = new Int32Array(size);\n }\n else if (dtype === 'bool') {\n values = new Uint8Array(size);\n }\n else {\n throw new Error(`Unknown data type ${dtype}`);\n }\n return values;\n}\nexport function getArrayFromDType(dtype, size) {\n let values = null;\n if (dtype == null || dtype === 'float32') {\n values = new Float32Array(size);\n }\n else if (dtype === 'int32') {\n values = new Int32Array(size);\n }\n else if (dtype === 'bool') {\n values = new Uint8Array(size);\n }\n else if (dtype === 'string') {\n values = new Array(size);\n }\n else {\n throw new Error(`Unknown data type ${dtype}`);\n }\n return values;\n}\nexport function checkConversionForErrors(vals, dtype) {\n for (let i = 0; i < vals.length; i++) {\n const num = vals[i];\n if (isNaN(num) || !isFinite(num)) {\n throw Error(`A tensor of type ${dtype} being uploaded contains ${num}.`);\n }\n }\n}\n/** Returns true if the dtype is valid. */\nexport function isValidDtype(dtype) {\n return dtype === 'bool' || dtype === 'complex64' || dtype === 'float32' ||\n dtype === 'int32' || dtype === 'string';\n}\n/**\n * Returns true if the new type can't encode the old type without loss of\n * precision.\n */\nexport function hasEncodingLoss(oldType, newType) {\n if (newType === 'complex64') {\n return false;\n }\n if (newType === 'float32' && oldType !== 'complex64') {\n return false;\n }\n if (newType === 'int32' && oldType !== 'float32' && oldType !== 'complex64') {\n return false;\n }\n if (newType === 'bool' && oldType === 'bool') {\n return false;\n }\n return true;\n}\nexport function isTypedArray(a) {\n return a instanceof Float32Array || a instanceof Int32Array ||\n a instanceof Uint8Array;\n}\nexport function bytesPerElement(dtype) {\n if (dtype === 'float32' || dtype === 'int32') {\n return 4;\n }\n else if (dtype === 'complex64') {\n return 8;\n }\n else if (dtype === 'bool') {\n return 1;\n }\n else {\n throw new Error(`Unknown dtype ${dtype}`);\n }\n}\n/**\n * Returns the approximate number of bytes allocated in the string array - 2\n * bytes per character. Computing the exact bytes for a native string in JS is\n * not possible since it depends on the encoding of the html page that serves\n * the website.\n */\nexport function bytesFromStringArray(arr) {\n if (arr == null) {\n return 0;\n }\n let bytes = 0;\n arr.forEach(x => bytes += x.length);\n return bytes;\n}\n/** Returns true if the value is a string. 
*/\nexport function isString(value) {\n return typeof value === 'string' || value instanceof String;\n}\nexport function isBoolean(value) {\n return typeof value === 'boolean';\n}\nexport function isNumber(value) {\n return typeof value === 'number';\n}\nexport function inferDtype(values) {\n if (Array.isArray(values)) {\n return inferDtype(values[0]);\n }\n if (values instanceof Float32Array) {\n return 'float32';\n }\n else if (values instanceof Int32Array || values instanceof Uint8Array) {\n return 'int32';\n }\n else if (isNumber(values)) {\n return 'float32';\n }\n else if (isString(values)) {\n return 'string';\n }\n else if (isBoolean(values)) {\n return 'bool';\n }\n return 'float32';\n}\nexport function isFunction(f) {\n return !!(f && f.constructor && f.call && f.apply);\n}\nexport function nearestDivisor(size, start) {\n for (let i = start; i < size; ++i) {\n if (size % i === 0) {\n return i;\n }\n }\n return size;\n}\nexport function computeStrides(shape) {\n const rank = shape.length;\n if (rank < 2) {\n return [];\n }\n // Last dimension has implicit stride of 1, thus having D-1 (instead of D)\n // strides.\n const strides = new Array(rank - 1);\n strides[rank - 2] = shape[rank - 1];\n for (let i = rank - 3; i >= 0; --i) {\n strides[i] = strides[i + 1] * shape[i + 1];\n }\n return strides;\n}\nfunction createNestedArray(offset, shape, a) {\n const ret = new Array();\n if (shape.length === 1) {\n const d = shape[0];\n for (let i = 0; i < d; i++) {\n ret[i] = a[offset + i];\n }\n }\n else {\n const d = shape[0];\n const rest = shape.slice(1);\n const len = rest.reduce((acc, c) => acc * c);\n for (let i = 0; i < d; i++) {\n ret[i] = createNestedArray(offset + i * len, rest, a);\n }\n }\n return ret;\n}\n// Provide a nested array of TypedArray in given shape.\nexport function toNestedArray(shape, a) {\n if (shape.length === 0) {\n // Scalar type should return a single number.\n return a[0];\n }\n const size = shape.reduce((acc, c) => acc * c);\n if (size === 0) {\n // A tensor with shape zero should be turned into empty list.\n return [];\n }\n if (size !== a.length) {\n throw new Error(`[${shape}] does not match the input size ${a.length}.`);\n }\n return createNestedArray(0, shape, a);\n}\nexport function makeOnesTypedArray(size, dtype) {\n const array = makeZerosTypedArray(size, dtype);\n for (let i = 0; i < array.length; i++) {\n array[i] = 1;\n }\n return array;\n}\nexport function makeZerosTypedArray(size, dtype) {\n if (dtype == null || dtype === 'float32' || dtype === 'complex64') {\n return new Float32Array(size);\n }\n else if (dtype === 'int32') {\n return new Int32Array(size);\n }\n else if (dtype === 'bool') {\n return new Uint8Array(size);\n }\n else {\n throw new Error(`Unknown data type ${dtype}`);\n }\n}\n/**\n * Make nested `TypedArray` filled with zeros.\n * @param shape The shape information for the nested array.\n * @param dtype dtype of the array element.\n */\nexport function makeZerosNestedTypedArray(shape, dtype) {\n const size = shape.reduce((prev, curr) => prev * curr, 1);\n if (dtype == null || dtype === 'float32') {\n return toNestedArray(shape, new Float32Array(size));\n }\n else if (dtype === 'int32') {\n return toNestedArray(shape, new Int32Array(size));\n }\n else if (dtype === 'bool') {\n return toNestedArray(shape, new Uint8Array(size));\n }\n else {\n throw new Error(`Unknown data type ${dtype}`);\n }\n}\nexport function assertNonNegativeIntegerDimensions(shape) {\n shape.forEach(dimSize => {\n assert(Number.isInteger(dimSize) && dimSize >= 0, 
() => `Tensor must have a shape comprised of positive integers but got ` +\n `shape [${shape}].`);\n });\n}\n/**\n * Computes flat index for a given location (multidimentionsal index) in a\n * Tensor/multidimensional array.\n *\n * @param locs Location in the tensor.\n * @param rank Rank of the tensor.\n * @param strides Tensor strides.\n */\nexport function locToIndex(locs, rank, strides) {\n if (rank === 0) {\n return 0;\n }\n else if (rank === 1) {\n return locs[0];\n }\n let index = locs[locs.length - 1];\n for (let i = 0; i < locs.length - 1; ++i) {\n index += strides[i] * locs[i];\n }\n return index;\n}\n/**\n * Computes the location (multidimensional index) in a tensor/multidimentional\n * array for a given flat index.\n *\n * @param index Index in flat array.\n * @param rank Rank of tensor.\n * @param strides Strides of tensor.\n */\nexport function indexToLoc(index, rank, strides) {\n if (rank === 0) {\n return [];\n }\n else if (rank === 1) {\n return [index];\n }\n const locs = new Array(rank);\n for (let i = 0; i < locs.length - 1; ++i) {\n locs[i] = Math.floor(index / strides[i]);\n index -= locs[i] * strides[i];\n }\n locs[locs.length - 1] = index;\n return locs;\n}\n/**\n * This method asserts whether an object is a Promise instance.\n * @param object\n */\n// tslint:disable-next-line: no-any\nexport function isPromise(object) {\n // We chose to not use 'obj instanceOf Promise' for two reasons:\n // 1. It only reliably works for es6 Promise, not other Promise\n // implementations.\n // 2. It doesn't work with framework that uses zone.js. zone.js monkey patch\n // the async calls, so it is possible the obj (patched) is comparing to a\n // pre-patched Promise.\n return object && object.then && typeof object.then === 'function';\n}\n//# sourceMappingURL=util_base.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { isPromise } from './util_base';\n// Expects flags from URL in the format ?tfjsflags=FLAG1:1,FLAG2:true.\nconst TENSORFLOWJS_FLAGS_PREFIX = 'tfjsflags';\n/**\n * The environment contains evaluated flags as well as the registered platform.\n * This is always used as a global singleton and can be retrieved with\n * `tf.env()`.\n *\n * @doc {heading: 'Environment'}\n */\nexport class Environment {\n // tslint:disable-next-line: no-any\n constructor(global) {\n this.global = global;\n this.flags = {};\n this.flagRegistry = {};\n this.urlFlags = {};\n this.populateURLFlags();\n }\n setPlatform(platformName, platform) {\n if (this.platform != null) {\n console.warn(`Platform ${this.platformName} has already been set. ` +\n `Overwriting the platform with ${platform}.`);\n }\n this.platformName = platformName;\n this.platform = platform;\n }\n registerFlag(flagName, evaluationFn, setHook) {\n this.flagRegistry[flagName] = { evaluationFn, setHook };\n // Override the flag value from the URL. 
This has to happen here because the\n // environment is initialized before flags get registered.\n if (this.urlFlags[flagName] != null) {\n const flagValue = this.urlFlags[flagName];\n console.warn(`Setting feature override from URL ${flagName}: ${flagValue}.`);\n this.set(flagName, flagValue);\n }\n }\n async getAsync(flagName) {\n if (flagName in this.flags) {\n return this.flags[flagName];\n }\n this.flags[flagName] = await this.evaluateFlag(flagName);\n return this.flags[flagName];\n }\n get(flagName) {\n if (flagName in this.flags) {\n return this.flags[flagName];\n }\n const flagValue = this.evaluateFlag(flagName);\n if (isPromise(flagValue)) {\n throw new Error(`Flag ${flagName} cannot be synchronously evaluated. ` +\n `Please use getAsync() instead.`);\n }\n this.flags[flagName] = flagValue;\n return this.flags[flagName];\n }\n getNumber(flagName) {\n return this.get(flagName);\n }\n getBool(flagName) {\n return this.get(flagName);\n }\n getFlags() {\n return this.flags;\n }\n // For backwards compatibility.\n get features() {\n return this.flags;\n }\n set(flagName, value) {\n if (this.flagRegistry[flagName] == null) {\n throw new Error(`Cannot set flag ${flagName} as it has not been registered.`);\n }\n this.flags[flagName] = value;\n if (this.flagRegistry[flagName].setHook != null) {\n this.flagRegistry[flagName].setHook(value);\n }\n }\n evaluateFlag(flagName) {\n if (this.flagRegistry[flagName] == null) {\n throw new Error(`Cannot evaluate flag '${flagName}': no evaluation function found.`);\n }\n return this.flagRegistry[flagName].evaluationFn();\n }\n setFlags(flags) {\n this.flags = Object.assign({}, flags);\n }\n reset() {\n this.flags = {};\n this.urlFlags = {};\n this.populateURLFlags();\n }\n populateURLFlags() {\n if (typeof this.global === 'undefined' ||\n typeof this.global.location === 'undefined' ||\n typeof this.global.location.search === 'undefined') {\n return;\n }\n const urlParams = getQueryParams(this.global.location.search);\n if (TENSORFLOWJS_FLAGS_PREFIX in urlParams) {\n const keyValues = urlParams[TENSORFLOWJS_FLAGS_PREFIX].split(',');\n keyValues.forEach(keyValue => {\n const [key, value] = keyValue.split(':');\n this.urlFlags[key] = parseValue(key, value);\n });\n }\n }\n}\nexport function getQueryParams(queryString) {\n const params = {};\n queryString.replace(/[?&]([^=?&]+)(?:=([^&]*))?/g, (s, ...t) => {\n decodeParam(params, t[0], t[1]);\n return t.join('=');\n });\n return params;\n}\nfunction decodeParam(params, name, value) {\n params[decodeURIComponent(name)] = decodeURIComponent(value || '');\n}\nfunction parseValue(flagName, value) {\n value = value.toLowerCase();\n if (value === 'true' || value === 'false') {\n return value === 'true';\n }\n else if (`${+value}` === value) {\n return +value;\n }\n throw new Error(`Could not parse value flag value ${value} for flag ${flagName}.`);\n}\n/**\n * Returns the current environment (a global singleton).\n *\n * The environment object contains the evaluated feature values as well as the\n * active platform.\n *\n * @doc {heading: 'Environment'}\n */\nexport function env() {\n return ENV;\n}\nexport let ENV = null;\nexport function setEnvironmentGlobal(environment) {\n ENV = environment;\n}\n//# sourceMappingURL=environment.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// Note that the identifier globalNameSpace is scoped to this module, but will\n// always resolve to the same global object regardless of how the module is\n// resolved.\n// tslint:disable-next-line:no-any\nlet globalNameSpace;\n// tslint:disable-next-line:no-any\nexport function getGlobalNamespace() {\n if (globalNameSpace == null) {\n // tslint:disable-next-line:no-any\n let ns;\n if (typeof (window) !== 'undefined') {\n ns = window;\n }\n else if (typeof (global) !== 'undefined') {\n ns = global;\n }\n else if (typeof (process) !== 'undefined') {\n ns = process;\n }\n else if (typeof (self) !== 'undefined') {\n ns = self;\n }\n else {\n throw new Error('Could not find a global object');\n }\n globalNameSpace = ns;\n }\n return globalNameSpace;\n}\n// tslint:disable-next-line:no-any\nfunction getGlobalMap() {\n const ns = getGlobalNamespace();\n if (ns._tfGlobals == null) {\n ns._tfGlobals = new Map();\n }\n return ns._tfGlobals;\n}\n/**\n * Returns a globally accessible 'singleton' object.\n *\n * @param key the name of the object\n * @param init a function to initialize to initialize this object\n * the first time it is fetched.\n */\nexport function getGlobal(key, init) {\n const globalMap = getGlobalMap();\n if (globalMap.has(key)) {\n return globalMap.get(key);\n }\n else {\n const singleton = init();\n globalMap.set(key, singleton);\n return globalMap.get(key);\n }\n}\n//# sourceMappingURL=global_util.js.map", "export const Abs = 'Abs';\nexport const Acos = 'Acos';\nexport const Acosh = 'Acosh';\nexport const Add = 'Add';\nexport const AddN = 'AddN';\nexport const All = 'All';\nexport const Any = 'Any';\nexport const ArgMax = 'ArgMax';\nexport const ArgMin = 'ArgMin';\nexport const Asin = 'Asin';\nexport const Asinh = 'Asinh';\nexport const Atan = 'Atan';\nexport const Atanh = 'Atanh';\nexport const Atan2 = 'Atan2';\nexport const AvgPool = 'AvgPool';\nexport const AvgPoolBackprop = 'AvgPoolBackprop';\nexport const AvgPool3D = 'AvgPool3D';\nexport const AvgPool3DBackprop = 'AvgPool3DBackprop';\nexport const BatchMatMul = 'BatchMatMul';\nexport const BatchToSpaceND = 'BatchToSpaceND';\nexport const BroadcastTo = 'BroadcastTo';\nexport const Cast = 'Cast';\nexport const Ceil = 'Ceil';\nexport const ClipByValue = 'ClipByValue';\nexport const Complex = 'Complex';\nexport const Concat = 'Concat';\nexport const Conv2D = 'Conv2D';\nexport const Conv2DBackpropFilter = 'Conv2DBackpropFilter';\nexport const Conv2DBackpropInput = 'Conv2DBackpropInput';\nexport const Conv3D = 'Conv3D';\nexport const Conv3DBackpropFilterV2 = 'Conv3DBackpropFilterV2';\nexport const Conv3DBackpropInputV2 = 'Conv3DBackpropInputV2';\nexport const Cos = 'Cos';\nexport const Cosh = 'Cosh';\nexport const Cumsum = 'Cumsum';\nexport const CropAndResize = 'CropAndResize';\nexport const DepthToSpace = 'DepthToSpace';\nexport const DepthwiseConv2dNative = 
'DepthwiseConv2dNative';\nexport const DepthwiseConv2dNativeBackpropFilter = 'DepthwiseConv2dNativeBackpropFilter';\nexport const DepthwiseConv2dNativeBackpropInput = 'DepthwiseConv2dNativeBackpropInput';\nexport const Diag = 'Diag';\nexport const Dilation2D = 'Dilation2D';\nexport const Dilation2DBackpropInput = 'Dilation2DBackpropInput';\nexport const Dilation2DBackpropFilter = 'Dilation2DBackpropFilter';\nexport const Div = 'Div';\nexport const Elu = 'Elu';\nexport const EluGrad = 'EluGrad';\nexport const Erf = 'Erf';\nexport const Equal = 'Equal';\nexport const Exp = 'Exp';\nexport const Expm1 = 'Expm1';\nexport const FFT = 'FFT';\nexport const Fill = 'Fill';\nexport const FlipLeftRight = 'FlipLeftRight';\nexport const Floor = 'Floor';\nexport const FloorDiv = 'FloorDiv';\nexport const FusedBatchNorm = 'FusedBatchNorm';\nexport const GatherV2 = 'GatherV2';\nexport const GatherNd = 'GatherNd';\nexport const Greater = 'Greater';\nexport const GreaterEqual = 'GreaterEqual';\nexport const Identity = 'Identity';\nexport const IFFT = 'IFFT';\nexport const Imag = 'Imag';\nexport const IsFinite = 'IsFinite';\nexport const IsInf = 'IsInf';\nexport const IsNan = 'IsNan';\nexport const Less = 'Less';\nexport const LessEqual = 'LessEqual';\nexport const LinSpace = 'LinSpace';\nexport const Log = 'Log';\nexport const Log1p = 'Log1p';\nexport const LogicalAnd = 'LogicalAnd';\nexport const LogicalNot = 'LogicalNot';\nexport const LogicalOr = 'LogicalOr';\nexport const LogSoftmax = 'LogSoftmax';\nexport const LRN = 'LRN';\nexport const LRNBackprop = 'LRNBackprop';\nexport const Max = 'Max';\nexport const Maximum = 'Maximum';\nexport const MaxPool = 'MaxPool';\nexport const MaxPoolBackprop = 'MaxPoolBackprop';\nexport const MaxPool3D = 'MaxPool3D';\nexport const MaxPool3DBackprop = 'MaxPool3DBackprop';\nexport const MaxPoolWithArgmax = 'MaxPoolWithArgmax';\nexport const Mean = 'Mean';\nexport const Min = 'Min';\nexport const Minimum = 'Minimum';\nexport const MirrorPad = 'MirrorPad';\nexport const Mod = 'Mod';\nexport const Multiply = 'Multiply';\nexport const Negate = 'Negate';\nexport const NotEqual = 'NotEqual';\nexport const NonMaxSuppressionV3 = 'NonMaxSuppressionV3';\nexport const NonMaxSuppressionV4 = 'NonMaxSuppressionV4';\nexport const NonMaxSuppressionV5 = 'NonMaxSuppressionV5';\nexport const OnesLike = 'OnesLike';\nexport const OneHot = 'OneHot';\nexport const PadV2 = 'PadV2';\nexport const Pool = 'Pool';\nexport const Pow = 'Pow';\nexport const Prelu = 'Prelu';\nexport const Prod = 'Prod';\nexport const Range = 'Range';\nexport const Real = 'Real';\nexport const Reciprocal = 'Reciprocal';\nexport const Relu = 'Relu';\nexport const Reshape = 'Reshape';\nexport const ResizeNearestNeighbor = 'ResizeNearestNeighbor';\nexport const ResizeNearestNeighborGrad = 'ResizeNearestNeighborGrad';\nexport const ResizeBilinear = 'ResizeBilinear';\nexport const ResizeBilinearGrad = 'ResizeBilinearGrad';\nexport const Relu6 = 'Relu6';\nexport const Reverse = 'Reverse';\nexport const Round = 'Round';\nexport const Rsqrt = 'Rsqrt';\nexport const ScatterNd = 'ScatterNd';\nexport const SelectV2 = 'SelectV2';\nexport const Selu = 'Selu';\nexport const Slice = 'Slice';\nexport const Sin = 'Sin';\nexport const Sinh = 'Sinh';\nexport const Sign = 'Sign';\nexport const Sigmoid = 'Sigmoid';\nexport const Softplus = 'Softplus';\nexport const Sqrt = 'Sqrt';\nexport const Sum = 'Sum';\nexport const SpaceToBatchND = 'SpaceToBatchND';\nexport const SplitV = 'SplitV';\nexport const Softmax = 'Softmax';\nexport const 
SquaredDifference = 'SquaredDifference';\nexport const Square = 'Square';\nexport const Sub = 'Sub';\nexport const SparseToDense = 'SparseToDense';\nexport const StridedSlice = 'StridedSlice';\nexport const Tan = 'Tan';\nexport const Tanh = 'Tanh';\nexport const Tile = 'Tile';\nexport const TopK = 'TopK';\nexport const Transpose = 'Transpose';\nexport const Unique = 'Unique';\nexport const Unpack = 'Unpack';\nexport const UnsortedSegmentSum = 'UnsortedSegmentSum';\nexport const ZerosLike = 'ZerosLike';\n/**\n * TensorFlow.js-only kernels\n */\nexport const Step = 'Step';\nexport const FromPixels = 'FromPixels';\nexport const RotateWithOffset = 'RotateWithOffset';\nexport const _FusedMatMul = '_FusedMatMul';\nexport const FusedConv2D = 'FusedConv2D';\nexport const FusedDepthwiseConv2D = 'FusedDepthwiseConv2D';\n//# sourceMappingURL=kernel_names.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from './environment';\nimport { getGlobal } from './global_util';\nconst kernelRegistry = getGlobal('kernelRegistry', () => new Map());\nconst gradRegistry = getGlobal('gradRegistry', () => new Map());\n/**\n * Returns the kernel function (code) associated with the provided names.\n *\n * @param kernelName The official name of the kernel.\n * @param backendName The official name of the backend.\n */\nexport function getKernel(kernelName, backendName) {\n const key = makeKey(kernelName, backendName);\n return kernelRegistry.get(key);\n}\n/**\n * Returns the registered gradient info associated with the provided kernel.\n * @param kernelName The official TF kernel name.\n */\nexport function getGradient(kernelName) {\n return gradRegistry.get(kernelName);\n}\nexport function getKernelsForBackend(backendName) {\n const it = kernelRegistry.entries();\n const result = [];\n while (true) {\n const { done, value } = it.next();\n if (done) {\n break;\n }\n const [key, config] = value;\n const [backend,] = key.split('_');\n if (backend === backendName) {\n result.push(config);\n }\n }\n return result;\n}\n/**\n * Registers the function (forward pass) for the kernel in a global registry.\n *\n * @param config A config object with the following properties:\n * - `kernelName` The official name of the kernel.\n * - `backendName` The official name of the backend.\n * - `kernelFunc` The function to run during the forward pass of the kernel.\n * - `setupFunc` Optional. Gets called once, after the backend initializes.\n * - `disposeFunc` Optional. 
Gets called once, right before the backend is\n * disposed.\n */\nexport function registerKernel(config) {\n const { kernelName, backendName } = config;\n const key = makeKey(kernelName, backendName);\n if (kernelRegistry.has(key)) {\n console.warn(`The kernel '${kernelName}' for backend ` +\n `'${backendName}' is already registered`);\n }\n kernelRegistry.set(key, config);\n}\n/**\n * Registers a gradient function for a given kernel in the global registry,\n * to be used during the back-propagation of that kernel.\n *\n * @param config An object with the following properties:\n * - `kernelName` The name of the kernel that the gradient function is for.\n * - `gradFunc` The function to run during back-propagation.\n */\nexport function registerGradient(config) {\n const { kernelName } = config;\n if (gradRegistry.has(kernelName)) {\n // TODO (yassogba) after 3.0 assess whether we need to keep this gated\n // to debug mode.\n if (env().getBool('DEBUG')) {\n console.warn(`Overriding the gradient for '${kernelName}'`);\n }\n }\n gradRegistry.set(kernelName, config);\n}\n/**\n * Removes the kernel function from the registry.\n *\n * @param kernelName The official name of the kernel.\n * @param backendName The official name of the backend.\n *\n */\nexport function unregisterKernel(kernelName, backendName) {\n const key = makeKey(kernelName, backendName);\n if (!kernelRegistry.has(key)) {\n throw new Error(`The kernel '${kernelName}' for backend ` +\n `'${backendName}' is not registered`);\n }\n kernelRegistry.delete(key);\n}\n/** Removes the registered gradient from the global registry. */\nexport function unregisterGradient(kernelName) {\n if (!gradRegistry.has(kernelName)) {\n throw new Error(`The gradient '${kernelName}' for backend is not registered`);\n }\n gradRegistry.delete(kernelName);\n}\n/**\n * Finds kernels that have already been registered to a backend and re-registers\n * them for a new backend. Useful for registering custom backends.\n * @param registeredBackendName Already registered backend.\n * @param newBackendName New backend.\n */\nexport function copyRegisteredKernels(registeredBackendName, newBackendName) {\n const kernels = getKernelsForBackend(registeredBackendName);\n kernels.forEach(kernelConfig => {\n const newKernelConfig = Object.assign({}, kernelConfig, { backendName: newBackendName });\n registerKernel(newKernelConfig);\n });\n}\nfunction makeKey(kernelName, backendName) {\n return `${backendName}_${kernelName}`;\n}\n//# sourceMappingURL=kernel_registry.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from './environment';\nimport * as base from './util_base';\nexport * from './util_base';\n/**\n * Create typed array for scalar value. 
Used for storing in `DataStorage`.\n */\nexport function createScalarValue(value, dtype) {\n if (dtype === 'string') {\n return encodeString(value);\n }\n return toTypedArray([value], dtype);\n}\nfunction noConversionNeeded(a, dtype) {\n return (a instanceof Float32Array && dtype === 'float32') ||\n (a instanceof Int32Array && dtype === 'int32') ||\n (a instanceof Uint8Array && dtype === 'bool');\n}\nexport function toTypedArray(a, dtype) {\n if (dtype === 'string') {\n throw new Error('Cannot convert a string[] to a TypedArray');\n }\n if (Array.isArray(a)) {\n a = base.flatten(a);\n }\n if (env().getBool('DEBUG')) {\n base.checkConversionForErrors(a, dtype);\n }\n if (noConversionNeeded(a, dtype)) {\n return a;\n }\n if (dtype == null || dtype === 'float32' || dtype === 'complex64') {\n return new Float32Array(a);\n }\n else if (dtype === 'int32') {\n return new Int32Array(a);\n }\n else if (dtype === 'bool') {\n const bool = new Uint8Array(a.length);\n for (let i = 0; i < bool.length; ++i) {\n if (Math.round(a[i]) !== 0) {\n bool[i] = 1;\n }\n }\n return bool;\n }\n else {\n throw new Error(`Unknown data type ${dtype}`);\n }\n}\n/**\n * Returns the current high-resolution time in milliseconds relative to an\n * arbitrary time in the past. It works across different platforms (node.js,\n * browsers).\n *\n * ```js\n * console.log(tf.util.now());\n * ```\n *\n * @doc {heading: 'Util', namespace: 'util'}\n */\nexport function now() {\n return env().platform.now();\n}\n/**\n * Returns a platform-specific implementation of\n * [`fetch`](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API).\n *\n * If `fetch` is defined on the global object (`window`, `process`, etc.),\n * `tf.util.fetch` returns that function.\n *\n * If not, `tf.util.fetch` returns a platform-specific solution.\n *\n * ```js\n * const resource = await tf.util.fetch('https://unpkg.com/@tensorflow/tfjs');\n * // handle response\n * ```\n *\n * @doc {heading: 'Util'}\n */\nexport function fetch(path, requestInits) {\n return env().platform.fetch(path, requestInits);\n}\n/**\n * Encodes the provided string into bytes using the provided encoding scheme.\n *\n * @param s The string to encode.\n * @param encoding The encoding scheme. Defaults to utf-8.\n *\n * @doc {heading: 'Util'}\n */\nexport function encodeString(s, encoding = 'utf-8') {\n encoding = encoding || 'utf-8';\n return env().platform.encode(s, encoding);\n}\n/**\n * Decodes the provided bytes into a string using the provided encoding scheme.\n * @param bytes The bytes to decode.\n *\n * @param encoding The encoding scheme. Defaults to utf-8.\n *\n * @doc {heading: 'Util'}\n */\nexport function decodeString(bytes, encoding = 'utf-8') {\n encoding = encoding || 'utf-8';\n return env().platform.decode(bytes, encoding);\n}\n//# sourceMappingURL=util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as util from './util';\nexport class Profiler {\n constructor(backendTimer, logger) {\n this.backendTimer = backendTimer;\n this.logger = logger;\n if (logger == null) {\n this.logger = new Logger();\n }\n }\n profileKernel(kernelName, inputs, f) {\n let outputs;\n const holdResultWrapperFn = () => {\n outputs = f();\n };\n const timer = this.backendTimer.time(holdResultWrapperFn);\n for (let i = 0; i < outputs.length; i++) {\n const output = outputs[i];\n // Dangling promise here because we don't want to propagate up\n // asynchronicity.\n output.data().then(tensorVals => {\n checkComputationForErrors(tensorVals, output.dtype, kernelName);\n });\n }\n const kernelProfile = {\n kernelName,\n outputs,\n inputs,\n timeMs: timer.then(timing => timing.kernelMs),\n extraInfo: timer.then(timing => timing.getExtraProfileInfo != null ?\n timing.getExtraProfileInfo() :\n '')\n };\n return kernelProfile;\n }\n logKernelProfile(kernelProfile) {\n const { kernelName, outputs, timeMs, inputs, extraInfo } = kernelProfile;\n outputs.forEach(result => {\n Promise.all([result.data(), timeMs, extraInfo]).then(valueContainer => {\n this.logger.logKernelProfile(kernelName, result, valueContainer[0], valueContainer[1], inputs, valueContainer[2]);\n });\n });\n }\n}\nexport function checkComputationForErrors(vals, dtype, kernelName) {\n if (dtype !== 'float32') {\n // Only floating point computations will generate NaN values\n return false;\n }\n for (let i = 0; i < vals.length; i++) {\n const num = vals[i];\n if (isNaN(num) || !isFinite(num)) {\n // Throwing custom exception so behavior is testable.\n console.warn(`Found ${num} in the result of '${kernelName}'`);\n return true;\n }\n }\n return false;\n}\nexport class Logger {\n logKernelProfile(name, result, vals, timeMs, inputs, extraInfo) {\n const time = typeof timeMs === 'number' ? util.rightPad(`${timeMs}ms`, 9) :\n timeMs['error'];\n const paddedName = util.rightPad(name, 25);\n const rank = result.rank;\n const size = result.size;\n const shape = util.rightPad(result.shape.toString(), 14);\n let inputShapesDescription = '';\n for (const name in inputs) {\n const input = inputs[name];\n if (input != null) {\n // The input might be a non-tensor (e.g HTMLImageElement), in which case\n // we claim the output shape as input shape.\n const inputShape = input.shape || result.shape;\n const inputRank = inputShape.length;\n inputShapesDescription +=\n `${name}: ${inputRank}D ${inputRank > 0 ? inputShape : ''} `;\n }\n }\n console.log(`%c${paddedName}\\t%c${time}\\t%c${rank}D ${shape}\\t%c${size}\\t%c${inputShapesDescription}\\t%c${extraInfo}`, 'font-weight:bold', 'color:red', 'color:blue', 'color: orange', 'color: green', 'color: steelblue');\n }\n}\n//# sourceMappingURL=profiler.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as util from './util';\n/**\n * Computes a list of TapeNodes that connect x to y, filtering everything else\n * out and preserving the order of the original tape elements.\n *\n * @param tape The tape elements to filter.\n * @param xs The input Tensors.\n * @param y The output Tensor.\n */\nexport function getFilteredNodesXToY(tape, xs, y) {\n // Forward pass to compute all the nodes and Tensors that are transitively a\n // function of x.\n const tensorsFromX = {};\n const nodesFromX = {};\n for (let i = 0; i < xs.length; i++) {\n tensorsFromX[xs[i].id] = true;\n }\n for (let i = 0; i < tape.length; i++) {\n const node = tape[i];\n const nodeInputs = node.inputs;\n for (const inputName in nodeInputs) {\n const input = nodeInputs[inputName];\n let anyInputFromX = false;\n for (let j = 0; j < xs.length; j++) {\n if (tensorsFromX[input.id]) {\n node.outputs.forEach(output => tensorsFromX[output.id] = true);\n anyInputFromX = true;\n nodesFromX[node.id] = true;\n break;\n }\n }\n if (anyInputFromX) {\n break;\n }\n }\n }\n // Backward pass to find all of the nodes and Tensors that lead to y.\n const tensorsLeadToY = {};\n tensorsLeadToY[y.id] = true;\n const nodesToY = {};\n for (let i = tape.length - 1; i >= 0; i--) {\n const node = tape[i];\n const nodeInputs = node.inputs;\n // If any of the outputs lead to y, mark all of the inputs as leading to y.\n for (let j = 0; j < node.outputs.length; j++) {\n if (tensorsLeadToY[node.outputs[j].id]) {\n for (const inputName in nodeInputs) {\n tensorsLeadToY[nodeInputs[inputName].id] = true;\n nodesToY[node.id] = true;\n }\n break;\n }\n }\n }\n // Return the paths that come from x and lead to y.\n const filteredTape = [];\n for (let i = 0; i < tape.length; i++) {\n const node = tape[i];\n if (nodesFromX[node.id] && nodesToY[node.id]) {\n // Prune the inputs from the node that aren't a function of x.\n const prunedInputs = {};\n for (const inputName in node.inputs) {\n const nodeInput = node.inputs[inputName];\n if (tensorsFromX[nodeInput.id]) {\n prunedInputs[inputName] = nodeInput;\n }\n }\n // Copy the node and overwrite inputsAndArgs to the pruned version.\n const prunedNode = Object.assign({}, node);\n prunedNode.inputs = prunedInputs;\n prunedNode.outputs = node.outputs;\n filteredTape.push(prunedNode);\n }\n }\n return filteredTape;\n}\n/**\n * Backpropagate gradients through the filtered TapeNodes.\n *\n * @param tensorAccumulatedGradientMap A map of Tensor to its gradient. 
This map\n * is mutated by this method.\n * @param filteredTape The filtered TapeNodes to backprop through.\n */\nexport function backpropagateGradients(tensorAccumulatedGradientMap, filteredTape, tidy, add) {\n // Walk the tape backward and keep a map of Tensor to its gradient.\n for (let i = filteredTape.length - 1; i >= 0; i--) {\n const node = filteredTape[i];\n const dys = [];\n node.outputs.forEach(o => {\n const gradTensor = tensorAccumulatedGradientMap[o.id];\n if (gradTensor != null) {\n dys.push(gradTensor);\n }\n else {\n // This particular output is not in the back-propagation subgraph, so it\n // does not affect the final output, thus we put null for its dy.\n dys.push(null);\n }\n });\n if (node.gradient == null) {\n throw new Error(`Cannot compute gradient: gradient function not found ` +\n `for ${node.kernelName}.`);\n }\n // Backprop dy through this node and accumulate gradients over the inputs.\n const inputGradients = node.gradient(dys);\n for (const inputName in node.inputs) {\n if (!(inputName in inputGradients)) {\n throw new Error(`Cannot backprop through input ${inputName}. ` +\n `Available gradients found: ${Object.keys(inputGradients)}.`);\n }\n // Call the gradient function.\n const dx = tidy(() => inputGradients[inputName]());\n if (dx.dtype !== 'float32') {\n throw new Error(`Error in gradient for op ${node.kernelName}. The gradient of input ` +\n `${inputName} must have 'float32' dtype, but has '${dx.dtype}'`);\n }\n const x = node.inputs[inputName];\n if (!util.arraysEqual(dx.shape, x.shape)) {\n throw new Error(`Error in gradient for op ${node.kernelName}. The gradient of input ` +\n `'${inputName}' has shape '${dx.shape}', which does not match ` +\n `the shape of the input '${x.shape}'`);\n }\n if (tensorAccumulatedGradientMap[x.id] == null) {\n tensorAccumulatedGradientMap[x.id] = dx;\n }\n else {\n const curGradient = tensorAccumulatedGradientMap[x.id];\n tensorAccumulatedGradientMap[x.id] = add(curGradient, dx);\n curGradient.dispose();\n }\n }\n }\n}\n//# sourceMappingURL=tape.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { computeStrides, isString, rightPad, sizeFromShape } from './util';\n// Maximum number of values before we decide to show ellipsis.\nconst FORMAT_LIMIT_NUM_VALS = 20;\n// Number of first and last values to show when displaying a, b,...,y, z.\nconst FORMAT_NUM_FIRST_LAST_VALS = 3;\n// Number of significant digits to show.\nconst FORMAT_NUM_SIG_DIGITS = 7;\nexport function tensorToString(vals, shape, dtype, verbose) {\n const strides = computeStrides(shape);\n const padPerCol = computeMaxSizePerColumn(vals, shape, dtype, strides);\n const rank = shape.length;\n const valsLines = subTensorToString(vals, shape, dtype, strides, padPerCol);\n const lines = ['Tensor'];\n if (verbose) {\n lines.push(` dtype: ${dtype}`);\n lines.push(` rank: ${rank}`);\n lines.push(` shape: [${shape}]`);\n lines.push(` values:`);\n }\n lines.push(valsLines.map(l => ' ' + l).join('\\n'));\n return lines.join('\\n');\n}\nfunction computeMaxSizePerColumn(vals, shape, dtype, strides) {\n const n = sizeFromShape(shape);\n const numCols = strides[strides.length - 1];\n const padPerCol = new Array(numCols).fill(0);\n const rank = shape.length;\n const valuesOrTuples = dtype === 'complex64' ? createComplexTuples(vals) : vals;\n if (rank > 1) {\n for (let row = 0; row < n / numCols; row++) {\n const offset = row * numCols;\n for (let j = 0; j < numCols; j++) {\n padPerCol[j] = Math.max(padPerCol[j], valToString(valuesOrTuples[offset + j], 0, dtype).length);\n }\n }\n }\n return padPerCol;\n}\nfunction valToString(val, pad, dtype) {\n let valStr;\n if (Array.isArray(val)) {\n valStr = `${parseFloat(val[0].toFixed(FORMAT_NUM_SIG_DIGITS))} + ` +\n `${parseFloat(val[1].toFixed(FORMAT_NUM_SIG_DIGITS))}j`;\n }\n else if (isString(val)) {\n valStr = `'${val}'`;\n }\n else if (dtype === 'bool') {\n valStr = boolNumToString(val);\n }\n else {\n valStr = parseFloat(val.toFixed(FORMAT_NUM_SIG_DIGITS)).toString();\n }\n return rightPad(valStr, pad);\n}\nfunction boolNumToString(v) {\n return v === 0 ? 'false' : 'true';\n}\nfunction subTensorToString(vals, shape, dtype, strides, padPerCol, isLast = true) {\n const storagePerElement = dtype === 'complex64' ? 
2 : 1;\n const size = shape[0];\n const rank = shape.length;\n if (rank === 0) {\n if (dtype === 'complex64') {\n const complexTuple = createComplexTuples(vals);\n return [valToString(complexTuple[0], 0, dtype)];\n }\n if (dtype === 'bool') {\n return [boolNumToString(vals[0])];\n }\n return [vals[0].toString()];\n }\n if (rank === 1) {\n if (size > FORMAT_LIMIT_NUM_VALS) {\n const firstValsSize = FORMAT_NUM_FIRST_LAST_VALS * storagePerElement;\n let firstVals = Array.from(vals.slice(0, firstValsSize));\n let lastVals = Array.from(vals.slice((size - FORMAT_NUM_FIRST_LAST_VALS) * storagePerElement, size * storagePerElement));\n if (dtype === 'complex64') {\n firstVals = createComplexTuples(firstVals);\n lastVals = createComplexTuples(lastVals);\n }\n return [\n '[' +\n firstVals.map((x, i) => valToString(x, padPerCol[i], dtype))\n .join(', ') +\n ', ..., ' +\n lastVals\n .map((x, i) => valToString(x, padPerCol[size - FORMAT_NUM_FIRST_LAST_VALS + i], dtype))\n .join(', ') +\n ']'\n ];\n }\n const displayVals = dtype === 'complex64' ? createComplexTuples(vals) :\n Array.from(vals);\n return [\n '[' +\n displayVals.map((x, i) => valToString(x, padPerCol[i], dtype))\n .join(', ') +\n ']'\n ];\n }\n // The array is rank 2 or more.\n const subshape = shape.slice(1);\n const substrides = strides.slice(1);\n const stride = strides[0] * storagePerElement;\n const lines = [];\n if (size > FORMAT_LIMIT_NUM_VALS) {\n for (let i = 0; i < FORMAT_NUM_FIRST_LAST_VALS; i++) {\n const start = i * stride;\n const end = start + stride;\n lines.push(...subTensorToString(vals.slice(start, end), subshape, dtype, substrides, padPerCol, false /* isLast */));\n }\n lines.push('...');\n for (let i = size - FORMAT_NUM_FIRST_LAST_VALS; i < size; i++) {\n const start = i * stride;\n const end = start + stride;\n lines.push(...subTensorToString(vals.slice(start, end), subshape, dtype, substrides, padPerCol, i === size - 1 /* isLast */));\n }\n }\n else {\n for (let i = 0; i < size; i++) {\n const start = i * stride;\n const end = start + stride;\n lines.push(...subTensorToString(vals.slice(start, end), subshape, dtype, substrides, padPerCol, i === size - 1 /* isLast */));\n }\n }\n const sep = rank === 2 ? ',' : '';\n lines[0] = '[' + lines[0] + sep;\n for (let i = 1; i < lines.length - 1; i++) {\n lines[i] = ' ' + lines[i] + sep;\n }\n let newLineSep = ',\\n';\n for (let i = 2; i < rank; i++) {\n newLineSep += '\\n';\n }\n lines[lines.length - 1] =\n ' ' + lines[lines.length - 1] + ']' + (isLast ? '' : newLineSep);\n return lines;\n}\nfunction createComplexTuples(vals) {\n const complexTuples = [];\n for (let i = 0; i < vals.length; i += 2) {\n complexTuples.push([vals[i], vals[i + 1]]);\n }\n return complexTuples;\n}\n//# sourceMappingURL=tensor_format.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { tensorToString } from './tensor_format';\nimport * as util from './util';\nimport { computeStrides, toNestedArray } from './util';\n/**\n * A mutable object, similar to `tf.Tensor`, that allows users to set values\n * at locations before converting to an immutable `tf.Tensor`.\n *\n * See `tf.buffer` for creating a tensor buffer.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nexport class TensorBuffer {\n constructor(shape, dtype, values) {\n this.dtype = dtype;\n this.shape = shape.slice();\n this.size = util.sizeFromShape(shape);\n if (values != null) {\n const n = values.length;\n util.assert(n === this.size, () => `Length of values '${n}' does not match the size ` +\n `inferred by the shape '${this.size}'.`);\n }\n if (dtype === 'complex64') {\n throw new Error(`complex64 dtype TensorBuffers are not supported. Please create ` +\n `a TensorBuffer for the real and imaginary parts separately and ` +\n `call tf.complex(real, imag).`);\n }\n this.values = values || util.getArrayFromDType(dtype, this.size);\n this.strides = computeStrides(shape);\n }\n /**\n * Sets a value in the buffer at a given location.\n *\n * @param value The value to set.\n * @param locs The location indices.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\n set(value, ...locs) {\n if (locs.length === 0) {\n locs = [0];\n }\n util.assert(locs.length === this.rank, () => `The number of provided coordinates (${locs.length}) must ` +\n `match the rank (${this.rank})`);\n const index = this.locToIndex(locs);\n this.values[index] = value;\n }\n /**\n * Returns the value in the buffer at the provided location.\n *\n * @param locs The location indices.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\n get(...locs) {\n if (locs.length === 0) {\n locs = [0];\n }\n let i = 0;\n for (const loc of locs) {\n if (loc < 0 || loc >= this.shape[i]) {\n const msg = `Requested out of range element at ${locs}. 
` +\n ` Buffer shape=${this.shape}`;\n throw new Error(msg);\n }\n i++;\n }\n let index = locs[locs.length - 1];\n for (let i = 0; i < locs.length - 1; ++i) {\n index += this.strides[i] * locs[i];\n }\n return this.values[index];\n }\n locToIndex(locs) {\n if (this.rank === 0) {\n return 0;\n }\n else if (this.rank === 1) {\n return locs[0];\n }\n let index = locs[locs.length - 1];\n for (let i = 0; i < locs.length - 1; ++i) {\n index += this.strides[i] * locs[i];\n }\n return index;\n }\n indexToLoc(index) {\n if (this.rank === 0) {\n return [];\n }\n else if (this.rank === 1) {\n return [index];\n }\n const locs = new Array(this.shape.length);\n for (let i = 0; i < locs.length - 1; ++i) {\n locs[i] = Math.floor(index / this.strides[i]);\n index -= locs[i] * this.strides[i];\n }\n locs[locs.length - 1] = index;\n return locs;\n }\n get rank() {\n return this.shape.length;\n }\n /**\n * Creates an immutable `tf.Tensor` object from the buffer.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\n toTensor() {\n return trackerFn().makeTensor(this.values, this.shape, this.dtype);\n }\n}\n// For tracking tensor creation and disposal.\nlet trackerFn = null;\n// Used by chaining methods to call into ops.\nlet opHandler = null;\n// Used to warn about deprecated methods.\nlet deprecationWarningFn = null;\n// This here so that we can use this method on dev branches and keep the\n// functionality at master.\n// tslint:disable-next-line:no-unused-expression\n[deprecationWarningFn];\n/**\n * An external consumer can register itself as the tensor tracker. This way\n * the Tensor class can notify the tracker for every tensor created and\n * disposed.\n */\nexport function setTensorTracker(fn) {\n trackerFn = fn;\n}\n/**\n * An external consumer can register itself as the op handler. This way the\n * Tensor class can have chaining methods that call into ops via the op\n * handler.\n */\nexport function setOpHandler(handler) {\n opHandler = handler;\n}\n/**\n * Sets the deprecation warning function to be used by this file. This way the\n * Tensor class can be a leaf but still use the environment.\n */\nexport function setDeprecationWarningFn(fn) {\n deprecationWarningFn = fn;\n}\n/**\n * A `tf.Tensor` object represents an immutable, multidimensional array of\n * numbers that has a shape and a data type.\n *\n * See `tf.tensor` for details on how to create a `tf.Tensor`.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nexport class Tensor {\n constructor(shape, dtype, dataId, id) {\n /** Whether this tensor has been globally kept. */\n this.kept = false;\n this.isDisposedInternal = false;\n this.shape = shape.slice();\n this.dtype = dtype || 'float32';\n this.size = util.sizeFromShape(shape);\n this.strides = computeStrides(shape);\n this.dataId = dataId;\n this.id = id;\n this.rankType = (this.rank < 5 ? this.rank.toString() : 'higher');\n }\n get rank() {\n return this.shape.length;\n }\n /**\n * Returns a promise of `tf.TensorBuffer` that holds the underlying data.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n async buffer() {\n const vals = await this.data();\n return opHandler.buffer(this.shape, this.dtype, vals);\n }\n /**\n * Returns a `tf.TensorBuffer` that holds the underlying data.\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n bufferSync() {\n return opHandler.buffer(this.shape, this.dtype, this.dataSync());\n }\n /**\n * Returns the tensor data as a nested array. 
The transfer of data is done\n * asynchronously.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n async array() {\n const vals = await this.data();\n return toNestedArray(this.shape, vals);\n }\n /**\n * Returns the tensor data as a nested array. The transfer of data is done\n * synchronously.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n arraySync() {\n return toNestedArray(this.shape, this.dataSync());\n }\n /**\n * Asynchronously downloads the values from the `tf.Tensor`. Returns a\n * promise of `TypedArray` that resolves when the computation has finished.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n async data() {\n this.throwIfDisposed();\n const data = trackerFn().read(this.dataId);\n if (this.dtype === 'string') {\n const bytes = await data;\n try {\n return bytes.map(b => util.decodeString(b));\n }\n catch (_a) {\n throw new Error('Failed to decode the string bytes into utf-8. ' +\n 'To get the original bytes, call tensor.bytes().');\n }\n }\n return data;\n }\n /**\n * Synchronously downloads the values from the `tf.Tensor`. This blocks the\n * UI thread until the values are ready, which can cause performance issues.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n dataSync() {\n this.throwIfDisposed();\n const data = trackerFn().readSync(this.dataId);\n if (this.dtype === 'string') {\n try {\n return data.map(b => util.decodeString(b));\n }\n catch (_a) {\n throw new Error('Failed to decode the string bytes into utf-8. ' +\n 'To get the original bytes, call tensor.bytes().');\n }\n }\n return data;\n }\n /** Returns the underlying bytes of the tensor's data. */\n async bytes() {\n this.throwIfDisposed();\n const data = await trackerFn().read(this.dataId);\n if (this.dtype === 'string') {\n return data;\n }\n else {\n return new Uint8Array(data.buffer);\n }\n }\n /**\n * Disposes `tf.Tensor` from memory.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n dispose() {\n if (this.isDisposed) {\n return;\n }\n trackerFn().disposeTensor(this);\n this.isDisposedInternal = true;\n }\n get isDisposed() {\n return this.isDisposedInternal;\n }\n throwIfDisposed() {\n if (this.isDisposed) {\n throw new Error(`Tensor is disposed.`);\n }\n }\n /**\n * Prints the `tf.Tensor`. See `tf.print` for details.\n *\n * @param verbose Whether to print verbose information about the tensor,\n * including dtype and size.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n print(verbose = false) {\n return opHandler.print(this, verbose);\n }\n /**\n * Returns a copy of the tensor. See `tf.clone` for details.\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n clone() {\n this.throwIfDisposed();\n return opHandler.clone(this);\n }\n /**\n * Returns a human-readable description of the tensor. 
Useful for logging.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n toString(verbose = false) {\n const vals = this.dataSync();\n return tensorToString(vals, this.shape, this.dtype, verbose);\n }\n cast(dtype) {\n this.throwIfDisposed();\n return opHandler.cast(this, dtype);\n }\n variable(trainable = true, name, dtype) {\n this.throwIfDisposed();\n return trackerFn().makeVariable(this, trainable, name, dtype);\n }\n}\nObject.defineProperty(Tensor, Symbol.hasInstance, {\n value: (instance) => {\n // Implementation note: we should use properties of the object that will be\n // defined before the constructor body has finished executing (methods).\n // This is because when this code is transpiled by babel, babel will call\n // classCallCheck before the constructor body is run.\n // See https://github.com/tensorflow/tfjs/issues/3384 for backstory.\n return !!instance && instance.data != null && instance.dataSync != null &&\n instance.throwIfDisposed != null;\n }\n});\n/**\n * A mutable `tf.Tensor`, useful for persisting state, e.g. for training.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nexport class Variable extends Tensor {\n constructor(initialValue, trainable, name, tensorId) {\n super(initialValue.shape, initialValue.dtype, initialValue.dataId, tensorId);\n this.trainable = trainable;\n this.name = name;\n }\n /**\n * Assign a new `tf.Tensor` to this variable. The new `tf.Tensor` must have\n * the same shape and dtype as the old `tf.Tensor`.\n *\n * @param newValue New tensor to be assigned to this variable.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n assign(newValue) {\n if (newValue.dtype !== this.dtype) {\n throw new Error(`dtype of the new value (${newValue.dtype}) and ` +\n `previous value (${this.dtype}) must match`);\n }\n if (!util.arraysEqual(newValue.shape, this.shape)) {\n throw new Error(`shape of the new value (${newValue.shape}) and ` +\n `previous value (${this.shape}) must match`);\n }\n trackerFn().disposeTensor(this);\n this.dataId = newValue.dataId;\n trackerFn().incRef(this, null /* backend */);\n }\n dispose() {\n trackerFn().disposeVariable(this);\n this.isDisposedInternal = true;\n }\n}\nObject.defineProperty(Variable, Symbol.hasInstance, {\n value: (instance) => {\n return instance instanceof Tensor && instance.assign != null &&\n instance.assign instanceof Function;\n }\n});\n//# sourceMappingURL=tensor.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport var Rank;\n(function (Rank) {\n Rank[\"R0\"] = \"R0\";\n Rank[\"R1\"] = \"R1\";\n Rank[\"R2\"] = \"R2\";\n Rank[\"R3\"] = \"R3\";\n Rank[\"R4\"] = \"R4\";\n Rank[\"R5\"] = \"R5\";\n Rank[\"R6\"] = \"R6\";\n})(Rank || (Rank = {}));\n// Looks for upcasting types. 
Used, for example, in operations with mixed dtype\n// inputs.\nvar UpcastInt32AndMap;\n(function (UpcastInt32AndMap) {\n UpcastInt32AndMap[\"float32\"] = \"float32\";\n UpcastInt32AndMap[\"int32\"] = \"int32\";\n UpcastInt32AndMap[\"bool\"] = \"int32\";\n UpcastInt32AndMap[\"complex64\"] = \"complex64\";\n})(UpcastInt32AndMap || (UpcastInt32AndMap = {}));\nvar UpcastBoolAndMap;\n(function (UpcastBoolAndMap) {\n UpcastBoolAndMap[\"float32\"] = \"float32\";\n UpcastBoolAndMap[\"int32\"] = \"int32\";\n UpcastBoolAndMap[\"bool\"] = \"bool\";\n UpcastBoolAndMap[\"complex64\"] = \"complex64\";\n})(UpcastBoolAndMap || (UpcastBoolAndMap = {}));\nvar UpcastFloat32AndMap;\n(function (UpcastFloat32AndMap) {\n UpcastFloat32AndMap[\"float32\"] = \"float32\";\n UpcastFloat32AndMap[\"int32\"] = \"float32\";\n UpcastFloat32AndMap[\"bool\"] = \"float32\";\n UpcastFloat32AndMap[\"complex64\"] = \"complex64\";\n})(UpcastFloat32AndMap || (UpcastFloat32AndMap = {}));\nvar UpcastComplex64AndMap;\n(function (UpcastComplex64AndMap) {\n UpcastComplex64AndMap[\"float32\"] = \"complex64\";\n UpcastComplex64AndMap[\"int32\"] = \"complex64\";\n UpcastComplex64AndMap[\"bool\"] = \"complex64\";\n UpcastComplex64AndMap[\"complex64\"] = \"complex64\";\n})(UpcastComplex64AndMap || (UpcastComplex64AndMap = {}));\nconst upcastTypeMap = {\n 'float32': UpcastFloat32AndMap,\n 'int32': UpcastInt32AndMap,\n 'bool': UpcastBoolAndMap,\n 'complex64': UpcastComplex64AndMap\n};\nexport function upcastType(typeA, typeB) {\n if (typeA === 'string' || typeB === 'string') {\n if (typeA === 'string' && typeB === 'string') {\n return 'string';\n }\n throw new Error(`Can not upcast ${typeA} with ${typeB}`);\n }\n return upcastTypeMap[typeA][typeB];\n}\n/** Returns the output type after summation. */\nexport function sumOutType(type) {\n return upcastType(type, 'int32');\n}\n//# sourceMappingURL=types.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Tensor } from './tensor';\nimport { upcastType } from './types';\nimport { assert } from './util';\nexport function makeTypesMatch(a, b) {\n if (a.dtype === b.dtype) {\n return [a, b];\n }\n const dtype = upcastType(a.dtype, b.dtype);\n return [a.cast(dtype), b.cast(dtype)];\n}\nexport function assertTypesMatch(a, b) {\n assert(a.dtype === b.dtype, () => `The dtypes of the first(${a.dtype}) and` +\n ` second(${b.dtype}) input must match`);\n}\nexport function isTensorInList(tensor, tensorList) {\n return tensorList.some(x => x.id === tensor.id);\n}\n/**\n * Extracts any `Tensor`s found within the provided object.\n *\n * @param container an object that may be a `Tensor` or may directly contain\n * `Tensor`s, such as a `Tensor[]` or `{key: Tensor, ...}`. In general it\n * is safe to pass any object here, except that `Promise`s are not\n * supported.\n * @returns An array of `Tensors` found within the passed object. 
If the\n * argument is simply a `Tensor', a list containing that `Tensor` is\n * returned. If the object is not a `Tensor` or does not\n * contain `Tensors`, an empty list is returned.\n */\nexport function getTensorsInContainer(result) {\n const list = [];\n const seen = new Set();\n walkTensorContainer(result, list, seen);\n return list;\n}\nfunction walkTensorContainer(container, list, seen) {\n if (container == null) {\n return;\n }\n if (container instanceof Tensor) {\n list.push(container);\n return;\n }\n if (!isIterable(container)) {\n return;\n }\n // Iteration over keys works also for arrays.\n const iterable = container;\n for (const k in iterable) {\n const val = iterable[k];\n if (!seen.has(val)) {\n seen.add(val);\n walkTensorContainer(val, list, seen);\n }\n }\n}\n// tslint:disable-next-line:no-any\nfunction isIterable(obj) {\n return Array.isArray(obj) || typeof obj === 'object';\n}\n//# sourceMappingURL=tensor_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { KernelBackend } from './backends/backend';\nimport { Environment, setEnvironmentGlobal } from './environment';\nimport { getGlobalNamespace } from './global_util';\nimport { Add, Cast } from './kernel_names';\nimport { getGradient, getKernel, getKernelsForBackend } from './kernel_registry';\nimport { Profiler } from './profiler';\nimport { backpropagateGradients, getFilteredNodesXToY } from './tape';\nimport { setTensorTracker, Tensor, Variable } from './tensor';\nimport { getTensorsInContainer } from './tensor_util';\nimport * as util from './util';\nimport { bytesFromStringArray, makeOnesTypedArray, now, sizeFromShape } from './util';\nclass EngineState {\n constructor() {\n // Public since optimizers will use it.\n this.registeredVariables = {};\n this.nextTapeNodeId = 0;\n this.numBytes = 0;\n this.numTensors = 0;\n this.numStringTensors = 0;\n this.numDataBuffers = 0;\n // Number of nested tf.grad() statements when computing higher-order\n // gradients. E.g. `1` for first-order gradients and `2` for second-order\n // gradients. Used to track if the tape should be removed after a backprop.\n this.gradientDepth = 0;\n // Number of nested kernel calls. When kernel depth is greater than 1, we turn\n // off the tape.\n this.kernelDepth = 0;\n this.scopeStack = [];\n /**\n * Keeps track of the number of data moves during a kernel execution. 
We\n * maintain a stack since kernels can call other kernels, recursively.\n */\n this.numDataMovesStack = [];\n this.nextScopeId = 0;\n this.tensorInfo = new WeakMap();\n this.profiling = false;\n this.activeProfile = { newBytes: 0, newTensors: 0, peakBytes: 0, kernels: [], result: null };\n }\n dispose() {\n for (const variableName in this.registeredVariables) {\n this.registeredVariables[variableName].dispose();\n }\n }\n}\nexport class Engine {\n constructor(ENV) {\n this.ENV = ENV;\n this.registry = {};\n this.registryFactory = {};\n this.pendingBackendInitId = 0;\n this.state = new EngineState();\n }\n async ready() {\n if (this.pendingBackendInit != null) {\n return this.pendingBackendInit.then(() => { });\n }\n if (this.backendInstance != null) {\n return;\n }\n const sortedBackends = this.getSortedBackends();\n for (let i = 0; i < sortedBackends.length; i++) {\n const backendName = sortedBackends[i];\n const success = await this.initializeBackend(backendName).success;\n if (success) {\n await this.setBackend(backendName);\n return;\n }\n }\n throw new Error(`Could not initialize any backends, all backend initializations ` +\n `failed.`);\n }\n get backend() {\n if (this.pendingBackendInit != null) {\n throw new Error(`Backend '${this.backendName}' has not yet been initialized. Make ` +\n `sure to await tf.ready() or await tf.setBackend() before calling ` +\n `other methods`);\n }\n if (this.backendInstance == null) {\n const { name, asyncInit } = this.initializeBackendsAndReturnBest();\n if (asyncInit) {\n throw new Error(`The highest priority backend '${name}' has not yet been ` +\n `initialized. Make sure to await tf.ready() or ` +\n `await tf.setBackend() before calling other methods`);\n }\n this.setBackend(name);\n }\n return this.backendInstance;\n }\n backendNames() {\n return Object.keys(this.registryFactory);\n }\n findBackend(backendName) {\n if (!(backendName in this.registry)) {\n // If the backend hasn't been initialized but we have a registry entry for\n // it, initialize it and return it.\n if (backendName in this.registryFactory) {\n const { asyncInit } = this.initializeBackend(backendName);\n if (asyncInit) {\n // Backend is not ready yet.\n return null;\n }\n }\n else {\n return null;\n }\n }\n return this.registry[backendName];\n }\n findBackendFactory(backendName) {\n if (!(backendName in this.registryFactory)) {\n return null;\n }\n return this.registryFactory[backendName].factory;\n }\n registerBackend(backendName, factory, priority = 1) {\n if (backendName in this.registryFactory) {\n console.warn(`${backendName} backend was already registered. ` +\n `Reusing existing backend factory.`);\n return false;\n }\n this.registryFactory[backendName] = { factory, priority };\n return true;\n }\n async setBackend(backendName) {\n if (this.registryFactory[backendName] == null) {\n throw new Error(`Backend name '${backendName}' not found in registry`);\n }\n this.backendName = backendName;\n if (this.registry[backendName] == null) {\n this.backendInstance = null;\n const { success, asyncInit } = this.initializeBackend(backendName);\n const result = asyncInit ? 
await success : success;\n if (!result) {\n return false;\n }\n }\n this.backendInstance = this.registry[backendName];\n this.setupRegisteredKernels();\n // Reset the profiler.\n this.profiler = new Profiler(this.backendInstance);\n return true;\n }\n setupRegisteredKernels() {\n const kernels = getKernelsForBackend(this.backendName);\n kernels.forEach(kernel => {\n if (kernel.setupFunc != null) {\n kernel.setupFunc(this.backendInstance);\n }\n });\n }\n disposeRegisteredKernels(backendName) {\n const kernels = getKernelsForBackend(backendName);\n kernels.forEach(kernel => {\n if (kernel.disposeFunc != null) {\n kernel.disposeFunc(this.registry[backendName]);\n }\n });\n }\n /**\n * Initializes a backend by looking up the backend name in the factory\n * registry and calling the factory method. Returns a boolean representing\n * whether the initialization of the backend suceeded. Throws an error if\n * there is no backend in the factory registry.\n */\n initializeBackend(backendName) {\n const registryFactoryEntry = this.registryFactory[backendName];\n if (registryFactoryEntry == null) {\n throw new Error(`Cannot initialize backend ${backendName}, no registration found.`);\n }\n try {\n const backend = registryFactoryEntry.factory();\n /* Test if the factory returns a promise.\n Done in a more liberal way than\n previous 'Promise.resolve(backend)===backend'\n as we needed to account for custom Promise\n implementations (e.g. Angular) */\n if (backend && !(backend instanceof KernelBackend)\n && typeof backend.then === 'function') {\n const promiseId = ++this.pendingBackendInitId;\n const success = backend\n .then(backendInstance => {\n // Outdated promise. Another backend was set in the meantime.\n if (promiseId < this.pendingBackendInitId) {\n return false;\n }\n this.registry[backendName] = backendInstance;\n this.pendingBackendInit = null;\n return true;\n })\n .catch(err => {\n // Outdated promise. Another backend was set in the meantime.\n if (promiseId < this.pendingBackendInitId) {\n return false;\n }\n this.pendingBackendInit = null;\n console.warn(`Initialization of backend ${backendName} failed`);\n console.warn(err.stack || err.message);\n return false;\n });\n this.pendingBackendInit = success;\n return { success, asyncInit: true };\n }\n else {\n this.registry[backendName] = backend;\n return { success: true, asyncInit: false };\n }\n }\n catch (err) {\n console.warn(`Initialization of backend ${backendName} failed`);\n console.warn(err.stack || err.message);\n return { success: false, asyncInit: false };\n }\n }\n removeBackend(backendName) {\n if (!(backendName in this.registryFactory)) {\n throw new Error(`${backendName} backend not found in registry`);\n }\n if (this.backendName === backendName && this.pendingBackendInit != null) {\n // There is a pending promise of the backend we want to remove. 
Make it\n // obsolete.\n this.pendingBackendInitId++;\n }\n if (backendName in this.registry) {\n this.disposeRegisteredKernels(backendName);\n this.registry[backendName].dispose();\n delete this.registry[backendName];\n }\n delete this.registryFactory[backendName];\n // Unset the backend if it is active.\n if (this.backendName === backendName) {\n this.pendingBackendInit = null;\n this.backendName = null;\n this.backendInstance = null;\n }\n }\n getSortedBackends() {\n if (Object.keys(this.registryFactory).length === 0) {\n throw new Error('No backend found in registry.');\n }\n return Object.keys(this.registryFactory).sort((a, b) => {\n // Highest priority comes first.\n return this.registryFactory[b].priority -\n this.registryFactory[a].priority;\n });\n }\n initializeBackendsAndReturnBest() {\n const sortedBackends = this.getSortedBackends();\n for (let i = 0; i < sortedBackends.length; i++) {\n const backendName = sortedBackends[i];\n const { success, asyncInit } = this.initializeBackend(backendName);\n if (asyncInit || success) {\n return { name: backendName, asyncInit };\n }\n }\n throw new Error(`Could not initialize any backends, all backend initializations ` +\n `failed.`);\n }\n moveData(backend, dataId) {\n const info = this.state.tensorInfo.get(dataId);\n const srcBackend = info.backend;\n const values = this.readSync(dataId);\n // Delete the tensor from the old backend and move it to the new\n // backend.\n srcBackend.disposeData(dataId);\n info.backend = backend;\n backend.move(dataId, values, info.shape, info.dtype);\n if (this.shouldCheckForMemLeaks()) {\n // Track the number of moves during a kernel execution to correctly\n // detect memory leaks.\n this.state.numDataMovesStack[this.state.numDataMovesStack.length - 1]++;\n }\n }\n tidy(nameOrFn, fn) {\n let name = null;\n if (fn == null) {\n // Called with only 1 argument.\n if (typeof nameOrFn !== 'function') {\n throw new Error('Please provide a function to tidy()');\n }\n fn = nameOrFn;\n }\n else {\n // Called with 2 arguments.\n if (typeof nameOrFn !== 'string' && !(nameOrFn instanceof String)) {\n throw new Error('When calling with two arguments, the first argument ' +\n 'to tidy() must be a string');\n }\n if (typeof fn !== 'function') {\n throw new Error('When calling with two arguments, the 2nd argument ' +\n 'to tidy() must be a function');\n }\n name = nameOrFn;\n // TODO(nsthorat,smilkov): Do operation logging and performance\n // profiling.\n }\n let result;\n return this.scopedRun(() => this.startScope(name), () => this.endScope(result), () => {\n result = fn();\n if (result instanceof Promise) {\n console.error('Cannot return a Promise inside of tidy.');\n }\n return result;\n });\n }\n scopedRun(start, end, f) {\n start();\n try {\n const res = f();\n end();\n return res;\n }\n catch (ex) {\n end();\n throw ex;\n }\n }\n nextTensorId() {\n return Engine.nextTensorId++;\n }\n nextVariableId() {\n return Engine.nextVariableId++;\n }\n /**\n * This method is called instead of the public-facing tensor.clone() when\n * saving a tensor for backwards pass. 
It makes sure to add the clone\n * operation to the tape regardless of being called inside a kernel\n * execution.\n *\n * This method will go away once all kernels are modularized since we won't\n * need to turn off the tape inside runKernel().\n */\n clone(x) {\n const y = this.makeTensorFromDataId(x.dataId, x.shape, x.dtype);\n const inputs = { x };\n const grad = (dy) => ({\n x: () => {\n const dtype = 'float32';\n const gradInputs = { x: dy };\n const attrs = { dtype };\n return ENGINE.runKernelFunc(backend => backend.cast(dy, dtype), gradInputs, null /* grad */, Cast, attrs);\n }\n });\n const saved = [];\n this.addTapeNode(this.state.activeScope.name, inputs, [y], grad, saved, {});\n return y;\n }\n /**\n * Execute a kernel with the given name and return the output tensor.\n *\n * @param kernelName The name of the kernel to execute.\n * @param inputs A map of input names to tensors.\n * @param attrs A map of attribute names to their values. An attribute is a\n * primitive (non-tensor) input to the kernel.\n * @param inputsToSave A list of tensors, inputs to save for the backprop\n * computation.\n * @param outputsToSave A list of booleans, specifying which output to save\n * for the backprop computation. These are booleans since the output\n * tensors are not visible to the user.\n */\n runKernel(kernelName, inputs, attrs, inputsToSave, outputsToSave) {\n const forwardFunc = null;\n const backwardsFunc = null;\n // Call runKernel as a stop-gap until we modularize all kernels.\n // Once we modularize all kernels, we will remove the existing\n // `runKernelFunc`.\n return this.runKernelFunc(forwardFunc, inputs, backwardsFunc, kernelName, attrs, inputsToSave, outputsToSave);\n }\n shouldCheckForMemLeaks() {\n return this.ENV.getBool('IS_TEST');\n }\n checkKernelForMemLeak(kernelName, numDataIdsBefore, outInfos) {\n const numDataIdsAfter = this.backend.numDataIds();\n // Count the number of data ids associated with the result of the kernel.\n let numOutputDataIds = 0;\n outInfos.forEach(info => {\n // Complex numbers allocate 3 data ids, one for 'real', one for\n // 'imaginary', and one for the container that holds the former two.\n numOutputDataIds += (info.dtype === 'complex64' ? 3 : 1);\n });\n // Account for the number of moves during kernel execution. A \"data move\"\n // can happen in the middle of a kernel execution, placing a new (key,value)\n // pair in the data storage. Since data moves have net zero effect (we\n // always remove the data from the old backend), we have to cancel them out\n // when detecting memory leaks.\n const numMoves = this.state.numDataMovesStack[this.state.numDataMovesStack.length - 1];\n const dataIdsLeaked = numDataIdsAfter - numDataIdsBefore - numOutputDataIds - numMoves;\n if (dataIdsLeaked > 0) {\n throw new Error(`Backend '${this.backendName}' has an internal memory leak ` +\n `(${dataIdsLeaked} data ids) after running '${kernelName}'`);\n }\n }\n /**\n * @deprecated Use `runKernel` for newly added kernels. Keep using this method\n * only for kernels that are not yet fully modularized.\n */\n runKernelFunc(forwardFunc, inputs, backwardsFunc, kernelName, attrs, inputsToSave, outputsToSave) {\n let outputs;\n let saved = [];\n const isTapeOn = this.isTapeOn();\n if (kernelName == null) {\n kernelName =\n this.state.activeScope != null ? 
this.state.activeScope.name : '';\n }\n const startingBytecount = this.state.numBytes;\n const startingNumTensors = this.state.numTensors;\n if (this.shouldCheckForMemLeaks()) {\n this.state.numDataMovesStack.push(0);\n }\n let kernelFunc;\n const kernel = getKernel(kernelName, this.backendName);\n let out;\n if (kernel != null) {\n kernelFunc = () => {\n const numDataIdsBefore = this.backend.numDataIds();\n out = kernel.kernelFunc({ inputs, attrs, backend: this.backend });\n const outInfos = Array.isArray(out) ? out : [out];\n if (this.shouldCheckForMemLeaks()) {\n this.checkKernelForMemLeak(kernelName, numDataIdsBefore, outInfos);\n }\n const outTensors = outInfos.map(({ dataId, shape, dtype }) => this.makeTensorFromDataId(dataId, shape, dtype));\n // Save the inputs and outputs.\n // Do not save unless we are recording to the tape. Otherwise it would\n // cause a mem leak since we would never run backprop, which disposes\n // the kept tensors.\n if (isTapeOn) {\n let tensorsToSave = this.getTensorsForGradient(kernelName, inputs, outTensors);\n if (tensorsToSave == null) {\n // Fallback for ops that call runKernelFunc and pass in\n // inputsToSave and outputsToSave. Currently this is the set of ops\n // with kernel support in the WASM backend. Once those ops and\n // respective gradients are modularised we can remove this path.\n if (outputsToSave == null) {\n outputsToSave = [];\n }\n const outsToSave = outTensors.filter((_, i) => outputsToSave[i]);\n tensorsToSave = (inputsToSave || []).slice().concat(outsToSave);\n }\n saved = this.saveTensorsForBackwardMode(tensorsToSave);\n }\n return outTensors;\n };\n }\n else {\n const saveFunc = (tensors) => {\n // Do not save unless we are recording to the tape. Otherwise it would\n // cause a mem leak since we would never run backprop, which disposes\n // the kept tensors.\n if (!isTapeOn) {\n return;\n }\n saved = tensors.map(tensor => this.keep(this.clone(tensor)));\n };\n kernelFunc = () => {\n const numDataIdsBefore = this.backend.numDataIds();\n out = this.tidy(() => forwardFunc(this.backend, saveFunc));\n const outs = (Array.isArray(out) ? out : [out]);\n if (this.shouldCheckForMemLeaks()) {\n this.checkKernelForMemLeak(kernelName, numDataIdsBefore, outs);\n }\n return outs;\n };\n }\n // Stop recording to a tape when running a kernel.\n let kernelProfile;\n this.scopedRun(() => this.state.kernelDepth++, () => this.state.kernelDepth--, () => {\n if (!this.ENV.getBool('DEBUG') && !this.state.profiling) {\n outputs = kernelFunc();\n }\n else {\n kernelProfile = this.profiler.profileKernel(kernelName, inputs, () => kernelFunc());\n if (this.ENV.getBool('DEBUG')) {\n this.profiler.logKernelProfile(kernelProfile);\n }\n outputs = kernelProfile.outputs;\n }\n });\n if (isTapeOn) {\n this.addTapeNode(kernelName, inputs, outputs, backwardsFunc, saved, attrs);\n }\n if (this.state.profiling) {\n this.state.activeProfile.kernels.push({\n name: kernelName,\n bytesAdded: this.state.numBytes - startingBytecount,\n totalBytesSnapshot: this.state.numBytes,\n tensorsAdded: this.state.numTensors - startingNumTensors,\n totalTensorsSnapshot: this.state.numTensors,\n inputShapes: Object.keys(inputs).map(key => inputs[key] != null ? inputs[key].shape : null),\n outputShapes: outputs.map(item => item.shape),\n kernelTimeMs: kernelProfile.timeMs,\n extraInfo: kernelProfile.extraInfo\n });\n }\n return (Array.isArray(out) ? 
outputs : outputs[0]);\n }\n /**\n * Saves tensors used in forward mode for use in backward mode.\n *\n * @param tensors the list of tensors to save.\n */\n saveTensorsForBackwardMode(tensors) {\n const saved = tensors.map(tensor => this.keep(this.clone(tensor)));\n return saved;\n }\n /**\n * Returns a list of tensors to save for a given gradient calculation.\n *\n * Returns undefined if their is no registered gradient for this kernel in the\n * gradient registry.\n *\n * @param kernelName name of kernel to look up gradient for.\n * @param inputs a map of input tensors.\n * @param outputs an array of output tensors from forward mode of kernel.\n */\n getTensorsForGradient(kernelName, inputs, outputs) {\n const gradConfig = getGradient(kernelName);\n if (gradConfig != null) {\n const inputsToSave = gradConfig.inputsToSave || [];\n const outputsToSave = gradConfig.outputsToSave || [];\n // If saveAllInputs is true, all inputs will be saved. Otherwise, inputs\n // specified in inputsToSave will be saved.\n let inputTensorsToSave;\n if (gradConfig.saveAllInputs) {\n util.assert(Array.isArray(inputs), () => 'saveAllInputs is true, expected inputs to be an array.');\n inputTensorsToSave = Object.keys(inputs).map((key) => inputs[key]);\n }\n else {\n inputTensorsToSave = inputsToSave.map((inputName) => inputs[inputName]);\n }\n const outputTensorsToSave = outputs.filter((_, i) => outputsToSave[i]);\n return inputTensorsToSave.concat(outputTensorsToSave);\n }\n // TODO(yassogba) throw exception here once all runkernelFunc calls with\n // inputsToSave/outputsToSave are removed\n return null;\n }\n /**\n * Internal method used by public APIs for tensor creation. Makes a new\n * tensor with the provided shape, dtype and values. It always\n * creates a new data id and writes the values to the underlying backend.\n */\n makeTensor(values, shape, dtype, backend) {\n if (values == null) {\n throw new Error('Values passed to engine.makeTensor() are null');\n }\n dtype = dtype || 'float32';\n backend = backend || this.backend;\n let backendVals = values;\n if (dtype === 'string' && util.isString(values[0])) {\n backendVals = values.map(d => util.encodeString(d));\n }\n const dataId = backend.write(backendVals, shape, dtype);\n const t = new Tensor(shape, dtype, dataId, this.nextTensorId());\n this.incRef(t, backend);\n // Count bytes for string tensors.\n if (dtype === 'string') {\n const info = this.state.tensorInfo.get(dataId);\n const newBytes = bytesFromStringArray(backendVals);\n this.state.numBytes += newBytes - info.bytes;\n info.bytes = newBytes;\n }\n return t;\n }\n /**\n * Internal method used by backends. Makes a new tensor\n * that is a wrapper around an existing data id. 
It doesn't create\n * a new data id, only increments the ref count used in memory tracking.\n */\n makeTensorFromDataId(dataId, shape, dtype, backend) {\n dtype = dtype || 'float32';\n const t = new Tensor(shape, dtype, dataId, this.nextTensorId());\n this.incRef(t, backend);\n return t;\n }\n makeVariable(initialValue, trainable = true, name, dtype) {\n name = name || this.nextVariableId().toString();\n if (dtype != null && dtype !== initialValue.dtype) {\n initialValue = initialValue.cast(dtype);\n }\n const v = new Variable(initialValue, trainable, name, this.nextTensorId());\n if (this.state.registeredVariables[v.name] != null) {\n throw new Error(`Variable with name ${v.name} was already registered`);\n }\n this.state.registeredVariables[v.name] = v;\n this.incRef(v, this.backend);\n return v;\n }\n incRef(a, backend) {\n const refCount = this.state.tensorInfo.has(a.dataId) ?\n this.state.tensorInfo.get(a.dataId).refCount :\n 0;\n this.state.numTensors++;\n if (a.dtype === 'string') {\n this.state.numStringTensors++;\n }\n if (refCount === 0) {\n this.state.numDataBuffers++;\n // Bytes for complex numbers are counted by their components. Bytes for\n // string tensors are counted when writing values.\n let bytes = 0;\n if (a.dtype !== 'complex64' && a.dtype !== 'string') {\n bytes = a.size * util.bytesPerElement(a.dtype);\n }\n this.state.tensorInfo.set(a.dataId, {\n backend: backend || this.backend,\n dtype: a.dtype,\n shape: a.shape,\n bytes,\n refCount: 0\n });\n this.state.numBytes += bytes;\n }\n this.state.tensorInfo.get(a.dataId).refCount++;\n if (!(a instanceof Variable)) {\n this.track(a);\n }\n }\n disposeTensor(a) {\n if (!this.state.tensorInfo.has(a.dataId)) {\n return;\n }\n this.state.numTensors--;\n if (a.dtype === 'string') {\n this.state.numStringTensors--;\n }\n const info = this.state.tensorInfo.get(a.dataId);\n const refCount = info.refCount;\n if (refCount <= 1) {\n // Don't count bytes for complex numbers as they are counted by their\n // components.\n if (a.dtype !== 'complex64') {\n this.state.numBytes -= info.bytes;\n }\n this.state.numDataBuffers--;\n info.backend.disposeData(a.dataId);\n this.state.tensorInfo.delete(a.dataId);\n }\n else {\n this.state.tensorInfo.get(a.dataId).refCount--;\n }\n // TODO(nsthorat): Construct an error and save the stack trace for\n // debugging when in debug mode. 
Creating a stack trace is too expensive\n // to do unconditionally.\n }\n disposeVariables() {\n for (const varName in this.state.registeredVariables) {\n const v = this.state.registeredVariables[varName];\n this.disposeVariable(v);\n }\n }\n disposeVariable(v) {\n this.disposeTensor(v);\n if (this.state.registeredVariables[v.name] != null) {\n delete this.state.registeredVariables[v.name];\n }\n }\n memory() {\n const info = this.backend.memory();\n info.numTensors = this.state.numTensors;\n info.numDataBuffers = this.state.numDataBuffers;\n info.numBytes = this.state.numBytes;\n if (this.state.numStringTensors > 0) {\n info.unreliable = true;\n if (info.reasons == null) {\n info.reasons = [];\n }\n info.reasons.push('Memory usage by string tensors is approximate ' +\n '(2 bytes per character)');\n }\n return info;\n }\n async profile(query) {\n this.state.profiling = true;\n const startBytes = this.state.numBytes;\n const startNumTensors = this.state.numTensors;\n this.state.activeProfile.kernels = [];\n this.state.activeProfile.result = await query();\n this.state.profiling = false;\n this.state.activeProfile.peakBytes = Math.max(...this.state.activeProfile.kernels.map(d => d.totalBytesSnapshot));\n this.state.activeProfile.newBytes = this.state.numBytes - startBytes;\n this.state.activeProfile.newTensors =\n this.state.numTensors - startNumTensors;\n for (const kernel of this.state.activeProfile.kernels) {\n kernel.kernelTimeMs = await kernel.kernelTimeMs;\n kernel.extraInfo = await kernel.extraInfo;\n }\n return this.state.activeProfile;\n }\n isTapeOn() {\n return this.state.gradientDepth > 0 && this.state.kernelDepth === 0;\n }\n addTapeNode(kernelName, inputs, outputs, gradientsFunc, saved, attrs) {\n const tapeNode = { id: this.state.nextTapeNodeId++, kernelName, inputs, outputs, saved };\n const gradConfig = getGradient(kernelName);\n if (gradConfig != null) {\n gradientsFunc = gradConfig.gradFunc;\n }\n if (gradientsFunc != null) {\n tapeNode.gradient = (dys) => {\n // TODO(smilkov): To optimize back-prop, pass dys that are not used in\n // the backprop graph to the user as null instead of zeros\n dys = dys.map((dy, i) => {\n if (dy == null) {\n const output = outputs[i];\n const vals = util.makeZerosTypedArray(output.size, output.dtype);\n return this.makeTensor(vals, output.shape, output.dtype);\n }\n return dy;\n });\n // Grad functions of ops with single outputs expect a dy, while ops\n // with multiple outputs expect dys (array of dy).\n return gradientsFunc(dys.length > 1 ? dys : dys[0], saved, attrs);\n };\n }\n this.state.activeTape.push(tapeNode);\n }\n keep(result) {\n result.kept = true;\n return result;\n }\n startTape() {\n if (this.state.gradientDepth === 0) {\n this.state.activeTape = [];\n }\n this.state.gradientDepth++;\n }\n endTape() {\n this.state.gradientDepth--;\n }\n /**\n * Start a scope. Use this with endScope() to achieve the same functionality\n * as scope() without the need for a function closure.\n */\n startScope(name) {\n const scopeInfo = {\n track: [],\n name: 'unnamed scope',\n id: this.state.nextScopeId++\n };\n if (name) {\n scopeInfo.name = name;\n }\n this.state.scopeStack.push(scopeInfo);\n this.state.activeScope = scopeInfo;\n }\n /**\n * End a scope. 
Use this with startScope() to achieve the same functionality\n * as scope() without the need for a function closure.\n */\n endScope(result) {\n const tensorsToTrackInParent = getTensorsInContainer(result);\n const tensorsToTrackInParentSet = new Set(tensorsToTrackInParent.map(t => t.id));\n // Dispose the arrays tracked in this scope.\n for (let i = 0; i < this.state.activeScope.track.length; i++) {\n const tensor = this.state.activeScope.track[i];\n if (!tensor.kept && !tensorsToTrackInParentSet.has(tensor.id)) {\n tensor.dispose();\n }\n }\n const oldScope = this.state.scopeStack.pop();\n this.state.activeScope = this.state.scopeStack.length === 0 ?\n null :\n this.state.scopeStack[this.state.scopeStack.length - 1];\n // Track the current result in the parent scope.\n tensorsToTrackInParent.forEach(tensor => {\n // Only track the tensor if was allocated in the inner scope and is not\n // globally kept.\n if (!tensor.kept && tensor.scopeId === oldScope.id) {\n this.track(tensor);\n }\n });\n }\n /**\n * Returns gradients of `f` with respect to each of the `xs`. The gradients\n * returned are of the same length as `xs`, but some might be null if `f`\n * was not a function of that `x`. It also takes optional dy to multiply the\n * gradient, which defaults to `1`.\n */\n gradients(f, xs, dy, allowNoGradients = false) {\n util.assert(xs.length > 0, () => 'gradients() received an empty list of xs.');\n if (dy != null && dy.dtype !== 'float32') {\n throw new Error(`dy must have 'float32' dtype, but has '${dy.dtype}'`);\n }\n const y = this.scopedRun(() => this.startTape(), () => this.endTape(), () => this.tidy('forward', f));\n util.assert(y instanceof Tensor, () => 'The result y returned by f() must be a tensor.');\n // Filter out the nodes that don't connect x => y.\n const filteredTape = getFilteredNodesXToY(this.state.activeTape, xs, y);\n if (!allowNoGradients && filteredTape.length === 0 && xs.length > 0) {\n throw new Error('Cannot compute gradient of y=f(x) with respect to x. Make sure ' +\n 'that the f you passed encloses all operations that lead from x ' +\n 'to y.');\n }\n return this.tidy('backward', () => {\n const accumulatedGradientMap = {};\n accumulatedGradientMap[y.id] = (dy == null) ? ones(y.shape) : dy;\n // Backprop gradients through the filtered nodes.\n backpropagateGradients(accumulatedGradientMap, filteredTape, \n // Pass the tidy function to avoid circular dep with `tape.ts`.\n f => this.tidy(f), \n // Pass an add function to avoide a circular dep with `tape.ts`.\n add);\n const grads = xs.map(x => accumulatedGradientMap[x.id]);\n if (this.state.gradientDepth === 0) {\n // This means that we are not computing higher-order gradients\n // and can clean up the tape.\n this.state.activeTape.forEach(node => {\n for (const tensor of node.saved) {\n tensor.dispose();\n }\n });\n this.state.activeTape = null;\n }\n return { value: y, grads };\n });\n }\n customGrad(f) {\n util.assert(util.isFunction(f), () => 'The f passed in customGrad(f) must be a function.');\n return (...inputs) => {\n util.assert(inputs.every(t => t instanceof Tensor), () => 'The args passed in customGrad(f)(x1, x2,...) 
must all be ' +\n 'tensors');\n let res;\n const inputMap = {};\n inputs.forEach((input, i) => {\n inputMap[i] = input;\n });\n return this.runKernelFunc((_, save) => {\n res = f(...[...inputs, save]);\n util.assert(res.value instanceof Tensor, () => 'The function f passed in customGrad(f) must return an ' +\n 'object where `obj.value` is a tensor');\n util.assert(util.isFunction(res.gradFunc), () => 'The function f passed in customGrad(f) must return an ' +\n 'object where `obj.gradFunc` is a function.');\n return res.value;\n }, inputMap, (dy, saved) => {\n const gradRes = res.gradFunc(dy, saved);\n const grads = Array.isArray(gradRes) ? gradRes : [gradRes];\n util.assert(grads.length === inputs.length, () => 'The function f passed in customGrad(f) must return an ' +\n 'object where `obj.gradFunc` is a function that returns ' +\n 'the same number of tensors as inputs passed to f(...).');\n util.assert(grads.every(t => t instanceof Tensor), () => 'The function f passed in customGrad(f) must return an ' +\n 'object where `obj.gradFunc` is a function that returns ' +\n 'a list of only tensors.');\n const gradMap = {};\n grads.forEach((grad, i) => {\n gradMap[i] = () => grad;\n });\n return gradMap;\n });\n };\n }\n readSync(dataId) {\n // Route the read to the correct backend.\n const info = this.state.tensorInfo.get(dataId);\n return info.backend.readSync(dataId);\n }\n read(dataId) {\n // Route the read to the correct backend.\n const info = this.state.tensorInfo.get(dataId);\n return info.backend.read(dataId);\n }\n async time(query) {\n const start = now();\n const timingInfo = await this.backend.time(query);\n timingInfo.wallMs = now() - start;\n return timingInfo;\n }\n /**\n * Tracks a Tensor in the current scope to be automatically cleaned up\n * when the current scope ends, and returns the value.\n *\n * @param result The Tensor to track in the current scope.\n */\n track(result) {\n if (this.state.activeScope != null) {\n result.scopeId = this.state.activeScope.id;\n this.state.activeScope.track.push(result);\n }\n return result;\n }\n get registeredVariables() {\n return this.state.registeredVariables;\n }\n /**\n * Resets the engine state. 
Removes all backends but does not remove\n * registered backend factories.\n */\n reset() {\n // Make any pending promise obsolete.\n this.pendingBackendInitId++;\n this.state.dispose();\n this.ENV.reset();\n this.state = new EngineState();\n for (const backendName in this.registry) {\n this.disposeRegisteredKernels(backendName);\n this.registry[backendName].dispose();\n delete this.registry[backendName];\n }\n this.backendName = null;\n this.backendInstance = null;\n this.pendingBackendInit = null;\n }\n}\nEngine.nextTensorId = 0;\nEngine.nextVariableId = 0;\nfunction ones(shape) {\n const values = makeOnesTypedArray(sizeFromShape(shape), 'float32');\n return ENGINE.makeTensor(values, shape, 'float32');\n}\nexport function getOrMakeEngine() {\n const ns = getGlobalNamespace();\n if (ns._tfengine == null) {\n const environment = new Environment(ns);\n ns._tfengine = new Engine(environment);\n }\n setEnvironmentGlobal(ns._tfengine.ENV);\n // Tell the current tensor interface that the global engine is responsible\n // for tracking.\n setTensorTracker(() => ns._tfengine);\n return ns._tfengine;\n}\nexport const ENGINE = getOrMakeEngine();\n/**\n * A implementation of the add op for use within engine and tape.\n *\n * This allows us to avoid a circular dependency between add.ts and engine.\n * It is exported to be available in tape tests.\n */\nexport function add(a, b) {\n // We duplicate Add here to avoid a circular dependency with add.ts.\n const inputs = { a, b };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.add(a, b);\n save([a, b]);\n return res;\n }, inputs, null /* gradient */, Add);\n}\n//# sourceMappingURL=engine.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// tslint:disable-next-line:no-any\nfunction _isNavigatorDefined() {\n return typeof navigator !== 'undefined' && navigator != null;\n}\nexport function isMobile() {\n if (_isNavigatorDefined()) {\n // tslint:disable-next-line:no-any\n const a = navigator.userAgent || navigator.vendor || window.opera;\n // tslint:disable-next-line:max-line-length\n return /(android|bb\\d+|meego).+mobile|avantgo|bada\\/|blackberry|blazer|compal|elaine|fennec|hiptop|iemobile|ip(hone|od)|iris|kindle|lge |maemo|midp|mmp|mobile.+firefox|netfront|opera m(ob|in)i|palm( os)?|phone|p(ixi|re)\\/|plucker|pocket|psp|series(4|6)0|symbian|treo|up\\.(browser|link)|vodafone|wap|windows ce|xda|xiino/i\n .test(a) ||\n // tslint:disable-next-line:max-line-length\n /1207|6310|6590|3gso|4thp|50[1-6]i|770s|802s|a wa|abac|ac(er|oo|s\\-)|ai(ko|rn)|al(av|ca|co)|amoi|an(ex|ny|yw)|aptu|ar(ch|go)|as(te|us)|attw|au(di|\\-m|r |s 
)|avan|be(ck|ll|nq)|bi(lb|rd)|bl(ac|az)|br(e|v)w|bumb|bw\\-(n|u)|c55\\/|capi|ccwa|cdm\\-|cell|chtm|cldc|cmd\\-|co(mp|nd)|craw|da(it|ll|ng)|dbte|dc\\-s|devi|dica|dmob|do(c|p)o|ds(12|\\-d)|el(49|ai)|em(l2|ul)|er(ic|k0)|esl8|ez([4-7]0|os|wa|ze)|fetc|fly(\\-|_)|g1 u|g560|gene|gf\\-5|g\\-mo|go(\\.w|od)|gr(ad|un)|haie|hcit|hd\\-(m|p|t)|hei\\-|hi(pt|ta)|hp( i|ip)|hs\\-c|ht(c(\\-| |_|a|g|p|s|t)|tp)|hu(aw|tc)|i\\-(20|go|ma)|i230|iac( |\\-|\\/)|ibro|idea|ig01|ikom|im1k|inno|ipaq|iris|ja(t|v)a|jbro|jemu|jigs|kddi|keji|kgt( |\\/)|klon|kpt |kwc\\-|kyo(c|k)|le(no|xi)|lg( g|\\/(k|l|u)|50|54|\\-[a-w])|libw|lynx|m1\\-w|m3ga|m50\\/|ma(te|ui|xo)|mc(01|21|ca)|m\\-cr|me(rc|ri)|mi(o8|oa|ts)|mmef|mo(01|02|bi|de|do|t(\\-| |o|v)|zz)|mt(50|p1|v )|mwbp|mywa|n10[0-2]|n20[2-3]|n30(0|2)|n50(0|2|5)|n7(0(0|1)|10)|ne((c|m)\\-|on|tf|wf|wg|wt)|nok(6|i)|nzph|o2im|op(ti|wv)|oran|owg1|p800|pan(a|d|t)|pdxg|pg(13|\\-([1-8]|c))|phil|pire|pl(ay|uc)|pn\\-2|po(ck|rt|se)|prox|psio|pt\\-g|qa\\-a|qc(07|12|21|32|60|\\-[2-7]|i\\-)|qtek|r380|r600|raks|rim9|ro(ve|zo)|s55\\/|sa(ge|ma|mm|ms|ny|va)|sc(01|h\\-|oo|p\\-)|sdk\\/|se(c(\\-|0|1)|47|mc|nd|ri)|sgh\\-|shar|sie(\\-|m)|sk\\-0|sl(45|id)|sm(al|ar|b3|it|t5)|so(ft|ny)|sp(01|h\\-|v\\-|v )|sy(01|mb)|t2(18|50)|t6(00|10|18)|ta(gt|lk)|tcl\\-|tdg\\-|tel(i|m)|tim\\-|t\\-mo|to(pl|sh)|ts(70|m\\-|m3|m5)|tx\\-9|up(\\.b|g1|si)|utst|v400|v750|veri|vi(rg|te)|vk(40|5[0-3]|\\-v)|vm40|voda|vulc|vx(52|53|60|61|70|80|81|83|85|98)|w3c(\\-| )|webc|whit|wi(g |nc|nw)|wmlb|wonu|x700|yas\\-|your|zeto|zte\\-/i\n .test(a.substr(0, 4));\n }\n return false;\n}\nexport function isBrowser() {\n return (typeof window !== 'undefined' && window.document != null) ||\n //@ts-ignore\n (typeof WorkerGlobalScope !== 'undefined');\n}\n//# sourceMappingURL=device_util.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport './engine';\nimport * as device_util from './device_util';\nimport { env } from './environment';\nconst ENV = env();\n/**\n * This file contains environment-related flag registrations.\n */\n/** Whether to enable debug mode. */\nENV.registerFlag('DEBUG', () => false, debugValue => {\n if (debugValue) {\n console.warn('Debugging mode is ON. The output of every math call will ' +\n 'be downloaded to CPU and checked for NaNs. ' +\n 'This significantly impacts performance.');\n }\n});\n/** Whether we are in a browser (as versus, say, node.js) environment. */\nENV.registerFlag('IS_BROWSER', () => device_util.isBrowser());\n/** Whether we are in a browser (as versus, say, node.js) environment. */\nENV.registerFlag('IS_NODE', () => (typeof process !== 'undefined') &&\n (typeof process.versions !== 'undefined') &&\n (typeof process.versions.node !== 'undefined'));\n/** Whether this browser is Chrome. 
*/\nENV.registerFlag('IS_CHROME', () => typeof navigator !== 'undefined' && navigator != null &&\n navigator.userAgent != null && /Chrome/.test(navigator.userAgent) &&\n /Google Inc/.test(navigator.vendor));\n/**\n * True when the environment is \"production\" where we disable safety checks\n * to gain performance.\n */\nENV.registerFlag('PROD', () => false);\n/**\n * Whether to do sanity checks when inferring a shape from user-provided\n * values, used when creating a new tensor.\n */\nENV.registerFlag('TENSORLIKE_CHECK_SHAPE_CONSISTENCY', () => ENV.getBool('DEBUG'));\n/** Whether deprecation warnings are enabled. */\nENV.registerFlag('DEPRECATION_WARNINGS_ENABLED', () => true);\n/** True if running unit tests. */\nENV.registerFlag('IS_TEST', () => false);\n//# sourceMappingURL=flags.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from './engine';\nimport { env } from './environment';\nimport { Tensor } from './tensor';\nimport { assert, flatten, inferDtype, isTypedArray, toTypedArray } from './util';\nexport function inferShape(val, dtype) {\n let firstElem = val;\n if (isTypedArray(val)) {\n return dtype === 'string' ? 
[] : [val.length];\n }\n if (!Array.isArray(val)) {\n return []; // Scalar.\n }\n const shape = [];\n while (Array.isArray(firstElem) ||\n isTypedArray(firstElem) && dtype !== 'string') {\n shape.push(firstElem.length);\n firstElem = firstElem[0];\n }\n if (Array.isArray(val) &&\n env().getBool('TENSORLIKE_CHECK_SHAPE_CONSISTENCY')) {\n deepAssertShapeConsistency(val, shape, []);\n }\n return shape;\n}\nfunction deepAssertShapeConsistency(val, shape, indices) {\n indices = indices || [];\n if (!(Array.isArray(val)) && !isTypedArray(val)) {\n assert(shape.length === 0, () => `Element arr[${indices.join('][')}] is a primitive, ` +\n `but should be an array/TypedArray of ${shape[0]} elements`);\n return;\n }\n assert(shape.length > 0, () => `Element arr[${indices.join('][')}] should be a primitive, ` +\n `but is an array of ${val.length} elements`);\n assert(val.length === shape[0], () => `Element arr[${indices.join('][')}] should have ${shape[0]} ` +\n `elements, but has ${val.length} elements`);\n const subShape = shape.slice(1);\n for (let i = 0; i < val.length; ++i) {\n deepAssertShapeConsistency(val[i], subShape, indices.concat(i));\n }\n}\nfunction assertDtype(expectedDtype, actualDType, argName, functionName) {\n if (expectedDtype == null) {\n return;\n }\n if (expectedDtype !== 'numeric' && expectedDtype !== actualDType ||\n expectedDtype === 'numeric' && actualDType === 'string') {\n throw new Error(`Argument '${argName}' passed to '${functionName}' must ` +\n `be ${expectedDtype} tensor, but got ${actualDType} tensor`);\n }\n}\nexport function convertToTensor(x, argName, functionName, parseAsDtype = 'numeric') {\n if (x instanceof Tensor) {\n assertDtype(parseAsDtype, x.dtype, argName, functionName);\n return x;\n }\n let inferredDtype = inferDtype(x);\n // If the user expects a bool/int/float, use that info to update the\n // inferredDtype when it is not a string.\n if (inferredDtype !== 'string' &&\n ['bool', 'int32', 'float32'].indexOf(parseAsDtype) >= 0) {\n inferredDtype = parseAsDtype;\n }\n assertDtype(parseAsDtype, inferredDtype, argName, functionName);\n if ((x == null) ||\n (!isTypedArray(x) && !Array.isArray(x) && typeof x !== 'number' &&\n typeof x !== 'boolean' && typeof x !== 'string')) {\n const type = x == null ? 'null' : x.constructor.name;\n throw new Error(`Argument '${argName}' passed to '${functionName}' must be a ` +\n `Tensor or TensorLike, but got '${type}'`);\n }\n const inferredShape = inferShape(x, inferredDtype);\n if (!isTypedArray(x) && !Array.isArray(x)) {\n x = [x];\n }\n const skipTypedArray = true;\n const values = inferredDtype !== 'string' ?\n toTypedArray(x, inferredDtype) :\n flatten(x, [], skipTypedArray);\n return ENGINE.makeTensor(values, inferredShape, inferredDtype);\n}\nexport function convertToTensorArray(arg, argName, functionName, parseAsDtype = 'numeric') {\n if (!Array.isArray(arg)) {\n throw new Error(`Argument ${argName} passed to ${functionName} must be a ` +\n '`Tensor[]` or `TensorLike[]`');\n }\n const tensors = arg;\n return tensors.map((t, i) => convertToTensor(t, `${argName}[${i}]`, functionName), parseAsDtype);\n}\n//# sourceMappingURL=tensor_util_env.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { isPromise } from '../util';\nexport const OP_SCOPE_SUFFIX = '__op';\n/**\n * Used for wrapping functions that perform math operations on\n * Tensors. The function will be wrapped in a named scope that cleans all\n * memory usage after the function is done.\n */\nexport function op(f) {\n const keys = Object.keys(f);\n if (keys.length !== 1) {\n throw new Error(`Please provide an object with a single key ` +\n `(operation name) mapping to a function. Got an object with ` +\n `${keys.length} keys.`);\n }\n let opName = keys[0];\n const fn = f[opName];\n // Strip the underscore from the end of the function name.\n if (opName.endsWith('_')) {\n opName = opName.substring(0, opName.length - 1);\n }\n // add an __op suffix to distinguish ops from kernels in tf.profile\n opName = opName + OP_SCOPE_SUFFIX;\n // tslint:disable-next-line:no-any\n const f2 = (...args) => {\n ENGINE.startScope(opName);\n try {\n const result = fn(...args);\n if (isPromise(result)) {\n console.error('Cannot return a Promise inside of tidy.');\n }\n ENGINE.endScope(result);\n return result;\n }\n catch (ex) {\n ENGINE.endScope(null);\n throw ex;\n }\n };\n Object.defineProperty(f2, 'name', { value: opName, configurable: true });\n // tslint:disable-next-line:no-any\n return f2;\n}\n//# sourceMappingURL=operation.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Complex } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * Converts two real numbers to a complex number.\n *\n * Given a tensor `real` representing the real part of a complex number, and a\n * tensor `imag` representing the imaginary part of a complex number, this\n * operation returns complex numbers elementwise of the form [r0, i0, r1, i1],\n * where r represents the real part and i represents the imag part.\n *\n * The input tensors real and imag must have the same shape.\n *\n * ```js\n * const real = tf.tensor1d([2.25, 3.25]);\n * const imag = tf.tensor1d([4.75, 5.75]);\n * const complex = tf.complex(real, imag);\n *\n * complex.print();\n * ```\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nfunction complex_(real, imag) {\n const $real = convertToTensor(real, 'real', 'complex');\n const $imag = convertToTensor(imag, 'imag', 'complex');\n util.assertShapesMatch($real.shape, $imag.shape, `real and imag shapes, ${$real.shape} and ${$imag.shape}, ` +\n `must match in call to tf.complex().`);\n const forward = (backend) => {\n return backend.complex($real, $imag);\n };\n const inputs = { real: $real, imag: $imag };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Complex);\n}\nexport const complex = op({ complex_ });\n//# sourceMappingURL=complex.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { assert, assertNonNegativeIntegerDimensions, flatten, inferDtype, isTypedArray, sizeFromShape, toTypedArray } from '../util';\n/** This is shared code across all tensor creation methods. */\nexport function makeTensor(values, shape, inferredShape, dtype) {\n if (dtype == null) {\n dtype = inferDtype(values);\n }\n if (dtype === 'complex64') {\n throw new Error(`Cannot construct a complex64 tensor directly. 
` +\n `Please use tf.complex(real, imag).`);\n }\n if (!isTypedArray(values) && !Array.isArray(values) &&\n typeof values !== 'number' && typeof values !== 'boolean' &&\n typeof values !== 'string') {\n throw new Error('values passed to tensor(values) must be a number/boolean/string or ' +\n 'an array of numbers/booleans/strings, or a TypedArray');\n }\n if (shape != null) {\n assertNonNegativeIntegerDimensions(shape);\n const providedSize = sizeFromShape(shape);\n const inferredSize = sizeFromShape(inferredShape);\n assert(providedSize === inferredSize, () => `Based on the provided shape, [${shape}], the tensor should have ` +\n `${providedSize} values but has ${inferredSize}`);\n for (let i = 0; i < inferredShape.length; ++i) {\n const inferred = inferredShape[i];\n const flatDimsDontMatch = i === inferredShape.length - 1 ?\n inferred !== sizeFromShape(shape.slice(i)) :\n true;\n assert(inferredShape[i] === shape[i] || !flatDimsDontMatch, () => `Error creating a new Tensor. Inferred shape ` +\n `(${inferredShape}) does not match the provided ` +\n `shape (${shape}). `);\n }\n }\n if (!isTypedArray(values) && !Array.isArray(values)) {\n values = [values];\n }\n shape = shape || inferredShape;\n values = dtype !== 'string' ?\n toTypedArray(values, dtype) :\n flatten(values, [], true);\n return ENGINE.makeTensor(values, shape, dtype);\n}\n//# sourceMappingURL=tensor_ops_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { inferShape } from '../tensor_util_env';\nimport { makeTensor } from './tensor_ops_util';\n/**\n * Creates a `tf.Tensor` with the provided values, shape and dtype.\n *\n * ```js\n * // Pass an array of values to create a vector.\n * tf.tensor([1, 2, 3, 4]).print();\n * ```\n *\n * ```js\n * // Pass a nested array of values to make a matrix or a higher\n * // dimensional tensor.\n * tf.tensor([[1, 2], [3, 4]]).print();\n * ```\n *\n * ```js\n * // Pass a flat array and specify a shape yourself.\n * tf.tensor([1, 2, 3, 4], [2, 2]).print();\n * ```\n *\n * @param values The values of the tensor. Can be nested array of numbers,\n * or a flat array, or a `TypedArray`. If the values are strings,\n * they will be encoded as utf-8 and kept as `Uint8Array[]`.\n * @param shape The shape of the tensor. Optional. If not provided,\n * it is inferred from `values`.\n * @param dtype The data type.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function tensor(values, shape, dtype) {\n const inferredShape = inferShape(values, dtype);\n return makeTensor(values, shape, inferredShape, dtype);\n}\n//# sourceMappingURL=tensor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
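Editor's note: `makeTensor` above validates the provided shape against the number of values and refuses direct complex64 construction. A short illustration, assuming the `tf` namespace as in the doc examples embedded in this bundle:

```js
// Shape and value count must agree (see the assert in makeTensor above):
tf.tensor([1, 2, 3, 4], [2, 2]).print();  // ok: 4 values for shape [2, 2]

// tf.tensor([1, 2, 3], [2, 2]);
// -> Error: Based on the provided shape, [2,2], the tensor should have
//    4 values but has 3

// complex64 tensors cannot be constructed directly:
// tf.tensor([1, 2], [2], 'complex64');
// -> Error: Cannot construct a complex64 tensor directly.
//    Please use tf.complex(real, imag).
```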
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/* Type definitions for exporting and importing of models. */\n/**\n * A map from Tensor dtype to number of bytes per element of the Tensor.\n */\nexport const DTYPE_VALUE_SIZE_MAP = {\n 'float32': 4,\n 'float16': 2,\n 'int32': 4,\n 'uint16': 2,\n 'uint8': 1,\n 'bool': 1,\n 'complex64': 8\n};\n//# sourceMappingURL=types.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { complex } from '../ops/complex';\nimport { tensor } from '../ops/tensor';\nimport { sizeFromShape } from '../util';\nimport { DTYPE_VALUE_SIZE_MAP } from './types';\n/** Number of bytes reserved for the length of the string. (32bit integer). */\nconst NUM_BYTES_STRING_LENGTH = 4;\n/**\n * Encode a map from names to weight values as an ArrayBuffer, along with an\n * `Array` of `WeightsManifestEntry` as specification of the encoded weights.\n *\n * This function does not perform sharding.\n *\n * This function is the reverse of `decodeWeights`.\n *\n * @param tensors A map (\"dict\") from names to tensors.\n * @param group Group to which the weights belong (optional).\n * @returns A `Promise` of\n * - A flat `ArrayBuffer` with all the binary values of the `Tensor`s\n * concatenated.\n * - An `Array` of `WeightManifestEntry`s, carrying information including\n * tensor names, `dtype`s and shapes.\n * @throws Error: on unsupported tensor `dtype`.\n */\nexport async function encodeWeights(tensors, group) {\n // TODO(adarob, cais): Support quantization.\n const specs = [];\n const dataPromises = [];\n const names = Array.isArray(tensors) ?\n tensors.map(tensor => tensor.name) :\n Object.keys(tensors);\n for (let i = 0; i < names.length; ++i) {\n const name = names[i];\n const t = Array.isArray(tensors) ? 
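Editor's note: `DTYPE_VALUE_SIZE_MAP` above drives the byte math used by the weight codecs that follow (string weights are length-prefixed and handled separately). A rough sketch of how the byte length of one non-string weight entry is derived; the imports mirror the ones used by the bundled io_utils module, and the spec values are made up for illustration:

```js
import { sizeFromShape } from '../util';
import { DTYPE_VALUE_SIZE_MAP } from './types';

// e.g. a float32 kernel of shape [3, 3, 64]:
const spec = { name: 'conv1/kernel', shape: [3, 3, 64], dtype: 'float32' };
const numElements = sizeFromShape(spec.shape);                     // 576
const numBytes = numElements * DTYPE_VALUE_SIZE_MAP[spec.dtype];   // 576 * 4 = 2304
```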
tensors[i].tensor : tensors[name];\n if (t.dtype !== 'float32' && t.dtype !== 'int32' && t.dtype !== 'bool' &&\n t.dtype !== 'string' && t.dtype !== 'complex64') {\n throw new Error(`Unsupported dtype in weight '${name}': ${t.dtype}`);\n }\n const spec = { name, shape: t.shape, dtype: t.dtype };\n if (t.dtype === 'string') {\n const utf8bytes = new Promise(async (resolve) => {\n const vals = await t.bytes();\n const totalNumBytes = vals.reduce((p, c) => p + c.length, 0) +\n NUM_BYTES_STRING_LENGTH * vals.length;\n const bytes = new Uint8Array(totalNumBytes);\n let offset = 0;\n for (let i = 0; i < vals.length; i++) {\n const val = vals[i];\n const bytesOfLength = new Uint8Array(new Uint32Array([val.length]).buffer);\n bytes.set(bytesOfLength, offset);\n offset += NUM_BYTES_STRING_LENGTH;\n bytes.set(val, offset);\n offset += val.length;\n }\n resolve(bytes);\n });\n dataPromises.push(utf8bytes);\n }\n else {\n dataPromises.push(t.data());\n }\n if (group != null) {\n spec.group = group;\n }\n specs.push(spec);\n }\n const tensorValues = await Promise.all(dataPromises);\n return { data: concatenateTypedArrays(tensorValues), specs };\n}\n/**\n * Decode flat ArrayBuffer as weights.\n *\n * This function does not handle sharding.\n *\n * This function is the reverse of `encodeWeights`.\n *\n * @param buffer A flat ArrayBuffer carrying the binary values of the tensors\n * concatenated in the order specified in `specs`.\n * @param specs Specifications of the names, dtypes and shapes of the tensors\n * whose value are encoded by `buffer`.\n * @return A map from tensor name to tensor value, with the names corresponding\n * to names in `specs`.\n * @throws Error, if any of the tensors has unsupported dtype.\n */\nexport function decodeWeights(buffer, specs) {\n // TODO(adarob, cais): Support quantization.\n const out = {};\n let float16Decode;\n let offset = 0;\n for (const spec of specs) {\n const name = spec.name;\n const dtype = spec.dtype;\n const shape = spec.shape;\n const size = sizeFromShape(shape);\n let values;\n if ('quantization' in spec) {\n const quantization = spec.quantization;\n if (quantization.dtype === 'uint8' || quantization.dtype === 'uint16') {\n if (!('min' in quantization && 'scale' in quantization)) {\n throw new Error(`Weight ${spec.name} with quantization ${quantization.dtype} ` +\n `doesn't have corresponding metadata min and scale.`);\n }\n }\n else if (quantization.dtype === 'float16') {\n if (dtype !== 'float32') {\n throw new Error(`Weight ${spec.name} is quantized with ${quantization.dtype} ` +\n `which only supports weights of type float32 not ${dtype}.`);\n }\n }\n else {\n throw new Error(`Weight ${spec.name} has unknown ` +\n `quantization dtype ${quantization.dtype}. 
` +\n `Supported quantization dtypes are: ` +\n `'uint8', 'uint16', and 'float16'.`);\n }\n const quantizationSizeFactor = DTYPE_VALUE_SIZE_MAP[quantization.dtype];\n const byteBuffer = buffer.slice(offset, offset + size * quantizationSizeFactor);\n const quantizedArray = (quantization.dtype === 'uint8') ?\n new Uint8Array(byteBuffer) :\n new Uint16Array(byteBuffer);\n if (dtype === 'float32') {\n if (quantization.dtype === 'uint8' || quantization.dtype === 'uint16') {\n values = new Float32Array(quantizedArray.length);\n for (let i = 0; i < quantizedArray.length; i++) {\n const v = quantizedArray[i];\n values[i] = v * quantization.scale + quantization.min;\n }\n }\n else if (quantization.dtype === 'float16') {\n if (float16Decode === undefined) {\n float16Decode = getFloat16Decoder();\n }\n values = float16Decode(quantizedArray);\n }\n else {\n throw new Error(`Unsupported quantization type ${quantization.dtype} ` +\n `for weight type float32.`);\n }\n }\n else if (dtype === 'int32') {\n if (quantization.dtype !== 'uint8' && quantization.dtype !== 'uint16') {\n throw new Error(`Unsupported quantization type ${quantization.dtype} ` +\n `for weight type int32.`);\n }\n values = new Int32Array(quantizedArray.length);\n for (let i = 0; i < quantizedArray.length; i++) {\n const v = quantizedArray[i];\n values[i] = Math.round(v * quantization.scale + quantization.min);\n }\n }\n else {\n throw new Error(`Unsupported dtype in weight '${name}': ${dtype}`);\n }\n offset += size * quantizationSizeFactor;\n }\n else if (dtype === 'string') {\n const size = sizeFromShape(spec.shape);\n values = [];\n for (let i = 0; i < size; i++) {\n const byteLength = new Uint32Array(buffer.slice(offset, offset + NUM_BYTES_STRING_LENGTH))[0];\n offset += NUM_BYTES_STRING_LENGTH;\n const bytes = new Uint8Array(buffer.slice(offset, offset + byteLength));\n values.push(bytes);\n offset += byteLength;\n }\n }\n else {\n const dtypeFactor = DTYPE_VALUE_SIZE_MAP[dtype];\n const byteBuffer = buffer.slice(offset, offset + size * dtypeFactor);\n if (dtype === 'float32') {\n values = new Float32Array(byteBuffer);\n }\n else if (dtype === 'int32') {\n values = new Int32Array(byteBuffer);\n }\n else if (dtype === 'bool') {\n values = new Uint8Array(byteBuffer);\n }\n else if (dtype === 'complex64') {\n values = new Float32Array(byteBuffer);\n const real = new Float32Array(values.length / 2);\n const image = new Float32Array(values.length / 2);\n for (let i = 0; i < real.length; i++) {\n real[i] = values[i * 2];\n image[i] = values[i * 2 + 1];\n }\n const realTensor = tensor(real, shape, 'float32');\n const imageTensor = tensor(image, shape, 'float32');\n out[name] = complex(realTensor, imageTensor);\n realTensor.dispose();\n imageTensor.dispose();\n }\n else {\n throw new Error(`Unsupported dtype in weight '${name}': ${dtype}`);\n }\n offset += size * dtypeFactor;\n }\n if (dtype !== 'complex64') {\n out[name] = tensor(values, shape, dtype);\n }\n }\n return out;\n}\n/**\n * Concatenate TypedArrays into an ArrayBuffer.\n */\nexport function concatenateTypedArrays(xs) {\n // TODO(adarob, cais): Support quantization.\n if (xs === null) {\n throw new Error(`Invalid input value: ${JSON.stringify(xs)}`);\n }\n let totalByteLength = 0;\n // `normalizedXs` is here for this reason: a `TypedArray`'s `buffer'\n // can have a different byte length from that of the `TypedArray` itself,\n // for example, when the `TypedArray` is created from an offset in an\n // `ArrayBuffer`. 
`normliazedXs` holds `TypedArray`s whose `buffer`s match\n // the `TypedArray` in byte length. If an element of `xs` does not show\n // this property, a new `TypedArray` that satisfy this property will be\n // constructed and pushed into `normalizedXs`.\n const normalizedXs = [];\n xs.forEach((x) => {\n totalByteLength += x.byteLength;\n // tslint:disable:no-any\n normalizedXs.push(x.byteLength === x.buffer.byteLength ? x :\n new x.constructor(x));\n if (!(x instanceof Float32Array || x instanceof Int32Array ||\n x instanceof Uint8Array)) {\n throw new Error(`Unsupported TypedArray subtype: ${x.constructor.name}`);\n }\n // tslint:enable:no-any\n });\n const y = new Uint8Array(totalByteLength);\n let offset = 0;\n normalizedXs.forEach((x) => {\n y.set(new Uint8Array(x.buffer), offset);\n offset += x.byteLength;\n });\n return y.buffer;\n}\n// Use Buffer on Node.js instead of Blob/atob/btoa\nconst useNodeBuffer = typeof Buffer !== 'undefined' &&\n (typeof Blob === 'undefined' || typeof atob === 'undefined' ||\n typeof btoa === 'undefined');\n/**\n * Calculate the byte length of a JavaScript string.\n *\n * Note that a JavaScript string can contain wide characters, therefore the\n * length of the string is not necessarily equal to the byte length.\n *\n * @param str Input string.\n * @returns Byte length.\n */\nexport function stringByteLength(str) {\n if (useNodeBuffer) {\n return Buffer.byteLength(str);\n }\n return new Blob([str]).size;\n}\n/**\n * Encode an ArrayBuffer as a base64 encoded string.\n *\n * @param buffer `ArrayBuffer` to be converted.\n * @returns A string that base64-encodes `buffer`.\n */\nexport function arrayBufferToBase64String(buffer) {\n if (useNodeBuffer) {\n return Buffer.from(buffer).toString('base64');\n }\n const buf = new Uint8Array(buffer);\n let s = '';\n for (let i = 0, l = buf.length; i < l; i++) {\n s += String.fromCharCode(buf[i]);\n }\n return btoa(s);\n}\n/**\n * Decode a base64 string as an ArrayBuffer.\n *\n * @param str Base64 string.\n * @returns Decoded `ArrayBuffer`.\n */\nexport function base64StringToArrayBuffer(str) {\n if (useNodeBuffer) {\n const buf = Buffer.from(str, 'base64');\n return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n }\n const s = atob(str);\n const buffer = new Uint8Array(s.length);\n for (let i = 0; i < s.length; ++i) {\n buffer.set([s.charCodeAt(i)], i);\n }\n return buffer.buffer;\n}\n/**\n * Concatenate a number of ArrayBuffers into one.\n *\n * @param buffers A number of array buffers to concatenate.\n * @returns Result of concatenating `buffers` in order.\n */\nexport function concatenateArrayBuffers(buffers) {\n if (buffers.length === 1) {\n return buffers[0];\n }\n let totalByteLength = 0;\n buffers.forEach((buffer) => {\n totalByteLength += buffer.byteLength;\n });\n const temp = new Uint8Array(totalByteLength);\n let offset = 0;\n buffers.forEach((buffer) => {\n temp.set(new Uint8Array(buffer), offset);\n offset += buffer.byteLength;\n });\n return temp.buffer;\n}\n/**\n * Get the basename of a path.\n *\n * Behaves in a way analogous to Linux's basename command.\n *\n * @param path\n */\nexport function basename(path) {\n const SEPARATOR = '/';\n path = path.trim();\n while (path.endsWith(SEPARATOR)) {\n path = path.slice(0, path.length - 1);\n }\n const items = path.split(SEPARATOR);\n return items[items.length - 1];\n}\n/**\n * Populate ModelArtifactsInfo fields for a model with JSON topology.\n * @param modelArtifacts\n * @returns A ModelArtifactsInfo object.\n */\nexport function 
getModelArtifactsInfoForJSON(modelArtifacts) {\n if (modelArtifacts.modelTopology instanceof ArrayBuffer) {\n throw new Error('Expected JSON model topology, received ArrayBuffer.');\n }\n return {\n dateSaved: new Date(),\n modelTopologyType: 'JSON',\n modelTopologyBytes: modelArtifacts.modelTopology == null ?\n 0 :\n stringByteLength(JSON.stringify(modelArtifacts.modelTopology)),\n weightSpecsBytes: modelArtifacts.weightSpecs == null ?\n 0 :\n stringByteLength(JSON.stringify(modelArtifacts.weightSpecs)),\n weightDataBytes: modelArtifacts.weightData == null ?\n 0 :\n modelArtifacts.weightData.byteLength,\n };\n}\n/**\n * Computes mantisa table for casting Float16 to Float32\n * See http://www.fox-toolkit.org/ftp/fasthalffloatconversion.pdf\n *\n * @returns Uint32Array, 2048 mantissa lookup values.\n */\nfunction computeFloat16MantisaTable() {\n const convertMantissa = (i) => {\n let m = i << 13;\n let e = 0;\n while ((m & 0x00800000) === 0) {\n e -= 0x00800000;\n m <<= 1;\n }\n m &= ~0x00800000;\n e += 0x38800000;\n return m | e;\n };\n const mantisaTable = new Uint32Array(2048);\n mantisaTable[0] = 0;\n for (let i = 1; i < 1024; i++) {\n mantisaTable[i] = convertMantissa(i);\n }\n for (let i = 1024; i < 2048; i++) {\n mantisaTable[i] = 0x38000000 + ((i - 1024) << 13);\n }\n return mantisaTable;\n}\n/**\n * Computes exponent table for casting Float16 to Float32\n * See http://www.fox-toolkit.org/ftp/fasthalffloatconversion.pdf\n *\n * @returns Uint32Array, 64 exponent lookup values.\n */\nfunction computeFloat16ExponentTable() {\n const exponentTable = new Uint32Array(64);\n exponentTable[0] = 0;\n exponentTable[31] = 0x47800000;\n exponentTable[32] = 0x80000000;\n exponentTable[63] = 0xc7800000;\n for (let i = 1; i < 31; i++) {\n exponentTable[i] = i << 23;\n }\n for (let i = 33; i < 63; i++) {\n exponentTable[i] = 0x80000000 + ((i - 32) << 23);\n }\n return exponentTable;\n}\n/**\n * Computes offset table for casting Float16 to Float32\n * See http://www.fox-toolkit.org/ftp/fasthalffloatconversion.pdf\n *\n * @returns Uint32Array, 6d offset values.\n */\nfunction computeFloat16OffsetTable() {\n const offsetTable = new Uint32Array(64);\n for (let i = 0; i < 64; i++) {\n offsetTable[i] = 1024;\n }\n offsetTable[0] = offsetTable[32] = 0;\n return offsetTable;\n}\n/**\n * Retrieve a Float16 decoder which will decode a ByteArray of Float16 values\n * to a Float32Array.\n *\n * @returns Function (buffer: Uint16Array) => Float32Array which decodes\n * the Uint16Array of Float16 bytes to a Float32Array.\n */\nexport function getFloat16Decoder() {\n // Algorithm is based off of\n // http://www.fox-toolkit.org/ftp/fasthalffloatconversion.pdf\n // Cache lookup tables\n const mantisaTable = computeFloat16MantisaTable();\n const exponentTable = computeFloat16ExponentTable();\n const offsetTable = computeFloat16OffsetTable();\n return (quantizedArray) => {\n const buffer = new ArrayBuffer(4 * quantizedArray.length);\n const bufferUint32View = new Uint32Array(buffer);\n for (let index = 0; index < quantizedArray.length; index++) {\n const float16Bits = quantizedArray[index];\n const float32Bits = mantisaTable[offsetTable[float16Bits >> 10] + (float16Bits & 0x3ff)] +\n exponentTable[float16Bits >> 10];\n bufferUint32View[index] = float32Bits;\n }\n return new Float32Array(buffer);\n };\n}\n//# sourceMappingURL=io_utils.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
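Editor's note: `encodeWeights` and `decodeWeights` above are inverses of each other. A minimal round-trip sketch, assuming they are re-exported on the public `tf.io` namespace (as in recent tfjs-core builds) and that this runs in an async context:

```js
const weights = {
  w: tf.tensor2d([[1, 2], [3, 4]], [2, 2], 'float32'),
  b: tf.tensor1d([1, 0], 'int32'),
};

// encodeWeights -> one flat ArrayBuffer plus one spec per tensor
const { data, specs } = await tf.io.encodeWeights(weights);
console.log(specs);  // [{name: 'w', shape: [2, 2], dtype: 'float32'}, {name: 'b', ...}]

// decodeWeights is the synchronous inverse
const decoded = tf.io.decodeWeights(data, specs);
decoded.w.print();   // [[1, 2], [3, 4]]
decoded.b.print();   // [1, 0]
```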
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class IORouterRegistry {\n constructor() {\n this.saveRouters = [];\n this.loadRouters = [];\n }\n static getInstance() {\n if (IORouterRegistry.instance == null) {\n IORouterRegistry.instance = new IORouterRegistry();\n }\n return IORouterRegistry.instance;\n }\n /**\n * Register a save-handler router.\n *\n * @param saveRouter A function that maps a URL-like string onto an instance\n * of `IOHandler` with the `save` method defined or `null`.\n */\n static registerSaveRouter(saveRouter) {\n IORouterRegistry.getInstance().saveRouters.push(saveRouter);\n }\n /**\n * Register a load-handler router.\n *\n * @param loadRouter A function that maps a URL-like string onto an instance\n * of `IOHandler` with the `load` method defined or `null`.\n */\n static registerLoadRouter(loadRouter) {\n IORouterRegistry.getInstance().loadRouters.push(loadRouter);\n }\n /**\n * Look up IOHandler for saving, given a URL-like string.\n *\n * @param url\n * @returns If only one match is found, an instance of IOHandler with the\n * `save` method defined. If no match is found, `null`.\n * @throws Error, if more than one match is found.\n */\n static getSaveHandlers(url) {\n return IORouterRegistry.getHandlers(url, 'save');\n }\n /**\n * Look up IOHandler for loading, given a URL-like string.\n *\n * @param url\n * @param loadOptions Optional, custom load options.\n * @returns All valid handlers for `url`, given the currently registered\n * handler routers.\n */\n static getLoadHandlers(url, loadOptions) {\n return IORouterRegistry.getHandlers(url, 'load', loadOptions);\n }\n static getHandlers(url, handlerType, loadOptions) {\n const validHandlers = [];\n const routers = handlerType === 'load' ?\n IORouterRegistry.getInstance().loadRouters :\n IORouterRegistry.getInstance().saveRouters;\n routers.forEach(router => {\n const handler = router(url, loadOptions);\n if (handler !== null) {\n validHandlers.push(handler);\n }\n });\n return validHandlers;\n }\n}\nexport const registerSaveRouter = (loudRouter) => IORouterRegistry.registerSaveRouter(loudRouter);\nexport const registerLoadRouter = (loudRouter) => IORouterRegistry.registerLoadRouter(loudRouter);\nexport const getSaveHandlers = (url) => IORouterRegistry.getSaveHandlers(url);\nexport const getLoadHandlers = (url, loadOptions) => IORouterRegistry.getLoadHandlers(url, loadOptions);\n//# sourceMappingURL=router_registry.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
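Editor's note: the registry above resolves URL-like strings to IOHandlers. A sketch of registering a custom scheme, mirroring the `indexedDBRouter` pattern that follows; `mystore://` and `myStoreHandler` are hypothetical names for illustration:

```js
import { IORouterRegistry } from './router_registry';

const MY_SCHEME = 'mystore://';

// myStoreHandler is hypothetical: it must return an object implementing
// save() and/or load(), like the IndexedDB handler defined below.
const myRouter = (url) => {
  if (!Array.isArray(url) && url.startsWith(MY_SCHEME)) {
    return myStoreHandler(url.slice(MY_SCHEME.length));
  }
  return null;
};

IORouterRegistry.registerSaveRouter(myRouter);
IORouterRegistry.registerLoadRouter(myRouter);

// Lookups then resolve URL-like strings to the matching handlers:
// IORouterRegistry.getSaveHandlers('mystore://my-model');  // -> [handler]
```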
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport '../flags';\nimport { env } from '../environment';\nimport { getModelArtifactsInfoForJSON } from './io_utils';\nimport { IORouterRegistry } from './router_registry';\nconst DATABASE_NAME = 'tensorflowjs';\nconst DATABASE_VERSION = 1;\n// Model data and ModelArtifactsInfo (metadata) are stored in two separate\n// stores for efficient access of the list of stored models and their metadata.\n// 1. The object store for model data: topology, weights and weight manifests.\nconst MODEL_STORE_NAME = 'models_store';\n// 2. The object store for ModelArtifactsInfo, including meta-information such\n// as the type of topology (JSON vs binary), byte size of the topology, byte\n// size of the weights, etc.\nconst INFO_STORE_NAME = 'model_info_store';\n/**\n * Delete the entire database for tensorflow.js, including the models store.\n */\nexport async function deleteDatabase() {\n const idbFactory = getIndexedDBFactory();\n return new Promise((resolve, reject) => {\n const deleteRequest = idbFactory.deleteDatabase(DATABASE_NAME);\n deleteRequest.onsuccess = () => resolve();\n deleteRequest.onerror = error => reject(error);\n });\n}\nfunction getIndexedDBFactory() {\n if (!env().getBool('IS_BROWSER')) {\n // TODO(cais): Add more info about what IOHandler subtypes are available.\n // Maybe point to a doc page on the web and/or automatically determine\n // the available IOHandlers and print them in the error message.\n throw new Error('Failed to obtain IndexedDB factory because the current environment' +\n 'is not a web browser.');\n }\n // tslint:disable-next-line:no-any\n const theWindow = typeof window === 'undefined' ? 
self : window;\n const factory = theWindow.indexedDB || theWindow.mozIndexedDB ||\n theWindow.webkitIndexedDB || theWindow.msIndexedDB ||\n theWindow.shimIndexedDB;\n if (factory == null) {\n throw new Error('The current browser does not appear to support IndexedDB.');\n }\n return factory;\n}\nfunction setUpDatabase(openRequest) {\n const db = openRequest.result;\n db.createObjectStore(MODEL_STORE_NAME, { keyPath: 'modelPath' });\n db.createObjectStore(INFO_STORE_NAME, { keyPath: 'modelPath' });\n}\n/**\n * IOHandler subclass: Browser IndexedDB.\n *\n * See the doc string of `browserIndexedDB` for more details.\n */\nexport class BrowserIndexedDB {\n constructor(modelPath) {\n this.indexedDB = getIndexedDBFactory();\n if (modelPath == null || !modelPath) {\n throw new Error('For IndexedDB, modelPath must not be null, undefined or empty.');\n }\n this.modelPath = modelPath;\n }\n async save(modelArtifacts) {\n // TODO(cais): Support saving GraphDef models.\n if (modelArtifacts.modelTopology instanceof ArrayBuffer) {\n throw new Error('BrowserLocalStorage.save() does not support saving model topology ' +\n 'in binary formats yet.');\n }\n return this.databaseAction(this.modelPath, modelArtifacts);\n }\n async load() {\n return this.databaseAction(this.modelPath);\n }\n /**\n * Perform database action to put model artifacts into or read model artifacts\n * from IndexedDB object store.\n *\n * Whether the action is put or get depends on whether `modelArtifacts` is\n * specified. If it is specified, the action will be put; otherwise the action\n * will be get.\n *\n * @param modelPath A unique string path for the model.\n * @param modelArtifacts If specified, it will be the model artifacts to be\n * stored in IndexedDB.\n * @returns A `Promise` of `SaveResult`, if the action is put, or a `Promise`\n * of `ModelArtifacts`, if the action is get.\n */\n databaseAction(modelPath, modelArtifacts) {\n return new Promise((resolve, reject) => {\n const openRequest = this.indexedDB.open(DATABASE_NAME, DATABASE_VERSION);\n openRequest.onupgradeneeded = () => setUpDatabase(openRequest);\n openRequest.onsuccess = () => {\n const db = openRequest.result;\n if (modelArtifacts == null) {\n // Read model out from object store.\n const modelTx = db.transaction(MODEL_STORE_NAME, 'readonly');\n const modelStore = modelTx.objectStore(MODEL_STORE_NAME);\n const getRequest = modelStore.get(this.modelPath);\n getRequest.onsuccess = () => {\n if (getRequest.result == null) {\n db.close();\n return reject(new Error(`Cannot find model with path '${this.modelPath}' ` +\n `in IndexedDB.`));\n }\n else {\n resolve(getRequest.result.modelArtifacts);\n }\n };\n getRequest.onerror = error => {\n db.close();\n return reject(getRequest.error);\n };\n modelTx.oncomplete = () => db.close();\n }\n else {\n // Put model into object store.\n const modelArtifactsInfo = getModelArtifactsInfoForJSON(modelArtifacts);\n // First, put ModelArtifactsInfo into info store.\n const infoTx = db.transaction(INFO_STORE_NAME, 'readwrite');\n let infoStore = infoTx.objectStore(INFO_STORE_NAME);\n const putInfoRequest = infoStore.put({ modelPath: this.modelPath, modelArtifactsInfo });\n let modelTx;\n putInfoRequest.onsuccess = () => {\n // Second, put model data into model store.\n modelTx = db.transaction(MODEL_STORE_NAME, 'readwrite');\n const modelStore = modelTx.objectStore(MODEL_STORE_NAME);\n const putModelRequest = modelStore.put({\n modelPath: this.modelPath,\n modelArtifacts,\n modelArtifactsInfo\n });\n putModelRequest.onsuccess = () => 
resolve({ modelArtifactsInfo });\n putModelRequest.onerror = error => {\n // If the put-model request fails, roll back the info entry as\n // well.\n infoStore = infoTx.objectStore(INFO_STORE_NAME);\n const deleteInfoRequest = infoStore.delete(this.modelPath);\n deleteInfoRequest.onsuccess = () => {\n db.close();\n return reject(putModelRequest.error);\n };\n deleteInfoRequest.onerror = error => {\n db.close();\n return reject(putModelRequest.error);\n };\n };\n };\n putInfoRequest.onerror = error => {\n db.close();\n return reject(putInfoRequest.error);\n };\n infoTx.oncomplete = () => {\n if (modelTx == null) {\n db.close();\n }\n else {\n modelTx.oncomplete = () => db.close();\n }\n };\n }\n };\n openRequest.onerror = error => reject(openRequest.error);\n });\n }\n}\nBrowserIndexedDB.URL_SCHEME = 'indexeddb://';\nexport const indexedDBRouter = (url) => {\n if (!env().getBool('IS_BROWSER')) {\n return null;\n }\n else {\n if (!Array.isArray(url) && url.startsWith(BrowserIndexedDB.URL_SCHEME)) {\n return browserIndexedDB(url.slice(BrowserIndexedDB.URL_SCHEME.length));\n }\n else {\n return null;\n }\n }\n};\nIORouterRegistry.registerSaveRouter(indexedDBRouter);\nIORouterRegistry.registerLoadRouter(indexedDBRouter);\n/**\n * Creates a browser IndexedDB IOHandler for saving and loading models.\n *\n * ```js\n * const model = tf.sequential();\n * model.add(\n * tf.layers.dense({units: 1, inputShape: [100], activation: 'sigmoid'}));\n *\n * const saveResult = await model.save('indexeddb://MyModel'));\n * console.log(saveResult);\n * ```\n *\n * @param modelPath A unique identifier for the model to be saved. Must be a\n * non-empty string.\n * @returns An instance of `BrowserIndexedDB` (sublcass of `IOHandler`),\n * which can be used with, e.g., `tf.Model.save`.\n */\nexport function browserIndexedDB(modelPath) {\n return new BrowserIndexedDB(modelPath);\n}\nfunction maybeStripScheme(key) {\n return key.startsWith(BrowserIndexedDB.URL_SCHEME) ?\n key.slice(BrowserIndexedDB.URL_SCHEME.length) :\n key;\n}\nexport class BrowserIndexedDBManager {\n constructor() {\n this.indexedDB = getIndexedDBFactory();\n }\n async listModels() {\n return new Promise((resolve, reject) => {\n const openRequest = this.indexedDB.open(DATABASE_NAME, DATABASE_VERSION);\n openRequest.onupgradeneeded = () => setUpDatabase(openRequest);\n openRequest.onsuccess = () => {\n const db = openRequest.result;\n const tx = db.transaction(INFO_STORE_NAME, 'readonly');\n const store = tx.objectStore(INFO_STORE_NAME);\n // tslint:disable:max-line-length\n // Need to cast `store` as `any` here because TypeScript's DOM\n // library does not have the `getAll()` method even though the\n // method is supported in the latest version of most mainstream\n // browsers:\n // https://developer.mozilla.org/en-US/docs/Web/API/IDBObjectStore/getAll\n // tslint:enable:max-line-length\n // tslint:disable-next-line:no-any\n const getAllInfoRequest = store.getAll();\n getAllInfoRequest.onsuccess = () => {\n const out = {};\n for (const item of getAllInfoRequest.result) {\n out[item.modelPath] = item.modelArtifactsInfo;\n }\n resolve(out);\n };\n getAllInfoRequest.onerror = error => {\n db.close();\n return reject(getAllInfoRequest.error);\n };\n tx.oncomplete = () => db.close();\n };\n openRequest.onerror = error => reject(openRequest.error);\n });\n }\n async removeModel(path) {\n path = maybeStripScheme(path);\n return new Promise((resolve, reject) => {\n const openRequest = this.indexedDB.open(DATABASE_NAME, DATABASE_VERSION);\n 
openRequest.onupgradeneeded = () => setUpDatabase(openRequest);\n openRequest.onsuccess = () => {\n const db = openRequest.result;\n const infoTx = db.transaction(INFO_STORE_NAME, 'readwrite');\n const infoStore = infoTx.objectStore(INFO_STORE_NAME);\n const getInfoRequest = infoStore.get(path);\n let modelTx;\n getInfoRequest.onsuccess = () => {\n if (getInfoRequest.result == null) {\n db.close();\n return reject(new Error(`Cannot find model with path '${path}' ` +\n `in IndexedDB.`));\n }\n else {\n // First, delete the entry in the info store.\n const deleteInfoRequest = infoStore.delete(path);\n const deleteModelData = () => {\n // Second, delete the entry in the model store.\n modelTx = db.transaction(MODEL_STORE_NAME, 'readwrite');\n const modelStore = modelTx.objectStore(MODEL_STORE_NAME);\n const deleteModelRequest = modelStore.delete(path);\n deleteModelRequest.onsuccess = () => resolve(getInfoRequest.result.modelArtifactsInfo);\n deleteModelRequest.onerror = error => reject(getInfoRequest.error);\n };\n // Proceed with deleting model data regardless of whether deletion\n // of info data succeeds or not.\n deleteInfoRequest.onsuccess = deleteModelData;\n deleteInfoRequest.onerror = error => {\n deleteModelData();\n db.close();\n return reject(getInfoRequest.error);\n };\n }\n };\n getInfoRequest.onerror = error => {\n db.close();\n return reject(getInfoRequest.error);\n };\n infoTx.oncomplete = () => {\n if (modelTx == null) {\n db.close();\n }\n else {\n modelTx.oncomplete = () => db.close();\n }\n };\n };\n openRequest.onerror = error => reject(openRequest.error);\n });\n }\n}\n//# sourceMappingURL=indexed_db.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport '../flags';\nimport { env } from '../environment';\nimport { assert } from '../util';\nimport { arrayBufferToBase64String, base64StringToArrayBuffer, getModelArtifactsInfoForJSON } from './io_utils';\nimport { IORouterRegistry } from './router_registry';\nconst PATH_SEPARATOR = '/';\nconst PATH_PREFIX = 'tensorflowjs_models';\nconst INFO_SUFFIX = 'info';\nconst MODEL_TOPOLOGY_SUFFIX = 'model_topology';\nconst WEIGHT_SPECS_SUFFIX = 'weight_specs';\nconst WEIGHT_DATA_SUFFIX = 'weight_data';\nconst MODEL_METADATA_SUFFIX = 'model_metadata';\n/**\n * Purge all tensorflow.js-saved model artifacts from local storage.\n *\n * @returns Paths of the models purged.\n */\nexport function purgeLocalStorageArtifacts() {\n if (!env().getBool('IS_BROWSER') || typeof window === 'undefined' ||\n typeof window.localStorage === 'undefined') {\n throw new Error('purgeLocalStorageModels() cannot proceed because local storage is ' +\n 'unavailable in the current environment.');\n }\n const LS = window.localStorage;\n const purgedModelPaths = [];\n for (let i = 0; i < LS.length; ++i) {\n const key = LS.key(i);\n const prefix = PATH_PREFIX + PATH_SEPARATOR;\n if 
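Editor's note: the `indexeddb://` handler above plugs into the generic save/load path. A brief end-to-end sketch using the public calls referenced in the doc comments, assuming a browser with IndexedDB, the layers API for model construction, and an async context:

```js
const model = tf.sequential();
model.add(tf.layers.dense({ units: 1, inputShape: [100], activation: 'sigmoid' }));

// Save through the 'indexeddb://' scheme handled by BrowserIndexedDB above.
await model.save('indexeddb://MyModel');

// Load it back through the same scheme.
const restored = await tf.loadLayersModel('indexeddb://MyModel');

// Listing and deletion go through BrowserIndexedDBManager.
console.log(await tf.io.listModels());          // includes 'indexeddb://MyModel'
await tf.io.removeModel('indexeddb://MyModel');
```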
(key.startsWith(prefix) && key.length > prefix.length) {\n LS.removeItem(key);\n const modelName = getModelPathFromKey(key);\n if (purgedModelPaths.indexOf(modelName) === -1) {\n purgedModelPaths.push(modelName);\n }\n }\n }\n return purgedModelPaths;\n}\nfunction getModelKeys(path) {\n return {\n info: [PATH_PREFIX, path, INFO_SUFFIX].join(PATH_SEPARATOR),\n topology: [PATH_PREFIX, path, MODEL_TOPOLOGY_SUFFIX].join(PATH_SEPARATOR),\n weightSpecs: [PATH_PREFIX, path, WEIGHT_SPECS_SUFFIX].join(PATH_SEPARATOR),\n weightData: [PATH_PREFIX, path, WEIGHT_DATA_SUFFIX].join(PATH_SEPARATOR),\n modelMetadata: [PATH_PREFIX, path, MODEL_METADATA_SUFFIX].join(PATH_SEPARATOR)\n };\n}\n/**\n * Get model path from a local-storage key.\n *\n * E.g., 'tensorflowjs_models/my/model/1/info' --> 'my/model/1'\n *\n * @param key\n */\nfunction getModelPathFromKey(key) {\n const items = key.split(PATH_SEPARATOR);\n if (items.length < 3) {\n throw new Error(`Invalid key format: ${key}`);\n }\n return items.slice(1, items.length - 1).join(PATH_SEPARATOR);\n}\nfunction maybeStripScheme(key) {\n return key.startsWith(BrowserLocalStorage.URL_SCHEME) ?\n key.slice(BrowserLocalStorage.URL_SCHEME.length) :\n key;\n}\n/**\n * IOHandler subclass: Browser Local Storage.\n *\n * See the doc string to `browserLocalStorage` for more details.\n */\nexport class BrowserLocalStorage {\n constructor(modelPath) {\n if (!env().getBool('IS_BROWSER') || typeof window === 'undefined' ||\n typeof window.localStorage === 'undefined') {\n // TODO(cais): Add more info about what IOHandler subtypes are\n // available.\n // Maybe point to a doc page on the web and/or automatically determine\n // the available IOHandlers and print them in the error message.\n throw new Error('The current environment does not support local storage.');\n }\n this.LS = window.localStorage;\n if (modelPath == null || !modelPath) {\n throw new Error('For local storage, modelPath must not be null, undefined or empty.');\n }\n this.modelPath = modelPath;\n this.keys = getModelKeys(this.modelPath);\n }\n /**\n * Save model artifacts to browser local storage.\n *\n * See the documentation to `browserLocalStorage` for details on the saved\n * artifacts.\n *\n * @param modelArtifacts The model artifacts to be stored.\n * @returns An instance of SaveResult.\n */\n async save(modelArtifacts) {\n if (modelArtifacts.modelTopology instanceof ArrayBuffer) {\n throw new Error('BrowserLocalStorage.save() does not support saving model topology ' +\n 'in binary formats yet.');\n }\n else {\n const topology = JSON.stringify(modelArtifacts.modelTopology);\n const weightSpecs = JSON.stringify(modelArtifacts.weightSpecs);\n const modelArtifactsInfo = getModelArtifactsInfoForJSON(modelArtifacts);\n try {\n this.LS.setItem(this.keys.info, JSON.stringify(modelArtifactsInfo));\n this.LS.setItem(this.keys.topology, topology);\n this.LS.setItem(this.keys.weightSpecs, weightSpecs);\n this.LS.setItem(this.keys.weightData, arrayBufferToBase64String(modelArtifacts.weightData));\n this.LS.setItem(this.keys.modelMetadata, JSON.stringify({\n format: modelArtifacts.format,\n generatedBy: modelArtifacts.generatedBy,\n convertedBy: modelArtifacts.convertedBy,\n userDefinedMetadata: modelArtifacts.userDefinedMetadata\n }));\n return { modelArtifactsInfo };\n }\n catch (err) {\n // If saving failed, clean up all items saved so far.\n this.LS.removeItem(this.keys.info);\n this.LS.removeItem(this.keys.topology);\n this.LS.removeItem(this.keys.weightSpecs);\n this.LS.removeItem(this.keys.weightData);\n 
this.LS.removeItem(this.keys.modelMetadata);\n throw new Error(`Failed to save model '${this.modelPath}' to local storage: ` +\n `size quota being exceeded is a possible cause of this failure: ` +\n `modelTopologyBytes=${modelArtifactsInfo.modelTopologyBytes}, ` +\n `weightSpecsBytes=${modelArtifactsInfo.weightSpecsBytes}, ` +\n `weightDataBytes=${modelArtifactsInfo.weightDataBytes}.`);\n }\n }\n }\n /**\n * Load a model from local storage.\n *\n * See the documentation to `browserLocalStorage` for details on the saved\n * artifacts.\n *\n * @returns The loaded model (if loading succeeds).\n */\n async load() {\n const info = JSON.parse(this.LS.getItem(this.keys.info));\n if (info == null) {\n throw new Error(`In local storage, there is no model with name '${this.modelPath}'`);\n }\n if (info.modelTopologyType !== 'JSON') {\n throw new Error('BrowserLocalStorage does not support loading non-JSON model ' +\n 'topology yet.');\n }\n const out = {};\n // Load topology.\n const topology = JSON.parse(this.LS.getItem(this.keys.topology));\n if (topology == null) {\n throw new Error(`In local storage, the topology of model '${this.modelPath}' ` +\n `is missing.`);\n }\n out.modelTopology = topology;\n // Load weight specs.\n const weightSpecs = JSON.parse(this.LS.getItem(this.keys.weightSpecs));\n if (weightSpecs == null) {\n throw new Error(`In local storage, the weight specs of model '${this.modelPath}' ` +\n `are missing.`);\n }\n out.weightSpecs = weightSpecs;\n // Load meta-data fields.\n const metadataString = this.LS.getItem(this.keys.modelMetadata);\n if (metadataString != null) {\n const metadata = JSON.parse(metadataString);\n out.format = metadata['format'];\n out.generatedBy = metadata['generatedBy'];\n out.convertedBy = metadata['convertedBy'];\n out.userDefinedMetadata = metadata['userDefinedMetadata'];\n }\n // Load weight data.\n const weightDataBase64 = this.LS.getItem(this.keys.weightData);\n if (weightDataBase64 == null) {\n throw new Error(`In local storage, the binary weight values of model ` +\n `'${this.modelPath}' are missing.`);\n }\n out.weightData = base64StringToArrayBuffer(weightDataBase64);\n return out;\n }\n}\nBrowserLocalStorage.URL_SCHEME = 'localstorage://';\nexport const localStorageRouter = (url) => {\n if (!env().getBool('IS_BROWSER')) {\n return null;\n }\n else {\n if (!Array.isArray(url) && url.startsWith(BrowserLocalStorage.URL_SCHEME)) {\n return browserLocalStorage(url.slice(BrowserLocalStorage.URL_SCHEME.length));\n }\n else {\n return null;\n }\n }\n};\nIORouterRegistry.registerSaveRouter(localStorageRouter);\nIORouterRegistry.registerLoadRouter(localStorageRouter);\n/**\n * Factory function for local storage IOHandler.\n *\n * This `IOHandler` supports both `save` and `load`.\n *\n * For each model's saved artifacts, four items are saved to local storage.\n * - `${PATH_SEPARATOR}/${modelPath}/info`: Contains meta-info about the\n * model, such as date saved, type of the topology, size in bytes, etc.\n * - `${PATH_SEPARATOR}/${modelPath}/topology`: Model topology. 
For Keras-\n * style models, this is a stringized JSON.\n * - `${PATH_SEPARATOR}/${modelPath}/weight_specs`: Weight specs of the\n * model, can be used to decode the saved binary weight values (see\n * item below).\n * - `${PATH_SEPARATOR}/${modelPath}/weight_data`: Concatenated binary\n * weight values, stored as a base64-encoded string.\n *\n * Saving may throw an `Error` if the total size of the artifacts exceed the\n * browser-specific quota.\n *\n * @param modelPath A unique identifier for the model to be saved. Must be a\n * non-empty string.\n * @returns An instance of `IOHandler`, which can be used with, e.g.,\n * `tf.Model.save`.\n */\nexport function browserLocalStorage(modelPath) {\n return new BrowserLocalStorage(modelPath);\n}\nexport class BrowserLocalStorageManager {\n constructor() {\n assert(env().getBool('IS_BROWSER'), () => 'Current environment is not a web browser');\n assert(typeof window === 'undefined' ||\n typeof window.localStorage !== 'undefined', () => 'Current browser does not appear to support localStorage');\n this.LS = window.localStorage;\n }\n async listModels() {\n const out = {};\n const prefix = PATH_PREFIX + PATH_SEPARATOR;\n const suffix = PATH_SEPARATOR + INFO_SUFFIX;\n for (let i = 0; i < this.LS.length; ++i) {\n const key = this.LS.key(i);\n if (key.startsWith(prefix) && key.endsWith(suffix)) {\n const modelPath = getModelPathFromKey(key);\n out[modelPath] = JSON.parse(this.LS.getItem(key));\n }\n }\n return out;\n }\n async removeModel(path) {\n path = maybeStripScheme(path);\n const keys = getModelKeys(path);\n if (this.LS.getItem(keys.info) == null) {\n throw new Error(`Cannot find model at path '${path}'`);\n }\n const info = JSON.parse(this.LS.getItem(keys.info));\n this.LS.removeItem(keys.info);\n this.LS.removeItem(keys.topology);\n this.LS.removeItem(keys.weightSpecs);\n this.LS.removeItem(keys.weightData);\n return info;\n }\n}\n//# sourceMappingURL=local_storage.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
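Editor's note: for the local-storage handler, each saved model becomes a handful of `window.localStorage` entries under the `tensorflowjs_models` prefix. A sketch of what to expect after a save, reusing the `model` from the IndexedDB sketch above; key names follow `getModelKeys`:

```js
await model.save('localstorage://my-model');

// window.localStorage now holds one entry per artifact, keyed as
// tensorflowjs_models/<modelPath>/<suffix>:
//   tensorflowjs_models/my-model/info
//   tensorflowjs_models/my-model/model_topology
//   tensorflowjs_models/my-model/weight_specs
//   tensorflowjs_models/my-model/weight_data      (base64-encoded binary)
//   tensorflowjs_models/my-model/model_metadata
console.log(JSON.parse(localStorage.getItem('tensorflowjs_models/my-model/info')));
```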
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * Classes and functions for model management across multiple storage mediums.\n *\n * Supported client actions:\n * - Listing models on all registered storage mediums.\n * - Remove model by URL from any registered storage mediums, by using URL\n * string.\n * - Moving or copying model from one path to another in the same medium or from\n * one medium to another, by using URL strings.\n */\nimport { assert } from '../util';\nimport { IORouterRegistry } from './router_registry';\nconst URL_SCHEME_SUFFIX = '://';\nexport class ModelStoreManagerRegistry {\n constructor() {\n this.managers = {};\n }\n static getInstance() {\n if (ModelStoreManagerRegistry.instance == null) {\n ModelStoreManagerRegistry.instance = new ModelStoreManagerRegistry();\n }\n return ModelStoreManagerRegistry.instance;\n }\n /**\n * Register a save-handler router.\n *\n * @param saveRouter A function that maps a URL-like string onto an instance\n * of `IOHandler` with the `save` method defined or `null`.\n */\n static registerManager(scheme, manager) {\n assert(scheme != null, () => 'scheme must not be undefined or null.');\n if (scheme.endsWith(URL_SCHEME_SUFFIX)) {\n scheme = scheme.slice(0, scheme.indexOf(URL_SCHEME_SUFFIX));\n }\n assert(scheme.length > 0, () => 'scheme must not be an empty string.');\n const registry = ModelStoreManagerRegistry.getInstance();\n assert(registry.managers[scheme] == null, () => `A model store manager is already registered for scheme '${scheme}'.`);\n registry.managers[scheme] = manager;\n }\n static getManager(scheme) {\n const manager = this.getInstance().managers[scheme];\n if (manager == null) {\n throw new Error(`Cannot find model manager for scheme '${scheme}'`);\n }\n return manager;\n }\n static getSchemes() {\n return Object.keys(this.getInstance().managers);\n }\n}\n/**\n * Helper method for parsing a URL string into a scheme and a path.\n *\n * @param url E.g., 'localstorage://my-model'\n * @returns A dictionary with two fields: scheme and path.\n * Scheme: e.g., 'localstorage' in the example above.\n * Path: e.g., 'my-model' in the example above.\n */\nfunction parseURL(url) {\n if (url.indexOf(URL_SCHEME_SUFFIX) === -1) {\n throw new Error(`The url string provided does not contain a scheme. 
` +\n `Supported schemes are: ` +\n `${ModelStoreManagerRegistry.getSchemes().join(',')}`);\n }\n return {\n scheme: url.split(URL_SCHEME_SUFFIX)[0],\n path: url.split(URL_SCHEME_SUFFIX)[1],\n };\n}\nasync function cloneModelInternal(sourceURL, destURL, deleteSource = false) {\n assert(sourceURL !== destURL, () => `Old path and new path are the same: '${sourceURL}'`);\n const loadHandlers = IORouterRegistry.getLoadHandlers(sourceURL);\n assert(loadHandlers.length > 0, () => `Copying failed because no load handler is found for source URL ${sourceURL}.`);\n assert(loadHandlers.length < 2, () => `Copying failed because more than one (${loadHandlers.length}) ` +\n `load handlers for source URL ${sourceURL}.`);\n const loadHandler = loadHandlers[0];\n const saveHandlers = IORouterRegistry.getSaveHandlers(destURL);\n assert(saveHandlers.length > 0, () => `Copying failed because no save handler is found for destination ` +\n `URL ${destURL}.`);\n assert(saveHandlers.length < 2, () => `Copying failed because more than one (${loadHandlers.length}) ` +\n `save handlers for destination URL ${destURL}.`);\n const saveHandler = saveHandlers[0];\n const sourceScheme = parseURL(sourceURL).scheme;\n const sourcePath = parseURL(sourceURL).path;\n const sameMedium = sourceScheme === parseURL(sourceURL).scheme;\n const modelArtifacts = await loadHandler.load();\n // If moving within the same storage medium, remove the old model as soon as\n // the loading is done. Without doing this, it is possible that the combined\n // size of the two models will cause the cloning to fail.\n if (deleteSource && sameMedium) {\n await ModelStoreManagerRegistry.getManager(sourceScheme)\n .removeModel(sourcePath);\n }\n const saveResult = await saveHandler.save(modelArtifacts);\n // If moving between mediums, the deletion is done after the save succeeds.\n // This guards against the case in which saving to the destination medium\n // fails.\n if (deleteSource && !sameMedium) {\n await ModelStoreManagerRegistry.getManager(sourceScheme)\n .removeModel(sourcePath);\n }\n return saveResult.modelArtifactsInfo;\n}\n/**\n * List all models stored in registered storage mediums.\n *\n * For a web browser environment, the registered mediums are Local Storage and\n * IndexedDB.\n *\n * ```js\n * // First create and save a model.\n * const model = tf.sequential();\n * model.add(tf.layers.dense(\n * {units: 1, inputShape: [10], activation: 'sigmoid'}));\n * await model.save('localstorage://demo/management/model1');\n *\n * // Then list existing models.\n * console.log(JSON.stringify(await tf.io.listModels()));\n *\n * // Delete the model.\n * await tf.io.removeModel('localstorage://demo/management/model1');\n *\n * // List models again.\n * console.log(JSON.stringify(await tf.io.listModels()));\n * ```\n *\n * @returns A `Promise` of a dictionary mapping URLs of existing models to\n * their model artifacts info. URLs include medium-specific schemes, e.g.,\n * 'indexeddb://my/model/1'. 
Model artifacts info include type of the\n * model's topology, byte sizes of the topology, weights, etc.\n *\n * @doc {\n * heading: 'Models',\n * subheading: 'Management',\n * namespace: 'io',\n * ignoreCI: true\n * }\n */\nasync function listModels() {\n const schemes = ModelStoreManagerRegistry.getSchemes();\n const out = {};\n for (const scheme of schemes) {\n const schemeOut = await ModelStoreManagerRegistry.getManager(scheme).listModels();\n for (const path in schemeOut) {\n const url = scheme + URL_SCHEME_SUFFIX + path;\n out[url] = schemeOut[path];\n }\n }\n return out;\n}\n/**\n * Remove a model specified by URL from a reigstered storage medium.\n *\n * ```js\n * // First create and save a model.\n * const model = tf.sequential();\n * model.add(tf.layers.dense(\n * {units: 1, inputShape: [10], activation: 'sigmoid'}));\n * await model.save('localstorage://demo/management/model1');\n *\n * // Then list existing models.\n * console.log(JSON.stringify(await tf.io.listModels()));\n *\n * // Delete the model.\n * await tf.io.removeModel('localstorage://demo/management/model1');\n *\n * // List models again.\n * console.log(JSON.stringify(await tf.io.listModels()));\n * ```\n *\n * @param url A URL to a stored model, with a scheme prefix, e.g.,\n * 'localstorage://my-model-1', 'indexeddb://my/model/2'.\n * @returns ModelArtifactsInfo of the deleted model (if and only if deletion\n * is successful).\n * @throws Error if deletion fails, e.g., if no model exists at `path`.\n *\n * @doc {\n * heading: 'Models',\n * subheading: 'Management',\n * namespace: 'io',\n * ignoreCI: true\n * }\n */\nasync function removeModel(url) {\n const schemeAndPath = parseURL(url);\n const manager = ModelStoreManagerRegistry.getManager(schemeAndPath.scheme);\n return manager.removeModel(schemeAndPath.path);\n}\n/**\n * Copy a model from one URL to another.\n *\n * This function supports:\n *\n * 1. Copying within a storage medium, e.g.,\n * `tf.io.copyModel('localstorage://model-1', 'localstorage://model-2')`\n * 2. 
Copying between two storage mediums, e.g.,\n * `tf.io.copyModel('localstorage://model-1', 'indexeddb://model-1')`\n *\n * ```js\n * // First create and save a model.\n * const model = tf.sequential();\n * model.add(tf.layers.dense(\n * {units: 1, inputShape: [10], activation: 'sigmoid'}));\n * await model.save('localstorage://demo/management/model1');\n *\n * // Then list existing models.\n * console.log(JSON.stringify(await tf.io.listModels()));\n *\n * // Copy the model, from Local Storage to IndexedDB.\n * await tf.io.copyModel(\n * 'localstorage://demo/management/model1',\n * 'indexeddb://demo/management/model1');\n *\n * // List models again.\n * console.log(JSON.stringify(await tf.io.listModels()));\n *\n * // Remove both models.\n * await tf.io.removeModel('localstorage://demo/management/model1');\n * await tf.io.removeModel('indexeddb://demo/management/model1');\n * ```\n *\n * @param sourceURL Source URL of copying.\n * @param destURL Destination URL of copying.\n * @returns ModelArtifactsInfo of the copied model (if and only if copying\n * is successful).\n * @throws Error if copying fails, e.g., if no model exists at `sourceURL`, or\n * if `oldPath` and `newPath` are identical.\n *\n * @doc {\n * heading: 'Models',\n * subheading: 'Management',\n * namespace: 'io',\n * ignoreCI: true\n * }\n */\nasync function copyModel(sourceURL, destURL) {\n const deleteSource = false;\n return cloneModelInternal(sourceURL, destURL, deleteSource);\n}\n/**\n * Move a model from one URL to another.\n *\n * This function supports:\n *\n * 1. Moving within a storage medium, e.g.,\n * `tf.io.moveModel('localstorage://model-1', 'localstorage://model-2')`\n * 2. Moving between two storage mediums, e.g.,\n * `tf.io.moveModel('localstorage://model-1', 'indexeddb://model-1')`\n *\n * ```js\n * // First create and save a model.\n * const model = tf.sequential();\n * model.add(tf.layers.dense(\n * {units: 1, inputShape: [10], activation: 'sigmoid'}));\n * await model.save('localstorage://demo/management/model1');\n *\n * // Then list existing models.\n * console.log(JSON.stringify(await tf.io.listModels()));\n *\n * // Move the model, from Local Storage to IndexedDB.\n * await tf.io.moveModel(\n * 'localstorage://demo/management/model1',\n * 'indexeddb://demo/management/model1');\n *\n * // List models again.\n * console.log(JSON.stringify(await tf.io.listModels()));\n *\n * // Remove the moved model.\n * await tf.io.removeModel('indexeddb://demo/management/model1');\n * ```\n *\n * @param sourceURL Source URL of moving.\n * @param destURL Destination URL of moving.\n * @returns ModelArtifactsInfo of the copied model (if and only if copying\n * is successful).\n * @throws Error if moving fails, e.g., if no model exists at `sourceURL`, or\n * if `oldPath` and `newPath` are identical.\n *\n * @doc {\n * heading: 'Models',\n * subheading: 'Management',\n * namespace: 'io',\n * ignoreCI: true\n * }\n */\nasync function moveModel(sourceURL, destURL) {\n const deleteSource = true;\n return cloneModelInternal(sourceURL, destURL, deleteSource);\n}\nexport { moveModel, copyModel, removeModel, listModels };\n//# sourceMappingURL=model_management.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport '../flags';\nimport { env } from '../environment';\nimport { BrowserIndexedDB, BrowserIndexedDBManager } from '../io/indexed_db';\nimport { BrowserLocalStorage, BrowserLocalStorageManager } from '../io/local_storage';\nimport { ModelStoreManagerRegistry } from '../io/model_management';\nexport class PlatformBrowser {\n fetch(path, init) {\n return fetch(path, init);\n }\n now() {\n return performance.now();\n }\n encode(text, encoding) {\n if (encoding !== 'utf-8' && encoding !== 'utf8') {\n throw new Error(`Browser's encoder only supports utf-8, but got ${encoding}`);\n }\n if (this.textEncoder == null) {\n this.textEncoder = new TextEncoder();\n }\n return this.textEncoder.encode(text);\n }\n decode(bytes, encoding) {\n return new TextDecoder(encoding).decode(bytes);\n }\n}\nif (env().get('IS_BROWSER')) {\n env().setPlatform('browser', new PlatformBrowser());\n // Register LocalStorage IOHandler\n try {\n ModelStoreManagerRegistry.registerManager(BrowserLocalStorage.URL_SCHEME, new BrowserLocalStorageManager());\n }\n catch (err) {\n }\n // Register IndexedDB IOHandler\n try {\n ModelStoreManagerRegistry.registerManager(BrowserIndexedDB.URL_SCHEME, new BrowserIndexedDBManager());\n }\n catch (err) {\n }\n}\n//# sourceMappingURL=platform_browser.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
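Editor's note: a small sketch of what the browser-platform registration block above implies at runtime; it assumes the `tf` namespace and an async context, and that both storage managers registered without error:

```js
// In a browser, importing tfjs-core runs the registration block above, so
// both storage schemes are available without extra setup:
console.log(tf.env().getBool('IS_BROWSER'));   // true
console.log(await tf.io.listModels());
// keys use the registered schemes, e.g. 'localstorage://...' or 'indexeddb://...'
```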
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from '../environment';\n// We are wrapping this within an object so it can be stubbed by Jasmine.\nexport const getNodeFetch = {\n // tslint:disable-next-line:no-require-imports\n importFetch: () => require('node-fetch')\n};\nlet systemFetch;\n// These getters and setters are for testing so we don't export a mutable\n// variable.\nexport function resetSystemFetch() {\n systemFetch = null;\n}\nexport function setSystemFetch(fetchFn) {\n systemFetch = fetchFn;\n}\nexport function getSystemFetch() {\n return systemFetch;\n}\nexport class PlatformNode {\n constructor() {\n // tslint:disable-next-line:no-require-imports\n this.util = require('util');\n // According to the spec, the built-in encoder can do only UTF-8 encoding.\n // https://developer.mozilla.org/en-US/docs/Web/API/TextEncoder/TextEncoder\n this.textEncoder = new this.util.TextEncoder();\n }\n fetch(path, requestInits) {\n if (env().global.fetch != null) {\n return env().global.fetch(path, requestInits);\n }\n if (systemFetch == null) {\n systemFetch = getNodeFetch.importFetch();\n }\n return systemFetch(path, requestInits);\n }\n now() {\n const time = process.hrtime();\n return time[0] * 1000 + time[1] / 1000000;\n }\n encode(text, encoding) {\n if (encoding !== 'utf-8' && encoding !== 'utf8') {\n throw new Error(`Node built-in encoder only supports utf-8, but got ${encoding}`);\n }\n return this.textEncoder.encode(text);\n }\n decode(bytes, encoding) {\n if (bytes.length === 0) {\n return '';\n }\n return new this.util.TextDecoder(encoding).decode(bytes);\n }\n}\nif (env().get('IS_NODE')) {\n env().setPlatform('node', new PlatformNode());\n}\n//# sourceMappingURL=platform_node.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { TensorBuffer } from '../tensor';\nimport * as util from '../util';\n/**\n * Creates an empty `tf.TensorBuffer` with the specified `shape` and `dtype`.\n *\n * The values are stored in CPU as `TypedArray`. 
Fill the buffer using\n * `buffer.set()`, or by modifying directly `buffer.values`.\n *\n * When done, call `buffer.toTensor()` to get an immutable `tf.Tensor` with\n * those values.\n *\n * ```js\n * // Create a buffer and set values at particular indices.\n * const buffer = tf.buffer([2, 2]);\n * buffer.set(3, 0, 0);\n * buffer.set(5, 1, 0);\n *\n * // Convert the buffer back to a tensor.\n * buffer.toTensor().print();\n * ```\n *\n * @param shape An array of integers defining the output tensor shape.\n * @param dtype The dtype of the buffer. Defaults to 'float32'.\n * @param values The values of the buffer as `TypedArray`. Defaults to\n * zeros.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function buffer(shape, dtype = 'float32', values) {\n dtype = dtype || 'float32';\n util.assertNonNegativeIntegerDimensions(shape);\n return new TensorBuffer(shape, dtype, values);\n}\n//# sourceMappingURL=buffer.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Cast } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * Casts a `tf.Tensor` to a new dtype.\n *\n * ```js\n * const x = tf.tensor1d([1.5, 2.5, 3]);\n * tf.cast(x, 'int32').print();\n * ```\n * @param x The input tensor to be casted.\n * @param dtype The dtype to cast the input tensor to.\n *\n * @doc {heading: 'Tensors', subheading: 'Transformations'}\n */\nfunction cast_(x, dtype) {\n const $x = convertToTensor(x, 'x', 'cast');\n // Sanity checks.\n if (!util.isValidDtype(dtype)) {\n throw new Error(`Failed to cast to unknown dtype ${dtype}`);\n }\n if (dtype === 'string' && $x.dtype !== 'string' ||\n dtype !== 'string' && $x.dtype === 'string') {\n throw new Error('Only strings can be casted to strings');\n }\n const inputs = { x: $x };\n const attrs = { dtype };\n return ENGINE.runKernelFunc(backend => backend.cast($x, dtype), inputs, null /* grad */, Cast, attrs);\n}\nexport const cast = op({ cast_ });\n//# sourceMappingURL=cast.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Identity } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Creates a new tensor with the same values and shape as the specified\n * tensor.\n *\n * ```js\n * const x = tf.tensor([1, 2]);\n *\n * x.clone().print();\n * ```\n *\n * @param x The tensor to clone.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nfunction clone_(x) {\n const $x = convertToTensor(x, 'x', 'clone', null);\n const forward = () => ENGINE.makeTensorFromDataId($x.dataId, $x.shape, $x.dtype);\n const inputs = { x: $x };\n // Note this op is called tf.identity in python. Hence the kernel name used\n // here.\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Identity);\n}\nexport const clone = op({ clone_ });\n//# sourceMappingURL=clone.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * Prints information about the `tf.Tensor` including its data.\n *\n * ```js\n * const verbose = true;\n * tf.tensor2d([1, 2, 3, 4], [2, 2]).print(verbose);\n * ```\n * @param x The tensor to be printed.\n * @param verbose Whether to print verbose information about the ` Tensor`,\n * including dtype and size.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function print(x, verbose = false) {\n console.log(x.toString(verbose));\n}\n//# sourceMappingURL=print.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// Required side effectful code for tfjs-core\n// Set up Engine and ENV\nimport { getOrMakeEngine } from './engine';\ngetOrMakeEngine();\n// Register backend-agnostic flags.\nimport './flags';\n// Register platforms\nimport './platforms/platform_browser';\nimport './platforms/platform_node';\n// Set up OpHandler\nimport { buffer } from './ops/buffer';\nimport { cast } from './ops/cast';\nimport { clone } from './ops/clone';\nimport { print } from './ops/print';\nimport { setOpHandler } from './tensor';\nconst opHandler = {\n buffer,\n cast,\n clone,\n print\n};\nsetOpHandler(opHandler);\n//# sourceMappingURL=base_side_effects.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * IOHandlers related to files, such as browser-triggered file downloads,\n * user-selected files in browser.\n */\nimport '../flags';\nimport { env } from '../environment';\nimport { basename, concatenateArrayBuffers, getModelArtifactsInfoForJSON } from './io_utils';\nimport { IORouterRegistry } from './router_registry';\nconst DEFAULT_FILE_NAME_PREFIX = 'model';\nconst DEFAULT_JSON_EXTENSION_NAME = '.json';\nconst DEFAULT_WEIGHT_DATA_EXTENSION_NAME = '.weights.bin';\nfunction defer(f) {\n return new Promise(resolve => setTimeout(resolve)).then(f);\n}\nexport class BrowserDownloads {\n constructor(fileNamePrefix) {\n if (!env().getBool('IS_BROWSER')) {\n // TODO(cais): Provide info on what IOHandlers are available under the\n // current environment.\n throw new Error('browserDownloads() cannot proceed because the current environment ' +\n 'is not a browser.');\n }\n if (fileNamePrefix.startsWith(BrowserDownloads.URL_SCHEME)) {\n fileNamePrefix = fileNamePrefix.slice(BrowserDownloads.URL_SCHEME.length);\n }\n if (fileNamePrefix == null || fileNamePrefix.length === 0) {\n fileNamePrefix = DEFAULT_FILE_NAME_PREFIX;\n }\n this.modelTopologyFileName = fileNamePrefix + DEFAULT_JSON_EXTENSION_NAME;\n this.weightDataFileName =\n fileNamePrefix + DEFAULT_WEIGHT_DATA_EXTENSION_NAME;\n }\n async save(modelArtifacts) {\n if (typeof (document) === 'undefined') {\n throw new Error('Browser downloads are not supported in ' +\n 'this environment since `document` is not present');\n }\n const 
weightsURL = window.URL.createObjectURL(new Blob([modelArtifacts.weightData], { type: 'application/octet-stream' }));\n if (modelArtifacts.modelTopology instanceof ArrayBuffer) {\n throw new Error('BrowserDownloads.save() does not support saving model topology ' +\n 'in binary formats yet.');\n }\n else {\n const weightsManifest = [{\n paths: ['./' + this.weightDataFileName],\n weights: modelArtifacts.weightSpecs\n }];\n const modelTopologyAndWeightManifest = {\n modelTopology: modelArtifacts.modelTopology,\n format: modelArtifacts.format,\n generatedBy: modelArtifacts.generatedBy,\n convertedBy: modelArtifacts.convertedBy,\n weightsManifest\n };\n const modelTopologyAndWeightManifestURL = window.URL.createObjectURL(new Blob([JSON.stringify(modelTopologyAndWeightManifest)], { type: 'application/json' }));\n // If anchor elements are not provided, create them without attaching them\n // to parents, so that the downloaded file names can be controlled.\n const jsonAnchor = this.jsonAnchor == null ? document.createElement('a') :\n this.jsonAnchor;\n jsonAnchor.download = this.modelTopologyFileName;\n jsonAnchor.href = modelTopologyAndWeightManifestURL;\n // Trigger downloads by evoking a click event on the download anchors.\n // When multiple downloads are started synchronously, Firefox will only\n // save the last one.\n await defer(() => jsonAnchor.dispatchEvent(new MouseEvent('click')));\n if (modelArtifacts.weightData != null) {\n const weightDataAnchor = this.weightDataAnchor == null ?\n document.createElement('a') :\n this.weightDataAnchor;\n weightDataAnchor.download = this.weightDataFileName;\n weightDataAnchor.href = weightsURL;\n await defer(() => weightDataAnchor.dispatchEvent(new MouseEvent('click')));\n }\n return { modelArtifactsInfo: getModelArtifactsInfoForJSON(modelArtifacts) };\n }\n }\n}\nBrowserDownloads.URL_SCHEME = 'downloads://';\nclass BrowserFiles {\n constructor(files) {\n if (files == null || files.length < 1) {\n throw new Error(`When calling browserFiles, at least 1 file is required, ` +\n `but received ${files}`);\n }\n this.files = files;\n }\n async load() {\n const jsonFile = this.files[0];\n const weightFiles = this.files.slice(1);\n return new Promise((resolve, reject) => {\n const jsonReader = new FileReader();\n jsonReader.onload = (event) => {\n // tslint:disable-next-line:no-any\n const modelJSON = JSON.parse(event.target.result);\n const modelTopology = modelJSON.modelTopology;\n if (modelTopology == null) {\n reject(new Error(`modelTopology field is missing from file ${jsonFile.name}`));\n return;\n }\n if (weightFiles.length === 0) {\n resolve({ modelTopology });\n }\n const weightsManifest = modelJSON.weightsManifest;\n if (weightsManifest == null) {\n reject(new Error(`weightManifest field is missing from file ${jsonFile.name}`));\n return;\n }\n let pathToFile;\n try {\n pathToFile =\n this.checkManifestAndWeightFiles(weightsManifest, weightFiles);\n }\n catch (err) {\n reject(err);\n return;\n }\n const weightSpecs = [];\n const paths = [];\n const perFileBuffers = [];\n weightsManifest.forEach(weightsGroup => {\n weightsGroup.paths.forEach(path => {\n paths.push(path);\n perFileBuffers.push(null);\n });\n weightSpecs.push(...weightsGroup.weights);\n });\n weightsManifest.forEach(weightsGroup => {\n weightsGroup.paths.forEach(path => {\n const weightFileReader = new FileReader();\n weightFileReader.onload = (event) => {\n // tslint:disable-next-line:no-any\n const weightData = event.target.result;\n const index = paths.indexOf(path);\n 
perFileBuffers[index] = weightData;\n if (perFileBuffers.indexOf(null) === -1) {\n resolve({\n modelTopology,\n weightSpecs,\n weightData: concatenateArrayBuffers(perFileBuffers),\n format: modelJSON.format,\n generatedBy: modelJSON.generatedBy,\n convertedBy: modelJSON.convertedBy,\n userDefinedMetadata: modelJSON.userDefinedMetadata\n });\n }\n };\n weightFileReader.onerror = error => reject(`Failed to weights data from file of path '${path}'.`);\n weightFileReader.readAsArrayBuffer(pathToFile[path]);\n });\n });\n };\n jsonReader.onerror = error => reject(`Failed to read model topology and weights manifest JSON ` +\n `from file '${jsonFile.name}'. BrowserFiles supports loading ` +\n `Keras-style tf.Model artifacts only.`);\n jsonReader.readAsText(jsonFile);\n });\n }\n /**\n * Check the compatibility between weights manifest and weight files.\n */\n checkManifestAndWeightFiles(manifest, files) {\n const basenames = [];\n const fileNames = files.map(file => basename(file.name));\n const pathToFile = {};\n for (const group of manifest) {\n group.paths.forEach(path => {\n const pathBasename = basename(path);\n if (basenames.indexOf(pathBasename) !== -1) {\n throw new Error(`Duplicate file basename found in weights manifest: ` +\n `'${pathBasename}'`);\n }\n basenames.push(pathBasename);\n if (fileNames.indexOf(pathBasename) === -1) {\n throw new Error(`Weight file with basename '${pathBasename}' is not provided.`);\n }\n else {\n pathToFile[path] = files[fileNames.indexOf(pathBasename)];\n }\n });\n }\n if (basenames.length !== files.length) {\n throw new Error(`Mismatch in the number of files in weights manifest ` +\n `(${basenames.length}) and the number of weight files provided ` +\n `(${files.length}).`);\n }\n return pathToFile;\n }\n}\nexport const browserDownloadsRouter = (url) => {\n if (!env().getBool('IS_BROWSER')) {\n return null;\n }\n else {\n if (!Array.isArray(url) && url.startsWith(BrowserDownloads.URL_SCHEME)) {\n return browserDownloads(url.slice(BrowserDownloads.URL_SCHEME.length));\n }\n else {\n return null;\n }\n }\n};\nIORouterRegistry.registerSaveRouter(browserDownloadsRouter);\n/**\n * Creates an IOHandler that triggers file downloads from the browser.\n *\n * The returned `IOHandler` instance can be used as model exporting methods such\n * as `tf.Model.save` and supports only saving.\n *\n * ```js\n * const model = tf.sequential();\n * model.add(tf.layers.dense(\n * {units: 1, inputShape: [10], activation: 'sigmoid'}));\n * const saveResult = await model.save('downloads://mymodel');\n * // This will trigger downloading of two files:\n * // 'mymodel.json' and 'mymodel.weights.bin'.\n * console.log(saveResult);\n * ```\n *\n * @param fileNamePrefix Prefix name of the files to be downloaded. For use with\n * `tf.Model`, `fileNamePrefix` should follow either of the following two\n * formats:\n * 1. `null` or `undefined`, in which case the default file\n * names will be used:\n * - 'model.json' for the JSON file containing the model topology and\n * weights manifest.\n * - 'model.weights.bin' for the binary file containing the binary weight\n * values.\n * 2. 
A single string or an Array of a single string, as the file name prefix.\n * For example, if `'foo'` is provided, the downloaded JSON\n * file and binary weights file will be named 'foo.json' and\n * 'foo.weights.bin', respectively.\n * @param config Additional configuration for triggering downloads.\n * @returns An instance of `BrowserDownloads` `IOHandler`.\n *\n * @doc {\n * heading: 'Models',\n * subheading: 'Loading',\n * namespace: 'io',\n * ignoreCI: true\n * }\n */\nexport function browserDownloads(fileNamePrefix = 'model') {\n return new BrowserDownloads(fileNamePrefix);\n}\n/**\n * Creates an IOHandler that loads model artifacts from user-selected files.\n *\n * This method can be used for loading from files such as user-selected files\n * in the browser.\n * When used in conjunction with `tf.loadLayersModel`, an instance of\n * `tf.LayersModel` (Keras-style) can be constructed from the loaded artifacts.\n *\n * ```js\n * // Note: This code snippet won't run properly without the actual file input\n * // elements in the HTML DOM.\n *\n * // Suppose there are two HTML file input (` `)\n * // elements.\n * const uploadJSONInput = document.getElementById('upload-json');\n * const uploadWeightsInput = document.getElementById('upload-weights');\n * const model = await tf.loadLayersModel(tf.io.browserFiles(\n * [uploadJSONInput.files[0], uploadWeightsInput.files[0]]));\n * ```\n *\n * @param files `File`s to load from. Currently, this function supports only\n * loading from files that contain Keras-style models (i.e., `tf.Model`s), for\n * which an `Array` of `File`s is expected (in that order):\n * - A JSON file containing the model topology and weight manifest.\n * - Optionally, One or more binary files containing the binary weights.\n * These files must have names that match the paths in the `weightsManifest`\n * contained by the aforementioned JSON file, or errors will be thrown\n * during loading. These weights files have the same format as the ones\n * generated by `tensorflowjs_converter` that comes with the `tensorflowjs`\n * Python PIP package. If no weights files are provided, only the model\n * topology will be loaded from the JSON file above.\n * @returns An instance of `Files` `IOHandler`.\n *\n * @doc {\n * heading: 'Models',\n * subheading: 'Loading',\n * namespace: 'io',\n * ignoreCI: true\n * }\n */\nexport function browserFiles(files) {\n return new BrowserFiles(files);\n}\n//# sourceMappingURL=browser_files.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { assert } from '../util';\n/**\n * Monitor Promise.all progress, fire onProgress callback function.\n *\n * @param promises Promise list going to be monitored\n * @param onProgress Callback function. Fired when a promise resolved.\n * @param startFraction Optional fraction start. Default to 0.\n * @param endFraction Optional fraction end. 
Default to 1.\n */\nexport function monitorPromisesProgress(promises, onProgress, startFraction, endFraction) {\n checkPromises(promises);\n startFraction = startFraction == null ? 0 : startFraction;\n endFraction = endFraction == null ? 1 : endFraction;\n checkFraction(startFraction, endFraction);\n let resolvedPromise = 0;\n const registerMonitor = (promise) => {\n promise.then(value => {\n const fraction = startFraction +\n ++resolvedPromise / promises.length * (endFraction - startFraction);\n // pass fraction as parameter to callback function.\n onProgress(fraction);\n return value;\n });\n return promise;\n };\n function checkPromises(promises) {\n assert(promises != null && Array.isArray(promises) && promises.length > 0, () => 'promises must be a none empty array');\n }\n function checkFraction(startFraction, endFraction) {\n assert(startFraction >= 0 && startFraction <= 1, () => `Progress fraction must be in range [0, 1], but ` +\n `got startFraction ${startFraction}`);\n assert(endFraction >= 0 && endFraction <= 1, () => `Progress fraction must be in range [0, 1], but ` +\n `got endFraction ${endFraction}`);\n assert(endFraction >= startFraction, () => `startFraction must be no more than endFraction, but ` +\n `got startFraction ${startFraction} and endFraction ` +\n `${endFraction}`);\n }\n return Promise.all(promises.map(registerMonitor));\n}\n//# sourceMappingURL=progress.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from '../environment';\nimport * as util from '../util';\nimport { decodeWeights } from './io_utils';\nimport { monitorPromisesProgress } from './progress';\nimport { DTYPE_VALUE_SIZE_MAP } from './types';\n/**\n * Reads binary weights data from a number of URLs.\n *\n * @param fetchURLs URLs to send the HTTP requests at, using `fetch` calls.\n * @param requestOptions RequestInit (options) for the HTTP requests.\n * @param fetchFunc Optional overriding value for the `window.fetch` function.\n * @param onProgress Optional, progress callback function, fired periodically\n * before the load is completed.\n * @returns A `Promise` of an Array of `ArrayBuffer`. The Array has the same\n * length as `fetchURLs`.\n */\nexport async function loadWeightsAsArrayBuffer(fetchURLs, loadOptions) {\n if (loadOptions == null) {\n loadOptions = {};\n }\n const fetchFunc = loadOptions.fetchFunc == null ? 
env().platform.fetch :\n loadOptions.fetchFunc;\n // Create the requests for all of the weights in parallel.\n const requests = fetchURLs.map(fetchURL => fetchFunc(fetchURL, loadOptions.requestInit, { isBinary: true }));\n const fetchStartFraction = 0;\n const fetchEndFraction = 0.5;\n const responses = loadOptions.onProgress == null ?\n await Promise.all(requests) :\n await monitorPromisesProgress(requests, loadOptions.onProgress, fetchStartFraction, fetchEndFraction);\n const bufferPromises = responses.map(response => response.arrayBuffer());\n const bufferStartFraction = 0.5;\n const bufferEndFraction = 1;\n const buffers = loadOptions.onProgress == null ?\n await Promise.all(bufferPromises) :\n await monitorPromisesProgress(bufferPromises, loadOptions.onProgress, bufferStartFraction, bufferEndFraction);\n return buffers;\n}\n/**\n * Reads a weights manifest JSON configuration, fetches the weights and\n * returns them as `Tensor`s.\n *\n * @param manifest The weights manifest JSON.\n * @param filePathPrefix The path prefix for filenames given in the manifest.\n * Defaults to the empty string.\n * @param weightNames The names of the weights to be fetched.\n */\nexport async function loadWeights(manifest, filePathPrefix = '', weightNames, requestInit) {\n // TODO(nsthorat): Groups are currently fetched atomically. If you need a\n // single weight from a group, the whole group will be fetched. At a future\n // date, we should support fetching only the individual shards within a\n // group that are needed to reconstruct the requested weight.\n // TODO(cais): Use `decodeWeights` for implementation.\n const fetchWeights = (fetchUrls) => loadWeightsAsArrayBuffer(fetchUrls, { requestInit });\n const loadWeights = weightsLoaderFactory(fetchWeights);\n return loadWeights(manifest, filePathPrefix, weightNames);\n}\n/**\n * Creates a function, which reads a weights manifest JSON configuration,\n * fetches the weight files using the specified function and returns them as\n * `Tensor`s.\n *\n * ```js\n * // example for creating a nodejs weight loader, which reads the weight files\n * // from disk using fs.readFileSync\n *\n * import * as fs from 'fs'\n *\n * const fetchWeightsFromDisk = (filePaths: string[]) =>\n * filePaths.map(filePath => fs.readFileSync(filePath).buffer)\n *\n * const loadWeights = tf.io.weightsLoaderFactory(fetchWeightsFromDisk)\n *\n * const manifest = JSON.parse(\n * fs.readFileSync('./my_model-weights_manifest').toString()\n * )\n * const weightMap = await loadWeights(manifest, './')\n * ```\n * @param fetchWeightsFunction The function used for fetching the weight files.\n * @returns Weight loading function.\n */\nexport function weightsLoaderFactory(fetchWeightsFunction) {\n return async (manifest, filePathPrefix = '', weightNames) => {\n // Collect all the groups, weights, and their relative offsets to be\n // fetched.\n const groupIndicesToFetchMap = manifest.map(() => false);\n const groupWeightsToFetch = {};\n const weightsFound = weightNames != null ? 
weightNames.map(() => false) : [];\n const allManifestWeightNames = [];\n manifest.forEach((manifestGroupConfig, groupIndex) => {\n let groupOffset = 0;\n manifestGroupConfig.weights.forEach(weightsEntry => {\n const rawDtype = ('quantization' in weightsEntry) ?\n weightsEntry.quantization.dtype :\n weightsEntry.dtype;\n const weightsBytes = DTYPE_VALUE_SIZE_MAP[rawDtype] *\n util.sizeFromShape(weightsEntry.shape);\n const enqueueWeightsForFetchingFn = () => {\n groupIndicesToFetchMap[groupIndex] = true;\n if (groupWeightsToFetch[groupIndex] == null) {\n groupWeightsToFetch[groupIndex] = [];\n }\n groupWeightsToFetch[groupIndex].push({\n manifestEntry: weightsEntry,\n groupOffset,\n sizeBytes: weightsBytes\n });\n };\n if (weightNames != null) {\n weightNames.forEach((weightName, weightIndex) => {\n if (weightName === weightsEntry.name) {\n enqueueWeightsForFetchingFn();\n weightsFound[weightIndex] = true;\n }\n });\n }\n else {\n enqueueWeightsForFetchingFn();\n }\n allManifestWeightNames.push(weightsEntry.name);\n groupOffset += weightsBytes;\n });\n });\n if (!weightsFound.every(found => found)) {\n const weightsNotFound = weightNames.filter((_, i) => !weightsFound[i]);\n throw new Error(`Could not find weights in manifest with names: ` +\n `${weightsNotFound.join(', ')}. \\n` +\n `Manifest JSON has weights with names: ` +\n `${allManifestWeightNames.join(', ')}.`);\n }\n // Convert the one-hot boolean groupId => shouldFetch map to a list of group\n // IDs.\n const groupIndicesToFetch = groupIndicesToFetchMap.reduce((accumulator, shouldFetch, i) => {\n if (shouldFetch) {\n accumulator.push(i);\n }\n return accumulator;\n }, []);\n const fetchUrls = [];\n groupIndicesToFetch.forEach(i => {\n manifest[i].paths.forEach(filepath => {\n const fetchUrl = filePathPrefix +\n (!filePathPrefix.endsWith('/') ? '/' : '') + filepath;\n fetchUrls.push(fetchUrl);\n });\n });\n const buffers = await fetchWeightsFunction(fetchUrls);\n const weightsTensorMap = {};\n let bufferIndexOffset = 0;\n groupIndicesToFetch.forEach(i => {\n const numBuffers = manifest[i].paths.length;\n let groupBytes = 0;\n for (let i = 0; i < numBuffers; i++) {\n groupBytes += buffers[bufferIndexOffset + i].byteLength;\n }\n // Create a buffer for the whole group.\n const groupBuffer = new ArrayBuffer(groupBytes);\n const groupByteBuffer = new Uint8Array(groupBuffer);\n let groupBufferOffset = 0;\n for (let i = 0; i < numBuffers; i++) {\n const buffer = new Uint8Array(buffers[bufferIndexOffset + i]);\n groupByteBuffer.set(buffer, groupBufferOffset);\n groupBufferOffset += buffer.byteLength;\n }\n const weightsEntries = groupWeightsToFetch[i];\n weightsEntries.forEach(weightsEntry => {\n const byteBuffer = groupBuffer.slice(weightsEntry.groupOffset, weightsEntry.groupOffset + weightsEntry.sizeBytes);\n const nameToTensorMap = decodeWeights(byteBuffer, [weightsEntry.manifestEntry]);\n for (const name in nameToTensorMap) {\n weightsTensorMap[name] = nameToTensorMap[name];\n }\n });\n bufferIndexOffset += numBuffers;\n });\n return weightsTensorMap;\n };\n}\n//# sourceMappingURL=weights_loader.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * IOHandler implementations based on HTTP requests in the web browser.\n *\n * Uses [`fetch`](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API).\n */\nimport { env } from '../environment';\nimport { assert } from '../util';\nimport { concatenateArrayBuffers, getModelArtifactsInfoForJSON } from './io_utils';\nimport { IORouterRegistry } from './router_registry';\nimport { loadWeightsAsArrayBuffer } from './weights_loader';\nconst OCTET_STREAM_MIME_TYPE = 'application/octet-stream';\nconst JSON_TYPE = 'application/json';\nexport class HTTPRequest {\n constructor(path, loadOptions) {\n this.DEFAULT_METHOD = 'POST';\n if (loadOptions == null) {\n loadOptions = {};\n }\n this.weightPathPrefix = loadOptions.weightPathPrefix;\n this.onProgress = loadOptions.onProgress;\n this.weightUrlConverter = loadOptions.weightUrlConverter;\n if (loadOptions.fetchFunc != null) {\n assert(typeof loadOptions.fetchFunc === 'function', () => 'Must pass a function that matches the signature of ' +\n '`fetch` (see ' +\n 'https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API)');\n this.fetch = loadOptions.fetchFunc;\n }\n else {\n this.fetch = env().platform.fetch;\n }\n assert(path != null && path.length > 0, () => 'URL path for http must not be null, undefined or ' +\n 'empty.');\n if (Array.isArray(path)) {\n assert(path.length === 2, () => 'URL paths for http must have a length of 2, ' +\n `(actual length is ${path.length}).`);\n }\n this.path = path;\n if (loadOptions.requestInit != null &&\n loadOptions.requestInit.body != null) {\n throw new Error('requestInit is expected to have no pre-existing body, but has one.');\n }\n this.requestInit = loadOptions.requestInit || {};\n }\n async save(modelArtifacts) {\n if (modelArtifacts.modelTopology instanceof ArrayBuffer) {\n throw new Error('BrowserHTTPRequest.save() does not support saving model topology ' +\n 'in binary formats yet.');\n }\n const init = Object.assign({ method: this.DEFAULT_METHOD }, this.requestInit);\n init.body = new FormData();\n const weightsManifest = [{\n paths: ['./model.weights.bin'],\n weights: modelArtifacts.weightSpecs,\n }];\n const modelTopologyAndWeightManifest = {\n modelTopology: modelArtifacts.modelTopology,\n format: modelArtifacts.format,\n generatedBy: modelArtifacts.generatedBy,\n convertedBy: modelArtifacts.convertedBy,\n userDefinedMetadata: modelArtifacts.userDefinedMetadata,\n weightsManifest\n };\n init.body.append('model.json', new Blob([JSON.stringify(modelTopologyAndWeightManifest)], { type: JSON_TYPE }), 'model.json');\n if (modelArtifacts.weightData != null) {\n init.body.append('model.weights.bin', new Blob([modelArtifacts.weightData], { type: OCTET_STREAM_MIME_TYPE }), 'model.weights.bin');\n }\n const response = await this.fetch(this.path, init);\n if (response.ok) {\n return {\n modelArtifactsInfo: 
getModelArtifactsInfoForJSON(modelArtifacts),\n responses: [response],\n };\n }\n else {\n throw new Error(`BrowserHTTPRequest.save() failed due to HTTP response status ` +\n `${response.status}.`);\n }\n }\n /**\n * Load model artifacts via HTTP request(s).\n *\n * See the documentation to `tf.io.http` for details on the saved\n * artifacts.\n *\n * @returns The loaded model artifacts (if loading succeeds).\n */\n async load() {\n const modelConfigRequest = await this.fetch(this.path, this.requestInit);\n if (!modelConfigRequest.ok) {\n throw new Error(`Request to ${this.path} failed with status code ` +\n `${modelConfigRequest.status}. Please verify this URL points to ` +\n `the model JSON of the model to load.`);\n }\n let modelConfig;\n try {\n modelConfig = await modelConfigRequest.json();\n }\n catch (e) {\n let message = `Failed to parse model JSON of response from ${this.path}.`;\n // TODO(nsthorat): Remove this after some time when we're comfortable that\n // .pb files are mostly gone.\n if (this.path.endsWith('.pb')) {\n message += ' Your path contains a .pb file extension. ' +\n 'Support for .pb models have been removed in TensorFlow.js 1.0 ' +\n 'in favor of .json models. You can re-convert your Python ' +\n 'TensorFlow model using the TensorFlow.js 1.0 conversion scripts ' +\n 'or you can convert your.pb models with the \\'pb2json\\'' +\n 'NPM script in the tensorflow/tfjs-converter repository.';\n }\n else {\n message += ' Please make sure the server is serving valid ' +\n 'JSON for this request.';\n }\n throw new Error(message);\n }\n const modelTopology = modelConfig.modelTopology;\n const weightsManifest = modelConfig.weightsManifest;\n const generatedBy = modelConfig.generatedBy;\n const convertedBy = modelConfig.convertedBy;\n const format = modelConfig.format;\n const userDefinedMetadata = modelConfig.userDefinedMetadata;\n // We do not allow both modelTopology and weightsManifest to be missing.\n if (modelTopology == null && weightsManifest == null) {\n throw new Error(`The JSON from HTTP path ${this.path} contains neither model ` +\n `topology or manifest for weights.`);\n }\n let weightSpecs;\n let weightData;\n if (weightsManifest != null) {\n const results = await this.loadWeights(weightsManifest);\n [weightSpecs, weightData] = results;\n }\n const artifacts = {\n modelTopology,\n weightSpecs,\n weightData,\n userDefinedMetadata,\n generatedBy,\n convertedBy,\n format\n };\n const initializer = modelConfig.modelInitializer;\n if (initializer) {\n artifacts.modelInitializer = initializer;\n }\n return artifacts;\n }\n async loadWeights(weightsManifest) {\n const weightPath = Array.isArray(this.path) ? 
this.path[1] : this.path;\n const [prefix, suffix] = parseUrl(weightPath);\n const pathPrefix = this.weightPathPrefix || prefix;\n const weightSpecs = [];\n for (const entry of weightsManifest) {\n weightSpecs.push(...entry.weights);\n }\n const fetchURLs = [];\n const urlPromises = [];\n for (const weightsGroup of weightsManifest) {\n for (const path of weightsGroup.paths) {\n if (this.weightUrlConverter != null) {\n urlPromises.push(this.weightUrlConverter(path));\n }\n else {\n fetchURLs.push(pathPrefix + path + suffix);\n }\n }\n }\n if (this.weightUrlConverter) {\n fetchURLs.push(...await Promise.all(urlPromises));\n }\n const buffers = await loadWeightsAsArrayBuffer(fetchURLs, {\n requestInit: this.requestInit,\n fetchFunc: this.fetch,\n onProgress: this.onProgress\n });\n return [weightSpecs, concatenateArrayBuffers(buffers)];\n }\n}\nHTTPRequest.URL_SCHEME_REGEX = /^https?:\\/\\//;\n/**\n * Extract the prefix and suffix of the url, where the prefix is the path before\n * the last file, and suffix is the search params after the last file.\n * ```\n * const url = 'http://tfhub.dev/model/1/tensorflowjs_model.pb?tfjs-format=file'\n * [prefix, suffix] = parseUrl(url)\n * // prefix = 'http://tfhub.dev/model/1/'\n * // suffix = '?tfjs-format=file'\n * ```\n * @param url the model url to be parsed.\n */\nexport function parseUrl(url) {\n const lastSlash = url.lastIndexOf('/');\n const lastSearchParam = url.lastIndexOf('?');\n const prefix = url.substring(0, lastSlash);\n const suffix = lastSearchParam > lastSlash ? url.substring(lastSearchParam) : '';\n return [prefix + '/', suffix];\n}\nexport function isHTTPScheme(url) {\n return url.match(HTTPRequest.URL_SCHEME_REGEX) != null;\n}\nexport const httpRouter = (url, loadOptions) => {\n if (typeof fetch === 'undefined' &&\n (loadOptions == null || loadOptions.fetchFunc == null)) {\n // `http` uses `fetch` or `node-fetch`, if one wants to use it in\n // an environment that is not the browser or node they have to setup a\n // global fetch polyfill.\n return null;\n }\n else {\n let isHTTP = true;\n if (Array.isArray(url)) {\n isHTTP = url.every(urlItem => isHTTPScheme(urlItem));\n }\n else {\n isHTTP = isHTTPScheme(url);\n }\n if (isHTTP) {\n return http(url, loadOptions);\n }\n }\n return null;\n};\nIORouterRegistry.registerSaveRouter(httpRouter);\nIORouterRegistry.registerLoadRouter(httpRouter);\n/**\n * Creates an IOHandler subtype that sends model artifacts to HTTP server.\n *\n * An HTTP request of the `multipart/form-data` mime type will be sent to the\n * `path` URL. The form data includes artifacts that represent the topology\n * and/or weights of the model. 
In the case of Keras-style `tf.Model`, two\n * blobs (files) exist in form-data:\n * - A JSON file consisting of `modelTopology` and `weightsManifest`.\n * - A binary weights file consisting of the concatenated weight values.\n * These files are in the same format as the one generated by\n * [tfjs_converter](https://js.tensorflow.org/tutorials/import-keras.html).\n *\n * The following code snippet exemplifies the client-side code that uses this\n * function:\n *\n * ```js\n * const model = tf.sequential();\n * model.add(\n * tf.layers.dense({units: 1, inputShape: [100], activation: 'sigmoid'}));\n *\n * const saveResult = await model.save(tf.io.http(\n * 'http://model-server:5000/upload', {requestInit: {method: 'PUT'}}));\n * console.log(saveResult);\n * ```\n *\n * If the default `POST` method is to be used, without any custom parameters\n * such as headers, you can simply pass an HTTP or HTTPS URL to `model.save`:\n *\n * ```js\n * const saveResult = await model.save('http://model-server:5000/upload');\n * ```\n *\n * The following GitHub Gist\n * https://gist.github.com/dsmilkov/1b6046fd6132d7408d5257b0976f7864\n * implements a server based on [flask](https://github.com/pallets/flask) that\n * can receive the request. Upon receiving the model artifacts via the requst,\n * this particular server reconsistutes instances of [Keras\n * Models](https://keras.io/models/model/) in memory.\n *\n *\n * @param path A URL path to the model.\n * Can be an absolute HTTP path (e.g.,\n * 'http://localhost:8000/model-upload)') or a relative path (e.g.,\n * './model-upload').\n * @param requestInit Request configurations to be used when sending\n * HTTP request to server using `fetch`. It can contain fields such as\n * `method`, `credentials`, `headers`, `mode`, etc. See\n * https://developer.mozilla.org/en-US/docs/Web/API/Request/Request\n * for more information. `requestInit` must not have a body, because the\n * body will be set by TensorFlow.js. File blobs representing the model\n * topology (filename: 'model.json') and the weights of the model (filename:\n * 'model.weights.bin') will be appended to the body. If `requestInit` has a\n * `body`, an Error will be thrown.\n * @param loadOptions Optional configuration for the loading. It includes the\n * following fields:\n * - weightPathPrefix Optional, this specifies the path prefix for weight\n * files, by default this is calculated from the path param.\n * - fetchFunc Optional, custom `fetch` function. E.g., in Node.js,\n * the `fetch` from node-fetch can be used here.\n * - onProgress Optional, progress callback function, fired periodically\n * before the load is completed.\n * @returns An instance of `IOHandler`.\n *\n * @doc {\n * heading: 'Models',\n * subheading: 'Loading',\n * namespace: 'io',\n * ignoreCI: true\n * }\n */\nexport function http(path, loadOptions) {\n return new HTTPRequest(path, loadOptions);\n}\n/**\n * Deprecated. Use `tf.io.http`.\n * @param path\n * @param loadOptions\n */\nexport function browserHTTPRequest(path, loadOptions) {\n return http(path, loadOptions);\n}\n//# sourceMappingURL=http.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nclass PassthroughLoader {\n constructor(modelArtifacts) {\n this.modelArtifacts = modelArtifacts;\n }\n async load() {\n return this.modelArtifacts;\n }\n}\nclass PassthroughSaver {\n constructor(saveHandler) {\n this.saveHandler = saveHandler;\n }\n async save(modelArtifacts) {\n return this.saveHandler(modelArtifacts);\n }\n}\n/**\n * Creates an IOHandler that loads model artifacts from memory.\n *\n * When used in conjunction with `tf.loadLayersModel`, an instance of\n * `tf.LayersModel` (Keras-style) can be constructed from the loaded artifacts.\n *\n * ```js\n * const model = await tf.loadLayersModel(tf.io.fromMemory(\n * modelTopology, weightSpecs, weightData));\n * ```\n *\n * @param modelArtifacts a object containing model topology (i.e., parsed from\n * the JSON format).\n * @param weightSpecs An array of `WeightsManifestEntry` objects describing the\n * names, shapes, types, and quantization of the weight data.\n * @param weightData A single `ArrayBuffer` containing the weight data,\n * concatenated in the order described by the weightSpecs.\n * @param trainingConfig Model training configuration. Optional.\n *\n * @returns A passthrough `IOHandler` that simply loads the provided data.\n */\nexport function fromMemory(modelArtifacts, weightSpecs, weightData, trainingConfig) {\n if (arguments.length === 1) {\n const isModelArtifacts = modelArtifacts.modelTopology != null ||\n modelArtifacts.weightSpecs != null;\n if (isModelArtifacts) {\n return new PassthroughLoader(modelArtifacts);\n }\n else {\n // Legacy support: with only modelTopology.\n // TODO(cais): Remove this deprecated API.\n console.warn('Please call tf.io.fromMemory() with only one argument. ' +\n 'The argument should be of type ModelArtifacts. ' +\n 'The multi-argument signature of tf.io.fromMemory() has been ' +\n 'deprecated and will be removed in a future release.');\n return new PassthroughLoader({ modelTopology: modelArtifacts });\n }\n }\n else {\n // Legacy support.\n // TODO(cais): Remove this deprecated API.\n console.warn('Please call tf.io.fromMemory() with only one argument. ' +\n 'The argument should be of type ModelArtifacts. ' +\n 'The multi-argument signature of tf.io.fromMemory() has been ' +\n 'deprecated and will be removed in a future release.');\n return new PassthroughLoader({\n modelTopology: modelArtifacts,\n weightSpecs,\n weightData,\n trainingConfig\n });\n }\n}\n/**\n * Creates an IOHandler that passes saved model artifacts to a callback.\n *\n * ```js\n * function handleSave(artifacts) {\n * // ... 
do something with the artifacts ...\n * return {modelArtifactsInfo: {...}, ...};\n * }\n *\n * const saveResult = model.save(tf.io.withSaveHandler(handleSave));\n * ```\n *\n * @param saveHandler A function that accepts a `ModelArtifacts` and returns a\n * `SaveResult`.\n */\nexport function withSaveHandler(saveHandler) {\n return new PassthroughSaver(saveHandler);\n}\n//# sourceMappingURL=passthrough.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// Importing local_storage and indexed_db is necessary for the routers to be\n// registered.\nimport './indexed_db';\nimport './local_storage';\nimport { browserFiles } from './browser_files';\nimport { browserHTTPRequest, http, isHTTPScheme } from './http';\nimport { concatenateArrayBuffers, decodeWeights, encodeWeights, getModelArtifactsInfoForJSON } from './io_utils';\nimport { fromMemory, withSaveHandler } from './passthrough';\nimport { getLoadHandlers, getSaveHandlers, registerLoadRouter, registerSaveRouter } from './router_registry';\nimport { loadWeights, weightsLoaderFactory } from './weights_loader';\nexport { copyModel, listModels, moveModel, removeModel } from './model_management';\nexport { browserFiles, browserHTTPRequest, concatenateArrayBuffers, decodeWeights, encodeWeights, fromMemory, getLoadHandlers, getModelArtifactsInfoForJSON, getSaveHandlers, http, isHTTPScheme, loadWeights, registerLoadRouter, registerSaveRouter, weightsLoaderFactory, withSaveHandler };\n//# sourceMappingURL=io.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Reshape } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * Reshapes a `tf.Tensor` to a given shape.\n *\n * Given an input tensor, returns a new tensor with the same values as the\n * input tensor with shape `shape`.\n *\n * If one component of shape is the special value -1, the size of that\n * dimension is computed so that the total size remains constant. In\n * particular, a shape of [-1] flattens into 1-D. 
At most one component of\n * shape can be -1.\n *\n * If shape is 1-D or higher, then the operation returns a tensor with shape\n * shape filled with the values of tensor. In this case, the number of\n * elements implied by shape must be the same as the number of elements in\n * tensor.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4]);\n * x.reshape([2, 2]).print();\n * ```\n *\n * @param x The input tensor to be reshaped.\n * @param shape An array of integers defining the output tensor shape.\n *\n * @doc {heading: 'Tensors', subheading: 'Transformations'}\n */\nfunction reshape_(x, shape) {\n const $x = convertToTensor(x, 'x', 'reshape', null);\n const inputs = { x: $x };\n const attrs = { shape };\n const forward = (backend, save) => {\n shape = util.inferFromImplicitShape(shape, $x.size);\n util.assert($x.size === util.sizeFromShape(shape), () => 'new shape and old shape must have the same number of elements.');\n save([$x]);\n return backend.reshape($x, shape);\n };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Reshape, attrs);\n}\nexport const reshape = op({ reshape_ });\n//# sourceMappingURL=reshape.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { BatchMatMul } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the dot product of two matrices, A * B. These must be matrices.\n *\n * ```js\n * const a = tf.tensor2d([1, 2], [1, 2]);\n * const b = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * a.matMul(b).print(); // or tf.matMul(a, b)\n * ```\n * @param a First matrix in dot product operation.\n * @param b Second matrix in dot product operation.\n * @param transposeA If true, `a` is transposed before multiplication.\n * @param transposeB If true, `b` is transposed before multiplication.\n *\n * @doc {heading: 'Operations', subheading: 'Matrices'}\n */\nfunction matMul_(a, b, transposeA = false, transposeB = false) {\n let $a = convertToTensor(a, 'a', 'matMul');\n let $b = convertToTensor(b, 'b', 'matMul');\n [$a, $b] = makeTypesMatch($a, $b);\n const forward = (backend, save) => {\n save([$a, $b]);\n const innerShapeA = transposeA ? $a.shape[$a.rank - 2] : $a.shape[$a.rank - 1];\n const innerShapeB = transposeB ? $b.shape[$b.rank - 1] : $b.shape[$b.rank - 2];\n const outerShapeA = transposeA ? $a.shape[$a.rank - 1] : $a.shape[$a.rank - 2];\n const outerShapeB = transposeB ? 
$b.shape[$b.rank - 2] : $b.shape[$b.rank - 1];\n const outerDimsA = $a.shape.slice(0, -2);\n const outerDimsB = $b.shape.slice(0, -2);\n const batchDimA = util.sizeFromShape(outerDimsA);\n const batchDimB = util.sizeFromShape(outerDimsB);\n const batchDimsCompatible = batchDimA === batchDimB || batchDimA === 1 || batchDimB === 1;\n util.assert($a.rank >= 2 && $b.rank >= 2 && batchDimsCompatible, () => `Error in matMul: the input batch dimensions must either be the ` +\n `same or at least one input batch dimension must be 1. Got input ` +\n `batch dimensions of (${outerDimsA}) and (${outerDimsB}).`);\n util.assert(innerShapeA === innerShapeB, () => `Error in matMul: inner shapes (${innerShapeA}) and (` +\n `${innerShapeB}) of Tensors with shapes ${$a.shape} and ` +\n `${$b.shape} and transposeA=${transposeA}` +\n ` and transposeB=${transposeB} must match.`);\n const outShapeOuterDims = batchDimA > batchDimB ? outerDimsA : outerDimsB;\n const outShape = outShapeOuterDims.concat([outerShapeA, outerShapeB]);\n const a3D = transposeA ?\n reshape($a, [batchDimA, innerShapeA, outerShapeA]) :\n reshape($a, [batchDimA, outerShapeA, innerShapeA]);\n const b3D = transposeB ?\n reshape($b, [batchDimB, outerShapeB, innerShapeB]) :\n reshape($b, [batchDimB, innerShapeB, outerShapeB]);\n const res3d = backend.batchMatMul(a3D, b3D, transposeA, transposeB);\n return reshape(res3d, outShape);\n };\n const inputs = { a: $a, b: $b };\n const attrs = { transposeA, transposeB };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, BatchMatMul, attrs);\n}\nexport const matMul = op({ matMul_ });\n//# sourceMappingURL=mat_mul.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { OneHot } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Creates a one-hot `tf.Tensor`. The locations represented by `indices` take\n * value `onValue` (defaults to 1), while all other locations take value\n * `offValue` (defaults to 0). 
If `indices` is rank `R`, the output has rank\n * `R+1` with the last axis of size `depth`.\n *\n * ```js\n * tf.oneHot(tf.tensor1d([0, 1], 'int32'), 3).print();\n * ```\n *\n * @param indices `tf.Tensor` of indices with dtype `int32`.\n * @param depth The depth of the one hot dimension.\n * @param onValue A number used to fill in the output when the index matches\n * the location.\n * @param offValue A number used to fill in the output when the index does\n * not match the location.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nfunction oneHot_(indices, depth, onValue = 1, offValue = 0) {\n if (depth < 2) {\n throw new Error(`Error in oneHot: depth must be >=2, but it is ${depth}`);\n }\n const $indices = convertToTensor(indices, 'indices', 'oneHot', 'int32');\n const outShape = [...$indices.shape, depth];\n const forward = (backend, save) => {\n save([$indices]);\n return reshape(backend.oneHot(reshape($indices, [$indices.size]), depth, onValue, offValue), outShape);\n };\n const inputs = { indices: $indices };\n const attrs = { depth, onValue, offValue };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, OneHot, attrs);\n}\nexport const oneHot = op({ oneHot_ });\n//# sourceMappingURL=one_hot.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Transpose } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * Transposes the `tf.Tensor`. Permutes the dimensions according to `perm`.\n *\n * The returned `tf.Tensor`'s dimension `i` will correspond to the input\n * dimension `perm[i]`. If `perm` is not given, it is set to `[n-1...0]`,\n * where `n` is the rank of the input `tf.Tensor`. 
Hence by default, this\n * operation performs a regular matrix transpose on 2-D input `tf.Tensor`s.\n *\n * ```js\n * const a = tf.tensor2d([1, 2, 3, 4, 5, 6], [2, 3]);\n *\n * a.transpose().print(); // or tf.transpose(a)\n * ```\n *\n * @param x The tensor to transpose.\n * @param perm The permutation of the dimensions of a.\n *\n * @doc {heading: 'Operations', subheading: 'Matrices'}\n */\nfunction transpose_(x, perm) {\n const $x = convertToTensor(x, 'x', 'transpose');\n if (perm == null) {\n perm = $x.shape.map((s, i) => i).reverse();\n }\n util.assert($x.rank === perm.length, () => `Error in transpose: rank of input ${$x.rank} ` +\n `must match length of perm ${perm}.`);\n perm.forEach(axis => {\n util.assert(axis >= 0 && axis < $x.rank, () => `All entries in 'perm' must be between 0 and ${$x.rank - 1}` +\n ` but got ${perm}`);\n });\n if ($x.rank <= 1) {\n return $x.clone();\n }\n const inputs = { x: $x };\n const attrs = { perm };\n return ENGINE.runKernelFunc(backend => backend.transpose($x, perm), inputs, null /* gradient */, Transpose, attrs);\n}\nexport const transpose = op({ transpose_ });\n//# sourceMappingURL=transpose.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { cast } from './cast';\nimport { matMul } from './mat_mul';\nimport { oneHot } from './one_hot';\nimport { op } from './operation';\nimport { transpose } from './transpose';\n/**\n * Computes the confusion matrix from true labels and predicted labels.\n *\n * ```js\n * const labels = tf.tensor1d([0, 1, 2, 1, 0], 'int32');\n * const predictions = tf.tensor1d([0, 2, 2, 1, 0], 'int32');\n * const numClasses = 3;\n * const out = tf.math.confusionMatrix(labels, predictions, numClasses);\n * out.print();\n * // Expected output matrix:\n * // [[2, 0, 0],\n * // [0, 1, 1],\n * // [0, 0, 1]]\n * ```\n *\n * @param labels The target labels, assumed to be 0-based integers\n * for the classes. The shape is `[numExamples]`, where\n * `numExamples` is the number of examples included.\n * @param predictions The predicted classes, assumed to be\n * 0-based integers for the classes. Must have the same shape as `labels`.\n * @param numClasses Number of all classes, as an integer.\n * Its value must be larger than the largest element in `labels` and\n * `predictions`.\n * @returns The confusion matrix as a int32-type 2D tensor. 
The value at\n * row `r` and column `c` is the number of times examples of actual class\n * `r` were predicted as class `c`.\n *\n * @doc {heading: 'Operations', subheading: 'Evaluation'}\n */\nexport function confusionMatrix_(labels, predictions, numClasses) {\n const $labels = convertToTensor(labels, 'labels', 'confusionMatrix');\n const $predictions = convertToTensor(predictions, 'predictions', 'confusionMatrix');\n util.assert(numClasses == null || numClasses > 0 && Number.isInteger(numClasses), () => `If provided, numClasses must be a positive integer, ` +\n `but got ${numClasses}`);\n util.assert($labels.rank === 1, () => `Expected the rank of labels to be 1, but got ${$labels.rank}`);\n util.assert($predictions.rank === 1, () => `Expected the rank of predictions to be 1, ` +\n `but got ${$predictions.rank}`);\n util.assert($labels.shape[0] === $predictions.shape[0], () => `Mismatch in the number of examples: ` +\n `${$labels.shape[0]} vs. ${$predictions.shape[0]}. ` +\n `Labels and predictions should have the same number of elements.`);\n util.assert(numClasses > 0 && Number.isInteger(numClasses), () => `numClasses is required to be a positive integer, but got ` +\n `${numClasses}`);\n // TODO(cais): In the future, if oneHot supports tensors inputs for\n // `numClasses`, `confusionMatrix` can make `numClasses` optional.\n const oneHotLabels = oneHot(cast($labels, 'int32'), numClasses);\n const oneHotPredictions = oneHot(cast($predictions, 'int32'), numClasses);\n const oneHotLabelsT = transpose(oneHotLabels);\n const product = matMul(oneHotLabelsT, oneHotPredictions);\n return cast(product, 'int32');\n}\nexport const confusionMatrix = op({ confusionMatrix_ });\n//# sourceMappingURL=confusion_matrix.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * Exports under the tf.math.* namespace.\n */\nimport { confusionMatrix } from './ops/confusion_matrix';\nexport { confusionMatrix };\n//# sourceMappingURL=math.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { inferShape } from '../tensor_util_env';\nimport { assertNonNull } from '../util';\nimport { makeTensor } from './tensor_ops_util';\n/**\n * Creates rank-3 `tf.Tensor` with the provided values, shape and dtype.\n *\n * The same functionality can be achieved with `tf.tensor`, but in general\n * we recommend using `tf.tensor3d` as it makes the code more readable.\n *\n * ```js\n * // Pass a nested array.\n * tf.tensor3d([[[1], [2]], [[3], [4]]]).print();\n * ```\n * ```js\n * // Pass a flat array and specify a shape.\n * tf.tensor3d([1, 2, 3, 4], [2, 2, 1]).print();\n * ```\n *\n * @param values The values of the tensor. Can be nested array of numbers,\n * or a flat array, or a `TypedArray`.\n * @param shape The shape of the tensor. If not provided, it is inferred from\n * `values`.\n * @param dtype The data type.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function tensor3d(values, shape, dtype) {\n assertNonNull(values);\n if (shape != null && shape.length !== 3) {\n throw new Error('tensor3d() requires shape to have three numbers');\n }\n const inferredShape = inferShape(values, dtype);\n if (inferredShape.length !== 3 && inferredShape.length !== 1) {\n throw new Error('tensor3d() requires values to be number[][][] or flat/TypedArray');\n }\n if (inferredShape.length === 1 && shape == null) {\n throw new Error('tensor3d() requires shape to be provided when `values` ' +\n 'are a flat array');\n }\n return makeTensor(values, shape, inferredShape, dtype);\n}\n//# sourceMappingURL=tensor3d.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { FromPixels } from '../kernel_names';\nimport { getKernel } from '../kernel_registry';\nimport { Tensor } from '../tensor';\nimport { convertToTensor } from '../tensor_util_env';\nimport { cast } from './cast';\nimport { op } from './operation';\nimport { tensor3d } from './tensor3d';\nlet fromPixels2DContext;\n/**\n * Creates a `tf.Tensor` from an image.\n *\n * ```js\n * const image = new ImageData(1, 1);\n * image.data[0] = 100;\n * image.data[1] = 150;\n * image.data[2] = 200;\n * image.data[3] = 255;\n *\n * tf.browser.fromPixels(image).print();\n * ```\n *\n * @param pixels The input image to construct the tensor from. The\n * supported image types are all 4-channel. You can also pass in an image\n * object with following attributes:\n * `{data: Uint8Array; width: number; height: number}`\n * @param numChannels The number of channels of the output tensor. A\n * numChannels value less than 4 allows you to ignore channels. Defaults to\n * 3 (ignores alpha channel of input image).\n *\n * @doc {heading: 'Browser', namespace: 'browser', ignoreCI: true}\n */\nfunction fromPixels_(pixels, numChannels = 3) {\n // Sanity checks.\n if (numChannels > 4) {\n throw new Error('Cannot construct Tensor with more than 4 channels from pixels.');\n }\n if (pixels == null) {\n throw new Error('pixels passed to tf.browser.fromPixels() can not be null');\n }\n let isPixelData = false;\n let isImageData = false;\n let isVideo = false;\n let isImage = false;\n let isCanvasLike = false;\n if (pixels.data instanceof Uint8Array) {\n isPixelData = true;\n }\n else if (typeof (ImageData) !== 'undefined' && pixels instanceof ImageData) {\n isImageData = true;\n }\n else if (typeof (HTMLVideoElement) !== 'undefined' &&\n pixels instanceof HTMLVideoElement) {\n isVideo = true;\n }\n else if (typeof (HTMLImageElement) !== 'undefined' &&\n pixels instanceof HTMLImageElement) {\n isImage = true;\n // tslint:disable-next-line: no-any\n }\n else if (pixels.getContext != null) {\n isCanvasLike = true;\n }\n else {\n throw new Error('pixels passed to tf.browser.fromPixels() must be either an ' +\n `HTMLVideoElement, HTMLImageElement, HTMLCanvasElement, ImageData ` +\n `in browser, or OffscreenCanvas, ImageData in webworker` +\n ` or {data: Uint32Array, width: number, height: number}, ` +\n `but was ${pixels.constructor.name}`);\n }\n if (isVideo) {\n const HAVE_CURRENT_DATA_READY_STATE = 2;\n if (isVideo &&\n pixels.readyState <\n HAVE_CURRENT_DATA_READY_STATE) {\n throw new Error('The video element has not loaded data yet. 
Please wait for ' +\n '`loadeddata` event on the element.');\n }\n }\n // If the current backend has 'FromPixels' registered, it has a more\n // efficient way of handling pixel uploads, so we call that.\n const kernel = getKernel(FromPixels, ENGINE.backendName);\n if (kernel != null) {\n const inputs = { pixels };\n const attrs = { numChannels };\n return ENGINE.runKernel(FromPixels, inputs, attrs);\n }\n const [width, height] = isVideo ?\n [\n pixels.videoWidth,\n pixels.videoHeight\n ] :\n [pixels.width, pixels.height];\n let vals;\n if (isCanvasLike) {\n vals =\n // tslint:disable-next-line:no-any\n pixels.getContext('2d').getImageData(0, 0, width, height).data;\n }\n else if (isImageData || isPixelData) {\n vals = pixels.data;\n }\n else if (isImage || isVideo) {\n if (fromPixels2DContext == null) {\n fromPixels2DContext = document.createElement('canvas').getContext('2d');\n }\n fromPixels2DContext.canvas.width = width;\n fromPixels2DContext.canvas.height = height;\n fromPixels2DContext.drawImage(pixels, 0, 0, width, height);\n vals = fromPixels2DContext.getImageData(0, 0, width, height).data;\n }\n let values;\n if (numChannels === 4) {\n values = new Int32Array(vals);\n }\n else {\n const numPixels = width * height;\n values = new Int32Array(numPixels * numChannels);\n for (let i = 0; i < numPixels; i++) {\n for (let channel = 0; channel < numChannels; ++channel) {\n values[i * numChannels + channel] = vals[i * 4 + channel];\n }\n }\n }\n const outShape = [height, width, numChannels];\n return tensor3d(values, outShape, 'int32');\n}\n/**\n * Draws a `tf.Tensor` of pixel values to a byte array or optionally a\n * canvas.\n *\n * When the dtype of the input is 'float32', we assume values in the range\n * [0-1]. Otherwise, when input is 'int32', we assume values in the range\n * [0-255].\n *\n * Returns a promise that resolves when the canvas has been drawn to.\n *\n * @param img A rank-2 or rank-3 tensor. If rank-2, draws grayscale. If\n * rank-3, must have depth of 1, 3 or 4. When depth of 1, draws\n * grayscale. When depth of 3, we draw with the first three components of\n * the depth dimension corresponding to r, g, b and alpha = 1. When depth of\n * 4, all four components of the depth dimension correspond to r, g, b, a.\n * @param canvas The canvas to draw to.\n *\n * @doc {heading: 'Browser', namespace: 'browser'}\n */\nexport async function toPixels(img, canvas) {\n let $img = convertToTensor(img, 'img', 'toPixels');\n if (!(img instanceof Tensor)) {\n // Assume int32 if user passed a native array.\n const originalImgTensor = $img;\n $img = cast(originalImgTensor, 'int32');\n originalImgTensor.dispose();\n }\n if ($img.rank !== 2 && $img.rank !== 3) {\n throw new Error(`toPixels only supports rank 2 or 3 tensors, got rank ${$img.rank}.`);\n }\n const [height, width] = $img.shape.slice(0, 2);\n const depth = $img.rank === 2 ? 1 : $img.shape[2];\n if (depth > 4 || depth === 2) {\n throw new Error(`toPixels only supports depth of size ` +\n `1, 3 or 4 but got ${depth}`);\n }\n if ($img.dtype !== 'float32' && $img.dtype !== 'int32') {\n throw new Error(`Unsupported type for toPixels: ${$img.dtype}.` +\n ` Please use float32 or int32 tensors.`);\n }\n const data = await $img.data();\n const multiplier = $img.dtype === 'float32' ? 
255 : 1;\n const bytes = new Uint8ClampedArray(width * height * 4);\n for (let i = 0; i < height * width; ++i) {\n const rgba = [0, 0, 0, 255];\n for (let d = 0; d < depth; d++) {\n const value = data[i * depth + d];\n if ($img.dtype === 'float32') {\n if (value < 0 || value > 1) {\n throw new Error(`Tensor values for a float32 Tensor must be in the ` +\n `range [0 - 1] but encountered ${value}.`);\n }\n }\n else if ($img.dtype === 'int32') {\n if (value < 0 || value > 255) {\n throw new Error(`Tensor values for a int32 Tensor must be in the ` +\n `range [0 - 255] but encountered ${value}.`);\n }\n }\n if (depth === 1) {\n rgba[0] = value * multiplier;\n rgba[1] = value * multiplier;\n rgba[2] = value * multiplier;\n }\n else {\n rgba[d] = value * multiplier;\n }\n }\n const j = i * 4;\n bytes[j + 0] = Math.round(rgba[0]);\n bytes[j + 1] = Math.round(rgba[1]);\n bytes[j + 2] = Math.round(rgba[2]);\n bytes[j + 3] = Math.round(rgba[3]);\n }\n if (canvas != null) {\n canvas.width = width;\n canvas.height = height;\n const ctx = canvas.getContext('2d');\n const imageData = new ImageData(bytes, width, height);\n ctx.putImageData(imageData, 0, 0);\n }\n if ($img !== img) {\n $img.dispose();\n }\n return bytes;\n}\nexport const fromPixels = op({ fromPixels_ });\n//# sourceMappingURL=browser.js.map", "import { computeStrides } from '../util';\n/**\n * Validate gather nd inputs.\n *\n * @param tensor The tensor contains the source values.\n * @param indices The tensor contains the indices to slice the source.\n *\n * @returns [resultShape, numUpdates, sliceSize, strides]\n */\nexport function prepareAndValidate(tensor, indices) {\n if (tensor.rank < 1) {\n throw new Error('tf.gatherND() expects the input to be rank 1 or higher,' +\n ` but the rank was ${tensor.rank}.`);\n }\n if (indices.rank < 1) {\n throw new Error('tf.gatherND() expects the indices to be rank 1 or higher,' +\n ` but the rank was ${indices.rank}.`);\n }\n if (indices.dtype !== 'int32') {\n throw new Error('tf.gatherND() expects the indices to be int32 type,' +\n ` but the dtype was ${indices.dtype}.`);\n }\n if (indices.shape[indices.rank - 1] > tensor.rank) {\n throw new Error('index innermost dimension length must be <= tensor rank; saw: ' +\n `${indices.shape[indices.rank - 1]} vs. ${tensor.rank}`);\n }\n if (tensor.size === 0) {\n throw new Error('Requested more than 0 entries, but input is empty.' +\n ` Input shape: ${tensor.shape}.`);\n }\n const indicesShape = indices.shape;\n const sliceRank = indicesShape[indicesShape.length - 1];\n // The result shape is\n // indices.shape[:-1] + params.shape[indices.shape[-1]:]\n let nResult = 1;\n for (let i = 0; i < indicesShape.length - 1; ++i) {\n nResult *= indicesShape[i];\n }\n const inputShape = tensor.shape;\n const resultShape = indicesShape.slice();\n resultShape.pop();\n let sliceSize = 1;\n for (let i = sliceRank; i < tensor.rank; ++i) {\n sliceSize *= inputShape[i];\n resultShape.push(inputShape[i]);\n }\n const strides = [...computeStrides(tensor.shape).map(stride => stride / sliceSize),\n 1].slice(0, sliceRank);\n return [resultShape, nResult, sliceSize, strides];\n}\n//# sourceMappingURL=gather_nd_util.js.map", "import { computeStrides, sizeFromShape } from '../util';\n/**\n * Check whether updates.shape = indices.shape[:batchDim] +\n * shape[sliceDim:]\n *\n * @param x The input tensor.\n */\nexport function validateUpdateShape(shape, indices, updates) {\n const sliceDim = (indices.rank > 1) ? 
indices.shape[indices.rank - 1] : 1;\n const batchDim = (indices.rank > 1) ? indices.rank - 1 : 1;\n const shapeError = 'Must have updates.shape = indices.shape[:batchDim] + ' +\n `shape[sliceDim:], got updates.shape: ${updates.shape}` +\n `, indices.shape: ${indices.shape}, shape: ${shape}` +\n `, sliceDim: ${sliceDim}, and batchDim: ${batchDim}.`;\n if (updates.rank < batchDim) {\n throw new Error(shapeError + ` update.rank < ${batchDim}. `);\n }\n if (shape.length < sliceDim + (updates.rank - batchDim)) {\n throw new Error(shapeError +\n ` Output shape length < ${sliceDim + (updates.rank - batchDim)}`);\n }\n if (updates.rank !== batchDim + shape.length - sliceDim) {\n throw new Error(shapeError + ` update.rank != ${batchDim + shape.length - sliceDim}`);\n }\n for (let d = 0; d < batchDim; ++d) {\n if (updates.shape[d] !== indices.shape[d]) {\n throw new Error(shapeError +\n ` updates.shape[${d}] (${updates.shape[d]}) != indices.shape[${d}] (${indices.shape[d]}).`);\n }\n }\n for (let d = 0; d < updates.rank - batchDim; ++d) {\n if (updates.shape[d + batchDim] !== shape[d + sliceDim]) {\n throw new Error(shapeError +\n ` updates.shape[${d + batchDim}] (${updates.shape[d + batchDim]}) != shape[${d + batchDim}] (${shape[d + batchDim]})`);\n }\n }\n}\n/**\n * Validate scatter nd inputs.\n *\n * @param update The tensor contains the update values.\n * @param indices The tensor contains the indices for the update values.\n * @param shape The shape of the output tensor.\n */\nexport function validateInput(updates, indices, shape) {\n if (indices.rank < 1) {\n throw new Error('tf.scatterND() expects the indices to be rank 1 or higher,' +\n ` but the rank was ${indices.rank}.`);\n }\n if (updates.rank < 1) {\n throw new Error('tf.scatterND() expects the updates to be rank 1 or higher,' +\n ` but the rank was ${updates.rank}.`);\n }\n if (indices.dtype !== 'int32') {\n throw new Error(`The dtype of 'indices' should be int32, but got dtype: ${indices.dtype}`);\n }\n if (shape.length < 1) {\n throw new Error(`Output rank must be greater or equal to 1, but got shape: ${shape}`);\n }\n if (shape.length === 0) {\n if (indices.size === 0) {\n throw new Error(`Indices specified for empty output. indices shape: ${indices.shape}`);\n }\n if (updates.size === 0) {\n throw new Error(`Updates specified for empty output. updates shape: ${updates.shape}`);\n }\n }\n validateUpdateShape(shape, indices, updates);\n}\n/**\n * Calculate the shape information for the output.\n *\n * @param update The tensor contains the update values.\n * @param indices The tensor contains the indices for the update values.\n * @param shape The shape of the output tensor.\n *\n * @returns ScatterShapeInfo\n */\nexport function calculateShapes(updates, indices, shape) {\n // Calculate the number of dimensions in indices\n const indicesRank = indices.shape.length;\n const sliceRank = (indicesRank > 1) ? indices.shape[indicesRank - 1] : 1;\n // Calculate the number of elements that make up each slice of our updated\n // tensor. This allows us to work with flattened tensors and copy over whole\n // slices at a time.\n const totalNd = shape.length;\n let sliceSize = 1;\n for (let i = sliceRank; i < totalNd; ++i) {\n sliceSize *= shape[i];\n }\n const safeSliceDim = (sliceRank < 1) ? 
1 : sliceRank;\n const numUpdates = sizeFromShape(indices.shape) / safeSliceDim;\n const strides = [...computeStrides(shape.slice(0, sliceRank)), 1];\n const outputSize = sizeFromShape(shape);\n return { sliceRank, numUpdates, sliceSize, strides, outputSize };\n}\n//# sourceMappingURL=scatter_nd_util.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as util from '../util';\nexport function assertParamsValid(input, begin, size) {\n const inputRank = input.shape.length;\n util.assert(inputRank === begin.length, () => `Error in slice${inputRank}D: Length of begin ${begin} must ` +\n `match the rank of the array (${inputRank}).`);\n util.assert(inputRank === size.length, () => `Error in slice${inputRank}D: Length of size ${size} must ` +\n `match the rank of the array (${inputRank}).`);\n for (let i = 0; i < inputRank; ++i) {\n util.assert(begin[i] + size[i] <= input.shape[i], () => `Error in slice${inputRank}D: begin[${i}] + size[${i}] ` +\n `(${begin[i] + size[i]}) would overflow input.shape[${i}] (${input.shape[i]})`);\n }\n}\n/** Converts a binary mask to an array of axes. Used in stridedSlice(). */\nexport function maskToAxes(mask) {\n const axes = [];\n let axis = 0;\n while (mask > 0) {\n if (mask & 1) {\n axes.push(axis);\n }\n mask /= 2;\n axis++;\n }\n return axes;\n}\n/** Computes the output shape given the strided slice params. */\nexport function computeOutShape(begin, end, strides) {\n const size = [];\n for (let axis = 0; axis < begin.length; axis++) {\n size[axis] = Math.ceil((end[axis] - begin[axis]) / strides[axis]);\n }\n return size;\n}\n// Creates full selection at the elided dimensions. If the dimension matches\n// the ellipsis mask, override the current stride value. 
Otherwise, insert.\nexport function stridesWithElidedDims(strides, ellipsisInsertionIndex, numElidedAxes, inputShape) {\n const newStrides = [...strides];\n for (let i = newStrides.length; i < inputShape.length; i++) {\n newStrides.push(1);\n }\n for (let i = 0; i < numElidedAxes; i++) {\n if (i === 0) {\n newStrides[ellipsisInsertionIndex] = 1;\n }\n else {\n newStrides.splice(ellipsisInsertionIndex, 0 /* num elements to delete */, 1 /* element to add */);\n newStrides.pop();\n }\n }\n return newStrides;\n}\nfunction unnormalizeAxis(ellipsisInsertionIndex, numElidedAxes, normalizedAxis) {\n if (normalizedAxis <= ellipsisInsertionIndex) {\n return normalizedAxis;\n }\n return normalizedAxis - (numElidedAxes - 1);\n}\nfunction getElidedAxes(numElidedAxes, ellipsisInsertionIndex) {\n const elidedAxes = [];\n for (let i = 0; i < numElidedAxes; i++) {\n elidedAxes.push(ellipsisInsertionIndex + i);\n }\n return elidedAxes;\n}\n// Normalize the start, end and strides.\nexport function getNormalizedAxes(inputShape, ellipsisAxes, numInterpolatedAxes, begin, end, strides, beginMask, endMask, ellipsisMask) {\n const inputRank = inputShape.length;\n let normalizedBegin = new Array(inputRank), normalizedEnd = new Array(inputRank), normalizedStrides = new Array(inputRank);\n if (ellipsisAxes.length && numInterpolatedAxes > 0) {\n const fullIndex = ellipsisAxes[0];\n // The ellipsis applies to the masked index as well as any dimensions\n // that are interpolated.\n const numElidedAxes = numInterpolatedAxes + 1;\n normalizedBegin = startIndicesWithElidedDims(beginMask, fullIndex, numElidedAxes, begin, inputShape);\n normalizedEnd = stopIndicesWithElidedDims(endMask, fullIndex, numElidedAxes, end, inputShape);\n normalizedStrides =\n stridesWithElidedDims(strides, fullIndex, numElidedAxes, inputShape);\n }\n else {\n for (let axis = 0; axis < inputRank; axis++) {\n normalizedBegin[axis] = startForAxis(beginMask, begin, strides, inputShape, axis, ellipsisMask);\n normalizedEnd[axis] =\n stopForAxis(endMask, end, strides, inputShape, axis, ellipsisMask);\n normalizedStrides[axis] = stridesForAxis(strides, axis, ellipsisMask);\n }\n }\n return {\n begin: normalizedBegin,\n end: normalizedEnd,\n strides: normalizedStrides\n };\n}\n// Creates full selection at the elided dimensions. If the dimension matches\n// the ellipsis mask, override the current start value. Otherwise, insert.\nexport function startIndicesWithElidedDims(beginMask, ellipsisInsertionIndex, numElidedAxes, originalBegin, inputShape) {\n const newIndices = [...inputShape];\n const elidedAxes = getElidedAxes(numElidedAxes, ellipsisInsertionIndex);\n for (let axis = 0; axis < newIndices.length; axis++) {\n if (elidedAxes.indexOf(axis) > -1) {\n newIndices[axis] = 0;\n }\n else {\n const originalAxis = unnormalizeAxis(ellipsisInsertionIndex, numElidedAxes, axis);\n let originalValue = originalBegin[originalAxis];\n if (beginMask & 1 << originalAxis) {\n originalValue = 0;\n }\n newIndices[axis] = originalValue;\n }\n }\n return newIndices;\n}\n// Creates full selection at the elided dimensions. If the dimension matches\n// the ellipsis mask, override the current stop value. 
Otherwise, insert.\nexport function stopIndicesWithElidedDims(endMask, ellipsisInsertionIndex, numElidedAxes, originalEnd, inputShape) {\n const newIndices = [...inputShape];\n const elidedAxes = getElidedAxes(numElidedAxes, ellipsisInsertionIndex);\n for (let axis = 0; axis < newIndices.length; axis++) {\n if (elidedAxes.indexOf(axis) > -1) {\n newIndices[axis] = Number.MAX_SAFE_INTEGER;\n }\n else {\n const originalAxis = unnormalizeAxis(ellipsisInsertionIndex, numElidedAxes, axis);\n let originalValue = originalEnd[originalAxis];\n if (endMask & 1 << originalAxis) {\n originalValue = Number.MAX_SAFE_INTEGER;\n }\n newIndices[axis] = originalValue;\n }\n }\n for (let i = 0; i < newIndices.length; i++) {\n // Handle negative indices\n const axisSize = inputShape[i];\n if (newIndices[i] < 0) {\n newIndices[i] += axisSize;\n }\n newIndices[i] = util.clamp(0, newIndices[i], inputShape[i]);\n }\n return newIndices;\n}\nexport function stridesForAxis(strides, axis, ellipsisMask) {\n let stride = strides[axis];\n if (ellipsisMask & (1 << axis) || stride == null) {\n stride = 1;\n }\n return stride;\n}\nexport function startForAxis(beginMask, startIndices, strides, inputShape, axis, ellipsisMask) {\n // Begin with the specified index\n let start = startIndices[axis];\n const stride = strides[axis] || 1;\n // Check the axis bit from right of masked axes, or the begin index is not set\n // for the axis.\n if (beginMask & 1 << axis || ellipsisMask & 1 << axis || start == null) {\n if (stride > 0) {\n // Forward iteration - use the first element. These values will get\n // clamped below (Note: We could have set them to 0 and axis_size-1, but\n // use lowest() and max() to maintain symmetry with StopForAxis())\n start = Number.MIN_SAFE_INTEGER;\n }\n else {\n // Backward iteration - use the last element.\n start = Number.MAX_SAFE_INTEGER;\n }\n }\n // Handle negative indices\n const axisSize = inputShape[axis];\n if (start < 0) {\n start += axisSize;\n }\n // Clamping\n start = util.clamp(0, start, axisSize - 1);\n return start;\n}\nexport function stopForAxis(endMask, stopIndices, strides, inputShape, axis, ellipsisMask) {\n // Begin with the specified index\n let stop = stopIndices[axis];\n const stride = strides[axis] || 1;\n // Check the axis bit from right of masked axes, or if the stop index is not\n // set for this axis.\n if (endMask & (1 << axis) || ellipsisMask & (1 << axis) || stop == null) {\n if (stride > 0) {\n // Forward iteration - use the last element. 
These values will get\n // clamped below\n stop = Number.MAX_SAFE_INTEGER;\n }\n else {\n // Backward iteration - use the first element.\n stop = Number.MIN_SAFE_INTEGER;\n }\n }\n // Handle negative indices\n const axisSize = inputShape[axis];\n if (stop < 0) {\n stop += axisSize;\n }\n // Clamping\n // Because the end index points one past the last element, we need slightly\n // different clamping ranges depending on the direction.\n if (stride > 0) {\n // Forward iteration\n stop = util.clamp(0, stop, axisSize);\n }\n else {\n // Backward iteration\n stop = util.clamp(-1, stop, axisSize - 1);\n }\n return stop;\n}\n/**\n * Returns true if the slice occupies a continous set of elements in the\n * 'flat' space.\n */\nexport function isSliceContinous(shape, begin, size) {\n // Index of the first axis that has size > 1.\n let firstNonOneAxis = size.length;\n for (let i = 0; i < size.length; i++) {\n if (size[i] > 1) {\n firstNonOneAxis = i;\n break;\n }\n }\n for (let i = firstNonOneAxis + 1; i < size.length; i++) {\n if (begin[i] > 0 || size[i] !== shape[i]) {\n return false;\n }\n }\n return true;\n}\nexport function computeFlatOffset(begin, strides) {\n let flatOffset = begin.length > 0 ? begin[begin.length - 1] : 1;\n for (let i = 0; i < begin.length - 1; i++) {\n flatOffset += begin[i] * strides[i];\n }\n return flatOffset;\n}\nexport function parseSliceParams(x, begin, size) {\n // The following logic allows for more ergonomic calls.\n let begin_;\n const xRank = x.shape.length;\n if (typeof begin === 'number') {\n begin_ = [begin, ...new Array(xRank - 1).fill(0)];\n }\n else if (begin.length < xRank) {\n begin_ = begin.concat(new Array(xRank - begin.length).fill(0));\n }\n else {\n begin_ = begin.slice();\n }\n begin_.forEach(d => {\n util.assert(d !== -1, () => 'slice() does not support negative begin indexing.');\n });\n let size_;\n if (size == null) {\n size_ = new Array(xRank).fill(-1);\n }\n else if (typeof size === 'number') {\n size_ = [size, ...new Array(xRank - 1).fill(-1)];\n }\n else if (size.length < xRank) {\n size_ = size.concat(new Array(xRank - size.length).fill(-1));\n }\n else {\n size_ = size;\n }\n size_ = size_.map((d, i) => {\n if (d >= 0) {\n return d;\n }\n else {\n util.assert(d === -1, () => `Negative size values should be exactly -1 but got ` +\n `${d} for the slice() size at index ${i}.`);\n return x.shape[i] - begin_[i];\n }\n });\n return [begin_, size_];\n}\n//# sourceMappingURL=slice_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { assert } from './util';\n/**\n * Serializable defines the serialization contract.\n *\n * TFJS requires serializable classes to return their className when asked\n * to avoid issues with minification.\n */\nexport class Serializable {\n /**\n * Return the class name for this class to use in serialization contexts.\n *\n * Generally speaking this will be the same thing that constructor.name\n * would have returned. However, the class name needs to be robust\n * against minification for serialization/deserialization to work properly.\n *\n * There's also places such as initializers.VarianceScaling, where\n * implementation details between different languages led to different\n * class hierarchies and a non-leaf node is used for serialization purposes.\n */\n getClassName() {\n return this.constructor\n .className;\n }\n /**\n * Creates an instance of T from a ConfigDict.\n *\n * This works for most descendants of serializable. A few need to\n * provide special handling.\n * @param cls A Constructor for the class to instantiate.\n * @param config The Configuration for the object.\n */\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls(config);\n }\n}\n/**\n * Maps string keys to class constructors.\n *\n * Used during (de)serialization from the cross-language JSON format, which\n * requires the class name in the serialization format matches the class\n * names as used in Python, should it exist.\n */\nexport class SerializationMap {\n constructor() {\n this.classNameMap = {};\n }\n /**\n * Returns the singleton instance of the map.\n */\n static getMap() {\n if (SerializationMap.instance == null) {\n SerializationMap.instance = new SerializationMap();\n }\n return SerializationMap.instance;\n }\n /**\n * Registers the class as serializable.\n */\n static register(cls) {\n SerializationMap.getMap().classNameMap[cls.className] =\n [cls, cls.fromConfig];\n }\n}\n/**\n * Register a class with the serialization map of TensorFlow.js.\n *\n * This is often used for registering custom Layers, so they can be\n * serialized and deserialized.\n *\n * Example:\n *\n * ```js\n * class MyCustomLayer extends tf.layers.Layer {\n * static className = 'MyCustomLayer';\n *\n * constructor(config) {\n * super(config);\n * }\n * }\n * tf.serialization.registerClass(MyCustomLayer);\n * ```\n *\n * @param cls The class to be registered. 
It must have a public static member\n * called `className` defined and the value must be a non-empty string.\n *\n * @doc {heading: 'Models', subheading: 'Serialization', ignoreCI: true}\n */\nexport function registerClass(cls) {\n assert(cls.className != null, () => `Class being registered does not have the static className ` +\n `property defined.`);\n assert(typeof cls.className === 'string', () => `className is required to be a string, but got type ` +\n typeof cls.className);\n assert(cls.className.length > 0, () => `Class being registered has an empty-string as its className, ` +\n `which is disallowed.`);\n SerializationMap.register(cls);\n}\n//# sourceMappingURL=serialization.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from './engine';\nimport { inferShape } from './tensor_util_env';\nimport { arraysEqual, flatten, isString, isTypedArray } from './util';\nconst TEST_EPSILON_FLOAT32 = 1e-3;\nexport const TEST_EPSILON_FLOAT16 = 1e-1;\nexport function expectArraysClose(actual, expected, epsilon) {\n if (epsilon == null) {\n epsilon = testEpsilon();\n }\n return expectArraysPredicate(actual, expected, (a, b) => areClose(a, b, epsilon));\n}\nexport function testEpsilon() {\n return ENGINE.backend.floatPrecision() === 32 ? TEST_EPSILON_FLOAT32 :\n TEST_EPSILON_FLOAT16;\n}\nfunction expectArraysPredicate(actual, expected, predicate) {\n let checkClassType = true;\n if (isTypedArray(actual) || isTypedArray(expected)) {\n checkClassType = false;\n }\n if (isTypedArray(actual) && isTypedArray(expected)) {\n checkClassType = true;\n }\n if (checkClassType) {\n const aType = actual.constructor.name;\n const bType = expected.constructor.name;\n if (aType !== bType) {\n throw new Error(`Arrays are of different type. Actual: ${aType}. ` +\n `Expected: ${bType}`);\n }\n }\n if (Array.isArray(actual) && Array.isArray(expected)) {\n const actualShape = inferShape(actual);\n const expectedShape = inferShape(expected);\n if (!arraysEqual(actualShape, expectedShape)) {\n throw new Error(`Arrays have different shapes. ` +\n `Actual: [${actualShape}]. Expected: [${expectedShape}]`);\n }\n }\n const actualFlat = isTypedArray(actual) ? 
actual : flatten(actual);\n const expectedFlat = isTypedArray(expected) ?\n expected :\n flatten(expected);\n if (actualFlat.length !== expectedFlat.length) {\n throw new Error(`Arrays have different lengths actual: ${actualFlat.length} vs ` +\n `expected: ${expectedFlat.length}.\\n` +\n `Actual: ${actualFlat}.\\n` +\n `Expected: ${expectedFlat}.`);\n }\n for (let i = 0; i < expectedFlat.length; ++i) {\n const a = actualFlat[i];\n const e = expectedFlat[i];\n if (!predicate(a, e)) {\n throw new Error(`Arrays differ: actual[${i}] = ${a}, expected[${i}] = ${e}.\\n` +\n `Actual: ${actualFlat}.\\n` +\n `Expected: ${expectedFlat}.`);\n }\n }\n}\nexport function expectPromiseToFail(fn, done) {\n fn().then(() => done.fail(), () => done());\n}\nexport function expectArraysEqual(actual, expected) {\n const exp = typeof expected === 'string' || typeof expected === 'number' ||\n typeof expected === 'boolean' ?\n [expected] :\n expected;\n if (isString(actual) || isString(actual[0]) ||\n isString(expected) || isString(expected[0])) {\n // tslint:disable-next-line: triple-equals\n return expectArraysPredicate(actual, exp, (a, b) => a == b);\n }\n return expectArraysPredicate(actual, expected, (a, b) => areClose(a, b, 0));\n}\nexport function expectNumbersClose(a, e, epsilon) {\n if (epsilon == null) {\n epsilon = testEpsilon();\n }\n if (!areClose(a, e, epsilon)) {\n throw new Error(`Numbers differ: actual === ${a}, expected === ${e}`);\n }\n}\nfunction areClose(a, e, epsilon) {\n if (!isFinite(a) && !isFinite(e)) {\n return true;\n }\n if (isNaN(a) || isNaN(e) || Math.abs(a - e) > epsilon) {\n return false;\n }\n return true;\n}\nexport function expectValuesInRange(actual, low, high) {\n for (let i = 0; i < actual.length; i++) {\n if (actual[i] < low || actual[i] > high) {\n throw new Error(`Value out of range:${actual[i]} low: ${low}, high: ${high}`);\n }\n }\n}\nexport function expectArrayBuffersEqual(actual, expected) {\n // Safari & Jasmine don't like comparing ArrayBuffers directly. Wrapping in\n // a Float32Array solves this issue.\n expect(new Float32Array(actual)).toEqual(new Float32Array(expected));\n}\n//# sourceMappingURL=test_util.js.map", "/** @license See the LICENSE file. */\n// This code is auto-generated, do not modify this file!\nconst version = '2.7.0';\nexport { version };\n//# sourceMappingURL=version.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from './engine';\nimport { env } from './environment';\nimport { setDeprecationWarningFn } from './tensor';\nimport { getTensorsInContainer } from './tensor_util';\n/**\n * Enables production mode which disables correctness checks in favor of\n * performance.\n *\n * @doc {heading: 'Environment'}\n */\nexport function enableProdMode() {\n env().set('PROD', true);\n}\n/**\n * Enables debug mode which will log information about all executed kernels:\n * the elapsed time of the kernel execution, as well as the rank, shape, and\n * size of the output tensor.\n *\n * Debug mode will significantly slow down your application as it will\n * download the result of every operation to the CPU. This should not be used in\n * production. Debug mode does not affect the timing information of the kernel\n * execution as we do not measure download time in the kernel execution time.\n *\n * See also: `tf.profile`, `tf.memory`.\n *\n * @doc {heading: 'Environment'}\n */\nexport function enableDebugMode() {\n env().set('DEBUG', true);\n}\n/** Globally disables deprecation warnings */\nexport function disableDeprecationWarnings() {\n env().set('DEPRECATION_WARNINGS_ENABLED', false);\n console.warn(`TensorFlow.js deprecation warnings have been disabled.`);\n}\n/** Warn users about deprecated functionality. */\nexport function deprecationWarn(msg) {\n if (env().getBool('DEPRECATION_WARNINGS_ENABLED')) {\n console.warn(msg + ' You can disable deprecation warnings with ' +\n 'tf.disableDeprecationWarnings().');\n }\n}\nsetDeprecationWarningFn(deprecationWarn);\n/**\n * Dispose all variables kept in backend engine.\n *\n * @doc {heading: 'Environment'}\n */\nexport function disposeVariables() {\n ENGINE.disposeVariables();\n}\n/**\n * It returns the global engine that keeps track of all tensors and backends.\n *\n * @doc {heading: 'Environment'}\n */\nexport function engine() {\n return ENGINE;\n}\n/**\n * Returns memory info at the current time in the program. The result is an\n * object with the following properties:\n *\n * - `numBytes`: Number of bytes allocated (undisposed) at this time.\n * - `numTensors`: Number of unique tensors allocated.\n * - `numDataBuffers`: Number of unique data buffers allocated\n * (undisposed) at this time, which is \u2264 the number of tensors\n * (e.g. `a.reshape(newShape)` makes a new Tensor that shares the same\n * data buffer with `a`).\n * - `unreliable`: True if the memory usage is unreliable. 
See `reasons` when\n * `unreliable` is true.\n * - `reasons`: `string[]`, reasons why the memory is unreliable, present if\n * `unreliable` is true.\n *\n * WebGL Properties:\n * - `numBytesInGPU`: Number of bytes allocated (undisposed) in the GPU only at\n * this time.\n *\n * @doc {heading: 'Performance', subheading: 'Memory'}\n */\nexport function memory() {\n return ENGINE.memory();\n}\n/**\n * Executes the provided function `f()` and returns a promise that resolves\n * with information about the function's memory use:\n * - `newBytes`: the number of new bytes allocated\n * - `newTensors`: the number of new tensors created\n * - `peakBytes`: the peak number of bytes allocated\n * - `kernels`: an array of objects for each kernel involved that reports\n * their input and output shapes, number of bytes used, and number of new\n * tensors created.\n *\n * ```js\n * const profile = await tf.profile(() => {\n * const x = tf.tensor1d([1, 2, 3]);\n * let x2 = x.square();\n * x2.dispose();\n * x2 = x.square();\n * x2.dispose();\n * return x;\n * });\n *\n * console.log(`newBytes: ${profile.newBytes}`);\n * console.log(`newTensors: ${profile.newTensors}`);\n * console.log(`byte usage over all kernels: ${profile.kernels.map(k =>\n * k.totalBytesSnapshot)}`);\n * ```\n *\n *\n * @doc {heading: 'Performance', subheading: 'Profile'}\n */\nexport function profile(f) {\n return ENGINE.profile(f);\n}\n/**\n * Executes the provided function `fn` and after it is executed, cleans up all\n * intermediate tensors allocated by `fn` except those returned by `fn`.\n * `fn` must not return a Promise (async functions not allowed). The returned\n * result can be a complex object.\n *\n * Using this method helps avoid memory leaks. In general, wrap calls to\n * operations in `tf.tidy` for automatic memory cleanup.\n *\n * NOTE: Variables do *not* get cleaned up when inside a tidy(). If you want to\n * dispose variables, please use `tf.disposeVariables` or call dispose()\n * directly on variables.\n *\n * ```js\n * // y = 2 ^ 2 + 1\n * const y = tf.tidy(() => {\n * // a, b, and one will be cleaned up when the tidy ends.\n * const one = tf.scalar(1);\n * const a = tf.scalar(2);\n * const b = a.square();\n *\n * console.log('numTensors (in tidy): ' + tf.memory().numTensors);\n *\n * // The value returned inside the tidy function will return\n * // through the tidy, in this case to the variable y.\n * return b.add(one);\n * });\n *\n * console.log('numTensors (outside tidy): ' + tf.memory().numTensors);\n * y.print();\n * ```\n *\n * @param nameOrFn The name of the closure, or the function to execute.\n * If a name is provided, the 2nd argument should be the function.\n * If debug mode is on, the timing and the memory usage of the function\n * will be tracked and displayed on the console using the provided name.\n * @param fn The function to execute.\n *\n * @doc {heading: 'Performance', subheading: 'Memory'}\n */\nexport function tidy(nameOrFn, fn) {\n return ENGINE.tidy(nameOrFn, fn);\n}\n/**\n * Disposes any `tf.Tensor`s found within the provided object.\n *\n * @param container an object that may be a `tf.Tensor` or may directly\n * contain `tf.Tensor`s, such as a `Tensor[]` or `{key: Tensor, ...}`. If\n * the object is not a `tf.Tensor` or does not contain `Tensors`, nothing\n * happens. 
In general it is safe to pass any object here, except that\n * `Promise`s are not supported.\n *\n * @doc {heading: 'Performance', subheading: 'Memory'}\n */\nexport function dispose(container) {\n const tensors = getTensorsInContainer(container);\n tensors.forEach(tensor => tensor.dispose());\n}\n/**\n * Keeps a `tf.Tensor` generated inside a `tf.tidy` from being disposed\n * automatically.\n *\n * ```js\n * let b;\n * const y = tf.tidy(() => {\n * const one = tf.scalar(1);\n * const a = tf.scalar(2);\n *\n * // b will not be cleaned up by the tidy. a and one will be cleaned up\n * // when the tidy ends.\n * b = tf.keep(a.square());\n *\n * console.log('numTensors (in tidy): ' + tf.memory().numTensors);\n *\n * // The value returned inside the tidy function will return\n * // through the tidy, in this case to the variable y.\n * return b.add(one);\n * });\n *\n * console.log('numTensors (outside tidy): ' + tf.memory().numTensors);\n * console.log('y:');\n * y.print();\n * console.log('b:');\n * b.print();\n * ```\n *\n * @param result The tensor to keep from being disposed.\n *\n * @doc {heading: 'Performance', subheading: 'Memory'}\n */\nexport function keep(result) {\n return ENGINE.keep(result);\n}\n/**\n * Executes `f()` and returns a promise that resolves with timing\n * information.\n *\n * The result is an object with the following properties:\n *\n * - `wallMs`: Wall execution time.\n * - `kernelMs`: Kernel execution time, ignoring data transfer. If using the\n * WebGL backend and the query timer extension is not available, this will\n * return an error object.\n * - On `WebGL` The following additional properties exist:\n * - `uploadWaitMs`: CPU blocking time on texture uploads.\n * - `downloadWaitMs`: CPU blocking time on texture downloads (readPixels).\n *\n * ```js\n * const x = tf.randomNormal([20, 20]);\n * const time = await tf.time(() => x.matMul(x));\n *\n * console.log(`kernelMs: ${time.kernelMs}, wallTimeMs: ${time.wallMs}`);\n * ```\n *\n * @param f The function to execute and time.\n *\n * @doc {heading: 'Performance', subheading: 'Timing'}\n */\nexport function time(f) {\n return ENGINE.time(f);\n}\n/**\n * Sets the backend (cpu, webgl, wasm, etc) responsible for creating tensors and\n * executing operations on those tensors. Returns a promise that resolves\n * to a boolean if the backend initialization was successful.\n *\n * Note this disposes the current backend, if any, as well as any tensors\n * associated with it. A new backend is initialized, even if it is of the\n * same type as the previous one.\n *\n * @param backendName The name of the backend. Currently supports\n * `'webgl'|'cpu'` in the browser, `'tensorflow'` under node.js\n * (requires tfjs-node), and `'wasm'` (requires tfjs-backend-wasm).\n *\n * @doc {heading: 'Backends'}\n */\nexport function setBackend(backendName) {\n return ENGINE.setBackend(backendName);\n}\n/**\n * Returns a promise that resolves when the currently selected backend (or the\n * highest priority one) has initialized. Await this promise when you are using\n * a backend that has async initialization.\n *\n * @doc {heading: 'Backends'}\n */\nexport function ready() {\n return ENGINE.ready();\n}\n/**\n * Returns the current backend name (cpu, webgl, etc). 
The backend is\n * responsible for creating tensors and executing operations on those tensors.\n *\n * @doc {heading: 'Backends'}\n */\nexport function getBackend() {\n return ENGINE.backendName;\n}\n/**\n * Removes a backend and the registered factory.\n *\n * @doc {heading: 'Backends'}\n */\nexport function removeBackend(name) {\n ENGINE.removeBackend(name);\n}\n/**\n * Finds the backend registered under the provided name. Returns null if the\n * name is not in the registry, or the registration hasn't finished yet.\n */\nexport function findBackend(name) {\n return ENGINE.findBackend(name);\n}\n/**\n * Finds the backend factory registered under the provided name. Returns a\n * function that produces a new backend when called. Returns null if the name\n * is not in the registry.\n */\nexport function findBackendFactory(name) {\n return ENGINE.findBackendFactory(name);\n}\n/**\n * Registers a global backend. The registration should happen when importing\n * a module file (e.g. when importing `backend_webgl.ts`), and is used for\n * modular builds (e.g. custom tfjs bundle with only webgl support).\n *\n * @param factory The backend factory function. When called, it should\n * return a backend instance, or a promise of an instance.\n * @param priority The priority of the backend (higher = more important).\n * In case multiple backends are registered, the priority is used to find\n * the best backend. Defaults to 1.\n * @return False if there is already a registered backend under this name, true\n * if not.\n *\n * @doc {heading: 'Backends'}\n */\nexport function registerBackend(name, factory, priority = 1) {\n return ENGINE.registerBackend(name, factory, priority);\n}\n/**\n * Gets the current backend. If no backends have been initialized, this will\n * attempt to initialize the best backend. Will throw an error if the highest\n * priority backend has async initialization, in which case, you should call\n * 'await tf.ready()' before running other code.\n *\n * @doc {heading: 'Backends'}\n */\nexport function backend() {\n return ENGINE.backend;\n}\n/**\n * Sets the global platform.\n *\n * @param platformName The name of this platform.\n * @param platform A platform implementation.\n */\nexport function setPlatform(platformName, platform) {\n env().setPlatform(platformName, platform);\n}\n//# sourceMappingURL=globals.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Add } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Adds two `tf.Tensor`s element-wise, A + B. 
Supports broadcasting.\n *\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3, 4]);\n * const b = tf.tensor1d([10, 20, 30, 40]);\n *\n * a.add(b).print(); // or tf.add(a, b)\n * ```\n *\n * ```js\n * // Broadcast add a with b.\n * const a = tf.scalar(5);\n * const b = tf.tensor1d([10, 20, 30, 40]);\n *\n * a.add(b).print(); // or tf.add(a, b)\n * ```\n * @param a The first `tf.Tensor` to add.\n * @param b The second `tf.Tensor` to add. Must have the same type as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction add_(a, b) {\n let $a = convertToTensor(a, 'a', 'add');\n let $b = convertToTensor(b, 'b', 'add');\n [$a, $b] = makeTypesMatch($a, $b);\n const forward = (backend, save) => {\n const res = backend.add($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Add);\n}\nexport const add = op({ add_ });\n//# sourceMappingURL=add.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { FloorDiv } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Divides two `tf.Tensor`s element-wise, A / B. Supports broadcasting.\n * The result is rounded with floor function.\n *\n *\n * ```js\n * const a = tf.tensor1d([1, 4, 9, 16]);\n * const b = tf.tensor1d([1, 2, 3, 4]);\n *\n * a.floorDiv(b).print(); // or tf.div(a, b)\n * ```\n *\n * ```js\n * // Broadcast div a with b.\n * const a = tf.tensor1d([2, 4, 6, 8]);\n * const b = tf.scalar(2);\n *\n * a.floorDiv(b).print(); // or tf.floorDiv(a, b)\n * ```\n *\n * @param a The first tensor as the numerator.\n * @param b The second tensor as the denominator. Must have the same dtype as\n * `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction floorDiv_(a, b) {\n let $a = convertToTensor(a, 'a', 'floorDiv');\n let $b = convertToTensor(b, 'b', 'floorDiv');\n [$a, $b] = makeTypesMatch($a, $b);\n const forward = (backend, save) => {\n const res = backend.floorDiv($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, FloorDiv);\n}\nexport const floorDiv = op({ floorDiv_ });\n//# sourceMappingURL=floorDiv.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Div } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { floorDiv } from './floorDiv';\nimport { op } from './operation';\n/**\n * Divides two `tf.Tensor`s element-wise, A / B. Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([1, 4, 9, 16]);\n * const b = tf.tensor1d([1, 2, 3, 4]);\n *\n * a.div(b).print(); // or tf.div(a, b)\n * ```\n *\n * ```js\n * // Broadcast div a with b.\n * const a = tf.tensor1d([2, 4, 6, 8]);\n * const b = tf.scalar(2);\n *\n * a.div(b).print(); // or tf.div(a, b)\n * ```\n *\n * @param a The first tensor as the numerator.\n * @param b The second tensor as the denominator. Must have the same dtype as\n * `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction div_(a, b) {\n let $a = convertToTensor(a, 'a', 'div');\n let $b = convertToTensor(b, 'b', 'div');\n [$a, $b] = makeTypesMatch($a, $b);\n if ($a.dtype === 'int32' && $b.dtype === 'int32') {\n return floorDiv($a, $b);\n }\n const forward = (backend, save) => {\n const res = backend.realDivide($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n const attrs = {};\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Div, attrs);\n}\nexport const div = op({ div_ });\n//# sourceMappingURL=div.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Multiply } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Multiplies two `tf.Tensor`s element-wise, A * B. 
Supports broadcasting.\n *\n * We also expose `tf.mulStrict` which has the same signature as this op and\n * asserts that `a` and `b` are the same shape (does not broadcast).\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3, 4]);\n * const b = tf.tensor1d([2, 3, 4, 5]);\n *\n * a.mul(b).print(); // or tf.mul(a, b)\n * ```\n *\n * ```js\n * // Broadcast mul a with b.\n * const a = tf.tensor1d([1, 2, 3, 4]);\n * const b = tf.scalar(5);\n *\n * a.mul(b).print(); // or tf.mul(a, b)\n * ```\n * @param a The first tensor to multiply.\n * @param b The second tensor to multiply. Must have the same dtype as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction mul_(a, b) {\n let $a = convertToTensor(a, 'a', 'mul');\n let $b = convertToTensor(b, 'b', 'mul');\n [$a, $b] = makeTypesMatch($a, $b);\n const forward = (backend, save) => {\n const res = backend.multiply($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Multiply);\n}\nexport const mul = op({ mul_ });\n//# sourceMappingURL=mul.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Abs } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes absolute value element-wise: `abs(x)`\n *\n * ```js\n * const x = tf.tensor1d([-1, 2, -3, 4]);\n *\n * x.abs().print(); // or tf.abs(x)\n * ```\n * @param x The input `tf.Tensor`.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction abs_(x) {\n const $x = convertToTensor(x, 'x', 'abs');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n save([$x]);\n if ($x.dtype === 'complex64') {\n return backend.complexAbs($x);\n }\n return backend.abs($x);\n }, inputs, null /* grad */, Abs);\n}\nexport const abs = op({ abs_ });\n//# sourceMappingURL=abs.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Acos } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes acos of the input `tf.Tensor` element-wise: `acos(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, .7]);\n *\n * x.acos().print(); // or tf.acos(x)\n * ```\n * @param x The input tensor.\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction acos_(x) {\n const $x = convertToTensor(x, 'x', 'acos');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.acos($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Acos);\n}\nexport const acos = op({ acos_ });\n//# sourceMappingURL=acos.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Acosh } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes the inverse hyperbolic cos of the input `tf.Tensor` element-wise:\n * `acosh(x)`\n *\n * ```js\n * const x = tf.tensor1d([10, 1, 3, 5.7]);\n *\n * x.acosh().print(); // or tf.acosh(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction acosh_(x) {\n const $x = convertToTensor(x, 'x', 'acosh');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.acosh($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Acosh);\n}\nexport const acosh = op({ acosh_ });\n//# sourceMappingURL=acosh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { AddN } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * Adds a list of `tf.Tensor`s element-wise, each with the same shape and dtype.\n *\n * ```js\n * const a = tf.tensor1d([1, 2]);\n * const b = tf.tensor1d([3, 4]);\n * const c = tf.tensor1d([5, 6]);\n *\n * tf.addN([a, b, c]).print();\n * ```\n * @param tensors A list of tensors with the same shape and dtype.\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction addN_(tensors) {\n util.assert(Array.isArray(tensors), () => 'The argument passed to tf.addN() must be a list of tensors');\n util.assert(tensors.length >= 1, () => `Must pass at least one tensor to tf.addN(), but got ` +\n `${tensors.length}`);\n const $tensors = tensors.map((t, i) => convertToTensor(t, `tensors${i}`, 'addN'));\n const firstTensor = $tensors[0];\n $tensors.forEach(t => {\n if (t.dtype !== firstTensor.dtype) {\n throw new Error('All tensors passed to tf.addN() must have the same dtype');\n }\n });\n $tensors.forEach(t => {\n if (!util.arraysEqual(t.shape, firstTensor.shape)) {\n throw new Error('All tensors passed to tf.addN() must have the same shape');\n }\n });\n const forward = (backend, save) => {\n const res = backend.addN($tensors);\n save($tensors);\n return res;\n };\n const inputs = $tensors;\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, AddN);\n}\nexport const addN = op({ addN_ });\n//# sourceMappingURL=add_n.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as util from '../util';\n/**\n * Returns true if the axis specifies the inner most dimensions of the\n * array.\n */\nexport function axesAreInnerMostDims(axes, rank) {\n for (let i = 0; i < axes.length; ++i) {\n if (axes[axes.length - i - 1] !== rank - 1 - i) {\n return false;\n }\n }\n return true;\n}\nexport function combineLocations(outputLoc, reduceLoc, axes) {\n const rank = outputLoc.length + reduceLoc.length;\n const loc = [];\n let outIdx = 0;\n let reduceIdx = 0;\n for (let dim = 0; dim < rank; dim++) {\n if (axes.indexOf(dim) === -1) {\n loc.push(outputLoc[outIdx++]);\n }\n else {\n loc.push(reduceLoc[reduceIdx++]);\n }\n }\n return loc;\n}\nexport function computeOutAndReduceShapes(aShape, axes) {\n const outShape = [];\n const rank = aShape.length;\n for (let dim = 0; dim < rank; dim++) {\n if (axes.indexOf(dim) === -1) {\n outShape.push(aShape[dim]);\n }\n }\n const reduceShape = axes.map(dim => aShape[dim]);\n return [outShape, reduceShape];\n}\nexport function expandShapeToKeepDim(shape, axes) {\n const reduceSubShape = axes.map(x => 1);\n return combineLocations(shape, reduceSubShape, axes);\n}\nexport function assertAxesAreInnerMostDims(msg, axes, rank) {\n util.assert(axesAreInnerMostDims(axes, rank), () => `${msg} supports only inner-most axes for now. ` +\n `Got axes ${axes} and rank-${rank} input.`);\n}\n/**\n * Returns the axes permutation to be used with `tf.transpose`, if such\n * permutation is necessary. Otherwise it returns null. This method is used by\n * operations that operate only on inner-most axes.\n */\nexport function getAxesPermutation(axes, rank) {\n if (axesAreInnerMostDims(axes, rank)) {\n return null;\n }\n const result = [];\n for (let i = 0; i < rank; ++i) {\n if (axes.indexOf(i) === -1) {\n result.push(i);\n }\n }\n axes.forEach(axis => result.push(axis));\n return result;\n}\n/** Returns the axes permutation that undoes the original permutation. */\nexport function getUndoAxesPermutation(axes) {\n return axes.map((axis, i) => [i, axis])\n .sort((a, b) => a[1] - b[1])\n .map(x => x[0]);\n}\nexport function getInnerMostAxes(numAxes, rank) {\n const res = [];\n for (let i = rank - numAxes; i < rank; ++i) {\n res.push(i);\n }\n return res;\n}\n//# sourceMappingURL=axis_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { All } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam } from '../util';\nimport { expandShapeToKeepDim, getAxesPermutation, getInnerMostAxes } from './axis_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { transpose } from './transpose';\n/**\n * Computes the logical and of elements across dimensions of a `tf.Tensor`.\n *\n * Reduces the input along the dimensions given in `axes`. Unless `keepDims`\n * is true, the rank of the `tf.Tensor` is reduced by 1 for each entry in\n * `axes`. If `keepDims` is true, the reduced dimensions are retained with\n * length 1. If `axes` has no entries, all dimensions are reduced, and an\n * `tf.Tensor` with a single element is returned.\n *\n * ```js\n * const x = tf.tensor1d([1, 1, 1], 'bool');\n *\n * x.all().print(); // or tf.all(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 1, 0, 0], [2, 2], 'bool');\n *\n * const axis = 1;\n * x.all(axis).print(); // or tf.all(x, axis)\n * ```\n *\n * @param x The input tensor. Must be of dtype bool.\n * @param axis The dimension(s) to reduce. By default it reduces\n * all dimensions.\n * @param keepDims If true, retains reduced dimensions with size 1.\n *\n * @doc {heading: 'Operations', subheading: 'Reduction'}\n */\nfunction all_(x, axis = null, keepDims = false) {\n let $x = convertToTensor(x, 'x', 'all', 'bool');\n const forward = (backend) => {\n const origAxes = parseAxisParam(axis, $x.shape);\n let axes = origAxes;\n const permutedAxes = getAxesPermutation(axes, $x.rank);\n if (permutedAxes != null) {\n $x = transpose($x, permutedAxes);\n axes = getInnerMostAxes(axes.length, $x.rank);\n }\n const res = backend.all($x, axes);\n if (keepDims) {\n const newShape = expandShapeToKeepDim(res.shape, origAxes);\n return reshape(res, newShape);\n }\n return res;\n };\n const inputs = { x: $x };\n const attrs = { axis, keepDims };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, All, attrs);\n}\nexport const all = op({ all_ });\n//# sourceMappingURL=all.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Any } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam } from '../util';\nimport { expandShapeToKeepDim, getAxesPermutation, getInnerMostAxes } from './axis_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { transpose } from './transpose';\n/**\n * Computes the logical or of elements across dimensions of a `tf.Tensor`.\n *\n * Reduces the input along the dimensions given in `axes`. Unless `keepDims`\n * is true, the rank of the `tf.Tensor` is reduced by 1 for each entry in\n * `axes`. If `keepDims` is true, the reduced dimensions are retained with\n * length 1. If `axes` has no entries, all dimensions are reduced, and an\n * `tf.Tensor` with a single element is returned.\n *\n * ```js\n * const x = tf.tensor1d([1, 1, 1], 'bool');\n *\n * x.any().print(); // or tf.any(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 1, 0, 0], [2, 2], 'bool');\n *\n * const axis = 1;\n * x.any(axis).print(); // or tf.any(x, axis)\n * ```\n *\n * @param x The input tensor. Must be of dtype bool.\n * @param axis The dimension(s) to reduce. By default it reduces\n * all dimensions.\n * @param keepDims If true, retains reduced dimensions with size 1.\n *\n * @doc {heading: 'Operations', subheading: 'Reduction'}\n */\nfunction any_(x, axis = null, keepDims = false) {\n let $x = convertToTensor(x, 'x', 'any', 'bool');\n const forward = (backend) => {\n const origAxes = parseAxisParam(axis, $x.shape);\n let axes = origAxes;\n const permutedAxes = getAxesPermutation(axes, $x.rank);\n if (permutedAxes != null) {\n $x = transpose($x, permutedAxes);\n axes = getInnerMostAxes(axes.length, $x.rank);\n }\n const res = backend.any($x, axes);\n if (keepDims) {\n const newShape = expandShapeToKeepDim(res.shape, origAxes);\n return reshape(res, newShape);\n }\n return res;\n };\n const inputs = { x: $x };\n const attrs = { axis, keepDims };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Any, attrs);\n}\n// tslint:disable-next-line:variable-name\nexport const any = op({ any_ });\n//# sourceMappingURL=any.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { ArgMax } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as axis_util from './axis_util';\nimport { op } from './operation';\nimport { transpose } from './transpose';\n/**\n * Returns the indices of the maximum values along an `axis`.\n *\n * The result has the same shape as `input` with the dimension along `axis`\n * removed.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3]);\n *\n * x.argMax().print(); // or tf.argMax(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 4, 3], [2, 2]);\n *\n * const axis = 1;\n * x.argMax(axis).print(); // or tf.argMax(x, axis)\n * ```\n *\n * @param x The input tensor.\n * @param axis The dimension to reduce. Defaults to 0 (outer-most dimension).\n *\n * @doc {heading: 'Operations', subheading: 'Reduction'}\n */\nfunction argMax_(x, axis = 0) {\n let $x = convertToTensor(x, 'x', 'argMax');\n const forward = (backend, save) => {\n save([$x]);\n let axes = util.parseAxisParam(axis, $x.shape);\n const permutedAxes = axis_util.getAxesPermutation(axes, $x.rank);\n if (permutedAxes != null) {\n $x = transpose($x, permutedAxes);\n axes = axis_util.getInnerMostAxes(axes.length, $x.rank);\n }\n return backend.argMax($x, axes[0]);\n };\n const inputs = { x: $x };\n const attrs = { axis };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, ArgMax, attrs);\n}\nexport const argMax = op({ argMax_ });\n//# sourceMappingURL=arg_max.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { ArgMin } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as axis_util from './axis_util';\nimport { op } from './operation';\nimport { transpose } from './transpose';\n/**\n * Returns the indices of the minimum values along an `axis`.\n *\n * The result has the same shape as `input` with the dimension along `axis`\n * removed.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3]);\n *\n * x.argMin().print(); // or tf.argMin(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 4, 3], [2, 2]);\n *\n * const axis = 1;\n * x.argMin(axis).print(); // or tf.argMin(x, axis)\n * ```\n *\n * @param x The input tensor.\n * @param axis The dimension to reduce. Defaults to 0 (outer-most dimension).\n *\n * @doc {heading: 'Operations', subheading: 'Reduction'}\n */\nfunction argMin_(x, axis = 0) {\n let $x = convertToTensor(x, 'x', 'argMin');\n const forward = (backend, save) => {\n save([$x]);\n if (axis == null) {\n axis = 0;\n }\n let axes = util.parseAxisParam(axis, $x.shape);\n const permutedAxes = axis_util.getAxesPermutation(axes, $x.rank);\n if (permutedAxes != null) {\n $x = transpose($x, permutedAxes);\n axes = axis_util.getInnerMostAxes(axes.length, $x.rank);\n }\n return backend.argMin($x, axes[0]);\n };\n const inputs = { x: $x };\n const attrs = { axis };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, ArgMin, attrs);\n}\nexport const argMin = op({ argMin_ });\n//# sourceMappingURL=arg_min.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Asin } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes asin of the input `tf.Tensor` element-wise: `asin(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, .7]);\n *\n * x.asin().print(); // or tf.asin(x)\n * ```\n * @param x The input tensor.\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction asin_(x) {\n const $x = convertToTensor(x, 'x', 'asin');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.asin($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Asin);\n}\nexport const asin = op({ asin_ });\n//# sourceMappingURL=asin.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Asinh } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes inverse hyperbolic sin of the input `tf.Tensor` element-wise:\n * `asinh(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, .7]);\n *\n * x.asinh().print(); // or tf.asinh(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction asinh_(x) {\n const $x = convertToTensor(x, 'x', 'asinh');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.asinh($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Asinh);\n}\nexport const asinh = op({ asinh_ });\n//# sourceMappingURL=asinh.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Atan } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes atan of the input `tf.Tensor` element-wise: `atan(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, .7]);\n *\n * x.atan().print(); // or tf.atan(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction atan_(x) {\n const $x = convertToTensor(x, 'x', 'atan');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.atan($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Atan);\n}\nexport const atan = op({ atan_ });\n//# sourceMappingURL=atan.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Atan2 } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes arctangent of `tf.Tensor`s a / b element-wise: `atan2(a, b)`.\n * Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([1.0, 1.0, -1.0, .7]);\n * const b = tf.tensor1d([2.0, 13.0, 3.5, .21]);\n *\n * tf.atan2(a, b).print()\n * ```\n *\n * @param a The first tensor.\n * @param b The second tensor. Must have the same dtype as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction atan2_(a, b) {\n let $a = convertToTensor(a, 'a', 'atan2');\n let $b = convertToTensor(b, 'b', 'atan2');\n [$a, $b] = makeTypesMatch($a, $b);\n const forward = (backend, save) => {\n const res = backend.atan2($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Atan2);\n}\nexport const atan2 = op({ atan2_ });\n//# sourceMappingURL=atan2.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Atanh } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes inverse hyperbolic tan of the input `tf.Tensor` element-wise:\n * `atanh(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, .1, -.1, .7]);\n *\n * x.atanh().print(); // or tf.atanh(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction atanh_(x) {\n const $x = convertToTensor(x, 'x', 'atanh');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.atanh($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Atanh);\n}\nexport const atanh = op({ atanh_ });\n//# sourceMappingURL=atanh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as util from '../util';\n/**\n *\n * @param inputShape Input tensor shape is of the following dimensions:\n * `[batch, height, width, inChannels]`.\n * @param filterShape The filter shape is of the following dimensions:\n * `[filterHeight, filterWidth, depth]`.\n * @param strides The strides of the sliding window for each dimension of the\n * input tensor: `[strideHeight, strideWidth]`.\n * If `strides` is a single number,\n * then `strideHeight == strideWidth`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1*1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dataFormat The data format of the input and output data.\n * Defaults to 'NHWC'.\n * @param dilations The dilation rates: `[dilationHeight, dilationWidth]`.\n * Defaults to `[1, 1]`. 
If `dilations` is a single number, then\n * `dilationHeight == dilationWidth`.\n */\nexport function computeDilation2DInfo(inputShape, filterShape, strides, pad, dataFormat = 'NHWC', dilations) {\n // `computerConv2DInfo` require filterShape to be in the dimension of:\n // `[filterHeight, filterWidth, depth, outDepth]`, dilation2d doesn't have\n // outDepth, it should have the same depth as the input.\n // Input shape: [batch, height, width, inChannels]\n const inputChannels = inputShape[3];\n const $filterShape = [...filterShape, inputChannels];\n const $dataFormat = convertConv2DDataFormat(dataFormat);\n return computeConv2DInfo(inputShape, $filterShape, strides, dilations, pad, null /* roundingMode */, null /* depthWise */, $dataFormat);\n}\nexport function computePool2DInfo(inShape, filterSize, strides, dilations, pad, roundingMode, dataFormat = 'channelsLast') {\n const [filterHeight, filterWidth] = parseTupleParam(filterSize);\n let filterShape;\n if (dataFormat === 'channelsLast') {\n filterShape = [filterHeight, filterWidth, inShape[3], inShape[3]];\n }\n else if (dataFormat === 'channelsFirst') {\n filterShape = [filterHeight, filterWidth, inShape[1], inShape[1]];\n }\n else {\n throw new Error(`Unknown dataFormat ${dataFormat}`);\n }\n return computeConv2DInfo(inShape, filterShape, strides, dilations, pad, roundingMode, false, dataFormat);\n}\n/**\n * Computes the information for a forward pass of a pooling3D operation.\n */\nexport function computePool3DInfo(inShape, filterSize, strides, dilations, pad, roundingMode, dataFormat = 'NDHWC') {\n const [filterDepth, filterHeight, filterWidth] = parse3TupleParam(filterSize);\n let filterShape;\n let $dataFormat;\n if (dataFormat === 'NDHWC') {\n $dataFormat = 'channelsLast';\n filterShape =\n [filterDepth, filterHeight, filterWidth, inShape[4], inShape[4]];\n }\n else if (dataFormat === 'NCDHW') {\n $dataFormat = 'channelsFirst';\n filterShape =\n [filterDepth, filterHeight, filterWidth, inShape[1], inShape[1]];\n }\n else {\n throw new Error(`Unknown dataFormat ${dataFormat}`);\n }\n return computeConv3DInfo(inShape, filterShape, strides, dilations, pad, false, $dataFormat, roundingMode);\n}\n/**\n * Computes the information for a forward pass of a convolution/pooling\n * operation.\n */\nexport function computeConv2DInfo(inShape, filterShape, strides, dilations, pad, roundingMode, depthwise = false, dataFormat = 'channelsLast') {\n let [batchSize, inHeight, inWidth, inChannels] = [-1, -1, -1, -1];\n if (dataFormat === 'channelsLast') {\n [batchSize, inHeight, inWidth, inChannels] = inShape;\n }\n else if (dataFormat === 'channelsFirst') {\n [batchSize, inChannels, inHeight, inWidth] = inShape;\n }\n else {\n throw new Error(`Unknown dataFormat ${dataFormat}`);\n }\n const [filterHeight, filterWidth, , filterChannels] = filterShape;\n const [strideHeight, strideWidth] = parseTupleParam(strides);\n const [dilationHeight, dilationWidth] = parseTupleParam(dilations);\n const effectiveFilterHeight = getEffectiveFilterSize(filterHeight, dilationHeight);\n const effectiveFilterWidth = getEffectiveFilterSize(filterWidth, dilationWidth);\n const { padInfo, outHeight, outWidth } = getPadAndOutInfo(pad, inHeight, inWidth, strideHeight, strideWidth, effectiveFilterHeight, effectiveFilterWidth, roundingMode, dataFormat);\n const outChannels = depthwise ? 
filterChannels * inChannels : filterChannels;\n let outShape;\n if (dataFormat === 'channelsFirst') {\n outShape = [batchSize, outChannels, outHeight, outWidth];\n }\n else if (dataFormat === 'channelsLast') {\n outShape = [batchSize, outHeight, outWidth, outChannels];\n }\n return {\n batchSize,\n dataFormat,\n inHeight,\n inWidth,\n inChannels,\n outHeight,\n outWidth,\n outChannels,\n padInfo,\n strideHeight,\n strideWidth,\n filterHeight,\n filterWidth,\n effectiveFilterHeight,\n effectiveFilterWidth,\n dilationHeight,\n dilationWidth,\n inShape,\n outShape,\n filterShape\n };\n}\n/**\n * Computes the information for a forward pass of a 3D convolution/pooling\n * operation.\n */\nexport function computeConv3DInfo(inShape, filterShape, strides, dilations, pad, depthwise = false, dataFormat = 'channelsLast', roundingMode) {\n let [batchSize, inDepth, inHeight, inWidth, inChannels] = [-1, -1, -1, -1, -1];\n if (dataFormat === 'channelsLast') {\n [batchSize, inDepth, inHeight, inWidth, inChannels] = inShape;\n }\n else if (dataFormat === 'channelsFirst') {\n [batchSize, inChannels, inDepth, inHeight, inWidth] = inShape;\n }\n else {\n throw new Error(`Unknown dataFormat ${dataFormat}`);\n }\n const [filterDepth, filterHeight, filterWidth, , filterChannels] = filterShape;\n const [strideDepth, strideHeight, strideWidth] = parse3TupleParam(strides);\n const [dilationDepth, dilationHeight, dilationWidth] = parse3TupleParam(dilations);\n const effectiveFilterDepth = getEffectiveFilterSize(filterDepth, dilationDepth);\n const effectiveFilterHeight = getEffectiveFilterSize(filterHeight, dilationHeight);\n const effectiveFilterWidth = getEffectiveFilterSize(filterWidth, dilationWidth);\n const { padInfo, outDepth, outHeight, outWidth } = get3DPadAndOutInfo(pad, inDepth, inHeight, inWidth, strideDepth, strideHeight, strideWidth, effectiveFilterDepth, effectiveFilterHeight, effectiveFilterWidth, roundingMode);\n const outChannels = depthwise ? filterChannels * inChannels : filterChannels;\n let outShape;\n if (dataFormat === 'channelsFirst') {\n outShape = [batchSize, outChannels, outDepth, outHeight, outWidth];\n }\n else if (dataFormat === 'channelsLast') {\n outShape = [batchSize, outDepth, outHeight, outWidth, outChannels];\n }\n return {\n batchSize,\n dataFormat,\n inDepth,\n inHeight,\n inWidth,\n inChannels,\n outDepth,\n outHeight,\n outWidth,\n outChannels,\n padInfo,\n strideDepth,\n strideHeight,\n strideWidth,\n filterDepth,\n filterHeight,\n filterWidth,\n effectiveFilterDepth,\n effectiveFilterHeight,\n effectiveFilterWidth,\n dilationDepth,\n dilationHeight,\n dilationWidth,\n inShape,\n outShape,\n filterShape\n };\n}\nfunction computeOutputShape2D(inShape, fieldSize, stride, zeroPad, roundingMode) {\n if (zeroPad == null) {\n zeroPad = computeDefaultPad(inShape, fieldSize, stride);\n }\n const inputRows = inShape[0];\n const inputCols = inShape[1];\n const outputRows = conditionalRound((inputRows - fieldSize + 2 * zeroPad) / stride + 1, roundingMode);\n util.assert(util.isInt(outputRows), () => `The output # of rows (${outputRows}) must be an integer. ` +\n `Change the stride and/or zero pad parameters`);\n const outputCols = conditionalRound((inputCols - fieldSize + 2 * zeroPad) / stride + 1, roundingMode);\n util.assert(util.isInt(outputCols), () => `The output # of columns (${outputCols}) must be an integer. 
` +\n `Change the stride and/or zero pad parameters`);\n return [outputRows, outputCols];\n}\nfunction computeOutputShape4D(inShape, fieldSize, outChannels, stride, zeroPad, roundingMode) {\n if (zeroPad == null) {\n zeroPad = computeDefaultPad(inShape, fieldSize, stride);\n }\n const inputDepth = inShape[0];\n const inputRows = inShape[1];\n const inputCols = inShape[2];\n const outputDepths = conditionalRound((inputDepth - fieldSize + 2 * zeroPad) / stride + 1, roundingMode);\n util.assert(util.isInt(outputDepths), () => `The output # of depths (${outputDepths}) must be an integer. ` +\n `Change the stride and/or zero pad parameters`);\n const outputRows = conditionalRound((inputRows - fieldSize + 2 * zeroPad) / stride + 1, roundingMode);\n util.assert(util.isInt(outputRows), () => `The output # of rows (${outputRows}) must be an integer. ` +\n `Change the stride and/or zero pad parameters`);\n const outputCols = conditionalRound((inputCols - fieldSize + 2 * zeroPad) / stride + 1, roundingMode);\n util.assert(util.isInt(outputCols), () => `The output # of columns (${outputCols}) must be an integer. ` +\n `Change the stride and/or zero pad parameters`);\n return [outputDepths, outputRows, outputCols, outChannels];\n}\nexport function computeDefaultPad(inputShape, fieldSize, stride, dilation = 1) {\n const effectiveFieldSize = getEffectiveFilterSize(fieldSize, dilation);\n return Math.floor((inputShape[0] * (stride - 1) - stride + effectiveFieldSize) / 2);\n}\nfunction parseTupleParam(param) {\n if (typeof param === 'number') {\n return [param, param, param];\n }\n if (param.length === 2) {\n return [param[0], param[1], 1];\n }\n return param;\n}\nfunction parse3TupleParam(param) {\n return typeof param === 'number' ? [param, param, param] : param;\n}\n/* See https://www.tensorflow.org/api_docs/python/tf/nn/atrous_conv2d\n * Atrous convolution is equivalent to standard convolution with upsampled\n * filters with effective_filter_height =\n * filter_height + (filter_height - 1) * (dilation - 1)\n * and effective_filter_width =\n * filter_width + (filter_width - 1) * (dilation - 1),\n * produced by inserting dilation - 1 zeros along consecutive elements across\n * the filters' spatial dimensions.\n * When there is a dilation, this converts a filter dimension to the\n * effective filter dimension, so it can be used in a standard convolution.\n */\nfunction getEffectiveFilterSize(filterSize, dilation) {\n if (dilation <= 1) {\n return filterSize;\n }\n return filterSize + (filterSize - 1) * (dilation - 1);\n}\nfunction getPadAndOutInfo(pad, inHeight, inWidth, strideHeight, strideWidth, filterHeight, filterWidth, roundingMode, dataFormat) {\n let padInfo;\n let outHeight;\n let outWidth;\n if (typeof pad === 'number') {\n const padType = (pad === 0) ? 
'VALID' : 'NUMBER';\n padInfo = { top: pad, bottom: pad, left: pad, right: pad, type: padType };\n const outShape = computeOutputShape2D([inHeight, inWidth], filterHeight, strideHeight, pad, roundingMode);\n outHeight = outShape[0];\n outWidth = outShape[1];\n }\n else if (pad === 'same') {\n outHeight = Math.ceil(inHeight / strideHeight);\n outWidth = Math.ceil(inWidth / strideWidth);\n const padAlongHeight = Math.max(0, (outHeight - 1) * strideHeight + filterHeight - inHeight);\n const padAlongWidth = Math.max(0, (outWidth - 1) * strideWidth + filterWidth - inWidth);\n const top = Math.floor(padAlongHeight / 2);\n const bottom = padAlongHeight - top;\n const left = Math.floor(padAlongWidth / 2);\n const right = padAlongWidth - left;\n padInfo = { top, bottom, left, right, type: 'SAME' };\n }\n else if (pad === 'valid') {\n padInfo = { top: 0, bottom: 0, left: 0, right: 0, type: 'VALID' };\n outHeight = Math.ceil((inHeight - filterHeight + 1) / strideHeight);\n outWidth = Math.ceil((inWidth - filterWidth + 1) / strideWidth);\n }\n else if (typeof pad === 'object') {\n const top = dataFormat === 'channelsLast' ? pad[1][0] : pad[2][0];\n const bottom = dataFormat === 'channelsLast' ? pad[1][1] : pad[2][1];\n const left = dataFormat === 'channelsLast' ? pad[2][0] : pad[3][0];\n const right = dataFormat === 'channelsLast' ? pad[2][1] : pad[3][1];\n const padType = (top === 0 && bottom === 0 && left === 0 && right === 0) ?\n 'VALID' :\n 'EXPLICIT';\n padInfo = { top, bottom, left, right, type: padType };\n outHeight = conditionalRound((inHeight - filterHeight + top + bottom) / strideHeight + 1, roundingMode);\n outWidth = conditionalRound((inWidth - filterWidth + left + right) / strideWidth + 1, roundingMode);\n }\n else {\n throw Error(`Unknown padding parameter: ${pad}`);\n }\n return { padInfo, outHeight, outWidth };\n}\nfunction get3DPadAndOutInfo(pad, inDepth, inHeight, inWidth, strideDepth, strideHeight, strideWidth, filterDepth, filterHeight, filterWidth, roundingMode) {\n let padInfo;\n let outDepth;\n let outHeight;\n let outWidth;\n if (typeof pad === 'number') {\n const padType = (pad === 0) ? 
'VALID' : 'NUMBER';\n padInfo = {\n top: pad,\n bottom: pad,\n left: pad,\n right: pad,\n front: pad,\n back: pad,\n type: padType\n };\n const outShape = computeOutputShape4D([inDepth, inHeight, inWidth, 1], filterDepth, 1, strideDepth, pad, roundingMode);\n outDepth = outShape[0];\n outHeight = outShape[1];\n outWidth = outShape[2];\n }\n else if (pad === 'same') {\n outDepth = Math.ceil(inDepth / strideDepth);\n outHeight = Math.ceil(inHeight / strideHeight);\n outWidth = Math.ceil(inWidth / strideWidth);\n const padAlongDepth = (outDepth - 1) * strideDepth + filterDepth - inDepth;\n const padAlongHeight = (outHeight - 1) * strideHeight + filterHeight - inHeight;\n const padAlongWidth = (outWidth - 1) * strideWidth + filterWidth - inWidth;\n const front = Math.floor(padAlongDepth / 2);\n const back = padAlongDepth - front;\n const top = Math.floor(padAlongHeight / 2);\n const bottom = padAlongHeight - top;\n const left = Math.floor(padAlongWidth / 2);\n const right = padAlongWidth - left;\n padInfo = { top, bottom, left, right, front, back, type: 'SAME' };\n }\n else if (pad === 'valid') {\n padInfo = {\n top: 0,\n bottom: 0,\n left: 0,\n right: 0,\n front: 0,\n back: 0,\n type: 'VALID'\n };\n outDepth = Math.ceil((inDepth - filterDepth + 1) / strideDepth);\n outHeight = Math.ceil((inHeight - filterHeight + 1) / strideHeight);\n outWidth = Math.ceil((inWidth - filterWidth + 1) / strideWidth);\n }\n else {\n throw Error(`Unknown padding parameter: ${pad}`);\n }\n return { padInfo, outDepth, outHeight, outWidth };\n}\n/**\n * Rounds a value depending on the rounding mode\n * @param value\n * @param roundingMode\n */\nfunction conditionalRound(value, roundingMode) {\n if (!roundingMode) {\n return value;\n }\n switch (roundingMode) {\n case 'round':\n // used for Caffe Conv\n return Math.round(value);\n case 'ceil':\n // used for Caffe Pool\n return Math.ceil(value);\n case 'floor':\n return Math.floor(value);\n default:\n throw new Error(`Unknown roundingMode ${roundingMode}`);\n }\n}\nexport function tupleValuesAreOne(param) {\n const [dimA, dimB, dimC] = parseTupleParam(param);\n return dimA === 1 && dimB === 1 && dimC === 1;\n}\nexport function eitherStridesOrDilationsAreOne(strides, dilations) {\n return tupleValuesAreOne(strides) || tupleValuesAreOne(dilations);\n}\n/**\n * Convert Conv2D dataFormat from 'NHWC'|'NCHW' to\n * 'channelsLast'|'channelsFirst'\n * @param dataFormat in 'NHWC'|'NCHW' mode\n * @return dataFormat in 'channelsLast'|'channelsFirst' mode\n * @throws unknown dataFormat\n */\nexport function convertConv2DDataFormat(dataFormat) {\n if (dataFormat === 'NHWC') {\n return 'channelsLast';\n }\n else if (dataFormat === 'NCHW') {\n return 'channelsFirst';\n }\n else {\n throw new Error(`Unknown dataFormat ${dataFormat}`);\n }\n}\n//# sourceMappingURL=conv_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { AvgPool } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { cast } from './cast';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the 2D average pooling of an image.\n *\n * @param x The input tensor, of rank 4 or rank 3 of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is assumed.\n * @param filterSize The filter size: `[filterHeight, filterWidth]`. If\n * `filterSize` is a single number, then `filterHeight == filterWidth`.\n * @param strides The strides of the pooling: `[strideHeight, strideWidth]`. If\n * `strides` is a single number, then `strideHeight == strideWidth`.\n * @param pad The type of padding algorithm:\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. If none is provided, it will not round\n * and error if the output is of fractional size.\n */\nfunction avgPool_(x, filterSize, strides, pad, dimRoundingMode) {\n const $x = convertToTensor(x, 'x', 'avgPool', 'float32');\n const dilations = 1;\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in avgPool: Either strides or dilations must be 1. 
' +\n `Got strides ${strides} and dilations '${dilations}'`);\n let x4D = $x;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n reshapedTo4D = true;\n x4D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2]]);\n }\n util.assert(x4D.rank === 4, () => `Error in avgPool: x must be rank 4 but got rank ${x4D.rank}.`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in avgPool: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const forward = (backend, save) => {\n const convInfo = conv_util.computePool2DInfo(x4D.shape, filterSize, strides, 1 /* dilations */, pad, dimRoundingMode);\n save([x4D]);\n if (convInfo.filterWidth === 1 && convInfo.filterHeight === 1 &&\n util.arraysEqual(convInfo.inShape, convInfo.outShape)) {\n return x4D.clone();\n }\n return backend.avgPool(x4D, convInfo);\n };\n const inputs = { x: x4D };\n const attrs = { filterSize, strides, pad, dimRoundingMode };\n let res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, AvgPool, attrs);\n res = cast(res, $x.dtype);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const avgPool = op({ avgPool_ });\n//# sourceMappingURL=avg_pool.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { deprecationWarn } from '../globals';\nimport { AvgPool3D } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { cast } from './cast';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the 3D average pooling.\n *\n * ```js\n * const x = tf.tensor5d([1, 2, 3, 4, 5, 6, 7, 8], [1, 2, 2, 2, 1]);\n * const result = tf.avgPool3d(x, 2, 1, 'valid');\n * result.print();\n * ```\n *\n * @param x The input tensor, of rank 5 or rank 4 of shape\n * `[batch, depth, height, width, inChannels]`.\n * @param filterSize The filter size:\n * `[filterDepth, filterHeight, filterWidth]`.\n * If `filterSize` is a single number,\n * then `filterDepth == filterHeight == filterWidth`.\n * @param strides The strides of the pooling:\n * `[strideDepth, strideHeight, strideWidth]`.\n * If `strides` is a single number,\n * then `strideDepth == strideHeight == strideWidth`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1*1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is 
a number. If none is provided, it will not round\n * and error if the output is of fractional size.\n * @param dataFormat An optional string from: \"NDHWC\", \"NCDHW\". Defaults to\n * \"NDHWC\". Specify the data format of the input and output data. With the\n * default format \"NDHWC\", the data is stored in the order of: [batch,\n * depth, height, width, channels]. Only \"NDHWC\" is currently supported.\n * @param dilations Deprecated, this field will be gone in v3.0.0.\n * The dilation rates:\n * `[dilationDepth, dilationHeight, dilationWidth]`\n * in which we sample input values across the depth, height and width\n * dimensions in dilated pooling.\n * Defaults to `[1, 1, 1]`. If `dilations` is a single number,\n * then `dilationDepth == dilationHeight == dilationWidth`.\n * If it is greater than 1, then all values of `strides` must be 1.\n *\n * @doc {heading: 'Operations', subheading: 'Convolution'}\n */\nfunction avgPool3d_(x, filterSize, strides, pad, dimRoundingMode, dataFormat = 'NDHWC', dilations) {\n if (dilations == null) {\n dilations = [1, 1, 1];\n }\n else {\n deprecationWarn('dilations is deprecated, this field will be gone in ' +\n 'v3.0.0.');\n }\n const $x = convertToTensor(x, 'x', 'avgPool3d', 'float32');\n let x5D = $x;\n let reshapedTo5D = false;\n if ($x.rank === 4) {\n reshapedTo5D = true;\n x5D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2], $x.shape[3]]);\n }\n util.assert(x5D.rank === 5, () => `Error in avgPool3d: x must be rank 5 but got rank ${x5D.rank}.`);\n util.assert(dataFormat === 'NDHWC', () => `Error in avgPool3d: Only NDHWC is currently supported, ` +\n `but got dataFormat of ${dataFormat}`);\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in avgPool3d: Either strides or dilations must be 1. ' +\n `Got strides ${strides} and dilations '${dilations}'`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in avgPool3d: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const forward = (backend, save) => {\n if (dilations == null) {\n dilations = [1, 1, 1];\n }\n const convInfo = conv_util.computePool3DInfo(x5D.shape, filterSize, strides, dilations, pad, dimRoundingMode, dataFormat);\n save([x5D]);\n return backend.avgPool3d(x5D, convInfo);\n };\n const inputs = { x: x5D };\n const attrs = { filterSize, strides, pad, dimRoundingMode, dataFormat, dilations };\n let res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, AvgPool3D, attrs);\n res = cast(res, x5D.dtype);\n if (reshapedTo5D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3], res.shape[4]]);\n }\n return res;\n}\nexport const avgPool3d = op({ avgPool3d_ });\n//# sourceMappingURL=avg_pool_3d.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as util from '../util';\nexport function assertParamsConsistent(shapes, axis) {\n const rank = shapes[0].length;\n shapes.forEach((shape, i) => {\n util.assert(shape.length === rank, () => `Error in concat${rank}D: rank of tensors[${i}] must be the same ` +\n `as the rank of the rest (${rank})`);\n });\n util.assert(axis >= 0 && axis < rank, () => `Error in concat${rank}D: axis must be between 0 and ${rank - 1}.`);\n const firstShape = shapes[0];\n shapes.forEach((shape, i) => {\n for (let r = 0; r < rank; r++) {\n util.assert((r === axis) || (shape[r] === firstShape[r]), () => `Error in concat${rank}D: Shape of tensors[${i}] (${shape}) ` +\n `does not match the shape of the rest (${firstShape}) ` +\n `along the non-concatenated axis ${i}.`);\n }\n });\n}\nexport function computeOutShape(shapes, axis) {\n const outputShape = shapes[0].slice();\n for (let i = 1; i < shapes.length; i++) {\n outputShape[axis] += shapes[i][axis];\n }\n return outputShape;\n}\n//# sourceMappingURL=concat_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Concat } from '../kernel_names';\nimport { convertToTensorArray } from '../tensor_util_env';\nimport { assert, parseAxisParam, sizeFromShape } from '../util';\nimport { assertParamsConsistent, computeOutShape } from './concat_util';\nimport { op } from './operation';\nimport { tensor } from './tensor';\n/**\n * Concatenates a list of `tf.Tensor`s along a given axis.\n *\n * The tensors ranks and types must match, and their sizes must match in all\n * dimensions except `axis`.\n *\n * Also available are stricter rank-specific methods that assert that\n * `tensors` are of the given rank:\n * - `tf.concat1d`\n * - `tf.concat2d`\n * - `tf.concat3d`\n * - `tf.concat4d`\n *\n * Except `tf.concat1d` (which does not have axis param), all methods have\n * same signature as this method.\n *\n * ```js\n * const a = tf.tensor1d([1, 2]);\n * const b = tf.tensor1d([3, 4]);\n * a.concat(b).print(); // or a.concat(b)\n * ```\n *\n * ```js\n * const a = tf.tensor1d([1, 2]);\n * const b = tf.tensor1d([3, 4]);\n * const c = tf.tensor1d([5, 6]);\n * tf.concat([a, b, c]).print();\n * ```\n 
*\n * ```js\n * const a = tf.tensor2d([[1, 2], [10, 20]]);\n * const b = tf.tensor2d([[3, 4], [30, 40]]);\n * const axis = 1;\n * tf.concat([a, b], axis).print();\n * ```\n * @param tensors A list of tensors to concatenate.\n * @param axis The axis to concate along. Defaults to 0 (the first dim).\n *\n * @doc {heading: 'Tensors', subheading: 'Slicing and Joining'}\n */\nfunction concat_(tensors, axis = 0) {\n assert(tensors.length >= 1, () => 'Pass at least one tensor to concat');\n let $tensors = convertToTensorArray(tensors, 'tensors', 'concat');\n if ($tensors[0].dtype === 'complex64') {\n $tensors.forEach(tensor => {\n if (tensor.dtype !== 'complex64') {\n throw new Error(`Cannot concatenate complex64 tensors with a tensor\n with dtype ${tensor.dtype}. `);\n }\n });\n }\n const forward = (backend, save) => {\n const $axis = parseAxisParam(axis, $tensors[0].shape)[0];\n const outShape = computeOutShape($tensors.map(t => t.shape), $axis);\n if (sizeFromShape(outShape) === 0) {\n return tensor([], outShape);\n }\n // Keep only non-empty tensors (ignore tensors with 0 in their shape).\n $tensors = $tensors.filter(t => t.size > 0);\n if ($tensors.length === 1) {\n return $tensors[0];\n }\n const shapes = $tensors.map(t => t.shape);\n assertParamsConsistent(shapes, $axis);\n const res = backend.concat($tensors, $axis);\n save($tensors);\n return res;\n };\n const inputs = $tensors;\n const attr = { axis };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Concat, attr);\n}\nexport const concat = op({ concat_ });\n//# sourceMappingURL=concat.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Sigmoid } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes sigmoid element-wise, `1 / (1 + exp(-x))`\n *\n * ```js\n * const x = tf.tensor1d([0, -1, 2, -3]);\n *\n * x.sigmoid().print(); // or tf.sigmoid(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction sigmoid_(x) {\n const $x = convertToTensor(x, 'x', 'sigmoid');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.sigmoid($x);\n save([res]);\n return res;\n }, inputs, null /* grad */, Sigmoid);\n}\nexport const sigmoid = op({ sigmoid_ });\n//# sourceMappingURL=sigmoid.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Slice } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\nimport * as slice_util from './slice_util';\n/**\n * Extracts a slice from a `tf.Tensor` starting at coordinates `begin`\n * and is of size `size`.\n *\n * Also available are stricter rank-specific methods with the same signature\n * as this method that assert that `x` is of the given rank:\n * - `tf.slice1d`\n * - `tf.slice2d`\n * - `tf.slice3d`\n * - `tf.slice4d`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4]);\n *\n * x.slice([1], [2]).print();\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * x.slice([1, 0], [1, 2]).print();\n * ```\n * @param x The input `tf.Tensor` to slice from.\n * @param begin The coordinates to start the slice from. The length can be\n * less than the rank of x - the rest of the axes will have implicit 0 as\n * start. Can also be a single number, in which case it specifies the\n * first axis.\n * @param size The size of the slice. The length can be less than the rank of\n * x - the rest of the axes will have implicit -1. A value of -1 requests\n * the rest of the dimensions in the axis. Can also be a single number,\n * in which case it specifies the size of the first axis.\n *\n * @doc {heading: 'Tensors', subheading: 'Slicing and Joining'}\n */\nfunction slice_(x, begin, size) {\n const $x = convertToTensor(x, 'x', 'slice');\n if ($x.rank === 0) {\n throw new Error('Slicing scalar is not possible');\n }\n const forward = (backend, save) => {\n const [begin_, size_] = slice_util.parseSliceParams($x, begin, size);\n slice_util.assertParamsValid($x, begin_, size_);\n save([$x]);\n return backend.slice($x, begin_, size_);\n };\n const inputs = { x: $x };\n const attrs = { begin, size };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Slice, attrs);\n}\nexport const slice = op({ slice_ });\n//# sourceMappingURL=slice.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Tanh } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes hyperbolic tangent of the input `tf.Tensor` element-wise: `tanh(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, 70]);\n *\n * x.tanh().print(); // or tf.tanh(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction tanh_(x) {\n const $x = convertToTensor(x, 'x', 'tanh');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const y = backend.tanh($x);\n save([y]);\n return y;\n }, inputs, null /* grad */, Tanh);\n}\nexport const tanh = op({ tanh_ });\n//# sourceMappingURL=tanh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport { add } from './add';\nimport { concat } from './concat';\nimport { matMul } from './mat_mul';\nimport { mul } from './mul';\nimport { op } from './operation';\nimport { sigmoid } from './sigmoid';\nimport { slice } from './slice';\nimport { tanh } from './tanh';\n/**\n * Computes the next state and output of a BasicLSTMCell.\n *\n * Returns `[newC, newH]`.\n *\n * Derived from tf.contrib.rnn.BasicLSTMCell.\n *\n * @param forgetBias Forget bias for the cell.\n * @param lstmKernel The weights for the cell.\n * @param lstmBias The bias for the cell.\n * @param data The input to the cell.\n * @param c Previous cell state.\n * @param h Previous cell output.\n *\n * @doc {heading: 'Operations', subheading: 'RNN'}\n */\nfunction basicLSTMCell_(forgetBias, lstmKernel, lstmBias, data, c, h) {\n const $forgetBias = convertToTensor(forgetBias, 'forgetBias', 'basicLSTMCell');\n const $lstmKernel = convertToTensor(lstmKernel, 'lstmKernel', 'basicLSTMCell');\n const $lstmBias = convertToTensor(lstmBias, 'lstmBias', 'basicLSTMCell');\n const $data = convertToTensor(data, 'data', 'basicLSTMCell');\n const $c = convertToTensor(c, 'c', 'basicLSTMCell');\n const $h = convertToTensor(h, 'h', 'basicLSTMCell');\n const combined = concat([$data, $h], 1);\n const weighted = matMul(combined, $lstmKernel);\n const res = 
add(weighted, $lstmBias);\n // i = input_gate, j = new_input, f = forget_gate, o = output_gate\n const batchSize = res.shape[0];\n const sliceCols = res.shape[1] / 4;\n const sliceSize = [batchSize, sliceCols];\n const i = slice(res, [0, 0], sliceSize);\n const j = slice(res, [0, sliceCols], sliceSize);\n const f = slice(res, [0, sliceCols * 2], sliceSize);\n const o = slice(res, [0, sliceCols * 3], sliceSize);\n const newC = add(mul(sigmoid(i), tanh(j)), mul($c, sigmoid(add($forgetBias, f))));\n const newH = mul(tanh(newC), sigmoid(o));\n return [newC, newH];\n}\nexport const basicLSTMCell = op({ basicLSTMCell_ });\n//# sourceMappingURL=basic_lstm_cell.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { BatchToSpaceND } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * This operation reshapes the \"batch\" dimension 0 into `M + 1` dimensions of\n * shape `blockShape + [batch]`, interleaves these blocks back into the grid\n * defined by the spatial dimensions `[1, ..., M]`, to obtain a result with\n * the same rank as the input. The spatial dimensions of this intermediate\n * result are then optionally cropped according to `crops` to produce the\n * output. This is the reverse of `tf.spaceToBatchND`. See below for a precise\n * description.\n *\n * ```js\n * const x = tf.tensor4d([1, 2, 3, 4], [4, 1, 1, 1]);\n * const blockShape = [2, 2];\n * const crops = [[0, 0], [0, 0]];\n *\n * x.batchToSpaceND(blockShape, crops).print();\n * ```\n *\n * @param x A `tf.Tensor`. N-D with `x.shape` = `[batch] + spatialShape +\n * remainingShape`, where spatialShape has `M` dimensions.\n * @param blockShape A 1-D array. Must have shape `[M]`, all values must\n * be >= 1.\n * @param crops A 2-D array. Must have shape `[M, 2]`, all values must be >= 0.\n * `crops[i] = [cropStart, cropEnd]` specifies the amount to crop from input\n * dimension `i + 1`, which corresponds to spatial dimension `i`. It is required\n * that `cropStart[i] + cropEnd[i] <= blockShape[i] * inputShape[i + 1]`\n *\n * This operation is equivalent to the following steps:\n *\n * 1. Reshape `x` to `reshaped` of shape: `[blockShape[0], ...,\n * blockShape[M-1], batch / prod(blockShape), x.shape[1], ...,\n * x.shape[N-1]]`\n *\n * 2. Permute dimensions of `reshaped`to produce `permuted` of shape `[batch /\n * prod(blockShape),x.shape[1], blockShape[0], ..., x.shape[M],\n * blockShape[M-1],x.shape[M+1], ..., x.shape[N-1]]`\n *\n * 3. Reshape `permuted` to produce `reshapedPermuted` of shape `[batch /\n * prod(blockShape),x.shape[1] * blockShape[0], ..., x.shape[M] *\n * blockShape[M-1],x.shape[M+1], ..., x.shape[N-1]]`\n *\n * 4. 
Crop the start and end of dimensions `[1, ..., M]` of `reshapedPermuted`\n * according to `crops` to produce the output of shape: `[batch /\n * prod(blockShape),x.shape[1] * blockShape[0] - crops[0,0] - crops[0,1],\n * ..., x.shape[M] * blockShape[M-1] - crops[M-1,0] -\n * crops[M-1,1],x.shape[M+1], ..., x.shape[N-1]]`\n *\n * @doc {heading: 'Tensors', subheading: 'Transformations'}\n */\nfunction batchToSpaceND_(x, blockShape, crops) {\n const $x = convertToTensor(x, 'x', 'batchToSpaceND');\n const prod = blockShape.reduce((a, b) => a * b);\n util.assert($x.rank >= 1 + blockShape.length, () => `input rank is ${$x.rank} but should be > than blockShape.length ${blockShape.length}`);\n util.assert(crops.length === blockShape.length, () => `crops.length is ${crops.length} but should be equal to blockShape.length ${blockShape.length}`);\n util.assert($x.shape[0] % prod === 0, () => `input tensor batch is ${$x.shape[0]} but is not divisible by the product of ` +\n `the elements of blockShape ${blockShape.join(' * ')} === ${prod}`);\n const forward = backend => {\n return backend.batchToSpaceND($x, blockShape, crops);\n };\n const inputs = { x: $x };\n const attrs = { blockShape, crops };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, BatchToSpaceND, attrs);\n}\nexport const batchToSpaceND = op({ batchToSpaceND_ });\n//# sourceMappingURL=batch_to_space_nd.js.map", "import { reshape } from './reshape';\nexport function xAs4D(x) {\n let x4D;\n if (x.rank === 0 || x.rank === 1) {\n x4D = reshape(x, [1, 1, 1, x.size]);\n }\n else if (x.rank === 2) {\n x4D = reshape(x, [1, 1, x.shape[0], x.shape[1]]);\n }\n else if (x.rank === 3) {\n x4D = reshape(x, [1, x.shape[0], x.shape[1], x.shape[2]]);\n }\n else {\n x4D = x;\n }\n return x4D;\n}\n//# sourceMappingURL=batchnorm_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { FusedBatchNorm } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { xAs4D } from './batchnorm_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Batch normalization.\n *\n * As described in\n * [http://arxiv.org/abs/1502.03167](http://arxiv.org/abs/1502.03167).\n *\n * Mean, variance, scale, and offset can be of two shapes:\n * - The same shape as the input.\n * - In the common case, the depth dimension is the last dimension of x, so\n * the values would be an `tf.Tensor1D` of shape [depth].\n *\n * Also available are stricter rank-specific methods with the same signature\n * as this method that assert that parameters passed are of given rank\n * - `tf.batchNorm2d`\n * - `tf.batchNorm3d`\n * - `tf.batchNorm4d`\n *\n * @param x The input Tensor.\n * @param mean A mean Tensor.\n * @param variance A variance Tensor.\n * @param offset An offset Tensor.\n * @param scale A scale Tensor.\n * @param varianceEpsilon A small float number to avoid dividing by 0.\n *\n * @doc {heading: 'Operations', subheading: 'Normalization'}\n */\nfunction batchNorm_(x, mean, variance, offset, scale, varianceEpsilon) {\n if (varianceEpsilon == null) {\n varianceEpsilon = 0.001;\n }\n const $x = convertToTensor(x, 'x', 'batchNorm');\n const $mean = convertToTensor(mean, 'mean', 'batchNorm');\n const $variance = convertToTensor(variance, 'variance', 'batchNorm');\n let $scale;\n if (scale != null) {\n $scale = convertToTensor(scale, 'scale', 'batchNorm');\n }\n let $offset;\n if (offset != null) {\n $offset = convertToTensor(offset, 'offset', 'batchNorm');\n }\n util.assert($mean.rank === $variance.rank, () => 'Batch normalization gradient requires mean and variance to have ' +\n 'equal ranks.');\n util.assert($offset == null || $mean.rank === $offset.rank, () => 'Batch normalization gradient requires mean and offset to have ' +\n 'equal ranks.');\n util.assert($scale == null || $mean.rank === $scale.rank, () => 'Batch normalization gradient requires mean and scale to have ' +\n 'equal ranks.');\n const x4D = xAs4D($x);\n const forward = (backend, save) => {\n save([x4D, $mean, $variance, $scale]);\n return backend.batchNorm(x4D, as1DOr4D($mean), as1DOr4D($variance), as1DOr4D($offset), as1DOr4D($scale), varianceEpsilon);\n };\n const inputs = {\n x: x4D,\n scale: $scale,\n offset: $offset,\n mean: $mean,\n variance: $variance\n };\n const attrs = { varianceEpsilon };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* gradient */, FusedBatchNorm, attrs);\n return reshape(res, $x.shape);\n}\nfunction as1DOr4D(x) {\n if (x == null) {\n return null;\n }\n if (x.rank === 0) {\n // tslint:disable-next-line:no-unnecessary-type-assertion\n return reshape(x, [x.size]);\n }\n else if (x.rank === 1) {\n 
return x;\n }\n else if (x.rank === 2) {\n // tslint:disable-next-line:no-unnecessary-type-assertion\n return reshape(x, [1, 1, x.shape[0], x.shape[1]]);\n }\n else if (x.rank === 3) {\n // tslint:disable-next-line:no-unnecessary-type-assertion\n return reshape(x, [1, x.shape[0], x.shape[1], x.shape[2]]);\n }\n return x;\n}\nexport const batchNorm = op({ batchNorm_ });\n//# sourceMappingURL=batchnorm.js.map", "import { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { batchNorm } from './batchnorm';\nimport { op } from './operation';\n/**\n * Batch normalization, strictly for 2D. For the more relaxed version, see\n * `tf.batchNorm`.\n *\n * @param x The input Tensor.\n * @param mean A mean Tensor.\n * @param variance A variance Tensor.\n * @param offset An offset Tensor.\n * @param scale A scale Tensor.\n * @param varianceEpsilon A small float number to avoid dividing by 0.\n */\nfunction batchNorm2d_(x, mean, variance, offset, scale, varianceEpsilon) {\n const $x = convertToTensor(x, 'x', 'batchNorm');\n const $mean = convertToTensor(mean, 'mean', 'batchNorm');\n const $variance = convertToTensor(variance, 'variance', 'batchNorm');\n let $scale;\n if (scale != null) {\n $scale = convertToTensor(scale, 'scale', 'batchNorm');\n }\n let $offset;\n if (offset != null) {\n $offset = convertToTensor(offset, 'offset', 'batchNorm');\n }\n util.assert($x.rank === 2, () => `Error in batchNorm2D: x must be rank 2 but got rank ` +\n `${$x.rank}.`);\n util.assert($mean.rank === 2 || $mean.rank === 1, () => `Error in batchNorm2D: mean must be rank 2 or rank 1 but ` +\n `got rank ${$mean.rank}.`);\n util.assert($variance.rank === 2 || $variance.rank === 1, () => `Error in batchNorm2D: variance must be rank 2 or rank 1 ` +\n `but got rank ${$variance.rank}.`);\n if ($scale != null) {\n util.assert($scale.rank === 2 || $scale.rank === 1, () => `Error in batchNorm2D: scale must be rank 2 or rank 1 ` +\n `but got rank ${$scale.rank}.`);\n }\n if ($offset != null) {\n util.assert($offset.rank === 2 || $offset.rank === 1, () => `Error in batchNorm2D: offset must be rank 2 or rank 1 ` +\n `but got rank ${$offset.rank}.`);\n }\n return batchNorm($x, $mean, $variance, $offset, $scale, varianceEpsilon);\n}\nexport const batchNorm2d = op({ batchNorm2d_ });\n//# sourceMappingURL=batchnorm2d.js.map", "import { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { batchNorm } from './batchnorm';\nimport { op } from './operation';\n/**\n * Batch normalization, strictly for 3D. 
For the more relaxed version, see\n * `tf.batchNorm`.\n *\n * @param x The input Tensor.\n * @param mean A mean Tensor.\n * @param variance A variance Tensor.\n * @param offset An offset Tensor.\n * @param scale A scale Tensor.\n * @param varianceEpsilon A small float number to avoid dividing by 0.\n */\nfunction batchNorm3d_(x, mean, variance, offset, scale, varianceEpsilon) {\n const $x = convertToTensor(x, 'x', 'batchNorm');\n const $mean = convertToTensor(mean, 'mean', 'batchNorm');\n const $variance = convertToTensor(variance, 'variance', 'batchNorm');\n let $scale;\n if (scale != null) {\n $scale = convertToTensor(scale, 'scale', 'batchNorm');\n }\n let $offset;\n if (offset != null) {\n $offset = convertToTensor(offset, 'offset', 'batchNorm');\n }\n util.assert($x.rank === 3, () => `Error in batchNorm3D: x must be rank 3 but got rank ` +\n `${$x.rank}.`);\n util.assert($mean.rank === 3 || $mean.rank === 1, () => `Error in batchNorm3D: mean must be rank 3 or rank 1 but ` +\n `got rank ${$mean.rank}.`);\n util.assert($variance.rank === 3 || $variance.rank === 1, () => `Error in batchNorm3D: variance must be rank 3 or rank 1 ` +\n `but got rank ${$variance.rank}.`);\n if ($scale != null) {\n util.assert($scale.rank === 3 || $scale.rank === 1, () => `Error in batchNorm3D: scale must be rank 3 or rank 1 ` +\n `but got rank ${$scale.rank}.`);\n }\n if ($offset != null) {\n util.assert($offset.rank === 3 || $offset.rank === 1, () => `Error in batchNorm3D: offset must be rank 3 or rank 1 ` +\n `but got rank ${$offset.rank}.`);\n }\n return batchNorm($x, $mean, $variance, $offset, $scale, varianceEpsilon);\n}\nexport const batchNorm3d = op({ batchNorm3d_ });\n//# sourceMappingURL=batchnorm3d.js.map", "import { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { batchNorm } from './batchnorm';\nimport { op } from './operation';\n/**\n * Batch normalization, strictly for 4D. 
For the more relaxed version, see\n * `tf.batchNorm`.\n *\n * @param x The input Tensor.\n * @param mean A mean Tensor.\n * @param variance A variance Tensor.\n * @param offset An offset Tensor.\n * @param scale A scale Tensor.\n * @param varianceEpsilon A small float number to avoid dividing by 0.\n */\nfunction batchNorm4d_(x, mean, variance, offset, scale, varianceEpsilon) {\n const $x = convertToTensor(x, 'x', 'batchNorm');\n const $mean = convertToTensor(mean, 'mean', 'batchNorm');\n const $variance = convertToTensor(variance, 'variance', 'batchNorm');\n let $scale;\n if (scale != null) {\n $scale = convertToTensor(scale, 'scale', 'batchNorm');\n }\n let $offset;\n if (offset != null) {\n $offset = convertToTensor(offset, 'offset', 'batchNorm');\n }\n util.assert($x.rank === 4, () => `Error in batchNorm4D: x must be rank 4 but got rank ` +\n `${$x.rank}.`);\n util.assert($mean.rank === 4 || $mean.rank === 1, () => `Error in batchNorm4D: mean must be rank 4 or rank 1 but ` +\n `got rank ${$mean.rank}.`);\n util.assert($variance.rank === 4 || $variance.rank === 1, () => `Error in batchNorm4D: variance must be rank 4 or rank 1 ` +\n `but got rank ${$variance.rank}.`);\n if ($scale != null) {\n util.assert($scale.rank === 4 || $scale.rank === 1, () => `Error in batchNorm4D: scale must be rank 4 or rank 1 ` +\n `but got rank ${$scale.rank}.`);\n }\n if ($offset != null) {\n util.assert($offset.rank === 4 || $offset.rank === 1, () => `Error in batchNorm4D: offset must be rank 4 or rank 1 ` +\n `but got rank ${$offset.rank}.`);\n }\n return batchNorm($x, $mean, $variance, $offset, $scale, varianceEpsilon);\n}\nexport const batchNorm4d = op({ batchNorm4d_ });\n//# sourceMappingURL=batchnorm4d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { BroadcastTo } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { clone } from './clone';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Broadcast an array to a compatible shape NumPy-style.\n *\n * The tensor's shape is compared to the broadcast shape from end to beginning.\n * Ones are prepended to the tensor's shape until is has the same length as\n * the broadcast shape. If input.shape[i]==shape[i], the (i+1)-th axis is\n * already broadcast-compatible. 
If input.shape[i]==1 and shape[i]==N, then\n * the input tensor is tiled N times along that axis (using tf.tile).\n *\n * @param input The tensor that is to be broadcasted.\n * @param shape The input is to be broadcast to this shape.\n *\n * @doc {heading: 'Tensors', subheading: 'Transformations'}\n */\nfunction broadcastTo_(x, shape) {\n let input = convertToTensor(x, 'broadcastTo', 'x');\n const xShape = input.shape;\n if (shape.some(d => !(d > 0) || d % 1 !== 0)) {\n throw new Error(`broadcastTo(): Invalid broadcast shape [${shape}].`);\n }\n if (shape.length < input.rank) {\n throw new Error(`broadcastTo(): shape.length=${shape.length} < input.rank=${input.rank}.`);\n }\n if (shape.length > input.rank) {\n const newShape = input.shape.slice();\n while (newShape.length < shape.length) {\n newShape.unshift(1);\n }\n input = reshape(input, newShape);\n }\n const inputShape = input.shape;\n const reps = Array.from(shape);\n for (let i = shape.length - 1; i >= 0; i--) {\n if (inputShape[i] === shape[i]) {\n reps[i] = 1;\n }\n else if (input.shape[i] !== 1) {\n throw new Error(`broadcastTo(): [${xShape}] cannot be broadcast to [${shape}].`);\n }\n }\n const axes = reps.map((n, i) => n > 1 ? i : -1).filter(i => i >= 0);\n if (axes.length === 0) {\n return clone(input);\n }\n const forward = (backend) => backend.tile(input, reps);\n const inputs = { x: input };\n const attrs = { shape, inputShape };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, BroadcastTo, attrs);\n}\nexport const broadcastTo = op({ broadcastTo_ });\n//# sourceMappingURL=broadcast_to.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Ceil } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes ceiling of input `tf.Tensor` element-wise: `ceil(x)`\n *\n * ```js\n * const x = tf.tensor1d([.6, 1.1, -3.3]);\n *\n * x.ceil().print(); // or tf.ceil(x)\n * ```\n * @param x The input Tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction ceil_(x) {\n const $x = convertToTensor(x, 'x', 'ceil');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(backend => backend.ceil($x), inputs, null /* grad */, Ceil);\n}\nexport const ceil = op({ ceil_ });\n//# sourceMappingURL=ceil.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { ClipByValue } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * Clips values element-wise. `max(min(x, clipValueMax), clipValueMin)`\n *\n * ```js\n * const x = tf.tensor1d([-1, 2, -3, 4]);\n *\n * x.clipByValue(-2, 3).print(); // or tf.clipByValue(x, -2, 3)\n * ```\n * @param x The input tensor.\n * @param clipValueMin Lower-bound of range to be clipped to.\n * @param clipValueMax Upper-bound of range to be clipped to.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction clipByValue_(x, clipValueMin, clipValueMax) {\n const $x = convertToTensor(x, 'x', 'clipByValue');\n util.assert((clipValueMin <= clipValueMax), () => `Error in clip: min (${clipValueMin}) must be ` +\n `less than or equal to max (${clipValueMax}).`);\n const inputs = { x: $x };\n const attrs = { clipValueMin, clipValueMax };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.clip($x, clipValueMin, clipValueMax);\n save([$x]);\n return res;\n }, inputs, null /* grad */, ClipByValue, attrs);\n}\nexport const clipByValue = op({ clipByValue_ });\n//# sourceMappingURL=clip_by_value.js.map", "import { concat } from './concat';\nimport { op } from './operation';\n/**\n * Concatenates a list of`tf.Tensor1D`s along an axis. See `concat` for details.\n *\n * For example, if:\n * A: shape(3) = |r1, g1, b1|\n * B: shape(2) = |r2, g2|\n * C = tf.concat1d([A, B]) == |r1, g1, b1, r2, g2|\n *\n * @param tensors A list of`tf.Tensor`s to concatenate.\n * @return The concatenated array.\n */\nfunction concat1d_(tensors) {\n return concat(tensors, 0 /* axis */);\n}\nexport const concat1d = op({ concat1d_ });\n//# sourceMappingURL=concat_1d.js.map", "import { concat } from './concat';\nimport { op } from './operation';\n/**\n * Concatenates a list of`tf.Tensor2D`s along an axis. 
See `concat` for details.\n *\n * For example, if:\n * A: shape(2, 3) = | r1, g1, b1 |\n * | r2, g2, b2 |\n *\n * B: shape(2, 3) = | r3, g3, b3 |\n * | r4, g4, b4 |\n *\n * C = tf.concat2d([A, B], axis)\n *\n * if axis = 0:\n * C: shape(4, 3) = | r1, g1, b1 |\n * | r2, g2, b2 |\n * | r3, g3, b3 |\n * | r4, g4, b4 |\n *\n * if axis = 1:\n * C = shape(2, 6) = | r1, g1, b1, r3, g3, b3 |\n * | r2, g2, b2, r4, g4, b4 |\n *\n *\n * @param tensors A list of `tf.Tensor`s to concatenate.\n * @param axis The axis to concatenate along.\n * @return The concatenated array.\n */\nfunction concat2d_(tensors, axis) {\n return concat(tensors, axis);\n}\nexport const concat2d = op({ concat2d_ });\n//# sourceMappingURL=concat_2d.js.map", "import { concat } from './concat';\nimport { op } from './operation';\n/**\n * Concatenates a list of `tf.Tensor3D`s along an axis.\n * See `concat` for details.\n *\n * For example, if:\n * A: shape(2, 1, 3) = | r1, g1, b1 |\n * | r2, g2, b2 |\n *\n * B: shape(2, 1, 3) = | r3, g3, b3 |\n * | r4, g4, b4 |\n *\n * C = tf.concat3d([A, B], axis)\n *\n * if axis = 0:\n * C: shape(4, 1, 3) = | r1, g1, b1 |\n * | r2, g2, b2 |\n * | r3, g3, b3 |\n * | r4, g4, b4 |\n *\n * if axis = 1:\n * C: shape(2, 2, 3) = | r1, g1, b1, r3, g3, b3 |\n * | r2, g2, b2, r4, g4, b4 |\n *\n * if axis = 2:\n * C = shape(2, 1, 6) = | r1, g1, b1, r3, g3, b3 |\n * | r2, g2, b2, r4, g4, b4 |\n *\n * @param tensors A list of`tf.Tensor`s to concatenate.\n * @param axis The axis to concate along.\n * @return The concatenated array.\n */\nfunction concat3d_(tensors, axis) {\n return concat(tensors, axis);\n}\nexport const concat3d = op({ concat3d_ });\n//# sourceMappingURL=concat_3d.js.map", "import { concat } from './concat';\nimport { op } from './operation';\n/**\n * Concatenates a list of `tf.Tensor4D`s along an axis.\n * See `concat` for details.\n *\n * @param tensors A list of `tf.Tensor`s to concatenate.\n * @param axis The axis to concate along.\n * @return The concatenated array.\n */\nfunction concat4d_(tensors, axis) {\n return concat(tensors, axis);\n}\nexport const concat4d = op({ concat4d_ });\n//# sourceMappingURL=concat_4d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Conv2D } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes a 2D convolution over the input x.\n *\n * @param x The input tensor, of rank 4 or rank 3, of shape\n * `[batch, height, width, inChannels]`. 
If rank 3, batch of 1 is\n * assumed.\n * @param filter The filter, rank 4, of shape\n * `[filterHeight, filterWidth, inDepth, outDepth]`.\n * @param strides The strides of the convolution: `[strideHeight,\n * strideWidth]`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dataFormat: An optional string from: \"NHWC\", \"NCHW\". Defaults to\n * \"NHWC\". Specify the data format of the input and output data. With the\n * default format \"NHWC\", the data is stored in the order of: [batch,\n * height, width, channels].\n * @param dilations The dilation rates: `[dilationHeight, dilationWidth]`\n * in which we sample input values across the height and width dimensions\n * in atrous convolution. Defaults to `[1, 1]`. If `dilations` is a single\n * number, then `dilationHeight == dilationWidth`. If it is greater than\n * 1, then all values of `strides` must be 1.\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. If none is provided, it will not round\n * and error if the output is of fractional size.\n *\n * @doc {heading: 'Operations', subheading: 'Convolution'}\n */\nfunction conv2d_(x, filter, strides, pad, dataFormat = 'NHWC', dilations = [1, 1], dimRoundingMode) {\n const $x = convertToTensor(x, 'x', 'conv2d');\n const $filter = convertToTensor(filter, 'filter', 'conv2d');\n let x4D = $x;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n reshapedTo4D = true;\n x4D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2]]);\n }\n util.assert(x4D.rank === 4, () => `Error in conv2d: input must be rank 4, but got rank ${x4D.rank}.`);\n util.assert($filter.rank === 4, () => `Error in conv2d: filter must be rank 4, but got rank ` +\n `${$filter.rank}.`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in conv2d: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const inDepth = dataFormat === 'NHWC' ? x4D.shape[3] : x4D.shape[1];\n util.assert(inDepth === $filter.shape[2], () => `Error in conv2d: depth of input (${inDepth}) must match ` +\n `input depth for filter ${$filter.shape[2]}.`);\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in conv2D: Either strides or dilations must be 1. 
' +\n `Got strides ${strides} and dilations '${dilations}'`);\n const forward = (backend, save) => {\n const $dataFormat = conv_util.convertConv2DDataFormat(dataFormat);\n const convInfo = conv_util.computeConv2DInfo(x4D.shape, $filter.shape, strides, dilations, pad, dimRoundingMode, false, $dataFormat);\n const res = backend.conv2d(x4D, $filter, convInfo);\n save([x4D, $filter]);\n return res;\n };\n const inputs = { x: x4D, filter: $filter };\n const attrs = { strides, pad, dataFormat, dilations, dimRoundingMode };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, Conv2D, attrs);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const conv2d = op({ conv2d_ });\n//# sourceMappingURL=conv2d.js.map", "import { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { conv2d } from './conv2d';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes a 1D convolution over the input x.\n *\n * @param x The input tensor, of rank 3 or rank 2, of shape\n * `[batch, width, inChannels]`. If rank 2, batch of 1 is assumed.\n * @param filter The filter, rank 3, of shape\n * `[filterWidth, inDepth, outDepth]`.\n * @param stride The number of entries by which the filter is moved right at\n * each step.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dataFormat An optional string from \"NWC\", \"NCW\". Defaults to \"NWC\",\n * the data is stored in the order of [batch, in_width, in_channels]. Only\n * \"NWC\" is currently supported.\n * @param dilation The dilation rate in which we sample input values in\n * atrous convolution. Defaults to `1`. If it is greater than 1, then\n * stride must be `1`.\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. If none is provided, it will not round\n * and error if the output is of fractional size.\n *\n * @doc {heading: 'Operations', subheading: 'Convolution'}\n */\nfunction conv1d_(x, filter, stride, pad, dataFormat = 'NWC', dilation = 1, dimRoundingMode) {\n const $x = convertToTensor(x, 'x', 'conv1d');\n const $filter = convertToTensor(filter, 'filter', 'conv1d');\n let x3D = $x;\n let reshapedTo3D = false;\n if ($x.rank === 2) {\n reshapedTo3D = true;\n x3D = reshape($x, [1, $x.shape[0], $x.shape[1]]);\n }\n util.assert(x3D.rank === 3, () => `Error in conv1d: input must be rank 3, but got rank ${x3D.rank}.`);\n util.assert($filter.rank === 3, () => `Error in conv1d: filter must be rank 3, but got rank ` +\n `${$filter.rank}.`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in conv1d: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n util.assert(x3D.shape[2] === $filter.shape[1], () => `Error in conv1d: depth of input (${x3D.shape[2]}) must match ` +\n `input depth for filter ${$filter.shape[1]}.`);\n util.assert(conv_util.eitherStridesOrDilationsAreOne(stride, dilation), () => 'Error in conv1D: Either stride or dilation must be 1. 
' +\n `Got stride ${stride} and dilation '${dilation}'`);\n util.assert(dataFormat === 'NWC', () => `Error in conv1d: got dataFormat of ${dataFormat} but only NWC is currently supported.`);\n const filter4D = reshape($filter, [1, $filter.shape[0], $filter.shape[1], $filter.shape[2]]);\n const input4D = reshape(x3D, [x3D.shape[0], 1, x3D.shape[1], x3D.shape[2]]);\n const strides = [1, stride];\n const dilations = [1, dilation];\n const conv2dDataFormat = 'NHWC';\n const res = conv2d(input4D, filter4D, strides, pad, conv2dDataFormat, dilations, dimRoundingMode);\n if (reshapedTo3D) {\n return reshape(res, [res.shape[2], res.shape[3]]);\n }\n return reshape(res, [res.shape[0], res.shape[2], res.shape[3]]);\n}\nexport const conv1d = op({ conv1d_ });\n//# sourceMappingURL=conv1d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Conv2DBackpropInput } from '../kernel_names';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the derivative of the input of a 2D convolution.\n *\n * @param xShape The shape of the input: [batch, height, width, inDepth].\n * If length of 3, batch of 1 is assumed.\n * @param dy The derivative of the output, of rank 4 or rank 3 of shape\n * `[batch, outHeight, outWidth, outDepth]`. If rank 3, batch of 1 is\n * assumed.\n * @param filter The filter, rank 4, of shape\n * `[filterHeight, filterWidth, inDepth, outDepth]`.\n * @param strides The strides of the convolution: `[strideHeight,\n * strideWidth]`.\n * @param pad The type of padding algorithm used:\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * @param dataFormat: An optional string from: \"NHWC\", \"NCHW\". Defaults to\n * \"NHWC\". Specify the data format of the input and output data. With the\n * default format \"NHWC\", the data is stored in the order of: [batch,\n * height, width, channels].\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. 
If none is provided, it will not round\n * and error if the output is of fractional size.\n */\nfunction conv2DBackpropInput_(xShape, dy, filter, strides, pad, dataFormat = 'NHWC', dimRoundingMode) {\n util.assert(xShape.length === dy.rank, () => `Length of inShape ` +\n `(${xShape.length}) and rank of dy (${dy.rank}) must match`);\n let xShape4D = xShape;\n let dy4D = dy;\n let reshapedTo4D = false;\n if (dy.rank === 3) {\n reshapedTo4D = true;\n dy4D = reshape(dy, [1, dy.shape[0], dy.shape[1], dy.shape[2]]);\n xShape4D = [1, xShape[0], xShape[1], xShape[2]];\n }\n util.assert(xShape4D.length === 4, () => `Error in conv2dDerInput: inShape must be length 4, but got length ` +\n `${xShape4D.length}.`);\n util.assert(dy4D.rank === 4, () => `Error in conv2dDerInput: dy must be rank 4, but got ` +\n `rank ${dy4D.rank}`);\n util.assert(filter.rank === 4, () => `Error in conv2dDerInput: filter must be rank 4, but got ` +\n `rank ${filter.rank}`);\n const inDepth = dataFormat === 'NHWC' ? xShape4D[3] : xShape4D[1];\n const outDepth = dataFormat === 'NHWC' ? dy4D.shape[3] : dy4D.shape[1];\n util.assert(inDepth === filter.shape[2], () => `Error in conv2dDerInput: depth of input (${inDepth}) must ` +\n `match input depth for filter ${filter.shape[2]}.`);\n util.assert(outDepth === filter.shape[3], () => `Error in conv2dDerInput: depth of output (${outDepth}) must ` +\n `match output depth for filter ${filter.shape[3]}.`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in conv2dDerInput: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const forward = (backend, save) => {\n const dilations = 1;\n const $dataFormat = conv_util.convertConv2DDataFormat(dataFormat);\n const convInfo = conv_util.computeConv2DInfo(xShape4D, filter.shape, strides, dilations, pad, dimRoundingMode, false, $dataFormat);\n const res = backend.conv2dDerInput(dy4D, filter, convInfo);\n save([dy4D, filter]);\n return res;\n };\n const inputs = { dy: dy4D, filter };\n const attrs = { strides, pad, dataFormat, dimRoundingMode, inputShape: xShape4D };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, Conv2DBackpropInput, attrs);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const conv2DBackpropInput = op({ conv2DBackpropInput_ });\n//# sourceMappingURL=conv2d_backprop_input.js.map", "import { convertToTensor } from '../tensor_util_env';\nimport { conv2DBackpropInput } from './conv2d_backprop_input';\nimport { op } from './operation';\n/**\n * Computes the transposed 2D convolution of an image, also known as a\n * deconvolution.\n *\n * @param x The input image, of rank 4 or rank 3, of shape\n * `[batch, height, width, inDepth]`. If rank 3, batch of 1 is assumed.\n * @param filter The filter, rank 4, of shape\n * `[filterHeight, filterWidth, outDepth, inDepth]`.\n * `inDepth` must match `inDepth` in `x`.\n * @param outputShape Output shape, of rank 4 or rank 3:\n * `[batch, height, width, outDepth]`. If rank 3, batch of 1 is assumed.\n * @param strides The strides of the original convolution:\n * `[strideHeight, strideWidth]`.\n * @param pad The type of padding algorithm used in the non-transpose version\n * of the op.\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. 
If none is provided, it will not round\n * and error if the output is of fractional size.\n *\n * @doc {heading: 'Operations', subheading: 'Convolution'}\n */\nfunction conv2dTranspose_(x, filter, outputShape, strides, pad, dimRoundingMode) {\n const $x = convertToTensor(x, 'x', 'conv2dTranspose');\n const $filter = convertToTensor(filter, 'filter', 'conv2dTranspose');\n return conv2DBackpropInput(outputShape, $x, $filter, strides, pad, 'NHWC', dimRoundingMode);\n}\nexport const conv2dTranspose = op({ conv2dTranspose_ });\n//# sourceMappingURL=conv2d_transpose.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Conv3D } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { eitherStridesOrDilationsAreOne } from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes a 3D convolution over the input x.\n *\n * @param x The input tensor, of rank 5 or rank 4, of shape\n * `[batch, depth, height, width, channels]`. If rank 4,\n * batch of 1 is assumed.\n * @param filter The filter, rank 5, of shape\n * `[filterDepth, filterHeight, filterWidth, inChannels, outChannels]`.\n * inChannels must match between input and filter.\n * @param strides The strides of the convolution: `[strideDepth, strideHeight,\n * strideWidth]`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dataFormat: An optional string from: \"NDHWC\", \"NCDHW\". Defaults to\n * \"NDHWC\". Specify the data format of the input and output data. With the\n * default format \"NDHWC\", the data is stored in the order of: [batch,\n * depth, height, width, channels]. Only \"NDHWC\" is currently supported.\n * @param dilations The dilation rates: `[dilationDepth, dilationHeight,\n * dilationWidth]` in which we sample input values across the height\n * and width dimensions in atrous convolution. Defaults to `[1, 1, 1]`.\n * If `dilations` is a single number, then\n * `dilationDepth == dilationHeight == dilationWidth`. 
If it is greater\n * than 1, then all values of `strides` must be 1.\n *\n * @doc {heading: 'Operations', subheading: 'Convolution'}\n */\nfunction conv3d_(x, filter, strides, pad, dataFormat = 'NDHWC', dilations = [1, 1, 1]) {\n const $x = convertToTensor(x, 'x', 'conv3d');\n const $filter = convertToTensor(filter, 'filter', 'conv3d');\n let x5D = $x;\n let reshapedTo5D = false;\n if ($x.rank === 4) {\n reshapedTo5D = true;\n x5D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2], $x.shape[3]]);\n }\n util.assert(x5D.rank === 5, () => `Error in conv3d: input must be rank 5, but got rank ${x5D.rank}.`);\n util.assert($filter.rank === 5, () => `Error in conv3d: filter must be rank 5, but got rank ` +\n `${$filter.rank}.`);\n util.assert(x5D.shape[4] === $filter.shape[3], () => `Error in conv3d: depth of input (${x5D.shape[4]}) must match ` +\n `input depth for filter ${$filter.shape[3]}.`);\n util.assert(eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in conv3D: Either strides or dilations must be 1. ' +\n `Got strides ${strides} and dilations '${dilations}'`);\n util.assert(dataFormat === 'NDHWC', () => `Error in conv3d: got dataFormat of ${dataFormat} but only NDHWC is currently supported.`);\n const forward = (backend, save) => {\n const convInfo = conv_util.computeConv3DInfo(x5D.shape, $filter.shape, strides, dilations, pad);\n const res = backend.conv3d(x5D, $filter, convInfo);\n save([x5D, $filter]);\n return res;\n };\n const inputs = { x: x5D, filter: $filter };\n const attrs = { strides, pad, dataFormat, dilations };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, Conv3D, attrs);\n if (reshapedTo5D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3], res.shape[4]]);\n }\n return res;\n}\nexport const conv3d = op({ conv3d_ });\n//# sourceMappingURL=conv3d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Conv3DBackpropInputV2 } from '../kernel_names';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the derivative of the input of a 3D convolution.\n *\n * @param xShape The shape of the input: [batch, depth, height, width,\n * in_channels]. 
If length of 4, batch of 1 is assumed.\n * @param dy The derivative of the output, of rank 5 or rank 4 of shape\n * `[batch, outDepth, outHeight, outWidth, in_channels]`.\n * If rank 4, batch of 1 is assumed.\n * @param filter The filter, rank 5, of shape\n * `[filterDepth, filterHeight, filterWidth, inDepth, outDepth]`.\n * @param strides The strides of the convolution: `[strideDepth, strideHeight,\n * strideWidth]`.\n * @param pad The type of padding algorithm used:\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n */\nfunction conv3DBackpropInput_(xShape, dy, filter, strides, pad) {\n util.assert(xShape.length === dy.rank, () => `Length of inShape ` +\n `(${xShape.length}) and rank of dy (${dy.rank}) must match`);\n let xShape5D = xShape;\n let dy5D = dy;\n let reshapedTo5D = false;\n if (dy.rank === 4) {\n reshapedTo5D = true;\n dy5D = reshape(dy, [1, dy.shape[0], dy.shape[1], dy.shape[2], dy.shape[3]]);\n xShape5D = [1, xShape[0], xShape[1], xShape[2], xShape[3]];\n }\n const inDepth = xShape5D[4];\n const outDepth = dy5D.shape[4];\n util.assert(xShape5D.length === 5, () => `Error in conv3dDerInput: inShape must be length 5, but got length ` +\n `${xShape5D.length}.`);\n util.assert(dy5D.rank === 5, () => `Error in conv3dDerInput: dy must be rank 5, but got ` +\n `rank ${dy5D.rank}`);\n util.assert(filter.rank === 5, () => `Error in conv3dDerInput: filter must be rank 5, but got ` +\n `rank ${filter.rank}`);\n util.assert(inDepth === filter.shape[3], () => `Error in conv3dDerInput: depth of input (${inDepth}) must ` +\n `match input depth for filter ${filter.shape[3]}.`);\n util.assert(outDepth === filter.shape[4], () => `Error in conv3dDerInput: depth of output (${outDepth}) must ` +\n `match output depth for filter ${filter.shape[4]}.`);\n const forward = backend => {\n const dilations = 1;\n const convInfo = conv_util.computeConv3DInfo(xShape5D, filter.shape, strides, dilations, pad);\n return backend.conv3dDerInput(dy5D, filter, convInfo);\n };\n const inputs = { dy: dy5D, filter };\n const attrs = { pad, strides, inputShape: xShape5D };\n const res = ENGINE.runKernelFunc(forward, inputs, null, Conv3DBackpropInputV2, attrs);\n if (reshapedTo5D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3], res.shape[4]]);\n }\n return res;\n}\nexport const conv3DBackpropInput = op({ conv3DBackpropInput_ });\n//# sourceMappingURL=conv3d_backprop_input.js.map", "import { convertToTensor } from '../tensor_util_env';\nimport { conv3DBackpropInput } from './conv3d_backprop_input';\nimport { op } from './operation';\n/**\n * Computes the transposed 3D convolution of a volume, also known as a\n * deconvolution.\n *\n * @param x The input image, of rank 5 or rank 4, of shape\n * `[batch, depth, height, width, inDepth]`. If rank 4, batch of 1 is assumed.\n * @param filter The filter, rank 4, of shape\n * `[depth, filterHeight, filterWidth, outDepth, inDepth]`.\n * `inDepth` must match `inDepth` in `x`.\n * @param outputShape Output shape, of rank 5 or rank 4:\n * `[batch, depth, height, width, outDepth]`. 
If rank 3, batch of 1 is\n * assumed.\n * @param strides The strides of the original convolution:\n * `[strideDepth, strideHeight, strideWidth]`.\n * @param pad The type of padding algorithm used in the non-transpose version\n * of the op.\n *\n * @doc {heading: 'Operations', subheading: 'Convolution'}\n */\nfunction conv3dTranspose_(x, filter, outputShape, strides, pad) {\n const $x = convertToTensor(x, 'x', 'conv3dTranspose');\n const $filter = convertToTensor(filter, 'filter', 'conv3dTranspose');\n return conv3DBackpropInput(outputShape, $x, $filter, strides, pad);\n}\nexport const conv3dTranspose = op({ conv3dTranspose_ });\n//# sourceMappingURL=conv3d_transpose.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Cos } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes cos of the input `tf.Tensor` element-wise: `cos(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, Math.PI / 2, Math.PI * 3 / 4]);\n *\n * x.cos().print(); // or tf.cos(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction cos_(x) {\n const $x = convertToTensor(x, 'x', 'cos');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.cos($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Cos);\n}\nexport const cos = op({ cos_ });\n//# sourceMappingURL=cos.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Cosh } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes hyperbolic cos of the input `tf.Tensor` element-wise: `cosh(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, .7]);\n *\n * x.cosh().print(); // or tf.cosh(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction cosh_(x) {\n const $x = convertToTensor(x, 'x', 'cosh');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.cosh($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Cosh);\n}\nexport const cosh = op({ cosh_ });\n//# sourceMappingURL=cosh.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Cumsum } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { getAxesPermutation, getInnerMostAxes, getUndoAxesPermutation } from './axis_util';\nimport { op } from './operation';\nimport { transpose } from './transpose';\n/**\n * Computes the cumulative sum of a `tf.Tensor` along `axis`.\n *\n * ```js\n * const x = tf.tensor([1, 2, 3, 4]);\n * x.cumsum().print();\n * ```\n * ```js\n * const x = tf.tensor([[1, 2], [3, 4]]);\n * x.cumsum().print();\n * ```\n *\n * @param x The input tensor to be summed.\n * @param axis The axis along which to sum. Optional. Defaults to 0.\n * @param exclusive Whether to perform exclusive cumulative sum. Optional.\n * Defaults to false. If set to true then the sum of each tensor entry\n * does not include its own value, but only the values previous to it\n * along the specified axis.\n * @param reverse Whether to sum in the opposite direction. 
Optional.\n * Defaults to false.\n *\n * @doc {heading: 'Operations', subheading: 'Scan'}\n */\nfunction cumsum_(x, axis = 0, exclusive = false, reverse = false) {\n const $x = convertToTensor(x, 'x', 'cumsum');\n const forward = (backend, save) => {\n const permutation = getAxesPermutation([axis], $x.rank);\n let permutedX = $x;\n if (permutation != null) {\n permutedX = transpose($x, permutation);\n }\n const permutedAxis = getInnerMostAxes(1, $x.rank)[0];\n let value = backend.cumsum(permutedX, permutedAxis, exclusive, reverse);\n save([$x]);\n if (permutation != null) {\n const reversePermutation = getUndoAxesPermutation(permutation);\n value = transpose(value, reversePermutation);\n }\n return value;\n };\n const inputs = { x: $x };\n const attrs = { axis, exclusive, reverse };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Cumsum, attrs);\n}\nexport const cumsum = op({ cumsum_ });\n//# sourceMappingURL=cumsum.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { DepthToSpace } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * Rearranges data from depth into blocks of spatial data. More specifically,\n * this op outputs a copy of the input tensor where values from the `depth`\n * dimension are moved in spatial blocks to the `height` and `width` dimensions.\n * The attr `blockSize` indicates the input block size and how the data is\n * moved.\n *\n * - Chunks of data of size `blockSize * blockSize` from depth are rearranged\n * into non-overlapping blocks of size `blockSize x blockSize`\n *\n * - The width the output tensor is `inputWidth * blockSize`, whereas the\n * height is `inputHeight * blockSize`\n *\n * - The Y, X coordinates within each block of the output image are determined\n * by the high order component of the input channel index\n *\n * - The depth of the input tensor must be divisible by `blockSize *\n * blockSize`\n *\n * The `dataFormat` attr specifies the layout of the input and output tensors\n * with the following options: \"NHWC\": [ `batch, height, width, channels` ]\n * \"NCHW\": [ `batch, channels, height, width` ]\n *\n * ```js\n * const x = tf.tensor4d([1, 2, 3, 4], [1, 1, 1, 4]);\n * const blockSize = 2;\n * const dataFormat = \"NHWC\";\n *\n * tf.depthToSpace(x, blockSize, dataFormat).print();\n * ```\n *\n * @param x The input tensor of rank 4\n * @param blockSIze An `int` that is `>= 2`. The size of the spatial block\n * @param dataFormat An optional string from: \"NHWC\", \"NCHW\". 
Defaults to \"NHWC\"\n *\n * @doc {heading: 'Tensors', subheading: 'Transformations'}\n */\nfunction depthToSpace_(x, blockSize, dataFormat = 'NHWC') {\n const $x = convertToTensor(x, 'x', 'depthToSpace');\n const inputHeight = (dataFormat === 'NHWC') ? $x.shape[1] : $x.shape[2];\n const inputWidth = (dataFormat === 'NHWC') ? $x.shape[2] : $x.shape[3];\n const inputDepth = (dataFormat === 'NHWC') ? $x.shape[3] : $x.shape[1];\n util.assert(inputHeight * blockSize >= 0, () => `Negative dimension size caused by overflow when multiplying\n ${inputHeight} and ${blockSize} for depthToSpace with input shape\n ${$x.shape}`);\n util.assert(inputWidth * blockSize >= 0, () => `Negative dimension size caused by overflow when multiplying\n ${inputWidth} and ${blockSize} for depthToSpace with input shape\n ${$x.shape}`);\n util.assert((inputDepth % (blockSize * blockSize) === 0), () => `Dimension size must be evenly divisible by ${blockSize * blockSize} but is ${inputDepth} for depthToSpace with input shape ${$x.shape}`);\n const forward = backend => backend.depthToSpace($x, blockSize, dataFormat);\n const inputs = { x: $x };\n const attrs = { blockSize, dataFormat };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, DepthToSpace, attrs);\n}\nexport const depthToSpace = op({ depthToSpace_ });\n//# sourceMappingURL=depth_to_space.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { DepthwiseConv2dNative } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Depthwise 2D convolution.\n *\n * Given a 4D `input` array and a `filter` array of shape\n * `[filterHeight, filterWidth, inChannels, channelMultiplier]` containing\n * `inChannels` convolutional filters of depth 1, this op applies a\n * different filter to each input channel (expanding from 1 channel to\n * `channelMultiplier` channels for each), then concatenates the results\n * together. The output has `inChannels * channelMultiplier` channels.\n *\n * See\n * [https://www.tensorflow.org/api_docs/python/tf/nn/depthwise_conv2d](\n * https://www.tensorflow.org/api_docs/python/tf/nn/depthwise_conv2d)\n * for more details.\n *\n * @param x The input tensor, of rank 4 or rank 3, of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is\n * assumed.\n * @param filter The filter tensor, rank 4, of shape\n * `[filterHeight, filterWidth, inChannels, channelMultiplier]`.\n * @param strides The strides of the convolution: `[strideHeight,\n * strideWidth]`. 
If strides is a single number, then `strideHeight ==\n * strideWidth`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dilations The dilation rates: `[dilationHeight, dilationWidth]`\n * in which we sample input values across the height and width dimensions\n * in atrous convolution. Defaults to `[1, 1]`. If `rate` is a single\n * number, then `dilationHeight == dilationWidth`. If it is greater than\n * 1, then all values of `strides` must be 1.\n * @param dataFormat: An optional string from: \"NHWC\", \"NCHW\". Defaults to\n * \"NHWC\". Specify the data format of the input and output data. With the\n * default format \"NHWC\", the data is stored in the order of: [batch,\n * height, width, channels]. Only \"NHWC\" is currently supported.\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. If none is provided, it will not round\n * and error if the output is of fractional size.\n *\n * @doc {heading: 'Operations', subheading: 'Convolution'}\n */\nfunction depthwiseConv2d_(x, filter, strides, pad, dataFormat = 'NHWC', dilations = [1, 1], dimRoundingMode) {\n const $x = convertToTensor(x, 'x', 'depthwiseConv2d');\n const $filter = convertToTensor(filter, 'filter', 'depthwiseConv2d');\n let x4D = $x;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n reshapedTo4D = true;\n x4D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2]]);\n }\n util.assert(x4D.rank === 4, () => `Error in depthwiseConv2d: input must be rank 4, but got ` +\n `rank ${x4D.rank}.`);\n util.assert($filter.rank === 4, () => `Error in depthwiseConv2d: filter must be rank 4, but got rank ` +\n `${$filter.rank}.`);\n util.assert(x4D.shape[3] === $filter.shape[2], () => `Error in depthwiseConv2d: number of input channels ` +\n `(${x4D.shape[3]}) must match the inChannels dimension in ` +\n `filter ${$filter.shape[2]}.`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in depthwiseConv2d: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const forward = (backend, save) => {\n if (dilations == null) {\n dilations = [1, 1];\n }\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in depthwiseConv2d: Either strides or dilations must be ' +\n `1. Got strides ${strides} and dilations '${dilations}'`);\n const convInfo = conv_util.computeConv2DInfo(x4D.shape, $filter.shape, strides, dilations, pad, dimRoundingMode, true /* depthwise */);\n const res = backend.depthwiseConv2D(x4D, $filter, convInfo);\n save([x4D, $filter]);\n return res;\n };\n const inputs = { x: x4D, filter: $filter };\n const attrs = { strides, pad, dataFormat, dilations, dimRoundingMode };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, DepthwiseConv2dNative, attrs);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const depthwiseConv2d = op({ depthwiseConv2d_ });\n//# sourceMappingURL=depthwise_conv2d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
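A short usage sketch for `tf.depthwiseConv2d` as documented above; shapes and values are assumptions for illustration. The output channel count is `inChannels * channelMultiplier`.

```js
// Minimal sketch (illustrative shapes and values):
const x = tf.ones([1, 4, 4, 2]);        // [batch, height, width, inChannels]
const filter = tf.ones([3, 3, 2, 1]);   // [fHeight, fWidth, inChannels, channelMultiplier]
const y = tf.depthwiseConv2d(x, filter, 1, 'same');
console.log(y.shape);                   // [1, 4, 4, 2] -> inChannels * channelMultiplier = 2
```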
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Diag } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Returns a diagonal tensor with a given diagonal values.\n *\n * Given a diagonal, this operation returns a tensor with the diagonal and\n * everything else padded with zeros.\n *\n * Assume the input has dimensions `[D1,..., Dk]`, then the output is a tensor\n * of rank 2k with dimensions `[D1,..., Dk, D1,..., Dk]`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4]);\n *\n * tf.diag(x).print()\n * ```\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4, 5, 6, 6, 8], [4, 2])\n *\n * tf.diag(x).print()\n * ```\n * @param x The input tensor.\n */\nfunction diag_(x) {\n const $x = convertToTensor(x, 'x', 'diag');\n const forward = backend => {\n const flat = reshape($x, [$x.size]);\n const result = backend.diag(flat);\n const outShape = [...x.shape, ...x.shape];\n return reshape(result, outShape);\n };\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Diag);\n}\nexport const diag = op({ diag_ });\n//# sourceMappingURL=diag.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Dilation2D } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the grayscale dilation over the input `x`.\n *\n * @param x The input tensor, rank 3 or rank 4 of shape\n * `[batch, height, width, inChannels]`. 
If rank 3, batch of 1 is assumed.\n * @param filter The filter tensor, rank 3, of shape\n * `[filterHeight, filterWidth, depth]`.\n * @param strides The strides of the sliding window for each dimension of the\n * input tensor: `[strideHeight, strideWidth]`.\n * If `strides` is a single number,\n * then `strideHeight == strideWidth`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1*1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dataFormat Specify the data format of the input and output data.\n * Defaults to 'NHWC'. Only 'NHWC' is currently supported. With the\n * default format \"NHWC\", the data is stored in the order of: [batch,\n * height, width, channels].\n * @param dilations The dilation rates: `[dilationHeight, dilationWidth]`\n * in which we sample input values across the height and width dimensions\n * for atrous morphological dilation. Defaults to `[1, 1]`. If `dilations`\n * is a single number, then `dilationHeight == dilationWidth`. If it is\n * greater than 1, then all values of `strides` must be 1.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction dilation2d_(x, filter, strides, pad, dilations = [1, 1], dataFormat = 'NHWC') {\n const $x = convertToTensor(x, 'x', 'dilation2d');\n const $filter = convertToTensor(filter, 'filter', 'dilation2d');\n util.assert($x.rank === 3 || $x.rank === 4, () => `Error in dilation2d: input must be rank 3 or 4, but got rank ` +\n `${$x.rank}.`);\n util.assert($filter.rank === 3, () => `Error in dilation2d: filter must be rank 3, but got rank ` +\n `${$filter.rank}.`);\n util.assert(dataFormat === 'NHWC', () => `Error in dilation2d: Only NHWC is currently supported, ` +\n `but got dataFormat of ${dataFormat}`);\n let x4D = $x;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n x4D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2]]);\n reshapedTo4D = true;\n }\n const inputs = { x: x4D, filter: $filter };\n const attrs = { strides, pad, dilations };\n const res = ENGINE.runKernel(Dilation2D, inputs, attrs);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const dilation2d = op({ dilation2d_ });\n//# sourceMappingURL=dilation2d.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * Returns the dimensions in the input shape that are broadcasted to\n * produce the provided output shape.\n *\n * The returned dimensions are 0-indexed and sorted. An example:\n * inShape = [4, 1, 3]\n * outShape = [5, 4, 3, 3]\n * result = [1]. 
Dimension 1 (2nd dimension of input) gets broadcasted 1 => 3.\n */\nexport function getBroadcastDims(inShape, outShape) {\n const inRank = inShape.length;\n const dims = [];\n for (let i = 0; i < inRank; i++) {\n const dim = inRank - 1 - i;\n const a = inShape[dim] || 1;\n const b = outShape[outShape.length - 1 - i] || 1;\n if (b > 1 && a === 1) {\n dims.unshift(dim);\n }\n }\n return dims;\n}\n/**\n * Returns the axes in the output space that should be reduced to produce\n * the input space.\n */\nexport function getReductionAxes(inShape, outShape) {\n const result = [];\n for (let i = 0; i < outShape.length; i++) {\n const inDim = inShape[inShape.length - i - 1];\n const outAxis = outShape.length - i - 1;\n const outDim = outShape[outAxis];\n if (inDim == null || (inDim === 1 && outDim > 1)) {\n result.unshift(outAxis);\n }\n }\n return result;\n}\nexport function assertAndGetBroadcastShape(shapeA, shapeB) {\n const result = [];\n const l = Math.max(shapeA.length, shapeB.length);\n for (let i = 0; i < l; i++) {\n let a = shapeA[shapeA.length - i - 1];\n if (a == null) {\n a = 1;\n }\n let b = shapeB[shapeB.length - i - 1];\n if (b == null) {\n b = 1;\n }\n if (a === 1) {\n result.unshift(b);\n }\n else if (b === 1) {\n result.unshift(a);\n }\n else if (a !== b) {\n const errMsg = `Operands could not be broadcast together with shapes ` +\n `${shapeA} and ${shapeB}.`;\n throw Error(errMsg);\n }\n else {\n result.unshift(a);\n }\n }\n return result;\n}\n//# sourceMappingURL=broadcast_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Equal } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { op } from './operation';\n/**\n * Returns the truth value of (a == b) element-wise. Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3]);\n * const b = tf.tensor1d([2, 2, 2]);\n *\n * a.equal(b).print();\n * ```\n *\n * @param a The first input tensor.\n * @param b The second input tensor. Must have the same dtype as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Logical'}\n */\nfunction equal_(a, b) {\n let $a = convertToTensor(a, 'a', 'equal');\n let $b = convertToTensor(b, 'b', 'equal');\n [$a, $b] = makeTypesMatch($a, $b);\n assertAndGetBroadcastShape($a.shape, $b.shape);\n const forward = backend => backend.equal($a, $b);\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null, Equal);\n}\nexport const equal = op({ equal_ });\n//# sourceMappingURL=equal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
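A quick broadcasting sketch tied to the utilities above: shapes are aligned from their trailing dimensions and size-1 (or missing) dimensions expand, which is what `assertAndGetBroadcastShape` computes and what ops such as `equal` rely on. Values here are illustrative.

```js
// [3] broadcasts against [2, 3] to produce a [2, 3] result.
const a = tf.tensor2d([[1, 2, 3], [3, 2, 1]]);  // shape [2, 3]
const b = tf.tensor1d([1, 2, 3]);               // shape [3]
tf.equal(a, b).print();                         // [[true, true, true], [false, true, false]]
```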
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { SelectV2 } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assert, assertShapesMatch } from '../util';\nimport { broadcastTo } from './broadcast_to';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { op } from './operation';\n/**\n * Returns the elements, either `a` or `b` depending on the `condition`.\n *\n * If the condition is true, select from `a`, otherwise select from `b`.\n *\n * ```js\n * const cond = tf.tensor1d([false, false, true], 'bool');\n * const a = tf.tensor1d([1 , 2, 3]);\n * const b = tf.tensor1d([-1, -2, -3]);\n *\n * a.where(cond, b).print();\n * ```\n *\n * @param condition The input condition. Must be of dtype bool.\n * @param a If `condition` is rank 1, `a` may have a higher rank but\n * its first dimension must match the size of `condition`.\n * @param b A tensor with the same dtype as `a` and with shape that is\n * compatible with `a`.\n * @return A tensor with same dtype as `a` and `b`, and shape that is\n * broadcastable from `a` and `b`.\n *\n * @doc {heading: 'Operations', subheading: 'Logical'}\n */\nfunction where_(condition, a, b) {\n const $a = convertToTensor(a, 'a', 'where');\n const $b = convertToTensor(b, 'b', 'where');\n const $condition = convertToTensor(condition, 'condition', 'where', 'bool');\n // TODO: move this logic to forward function when the broadcastTo op is\n // implemented in WASM.\n // Find the broadcastable shape for $a and $b.\n const broadcastShape = assertAndGetBroadcastShape($a.shape, $b.shape);\n const $broadcastedA = broadcastTo($a, broadcastShape);\n const $broadcastedB = broadcastTo($b, broadcastShape);\n if ($condition.rank === 1) {\n // If condition rank is 1, then the first dimension must match the size of\n // condition.\n assert($condition.shape[0] === $a.shape[0], () => 'The first dimension of `a` must match the size of `condition`.');\n }\n if ($condition.rank !== 1) {\n // A must have the same shape as condition.\n assertShapesMatch($condition.shape, $broadcastedB.shape, 'Error in where: ');\n }\n const forward = (backend, save) => {\n const res = backend.select($condition, $broadcastedA, $broadcastedB);\n save([$condition]);\n return res;\n };\n const inputs = {\n condition: $condition,\n t: $broadcastedA,\n e: $broadcastedB\n };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, SelectV2);\n}\nexport const where = op({ where_ });\n//# sourceMappingURL=where.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
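A small sketch of the rank-1 condition case handled in `where_` above: when `condition` is rank 1, its length must match the first dimension of `a`, and whole rows are selected. Values are illustrative.

```js
const cond = tf.tensor1d([true, false], 'bool');
const a = tf.tensor2d([[1, 2], [3, 4]]);
const b = tf.tensor2d([[5, 6], [7, 8]]);
tf.where(cond, a, b).print();                   // [[1, 2], [7, 8]]
```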
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { ZerosLike } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Creates a `tf.Tensor` with all elements set to 0 with the same shape as the\n * given tensor.\n *\n * ```js\n * const x = tf.tensor([1, 2]);\n * tf.zerosLike(x).print();\n * ```\n *\n * @param x The tensor of required shape.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nfunction zerosLike_(x) {\n const $x = convertToTensor(x, 'x', 'zerosLike');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(backend => backend.zerosLike($x), inputs, null /* grad */, ZerosLike);\n}\nexport const zerosLike = op({ zerosLike_ });\n//# sourceMappingURL=zeros_like.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { div } from './div';\nimport { equal } from './equal';\nimport { op } from './operation';\nimport { where } from './where';\nimport { zerosLike } from './zeros_like';\n/**\n * Divides two `tf.Tensor`s element-wise, A / B. Supports broadcasting. Return 0\n * if denominator is 0.\n *\n *\n * ```js\n * const a = tf.tensor1d([1, 4, 9, 16]);\n * const b = tf.tensor1d([1, 2, 3, 4]);\n * const c = tf.tensor1d([0, 0, 0, 0]);\n *\n * a.divNoNan(b).print(); // or tf.divNoNan(a, b)\n * a.divNoNan(c).print(); // or tf.divNoNan(a, c)\n * ```\n *\n * ```js\n * // Broadcast div a with b.\n * const a = tf.tensor1d([2, 4, 6, 8]);\n * const b = tf.scalar(2);\n * const c = tf.scalar(0);\n *\n * a.divNoNan(b).print(); // or tf.divNoNan(a, b)\n * a.divNoNan(c).print(); // or tf.divNoNan(a, c)\n * ```\n *\n * @param a The first tensor as the numerator.\n * @param b The second tensor as the denominator. 
Must have the same dtype as\n * `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction divNoNan_(a, b) {\n // TODO: Make this into its own kernel.\n let $a = convertToTensor(a, 'a', 'div');\n let $b = convertToTensor(b, 'b', 'div');\n [$a, $b] = makeTypesMatch($a, $b);\n const divResult = div($a, $b);\n const zeros = zerosLike(divResult);\n const bEqualsZero = equal($b, zeros);\n return where(bEqualsZero, zeros, divResult);\n}\nexport const divNoNan = op({ divNoNan_ });\n//# sourceMappingURL=div_no_nan.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { matMul } from './mat_mul';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the dot product of two matrices and/or vectors, `t1` and `t2`.\n *\n * ```js\n * const a = tf.tensor1d([1, 2]);\n * const b = tf.tensor2d([[1, 2], [3, 4]]);\n * const c = tf.tensor2d([[1, 2, 3], [4, 5, 6]]);\n *\n * a.dot(b).print(); // or tf.dot(a, b)\n * b.dot(a).print();\n * b.dot(c).print();\n * ```\n * @param t1 The first tensor in the dot operation.\n * @param t2 The second tensor in the dot operation.\n *\n * @doc {heading: 'Operations', subheading: 'Matrices'}\n */\nfunction dot_(t1, t2) {\n const $t1 = convertToTensor(t1, 't1', 'dot');\n const $t2 = convertToTensor(t2, 't2', 'dot');\n util.assert(($t1.rank === 1 || $t1.rank === 2) && ($t2.rank === 1 || $t2.rank === 2), () => `Error in dot: inputs must all be rank 1 or 2, but got ranks ` +\n `${$t1.rank} and ${$t2.rank}.`);\n const t1Inner = ($t1.rank === 1 ? $t1.size : $t1.shape[1]);\n const t2Inner = ($t2.rank === 1 ? $t2.size : $t2.shape[0]);\n util.assert(t1Inner === t2Inner, () => `Error in dot: inner dimensions of inputs must match, but got ` +\n `${t1Inner} and ${t2Inner}.`);\n if ($t1.rank === 1 && $t2.rank === 1) {\n const t12D = reshape($t1, [1, -1]);\n const t22D = reshape($t2, [-1, 1]);\n const t1t2 = matMul(t12D, t22D);\n return reshape(t1t2, []);\n }\n else if ($t1.rank === 1 && $t2.rank === 2) {\n const t12D = reshape($t1, [1, -1]);\n const t22D = reshape($t2, [$t2.shape[0], $t2.shape[1]]);\n const t1t2 = matMul(t12D, t22D);\n return reshape(t1t2, [t1t2.size]);\n }\n else if ($t1.rank === 2 && $t2.rank === 1) {\n const t22D = reshape($t2, [-1, 1]);\n const t1t2 = matMul($t1, t22D);\n return reshape(t1t2, [t1t2.size]);\n }\n else {\n const t22D = reshape($t2, [$t2.shape[0], $t2.shape[1]]);\n const t1t2 = matMul($t1, t22D);\n return t1t2;\n }\n}\nexport const dot = op({ dot_ });\n//# sourceMappingURL=dot.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
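A numeric sketch of `divNoNan` and of the `div`/`equal`/`where`/`zerosLike` composition used in the implementation above; inputs are illustrative.

```js
const a = tf.tensor1d([1, 4, 9]);
const b = tf.tensor1d([1, 0, 3]);
tf.divNoNan(a, b).print();                      // [1, 0, 3]

// Roughly equivalent composition, mirroring divNoNan_ above:
const q = tf.div(a, b);
tf.where(tf.equal(b, tf.zerosLike(q)), tf.zerosLike(q), q).print();  // [1, 0, 3]
```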
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Elu } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes exponential linear element-wise: `x > 0 ? e ^ x - 1 : 0`.\n *\n * ```js\n * const x = tf.tensor1d([-1, 1, -3, 2]);\n *\n * x.elu().print(); // or tf.elu(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction elu_(x) {\n const $x = convertToTensor(x, 'x', 'elu');\n const forward = (backend, save) => {\n const y = backend.elu($x);\n save([y]);\n return y;\n };\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Elu);\n}\nexport const elu = op({ elu_ });\n//# sourceMappingURL=elu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Erf } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { cast } from './cast';\nimport { op } from './operation';\n/**\n * Computes gause error function of the input `tf.Tensor` element-wise:\n * `erf(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, .1, -.1, .7]);\n *\n * x.erf().print(); // or tf.erf(x);\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction erf_(x) {\n let $x = convertToTensor(x, 'x', 'erf');\n util.assert($x.dtype === 'int32' || $x.dtype === 'float32', () => 'Input dtype must be `int32` or `float32`.');\n if ($x.dtype === 'int32') {\n $x = cast($x, 'float32');\n }\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.erf($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Erf);\n}\nexport const erf = op({ erf_ });\n//# sourceMappingURL=erf.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
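A numeric check of `tf.elu`: the op evaluates to `x` for `x > 0` and `exp(x) - 1` for `x <= 0` (the one-line formula in the comment above reads differently). Values are illustrative and outputs approximate.

```js
tf.elu(tf.tensor1d([-1, 1, -3, 2])).print();    // approximately [-0.6321, 1, -0.9502, 2]
```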
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Exp } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes exponential of the input `tf.Tensor` element-wise. `e ^ x`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, -3]);\n *\n * x.exp().print(); // or tf.exp(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction exp_(x) {\n const $x = convertToTensor(x, 'x', 'exp');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.exp($x);\n save([res]);\n return res;\n }, inputs, null /* grad */, Exp);\n}\nexport const exp = op({ exp_ });\n//# sourceMappingURL=exp.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Returns a `tf.Tensor` that has expanded rank, by inserting a dimension\n * into the tensor's shape.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4]);\n * const axis = 1;\n * x.expandDims(axis).print();\n * ```\n *\n * @param x The input tensor whose dimensions to be expanded.\n * @param axis The dimension index at which to insert shape of `1`. Defaults\n * to 0 (the first dimension).\n *\n * @doc {heading: 'Tensors', subheading: 'Transformations'}\n */\nfunction expandDims_(x, axis = 0) {\n const parseAs = null;\n const $x = convertToTensor(x, 'x', 'expandDims', parseAs);\n util.assert(axis <= $x.rank, () => 'Axis must be <= rank of the tensor');\n const newShape = $x.shape.slice();\n if (axis < 0) {\n // Negative value is counted from the tail of rank.\n util.assert(-($x.rank + 1) <= axis, () => `Axis must be in the interval [${-($x.rank + 1)}, ${$x.rank}]`);\n axis = $x.rank + axis + 1;\n }\n newShape.splice(axis, 0, 1);\n return reshape($x, newShape);\n}\nexport const expandDims = op({ expandDims_ });\n//# sourceMappingURL=expand_dims.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
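A short sketch of the negative-axis handling in `expandDims_` above, where negative axes are counted from the tail; shapes are illustrative.

```js
const x = tf.tensor2d([[1, 2], [3, 4]]);        // shape [2, 2]
console.log(tf.expandDims(x, 0).shape);         // [1, 2, 2]
console.log(tf.expandDims(x, -1).shape);        // [2, 2, 1]
```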
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Expm1 } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes exponential of the input `tf.Tensor` minus one element-wise.\n * `e ^ x - 1`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, -3]);\n *\n * x.expm1().print(); // or tf.expm1(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction expm1_(x) {\n const $x = convertToTensor(x, 'x', 'expm1');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.expm1($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Expm1);\n}\nexport const expm1 = op({ expm1_ });\n//# sourceMappingURL=expm1.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Tile } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * Construct a tensor by repeating it the number of times given by reps.\n *\n * This operation creates a new tensor by replicating `input` `reps`\n * times. The output tensor's i'th dimension has `input.shape[i] *\n * reps[i]` elements, and the values of `input` are replicated\n * `reps[i]` times along the i'th dimension. 
For example, tiling\n * `[a, b, c, d]` by `[2]` produces `[a, b, c, d, a, b, c, d]`.\n *\n * ```js\n * const a = tf.tensor1d([1, 2]);\n *\n * a.tile([2]).print(); // or a.tile([2])\n * ```\n *\n * ```js\n * const a = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * a.tile([1, 2]).print(); // or a.tile([1, 2])\n * ```\n * @param x The tensor to tile.\n * @param reps Determines the number of replications per dimension.\n *\n * @doc {heading: 'Tensors', subheading: 'Slicing and Joining'}\n */\nfunction tile_(x, reps) {\n const parseAs = null;\n const $x = convertToTensor(x, 'x', 'tile', parseAs);\n util.assert($x.rank === reps.length, () => `Error in transpose: rank of input ${$x.rank} ` +\n `must match length of reps ${reps}.`);\n const forward = (backend, save) => {\n const res = backend.tile($x, reps);\n save([$x]);\n return res;\n };\n const inputsToSave = [$x];\n const inputs = { x: $x };\n const attrs = { reps };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Tile, attrs, inputsToSave);\n}\nexport const tile = op({ tile_ });\n//# sourceMappingURL=tile.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { buffer } from './buffer';\nimport { expandDims } from './expand_dims';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { tile } from './tile';\n/**\n * Create an identity matrix.\n *\n * @param numRows Number of rows.\n * @param numColumns Number of columns. Defaults to `numRows`.\n * @param batchShape If provided, will add the batch shape to the beginning\n * of the shape of the returned `tf.Tensor` by repeating the identity\n * matrix.\n * @param dtype Data type.\n * @returns Identity matrix of the specified size and data type, possibly\n * with batch repetition if `batchShape` is specified.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nfunction eye_(numRows, numColumns, batchShape, dtype = 'float32') {\n if (numColumns == null) {\n numColumns = numRows;\n }\n const buff = buffer([numRows, numColumns], dtype);\n const n = numRows <= numColumns ? 
numRows : numColumns;\n for (let i = 0; i < n; ++i) {\n buff.set(1, i, i);\n }\n const out = reshape(buff.toTensor(), [numRows, numColumns]);\n if (batchShape == null) {\n return out;\n }\n else {\n if (batchShape.length === 1) {\n return tile(expandDims(out, 0), [batchShape[0], 1, 1]);\n }\n else if (batchShape.length === 2) {\n // tslint:disable-next-line:no-unnecessary-type-assertion\n return tile(expandDims(expandDims(out, 0), 0), [batchShape[0], batchShape[1], 1, 1]);\n }\n else if (batchShape.length === 3) {\n // tslint:disable-next-line:no-unnecessary-type-assertion\n return tile(expandDims(expandDims(expandDims(out, 0), 0), 0), [\n batchShape[0], batchShape[1], batchShape[2], 1, 1\n ]);\n }\n else {\n throw new Error(`eye() currently supports only 1D and 2D ` +\n // tslint:disable-next-line:no-any\n `batchShapes, but received ${batchShape.length}D.`);\n }\n }\n}\nexport const eye = op({ eye_ });\n//# sourceMappingURL=eye.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Fill } from '../kernel_names';\n/**\n * Creates a `tf.Tensor` filled with a scalar value.\n *\n * ```js\n * tf.fill([2, 2], 4).print();\n * ```\n *\n * @param shape An array of integers defining the output tensor shape.\n * @param value The scalar value to fill the tensor with.\n * @param dtype The type of an element in the resulting tensor. Defaults to\n * 'float'.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nfunction fill(shape, value, dtype) {\n const attrs = { shape, value, dtype };\n return ENGINE.runKernelFunc(backend => backend.fill(shape, value, dtype), {}, null, Fill, attrs);\n}\nexport { fill };\n//# sourceMappingURL=fill.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
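A usage sketch for `tf.eye` covering the optional column count and the `batchShape` repetition implemented with `tile`/`expandDims` above; sizes are illustrative.

```js
tf.eye(2).print();                              // [[1, 0], [0, 1]]
console.log(tf.eye(2, 3).shape);                // [2, 3]
console.log(tf.eye(2, 2, [4]).shape);           // [4, 2, 2] (identity repeated along a batch dim)
```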
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Floor } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes floor of input `tf.Tensor` element-wise: `floor(x)`.\n *\n * ```js\n * const x = tf.tensor1d([.6, 1.1, -3.3]);\n *\n * x.floor().print(); // or tf.floor(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction floor_(x) {\n const $x = convertToTensor(x, 'x', 'floor');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(backend => backend.floor($x), inputs, null /* grad */, Floor);\n}\nexport const floor = op({ floor_ });\n//# sourceMappingURL=floor.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * Inputs of size above this threshold will be parallelized by calling multiple\n * shader programs.\n */\nimport { nearestDivisor } from '../util';\nexport const PARALLELIZE_THRESHOLD = 30;\nexport function computeOptimalWindowSize(inSize) {\n if (inSize <= PARALLELIZE_THRESHOLD) {\n return inSize;\n }\n return nearestDivisor(inSize, Math.floor(Math.sqrt(inSize)));\n}\n//# sourceMappingURL=reduce_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
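A standalone restatement of the reduction window heuristic in `computeOptimalWindowSize` above, assuming a `nearestDivisor` helper that returns the smallest divisor at or above a starting value (mirroring the source); the sample inputs are arbitrary.

```js
const PARALLELIZE_THRESHOLD = 30;
const nearestDivisor = (size, start) => {
  for (let i = start; i < size; ++i) {
    if (size % i === 0) return i;
  }
  return size;
};
const computeOptimalWindowSize = (inSize) =>
  inSize <= PARALLELIZE_THRESHOLD ? inSize : nearestDivisor(inSize, Math.floor(Math.sqrt(inSize)));

console.log(computeOptimalWindowSize(20));      // 20 (small inputs are not split)
console.log(computeOptimalWindowSize(100));     // 10 (nearest divisor of 100 at or above sqrt(100))
```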
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { nearestDivisor } from '../util';\nimport { PARALLELIZE_THRESHOLD } from './reduce_util';\nexport function segOpComputeOptimalWindowSize(inSize, numSegments) {\n let done = false;\n let res;\n if (inSize <= PARALLELIZE_THRESHOLD) {\n res = inSize;\n done = true;\n }\n else {\n res = nearestDivisor(inSize, Math.floor(Math.sqrt(inSize)));\n }\n while (!done) {\n if (res > numSegments || res === inSize) {\n done = true;\n }\n else {\n res = nearestDivisor(inSize, res + 1);\n }\n }\n return res;\n}\nexport function computeOutShape(aShape, axis, numSegments) {\n const outShape = [];\n const rank = aShape.length;\n for (let dim = 0; dim < rank; dim++) {\n if (dim !== axis) {\n outShape.push(aShape[dim]);\n }\n else {\n outShape.push(numSegments);\n }\n }\n return outShape;\n}\nexport function collectGatherOpShapeInfo(x, indices, axis) {\n const dimSize = x.shape[axis];\n const outputShape = [];\n let batchSize = 1;\n let sliceSize = 1;\n for (let i = 0; i < axis; i++) {\n outputShape.push(x.shape[i]);\n batchSize *= x.shape[i];\n }\n for (let i = 0; i < indices.rank; i++) {\n outputShape.push(indices.shape[i]);\n }\n for (let i = axis + 1; i < x.rank; i++) {\n outputShape.push(x.shape[i]);\n sliceSize *= x.shape[i];\n }\n return { batchSize, sliceSize, dimSize, outputShape };\n}\n//# sourceMappingURL=segment_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
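A shape-bookkeeping sketch for `collectGatherOpShapeInfo` above, checked against `tf.gather`; the shapes are assumptions for illustration.

```js
const x = tf.zeros([5, 4, 3]);
const indices = tf.zeros([2, 6], 'int32');
// collectGatherOpShapeInfo(x, indices, /* axis = */ 1) yields:
//   batchSize = 5, sliceSize = 3, dimSize = 4, outputShape = [5, 2, 6, 3]
console.log(tf.gather(x, indices, 1).shape);    // [5, 2, 6, 3]
```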
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { GatherV2 } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam } from '../util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { collectGatherOpShapeInfo } from './segment_util';\n/**\n * Gather slices from tensor `x`'s axis `axis` according to `indices`.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4]);\n * const indices = tf.tensor1d([1, 3, 3], 'int32');\n *\n * x.gather(indices).print();\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n * const indices = tf.tensor1d([1, 1, 0], 'int32');\n *\n * x.gather(indices).print();\n * ```\n * @param x The input tensor whose slices to be gathered.\n * @param indices The indices of the values to extract.\n * @param axis The axis over which to select values. Defaults to 0.\n *\n * @doc {heading: 'Tensors', subheading: 'Slicing and Joining'}\n */\nfunction gather_(x, indices, axis = 0) {\n const $x = convertToTensor(x, 'x', 'gather');\n const $indices = convertToTensor(indices, 'indices', 'gather', 'int32');\n const inputs = { x: $x, indices: $indices };\n const attrs = { axis };\n const forward = (backend, save) => {\n const parsedAxis = parseAxisParam(axis, $x.shape)[0];\n const shapeInfo = collectGatherOpShapeInfo($x, $indices, parsedAxis);\n const res = backend.gather($x, reshape($indices, [$indices.size]), parsedAxis);\n save([$x, $indices]);\n return reshape(res, shapeInfo.outputShape);\n };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, GatherV2, attrs);\n}\nexport const gather = op({ gather_ });\n//# sourceMappingURL=gather.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Greater } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { op } from './operation';\n/**\n * Returns the truth value of (a > b) element-wise. 
Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3]);\n * const b = tf.tensor1d([2, 2, 2]);\n *\n * a.greater(b).print();\n * ```\n *\n * @param a The first input tensor.\n * @param b The second input tensor. Must have the same dtype as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Logical'}\n */\nfunction greater_(a, b) {\n let $a = convertToTensor(a, 'a', 'greater');\n let $b = convertToTensor(b, 'b', 'greater');\n [$a, $b] = makeTypesMatch($a, $b);\n assertAndGetBroadcastShape($a.shape, $b.shape);\n const forward = backend => backend.greater($a, $b);\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Greater);\n}\nexport const greater = op({ greater_ });\n//# sourceMappingURL=greater.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { GreaterEqual } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { op } from './operation';\n/**\n * Returns the truth value of (a >= b) element-wise. Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3]);\n * const b = tf.tensor1d([2, 2, 2]);\n *\n * a.greaterEqual(b).print();\n * ```\n *\n * @param a The first input tensor.\n * @param b The second input tensor. Must have the same dtype as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Logical'}\n */\nfunction greaterEqual_(a, b) {\n let $a = convertToTensor(a, 'a', 'greaterEqual');\n let $b = convertToTensor(b, 'b', 'greaterEqual');\n [$a, $b] = makeTypesMatch($a, $b);\n assertAndGetBroadcastShape($a.shape, $b.shape);\n const forward = (backend, save) => {\n const res = backend.greaterEqual($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, GreaterEqual);\n}\nexport const greaterEqual = op({ greaterEqual_ });\n//# sourceMappingURL=greater_equal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Imag } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Returns the imaginary part of a complex (or real) tensor.\n *\n * Given a tensor input, this operation returns a tensor of type float that is\n * the imaginary part of each element in input considered as a complex number.\n * If input is real, a tensor of all zeros is returned.\n *\n * ```js\n * const x = tf.complex([-2.25, 3.25], [4.75, 5.75]);\n * tf.imag(x).print();\n * ```\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nfunction imag_(input) {\n const $input = convertToTensor(input, 'input', 'imag');\n const forward = (backend) => {\n return backend.imag($input);\n };\n const inputs = { input: $input };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Imag);\n}\nexport const imag = op({ imag_ });\n//# sourceMappingURL=imag.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { IsFinite } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Returns which elements of x are finite.\n *\n * ```js\n * const x = tf.tensor1d([NaN, Infinity, -Infinity, 0, 1]);\n *\n * x.isFinite().print(); // or tf.isFinite(x)\n * ```\n * @param x The input Tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction isFinite_(x) {\n const $x = convertToTensor(x, 'x', 'isFinite');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend) => backend.isFinite($x), inputs, null /* grad */, IsFinite);\n}\nexport const isFinite = op({ isFinite_ });\n//# sourceMappingURL=is_finite.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { IsInf } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Returns which elements of x are Infinity or -Infinity.\n *\n * ```js\n * const x = tf.tensor1d([NaN, Infinity, -Infinity, 0, 1]);\n *\n * x.isInf().print(); // or tf.isInf(x)\n * ```\n * @param x The input Tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction isInf_(x) {\n const $x = convertToTensor(x, 'x', 'isInf');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend) => backend.isInf($x), inputs, null /* grad */, IsInf);\n}\nexport const isInf = op({ isInf_ });\n//# sourceMappingURL=is_inf.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { IsNan } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Returns which elements of x are NaN.\n *\n * ```js\n * const x = tf.tensor1d([NaN, Infinity, -Infinity, 0, 1]);\n *\n * x.isNaN().print(); // or tf.isNaN(x)\n * ```\n * @param x The input Tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction isNaN_(x) {\n const $x = convertToTensor(x, 'x', 'isNaN');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(backend => backend.isNaN($x), inputs, null /* grad */, IsNan);\n}\nexport const isNaN = op({ isNaN_ });\n//# sourceMappingURL=is_nan.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Maximum } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { cast } from './cast';\nimport { op } from './operation';\n/**\n * Returns the max of a and b (`a > b ? a : b`) element-wise.\n * Supports broadcasting.\n *\n * We also expose `tf.maximumStrict` which has the same signature as this op and\n * asserts that `a` and `b` are the same shape (does not broadcast).\n *\n * ```js\n * const a = tf.tensor1d([1, 4, 3, 16]);\n * const b = tf.tensor1d([1, 2, 9, 4]);\n *\n * a.maximum(b).print(); // or tf.maximum(a, b)\n * ```\n *\n * ```js\n * // Broadcast maximum a with b.\n * const a = tf.tensor1d([2, 4, 6, 8]);\n * const b = tf.scalar(5);\n *\n * a.maximum(b).print(); // or tf.maximum(a, b)\n * ```\n *\n * @param a The first tensor.\n * @param b The second tensor. Must have the same type as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction maximum_(a, b) {\n let $a = convertToTensor(a, 'a', 'maximum');\n let $b = convertToTensor(b, 'b', 'maximum');\n [$a, $b] = makeTypesMatch($a, $b);\n if ($a.dtype === 'bool') {\n $a = cast($a, 'int32');\n $b = cast($b, 'int32');\n }\n assertAndGetBroadcastShape($a.shape, $b.shape);\n const forward = (backend, save) => {\n const res = backend.maximum($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Maximum);\n}\nexport const maximum = op({ maximum_ });\n//# sourceMappingURL=maximum.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { isTypedArray } from '../util';\nimport { makeTensor } from './tensor_ops_util';\n/**\n * Creates rank-0 `tf.Tensor` (scalar) with the provided value and dtype.\n *\n * The same functionality can be achieved with `tf.tensor`, but in general\n * we recommend using `tf.scalar` as it makes the code more readable.\n *\n * ```js\n * tf.scalar(3.14).print();\n * ```\n *\n * @param value The value of the scalar.\n * @param dtype The data type.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function scalar(value, dtype) {\n if (((isTypedArray(value) && dtype !== 'string') || Array.isArray(value)) &&\n dtype !== 'complex64') {\n throw new Error('Error creating a new Scalar: value must be a primitive ' +\n '(number|boolean|string)');\n }\n if (dtype === 'string' && isTypedArray(value) &&\n !(value instanceof Uint8Array)) {\n throw new Error('When making a scalar from encoded string, ' +\n 'the value must be `Uint8Array`.');\n }\n const shape = [];\n const inferredShape = [];\n return makeTensor(value, shape, inferredShape, dtype);\n}\n//# sourceMappingURL=scalar.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport { maximum } from './maximum';\nimport { mul } from './mul';\nimport { op } from './operation';\nimport { scalar } from './scalar';\n/**\n * Computes leaky rectified linear element-wise.\n *\n * See\n * [http://web.stanford.edu/~awni/papers/relu_hybrid_icml2013_final.pdf](\n * http://web.stanford.edu/~awni/papers/relu_hybrid_icml2013_final.pdf)\n *\n * ```js\n * const x = tf.tensor1d([-1, 2, -3, 4]);\n *\n * x.leakyRelu(0.1).print(); // or tf.leakyRelu(x, 0.1)\n * ```\n * @param x The input tensor.\n * @param alpha The scaling factor for negative values, defaults to 0.2.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction leakyRelu_(x, alpha = 0.2) {\n const $x = convertToTensor(x, 'x', 'leakyRelu');\n return maximum(mul(scalar(alpha), $x), $x);\n}\nexport const leakyRelu = op({ leakyRelu_ });\n//# sourceMappingURL=leaky_relu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Less } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { op } from './operation';\n/**\n * Returns the truth value of (a < b) element-wise. Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3]);\n * const b = tf.tensor1d([2, 2, 2]);\n *\n * a.less(b).print();\n * ```\n * @param a The first input tensor.\n * @param b The second input tensor. Must have the same dtype as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Logical'}\n */\nfunction less_(a, b) {\n let $a = convertToTensor(a, 'a', 'less');\n let $b = convertToTensor(b, 'b', 'less');\n [$a, $b] = makeTypesMatch($a, $b);\n assertAndGetBroadcastShape($a.shape, $b.shape);\n const forward = backend => backend.less($a, $b);\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Less);\n}\nexport const less = op({ less_ });\n//# sourceMappingURL=less.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { LessEqual } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { op } from './operation';\n/**\n * Returns the truth value of (a <= b) element-wise. Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3]);\n * const b = tf.tensor1d([2, 2, 2]);\n *\n * a.lessEqual(b).print();\n * ```\n *\n * @param a The first input tensor.\n * @param b The second input tensor. 
Must have the same dtype as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Logical'}\n */\nfunction lessEqual_(a, b) {\n let $a = convertToTensor(a, 'a', 'lessEqual');\n let $b = convertToTensor(b, 'b', 'lessEqual');\n [$a, $b] = makeTypesMatch($a, $b);\n assertAndGetBroadcastShape($a.shape, $b.shape);\n const forward = (backend, save) => {\n const res = backend.lessEqual($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, LessEqual);\n}\nexport const lessEqual = op({ lessEqual_ });\n//# sourceMappingURL=less_equal.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { LinSpace } from '../kernel_names';\n/**\n * Return an evenly spaced sequence of numbers over the given interval.\n *\n * ```js\n * tf.linspace(0, 9, 10).print();\n * ```\n * @param start The start value of the sequence.\n * @param stop The end value of the sequence.\n * @param num The number of values to generate.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function linspace(start, stop, num) {\n if (num <= 0) {\n throw new Error('The number of values should be positive.');\n }\n const attrs = { start, stop, num };\n return ENGINE.runKernelFunc(backend => backend.linspace(start, stop, num), {} /* inputs */, null /* grad */, LinSpace, attrs);\n}\n//# sourceMappingURL=linspace.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { LRN } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Normalizes the activation of a local neighborhood across or within\n * channels.\n *\n * @param x The input tensor. 
The 4-D input tensor is treated as a 3-D array\n * of 1D vectors (along the last dimension), and each vector is\n * normalized independently.\n * @param depthRadius The number of adjacent channels in the 1D normalization\n * window.\n * @param bias A constant bias term for the basis.\n * @param alpha A scale factor, usually positive.\n * @param beta An exponent.\n *\n * @doc {heading: 'Operations', subheading: 'Normalization'}\n */\nfunction localResponseNormalization_(x, depthRadius = 5, bias = 1, alpha = 1, beta = 0.5) {\n const $x = convertToTensor(x, 'x', 'localResponseNormalization');\n util.assert($x.rank === 4 || $x.rank === 3, () => `Error in localResponseNormalization: x must be rank 3 or 4 but got\n rank ${$x.rank}.`);\n util.assert(util.isInt(depthRadius), () => `Error in localResponseNormalization: depthRadius must be an ` +\n `integer but got depthRadius ${depthRadius}.`);\n let x4D = $x;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n reshapedTo4D = true;\n x4D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2]]);\n }\n const forward = (backend, save) => {\n const y = backend.localResponseNormalization4D(x4D, depthRadius, bias, alpha, beta);\n save([x4D, y]);\n return y;\n };\n const inputs = { x: x4D };\n const attrs = { depthRadius, bias, alpha, beta };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, LRN, attrs);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n else {\n return res;\n }\n}\nexport const localResponseNormalization = op({ localResponseNormalization_ });\n//# sourceMappingURL=local_response_normalization.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Log } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes natural logarithm of the input `tf.Tensor` element-wise: `ln(x)`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, Math.E]);\n *\n * x.log().print(); // or tf.log(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction log_(x) {\n const $x = convertToTensor(x, 'x', 'log');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.log($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Log);\n}\nexport const log = op({ log_ });\n//# sourceMappingURL=log.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Log1p } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes natural logarithm of the input `tf.Tensor` plus one\n * element-wise: `ln(1 + x)`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, Math.E - 1]);\n *\n * x.log1p().print(); // or tf.log1p(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction log1p_(x) {\n const $x = convertToTensor(x, 'x', 'log1p');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.log1p($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Log1p);\n}\nexport const log1p = op({ log1p_ });\n//# sourceMappingURL=log1p.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from './engine';\nimport { Tensor, Variable } from './tensor';\nimport { convertToTensor, convertToTensorArray } from './tensor_util_env';\nimport * as util from './util';\n/**\n * Provided `f(x)`, returns another function `g(x, dy?)`, which gives the\n * gradient of `f(x)` with respect to `x`.\n *\n * If `dy` is provided, the gradient of `f(x).mul(dy).sum()` with respect to\n * `x` is computed instead. `f(x)` must take a single tensor `x` and return a\n * single tensor `y`. 
If `f()` takes multiple inputs, use `tf.grads` instead.\n *\n * ```js\n * // f(x) = x ^ 2\n * const f = x => x.square();\n * // f'(x) = 2x\n * const g = tf.grad(f);\n *\n * const x = tf.tensor1d([2, 3]);\n * g(x).print();\n * ```\n *\n * ```js\n * // f(x) = x ^ 3\n * const f = x => x.pow(tf.scalar(3, 'int32'));\n * // f'(x) = 3x ^ 2\n * const g = tf.grad(f);\n * // f''(x) = 6x\n * const gg = tf.grad(g);\n *\n * const x = tf.tensor1d([2, 3]);\n * gg(x).print();\n * ```\n *\n * @param f The function f(x), to compute gradient for.\n *\n * @doc {heading: 'Training', subheading: 'Gradients'}\n */\nfunction grad(f) {\n util.assert(util.isFunction(f), () => 'The f passed in grad(f) must be a function');\n return (x, dy) => {\n // x can be of any dtype, thus null as the last argument.\n const $x = convertToTensor(x, 'x', 'tf.grad', null);\n const $dy = (dy != null) ? convertToTensor(dy, 'dy', 'tf.grad') : null;\n return ENGINE.tidy(() => {\n const { value, grads } = ENGINE.gradients(() => f($x), [$x], $dy);\n if ($dy != null) {\n util.assertShapesMatch(value.shape, $dy.shape, 'The shape of dy passed in grad(f)(x, dy) must match the shape ' +\n 'returned by f(x)');\n }\n checkGrads(grads);\n return grads[0];\n });\n };\n}\n/**\n * Provided `f(x1, x2,...)`, returns another function `g([x1, x2,...], dy?)`,\n * which gives an array of gradients of `f()` with respect to each input\n * [`x1`,`x2`,...].\n *\n * If `dy` is passed when calling `g()`, the gradient of\n * `f(x1,...).mul(dy).sum()` with respect to each input is computed instead.\n * The provided `f` must take one or more tensors and return a single tensor\n * `y`. If `f()` takes a single input, we recommend using `tf.grad` instead.\n *\n * ```js\n * // f(a, b) = a * b\n * const f = (a, b) => a.mul(b);\n * // df / da = b, df / db = a\n * const g = tf.grads(f);\n *\n * const a = tf.tensor1d([2, 3]);\n * const b = tf.tensor1d([-2, -3]);\n * const [da, db] = g([a, b]);\n * console.log('da');\n * da.print();\n * console.log('db');\n * db.print();\n * ```\n *\n * @param f The function `f(x1, x2,...)` to compute gradients for.\n *\n * @doc {heading: 'Training', subheading: 'Gradients'}\n */\nfunction grads(f) {\n util.assert(util.isFunction(f), () => 'The f passed in grads(f) must be a function');\n return (args, dy) => {\n util.assert(Array.isArray(args), () => 'The args passed in grads(f)(args) must be an array ' +\n 'of `Tensor`s or `TensorLike`s');\n // args can be of any dtype, thus null as the last argument.\n const $args = convertToTensorArray(args, 'args', 'tf.grads', null);\n const $dy = (dy != null) ? convertToTensor(dy, 'dy', 'tf.grads') : null;\n return ENGINE.tidy(() => {\n const { value, grads } = ENGINE.gradients(() => f(...$args), $args, $dy);\n if ($dy != null) {\n util.assertShapesMatch(value.shape, $dy.shape, 'The shape of dy passed in grads(f)([x1,...], dy) must ' +\n 'match the shape returned by f([x1,...])');\n }\n checkGrads(grads);\n return grads;\n });\n };\n}\n/**\n * Like `tf.grad`, but also returns the value of `f()`. 
Useful when `f()`\n * returns a metric you want to show.\n *\n * The result is a rich object with the following properties:\n * - grad: The gradient of `f(x)` w.r.t `x` (result of `tf.grad`).\n * - value: The value returned by `f(x)`.\n *\n * ```js\n * // f(x) = x ^ 2\n * const f = x => x.square();\n * // f'(x) = 2x\n * const g = tf.valueAndGrad(f);\n *\n * const x = tf.tensor1d([2, 3]);\n * const {value, grad} = g(x);\n *\n * console.log('value');\n * value.print();\n * console.log('grad');\n * grad.print();\n * ```\n *\n * @doc {heading: 'Training', subheading: 'Gradients'}\n */\nfunction valueAndGrad(f) {\n util.assert(util.isFunction(f), () => 'The f passed in valueAndGrad(f) must be a function');\n return (x, dy) => {\n util.assert(x instanceof Tensor, () => 'The x passed in valueAndGrad(f)(x) must be a tensor');\n util.assert(dy == null || dy instanceof Tensor, () => 'The dy passed in valueAndGrad(f)(x, dy) must be a tensor');\n const { grads, value } = ENGINE.gradients(() => f(x), [x], dy);\n checkGrads(grads);\n return { grad: grads[0], value };\n };\n}\n/**\n * Like `tf.grads`, but returns also the value of `f()`. Useful when `f()`\n * returns a metric you want to show.\n *\n * The result is a rich object with the following properties:\n * - grads: The gradients of `f()` w.r.t each input (result of `tf.grads`).\n * - value: The value returned by `f(x)`.\n *\n * ```js\n * // f(a, b) = a * b\n * const f = (a, b) => a.mul(b);\n * // df/da = b, df/db = a\n * const g = tf.valueAndGrads(f);\n *\n * const a = tf.tensor1d([2, 3]);\n * const b = tf.tensor1d([-2, -3]);\n * const {value, grads} = g([a, b]);\n *\n * const [da, db] = grads;\n *\n * console.log('value');\n * value.print();\n *\n * console.log('da');\n * da.print();\n * console.log('db');\n * db.print();\n * ```\n *\n * @doc {heading: 'Training', subheading: 'Gradients'}\n */\nfunction valueAndGrads(f) {\n util.assert(util.isFunction(f), () => 'The f passed in valueAndGrads(f) must be a function');\n return (args, dy) => {\n util.assert(Array.isArray(args) && args.every(arg => arg instanceof Tensor), () => 'The args passed in valueAndGrads(f)(args) must be array of ' +\n 'tensors');\n util.assert(dy == null || dy instanceof Tensor, () => 'The dy passed in valueAndGrads(f)(args, dy) must be a tensor');\n const res = ENGINE.gradients(() => f(...args), args, dy);\n if (dy != null) {\n util.assertShapesMatch(res.value.shape, dy.shape, 'The shape of dy passed in valueAndGrads(f)([x1,...], dy) must ' +\n 'match the shape returned by f([x1,...])');\n }\n checkGrads(res.grads);\n return res;\n };\n}\n/**\n * Computes and returns the gradient of f(x) with respect to the list of\n * trainable variables provided by `varList`. If no list is provided, it\n * defaults to all trainable variables.\n *\n * ```js\n * const a = tf.variable(tf.tensor1d([3, 4]));\n * const b = tf.variable(tf.tensor1d([5, 6]));\n * const x = tf.tensor1d([1, 2]);\n *\n * // f(a, b) = a * x ^ 2 + b * x\n * const f = () => a.mul(x.square()).add(b.mul(x)).sum();\n * // df/da = x ^ 2, df/db = x\n * const {value, grads} = tf.variableGrads(f);\n *\n * Object.keys(grads).forEach(varName => grads[varName].print());\n * ```\n *\n * @param f The function to execute. f() should return a scalar.\n * @param varList The list of variables to compute the gradients with respect\n * to. 
Defaults to all trainable variables.\n * @returns An object with the following keys and values:\n * - `value`: The value of the function `f`.\n * - `grads`: A map from the names of the variables to the gradients.\n * If the `varList` argument is provided explicitly and contains a subset of\n * non-trainable variables, this map in the return value will contain keys\n * that map the names of the non-trainable variables to `null`.\n *\n * @doc {heading: 'Training', subheading: 'Gradients'}\n */\nfunction variableGrads(f, varList) {\n util.assert(util.isFunction(f), () => 'The f passed in variableGrads(f) must be a function');\n util.assert(varList == null ||\n Array.isArray(varList) && varList.every(v => v instanceof Variable), () => 'The varList passed in variableGrads(f, varList) must be an array ' +\n 'of variables');\n const specifiedVarList = varList != null;\n if (!specifiedVarList) {\n // Get all of the trainable variables.\n varList = [];\n for (const varName in ENGINE.registeredVariables) {\n varList.push(ENGINE.registeredVariables[varName]);\n }\n }\n const specifiedNonTrainable = specifiedVarList ? varList.filter(variable => !variable.trainable) : null;\n // Prune non-trainable variables.\n const originalVarCount = varList.length;\n varList = varList.filter(variable => variable.trainable);\n util.assert(varList.length > 0, () => `variableGrads() expects at least one of the input variables to ` +\n `be trainable, but none of the ${originalVarCount} variables is ` +\n `trainable.`);\n const allowNoGradients = true;\n const { value, grads } = ENGINE.gradients(f, varList, null, allowNoGradients);\n util.assert(grads.some(g => g != null), () => 'Cannot find a connection between any variable and the result of ' +\n 'the loss function y=f(x). Please make sure the operations that ' +\n 'use variables are inside the function f passed to minimize().');\n util.assert(value.rank === 0, () => `The f passed in variableGrads(f) must return a scalar, but it ` +\n `returned a rank-${value.rank} tensor`);\n const namedGrads = {};\n varList.forEach((v, i) => {\n if (grads[i] != null) {\n namedGrads[v.name] = grads[i];\n }\n });\n if (specifiedNonTrainable != null) {\n // If varList is explicitly provided and contains non-trainable values,\n // add them to the returned gradients with `null` values.\n specifiedNonTrainable.forEach(v => namedGrads[v.name] = null);\n }\n return { value, grads: namedGrads };\n}\n/**\n * Overrides the gradient computation of a function `f`.\n *\n * Takes a function\n * `f(...inputs, save) => {value: Tensor, gradFunc: (dy, saved) => Tensor[]}`\n * and returns another function `g(...inputs)` which takes the same inputs as\n * `f`. When called, `g` returns `f().value`. In backward mode, custom gradients\n * with respect to each input of `f` are computed using `f().gradFunc`.\n *\n * The `save` function passed to `f` should be used for saving tensors needed\n * in the gradient. 
And the `saved` passed to the `gradFunc` is a\n * `NamedTensorMap`, which contains those saved tensors.\n *\n * ```js\n * const customOp = tf.customGrad((x, save) => {\n * // Save x to make sure it's available later for the gradient.\n * save([x]);\n * // Override gradient of our custom x ^ 2 op to be dy * abs(x);\n * return {\n * value: x.square(),\n * // Note `saved.x` which points to the `x` we saved earlier.\n * gradFunc: (dy, saved) => [dy.mul(saved[0].abs())]\n * };\n * });\n *\n * const x = tf.tensor1d([-1, -2, 3]);\n * const dx = tf.grad(x => customOp(x));\n *\n * console.log(`f(x):`);\n * customOp(x).print();\n * console.log(`f'(x):`);\n * dx(x).print();\n * ```\n *\n * @param f The function to evaluate in forward mode, which should return\n * `{value: Tensor, gradFunc: (dy, saved) => Tensor[]}`, where `gradFunc`\n * returns the custom gradients of `f` with respect to its inputs.\n *\n * @doc {heading: 'Training', subheading: 'Gradients'}\n */\nfunction customGrad(f) {\n return ENGINE.customGrad(f);\n}\nfunction checkGrads(grads) {\n const numNullGradients = grads.filter(g => g == null).length;\n if (numNullGradients > 0) {\n throw new Error(`Cannot compute gradient of y=f(x) with respect to x. Make sure that\n the f you passed encloses all operations that lead from x to y.`);\n }\n}\nexport { customGrad, variableGrads, valueAndGrad, valueAndGrads, grad, grads, };\n//# sourceMappingURL=gradients.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Negate } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes `-1 * x` element-wise.\n *\n * ```js\n * const x = tf.tensor2d([1, 2, -2, 0], [2, 2]);\n *\n * x.neg().print(); // or tf.neg(x)\n * ```\n *\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction neg_(x) {\n const $x = convertToTensor(x, 'x', 'neg');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(backend => backend.neg($x), inputs, null /* grad */, Negate);\n}\nexport const neg = op({ neg_ });\n//# sourceMappingURL=neg.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Softplus } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes softplus of the input `tf.Tensor` element-wise: `log(exp(x) + 1)`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, .7]);\n *\n * x.softplus().print(); // or tf.softplus(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction softplus_(x) {\n const $x = convertToTensor(x, 'x', 'softplus');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.softplus($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Softplus);\n}\nexport const softplus = op({ softplus_ });\n//# sourceMappingURL=softplus.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { customGrad } from '../gradients';\nimport { convertToTensor } from '../tensor_util_env';\nimport { mul } from './mul';\nimport { neg } from './neg';\nimport { op } from './operation';\nimport { sigmoid } from './sigmoid';\nimport { softplus } from './softplus';\n/**\n * Computes log sigmoid of the input `tf.Tensor` element-wise:\n * `logSigmoid(x)`. For numerical stability, we use `-tf.softplus(-x)`.\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, .7]);\n *\n * x.logSigmoid().print(); // or tf.logSigmoid(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction logSigmoid_(x) {\n const $x = convertToTensor(x, 'x', 'logSigmoid');\n // Use a custom gradient to maintain previous implementation.\n // There is no LogSigmoid kernel in TF so we can't use engine.runKernel\n // directly\n const customOp = customGrad((x) => {\n // TODO(yassogba) we can remove the chained softplus call here only\n // after backends have modularized softplus at which point we can call\n // engine runKernel(..., Softplus, ...) 
directly.\n const value = neg(softplus(neg(x)));\n const gradFunc = (dy) => {\n const derX = mul(dy, sigmoid(neg(x)));\n return derX;\n };\n return { value, gradFunc };\n });\n return customOp($x);\n}\nexport const logSigmoid = op({ logSigmoid_ });\n//# sourceMappingURL=log_sigmoid.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Max } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as axis_util from './axis_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { transpose } from './transpose';\n/**\n * Computes the maximum of elements across dimensions of a `tf.Tensor`.\n *\n * Reduces the input along the dimensions given in `axes`. Unless `keepDims`\n * is true, the rank of the `tf.Tensor` is reduced by 1 for each entry in\n * `axes`. If `keepDims` is true, the reduced dimensions are retained with\n * length 1. If `axes` has no entries, all dimensions are reduced, and an\n * `tf.Tensor` with a single element is returned.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3]);\n *\n * x.max().print(); // or tf.max(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * const axis = 1;\n * x.max(axis).print(); // or tf.max(x, axis)\n * ```\n *\n * @param x The input tensor.\n * @param axis The dimension(s) to reduce. By default it reduces\n * all dimensions.\n * @param keepDims If true, retains reduced dimensions with size 1.\n *\n * @doc {heading: 'Operations', subheading: 'Reduction'}\n */\nfunction max_(x, axis = null, keepDims = false) {\n const $x = convertToTensor(x, 'x', 'max');\n const forward = (backend, save) => {\n const origAxes = util.parseAxisParam(axis, $x.shape);\n let axes = origAxes;\n const permutedAxes = axis_util.getAxesPermutation(axes, $x.rank);\n let maxInput = $x;\n if (permutedAxes != null) {\n maxInput = transpose($x, permutedAxes);\n axes = axis_util.getInnerMostAxes(axes.length, maxInput.rank);\n }\n const y = backend.max(maxInput, axes);\n if (permutedAxes != null) {\n maxInput.dispose();\n }\n let res = y;\n if (keepDims) {\n const expandedShape = axis_util.expandShapeToKeepDim(res.shape, util.parseAxisParam(axis, $x.shape));\n res = reshape(res, expandedShape);\n y.dispose();\n }\n save([$x, res]);\n return res;\n };\n const inputs = { x: $x };\n const attrs = { reductionIndices: axis, keepDims };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Max, attrs);\n}\nexport const max = op({ max_ });\n//# sourceMappingURL=max.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Sub } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Subtracts two `tf.Tensor`s element-wise, A - B. Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([10, 20, 30, 40]);\n * const b = tf.tensor1d([1, 2, 3, 4]);\n *\n * a.sub(b).print(); // or tf.sub(a, b)\n * ```\n *\n * ```js\n * // Broadcast subtract a with b.\n * const a = tf.tensor1d([10, 20, 30, 40]);\n * const b = tf.scalar(5);\n *\n * a.sub(b).print(); // or tf.sub(a, b)\n * ```\n * @param a The first `tf.Tensor` to subtract from.\n * @param b The second `tf.Tensor` to be subtracted. Must have the same dtype as\n * `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction sub_(a, b) {\n let $a = convertToTensor(a, 'a', 'sub');\n let $b = convertToTensor(b, 'b', 'sub');\n [$a, $b] = makeTypesMatch($a, $b);\n const forward = (backend, save) => {\n const res = backend.subtract($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Sub);\n}\nexport const sub = op({ sub_ });\n//# sourceMappingURL=sub.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Sum } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam } from '../util';\nimport { expandShapeToKeepDim, getAxesPermutation, getInnerMostAxes } from './axis_util';\nimport { cast } from './cast';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { transpose } from './transpose';\n/**\n * Computes the sum of elements across dimensions of a `tf.Tensor`.\n *\n * Reduces the input along the dimensions given in `axes`. Unless `keepDims`\n * is true, the rank of the `tf.Tensor` is reduced by 1 for each entry in\n * `axes`. If `keepDims` is true, the reduced dimensions are retained with\n * length 1. 
If axes has no entries, all dimensions are reduced, and a\n * `tf.Tensor` with a single element is returned.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3]);\n *\n * x.sum().print(); // or tf.sum(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * const axis = 1;\n * x.sum(axis).print(); // or tf.sum(x, axis)\n * ```\n *\n * @param x The input tensor to compute the sum over. If the dtype is `bool`\n * it will be converted to `int32` and the output dtype will be `int32`.\n * @param axis The dimension(s) to reduce. By default it reduces\n * all dimensions.\n * @param keepDims If true, retains reduced dimensions with size 1.\n *\n * @doc {heading: 'Operations', subheading: 'Reduction'}\n */\nfunction sum_(x, axis = null, keepDims = false) {\n let $x = convertToTensor(x, 'x', 'sum');\n if ($x.dtype === 'bool') {\n $x = cast($x, 'int32');\n }\n const forward = (backend, save) => {\n save([$x]);\n const axes = parseAxisParam(axis, $x.shape);\n const permutation = getAxesPermutation(axes, $x.rank);\n let reductionAxes = axes;\n let permutedX = $x;\n if (permutation != null) {\n permutedX = transpose($x, permutation);\n reductionAxes = getInnerMostAxes(reductionAxes.length, $x.rank);\n }\n let value = backend.sum(permutedX, reductionAxes);\n if (keepDims) {\n const newShape = expandShapeToKeepDim(value.shape, axes);\n value = reshape(value, newShape);\n }\n return value;\n };\n const inputs = { x: $x };\n const attrs = { axis, keepDims };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Sum, attrs);\n}\nexport const sum = op({ sum_ });\n//# sourceMappingURL=sum.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { LogSoftmax } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { cast } from './cast';\nimport { exp } from './exp';\nimport { log } from './log';\nimport { max } from './max';\nimport { op } from './operation';\nimport { sub } from './sub';\nimport { sum } from './sum';\n/**\n * Computes the log softmax.\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3]);\n *\n * a.logSoftmax().print(); // or tf.logSoftmax(a)\n * ```\n *\n * ```js\n * const a = tf.tensor2d([2, 4, 6, 1, 2, 3], [2, 3]);\n *\n * a.logSoftmax().print(); // or tf.logSoftmax(a)\n * ```\n *\n * @param logits The logits array.\n * @param axis The dimension softmax would be performed on. Defaults to `-1`\n * which indicates the last dimension.\n *\n * @doc {heading: 'Operations', subheading: 'Normalization'}\n */\nfunction logSoftmax_(logits, axis = -1) {\n const $logits = convertToTensor(logits, 'logits', 'logSoftmax');\n if (axis === -1) {\n axis = $logits.rank - 1;\n }\n if (axis !== $logits.rank - 1) {\n throw Error('Log Softmax along a non-last dimension is not yet supported. 
' +\n `Logits was rank ${$logits.rank} and axis was ${axis}`);\n }\n const forward = (backend, save) => {\n const keepDims = true;\n const xMax = max(logits, axis, true);\n const shifted = sub(logits, xMax);\n const value = sub(cast(shifted, 'float32'), log(sum(exp(shifted), axis, keepDims)));\n save([value]);\n return value;\n };\n const inputs = { logits: $logits };\n const attrs = { axis };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, LogSoftmax, attrs);\n}\nexport const logSoftmax = op({ logSoftmax_ });\n//# sourceMappingURL=log_softmax.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam } from '../util';\nimport { add } from './add';\nimport { expandShapeToKeepDim } from './axis_util';\nimport { exp } from './exp';\nimport { log } from './log';\nimport { max } from './max';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { sub } from './sub';\nimport { sum } from './sum';\n/**\n * Computes the log(sum(exp(elements across the reduction dimensions)).\n *\n * Reduces the input along the dimensions given in `axis`. Unless `keepDims`\n * is true, the rank of the array is reduced by 1 for each entry in `axis`.\n * If `keepDims` is true, the reduced dimensions are retained with length 1.\n * If `axis` has no entries, all dimensions are reduced, and an array with a\n * single element is returned.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3]);\n *\n * x.logSumExp().print(); // or tf.logSumExp(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * const axis = 1;\n * x.logSumExp(axis).print(); // or tf.logSumExp(a, axis)\n * ```\n * @param x The input tensor.\n * @param axis The dimension(s) to reduce. If null (the default),\n * reduces all dimensions.\n * @param keepDims If true, retains reduced dimensions with length\n * of 1. Defaults to false.\n *\n * @doc {heading: 'Operations', subheading: 'Reduction'}\n */\nfunction logSumExp_(x, axis = null, keepDims = false) {\n const $x = convertToTensor(x, 'x', 'logSumExp');\n const axes = parseAxisParam(axis, $x.shape);\n const xMax = max($x, axes, true /* keepDims */);\n const a = sub($x, xMax);\n const b = exp(a);\n const c = sum(b, axes);\n const d = log(c);\n const res = add(reshape(xMax, d.shape), d);\n if (keepDims) {\n const newShape = expandShapeToKeepDim(res.shape, axes);\n return reshape(res, newShape);\n }\n return res;\n}\nexport const logSumExp = op({ logSumExp_ });\n//# sourceMappingURL=log_sum_exp.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
By default it reduces\n * all dimensions.\n * @param keepDims If true, retains reduced dimensions with size 1.\n *\n * @doc {heading: 'Operations', subheading: 'Reduction'}\n */\nfunction prod_(x, axis = null, keepDims = false) {\n let $x = convertToTensor(x, 'x', 'prod');\n if ($x.dtype === 'bool') {\n // bool is not an allowed type for the underlying kernel.\n $x = cast($x, 'int32');\n }\n const forward = (backend) => {\n const axes = parseAxisParam(axis, $x.shape);\n const permutation = getAxesPermutation(axes, $x.rank);\n let reductionAxes = axes;\n let permutedX = $x;\n if (permutation != null) {\n permutedX = transpose($x, permutation);\n reductionAxes = getInnerMostAxes(reductionAxes.length, $x.rank);\n }\n let value = backend.prod(permutedX, reductionAxes);\n if (keepDims) {\n const newShape = expandShapeToKeepDim(value.shape, axes);\n value = reshape(value, newShape);\n }\n return value;\n };\n const inputs = { x: $x };\n const attrs = { axis, keepDims };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Prod, attrs);\n}\nexport const prod = op({ prod_ });\n//# sourceMappingURL=prod.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { sizeFromShape } from '../util';\nimport { op } from './operation';\n/**\n * Creates a `tf.Tensor` with values sampled from a random number generator\n * function defined by the user.\n *\n * @param shape An array of integers defining the output tensor shape.\n * @param randFunction A random number generator function which is called\n * for each element in the output tensor.\n * @param dtype The data type of the output tensor. 
Defaults to 'float32'.\n */\nfunction rand_(shape, randFunction, dtype) {\n const size = sizeFromShape(shape);\n let values = null;\n if (dtype == null || dtype === 'float32') {\n values = new Float32Array(size);\n }\n else if (dtype === 'int32') {\n values = new Int32Array(size);\n }\n else if (dtype === 'bool') {\n values = new Uint8Array(size);\n }\n else {\n throw new Error(`Unknown data type ${dtype}`);\n }\n for (let i = 0; i < size; i++) {\n values[i] = randFunction();\n }\n return ENGINE.makeTensor(values, shape, dtype);\n}\nexport const rand = op({ rand_ });\n//# sourceMappingURL=rand.js.map", "// A port of an algorithm by Johannes Baag\u00F8e , 2010\n// http://baagoe.com/en/RandomMusings/javascript/\n// https://github.com/nquinlan/better-random-numbers-for-javascript-mirror\n// Original work is under MIT license -\n\n// Copyright (C) 2010 by Johannes Baag\u00F8e \n//\n// Permission is hereby granted, free of charge, to any person obtaining a copy\n// of this software and associated documentation files (the \"Software\"), to deal\n// in the Software without restriction, including without limitation the rights\n// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n// copies of the Software, and to permit persons to whom the Software is\n// furnished to do so, subject to the following conditions:\n// \n// The above copyright notice and this permission notice shall be included in\n// all copies or substantial portions of the Software.\n// \n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n// THE SOFTWARE.\n\n\n\n(function(global, module, define) {\n\nfunction Alea(seed) {\n var me = this, mash = Mash();\n\n me.next = function() {\n var t = 2091639 * me.s0 + me.c * 2.3283064365386963e-10; // 2^-32\n me.s0 = me.s1;\n me.s1 = me.s2;\n return me.s2 = t - (me.c = t | 0);\n };\n\n // Apply the seeding algorithm from Baagoe.\n me.c = 1;\n me.s0 = mash(' ');\n me.s1 = mash(' ');\n me.s2 = mash(' ');\n me.s0 -= mash(seed);\n if (me.s0 < 0) { me.s0 += 1; }\n me.s1 -= mash(seed);\n if (me.s1 < 0) { me.s1 += 1; }\n me.s2 -= mash(seed);\n if (me.s2 < 0) { me.s2 += 1; }\n mash = null;\n}\n\nfunction copy(f, t) {\n t.c = f.c;\n t.s0 = f.s0;\n t.s1 = f.s1;\n t.s2 = f.s2;\n return t;\n}\n\nfunction impl(seed, opts) {\n var xg = new Alea(seed),\n state = opts && opts.state,\n prng = xg.next;\n prng.int32 = function() { return (xg.next() * 0x100000000) | 0; }\n prng.double = function() {\n return prng() + (prng() * 0x200000 | 0) * 1.1102230246251565e-16; // 2^-53\n };\n prng.quick = prng;\n if (state) {\n if (typeof(state) == 'object') copy(state, xg);\n prng.state = function() { return copy(xg, {}); }\n }\n return prng;\n}\n\nfunction Mash() {\n var n = 0xefc8249d;\n\n var mash = function(data) {\n data = data.toString();\n for (var i = 0; i < data.length; i++) {\n n += data.charCodeAt(i);\n var h = 0.02519603282416938 * n;\n n = h >>> 0;\n h -= n;\n h *= n;\n n = h >>> 0;\n h -= n;\n n += h * 0x100000000; // 2^32\n }\n return (n >>> 0) * 2.3283064365386963e-10; // 2^-32\n };\n\n return mash;\n}\n\n\nif (module && module.exports) {\n module.exports = impl;\n} 
else if (define && define.amd) {\n define(function() { return impl; });\n} else {\n this.alea = impl;\n}\n\n})(\n this,\n (typeof module) == 'object' && module, // present in node.js\n (typeof define) == 'function' && define // present with an AMD loader\n);\n\n\n", "// A Javascript implementaion of the \"xor128\" prng algorithm by\n// George Marsaglia. See http://www.jstatsoft.org/v08/i14/paper\n\n(function(global, module, define) {\n\nfunction XorGen(seed) {\n var me = this, strseed = '';\n\n me.x = 0;\n me.y = 0;\n me.z = 0;\n me.w = 0;\n\n // Set up generator function.\n me.next = function() {\n var t = me.x ^ (me.x << 11);\n me.x = me.y;\n me.y = me.z;\n me.z = me.w;\n return me.w ^= (me.w >>> 19) ^ t ^ (t >>> 8);\n };\n\n if (seed === (seed | 0)) {\n // Integer seed.\n me.x = seed;\n } else {\n // String seed.\n strseed += seed;\n }\n\n // Mix in string seed, then discard an initial batch of 64 values.\n for (var k = 0; k < strseed.length + 64; k++) {\n me.x ^= strseed.charCodeAt(k) | 0;\n me.next();\n }\n}\n\nfunction copy(f, t) {\n t.x = f.x;\n t.y = f.y;\n t.z = f.z;\n t.w = f.w;\n return t;\n}\n\nfunction impl(seed, opts) {\n var xg = new XorGen(seed),\n state = opts && opts.state,\n prng = function() { return (xg.next() >>> 0) / 0x100000000; };\n prng.double = function() {\n do {\n var top = xg.next() >>> 11,\n bot = (xg.next() >>> 0) / 0x100000000,\n result = (top + bot) / (1 << 21);\n } while (result === 0);\n return result;\n };\n prng.int32 = xg.next;\n prng.quick = prng;\n if (state) {\n if (typeof(state) == 'object') copy(state, xg);\n prng.state = function() { return copy(xg, {}); }\n }\n return prng;\n}\n\nif (module && module.exports) {\n module.exports = impl;\n} else if (define && define.amd) {\n define(function() { return impl; });\n} else {\n this.xor128 = impl;\n}\n\n})(\n this,\n (typeof module) == 'object' && module, // present in node.js\n (typeof define) == 'function' && define // present with an AMD loader\n);\n\n\n", "// A Javascript implementaion of the \"xorwow\" prng algorithm by\n// George Marsaglia. 
See http://www.jstatsoft.org/v08/i14/paper\n\n(function(global, module, define) {\n\nfunction XorGen(seed) {\n var me = this, strseed = '';\n\n // Set up generator function.\n me.next = function() {\n var t = (me.x ^ (me.x >>> 2));\n me.x = me.y; me.y = me.z; me.z = me.w; me.w = me.v;\n return (me.d = (me.d + 362437 | 0)) +\n (me.v = (me.v ^ (me.v << 4)) ^ (t ^ (t << 1))) | 0;\n };\n\n me.x = 0;\n me.y = 0;\n me.z = 0;\n me.w = 0;\n me.v = 0;\n\n if (seed === (seed | 0)) {\n // Integer seed.\n me.x = seed;\n } else {\n // String seed.\n strseed += seed;\n }\n\n // Mix in string seed, then discard an initial batch of 64 values.\n for (var k = 0; k < strseed.length + 64; k++) {\n me.x ^= strseed.charCodeAt(k) | 0;\n if (k == strseed.length) {\n me.d = me.x << 10 ^ me.x >>> 4;\n }\n me.next();\n }\n}\n\nfunction copy(f, t) {\n t.x = f.x;\n t.y = f.y;\n t.z = f.z;\n t.w = f.w;\n t.v = f.v;\n t.d = f.d;\n return t;\n}\n\nfunction impl(seed, opts) {\n var xg = new XorGen(seed),\n state = opts && opts.state,\n prng = function() { return (xg.next() >>> 0) / 0x100000000; };\n prng.double = function() {\n do {\n var top = xg.next() >>> 11,\n bot = (xg.next() >>> 0) / 0x100000000,\n result = (top + bot) / (1 << 21);\n } while (result === 0);\n return result;\n };\n prng.int32 = xg.next;\n prng.quick = prng;\n if (state) {\n if (typeof(state) == 'object') copy(state, xg);\n prng.state = function() { return copy(xg, {}); }\n }\n return prng;\n}\n\nif (module && module.exports) {\n module.exports = impl;\n} else if (define && define.amd) {\n define(function() { return impl; });\n} else {\n this.xorwow = impl;\n}\n\n})(\n this,\n (typeof module) == 'object' && module, // present in node.js\n (typeof define) == 'function' && define // present with an AMD loader\n);\n\n\n", "// A Javascript implementaion of the \"xorshift7\" algorithm by\n// Fran\u00E7ois Panneton and Pierre L'ecuyer:\n// \"On the Xorgshift Random Number Generators\"\n// http://saluc.engr.uconn.edu/refs/crypto/rng/panneton05onthexorshift.pdf\n\n(function(global, module, define) {\n\nfunction XorGen(seed) {\n var me = this;\n\n // Set up generator function.\n me.next = function() {\n // Update xor generator.\n var X = me.x, i = me.i, t, v, w;\n t = X[i]; t ^= (t >>> 7); v = t ^ (t << 24);\n t = X[(i + 1) & 7]; v ^= t ^ (t >>> 10);\n t = X[(i + 3) & 7]; v ^= t ^ (t >>> 3);\n t = X[(i + 4) & 7]; v ^= t ^ (t << 7);\n t = X[(i + 7) & 7]; t = t ^ (t << 13); v ^= t ^ (t << 9);\n X[i] = v;\n me.i = (i + 1) & 7;\n return v;\n };\n\n function init(me, seed) {\n var j, w, X = [];\n\n if (seed === (seed | 0)) {\n // Seed state array using a 32-bit integer.\n w = X[0] = seed;\n } else {\n // Seed state using a string.\n seed = '' + seed;\n for (j = 0; j < seed.length; ++j) {\n X[j & 7] = (X[j & 7] << 15) ^\n (seed.charCodeAt(j) + X[(j + 1) & 7] << 13);\n }\n }\n // Enforce an array length of 8, not all zeroes.\n while (X.length < 8) X.push(0);\n for (j = 0; j < 8 && X[j] === 0; ++j);\n if (j == 8) w = X[7] = -1; else w = X[j];\n\n me.x = X;\n me.i = 0;\n\n // Discard an initial 256 values.\n for (j = 256; j > 0; --j) {\n me.next();\n }\n }\n\n init(me, seed);\n}\n\nfunction copy(f, t) {\n t.x = f.x.slice();\n t.i = f.i;\n return t;\n}\n\nfunction impl(seed, opts) {\n if (seed == null) seed = +(new Date);\n var xg = new XorGen(seed),\n state = opts && opts.state,\n prng = function() { return (xg.next() >>> 0) / 0x100000000; };\n prng.double = function() {\n do {\n var top = xg.next() >>> 11,\n bot = (xg.next() >>> 0) / 0x100000000,\n result = (top + 
bot) / (1 << 21);\n } while (result === 0);\n return result;\n };\n prng.int32 = xg.next;\n prng.quick = prng;\n if (state) {\n if (state.x) copy(state, xg);\n prng.state = function() { return copy(xg, {}); }\n }\n return prng;\n}\n\nif (module && module.exports) {\n module.exports = impl;\n} else if (define && define.amd) {\n define(function() { return impl; });\n} else {\n this.xorshift7 = impl;\n}\n\n})(\n this,\n (typeof module) == 'object' && module, // present in node.js\n (typeof define) == 'function' && define // present with an AMD loader\n);\n\n", "// A Javascript implementaion of Richard Brent's Xorgens xor4096 algorithm.\n//\n// This fast non-cryptographic random number generator is designed for\n// use in Monte-Carlo algorithms. It combines a long-period xorshift\n// generator with a Weyl generator, and it passes all common batteries\n// of stasticial tests for randomness while consuming only a few nanoseconds\n// for each prng generated. For background on the generator, see Brent's\n// paper: \"Some long-period random number generators using shifts and xors.\"\n// http://arxiv.org/pdf/1004.3115v1.pdf\n//\n// Usage:\n//\n// var xor4096 = require('xor4096');\n// random = xor4096(1); // Seed with int32 or string.\n// assert.equal(random(), 0.1520436450538547); // (0, 1) range, 53 bits.\n// assert.equal(random.int32(), 1806534897); // signed int32, 32 bits.\n//\n// For nonzero numeric keys, this impelementation provides a sequence\n// identical to that by Brent's xorgens 3 implementaion in C. This\n// implementation also provides for initalizing the generator with\n// string seeds, or for saving and restoring the state of the generator.\n//\n// On Chrome, this prng benchmarks about 2.1 times slower than\n// Javascript's built-in Math.random().\n\n(function(global, module, define) {\n\nfunction XorGen(seed) {\n var me = this;\n\n // Set up generator function.\n me.next = function() {\n var w = me.w,\n X = me.X, i = me.i, t, v;\n // Update Weyl generator.\n me.w = w = (w + 0x61c88647) | 0;\n // Update xor generator.\n v = X[(i + 34) & 127];\n t = X[i = ((i + 1) & 127)];\n v ^= v << 13;\n t ^= t << 17;\n v ^= v >>> 15;\n t ^= t >>> 12;\n // Update Xor generator array state.\n v = X[i] = v ^ t;\n me.i = i;\n // Result is the combination.\n return (v + (w ^ (w >>> 16))) | 0;\n };\n\n function init(me, seed) {\n var t, v, i, j, w, X = [], limit = 128;\n if (seed === (seed | 0)) {\n // Numeric seeds initialize v, which is used to generates X.\n v = seed;\n seed = null;\n } else {\n // String seeds are mixed into v and X one character at a time.\n seed = seed + '\\0';\n v = 0;\n limit = Math.max(limit, seed.length);\n }\n // Initialize circular array and weyl value.\n for (i = 0, j = -32; j < limit; ++j) {\n // Put the unicode characters into the array, and shuffle them.\n if (seed) v ^= seed.charCodeAt((j + 32) % seed.length);\n // After 32 shuffles, take v as the starting w value.\n if (j === 0) w = v;\n v ^= v << 10;\n v ^= v >>> 15;\n v ^= v << 4;\n v ^= v >>> 13;\n if (j >= 0) {\n w = (w + 0x61c88647) | 0; // Weyl.\n t = (X[j & 127] ^= (v + w)); // Combine xor and weyl to init array.\n i = (0 == t) ? i + 1 : 0; // Count zeroes.\n }\n }\n // We have detected all zeroes; make the key nonzero.\n if (i >= 128) {\n X[(seed && seed.length || 0) & 127] = -1;\n }\n // Run the generator 512 times to further mix the state before using it.\n // Factoring this as a function slows the main generator, so it is just\n // unrolled here. 
The weyl generator is not advanced while warming up.\n i = 127;\n for (j = 4 * 128; j > 0; --j) {\n v = X[(i + 34) & 127];\n t = X[i = ((i + 1) & 127)];\n v ^= v << 13;\n t ^= t << 17;\n v ^= v >>> 15;\n t ^= t >>> 12;\n X[i] = v ^ t;\n }\n // Storing state as object members is faster than using closure variables.\n me.w = w;\n me.X = X;\n me.i = i;\n }\n\n init(me, seed);\n}\n\nfunction copy(f, t) {\n t.i = f.i;\n t.w = f.w;\n t.X = f.X.slice();\n return t;\n};\n\nfunction impl(seed, opts) {\n if (seed == null) seed = +(new Date);\n var xg = new XorGen(seed),\n state = opts && opts.state,\n prng = function() { return (xg.next() >>> 0) / 0x100000000; };\n prng.double = function() {\n do {\n var top = xg.next() >>> 11,\n bot = (xg.next() >>> 0) / 0x100000000,\n result = (top + bot) / (1 << 21);\n } while (result === 0);\n return result;\n };\n prng.int32 = xg.next;\n prng.quick = prng;\n if (state) {\n if (state.X) copy(state, xg);\n prng.state = function() { return copy(xg, {}); }\n }\n return prng;\n}\n\nif (module && module.exports) {\n module.exports = impl;\n} else if (define && define.amd) {\n define(function() { return impl; });\n} else {\n this.xor4096 = impl;\n}\n\n})(\n this, // window object or global\n (typeof module) == 'object' && module, // present in node.js\n (typeof define) == 'function' && define // present with an AMD loader\n);\n", "// A Javascript implementaion of the \"Tyche-i\" prng algorithm by\n// Samuel Neves and Filipe Araujo.\n// See https://eden.dei.uc.pt/~sneves/pubs/2011-snfa2.pdf\n\n(function(global, module, define) {\n\nfunction XorGen(seed) {\n var me = this, strseed = '';\n\n // Set up generator function.\n me.next = function() {\n var b = me.b, c = me.c, d = me.d, a = me.a;\n b = (b << 25) ^ (b >>> 7) ^ c;\n c = (c - d) | 0;\n d = (d << 24) ^ (d >>> 8) ^ a;\n a = (a - b) | 0;\n me.b = b = (b << 20) ^ (b >>> 12) ^ c;\n me.c = c = (c - d) | 0;\n me.d = (d << 16) ^ (c >>> 16) ^ a;\n return me.a = (a - b) | 0;\n };\n\n /* The following is non-inverted tyche, which has better internal\n * bit diffusion, but which is about 25% slower than tyche-i in JS.\n me.next = function() {\n var a = me.a, b = me.b, c = me.c, d = me.d;\n a = (me.a + me.b | 0) >>> 0;\n d = me.d ^ a; d = d << 16 ^ d >>> 16;\n c = me.c + d | 0;\n b = me.b ^ c; b = b << 12 ^ d >>> 20;\n me.a = a = a + b | 0;\n d = d ^ a; me.d = d = d << 8 ^ d >>> 24;\n me.c = c = c + d | 0;\n b = b ^ c;\n return me.b = (b << 7 ^ b >>> 25);\n }\n */\n\n me.a = 0;\n me.b = 0;\n me.c = 2654435769 | 0;\n me.d = 1367130551;\n\n if (seed === Math.floor(seed)) {\n // Integer seed.\n me.a = (seed / 0x100000000) | 0;\n me.b = seed | 0;\n } else {\n // String seed.\n strseed += seed;\n }\n\n // Mix in string seed, then discard an initial batch of 64 values.\n for (var k = 0; k < strseed.length + 20; k++) {\n me.b ^= strseed.charCodeAt(k) | 0;\n me.next();\n }\n}\n\nfunction copy(f, t) {\n t.a = f.a;\n t.b = f.b;\n t.c = f.c;\n t.d = f.d;\n return t;\n};\n\nfunction impl(seed, opts) {\n var xg = new XorGen(seed),\n state = opts && opts.state,\n prng = function() { return (xg.next() >>> 0) / 0x100000000; };\n prng.double = function() {\n do {\n var top = xg.next() >>> 11,\n bot = (xg.next() >>> 0) / 0x100000000,\n result = (top + bot) / (1 << 21);\n } while (result === 0);\n return result;\n };\n prng.int32 = xg.next;\n prng.quick = prng;\n if (state) {\n if (typeof(state) == 'object') copy(state, xg);\n prng.state = function() { return copy(xg, {}); }\n }\n return prng;\n}\n\nif (module && module.exports) {\n 
module.exports = impl;\n} else if (define && define.amd) {\n define(function() { return impl; });\n} else {\n this.tychei = impl;\n}\n\n})(\n this,\n (typeof module) == 'object' && module, // present in node.js\n (typeof define) == 'function' && define // present with an AMD loader\n);\n\n\n", "/*\nCopyright 2014 David Bau.\n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n\"Software\"), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\nSOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n*/\n\n(function (pool, math) {\n//\n// The following constants are related to IEEE 754 limits.\n//\nvar global = this,\n width = 256, // each RC4 output is 0 <= x < 256\n chunks = 6, // at least six RC4 outputs for each double\n digits = 52, // there are 52 significant digits in a double\n rngname = 'random', // rngname: name for Math.random and Math.seedrandom\n startdenom = math.pow(width, chunks),\n significance = math.pow(2, digits),\n overflow = significance * 2,\n mask = width - 1,\n nodecrypto; // node.js crypto module, initialized at the bottom.\n\n//\n// seedrandom()\n// This is the seedrandom function described above.\n//\nfunction seedrandom(seed, options, callback) {\n var key = [];\n options = (options == true) ? { entropy: true } : (options || {});\n\n // Flatten the seed string or build one from local entropy if needed.\n var shortseed = mixkey(flatten(\n options.entropy ? [seed, tostring(pool)] :\n (seed == null) ? 
autoseed() : seed, 3), key);\n\n // Use the seed to initialize an ARC4 generator.\n var arc4 = new ARC4(key);\n\n // This function returns a random double in [0, 1) that contains\n // randomness in every bit of the mantissa of the IEEE 754 value.\n var prng = function() {\n var n = arc4.g(chunks), // Start with a numerator n < 2 ^ 48\n d = startdenom, // and denominator d = 2 ^ 48.\n x = 0; // and no 'extra last byte'.\n while (n < significance) { // Fill up all significant digits by\n n = (n + x) * width; // shifting numerator and\n d *= width; // denominator and generating a\n x = arc4.g(1); // new least-significant-byte.\n }\n while (n >= overflow) { // To avoid rounding up, before adding\n n /= 2; // last byte, shift everything\n d /= 2; // right using integer math until\n x >>>= 1; // we have exactly the desired bits.\n }\n return (n + x) / d; // Form the number within [0, 1).\n };\n\n prng.int32 = function() { return arc4.g(4) | 0; }\n prng.quick = function() { return arc4.g(4) / 0x100000000; }\n prng.double = prng;\n\n // Mix the randomness into accumulated entropy.\n mixkey(tostring(arc4.S), pool);\n\n // Calling convention: what to return as a function of prng, seed, is_math.\n return (options.pass || callback ||\n function(prng, seed, is_math_call, state) {\n if (state) {\n // Load the arc4 state from the given state if it has an S array.\n if (state.S) { copy(state, arc4); }\n // Only provide the .state method if requested via options.state.\n prng.state = function() { return copy(arc4, {}); }\n }\n\n // If called as a method of Math (Math.seedrandom()), mutate\n // Math.random because that is how seedrandom.js has worked since v1.0.\n if (is_math_call) { math[rngname] = prng; return seed; }\n\n // Otherwise, it is a newer calling convention, so return the\n // prng directly.\n else return prng;\n })(\n prng,\n shortseed,\n 'global' in options ? options.global : (this == math),\n options.state);\n}\nmath['seed' + rngname] = seedrandom;\n\n//\n// ARC4\n//\n// An ARC4 implementation. The constructor takes a key in the form of\n// an array of at most (width) integers that should be 0 <= x < (width).\n//\n// The g(count) method returns a pseudorandom integer that concatenates\n// the next (count) outputs from ARC4. Its return value is a number x\n// that is in the range 0 <= x < (width ^ count).\n//\nfunction ARC4(key) {\n var t, keylen = key.length,\n me = this, i = 0, j = me.i = me.j = 0, s = me.S = [];\n\n // The empty key [] is treated as [0].\n if (!keylen) { key = [keylen++]; }\n\n // Set up S using the standard key scheduling algorithm.\n while (i < width) {\n s[i] = i++;\n }\n for (i = 0; i < width; i++) {\n s[i] = s[j = mask & (j + key[i % keylen] + (t = s[i]))];\n s[j] = t;\n }\n\n // The \"g\" method returns the next (count) outputs as one number.\n (me.g = function(count) {\n // Using instance members instead of closure state nearly doubles speed.\n var t, r = 0,\n i = me.i, j = me.j, s = me.S;\n while (count--) {\n t = s[i = mask & (i + 1)];\n r = r * width + s[mask & ((s[i] = s[j = mask & (j + t)]) + (s[j] = t))];\n }\n me.i = i; me.j = j;\n return r;\n // For robust unpredictability, the function call below automatically\n // discards an initial batch of values. 
This is called RC4-drop[256].\n // See http://google.com/search?q=rsa+fluhrer+response&btnI\n })(width);\n}\n\n//\n// copy()\n// Copies internal state of ARC4 to or from a plain object.\n//\nfunction copy(f, t) {\n t.i = f.i;\n t.j = f.j;\n t.S = f.S.slice();\n return t;\n};\n\n//\n// flatten()\n// Converts an object tree to nested arrays of strings.\n//\nfunction flatten(obj, depth) {\n var result = [], typ = (typeof obj), prop;\n if (depth && typ == 'object') {\n for (prop in obj) {\n try { result.push(flatten(obj[prop], depth - 1)); } catch (e) {}\n }\n }\n return (result.length ? result : typ == 'string' ? obj : obj + '\\0');\n}\n\n//\n// mixkey()\n// Mixes a string seed into a key that is an array of integers, and\n// returns a shortened string seed that is equivalent to the result key.\n//\nfunction mixkey(seed, key) {\n var stringseed = seed + '', smear, j = 0;\n while (j < stringseed.length) {\n key[mask & j] =\n mask & ((smear ^= key[mask & j] * 19) + stringseed.charCodeAt(j++));\n }\n return tostring(key);\n}\n\n//\n// autoseed()\n// Returns an object for autoseeding, using window.crypto and Node crypto\n// module if available.\n//\nfunction autoseed() {\n try {\n var out;\n if (nodecrypto && (out = nodecrypto.randomBytes)) {\n // The use of 'out' to remember randomBytes makes tight minified code.\n out = out(width);\n } else {\n out = new Uint8Array(width);\n (global.crypto || global.msCrypto).getRandomValues(out);\n }\n return tostring(out);\n } catch (e) {\n var browser = global.navigator,\n plugins = browser && browser.plugins;\n return [+new Date, global, plugins, global.screen, tostring(pool)];\n }\n}\n\n//\n// tostring()\n// Converts an array of charcodes to a string\n//\nfunction tostring(a) {\n return String.fromCharCode.apply(0, a);\n}\n\n//\n// When seedrandom.js is loaded, we immediately mix a few bits\n// from the built-in RNG into the entropy pool. Because we do\n// not want to interfere with deterministic PRNG state later,\n// seedrandom will not call math.random on its own again after\n// initialization.\n//\nmixkey(math.random(), pool);\n\n//\n// Nodejs and AMD support: export the implementation as a module using\n// either convention.\n//\nif ((typeof module) == 'object' && module.exports) {\n module.exports = seedrandom;\n // When in node.js, try using crypto package for autoseeding.\n try {\n nodecrypto = require('crypto');\n } catch (ex) {}\n} else if ((typeof define) == 'function' && define.amd) {\n define(function() { return seedrandom; });\n}\n\n// End anonymous scope, and pass initial values.\n})(\n [], // pool: entropy pool starts empty\n Math // math: package containing random, pow, and seedrandom\n);\n", "// A library of seedable RNGs implemented in Javascript.\n//\n// Usage:\n//\n// var seedrandom = require('seedrandom');\n// var random = seedrandom(1); // or any seed.\n// var x = random(); // 0 <= x < 1. Every bit is random.\n// var x = random.quick(); // 0 <= x < 1. 
32 bits of randomness.\n\n// alea, a 53-bit multiply-with-carry generator by Johannes Baag\u00F8e.\n// Period: ~2^116\n// Reported to pass all BigCrush tests.\nvar alea = require('./lib/alea');\n\n// xor128, a pure xor-shift generator by George Marsaglia.\n// Period: 2^128-1.\n// Reported to fail: MatrixRank and LinearComp.\nvar xor128 = require('./lib/xor128');\n\n// xorwow, George Marsaglia's 160-bit xor-shift combined plus weyl.\n// Period: 2^192-2^32\n// Reported to fail: CollisionOver, SimpPoker, and LinearComp.\nvar xorwow = require('./lib/xorwow');\n\n// xorshift7, by Fran\u00E7ois Panneton and Pierre L'ecuyer, takes\n// a different approach: it adds robustness by allowing more shifts\n// than Marsaglia's original three. It is a 7-shift generator\n// with 256 bits, that passes BigCrush with no systmatic failures.\n// Period 2^256-1.\n// No systematic BigCrush failures reported.\nvar xorshift7 = require('./lib/xorshift7');\n\n// xor4096, by Richard Brent, is a 4096-bit xor-shift with a\n// very long period that also adds a Weyl generator. It also passes\n// BigCrush with no systematic failures. Its long period may\n// be useful if you have many generators and need to avoid\n// collisions.\n// Period: 2^4128-2^32.\n// No systematic BigCrush failures reported.\nvar xor4096 = require('./lib/xor4096');\n\n// Tyche-i, by Samuel Neves and Filipe Araujo, is a bit-shifting random\n// number generator derived from ChaCha, a modern stream cipher.\n// https://eden.dei.uc.pt/~sneves/pubs/2011-snfa2.pdf\n// Period: ~2^127\n// No systematic BigCrush failures reported.\nvar tychei = require('./lib/tychei');\n\n// The original ARC4-based prng included in this library.\n// Period: ~2^1600\nvar sr = require('./seedrandom');\n\nsr.alea = alea;\nsr.xor128 = xor128;\nsr.xorwow = xorwow;\nsr.xorshift7 = xorshift7;\nsr.xor4096 = xor4096;\nsr.tychei = tychei;\n\nmodule.exports = sr;\n", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as seedrandom from 'seedrandom';\nimport { expectNumbersClose, testEpsilon } from '../test_util';\n// https://en.wikipedia.org/wiki/Marsaglia_polar_method\nexport class MPRandGauss {\n constructor(mean, stdDeviation, dtype, truncated, seed) {\n this.mean = mean;\n this.stdDev = stdDeviation;\n this.dtype = dtype;\n this.nextVal = NaN;\n this.truncated = truncated;\n if (this.truncated) {\n this.upper = this.mean + this.stdDev * 2;\n this.lower = this.mean - this.stdDev * 2;\n }\n const seedValue = seed ? seed : Math.random();\n this.random = seedrandom.alea(seedValue.toString());\n }\n /** Returns next sample from a Gaussian distribution. 
*/\n nextValue() {\n if (!isNaN(this.nextVal)) {\n const value = this.nextVal;\n this.nextVal = NaN;\n return value;\n }\n let resultX, resultY;\n let isValid = false;\n while (!isValid) {\n let v1, v2, s;\n do {\n v1 = 2 * this.random() - 1;\n v2 = 2 * this.random() - 1;\n s = v1 * v1 + v2 * v2;\n } while (s >= 1 || s === 0);\n const mul = Math.sqrt(-2.0 * Math.log(s) / s);\n resultX = this.mean + this.stdDev * v1 * mul;\n resultY = this.mean + this.stdDev * v2 * mul;\n if (!this.truncated || this.isValidTruncated(resultX)) {\n isValid = true;\n }\n }\n if (!this.truncated || this.isValidTruncated(resultY)) {\n this.nextVal = this.convertValue(resultY);\n }\n return this.convertValue(resultX);\n }\n /** Handles proper rounding for non-floating-point numbers. */\n convertValue(value) {\n if (this.dtype == null || this.dtype === 'float32') {\n return value;\n }\n return Math.round(value);\n }\n /** Returns true if less than 2-standard-deviations from the mean. */\n isValidTruncated(value) {\n return value <= this.upper && value >= this.lower;\n }\n}\n// Marsaglia, George, and Wai Wan Tsang. 2000. \"A Simple Method for Generating\n// Gamma Variables.\"\nexport class RandGamma {\n constructor(alpha, beta, dtype, seed) {\n this.alpha = alpha;\n this.beta = 1 / beta; // convert rate to scale parameter\n this.dtype = dtype;\n const seedValue = seed ? seed : Math.random();\n this.randu = seedrandom.alea(seedValue.toString());\n this.randn = new MPRandGauss(0, 1, dtype, false, this.randu());\n if (alpha < 1) {\n this.d = alpha + (2 / 3);\n }\n else {\n this.d = alpha - (1 / 3);\n }\n this.c = 1 / Math.sqrt(9 * this.d);\n }\n /** Returns next sample from a gamma distribution. */\n nextValue() {\n let x2, v0, v1, x, u, v;\n while (true) {\n do {\n x = this.randn.nextValue();\n v = 1 + (this.c * x);\n } while (v <= 0);\n v *= v * v;\n x2 = x * x;\n v0 = 1 - (0.331 * x2 * x2);\n v1 = (0.5 * x2) + (this.d * (1 - v + Math.log(v)));\n u = this.randu();\n if (u < v0 || Math.log(u) < v1) {\n break;\n }\n }\n v = (1 / this.beta) * this.d * v;\n if (this.alpha < 1) {\n v *= Math.pow(this.randu(), 1 / this.alpha);\n }\n return this.convertValue(v);\n }\n /** Handles proper rounding for non-floating-point numbers. */\n convertValue(value) {\n if (this.dtype === 'float32') {\n return value;\n }\n return Math.round(value);\n }\n}\nexport class UniformRandom {\n constructor(min = 0, max = 1, dtype, seed) {\n /** Handles proper rounding for non floating point numbers. 
*/\n this.canReturnFloat = () => (this.dtype == null || this.dtype === 'float32');\n this.min = min;\n this.range = max - min;\n this.dtype = dtype;\n if (seed == null) {\n seed = Math.random();\n }\n if (typeof seed === 'number') {\n seed = seed.toString();\n }\n if (!this.canReturnFloat() && this.range <= 1) {\n throw new Error(`The difference between ${min} - ${max} <= 1 and dtype is not float`);\n }\n this.random = seedrandom.alea(seed);\n }\n convertValue(value) {\n if (this.canReturnFloat()) {\n return value;\n }\n return Math.round(value);\n }\n nextValue() {\n return this.convertValue(this.min + this.range * this.random());\n }\n}\nexport function jarqueBeraNormalityTest(values) {\n // https://en.wikipedia.org/wiki/Jarque%E2%80%93Bera_test\n const n = values.length;\n const s = skewness(values);\n const k = kurtosis(values);\n const jb = n / 6 * (Math.pow(s, 2) + 0.25 * Math.pow(k - 3, 2));\n // JB test requires 2-degress of freedom from Chi-Square @ 0.95:\n // http://www.itl.nist.gov/div898/handbook/eda/section3/eda3674.htm\n const CHI_SQUARE_2DEG = 5.991;\n if (jb > CHI_SQUARE_2DEG) {\n throw new Error(`Invalid p-value for JB: ${jb}`);\n }\n}\nexport function expectArrayInMeanStdRange(actual, expectedMean, expectedStdDev, epsilon) {\n if (epsilon == null) {\n epsilon = testEpsilon();\n }\n const actualMean = mean(actual);\n expectNumbersClose(actualMean, expectedMean, epsilon);\n expectNumbersClose(standardDeviation(actual, actualMean), expectedStdDev, epsilon);\n}\nfunction mean(values) {\n let sum = 0;\n for (let i = 0; i < values.length; i++) {\n sum += values[i];\n }\n return sum / values.length;\n}\nfunction standardDeviation(values, mean) {\n let squareDiffSum = 0;\n for (let i = 0; i < values.length; i++) {\n const diff = values[i] - mean;\n squareDiffSum += diff * diff;\n }\n return Math.sqrt(squareDiffSum / values.length);\n}\nfunction kurtosis(values) {\n // https://en.wikipedia.org/wiki/Kurtosis\n const valuesMean = mean(values);\n const n = values.length;\n let sum2 = 0;\n let sum4 = 0;\n for (let i = 0; i < n; i++) {\n const v = values[i] - valuesMean;\n sum2 += Math.pow(v, 2);\n sum4 += Math.pow(v, 4);\n }\n return (1 / n) * sum4 / Math.pow((1 / n) * sum2, 2);\n}\nfunction skewness(values) {\n // https://en.wikipedia.org/wiki/Skewness\n const valuesMean = mean(values);\n const n = values.length;\n let sum2 = 0;\n let sum3 = 0;\n for (let i = 0; i < n; i++) {\n const v = values[i] - valuesMean;\n sum2 += Math.pow(v, 2);\n sum3 += Math.pow(v, 3);\n }\n return (1 / n) * sum3 / Math.pow((1 / (n - 1)) * sum2, 3 / 2);\n}\n//# sourceMappingURL=rand_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { buffer } from './buffer';\nimport { op } from './operation';\nimport { RandGamma } from './rand_util';\n/**\n * Creates a `tf.Tensor` with values sampled from a gamma distribution.\n *\n * ```js\n * tf.randomGamma([2, 2], 1).print();\n * ```\n *\n * @param shape An array of integers defining the output tensor shape.\n * @param alpha The shape parameter of the gamma distribution.\n * @param beta The inverse scale parameter of the gamma distribution. Defaults\n * to 1.\n * @param dtype The data type of the output. Defaults to float32.\n * @param seed The seed for the random number generator.\n *\n * @doc {heading: 'Tensors', subheading: 'Random'}\n */\nfunction randomGamma_(shape, alpha, beta = 1, dtype = 'float32', seed) {\n if (beta == null) {\n beta = 1;\n }\n if (dtype == null) {\n dtype = 'float32';\n }\n if (dtype !== 'float32' && dtype !== 'int32') {\n throw new Error(`Unsupported data type ${dtype}`);\n }\n const rgamma = new RandGamma(alpha, beta, dtype, seed);\n const res = buffer(shape, dtype);\n for (let i = 0; i < res.values.length; i++) {\n res.values[i] = rgamma.nextValue();\n }\n return res.toTensor();\n}\nexport const randomGamma = op({ randomGamma_ });\n//# sourceMappingURL=random_gamma.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { buffer } from './buffer';\nimport { op } from './operation';\nimport { MPRandGauss } from './rand_util';\n/**\n * Creates a `tf.Tensor` with values sampled from a normal distribution.\n *\n * ```js\n * tf.randomNormal([2, 2]).print();\n * ```\n *\n * @param shape An array of integers defining the output tensor shape.\n * @param mean The mean of the normal distribution.\n * @param stdDev The standard deviation of the normal distribution.\n * @param dtype The data type of the output.\n * @param seed The seed for the random number generator.\n *\n * @doc {heading: 'Tensors', subheading: 'Random'}\n */\nfunction randomNormal_(shape, mean = 0, stdDev = 1, dtype, seed) {\n if (dtype != null && dtype === 'bool') {\n throw new Error(`Unsupported data type ${dtype}`);\n }\n const randGauss = new MPRandGauss(mean, stdDev, dtype, false /* truncated */, seed);\n const res = buffer(shape, dtype);\n for (let i = 0; i < res.values.length; i++) {\n res.values[i] = randGauss.nextValue();\n }\n return res.toTensor();\n}\nexport const randomNormal = op({ randomNormal_ });\n//# sourceMappingURL=random_normal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { buffer } from './buffer';\nimport { op } from './operation';\nimport { UniformRandom } from './rand_util';\n/**\n * Creates a `tf.Tensor` with values sampled from a uniform distribution.\n *\n * The generated values follow a uniform distribution in the range [minval,\n * maxval). The lower bound minval is included in the range, while the upper\n * bound maxval is excluded.\n *\n * ```js\n * tf.randomUniform([2, 2]).print();\n * ```\n *\n * @param shape An array of integers defining the output tensor shape.\n * @param minval The lower bound on the range of random values to generate.\n * Defaults to 0.\n * @param maxval The upper bound on the range of random values to generate.\n * Defaults to 1.\n * @param dtype The data type of the output tensor. 
Defaults to 'float32'.\n *\n * @doc {heading: 'Tensors', subheading: 'Random'}\n */\nfunction randomUniform_(shape, minval = 0, maxval = 1, dtype = 'float32', seed) {\n const res = buffer(shape, dtype);\n const random = new UniformRandom(minval, maxval, null, seed);\n for (let i = 0; i < res.values.length; i++) {\n res.values[i] = random.nextValue();\n }\n return res.toTensor();\n}\nexport const randomUniform = op({ randomUniform_ });\n//# sourceMappingURL=random_uniform.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { inferShape } from '../tensor_util_env';\nimport { assertNonNull } from '../util';\nimport { makeTensor } from './tensor_ops_util';\n/**\n * Creates rank-1 `tf.Tensor` with the provided values, shape and dtype.\n *\n * The same functionality can be achieved with `tf.tensor`, but in general\n * we recommend using `tf.tensor1d` as it makes the code more readable.\n *\n * ```js\n * tf.tensor1d([1, 2, 3]).print();\n * ```\n *\n * @param values The values of the tensor. Can be array of numbers,\n * or a `TypedArray`.\n * @param dtype The data type.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function tensor1d(values, dtype) {\n assertNonNull(values);\n const inferredShape = inferShape(values, dtype);\n if (inferredShape.length !== 1) {\n throw new Error('tensor1d() requires values to be a flat/TypedArray');\n }\n const shape = null;\n return makeTensor(values, shape, inferredShape, dtype);\n}\n//# sourceMappingURL=tensor1d.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Range } from '../kernel_names';\nimport { makeZerosTypedArray } from '../util';\nimport { tensor1d } from './tensor1d';\nimport { zeros } from './zeros';\n/**\n * Creates a new `tf.Tensor1D` filled with the numbers in the range provided.\n *\n * The tensor is a is half-open interval meaning it includes start, but\n * excludes stop. 
Decrementing ranges and negative step values are also\n * supported.sv\n *\n *\n * ```js\n * tf.range(0, 9, 2).print();\n * ```\n *\n * @param start An integer start value\n * @param stop An integer stop value\n * @param step An integer increment (will default to 1 or -1)\n * @param dtype The data type of the output tensor. Defaults to 'float32'.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function range(start, stop, step = 1, dtype = 'float32') {\n if (step === 0) {\n throw new Error('Cannot have a step of zero');\n }\n const forward = () => {\n const sameStartStop = start === stop;\n const increasingRangeNegativeStep = start < stop && step < 0;\n const decreasingRangePositiveStep = stop < start && step > 1;\n if (sameStartStop || increasingRangeNegativeStep ||\n decreasingRangePositiveStep) {\n return zeros([0], dtype);\n }\n const numElements = Math.abs(Math.ceil((stop - start) / step));\n const values = makeZerosTypedArray(numElements, dtype);\n if (stop < start && step === 1) {\n // Auto adjust the step's sign if it hasn't been set\n // (or was set to 1)\n step = -1;\n }\n values[0] = start;\n for (let i = 1; i < values.length; i++) {\n values[i] = values[i - 1] + step;\n }\n return tensor1d(values, dtype);\n };\n const attrs = { start, stop, step, dtype };\n return ENGINE.runKernelFunc(forward, {} /* inputs */, null /* grad */, Range, attrs);\n}\n//# sourceMappingURL=range.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Reciprocal } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes reciprocal of x element-wise: `1 / x`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, 2]);\n *\n * x.reciprocal().print(); // or tf.reciprocal(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction reciprocal_(x) {\n const $x = convertToTensor(x, 'x', 'reciprocal');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.reciprocal($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Reciprocal);\n}\nexport const reciprocal = op({ reciprocal_ });\n//# sourceMappingURL=reciprocal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Relu } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\nimport { cast } from './cast';\n/**\n * Computes rectified linear element-wise: `max(x, 0)`.\n *\n * ```js\n * const x = tf.tensor1d([-1, 2, -3, 4]);\n *\n * x.relu().print(); // or tf.relu(x)\n * ```\n * @param x The input tensor. If the dtype is `bool`, the output dtype will be\n * `int32'.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction relu_(x) {\n const $x = convertToTensor(x, 'x', 'relu');\n const forward = (backend, save) => {\n save([$x]);\n if ($x.dtype === 'bool') {\n return cast($x, 'int32');\n }\n return backend.relu($x);\n };\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Relu);\n}\nexport const relu = op({ relu_ });\n//# sourceMappingURL=relu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Relu6 } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { cast } from './cast';\nimport { op } from './operation';\n/**\n * Computes rectified linear 6 element-wise: `min(max(x, 0), 6)`.\n *\n * ```js\n * const x = tf.tensor1d([-1, 2, -3, 8]);\n *\n * x.relu6().print(); // or tf.relu6(x)\n * ```\n * @param x The input tensor. If the dtype is `bool`, the output dtype will be\n * `int32'.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction relu6_(x) {\n const $x = convertToTensor(x, 'x', 'relu6');\n const forward = (backend, save) => {\n save([$x]);\n if ($x.dtype === 'bool') {\n return cast($x, 'int32');\n }\n return backend.relu6($x);\n };\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Relu6);\n}\nexport const relu6 = op({ relu6_ });\n//# sourceMappingURL=relu6.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Reverse } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam } from '../util';\nimport { clone } from './clone';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Reverses a `tf.Tensor` along a specified axis.\n *\n * Also available are stricter rank-specific methods that assert that `x` is\n * of the given rank:\n * - `tf.reverse1d`\n * - `tf.reverse2d`\n * - `tf.reverse3d`\n * - `tf.reverse4d`\n *\n * Except `tf.reverse1d` (which does not have axis param), all methods have\n * same signature as this method.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4]);\n *\n * x.reverse().print();\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * const axis = 1;\n * x.reverse(axis).print();\n * ```\n * @param x The input tensor to be reversed.\n * @param axis The set of dimensions to reverse. Must be in the\n * range [-rank(x), rank(x)). Defaults to all axes.\n *\n * @doc {heading: 'Tensors', subheading: 'Slicing and Joining'}\n */\nfunction reverse_(x, axis) {\n const $x = convertToTensor(x, 'x', 'reverse');\n const forward = (backend) => {\n const axes = parseAxisParam(axis, $x.shape);\n if ($x.rank === 0) {\n return clone($x);\n }\n const res = backend.reverse($x, axes);\n return reshape(res, $x.shape);\n };\n const inputs = { x: $x };\n const attrs = { dims: axis };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Reverse, attrs);\n}\nexport const reverse = op({ reverse_ });\n//# sourceMappingURL=reverse.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\nimport { reverse } from './reverse';\n/**\n * Reverses a `tf.Tensor1D`.\n *\n * @param x The input tensor.\n */\nfunction reverse1d_(x) {\n const $x = convertToTensor(x, 'x', 'reverse');\n util.assert($x.rank === 1, () => `Error in reverse1D: x must be rank 1 but got rank ${$x.rank}.`);\n return reverse($x, 0);\n}\nexport const reverse1d = op({ reverse1d_ });\n//# sourceMappingURL=reverse_1d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\nimport { reverse } from './reverse';\n/**\n * Reverses a `tf.Tensor2D` along a specified axis.\n *\n * @param x The input tensor.\n * @param axis The set of dimensions to reverse. Must be in the\n * range [-rank(x), rank(x)). Defaults to all axes.\n */\nfunction reverse2d_(x, axis) {\n const $x = convertToTensor(x, 'x', 'reverse');\n util.assert($x.rank === 2, () => `Error in reverse2D: x must be rank 2 but got rank ${$x.rank}.`);\n return reverse($x, axis);\n}\nexport const reverse2d = op({ reverse2d_ });\n//# sourceMappingURL=reverse_2d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\nimport { reverse } from './reverse';\n/**\n * Reverses a `tf.Tensor3D` along a specified axis.\n *\n * @param x The input tensor.\n * @param axis The set of dimensions to reverse. Must be in the\n * range [-rank(x), rank(x)). Defaults to all axes.\n */\nfunction reverse3d_(x, axis) {\n const $x = convertToTensor(x, 'x', 'reverse');\n util.assert($x.rank === 3, () => `Error in reverse3D: x must be rank 3 but got rank ${$x.rank}.`);\n return reverse($x, axis);\n}\nexport const reverse3d = op({ reverse3d_ });\n//# sourceMappingURL=reverse_3d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\nimport { reverse } from './reverse';\n/**\n * Reverses a `tf.Tensor4D` along a specified axis.\n *\n * @param x The input tensor.\n * @param axis The set of dimensions to reverse. Must be in the\n * range [-rank(x), rank(x)). Defaults to all axes.\n */\nfunction reverse4d_(x, axis) {\n const $x = convertToTensor(x, 'x', 'reverse');\n util.assert($x.rank === 4, () => `Error in reverse4D: x must be rank 4 but got rank ${$x.rank}.`);\n return reverse($x, axis);\n}\nexport const reverse4d = op({ reverse4d_ });\n//# sourceMappingURL=reverse_4d.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Round } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes round of input `tf.Tensor` element-wise: `round(x)`.\n * It implements banker's rounding.\n *\n * ```js\n * const x = tf.tensor1d([.6, 1.1, -3.3]);\n *\n * x.round().print(); // or tf.round(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction round_(x) {\n const $x = convertToTensor(x, 'x', 'round');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend) => backend.round($x), inputs, null /* grad */, Round);\n}\nexport const round = op({ round_ });\n//# sourceMappingURL=round.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Rsqrt } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes reciprocal of square root of the input `tf.Tensor` element-wise:\n * `y = 1 / sqrt(x)`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 4, -1]);\n *\n * x.rsqrt().print(); // or tf.rsqrt(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction rsqrt_(x) {\n const $x = convertToTensor(x, 'x', 'rsqrt');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.rsqrt($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Rsqrt);\n}\nexport const rsqrt = op({ rsqrt_ });\n//# sourceMappingURL=rsqrt.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Selu } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes scaled exponential linear element-wise.\n *\n * `x < 0 ? scale * alpha * (exp(x) - 1) : x`\n *\n * ```js\n * const x = tf.tensor1d([-1, 2, -3, 4]);\n *\n * x.selu().print(); // or tf.selu(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction selu_(x) {\n const $x = convertToTensor(x, 'x', 'selu');\n const forward = (backend, save) => {\n const res = backend.selu($x);\n save([$x]);\n return res;\n };\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Selu);\n}\nexport const selu = op({ selu_ });\n//# sourceMappingURL=selu.js.map", "import { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { conv2d } from './conv2d';\nimport { depthwiseConv2d } from './depthwise_conv2d';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * 2-D convolution with separable filters.\n *\n * Performs a depthwise convolution that acts separately on channels followed\n * by a pointwise convolution that mixes channels. Note that this is\n * separability between dimensions [1, 2] and 3, not spatial separability\n * between dimensions 1 and 2.\n *\n * See\n * [https://www.tensorflow.org/api_docs/python/tf/nn/separable_conv2d](\n * https://www.tensorflow.org/api_docs/python/tf/nn/separable_conv2d)\n * for more details.\n *\n * @param x The input tensor, of rank 4 or rank 3, of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is\n * assumed.\n * @param depthwiseFilter The depthwise filter tensor, rank 4, of shape\n * `[filterHeight, filterWidth, inChannels, channelMultiplier]`. This is\n * the filter used in the first step.\n * @param pointwiseFilter The pointwise filter tensor, rank 4, of shape\n * `[1, 1, inChannels * channelMultiplier, outChannels]`. This is\n * the filter used in the second step.\n * @param strides The strides of the convolution: `[strideHeight,\n * strideWidth]`. If strides is a single number, then `strideHeight ==\n * strideWidth`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dilations The dilation rates: `[dilationHeight, dilationWidth]`\n * in which we sample input values across the height and width dimensions\n * in atrous convolution. Defaults to `[1, 1]`. 
If `rate` is a single\n * number, then `dilationHeight == dilationWidth`. If it is greater than\n * 1, then all values of `strides` must be 1.\n * @param dataFormat: An optional string from: \"NHWC\", \"NCHW\". Defaults to\n * \"NHWC\". Specify the data format of the input and output data. With the\n * default format \"NHWC\", the data is stored in the order of: [batch,\n * height, width, channels]. Only \"NHWC\" is currently supported.\n *\n * @doc {heading: 'Operations', subheading: 'Convolution'}\n */\nfunction separableConv2d_(x, depthwiseFilter, pointwiseFilter, strides, pad, dilation = [1, 1], dataFormat = 'NHWC') {\n const $x = convertToTensor(x, 'x', 'separableConv2d');\n const $depthwiseFilter = convertToTensor(depthwiseFilter, 'depthwiseFilter', 'separableConv2d');\n const $pointwiseFilter = convertToTensor(pointwiseFilter, 'pointwiseFilter', 'separableConv2d');\n let x4D = $x;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n reshapedTo4D = true;\n x4D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2]]);\n }\n if (dataFormat === 'NCHW') {\n throw new Error('separableConv2d currently does not support dataFormat NCHW; only ' +\n 'NHWC is supported');\n }\n util.assert(x4D.rank === 4, () => `Error in separableConv2d: input must be rank 4, but got ` +\n `rank ${x4D.rank}.`);\n util.assert($depthwiseFilter.rank === 4, () => `Error in separableConv2d: depthwise filter must be rank 4, but ` +\n `got rank ${$depthwiseFilter.rank}.`);\n util.assert($pointwiseFilter.rank === 4, () => `Error in separableConv2d: pointwise filter must be rank 4, but ` +\n `got rank ${$depthwiseFilter.rank}.`);\n util.assert($pointwiseFilter.shape[0] === 1, () => `Error in separableConv2d: the first dimension of pointwise filter ` +\n ` must be 1, but got ${$pointwiseFilter.shape[0]}.`);\n util.assert($pointwiseFilter.shape[1] === 1, () => `Error in separableConv2d: the second dimension of pointwise ` +\n `filter must be 1, but got ${$pointwiseFilter.shape[1]}.`);\n const inChannels = $depthwiseFilter.shape[2];\n const channelMultiplier = $depthwiseFilter.shape[3];\n util.assert($pointwiseFilter.shape[2] === inChannels * channelMultiplier, () => `Error in separableConv2d: the third dimension of pointwise filter ` +\n `must be ${inChannels * channelMultiplier}, ` +\n `but got ${$pointwiseFilter.shape[2]}.`);\n const depthwise = depthwiseConv2d(x4D, $depthwiseFilter, strides, pad, dataFormat, dilation);\n const pointwiseStride = 1;\n const res = conv2d(depthwise, $pointwiseFilter, pointwiseStride, 'valid', dataFormat);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const separableConv2d = op({ separableConv2d_ });\n//# sourceMappingURL=separable_conv2d.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { TensorBuffer } from '../tensor';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\n/**\n * Computes the difference between two lists of numbers.\n *\n * Given a Tensor `x` and a Tensor `y`, this operation returns a Tensor `out`\n * that represents all values that are in `x` but not in `y`. The returned\n * Tensor `out` is sorted in the same order that the numbers appear in `x`\n * (duplicates are preserved). This operation also returns a Tensor indices that\n * represents the position of each out element in `x`. In other words:\n *\n * `out[i] = x[idx[i]] for i in [0, 1, ..., out.length - 1]`\n *\n * ```js\n * const x = [1, 2, 3, 4, 5, 6];\n * const y = [1, 3, 5];\n *\n * const [out, indices] = await tf.setdiff1dAsync(x, y);\n * out.print(); // [2, 4, 6]\n * indices.print(); // [1, 3, 5]\n * ```\n *\n * @param x 1-D Tensor. Values to keep.\n * @param y 1-D Tensor. Must have the same type as x. Values to exclude in the\n * output.\n * @returns Promise of Tensor tuple [out, indices].\n * out: Tensor with the same type as x.\n * indices: A Tensor of type int32.\n *\n * @doc {heading: 'Tensors', subheading: 'Transformations'}\n */\nasync function setdiff1dAsync_(x, y) {\n const $x = convertToTensor(x, 'x', 'setdiff1d');\n const $y = convertToTensor(y, 'y', 'setdiff1d');\n util.assert($x.dtype === $y.dtype, () => `x and y should have the same dtype, but got x (${$x.dtype}) and y (${$y.dtype}).`);\n util.assert($x.rank === 1, () => `x should be 1D tensor, but got x (${$x.shape}).`);\n util.assert($y.rank === 1, () => `y should be 1D tensor, but got y (${$y.shape}).`);\n const xVals = await $x.data();\n const yVals = await $y.data();\n const ySet = new Set(yVals);\n let outputSize = 0;\n for (let i = 0; i < xVals.length; i++) {\n if (!ySet.has(xVals[i])) {\n outputSize++;\n }\n }\n const buffer = new TensorBuffer([outputSize], $x.dtype);\n const indices = new TensorBuffer([outputSize], 'int32');\n for (let i = 0, p = 0; i < xVals.length; i++) {\n if (!ySet.has(xVals[i])) {\n buffer.values[p] = xVals[i];\n indices.values[p] = i;\n p++;\n }\n }\n return [buffer.toTensor(), indices.toTensor()];\n}\nexport const setdiff1dAsync = setdiff1dAsync_;\n//# sourceMappingURL=setdiff1d_async.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Sign } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Returns an element-wise indication of the sign of a number.\n *\n * ```js\n * const x = tf.tensor1d([.6, 1.1, -3.3, NaN, 0]);\n *\n * x.sign().print(); // or tf.sign(x)\n * ```\n * @param x The input Tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction sign_(x) {\n const $x = convertToTensor(x, 'x', 'sign');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(backend => backend.sign($x), inputs, null /* grad */, Sign);\n}\nexport const sign = op({ sign_ });\n//# sourceMappingURL=sign.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Sin } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes sin of the input Tensor element-wise: `sin(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, Math.PI / 2, Math.PI * 3 / 4]);\n *\n * x.sin().print(); // or tf.sin(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction sin_(x) {\n const $x = convertToTensor(x, 'x', 'sin');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.sin($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Sin);\n}\nexport const sin = op({ sin_ });\n//# sourceMappingURL=sin.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Sinh } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes hyperbolic sin of the input `tf.Tensor` element-wise: `sinh(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, .7]);\n *\n * x.sinh().print(); // or tf.sinh(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction sinh_(x) {\n const $x = convertToTensor(x, 'x', 'sinh');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.sinh($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Sinh);\n}\nexport const sinh = op({ sinh_ });\n//# sourceMappingURL=sinh.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\nimport { slice } from './slice';\n/**\n * Extracts a 1D slice from 1D array starting at coordinates `begin` and is\n * of length `size`. See `slice` for details.\n */\nfunction slice1d_(x, begin, size) {\n const $x = convertToTensor(x, 'x', 'slice1d');\n util.assert($x.rank === 1, () => `slice1d expects a rank-1 tensor, but got a rank-${$x.rank} tensor`);\n return slice($x, [begin], [size]);\n}\nexport const slice1d = op({ slice1d_ });\n//# sourceMappingURL=slice1d.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\nimport { slice } from './slice';\n/**\n * Extracts a 2D slice from a 2D array starting at coordinates `begin` and\n * is of size `size`. See `slice` for details.\n */\nfunction slice2d_(x, begin, size) {\n const $x = convertToTensor(x, 'x', 'slice2d');\n util.assert($x.rank === 2, () => `slice2d expects a rank-2 tensor, but got a rank-${$x.rank} tensor`);\n return slice($x, begin, size);\n}\nexport const slice2d = op({ slice2d_ });\n//# sourceMappingURL=slice2d.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\nimport { slice } from './slice';\n/**\n * Extracts a 3D slice from a 3D array starting at coordinates `begin` and\n * is of size `size`. See `slice` for details.\n */\nfunction slice3d_(x, begin, size) {\n const $x = convertToTensor(x, 'x', 'slice3d');\n util.assert($x.rank === 3, () => `slice3d expects a rank-3 tensor, but got a rank-${$x.rank} tensor`);\n return slice($x, begin, size);\n}\nexport const slice3d = op({ slice3d_ });\n//# sourceMappingURL=slice3d.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\nimport { slice } from './slice';\n/**\n * Extracts a 4D slice from a 4D array starting at coordinates `begin` and\n * is of size `size`. See `slice` for details.\n */\nfunction slice4d_(x, begin, size) {\n const $x = convertToTensor(x, 'x', 'slice4d');\n util.assert($x.rank === 4, () => `slice4d expects a rank-4 tensor, but got a rank-${$x.rank} tensor`);\n return slice($x, begin, size);\n}\nexport const slice4d = op({ slice4d_ });\n//# sourceMappingURL=slice4d.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Softmax } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes the softmax normalized vector given the logits.\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3]);\n *\n * a.softmax().print(); // or tf.softmax(a)\n * ```\n *\n * ```js\n * const a = tf.tensor2d([2, 4, 6, 1, 2, 3], [2, 3]);\n *\n * a.softmax().print(); // or tf.softmax(a)\n * ```\n *\n * @param logits The logits array.\n * @param dim The dimension softmax would be performed on. Defaults to `-1`\n * which indicates the last dimension.\n *\n * @doc {heading: 'Operations', subheading: 'Normalization'}\n */\nfunction softmax_(logits, dim = -1) {\n const $logits = convertToTensor(logits, 'logits', 'softmax', 'float32');\n if (dim === -1) {\n dim = $logits.rank - 1;\n }\n if (dim !== $logits.rank - 1) {\n throw Error('Softmax along a non-last dimension is not yet supported. ' +\n `Logits was rank ${$logits.rank} and dim was ${dim}`);\n }\n const inputs = { logits: $logits };\n const attrs = { dim };\n return ENGINE.runKernelFunc((backend, save) => {\n const y = backend.softmax($logits, dim);\n save([y]);\n return y;\n }, inputs, null /* grad */, Softmax, attrs);\n}\nexport const softmax = op({ softmax_ });\n//# sourceMappingURL=softmax.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { FFT } from '../../kernel_names';\nimport { assert } from '../../util';\nimport { op } from '../operation';\n/**\n * Fast Fourier transform.\n *\n * Computes the 1-dimensional discrete Fourier transform over the inner-most\n * dimension of input.\n *\n * ```js\n * const real = tf.tensor1d([1, 2, 3]);\n * const imag = tf.tensor1d([1, 2, 3]);\n * const x = tf.complex(real, imag);\n *\n * x.fft().print(); // tf.spectral.fft(x).print();\n * ```\n * @param input The complex input to compute an fft over.\n *\n * @doc {heading: 'Operations', subheading: 'Spectral', namespace: 'spectral'}\n */\nfunction fft_(input) {\n assert(input.dtype === 'complex64', () => `The dtype for tf.spectral.fft() must be complex64 ` +\n `but got ${input.dtype}.`);\n const inputs = { input };\n return ENGINE.runKernelFunc(backend => {\n // Collapse all outer dimensions to a single batch dimension.\n const innerDimensionSize = input.shape[input.shape.length - 1];\n const batch = input.size / innerDimensionSize;\n const input2D = input.as2D(batch, innerDimensionSize);\n const result = backend.fft(input2D);\n return result.reshape(input.shape);\n }, inputs, null /* gradient */, FFT);\n}\nexport const fft = op({ fft_ });\n//# sourceMappingURL=fft.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { IFFT } from '../../kernel_names';\nimport { assert } from '../../util';\nimport { op } from '../operation';\nimport { reshape } from '../reshape';\n/**\n * Inverse fast Fourier transform.\n *\n * Computes the inverse 1-dimensional discrete Fourier transform over the\n * inner-most dimension of input.\n *\n * ```js\n * const real = tf.tensor1d([1, 2, 3]);\n * const imag = tf.tensor1d([1, 2, 3]);\n * const x = tf.complex(real, imag);\n *\n * x.ifft().print(); // tf.spectral.ifft(x).print();\n * ```\n * @param input The complex input to compute an ifft over.\n *\n * @doc {heading: 'Operations', subheading: 'Spectral', namespace: 'spectral'}\n */\nfunction ifft_(input) {\n assert(input.dtype === 'complex64', () => `The dtype for tf.spectral.ifft() must be complex64 ` +\n `but got ${input.dtype}.`);\n const inputs = { input };\n return ENGINE.runKernelFunc(backend => {\n // Collapse all outer dimensions to a single batch dimension.\n const innerDimensionSize = input.shape[input.shape.length - 1];\n const batch = input.size / innerDimensionSize;\n const input2D = reshape(input, [batch, innerDimensionSize]);\n const result = backend.ifft(input2D);\n return reshape(result, input.shape);\n }, inputs, null /* gradient */, IFFT);\n}\nexport const ifft = op({ ifft_ });\n//# sourceMappingURL=ifft.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { complex } from '../complex';\nimport { concat } from '../concat';\nimport { imag } from '../imag';\nimport { mul } from '../mul';\nimport { op } from '../operation';\nimport { real } from '../real';\nimport { reshape } from '../reshape';\nimport { reverse } from '../reverse';\nimport { scalar } from '../scalar';\nimport { slice } from '../slice';\nimport { ifft } from './ifft';\n/**\n * Inversed real value input fast Fourier transform.\n *\n * Computes the 1-dimensional inversed discrete Fourier transform over the\n * inner-most dimension of the real input.\n *\n * ```js\n * const real = tf.tensor1d([1, 2, 3]);\n * const imag = tf.tensor1d([0, 0, 0]);\n * const x = tf.complex(real, imag);\n *\n * x.irfft().print();\n * ```\n * @param input The real value input to compute an irfft over.\n *\n * @doc {heading: 'Operations', subheading: 'Spectral', namespace: 'spectral'}\n */\nfunction irfft_(input) {\n const innerDimensionSize = input.shape[input.shape.length - 1];\n const batch = input.size / innerDimensionSize;\n let ret;\n if (innerDimensionSize <= 2) {\n const complexInput = reshape(input, [batch, innerDimensionSize]);\n ret = ifft(complexInput);\n }\n else {\n // The length of unique components of the DFT of a real-valued signal\n // is 2 * (input_len - 1)\n const outputShape = [batch, 2 * (innerDimensionSize - 1)];\n const realInput = reshape(real(input), [batch, innerDimensionSize]);\n const imagInput = reshape(imag(input), [batch, innerDimensionSize]);\n const realConjugate = reverse(slice(realInput, [0, 1], [batch, innerDimensionSize - 2]), 1);\n const imagConjugate = mul(reverse(slice(imagInput, [0, 1], [batch, innerDimensionSize - 2]), 1), scalar(-1));\n const r = concat([realInput, realConjugate], 1);\n const i = concat([imagInput, imagConjugate], 1);\n const complexInput = reshape(complex(r, i), [outputShape[0], outputShape[1]]);\n ret = ifft(complexInput);\n }\n ret = real(ret);\n // reshape the result if the input is 3D tensor.\n if (input.rank === 3 && input.shape[0] !== 0) {\n const temp = ret;\n const batch = input.shape[0];\n ret = reshape(ret, [batch, ret.shape[0] / batch, ret.shape[1]]);\n temp.dispose();\n }\n return ret;\n}\nexport const irfft = op({ irfft_ });\n//# sourceMappingURL=irfft.js.map", "import { assert } from '../util';\n/**\n * Prepare the split size array. When the input is a number, the axis is evenly\n * divided among the split size. 
When the input contains the negative value, the\n * rest of the axis is allocated toward that.\n */\nexport function prepareSplitSize(x, numOrSizeSplits, axis = 0) {\n let splitSizes = [];\n if (typeof (numOrSizeSplits) === 'number') {\n assert(x.shape[axis] % numOrSizeSplits === 0, () => 'Number of splits must evenly divide the axis.');\n splitSizes =\n new Array(numOrSizeSplits).fill(x.shape[axis] / numOrSizeSplits);\n }\n else {\n const numOfNegs = numOrSizeSplits.reduce((count, value) => {\n if (value === -1) {\n count += 1;\n }\n return count;\n }, 0);\n assert(numOfNegs <= 1, () => 'There should be only one negative value in split array.');\n const negIndex = numOrSizeSplits.indexOf(-1);\n // Allow the number of split array to be -1, which indicates the rest\n // of dimension is allocated to that split.\n if (negIndex !== -1) {\n const total = numOrSizeSplits.reduce((a, b) => b > 0 ? a + b : a);\n numOrSizeSplits[negIndex] = x.shape[axis] - total;\n }\n assert(x.shape[axis] === numOrSizeSplits.reduce((a, b) => a + b), () => 'The sum of sizes must match the size of the axis dimension.');\n splitSizes = numOrSizeSplits;\n }\n return splitSizes;\n}\n//# sourceMappingURL=split_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { SplitV } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam } from '../util';\nimport { op } from './operation';\nimport { prepareSplitSize } from './split_util';\n/**\n * Splits a `tf.Tensor` into sub tensors.\n *\n * If `numOrSizeSplits` is a number, splits `x` along dimension `axis`\n * into `numOrSizeSplits` smaller tensors.\n * Requires that `numOrSizeSplits` evenly divides `x.shape[axis]`.\n *\n * If `numOrSizeSplits` is a number array, splits `x` into\n * `numOrSizeSplits.length` pieces. The shape of the `i`-th piece has the\n * same size as `x` except along dimension `axis` where the size is\n * `numOrSizeSplits[i]`.\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4, 5, 6, 7, 8], [2, 4]);\n * const [a, b] = tf.split(x, 2, 1);\n * a.print();\n * b.print();\n *\n * const [c, d, e] = tf.split(x, [1, 2, 1], 1);\n * c.print();\n * d.print();\n * e.print();\n * ```\n *\n * @param x The input tensor to split.\n * @param numOrSizeSplits Either an integer indicating the number of\n * splits along the axis or an array of integers containing the sizes of\n * each output tensor along the axis. If a number then it must evenly divide\n * `x.shape[axis]`; otherwise the sum of sizes must match `x.shape[axis]`.\n * Can contain one -1 indicating that dimension is to be inferred.\n * @param axis The dimension along which to split. 
Defaults to 0 (the first\n * dim).\n *\n * @doc {heading: 'Tensors', subheading: 'Slicing and Joining'}\n */\nfunction split_(x, numOrSizeSplits, axis = 0) {\n const $x = convertToTensor(x, 'x', 'split');\n const forward = (backend, _) => {\n const $axis = parseAxisParam(axis, $x.shape)[0];\n const splitSizes = prepareSplitSize($x, numOrSizeSplits, $axis);\n return backend.split($x, splitSizes, $axis);\n };\n const inputs = { x: $x };\n const attr = { numOrSizeSplits, axis };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, SplitV, attr);\n}\nexport const split = op({ split_ });\n//# sourceMappingURL=split.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { assert } from '../../util';\nimport { complex } from '../complex';\nimport { concat } from '../concat';\nimport { imag } from '../imag';\nimport { op } from '../operation';\nimport { real } from '../real';\nimport { reshape } from '../reshape';\nimport { slice } from '../slice';\nimport { split } from '../split';\nimport { zeros } from '../zeros';\nimport { zerosLike } from '../zeros_like';\nimport { fft } from './fft';\n/**\n * Real value input fast Fourier transform.\n *\n * Computes the 1-dimensional discrete Fourier transform over the\n * inner-most dimension of the real input.\n *\n * ```js\n * const real = tf.tensor1d([1, 2, 3]);\n *\n * real.rfft().print();\n * ```\n * @param input The real value input to compute an rfft over.\n *\n * @doc {heading: 'Operations', subheading: 'Spectral', namespace: 'spectral'}\n */\nfunction rfft_(input, fftLength) {\n assert(input.dtype === 'float32', () => `The dtype for rfft() must be real value but got ${input.dtype}`);\n let innerDimensionSize = input.shape[input.shape.length - 1];\n const batch = input.size / innerDimensionSize;\n let adjustedInput;\n if (fftLength != null && fftLength < innerDimensionSize) {\n // Need to crop\n const begin = input.shape.map(v => 0);\n const size = input.shape.map(v => v);\n size[input.shape.length - 1] = fftLength;\n adjustedInput = slice(input, begin, size);\n innerDimensionSize = fftLength;\n }\n else if (fftLength != null && fftLength > innerDimensionSize) {\n // Need to pad with zeros\n const zerosShape = input.shape.map(v => v);\n zerosShape[input.shape.length - 1] = fftLength - innerDimensionSize;\n adjustedInput = concat([input, zeros(zerosShape)], input.shape.length - 1);\n innerDimensionSize = fftLength;\n }\n else {\n adjustedInput = input;\n }\n // Complement the input with zero imaginary numbers.\n const zerosInput = zerosLike(adjustedInput);\n const complexInput = reshape(complex(adjustedInput, zerosInput), [batch, innerDimensionSize]);\n const ret = fft(complexInput);\n // Exclude complex conjugations. 
These conjugations are put symmetrically.\n const half = Math.floor(innerDimensionSize / 2) + 1;\n const realValues = real(ret);\n const imagValues = imag(ret);\n const realComplexConjugate = split(realValues, [half, innerDimensionSize - half], realValues.shape.length - 1);\n const imagComplexConjugate = split(imagValues, [half, innerDimensionSize - half], imagValues.shape.length - 1);\n const outputShape = adjustedInput.shape.slice();\n outputShape[adjustedInput.shape.length - 1] = half;\n return reshape(complex(realComplexConjugate[0], imagComplexConjugate[0]), outputShape);\n}\nexport const rfft = op({ rfft_ });\n//# sourceMappingURL=rfft.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Sqrt } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes square root of the input `tf.Tensor` element-wise: `y = sqrt(x)`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 4, -1]);\n *\n * x.sqrt().print(); // or tf.sqrt(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction sqrt_(x) {\n const $x = convertToTensor(x, 'x', 'sqrt');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.sqrt($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Sqrt);\n}\nexport const sqrt = op({ sqrt_ });\n//# sourceMappingURL=sqrt.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { SquaredDifference } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { op } from './operation';\n/**\n * Returns (a - b) * (a - b) element-wise.\n * Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([1, 4, 3, 16]);\n * const b = tf.tensor1d([1, 2, 9, 4]);\n *\n * a.squaredDifference(b).print(); // or tf.squaredDifference(a, b)\n * ```\n *\n * ```js\n * // Broadcast squared difference a with b.\n * const a = tf.tensor1d([2, 4, 6, 8]);\n * const b = tf.scalar(5);\n *\n * a.squaredDifference(b).print(); // or tf.squaredDifference(a, b)\n * ```\n *\n * @param a The first tensor.\n * @param b The second tensor. Must have the same type as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction squaredDifference_(a, b) {\n let $a = convertToTensor(a, 'a', 'squaredDifference');\n let $b = convertToTensor(b, 'b', 'squaredDifference');\n [$a, $b] = makeTypesMatch($a, $b);\n assertAndGetBroadcastShape($a.shape, $b.shape);\n const forward = (backend, save) => {\n const res = backend.squaredDifference($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n const attrs = {};\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, SquaredDifference, attrs);\n}\nexport const squaredDifference = op({ squaredDifference_ });\n//# sourceMappingURL=squared_difference.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport { squeezeShape } from '../util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Removes dimensions of size 1 from the shape of a `tf.Tensor`.\n *\n * ```js\n * const x = tf.tensor([1, 2, 3, 4], [1, 1, 4]);\n * x.squeeze().print();\n * ```\n *\n * @param x The input tensor to be squeezed.\n * @param axis An optional list of numbers. If specified, only\n * squeezes the dimensions listed. The dimension index starts at 0. 
It\n * is an error to squeeze a dimension that is not 1.\n *\n * @doc {heading: 'Tensors', subheading: 'Transformations'}\n */\nfunction squeeze_(x, axis) {\n const $x = convertToTensor(x, 'x', 'squeeze');\n return reshape($x, squeezeShape($x.shape, axis).newShape);\n}\nexport const squeeze = op({ squeeze_ });\n//# sourceMappingURL=squeeze.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensorArray } from '../tensor_util_env';\nimport * as util from '../util';\nimport { concat } from './concat';\nimport { expandDims } from './expand_dims';\nimport { op } from './operation';\n/**\n * Stacks a list of rank-`R` `tf.Tensor`s into one rank-`(R+1)` `tf.Tensor`.\n *\n * ```js\n * const a = tf.tensor1d([1, 2]);\n * const b = tf.tensor1d([3, 4]);\n * const c = tf.tensor1d([5, 6]);\n * tf.stack([a, b, c]).print();\n * ```\n *\n * @param tensors A list of tensor objects with the same shape and dtype.\n * @param axis The axis to stack along. Defaults to 0 (the first dim).\n *\n * @doc {heading: 'Tensors', subheading: 'Slicing and Joining'}\n */\nfunction stack_(tensors, axis = 0) {\n const $tensors = convertToTensorArray(tensors, 'tensors', 'stack');\n util.assert($tensors.length >= 1, () => 'Pass at least one tensor to tf.stack');\n if ($tensors.length === 1) {\n return expandDims($tensors[0], axis);\n }\n const rank = $tensors[0].rank;\n const shape = $tensors[0].shape;\n const dtype = $tensors[0].dtype;\n util.assert(axis <= rank, () => 'Axis must be <= rank of the tensor');\n $tensors.forEach(t => {\n util.assertShapesMatch(shape, t.shape, 'All tensors passed to stack must have matching shapes');\n util.assert(dtype === t.dtype, () => 'All tensors passed to stack must have matching dtypes');\n });\n const expandedTensors = $tensors.map(t => expandDims(t, axis));\n // Stack exists in the TensorFlow C++ API\n // (https://www.tensorflow.org/api_docs/cc/class/tensorflow/ops/stack) but not\n // in\n // https://raw.githubusercontent.com/tensorflow/tensorflow/master/tensorflow/core/ops/ops.pbtxt.\n // Therefore we are treating it like a high-level op rather than\n // creating a dedicated stack kernel.\n return concat(expandedTensors, axis);\n}\nexport const stack = op({ stack_ });\n//# sourceMappingURL=stack.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Step } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes step of the input `tf.Tensor` element-wise: `x > 0 ? 1 : alpha * x`\n *\n * ```js\n * const x = tf.tensor1d([0, 2, -1, -3]);\n *\n * x.step(.5).print(); // or tf.step(x, .5)\n * ```\n * @param x The input tensor.\n * @param alpha The gradient when input is negative.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction step_(x, alpha = 0.0) {\n const $x = convertToTensor(x, 'x', 'step');\n const inputs = { x: $x };\n const attrs = { alpha };\n return ENGINE.runKernelFunc(backend => backend.step($x, alpha), inputs, null /* grad */, Step, attrs);\n}\nexport const step = op({ step_ });\n//# sourceMappingURL=step.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { StridedSlice } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { slice } from './slice';\nimport { computeOutShape, getNormalizedAxes, maskToAxes } from './slice_util';\n/**\n * Extracts a strided slice of a tensor.\n *\n * Roughly speaking, this op extracts a slice of size (end-begin)/stride from\n * the given input tensor (x). Starting at the location specified by begin the\n * slice continues by adding stride to the index until all dimensions are not\n * less than end. 
Note that a stride can be negative, which causes a reverse\n * slice.\n *\n * ```js\n * const t = tf.tensor3d([1, 1, 1 ,2, 2, 2, 3, 3, 3, 4, 4, 4, 5, 5, 5, 6, 6, 6],\n * [3, 2, 3]);\n * t.stridedSlice([1, 0, 0], [2, 1, 3], [1, 1, 1]).print() // [[[3, 3, 3]]]\n * t.stridedSlice([1, 0, 0], [2, 2, 3], [1, 1, 1]).print() // [[[3, 3, 3],\n * // [4, 4, 4]]]\n * t.stridedSlice([1, -1, 0], [2, -3, 3], [1, -1, 1]).print() // [[[4, 4, 4],\n * // [3, 3, 3]]]\n * ```\n *\n * @param x The tensor to stride slice.\n * @param begin The coordinates to start the slice from.\n * @param end: The coordinates to end the slice at.\n * @param strides: The size of the slice.\n * @param beginMask: If the ith bit of beginMask is set, begin[i] is ignored\n * and the fullest possible range in that dimension is used instead.\n * @param endMask: If the ith bit of endMask is set, end[i] is ignored\n * and the fullest possible range in that dimension is used instead.\n * @param shrinkAxisMask: a bitmask where bit i implies that\n * the ith specification should shrink the dimensionality. begin and end must\n * imply a slice of size 1 in the dimension.\n *\n * @doc {heading: 'Operations', subheading: 'Slicing and Joining'}\n */\nfunction stridedSlice_(x, begin, end, strides, beginMask = 0, endMask = 0, ellipsisMask = 0, newAxisMask = 0, shrinkAxisMask = 0) {\n let $x = convertToTensor(x, 'x', 'stridedSlice');\n const forward = (backend) => {\n if (strides == null) {\n strides = new Array(begin.length);\n }\n const ellipsisAxes = maskToAxes(ellipsisMask);\n if (ellipsisAxes.length > 1) {\n throw new Error('Multiple ellipses in slice is not allowed.');\n }\n if (ellipsisMask !== 0 && newAxisMask !== 0) {\n throw new Error('Using both ellipsisMask and newAxisMask is not yet supported.');\n }\n if (ellipsisMask !== 0 && shrinkAxisMask !== 0) {\n throw new Error('Using both ellipsisMask and shrinkAxisMask is not yet supported.');\n }\n const numInterpolatedAxes = $x.rank - begin.length;\n // Expand the dims of x based on the newAxisMask.\n const expandAxes = maskToAxes(newAxisMask);\n const newShape = $x.shape.slice();\n expandAxes.forEach(axis => {\n begin[axis] = 0;\n end[axis] = 1;\n newShape.splice(axis, 0, 1);\n });\n $x = reshape($x, newShape);\n const { begin: normalizedBegin, end: normalizedEnd, strides: normalizedStrides } = getNormalizedAxes($x.shape, ellipsisAxes, numInterpolatedAxes, begin, end, strides, beginMask, endMask, ellipsisMask);\n begin = normalizedBegin;\n end = normalizedEnd;\n strides = normalizedStrides;\n const shrinkAxes = maskToAxes(shrinkAxisMask);\n // Adjust the ends based on the shrink mask.\n shrinkAxes.forEach(axis => {\n end[axis] = begin[axis] + 1;\n strides[axis] = 1;\n });\n // Figure out the output shape.\n const size = computeOutShape(begin, end, strides);\n // Remove the axes based on shrinkMask.\n const outShape = size.filter((_, axis) => shrinkAxes.indexOf(axis) === -1);\n const nonStrided = strides.every(v => v === 1);\n if (nonStrided) {\n return reshape(slice($x, begin, size), outShape);\n }\n const res = backend.stridedSlice($x, begin, end, strides);\n return reshape(res, outShape);\n };\n const inputs = { x: $x };\n const attrs = {\n begin,\n end,\n strides,\n beginMask,\n endMask,\n ellipsisMask,\n newAxisMask,\n shrinkAxisMask\n };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, StridedSlice, attrs);\n}\nexport const stridedSlice = op({ stridedSlice_ });\n//# sourceMappingURL=strided_slice.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Tan } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes tan of the input `tf.Tensor` element-wise, `tan(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, Math.PI / 2, Math.PI * 3 / 4]);\n *\n * x.tan().print(); // or tf.tan(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction tan_(x) {\n const $x = convertToTensor(x, 'x', 'tan');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.tan($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Tan);\n}\nexport const tan = op({ tan_ });\n//# sourceMappingURL=tan.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { inferShape } from '../tensor_util_env';\nimport { assertNonNull } from '../util';\nimport { makeTensor } from './tensor_ops_util';\n/**\n * Creates rank-2 `tf.Tensor` with the provided values, shape and dtype.\n *\n * The same functionality can be achieved with `tf.tensor`, but in general\n * we recommend using `tf.tensor2d` as it makes the code more readable.\n *\n * ```js\n * // Pass a nested array.\n * tf.tensor2d([[1, 2], [3, 4]]).print();\n * ```\n * ```js\n * // Pass a flat array and specify a shape.\n * tf.tensor2d([1, 2, 3, 4], [2, 2]).print();\n * ```\n *\n * @param values The values of the tensor. Can be nested array of numbers,\n * or a flat array, or a `TypedArray`.\n * @param shape The shape of the tensor. 
If not provided, it is inferred from\n * `values`.\n * @param dtype The data type.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function tensor2d(values, shape, dtype) {\n assertNonNull(values);\n if (shape != null && shape.length !== 2) {\n throw new Error('tensor2d() requires shape to have two numbers');\n }\n const inferredShape = inferShape(values, dtype);\n if (inferredShape.length !== 2 && inferredShape.length !== 1) {\n throw new Error('tensor2d() requires values to be number[][] or flat/TypedArray');\n }\n if (inferredShape.length === 1 && shape == null) {\n throw new Error('tensor2d() requires shape to be provided when `values` ' +\n 'are a flat/TypedArray');\n }\n return makeTensor(values, shape, inferredShape, dtype);\n}\n//# sourceMappingURL=tensor2d.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { inferShape } from '../tensor_util_env';\nimport { assertNonNull } from '../util';\nimport { makeTensor } from './tensor_ops_util';\n/**\n * Creates rank-4 `tf.Tensor` with the provided values, shape and dtype.\n *\n * The same functionality can be achieved with `tf.tensor`, but in general\n * we recommend using `tf.tensor4d` as it makes the code more readable.\n *\n * ```js\n * // Pass a nested array.\n * tf.tensor4d([[[[1], [2]], [[3], [4]]]]).print();\n * ```\n * ```js\n * // Pass a flat array and specify a shape.\n * tf.tensor4d([1, 2, 3, 4], [1, 2, 2, 1]).print();\n * ```\n *\n * @param values The values of the tensor. Can be nested array of numbers,\n * or a flat array, or a `TypedArray`.\n * @param shape The shape of the tensor. Optional. If not provided,\n * it is inferred from `values`.\n * @param dtype The data type.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function tensor4d(values, shape, dtype) {\n assertNonNull(values);\n if (shape != null && shape.length !== 4) {\n throw new Error('tensor4d() requires shape to have four numbers');\n }\n const inferredShape = inferShape(values, dtype);\n if (inferredShape.length !== 4 && inferredShape.length !== 1) {\n throw new Error('tensor4d() requires values to be number[][][][] or flat/TypedArray');\n }\n if (inferredShape.length === 1 && shape == null) {\n throw new Error('tensor4d() requires shape to be provided when `values` ' +\n 'are a flat array');\n }\n return makeTensor(values, shape, inferredShape, dtype);\n}\n//# sourceMappingURL=tensor4d.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { inferShape } from '../tensor_util_env';\nimport { assertNonNull } from '../util';\nimport { makeTensor } from './tensor_ops_util';\n/**\n * Creates rank-5 `tf.Tensor` with the provided values, shape and dtype.\n *\n * The same functionality can be achieved with `tf.tensor`, but in general\n * we recommend using `tf.tensor5d` as it makes the code more readable.\n *\n * ```js\n * // Pass a nested array.\n * tf.tensor5d([[[[[1], [2]], [[3], [4]]]]]).print();\n * ```\n * ```js\n * // Pass a flat array and specify a shape.\n * tf.tensor5d([1, 2, 3, 4, 5, 6, 7, 8], [1, 2, 2, 2, 1]).print();\n * ```\n *\n * @param values The values of the tensor. Can be nested array of numbers,\n * or a flat array, or a `TypedArray`.\n * @param shape The shape of the tensor. Optional. If not provided,\n * it is inferred from `values`.\n * @param dtype The data type.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function tensor5d(values, shape, dtype) {\n assertNonNull(values);\n if (shape != null && shape.length !== 5) {\n throw new Error('tensor5d() requires shape to have five numbers');\n }\n const inferredShape = inferShape(values, dtype);\n if (inferredShape.length !== 5 && inferredShape.length !== 1) {\n throw new Error('tensor5d() requires values to be ' +\n 'number[][][][][] or flat/TypedArray');\n }\n if (inferredShape.length === 1 && shape == null) {\n throw new Error('tensor5d() requires shape to be provided when `values` ' +\n 'are a flat array');\n }\n return makeTensor(values, shape, inferredShape, dtype);\n}\n//# sourceMappingURL=tensor5d.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { inferShape } from '../tensor_util_env';\nimport { assertNonNull } from '../util';\nimport { makeTensor } from './tensor_ops_util';\n/**\n * Creates rank-6 `tf.Tensor` with the provided values, shape and dtype.\n *\n * The same functionality can be achieved with `tf.tensor`, but in general\n * we recommend using `tf.tensor6d` as it makes the code more readable.\n *\n * ```js\n * // Pass a nested array.\n * tf.tensor6d([[[[[[1],[2]],[[3],[4]]],[[[5],[6]],[[7],[8]]]]]]).print();\n * ```\n * ```js\n * // Pass a flat array and specify a shape.\n * tf.tensor6d([1, 2, 3, 4, 5, 6, 7, 8], [1, 1, 2, 2, 2, 1]).print();\n * ```\n *\n * @param values The values of the tensor. Can be nested array of numbers,\n * or a flat array, or a `TypedArray`.\n * @param shape The shape of the tensor. Optional. If not provided,\n * it is inferred from `values`.\n * @param dtype The data type.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function tensor6d(values, shape, dtype) {\n assertNonNull(values);\n if (shape != null && shape.length !== 6) {\n throw new Error('tensor6d() requires shape to have six numbers');\n }\n const inferredShape = inferShape(values, dtype);\n if (inferredShape.length !== 6 && inferredShape.length !== 1) {\n throw new Error('tensor6d() requires values to be number[][][][][][] or ' +\n 'flat/TypedArray');\n }\n if (inferredShape.length === 1 && shape == null) {\n throw new Error('tensor6d() requires shape to be provided when `values` ' +\n 'are a flat array');\n }\n shape = shape ||\n inferredShape;\n return makeTensor(values, shape, inferredShape, dtype);\n}\n//# sourceMappingURL=tensor6d.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { TopK } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Finds the values and indices of the `k` largest entries along the last\n * dimension.\n *\n * If the input is a vector (rank=1), finds the k largest entries in the vector\n * and outputs their values and indices as vectors. 
Thus values[j] is the j-th\n * largest entry in input, and its index is indices[j].\n * For higher rank inputs, computes the top k entries along the last dimension.\n *\n * If two elements are equal, the lower-index element appears first.\n *\n * ```js\n * const a = tf.tensor2d([[1, 5], [4, 3]]);\n * const {values, indices} = tf.topk(a);\n * values.print();\n * indices.print();\n * ```\n * @param x 1-D or higher `tf.Tensor` with last dimension being at least `k`.\n * @param k Number of top elements to look for along the last dimension.\n * @param sorted If true, the resulting `k` elements will be sorted by the\n * values in descending order.\n *\n * @doc {heading: 'Operations', subheading: 'Evaluation'}\n */\nfunction topk_(x, k = 1, sorted = true) {\n const $x = convertToTensor(x, 'x', 'topk');\n if ($x.rank === 0) {\n throw new Error('topk() expects the input to be of rank 1 or higher');\n }\n const lastDim = $x.shape[$x.shape.length - 1];\n if (k > lastDim) {\n throw new Error(`'k' passed to topk() must be <= the last dimension (${lastDim}) ` +\n `but got ${k}`);\n }\n const inputs = { x: $x };\n const attrs = { k, sorted };\n const [values, indices] = ENGINE.runKernelFunc(b => b.topk($x, k, sorted), inputs, null /* grad */, TopK, attrs);\n return { values, indices };\n}\nexport const topk = op({ topk_ });\n//# sourceMappingURL=topk.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { buffer } from './buffer';\nimport { op } from './operation';\nimport { MPRandGauss } from './rand_util';\n/**\n * Creates a `tf.Tensor` with values sampled from a truncated normal\n * distribution.\n *\n * ```js\n * tf.truncatedNormal([2, 2]).print();\n * ```\n *\n * The generated values follow a normal distribution with specified mean and\n * standard deviation, except that values whose magnitude is more than 2\n * standard deviations from the mean are dropped and re-picked.\n *\n * @param shape An array of integers defining the output tensor shape.\n * @param mean The mean of the normal distribution.\n * @param stdDev The standard deviation of the normal distribution.\n * @param dtype The data type of the output tensor.\n * @param seed The seed for the random number generator.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nfunction truncatedNormal_(shape, mean = 0, stdDev = 1, dtype, seed) {\n if (dtype != null && dtype === 'bool') {\n throw new Error(`Unsupported data type $ { dtype }`);\n }\n const randGauss = new MPRandGauss(mean, stdDev, dtype, true /* truncated */, seed);\n const res = buffer(shape, dtype);\n for (let i = 0; i < res.values.length; i++) {\n res.values[i] = randGauss.nextValue();\n }\n return res.toTensor();\n}\nexport const truncatedNormal = op({ truncatedNormal_ });\n//# sourceMappingURL=truncated_normal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Unique } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assert } from '../util';\nimport { op } from './operation';\n/**\n * Finds unique elements along an axis of a tensor.\n *\n * It returns a tensor `values` containing all of the unique elements along the\n * `axis` of the given tensor `x` in the same order that they occur along the\n * `axis` in `x`; `x` does not need to be sorted. It also returns a tensor\n * `indices` the same size as the number of the elements in `x` along the `axis`\n * dimension. It contains the index in the unique output `values`.\n *\n * ```js\n * // A 1-D tensor\n * const a = tf.tensor1d([1, 1, 2, 4, 4, 4, 7, 8, 8]);\n * const {values, indices} = tf.unique(a);\n * values.print(); // [1, 2, 4, 7, 8,]\n * indices.print(); // [0, 0, 1, 2, 2, 2, 3, 4, 4]\n * ```\n *\n * ```js\n * // A 2-D tensor with axis=0\n * //\n * // 'a' is: [[1, 0, 0],\n * // [1, 0, 0],\n * // [2, 0, 0]]\n * const a = tf.tensor2d([[1, 0, 0], [1, 0, 0], [2, 0, 0]]);\n * const {values, indices} = tf.unique(a, 0)\n * values.print(); // [[1, 0, 0],\n * // [2, 0, 0]]\n * indices.print(); // [0, 0, 1]\n * ```\n *\n * ```js\n * // A 2-D tensor with axis=1\n * //\n * // 'a' is: [[1, 0, 0],\n * // [1, 0, 0],\n * // [2, 0, 0]]\n * const a = tf.tensor2d([[1, 0, 0], [1, 0, 0], [2, 0, 0]]);\n * const {values, indices} = tf.unique(a, 1)\n * values.print(); // [[1, 0],\n * // [1, 0],\n * // [2, 0]]\n * indices.print(); // [0, 1, 1]\n * ```\n * @param x A tensor (int32, string, bool).\n * @param axis The axis of the tensor to find the unique elements.\n * @returns [uniqueElements, indices] (see above for details)\n *\n * @doc {heading: 'Operations', subheading: 'Evaluation'}\n */\nfunction unique_(x, axis = 0) {\n // x can be of any dtype, thus null as the last argument.\n const $x = convertToTensor(x, 'x', 'unique', null);\n assert($x.rank > 0, () => 'The input tensor must be at least 1D');\n const inputs = { x: $x };\n const attrs = { axis };\n const [values, indices] = ENGINE.runKernel(Unique, inputs, attrs);\n return { values, indices };\n}\nexport const unique = op({ unique_ });\n//# sourceMappingURL=unique.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { UnsortedSegmentSum } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assert, isInt } from '../util';\nimport { op } from './operation';\n/**\n * Computes the sum along segments of a `tf.Tensor`.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4]);\n * const segmentIds = tf.tensor1d([1, 2, 0, 1], 'int32');\n * const numSegments = 3;\n *\n * x.unsortedSegmentSum(segmentIds, numSegments).print()\n * //or tf.unsortedSegmentSum(x, segmentIds, numSegments)\n * ```\n * @param x The `tf.Tensor` that will be summed along its segments.\n * @param segmentIds A `tf.Tensor1D` whose rank is equal to the rank of `x`'s\n * dimension along the `axis`. Maps each element of `x` to a segment.\n * @param numSegments The number of distinct `segmentIds`.\n *\n * @doc {heading: 'Operations', subheading: 'Segment'}\n */\nfunction unsortedSegmentSum_(x, segmentIds, numSegments) {\n const $x = convertToTensor(x, 'x', 'unsortedSegmentSum');\n const $segmentIds = convertToTensor(segmentIds, 'segmentIds', 'unsortedSegmentSum', 'int32');\n assert(isInt(numSegments), () => 'numSegments must be of dtype int');\n const inputs = { x: $x, segmentIds: $segmentIds };\n const attrs = { numSegments };\n const forward = (backend, save) => {\n const res = backend.unsortedSegmentSum($x, $segmentIds, numSegments);\n save([$segmentIds]);\n return res;\n };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, UnsortedSegmentSum, attrs);\n}\nexport const unsortedSegmentSum = op({ unsortedSegmentSum_ });\n//# sourceMappingURL=unsorted_segment_sum.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Unpack } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * Unstacks a `tf.Tensor` of rank-`R` into a list of rank-`(R-1)` `tf.Tensor`s.\n *\n * ```js\n * const a = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * tf.unstack(a).forEach(tensor => tensor.print());\n * ```\n *\n * @param x A tensor object.\n * @param axis The axis to unstack along. 
Defaults to 0 (the first dim).\n *\n * @doc {heading: 'Tensors', subheading: 'Slicing and Joining'}\n */\nfunction unstack_(x, axis = 0) {\n const $x = convertToTensor(x, 'x', 'unstack');\n util.assert(axis >= -$x.shape.length && axis < $x.shape.length, () => `Axis = ${axis} is not in [-${$x.shape.length}, ${$x.shape.length})`);\n if (axis < 0) {\n axis += $x.shape.length;\n }\n const inputs = { value: $x };\n const attrs = { axis };\n const forward = (backend) => backend.unstack($x, axis);\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Unpack, attrs);\n}\nexport const unstack = op({ unstack_ });\n//# sourceMappingURL=unstack.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\n/**\n * Creates a new variable with the provided initial value.\n * ```js\n * const x = tf.variable(tf.tensor([1, 2, 3]));\n * x.assign(tf.tensor([4, 5, 6]));\n *\n * x.print();\n * ```\n *\n * @param initialValue Initial value for the tensor.\n * @param trainable If true, optimizers are allowed to update it.\n * @param name Name of the variable. Defaults to a unique id.\n * @param dtype If set, initialValue will be converted to the given type.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function variable(initialValue, trainable = true, name, dtype) {\n return ENGINE.makeVariable(initialValue, trainable, name, dtype);\n}\n//# sourceMappingURL=variable.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/** An implementation of the Where kernel shared between cpu and webgl */\nimport { buffer } from '../ops/buffer';\nexport function whereImpl(condShape, condVals) {\n const indices = [];\n for (let i = 0; i < condVals.length; i++) {\n if (condVals[i]) {\n indices.push(i);\n }\n }\n const inBuffer = buffer(condShape, 'int32');\n const out = buffer([indices.length, condShape.length], 'int32');\n for (let i = 0; i < indices.length; i++) {\n const loc = inBuffer.indexToLoc(indices[i]);\n const offset = i * condShape.length;\n out.values.set(loc, offset);\n }\n return out.toTensor();\n}\n//# sourceMappingURL=where_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { whereImpl } from '../backends/where_impl';\nimport { convertToTensor } from '../tensor_util_env';\n/**\n * Returns the coordinates of true elements of condition.\n *\n * The coordinates are returned in a 2-D tensor where the first dimension (rows)\n * represents the number of true elements, and the second dimension (columns)\n * represents the coordinates of the true elements. Keep in mind, the shape of\n * the output tensor can vary depending on how many true values there are in\n * input. Indices are output in row-major order. The resulting tensor has the\n * shape `[numTrueElems, condition.rank]`.\n *\n * This is analogous to calling the python `tf.where(cond)` without an x or y.\n *\n * ```js\n * const cond = tf.tensor1d([false, false, true], 'bool');\n * const result = await tf.whereAsync(cond);\n * result.print();\n * ```\n *\n * @doc {heading: 'Operations', subheading: 'Logical'}\n */\nasync function whereAsync_(condition) {\n const $condition = convertToTensor(condition, 'condition', 'whereAsync', 'bool');\n const vals = await $condition.data();\n const res = whereImpl($condition.shape, vals);\n if (condition !== $condition) {\n $condition.dispose();\n }\n return res;\n}\nexport const whereAsync = whereAsync_;\n//# sourceMappingURL=where_async.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { gather } from './gather';\nimport { reshape } from './reshape';\nimport { squeeze } from './squeeze';\nimport { whereAsync } from './where_async';\n/**\n * Apply boolean mask to tensor.\n *\n * ```js\n * const tensor = tf.tensor2d([1, 2, 3, 4, 5, 6], [3, 2]);\n * const mask = tf.tensor1d([1, 0, 1], 'bool');\n * const result = await tf.booleanMaskAsync(tensor, mask);\n * result.print();\n * ```\n *\n * @param tensor N-D tensor.\n * @param mask K-D boolean tensor, K <= N and K must be known statically.\n * @param axis A 0-D int Tensor representing the axis in tensor to mask from.\n * By default, axis is 0 which will mask from the first dimension.\n * Otherwise K + axis <= N.\n *\n * @doc {heading: 'Tensors', subheading: 'Slicing and Joining'}\n */\nasync function booleanMaskAsync_(tensor, mask, axis) {\n const $tensor = convertToTensor(tensor, 'tensor', 'boolMask');\n const $mask = convertToTensor(mask, 'mask', 'boolMask', 'bool');\n const axisFrom = axis == null ? 0 : axis;\n const maskDim = $mask.rank;\n const tensorShape = $tensor.shape;\n util.assert(maskDim > 0, () => 'mask cannot be scalar');\n util.assertShapesMatch(tensorShape.slice(axisFrom, axisFrom + maskDim), $mask.shape, `mask's shape must match the first K dimensions of tensor's shape,`);\n let leadingSize = 1;\n for (let i = axisFrom; i < axisFrom + maskDim; i++) {\n leadingSize *= tensorShape[i];\n }\n const targetTensorShape = tensorShape.slice(0, axisFrom)\n .concat([leadingSize], tensorShape.slice(axisFrom + maskDim));\n const reshapedTensor = reshape($tensor, targetTensorShape);\n const reshapedMask = reshape($mask, [-1]);\n const positivePositions = await whereAsync(reshapedMask);\n const indices = squeeze(positivePositions, [1]);\n const res = gather(reshapedTensor, indices, axisFrom);\n // Ensure no memory leak.\n if (tensor !== $tensor) {\n $tensor.dispose();\n }\n if (mask !== $mask) {\n $mask.dispose();\n }\n indices.dispose();\n reshapedTensor.dispose();\n reshapedMask.dispose();\n positivePositions.dispose();\n return res;\n}\nexport const booleanMaskAsync = booleanMaskAsync_;\n//# sourceMappingURL=boolean_mask.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { deprecationWarn } from '../globals';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertShapesMatch } from '../util';\nimport { equal } from './equal';\nimport { greater } from './greater';\nimport { greaterEqual } from './greater_equal';\nimport { less } from './less';\nimport { lessEqual } from './less_equal';\nimport { notEqual } from './not_equal';\nimport { op } from './operation';\n/**\n * @deprecated\n * Strict version of `tf.notEqual` that forces `a` and `b` to be of the same\n * shape.\n *\n * @param a The first input tensor.\n * @param b The second input tensor. Must have the same shape and dtype as\n * `a`.\n */\nfunction notEqualStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'notEqualStrict');\n const $b = convertToTensor(b, 'b', 'notEqualStrict');\n assertShapesMatch($a.shape, $b.shape, 'Error in notEqualStrict: ');\n return notEqual($a, $b);\n}\n/**\n * @deprecated\n * Strict version of `tf.less` that forces `a` and `b` to be of the same\n * shape.\n *\n * @param a The first input tensor.\n * @param b The second input tensor. 
Must have the same shape and dtype as\n * `a`.\n */\nfunction lessStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'lessStrict');\n const $b = convertToTensor(b, 'b', 'lessStrict');\n assertShapesMatch($a.shape, $b.shape, 'Error in lessStrict: ');\n return less($a, $b);\n}\nfunction equalStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'equalStrict');\n const $b = convertToTensor(b, 'b', 'equalStrict');\n assertShapesMatch($a.shape, $b.shape, 'Error in equalStrict: ');\n return equal($a, $b);\n}\nfunction lessEqualStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'lessEqualStrict');\n const $b = convertToTensor(b, 'b', 'lessEqualStrict');\n assertShapesMatch($a.shape, $b.shape, 'Error in lessEqualStrict: ');\n return lessEqual($a, $b);\n}\nfunction greaterStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'greaterStrict');\n const $b = convertToTensor(b, 'b', 'greaterStrict');\n assertShapesMatch($a.shape, $b.shape, 'Error in greaterStrict: ');\n return greater($a, $b);\n}\nfunction greaterEqualStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'greaterEqualStrict');\n const $b = convertToTensor(b, 'b', 'greaterEqualStrict');\n assertShapesMatch($a.shape, $b.shape, 'Error in greaterEqualStrict: ');\n return greaterEqual($a, $b);\n}\nexport const equalStrict = op({ equalStrict_ });\nexport const greaterEqualStrict = op({ greaterEqualStrict_ });\nexport const greaterStrict = op({ greaterStrict_ });\nexport const lessEqualStrict = op({ lessEqualStrict_ });\nexport const lessStrict = op({ lessStrict_ });\nexport const notEqualStrict = op({ notEqualStrict_ });\n//# sourceMappingURL=compare.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { deprecationWarn } from '../globals';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { add } from './add';\nimport { div } from './div';\nimport { maximum } from './maximum';\nimport { minimum } from './minimum';\nimport { mod } from './mod';\nimport { mul } from './mul';\nimport { op } from './operation';\nimport { pow } from './pow';\nimport { squaredDifference } from './squared_difference';\nimport { sub } from './sub';\n/**\n * @deprecated\n * Adds two `tf.Tensor`s element-wise, A + B.\n *\n * Inputs must be the same shape. 
For broadcasting support, use add() instead.\n *\n * @param a The first Tensor to add element-wise.\n * @param b The second Tensor to add element-wise.\n */\nfunction addStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'addStrict');\n const $b = convertToTensor(b, 'b', 'addStrict');\n util.assertShapesMatch($a.shape, $b.shape, 'Error in addStrict: ');\n return add($a, $b);\n}\n/**\n * @deprecated\n * Subtracts two `tf.Tensor`s element-wise, A - B. Inputs must\n * be the same shape.\n *\n * For broadcasting support, use `tf.sub` instead.\n *\n * @param a The first Tensor to subtract element-wise.\n * @param b The second Tensor to subtract element-wise.\n */\nfunction subStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'subStrict');\n const $b = convertToTensor(b, 'b', 'subStrict');\n util.assertShapesMatch($a.shape, $b.shape, 'Error in subStrict: ');\n return sub($a, $b);\n}\n/**\n * @deprecated\n * Computes the power of one `tf.Tensor` to another. Inputs must\n * be the same shape.\n *\n * For broadcasting support, use `tf.pow` instead.\n *\n * @param base The base tensor to pow element-wise.\n * @param exp The exponent tensor to pow element-wise.\n */\nfunction powStrict_(base, exp) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n util.assertShapesMatch(base.shape, exp.shape, 'Error in powStrict: ');\n return pow(base, exp);\n}\n/**\n * @deprecated\n * Multiplies two `tf.Tensor`s element-wise, A * B.\n *\n * Inputs must be the same shape. For broadcasting support, use `tf.mul`.\n *\n * @param a The first tensor to multiply.\n * @param b The first tensor to multiply. Must have the same\n * dtype as `a`.\n */\nfunction mulStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'mul');\n const $b = convertToTensor(b, 'b', 'mul');\n util.assertShapesMatch($a.shape, $b.shape, 'Error in multiplyStrict: ');\n return mul($a, $b);\n}\n/**\n * @deprecated\n * Divides two `tf.Tensor`s element-wise, A / B. Inputs must\n * be the same shape.\n *\n * @param a The first tensor as the numerator for element-wise division.\n * @param b The second tensor as the denominator for element-wise division.\n */\nfunction divStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'div');\n const $b = convertToTensor(b, 'b', 'div');\n util.assertShapesMatch($a.shape, $b.shape, 'Error in divideStrict: ');\n return div($a, $b);\n}\n/**\n * @deprecated\n * Returns the mod of a and b (`a < b ? a : b`) element-wise. Inputs must\n * be the same shape. For broadcasting support, use mod().\n *\n * @param a The first tensor.\n * @param b The second tensor. Must have the same dtype as `a`.\n */\nfunction modStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'modStrict');\n const $b = convertToTensor(b, 'b', 'modStrict');\n util.assertShapesMatch($a.shape, $b.shape, 'Error in modStrict: ');\n return mod($a, $b);\n}\n/**\n * @deprecated\n * Returns the min of a and b (`a < b ? a : b`) element-wise. Inputs must\n * be the same shape. 
For broadcasting support, use minimum().\n *\n * @param a The first tensor.\n * @param b The second tensor. Must have the same dtype as `a`.\n */\nfunction minimumStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'minimumStrict');\n const $b = convertToTensor(b, 'b', 'minimumStrict');\n util.assertShapesMatch($a.shape, $b.shape, 'Error in minimumStrict: ');\n return minimum($a, $b);\n}\n/**\n * @deprecated\n * Returns the max of a and b (`a > b ? a : b`) element-wise. Inputs must\n * be the same shape. For broadcasting support, use maximum().\n *\n * @param a The first tensor.\n * @param b The second tensor. Must have the same dtype as `a`.\n */\nfunction maximumStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'maximumStrict');\n const $b = convertToTensor(b, 'b', 'maximumStrict');\n util.assertShapesMatch($a.shape, $b.shape, 'Error in maximumStrict: ');\n return maximum($a, $b);\n}\n/**\n * @deprecated\n * Returns (a - b) * (a - b) element-wise.\n *\n * Inputs must be the same shape. For broadcasting support, use\n * `tf.squaredDifference` instead.\n *\n * @param a The first tensor.\n * @param b The second tensor. Must have the same type as `a`.\n */\nfunction squaredDifferenceStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'squaredDifferenceStrict');\n const $b = convertToTensor(b, 'b', 'squaredDifferenceStrict');\n util.assertShapesMatch($a.shape, $b.shape, 'Error in squaredDifferenceStrict: ');\n return squaredDifference($a, $b);\n}\nexport const addStrict = op({ addStrict_ });\nexport const divStrict = op({ divStrict_ });\nexport const maximumStrict = op({ maximumStrict_ });\nexport const minimumStrict = op({ minimumStrict_ });\nexport const modStrict = op({ modStrict_ });\nexport const mulStrict = op({ mulStrict_ });\nexport const powStrict = op({ powStrict_ });\nexport const squaredDifferenceStrict = op({ squaredDifferenceStrict_ });\nexport const subStrict = op({ subStrict_ });\n//# sourceMappingURL=binary_ops.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam } from '../util';\nimport { abs } from './abs';\nimport * as axis_util from './axis_util';\nimport { max } from './max';\nimport { min } from './min';\nimport { op } from './operation';\nimport { pow } from './pow';\nimport { reshape } from './reshape';\nimport { scalar } from './scalar';\nimport { sqrt } from './sqrt';\nimport { square } from './square';\nimport { sum } from './sum';\n/**\n * Computes the norm of scalar, vectors, and matrices.\n * This function can compute several different vector norms (the 1-norm, the\n * Euclidean or 2-norm, the inf-norm, and in general the p-norm for p > 0)\n * and matrix norms (Frobenius, 1-norm, and inf-norm).\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4]);\n *\n * x.norm().print(); // or tf.norm(x)\n * ```\n *\n * @param x The input array.\n * @param ord Optional. Order of the norm. Supported norm types are\n * following:\n *\n * | ord | norm for matrices | norm for vectors\n * |------------|---------------------------|---------------------\n * |'euclidean' |Frobenius norm |2-norm\n * |'fro' |Frobenius norm\t |\n * |Infinity |max(sum(abs(x), axis=1)) |max(abs(x))\n * |-Infinity |min(sum(abs(x), axis=1)) |min(abs(x))\n * |1 |max(sum(abs(x), axis=0)) |sum(abs(x))\n * |2 | |sum(abs(x)^2)^1/2*\n *\n * @param axis Optional. If axis is null (the default), the input is\n * considered a vector and a single vector norm is computed over the entire\n * set of values in the Tensor, i.e. norm(x, ord) is equivalent\n * to norm(x.reshape([-1]), ord). If axis is a integer, the input\n * is considered a batch of vectors, and axis determines the axis in x\n * over which to compute vector norms. If axis is a 2-tuple of integer it is\n * considered a batch of matrices and axis determines the axes in NDArray\n * over which to compute a matrix norm.\n * @param keepDims Optional. 
If true, the norm have the same dimensionality\n * as the input.\n *\n * @doc {heading: 'Operations', subheading: 'Matrices'}\n */\nfunction norm_(x, ord = 'euclidean', axis = null, keepDims = false) {\n x = convertToTensor(x, 'x', 'norm');\n const norm = normImpl(x, ord, axis);\n let keepDimsShape = norm.shape;\n if (keepDims) {\n const axes = parseAxisParam(axis, x.shape);\n keepDimsShape = axis_util.expandShapeToKeepDim(norm.shape, axes);\n }\n return reshape(norm, keepDimsShape);\n}\nfunction normImpl(x, p, axis = null) {\n if (x.rank === 0) {\n return abs(x);\n }\n // consider vector when no axis is specified\n if (x.rank !== 1 && axis === null) {\n return normImpl(reshape(x, [-1]), p, axis);\n }\n // vector\n if (x.rank === 1 || typeof axis === 'number' ||\n Array.isArray(axis) && axis.length === 1) {\n if (p === 1) {\n return sum(abs(x), axis);\n }\n if (p === Infinity) {\n return max(abs(x), axis);\n }\n if (p === -Infinity) {\n return min(abs(x), axis);\n }\n if (p === 'euclidean' || p === 2) {\n // norm(x, 2) = sum(abs(xi) ^ 2) ^ 1/2\n return sqrt(sum(pow(abs(x), scalar(2, 'int32')), axis));\n }\n throw new Error(`Error in norm: invalid ord value: ${p}`);\n }\n // matrix (assumption axis[0] < axis[1])\n if (Array.isArray(axis) && axis.length === 2) {\n if (p === 1) {\n return max(sum(abs(x), axis[0]), axis[1] - 1);\n }\n if (p === Infinity) {\n return max(sum(abs(x), axis[1]), axis[0]);\n }\n if (p === -Infinity) {\n return min(sum(abs(x), axis[1]), axis[0]);\n }\n if (p === 'fro' || p === 'euclidean') {\n // norm(x) = sqrt(sum(pow(x, 2)))\n return sqrt(sum(square(x), axis));\n }\n throw new Error(`Error in norm: invalid ord value: ${p}`);\n }\n throw new Error(`Error in norm: invalid axis: ${axis}`);\n}\nexport const norm = op({ norm_ });\n//# sourceMappingURL=norm.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { assertTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { add } from './add';\nimport { div } from './div';\nimport { mul } from './mul';\nimport { op } from './operation';\nimport { pow } from './pow';\nimport { scalar } from './scalar';\nimport { sub } from './sub';\n/**\n * Compute the moving average of a variable.\n *\n * Without zeroDebias, the moving average operation is defined by:\n * `v += delta`\n * where\n * `delta = (1 - decay) * (x - v)`\n *\n * With zeroDebias (default), the `delta` term is scaled to debias the\n * effect of the (assumed) zero-initialization of `v`.\n * `delta /= (1 - decay ^ step)`\n *\n * For more details on the zero-debiasing algorithm, see:\n * https://arxiv.org/abs/1412.6980\n *\n * Note that this function is completely stateless and does not keep track of\n * step count. 
The step count needs to be maintained by the caller and passed\n * in as `step`.\n *\n * @param v The current moving average value.\n * @param x New input value, must have the same shape and dtype as `v`.\n * @param decay The decay factor. Typical values are 0.95 and 0.99.\n * @param step Step count.\n * @param zeroDebias: Whether zeroDebias is to be performed (default: `true`).\n * @returns The new moving average value.\n *\n * @doc {heading: 'Operations', subheading: 'Moving Average'}\n */\nfunction movingAverage_(v, x, decay, step, zeroDebias = true) {\n const $v = convertToTensor(v, 'v', 'movingAverage');\n const $x = convertToTensor(x, 'x', 'movingAverage');\n const $decay = convertToTensor(decay, 'decay', 'movingAverage');\n assertTypesMatch($v, $x);\n util.assert(util.arraysEqual($v.shape, $x.shape), () => 'Shape mismatch in v and x');\n const one = scalar(1);\n const oneMinusDecay = sub(one, $decay);\n let update = mul(sub($x, $v), oneMinusDecay);\n if (zeroDebias) {\n util.assert(step != null, () => 'When using zeroDebias: true, step is required.');\n const $step = convertToTensor(step, 'step', 'movingAverage');\n update = div(update, sub(one, pow($decay, $step)));\n }\n return add($v, update);\n}\nexport const movingAverage = op({ movingAverage_ });\n//# sourceMappingURL=moving_average.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { ScatterNd } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\nimport * as scatter_nd_util from './scatter_nd_util';\n/**\n * Creates a new tensor by applying sparse updates to individual\n * values or slices within a zero tensor of the given shape tensor according to\n * indices. 
This operator is the inverse of the `tf.gatherND` operator which\n * extracts values or slices from a given tensor.\n *\n * ```js\n * const indices = tf.tensor2d([4, 3, 1, 7], [4, 1], 'int32');\n * const updates = tf.tensor1d([9, 10, 11, 12]);\n * const shape = [8];\n * tf.scatterND(indices, updates, shape).print() //[0, 11, 0, 10, 9, 0, 0, 12]\n * ```\n *\n * @param indices The tensor contains the indices into the output tensor.\n * @param updates The tensor contains the value for the indices.\n * @param shape: The shape of the output tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Slicing and Joining'}\n */\nfunction scatterND_(indices, updates, shape) {\n const $indices = convertToTensor(indices, 'indices', 'scatterND', 'int32');\n const $updates = convertToTensor(updates, 'updates', 'scatterND');\n scatter_nd_util.validateInput($updates, $indices, shape);\n const forward = (backend) => {\n return backend.scatterND($indices, $updates, shape);\n };\n const inputs = { indices: $indices, updates: $updates };\n const attrs = { shape };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, ScatterNd, attrs);\n}\nexport const scatterND = op({ scatterND_ });\n//# sourceMappingURL=scatter_nd.js.map", "/**\n * Validate sparseToDense inputs.\n *\n * @param sparseIndices A 0-D, 1-D, or 2-D Tensor of type int32.\n * sparseIndices[i] contains the complete index where sparseValues[i] will be\n * placed.\n * @param sparseValues A 0-D or 1-D Tensor. Values\n * corresponding to each row of sparseIndices, or a scalar value to be used for\n * all sparse indices.\n * @param outputShape number[]. Shape of the dense output tensor.\n * @param validateIndices boolean. indice validation is not supported, error\n * will be thrown if it is set.\n */\nexport function validateInput(sparseIndices, sparseValues, outputShape, defaultValues) {\n if (sparseIndices.dtype !== 'int32') {\n throw new Error('tf.sparseToDense() expects the indices to be int32 type,' +\n ` but the dtype was ${sparseIndices.dtype}.`);\n }\n if (sparseIndices.rank > 2) {\n throw new Error('sparseIndices should be a scalar, vector, or matrix,' +\n ` but got shape ${sparseIndices.shape}.`);\n }\n const numElems = sparseIndices.rank > 0 ? sparseIndices.shape[0] : 1;\n const numDims = sparseIndices.rank > 1 ? sparseIndices.shape[1] : 1;\n if (outputShape.length !== numDims) {\n throw new Error('outputShape has incorrect number of elements:,' +\n ` ${outputShape.length}, should be: ${numDims}.`);\n }\n const numValues = sparseValues.size;\n if (!(sparseValues.rank === 0 ||\n sparseValues.rank === 1 && numValues === numElems)) {\n throw new Error('sparseValues has incorrect shape ' +\n `${sparseValues.shape}, should be [] or [${numElems}]`);\n }\n if (sparseValues.dtype !== defaultValues.dtype) {\n throw new Error('sparseValues.dtype must match defaultValues.dtype');\n }\n}\n//# sourceMappingURL=sparse_to_dense_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { SparseToDense } from '../kernel_names';\nimport * as sparse_to_dense from '../ops/sparse_to_dense_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Converts a sparse representation into a dense tensor.\n *\n * Builds an array dense with shape outputShape such that:\n *\n * // If sparseIndices is scalar\n * dense[i] = (i == sparseIndices ? sparseValues : defaultValue)\n *\n * // If sparseIndices is a vector, then for each i\n * dense[sparseIndices[i]] = sparseValues[i]\n *\n * // If sparseIndices is an n by d matrix, then for each i in [0, n)\n * dense[sparseIndices[i][0], ..., sparseIndices[i][d-1]] = sparseValues[i]\n * All other values in dense are set to defaultValue. If sparseValues is a\n * scalar, all sparse indices are set to this single value.\n *\n * If indices are repeated the final value is summed over all values for those\n * indices.\n *\n * ```js\n * const indices = tf.tensor1d([4, 5, 6, 1, 2, 3], 'int32');\n * const values = tf.tensor1d([10, 11, 12, 13, 14, 15], 'float32');\n * const shape = [8];\n * tf.sparseToDense(indices, values, shape).print();\n * ```\n *\n * @param sparseIndices A 0-D, 1-D, or 2-D Tensor of type int32.\n * sparseIndices[i] contains the complete index where sparseValues[i] will be\n * placed.\n * @param sparseValues A 0-D or 1-D Tensor. Values\n * corresponding to each row of sparseIndices, or a scalar value to be used for\n * all sparse indices.\n * @param outputShape Shape of the dense output tensor. the type is inferred.\n * @param defaultValue Scalar. Value to set for indices not specified in\n * sparseIndices. Defaults to zero.\n *\n * @doc {heading: 'Operations', subheading: 'Normalization'}\n */\nfunction sparseToDense_(sparseIndices, sparseValues, outputShape, defaultValue = 0) {\n const $sparseIndices = convertToTensor(sparseIndices, 'sparseIndices', 'sparseToDense', 'int32');\n const $sparseValues = convertToTensor(sparseValues, 'sparseValues', 'sparseToDense');\n const $defaultValue = convertToTensor(defaultValue, 'defaultValue', 'sparseToDense', $sparseValues.dtype);\n sparse_to_dense.validateInput($sparseIndices, $sparseValues, outputShape, $defaultValue);\n const inputs = {\n sparseIndices: $sparseIndices,\n sparseValues: $sparseValues,\n defaultValue: $defaultValue\n };\n const attrs = { outputShape };\n return ENGINE.runKernelFunc(backend => backend.sparseToDense($sparseIndices, $sparseValues, outputShape, $defaultValue), inputs, null /* grad */, SparseToDense, attrs);\n}\nexport const sparseToDense = op({ sparseToDense_ });\n//# sourceMappingURL=sparse_to_dense.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { GatherNd } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Gather slices from input tensor into a Tensor with shape specified by\n * `indices`.\n *\n * `indices` is an K-dimensional integer tensor, best thought of as a\n * (K-1)-dimensional tensor of indices into input, where each element defines a\n * slice of input:\n * output[\\\\(i_0, ..., i_{K-2}\\\\)] = input[indices[\\\\(i_0, ..., i_{K-2}\\\\)]]\n *\n * Whereas in `tf.gather`, `indices` defines slices into the first dimension of\n * input, in `tf.gatherND`, `indices` defines slices into the first N dimensions\n * of input, where N = indices.shape[-1].\n *\n * The last dimension of indices can be at most the rank of input:\n * indices.shape[-1] <= input.rank\n *\n * The last dimension of `indices` corresponds to elements\n * (if indices.shape[-1] == input.rank) or slices\n * (if indices.shape[-1] < input.rank) along dimension indices.shape[-1] of\n * input.\n * The output tensor has shape\n * indices.shape[:-1] + input.shape[indices.shape[-1]:]\n *\n * Note that on CPU, if an out of bound index is found, an error is returned. On\n * GPU, if an out of bound index is found, a 0 is stored in the corresponding\n * output value.\n *\n * ```js\n * const indices = tf.tensor2d([0, 1, 1, 0], [2,2], 'int32');\n * const input = tf.tensor2d([9, 10, 11, 12], [2, 2]);\n * tf.gatherND(input, indices).print() // [10, 11]\n * ```\n *\n * @param x The tensor from which to gather values.\n * @param indices Index tensor, must be of type int32.\n *\n * @doc {heading: 'Operations', subheading: 'Slicing and Joining'}\n */\nfunction gatherND_(x, indices) {\n const $indices = convertToTensor(indices, 'indices', 'gatherND', 'int32');\n const $x = convertToTensor(x, 'x', 'gatherND');\n const forward = (backend) => {\n return backend.gatherND($x, $indices);\n };\n const inputs = { params: $x, indices: $indices };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, GatherNd);\n}\nexport const gatherND = op({ gatherND_ });\n//# sourceMappingURL=gather_nd.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as util from '../util';\n/**\n * Normalize noise shape based on provided tensor and noise shape.\n *\n * @param x Tensor.\n * @param noiseShape The shape for the randomly generated keep/drop flags, as\n * an array of numbers. Optional.\n * @returns Normalized noise shape.\n */\nexport function getNoiseShape(x, noiseShape) {\n if (noiseShape == null) {\n return x.shape.slice();\n }\n if (util.arraysEqual(x.shape, noiseShape)) {\n return noiseShape;\n }\n if (x.shape.length === noiseShape.length) {\n const newDimension = [];\n for (let i = 0; i < x.shape.length; i++) {\n if (noiseShape[i] == null && x.shape[i] != null) {\n newDimension.push(x.shape[i]);\n }\n else {\n newDimension.push(noiseShape[i]);\n }\n }\n return newDimension;\n }\n return noiseShape;\n}\n//# sourceMappingURL=dropout_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Tensor } from '../tensor';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { add } from './add';\nimport { div } from './div';\nimport { getNoiseShape } from './dropout_util';\nimport { floor } from './floor';\nimport { mul } from './mul';\nimport { op } from './operation';\nimport { randomUniform } from './random_uniform';\n/**\n * Computes dropout.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 2, 1]);\n * const rate = 0.75;\n * const output = tf.dropout(x, rate);\n * output.print();\n * ```\n *\n * @param x A floating point Tensor or TensorLike.\n * @param rate A float in the range [0, 1). The probability that each element\n * of x is discarded.\n * @param noiseShape An array of numbers of type int32, representing the\n * shape for randomly generated keep/drop flags. If the noiseShape has null\n * value, it will be automatically replaced with the x's relative dimension\n * size. Optional.\n * @param seed Used to create random seeds. 
Optional.\n * @returns A Tensor of the same shape of x.\n *\n * @doc {heading: 'Operations', subheading: 'Dropout'}\n */\nfunction dropout_(x, rate, noiseShape, seed) {\n const $x = convertToTensor(x, 'x', 'dropout');\n util.assert($x.dtype === 'float32', () => `x has to be a floating point tensor since it's going to be ` +\n `scaled, but got a ${$x.dtype} tensor instead.`);\n util.assert(rate >= 0 && rate < 1, () => `rate must be a float in the range [0, 1), but got ${rate}.`);\n if (rate === 0) {\n return x instanceof Tensor ? $x.clone() : $x;\n }\n const $noiseShape = getNoiseShape($x, noiseShape);\n const keepProb = 1 - rate;\n const multiplier = div(floor(add(randomUniform($noiseShape, 0, 1, 'float32', seed), keepProb)), keepProb);\n return mul($x, multiplier);\n}\nexport const dropout = op({ dropout_ });\n//# sourceMappingURL=dropout.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { tensor1d } from './tensor1d';\nexport function enclosingPowerOfTwo(value) {\n // Return 2**N for integer N such that 2**N >= value.\n return Math.floor(Math.pow(2, Math.ceil(Math.log(value) / Math.log(2.0))));\n}\nexport function cosineWindow(windowLength, a, b) {\n const even = 1 - windowLength % 2;\n const newValues = new Float32Array(windowLength);\n for (let i = 0; i < windowLength; ++i) {\n const cosArg = (2.0 * Math.PI * i) / (windowLength + even - 1);\n newValues[i] = a - b * Math.cos(cosArg);\n }\n return tensor1d(newValues, 'float32');\n}\n//# sourceMappingURL=signal_ops_util.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
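A hedged sketch of how `noiseShape` broadcasts the keep/drop decision in `tf.dropout`; the output is random, so only its structure is predictable:

```js
// noiseShape [1, 3] draws one keep/drop flag per column, so each column of x
// is either zeroed entirely or scaled by 1 / (1 - rate) = 2.
const x = tf.ones([2, 3]);
tf.dropout(x, 0.5, [1, 3], 42).print(); // seed 42 makes the draw repeatable
```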
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport { assert, assertShapesMatch, getTypedArrayFromDType } from '../util';\nimport { tensor } from './tensor';\n/**\n * Returns whether the targets are in the top K predictions.\n *\n * ```js\n * const predictions = tf.tensor2d([[20, 10, 40, 30], [30, 50, -20, 10]]);\n * const targets = tf.tensor1d([2, 0]);\n * const precision = await tf.inTopKAsync(predictions, targets);\n * precision.print();\n * ```\n * @param predictions 2-D or higher `tf.Tensor` with last dimension being\n * at least `k`.\n * @param targets 1-D or higher `tf.Tensor`.\n * @param k Optional Number of top elements to look at for computing precision,\n * default to 1.\n *\n * @doc {heading: 'Operations', subheading: 'Evaluation'}\n */\nasync function inTopKAsync_(predictions, targets, k = 1) {\n const $predictions = convertToTensor(predictions, 'predictions', 'inTopK');\n const $targets = convertToTensor(targets, 'targets', 'inTopK');\n assert($predictions.rank > 1, () => 'inTopK() expects the predictions to be of rank 2 or higher, ' +\n `but got ${$predictions.rank}`);\n assert($predictions.rank - 1 === $targets.rank, () => `predictions rank should be 1 larger than ` +\n `targets rank, but got predictions rank ` +\n `${$predictions.rank} and targets rank ${$targets.rank}`);\n assertShapesMatch($predictions.shape.slice(0, $predictions.shape.length - 1), $targets.shape, `predictions's shape should be align with the targets' shape, ` +\n 'except the last dimension.');\n const lastDim = $predictions.shape[$predictions.shape.length - 1];\n assert(k > 0 && k <= lastDim, () => `'k' passed to inTopK() must be > 0 && <= the predictions last ` +\n `dimension (${lastDim}), but got ${k}`);\n const predictionsVals = await $predictions.data();\n const targetsVals = await $targets.data();\n // Reshape predictionsVals into a 2d tensor [batch, lastDim]\n // and look up topK along lastDim.\n const [batch, size] = [predictionsVals.length / lastDim, lastDim];\n const precision = getTypedArrayFromDType('bool', batch);\n for (let b = 0; b < batch; b++) {\n const offset = b * size;\n const vals = predictionsVals.subarray(offset, offset + size);\n const valAndInd = [];\n for (let i = 0; i < vals.length; i++) {\n valAndInd.push({ value: vals[i], index: i });\n }\n valAndInd.sort((a, b) => b.value - a.value);\n precision[b] = 0;\n for (let i = 0; i < k; i++) {\n if (valAndInd[i].index === targetsVals[b]) {\n precision[b] = 1;\n break;\n }\n }\n }\n if (predictions !== $predictions) {\n $predictions.dispose();\n }\n if (targets !== $targets) {\n $targets.dispose();\n }\n // Output precision has the same shape as targets.\n return tensor(precision, $targets.shape, 'bool');\n}\nexport const inTopKAsync = inTopKAsync_;\n//# sourceMappingURL=in_top_k.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
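A usage sketch for `inTopKAsync` with `k = 2` (assuming the `tf` namespace is in scope):

```js
const predictions = tf.tensor2d([[20, 10, 40, 30], [30, 50, -20, 10]]);
const targets = tf.tensor1d([3, 0], 'int32');
const precision = await tf.inTopKAsync(predictions, targets, 2);
precision.print(); // [1, 1] -- each target index holds one of its row's two largest values
```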
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Conv2DBackpropFilter } from '../kernel_names';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the derivative of the filter of a 2D convolution.\n *\n * @param x The input tensor, of rank 4 or rank 3 of shape\n * [batch, height, width, inChannels]. If rank 3, batch of 1 is assumed.\n * @param dy The dy image, of rank 4 or rank 3, of shape\n * [batch, height, width, outDepth]. If rank 3, batch of 1 is assumed.\n * @param filterShape The shape of the filter, length 4,\n * [filterHeight, filterWidth, inDepth, outDepth].\n * @param strides The strides of the convolution: [strideHeight,\n * strideWidth].\n * @param pad A string from: 'same', 'valid'. The type of padding algorithm\n * used in the forward prop of the op.\n * @param dataFormat: An optional string from: \"NHWC\", \"NCHW\". Defaults to\n * \"NHWC\". Specify the data format of the input and output data. With the\n * default format \"NHWC\", the data is stored in the order of: [batch,\n * height, width, channels].\n * @param dimRoundingMode A string from: 'ceil', 'round', 'floor'. The\n * rounding mode used when computing output dimensions if pad is a\n * number. If none is provided, it will not round and error if the output\n * is of fractional size.\n */\nfunction conv2DBackpropFilter_(x, dy, filterShape, strides, pad, dataFormat = 'NHWC', dimRoundingMode) {\n let x4D = x;\n if (x.rank === 3) {\n x4D = reshape(x, [1, x.shape[0], x.shape[1], x.shape[2]]);\n }\n let dy4D = dy;\n if (dy4D.rank === 3) {\n dy4D = reshape(dy, [1, dy.shape[0], dy.shape[1], dy.shape[2]]);\n }\n util.assert(x4D.rank === 4, () => `Error in conv2dDerFilter: input must be rank 4, but got shape ` +\n `${x4D.shape}.`);\n util.assert(dy4D.rank === 4, () => `Error in conv2dDerFilter: dy must be rank 4, but got shape ` +\n `${dy4D.shape}.`);\n util.assert(filterShape.length === 4, () => `Error in conv2dDerFilter: filterShape must be length 4, but got ` +\n `${filterShape}.`);\n const inDepth = dataFormat === 'NHWC' ? x4D.shape[3] : x4D.shape[1];\n const outDepth = dataFormat === 'NHWC' ? 
dy4D.shape[3] : dy4D.shape[1];\n util.assert(inDepth === filterShape[2], () => `Error in conv2dDerFilter: depth of input ${inDepth}) must ` +\n `match input depth in filter (${filterShape[2]}.`);\n util.assert(outDepth === filterShape[3], () => `Error in conv2dDerFilter: depth of dy (${outDepth}) must ` +\n `match output depth for filter (${filterShape[3]}).`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in conv2dDerFilter: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const forward = backend => {\n const dilations = 1;\n const $dataFormat = conv_util.convertConv2DDataFormat(dataFormat);\n const convInfo = conv_util.computeConv2DInfo(x4D.shape, filterShape, strides, dilations, pad, dimRoundingMode, false, $dataFormat);\n return backend.conv2dDerFilter(x4D, dy4D, convInfo);\n };\n const inputs = { x: x4D, dy: dy4D };\n const attrs = { strides, pad, dataFormat, dimRoundingMode, filterShape };\n return ENGINE.runKernelFunc(forward, inputs, null, Conv2DBackpropFilter, attrs);\n}\nexport const conv2DBackpropFilter = op({ conv2DBackpropFilter_ });\n//# sourceMappingURL=conv2d_backprop_filter.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as broadcast_util from './broadcast_util';\nimport { elu } from './elu';\nimport { mul } from './mul';\nimport { prelu } from './prelu';\nimport { relu } from './relu';\nimport { relu6 } from './relu6';\nimport { reshape } from './reshape';\nimport { step } from './step';\nimport { sum } from './sum';\n// Returns gradient for fused activation.\nexport function getFusedDyActivation(dy, y, activation) {\n if (activation == null || activation === 'linear') {\n return dy;\n }\n if (activation === 'relu') {\n return mul(dy, step(y));\n }\n throw new Error(`Cannot compute gradient for fused activation ${activation}.`);\n}\n// Returns gradient for fused bias.\nexport function getFusedBiasGradient(bias, dyActivation) {\n let res = dyActivation;\n const reduceAxes = broadcast_util.getReductionAxes(bias.shape, dyActivation.shape);\n if (reduceAxes.length > 0) {\n res = sum(res, reduceAxes);\n }\n return reshape(res, bias.shape);\n}\nexport function applyActivation(x, activation, preluActivationWeights) {\n if (activation === 'linear') {\n return x;\n }\n else if (activation === 'relu') {\n return relu(x);\n }\n else if (activation === 'elu') {\n return elu(x);\n }\n else if (activation === 'relu6') {\n return relu6(x);\n }\n else if (activation === 'prelu') {\n return prelu(x, preluActivationWeights);\n }\n throw new Error(`Unknown fused activation ${activation}.`);\n}\n// Whether we should call fused ops.\nexport const shouldFuse = (gradientDepth, activation) => {\n const gradientMode = gradientDepth > 0;\n return !gradientMode || activation === 'linear';\n};\n//# 
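`conv2DBackpropFilter` is mostly consumed internally by the conv2d gradient. As a hedged sketch, the same filter derivative can be obtained at the user level by asking `tf.grads` for the gradient of a plain `tf.conv2d`; the shapes below are illustrative assumptions:

```js
const x = tf.randomNormal([1, 4, 4, 1]);  // NHWC input
const w = tf.randomNormal([2, 2, 1, 1]);  // [filterHeight, filterWidth, inDepth, outDepth]
const conv = (x, w) => tf.conv2d(x, w, 1, 'same');
const [dx, dw] = tf.grads(conv)([x, w]);
console.log(dw.shape); // [2, 2, 1, 1] -- filter derivative, same shape as w
```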
sourceMappingURL=fused_util.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { customGrad } from '../../gradients';\nimport { FusedConv2D } from '../../kernel_names';\nimport { makeTypesMatch } from '../../tensor_util';\nimport { convertToTensor } from '../../tensor_util_env';\nimport * as util from '../../util';\nimport { add } from '../add';\nimport * as broadcast_util from '../broadcast_util';\nimport { conv2d as unfusedConv2d } from '../conv2d';\nimport { conv2DBackpropFilter } from '../conv2d_backprop_filter';\nimport { conv2DBackpropInput } from '../conv2d_backprop_input';\nimport * as conv_util from '../conv_util';\nimport { applyActivation, getFusedBiasGradient, getFusedDyActivation, shouldFuse } from '../fused_util';\nimport { op } from '../operation';\nimport { reshape } from '../reshape';\n/**\n * Computes a 2D convolution over the input x, optionally fused with adding a\n * bias and applying an activation.\n *\n * ```js\n * const inputDepth = 2;\n * const inShape = [2, 2, 2, inputDepth];\n * const outputDepth = 2;\n * const fSize = 1;\n * const pad = 0;\n * const strides = 1;\n *\n * const x = tf.tensor4d( [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,\n * 16], inShape);\n * const w = tf.tensor4d([-1, 1, -2, 0.5], [fSize, fSize, inputDepth,\n * outputDepth]);\n *\n * tf.fused.conv2d({ x, filter: w, strides, pad, dataFormat: 'NHWC',\n * dilations: [1, 1], bias: tf.scalar(5), activation: 'relu' }).print();\n * ```\n *\n * @param obj An object with the following properties:\n * @param x The input tensor, of rank 4 or rank 3, of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is\n * assumed.\n * @param filter The filter, rank 4, of shape\n * `[filterHeight, filterWidth, inDepth, outDepth]`.\n * @param strides The strides of the convolution: `[strideHeight,\n * strideWidth]`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid` output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dataFormat An optional string from: \"NHWC\", \"NCHW\". Defaults to\n * \"NHWC\". Specify the data format of the input and output data. With the\n * default format \"NHWC\", the data is stored in the order of: [batch,\n * height, width, channels]. Only \"NHWC\" is currently supported.\n * @param dilations The dilation rates: `[dilationHeight, dilationWidth]`\n * in which we sample input values across the height and width dimensions\n * in atrous convolution. Defaults to `[1, 1]`. If `dilations` is a single\n * number, then `dilationHeight == dilationWidth`. 
If it is greater than\n * 1, then all values of `strides` must be 1.\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. If none is provided, it will not round\n * and error if the output is of fractional size.\n * @param bias Tensor to be added to the result.\n * @param activation Name of activation kernel (defaults to `linear`) to be\n * applied\n * after biasAdd.\n * @param preluActivationWeights Tensor of prelu weights to be applied as part\n * of a `prelu` activation, typically the same shape as `x`.\n */\nfunction fusedConv2d_({ x, filter, strides, pad, dataFormat = 'NHWC', dilations = [1, 1], dimRoundingMode, bias, activation = 'linear', preluActivationWeights }) {\n activation = activation || 'linear';\n if (shouldFuse(ENGINE.state.gradientDepth, activation) === false) {\n let result = unfusedConv2d(x, filter, strides, pad, dataFormat, dilations, dimRoundingMode);\n if (bias != null) {\n result = add(result, bias);\n }\n return applyActivation(result, activation, preluActivationWeights);\n }\n const $x = convertToTensor(x, 'x', 'conv2d');\n const $filter = convertToTensor(filter, 'filter', 'conv2d');\n let x4D = $x;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n reshapedTo4D = true;\n x4D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2]]);\n }\n util.assert(x4D.rank === 4, () => `Error in fused conv2d: input must be rank 4, but got rank ` +\n `${x4D.rank}.`);\n util.assert($filter.rank === 4, () => `Error in fused conv2d: filter must be rank 4, but got rank ` +\n `${$filter.rank}.`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in fused conv2d: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n util.assert(x4D.shape[3] === $filter.shape[2], () => `Error in conv2d: depth of input (${x4D.shape[3]}) must match ` +\n `input depth for filter ${$filter.shape[2]}.`);\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in conv2D: Either strides or dilations must be 1. ' +\n `Got strides ${strides} and dilations '${dilations}'`);\n util.assert(dataFormat === 'NHWC', () => `Error in conv2d: got dataFormat of ${dataFormat} but only NHWC is currently supported.`);\n const convInfo = conv_util.computeConv2DInfo(x4D.shape, $filter.shape, strides, dilations, pad, dimRoundingMode);\n let $bias;\n if (bias != null) {\n $bias = convertToTensor(bias, 'bias', 'fused conv2d');\n [$bias] = makeTypesMatch($bias, $x);\n broadcast_util.assertAndGetBroadcastShape(convInfo.outShape, $bias.shape);\n }\n let $preluActivationWeights;\n if (preluActivationWeights != null) {\n $preluActivationWeights = convertToTensor(preluActivationWeights, 'prelu weights', 'fused conv2d');\n }\n const grad = (dy, saved) => {\n const [$filter, x4D, y, $bias] = saved;\n const dyActivation = getFusedDyActivation(dy, y, activation);\n util.assert(conv_util.tupleValuesAreOne(dilations), () => 'Error in gradient of fused conv2D: ' +\n `dilation rates greater than 1 ` +\n `are not yet supported in gradients. 
Got dilations '${dilations}'`);\n const xDer = conv2DBackpropInput(x4D.shape, dyActivation, $filter, strides, pad);\n const filterDer = conv2DBackpropFilter(x4D, dyActivation, $filter.shape, strides, pad);\n const der = [xDer, filterDer];\n if ($bias != null) {\n const biasDer = getFusedBiasGradient($bias, dyActivation);\n der.push(biasDer);\n }\n return der;\n };\n const forward = (backend) => {\n const res = backend.fusedConv2d({\n input: x4D,\n filter: $filter,\n convInfo,\n bias: $bias,\n activation,\n preluActivationWeights: $preluActivationWeights\n });\n return res;\n };\n const inputs = {\n x: x4D,\n filter: $filter,\n bias: $bias,\n preluActivationWeights: $preluActivationWeights\n };\n const attrs = { strides, pad, dataFormat, dilations, dimRoundingMode, activation };\n // Depending on the the params passed in we will have different number of\n // inputs and thus a a different number of elements in the gradient.\n if (bias == null) {\n const customOp = customGrad((x4D, filter, save) => {\n let res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, FusedConv2D, attrs);\n save([filter, x4D, res]);\n if (reshapedTo4D) {\n res = reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return { value: res, gradFunc: grad };\n });\n return customOp(x4D, $filter);\n }\n else {\n const customOpWithBias = customGrad((x4D, filter, bias, save) => {\n let res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, FusedConv2D, attrs);\n save([filter, x4D, res, bias]);\n if (reshapedTo4D) {\n res = reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return { value: res, gradFunc: grad };\n });\n return customOpWithBias(x4D, $filter, $bias);\n }\n}\nexport const conv2d = op({ fusedConv2d_ });\n//# sourceMappingURL=conv2d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
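A sketch checking that the fused path matches `conv2d` + bias + relu composed manually; the difference should be roughly zero up to float error, and the shapes are arbitrary assumptions:

```js
const x = tf.randomNormal([1, 5, 5, 3]);
const w = tf.randomNormal([3, 3, 3, 4]);
const bias = tf.randomNormal([4]);
const fused = tf.fused.conv2d({x, filter: w, strides: 1, pad: 'same', bias, activation: 'relu'});
const manual = tf.relu(tf.add(tf.conv2d(x, w, 1, 'same'), bias));
fused.sub(manual).abs().max().print(); // ~0
```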
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { DepthwiseConv2dNativeBackpropFilter } from '../kernel_names';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nfunction depthwiseConv2dNativeBackpropFilter_(x, dy, filterShape, strides, pad, dilations = [1, 1], dimRoundingMode) {\n let x4D = x;\n if (x.rank === 3) {\n x4D = reshape(x, [1, x.shape[0], x.shape[1], x.shape[2]]);\n }\n let dy4D = dy;\n if (dy4D.rank === 3) {\n dy4D = reshape(dy, [1, dy.shape[0], dy.shape[1], dy.shape[2]]);\n }\n const forward = backend => {\n const convInfo = conv_util.computeConv2DInfo(x.shape, filterShape, strides, dilations, pad, dimRoundingMode, true /* depthwise */);\n return backend.depthwiseConv2DDerFilter(x4D, dy4D, convInfo);\n };\n const inputs = { x: x4D, dy: dy4D };\n const attrs = { strides, pad, dimRoundingMode, dilations, filterShape };\n return ENGINE.runKernelFunc(forward, inputs, null, DepthwiseConv2dNativeBackpropFilter, attrs);\n}\nexport const depthwiseConv2dNativeBackpropFilter = op({ depthwiseConv2dNativeBackpropFilter_ });\n//# sourceMappingURL=depthwise_conv2d_native_backprop_filter.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { DepthwiseConv2dNativeBackpropInput } from '../kernel_names';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nfunction depthwiseConv2dNativeBackpropInput_(xShape, dy, filter, strides, pad, dilations = [1, 1], dimRoundingMode) {\n let dy4D = dy;\n let reshapedTo4D = false;\n if (dy.rank === 3) {\n reshapedTo4D = true;\n dy4D = reshape(dy, [1, dy.shape[0], dy.shape[1], dy.shape[2]]);\n }\n const forward = backend => {\n const convInfo = conv_util.computeConv2DInfo(xShape, filter.shape, strides, dilations, pad, dimRoundingMode, true /* depthwise */);\n return backend.depthwiseConv2DDerInput(dy4D, filter, convInfo);\n };\n const inputs = { dy: dy4D, filter };\n const attrs = { strides, pad, dimRoundingMode, dilations, inputShape: xShape };\n const res = ENGINE.runKernelFunc(forward, inputs, null, DepthwiseConv2dNativeBackpropInput, attrs);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const depthwiseConv2dNativeBackpropInput = op({ depthwiseConv2dNativeBackpropInput_ });\n//# sourceMappingURL=depthwise_conv2d_native_backprop_input.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { customGrad } from '../../gradients';\nimport { FusedDepthwiseConv2D } from '../../kernel_names';\nimport { makeTypesMatch } from '../../tensor_util';\nimport { convertToTensor } from '../../tensor_util_env';\nimport * as util from '../../util';\nimport { add } from '../add';\nimport * as broadcast_util from '../broadcast_util';\nimport * as conv_util from '../conv_util';\nimport { depthwiseConv2d as unfusedDepthwiseConv2d } from '../depthwise_conv2d';\nimport { depthwiseConv2dNativeBackpropFilter } from '../depthwise_conv2d_native_backprop_filter';\nimport { depthwiseConv2dNativeBackpropInput } from '../depthwise_conv2d_native_backprop_input';\nimport { applyActivation, getFusedBiasGradient, getFusedDyActivation, shouldFuse } from '../fused_util';\nimport { op } from '../operation';\nimport { reshape } from '../reshape';\n/**\n * Computes depthwise 2D convolution, optionally fused with adding a\n * bias and applying an activation.\n *\n * Given a 4D `input` array and a `filter` array of shape\n * `[filterHeight, filterWidth, inChannels, channelMultiplier]` containing\n * `inChannels` convolutional filters of depth 1, this op applies a\n * different filter to each input channel (expanding from 1 channel to\n * `channelMultiplier` channels for each), then concatenates the results\n * together. The output has `inChannels * channelMultiplier` channels.\n *\n * See\n * [https://www.tensorflow.org/api_docs/python/tf/nn/depthwise_conv2d](\n * https://www.tensorflow.org/api_docs/python/tf/nn/depthwise_conv2d)\n * for more details.\n *\n * @param obj An object with the following properties:\n * @param x The input tensor, of rank 4 or rank 3, of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is\n * assumed.\n * @param filter The filter tensor, rank 4, of shape\n * `[filterHeight, filterWidth, inChannels, channelMultiplier]`.\n * @param strides The strides of the convolution: `[strideHeight,\n * strideWidth]`. If strides is a single number, then `strideHeight ==\n * strideWidth`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dilations The dilation rates: `[dilationHeight, dilationWidth]`\n * in which we sample input values across the height and width dimensions\n * in atrous convolution. Defaults to `[1, 1]`. If `rate` is a single\n * number, then `dilationHeight == dilationWidth`. If it is greater than\n * 1, then all values of `strides` must be 1.\n * @param dataFormat: An optional string from: \"NHWC\", \"NCHW\". 
Defaults to\n * \"NHWC\". Specify the data format of the input and output data. With the\n * default format \"NHWC\", the data is stored in the order of: [batch,\n * height, width, channels]. Only \"NHWC\" is currently supported.\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. If none is provided, it will not round\n * and error if the output is of fractional size.\n * @param bias Tensor to be added to the result.\n * @param activation Name of activation kernel (defaults to `linear`).\n * @param preluActivationWeights Tensor of prelu weights to be applied as part\n * of a `prelu` activation, typically the same shape as `x`.\n */\nfunction fusedDepthwiseConv2d_({ x, filter, strides, pad, dataFormat = 'NHWC', dilations = [1, 1], dimRoundingMode, bias, activation = 'linear', preluActivationWeights }) {\n if (shouldFuse(ENGINE.state.gradientDepth, activation) === false) {\n let result = unfusedDepthwiseConv2d(x, filter, strides, pad, dataFormat, dilations, dimRoundingMode);\n if (bias != null) {\n result = add(result, bias);\n }\n return applyActivation(result, activation, preluActivationWeights);\n }\n const $x = convertToTensor(x, 'x', 'depthwiseConv2d');\n const $filter = convertToTensor(filter, 'filter', 'depthwiseConv2d');\n let x4D = $x;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n reshapedTo4D = true;\n x4D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2]]);\n }\n util.assert(x4D.rank === 4, () => `Error in fused depthwiseConv2d: input must be rank 4, but got ` +\n `rank ${x4D.rank}.`);\n util.assert($filter.rank === 4, () => `Error in fused depthwiseConv2d: filter must be rank 4, ` +\n `but got rank ${$filter.rank}.`);\n util.assert(x4D.shape[3] === $filter.shape[2], () => `Error in fused depthwiseConv2d: number of input channels ` +\n `(${x4D.shape[3]}) must match the inChannels dimension in ` +\n `filter ${$filter.shape[2]}.`);\n if (dilations == null) {\n dilations = [1, 1];\n }\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in fused depthwiseConv2d: Either strides or dilations must ' +\n `be 1. Got strides ${strides} and dilations '${dilations}'`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in fused depthwiseConv2d: pad must be an integer when ` +\n `using dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const convInfo = conv_util.computeConv2DInfo(x4D.shape, $filter.shape, strides, dilations, pad, dimRoundingMode, true /* depthwise */);\n let $bias;\n if (bias != null) {\n $bias = convertToTensor(bias, 'bias', 'fused conv2d');\n [$bias] = makeTypesMatch($bias, $x);\n broadcast_util.assertAndGetBroadcastShape(convInfo.outShape, $bias.shape);\n }\n let $preluActivationWeights;\n if (preluActivationWeights != null) {\n $preluActivationWeights = convertToTensor(preluActivationWeights, 'prelu weights', 'fused depthwiseConv2d');\n }\n const grad = (dy, saved) => {\n util.assert(conv_util.tupleValuesAreOne(dilations), () => 'Error in gradient of fused depthwiseConv2d: dilation rates ' +\n `greater than 1 are not yet supported. 
Got dilations ` +\n `'${dilations}'`);\n const [$filter, x4D, y, bias] = saved;\n const dyActivation = getFusedDyActivation(dy, y, activation);\n const xDer = depthwiseConv2dNativeBackpropInput(x4D.shape, dyActivation, $filter, strides, pad, dilations, dimRoundingMode);\n const filterDer = depthwiseConv2dNativeBackpropFilter(x4D, dyActivation, $filter.shape, strides, pad, dilations, dimRoundingMode);\n if (bias != null) {\n const biasDer = getFusedBiasGradient($bias, dyActivation);\n return [xDer, filterDer, biasDer];\n }\n return [xDer, filterDer];\n };\n const forward = (backend) => {\n const res = backend.fusedDepthwiseConv2D({\n input: x4D,\n filter: $filter,\n convInfo,\n bias: $bias,\n activation,\n preluActivationWeights: $preluActivationWeights\n });\n return res;\n };\n const inputs = {\n x: x4D,\n filter: $filter,\n bias: $bias,\n preluActivationWeights: $preluActivationWeights\n };\n const attrs = { strides, pad, dataFormat, dilations, dimRoundingMode, activation };\n // Depending on the the params passed in we will have different number of\n // inputs and thus a a different number of elements in the gradient.\n if (bias == null) {\n const customOp = customGrad((x4D, filter, save) => {\n let res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, FusedDepthwiseConv2D, attrs);\n save([filter, x4D, res]);\n if (reshapedTo4D) {\n res = reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return { value: res, gradFunc: grad };\n });\n return customOp(x4D, $filter);\n }\n else {\n const customOpWithBias = customGrad((x4D, filter, bias, save) => {\n let res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, FusedDepthwiseConv2D, attrs);\n save([filter, x4D, res, bias]);\n if (reshapedTo4D) {\n res = reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return { value: res, gradFunc: grad };\n });\n return customOpWithBias(x4D, $filter, $bias);\n }\n}\nexport const depthwiseConv2d = op({ fusedDepthwiseConv2d_ });\n//# sourceMappingURL=depthwise_conv2d.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
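A usage sketch for the fused depthwise op; with a channel multiplier of 2, the 3 input channels expand to 6 output channels (shapes are illustrative assumptions):

```js
const x = tf.randomNormal([1, 8, 8, 3]);
const w = tf.randomNormal([3, 3, 3, 2]); // [fh, fw, inChannels, channelMultiplier]
const bias = tf.zeros([6]);
const y = tf.fused.depthwiseConv2d({x, filter: w, strides: 1, pad: 'same', bias, activation: 'relu6'});
console.log(y.shape); // [1, 8, 8, 6]
```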
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { customGrad } from '../../gradients';\nimport { _FusedMatMul } from '../../kernel_names';\nimport { makeTypesMatch } from '../../tensor_util';\nimport { convertToTensor } from '../../tensor_util_env';\nimport * as util from '../../util';\nimport { add } from '../add';\nimport * as broadcast_util from '../broadcast_util';\nimport { applyActivation, getFusedBiasGradient, getFusedDyActivation, shouldFuse } from '../fused_util';\nimport { matMul as unfusedMatMul } from '../mat_mul';\nimport { op } from '../operation';\nimport { reshape } from '../reshape';\n/**\n * Computes the dot product of two matrices with optional activation and bias.\n *\n * ```js\n * const a = tf.tensor2d([-1, -2], [1, 2]);\n * const b = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n * const bias = tf.tensor2d([1, 2], [1, 2]);\n *\n * tf.fused.matMul({a, b, bias, activation: 'relu'}).print();\n * ```\n *\n * @param obj An object with the following properties:\n * - `a` First matrix in dot product operation.\n * - `b` Second matrix in dot product operation.\n * - `transposeA` If true, `a` is transposed before multiplication.\n * - `transposeB` If true, `b` is transposed before multiplication.\n * - `bias` Matrix to be added to the result.\n * - `activation` Name of activation kernel (defaults to `linear`).\n * - `preluActivationWeights` Tensor of prelu weights.\n */\nfunction fusedMatMul_({ a, b, transposeA = false, transposeB = false, bias, activation = 'linear', preluActivationWeights }) {\n if (shouldFuse(ENGINE.state.gradientDepth, activation) === false) {\n let result = unfusedMatMul(a, b, transposeA, transposeB);\n if (bias != null) {\n result = add(result, bias);\n }\n return applyActivation(result, activation, preluActivationWeights);\n }\n let $a = convertToTensor(a, 'a', 'fused matMul');\n let $b = convertToTensor(b, 'b', 'fused matMul');\n [$a, $b] = makeTypesMatch($a, $b);\n const innerShapeA = transposeA ? $a.shape[$a.rank - 2] : $a.shape[$a.rank - 1];\n const innerShapeB = transposeB ? $b.shape[$b.rank - 1] : $b.shape[$b.rank - 2];\n const outerShapeA = transposeA ? $a.shape[$a.rank - 1] : $a.shape[$a.rank - 2];\n const outerShapeB = transposeB ? 
$b.shape[$b.rank - 2] : $b.shape[$b.rank - 1];\n const outerDimsA = $a.shape.slice(0, -2);\n const outerDimsB = $b.shape.slice(0, -2);\n const batchDimA = util.sizeFromShape(outerDimsA);\n const batchDimB = util.sizeFromShape(outerDimsB);\n util.assert($a.rank >= 2 && $b.rank >= 2 && $a.rank === $b.rank, () => `Error in fused matMul: inputs must have the same rank of at least ` +\n `2, got ranks ${$a.rank} and ${$b.rank}.`);\n util.assert(util.arraysEqual(outerDimsA, outerDimsB), () => `Error in fused matMul: outer dimensions (${outerDimsA}) and (` +\n `${outerDimsB}) of Tensors with shapes ${$a.shape} and ` +\n `${$b.shape} must match.`);\n util.assert(innerShapeA === innerShapeB, () => `Error in fused matMul: inner shapes (${innerShapeA}) and (` +\n `${innerShapeB}) of Tensors with shapes ${$a.shape} and ` +\n `${$b.shape} and transposeA=${transposeA}` +\n ` and transposeB=${transposeB} must match.`);\n const outShape = $a.shape.slice(0, -2).concat([outerShapeA, outerShapeB]);\n const a3D = transposeA ?\n reshape($a, [batchDimA, innerShapeA, outerShapeA]) :\n reshape($a, [batchDimA, outerShapeA, innerShapeA]);\n const b3D = transposeB ?\n reshape($b, [batchDimB, outerShapeB, innerShapeB]) :\n reshape($b, [batchDimB, innerShapeB, outerShapeB]);\n let $bias;\n if (bias != null) {\n $bias = convertToTensor(bias, 'bias', 'fused matMul');\n [$bias] = makeTypesMatch($bias, $a);\n broadcast_util.assertAndGetBroadcastShape(outShape, $bias.shape);\n }\n let $preluActivationWeights;\n if (preluActivationWeights != null) {\n $preluActivationWeights = convertToTensor(preluActivationWeights, 'prelu weights', 'fused matMul');\n }\n const grad = (dy, saved) => {\n const [a3D, b3D, y, $bias] = saved;\n // we reshape dy because the result of the forward is not\n // necessarily going to be a 3d tensor due to a reshape done at the end of\n // the customOp.\n const dyActivation = getFusedDyActivation(reshape(dy, y.shape), y, activation);\n let aDer;\n let bDer;\n if (!transposeA && !transposeB) {\n aDer = unfusedMatMul(dyActivation, b3D, false, true);\n bDer = unfusedMatMul(a3D, dyActivation, true, false);\n }\n else if (!transposeA && transposeB) {\n aDer = unfusedMatMul(dyActivation, b3D, false, false);\n bDer = unfusedMatMul(dyActivation, a3D, true, false);\n }\n else if (transposeA && !transposeB) {\n aDer = unfusedMatMul(b3D, dyActivation, false, true);\n bDer = unfusedMatMul(a3D, dyActivation, false, false);\n }\n else {\n aDer = unfusedMatMul(b3D, dyActivation, true, true);\n bDer = unfusedMatMul(dyActivation, a3D, true, true);\n }\n if (bias != null) {\n const biasDer = getFusedBiasGradient($bias, dyActivation);\n return [aDer, bDer, biasDer];\n }\n else {\n return [aDer, bDer];\n }\n };\n const forward = (backend) => {\n const y = backend.fusedBatchMatMul({\n a: a3D,\n b: b3D,\n transposeA,\n transposeB,\n bias: $bias,\n activation,\n preluActivationWeights: $preluActivationWeights\n });\n return y;\n };\n const inputs = {\n a: a3D,\n b: b3D,\n bias: $bias,\n preluActivationWeights: $preluActivationWeights\n };\n const attrs = { transposeA, transposeB, activation };\n // Depending on the the params passed in we will have different number of\n // inputs and thus a a different number of elements in the gradient.\n if (bias == null) {\n const customOp = customGrad((a3D, b3D, save) => {\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, _FusedMatMul, attrs);\n save([a3D, b3D, res]);\n return { value: reshape(res, outShape), gradFunc: grad };\n });\n return customOp(a3D, b3D);\n }\n 
else {\n const customOpWithBias = customGrad((a3D, b3D, $bias, save) => {\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, _FusedMatMul, attrs);\n save([a3D, b3D, res, $bias]);\n return { value: reshape(res, outShape), gradFunc: grad };\n });\n return customOpWithBias(a3D, b3D, $bias);\n }\n}\nexport const matMul = op({ fusedMatMul_ });\n//# sourceMappingURL=mat_mul.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { conv2d } from './fused/conv2d';\nimport { depthwiseConv2d } from './fused/depthwise_conv2d';\nimport { matMul } from './fused/mat_mul';\nexport { conv2d, depthwiseConv2d, matMul };\n//# sourceMappingURL=fused_ops.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { op } from '../operation';\nimport { cosineWindow } from '../signal_ops_util';\n/**\n * Generate a hamming window.\n *\n * See: https://en.wikipedia.org/wiki/Window_function#Hann_and_Hamming_windows\n *\n * ```js\n * tf.signal.hammingWindow(10).print();\n * ```\n * @param The length of window\n *\n * @doc {heading: 'Operations', subheading: 'Signal', namespace: 'signal'}\n */\nfunction hammingWindow_(windowLength) {\n return cosineWindow(windowLength, 0.54, 0.46);\n}\nexport const hammingWindow = op({ hammingWindow_ });\n//# sourceMappingURL=hamming_window.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
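A sketch of `tf.fused.matMul` with `transposeB`, which lets `b` be stored as `[outDim, innerDim]` without materializing the transpose:

```js
const a = tf.tensor2d([[1, 2], [3, 4]]);          // [2, 2]
const b = tf.tensor2d([[1, 0], [0, 1], [1, 1]]);  // [3, 2], used as b^T
const bias = tf.tensor1d([0.5, 0.5, 0.5]);
tf.fused.matMul({a, b, transposeB: true, bias, activation: 'relu'}).print();
// [[1.5, 2.5, 3.5], [3.5, 4.5, 7.5]]
```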
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { op } from '../operation';\nimport { cosineWindow } from '../signal_ops_util';\n/**\n * Generate a Hann window.\n *\n * See: https://en.wikipedia.org/wiki/Window_function#Hann_and_Hamming_windows\n *\n * ```js\n * tf.signal.hannWindow(10).print();\n * ```\n * @param The length of window\n *\n * @doc {heading: 'Operations', subheading: 'Signal', namespace: 'signal'}\n */\nfunction hannWindow_(windowLength) {\n return cosineWindow(windowLength, 0.5, 0.5);\n}\nexport const hannWindow = op({ hannWindow_ });\n//# sourceMappingURL=hann_window.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { concat } from '../concat';\nimport { fill } from '../fill';\nimport { op } from '../operation';\nimport { reshape } from '../reshape';\nimport { slice } from '../slice';\nimport { tensor2d } from '../tensor2d';\n/**\n * Expands input into frames of frameLength.\n * Slides a window size with frameStep.\n *\n * ```js\n * tf.signal.frame([1, 2, 3], 2, 1).print();\n * ```\n * @param signal The input tensor to be expanded\n * @param frameLength Length of each frame\n * @param frameStep The frame hop size in samples.\n * @param padEnd Whether to pad the end of signal with padValue.\n * @param padValue An number to use where the input signal does\n * not exist when padEnd is True.\n *\n * @doc {heading: 'Operations', subheading: 'Signal', namespace: 'signal'}\n */\nfunction frame_(signal, frameLength, frameStep, padEnd = false, padValue = 0) {\n let start = 0;\n const output = [];\n while (start + frameLength <= signal.size) {\n output.push(slice(signal, start, frameLength));\n start += frameStep;\n }\n if (padEnd) {\n while (start < signal.size) {\n const padLen = (start + frameLength) - signal.size;\n const pad = concat([\n slice(signal, start, frameLength - padLen), fill([padLen], padValue)\n ]);\n output.push(pad);\n start += frameStep;\n }\n }\n if (output.length === 0) {\n return tensor2d([], [0, frameLength]);\n }\n return reshape(concat(output), [output.length, frameLength]);\n}\nexport const frame = op({ frame_ });\n//# sourceMappingURL=frame.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
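A short sketch of the two windows (both are `cosineWindow(length, a, b)` with different coefficients) and of `frame` with end padding:

```js
tf.signal.hammingWindow(5).print(); // a = 0.54, b = 0.46
tf.signal.hannWindow(5).print();    // a = 0.5,  b = 0.5
const signal = tf.tensor1d([1, 2, 3, 4, 5]);
tf.signal.frame(signal, 3, 2, true, 0).print();
// [[1, 2, 3], [3, 4, 5], [5, 0, 0]] -- the last frame is zero-padded
```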
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { concat } from '../concat';\nimport { mul } from '../mul';\nimport { op } from '../operation';\nimport { enclosingPowerOfTwo } from '../signal_ops_util';\nimport { slice } from '../slice';\nimport { rfft } from '../spectral/rfft';\nimport { frame } from './frame';\nimport { hannWindow } from './hann_window';\n/**\n * Computes the Short-time Fourier Transform of signals\n * See: https://en.wikipedia.org/wiki/Short-time_Fourier_transform\n *\n * ```js\n * const input = tf.tensor1d([1, 1, 1, 1, 1])\n * tf.signal.stft(input, 3, 1).print();\n * ```\n * @param signal 1-dimensional real value tensor.\n * @param frameLength The window length of samples.\n * @param frameStep The number of samples to step.\n * @param fftLength The size of the FFT to apply.\n * @param windowFn A callable that takes a window length and returns 1-d tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Signal', namespace: 'signal'}\n */\nfunction stft_(signal, frameLength, frameStep, fftLength, windowFn = hannWindow) {\n if (fftLength == null) {\n fftLength = enclosingPowerOfTwo(frameLength);\n }\n const framedSignal = frame(signal, frameLength, frameStep);\n const windowedSignal = mul(framedSignal, windowFn(frameLength));\n const output = [];\n for (let i = 0; i < framedSignal.shape[0]; i++) {\n output.push(rfft(slice(windowedSignal, [i, 0], [1, frameLength]), fftLength));\n }\n return concat(output);\n}\nexport const stft = op({ stft_ });\n//# sourceMappingURL=stft.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
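A sketch of the resulting STFT shape: a length-8 signal with frameLength 4 and frameStep 2 yields 3 frames, and the default fftLength (the enclosing power of two, 4) gives 4/2 + 1 = 3 rfft bins per frame:

```js
const signal = tf.tensor1d([1, 1, 1, 1, 1, 1, 1, 1]);
const spec = tf.signal.stft(signal, 4, 2); // fftLength defaults to 4, window to hann
console.log(spec.shape); // [3, 3] (complex-valued)
```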
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { CropAndResize } from '../../kernel_names';\nimport { convertToTensor } from '../../tensor_util_env';\nimport * as util from '../../util';\nimport { op } from '../operation';\n/**\n * Extracts crops from the input image tensor and resizes them using bilinear\n * sampling or nearest neighbor sampling (possibly with aspect ratio change)\n * to a common output size specified by cropSize.\n *\n * @param image 4d tensor of shape `[batch,imageHeight,imageWidth, depth]`,\n * where imageHeight and imageWidth must be positive, specifying the\n * batch of images from which to take crops\n * @param boxes 2d float32 tensor of shape `[numBoxes, 4]`. Each entry is\n * `[y1, x1, y2, x2]`, where `(y1, x1)` and `(y2, x2)` are the normalized\n * coordinates of the box in the boxInd[i]'th image in the batch\n * @param boxInd 1d int32 tensor of shape `[numBoxes]` with values in range\n * `[0, batch)` that specifies the image that the `i`-th box refers to.\n * @param cropSize 1d int32 tensor of 2 elements `[cropHeigh, cropWidth]`\n * specifying the size to which all crops are resized to.\n * @param method Optional string from `'bilinear' | 'nearest'`,\n * defaults to bilinear, which specifies the sampling method for resizing\n * @param extrapolationValue A threshold for deciding when to remove boxes based\n * on score. 
Defaults to 0.\n * @return A 4D tensor of the shape `[numBoxes,cropHeight,cropWidth,depth]`\n *\n * @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'}\n */\nfunction cropAndResize_(image, boxes, boxInd, cropSize, method, extrapolationValue) {\n const $image = convertToTensor(image, 'image', 'cropAndResize');\n const $boxes = convertToTensor(boxes, 'boxes', 'cropAndResize', 'float32');\n const $boxInd = convertToTensor(boxInd, 'boxInd', 'cropAndResize', 'int32');\n method = method || 'bilinear';\n extrapolationValue = extrapolationValue || 0;\n const numBoxes = $boxes.shape[0];\n util.assert($image.rank === 4, () => 'Error in cropAndResize: image must be rank 4,' +\n `but got rank ${$image.rank}.`);\n util.assert($boxes.rank === 2 && $boxes.shape[1] === 4, () => `Error in cropAndResize: boxes must be have size [${numBoxes},4] ` +\n `but had shape ${$boxes.shape}.`);\n util.assert($boxInd.rank === 1 && $boxInd.shape[0] === numBoxes, () => `Error in cropAndResize: boxInd must be have size [${numBoxes}] ` +\n `but had shape ${$boxes.shape}.`);\n util.assert(cropSize.length === 2, () => `Error in cropAndResize: cropSize must be of length 2, but got ` +\n `length ${cropSize.length}.`);\n util.assert(cropSize[0] >= 1 && cropSize[1] >= 1, () => `cropSize must be atleast [1,1], but was ${cropSize}`);\n util.assert(method === 'bilinear' || method === 'nearest', () => `method must be bilinear or nearest, but was ${method}`);\n const forward = (backend) => backend.cropAndResize($image, $boxes, $boxInd, cropSize, method, extrapolationValue);\n const inputs = { image: $image, boxes: $boxes, boxInd: $boxInd };\n const attrs = { method, extrapolationValue, cropSize };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, CropAndResize, attrs);\n return res;\n}\nexport const cropAndResize = op({ cropAndResize_ });\n//# sourceMappingURL=crop_and_resize.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { FlipLeftRight } from '../../kernel_names';\nimport { convertToTensor } from '../../tensor_util_env';\nimport * as util from '../../util';\nimport { op } from '../operation';\n/**\n * Flips the image left to right. 
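A usage sketch that crops the top-left quadrant of a single 2x2 image and resizes it to 2x2 with the default bilinear sampling:

```js
const image = tf.tensor4d([1, 2, 3, 4], [1, 2, 2, 1]);
const boxes = tf.tensor2d([[0, 0, 0.5, 0.5]]);  // [y1, x1, y2, x2], normalized
const boxInd = tf.tensor1d([0], 'int32');       // this crop comes from image 0
tf.image.cropAndResize(image, boxes, boxInd, [2, 2]).print();
// [[1, 1.5], [2, 2.5]]
```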
Currently available in the CPU, WebGL, and\n * WASM backends.\n *\n * @param image 4d tensor of shape `[batch, imageHeight, imageWidth, depth]`.\n */\n/** @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'} */\nfunction flipLeftRight_(image) {\n const $image = convertToTensor(image, 'image', 'flipLeftRight', 'float32');\n util.assert($image.rank === 4, () => 'Error in flipLeftRight: image must be rank 4,' +\n `but got rank ${$image.rank}.`);\n const inputs = { image: $image };\n const res = ENGINE.runKernel(FlipLeftRight, inputs, {});\n return res;\n}\nexport const flipLeftRight = op({ flipLeftRight_ });\n//# sourceMappingURL=flip_left_right.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { RotateWithOffset } from '../../kernel_names';\nimport { convertToTensor } from '../../tensor_util_env';\nimport * as util from '../../util';\nimport { op } from '../operation';\n/**\n * Rotates the input image tensor counter-clockwise with an optional offset\n * center of rotation. Currently available in the CPU, WebGL, and WASM backends.\n *\n * @param image 4d tensor of shape `[batch, imageHeight, imageWidth, depth]`.\n * @param radians The amount of rotation.\n * @param fillValue The value to fill in the empty space leftover\n * after rotation. Can be either a single grayscale value (0-255), or an\n * array of three numbers `[red, green, blue]` specifying the red, green,\n * and blue channels. Defaults to `0` (black).\n * @param center The center of rotation. Can be either a single value (0-1), or\n * an array of two numbers `[centerX, centerY]`. Defaults to `0.5` (rotates\n * the image around its center).\n *\n * @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'}\n */\nfunction rotateWithOffset_(image, radians, fillValue = 0, center = 0.5) {\n const $image = convertToTensor(image, 'image', 'rotateWithOffset', 'float32');\n util.assert($image.rank === 4, () => 'Error in rotateWithOffset: image must be rank 4,' +\n `but got rank ${$image.rank}.`);\n const inputs = { image: $image };\n const attrs = { radians, fillValue, center };\n const res = ENGINE.runKernel(RotateWithOffset, inputs, attrs);\n return res;\n}\nexport const rotateWithOffset = op({ rotateWithOffset_ });\n//# sourceMappingURL=rotate_with_offset.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
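A hedged sketch of the two image ops above on a tiny float32 batch (comments describe the 2x2 spatial content conceptually):

```js
const image = tf.tensor4d([1, 2, 3, 4], [1, 2, 2, 1], 'float32');
tf.image.flipLeftRight(image).print();                 // columns reversed: 2,1 / 4,3
tf.image.rotateWithOffset(image, Math.PI / 2).print(); // rotated 90 degrees counter-clockwise about the center
```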
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as util from '../util';\nfunction nonMaxSuppSanityCheck(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma) {\n if (iouThreshold == null) {\n iouThreshold = 0.5;\n }\n if (scoreThreshold == null) {\n scoreThreshold = Number.NEGATIVE_INFINITY;\n }\n if (softNmsSigma == null) {\n softNmsSigma = 0.0;\n }\n const numBoxes = boxes.shape[0];\n maxOutputSize = Math.min(maxOutputSize, numBoxes);\n util.assert(0 <= iouThreshold && iouThreshold <= 1, () => `iouThreshold must be in [0, 1], but was '${iouThreshold}'`);\n util.assert(boxes.rank === 2, () => `boxes must be a 2D tensor, but was of rank '${boxes.rank}'`);\n util.assert(boxes.shape[1] === 4, () => `boxes must have 4 columns, but 2nd dimension was ${boxes.shape[1]}`);\n util.assert(scores.rank === 1, () => 'scores must be a 1D tensor');\n util.assert(scores.shape[0] === numBoxes, () => `scores has incompatible shape with boxes. Expected ${numBoxes}, ` +\n `but was ${scores.shape[0]}`);\n util.assert(0 <= softNmsSigma && softNmsSigma <= 1, () => `softNmsSigma must be in [0, 1], but was '${softNmsSigma}'`);\n return { maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma };\n}\nexport { nonMaxSuppSanityCheck };\n//# sourceMappingURL=nonmax_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { NonMaxSuppressionV3 } from '../../kernel_names';\nimport { convertToTensor } from '../../tensor_util_env';\nimport { nonMaxSuppSanityCheck } from '../nonmax_util';\nimport { op } from '../operation';\nfunction nonMaxSuppression_(boxes, scores, maxOutputSize, iouThreshold = 0.5, scoreThreshold = Number.NEGATIVE_INFINITY) {\n const $boxes = convertToTensor(boxes, 'boxes', 'nonMaxSuppression');\n const $scores = convertToTensor(scores, 'scores', 'nonMaxSuppression');\n const inputs = nonMaxSuppSanityCheck($boxes, $scores, maxOutputSize, iouThreshold, scoreThreshold);\n maxOutputSize = inputs.maxOutputSize;\n iouThreshold = inputs.iouThreshold;\n scoreThreshold = inputs.scoreThreshold;\n const attrs = { maxOutputSize, iouThreshold, scoreThreshold };\n return ENGINE.runKernelFunc(b => b.nonMaxSuppression($boxes, $scores, maxOutputSize, iouThreshold, scoreThreshold), { boxes: $boxes, scores: $scores }, null /* grad */, NonMaxSuppressionV3, attrs);\n}\nexport const nonMaxSuppression = op({ nonMaxSuppression_ });\n//# sourceMappingURL=non_max_suppression.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * Inserts a value into a sorted array. This method allows duplicate, meaning it\n * allows inserting duplicate value, in which case, the element will be inserted\n * at the lowest index of the value.\n * @param arr The array to modify.\n * @param element The element to insert.\n * @param comparator Optional. If no comparator is specified, elements are\n * compared using array_util.defaultComparator, which is suitable for Strings\n * and Numbers in ascending arrays. If the array contains multiple instances of\n * the target value, the left-most instance will be returned. To provide a\n * comparator, it should take 2 arguments to compare and return a negative,\n * zero, or a positive number.\n */\nexport function binaryInsert(arr, element, comparator) {\n const index = binarySearch(arr, element, comparator);\n const insertionPoint = index < 0 ? 
-(index + 1) : index;\n arr.splice(insertionPoint, 0, element);\n}\n/**\n * Searches the array for the target using binary search, returns the index\n * of the found element, or position to insert if element not found. If no\n * comparator is specified, elements are compared using array_\n * util.defaultComparator, which is suitable for Strings and Numbers in\n * ascending arrays. If the array contains multiple instances of the target\n * value, the left-most instance will be returned.\n * @param arr The array to be searched in.\n * @param target The target to be searched for.\n * @param comparator Should take 2 arguments to compare and return a negative,\n * zero, or a positive number.\n * @return Lowest index of the target value if found, otherwise the insertion\n * point where the target should be inserted, in the form of\n * (-insertionPoint - 1).\n */\nexport function binarySearch(arr, target, comparator) {\n return binarySearch_(arr, target, comparator || defaultComparator);\n}\n/**\n * Compares its two arguments for order.\n * @param a The first element to be compared.\n * @param b The second element to be compared.\n * @return A negative number, zero, or a positive number as the first\n * argument is less than, equal to, or greater than the second.\n */\nfunction defaultComparator(a, b) {\n return a > b ? 1 : a < b ? -1 : 0;\n}\nfunction binarySearch_(arr, target, comparator) {\n let left = 0;\n let right = arr.length;\n let middle = 0;\n let found = false;\n while (left < right) {\n middle = left + ((right - left) >>> 1);\n const compareResult = comparator(target, arr[middle]);\n if (compareResult > 0) {\n left = middle + 1;\n }\n else {\n right = middle;\n // If compareResult is 0, the value is found. We record it is found,\n // and then keep looking because there may be duplicate.\n found = !compareResult;\n }\n }\n return found ? left : -left - 1;\n}\n//# sourceMappingURL=array_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * Implementation of the NonMaxSuppression kernel shared between webgl and cpu.\n */\nimport { scalar } from '../ops/scalar';\nimport { tensor1d } from '../ops/tensor1d';\nimport { binaryInsert } from './array_util';\nexport function nonMaxSuppressionV3Impl(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold) {\n return nonMaxSuppressionImpl_(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold, 0 /* softNmsSigma */)\n .selectedIndices;\n}\nexport function nonMaxSuppressionV4Impl(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold, padToMaxOutputSize) {\n return nonMaxSuppressionImpl_(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold, 0 /* softNmsSigma */, false /* returnScoresTensor */, padToMaxOutputSize /* padToMaxOutputSize */, true\n /* returnValidOutputs */ );\n}\nexport function nonMaxSuppressionV5Impl(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma) {\n return nonMaxSuppressionImpl_(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma, true /* returnScoresTensor */);\n}\nfunction nonMaxSuppressionImpl_(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma, returnScoresTensor = false, padToMaxOutputSize = false, returnValidOutputs = false) {\n // The list is sorted in ascending order, so that we can always pop the\n // candidate with the largest score in O(1) time.\n const candidates = [];\n for (let i = 0; i < scores.length; i++) {\n if (scores[i] > scoreThreshold) {\n candidates.push({ score: scores[i], boxIndex: i, suppressBeginIndex: 0 });\n }\n }\n candidates.sort(ascendingComparator);\n // If softNmsSigma is 0, the outcome of this algorithm is exactly same as\n // before.\n const scale = softNmsSigma > 0 ? (-0.5 / softNmsSigma) : 0.0;\n const selectedIndices = [];\n const selectedScores = [];\n while (selectedIndices.length < maxOutputSize && candidates.length > 0) {\n const candidate = candidates.pop();\n const { score: originalScore, boxIndex, suppressBeginIndex } = candidate;\n if (originalScore < scoreThreshold) {\n break;\n }\n // Overlapping boxes are likely to have similar scores, therefore we\n // iterate through the previously selected boxes backwards in order to\n // see if candidate's score should be suppressed. We use\n // suppressBeginIndex to track and ensure a candidate can be suppressed\n // by a selected box no more than once. 
Also, if the overlap exceeds\n // iouThreshold, we simply ignore the candidate.\n let ignoreCandidate = false;\n for (let j = selectedIndices.length - 1; j >= suppressBeginIndex; --j) {\n const iou = intersectionOverUnion(boxes, boxIndex, selectedIndices[j]);\n if (iou >= iouThreshold) {\n ignoreCandidate = true;\n break;\n }\n candidate.score =\n candidate.score * suppressWeight(iouThreshold, scale, iou);\n if (candidate.score <= scoreThreshold) {\n break;\n }\n }\n // At this point, if `candidate.score` has not dropped below\n // `scoreThreshold`, then we know that we went through all of the\n // previous selections and can safely update `suppressBeginIndex` to the\n // end of the selected array. Then we can re-insert the candidate with\n // the updated score and suppressBeginIndex back in the candidate list.\n // If on the other hand, `candidate.score` has dropped below the score\n // threshold, we will not add it back to the candidates list.\n candidate.suppressBeginIndex = selectedIndices.length;\n if (!ignoreCandidate) {\n // Candidate has passed all the tests, and is not suppressed, so\n // select the candidate.\n if (candidate.score === originalScore) {\n selectedIndices.push(boxIndex);\n selectedScores.push(candidate.score);\n }\n else if (candidate.score > scoreThreshold) {\n // Candidate's score is suppressed but is still high enough to be\n // considered, so add back to the candidates list.\n binaryInsert(candidates, candidate, ascendingComparator);\n }\n }\n }\n // NonMaxSuppressionV4 feature: padding output to maxOutputSize.\n const validOutputs = selectedIndices.length;\n const elemsToPad = maxOutputSize - validOutputs;\n if (padToMaxOutputSize && elemsToPad > 0) {\n selectedIndices.push(...new Array(elemsToPad).fill(0));\n selectedScores.push(...new Array(elemsToPad).fill(0.0));\n }\n const result = { selectedIndices: tensor1d(selectedIndices, 'int32') };\n if (returnScoresTensor) {\n result['selectedScores'] = tensor1d(selectedScores, 'float32');\n }\n if (returnValidOutputs) {\n result['validOutputs'] = scalar(validOutputs, 'int32');\n }\n return result;\n}\nfunction intersectionOverUnion(boxes, i, j) {\n const iCoord = boxes.subarray(i * 4, i * 4 + 4);\n const jCoord = boxes.subarray(j * 4, j * 4 + 4);\n const yminI = Math.min(iCoord[0], iCoord[2]);\n const xminI = Math.min(iCoord[1], iCoord[3]);\n const ymaxI = Math.max(iCoord[0], iCoord[2]);\n const xmaxI = Math.max(iCoord[1], iCoord[3]);\n const yminJ = Math.min(jCoord[0], jCoord[2]);\n const xminJ = Math.min(jCoord[1], jCoord[3]);\n const ymaxJ = Math.max(jCoord[0], jCoord[2]);\n const xmaxJ = Math.max(jCoord[1], jCoord[3]);\n const areaI = (ymaxI - yminI) * (xmaxI - xminI);\n const areaJ = (ymaxJ - yminJ) * (xmaxJ - xminJ);\n if (areaI <= 0 || areaJ <= 0) {\n return 0.0;\n }\n const intersectionYmin = Math.max(yminI, yminJ);\n const intersectionXmin = Math.max(xminI, xminJ);\n const intersectionYmax = Math.min(ymaxI, ymaxJ);\n const intersectionXmax = Math.min(xmaxI, xmaxJ);\n const intersectionArea = Math.max(intersectionYmax - intersectionYmin, 0.0) *\n Math.max(intersectionXmax - intersectionXmin, 0.0);\n return intersectionArea / (areaI + areaJ - intersectionArea);\n}\n// A Gaussian penalty function, this method always returns values in [0, 1].\n// The weight is a function of similarity, the more overlap two boxes are, the\n// smaller the weight is, meaning highly overlapping boxe will be significantly\n// penalized. 
On the other hand, a non-overlapping box will not be penalized.\nfunction suppressWeight(iouThreshold, scale, iou) {\n const weight = Math.exp(scale * iou * iou);\n return iou <= iouThreshold ? weight : 0.0;\n}\nfunction ascendingComparator(c1, c2) {\n // For objects with same scores, we make the object with the larger index go\n // first. In an array that pops from the end, this means that the object with\n // the smaller index will be popped first. This ensures the same output as\n // the TensorFlow python version.\n return (c1.score - c2.score) ||\n ((c1.score === c2.score) && (c2.boxIndex - c1.boxIndex));\n}\n//# sourceMappingURL=non_max_suppression_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { nonMaxSuppressionV3Impl } from '../../backends/non_max_suppression_impl';\nimport { convertToTensor } from '../../tensor_util_env';\nimport { nonMaxSuppSanityCheck } from '../nonmax_util';\n/**\n * Performs non maximum suppression of bounding boxes based on\n * iou (intersection over union).\n *\n * This is the async version of `nonMaxSuppression`\n *\n * @param boxes a 2d tensor of shape `[numBoxes, 4]`. Each entry is\n * `[y1, x1, y2, x2]`, where `(y1, x1)` and `(y2, x2)` are the corners of\n * the bounding box.\n * @param scores a 1d tensor providing the box scores of shape `[numBoxes]`.\n * @param maxOutputSize The maximum number of boxes to be selected.\n * @param iouThreshold A float representing the threshold for deciding whether\n * boxes overlap too much with respect to IOU. Must be between [0, 1].\n * Defaults to 0.5 (50% box overlap).\n * @param scoreThreshold A threshold for deciding when to remove boxes based\n * on score. 
Defaults to -inf, which means any score is accepted.\n * @return A 1D tensor with the selected box indices.\n *\n * @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'}\n */\nasync function nonMaxSuppressionAsync_(boxes, scores, maxOutputSize, iouThreshold = 0.5, scoreThreshold = Number.NEGATIVE_INFINITY) {\n const $boxes = convertToTensor(boxes, 'boxes', 'nonMaxSuppressionAsync');\n const $scores = convertToTensor(scores, 'scores', 'nonMaxSuppressionAsync');\n const inputs = nonMaxSuppSanityCheck($boxes, $scores, maxOutputSize, iouThreshold, scoreThreshold);\n maxOutputSize = inputs.maxOutputSize;\n iouThreshold = inputs.iouThreshold;\n scoreThreshold = inputs.scoreThreshold;\n const boxesAndScores = await Promise.all([$boxes.data(), $scores.data()]);\n const boxesVals = boxesAndScores[0];\n const scoresVals = boxesAndScores[1];\n // We call a cpu based impl directly with the typedarray data here rather\n // than a kernel because all kernels are synchronous (and thus cannot await\n // .data()).\n const res = nonMaxSuppressionV3Impl(boxesVals, scoresVals, maxOutputSize, iouThreshold, scoreThreshold);\n if ($boxes !== boxes) {\n $boxes.dispose();\n }\n if ($scores !== scores) {\n $scores.dispose();\n }\n return res;\n}\nexport const nonMaxSuppressionAsync = nonMaxSuppressionAsync_;\n//# sourceMappingURL=non_max_suppression_async.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { NonMaxSuppressionV5 } from '../../kernel_names';\nimport { convertToTensor } from '../../tensor_util_env';\nimport { nonMaxSuppSanityCheck } from '../nonmax_util';\nimport { op } from '../operation';\n/**\n * Performs non maximum suppression of bounding boxes based on\n * iou (intersection over union).\n *\n * This op also supports a Soft-NMS mode (c.f.\n * Bodla et al, https://arxiv.org/abs/1704.04503) where boxes reduce the score\n * of other overlapping boxes, therefore favoring different regions of the image\n * with high scores. To enable this Soft-NMS mode, set the `softNmsSigma`\n * parameter to be larger than 0.\n *\n * @param boxes a 2d tensor of shape `[numBoxes, 4]`. Each entry is\n * `[y1, x1, y2, x2]`, where `(y1, x1)` and `(y2, x2)` are the corners of\n * the bounding box.\n * @param scores a 1d tensor providing the box scores of shape `[numBoxes]`.\n * @param maxOutputSize The maximum number of boxes to be selected.\n * @param iouThreshold A float representing the threshold for deciding whether\n * boxes overlap too much with respect to IOU. Must be between [0, 1].\n * Defaults to 0.5 (50% box overlap).\n * @param scoreThreshold A threshold for deciding when to remove boxes based\n * on score. 
Defaults to -inf, which means any score is accepted.\n * @param softNmsSigma A float representing the sigma parameter for Soft NMS.\n * When sigma is 0, it falls back to nonMaxSuppression.\n * @return A map with the following properties:\n * - selectedIndices: A 1D tensor with the selected box indices.\n * - selectedScores: A 1D tensor with the corresponding scores for each\n * selected box.\n *\n * @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'}\n */\nfunction nonMaxSuppressionWithScore_(boxes, scores, maxOutputSize, iouThreshold = 0.5, scoreThreshold = Number.NEGATIVE_INFINITY, softNmsSigma = 0.0) {\n const $boxes = convertToTensor(boxes, 'boxes', 'nonMaxSuppression');\n const $scores = convertToTensor(scores, 'scores', 'nonMaxSuppression');\n const params = nonMaxSuppSanityCheck($boxes, $scores, maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma);\n maxOutputSize = params.maxOutputSize;\n iouThreshold = params.iouThreshold;\n scoreThreshold = params.scoreThreshold;\n softNmsSigma = params.softNmsSigma;\n const inputs = { boxes: $boxes, scores: $scores };\n const attrs = { maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma };\n const result = ENGINE.runKernel(NonMaxSuppressionV5, inputs, attrs);\n return { selectedIndices: result[0], selectedScores: result[1] };\n}\nexport const nonMaxSuppressionWithScore = op({ nonMaxSuppressionWithScore_ });\n//# sourceMappingURL=non_max_suppression_with_score.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { nonMaxSuppressionV5Impl } from '../../backends/non_max_suppression_impl';\nimport { convertToTensor } from '../../tensor_util_env';\nimport { nonMaxSuppSanityCheck } from '../nonmax_util';\n/**\n * Asynchronously performs non maximum suppression of bounding boxes based on\n * iou (intersection over union).\n *\n * This op also supports a Soft-NMS mode (c.f.\n * Bodla et al, https://arxiv.org/abs/1704.04503) where boxes reduce the score\n * of other overlapping boxes, therefore favoring different regions of the image\n * with high scores. To enable this Soft-NMS mode, set the `softNmsSigma`\n * parameter to be larger than 0.\n *\n * @param boxes a 2d tensor of shape `[numBoxes, 4]`. Each entry is\n * `[y1, x1, y2, x2]`, where `(y1, x1)` and `(y2, x2)` are the corners of\n * the bounding box.\n * @param scores a 1d tensor providing the box scores of shape `[numBoxes]`.\n * @param maxOutputSize The maximum number of boxes to be selected.\n * @param iouThreshold A float representing the threshold for deciding whether\n * boxes overlap too much with respect to IOU. Must be between [0, 1].\n * Defaults to 0.5 (50% box overlap).\n * @param scoreThreshold A threshold for deciding when to remove boxes based\n * on score. 
Defaults to -inf, which means any score is accepted.\n * @param softNmsSigma A float representing the sigma parameter for Soft NMS.\n * When sigma is 0, it falls back to nonMaxSuppression.\n * @return A map with the following properties:\n * - selectedIndices: A 1D tensor with the selected box indices.\n * - selectedScores: A 1D tensor with the corresponding scores for each\n * selected box.\n *\n * @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'}\n */\nasync function nonMaxSuppressionWithScoreAsync_(boxes, scores, maxOutputSize, iouThreshold = 0.5, scoreThreshold = Number.NEGATIVE_INFINITY, softNmsSigma = 0.0) {\n const $boxes = convertToTensor(boxes, 'boxes', 'nonMaxSuppressionAsync');\n const $scores = convertToTensor(scores, 'scores', 'nonMaxSuppressionAsync');\n const params = nonMaxSuppSanityCheck($boxes, $scores, maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma);\n maxOutputSize = params.maxOutputSize;\n iouThreshold = params.iouThreshold;\n scoreThreshold = params.scoreThreshold;\n softNmsSigma = params.softNmsSigma;\n const boxesAndScores = await Promise.all([$boxes.data(), $scores.data()]);\n const boxesVals = boxesAndScores[0];\n const scoresVals = boxesAndScores[1];\n // We call a cpu based impl directly with the typedarray data here rather\n // than a kernel because all kernels are synchronous (and thus cannot await\n // .data()).\n const res = nonMaxSuppressionV5Impl(boxesVals, scoresVals, maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma);\n if ($boxes !== boxes) {\n $boxes.dispose();\n }\n if ($scores !== scores) {\n $scores.dispose();\n }\n return res;\n}\nexport const nonMaxSuppressionWithScoreAsync = nonMaxSuppressionWithScoreAsync_;\n//# sourceMappingURL=non_max_suppression_with_score_async.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { NonMaxSuppressionV4 } from '../../kernel_names';\nimport { convertToTensor } from '../../tensor_util_env';\nimport { nonMaxSuppSanityCheck } from '../nonmax_util';\nimport { op } from '../operation';\n/**\n * Asynchronously performs non maximum suppression of bounding boxes based on\n * iou (intersection over union), with an option to pad results.\n *\n * @param boxes a 2d tensor of shape `[numBoxes, 4]`. Each entry is\n * `[y1, x1, y2, x2]`, where `(y1, x1)` and `(y2, x2)` are the corners of\n * the bounding box.\n * @param scores a 1d tensor providing the box scores of shape `[numBoxes]`.\n * @param maxOutputSize The maximum number of boxes to be selected.\n * @param iouThreshold A float representing the threshold for deciding whether\n * boxes overlap too much with respect to IOU. Must be between [0, 1].\n * Defaults to 0.5 (50% box overlap).\n * @param scoreThreshold A threshold for deciding when to remove boxes based\n * on score. 
Defaults to -inf, which means any score is accepted.\n * @param padToMaxOutputSize Defaults to false. If true, size of output\n * `selectedIndices` is padded to maxOutputSize.\n * @return A map with the following properties:\n * - selectedIndices: A 1D tensor with the selected box indices.\n * - validOutputs: A scalar denoting how many elements in `selectedIndices`\n * are valid. Valid elements occur first, then padding.\n *\n * @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'}\n */\nfunction nonMaxSuppressionPadded_(boxes, scores, maxOutputSize, iouThreshold = 0.5, scoreThreshold = Number.NEGATIVE_INFINITY, padToMaxOutputSize = false) {\n const $boxes = convertToTensor(boxes, 'boxes', 'nonMaxSuppression');\n const $scores = convertToTensor(scores, 'scores', 'nonMaxSuppression');\n const params = nonMaxSuppSanityCheck($boxes, $scores, maxOutputSize, iouThreshold, scoreThreshold, null /* softNmsSigma */);\n const $maxOutputSize = params.maxOutputSize;\n const $iouThreshold = params.iouThreshold;\n const $scoreThreshold = params.scoreThreshold;\n const inputs = { boxes: $boxes, scores: $scores };\n const attrs = {\n maxOutputSize: $maxOutputSize,\n iouThreshold: $iouThreshold,\n scoreThreshold: $scoreThreshold,\n padToMaxOutputSize\n };\n const result = ENGINE.runKernel(NonMaxSuppressionV4, inputs, attrs);\n return { selectedIndices: result[0], validOutputs: result[1] };\n}\nexport const nonMaxSuppressionPadded = op({ nonMaxSuppressionPadded_ });\n//# sourceMappingURL=non_max_suppression_padded.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { nonMaxSuppressionV4Impl } from '../../backends/non_max_suppression_impl';\nimport { convertToTensor } from '../../tensor_util_env';\nimport { nonMaxSuppSanityCheck } from '../nonmax_util';\n/**\n * Asynchronously performs non maximum suppression of bounding boxes based on\n * iou (intersection over union), with an option to pad results.\n *\n * @param boxes a 2d tensor of shape `[numBoxes, 4]`. Each entry is\n * `[y1, x1, y2, x2]`, where `(y1, x1)` and `(y2, x2)` are the corners of\n * the bounding box.\n * @param scores a 1d tensor providing the box scores of shape `[numBoxes]`.\n * @param maxOutputSize The maximum number of boxes to be selected.\n * @param iouThreshold A float representing the threshold for deciding whether\n * boxes overlap too much with respect to IOU. Must be between [0, 1].\n * Defaults to 0.5 (50% box overlap).\n * @param scoreThreshold A threshold for deciding when to remove boxes based\n * on score. Defaults to -inf, which means any score is accepted.\n * @param padToMaxOutputSize Defaults to false. 
If true, size of output\n * `selectedIndices` is padded to maxOutputSize.\n * @return A map with the following properties:\n * - selectedIndices: A 1D tensor with the selected box indices.\n * - validOutputs: A scalar denoting how many elements in `selectedIndices`\n * are valid. Valid elements occur first, then padding.\n *\n * @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'}\n */\nasync function nonMaxSuppressionPaddedAsync_(boxes, scores, maxOutputSize, iouThreshold = 0.5, scoreThreshold = Number.NEGATIVE_INFINITY, padToMaxOutputSize = false) {\n const $boxes = convertToTensor(boxes, 'boxes', 'nonMaxSuppressionAsync');\n const $scores = convertToTensor(scores, 'scores', 'nonMaxSuppressionAsync');\n const params = nonMaxSuppSanityCheck($boxes, $scores, maxOutputSize, iouThreshold, scoreThreshold, null /* softNmsSigma */);\n const $maxOutputSize = params.maxOutputSize;\n const $iouThreshold = params.iouThreshold;\n const $scoreThreshold = params.scoreThreshold;\n const [boxesVals, scoresVals] = await Promise.all([$boxes.data(), $scores.data()]);\n // We call a cpu based impl directly with the typedarray data here rather\n // than a kernel because all kernels are synchronous (and thus cannot await\n // .data()).\n const res = nonMaxSuppressionV4Impl(boxesVals, scoresVals, $maxOutputSize, $iouThreshold, $scoreThreshold, padToMaxOutputSize);\n if ($boxes !== boxes) {\n $boxes.dispose();\n }\n if ($scores !== scores) {\n $scores.dispose();\n }\n return res;\n}\nexport const nonMaxSuppressionPaddedAsync = nonMaxSuppressionPaddedAsync_;\n//# sourceMappingURL=non_max_suppression_padded_async.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { ResizeBilinear } from '../../kernel_names';\nimport { convertToTensor } from '../../tensor_util_env';\nimport * as util from '../../util';\nimport { op } from '../operation';\nimport { reshape } from '../reshape';\n/**\n * Bilinear resize a single 3D image or a batch of 3D images to a new shape.\n *\n * @param images The images, of rank 4 or rank 3, of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is assumed.\n * @param size The new shape `[newHeight, newWidth]` to resize the\n * images to. Each channel is resized individually.\n * @param alignCorners Defaults to False. If true, rescale\n * input by `(new_height - 1) / (height - 1)`, which exactly aligns the 4\n * corners of images and resized images. If false, rescale by\n * `new_height / height`. 
Treat similarly the width dimension.\n *\n * @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'}\n */\nfunction resizeBilinear_(images, size, alignCorners = false) {\n const $images = convertToTensor(images, 'images', 'resizeBilinear');\n util.assert($images.rank === 3 || $images.rank === 4, () => `Error in resizeBilinear: x must be rank 3 or 4, but got ` +\n `rank ${$images.rank}.`);\n util.assert(size.length === 2, () => `Error in resizeBilinear: new shape must 2D, but got shape ` +\n `${size}.`);\n let batchImages = $images;\n let reshapedTo4D = false;\n if ($images.rank === 3) {\n reshapedTo4D = true;\n batchImages = reshape($images, [1, $images.shape[0], $images.shape[1], $images.shape[2]]);\n }\n const [newHeight, newWidth] = size;\n const forward = (backend, save) => {\n save([batchImages]);\n return backend.resizeBilinear(batchImages, newHeight, newWidth, alignCorners);\n };\n const inputs = { images: batchImages };\n const attrs = { alignCorners, size };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* gradient */, ResizeBilinear, attrs);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const resizeBilinear = op({ resizeBilinear_ });\n//# sourceMappingURL=resize_bilinear.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { ResizeNearestNeighbor } from '../../kernel_names';\nimport { convertToTensor } from '../../tensor_util_env';\nimport * as util from '../../util';\nimport { op } from '../operation';\nimport { reshape } from '../reshape';\n/**\n * NearestNeighbor resize a batch of 3D images to a new shape.\n *\n * @param images The images, of rank 4 or rank 3, of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is assumed.\n * @param size The new shape `[newHeight, newWidth]` to resize the\n * images to. Each channel is resized individually.\n * @param alignCorners Defaults to False. If true, rescale\n * input by `(new_height - 1) / (height - 1)`, which exactly aligns the 4\n * corners of images and resized images. If false, rescale by\n * `new_height / height`. 
Treat similarly the width dimension.\n *\n * @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'}\n */\nfunction resizeNearestNeighbor_(images, size, alignCorners = false) {\n const $images = convertToTensor(images, 'images', 'resizeNearestNeighbor');\n util.assert($images.rank === 3 || $images.rank === 4, () => `Error in resizeNearestNeighbor: x must be rank 3 or 4, but got ` +\n `rank ${$images.rank}.`);\n util.assert(size.length === 2, () => `Error in resizeNearestNeighbor: new shape must 2D, but got shape ` +\n `${size}.`);\n util.assert($images.dtype === 'float32' || $images.dtype === 'int32', () => '`images` must have `int32` or `float32` as dtype');\n let batchImages = $images;\n let reshapedTo4D = false;\n if ($images.rank === 3) {\n reshapedTo4D = true;\n batchImages = reshape($images, [1, $images.shape[0], $images.shape[1], $images.shape[2]]);\n }\n const [newHeight, newWidth] = size;\n const inputs = { images: batchImages };\n const attrs = { alignCorners, size };\n const forward = (backend, save) => {\n save([batchImages]);\n return backend.resizeNearestNeighbor(batchImages, newHeight, newWidth, alignCorners);\n };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* gradient */, ResizeNearestNeighbor, attrs);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const resizeNearestNeighbor = op({ resizeNearestNeighbor_ });\n//# sourceMappingURL=resize_nearest_neighbor.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../../tensor_util_env';\nimport { assert } from '../../util';\nimport { greaterEqual } from '../greater_equal';\nimport { lessEqual } from '../less_equal';\nimport { logicalAnd } from '../logical_and';\nimport { op } from '../operation';\nimport { range } from '../range';\nimport { reshape } from '../reshape';\nimport { scalar } from '../scalar';\nimport { stack } from '../stack';\nimport { sub } from '../sub';\nimport { unstack } from '../unstack';\nimport { where } from '../where';\nimport { zeros } from '../zeros';\n/**\n * Copy a tensor setting everything outside a central band in each innermost\n * matrix to zero.\n *\n * The band part is computed as follows: Assume input has `k` dimensions\n * `[I, J, K, ..., M, N]`, then the output is a tensor with the same shape where\n * `band[i, j, k, ..., m, n] = in_band(m, n) * input[i, j, k, ..., m, n]`.\n * The indicator function\n * `in_band(m, n) = (num_lower < 0 || (m-n) <= num_lower))`\n * `&& (num_upper < 0 || (n-m) <= num_upper)`\n *\n * ```js\n * const x = tf.tensor2d([[ 0, 1, 2, 3],\n * [-1, 0, 1, 2],\n * [-2, -1, 0, 1],\n * [-3, -2, -1, 0]]);\n * let y = tf.linalg.bandPart(x, 1, -1);\n * y.print(); // [[ 0, 1, 2, 3],\n * // [-1, 0, 1, 2],\n * // [ 0, -1, 0, 1],\n * // [ 0, 0 , -1, 0]]\n * let z = tf.linalg.bandPart(x, 
2, 1);\n * z.print(); // [[ 0, 1, 0, 0],\n * // [-1, 0, 1, 0],\n * // [-2, -1, 0, 1],\n * // [ 0, -2, -1, 0]]\n * ```\n *\n * @param x Rank `k` tensor\n * @param numLower Number of subdiagonals to keep.\n * If negative, keep entire lower triangle.\n * @param numUpper Number of subdiagonals to keep.\n * If negative, keep entire upper triangle.\n * @returns Rank `k` tensor of the same shape as input.\n * The extracted banded tensor.\n *\n * @doc {heading:'Operations', subheading:'Linear Algebra', namespace:'linalg'}\n */\nfunction bandPart_(a, numLower, numUpper) {\n assert(numLower % 1 === 0, () => `bandPart(): numLower must be an integer, got ${numLower}.`);\n assert(numUpper % 1 === 0, () => `bandPart(): numUpper must be an integer, got ${numUpper}.`);\n const $a = convertToTensor(a, 'a', 'bandPart');\n assert($a.rank >= 2, () => `bandPart(): Rank must be at least 2, got ${$a.rank}.`);\n const shape = $a.shape;\n const [M, N] = $a.shape.slice(-2);\n if (!(numLower <= M)) {\n throw new Error(`bandPart(): numLower (${numLower})` +\n ` must not be greater than the number of rows (${M}).`);\n }\n if (!(numUpper <= N)) {\n throw new Error(`bandPart(): numUpper (${numUpper})` +\n ` must not be greater than the number of columns (${N}).`);\n }\n if (numLower < 0) {\n numLower = M;\n }\n if (numUpper < 0) {\n numUpper = N;\n }\n const i = reshape(range(0, M, 1, 'int32'), [-1, 1]);\n const j = range(0, N, 1, 'int32');\n const ij = sub(i, j);\n const inBand = logicalAnd(lessEqual(ij, scalar(+numLower, 'int32')), greaterEqual(ij, scalar(-numUpper, 'int32')));\n const zero = zeros([M, N], $a.dtype);\n return reshape(stack(unstack(reshape($a, [-1, M, N]))\n .map(mat => where(inBand, mat, zero))), shape);\n}\nexport const bandPart = op({ bandPart_ });\n//# sourceMappingURL=band_part.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { assert } from '../../util';\nimport { div } from '../div';\nimport { mul } from '../mul';\nimport { norm } from '../norm';\nimport { op } from '../operation';\nimport { split } from '../split';\nimport { squeeze } from '../squeeze';\nimport { stack } from '../stack';\nimport { sub } from '../sub';\nimport { sum } from '../sum';\n/**\n * Gram-Schmidt orthogonalization.\n *\n * ```js\n * const x = tf.tensor2d([[1, 2], [3, 4]]);\n * let y = tf.linalg.gramSchmidt(x);\n * y.print();\n * console.log('Othogonalized:');\n * y.dot(y.transpose()).print(); // should be nearly the identity matrix.\n * console.log('First row direction maintained:');\n * const data = await y.array();\n * console.log(data[0][1] / data[0][0]); // should be nearly 2.\n * ```\n *\n * @param xs The vectors to be orthogonalized, in one of the two following\n * formats:\n * - An Array of `tf.Tensor1D`.\n * - A `tf.Tensor2D`, i.e., a matrix, in which case the vectors are the rows\n * of `xs`.\n * In each case, all the vectors must have the same length and the length\n * must be greater than or equal to the number of vectors.\n * @returns The orthogonalized and normalized vectors or matrix.\n * Orthogonalization means that the vectors or the rows of the matrix\n * are orthogonal (zero inner products). Normalization means that each\n * vector or each row of the matrix has an L2 norm that equals `1`.\n *\n * @doc {heading:'Operations', subheading:'Linear Algebra', namespace:'linalg'}\n */\nfunction gramSchmidt_(xs) {\n let inputIsTensor2D;\n if (Array.isArray(xs)) {\n inputIsTensor2D = false;\n assert(xs != null && xs.length > 0, () => 'Gram-Schmidt process: input must not be null, undefined, or ' +\n 'empty');\n const dim = xs[0].shape[0];\n for (let i = 1; i < xs.length; ++i) {\n assert(xs[i].shape[0] === dim, () => 'Gram-Schmidt: Non-unique lengths found in the input vectors: ' +\n `(${xs[i].shape[0]} vs. ${dim})`);\n }\n }\n else {\n inputIsTensor2D = true;\n xs = split(xs, xs.shape[0], 0).map(x => squeeze(x, [0]));\n }\n assert(xs.length <= xs[0].shape[0], () => `Gram-Schmidt: Number of vectors (${xs.length}) exceeds ` +\n `number of dimensions (${xs[0].shape[0]}).`);\n const ys = [];\n const xs1d = xs;\n for (let i = 0; i < xs.length; ++i) {\n ys.push(ENGINE.tidy(() => {\n let x = xs1d[i];\n if (i > 0) {\n for (let j = 0; j < i; ++j) {\n const proj = mul(sum(mul(ys[j], x)), ys[j]);\n x = sub(x, proj);\n }\n }\n return div(x, norm(x, 'euclidean'));\n }));\n }\n if (inputIsTensor2D) {\n return stack(ys, 0);\n }\n else {\n return ys;\n }\n}\nexport const gramSchmidt = op({ gramSchmidt_ });\n//# sourceMappingURL=gram_schmidt.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { dispose } from '../../globals';\nimport { assert } from '../../util';\nimport { clone } from '../clone';\nimport { concat } from '../concat';\nimport { div } from '../div';\nimport { eye } from '../eye';\nimport { greater } from '../greater';\nimport { matMul } from '../mat_mul';\nimport { mul } from '../mul';\nimport { neg } from '../neg';\nimport { norm } from '../norm';\nimport { op } from '../operation';\nimport { reshape } from '../reshape';\nimport { slice } from '../slice';\nimport { stack } from '../stack';\nimport { sub } from '../sub';\nimport { tensor2d } from '../tensor2d';\nimport { transpose } from '../transpose';\nimport { unstack } from '../unstack';\nimport { where } from '../where';\n/**\n * Compute QR decomposition of m-by-n matrix using Householder transformation.\n *\n * Implementation based on\n * [http://www.cs.cornell.edu/~bindel/class/cs6210-f09/lec18.pdf]\n * (http://www.cs.cornell.edu/~bindel/class/cs6210-f09/lec18.pdf)\n *\n * ```js\n * const a = tf.tensor2d([[1, 2], [3, 4]]);\n * let [q, r] = tf.linalg.qr(a);\n * console.log('Q');\n * q.print();\n * console.log('R');\n * r.print();\n * console.log('Orthogonalized');\n * q.dot(q.transpose()).print() // should be nearly the identity matrix.\n * console.log('Reconstructed');\n * q.dot(r).print(); // should be nearly [[1, 2], [3, 4]];\n * ```\n *\n * @param x The `tf.Tensor` to be QR-decomposed. Must have rank >= 2. Suppose\n * it has the shape `[..., M, N]`.\n * @param fullMatrices An optional boolean parameter. Defaults to `false`.\n * If `true`, compute full-sized `Q`. If `false` (the default),\n * compute only the leading N columns of `Q` and `R`.\n * @returns An `Array` of two `tf.Tensor`s: `[Q, R]`. `Q` is a unitary matrix,\n * i.e., its columns all have unit norm and are mutually orthogonal.\n * If `M >= N`,\n * If `fullMatrices` is `false` (default),\n * - `Q` has a shape of `[..., M, N]`,\n * - `R` has a shape of `[..., N, N]`.\n * If `fullMatrices` is `true` (default),\n * - `Q` has a shape of `[..., M, M]`,\n * - `R` has a shape of `[..., M, N]`.\n * If `M < N`,\n * - `Q` has a shape of `[..., M, M]`,\n * - `R` has a shape of `[..., M, N]`.\n * @throws If the rank of `x` is less than 2.\n *\n * @doc {heading:'Operations',\n * subheading:'Linear Algebra',\n * namespace:'linalg'}\n */\nfunction qr_(x, fullMatrices = false) {\n assert(x.rank >= 2, () => `qr() requires input tensor to have a rank >= 2, but got rank ${x.rank}`);\n if (x.rank === 2) {\n return qr2d(x, fullMatrices);\n }\n else {\n // Rank > 2.\n // TODO(cais): Below we split the input into individual 2D tensors,\n // perform QR decomposition on them and then stack the results back\n // together. 
We should explore whether this can be parallelized.\n const outerDimsProd = x.shape.slice(0, x.shape.length - 2)\n .reduce((value, prev) => value * prev);\n const x2ds = unstack(reshape(x, [\n outerDimsProd, x.shape[x.shape.length - 2],\n x.shape[x.shape.length - 1]\n ]), 0);\n const q2ds = [];\n const r2ds = [];\n x2ds.forEach(x2d => {\n const [q2d, r2d] = qr2d(x2d, fullMatrices);\n q2ds.push(q2d);\n r2ds.push(r2d);\n });\n const q = reshape(stack(q2ds, 0), x.shape);\n const r = reshape(stack(r2ds, 0), x.shape);\n return [q, r];\n }\n}\nfunction qr2d(x, fullMatrices = false) {\n return ENGINE.tidy(() => {\n assert(x.shape.length === 2, () => `qr2d() requires a 2D Tensor, but got a ${x.shape.length}D Tensor.`);\n const m = x.shape[0];\n const n = x.shape[1];\n let q = eye(m); // Orthogonal transform so far.\n let r = clone(x); // Transformed matrix so far.\n const one2D = tensor2d([[1]], [1, 1]);\n let w = clone(one2D);\n const iters = m >= n ? n : m;\n for (let j = 0; j < iters; ++j) {\n // This tidy within the for-loop ensures we clean up temporary\n // tensors as soon as they are no longer needed.\n const rTemp = r;\n const wTemp = w;\n const qTemp = q;\n [w, r, q] = ENGINE.tidy(() => {\n // Find H = I - tau * w * w', to put zeros below R(j, j).\n const rjEnd1 = slice(r, [j, j], [m - j, 1]);\n const normX = norm(rjEnd1);\n const rjj = slice(r, [j, j], [1, 1]);\n // The sign() function returns 0 on 0, which causes division by zero.\n const s = where(greater(rjj, 0), tensor2d([[-1]]), tensor2d([[1]]));\n const u1 = sub(rjj, mul(s, normX));\n const wPre = div(rjEnd1, u1);\n if (wPre.shape[0] === 1) {\n w = clone(one2D);\n }\n else {\n w = concat([\n one2D,\n slice(wPre, [1, 0], [wPre.shape[0] - 1, wPre.shape[1]])\n ], 0);\n }\n const tau = neg(div(matMul(s, u1), normX));\n // -- R := HR, Q := QH.\n const rjEndAll = slice(r, [j, 0], [m - j, n]);\n const tauTimesW = mul(tau, w);\n const wT = transpose(w);\n if (j === 0) {\n r = sub(rjEndAll, matMul(tauTimesW, matMul(wT, rjEndAll)));\n }\n else {\n const rTimesTau = sub(rjEndAll, matMul(tauTimesW, matMul(wT, rjEndAll)));\n r = concat([slice(r, [0, 0], [j, n]), rTimesTau], 0);\n }\n const tawTimesWT = transpose(tauTimesW);\n const qAllJEnd = slice(q, [0, j], [m, q.shape[1] - j]);\n if (j === 0) {\n q = sub(qAllJEnd, matMul(matMul(qAllJEnd, w), tawTimesWT));\n }\n else {\n const qTimesTau = sub(qAllJEnd, matMul(matMul(qAllJEnd, w), tawTimesWT));\n q = concat([slice(q, [0, 0], [m, j]), qTimesTau], 1);\n }\n return [w, r, q];\n });\n dispose([rTemp, wTemp, qTemp]);\n }\n if (!fullMatrices && m > n) {\n q = slice(q, [0, 0], [m, n]);\n r = slice(r, [0, 0], [n, n]);\n }\n return [q, r];\n });\n}\nexport const qr = op({ qr_ });\n//# sourceMappingURL=qr.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport var Reduction;\n(function (Reduction) {\n Reduction[Reduction[\"NONE\"] = 0] = \"NONE\";\n Reduction[Reduction[\"MEAN\"] = 1] = \"MEAN\";\n Reduction[Reduction[\"SUM\"] = 2] = \"SUM\";\n Reduction[Reduction[\"SUM_BY_NONZERO_WEIGHTS\"] = 3] = \"SUM_BY_NONZERO_WEIGHTS\";\n})(Reduction || (Reduction = {}));\n//# sourceMappingURL=loss_ops_utils.js.map", "import { convertToTensor } from '../../tensor_util_env';\nimport { cast } from '../cast';\nimport { div } from '../div';\nimport { Reduction } from '../loss_ops_utils';\nimport { mean } from '../mean';\nimport { mul } from '../mul';\nimport { notEqual } from '../not_equal';\nimport { ones } from '../ones';\nimport { op } from '../operation';\nimport { scalar } from '../scalar';\nimport { sum } from '../sum';\n/**\n * Computes the weighted loss between two tensors.\n *\n * @param losses Tensor of shape `[batch_size, d1, ... dN]`.\n * @param weights Tensor whose rank is either 0, or the same rank as\n * `losses`, and must be broadcastable to `losses` (i.e., all\n * dimensions must be either `1`, or the same as the corresponding\n * `losses` dimension).\n *\n * @doc {heading: 'Training', subheading: 'Losses', namespace: 'losses'}\n */\nfunction computeWeightedLoss_(losses, weights, reduction = Reduction.SUM_BY_NONZERO_WEIGHTS) {\n const $losses = convertToTensor(losses, 'losses', 'computeWeightedLoss');\n let $weights = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'computeWeightedLoss');\n }\n const weightedLoss = ($weights == null) ? $losses : mul($losses, $weights);\n if (reduction === Reduction.NONE) {\n return weightedLoss;\n }\n if (reduction === Reduction.SUM) {\n return sum(weightedLoss);\n }\n if (reduction === Reduction.MEAN) {\n if ($weights == null) {\n return mean(weightedLoss);\n }\n else {\n const broadcastFactor = $losses.size / $weights.size;\n const result = div(sum(weightedLoss), sum($weights));\n return broadcastFactor > 1 ? div(result, scalar(broadcastFactor)) :\n result;\n }\n }\n if (reduction === Reduction.SUM_BY_NONZERO_WEIGHTS) {\n if ($weights == null) {\n return div(sum(weightedLoss), scalar($losses.size));\n }\n else {\n const broadcastedWeights = mul($weights, ones($losses.shape));\n const numNonZeros = cast(sum(notEqual(broadcastedWeights, scalar(0))), 'float32');\n return div(sum(weightedLoss), numNonZeros);\n }\n }\n throw Error(`Unknown reduction: ${reduction}`);\n}\nexport const computeWeightedLoss = op({ computeWeightedLoss_ });\n//# sourceMappingURL=compute_weighted_loss.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../../tensor_util_env';\nimport { assertShapesMatch } from '../../util';\nimport { abs } from '../abs';\nimport { Reduction } from '../loss_ops_utils';\nimport { op } from '../operation';\nimport { sub } from '../sub';\nimport { computeWeightedLoss } from './compute_weighted_loss';\n/**\n * Computes the absolute difference loss between two tensors.\n *\n * @param labels The ground truth output tensor, same dimensions as\n * 'predictions'.\n * @param predictions The predicted outputs.\n * @param weights Tensor whose rank is either 0, or the same rank as\n * `labels`, and must be broadcastable to `labels` (i.e., all dimensions\n * must be either `1`, or the same as the corresponding `losses`\n * dimension).\n * @param reduction Type of reduction to apply to loss. Should be of type\n * `Reduction`\n *\n * @doc {heading: 'Training', subheading: 'Losses', namespace: 'losses'}\n */\nfunction absoluteDifference_(labels, predictions, weights, reduction = Reduction.SUM_BY_NONZERO_WEIGHTS) {\n const $labels = convertToTensor(labels, 'labels', 'absoluteDifference');\n const $predictions = convertToTensor(predictions, 'predictions', 'absoluteDifference');\n let $weights = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'absoluteDifference');\n }\n assertShapesMatch($labels.shape, $predictions.shape, 'Error in absoluteDifference: ');\n const losses = abs(sub($labels, $predictions));\n return computeWeightedLoss(losses, $weights, reduction);\n}\nexport const absoluteDifference = op({ absoluteDifference_ });\n//# sourceMappingURL=absolute_difference.js.map", "import { convertToTensor } from '../../tensor_util_env';\nimport { assertShapesMatch } from '../../util';\nimport { Reduction } from '../loss_ops_utils';\nimport { mul } from '../mul';\nimport { op } from '../operation';\nimport { scalar } from '../scalar';\nimport { sub } from '../sub';\nimport { sum } from '../sum';\nimport { computeWeightedLoss } from './compute_weighted_loss';\n/**\n * Computes the cosine distance loss between two tensors.\n *\n * @param labels The ground truth output tensor, same dimensions as\n * 'predictions'.\n * @param predictions The predicted outputs.\n * @param axis The dimension along which the cosine distance is computed.\n * @param weights Tensor whose rank is either 0, or the same rank as\n * `labels`, and must be broadcastable to `labels` (i.e., all dimensions\n * must be either `1`, or the same as the corresponding `losses`\n * dimension).\n * @param reduction Type of reduction to apply to loss. 
Should be of type\n * `Reduction`\n *\n * @doc {heading: 'Training', subheading: 'Losses', namespace: 'losses'}\n */\nfunction cosineDistance_(labels, predictions, axis, weights, reduction = Reduction.SUM_BY_NONZERO_WEIGHTS) {\n const $labels = convertToTensor(labels, 'labels', 'cosineDistance');\n const $predictions = convertToTensor(predictions, 'predictions', 'cosineDistance');\n let $weights = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'cosineDistance');\n }\n assertShapesMatch($labels.shape, $predictions.shape, 'Error in cosineDistance: ');\n const one = scalar(1);\n const losses = sub(one, sum(mul($labels, $predictions), axis, true));\n return computeWeightedLoss(losses, $weights, reduction);\n}\nexport const cosineDistance = op({ cosineDistance_ });\n//# sourceMappingURL=cosine_distance.js.map", "import { convertToTensor } from '../../tensor_util_env';\nimport { assertShapesMatch } from '../../util';\nimport { Reduction } from '../loss_ops_utils';\nimport { mul } from '../mul';\nimport { op } from '../operation';\nimport { relu } from '../relu';\nimport { scalar } from '../scalar';\nimport { sub } from '../sub';\nimport { computeWeightedLoss } from './compute_weighted_loss';\n/**\n * Computes the Hinge loss between two tensors.\n *\n * @param labels The ground truth output tensor, same dimensions as\n * 'predictions'.\n * @param predictions The predicted outputs.\n * @param weights Tensor whose rank is either 0, or the same rank as\n * `labels`, and must be broadcastable to `labels` (i.e., all dimensions\n * must be either `1`, or the same as the corresponding `losses`\n * dimension).\n * @param reduction Type of reduction to apply to loss. Should be of type\n * `Reduction`\n *\n * @doc {heading: 'Training', subheading: 'Losses', namespace: 'losses'}\n */\nfunction hingeLoss_(labels, predictions, weights, reduction = Reduction.SUM_BY_NONZERO_WEIGHTS) {\n let $labels = convertToTensor(labels, 'labels', 'hingeLoss');\n const $predictions = convertToTensor(predictions, 'predictions', 'hingeLoss');\n let $weights = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'hingeLoss');\n }\n assertShapesMatch($labels.shape, $predictions.shape, 'Error in hingeLoss: ');\n const one = scalar(1);\n // Convert binary labels to (-1, 1)\n $labels = sub(mul(scalar(2), $labels), one);\n const losses = relu(sub(one, mul($labels, $predictions)));\n return computeWeightedLoss(losses, $weights, reduction);\n}\nexport const hingeLoss = op({ hingeLoss_ });\n//# sourceMappingURL=hinge_loss.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../../tensor_util_env';\nimport { assertShapesMatch } from '../../util';\nimport { abs } from '../abs';\nimport { add } from '../add';\nimport { Reduction } from '../loss_ops_utils';\nimport { minimum } from '../minimum';\nimport { mul } from '../mul';\nimport { op } from '../operation';\nimport { scalar } from '../scalar';\nimport { square } from '../square';\nimport { sub } from '../sub';\nimport { computeWeightedLoss } from './compute_weighted_loss';\n/**\n * Computes the huber loss between two tensors.\n *\n * @param labels The ground truth output tensor, same dimensions as\n * 'predictions'.\n * @param predictions The predicted outputs.\n * @param weights Tensor whose rank is either 0, or the same rank as\n * `labels`, and must be broadcastable to `labels` (i.e., all dimensions\n * must be either `1`, or the same as the corresponding `losses`\n * dimension).\n * @param delta Point where huber loss changes from quadratic to linear.\n * @param reduction Type of reduction to apply to loss. Should be of type\n * `Reduction`.\n *\n * @doc {heading: 'Training', subheading: 'Losses', namespace: 'losses'}\n */\nfunction huberLoss_(labels, predictions, weights, delta = 1.0, reduction = Reduction.SUM_BY_NONZERO_WEIGHTS) {\n const $labels = convertToTensor(labels, 'labels', 'huberLoss');\n const $predictions = convertToTensor(predictions, 'predictions', 'huberLoss');\n let $weights = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'huberLoss');\n }\n assertShapesMatch($labels.shape, $predictions.shape, 'Error in huberLoss: ');\n const deltaScalar = scalar(delta);\n const error = abs(sub($predictions, $labels));\n const quadratic = minimum(error, deltaScalar);\n const linear = sub(error, quadratic);\n const losses = add(mul(scalar(0.5), square(quadratic)), mul(deltaScalar, linear));\n return computeWeightedLoss(losses, $weights, reduction);\n}\nexport const huberLoss = op({ huberLoss_ });\n//# sourceMappingURL=huber_loss.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../../tensor_util_env';\nimport { assertShapesMatch } from '../../util';\nimport { add } from '../add';\nimport { log } from '../log';\nimport { Reduction } from '../loss_ops_utils';\nimport { mul } from '../mul';\nimport { neg } from '../neg';\nimport { op } from '../operation';\nimport { scalar } from '../scalar';\nimport { sub } from '../sub';\nimport { computeWeightedLoss } from './compute_weighted_loss';\n/**\n * Computes the log loss between two tensors.\n *\n * @param labels The ground truth output tensor, same dimensions as\n * 'predictions'.\n * @param predictions The predicted outputs.\n * @param weights Tensor whose rank is either 0, or the same rank as\n * `labels`, and must be broadcastable to `labels` (i.e., all dimensions\n * must be either `1`, or the same as the corresponding `losses`\n * dimension).\n * @param epsilon A small increment to avoid taking log of zero\n * @param reduction Type of reduction to apply to loss. Should be of type\n * `Reduction`\n *\n * @doc {heading: 'Training', subheading: 'Losses', namespace: 'losses'}\n */\nfunction logLoss_(labels, predictions, weights, epsilon = 1e-7, reduction = Reduction.SUM_BY_NONZERO_WEIGHTS) {\n const $labels = convertToTensor(labels, 'labels', 'logLoss');\n const $predictions = convertToTensor(predictions, 'predictions', 'logLoss');\n let $weights = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'logLoss');\n }\n assertShapesMatch($labels.shape, $predictions.shape, 'Error in logLoss: ');\n const one = scalar(1);\n const epsilonScalar = scalar(epsilon);\n const l1 = neg(mul($labels, log(add($predictions, epsilonScalar))));\n const l2 = mul(sub(one, $labels), log(add(sub(one, $predictions), epsilonScalar)));\n const losses = sub(l1, l2);\n return computeWeightedLoss(losses, $weights, reduction);\n}\nexport const logLoss = op({ logLoss_ });\n//# sourceMappingURL=log_loss.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../../tensor_util_env';\nimport { assertShapesMatch } from '../../util';\nimport { Reduction } from '../loss_ops_utils';\nimport { op } from '../operation';\nimport { squaredDifference } from '../squared_difference';\nimport { computeWeightedLoss } from './compute_weighted_loss';\n/**\n * Computes the mean squared error between two tensors.\n *\n * @param labels The ground truth output tensor, same dimensions as\n * 'predictions'.\n * @param predictions The predicted outputs.\n * @param weights Tensor whose rank is either 0, or the same rank as\n * `labels`, and must be broadcastable to `labels` (i.e., all dimensions\n * must be either `1`, or the same as the corresponding `losses`\n * dimension).\n * @param reduction Type of reduction to apply to loss. Should be of type\n * `Reduction`\n *\n * @doc {heading: 'Training', subheading: 'Losses', namespace: 'losses'}\n */\nfunction meanSquaredError_(labels, predictions, weights, reduction = Reduction.SUM_BY_NONZERO_WEIGHTS) {\n const $labels = convertToTensor(labels, 'labels', 'meanSquaredError');\n const $predictions = convertToTensor(predictions, 'predictions', 'meanSquaredError');\n let $weights = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'meanSquaredError');\n }\n assertShapesMatch($labels.shape, $predictions.shape, 'Error in meanSquaredError: ');\n const losses = squaredDifference($labels, $predictions);\n return computeWeightedLoss(losses, $weights, reduction);\n}\nexport const meanSquaredError = op({ meanSquaredError_ });\n//# sourceMappingURL=mean_squared_error.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../../tensor_util_env';\nimport { assertShapesMatch } from '../../util';\nimport { abs } from '../abs';\nimport { add } from '../add';\nimport { exp } from '../exp';\nimport { log1p } from '../log1p';\nimport { Reduction } from '../loss_ops_utils';\nimport { mul } from '../mul';\nimport { neg } from '../neg';\nimport { op } from '../operation';\nimport { relu } from '../relu';\nimport { scalar } from '../scalar';\nimport { sub } from '../sub';\nimport { computeWeightedLoss } from './compute_weighted_loss';\nfunction sigmoidCrossEntropyWithLogits_(labels, logits) {\n const $labels = convertToTensor(labels, 'labels', 'sigmoidCrossEntropyWithLogits');\n const $logits = convertToTensor(logits, 'logits', 'sigmoidCrossEntropyWithLogits');\n assertShapesMatch($labels.shape, $logits.shape, 'Error in sigmoidCrossEntropyWithLogits: ');\n /**\n * Implementation Details:\n *\n * For brevity, let `x = logits`, `z = labels`. The logistic loss is\n * z * -log(sigmoid(x)) + (1 - z) * -log(1 - sigmoid(x))\n * = z * -log(1 / (1 + exp(-x))) + (1 - z) * -log(exp(-x) / (1 + exp(-x)))\n * = z * log(1 + exp(-x)) + (1 - z) * (-log(exp(-x)) + log(1 + exp(-x)))\n * = z * log(1 + exp(-x)) + (1 - z) * (x + log(1 + exp(-x))\n * = (1 - z) * x + log(1 + exp(-x))\n * = x - x * z + log(1 + exp(-x))\n *\n * For x < 0, to avoid overflow in exp(-x), we reformulate the above\n * x - x * z + log(1 + exp(-x))\n * = log(exp(x)) - x * z + log(1 + exp(-x))\n * = - x * z + log(1 + exp(x))\n *\n * Hence, to ensure stability and avoid overflow, the implementation uses\n * this equivalent formulation:\n * max(x, 0) - x * z + log(1 + exp(-abs(x)))\n */\n const maxOutput = relu($logits);\n const outputXTarget = mul($logits, $labels);\n const sigmoidOutput = log1p(exp(neg(abs($logits))));\n return add(sub(maxOutput, outputXTarget), sigmoidOutput);\n}\n/**\n * Computes the sigmoid cross entropy loss between two tensors.\n *\n * If labelSmoothing is nonzero, smooth the labels towards 1/2:\n *\n * newMulticlassLabels = multiclassLabels * (1 - labelSmoothing)\n * + 0.5 * labelSmoothing\n *\n * @param multiClassLabels The ground truth output tensor of shape\n * [batch_size, num_classes], same dimensions as 'predictions'.\n * @param logits The predicted outputs.\n * @param weights Tensor whose rank is either 0, or the same rank as\n * `labels`, and must be broadcastable to `labels` (i.e., all dimensions\n * must be either `1`, or the same as the corresponding `losses`\n * dimension).\n * @param labelSmoothing If greater than 0, then smooth the labels.\n * @param reduction Type of reduction to apply to loss. 
Should be of type\n * `Reduction`\n *\n * @doc { heading: 'Training', subheading: 'Losses', namespace: 'losses' }\n */\nfunction sigmoidCrossEntropy_(multiClassLabels, logits, weights, labelSmoothing = 0, reduction = Reduction.SUM_BY_NONZERO_WEIGHTS) {\n let $multiClassLabels = convertToTensor(multiClassLabels, 'multiClassLabels', 'sigmoidCrossEntropy');\n const $logits = convertToTensor(logits, 'logits', 'sigmoidCrossEntropy');\n let $weights = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'sigmoidCrossEntropy');\n }\n assertShapesMatch($multiClassLabels.shape, $logits.shape, 'Error in sigmoidCrossEntropy: ');\n if (labelSmoothing > 0) {\n const labelSmoothingScalar = scalar(labelSmoothing);\n const one = scalar(1);\n const half = scalar(0.5);\n $multiClassLabels =\n add(mul($multiClassLabels, sub(one, labelSmoothingScalar)), mul(half, labelSmoothingScalar));\n }\n const losses = sigmoidCrossEntropyWithLogits_($multiClassLabels, $logits);\n return computeWeightedLoss(losses, $weights, reduction);\n}\nexport const sigmoidCrossEntropy = op({ sigmoidCrossEntropy_ });\n//# sourceMappingURL=sigmoid_cross_entropy.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { customGrad } from '../../gradients';\nimport { convertToTensor } from '../../tensor_util_env';\nimport { assertShapesMatch } from '../../util';\nimport { add } from '../add';\nimport { expandShapeToKeepDim } from '../axis_util';\nimport { cast } from '../cast';\nimport { div } from '../div';\nimport { exp } from '../exp';\nimport { logSumExp } from '../log_sum_exp';\nimport { Reduction } from '../loss_ops_utils';\nimport { mul } from '../mul';\nimport { neg } from '../neg';\nimport { op } from '../operation';\nimport { reshape } from '../reshape';\nimport { scalar } from '../scalar';\nimport { sub } from '../sub';\nimport { sum } from '../sum';\nimport { computeWeightedLoss } from './compute_weighted_loss';\n/**\n * Computes softmax cross entropy between logits and labels.\n *\n * Measures the probability error in discrete classification tasks in which\n * the classes are mutually exclusive (each entry is in exactly one class).\n * For example, each CIFAR-10 image is labeled with one and only one label: an\n * image can be a dog or a truck, but not both.\n *\n * `NOTE`: While the classes are mutually exclusive, their probabilities need\n * not be. All that is required is that each row of labels is a valid\n * probability distribution. If they are not, the computation of the gradient\n * will be incorrect.\n *\n * `WARNING`: This op expects unscaled logits, since it performs a softmax on\n * logits internally for efficiency. Do not call this op with the output of\n * softmax, as it will produce incorrect results.\n *\n * logits and labels must have the same shape, e.g. 
[batch_size, num_classes]\n * and the same dtype.\n * @param labels The labels array.\n * @param logits The logits array.\n * @param dim The dimension softmax would be performed on. Defaults to `-1`\n * which indicates the last dimension.\n */\nfunction softmaxCrossEntropyWithLogits_(labels, logits, dim = -1) {\n if (dim === -1) {\n dim = logits.rank - 1;\n }\n if (dim !== logits.rank - 1) {\n throw Error(`Softmax cross entropy along a non-last dimension is not yet ` +\n `supported. Labels / logits was rank ${logits.rank} ` +\n `and dim was ${dim}`);\n }\n // Use a custom gradient for numerical stability.\n const customOp = customGrad((labels, logits, save) => {\n // Reference:\n // 1. http://cs231n.github.io/linear-classify/#softmax\n // 2. https://blog.feedly.com/tricks-of-the-trade-logsumexp/\n const keepDims = true;\n const lse = logSumExp(logits, [dim], keepDims);\n const logResult = sub(cast(logits, 'float32'), lse);\n save([labels, logResult]);\n const costVector = neg(mul(logResult, labels));\n const value = sum(costVector, [dim]);\n const gradFunc = (dy, saved) => {\n const [labels, logResult] = saved;\n const dyShape = expandShapeToKeepDim(dy.shape, [dim]);\n return [\n mul(reshape(dy, dyShape), sub(cast(labels, 'float32'), exp(logResult))),\n mul(reshape(dy, dyShape), sub(exp(logResult), cast(labels, 'float32'))),\n ];\n };\n return { value, gradFunc };\n });\n return customOp(labels, logits);\n}\n/**\n * Computes the softmax cross entropy loss between two tensors.\n *\n * If labelSmoothing is nonzero, smooth the labels towards 1/2:\n *\n * newOnehotLabels = onehotLabels * (1 - labelSmoothing)\n * + labelSmoothing / numClasses\n *\n * @param onehotLabels One hot encoded labels\n * [batch_size, num_classes], same dimensions as 'predictions'.\n * @param logits The predicted outputs.\n * @param weights Tensor whose rank is either 0, or 1, and must be\n * broadcastable to `loss` of shape [batch_size]\n * @param labelSmoothing If greater than 0, then smooth the labels.\n * @param reduction Type of reduction to apply to loss. Should be of type\n * `Reduction`\n *\n * @doc { heading: 'Training', subheading: 'Losses', namespace: 'losses' }\n */\nfunction softmaxCrossEntropy_(onehotLabels, logits, weights, labelSmoothing = 0, reduction = Reduction.SUM_BY_NONZERO_WEIGHTS) {\n let $onehotLabels = convertToTensor(onehotLabels, 'onehotLabels', 'softmaxCrossEntropy');\n const $logits = convertToTensor(logits, 'logits', 'softmaxCrossEntropy');\n let $weights = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'softmaxCrossEntropy');\n }\n assertShapesMatch($onehotLabels.shape, $logits.shape, 'Error in softmaxCrossEntropy: ');\n if (labelSmoothing > 0) {\n const labelSmoothingScalar = scalar(labelSmoothing);\n const one = scalar(1);\n const numClasses = scalar($onehotLabels.shape[1]);\n $onehotLabels =\n add(mul($onehotLabels, sub(one, labelSmoothingScalar)), div(labelSmoothingScalar, numClasses));\n }\n const losses = softmaxCrossEntropyWithLogits_($onehotLabels, $logits);\n return computeWeightedLoss(losses, $weights, reduction);\n}\nexport const softmaxCrossEntropy = op({ softmaxCrossEntropy_ });\n//# sourceMappingURL=softmax_cross_entropy.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// Modularized ops.\nexport { abs } from './abs';\nexport { acos } from './acos';\nexport { acosh } from './acosh';\nexport { add } from './add';\nexport { addN } from './add_n';\nexport { all } from './all';\nexport { any } from './any';\nexport { argMax } from './arg_max';\nexport { argMin } from './arg_min';\nexport { asin } from './asin';\nexport { asinh } from './asinh';\nexport { atan } from './atan';\nexport { atan2 } from './atan2';\nexport { atanh } from './atanh';\nexport { avgPool } from './avg_pool';\nexport { avgPool3d } from './avg_pool_3d';\nexport { basicLSTMCell } from './basic_lstm_cell';\nexport { batchToSpaceND } from './batch_to_space_nd';\nexport { batchNorm } from './batchnorm';\nexport { batchNorm2d } from './batchnorm2d';\nexport { batchNorm3d } from './batchnorm3d';\nexport { batchNorm4d } from './batchnorm4d';\nexport { broadcastTo } from './broadcast_to';\nexport { buffer } from './buffer';\nexport { cast } from './cast';\nexport { ceil } from './ceil';\nexport { clipByValue } from './clip_by_value';\nexport { clone } from './clone';\nexport { complex } from './complex';\nexport { concat } from './concat';\nexport { concat1d } from './concat_1d';\nexport { concat2d } from './concat_2d';\nexport { concat3d } from './concat_3d';\nexport { concat4d } from './concat_4d';\nexport { conv1d } from './conv1d';\nexport { conv2d } from './conv2d';\nexport { conv2dTranspose } from './conv2d_transpose';\nexport { conv3d } from './conv3d';\nexport { conv3dTranspose } from './conv3d_transpose';\nexport { cos } from './cos';\nexport { cosh } from './cosh';\nexport { cumsum } from './cumsum';\nexport { depthToSpace } from './depth_to_space';\nexport { depthwiseConv2d } from './depthwise_conv2d';\nexport { diag } from './diag';\nexport { dilation2d } from './dilation2d';\nexport { div } from './div';\nexport { divNoNan } from './div_no_nan';\nexport { dot } from './dot';\nexport { elu } from './elu';\nexport { equal } from './equal';\nexport { erf } from './erf';\nexport { exp } from './exp';\nexport { expandDims } from './expand_dims';\nexport { expm1 } from './expm1';\nexport { eye } from './eye';\nexport { fill } from './fill';\nexport { floor } from './floor';\nexport { floorDiv } from './floorDiv';\nexport { gather } from './gather';\nexport { greater } from './greater';\nexport { greaterEqual } from './greater_equal';\nexport { imag } from './imag';\nexport { isFinite } from './is_finite';\nexport { isInf } from './is_inf';\nexport { isNaN } from './is_nan';\nexport { leakyRelu } from './leaky_relu';\nexport { less } from './less';\nexport { lessEqual } from './less_equal';\nexport { linspace } from './linspace';\nexport { localResponseNormalization } from './local_response_normalization';\nexport { log } from './log';\nexport { log1p } from './log1p';\nexport { logSigmoid } from './log_sigmoid';\nexport { 
logSoftmax } from './log_softmax';\nexport { logSumExp } from './log_sum_exp';\nexport { logicalAnd } from './logical_and';\nexport { logicalNot } from './logical_not';\nexport { logicalOr } from './logical_or';\nexport { logicalXor } from './logical_xor';\nexport { matMul } from './mat_mul';\nexport { max } from './max';\nexport { maxPool } from './max_pool';\nexport { maxPool3d } from './max_pool_3d';\nexport { maxPoolWithArgmax } from './max_pool_with_argmax';\nexport { maximum } from './maximum';\nexport { mean } from './mean';\nexport { min } from './min';\nexport { minimum } from './minimum';\nexport { mirrorPad } from './mirror_pad';\nexport { mod } from './mod';\nexport { moments } from './moments';\nexport { mul } from './mul';\nexport { multiRNNCell } from './multi_rnn_cell';\nexport { multinomial } from './multinomial';\nexport { neg } from './neg';\nexport { notEqual } from './not_equal';\nexport { oneHot } from './one_hot';\nexport { ones } from './ones';\nexport { onesLike } from './ones_like';\nexport { outerProduct } from './outer_product';\nexport { pad } from './pad';\nexport { pad1d } from './pad1d';\nexport { pad2d } from './pad2d';\nexport { pad3d } from './pad3d';\nexport { pad4d } from './pad4d';\nexport { pool } from './pool';\nexport { pow } from './pow';\nexport { prelu } from './prelu';\nexport { print } from './print';\nexport { prod } from './prod';\nexport { rand } from './rand';\nexport { randomGamma } from './random_gamma';\nexport { randomNormal } from './random_normal';\nexport { randomUniform } from './random_uniform';\nexport { range } from './range';\nexport { real } from './real';\nexport { reciprocal } from './reciprocal';\nexport { relu } from './relu';\nexport { relu6 } from './relu6';\nexport { reshape } from './reshape';\nexport { reverse } from './reverse';\nexport { reverse1d } from './reverse_1d';\nexport { reverse2d } from './reverse_2d';\nexport { reverse3d } from './reverse_3d';\nexport { reverse4d } from './reverse_4d';\nexport { round } from './round';\nexport { rsqrt } from './rsqrt';\nexport { scalar } from './scalar';\nexport { selu } from './selu';\nexport { separableConv2d } from './separable_conv2d';\nexport { setdiff1dAsync } from './setdiff1d_async';\nexport { sigmoid } from './sigmoid';\nexport { sign } from './sign';\nexport { sin } from './sin';\nexport { sinh } from './sinh';\nexport { slice } from './slice';\nexport { slice1d } from './slice1d';\nexport { slice2d } from './slice2d';\nexport { slice3d } from './slice3d';\nexport { slice4d } from './slice4d';\nexport { softmax } from './softmax';\nexport { softplus } from './softplus';\nexport { spaceToBatchND } from './space_to_batch_nd';\nexport { fft } from './spectral/fft';\nexport { ifft } from './spectral/ifft';\nexport { irfft } from './spectral/irfft';\nexport { rfft } from './spectral/rfft';\nexport { split } from './split';\nexport { sqrt } from './sqrt';\nexport { square } from './square';\nexport { squaredDifference } from './squared_difference';\nexport { squeeze } from './squeeze';\nexport { stack } from './stack';\nexport { step } from './step';\nexport { stridedSlice } from './strided_slice';\nexport { sub } from './sub';\nexport { sum } from './sum';\nexport { tan } from './tan';\nexport { tanh } from './tanh';\nexport { tensor } from './tensor';\nexport { tensor1d } from './tensor1d';\nexport { tensor2d } from './tensor2d';\nexport { tensor3d } from './tensor3d';\nexport { tensor4d } from './tensor4d';\nexport { tensor5d } from './tensor5d';\nexport { tensor6d 
} from './tensor6d';\nexport { tile } from './tile';\nexport { topk } from './topk';\nexport { truncatedNormal } from './truncated_normal';\nexport { unique } from './unique';\nexport { unsortedSegmentSum } from './unsorted_segment_sum';\nexport { unstack } from './unstack';\nexport { variable } from './variable';\nexport { where } from './where';\nexport { whereAsync } from './where_async';\nexport { zeros } from './zeros';\nexport { zerosLike } from './zeros_like';\nexport * from './boolean_mask';\nexport * from './compare';\nexport * from './binary_ops';\nexport * from './transpose';\nexport * from './norm';\nexport * from './moving_average';\nexport * from './scatter_nd';\nexport * from './sparse_to_dense';\nexport * from './gather_nd';\nexport * from './dropout';\nexport * from './signal_ops_util';\nexport * from './in_top_k';\nexport { op, OP_SCOPE_SUFFIX } from './operation';\nimport { rfft } from './spectral/rfft';\nimport { fft } from './spectral/fft';\nimport { ifft } from './spectral/ifft';\nimport { irfft } from './spectral/irfft';\nconst spectral = {\n fft,\n ifft,\n rfft,\n irfft\n};\nimport * as fused from './fused_ops';\nimport { hammingWindow } from './signal/hamming_window';\nimport { hannWindow } from './signal/hann_window';\nimport { frame } from './signal/frame';\nimport { stft } from './signal/stft';\nconst signal = {\n hammingWindow,\n hannWindow,\n frame,\n stft,\n};\n// Image Ops namespace\nimport { cropAndResize } from './image/crop_and_resize';\nimport { flipLeftRight } from './image/flip_left_right';\nimport { rotateWithOffset } from './image/rotate_with_offset';\nimport { nonMaxSuppression } from './image/non_max_suppression';\nimport { nonMaxSuppressionAsync } from './image/non_max_suppression_async';\nimport { nonMaxSuppressionWithScore } from './image/non_max_suppression_with_score';\nimport { nonMaxSuppressionWithScoreAsync } from './image/non_max_suppression_with_score_async';\nimport { nonMaxSuppressionPadded } from './image/non_max_suppression_padded';\nimport { nonMaxSuppressionPaddedAsync } from './image/non_max_suppression_padded_async';\nimport { resizeBilinear } from './image/resize_bilinear';\nimport { resizeNearestNeighbor } from './image/resize_nearest_neighbor';\nconst image = {\n flipLeftRight,\n resizeNearestNeighbor,\n resizeBilinear,\n rotateWithOffset,\n cropAndResize,\n nonMaxSuppression,\n nonMaxSuppressionAsync,\n nonMaxSuppressionWithScore,\n nonMaxSuppressionWithScoreAsync,\n nonMaxSuppressionPadded,\n nonMaxSuppressionPaddedAsync\n};\n// linalg namespace\nimport { bandPart } from './linalg/band_part';\nimport { gramSchmidt } from './linalg/gram_schmidt';\nimport { qr } from './linalg/qr';\nconst linalg = {\n bandPart,\n gramSchmidt,\n qr\n};\n// losses namespace;\nimport { absoluteDifference } from './losses/absolute_difference';\nimport { computeWeightedLoss } from './losses/compute_weighted_loss';\nimport { cosineDistance } from './losses/cosine_distance';\nimport { hingeLoss } from './losses/hinge_loss';\nimport { huberLoss } from './losses/huber_loss';\nimport { logLoss } from './losses/log_loss';\nimport { meanSquaredError } from './losses/mean_squared_error';\nimport { sigmoidCrossEntropy } from './losses/sigmoid_cross_entropy';\nimport { softmaxCrossEntropy } from './losses/softmax_cross_entropy';\nconst losses = {\n absoluteDifference,\n computeWeightedLoss,\n cosineDistance,\n hingeLoss,\n huberLoss,\n logLoss,\n meanSquaredError,\n sigmoidCrossEntropy,\n softmaxCrossEntropy\n};\n// Second level exports.\nexport { image, 
linalg, losses, spectral, fused, signal };\n//# sourceMappingURL=ops.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { dispose } from '../globals';\nimport { variableGrads } from '../gradients';\nimport { scalar } from '../ops/ops';\nimport { Serializable } from '../serialization';\n/** @doc {heading: 'Training', subheading: 'Classes', namespace: 'train'} */\nexport class Optimizer extends Serializable {\n /**\n * Executes `f()` and minimizes the scalar output of `f()` by computing\n * gradients of y with respect to the list of trainable variables provided by\n * `varList`. If no list is provided, it defaults to all trainable variables.\n *\n * @param f The function to execute and whose output to minimize.\n * @param returnCost Whether to return the scalar cost value produced by\n * executing `f()`.\n * @param varList An optional list of variables to update. If specified, only\n * the trainable variables in varList will be updated by minimize. Defaults to\n * all trainable variables.\n *\n * @doc {heading: 'Training', subheading: 'Optimizers'}\n */\n minimize(f, returnCost = false, varList) {\n const { value, grads } = this.computeGradients(f, varList);\n if (varList != null) {\n const gradArray = varList.map(v => ({ name: v.name, tensor: grads[v.name] }));\n this.applyGradients(gradArray);\n }\n else {\n this.applyGradients(grads);\n }\n // Dispose gradients.\n dispose(grads);\n if (returnCost) {\n return value;\n }\n else {\n value.dispose();\n return null;\n }\n }\n /**\n * The number of iterations that this optimizer instance has been invoked for.\n */\n get iterations() {\n if (this.iterations_ == null) {\n this.iterations_ = 0;\n }\n return this.iterations_;\n }\n incrementIterations() {\n this.iterations_ = this.iterations + 1;\n }\n /**\n * Executes f() and computes the gradient of the scalar output of f() with\n * respect to the list of trainable variables provided by `varList`. If no\n * list is provided, it defaults to all trainable variables.\n *\n * @param f The function to execute and whose output to use for computing\n * gradients with respect to variables.\n * @param varList An optional list of variables to compute gradients with\n * respect to. If specified, only the trainable variables in varList will have\n * gradients computed with respect to. 
Defaults to all trainable variables.\n *\n * @doc {heading: 'Training', subheading: 'Optimizers'}\n */\n computeGradients(f, varList) {\n return variableGrads(f, varList);\n }\n /**\n * Dispose the variables (if any) owned by this optimizer instance.\n */\n dispose() {\n if (this.iterations_ != null) {\n dispose(this.iterations_);\n }\n }\n async saveIterations() {\n if (this.iterations_ == null) {\n this.iterations_ = 0;\n }\n return {\n name: 'iter',\n // TODO(cais): Use 'int64' type when available.\n tensor: scalar(this.iterations_, 'int32')\n };\n }\n async getWeights() {\n throw new Error('getWeights() is not implemented for this optimizer yet.');\n }\n async setWeights(weightValues) {\n throw new Error(`setWeights() is not implemented for this optimizer class ` +\n `${this.getClassName()}`);\n }\n /**\n * Extract the first element of the weight values and set it\n * as the iterations counter variable of this instance of optimizer.\n *\n * @param weightValues\n * @returns Weight values with the first element consumed and excluded.\n */\n async extractIterations(weightValues) {\n this.iterations_ = (await weightValues[0].tensor.data())[0];\n return weightValues.slice(1);\n }\n}\nObject.defineProperty(Optimizer, Symbol.hasInstance, {\n value: (instance) => {\n return instance.minimize != null && instance.computeGradients != null &&\n instance.applyGradients != null;\n }\n});\n//# sourceMappingURL=optimizer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { dispose, tidy } from '../globals';\nimport { add } from '../ops/add';\nimport { div } from '../ops/div';\nimport { mul } from '../ops/mul';\nimport { sqrt } from '../ops/ops';\nimport { square } from '../ops/square';\nimport { zerosLike } from '../ops/zeros_like';\nimport { registerClass } from '../serialization';\nimport { Optimizer } from './optimizer';\n/** @doclink Optimizer */\nexport class AdadeltaOptimizer extends Optimizer {\n constructor(learningRate, rho, epsilon = null) {\n super();\n this.learningRate = learningRate;\n this.rho = rho;\n this.epsilon = epsilon;\n this.accumulatedGrads = [];\n this.accumulatedUpdates = [];\n if (epsilon == null) {\n this.epsilon = ENGINE.backend.epsilon();\n }\n }\n applyGradients(variableGradients) {\n const variableNames = Array.isArray(variableGradients) ?\n variableGradients.map(item => item.name) :\n Object.keys(variableGradients);\n variableNames.forEach((name, i) => {\n const value = ENGINE.registeredVariables[name];\n const trainable = false;\n if (this.accumulatedGrads[i] == null) {\n this.accumulatedGrads[i] = {\n originalName: `${name}/accum_grad`,\n variable: tidy(() => zerosLike(value).variable(trainable))\n };\n }\n if (this.accumulatedUpdates[i] == null) {\n this.accumulatedUpdates[i] = {\n originalName: `${name}/accum_var`,\n variable: tidy(() => 
zerosLike(value).variable(trainable))\n };\n }\n const gradient = Array.isArray(variableGradients) ?\n variableGradients[i].tensor :\n variableGradients[name];\n if (gradient == null) {\n return;\n }\n const accumulatedGrad = this.accumulatedGrads[i].variable;\n const accumulatedUpdate = this.accumulatedUpdates[i].variable;\n tidy(() => {\n const newAccumulatedGrad = add(mul(accumulatedGrad, this.rho), mul(square(gradient), 1 - this.rho));\n const updates = mul(div(sqrt(add(accumulatedUpdate, this.epsilon)), sqrt(add(accumulatedGrad, this.epsilon))), gradient);\n const newAccumulatedUpdate = add(mul(accumulatedUpdate, this.rho), mul(square(updates), 1 - this.rho));\n accumulatedGrad.assign(newAccumulatedGrad);\n accumulatedUpdate.assign(newAccumulatedUpdate);\n const newValue = add(mul(updates, -this.learningRate), value);\n value.assign(newValue);\n });\n });\n this.incrementIterations();\n }\n dispose() {\n if (this.accumulatedUpdates != null) {\n dispose(this.accumulatedGrads.map(v => v.variable));\n dispose(this.accumulatedUpdates.map(v => v.variable));\n }\n }\n async getWeights() {\n // Order matters for Python compatibility.\n const variables = [...this.accumulatedGrads, ...this.accumulatedUpdates];\n return [await this.saveIterations()].concat(variables.map(v => ({ name: v.originalName, tensor: v.variable })));\n }\n async setWeights(weightValues) {\n weightValues = await this.extractIterations(weightValues);\n const variableCount = weightValues.length / 2;\n const trainable = false;\n this.accumulatedGrads =\n weightValues.slice(0, variableCount).map(v => ({\n originalName: v.name,\n variable: v.tensor.variable(trainable)\n }));\n this.accumulatedUpdates =\n weightValues.slice(variableCount, variableCount * 2)\n .map(v => ({\n originalName: v.name,\n variable: v.tensor.variable(trainable)\n }));\n }\n getConfig() {\n return {\n 'learningRate': this.learningRate,\n 'rho': this.rho,\n 'epsilon': this.epsilon\n };\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls(config['learningRate'], config['rho'], config['epsilon']);\n }\n}\n/** @nocollapse */\nAdadeltaOptimizer.className = 'Adadelta'; // Name matters for Python compatibility.\nregisterClass(AdadeltaOptimizer);\n//# sourceMappingURL=adadelta_optimizer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { dispose, tidy } from '../globals';\nimport { add } from '../ops/add';\nimport { div } from '../ops/div';\nimport { fill } from '../ops/fill';\nimport { mul } from '../ops/mul';\nimport { sqrt } from '../ops/sqrt';\nimport { square } from '../ops/square';\nimport { registerClass } from '../serialization';\nimport { Optimizer } from './optimizer';\n/** @doclink Optimizer */\nexport class AdagradOptimizer extends Optimizer {\n constructor(learningRate, initialAccumulatorValue = 0.1) {\n super();\n this.learningRate = learningRate;\n this.initialAccumulatorValue = initialAccumulatorValue;\n this.accumulatedGrads = [];\n }\n applyGradients(variableGradients) {\n const variableNames = Array.isArray(variableGradients) ?\n variableGradients.map(item => item.name) :\n Object.keys(variableGradients);\n variableNames.forEach((name, i) => {\n const value = ENGINE.registeredVariables[name];\n if (this.accumulatedGrads[i] == null) {\n const trainable = false;\n this.accumulatedGrads[i] = {\n originalName: `${name}/accumulator`,\n variable: tidy(() => fill(value.shape, this.initialAccumulatorValue)\n .variable(trainable))\n };\n }\n const gradient = Array.isArray(variableGradients) ?\n variableGradients[i].tensor :\n variableGradients[name];\n if (gradient == null) {\n return;\n }\n const accumulatedGrad = this.accumulatedGrads[i].variable;\n tidy(() => {\n const newAccumulatedGrad = add(accumulatedGrad, square(gradient));\n accumulatedGrad.assign(newAccumulatedGrad);\n const newValue = add(mul(div(gradient, sqrt(add(newAccumulatedGrad, ENGINE.backend.epsilon()))), -this.learningRate), value);\n value.assign(newValue);\n });\n });\n this.incrementIterations();\n }\n dispose() {\n if (this.accumulatedGrads != null) {\n dispose(this.accumulatedGrads.map(v => v.variable));\n }\n }\n async getWeights() {\n // Order matters for Python compatibility.\n return [await this.saveIterations()].concat(this.accumulatedGrads.map(v => ({ name: v.originalName, tensor: v.variable })));\n }\n async setWeights(weightValues) {\n weightValues = await this.extractIterations(weightValues);\n const trainable = false;\n this.accumulatedGrads = weightValues.map(v => ({ originalName: v.name, variable: v.tensor.variable(trainable) }));\n }\n getConfig() {\n return {\n 'learningRate': this.learningRate,\n 'initialAccumulatorValue': this.initialAccumulatorValue,\n };\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls(config['learningRate'], config['initialAccumulatorValue']);\n }\n}\n/** @nocollapse */\nAdagradOptimizer.className = 'Adagrad'; // Note: Name matters for Python compatibility.\nregisterClass(AdagradOptimizer);\n//# sourceMappingURL=adagrad_optimizer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { dispose, tidy } from '../globals';\nimport { add } from '../ops/add';\nimport { div } from '../ops/div';\nimport { mul } from '../ops/mul';\nimport { pow } from '../ops/pow';\nimport { scalar } from '../ops/scalar';\nimport { sqrt } from '../ops/sqrt';\nimport { square } from '../ops/square';\nimport { sub } from '../ops/sub';\nimport { zerosLike } from '../ops/zeros_like';\nimport { registerClass } from '../serialization';\nimport { Optimizer } from './optimizer';\nexport class AdamOptimizer extends Optimizer {\n constructor(learningRate, beta1, beta2, epsilon = null) {\n super();\n this.learningRate = learningRate;\n this.beta1 = beta1;\n this.beta2 = beta2;\n this.epsilon = epsilon;\n this.accumulatedFirstMoment = [];\n this.accumulatedSecondMoment = [];\n tidy(() => {\n // accB* will be updated by batch.\n this.accBeta1 = scalar(beta1).variable();\n this.accBeta2 = scalar(beta2).variable();\n });\n if (epsilon == null) {\n this.epsilon = ENGINE.backend.epsilon();\n }\n }\n applyGradients(variableGradients) {\n const varNames = Array.isArray(variableGradients) ?\n variableGradients.map(v => v.name) :\n Object.keys(variableGradients);\n tidy(() => {\n const oneMinusAccBeta1 = sub(1, this.accBeta1);\n const oneMinusAccBeta2 = sub(1, this.accBeta2);\n varNames.forEach((name, i) => {\n const value = ENGINE.registeredVariables[name];\n const trainable = false;\n if (this.accumulatedFirstMoment[i] == null) {\n this.accumulatedFirstMoment[i] = {\n originalName: `${name}/m`,\n variable: tidy(() => zerosLike(value).variable(trainable))\n };\n }\n if (this.accumulatedSecondMoment[i] == null) {\n this.accumulatedSecondMoment[i] = {\n originalName: `${name}/v`,\n variable: tidy(() => zerosLike(value).variable(trainable))\n };\n }\n const gradient = Array.isArray(variableGradients) ?\n variableGradients[i].tensor :\n variableGradients[name];\n if (gradient == null) {\n return;\n }\n const firstMoment = this.accumulatedFirstMoment[i].variable;\n const secondMoment = this.accumulatedSecondMoment[i].variable;\n const newFirstMoment = add(mul(firstMoment, this.beta1), mul(gradient, 1 - this.beta1));\n const newSecondMoment = add(mul(secondMoment, this.beta2), mul(square(gradient), 1 - this.beta2));\n const biasCorrectedFirstMoment = div(newFirstMoment, oneMinusAccBeta1);\n const biasCorrectedSecondMoment = div(newSecondMoment, oneMinusAccBeta2);\n firstMoment.assign(newFirstMoment);\n secondMoment.assign(newSecondMoment);\n const newValue = add(mul(div(biasCorrectedFirstMoment, add(sqrt(biasCorrectedSecondMoment), this.epsilon)), -this.learningRate), value);\n value.assign(newValue);\n });\n this.accBeta1.assign(mul(this.accBeta1, this.beta1));\n this.accBeta2.assign(mul(this.accBeta2, this.beta2));\n });\n this.incrementIterations();\n }\n dispose() {\n this.accBeta1.dispose();\n 
this.accBeta2.dispose();\n if (this.accumulatedFirstMoment != null) {\n dispose(this.accumulatedFirstMoment.map(v => v.variable));\n }\n if (this.accumulatedSecondMoment != null) {\n dispose(this.accumulatedSecondMoment.map(v => v.variable));\n }\n }\n async getWeights() {\n // Order matters for Python compatibility.\n const variables = [...this.accumulatedFirstMoment, ...this.accumulatedSecondMoment];\n return [await this.saveIterations()].concat(variables.map(v => ({ name: v.originalName, tensor: v.variable })));\n }\n async setWeights(weightValues) {\n weightValues = await this.extractIterations(weightValues);\n tidy(() => {\n this.accBeta1.assign(pow(this.beta1, this.iterations_ + 1));\n this.accBeta2.assign(pow(this.beta2, this.iterations_ + 1));\n });\n const variableCount = weightValues.length / 2;\n const trainable = false;\n this.accumulatedFirstMoment =\n weightValues.slice(0, variableCount).map(v => ({\n originalName: v.name,\n variable: v.tensor.variable(trainable)\n }));\n this.accumulatedSecondMoment =\n weightValues.slice(variableCount, variableCount * 2)\n .map(v => ({\n originalName: v.name,\n variable: v.tensor.variable(trainable)\n }));\n }\n getConfig() {\n return {\n 'learningRate': this.learningRate,\n 'beta1': this.beta1,\n 'beta2': this.beta2,\n 'epsilon': this.epsilon,\n };\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls(config['learningRate'], config['beta1'], config['beta2'], config['epsilon']);\n }\n}\n/** @nocollapse */\nAdamOptimizer.className = 'Adam'; // Note: Name matters for Python compatibility.\nregisterClass(AdamOptimizer);\n//# sourceMappingURL=adam_optimizer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { dispose, tidy } from '../globals';\nimport { abs } from '../ops/abs';\nimport { add } from '../ops/add';\nimport { div } from '../ops/div';\nimport { maximum } from '../ops/maximum';\nimport { mul } from '../ops/mul';\nimport { scalar } from '../ops/scalar';\nimport { sub } from '../ops/sub';\nimport { zerosLike } from '../ops/zeros_like';\nimport { registerClass } from '../serialization';\nimport { Optimizer } from './optimizer';\nexport class AdamaxOptimizer extends Optimizer {\n constructor(learningRate, beta1, beta2, epsilon = null, decay = 0.0) {\n super();\n this.learningRate = learningRate;\n this.beta1 = beta1;\n this.beta2 = beta2;\n this.epsilon = epsilon;\n this.decay = decay;\n this.accumulatedFirstMoment = [];\n this.accumulatedWeightedInfNorm = [];\n tidy(() => {\n this.iteration = scalar(0).variable();\n this.accBeta1 = scalar(beta1).variable();\n });\n if (epsilon == null) {\n this.epsilon = ENGINE.backend.epsilon();\n }\n }\n applyGradients(variableGradients) {\n const variableNames = Array.isArray(variableGradients) ?\n variableGradients.map(item => item.name) :\n 
Object.keys(variableGradients);\n tidy(() => {\n const oneMinusAccBeta1 = sub(1, this.accBeta1);\n const lr = div(-this.learningRate, add(mul(this.iteration, this.decay), 1));\n variableNames.forEach((name, i) => {\n const value = ENGINE.registeredVariables[name];\n const trainable = false;\n if (this.accumulatedFirstMoment[i] == null) {\n this.accumulatedFirstMoment[i] = {\n originalName: `${name}/m`,\n variable: zerosLike(value).variable(trainable)\n };\n }\n if (this.accumulatedWeightedInfNorm[i] == null) {\n this.accumulatedWeightedInfNorm[i] = {\n originalName: `${name}/v`,\n variable: zerosLike(value).variable(trainable)\n };\n }\n const gradient = Array.isArray(variableGradients) ?\n variableGradients[i].tensor :\n variableGradients[name];\n if (gradient == null) {\n return;\n }\n const firstMoment = this.accumulatedFirstMoment[i].variable;\n const weightedInfNorm = this.accumulatedWeightedInfNorm[i].variable;\n const newFirstMoment = add(mul(firstMoment, this.beta1), mul(gradient, 1 - this.beta1));\n const ut0 = mul(weightedInfNorm, this.beta2);\n const ut1 = abs(gradient);\n const newWeightedInfNorm = maximum(ut0, ut1);\n firstMoment.assign(newFirstMoment);\n weightedInfNorm.assign(newWeightedInfNorm);\n const newValue = add(mul(div(lr, oneMinusAccBeta1), div(newFirstMoment, add(newWeightedInfNorm, this.epsilon))), value);\n value.assign(newValue);\n });\n this.iteration.assign(add(this.iteration, 1));\n this.accBeta1.assign(mul(this.accBeta1, this.beta1));\n });\n this.incrementIterations();\n }\n dispose() {\n this.accBeta1.dispose();\n this.iteration.dispose();\n if (this.accumulatedFirstMoment != null) {\n dispose(this.accumulatedFirstMoment.map(v => v.variable));\n }\n if (this.accumulatedWeightedInfNorm != null) {\n dispose(this.accumulatedWeightedInfNorm.map(v => v.variable));\n }\n }\n async getWeights() {\n throw new Error('getWeights() is not implemented for Adamax yet.');\n }\n async setWeights(weightValues) {\n throw new Error('setWeights() is not implemented for Adamax yet.');\n }\n getConfig() {\n return {\n 'learningRate': this.learningRate,\n 'beta1': this.beta1,\n 'beta2': this.beta2,\n 'epsilon': this.epsilon,\n 'decay': this.decay\n };\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls(config['learningRate'], config['beta1'], config['beta2'], config['epsilon'], config['decay']);\n }\n}\n/** @nocollapse */\nAdamaxOptimizer.className = 'Adamax'; // Note: Name matters for Python compatbility.\nregisterClass(AdamaxOptimizer);\n//# sourceMappingURL=adamax_optimizer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { keep, tidy } from '../globals';\nimport { add } from '../ops/add';\nimport { mul } from '../ops/mul';\nimport { scalar } from '../ops/scalar';\nimport { registerClass } from '../serialization';\nimport { Optimizer } from './optimizer';\n/** @doclink Optimizer */\nexport class SGDOptimizer extends Optimizer {\n constructor(learningRate) {\n super();\n this.learningRate = learningRate;\n this.setLearningRate(learningRate);\n }\n applyGradients(variableGradients) {\n const varNames = Array.isArray(variableGradients) ?\n variableGradients.map(v => v.name) :\n Object.keys(variableGradients);\n varNames.forEach((name, i) => {\n const gradient = Array.isArray(variableGradients) ?\n variableGradients[i].tensor :\n variableGradients[name];\n if (gradient == null) {\n return;\n }\n const value = ENGINE.registeredVariables[name];\n tidy(() => {\n const newValue = add(mul(this.c, gradient), value);\n value.assign(newValue);\n });\n });\n this.incrementIterations();\n }\n /**\n * Sets the learning rate of the optimizer.\n */\n setLearningRate(learningRate) {\n this.learningRate = learningRate;\n if (this.c != null) {\n this.c.dispose();\n }\n this.c = keep(scalar(-learningRate));\n }\n dispose() {\n this.c.dispose();\n }\n async getWeights() {\n return [await this.saveIterations()];\n }\n async setWeights(weightValues) {\n weightValues = await this.extractIterations(weightValues);\n if (weightValues.length !== 0) {\n throw new Error('SGD optimizer does not have settable weights.');\n }\n }\n getConfig() {\n return { 'learningRate': this.learningRate };\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls(config['learningRate']);\n }\n}\n/** @nocollapse */\nSGDOptimizer.className = 'SGD'; // Note: Name matters for Python compatibility.\nregisterClass(SGDOptimizer);\n//# sourceMappingURL=sgd_optimizer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { dispose, tidy } from '../globals';\nimport { add } from '../ops/add';\nimport { mul } from '../ops/mul';\nimport { scalar } from '../ops/scalar';\nimport { zerosLike } from '../ops/zeros_like';\nimport { registerClass } from '../serialization';\nimport { SGDOptimizer } from './sgd_optimizer';\n/** @doclink Optimizer */\nexport class MomentumOptimizer extends SGDOptimizer {\n constructor(learningRate, momentum, useNesterov = false) {\n super(learningRate);\n this.learningRate = learningRate;\n this.momentum = momentum;\n this.useNesterov = useNesterov;\n this.accumulations = [];\n this.m = scalar(this.momentum);\n }\n applyGradients(variableGradients) {\n const variableNames = Array.isArray(variableGradients) ?\n variableGradients.map(item => item.name) :\n Object.keys(variableGradients);\n variableNames.forEach((name, i) => {\n const value = ENGINE.registeredVariables[name];\n if (this.accumulations[i] == null) {\n const trainable = false;\n this.accumulations[i] = {\n originalName: `${name}/momentum`,\n variable: tidy(() => zerosLike(value).variable(trainable))\n };\n }\n const accumulation = this.accumulations[i].variable;\n const gradient = Array.isArray(variableGradients) ?\n variableGradients[i].tensor :\n variableGradients[name];\n if (gradient == null) {\n return;\n }\n tidy(() => {\n let newValue;\n const newAccumulation = add(mul(this.m, accumulation), gradient);\n if (this.useNesterov) {\n newValue = add(mul(this.c, add(gradient, mul(newAccumulation, this.m))), value);\n }\n else {\n newValue = add(mul(this.c, newAccumulation), value);\n }\n accumulation.assign(newAccumulation);\n value.assign(newValue);\n });\n });\n this.incrementIterations();\n }\n dispose() {\n this.m.dispose();\n if (this.accumulations != null) {\n dispose(this.accumulations.map(v => v.variable));\n }\n }\n /**\n * Sets the momentum of the optimizer.\n *\n * @param momentum\n */\n setMomentum(momentum) {\n this.momentum = momentum;\n }\n async getWeights() {\n // Order matters for Python compatibility.\n return [await this.saveIterations()].concat(this.accumulations.map(v => ({ name: v.originalName, tensor: v.variable })));\n }\n async setWeights(weightValues) {\n weightValues = await this.extractIterations(weightValues);\n const trainable = false;\n this.accumulations = weightValues.map(v => ({ originalName: v.name, variable: v.tensor.variable(trainable) }));\n }\n getConfig() {\n return {\n 'learningRate': this.learningRate,\n 'momentum': this.momentum,\n 'useNesterov': this.useNesterov\n };\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls(config['learningRate'], config['momentum'], config['useNesterov']);\n }\n}\n/** @nocollapse */\nMomentumOptimizer.className = 'Momentum'; // Name matters for Python compatibility.\nregisterClass(MomentumOptimizer);\n//# 
sourceMappingURL=momentum_optimizer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { dispose, tidy } from '../globals';\nimport { add } from '../ops/add';\nimport { div } from '../ops/div';\nimport { mul } from '../ops/mul';\nimport { sqrt } from '../ops/sqrt';\nimport { square } from '../ops/square';\nimport { sub } from '../ops/sub';\nimport { zerosLike } from '../ops/zeros_like';\nimport { registerClass } from '../serialization';\nimport { Optimizer } from './optimizer';\n/** @doclink Optimizer */\nexport class RMSPropOptimizer extends Optimizer {\n constructor(learningRate, decay = 0.9, momentum = 0.0, epsilon = null, centered = false) {\n super();\n this.learningRate = learningRate;\n this.decay = decay;\n this.momentum = momentum;\n this.epsilon = epsilon;\n this.accumulatedMeanSquares = [];\n this.accumulatedMoments = [];\n this.accumulatedMeanGrads = [];\n this.centered = centered;\n if (epsilon == null) {\n this.epsilon = ENGINE.backend.epsilon();\n }\n if (learningRate == null) {\n throw new Error(`learningRate for RMSPropOptimizer must be defined.`);\n }\n }\n applyGradients(variableGradients) {\n const variableNames = Array.isArray(variableGradients) ?\n variableGradients.map(item => item.name) :\n Object.keys(variableGradients);\n variableNames.forEach((name, i) => {\n const value = ENGINE.registeredVariables[name];\n const trainable = false;\n if (this.accumulatedMeanSquares[i] == null) {\n this.accumulatedMeanSquares[i] = {\n originalName: `${name}/rms`,\n variable: tidy(() => zerosLike(value).variable(trainable))\n };\n }\n if (this.accumulatedMoments[i] == null) {\n this.accumulatedMoments[i] = {\n originalName: `${name}/momentum`,\n variable: tidy(() => zerosLike(value).variable(trainable))\n };\n }\n if (this.accumulatedMeanGrads[i] == null && this.centered) {\n this.accumulatedMeanGrads[i] = {\n originalName: `${name}/mg`,\n variable: tidy(() => zerosLike(value).variable(trainable))\n };\n }\n const gradient = Array.isArray(variableGradients) ?\n variableGradients[i].tensor :\n variableGradients[name];\n if (gradient == null) {\n return;\n }\n const accumulatedMeanSquare = this.accumulatedMeanSquares[i].variable;\n const accumulatedMoments = this.accumulatedMoments[i].variable;\n tidy(() => {\n const newAccumulatedMeanSquare = add(mul(accumulatedMeanSquare, this.decay), mul(square(gradient), 1 - this.decay));\n if (this.centered) {\n const accumulatedMeanGrad = this.accumulatedMeanGrads[i].variable;\n // Centered gradient\n const newAccumulatedMeanGrad = add(mul(accumulatedMeanGrad, this.decay), mul(gradient, 1 - this.decay));\n const gradContribution = div(mul(gradient, this.learningRate), sqrt(sub(newAccumulatedMeanSquare, add(square(newAccumulatedMeanGrad), this.epsilon))));\n const newAccumulatedMoments = add(mul(accumulatedMoments, this.momentum), 
gradContribution);\n accumulatedMeanSquare.assign(newAccumulatedMeanSquare);\n accumulatedMeanGrad.assign(newAccumulatedMeanGrad);\n accumulatedMoments.assign(newAccumulatedMoments);\n const newValue = sub(value, newAccumulatedMoments);\n value.assign(newValue);\n }\n else {\n // Plain gradient\n const newAccumulatedMeanSquare = add(mul(accumulatedMeanSquare, this.decay), mul(square(gradient), 1 - this.decay));\n const newAccumulatedMoments = add(mul(accumulatedMoments, this.momentum), div(mul(gradient, this.learningRate), sqrt(add(newAccumulatedMeanSquare, this.epsilon))));\n accumulatedMeanSquare.assign(newAccumulatedMeanSquare);\n accumulatedMoments.assign(newAccumulatedMoments);\n const newValue = sub(value, newAccumulatedMoments);\n value.assign(newValue);\n }\n });\n });\n this.incrementIterations();\n }\n dispose() {\n if (this.accumulatedMeanSquares != null) {\n dispose(this.accumulatedMeanSquares.map(v => v.variable));\n }\n if (this.accumulatedMeanGrads != null && this.centered) {\n dispose(this.accumulatedMeanGrads.map(v => v.variable));\n }\n if (this.accumulatedMoments != null) {\n dispose(this.accumulatedMoments.map(v => v.variable));\n }\n }\n async getWeights() {\n // Order matters for Python compatibility.\n const variables = [...this.accumulatedMeanSquares, ...this.accumulatedMoments];\n if (this.centered) {\n variables.push(...this.accumulatedMeanGrads);\n }\n return [await this.saveIterations()].concat(variables.map(v => ({ name: v.originalName, tensor: v.variable })));\n }\n async setWeights(weightValues) {\n weightValues = await this.extractIterations(weightValues);\n const variableCount = this.centered ? weightValues.length / 3 : weightValues.length / 2;\n const trainable = false;\n this.accumulatedMeanSquares =\n weightValues.slice(0, variableCount).map(v => ({\n originalName: v.name,\n variable: v.tensor.variable(trainable)\n }));\n this.accumulatedMoments =\n weightValues.slice(variableCount, variableCount * 2)\n .map(v => ({\n originalName: v.name,\n variable: v.tensor.variable(trainable)\n }));\n if (this.centered) {\n this.accumulatedMeanGrads =\n weightValues.slice(variableCount * 2, variableCount * 3)\n .map(v => ({\n originalName: v.name,\n variable: v.tensor.variable(trainable)\n }));\n }\n }\n getConfig() {\n return {\n 'learningRate': this.learningRate,\n 'decay': this.decay,\n 'momentum': this.momentum,\n 'epsilon': this.epsilon,\n 'centered': this.centered\n };\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls(config['learningRate'], config['decay'], config['momentum'], config['epsilon'], config['centered']);\n }\n}\n/** @nocollapse */\nRMSPropOptimizer.className = 'RMSProp'; // Note: Name matters for Python compatibility.\nregisterClass(RMSPropOptimizer);\n//# sourceMappingURL=rmsprop_optimizer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
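For reference, the non-centered branch of `RMSPropOptimizer.applyGradients` reduces to the per-variable update below; a plain-JS sketch with made-up names (`rmspropStep`, `state`), not the library API:

```js
// ms  <- decay * ms + (1 - decay) * g^2
// mom <- momentum * mom + lr * g / sqrt(ms + epsilon)
// w   <- w - mom
function rmspropStep(w, g, state, { lr, decay = 0.9, momentum = 0.0, epsilon = 1e-7 } = {}) {
  state.ms = decay * state.ms + (1 - decay) * g * g;
  state.mom = momentum * state.mom + (lr * g) / Math.sqrt(state.ms + epsilon);
  return w - state.mom;
}

// e.g. rmspropStep(2, 0.5, { ms: 0, mom: 0 }, { lr: 0.01 }) -> slightly below 2
```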
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { AdadeltaOptimizer } from './adadelta_optimizer';\nimport { AdagradOptimizer } from './adagrad_optimizer';\nimport { AdamOptimizer } from './adam_optimizer';\nimport { AdamaxOptimizer } from './adamax_optimizer';\nimport { MomentumOptimizer } from './momentum_optimizer';\nimport { RMSPropOptimizer } from './rmsprop_optimizer';\nimport { SGDOptimizer } from './sgd_optimizer';\nexport class OptimizerConstructors {\n /**\n * Constructs a `tf.SGDOptimizer` that uses stochastic gradient descent.\n *\n * ```js\n * // Fit a quadratic function by learning the coefficients a, b, c.\n * const xs = tf.tensor1d([0, 1, 2, 3]);\n * const ys = tf.tensor1d([1.1, 5.9, 16.8, 33.9]);\n *\n * const a = tf.scalar(Math.random()).variable();\n * const b = tf.scalar(Math.random()).variable();\n * const c = tf.scalar(Math.random()).variable();\n *\n * // y = a * x^2 + b * x + c.\n * const f = x => a.mul(x.square()).add(b.mul(x)).add(c);\n * const loss = (pred, label) => pred.sub(label).square().mean();\n *\n * const learningRate = 0.01;\n * const optimizer = tf.train.sgd(learningRate);\n *\n * // Train the model.\n * for (let i = 0; i < 10; i++) {\n * optimizer.minimize(() => loss(f(xs), ys));\n * }\n *\n * // Make predictions.\n * console.log(\n * `a: ${a.dataSync()}, b: ${b.dataSync()}, c: ${c.dataSync()}`);\n * const preds = f(xs).dataSync();\n * preds.forEach((pred, i) => {\n * console.log(`x: ${i}, pred: ${pred}`);\n * });\n * ```\n *\n * @param learningRate The learning rate to use for the SGD algorithm.\n *\n * @doc {heading: 'Training', subheading: 'Optimizers', namespace: 'train'}\n */\n static sgd(learningRate) {\n return new SGDOptimizer(learningRate);\n }\n /**\n * Constructs a `tf.MomentumOptimizer` that uses momentum gradient\n * descent.\n *\n * See\n * [http://proceedings.mlr.press/v28/sutskever13.pdf](\n * http://proceedings.mlr.press/v28/sutskever13.pdf)\n *\n * @param learningRate The learning rate to use for the Momentum gradient\n * descent algorithm.\n * @param momentum The momentum to use for the momentum gradient descent\n * algorithm.\n *\n * @doc {heading: 'Training', subheading: 'Optimizers', namespace: 'train'}\n */\n static momentum(learningRate, momentum, useNesterov = false) {\n return new MomentumOptimizer(learningRate, momentum, useNesterov);\n }\n /**\n * Constructs a `tf.RMSPropOptimizer` that uses RMSProp gradient\n * descent. 
This implementation uses plain momentum and is not centered\n * version of RMSProp.\n *\n * See\n * [http://www.cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf](\n * http://www.cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf)\n *\n * @param learningRate The learning rate to use for the RMSProp gradient\n * descent algorithm.\n * @param decay The discounting factor for the history/coming gradient.\n * @param momentum The momentum to use for the RMSProp gradient descent\n * algorithm.\n * @param epsilon Small value to avoid zero denominator.\n * @param centered If true, gradients are normalized by the estimated\n * variance of the gradient.\n *\n * @doc {heading: 'Training', subheading: 'Optimizers', namespace: 'train'}\n */\n static rmsprop(learningRate, decay = .9, momentum = 0.0, epsilon = null, centered = false) {\n return new RMSPropOptimizer(learningRate, decay, momentum, epsilon, centered);\n }\n /**\n * Constructs a `tf.AdamOptimizer` that uses the Adam algorithm.\n * See [https://arxiv.org/abs/1412.6980](https://arxiv.org/abs/1412.6980)\n *\n * @param learningRate The learning rate to use for the Adam gradient\n * descent algorithm.\n * @param beta1 The exponential decay rate for the 1st moment estimates.\n * @param beta2 The exponential decay rate for the 2nd moment estimates.\n * @param epsilon A small constant for numerical stability.\n *\n * @doc {heading: 'Training', subheading: 'Optimizers', namespace: 'train'}\n */\n static adam(learningRate = 0.001, beta1 = 0.9, beta2 = 0.999, epsilon = null) {\n return new AdamOptimizer(learningRate, beta1, beta2, epsilon);\n }\n /**\n * Constructs a `tf.AdadeltaOptimizer` that uses the Adadelta algorithm.\n * See [https://arxiv.org/abs/1212.5701](https://arxiv.org/abs/1212.5701)\n *\n * @param learningRate The learning rate to use for the Adadelta gradient\n * descent algorithm.\n * @param rho The learning rate decay over each update.\n * @param epsilon A constant epsilon used to better condition the grad\n * update.\n *\n * @doc {heading: 'Training', subheading: 'Optimizers', namespace: 'train'}\n */\n static adadelta(learningRate = .001, rho = .95, epsilon = null) {\n return new AdadeltaOptimizer(learningRate, rho, epsilon);\n }\n /**\n * Constructs a `tf.AdamaxOptimizer` that uses the Adamax algorithm.\n * See [https://arxiv.org/abs/1412.6980](https://arxiv.org/abs/1412.6980)\n *\n * @param learningRate The learning rate to use for the Adamax gradient\n * descent algorithm.\n * @param beta1 The exponential decay rate for the 1st moment estimates.\n * @param beta2 The exponential decay rate for the 2nd moment estimates.\n * @param epsilon A small constant for numerical stability.\n * @param decay The learning rate decay over each update.\n *\n * @doc {heading: 'Training', subheading: 'Optimizers', namespace: 'train'}\n */\n static adamax(learningRate = 0.002, beta1 = 0.9, beta2 = 0.999, epsilon = null, decay = 0.0) {\n return new AdamaxOptimizer(learningRate, beta1, beta2, epsilon, decay);\n }\n /**\n * Constructs a `tf.AdagradOptimizer` that uses the Adagrad algorithm.\n * See\n * [http://www.jmlr.org/papers/volume12/duchi11a/duchi11a.pdf](\n * http://www.jmlr.org/papers/volume12/duchi11a/duchi11a.pdf)\n * or\n * [http://ruder.io/optimizing-gradient-descent/index.html#adagrad](\n * http://ruder.io/optimizing-gradient-descent/index.html#adagrad)\n *\n * @param learningRate The learning rate to use for the Adagrad gradient\n * descent algorithm.\n * @param initialAccumulatorValue Starting value for the 
accumulators, must be\n * positive.\n *\n * @doc {heading: 'Training', subheading: 'Optimizers', namespace: 'train'}\n */\n static adagrad(learningRate, initialAccumulatorValue = 0.1) {\n return new AdagradOptimizer(learningRate, initialAccumulatorValue);\n }\n}\n//# sourceMappingURL=optimizer_constructors.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// So typings can propagate.\nimport { AdadeltaOptimizer } from './optimizers/adadelta_optimizer';\nimport { AdagradOptimizer } from './optimizers/adagrad_optimizer';\nimport { AdamOptimizer } from './optimizers/adam_optimizer';\nimport { AdamaxOptimizer } from './optimizers/adamax_optimizer';\nimport { MomentumOptimizer } from './optimizers/momentum_optimizer';\nimport { OptimizerConstructors } from './optimizers/optimizer_constructors';\nimport { RMSPropOptimizer } from './optimizers/rmsprop_optimizer';\nimport { SGDOptimizer } from './optimizers/sgd_optimizer';\n// tslint:disable-next-line:no-unused-expression\n[MomentumOptimizer, SGDOptimizer, AdadeltaOptimizer, AdagradOptimizer,\n RMSPropOptimizer, AdamaxOptimizer, AdamOptimizer];\nexport const train = {\n sgd: OptimizerConstructors.sgd,\n momentum: OptimizerConstructors.momentum,\n adadelta: OptimizerConstructors.adadelta,\n adagrad: OptimizerConstructors.adagrad,\n rmsprop: OptimizerConstructors.rmsprop,\n adamax: OptimizerConstructors.adamax,\n adam: OptimizerConstructors.adam\n};\n//# sourceMappingURL=train.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
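Every factory in `tf.train` returns an `Optimizer`, so they are interchangeable inside a `minimize()` loop. A hedged usage sketch with a made-up one-variable loss:

```js
const w = tf.scalar(Math.random()).variable();
const loss = () => w.sub(3).square();  // minimum at w = 3
const optimizer = tf.train.adam(0.1);  // or tf.train.momentum(0.1, 0.9), tf.train.rmsprop(0.01), ...
for (let i = 0; i < 200; i++) optimizer.minimize(loss);
w.print();                             // approaches 3
```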
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nconst delayCallback = (() => {\n if (typeof requestAnimationFrame !== 'undefined') {\n return requestAnimationFrame;\n }\n else if (typeof setImmediate !== 'undefined') {\n return setImmediate;\n }\n return (f) => f(); // no delays\n})();\n/**\n * Returns a promise that resolve when a requestAnimationFrame has completed.\n *\n * On Node.js this uses setImmediate instead of requestAnimationFrame.\n *\n * This is simply a sugar method so that users can do the following:\n * `await tf.nextFrame();`\n *\n * @doc {heading: 'Performance', subheading: 'Timing'}\n */\nfunction nextFrame() {\n return new Promise(resolve => delayCallback(() => resolve()));\n}\nexport { nextFrame };\n//# sourceMappingURL=browser_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// Returns the image center in pixels.\nexport function getImageCenter(center, imageHeight, imageWidth) {\n const centerX = imageWidth * (typeof center === 'number' ? center : center[0]);\n const centerY = imageHeight * (typeof center === 'number' ? center : center[1]);\n return [centerX, centerY];\n}\n//# sourceMappingURL=rotate_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
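`tf.nextFrame()` is the usual way to keep per-frame work from starving rendering; a small sketch where `detect` and `runFrames` are hypothetical names:

```js
async function runFrames(detect, frames = 100) {
  for (let i = 0; i < frames; i++) {
    await detect();        // hypothetical per-frame work
    await tf.nextFrame();  // resolves on the next animation frame (setImmediate under Node)
  }
}
```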
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * Gets the new shape of the input Tensor after it's been reshaped\n * to:\n * [blockShape[0], ..., blockShape[M-1], batch / prod(blockShape),\n * inputShape[1], ..., inputShape[N-1]]\n *\n * See step 1: https://www.tensorflow.org/api_docs/python/tf/batch_to_space_nd\n */\nexport function getReshaped(inputShape, blockShape, prod, batchToSpace = true) {\n let reshaped = [];\n if (batchToSpace) {\n reshaped = reshaped.concat(blockShape.slice(0));\n reshaped.push(inputShape[0] / prod);\n reshaped = reshaped.concat(inputShape.slice(1));\n }\n else {\n reshaped = reshaped.concat(inputShape[0]);\n const spatialLength = blockShape.length;\n for (let i = 0; i < spatialLength; ++i) {\n reshaped =\n reshaped.concat([inputShape[i + 1] / blockShape[i], blockShape[i]]);\n }\n reshaped = reshaped.concat(inputShape.slice(spatialLength + 1));\n }\n return reshaped;\n}\n/**\n * Gets the permutation that will transpose the dimensions of the\n * reshaped tensor to shape:\n *\n * [batch / prod(block_shape),inputShape[1], blockShape[0], ...,\n * inputShape[M], blockShape[M-1],inputShape[M+1], ..., inputShape[N-1]]\n *\n * see step 2: https://www.tensorflow.org/api_docs/python/tf/batch_to_space_nd\n */\nexport function getPermuted(reshapedRank, blockShapeRank, batchToSpace = true) {\n const permuted = [];\n if (batchToSpace) {\n permuted.push(blockShapeRank);\n for (let i = blockShapeRank + 1; i < reshapedRank; ++i) {\n if (i <= 2 * blockShapeRank) {\n permuted.push(i);\n permuted.push(i - (blockShapeRank + 1));\n }\n else {\n permuted.push(i);\n }\n }\n }\n else {\n const permutedBeforeBatch = [];\n const permutedAfterBatch = [];\n for (let i = 1; i < reshapedRank; ++i) {\n if (i >= blockShapeRank * 2 + 1 || i % 2 === 1) {\n permutedAfterBatch.push(i);\n }\n else {\n permutedBeforeBatch.push(i);\n }\n }\n permuted.push(...permutedBeforeBatch);\n permuted.push(0);\n permuted.push(...permutedAfterBatch);\n }\n return permuted;\n}\n/**\n * Gets the shape of the reshaped and permuted input Tensor before any cropping\n * is applied. 
The new shape will be:\n *\n * [batch / prod(blockShape),inputShape[1] * blockShape[0], ...,\n * inputShape[M] * blockShape[M-1],inputShape[M+1], ..., inputShape[N-1]]\n *\n * See step 3: https://www.tensorflow.org/api_docs/python/tf/batch_to_space_nd\n */\nexport function getReshapedPermuted(inputShape, blockShape, prod, batchToSpace = true) {\n const reshapedPermuted = [];\n if (batchToSpace) {\n reshapedPermuted.push(inputShape[0] / prod);\n }\n else {\n reshapedPermuted.push(inputShape[0] * prod);\n }\n for (let i = 1; i < inputShape.length; ++i) {\n if (i <= blockShape.length) {\n if (batchToSpace) {\n reshapedPermuted.push(blockShape[i - 1] * inputShape[i]);\n }\n else {\n reshapedPermuted.push(inputShape[i] / blockShape[i - 1]);\n }\n }\n else {\n reshapedPermuted.push(inputShape[i]);\n }\n }\n return reshapedPermuted;\n}\n/**\n * Converts the crops argument into the beginning coordinates of a slice\n * operation.\n */\nexport function getSliceBeginCoords(crops, blockShape) {\n const sliceBeginCoords = [0];\n for (let i = 0; i < blockShape; ++i) {\n sliceBeginCoords.push(crops[i][0]);\n }\n return sliceBeginCoords;\n}\n/**\n * Converts the crops argument into the size of a slice operation. When\n * combined with getSliceBeginCoords this function allows the reshaped and\n * permuted Tensor to be cropped to its final output shape of:\n *\n * inputShape[1] * blockShape[0] - crops[0,0] - crops[0,1], ...,\n * inputShape[M] * blockShape[M-1] -crops[M-1,0] -\n * crops[M-1,1],inputShape[M+1], ..., inputShape[N-1]]\n *\n * See step 4: https://www.tensorflow.org/api_docs/python/tf/batch_to_space_nd\n */\nexport function getSliceSize(uncroppedShape, crops, blockShape) {\n const sliceSize = uncroppedShape.slice(0, 1);\n for (let i = 0; i < blockShape; ++i) {\n sliceSize.push(uncroppedShape[i + 1] - crops[i][0] - crops[i][1]);\n }\n return sliceSize;\n}\n//# sourceMappingURL=array_ops_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const SELU_SCALEALPHA = 1.7580993408473768599402175208123;\nexport const SELU_SCALE = 1.0507009873554804934193349852946;\n//# sourceMappingURL=selu_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
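To make the batchToSpaceND helper shapes concrete, here is what they return for a small input, assuming the helpers are reachable through the `tf.backend_util` re-exports shown further below (an assumption about the public path, not confirmed here):

```js
const inputShape = [4, 2, 2, 1];             // batch 4, 2x2 spatial, 1 channel
const blockShape = [2, 2];
const prod = blockShape[0] * blockShape[1];  // 4
console.log(tf.backend_util.getReshaped(inputShape, blockShape, prod));
// [2, 2, 1, 2, 2, 1]  -> blockShape ++ [batch / prod] ++ inputShape[1..]
console.log(tf.backend_util.getReshapedPermuted(inputShape, blockShape, prod));
// [1, 4, 4, 1]        -> [batch / prod, h * b0, w * b1, channels]
```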
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const ERF_P = 0.3275911;\nexport const ERF_A1 = 0.254829592;\nexport const ERF_A2 = -0.284496736;\nexport const ERF_A3 = 1.421413741;\nexport const ERF_A4 = -1.453152027;\nexport const ERF_A5 = 1.061405429;\n//# sourceMappingURL=erf_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from './environment';\nexport function warn(...msg) {\n if (!env().getBool('IS_TEST')) {\n console.warn(...msg);\n }\n}\nexport function log(...msg) {\n if (!env().getBool('IS_TEST')) {\n console.log(...msg);\n }\n}\n//# sourceMappingURL=log.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * Merges real and imaginary Float32Arrays into a single complex Float32Array.\n *\n * The memory layout is interleaved as follows:\n * real: [r0, r1, r2]\n * imag: [i0, i1, i2]\n * complex: [r0, i0, r1, i1, r2, i2]\n *\n * This is the inverse of splitRealAndImagArrays.\n *\n * @param real The real values of the complex tensor values.\n * @param imag The imag values of the complex tensor values.\n * @returns A complex tensor as a Float32Array with merged values.\n */\nexport function mergeRealAndImagArrays(real, imag) {\n if (real.length !== imag.length) {\n throw new Error(`Cannot merge real and imag arrays of different lengths. 
real:` +\n `${real.length}, imag: ${imag.length}.`);\n }\n const result = new Float32Array(real.length * 2);\n for (let i = 0; i < result.length; i += 2) {\n result[i] = real[i / 2];\n result[i + 1] = imag[i / 2];\n }\n return result;\n}\n/**\n * Splits a complex Float32Array into real and imag parts.\n *\n * The memory layout is interleaved as follows:\n * complex: [r0, i0, r1, i1, r2, i2]\n * real: [r0, r1, r2]\n * imag: [i0, i1, i2]\n *\n * This is the inverse of mergeRealAndImagArrays.\n *\n * @param complex The complex tensor values.\n * @returns An object with real and imag Float32Array components of the complex\n * tensor.\n */\nexport function splitRealAndImagArrays(complex) {\n const real = new Float32Array(complex.length / 2);\n const imag = new Float32Array(complex.length / 2);\n for (let i = 0; i < complex.length; i += 2) {\n real[i / 2] = complex[i];\n imag[i / 2] = complex[i + 1];\n }\n return { real, imag };\n}\n/**\n * Extracts even indexed complex values in the given array.\n * @param complex The complex tensor values\n */\nexport function complexWithEvenIndex(complex) {\n const len = Math.ceil(complex.length / 4);\n const real = new Float32Array(len);\n const imag = new Float32Array(len);\n for (let i = 0; i < complex.length; i += 4) {\n real[Math.floor(i / 4)] = complex[i];\n imag[Math.floor(i / 4)] = complex[i + 1];\n }\n return { real, imag };\n}\n/**\n * Extracts odd indexed comple values in the given array.\n * @param complex The complex tensor values\n */\nexport function complexWithOddIndex(complex) {\n const len = Math.floor(complex.length / 4);\n const real = new Float32Array(len);\n const imag = new Float32Array(len);\n for (let i = 2; i < complex.length; i += 4) {\n real[Math.floor(i / 4)] = complex[i];\n imag[Math.floor(i / 4)] = complex[i + 1];\n }\n return { real, imag };\n}\n/**\n * Get the map representing a complex value in the given array.\n * @param complex The complex tensor values.\n * @param index An index of the target complex value.\n */\nexport function getComplexWithIndex(complex, index) {\n const real = complex[index * 2];\n const imag = complex[index * 2 + 1];\n return { real, imag };\n}\n/**\n * Insert a given complex value into the TypedArray.\n * @param data The array in which the complex value is inserted.\n * @param c The complex value to be inserted.\n * @param index An index of the target complex value.\n */\nexport function assignToTypedArray(data, real, imag, index) {\n data[index * 2] = real;\n data[index * 2 + 1] = imag;\n}\n/**\n * Make the list of exponent terms used by FFT.\n */\nexport function exponents(n, inverse) {\n const real = new Float32Array(n / 2);\n const imag = new Float32Array(n / 2);\n for (let i = 0; i < Math.ceil(n / 2); i++) {\n const x = (inverse ? 2 : -2) * Math.PI * (i / n);\n real[i] = Math.cos(x);\n imag[i] = Math.sin(x);\n }\n return { real, imag };\n}\n/**\n * Make the exponent term used by FFT.\n */\nexport function exponent(k, n, inverse) {\n const x = (inverse ? 2 : -2) * Math.PI * (k / n);\n const real = Math.cos(x);\n const imag = Math.sin(x);\n return { real, imag };\n}\n//# sourceMappingURL=complex_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
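The interleaved complex layout documented above is easy to see with plain typed arrays; a sketch that assumes these helpers are exposed via the `backend_util` re-export that follows:

```js
const merged = tf.backend_util.mergeRealAndImagArrays(
    new Float32Array([1, 2, 3]),    // real
    new Float32Array([4, 5, 6]));   // imag
console.log(Array.from(merged));    // [1, 4, 2, 5, 3, 6]
const { real, imag } = tf.backend_util.splitRealAndImagArrays(merged);
console.log(Array.from(real), Array.from(imag));  // [1, 2, 3] [4, 5, 6]
```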
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { cast } from '../ops/cast';\nimport { scalar } from '../ops/scalar';\nimport { tensor1d } from '../ops/tensor1d';\nimport { zeros } from '../ops/zeros';\nimport { hasEncodingLoss, makeZerosTypedArray } from '../util';\n// Utilities needed by backend consumers of tf-core.\nexport * from '../ops/axis_util';\nexport * from '../ops/broadcast_util';\nexport * from '../ops/concat_util';\nexport * from '../ops/conv_util';\nexport * from '../ops/fused_util';\nexport * from '../ops/reduce_util';\nimport * as slice_util from '../ops/slice_util';\nexport { slice_util };\nexport { upcastType } from '../types';\nexport * from '../ops/rotate_util';\nexport * from '../ops/array_ops_util';\nexport * from '../ops/gather_nd_util';\nexport * from '../ops/scatter_nd_util';\nexport * from '../ops/selu_util';\nexport * from '../ops/fused_util';\nexport * from '../ops/erf_util';\nexport * from '../log';\nexport * from '../backends/complex_util';\nexport * from '../ops/split_util';\nimport * as segment_util from '../ops/segment_util';\nexport { segment_util };\nexport function castTensor(x, dtype, backend) {\n if (dtype === 'complex64') {\n if (x.dtype === 'complex64') {\n return x.clone();\n }\n const zerosTensor = zeros(x.shape);\n const floatX = cast(x, 'float32');\n const result = backend.complex(floatX, zerosTensor);\n zerosTensor.dispose();\n floatX.dispose();\n return result;\n }\n if (!hasEncodingLoss(x.dtype, dtype)) {\n // We don't change the underlying data, since we cast to higher\n // precision.\n return ENGINE.makeTensorFromDataId(x.dataId, x.shape, dtype);\n }\n if (x.dtype === 'complex64') {\n const real = backend.real(x);\n const result = cast(real, dtype);\n real.dispose();\n return result;\n }\n if (dtype === 'int32') {\n return backend.int(x);\n }\n else if (dtype === 'bool') {\n const zero = scalar(0, x.dtype);\n const result = backend.notEqual(x, zero);\n zero.dispose();\n return result;\n }\n else {\n throw new Error(`Error in Cast: failed to cast ${x.dtype} to ${dtype}`);\n }\n}\nexport function reshapeTensor(x, shape) {\n return ENGINE.makeTensorFromDataId(x.dataId, shape, x.dtype);\n}\nexport function linspaceImpl(start, stop, num) {\n const step = (stop - start) / (num - 1);\n const values = makeZerosTypedArray(num, 'float32');\n values[0] = start;\n for (let i = 1; i < values.length; i++) {\n values[i] = values[i - 1] + step;\n }\n return tensor1d(values, 'float32');\n}\n//# sourceMappingURL=backend_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
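`linspaceImpl` above is the plain CPU routine behind the public `tf.linspace` op, stepping by `(stop - start) / (num - 1)`; for example:

```js
tf.linspace(0, 1, 5).print();  // [0, 0.25, 0.5, 0.75, 1], step = 0.25
```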
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { slice } from '../ops/slice';\n// TODO(annxingyuan): Use this helper in WASM Split kernel once intermediate\n// kernels have been modularized in WebGL and CPU\n// https://github.com/tensorflow/tfjs/issues/2822.\n/** Shared implementation of the split kernel across WebGL and CPU. */\nexport function split(x, sizeSplits, axis) {\n const begin = new Array(x.rank).fill(0);\n const size = x.shape.slice();\n return sizeSplits.map(s => {\n const sliceSize = [...size];\n sliceSize[axis] = s;\n const sliceT = slice(x, begin, sliceSize);\n begin[axis] += s;\n return sliceT;\n });\n}\n//# sourceMappingURL=split_shared.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * An implementation of the tile kernel shared between webgl and cpu for string\n * tensors only.\n */\nimport { buffer } from '../ops/buffer';\nexport function tile(xBuf, reps) {\n const newShape = new Array(xBuf.rank);\n for (let i = 0; i < newShape.length; i++) {\n newShape[i] = xBuf.shape[i] * reps[i];\n }\n const result = buffer(newShape, xBuf.dtype);\n for (let i = 0; i < result.values.length; ++i) {\n const newLoc = result.indexToLoc(i);\n const originalLoc = new Array(xBuf.rank);\n for (let j = 0; j < originalLoc.length; j++) {\n originalLoc[j] = newLoc[j] % xBuf.shape[j];\n }\n const originalIndex = xBuf.locToIndex(originalLoc);\n result.values[i] = xBuf.values[originalIndex];\n }\n return result.toTensor();\n}\n//# sourceMappingURL=tile_impl.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
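The shared split kernel walks `sizeSplits` while advancing `begin[axis]`; through the public op this behaves as follows (illustrative usage):

```js
const x = tf.tensor2d([[1, 2, 3, 4], [5, 6, 7, 8]]);
const [a, b] = tf.split(x, [1, 3], 1);  // split sizes [1, 3] along axis 1
a.print();  // [[1], [5]]
b.print();  // [[2, 3, 4], [6, 7, 8]]
```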
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/** An implementation of the TopK kernel shared between webgl and cpu. */\nimport { tensor } from '../ops/tensor';\nimport { getTypedArrayFromDType } from '../util';\nexport function topkImpl(x, xShape, xDtype, k, sorted) {\n // Reshape into a 2d tensor [batch, lastDim] and compute topk along lastDim.\n const lastDim = xShape[xShape.length - 1];\n const [batch, size] = [x.length / lastDim, lastDim];\n const allTopKVals = getTypedArrayFromDType(xDtype, batch * k);\n const allTopKIndices = getTypedArrayFromDType('int32', batch * k);\n for (let b = 0; b < batch; b++) {\n const offset = b * size;\n const vals = x.subarray(offset, offset + size);\n const valAndInd = [];\n for (let i = 0; i < vals.length; i++) {\n valAndInd.push({ value: vals[i], index: i });\n }\n valAndInd.sort((a, b) => b.value - a.value);\n const outOffset = b * k;\n const topKVals = allTopKVals.subarray(outOffset, outOffset + k);\n const topKIndices = allTopKIndices.subarray(outOffset, outOffset + k);\n for (let i = 0; i < k; i++) {\n topKVals[i] = valAndInd[i].value;\n topKIndices[i] = valAndInd[i].index;\n }\n }\n // Reshape back to the original input shape, except that the last\n // dimension is k.\n const outputShape = xShape.slice();\n outputShape[outputShape.length - 1] = k;\n return [\n tensor(allTopKVals, outputShape, xDtype),\n tensor(allTopKIndices, outputShape, 'int32')\n ];\n}\n//# sourceMappingURL=topk_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport { nonMaxSuppressionV3Impl, nonMaxSuppressionV4Impl, nonMaxSuppressionV5Impl } from './non_max_suppression_impl';\nexport { split } from './split_shared';\nexport { tile } from './tile_impl';\nexport { topkImpl } from './topk_impl';\nexport { whereImpl } from './where_impl';\n//# sourceMappingURL=kernel_impls.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. 
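`topkImpl` sorts each batch row by value and keeps the first `k`; through the public op:

```js
const x = tf.tensor1d([1, 9, 3, 7]);
const { values, indices } = tf.topk(x, 2);
values.print();   // [9, 7]
indices.print();  // [1, 3]
```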
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// base.ts is tfjs-core without auto registration of things like flags,\n// gradients, chained ops or the opHandler. See base_side_effects.ts for parts\n// tfjs core that are required side effects.\n/**\n * @fileoverview\n * @suppress {partialAlias} Optimization disabled due to passing the module\n * object into a function below:\n *\n * import * as ops from './ops/ops';\n * setOpHandler(ops);\n */\n// Serialization.\nimport * as io from './io/io';\nimport * as math from './math';\nimport * as browser from './ops/browser';\nimport * as gather_util from './ops/gather_nd_util';\nimport * as scatter_util from './ops/scatter_nd_util';\nimport * as slice_util from './ops/slice_util';\nimport * as serialization from './serialization';\nimport * as tensor_util from './tensor_util';\nimport * as test_util from './test_util';\nimport * as util from './util';\nimport { version } from './version';\n// Optimizers.\nexport { AdadeltaOptimizer } from './optimizers/adadelta_optimizer';\nexport { AdagradOptimizer } from './optimizers/adagrad_optimizer';\nexport { AdamOptimizer } from './optimizers/adam_optimizer';\nexport { AdamaxOptimizer } from './optimizers/adamax_optimizer';\nexport { MomentumOptimizer } from './optimizers/momentum_optimizer';\nexport { Optimizer } from './optimizers/optimizer';\nexport { RMSPropOptimizer } from './optimizers/rmsprop_optimizer';\nexport { SGDOptimizer } from './optimizers/sgd_optimizer';\nexport { Tensor, TensorBuffer, Variable } from './tensor';\nexport { Rank, sumOutType, upcastType } from './types';\nexport * from './ops/ops';\nexport { Reduction } from './ops/loss_ops_utils';\nexport * from './train';\nexport * from './globals';\nexport * from './kernel_registry';\nexport { customGrad, grad, grads, valueAndGrad, valueAndGrads, variableGrads } from './gradients';\nexport { Environment, env, ENV } from './environment';\nexport { version as version_core };\n// Top-level method exports.\nexport { nextFrame } from './browser_util';\n// Second level exports.\nimport * as backend_util from './backends/backend_util';\nimport * as device_util from './device_util';\nexport { browser, io, math, serialization, test_util, util, backend_util, tensor_util, slice_util, gather_util, scatter_util, device_util };\nimport * as kernel_impls from './backends/kernel_impls';\nexport { kernel_impls };\n// Backend specific.\nexport { KernelBackend, DataStorage } from './backends/backend';\n// Export all kernel names / info.\nexport * from './kernel_names';\n//# sourceMappingURL=base.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Abs } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { mul } from '../ops/mul';\nimport { step } from '../ops/step';\nexport const absGradConfig = {\n kernelName: Abs,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => mul(dy, step(cast(x, 'float32'), -1)) };\n }\n};\n//# sourceMappingURL=Abs_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Acos } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { div } from '../ops/div';\nimport { neg } from '../ops/neg';\nimport { scalar } from '../ops/scalar';\nimport { sqrt } from '../ops/sqrt';\nimport { square } from '../ops/square';\nimport { sub } from '../ops/sub';\nexport const acosGradConfig = {\n kernelName: Acos,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return {\n x: () => {\n const a = square(cast(x, 'float32'));\n const b = sqrt(sub(scalar(1), a));\n return neg(div(dy, b));\n }\n };\n }\n};\n//# sourceMappingURL=Acos_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
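The registered gradient configs are what `tf.grad()` resolves during backprop; for instance the `Abs` config above produces the sign of the input (illustrative check):

```js
const dAbs = tf.grad(x => tf.abs(x));
dAbs(tf.tensor1d([-2, 0.5, 3])).print();  // [-1, 1, 1]
```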
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Acosh } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { div } from '../ops/div';\nimport { sqrt } from '../ops/sqrt';\nimport { square } from '../ops/square';\nimport { sub } from '../ops/sub';\nexport const acoshGradConfig = {\n kernelName: Acosh,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return {\n x: () => {\n const a = sqrt(sub(square(cast(x, 'float32')), 1));\n return div(dy, a);\n }\n };\n }\n};\n//# sourceMappingURL=Acosh_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Add } from '../kernel_names';\nimport * as broadcast_util from '../ops/broadcast_util';\nimport { reshape } from '../ops/reshape';\nimport { sum } from '../ops/sum';\nexport const addGradConfig = {\n kernelName: Add,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved) => {\n const [a, b] = saved;\n const outShape = broadcast_util.assertAndGetBroadcastShape(a.shape, b.shape);\n const derA = () => {\n let res = dy;\n const reduceAxes = broadcast_util.getReductionAxes(a.shape, outShape);\n if (reduceAxes.length > 0) {\n res = sum(res, reduceAxes);\n }\n return reshape(res, a.shape);\n };\n const derB = () => {\n let res = dy;\n const reduceAxes = broadcast_util.getReductionAxes(b.shape, outShape);\n if (reduceAxes.length > 0) {\n res = sum(res, reduceAxes);\n }\n return reshape(res, b.shape);\n };\n return { a: derA, b: derB };\n }\n};\n//# sourceMappingURL=Add_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
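The `Add` gradient sums `dy` over any broadcast axes before reshaping back to each input's shape; a small check with `tf.variableGrads` (illustrative):

```js
const a = tf.ones([2, 3]).variable();
const b = tf.ones([3]).variable();  // broadcast across the first axis of a
const { grads } = tf.variableGrads(() => tf.add(a, b).sum());
grads[a.name].print();  // [[1, 1, 1], [1, 1, 1]]
grads[b.name].print();  // [2, 2, 2] -- dy summed over the broadcast axis
```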
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { AddN } from '../kernel_names';\nexport const addNGradConfig = {\n kernelName: AddN,\n saveAllInputs: true,\n gradFunc: (dy, saved) => {\n const ders = {};\n saved.forEach((_, i) => {\n ders[i] = () => dy.clone();\n });\n return ders;\n }\n};\n//# sourceMappingURL=AddN_grad.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ArgMax } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const argMaxGradConfig = {\n kernelName: ArgMax,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => zerosLike(x) };\n }\n};\n//# sourceMappingURL=ArgMax_grad.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ArgMin } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const argMinGradConfig = {\n kernelName: ArgMin,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => zerosLike(x) };\n }\n};\n//# sourceMappingURL=ArgMin_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Asin } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { div } from '../ops/div';\nimport { scalar } from '../ops/scalar';\nimport { sqrt } from '../ops/sqrt';\nimport { square } from '../ops/square';\nimport { sub } from '../ops/sub';\nexport const asinGradConfig = {\n kernelName: Asin,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => div(dy, sqrt(sub(scalar(1), square(cast(x, 'float32'))))) };\n }\n};\n//# sourceMappingURL=Asin_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Asinh } from '../kernel_names';\nimport { add } from '../ops/add';\nimport { cast } from '../ops/cast';\nimport { div } from '../ops/div';\nimport { scalar } from '../ops/scalar';\nimport { sqrt } from '../ops/sqrt';\nimport { square } from '../ops/square';\nexport const asinhGradConfig = {\n kernelName: Asinh,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return {\n x: () => {\n const a = sqrt(add(scalar(1), square(cast(x, 'float32'))));\n return div(dy, a);\n }\n };\n }\n};\n//# sourceMappingURL=Asinh_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Atan2 } from '../kernel_names';\nimport { add } from '../ops/add';\nimport { assertAndGetBroadcastShape, getReductionAxes } from '../ops/broadcast_util';\nimport { div } from '../ops/div';\nimport { mul } from '../ops/mul';\nimport { neg } from '../ops/neg';\nimport { reshape } from '../ops/reshape';\nimport { square } from '../ops/square';\nimport { sum } from '../ops/sum';\nexport const atan2GradConfig = {\n kernelName: Atan2,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved) => {\n const [a, b] = saved;\n const outShape = assertAndGetBroadcastShape(a.shape, b.shape);\n const derA = () => {\n const d = add(square(a), square(b));\n let res = mul(dy, div(b, d));\n const reduceAxes = getReductionAxes(a.shape, outShape);\n if (reduceAxes.length > 0) {\n res = sum(res, reduceAxes);\n }\n return reshape(res, a.shape);\n };\n const derB = () => {\n const d = add(square(a), square(b));\n let res = neg(mul(dy, div(a, d)));\n const reduceAxes = getReductionAxes(b.shape, outShape);\n if (reduceAxes.length > 0) {\n res = sum(res, reduceAxes);\n }\n return reshape(res, b.shape);\n };\n return { a: derA, b: derB };\n }\n};\n//# sourceMappingURL=Atan2_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Atan } from '../kernel_names';\nimport { add } from '../ops/add';\nimport { cast } from '../ops/cast';\nimport { div } from '../ops/div';\nimport { square } from '../ops/square';\nexport const atanGradConfig = {\n kernelName: Atan,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => div(dy, add(square(cast(x, 'float32')), 1)) };\n }\n};\n//# sourceMappingURL=Atan_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Atanh } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { div } from '../ops/div';\nimport { square } from '../ops/square';\nimport { sub } from '../ops/sub';\nimport { scalar } from '../ops/scalar';\nexport const atanhGradConfig = {\n kernelName: Atanh,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => div(dy, sub(scalar(1), square(cast(x, 'float32')))) };\n }\n};\n//# sourceMappingURL=Atanh_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { AvgPool3DBackprop } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the backprop of a 3d avg pool.\n *\n * @param dy The dy error, of rank 5 of shape\n * [batchSize, depth, height, width, channels].\n * assumed.\n * @param input The original input image, of rank 5 or rank4 of shape\n * [batchSize, depth, height, width, channels].\n * @param filterSize The filter size:\n * `[filterDepth, filterHeight, filterWidth]`.\n * `filterSize` is a single number,\n * then `filterDepth == filterHeight == filterWidth`.\n * @param strides The strides of the pooling:\n * `[strideDepth, strideHeight, strideWidth]`. If\n * `strides` is a single number, then `strideHeight == strideWidth`.\n * @param dilations Deprecated, this field will be gone in v3.0.0. The dilation\n * rates: `[dilationDepth, dilationHeight, dilationWidth]`\n * in which we sample input values across the depth, height and width\n * dimensions in dilated pooling.\n * Defaults to `[1, 1, 1]`. If `dilations` is a single number,\n * then `dilationDepth == dilationHeight == dilationWidth`.\n * If it is greater than 1, then all values of `strides` must be 1.\n * @param pad A string from: 'same', 'valid'. The type of padding algorithm\n * used in the forward prop of the op.\n * @param dimRoundingMode A string from: 'ceil', 'round', 'floor'. The\n * rounding mode used when computing output dimensions if pad is a\n * number. 
If none is provided, it will not round and error if the output\n * is of fractional size.\n */\nfunction avgPool3dBackprop_(dy, input, filterSize, strides, dilations = [1, 1, 1], pad, dimRoundingMode) {\n const $dy = convertToTensor(dy, 'dy', 'avgPool3dBackprop');\n const $input = convertToTensor(input, 'input', 'avgPool3dBackprop');\n let dy5D = $dy;\n let input5D = $input;\n let reshapedTo5D = false;\n if ($input.rank === 4) {\n reshapedTo5D = true;\n dy5D = reshape($dy, [1, $dy.shape[0], $dy.shape[1], $dy.shape[2], $dy.shape[3]]);\n input5D = reshape($input, [\n 1, $input.shape[0], $input.shape[1], $input.shape[2], $input.shape[3]\n ]);\n }\n util.assert(dy5D.rank === 5, () => `Error in avgPool3dBackprop: dy must be rank 5 but got rank ` +\n `${dy5D.rank}.`);\n util.assert(input5D.rank === 5, () => `Error in avgPool3dBackprop: input must be rank 5 but got rank ` +\n `${input5D.rank}.`);\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in avgPool3dBackprop: Either strides or dilations ' +\n `must be 1. Got strides ${strides} and dilations '${dilations}'`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in avgPool3dBackprop: pad must be an integer when ` +\n `using dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const forward = backend => {\n const convInfo = conv_util.computePool3DInfo(input5D.shape, filterSize, strides, dilations, pad, dimRoundingMode);\n return backend.avgPool3dBackprop(dy5D, input5D, convInfo);\n };\n const inputs = { dy: dy5D, input: input5D };\n const attrs = { filterSize, strides, dilations, pad, dimRoundingMode };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, AvgPool3DBackprop, attrs);\n if (reshapedTo5D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3], res.shape[4]]);\n }\n return res;\n}\nexport const avgPool3dBackprop = op({ avgPool3dBackprop_ });\n//# sourceMappingURL=avg_pool_3d_backprop.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { AvgPool3D } from '../kernel_names';\nimport { avgPool3dBackprop } from '../ops/avg_pool_3d_backprop';\nexport const avgPool3DGradConfig = {\n kernelName: AvgPool3D,\n inputsToSave: ['x'],\n gradFunc: (dy, saved, attrs) => {\n const [x] = saved;\n const { filterSize, strides, dilations, pad, dimRoundingMode } = attrs;\n const $dilations = dilations == null ? [1, 1, 1] : dilations;\n return {\n x: () => avgPool3dBackprop(dy, x, filterSize, strides, $dilations, pad, dimRoundingMode)\n };\n }\n};\n//# sourceMappingURL=AvgPool3D_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { AvgPoolBackprop } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the backprop of a 2D avg pool.\n *\n * @param dy The dy error, of rank 4 or rank 3 of shape\n * [batchSize, height, width, channels]. If rank 3, batch of 1 is\n * assumed.\n * @param input The input image, of rank 4 or rank 3 of shape\n * [batchSize, height, width, channels]. If rank 3, batch of 1 is\n * assumed.\n * @param filterSize The filter size: `[filterHeight, filterWidth]`. If\n * `filterSize` is a single number, then `filterHeight == filterWidth`.\n * @param strides The strides of the pooling: `[strideHeight, strideWidth]`. If\n * `strides` is a single number, then `strideHeight == strideWidth`.\n * @param pad A string from: 'same', 'valid'. The type of padding algorithm\n * used in the forward prop of the op.\n */\nfunction avgPoolBackprop_(dy, input, filterSize, strides, pad) {\n const $dy = convertToTensor(dy, 'dy', 'avgPoolBackprop');\n const $input = convertToTensor(input, 'input', 'avgPoolBackprop');\n util.assert($input.rank === $dy.rank, () => `Rank of input (${$input.rank}) does not match rank of dy (${$dy.rank})`);\n let input4D = $input;\n let dy4D = $dy;\n let reshapedTo4D = false;\n if ($input.rank === 3) {\n reshapedTo4D = true;\n input4D =\n reshape($input, [1, $input.shape[0], $input.shape[1], $input.shape[2]]);\n dy4D = reshape($dy, [1, $dy.shape[0], $dy.shape[1], $dy.shape[2]]);\n }\n util.assert(dy4D.rank === 4, () => `Error in avgPoolBackprop: dy must be rank 4 but got rank ` +\n `${dy4D.rank}.`);\n util.assert(input4D.rank === 4, () => `Error in avgPoolBackprop: input must be rank 4 but got rank ` +\n `${input4D.rank}.`);\n const forward = backend => {\n const convInfo = conv_util.computePool2DInfo(input4D.shape, filterSize, strides, 1 /* dilations */, pad);\n return backend.avgPoolBackprop(dy4D, input4D, convInfo);\n };\n const inputs = { dy: dy4D, input: input4D };\n const attrs = { filterSize, strides, pad };\n const res = ENGINE.runKernelFunc(forward, inputs, null, AvgPoolBackprop, attrs);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const avgPoolBackprop = op({ avgPoolBackprop_ });\n//# sourceMappingURL=avg_pool_backprop.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { AvgPool } from '../kernel_names';\nimport { avgPoolBackprop } from '../ops/avg_pool_backprop';\nexport const avgPoolGradConfig = {\n kernelName: AvgPool,\n inputsToSave: ['x'],\n gradFunc: (dy, saved, attrs) => {\n const [x] = saved;\n const { filterSize, strides, pad } = attrs;\n return {\n x: () => avgPoolBackprop(dy, x, filterSize, strides, pad)\n };\n }\n};\n//# sourceMappingURL=AvgPool_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { BatchMatMul } from '../kernel_names';\nimport { matMul } from '../ops/mat_mul';\nexport const batchMatMulGradConfig = {\n kernelName: BatchMatMul,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved, attrs) => {\n const [a, b] = saved;\n const { transposeA, transposeB } = attrs;\n if (!transposeA && !transposeB) {\n return {\n a: () => matMul(dy, b, false, true),\n b: () => matMul(a, dy, true, false)\n };\n }\n else if (!transposeA && transposeB) {\n return {\n a: () => matMul(dy, b, false, false),\n b: () => matMul(dy, a, true, false)\n };\n }\n else if (transposeA && !transposeB) {\n return {\n a: () => matMul(b, dy, false, true),\n b: () => matMul(a, dy, false, false)\n };\n }\n else {\n return {\n a: () => matMul(b, dy, true, true),\n b: () => matMul(dy, a, true, true)\n };\n }\n }\n};\n//# sourceMappingURL=BatchMatMul_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { BatchToSpaceND } from '../kernel_names';\nimport { spaceToBatchND } from '../ops/space_to_batch_nd';\nexport const batchToSpaceNDGradConfig = {\n kernelName: BatchToSpaceND,\n gradFunc: (dy, saved, attrs) => {\n const { blockShape, crops } = attrs;\n return { x: () => spaceToBatchND(dy, blockShape, crops) };\n }\n};\n//# sourceMappingURL=BatchToSpaceND_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { BroadcastTo } from '../kernel_names';\nimport { sum } from '../ops/sum';\nexport const broadcastToGradConfig = {\n kernelName: BroadcastTo,\n gradFunc: (dy, saved, attrs) => {\n const broadCastToAttrs = attrs;\n const inputShape = broadCastToAttrs.inputShape;\n const outputShape = broadCastToAttrs.shape;\n const reps = Array.from(outputShape);\n for (let i = inputShape.length - 1; i >= 0; i--) {\n if (inputShape[i] === outputShape[i]) {\n reps[i] = 1;\n }\n else if (inputShape[i] !== 1) {\n throw new Error(`broadcastTo(): [${inputShape}] cannot be broadcast to [${outputShape}].`);\n }\n }\n const axes = [];\n for (let i = 0; i < reps.length; i++) {\n if (reps[i] > 1) {\n axes.push(i);\n }\n }\n return { x: () => sum(dy, axes, true /* keepDims */) };\n }\n};\n//# sourceMappingURL=BroadcastTo_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Cast } from '../kernel_names';\nexport const castGradConfig = {\n kernelName: Cast,\n gradFunc: (dy) => {\n return { x: () => dy.clone() };\n }\n};\n//# sourceMappingURL=Cast_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Ceil } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const ceilGradConfig = {\n kernelName: Ceil,\n gradFunc: (dy) => {\n // TODO(manrajgrover): Return null for gradients when backprop supports it.\n return { x: () => zerosLike(dy) };\n }\n};\n//# sourceMappingURL=Ceil_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ClipByValue } from '../kernel_names';\nimport { greaterEqual } from '../ops/greater_equal';\nimport { lessEqual } from '../ops/less_equal';\nimport { logicalAnd } from '../ops/logical_and';\nimport { where } from '../ops/where';\nimport { zerosLike } from '../ops/zeros_like';\nexport const clipByValueGradConfig = {\n kernelName: ClipByValue,\n inputsToSave: ['x'],\n gradFunc: (dy, saved, attrs) => {\n const [x] = saved;\n const { clipValueMin, clipValueMax } = attrs;\n return {\n x: () => where(logicalAnd(greaterEqual(x, clipValueMin), lessEqual(x, clipValueMax)), dy, zerosLike(dy)),\n };\n }\n};\n//# sourceMappingURL=ClipByValue_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Concat } from '../kernel_names';\nimport { split } from '../ops/split';\nimport { parseAxisParam } from '../util';\nexport const concatGradConfig = {\n kernelName: Concat,\n saveAllInputs: true,\n gradFunc: (dy, saved, attrs) => {\n const shapes = saved.map(t => t.shape);\n const { axis } = attrs;\n const $axis = parseAxisParam(axis, saved[0].shape)[0];\n const sizeSplits = shapes.map(s => s[$axis]);\n const derTensors = split(dy, sizeSplits, $axis);\n return derTensors.map(t => () => t);\n }\n};\n//# sourceMappingURL=Concat_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Conv2D } from '../kernel_names';\nimport { conv2DBackpropFilter } from '../ops/conv2d_backprop_filter';\nimport { conv2DBackpropInput } from '../ops/conv2d_backprop_input';\nimport * as conv_util from '../ops/conv_util';\nimport * as util from '../util';\nexport const conv2DGradConfig = {\n kernelName: Conv2D,\n inputsToSave: ['x', 'filter'],\n gradFunc: (dy, saved, attrs) => {\n const [x4D, $filter] = saved;\n const { dilations, strides, pad, dataFormat } = attrs;\n util.assert(conv_util.tupleValuesAreOne(dilations), () => 'Error in gradient of conv2D: dilation rates greater than 1 ' +\n `are not yet supported in gradients. Got dilations '${dilations}'`);\n return {\n x: () => conv2DBackpropInput(x4D.shape, dy, $filter, strides, pad, dataFormat),\n filter: () => conv2DBackpropFilter(x4D, dy, $filter.shape, strides, pad, dataFormat)\n };\n }\n};\n//# sourceMappingURL=Conv2D_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Conv2DBackpropInput } from '../kernel_names';\nimport { conv2d } from '../ops/conv2d';\nimport { conv2DBackpropFilter } from '../ops/conv2d_backprop_filter';\nexport const conv2DBackpropInputGradConfig = {\n kernelName: Conv2DBackpropInput,\n inputsToSave: ['dy', 'filter'],\n gradFunc: (ddx, saved, attrs) => {\n const [dy, filter] = saved;\n const { strides, pad, dataFormat, dimRoundingMode } = attrs;\n return {\n dy: () => conv2d(ddx, filter, strides, pad, dataFormat, 1 /* dilations */, dimRoundingMode),\n filter: () => conv2DBackpropFilter(ddx, dy, filter.shape, strides, pad, dataFormat, dimRoundingMode)\n };\n }\n};\n//# sourceMappingURL=Conv2DBackpropInput_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Conv3DBackpropFilterV2 } from '../kernel_names';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the derivative of the filter of a 3D convolution.\n *\n * @param x The input tensor, of rank 5 or rank 4 of shape\n * [batch, depth, height, width, inChannels]. If rank 4, batch of 1 is\n * assumed.\n * @param dy The dy image, of rank 5 or rank 4, of shape\n * [batch, depth, height, width, outDepth]. If rank 4, batch of 1 is\n * assumed.\n * @param filterShape The shape of the filter, length 5,\n * [filterDepth, filterHeight, filterWidth, inDepth, outDepth].\n * @param strides The strides of the convolution: [strideDepth, strideHeight,\n * strideWidth].\n * @param pad A string from: 'same', 'valid'. 
The type of padding algorithm\n * used in the forward prop of the op.\n */\nfunction conv3DBackpropFilter_(x, dy, filterShape, strides, pad) {\n let x5D = x;\n if (x.rank === 4) {\n x5D = reshape(x, [1, x.shape[0], x.shape[1], x.shape[2], x.shape[3]]);\n }\n let dy5D = dy;\n if (dy5D.rank === 4) {\n dy5D = reshape(dy, [1, dy.shape[0], dy.shape[1], dy.shape[2], dy.shape[3]]);\n }\n util.assert(x5D.rank === 5, () => `Error in conv3dDerFilter: input must be rank 5, but got shape ` +\n `${x5D.shape}.`);\n util.assert(dy5D.rank === 5, () => `Error in conv3dDerFilter: dy must be rank 5, but got shape ` +\n `${dy5D.shape}.`);\n util.assert(filterShape.length === 5, () => `Error in conv3dDerFilter: filterShape must be length 5, but got ` +\n `${filterShape}.`);\n util.assert(x5D.shape[4] === filterShape[3], () => `Error in conv3dDerFilter: depth of input ${x5D.shape[4]}) must ` +\n `match input depth in filter (${filterShape[3]}.`);\n util.assert(dy5D.shape[4] === filterShape[4], () => `Error in conv3dDerFilter: depth of dy (${dy5D.shape[4]}) must ` +\n `match output depth for filter (${filterShape[4]}).`);\n const forward = backend => {\n const dilations = 1;\n const convInfo = conv_util.computeConv3DInfo(x5D.shape, filterShape, strides, dilations, pad);\n return backend.conv3dDerFilter(x5D, dy5D, convInfo);\n };\n const inputs = { x: x5D, dy: dy5D };\n const attrs = { strides, pad, filterShape };\n return ENGINE.runKernelFunc(forward, inputs, null, Conv3DBackpropFilterV2, attrs);\n}\nexport const conv3DBackpropFilter = op({ conv3DBackpropFilter_ });\n//# sourceMappingURL=conv3d_backprop_filter.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Conv3D } from '../kernel_names';\nimport { conv3DBackpropFilter } from '../ops/conv3d_backprop_filter';\nimport { conv3DBackpropInput } from '../ops/conv3d_backprop_input';\nimport { tupleValuesAreOne } from '../ops/conv_util';\nimport * as util from '../util';\nexport const conv3DGradConfig = {\n kernelName: Conv3D,\n inputsToSave: ['x', 'filter'],\n gradFunc: (dy, saved, attrs) => {\n const { dilations, strides, pad } = attrs;\n util.assert(tupleValuesAreOne(dilations), () => 'Error in gradient of conv3D: dilation rates greater than 1 are ' +\n `not yet supported in gradients. Got dilations '${dilations}'`);\n const [x5D, $filter] = saved;\n return {\n x: () => conv3DBackpropInput(x5D.shape, dy, $filter, strides, pad),\n filter: () => conv3DBackpropFilter(x5D, dy, $filter.shape, strides, pad)\n };\n }\n};\n//# sourceMappingURL=Conv3D_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Cos } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { mul } from '../ops/mul';\nimport { neg } from '../ops/neg';\nimport { sin } from '../ops/sin';\nexport const cosGradConfig = {\n kernelName: Cos,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => mul(neg(sin(cast(x, 'float32'))), dy) };\n }\n};\n//# sourceMappingURL=Cos_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Cosh } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { mul } from '../ops/mul';\nimport { sinh } from '../ops/sinh';\nexport const coshGradConfig = {\n kernelName: Cosh,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => mul(sinh(cast(x, 'float32')), dy) };\n }\n};\n//# sourceMappingURL=Cosh_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Cumsum } from '../kernel_names';\nimport { getAxesPermutation } from '../ops/axis_util';\nimport { cumsum } from '../ops/cumsum';\nimport { transpose } from '../ops/transpose';\nexport const cumsumGradConfig = {\n kernelName: Cumsum,\n inputsToSave: ['x'],\n gradFunc: (dy, saved, attrs) => {\n const [x] = saved;\n const { axis, exclusive, reverse } = attrs;\n return {\n x: () => {\n const permutation = getAxesPermutation([axis], x.rank);\n let out = cumsum(dy, axis, exclusive, !reverse);\n if (permutation != null) {\n out = transpose(out, permutation);\n }\n return out;\n }\n };\n }\n};\n//# sourceMappingURL=Cumsum_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { DepthwiseConv2dNative } from '../kernel_names';\nimport * as conv_util from '../ops/conv_util';\nimport { depthwiseConv2dNativeBackpropFilter } from '../ops/depthwise_conv2d_native_backprop_filter';\nimport { depthwiseConv2dNativeBackpropInput } from '../ops/depthwise_conv2d_native_backprop_input';\nimport * as util from '../util';\nexport const depthwiseConv2dNativeGradConfig = {\n kernelName: DepthwiseConv2dNative,\n inputsToSave: ['x', 'filter'],\n gradFunc: (dy, saved, attrs) => {\n const { dilations, strides, pad, dimRoundingMode } = attrs;\n const $dilations = dilations == null ? [1, 1] : dilations;\n util.assert(conv_util.tupleValuesAreOne($dilations), () => 'Error in gradient of depthwiseConv2dNative: dilation rates ' +\n `greater than 1 are not yet supported. 
Got dilations ` +\n `'${$dilations}'`);\n const [x, filter] = saved;\n util.assert(x.rank === 4, () => `Error in gradient of depthwiseConv2dNative: input must be ` +\n `rank 4, but got rank ${x.rank}.`);\n util.assert(filter.rank === 4, () => `Error in gradient of depthwiseConv2dNative: filter must be ` +\n `rank 4, but got rank ${filter.rank}.`);\n util.assert(x.shape[3] === filter.shape[2], () => `Error in gradient of depthwiseConv2d: number of input ` +\n `channels (${x.shape[3]}) must match the inChannels dimension ` +\n `in filter ${filter.shape[2]}.`);\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, $dilations), () => 'Error in gradient of depthwiseConv2d: Either strides or ' +\n `dilations must be 1. Got strides ${strides} and dilations ` +\n `'${$dilations}'.`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in depthwiseConv2d: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n return {\n x: () => depthwiseConv2dNativeBackpropInput(x.shape, dy, filter, strides, pad, dilations, dimRoundingMode),\n filter: () => depthwiseConv2dNativeBackpropFilter(x, dy, filter.shape, strides, pad, dilations, dimRoundingMode),\n };\n }\n};\n//# sourceMappingURL=DepthwiseConv2dNative_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Dilation2D, Dilation2DBackpropFilter, Dilation2DBackpropInput } from '../kernel_names';\nexport const dilation2dGradConfig = {\n kernelName: Dilation2D,\n inputsToSave: ['x', 'filter'],\n gradFunc: (dy, saved, attrs) => {\n const [x, filter] = saved;\n const inputInputs = { x, filter, dy };\n const filterInputs = { x, filter, dy };\n return {\n x: () => ENGINE.runKernel(Dilation2DBackpropInput, inputInputs, attrs),\n filter: () => ENGINE.runKernel(Dilation2DBackpropFilter, filterInputs, attrs)\n };\n }\n};\n//# sourceMappingURL=Dilation2D_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Div } from '../kernel_names';\nimport * as broadcast_util from '../ops/broadcast_util';\nimport { cast } from '../ops/cast';\nimport { div } from '../ops/div';\nimport { mul } from '../ops/mul';\nimport { neg } from '../ops/neg';\nimport { reshape } from '../ops/reshape';\nimport { square } from '../ops/square';\nimport { sum } from '../ops/sum';\nexport const divGradConfig = {\n kernelName: Div,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved) => {\n const [a, b] = saved;\n const outShape = broadcast_util.assertAndGetBroadcastShape(a.shape, b.shape);\n const derA = () => {\n const res = div(dy, cast(b, 'float32'));\n const reduceAxes = broadcast_util.getReductionAxes(a.shape, outShape);\n if (reduceAxes.length > 0) {\n return reshape(sum(res, reduceAxes), a.shape);\n }\n return res;\n };\n const derB = () => {\n let res = mul(dy, cast(a, 'float32'));\n const reduceAxes = broadcast_util.getReductionAxes(b.shape, outShape);\n if (reduceAxes.length > 0) {\n res = reshape(sum(res, reduceAxes), b.shape);\n }\n const tmp = square(b);\n return neg(div(res, cast(tmp, 'float32')));\n };\n return { a: derA, b: derB };\n }\n};\n//# sourceMappingURL=Div_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Elu, EluGrad } from '../kernel_names';\nexport const eluGradConfig = {\n kernelName: Elu,\n outputsToSave: [true],\n gradFunc: (dy, saved) => {\n const [y] = saved;\n const backPropKernelFunc = (backend) => {\n return backend.eluDer(dy, y);\n };\n const inputs = { dy, y };\n return {\n x: () => ENGINE.runKernelFunc(backPropKernelFunc, inputs, null /* grad */, EluGrad)\n };\n }\n};\n//# sourceMappingURL=Elu_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Erf } from '../kernel_names';\nimport { exp } from '../ops/exp';\nimport { mul } from '../ops/mul';\nimport { neg } from '../ops/neg';\nimport { square } from '../ops/square';\nexport const erfGradConfig = {\n kernelName: Erf,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n const a = mul(exp(neg(square(x))), 2 / Math.sqrt(Math.PI));\n return { x: () => mul(dy, a) };\n }\n};\n//# sourceMappingURL=Erf_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Exp } from '../kernel_names';\nimport { mul } from '../ops/mul';\nexport const expGradConfig = {\n kernelName: Exp,\n outputsToSave: [true],\n gradFunc: (dy, saved) => {\n const [y] = saved;\n return { x: () => mul(dy, y) };\n }\n};\n//# sourceMappingURL=Exp_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Expm1 } from '../kernel_names';\nimport { exp } from '../ops/exp';\nimport { mul } from '../ops/mul';\nexport const expm1GradConfig = {\n kernelName: Expm1,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => mul(dy, exp(x)) };\n }\n};\n//# sourceMappingURL=Expm1_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Floor } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const floorGradConfig = {\n kernelName: Floor,\n gradFunc: (dy) => {\n return { x: () => zerosLike(dy) };\n }\n};\n//# sourceMappingURL=Floor_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { FloorDiv } from '../kernel_names';\nimport { assertAndGetBroadcastShape, getReductionAxes } from '../ops/broadcast_util';\nimport { cast } from '../ops/cast';\nimport { div } from '../ops/div';\nimport { mul } from '../ops/mul';\nimport { neg } from '../ops/neg';\nimport { reshape } from '../ops/reshape';\nimport { square } from '../ops/square';\nimport { sum } from '../ops/sum';\nexport const floorDivGradConfig = {\n kernelName: FloorDiv,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved) => {\n const [a, b] = saved;\n const outShape = assertAndGetBroadcastShape(a.shape, b.shape);\n const derA = () => {\n const res = div(dy, cast(b, 'float32'));\n const reduceAxes = getReductionAxes(a.shape, outShape);\n if (reduceAxes.length > 0) {\n return reshape(sum(res, reduceAxes), a.shape);\n }\n return res;\n };\n const derB = () => {\n let res = mul(dy, cast(a, 'float32'));\n const reduceAxes = getReductionAxes(b.shape, outShape);\n if (reduceAxes.length > 0) {\n res = reshape(sum(res, reduceAxes), b.shape);\n }\n const tmp = square(b);\n return neg(div(res, cast(tmp, 'float32')));\n };\n return { a: derA, b: derB };\n }\n};\n//# sourceMappingURL=FloorDiv_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { FusedBatchNorm } from '../kernel_names';\nimport { add } from '../ops/add';\nimport { getReductionAxes } from '../ops/broadcast_util';\nimport { mul } from '../ops/mul';\nimport { reshape } from '../ops/reshape';\nimport { rsqrt } from '../ops/rsqrt';\nimport { scalar } from '../ops/scalar';\nimport { sub } from '../ops/sub';\nimport { sum } from '../ops/sum';\nimport { tile } from '../ops/tile';\nexport const fusedBatchNormGradConfig = {\n kernelName: FusedBatchNorm,\n inputsToSave: ['x', 'mean', 'variance', 'scale'],\n gradFunc: (dy, saved, attrs) => {\n const { varianceEpsilon } = attrs;\n const [x, mean, variance, scale] = saved;\n const scaleValue = scale == null ? scalar(1) : scale;\n const reductionAxes = getReductionAxes(mean.shape, x.shape);\n const tileShape = [];\n if (mean.rank === 1) {\n for (let i = 0; i < x.shape.length - 1; ++i) {\n tileShape.push(x.shape[i]);\n }\n tileShape.push(1);\n }\n const xMinusMean = sub(x, mean);\n const dyTimesScaleValue = mul(dy, scaleValue);\n const oneOverSqrtVariance = rsqrt(add(variance, scalar(varianceEpsilon)));\n const minusHalfRCube = mul(mul(mul(oneOverSqrtVariance, oneOverSqrtVariance), oneOverSqrtVariance), scalar(-0.5));\n const derX = () => {\n if (mean.rank === 1) {\n return reshape(mul(mul(dy, tile(reshape(oneOverSqrtVariance, [1, 1, 1, mean.shape[0]]), tileShape)), scaleValue), x.shape);\n }\n else {\n return reshape(mul(mul(dy, oneOverSqrtVariance), scaleValue), x.shape);\n }\n };\n const derMean = () => {\n let meanDer = mul(mul(oneOverSqrtVariance, scalar(-1)), dyTimesScaleValue);\n if (mean.rank === 1) {\n meanDer = sum(meanDer, reductionAxes);\n }\n return reshape(meanDer, mean.shape);\n };\n const derVariance = () => {\n let varianceDer = mul(mul(minusHalfRCube, xMinusMean), dyTimesScaleValue);\n if (mean.rank === 1) {\n varianceDer = sum(varianceDer, reductionAxes);\n }\n return reshape(varianceDer, mean.shape);\n };\n const derScale = () => {\n const xMinusMean2TimesRsqrt = mul(xMinusMean, oneOverSqrtVariance);\n let scaleDer = mul(dy, xMinusMean2TimesRsqrt);\n if (mean.rank === 1) {\n scaleDer = sum(scaleDer, reductionAxes);\n }\n return reshape(scaleDer, mean.shape);\n };\n const derOffset = () => {\n let offsetDer = dy;\n if (mean.rank === 1) {\n offsetDer = sum(offsetDer, reductionAxes);\n }\n return reshape(offsetDer, mean.shape);\n };\n return {\n x: derX,\n mean: derMean,\n variance: derVariance,\n scale: derScale,\n offset: derOffset\n };\n }\n};\n//# sourceMappingURL=FusedBatchNorm_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { GatherV2 } from '../kernel_names';\nimport { getUndoAxesPermutation } from '../ops/axis_util';\nimport { reshape } from '../ops/reshape';\nimport { transpose } from '../ops/transpose';\nimport { unsortedSegmentSum } from '../ops/unsorted_segment_sum';\nimport { parseAxisParam } from '../util';\nexport const gatherGradConfig = {\n kernelName: GatherV2,\n inputsToSave: ['x', 'indices'],\n gradFunc: (dy, saved, attrs) => {\n const [x, indices] = saved;\n const { axis } = attrs;\n const parsedAxis = parseAxisParam(axis, x.shape)[0];\n const derX = () => {\n const paramsShape = x.shape;\n const indicesSize = indices.size;\n const outerShape = paramsShape.slice(0, parsedAxis);\n const outerDims = outerShape.length;\n const innerShape = paramsShape.slice(axis, paramsShape.length).slice(1);\n const innerDims = innerShape.length;\n const outerAxesIndices = arrayRange(0, outerDims);\n const innerAxesIndices = arrayRange(outerDims + 1, outerDims + 1 + innerDims);\n const valuesShape = arrayConcat([outerShape, [indicesSize], innerShape]);\n const values = reshape(dy, valuesShape);\n const reshapedIndices = reshape(indices, [indicesSize]);\n const transposeDims = arrayConcat([[outerDims], outerAxesIndices, innerAxesIndices]);\n const valuesTranspose = transpose(values, transposeDims);\n let paramsGrad = unsortedSegmentSum(valuesTranspose, reshapedIndices, x.shape[parsedAxis]);\n const invertTransposeDims = getUndoAxesPermutation(transposeDims);\n paramsGrad = transpose(paramsGrad, invertTransposeDims);\n return paramsGrad;\n };\n return { x: derX, indices: () => indices };\n }\n};\nfunction arrayRange(start, stop) {\n const result = [];\n for (let i = start; i < stop; ++i) {\n result.push(i);\n }\n return result;\n}\nfunction arrayConcat(arrays) {\n const result = [];\n for (let i = 0; i < arrays.length; ++i) {\n for (let j = 0; j < arrays[i].length; ++j) {\n result.push(arrays[i][j]);\n }\n }\n return result;\n}\n//# sourceMappingURL=GatherV2_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { GreaterEqual } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const greaterEqualGradConfig = {\n kernelName: GreaterEqual,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved) => {\n const [a, b] = saved;\n return { a: () => zerosLike(a), b: () => zerosLike(b) };\n }\n};\n//# sourceMappingURL=GreaterEqual_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Identity } from '../kernel_names';\nimport { cast } from '../ops/cast';\nexport const identityGradConfig = {\n kernelName: Identity,\n gradFunc: (dy) => {\n return { x: () => cast(dy, 'float32') };\n }\n};\n//# sourceMappingURL=Identity_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { IsFinite } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const isFiniteGradConfig = {\n kernelName: IsFinite,\n gradFunc: (dy) => {\n // TODO(nsthorat): Let gradients be null for cases where we want to stop\n // backpropagation.\n return { x: () => zerosLike(dy) };\n }\n};\n//# sourceMappingURL=IsFinite_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { IsInf } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const isInfGradConfig = {\n kernelName: IsInf,\n gradFunc: (dy) => {\n // TODO(nsthorat): Let gradients be null for cases where we want to stop\n // backpropagation.\n return { x: () => zerosLike(dy) };\n }\n};\n//# sourceMappingURL=IsInf_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { IsNan } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const isNanGradConfig = {\n kernelName: IsNan,\n gradFunc: (dy) => {\n // TODO(nsthorat): Let gradients be null for cases where we want to stop\n // backpropagation.\n return { x: () => zerosLike(dy) };\n }\n};\n//# sourceMappingURL=IsNan_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Log1p } from '../kernel_names';\nimport { add } from '../ops/add';\nimport { div } from '../ops/div';\nexport const log1pGradConfig = {\n kernelName: Log1p,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => div(dy, add(x, 1)) };\n }\n};\n//# sourceMappingURL=Log1p_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Log } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { div } from '../ops/div';\nexport const logGradConfig = {\n kernelName: Log,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => div(dy, cast(x, 'float32')) };\n }\n};\n//# sourceMappingURL=Log_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { LogSoftmax } from '../kernel_names';\nimport { exp } from '../ops/exp';\nimport { mul } from '../ops/mul';\nimport { sub } from '../ops/sub';\nimport { sum } from '../ops/sum';\nexport const logSoftmaxGradConfig = {\n kernelName: LogSoftmax,\n inputsToSave: [],\n outputsToSave: [true],\n gradFunc: (dy, saved, attrs) => {\n const [value] = saved;\n const { axis } = attrs;\n return {\n logits: () => {\n const keepDims = true;\n const softmax = exp(value);\n return sub(dy, mul(sum(dy, axis, keepDims), softmax));\n }\n };\n }\n};\n//# sourceMappingURL=LogSoftmax_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { LRNBackprop } from '../kernel_names';\nimport { op } from './operation';\nfunction localResponseNormalizationBackprop_(x, y, dy, depthRadius = 5, bias = 1, alpha = 1, beta = 0.5) {\n const forward = backend => backend.LRNGrad(dy, x, y, depthRadius, bias, alpha, beta);\n const inputs = { x, y, dy };\n const attrs = { depthRadius, bias, alpha, beta };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, LRNBackprop, attrs);\n}\nexport const localResponseNormalizationBackprop = op({ localResponseNormalizationBackprop_ });\n//# sourceMappingURL=local_response_normalization_backprop.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { LRN } from '../kernel_names';\nimport { localResponseNormalizationBackprop } from '../ops/local_response_normalization_backprop';\nexport const lrnGradConfig = {\n kernelName: LRN,\n inputsToSave: ['x'],\n outputsToSave: [true],\n gradFunc: (dy, saved, attrs) => {\n const [x, y] = saved;\n const { depthRadius, bias, alpha, beta } = attrs;\n return {\n x: () => localResponseNormalizationBackprop(x, y, dy, depthRadius, bias, alpha, beta)\n };\n }\n};\n//# sourceMappingURL=LRN_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as axis_util from '../ops/axis_util';\nimport { cast } from '../ops/cast';\nimport { equal } from '../ops/equal';\nimport { mul } from '../ops/mul';\nimport { reshape } from '../ops/reshape';\n/**\n * Gradient helper function for the min and max operations.\n */\nexport function gradForMinAndMax(dy, y, xOrig, origAxes) {\n if (y.rank < xOrig.rank) {\n y = reshape(y, axis_util.expandShapeToKeepDim(y.shape, origAxes));\n }\n if (dy.rank < xOrig.rank) {\n dy = reshape(dy, axis_util.expandShapeToKeepDim(dy.shape, origAxes));\n }\n return {\n x: () => {\n const dx = mul(dy, cast(equal(xOrig, y), dy.dtype));\n return dx;\n }\n };\n}\n//# sourceMappingURL=min_max_grad_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Max } from '../kernel_names';\nimport * as util from '../util';\nimport { gradForMinAndMax } from './min_max_grad_util';\nexport const maxGradConfig = {\n kernelName: Max,\n inputsToSave: ['x'],\n outputsToSave: [true],\n gradFunc: (dy, saved, attrs) => {\n const maxAttrs = attrs;\n const { reductionIndices } = maxAttrs;\n const x = saved[0];\n const y = saved[1];\n const origAxes = util.parseAxisParam(reductionIndices, x.shape);\n const maxGrad = gradForMinAndMax(dy, y, x, origAxes);\n return {\n x: () => {\n return maxGrad['x']();\n }\n };\n }\n};\n//# sourceMappingURL=Max_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Maximum } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { greaterEqual } from '../ops/greater_equal';\nimport { less } from '../ops/less';\nimport { mul } from '../ops/mul';\nexport const maximumGradConfig = {\n kernelName: Maximum,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved) => {\n const [a, b] = saved;\n const derA = () => mul(dy, cast(greaterEqual(a, b), 'float32'));\n const derB = () => mul(dy, cast(less(a, b), 'float32'));\n return { a: derA, b: derB };\n }\n};\n//# sourceMappingURL=Maximum_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { MaxPool3DBackprop } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the backprop of a 3d max pool.\n *\n * @param dy The dy error, of rank 5 of shape\n * [batchSize, depth, height, width, channels].\n * assumed.\n * @param input The original input image, of rank 5 or rank 4 of shape\n * [batchSize, depth, height, width, channels].\n * @param output The original output image, of rank 5 of shape\n * [batchSize, outDepth, outHeight, outWidth, channels].\n * @param filterSize The filter size:\n * `[filterDepth, filterHeight, filterWidth]`.\n * `filterSize` is a single number,\n * then `filterDepth == filterHeight == filterWidth`.\n * @param strides The strides of the pooling:\n * `[strideDepth, strideHeight, strideWidth]`. If\n * `strides` is a single number, then `strideHeight == strideWidth`.\n * @param dilations Deprecated, this field will be gone in v3.0.0.\n * The dilation rates: `[dilationDepth, dilationHeight, dilationWidth]`\n * in which we sample input values across the depth, height and width\n * dimensions in dilated pooling.\n * Defaults to `[1, 1, 1]`. If `dilations` is a single number,\n * then `dilationDepth == dilationHeight == dilationWidth`.\n * If it is greater than 1, then all values of `strides` must be 1.\n * @param pad A string from: 'same', 'valid'. 
The type of padding algorithm\n * used in the forward prop of the op.\n * @param dimRoundingMode A string from: 'ceil', 'round', 'floor'. The\n * rounding mode used when computing output dimensions if pad is a\n * number. If none is provided, it will not round and error if the output\n * is of fractional size.\n */\nfunction maxPool3dBackprop_(dy, input, output, filterSize, strides, dilations = [1, 1, 1], pad, dimRoundingMode) {\n const $dy = convertToTensor(dy, 'dy', 'maxPool3dBackprop');\n const $input = convertToTensor(input, 'input', 'maxPool3dBackprop');\n const $output = convertToTensor(output, 'output', 'maxPool3dBackprop');\n let dy5D = $dy;\n let input5D = $input;\n let output5D = $output;\n let reshapedTo5D = false;\n if ($input.rank === 4) {\n reshapedTo5D = true;\n dy5D = reshape($dy, [1, $dy.shape[0], $dy.shape[1], $dy.shape[2], $dy.shape[3]]);\n input5D = reshape($input, [\n 1, $input.shape[0], $input.shape[1], $input.shape[2], $input.shape[3]\n ]);\n output5D = reshape($output, [\n 1, $output.shape[0], $output.shape[1], $output.shape[2], $output.shape[3]\n ]);\n }\n util.assert(dy5D.rank === 5, () => `Error in maxPool3dBackprop: dy must be rank 5 but got rank ` +\n `${dy5D.rank}.`);\n util.assert(input5D.rank === 5, () => `Error in maxPool3dBackprop: input must be rank 5 but got rank ` +\n `${input5D.rank}.`);\n util.assert(output5D.rank === 5, () => `Error in maxPool3dBackprop: output must be rank 5 but got rank ` +\n `${output5D.rank}.`);\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in maxPool3dBackprop: Either strides or dilations ' +\n `must be 1. Got strides ${strides} and dilations '${dilations}'`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in maxPool3dBackprop: pad must be an integer when ` +\n `using, dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const forward = backend => {\n const convInfo = conv_util.computePool3DInfo(input5D.shape, filterSize, strides, dilations, pad, dimRoundingMode);\n return backend.maxPool3dBackprop(dy5D, input5D, output5D, convInfo);\n };\n const inputs = { dy: dy5D, input: input5D, output: output5D };\n const attrs = { filterSize, strides, dilations, pad, dimRoundingMode };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, MaxPool3DBackprop, attrs);\n if (reshapedTo5D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3], res.shape[4]]);\n }\n return res;\n}\nexport const maxPool3dBackprop = op({ maxPool3dBackprop_ });\n//# sourceMappingURL=max_pool_3d_backprop.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { MaxPool3D } from '../kernel_names';\nimport { maxPool3dBackprop } from '../ops/max_pool_3d_backprop';\nexport const maxPool3DGradConfig = {\n kernelName: MaxPool3D,\n inputsToSave: ['x'],\n outputsToSave: [true],\n gradFunc: (dy, saved, attrs) => {\n const [x, y] = saved;\n const { filterSize, strides, dilations, pad, dimRoundingMode } = attrs;\n const $dilations = dilations == null ? [1, 1, 1] : dilations;\n return {\n x: () => maxPool3dBackprop(dy, x, y, filterSize, strides, $dilations, pad, dimRoundingMode)\n };\n }\n};\n//# sourceMappingURL=MaxPool3D_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { MaxPoolBackprop } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\n/**\n * Computes the backprop of a 2D max pool.\n *\n * @param dy The dy error, of rank 4 or rank 3 of shape\n * [batchSize, height, width, channels]. If rank 3, batch of 1 is\n * assumed.\n * @param input The original input image, of rank 4, of shape\n * [batchSize, height, width, channels].\n * @param output The original output image, of rank 4, of shape\n * [batchSize, outHeight, outWidth, channels].\n * @param filterSize The filter size: `[filterHeight, filterWidth]`. If\n * `filterSize` is a single number, then `filterHeight == filterWidth`.\n * @param strides The strides of the pooling: `[strideHeight, strideWidth]`. If\n * `strides` is a single number, then `strideHeight == strideWidth`.\n * @param pad A string from: 'same', 'valid'. The type of padding algorithm\n * used in the forward prop of the op.\n * @param dimRoundingMode A string from: 'ceil', 'round', 'floor'. The\n * rounding mode used when computing output dimensions if pad is a\n * number. 
If none is provided, it will not round and error if the output\n * is of fractional size.\n */\nfunction maxPoolBackprop_(dy, input, output, filterSize, strides, pad, dimRoundingMode) {\n const $dy = convertToTensor(dy, 'dy', 'maxPoolBackprop');\n const $input = convertToTensor(input, 'input', 'maxPoolBackprop');\n const $output = convertToTensor(output, 'output', 'maxPoolBackprop');\n util.assert($input.rank === $dy.rank, () => `Rank of input (${$input.rank}) does not match rank of dy ` +\n `(${$dy.rank})`);\n util.assert($dy.rank === 4, () => `Error in maxPoolBackprop: dy must be rank 4 but got rank ` +\n `${$dy.rank}.`);\n util.assert($input.rank === 4, () => `Error in maxPoolBackprop: input must be rank 4 but got rank ` +\n `${$input.rank}.`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in maxPoolBackprop: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const forward = backend => {\n const convInfo = conv_util.computePool2DInfo($input.shape, filterSize, strides, 1 /* dilations */, pad, dimRoundingMode);\n return backend.maxPoolBackprop($dy, $input, $output, convInfo);\n };\n const inputs = { dy: $dy, input: $input, output: $output };\n const attrs = { filterSize, strides, pad, dimRoundingMode };\n return ENGINE.runKernelFunc(forward, inputs, null, MaxPoolBackprop, attrs);\n}\nexport const maxPoolBackprop = op({ maxPoolBackprop_ });\n//# sourceMappingURL=max_pool_backprop.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { MaxPool } from '../kernel_names';\nimport { maxPoolBackprop } from '../ops/max_pool_backprop';\nexport const maxPoolGradConfig = {\n kernelName: MaxPool,\n inputsToSave: ['x'],\n outputsToSave: [true],\n gradFunc: (dy, saved, attrs) => {\n const [x, y] = saved;\n const { filterSize, strides, pad } = attrs;\n return {\n x: () => maxPoolBackprop(dy, x, y, filterSize, strides, pad)\n };\n }\n};\n//# sourceMappingURL=MaxPool_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Min } from '../kernel_names';\nimport * as util from '../util';\nimport { gradForMinAndMax } from './min_max_grad_util';\nexport const minGradConfig = {\n kernelName: Min,\n inputsToSave: ['x'],\n outputsToSave: [true],\n gradFunc: (dy, saved, attrs) => {\n const minAttrs = attrs;\n const { axis } = minAttrs;\n const [x, y] = saved;\n const origAxes = util.parseAxisParam(axis, x.shape);\n const minGrad = gradForMinAndMax(dy, y, x, origAxes);\n return {\n x: () => {\n return minGrad['x']();\n }\n };\n }\n};\n//# sourceMappingURL=Min_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Minimum } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { greater } from '../ops/greater';\nimport { lessEqual } from '../ops/less_equal';\nimport { mul } from '../ops/mul';\nexport const minimumGradConfig = {\n kernelName: Minimum,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved) => {\n const [a, b] = saved;\n const derA = () => mul(dy, cast(lessEqual(a, b), 'float32'));\n const derB = () => mul(dy, cast(greater(a, b), 'float32'));\n return { a: derA, b: derB };\n }\n};\n//# sourceMappingURL=Minimum_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { MirrorPad } from '../kernel_names';\nimport { slice } from '../ops/slice';\nexport const mirrorPadGradConfig = {\n kernelName: MirrorPad,\n inputsToSave: ['x'],\n gradFunc: (dy, saved, attrs) => {\n // Pad introduces values around the original tensor, so the gradient\n // slices the original shape out of the gradient.\n const x = saved[0];\n const { paddings } = attrs;\n const begin = paddings.map(p => p[0]);\n return { x: () => slice(dy, begin, x.shape) };\n }\n};\n//# sourceMappingURL=MirrorPad_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Mod } from '../kernel_names';\nimport { assertAndGetBroadcastShape, getReductionAxes } from '../ops/broadcast_util';\nimport { div } from '../ops/div';\nimport { floor } from '../ops/floor';\nimport { mul } from '../ops/mul';\nimport { neg } from '../ops/neg';\nimport { reshape } from '../ops/reshape';\nimport { sum } from '../ops/sum';\nexport const modGradConfig = {\n kernelName: Mod,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved) => {\n const [a, b] = saved;\n const outShape = assertAndGetBroadcastShape(a.shape, b.shape);\n const derA = () => {\n const reduceAxes = getReductionAxes(a.shape, outShape);\n if (reduceAxes.length > 0) {\n return reshape(sum(dy, reduceAxes), a.shape);\n }\n return dy;\n };\n const derB = () => {\n const res = mul(dy, neg(floor(div(a, b))));\n const reduceAxes = getReductionAxes(b.shape, outShape);\n if (reduceAxes.length > 0) {\n return reshape(sum(res, reduceAxes), b.shape);\n }\n return res;\n };\n return { a: derA, b: derB };\n }\n};\n//# sourceMappingURL=Mod_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Multiply } from '../kernel_names';\nimport { assertAndGetBroadcastShape, getReductionAxes } from '../ops/broadcast_util';\nimport { cast } from '../ops/cast';\nimport { mul } from '../ops/mul';\nimport { reshape } from '../ops/reshape';\nimport { sum } from '../ops/sum';\nexport const multiplyGradConfig = {\n kernelName: Multiply,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved) => {\n const [a, b] = saved;\n const outShape = assertAndGetBroadcastShape(a.shape, b.shape);\n const derA = () => {\n const res = mul(dy, cast(b, 'float32'));\n const reduceAxes = getReductionAxes(a.shape, outShape);\n if (reduceAxes.length > 0) {\n return reshape(sum(res, reduceAxes), a.shape);\n }\n return res;\n };\n const derB = () => {\n const res = mul(dy, cast(a, 'float32'));\n const reduceAxes = getReductionAxes(b.shape, outShape);\n if (reduceAxes.length > 0) {\n return reshape(sum(res, reduceAxes), b.shape);\n }\n return res;\n };\n return { a: derA, b: derB };\n }\n};\n//# sourceMappingURL=Multiply_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Negate } from '../kernel_names';\nimport { neg } from '../ops/neg';\nexport const negateGradConfig = {\n kernelName: Negate,\n gradFunc: (dy) => {\n return { x: () => neg(dy) };\n }\n};\n//# sourceMappingURL=Negate_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { OneHot } from '../kernel_names';\nimport { zeros } from '../ops/zeros';\nexport const oneHotGradConfig = {\n kernelName: OneHot,\n inputsToSave: ['indices'],\n gradFunc: (dy, saved) => {\n const indices = saved[0];\n return { indices: () => zeros(indices.shape, 'float32') };\n }\n};\n//# sourceMappingURL=OneHot_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { OnesLike } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const onesLikeGradConfig = {\n kernelName: OnesLike,\n gradFunc: (dy) => {\n return { x: () => zerosLike(dy) };\n }\n};\n//# sourceMappingURL=OnesLike_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { PadV2 } from '../kernel_names';\nimport { slice } from '../ops/slice';\nexport const padV2GradConfig = {\n kernelName: PadV2,\n inputsToSave: ['x'],\n gradFunc: (dy, saved, attrs) => {\n // Pad introduces values around the original tensor, so the gradient\n // slices the original shape out of the gradient.\n const x = saved[0];\n const { paddings } = attrs;\n const begin = paddings.map(p => p[0]);\n return { x: () => slice(dy, begin, x.shape) };\n }\n};\n//# sourceMappingURL=PadV2_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Pow } from '../kernel_names';\nimport * as broadcast_util from '../ops/broadcast_util';\nimport { cast } from '../ops/cast';\nimport { greater } from '../ops/greater';\nimport { log } from '../ops/log';\nimport { mul } from '../ops/mul';\nimport { pow } from '../ops/pow';\nimport { reshape } from '../ops/reshape';\nimport { scalar } from '../ops/scalar';\nimport { sub } from '../ops/sub';\nimport { sum } from '../ops/sum';\nimport { where } from '../ops/where';\nimport { zerosLike } from '../ops/zeros_like';\nexport const powGradConfig = {\n kernelName: Pow,\n inputsToSave: ['a', 'b'],\n outputsToSave: [true],\n gradFunc: (dy, saved) => {\n const [a, b, y] = saved;\n const base = a;\n const exp = b;\n const outShape = broadcast_util.assertAndGetBroadcastShape(base.shape, exp.shape);\n const derBase = () => {\n const expFloat = cast(exp, 'float32');\n let res = mul(dy, mul(expFloat, pow(base, sub(expFloat, scalar(1)))));\n const reduceAxes = broadcast_util.getReductionAxes(base.shape, outShape);\n if (reduceAxes.length > 0) {\n res = sum(res, reduceAxes);\n }\n return reshape(res, base.shape);\n };\n const derExp = () => {\n const condition = greater(base, 0);\n const logBase = where(condition, log(base), zerosLike(base));\n let res = mul(dy, mul(y, logBase));\n const reduceAxes = broadcast_util.getReductionAxes(exp.shape, outShape);\n if (reduceAxes.length > 0) {\n res = sum(res, reduceAxes);\n }\n return reshape(res, exp.shape);\n };\n return { a: derBase, b: derExp };\n }\n};\n//# sourceMappingURL=Pow_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Prelu } from '../kernel_names';\nimport { getReductionAxes } from '../ops/broadcast_util';\nimport { greater } from '../ops/greater';\nimport { mul } from '../ops/mul';\nimport { reshape } from '../ops/reshape';\nimport { sum } from '../ops/sum';\nimport { where } from '../ops/where';\nimport { zerosLike } from '../ops/zeros_like';\nexport const preluGradConfig = {\n kernelName: Prelu,\n inputsToSave: ['x', 'alpha'],\n gradFunc: (dy, saved) => {\n const [x, alpha] = saved;\n const mask = greater(x, 0);\n return {\n x: () => where(mask, dy, mul(dy, alpha)),\n alpha: () => {\n let res = where(mask, zerosLike(dy), mul(dy, x));\n const reduceAxes = getReductionAxes(alpha.shape, dy.shape);\n if (reduceAxes.length > 0) {\n res = sum(res, reduceAxes);\n }\n return reshape(res, alpha.shape);\n }\n };\n }\n};\n//# sourceMappingURL=Prelu_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Reciprocal } from '../kernel_names';\nimport { div } from '../ops/div';\nimport { neg } from '../ops/neg';\nimport { square } from '../ops/square';\nexport const reciprocalGradConfig = {\n kernelName: Reciprocal,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => div(dy, neg(square(x))) };\n }\n};\n//# sourceMappingURL=Reciprocal_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Relu6 } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { lessEqual } from '../ops/less_equal';\nimport { mul } from '../ops/mul';\nimport { step } from '../ops/step';\nexport const relu6GradConfig = {\n kernelName: Relu6,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n const mask = mul(lessEqual(x, 6), step(x));\n return { x: () => mul(dy, cast(mask, 'float32')) };\n }\n};\n//# sourceMappingURL=Relu6_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Relu } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { mul } from '../ops/mul';\nimport { step } from '../ops/step';\nexport const reluGradConfig = {\n kernelName: Relu,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => mul(dy, cast(step(x), 'float32')) };\n }\n};\n//# sourceMappingURL=Relu_grad.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Reshape } from '../kernel_names';\nimport { reshape } from '../ops/reshape';\nexport const reshapeGradConfig = {\n kernelName: Reshape,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => reshape(dy, x.shape) };\n }\n};\n//# sourceMappingURL=Reshape_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { ResizeBilinear, ResizeBilinearGrad } from '../kernel_names';\nexport const resizeBilinearGradConfig = {\n kernelName: ResizeBilinear,\n inputsToSave: ['images'],\n gradFunc: (dy, saved, attrs) => {\n const [images] = saved;\n const backPropKernelFunc = (backend) => {\n const { alignCorners } = attrs;\n return backend.resizeBilinearBackprop(dy, images, alignCorners);\n };\n const inputs = { images };\n const imagesDer = () => ENGINE.runKernelFunc(backPropKernelFunc, inputs, null /* gradient */, ResizeBilinearGrad, attrs);\n return { images: imagesDer };\n }\n};\n//# sourceMappingURL=ResizeBilinear_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { ResizeNearestNeighbor, ResizeNearestNeighborGrad } from '../kernel_names';\nexport const resizeNearestNeighborGradConfig = {\n kernelName: ResizeNearestNeighbor,\n inputsToSave: ['images'],\n gradFunc: (dy, saved, attrs) => {\n const [images] = saved;\n const backPropKernelFunc = (backend) => {\n const { alignCorners } = attrs;\n return backend.resizeNearestNeighborBackprop(dy, images, alignCorners);\n };\n const inputs = { images };\n const imagesDer = () => ENGINE.runKernelFunc(backPropKernelFunc, inputs, null /* gradient */, ResizeNearestNeighborGrad, attrs);\n return { images: imagesDer };\n }\n};\n//# sourceMappingURL=ResizeNearestNeighbor_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Reverse } from '../kernel_names';\nimport { reverse } from '../ops/reverse';\nimport { parseAxisParam } from '../util';\nexport const reverseGradConfig = {\n kernelName: Reverse,\n gradFunc: (dy, saved, attrs) => {\n const { dims } = attrs;\n const axes = parseAxisParam(dims, dy.shape);\n return { x: () => reverse(dy, axes) };\n }\n};\n//# sourceMappingURL=Reverse_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Round } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const roundGradConfig = {\n kernelName: Round,\n gradFunc: (dy) => {\n // TODO(nsthorat): Let gradients be null for cases where we want to stop\n // backpropgation.\n return { x: () => zerosLike(dy) };\n }\n};\n//# sourceMappingURL=Round_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Rsqrt } from '../kernel_names';\nimport { div } from '../ops/div';\nimport { mul } from '../ops/mul';\nimport { neg } from '../ops/neg';\nimport { pow } from '../ops/pow';\nexport const rsqrtGradConfig = {\n kernelName: Rsqrt,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => neg(div(dy, mul(pow(x, 1.5), 2))) };\n }\n};\n//# sourceMappingURL=Rsqrt_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { SelectV2 } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { logicalNot } from '../ops/logical_not';\nimport { mul } from '../ops/mul';\nimport { zerosLike } from '../ops/zeros_like';\nexport const selectV2PoolGradConfig = {\n kernelName: SelectV2,\n inputsToSave: ['condition'],\n gradFunc: (dy, saved) => {\n const [condition] = saved;\n return {\n // TODO(julianoks): Return null for condition gradient\n // when backprop supports it.\n condition: () => cast(zerosLike(condition), 'float32'),\n t: () => mul(dy, cast(condition, dy.dtype)),\n e: () => mul(dy, cast(logicalNot(condition), dy.dtype))\n };\n }\n};\n//# sourceMappingURL=SelectV2_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Selu } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { exp } from '../ops/exp';\nimport { greater } from '../ops/greater';\nimport { mul } from '../ops/mul';\nimport { scalar } from '../ops/scalar';\nimport { SELU_SCALE, SELU_SCALEALPHA } from '../ops/selu_util';\nimport { where } from '../ops/where';\nexport const seluGradConfig = {\n kernelName: Selu,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return {\n x: () => {\n const mask = greater(x, scalar(0));\n const scaleAlpha = scalar(SELU_SCALEALPHA);\n const scale = scalar(SELU_SCALE);\n const greaterThanZeroDer = mul(dy, scale);\n const lessEqualZeroDer = mul(mul(dy, scaleAlpha), exp(cast(x, 'float32')));\n return where(mask, greaterThanZeroDer, lessEqualZeroDer);\n }\n };\n }\n};\n//# sourceMappingURL=Selu_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sigmoid } from '../kernel_names';\nimport { mul } from '../ops/mul';\nimport { scalar } from '../ops/scalar';\nimport { sub } from '../ops/sub';\nexport const sigmoidGradConfig = {\n kernelName: Sigmoid,\n outputsToSave: [true],\n gradFunc: (dy, saved) => {\n const [y] = saved;\n return { x: () => mul(dy, mul(y, sub(scalar(1), y))) };\n }\n};\n//# sourceMappingURL=Sigmoid_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sign } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const signGradConfig = {\n kernelName: Sign,\n gradFunc: (dy) => {\n return { x: () => zerosLike(dy) };\n }\n};\n//# sourceMappingURL=Sign_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sin } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { cos } from '../ops/cos';\nimport { mul } from '../ops/mul';\nexport const sinGradConfig = {\n kernelName: Sin,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => mul(cos(cast(x, 'float32')), dy) };\n }\n};\n//# sourceMappingURL=Sin_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sinh } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { cosh } from '../ops/cosh';\nimport { mul } from '../ops/mul';\nexport const sinhGradConfig = {\n kernelName: Sinh,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => mul(cosh(cast(x, 'float32')), dy) };\n }\n};\n//# sourceMappingURL=Sinh_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Slice } from '../kernel_names';\nimport { pad } from '../ops/pad';\nimport { parseSliceParams } from '../ops/slice_util';\nexport const sliceGradConfig = {\n kernelName: Slice,\n inputsToSave: ['x'],\n gradFunc: (dy, saved, attrs) => {\n const [x] = saved;\n const { begin, size } = attrs;\n const inputShape = x.shape;\n const [begin_, size_] = parseSliceParams(x, begin, size);\n // Create an Nx2 padding where the first column represents how many\n // zeros are prepended (at start) for each dimension, and the second\n // column indicates how many zeros are appended (at end).\n // The number of zeros to append is the shape of the input\n // elementwise-subtracted by both the begin vector and sizes vector.\n const paddings = [];\n for (let i = 0; i < dy.rank; i++) {\n paddings.push([begin_[i], inputShape[i] - begin_[i] - size_[i]]);\n }\n return { x: () => pad(dy, paddings) };\n }\n};\n//# sourceMappingURL=Slice_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Softmax } from '../kernel_names';\nimport { mul } from '../ops/mul';\nimport { sub } from '../ops/sub';\nimport { sum } from '../ops/sum';\nexport const softmaxGradConfig = {\n kernelName: Softmax,\n outputsToSave: [true],\n gradFunc: (dy, saved, attrs) => {\n const [y] = saved;\n const { dim } = attrs;\n const keepDims = true;\n const dyTimesY = mul(dy, y);\n return {\n logits: () => sub(dyTimesY, mul(sum(dyTimesY, [dim], keepDims), y))\n };\n }\n};\n//# sourceMappingURL=Softmax_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Softplus } from '../kernel_names';\nimport { mul } from '../ops/mul';\nimport { sigmoid } from '../ops/sigmoid';\nexport const softplusGradConfig = {\n kernelName: Softplus,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => mul(dy, sigmoid(x)) };\n }\n};\n//# sourceMappingURL=Softplus_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { SpaceToBatchND } from '../kernel_names';\nimport { batchToSpaceND } from '../ops/batch_to_space_nd';\nexport const spaceToBatchNDGradConfig = {\n kernelName: SpaceToBatchND,\n gradFunc: (dy, saved, attrs) => {\n const { blockShape, paddings } = attrs;\n return { x: () => batchToSpaceND(dy, blockShape, paddings) };\n }\n};\n//# sourceMappingURL=SpaceToBatchND_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { SplitV } from '../kernel_names';\nimport { concat } from '../ops/concat';\nexport const splitVGradConfig = {\n kernelName: SplitV,\n gradFunc: (dy, saved, attrs) => {\n const { axis } = attrs;\n return { x: () => concat(dy, axis) };\n }\n};\n//# sourceMappingURL=SplitV_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sqrt } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { div } from '../ops/div';\nimport { mul } from '../ops/mul';\nimport { sqrt } from '../ops/sqrt';\nexport const sqrtGradConfig = {\n kernelName: Sqrt,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => div(dy, mul(sqrt(cast(x, 'float32')), 2)) };\n }\n};\n//# sourceMappingURL=Sqrt_grad.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Square } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { mul } from '../ops/mul';\nexport const squareGradConfig = {\n kernelName: Square,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => mul(dy, mul(cast(x, 'float32'), 2)) };\n }\n};\n//# sourceMappingURL=Square_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { SquaredDifference } from '../kernel_names';\nimport { mul } from '../ops/mul';\nimport { scalar } from '../ops/scalar';\nimport { sub } from '../ops/sub';\nexport const squaredDifferenceGradConfig = {\n kernelName: SquaredDifference,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved) => {\n const [a, b] = saved;\n const two = scalar(2);\n const derA = () => mul(dy, mul(two, sub(a, b)));\n const derB = () => mul(dy, mul(two, sub(b, a)));\n return { a: derA, b: derB };\n }\n};\n//# sourceMappingURL=SquaredDifference_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Step } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const stepGradConfig = {\n kernelName: Step,\n gradFunc: (dy) => {\n // TODO(manrajgrover): Return null for gradients when backprop supports\n // it.\n return { x: () => zerosLike(dy) };\n }\n};\n//# sourceMappingURL=Step_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sub } from '../kernel_names';\nimport * as broadcast_util from '../ops/broadcast_util';\nimport { neg } from '../ops/neg';\nimport { reshape } from '../ops/reshape';\nimport { sum } from '../ops/sum';\nexport const subGradConfig = {\n kernelName: Sub,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved) => {\n const [a, b] = saved;\n const outShape = broadcast_util.assertAndGetBroadcastShape(a.shape, b.shape);\n const derA = () => {\n let res = dy;\n const reduceAxes = broadcast_util.getReductionAxes(a.shape, outShape);\n if (reduceAxes.length > 0) {\n res = sum(res, reduceAxes);\n }\n return reshape(res, a.shape);\n };\n const derB = () => {\n let res = dy;\n const reduceAxes = broadcast_util.getReductionAxes(b.shape, outShape);\n if (reduceAxes.length > 0) {\n res = sum(res, reduceAxes);\n }\n return reshape(neg(res), b.shape);\n };\n return { a: derA, b: derB };\n }\n};\n//# sourceMappingURL=Sub_grad.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sum } from '../kernel_names';\nimport { mul } from '../ops/mul';\nimport { ones } from '../ops/ones';\nimport { reshape } from '../ops/reshape';\nimport { parseAxisParam } from '../util';\nexport const sumGradConfig = {\n kernelName: Sum,\n inputsToSave: ['x'],\n gradFunc: (dy, saved, attrs) => {\n const [x] = saved;\n const expandedDyShape = x.shape.slice();\n const { axis } = attrs;\n const axes = parseAxisParam(axis, x.shape);\n axes.forEach(axis => {\n expandedDyShape[axis] = 1;\n });\n const expandedDy = reshape(dy, expandedDyShape);\n const derX = mul(expandedDy, ones(x.shape, 'float32'));\n return { x: () => derX };\n }\n};\n//# sourceMappingURL=Sum_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Tan } from '../kernel_names';\nimport { cos } from '../ops/cos';\nimport { div } from '../ops/div';\nimport { square } from '../ops/square';\nexport const tanGradConfig = {\n kernelName: Tan,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => div(dy, square(cos(x))) };\n }\n};\n//# sourceMappingURL=Tan_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Tanh } from '../kernel_names';\nimport { mul } from '../ops/mul';\nimport { scalar } from '../ops/scalar';\nimport { square } from '../ops/square';\nimport { sub } from '../ops/sub';\nexport const tanhGradConfig = {\n kernelName: Tanh,\n outputsToSave: [true],\n gradFunc: (dy, saved) => {\n const [y] = saved;\n return { x: () => mul(sub(scalar(1), square(y)), dy) };\n }\n};\n//# sourceMappingURL=Tanh_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Tile } from '../kernel_names';\nimport { add } from '../ops/add';\nimport { slice } from '../ops/slice';\nimport { zerosLike } from '../ops/zeros_like';\nexport const tileGradConfig = {\n kernelName: Tile,\n inputsToSave: ['x'],\n gradFunc: (dy, saved, attrs) => {\n const [x] = saved;\n const { reps } = attrs;\n const derX = () => {\n let xGrad = zerosLike(x);\n // TODO(cais): Maybe reduce memory footprint by avoiding repeated\n // slicing.\n if (x.rank === 1) {\n for (let i = 0; i < reps[0]; ++i) {\n xGrad = add(xGrad, slice(dy, [i * x.shape[0]], [x.shape[0]]));\n }\n }\n else if (x.rank === 2) {\n for (let i = 0; i < reps[0]; ++i) {\n for (let j = 0; j < reps[1]; ++j) {\n xGrad = add(xGrad, slice(dy, [i * x.shape[0], j * x.shape[1]], [\n x.shape[0], x.shape[1]\n ]));\n }\n }\n }\n else if (x.rank === 3) {\n for (let i = 0; i < reps[0]; ++i) {\n for (let j = 0; j < reps[1]; ++j) {\n for (let k = 0; k < reps[2]; ++k) {\n xGrad =\n add(xGrad, slice(dy, [i * x.shape[0], j * x.shape[1], k * x.shape[2]], [x.shape[0], x.shape[1], x.shape[2]]));\n }\n }\n }\n }\n else if (x.rank === 4) {\n for (let i = 0; i < reps[0]; ++i) {\n for (let j = 0; j < reps[1]; ++j) {\n for (let k = 0; k < reps[2]; ++k) {\n for (let l = 0; l < reps[3]; ++l) {\n xGrad =\n add(xGrad, slice(dy, [\n i * x.shape[0], j * x.shape[1], k * x.shape[2],\n l * x.shape[3]\n ], [x.shape[0], x.shape[1], x.shape[2], x.shape[3]]));\n }\n }\n }\n }\n }\n else {\n throw new Error(`Gradient for tile operation is not implemented for rank-` +\n `${x.rank} tensors yet.`);\n }\n return xGrad;\n };\n return { x: derX };\n },\n};\n//# sourceMappingURL=Tile_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Transpose } from '../kernel_names';\nimport * as axis_util from '../ops/axis_util';\nimport { transpose } from '../ops/transpose';\nexport const transposeGradConfig = {\n kernelName: Transpose,\n gradFunc: (dy, saved, attrs) => {\n const transposeAttrs = attrs;\n const { perm } = transposeAttrs;\n const undoPerm = axis_util.getUndoAxesPermutation(perm);\n return { x: () => transpose(dy, undoPerm) };\n }\n};\n//# sourceMappingURL=Transpose_grad.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Unpack } from '../kernel_names';\nimport { stack } from '../ops/stack';\nexport const unpackGradConfig = {\n kernelName: Unpack,\n gradFunc: (dy, saved, attrs) => {\n const unpackAttrs = attrs;\n const { axis } = unpackAttrs;\n return { value: () => stack(dy, axis) };\n }\n};\n//# sourceMappingURL=Unpack_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { UnsortedSegmentSum } from '../kernel_names';\nimport { expandDims } from '../ops/expand_dims';\nimport { gather } from '../ops/gather';\nimport { greaterEqual } from '../ops/greater_equal';\nimport { logicalAnd } from '../ops/logical_and';\nimport { maximum } from '../ops/maximum';\nimport { ones } from '../ops/ones';\nimport { scalar } from '../ops/scalar';\nimport { where } from '../ops/where';\nimport { zerosLike } from '../ops/zeros_like';\nexport const unsortedSegmentSumGradConfig = {\n kernelName: UnsortedSegmentSum,\n inputsToSave: ['segmentIds'],\n gradFunc: (dy, saved) => {\n const [segmentIds] = saved;\n const derX = () => {\n return gatherDropNegatives(dy, segmentIds);\n };\n return { x: derX };\n }\n};\nfunction gatherDropNegatives(x, indices) {\n // Helper function for unsorted segment ops. Gathers params for\n // positive segment ids and gathers 0 for inputs with negative segment id.\n // Mirrors _GatherDropNegatives from tensorflow/python/ops/math_grad.py\n const zeroClippedIndices = maximum(indices, zerosLike(indices));\n const gathered = gather(x, zeroClippedIndices);\n let isPositive = greaterEqual(indices, scalar(0, 'int32'));\n const numIters = gathered.rank - isPositive.rank;\n for (let i = 0; i < numIters; ++i) {\n isPositive = expandDims(isPositive, i + 1);\n }\n isPositive = logicalAnd(isPositive, ones(gathered.shape, 'bool'));\n const zeroSlice = zerosLike(gathered);\n return where(isPositive, gathered, zeroSlice);\n}\n//# sourceMappingURL=UnsortedSegmentSum_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ZerosLike } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const zerosLikeGradConfig = {\n kernelName: ZerosLike,\n gradFunc: (dy) => {\n return { x: () => zerosLike(dy) };\n }\n};\n//# sourceMappingURL=ZerosLike_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { absGradConfig } from './gradients/Abs_grad';\nimport { acosGradConfig } from './gradients/Acos_grad';\nimport { acoshGradConfig } from './gradients/Acosh_grad';\nimport { addGradConfig } from './gradients/Add_grad';\nimport { addNGradConfig } from './gradients/AddN_grad';\nimport { argMaxGradConfig } from './gradients/ArgMax_grad';\nimport { argMinGradConfig } from './gradients/ArgMin_grad';\nimport { asinGradConfig } from './gradients/Asin_grad';\nimport { asinhGradConfig } from './gradients/Asinh_grad';\nimport { atan2GradConfig } from './gradients/Atan2_grad';\nimport { atanGradConfig } from './gradients/Atan_grad';\nimport { atanhGradConfig } from './gradients/Atanh_grad';\nimport { avgPool3DGradConfig } from './gradients/AvgPool3D_grad';\nimport { avgPoolGradConfig } from './gradients/AvgPool_grad';\nimport { batchMatMulGradConfig } from './gradients/BatchMatMul_grad';\nimport { batchToSpaceNDGradConfig } from './gradients/BatchToSpaceND_grad';\nimport { broadcastToGradConfig } from './gradients/BroadcastTo_grad';\nimport { castGradConfig } from './gradients/Cast_grad';\nimport { ceilGradConfig } from './gradients/Ceil_grad';\nimport { clipByValueGradConfig } from './gradients/ClipByValue_grad';\nimport { concatGradConfig } from './gradients/Concat_grad';\nimport { conv2DGradConfig } from './gradients/Conv2D_grad';\nimport { conv2DBackpropInputGradConfig } from './gradients/Conv2DBackpropInput_grad';\nimport { conv3DGradConfig } from './gradients/Conv3D_grad';\nimport { cosGradConfig } from './gradients/Cos_grad';\nimport { coshGradConfig } from './gradients/Cosh_grad';\nimport { cumsumGradConfig } from './gradients/Cumsum_grad';\nimport { depthwiseConv2dNativeGradConfig } from './gradients/DepthwiseConv2dNative_grad';\nimport { dilation2dGradConfig } from './gradients/Dilation2D_grad';\nimport { divGradConfig } from './gradients/Div_grad';\nimport { eluGradConfig } from './gradients/Elu_grad';\nimport { erfGradConfig } from './gradients/Erf_grad';\nimport { expGradConfig } from './gradients/Exp_grad';\nimport { expm1GradConfig } from './gradients/Expm1_grad';\nimport { floorGradConfig } from './gradients/Floor_grad';\nimport { floorDivGradConfig } from './gradients/FloorDiv_grad';\nimport { fusedBatchNormGradConfig } from './gradients/FusedBatchNorm_grad';\nimport { gatherGradConfig } from './gradients/GatherV2_grad';\nimport { greaterEqualGradConfig } from './gradients/GreaterEqual_grad';\nimport { identityGradConfig } from './gradients/Identity_grad';\nimport { isFiniteGradConfig } from './gradients/IsFinite_grad';\nimport { isInfGradConfig } from './gradients/IsInf_grad';\nimport { isNanGradConfig } from './gradients/IsNan_grad';\nimport { log1pGradConfig } from './gradients/Log1p_grad';\nimport { logGradConfig } from './gradients/Log_grad';\nimport { logSoftmaxGradConfig } from 
'./gradients/LogSoftmax_grad';\nimport { lrnGradConfig } from './gradients/LRN_grad';\nimport { maxGradConfig } from './gradients/Max_grad';\nimport { maximumGradConfig } from './gradients/Maximum_grad';\nimport { maxPool3DGradConfig } from './gradients/MaxPool3D_grad';\nimport { maxPoolGradConfig } from './gradients/MaxPool_grad';\nimport { minGradConfig } from './gradients/Min_grad';\nimport { minimumGradConfig } from './gradients/Minimum_grad';\nimport { mirrorPadGradConfig } from './gradients/MirrorPad_grad';\nimport { modGradConfig } from './gradients/Mod_grad';\nimport { multiplyGradConfig } from './gradients/Multiply_grad';\nimport { negateGradConfig } from './gradients/Negate_grad';\nimport { oneHotGradConfig } from './gradients/OneHot_grad';\nimport { onesLikeGradConfig } from './gradients/OnesLike_grad';\nimport { padV2GradConfig } from './gradients/PadV2_grad';\nimport { powGradConfig } from './gradients/Pow_grad';\nimport { preluGradConfig } from './gradients/Prelu_grad';\nimport { reciprocalGradConfig } from './gradients/Reciprocal_grad';\nimport { relu6GradConfig } from './gradients/Relu6_grad';\nimport { reluGradConfig } from './gradients/Relu_grad';\nimport { reshapeGradConfig } from './gradients/Reshape_grad';\nimport { resizeBilinearGradConfig } from './gradients/ResizeBilinear_grad';\nimport { resizeNearestNeighborGradConfig } from './gradients/ResizeNearestNeighbor_grad';\nimport { reverseGradConfig } from './gradients/Reverse_grad';\nimport { roundGradConfig } from './gradients/Round_grad';\nimport { rsqrtGradConfig } from './gradients/Rsqrt_grad';\nimport { selectV2PoolGradConfig } from './gradients/SelectV2_grad';\nimport { seluGradConfig } from './gradients/Selu_grad';\nimport { sigmoidGradConfig } from './gradients/Sigmoid_grad';\nimport { signGradConfig } from './gradients/Sign_grad';\nimport { sinGradConfig } from './gradients/Sin_grad';\nimport { sinhGradConfig } from './gradients/Sinh_grad';\nimport { sliceGradConfig } from './gradients/Slice_grad';\nimport { softmaxGradConfig } from './gradients/Softmax_grad';\nimport { softplusGradConfig } from './gradients/Softplus_grad';\nimport { spaceToBatchNDGradConfig } from './gradients/SpaceToBatchND_grad';\nimport { splitVGradConfig } from './gradients/SplitV_grad';\nimport { sqrtGradConfig } from './gradients/Sqrt_grad';\nimport { squareGradConfig } from './gradients/Square_grad';\nimport { squaredDifferenceGradConfig } from './gradients/SquaredDifference_grad';\nimport { stepGradConfig } from './gradients/Step_grad';\nimport { subGradConfig } from './gradients/Sub_grad';\nimport { sumGradConfig } from './gradients/Sum_grad';\nimport { tanGradConfig } from './gradients/Tan_grad';\nimport { tanhGradConfig } from './gradients/Tanh_grad';\nimport { tileGradConfig } from './gradients/Tile_grad';\nimport { transposeGradConfig } from './gradients/Transpose_grad';\nimport { unpackGradConfig } from './gradients/Unpack_grad';\nimport { unsortedSegmentSumGradConfig } from './gradients/UnsortedSegmentSum_grad';\nimport { zerosLikeGradConfig } from './gradients/ZerosLike_grad';\nimport { registerGradient } from './kernel_registry';\n// Export all kernel configs here so that the package can auto register them\nconst gradConfigs = [\n absGradConfig,\n acosGradConfig,\n acoshGradConfig,\n addGradConfig,\n addNGradConfig,\n argMaxGradConfig,\n argMinGradConfig,\n asinGradConfig,\n asinhGradConfig,\n atan2GradConfig,\n atanGradConfig,\n atanhGradConfig,\n avgPool3DGradConfig,\n avgPoolGradConfig,\n batchMatMulGradConfig,\n 
batchToSpaceNDGradConfig,\n broadcastToGradConfig,\n castGradConfig,\n ceilGradConfig,\n clipByValueGradConfig,\n concatGradConfig,\n conv2DBackpropInputGradConfig,\n conv2DGradConfig,\n conv3DGradConfig,\n cosGradConfig,\n coshGradConfig,\n cumsumGradConfig,\n depthwiseConv2dNativeGradConfig,\n dilation2dGradConfig,\n divGradConfig,\n eluGradConfig,\n erfGradConfig,\n expGradConfig,\n expm1GradConfig,\n floorDivGradConfig,\n floorGradConfig,\n fusedBatchNormGradConfig,\n gatherGradConfig,\n greaterEqualGradConfig,\n identityGradConfig,\n isFiniteGradConfig,\n isInfGradConfig,\n isNanGradConfig,\n log1pGradConfig,\n logGradConfig,\n logSoftmaxGradConfig,\n lrnGradConfig,\n maxGradConfig,\n maxGradConfig,\n maximumGradConfig,\n maxPool3DGradConfig,\n maxPoolGradConfig,\n minGradConfig,\n minimumGradConfig,\n mirrorPadGradConfig,\n modGradConfig,\n multiplyGradConfig,\n negateGradConfig,\n oneHotGradConfig,\n onesLikeGradConfig,\n padV2GradConfig,\n padV2GradConfig,\n powGradConfig,\n preluGradConfig,\n reciprocalGradConfig,\n relu6GradConfig,\n reluGradConfig,\n reshapeGradConfig,\n resizeBilinearGradConfig,\n resizeNearestNeighborGradConfig,\n reverseGradConfig,\n roundGradConfig,\n rsqrtGradConfig,\n selectV2PoolGradConfig,\n seluGradConfig,\n sigmoidGradConfig,\n signGradConfig,\n sinGradConfig,\n sinhGradConfig,\n sliceGradConfig,\n softmaxGradConfig,\n softplusGradConfig,\n spaceToBatchNDGradConfig,\n spaceToBatchNDGradConfig,\n splitVGradConfig,\n splitVGradConfig,\n sqrtGradConfig,\n squaredDifferenceGradConfig,\n squareGradConfig,\n stepGradConfig,\n subGradConfig,\n sumGradConfig,\n tanGradConfig,\n tanhGradConfig,\n tileGradConfig,\n transposeGradConfig,\n unpackGradConfig,\n unsortedSegmentSumGradConfig,\n zerosLikeGradConfig\n];\nfor (const gradientConfig of gradConfigs) {\n registerGradient(gradientConfig);\n}\n//# sourceMappingURL=register_all_gradients.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { abs } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.abs = function () {\n this.throwIfDisposed();\n return abs(this);\n};\n//# sourceMappingURL=abs.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { acos } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.acos = function () {\n this.throwIfDisposed();\n return acos(this);\n};\n//# sourceMappingURL=acos.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { acosh } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.acosh = function () {\n this.throwIfDisposed();\n return acosh(this);\n};\n//# sourceMappingURL=acosh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { addStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated strict variants of ops have been deprecated\n */\nTensor.prototype.addStrict = function (x) {\n this.throwIfDisposed();\n return addStrict(this, x);\n};\n//# sourceMappingURL=add_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { add } from '../../ops/add';\nimport { Tensor } from '../../tensor';\nTensor.prototype.add = function (b) {\n this.throwIfDisposed();\n return add(this, b);\n};\n//# sourceMappingURL=add.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { all } from '../../ops/all';\nimport { Tensor } from '../../tensor';\nTensor.prototype.all = function (axis, keepDims) {\n this.throwIfDisposed();\n return all(this, axis, keepDims);\n};\n//# sourceMappingURL=all.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { any } from '../../ops/any';\nimport { Tensor } from '../../tensor';\nTensor.prototype.any = function (axis, keepDims) {\n this.throwIfDisposed();\n return any(this, axis, keepDims);\n};\n//# sourceMappingURL=any.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { argMax } from '../../ops/arg_max';\nimport { Tensor } from '../../tensor';\nTensor.prototype.argMax = function (axis) {\n this.throwIfDisposed();\n return argMax(this, axis);\n};\n//# sourceMappingURL=arg_max.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { argMin } from '../../ops/arg_min';\nimport { Tensor } from '../../tensor';\nTensor.prototype.argMin = function (axis) {\n this.throwIfDisposed();\n return argMin(this, axis);\n};\n//# sourceMappingURL=arg_min.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { reshape } from '../../ops/reshape';\nimport { Tensor } from '../../tensor';\nimport { assert } from '../../util';\n/** Converts a size-1 `tf.Tensor` to a `tf.Scalar`.\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.asScalar = function () {\n this.throwIfDisposed();\n assert(this.size === 1, () => 'The array must have only 1 element.');\n return reshape(this, []);\n};\n//# sourceMappingURL=as_scalar.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { cast } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * Casts a `tf.Tensor` to a specified dtype.\n *\n * @param dtype Data-type to cast the tensor to.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.asType = function (dtype) {\n this.throwIfDisposed();\n return cast(this, dtype);\n};\n//# sourceMappingURL=as_type.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { reshape } from '../../ops/reshape';\nimport { Tensor } from '../../tensor';\n/** Converts a `tf.Tensor` to a `tf.Tensor1D`.\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.as1D = function () {\n this.throwIfDisposed();\n return reshape(this, [this.size]);\n};\n//# sourceMappingURL=as1d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { reshape } from '../../ops/reshape';\nimport { Tensor } from '../../tensor';\n/**\n * Converts a `tf.Tensor` to a `tf.Tensor2D`.\n *\n * @param rows Number of rows in `tf.Tensor2D`.\n * @param columns Number of columns in `tf.Tensor2D`.\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.as2D = function (rows, columns) {\n this.throwIfDisposed();\n return reshape(this, [rows, columns]);\n};\n//# sourceMappingURL=as2d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { reshape } from '../../ops/reshape';\nimport { Tensor } from '../../tensor';\n/**\n * Converts a `tf.Tensor` to a `tf.Tensor3D`.\n *\n * @param rows Number of rows in `tf.Tensor3D`.\n * @param columns Number of columns in `tf.Tensor3D`.\n * @param depth Depth of `tf.Tensor3D`.\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.as3D = function (rows, columns, depth) {\n this.throwIfDisposed();\n return reshape(this, [rows, columns, depth]);\n};\n//# sourceMappingURL=as3d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { reshape } from '../../ops/reshape';\nimport { Tensor } from '../../tensor';\n/**\n * Converts a `tf.Tensor` to a `tf.Tensor4D`.\n *\n * @param rows Number of rows in `tf.Tensor4D`.\n * @param columns Number of columns in `tf.Tensor4D`.\n * @param depth Depth of `tf.Tensor4D`.\n * @param depth2 4th dimension of `tf.Tensor4D`.\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.as4D = function (rows, columns, depth, depth2) {\n this.throwIfDisposed();\n return reshape(this, [rows, columns, depth, depth2]);\n};\n//# sourceMappingURL=as4d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { reshape } from '../../ops/reshape';\nimport { Tensor } from '../../tensor';\n/**\n * Converts a `tf.Tensor` to a `tf.Tensor5D`.\n *\n * @param rows Number of rows in `tf.Tensor5D`.\n * @param columns Number of columns in `tf.Tensor5D`.\n * @param depth Depth of `tf.Tensor5D`.\n * @param depth2 4th dimension of `tf.Tensor5D`.\n * @param depth3 5th dimension of 'tf.Tensor5D'\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.as5D = function (rows, columns, depth, depth2, depth3) {\n this.throwIfDisposed();\n return reshape(this, [rows, columns, depth, depth2, depth3]);\n};\n//# sourceMappingURL=as5d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { asin } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.asin = function () {\n this.throwIfDisposed();\n return asin(this);\n};\n//# sourceMappingURL=asin.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { asinh } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.asinh = function () {\n this.throwIfDisposed();\n return asinh(this);\n};\n//# sourceMappingURL=asinh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { atan } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.atan = function () {\n this.throwIfDisposed();\n return atan(this);\n};\n//# sourceMappingURL=atan.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { atan2 } from '../../ops/atan2';\nimport { Tensor } from '../../tensor';\nTensor.prototype.atan2 = function (b) {\n this.throwIfDisposed();\n return atan2(this, b);\n};\n//# sourceMappingURL=atan2.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { atanh } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.atanh = function () {\n this.throwIfDisposed();\n return atanh(this);\n};\n//# sourceMappingURL=atanh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { avgPool } from '../../ops/avg_pool';\nimport { Tensor } from '../../tensor';\nTensor.prototype.avgPool = function (filterSize, strides, pad, dimRoundingMode) {\n this.throwIfDisposed();\n return avgPool(this, filterSize, strides, pad, dimRoundingMode);\n};\n//# sourceMappingURL=avg_pool.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { batchToSpaceND } from '../../ops/batch_to_space_nd';\nimport { Tensor } from '../../tensor';\nTensor.prototype.batchToSpaceND = function (blockShape, crops) {\n this.throwIfDisposed();\n return batchToSpaceND(this, blockShape, crops);\n};\n//# sourceMappingURL=batch_to_space_nd.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { batchNorm } from '../../ops/batchnorm';\nimport { Tensor } from '../../tensor';\nTensor.prototype.batchNorm = function (mean, variance, offset, scale, varianceEpsilon) {\n this.throwIfDisposed();\n return batchNorm(this, mean, variance, offset, scale, varianceEpsilon);\n};\n//# sourceMappingURL=batchnorm.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { broadcastTo } from '../../ops/broadcast_to';\nimport { Tensor } from '../../tensor';\nTensor.prototype.broadcastTo = function (shape) {\n this.throwIfDisposed();\n return broadcastTo(this, shape);\n};\n//# sourceMappingURL=broadcast_to.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { cast } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.cast = function (dtype) {\n this.throwIfDisposed();\n return cast(this, dtype);\n};\n//# sourceMappingURL=cast.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { ceil } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.ceil = function () {\n this.throwIfDisposed();\n return ceil(this);\n};\n//# sourceMappingURL=ceil.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { clipByValue } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.clipByValue = function (min, max) {\n this.throwIfDisposed();\n return clipByValue(this, min, max);\n};\n//# sourceMappingURL=clip_by_value.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { concat } from '../../ops/concat';\nimport { Tensor } from '../../tensor';\nTensor.prototype.concat = function (x, axis) {\n this.throwIfDisposed();\n if (x instanceof Tensor) {\n x = [x];\n }\n return concat([this, ...x], axis);\n};\n//# sourceMappingURL=concat.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { conv1d } from '../../ops/conv1d';\nimport { Tensor } from '../../tensor';\nTensor.prototype.conv1d = function (filter, stride, pad, dataFormat, dilation, dimRoundingMode) {\n this.throwIfDisposed();\n return conv1d(this, filter, stride, pad, dataFormat, dilation, dimRoundingMode);\n};\n//# sourceMappingURL=conv1d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { conv2dTranspose } from '../../ops/conv2d_transpose';\nimport { Tensor } from '../../tensor';\nTensor.prototype.conv2dTranspose = function (filter, outputShape, strides, pad, dimRoundingMode) {\n this.throwIfDisposed();\n return conv2dTranspose(this, filter, outputShape, strides, pad, dimRoundingMode);\n};\n//# sourceMappingURL=conv2d_transpose.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { conv2d } from '../../ops/conv2d';\nimport { Tensor } from '../../tensor';\nTensor.prototype.conv2d = function (filter, strides, pad, dataFormat, dilations, dimRoundingMode) {\n this.throwIfDisposed();\n return conv2d(this, filter, strides, pad, dataFormat, dilations, dimRoundingMode);\n};\n//# sourceMappingURL=conv2d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { cos } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.cos = function () {\n this.throwIfDisposed();\n return cos(this);\n};\n//# sourceMappingURL=cos.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { cosh } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.cosh = function () {\n this.throwIfDisposed();\n return cosh(this);\n};\n//# sourceMappingURL=cosh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { cumsum } from '../../ops/cumsum';\nimport { Tensor } from '../../tensor';\nTensor.prototype.cumsum = function (axis, exclusive, reverse) {\n this.throwIfDisposed();\n return cumsum(this, axis, exclusive, reverse);\n};\n//# sourceMappingURL=cumsum.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { depthToSpace } from '../../ops/depth_to_space';\nimport { Tensor } from '../../tensor';\nTensor.prototype.depthToSpace = function (blockSize, dataFormat) {\n this.throwIfDisposed();\n return depthToSpace(this, blockSize, dataFormat);\n};\n//# sourceMappingURL=depth_to_space.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { deprecationWarn } from '../../globals';\nimport { depthwiseConv2d } from '../../ops/depthwise_conv2d';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated Use `depthwiseConv2d` instead.\n */\nTensor.prototype.depthwiseConv2D = function (filter, strides, pad, dataFormat, dilations, dimRoundingMode) {\n deprecationWarn('depthwiseConv2D is deprecated, use depthwiseConv2d instead');\n this.throwIfDisposed();\n return depthwiseConv2d(this, filter, strides, pad, dataFormat, dilations, dimRoundingMode);\n};\n//# sourceMappingURL=depthwise_conv2D_deprecated.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { depthwiseConv2d } from '../../ops/depthwise_conv2d';\nimport { Tensor } from '../../tensor';\nTensor.prototype.depthwiseConv2d = function (filter, strides, pad, dataFormat, dilations, dimRoundingMode) {\n this.throwIfDisposed();\n return depthwiseConv2d(this, filter, strides, pad, dataFormat, dilations, dimRoundingMode);\n};\n//# sourceMappingURL=depthwise_conv2d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { dilation2d } from '../../ops/dilation2d';\nimport { Tensor } from '../../tensor';\nTensor.prototype.dilation2d = function (filter, strides, pad, dilations, dataFormat) {\n this.throwIfDisposed();\n return dilation2d(this, filter, strides, pad, dilations, dataFormat);\n};\n//# sourceMappingURL=dilation2d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { divNoNan } from '../../ops/div_no_nan';\nimport { Tensor } from '../../tensor';\nTensor.prototype.divNoNan = function (b) {\n this.throwIfDisposed();\n return divNoNan(this, b);\n};\n//# sourceMappingURL=div_no_nan.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { divStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.divStrict = function (x) {\n this.throwIfDisposed();\n return divStrict(this, x);\n};\n//# sourceMappingURL=div_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { div } from '../../ops/div';\nimport { Tensor } from '../../tensor';\nTensor.prototype.div = function (b) {\n this.throwIfDisposed();\n return div(this, b);\n};\n//# sourceMappingURL=div.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { dot } from '../../ops/dot';\nimport { Tensor } from '../../tensor';\nTensor.prototype.dot = function (b) {\n this.throwIfDisposed();\n return dot(this, b);\n};\n//# sourceMappingURL=dot.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { elu } from '../../ops/elu';\nimport { Tensor } from '../../tensor';\nTensor.prototype.elu = function () {\n this.throwIfDisposed();\n return elu(this);\n};\n//# sourceMappingURL=elu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { equalStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated strict variants of ops have been deprecated\n */\nTensor.prototype.equalStrict = function (x) {\n this.throwIfDisposed();\n return equalStrict(this, x);\n};\n//# sourceMappingURL=equal_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { equal } from '../../ops/equal';\nimport { Tensor } from '../../tensor';\nTensor.prototype.equal = function (b) {\n this.throwIfDisposed();\n return equal(this, b);\n};\n//# sourceMappingURL=equal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { erf } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.erf = function () {\n this.throwIfDisposed();\n return erf(this);\n};\n//# sourceMappingURL=erf.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { exp } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.exp = function () {\n this.throwIfDisposed();\n return exp(this);\n};\n//# sourceMappingURL=exp.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { expandDims } from '../../ops/expand_dims';\nimport { Tensor } from '../../tensor';\nTensor.prototype.expandDims = function (axis) {\n this.throwIfDisposed();\n return expandDims(this, axis);\n};\n//# sourceMappingURL=expand_dims.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { expm1 } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.expm1 = function () {\n this.throwIfDisposed();\n return expm1(this);\n};\n//# sourceMappingURL=expm1.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { fft } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.fft = function () {\n this.throwIfDisposed();\n return fft(this);\n};\n//# sourceMappingURL=fft.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { reshape } from '../../ops/reshape';\nimport { Tensor } from '../../tensor';\n/** Flatten a Tensor to a 1D array.\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.flatten = function () {\n this.throwIfDisposed();\n return reshape(this, [this.size]);\n};\n//# sourceMappingURL=flatten.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { floor } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.floor = function () {\n this.throwIfDisposed();\n return floor(this);\n};\n//# sourceMappingURL=floor.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { floorDiv } from '../../ops/floorDiv';\nimport { Tensor } from '../../tensor';\nTensor.prototype.floorDiv = function (b) {\n this.throwIfDisposed();\n return floorDiv(this, b);\n};\n//# sourceMappingURL=floorDiv.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { gather } from '../../ops/gather';\nimport { Tensor } from '../../tensor';\nTensor.prototype.gather = function (indices, axis) {\n this.throwIfDisposed();\n return gather(this, indices, axis);\n};\n//# sourceMappingURL=gather.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { greaterEqualStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated strict variants of ops have been deprecated\n */\nTensor.prototype.greaterEqualStrict = function (x) {\n this.throwIfDisposed();\n return greaterEqualStrict(this, x);\n};\n//# sourceMappingURL=greater_equal_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { greaterEqual } from '../../ops/greater_equal';\nimport { Tensor } from '../../tensor';\nTensor.prototype.greaterEqual = function (b) {\n this.throwIfDisposed();\n return greaterEqual(this, b);\n};\n//# sourceMappingURL=greater_equal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { greaterStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated strict variants of ops have been deprecated\n */\nTensor.prototype.greaterStrict = function (x) {\n this.throwIfDisposed();\n return greaterStrict(this, x);\n};\n//# sourceMappingURL=greater_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { greater } from '../../ops/greater';\nimport { Tensor } from '../../tensor';\nTensor.prototype.greater = function (b) {\n this.throwIfDisposed();\n return greater(this, b);\n};\n//# sourceMappingURL=greater.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { ifft } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.ifft = function () {\n this.throwIfDisposed();\n return ifft(this);\n};\n//# sourceMappingURL=ifft.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { irfft } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.irfft = function () {\n this.throwIfDisposed();\n return irfft(this);\n};\n//# sourceMappingURL=irfft.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { isFinite } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.isFinite = function () {\n this.throwIfDisposed();\n return isFinite(this);\n};\n//# sourceMappingURL=is_finite.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { isInf } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.isInf = function () {\n this.throwIfDisposed();\n return isInf(this);\n};\n//# sourceMappingURL=is_inf.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { isNaN } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.isNaN = function () {\n this.throwIfDisposed();\n return isNaN(this);\n};\n//# sourceMappingURL=is_nan.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { leakyRelu } from '../../ops/leaky_relu';\nimport { Tensor } from '../../tensor';\nTensor.prototype.leakyRelu = function (alpha) {\n this.throwIfDisposed();\n return leakyRelu(this, alpha);\n};\n//# sourceMappingURL=leaky_relu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { lessEqualStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated strict variants of ops have been deprecated\n */\nTensor.prototype.lessEqualStrict = function (x) {\n this.throwIfDisposed();\n return lessEqualStrict(this, x);\n};\n//# sourceMappingURL=less_equal_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { lessEqual } from '../../ops/less_equal';\nimport { Tensor } from '../../tensor';\nTensor.prototype.lessEqual = function (b) {\n this.throwIfDisposed();\n return lessEqual(this, b);\n};\n//# sourceMappingURL=less_equal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { lessStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.lessStrict = function (x) {\n this.throwIfDisposed();\n return lessStrict(this, x);\n};\n//# sourceMappingURL=less_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { less } from '../../ops/less';\nimport { Tensor } from '../../tensor';\nTensor.prototype.less = function (b) {\n this.throwIfDisposed();\n return less(this, b);\n};\n//# sourceMappingURL=less.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { localResponseNormalization } from '../../ops/local_response_normalization';\nimport { Tensor } from '../../tensor';\nTensor.prototype.localResponseNormalization = function (depthRadius, bias, alpha, beta) {\n this.throwIfDisposed();\n return localResponseNormalization(this, depthRadius, bias, alpha, beta);\n};\n//# sourceMappingURL=local_response_normalization.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { logSigmoid } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.logSigmoid = function () {\n this.throwIfDisposed();\n return logSigmoid(this);\n};\n//# sourceMappingURL=log_sigmoid.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { logSoftmax } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.logSoftmax = function (axis) {\n this.throwIfDisposed();\n return logSoftmax(this, axis);\n};\n//# sourceMappingURL=log_softmax.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { logSumExp } from '../../ops/log_sum_exp';\nimport { Tensor } from '../../tensor';\nTensor.prototype.logSumExp = function (axis, keepDims) {\n this.throwIfDisposed();\n return logSumExp(this, axis, keepDims);\n};\n//# sourceMappingURL=log_sum_exp.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { log } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.log = function () {\n this.throwIfDisposed();\n return log(this);\n};\n//# sourceMappingURL=log.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { log1p } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.log1p = function () {\n this.throwIfDisposed();\n return log1p(this);\n};\n//# sourceMappingURL=log1p.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { logicalAnd } from '../../ops/logical_and';\nimport { Tensor } from '../../tensor';\nTensor.prototype.logicalAnd = function (b) {\n this.throwIfDisposed();\n return logicalAnd(this, b);\n};\n//# sourceMappingURL=logical_and.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { logicalNot } from '../../ops/logical_not';\nimport { Tensor } from '../../tensor';\nTensor.prototype.logicalNot = function () {\n this.throwIfDisposed();\n return logicalNot(this);\n};\n//# sourceMappingURL=logical_not.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { logicalOr } from '../../ops/logical_or';\nimport { Tensor } from '../../tensor';\nTensor.prototype.logicalOr = function (b) {\n this.throwIfDisposed();\n return logicalOr(this, b);\n};\n//# sourceMappingURL=logical_or.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { logicalXor } from '../../ops/logical_xor';\nimport { Tensor } from '../../tensor';\nTensor.prototype.logicalXor = function (b) {\n this.throwIfDisposed();\n return logicalXor(this, b);\n};\n//# sourceMappingURL=logical_xor.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { matMul } from '../../ops/mat_mul';\nimport { Tensor } from '../../tensor';\nTensor.prototype.matMul = function (b, transposeA, transposeB) {\n this.throwIfDisposed();\n return matMul(this, b, transposeA, transposeB);\n};\n//# sourceMappingURL=mat_mul.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { maxPool } from '../../ops/max_pool';\nimport { Tensor } from '../../tensor';\nTensor.prototype.maxPool = function (filterSize, strides, pad, dimRoundingMode) {\n this.throwIfDisposed();\n return maxPool(this, filterSize, strides, pad, dimRoundingMode);\n};\n//# sourceMappingURL=max_pool.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { max } from '../../ops/max';\nimport { Tensor } from '../../tensor';\nTensor.prototype.max = function (axis, keepDims) {\n this.throwIfDisposed();\n return max(this, axis, keepDims);\n};\n//# sourceMappingURL=max.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { maximumStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated strict variants of ops have been deprecated\n */\nTensor.prototype.maximumStrict = function (x) {\n this.throwIfDisposed();\n return maximumStrict(this, x);\n};\n//# sourceMappingURL=maximum_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { maximum } from '../../ops/maximum';\nimport { Tensor } from '../../tensor';\nTensor.prototype.maximum = function (b) {\n this.throwIfDisposed();\n return maximum(this, b);\n};\n//# sourceMappingURL=maximum.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { mean } from '../../ops/mean';\nimport { Tensor } from '../../tensor';\nTensor.prototype.mean = function (axis, keepDims) {\n this.throwIfDisposed();\n return mean(this, axis, keepDims);\n};\n//# sourceMappingURL=mean.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { min } from '../../ops/min';\nimport { Tensor } from '../../tensor';\nTensor.prototype.min = function (axis, keepDims) {\n this.throwIfDisposed();\n return min(this, axis, keepDims);\n};\n//# sourceMappingURL=min.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { minimumStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated strict variants of ops have been deprecated\n */\nTensor.prototype.minimumStrict = function (x) {\n this.throwIfDisposed();\n return minimumStrict(this, x);\n};\n//# sourceMappingURL=minimum_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Explicit error types.\n *\n * See the following link for more information about why the code includes\n * calls to setPrototypeOf:\n *\n * https://github.com/Microsoft/TypeScript-wiki/blob/master/Breaking-Changes.md#extending-built-ins-like-error-array-and-map-may-no-longer-work\n */\n// tslint:enable\n/**\n * Equivalent of Python's AttributeError.\n */\nexport class AttributeError extends Error {\n constructor(message) {\n super(message);\n // Set the prototype explicitly.\n Object.setPrototypeOf(this, AttributeError.prototype);\n }\n}\n/**\n * Equivalent of Python's RuntimeError.\n */\nexport class RuntimeError extends Error {\n constructor(message) {\n super(message);\n // Set the prototype explicitly.\n Object.setPrototypeOf(this, RuntimeError.prototype);\n }\n}\n/**\n * Equivalent of Python's ValueError.\n */\nexport class ValueError extends Error {\n constructor(message) {\n super(message);\n // Set the prototype explicitly.\n Object.setPrototypeOf(this, ValueError.prototype);\n }\n}\n/**\n * Equivalent of Python's NotImplementedError.\n */\nexport class NotImplementedError extends Error {\n constructor(message) {\n super(message);\n // Set the prototype explicitly.\n Object.setPrototypeOf(this, NotImplementedError.prototype);\n }\n}\n/**\n * Equivalent of Python's AssertionError.\n */\nexport class AssertionError extends Error {\n constructor(message) {\n super(message);\n // Set the prototype explicitly.\n Object.setPrototypeOf(this, AssertionError.prototype);\n }\n}\n/**\n * Equivalent of Python's IndexError.\n */\nexport class IndexError extends Error {\n constructor(message) {\n super(message);\n // Set the prototype explicitly.\n Object.setPrototypeOf(this, IndexError.prototype);\n }\n}\n//# sourceMappingURL=errors.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* Original source: utils/generic_utils.py */\nimport { util } from '@tensorflow/tfjs-core';\nimport { AssertionError, ValueError } from '../errors';\n// tslint:enable\n/**\n * If `value` is an Array, equivalent to Python's `value * numValues`.\n * If `value` is not an Array, equivalent to Python's `[value] * numValues`\n */\n// tslint:disable-next-line:no-any\nexport function pyListRepeat(value, numValues) {\n if (Array.isArray(value)) {\n // tslint:disable-next-line:no-any\n let newArray = [];\n for (let i = 0; i < numValues; i++) {\n newArray = newArray.concat(value);\n }\n return newArray;\n }\n else {\n const newArray = new Array(numValues);\n newArray.fill(value);\n return newArray;\n }\n}\nexport function assert(val, message) {\n if (!val) {\n throw new AssertionError(message);\n }\n}\n/**\n * Count the number of elements of the `array` that are equal to `reference`.\n */\nexport function count(array, refernce) {\n let counter = 0;\n for (const item of array) {\n if (item === refernce) {\n counter++;\n }\n }\n return counter;\n}\n/**\n * If an array is of length 1, just return the first element. 
Otherwise, return\n * the full array.\n * @param tensors\n */\nexport function singletonOrArray(xs) {\n if (xs.length === 1) {\n return xs[0];\n }\n return xs;\n}\n/**\n * Normalizes a list/tensor into a list.\n *\n * If a tensor is passed, we return\n * a list of size 1 containing the tensor.\n *\n * @param x target object to be normalized.\n */\n// tslint:disable-next-line:no-any\nexport function toList(x) {\n if (Array.isArray(x)) {\n return x;\n }\n return [x];\n}\n/**\n * Generate a UID for a list\n */\n// tslint:disable-next-line:no-any\nexport function objectListUid(objs) {\n const objectList = toList(objs);\n let retVal = '';\n for (const obj of objectList) {\n if (obj.id == null) {\n throw new ValueError(`Object ${obj} passed to objectListUid without an id`);\n }\n if (retVal !== '') {\n retVal = retVal + ', ';\n }\n retVal = `${retVal}${Math.abs(obj.id)}`;\n }\n return retVal;\n}\n/**\n * Converts string to snake-case.\n * @param name\n */\nexport function toSnakeCase(name) {\n const intermediate = name.replace(/(.)([A-Z][a-z0-9]+)/g, '$1_$2');\n const insecure = intermediate.replace(/([a-z])([A-Z])/g, '$1_$2').toLowerCase();\n /*\n If the class is private the name starts with \"_\" which is not secure\n for creating scopes. We prefix the name with \"private\" in this case.\n */\n if (insecure[0] !== '_') {\n return insecure;\n }\n return 'private' + insecure;\n}\nexport function toCamelCase(identifier) {\n // quick return for empty string or single character strings\n if (identifier.length <= 1) {\n return identifier;\n }\n // Check for the underscore indicating snake_case\n if (identifier.indexOf('_') === -1) {\n return identifier;\n }\n return identifier.replace(/[_]+(\\w|$)/g, (m, p1) => p1.toUpperCase());\n}\n// tslint:disable-next-line:no-any\nlet _GLOBAL_CUSTOM_OBJECTS = {};\nexport function serializeKerasObject(instance) {\n if (instance === null || instance === undefined) {\n return null;\n }\n const dict = {};\n dict['className'] = instance.getClassName();\n dict['config'] = instance.getConfig();\n return dict;\n}\n/**\n * Replace ndarray-style scalar objects in serialization objects with numbers.\n *\n * Background: In some versions of tf.keras, certain scalar values in the HDF5\n * model save file can be serialized as: `{'type': 'ndarray', 'value': num}`,\n * where in `num` is a plain number. 
This method converts such serialization\n * to a `number`.\n *\n * @param config The keras-format serialization object to be processed\n * (in place).\n */\nfunction convertNDArrayScalarsInConfig(config) {\n if (config == null || typeof config !== 'object') {\n return;\n }\n else if (Array.isArray(config)) {\n config.forEach(configItem => convertNDArrayScalarsInConfig(configItem));\n }\n else {\n const fields = Object.keys(config);\n for (const field of fields) {\n const value = config[field];\n if (value != null && typeof value === 'object') {\n if (!Array.isArray(value) && value['type'] === 'ndarray' &&\n typeof value['value'] === 'number') {\n config[field] = value['value'];\n }\n else {\n convertNDArrayScalarsInConfig(value);\n }\n }\n }\n }\n}\n/**\n * Deserialize a saved Keras Object\n * @param identifier either a string ID or a saved Keras dictionary\n * @param moduleObjects a list of Python class names to object constructors\n * @param customObjects a list of Python class names to object constructors\n * @param printableModuleName debug text for the object being reconstituted\n * @param fastWeightInit Optional flag to use fast weight initialization\n * during deserialization. This is applicable to cases in which\n * the initialization will be immediately overwritten by loaded weight\n * values. Default: `false`.\n * @returns a TensorFlow.js Layers object\n */\n// tslint:disable:no-any\nexport function deserializeKerasObject(identifier, moduleObjects = {}, customObjects = {}, printableModuleName = 'object', fastWeightInit = false) {\n // tslint:enable\n if (typeof identifier === 'string') {\n const functionName = identifier;\n let fn;\n if (functionName in customObjects) {\n fn = customObjects[functionName];\n }\n else if (functionName in _GLOBAL_CUSTOM_OBJECTS) {\n fn = _GLOBAL_CUSTOM_OBJECTS[functionName];\n }\n else {\n fn = moduleObjects[functionName];\n if (fn == null) {\n throw new ValueError(`Unknown ${printableModuleName}: ${identifier}. ` +\n `This may be due to one of the following reasons:\\n` +\n `1. The ${printableModuleName} is defined in Python, in which ` +\n `case it needs to be ported to TensorFlow.js or your JavaScript ` +\n `code.\\n` +\n `2. The custom ${printableModuleName} is defined in JavaScript, ` +\n `but is not registered properly with ` +\n `tf.serialization.registerClass().`);\n // TODO(cais): Add link to tutorial page on custom layers.\n }\n }\n return fn;\n }\n else {\n // In this case we are dealing with a Keras config dictionary.\n const config = identifier;\n if (config['className'] == null || config['config'] == null) {\n throw new ValueError(`${printableModuleName}: Improper config format: ` +\n `${JSON.stringify(config)}.\\n` +\n `'className' and 'config' must set.`);\n }\n const className = config['className'];\n let cls, fromConfig;\n if (className in customObjects) {\n [cls, fromConfig] = customObjects[className];\n }\n else if (className in _GLOBAL_CUSTOM_OBJECTS) {\n [cls, fromConfig] = _GLOBAL_CUSTOM_OBJECTS['className'];\n }\n else if (className in moduleObjects) {\n [cls, fromConfig] = moduleObjects[className];\n }\n if (cls == null) {\n throw new ValueError(`Unknown ${printableModuleName}: ${className}. ` +\n `This may be due to one of the following reasons:\\n` +\n `1. The ${printableModuleName} is defined in Python, in which ` +\n `case it needs to be ported to TensorFlow.js or your JavaScript ` +\n `code.\\n` +\n `2. 
The custom ${printableModuleName} is defined in JavaScript, ` +\n `but is not registered properly with ` +\n `tf.serialization.registerClass().`);\n // TODO(cais): Add link to tutorial page on custom layers.\n }\n if (fromConfig != null) {\n // Porting notes: Instead of checking to see whether fromConfig accepts\n // customObjects, we create a customObjects dictionary and tack it on to\n // config['config'] as config['config'].customObjects. Objects can use it,\n // if they want.\n // tslint:disable-next-line:no-any\n const customObjectsCombined = {};\n for (const key of Object.keys(_GLOBAL_CUSTOM_OBJECTS)) {\n customObjectsCombined[key] = _GLOBAL_CUSTOM_OBJECTS[key];\n }\n for (const key of Object.keys(customObjects)) {\n customObjectsCombined[key] = customObjects[key];\n }\n // Add the customObjects to config\n const nestedConfig = config['config'];\n nestedConfig['customObjects'] = customObjectsCombined;\n const backupCustomObjects = Object.assign({}, _GLOBAL_CUSTOM_OBJECTS);\n for (const key of Object.keys(customObjects)) {\n _GLOBAL_CUSTOM_OBJECTS[key] = customObjects[key];\n }\n convertNDArrayScalarsInConfig(config['config']);\n const returnObj = fromConfig(cls, config['config'], customObjects, fastWeightInit);\n _GLOBAL_CUSTOM_OBJECTS = Object.assign({}, backupCustomObjects);\n return returnObj;\n }\n else {\n // Then `cls` may be a function returning a class.\n // In this case by convention `config` holds\n // the kwargs of the function.\n const backupCustomObjects = Object.assign({}, _GLOBAL_CUSTOM_OBJECTS);\n for (const key of Object.keys(customObjects)) {\n _GLOBAL_CUSTOM_OBJECTS[key] = customObjects[key];\n }\n // In python this is **config['config'], for tfjs-layers we require\n // classes that use this fall-through construction method to take\n // a config interface that mimics the expansion of named parameters.\n const returnObj = new cls(config['config']);\n _GLOBAL_CUSTOM_OBJECTS = Object.assign({}, backupCustomObjects);\n return returnObj;\n }\n }\n}\n/**\n * Compares two numbers for sorting.\n * @param a\n * @param b\n */\nexport function numberCompare(a, b) {\n return (a < b) ? -1 : ((a > b) ? 
1 : 0);\n}\n/**\n * Comparison of two numbers for reverse sorting.\n * @param a\n * @param b\n */\nexport function reverseNumberCompare(a, b) {\n return -1 * numberCompare(a, b);\n}\n/**\n * Convert a string into the corresponding DType.\n * @param dtype\n * @returns An instance of DType.\n */\nexport function stringToDType(dtype) {\n switch (dtype) {\n case 'float32':\n return 'float32';\n default:\n throw new ValueError(`Invalid dtype: ${dtype}`);\n }\n}\n/**\n * Test the element-by-element equality of two Arrays of strings.\n * @param xs First array of strings.\n * @param ys Second array of strings.\n * @returns Wether the two arrays are all equal, element by element.\n */\nexport function stringsEqual(xs, ys) {\n if (xs == null || ys == null) {\n return xs === ys;\n }\n if (xs.length !== ys.length) {\n return false;\n }\n for (let i = 0; i < xs.length; ++i) {\n if (xs[i] !== ys[i]) {\n return false;\n }\n }\n return true;\n}\n/**\n * Get the unique elements of an array.\n * @param xs Array.\n * @returns An Array consisting of the unique elements in `xs`.\n */\nexport function unique(xs) {\n if (xs == null) {\n return xs;\n }\n const out = [];\n // TODO(cais): Maybe improve performance by sorting.\n for (const x of xs) {\n if (out.indexOf(x) === -1) {\n out.push(x);\n }\n }\n return out;\n}\n/**\n * Determine if an Object is empty (i.e., does not have own properties).\n * @param obj Object\n * @returns Whether the Object is empty.\n * @throws ValueError: If object is `null` or `undefined`.\n */\nexport function isObjectEmpty(obj) {\n if (obj == null) {\n throw new ValueError(`Invalid value in obj: ${JSON.stringify(obj)}`);\n }\n for (const key in obj) {\n if (obj.hasOwnProperty(key)) {\n return false;\n }\n }\n return true;\n}\n/**\n * Helper function used to build type union/enum run-time checkers.\n * @param values The list of allowed values.\n * @param label A string name for the type\n * @param value The value to test.\n * @throws ValueError: If the value is not in values nor `undefined`/`null`.\n */\nexport function checkStringTypeUnionValue(values, label, value) {\n if (value == null) {\n return;\n }\n if (values.indexOf(value) < 0) {\n throw new ValueError(`${value} is not a valid ${label}. Valid values are ${values} or null/undefined.`);\n }\n}\n/**\n * Helper function for verifying the types of inputs.\n *\n * Ensures that the elements of `x` are all of type `expectedType`.\n * Also verifies that the length of `x` is within bounds.\n *\n * @param x Object to test.\n * @param expectedType The string expected type of all of the elements in the\n * Array.\n * @param minLength Return false if x.length is less than this.\n * @param maxLength Return false if x.length is greater than this.\n * @returns true if and only if `x` is an `Array` with\n * length >= `minLength` and <= `maxLength`.\n */\n// tslint:disable:no-any\nexport function checkArrayTypeAndLength(x, expectedType, minLength = 0, maxLength = Infinity) {\n assert(minLength >= 0);\n assert(maxLength >= minLength);\n return (Array.isArray(x) && x.length >= minLength && x.length <= maxLength &&\n x.every(e => typeof e === expectedType));\n}\n// tslint:enable:no-any\n/**\n * Assert that a value or an array of value are positive integer.\n *\n * @param value The value being asserted on. 
May be a single number or an array\n * of numbers.\n * @param name Name of the value, used to make the error message.\n */\nexport function assertPositiveInteger(value, name) {\n if (Array.isArray(value)) {\n util.assert(value.length > 0, () => `${name} is unexpectedly an empty array.`);\n value.forEach((v, i) => assertPositiveInteger(v, `element ${i + 1} of ${name}`));\n }\n else {\n util.assert(Number.isInteger(value) && value > 0, () => `Expected ${name} to be a positive integer, but got ` +\n `${formatAsFriendlyString(value)}.`);\n }\n}\n/**\n * Format a value into a display-friendly, human-readable fashion.\n *\n * - `null` is formatted as `'null'`\n * - Strings are formated with flanking pair of quotes.\n * - Arrays are formatted with flanking pair of square brackets.\n *\n * @param value The value to display.\n * @return Formatted string.\n */\n// tslint:disable-next-line:no-any\nexport function formatAsFriendlyString(value) {\n if (value === null) {\n return 'null';\n }\n else if (Array.isArray(value)) {\n return '[' + value.map(v => formatAsFriendlyString(v)).join(',') + ']';\n }\n else if (typeof value === 'string') {\n return `\"${value}\"`;\n }\n else {\n return `${value}`;\n }\n}\n/**\n * Returns a function `f2` (decorator) which wraps the original function\n * `f`. `f2` guarantees that `f` can be called at most once\n * every `waitMs` ms. If `f2` is called more often, it will return\n * the last returned result of `f`.\n *\n * @param f The original function `f` to wrap.\n * @param waitMs The time between two consecutive calls to `f` in ms.\n */\nexport function debounce(f, waitMs) {\n let lastTime = util.now();\n let lastResult;\n const f2 = (...args) => {\n const now = util.now();\n if (now - lastTime < waitMs) {\n return lastResult;\n }\n lastTime = now;\n lastResult = f(...args);\n return lastResult;\n };\n return f2;\n}\n/**\n * Returns the fusable activation given a layers identifier.\n *\n * @param activationName The layers identifier string.\n * @return The name of the fusable activation.\n */\nexport function mapActivationToFusedKernel(activationName) {\n if (activationName === 'relu') {\n return 'relu';\n }\n if (activationName === 'linear') {\n return 'linear';\n }\n if (activationName === 'elu') {\n return 'elu';\n }\n return null;\n}\n/**\n * Returns the cartesian product of sets of values.\n * This works the same as itertools.product in Python.\n *\n * Example:\n *\n * filters = [128, 256, 512]\n * paddings = ['same', 'valid']\n *\n * product = [ [128, 'same'], [128, 'valid'], [256, 'same'], [256, 'valid'],\n * [512, 'same'], [512, 'valid']]\n *\n * @param arrayOfValues List/array of values.\n * @return The cartesian product.\n */\nexport function getCartesianProductOfValues(...arrayOfValues) {\n assert(arrayOfValues.length > 0, 'arrayOfValues is empty');\n for (const values of arrayOfValues) {\n assert(Array.isArray(values), 'one of the values is not an array');\n assert(values.length > 0, 'one of the values is empty');\n }\n return arrayOfValues.reduce((products, values) => {\n if (products.length === 0) {\n return values.map(value => [value]);\n }\n return values\n .map(value => {\n return products.map((prevValue) => [...prevValue, value]);\n })\n .reduce((flattenedProduct, unflattenedProduct) => {\n return flattenedProduct.concat(unflattenedProduct);\n }, []);\n }, []);\n}\n//# sourceMappingURL=generic_utils.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the 
LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* Original source: keras/contraints.py */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { serialization, tidy } from '@tensorflow/tfjs-core';\nimport { epsilon } from './backend/common';\nimport { deserializeKerasObject, serializeKerasObject } from './utils/generic_utils';\n/**\n * Helper function used by many of the Constraints to find the L2Norms.\n */\nfunction calcL2Norms(w, axis) {\n return tidy(() => tfc.sqrt(tfc.sum(tfc.mul(w, w), axis, true)));\n}\n/**\n * Base class for functions that impose constraints on weight values\n *\n * @doc {\n * heading: 'Constraints',\n * subheading: 'Classes',\n * namespace: 'constraints'\n * }\n */\nexport class Constraint extends serialization.Serializable {\n getConfig() {\n return {};\n }\n}\nexport class MaxNorm extends Constraint {\n constructor(args) {\n super();\n this.defaultMaxValue = 2;\n this.defaultAxis = 0;\n this.maxValue =\n args.maxValue != null ? args.maxValue : this.defaultMaxValue;\n this.axis = args.axis != null ? args.axis : this.defaultAxis;\n }\n apply(w) {\n return tidy(() => {\n const norms = calcL2Norms(w, this.axis);\n const desired = tfc.clipByValue(norms, 0, this.maxValue);\n return tfc.mul(w, tfc.div(desired, tfc.add(epsilon(), norms)));\n });\n }\n getConfig() {\n return { maxValue: this.maxValue, axis: this.axis };\n }\n}\n/** @nocollapse */\nMaxNorm.className = 'MaxNorm';\nserialization.registerClass(MaxNorm);\nexport class UnitNorm extends Constraint {\n constructor(args) {\n super();\n this.defaultAxis = 0;\n this.axis = args.axis != null ? args.axis : this.defaultAxis;\n }\n apply(w) {\n return tidy(() => tfc.div(w, tfc.add(epsilon(), calcL2Norms(w, this.axis))));\n }\n getConfig() {\n return { axis: this.axis };\n }\n}\n/** @nocollapse */\nUnitNorm.className = 'UnitNorm';\nserialization.registerClass(UnitNorm);\nexport class NonNeg extends Constraint {\n apply(w) {\n return tfc.relu(w);\n }\n}\n/** @nocollapse */\nNonNeg.className = 'NonNeg';\nserialization.registerClass(NonNeg);\nexport class MinMaxNorm extends Constraint {\n constructor(args) {\n super();\n this.defaultMinValue = 0.0;\n this.defaultMaxValue = 1.0;\n this.defaultRate = 1.0;\n this.defaultAxis = 0;\n this.minValue =\n args.minValue != null ? args.minValue : this.defaultMinValue;\n this.maxValue =\n args.maxValue != null ? args.maxValue : this.defaultMaxValue;\n this.rate = args.rate != null ? args.rate : this.defaultRate;\n this.axis = args.axis != null ? 
args.axis : this.defaultAxis;\n }\n apply(w) {\n return tidy(() => {\n const norms = calcL2Norms(w, this.axis);\n const desired = tfc.add(tfc.mul(this.rate, tfc.clipByValue(norms, this.minValue, this.maxValue)), tfc.mul(1.0 - this.rate, norms));\n return tfc.mul(w, tfc.div(desired, tfc.add(epsilon(), norms)));\n });\n }\n getConfig() {\n return {\n minValue: this.minValue,\n maxValue: this.maxValue,\n rate: this.rate,\n axis: this.axis\n };\n }\n}\n/** @nocollapse */\nMinMaxNorm.className = 'MinMaxNorm';\nserialization.registerClass(MinMaxNorm);\n// Maps the JavaScript-like identifier keys to the corresponding registry\n// symbols.\nexport const CONSTRAINT_IDENTIFIER_REGISTRY_SYMBOL_MAP = {\n 'maxNorm': 'MaxNorm',\n 'minMaxNorm': 'MinMaxNorm',\n 'nonNeg': 'NonNeg',\n 'unitNorm': 'UnitNorm'\n};\nexport function serializeConstraint(constraint) {\n return serializeKerasObject(constraint);\n}\nexport function deserializeConstraint(config, customObjects = {}) {\n return deserializeKerasObject(config, serialization.SerializationMap.getMap().classNameMap, customObjects, 'constraint');\n}\nexport function getConstraint(identifier) {\n if (identifier == null) {\n return null;\n }\n if (typeof identifier === 'string') {\n const className = identifier in CONSTRAINT_IDENTIFIER_REGISTRY_SYMBOL_MAP ?\n CONSTRAINT_IDENTIFIER_REGISTRY_SYMBOL_MAP[identifier] :\n identifier;\n const config = { className, config: {} };\n return deserializeConstraint(config);\n }\n else if (identifier instanceof Constraint) {\n return identifier;\n }\n else {\n return deserializeConstraint(identifier);\n }\n}\n//# sourceMappingURL=constraints.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n// tslint:disable-next-line:max-line-length\nimport { MaxNorm, MinMaxNorm, NonNeg, UnitNorm } from './constraints';\n/**\n * MaxNorm weight constraint.\n *\n * Constrains the weights incident to each hidden unit\n * to have a norm less than or equal to a desired value.\n *\n * References\n * - [Dropout: A Simple Way to Prevent Neural Networks from Overfitting\n * Srivastava, Hinton, et al.\n * 2014](http://www.cs.toronto.edu/~rsalakhu/papers/srivastava14a.pdf)\n *\n * @doc {heading: 'Constraints',namespace: 'constraints'}\n */\nexport function maxNorm(args) {\n return new MaxNorm(args);\n}\n/**\n * Constrains the weights incident to each hidden unit to have unit norm.\n *\n * @doc {heading: 'Constraints', namespace: 'constraints'}\n */\nexport function unitNorm(args) {\n return new UnitNorm(args);\n}\n/**\n * Constains the weight to be non-negative.\n *\n * @doc {heading: 'Constraints', namespace: 'constraints'}\n */\nexport function nonNeg() {\n return new NonNeg();\n}\n/** @doc {heading: 'Constraints', namespace: 'constraints'} */\nexport function minMaxNorm(config) {\n return new MinMaxNorm(config);\n}\n//# sourceMappingURL=exports_constraints.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nexport const VALID_DATA_FORMAT_VALUES = ['channelsFirst', 'channelsLast'];\nexport const VALID_PADDING_MODE_VALUES = ['valid', 'same', 
'causal'];\nexport const VALID_POOL_MODE_VALUES = ['max', 'avg'];\nexport const VALID_BIDIRECTIONAL_MERGE_MODES = ['sum', 'mul', 'concat', 'ave'];\nexport const VALID_SAMPLE_WEIGHT_MODES = ['temporal'];\n//# sourceMappingURL=common.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Common functions for TensorFlow.js Layers.\n */\nimport { VALID_DATA_FORMAT_VALUES, VALID_PADDING_MODE_VALUES, VALID_POOL_MODE_VALUES } from './keras_format/common';\nimport { checkStringTypeUnionValue } from './utils/generic_utils';\n// A map from the requested scoped name of a Tensor to the number of Tensors\n// wanting that name so far. This allows enforcing name uniqueness by appending\n// an incrementing index, e.g. scope/name, scope/name_1, scope/name_2, etc.\nconst nameMap = new Map();\nexport function checkDataFormat(value) {\n checkStringTypeUnionValue(VALID_DATA_FORMAT_VALUES, 'DataFormat', value);\n}\nexport function checkPaddingMode(value) {\n checkStringTypeUnionValue(VALID_PADDING_MODE_VALUES, 'PaddingMode', value);\n}\nexport function checkPoolMode(value) {\n checkStringTypeUnionValue(VALID_POOL_MODE_VALUES, 'PoolMode', value);\n}\nconst _nameScopeStack = [];\nconst _nameScopeDivider = '/';\n/**\n * Enter namescope, which can be nested.\n */\nexport function nameScope(name, fn) {\n _nameScopeStack.push(name);\n try {\n const val = fn();\n _nameScopeStack.pop();\n return val;\n }\n catch (e) {\n _nameScopeStack.pop();\n throw e;\n }\n}\n/**\n * Get the current namescope as a flat, concatenated string.\n */\nfunction currentNameScopePrefix() {\n if (_nameScopeStack.length === 0) {\n return '';\n }\n else {\n return _nameScopeStack.join(_nameScopeDivider) + _nameScopeDivider;\n }\n}\n/**\n * Get the name a Tensor (or Variable) would have if not uniqueified.\n * @param tensorName\n * @return Scoped name string.\n */\nexport function getScopedTensorName(tensorName) {\n if (!isValidTensorName(tensorName)) {\n throw new Error('Not a valid tensor name: \\'' + tensorName + '\\'');\n }\n return currentNameScopePrefix() + tensorName;\n}\n/**\n * Get unique names for Tensors and Variables.\n * @param scopedName The fully-qualified name of the Tensor, i.e. as produced by\n * `getScopedTensorName()`.\n * @return A unique version of the given fully scoped name.\n * If this is the first time that the scoped name is seen in this session,\n * then the given `scopedName` is returned unaltered. 
If the same name is\n * seen again (producing a collision), an incrementing suffix is added to the\n * end of the name, so it takes the form 'scope/name_1', 'scope/name_2', etc.\n */\nexport function getUniqueTensorName(scopedName) {\n if (!isValidTensorName(scopedName)) {\n throw new Error('Not a valid tensor name: \\'' + scopedName + '\\'');\n }\n if (!nameMap.has(scopedName)) {\n nameMap.set(scopedName, 0);\n }\n const index = nameMap.get(scopedName);\n nameMap.set(scopedName, nameMap.get(scopedName) + 1);\n if (index > 0) {\n const result = `${scopedName}_${index}`;\n // Mark the composed name as used in case someone wants\n // to call getUniqueTensorName(\"name_1\").\n nameMap.set(result, 1);\n return result;\n }\n else {\n return scopedName;\n }\n}\nconst tensorNameRegex = new RegExp(/^[A-Za-z0-9][-A-Za-z0-9\\._\\/]*$/);\n/**\n * Determine whether a string is a valid tensor name.\n * @param name\n * @returns A Boolean indicating whether `name` is a valid tensor name.\n */\nexport function isValidTensorName(name) {\n return !!name.match(tensorNameRegex);\n}\n//# sourceMappingURL=common.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Math utility functions.\n *\n * This file contains some frequently used math function that operates on\n * number[] or Float32Array and return a number. Many of these functions are\n * not-so-thick wrappers around TF.js Core functions. But they offer the\n * convenience of\n * 1) not having to convert the inputs into Tensors,\n * 2) not having to convert the returned Tensors to numbers.\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { scalar, tensor1d } from '@tensorflow/tfjs-core';\nimport { ValueError } from '../errors';\n/**\n * Determine if a number is an integer.\n */\nexport function isInteger(x) {\n return x === parseInt(x.toString(), 10);\n}\n/**\n * Calculate the product of an array of numbers.\n * @param array The array to calculate the product over.\n * @param begin Beginning index, inclusive.\n * @param end Ending index, exclusive.\n * @return The product.\n */\nexport function arrayProd(array, begin, end) {\n if (begin == null) {\n begin = 0;\n }\n if (end == null) {\n end = array.length;\n }\n let prod = 1;\n for (let i = begin; i < end; ++i) {\n prod *= array[i];\n }\n return prod;\n}\n/**\n * A helper function transforms the two input types to an instance of Tensor1D,\n * so the return value can be fed directly into various TF.js Core functions.\n * @param array\n */\nfunction toArray1D(array) {\n array = Array.isArray(array) ? 
new Float32Array(array) : array;\n return tensor1d(array);\n}\n/**\n * Compute minimum value.\n * @param array\n * @return minimum value.\n */\nexport function min(array) {\n return tfc.min(toArray1D(array)).dataSync()[0];\n}\n/**\n * Compute maximum value.\n * @param array\n * @return maximum value\n */\nexport function max(array) {\n return tfc.max(toArray1D(array)).dataSync()[0];\n}\n/**\n * Compute sum of array.\n * @param array\n * @return The sum.\n */\nexport function sum(array) {\n return tfc.sum(toArray1D(array)).dataSync()[0];\n}\n/**\n * Compute mean of array.\n * @param array\n * @return The mean.\n */\nexport function mean(array) {\n return sum(array) / array.length;\n}\n/**\n * Compute variance of array.\n * @param array\n * @return The variance.\n */\nexport function variance(array) {\n const demeaned = tfc.sub(toArray1D(array), scalar(mean(array)));\n const sumSquare = tfc.sum(tfc.mul(demeaned, demeaned)).dataSync()[0];\n return sumSquare / array.length;\n}\n/**\n * Compute median of array.\n * @param array\n * @return The median value.\n */\nexport function median(array) {\n const arraySorted = array.slice().sort((a, b) => a - b);\n const lowIdx = Math.floor((arraySorted.length - 1) / 2);\n const highIdx = Math.ceil((arraySorted.length - 1) / 2);\n if (lowIdx === highIdx) {\n return arraySorted[lowIdx];\n }\n return (arraySorted[lowIdx] + arraySorted[highIdx]) / 2;\n}\n/**\n * Generate an array of integers in [begin, end).\n * @param begin Beginning integer, inclusive.\n * @param end Ending integer, exclusive.\n * @returns Range array.\n * @throws ValueError, iff `end` < `begin`.\n */\nexport function range(begin, end) {\n if (end < begin) {\n throw new ValueError(`end (${end}) < begin (${begin}) is forbidden.`);\n }\n const out = [];\n for (let i = begin; i < end; ++i) {\n out.push(i);\n }\n return out;\n}\n//# sourceMappingURL=math_utils.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * deeplearn.js backend.\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { onesLike as coreOnesLike, scalar, tensor1d, tidy, where, zerosLike as coreZerosLike } from '@tensorflow/tfjs-core';\nimport { checkDataFormat } from '../common';\nimport { NotImplementedError, ValueError } from '../errors';\nimport * as math_utils from '../utils/math_utils';\nimport { imageDataFormat } from './common';\n// tslint:enable\n/* Setting and getting backend from deeplearn.js. */\n// Default deeplearn.js backend is WebGL (GPU).\nlet backend = 'webgl';\nexport function setBackend(requestedBackend) {\n tfc.setBackend(requestedBackend);\n backend = requestedBackend;\n}\nexport function getBackend() {\n return backend;\n}\n/**\n * Indicates whether the backend is operating symbolically.\n *\n * This function will be used to determine how to interpret user code. 
If\n * it returns true, calls to the backend construct a symbolic graph; if\n * it returns false, calls to the backend execute immediately.\n */\nexport function isBackendSymbolic() {\n return false;\n}\n/**\n * Get the number of elements in a Tensor.\n * @param x The Tensor.\n * @return Number of elements in `x`.\n */\nexport function countParams(x) {\n const shape = x.shape;\n if (shape.length > 0) {\n return shape.reduce((a, b) => a * b);\n }\n else {\n // Scalar.\n return 1;\n }\n}\n/**\n * Casts a tensor to a different dtype and returns it.\n * @param x Input tensor.\n * @param dtype String: 'float32'|'int32'|'bool'.\n * @returns Tensor of the specified `dtype`.\n */\nexport function cast(x, dtype) {\n return x.asType(dtype);\n}\n/**\n * Adds a 1-sized dimension at index \"axis\".\n * @param x Input tensor.\n * @param axis Position where to add the new axis.\n * @returns Result of the dimension expansion.\n */\nexport function expandDims(x, axis = -1) {\n const outShape = x.shape.slice();\n if (axis < 0) {\n axis = outShape.length + axis + 1;\n }\n outShape.splice(axis, 0, 1);\n return x.reshape(outShape);\n}\n/**\n * Repeats a 2D tensor.\n *\n * If `x` has shape `[samples, dim]` and `n` is 2, for example, the output\n * will have shape `[samples, 2, dim]`.\n *\n * @param x Input tensor.\n * @param n Integer, number of times to repeat.\n * @returns The result of the repeat operation.\n * @throws ValueError: If input tensor is not 2D.\n */\nexport function repeat(x, n) {\n return tidy(() => {\n if (x.shape.length !== 2) {\n throw new ValueError(`repeat() expects a rank-2 tensor, but received a ` +\n `rank-${x.shape.length} tensor.`);\n }\n const y = expandDims(x, 1);\n return tile(y, [1, n, 1]);\n });\n}\n/**\n * Flatten a Tensor into 1D.\n * @param x Input tensor.\n * @return The result of the flattening `x`.\n */\nexport function flatten(x) {\n const newShape = [math_utils.arrayProd(x.shape)];\n return x.reshape(newShape);\n}\n/**\n * Turn a nD tensor into a 2D tensor with same 0th dimension.\n * In other words, it flattens each data samples of a batch.\n *\n * @param x The tensor to flatten. The rank of this tensor is required to be 2\n * or higher.\n * @return The result of the flattening.\n */\nexport function batchFlatten(x) {\n if (x.rank <= 1) {\n throw new ValueError(`batchFlatten requires a minimum rank of 2. 
Got rank: ${x.rank}.`);\n }\n const newShape = [x.shape[0], math_utils.arrayProd(x.shape, 1)];\n return x.reshape(newShape);\n}\n/**\n * Do slicing along the first axis.\n * @param array input `tf.Tensor`.\n * @param start starting index, inclusive.\n * @param size size of the slice along the first axis.\n * @returns result of the slicing.\n * @throws ValueError: If `array` is of an unsupported subtype of `tf.Tensor`.\n */\nexport function sliceAlongFirstAxis(array, start, size) {\n return tidy(() => {\n switch (array.rank) {\n case 1:\n return tfc.slice1d(array, start, size);\n case 2:\n return tfc.slice2d(array, [start, 0], [size, array.shape[1]]);\n case 3:\n return tfc.slice3d(array, [start, 0, 0], [size, array.shape[1], array.shape[2]]);\n case 4:\n return tfc.slice4d(array, [start, 0, 0, 0], [size, array.shape[1], array.shape[2], array.shape[3]]);\n case 5:\n return tfc.slice(array, [start, 0, 0, 0, 0], [\n size, array.shape[1], array.shape[2], array.shape[3], array.shape[4]\n ]);\n case 6:\n return tfc.slice(array, [start, 0, 0, 0, 0, 0], [\n size, array.shape[1], array.shape[2], array.shape[3], array.shape[4],\n array.shape[5]\n ]);\n default:\n throw new ValueError(`sliceAlongFirstAxis() received an unsupported tensor rank: ` +\n `${array.rank}`);\n }\n });\n}\n/**\n * Do slicing along the last axis.\n * @param array input `tf.Tensor`.\n * @param start starting index, inclusive.\n * @param size size of the slice along the last axis.\n * @returns result of the slicing.\n * @throws ValueError: If `array` is of an unsupported subtype of `tf.Tensor`.\n */\nexport function sliceAlongLastAxis(array, start, size) {\n return tidy(() => {\n switch (array.rank) {\n case 1:\n return tfc.slice1d(array, start, size);\n case 2:\n return tfc.slice2d(array, [0, start], [array.shape[0], size]);\n case 3:\n return tfc.slice3d(array, [0, 0, start], [array.shape[0], array.shape[1], size]);\n case 4:\n return tfc.slice4d(array, [0, 0, 0, start], [array.shape[0], array.shape[1], array.shape[2], size]);\n default:\n throw new ValueError(`sliceAlongLastAxis() received an unsupported tensor rank: ` +\n `${array.rank}`);\n }\n });\n}\n/**\n * Do slicing along the sepcified axis.\n * @param array input `tf.Tensor`.\n * @param start starting index, inclusive.\n * @param size of the slice along the chosen axis.\n * @param choose an axis.\n * @returns result of the slicing.\n * @throws ValueError: If `array` is of an unsupported subtype of `tf.Tensor`.\n */\nexport function sliceAlongAxis(array, start, size, axis) {\n return tidy(() => {\n switch (array.rank) {\n case 1:\n return tfc.slice1d(array, start, size);\n case 2:\n switch (axis) {\n case 1:\n return sliceAlongFirstAxis(array, start, size);\n case 2:\n return sliceAlongLastAxis(array, start, size);\n default:\n throw new ValueError(`The axis is not within the rank of the tensor ` +\n `${axis}`);\n }\n case 3:\n switch (axis) {\n case 1:\n return sliceAlongFirstAxis(array, start, size);\n case 2:\n return tfc.slice3d(array, [0, start, 0], [array.shape[0], size, array.shape[2]]);\n case 3:\n return sliceAlongLastAxis(array, start, size);\n default:\n throw new ValueError(`The axis is not within the rank of the tensor ` +\n `${axis}`);\n }\n case 4:\n switch (axis) {\n case 1:\n return sliceAlongFirstAxis(array, start, size);\n case 2:\n return tfc.slice4d(array, [0, start, 0, 0], [array.shape[0], size, array.shape[2], array.shape[3]]);\n case 3:\n return tfc.slice4d(array, [0, 0, start, 0], [array.shape[0], array.shape[1], size, array.shape[3]]);\n case 
4:\n return sliceAlongLastAxis(array, start, size);\n default:\n throw new ValueError(`The axis is not within the rank of the tensor ` +\n `${axis}`);\n }\n default:\n throw new ValueError(`sliceAlongLastAxis() received an unsupported tensor rank: ` +\n `${array.rank}`);\n }\n });\n}\n/**\n * Concatenates a list of tensors alongside the specified axis.\n * @param tensors `Array` of tensors to concatenate.\n * @param axis Concatenation axis.\n * @returns The result of the concatenation.\n */\nexport function concatenate(tensors, axis = -1) {\n let rank;\n if (axis < 0) {\n rank = tensors[0].rank;\n if (rank !== 0) {\n axis = rank;\n }\n else {\n axis = 0;\n }\n }\n if (axis === tensors[0].rank) {\n // Porting Note: This is necessary because tfc.concat() requires axis to be\n // in the interval [-rank, rank).\n axis = -1;\n }\n // Porting Note: Sparse concat is not supported yet.\n return tfc.concat(tensors, axis);\n}\n/**\n * Concatenate two arrays along the first dimension.\n * @param a The 1st `tf.Tensor` to concatenate.\n * @param b The 2nd `tf.Tensor` to concatenate.\n * @returns Result of the concatenation.\n * @throws ValueError: If `a` is of an unsupported subtype of `tf.Tensor`.\n */\nexport function concatAlongFirstAxis(a, b) {\n switch (a.rank) {\n case 1:\n return tfc.concat1d([a, b]);\n case 2:\n return tfc.concat2d([a, b], 0);\n case 3:\n return tfc.concat3d([a, b], 0);\n case 4:\n return tfc.concat4d([a, b], 0);\n default:\n throw new ValueError(`concatAlongFirstAxis() received an unsupported ` +\n `tensor rank: ${a.rank}`);\n }\n}\n/**\n * Creates a tensor by tiling `x` by `n`.\n * @param x A tensor.\n * @param n An Array of integers or a single integer. If an Array, the length\n * must be the same as the number of dimensions in `x`. If a single integer,\n * it will be treated as an Array of length 1.\n */\nexport function tile(x, n) {\n if (!Array.isArray(n)) {\n n = [n];\n }\n if (x.rank !== n.length) {\n throw new ValueError(`The length of input n (${n.length}) does not match ` +\n `the number of dimensions in input x (${x.rank})`);\n }\n return tfc.tile(x, n);\n}\n/* Creation of random tensors. */\n/**\n * Get a tensor with normal distribution of values.\n *\n * @param shape Shape of the tensor.\n * @param mean mean value of the normal distribution.\n * @param stddev standard deviation of the normal distribution.\n * @param dtype\n * @param seed\n * @return The normal tensor.\n */\nexport function randomNormal(shape, mean = 0.0, stddev = 1.0, dtype, seed) {\n return tfc.randomNormal(shape, mean, stddev, dtype, seed);\n}\n/* Linear Algebra */\n/**\n * Multiply two tensors and returns the result as a tensor.\n *\n * For 2D tensors, this is equivalent to matrix multiplication (matMul).\n * For tensors of higher ranks, it follows the Theano behavior,\n * (e.g. `(2, 3) * (4, 3, 5) -> (2, 4, 5)`). 
From the Theano documentation:\n *\n * For N dimensions it is a sum product over the last axis of x and the\n * second-to-last of y:\n *\n * @param a A tensor of at least rank 2.\n * @param b A tensor of at least rank 2.\n * @param activation (optional) A string identifying the activation\n * function.\n * @return Result of the dot operation.\n */\nexport function dot(a, b, activation, bias) {\n if ((a.rank < 2) || (b.rank < 2)) {\n throw new NotImplementedError(`dot requires both inputs to be rank >= 2` +\n ` but got x shape = ${a.shape} and y shape = ${b.shape}`);\n }\n if (b.rank >= 3) {\n const xLastDim = a.shape.slice(-1)[0];\n const ySecondLastDim = b.shape.slice(-2)[0];\n if (xLastDim !== ySecondLastDim) {\n throw new NotImplementedError(`If rank y >= 3, then the second last dim` +\n ` of y must equal the last dim of x but got x shape = ${a.shape} and ` +\n ` y shape = ${b.shape}`);\n }\n }\n // Handle basic 2D x 2D case.\n if ((a.rank === 2) && (b.rank === 2)) {\n const transposeA = false;\n const transposeB = false;\n // tfc.fused.matMul only fuses certain activation functions. Unsupported\n // activation functions are treated as 'linear' activations, which is\n // equivalent to a no-op.\n return tfc.fused.matMul({\n a,\n b: b,\n transposeA,\n transposeB,\n bias: bias ? reshapeBias(a.rank, bias, imageDataFormat()) : null,\n activation\n });\n }\n else {\n // Reshape x into the analogous 2D Tensor.\n const aFirstDims = a.shape.slice(); // Holds all but the last dim of x.\n const aLastDim = aFirstDims.pop();\n a = a.reshape([-1, aLastDim]);\n // Reshape y into the analogous 2D Tensor, and keep track of the\n // required dimensions to reproduce the output shape.\n const bShape = b.shape.slice();\n const bLastDim = bShape.pop();\n const ySecondLastDim = bShape.pop();\n const yOtherDims = [...bShape, bLastDim];\n // permutation should be like [r-2, 0, 1, 2, ... r-4, r-3, r-1]\n // where r is the rank of y.\n const perm = Array.from({ length: b.rank }, (_, i) => {\n if (i === 0) {\n return b.rank - 2;\n }\n else if (i <= b.rank - 2) {\n return i - 1;\n }\n return i;\n });\n b = b.transpose(perm).reshape([ySecondLastDim, -1]);\n // Multiply x and y as 2D Tensors, and then reshape back to original.\n const outputShape = [...aFirstDims, ...yOtherDims];\n const transposeA = false;\n const transposeB = false;\n return tfc.fused\n .matMul({\n a,\n b,\n transposeA,\n transposeB,\n bias: bias ? reshapeBias(a.rank, bias, imageDataFormat()) : null,\n activation\n })\n .reshape(outputShape);\n }\n}\n/**\n * Compute the sign Tensor of an input Tensor.\n *\n * Elements of the input `tf.Tensor` that are === 0 are mapped to 0.\n * Elements of the input `tf.Tensor` that are > 0 are mapped to 1.\n * Elements of the input `tf.Tensor` that are < 0 are mapped to -1.\n *\n * @param x Input `tf.Tensor`.\n * @return The sign `tf.Tensor`.\n */\nexport function sign(x) {\n // TODO(cais): Move to the core.\n return tidy(() => {\n const zerosLikeX = coreZerosLike(x);\n const onesLikeX = coreOnesLike(x);\n return where(tfc.equal(x, zerosLikeX), zerosLikeX, where(tfc.greater(x, coreZerosLike(x)), onesLikeX, tfc.mul(-1, onesLikeX)));\n });\n}\n/**\n * Computes the one-hot representation of an integer tensor.\n * @param indices nD integer tensor of shape\n * `(batch_size, dim1, dim2, ... dim(n-1))`\n * @param numClasses Integer, number of classes to consider.\n * @returns (n + 1)D one hot representation of the input\n * with shape `(batch_size, dim1, dim2, ... 
dim(n-1), num_classes)`\n */\nexport function oneHot(indices, numClasses) {\n return tidy(() => {\n if (indices.rank !== 1) {\n throw new Error('Only 1D one-hot tensors are supported in the ' +\n 'deeplearn backend, at present.');\n }\n indices = indices.toInt();\n return tfc.oneHot(indices, numClasses).toFloat();\n });\n}\n/* Elementary math functions. */\n/**\n * Retrieves the elements of indices `indices` in the tensor `reference`.\n * @param reference A tensor.\n * @param indices An integer tensor of indices or an `Array` of integers.\n * @param axis Axis along which to perform the gather operation.\n * @returns The result of the gathering as a tensor.\n */\nexport function gather(reference, indices, axis) {\n return tidy(() => {\n if (Array.isArray(indices)) {\n indices = tensor1d(indices, 'int32');\n }\n else {\n indices = indices.toInt();\n }\n return tfc.gather(reference, indices, axis);\n });\n}\n/**\n * Element-wise square.\n * @param x Input tensor.\n * @return element-wise x^2\n */\nexport function square(x) {\n return tfc.mul(x, x);\n}\n/**\n * Element-wise exponentiation.\n *\n * Porting Note: In PyKeras, `a` (the exponent) is a Python integer, which\n * takes advatnage of the backend's (e.g., TensorFlow's) automatic\n * conversion to tensor. Here we allow `a` to be either a number or a tensor.\n *\n * @param x The base tensor.\n * @param a The exponent, tensor or number. If a number, it is rounded to the\n * nearest integer and converted to a tensor.\n * @returns A tensor of the same shape as `x`.\n */\nexport function pow(x, a) {\n return tidy(() => {\n if (typeof (a) === 'number') {\n a = scalar(Math.round(a), 'int32');\n }\n if (a.dtype !== 'int32') {\n throw new NotImplementedError(`Non-int32 dtype (${a.dtype}) is not supported by pow() yet`);\n }\n return tfc.pow(x, a);\n });\n}\n/**\n * Reshapes bias tensor according to rank of x.\n */\nfunction reshapeBias(xRank, bias, dataFormat) {\n const biasShape = bias.shape;\n if (bias.rank !== 1 && bias.rank !== xRank) {\n throw new ValueError(`Unexpected bias dimensions: ${bias.rank}` +\n `; expected it to be 1 or ${xRank}`);\n }\n if (xRank === 5) {\n if (dataFormat === 'channelsFirst') {\n if (biasShape.length === 1) {\n return bias.reshape([1, biasShape[0], 1, 1, 1]);\n }\n else {\n return bias.reshape([1, biasShape[3], biasShape[0], biasShape[1], biasShape[2]]);\n }\n }\n else if (dataFormat === 'channelsLast') {\n if (biasShape.length === 1) {\n return bias.reshape([1, 1, 1, 1, biasShape[0]]);\n }\n else {\n return bias.reshape([1].concat(biasShape));\n }\n }\n }\n else if (xRank === 4) {\n if (dataFormat === 'channelsFirst') {\n if (biasShape.length === 1) {\n return bias.reshape([1, biasShape[0], 1, 1]);\n }\n else {\n return bias.reshape([1, biasShape[2], biasShape[0], biasShape[1]]);\n }\n }\n else if (dataFormat === 'channelsLast') {\n if (biasShape.length === 1) {\n return bias.reshape([1, 1, 1, biasShape[0]]);\n }\n else {\n return bias.reshape([1].concat(biasShape));\n }\n }\n }\n else if (xRank === 3) {\n if (dataFormat === 'channelsFirst') {\n if (biasShape.length === 1) {\n return bias.reshape([1, biasShape[0], 1]);\n }\n else {\n return bias.reshape([1, biasShape[1], biasShape[0]]);\n }\n }\n else if (dataFormat === 'channelsLast') {\n if (biasShape.length === 1) {\n return bias.reshape([1, 1, biasShape[0]]);\n }\n else {\n return bias.reshape([1].concat(biasShape));\n }\n }\n }\n else if (xRank < 3) {\n return bias;\n }\n throw new ValueError(`Unsupported input rank by biasAdd: ${bias.rank}`);\n}\n/* 
Neural-network operations. */\n/**\n * Add a bias to a tensor.\n *\n * @param x The tensor to add the bias to.\n * @param bias The bias to add to `x`. Must be 1D or the same rank as `x`.\n * @return Result of the bias adding.\n * @throws ValueError: If the rank of `bias` is incorrect.\n */\nexport function biasAdd(x, bias, dataFormat) {\n return tidy(() => {\n if (dataFormat == null) {\n dataFormat = imageDataFormat();\n }\n checkDataFormat(dataFormat);\n return x.add(reshapeBias(x.rank, bias, dataFormat));\n });\n}\n/**\n * Exponential linear unit (ELU).\n * @param x A tensor or variable to compute the activation function for.\n * @param alpha: A scalar, a scaling factor for the negative section.\n * @return Output of the ELU operation.\n */\nexport function elu(x, alpha = 1) {\n // TODO(cais): Add support for alpha values other than 1.\n if (alpha !== 1) {\n throw new NotImplementedError(`Support for alpha values other than 1 (${alpha}) is not implemented ` +\n `yet.`);\n }\n return tfc.elu(x);\n}\n/**\n * Softsign of a tensor.\n *\n * Defined as x / (abs(x) + 1), element-wise.\n *\n * @param x: Input.\n * @returns Output.\n */\nexport function softsign(x) {\n return tidy(() => tfc.div(x, tfc.abs(x).add(1)));\n}\n/**\n * Sets entries in `x` to zero at random, while scaling the entire tensor.\n *\n * @param x input tensor.\n * @param level fraction of the entries in the tensor that will be set to 0.\n * @param noiseShape shape of randomly generated keep/drop flags, must be\n * broadcastable to the shape of `x`. Optional.\n * @param seed random seed to ensure determinism. Optional.\n * @returns Result of the dropout operation.\n */\nexport function dropout(x, level, noiseShape, seed) {\n return tidy(() => tfc.dropout(x, level, noiseShape, seed));\n}\n/**\n * Element-wise, segment-wise linear approximation of sigmoid.\n *\n * Returns `0.` if `x < -2.5`, `1.` if `x > 2.5`.\n * In `-2.5 <= x <= 2.5`, returns `0.2 * x + 0.5`.\n *\n * @param x Input tensor.\n * @returns Output tensor.\n */\nexport function hardSigmoid(x) {\n return tidy(() => {\n const y = tfc.add(.5, tfc.mul(.2, x));\n return tfc.clipByValue(y, 0, 1);\n });\n}\n/**\n * Invoke `x` in the training phase, and `alt` otherwise.\n *\n * Porting Note: We do not create placeholder tensors for the `training`\n * boolean flag here, because there is no such thing in the TF.js imperative\n * backend.\n *\n * @param x The function to invoke iff `training` is `true`.\n * @param alt The function to invoke iff `training` is `false`.\n * @param training Boolean flag for whether training phase is active.\n * @returns The return value of `x()` if `training` is `true`, or the return\n * value of `alt()` if `training` is `false`.\n */\nexport function inTrainPhase(x, alt, training = false) {\n return training ? 
x() : alt();\n}\n//# sourceMappingURL=tfjs_backend.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nexport const VALID_FAN_MODE_VALUES = ['fanIn', 'fanOut', 'fanAvg'];\nexport const VALID_DISTRIBUTION_VALUES = ['normal', 'uniform', 'truncatedNormal'];\n// We can't easily extract a string[] from the string union type, but we can\n// recapitulate the list, enforcing at compile time that the values are valid\n// and that we have the right number of them.\n/**\n * A string array of valid Initializer class names.\n *\n * This is guaranteed to match the `InitializerClassName` union type.\n */\nexport const initializerClassNames = [\n 'Zeros', 'Ones', 'Constant', 'RandomNormal', 'RandomUniform',\n 'TruncatedNormal', 'VarianceScaling', 'Orthogonal', 'Identity'\n];\n//# sourceMappingURL=initializer_config.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport { eye, linalg, mul, ones, randomUniform, scalar, serialization, tidy, truncatedNormal, zeros } from '@tensorflow/tfjs-core';\nimport * as K from './backend/tfjs_backend';\nimport { checkDataFormat } from './common';\nimport { NotImplementedError, ValueError } from './errors';\nimport { VALID_DISTRIBUTION_VALUES, VALID_FAN_MODE_VALUES } from './keras_format/initializer_config';\nimport { checkStringTypeUnionValue, deserializeKerasObject, serializeKerasObject } from './utils/generic_utils';\nimport { arrayProd } from './utils/math_utils';\nexport function checkFanMode(value) {\n checkStringTypeUnionValue(VALID_FAN_MODE_VALUES, 'FanMode', value);\n}\nexport function checkDistribution(value) {\n checkStringTypeUnionValue(VALID_DISTRIBUTION_VALUES, 'Distribution', value);\n}\n/**\n * Initializer base class.\n *\n * @doc {\n * heading: 'Initializers', subheading: 'Classes', namespace: 'initializers'}\n */\nexport class Initializer extends serialization.Serializable {\n fromConfigUsesCustomObjects() {\n return false;\n }\n getConfig() {\n return {};\n }\n}\nexport class Zeros extends Initializer {\n apply(shape, dtype) {\n return zeros(shape, dtype);\n }\n}\n/** @nocollapse */\nZeros.className = 'Zeros';\nserialization.registerClass(Zeros);\nexport class Ones extends Initializer {\n apply(shape, dtype) {\n return ones(shape, dtype);\n }\n}\n/** @nocollapse */\nOnes.className = 'Ones';\nserialization.registerClass(Ones);\nexport class Constant extends Initializer {\n constructor(args) {\n super();\n if (typeof args !== 'object') {\n throw new ValueError(`Expected argument of type ConstantConfig but got ${args}`);\n }\n if (args.value === undefined) {\n throw new ValueError(`config must have value set but got ${args}`);\n }\n this.value = args.value;\n }\n apply(shape, dtype) {\n return tidy(() => mul(scalar(this.value), ones(shape, dtype)));\n }\n getConfig() {\n return {\n value: this.value,\n };\n }\n}\n/** @nocollapse */\nConstant.className = 'Constant';\nserialization.registerClass(Constant);\nexport class RandomUniform extends Initializer {\n constructor(args) {\n super();\n this.DEFAULT_MINVAL = -0.05;\n this.DEFAULT_MAXVAL = 0.05;\n this.minval = 
args.minval || this.DEFAULT_MINVAL;\n this.maxval = args.maxval || this.DEFAULT_MAXVAL;\n this.seed = args.seed;\n }\n apply(shape, dtype) {\n return randomUniform(shape, this.minval, this.maxval, dtype);\n }\n getConfig() {\n return { minval: this.minval, maxval: this.maxval, seed: this.seed };\n }\n}\n/** @nocollapse */\nRandomUniform.className = 'RandomUniform';\nserialization.registerClass(RandomUniform);\nexport class RandomNormal extends Initializer {\n constructor(args) {\n super();\n this.DEFAULT_MEAN = 0.;\n this.DEFAULT_STDDEV = 0.05;\n this.mean = args.mean || this.DEFAULT_MEAN;\n this.stddev = args.stddev || this.DEFAULT_STDDEV;\n this.seed = args.seed;\n }\n apply(shape, dtype) {\n dtype = dtype || 'float32';\n if (dtype !== 'float32' && dtype !== 'int32') {\n throw new NotImplementedError(`randomNormal does not support dType ${dtype}.`);\n }\n return K.randomNormal(shape, this.mean, this.stddev, dtype, this.seed);\n }\n getConfig() {\n return { mean: this.mean, stddev: this.stddev, seed: this.seed };\n }\n}\n/** @nocollapse */\nRandomNormal.className = 'RandomNormal';\nserialization.registerClass(RandomNormal);\nexport class TruncatedNormal extends Initializer {\n constructor(args) {\n super();\n this.DEFAULT_MEAN = 0.;\n this.DEFAULT_STDDEV = 0.05;\n this.mean = args.mean || this.DEFAULT_MEAN;\n this.stddev = args.stddev || this.DEFAULT_STDDEV;\n this.seed = args.seed;\n }\n apply(shape, dtype) {\n dtype = dtype || 'float32';\n if (dtype !== 'float32' && dtype !== 'int32') {\n throw new NotImplementedError(`truncatedNormal does not support dType ${dtype}.`);\n }\n return truncatedNormal(shape, this.mean, this.stddev, dtype, this.seed);\n }\n getConfig() {\n return { mean: this.mean, stddev: this.stddev, seed: this.seed };\n }\n}\n/** @nocollapse */\nTruncatedNormal.className = 'TruncatedNormal';\nserialization.registerClass(TruncatedNormal);\nexport class Identity extends Initializer {\n constructor(args) {\n super();\n this.gain = args.gain != null ? 
args.gain : 1.0;\n }\n apply(shape, dtype) {\n return tidy(() => {\n if (shape.length !== 2 || shape[0] !== shape[1]) {\n throw new ValueError('Identity matrix initializer can only be used for' +\n ' 2D square matrices.');\n }\n else {\n return mul(this.gain, eye(shape[0]));\n }\n });\n }\n getConfig() {\n return { gain: this.gain };\n }\n}\n/** @nocollapse */\nIdentity.className = 'Identity';\nserialization.registerClass(Identity);\n/**\n * Computes the number of input and output units for a weight shape.\n * @param shape Shape of weight.\n * @param dataFormat data format to use for convolution kernels.\n * Note that all kernels in Keras are standardized on the\n * CHANNEL_LAST ordering (even when inputs are set to CHANNEL_FIRST).\n * @return An length-2 array: fanIn, fanOut.\n */\nfunction computeFans(shape, dataFormat = 'channelsLast') {\n let fanIn;\n let fanOut;\n checkDataFormat(dataFormat);\n if (shape.length === 2) {\n fanIn = shape[0];\n fanOut = shape[1];\n }\n else if ([3, 4, 5].indexOf(shape.length) !== -1) {\n if (dataFormat === 'channelsFirst') {\n const receptiveFieldSize = arrayProd(shape, 2);\n fanIn = shape[1] * receptiveFieldSize;\n fanOut = shape[0] * receptiveFieldSize;\n }\n else if (dataFormat === 'channelsLast') {\n const receptiveFieldSize = arrayProd(shape, 0, shape.length - 2);\n fanIn = shape[shape.length - 2] * receptiveFieldSize;\n fanOut = shape[shape.length - 1] * receptiveFieldSize;\n }\n }\n else {\n const shapeProd = arrayProd(shape);\n fanIn = Math.sqrt(shapeProd);\n fanOut = Math.sqrt(shapeProd);\n }\n return [fanIn, fanOut];\n}\nexport class VarianceScaling extends Initializer {\n /**\n * Constructor of VarianceScaling.\n * @throws ValueError for invalid value in scale.\n */\n constructor(args) {\n super();\n if (args.scale < 0.0) {\n throw new ValueError(`scale must be a positive float. Got: ${args.scale}`);\n }\n this.scale = args.scale == null ? 1.0 : args.scale;\n this.mode = args.mode == null ? 'fanIn' : args.mode;\n checkFanMode(this.mode);\n this.distribution =\n args.distribution == null ? 'normal' : args.distribution;\n checkDistribution(this.distribution);\n this.seed = args.seed;\n }\n apply(shape, dtype) {\n const fans = computeFans(shape);\n const fanIn = fans[0];\n const fanOut = fans[1];\n let scale = this.scale;\n if (this.mode === 'fanIn') {\n scale /= Math.max(1, fanIn);\n }\n else if (this.mode === 'fanOut') {\n scale /= Math.max(1, fanOut);\n }\n else {\n scale /= Math.max(1, (fanIn + fanOut) / 2);\n }\n if (this.distribution === 'normal') {\n const stddev = Math.sqrt(scale);\n dtype = dtype || 'float32';\n if (dtype !== 'float32' && dtype !== 'int32') {\n throw new NotImplementedError(`${this.getClassName()} does not support dType ${dtype}.`);\n }\n return truncatedNormal(shape, 0, stddev, dtype, this.seed);\n }\n else {\n const limit = Math.sqrt(3 * scale);\n return randomUniform(shape, -limit, limit, dtype);\n }\n }\n getConfig() {\n return {\n scale: this.scale,\n mode: this.mode,\n distribution: this.distribution,\n seed: this.seed\n };\n }\n}\n/** @nocollapse */\nVarianceScaling.className = 'VarianceScaling';\nserialization.registerClass(VarianceScaling);\nexport class GlorotUniform extends VarianceScaling {\n /**\n * Constructor of GlorotUniform\n * @param scale\n * @param mode\n * @param distribution\n * @param seed\n */\n constructor(args) {\n super({\n scale: 1.0,\n mode: 'fanAvg',\n distribution: 'uniform',\n seed: args == null ? 
null : args.seed\n });\n }\n getClassName() {\n // In Python Keras, GlorotUniform is not a class, but a helper method\n // that creates a VarianceScaling object. Use 'VarianceScaling' as\n // class name to be compatible with that.\n return VarianceScaling.className;\n }\n}\n/** @nocollapse */\nGlorotUniform.className = 'GlorotUniform';\nserialization.registerClass(GlorotUniform);\nexport class GlorotNormal extends VarianceScaling {\n /**\n * Constructor of GlorotNormal.\n * @param scale\n * @param mode\n * @param distribution\n * @param seed\n */\n constructor(args) {\n super({\n scale: 1.0,\n mode: 'fanAvg',\n distribution: 'normal',\n seed: args == null ? null : args.seed\n });\n }\n getClassName() {\n // In Python Keras, GlorotNormal is not a class, but a helper method\n // that creates a VarianceScaling object. Use 'VarianceScaling' as\n // class name to be compatible with that.\n return VarianceScaling.className;\n }\n}\n/** @nocollapse */\nGlorotNormal.className = 'GlorotNormal';\nserialization.registerClass(GlorotNormal);\nexport class HeNormal extends VarianceScaling {\n constructor(args) {\n super({\n scale: 2.0,\n mode: 'fanIn',\n distribution: 'normal',\n seed: args == null ? null : args.seed\n });\n }\n getClassName() {\n // In Python Keras, HeNormal is not a class, but a helper method\n // that creates a VarianceScaling object. Use 'VarianceScaling' as\n // class name to be compatible with that.\n return VarianceScaling.className;\n }\n}\n/** @nocollapse */\nHeNormal.className = 'HeNormal';\nserialization.registerClass(HeNormal);\nexport class HeUniform extends VarianceScaling {\n constructor(args) {\n super({\n scale: 2.0,\n mode: 'fanIn',\n distribution: 'uniform',\n seed: args == null ? null : args.seed\n });\n }\n getClassName() {\n // In Python Keras, HeUniform is not a class, but a helper method\n // that creates a VarianceScaling object. Use 'VarianceScaling' as\n // class name to be compatible with that.\n return VarianceScaling.className;\n }\n}\n/** @nocollapse */\nHeUniform.className = 'HeUniform';\nserialization.registerClass(HeUniform);\nexport class LeCunNormal extends VarianceScaling {\n constructor(args) {\n super({\n scale: 1.0,\n mode: 'fanIn',\n distribution: 'normal',\n seed: args == null ? null : args.seed\n });\n }\n getClassName() {\n // In Python Keras, LeCunNormal is not a class, but a helper method\n // that creates a VarianceScaling object. Use 'VarianceScaling' as\n // class name to be compatible with that.\n return VarianceScaling.className;\n }\n}\n/** @nocollapse */\nLeCunNormal.className = 'LeCunNormal';\nserialization.registerClass(LeCunNormal);\nexport class LeCunUniform extends VarianceScaling {\n constructor(args) {\n super({\n scale: 1.0,\n mode: 'fanIn',\n distribution: 'uniform',\n seed: args == null ? null : args.seed\n });\n }\n getClassName() {\n // In Python Keras, LeCunUniform is not a class, but a helper method\n // that creates a VarianceScaling object. Use 'VarianceScaling' as\n // class name to be compatible with that.\n return VarianceScaling.className;\n }\n}\n/** @nocollapse */\nLeCunUniform.className = 'LeCunNormal';\nserialization.registerClass(LeCunUniform);\nexport class Orthogonal extends Initializer {\n constructor(args) {\n super();\n this.DEFAULT_GAIN = 1;\n this.gain = args.gain == null ? 
this.DEFAULT_GAIN : args.gain;\n this.seed = args.seed;\n if (this.seed != null) {\n throw new NotImplementedError('Random seed is not implemented for Orthogonal Initializer yet.');\n }\n }\n apply(shape, dtype) {\n return tidy(() => {\n if (shape.length < 2) {\n throw new NotImplementedError('Shape must be at least 2D.');\n }\n if (shape[0] * shape[1] > 2000) {\n console.warn(`Orthogonal initializer is being called on a matrix with more ` +\n `than 2000 (${shape[0] * shape[1]}) elements: ` +\n `Slowness may result.`);\n }\n // TODO(cais): Add seed support.\n const normalizedShape = shape[0] > shape[1] ? [shape[1], shape[0]] : shape;\n const a = K.randomNormal(normalizedShape, 0, 1, 'float32');\n let q = linalg.gramSchmidt(a);\n if (shape[0] > shape[1]) {\n q = q.transpose();\n }\n return mul(this.gain, q);\n });\n }\n getConfig() {\n return {\n gain: this.gain,\n seed: this.seed,\n };\n }\n}\n/** @nocollapse */\nOrthogonal.className = 'Orthogonal';\nserialization.registerClass(Orthogonal);\n// Maps the JavaScript-like identifier keys to the corresponding registry\n// symbols.\nexport const INITIALIZER_IDENTIFIER_REGISTRY_SYMBOL_MAP = {\n 'constant': 'Constant',\n 'glorotNormal': 'GlorotNormal',\n 'glorotUniform': 'GlorotUniform',\n 'heNormal': 'HeNormal',\n 'heUniform': 'HeUniform',\n 'identity': 'Identity',\n 'leCunNormal': 'LeCunNormal',\n 'leCunUniform': 'LeCunUniform',\n 'ones': 'Ones',\n 'orthogonal': 'Orthogonal',\n 'randomNormal': 'RandomNormal',\n 'randomUniform': 'RandomUniform',\n 'truncatedNormal': 'TruncatedNormal',\n 'varianceScaling': 'VarianceScaling',\n 'zeros': 'Zeros'\n};\nfunction deserializeInitializer(config, customObjects = {}) {\n return deserializeKerasObject(config, serialization.SerializationMap.getMap().classNameMap, customObjects, 'initializer');\n}\nexport function serializeInitializer(initializer) {\n return serializeKerasObject(initializer);\n}\nexport function getInitializer(identifier) {\n if (typeof identifier === 'string') {\n const className = identifier in INITIALIZER_IDENTIFIER_REGISTRY_SYMBOL_MAP ?\n INITIALIZER_IDENTIFIER_REGISTRY_SYMBOL_MAP[identifier] :\n identifier;\n /* We have four 'helper' classes for common initializers that\n all get serialized as 'VarianceScaling' and shouldn't go through\n the deserializeInitializer pathway. 
*/\n if (className === 'GlorotNormal') {\n return new GlorotNormal();\n }\n else if (className === 'GlorotUniform') {\n return new GlorotUniform();\n }\n else if (className === 'HeNormal') {\n return new HeNormal();\n }\n else if (className === 'HeUniform') {\n return new HeUniform();\n }\n else if (className === 'LeCunNormal') {\n return new LeCunNormal();\n }\n else if (className === 'LeCunUniform') {\n return new LeCunUniform();\n }\n else {\n const config = {};\n config['className'] = className;\n config['config'] = {};\n return deserializeInitializer(config);\n }\n }\n else if (identifier instanceof Initializer) {\n return identifier;\n }\n else {\n return deserializeInitializer(identifier);\n }\n}\n//# sourceMappingURL=initializers.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n// tslint:disable-next-line:max-line-length\nimport { Constant, GlorotNormal, GlorotUniform, HeNormal, HeUniform, Identity, LeCunNormal, LeCunUniform, Ones, Orthogonal, RandomNormal, RandomUniform, TruncatedNormal, VarianceScaling, Zeros } from './initializers';\n/**\n * Initializer that generates tensors initialized to 0.\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function zeros() {\n return new Zeros();\n}\n/**\n * Initializer that generates tensors initialized to 1.\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function ones() {\n return new Ones();\n}\n/**\n * Initializer that generates values initialized to some constant.\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function constant(args) {\n return new Constant(args);\n}\n/**\n * Initializer that generates random values initialized to a uniform\n * distribution.\n *\n * Values will be distributed uniformly between the configured minval and\n * maxval.\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function randomUniform(args) {\n return new RandomUniform(args);\n}\n/**\n * Initializer that generates random values initialized to a normal\n * distribution.\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function randomNormal(args) {\n return new RandomNormal(args);\n}\n/**\n * Initializer that generates random values initialized to a truncated normal.\n * distribution.\n *\n * These values are similar to values from a `RandomNormal` except that values\n * more than two standard deviations from the mean are discarded and re-drawn.\n * This is the recommended initializer for neural network weights and filters.\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function truncatedNormal(args) {\n return new TruncatedNormal(args);\n}\n/**\n * Initializer that generates the identity matrix.\n * Only use for square 2D matrices.\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function identity(args) {\n return new Identity(args);\n}\n/**\n * Initializer capable of adapting its scale to the shape of weights.\n * With distribution=NORMAL, samples are drawn from a truncated normal\n * distribution centered on zero, with `stddev = sqrt(scale / n)` where n is:\n * - number of input units in the weight tensor, if mode = FAN_IN.\n * - number of output units, if mode = FAN_OUT.\n * - average of the 
numbers of input and output units, if mode = FAN_AVG.\n * With distribution=UNIFORM,\n * samples are drawn from a uniform distribution\n * within [-limit, limit], with `limit = sqrt(3 * scale / n)`.\n *\n * @doc {heading: 'Initializers',namespace: 'initializers'}\n */\nexport function varianceScaling(config) {\n return new VarianceScaling(config);\n}\n/**\n * Glorot uniform initializer, also called Xavier uniform initializer.\n * It draws samples from a uniform distribution within [-limit, limit]\n * where `limit` is `sqrt(6 / (fan_in + fan_out))`\n * where `fan_in` is the number of input units in the weight tensor\n * and `fan_out` is the number of output units in the weight tensor\n *\n * Reference:\n * Glorot & Bengio, AISTATS 2010\n * http://jmlr.org/proceedings/papers/v9/glorot10a/glorot10a.pdf.\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function glorotUniform(args) {\n return new GlorotUniform(args);\n}\n/**\n * Glorot normal initializer, also called Xavier normal initializer.\n * It draws samples from a truncated normal distribution centered on 0\n * with `stddev = sqrt(2 / (fan_in + fan_out))`\n * where `fan_in` is the number of input units in the weight tensor\n * and `fan_out` is the number of output units in the weight tensor.\n *\n * Reference:\n * Glorot & Bengio, AISTATS 2010\n * http://jmlr.org/proceedings/papers/v9/glorot10a/glorot10a.pdf\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function glorotNormal(args) {\n return new GlorotNormal(args);\n}\n/**\n * He normal initializer.\n *\n * It draws samples from a truncated normal distribution centered on 0\n * with `stddev = sqrt(2 / fanIn)`\n * where `fanIn` is the number of input units in the weight tensor.\n *\n * Reference:\n * He et al., http://arxiv.org/abs/1502.01852\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function heNormal(args) {\n return new HeNormal(args);\n}\n/**\n * He uniform initializer.\n *\n * It draws samples from a uniform distribution within [-limit, limit]\n * where `limit` is `sqrt(6 / fan_in)`\n * where `fanIn` is the number of input units in the weight tensor.\n *\n * Reference:\n * He et al., http://arxiv.org/abs/1502.01852\n *\n * @doc {heading: 'Initializers',namespace: 'initializers'}\n */\nexport function heUniform(args) {\n return new HeUniform(args);\n}\n/**\n * LeCun normal initializer.\n *\n * It draws samples from a truncated normal distribution centered on 0\n * with `stddev = sqrt(1 / fanIn)`\n * where `fanIn` is the number of input units in the weight tensor.\n *\n * References:\n * [Self-Normalizing Neural Networks](https://arxiv.org/abs/1706.02515)\n * [Efficient Backprop](http://yann.lecun.com/exdb/publis/pdf/lecun-98b.pdf)\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function leCunNormal(args) {\n return new LeCunNormal(args);\n}\n/**\n * LeCun uniform initializer.\n *\n * It draws samples from a uniform distribution in the interval\n * `[-limit, limit]` with `limit = sqrt(3 / fanIn)`,\n * where `fanIn` is the number of input units in the weight tensor.\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function leCunUniform(args) {\n return new LeCunUniform(args);\n}\n/**\n * Initializer that generates a random orthogonal matrix.\n *\n * Reference:\n * [Saxe et al., http://arxiv.org/abs/1312.6120](http://arxiv.org/abs/1312.6120)\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function 
orthogonal(args) {\n return new Orthogonal(args);\n}\n//# sourceMappingURL=exports_initializers.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Utilities related to persistent state in the backend.\n */\n/**\n * An ID to track `tf.SymbolicTensor`s and derived classes.\n * Required in different places in engine/topology.ts to identify unique\n * tensors.\n */\nlet _nextUniqueTensorId = 0;\nexport function getNextUniqueTensorId() {\n return _nextUniqueTensorId++;\n}\nconst _uidPrefixes = {};\n/**\n * Provides a unique UID given a string prefix.\n *\n * @param prefix\n */\nexport function getUid(prefix = '') {\n if (!(prefix in _uidPrefixes)) {\n _uidPrefixes[prefix] = 0;\n }\n _uidPrefixes[prefix] += 1;\n return prefix + _uidPrefixes[prefix].toString();\n}\n//# sourceMappingURL=state.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport { ValueError } from '../errors';\n// tslint:enable\n/**\n * Determine whether the input is an Array of Shapes.\n */\nexport function isArrayOfShapes(x) {\n return Array.isArray(x) && Array.isArray(x[0]);\n}\n/**\n * Special case of normalizing shapes to lists.\n *\n * @param x A shape or list of shapes to normalize into a list of Shapes.\n * @return A list of Shapes.\n */\nexport function normalizeShapeList(x) {\n if (x.length === 0) {\n return [];\n }\n if (!Array.isArray(x[0])) {\n return [x];\n }\n return x;\n}\n/**\n * Helper function to obtain exactly one Tensor.\n * @param xs: A single `tf.Tensor` or an `Array` of `tf.Tensor`s.\n * @return A single `tf.Tensor`. If `xs` is an `Array`, return the first one.\n * @throws ValueError: If `xs` is an `Array` and its length is not 1.\n */\nexport function getExactlyOneTensor(xs) {\n let x;\n if (Array.isArray(xs)) {\n if (xs.length !== 1) {\n throw new ValueError(`Expected Tensor length to be 1; got ${xs.length}`);\n }\n x = xs[0];\n }\n else {\n x = xs;\n }\n return x;\n}\n/**\n * Helper function to obtain exactly on instance of Shape.\n *\n * @param shapes Input single `Shape` or Array of `Shape`s.\n * @returns If input is a single `Shape`, return it unchanged. 
If the input is\n * an `Array` containing exactly one instance of `Shape`, return the instance.\n * Otherwise, throw a `ValueError`.\n * @throws ValueError: If input is an `Array` of `Shape`s, and its length is not\n * 1.\n */\nexport function getExactlyOneShape(shapes) {\n if (Array.isArray(shapes) && Array.isArray(shapes[0])) {\n if (shapes.length === 1) {\n shapes = shapes;\n return shapes[0];\n }\n else {\n throw new ValueError(`Expected exactly 1 Shape; got ${shapes.length}`);\n }\n }\n else {\n return shapes;\n }\n}\n//# sourceMappingURL=types_utils.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Count the elements in an Array of LayerVariables.\n *\n * @param weights: The LayerVariables of which the constituent numbers are to\n * be counted.\n * @returns A count of the elements in all the LayerVariables\n */\nexport function countParamsInWeights(weights) {\n let count = 0;\n for (const weight of weights) {\n if (weight.shape.length === 0) {\n count += 1;\n }\n else {\n count += weight.shape.reduce((a, b) => a * b);\n }\n }\n return count;\n}\n//# sourceMappingURL=variable_utils.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { variableGrads } from '@tensorflow/tfjs-core';\nimport { getNextUniqueTensorId } from './backend/state';\nimport { getScopedTensorName, getUniqueTensorName } from './common';\nimport { NotImplementedError } from './errors';\nconst DEFAULT_VARIABLE_NAME_PREFIX = 'Variable';\n/**\n * A `tf.layers.LayerVariable` is similar to a `tf.Tensor` in that it has a\n * dtype and shape, but its value is mutable. The value is itself represented\n * as a`tf.Tensor`, and can be read with the `read()` method and updated with\n * the `write()` method.\n */\nexport class LayerVariable {\n /**\n * Construct Variable from a `tf.Tensor`.\n *\n * If not explicitly named, the Variable will be given a name with the\n * prefix 'Variable'. Variable names are unique. In the case of name\n * collision, suffixies '_' will be added to the name.\n *\n * @param val Initial value of the Variable.\n * @param name Name of the variable. If `null` or `undefined` is provided, it\n * will default a name with the prefix 'Variable'.\n * @param constraint Optional, projection function to be applied to the\n * variable after optimize updates\n * @throws ValueError if `name` is `null` or `undefined`.\n */\n constructor(val, dtype = 'float32', name = DEFAULT_VARIABLE_NAME_PREFIX, trainable = true, constraint = null) {\n this.dtype = dtype == null ? 'float32' : dtype;\n this.shape = val.shape;\n this.id = getNextUniqueTensorId();\n name = name == null ? 
DEFAULT_VARIABLE_NAME_PREFIX : name;\n this.originalName = getScopedTensorName(name);\n this.name = getUniqueTensorName(this.originalName);\n this.trainable_ = trainable;\n this.constraint = constraint;\n this.val = tfc.variable(val, this.trainable_, this.name, this.dtype);\n }\n /**\n * Get a snapshot of the Variable's value.\n *\n * The returned value is a snapshot of the Variable's value at the time of\n * the invocation. Future mutations in the value of the tensor will only\n * be reflected by future calls to this method.\n */\n read() {\n this.assertNotDisposed();\n return this.val;\n }\n /**\n * Update the value of the Variable.\n *\n * @param newVal: The new value to update to. Must be consistent with the\n * dtype and shape of the Variable.\n * @return This Variable.\n */\n write(newVal) {\n // TODO(cais): Once TF.js Core supports Tensor.dtype, check dtype match.\n this.assertNotDisposed();\n checkShapesMatch(this.val, newVal);\n // Skip updating if this is the exact same tensor.\n if (this.val.id !== newVal.id) {\n this.val.assign(newVal);\n if (this.constraint != null) {\n this.val.assign(this.constraint.apply(this.val));\n }\n }\n return this;\n }\n /**\n * Dispose this LayersVariable instance from memory.\n */\n dispose() {\n this.assertNotDisposed();\n this.val.dispose();\n }\n assertNotDisposed() {\n if (this.val.isDisposed) {\n throw new Error(`LayersVariable ${this.name} is already disposed.`);\n }\n }\n get trainable() {\n return this.trainable_;\n }\n set trainable(trainable) {\n this.trainable_ = trainable;\n this.val.trainable = trainable;\n }\n}\nfunction checkShapesMatch(x, y) {\n if (x.shape.toString() !== y.shape.toString()) {\n throw new Error('Shape mismatch: ' + JSON.stringify(x.shape) + ' vs. ' +\n JSON.stringify(y.shape));\n }\n}\n/**\n * Create a Variable.\n * @param x The initial value of the `Variable`.\n * @param dtype optional, the type of the variable.\n * @param name optional, the name of the variable, default provided by\n * Variable.\n * @param constraint optional, a constraint to be applied after every update.\n * @return The newly instantiated `Variable`.\n */\nexport function variable(x, dtype, name, constraint) {\n return new LayerVariable(x, dtype, name, true, constraint);\n}\n/**\n * Instantiates an all-zeros Variable and returns it.\n *\n * @param shape Shape of the tensor.\n * @param dtype DType of the tensor.\n * @param name Name of the tensor.\n * @return An all-zero Variable.\n */\nexport function zerosVariable(shape, dtype, name) {\n // TODO(cais): Implement logic for dtype.\n return new LayerVariable(tfc.zeros(shape), dtype, name);\n}\n/**\n * Instantiates an all-zeros tensor of the same shape as another tensor.\n *\n * @param x The other tensor.\n * @param dtype DType of the tensor.\n * @param name Name of the tensor.\n * @return A newly instantiated Variable.\n */\nexport function zerosLike(x, dtype, name) {\n return new LayerVariable(tfc.zerosLike(x), dtype, name);\n}\n/**\n * Instantiates an all-ones tensor and returns it.\n *\n * @param shape Shape of the tensor.\n * @param dtype DType of the tensor.\n * @param name Name of the tensor.\n * @return An all-ones Variable.\n */\nexport function onesVariable(shape, dtype, name) {\n // TODO(cais): Implement logic for dtype.\n const allocated = tfc.ones(shape);\n return new LayerVariable(allocated, dtype, name);\n}\n/**\n * Instantiates an all-ones tensor of the same shape as another tensor.\n *\n * @param x The other tensor.\n * @param dtype DType of the tensor.\n * @param name Name of the 
tensor.\n * @return A newly instantiated Variable.\n */\nexport function onesLike(x, dtype, name) {\n const allocated = tfc.onesLike(x);\n return new LayerVariable(allocated, dtype, name);\n}\n/**\n * Instantiate an identity matrix and returns it, as a Variable\n *\n * @param size Number of rows/columns.\n * @param dtype Data type of returned Variable.\n * @param name Name of returned Variable.\n * @return A Variable, an identity matrix.\n */\nexport function eyeVariable(size, dtype, name) {\n return new LayerVariable(tfc.eye(size), dtype, name);\n}\n/**\n * Get a Variable with uniform distribution of values.\n * @param shape Shape of the tensor.\n * @param minval Lower bound of the uniform distribution.\n * @param maxval Upper bound of the uniform distribution.\n * @param dtype\n * @param seed\n * @param name Optional name.\n * @return The uniform-random Variable.\n */\nexport function randomUniformVariable(shape, minval, maxval, dtype, seed, name = 'randomUniform') {\n return new LayerVariable(tfc.randomUniform(shape, minval, maxval, dtype), dtype, name);\n}\n/**\n * Get a Variable with truncated-normal distribution of values.\n * @param shape Shape of the tensor.\n * @param mean mean value of the normal distribution.\n * @param stddev standard deviation of the normal distribution.\n * @param dtype\n * @param seed\n * @param name Optional name.\n * @return The truncated-normal-random Variable.\n */\nexport function truncatedNormalVariable(shape, mean = 0.0, stddev = 1.0, dtype, seed, name = 'truncatedNormal') {\n // TODO(cais): Implement logic for dtype and seed once they are supported\n // by deeplearn.js.\n dtype = dtype || 'float32';\n if (dtype !== 'float32' && dtype !== 'int32') {\n throw new NotImplementedError(`randomNormal does not support dType ${dtype}.`);\n }\n return new LayerVariable(tfc.truncatedNormal(shape, mean, stddev, dtype, seed), dtype, name);\n}\n/**\n * Get a Variable with normal distribution of values.\n * @param shape Shape of the tensor.\n * @param mean mean value of the normal distribution.\n * @param stddev standard deviation of the normal distribution.\n * @param dtype\n * @param seed\n * @param name Optional name.\n * @return The truncated-normal-random Variable.\n */\nexport function randomNormalVariable(shape, mean = 0.0, stddev = 1.0, dtype, seed, name = 'randomNormal') {\n dtype = dtype || 'float32';\n if (dtype !== 'float32' && dtype !== 'int32') {\n throw new NotImplementedError(`randomNormalVariable does not support dType ${dtype}.`);\n }\n return new LayerVariable(tfc.randomNormal(shape, mean, stddev, dtype, seed), dtype, name);\n}\n/**\n * Update the value of a Variable.\n * @param x The Variable to be updated.\n * @param xNew The new value to update to.\n * @return The Variable updated.\n */\nexport function update(x, xNew) {\n return x.write(xNew);\n}\n/**\n * Update the value of a Variable by adding an increment.\n * @param x The Variable to be updated.\n * @param increment The incrment to add to `x`.\n * @return The Variable updated.\n */\nexport function updateAdd(x, increment) {\n return x.write(tfc.add(x.read(), increment));\n}\n/**\n * Update the value of a Variable by subtracting a decrement.\n * @param x The Variable to be updated.\n * @param decrement The decrement to subtract from `x`.\n * @return The Variable updated.\n */\nexport function updateSub(x, decrement) {\n return x.write(tfc.sub(x.read(), decrement));\n}\n/**\n * Get the values of an array of Variables.\n *\n * @param tensors An `Array` of `Variable`s to get the values of.\n 
* @return The values of the inputs, as an `Array` of`tf.Tensor`s.\n */\nexport function batchGetValue(xs) {\n return xs.map(x => x.read());\n}\n/**\n * Update the value of multiple Variables at once.\n *\n * @param variablesAndValues An `Array`, each element is of type\n * [Variable, Tensor]. The first item is the\n * `Variable` of which the value is to be updated. The second item\n * carries the new value.\n */\nexport function batchSetValue(variablesAndValues) {\n variablesAndValues.forEach(variableAndValue => {\n const variable = variableAndValue[0];\n variable.write(variableAndValue[1]);\n });\n}\n/**\n * Returns the gradients of `variables` w.r.t. the return value of `lossFn`.\n * @param lossFn A function which returns a Scalar to be used as the function\n * value (i.e., numerator) for differentiation.\n * @param variables List of variables to be used as the independent variables\n * (i.e., denominator) for differentiation.\n * @returns An Array of gradients tensors.\n */\nexport function gradients(lossFn, variables) {\n // TODO(cais): The return type signature can be simplified if deeplearn makes\n // the corresponding type public.\n const variableList = variables.map(variable => variable.read());\n const valudAndGrads = variableGrads(lossFn, variableList);\n return variables.map(variable => valudAndGrads.grads[variable.name]);\n}\n//# sourceMappingURL=variables.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* Original source: keras/engine/topology.py */\nimport { serialization, tidy, util } from '@tensorflow/tfjs-core';\nimport { getNextUniqueTensorId, getUid } from '../backend/state';\nimport { getScopedTensorName, getUniqueTensorName, nameScope } from '../common';\nimport { AttributeError, NotImplementedError, RuntimeError, ValueError } from '../errors';\nimport { getInitializer } from '../initializers';\nimport * as generic_utils from '../utils/generic_utils';\nimport * as types_utils from '../utils/types_utils';\nimport * as variable_utils from '../utils/variable_utils';\nimport { batchGetValue, batchSetValue, LayerVariable } from '../variables';\n/**\n * Specifies the ndim, dtype and shape of every input to a layer.\n *\n * Every layer should expose (if appropriate) an `inputSpec` attribute:\n * a list of instances of InputSpec (one per input tensor).\n *\n * A null entry in a shape is compatible with any dimension,\n * a null shape is compatible with any shape.\n */\nexport class InputSpec {\n constructor(args) {\n this.dtype = args.dtype;\n this.shape = args.shape;\n /*\n TODO(michaelterry): Could throw error if ndim and shape are both defined\n (then backport).\n */\n if (args.shape != null) {\n this.ndim = args.shape.length;\n }\n else {\n this.ndim = args.ndim;\n }\n this.maxNDim = args.maxNDim;\n this.minNDim = args.minNDim;\n this.axes = args.axes || {};\n }\n}\n/**\n * `tf.SymbolicTensor` is a placeholder for a Tensor without any concrete value.\n *\n * They are most often encountered when building a graph of `Layer`s for a\n * a `tf.LayersModel` and the input data's shape, but not values are known.\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\nexport class SymbolicTensor {\n /**\n *\n * @param dtype\n * @param shape\n * @param sourceLayer The Layer that produced this symbolic tensor.\n * @param 
inputs The inputs passed to sourceLayer's __call__() method.\n * @param nodeIndex\n * @param tensorIndex\n * @param callArgs The keyword arguments passed to the __call__() method.\n * @param name\n * @param outputTensorIndex The index of this tensor in the list of outputs\n * returned by apply().\n */\n constructor(dtype, shape, sourceLayer, inputs, callArgs, name, outputTensorIndex) {\n this.dtype = dtype;\n this.shape = shape;\n this.sourceLayer = sourceLayer;\n this.inputs = inputs;\n this.callArgs = callArgs;\n this.outputTensorIndex = outputTensorIndex;\n this.id = getNextUniqueTensorId();\n if (name != null) {\n this.originalName = getScopedTensorName(name);\n this.name = getUniqueTensorName(this.originalName);\n }\n this.rank = shape.length;\n }\n}\nlet _nextNodeID = 0;\n/**\n * A `Node` describes the connectivity between two layers.\n *\n * Each time a layer is connected to some new input,\n * a node is added to `layer.inboundNodes`.\n *\n * Each time the output of a layer is used by another layer,\n * a node is added to `layer.outboundNodes`.\n *\n * `nodeIndices` and `tensorIndices` are basically fine-grained coordinates\n * describing the origin of the `inputTensors`, verifying the following:\n *\n * `inputTensors[i] ==\n * inboundLayers[i].inboundNodes[nodeIndices[i]].outputTensors[\n * tensorIndices[i]]`\n *\n * A node from layer A to layer B is added to:\n * A.outboundNodes\n * B.inboundNodes\n */\nexport class Node {\n constructor(args, \n // TODO(michaelterry): Define actual type for this.\n callArgs) {\n this.callArgs = callArgs;\n this.id = _nextNodeID++;\n /*\n Layer instance (NOT a list).\n this is the layer that takes a list of input tensors\n and turns them into a list of output tensors.\n the current node will be added to\n the inboundNodes of outboundLayer.\n */\n this.outboundLayer = args.outboundLayer;\n /*\n The following 3 properties describe where\n the input tensors come from: which layers,\n and for each layer, which node and which\n tensor output of each node.\n */\n // List of layer instances.\n this.inboundLayers = args.inboundLayers;\n // List of integers, 1:1 mapping with inboundLayers.\n this.nodeIndices = args.nodeIndices;\n // List of integers, 1:1 mapping with inboundLayers.\n this.tensorIndices = args.tensorIndices;\n /*\n Following 2 properties:\n tensor inputs and outputs of outboundLayer.\n */\n // List of tensors. 1:1 mapping with inboundLayers.\n this.inputTensors = args.inputTensors;\n // List of tensors, created by outboundLayer.call().\n this.outputTensors = args.outputTensors;\n /*\n Following 2 properties: input and output masks.\n List of tensors, 1:1 mapping with inputTensor.\n */\n this.inputMasks = args.inputMasks;\n // List of tensors, created by outboundLayer.computeMask().\n this.outputMasks = args.outputMasks;\n // Following 2 properties: input and output shapes.\n // List of shape tuples, shapes of inputTensors.\n this.inputShapes = args.inputShapes;\n // List of shape tuples, shapes of outputTensors.\n this.outputShapes = args.outputShapes;\n // Add nodes to all layers involved.\n for (const layer of args.inboundLayers) {\n if (layer != null) {\n layer.outboundNodes.push(this);\n }\n }\n args.outboundLayer.inboundNodes.push(this);\n }\n getConfig() {\n const inboundNames = [];\n for (const layer of this.inboundLayers) {\n if (layer != null) {\n inboundNames.push(layer.name);\n }\n else {\n inboundNames.push(null);\n }\n }\n return {\n outboundLayer: this.outboundLayer ? 
this.outboundLayer.name : null,\n inboundLayers: inboundNames,\n nodeIndices: this.nodeIndices,\n tensorIndices: this.tensorIndices\n };\n }\n}\nlet _nextLayerID = 0;\n/**\n * A layer is a grouping of operations and weights that can be composed to\n * create a `tf.LayersModel`.\n *\n * Layers are constructed by using the functions under the\n * [tf.layers](#Layers-Basic) namespace.\n *\n * @doc {heading: 'Layers', subheading: 'Classes', namespace: 'layers'}\n */\nexport class Layer extends serialization.Serializable {\n constructor(args = {}) {\n super();\n this._callHook = null;\n this._addedWeightNames = [];\n // Porting Notes: PyKeras does not have this property in this base Layer\n // class. Instead lets Layer subclass set it dynamically and checks the\n // value with `hasattr`. In tfjs-layers, we let this be a member of this\n // base class.\n this._stateful = false;\n this.id = _nextLayerID++;\n this.activityRegularizer = null;\n this.inputSpec = null;\n this.supportsMasking = false;\n // These properties will be set upon call of this.build()\n this._trainableWeights = [];\n this._nonTrainableWeights = [];\n this._losses = [];\n this._updates = [];\n this._built = false;\n /*\n These lists will be filled via successive calls\n to this.addInboundNode().\n */\n this.inboundNodes = [];\n this.outboundNodes = [];\n let name = args.name;\n if (!name) {\n const prefix = this.getClassName();\n name = generic_utils.toSnakeCase(prefix) + '_' + getUid(prefix);\n }\n this.name = name;\n this.trainable_ = args.trainable == null ? true : args.trainable;\n if (args.inputShape != null || args.batchInputShape != null) {\n /*\n In this case we will later create an input layer\n to insert before the current layer\n */\n let batchInputShape;\n if (args.batchInputShape != null) {\n batchInputShape = args.batchInputShape;\n }\n else if (args.inputShape != null) {\n let batchSize = null;\n if (args.batchSize != null) {\n batchSize = args.batchSize;\n }\n batchInputShape = [batchSize].concat(args.inputShape);\n }\n this.batchInputShape = batchInputShape;\n // Set dtype.\n let dtype = args.dtype;\n if (dtype == null) {\n dtype = args.inputDType;\n }\n if (dtype == null) {\n dtype = 'float32';\n }\n this.dtype = dtype;\n }\n if (args.weights != null) {\n this.initialWeights = args.weights;\n }\n else {\n this.initialWeights = null;\n }\n // The value of `_refCount` is initialized to null. When the layer is used\n // in a symbolic way for the first time, it will be set to 1.\n this._refCount = null;\n this.fastWeightInitDuringBuild = false;\n }\n /**\n * Converts a layer and its index to a unique (immutable type) name.\n * This function is used internally with `this.containerNodes`.\n * @param layer The layer.\n * @param nodeIndex The layer's position (e.g. 
via enumerate) in a list of\n * nodes.\n *\n * @returns The unique name.\n */\n static nodeKey(layer, nodeIndex) {\n return layer.name + '_ib-' + nodeIndex.toString();\n }\n /**\n * Returns this.inboundNode at index nodeIndex.\n *\n * Porting note: This is a replacement for _get_node_attribute_at_index()\n * @param nodeIndex\n * @param attrName The name of the attribute related to request for this node.\n */\n getNodeAtIndex(nodeIndex, attrName) {\n if (this.inboundNodes.length === 0) {\n throw new RuntimeError('The layer has never been called ' +\n `and thus has no defined ${attrName}.`);\n }\n if (this.inboundNodes.length <= nodeIndex) {\n throw new ValueError(`Asked to get ${attrName} at node ${nodeIndex}, ` +\n `but the layer has only ${this.inboundNodes.length} inbound nodes.`);\n }\n return this.inboundNodes[nodeIndex];\n }\n /**\n * Retrieves the input tensor(s) of a layer at a given node.\n *\n * @param nodeIndex Integer, index of the node from which to retrieve the\n * attribute. E.g. `nodeIndex=0` will correspond to the first time the layer\n * was called.\n *\n * @return A tensor (or list of tensors if the layer has multiple inputs).\n */\n getInputAt(nodeIndex) {\n return generic_utils.singletonOrArray(this.getNodeAtIndex(nodeIndex, 'input').inputTensors);\n }\n /**\n * Retrieves the output tensor(s) of a layer at a given node.\n *\n * @param nodeIndex Integer, index of the node from which to retrieve the\n * attribute. E.g. `nodeIndex=0` will correspond to the first time the layer\n * was called.\n *\n * @return A tensor (or list of tensors if the layer has multiple outputs).\n */\n getOutputAt(nodeIndex) {\n return generic_utils.singletonOrArray(this.getNodeAtIndex(nodeIndex, 'output').outputTensors);\n }\n // Properties\n /**\n * Retrieves the input tensor(s) of a layer.\n *\n * Only applicable if the layer has exactly one inbound node,\n * i.e. if it is connected to one incoming layer.\n *\n * @return Input tensor or list of input tensors.\n *\n * @exception AttributeError if the layer is connected to more than one\n * incoming layers.\n */\n get input() {\n if (this.inboundNodes.length > 1) {\n throw new AttributeError(`Layer ${this.name}` +\n ' has multiple inbound nodes, ' +\n 'hence the notion of \"layer input\" ' +\n 'is ill-defined. ' +\n 'Use `getInputAt(nodeIndex)` instead.');\n }\n else if (this.inboundNodes.length === 0) {\n throw new AttributeError(`Layer ${this.name}` +\n ' is not connected, no input to return.');\n }\n return generic_utils.singletonOrArray(this.getNodeAtIndex(0, 'input').inputTensors);\n }\n /**\n * Retrieves the output tensor(s) of a layer.\n *\n * Only applicable if the layer has exactly one inbound node,\n * i.e. if it is connected to one incoming layer.\n *\n * @return Output tensor or list of output tensors.\n *\n * @exception AttributeError if the layer is connected to more than one\n * incoming layers.\n */\n get output() {\n if (this.inboundNodes.length === 0) {\n throw new AttributeError(`Layer ${this.name}` +\n ' has no inbound nodes.');\n }\n if (this.inboundNodes.length > 1) {\n throw new AttributeError(`Layer ${this.name}` +\n ' has multiple inbound nodes, ' +\n 'hence the notion of \"layer output\" ' +\n 'is ill-defined. 
' +\n 'Use `getOutputAt(nodeIndex)` instead.');\n }\n return generic_utils.singletonOrArray(this.getNodeAtIndex(0, 'output').outputTensors);\n }\n get losses() {\n return this._losses;\n }\n /**\n * Retrieves the Layer's current loss values.\n *\n * Used for regularizers during training.\n */\n calculateLosses() {\n // Porting Node: This is an augmentation to Layer.loss in PyKeras.\n // In PyKeras, Layer.loss returns symbolic tensors. Here a concrete\n // Tensor (specifically Scalar) values are returned. This is due to the\n // imperative backend.\n return this.losses.map(lossFn => lossFn());\n }\n get updates() {\n return this._updates;\n }\n get built() {\n return this._built;\n }\n set built(built) {\n this._built = built;\n }\n get trainable() {\n return this.trainable_;\n }\n set trainable(trainable) {\n this._trainableWeights.forEach(w => w.trainable = trainable);\n this.trainable_ = trainable;\n }\n get trainableWeights() {\n if (this.trainable_) {\n return this._trainableWeights.filter(w => w.trainable);\n }\n else {\n return [];\n }\n }\n set trainableWeights(weights) {\n this._trainableWeights = weights;\n }\n get nonTrainableWeights() {\n if (this.trainable) {\n return this._trainableWeights.filter(w => !w.trainable)\n .concat(this._nonTrainableWeights);\n }\n else {\n return this._trainableWeights.concat(this._nonTrainableWeights);\n }\n }\n set nonTrainableWeights(weights) {\n this._nonTrainableWeights = weights;\n }\n /**\n * The concatenation of the lists trainableWeights and nonTrainableWeights\n * (in this order).\n */\n get weights() {\n return this.trainableWeights.concat(this.nonTrainableWeights);\n }\n get stateful() {\n return this._stateful;\n }\n /**\n * Reset the states of the layer.\n *\n * This method of the base Layer class is essentially a no-op.\n * Subclasses that are stateful (e.g., stateful RNNs) should override this\n * method.\n */\n resetStates() {\n if (!this.stateful) {\n throw new Error('Cannot call the resetStates() method of a non-stateful Layer ' +\n 'object.');\n }\n }\n /**\n * Checks compatibility between the layer and provided inputs.\n *\n * This checks that the tensor(s) `input`\n * verify the input assumptions of the layer\n * (if any). If not, exceptions are raised.\n *\n * @param inputs Input tensor or list of input tensors.\n *\n * @exception ValueError in case of mismatch between\n * the provided inputs and the expectations of the layer.\n */\n assertInputCompatibility(inputs) {\n inputs = generic_utils.toList(inputs);\n if (this.inputSpec == null || this.inputSpec.length === 0) {\n return;\n }\n const inputSpec = generic_utils.toList(this.inputSpec);\n if (inputs.length !== inputSpec.length) {\n throw new ValueError(`Layer ${this.name} expects ${inputSpec.length} inputs, ` +\n `but it received ${inputs.length} input tensors. 
` +\n `Input received: ${inputs}`);\n }\n for (let inputIndex = 0; inputIndex < inputs.length; inputIndex++) {\n const x = inputs[inputIndex];\n const spec = inputSpec[inputIndex];\n if (spec == null) {\n continue;\n }\n // Check ndim.\n const ndim = x.rank;\n if (spec.ndim != null) {\n if (ndim !== spec.ndim) {\n throw new ValueError(`Input ${inputIndex} is incompatible with layer ${this.name}: ` +\n `expected ndim=${spec.ndim}, found ndim=${ndim}`);\n }\n }\n if (spec.maxNDim != null) {\n if (ndim > spec.maxNDim) {\n throw new ValueError(`Input ${inputIndex} is incompatible with layer ${this.name}` +\n `: expected max_ndim=${spec.maxNDim}, found ndim=${ndim}`);\n }\n }\n if (spec.minNDim != null) {\n if (ndim < spec.minNDim) {\n throw new ValueError(`Input ${inputIndex} is incompatible with layer ${this.name}` +\n `: expected min_ndim=${spec.minNDim}, found ndim=${ndim}.`);\n }\n }\n // Check dtype.\n if (spec.dtype != null) {\n if (x.dtype !== spec.dtype) {\n throw new ValueError(`Input ${inputIndex} is incompatible with layer ${this.name} ` +\n `: expected dtype=${spec.dtype}, found dtype=${x.dtype}.`);\n }\n }\n // Check specific shape axes.\n if (spec.axes) {\n const xShape = x.shape;\n for (const key in spec.axes) {\n const axis = Number(key);\n const value = spec.axes[key];\n // Perform Python-style slicing in case axis < 0;\n // TODO(cais): Use https://github.com/alvivi/typescript-underscore to\n // ensure type safety through Underscore calls.\n const xShapeAtAxis = axis >= 0 ? xShape[axis] : xShape[xShape.length + axis];\n if (value != null && [value, null].indexOf(xShapeAtAxis) === -1) {\n throw new ValueError(`Input ${inputIndex} is incompatible with layer ` +\n `${this.name}: expected axis ${axis} of input shape to ` +\n `have value ${value} but got shape ${xShape}.`);\n }\n }\n }\n // Check shape.\n if (spec.shape != null) {\n for (let i = 0; i < spec.shape.length; ++i) {\n const specDim = spec.shape[i];\n const dim = x.shape[i];\n if (specDim != null && dim != null) {\n if (specDim !== dim) {\n throw new ValueError(`Input ${inputIndex} is incompatible with layer ` +\n `${this.name}: expected shape=${spec.shape}, ` +\n `found shape=${x.shape}.`);\n }\n }\n }\n }\n }\n }\n /**\n * This is where the layer's logic lives.\n *\n * @param inputs Input tensor, or list/tuple of input tensors.\n * @param kwargs Additional keyword arguments.\n *\n * @return A tensor or list/tuple of tensors.\n */\n call(inputs, kwargs) {\n return inputs;\n }\n invokeCallHook(inputs, kwargs) {\n if (this._callHook != null) {\n this._callHook(inputs, kwargs);\n }\n }\n /**\n * Set call hook.\n * This is currently used for testing only.\n * @param callHook\n */\n setCallHook(callHook) {\n this._callHook = callHook;\n }\n /**\n * Clear call hook.\n * This is currently used for testing only.\n */\n clearCallHook() {\n this._callHook = null;\n }\n /**\n * Builds or executes a `Layer's logic.\n *\n * When called with `tf.Tensor`(s), execute the `Layer`s computation and\n * return Tensor(s). 
For example:\n *\n * ```js\n * const denseLayer = tf.layers.dense({\n * units: 1,\n * kernelInitializer: 'zeros',\n * useBias: false\n * });\n *\n * // Invoke the layer's apply() method with a `tf.Tensor` (with concrete\n * // numeric values).\n * const input = tf.ones([2, 2]);\n * const output = denseLayer.apply(input);\n *\n * // The output's value is expected to be [[0], [0]], due to the fact that\n * // the dense layer has a kernel initialized to all-zeros and does not have\n * // a bias.\n * output.print();\n * ```\n *\n * When called with `tf.SymbolicTensor`(s), this will prepare the layer for\n * future execution. This entails internal book-keeping on shapes of\n * expected Tensors, wiring layers together, and initializing weights.\n *\n * Calling `apply` with `tf.SymbolicTensor`s are typically used during the\n * building of non-`tf.Sequential` models. For example:\n *\n * ```js\n * const flattenLayer = tf.layers.flatten();\n * const denseLayer = tf.layers.dense({units: 1});\n *\n * // Use tf.layers.input() to obtain a SymbolicTensor as input to apply().\n * const input = tf.input({shape: [2, 2]});\n * const output1 = flattenLayer.apply(input);\n *\n * // output1.shape is [null, 4]. The first dimension is the undetermined\n * // batch size. The second dimension comes from flattening the [2, 2]\n * // shape.\n * console.log(JSON.stringify(output1.shape));\n *\n * // The output SymbolicTensor of the flatten layer can be used to call\n * // the apply() of the dense layer:\n * const output2 = denseLayer.apply(output1);\n *\n * // output2.shape is [null, 1]. The first dimension is the undetermined\n * // batch size. The second dimension matches the number of units of the\n * // dense layer.\n * console.log(JSON.stringify(output2.shape));\n *\n * // The input and output and be used to construct a model that consists\n * // of the flatten and dense layers.\n * const model = tf.model({inputs: input, outputs: output2});\n * ```\n *\n * @param inputs a `tf.Tensor` or `tf.SymbolicTensor` or an Array of them.\n * @param kwargs Additional keyword arguments to be passed to `call()`.\n *\n * @return Output of the layer's `call` method.\n *\n * @exception ValueError error in case the layer is missing shape information\n * for its `build` call.\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n // Porting Note: This is a replacement for __call__() in Python.\n apply(inputs, kwargs) {\n kwargs = kwargs || {};\n this.assertNotDisposed();\n // Ensure inputs are all the same type.\n const inputsList = generic_utils.toList(inputs);\n let allAreSymbolic = true;\n for (const input of inputsList) {\n if (!(input instanceof SymbolicTensor)) {\n allAreSymbolic = false;\n break;\n }\n }\n let noneAreSymbolic = true;\n for (const input of inputsList) {\n if (input instanceof SymbolicTensor) {\n noneAreSymbolic = false;\n break;\n }\n }\n if (allAreSymbolic === noneAreSymbolic) {\n throw new ValueError('Arguments to apply() must be all ' +\n 'SymbolicTensors or all Tensors');\n }\n // TODO(michaelterry): nameScope() may not be necessary.\n return nameScope(this.name, () => {\n // Handle laying building (weight creating, input spec locking).\n if (!this.built) {\n /*\n Throw exceptions in case the input is not compatible\n with the inputSpec specified in the layer constructor.\n */\n this.assertInputCompatibility(inputs);\n // Collect input shapes to build layer.\n const inputShapes = [];\n for (const xElem of generic_utils.toList(inputs)) {\n inputShapes.push(xElem.shape);\n }\n 
this.build(generic_utils.singletonOrArray(inputShapes));\n this.built = true;\n // Load weights that were specified at layer instantiation.\n if (this.initialWeights) {\n this.setWeights(this.initialWeights);\n }\n if (this._refCount === null && noneAreSymbolic) {\n // The first use of this layer is a non-symbolic call, set ref count\n // to 1 so the Layer can be properly disposed if its dispose() method\n // is called.\n this._refCount = 1;\n }\n }\n /*\n Throw exceptions in case the input is not compatible\n with the inputSpec set at build time.\n */\n this.assertInputCompatibility(inputs);\n // Handle mask propagation.\n // TODO(michaelterry): Mask propagation not currently implemented.\n // Actually call the layer, collecting output(s), mask(s), and shape(s).\n if (noneAreSymbolic) {\n let output = this.call(inputs, kwargs);\n // TODO(michaelterry): Compute the outputMask\n // If the layer returns tensors from its inputs, unmodified,\n // we copy them to avoid loss of tensor metadata.\n const outputList = generic_utils.toList(output);\n const outputListCopy = [];\n // TODO(michaelterry): This copying may not be necessary given our eager\n // backend.\n for (let x of outputList) {\n if (inputsList.indexOf(x) !== -1) {\n x = x.clone();\n }\n outputListCopy.push(x);\n }\n output = generic_utils.singletonOrArray(outputListCopy);\n if (this.activityRegularizer != null) {\n throw new NotImplementedError('Layer invocation in the presence of activity ' +\n 'regularizer(s) is not supported yet.');\n }\n // TODO(michaelterry): Call addInboundNode()?\n return output;\n }\n else {\n const inputShape = collectInputShape(inputs);\n const outputShape = this.computeOutputShape(inputShape);\n let output;\n const outputDType = guessOutputDType(inputs);\n this.warnOnIncompatibleInputShape(Array.isArray(inputs) ? inputShape[0] :\n inputShape);\n if (outputShape != null && outputShape.length > 0 &&\n Array.isArray(outputShape[0])) {\n // We have multiple output shapes. 
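A minimal sketch (assuming `tf` is the `@tensorflow/tfjs` namespace) of the all-SymbolicTensors-or-all-Tensors rule that `apply()` enforces above: the same layer can be invoked in graph-building mode or eagerly, but not with a mix of the two.

```js
// Sketch only: illustrates the apply() input-type rule, not part of this bundle.
import * as tf from '@tensorflow/tfjs';

const dense = tf.layers.dense({units: 1});
const symbolic = tf.input({shape: [2]});   // tf.SymbolicTensor
const concrete = tf.ones([1, 2]);          // tf.Tensor

dense.apply(symbolic);                     // graph building: returns a SymbolicTensor
dense.apply(concrete).print();             // eager execution: returns a Tensor
// dense.apply([symbolic, concrete]);      // would throw: arguments must be all
//                                         // SymbolicTensors or all Tensors
```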
Create multiple output tensors.\n output = outputShape\n .map((shape, index) => new SymbolicTensor(outputDType, shape, this, generic_utils.toList(inputs), kwargs, this.name, index));\n }\n else {\n output = new SymbolicTensor(outputDType, outputShape, this, generic_utils.toList(inputs), kwargs, this.name);\n }\n /*\n Add an inbound node to the layer, so that it keeps track\n of the call and of all new variables created during the call.\n This also updates the layer history of the output tensor(s).\n If the input tensor(s) had no previous history,\n this does nothing.\n */\n this.addInboundNode(inputs, output, null, null, inputShape, outputShape, kwargs);\n this._refCount++;\n if (this.activityRegularizer != null) {\n throw new NotImplementedError('Layer invocation in the presence of activity ' +\n 'regularizer(s) is not supported yet.');\n }\n return output;\n }\n });\n }\n /**\n * Check compatibility between input shape and this layer's batchInputShape.\n *\n * Print warning if any incompatibility is found.\n *\n * @param inputShape Input shape to be checked.\n */\n warnOnIncompatibleInputShape(inputShape) {\n if (this.batchInputShape == null) {\n return;\n }\n else if (inputShape.length !== this.batchInputShape.length) {\n console.warn(`The rank of the input tensor provided (shape: ` +\n `${JSON.stringify(inputShape)}) does not match that of the ` +\n `batchInputShape (${JSON.stringify(this.batchInputShape)}) ` +\n `of the layer ${this.name}`);\n }\n else {\n let dimMismatch = false;\n this.batchInputShape.forEach((dimension, i) => {\n if (dimension != null && inputShape[i] != null &&\n inputShape[i] !== dimension) {\n dimMismatch = true;\n }\n });\n if (dimMismatch) {\n console.warn(`The shape of the input tensor ` +\n `(${JSON.stringify(inputShape)}) does not ` +\n `match the expectation of layer ${this.name}: ` +\n `${JSON.stringify(this.batchInputShape)}`);\n }\n }\n }\n /**\n * Retrieves the output shape(s) of a layer.\n *\n * Only applicable if the layer has only one inbound node, or if all inbound\n * nodes have the same output shape.\n *\n * @returns Output shape or shapes.\n * @throws AttributeError: if the layer is connected to more than one incoming\n * nodes.\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n get outputShape() {\n if (this.inboundNodes == null || this.inboundNodes.length === 0) {\n throw new AttributeError(`The layer ${this.name} has never been called and thus has no ` +\n `defined output shape.`);\n }\n const allOutputShapes = [];\n for (const node of this.inboundNodes) {\n const shapeString = JSON.stringify(node.outputShapes);\n if (allOutputShapes.indexOf(shapeString) === -1) {\n allOutputShapes.push(shapeString);\n }\n }\n if (allOutputShapes.length === 1) {\n const outputShapes = this.inboundNodes[0].outputShapes;\n if (Array.isArray(outputShapes) && Array.isArray(outputShapes[0]) &&\n outputShapes.length === 1) {\n return outputShapes[0];\n }\n else {\n return outputShapes;\n }\n }\n else {\n throw new AttributeError(`The layer ${this.name} has multiple inbound nodes with different ` +\n `output shapes. 
Hence the notion of \"output shape\" is ill-defined ` +\n `for the layer.`);\n // TODO(cais): Implement getOutputShapeAt().\n }\n }\n /**\n * Counts the total number of numbers (e.g., float32, int32) in the\n * weights.\n *\n * @returns An integer count.\n * @throws RuntimeError: If the layer is not built yet (in which case its\n * weights are not defined yet.)\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n countParams() {\n if (!this.built) {\n throw new RuntimeError(`You tried to call countParams() on ${this.name}, ` +\n `but the layer is not built yet. Build it first by calling ` +\n `build(batchInputShape).`);\n }\n return variable_utils.countParamsInWeights(this.weights);\n }\n /**\n * Creates the layer weights.\n *\n * Must be implemented on all layers that have weights.\n *\n * Called when apply() is called to construct the weights.\n *\n * @param inputShape A `Shape` or array of `Shape` (unused).\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n build(inputShape) {\n this.built = true;\n }\n /**\n * Returns the current values of the weights of the layer.\n *\n * @param trainableOnly Whether to get the values of only trainable weights.\n * @returns Weight values as an `Array` of `tf.Tensor`s.\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n getWeights(trainableOnly = false) {\n return batchGetValue(trainableOnly ? this.trainableWeights : this.weights);\n }\n /**\n * Sets the weights of the layer, from Tensors.\n *\n * @param weights a list of Tensors. The number of arrays and their shape\n * must match number of the dimensions of the weights of the layer (i.e.\n * it should match the output of `getWeights`).\n *\n * @exception ValueError If the provided weights list does not match the\n * layer's specifications.\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n setWeights(weights) {\n tidy(() => {\n const params = this.weights;\n if (params.length !== weights.length) {\n // TODO(cais): Restore the following and use `providedWeights`, instead\n // of `weights` in the error message, once the deeplearn.js bug is\n // fixed: https://github.com/PAIR-code/deeplearnjs/issues/498 const\n // providedWeights = JSON.stringify(weights).substr(0, 50);\n throw new ValueError(`You called setWeights(weights) on layer \"${this.name}\" ` +\n `with a weight list of length ${weights.length}, ` +\n `but the layer was expecting ${params.length} weights. 
` +\n `Provided weights: ${weights}...`);\n }\n if (params.length === 0) {\n return;\n }\n const weightValueTuples = [];\n const paramValues = batchGetValue(params);\n for (let i = 0; i < paramValues.length; ++i) {\n const pv = paramValues[i];\n const p = params[i];\n const w = weights[i];\n if (!util.arraysEqual(pv.shape, w.shape)) {\n throw new ValueError(`Layer weight shape ${pv.shape} ` +\n `not compatible with provided weight shape ${w.shape}`);\n }\n weightValueTuples.push([p, w]);\n }\n batchSetValue(weightValueTuples);\n });\n }\n /**\n * Adds a weight variable to the layer.\n *\n * @param name Name of the new weight variable.\n * @param shape The shape of the weight.\n * @param dtype The dtype of the weight.\n * @param initializer An initializer instance.\n * @param regularizer A regularizer instance.\n * @param trainable Whether the weight should be trained via backprop or not\n * (assuming that the layer itself is also trainable).\n * @param constraint An optional trainable.\n * @return The created weight variable.\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n addWeight(name, shape, dtype, initializer, regularizer, trainable, constraint) {\n // Reject duplicate weight names.\n if (this._addedWeightNames.indexOf(name) !== -1) {\n throw new ValueError(`Duplicate weight name ${name} for layer ${this.name}`);\n }\n this._addedWeightNames.push(name);\n if (dtype == null) {\n dtype = 'float32';\n }\n if (this.fastWeightInitDuringBuild) {\n initializer = getInitializer('zeros');\n }\n const initValue = initializer.apply(shape, dtype);\n const weight = new LayerVariable(initValue, dtype, name, trainable, constraint);\n initValue.dispose();\n // Request backend not to dispose the weights of the model on scope() exit.\n if (regularizer != null) {\n this.addLoss(() => regularizer.apply(weight.read()));\n }\n if (trainable == null) {\n trainable = true;\n }\n if (trainable) {\n this._trainableWeights.push(weight);\n }\n else {\n this._nonTrainableWeights.push(weight);\n }\n return weight;\n }\n /**\n * Set the fast-weight-initialization flag.\n *\n * In cases where the initialized weight values will be immediately\n * overwritten by loaded weight values during model loading, setting\n * the flag to `true` saves unnecessary calls to potentially expensive\n * initializers and speeds up the loading process.\n *\n * @param value Target value of the flag.\n */\n setFastWeightInitDuringBuild(value) {\n this.fastWeightInitDuringBuild = value;\n }\n /**\n * Add losses to the layer.\n *\n * The loss may potentionally be conditional on some inputs tensors,\n * for instance activity losses are conditional on the layer's inputs.\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n addLoss(losses) {\n if (losses == null || Array.isArray(losses) && losses.length === 0) {\n return;\n }\n // Update this.losses\n losses = generic_utils.toList(losses);\n if (this._losses !== undefined && this._losses !== null) {\n this.losses.push(...losses);\n }\n }\n /**\n * Computes the output shape of the layer.\n *\n * Assumes that the layer will be built to match that input shape provided.\n *\n * @param inputShape A shape (tuple of integers) or a list of shape tuples\n * (one per output tensor of the layer). 
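A hedged usage sketch of the `getWeights()` / `setWeights()` pair defined above (assuming `tf` is the `@tensorflow/tfjs` namespace): the list passed to `setWeights()` must match the layer's own weights in count and shape, exactly as the checks in the code enforce.

```js
// Sketch only: weight round-trip on a built-in layer.
import * as tf from '@tensorflow/tfjs';

const layer = tf.layers.dense({units: 2, inputShape: [3]});
layer.build([null, 3]);                    // creates kernel [3, 2] and bias [2]

const weights = layer.getWeights();        // [kernel, bias]
layer.setWeights(weights.map((w) => tf.zerosLike(w)));
layer.getWeights()[0].print();             // all zeros, shapes unchanged
// layer.setWeights([weights[0]]) would throw: expected 2 weights, got 1.
```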
Shape tuples can include null for\n * free dimensions, instead of an integer.\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n computeOutputShape(inputShape) {\n return inputShape;\n }\n /**\n * Computes an output mask tensor.\n *\n * @param inputs Tensor or list of tensors.\n * @param mask Tensor or list of tensors.\n *\n * @return null or a tensor (or list of tensors, one per output tensor of the\n * layer).\n */\n computeMask(inputs, mask) {\n if (!this.supportsMasking) {\n if (mask != null) {\n if (Array.isArray(mask)) {\n mask.forEach(maskElement => {\n if (maskElement != null) {\n throw new TypeError(`Layer ${this.name} does not support masking, ` +\n 'but was passed an inputMask.');\n }\n });\n }\n else {\n throw new TypeError(`Layer ${this.name} does not support masking, ` +\n 'but was passed an inputMask.');\n }\n }\n // masking not explicitly supported: return null as mask\n return null;\n }\n // if masking is explictly supported, by default\n // carry over the input mask\n return mask;\n }\n /**\n * Internal method to create an inbound node for the layer.\n *\n * @param inputTensors List of input tensors.\n * @param outputTensors List of output tensors.\n * @param inputMasks List of input masks (a mask can be a tensor, or null).\n * @param outputMasks List of output masks (a mask can be a tensor, or null).\n * @param inputShapes List of input shape tuples.\n * @param outputShapes List of output shape tuples.\n * @param kwargs Dictionary of keyword arguments that were passed to the\n * `call` method of the layer at the call that created the node.\n */\n addInboundNode(inputTensors, outputTensors, inputMasks, outputMasks, inputShapes, outputShapes, kwargs = null) {\n const inputTensorList = generic_utils.toList(inputTensors);\n outputTensors = generic_utils.toList(outputTensors);\n inputMasks = generic_utils.toList(inputMasks);\n outputMasks = generic_utils.toList(outputMasks);\n inputShapes = types_utils.normalizeShapeList(inputShapes);\n outputShapes = types_utils.normalizeShapeList(outputShapes);\n // Collect input tensor(s) coordinates.\n const inboundLayers = [];\n const nodeIndices = [];\n const tensorIndices = [];\n for (const x of inputTensorList) {\n /*\n * TODO(michaelterry): Keras adds this value to tensors; it's not\n * clear whether we'll use this or not.\n */\n inboundLayers.push(x.sourceLayer);\n nodeIndices.push(x.nodeIndex);\n tensorIndices.push(x.tensorIndex);\n }\n // Create node, add it to inbound nodes.\n // (This call has side effects.)\n // tslint:disable-next-line:no-unused-expression\n new Node({\n outboundLayer: this,\n inboundLayers,\n nodeIndices,\n tensorIndices,\n inputTensors: inputTensorList,\n outputTensors,\n inputMasks,\n outputMasks,\n inputShapes,\n outputShapes\n }, kwargs);\n // Update tensor history\n for (let i = 0; i < outputTensors.length; i++) {\n // TODO(michaelterry: _uses_learning_phase not tracked.\n outputTensors[i].sourceLayer = this;\n outputTensors[i].nodeIndex = this.inboundNodes.length - 1;\n outputTensors[i].tensorIndex = i;\n }\n }\n /**\n * Returns the config of the layer.\n *\n * A layer config is a TS dictionary (serializable)\n * containing the configuration of a layer.\n * The same layer can be reinstantiated later\n * (without its trained weights) from this configuration.\n *\n * The config of a layer does not include connectivity\n * information, nor the layer class name. 
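The base-class `computeOutputShape()` above is the identity; concrete layers override it. A small sketch of what such overrides report for standard layers (assuming `tf` is `@tensorflow/tfjs`; `null` marks the free batch dimension):

```js
// Sketch only: querying computeOutputShape() on built-in layers.
import * as tf from '@tensorflow/tfjs';

const flatten = tf.layers.flatten();
console.log(JSON.stringify(flatten.computeOutputShape([null, 3, 4])));   // [null,12]

const conv = tf.layers.conv2d({filters: 8, kernelSize: 3});
console.log(JSON.stringify(conv.computeOutputShape([null, 28, 28, 1]))); // [null,26,26,8]
```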
These are handled\n * by 'Container' (one layer of abstraction above).\n *\n * Porting Note: The TS dictionary follows TS naming standrds for\n * keys, and uses tfjs-layers type-safe Enums. Serialization methods\n * should use a helper function to convert to the pythonic storage\n * standard. (see serialization_utils.convertTsToPythonic)\n *\n * @returns TS dictionary of configuration.\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n getConfig() {\n const config = { name: this.name, trainable: this.trainable };\n if (this.batchInputShape != null) {\n config['batchInputShape'] = this.batchInputShape;\n }\n if (this.dtype != null) {\n config['dtype'] = this.dtype;\n }\n return config;\n }\n /**\n * Dispose the weight variables that this Layer instance holds.\n *\n * @returns {number} Number of disposed variables.\n */\n disposeWeights() {\n this.weights.forEach(weight => weight.dispose());\n return this.weights.length;\n }\n assertNotDisposed() {\n if (this._refCount === 0) {\n throw new Error(`Layer '${this.name}' is already disposed.`);\n }\n }\n /**\n * Attempt to dispose layer's weights.\n *\n * This method decrease the reference count of the Layer object by 1.\n *\n * A Layer is reference-counted. Its reference count is incremented by 1\n * the first item its `apply()` method is called and when it becomes a part\n * of a new `Node` (through calling the `apply()`) method on a\n * `tf.SymbolicTensor`).\n *\n * If the reference count of a Layer becomes 0, all the weights will be\n * disposed and the underlying memory (e.g., the textures allocated in WebGL)\n * will be freed.\n *\n * Note: If the reference count is greater than 0 after the decrement, the\n * weights of the Layer will *not* be disposed.\n *\n * After a Layer is disposed, it cannot be used in calls such as `apply()`,\n * `getWeights()` or `setWeights()` anymore.\n *\n * @returns A DisposeResult Object with the following fields:\n * - refCountAfterDispose: The reference count of the Container after this\n * `dispose()` call.\n * - numDisposedVariables: Number of `tf.Variable`s (i.e., weights) disposed\n * during this `dispose()` call.\n * @throws {Error} If the layer is not built yet, or if the layer has already\n * been disposed.\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n dispose() {\n if (!this.built) {\n throw new Error(`Cannot dispose Layer ${this.name} because it has not been ` +\n `built yet.`);\n }\n if (this._refCount === null) {\n throw new Error(`Cannot dispose Layer ${this.name} because it has not been used ` +\n `yet.`);\n }\n this.assertNotDisposed();\n let numDisposedVariables = 0;\n if (--this._refCount === 0) {\n numDisposedVariables = this.disposeWeights();\n }\n return { refCountAfterDispose: this._refCount, numDisposedVariables };\n }\n}\n/**\n * Collects the input shape(s) of a list of `tf.Tensor`s or\n * `tf.SymbolicTensor`s.\n *\n * TODO(michaelterry): Update PyKeras docs (backport).\n *\n * @param inputTensors List of input tensors (or single input tensor).\n *\n * @return List of shape tuples (or single tuple), one tuple per input.\n */\nfunction collectInputShape(inputTensors) {\n inputTensors =\n generic_utils.toList(inputTensors);\n const shapes = [];\n for (const x of inputTensors) {\n shapes.push(x.shape);\n }\n return generic_utils.singletonOrArray(shapes);\n}\n/**\n * Guesses output dtype based on inputs.\n *\n * At present, just returns 'float32' for any input.\n *\n * @param inputTensors List of input tensors (or single input tensor).\n *\n * @return The 
guessed DType. At present, always returns 'float32'.\n */\nfunction guessOutputDType(inputTensors) {\n return 'float32';\n}\n/**\n * Returns the list of input tensors necessary to compute `tensor`.\n *\n * Output will always be a list of tensors (potentially with 1 element).\n *\n * @param tensor The tensor to start from.\n * @param layer Origin layer of the tensor.\n * @param nodeIndex Origin node index of the tensor.\n *\n * @return Array of input tensors.\n */\nexport function getSourceInputs(tensor, layer, nodeIndex) {\n if (layer == null || (nodeIndex != null && nodeIndex > 0)) {\n layer = tensor.sourceLayer;\n nodeIndex = tensor.nodeIndex;\n }\n if (layer.inboundNodes.length === 0) {\n return [tensor];\n }\n else {\n const node = layer.inboundNodes[nodeIndex];\n if (node.inboundLayers.length === 0) {\n return node.inputTensors;\n }\n else {\n const sourceTensors = [];\n for (let i = 0; i < node.inboundLayers.length; i++) {\n const x = node.inputTensors[i];\n const layer = node.inboundLayers[i];\n const nodeIndex = node.nodeIndices[i];\n const previousSources = getSourceInputs(x, layer, nodeIndex);\n // Avoid input redundancy.\n for (const x of previousSources) {\n if (sourceTensors.indexOf(x) === -1) {\n sourceTensors.push(x);\n }\n }\n }\n return sourceTensors;\n }\n }\n}\n//# sourceMappingURL=topology.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport { serialization } from '@tensorflow/tfjs-core';\nimport { getUid } from '../backend/state';\nimport { ValueError } from '../errors';\nimport { Layer, Node, SymbolicTensor } from './topology';\nexport class InputLayer extends Layer {\n constructor(args) {\n super({\n dtype: args.dtype,\n name: args.name != null ? 
args.name : getUid('input').toString()\n });\n // Normalize config.batchSize and config.sparse\n if (args.batchSize == null) {\n args.batchSize = null;\n }\n if (args.sparse == null) {\n args.sparse = false;\n }\n this.trainable = false;\n this.built = true;\n this.sparse = args.sparse;\n if (args.inputShape != null && args.batchInputShape != null) {\n throw new ValueError('Only provide the inputShape OR ' +\n 'batchInputShape argument to inputLayer, not both at the same time.');\n }\n let batchInputShape = args.batchInputShape;\n if (batchInputShape == null) {\n if (args.inputShape == null) {\n throw new ValueError('An InputLayer should be passed either a ' +\n '`batchInputShape` or an `inputShape`.');\n }\n else {\n batchInputShape = [args.batchSize].concat(args.inputShape);\n }\n }\n else {\n // TODO(michaelterry): Backport to PyKeras\n if (args.batchSize != null) {\n throw new ValueError('Cannot specify batchSize if batchInputShape is ' +\n 'specified when creating an InputLayer.');\n }\n }\n const dtype = args.dtype || 'float32';\n this.batchInputShape = batchInputShape;\n this.dtype = dtype;\n // TODO(michaelterry): Backport this to PyKeras?\n this.inputSpec = [{ shape: batchInputShape }];\n const inputTensor = new SymbolicTensor(this.dtype, this.batchInputShape, this, [], {}, this.name);\n inputTensor.nodeIndex = 0;\n inputTensor.tensorIndex = 0;\n // Create an input node to add to this.outboundNode.\n // (This call has side effects.)\n // tslint:disable-next-line:no-unused-expression\n new Node({\n outboundLayer: this,\n inboundLayers: [],\n nodeIndices: [],\n tensorIndices: [],\n inputTensors: [inputTensor],\n outputTensors: [inputTensor],\n inputMasks: [null],\n outputMasks: [null],\n inputShapes: [batchInputShape],\n outputShapes: [batchInputShape]\n });\n }\n apply(inputs, kwargs) {\n throw new ValueError('Cannot pass any input to an ' +\n `InputLayer's apply() method. InputLayer name: ${this.name}`);\n }\n dispose() {\n // dispose() for InputLayer is overridden as no-op.\n return { refCountAfterDispose: this._refCount, numDisposedVariables: 0 };\n }\n getConfig() {\n return {\n batchInputShape: this.batchInputShape,\n dtype: this.dtype,\n sparse: this.sparse,\n name: this.name\n };\n }\n}\n/** @nocollapse */\nInputLayer.className = 'InputLayer';\nserialization.registerClass(InputLayer);\nexport function Input(config) {\n if (config.batchShape == null && config.shape == null) {\n throw new Error('Please provide to Input either a `shape`' +\n ' or a `batchShape` argument. 
Note that ' +\n '`shape` does not include the batch ' +\n 'dimension.');\n }\n if (config.batchShape != null && config.shape != null) {\n // TODO(michaelterry): Backport to PyKeras.\n throw new ValueError('Please provide either a `shape` or `batchShape` ' +\n 'argument to Input, but not both.');\n }\n let batchShape = config.batchShape;\n if (config.shape != null && batchShape == null) {\n batchShape = [null].concat(config.shape);\n }\n let dtype = config.dtype;\n if (dtype == null) {\n dtype = 'float32';\n }\n const inputLayer = new InputLayer({\n batchInputShape: batchShape,\n name: config.name,\n dtype,\n sparse: config.sparse\n });\n const outputs = inputLayer.inboundNodes[0].outputTensors;\n return outputs[0];\n}\n//# sourceMappingURL=input_layer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport { dispose } from '@tensorflow/tfjs-core';\n/**\n * Turn any Scalar values in a Logs object into actual number values.\n *\n * @param logs The `Logs` object to be resolved in place.\n */\nexport async function resolveScalarsInLogs(logs) {\n if (logs == null) {\n return;\n }\n const promises = [];\n const keys = [];\n const scalarsToDispose = [];\n for (const key in logs) {\n const value = logs[key];\n if (typeof value !== 'number') {\n const valueScalar = value;\n promises.push(valueScalar.data());\n keys.push(key);\n scalarsToDispose.push(valueScalar);\n }\n }\n if (promises.length > 0) {\n const values = await Promise.all(promises);\n for (let i = 0; i < values.length; ++i) {\n logs[keys[i]] = values[i][0];\n }\n // Dispose the original scalar tensors.\n dispose(scalarsToDispose);\n }\n}\n/**\n * Dispose all Tensors in an UnresolvedLogs object.\n *\n * @param logs An `UnresolvedLogs` object potentially containing `tf.Tensor`s in\n * places where the values can be `tf.Tensor` or `number`.\n */\nexport function disposeTensorsInLogs(logs) {\n if (logs == null) {\n return;\n }\n for (const key in logs) {\n const value = logs[key];\n if (typeof value !== 'number') {\n value.dispose();\n }\n }\n}\n//# sourceMappingURL=logs.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* Original source: keras/callbacks.py */\nimport { add, div, keep, mul, nextFrame, tidy, util } from '@tensorflow/tfjs-core';\nimport { ValueError } from './errors';\nimport { resolveScalarsInLogs } from './logs';\nimport * as generic_utils from './utils/generic_utils';\n/** Verbosity logging level when fitting a model. */\nexport var ModelLoggingVerbosity;\n(function (ModelLoggingVerbosity) {\n ModelLoggingVerbosity[ModelLoggingVerbosity[\"SILENT\"] = 0] = \"SILENT\";\n ModelLoggingVerbosity[ModelLoggingVerbosity[\"VERBOSE\"] = 1] = \"VERBOSE\";\n})(ModelLoggingVerbosity || (ModelLoggingVerbosity = {}));\n/** How often to yield to the main thread when training (in ms). 
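A brief sketch of how the `Input()` factory above is typically reached through the public API (as `tf.input()`, assuming `tf` is `@tensorflow/tfjs`): it returns the single output SymbolicTensor of an `InputLayer`, which then seeds a functional model.

```js
// Sketch only: functional model built from an InputLayer's SymbolicTensor.
import * as tf from '@tensorflow/tfjs';

const input = tf.input({shape: [8]});             // SymbolicTensor, shape [null, 8]
const hidden = tf.layers.dense({units: 4, activation: 'relu'}).apply(input);
const output = tf.layers.dense({units: 1}).apply(hidden);

const model = tf.model({inputs: input, outputs: output});
model.summary();                                  // uses printSummary() further below
```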
*/\nexport const DEFAULT_YIELD_EVERY_MS = 125;\n/**\n * Abstract base class used to build new callbacks.\n *\n * The `logs` dictionary that callback methods take as argument will contain\n * keys for quantities relevant to the current batch or epoch.\n *\n * Currently, the `.fit()` method of the `Sequential` model class\n * will include the following quantities in the `logs` that\n * it passes to its callbacks:\n *\n * onEpochEnd: Logs include `acc` and `loss`, and optionally include `valLoss`\n * (if validation is enabled in `fit`), and `valAcc` (if validation and\n * accuracy monitoring are enabled).\n * onBatchBegin: Logs include `size`, the number of samples in the current\n * batch.\n * onBatchEnd: Logs include `loss`, and optionally `acc` (if accuracy monitoring\n * is enabled).\n */\nexport class BaseCallback {\n constructor() {\n // TODO(michaelterry): This type is a best guess.\n this.validationData = null;\n }\n setParams(params) {\n this.params = params;\n }\n async onEpochBegin(epoch, logs) { }\n async onEpochEnd(epoch, logs) { }\n async onBatchBegin(batch, logs) { }\n async onBatchEnd(batch, logs) { }\n async onTrainBegin(logs) { }\n async onTrainEnd(logs) { }\n // LayersModel needs to call Callback.setModel(), but cannot actually depend\n // on Callback because that creates a cyclic dependency. Providing this no-op\n // method on BaseCallback breaks the cycle: this way LayersModel can depend on\n // BaseCallback but not on Callback. The argument is typed as `Container`\n // (the superclass of LayersModel) to avoid recapitulating the cycle. Callback\n // overrides this method and enforces that the argument is really a\n // LayersModel.\n setModel(model) {\n // Do nothing. Use Callback instead of BaseCallback to track the model.\n }\n}\n/**\n * Container abstracting a list of callbacks.\n */\nexport class CallbackList {\n // TODO(cais): When the need arises, uncomment the following lines and\n // implement the queue for time values.\n // private deltaTBatch: number;\n // private deltaTsBatchBegin: Array;\n // private deltaTsBatchEnd: Array;\n /**\n * Constructor of CallbackList.\n * @param callbacks Array of `Callback` instances.\n * @param queueLength Queue length for keeping running statistics over\n * callback execution time.\n */\n constructor(callbacks, queueLength = 10) {\n // TODO(cais): Make use of queueLength when implementing the queue for time\n // values.\n if (callbacks == null) {\n callbacks = [];\n }\n this.callbacks = callbacks;\n this.queueLength = queueLength;\n }\n append(callback) {\n this.callbacks.push(callback);\n }\n setParams(params) {\n for (const callback of this.callbacks) {\n callback.setParams(params);\n }\n }\n setModel(model) {\n for (const callback of this.callbacks) {\n callback.setModel(model);\n }\n }\n /**\n * Called at the start of an epoch.\n * @param epoch Index of epoch.\n * @param logs Dictionary of logs.\n */\n async onEpochBegin(epoch, logs) {\n if (logs == null) {\n logs = {};\n }\n for (const callback of this.callbacks) {\n await callback.onEpochBegin(epoch, logs);\n }\n }\n /**\n * Called at the end of an epoch.\n * @param epoch Index of epoch.\n * @param logs Dictionary of logs.\n */\n async onEpochEnd(epoch, logs) {\n if (logs == null) {\n logs = {};\n }\n for (const callback of this.callbacks) {\n await callback.onEpochEnd(epoch, logs);\n }\n }\n /**\n * Called right before processing a batch.\n * @param batch Index of batch within the current epoch.\n * @param logs Dictionary of logs.\n */\n async onBatchBegin(batch, logs) {\n 
if (logs == null) {\n logs = {};\n }\n for (const callback of this.callbacks) {\n await callback.onBatchBegin(batch, logs);\n }\n }\n /**\n * Called at the end of a batch.\n * @param batch Index of batch within the current epoch.\n * @param logs Dictionary of logs.\n */\n async onBatchEnd(batch, logs) {\n if (logs == null) {\n logs = {};\n }\n for (const callback of this.callbacks) {\n await callback.onBatchEnd(batch, logs);\n }\n }\n /**\n * Called at the beginning of training.\n * @param logs Dictionary of logs.\n */\n async onTrainBegin(logs) {\n if (logs == null) {\n logs = {};\n }\n for (const callback of this.callbacks) {\n await callback.onTrainBegin(logs);\n }\n }\n /**\n * Called at the end of training.\n * @param logs Dictionary of logs.\n */\n async onTrainEnd(logs) {\n if (logs == null) {\n logs = {};\n }\n for (const callback of this.callbacks) {\n await callback.onTrainEnd(logs);\n }\n }\n}\n/**\n * Callback that accumulates epoch averages of metrics.\n *\n * This callback is automatically applied to every LayersModel.\n */\nexport class BaseLogger extends BaseCallback {\n constructor() {\n super();\n }\n async onEpochBegin(epoch) {\n this.seen = 0;\n this.totals = {};\n }\n async onBatchEnd(batch, logs) {\n if (logs == null) {\n logs = {};\n }\n const batchSize = logs['size'] == null ? 0 : logs['size'];\n this.seen += batchSize;\n for (const key in logs) {\n const value = logs[key];\n if (typeof value === 'number') {\n if (!this.totals.hasOwnProperty(key)) {\n this.totals[key] = 0;\n }\n this.totals[key] = this.totals[key] + value * batchSize;\n }\n else {\n let oldTotalsToDispose;\n if (key in this.totals) {\n oldTotalsToDispose = this.totals[key];\n }\n else {\n this.totals[key] = 0;\n }\n const total = tidy(() => add((this.totals[key]), mul(value, batchSize)));\n this.totals[key] = total;\n if (oldTotalsToDispose != null) {\n oldTotalsToDispose.dispose();\n }\n }\n }\n }\n async onEpochEnd(epoch, logs) {\n if (logs != null) {\n for (const key of this.params['metrics']) {\n if (this.totals[key] == null) {\n continue;\n }\n if (typeof this.totals[key] === 'number') {\n logs[key] = this.totals[key] / this.seen;\n }\n else {\n tidy(() => {\n const log = mul(div(1, this.seen), this.totals[key]);\n logs[key] = log;\n this.totals[key].dispose();\n keep(logs[key]);\n });\n }\n }\n }\n }\n}\n/**\n * Callback that records events into a `History` object. This callback is\n * automatically applied to every TF.js Layers model. 
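A hedged sketch of how the callback machinery above is driven in practice (assuming `tf` is `@tensorflow/tfjs`): plain callback objects passed to `fit()` are wrapped into `CustomCallback` instances by `standardizeCallbacks()` below, while `BaseLogger` above supplies the per-epoch averages seen in `logs`.

```js
// Sketch only: object-style callbacks during fit(); History records per-epoch logs.
import * as tf from '@tensorflow/tfjs';

async function train() {
  const model = tf.sequential({layers: [tf.layers.dense({units: 1, inputShape: [1]})]});
  model.compile({optimizer: 'sgd', loss: 'meanSquaredError'});

  const xs = tf.tensor2d([[1], [2], [3], [4]]);
  const ys = tf.tensor2d([[2], [4], [6], [8]]);

  const history = await model.fit(xs, ys, {
    epochs: 3,
    callbacks: {onEpochEnd: (epoch, logs) => console.log(epoch, logs.loss)},
  });
  console.log(history.history.loss);              // the History callback's record
}
train();
```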
The `History` object\n * gets returned by the `fit` method of models.\n */\nexport class History extends BaseCallback {\n async onTrainBegin(logs) {\n this.epoch = [];\n this.history = {};\n }\n async onEpochEnd(epoch, logs) {\n if (logs == null) {\n logs = {};\n }\n this.epoch.push(epoch);\n for (const key in logs) {\n if (this.history[key] == null) {\n this.history[key] = [];\n }\n this.history[key].push(logs[key]);\n }\n }\n /**\n * Await the values of all losses and metrics.\n */\n async syncData() {\n const promises = [];\n const keys = [];\n const indices = [];\n for (const key in this.history) {\n const valueArray = this.history[key];\n for (let i = 0; i < valueArray.length; ++i) {\n if (typeof valueArray[i] !== 'number') {\n const valueScalar = valueArray[i];\n promises.push(valueScalar.data());\n keys.push(key);\n indices.push(i);\n }\n }\n }\n const values = await Promise.all(promises);\n for (let n = 0; n < values.length; ++n) {\n const tensorToDispose = this.history[keys[n]][indices[n]];\n tensorToDispose.dispose();\n this.history[keys[n]][indices[n]] = values[n][0];\n }\n }\n}\n/**\n * Custom callback for training.\n */\nexport class CustomCallback extends BaseCallback {\n constructor(args, yieldEvery) {\n super();\n this.currentEpoch = 0;\n this.yieldEvery = yieldEvery || 'auto';\n if (this.yieldEvery === 'auto') {\n this.yieldEvery = DEFAULT_YIELD_EVERY_MS;\n }\n if (this.yieldEvery === 'never' && args.onYield != null) {\n throw new Error('yieldEvery is `never` but you provided an `onYield` callback. ' +\n 'Either change `yieldEvery` or remove the callback');\n }\n if (util.isNumber(this.yieldEvery)) {\n // Decorate `maybeWait` so it will be called at most once every\n // `yieldEvery` ms.\n this.maybeWait = generic_utils.debounce(this.maybeWait.bind(this), this.yieldEvery);\n }\n this.trainBegin = args.onTrainBegin;\n this.trainEnd = args.onTrainEnd;\n this.epochBegin = args.onEpochBegin;\n this.epochEnd = args.onEpochEnd;\n this.batchBegin = args.onBatchBegin;\n this.batchEnd = args.onBatchEnd;\n this.yield = args.onYield;\n }\n async maybeWait(epoch, batch, logs) {\n const ps = [];\n if (this.yield != null) {\n await resolveScalarsInLogs(logs);\n ps.push(this.yield(epoch, batch, logs));\n }\n ps.push(nextFrame());\n await Promise.all(ps);\n }\n async onEpochBegin(epoch, logs) {\n this.currentEpoch = epoch;\n if (this.epochBegin != null) {\n await resolveScalarsInLogs(logs);\n await this.epochBegin(epoch, logs);\n }\n }\n async onEpochEnd(epoch, logs) {\n const ps = [];\n if (this.epochEnd != null) {\n await resolveScalarsInLogs(logs);\n ps.push(this.epochEnd(epoch, logs));\n }\n if (this.yieldEvery === 'epoch') {\n ps.push(nextFrame());\n }\n await Promise.all(ps);\n }\n async onBatchBegin(batch, logs) {\n if (this.batchBegin != null) {\n await resolveScalarsInLogs(logs);\n await this.batchBegin(batch, logs);\n }\n }\n async onBatchEnd(batch, logs) {\n const ps = [];\n if (this.batchEnd != null) {\n await resolveScalarsInLogs(logs);\n ps.push(this.batchEnd(batch, logs));\n }\n if (this.yieldEvery === 'batch') {\n ps.push(nextFrame());\n }\n else if (util.isNumber(this.yieldEvery)) {\n ps.push(this.maybeWait(this.currentEpoch, batch, logs));\n }\n await Promise.all(ps);\n }\n async onTrainBegin(logs) {\n if (this.trainBegin != null) {\n await resolveScalarsInLogs(logs);\n await this.trainBegin(logs);\n }\n }\n async onTrainEnd(logs) {\n if (this.trainEnd != null) {\n await resolveScalarsInLogs(logs);\n await this.trainEnd(logs);\n }\n }\n}\n/**\n * Standardize callbacks or 
configurations of them to an Array of callbacks.\n */\nexport function standardizeCallbacks(callbacks, yieldEvery) {\n if (callbacks == null) {\n callbacks = {};\n }\n if (callbacks instanceof BaseCallback) {\n return [callbacks];\n }\n if (Array.isArray(callbacks) && callbacks[0] instanceof BaseCallback) {\n return callbacks;\n }\n // Convert custom callback configs to custom callback objects.\n const callbackConfigs = generic_utils.toList(callbacks);\n return callbackConfigs.map(callbackConfig => new CustomCallback(callbackConfig, yieldEvery));\n}\n/**\n * A global registry for callback constructors to be used during\n * LayersModel.fit().\n */\nexport class CallbackConstructorRegistry {\n /**\n * Blocks public access to constructor.\n */\n constructor() { }\n /**\n * Register a tf.LayersModel.fit() callback constructor.\n *\n * The registered callback constructor will be used to instantiate\n * callbacks for every tf.LayersModel.fit() call afterwards.\n *\n * @param verbosityLevel Level of verbosity at which the `callbackConstructor`\n * is to be reigstered.\n * @param callbackConstructor A no-arg constructor for `tf.Callback`.\n * @throws Error, if the same callbackConstructor has been registered before,\n * either at the same or a different `verbosityLevel`.\n */\n static registerCallbackConstructor(verbosityLevel, callbackConstructor) {\n util.assert(verbosityLevel >= 0 && Number.isInteger(verbosityLevel), () => `Verbosity level is expected to be an integer >= 0, ` +\n `but got ${verbosityLevel}`);\n CallbackConstructorRegistry.checkForDuplicate(callbackConstructor);\n if (CallbackConstructorRegistry.constructors[verbosityLevel] == null) {\n CallbackConstructorRegistry.constructors[verbosityLevel] = [];\n }\n CallbackConstructorRegistry.constructors[verbosityLevel].push(callbackConstructor);\n }\n static checkForDuplicate(callbackConstructor) {\n for (const levelName in CallbackConstructorRegistry.constructors) {\n const constructors = CallbackConstructorRegistry.constructors[+levelName];\n constructors.forEach(ctor => {\n if (ctor === callbackConstructor) {\n throw new ValueError('Duplicate callback constructor.');\n }\n });\n }\n }\n /**\n * Clear all registered callback constructors.\n */\n static clear() {\n CallbackConstructorRegistry.constructors = {};\n }\n /**\n * Create callbacks using the registered callback constructors.\n *\n * Given `verbosityLevel`, all constructors registered at that level or above\n * will be called and the instantiated callbacks will be used.\n *\n * @param verbosityLevel: Level of verbosity.\n */\n static createCallbacks(verbosityLevel) {\n const constructors = [];\n for (const levelName in CallbackConstructorRegistry.constructors) {\n const level = +levelName;\n if (verbosityLevel >= level) {\n constructors.push(...CallbackConstructorRegistry.constructors[level]);\n }\n }\n return constructors.map(ctor => new ctor());\n }\n}\nCallbackConstructorRegistry.constructors = {};\nexport function configureCallbacks(callbacks, verbose, epochs, initialEpoch, numTrainSamples, stepsPerEpoch, batchSize, doValidation, callbackMetrics) {\n const history = new History();\n const actualCallbacks = [\n new BaseLogger(), ...CallbackConstructorRegistry.createCallbacks(verbose)\n ];\n if (callbacks != null) {\n actualCallbacks.push(...callbacks);\n }\n actualCallbacks.push(history);\n const callbackList = new CallbackList(actualCallbacks);\n // TODO(cais): Figure out when this LayersModel instance can have a\n // dynamically\n // set property called 'callback_model' as 
in PyKeras.\n callbackList.setParams({\n epochs,\n initialEpoch,\n samples: numTrainSamples,\n steps: stepsPerEpoch,\n batchSize,\n verbose,\n doValidation,\n metrics: callbackMetrics,\n });\n return { callbackList, history };\n}\n//# sourceMappingURL=base_callbacks.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* Original Source layers/__init__.py */\nimport { serialization } from '@tensorflow/tfjs-core';\nimport { deserializeKerasObject } from '../utils/generic_utils';\n/**\n * Instantiate a layer from a config dictionary.\n * @param config dict of the form {class_name: str, config: dict}\n * @param customObjects dict mapping class names (or function names)\n * of custom (non-Keras) objects to class/functions\n * @param fastWeightInit Optional flag to use fast weight initialization\n * during deserialization. This is applicable to cases in which\n * the initialization will be immediately overwritten by loaded weight\n * values. Default: `false`.\n * @returns Layer instance (may be LayersModel, Sequential, Layer...)\n */\nexport function deserialize(config, customObjects = {}, fastWeightInit = false) {\n return deserializeKerasObject(config, serialization.SerializationMap.getMap().classNameMap, customObjects, 'layer', fastWeightInit);\n}\n//# sourceMappingURL=serialization.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* Original Source: losses.py */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { tidy, util } from '@tensorflow/tfjs-core';\nimport { epsilon } from './backend/common';\nimport * as K from './backend/tfjs_backend';\nimport { ValueError } from './errors';\n/**\n * Normalizes a tensor wrt the L2 norm alongside the specified axis.\n * @param x\n * @param axis Axis along which to perform normalization.\n */\nexport function l2Normalize(x, axis) {\n return tidy(() => {\n if (x.dtype !== 'float32') {\n x = x.asType('float32');\n }\n const squareSum = tfc.sum(K.square(x), axis, true);\n const epsilonTensor = tfc.fill(squareSum.shape, epsilon());\n const norm = tfc.sqrt(tfc.maximum(squareSum, epsilonTensor));\n return tfc.div(x, norm);\n });\n}\nexport function meanSquaredError(yTrue, yPred) {\n return tidy(() => tfc.mean(K.square(tfc.sub(yPred, yTrue)), -1));\n}\nexport function meanAbsoluteError(yTrue, yPred) {\n return tidy(() => tfc.mean(tfc.abs(tfc.sub(yPred, yTrue)), -1));\n}\nexport function meanAbsolutePercentageError(yTrue, yPred) {\n return tidy(() => {\n const diff = tfc.sub(yTrue, yPred);\n const clippedTrue = tfc.clipByValue(tfc.abs(yTrue), epsilon(), Number.MAX_VALUE);\n const absResult = tfc.abs(tfc.div(diff, clippedTrue));\n return tfc.mul(100, tfc.mean(absResult, -1));\n });\n}\nexport function meanSquaredLogarithmicError(yTrue, yPred) {\n return tidy(() => {\n const clippedPred = tfc.clipByValue(yPred, epsilon(), Number.MAX_VALUE);\n const firstLog = tfc.log(tfc.add(1, clippedPred));\n const clippedTrue = tfc.clipByValue(yTrue, epsilon(), Number.MAX_VALUE);\n const secondLog = tfc.log(tfc.add(1, clippedTrue));\n return tfc.mean(K.square(tfc.sub(firstLog, 
secondLog)), -1);\n });\n}\nexport function squaredHinge(yTrue, yPred) {\n return tidy(() => {\n const maxResult = tfc.maximum(0, tfc.sub(1, tfc.mul(yTrue, yPred)));\n return tfc.mean(K.square(maxResult), -1);\n });\n}\nexport function hinge(yTrue, yPred) {\n return tidy(() => {\n const maxResult = tfc.maximum(0, tfc.sub(1, tfc.mul(yTrue, yPred)));\n return tfc.mean(maxResult, -1);\n });\n}\nexport function categoricalHinge(yTrue, yPred) {\n return tidy(() => {\n const pos = tfc.sum(tfc.mul(yTrue, yPred), -1);\n const neg = tfc.max(tfc.mul(tfc.sub(1, yTrue), yPred), -1);\n return tfc.maximum(0, tfc.add(1, tfc.sub(neg, pos)));\n });\n}\n/**\n * Logarithm of the hyperbolic cosine of the prediction error.\n *\n * `log(cosh(x))` is approximately equal to `(x ** 2) / 2` for small `x` and\n * to `abs(x) - log(2)` for large `x`. This means that 'logcosh' works mostly\n * like the mean squared error, but will not be so strongly affected by the\n * occasional wildly incorrect prediction.\n */\nexport function logcosh(yTrue, yPred) {\n return tidy(() => {\n const log2 = Math.log(2);\n const predictionDiff = tfc.sub(yPred, yTrue);\n const logcoshResult = tfc.sub(tfc.add(predictionDiff, tfc.softplus(tfc.mul(-2, predictionDiff))), log2);\n return tfc.mean(logcoshResult, -1);\n });\n}\nexport function categoricalCrossentropy(target, output, fromLogits = false) {\n return tidy(() => {\n if (fromLogits) {\n output = tfc.softmax(output);\n }\n else {\n // scale preds so that the class probabilities of each sample sum to 1.\n const outputSum = tfc.sum(output, output.shape.length - 1, true);\n output = tfc.div(output, outputSum);\n }\n output = tfc.clipByValue(output, epsilon(), 1 - epsilon());\n return tfc.neg(tfc.sum(tfc.mul(target.toFloat(), tfc.log(output)), output.shape.length - 1));\n });\n}\n/**\n * Categorical crossentropy with integer targets.\n *\n * @param target An integer tensor.\n * @param output A tensor resulting from a softmax (unless `fromLogits` is\n * `true`, in which case `output` is expected to be the logits).\n * @param fromLogits Boolean, whether `output` is the result of a softmax, or is\n * a tensor of logits.\n */\nexport function sparseCategoricalCrossentropy(target, output, fromLogits = false) {\n return tidy(() => {\n const flatTarget = tfc.floor(K.flatten(target)).toInt();\n output = tfc.clipByValue(output, epsilon(), 1 - epsilon());\n const outputShape = output.shape;\n const oneHotTarget = tfc.oneHot(flatTarget, outputShape[outputShape.length - 1])\n .reshape(outputShape);\n return categoricalCrossentropy(oneHotTarget, output, fromLogits);\n });\n}\n/**\n * From TensorFlow's implementation in nn_impl.py:\n *\n * For brevity, let `x = logits`, `z = labels`. 
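A quick numeric check of the `categoricalCrossentropy` definition above, run through the public alias (assuming a tfjs build that exposes it as `tf.metrics.categoricalCrossentropy`): for a one-hot target the loss reduces to `-log` of the predicted probability of the true class.

```js
// Sketch only: -sum(target * log(pred)) per sample, as implemented above.
import * as tf from '@tensorflow/tfjs';

const yTrue = tf.tensor2d([[0, 1, 0]]);
const yPred = tf.tensor2d([[0.05, 0.90, 0.05]]);

tf.metrics.categoricalCrossentropy(yTrue, yPred).print();  // ~[0.1054] = -ln(0.9)
```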
The logistic loss is\n * z * -log(sigmoid(x)) + (1 - z) * -log(1 - sigmoid(x))\n * = z * -log(1 / (1 + exp(-x))) + (1 - z) * -log(exp(-x) / (1 + exp(-x)))\n * = z * log(1 + exp(-x)) + (1 - z) * (-log(exp(-x)) + log(1 + exp(-x)))\n * = z * log(1 + exp(-x)) + (1 - z) * (x + log(1 + exp(-x))\n * = (1 - z) * x + log(1 + exp(-x))\n * = x - x * z + log(1 + exp(-x))\n * For x < 0, to avoid overflow in exp(-x), we reformulate the above\n * x - x * z + log(1 + exp(-x))\n * = log(exp(x)) - x * z + log(1 + exp(-x))\n * = - x * z + log(1 + exp(x))\n * Hence, to ensure stability and avoid overflow, the implementation uses this\n * equivalent formulation\n * max(x, 0) - x * z + log(1 + exp(-abs(x)))\n *\n * @param labels The labels.\n * @param logits The logits.\n */\nexport function sigmoidCrossEntropyWithLogits(labels, logits) {\n if (!util.arraysEqual(labels.shape, logits.shape)) {\n throw new ValueError(`logits and labels must have the same shape, but got shapes ` +\n `${JSON.stringify(labels.shape)} and ${JSON.stringify(logits.shape)}`);\n }\n return tidy(() => {\n // The logistic loss formula from above is\n // x - x * z + log(1 + exp(-x))\n // For x < 0, a more numerically stable formula is\n // -x * z + log(1 + exp(x))\n // Note that these two expressions can be combined into the following:\n // max(x, 0) - x * z + log(1 + exp(-abs(x)))\n const reluLogits = logits.relu();\n const negAbsLogits = logits.abs().neg();\n return reluLogits.sub(logits.mul(labels)).add(negAbsLogits.exp().log1p());\n });\n}\nexport function binaryCrossentropy(yTrue, yPred) {\n return tidy(() => {\n let y;\n y = tfc.clipByValue(yPred, epsilon(), 1 - epsilon());\n y = tfc.log(tfc.div(y, tfc.sub(1, y)));\n return tfc.mean(sigmoidCrossEntropyWithLogits(yTrue, y), -1);\n });\n}\nexport function kullbackLeiblerDivergence(yTrue, yPred) {\n return tidy(() => {\n const clippedTrue = tfc.clipByValue(yTrue, epsilon(), 1);\n const clippedPred = tfc.clipByValue(yPred, epsilon(), 1);\n return tfc.sum(tfc.mul(yTrue, tfc.log(tfc.div(clippedTrue, clippedPred))), -1);\n });\n}\nexport function poisson(yTrue, yPred) {\n return tidy(() => {\n const logPred = tfc.log(tfc.add(epsilon(), yPred));\n return tfc.mean(tfc.sub(yPred, tfc.mul(yTrue, logPred)), -1);\n });\n}\nexport function cosineProximity(yTrue, yPred) {\n return tidy(() => {\n const trueNormalized = l2Normalize(yTrue, -1);\n const predNormalized = l2Normalize(yPred, -1);\n const trueXPred = tfc.mul(trueNormalized, predNormalized);\n return tfc.neg(tfc.sum(trueXPred, -1));\n });\n}\nexport const mse = meanSquaredError;\nexport const MSE = meanSquaredError;\nexport const mae = meanAbsoluteError;\nexport const MAE = meanAbsoluteError;\nexport const mape = meanAbsolutePercentageError;\nexport const MAPE = meanAbsolutePercentageError;\nexport const msle = meanSquaredLogarithmicError;\nexport const MSLE = meanSquaredLogarithmicError;\nexport const kld = kullbackLeiblerDivergence;\nexport const KLD = kullbackLeiblerDivergence;\nexport const cosine = cosineProximity;\n// TODO(michaelterry): Add deserialize() function.\nexport const lossesMap = {\n meanSquaredError,\n meanAbsoluteError,\n meanAbsolutePercentageError,\n meanSquaredLogarithmicError,\n squaredHinge,\n hinge,\n categoricalHinge,\n logcosh,\n categoricalCrossentropy,\n sparseCategoricalCrossentropy,\n binaryCrossentropy,\n kullbackLeiblerDivergence,\n poisson,\n cosineProximity\n};\n// Porting note: This diverges from the PyKeras implementation and may need to\n// change based on (de)serialization requirements.\nexport function 
get(identifierOrFn) {\n if (typeof identifierOrFn === 'string') {\n if (identifierOrFn in lossesMap) {\n return lossesMap[identifierOrFn];\n }\n let errMsg = `Unknown loss ${identifierOrFn}`;\n if (identifierOrFn.toLowerCase().includes('softmaxcrossentropy')) {\n errMsg = `Unknown loss ${identifierOrFn}. ` +\n 'Use \"categoricalCrossentropy\" as the string name for ' +\n 'tf.losses.softmaxCrossEntropy';\n }\n throw new ValueError(errMsg);\n }\n else {\n return identifierOrFn;\n }\n}\n//# sourceMappingURL=losses.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Built-in metrics.\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { tidy } from '@tensorflow/tfjs-core';\nimport * as K from './backend/tfjs_backend';\nimport { NotImplementedError, ValueError } from './errors';\nimport { categoricalCrossentropy as categoricalCrossentropyLoss, cosineProximity, meanAbsoluteError, meanAbsolutePercentageError, meanSquaredError, sparseCategoricalCrossentropy as sparseCategoricalCrossentropyLoss } from './losses';\nimport { binaryCrossentropy as lossBinaryCrossentropy } from './losses';\nimport { lossesMap } from './losses';\nimport * as util from './utils/generic_utils';\nexport function binaryAccuracy(yTrue, yPred) {\n return tidy(() => {\n const threshold = tfc.mul(.5, tfc.onesLike(yPred));\n const yPredThresholded = K.cast(tfc.greater(yPred, threshold), yTrue.dtype);\n return tfc.mean(tfc.equal(yTrue, yPredThresholded), -1);\n });\n}\nexport function categoricalAccuracy(yTrue, yPred) {\n return tidy(() => K.cast(tfc.equal(tfc.argMax(yTrue, -1), tfc.argMax(yPred, -1)), 'float32'));\n}\nfunction truePositives(yTrue, yPred) {\n return tidy(() => {\n return tfc.logicalAnd(yTrue.equal(1), yPred.equal(1)).sum().cast('float32');\n });\n}\nfunction falseNegatives(yTrue, yPred) {\n return tidy(() => {\n return tfc.logicalAnd(yTrue.equal(1), yPred.equal(0)).sum().cast('float32');\n });\n}\nfunction falsePositives(yTrue, yPred) {\n return tidy(() => {\n return tfc.logicalAnd(yTrue.equal(0), yPred.equal(1)).sum().cast('float32');\n });\n}\nexport function precision(yTrue, yPred) {\n return tidy(() => {\n const tp = truePositives(yTrue, yPred);\n const fp = falsePositives(yTrue, yPred);\n const denominator = tp.add(fp);\n return tfc.where(tfc.greater(denominator, 0), tp.div(denominator), 0)\n .cast('float32');\n });\n}\nexport function recall(yTrue, yPred) {\n return tidy(() => {\n const tp = truePositives(yTrue, yPred);\n const fn = falseNegatives(yTrue, yPred);\n const denominator = tp.add(fn);\n return tfc.where(tfc.greater(denominator, 0), tp.div(denominator), 0)\n .cast('float32');\n });\n}\nexport function binaryCrossentropy(yTrue, yPred) {\n return lossBinaryCrossentropy(yTrue, yPred);\n}\nexport function sparseCategoricalAccuracy(yTrue, yPred) {\n if (yTrue.rank === yPred.rank) {\n yTrue = yTrue.squeeze([yTrue.rank - 1]);\n }\n yPred = yPred.argMax(-1);\n if (yPred.dtype !== yTrue.dtype) {\n yPred = yPred.asType(yTrue.dtype);\n }\n return tfc.equal(yTrue, yPred).asType('float32');\n}\nexport function topKCategoricalAccuracy(yTrue, yPred) {\n throw new NotImplementedError();\n}\nexport function sparseTopKCategoricalAccuracy(yTrue, yPred) {\n throw new NotImplementedError();\n}\n// Aliases.\nexport const mse = meanSquaredError;\nexport const 
MSE = meanSquaredError;\nexport const mae = meanAbsoluteError;\nexport const MAE = meanAbsoluteError;\nexport const mape = meanAbsolutePercentageError;\nexport const MAPE = meanAbsolutePercentageError;\nexport const categoricalCrossentropy = categoricalCrossentropyLoss;\nexport const cosine = cosineProximity;\nexport const sparseCategoricalCrossentropy = sparseCategoricalCrossentropyLoss;\n// TODO(cais, nielsene): Add serialize().\nexport const metricsMap = {\n binaryAccuracy,\n categoricalAccuracy,\n precision,\n categoricalCrossentropy,\n sparseCategoricalCrossentropy,\n mse,\n MSE,\n mae,\n MAE,\n mape,\n MAPE,\n cosine\n};\nexport function get(identifier) {\n if (typeof identifier === 'string' && identifier in metricsMap) {\n return metricsMap[identifier];\n }\n else if (typeof identifier !== 'string' && identifier != null) {\n return identifier;\n }\n else {\n throw new ValueError(`Unknown metric ${identifier}`);\n }\n}\n/**\n * Get the shortcut function name.\n *\n * If the fn name is a string,\n * directly return the string name.\n * If the function is included in metricsMap or lossesMap,\n * return key of the map.\n * - If the function relative to multiple keys,\n * return the first found key as the function name.\n * - If the function exists in both lossesMap and metricsMap,\n * search lossesMap first.\n * If the function is not included in metricsMap or lossesMap,\n * return the function name.\n *\n * @param fn loss function, metric function, or short cut name.\n * @returns Loss or Metric name in string.\n */\nexport function getLossOrMetricName(fn) {\n util.assert(fn !== null, `Unknown LossOrMetricFn ${fn}`);\n if (typeof fn === 'string') {\n return fn;\n }\n else {\n let fnName;\n for (const key of Object.keys(lossesMap)) {\n if (lossesMap[key] === fn) {\n fnName = key;\n break;\n }\n }\n if (fnName !== undefined) {\n return fnName;\n }\n for (const key of Object.keys(metricsMap)) {\n if (metricsMap[key] === fn) {\n fnName = key;\n break;\n }\n }\n if (fnName !== undefined) {\n return fnName;\n }\n return fn.name;\n }\n}\n//# sourceMappingURL=metrics.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Optimizers.\n */\nimport { train } from '@tensorflow/tfjs-core';\nimport { epsilon } from './backend/common';\nimport { ValueError } from './errors';\n// Add (de)serialize()\n// Porting note: This diverges from the PyKeras implementation and may need to\n// change based on (de)serialization requirements.\nexport function getOptimizer(identifier) {\n const optimizerMap = {\n 'Adagrad': () => train.adagrad(0.01),\n 'Adadelta': () => train.adadelta(1, 0.95, epsilon()),\n 'Adam': () => train.adam(0.001, 0.9, 0.999, epsilon()),\n 'Adamax': () => train.adamax(0.002, 0.9, 0.999, epsilon(), 0),\n 'RMSProp': () => train.rmsprop(0.001, 0.9, 0, epsilon()),\n 'SGD': () => train.sgd(0.01)\n };\n optimizerMap['adagrad'] = optimizerMap['Adagrad'];\n optimizerMap['adadelta'] = optimizerMap['Adadelta'];\n optimizerMap['adam'] = optimizerMap['Adam'];\n optimizerMap['adamax'] = optimizerMap['Adamax'];\n optimizerMap['rmsprop'] = optimizerMap['RMSProp'];\n optimizerMap['sgd'] = optimizerMap['SGD'];\n if (identifier in optimizerMap) {\n return optimizerMap[identifier]();\n }\n throw new ValueError(`Unknown Optimizer ${identifier}`);\n}\n//# 
sourceMappingURL=optimizers.js.map", "/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/** Utility functions related to user-defined metadata. */\n// Maximum recommended serialized size for user-defined metadata.\n// Beyond this limit, a warning message will be printed during model loading and\n// saving.\nexport const MAX_USER_DEFINED_METADATA_SERIALIZED_LENGTH = 1 * 1024 * 1024;\n/**\n * Check validity of user-defined metadata.\n *\n * @param userDefinedMetadata\n * @param modelName Name of the model that the user-defined metadata belongs to.\n * Used during construction of error messages.\n * @param checkSize Whether to check the size of the metadata is under\n * recommended limit. Default: `false`. If `true`, will try stringify the\n * JSON object and print a console warning if the serialzied size is above the\n * limit.\n * @throws Error if `userDefinedMetadata` is not a plain JSON object.\n */\nexport function checkUserDefinedMetadata(userDefinedMetadata, modelName, checkSize = false) {\n if (userDefinedMetadata == null ||\n typeof userDefinedMetadata !== 'object' ||\n Object.getPrototypeOf(userDefinedMetadata) !== Object.prototype ||\n !plainObjectCheck(userDefinedMetadata)) {\n throw new Error('User-defined metadata is expected to be a JSON object, but is not.');\n }\n if (checkSize) {\n const out = JSON.stringify(userDefinedMetadata);\n if (out.length > MAX_USER_DEFINED_METADATA_SERIALIZED_LENGTH) {\n console.warn(`User-defined metadata of model \"${modelName}\" is too large in ` +\n `size (length=${out.length} when serialized). It is not ` +\n `recommended to store such large objects in user-defined metadata. ` +\n `Please make sure its serialized length is <= ` +\n `${MAX_USER_DEFINED_METADATA_SERIALIZED_LENGTH}.`);\n }\n }\n}\n/**\n * Check if an input is plain JSON object or any valid subfield of it.\n *\n * @param x The input to be checked.\n * @param assertObject Whether to assert `x` is a JSON object, i.e., reject\n * cases of arrays and primitives.\n * @return Returns `true` if and only if `x` is a plain JSON object,\n * a JSON-valid primitive including string, number, boolean and null,\n * or an array of the said types.\n */\n// tslint:disable-next-line:no-any\nexport function plainObjectCheck(x) {\n if (x === null) {\n // Note: typeof `null` is 'object', and `null` is valid in JSON.\n return true;\n }\n else if (typeof x === 'object') {\n if (Object.getPrototypeOf(x) === Object.prototype) {\n // `x` is a JavaScript object and its prototype is Object.\n const keys = Object.keys(x);\n for (const key of keys) {\n if (typeof key !== 'string') {\n // JSON keys must be strings.\n return false;\n }\n if (!plainObjectCheck(x[key])) { // Recursive call.\n return false;\n }\n }\n return true;\n }\n else {\n // `x` is a JavaScript object but its prototype is not Object.\n if (Array.isArray(x)) {\n // `x` is a JavaScript array.\n for (const item of x) {\n if (!plainObjectCheck(item)) { // Recursive call.\n return false;\n }\n }\n return true;\n }\n else {\n // `x` is a JavaScript object and its prototype is not Object,\n // and it's not an Array. 
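A short sketch of where `checkUserDefinedMetadata()` above is exercised from the public API (assuming a tfjs version whose `LayersModel` exposes `setUserDefinedMetadata()` / `getUserDefinedMetadata()`): only values that pass `plainObjectCheck()` above are accepted.

```js
// Sketch only: metadata must be a plain JSON object (no Dates, class instances, etc.).
import * as tf from '@tensorflow/tfjs';

const model = tf.sequential({layers: [tf.layers.dense({units: 1, inputShape: [1]})]});
model.setUserDefinedMetadata({author: 'example', tags: ['demo'], version: 1});
console.log(model.getUserDefinedMetadata());

// model.setUserDefinedMetadata({created: new Date()}) would throw:
// "User-defined metadata is expected to be a JSON object, but is not."
```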
I.e., it's a complex object such as\n // `Error` and `Date`.\n return false;\n }\n }\n }\n else {\n // `x` is not a JavaScript object or `null`.\n const xType = typeof x;\n return xType === 'string' || xType === 'number' || xType === 'boolean';\n }\n}\n//# sourceMappingURL=user_defined_metadata.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport { countParamsInWeights } from './variable_utils';\n/**\n * Print the summary of a LayersModel object.\n *\n * @param model tf.LayersModel instance.\n * @param lineLength Total length of printed lines. Set this to adapt to the\n * display to different terminal or console sizes.\n * @param positions Relative or absolute positions of log elements in each\n * line. Each number corresponds to right-most (i.e., ending) position of a\n * column.\n * If not provided, defaults to `[0.45, 0.85, 1]` for sequential-like\n * models and `[0.33, 0.55, 0.67, 1]` for non-sequential like models.\n * @param printFn Print function to use.\n * It will be called on each line of the summary. You can provide a custom\n * function in order to capture the string summary. Defaults to `console.log`.\n */\nexport function printSummary(model, lineLength, positions, \n// tslint:disable-next-line:no-any\nprintFn = console.log) {\n const sequentialLike = isModelSequentialLike(model);\n // Header names for different log elements.\n const toDisplay = ['Layer (type)', 'Output shape', 'Param #'];\n if (sequentialLike) {\n lineLength = lineLength || 65;\n positions = positions || [0.45, 0.85, 1];\n }\n else {\n lineLength = lineLength || 98;\n positions = positions || [0.33, 0.55, 0.67, 1];\n // Header names for different log elements.\n }\n if (positions[positions.length - 1] <= 1) {\n // `positions` is relative. Convert it to absolute positioning.\n positions = positions.map(p => Math.floor(lineLength * p));\n }\n let relevantNodes;\n if (!sequentialLike) {\n toDisplay.push('Receives inputs');\n relevantNodes = [];\n for (const depth in model.nodesByDepth) {\n relevantNodes.push(...model.nodesByDepth[depth]);\n }\n }\n printFn('_'.repeat(lineLength));\n printRow(toDisplay, positions, printFn);\n printFn('='.repeat(lineLength));\n const layers = model.layers;\n for (let i = 0; i < layers.length; ++i) {\n if (sequentialLike) {\n printLayerSummary(layers[i], positions, printFn);\n }\n else {\n printLayerSummaryWithConnections(layers[i], positions, relevantNodes, printFn);\n }\n printFn((i === layers.length - 1 ? 
'=' : '_').repeat(lineLength));\n }\n // tslint:disable-next-line:no-any\n model.checkTrainableWeightsConsistency();\n const trainableCount = countTrainableParams(model);\n const nonTrainableCount = countParamsInWeights(model.nonTrainableWeights);\n printFn(`Total params: ${trainableCount + nonTrainableCount}`);\n printFn(`Trainable params: ${trainableCount}`);\n printFn(`Non-trainable params: ${nonTrainableCount}`);\n printFn('_'.repeat(lineLength));\n}\nfunction countTrainableParams(model) {\n let trainableCount;\n // tslint:disable:no-any\n if (model.collectedTrainableWeights != null) {\n trainableCount =\n countParamsInWeights(model.collectedTrainableWeights);\n }\n else {\n trainableCount = countParamsInWeights(model.trainableWeights);\n }\n // tslint:enable:no-any\n return trainableCount;\n}\nfunction isModelSequentialLike(model) {\n let sequentialLike = true;\n const nodesByDepth = [];\n const nodes = [];\n for (const depth in model.nodesByDepth) {\n nodesByDepth.push(model.nodesByDepth[depth]);\n }\n for (const depthNodes of nodesByDepth) {\n if (depthNodes.length > 1 ||\n depthNodes.length === 1 && depthNodes[0].inboundLayers.length > 1) {\n sequentialLike = false;\n break;\n }\n nodes.push(...depthNodes);\n }\n if (sequentialLike) {\n // Search for shared layers.\n for (const layer of model.layers) {\n let flag = false;\n for (const node of layer.inboundNodes) {\n if (nodes.indexOf(node) !== -1) {\n if (flag) {\n sequentialLike = false;\n break;\n }\n else {\n flag = true;\n }\n }\n }\n if (!sequentialLike) {\n break;\n }\n }\n }\n return sequentialLike;\n}\nfunction printRow(fields, positions, \n// tslint:disable-next-line:no-any\nprintFn = console.log) {\n let line = '';\n for (let i = 0; i < fields.length; ++i) {\n if (i > 0) {\n line = line.slice(0, line.length - 1) + ' ';\n }\n line += fields[i];\n line = line.slice(0, positions[i]);\n line += ' '.repeat(positions[i] - line.length);\n }\n printFn(line);\n}\n/**\n * Prints a summary for a single Layer, without connectivity information.\n *\n * @param layer: Layer instance to print.\n */\nfunction printLayerSummary(layer, positions, \n// tslint:disable-next-line:no-any\nprintFn) {\n let outputShape;\n try {\n outputShape = JSON.stringify(layer.outputShape);\n }\n catch (err) {\n outputShape = 'multiple';\n }\n const name = layer.name;\n const className = layer.getClassName();\n const fields = [`${name} (${className})`, outputShape, layer.countParams().toString()];\n printRow(fields, positions, printFn);\n}\n/**\n * Prints a summary for a single Layer, with connectivity information.\n */\nfunction printLayerSummaryWithConnections(layer, positions, relevantNodes, \n// tslint:disable-next-line:no-any\nprintFn) {\n let outputShape;\n try {\n outputShape = JSON.stringify(layer.outputShape);\n }\n catch (err) {\n outputShape = 'multiple';\n }\n const connections = [];\n for (const node of layer.inboundNodes) {\n if (relevantNodes != null && relevantNodes.length > 0 &&\n relevantNodes.indexOf(node) === -1) {\n continue;\n }\n for (let i = 0; i < node.inboundLayers.length; ++i) {\n const inboundLayer = node.inboundLayers[i].name;\n const inboundLayerIndex = node.nodeIndices[i];\n const inboundTensorIndex = node.tensorIndices[i];\n connections.push(`${inboundLayer}[${inboundLayerIndex}][${inboundTensorIndex}]`);\n }\n }\n const name = layer.name;\n const className = layer.getClassName();\n const firstConnection = connections.length === 0 ? 
'' : connections[0];\n const fields = [\n `${name} (${className})`, outputShape, layer.countParams().toString(),\n firstConnection\n ];\n printRow(fields, positions, printFn);\n for (let i = 1; i < connections.length; ++i) {\n printRow(['', '', '', connections[i]], positions, printFn);\n }\n}\n//# sourceMappingURL=layer_utils.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport * as generic_utils from '../utils/generic_utils';\n// tslint:enable\n/**\n * Test whether a value in an array is the name of a LayersModel or Layer.\n * @param key The key name that the value is found under. Note that the key\n * may not be at the level immediately above the value, if the value is in a\n * nested array.\n * @param index Index of the value in the Array that it is found in.\n * @param value The value object.\n * @returns A boolean indicating whether value is a name.\n */\nfunction isArrayItemInputOrOutputName(key, index, value) {\n return (key === 'inboundNodes' || key === 'outputLayers' ||\n key === 'inputLayers') &&\n index === 0 && typeof value === 'string';\n}\n/**\n * Convert a Pythonic config object to TypeScript config object.\n * @param pythonicConfig The config object to convert.\n * @param key Optional key name of the object being converted.\n * @returns Result of the conversion.\n */\nexport function convertPythonicToTs(pythonicConfig, key) {\n if (pythonicConfig === null) {\n return null;\n }\n else if (typeof pythonicConfig === 'string') {\n return generic_utils.toCamelCase(pythonicConfig);\n }\n else if ((typeof pythonicConfig === 'number') ||\n (typeof pythonicConfig === 'boolean')) {\n return pythonicConfig;\n }\n else if (pythonicConfig instanceof Array) {\n const tsArray = [];\n const arrayLength = pythonicConfig.length;\n for (let i = 0; i < arrayLength; ++i) {\n const item = pythonicConfig[i];\n if (isArrayItemInputOrOutputName(key, i, item)) {\n tsArray.push(item);\n }\n else {\n tsArray.push(convertPythonicToTs(item, key));\n }\n }\n return tsArray;\n }\n else {\n const tsDict = {};\n for (const pythonicKey of Object.keys(pythonicConfig)) {\n const pythonicValue = pythonicConfig[pythonicKey];\n if (pythonicKey === 'name' && typeof pythonicValue === 'string') {\n // Special case the 'name' key with a string value. 
Name values, such as\n // the names of LayersModel and Layer instances, should not undergo the\n // camel-case conversion.\n tsDict[pythonicKey] = pythonicValue;\n }\n else {\n const tsKey = generic_utils.toCamelCase(pythonicKey);\n tsDict[tsKey] = convertPythonicToTs(pythonicValue, tsKey);\n }\n }\n return tsDict;\n }\n}\n/**\n * Convert a TypeScript config object to Python config object.\n * @param tsConfig The config object to convert.\n * @param key Optional key name of the object being converted.\n * @returns Result of the conversion.\n */\nexport function convertTsToPythonic(tsConfig, key) {\n if (tsConfig === null || tsConfig === undefined) {\n return null;\n }\n else if (typeof tsConfig === 'string') {\n return generic_utils.toSnakeCase(tsConfig);\n }\n else if ((typeof tsConfig === 'number') || (typeof tsConfig === 'boolean')) {\n return tsConfig;\n }\n else if (tsConfig instanceof Array) {\n const pyArray = [];\n const arrayLength = tsConfig.length;\n for (let i = 0; i < arrayLength; ++i) {\n const item = tsConfig[i];\n if (isArrayItemInputOrOutputName(key, i, item)) {\n pyArray.push(item);\n }\n else {\n pyArray.push(convertTsToPythonic(item, key));\n }\n }\n return pyArray;\n }\n else {\n const pyDict = {};\n for (const tsKey of Object.keys(tsConfig)) {\n const tsValue = tsConfig[tsKey];\n const pyKey = generic_utils.toSnakeCase(tsKey);\n if ((tsKey === 'name' || tsKey === 'className') &&\n typeof tsValue === 'string') {\n // Special case the 'name' key with a string value. Name values, such as\n // the names of LayersModel and Layer instances, should not undergo the\n // snake-case conversion.\n pyDict[pyKey] = tsValue;\n }\n else {\n pyDict[pyKey] = convertTsToPythonic(tsValue, tsKey);\n }\n }\n return pyDict;\n }\n}\n//# sourceMappingURL=serialization_utils.js.map", "/** @license See the LICENSE file. */\n// This code is auto-generated, do not modify this file!\nconst version = '2.7.0';\nexport { version };\n//# sourceMappingURL=version.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Executor: Evaluates SymbolicTensor based on feeds.\n */\nimport { cast, dispose, memory, util } from '@tensorflow/tfjs-core';\nimport { ValueError } from '../errors';\nimport { toList } from '../utils/generic_utils';\nimport { InputLayer } from './input_layer';\nimport { SymbolicTensor } from './topology';\n/**\n * Helper function to check the dtype and shape compatibility of a feed value.\n */\nfunction assertFeedCompatibility(key, val) {\n // Check dtype compatibility.\n if (key.dtype == null || key.dtype === val.dtype) {\n // a. If types match, return val tensor as is.\n return val;\n }\n try {\n // b. Attempt to convert to expected type.\n return cast(val, key.dtype);\n }\n catch (err) {\n // c. 
If conversion fails, return helpful error.\n throw new ValueError(`The dtype of the feed (${val.dtype}) can not be cast to the dtype ` +\n `of the key '${key.name}' (${key.dtype}).`);\n }\n}\n/**\n * FeedDict: A mapping from unique SymbolicTensors to feed values for them.\n * A feed value is a concrete value represented as an `Tensor`.\n */\nexport class FeedDict {\n /**\n * Constructor, optionally does copy-construction.\n * @param feeds An Array of `Feed`s, or another `FeedDict`, in which case\n * copy-construction will be performed.\n */\n constructor(feeds) {\n this.id2Value = {};\n this.id2Mask = {};\n this.name2Id = {};\n if (feeds instanceof FeedDict) {\n for (const id in feeds.id2Value) {\n this.id2Value[id] = feeds.id2Value[id];\n if (id in feeds.id2Mask) {\n this.id2Mask[id] = feeds.id2Mask[id];\n }\n }\n }\n else {\n if (feeds == null) {\n return;\n }\n for (const feed of feeds) {\n this.add(feed.key, feed.value);\n }\n }\n }\n /**\n * Add a key-value pair to the FeedDict.\n *\n * @param key The key of the feed.\n * @param value The value of the tensor feed.\n * @param mask The value of the mask feed (optional).\n * @returns This `FeedDict`.\n * @throws ValueError: If the key `SymbolicTensor` already exists in the\n * `FeedDict`.\n */\n add(key, value, mask) {\n if (this.id2Value[key.id] == null) {\n this.id2Value[key.id] = assertFeedCompatibility(key, value);\n this.name2Id[key.name] = key.id;\n if (mask != null) {\n this.id2Mask[key.id] = mask;\n }\n }\n else {\n throw new ValueError(`Duplicate key: name=${key.name}, id=${key.id}`);\n }\n return this;\n }\n /**\n * Add a Feed to the FeedDict.\n * @param feed The new `Feed` to add.\n * @returns This `FeedDict`.\n */\n addFeed(feed) {\n this.add(feed.key, feed.value);\n }\n /**\n * Probe whether a key already exists in the FeedDict.\n * @param key\n */\n hasKey(key) {\n return this.id2Value[key.id] != null;\n }\n /**\n * Get all the SymbolicTensor available in this FeedDict.\n */\n names() {\n return Object.keys(this.name2Id);\n }\n /**\n * Get the feed value for given key.\n * @param key The SymbolicTensor, or its name (as a string), of which the\n * value is sought.\n * @returns If `key` exists, the corresponding feed value.\n * @throws ValueError: If `key` does not exist in this `FeedDict`.\n */\n getValue(key) {\n if (key instanceof SymbolicTensor) {\n if (this.id2Value[key.id] == null) {\n throw new ValueError(`Nonexistent key: ${key.name}`);\n }\n else {\n return this.id2Value[key.id];\n }\n }\n else {\n const id = this.name2Id[key];\n if (id == null) {\n throw new ValueError(`Feed dict has no SymbolicTensor name: ${key}`);\n }\n return this.id2Value[id];\n }\n }\n /**\n * Get the feed mask for given key.\n * @param key The SymbolicTensor, or its name (as a string), of which the\n * value is sought.\n * @returns If `key` exists, the corresponding feed mask.\n * @throws ValueError: If `key` does not exist in this `FeedDict`.\n */\n getMask(key) {\n if (key instanceof SymbolicTensor) {\n if (this.id2Value[key.id] == null) {\n throw new ValueError(`Nonexistent key: ${key.name}`);\n }\n else {\n return this.id2Mask[key.id];\n }\n }\n else {\n const id = this.name2Id[key];\n if (id == null) {\n throw new ValueError(`Feed dict has no SymbolicTensor name: ${key}`);\n }\n return this.id2Mask[id];\n }\n }\n /** Dispose all mask Tensors held by this object. 
*/\n disposeMasks() {\n if (this.id2Mask != null) {\n dispose(this.id2Mask);\n }\n }\n}\n// Cache for topologically sorted SymbolicTensors for given execution\n// targets (i.e., fetches).\nconst cachedSorted = {};\n// Cache for recipient count maps for given execution targets (i.e., fetches).\nconst cachedRecipientCounts = {};\n/**\n * Execute a SymbolicTensor by using concrete feed values.\n *\n * A `SymbolicTensor` object is a node in a computation graph of TF.js\n * Layers. The object is backed by a source layer and input\n * `SymbolicTensor`s to the source layer. This method evaluates\n * the `call()` method of the source layer, using concrete values of the\n * inputs obtained from either\n * * `feedDict`, if the input key exists in `feedDict`, or else,\n * * a recursive call to `execute()` itself.\n *\n * @param x: The `SymbolicTensor` to execute.\n * @param feedDict: The feed values, as base condition of the recursion.\n * execution.\n * @param kwargs: Optional keyword arguments.\n * @param probe: A probe object (of interface `ExecutionProbe`) used for\n * testing memory footprint of `execute` calls.\n * @returns Result of the execution.\n * @throws ValueError: If any `SymbolicTensor`s from `InputLayer`s\n * encountered during the execution lacks a feed value in `feedDict`.\n */\nexport function execute(fetches, feedDict, kwargs, probe) {\n const training = kwargs == null ? false : kwargs['training'];\n const arrayFetches = Array.isArray(fetches);\n const fetchArray = arrayFetches ? fetches : [fetches];\n const outputNames = fetchArray.map(t => t.name);\n const finalOutputs = [];\n const feedNames = feedDict.names();\n for (const outputName of outputNames) {\n if (feedNames.indexOf(outputName) !== -1) {\n finalOutputs.push(feedDict.getValue(outputName));\n }\n else {\n finalOutputs.push(null);\n }\n }\n if (probe != null) {\n // For optional probing of memory footprint during execution.\n probe.maxNumTensors = -Infinity;\n probe.minNumTensors = Infinity;\n }\n // Check cache.\n const fetchAndFeedKey = outputNames.join(',') + '|' + feedDict.names().join(',');\n let sorted;\n let recipientCounts;\n if (cachedSorted[fetchAndFeedKey] == null) {\n // Cache doesn't contain the desired combination of fetches. 
Compute\n // topological sort for the combination for the first time.\n const out = getTopologicalSortAndRecipientCounts(fetchArray, feedDict);\n sorted = out.sorted;\n recipientCounts = out.recipientCounts;\n // Store results in cache for future use.\n cachedSorted[fetchAndFeedKey] = sorted;\n cachedRecipientCounts[fetchAndFeedKey] = recipientCounts;\n }\n sorted = cachedSorted[fetchAndFeedKey];\n recipientCounts = {};\n if (!training) {\n Object.assign(recipientCounts, cachedRecipientCounts[fetchAndFeedKey]);\n }\n const internalFeedDict = new FeedDict(feedDict);\n // Start iterative execution on the topologically-sorted SymbolicTensors.\n for (let i = 0; i < sorted.length; ++i) {\n if (probe != null) {\n // For optional probing of memory usage during execution.\n const numTensors = memory().numTensors;\n if (numTensors > probe.maxNumTensors) {\n probe.maxNumTensors = numTensors;\n }\n if (numTensors < probe.minNumTensors) {\n probe.minNumTensors = numTensors;\n }\n }\n const symbolic = sorted[i];\n const srcLayer = symbolic.sourceLayer;\n if (srcLayer instanceof InputLayer) {\n continue;\n }\n const inputValues = [];\n const inputMasks = [];\n const tensorsToDispose = [];\n let maskExists = false;\n for (const input of symbolic.inputs) {\n const value = internalFeedDict.getValue(input);\n const mask = internalFeedDict.getMask(input);\n inputValues.push(value);\n inputMasks.push(mask);\n if (mask != null) {\n maskExists = true;\n }\n if (!training) {\n recipientCounts[input.name]--;\n if (recipientCounts[input.name] === 0 && !feedDict.hasKey(input) &&\n outputNames.indexOf(input.name) === -1 && !value.isDisposed &&\n input.sourceLayer.stateful !== true) {\n tensorsToDispose.push(value);\n }\n }\n }\n if (maskExists) {\n kwargs = kwargs || {};\n kwargs['mask'] = inputMasks[0];\n }\n const outputTensors = toList(srcLayer.apply(inputValues, kwargs));\n let outputMask = null;\n if (srcLayer.supportsMasking) {\n outputMask = srcLayer.computeMask(inputValues, inputMasks);\n }\n const layerOutputs = getNodeOutputs(symbolic);\n const outputSymbolicTensors = Array.isArray(layerOutputs) ? layerOutputs : [layerOutputs];\n for (let i = 0; i < outputSymbolicTensors.length; ++i) {\n if (!internalFeedDict.hasKey(outputSymbolicTensors[i])) {\n internalFeedDict.add(outputSymbolicTensors[i], outputTensors[i], Array.isArray(outputMask) ? outputMask[0] : outputMask);\n }\n const index = outputNames.indexOf(outputSymbolicTensors[i].name);\n if (index !== -1) {\n finalOutputs[index] = outputTensors[i];\n }\n }\n if (!training) {\n // Clean up Tensors that are no longer needed.\n dispose(tensorsToDispose);\n }\n }\n // NOTE(cais): Unlike intermediate tensors, we don't discard mask\n // tensors as we go, because these tensors are sometimes passed over a\n // series of mutliple layers, i.e., not obeying the immediate input\n // relations in the graph. If this becomes a memory-usage concern,\n // we can improve this in the future.\n internalFeedDict.disposeMasks();\n return arrayFetches ? finalOutputs : finalOutputs[0];\n}\n/**\n * Sort the `SymbolicTensor`s topologically, for an array of fetches.\n *\n * This function calls getTopologicalSortAndRecipientCountsForOneFetch and\n * merges their results.\n *\n * @param fetch The array of fetches requested. 
Must be a non-empty array.\n * @param feedDict The dictionary of fed values.\n * @returns sorted: Topologically-sorted array of SymbolicTensors.\n * recipientCounts: Recipient counts for all SymbolicTensors in `sorted`.\n */\nfunction getTopologicalSortAndRecipientCounts(fetches, feedDict) {\n util.assert(fetches != null && fetches.length > 0, () => `Expected at least one fetch, got none`);\n let finalSorted = [];\n let finalRecipientMap = {};\n if (fetches.length === 1) {\n // Special-casing 1 fetch for efficiency.\n const out = getTopologicalSortAndRecipientCountsForOneFetch(fetches[0], feedDict);\n finalSorted = out.sorted;\n finalRecipientMap = out.recipientMap;\n }\n else {\n const visited = new Set();\n for (const fetch of fetches) {\n const { sorted, recipientMap } = getTopologicalSortAndRecipientCountsForOneFetch(fetch, feedDict);\n // Merge sorted SymbolicTensor Arrays.\n for (const symbolicTensor of sorted) {\n if (!visited.has(symbolicTensor.name)) {\n finalSorted.push(symbolicTensor);\n visited.add(symbolicTensor.name);\n }\n }\n // Merge recipient maps.\n for (const name in recipientMap) {\n if (finalRecipientMap[name] == null) {\n finalRecipientMap[name] = new Set();\n }\n recipientMap[name].forEach(recipient => finalRecipientMap[name].add(recipient));\n }\n }\n }\n return {\n sorted: finalSorted,\n recipientCounts: recipientMap2Counts(finalRecipientMap)\n };\n}\nfunction recipientMap2Counts(recipientMap) {\n const recipientCounts = {};\n for (const name in recipientMap) {\n recipientCounts[name] = recipientMap[name].size;\n }\n return recipientCounts;\n}\n/**\n * Sort the `SymbolicTensor`s topologically, for a single fetch.\n *\n * This helper function processes the upstream SymbolicTensors of a single\n * fetch.\n *\n * @param fetch The single fetch requested.\n * @param feedDict The dictionary of fed values.\n * @returns sorted: Topologically-sorted array of SymbolicTensors.\n * recipientMap: Recipient names for all SymbolicTensors in `sorted`.\n */\nexport function getTopologicalSortAndRecipientCountsForOneFetch(fetch, feedDict) {\n const visited = new Set();\n const sorted = [];\n const recipientMap = {};\n // Put keys of the feedDict into visited first, so they don't have to be\n // walked. This is needed in case where there are feeds for intermediate\n // SymbolicTensors of the graph.\n for (const key of feedDict.names()) {\n visited.add(key);\n }\n const stack = [];\n const marks = [];\n // Initial population of stack and marks.\n stack.push(fetch);\n while (stack.length > 0) {\n const top = stack[stack.length - 1];\n if (visited.has(top.name)) {\n stack.pop();\n continue;\n }\n const topIsMarked = marks[marks.length - 1] === stack.length - 1;\n if (top.inputs.length === 0 || topIsMarked) {\n // Input SymbolicTensor or all children have been visited.\n stack.pop();\n sorted.push(top);\n visited.add(top.name);\n if (topIsMarked) {\n marks.pop();\n }\n }\n else {\n // A non-input SymbolicTensor whose upstream SymbolicTensors haven't\n // been visited yet. Push them onto the stack.\n marks.push(stack.length - 1);\n for (const input of top.inputs) {\n // Increment the recipient count. 
Note that this needs to happen\n // regardless of whether the SymbolicTensor has been visited before.\n if (recipientMap[input.name] == null) {\n recipientMap[input.name] = new Set();\n }\n recipientMap[input.name].add(top.name);\n if (visited.has(input.name)) {\n continue; // Avoid repeated visits to the same SymbolicTensor.\n }\n stack.push(input);\n }\n }\n }\n return { sorted, recipientMap };\n}\n/**\n * Get the symbolic output tensors of the node to which a given fetch belongs.\n * @param fetch The fetched symbolic tensor.\n * @returns The Array of symbolic tensors output by the node to which `fetch`\n * belongs.\n */\nfunction getNodeOutputs(fetch) {\n let layerOutputs;\n if (fetch.sourceLayer.inboundNodes.length === 1) {\n layerOutputs = fetch.sourceLayer.output;\n }\n else {\n let nodeIndex = null;\n for (let i = 0; i < fetch.sourceLayer.inboundNodes.length; ++i) {\n for (const outputTensor of fetch.sourceLayer.inboundNodes[i]\n .outputTensors) {\n if (outputTensor.id === fetch.id) {\n nodeIndex = i;\n break;\n }\n }\n }\n layerOutputs = fetch.sourceLayer.getOutputAt(nodeIndex);\n }\n return layerOutputs;\n}\n//# sourceMappingURL=executor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* Original source: keras/engine/topology.py */\nimport { tidy } from '@tensorflow/tfjs-core';\nimport { getUid } from '../backend/state';\nimport { NotImplementedError, RuntimeError, ValueError } from '../errors';\nimport { deserialize as deserializeLayer } from '../layers/serialization';\nimport * as generic_utils from '../utils/generic_utils';\nimport { convertTsToPythonic } from '../utils/serialization_utils';\nimport * as types_utils from '../utils/types_utils';\nimport { batchSetValue } from '../variables';\nimport { version as layersVersion } from '../version';\nimport { execute, FeedDict } from './executor';\nimport { InputLayer } from './input_layer';\nimport { Layer, Node } from './topology';\n/**\n * A Container is a directed acyclic graph of layers.\n *\n * It is the topological form of a \"model\". A LayersModel\n * is simply a Container with added training routines.\n *\n */\nexport class Container extends Layer {\n constructor(args) {\n // No args passed to super's constructor.\n super({});\n this.containerNodes = new Set();\n this.name = args.name;\n if (this.name == null) {\n const prefix = this.getClassName().toLowerCase();\n this.name = getUid(prefix);\n }\n this.supportsMasking = false;\n this.trainable_ = true;\n // TODO(michaelterry): Initialize perInputLosses/Updates here.\n // Container-specific properties.\n if (Array.isArray(args.inputs)) {\n this.inputs = args.inputs.slice();\n }\n else {\n this.inputs = [args.inputs];\n }\n if (Array.isArray(args.outputs)) {\n this.outputs = args.outputs.slice();\n }\n else {\n this.outputs = [args.outputs];\n }\n // Check for redundancy in inputs.\n if (generic_utils.unique(this.inputs).length !== this.inputs.length) {\n throw new ValueError('The list of inputs passed to the model is ' +\n 'redundant. All inputs should only appear once. Found: ' +\n `${this.inputs.map(x => x.name)}`);\n }\n // Check for redundancy in outputs.\n if (generic_utils.unique(this.outputs).length !== this.outputs.length) {\n console.warn('The list of outputs passed to the model is redundant. 
' +\n 'All outputs should only appear once. Found: ' +\n `${this.outputs.map(x => x.name)}`);\n }\n /*\n List of initial layers (1 to 1 mapping with this.inputs, hence the same\n layer might appear twice)\n */\n this.inputLayers = [];\n this.inputLayersNodeIndices = [];\n this.inputLayersTensorIndices = [];\n /*\n List of layers (1 to 1 mapping with this.outputs, hence the same layer\n might appear twice)\n */\n this.outputLayers = [];\n this.outputLayersNodeIndices = [];\n this.outputLayersTensorIndices = [];\n /*\n All layers in order of horizontal graph traversal. Entries are unique.\n Includes input and output layers.\n */\n this.layers = [];\n /*\n References to container layers that were constructed internally. We need\n these to properly dispose of tensors from nested containers.\n */\n this.internalContainerRefs = [];\n // TODO(michaelterry): Determine if caching still needed with eager\n // backend.\n /*\n This is for performance optimization when calling the Container on new\n inputs. Every time the Container is called on a set on input tensors,\n we compute the output tensors, output masks and output shapes in one pass,\n then cache them here. When one of these outputs is queried later,\n we retrieve it from there instead of recomputing it.\n */\n // this.outputTensorCache = {};\n // this.outputShapeCache = {};\n // Build this.outputLayers:\n for (const x of this.outputs) {\n const layer = x.sourceLayer;\n const nodeIndex = x.nodeIndex;\n const tensorIndex = x.tensorIndex;\n this.outputLayers.push(layer);\n this.outputLayersNodeIndices.push(nodeIndex);\n this.outputLayersTensorIndices.push(tensorIndex);\n }\n // TODO(michaelterry): Add output mask cache code.\n // Build this.inputLayers:\n for (const x of this.inputs) {\n const layer = x.sourceLayer;\n const nodeIndex = x.nodeIndex;\n const tensorIndex = x.tensorIndex;\n /*\n It's supposed to be an input layer, so only one node\n and one tensor output.\n */\n generic_utils.assert(nodeIndex === 0, 'input layer has >1 nodes');\n generic_utils.assert(tensorIndex === 0, 'input layer has >1 tensors');\n this.inputLayers.push(layer);\n this.inputLayersNodeIndices.push(nodeIndex);\n this.inputLayersTensorIndices.push(tensorIndex);\n }\n // Build this.inputNames and this.outputNames.\n this.inputNames = [];\n this.outputNames = [];\n this.feedInputShapes = [];\n this.feedInputNames = [];\n this.feedOutputNames = [];\n for (let i = 0; i < this.inputLayers.length; i++) {\n const layer = this.inputLayers[i];\n // Check that layer is an InputLayer.\n if (!(layer instanceof InputLayer)) {\n throw new TypeError('Input layers to a LayersModel must be InputLayer objects. ' +\n `Received inputs: ${args.inputs}. 
` +\n `Input ${i} (0-based) originates ` +\n `from layer type ${layer.getClassName()}.`);\n }\n this.inputNames.push(layer.name);\n this.feedInputShapes.push(layer.batchInputShape);\n this.feedInputNames.push(layer.name);\n }\n for (const layer of this.outputLayers) {\n this.outputNames.push(layer.name);\n }\n this.internalInputShapes = this.inputs.map(x => x.shape);\n this.internalOutputShapes = this.outputs.map(x => x.shape);\n /*\n Container_nodes: set of nodes included in the graph (not all nodes\n included in the layers are relevant to the current graph).\n */\n // ids of all nodes relevant to the Container:\n const nodesDepths = {};\n // To recover nodes from their ID.\n const nodeIDToNode = {};\n const layersDepths = {};\n // To layers from their ID.\n const layerIDToLayer = {};\n const layerIndices = {};\n const nodesInDecreasingDepth = [];\n /**\n * Builds a map of the graph of layers.\n *\n * This recursively updates the map `layerIndices`,\n * the list `nodesInDecreasingDepth` and the set `containerNodes`.\n *\n * @param tensor Some tensor in a graph.\n * @param finishedNodes Set of nodes whose subgraphs have been traversed\n * completely. Useful to prevent duplicated work.\n * @param nodesInProgress Set of nodes that are currently active on the\n * recursion stack. Useful to detect cycles.\n * @param layer Layer from which `tensor` comes from. If not provided,\n * will be obtained from tensor.sourceLayer.\n * @param nodeIndex Node index from which `tensor` comes from.\n * @param tensorIndex TensorIndex from which `tensor` comes from.\n *\n * @exception RuntimeError if a cycle is detected.\n */\n const buildMapOfGraph = (tensor, finishedNodes, nodesInProgress, layer, nodeIndex, tensorIndex) => {\n if (layer == null || nodeIndex == null || tensorIndex == null) {\n layer = tensor.sourceLayer;\n nodeIndex = tensor.nodeIndex;\n tensorIndex = tensor.tensorIndex;\n }\n const node = layer.inboundNodes[nodeIndex];\n // Prevent cycles.\n if (nodesInProgress.indexOf(node) !== -1) {\n throw new RuntimeError(`The tensor ${tensor.name} at layer \"${layer.name}\" ` +\n 'is part of a cycle.');\n }\n // Don't repeat work for shared subgraphs\n if (finishedNodes.indexOf(node) !== -1) {\n return;\n }\n // Update containerNodes.\n this.containerNodes.add(Container.nodeKey(layer, nodeIndex));\n // Store the traversal order for layer sorting.\n if (!(layer.id in layerIndices)) {\n layerIndices[layer.id] = Object.keys(layerIndices).length;\n }\n if (nodesInProgress.indexOf(node) === -1) {\n nodesInProgress.push(node);\n }\n // Propagate to all previous tensors connected to this node.\n const numInboundLayers = node.inboundLayers.length;\n for (let i = 0; i < numInboundLayers; i++) {\n const x = node.inputTensors[i];\n const layer = node.inboundLayers[i];\n const nodeIndex = node.nodeIndices[i];\n const tensorIndex = node.tensorIndices[i];\n buildMapOfGraph(x, finishedNodes, nodesInProgress, layer, nodeIndex, tensorIndex);\n }\n finishedNodes.push(node);\n while (nodesInProgress.indexOf(node) >= 0) {\n nodesInProgress.splice(nodesInProgress.indexOf(node), 1);\n }\n nodesInDecreasingDepth.push(node);\n };\n const finishedNodes = [];\n const nodesInProgress = [];\n for (const x of this.outputs) {\n buildMapOfGraph(x, finishedNodes, nodesInProgress);\n }\n const reversedNodesInDecreasingDepth = nodesInDecreasingDepth.slice().reverse();\n for (const node of reversedNodesInDecreasingDepth) {\n nodeIDToNode[node.id] = node;\n // If the depth is not set, the node has no outbound nodes (depth 0).\n if 
(!(node.id in nodesDepths)) {\n nodesDepths[node.id] = 0;\n }\n let depth = nodesDepths[node.id];\n // Update the depth of the corresponding layer\n const previousDepth = (layersDepths[node.outboundLayer.id] == null ?\n 0 :\n layersDepths[node.outboundLayer.id]);\n /*\n If we've seen this layer before at a higher depth, we should use that\n depth instead of the node depth. This is necessary for shared layers\n that have inputs at different depth levels in the graph.\n */\n depth = Math.max(depth, previousDepth);\n layersDepths[node.outboundLayer.id] = depth;\n layerIDToLayer[node.outboundLayer.id] = node.outboundLayer;\n nodesDepths[node.id] = depth;\n // Update the depth of inbound nodes.\n for (let i = 0; i < node.inboundLayers.length; i++) {\n const inboundLayer = node.inboundLayers[i];\n const nodeIndex = node.nodeIndices[i];\n const inboundNode = inboundLayer.inboundNodes[nodeIndex];\n const previousDepth = (nodesDepths[inboundNode.id] == null ? 0 :\n nodesDepths[inboundNode.id]);\n nodesDepths[inboundNode.id] = Math.max(depth + 1, previousDepth);\n nodeIDToNode[inboundNode.id] = inboundNode;\n }\n }\n // Build a dict {depth: list of nodes with this depth}\n const nodesByDepth = {};\n for (const nodeID in nodesDepths) {\n const depth = nodesDepths[nodeID];\n if (!(depth in nodesByDepth)) {\n nodesByDepth[depth] = [];\n }\n nodesByDepth[depth].push(nodeIDToNode[nodeID]);\n }\n // Build a dict {depth: list of layers with this depth}\n const layersByDepth = {};\n for (const layerID in layersDepths) {\n const depth = layersDepths[layerID];\n if (!(depth in layersByDepth)) {\n layersByDepth[depth] = [];\n }\n layersByDepth[depth].push(layerIDToLayer[layerID]);\n }\n // Get sorted list of layer depths.\n let depthKeys = Object.keys(layersByDepth)\n .map(x => parseInt(x, 10))\n .sort(generic_utils.reverseNumberCompare);\n // Set this.layers and this.layersByDepth.\n this.layers = [];\n for (const depth of depthKeys) {\n const layersForDepth = layersByDepth[depth];\n // Container.layers needs to have a deterministic order:\n // here we order them by traversal order.\n layersForDepth.sort((a, b) => {\n const aIndex = layerIndices[a.id];\n const bIndex = layerIndices[b.id];\n if (aIndex < bIndex) {\n return -1;\n }\n if (aIndex > bIndex) {\n return 1;\n }\n return 0;\n });\n for (const layer of layersForDepth) {\n if (layer instanceof Container) {\n this.internalContainerRefs.push(layer);\n }\n this.layers.push(layer);\n }\n }\n this.layersByDepth = layersByDepth;\n // Get sorted list of node depths;\n depthKeys = Object.keys(nodesByDepth)\n .map(x => parseInt(x, 10))\n .sort(generic_utils.reverseNumberCompare);\n // Check that all tensors required are computable.\n // computable_tensors: all tensors in the graph\n // that can be computed from the inputs provided.\n const computableTensors = this.inputs.slice();\n // To provide a better error msg.\n const layersWithCompleteInput = [];\n for (const depth of depthKeys) {\n for (const node of nodesByDepth[depth]) {\n const layer = node.outboundLayer;\n if (layer != null) {\n for (const x of node.inputTensors) {\n if (computableTensors.indexOf(x) === -1) {\n throw new RuntimeError(`Graph disconnected: cannot obtain value for tensor ${x}` +\n ` at layer \"${layer.name}\". 
` +\n 'The following previous layers were accessed without ' +\n `issue: ${layersWithCompleteInput}`);\n }\n }\n for (const x of node.outputTensors) {\n computableTensors.push(x);\n }\n layersWithCompleteInput.push(layer.name);\n }\n }\n }\n // Set this.containerNodes and this.nodesByDepth.\n this.nodesByDepth = nodesByDepth;\n // Ensure name unicity, which will be crucial for serialization\n // (since serialized nodes refer to layers by their name).\n const allNames = this.layers.map(x => x.name);\n for (const name of allNames) {\n const numOccurrences = allNames.filter(x => x === name).length;\n if (numOccurrences !== 1) {\n throw new RuntimeError(`The name \"${name}\" is used ${numOccurrences} times ` +\n 'in the model. All layer names should be unique. Layer names: ' +\n JSON.stringify(allNames));\n }\n }\n // Layer parameters.\n // The new container starts with a single inbound node\n // for its inputs, and no outbound nodes.\n // Will be appended to by future calls to apply().\n this.outboundNodes = [];\n // Will be appended to below, and by future calls to apply().\n this.inboundNodes = [];\n // Create the node linking internal inputs to internal outputs.\n // (This call has side effects.)\n // tslint:disable-next-line:no-unused-expression\n new Node({\n outboundLayer: this,\n inboundLayers: [],\n nodeIndices: [],\n tensorIndices: [],\n inputTensors: this.inputs,\n outputTensors: this.outputs,\n inputMasks: this.inputs.map(x => null),\n outputMasks: this.outputs.map(x => null),\n inputShapes: this.inputs.map(x => x.shape),\n outputShapes: this.outputs.map(x => x.shape)\n });\n this.built = true;\n this._refCount = 1; // The ref count of a container always start at 1.\n }\n assertNotDisposed() {\n if (this._refCount === 0) {\n throw new Error(`Container '${this.name}' is already disposed.`);\n }\n }\n /**\n * Attempt to dispose a LayersModel's weights.\n *\n * This method decrease the reference count of the LayersModel object by 1.\n *\n * A LayersModel is reference-counted. 
Its reference count is incremented by 1\n * when it is first constructed and when it is used as a Layer of another\n * LayersModel.\n *\n * If the reference count of a LayersModel becomes 0, the `dispose` method of\n * all its constituent `Layer`s will be called.\n *\n * Note: If the reference count is greater than 0 after the decrement, the\n * `dispose` method of its constituent `Layer`s will *not* be called.\n *\n * After a LayersModel is disposed, it cannot be used in calls such as\n * 'predict`, `evaluate` or `fit` anymore.\n *\n * @returns A DisposeResult Object with the following fields:\n * - refCountAfterDispose: The reference count of the LayersModel after this\n * `dispose()` call.\n * - numDisposedVariables: Number of `tf.Variable`s (i.e., weights) disposed\n * during this `dispose()` call.\n * @throws {Error} If the layer is not built yet, or if the LayersModel has\n * already been disposed.\n */\n dispose() {\n this.assertNotDisposed();\n const result = { refCountAfterDispose: null, numDisposedVariables: 0 };\n if (--this._refCount === 0) {\n for (const layer of this.layers) {\n result.numDisposedVariables += layer.dispose().numDisposedVariables;\n }\n // Call dispose on each internally created container layer again to ensure\n // their refCounts hit zero and their tensors are subsequently deleted.\n for (const container of this.internalContainerRefs) {\n result.numDisposedVariables += container.dispose().numDisposedVariables;\n }\n }\n result.refCountAfterDispose = this._refCount;\n return result;\n }\n get trainable() {\n return this.trainable_;\n }\n set trainable(trainable) {\n this.layers.forEach(layer => {\n // tslint:disable-next-line:no-any\n layer._trainableWeights\n .forEach(w => w.trainable = trainable);\n });\n this.trainable_ = trainable;\n }\n get trainableWeights() {\n // Porting Note: This check below is to prevent errors where the\n // _trainableWeights inherited from the parent class (Layer) gets\n // inadvertently used.\n if (this._trainableWeights.length > 0) {\n throw new ValueError('Container instance unexpectedly contains _trainableWeights.' +\n 'The trainable weights of a Container are a union of the ' +\n 'trainable weights of its consituent Layers. Its own ' +\n '_trainableWeights must remain an empty Array.');\n }\n if (!this.trainable) {\n return [];\n }\n let weights = [];\n for (const layer of this.layers) {\n weights = weights.concat(layer.trainableWeights);\n }\n return weights;\n }\n get nonTrainableWeights() {\n const weights = [];\n for (const layer of this.layers) {\n weights.push(...layer.nonTrainableWeights);\n }\n if (!this.trainable) {\n const trainableWeights = [];\n for (const layer of this.layers) {\n trainableWeights.push(...layer.trainableWeights);\n }\n return trainableWeights.concat(weights);\n }\n return weights;\n }\n get weights() {\n return this.trainableWeights.concat(this.nonTrainableWeights);\n }\n /**\n * Loads all layer weights from a JSON object.\n *\n * Porting Note: HDF5 weight files cannot be directly loaded in JavaScript /\n * TypeScript. 
The utility script at `scripts/pykeras.py` offers means\n * to convert them into JSON strings compatible with this method.\n * Porting Note: TensorFlow.js Layers supports only loading by name currently.\n *\n * @param weights A JSON mapping weight names to weight values as nested\n * arrays of numbers, or a `NamedTensorMap`, i.e., a JSON mapping weight\n * names to `tf.Tensor` objects.\n * @param strict Require that the provided weights exactly match those\n * required by the container. Default: `true`. Passing `false` means that\n * extra weights and missing weights will be silently ignored.\n */\n loadWeights(weights, strict = true) {\n const nameToWeight = {};\n let totalWeightsCount = 0;\n for (const layer of this.layers) {\n for (const weight of layer.weights) {\n if (nameToWeight[weight.originalName] != null) {\n throw new ValueError(`Duplicate weight name: ${weight.originalName}`);\n }\n nameToWeight[weight.originalName] = weight;\n totalWeightsCount++;\n }\n }\n const weightValueTuples = [];\n for (const name in weights) {\n // TF 2.2.0 added cell name to the weight name in the format of\n // layer_name/cell_name/weight_name, we need to remove\n // the inner cell name.\n let validatedName = name;\n if (nameToWeight[name] == null) {\n const tokens = name.split('/');\n const shortenNameArray = tokens.slice(0, -2).concat([tokens[tokens.length - 1]]);\n validatedName = shortenNameArray.join('/');\n }\n if (nameToWeight[validatedName] != null) {\n weightValueTuples.push([nameToWeight[validatedName], weights[name]]);\n }\n else if (strict) {\n throw new ValueError(`Provided weight data has no target variable: ${name}`);\n }\n delete nameToWeight[validatedName];\n }\n if (strict) {\n // Check that all weights are set.\n const unsetNames = [];\n for (const name in nameToWeight) {\n unsetNames.push(name);\n }\n if (unsetNames.length > 0) {\n throw new ValueError(`${unsetNames.length} of ${totalWeightsCount} weights are not set: ` +\n `${unsetNames}`);\n }\n }\n batchSetValue(weightValueTuples);\n }\n /**\n * Util shared between different serialization methods.\n * @returns LayersModel config with Keras version information added.\n */\n updatedConfig() {\n const theConfig = this.getConfig();\n const modelConfig = {};\n modelConfig['className'] = this.getClassName();\n modelConfig['config'] = theConfig;\n modelConfig['kerasVersion'] = `tfjs-layers ${layersVersion}`;\n // TODO(nielsene): Replace something like K.backend() once\n // possible.\n modelConfig['backend'] = 'TensorFlow.js';\n return modelConfig;\n }\n /**\n * Returns a JSON string containing the network configuration.\n *\n * To load a network from a JSON save file, use\n * models.modelFromJSON(jsonString);\n * @param extraJsonArgs Unused in tfjs-layers, maintained for PyKeras\n * @param returnString Whether the return value should be stringified\n * (default: `true`).\n * @returns a JSON string if `returnString` (default), or a JSON object if\n * `!returnString`.\n */\n // tslint:disable-next-line:no-any\n toJSON(unused, returnString = true) {\n const modelConfig = convertTsToPythonic(this.updatedConfig());\n return returnString ? JSON.stringify(modelConfig) : modelConfig;\n }\n /**\n * Call the model on new inputs.\n *\n * In this case `call` just reapplies all ops in the graph to the new inputs\n * (e.g. build a new computational graph from the provided inputs).\n *\n * @param inputs A tensor or list of tensors.\n * @param mask A mask or list of masks. 
A mask can be either a tensor or null\n * (no mask).\n *\n * @return A tensor if there is a single output, or a list of tensors if there\n * are more than one outputs.\n */\n call(inputs, kwargs) {\n return tidy(() => {\n inputs = generic_utils.toList(inputs);\n const feedDict = new FeedDict();\n for (let i = 0; i < this.inputs.length; ++i) {\n feedDict.add(this.inputs[i], inputs[i]);\n }\n return execute(this.outputs, feedDict, kwargs);\n });\n }\n /**\n * Computes an output mask tensor.\n *\n * @param inputs Tensor or list of tensors.\n * @param mask Tensor or list of tensors.\n *\n * @return null or a tensor (or list of tensors, one per output tensor of the\n * layer).\n */\n computeMask(inputs, mask) {\n return tidy(() => {\n inputs = generic_utils.toList(inputs);\n let masks;\n if (mask == null) {\n masks = generic_utils.pyListRepeat(null, inputs.length);\n }\n else {\n masks = generic_utils.toList(mask);\n }\n // TODO(michaelterry): Add support for mask caching.\n return this.runInternalGraph(inputs, masks)[1];\n });\n }\n /**\n * Computes the output shape of the layer.\n *\n * Assumes that the layer will be built to match that input shape provided.\n *\n * @param inputShape A shape (tuple of integers) or a list of shape tuples\n * (one per output tensor of the layer). Shape tuples can include null for\n * free dimensions, instead of an integer.\n */\n computeOutputShape(inputShape) {\n const inputShapes = types_utils.normalizeShapeList(inputShape);\n if (inputShapes.length !== this.inputLayers.length) {\n throw new ValueError(`Invalid inputShape argument ${inputShape}: ` +\n `model has ${this.inputLayers.length} tensor inputs.`);\n }\n // TODO(michaelterry): Add caching\n const layersToOutputShapes = {};\n for (let i = 0; i < inputShapes.length; i++) {\n const layer = this.inputLayers[i];\n const inputShape = inputShapes[i];\n // It's an input layer: computeOutputShape is identity,\n // and there is only one node and one tensor output.\n const shapeKey = layer.name + '_0_0';\n layersToOutputShapes[shapeKey] = inputShape;\n }\n const depthKeys = Object.keys(this.nodesByDepth)\n .map(x => parseInt(x, 10))\n .sort(generic_utils.reverseNumberCompare);\n // Iterate over nodes, by depth level.\n if (depthKeys.length > 1) {\n for (const depth of depthKeys) {\n const nodes = this.nodesByDepth[depth];\n for (const node of nodes) {\n // This is always a single layer, never a list.\n const layer = node.outboundLayer;\n if (this.inputLayers.map(x => x.id).indexOf(layer.id) !== -1) {\n // We've already covered the input layers a few lines above.\n continue;\n }\n // Potentially redundant list, same size of node.inputTensors.\n const inputShapes = [];\n for (let j = 0; j < node.inboundLayers.length; j++) {\n const inboundLayer = node.inboundLayers[j];\n const nodeIndex = node.nodeIndices[j];\n const tensorIndex = node.tensorIndices[j];\n const shapeKey = `${inboundLayer.name}_${nodeIndex}_${tensorIndex}`;\n const inputShape = layersToOutputShapes[shapeKey];\n inputShapes.push(inputShape);\n }\n const outputShape = layer.computeOutputShape(generic_utils.singletonOrArray(inputShapes));\n const outputShapes = types_utils.normalizeShapeList(outputShape);\n const nodeIndex = layer.inboundNodes.indexOf(node);\n for (let j = 0; j < outputShapes.length; j++) {\n const shapeKey = `${layer.name}_${nodeIndex}_${j}`;\n layersToOutputShapes[shapeKey] = outputShapes[j];\n }\n }\n }\n }\n // Read final output shapes from layersToOutputShapes.\n const outputShapes = [];\n const outputShapeKeys = [];\n for (let i 
= 0; i < this.outputLayers.length; i++) {\n const layer = this.outputLayers[i];\n const nodeIndex = this.outputLayersNodeIndices[i];\n const tensorIndex = this.outputLayersTensorIndices[i];\n const shapeKey = `${layer.name}_${nodeIndex}_${tensorIndex}`;\n outputShapeKeys.push(shapeKey);\n }\n for (let i = 0; i < outputShapeKeys.length; i++) {\n const key = outputShapeKeys[i];\n generic_utils.assert(key in layersToOutputShapes);\n outputShapes.push(layersToOutputShapes[key]);\n }\n // TODO(michaelterry): Update cache\n return generic_utils.singletonOrArray(outputShapes);\n }\n /**\n * Computes output tensors for new inputs.\n *\n * Note:\n * - Expects `inputs` to be a list (potentially with 1 element).\n *\n * @param inputs List of tensors\n * @param masks List of masks (tensors or null).\n * @return Three lists: outputTensors, outputMasks, outputShapes\n */\n runInternalGraph(inputs, masks) {\n if (masks == null) {\n masks = generic_utils.pyListRepeat(null, inputs.length);\n }\n // Dictionary mapping reference tensors to tuples\n // (computed tensor, compute mask)\n // we assume a 1:1 mapping from tensor to mask\n // TODO: raise exception when a `.computeMask()` call\n // does not return a list the same size as `call`\n const tensorMap = {};\n for (let i = 0; i < this.inputs.length; ++i) {\n const x = this.inputs[i];\n const y = inputs[i];\n const mask = masks[i];\n tensorMap[x.id] = [y, mask];\n }\n const depthKeys = Object.keys(this.nodesByDepth)\n .map(x => parseInt(x, 10))\n .sort(generic_utils.reverseNumberCompare);\n for (const depth of depthKeys) {\n const nodes = this.nodesByDepth[depth];\n for (const node of nodes) {\n // This is always a single layer, never a list.\n const layer = node.outboundLayer;\n const referenceInputTensors = node.inputTensors;\n const referenceOutputTensors = node.outputTensors;\n // If all previous input tensors are available in tensorMap,\n // then call node.inboundLayer on them.\n // List of tuples [input, mask]:\n const computedData = new Array();\n for (const x of referenceInputTensors) {\n if (x.id in tensorMap) {\n computedData.push(tensorMap[x.id]);\n }\n }\n if (computedData.length === referenceInputTensors.length) {\n // TODO(michaelterry): Add K.name_scope here, if we need it.\n let kwargs = {};\n let computedTensors;\n let computedMasks;\n let outputTensors;\n let outputMasks;\n // call layer\n if (node.callArgs != null) {\n kwargs = node.callArgs;\n }\n if (computedData.length === 1) {\n const [computedTensor, computedMask] = computedData[0];\n if (kwargs['mask'] == null) {\n kwargs['mask'] = computedMask;\n }\n outputTensors =\n generic_utils.toList(layer.call(computedTensor, kwargs));\n outputMasks = generic_utils.toList(layer.computeMask(computedTensor, computedMask));\n computedTensors = [computedTensor];\n computedMasks = [computedMask];\n }\n else {\n computedTensors = computedData.map(x => x[0]);\n computedMasks = computedData.map(x => x[1]);\n if (kwargs['mask'] == null) {\n kwargs['mask'] = computedMasks;\n }\n outputTensors =\n generic_utils.toList(layer.call(computedTensors, kwargs));\n outputMasks = generic_utils.toList(layer.computeMask(computedTensors, computedMasks));\n }\n if (layer.activityRegularizer) {\n throw new NotImplementedError('LayersModel invocation with concrete Tensor value(s) in the ' +\n 'presence of activity regularizer(s) is not supported yet.');\n }\n // TODO(michaelterry): Add model updates and losses\n // Update tensor map.\n for (let i = 0; i < referenceOutputTensors.length; ++i) {\n const x = 
referenceOutputTensors[i];\n const y = outputTensors[i];\n const mask = outputMasks[i];\n tensorMap[x.id] = [y, mask];\n }\n }\n }\n }\n const outputTensors = [];\n const outputMasks = [];\n const outputShapes = [];\n for (const x of this.outputs) {\n generic_utils.assert(x.id in tensorMap, `Could not compute output ${x.name} : ${x.id}`);\n const [tensor, mask] = tensorMap[x.id];\n outputShapes.push(tensor.shape);\n outputTensors.push(tensor);\n outputMasks.push(mask);\n }\n // TODO(michaelterry): Add support for caches.\n return [outputTensors, outputMasks, outputShapes];\n }\n /**\n * Builds a map of internal node keys to node ordering.\n * Used in serializaion a node orderings may change as unused nodes are\n * dropped. Porting Note: This helper method was pulled out of getConfig to\n * improve readability.\n * @param layers An array of Layers in the model.\n * @returns Map of Node Keys to index order within the layer.\n */\n buildNodeConversionMap(layers) {\n const nodeConversionMap = {};\n let keptNodes;\n for (const layer of this.layers) {\n keptNodes = layer instanceof Container ? 1 : 0;\n for (let originalNodeIndex = 0; originalNodeIndex < layer.inboundNodes.length; originalNodeIndex++) {\n const nodeKey = Container.nodeKey(layer, originalNodeIndex);\n if (this.containerNodes.has(nodeKey)) {\n // i.e. we mark it to be saved\n nodeConversionMap[nodeKey] = keptNodes;\n keptNodes += 1;\n }\n }\n }\n return nodeConversionMap;\n }\n /**\n * Retrieves a layer based on either its name (unique) or index.\n *\n * Indices are based on order of horizontal graph traversal (bottom-up).\n *\n * If both `name` and `index` are specified, `index` takes precedence.\n *\n * @param name Name of layer.\n * @param index Index of layer.\n * @returns A Layer instance.\n * @throws ValueError: In case of invalid layer name or index.\n *\n * @doc {\n * heading: 'Layers',\n * subheading: 'Classes',\n * namespace: 'layers',\n * subclasses: ['LayersModel']\n * }\n */\n getLayer(name, index) {\n if (index != null) {\n if (this.layers.length <= index) {\n throw new ValueError(`Was asked to retrieve layer at index ${index}, but model only ` +\n `has ${this.layers.length} layer(s).`);\n }\n else {\n return this.layers[index];\n }\n }\n else {\n if (name == null) {\n throw new ValueError('Provide either a layer name or layer index');\n }\n }\n for (const layer of this.layers) {\n if (layer.name === name) {\n return layer;\n }\n }\n throw new ValueError(`No such layer: ${name}`);\n }\n /**\n * Retrieves the Container's current loss values.\n *\n * Used for regularizers during training.\n */\n calculateLosses() {\n // Porting Node: This is an augmentation to Container.loss in PyKeras.\n // In PyKeras, Container.loss returns symbolic tensors. Here a concrete\n // Tensor (specifically Scalar) values are returned. 
This is due to the\n // imperative backend.\n return tidy(() => {\n const losses = [];\n for (const layer of this.layers) {\n for (let nodeIndex = 0; nodeIndex < layer.inboundNodes.length; ++nodeIndex) {\n const nodeKey = Container.nodeKey(layer, nodeIndex);\n if (this.containerNodes.has(nodeKey)) {\n losses.push(...layer.calculateLosses());\n }\n }\n }\n // TODO(cais): Add any unconditional model-level losses?\n return losses;\n });\n }\n getConfig() {\n const config = { name: this.name };\n // Build a map from layer unique name (self._node_key)\n // to the index of the nodes that are saved in the config.\n // Only nodes in container_nodes are saved.\n const nodeConversionMap = this.buildNodeConversionMap(this.layers);\n // Serialize and save the layers in layerConfigs\n const layerConfigs = [];\n for (const layer of this.layers) {\n const layerClassName = layer.getClassName();\n const layerConfig = layer.getConfig();\n const filteredInboundNodes = [];\n for (let originalNodeIndex = 0; originalNodeIndex < layer.inboundNodes.length; originalNodeIndex++) {\n const node = layer.inboundNodes[originalNodeIndex];\n const nodeKey = Container.nodeKey(layer, originalNodeIndex);\n let kwargs = {};\n if (this.containerNodes.has(nodeKey)) {\n // The node is relevant to the model:\n // add to filteredInboundNodes.\n if (node.callArgs) {\n try {\n JSON.stringify(node.callArgs);\n kwargs = node.callArgs;\n }\n catch (err) {\n console.warn(`Layer ${layer.name} was passed ` +\n `non-serializable keyword arguments: ` +\n `${node.callArgs}. They will not be included ` +\n `in the serialized model (and thus will be ` +\n `missing at deserialization time).`);\n kwargs = {};\n }\n }\n if (node.inboundLayers.length > 0) {\n const nodeData = [];\n for (let i = 0; i < node.inboundLayers.length; i++) {\n const inboundLayer = node.inboundLayers[i];\n const nodeIndex = node.nodeIndices[i];\n const tensorIndex = node.tensorIndices[i];\n const nodeKey = Container.nodeKey(inboundLayer, nodeIndex);\n let newNodeIndex = nodeConversionMap[nodeKey];\n if (newNodeIndex == null) {\n newNodeIndex = 0;\n }\n nodeData.push([inboundLayer.name, newNodeIndex, tensorIndex, kwargs]);\n }\n filteredInboundNodes.push(nodeData);\n }\n }\n }\n const dict = {};\n dict['name'] = layer.name;\n dict['className'] = layerClassName;\n dict['config'] = layerConfig;\n dict['inboundNodes'] = filteredInboundNodes;\n layerConfigs.push(dict);\n }\n config['layers'] = layerConfigs;\n // Gather info about inputs and outputs\n const modelInputs = [];\n for (let i = 0; i < this.inputLayers.length; i++) {\n const layer = this.inputLayers[i];\n const nodeIndex = this.inputLayersNodeIndices[i];\n const nodeKey = Container.nodeKey(layer, nodeIndex);\n if (!this.containerNodes.has(nodeKey)) {\n continue;\n }\n let newNodeIndex = nodeConversionMap[nodeKey];\n if (newNodeIndex === null || newNodeIndex === undefined) {\n newNodeIndex = 0;\n }\n const tensorIndex = this.inputLayersTensorIndices[i];\n modelInputs.push([layer.name, newNodeIndex, tensorIndex]);\n }\n config['inputLayers'] = modelInputs;\n const modelOutputs = [];\n for (let i = 0; i < this.outputLayers.length; i++) {\n const layer = this.outputLayers[i];\n const nodeIndex = this.outputLayersNodeIndices[i];\n const nodeKey = Container.nodeKey(layer, nodeIndex);\n if (!this.containerNodes.has(nodeKey)) {\n continue;\n }\n let newNodeIndex = nodeConversionMap[nodeKey];\n if (newNodeIndex === null || newNodeIndex === undefined) {\n newNodeIndex = 0;\n }\n const tensorIndex = 
this.outputLayersTensorIndices[i];\n modelOutputs.push([layer.name, newNodeIndex, tensorIndex]);\n }\n config['outputLayers'] = modelOutputs;\n return config;\n }\n /**\n * Instantiates a LayersModel from its config (output of `get_config()`).\n * @param cls the class to create\n * @param config LayersModel config dictionary.\n * @param customObjects An optional dictionary of custom objects.\n * @param fastWeightInit Optional flag to use fast weight initialization\n * during deserialization. This is applicable to cases in which\n * the initialization will be immediately overwritten by loaded weight\n * values. Default: `false`.\n * @returns A LayersModel instance.\n * @throws ValueError: In case of improperly formatted config dict.\n */\n /** @nocollapse */\n static fromConfig(cls, config, customObjects = {}, fastWeightInit = false) {\n // Layer instances created during\n // the graph reconstruction process\n const createdLayers = {};\n // Dictionary mapping layer instances to\n // node data that specifies a layer call.\n // It acts as a queue that maintains any unprocessed\n // layer call until it becomes possible to process it\n // (i.e. until the input tensors to the call all exist).\n const unprocessedNodes = {};\n function addUnprocessedNode(layer, nodeData) {\n if (!(layer.name in unprocessedNodes)) {\n unprocessedNodes[layer.name] = [nodeData];\n }\n else {\n unprocessedNodes[layer.name].push(nodeData);\n }\n }\n function processNode(layer, nodeData) {\n const inputTensors = [];\n let kwargs;\n for (const inputData of nodeData) {\n const inboundLayerName = inputData[0];\n const inboundNodeIndex = inputData[1];\n const inboundTensorIndex = inputData[2];\n kwargs = inputData[3] == null ?\n {} :\n inputData[3];\n if (!(inboundLayerName in createdLayers)) {\n addUnprocessedNode(layer, nodeData);\n return;\n }\n const inboundLayer = createdLayers[inboundLayerName];\n if (inboundLayer.inboundNodes.length <= inboundNodeIndex) {\n addUnprocessedNode(layer, nodeData);\n return;\n }\n const inboundNode = inboundLayer.inboundNodes[inboundNodeIndex];\n inputTensors.push(inboundNode.outputTensors[inboundTensorIndex]);\n }\n // Call layer on its inputs, thus creating the node\n // and building the layer if needed.\n // Note: This has Eager vs Graph Implications.\n if (inputTensors.length > 0) {\n layer.apply(generic_utils.singletonOrArray(inputTensors), kwargs); // was ** kwargs\n }\n }\n /**\n * Deserialize a layer, then call it on appropriate inputs.\n * @param layerData: layer config dict.\n * @throws ValueError: In case of improperly formatted `layer_data`\n * dict.\n */\n function processLayer(layerData) {\n const layerName = layerData['name'];\n // Instantiate layer.\n const layer = deserializeLayer(layerData, config['customObjects'] != null ?\n config['customObjects'] :\n {});\n layer.setFastWeightInitDuringBuild(fastWeightInit);\n createdLayers[layerName] = layer;\n // Gather layer inputs.\n const inboundNodesData = layerData['inboundNodes'];\n inboundNodesData.forEach(nodeData => {\n if (!(nodeData instanceof Array)) {\n throw new ValueError(`Corrupted configuration, expected array for nodeData: ${nodeData}`);\n }\n // We don't process nodes (i.e. 
make layer calls)\n // on the fly because the inbound node may not yet exist,\n // in case of layer shared at different topological depths\n // (e.g.a model such as A(B(A(B(x)))))\n addUnprocessedNode(layer, nodeData);\n });\n }\n // First, we create all layers and enqueue nodes to be processed.\n const name = config['name'];\n const layersFromConfig = config['layers'];\n for (const layerData of layersFromConfig) {\n processLayer(layerData);\n }\n // Then we process nodes in order of layer depth.\n // Nodes that cannot yet be processed(if the inbound node\n // does not yet exist) are re - enqueued, and the process\n // is repeated until all nodes are processed.\n while (!generic_utils.isObjectEmpty(unprocessedNodes)) {\n for (const layerData of layersFromConfig) {\n const layer = createdLayers[layerData['name']];\n if (layer.name in unprocessedNodes) {\n const currentUnprocessedNodesForLayer = unprocessedNodes[layer.name];\n delete unprocessedNodes[layer.name];\n for (const nodeData of currentUnprocessedNodesForLayer) {\n processNode(layer, nodeData);\n }\n }\n }\n }\n const inputTensors = [];\n const outputTensors = [];\n const inputLayersFromConfig = config['inputLayers'];\n for (const layerData of inputLayersFromConfig) {\n const layerName = layerData[0];\n const nodeIndex = layerData[1];\n const tensorIndex = layerData[2];\n generic_utils.assert(layerName in createdLayers);\n const layer = createdLayers[layerName];\n const layerOutputTensors = layer.inboundNodes[nodeIndex].outputTensors;\n inputTensors.push(layerOutputTensors[tensorIndex]);\n }\n const outputLayersFromConfig = config['outputLayers'];\n for (const layerData of outputLayersFromConfig) {\n const layerName = layerData[0];\n const nodeIndex = layerData[1];\n const tensorIndex = layerData[2];\n generic_utils.assert(layerName in createdLayers);\n const layer = createdLayers[layerName];\n const layerOutputTensors = layer.inboundNodes[nodeIndex].outputTensors;\n outputTensors.push(layerOutputTensors[tensorIndex]);\n }\n return new cls({ inputs: inputTensors, outputs: outputTensors, name });\n }\n /**\n * Determine whether the container is stateful.\n *\n * Porting Note: this is the equivalent of the stateful @property of\n * the Container class in PyKeras.\n */\n get stateful() {\n // Porting Note: This check is to prevent inadvertent setting of the\n // _stateful property of the Container instance.\n if (this._stateful) {\n throw new ValueError('Container instance unexpectedly has _stateful = true. The ' +\n 'statefulness of a Container is determined by the Layers it ' +\n 'contains. 
Its _stateful property must remain the default false.');\n }\n for (const layer of this.layers) {\n if (layer.stateful) {\n return true;\n }\n }\n return false;\n }\n /**\n * Reset the state of all stateful constituent layers (if any).\n *\n * Examples of stateful layers include RNN layers whose `stateful` property\n * is set as `true`.\n */\n resetStates() {\n tidy(() => {\n this.layers.forEach(layer => {\n // tslint:disable:no-any\n if (layer.stateful) {\n layer.resetStates();\n }\n // tslint:enable:no-any\n });\n });\n }\n}\n//# sourceMappingURL=container.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport { dispose, mul, tensor1d, tidy } from '@tensorflow/tfjs-core';\nfunction standardizeSampleOrClassWeights(xWeight, outputNames, weightType) {\n const numOutputs = outputNames.length;\n if (xWeight == null || (Array.isArray(xWeight) && xWeight.length === 0)) {\n return outputNames.map(name => null);\n }\n if (numOutputs === 1) {\n if (Array.isArray(xWeight) && xWeight.length === 1) {\n return xWeight;\n }\n else if (typeof xWeight === 'object' && outputNames[0] in xWeight) {\n return [xWeight[outputNames[0]]];\n }\n else {\n return [xWeight];\n }\n }\n if (Array.isArray(xWeight)) {\n if (xWeight.length !== numOutputs) {\n throw new Error(`Provided ${weightType} is an array of ${xWeight.length} ` +\n `element(s), but the model has ${numOutputs} outputs. ` +\n `Make sure a set of weights is provided for each model output.`);\n }\n return xWeight;\n }\n else if (typeof xWeight === 'object' && Object.keys(xWeight).length > 0 &&\n typeof xWeight[Object.keys(xWeight)[0]] ===\n 'object') {\n const output = [];\n outputNames.forEach(outputName => {\n if (outputName in xWeight) {\n output.push(xWeight[outputName]);\n }\n else {\n output.push(null);\n }\n });\n return output;\n }\n else {\n throw new Error(`The model has multiple (${numOutputs}) outputs, ` +\n `so ${weightType} must be either an array with ` +\n `${numOutputs} elements or an object with ${outputNames} keys. ` +\n `Provided ${weightType} not understood: ${JSON.stringify(xWeight)}`);\n }\n}\n/**\n * Standardize class weighting objects.\n *\n * This function takes a single class-weighting object, an array of them,\n * or a map from output name to class-weighting object. It compares it to the\n * output name(s) of the model, base on which it outputs an array of\n * class-weighting objects of which the length matches the number of outputs.\n *\n * @param classWeight Input class-weighting object(s).\n * @param outputNames All output name(s) of the model.\n * @return An array of class-weighting objects. The length of the array matches\n * the model's number of outputs.\n */\nexport function standardizeClassWeights(classWeight, outputNames) {\n return standardizeSampleOrClassWeights(classWeight, outputNames, 'classWeight');\n}\nexport function standardizeSampleWeights(classWeight, outputNames) {\n return standardizeSampleOrClassWeights(classWeight, outputNames, 'sampleWeight');\n}\n/**\n * Standardize by-sample and/or by-class weights for training.\n *\n * Note that this function operates on one model output at a time. 
For a model\n * with multiple outputs, you must call this function multiple times.\n *\n * @param y The target tensor that the by-sample and/or by-class weight is for.\n * The values of y are assumed to encode the classes, either directly\n * as an integer index, or as one-hot encoding.\n * @param sampleWeight By-sample weights.\n * @param classWeight By-class weights: an object mapping class indices\n * (integers) to a weight (float) to apply to the model's loss for the\n * samples from this class during training. This can be useful to tell the\n * model to \"pay more attention\" to samples from an under-represented class.\n * @param sampleWeightMode The mode for the sample weights.\n * @return A Promise of weight tensor, of which the size of the first dimension\n * matches that of `y`.\n */\nexport async function standardizeWeights(y, sampleWeight, classWeight, sampleWeightMode) {\n if (sampleWeight != null || sampleWeightMode != null) {\n // TODO(cais): Once 'temporal' mode is implemented, document it in the doc\n // string.\n throw new Error('Support sampleWeight is not implemented yet');\n }\n if (classWeight != null) {\n // Apply class weights per sample.\n const yClasses = tidy(() => {\n if (y.shape.length === 1) {\n // Assume class indices.\n return y.clone();\n }\n else if (y.shape.length === 2) {\n if (y.shape[1] > 1) {\n // Assume one-hot encoding of classes.\n const axis = 1;\n return y.argMax(axis);\n }\n else if (y.shape[1] === 1) {\n // Class index.\n return y.reshape([y.shape[0]]);\n }\n else {\n throw new Error(`Encountered unexpected last-dimension size (${y.shape[1]}) ` +\n `during handling of class weights. The size is expected to be ` +\n `>= 1.`);\n }\n }\n else {\n throw new Error(`Unexpected rank of target (y) tensor (${y.rank}) during ` +\n `handling of class weights. The rank is expected to be 1 or 2.`);\n }\n });\n const yClassIndices = Array.from(await yClasses.data());\n dispose(yClasses);\n const classSampleWeight = [];\n yClassIndices.forEach(classIndex => {\n if (classWeight[classIndex] == null) {\n throw new Error(`classWeight must contain all classes in the training data. 
` +\n `The class ${classIndex} exists in the data but not in ` +\n `classWeight`);\n }\n else {\n classSampleWeight.push(classWeight[classIndex]);\n }\n });\n return tensor1d(classSampleWeight, 'float32');\n }\n else {\n return null;\n }\n}\n/**\n * Apply per-sample weights on the loss values from a number of samples.\n *\n * @param losses Loss tensor of shape `[batchSize]`.\n * @param sampleWeights Per-sample weight tensor of shape `[batchSize]`.\n * @returns Tensor of the same shape as`losses`.\n */\nexport function computeWeightedLoss(losses, sampleWeights) {\n return mul(losses, sampleWeights);\n}\n//# sourceMappingURL=training_utils.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Interfaces and methods for training models using TensorFlow.js datasets.\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { scalar } from '@tensorflow/tfjs-core';\nimport { configureCallbacks, standardizeCallbacks } from '../base_callbacks';\nimport { NotImplementedError, ValueError } from '../errors';\nimport { disposeTensorsInLogs } from '../logs';\nimport { singletonOrArray, toList } from '../utils/generic_utils';\nimport { standardizeClassWeights, standardizeWeights } from './training_utils';\n// Default batch size used during tensor-based validation.\nconst DEFAULT_VALIDATION_BATCH_SIZE = 32;\n/**\n * Standardize the output of a dataset iterator for use by\n * LayersModel.fitDataset().\n *\n * @param model: A `tf.LayersModel` object.\n * @param iteratorOut The output of a dataset iterator. It is required to be\n * an object of the form `{xs: TensorOrArrayOrMap, ys:\n * TensorOrArrayOrMap}`, where `TensorOrArrayOrMap` is a single `tf.Tensor`,\n * a `tf.Tensor[]`, or a flat map from string names to `tf.Tensor`s.\n * @returns A flat array of `tf.Tensor` objects: the input `tf.Tensor`s\n * followed by the target `tf.Tensor`s. When `tf.Tensor`s are provided\n * as a map, the order in the resulting array is taken from the `inputNames`\n * and `outputNames` of the model.\n */\nfunction standardizeDataIteratorOutput(\n// Type `model` as `any` here to avoid circular dependency w/\n// training.ts.\n// tslint:disable-next-line:no-any\nmodel, iteratorOut) {\n let xs;\n let ys;\n const iteratorOutObj = iteratorOut;\n xs = iteratorOutObj['xs'];\n ys = iteratorOutObj['ys'];\n tfc.util.assert(xs != null && ys != null, () => 'A Dataset iterator for fitDataset() is expected to generate ' +\n 'objects of the form `{xs: xVal, ys: yVal}`, where the two ' +\n 'values may be `tf.Tensor`, an array of Tensors, or a map of ' +\n 'string to Tensor. The provided Dataset instead generates ' +\n `${iteratorOut}`);\n const flattenedXs = flattenTensorOrArrayOrMap('input', model.inputNames, xs);\n const flattenedYs = flattenTensorOrArrayOrMap('output', model.outputNames, ys);\n const batchSize = flattenedXs[0].shape[0];\n tfc.util.assert(flattenedXs.length === model.inputs.length, () => `LayersModel has ${model.inputs.length} inputs, but the dataset ` +\n `provides ${flattenedXs.length} inputs. (Expected input keys: ` +\n `${JSON.stringify(model.inputNames)})`);\n tfc.util.assert(flattenedYs.length === model.outputs.length, () => `LayersModel has ${model.outputs.length} outputs, but the dataset ` +\n `provides ${flattenedYs.length} outputs. 
(Expected output keys: ` +\n `${JSON.stringify(model.outputNames)})`);\n for (let xIndex = 0; xIndex < flattenedXs.length; xIndex++) {\n tfc.util.assert(flattenedXs[xIndex].shape[0] === batchSize, () => `Batch size mismatch: input ` +\n `${model.inputNames[xIndex]} has ${flattenedXs[xIndex].shape[0]}; ` +\n `expected ${batchSize} based on input ${model.inputNames[0]}.`);\n }\n for (let yIndex = 0; yIndex < flattenedYs.length; yIndex++) {\n tfc.util.assert(flattenedYs[yIndex].shape[0] === batchSize, () => `Batch size mismatch: output ` +\n `${model.outputNames[yIndex]} has ${flattenedYs[yIndex].shape[0]}; ` +\n `expected ${batchSize} based on input ${model.inputNames[0]}.`);\n }\n return { xs: flattenedXs, ys: flattenedYs };\n}\nfunction flattenTensorOrArrayOrMap(inputOrOutput, names, values) {\n if (values instanceof tfc.Tensor) {\n return [values];\n }\n else if (Array.isArray(values)) {\n tfc.util.assert(values.length === names.length, () => `Received an array of ${values.length} Tensors, but expected ${names.length} to match the ${inputOrOutput} keys ${names}.`);\n return values;\n }\n else {\n const result = [];\n // Check that all the required keys are available.\n for (const name of names) {\n if (values[name] == null) {\n throw new ValueError(`The feature data generated by the dataset lacks the required ` +\n `${inputOrOutput} key '${name}'.`);\n }\n result.push(values[name]);\n }\n return result;\n }\n}\nfunction standardizeTensorValidationData(data) {\n if (data.length === 3) {\n throw new NotImplementedError('Validation with sample weights is not implemented yet.');\n }\n return { xs: data[0], ys: data[1] };\n}\nexport async function fitDataset(\n// Type `model` as `any` here to avoid circular dependency w/\n// training.ts.\n// tslint:disable-next-line:no-any\nmodel, dataset, args) {\n const hasBatchesPerEpoch = args.batchesPerEpoch != null;\n tfc.util.assert(model.optimizer != null, () => 'You must compile a model before training/testing. Use ' +\n 'LayersModel.compile(modelCompileConfig).');\n tfc.util.assert(args != null, () => `For fitDataset(), the 2nd argument (config) is required, ` +\n `but it is not provided in this call.`);\n tfc.util.assert(args.epochs != null && args.epochs > 0 && Number.isInteger(args.epochs), () => `For fitDataset(), config.epochs is expected to be a positive ` +\n `integer, but got ${args.epochs}`);\n tfc.util.assert(!hasBatchesPerEpoch ||\n (args.batchesPerEpoch > 0 && Number.isInteger(args.batchesPerEpoch)), () => `For fitDataset(), config.batchesPerEpoch is expected to be a ` +\n `positive integer if specified, but got ${args.batchesPerEpoch}`);\n tfc.util.assert(\n // tslint:disable-next-line:no-any\n args['validationSplit'] == null, () => '`validationSplit` is not supported by `fitDataset()`. 
' +\n 'Use validationData instead.');\n if (model.isTraining) {\n throw new Error('Cannot start training because another fit() call is ongoing.');\n }\n model.isTraining = true;\n try {\n const doValidation = args.validationData != null;\n let valXs;\n let valYs;\n if (doValidation) {\n if (isDatasetObject(args.validationData)) {\n tfc.util.assert(args.validationBatches == null ||\n (args.validationBatches > 0 &&\n Number.isInteger(args.validationBatches)), () => `For fitDataset() with dataset-based validation, ` +\n `config.validationBatches is expected not to be provided, ` +\n `or to be a positive integer, ` +\n `but got ${args.validationBatches}`);\n }\n else {\n const validationData = standardizeTensorValidationData(args.validationData);\n valXs = validationData.xs;\n valYs = validationData.ys;\n }\n }\n const trainFunction = model.makeTrainFunction();\n const outLabels = model.getDedupedMetricsNames();\n let callbackMetrics;\n if (doValidation) {\n callbackMetrics =\n outLabels.slice().concat(outLabels.map(n => 'val_' + n));\n }\n else {\n callbackMetrics = outLabels.slice();\n }\n const callbacks = standardizeCallbacks(args.callbacks, args.yieldEvery);\n const verbose = args.verbose == null ? 1 : args.verbose;\n const { callbackList, history } = configureCallbacks(callbacks, verbose, args.epochs, null, null, getStepsPerEpoch(dataset, args), null, // Batch size determined by the dataset itself.\n doValidation, callbackMetrics);\n callbackList.setModel(model);\n model.history = history;\n await callbackList.onTrainBegin();\n model.stopTraining_ = false;\n let epoch = args.initialEpoch == null ? 0 : args.initialEpoch;\n let dataIterator = await dataset.iterator();\n while (epoch < args.epochs) {\n const epochLogs = {};\n await callbackList.onEpochBegin(epoch);\n let stepsDone = 0;\n let batchIndex = 0;\n if (!hasBatchesPerEpoch) {\n dataIterator = await dataset.iterator();\n }\n while (hasBatchesPerEpoch ? stepsDone < args.batchesPerEpoch : true) {\n const iteratorOut = await dataIterator.next();\n // If `batchesPerEpoch` is specified, the dataset should not be\n // exhausted until all epoches are done.\n if (hasBatchesPerEpoch && iteratorOut.done) {\n console.warn('You provided `batchesPerEpoch` as ' +\n `${args.batchesPerEpoch}, ` +\n 'but your dataset iterator ran out of data after ' +\n `${stepsDone} batches; ` +\n 'interrupting training. Make sure that your ' +\n 'dataset can generate at least `batchesPerEpoch * epochs` ' +\n 'batches (in this case, ' +\n `${args.batchesPerEpoch * args.epochs} batches). 
` +\n 'You may need to use the repeat() function when building ' +\n 'your dataset.');\n break;\n }\n if (iteratorOut.value != null) {\n const { xs, ys } = standardizeDataIteratorOutput(model, iteratorOut.value);\n const batchLogs = {};\n batchLogs['batch'] = batchIndex;\n batchLogs['size'] = xs[0].shape[0];\n await callbackList.onBatchBegin(batchIndex, batchLogs);\n const sampleWeights = [];\n if (args.classWeight != null) {\n const standardClassWeights = standardizeClassWeights(args.classWeight, model.outputNames);\n for (let i = 0; i < standardClassWeights.length; ++i) {\n sampleWeights.push(await standardizeWeights(ys[i], null, standardClassWeights[i]));\n }\n }\n // Train on batch.\n const ins = xs.concat(ys).concat(sampleWeights);\n const outs = trainFunction(ins);\n tfc.dispose(ins);\n for (let i = 0; i < outLabels.length; ++i) {\n const label = outLabels[i];\n const out = outs[i];\n batchLogs[label] = out;\n tfc.keep(out);\n }\n await callbackList.onBatchEnd(batchIndex, batchLogs);\n disposeTensorsInLogs(batchLogs);\n batchIndex++;\n stepsDone++;\n }\n if (hasBatchesPerEpoch ? stepsDone >= args.batchesPerEpoch :\n iteratorOut.done) {\n // Epoch finished. Perform validation.\n if (doValidation) {\n let valOuts;\n if (isDatasetObject(args.validationData)) {\n valOuts = toList(await model.evaluateDataset(args.validationData, { batches: args.validationBatches }));\n }\n else {\n valOuts = toList(model.evaluate(valXs, valYs, {\n batchSize: args.validationBatchSize == null ?\n DEFAULT_VALIDATION_BATCH_SIZE :\n args.validationBatchSize,\n verbose: 0\n }));\n }\n for (let i = 0; i < model.metricsNames.length; ++i) {\n epochLogs[`val_${model.metricsNames[i]}`] = valOuts[i];\n }\n }\n // Call `break` to exit one epoch lopp after validation is done. If\n // config.batchesPerEpoch is specified, an epoch while loop will\n // stop when `stepsDone >= config.batchesPerEpoch`. When\n // config.batchesPerEpoch is not provided, the following `break` is\n // required to exit the while lopp after dataset is exhausted.\n break;\n }\n if (model.stopTraining_) {\n break;\n }\n }\n await callbackList.onEpochEnd(epoch, epochLogs);\n epoch++;\n if (model.stopTraining_) {\n break;\n }\n }\n await callbackList.onTrainEnd();\n await model.history.syncData();\n return model.history;\n }\n finally {\n model.isTraining = false;\n }\n}\n/** Helper function that determines number of steps (batches) per epoch. 
*/\nfunction getStepsPerEpoch(dataset, args) {\n // Attempt to determine # of batches in an epoch.\n let stepsPerEpoch = null;\n if (args.batchesPerEpoch != null) {\n stepsPerEpoch = args.batchesPerEpoch;\n }\n else if (Number.isFinite(dataset.size)) {\n stepsPerEpoch = dataset.size;\n }\n return stepsPerEpoch;\n}\n// Check if provided object is a Dataset object by checking its .iterator\n// element.\nfunction isDatasetObject(dataset) {\n return (typeof dataset.iterator === 'function');\n}\n// Check if provided object is a LazyIterator object by checking it's .next\n// element.\nfunction isLazyIteratorObject(iterator) {\n return (typeof iterator.next === 'function');\n}\nexport async function evaluateDataset(\n// Type `model` as `any` here to avoid circular dependency w/\n// training.ts.\n// tslint:disable-next-line:no-any\nmodel, dataset, args) {\n args = args || {};\n const hasBatches = args.batches != null;\n const f = model.testFunction;\n let outs = [];\n if (args.verbose > 0) {\n throw new NotImplementedError('Verbose mode is not implemented yet.');\n }\n tfc.util.assert(!hasBatches || (args.batches > 0 && Number.isInteger(args.batches)), () => 'Test loop expects `batches` to be a positive integer, but ' +\n `received ${JSON.stringify(args.batches)}`);\n const dataIterator = isLazyIteratorObject(dataset) ?\n dataset :\n await dataset.iterator();\n // Keeps track of number of examples used in this evaluation.\n let numExamples = 0;\n let batch = 0;\n while (hasBatches ? batch < args.batches : true) {\n const iteratorOut = await dataIterator.next();\n outs = tfc.tidy(() => {\n if (iteratorOut.value) {\n // TODO(cais): Once real dataset is available, use\n // `map(x => standardizeDataIteratorOutput(model, x).map(f)`.\n const { xs, ys } = standardizeDataIteratorOutput(model, iteratorOut.value);\n const xsAndYs = xs.concat(ys);\n const batchOuts = tfc.tidy(() => f(xsAndYs));\n tfc.dispose(xsAndYs);\n if (batch === 0) {\n for (let i = 0; i < batchOuts.length; ++i) {\n outs.push(scalar(0));\n }\n }\n const batchSize = xsAndYs[0].shape[0];\n for (let i = 0; i < batchOuts.length; ++i) {\n const batchOut = batchOuts[i];\n const oldScalar = outs[i];\n outs[i] =\n tfc.tidy(() => tfc.add(outs[i], tfc.mul(batchSize, batchOut)));\n if (batch > 0) {\n tfc.dispose(oldScalar);\n }\n }\n tfc.dispose(batchOuts);\n numExamples += batchSize;\n ++batch;\n }\n return outs;\n });\n if (iteratorOut.done) {\n if (hasBatches) {\n console.warn('Your dataset iterator ran out of data during evaluateDataset(). ' +\n 'Interrupting evalution. Make sure that your ' +\n 'dataset can generate at least `batches` ' +\n `batches (in this case, ${args.batches} batches). 
` +\n 'You may need to use the repeat() function when building ' +\n 'your dataset.');\n }\n break;\n }\n }\n for (let i = 0; i < outs.length; ++i) {\n const oldScalar = outs[i];\n outs[i] = tfc.div(outs[i], numExamples);\n tfc.dispose(oldScalar);\n }\n return singletonOrArray(outs);\n}\n//# sourceMappingURL=training_dataset.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Interfaces and methods for training models using tf.Tensor objects.\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { Tensor, tensor1d, util } from '@tensorflow/tfjs-core';\nimport { expandDims, gather, sliceAlongFirstAxis } from '../backend/tfjs_backend';\nimport { configureCallbacks, standardizeCallbacks } from '../base_callbacks';\nimport { NotImplementedError, ValueError } from '../errors';\nimport { disposeTensorsInLogs } from '../logs';\nimport { range } from '../utils/math_utils';\nexport function checkBatchSize(batchSize) {\n tfc.util.assert(batchSize > 0 && Number.isInteger(batchSize), () => `batchSize is required to be a positive integer, but got ${batchSize}`);\n}\n/**\n * Slice a Tensor or an Array of Tensors, by start and stop indices.\n *\n * Porting Note: The `_slice_arrays` function in PyKeras is covered by this\n * function and `sliceArraysByIndices()` together.\n *\n * @param arrays: the input.\n * @param start: the starting index (inclusive).\n * @param stop: the stopping index (exclusive).\n * @returns The result of the slicing. If `arrays` is an `Array` of\n * `tf.Tensor`s, the slicing will be applied to all elements of the `Array`\n * in the same way.\n */\nexport function sliceArrays(arrays, start, stop) {\n if (arrays == null) {\n return [null];\n }\n else if (Array.isArray(arrays)) {\n return arrays.map(array => sliceAlongFirstAxis(array, start, stop - start));\n }\n else { // Tensor.\n return sliceAlongFirstAxis(arrays, start, stop - start);\n }\n}\n/**\n * Slice a Tensor or an Array of Tensors, by random-order indices.\n *\n * Porting Note: The `_slice_arrays` function in PyKeras is covered by this\n * function and `sliceArrays()` together.\n *\n * @param arrays The input `tf.Tensor` or `Array` of `tf.Tensor`s to slice.\n * If an `Array` of `tf.Tensor`s, all `tf.Tensor`s will be sliced in the\n * same fashion.\n * @param indices The indices to use for slicing along the first (batch)\n * dimension.\n * @returns Result(s) of the slicing.\n */\nexport function sliceArraysByIndices(arrays, indices) {\n return tfc.tidy(() => {\n if (arrays == null) {\n return null;\n }\n else if (Array.isArray(arrays)) {\n return arrays.map(array => sliceArraysByIndices(array, indices));\n }\n else {\n // TODO(cais): indices should be a pre-constructed Tensor1D to avoid\n // tensor1d() calls.\n return gather(arrays, indices.dtype === 'int32' ? indices : indices.toInt());\n }\n });\n}\n/**\n * Returns a list of batch indices (tuples of indices).\n * @param size: Integer, total size of the data to slice into batches.\n * @param batchSize: Integer, batch size.\n * @returns An Array of [batchStart, batchEnd] tuples. batchStart is\n * inclusive; batchEnd is exclusive. 
I.e., each batch consists of indices x\n * that satisfy batchStart <= x < batchEnd.\n */\nexport function makeBatches(size, batchSize) {\n const output = [];\n let batchStart = 0;\n let batchEnd = null;\n while (batchStart < size) {\n batchEnd = batchStart + batchSize;\n if (batchEnd >= size) {\n batchEnd = size;\n }\n output.push([batchStart, batchEnd]);\n batchStart = batchEnd;\n }\n return output;\n}\n/**\n * Abstract fit function for `f(ins)`.\n * @param f A Function returning a list of tensors. For training, this\n * function is expected to perform the updates to the variables.\n * @param ins List of tensors to be fed to `f`.\n * @param outLabels List of strings, display names of the outputs of `f`.\n * @param batchSize Integer batch size or `== null` if unknown. Default : 32.\n * @param epochs Number of times to iterate over the data. Default : 1.\n * @param verbose Verbosity mode: 0, 1, or 2. Default: 1.\n * @param callbacks List of callbacks to be called during training.\n * @param valF Function to call for validation.\n * @param valIns List of tensors to be fed to `valF`.\n * @param shuffle Whether to shuffle the data at the beginning of every\n * epoch. Default : true.\n * @param callbackMetrics List of strings, the display names of the metrics\n * passed to the callbacks. They should be the concatenation of the\n * display names of the outputs of `f` and the list of display names\n * of the outputs of `valF`.\n * @param initialEpoch Epoch at which to start training (useful for\n * resuming a previous training run). Default : 0.\n * @param stepsPerEpoch Total number of steps (batches on samples) before\n * declaring one epoch finished and starting the next epoch. Ignored with\n * the default value of `undefined` or `null`.\n * @param validationSteps Number of steps to run validation for (only if\n * doing validation from data tensors). 
Not applicable for tfjs-layers.\n * @returns A `History` object.\n */\nasync function fitLoop(\n// Type `model` as `any` here to avoid circular dependency w/ training.ts.\n// tslint:disable-next-line:no-any\nmodel, f, ins, outLabels, batchSize, epochs, verbose, callbacks, valF, valIns, shuffle, callbackMetrics, initialEpoch, stepsPerEpoch, validationSteps) {\n if (batchSize == null) {\n batchSize = 32;\n }\n if (epochs == null) {\n epochs = 1;\n }\n if (shuffle == null) {\n shuffle = true;\n }\n if (initialEpoch == null) {\n initialEpoch = 0;\n }\n // TODO(cais): Change const to let below when implementing validation.\n let doValidation = false;\n if (valF != null && valIns != null) {\n doValidation = true;\n // TODO(cais): verbose message.\n }\n if (validationSteps != null) {\n doValidation = true;\n if (stepsPerEpoch == null) {\n throw new ValueError('Can only use `validationSteps` when doing step-wise training, ' +\n 'i.e., `stepsPerEpoch` must be set.');\n }\n }\n const numTrainSamples = model.checkNumSamples(ins, batchSize, stepsPerEpoch, 'steps_per_epoch');\n let indexArray;\n if (numTrainSamples != null) {\n indexArray = range(0, numTrainSamples);\n }\n if (verbose == null) {\n verbose = 1;\n }\n const { callbackList, history } = configureCallbacks(callbacks, verbose, epochs, initialEpoch, numTrainSamples, stepsPerEpoch, batchSize, doValidation, callbackMetrics);\n callbackList.setModel(model);\n model.history = history;\n await callbackList.onTrainBegin();\n model.stopTraining_ = false;\n // TODO(cais): Take care of callbacks.validation_data as in PyKeras.\n // TODO(cais): Pre-convert feeds for performance as in PyKeras.\n for (let epoch = initialEpoch; epoch < epochs; ++epoch) {\n await callbackList.onEpochBegin(epoch);\n const epochLogs = {};\n if (stepsPerEpoch != null) {\n throw new NotImplementedError('stepsPerEpoch mode is not implemented yet.');\n }\n else {\n if (shuffle === 'batch') {\n throw new NotImplementedError('batch shuffling is not implemneted yet');\n }\n else if (shuffle) {\n util.shuffle(indexArray);\n }\n // Convert the potentially shuffled indices to Tensor1D, to avoid the\n // cost of repeated creation of Array1Ds later on.\n const epochIndexArray1D = tensor1d(indexArray);\n const batches = makeBatches(numTrainSamples, batchSize);\n for (let batchIndex = 0; batchIndex < batches.length; ++batchIndex) {\n const batchLogs = {};\n await callbackList.onBatchBegin(batchIndex, batchLogs);\n tfc.tidy(() => {\n const batchStart = batches[batchIndex][0];\n const batchEnd = batches[batchIndex][1];\n const batchIds = sliceAlongFirstAxis(epochIndexArray1D, batchStart, batchEnd - batchStart);\n batchLogs['batch'] = batchIndex;\n batchLogs['size'] = batchEnd - batchStart;\n // TODO(cais): In ins, train flag can be a number, instead of an\n // Tensor? 
Do we need to handle this in tfjs-layers?\n const insBatch = sliceArraysByIndices(ins, batchIds);\n const outs = f(insBatch);\n for (let i = 0; i < outLabels.length; ++i) {\n const label = outLabels[i];\n const out = outs[i];\n batchLogs[label] = out;\n tfc.keep(out);\n // TODO(cais): Use scope() to avoid ownership.\n }\n if (batchIndex === batches.length - 1) { // Last batch.\n if (doValidation) {\n const valOuts = model.testLoop(valF, valIns, batchSize);\n // Porting Notes: In tfjs-layers, valOuts is always an Array.\n for (let i = 0; i < outLabels.length; ++i) {\n const label = outLabels[i];\n const out = valOuts[i];\n tfc.keep(out);\n // TODO(cais): Use scope() to avoid ownership.\n epochLogs['val_' + label] = out;\n }\n }\n }\n });\n await callbackList.onBatchEnd(batchIndex, batchLogs);\n disposeTensorsInLogs(batchLogs);\n if (model.stopTraining_) {\n break;\n }\n // TODO(cais): return outs as list of Tensor.\n }\n epochIndexArray1D.dispose();\n }\n // TODO(cais): Run validation at the end of the epoch.\n await callbackList.onEpochEnd(epoch, epochLogs);\n if (model.stopTraining_) {\n break;\n }\n }\n await callbackList.onTrainEnd();\n await model.history.syncData();\n return model.history;\n}\nexport async function fitTensors(\n// Type `model` as `any` here to avoid circular dependency w/ training.ts.\n// tslint:disable-next-line:no-any\nmodel, x, y, args = {}) {\n if (model.isTraining) {\n throw new Error('Cannot start training because another fit() call is ongoing.');\n }\n model.isTraining = true;\n let inputs;\n let targets;\n let inputValX;\n let inputValY;\n let valX;\n let valY;\n let sampleWeights;\n try {\n const batchSize = args.batchSize == null ? 32 : args.batchSize;\n checkBatchSize(batchSize);\n // Validate user data.\n // TODO(cais): Support sampleWeight.\n const checkBatchAxis = false;\n const standardizedOuts = await model.standardizeUserData(x, y, args.sampleWeight, args.classWeight, checkBatchAxis, batchSize);\n inputs = standardizedOuts[0];\n targets = standardizedOuts[1];\n sampleWeights = standardizedOuts[2];\n // Prepare validation data.\n let doValidation = false;\n let valIns;\n if (args.validationData != null && args.validationData.length > 0) {\n doValidation = true;\n if (args.validationData.length === 2) {\n // config.validationData consists of valX and valY.\n inputValX = args.validationData[0];\n inputValY = args.validationData[1];\n }\n else if (args.validationData.length === 3) {\n throw new NotImplementedError('validationData including sample weights is not supported yet.');\n }\n else {\n throw new ValueError(`When passing validation data, it must contain 2 (valX, valY) ` +\n `or 3 (valX, valY, valSampleWeight) items; ` +\n `${args.validationData} is invalid.`);\n }\n const checkBatchAxis = true;\n const valStandardized = await model.standardizeUserData(inputValX, inputValY, null, /** Unused sample weights. */ null, /** Unused class weights. 
*/ checkBatchAxis, batchSize);\n valX = valStandardized[0];\n valY = valStandardized[1];\n valIns = valX.concat(valY);\n // TODO(cais): Add useLearningPhase data properly.\n }\n else if (args.validationSplit != null && args.validationSplit > 0 &&\n args.validationSplit < 1) {\n doValidation = true;\n // Porting Note: In tfjs-layers, inputs[0] is always a Tensor.\n const splitAt = Math.floor(inputs[0].shape[0] * (1 - args.validationSplit));\n const originalBatchSize = inputs[0].shape[0];\n valX = sliceArrays(inputs, splitAt, originalBatchSize);\n inputs = sliceArrays(inputs, 0, splitAt);\n valY = sliceArrays(targets, splitAt, originalBatchSize);\n targets = sliceArrays(targets, 0, splitAt);\n // TODO(cais): Once sampleWeights becomes available, slice it to get\n // valSampleWeights.\n valIns = valX.concat(valY);\n // TODO(cais): Add useLearningPhase data properly.\n }\n else if (args.validationSteps != null) {\n doValidation = true;\n // TODO(cais): Add useLearningPhase.\n }\n const ins = inputs.concat(targets).concat(sampleWeights);\n model.checkTrainableWeightsConsistency();\n // TODO(cais): Handle use_learning_phase and learning_phase?\n // Porting Note: Here we see a key deviation of tfjs-layers from\n // Keras.\n // Due to the imperative nature of tfjs-layers' backend (tfjs-core),\n // we do not construct symbolic computation graphs to embody the\n // training process. Instead, we define a function that performs the\n // training action. In PyKeras, the data (inputs and targets) are fed\n // through graph placeholders. In tfjs-layers, the data are fed as\n // function arguments. Since the function are defined below in the\n // scope, we don't have equivalents of PyKeras's\n // `_make_train_funciton`.\n const trainFunction = model.makeTrainFunction();\n const outLabels = model.getDedupedMetricsNames();\n let valFunction;\n let callbackMetrics;\n if (doValidation) {\n model.makeTestFunction();\n valFunction = model.testFunction;\n callbackMetrics =\n outLabels.slice().concat(outLabels.map(n => 'val_' + n));\n }\n else {\n valFunction = null;\n valIns = [];\n callbackMetrics = outLabels.slice();\n }\n const callbacks = standardizeCallbacks(args.callbacks, args.yieldEvery);\n const out = await fitLoop(model, trainFunction, ins, outLabels, batchSize, args.epochs, args.verbose, callbacks, valFunction, valIns, args.shuffle, callbackMetrics, args.initialEpoch, null, null);\n return out;\n }\n finally {\n model.isTraining = false;\n // Memory clean up.\n disposeNewTensors(inputs, x);\n disposeNewTensors(targets, y);\n disposeNewTensors(valX, inputValX);\n disposeNewTensors(valY, inputValY);\n if (sampleWeights != null) {\n tfc.dispose(sampleWeights);\n }\n }\n // TODO(cais): Add value to outLabels.\n}\n/**\n * Ensure tensors all have a rank of at least 2.\n *\n * If a tensor has a rank of 1, it is dimension-expanded to rank 2.\n * If any tensor has a rank of 0 (i.e., is a scalar), an error will be thrown.\n */\nexport function ensureTensorsRank2OrHigher(tensors) {\n const outs = [];\n if (tensors instanceof Tensor) {\n tensors = [tensors];\n }\n // Make Tensors at least 2D.\n for (let i = 0; i < tensors.length; ++i) {\n const tensor = tensors[i];\n if (tensor.rank === 1) {\n outs.push(expandDims(tensor, 1));\n }\n else if (tensor.rank === 0) {\n throw new Error('Expected tensor to be at least 1D, but received a 0D tensor ' +\n '(scalar).');\n }\n else {\n outs.push(tensor);\n }\n }\n return outs;\n}\n/**\n * Compare a set of tensors with a reference (old) set, discard the ones\n * in the new 
set that are not present in the reference set.\n *\n * This method is used for memory clenaup during calls such as\n * LayersModel.fit().\n *\n * @param tensors New set which may contain Tensors not present in\n * `refTensors`.\n * @param refTensors Reference Tensor set.\n */\n// TODO(cais, kangyizhang): Deduplicate with tfjs-data.\nexport function disposeNewTensors(tensors, refTensors) {\n if (tensors == null) {\n return;\n }\n const oldTensorIds = [];\n if (refTensors instanceof Tensor) {\n oldTensorIds.push(refTensors.id);\n }\n else if (Array.isArray(refTensors)) {\n refTensors.forEach(t => oldTensorIds.push(t.id));\n }\n else if (refTensors != null) {\n // `oldTensors` is a map from string name to Tensor.\n for (const name in refTensors) {\n const oldTensor = refTensors[name];\n oldTensorIds.push(oldTensor.id);\n }\n }\n const tensorsToDispose = [];\n if (tensors instanceof Tensor) {\n if (oldTensorIds.indexOf(tensors.id) === -1) {\n tensorsToDispose.push(tensors);\n }\n }\n else if (Array.isArray(tensors)) {\n tensors.forEach(t => {\n if (oldTensorIds.indexOf(t.id) === -1) {\n tensorsToDispose.push(t);\n }\n });\n }\n else if (tensors != null) {\n // `oldTensors` is a map from string name to Tensor.\n for (const name in tensors) {\n const tensor = tensors[name];\n if (oldTensorIds.indexOf(tensor.id) === -1) {\n tensorsToDispose.push(tensor);\n }\n }\n }\n tensorsToDispose.forEach(t => {\n if (!t.isDisposed) {\n t.dispose();\n }\n });\n}\n//# sourceMappingURL=training_tensors.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* Original Source: engine/training.py */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { io, Optimizer, scalar, serialization, Tensor, tensor1d, util } from '@tensorflow/tfjs-core';\nimport * as K from '../backend/tfjs_backend';\nimport { nameScope } from '../common';\nimport { NotImplementedError, RuntimeError, ValueError } from '../errors';\nimport { deserialize } from '../layers/serialization';\nimport * as losses from '../losses';\nimport * as Metrics from '../metrics';\nimport * as optimizers from '../optimizers';\nimport { checkUserDefinedMetadata } from '../user_defined_metadata';\nimport { count, pyListRepeat, singletonOrArray, toCamelCase, toSnakeCase, unique } from '../utils/generic_utils';\nimport { printSummary } from '../utils/layer_utils';\nimport { range } from '../utils/math_utils';\nimport { convertPythonicToTs } from '../utils/serialization_utils';\nimport { version } from '../version';\nimport { Container } from './container';\nimport { execute, FeedDict } from './executor';\nimport { evaluateDataset, fitDataset } from './training_dataset';\nimport { checkBatchSize, disposeNewTensors, ensureTensorsRank2OrHigher, fitTensors, makeBatches, sliceArrays, sliceArraysByIndices } from './training_tensors';\nimport { computeWeightedLoss, standardizeClassWeights, standardizeWeights } from './training_utils';\n/**\n * Helper function for polymorphic input data: 1. singleton Tensor.\n */\nexport function isDataTensor(x) {\n return x instanceof Tensor;\n}\n/**\n * Helper function for polymorphic input data: 2. Array of Tensor.\n */\nexport function isDataArray(x) {\n return Array.isArray(x);\n}\n/**\n * Helper function for polymorphic input data: 3. 
\"dict\" of Tensor.\n */\nexport function isDataDict(x) {\n return !isDataTensor(x) && !isDataArray(x);\n}\n/**\n * Normalizes inputs and targets provided by users.\n * @param data User-provided input data (polymorphic).\n * @param names An Array of expected Tensor names.\n * @param shapes Optional Array of expected Tensor shapes.\n * @param checkBatchAxis Whether to check that the batch axis of the arrays\n * match the expected value found in `shapes`.\n * @param exceptionPrefix String prefix used for exception formatting.\n * @returns List of standardized input Tensors (one Tensor per model input).\n * @throws ValueError: in case of improperly formatted user data.\n */\nexport function standardizeInputData(data, names, shapes, checkBatchAxis = true, exceptionPrefix = '') {\n if (names == null || names.length === 0) {\n // Check for the case where the model expected no data, but some data got\n // sent.\n if (data != null) {\n let gotUnexpectedData = false;\n if (isDataArray(data) && data.length > 0) {\n gotUnexpectedData = true;\n }\n else if (isDataDict(data)) {\n for (const key in data) {\n if (data.hasOwnProperty(key)) {\n gotUnexpectedData = true;\n break;\n }\n }\n }\n else {\n // `data` is a singleton Tensor in this case.\n gotUnexpectedData = true;\n }\n if (gotUnexpectedData) {\n throw new ValueError(`Error when checking model ${exceptionPrefix} expected no data, ` +\n `but got ${data}`);\n }\n }\n return [];\n }\n if (data == null) {\n return names.map(name => null);\n }\n let arrays;\n if (isDataDict(data)) {\n data = data;\n arrays = [];\n for (const name of names) {\n if (data[name] == null) {\n throw new ValueError(`No data provided for \"${name}\". Need data for each key in: ` +\n `${names}`);\n }\n arrays.push(data[name]);\n }\n }\n else if (isDataArray(data)) {\n data = data;\n if (data.length !== names.length) {\n throw new ValueError(`Error when checking model ${exceptionPrefix}: the Array of ` +\n `Tensors that you are passing to your model is not the size the ` +\n `model expected. Expected to see ${names.length} Tensor(s), but ` +\n `instead got the following list of Tensor(s): ${data}`);\n }\n arrays = data;\n }\n else {\n data = data;\n if (names.length > 1) {\n throw new ValueError(`The model ${exceptionPrefix} expects ${names.length} Tensor(s), ` +\n `but only received one Tensor. Found: Tensor with shape ${data.shape}`);\n }\n arrays = [data];\n }\n arrays = ensureTensorsRank2OrHigher(arrays);\n // Check shape compatibility.\n if (shapes != null) {\n for (let i = 0; i < names.length; ++i) {\n if (shapes[i] == null) {\n continue;\n }\n const array = arrays[i];\n if (array.shape.length !== shapes[i].length) {\n throw new ValueError(`Error when checking ${exceptionPrefix}: expected ${names[i]} ` +\n `to have ${shapes[i].length} dimension(s). 
but got array with ` +\n `shape ${array.shape}`);\n }\n for (let j = 0; j < shapes[i].length; ++j) {\n if (j === 0 && !checkBatchAxis) {\n // Skip the first (batch) axis.\n continue;\n }\n const dim = array.shape[j];\n const refDim = shapes[i][j];\n if (refDim != null && refDim >= 0 && dim !== refDim) {\n throw new ValueError(`Error when checking ${exceptionPrefix}: expected ${names[i]} ` +\n `to have shape [${shapes[i]}], but got array with shape ` +\n `[${array.shape}].`);\n }\n }\n }\n }\n return arrays;\n}\n/**\n * User input validation for Tensors.\n * @param inputs `Array` of `tf.Tensor`s for inputs.\n * @param targets `Array` of `tf.Tensor`s for targets.\n * @param weights Optional `Array` of `tf.Tensor`s for sample weights.\n * @throws ValueError: in case of incorrectly formatted data.\n */\nexport function checkArrayLengths(inputs, targets, weights) {\n const setX = unique(inputs.map(input => input.shape[0]));\n setX.sort();\n const setY = unique(targets.map(target => target.shape[0]));\n setY.sort();\n // TODO(cais): Check `weights` as well.\n if (setX.length > 1) {\n throw new ValueError(`All input Tensors (x) should have the same number of samples. ` +\n `Got array shapes: ` +\n `${JSON.stringify(inputs.map(input => input.shape))}`);\n }\n if (setY.length > 1) {\n throw new ValueError(`All target Tensors (y) should have the same number of samples. ` +\n `Got array shapes: ` +\n `${JSON.stringify(targets.map(target => target.shape))}`);\n }\n if (setX.length > 0 && setY.length > 0 && !util.arraysEqual(setX, setY)) {\n throw new ValueError(`Input Tensors should have the same number of samples as target ` +\n `Tensors. Found ${setX[0]} input sample(s) and ${setY[0]} target ` +\n `sample(s).`);\n }\n}\n/**\n * Validation on the compatibility of targes and loss functions.\n *\n * This helps prevent users from using loss functions incorrectly.\n *\n * @param targets `Array` of `tf.Tensor`s of targets.\n * @param lossFns `Array` of loss functions.\n * @param outputShapes `Array` of shapes of model outputs.\n */\nfunction checkLossAndTargetCompatibility(targets, lossFns, outputShapes) {\n // TODO(cais): Dedicated test coverage?\n const keyLosses = [\n losses.meanSquaredError, losses.binaryCrossentropy,\n losses.categoricalCrossentropy\n ];\n for (let i = 0; i < targets.length; ++i) {\n const y = targets[i];\n const loss = lossFns[i];\n const shape = outputShapes[i];\n if (loss == null) {\n continue;\n }\n if (loss === losses.categoricalCrossentropy) {\n if (y.shape[y.shape.length - 1] === 1) {\n throw new ValueError(`You are passing a target array of shape ${y.shape} while using ` +\n `a loss 'categorical_crossentropy'. 'categorical_crossentropy'` +\n `expects targets to be binary matrices (1s and 0s) of shape ` +\n `[samples, classes].`);\n // TODO(cais): Example code in error message.\n }\n }\n if (keyLosses.indexOf(loss) !== -1) {\n const slicedYShape = y.shape.slice(1);\n const slicedShape = shape.slice(1);\n for (let j = 0; j < slicedYShape.length; ++j) {\n const targetDim = slicedYShape[j];\n const outDim = slicedShape[j];\n if (outDim != null && targetDim !== outDim) {\n throw new ValueError(`A target Tensor with shape ${y.shape} was passed for an ` +\n `output of shape ${shape}, while using a loss function that ` +\n `expects targets to have the same shape as the output.`);\n }\n }\n }\n }\n}\n/**\n * Check inputs provided by the user.\n *\n * Porting Note: This corresponds to _standardize_input_data() in Python\n * Keras. 
Because of the strong typing in TF.js, we do not need to convert\n * the data. Specifically:\n * 1) in PyKeras, `data` can be `DataFrame` instances from pandas, for\n * example. We don't need to worry about that here because there is no\n * widely popular javascript/typesdcript equivalent of pandas (so far).\n * If one becomes available in the future, we can add support.\n * 2) in PyKeras, inputs can be Python dict. But here we are stipulating\n * that the data is either a single `tf.Tensor` or an Array of `tf.Tensor`s. We\n * may add support for `Object` data inputs in the future when the need\n * arises.\n *\n * Instead, we perform basic checks for number of parameters and shapes.\n *\n * @param data: The input data.\n * @param names: Name for the inputs, from the model.\n * @param shapes: Expected shapes for the input data, from the model.\n * @param checkBatchAxis: Whether the size along the batch axis (i.e., the\n * first dimension) will be checked for matching.\n * @param exceptionPrefix: Execption prefix message, used in generating error\n * messages.\n * @throws ValueError: on incorrect number of inputs or mismatches in shapes.\n */\nfunction checkInputData(data, names, shapes, checkBatchAxis = true, exceptionPrefix = '') {\n let arrays;\n if (Array.isArray(data)) {\n if (data.length !== names.length) {\n throw new ValueError(`Error when checking model ${exceptionPrefix}: the Array of ` +\n `Tensors that you are passing to your model is not the size the ` +\n `the model expected. Expected to see ${names.length} Tensor(s),` +\n ` but instead got ${data.length} Tensors(s).`);\n }\n arrays = data;\n }\n else {\n if (names.length > 1) {\n throw new ValueError(`The model expects ${names.length} ${exceptionPrefix} Tensors, ` +\n `but only received one Tensor. Found: array with shape ` +\n `${JSON.stringify(data.shape)}.`);\n }\n arrays = [data];\n }\n if (shapes != null) {\n for (let i = 0; i < names.length; ++i) {\n if (shapes[i] == null) {\n continue;\n }\n const array = arrays[i];\n if (array.shape.length !== shapes[i].length) {\n throw new ValueError(`Error when checking ${exceptionPrefix}: expected ${names[i]} ` +\n `to have ${shapes[i].length} dimension(s), but got array with ` +\n `shape ${JSON.stringify(array.shape)}`);\n }\n for (let j = 0; j < shapes[i].length; ++j) {\n if (j === 0 && !checkBatchAxis) {\n continue;\n }\n const dim = array.shape[j];\n const refDim = shapes[i][j];\n if (refDim != null) {\n if (refDim !== dim) {\n throw new ValueError(`Error when checking ${exceptionPrefix}: expected ` +\n `${names[i]} to have shape ${JSON.stringify(shapes[i])} but ` +\n `got array with shape ${JSON.stringify(array.shape)}.`);\n }\n }\n }\n }\n }\n}\n/**\n * Maps metric functions to model outputs.\n * @param metrics An shortcut strings name, metric function, `Array` or dict\n * (`Object`) of metric functions.\n * @param outputNames An `Array` of the names of model outputs.\n * @returns An `Array` (one entry per model output) of `Array` of metric\n * functions. 
For instance, if the model has 2 outputs, and for the first\n * output we want to compute `binaryAccuracy` and `binaryCrossentropy`,\n * and just `binaryAccuracy` for the second output, the `Array` would look\n * like:\n * `[[binaryAccuracy, binaryCrossentropy], [binaryAccuracy]]`\n * @throws TypeError: incompatible metrics format.\n */\nexport function collectMetrics(metrics, outputNames) {\n if (metrics == null || Array.isArray(metrics) && metrics.length === 0) {\n return outputNames.map(name => []);\n }\n let wrappedMetrics;\n if (typeof metrics === 'string' || typeof metrics === 'function') {\n wrappedMetrics = [metrics];\n }\n else if (Array.isArray(metrics) || typeof metrics === 'object') {\n wrappedMetrics = metrics;\n }\n else {\n throw new TypeError('Type of metrics argument not understood. Expected an string,' +\n `function, Array, or Object, found: ${metrics}`);\n }\n if (Array.isArray(wrappedMetrics)) {\n // We then apply all metrics to all outputs.\n return outputNames.map(name => wrappedMetrics);\n }\n else {\n // In this case, metrics is a dict.\n const nestedMetrics = [];\n for (const name of outputNames) {\n let outputMetrics = wrappedMetrics.hasOwnProperty(name) ? wrappedMetrics[name] : [];\n if (!Array.isArray(outputMetrics)) {\n outputMetrics = [outputMetrics];\n }\n nestedMetrics.push(outputMetrics);\n }\n return nestedMetrics;\n }\n}\nconst LAYERS_MODEL_FORMAT_NAME = 'layers-model';\n/**\n * A `tf.LayersModel` is a directed, acyclic graph of `tf.Layer`s plus methods\n * for training, evaluation, prediction and saving.\n *\n * `tf.LayersModel` is the basic unit of training, inference and evaluation in\n * TensorFlow.js. To create a `tf.LayersModel`, use `tf.LayersModel`.\n *\n * See also:\n * `tf.Sequential`, `tf.loadLayersModel`.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\nexport class LayersModel extends Container {\n constructor(args) {\n super(args);\n this.isTraining = false;\n }\n /**\n * Print a text summary of the model's layers.\n *\n * The summary includes\n * - Name and type of all layers that comprise the model.\n * - Output shape(s) of the layers\n * - Number of weight parameters of each layer\n * - If the model has non-sequential-like topology, the inputs each layer\n * receives\n * - The total number of trainable and non-trainable parameters of the model.\n *\n * ```js\n * const input1 = tf.input({shape: [10]});\n * const input2 = tf.input({shape: [20]});\n * const dense1 = tf.layers.dense({units: 4}).apply(input1);\n * const dense2 = tf.layers.dense({units: 8}).apply(input2);\n * const concat = tf.layers.concatenate().apply([dense1, dense2]);\n * const output =\n * tf.layers.dense({units: 3, activation: 'softmax'}).apply(concat);\n *\n * const model = tf.model({inputs: [input1, input2], outputs: output});\n * model.summary();\n * ```\n *\n * @param lineLength Custom line length, in number of characters.\n * @param positions Custom widths of each of the columns, as either\n * fractions of `lineLength` (e.g., `[0.5, 0.75, 1]`) or absolute number\n * of characters (e.g., `[30, 50, 65]`). Each number corresponds to\n * right-most (i.e., ending) position of a column.\n * @param printFn Custom print function. Can be used to replace the default\n * `console.log`. 
For example, you can use `x => {}` to mute the printed\n * messages in the console.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n summary(lineLength, positions, printFn = console.log) {\n if (!this.built) {\n throw new ValueError(`This model has never been called, thus its weights have not been ` +\n `created yet. So no summary can be displayed. Build the model ` +\n `first (e.g., by calling it on some test data).`);\n }\n printSummary(this, lineLength, positions, printFn);\n }\n /**\n * Configures and prepares the model for training and evaluation. Compiling\n * outfits the model with an optimizer, loss, and/or metrics. Calling `fit`\n * or `evaluate` on an un-compiled model will throw an error.\n *\n * @param args a `ModelCompileArgs` specifying the loss, optimizer, and\n * metrics to be used for fitting and evaluating this model.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n compile(args) {\n if (args.loss == null) {\n args.loss = [];\n }\n this.loss = args.loss;\n if (typeof args.optimizer === 'string') {\n this.optimizer_ = optimizers.getOptimizer(args.optimizer);\n this.isOptimizerOwned = true;\n }\n else {\n if (!(args.optimizer instanceof Optimizer)) {\n throw new ValueError(`User-defined optimizer must be an instance of tf.Optimizer.`);\n }\n this.optimizer_ = args.optimizer;\n this.isOptimizerOwned = false;\n }\n // TODO(cais): Add lossWeights.\n // TODO(cais): Add sampleWeightMode.\n // Prepare loss functions.\n let lossFunctions = [];\n if (!Array.isArray(args.loss) && typeof args.loss !== 'string' &&\n typeof args.loss !== 'function') {\n args.loss = args.loss;\n for (const name in args.loss) {\n if (this.outputNames.indexOf(name) === -1) {\n throw new ValueError(`Unknown entry in loss dictionary: \"${name}\". ` +\n `Only expected the following keys: ${this.outputNames}`);\n }\n }\n for (const name of this.outputNames) {\n if (args.loss[name] == null) {\n console.warn(`Output \"${name}\" is missing from loss dictionary. We assume ` +\n `this was done on purpose, and we will not be expecting data ` +\n `to be passed to ${name} during training`);\n }\n lossFunctions.push(losses.get(args.loss[name]));\n }\n }\n else if (Array.isArray(args.loss)) {\n if (args.loss.length !== this.outputs.length) {\n throw new ValueError(`When passing an Array as loss, it should have one entry per ` +\n `model output. The model has ${this.outputs.length} output(s), ` +\n `but you passed loss=${args.loss}.`);\n }\n const theLosses = args.loss;\n lossFunctions = theLosses.map(l => losses.get(l));\n }\n else {\n const lossFunction = losses.get(args.loss);\n this.outputs.forEach(_ => {\n lossFunctions.push(lossFunction);\n });\n }\n this.lossFunctions = lossFunctions;\n this.feedOutputNames = [];\n this.feedOutputShapes = [];\n this.feedLossFns = [];\n for (let i = 0; i < this.outputs.length; ++i) {\n // TODO(cais): Logic for skipping target(s).\n const shape = this.internalOutputShapes[i];\n const name = this.outputNames[i];\n this.feedOutputNames.push(name);\n this.feedOutputShapes.push(shape);\n this.feedLossFns.push(this.lossFunctions[i]);\n }\n // TODO(cais): Add logic for output masks.\n // TODO(cais): Add logic for sample weights.\n const skipTargetIndices = [];\n // Prepare metrics.\n this.metrics = args.metrics;\n // TODO(cais): Add weightedMetrics.\n this.metricsNames = ['loss'];\n this.metricsTensors = [];\n // Compute total loss.\n // Porting Note: In PyKeras, metrics_tensors are symbolic tensor objects.\n // Here, metricsTensors are TypeScript functions. 
This difference is due\n // to the difference in symbolic/imperative property of the backends.\n nameScope('loss', () => {\n for (let i = 0; i < this.outputs.length; ++i) {\n if (skipTargetIndices.indexOf(i) !== -1) {\n continue;\n }\n // TODO(cais): Add weightedLoss, sampleWeight and mask.\n // The following line should be weightedLoss\n const weightedLoss = this.lossFunctions[i];\n if (this.outputs.length > 1) {\n this.metricsTensors.push([weightedLoss, i]);\n this.metricsNames.push(this.outputNames[i] + '_loss');\n }\n }\n // Porting Note: Due to the imperative nature of the backend, we calculate\n // the regularizer penalties in the totalLossFunction, instead of here.\n });\n const nestedMetrics = collectMetrics(args.metrics, this.outputNames);\n // TODO(cais): Add nestedWeightedMetrics.\n /**\n * Helper function used in loop below.\n */\n const appendMetric = (outputIndex, metricName, metricTensor) => {\n if (this.outputNames.length > 1) {\n metricName = this.outputNames[outputIndex] + '_' + metricName;\n }\n this.metricsNames.push(metricName);\n this.metricsTensors.push([metricTensor, outputIndex]);\n };\n nameScope('metric', () => {\n for (let i = 0; i < this.outputs.length; ++i) {\n if (skipTargetIndices.indexOf(i) !== -1) {\n continue;\n }\n const outputMetrics = nestedMetrics[i];\n // TODO(cais): Add weights and outputWeightedMetrics.\n // TODO(cais): Add optional arg `weights` to the following function.\n const handleMetrics = (metrics) => {\n const metricNamePrefix = '';\n let metricName;\n let accFn;\n let weightedMetricFn;\n // TODO(cais): Use 'weights_' for weighted metrics.\n for (const metric of metrics) {\n if (typeof metric === 'string' &&\n ['accuracy', 'acc', 'crossentropy', 'ce'].indexOf(metric) !==\n -1) {\n const outputShape = this.internalOutputShapes[i];\n if (outputShape[outputShape.length - 1] === 1 ||\n this.lossFunctions[i] === losses.binaryCrossentropy) {\n // case: binary accuracy/crossentropy.\n if (['accuracy', 'acc'].indexOf(metric) !== -1) {\n accFn = Metrics.binaryAccuracy;\n }\n else if (['crossentropy', 'ce'].indexOf(metric) !== -1) {\n accFn = Metrics.binaryCrossentropy;\n }\n }\n else if (this.lossFunctions[i] ===\n losses.sparseCategoricalCrossentropy) {\n // case: categorical accuracy / crossentropy with sparse\n // targets.\n if (['accuracy', 'acc'].indexOf(metric) !== -1) {\n accFn = Metrics.sparseCategoricalAccuracy;\n }\n else if (['crossentropy', 'ce'].indexOf(metric) !== -1) {\n accFn = Metrics.sparseCategoricalCrossentropy;\n }\n }\n else {\n // case: categorical accuracy / crossentropy.\n if (['accuracy', 'acc'].indexOf(metric) !== -1) {\n accFn = Metrics.categoricalAccuracy;\n }\n else if (['crossentropy', 'ce'].indexOf(metric) !== -1) {\n accFn = Metrics.categoricalCrossentropy;\n }\n }\n let suffix;\n if (['accuracy', 'acc'].indexOf(metric) !== -1) {\n suffix = 'acc';\n }\n else if (['crossentropy', 'ce'].indexOf(metric) !== -1) {\n suffix = 'ce';\n }\n // TODO(cais): Add weighting actually.\n weightedMetricFn = accFn;\n metricName = metricNamePrefix + suffix;\n }\n else {\n const metricFn = Metrics.get(metric);\n // TODO(cais): Add weighting actually.\n weightedMetricFn = metricFn;\n metricName =\n metricNamePrefix + Metrics.getLossOrMetricName(metric);\n }\n // TODO(cais): Add weighting and masking to metricResult.\n let metricResult;\n nameScope(metricName, () => {\n metricResult = weightedMetricFn;\n });\n appendMetric(i, metricName, metricResult);\n }\n };\n handleMetrics(outputMetrics);\n // TODO(cais): Call handleMetrics with 
weights.\n }\n });\n // Porting Notes: Given the imperative backend of tfjs-core,\n // there is no need for constructing the symbolic graph and placeholders.\n this.collectedTrainableWeights = this.trainableWeights;\n }\n /**\n * Check trainable weights count consistency.\n *\n * This will raise a warning if `this.trainableWeights` and\n * `this.collectedTrainableWeights` are inconsistent (i.e., have different\n * numbers of parameters).\n * Inconsistency will typically arise when one modifies `model.trainable`\n * without calling `model.compile()` again.\n */\n checkTrainableWeightsConsistency() {\n if (this.collectedTrainableWeights == null) {\n return;\n }\n if (this.trainableWeights.length !==\n this.collectedTrainableWeights.length) {\n console.warn('Discrepancy between trainableweights and collected trainable ' +\n 'weights. Did you set `model.trainable` without calling ' +\n '`model.compile()` afterwards?');\n }\n }\n /**\n * Returns the loss value & metrics values for the model in test mode.\n *\n * Loss and metrics are specified during `compile()`, which needs to happen\n * before calls to `evaluate()`.\n *\n * Computation is done in batches.\n *\n * ```js\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 1, inputShape: [10]})]\n * });\n * model.compile({optimizer: 'sgd', loss: 'meanSquaredError'});\n * const result = model.evaluate(\n * tf.ones([8, 10]), tf.ones([8, 1]), {batchSize: 4});\n * result.print();\n * ```\n *\n * @param x `tf.Tensor` of test data, or an `Array` of `tf.Tensor`s if the\n * model has multiple inputs.\n * @param y `tf.Tensor` of target data, or an `Array` of `tf.Tensor`s if the\n * model has multiple outputs.\n * @param args A `ModelEvaluateArgs`, containing optional fields.\n *\n * @return `Scalar` test loss (if the model has a single output and no\n * metrics) or `Array` of `Scalar`s (if the model has multiple outputs\n * and/or metrics). The attribute `model.metricsNames`\n * will give you the display labels for the scalar outputs.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n evaluate(x, y, args = {}) {\n const batchSize = args.batchSize == null ? 32 : args.batchSize;\n checkBatchSize(batchSize);\n // TODO(cais): Standardize `config.sampleWeights` as well.\n // Validate user data.\n const checkBatchAxis = true;\n const standardizedOuts = this.standardizeUserDataXY(x, y, checkBatchAxis, batchSize);\n try {\n // TODO(cais): If uses `useLearningPhase`, set the corresponding element\n // of the input to 0.\n const ins = standardizedOuts[0].concat(standardizedOuts[1]);\n this.makeTestFunction();\n const f = this.testFunction;\n const testOuts = this.testLoop(f, ins, batchSize, args.verbose, args.steps);\n return singletonOrArray(testOuts);\n }\n finally {\n disposeNewTensors(standardizedOuts[0], x);\n disposeNewTensors(standardizedOuts[1], y);\n }\n }\n // TODO(cais): Add code snippet below once real dataset objects are\n // available.\n /**\n * Evaluate model using a dataset object.\n *\n * Note: Unlike `evaluate()`, this method is asynchronous (`async`);\n *\n * @param dataset A dataset object. Its `iterator()` method is expected\n * to generate a dataset iterator object, the `next()` method of which\n * is expected to produce data batches for evaluation. The return value\n * of the `next()` call ought to contain a boolean `done` field and a\n * `value` field. The `value` field is expected to be an array of two\n * `tf.Tensor`s or an array of two nested `tf.Tensor` structures. 
The former\n * case is for models with exactly one input and one output (e.g..\n * a sequential model). The latter case is for models with multiple\n * inputs and/or multiple outputs. Of the two items in the array, the\n * first is the input feature(s) and the second is the output target(s).\n * @param args A configuration object for the dataset-based evaluation.\n * @returns Loss and metric values as an Array of `Scalar` objects.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n async evaluateDataset(dataset, args) {\n this.makeTestFunction();\n return evaluateDataset(this, dataset, args);\n }\n /**\n * Get number of samples provided for training, evaluation or prediction.\n *\n * @param ins Input `tf.Tensor`.\n * @param batchSize Integer batch size, optional.\n * @param steps Total number of steps (batches of samples) before\n * declaring loop finished. Optional.\n * @param stepsName The public API's parameter name for `steps`.\n * @returns Number of samples provided.\n */\n checkNumSamples(ins, batchSize, steps, stepsName = 'steps') {\n let numSamples;\n if (steps != null) {\n numSamples = null;\n if (batchSize != null) {\n throw new ValueError(`If ${stepsName} is set, batchSize must be null or undefined.` +\n `Got batchSize = ${batchSize}`);\n }\n }\n else if (ins != null) {\n if (Array.isArray(ins)) {\n numSamples = ins[0].shape[0];\n }\n else {\n numSamples = ins.shape[0];\n }\n }\n else {\n throw new ValueError(`Either the input data should have a defined shape, or ` +\n `${stepsName} shoud be specified.`);\n }\n return numSamples;\n }\n /**\n * Execute internal tensors of the model with input data feed.\n * @param inputs Input data feed. Must match the inputs of the model.\n * @param outputs Names of the output tensors to be fetched. Must match\n * names of the SymbolicTensors that belong to the graph.\n * @returns Fetched values for `outputs`.\n */\n execute(inputs, outputs) {\n if (Array.isArray(outputs) && outputs.length === 0) {\n throw new ValueError('`outputs` is an empty Array, which is not allowed.');\n }\n const outputsIsArray = Array.isArray(outputs);\n const outputNames = (outputsIsArray ? outputs : [outputs]);\n const outputSymbolicTensors = this.retrieveSymbolicTensors(outputNames);\n // Format the input into a FeedDict.\n const feedDict = new FeedDict();\n if (inputs instanceof Tensor) {\n inputs = [inputs];\n }\n if (Array.isArray(inputs)) {\n if (inputs.length !== this.inputs.length) {\n throw new ValueError(`The number of inputs provided (${inputs.length}) ` +\n `does not match the number of inputs of this model ` +\n `(${this.inputs.length}).`);\n }\n for (let i = 0; i < this.inputs.length; ++i) {\n feedDict.add(this.inputs[i], inputs[i]);\n }\n }\n else {\n for (const input of this.inputs) {\n const tensorValue = inputs[input.name];\n if (tensorValue == null) {\n throw new ValueError(`No value is provided for the model's input ${input.name}`);\n }\n feedDict.add(input, tensorValue);\n }\n }\n // Run execution.\n const executeOutputs = execute(outputSymbolicTensors, feedDict);\n return outputsIsArray ? executeOutputs : executeOutputs[0];\n }\n /**\n * Retrieve the model's internal symbolic tensors from symbolic-tensor names.\n */\n retrieveSymbolicTensors(symbolicTensorNames) {\n const outputSymbolicTensors = pyListRepeat(null, symbolicTensorNames.length);\n let outputsRemaining = symbolicTensorNames.length;\n for (const layer of this.layers) {\n const layerOutputs = Array.isArray(layer.output) ? 
layer.output : [layer.output];\n const layerOutputNames = layerOutputs.map(output => output.name);\n for (let i = 0; i < symbolicTensorNames.length; ++i) {\n const index = layerOutputNames.indexOf(symbolicTensorNames[i]);\n if (index !== -1) {\n outputSymbolicTensors[i] = layerOutputs[index];\n outputsRemaining--;\n }\n if (outputsRemaining === 0) {\n break;\n }\n }\n if (outputsRemaining === 0) {\n break;\n }\n }\n if (outputsRemaining > 0) {\n const remainingNames = [];\n outputSymbolicTensors.forEach((tensor, i) => {\n if (tensor == null) {\n remainingNames.push(symbolicTensorNames[i]);\n }\n });\n throw new ValueError(`Cannot find SymbolicTensors for output name(s): ` +\n `${JSON.stringify(remainingNames)}`);\n }\n return outputSymbolicTensors;\n }\n /**\n * Helper method to loop over some data in batches.\n *\n * Porting Note: Not using the functional approach in the Python equivalent\n * due to the imperative backend.\n * Porting Note: Does not support step mode currently.\n *\n * @param ins: input data\n * @param batchSize: integer batch size.\n * @param verbose: verbosity model\n * @returns: Predictions as `tf.Tensor` (if a single output) or an `Array` of\n * `tf.Tensor` (if multipe outputs).\n */\n predictLoop(ins, batchSize = 32, verbose = false) {\n return tfc.tidy(() => {\n const numSamples = this.checkNumSamples(ins);\n if (verbose) {\n throw new NotImplementedError('Verbose predictLoop() is not implemented yet.');\n }\n // Sample-based predictions.\n // Porting Note: Tensor currently does not support sliced assignments as\n // in numpy, e.g., x[1:3] = y. Therefore we use concatenation while\n // iterating over the batches.\n const batches = makeBatches(numSamples, batchSize);\n const outsBatches = this.outputs.map(output => []);\n // TODO(cais): Can the scope() be pushed down inside the for loop?\n for (let batchIndex = 0; batchIndex < batches.length; ++batchIndex) {\n const batchOuts = tfc.tidy(() => {\n const batchStart = batches[batchIndex][0];\n const batchEnd = batches[batchIndex][1];\n // TODO(cais): Take care of the case of the last element is a flag for\n // training/test.\n const insBatch = sliceArrays(ins, batchStart, batchEnd);\n // Construct the feeds for execute();\n const feeds = [];\n if (Array.isArray(insBatch)) {\n for (let i = 0; i < insBatch.length; ++i) {\n feeds.push({ key: this.inputs[i], value: insBatch[i] });\n }\n }\n else {\n feeds.push({ key: this.inputs[0], value: insBatch });\n }\n const feedDict = new FeedDict(feeds);\n return execute(this.outputs, feedDict);\n });\n batchOuts.forEach((batchOut, i) => outsBatches[i].push(batchOut));\n }\n return singletonOrArray(outsBatches.map(batches => tfc.concat(batches, 0)));\n });\n }\n /**\n * Generates output predictions for the input samples.\n *\n * Computation is done in batches.\n *\n * Note: the \"step\" mode of predict() is currently not supported.\n * This is because the TensorFlow.js core backend is imperative only.\n *\n * ```js\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 1, inputShape: [10]})]\n * });\n * model.predict(tf.ones([8, 10]), {batchSize: 4}).print();\n * ```\n *\n * @param x The input data, as a Tensor, or an `Array` of `tf.Tensor`s if\n * the model has multiple inputs.\n * @param args A `ModelPredictArgs` object containing optional fields.\n *\n * @return Prediction results as a `tf.Tensor`(s).\n *\n * @exception ValueError In case of mismatch between the provided input data\n * and the model's expectations, or in case a stateful model receives a\n * number 
of samples that is not a multiple of the batch size.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n predict(x, args = {}) {\n const xsRank2OrHigher = ensureTensorsRank2OrHigher(x);\n checkInputData(xsRank2OrHigher, this.inputNames, this.feedInputShapes, false);\n try {\n // TODO(cais): Take care of stateful models.\n // if (this.stateful) ...\n // TODO(cais): Take care of the learning_phase boolean flag.\n // if (this.useLearningPhase) ...\n const batchSize = args.batchSize == null ? 32 : args.batchSize;\n checkBatchSize(batchSize);\n return this.predictLoop(xsRank2OrHigher, batchSize);\n }\n finally {\n disposeNewTensors(xsRank2OrHigher, x);\n }\n }\n /**\n * Returns predictions for a single batch of samples.\n *\n * ```js\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 1, inputShape: [10]})]\n * });\n * model.predictOnBatch(tf.ones([8, 10])).print();\n * ```\n * @param x: Input samples, as a Tensor (for models with exactly one\n * input) or an array of Tensors (for models with more than one input).\n * @return Tensor(s) of predictions\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n predictOnBatch(x) {\n checkInputData(x, this.inputNames, this.feedInputShapes, true);\n // TODO(cais): Take care of the learning_phase boolean flag.\n // if (this.useLearningPhase) ...\n const batchSize = (Array.isArray(x) ? x[0] : x).shape[0];\n return this.predictLoop(x, batchSize);\n }\n standardizeUserDataXY(x, y, checkBatchAxis = true, batchSize) {\n // TODO(cais): Add sampleWeight, classWeight\n if (this.optimizer_ == null) {\n throw new RuntimeError('You must compile a model before training/testing. Use ' +\n 'LayersModel.compile(modelCompileArgs).');\n }\n const outputShapes = [];\n for (let i = 0; i < this.feedOutputShapes.length; ++i) {\n const outputShape = this.feedOutputShapes[i];\n const lossFn = this.feedLossFns[i];\n if (lossFn === losses.sparseCategoricalCrossentropy) {\n outputShapes.push(outputShape.slice(0, outputShape.length - 1).concat([1]));\n }\n else {\n // Porting Note: Because of strong typing `lossFn` must be a function.\n outputShapes.push(outputShape);\n }\n }\n x = standardizeInputData(x, this.feedInputNames, this.feedInputShapes, false, 'input');\n y = standardizeInputData(y, this.feedOutputNames, outputShapes, false, 'target');\n // TODO(cais): Standardize sampleWeights & classWeights.\n checkArrayLengths(x, y, null);\n // TODO(cais): Check sampleWeights as well.\n checkLossAndTargetCompatibility(y, this.feedLossFns, this.feedOutputShapes);\n if (this.stateful && batchSize != null && batchSize > 0) {\n if (x[0].shape[0] % batchSize !== 0) {\n throw new ValueError(`In a stateful network, you should only pass inputs with a ` +\n `number of samples that is divisible by the batch size ` +\n `${batchSize}. 
Found: ${x[0].shape[0]} sample(s).`);\n }\n }\n return [x, y];\n }\n async standardizeUserData(x, y, sampleWeight, classWeight, checkBatchAxis = true, batchSize) {\n const [standardXs, standardYs] = this.standardizeUserDataXY(x, y, checkBatchAxis, batchSize);\n // TODO(cais): Handle sampleWeights.\n if (sampleWeight != null) {\n throw new Error('sample weight is not supported yet.');\n }\n let standardSampleWeights = null;\n if (classWeight != null) {\n const classWeights = standardizeClassWeights(classWeight, this.outputNames);\n standardSampleWeights = [];\n for (let i = 0; i < classWeights.length; ++i) {\n standardSampleWeights.push(await standardizeWeights(standardYs[i], null, classWeights[i]));\n }\n }\n // TODO(cais): Deal with the case of model.stateful == true.\n return [standardXs, standardYs, standardSampleWeights];\n }\n /**\n * Loop over some test data in batches.\n * @param f A Function returning a list of tensors.\n * @param ins Array of tensors to be fed to `f`.\n * @param batchSize Integer batch size or `null` / `undefined`.\n * @param verbose verbosity mode.\n * @param steps Total number of steps (batches of samples) before\n * declaring test finished. Ignored with the default value of `null` /\n * `undefined`.\n * @returns Array of Scalars.\n */\n testLoop(f, ins, batchSize, verbose = 0, steps) {\n return tfc.tidy(() => {\n const numSamples = this.checkNumSamples(ins, batchSize, steps, 'steps');\n const outs = [];\n if (verbose > 0) {\n throw new NotImplementedError('Verbose mode is not implemented yet.');\n }\n // TODO(cais): Use `indicesForConversionToDense' to prevent slow down.\n if (steps != null) {\n throw new NotImplementedError('steps mode in testLoop() is not implemented yet');\n }\n else {\n const batches = makeBatches(numSamples, batchSize);\n const indexArray = tensor1d(range(0, numSamples));\n for (let batchIndex = 0; batchIndex < batches.length; ++batchIndex) {\n const batchStart = batches[batchIndex][0];\n const batchEnd = batches[batchIndex][1];\n const batchIds = K.sliceAlongFirstAxis(indexArray, batchStart, batchEnd - batchStart);\n // TODO(cais): In ins, train flag can be a number, instead of an\n // Tensor? Do we need to handle this in tfjs-layers?\n const insBatch = sliceArraysByIndices(ins, batchIds);\n const batchOuts = f(insBatch);\n if (batchIndex === 0) {\n for (let i = 0; i < batchOuts.length; ++i) {\n outs.push(scalar(0));\n }\n }\n for (let i = 0; i < batchOuts.length; ++i) {\n const batchOut = batchOuts[i];\n outs[i] =\n tfc.add(outs[i], tfc.mul(batchEnd - batchStart, batchOut));\n }\n }\n for (let i = 0; i < outs.length; ++i) {\n outs[i] = tfc.div(outs[i], numSamples);\n }\n }\n return outs;\n });\n }\n getDedupedMetricsNames() {\n const outLabels = this.metricsNames;\n // Rename duplicated metrics names (can happen with an output layer\n // shared among multiple dataflows).\n const dedupedOutLabels = [];\n for (let i = 0; i < outLabels.length; ++i) {\n const label = outLabels[i];\n let newLabel = label;\n if (count(outLabels, label) > 1) {\n const dupIndex = count(outLabels.slice(0, i), label);\n newLabel += `_${dupIndex}`;\n }\n dedupedOutLabels.push(newLabel);\n }\n return dedupedOutLabels;\n }\n /**\n * Creates a function that performs the following actions:\n *\n * 1. computes the losses\n * 2. sums them to get the total loss\n * 3. call the optimizer computes the gradients of the LayersModel's\n * trainable weights w.r.t. the total loss and update the variables\n * 4. calculates the metrics\n * 5. 
returns the values of the losses and metrics.\n */\n makeTrainFunction() {\n return (data) => {\n const lossValues = [];\n const inputs = data.slice(0, this.inputs.length);\n const targets = data.slice(this.inputs.length, this.inputs.length + this.outputs.length);\n const sampleWeights = data.slice(this.inputs.length + this.outputs.length, this.inputs.length + this.outputs.length * 2);\n const metricsValues = [];\n // Create a function that computes the total loss based on the\n // inputs. This function is used for obtaining gradients through\n // backprop.\n const totalLossFunction = () => {\n const feeds = [];\n for (let i = 0; i < this.inputs.length; ++i) {\n feeds.push({ key: this.inputs[i], value: inputs[i] });\n }\n const feedDict = new FeedDict(feeds);\n const outputs = execute(this.outputs, feedDict, { 'training': true });\n // TODO(cais): Take care of the case of multiple outputs from a\n // single layer?\n let totalLoss;\n for (let i = 0; i < this.lossFunctions.length; ++i) {\n const lossFunction = this.lossFunctions[i];\n let loss = lossFunction(targets[i], outputs[i]);\n if (sampleWeights[i] != null) {\n loss = computeWeightedLoss(loss, sampleWeights[i]);\n }\n // TODO(cais): push Scalar instead.\n const meanLoss = tfc.mean(loss);\n // TODO(cais): Use a scope() instead, to avoid ownership.\n lossValues.push(meanLoss);\n if (i === 0) {\n totalLoss = loss;\n }\n else {\n totalLoss = tfc.add(totalLoss, loss);\n }\n }\n // Compute the metrics.\n // TODO(cais): These should probably be calculated outside\n // totalLossFunction to benefit speed?\n for (let i = 0; i < this.metricsTensors.length; ++i) {\n let weightedMetric;\n if (this.outputs.length > 1 && i < this.outputs.length) {\n weightedMetric = lossValues[i];\n }\n else {\n const metric = this.metricsTensors[i][0];\n const outputIndex = this.metricsTensors[i][1];\n weightedMetric =\n tfc.mean(metric(targets[outputIndex], outputs[outputIndex]));\n }\n tfc.keep(weightedMetric);\n // TODO(cais): Use a scope() instead, to avoid ownership.\n metricsValues.push(weightedMetric);\n }\n totalLoss = tfc.mean(totalLoss);\n // Add regularizer penalties.\n this.calculateLosses().forEach(regularizerLoss => {\n totalLoss = tfc.add(totalLoss, regularizerLoss);\n });\n return totalLoss;\n };\n const variables = this.collectedTrainableWeights.map(param => param.read());\n const returnCost = true;\n const totalLossValue = this.optimizer_.minimize(totalLossFunction, returnCost, variables);\n return [totalLossValue].concat(metricsValues);\n };\n }\n /**\n * Create a function which, when invoked with an array of `tf.Tensor`s as a\n * batch of inputs, returns the prespecified loss and metrics of the model\n * under the batch of input data.\n */\n makeTestFunction() {\n this.testFunction = (data) => {\n return tfc.tidy(() => {\n const valOutputs = [];\n let totalLoss;\n const inputs = data.slice(0, this.inputs.length);\n const targets = data.slice(this.inputs.length, this.inputs.length + this.outputs.length);\n const feeds = [];\n for (let i = 0; i < this.inputs.length; ++i) {\n feeds.push({ key: this.inputs[i], value: inputs[i] });\n }\n const feedDict = new FeedDict(feeds);\n const outputs = execute(this.outputs, feedDict);\n // Compute total loss.\n for (let i = 0; i < this.lossFunctions.length; ++i) {\n const lossFunction = this.lossFunctions[i];\n // TODO(cais): Add sample weighting and replace the simple\n // averaging.\n const loss = tfc.mean(lossFunction(targets[i], outputs[i]));\n if (i === 0) {\n totalLoss = loss;\n }\n else {\n totalLoss = 
tfc.add(totalLoss, loss);\n }\n valOutputs.push(totalLoss);\n }\n // Compute the metrics.\n for (let i = 0; i < this.metricsTensors.length; ++i) {\n const metric = this.metricsTensors[i][0];\n const outputIndex = this.metricsTensors[i][1];\n // TODO(cais): Replace K.mean() with a proper weighting function.\n const meanMetric = tfc.mean(metric(targets[outputIndex], outputs[outputIndex]));\n valOutputs.push(meanMetric);\n }\n return valOutputs;\n });\n };\n }\n /**\n * Trains the model for a fixed number of epochs (iterations on a\n * dataset).\n *\n * ```js\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 1, inputShape: [10]})]\n * });\n * model.compile({optimizer: 'sgd', loss: 'meanSquaredError'});\n * for (let i = 1; i < 5 ; ++i) {\n * const h = await model.fit(tf.ones([8, 10]), tf.ones([8, 1]), {\n * batchSize: 4,\n * epochs: 3\n * });\n * console.log(\"Loss after Epoch \" + i + \" : \" + h.history.loss[0]);\n * }\n * ```\n *\n * @param x `tf.Tensor` of training data, or an array of `tf.Tensor`s if the\n * model has multiple inputs. If all inputs in the model are named, you\n * can also pass a dictionary mapping input names to `tf.Tensor`s.\n * @param y `tf.Tensor` of target (label) data, or an array of `tf.Tensor`s if\n * the model has multiple outputs. If all outputs in the model are named,\n * you can also pass a dictionary mapping output names to `tf.Tensor`s.\n * @param args A `ModelFitArgs`, containing optional fields.\n *\n * @return A `History` instance. Its `history` attribute contains all\n * information collected during training.\n *\n * @exception ValueError In case of mismatch between the provided input\n * data and what the model expects.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n async fit(x, y, args = {}) {\n return fitTensors(this, x, y, args);\n }\n // TODO(cais): Add code snippet below when it's possible to instantiate\n // actual dataset objects.\n /**\n * Trains the model using a dataset object.\n *\n * @param dataset A dataset object. Its `iterator()` method is expected\n * to generate a dataset iterator object, the `next()` method of which\n * is expected to produce data batches for training. The return value\n * of the `next()` call ought to contain a boolean `done` field and a\n * `value` field. The `value` field is expected to be an array of two\n * `tf.Tensor`s or an array of two nested `tf.Tensor` structures. The former\n * case is for models with exactly one input and one output (e.g..\n * a sequential model). The latter case is for models with multiple\n * inputs and/or multiple outputs.\n * Of the two items in the array, the first is the input feature(s) and\n * the second is the output target(s).\n * @param args A `ModelFitDatasetArgs`, containing optional fields.\n *\n * @return A `History` instance. Its `history` attribute contains all\n * information collected during training.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n async fitDataset(dataset, args) {\n return fitDataset(this, dataset, args);\n }\n /**\n * Runs a single gradient update on a single batch of data.\n *\n * This method differs from `fit()` and `fitDataset()` in the following\n * regards:\n * - It operates on exactly one batch of data.\n * - It returns only the loss and matric values, instead of\n * returning the batch-by-batch loss and metric values.\n * - It doesn't support fine-grained options such as verbosity and\n * callbacks.\n *\n * @param x Input data. 
It could be one of the following:\n * - A `tf.Tensor`, or an Array of `tf.Tensor`s (in case the model has\n * multiple inputs).\n * - An Object mapping input names to corresponding `tf.Tensor` (if the\n * model has named inputs).\n * @param y Target darta. It could be either a `tf.Tensor` a multiple\n * `tf.Tensor`s. It should be consistent with `x`.\n * @returns Training loss or losses (in case the model has\n * multiple outputs), along with metrics (if any), as numbers.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n async trainOnBatch(x, y) {\n // TODO(cais): Support sampleWeight and classWeight.\n // TODO(cais): Support Dataset objects.\n const standardizeOut = await this.standardizeUserData(x, y);\n const inputs = standardizeOut[0];\n const targets = standardizeOut[1];\n const trainFunction = this.makeTrainFunction();\n const losses = trainFunction(inputs.concat(targets));\n const lossValues = [];\n for (const loss of losses) {\n const v = await loss.data();\n lossValues.push(v[0]);\n }\n tfc.dispose(losses);\n return singletonOrArray(lossValues);\n }\n /**\n * Extract weight values of the model.\n *\n * @param config: An instance of `io.SaveConfig`, which specifies\n * model-saving options such as whether only trainable weights are to be\n * saved.\n * @returns A `NamedTensorMap` mapping original weight names (i.e.,\n * non-uniqueified weight names) to their values.\n */\n getNamedWeights(config) {\n const namedWeights = [];\n const trainableOnly = config != null && config.trainableOnly;\n const weights = trainableOnly ? this.trainableWeights : this.weights;\n const weightValues = this.getWeights(trainableOnly);\n for (let i = 0; i < weights.length; ++i) {\n if (trainableOnly && !weights[i].trainable) {\n // Optionally skip non-trainable weights.\n continue;\n }\n namedWeights.push({ name: weights[i].originalName, tensor: weightValues[i] });\n }\n return namedWeights;\n }\n /**\n * Setter used for force stopping of LayersModel.fit() (i.e., training).\n *\n * Example:\n *\n * ```js\n * const input = tf.input({shape: [10]});\n * const output = tf.layers.dense({units: 1}).apply(input);\n * const model = tf.model({inputs: [input], outputs: [output]});\n * model.compile({loss: 'meanSquaredError', optimizer: 'sgd'});\n * const xs = tf.ones([8, 10]);\n * const ys = tf.zeros([8, 1]);\n *\n * const history = await model.fit(xs, ys, {\n * epochs: 10,\n * callbacks: {\n * onEpochEnd: async (epoch, logs) => {\n * if (epoch === 2) {\n * model.stopTraining = true;\n * }\n * }\n * }\n * });\n *\n * // There should be only 3 values in the loss array, instead of 10\n * values,\n * // due to the stopping after 3 epochs.\n * console.log(history.history.loss);\n * ```\n */\n set stopTraining(stop) {\n this.stopTraining_ = stop;\n }\n get stopTraining() {\n return this.stopTraining_;\n }\n get optimizer() {\n return this.optimizer_;\n }\n set optimizer(optimizer) {\n if (this.optimizer_ !== optimizer) {\n this.optimizer_ = optimizer;\n this.isOptimizerOwned = false;\n }\n }\n dispose() {\n const result = super.dispose();\n if (result.refCountAfterDispose === 0 && this.optimizer != null &&\n this.isOptimizerOwned) {\n const numTensorsBeforeOptmizerDisposal = tfc.memory().numTensors;\n this.optimizer_.dispose();\n result.numDisposedVariables +=\n numTensorsBeforeOptmizerDisposal - tfc.memory().numTensors;\n }\n return result;\n }\n getLossIdentifiers() {\n let lossNames;\n if (typeof this.loss === 'string') {\n lossNames = toSnakeCase(this.loss);\n }\n else if (Array.isArray(this.loss)) {\n 
for (const loss of this.loss) {\n if (typeof loss !== 'string') {\n throw new Error('Serialization of non-string loss is not supported.');\n }\n }\n lossNames = this.loss.map(name => toSnakeCase(name));\n }\n else {\n const outputNames = Object.keys(this.loss);\n lossNames = {};\n const losses = this.loss;\n for (const outputName of outputNames) {\n if (typeof losses[outputName] === 'string') {\n lossNames[outputName] =\n toSnakeCase(losses[outputName]);\n }\n else {\n throw new Error('Serialization of non-string loss is not supported.');\n }\n }\n }\n return lossNames;\n }\n getMetricIdentifiers() {\n if (typeof this.metrics === 'string' ||\n typeof this.metrics === 'function') {\n return [toSnakeCase(Metrics.getLossOrMetricName(this.metrics))];\n }\n else if (Array.isArray(this.metrics)) {\n return this.metrics.map(metric => toSnakeCase(Metrics.getLossOrMetricName(metric)));\n }\n else {\n const metricsIdentifiers = {};\n for (const key in this.metrics) {\n metricsIdentifiers[key] =\n toSnakeCase(Metrics.getLossOrMetricName(this.metrics[key]));\n }\n return metricsIdentifiers;\n }\n }\n getTrainingConfig() {\n return {\n loss: this.getLossIdentifiers(),\n metrics: this.getMetricIdentifiers(),\n optimizer_config: {\n class_name: this.optimizer.getClassName(),\n config: this.optimizer.getConfig()\n }\n };\n // TODO(cais): Add weight_metrics when they are supported.\n // TODO(cais): Add sample_weight_mode when it's supported.\n // TODO(cais): Add loss_weights when it's supported.\n }\n loadTrainingConfig(trainingConfig) {\n if (trainingConfig.weighted_metrics != null) {\n throw new Error('Loading weight_metrics is not supported yet.');\n }\n if (trainingConfig.loss_weights != null) {\n throw new Error('Loading loss_weights is not supported yet.');\n }\n if (trainingConfig.sample_weight_mode != null) {\n throw new Error('Loading sample_weight_mode is not supported yet.');\n }\n const tsConfig = convertPythonicToTs(trainingConfig.optimizer_config);\n const optimizer = deserialize(tsConfig);\n let loss;\n if (typeof trainingConfig.loss === 'string') {\n loss = toCamelCase(trainingConfig.loss);\n }\n else if (Array.isArray(trainingConfig.loss)) {\n loss = trainingConfig.loss.map(lossEntry => toCamelCase(lossEntry));\n }\n else if (trainingConfig.loss != null) {\n loss = {};\n for (const key in trainingConfig.loss) {\n loss[key] = toCamelCase(trainingConfig.loss[key]);\n }\n }\n let metrics;\n if (Array.isArray(trainingConfig.metrics)) {\n metrics = trainingConfig.metrics.map(metric => toCamelCase(metric));\n }\n else if (trainingConfig.metrics != null) {\n metrics = {};\n for (const key in trainingConfig.metrics) {\n metrics[key] = toCamelCase(trainingConfig.metrics[key]);\n }\n }\n this.compile({ loss, metrics, optimizer });\n }\n /**\n * Save the configuration and/or weights of the LayersModel.\n *\n * An `IOHandler` is an object that has a `save` method of the proper\n * signature defined. The `save` method manages the storing or\n * transmission of serialized data (\"artifacts\") that represent the\n * model's topology and weights onto or via a specific medium, such as\n * file downloads, local storage, IndexedDB in the web browser and HTTP\n * requests to a server. TensorFlow.js provides `IOHandler`\n * implementations for a number of frequently used saving mediums, such as\n * `tf.io.browserDownloads` and `tf.io.browserLocalStorage`. 
See `tf.io`\n * for more details.\n *\n * This method also allows you to refer to certain types of `IOHandler`s\n * as URL-like string shortcuts, such as 'localstorage://' and\n * 'indexeddb://'.\n *\n * Example 1: Save `model`'s topology and weights to browser [local\n * storage](https://developer.mozilla.org/en-US/docs/Web/API/Window/localStorage);\n * then load it back.\n *\n * ```js\n * const model = tf.sequential(\n * {layers: [tf.layers.dense({units: 1, inputShape: [3]})]});\n * console.log('Prediction from original model:');\n * model.predict(tf.ones([1, 3])).print();\n *\n * const saveResults = await model.save('localstorage://my-model-1');\n *\n * const loadedModel = await tf.loadLayersModel('localstorage://my-model-1');\n * console.log('Prediction from loaded model:');\n * loadedModel.predict(tf.ones([1, 3])).print();\n * ```\n *\n * Example 2. Saving `model`'s topology and weights to browser\n * [IndexedDB](https://developer.mozilla.org/en-US/docs/Web/API/IndexedDB_API);\n * then load it back.\n *\n * ```js\n * const model = tf.sequential(\n * {layers: [tf.layers.dense({units: 1, inputShape: [3]})]});\n * console.log('Prediction from original model:');\n * model.predict(tf.ones([1, 3])).print();\n *\n * const saveResults = await model.save('indexeddb://my-model-1');\n *\n * const loadedModel = await tf.loadLayersModel('indexeddb://my-model-1');\n * console.log('Prediction from loaded model:');\n * loadedModel.predict(tf.ones([1, 3])).print();\n * ```\n *\n * Example 3. Saving `model`'s topology and weights as two files\n * (`my-model-1.json` and `my-model-1.weights.bin`) downloaded from\n * browser.\n *\n * ```js\n * const model = tf.sequential(\n * {layers: [tf.layers.dense({units: 1, inputShape: [3]})]});\n * const saveResults = await model.save('downloads://my-model-1');\n * ```\n *\n * Example 4. 
Send `model`'s topology and weights to an HTTP server.\n * See the documentation of `tf.io.http` for more details\n * including specifying request parameters and implementation of the\n * server.\n *\n * ```js\n * const model = tf.sequential(\n * {layers: [tf.layers.dense({units: 1, inputShape: [3]})]});\n * const saveResults = await model.save('http://my-server/model/upload');\n * ```\n *\n * @param handlerOrURL An instance of `IOHandler` or a URL-like,\n * scheme-based string shortcut for `IOHandler`.\n * @param config Options for saving the model.\n * @returns A `Promise` of `SaveResult`, which summarizes the result of\n * the saving, such as byte sizes of the saved artifacts for the model's\n * topology and weight values.\n *\n * @doc {heading: 'Models', subheading: 'Classes', ignoreCI: true}\n */\n async save(handlerOrURL, config) {\n if (typeof handlerOrURL === 'string') {\n const handlers = io.getSaveHandlers(handlerOrURL);\n if (handlers.length === 0) {\n throw new ValueError(`Cannot find any save handlers for URL '${handlerOrURL}'`);\n }\n else if (handlers.length > 1) {\n throw new ValueError(`Found more than one (${handlers.length}) save handlers for ` +\n `URL '${handlerOrURL}'`);\n }\n handlerOrURL = handlers[0];\n }\n if (handlerOrURL.save == null) {\n throw new ValueError('LayersModel.save() cannot proceed because the IOHandler ' +\n 'provided does not have the `save` attribute defined.');\n }\n const weightDataAndSpecs = await io.encodeWeights(this.getNamedWeights(config));\n const returnString = false;\n const unusedArg = null;\n const modelConfig = this.toJSON(unusedArg, returnString);\n const modelArtifacts = {\n modelTopology: modelConfig,\n format: LAYERS_MODEL_FORMAT_NAME,\n generatedBy: `TensorFlow.js tfjs-layers v${version}`,\n convertedBy: null,\n };\n const includeOptimizer = config == null ? false : config.includeOptimizer;\n if (includeOptimizer && this.optimizer != null) {\n modelArtifacts.trainingConfig = this.getTrainingConfig();\n const weightType = 'optimizer';\n const { data: optimizerWeightData, specs: optimizerWeightSpecs } = await io.encodeWeights(await this.optimizer.getWeights(), weightType);\n weightDataAndSpecs.specs.push(...optimizerWeightSpecs);\n weightDataAndSpecs.data = io.concatenateArrayBuffers([weightDataAndSpecs.data, optimizerWeightData]);\n }\n if (this.userDefinedMetadata != null) {\n // Check serialized size of user-defined metadata.\n const checkSize = true;\n checkUserDefinedMetadata(this.userDefinedMetadata, this.name, checkSize);\n modelArtifacts.userDefinedMetadata = this.userDefinedMetadata;\n }\n modelArtifacts.weightData = weightDataAndSpecs.data;\n modelArtifacts.weightSpecs = weightDataAndSpecs.specs;\n return handlerOrURL.save(modelArtifacts);\n }\n /**\n * Set user-defined metadata.\n *\n * The set metadata will be serialized together with the topology\n * and weights of the model during `save()` calls.\n *\n * @param setUserDefinedMetadata\n */\n setUserDefinedMetadata(userDefinedMetadata) {\n checkUserDefinedMetadata(userDefinedMetadata, this.name);\n this.userDefinedMetadata = userDefinedMetadata;\n }\n /**\n * Get user-defined metadata.\n *\n * The metadata is supplied via one of the two routes:\n * 1. By calling `setUserDefinedMetadata()`.\n * 2. 
Loaded during model loading (if the model is constructed\n * via `tf.loadLayersModel()`.)\n *\n * If no user-defined metadata is available from either of the\n * two routes, this function will return `undefined`.\n */\n getUserDefinedMetadata() {\n return this.userDefinedMetadata;\n }\n}\n// The class name is 'Model' rather than 'LayersModel' for backwards\n// compatibility since this class name shows up in the serialization format.\n/** @nocollapse */\nLayersModel.className = 'Model';\nserialization.registerClass(LayersModel);\n/**\n * A `tf.Functional` is an alias to `tf.LayersModel`.\n *\n * See also:\n * `tf.LayersModel`, `tf.Sequential`, `tf.loadLayersModel`.\n */\n/** @doc {heading: 'Models', subheading: 'Classes'} */\nexport class Functional extends LayersModel {\n}\nFunctional.className = 'Functional';\nserialization.registerClass(Functional);\n//# sourceMappingURL=training.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* Original source keras/models.py */\nimport { dispose, io, serialization, util } from '@tensorflow/tfjs-core';\nimport { getUid } from './backend/state';\nimport { Input } from './engine/input_layer';\nimport { getSourceInputs, Node } from './engine/topology';\nimport { LayersModel } from './engine/training';\nimport { NotImplementedError, RuntimeError, ValueError } from './errors';\nimport { deserialize } from './layers/serialization';\nimport * as generic_utils from './utils/generic_utils';\nimport { convertPythonicToTs } from './utils/serialization_utils';\nimport { getExactlyOneShape } from './utils/types_utils';\n/**\n * Parses a JSON model configuration file and returns a model instance.\n *\n * ```js\n * // This example shows how to serialize a model using `toJSON()` and\n * // deserialize it as another model using `tf.models.modelFromJSON()`.\n * // Note: this example serializes and deserializes only the topology\n * // of the model; the weights of the loaded model will be different\n * // from those of the the original model, due to random weight\n * // initialization.\n * // To load the topology and weights of a model, use `tf.loadLayersModel()`.\n * const model1 = tf.sequential();\n * model1.add(tf.layers.repeatVector({inputShape: [2], n: 4}));\n * // Serialize `model1` as a JSON object.\n * const model1JSON = model1.toJSON(null, false);\n * model1.summary();\n *\n * const model2 = await tf.models.modelFromJSON(model1JSON);\n * model2.summary();\n * ```\n *\n * @param modelAndWeightsConfig JSON object or string encoding a model and\n * weights configuration. 
It can also be only the topology JSON of the\n * model, in which case the weights will not be loaded.\n * @param custom_objects Optional dictionary mapping names\n * (strings) to custom classes or functions to be\n * considered during deserialization.\n * @returns A TensorFlow.js Layers `tf.LayersModel` instance (uncompiled).\n */\nexport async function modelFromJSON(modelAndWeightsConfig, customObjects) {\n if (!('modelTopology' in modelAndWeightsConfig)) {\n modelAndWeightsConfig = { modelTopology: modelAndWeightsConfig };\n }\n modelAndWeightsConfig = modelAndWeightsConfig;\n let modelTopology = modelAndWeightsConfig.modelTopology;\n if (modelTopology['model_config'] != null) {\n // If the model-topology JSON contains a 'model_config' field, then it is\n // a full model JSON (e.g., from `keras.Model.save()`), which contains\n // not only the model's architecture in its 'model_config' field, but\n // additional information such as the model's optimizer. We use only the\n // 'model_config' field currently.\n modelTopology = modelTopology['model_config'];\n }\n const tsConfig = convertPythonicToTs(modelTopology);\n const model = deserialize(tsConfig, customObjects);\n if (modelAndWeightsConfig.weightsManifest != null) {\n // Load the weight values keyed by the original tensor names in the model\n // file that was loaded. These should match the keys of the weight\n // manifest.\n const weightValues = await io.loadWeights(modelAndWeightsConfig.weightsManifest, modelAndWeightsConfig.pathPrefix, model.weights.map(weight => weight.originalName));\n // Map the weights to the unique tensor names generated during model loading\n const uniqueWeightValues = {};\n for (const weight of model.weights) {\n uniqueWeightValues[weight.originalName] =\n weightValues[weight.originalName];\n }\n model.loadWeights(uniqueWeightValues);\n // Dispose temporary weight values.\n dispose(weightValues);\n }\n return model;\n}\n/**\n * Load a model, including its topology and optionally weights. See the\n * Tutorial named \"How to import a Keras Model\" for usage examples.\n *\n * Example 1: Save `model`'s topology and weights to browser [local\n * storage](https://developer.mozilla.org/en-US/docs/Web/API/Window/localStorage);\n * then load it back.\n *\n * ```js\n * const model = tf.sequential(\n * {layers: [tf.layers.dense({units: 1, inputShape: [3]})]});\n * console.log('Prediction from original model:');\n * model.predict(tf.ones([1, 3])).print();\n *\n * const saveResults = await model.save('localstorage://my-model-1');\n *\n * const loadedModel = await tf.loadLayersModel('localstorage://my-model-1');\n * console.log('Prediction from loaded model:');\n * loadedModel.predict(tf.ones([1, 3])).print();\n * ```\n *\n * Example 2. Saving `model`'s topology and weights to browser\n * [IndexedDB](https://developer.mozilla.org/en-US/docs/Web/API/IndexedDB_API);\n * then load it back.\n *\n * ```js\n * const model = tf.sequential(\n * {layers: [tf.layers.dense({units: 1, inputShape: [3]})]});\n * console.log('Prediction from original model:');\n * model.predict(tf.ones([1, 3])).print();\n *\n * const saveResults = await model.save('indexeddb://my-model-1');\n *\n * const loadedModel = await tf.loadLayersModel('indexeddb://my-model-1');\n * console.log('Prediction from loaded model:');\n * loadedModel.predict(tf.ones([1, 3])).print();\n * ```\n *\n * Example 3. 
Load a model from user-selected files from HTML\n * [file input\n * elements](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/input/file).\n *\n * ```js\n * // Note: this code snippet will not work without the HTML elements in the\n * // page\n * const jsonUpload = document.getElementById('json-upload');\n * const weightsUpload = document.getElementById('weights-upload');\n *\n * const model = await tf.loadLayersModel(\n * tf.io.browserFiles([jsonUpload.files[0], weightsUpload.files[0]]));\n * ```\n *\n * Example 4. Load a model from an HTTP server.\n *\n * ```js\n * const model = await\n * tf.loadLayersModel('https://storage.googleapis.com/tfjs-models/tfjs/iris_v1/model.json');\n * model.summary();\n * ```\n *\n * @param pathOrIOHandler Can be either of the two formats\n * 1. A string path to the `ModelAndWeightsConfig` JSON describing\n * the model in the canonical TensorFlow.js format. This path will be\n * interpreted as a relative HTTP path, to which `fetch` will be used to\n * request the model topology and weight manifest JSON.\n * The content of the JSON file is assumed to be a JSON object with the\n * following fields and values:\n * - 'modelTopology': A JSON object that can be either of:\n * 1. a model architecture JSON consistent with the format of the return\n * value of `keras.Model.to_json()`\n * 2. a full model JSON in the format of `keras.models.save_model()`.\n * - 'weightsManifest': A TensorFlow.js weights manifest.\n * See the Python converter function `save_model()` for more details.\n * It is also assumed that model weights can be accessed from relative\n * paths described by the `paths` fields in weights manifest.\n * 2. An `tf.io.IOHandler` object that loads model artifacts with its `load`\n * method.\n * @param options Optional configuration arguments for the model loading,\n * including:\n * - `strict`: Require that the provided weights exactly match those required\n * by the layers. Default true. Passing false means that both extra\n * weights and missing weights will be silently ignored.\n * - `onProgress`: A progress callback of the form:\n * `(fraction: number) => void`. 
This callback can be used to monitor the\n * model-loading process.\n * @returns A `Promise` of `tf.LayersModel`, with the topology and weights\n * loaded.\n */\nexport async function loadLayersModelInternal(pathOrIOHandler, options) {\n if (options == null) {\n options = {};\n }\n if (typeof pathOrIOHandler === 'string') {\n const handlers = io.getLoadHandlers(pathOrIOHandler, options);\n if (handlers.length === 0) {\n // For backward compatibility: if no load handler can be found,\n // assume it is a relative http path.\n // TODO(cais): Reformat the args into a single `LoadOptions` once the core\n // is refactored.\n handlers.push(io.browserHTTPRequest(pathOrIOHandler, options));\n }\n else if (handlers.length > 1) {\n throw new ValueError(`Found more than one (${handlers.length}) load handlers for ` +\n `URL '${pathOrIOHandler}'`);\n }\n pathOrIOHandler = handlers[0];\n }\n return loadLayersModelFromIOHandler(pathOrIOHandler, undefined, options);\n}\n/**\n * Load a model and optionally its weights, using an IOHandler object.\n *\n * @param handler The instance of `IOHandler` to be used during the model\n * loading.\n * @param customObjects Any optional custom objects to be used during model\n * loading.\n * @param strict Whether the weight loading will be done in strict mode.\n * Default: `true`.\n */\nexport async function loadLayersModelFromIOHandler(handler, customObjects, options) {\n if (options == null) {\n options = {};\n }\n if (handler.load == null) {\n throw new ValueError('Cannot proceed with model loading because the IOHandler provided ' +\n 'does not have the `load` method implemented.');\n }\n const artifacts = await handler.load();\n let modelTopology = artifacts.modelTopology;\n if (modelTopology['model_config'] != null) {\n modelTopology = modelTopology['model_config'];\n }\n const strict = options.strict == null ? true : options.strict;\n // If weights are provided and the weight-loading mode is strict, use\n // fast weight initialization. This skips costly initializers such as\n // 'orthogonal' and saves unnecessary computation in cases where\n // the initialized weight values will immediately be overwritten by\n // loaded weight values.\n const fastWeightInit = artifacts.weightData != null && artifacts.weightSpecs != null && strict;\n const model = deserialize(convertPythonicToTs(modelTopology), customObjects, fastWeightInit);\n const trainingConfig = artifacts.trainingConfig;\n if (trainingConfig != null) {\n model.loadTrainingConfig(trainingConfig);\n }\n if (artifacts.userDefinedMetadata != null) {\n model.setUserDefinedMetadata(artifacts.userDefinedMetadata);\n }\n // If weightData is present, load the weights into the model.\n if (artifacts.weightData != null) {\n // Loading weights requires weightSpecs.\n if (artifacts.weightSpecs == null) {\n throw new ValueError('LayersModel artifacts contains weight data, but not weight specs. 
' +\n 'Therefore loading of weights cannot proceed.');\n }\n const { modelWeights, optimizerWeights } = decodeModelAndOptimizerWeights(artifacts.weightData, artifacts.weightSpecs);\n model.loadWeights(modelWeights, strict);\n if (model.optimizer != null && optimizerWeights.length > 0) {\n await model.optimizer.setWeights(optimizerWeights);\n }\n // Dispose temporary weight values.\n dispose(modelWeights);\n dispose(optimizerWeights.map(w => w.tensor));\n }\n return model;\n}\nfunction decodeModelAndOptimizerWeights(buffer, specs) {\n const name2Tensor = io.decodeWeights(buffer, specs);\n const modelWeights = {};\n const optimizerWeights = [];\n specs.forEach(spec => {\n if (spec.group === 'optimizer') {\n optimizerWeights.push({ name: spec.name, tensor: name2Tensor[spec.name] });\n }\n else {\n modelWeights[spec.name] = name2Tensor[spec.name];\n }\n });\n return { modelWeights, optimizerWeights };\n}\n/**\n * A model with a stack of layers, feeding linearly from one to the next.\n *\n * `tf.sequential` is a factory function that creates an instance of\n * `tf.Sequential`.\n *\n * ```js\n * // Define a model for linear regression.\n * const model = tf.sequential();\n * model.add(tf.layers.dense({units: 1, inputShape: [1]}));\n *\n * // Prepare the model for training: Specify the loss and the optimizer.\n * model.compile({loss: 'meanSquaredError', optimizer: 'sgd'});\n *\n * // Generate some synthetic data for training.\n * const xs = tf.tensor2d([1, 2, 3, 4], [4, 1]);\n * const ys = tf.tensor2d([1, 3, 5, 7], [4, 1]);\n *\n * // Train the model using the data then do inference on a data point the\n * // model hasn't seen:\n * await model.fit(xs, ys);\n * model.predict(tf.tensor2d([5], [1, 1])).print();\n * ```\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\nexport class Sequential extends LayersModel {\n constructor(args) {\n super({ inputs: [], outputs: [] });\n args = args || {};\n this.trainable = true;\n this.built = false;\n // Set model name.\n this.name = (args.name != null) ? 
args.name : getUid('sequential_');\n // Add to the model any layers passed to the constructor.\n if (args.layers != null) {\n for (const layer of args.layers) {\n this.add(layer);\n }\n }\n }\n // Helper function to Sequential.add Throws if the new output shape will be\n // invalid.\n checkShape(layer) {\n const shape = layer.inboundNodes[0].outputTensors[0].shape;\n if (shape.some(x => x < 0)) {\n throw new ValueError('Negative dimension size caused by adding layer ' +\n `${layer.name} with input shape [` +\n `${layer.inboundNodes[0].inputTensors[0].shape}]`);\n }\n }\n /**\n * Adds a layer instance on top of the layer stack.\n *\n * ```js\n * const model = tf.sequential();\n * model.add(tf.layers.dense({units: 8, inputShape: [1]}));\n * model.add(tf.layers.dense({units: 4, activation: 'relu6'}));\n * model.add(tf.layers.dense({units: 1, activation: 'relu6'}));\n * // Note that the untrained model is random at this point.\n * model.predict(tf.randomNormal([10, 1])).print();\n * ```\n * @param layer Layer instance.\n *\n * @exception ValueError In case the `layer` argument does not know its\n * input shape.\n * @exception ValueError In case the `layer` argument has multiple output\n * tensors, or is already connected somewhere else (forbidden in\n * `Sequential` models).\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n add(layer) {\n const isLayerModelInstance = layer instanceof Sequential || layer instanceof LayersModel;\n let modelLayer;\n if (isLayerModelInstance) {\n modelLayer = layer;\n if (modelLayer.outputs.length !== 1) {\n throw new ValueError('All layers in a Sequential model ' +\n 'should have a single output tensor. ' +\n 'For multi-output layers, ' +\n 'use the functional API.');\n }\n if (modelLayer.inputs.length !== 1) {\n throw new ValueError('All layers in a Sequential model ' +\n 'should have a single input tensor. ' +\n 'For multi-input layers, ' +\n 'use the functional API.');\n }\n }\n if (this.outputs.length === 0) {\n // first layer in model: check that it is an input layer\n if (layer.inboundNodes.length === 0) {\n // create an input layer\n if (layer.batchInputShape == null) {\n throw new ValueError('The first layer in a Sequential model must ' +\n 'get an `inputShape` or `batchInputShape` argument.');\n }\n // Instantiate the input layer.\n const x = Input({\n batchShape: layer.batchInputShape,\n dtype: layer.dtype,\n name: layer.name + '_input'\n });\n // This will build the current layer and create the node connecting\n // the current layer to the input layer we just created.\n layer.apply(x);\n }\n if (isLayerModelInstance) {\n this.outputs = modelLayer.outputs;\n this.inputs = modelLayer.inputs;\n }\n else {\n if (layer.inboundNodes.length !== 1) {\n throw new ValueError('A layer added to a Sequential model must not already be ' +\n `connected somewhere else. LayersModel received layer ${layer.name} ` +\n `which has ${layer.inboundNodes.length} pre-existing inbound ` +\n 'connections.');\n }\n if (layer.inboundNodes[0].outputTensors.length !== 1) {\n throw new ValueError('All layers in a Sequential model ' +\n 'should have a single output tensor. 
' +\n 'For multi-output layers, ' +\n 'use the functional API.');\n }\n this.checkShape(layer);\n this.outputs = [layer.inboundNodes[0].outputTensors[0]];\n this.inputs = getSourceInputs(this.outputs[0]);\n }\n this.inboundNodes = [];\n // We create an input node, which we will keep updated\n // as we add more layers.\n // (This call has side effects.)\n // tslint:disable-next-line:no-unused-expression\n new Node({\n outboundLayer: this,\n inboundLayers: [],\n nodeIndices: [],\n tensorIndices: [],\n inputTensors: this.inputs,\n outputTensors: this.outputs,\n // no model-level masking for now\n inputMasks: generic_utils.pyListRepeat(null, this.inputs.length),\n outputMasks: [null],\n inputShapes: this.inputs.map(x => x.shape),\n outputShapes: this.outputs[0].shape\n });\n }\n else {\n const outputTensor = layer.apply(this.outputs[0]);\n if (Array.isArray(outputTensor)) {\n throw new TypeError('All layers in a Sequential model ' +\n 'should have a single output tensor. ' +\n 'For multi-output layers, ' +\n 'use the functional API.');\n }\n this.checkShape(layer);\n this.outputs = [outputTensor];\n // update self.inbound_nodes\n this.inboundNodes[0].outputTensors = this.outputs;\n this.inboundNodes[0].outputShapes = [this.outputs[0].shape];\n }\n this.layers.push(layer);\n this.built = false;\n }\n /**\n * Removes the last layer in the model.\n *\n * @exception TypeError if there are no layers in the model.\n */\n pop() {\n if (this.layers.length === 0) {\n throw new TypeError('There are no layers in the model.');\n }\n this.layers.pop();\n if (this.layers.length === 0) {\n this.outputs = [];\n this.inboundNodes = [];\n this.outboundNodes = [];\n }\n else {\n const lastLayerIndex = this.layers.length - 1;\n this.layers[lastLayerIndex].outboundNodes = [];\n this.outputs = [this.layers[lastLayerIndex].output];\n // update self.inbound_nodes\n this.inboundNodes[0].outputTensors = this.outputs;\n this.inboundNodes[0].outputShapes = [this.outputs[0].shape];\n }\n }\n call(inputs, kwargs) {\n if (this.model == null) {\n this.build();\n }\n return this.model.call(inputs, kwargs);\n }\n build(inputShape) {\n // Call `getExactlyOneShape` without using its return value,\n // to verify that exactly one input shape is provided.\n getExactlyOneShape(inputShape);\n if (this.inputs.length === 0 || this.outputs.length === 0) {\n throw new TypeError('Sequential model cannot be built: model is empty.' 
+\n ' Add some layers first.');\n }\n // actually create the model\n this.model = new LayersModel({\n inputs: this.inputs,\n outputs: this.outputs[0],\n name: this.name + '_model'\n });\n this.model.trainable = this.trainable;\n // mirror model attributes\n this.supportsMasking = this.model.supportsMasking;\n // TODO(michaelterry): Add caches\n this.inputLayers = this.model.inputLayers;\n this.inputLayersNodeIndices = this.model.inputLayersNodeIndices;\n this.inputLayersTensorIndices = this.model.inputLayersTensorIndices;\n this.outputLayers = this.model.outputLayers;\n this.outputLayersNodeIndices = this.model.outputLayersNodeIndices;\n this.outputLayersTensorIndices = this.model.outputLayersTensorIndices;\n this.nodesByDepth = this.model.nodesByDepth;\n this.containerNodes = this.model.containerNodes;\n this.outputNames = this.model.outputNames;\n this.inputNames = this.model.inputNames;\n // TODO(michaelterry): Add feedInputNames, feedInputs, if needed.\n // TODO(michaelterry): Add callbackModel if needed.\n this.built = true;\n }\n countParams() {\n if (!this.built) {\n this.build();\n }\n return super.countParams();\n }\n /**\n * Print a text summary of the Sequential model's layers.\n *\n * The summary includes\n * - Name and type of all layers that comprise the model.\n * - Output shape(s) of the layers\n * - Number of weight parameters of each layer\n * - The total number of trainable and non-trainable parameters of the\n * model.\n *\n * ```js\n * const model = tf.sequential();\n * model.add(\n * tf.layers.dense({units: 100, inputShape: [10], activation: 'relu'}));\n * model.add(tf.layers.dense({units: 1, activation: 'sigmoid'}));\n *\n * model.summary();\n * ```\n *\n * @param lineLength Custom line length, in number of characters.\n * @param positions Custom widths of each of the columns, as either\n * fractions of `lineLength` (e.g., `[0.5, 0.75, 1]`) or absolute number\n * of characters (e.g., `[30, 50, 65]`). Each number corresponds to\n * right-most (i.e., ending) position of a column.\n * @param printFn Custom print function. Can be used to replace the default\n * `console.log`. 
For example, you can use `x => {}` to mute the printed\n * messages in the console.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n summary(lineLength, positions, printFn = console.log) {\n if (!this.built) {\n this.build();\n }\n super.summary(lineLength, positions, printFn);\n }\n /**\n * Sets the weights of the model.\n *\n * @param weights Should be a list of Tensors with shapes and types matching\n * the output of `model.getWeights()`.\n */\n setWeights(weights) {\n if (this.model == null) {\n this.build();\n }\n this.model.setWeights(weights);\n }\n /**\n * Returns the loss value & metrics values for the model in test mode.\n *\n * Loss and metrics are specified during `compile()`, which needs to happen\n * before calls to `evaluate()`.\n *\n * Computation is done in batches.\n *\n * ```js\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 1, inputShape: [10]})]\n * });\n * model.compile({optimizer: 'sgd', loss: 'meanSquaredError'});\n * const result = model.evaluate(tf.ones([8, 10]), tf.ones([8, 1]), {\n * batchSize: 4,\n * });\n * result.print();\n * ```\n *\n * @param x `tf.Tensor` of test data, or an `Array` of `tf.Tensor`s if the\n * model has multiple inputs.\n * @param y `tf.Tensor` of target data, or an `Array` of `tf.Tensor`s if the\n * model has multiple outputs.\n * @param args A `ModelEvaluateConfig`, containing optional fields.\n *\n * @return `Scalar` test loss (if the model has a single output and no\n * metrics) or `Array` of `Scalar`s (if the model has multiple outputs\n * and/or metrics). The attribute `model.metricsNames`\n * will give you the display labels for the scalar outputs.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n evaluate(x, y, args = {}) {\n if (!this.built) {\n throw new RuntimeError('The model needs to be compiled before being used.');\n }\n return this.model.evaluate(x, y, args);\n }\n // TODO(cais): Add code snippet below once real dataset objects are\n // available.\n /**\n * Evaluate model using a dataset object.\n *\n * Note: Unlike `evaluate()`, this method is asynchronous (`async`);\n *\n * @param dataset A dataset object. Its `iterator()` method is expected\n * to generate a dataset iterator object, the `next()` method of which\n * is expected to produce data batches for evaluation. The return value\n * of the `next()` call ought to contain a boolean `done` field and a\n * `value` field. The `value` field is expected to be an array of two\n * `tf.Tensor`s or an array of two nested `tf.Tensor` structures. The former\n * case is for models with exactly one input and one output (e.g..\n * a sequential model). The latter case is for models with multiple\n * inputs and/or multiple outputs. 
Of the two items in the array, the\n * first is the input feature(s) and the second is the output target(s).\n * @param args A configuration object for the dataset-based evaluation.\n * @returns Loss and metric values as an Array of `Scalar` objects.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n async evaluateDataset(dataset, args) {\n if (!this.built) {\n throw new RuntimeError('The model needs to be compiled before being used.');\n }\n return this.model.evaluateDataset(dataset, args);\n }\n /**\n * Generates output predictions for the input samples.\n *\n * Computation is done in batches.\n *\n * Note: the \"step\" mode of predict() is currently not supported.\n * This is because the TensorFow.js core backend is imperative only.\n *\n * ```js\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 1, inputShape: [10]})]\n * });\n * model.predict(tf.ones([2, 10])).print();\n * ```\n *\n * @param x The input data, as a Tensor, or an `Array` of `tf.Tensor`s if\n * the model has multiple inputs.\n * @param conifg A `ModelPredictConfig` object containing optional fields.\n *\n * @return `tf.Tensor`(s) of predictions.\n *\n * @exception ValueError In case of mismatch between the provided input data\n * and the model's expectations, or in case a stateful model receives a\n * number of samples that is not a multiple of the batch size.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n predict(x, args = {}) {\n if (this.model == null) {\n this.build();\n }\n return this.model.predict(x, args);\n }\n /**\n * Returns predictions for a single batch of samples.\n *\n * @param x: Input samples, as a Tensor, or list of Tensors (if the model\n * has multiple inputs).\n * @return Tensor(s) of predictions\n */\n predictOnBatch(x) {\n if (this.model == null) {\n this.build();\n }\n return this.model.predictOnBatch(x);\n }\n /**\n * See `LayersModel.compile`.\n *\n * @param args\n */\n compile(args) {\n this.build();\n this.model.compile(args);\n this.optimizer_ = this.model.optimizer;\n // tslint:disable-next-line:no-any\n this.isOptimizerOwned = this.model.isOptimizerOwned;\n this.loss = this.model.loss;\n this.metrics = this.model.metrics;\n // TODO(cais): Add this.lossWeights, this.sampleWeightMode,\n // this.weightedMetrics, this.targets.\n this.metricsTensors = this.model.metricsTensors;\n this.metricsNames = this.model.metricsNames;\n // TODO(cais): Add sampleWeights.\n }\n get optimizer() {\n return this.model == null ? undefined : this.model.optimizer;\n }\n set optimizer(optimizer) {\n this.model.optimizer = optimizer;\n }\n /**\n * Trains the model for a fixed number of epochs (iterations on a dataset).\n *\n * ```js\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 1, inputShape: [10]})]\n * });\n * model.compile({optimizer: 'sgd', loss: 'meanSquaredError'});\n * const history = await model.fit(tf.ones([8, 10]), tf.ones([8, 1]), {\n * batchSize: 4,\n * epochs: 3\n * });\n * console.log(history.history.loss[0]);\n * ```\n *\n * @param x `tf.Tensor` of training data, or an array of `tf.Tensor`s if the\n * model has multiple inputs. If all inputs in the model are named, you can\n * also pass a dictionary mapping input names to `tf.Tensor`s.\n * @param y `tf.Tensor` of target (label) data, or an array of `tf.Tensor`s if\n * the model has multiple outputs. 
If all outputs in the model are named, you\n * can also pass a dictionary mapping output names to `tf.Tensor`s.\n * @param args A `ModelFitConfig`, containing optional fields.\n *\n * @return A `History` instance. Its `history` attribute contains all\n * information collected during training.\n *\n * @exception ValueError In case of mismatch between the provided input data\n * and what the model expects.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n async fit(x, y, args = {}) {\n if (!this.built) {\n throw new RuntimeError('The model needs to be compiled before ' +\n 'being used.');\n }\n return this.model.fit(x, y, args);\n }\n /**\n * Trains the model using a dataset object.\n *\n * ```js\n * const xArray = [\n * [1, 1, 1, 1, 1, 1, 1, 1, 1],\n * [1, 1, 1, 1, 1, 1, 1, 1, 1],\n * [1, 1, 1, 1, 1, 1, 1, 1, 1],\n * [1, 1, 1, 1, 1, 1, 1, 1, 1],\n * ];\n * const yArray = [1, 1, 1, 1];\n * // Create a dataset from the JavaScript array.\n * const xDataset = tf.data.array(xArray);\n * const yDataset = tf.data.array(yArray);\n * // Zip combines the `x` and `y` Datasets into a single Dataset, the\n * // iterator of which will return an object containing of two tensors,\n * // corresponding to `x` and `y`. The call to `batch(4)` will bundle\n * // four such samples into a single object, with the same keys now pointing\n * // to tensors that hold 4 examples, organized along the batch dimension.\n * // The call to `shuffle(4)` causes each iteration through the dataset to\n * // happen in a different order. The size of the shuffle window is 4.\n * const xyDataset = tf.data.zip({xs: xDataset, ys: yDataset})\n * .batch(4)\n * .shuffle(4);\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 1, inputShape: [9]})]\n * });\n * model.compile({optimizer: 'sgd', loss: 'meanSquaredError'});\n * const history = await model.fitDataset(xyDataset, {\n * epochs: 4,\n * callbacks: {onEpochEnd: (epoch, logs) => console.log(logs.loss)}\n * });\n * ```\n *\n * @param dataset A dataset object. Its `iterator()` method is expected to\n * generate a dataset iterator object, the `next()` method of which is\n * expected to produce data batches for evaluation. The return value of the\n * `next()` call ought to contain a boolean `done` field and a `value`\n * field.\n *\n * The `value` field is expected to be an object of with fields\n * `xs` and `ys`, which point to the feature tensor and the target tensor,\n * respectively. This case is for models with exactly one input and one\n * output (e.g.. a sequential model). For example:\n * ```js\n * {value: {xs: xsTensor, ys: ysTensor}, done: false}\n * ```\n *\n * If the model has multiple inputs, the `xs` field of `value` should\n * be an object mapping input names to their respective feature tensors.\n * For example:\n * ```js\n * {\n * value: {\n * xs: {\n * input_1: xsTensor1,\n * input_2: xsTensor2\n * },\n * ys: ysTensor\n * },\n * done: false\n * }\n * ```\n * If the model has multiple outputs, the `ys` field of `value` should\n * be an object mapping output names to their respective target tensors.\n * For example:\n * ```js\n * {\n * value: {\n * xs: xsTensor,\n * ys: {\n * output_1: ysTensor1,\n * output_2: ysTensor2\n * },\n * },\n * done: false\n * }\n * ```\n * @param args A `ModelFitDatasetArgs`, containing optional fields.\n *\n * @return A `History` instance. 
Its `history` attribute contains all\n * information collected during training.\n *\n * @doc {heading: 'Models', subheading: 'Classes', ignoreCI: true}\n */\n async fitDataset(dataset, args) {\n if (!this.built) {\n throw new RuntimeError('The model needs to be compiled before ' +\n 'being used.');\n }\n return this.model.fitDataset(dataset, args);\n }\n /**\n * Runs a single gradient update on a single batch of data.\n *\n * This method differs from `fit()` and `fitDataset()` in the following\n * regards:\n * - It operates on exactly one batch of data.\n * - It returns only the loss and matric values, instead of\n * returning the batch-by-batch loss and metric values.\n * - It doesn't support fine-grained options such as verbosity and\n * callbacks.\n *\n * @param x Input data. It could be one of the following:\n * - A `tf.Tensor`, or an Array of `tf.Tensor`s (in case the model has\n * multiple inputs).\n * - An Object mapping input names to corresponding `tf.Tensor` (if the\n * model has named inputs).\n * @param y Target darta. It could be either a `tf.Tensor` a multiple\n * `tf.Tensor`s. It should be consistent with `x`.\n * @returns Training loss or losses (in case the model has\n * multiple outputs), along with metrics (if any), as numbers.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n async trainOnBatch(x, y) {\n return this.model.trainOnBatch(x, y);\n }\n /* See parent class for JsDoc */\n /** @nocollapse */\n static fromConfig(cls, config, customObjects = {}, fastWeightInit = false) {\n let configArray;\n let extraModelConfig = {};\n if (config instanceof Array) {\n if (!(config[0].className != null) ||\n config[0]['className'] === 'Merge') {\n throw new ValueError('Legacy serialization format not supported yet.');\n }\n configArray = config;\n }\n else {\n util.assert(config['layers'] != null, () => `When the config data for a Sequential model is not an Array, ` +\n `it must be an Object that contains the 'layers' field.`);\n configArray = config['layers'];\n delete config['layers'];\n extraModelConfig = config;\n }\n const model = new cls(extraModelConfig);\n if (!(model instanceof Sequential)) {\n throw new NotImplementedError(`Sequential.fromConfig called on non-Sequential input: ${model}`);\n }\n for (const conf of configArray) {\n const customObjects = undefined;\n const layer = deserialize(conf, customObjects, fastWeightInit);\n if (fastWeightInit) {\n layer.setFastWeightInitDuringBuild(true);\n }\n model.add(layer);\n }\n return model;\n }\n /**\n * Setter used for force stopping of LayersModel.fit() (i.e., training).\n *\n * Example:\n *\n * ```js\n * const model = tf.sequential();\n * model.add(tf.layers.dense({units: 1, inputShape: [10]}));\n * model.compile({loss: 'meanSquaredError', optimizer: 'sgd'});\n * const xs = tf.ones([8, 10]);\n * const ys = tf.zeros([8, 1]);\n *\n * const history = await model.fit(xs, ys, {\n * epochs: 10,\n * callbacks: {\n * onEpochEnd: async (epoch, logs) => {\n * if (epoch === 2) {\n * model.stopTraining = true;\n * }\n * }\n * }\n * });\n *\n * // There should be only 3 values in the loss array, instead of 10 values,\n * // due to the stopping after 3 epochs.\n * console.log(history.history.loss);\n * ```\n */\n set stopTraining(stop) {\n // TODO(cais): When refactoring to remove the composition pattern happens,\n // remove this method overriding.\n if (this.model == null) {\n throw new ValueError('Cannot set the stopTraining property of a sequential model before ' +\n 'it is compiled.');\n }\n this.model.stopTraining = 
stop;\n }\n get stopTraining() {\n if (this.model == null) {\n throw new ValueError('Cannot get the stopTraining property of a sequential model before ' +\n 'it is compiled.');\n }\n return this.model.stopTraining;\n }\n // TODO(cais): Override get trainableWeights() here\n // tslint:disable-next-line:no-any\n getConfig() {\n // NOTE(cais): We override the return type of getConfig() to `any` here,\n // because the `Sequential` class is a special case among `Container`\n // subtypes in that its getConfig() method returns an Array (not a\n // dict).\n const layers = [];\n for (const layer of this.layers) {\n const dict = {};\n dict['className'] = layer.getClassName();\n dict['config'] = layer.getConfig();\n layers.push(dict);\n }\n return { name: this.name, layers };\n }\n}\n/** @nocollapse */\nSequential.className = 'Sequential';\nserialization.registerClass(Sequential);\n//# sourceMappingURL=models.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport { CallbackConstructorRegistry } from './base_callbacks';\nimport { Input, } from './engine/input_layer';\nimport { LayersModel } from './engine/training';\nimport { loadLayersModelInternal, Sequential } from './models';\n// TODO(cais): Add doc string to all the public static functions in this\n// class; include exectuable JavaScript code snippets where applicable\n// (b/74074458).\n// LayersModel and related factory methods.\n/**\n * A model is a data structure that consists of `Layers` and defines inputs\n * and outputs.\n *\n * The key difference between `tf.model` and `tf.sequential` is that\n * `tf.model` is more generic, supporting an arbitrary graph (without\n * cycles) of layers. `tf.sequential` is less generic and supports only a linear\n * stack of layers.\n *\n * When creating a `tf.LayersModel`, specify its input(s) and output(s). Layers\n * are used to wire input(s) to output(s).\n *\n * For example, the following code snippet defines a model consisting of\n * two `dense` layers, with 10 and 4 units, respectively.\n *\n * ```js\n * // Define input, which has a size of 5 (not including batch dimension).\n * const input = tf.input({shape: [5]});\n *\n * // First dense layer uses relu activation.\n * const denseLayer1 = tf.layers.dense({units: 10, activation: 'relu'});\n * // Second dense layer uses softmax activation.\n * const denseLayer2 = tf.layers.dense({units: 4, activation: 'softmax'});\n *\n * // Obtain the output symbolic tensor by applying the layers on the input.\n * const output = denseLayer2.apply(denseLayer1.apply(input));\n *\n * // Create the model based on the inputs.\n * const model = tf.model({inputs: input, outputs: output});\n *\n * // The model can be used for training, evaluation and prediction.\n * // For example, the following line runs prediction with the model on\n * // some fake data.\n * model.predict(tf.ones([2, 5])).print();\n * ```\n * See also:\n * `tf.sequential`, `tf.loadLayersModel`.\n *\n * @doc {heading: 'Models', subheading: 'Creation'}\n */\nexport function model(args) {\n return new LayersModel(args);\n}\n/**\n * Creates a `tf.Sequential` model. A sequential model is any model where the\n * outputs of one layer are the inputs to the next layer, i.e. 
the model\n * topology is a simple 'stack' of layers, with no branching or skipping.\n *\n * This means that the first layer passed to a `tf.Sequential` model should have\n * a defined input shape. What that means is that it should have received an\n * `inputShape` or `batchInputShape` argument, or for some type of layers\n * (recurrent, Dense...) an `inputDim` argument.\n *\n * The key difference between `tf.model` and `tf.sequential` is that\n * `tf.sequential` is less generic, supporting only a linear stack of layers.\n * `tf.model` is more generic and supports an arbitrary graph (without\n * cycles) of layers.\n *\n * Examples:\n *\n * ```js\n * const model = tf.sequential();\n *\n * // First layer must have an input shape defined.\n * model.add(tf.layers.dense({units: 32, inputShape: [50]}));\n * // Afterwards, TF.js does automatic shape inference.\n * model.add(tf.layers.dense({units: 4}));\n *\n * // Inspect the inferred shape of the model's output, which equals\n * // `[null, 4]`. The 1st dimension is the undetermined batch dimension; the\n * // 2nd is the output size of the model's last layer.\n * console.log(JSON.stringify(model.outputs[0].shape));\n * ```\n *\n * It is also possible to specify a batch size (with potentially undetermined\n * batch dimension, denoted by \"null\") for the first layer using the\n * `batchInputShape` key. The following example is equivalent to the above:\n *\n * ```js\n * const model = tf.sequential();\n *\n * // First layer must have a defined input shape\n * model.add(tf.layers.dense({units: 32, batchInputShape: [null, 50]}));\n * // Afterwards, TF.js does automatic shape inference.\n * model.add(tf.layers.dense({units: 4}));\n *\n * // Inspect the inferred shape of the model's output.\n * console.log(JSON.stringify(model.outputs[0].shape));\n * ```\n *\n * You can also use an `Array` of already-constructed `Layer`s to create\n * a `tf.Sequential` model:\n *\n * ```js\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 32, inputShape: [50]}),\n * tf.layers.dense({units: 4})]\n * });\n * console.log(JSON.stringify(model.outputs[0].shape));\n * ```\n *\n * @doc {heading: 'Models', subheading: 'Creation'}\n */\nexport function sequential(config) {\n return new Sequential(config);\n}\n/**\n * Load a model composed of Layer objects, including its topology and optionally\n * weights. See the Tutorial named \"How to import a Keras Model\" for usage\n * examples.\n *\n * This method is applicable to:\n *\n * 1. Models created with the `tf.layers.*`, `tf.sequential`, and\n * `tf.model` APIs of TensorFlow.js and later saved with the\n * `tf.LayersModel.save` method.\n * 2. Models converted from Keras or TensorFlow tf.keras using the\n * [tensorflowjs_converter](https://github.com/tensorflow/tfjs/tree/master/tfjs-converter).\n *\n * This mode is *not* applicable to TensorFlow `SavedModel`s or their converted\n * forms. For those models, use `tf.loadGraphModel`.\n *\n * Example 1. 
Load a model from an HTTP server.\n *\n * ```js\n * const model = await tf.loadLayersModel(\n * 'https://storage.googleapis.com/tfjs-models/tfjs/iris_v1/model.json');\n * model.summary();\n * ```\n *\n * Example 2: Save `model`'s topology and weights to browser [local\n * storage](https://developer.mozilla.org/en-US/docs/Web/API/Window/localStorage);\n * then load it back.\n *\n * ```js\n * const model = tf.sequential(\n * {layers: [tf.layers.dense({units: 1, inputShape: [3]})]});\n * console.log('Prediction from original model:');\n * model.predict(tf.ones([1, 3])).print();\n *\n * const saveResults = await model.save('localstorage://my-model-1');\n *\n * const loadedModel = await tf.loadLayersModel('localstorage://my-model-1');\n * console.log('Prediction from loaded model:');\n * loadedModel.predict(tf.ones([1, 3])).print();\n * ```\n *\n * Example 3. Saving `model`'s topology and weights to browser\n * [IndexedDB](https://developer.mozilla.org/en-US/docs/Web/API/IndexedDB_API);\n * then load it back.\n *\n * ```js\n * const model = tf.sequential(\n * {layers: [tf.layers.dense({units: 1, inputShape: [3]})]});\n * console.log('Prediction from original model:');\n * model.predict(tf.ones([1, 3])).print();\n *\n * const saveResults = await model.save('indexeddb://my-model-1');\n *\n * const loadedModel = await tf.loadLayersModel('indexeddb://my-model-1');\n * console.log('Prediction from loaded model:');\n * loadedModel.predict(tf.ones([1, 3])).print();\n * ```\n *\n * Example 4. Load a model from user-selected files from HTML\n * [file input\n * elements](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/input/file).\n *\n * ```js\n * // Note: this code snippet will not work without the HTML elements in the\n * // page\n * const jsonUpload = document.getElementById('json-upload');\n * const weightsUpload = document.getElementById('weights-upload');\n *\n * const model = await tf.loadLayersModel(\n * tf.io.browserFiles([jsonUpload.files[0], weightsUpload.files[0]]));\n * ```\n *\n * @param pathOrIOHandler Can be either of the two formats\n * 1. A string path to the `ModelAndWeightsConfig` JSON describing\n * the model in the canonical TensorFlow.js format. For file://\n * (tfjs-node-only), http:// and https:// schemas, the path can be\n * either absolute or relative.\n * 2. An `tf.io.IOHandler` object that loads model artifacts with its `load`\n * method.\n * @param options Optional configuration arguments for the model loading,\n * including:\n * - `strict`: Require that the provided weights exactly match those required\n * by the layers. Default true. 
Passing false means that both extra\n * weights and missing weights will be silently ignored.\n * - `onProgress`: A function of the signature `(fraction: number) => void',\n * that can be used as the progress callback for the model loading.\n * @returns A `Promise` of `tf.LayersModel`, with the topology and weights\n * loaded.\n *\n * @doc {heading: 'Models', subheading: 'Loading'}\n */\nexport function loadLayersModel(pathOrIOHandler, options) {\n if (options == null) {\n options = {};\n }\n return loadLayersModelInternal(pathOrIOHandler, options);\n}\n/**\n * Used to instantiate an input to a model as a `tf.SymbolicTensor`.\n *\n * Users should call the `input` factory function for\n * consistency with other generator functions.\n *\n * Example:\n *\n * ```js\n * // Defines a simple logistic regression model with 32 dimensional input\n * // and 3 dimensional output.\n * const x = tf.input({shape: [32]});\n * const y = tf.layers.dense({units: 3, activation: 'softmax'}).apply(x);\n * const model = tf.model({inputs: x, outputs: y});\n * model.predict(tf.ones([2, 32])).print();\n * ```\n *\n * Note: `input` is only necessary when using `model`. When using\n * `sequential`, specify `inputShape` for the first layer or use `inputLayer`\n * as the first layer.\n *\n * @doc {heading: 'Models', subheading: 'Inputs'}\n */\nexport function input(config) {\n return Input(config);\n}\nexport function registerCallbackConstructor(verbosityLevel, callbackConstructor) {\n CallbackConstructorRegistry.registerCallbackConstructor(verbosityLevel, callbackConstructor);\n}\n//# sourceMappingURL=exports.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n// Layer activation functions\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { serialization, tidy } from '@tensorflow/tfjs-core';\nimport * as K from './backend/tfjs_backend';\nimport { deserializeKerasObject } from './utils/generic_utils';\n/**\n * Base class for Activations.\n *\n * Special note: due to cross-language compatibility reasons, the\n * static readonly className field in this family of classes must be set to\n * the initialLowerCamelCase name of the activation.\n */\nexport class Activation extends serialization.Serializable {\n getConfig() {\n return {};\n }\n}\n/**\n * Exponential linear unit (ELU).\n * Reference: https://arxiv.org/abs/1511.07289\n */\nexport class Elu extends Activation {\n /**\n * Calculate the activation function.\n *\n * @param x: Input.\n * @param alpha: Scaling factor the negative section.\n * @return Output of the ELU activation.\n */\n apply(x, alpha = 1) {\n return K.elu(x, alpha);\n }\n}\n/** @nocollapse */\nElu.className = 'elu';\nserialization.registerClass(Elu);\n/**\n * Scaled Exponential Linear Unit. 
(Klambauer et al., 2017).\n * Reference: Self-Normalizing Neural Networks, https://arxiv.org/abs/1706.02515\n * Notes:\n * - To be used together with the initialization \"lecunNormal\".\n * - To be used together with the dropout variant \"AlphaDropout\".\n */\nexport class Selu extends Activation {\n apply(x) {\n return tfc.selu(x);\n }\n}\n/** @nocollapse */\nSelu.className = 'selu';\nserialization.registerClass(Selu);\n/**\n * Rectified linear unit\n */\nexport class Relu extends Activation {\n apply(x) {\n return tfc.relu(x);\n }\n}\n/** @nocollapse */\nRelu.className = 'relu';\nserialization.registerClass(Relu);\n/**\n * Rectified linear unit activation maxing out at 6.0.\n */\nexport class Relu6 extends Activation {\n apply(x) {\n return tidy(() => tfc.minimum(6.0, tfc.relu(x)));\n }\n}\n/** @nocollapse */\nRelu6.className = 'relu6';\nserialization.registerClass(Relu6);\n//* Linear activation (no-op) */\nexport class Linear extends Activation {\n apply(x) {\n return x;\n }\n}\n/** @nocollapse */\nLinear.className = 'linear';\nserialization.registerClass(Linear);\n/**\n * Sigmoid activation function.\n */\nexport class Sigmoid extends Activation {\n apply(x) {\n return tfc.sigmoid(x);\n }\n}\n/** @nocollapse */\nSigmoid.className = 'sigmoid';\nserialization.registerClass(Sigmoid);\n/**\n * Segment-wise linear approximation of sigmoid.\n */\nexport class HardSigmoid extends Activation {\n apply(x) {\n return K.hardSigmoid(x);\n }\n}\n/** @nocollapse */\nHardSigmoid.className = 'hardSigmoid';\nserialization.registerClass(HardSigmoid);\n/**\n * Softplus activation function.\n */\nexport class Softplus extends Activation {\n apply(x) {\n return tfc.softplus(x);\n }\n}\n/** @nocollapse */\nSoftplus.className = 'softplus';\nserialization.registerClass(Softplus);\n/**\n * Softsign activation function.\n */\nexport class Softsign extends Activation {\n apply(x) {\n return K.softsign(x);\n }\n}\n/** @nocollapse */\nSoftsign.className = 'softsign';\nserialization.registerClass(Softsign);\n/**\n * Hyperbolic tangent function.\n */\nexport class Tanh extends Activation {\n apply(x) {\n return tfc.tanh(x);\n }\n}\n/** @nocollapse */\nTanh.className = 'tanh';\nserialization.registerClass(Tanh);\n/**\n * Softmax activation function\n */\nexport class Softmax extends Activation {\n /**\n * Calculate the activation function.\n *\n * @param x Tensor.\n * @param axis Integer, axis along which the softmax normalization is applied.\n * Invalid if < 2, as softmax across 1 (the batch dimension) is assumed to be\n * an error.\n *\n * @returns a Tensor of the same shape as x\n *\n * @throws ValueError: In case `dim(x) < 2`.\n */\n apply(x, axis = (-1)) {\n return tfc.softmax(x, axis);\n }\n}\n/** @nocollapse */\nSoftmax.className = 'softmax';\nserialization.registerClass(Softmax);\n/**\n * Log softmax activation function\n */\nexport class LogSoftmax extends Activation {\n /**\n * Calculate the activation function of log softmax:\n * log( exp(x_i) / sum(exp(x)) )\n *\n * @param x Tensor.\n * @param axis Integer, axis along which the softmax normalization is applied.\n * Invalid if < 2, as softmax across 1 (the batch dimension) is assumed to be\n * an error.\n *\n * @returns a Tensor of the same shape as x\n *\n * @throws ValueError: In case `dim(x) < 2`.\n */\n apply(x, axis = (-1)) {\n return tfc.logSoftmax(x, axis);\n }\n}\n/** @nocollapse */\nLogSoftmax.className = 'logSoftmax';\nserialization.registerClass(LogSoftmax);\n/**\n * Swish activation function\n */\nexport class Swish extends Activation {\n /**\n 
* Calculate the activation function.\n *\n * @param x Tensor.\n * @param alpha Scaling factor for the sigmoid function.\n * @returns a Tensor of the same shape as x\n */\n apply(x, alpha = 1) {\n return tidy(() => tfc.sigmoid(x.mul(alpha)).mul(x));\n }\n}\n/** @nocollapse */\nSwish.className = 'swish';\nserialization.registerClass(Swish);\nexport function serializeActivation(activation) {\n return activation.getClassName();\n}\nexport function deserializeActivation(config, customObjects = {}) {\n return deserializeKerasObject(config, serialization.SerializationMap.getMap().classNameMap, customObjects, 'activation');\n}\nexport function getActivation(identifier) {\n if (identifier == null) {\n const config = {};\n config['className'] = 'linear';\n config['config'] = {};\n return deserializeActivation(config);\n }\n if (typeof identifier === 'string') {\n const config = {};\n config['className'] = identifier;\n config['config'] = {};\n return deserializeActivation(config);\n }\n else if (identifier instanceof Activation) {\n return identifier;\n }\n else {\n return deserializeActivation(identifier);\n }\n}\n//# sourceMappingURL=activations.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* original source: keras/regularizers.py */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { abs, add, serialization, sum, tidy, zeros } from '@tensorflow/tfjs-core';\nimport * as K from './backend/tfjs_backend';\nimport { deserializeKerasObject, serializeKerasObject } from './utils/generic_utils';\nfunction assertObjectArgs(args) {\n if (args != null && typeof args !== 'object') {\n throw new Error(`Argument to L1L2 regularizer's constructor is expected to be an ` +\n `object, but received: ${args}`);\n }\n}\n/**\n * Regularizer base class.\n */\nexport class Regularizer extends serialization.Serializable {\n}\nexport class L1L2 extends Regularizer {\n constructor(args) {\n super();\n assertObjectArgs(args);\n this.l1 = args == null || args.l1 == null ? 0.01 : args.l1;\n this.l2 = args == null || args.l2 == null ? 0.01 : args.l2;\n this.hasL1 = this.l1 !== 0;\n this.hasL2 = this.l2 !== 0;\n }\n /**\n * Porting note: Renamed from __call__.\n * @param x Variable of which to calculate the regularization score.\n */\n apply(x) {\n return tidy(() => {\n let regularization = zeros([1]);\n if (this.hasL1) {\n regularization = add(regularization, sum(tfc.mul(this.l1, abs(x))));\n }\n if (this.hasL2) {\n regularization =\n add(regularization, sum(tfc.mul(this.l2, K.square(x))));\n }\n return regularization.asScalar();\n });\n }\n getConfig() {\n return { 'l1': this.l1, 'l2': this.l2 };\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls({ l1: config['l1'], l2: config['l2'] });\n }\n}\n/** @nocollapse */\nL1L2.className = 'L1L2';\nserialization.registerClass(L1L2);\nexport function l1(args) {\n assertObjectArgs(args);\n return new L1L2({ l1: args != null ? args.l1 : null, l2: 0 });\n}\nexport function l2(args) {\n assertObjectArgs(args);\n return new L1L2({ l2: args != null ? 
args.l2 : null, l1: 0 });\n}\n// Maps the JavaScript-like identifier keys to the corresponding keras symbols.\nexport const REGULARIZER_IDENTIFIER_REGISTRY_SYMBOL_MAP = {\n 'l1l2': 'L1L2'\n};\nexport function serializeRegularizer(constraint) {\n return serializeKerasObject(constraint);\n}\nexport function deserializeRegularizer(config, customObjects = {}) {\n return deserializeKerasObject(config, serialization.SerializationMap.getMap().classNameMap, customObjects, 'regularizer');\n}\nexport function getRegularizer(identifier) {\n if (identifier == null) {\n return null;\n }\n if (typeof identifier === 'string') {\n const className = identifier in REGULARIZER_IDENTIFIER_REGISTRY_SYMBOL_MAP ?\n REGULARIZER_IDENTIFIER_REGISTRY_SYMBOL_MAP[identifier] :\n identifier;\n const config = { className, config: {} };\n return deserializeRegularizer(config);\n }\n else if (identifier instanceof Regularizer) {\n return identifier;\n }\n else {\n return deserializeRegularizer(identifier);\n }\n}\n//# sourceMappingURL=regularizers.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Advanced activation layers.\n */\nimport { clipByValue, elu, leakyRelu, prelu, relu, serialization } from '@tensorflow/tfjs-core';\nimport { Softmax as softmaxActivation } from '../activations';\nimport { cast } from '../backend/tfjs_backend';\nimport { getConstraint, serializeConstraint } from '../constraints';\nimport { InputSpec, Layer } from '../engine/topology';\nimport { NotImplementedError, ValueError } from '../errors';\nimport { getInitializer, serializeInitializer } from '../initializers';\nimport { getRegularizer, serializeRegularizer } from '../regularizers';\nimport { getExactlyOneShape, getExactlyOneTensor } from '../utils/types_utils';\nexport class ReLU extends Layer {\n constructor(args) {\n super(args == null ? {} : args);\n this.supportsMasking = true;\n if (args != null) {\n this.maxValue = args.maxValue;\n }\n }\n call(inputs, kwargs) {\n inputs = getExactlyOneTensor(inputs);\n let output = relu(inputs);\n if (this.maxValue != null) {\n output = clipByValue(output, 0, this.maxValue);\n }\n return output;\n }\n computeOutputShape(inputShape) {\n return inputShape;\n }\n getConfig() {\n const config = { maxValue: this.maxValue };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nReLU.className = 'ReLU';\nserialization.registerClass(ReLU);\nexport class LeakyReLU extends Layer {\n constructor(args) {\n super(args == null ? {} : args);\n this.DEFAULT_ALPHA = 0.3;\n if (args == null) {\n args = {};\n }\n this.alpha = args.alpha == null ? this.DEFAULT_ALPHA : args.alpha;\n }\n call(inputs, kwargs) {\n const x = getExactlyOneTensor(inputs);\n return leakyRelu(x, this.alpha);\n }\n computeOutputShape(inputShape) {\n return inputShape;\n }\n getConfig() {\n const config = { alpha: this.alpha };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nLeakyReLU.className = 'LeakyReLU';\nserialization.registerClass(LeakyReLU);\nexport class PReLU extends Layer {\n constructor(args) {\n super(args == null ? 
{} : args);\n this.DEFAULT_ALPHA_INITIALIZER = 'zeros';\n if (args == null) {\n args = {};\n }\n this.supportsMasking = true;\n this.alphaInitializer =\n getInitializer(args.alphaInitializer || this.DEFAULT_ALPHA_INITIALIZER);\n this.alphaRegularizer = getRegularizer(args.alphaRegularizer);\n this.alphaConstraint = getConstraint(args.alphaConstraint);\n if (args.sharedAxes == null) {\n this.sharedAxes = null;\n }\n else if (Array.isArray(args.sharedAxes)) {\n this.sharedAxes = args.sharedAxes;\n }\n else if (typeof args.sharedAxes === 'number') {\n this.sharedAxes = [args.sharedAxes];\n }\n else {\n throw new ValueError(`Expected sharedAxes to be a number or an array of numbers, ` +\n `but got ${args.sharedAxes}`);\n }\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const paramShape = inputShape.slice(1);\n if (this.sharedAxes != null) {\n for (const i of this.sharedAxes) {\n paramShape[i - 1] = 1;\n }\n }\n this.alpha = this.addWeight('alpha', paramShape, 'float32', this.alphaInitializer, this.alphaRegularizer, true, this.alphaConstraint);\n // Set input spec.\n const axes = {};\n if (this.sharedAxes != null) {\n for (let i = 1; i < inputShape.length; ++i) {\n axes[i] = inputShape[i];\n }\n }\n this.inputSpec = [new InputSpec({\n ndim: inputShape.length,\n axes,\n })];\n this.built = true;\n }\n call(inputs, kwargs) {\n inputs = getExactlyOneTensor(inputs);\n return prelu(inputs, this.alpha.read());\n }\n getConfig() {\n const config = {\n alphaInitializer: serializeInitializer(this.alphaInitializer),\n alphaRegularizer: serializeRegularizer(this.alphaRegularizer),\n alphaConstraint: serializeConstraint(this.alphaConstraint),\n sharedAxes: this.sharedAxes\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nPReLU.className = 'PReLU';\nserialization.registerClass(PReLU);\nexport class ELU extends Layer {\n constructor(args) {\n super(args == null ? {} : args);\n this.DEFAULT_ALPHA = 1.0;\n if (args == null) {\n args = {};\n }\n if (args.alpha != null && args.alpha !== this.DEFAULT_ALPHA) {\n throw new NotImplementedError(`Non-default alpha value (${args.alpha}) is not supported by the ` +\n `ELU layer yet.`);\n }\n this.alpha = args.alpha == null ? this.DEFAULT_ALPHA : args.alpha;\n }\n call(inputs, kwargs) {\n const x = getExactlyOneTensor(inputs);\n return elu(x);\n }\n computeOutputShape(inputShape) {\n return inputShape;\n }\n getConfig() {\n const config = { alpha: this.alpha };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nELU.className = 'ELU';\nserialization.registerClass(ELU);\nexport class ThresholdedReLU extends Layer {\n constructor(args) {\n super(args == null ? {} : args);\n this.DEFAULT_THETA = 1.0;\n if (args == null) {\n args = {};\n }\n this.theta = args.theta == null ? this.DEFAULT_THETA : args.theta;\n }\n call(inputs, kwargs) {\n const x = getExactlyOneTensor(inputs);\n return x.mul(cast(x.greater(this.theta), 'float32'));\n }\n computeOutputShape(inputShape) {\n return inputShape;\n }\n getConfig() {\n const config = { theta: this.theta };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nThresholdedReLU.className = 'ThresholdedReLU';\nserialization.registerClass(ThresholdedReLU);\nexport class Softmax extends Layer {\n constructor(args) {\n super(args == null ? 
{} : args);\n this.DEFAULT_AXIS = 1.0;\n if (args == null) {\n args = {};\n }\n this.softmax = new softmaxActivation().apply;\n this.axis = args.axis == null ? this.DEFAULT_AXIS : args.axis;\n }\n call(inputs, kwargs) {\n const x = getExactlyOneTensor(inputs);\n return this.softmax(x, this.axis);\n }\n computeOutputShape(inputShape) {\n return inputShape;\n }\n getConfig() {\n const config = { axis: this.axis };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nSoftmax.className = 'Softmax';\nserialization.registerClass(Softmax);\n//# sourceMappingURL=advanced_activations.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport { ValueError } from '../errors';\nimport { pyListRepeat } from './generic_utils';\nimport { isInteger, max } from './math_utils';\n/**\n * Transforms a single number of array of numbers into an array of numbers.\n * @param value\n * @param n: The size of the tuple to be returned.\n * @param name: Name of the parameter, used for generating error messages.\n * @returns An array of numbers.\n */\nexport function normalizeArray(value, n, name) {\n if (typeof value === 'number') {\n return pyListRepeat(value, n);\n }\n else {\n if (value.length !== n) {\n throw new ValueError(`The ${name} argument must be an integer or tuple of ${n} integers.` +\n ` Received: ${value.length} elements.`);\n }\n for (let i = 0; i < n; ++i) {\n const singleValue = value[i];\n if (!isInteger(singleValue)) {\n throw new ValueError(`The ${name} argument must be an integer or tuple of ${n}` +\n ` integers. 
Received: ${JSON.stringify(value)} including a` +\n ` non-integer number ${singleValue}`);\n }\n }\n return value;\n }\n}\n/**\n * Determines output length of a convolution given input length.\n * @param inputLength\n * @param filterSize\n * @param padding\n * @param stride\n * @param dilation: dilation rate.\n */\nexport function convOutputLength(inputLength, filterSize, padding, stride, dilation = 1) {\n if (inputLength == null) {\n return inputLength;\n }\n const dilatedFilterSize = filterSize + (filterSize - 1) * (dilation - 1);\n let outputLength;\n if (padding === 'same') {\n outputLength = inputLength;\n }\n else { // VALID\n outputLength = inputLength - dilatedFilterSize + 1;\n }\n return Math.floor((outputLength + stride - 1) / stride);\n}\nexport function deconvLength(dimSize, strideSize, kernelSize, padding) {\n if (dimSize == null) {\n return null;\n }\n if (padding === 'valid') {\n dimSize = dimSize * strideSize + max([kernelSize - strideSize, 0]);\n }\n else if (padding === 'same') {\n dimSize = dimSize * strideSize;\n }\n else {\n throw new ValueError(`Unsupport padding mode: ${padding}.`);\n }\n return dimSize;\n}\n//# sourceMappingURL=conv_utils.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * TensorFlow.js Layers: Convolutional Layers\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { serialization, tidy } from '@tensorflow/tfjs-core';\nimport { getActivation, serializeActivation } from '../activations';\nimport { imageDataFormat } from '../backend/common';\nimport * as K from '../backend/tfjs_backend';\nimport { checkDataFormat, checkPaddingMode } from '../common';\nimport { getConstraint, serializeConstraint } from '../constraints';\nimport { InputSpec, Layer } from '../engine/topology';\nimport { NotImplementedError, ValueError } from '../errors';\nimport { getInitializer, serializeInitializer } from '../initializers';\nimport { getRegularizer, serializeRegularizer } from '../regularizers';\nimport { convOutputLength, deconvLength, normalizeArray } from '../utils/conv_utils';\nimport * as generic_utils from '../utils/generic_utils';\nimport { getExactlyOneShape, getExactlyOneTensor } from '../utils/types_utils';\n/**\n * Transpose and cast the input before the conv2d.\n * @param x Input image tensor.\n * @param dataFormat\n */\nexport function preprocessConv2DInput(x, dataFormat) {\n // TODO(cais): Cast type to float32 if not.\n return tidy(() => {\n checkDataFormat(dataFormat);\n if (dataFormat === 'channelsFirst') {\n return tfc.transpose(x, [0, 2, 3, 1]); // NCHW -> NHWC.\n }\n else {\n return x;\n }\n });\n}\n/**\n * Transpose and cast the input before the conv3d.\n * @param x Input image tensor.\n * @param dataFormat\n */\nexport function preprocessConv3DInput(x, dataFormat) {\n return tidy(() => {\n checkDataFormat(dataFormat);\n if (dataFormat === 'channelsFirst') {\n return tfc.transpose(x, [0, 2, 3, 4, 1]); // NCDHW -> NDHWC.\n }\n else {\n return x;\n }\n });\n}\n/**\n * 1D-convolution with bias added.\n *\n * Porting Note: This function does not exist in the Python Keras backend.\n * It is exactly the same as `conv2d`, except the added `bias`.\n *\n * @param x Input tensor, rank-3, of shape `[batchSize, width, inChannels]`.\n * @param kernel Kernel, rank-3, of shape `[filterWidth, inDepth, 
outDepth]`.\n * @param bias Bias, rank-3, of shape `[outDepth]`.\n * @param strides\n * @param padding Padding mode.\n * @param dataFormat Data format.\n * @param dilationRate\n * @returns The result of the 1D convolution.\n * @throws ValueError, if `x`, `kernel` or `bias` is not of the correct rank.\n */\nexport function conv1dWithBias(x, kernel, bias, strides = 1, padding = 'valid', dataFormat, dilationRate = 1) {\n return tidy(() => {\n if (dataFormat == null) {\n dataFormat = imageDataFormat();\n }\n checkDataFormat(dataFormat);\n // Check the ranks of x, kernel and bias.\n if (x.shape.length !== 3) {\n throw new ValueError(`The input of a conv1dWithBias operation should be 3, but is ` +\n `${x.shape.length} instead.`);\n }\n if (kernel.shape.length !== 3) {\n throw new ValueError(`The kernel for a conv1dWithBias operation should be 3, but is ` +\n `${kernel.shape.length} instead`);\n }\n if (bias != null && bias.shape.length !== 1) {\n throw new ValueError(`The bias for a conv1dWithBias operation should be 1, but is ` +\n `${kernel.shape.length} instead`);\n }\n // TODO(cais): Support CAUSAL padding mode.\n if (dataFormat === 'channelsFirst') {\n x = tfc.transpose(x, [0, 2, 1]); // NCW -> NWC.\n }\n if (padding === 'causal') {\n throw new NotImplementedError('The support for CAUSAL padding mode in conv1dWithBias is not ' +\n 'implemented yet.');\n }\n let y = tfc.conv1d(x, kernel, strides, padding === 'same' ? 'same' : 'valid', 'NWC', dilationRate);\n if (bias != null) {\n y = K.biasAdd(y, bias);\n }\n return y;\n });\n}\n/**\n * 1D-convolution.\n *\n * @param x Input tensor, rank-3, of shape `[batchSize, width, inChannels]`.\n * @param kernel Kernel, rank-3, of shape `[filterWidth, inDepth, outDepth]`.s\n * @param strides\n * @param padding Padding mode.\n * @param dataFormat Data format.\n * @param dilationRate\n * @returns The result of the 1D convolution.\n * @throws ValueError, if `x`, `kernel` or `bias` is not of the correct rank.\n */\nexport function conv1d(x, kernel, strides = 1, padding = 'valid', dataFormat, dilationRate = 1) {\n return tidy(() => {\n checkDataFormat(dataFormat);\n return conv1dWithBias(x, kernel, null, strides, padding, dataFormat, dilationRate);\n });\n}\n/**\n * 2D Convolution\n * @param x\n * @param kernel kernel of the convolution.\n * @param strides strides array.\n * @param padding padding mode. Default to 'valid'.\n * @param dataFormat data format. Defaults to 'channelsLast'.\n * @param dilationRate dilation rate array.\n * @returns Result of the 2D pooling.\n */\nexport function conv2d(x, kernel, strides = [1, 1], padding = 'valid', dataFormat, dilationRate) {\n return tidy(() => {\n checkDataFormat(dataFormat);\n return conv2dWithBiasActivation(x, kernel, null, strides, padding, dataFormat, dilationRate);\n });\n}\n/**\n * 2D Convolution with an added bias and optional activation.\n * Note: This function does not exist in the Python Keras Backend. 
This function\n * is exactly the same as `conv2d`, except the added `bias`.\n */\nexport function conv2dWithBiasActivation(x, kernel, bias, strides = [1, 1], padding = 'valid', dataFormat, dilationRate, activation = null) {\n return tidy(() => {\n if (dataFormat == null) {\n dataFormat = imageDataFormat();\n }\n checkDataFormat(dataFormat);\n if (x.rank !== 3 && x.rank !== 4) {\n throw new ValueError(`conv2dWithBiasActivation expects input to be of rank 3 or 4, ` +\n `but received ${x.rank}.`);\n }\n if (kernel.rank !== 3 && kernel.rank !== 4) {\n throw new ValueError(`conv2dWithBiasActivation expects kernel to be of rank 3 or 4, ` +\n `but received ${x.rank}.`);\n }\n let y = preprocessConv2DInput(x, dataFormat);\n if (padding === 'causal') {\n throw new NotImplementedError('The support for CAUSAL padding mode in conv1dWithBias is not ' +\n 'implemented yet.');\n }\n y = tfc.fused.conv2d({\n x: y,\n filter: kernel,\n strides: strides,\n pad: padding === 'same' ? 'same' : 'valid',\n dilations: dilationRate,\n dataFormat: 'NHWC',\n bias,\n activation\n });\n if (dataFormat === 'channelsFirst') {\n y = tfc.transpose(y, [0, 3, 1, 2]);\n }\n return y;\n });\n}\n/**\n * 3D Convolution.\n * @param x\n * @param kernel kernel of the convolution.\n * @param strides strides array.\n * @param padding padding mode. Default to 'valid'.\n * @param dataFormat data format. Defaults to 'channelsLast'.\n * @param dilationRate dilation rate array.\n * @returns Result of the 3D convolution.\n */\nexport function conv3d(x, kernel, strides = [1, 1, 1], padding = 'valid', dataFormat, dilationRate) {\n return tidy(() => {\n checkDataFormat(dataFormat);\n return conv3dWithBias(x, kernel, null, strides, padding, dataFormat, dilationRate);\n });\n}\n/**\n * 3D Convolution with an added bias.\n * Note: This function does not exist in the Python Keras Backend. This function\n * is exactly the same as `conv3d`, except the added `bias`.\n */\nexport function conv3dWithBias(x, kernel, bias, strides = [1, 1, 1], padding = 'valid', dataFormat, dilationRate) {\n return tidy(() => {\n if (dataFormat == null) {\n dataFormat = imageDataFormat();\n }\n checkDataFormat(dataFormat);\n if (x.rank !== 4 && x.rank !== 5) {\n throw new ValueError(`conv3dWithBias expects input to be of rank 4 or 5, but received ` +\n `${x.rank}.`);\n }\n if (kernel.rank !== 4 && kernel.rank !== 5) {\n throw new ValueError(`conv3dWithBias expects kernel to be of rank 4 or 5, but received ` +\n `${x.rank}.`);\n }\n let y = preprocessConv3DInput(x, dataFormat);\n if (padding === 'causal') {\n throw new NotImplementedError('The support for CAUSAL padding mode in conv3dWithBias is not ' +\n 'implemented yet.');\n }\n y = tfc.conv3d(y, kernel, strides, padding === 'same' ? 
'same' : 'valid', 'NDHWC', dilationRate);\n if (bias != null) {\n y = K.biasAdd(y, bias);\n }\n if (dataFormat === 'channelsFirst') {\n y = tfc.transpose(y, [0, 4, 1, 2, 3]);\n }\n return y;\n });\n}\n/**\n * Abstract convolution layer.\n */\nexport class BaseConv extends Layer {\n constructor(rank, args) {\n super(args);\n this.bias = null;\n this.DEFAULT_KERNEL_INITIALIZER = 'glorotNormal';\n this.DEFAULT_BIAS_INITIALIZER = 'zeros';\n BaseConv.verifyArgs(args);\n this.rank = rank;\n generic_utils.assertPositiveInteger(this.rank, 'rank');\n if (this.rank !== 1 && this.rank !== 2 && this.rank !== 3) {\n throw new NotImplementedError(`Convolution layer for rank other than 1, 2, or 3 (${this.rank}) is ` +\n `not implemented yet.`);\n }\n this.kernelSize = normalizeArray(args.kernelSize, rank, 'kernelSize');\n this.strides = normalizeArray(args.strides == null ? 1 : args.strides, rank, 'strides');\n this.padding = args.padding == null ? 'valid' : args.padding;\n checkPaddingMode(this.padding);\n this.dataFormat =\n args.dataFormat == null ? 'channelsLast' : args.dataFormat;\n checkDataFormat(this.dataFormat);\n this.activation = getActivation(args.activation);\n this.useBias = args.useBias == null ? true : args.useBias;\n this.biasInitializer =\n getInitializer(args.biasInitializer || this.DEFAULT_BIAS_INITIALIZER);\n this.biasConstraint = getConstraint(args.biasConstraint);\n this.biasRegularizer = getRegularizer(args.biasRegularizer);\n this.activityRegularizer = getRegularizer(args.activityRegularizer);\n this.dilationRate = normalizeArray(args.dilationRate == null ? 1 : args.dilationRate, rank, 'dilationRate');\n if (this.rank === 1 &&\n (Array.isArray(this.dilationRate) && this.dilationRate.length !== 1)) {\n throw new ValueError(`dilationRate must be a number or an array of a single number ` +\n `for 1D convolution, but received ` +\n `${JSON.stringify(this.dilationRate)}`);\n }\n else if (this.rank === 2) {\n if (typeof this.dilationRate === 'number') {\n this.dilationRate = [this.dilationRate, this.dilationRate];\n }\n else if (this.dilationRate.length !== 2) {\n throw new ValueError(`dilationRate must be a number or array of two numbers for 2D ` +\n `convolution, but received ${JSON.stringify(this.dilationRate)}`);\n }\n }\n else if (this.rank === 3) {\n if (typeof this.dilationRate === 'number') {\n this.dilationRate =\n [this.dilationRate, this.dilationRate, this.dilationRate];\n }\n else if (this.dilationRate.length !== 3) {\n throw new ValueError(`dilationRate must be a number or array of three numbers for 3D ` +\n `convolution, but received ${JSON.stringify(this.dilationRate)}`);\n }\n }\n }\n static verifyArgs(args) {\n // Check config.kernelSize type and shape.\n generic_utils.assert('kernelSize' in args, `required key 'kernelSize' not in config`);\n if (typeof args.kernelSize !== 'number' &&\n !generic_utils.checkArrayTypeAndLength(args.kernelSize, 'number', 1, 3)) {\n throw new ValueError(`BaseConv expects config.kernelSize to be number or number[] with ` +\n `length 1, 2, or 3, but received ${JSON.stringify(args.kernelSize)}.`);\n }\n }\n getConfig() {\n const config = {\n kernelSize: this.kernelSize,\n strides: this.strides,\n padding: this.padding,\n dataFormat: this.dataFormat,\n dilationRate: this.dilationRate,\n activation: serializeActivation(this.activation),\n useBias: this.useBias,\n biasInitializer: serializeInitializer(this.biasInitializer),\n biasRegularizer: serializeRegularizer(this.biasRegularizer),\n activityRegularizer: 
serializeRegularizer(this.activityRegularizer),\n biasConstraint: serializeConstraint(this.biasConstraint)\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/**\n * Abstract nD convolution layer. Ancestor of convolution layers which reduce\n * across channels, i.e., Conv1D and Conv2D, but not DepthwiseConv2D.\n */\nexport class Conv extends BaseConv {\n constructor(rank, args) {\n super(rank, args);\n this.kernel = null;\n Conv.verifyArgs(args);\n this.filters = args.filters;\n generic_utils.assertPositiveInteger(this.filters, 'filters');\n this.kernelInitializer = getInitializer(args.kernelInitializer || this.DEFAULT_KERNEL_INITIALIZER);\n this.kernelConstraint = getConstraint(args.kernelConstraint);\n this.kernelRegularizer = getRegularizer(args.kernelRegularizer);\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const channelAxis = this.dataFormat === 'channelsFirst' ? 1 : inputShape.length - 1;\n if (inputShape[channelAxis] == null) {\n throw new ValueError(`The channel dimension of the input should be defined. ` +\n `Found ${inputShape[channelAxis]}`);\n }\n const inputDim = inputShape[channelAxis];\n const kernelShape = this.kernelSize.concat([inputDim, this.filters]);\n this.kernel = this.addWeight('kernel', kernelShape, null, this.kernelInitializer, this.kernelRegularizer, true, this.kernelConstraint);\n if (this.useBias) {\n this.bias = this.addWeight('bias', [this.filters], null, this.biasInitializer, this.biasRegularizer, true, this.biasConstraint);\n }\n this.inputSpec = [{ ndim: this.rank + 2, axes: { [channelAxis]: inputDim } }];\n this.built = true;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n inputs = getExactlyOneTensor(inputs);\n let outputs;\n const biasValue = this.bias == null ? null : this.bias.read();\n const fusedActivationName = generic_utils.mapActivationToFusedKernel(this.activation.getClassName());\n if (fusedActivationName != null && this.rank === 2) {\n outputs = conv2dWithBiasActivation(inputs, this.kernel.read(), biasValue, this.strides, this.padding, this.dataFormat, this.dilationRate, fusedActivationName);\n }\n else {\n if (this.rank === 1) {\n outputs = conv1dWithBias(inputs, this.kernel.read(), biasValue, this.strides[0], this.padding, this.dataFormat, this.dilationRate[0]);\n }\n else if (this.rank === 2) {\n // TODO(cais): Move up to constructor.\n outputs = conv2dWithBiasActivation(inputs, this.kernel.read(), biasValue, this.strides, this.padding, this.dataFormat, this.dilationRate);\n }\n else if (this.rank === 3) {\n outputs = conv3dWithBias(inputs, this.kernel.read(), biasValue, this.strides, this.padding, this.dataFormat, this.dilationRate);\n }\n else {\n throw new NotImplementedError('convolutions greater than 3D are not implemented yet.');\n }\n if (this.activation != null) {\n outputs = this.activation.apply(outputs);\n }\n }\n return outputs;\n });\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const newSpace = [];\n const space = (this.dataFormat === 'channelsLast') ?\n inputShape.slice(1, inputShape.length - 1) :\n inputShape.slice(2);\n for (let i = 0; i < space.length; ++i) {\n const newDim = convOutputLength(space[i], this.kernelSize[i], this.padding, this.strides[i], typeof this.dilationRate === 'number' ? 
this.dilationRate :\n this.dilationRate[i]);\n newSpace.push(newDim);\n }\n let outputShape = [inputShape[0]];\n if (this.dataFormat === 'channelsLast') {\n outputShape = outputShape.concat(newSpace);\n outputShape.push(this.filters);\n }\n else {\n outputShape.push(this.filters);\n outputShape = outputShape.concat(newSpace);\n }\n return outputShape;\n }\n getConfig() {\n const config = {\n filters: this.filters,\n kernelInitializer: serializeInitializer(this.kernelInitializer),\n kernelRegularizer: serializeRegularizer(this.kernelRegularizer),\n kernelConstraint: serializeConstraint(this.kernelConstraint)\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n static verifyArgs(args) {\n // Check config.filters type, shape, and value.\n if (!('filters' in args) || typeof args.filters !== 'number' ||\n args.filters < 1) {\n throw new ValueError(`Convolution layer expected config.filters to be a 'number' > 0 ` +\n `but got ${JSON.stringify(args.filters)}`);\n }\n }\n}\nexport class Conv2D extends Conv {\n constructor(args) {\n super(2, args);\n Conv2D.verifyArgs(args);\n }\n getConfig() {\n const config = super.getConfig();\n delete config['rank'];\n return config;\n }\n static verifyArgs(args) {\n // config.kernelSize must be a number or array of numbers.\n if ((typeof args.kernelSize !== 'number') &&\n !generic_utils.checkArrayTypeAndLength(args.kernelSize, 'number', 1, 2)) {\n throw new ValueError(`Conv2D expects config.kernelSize to be number or number[] with ` +\n `length 1 or 2, but received ${JSON.stringify(args.kernelSize)}.`);\n }\n }\n}\n/** @nocollapse */\nConv2D.className = 'Conv2D';\nserialization.registerClass(Conv2D);\nexport class Conv3D extends Conv {\n constructor(args) {\n super(3, args);\n Conv3D.verifyArgs(args);\n }\n getConfig() {\n const config = super.getConfig();\n delete config['rank'];\n return config;\n }\n static verifyArgs(args) {\n // config.kernelSize must be a number or array of numbers.\n if (typeof args.kernelSize !== 'number') {\n if (!(Array.isArray(args.kernelSize) &&\n (args.kernelSize.length === 1 || args.kernelSize.length === 3))) {\n throw new ValueError(`Conv3D expects config.kernelSize to be number or` +\n ` [number, number, number], but received ${JSON.stringify(args.kernelSize)}.`);\n }\n }\n }\n}\n/** @nocollapse */\nConv3D.className = 'Conv3D';\nserialization.registerClass(Conv3D);\nexport class Conv2DTranspose extends Conv2D {\n constructor(args) {\n super(args);\n this.inputSpec = [new InputSpec({ ndim: 4 })];\n if (this.padding !== 'same' && this.padding !== 'valid') {\n throw new ValueError(`Conv2DTranspose currently supports only padding modes 'same' ` +\n `and 'valid', but received padding mode ${this.padding}`);\n }\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n if (inputShape.length !== 4) {\n throw new ValueError('Input should have rank 4; Received input shape: ' +\n JSON.stringify(inputShape));\n }\n const channelAxis = this.dataFormat === 'channelsFirst' ? 1 : inputShape.length - 1;\n if (inputShape[channelAxis] == null) {\n throw new ValueError('The channel dimension of the inputs should be defined. 
' +\n 'Found `None`.');\n }\n const inputDim = inputShape[channelAxis];\n const kernelShape = this.kernelSize.concat([this.filters, inputDim]);\n this.kernel = this.addWeight('kernel', kernelShape, 'float32', this.kernelInitializer, this.kernelRegularizer, true, this.kernelConstraint);\n if (this.useBias) {\n this.bias = this.addWeight('bias', [this.filters], 'float32', this.biasInitializer, this.biasRegularizer, true, this.biasConstraint);\n }\n // Set input spec.\n this.inputSpec =\n [new InputSpec({ ndim: 4, axes: { [channelAxis]: inputDim } })];\n this.built = true;\n }\n call(inputs, kwargs) {\n return tfc.tidy(() => {\n let input = getExactlyOneTensor(inputs);\n if (input.shape.length !== 4) {\n throw new ValueError(`Conv2DTranspose.call() expects input tensor to be rank-4, but ` +\n `received a tensor of rank-${input.shape.length}`);\n }\n const inputShape = input.shape;\n const batchSize = inputShape[0];\n let hAxis;\n let wAxis;\n if (this.dataFormat === 'channelsFirst') {\n hAxis = 2;\n wAxis = 3;\n }\n else {\n hAxis = 1;\n wAxis = 2;\n }\n const height = inputShape[hAxis];\n const width = inputShape[wAxis];\n const kernelH = this.kernelSize[0];\n const kernelW = this.kernelSize[1];\n const strideH = this.strides[0];\n const strideW = this.strides[1];\n // Infer the dynamic output shape.\n const outHeight = deconvLength(height, strideH, kernelH, this.padding);\n const outWidth = deconvLength(width, strideW, kernelW, this.padding);\n // Porting Note: We don't branch based on `this.dataFormat` here,\n // because\n // the tjfs-core function `conv2dTranspose` called below always\n // assumes channelsLast.\n const outputShape = [batchSize, outHeight, outWidth, this.filters];\n if (this.dataFormat !== 'channelsLast') {\n input = tfc.transpose(input, [0, 2, 3, 1]);\n }\n let outputs = tfc.conv2dTranspose(input, this.kernel.read(), outputShape, this.strides, this.padding);\n if (this.dataFormat !== 'channelsLast') {\n outputs = tfc.transpose(outputs, [0, 3, 1, 2]);\n }\n if (this.bias != null) {\n outputs =\n K.biasAdd(outputs, this.bias.read(), this.dataFormat);\n }\n if (this.activation != null) {\n outputs = this.activation.apply(outputs);\n }\n return outputs;\n });\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const outputShape = inputShape.slice();\n let channelAxis;\n let heightAxis;\n let widthAxis;\n if (this.dataFormat === 'channelsFirst') {\n channelAxis = 1;\n heightAxis = 2;\n widthAxis = 3;\n }\n else {\n channelAxis = 3;\n heightAxis = 1;\n widthAxis = 2;\n }\n const kernelH = this.kernelSize[0];\n const kernelW = this.kernelSize[1];\n const strideH = this.strides[0];\n const strideW = this.strides[1];\n outputShape[channelAxis] = this.filters;\n outputShape[heightAxis] =\n deconvLength(outputShape[heightAxis], strideH, kernelH, this.padding);\n outputShape[widthAxis] =\n deconvLength(outputShape[widthAxis], strideW, kernelW, this.padding);\n return outputShape;\n }\n getConfig() {\n const config = super.getConfig();\n delete config['dilationRate'];\n return config;\n }\n}\n/** @nocollapse */\nConv2DTranspose.className = 'Conv2DTranspose';\nserialization.registerClass(Conv2DTranspose);\nexport class SeparableConv extends Conv {\n constructor(rank, config) {\n super(rank, config);\n this.DEFAULT_DEPTHWISE_INITIALIZER = 'glorotUniform';\n this.DEFAULT_POINTWISE_INITIALIZER = 'glorotUniform';\n this.depthwiseKernel = null;\n this.pointwiseKernel = null;\n if (config.filters == null) {\n throw new ValueError('The `filters` 
configuration field is required by SeparableConv, ' +\n 'but is unspecified.');\n }\n if (config.kernelInitializer != null || config.kernelRegularizer != null ||\n config.kernelConstraint != null) {\n throw new ValueError('Fields kernelInitializer, kernelRegularizer and kernelConstraint ' +\n 'are invalid for SeparableConv2D. Use depthwiseInitializer, ' +\n 'depthwiseRegularizer, depthwiseConstraint, pointwiseInitializer, ' +\n 'pointwiseRegularizer and pointwiseConstraint instead.');\n }\n if (config.padding != null && config.padding !== 'same' &&\n config.padding !== 'valid') {\n throw new ValueError(`SeparableConv${this.rank}D supports only padding modes: ` +\n `'same' and 'valid', but received ${JSON.stringify(config.padding)}`);\n }\n this.depthMultiplier =\n config.depthMultiplier == null ? 1 : config.depthMultiplier;\n this.depthwiseInitializer = getInitializer(config.depthwiseInitializer || this.DEFAULT_DEPTHWISE_INITIALIZER);\n this.depthwiseRegularizer = getRegularizer(config.depthwiseRegularizer);\n this.depthwiseConstraint = getConstraint(config.depthwiseConstraint);\n this.pointwiseInitializer = getInitializer(config.depthwiseInitializer || this.DEFAULT_POINTWISE_INITIALIZER);\n this.pointwiseRegularizer = getRegularizer(config.pointwiseRegularizer);\n this.pointwiseConstraint = getConstraint(config.pointwiseConstraint);\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n if (inputShape.length < this.rank + 2) {\n throw new ValueError(`Inputs to SeparableConv${this.rank}D should have rank ` +\n `${this.rank + 2}, but received input shape: ` +\n `${JSON.stringify(inputShape)}`);\n }\n const channelAxis = this.dataFormat === 'channelsFirst' ? 1 : inputShape.length - 1;\n if (inputShape[channelAxis] == null || inputShape[channelAxis] < 0) {\n throw new ValueError(`The channel dimension of the inputs should be defined, ` +\n `but found ${JSON.stringify(inputShape[channelAxis])}`);\n }\n const inputDim = inputShape[channelAxis];\n const depthwiseKernelShape = this.kernelSize.concat([inputDim, this.depthMultiplier]);\n const pointwiseKernelShape = [];\n for (let i = 0; i < this.rank; ++i) {\n pointwiseKernelShape.push(1);\n }\n pointwiseKernelShape.push(inputDim * this.depthMultiplier, this.filters);\n const trainable = true;\n this.depthwiseKernel = this.addWeight('depthwise_kernel', depthwiseKernelShape, 'float32', this.depthwiseInitializer, this.depthwiseRegularizer, trainable, this.depthwiseConstraint);\n this.pointwiseKernel = this.addWeight('pointwise_kernel', pointwiseKernelShape, 'float32', this.pointwiseInitializer, this.pointwiseRegularizer, trainable, this.pointwiseConstraint);\n if (this.useBias) {\n this.bias = this.addWeight('bias', [this.filters], 'float32', this.biasInitializer, this.biasRegularizer, trainable, this.biasConstraint);\n }\n else {\n this.bias = null;\n }\n this.inputSpec =\n [new InputSpec({ ndim: this.rank + 2, axes: { [channelAxis]: inputDim } })];\n this.built = true;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n inputs = getExactlyOneTensor(inputs);\n let output;\n if (this.rank === 1) {\n throw new NotImplementedError('1D separable convolution is not implemented yet.');\n }\n else if (this.rank === 2) {\n if (this.dataFormat === 'channelsFirst') {\n inputs = tfc.transpose(inputs, [0, 2, 3, 1]); // NCHW -> NHWC.\n }\n output = tfc.separableConv2d(inputs, this.depthwiseKernel.read(), this.pointwiseKernel.read(), this.strides, this.padding, this.dilationRate, 'NHWC');\n }\n if (this.useBias) {\n output = 
K.biasAdd(output, this.bias.read(), this.dataFormat);\n }\n if (this.activation != null) {\n output = this.activation.apply(output);\n }\n if (this.dataFormat === 'channelsFirst') {\n output = tfc.transpose(output, [0, 3, 1, 2]); // NHWC -> NCHW.\n }\n return output;\n });\n }\n getConfig() {\n const config = super.getConfig();\n delete config['rank'];\n delete config['kernelInitializer'];\n delete config['kernelRegularizer'];\n delete config['kernelConstraint'];\n config['depthwiseInitializer'] =\n serializeInitializer(this.depthwiseInitializer);\n config['pointwiseInitializer'] =\n serializeInitializer(this.pointwiseInitializer);\n config['depthwiseRegularizer'] =\n serializeRegularizer(this.depthwiseRegularizer);\n config['pointwiseRegularizer'] =\n serializeRegularizer(this.pointwiseRegularizer);\n config['depthwiseConstraint'] =\n serializeConstraint(this.depthwiseConstraint);\n config['pointwiseConstraint'] =\n serializeConstraint(this.pointwiseConstraint);\n return config;\n }\n}\n/** @nocollapse */\nSeparableConv.className = 'SeparableConv';\nexport class SeparableConv2D extends SeparableConv {\n constructor(args) {\n super(2, args);\n }\n}\n/** @nocollapse */\nSeparableConv2D.className = 'SeparableConv2D';\nserialization.registerClass(SeparableConv2D);\nexport class Conv1D extends Conv {\n constructor(args) {\n super(1, args);\n Conv1D.verifyArgs(args);\n this.inputSpec = [{ ndim: 3 }];\n }\n getConfig() {\n const config = super.getConfig();\n delete config['rank'];\n delete config['dataFormat'];\n return config;\n }\n static verifyArgs(args) {\n // config.kernelSize must be a number or array of numbers.\n if (typeof args.kernelSize !== 'number' &&\n !generic_utils.checkArrayTypeAndLength(args.kernelSize, 'number', 1, 1)) {\n throw new ValueError(`Conv1D expects config.kernelSize to be number or number[] with ` +\n `length 1, but received ${JSON.stringify(args.kernelSize)}.`);\n }\n }\n}\n/** @nocollapse */\nConv1D.className = 'Conv1D';\nserialization.registerClass(Conv1D);\nexport class Cropping2D extends Layer {\n constructor(args) {\n super(args);\n if (typeof args.cropping === 'number') {\n this.cropping =\n [[args.cropping, args.cropping], [args.cropping, args.cropping]];\n }\n else if (typeof args.cropping[0] === 'number') {\n this.cropping = [\n [args.cropping[0], args.cropping[0]],\n [args.cropping[1], args.cropping[1]]\n ];\n }\n else {\n this.cropping = args.cropping;\n }\n this.dataFormat =\n args.dataFormat === undefined ? 
'channelsLast' : args.dataFormat;\n this.inputSpec = [{ ndim: 4 }];\n }\n computeOutputShape(inputShape) {\n if (this.dataFormat === 'channelsFirst') {\n return [\n inputShape[0], inputShape[1],\n inputShape[2] - this.cropping[0][0] - this.cropping[0][1],\n inputShape[3] - this.cropping[1][0] - this.cropping[1][1]\n ];\n }\n else {\n return [\n inputShape[0],\n inputShape[1] - this.cropping[0][0] - this.cropping[0][1],\n inputShape[2] - this.cropping[1][0] - this.cropping[1][1], inputShape[3]\n ];\n }\n }\n call(inputs, kwargs) {\n return tidy(() => {\n inputs = getExactlyOneTensor(inputs);\n if (this.dataFormat === 'channelsLast') {\n const hSliced = K.sliceAlongAxis(inputs, this.cropping[0][0], inputs.shape[1] - this.cropping[0][0] - this.cropping[0][1], 2);\n return K.sliceAlongAxis(hSliced, this.cropping[1][0], inputs.shape[2] - this.cropping[1][1] - this.cropping[1][0], 3);\n }\n else {\n const hSliced = K.sliceAlongAxis(inputs, this.cropping[0][0], inputs.shape[2] - this.cropping[0][0] - this.cropping[0][1], 3);\n return K.sliceAlongAxis(hSliced, this.cropping[1][0], inputs.shape[3] - this.cropping[1][1] - this.cropping[1][0], 4);\n }\n });\n }\n getConfig() {\n const config = { cropping: this.cropping, dataFormat: this.dataFormat };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nCropping2D.className = 'Cropping2D';\nserialization.registerClass(Cropping2D);\nexport class UpSampling2D extends Layer {\n constructor(args) {\n super(args);\n this.DEFAULT_SIZE = [2, 2];\n this.inputSpec = [{ ndim: 4 }];\n this.size = args.size == null ? this.DEFAULT_SIZE : args.size;\n this.dataFormat =\n args.dataFormat == null ? 'channelsLast' : args.dataFormat;\n }\n computeOutputShape(inputShape) {\n if (this.dataFormat === 'channelsFirst') {\n const height = inputShape[2] == null ? null : this.size[0] * inputShape[2];\n const width = inputShape[3] == null ? null : this.size[1] * inputShape[3];\n return [inputShape[0], inputShape[1], height, width];\n }\n else {\n const height = inputShape[1] == null ? null : this.size[0] * inputShape[1];\n const width = inputShape[2] == null ? 
null : this.size[1] * inputShape[2];\n return [inputShape[0], height, width, inputShape[3]];\n }\n }\n call(inputs, kwargs) {\n return tfc.tidy(() => {\n let input = getExactlyOneTensor(inputs);\n const inputShape = input.shape;\n if (this.dataFormat === 'channelsFirst') {\n input = tfc.transpose(input, [0, 2, 3, 1]);\n const height = this.size[0] * inputShape[2];\n const width = this.size[1] * inputShape[3];\n const resized = input.resizeNearestNeighbor([height, width]);\n return tfc.transpose(resized, [0, 3, 1, 2]);\n }\n else {\n const height = this.size[0] * inputShape[1];\n const width = this.size[1] * inputShape[2];\n return input.resizeNearestNeighbor([height, width]);\n }\n });\n }\n getConfig() {\n const config = { size: this.size, dataFormat: this.dataFormat };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nUpSampling2D.className = 'UpSampling2D';\nserialization.registerClass(UpSampling2D);\n//# sourceMappingURL=convolutional.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * TensorFlow.js Layers: Depthwise Convolutional Layers\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { serialization, tidy } from '@tensorflow/tfjs-core';\nimport { imageDataFormat } from '../backend/common';\nimport * as K from '../backend/tfjs_backend';\nimport { checkDataFormat } from '../common';\nimport { getConstraint, serializeConstraint } from '../constraints';\nimport { ValueError } from '../errors';\nimport { getInitializer, serializeInitializer } from '../initializers';\nimport { getRegularizer, serializeRegularizer } from '../regularizers';\nimport { convOutputLength } from '../utils/conv_utils';\nimport { getExactlyOneShape, getExactlyOneTensor } from '../utils/types_utils';\nimport { BaseConv, preprocessConv2DInput } from './convolutional';\n/**\n * 2D convolution with separable filters.\n * @param x Input tensor.\n * @param depthwiseKernel Convolution kernel for depthwise convolution.\n * @param strides Strides (Array of two integers).\n * @param padding Padding model.\n * @param dataFormat Data format.\n * @param dilationRate Array of two integers, dilation rates for the separable\n * convolution.\n * @returns Output tensor.\n * @throws ValueError If depthwiseKernel is not a 4D array.\n */\nexport function depthwiseConv2d(x, depthwiseKernel, strides = [1, 1], padding = 'valid', dataFormat, dilationRate) {\n return tidy(() => {\n if (dataFormat == null) {\n dataFormat = imageDataFormat();\n }\n checkDataFormat(dataFormat);\n let y = preprocessConv2DInput(x, dataFormat);\n if (x.rank !== 4) {\n throw new ValueError(`Input for depthwiseConv2d is required to be 4-D, but is instead ` +\n `${x.rank}-D`);\n }\n if (depthwiseKernel.rank !== 4) {\n throw new ValueError(`depthwiseKernel is required to be 4-D, but is instead ` +\n `${depthwiseKernel.rank}-D`);\n }\n y = tfc.depthwiseConv2d(y, depthwiseKernel, strides, padding === 'same' ? 'same' : 'valid', 'NHWC', dilationRate);\n if (dataFormat === 'channelsFirst') {\n y = tfc.transpose(y, [0, 3, 1, 2]);\n }\n return y;\n });\n}\nexport class DepthwiseConv2D extends BaseConv {\n constructor(args) {\n super(2, args);\n this.depthwiseKernel = null;\n this.depthMultiplier =\n args.depthMultiplier == null ? 
1 : args.depthMultiplier;\n this.depthwiseInitializer = getInitializer(args.depthwiseInitializer || this.DEFAULT_KERNEL_INITIALIZER);\n this.depthwiseConstraint = getConstraint(args.depthwiseConstraint);\n this.depthwiseRegularizer = getRegularizer(args.depthwiseRegularizer);\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n if (inputShape.length < 4) {\n throw new ValueError(`Inputs to DepthwiseConv2D should have rank 4. ` +\n `Received input shape: ${JSON.stringify(inputShape)}.`);\n }\n const channelAxis = this.dataFormat === 'channelsFirst' ? 1 : 3;\n if (inputShape[channelAxis] == null || inputShape[channelAxis] < 0) {\n throw new ValueError('The channel dimension of the inputs to DepthwiseConv2D should ' +\n `be defined, but is not (${inputShape[channelAxis]}).`);\n }\n const inputDim = inputShape[channelAxis];\n const depthwiseKernelShape = [\n this.kernelSize[0], this.kernelSize[1], inputDim, this.depthMultiplier\n ];\n this.depthwiseKernel = this.addWeight('depthwise_kernel', depthwiseKernelShape, null, this.depthwiseInitializer, this.depthwiseRegularizer, true, this.depthwiseConstraint);\n if (this.useBias) {\n this.bias = this.addWeight('bias', [inputDim * this.depthMultiplier], null, this.biasInitializer, this.biasRegularizer, true, this.biasConstraint);\n }\n else {\n this.bias = null;\n }\n this.built = true;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n inputs = getExactlyOneTensor(inputs);\n let outputs = depthwiseConv2d(inputs, this.depthwiseKernel.read(), this.strides, this.padding, this.dataFormat, null);\n // TODO(cais): Add support for dilation.\n if (this.useBias) {\n outputs = K.biasAdd(outputs, this.bias.read(), this.dataFormat);\n }\n if (this.activation != null) {\n outputs = this.activation.apply(outputs);\n }\n return outputs;\n });\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const rows = this.dataFormat === 'channelsFirst' ? inputShape[2] : inputShape[1];\n const cols = this.dataFormat === 'channelsFirst' ? 
inputShape[3] : inputShape[2];\n const outFilters = this.dataFormat === 'channelsFirst' ?\n inputShape[1] * this.depthMultiplier :\n inputShape[3] * this.depthMultiplier;\n const outRows = convOutputLength(rows, this.kernelSize[0], this.padding, this.strides[0]);\n const outCols = convOutputLength(cols, this.kernelSize[1], this.padding, this.strides[1]);\n if (this.dataFormat === 'channelsFirst') {\n return [inputShape[0], outFilters, outRows, outCols];\n }\n else {\n // In this case, assume 'channelsLast'.\n return [inputShape[0], outRows, outCols, outFilters];\n }\n }\n getConfig() {\n const config = super.getConfig();\n config['depthMultiplier'] = this.depthMultiplier;\n config['depthwiseInitializer'] =\n serializeInitializer(this.depthwiseInitializer);\n config['depthwiseRegularizer'] =\n serializeRegularizer(this.depthwiseRegularizer);\n config['depthwiseConstraint'] =\n serializeConstraint(this.depthwiseRegularizer);\n return config;\n }\n}\n/** @nocollapse */\nDepthwiseConv2D.className = 'DepthwiseConv2D';\nserialization.registerClass(DepthwiseConv2D);\n//# sourceMappingURL=convolutional_depthwise.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * TensorFlow.js Layers: Recurrent Neural Network Layers.\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { serialization, tidy, util } from '@tensorflow/tfjs-core';\nimport { getActivation, serializeActivation } from '../activations';\nimport * as K from '../backend/tfjs_backend';\nimport { nameScope } from '../common';\nimport { getConstraint, serializeConstraint } from '../constraints';\nimport { InputSpec, SymbolicTensor } from '../engine/topology';\nimport { Layer } from '../engine/topology';\nimport { AttributeError, NotImplementedError, ValueError } from '../errors';\nimport { getInitializer, Initializer, Ones, serializeInitializer } from '../initializers';\nimport { getRegularizer, serializeRegularizer } from '../regularizers';\nimport { assertPositiveInteger } from '../utils/generic_utils';\nimport * as math_utils from '../utils/math_utils';\nimport { getExactlyOneShape, getExactlyOneTensor, isArrayOfShapes } from '../utils/types_utils';\nimport { batchGetValue, batchSetValue } from '../variables';\nimport { deserialize } from './serialization';\n/**\n * Standardize `apply()` args to a single list of tensor inputs.\n *\n * When running a model loaded from file, the input tensors `initialState` and\n * `constants` are passed to `RNN.apply()` as part of `inputs` instead of the\n * dedicated kwargs fields. 
`inputs` consists of\n * `[inputs, initialState0, initialState1, ..., constant0, constant1]` in this\n * case.\n * This method makes sure that arguments are\n * separated and that `initialState` and `constants` are `Array`s of tensors\n * (or None).\n *\n * @param inputs Tensor or `Array` of tensors.\n * @param initialState Tensor or `Array` of tensors or `null`/`undefined`.\n * @param constants Tensor or `Array` of tensors or `null`/`undefined`.\n * @returns An object consisting of\n * inputs: A tensor.\n * initialState: `Array` of tensors or `null`.\n * constants: `Array` of tensors or `null`.\n * @throws ValueError, if `inputs` is an `Array` but either `initialState` or\n * `constants` is provided.\n */\nexport function standardizeArgs(inputs, initialState, constants, numConstants) {\n if (Array.isArray(inputs)) {\n if (initialState != null || constants != null) {\n throw new ValueError('When inputs is an array, neither initialState or constants ' +\n 'should be provided');\n }\n if (numConstants != null) {\n constants = inputs.slice(inputs.length - numConstants, inputs.length);\n inputs = inputs.slice(0, inputs.length - numConstants);\n }\n if (inputs.length > 1) {\n initialState = inputs.slice(1, inputs.length);\n }\n inputs = inputs[0];\n }\n function toListOrNull(x) {\n if (x == null || Array.isArray(x)) {\n return x;\n }\n else {\n return [x];\n }\n }\n initialState = toListOrNull(initialState);\n constants = toListOrNull(constants);\n return { inputs, initialState, constants };\n}\n/**\n * Iterates over the time dimension of a tensor.\n *\n * @param stepFunction RNN step function.\n * Parameters:\n * inputs: tensor with shape `[samples, ...]` (no time dimension),\n * representing input for the batch of samples at a certain time step.\n * states: an Array of tensors.\n * Returns:\n * outputs: tensor with shape `[samples, outputDim]` (no time dimension).\n * newStates: list of tensors, same length and shapes as `states`. The first\n * state in the list must be the output tensor at the previous timestep.\n * @param inputs Tensor of temporal data of shape `[samples, time, ...]` (at\n * least 3D).\n * @param initialStates Tensor with shape `[samples, outputDim]` (no time\n * dimension), containing the initial values of the states used in the step\n * function.\n * @param goBackwards If `true`, do the iteration over the time dimension in\n * reverse order and return the reversed sequence.\n * @param mask Binary tensor with shape `[sample, time, 1]`, with a zero for\n * every element that is masked.\n * @param constants An Array of constant values passed at each step.\n * @param unroll Whether to unroll the RNN or to use a symbolic loop. *Not*\n * applicable to this imperative deeplearn.js backend. Its value is ignored.\n * @param needPerStepOutputs Whether the per-step outputs are to be\n * concatenated into a single tensor and returned (as the second return\n * value). Default: `false`. This arg is included so that the relatively\n * expensive concatenation of the stepwise outputs can be omitted unless\n * the stepwise outputs need to be kept (e.g., for an LSTM layer of which\n * `returnSequence` is `true`.)\n * @returns An Array: `[lastOutput, outputs, newStates]`.\n * lastOutput: the lastest output of the RNN, of shape `[samples, ...]`.\n * outputs: tensor with shape `[samples, time, ...]` where each entry\n * `output[s, t]` is the output of the step function at time `t` for sample\n * `s`. This return value is provided if and only if the\n * `needPerStepOutputs` is set as `true`. 
If it is set as `false`, this\n * return value will be `undefined`.\n * newStates: Array of tensors, latest states returned by the step function,\n * of shape `(samples, ...)`.\n * @throws ValueError If input dimension is less than 3.\n *\n * TODO(nielsene): This needs to be tidy-ed.\n */\nexport function rnn(stepFunction, inputs, initialStates, goBackwards = false, mask, constants, unroll = false, needPerStepOutputs = false) {\n return tfc.tidy(() => {\n const ndim = inputs.shape.length;\n if (ndim < 3) {\n throw new ValueError(`Input should be at least 3D, but is ${ndim}D.`);\n }\n // Transpose to time-major, i.e., from [batch, time, ...] to [time, batch,\n // ...].\n const axes = [1, 0].concat(math_utils.range(2, ndim));\n inputs = tfc.transpose(inputs, axes);\n if (constants != null) {\n throw new NotImplementedError('The rnn() functoin of the deeplearn.js backend does not support ' +\n 'constants yet.');\n }\n // Porting Note: the unroll option is ignored by the imperative backend.\n if (unroll) {\n console.warn('Backend rnn(): the unroll = true option is not applicable to the ' +\n 'imperative deeplearn.js backend.');\n }\n if (mask != null) {\n mask = mask.asType('bool').asType('float32');\n if (mask.rank === ndim - 1) {\n mask = tfc.expandDims(mask, -1);\n }\n mask = tfc.transpose(mask, axes);\n }\n if (goBackwards) {\n inputs = tfc.reverse(inputs, 0);\n if (mask != null) {\n mask = tfc.reverse(mask, 0);\n }\n }\n // Porting Note: PyKeras with TensorFlow backend uses a symbolic loop\n // (tf.while_loop). But for the imperative deeplearn.js backend, we just\n // use the usual TypeScript control flow to iterate over the time steps in\n // the inputs.\n // Porting Note: PyKeras patches a \"_use_learning_phase\" attribute to\n // outputs.\n // This is not idiomatic in TypeScript. 
The info regarding whether we are\n // in a learning (i.e., training) phase for RNN is passed in a different\n // way.\n const perStepOutputs = [];\n let lastOutput;\n let states = initialStates;\n const timeSteps = inputs.shape[0];\n const perStepInputs = tfc.unstack(inputs);\n let perStepMasks;\n if (mask != null) {\n perStepMasks = tfc.unstack(mask);\n }\n for (let t = 0; t < timeSteps; ++t) {\n const currentInput = perStepInputs[t];\n const stepOutputs = tfc.tidy(() => stepFunction(currentInput, states));\n if (mask == null) {\n lastOutput = stepOutputs[0];\n states = stepOutputs[1];\n }\n else {\n const maskedOutputs = tfc.tidy(() => {\n const stepMask = perStepMasks[t];\n const negStepMask = tfc.onesLike(stepMask).sub(stepMask);\n // TODO(cais): Would tfc.where() be better for performance?\n const output = stepOutputs[0].mul(stepMask).add(states[0].mul(negStepMask));\n const newStates = states.map((state, i) => {\n return stepOutputs[1][i].mul(stepMask).add(state.mul(negStepMask));\n });\n return { output, newStates };\n });\n lastOutput = maskedOutputs.output;\n states = maskedOutputs.newStates;\n }\n if (needPerStepOutputs) {\n perStepOutputs.push(lastOutput);\n }\n }\n let outputs;\n if (needPerStepOutputs) {\n const axis = 1;\n outputs = tfc.stack(perStepOutputs, axis);\n }\n return [lastOutput, outputs, states];\n });\n}\nexport class RNN extends Layer {\n constructor(args) {\n super(args);\n let cell;\n if (args.cell == null) {\n throw new ValueError('cell property is missing for the constructor of RNN.');\n }\n else if (Array.isArray(args.cell)) {\n cell = new StackedRNNCells({ cells: args.cell });\n }\n else {\n cell = args.cell;\n }\n if (cell.stateSize == null) {\n throw new ValueError('The RNN cell should have an attribute `stateSize` (tuple of ' +\n 'integers, one integer per RNN state).');\n }\n this.cell = cell;\n this.returnSequences =\n args.returnSequences == null ? false : args.returnSequences;\n this.returnState = args.returnState == null ? false : args.returnState;\n this.goBackwards = args.goBackwards == null ? false : args.goBackwards;\n this._stateful = args.stateful == null ? false : args.stateful;\n this.unroll = args.unroll == null ? false : args.unroll;\n this.supportsMasking = true;\n this.inputSpec = [new InputSpec({ ndim: 3 })];\n this.stateSpec = null;\n this.states_ = null;\n // TODO(cais): Add constantsSpec and numConstants.\n this.numConstants = null;\n // TODO(cais): Look into the use of initial_state in the kwargs of the\n // constructor.\n this.keptStates = [];\n }\n // Porting Note: This is the equivalent of `RNN.states` property getter in\n // PyKeras.\n getStates() {\n if (this.states_ == null) {\n const numStates = Array.isArray(this.cell.stateSize) ? 
this.cell.stateSize.length : 1;\n return math_utils.range(0, numStates).map(x => null);\n }\n else {\n return this.states_;\n }\n }\n // Porting Note: This is the equivalent of the `RNN.states` property setter in\n // PyKeras.\n setStates(states) {\n this.states_ = states;\n }\n computeOutputShape(inputShape) {\n if (isArrayOfShapes(inputShape)) {\n inputShape = inputShape[0];\n }\n inputShape = inputShape;\n // TODO(cais): Remove the casting once stacked RNN cells become supported.\n let stateSize = this.cell.stateSize;\n if (!Array.isArray(stateSize)) {\n stateSize = [stateSize];\n }\n const outputDim = stateSize[0];\n let outputShape;\n if (this.returnSequences) {\n outputShape = [inputShape[0], inputShape[1], outputDim];\n }\n else {\n outputShape = [inputShape[0], outputDim];\n }\n if (this.returnState) {\n const stateShape = [];\n for (const dim of stateSize) {\n stateShape.push([inputShape[0], dim]);\n }\n return [outputShape].concat(stateShape);\n }\n else {\n return outputShape;\n }\n }\n computeMask(inputs, mask) {\n return tfc.tidy(() => {\n if (Array.isArray(mask)) {\n mask = mask[0];\n }\n const outputMask = this.returnSequences ? mask : null;\n if (this.returnState) {\n const stateMask = this.states.map(s => null);\n return [outputMask].concat(stateMask);\n }\n else {\n return outputMask;\n }\n });\n }\n /**\n * Get the current state tensors of the RNN.\n *\n * If the state hasn't been set, return an array of `null`s of the correct\n * length.\n */\n get states() {\n if (this.states_ == null) {\n const numStates = Array.isArray(this.cell.stateSize) ? this.cell.stateSize.length : 1;\n const output = [];\n for (let i = 0; i < numStates; ++i) {\n output.push(null);\n }\n return output;\n }\n else {\n return this.states_;\n }\n }\n set states(s) {\n this.states_ = s;\n }\n build(inputShape) {\n // Note inputShape will be an Array of Shapes of initial states and\n // constants if these are passed in apply().\n const constantShape = null;\n if (this.numConstants != null) {\n throw new NotImplementedError('Constants support is not implemented in RNN yet.');\n }\n if (isArrayOfShapes(inputShape)) {\n inputShape = inputShape[0];\n }\n inputShape = inputShape;\n const batchSize = this.stateful ? inputShape[0] : null;\n const inputDim = inputShape.slice(2);\n this.inputSpec[0] = new InputSpec({ shape: [batchSize, null, ...inputDim] });\n // Allow cell (if RNNCell Layer) to build before we set or validate\n // stateSpec.\n const stepInputShape = [inputShape[0]].concat(inputShape.slice(2));\n if (constantShape != null) {\n throw new NotImplementedError('Constants support is not implemented in RNN yet.');\n }\n else {\n this.cell.build(stepInputShape);\n }\n // Set or validate stateSpec.\n let stateSize;\n if (Array.isArray(this.cell.stateSize)) {\n stateSize = this.cell.stateSize;\n }\n else {\n stateSize = [this.cell.stateSize];\n }\n if (this.stateSpec != null) {\n if (!util.arraysEqual(this.stateSpec.map(spec => spec.shape[spec.shape.length - 1]), stateSize)) {\n throw new ValueError(`An initialState was passed that is not compatible with ` +\n `cell.stateSize. 
Received stateSpec=${this.stateSpec}; ` +\n `However cell.stateSize is ${this.cell.stateSize}`);\n }\n }\n else {\n this.stateSpec =\n stateSize.map(dim => new InputSpec({ shape: [null, dim] }));\n }\n if (this.stateful) {\n this.resetStates();\n }\n }\n /**\n * Reset the state tensors of the RNN.\n *\n * If the `states` argument is `undefined` or `null`, will set the\n * state tensor(s) of the RNN to all-zero tensors of the appropriate\n * shape(s).\n *\n * If `states` is provided, will set the state tensors of the RNN to its\n * value.\n *\n * @param states Optional externally-provided initial states.\n * @param training Whether this call is done during training. For stateful\n * RNNs, this affects whether the old states are kept or discarded. In\n * particular, if `training` is `true`, the old states will be kept so\n * that subsequent backpropgataion through time (BPTT) may work properly.\n * Else, the old states will be discarded.\n */\n resetStates(states, training = false) {\n tidy(() => {\n if (!this.stateful) {\n throw new AttributeError('Cannot call resetStates() on an RNN Layer that is not stateful.');\n }\n const batchSize = this.inputSpec[0].shape[0];\n if (batchSize == null) {\n throw new ValueError('If an RNN is stateful, it needs to know its batch size. Specify ' +\n 'the batch size of your input tensors: \\n' +\n '- If using a Sequential model, specify the batch size by ' +\n 'passing a `batchInputShape` option to your first layer.\\n' +\n '- If using the functional API, specify the batch size by ' +\n 'passing a `batchShape` option to your Input layer.');\n }\n // Initialize state if null.\n if (this.states_ == null) {\n if (Array.isArray(this.cell.stateSize)) {\n this.states_ =\n this.cell.stateSize.map(dim => tfc.zeros([batchSize, dim]));\n }\n else {\n this.states_ = [tfc.zeros([batchSize, this.cell.stateSize])];\n }\n }\n else if (states == null) {\n // Dispose old state tensors.\n tfc.dispose(this.states_);\n // For stateful RNNs, fully dispose kept old states.\n if (this.keptStates != null) {\n tfc.dispose(this.keptStates);\n this.keptStates = [];\n }\n if (Array.isArray(this.cell.stateSize)) {\n this.states_ =\n this.cell.stateSize.map(dim => tfc.zeros([batchSize, dim]));\n }\n else {\n this.states_[0] = tfc.zeros([batchSize, this.cell.stateSize]);\n }\n }\n else {\n if (!Array.isArray(states)) {\n states = [states];\n }\n if (states.length !== this.states_.length) {\n throw new ValueError(`Layer ${this.name} expects ${this.states_.length} state(s), ` +\n `but it received ${states.length} state value(s). Input ` +\n `received: ${states}`);\n }\n if (training === true) {\n // Store old state tensors for complete disposal later, i.e., during\n // the next no-arg call to this method. 
We do not dispose the old\n // states immediately because that BPTT (among other things) require\n // them.\n this.keptStates.push(this.states_.slice());\n }\n else {\n tfc.dispose(this.states_);\n }\n for (let index = 0; index < this.states_.length; ++index) {\n const value = states[index];\n const dim = Array.isArray(this.cell.stateSize) ?\n this.cell.stateSize[index] :\n this.cell.stateSize;\n const expectedShape = [batchSize, dim];\n if (!util.arraysEqual(value.shape, expectedShape)) {\n throw new ValueError(`State ${index} is incompatible with layer ${this.name}: ` +\n `expected shape=${expectedShape}, received shape=${value.shape}`);\n }\n this.states_[index] = value;\n }\n }\n this.states_ = this.states_.map(state => tfc.keep(state.clone()));\n });\n }\n apply(inputs, kwargs) {\n // TODO(cais): Figure out whether initialState is in kwargs or inputs.\n let initialState = kwargs == null ? null : kwargs['initialState'];\n let constants = kwargs == null ? null : kwargs['constants'];\n if (kwargs == null) {\n kwargs = {};\n }\n const standardized = standardizeArgs(inputs, initialState, constants, this.numConstants);\n inputs = standardized.inputs;\n initialState = standardized.initialState;\n constants = standardized.constants;\n // If any of `initial_state` or `constants` are specified and are\n // `tf.SymbolicTensor`s, then add them to the inputs and temporarily modify\n // the input_spec to include them.\n let additionalInputs = [];\n let additionalSpecs = [];\n if (initialState != null) {\n kwargs['initialState'] = initialState;\n additionalInputs = additionalInputs.concat(initialState);\n this.stateSpec = [];\n for (const state of initialState) {\n this.stateSpec.push(new InputSpec({ shape: state.shape }));\n }\n // TODO(cais): Use the following instead.\n // this.stateSpec = initialState.map(state => new InputSpec({shape:\n // state.shape}));\n additionalSpecs = additionalSpecs.concat(this.stateSpec);\n }\n if (constants != null) {\n kwargs['constants'] = constants;\n additionalInputs = additionalInputs.concat(constants);\n // TODO(cais): Add this.constantsSpec.\n this.numConstants = constants.length;\n }\n const isTensor = additionalInputs[0] instanceof SymbolicTensor;\n if (isTensor) {\n // Compute full input spec, including state and constants.\n const fullInput = [inputs].concat(additionalInputs);\n const fullInputSpec = this.inputSpec.concat(additionalSpecs);\n // Perform the call with temporarily replaced inputSpec.\n const originalInputSpec = this.inputSpec;\n this.inputSpec = fullInputSpec;\n const output = super.apply(fullInput, kwargs);\n this.inputSpec = originalInputSpec;\n return output;\n }\n else {\n return super.apply(inputs, kwargs);\n }\n }\n // tslint:disable-next-line:no-any\n call(inputs, kwargs) {\n // Input shape: `[samples, time (padded with zeros), input_dim]`.\n // Note that the .build() method of subclasses **must** define\n // this.inputSpec and this.stateSpec owith complete input shapes.\n return tidy(() => {\n const mask = kwargs == null ? null : kwargs['mask'];\n const training = kwargs == null ? null : kwargs['training'];\n let initialState = kwargs == null ? null : kwargs['initialState'];\n inputs = getExactlyOneTensor(inputs);\n if (initialState == null) {\n if (this.stateful) {\n initialState = this.states_;\n }\n else {\n initialState = this.getInitialState(inputs);\n }\n }\n const numStates = Array.isArray(this.cell.stateSize) ? 
this.cell.stateSize.length : 1;\n if (initialState.length !== numStates) {\n throw new ValueError(`RNN Layer has ${numStates} state(s) but was passed ` +\n `${initialState.length} initial state(s).`);\n }\n if (this.unroll) {\n console.warn('Ignoring unroll = true for RNN layer, due to imperative backend.');\n }\n const cellCallKwargs = { training };\n // TODO(cais): Add support for constants.\n const step = (inputs, states) => {\n // `inputs` and `states` are concatenated to form a single `Array` of\n // `tf.Tensor`s as the input to `cell.call()`.\n const outputs = this.cell.call([inputs].concat(states), cellCallKwargs);\n // Marshall the return value into output and new states.\n return [outputs[0], outputs.slice(1)];\n };\n // TODO(cais): Add support for constants.\n const rnnOutputs = rnn(step, inputs, initialState, this.goBackwards, mask, null, this.unroll, this.returnSequences);\n const lastOutput = rnnOutputs[0];\n const outputs = rnnOutputs[1];\n const states = rnnOutputs[2];\n if (this.stateful) {\n this.resetStates(states, training);\n }\n const output = this.returnSequences ? outputs : lastOutput;\n // TODO(cais): Porperty set learning phase flag.\n if (this.returnState) {\n return [output].concat(states);\n }\n else {\n return output;\n }\n });\n }\n getInitialState(inputs) {\n return tidy(() => {\n // Build an all-zero tensor of shape [samples, outputDim].\n // [Samples, timeSteps, inputDim].\n let initialState = tfc.zeros(inputs.shape);\n // [Samples].\n initialState = tfc.sum(initialState, [1, 2]);\n initialState = K.expandDims(initialState); // [Samples, 1].\n if (Array.isArray(this.cell.stateSize)) {\n return this.cell.stateSize.map(dim => dim > 1 ? K.tile(initialState, [1, dim]) : initialState);\n }\n else {\n return this.cell.stateSize > 1 ?\n [K.tile(initialState, [1, this.cell.stateSize])] :\n [initialState];\n }\n });\n }\n get trainableWeights() {\n if (!this.trainable) {\n return [];\n }\n // Porting Note: In TypeScript, `this` is always an instance of `Layer`.\n return this.cell.trainableWeights;\n }\n get nonTrainableWeights() {\n // Porting Note: In TypeScript, `this` is always an instance of `Layer`.\n if (!this.trainable) {\n return this.cell.weights;\n }\n return this.cell.nonTrainableWeights;\n }\n setFastWeightInitDuringBuild(value) {\n super.setFastWeightInitDuringBuild(value);\n if (this.cell != null) {\n this.cell.setFastWeightInitDuringBuild(value);\n }\n }\n getConfig() {\n const baseConfig = super.getConfig();\n const config = {\n returnSequences: this.returnSequences,\n returnState: this.returnState,\n goBackwards: this.goBackwards,\n stateful: this.stateful,\n unroll: this.unroll,\n };\n if (this.numConstants != null) {\n config['numConstants'] = this.numConstants;\n }\n const cellConfig = this.cell.getConfig();\n if (this.getClassName() === RNN.className) {\n config['cell'] = {\n 'className': this.cell.getClassName(),\n 'config': cellConfig,\n };\n }\n // this order is necessary, to prevent cell name from replacing layer name\n return Object.assign({}, cellConfig, baseConfig, config);\n }\n /** @nocollapse */\n static fromConfig(cls, config, customObjects = {}) {\n const cellConfig = config['cell'];\n const cell = deserialize(cellConfig, customObjects);\n return new cls(Object.assign(config, { cell }));\n }\n}\n/** @nocollapse */\nRNN.className = 'RNN';\nserialization.registerClass(RNN);\n// Porting Note: This is a common parent class for RNN cells. There is no\n// equivalent of this in PyKeras. 
Having a common parent class forgoes the\n// need for `has_attr(cell, ...)` checks or its TypeScript equivalent.\n/**\n * An RNNCell layer.\n *\n * @doc {heading: 'Layers', subheading: 'Classes'}\n */\nexport class RNNCell extends Layer {\n}\nexport class SimpleRNNCell extends RNNCell {\n constructor(args) {\n super(args);\n this.DEFAULT_ACTIVATION = 'tanh';\n this.DEFAULT_KERNEL_INITIALIZER = 'glorotNormal';\n this.DEFAULT_RECURRENT_INITIALIZER = 'orthogonal';\n this.DEFAULT_BIAS_INITIALIZER = 'zeros';\n this.units = args.units;\n assertPositiveInteger(this.units, `units`);\n this.activation = getActivation(args.activation == null ? this.DEFAULT_ACTIVATION : args.activation);\n this.useBias = args.useBias == null ? true : args.useBias;\n this.kernelInitializer = getInitializer(args.kernelInitializer || this.DEFAULT_KERNEL_INITIALIZER);\n this.recurrentInitializer = getInitializer(args.recurrentInitializer || this.DEFAULT_RECURRENT_INITIALIZER);\n this.biasInitializer =\n getInitializer(args.biasInitializer || this.DEFAULT_BIAS_INITIALIZER);\n this.kernelRegularizer = getRegularizer(args.kernelRegularizer);\n this.recurrentRegularizer = getRegularizer(args.recurrentRegularizer);\n this.biasRegularizer = getRegularizer(args.biasRegularizer);\n this.kernelConstraint = getConstraint(args.kernelConstraint);\n this.recurrentConstraint = getConstraint(args.recurrentConstraint);\n this.biasConstraint = getConstraint(args.biasConstraint);\n this.dropout = math_utils.min([1, math_utils.max([0, args.dropout == null ? 0 : args.dropout])]);\n this.recurrentDropout = math_utils.min([\n 1,\n math_utils.max([0, args.recurrentDropout == null ? 0 : args.recurrentDropout])\n ]);\n this.stateSize = this.units;\n this.dropoutMask = null;\n this.recurrentDropoutMask = null;\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n // TODO(cais): Use regularizer.\n this.kernel = this.addWeight('kernel', [inputShape[inputShape.length - 1], this.units], null, this.kernelInitializer, this.kernelRegularizer, true, this.kernelConstraint);\n this.recurrentKernel = this.addWeight('recurrent_kernel', [this.units, this.units], null, this.recurrentInitializer, this.recurrentRegularizer, true, this.recurrentConstraint);\n if (this.useBias) {\n this.bias = this.addWeight('bias', [this.units], null, this.biasInitializer, this.biasRegularizer, true, this.biasConstraint);\n }\n else {\n this.bias = null;\n }\n this.built = true;\n }\n // Porting Note: PyKeras' equivalent of this method takes two tensor inputs:\n // `inputs` and `states`. Here, the two tensors are combined into an\n // `Tensor[]` Array as the first input argument.\n // Similarly, PyKeras' equivalent of this method returns two values:\n // `output` and `[output]`. Here the two are combined into one length-2\n // `Tensor[]`, consisting of `output` repeated.\n call(inputs, kwargs) {\n return tidy(() => {\n inputs = inputs;\n if (inputs.length !== 2) {\n throw new ValueError(`SimpleRNNCell expects 2 input Tensors, got ${inputs.length}.`);\n }\n let prevOutput = inputs[1];\n inputs = inputs[0];\n const training = kwargs['training'] == null ? 
false : kwargs['training'];\n if (0 < this.dropout && this.dropout < 1 && this.dropoutMask == null) {\n this.dropoutMask = generateDropoutMask({\n ones: () => tfc.onesLike(inputs),\n rate: this.dropout,\n training\n });\n }\n if (0 < this.recurrentDropout && this.recurrentDropout < 1 &&\n this.recurrentDropoutMask == null) {\n this.recurrentDropoutMask = generateDropoutMask({\n ones: () => tfc.onesLike(prevOutput),\n rate: this.recurrentDropout,\n training\n });\n }\n let h;\n const dpMask = this.dropoutMask;\n const recDpMask = this.recurrentDropoutMask;\n if (dpMask != null) {\n h = K.dot(tfc.mul(inputs, dpMask), this.kernel.read());\n }\n else {\n h = K.dot(inputs, this.kernel.read());\n }\n if (this.bias != null) {\n h = K.biasAdd(h, this.bias.read());\n }\n if (recDpMask != null) {\n prevOutput = tfc.mul(prevOutput, recDpMask);\n }\n let output = tfc.add(h, K.dot(prevOutput, this.recurrentKernel.read()));\n if (this.activation != null) {\n output = this.activation.apply(output);\n }\n // TODO(cais): Properly set learning phase on output tensor?\n return [output, output];\n });\n }\n getConfig() {\n const baseConfig = super.getConfig();\n const config = {\n units: this.units,\n activation: serializeActivation(this.activation),\n useBias: this.useBias,\n kernelInitializer: serializeInitializer(this.kernelInitializer),\n recurrentInitializer: serializeInitializer(this.recurrentInitializer),\n biasInitializer: serializeInitializer(this.biasInitializer),\n kernelRegularizer: serializeRegularizer(this.kernelRegularizer),\n recurrentRegularizer: serializeRegularizer(this.recurrentRegularizer),\n biasRegularizer: serializeRegularizer(this.biasRegularizer),\n activityRegularizer: serializeRegularizer(this.activityRegularizer),\n kernelConstraint: serializeConstraint(this.kernelConstraint),\n recurrentConstraint: serializeConstraint(this.recurrentConstraint),\n biasConstraint: serializeConstraint(this.biasConstraint),\n dropout: this.dropout,\n recurrentDropout: this.recurrentDropout,\n };\n return Object.assign({}, baseConfig, config);\n }\n}\n/** @nocollapse */\nSimpleRNNCell.className = 'SimpleRNNCell';\nserialization.registerClass(SimpleRNNCell);\nexport class SimpleRNN extends RNN {\n constructor(args) {\n args.cell = new SimpleRNNCell(args);\n super(args);\n // TODO(cais): Add activityRegularizer.\n }\n call(inputs, kwargs) {\n return tidy(() => {\n if (this.cell.dropoutMask != null) {\n tfc.dispose(this.cell.dropoutMask);\n this.cell.dropoutMask = null;\n }\n if (this.cell.recurrentDropoutMask != null) {\n tfc.dispose(this.cell.recurrentDropoutMask);\n this.cell.recurrentDropoutMask = null;\n }\n const mask = kwargs == null ? null : kwargs['mask'];\n const training = kwargs == null ? null : kwargs['training'];\n const initialState = kwargs == null ? 
null : kwargs['initialState'];\n return super.call(inputs, { mask, training, initialState });\n });\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls(config);\n }\n}\n/** @nocollapse */\nSimpleRNN.className = 'SimpleRNN';\nserialization.registerClass(SimpleRNN);\nexport class GRUCell extends RNNCell {\n constructor(args) {\n super(args);\n this.DEFAULT_ACTIVATION = 'tanh';\n this.DEFAULT_RECURRENT_ACTIVATION = 'hardSigmoid';\n this.DEFAULT_KERNEL_INITIALIZER = 'glorotNormal';\n this.DEFAULT_RECURRENT_INITIALIZER = 'orthogonal';\n this.DEFAULT_BIAS_INITIALIZER = 'zeros';\n if (args.resetAfter) {\n throw new ValueError(`GRUCell does not support reset_after parameter set to true.`);\n }\n this.units = args.units;\n assertPositiveInteger(this.units, 'units');\n this.activation = getActivation(args.activation === undefined ? this.DEFAULT_ACTIVATION :\n args.activation);\n this.recurrentActivation = getActivation(args.recurrentActivation === undefined ?\n this.DEFAULT_RECURRENT_ACTIVATION :\n args.recurrentActivation);\n this.useBias = args.useBias == null ? true : args.useBias;\n this.kernelInitializer = getInitializer(args.kernelInitializer || this.DEFAULT_KERNEL_INITIALIZER);\n this.recurrentInitializer = getInitializer(args.recurrentInitializer || this.DEFAULT_RECURRENT_INITIALIZER);\n this.biasInitializer =\n getInitializer(args.biasInitializer || this.DEFAULT_BIAS_INITIALIZER);\n this.kernelRegularizer = getRegularizer(args.kernelRegularizer);\n this.recurrentRegularizer = getRegularizer(args.recurrentRegularizer);\n this.biasRegularizer = getRegularizer(args.biasRegularizer);\n this.kernelConstraint = getConstraint(args.kernelConstraint);\n this.recurrentConstraint = getConstraint(args.recurrentConstraint);\n this.biasConstraint = getConstraint(args.biasConstraint);\n this.dropout = math_utils.min([1, math_utils.max([0, args.dropout == null ? 0 : args.dropout])]);\n this.recurrentDropout = math_utils.min([\n 1,\n math_utils.max([0, args.recurrentDropout == null ? 0 : args.recurrentDropout])\n ]);\n this.implementation = args.implementation;\n this.stateSize = this.units;\n this.dropoutMask = null;\n this.recurrentDropoutMask = null;\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const inputDim = inputShape[inputShape.length - 1];\n this.kernel = this.addWeight('kernel', [inputDim, this.units * 3], null, this.kernelInitializer, this.kernelRegularizer, true, this.kernelConstraint);\n this.recurrentKernel = this.addWeight('recurrent_kernel', [this.units, this.units * 3], null, this.recurrentInitializer, this.recurrentRegularizer, true, this.recurrentConstraint);\n if (this.useBias) {\n this.bias = this.addWeight('bias', [this.units * 3], null, this.biasInitializer, this.biasRegularizer, true, this.biasConstraint);\n }\n else {\n this.bias = null;\n }\n // Porting Notes: Unlike the PyKeras implementation, we perform slicing\n // of the weights and bias in the call() method, at execution time.\n this.built = true;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n inputs = inputs;\n if (inputs.length !== 2) {\n throw new ValueError(`GRUCell expects 2 input Tensors (inputs, h, c), got ` +\n `${inputs.length}.`);\n }\n const training = kwargs['training'] == null ? 
false : kwargs['training'];\n let hTMinus1 = inputs[1]; // Previous memory state.\n inputs = inputs[0];\n // Note: For superior performance, TensorFlow.js always uses\n // implementation 2, regardless of the actual value of\n // config.implementation.\n if (0 < this.dropout && this.dropout < 1 && this.dropoutMask == null) {\n this.dropoutMask = generateDropoutMask({\n ones: () => tfc.onesLike(inputs),\n rate: this.dropout,\n training,\n count: 3\n });\n }\n if (0 < this.recurrentDropout && this.recurrentDropout < 1 &&\n this.recurrentDropoutMask == null) {\n this.recurrentDropoutMask = generateDropoutMask({\n ones: () => tfc.onesLike(hTMinus1),\n rate: this.recurrentDropout,\n training,\n count: 3\n });\n }\n const dpMask = this.dropoutMask;\n const recDpMask = this.recurrentDropoutMask;\n let z;\n let r;\n let hh;\n if (0 < this.dropout && this.dropout < 1) {\n inputs = tfc.mul(inputs, dpMask[0]);\n }\n let matrixX = K.dot(inputs, this.kernel.read());\n if (this.useBias) {\n matrixX = K.biasAdd(matrixX, this.bias.read());\n }\n if (0 < this.recurrentDropout && this.recurrentDropout < 1) {\n hTMinus1 = tfc.mul(hTMinus1, recDpMask[0]);\n }\n const recurrentKernelValue = this.recurrentKernel.read();\n const [rk1, rk2] = tfc.split(recurrentKernelValue, [2 * this.units, this.units], recurrentKernelValue.rank - 1);\n const matrixInner = K.dot(hTMinus1, rk1);\n const [xZ, xR, xH] = tfc.split(matrixX, 3, matrixX.rank - 1);\n const [recurrentZ, recurrentR] = tfc.split(matrixInner, 2, matrixInner.rank - 1);\n z = this.recurrentActivation.apply(tfc.add(xZ, recurrentZ));\n r = this.recurrentActivation.apply(tfc.add(xR, recurrentR));\n const recurrentH = K.dot(tfc.mul(r, hTMinus1), rk2);\n hh = this.activation.apply(tfc.add(xH, recurrentH));\n const h = tfc.add(tfc.mul(z, hTMinus1), tfc.mul(tfc.add(1, tfc.neg(z)), hh));\n // TODO(cais): Add use_learning_phase flag properly.\n return [h, h];\n });\n }\n getConfig() {\n const baseConfig = super.getConfig();\n const config = {\n units: this.units,\n activation: serializeActivation(this.activation),\n recurrentActivation: serializeActivation(this.recurrentActivation),\n useBias: this.useBias,\n kernelInitializer: serializeInitializer(this.kernelInitializer),\n recurrentInitializer: serializeInitializer(this.recurrentInitializer),\n biasInitializer: serializeInitializer(this.biasInitializer),\n kernelRegularizer: serializeRegularizer(this.kernelRegularizer),\n recurrentRegularizer: serializeRegularizer(this.recurrentRegularizer),\n biasRegularizer: serializeRegularizer(this.biasRegularizer),\n activityRegularizer: serializeRegularizer(this.activityRegularizer),\n kernelConstraint: serializeConstraint(this.kernelConstraint),\n recurrentConstraint: serializeConstraint(this.recurrentConstraint),\n biasConstraint: serializeConstraint(this.biasConstraint),\n dropout: this.dropout,\n recurrentDropout: this.recurrentDropout,\n implementation: this.implementation,\n resetAfter: false\n };\n return Object.assign({}, baseConfig, config);\n }\n}\n/** @nocollapse */\nGRUCell.className = 'GRUCell';\nserialization.registerClass(GRUCell);\nexport class GRU extends RNN {\n constructor(args) {\n if (args.implementation === 0) {\n console.warn('`implementation=0` has been deprecated, and now defaults to ' +\n '`implementation=1`. 
Please update your layer call.');\n }\n args.cell = new GRUCell(args);\n super(args);\n // TODO(cais): Add activityRegularizer.\n }\n call(inputs, kwargs) {\n return tidy(() => {\n if (this.cell.dropoutMask != null) {\n tfc.dispose(this.cell.dropoutMask);\n this.cell.dropoutMask = null;\n }\n if (this.cell.recurrentDropoutMask != null) {\n tfc.dispose(this.cell.recurrentDropoutMask);\n this.cell.recurrentDropoutMask = null;\n }\n const mask = kwargs == null ? null : kwargs['mask'];\n const training = kwargs == null ? null : kwargs['training'];\n const initialState = kwargs == null ? null : kwargs['initialState'];\n return super.call(inputs, { mask, training, initialState });\n });\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n if (config['implmentation'] === 0) {\n config['implementation'] = 1;\n }\n return new cls(config);\n }\n}\n/** @nocollapse */\nGRU.className = 'GRU';\nserialization.registerClass(GRU);\nexport class LSTMCell extends RNNCell {\n constructor(args) {\n super(args);\n this.DEFAULT_ACTIVATION = 'tanh';\n this.DEFAULT_RECURRENT_ACTIVATION = 'hardSigmoid';\n this.DEFAULT_KERNEL_INITIALIZER = 'glorotNormal';\n this.DEFAULT_RECURRENT_INITIALIZER = 'orthogonal';\n this.DEFAULT_BIAS_INITIALIZER = 'zeros';\n this.units = args.units;\n assertPositiveInteger(this.units, 'units');\n this.activation = getActivation(args.activation === undefined ? this.DEFAULT_ACTIVATION :\n args.activation);\n this.recurrentActivation = getActivation(args.recurrentActivation === undefined ?\n this.DEFAULT_RECURRENT_ACTIVATION :\n args.recurrentActivation);\n this.useBias = args.useBias == null ? true : args.useBias;\n this.kernelInitializer = getInitializer(args.kernelInitializer || this.DEFAULT_KERNEL_INITIALIZER);\n this.recurrentInitializer = getInitializer(args.recurrentInitializer || this.DEFAULT_RECURRENT_INITIALIZER);\n this.biasInitializer =\n getInitializer(args.biasInitializer || this.DEFAULT_BIAS_INITIALIZER);\n this.unitForgetBias = args.unitForgetBias;\n this.kernelRegularizer = getRegularizer(args.kernelRegularizer);\n this.recurrentRegularizer = getRegularizer(args.recurrentRegularizer);\n this.biasRegularizer = getRegularizer(args.biasRegularizer);\n this.kernelConstraint = getConstraint(args.kernelConstraint);\n this.recurrentConstraint = getConstraint(args.recurrentConstraint);\n this.biasConstraint = getConstraint(args.biasConstraint);\n this.dropout = math_utils.min([1, math_utils.max([0, args.dropout == null ? 0 : args.dropout])]);\n this.recurrentDropout = math_utils.min([\n 1,\n math_utils.max([0, args.recurrentDropout == null ? 
0 : args.recurrentDropout])\n ]);\n this.implementation = args.implementation;\n this.stateSize = [this.units, this.units];\n this.dropoutMask = null;\n this.recurrentDropoutMask = null;\n }\n build(inputShape) {\n var _a;\n inputShape = getExactlyOneShape(inputShape);\n const inputDim = inputShape[inputShape.length - 1];\n this.kernel = this.addWeight('kernel', [inputDim, this.units * 4], null, this.kernelInitializer, this.kernelRegularizer, true, this.kernelConstraint);\n this.recurrentKernel = this.addWeight('recurrent_kernel', [this.units, this.units * 4], null, this.recurrentInitializer, this.recurrentRegularizer, true, this.recurrentConstraint);\n let biasInitializer;\n if (this.useBias) {\n if (this.unitForgetBias) {\n const capturedBiasInit = this.biasInitializer;\n const capturedUnits = this.units;\n biasInitializer = new (_a = class CustomInit extends Initializer {\n apply(shape, dtype) {\n // TODO(cais): More informative variable names?\n const bI = capturedBiasInit.apply([capturedUnits]);\n const bF = (new Ones()).apply([capturedUnits]);\n const bCAndH = capturedBiasInit.apply([capturedUnits * 2]);\n return K.concatAlongFirstAxis(K.concatAlongFirstAxis(bI, bF), bCAndH);\n }\n },\n /** @nocollapse */\n _a.className = 'CustomInit',\n _a)();\n }\n else {\n biasInitializer = this.biasInitializer;\n }\n this.bias = this.addWeight('bias', [this.units * 4], null, biasInitializer, this.biasRegularizer, true, this.biasConstraint);\n }\n else {\n this.bias = null;\n }\n // Porting Notes: Unlike the PyKeras implementation, we perform slicing\n // of the weights and bias in the call() method, at execution time.\n this.built = true;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n const training = kwargs['training'] == null ? false : kwargs['training'];\n inputs = inputs;\n if (inputs.length !== 3) {\n throw new ValueError(`LSTMCell expects 3 input Tensors (inputs, h, c), got ` +\n `${inputs.length}.`);\n }\n let hTMinus1 = inputs[1]; // Previous memory state.\n const cTMinus1 = inputs[2]; // Previous carry state.\n inputs = inputs[0];\n if (0 < this.dropout && this.dropout < 1 && this.dropoutMask == null) {\n this.dropoutMask = generateDropoutMask({\n ones: () => tfc.onesLike(inputs),\n rate: this.dropout,\n training,\n count: 4\n });\n }\n if (0 < this.recurrentDropout && this.recurrentDropout < 1 &&\n this.recurrentDropoutMask == null) {\n this.recurrentDropoutMask = generateDropoutMask({\n ones: () => tfc.onesLike(hTMinus1),\n rate: this.recurrentDropout,\n training,\n count: 4\n });\n }\n const dpMask = this.dropoutMask;\n const recDpMask = this.recurrentDropoutMask;\n // Note: For superior performance, TensorFlow.js always uses\n // implementation 2 regardless of the actual value of\n // config.implementation.\n let i;\n let f;\n let c;\n let o;\n if (0 < this.dropout && this.dropout < 1) {\n inputs = tfc.mul(inputs, dpMask[0]);\n }\n let z = K.dot(inputs, this.kernel.read());\n if (0 < this.recurrentDropout && this.recurrentDropout < 1) {\n hTMinus1 = tfc.mul(hTMinus1, recDpMask[0]);\n }\n z = tfc.add(z, K.dot(hTMinus1, this.recurrentKernel.read()));\n if (this.useBias) {\n z = K.biasAdd(z, this.bias.read());\n }\n const [z0, z1, z2, z3] = tfc.split(z, 4, z.rank - 1);\n i = this.recurrentActivation.apply(z0);\n f = this.recurrentActivation.apply(z1);\n c = tfc.add(tfc.mul(f, cTMinus1), tfc.mul(i, this.activation.apply(z2)));\n o = this.recurrentActivation.apply(z3);\n const h = tfc.mul(o, this.activation.apply(c));\n // TODO(cais): Add use_learning_phase flag properly.\n return 
[h, h, c];\n });\n }\n getConfig() {\n const baseConfig = super.getConfig();\n const config = {\n units: this.units,\n activation: serializeActivation(this.activation),\n recurrentActivation: serializeActivation(this.recurrentActivation),\n useBias: this.useBias,\n kernelInitializer: serializeInitializer(this.kernelInitializer),\n recurrentInitializer: serializeInitializer(this.recurrentInitializer),\n biasInitializer: serializeInitializer(this.biasInitializer),\n unitForgetBias: this.unitForgetBias,\n kernelRegularizer: serializeRegularizer(this.kernelRegularizer),\n recurrentRegularizer: serializeRegularizer(this.recurrentRegularizer),\n biasRegularizer: serializeRegularizer(this.biasRegularizer),\n activityRegularizer: serializeRegularizer(this.activityRegularizer),\n kernelConstraint: serializeConstraint(this.kernelConstraint),\n recurrentConstraint: serializeConstraint(this.recurrentConstraint),\n biasConstraint: serializeConstraint(this.biasConstraint),\n dropout: this.dropout,\n recurrentDropout: this.recurrentDropout,\n implementation: this.implementation,\n };\n return Object.assign({}, baseConfig, config);\n }\n}\n/** @nocollapse */\nLSTMCell.className = 'LSTMCell';\nserialization.registerClass(LSTMCell);\nexport class LSTM extends RNN {\n constructor(args) {\n if (args.implementation === 0) {\n console.warn('`implementation=0` has been deprecated, and now defaults to ' +\n '`implementation=1`. Please update your layer call.');\n }\n args.cell = new LSTMCell(args);\n super(args);\n // TODO(cais): Add activityRegularizer.\n }\n call(inputs, kwargs) {\n return tidy(() => {\n if (this.cell.dropoutMask != null) {\n tfc.dispose(this.cell.dropoutMask);\n this.cell.dropoutMask = null;\n }\n if (this.cell.recurrentDropoutMask != null) {\n tfc.dispose(this.cell.recurrentDropoutMask);\n this.cell.recurrentDropoutMask = null;\n }\n const mask = kwargs == null ? null : kwargs['mask'];\n const training = kwargs == null ? null : kwargs['training'];\n const initialState = kwargs == null ? null : kwargs['initialState'];\n return super.call(inputs, { mask, training, initialState });\n });\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n if (config['implmentation'] === 0) {\n config['implementation'] = 1;\n }\n return new cls(config);\n }\n}\n/** @nocollapse */\nLSTM.className = 'LSTM';\nserialization.registerClass(LSTM);\nexport class StackedRNNCells extends RNNCell {\n constructor(args) {\n super(args);\n this.cells = args.cells;\n }\n get stateSize() {\n // States are a flat list in reverse order of the cell stack.\n // This allows perserving the requirement `stack.statesize[0] ===\n // outputDim`. 
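A minimal usage sketch for the GRU/LSTM layers defined above, assuming `tf` is the public `@tensorflow/tfjs` namespace and its `tf.layers.lstm` / `tf.layers.gru` factories (which construct these classes internally); this is illustrative only and not part of the bundled source:

```js
const tf = require('@tensorflow/tfjs');

// Stack recurrent layers: the LSTM keeps the time dimension (returnSequences),
// the GRU collapses it to its final hidden state.
const model = tf.sequential();
model.add(tf.layers.lstm({units: 16, returnSequences: true, inputShape: [10, 8]}));
model.add(tf.layers.gru({units: 8}));
model.add(tf.layers.dense({units: 1}));
model.compile({optimizer: 'adam', loss: 'meanSquaredError'});

const x = tf.randomNormal([4, 10, 8]); // [batch, timeSteps, features]
model.predict(x).print();              // shape [4, 1]
```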
E.g., states of a 2-layer LSTM would be `[h2, c2, h1, c1]`,\n // assuming one LSTM has states `[h, c]`.\n const stateSize = [];\n for (const cell of this.cells.slice().reverse()) {\n if (Array.isArray(cell.stateSize)) {\n stateSize.push(...cell.stateSize);\n }\n else {\n stateSize.push(cell.stateSize);\n }\n }\n return stateSize;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n inputs = inputs;\n let states = inputs.slice(1);\n // Recover per-cell states.\n const nestedStates = [];\n for (const cell of this.cells.slice().reverse()) {\n if (Array.isArray(cell.stateSize)) {\n nestedStates.push(states.splice(0, cell.stateSize.length));\n }\n else {\n nestedStates.push(states.splice(0, 1));\n }\n }\n nestedStates.reverse();\n // Call the cells in order and store the returned states.\n const newNestedStates = [];\n let callInputs;\n for (let i = 0; i < this.cells.length; ++i) {\n const cell = this.cells[i];\n states = nestedStates[i];\n // TODO(cais): Take care of constants.\n if (i === 0) {\n callInputs = [inputs[0]].concat(states);\n }\n else {\n callInputs = [callInputs[0]].concat(states);\n }\n callInputs = cell.call(callInputs, kwargs);\n newNestedStates.push(callInputs.slice(1));\n }\n // Format the new states as a flat list in reverse cell order.\n states = [];\n for (const cellStates of newNestedStates.slice().reverse()) {\n states.push(...cellStates);\n }\n return [callInputs[0]].concat(states);\n });\n }\n build(inputShape) {\n if (isArrayOfShapes(inputShape)) {\n // TODO(cais): Take care of input constants.\n // const constantShape = inputShape.slice(1);\n inputShape = inputShape[0];\n }\n inputShape = inputShape;\n let outputDim;\n this.cells.forEach((cell, i) => {\n nameScope(`RNNCell_${i}`, () => {\n // TODO(cais): Take care of input constants.\n cell.build(inputShape);\n if (Array.isArray(cell.stateSize)) {\n outputDim = cell.stateSize[0];\n }\n else {\n outputDim = cell.stateSize;\n }\n inputShape = [inputShape[0], outputDim];\n });\n });\n this.built = true;\n }\n getConfig() {\n const baseConfig = super.getConfig();\n const getCellConfig = (cell) => {\n return {\n 'className': cell.getClassName(),\n 'config': cell.getConfig(),\n };\n };\n const cellConfigs = this.cells.map(getCellConfig);\n const config = { 'cells': cellConfigs };\n return Object.assign({}, baseConfig, config);\n }\n /** @nocollapse */\n static fromConfig(cls, config, customObjects = {}) {\n const cells = [];\n for (const cellConfig of config['cells']) {\n cells.push(deserialize(cellConfig, customObjects));\n }\n return new cls({ cells });\n }\n get trainableWeights() {\n if (!this.trainable) {\n return [];\n }\n const weights = [];\n for (const cell of this.cells) {\n weights.push(...cell.trainableWeights);\n }\n return weights;\n }\n get nonTrainableWeights() {\n const weights = [];\n for (const cell of this.cells) {\n weights.push(...cell.nonTrainableWeights);\n }\n if (!this.trainable) {\n const trainableWeights = [];\n for (const cell of this.cells) {\n trainableWeights.push(...cell.trainableWeights);\n }\n return trainableWeights.concat(weights);\n }\n return weights;\n }\n /**\n * Retrieve the weights of a the model.\n *\n * @returns A flat `Array` of `tf.Tensor`s.\n */\n getWeights() {\n const weights = [];\n for (const cell of this.cells) {\n weights.push(...cell.weights);\n }\n return batchGetValue(weights);\n }\n /**\n * Set the weights of the model.\n *\n * @param weights An `Array` of `tf.Tensor`s with shapes and types matching\n * the output of `getWeights()`.\n */\n setWeights(weights) {\n 
const tuples = [];\n for (const cell of this.cells) {\n const numParams = cell.weights.length;\n const inputWeights = weights.splice(numParams);\n for (let i = 0; i < cell.weights.length; ++i) {\n tuples.push([cell.weights[i], inputWeights[i]]);\n }\n }\n batchSetValue(tuples);\n }\n}\n/** @nocollapse */\nStackedRNNCells.className = 'StackedRNNCells';\nserialization.registerClass(StackedRNNCells);\nexport function generateDropoutMask(args) {\n const { ones, rate, training = false, count = 1 } = args;\n const droppedInputs = () => K.dropout(ones(), rate);\n const createMask = () => K.inTrainPhase(droppedInputs, ones, training);\n // just in case count is provided with null or undefined\n if (!count || count <= 1) {\n return tfc.keep(createMask().clone());\n }\n const masks = Array(count).fill(undefined).map(createMask);\n return masks.map(m => tfc.keep(m.clone()));\n}\n//# sourceMappingURL=recurrent.js.map", "/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nvar __rest = (this && this.__rest) || function (s, e) {\n var t = {};\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\n t[p] = s[p];\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\n t[p[i]] = s[p[i]];\n }\n return t;\n};\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { util } from '@tensorflow/tfjs-core';\nimport * as K from '../backend/tfjs_backend';\nimport { checkDataFormat, checkPaddingMode } from '../common';\nimport { InputSpec } from '../engine/topology';\nimport { AttributeError, NotImplementedError, ValueError } from '../errors';\nimport { Initializer } from '../initializers';\nimport { convOutputLength, normalizeArray } from '../utils/conv_utils';\nimport { assertPositiveInteger } from '../utils/generic_utils';\nimport { getExactlyOneShape } from '../utils/types_utils';\nimport { generateDropoutMask, LSTMCell, RNN, RNNCell } from './recurrent';\nclass ConvRNN2DCell extends RNNCell {\n}\n/**\n * Base class for convolutional-recurrent layers.\n */\nclass ConvRNN2D extends RNN {\n constructor(args) {\n if (args.unroll) {\n throw new NotImplementedError('Unrolling is not possible with convolutional RNNs.');\n }\n if (Array.isArray(args.cell)) {\n throw new NotImplementedError('It is not possible at the moment to stack convolutional cells.');\n }\n super(args);\n this.inputSpec = [new InputSpec({ ndim: 5 })];\n }\n call(inputs, kwargs) {\n return tfc.tidy(() => {\n if (this.cell.dropoutMask != null) {\n tfc.dispose(this.cell.dropoutMask);\n this.cell.dropoutMask = null;\n }\n if (this.cell.recurrentDropoutMask != null) {\n tfc.dispose(this.cell.recurrentDropoutMask);\n this.cell.recurrentDropoutMask = null;\n }\n if (kwargs && kwargs['constants']) {\n throw new ValueError('ConvRNN2D cell does not support constants');\n }\n const mask = kwargs == null ? null : kwargs['mask'];\n const training = kwargs == null ? null : kwargs['training'];\n const initialState = kwargs == null ? 
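An illustrative sketch of how `StackedRNNCells` above is normally reached: passing an array of cells to the public `tf.layers.rnn` factory stacks them inside the library (assuming `tf` is the `@tensorflow/tfjs` namespace); example values are hypothetical:

```js
const tf = require('@tensorflow/tfjs');

// Two LSTM cells stacked inside a single RNN layer; the layer's output width
// is the unit count of the last cell in the stack.
const rnn = tf.layers.rnn({
  cell: [tf.layers.lstmCell({units: 8}), tf.layers.lstmCell({units: 4})],
  returnSequences: false,
  inputShape: [12, 6],
});
const model = tf.sequential({layers: [rnn, tf.layers.dense({units: 1})]});
model.predict(tf.randomNormal([2, 12, 6])).print(); // shape [2, 1]
```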
null : kwargs['initialState'];\n return super.call(inputs, { mask, training, initialState });\n });\n }\n computeOutputShape(inputShape) {\n let outShape = this.computeSingleOutputShape(inputShape);\n if (!this.returnSequences) {\n outShape = [outShape[0], ...outShape.slice(2)];\n }\n if (this.returnState) {\n outShape =\n [outShape, ...Array(2).fill([inputShape[0], ...outShape.slice(-3)])];\n }\n return outShape;\n }\n getInitialState(inputs) {\n return tfc.tidy(() => {\n const { stateSize } = this.cell;\n const inputShape = inputs.shape;\n const outputShape = this.computeSingleOutputShape(inputShape);\n const stateShape = [outputShape[0], ...outputShape.slice(2)];\n const initialState = tfc.zeros(stateShape);\n if (Array.isArray(stateSize)) {\n return Array(stateSize.length).fill(initialState);\n }\n return [initialState];\n });\n }\n resetStates(states, training = false) {\n tfc.tidy(() => {\n if (!this.stateful) {\n throw new AttributeError('Cannot call resetStates() on an RNN Layer that is not stateful.');\n }\n const inputShape = this.inputSpec[0].shape;\n const outputShape = this.computeSingleOutputShape(inputShape);\n const stateShape = [outputShape[0], ...outputShape.slice(2)];\n const batchSize = inputShape[0];\n if (batchSize == null) {\n throw new ValueError('If an RNN is stateful, it needs to know its batch size. Specify ' +\n 'the batch size of your input tensors: \\n' +\n '- If using a Sequential model, specify the batch size by ' +\n 'passing a `batchInputShape` option to your first layer.\\n' +\n '- If using the functional API, specify the batch size by ' +\n 'passing a `batchShape` option to your Input layer.');\n }\n // Initialize state if null.\n if (this.getStates() == null) {\n if (Array.isArray(this.cell.stateSize)) {\n this.states_ = this.cell.stateSize.map(() => tfc.zeros(stateShape));\n }\n else {\n this.states_ = [tfc.zeros(stateShape)];\n }\n }\n else if (states == null) {\n // Dispose old state tensors.\n tfc.dispose(this.states_);\n // For stateful RNNs, fully dispose kept old states.\n if (this.keptStates != null) {\n tfc.dispose(this.keptStates);\n this.keptStates = [];\n }\n if (Array.isArray(this.cell.stateSize)) {\n this.states_ = this.cell.stateSize.map(() => tfc.zeros(stateShape));\n }\n else {\n this.states_[0] = tfc.zeros(stateShape);\n }\n }\n else {\n if (!Array.isArray(states)) {\n states = [states];\n }\n if (states.length !== this.states_.length) {\n throw new ValueError(`Layer ${this.name} expects ${this.states_.length} state(s), ` +\n `but it received ${states.length} state value(s). Input ` +\n `received: ${states}`);\n }\n if (training) {\n // Store old state tensors for complete disposal later, i.e., during\n // the next no-arg call to this method. 
We do not dispose the old\n // states immediately because that BPTT (among other things) require\n // them.\n this.keptStates.push(this.states_.slice());\n }\n else {\n tfc.dispose(this.states_);\n }\n for (let index = 0; index < this.states_.length; ++index) {\n const value = states[index];\n const expectedShape = stateShape;\n if (!util.arraysEqual(value.shape, expectedShape)) {\n throw new ValueError(`State ${index} is incompatible with layer ${this.name}: ` +\n `expected shape=${expectedShape}, received shape=${value.shape}`);\n }\n this.states_[index] = value;\n }\n }\n this.states_ = this.states_.map(state => tfc.keep(state.clone()));\n });\n }\n computeSingleOutputShape(inputShape) {\n const { dataFormat, filters, kernelSize, padding, strides, dilationRate } = this.cell;\n const isChannelsFirst = dataFormat === 'channelsFirst';\n const h = inputShape[isChannelsFirst ? 3 : 2];\n const w = inputShape[isChannelsFirst ? 4 : 3];\n const hOut = convOutputLength(h, kernelSize[0], padding, strides[0], dilationRate[0]);\n const wOut = convOutputLength(w, kernelSize[1], padding, strides[1], dilationRate[1]);\n const outShape = [\n ...inputShape.slice(0, 2),\n ...(isChannelsFirst ? [filters, hOut, wOut] : [hOut, wOut, filters])\n ];\n return outShape;\n }\n}\n/** @nocollapse */\nConvRNN2D.className = 'ConvRNN2D';\nexport class ConvLSTM2DCell extends LSTMCell {\n constructor(args) {\n const { filters, kernelSize, strides, padding, dataFormat, dilationRate, } = args;\n super(Object.assign({}, args, { units: filters }));\n this.filters = filters;\n assertPositiveInteger(this.filters, 'filters');\n this.kernelSize = normalizeArray(kernelSize, 2, 'kernelSize');\n this.kernelSize.forEach(size => assertPositiveInteger(size, 'kernelSize'));\n this.strides = normalizeArray(strides || 1, 2, 'strides');\n this.strides.forEach(stride => assertPositiveInteger(stride, 'strides'));\n this.padding = padding || 'valid';\n checkPaddingMode(this.padding);\n this.dataFormat = dataFormat || 'channelsLast';\n checkDataFormat(this.dataFormat);\n this.dilationRate = normalizeArray(dilationRate || 1, 2, 'dilationRate');\n this.dilationRate.forEach(rate => assertPositiveInteger(rate, 'dilationRate'));\n }\n build(inputShape) {\n var _a;\n inputShape = getExactlyOneShape(inputShape);\n const channelAxis = this.dataFormat === 'channelsFirst' ? 1 : inputShape.length - 1;\n if (inputShape[channelAxis] == null) {\n throw new ValueError(`The channel dimension of the input should be defined. 
` +\n `Found ${inputShape[channelAxis]}`);\n }\n const inputDim = inputShape[channelAxis];\n const numOfKernels = 4;\n const kernelShape = this.kernelSize.concat([inputDim, this.filters * numOfKernels]);\n this.kernel = this.addWeight('kernel', kernelShape, null, this.kernelInitializer, this.kernelRegularizer, true, this.kernelConstraint);\n const recurrentKernelShape = this.kernelSize.concat([this.filters, this.filters * numOfKernels]);\n this.recurrentKernel = this.addWeight('recurrent_kernel', recurrentKernelShape, null, this.recurrentInitializer, this.recurrentRegularizer, true, this.recurrentConstraint);\n if (this.useBias) {\n let biasInitializer;\n if (this.unitForgetBias) {\n const init = this.biasInitializer;\n const filters = this.filters;\n biasInitializer = new (_a = class CustomInit extends Initializer {\n apply(shape, dtype) {\n const biasI = init.apply([filters]);\n const biasF = tfc.ones([filters]);\n const biasCAndO = init.apply([filters * 2]);\n return K.concatenate([biasI, biasF, biasCAndO]);\n }\n },\n /** @nocollapse */\n _a.className = 'CustomInit',\n _a)();\n }\n else {\n biasInitializer = this.biasInitializer;\n }\n this.bias = this.addWeight('bias', [this.filters * numOfKernels], null, biasInitializer, this.biasRegularizer, true, this.biasConstraint);\n }\n this.built = true;\n }\n call(inputs, kwargs) {\n return tfc.tidy(() => {\n if (inputs.length !== 3) {\n throw new ValueError(`ConvLSTM2DCell expects 3 input Tensors (inputs, h, c), got ` +\n `${inputs.length}.`);\n }\n const training = kwargs['training'] || false;\n const x = inputs[0]; // Current input\n const hTMinus1 = inputs[1]; // Previous memory state.\n const cTMinus1 = inputs[2]; // Previous carry state.\n const numOfKernels = 4;\n if (0 < this.dropout && this.dropout < 1 && this.dropoutMask == null) {\n this.dropoutMask = generateDropoutMask({\n ones: () => tfc.onesLike(x),\n rate: this.dropout,\n training,\n count: numOfKernels\n });\n }\n const dropoutMask = this.dropoutMask;\n const applyDropout = (x, mask, index) => {\n if (!mask || !mask[index]) {\n return x;\n }\n return tfc.mul(mask[index], x);\n };\n let xI = applyDropout(x, dropoutMask, 0);\n let xF = applyDropout(x, dropoutMask, 1);\n let xC = applyDropout(x, dropoutMask, 2);\n let xO = applyDropout(x, dropoutMask, 3);\n if (0 < this.recurrentDropout && this.recurrentDropout < 1 &&\n this.recurrentDropoutMask == null) {\n this.recurrentDropoutMask = generateDropoutMask({\n ones: () => tfc.onesLike(hTMinus1),\n rate: this.recurrentDropout,\n training,\n count: numOfKernels\n });\n }\n const recDropoutMask = this.recurrentDropoutMask;\n let hI = applyDropout(hTMinus1, recDropoutMask, 0);\n let hF = applyDropout(hTMinus1, recDropoutMask, 1);\n let hC = applyDropout(hTMinus1, recDropoutMask, 2);\n let hO = applyDropout(hTMinus1, recDropoutMask, 3);\n const kernelChannelAxis = 3;\n const [kernelI, kernelF, kernelC, kernelO] = tfc.split(this.kernel.read(), numOfKernels, kernelChannelAxis);\n const [biasI, biasF, biasC, biasO] = this.useBias ?\n tfc.split(this.bias.read(), numOfKernels) :\n [null, null, null, null];\n xI = this.inputConv(xI, kernelI, biasI, this.padding);\n xF = this.inputConv(xF, kernelF, biasF, this.padding);\n xC = this.inputConv(xC, kernelC, biasC, this.padding);\n xO = this.inputConv(xO, kernelO, biasO, this.padding);\n const [recKernelI, recKernelF, recKernelC, recKernelO] = tfc.split(this.recurrentKernel.read(), numOfKernels, kernelChannelAxis);\n hI = this.recurrentConv(hI, recKernelI);\n hF = this.recurrentConv(hF, 
recKernelF);\n hC = this.recurrentConv(hC, recKernelC);\n hO = this.recurrentConv(hO, recKernelO);\n const i = this.recurrentActivation.apply(tfc.add(xI, hI));\n const f = this.recurrentActivation.apply(tfc.add(xF, hF));\n const c = tfc.add(tfc.mul(f, cTMinus1), tfc.mul(i, this.activation.apply(tfc.add(xC, hC))));\n const h = tfc.mul(this.recurrentActivation.apply(tfc.add(xO, hO)), this.activation.apply(c));\n return [h, h, c];\n });\n }\n getConfig() {\n const _a = super.getConfig(), { 'units': _ } = _a, baseConfig = __rest(_a, ['units']);\n const config = {\n filters: this.filters,\n kernelSize: this.kernelSize,\n padding: this.padding,\n dataFormat: this.dataFormat,\n dilationRate: this.dilationRate,\n strides: this.strides,\n };\n return Object.assign({}, baseConfig, config);\n }\n inputConv(x, w, b, padding) {\n const out = tfc.conv2d(x, w, this.strides, (padding || 'valid'), this.dataFormat === 'channelsFirst' ? 'NCHW' : 'NHWC', this.dilationRate);\n if (b) {\n return K.biasAdd(out, b, this.dataFormat);\n }\n return out;\n }\n recurrentConv(x, w) {\n const strides = 1;\n return tfc.conv2d(x, w, strides, 'same', this.dataFormat === 'channelsFirst' ? 'NCHW' : 'NHWC');\n }\n}\n/** @nocollapse */\nConvLSTM2DCell.className = 'ConvLSTM2DCell';\ntfc.serialization.registerClass(ConvLSTM2DCell);\nexport class ConvLSTM2D extends ConvRNN2D {\n constructor(args) {\n const cell = new ConvLSTM2DCell(args);\n super(Object.assign({}, args, { cell }));\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls(config);\n }\n}\n/** @nocollapse */\nConvLSTM2D.className = 'ConvLSTM2D';\ntfc.serialization.registerClass(ConvLSTM2D);\n//# sourceMappingURL=convolutional_recurrent.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * TensorFlow.js Layers: Basic Layers.\n */\nimport { any, notEqual, serialization, tidy, transpose, util } from '@tensorflow/tfjs-core';\nimport { getActivation, serializeActivation } from '../activations';\nimport * as K from '../backend/tfjs_backend';\nimport { getConstraint, serializeConstraint } from '../constraints';\nimport { InputSpec, Layer } from '../engine/topology';\nimport { ValueError } from '../errors';\nimport { getInitializer, serializeInitializer } from '../initializers';\nimport { getRegularizer, serializeRegularizer } from '../regularizers';\nimport { assertPositiveInteger, mapActivationToFusedKernel } from '../utils/generic_utils';\nimport { arrayProd, range } from '../utils/math_utils';\nimport { getExactlyOneShape, getExactlyOneTensor } from '../utils/types_utils';\nexport class Dropout extends Layer {\n constructor(args) {\n super(args);\n this.rate = Math.max(Math.min(args.rate, 1), 0);\n // So that the scalar doesn't get tidied up between executions.\n this.noiseShape = args.noiseShape;\n this.seed = args.seed;\n this.supportsMasking = true;\n }\n getNoiseShape(input) {\n if (this.noiseShape == null) {\n return this.noiseShape;\n }\n const inputShape = input.shape;\n const noiseShape = [];\n for (let i = 0; i < this.noiseShape.length; ++i) {\n noiseShape.push(this.noiseShape[i] == null ? 
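A hedged sketch of how the `ConvLSTM2DCell`/`ConvLSTM2D` classes above are typically consumed, assuming a tfjs build that exposes the `tf.layers.convLstm2d` factory; parameter choices are illustrative only:

```js
const tf = require('@tensorflow/tfjs');

// Input is a sequence of frames: [batch, time, height, width, channels].
const model = tf.sequential({
  layers: [
    tf.layers.convLstm2d({
      filters: 8,
      kernelSize: 3,
      padding: 'same',
      returnSequences: false,
      inputShape: [5, 16, 16, 1],
    }),
    tf.layers.flatten(),
    tf.layers.dense({units: 1}),
  ],
});
model.predict(tf.randomNormal([2, 5, 16, 16, 1])).print(); // shape [2, 1]
```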
inputShape[i] : this.noiseShape[i]);\n }\n return noiseShape;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n const input = getExactlyOneTensor(inputs);\n if (0 < this.rate && this.rate < 1) {\n const training = kwargs['training'] == null ? false : kwargs['training'];\n const noiseShape = this.getNoiseShape(input);\n const output = K.inTrainPhase(() => K.dropout(input, this.rate, noiseShape, this.seed), () => input, training);\n return output;\n }\n return inputs;\n });\n }\n getConfig() {\n const config = {\n rate: this.rate,\n noiseShape: this.noiseShape,\n seed: this.seed,\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n dispose() {\n return super.dispose();\n }\n}\n/** @nocollapse */\nDropout.className = 'Dropout';\nserialization.registerClass(Dropout);\nexport class SpatialDropout1D extends Dropout {\n constructor(args) {\n super(args);\n this.inputSpec = [{ ndim: 3 }];\n }\n getNoiseShape(input) {\n const inputShape = input.shape;\n return [inputShape[0], 1, inputShape[2]];\n }\n}\n/** @nocollapse */\nSpatialDropout1D.className = 'SpatialDropout1D';\nserialization.registerClass(SpatialDropout1D);\nexport class Dense extends Layer {\n constructor(args) {\n super(args);\n // Default activation: Linear (none).\n this.activation = null;\n this.useBias = true;\n this.kernel = null;\n this.bias = null;\n this.DEFAULT_KERNEL_INITIALIZER = 'glorotNormal';\n this.DEFAULT_BIAS_INITIALIZER = 'zeros';\n if (args.batchInputShape == null && args.inputShape == null &&\n args.inputDim != null) {\n // This logic is copied from Layer's constructor, since we can't\n // do exactly what the Python constructor does for Dense().\n let batchSize = null;\n if (args.batchSize != null) {\n batchSize = args.batchSize;\n }\n this.batchInputShape = [batchSize, args.inputDim];\n }\n this.units = args.units;\n assertPositiveInteger(this.units, 'units');\n this.activation = getActivation(args.activation);\n if (args.useBias != null) {\n this.useBias = args.useBias;\n }\n this.kernelInitializer = getInitializer(args.kernelInitializer || this.DEFAULT_KERNEL_INITIALIZER);\n this.biasInitializer =\n getInitializer(args.biasInitializer || this.DEFAULT_BIAS_INITIALIZER);\n this.kernelConstraint = getConstraint(args.kernelConstraint);\n this.biasConstraint = getConstraint(args.biasConstraint);\n this.kernelRegularizer = getRegularizer(args.kernelRegularizer);\n this.biasRegularizer = getRegularizer(args.biasRegularizer);\n this.activityRegularizer = getRegularizer(args.activityRegularizer);\n this.supportsMasking = true;\n this.inputSpec = [{ minNDim: 2 }];\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const inputLastDim = inputShape[inputShape.length - 1];\n if (this.kernel == null) {\n this.kernel = this.addWeight('kernel', [inputLastDim, this.units], null, this.kernelInitializer, this.kernelRegularizer, true, this.kernelConstraint);\n if (this.useBias) {\n this.bias = this.addWeight('bias', [this.units], null, this.biasInitializer, this.biasRegularizer, true, this.biasConstraint);\n }\n }\n this.inputSpec = [{ minNDim: 2, axes: { [-1]: inputLastDim } }];\n this.built = true;\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const outputShape = inputShape.slice();\n outputShape[outputShape.length - 1] = this.units;\n return outputShape;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n // Dense layer accepts only a 
single input.\n const input = getExactlyOneTensor(inputs);\n const fusedActivationName = mapActivationToFusedKernel(this.activation.getClassName());\n let output;\n if (fusedActivationName != null) {\n output = K.dot(input, this.kernel.read(), fusedActivationName, this.bias ? this.bias.read() : null);\n }\n else {\n output = K.dot(input, this.kernel.read());\n if (this.bias != null) {\n output = K.biasAdd(output, this.bias.read());\n }\n if (this.activation != null) {\n output = this.activation.apply(output);\n }\n }\n return output;\n });\n }\n getConfig() {\n const config = {\n units: this.units,\n activation: serializeActivation(this.activation),\n useBias: this.useBias,\n kernelInitializer: serializeInitializer(this.kernelInitializer),\n biasInitializer: serializeInitializer(this.biasInitializer),\n kernelRegularizer: serializeRegularizer(this.kernelRegularizer),\n biasRegularizer: serializeRegularizer(this.biasRegularizer),\n activityRegularizer: serializeRegularizer(this.activityRegularizer),\n kernelConstraint: serializeConstraint(this.kernelConstraint),\n biasConstraint: serializeConstraint(this.biasConstraint)\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nDense.className = 'Dense';\nserialization.registerClass(Dense);\nexport class Flatten extends Layer {\n constructor(args) {\n args = args || {};\n super(args);\n this.inputSpec = [{ minNDim: 3 }];\n this.dataFormat = args.dataFormat;\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n for (const dim of inputShape.slice(1)) {\n if (dim == null) {\n throw new ValueError(`The shape of the input to \"Flatten\" is not fully defined ` +\n `(got ${inputShape.slice(1)}). Make sure to pass a complete ` +\n `\"input_shape\" or \"batch_input_shape\" argument to the first ` +\n `layer in your model.`);\n }\n }\n return [inputShape[0], arrayProd(inputShape, 1)];\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n let input = getExactlyOneTensor(inputs);\n if (this.dataFormat === 'channelsFirst' && input.rank > 1) {\n const permutation = [0];\n for (let i = 2; i < input.rank; ++i) {\n permutation.push(i);\n }\n permutation.push(1);\n input = input.transpose(permutation);\n }\n return K.batchFlatten(input);\n });\n }\n getConfig() {\n const config = {};\n if (this.dataFormat != null) {\n config['dataFormat'] = this.dataFormat;\n }\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nFlatten.className = 'Flatten';\nserialization.registerClass(Flatten);\nexport class Activation extends Layer {\n constructor(args) {\n super(args);\n this.supportsMasking = true;\n this.activation = getActivation(args.activation);\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n const input = getExactlyOneTensor(inputs);\n return this.activation.apply(input);\n });\n }\n getConfig() {\n const config = { activation: serializeActivation(this.activation) };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nActivation.className = 'Activation';\nserialization.registerClass(Activation);\nexport class RepeatVector extends Layer {\n constructor(args) {\n super(args);\n this.n = args.n;\n this.inputSpec = [{ ndim: 2 }];\n }\n computeOutputShape(inputShape) {\n return [inputShape[0], this.n, inputShape[1]];\n }\n call(inputs, kwargs) {\n 
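A short sketch of the core layers above (`Dense`, `Dropout`, `Flatten`, `Activation`) through their public factories, assuming `tf` is the `@tensorflow/tfjs` namespace. Note that `Dropout.call()` only drops units when a truthy `training` kwarg reaches it, which is what the `K.inTrainPhase` guard implements:

```js
const tf = require('@tensorflow/tfjs');

// Dense computes activation(x · kernel + bias); Dropout is a no-op at inference.
const model = tf.sequential({
  layers: [
    tf.layers.dense({units: 4, activation: 'relu', inputShape: [8]}),
    tf.layers.dropout({rate: 0.5}),
    tf.layers.dense({units: 1}),
  ],
});
model.predict(tf.ones([2, 8])).print(); // dropout inactive outside training

// Passing training=true through apply() activates the dropout mask.
const drop = tf.layers.dropout({rate: 0.5});
drop.apply(tf.ones([2, 8]), {training: true}).print(); // ~half the entries zeroed, the rest scaled by 1/(1-rate)
```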
return tidy(() => {\n inputs = getExactlyOneTensor(inputs);\n return K.repeat(inputs, this.n);\n });\n }\n getConfig() {\n const config = {\n n: this.n,\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nRepeatVector.className = 'RepeatVector';\nserialization.registerClass(RepeatVector);\nexport class Reshape extends Layer {\n constructor(args) {\n super(args);\n this.targetShape = args.targetShape;\n // Make sure that all unknown dimensions are represented as `null`.\n for (let i = 0; i < this.targetShape.length; ++i) {\n if (this.isUnknown(this.targetShape[i])) {\n this.targetShape[i] = null;\n }\n }\n }\n isUnknown(dim) {\n return dim < 0 || dim == null;\n }\n /**\n * Finds and replaces a missing dimension in output shape.\n *\n * This is a near direct port of the internal Numpy function\n * `_fix_unknown_dimension` in `numpy/core/src/multiarray/shape.c`.\n *\n * @param inputShape: Original shape of array begin reshape.\n * @param outputShape: Target shape of the array, with at most a single\n * `null` or negative number, which indicates an underdetermined dimension\n * that should be derived from `inputShape` and the known dimensions of\n * `outputShape`.\n * @returns: The output shape with `null` replaced with its computed value.\n * @throws: ValueError: If `inputShape` and `outputShape` do not match.\n */\n fixUnknownDimension(inputShape, outputShape) {\n const errorMsg = 'Total size of new array must be unchanged.';\n const finalShape = outputShape.slice();\n let known = 1;\n let unknown = null;\n for (let i = 0; i < finalShape.length; ++i) {\n const dim = finalShape[i];\n if (this.isUnknown(dim)) {\n if (unknown === null) {\n unknown = i;\n }\n else {\n throw new ValueError('Can only specifiy one unknown dimension.');\n }\n }\n else {\n known *= dim;\n }\n }\n const originalSize = arrayProd(inputShape);\n if (unknown !== null) {\n if (known === 0 || originalSize % known !== 0) {\n throw new ValueError(errorMsg);\n }\n finalShape[unknown] = originalSize / known;\n }\n else if (originalSize !== known) {\n throw new ValueError(errorMsg);\n }\n return finalShape;\n }\n computeOutputShape(inputShape) {\n let anyUnknownDims = false;\n for (let i = 0; i < inputShape.length; ++i) {\n if (this.isUnknown(inputShape[i])) {\n anyUnknownDims = true;\n break;\n }\n }\n if (anyUnknownDims) {\n return inputShape.slice(0, 1).concat(this.targetShape);\n }\n else {\n return inputShape.slice(0, 1).concat(this.fixUnknownDimension(inputShape.slice(1), this.targetShape));\n }\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n const input = getExactlyOneTensor(inputs);\n const inputShape = input.shape;\n const outputShape = inputShape.slice(0, 1).concat(this.fixUnknownDimension(inputShape.slice(1), this.targetShape));\n return input.reshape(outputShape);\n });\n }\n getConfig() {\n const config = {\n targetShape: this.targetShape,\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nReshape.className = 'Reshape';\nserialization.registerClass(Reshape);\nexport class Permute extends Layer {\n constructor(args) {\n super(args);\n if (args.dims == null) {\n throw new Error('Required configuration field `dims` is missing during Permute ' +\n 'constructor call.');\n }\n if (!Array.isArray(args.dims)) {\n throw new Error('Permute constructor requires `dims` to be an Array, but received ' +\n `${args.dims} instead.`);\n }\n 
// Check the validity of the permutation indices.\n const expectedSortedIndices = range(1, args.dims.length + 1);\n if (!util.arraysEqual(args.dims.slice().sort(), expectedSortedIndices)) {\n throw new Error('Invalid permutation `dims`: ' + JSON.stringify(args.dims) +\n ' `dims` must contain consecutive integers starting from 1.');\n }\n this.dims = args.dims;\n this.dimsIncludingBatch = [0].concat(this.dims);\n this.inputSpec = [new InputSpec({ ndim: this.dims.length + 1 })];\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const outputShape = inputShape.slice();\n this.dims.forEach((dim, i) => {\n outputShape[i + 1] = inputShape[dim];\n });\n return outputShape;\n }\n call(inputs, kwargs) {\n return transpose(getExactlyOneTensor(inputs), this.dimsIncludingBatch);\n }\n getConfig() {\n const config = {\n dims: this.dims,\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nPermute.className = 'Permute';\nserialization.registerClass(Permute);\nexport class Masking extends Layer {\n constructor(args) {\n super(args == null ? {} : args);\n this.supportsMasking = true;\n if (args != null) {\n this.maskValue = args.maskValue == null ? 0 : args.maskValue;\n }\n else {\n this.maskValue = 0;\n }\n }\n computeOutputShape(inputShape) {\n return inputShape;\n }\n getConfig() {\n const baseConfig = super.getConfig();\n const config = { maskValue: this.maskValue };\n Object.assign(config, baseConfig);\n return config;\n }\n computeMask(inputs, mask) {\n const input = getExactlyOneTensor(inputs);\n const axis = -1;\n return any(notEqual(input, this.maskValue), axis);\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n const input = getExactlyOneTensor(inputs);\n const axis = -1;\n const keepDims = true;\n const booleanMask = any(notEqual(input, this.maskValue), axis, keepDims);\n const output = input.mul(booleanMask.asType(input.dtype));\n return output;\n });\n }\n}\n/** @nocollapse */\nMasking.className = 'Masking';\nserialization.registerClass(Masking);\n//# sourceMappingURL=core.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * TensorFlow.js Layers: Embedding Layer.\n *\n * Original source: keras/constraints.py\n */\nimport { notEqual, serialization, tidy, zerosLike } from '@tensorflow/tfjs-core';\nimport * as K from '../backend/tfjs_backend';\nimport { getConstraint, serializeConstraint } from '../constraints';\nimport { Layer } from '../engine/topology';\nimport { ValueError } from '../errors';\nimport { getInitializer, serializeInitializer } from '../initializers';\nimport { getRegularizer, serializeRegularizer } from '../regularizers';\nimport * as generic_utils from '../utils/generic_utils';\nimport { getExactlyOneShape, getExactlyOneTensor } from '../utils/types_utils';\nexport class Embedding extends Layer {\n constructor(args) {\n super(args);\n this.embeddings = null;\n this.DEFAULT_EMBEDDINGS_INITIALIZER = 'randomUniform';\n if (args.batchInputShape == null && args.inputShape == null) {\n // Porting Note: This logic is copied from Layer's constructor, since we\n // can't do exactly what the Python constructor does for Embedding().\n // Specifically, the super constructor can not be 
called after the\n // mutation of the `config` argument.\n let batchSize = null;\n if (args.batchSize != null) {\n batchSize = args.batchSize;\n }\n if (args.inputLength == null) {\n // Fix super-constructor to what it would have done if\n // 'config.inputShape' were (None, )\n this.batchInputShape = [batchSize, null];\n }\n else {\n // Fix super-constructor to what it would have done if\n // 'config.inputShape' were (config.inputLength, )\n this.batchInputShape =\n [batchSize].concat(generic_utils.toList(args.inputLength));\n }\n }\n this.inputDim = args.inputDim;\n generic_utils.assertPositiveInteger(this.inputDim, 'inputDim');\n this.outputDim = args.outputDim;\n generic_utils.assertPositiveInteger(this.outputDim, 'outputDim');\n this.embeddingsInitializer = getInitializer(args.embeddingsInitializer || this.DEFAULT_EMBEDDINGS_INITIALIZER);\n this.embeddingsRegularizer = getRegularizer(args.embeddingsRegularizer);\n this.activityRegularizer = getRegularizer(args.activityRegularizer);\n this.embeddingsConstraint = getConstraint(args.embeddingsConstraint);\n this.maskZero = args.maskZero;\n this.supportsMasking = args.maskZero;\n this.inputLength = args.inputLength;\n }\n build(inputShape) {\n this.embeddings = this.addWeight('embeddings', [this.inputDim, this.outputDim], this.dtype, this.embeddingsInitializer, this.embeddingsRegularizer, true, this.embeddingsConstraint);\n this.built = true;\n }\n // Override warnOnIncompatibleInputShape because an embedding layer allows\n // the input to have varying ranks.\n warnOnIncompatibleInputShape(inputShape) { }\n computeMask(inputs, mask) {\n return tidy(() => {\n if (!this.maskZero) {\n return null;\n }\n else {\n inputs = getExactlyOneTensor(inputs);\n return notEqual(inputs, zerosLike(inputs));\n }\n });\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n if (this.inputLength == null) {\n return [...inputShape, this.outputDim];\n }\n // inputLength can be an array if input is 3D or higher.\n const inLens = generic_utils.toList(this.inputLength);\n if (inLens.length !== inputShape.length - 1) {\n throw new ValueError(`\"inputLength\" is ${this.inputLength}, but received ` +\n `input shape has shape ${inputShape}`);\n }\n else {\n let i = 0;\n for (let k = 0; k < inLens.length; ++k) {\n const s1 = inLens[k];\n const s2 = inputShape[k + 1];\n if ((s1 != null) && (s2 != null) && (s1 !== s2)) {\n throw new ValueError(`\"inputLength\" is ${this.inputLength}, but received ` +\n `input shape has shape ${inputShape}`);\n }\n else if (s1 == null) {\n inLens[i] = s2;\n }\n i++;\n }\n }\n return [inputShape[0], ...inLens, this.outputDim];\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n // Embedding layer accepts only a single input.\n let input = getExactlyOneTensor(inputs);\n if (input.dtype !== 'int32') {\n input = K.cast(input, 'int32');\n }\n const output = K.gather(this.embeddings.read(), input.as1D());\n return output.reshape(getExactlyOneShape(this.computeOutputShape(input.shape)));\n });\n }\n getConfig() {\n const config = {\n inputDim: this.inputDim,\n outputDim: this.outputDim,\n embeddingsInitializer: serializeInitializer(this.embeddingsInitializer),\n embeddingsRegularizer: serializeRegularizer(this.embeddingsRegularizer),\n activityRegularizer: serializeRegularizer(this.activityRegularizer),\n embeddingsConstraint: serializeConstraint(this.embeddingsConstraint),\n maskZero: this.maskZero,\n inputLength: this.inputLength\n };\n const baseConfig = 
super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nEmbedding.className = 'Embedding';\nserialization.registerClass(Embedding);\n//# sourceMappingURL=embeddings.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * TensorFlow.js Layers: Merge Layers.\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { serialization, tidy, util } from '@tensorflow/tfjs-core';\nimport * as K from '../backend/tfjs_backend';\nimport { Layer } from '../engine/topology';\nimport { NotImplementedError, ValueError } from '../errors';\nimport { l2Normalize } from '../losses';\nimport * as generic_utils from '../utils/generic_utils';\nimport * as mathUtils from '../utils/math_utils';\nimport { getExactlyOneShape } from '../utils/types_utils';\n/**\n * Generic Merge layer for element-wise merge functions.\n *\n * Used to implement `Sum`, `Average`, `Concatenate`, etc.\n */\nexport class Merge extends Layer {\n constructor(args) {\n super(args || {});\n this.supportsMasking = true;\n }\n /**\n * Logic for merging multiple tensors, to be overridden by subclasses.\n * @param inputs\n */\n mergeFunction(inputs) {\n throw new NotImplementedError();\n }\n /**\n * Computes the shape of the result of an elementwise operation.\n *\n * @param shape1: Shape of the first tensor.\n * @param shape2: Shape of the second tensor.\n * @returns Expected output shape when an elementwise operation is carried\n * out on 2 tensors with shapes `shape1` and `shape2`.\n * @throws ValueError: If `shape1` and `shape2` are not compatible for\n * element-wise operations.\n */\n computeElementwiseOpOutputShape(shape1, shape2) {\n if (shape1 == null || shape2 == null) {\n return null;\n }\n else if (shape1.length < shape2.length) {\n return this.computeElementwiseOpOutputShape(shape2, shape1);\n }\n else if (shape2.length === 0) {\n return shape1;\n }\n const outputShape = shape1.slice(0, shape1.length - shape2.length);\n for (let k = 0; k < shape2.length; ++k) {\n const i = shape1[shape1.length - shape2.length + k];\n const j = shape2[k];\n if (i == null || j == null || i < 0 || j < 0) {\n outputShape.push(null);\n }\n else if (i === 1) {\n outputShape.push(j);\n }\n else if (j === 1) {\n outputShape.push(i);\n }\n else {\n if (i !== j) {\n throw new ValueError('Operands could not be broadcast together with shapes ' +\n JSON.stringify(shape1) + ' ' + JSON.stringify(shape2));\n }\n outputShape.push(i);\n }\n }\n return outputShape;\n }\n build(inputShape) {\n // Used purely for shape validation.\n if (Array.isArray(inputShape) && !Array.isArray(inputShape[0])) {\n // Make sure that inputShape is an Array of shape.\n inputShape = [getExactlyOneShape(inputShape)];\n }\n inputShape = inputShape;\n if (inputShape.length < 2) {\n throw new ValueError('A merge layer should be called on an Array of at least 2 inputs.' +\n ` Got ${inputShape.length} input(s).`);\n }\n // Make sure that there is at most one unique batch size among the input\n // shapes.\n let batchSizes = [];\n for (const shape of inputShape) {\n if (shape != null && shape[0] !== null) {\n batchSizes.push(shape[0]);\n }\n }\n batchSizes = generic_utils.unique(batchSizes);\n if (batchSizes.length > 1) {\n throw new ValueError(`Can not merge tensors with different batch sizes. 
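An illustrative sketch for the `Embedding` layer above via the public `tf.layers.embedding` factory (assuming `tf` is the `@tensorflow/tfjs` namespace); indices are cast to `int32` by the layer, and `maskZero`, implemented by `computeMask` above, treats index 0 as padding:

```js
const tf = require('@tensorflow/tfjs');

// Maps integer token ids to dense 8-dimensional vectors.
const emb = tf.layers.embedding({inputDim: 100, outputDim: 8, inputLength: 5});
const ids = tf.tensor2d([[3, 7, 0, 0, 0], [12, 5, 9, 0, 0]], [2, 5], 'int32');
emb.apply(ids).print(); // shape [2, 5, 8]
```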
` +\n `Got tensors with shapes: ${JSON.stringify(inputShape)}.`);\n }\n let outputShape = inputShape[0] == null ? null : inputShape[0].slice(1);\n for (let i = 1; i < inputShape.length; ++i) {\n const shape = inputShape[i] == null ? null : inputShape[i].slice(1);\n outputShape = this.computeElementwiseOpOutputShape(outputShape, shape);\n }\n // If the inputs have different ranks, we have to reshape them to make them\n // broadcastable.\n const allRanks = inputShape.map(shape => shape.length);\n if (inputShape.indexOf(null) === -1 &&\n generic_utils.unique(allRanks).length === 1) {\n this.reshapeRequired = false;\n }\n else {\n this.reshapeRequired = true;\n }\n }\n call(inputs, kwargs) {\n return tidy(() => {\n inputs = inputs;\n if (this.reshapeRequired) {\n const reshapedInputs = [];\n const inputDims = inputs.map(input => input.rank);\n if (inputDims.indexOf(null) === -1) {\n // If ranks of all inputs are available, we simply expand each of them\n // at axis=1 until all of them have the same rank.\n const maxNDim = mathUtils.max(inputDims);\n for (let x of inputs) {\n const xNDim = x.rank;\n for (let k = 0; k < maxNDim - xNDim; ++k) {\n x = K.expandDims(x, 1);\n }\n reshapedInputs.push(x);\n }\n return this.mergeFunction(reshapedInputs);\n }\n else {\n // Transpose all inputs so that batch size is the last dimension.\n // [batchSize, dim1, dim2, ...] -> [dim1, dim2, ..., batchSize]\n let transposed = false;\n for (const x of inputs) {\n const xNDim = x.rank;\n if (xNDim == null) {\n const xShape = x.shape;\n const batchSize = xShape[0];\n const newShape = xShape.slice(1).concat([batchSize]);\n let xTransposed = x.reshape([batchSize].concat(mathUtils.arrayProd(xShape.slice(1))));\n xTransposed = tfc.transpose(xTransposed, [1, 0]);\n xTransposed = xTransposed.reshape(newShape);\n reshapedInputs.push(xTransposed);\n transposed = true;\n }\n else if (xNDim > 1) {\n const dims = mathUtils.range(1, xNDim).concat([0]);\n reshapedInputs.push(tfc.transpose(x, dims));\n transposed = true;\n }\n else {\n // We don't transpose inputs if they are 1D vectors or scalars.\n reshapedInputs.push(x);\n }\n }\n let y = this.mergeFunction(reshapedInputs);\n const yNDim = y.rank;\n if (transposed) {\n // If inputs have been transposed, we have to transpose the output\n // too.\n if (yNDim == null) {\n const yShape = y.shape;\n const yNDim = yShape.length;\n const batchSize = yShape[yNDim - 1];\n const newShape = [batchSize].concat(yShape.slice(0, yShape.length - 1));\n y = tfc.transpose(y.reshape([-1, batchSize]), [1, 0])\n .reshape(newShape);\n }\n else if (yNDim > 1) {\n const dims = [yNDim - 1].concat(mathUtils.range(0, yNDim - 1));\n y = tfc.transpose(y, dims);\n }\n }\n return y;\n }\n }\n else {\n return this.mergeFunction(inputs);\n }\n });\n }\n computeOutputShape(inputShape) {\n inputShape = inputShape;\n let outputShape;\n if (inputShape[0] == null) {\n outputShape = null;\n }\n else {\n outputShape = inputShape[0].slice(1);\n }\n for (let i = 1; i < inputShape.length; ++i) {\n const shape = inputShape[i] == null ? 
null : inputShape[i].slice(1);\n outputShape = this.computeElementwiseOpOutputShape(outputShape, shape);\n }\n let batchSizes = [];\n for (const shape of inputShape) {\n if (shape != null && shape[0] !== null) {\n batchSizes.push(shape[0]);\n }\n }\n batchSizes = generic_utils.unique(batchSizes);\n if (batchSizes.length === 1) {\n outputShape = batchSizes.concat(outputShape);\n }\n else {\n outputShape = [null].concat(outputShape);\n }\n return outputShape;\n }\n computeMask(inputs, mask) {\n return tfc.tidy(() => {\n if (mask == null) {\n return null;\n }\n if (!Array.isArray(mask)) {\n throw new ValueError('`mask` should be an Array');\n }\n if (!Array.isArray(inputs)) {\n throw new ValueError('`inputs` should be an Array');\n }\n if (mask.length !== inputs.length) {\n throw new ValueError(`The Array 'inputs' and 'mask' are expected to have the same ` +\n `length, but have different lengths ` +\n `(${inputs.length} vs ${mask.length})`);\n }\n if (mask.every(m => m == null)) {\n return null;\n }\n mask = mask.map(m => m == null ? m : tfc.expandDims(m, 0));\n let output = mask[0];\n for (let i = 1; i < mask.length - 1; ++i) {\n output = tfc.logicalAnd(output, mask[i]);\n }\n return output;\n });\n }\n}\nexport class Add extends Merge {\n constructor(args) {\n super(args);\n }\n mergeFunction(inputs) {\n return tidy(() => {\n let output = inputs[0].clone();\n for (let i = 1; i < inputs.length; ++i) {\n output = tfc.add(output, inputs[i]);\n }\n return output;\n });\n }\n}\n/** @nocollapse */\nAdd.className = 'Add';\nserialization.registerClass(Add);\n/**\n * Calculate the element-wise sum of inputs, which all have the same shape.\n *\n * This function can be invoked in three ways.\n *\n * 1. Construct an instance of `Add` layer, by using no input argument\n * or a single configuration argument. The resultant `Add` layer can then\n * be used on `tf.SymbolicTensor`s or `tf.Tensor`s. For example:\n *\n * ```js\n * const addLayer = tf.layers.add();\n *\n * // The layer can be applied to inputs.\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = addLayer.apply([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 2. Invoke directly on an `Array` of `tf.SymbolicTensor`s. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.SymbolicTensor`. For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = tf.layers.add([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 3. Invoke directly on `tf.Tensor`s, i.e., concrete values. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.Tensor` as the result of the computation. 
For\n * example:\n *\n * ```js\n * const input1 = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n * const input2 = tf.tensor2d([10, 20, 30, 40], [2, 2]);\n * tf.layers.add([input1, input2]).print();\n * // Gives [[11, 22], [33, 44]].\n *\n */\nexport function add(config) {\n if (Array.isArray(config)) {\n const layer = new Add({});\n return layer.apply(config);\n }\n else {\n return new Add(config);\n }\n}\nexport class Multiply extends Merge {\n constructor(args) {\n super(args);\n }\n mergeFunction(inputs) {\n return tidy(() => {\n let output = inputs[0].clone();\n for (let i = 1; i < inputs.length; ++i) {\n output = tfc.mul(output, inputs[i]);\n }\n return output;\n });\n }\n}\n/** @nocollapse */\nMultiply.className = 'Multiply';\nserialization.registerClass(Multiply);\n/**\n * Calculate the element-wise product of inputs, which all have the same shape.\n *\n * This function can be invoked in three ways.\n *\n * 1. Construct an instance of `Multiply` layer, by using no input argument\n * or a single configuration argument. The resultant `Multiply` layer can\n * then be used on `tf.SymbolicTensor`s or `tf.Tensor`s. For example:\n *\n * ```js\n * const multiplyLayer = tf.layers.multiply();\n *\n * // The layer can be applied to inputs.\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = multiplyLayer.apply([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 2. Invoke directly on an `Array` of `tf.SymbolicTensor`s. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.SymbolicTensor`. For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = tf.layers.multiply([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 3. Invoke directly on `tf.Tensor`s, i.e., concrete values. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.Tensor` as the result of the computation. For\n * example:\n *\n * ```js\n * const input1 = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n * const input2 = tf.tensor2d([10, 20, 30, 40], [2, 2]);\n * tf.layers.multiply([input1, input2]).print();\n * // Gives [[10, 40], [90, 160]].\n *\n */\nexport function multiply(config) {\n if (Array.isArray(config)) {\n const layer = new Multiply({});\n return layer.apply(config);\n }\n else {\n return new Multiply(config);\n }\n}\nexport class Average extends Merge {\n constructor(args) {\n super(args);\n }\n mergeFunction(inputs) {\n return tidy(() => {\n let output = inputs[0].clone();\n for (let i = 1; i < inputs.length; ++i) {\n output = tfc.add(output, inputs[i]);\n }\n return tfc.mul(1 / inputs.length, output);\n });\n }\n}\n/** @nocollapse */\nAverage.className = 'Average';\nserialization.registerClass(Average);\n/**\n * Calculate the element-wise arithmetic mean of inputs, which all have the same\n * shape.\n *\n * This function can be invoked in three ways.\n *\n * 1. Construct an instance of `Average` layer, by using no input argument\n * or a single configuration argument. The resultant `Average` layer can then\n * be used on `tf.SymbolicTensor`s or `tf.Tensor`s. 
For example:\n *\n * ```js\n * const averageLayer = tf.layers.average();\n *\n * // The layer can be applied to inputs.\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = averageLayer.apply([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 2. Invoke directly on an `Array` of `tf.SymbolicTensor`s. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.SymbolicTensor`. For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = tf.layers.average([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 3. Invoke directly on `tf.Tensor`s, i.e., concrete values. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.Tensor` as the result of the computation. For\n * example:\n *\n * ```js\n * const input1 = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n * const input2 = tf.tensor2d([10, 20, 30, 40], [2, 2]);\n * tf.layers.average([input1, input2]).print();\n * // Gives [[5.5, 11], [16.5, 22]].\n *\n */\nexport function average(config) {\n if (Array.isArray(config)) {\n const layer = new Average({});\n return layer.apply(config);\n }\n else {\n return new Average(config);\n }\n}\nexport class Maximum extends Merge {\n constructor(args) {\n super(args);\n }\n mergeFunction(inputs) {\n return tidy(() => {\n let output = inputs[0];\n for (let i = 1; i < inputs.length; ++i) {\n output = tfc.maximum(output, inputs[i]);\n }\n return output;\n });\n }\n}\n/** @nocollapse */\nMaximum.className = 'Maximum';\nserialization.registerClass(Maximum);\n/**\n * Calculate the element-wise maximum of inputs, which all have the same shape.\n *\n * This function can be invoked in three ways.\n *\n * 1. Construct an instance of `Maximum` layer, by using no input argument\n * or a single configuration argument. The resultant `Maximum` layer can then\n * be used on `tf.SymbolicTensor`s or `tf.Tensor`s. For example:\n *\n * ```js\n * const maximumLayer = tf.layers.maximum();\n *\n * // The layer can be applied to inputs.\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = maximumLayer.apply([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 2. Invoke directly on an `Array` of `tf.SymbolicTensor`s. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.SymbolicTensor`. For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = tf.layers.maximum([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 3. Invoke directly on `tf.Tensor`s, i.e., concrete values. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.Tensor` as the result of the computation. 
For\n * example:\n *\n * ```js\n * const input1 = tf.tensor2d([1, 20, 3, 40], [2, 2]);\n * const input2 = tf.tensor2d([10, 2, 30, 4], [2, 2]);\n * tf.layers.maximum([input1, input2]).print();\n * // Gives [[10, 20], [30, 40]].\n *\n */\nexport function maximum(config) {\n if (Array.isArray(config)) {\n const layer = new Maximum({});\n return layer.apply(config);\n }\n else {\n return new Maximum(config);\n }\n}\nexport class Minimum extends Merge {\n constructor(args) {\n super(args);\n }\n mergeFunction(inputs) {\n return tidy(() => {\n let output = inputs[0];\n for (let i = 1; i < inputs.length; ++i) {\n output = tfc.minimum(output, inputs[i]);\n }\n return output;\n });\n }\n}\n/** @nocollapse */\nMinimum.className = 'Minimum';\nserialization.registerClass(Minimum);\n/**\n * Calculate the element-wise minimum of inputs, which all have the same shape.\n *\n * This function can be invoked in three ways.\n *\n * 1. Construct an instance of `Minimum` layer, by using no input argument\n * or a single configuration argument. The resultant `Minimum` layer can then\n * be used on `tf.SymbolicTensor`s or `tf.Tensor`s. For example:\n *\n * ```js\n * const minimumLayer = tf.layers.minimum();\n *\n * // The layer can be applied to inputs.\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = minimumLayer.apply([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 2. Invoke directly on an `Array` of `tf.SymbolicTensor`s. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.SymbolicTensor`. For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = tf.layers.minimum([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 3. Invoke directly on `tf.Tensor`s, i.e., concrete values. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.Tensor` as the result of the computation. For\n * example:\n *\n * ```js\n * const input1 = tf.tensor2d([1, 20, 3, 40], [2, 2]);\n * const input2 = tf.tensor2d([10, 2, 30, 4], [2, 2]);\n * tf.layers.minimum([input1, input2]).print();\n * // Gives [[1, 2], [3, 4]].\n *\n */\nexport function minimum(config) {\n if (Array.isArray(config)) {\n const layer = new Minimum({});\n return layer.apply(config);\n }\n else {\n return new Minimum(config);\n }\n}\nexport class Concatenate extends Merge {\n constructor(args) {\n super(args);\n this.DEFAULT_AXIS = -1;\n if (args == null) {\n args = {};\n }\n this.axis = args.axis == null ? 
this.DEFAULT_AXIS : args.axis;\n this.supportsMasking = true;\n this.reshapeRequired = false;\n }\n build(inputShape) {\n // Used purely for shape validation.]\n if (!(Array.isArray(inputShape) && Array.isArray(inputShape[0])) ||\n inputShape.length === 1) {\n throw new ValueError('A `Concatenate` layer should be called on a list of at least 2 ' +\n 'inputs');\n }\n inputShape = inputShape;\n let allNoneShape = true;\n for (const shape of inputShape) {\n if (shape != null) {\n allNoneShape = false;\n break;\n }\n }\n if (allNoneShape) {\n return;\n }\n const shapeSet = [];\n for (let i = 0; i < inputShape.length; ++i) {\n const shapeWithoutConcatAxis = inputShape[i].slice();\n shapeWithoutConcatAxis.splice(this.axis, 1);\n let exists = false;\n for (const shape of shapeSet) {\n if (util.arraysEqual(shape, shapeWithoutConcatAxis)) {\n exists = true;\n break;\n }\n }\n if (!exists) {\n shapeSet.push(shapeWithoutConcatAxis);\n }\n }\n if (shapeSet.length > 1) {\n throw new ValueError('A `Concatenate` layer requires inputs with matching shapes ' +\n 'except for the concat axis. Got input shapes: ' +\n JSON.stringify(inputShape));\n }\n }\n mergeFunction(inputs) {\n return tidy(() => {\n return K.concatenate(inputs, this.axis);\n });\n }\n computeOutputShape(inputShape) {\n if (!(Array.isArray(inputShape) && Array.isArray(inputShape[0]))) {\n throw new ValueError('A `Concatenate` layer should be called on a list of inputs.');\n }\n const inputShapes = inputShape;\n const outputShape = inputShapes[0].slice();\n const axis = this.axis < 0 ? outputShape.length + this.axis : this.axis;\n // Porting Note: the line above is because TypeScript doesn't support\n // negative indices.\n for (const shape of inputShapes.slice(1)) {\n if (outputShape[axis] == null || shape[axis] == null) {\n outputShape[axis] = null;\n break;\n }\n outputShape[axis] += shape[axis];\n }\n return outputShape;\n }\n computeMask(inputs, mask) {\n if (mask == null) {\n return null;\n }\n if (!Array.isArray(mask)) {\n throw new ValueError('`mask` should be an array for Concatenate');\n }\n if (!Array.isArray(inputs)) {\n throw new ValueError('`inputs` should be an array for Concatenate');\n }\n if (mask.length !== inputs.length) {\n throw new ValueError(`Mismatch in the length of mask (${mask.length}) ` +\n `and the legnth of inputs (${inputs.length})`);\n }\n return tfc.tidy(() => {\n let allNullMasks = true;\n mask.forEach(m => {\n if (m != null) {\n allNullMasks = false;\n return;\n }\n });\n if (allNullMasks) {\n return null;\n }\n const outputMasks = [];\n for (let i = 0; i < inputs.length; ++i) {\n if (mask[i] == null) {\n // Input is unmasked. Append all 1's to masks.\n outputMasks.push(tfc.onesLike(inputs[i]).asType('bool'));\n }\n else if (mask[i].rank < inputs[i].rank) {\n // Mask is smaller than the input, expand it.\n outputMasks.push(tfc.expandDims(mask[i], -1));\n }\n else {\n outputMasks.push(mask[i]);\n }\n }\n const concatenatedMasks = tfc.concat(outputMasks, this.axis);\n return tfc.all(concatenatedMasks, -1, false);\n });\n }\n getConfig() {\n const config = {\n 'axis': this.axis,\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nConcatenate.className = 'Concatenate';\nserialization.registerClass(Concatenate);\n/**\n * Concatenate an `Array` of inputs.\n *\n * This function can be invoked in three ways.\n *\n * 1. Construct an instance of `Concatenate` layer, by using no input argument\n * or a single configuration argument. 
The resultant `Concatenate` layer can\n * then be used on `tf.SymbolicTensor`s or `tf.Tensor`s. For example:\n *\n * ```js\n * const concatLayer = tf.layers.concatenate();\n *\n * // The layer can be applied to inputs.\n * const input1 = tf.input({shape: [2, 3]});\n * const input2 = tf.input({shape: [2, 4]});\n * const output = concatLayer.apply([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 7], with the first dimension as the undetermined batch\n * // dimension and the last dimension as the result of concatenating the\n * // last dimensions of the two inputs.\n * ```\n *\n * 2. Invoke directly on an `Array` of `tf.SymbolicTensor`s. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.SymbolicTensor`. For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 3]});\n * const input2 = tf.input({shape: [2, 4]});\n * const output = tf.layers.concatenate([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension and the last dimension as the result of concatenating the\n * // last dimensions of the two inputs.\n * ```\n *\n * 3. Invoke directly on `tf.Tensor`s, i.e., concrete values. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.Tensor` as the result of the computation. For\n * example:\n *\n * ```js\n * const input1 = tf.tensor2d([[1, 2], [3, 4]], [2, 2]);\n * const input2 = tf.tensor2d([[10, 20], [30, 40]], [2, 2]);\n * tf.layers.concatenate([input1, input2]).print();\n * // Gives [[1, 2, 10, 20], [3, 4, 30, 40]].\n *\n */\nexport function concatenate(config) {\n if (Array.isArray(config)) {\n const layer = new Concatenate({});\n return layer.apply(config);\n }\n else {\n return new Concatenate(config);\n }\n}\n/**\n * Interpretable potentially negative axis index.\n *\n * For example, given axis = -1, and dim = 3, this function will return 2.\n *\n * @param axis The axis index, may be a positive, zero or negative integer.\n * @param dim Total number of dimensions, a positive integer.\n * @returns A non-negative axis index equivalent to the input `axis`.\n */\nfunction interpretAxis(axis, dim) {\n while (axis < 0) {\n axis += dim;\n }\n return axis;\n}\nfunction batchDot(x, y, axes) {\n if (x.shape.length > 3 || y.shape.length > 3) {\n throw new NotImplementedError('batchDot is not implemented for tensors of 4D or higher rank yet');\n }\n tfc.util.assert(x.shape.length >= 2, () => `batchDot requires the rank of x to be >= 2, ` +\n `but got ${x.shape.length}`);\n tfc.util.assert(x.shape.length >= 2, () => `batchDot requires the rank of y to be >= 2, ` +\n `but got ${y.shape.length}`);\n if (typeof axes === 'number') {\n axes = [axes, axes];\n }\n if (x.dtype === 'complex64' || y.dtype === 'complex64') {\n throw new NotImplementedError('batchDot is not implemented for complex64-type Tensors yet.');\n }\n const xNDim = x.shape.length;\n const yNDim = y.shape.length;\n if (axes == null) {\n // Behave like batchMatmul by default.\n axes = [xNDim - 1, yNDim - 2];\n }\n const axesArray = axes;\n return tfc.tidy(() => {\n let diff;\n if (xNDim > yNDim) {\n diff = xNDim - yNDim;\n const diffShape = [];\n for (let i = 0; i < diff; ++i) {\n diffShape.push(1);\n }\n y = y.reshape(y.shape.concat(diffShape));\n }\n else if (yNDim > xNDim) {\n diff = yNDim - xNDim;\n const diffShape = [];\n for (let i = 0; i < diff; ++i) {\n diffShape.push(1);\n }\n 
x = x.reshape(x.shape.concat(diffShape));\n }\n else {\n diff = 0;\n }\n let out;\n if (x.shape.length === 2 && y.shape.length === 2) {\n if (axesArray[0] === axesArray[1]) {\n out = x.mul(y).sum(axesArray[0]);\n }\n else {\n out = x.transpose([1, 0]).mul(y).sum(axesArray[1]);\n }\n }\n else {\n const adjX = axesArray[0] !== x.shape.length - 1;\n const adjY = axesArray[1] === y.shape.length - 1;\n out = x.matMul(y, adjX, adjY);\n }\n if (diff > 0) {\n let idx;\n if (xNDim > yNDim) {\n idx = xNDim + yNDim - 3;\n }\n else {\n idx = xNDim - 1;\n }\n const squeezeAxes = [];\n for (let i = idx; i < idx + diff; ++i) {\n squeezeAxes.push(i);\n }\n out = out.squeeze(squeezeAxes);\n }\n if (out.shape.length === 1) {\n out = out.expandDims(1);\n }\n return out;\n });\n}\nexport class Dot extends Merge {\n constructor(args) {\n super(args);\n this.axes = args.axes;\n this.normalize = args.normalize == null ? false : args.normalize;\n this.supportsMasking = true;\n this.reshapeRequired = false;\n }\n build(inputShape) {\n tfc.util.assert(Array.isArray(inputShape) && inputShape.length === 2 &&\n Array.isArray(inputShape[0]) && Array.isArray(inputShape[1]), () => 'A `Dot` layer should be called on a list of exactly 2 inputs.');\n const shape1 = inputShape[0];\n const shape2 = inputShape[1];\n if (shape1.length > 3 || shape2.length > 3) {\n throw new NotImplementedError('Dot layer does not support tensors of 4D or higher rank yet.');\n }\n const axes = this.interpretAxes(shape1, shape2);\n if (shape1[axes[0]] !== shape2[axes[1]]) {\n throw new ValueError(`Dimension incompatibility: ` +\n `${shape1[axes[0]]} !== ${shape2[axes[1]]}`);\n }\n }\n mergeFunction(inputs) {\n if (inputs.length !== 2) {\n throw new ValueError('A `Dot` layer must be called on exactly 2 inputs, ' +\n `but received ${inputs.length} input(s).`);\n }\n let x1 = inputs[0];\n let x2 = inputs[1];\n let axes;\n if (!Array.isArray(this.axes)) {\n axes = [\n interpretAxis(this.axes, x1.shape.length),\n interpretAxis(this.axes, x2.shape.length)\n ];\n }\n else {\n axes = this.axes.map((axis, i) => interpretAxis(axis, inputs[i].shape.length));\n }\n if (this.normalize) {\n x1 = l2Normalize(x1, axes[0]);\n x2 = l2Normalize(x2, axes[1]);\n }\n return batchDot(x1, x2, axes);\n }\n interpretAxes(shape1, shape2) {\n let axes;\n if (!Array.isArray(this.axes)) {\n // `this.axes` is a single integer.\n axes = [\n interpretAxis(this.axes, shape1.length),\n interpretAxis(this.axes, shape2.length)\n ];\n }\n else {\n // `this.axes` is an Array of integers.\n axes = this.axes;\n }\n return axes;\n }\n computeOutputShape(inputShape) {\n tfc.util.assert(Array.isArray(inputShape) && inputShape.length === 2 &&\n Array.isArray(inputShape[0]) && Array.isArray(inputShape[1]), () => 'A `Dot` layer should be called on a list of exactly 2 inputs.');\n const shape1 = inputShape[0].slice();\n const shape2 = inputShape[1].slice();\n if (shape1.length > 3 || shape2.length > 3) {\n throw new NotImplementedError('Dot layer does not support tensors of 4D or higher rank yet.');\n }\n const axes = this.interpretAxes(shape1, shape2);\n shape1.splice(axes[0], 1);\n shape2.splice(axes[1], 1);\n shape2.splice(0, 1);\n const outputShape = shape1.concat(shape2);\n if (outputShape.length === 1) {\n outputShape.push(1);\n }\n return outputShape;\n }\n computeMask(inputs, mask) {\n return null;\n }\n getConfig() {\n const config = {\n 'axes': this.axes,\n 'normalize': this.normalize\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n 
}\n}\n/** @nocollapse */\nDot.className = 'Dot';\nserialization.registerClass(Dot);\n// TODO(cais): Add functional interfaces for the merge layers.\n//# sourceMappingURL=merge.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * TensorFlow.js Layers: Noise Layers.\n */\nimport { greaterEqual, randomUniform, serialization, tidy } from '@tensorflow/tfjs-core';\nimport * as K from '../backend/tfjs_backend';\nimport { Layer } from '../engine/topology';\nimport { getExactlyOneTensor } from '../utils/types_utils';\nexport class GaussianNoise extends Layer {\n constructor(args) {\n super(args);\n this.supportsMasking = true;\n this.stddev = args.stddev;\n }\n computeOutputShape(inputShape) {\n return inputShape;\n }\n getConfig() {\n const baseConfig = super.getConfig();\n const config = { stddev: this.stddev };\n Object.assign(config, baseConfig);\n return config;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n const input = getExactlyOneTensor(inputs);\n const noised = () => K.randomNormal(input.shape, 0, this.stddev).add(input);\n const output = K.inTrainPhase(noised, () => input, kwargs['training'] || false);\n return output;\n });\n }\n}\n/** @nocollapse */\nGaussianNoise.className = 'GaussianNoise';\nserialization.registerClass(GaussianNoise);\nexport class GaussianDropout extends Layer {\n constructor(args) {\n super(args);\n this.supportsMasking = true;\n this.rate = args.rate;\n }\n computeOutputShape(inputShape) {\n return inputShape;\n }\n getConfig() {\n const baseConfig = super.getConfig();\n const config = { rate: this.rate };\n Object.assign(config, baseConfig);\n return config;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n const input = getExactlyOneTensor(inputs);\n if (this.rate > 0 && this.rate < 1) {\n const noised = () => {\n const stddev = Math.sqrt(this.rate / (1 - this.rate));\n return input.mul(K.randomNormal(input.shape, 1, stddev));\n };\n return K.inTrainPhase(noised, () => input, kwargs['training'] || false);\n }\n return input;\n });\n }\n}\n/** @nocollapse */\nGaussianDropout.className = 'GaussianDropout';\nserialization.registerClass(GaussianDropout);\n/**\n * Applies Alpha Dropout to the input.\n *\n * As it is a regularization layer, it is only active at training time.\n *\n * Alpha Dropout is a `Dropout` that keeps mean and variance of inputs\n * to their original values, in order to ensure the self-normalizing property\n * even after this dropout.\n * Alpha Dropout fits well to Scaled Exponential Linear Units\n * by randomly setting activations to the negative saturation value.\n *\n * Arguments:\n * - `rate`: float, drop probability (as with `Dropout`).\n * The multiplicative noise will have\n * standard deviation `sqrt(rate / (1 - rate))`.\n * - `noise_shape`: A 1-D `Tensor` of type `int32`, representing the\n * shape for randomly generated keep/drop flags.\n *\n * Input shape:\n * Arbitrary. 
Use the keyword argument `inputShape`\n * (tuple of integers, does not include the samples axis)\n * when using this layer as the first layer in a model.\n *\n * Output shape:\n * Same shape as input.\n *\n * References:\n * - [Self-Normalizing Neural Networks](https://arxiv.org/abs/1706.02515)\n */\nexport class AlphaDropout extends Layer {\n constructor(args) {\n super(args);\n this.supportsMasking = true;\n this.rate = args.rate;\n this.noiseShape = args.noiseShape;\n }\n _getNoiseShape(inputs) {\n return this.noiseShape || getExactlyOneTensor(inputs).shape;\n }\n computeOutputShape(inputShape) {\n return inputShape;\n }\n getConfig() {\n const baseConfig = super.getConfig();\n const config = { rate: this.rate };\n Object.assign(config, baseConfig);\n return config;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n if (this.rate < 1 && this.rate > 0) {\n const noiseShape = this._getNoiseShape(inputs);\n const droppedInputs = () => {\n const input = getExactlyOneTensor(inputs);\n const alpha = 1.6732632423543772848170429916717;\n const scale = 1.0507009873554804934193349852946;\n const alphaP = -alpha * scale;\n let keptIdx = greaterEqual(randomUniform(noiseShape), this.rate);\n keptIdx = K.cast(keptIdx, 'float32'); // get default dtype.\n // Get affine transformation params.\n const a = ((1 - this.rate) * (1 + this.rate * alphaP ** 2)) ** -0.5;\n const b = -a * alphaP * this.rate;\n // Apply mask.\n const x = input.mul(keptIdx).add(keptIdx.add(-1).mul(alphaP));\n return x.mul(a).add(b);\n };\n return K.inTrainPhase(droppedInputs, () => getExactlyOneTensor(inputs), kwargs['training'] || false);\n }\n return inputs;\n });\n }\n}\n/** @nocollapse */\nAlphaDropout.className = 'AlphaDropout';\nserialization.registerClass(AlphaDropout);\n//# sourceMappingURL=noise.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Normalization layers.\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { moments, serialization, tidy, util } from '@tensorflow/tfjs-core';\nimport { getConstraint, serializeConstraint } from '../constraints';\nimport { InputSpec, Layer } from '../engine/topology';\nimport { NotImplementedError, ValueError } from '../errors';\nimport { getInitializer, serializeInitializer } from '../initializers';\nimport { getRegularizer, serializeRegularizer } from '../regularizers';\nimport * as generic_utils from '../utils/generic_utils';\nimport * as math_utils from '../utils/math_utils';\nimport { getExactlyOneShape, getExactlyOneTensor } from '../utils/types_utils';\n/**\n * Applies batch normalization on x given mean, var, beta and gamma.\n *\n * I.e. 
returns:\n * `output = (x - mean) / (sqrt(var) + epsilon) * gamma + beta`\n *\n * @param x Input tensor.\n * @param mean Mean of batch.\n * @param variance Variance of batch.\n * @param beta Tensor with which to center the input.\n * @param gamma Tensor by which to scale the input.\n * @param epsilon Fuzz factor.\n * @returns The result of the batch normalization.\n */\nexport function batchNormalization(x, mean, variance, beta, gamma, epsilon = 1e-3) {\n let out;\n if (x.rank === 2) {\n out = tfc.batchNorm2d(x, mean, variance, beta, gamma, epsilon);\n }\n else if (x.rank === 3) {\n // TODO(cais): Check rank; give proper error message.\n out = tfc.batchNorm3d(x, mean, variance, beta, gamma, epsilon);\n }\n else if (x.rank === 4) {\n out = tfc.batchNorm4d(x, mean, variance, beta, gamma, epsilon);\n }\n else {\n throw new NotImplementedError(`batchNormalization is not implemented for array of rank ${x.rank} ` +\n `yet`);\n }\n return out;\n}\n/**\n * Non-broadcasting batch normalization for use in training (not inference).\n *\n * The input is normalized to zero mean and unit variance along the\n * `reductionAxes`, followed by scaling with `gamma` and shifted by `beta`.\n * The result of that is returned as the first element\n * of the returned `Array`. The other two elements are the mean and variance,\n * respectively.\n *\n * @param x Input tensor to be normalized.\n * @param gamma Tensor by which to scale the input.\n * @param beta Tensor by which to center the input.\n * @param reductionAxes Axes over which to normalize.\n * @param epsilon Fuzz factor.\n * @returns An `Array` of three `Tensors`:\n * [normalized tensor, mean of input, variance of input].\n */\nfunction regularNormalizeBatchInTraining(x, gamma, beta, reductionAxes, epsilon = 1e-3) {\n return tidy(() => {\n const meanAndVariance = tfc.moments(x, reductionAxes);\n const mean = meanAndVariance.mean;\n const variance = meanAndVariance.variance;\n const normed = batchNormalization(x, mean, variance, beta, gamma, epsilon);\n return [normed, mean, variance];\n });\n}\n/**\n * Broadcasting batch normalization for use in training (not inference).\n *\n * The input is normalized to zero mean and unit variance along the\n * `reductionAxes`, followed by scaling with `gamma` and shifted by `beta`.\n * The result of that is returned as the first element\n * of the returned `Array`. The other two elements are the mean and variance,\n * respectively.\n *\n * @param x Input tensor to be normalized.\n * @param gamma Tensor by which to scale the input.\n * @param beta Tensor by which to center the input.\n * @param reductionAxes Axes over which to normalize.\n * @param epsilon Fuzz factor.\n * @returns An `Array` of three `Tensors`:\n * [normalized tensor, mean of input, variance of input].\n */\nfunction broadcastNormalizeBatchInTraining(x, gamma, beta, reductionAxes, epsilon = 1e-3) {\n return tidy(() => {\n const meanAndVariance = tfc.moments(x, reductionAxes);\n const mean = meanAndVariance.mean;\n const variance = meanAndVariance.variance;\n const targetShape = [];\n for (const axis of math_utils.range(0, x.rank)) {\n if (reductionAxes.indexOf(axis) !== -1) {\n targetShape.push(1);\n }\n else {\n targetShape.push(x.shape[axis]);\n }\n }\n const broadcastMean = mean.reshape(targetShape);\n const broadcastVariance = variance.reshape(targetShape);\n const broadcastGamma = gamma == null ? null : gamma.reshape(targetShape);\n const broadcastBeta = beta == null ? 
null : beta.reshape(targetShape);\n const normed = batchNormalization(x, broadcastMean, broadcastVariance, broadcastBeta, broadcastGamma, epsilon);\n return [normed, mean, variance];\n });\n}\n/**\n * Batch normalization for use in training (not inference).\n *\n * @param x Input tensor to be normalized.\n * @param gamma Tensor by which to scale the input.\n * @param beta Tensor by which to center the input.\n * @param reductionAxes Axes over which to normalize.\n * @param epsilon Fuzz factor.\n * @returns An `Array` of three `Tensors`:\n * [normalized tensor, mean of input, variance of input].\n */\nexport function normalizeBatchInTraining(x, gamma, beta, reductionAxes, epsilon = 1e-3) {\n if (util.arraysEqual(reductionAxes.slice().sort(), math_utils.range(0, x.rank - 1))) {\n return regularNormalizeBatchInTraining(x, gamma, beta, reductionAxes, epsilon);\n }\n else {\n return broadcastNormalizeBatchInTraining(x, gamma, beta, reductionAxes, epsilon);\n }\n}\nexport class BatchNormalization extends Layer {\n constructor(args) {\n if (args == null) {\n args = {};\n }\n super(args);\n this.supportsMasking = true;\n this.axis = args.axis == null ? -1 : args.axis;\n this.momentum = args.momentum == null ? 0.99 : args.momentum;\n this.epsilon = args.epsilon == null ? 1e-3 : args.epsilon;\n this.center = args.center == null ? true : args.center;\n this.scale = args.scale == null ? true : args.scale;\n this.betaInitializer = getInitializer(args.betaInitializer || 'zeros');\n this.gammaInitializer = getInitializer(args.gammaInitializer || 'ones');\n this.movingMeanInitializer =\n getInitializer(args.movingMeanInitializer || 'zeros');\n this.movingVarianceInitializer =\n getInitializer(args.movingVarianceInitializer || 'ones');\n this.betaConstraint = getConstraint(args.betaConstraint);\n this.gammaConstraint = getConstraint(args.gammaConstraint);\n this.betaRegularizer = getRegularizer(args.betaRegularizer);\n this.gammaRegularizer = getRegularizer(args.gammaRegularizer);\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const axis = this.axis >= 0 ? this.axis : (this.axis + inputShape.length);\n const dim = inputShape[axis];\n if (dim == null) {\n throw new ValueError(`Axis ${axis} of input tensor should have a defined dimension but ` +\n `the layer received an input with shape ` +\n `${JSON.stringify(inputShape)}.`);\n }\n this.inputSpec =\n [new InputSpec({ ndim: inputShape.length, axes: { [axis]: dim } })];\n const shape = [dim];\n if (this.scale) {\n this.gamma = this.addWeight('gamma', shape, null, this.gammaInitializer, this.gammaRegularizer, true, this.gammaConstraint);\n }\n if (this.center) {\n this.beta = this.addWeight('beta', shape, null, this.betaInitializer, this.betaRegularizer, true, this.betaConstraint);\n }\n this.movingMean = this.addWeight('moving_mean', shape, null, this.movingMeanInitializer, null, false);\n this.movingVariance = this.addWeight('moving_variance', shape, null, this.movingVarianceInitializer, null, false);\n this.built = true;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n const training = kwargs['training'] == null ? false : kwargs['training'];\n const input = getExactlyOneTensor(inputs);\n const inputShape = input.shape;\n const ndim = inputShape.length;\n const reductionAxes = math_utils.range(0, ndim);\n const axis = this.axis >= 0 ? 
this.axis : (this.axis + ndim);\n reductionAxes.splice(axis, 1);\n const broadcastShape = generic_utils.pyListRepeat(1, ndim);\n broadcastShape[axis] = inputShape[axis];\n const sortedReductionAxes = reductionAxes.slice();\n sortedReductionAxes.sort();\n const needsBroadcasting = !util.arraysEqual(sortedReductionAxes, math_utils.range(0, ndim).slice(0, ndim - 1));\n const normalizeInference = () => {\n if (needsBroadcasting) {\n const broadcastMovingMean = this.movingMean.read().reshape(broadcastShape);\n const broadcastMovingVariance = this.movingVariance.read().reshape(broadcastShape);\n const broadcastBeta = this.center ? this.beta.read().reshape(broadcastShape) : null;\n const broadcastGamma = this.scale ? this.gamma.read().reshape(broadcastShape) : null;\n return batchNormalization(input, broadcastMovingMean, broadcastMovingVariance, broadcastBeta, broadcastGamma, this.epsilon);\n }\n else {\n return batchNormalization(input, this.movingMean.read(), this.movingVariance.read(), this.beta == null ? null : this.beta.read(), this.gamma == null ? null : this.gamma.read(), this.epsilon);\n }\n };\n if (!training) {\n return normalizeInference();\n }\n const [normedTraining, mean, variance] = normalizeBatchInTraining(input, this.gamma.read(), this.beta.read(), reductionAxes, this.epsilon);\n const doMovingAverage = (variable, value, momentum) => {\n tfc.tidy(() => {\n const decay = 1 - momentum;\n const origValue = variable.read();\n const updateDelta = origValue.sub(value).mul(decay);\n variable.write(origValue.sub(updateDelta));\n });\n };\n // Perform updates to moving mean and moving variance for training.\n // Porting Note: In PyKeras, these updates to `movingMean` and\n // `movingAverage` are done as a deferred Graph, added to the `Layer`'s\n // `update`s using the `add_update()` method. Here we do it imperatively\n // and encapsulate the updates in a function that is invoked\n // immediately.\n const updateMovingMeanAndVariance = () => {\n doMovingAverage(this.movingMean, mean, this.momentum);\n doMovingAverage(this.movingVariance, variance, this.momentum);\n };\n updateMovingMeanAndVariance();\n return normedTraining;\n });\n }\n getConfig() {\n const config = {\n axis: this.axis,\n momentum: this.momentum,\n epsilon: this.epsilon,\n center: this.center,\n scale: this.scale,\n betaInitializer: serializeInitializer(this.betaInitializer),\n gammaInitializer: serializeInitializer(this.gammaInitializer),\n movingMeanInitializer: serializeInitializer(this.movingMeanInitializer),\n movingVarianceInitializer: serializeInitializer(this.movingVarianceInitializer),\n betaRegularizer: serializeRegularizer(this.betaRegularizer),\n gammaRegularizer: serializeRegularizer(this.gammaRegularizer),\n betaConstraint: serializeConstraint(this.betaConstraint),\n gammaConstraint: serializeConstraint(this.gammaConstraint)\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nBatchNormalization.className = 'BatchNormalization';\nserialization.registerClass(BatchNormalization);\nexport class LayerNormalization extends Layer {\n constructor(args) {\n if (args == null) {\n args = {};\n }\n super(args);\n this.axis = args.axis == null ? 
-1 : args.axis;\n if (typeof this.axis === 'number') {\n if (!Number.isInteger(this.axis)) {\n throw new Error(`Expected axis to be an integer, but received ${this.axis}`);\n }\n }\n else if (Array.isArray(this.axis)) {\n for (const axis of this.axis) {\n if (!Number.isInteger(axis)) {\n throw new Error(`Expected axis to be an array of integers, ` +\n `but received ${JSON.stringify(this.axis)}`);\n }\n }\n }\n else {\n throw new Error(`Expected axis to be an integer or an array of integers, ` +\n `but received ${JSON.stringify(this.axis)}`);\n }\n this.epsilon = args.epsilon == null ? 1e-3 : args.epsilon;\n this.center = args.center == null ? true : args.center;\n this.scale = args.scale == null ? true : args.scale;\n this.betaInitializer = getInitializer(args.betaInitializer || 'zeros');\n this.gammaInitializer = getInitializer(args.gammaInitializer || 'ones');\n this.betaRegularizer = getRegularizer(args.betaRegularizer);\n this.gammaRegularizer = getRegularizer(args.gammaRegularizer);\n this.supportsMasking = true;\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const nDims = inputShape.length;\n // Convert axis to array and resolve negatives.\n if (typeof this.axis === 'number') {\n this.axis = [this.axis];\n }\n for (let i = 0; i < this.axis.length; ++i) {\n if (this.axis[i] < 0) {\n this.axis[i] += nDims;\n }\n }\n // Further validate axes.\n for (const axis of this.axis) {\n if (axis < 0 || axis >= nDims) {\n throw new Error(`Invalid axis: ${axis}`);\n }\n }\n if (this.axis.length !== generic_utils.unique(this.axis).length) {\n throw new Error(`Found duplicate axes in: ${this.axis}`);\n }\n const paramShape = this.axis.map(axis => inputShape[axis]);\n const trainable = true;\n if (this.scale) {\n this.gamma = this.addWeight('gamma', paramShape, 'float32', this.gammaInitializer, this.gammaRegularizer, trainable);\n }\n else {\n this.gamma = null;\n }\n if (this.center) {\n this.beta = this.addWeight('beta', paramShape, 'float32', this.betaInitializer, this.betaRegularizer, trainable);\n }\n else {\n this.beta = null;\n }\n this.built = true;\n }\n call(inputs, kwargs) {\n const input = getExactlyOneTensor(inputs);\n const inputShape = input.shape;\n const nDims = inputShape.length;\n return tidy(() => {\n const keepDims = true;\n let { mean, variance } = moments(input, this.axis, keepDims);\n const broadcastShape = generic_utils.pyListRepeat(1, nDims);\n for (const dim of this.axis) {\n broadcastShape[dim] = inputShape[dim];\n }\n const broadcast = (v) => {\n if (v != null && v.shape.length !== nDims &&\n this.axis !== [nDims - 1]) {\n return v.reshape(broadcastShape);\n }\n else {\n return v;\n }\n };\n let scale = broadcast(this.gamma.read());\n let offset = broadcast(this.beta.read());\n // TODO(https://github.com/tensorflow/tfjs/issues/2120): The tiling below\n // is a workaround for the limitation of core's batchNormalization?d don't\n // support broadcasting in their gradients. In addition, the tiling is\n // necessary to ensure correctness on the browser CPU backend regardless\n // of forward or backward computation. Remove this workaround once the\n // limitation is addressed. 
See .\n const momentsTiling = [];\n const scaleOffsetTiling = [];\n for (let i = 0; i < nDims; ++i) {\n if (this.axis.indexOf(i) !== -1) {\n momentsTiling.push(inputShape[i]);\n scaleOffsetTiling.push(1);\n }\n else {\n momentsTiling.push(1);\n scaleOffsetTiling.push(inputShape[i]);\n }\n }\n mean = mean.tile(momentsTiling);\n variance = variance.tile(momentsTiling);\n scale = scale.tile(scaleOffsetTiling);\n offset = offset.tile(scaleOffsetTiling);\n return batchNormalization(input, mean, variance, offset, scale, this.epsilon);\n });\n }\n getConfig() {\n const config = {\n axis: this.axis,\n epsilon: this.epsilon,\n center: this.center,\n scale: this.scale,\n betaInitializer: serializeInitializer(this.betaInitializer),\n gammaInitializer: serializeInitializer(this.gammaInitializer),\n betaRegularizer: serializeRegularizer(this.betaRegularizer),\n gammaRegularizer: serializeRegularizer(this.gammaRegularizer)\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nLayerNormalization.className = 'LayerNormalization';\nserialization.registerClass(LayerNormalization);\n//# sourceMappingURL=normalization.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Padding Layers.\n */\n// Porting Note: In Python Keras, the padding layers are in convolutional.py,\n// but we decided to put them in a separate file (padding.ts) for clarity.\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { serialization, tidy } from '@tensorflow/tfjs-core';\nimport { imageDataFormat } from '../backend/common';\nimport { InputSpec, Layer } from '../engine/topology';\nimport { ValueError } from '../errors';\nimport { getExactlyOneShape, getExactlyOneTensor } from '../utils/types_utils';\n/**\n * Pads the middle dimension of a 3D tensor.\n *\n * @param x Input `tf.Tensor` to be padded.\n * @param padding `Array` of 2 integers, how many zeros to add at the start and\n * end of the middle dimension (i.e., dimension 1).\n * @return A padded 3D `tf.Tensor`.\n */\nexport function temporalPadding(x, padding) {\n return tidy(() => {\n if (x.rank !== 3) {\n throw new ValueError(`temporalPadding expects input tensor to be 3-D, but received a ` +\n `${x.rank}-D tensor.`);\n }\n if (padding == null) {\n padding = [1, 1];\n }\n if (padding.length !== 2) {\n throw new ValueError(`temporalPadding expects input padding pattern to be a length-2 ` +\n `array, but received a length-${padding.length} array.`);\n }\n const pattern = [[0, 0], padding, [0, 0]];\n return tfc.pad(x, pattern);\n });\n}\n/**\n * Pads the 2nd and 3rd dimensions of a 4D tensor.\n *\n * @param x Input `tf.Tensor` to be padded.\n * @param padding `Array` of two `Array`s, each of which is an `Array` of two\n * integers. 
The amount of padding at the beginning and end of the 2nd and 3rd\n * dimensions, respectively.\n * @param dataFormat 'channelsLast' (default) or 'channelsFirst'.\n * @return Padded 4D `tf.Tensor`.\n */\nexport function spatial2dPadding(x, padding, dataFormat) {\n return tidy(() => {\n if (x.rank !== 4) {\n throw new ValueError(`temporalPadding expects input tensor to be 4-D, but received a ` +\n `${x.rank}-D tensor.`);\n }\n if (padding == null) {\n padding = [[1, 1], [1, 1]];\n }\n if (padding.length !== 2 || padding[0].length !== 2 ||\n padding[1].length !== 2) {\n throw new ValueError('spatial2dPadding expects `padding` to be an Array of two Arrays, ' +\n 'each of which is an Array of two integers.');\n }\n if (dataFormat == null) {\n dataFormat = imageDataFormat();\n }\n if (dataFormat !== 'channelsLast' && dataFormat !== 'channelsFirst') {\n throw new ValueError(`Unknown data format: ${dataFormat}. ` +\n `Supported data formats are 'channelsLast' and 'channelsFirst.`);\n }\n let pattern;\n if (dataFormat === 'channelsFirst') {\n pattern = [[0, 0], [0, 0], padding[0], padding[1]];\n }\n else {\n pattern = [[0, 0], padding[0], padding[1], [0, 0]];\n }\n return tfc.pad(x, pattern);\n });\n}\nexport class ZeroPadding2D extends Layer {\n constructor(args) {\n if (args == null) {\n args = {};\n }\n super(args);\n this.dataFormat =\n args.dataFormat == null ? imageDataFormat() : args.dataFormat;\n // TODO(cais): Maybe refactor the following logic surrounding `padding`\n // into a helper method.\n if (args.padding == null) {\n this.padding = [[1, 1], [1, 1]];\n }\n else if (typeof args.padding === 'number') {\n this.padding =\n [[args.padding, args.padding], [args.padding, args.padding]];\n }\n else {\n args.padding = args.padding;\n if (args.padding.length !== 2) {\n throw new ValueError(`ZeroPadding2D expects padding to be a length-2 array, but ` +\n `received a length-${args.padding.length} array.`);\n }\n let heightPadding;\n let widthPadding;\n if (typeof args.padding[0] === 'number') {\n heightPadding = [args.padding[0], args.padding[0]];\n widthPadding = [args.padding[1], args.padding[1]];\n }\n else {\n args.padding = args.padding;\n if (args.padding[0].length !== 2) {\n throw new ValueError(`ZeroPadding2D expects height padding to be a length-2 array, ` +\n `but received a length-${args.padding[0].length} array.`);\n }\n heightPadding = args.padding[0];\n if (args.padding[1].length !== 2) {\n throw new ValueError(`ZeroPadding2D expects width padding to be a length-2 array, ` +\n `but received a length-${args.padding[1].length} array.`);\n }\n widthPadding = args.padding[1];\n }\n this.padding = [heightPadding, widthPadding];\n }\n this.inputSpec = [new InputSpec({ ndim: 4 })];\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n let rows;\n let cols;\n if (this.dataFormat === 'channelsFirst') {\n if (inputShape[2] != null && inputShape[2] >= 0) {\n rows = inputShape[2] + this.padding[0][0] + this.padding[0][1];\n }\n else {\n rows = null;\n }\n if (inputShape[3] != null && inputShape[3] >= 0) {\n cols = inputShape[3] + this.padding[1][0] + this.padding[1][1];\n }\n else {\n cols = null;\n }\n return [inputShape[0], inputShape[1], rows, cols];\n }\n else {\n if (inputShape[1] != null && inputShape[1] >= 0) {\n rows = inputShape[1] + this.padding[0][0] + this.padding[0][1];\n }\n else {\n rows = null;\n }\n if (inputShape[2] != null && inputShape[2] >= 0) {\n cols = inputShape[2] + this.padding[1][0] + this.padding[1][1];\n }\n else {\n cols = 
null;\n }\n return [inputShape[0], rows, cols, inputShape[3]];\n }\n }\n call(inputs, kwargs) {\n return tidy(() => spatial2dPadding(getExactlyOneTensor(inputs), this.padding, this.dataFormat));\n }\n getConfig() {\n const config = {\n padding: this.padding,\n dataFormat: this.dataFormat,\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nZeroPadding2D.className = 'ZeroPadding2D';\nserialization.registerClass(ZeroPadding2D);\n//# sourceMappingURL=padding.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * TensorFlow.js Layers: Pooling Layers.\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { serialization, tidy } from '@tensorflow/tfjs-core';\nimport { imageDataFormat } from '../backend/common';\nimport * as K from '../backend/tfjs_backend';\nimport { checkDataFormat, checkPaddingMode, checkPoolMode } from '../common';\nimport { InputSpec } from '../engine/topology';\nimport { Layer } from '../engine/topology';\nimport { NotImplementedError, ValueError } from '../errors';\nimport { convOutputLength } from '../utils/conv_utils';\nimport { assertPositiveInteger } from '../utils/generic_utils';\nimport { getExactlyOneShape, getExactlyOneTensor } from '../utils/types_utils';\nimport { preprocessConv2DInput, preprocessConv3DInput } from './convolutional';\n/**\n * 2D pooling.\n * @param x\n * @param poolSize\n * @param stridesdes strides. Defaults to [1, 1].\n * @param padding padding. Defaults to 'valid'.\n * @param dataFormat data format. Defaults to 'channelsLast'.\n * @param poolMode Mode of pooling. Defaults to 'max'.\n * @returns Result of the 2D pooling.\n */\nexport function pool2d(x, poolSize, strides, padding, dataFormat, poolMode) {\n return tidy(() => {\n checkDataFormat(dataFormat);\n checkPoolMode(poolMode);\n checkPaddingMode(padding);\n if (strides == null) {\n strides = [1, 1];\n }\n if (padding == null) {\n padding = 'valid';\n }\n if (dataFormat == null) {\n dataFormat = imageDataFormat();\n }\n if (poolMode == null) {\n poolMode = 'max';\n }\n // TODO(cais): Remove the preprocessing step once deeplearn.js supports\n // dataFormat as an input argument.\n x = preprocessConv2DInput(x, dataFormat); // x is NHWC after preprocessing.\n let y;\n const paddingString = (padding === 'same') ? 'same' : 'valid';\n if (poolMode === 'max') {\n // TODO(cais): Rank check?\n y = tfc.maxPool(x, poolSize, strides, paddingString);\n }\n else { // 'avg'\n // TODO(cais): Check the dtype and rank of x and give clear error message\n // if those are incorrect.\n y = tfc.avgPool(\n // TODO(cais): Rank check?\n x, poolSize, strides, paddingString);\n }\n if (dataFormat === 'channelsFirst') {\n y = tfc.transpose(y, [0, 3, 1, 2]); // NHWC -> NCHW.\n }\n return y;\n });\n}\n/**\n * 3D pooling.\n * @param x\n * @param poolSize. Default to [1, 1, 1].\n * @param strides strides. Defaults to [1, 1, 1].\n * @param padding padding. Defaults to 'valid'.\n * @param dataFormat data format. Defaults to 'channelsLast'.\n * @param poolMode Mode of pooling. 
Defaults to 'max'.\n * @returns Result of the 3D pooling.\n */\nexport function pool3d(x, poolSize, strides, padding, dataFormat, poolMode) {\n return tidy(() => {\n checkDataFormat(dataFormat);\n checkPoolMode(poolMode);\n checkPaddingMode(padding);\n if (strides == null) {\n strides = [1, 1, 1];\n }\n if (padding == null) {\n padding = 'valid';\n }\n if (dataFormat == null) {\n dataFormat = imageDataFormat();\n }\n if (poolMode == null) {\n poolMode = 'max';\n }\n // x is NDHWC after preprocessing.\n x = preprocessConv3DInput(x, dataFormat);\n let y;\n const paddingString = (padding === 'same') ? 'same' : 'valid';\n if (poolMode === 'max') {\n y = tfc.maxPool3d(x, poolSize, strides, paddingString);\n }\n else { // 'avg'\n y = tfc.avgPool3d(x, poolSize, strides, paddingString);\n }\n if (dataFormat === 'channelsFirst') {\n y = tfc.transpose(y, [0, 4, 1, 2, 3]); // NDHWC -> NCDHW.\n }\n return y;\n });\n}\n/**\n * Abstract class for different pooling 1D layers.\n */\nexport class Pooling1D extends Layer {\n /**\n *\n * @param args Parameters for the Pooling layer.\n *\n * config.poolSize defaults to 2.\n */\n constructor(args) {\n if (args.poolSize == null) {\n args.poolSize = 2;\n }\n super(args);\n if (typeof args.poolSize === 'number') {\n this.poolSize = [args.poolSize];\n }\n else if (Array.isArray(args.poolSize) &&\n args.poolSize.length === 1 &&\n typeof args.poolSize[0] === 'number') {\n this.poolSize = args.poolSize;\n }\n else {\n throw new ValueError(`poolSize for 1D convolutional layer must be a number or an ` +\n `Array of a single number, but received ` +\n `${JSON.stringify(args.poolSize)}`);\n }\n assertPositiveInteger(this.poolSize, 'poolSize');\n if (args.strides == null) {\n this.strides = this.poolSize;\n }\n else {\n if (typeof args.strides === 'number') {\n this.strides = [args.strides];\n }\n else if (Array.isArray(args.strides) &&\n args.strides.length === 1 &&\n typeof args.strides[0] === 'number') {\n this.strides = args.strides;\n }\n else {\n throw new ValueError(`strides for 1D convolutional layer must be a number or an ` +\n `Array of a single number, but received ` +\n `${JSON.stringify(args.strides)}`);\n }\n }\n assertPositiveInteger(this.strides, 'strides');\n this.padding = args.padding == null ? 
'valid' : args.padding;\n checkPaddingMode(this.padding);\n this.inputSpec = [new InputSpec({ ndim: 3 })];\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const length = convOutputLength(inputShape[1], this.poolSize[0], this.padding, this.strides[0]);\n return [inputShape[0], length, inputShape[2]];\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n // Add dummy last dimension.\n inputs = K.expandDims(getExactlyOneTensor(inputs), 2);\n const output = this.poolingFunction(getExactlyOneTensor(inputs), [this.poolSize[0], 1], [this.strides[0], 1], this.padding, 'channelsLast');\n // Remove dummy last dimension.\n return tfc.squeeze(output, [2]);\n });\n }\n getConfig() {\n const config = {\n poolSize: this.poolSize,\n padding: this.padding,\n strides: this.strides,\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\nexport class MaxPooling1D extends Pooling1D {\n constructor(args) {\n super(args);\n }\n poolingFunction(inputs, poolSize, strides, padding, dataFormat) {\n checkDataFormat(dataFormat);\n checkPaddingMode(padding);\n return pool2d(inputs, poolSize, strides, padding, dataFormat, 'max');\n }\n}\n/** @nocollapse */\nMaxPooling1D.className = 'MaxPooling1D';\nserialization.registerClass(MaxPooling1D);\nexport class AveragePooling1D extends Pooling1D {\n constructor(args) {\n super(args);\n }\n poolingFunction(inputs, poolSize, strides, padding, dataFormat) {\n checkDataFormat(dataFormat);\n checkPaddingMode(padding);\n return pool2d(inputs, poolSize, strides, padding, dataFormat, 'avg');\n }\n}\n/** @nocollapse */\nAveragePooling1D.className = 'AveragePooling1D';\nserialization.registerClass(AveragePooling1D);\n/**\n * Abstract class for different pooling 2D layers.\n */\nexport class Pooling2D extends Layer {\n constructor(args) {\n if (args.poolSize == null) {\n args.poolSize = [2, 2];\n }\n super(args);\n this.poolSize = Array.isArray(args.poolSize) ?\n args.poolSize :\n [args.poolSize, args.poolSize];\n if (args.strides == null) {\n this.strides = this.poolSize;\n }\n else if (Array.isArray(args.strides)) {\n if (args.strides.length !== 2) {\n throw new ValueError(`If the strides property of a 2D pooling layer is an Array, ` +\n `it is expected to have a length of 2, but received length ` +\n `${args.strides.length}.`);\n }\n this.strides = args.strides;\n }\n else {\n // `config.strides` is a number.\n this.strides = [args.strides, args.strides];\n }\n assertPositiveInteger(this.poolSize, 'poolSize');\n assertPositiveInteger(this.strides, 'strides');\n this.padding = args.padding == null ? 'valid' : args.padding;\n this.dataFormat =\n args.dataFormat == null ? 'channelsLast' : args.dataFormat;\n checkDataFormat(this.dataFormat);\n checkPaddingMode(this.padding);\n this.inputSpec = [new InputSpec({ ndim: 4 })];\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n let rows = this.dataFormat === 'channelsFirst' ? inputShape[2] : inputShape[1];\n let cols = this.dataFormat === 'channelsFirst' ? 
inputShape[3] : inputShape[2];\n rows =\n convOutputLength(rows, this.poolSize[0], this.padding, this.strides[0]);\n cols =\n convOutputLength(cols, this.poolSize[1], this.padding, this.strides[1]);\n if (this.dataFormat === 'channelsFirst') {\n return [inputShape[0], inputShape[1], rows, cols];\n }\n else {\n return [inputShape[0], rows, cols, inputShape[3]];\n }\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n return this.poolingFunction(getExactlyOneTensor(inputs), this.poolSize, this.strides, this.padding, this.dataFormat);\n });\n }\n getConfig() {\n const config = {\n poolSize: this.poolSize,\n padding: this.padding,\n strides: this.strides,\n dataFormat: this.dataFormat\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\nexport class MaxPooling2D extends Pooling2D {\n constructor(args) {\n super(args);\n }\n poolingFunction(inputs, poolSize, strides, padding, dataFormat) {\n checkDataFormat(dataFormat);\n checkPaddingMode(padding);\n return pool2d(inputs, poolSize, strides, padding, dataFormat, 'max');\n }\n}\n/** @nocollapse */\nMaxPooling2D.className = 'MaxPooling2D';\nserialization.registerClass(MaxPooling2D);\nexport class AveragePooling2D extends Pooling2D {\n constructor(args) {\n super(args);\n }\n poolingFunction(inputs, poolSize, strides, padding, dataFormat) {\n checkDataFormat(dataFormat);\n checkPaddingMode(padding);\n return pool2d(inputs, poolSize, strides, padding, dataFormat, 'avg');\n }\n}\n/** @nocollapse */\nAveragePooling2D.className = 'AveragePooling2D';\nserialization.registerClass(AveragePooling2D);\n/**\n * Abstract class for different pooling 3D layers.\n */\nexport class Pooling3D extends Layer {\n constructor(args) {\n if (args.poolSize == null) {\n args.poolSize = [2, 2, 2];\n }\n super(args);\n this.poolSize = Array.isArray(args.poolSize) ?\n args.poolSize :\n [args.poolSize, args.poolSize, args.poolSize];\n if (args.strides == null) {\n this.strides = this.poolSize;\n }\n else if (Array.isArray(args.strides)) {\n if (args.strides.length !== 3) {\n throw new ValueError(`If the strides property of a 3D pooling layer is an Array, ` +\n `it is expected to have a length of 3, but received length ` +\n `${args.strides.length}.`);\n }\n this.strides = args.strides;\n }\n else {\n // `config.strides` is a number.\n this.strides = [args.strides, args.strides, args.strides];\n }\n assertPositiveInteger(this.poolSize, 'poolSize');\n assertPositiveInteger(this.strides, 'strides');\n this.padding = args.padding == null ? 'valid' : args.padding;\n this.dataFormat =\n args.dataFormat == null ? 'channelsLast' : args.dataFormat;\n checkDataFormat(this.dataFormat);\n checkPaddingMode(this.padding);\n this.inputSpec = [new InputSpec({ ndim: 5 })];\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n let depths = this.dataFormat === 'channelsFirst' ? inputShape[2] : inputShape[1];\n let rows = this.dataFormat === 'channelsFirst' ? inputShape[3] : inputShape[2];\n let cols = this.dataFormat === 'channelsFirst' ? 
inputShape[4] : inputShape[3];\n depths = convOutputLength(depths, this.poolSize[0], this.padding, this.strides[0]);\n rows =\n convOutputLength(rows, this.poolSize[1], this.padding, this.strides[1]);\n cols =\n convOutputLength(cols, this.poolSize[2], this.padding, this.strides[2]);\n if (this.dataFormat === 'channelsFirst') {\n return [inputShape[0], inputShape[1], depths, rows, cols];\n }\n else {\n return [inputShape[0], depths, rows, cols, inputShape[4]];\n }\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n return this.poolingFunction(getExactlyOneTensor(inputs), this.poolSize, this.strides, this.padding, this.dataFormat);\n });\n }\n getConfig() {\n const config = {\n poolSize: this.poolSize,\n padding: this.padding,\n strides: this.strides,\n dataFormat: this.dataFormat\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\nexport class MaxPooling3D extends Pooling3D {\n constructor(args) {\n super(args);\n }\n poolingFunction(inputs, poolSize, strides, padding, dataFormat) {\n checkDataFormat(dataFormat);\n checkPaddingMode(padding);\n return pool3d(inputs, poolSize, strides, padding, dataFormat, 'max');\n }\n}\n/** @nocollapse */\nMaxPooling3D.className = 'MaxPooling3D';\nserialization.registerClass(MaxPooling3D);\nexport class AveragePooling3D extends Pooling3D {\n constructor(args) {\n super(args);\n }\n poolingFunction(inputs, poolSize, strides, padding, dataFormat) {\n checkDataFormat(dataFormat);\n checkPaddingMode(padding);\n return pool3d(inputs, poolSize, strides, padding, dataFormat, 'avg');\n }\n}\n/** @nocollapse */\nAveragePooling3D.className = 'AveragePooling3D';\nserialization.registerClass(AveragePooling3D);\n/**\n * Abstract class for different global pooling 1D layers.\n */\nexport class GlobalPooling1D extends Layer {\n constructor(args) {\n super(args);\n this.inputSpec = [new InputSpec({ ndim: 3 })];\n }\n computeOutputShape(inputShape) {\n return [inputShape[0], inputShape[2]];\n }\n call(inputs, kwargs) {\n throw new NotImplementedError();\n }\n}\nexport class GlobalAveragePooling1D extends GlobalPooling1D {\n constructor(args) {\n super(args || {});\n }\n call(inputs, kwargs) {\n return tidy(() => {\n const input = getExactlyOneTensor(inputs);\n return tfc.mean(input, 1);\n });\n }\n}\n/** @nocollapse */\nGlobalAveragePooling1D.className = 'GlobalAveragePooling1D';\nserialization.registerClass(GlobalAveragePooling1D);\nexport class GlobalMaxPooling1D extends GlobalPooling1D {\n constructor(args) {\n super(args || {});\n }\n call(inputs, kwargs) {\n return tidy(() => {\n const input = getExactlyOneTensor(inputs);\n return tfc.max(input, 1);\n });\n }\n}\n/** @nocollapse */\nGlobalMaxPooling1D.className = 'GlobalMaxPooling1D';\nserialization.registerClass(GlobalMaxPooling1D);\n/**\n * Abstract class for different global pooling 2D layers.\n */\nexport class GlobalPooling2D extends Layer {\n constructor(args) {\n super(args);\n this.dataFormat =\n args.dataFormat == null ? 
'channelsLast' : args.dataFormat;\n checkDataFormat(this.dataFormat);\n this.inputSpec = [new InputSpec({ ndim: 4 })];\n }\n computeOutputShape(inputShape) {\n inputShape = inputShape;\n if (this.dataFormat === 'channelsLast') {\n return [inputShape[0], inputShape[3]];\n }\n else {\n return [inputShape[0], inputShape[1]];\n }\n }\n call(inputs, kwargs) {\n throw new NotImplementedError();\n }\n getConfig() {\n const config = { dataFormat: this.dataFormat };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\nexport class GlobalAveragePooling2D extends GlobalPooling2D {\n call(inputs, kwargs) {\n return tidy(() => {\n const input = getExactlyOneTensor(inputs);\n if (this.dataFormat === 'channelsLast') {\n return tfc.mean(input, [1, 2]);\n }\n else {\n return tfc.mean(input, [2, 3]);\n }\n });\n }\n}\n/** @nocollapse */\nGlobalAveragePooling2D.className = 'GlobalAveragePooling2D';\nserialization.registerClass(GlobalAveragePooling2D);\nexport class GlobalMaxPooling2D extends GlobalPooling2D {\n call(inputs, kwargs) {\n return tidy(() => {\n const input = getExactlyOneTensor(inputs);\n if (this.dataFormat === 'channelsLast') {\n return tfc.max(input, [1, 2]);\n }\n else {\n return tfc.max(input, [2, 3]);\n }\n });\n }\n}\n/** @nocollapse */\nGlobalMaxPooling2D.className = 'GlobalMaxPooling2D';\nserialization.registerClass(GlobalMaxPooling2D);\n//# sourceMappingURL=pooling.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Layers that augment the functionality of a base layer.\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { serialization, tidy } from '@tensorflow/tfjs-core';\nimport * as K from '../backend/tfjs_backend';\nimport { nameScope } from '../common';\nimport { InputSpec, Layer, SymbolicTensor } from '../engine/topology';\nimport { NotImplementedError, ValueError } from '../errors';\nimport { VALID_BIDIRECTIONAL_MERGE_MODES } from '../keras_format/common';\nimport * as generic_utils from '../utils/generic_utils';\nimport { getExactlyOneShape, getExactlyOneTensor } from '../utils/types_utils';\nimport { rnn, standardizeArgs } from './recurrent';\nimport { deserialize } from './serialization';\n/**\n * Abstract wrapper base class.\n *\n * Wrappers take another layer and augment it in various ways.\n * Do not use this class as a layer, it is only an abstract base class.\n * Two usable wrappers are the `TimeDistributed` and `Bidirectional` wrappers.\n */\nexport class Wrapper extends Layer {\n constructor(args) {\n // Porting Note: In PyKeras, `self.layer` is set prior to the calling\n // `super()`. But we can't do that here due to TypeScript's restriction.\n // See: https://github.com/Microsoft/TypeScript/issues/8277\n // As a result, we have to add checks in `get trainable()` and\n // `set trainable()` below in order to prevent using `this.layer` when\n // its value is `undefined`. 
The super constructor does use the getter\n // and the setter of `this.layer`.\n super(args);\n this.layer = args.layer;\n }\n build(inputShape) {\n this.built = true;\n }\n // TODO(cais): Implement activityRegularizer getter.\n get trainable() {\n // Porting Note: the check of `this.layer` here is necessary due to the\n // way the `constructor` of this class is written (see Porting Note\n // above).\n if (this.layer != null) {\n return this.layer.trainable;\n }\n else {\n return false;\n }\n }\n set trainable(value) {\n // Porting Note: the check of `this.layer` here is necessary due to the\n // way the `constructor` of this class is written (see Porting Note\n // above).\n if (this.layer != null) {\n this.layer.trainable = value;\n }\n }\n get trainableWeights() {\n return this.layer.trainableWeights;\n }\n // TODO(cais): Implement setter for trainableWeights.\n get nonTrainableWeights() {\n return this.layer.nonTrainableWeights;\n }\n // TODO(cais): Implement setter for nonTrainableWeights.\n get updates() {\n // tslint:disable-next-line:no-any\n return this.layer._updates;\n }\n // TODO(cais): Implement getUpdatesFor().\n get losses() {\n return this.layer.losses;\n }\n // TODO(cais): Implement getLossesFor().\n getWeights() {\n return this.layer.getWeights();\n }\n setWeights(weights) {\n this.layer.setWeights(weights);\n }\n getConfig() {\n const config = {\n 'layer': {\n 'className': this.layer.getClassName(),\n 'config': this.layer.getConfig(),\n }\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n setFastWeightInitDuringBuild(value) {\n super.setFastWeightInitDuringBuild(value);\n if (this.layer != null) {\n this.layer.setFastWeightInitDuringBuild(value);\n }\n }\n /** @nocollapse */\n static fromConfig(cls, config, customObjects = {}) {\n const layerConfig = config['layer'];\n const layer = deserialize(layerConfig, customObjects);\n delete config['layer'];\n const newConfig = { layer };\n Object.assign(newConfig, config);\n return new cls(newConfig);\n }\n}\nexport class TimeDistributed extends Wrapper {\n constructor(args) {\n super(args);\n this.supportsMasking = true;\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n if (inputShape.length < 3) {\n throw new ValueError(`TimeDistributed layer expects an input shape >= 3D, but received ` +\n `input shape ${JSON.stringify(inputShape)}`);\n }\n this.inputSpec = [{ shape: inputShape }];\n const childInputShape = [inputShape[0]].concat(inputShape.slice(2));\n if (!this.layer.built) {\n this.layer.build(childInputShape);\n this.layer.built = true;\n }\n super.build(inputShape);\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const childInputShape = [inputShape[0]].concat(inputShape.slice(2));\n const childOutputShape = this.layer.computeOutputShape(childInputShape);\n const timesteps = inputShape[1];\n return [childOutputShape[0], timesteps].concat(childOutputShape.slice(1));\n }\n call(inputs, kwargs) {\n return tidy(() => {\n // TODO(cais): Add 'training' and 'useLearningPhase' to kwargs.\n inputs = getExactlyOneTensor(inputs);\n // Porting Note: In tfjs-layers, `inputs` are always concrete tensor\n // values. 
Hence the inputs can't have an undetermined first (batch)\n // dimension, which is why we always use the K.rnn approach here.\n const step = (inputs, states) => {\n // TODO(cais): Add useLearningPhase.\n // NOTE(cais): `layer.call` may return a length-1 array of Tensor in\n // some cases (e.g., `layer` is a `Sequential` instance), which is\n // why `getExactlyOneTensor` is used below.\n const output = getExactlyOneTensor(this.layer.call(inputs, kwargs));\n return [output, []];\n };\n const rnnOutputs = rnn(step, inputs, [], false /* goBackwards */, null /* mask */, null /* constants */, false /* unroll */, true /* needPerStepOutputs */);\n const y = rnnOutputs[1];\n // TODO(cais): Add activity regularization.\n // TODO(cais): Add useLearningPhase.\n return y;\n });\n }\n}\n/** @nocollapse */\nTimeDistributed.className = 'TimeDistributed';\nserialization.registerClass(TimeDistributed);\nexport function checkBidirectionalMergeMode(value) {\n generic_utils.checkStringTypeUnionValue(VALID_BIDIRECTIONAL_MERGE_MODES, 'BidirectionalMergeMode', value);\n}\nconst DEFAULT_BIDIRECTIONAL_MERGE_MODE = 'concat';\nexport class Bidirectional extends Wrapper {\n constructor(args) {\n super(args);\n // Note: When creating `this.forwardLayer`, the original Layer object\n // (`config.layer`) ought to be cloned. This is why we call\n // `getConfig()` followed by `deserialize()`. Without this cloning,\n // the layer names saved during serialization will incorrectly contain\n // the 'forward_' prefix. In Python Keras, this is done using\n // `copy.copy` (shallow copy), which does not have a simple equivalent\n // in JavaScript. JavaScript's `Object.assign()` does not copy\n // methods.\n const layerConfig = args.layer.getConfig();\n const forwDict = {};\n forwDict['className'] = args.layer.getClassName();\n forwDict['config'] = layerConfig;\n this.forwardLayer = deserialize(forwDict);\n layerConfig['goBackwards'] =\n layerConfig['goBackwards'] === true ? 
false : true;\n const backDict = {};\n backDict['className'] = args.layer.getClassName();\n backDict['config'] = layerConfig;\n this.backwardLayer = deserialize(backDict);\n this.forwardLayer.name = 'forward_' + this.forwardLayer.name;\n this.backwardLayer.name = 'backward_' + this.backwardLayer.name;\n this.mergeMode = args.mergeMode === undefined ?\n DEFAULT_BIDIRECTIONAL_MERGE_MODE :\n args.mergeMode;\n checkBidirectionalMergeMode(this.mergeMode);\n if (args.weights) {\n throw new NotImplementedError('weights support is not implemented for Bidirectional layer yet.');\n }\n this._stateful = args.layer.stateful;\n this.returnSequences = args.layer.returnSequences;\n this.returnState = args.layer.returnState;\n this.supportsMasking = true;\n this._trainable = true;\n this.inputSpec = args.layer.inputSpec;\n this.numConstants = null;\n }\n get trainable() {\n return this._trainable;\n }\n set trainable(value) {\n // Porting Note: the check of `this.layer` here is necessary due to the\n // way the `constructor` of this class is written (see Porting Note\n // above).\n this._trainable = value;\n if (this.forwardLayer != null) {\n this.forwardLayer.trainable = value;\n }\n if (this.backwardLayer != null) {\n this.backwardLayer.trainable = value;\n }\n }\n getWeights() {\n return this.forwardLayer.getWeights().concat(this.backwardLayer.getWeights());\n }\n setWeights(weights) {\n const numWeights = weights.length;\n const numeightsOver2 = Math.floor(numWeights / 2);\n this.forwardLayer.setWeights(weights.slice(0, numeightsOver2));\n this.backwardLayer.setWeights(weights.slice(numeightsOver2));\n }\n computeOutputShape(inputShape) {\n let layerShapes = this.forwardLayer.computeOutputShape(inputShape);\n if (!(Array.isArray(layerShapes) && Array.isArray(layerShapes[0]))) {\n layerShapes = [layerShapes];\n }\n layerShapes = layerShapes;\n let outputShape;\n let outputShapes;\n let stateShape;\n if (this.returnState) {\n stateShape = layerShapes.slice(1);\n outputShape = layerShapes[0];\n }\n else {\n outputShape = layerShapes[0];\n }\n outputShape = outputShape;\n if (this.mergeMode === 'concat') {\n outputShape[outputShape.length - 1] *= 2;\n outputShapes = [outputShape];\n }\n else if (this.mergeMode == null) {\n outputShapes = [outputShape, outputShape.slice()];\n }\n else {\n outputShapes = [outputShape];\n }\n if (this.returnState) {\n if (this.mergeMode == null) {\n return outputShapes.concat(stateShape).concat(stateShape.slice());\n }\n return [outputShape].concat(stateShape).concat(stateShape.slice());\n }\n return generic_utils.singletonOrArray(outputShapes);\n }\n apply(inputs, kwargs) {\n let initialState = kwargs == null ? null : kwargs['initialState'];\n let constants = kwargs == null ? 
null : kwargs['constants'];\n if (kwargs == null) {\n kwargs = {};\n }\n const standardized = standardizeArgs(inputs, initialState, constants, this.numConstants);\n inputs = standardized.inputs;\n initialState = standardized.initialState;\n constants = standardized.constants;\n if (Array.isArray(inputs)) {\n initialState = inputs.slice(1);\n inputs = inputs[0];\n }\n if ((initialState == null || initialState.length === 0) &&\n constants == null) {\n return super.apply(inputs, kwargs);\n }\n const additionalInputs = [];\n const additionalSpecs = [];\n if (initialState != null) {\n const numStates = initialState.length;\n if (numStates % 2 > 0) {\n throw new ValueError('When passing `initialState` to a Bidrectional RNN, ' +\n 'the state should be an Array containing the states of ' +\n 'the underlying RNNs.');\n }\n kwargs['initialState'] = initialState;\n additionalInputs.push(...initialState);\n const stateSpecs = initialState\n .map(state => new InputSpec({ shape: state.shape }));\n this.forwardLayer.stateSpec = stateSpecs.slice(0, numStates / 2);\n this.backwardLayer.stateSpec = stateSpecs.slice(numStates / 2);\n additionalSpecs.push(...stateSpecs);\n }\n if (constants != null) {\n throw new NotImplementedError('Support for constants in Bidirectional layers is not ' +\n 'implemented yet.');\n }\n const isSymbolicTensor = additionalInputs[0] instanceof SymbolicTensor;\n for (const tensor of additionalInputs) {\n if (tensor instanceof SymbolicTensor !== isSymbolicTensor) {\n throw new ValueError('The initial state of a Bidirectional layer cannot be ' +\n 'specified as a mix of symbolic and non-symbolic tensors');\n }\n }\n if (isSymbolicTensor) {\n // Compute the full input and specs, including the states.\n const fullInput = [inputs].concat(additionalInputs);\n const fullInputSpec = this.inputSpec.concat(additionalSpecs);\n // Perform the call temporarily and replace inputSpec.\n // Note: with initial states symbolic calls and non-symbolic calls to\n // this method differ in how the initial states are passed. For\n // symbolic calls, the initial states are passed in the first arg, as\n // an Array of SymbolicTensors; for non-symbolic calls, they are\n // passed in the second arg as a part of the kwargs. 
Hence the need to\n // temporarily modify inputSpec here.\n // TODO(cais): Make refactoring so that this hacky code below is no\n // longer needed.\n const originalInputSpec = this.inputSpec;\n this.inputSpec = fullInputSpec;\n const output = super.apply(fullInput, kwargs);\n this.inputSpec = originalInputSpec;\n return output;\n }\n else {\n return super.apply(inputs, kwargs);\n }\n }\n call(inputs, kwargs) {\n return tidy(() => {\n const initialState = kwargs['initialState'];\n let y;\n let yRev;\n if (initialState == null) {\n y = this.forwardLayer.call(inputs, kwargs);\n yRev = this.backwardLayer.call(inputs, kwargs);\n }\n else {\n const forwardState = initialState.slice(0, initialState.length / 2);\n const backwardState = initialState.slice(initialState.length / 2);\n y = this.forwardLayer.call(inputs, Object.assign(kwargs, { initialState: forwardState }));\n yRev = this.backwardLayer.call(inputs, Object.assign(kwargs, { initialState: backwardState }));\n }\n let states;\n if (this.returnState) {\n if (Array.isArray(y)) {\n states = y.slice(1).concat(yRev.slice(1));\n }\n else {\n }\n y = y[0];\n yRev = yRev[0];\n }\n if (this.returnSequences) {\n yRev = tfc.reverse(yRev, 1);\n }\n let output;\n if (this.mergeMode === 'concat') {\n output = K.concatenate([y, yRev]);\n }\n else if (this.mergeMode === 'sum') {\n output = tfc.add(y, yRev);\n }\n else if (this.mergeMode === 'ave') {\n output = tfc.mul(.5, tfc.add(y, yRev));\n }\n else if (this.mergeMode === 'mul') {\n output = tfc.mul(y, yRev);\n }\n else if (this.mergeMode == null) {\n output = [y, yRev];\n }\n // TODO(cais): Properly set learning phase.\n if (this.returnState) {\n if (this.mergeMode == null) {\n return output.concat(states);\n }\n return [output].concat(states);\n }\n return output;\n });\n }\n resetStates(states) {\n this.forwardLayer.resetStates();\n this.backwardLayer.resetStates();\n }\n build(inputShape) {\n nameScope(this.forwardLayer.name, () => {\n this.forwardLayer.build(inputShape);\n });\n nameScope(this.backwardLayer.name, () => {\n this.backwardLayer.build(inputShape);\n });\n this.built = true;\n }\n computeMask(inputs, mask) {\n if (Array.isArray(mask)) {\n mask = mask[0];\n }\n let outputMask;\n if (this.returnSequences) {\n if (this.mergeMode == null) {\n outputMask = [mask, mask];\n }\n else {\n outputMask = mask;\n }\n }\n else {\n if (this.mergeMode == null) {\n outputMask = [null, null];\n }\n else {\n outputMask = null;\n }\n }\n if (this.returnState) {\n const states = this.forwardLayer.states;\n const stateMask = states.map(state => null);\n if (Array.isArray(outputMask)) {\n return outputMask.concat(stateMask).concat(stateMask);\n }\n else {\n return [outputMask].concat(stateMask).concat(stateMask);\n }\n }\n else {\n return outputMask;\n }\n }\n get trainableWeights() {\n return this.forwardLayer.trainableWeights.concat(this.backwardLayer.trainableWeights);\n }\n get nonTrainableWeights() {\n return this.forwardLayer.nonTrainableWeights.concat(this.backwardLayer.nonTrainableWeights);\n }\n // TODO(cais): Implement constraints().\n setFastWeightInitDuringBuild(value) {\n super.setFastWeightInitDuringBuild(value);\n if (this.forwardLayer != null) {\n this.forwardLayer.setFastWeightInitDuringBuild(value);\n }\n if (this.backwardLayer != null) {\n this.backwardLayer.setFastWeightInitDuringBuild(value);\n }\n }\n getConfig() {\n const config = {\n 'mergeMode': this.mergeMode,\n };\n // TODO(cais): Add logic for `numConstants` once the property is added.\n const baseConfig = super.getConfig();\n 
Object.assign(config, baseConfig);\n return config;\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n const rnnLayer = deserialize(config['layer']);\n delete config['layer'];\n // TODO(cais): Add logic for `numConstants` once the property is added.\n if (config['numConstants'] != null) {\n throw new NotImplementedError(`Deserialization of a Bidirectional layer with numConstants ` +\n `present is not supported yet.`);\n }\n // tslint:disable-next-line:no-any\n const newConfig = config;\n newConfig['layer'] = rnnLayer;\n return new cls(newConfig);\n }\n}\n/** @nocollapse */\nBidirectional.className = 'Bidirectional';\nserialization.registerClass(Bidirectional);\n//# sourceMappingURL=wrappers.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport { InputLayer } from './engine/input_layer';\nimport { Layer } from './engine/topology';\nimport { input } from './exports';\nimport { ELU, LeakyReLU, PReLU, ReLU, Softmax, ThresholdedReLU } from './layers/advanced_activations';\nimport { Conv1D, Conv2D, Conv2DTranspose, Conv3D, Cropping2D, SeparableConv2D, UpSampling2D } from './layers/convolutional';\nimport { DepthwiseConv2D } from './layers/convolutional_depthwise';\nimport { ConvLSTM2D, ConvLSTM2DCell } from './layers/convolutional_recurrent';\nimport { Activation, Dense, Dropout, Flatten, Masking, Permute, RepeatVector, Reshape, SpatialDropout1D } from './layers/core';\nimport { Embedding } from './layers/embeddings';\nimport { Add, Average, Concatenate, Dot, Maximum, Minimum, Multiply } from './layers/merge';\nimport { AlphaDropout, GaussianDropout, GaussianNoise } from './layers/noise';\nimport { BatchNormalization, LayerNormalization } from './layers/normalization';\nimport { ZeroPadding2D } from './layers/padding';\nimport { AveragePooling1D, AveragePooling2D, AveragePooling3D, GlobalAveragePooling1D, GlobalAveragePooling2D, GlobalMaxPooling1D, GlobalMaxPooling2D, MaxPooling1D, MaxPooling2D, MaxPooling3D } from './layers/pooling';\nimport { GRU, GRUCell, LSTM, LSTMCell, RNN, RNNCell, SimpleRNN, SimpleRNNCell, StackedRNNCells } from './layers/recurrent';\nimport { Bidirectional, TimeDistributed } from './layers/wrappers';\n// TODO(cais): Add doc string to all the public static functions in this\n// class; include exectuable JavaScript code snippets where applicable\n// (b/74074458).\n// Input Layer.\n/**\n * An input layer is an entry point into a `tf.LayersModel`.\n *\n * `InputLayer` is generated automatically for `tf.Sequential`` models by\n * specifying the `inputshape` or `batchInputShape` for the first layer. It\n * should not be specified explicitly. However, it can be useful sometimes,\n * e.g., when constructing a sequential model from a subset of another\n * sequential model's layers. Like the code snippet below shows.\n *\n * ```js\n * // Define a model which simply adds two inputs.\n * const model1 = tf.sequential();\n * model1.add(tf.layers.dense({inputShape: [4], units: 3, activation: 'relu'}));\n * model1.add(tf.layers.dense({units: 1, activation: 'sigmoid'}));\n * model1.summary();\n * model1.predict(tf.zeros([1, 4])).print();\n *\n * // Construct another model, reusing the second layer of `model1` while\n * // not using the first layer of `model1`. 
Note that you cannot add the second\n * // layer of `model` directly as the first layer of the new sequential model,\n * // because doing so will lead to an error related to the fact that the layer\n * // is not an input layer. Instead, you need to create an `inputLayer` and add\n * // it to the new sequential model before adding the reused layer.\n * const model2 = tf.sequential();\n * // Use an inputShape that matches the input shape of `model1`'s second\n * // layer.\n * model2.add(tf.layers.inputLayer({inputShape: [3]}));\n * model2.add(model1.layers[1]);\n * model2.summary();\n * model2.predict(tf.zeros([1, 3])).print();\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Inputs', namespace: 'layers'}\n */\nexport function inputLayer(args) {\n return new InputLayer(args);\n}\n// Advanced Activation Layers.\n/**\n * Exponetial Linear Unit (ELU).\n *\n * It follows:\n * `f(x) = alpha * (exp(x) - 1.) for x < 0`,\n * `f(x) = x for x >= 0`.\n *\n * Input shape:\n * Arbitrary. Use the configuration `inputShape` when using this layer as the\n * first layer in a model.\n *\n * Output shape:\n * Same shape as the input.\n *\n * References:\n * - [Fast and Accurate Deep Network Learning by Exponential Linear Units\n * (ELUs)](https://arxiv.org/abs/1511.07289v1)\n *\n * @doc {\n * heading: 'Layers',\n * subheading: 'Advanced Activation',\n * namespace: 'layers'\n * }\n */\nexport function elu(args) {\n return new ELU(args);\n}\n/**\n * Rectified Linear Unit activation function.\n *\n * Input shape:\n * Arbitrary. Use the config field `inputShape` (Array of integers, does\n * not include the sample axis) when using this layer as the first layer\n * in a model.\n *\n * Output shape:\n * Same shape as the input.\n *\n * @doc {\n * heading: 'Layers',\n * subheading: 'Advanced Activation',\n * namespace: 'layers'\n * }\n */\nexport function reLU(args) {\n return new ReLU(args);\n}\n/**\n * Leaky version of a rectified linear unit.\n *\n * It allows a small gradient when the unit is not active:\n * `f(x) = alpha * x for x < 0.`\n * `f(x) = x for x >= 0.`\n *\n * Input shape:\n * Arbitrary. Use the configuration `inputShape` when using this layer as the\n * first layer in a model.\n *\n * Output shape:\n * Same shape as the input.\n *\n * @doc {\n * heading: 'Layers',\n * subheading: 'Advanced Activation',\n * namespace: 'layers'\n * }\n */\nexport function leakyReLU(args) {\n return new LeakyReLU(args);\n}\n/**\n * Parameterized version of a leaky rectified linear unit.\n *\n * It follows\n * `f(x) = alpha * x for x < 0.`\n * `f(x) = x for x >= 0.`\n * wherein `alpha` is a trainable weight.\n *\n * Input shape:\n * Arbitrary. Use the configuration `inputShape` when using this layer as the\n * first layer in a model.\n *\n * Output shape:\n * Same shape as the input.\n *\n * @doc {\n * heading: 'Layers',\n * subheading: 'Advanced Activation',\n * namespace: 'layers'\n * }\n */\nexport function prelu(args) {\n return new PReLU(args);\n}\n/**\n * Softmax activation layer.\n *\n * Input shape:\n * Arbitrary. Use the configuration `inputShape` when using this layer as the\n * first layer in a model.\n *\n * Output shape:\n * Same shape as the input.\n *\n * @doc {\n * heading: 'Layers',\n * subheading: 'Advanced Activation',\n * namespace: 'layers'\n * }\n */\nexport function softmax(args) {\n return new Softmax(args);\n}\n/**\n * Thresholded Rectified Linear Unit.\n *\n * It follows:\n * `f(x) = x for x > theta`,\n * `f(x) = 0 otherwise`.\n *\n * Input shape:\n * Arbitrary. 
Use the configuration `inputShape` when using this layer as the\n * first layer in a model.\n *\n * Output shape:\n * Same shape as the input.\n *\n * References:\n * - [Zero-Bias Autoencoders and the Benefits of Co-Adapting\n * Features](http://arxiv.org/abs/1402.3337)\n *\n * @doc {\n * heading: 'Layers',\n * subheading: 'Advanced Activation',\n * namespace: 'layers'\n * }\n */\nexport function thresholdedReLU(args) {\n return new ThresholdedReLU(args);\n}\n// Convolutional Layers.\n/**\n * 1D convolution layer (e.g., temporal convolution).\n *\n * This layer creates a convolution kernel that is convolved\n * with the layer input over a single spatial (or temporal) dimension\n * to produce a tensor of outputs.\n *\n * If `use_bias` is True, a bias vector is created and added to the outputs.\n *\n * If `activation` is not `null`, it is applied to the outputs as well.\n *\n * When using this layer as the first layer in a model, provide an\n * `inputShape` argument `Array` or `null`.\n *\n * For example, `inputShape` would be:\n * - `[10, 128]` for sequences of 10 vectors of 128-dimensional vectors\n * - `[null, 128]` for variable-length sequences of 128-dimensional vectors.\n *\n * @doc {heading: 'Layers', subheading: 'Convolutional', namespace: 'layers'}\n */\nexport function conv1d(args) {\n return new Conv1D(args);\n}\n/**\n * 2D convolution layer (e.g. spatial convolution over images).\n *\n * This layer creates a convolution kernel that is convolved\n * with the layer input to produce a tensor of outputs.\n *\n * If `useBias` is True, a bias vector is created and added to the outputs.\n *\n * If `activation` is not `null`, it is applied to the outputs as well.\n *\n * When using this layer as the first layer in a model,\n * provide the keyword argument `inputShape`\n * (Array of integers, does not include the sample axis),\n * e.g. `inputShape=[128, 128, 3]` for 128x128 RGB pictures\n * in `dataFormat='channelsLast'`.\n *\n * @doc {heading: 'Layers', subheading: 'Convolutional', namespace: 'layers'}\n */\nexport function conv2d(args) {\n return new Conv2D(args);\n}\n/**\n * Transposed convolutional layer (sometimes called Deconvolution).\n *\n * The need for transposed convolutions generally arises\n * from the desire to use a transformation going in the opposite direction of\n * a normal convolution, i.e., from something that has the shape of the output\n * of some convolution to something that has the shape of its input while\n * maintaining a connectivity pattern that is compatible with said\n * convolution.\n *\n * When using this layer as the first layer in a model, provide the\n * configuration `inputShape` (`Array` of integers, does not include the\n * sample axis), e.g., `inputShape: [128, 128, 3]` for 128x128 RGB pictures in\n * `dataFormat: 'channelsLast'`.\n *\n * Input shape:\n * 4D tensor with shape:\n * `[batch, channels, rows, cols]` if `dataFormat` is `'channelsFirst'`.\n * or 4D tensor with shape\n * `[batch, rows, cols, channels]` if `dataFormat` is `'channelsLast`.\n *\n * Output shape:\n * 4D tensor with shape:\n * `[batch, filters, newRows, newCols]` if `dataFormat` is\n * `'channelsFirst'`. 
or 4D tensor with shape:\n * `[batch, newRows, newCols, filters]` if `dataFormat` is `'channelsLast'`.\n *\n * References:\n * - [A guide to convolution arithmetic for deep\n * learning](https://arxiv.org/abs/1603.07285v1)\n * - [Deconvolutional\n * Networks](http://www.matthewzeiler.com/pubs/cvpr2010/cvpr2010.pdf)\n *\n * @doc {heading: 'Layers', subheading: 'Convolutional', namespace: 'layers'}\n */\nexport function conv2dTranspose(args) {\n return new Conv2DTranspose(args);\n}\n/**\n * 3D convolution layer (e.g. spatial convolution over volumes).\n *\n * This layer creates a convolution kernel that is convolved\n * with the layer input to produce a tensor of outputs.\n *\n * If `useBias` is True, a bias vector is created and added to the outputs.\n *\n * If `activation` is not `null`, it is applied to the outputs as well.\n *\n * When using this layer as the first layer in a model,\n * provide the keyword argument `inputShape`\n * (Array of integers, does not include the sample axis),\n * e.g. `inputShape=[128, 128, 128, 1]` for 128x128x128 grayscale volumes\n * in `dataFormat='channelsLast'`.\n *\n * @doc {heading: 'Layers', subheading: 'Convolutional', namespace: 'layers'}\n */\nexport function conv3d(args) {\n return new Conv3D(args);\n}\n/**\n * Depthwise separable 2D convolution.\n *\n * Separable convolution consists of first performing\n * a depthwise spatial convolution\n * (which acts on each input channel separately)\n * followed by a pointwise convolution which mixes together the resulting\n * output channels. The `depthMultiplier` argument controls how many\n * output channels are generated per input channel in the depthwise step.\n *\n * Intuitively, separable convolutions can be understood as\n * a way to factorize a convolution kernel into two smaller kernels,\n * or as an extreme version of an Inception block.\n *\n * Input shape:\n * 4D tensor with shape:\n * `[batch, channels, rows, cols]` if data_format='channelsFirst'\n * or 4D tensor with shape:\n * `[batch, rows, cols, channels]` if data_format='channelsLast'.\n *\n * Output shape:\n * 4D tensor with shape:\n * `[batch, filters, newRows, newCols]` if data_format='channelsFirst'\n * or 4D tensor with shape:\n * `[batch, newRows, newCols, filters]` if data_format='channelsLast'.\n * `rows` and `cols` values might have changed due to padding.\n *\n * @doc {heading: 'Layers', subheading: 'Convolutional', namespace: 'layers'}\n */\nexport function separableConv2d(args) {\n return new SeparableConv2D(args);\n}\n/**\n * Cropping layer for 2D input (e.g., image).\n *\n * This layer can crop an input\n * at the top, bottom, left and right side of an image tensor.\n *\n * Input shape:\n * 4D tensor with shape:\n * - If `dataFormat` is `\"channelsLast\"`:\n * `[batch, rows, cols, channels]`\n * - If `data_format` is `\"channels_first\"`:\n * `[batch, channels, rows, cols]`.\n *\n * Output shape:\n * 4D with shape:\n * - If `dataFormat` is `\"channelsLast\"`:\n * `[batch, croppedRows, croppedCols, channels]`\n * - If `dataFormat` is `\"channelsFirst\"`:\n * `[batch, channels, croppedRows, croppedCols]`.\n *\n * Examples\n * ```js\n *\n * const model = tf.sequential();\n * model.add(tf.layers.cropping2D({cropping:[[2, 2], [2, 2]],\n * inputShape: [128, 128, 3]}));\n * //now output shape is [batch, 124, 124, 3]\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Convolutional', namespace: 'layers'}\n */\nexport function cropping2D(args) {\n return new Cropping2D(args);\n}\n/**\n * Upsampling layer for 2D inputs.\n *\n * 
Repeats the rows and columns of the data\n * by size[0] and size[1] respectively.\n *\n *\n * Input shape:\n * 4D tensor with shape:\n * - If `dataFormat` is `\"channelsLast\"`:\n * `[batch, rows, cols, channels]`\n * - If `dataFormat` is `\"channelsFirst\"`:\n * `[batch, channels, rows, cols]`\n *\n * Output shape:\n * 4D tensor with shape:\n * - If `dataFormat` is `\"channelsLast\"`:\n * `[batch, upsampledRows, upsampledCols, channels]`\n * - If `dataFormat` is `\"channelsFirst\"`:\n * `[batch, channels, upsampledRows, upsampledCols]`\n *\n *\n * @doc {heading: 'Layers', subheading: 'Convolutional', namespace: 'layers'}\n */\nexport function upSampling2d(args) {\n return new UpSampling2D(args);\n}\n// Convolutional(depthwise) Layers.\n/**\n * Depthwise separable 2D convolution.\n *\n * Depthwise Separable convolutions consists in performing just the first step\n * in a depthwise spatial convolution (which acts on each input channel\n * separately). The `depthMultplier` argument controls how many output channels\n * are generated per input channel in the depthwise step.\n *\n * @doc {heading: 'Layers', subheading: 'Convolutional', namespace: 'layers'}\n */\nexport function depthwiseConv2d(args) {\n return new DepthwiseConv2D(args);\n}\n// Basic Layers.\n/**\n * Applies an activation function to an output.\n *\n * This layer applies element-wise activation function. Other layers, notably\n * `dense` can also apply activation functions. Use this isolated activation\n * function to extract the values before and after the\n * activation. For instance:\n *\n * ```js\n * const input = tf.input({shape: [5]});\n * const denseLayer = tf.layers.dense({units: 1});\n * const activationLayer = tf.layers.activation({activation: 'relu6'});\n *\n * // Obtain the output symbolic tensors by applying the layers in order.\n * const denseOutput = denseLayer.apply(input);\n * const activationOutput = activationLayer.apply(denseOutput);\n *\n * // Create the model based on the inputs.\n * const model = tf.model({\n * inputs: input,\n * outputs: [denseOutput, activationOutput]\n * });\n *\n * // Collect both outputs and print separately.\n * const [denseOut, activationOut] = model.predict(tf.randomNormal([6, 5]));\n * denseOut.print();\n * activationOut.print();\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Basic', namespace: 'layers'}\n */\nexport function activation(args) {\n return new Activation(args);\n}\n/**\n * Creates a dense (fully connected) layer.\n *\n * This layer implements the operation:\n * `output = activation(dot(input, kernel) + bias)`\n *\n * `activation` is the element-wise activation function\n * passed as the `activation` argument.\n *\n * `kernel` is a weights matrix created by the layer.\n *\n * `bias` is a bias vector created by the layer (only applicable if `useBias`\n * is `true`).\n *\n * **Input shape:**\n *\n * nD `tf.Tensor` with shape: `(batchSize, ..., inputDim)`.\n *\n * The most common situation would be\n * a 2D input with shape `(batchSize, inputDim)`.\n *\n * **Output shape:**\n *\n * nD tensor with shape: `(batchSize, ..., units)`.\n *\n * For instance, for a 2D input with shape `(batchSize, inputDim)`,\n * the output would have shape `(batchSize, units)`.\n *\n * Note: if the input to the layer has a rank greater than 2, then it is\n * flattened prior to the initial dot product with the kernel.\n *\n * @doc {heading: 'Layers', subheading: 'Basic', namespace: 'layers'}\n */\nexport function dense(args) {\n return new Dense(args);\n}\n/**\n * Applies\n * 
[dropout](http://www.cs.toronto.edu/~rsalakhu/papers/srivastava14a.pdf) to\n * the input.\n *\n * Dropout consists in randomly setting a fraction `rate` of input units to 0 at\n * each update during training time, which helps prevent overfitting.\n *\n * @doc {heading: 'Layers', subheading: 'Basic', namespace: 'layers'}\n */\nexport function dropout(args) {\n return new Dropout(args);\n}\n/**\n * Spatial 1D version of Dropout.\n *\n * This Layer type performs the same function as the Dropout layer, but it drops\n * entire 1D feature maps instead of individual elements. For example, if an\n * input example consists of 3 timesteps and the feature map for each timestep\n * has a size of 4, a `spatialDropout1d` layer may zero out the feature maps\n * of the 1st timesteps and 2nd timesteps completely while sparing all feature\n * elements of the 3rd timestep.\n *\n * If adjacent frames (timesteps) are strongly correlated (as is normally the\n * case in early convolution layers), regular dropout will not regularize the\n * activation and will otherwise just result in merely an effective learning\n * rate decrease. In this case, `spatialDropout1d` will help promote\n * independence among feature maps and should be used instead.\n *\n * **Arguments:**\n * rate: A floating-point number >=0 and <=1. Fraction of the input elements\n * to drop.\n *\n * **Input shape:**\n * 3D tensor with shape `(samples, timesteps, channels)`.\n *\n * **Output shape:**\n * Same as the input shape.\n *\n * References:\n * - [Efficient Object Localization Using Convolutional\n * Networks](https://arxiv.org/abs/1411.4280)\n *\n * @doc {heading: 'Layers', subheading: 'Basic', namespace: 'layers'}\n */\nexport function spatialDropout1d(args) {\n return new SpatialDropout1D(args);\n}\n/**\n * Flattens the input. Does not affect the batch size.\n *\n * A `Flatten` layer flattens each batch in its inputs to 1D (making the output\n * 2D).\n *\n * For example:\n *\n * ```js\n * const input = tf.input({shape: [4, 3]});\n * const flattenLayer = tf.layers.flatten();\n * // Inspect the inferred output shape of the flatten layer, which\n * // equals `[null, 12]`. The 2nd dimension is 4 * 3, i.e., the result of the\n * // flattening. (The 1st dimension is the undermined batch size.)\n * console.log(JSON.stringify(flattenLayer.apply(input).shape));\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Basic', namespace: 'layers'}\n */\nexport function flatten(args) {\n return new Flatten(args);\n}\n/**\n * Repeats the input n times in a new dimension.\n *\n * ```js\n * const model = tf.sequential();\n * model.add(tf.layers.repeatVector({n: 4, inputShape: [2]}));\n * const x = tf.tensor2d([[10, 20]]);\n * // Use the model to do inference on a data point the model hasn't see\n * model.predict(x).print();\n * // output shape is now [batch, 2, 4]\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Basic', namespace: 'layers'}\n */\nexport function repeatVector(args) {\n return new RepeatVector(args);\n}\n/**\n * Reshapes an input to a certain shape.\n *\n * ```js\n * const input = tf.input({shape: [4, 3]});\n * const reshapeLayer = tf.layers.reshape({targetShape: [2, 6]});\n * // Inspect the inferred output shape of the Reshape layer, which\n * // equals `[null, 2, 6]`. 
(The 1st dimension is the undermined batch size.)\n * console.log(JSON.stringify(reshapeLayer.apply(input).shape));\n * ```\n *\n * Input shape:\n * Arbitrary, although all dimensions in the input shape must be fixed.\n * Use the configuration `inputShape` when using this layer as the\n * first layer in a model.\n *\n *\n * Output shape:\n * [batchSize, targetShape[0], targetShape[1], ...,\n * targetShape[targetShape.length - 1]].\n *\n * @doc {heading: 'Layers', subheading: 'Basic', namespace: 'layers'}\n */\nexport function reshape(args) {\n return new Reshape(args);\n}\n/**\n * Permutes the dimensions of the input according to a given pattern.\n *\n * Useful for, e.g., connecting RNNs and convnets together.\n *\n * Example:\n *\n * ```js\n * const model = tf.sequential();\n * model.add(tf.layers.permute({\n * dims: [2, 1],\n * inputShape: [10, 64]\n * }));\n * console.log(model.outputShape);\n * // Now model's output shape is [null, 64, 10], where null is the\n * // unpermuted sample (batch) dimension.\n * ```\n *\n * Input shape:\n * Arbitrary. Use the configuration field `inputShape` when using this\n * layer as the first layer in a model.\n *\n * Output shape:\n * Same rank as the input shape, but with the dimensions re-ordered (i.e.,\n * permuted) according to the `dims` configuration of this layer.\n *\n * @doc {heading: 'Layers', subheading: 'Basic', namespace: 'layers'}\n */\nexport function permute(args) {\n return new Permute(args);\n}\n/**\n * Maps positive integers (indices) into dense vectors of fixed size.\n * eg. [[4], [20]] -> [[0.25, 0.1], [0.6, -0.2]]\n *\n * **Input shape:** 2D tensor with shape: `[batchSize, sequenceLength]`.\n *\n * **Output shape:** 3D tensor with shape: `[batchSize, sequenceLength,\n * outputDim]`.\n *\n * @doc {heading: 'Layers', subheading: 'Basic', namespace: 'layers'}\n */\nexport function embedding(args) {\n return new Embedding(args);\n}\n// Merge Layers.\n/**\n * Layer that performs element-wise addition on an `Array` of inputs.\n *\n * It takes as input a list of tensors, all of the same shape, and returns a\n * single tensor (also of the same shape). The inputs are specified as an\n * `Array` when the `apply` method of the `Add` layer instance is called. For\n * example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const addLayer = tf.layers.add();\n * const sum = addLayer.apply([input1, input2]);\n * console.log(JSON.stringify(sum.shape));\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Merge', namespace: 'layers'}\n */\nexport function add(args) {\n return new Add(args);\n}\n/**\n * Layer that performs element-wise averaging on an `Array` of inputs.\n *\n * It takes as input a list of tensors, all of the same shape, and returns a\n * single tensor (also of the same shape). 
For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const averageLayer = tf.layers.average();\n * const average = averageLayer.apply([input1, input2]);\n * console.log(JSON.stringify(average.shape));\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Merge', namespace: 'layers'}\n */\nexport function average(args) {\n return new Average(args);\n}\n/**\n * Layer that concatenates an `Array` of inputs.\n *\n * It takes a list of tensors, all of the same shape except for the\n * concatenation axis, and returns a single tensor, the concatenation\n * of all inputs. For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 3]});\n * const concatLayer = tf.layers.concatenate();\n * const output = concatLayer.apply([input1, input2]);\n * console.log(JSON.stringify(output.shape));\n * // You get [null, 2, 5], with the first dimension as the undetermined batch\n * // dimension. The last dimension (5) is the result of concatenating the\n * // last dimensions of the inputs (2 and 3).\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Merge', namespace: 'layers'}\n */\nexport function concatenate(args) {\n return new Concatenate(args);\n}\n/**\n * Layer that computes the element-wise maximum an `Array` of inputs.\n *\n * It takes as input a list of tensors, all of the same shape and returns a\n * single tensor (also of the same shape). For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const maxLayer = tf.layers.maximum();\n * const max = maxLayer.apply([input1, input2]);\n * console.log(JSON.stringify(max.shape));\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Merge', namespace: 'layers'}\n */\nexport function maximum(args) {\n return new Maximum(args);\n}\n/**\n * Layer that computes the element-wise minimum of an `Array` of inputs.\n *\n * It takes as input a list of tensors, all of the same shape and returns a\n * single tensor (also of the same shape). 
For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const minLayer = tf.layers.minimum();\n * const min = minLayer.apply([input1, input2]);\n * console.log(JSON.stringify(min.shape));\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Merge', namespace: 'layers'}\n */\nexport function minimum(args) {\n return new Minimum(args);\n}\n/**\n * Layer that multiplies (element-wise) an `Array` of inputs.\n *\n * It takes as input an Array of tensors, all of the same\n * shape, and returns a single tensor (also of the same shape).\n * For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const input3 = tf.input({shape: [2, 2]});\n * const multiplyLayer = tf.layers.multiply();\n * const product = multiplyLayer.apply([input1, input2, input3]);\n * console.log(product.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n *\n * @doc {heading: 'Layers', subheading: 'Merge', namespace: 'layers'}\n */\nexport function multiply(args) {\n return new Multiply(args);\n}\n/**\n * Layer that computes a dot product between samples in two tensors.\n *\n * E.g., if applied to a list of two tensors `a` and `b` both of shape\n * `[batchSize, n]`, the output will be a tensor of shape `[batchSize, 1]`,\n * where each entry at index `[i, 0]` will be the dot product between\n * `a[i, :]` and `b[i, :]`.\n *\n * Example:\n *\n * ```js\n * const dotLayer = tf.layers.dot({axes: -1});\n * const x1 = tf.tensor2d([[10, 20], [30, 40]]);\n * const x2 = tf.tensor2d([[-1, -2], [-3, -4]]);\n *\n * // Invoke the layer's apply() method in eager (imperative) mode.\n * const y = dotLayer.apply([x1, x2]);\n * y.print();\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Merge', namespace: 'layers'}\n */\nexport function dot(args) {\n return new Dot(args);\n}\n// Normalization Layers.\n/**\n * Batch normalization layer (Ioffe and Szegedy, 2014).\n *\n * Normalize the activations of the previous layer at each batch,\n * i.e. applies a transformation that maintains the mean activation\n * close to 0 and the activation standard deviation close to 1.\n *\n * Input shape:\n * Arbitrary. Use the keyword argument `inputShape` (Array of integers, does\n * not include the sample axis) when calling the constructor of this class,\n * if this layer is used as a first layer in a model.\n *\n * Output shape:\n * Same shape as input.\n *\n * References:\n * - [Batch Normalization: Accelerating Deep Network Training by Reducing\n * Internal Covariate Shift](https://arxiv.org/abs/1502.03167)\n *\n * @doc {heading: 'Layers', subheading: 'Normalization', namespace: 'layers'}\n */\nexport function batchNormalization(args) {\n return new BatchNormalization(args);\n}\n/**\n * Layer-normalization layer (Ba et al., 2016).\n *\n * Normalizes the activations of the previous layer for each given example in a\n * batch independently, instead of across a batch like in `batchNormalization`.\n * In other words, this layer applies a transformation that maintanis the mean\n * activation within each example close to0 and activation variance close to 1.\n *\n * Input shape:\n * Arbitrary. 
Use the argument `inputShape` when using this layer as the first\n * layer in a model.\n *\n * Output shape:\n * Same as input.\n *\n * References:\n * - [Layer Normalization](https://arxiv.org/abs/1607.06450)\n *\n * @doc {heading: 'Layers', subheading: 'Normalization', namespace: 'layers'}\n */\nexport function layerNormalization(args) {\n return new LayerNormalization(args);\n}\n// Padding Layers.\n/**\n * Zero-padding layer for 2D input (e.g., image).\n *\n * This layer can add rows and columns of zeros\n * at the top, bottom, left and right side of an image tensor.\n *\n * Input shape:\n * 4D tensor with shape:\n * - If `dataFormat` is `\"channelsLast\"`:\n * `[batch, rows, cols, channels]`\n * - If `data_format` is `\"channels_first\"`:\n * `[batch, channels, rows, cols]`.\n *\n * Output shape:\n * 4D with shape:\n * - If `dataFormat` is `\"channelsLast\"`:\n * `[batch, paddedRows, paddedCols, channels]`\n * - If `dataFormat` is `\"channelsFirst\"`:\n * `[batch, channels, paddedRows, paddedCols]`.\n *\n * @doc {heading: 'Layers', subheading: 'Padding', namespace: 'layers'}\n */\nexport function zeroPadding2d(args) {\n return new ZeroPadding2D(args);\n}\n// Pooling Layers.\n/**\n * Average pooling operation for spatial data.\n *\n * Input shape: `[batchSize, inLength, channels]`\n *\n * Output shape: `[batchSize, pooledLength, channels]`\n *\n * `tf.avgPool1d` is an alias.\n *\n * @doc {heading: 'Layers', subheading: 'Pooling', namespace: 'layers'}\n */\nexport function averagePooling1d(args) {\n return new AveragePooling1D(args);\n}\nexport function avgPool1d(args) {\n return averagePooling1d(args);\n}\n// For backwards compatibility.\n// See https://github.com/tensorflow/tfjs/issues/152\nexport function avgPooling1d(args) {\n return averagePooling1d(args);\n}\n/**\n * Average pooling operation for spatial data.\n *\n * Input shape:\n * - If `dataFormat === CHANNEL_LAST`:\n * 4D tensor with shape:\n * `[batchSize, rows, cols, channels]`\n * - If `dataFormat === CHANNEL_FIRST`:\n * 4D tensor with shape:\n * `[batchSize, channels, rows, cols]`\n *\n * Output shape\n * - If `dataFormat === CHANNEL_LAST`:\n * 4D tensor with shape:\n * `[batchSize, pooleRows, pooledCols, channels]`\n * - If `dataFormat === CHANNEL_FIRST`:\n * 4D tensor with shape:\n * `[batchSize, channels, pooleRows, pooledCols]`\n *\n * `tf.avgPool2d` is an alias.\n *\n * @doc {heading: 'Layers', subheading: 'Pooling', namespace: 'layers'}\n */\nexport function averagePooling2d(args) {\n return new AveragePooling2D(args);\n}\nexport function avgPool2d(args) {\n return averagePooling2d(args);\n}\n// For backwards compatibility.\n// See https://github.com/tensorflow/tfjs/issues/152\nexport function avgPooling2d(args) {\n return averagePooling2d(args);\n}\n/**\n * Average pooling operation for 3D data.\n *\n * Input shape\n * - If `dataFormat === channelsLast`:\n * 5D tensor with shape:\n * `[batchSize, depths, rows, cols, channels]`\n * - If `dataFormat === channelsFirst`:\n * 4D tensor with shape:\n * `[batchSize, channels, depths, rows, cols]`\n *\n * Output shape\n * - If `dataFormat=channelsLast`:\n * 5D tensor with shape:\n * `[batchSize, pooledDepths, pooledRows, pooledCols, channels]`\n * - If `dataFormat=channelsFirst`:\n * 5D tensor with shape:\n * `[batchSize, channels, pooledDepths, pooledRows, pooledCols]`\n *\n * @doc {heading: 'Layers', subheading: 'Pooling', namespace: 'layers'}\n */\nexport function averagePooling3d(args) {\n return new AveragePooling3D(args);\n}\nexport function avgPool3d(args) {\n return 
averagePooling3d(args);\n}\n// For backwards compatibility.\n// See https://github.com/tensorflow/tfjs/issues/152\nexport function avgPooling3d(args) {\n return averagePooling3d(args);\n}\n/**\n * Global average pooling operation for temporal data.\n *\n * Input Shape: 3D tensor with shape: `[batchSize, steps, features]`.\n *\n * Output Shape:2D tensor with shape: `[batchSize, features]`.\n *\n * @doc {heading: 'Layers', subheading: 'Pooling', namespace: 'layers'}\n */\nexport function globalAveragePooling1d(args) {\n return new GlobalAveragePooling1D(args);\n}\n/**\n * Global average pooling operation for spatial data.\n *\n * Input shape:\n * - If `dataFormat` is `CHANNEL_LAST`:\n * 4D tensor with shape: `[batchSize, rows, cols, channels]`.\n * - If `dataFormat` is `CHANNEL_FIRST`:\n * 4D tensor with shape: `[batchSize, channels, rows, cols]`.\n *\n * Output shape:\n * 2D tensor with shape: `[batchSize, channels]`.\n *\n * @doc {heading: 'Layers', subheading: 'Pooling', namespace: 'layers'}\n */\nexport function globalAveragePooling2d(args) {\n return new GlobalAveragePooling2D(args);\n}\n/**\n * Global max pooling operation for temporal data.\n *\n * Input Shape: 3D tensor with shape: `[batchSize, steps, features]`.\n *\n * Output Shape:2D tensor with shape: `[batchSize, features]`.\n *\n * @doc {heading: 'Layers', subheading: 'Pooling', namespace: 'layers'}\n */\nexport function globalMaxPooling1d(args) {\n return new GlobalMaxPooling1D(args);\n}\n/**\n * Global max pooling operation for spatial data.\n *\n * Input shape:\n * - If `dataFormat` is `CHANNEL_LAST`:\n * 4D tensor with shape: `[batchSize, rows, cols, channels]`.\n * - If `dataFormat` is `CHANNEL_FIRST`:\n * 4D tensor with shape: `[batchSize, channels, rows, cols]`.\n *\n * Output shape:\n * 2D tensor with shape: `[batchSize, channels]`.\n *\n * @doc {heading: 'Layers', subheading: 'Pooling', namespace: 'layers'}\n */\nexport function globalMaxPooling2d(args) {\n return new GlobalMaxPooling2D(args);\n}\n/**\n * Max pooling operation for temporal data.\n *\n * Input shape: `[batchSize, inLength, channels]`\n *\n * Output shape: `[batchSize, pooledLength, channels]`\n *\n * @doc {heading: 'Layers', subheading: 'Pooling', namespace: 'layers'}\n */\nexport function maxPooling1d(args) {\n return new MaxPooling1D(args);\n}\n/**\n * Max pooling operation for spatial data.\n *\n * Input shape\n * - If `dataFormat === CHANNEL_LAST`:\n * 4D tensor with shape:\n * `[batchSize, rows, cols, channels]`\n * - If `dataFormat === CHANNEL_FIRST`:\n * 4D tensor with shape:\n * `[batchSize, channels, rows, cols]`\n *\n * Output shape\n * - If `dataFormat=CHANNEL_LAST`:\n * 4D tensor with shape:\n * `[batchSize, pooleRows, pooledCols, channels]`\n * - If `dataFormat=CHANNEL_FIRST`:\n * 4D tensor with shape:\n * `[batchSize, channels, pooleRows, pooledCols]`\n *\n * @doc {heading: 'Layers', subheading: 'Pooling', namespace: 'layers'}\n */\nexport function maxPooling2d(args) {\n return new MaxPooling2D(args);\n}\n/**\n * Max pooling operation for 3D data.\n *\n * Input shape\n * - If `dataFormat === channelsLast`:\n * 5D tensor with shape:\n * `[batchSize, depths, rows, cols, channels]`\n * - If `dataFormat === channelsFirst`:\n * 5D tensor with shape:\n * `[batchSize, channels, depths, rows, cols]`\n *\n * Output shape\n * - If `dataFormat=channelsLast`:\n * 5D tensor with shape:\n * `[batchSize, pooledDepths, pooledRows, pooledCols, channels]`\n * - If `dataFormat=channelsFirst`:\n * 5D tensor with shape:\n * `[batchSize, channels, pooledDepths, 
pooledRows, pooledCols]`\n *\n * @doc {heading: 'Layers', subheading: 'Pooling', namespace: 'layers'}\n */\nexport function maxPooling3d(args) {\n return new MaxPooling3D(args);\n}\n// Recurrent Layers.\n/**\n * Gated Recurrent Unit - Cho et al. 2014.\n *\n * This is an `RNN` layer consisting of one `GRUCell`. However, unlike\n * the underlying `GRUCell`, the `apply` method of `SimpleRNN` operates\n * on a sequence of inputs. The shape of the input (not including the first,\n * batch dimension) needs to be at least 2-D, with the first dimension being\n * time steps. For example:\n *\n * ```js\n * const rnn = tf.layers.gru({units: 8, returnSequences: true});\n *\n * // Create an input with 10 time steps.\n * const input = tf.input({shape: [10, 20]});\n * const output = rnn.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10, 8]: 1st dimension is unknown batch size; 2nd dimension is the\n * // same as the sequence length of `input`, due to `returnSequences`: `true`;\n * // 3rd dimension is the `GRUCell`'s number of units.\n *\n * @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'}\n */\nexport function gru(args) {\n return new GRU(args);\n}\n/**\n * Cell class for `GRU`.\n *\n * `GRUCell` is distinct from the `RNN` subclass `GRU` in that its\n * `apply` method takes the input data of only a single time step and returns\n * the cell's output at the time step, while `GRU` takes the input data\n * over a number of time steps. For example:\n *\n * ```js\n * const cell = tf.layers.gruCell({units: 2});\n * const input = tf.input({shape: [10]});\n * const output = cell.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10]: This is the cell's output at a single time step. The 1st\n * // dimension is the unknown batch size.\n * ```\n *\n * Instance(s) of `GRUCell` can be used to construct `RNN` layers. The\n * most typical use of this workflow is to combine a number of cells into a\n * stacked RNN cell (i.e., `StackedRNNCell` internally) and use it to create an\n * RNN. For example:\n *\n * ```js\n * const cells = [\n * tf.layers.gruCell({units: 4}),\n * tf.layers.gruCell({units: 8}),\n * ];\n * const rnn = tf.layers.rnn({cell: cells, returnSequences: true});\n *\n * // Create an input with 10 time steps and a length-20 vector at each step.\n * const input = tf.input({shape: [10, 20]});\n * const output = rnn.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10, 8]: 1st dimension is unknown batch size; 2nd dimension is the\n * // same as the sequence length of `input`, due to `returnSequences`: `true`;\n * // 3rd dimension is the last `gruCell`'s number of units.\n * ```\n *\n * To create an `RNN` consisting of only *one* `GRUCell`, use the\n * `tf.layers.gru`.\n *\n * @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'}\n */\nexport function gruCell(args) {\n return new GRUCell(args);\n}\n/**\n * Long-Short Term Memory layer - Hochreiter 1997.\n *\n * This is an `RNN` layer consisting of one `LSTMCell`. However, unlike\n * the underlying `LSTMCell`, the `apply` method of `LSTM` operates\n * on a sequence of inputs. The shape of the input (not including the first,\n * batch dimension) needs to be at least 2-D, with the first dimension being\n * time steps. 
For example:\n *\n * ```js\n * const lstm = tf.layers.lstm({units: 8, returnSequences: true});\n *\n * // Create an input with 10 time steps.\n * const input = tf.input({shape: [10, 20]});\n * const output = lstm.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10, 8]: 1st dimension is unknown batch size; 2nd dimension is the\n * // same as the sequence length of `input`, due to `returnSequences`: `true`;\n * // 3rd dimension is the `LSTMCell`'s number of units.\n *\n * @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'}\n */\nexport function lstm(args) {\n return new LSTM(args);\n}\n/**\n * Cell class for `LSTM`.\n *\n * `LSTMCell` is distinct from the `RNN` subclass `LSTM` in that its\n * `apply` method takes the input data of only a single time step and returns\n * the cell's output at the time step, while `LSTM` takes the input data\n * over a number of time steps. For example:\n *\n * ```js\n * const cell = tf.layers.lstmCell({units: 2});\n * const input = tf.input({shape: [10]});\n * const output = cell.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10]: This is the cell's output at a single time step. The 1st\n * // dimension is the unknown batch size.\n * ```\n *\n * Instance(s) of `LSTMCell` can be used to construct `RNN` layers. The\n * most typical use of this workflow is to combine a number of cells into a\n * stacked RNN cell (i.e., `StackedRNNCell` internally) and use it to create an\n * RNN. For example:\n *\n * ```js\n * const cells = [\n * tf.layers.lstmCell({units: 4}),\n * tf.layers.lstmCell({units: 8}),\n * ];\n * const rnn = tf.layers.rnn({cell: cells, returnSequences: true});\n *\n * // Create an input with 10 time steps and a length-20 vector at each step.\n * const input = tf.input({shape: [10, 20]});\n * const output = rnn.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10, 8]: 1st dimension is unknown batch size; 2nd dimension is the\n * // same as the sequence length of `input`, due to `returnSequences`: `true`;\n * // 3rd dimension is the last `lstmCell`'s number of units.\n * ```\n *\n * To create an `RNN` consisting of only *one* `LSTMCell`, use the\n * `tf.layers.lstm`.\n *\n * @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'}\n */\nexport function lstmCell(args) {\n return new LSTMCell(args);\n}\n/**\n * Fully-connected RNN where the output is to be fed back to input.\n *\n * This is an `RNN` layer consisting of one `SimpleRNNCell`. However, unlike\n * the underlying `SimpleRNNCell`, the `apply` method of `SimpleRNN` operates\n * on a sequence of inputs. The shape of the input (not including the first,\n * batch dimension) needs to be at least 2-D, with the first dimension being\n * time steps. 
For example:\n *\n * ```js\n * const rnn = tf.layers.simpleRNN({units: 8, returnSequences: true});\n *\n * // Create an input with 10 time steps.\n * const input = tf.input({shape: [10, 20]});\n * const output = rnn.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10, 8]: 1st dimension is unknown batch size; 2nd dimension is the\n * // same as the sequence length of `input`, due to `returnSequences`: `true`;\n * // 3rd dimension is the `SimpleRNNCell`'s number of units.\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'}\n */\nexport function simpleRNN(args) {\n return new SimpleRNN(args);\n}\n/**\n * Cell class for `SimpleRNN`.\n *\n * `SimpleRNNCell` is distinct from the `RNN` subclass `SimpleRNN` in that its\n * `apply` method takes the input data of only a single time step and returns\n * the cell's output at the time step, while `SimpleRNN` takes the input data\n * over a number of time steps. For example:\n *\n * ```js\n * const cell = tf.layers.simpleRNNCell({units: 2});\n * const input = tf.input({shape: [10]});\n * const output = cell.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10]: This is the cell's output at a single time step. The 1st\n * // dimension is the unknown batch size.\n * ```\n *\n * Instance(s) of `SimpleRNNCell` can be used to construct `RNN` layers. The\n * most typical use of this workflow is to combine a number of cells into a\n * stacked RNN cell (i.e., `StackedRNNCell` internally) and use it to create an\n * RNN. For example:\n *\n * ```js\n * const cells = [\n * tf.layers.simpleRNNCell({units: 4}),\n * tf.layers.simpleRNNCell({units: 8}),\n * ];\n * const rnn = tf.layers.rnn({cell: cells, returnSequences: true});\n *\n * // Create an input with 10 time steps and a length-20 vector at each step.\n * const input = tf.input({shape: [10, 20]});\n * const output = rnn.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10, 8]: 1st dimension is unknown batch size; 2nd dimension is the\n * // same as the sequence length of `input`, due to `returnSequences`: `true`;\n * // 3rd dimension is the last `SimpleRNNCell`'s number of units.\n * ```\n *\n * To create an `RNN` consisting of only *one* `SimpleRNNCell`, use the\n * `tf.layers.simpleRNN`.\n *\n * @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'}\n */\nexport function simpleRNNCell(args) {\n return new SimpleRNNCell(args);\n}\n/**\n * Convolutional LSTM layer - Xingjian Shi 2015.\n *\n * This is an `ConvRNN2D` layer consisting of one `ConvLSTM2DCell`. However,\n * unlike the underlying `ConvLSTM2DCell`, the `apply` method of `ConvLSTM2D`\n * operates on a sequence of inputs. The shape of the input (not including the\n * first, batch dimension) needs to be 4-D, with the first dimension being time\n * steps. 
For example:\n *\n * ```js\n * const filters = 3;\n * const kernelSize = 3;\n *\n * const batchSize = 4;\n * const sequenceLength = 2;\n * const size = 5;\n * const channels = 3;\n *\n * const inputShape = [batchSize, sequenceLength, size, size, channels];\n * const input = tf.ones(inputShape);\n *\n * const layer = tf.layers.convLstm2d({filters, kernelSize});\n *\n * const output = layer.apply(input);\n * ```\n */\n/** @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'} */\nexport function convLstm2d(args) {\n return new ConvLSTM2D(args);\n}\n/**\n * Cell class for `ConvLSTM2D`.\n *\n * `ConvLSTM2DCell` is distinct from the `ConvRNN2D` subclass `ConvLSTM2D` in\n * that its `call` method takes the input data of only a single time step and\n * returns the cell's output at the time step, while `ConvLSTM2D` takes the\n * input data over a number of time steps. For example:\n *\n * ```js\n * const filters = 3;\n * const kernelSize = 3;\n *\n * const sequenceLength = 1;\n * const size = 5;\n * const channels = 3;\n *\n * const inputShape = [sequenceLength, size, size, channels];\n * const input = tf.ones(inputShape);\n *\n * const cell = tf.layers.convLstm2dCell({filters, kernelSize});\n *\n * cell.build(input.shape);\n *\n * const outputSize = size - kernelSize + 1;\n * const outShape = [sequenceLength, outputSize, outputSize, filters];\n *\n * const initialH = tf.zeros(outShape);\n * const initialC = tf.zeros(outShape);\n *\n * const [o, h, c] = cell.call([input, initialH, initialC], {});\n * ```\n */\n/** @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'} */\nexport function convLstm2dCell(args) {\n return new ConvLSTM2DCell(args);\n}\n/**\n * Base class for recurrent layers.\n *\n * Input shape:\n * 3D tensor with shape `[batchSize, timeSteps, inputDim]`.\n *\n * Output shape:\n * - if `returnState`, an Array of tensors (i.e., `tf.Tensor`s). The first\n * tensor is the output. The remaining tensors are the states at the\n * last time step, each with shape `[batchSize, units]`.\n * - if `returnSequences`, the output will have shape\n * `[batchSize, timeSteps, units]`.\n * - else, the output will have shape `[batchSize, units]`.\n *\n * Masking:\n * This layer supports masking for input data with a variable number\n * of timesteps. To introduce masks to your data,\n * use an embedding layer with the `mask_zero` parameter\n * set to `True`.\n *\n * Notes on using statefulness in RNNs:\n * You can set RNN layers to be 'stateful', which means that the states\n * computed for the samples in one batch will be reused as initial states\n * for the samples in the next batch. This assumes a one-to-one mapping\n * between samples in different successive batches.\n *\n * To enable statefulness:\n * - specify `stateful: true` in the layer constructor.\n * - specify a fixed batch size for your model, by passing\n * if sequential model:\n * `batchInputShape=[...]` to the first layer in your model.\n * else for functional model with 1 or more Input layers:\n * `batchShape=[...]` to all the first layers in your model.\n * This is the expected shape of your inputs *including the batch size*.\n * It should be a tuple of integers, e.g. 
`(32, 10, 100)`.\n * - specify `shuffle=False` when calling fit().\n *\n * To reset the states of your model, call `.resetStates()` on either\n * a specific layer, or on your entire model.\n *\n * Note on specifying the initial state of RNNs\n * You can specify the initial state of RNN layers symbolically by\n * calling them with the option `initialState`. The value of\n * `initialState` should be a tensor or list of tensors representing\n * the initial state of the RNN layer.\n *\n * You can specify the initial state of RNN layers numerically by\n * calling `resetStates` with the keyword argument `states`. The value of\n * `states` should be a numpy array or list of numpy arrays representing\n * the initial state of the RNN layer.\n *\n * Note on passing external constants to RNNs\n * You can pass \"external\" constants to the cell using the `constants`\n * keyword argument of `RNN.call` method. This requires that the `cell.call`\n * method accepts the same keyword argument `constants`. Such constants\n * can be used to conditon the cell transformation on additional static inputs\n * (not changing over time), a.k.a an attention mechanism.\n *\n * @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'}\n */\nexport function rnn(args) {\n return new RNN(args);\n}\n/**\n * Wrapper allowing a stack of RNN cells to behave as a single cell.\n *\n * Used to implement efficient stacked RNNs.\n *\n * @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'}\n */\nexport function stackedRNNCells(args) {\n return new StackedRNNCells(args);\n}\n// Wrapper Layers.\n/** @doc {heading: 'Layers', subheading: 'Wrapper', namespace: 'layers'} */\nexport function bidirectional(args) {\n return new Bidirectional(args);\n}\n/**\n * This wrapper applies a layer to every temporal slice of an input.\n *\n * The input should be at least 3D, and the dimension of the index `1` will be\n * considered to be the temporal dimension.\n *\n * Consider a batch of 32 samples, where each sample is a sequence of 10 vectors\n * of 16 dimensions. 
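The statefulness recipe described in the `rnn` notes above (`stateful: true`, a fixed `batchInputShape`, `shuffle: false` during `fit()`, and `resetStates()` afterwards) is easier to follow with a concrete snippet. The following is a minimal sketch only, assuming `tf` is the loaded TensorFlow.js namespace; the units, batch size, and shapes are illustrative and not taken from the bundled source.

```js
// Minimal sketch of a stateful RNN; shapes and hyperparameters are illustrative.
const model = tf.sequential();
model.add(tf.layers.lstm({
  units: 4,
  stateful: true,
  // Stateful RNNs need a fixed batch size: [batchSize, timeSteps, inputDim].
  batchInputShape: [8, 5, 3],
}));
model.compile({loss: 'meanSquaredError', optimizer: 'sgd'});

// Dummy data matching the fixed batch size declared above.
const xs = tf.ones([8, 5, 3]);
const ys = tf.ones([8, 4]);

// `shuffle: false` keeps the one-to-one mapping between samples in
// successive batches, as the statefulness notes require.
await model.fit(xs, ys, {epochs: 1, shuffle: false});

// Clear the carried-over states before feeding an unrelated sequence.
model.resetStates();
```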
The batch input shape of the layer is then `[32, 10,\n * 16]`, and the `inputShape`, not including the sample dimension, is\n * `[10, 16]`.\n *\n * You can then use `TimeDistributed` to apply a `Dense` layer to each of the 10\n * timesteps, independently:\n *\n * ```js\n * const model = tf.sequential();\n * model.add(tf.layers.timeDistributed({\n * layer: tf.layers.dense({units: 8}),\n * inputShape: [10, 16],\n * }));\n *\n * // Now model.outputShape = [null, 10, 8].\n * // The output will then have shape `[32, 10, 8]`.\n *\n * // In subsequent layers, there is no need for `inputShape`:\n * model.add(tf.layers.timeDistributed({layer: tf.layers.dense({units: 32})}));\n * console.log(JSON.stringify(model.outputs[0].shape));\n * // Now model.outputShape = [null, 10, 32].\n * ```\n *\n * The output will then have shape `[32, 10, 32]`.\n *\n * `TimeDistributed` can be used with arbitrary layers, not just `Dense`, for\n * instance a `Conv2D` layer.\n *\n * ```js\n * const model = tf.sequential();\n * model.add(tf.layers.timeDistributed({\n * layer: tf.layers.conv2d({filters: 64, kernelSize: [3, 3]}),\n * inputShape: [10, 299, 299, 3],\n * }));\n * console.log(JSON.stringify(model.outputs[0].shape));\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Wrapper', namespace: 'layers'}\n */\nexport function timeDistributed(args) {\n return new TimeDistributed(args);\n}\n// Aliases for pooling.\nexport const globalMaxPool1d = globalMaxPooling1d;\nexport const globalMaxPool2d = globalMaxPooling2d;\nexport const maxPool1d = maxPooling1d;\nexport const maxPool2d = maxPooling2d;\nexport { Layer, RNN, RNNCell, input /* alias for tf.input */ };\n/**\n * Apply additive zero-centered Gaussian noise.\n *\n * As it is a regularization layer, it is only active at training time.\n *\n * This is useful to mitigate overfitting\n * (you could see it as a form of random data augmentation).\n * Gaussian Noise (GS) is a natural choice as corruption process\n * for real valued inputs.\n *\n * # Arguments\n * stddev: float, standard deviation of the noise distribution.\n *\n * # Input shape\n * Arbitrary. Use the keyword argument `input_shape`\n * (tuple of integers, does not include the samples axis)\n * when using this layer as the first layer in a model.\n *\n * # Output shape\n * Same shape as input.\n *\n * @doc {heading: 'Layers', subheading: 'Noise', namespace: 'layers'}\n */\nexport function gaussianNoise(args) {\n return new GaussianNoise(args);\n}\n/**\n * Apply multiplicative 1-centered Gaussian noise.\n *\n * As it is a regularization layer, it is only active at training time.\n *\n * Arguments:\n * - `rate`: float, drop probability (as with `Dropout`).\n * The multiplicative noise will have\n * standard deviation `sqrt(rate / (1 - rate))`.\n *\n * Input shape:\n * Arbitrary. 
Use the keyword argument `inputShape`\n * (tuple of integers, does not include the samples axis)\n * when using this layer as the first layer in a model.\n *\n * Output shape:\n * Same shape as input.\n *\n * References:\n * - [Dropout: A Simple Way to Prevent Neural Networks from Overfitting](\n * http://www.cs.toronto.edu/~rsalakhu/papers/srivastava14a.pdf)\n *\n * @doc {heading: 'Layers', subheading: 'Noise', namespace: 'layers'}\n */\nexport function gaussianDropout(args) {\n return new GaussianDropout(args);\n}\n/**\n * Applies Alpha Dropout to the input.\n *\n * As it is a regularization layer, it is only active at training time.\n *\n * Alpha Dropout is a `Dropout` that keeps mean and variance of inputs\n * to their original values, in order to ensure the self-normalizing property\n * even after this dropout.\n * Alpha Dropout fits well to Scaled Exponential Linear Units\n * by randomly setting activations to the negative saturation value.\n *\n * Arguments:\n * - `rate`: float, drop probability (as with `Dropout`).\n * The multiplicative noise will have\n * standard deviation `sqrt(rate / (1 - rate))`.\n * - `noise_shape`: A 1-D `Tensor` of type `int32`, representing the\n * shape for randomly generated keep/drop flags.\n *\n * Input shape:\n * Arbitrary. Use the keyword argument `inputShape`\n * (tuple of integers, does not include the samples axis)\n * when using this layer as the first layer in a model.\n *\n * Output shape:\n * Same shape as input.\n *\n * References:\n * - [Self-Normalizing Neural Networks](https://arxiv.org/abs/1706.02515)\n *\n * @doc {heading: 'Layers', subheading: 'Noise', namespace: 'layers'}\n */\nexport function alphaDropout(args) {\n return new AlphaDropout(args);\n}\n/**\n * Masks a sequence by using a mask value to skip timesteps.\n *\n * If all features for a given sample timestep are equal to `mask_value`,\n * then the sample timestep will be masked (skipped) in all downstream layers\n * (as long as they support masking).\n *\n * If any downstream layer does not support masking yet receives such\n * an input mask, an exception will be raised.\n *\n * Arguments:\n * - `maskValue`: Either None or mask value to skip.\n *\n * Input shape:\n * Arbitrary. Use the keyword argument `inputShape`\n * (tuple of integers, does not include the samples axis)\n * when using this layer as the first layer in a model.\n *\n * Output shape:\n * Same shape as input.\n *\n * @doc {heading: 'Layers', subheading: 'Mask', namespace: 'layers'}\n */\nexport function masking(args) {\n return new Masking(args);\n}\n//# sourceMappingURL=exports_layers.js.map", "import * as losses from './losses';\nimport * as metrics from './metrics';\n/**\n * Binary accuracy metric function.\n *\n * `yTrue` and `yPred` can have 0-1 values. 
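The noise and masking layers documented above (`gaussianNoise`, `gaussianDropout`, `alphaDropout`, `masking`) carry no inline snippets of their own. The block below is a small, hedged sketch of how they might be combined in a sequential model, assuming `tf` is the TensorFlow.js namespace; the shapes and rates are arbitrary placeholders.

```js
// Hedged sketch only: arbitrary shapes and rates, `tf` assumed to be TensorFlow.js.
const model = tf.sequential();
// Skip timesteps whose features are all equal to the mask value.
model.add(tf.layers.masking({maskValue: 0, inputShape: [10, 16]}));
// Additive zero-centered Gaussian noise; active only at training time.
model.add(tf.layers.gaussianNoise({stddev: 0.1}));
model.add(tf.layers.lstm({units: 8}));
// Alpha dropout preserves mean and variance of its inputs.
model.add(tf.layers.alphaDropout({rate: 0.1}));
model.add(tf.layers.dense({units: 1}));
model.summary();
```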
Example:\n * ```js\n * const x = tf.tensor2d([[1, 1, 1, 1], [0, 0, 0, 0]], [2, 4]);\n * const y = tf.tensor2d([[1, 0, 1, 0], [0, 0, 0, 1]], [2, 4]);\n * const accuracy = tf.metrics.binaryAccuracy(x, y);\n * accuracy.print();\n * ```\n *\n * `yTrue` and `yPred` can also have floating-number values between 0 and 1, in\n * which case the values will be thresholded at 0.5 to yield 0-1 values (i.e.,\n * a value >= 0.5 and <= 1.0 is interpreted as 1.\n * )\n * Example:\n * ```js\n * const x = tf.tensor1d([1, 1, 1, 1, 0, 0, 0, 0]);\n * const y = tf.tensor1d([0.2, 0.4, 0.6, 0.8, 0.2, 0.3, 0.4, 0.7]);\n * const accuracy = tf.metrics.binaryAccuracy(x, y);\n * accuracy.print();\n * ```\n *\n * @param yTrue Binary Tensor of truth.\n * @param yPred Binary Tensor of prediction.\n * @return Accuracy Tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function binaryAccuracy(yTrue, yPred) {\n return metrics.binaryAccuracy(yTrue, yPred);\n}\n/**\n * Binary crossentropy metric function.\n *\n * Example:\n * ```js\n * const x = tf.tensor2d([[0], [1], [1], [1]]);\n * const y = tf.tensor2d([[0], [0], [0.5], [1]]);\n * const crossentropy = tf.metrics.binaryCrossentropy(x, y);\n * crossentropy.print();\n * ```\n *\n * @param yTrue Binary Tensor of truth.\n * @param yPred Binary Tensor of prediction, probabilities for the `1` case.\n * @return Accuracy Tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function binaryCrossentropy(yTrue, yPred) {\n return metrics.binaryCrossentropy(yTrue, yPred);\n}\n/**\n * Sparse categorical accuracy metric function.\n *\n * Example:\n * ```js\n *\n * const yTrue = tf.tensor1d([1, 1, 2, 2, 0]);\n * const yPred = tf.tensor2d(\n * [[0, 1, 0], [1, 0, 0], [0, 0.4, 0.6], [0, 0.6, 0.4], [0.7, 0.3, 0]]);\n * const crossentropy = tf.metrics.sparseCategoricalAccuracy(yTrue, yPred);\n * crossentropy.print();\n * ```\n *\n * @param yTrue True labels: indices.\n * @param yPred Predicted probabilities or logits.\n * @returns Accuracy tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function sparseCategoricalAccuracy(yTrue, yPred) {\n return metrics.sparseCategoricalAccuracy(yTrue, yPred);\n}\n/**\n * Categorical accuracy metric function.\n *\n * Example:\n * ```js\n * const x = tf.tensor2d([[0, 0, 0, 1], [0, 0, 0, 1]]);\n * const y = tf.tensor2d([[0.1, 0.8, 0.05, 0.05], [0.1, 0.05, 0.05, 0.8]]);\n * const accuracy = tf.metrics.categoricalAccuracy(x, y);\n * accuracy.print();\n * ```\n *\n * @param yTrue Binary Tensor of truth: one-hot encoding of categories.\n * @param yPred Binary Tensor of prediction: probabilities or logits for the\n * same categories as in `yTrue`.\n * @return Accuracy Tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function categoricalAccuracy(yTrue, yPred) {\n return metrics.categoricalAccuracy(yTrue, yPred);\n}\n/**\n * Categorical crossentropy between an output tensor and a target tensor.\n *\n * @param target A tensor of the same shape as `output`.\n * @param output A tensor resulting from a softmax (unless `fromLogits` is\n * `true`, in which case `output` is expected to be the logits).\n * @param fromLogits Boolean, whether `output` is the result of a softmax, or is\n * a tensor of logits.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function categoricalCrossentropy(yTrue, yPred) {\n return metrics.categoricalCrossentropy(yTrue, yPred);\n}\n/**\n * Computes the precision of the predictions with respect to the labels.\n *\n * 
Example:\n * ```js\n * const x = tf.tensor2d(\n * [\n * [0, 0, 0, 1],\n * [0, 1, 0, 0],\n * [0, 0, 0, 1],\n * [1, 0, 0, 0],\n * [0, 0, 1, 0]\n * ]\n * );\n *\n * const y = tf.tensor2d(\n * [\n * [0, 0, 1, 0],\n * [0, 1, 0, 0],\n * [0, 0, 0, 1],\n * [0, 1, 0, 0],\n * [0, 1, 0, 0]\n * ]\n * );\n *\n * const precision = tf.metrics.precision(x, y);\n * precision.print();\n * ```\n *\n * @param yTrue The ground truth values. Expected to be contain only 0-1 values.\n * @param yPred The predicted values. Expected to be contain only 0-1 values.\n * @return Precision Tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function precision(yTrue, yPred) {\n return metrics.precision(yTrue, yPred);\n}\n/**\n * Computes the recall of the predictions with respect to the labels.\n *\n * Example:\n * ```js\n * const x = tf.tensor2d(\n * [\n * [0, 0, 0, 1],\n * [0, 1, 0, 0],\n * [0, 0, 0, 1],\n * [1, 0, 0, 0],\n * [0, 0, 1, 0]\n * ]\n * );\n *\n * const y = tf.tensor2d(\n * [\n * [0, 0, 1, 0],\n * [0, 1, 0, 0],\n * [0, 0, 0, 1],\n * [0, 1, 0, 0],\n * [0, 1, 0, 0]\n * ]\n * );\n *\n * const recall = tf.metrics.recall(x, y);\n * recall.print();\n * ```\n *\n * @param yTrue The ground truth values. Expected to be contain only 0-1 values.\n * @param yPred The predicted values. Expected to be contain only 0-1 values.\n * @return Recall Tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function recall(yTrue, yPred) {\n return metrics.recall(yTrue, yPred);\n}\n/**\n * Loss or metric function: Cosine proximity.\n *\n * Mathematically, cosine proximity is defined as:\n * `-sum(l2Normalize(yTrue) * l2Normalize(yPred))`,\n * wherein `l2Normalize()` normalizes the L2 norm of the input to 1 and `*`\n * represents element-wise multiplication.\n *\n * ```js\n * const yTrue = tf.tensor2d([[1, 0], [1, 0]]);\n * const yPred = tf.tensor2d([[1 / Math.sqrt(2), 1 / Math.sqrt(2)], [0, 1]]);\n * const proximity = tf.metrics.cosineProximity(yTrue, yPred);\n * proximity.print();\n * ```\n *\n * @param yTrue Truth Tensor.\n * @param yPred Prediction Tensor.\n * @return Cosine proximity Tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function cosineProximity(yTrue, yPred) {\n return losses.cosineProximity(yTrue, yPred);\n}\n/**\n * Loss or metric function: Mean absolute error.\n *\n * Mathematically, mean absolute error is defined as:\n * `mean(abs(yPred - yTrue))`,\n * wherein the `mean` is applied over feature dimensions.\n *\n * ```js\n * const yTrue = tf.tensor2d([[0, 1], [0, 0], [2, 3]]);\n * const yPred = tf.tensor2d([[0, 1], [0, 1], [-2, -3]]);\n * const mse = tf.metrics.meanAbsoluteError(yTrue, yPred);\n * mse.print();\n * ```\n *\n * @param yTrue Truth Tensor.\n * @param yPred Prediction Tensor.\n * @return Mean absolute error Tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function meanAbsoluteError(yTrue, yPred) {\n return losses.meanAbsoluteError(yTrue, yPred);\n}\n/**\n * Loss or metric function: Mean absolute percentage error.\n *\n * ```js\n * const yTrue = tf.tensor2d([[0, 1], [10, 20]]);\n * const yPred = tf.tensor2d([[0, 1], [11, 24]]);\n * const mse = tf.metrics.meanAbsolutePercentageError(yTrue, yPred);\n * mse.print();\n * ```\n *\n * Aliases: `tf.metrics.MAPE`, `tf.metrics.mape`.\n *\n * @param yTrue Truth Tensor.\n * @param yPred Prediction Tensor.\n * @return Mean absolute percentage error Tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function 
meanAbsolutePercentageError(yTrue, yPred) {\n return losses.meanAbsolutePercentageError(yTrue, yPred);\n}\nexport function MAPE(yTrue, yPred) {\n return losses.meanAbsolutePercentageError(yTrue, yPred);\n}\nexport function mape(yTrue, yPred) {\n return losses.meanAbsolutePercentageError(yTrue, yPred);\n}\n/**\n * Loss or metric function: Mean squared error.\n *\n * ```js\n * const yTrue = tf.tensor2d([[0, 1], [3, 4]]);\n * const yPred = tf.tensor2d([[0, 1], [-3, -4]]);\n * const mse = tf.metrics.meanSquaredError(yTrue, yPred);\n * mse.print();\n * ```\n *\n * Aliases: `tf.metrics.MSE`, `tf.metrics.mse`.\n *\n * @param yTrue Truth Tensor.\n * @param yPred Prediction Tensor.\n * @return Mean squared error Tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function meanSquaredError(yTrue, yPred) {\n return losses.meanSquaredError(yTrue, yPred);\n}\nexport function MSE(yTrue, yPred) {\n return losses.meanSquaredError(yTrue, yPred);\n}\nexport function mse(yTrue, yPred) {\n return losses.meanSquaredError(yTrue, yPred);\n}\n//# sourceMappingURL=exports_metrics.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nexport { modelFromJSON } from './models';\n//# sourceMappingURL=exports_models.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport * as regularizers from './regularizers';\n// tslint:disable-next-line:max-line-length\nimport { L1L2 } from './regularizers';\n/**\n * Regularizer for L1 and L2 regularization.\n *\n * Adds a term to the loss to penalize large weights:\n * loss += sum(l1 * abs(x)) + sum(l2 * x^2)\n *\n * @doc {heading: 'Regularizers', namespace: 'regularizers'}\n */\nexport function l1l2(config) {\n return new L1L2(config);\n}\n/**\n * Regularizer for L1 regularization.\n *\n * Adds a term to the loss to penalize large weights:\n * loss += sum(l1 * abs(x))\n * @param args l1 config.\n *\n * @doc {heading: 'Regularizers', namespace: 'regularizers'}\n */\nexport function l1(config) {\n return regularizers.l1(config);\n}\n/**\n * Regularizer for L2 regularization.\n *\n * Adds a term to the loss to penalize large weights:\n * loss += sum(l2 * x^2)\n * @param args l2 config.\n *\n * @doc {heading: 'Regularizers', namespace: 'regularizers'}\n */\nexport function l2(config) {\n return regularizers.l2(config);\n}\n//# sourceMappingURL=exports_regularizers.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* Original source: keras/callbacks.py */\nimport { BaseCallback } from './base_callbacks';\nimport { LayersModel } from './engine/training';\nimport { NotImplementedError } from './errors';\nimport { resolveScalarsInLogs } from './logs';\nexport class Callback extends BaseCallback {\n constructor() {\n super(...arguments);\n /** Instance of `keras.models.Model`. Reference of the model being trained. 
*/\n this.model = null;\n }\n setModel(model) {\n if (!(model instanceof LayersModel)) {\n throw new Error('model must be a LayersModel, not some other Container');\n }\n this.model = model;\n }\n}\nfunction less(currVal, prevVal) {\n return currVal < prevVal;\n}\nfunction greater(currVal, prevVal) {\n return currVal > prevVal;\n}\n/**\n * A Callback that stops training when a monitored quantity has stopped\n * improving.\n */\nexport class EarlyStopping extends Callback {\n constructor(args) {\n super();\n if (args == null) {\n args = {};\n }\n if (args.restoreBestWeights) {\n throw new NotImplementedError('restoreBestWeights = True is not implemented in EarlyStopping yet.');\n }\n this.monitor = args.monitor || 'val_loss';\n this.minDelta = Math.abs(args.minDelta || 0);\n this.patience = args.patience || 0;\n this.verbose = args.verbose || 0;\n this.mode = args.mode || 'auto';\n this.baseline = args.baseline;\n if (['auto', 'min', 'max'].indexOf(this.mode) === -1) {\n console.warn(`EarlyStopping mode '${this.mode}' is invalid. ` +\n `Falling back to mode 'auto'.`);\n this.mode = 'auto';\n }\n if (this.mode === 'min') {\n this.monitorFunc = less;\n }\n else if (this.mode === 'max') {\n this.monitorFunc = greater;\n }\n else {\n // For mode === 'auto'.\n if (this.monitor.indexOf('acc') !== -1) {\n this.monitorFunc = greater;\n }\n else {\n this.monitorFunc = less;\n }\n }\n if (this.monitorFunc === less) {\n this.minDelta *= -1;\n }\n }\n async onTrainBegin(logs) {\n this.wait = 0;\n this.stoppedEpoch = 0;\n if (this.baseline != null) {\n this.best = this.baseline;\n }\n else {\n this.best = this.monitorFunc === less ? Infinity : -Infinity;\n }\n }\n async onEpochEnd(epoch, logs) {\n await resolveScalarsInLogs(logs);\n const current = this.getMonitorValue(logs);\n if (current == null) {\n return;\n }\n if (this.monitorFunc(current - this.minDelta, this.best)) {\n this.best = current;\n this.wait = 0;\n // TODO(cais): Logic for restoreBestWeights.\n }\n else {\n this.wait++;\n if (this.wait >= this.patience) {\n this.stoppedEpoch = epoch;\n this.model.stopTraining = true;\n }\n // TODO(cais): Logic for restoreBestWeights.\n }\n }\n async onTrainEnd(logs) {\n if (this.stoppedEpoch > 0 && this.verbose) {\n console.log(`Epoch ${this.stoppedEpoch}: early stopping.`);\n }\n }\n getMonitorValue(logs) {\n if (logs == null) {\n logs = {};\n }\n const monitorValue = logs[this.monitor];\n if (monitorValue == null) {\n console.warn(`Metric for EarlyStopping ${this.monitor} is not available. 
` +\n `Available metrics are: ${Object.keys(logs)}`);\n }\n return monitorValue;\n }\n}\n/**\n * Factory function for a Callback that stops training when a monitored\n * quantity has stopped improving.\n *\n * Early stopping is a type of regularization, and protects model against\n * overfitting.\n *\n * The following example based on fake data illustrates how this callback\n * can be used during `tf.LayersModel.fit()`:\n *\n * ```js\n * const model = tf.sequential();\n * model.add(tf.layers.dense({\n * units: 3,\n * activation: 'softmax',\n * kernelInitializer: 'ones',\n * inputShape: [2]\n * }));\n * const xs = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n * const ys = tf.tensor2d([[1, 0, 0], [0, 1, 0]], [2, 3]);\n * const xsVal = tf.tensor2d([4, 3, 2, 1], [2, 2]);\n * const ysVal = tf.tensor2d([[0, 0, 1], [0, 1, 0]], [2, 3]);\n * model.compile(\n * {loss: 'categoricalCrossentropy', optimizer: 'sgd', metrics: ['acc']});\n *\n * // Without the EarlyStopping callback, the val_acc value would be:\n * // 0.5, 0.5, 0.5, 0.5, ...\n * // With val_acc being monitored, training should stop after the 2nd epoch.\n * const history = await model.fit(xs, ys, {\n * epochs: 10,\n * validationData: [xsVal, ysVal],\n * callbacks: tf.callbacks.earlyStopping({monitor: 'val_acc'})\n * });\n *\n * // Expect to see a length-2 array.\n * console.log(history.history.val_acc);\n * ```\n *\n * @doc {\n * heading: 'Callbacks',\n * namespace: 'callbacks'\n * }\n */\nexport function earlyStopping(args) {\n return new EarlyStopping(args);\n}\nexport const callbacks = { earlyStopping };\n//# sourceMappingURL=callbacks.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n// This file lists all exports of TensorFlow.js Layers\nimport * as constraints from './exports_constraints';\nimport * as initializers from './exports_initializers';\nimport * as layers from './exports_layers';\nimport * as metrics from './exports_metrics';\nimport * as models from './exports_models';\nimport * as regularizers from './exports_regularizers';\nexport { CallbackList, CustomCallback, History } from './base_callbacks';\nexport { Callback, callbacks, EarlyStopping } from './callbacks';\nexport { InputSpec, SymbolicTensor } from './engine/topology';\nexport { LayersModel } from './engine/training';\nexport { input, loadLayersModel, model, registerCallbackConstructor, sequential } from './exports';\nexport { RNN } from './layers/recurrent';\nexport { Sequential } from './models';\nexport { LayerVariable } from './variables';\nexport { version as version_layers } from './version';\nexport { constraints, initializers, layers, metrics, models, regularizers };\n//# sourceMappingURL=index.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\n/** DataType enum. */\nexport var DataType;\n(function (DataType) {\n DataType[DataType[\"DT_INVALID\"] = 0] = \"DT_INVALID\";\n DataType[DataType[\"DT_FLOAT\"] = 1] = \"DT_FLOAT\";\n DataType[DataType[\"DT_DOUBLE\"] = 2] = \"DT_DOUBLE\";\n DataType[DataType[\"DT_INT32\"] = 3] = \"DT_INT32\";\n DataType[DataType[\"DT_UINT8\"] = 4] = \"DT_UINT8\";\n DataType[DataType[\"DT_INT16\"] = 5] = \"DT_INT16\";\n DataType[DataType[\"DT_INT8\"] = 6] = \"DT_INT8\";\n DataType[DataType[\"DT_STRING\"] = 7] = \"DT_STRING\";\n DataType[DataType[\"DT_COMPLEX64\"] = 8] = \"DT_COMPLEX64\";\n DataType[DataType[\"DT_INT64\"] = 9] = \"DT_INT64\";\n DataType[DataType[\"DT_BOOL\"] = 10] = \"DT_BOOL\";\n DataType[DataType[\"DT_QINT8\"] = 11] = \"DT_QINT8\";\n DataType[DataType[\"DT_QUINT8\"] = 12] = \"DT_QUINT8\";\n DataType[DataType[\"DT_QINT32\"] = 13] = \"DT_QINT32\";\n DataType[DataType[\"DT_BFLOAT16\"] = 14] = \"DT_BFLOAT16\";\n DataType[DataType[\"DT_FLOAT_REF\"] = 101] = \"DT_FLOAT_REF\";\n DataType[DataType[\"DT_DOUBLE_REF\"] = 102] = \"DT_DOUBLE_REF\";\n DataType[DataType[\"DT_INT32_REF\"] = 103] = \"DT_INT32_REF\";\n DataType[DataType[\"DT_UINT8_REF\"] = 104] = \"DT_UINT8_REF\";\n DataType[DataType[\"DT_INT16_REF\"] = 105] = \"DT_INT16_REF\";\n DataType[DataType[\"DT_INT8_REF\"] = 106] = \"DT_INT8_REF\";\n DataType[DataType[\"DT_STRING_REF\"] = 107] = \"DT_STRING_REF\";\n DataType[DataType[\"DT_COMPLEX64_REF\"] = 108] = \"DT_COMPLEX64_REF\";\n DataType[DataType[\"DT_INT64_REF\"] = 109] = \"DT_INT64_REF\";\n DataType[DataType[\"DT_BOOL_REF\"] = 110] = \"DT_BOOL_REF\";\n DataType[DataType[\"DT_QINT8_REF\"] = 111] = \"DT_QINT8_REF\";\n DataType[DataType[\"DT_QUINT8_REF\"] = 112] = \"DT_QUINT8_REF\";\n DataType[DataType[\"DT_QINT32_REF\"] = 113] = \"DT_QINT32_REF\";\n DataType[DataType[\"DT_BFLOAT16_REF\"] = 114] = \"DT_BFLOAT16_REF\";\n})(DataType || (DataType = {}));\nexport var SaverDef;\n(function (SaverDef) {\n /** CheckpointFormatVersion enum. */\n let CheckpointFormatVersion;\n (function (CheckpointFormatVersion) {\n CheckpointFormatVersion[CheckpointFormatVersion[\"LEGACY\"] = 0] = \"LEGACY\";\n CheckpointFormatVersion[CheckpointFormatVersion[\"V1\"] = 1] = \"V1\";\n CheckpointFormatVersion[CheckpointFormatVersion[\"V2\"] = 2] = \"V2\";\n })(CheckpointFormatVersion = SaverDef.CheckpointFormatVersion || (SaverDef.CheckpointFormatVersion = {}));\n})(SaverDef || (SaverDef = {}));\n//# sourceMappingURL=compiled_api.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nconst CUSTOM_OPS = {};\n/**\n * Register an Op for graph model executor. This allow you to register\n * TensorFlow custom op or override existing op.\n *\n * Here is an example of registering a new MatMul Op.\n * ```js\n * const customMatmul = (node) =>\n * tf.matMul(\n * node.inputs[0], node.inputs[1],\n * node.attrs['transpose_a'], node.attrs['transpose_b']);\n *\n * tf.registerOp('MatMul', customMatmul);\n * ```\n * The inputs and attrs of the node object is based on the TensorFlow op\n * registry.\n *\n * @param name The Tensorflow Op name.\n * @param opFunc An op function which is called with the current graph node\n * during execution and needs to return a tensor or a list of tensors. The node\n * has the following attributes:\n * - attr: A map from attribute name to its value\n * - inputs: A list of input tensors\n *\n * @doc {heading: 'Models', subheading: 'Op Registry'}\n */\nexport function registerOp(name, opFunc) {\n const opMapper = {\n tfOpName: name,\n category: 'custom',\n inputs: [],\n attrs: [],\n customExecutor: opFunc\n };\n CUSTOM_OPS[name] = opMapper;\n}\n/**\n * Retrieve the OpMapper object for the registered op.\n *\n * @param name The Tensorflow Op name.\n *\n * @doc {heading: 'Models', subheading: 'Op Registry'}\n */\nexport function getRegisteredOp(name) {\n return CUSTOM_OPS[name];\n}\n/**\n * Deregister the Op for graph model executor.\n *\n * @param name The Tensorflow Op name.\n *\n * @doc {heading: 'Models', subheading: 'Op Registry'}\n */\nexport function deregisterOp(name) {\n delete CUSTOM_OPS[name];\n}\n//# sourceMappingURL=register.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { clone, util } from '@tensorflow/tfjs-core';\nexport function getParamValue(paramName, node, tensorMap, context, resourceManager) {\n const inputParam = node.inputParams[paramName];\n if (inputParam && inputParam.inputIndexStart !== undefined) {\n const start = inputParam.inputIndexStart;\n const end = inputParam.inputIndexEnd === 0 ?\n undefined :\n (inputParam.inputIndexEnd === undefined ? 
start + 1 :\n inputParam.inputIndexEnd);\n if (inputParam.type === 'tensor') {\n return getTensor(node.inputNames[inputParam.inputIndexStart], tensorMap, context, resourceManager);\n }\n if (inputParam.type === 'tensors') {\n const inputs = node.inputNames.slice(start, end);\n return inputs.map(name => getTensor(name, tensorMap, context, resourceManager));\n }\n const tensor = getTensor(node.inputNames.slice(start)[0], tensorMap, context, resourceManager);\n const data = tensor.dataSync();\n return inputParam.type === 'number' ?\n data[0] :\n util.toNestedArray(tensor.shape, data);\n }\n const attrParam = node.attrParams[paramName];\n return attrParam && attrParam.value;\n}\n/**\n * Retrieve the tensor from tensorsMap based on input name.\n * @param name Node input name\n * @param tensorsMap Tensors map keyed by the node\n * @param context contains tensors and information for running the current node.\n * @param resourceManager Optional. Contains global resources of the model.\n */\nexport function getTensor(name, tensorsMap, context, resourceManager) {\n const [nodeName, index] = parseNodeName(name);\n if (resourceManager != null) {\n const tensor = resourceManager.getHashTableHandleByName(nodeName);\n if (tensor != null) {\n return tensor;\n }\n }\n const contextId = context.currentContextIds.find(contextId => {\n return !!tensorsMap[getNodeNameWithContextId(nodeName, contextId)];\n });\n return contextId !== undefined ?\n tensorsMap[getNodeNameWithContextId(nodeName, contextId)][index] :\n undefined;\n}\n/**\n * Retrieve the tensors based on input name for current context.\n * @param name Node input name\n * @param tensorsMap Tensors map keyed by the node\n */\nexport function getTensorsForCurrentContenxt(name, tensorsMap, context) {\n return tensorsMap[getNodeNameWithContextId(name, context.currentContextId)];\n}\n/**\n * Returns the node name and index from the Node input name.\n * @param inputName The input name of the node, in format of\n * node_name:output_index, i.e. MatMul:0, if the output_index is not set, it is\n * default to 0.\n */\nexport function getNodeNameAndIndex(inputName, context) {\n const [nodeName, index] = parseNodeName(inputName);\n return [\n getNodeNameWithContextId(nodeName, context && context.currentContextId),\n index\n ];\n}\nfunction getNodeNameWithContextId(name, contextId) {\n return !!contextId ? `${name}-${contextId}` : name;\n}\nexport function parseNodeName(name) {\n const parts = name.split(':');\n if (parts.length === 1) {\n return [name, 0];\n }\n const nodeName = parts[0];\n return [nodeName, Number(parts[parts.length - 1])];\n}\nexport function split(arr, size) {\n const res = [];\n for (let i = 0; i < arr.length; i += size) {\n res.push(arr.slice(i, i + size));\n }\n return res;\n}\nexport function getPadding(node, tensorMap, context) {\n let pad = getParamValue('pad', node, tensorMap, context);\n if (pad === 'explicit') {\n // This is 1d array, we need to convert it to 2d array\n pad = getParamValue('explicitPaddings', node, tensorMap, context);\n const explicitPadding = [[0, 0], [0, 0], [0, 0], [0, 0]];\n for (let i = 0; i < 4; i++) {\n explicitPadding[i][0] = pad[i * 2];\n explicitPadding[i][1] = pad[i * 2 + 1];\n }\n return explicitPadding;\n }\n return pad;\n}\n/**\n * Reuse the tensor if it is marked as keep, otherwise clone the tensor to\n * avoid disposal. 
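The converter utilities above (`parseNodeName`, `getNodeNameAndIndex`, `split`) are internal helpers with no usage examples. The following standalone sketch simply mirrors `parseNodeName` and `split` as shown in the source to illustrate the `nodeName:outputIndex` convention; it is an illustrative copy, not part of the bundled module.

```js
// Standalone mirrors of the internal helpers shown above, for illustration only.
function parseNodeName(name) {
  const parts = name.split(':');
  if (parts.length === 1) return [name, 0];           // default output index is 0
  return [parts[0], Number(parts[parts.length - 1])]; // e.g. 'MatMul:0' -> ['MatMul', 0]
}

function split(arr, size) {
  const res = [];
  for (let i = 0; i < arr.length; i += size) res.push(arr.slice(i, i + size));
  return res;
}

console.log(parseNodeName('MatMul'));    // ['MatMul', 0]
console.log(parseNodeName('MatMul:1'));  // ['MatMul', 1]
console.log(split([1, 2, 3, 4, 5], 2));  // [[1, 2], [3, 4], [5]]
```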
This is important for TensorArray and TensorList ops, since\n * internally they use a tensor as the id for TensorArray and TensorList, and\n * to simplify lookup, they also use Tensor.id as the key to the internal map.\n * These id tensors have been marked as kept in the backend, we need avoid clone\n * them in order to create new Tensor.id.\n * @param tensor\n */\nexport function cloneTensor(tensor) {\n return tensor.kept ? tensor : clone(tensor);\n}\n//# sourceMappingURL=utils.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'Add',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'AddV2',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'AddN',\n 'category': 'arithmetic',\n 'inputs': [{ 'start': 0, 'end': 0, 'name': 'tensors', 'type': 'tensors' }]\n },\n {\n 'tfOpName': 'BiasAdd',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Sub',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'RealDiv',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Div',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'DivNoNan',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'FloorDiv',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 
'tfOpName': 'Mul',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Maximum',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' }\n ]\n },\n {\n 'tfOpName': 'Minimum',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' }\n ]\n },\n {\n 'tfOpName': 'Pow',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'SquaredDifference',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Mod',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'FloorMod',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n }\n];\n//# sourceMappingURL=arithmetic.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'Abs',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Acos',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Asin',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Atan',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Atan2',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'y', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Ceil',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'ClipByValue',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'clip_value_min', 'name': 'clipValueMin', 'type': 'number' },\n { 'tfName': 'clip_value_max', 'name': 'clipValueMax', 'type': 'number' }\n ]\n },\n {\n 'tfOpName': 'Complex',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'real', 'type': 'tensor' },\n { 'start': 1, 'name': 'imag', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'ComplexAbs',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Cos',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Cosh',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Elu',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 
'Exp',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Floor',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Log',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Imag',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }, {\n 'tfName': 'Tout',\n 'name': 'outputType',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Neg',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Real',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }, {\n 'tfName': 'Tout',\n 'name': 'outputType',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Prelu',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'alpha', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Relu',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Relu6',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }, {\n 'tfName': 'clipValueMin',\n 'name': 'clipValueMin',\n 'type': 'number',\n 'defaultValue': 0\n },\n {\n 'tfName': 'clipValueMax',\n 'name': 'clipValueMax',\n 'type': 'number',\n 'defaultValue': 6\n }\n ]\n },\n {\n 'tfOpName': 'Selu',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Sigmoid',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Sin',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Sinh',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Sqrt',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Rsqrt',\n 'category': 
'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Square',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Tan',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Tanh',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Sign',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Round',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Expm1',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Log1p',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Reciprocal',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Softplus',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Asinh',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Acosh',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Atanh',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Erf',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Prod',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axes', 'type': 'number[]' },\n ],\n 'attrs': [\n {\n 'tfName': 'keep_dims',\n 'name': 'keepDims',\n 'type': 'bool',\n 'notSupported': true\n },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'LeakyRelu',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n 
],\n 'attrs': [\n {\n 'tfName': 'alpha',\n 'name': 'alpha',\n 'type': 'number',\n 'defaultValue': 0.2\n },\n {\n 'tfName': 'T',\n 'name': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n }\n];\n//# sourceMappingURL=basic_math.js.map", "export const json = [\n {\n 'tfOpName': 'LoopCond',\n 'category': 'control',\n 'inputs': [{ 'start': 0, 'name': 'pred', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'Switch',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'data', 'type': 'tensor' },\n { 'start': 1, 'name': 'pred', 'type': 'tensor' }\n ]\n },\n {\n 'tfOpName': 'Merge',\n 'category': 'control',\n 'inputs': [{ 'start': 0, 'end': 0, 'name': 'tensors', 'type': 'tensors' }]\n },\n {\n 'tfOpName': 'Enter',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensor', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true },\n { 'tfName': 'frame_name', 'name': 'frameName', 'type': 'string' },\n { 'tfName': 'is_constant', 'name': 'isConstant', 'type': 'bool' }\n ]\n },\n {\n 'tfOpName': 'Exit',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensor', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'NextIteration',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensor', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'TensorArrayV3',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'size', 'type': 'number' },\n ],\n 'attrs': [\n { 'tfName': 'dtype', 'name': 'dtype', 'type': 'dtype' },\n { 'tfName': 'element_shape', 'name': 'elementShape', 'type': 'shape' },\n { 'tfName': 'dynamic_size', 'name': 'dynamicSize', 'type': 'bool' },\n { 'tfName': 'clear_after_read', 'name': 'clearAfterRead', 'type': 'bool' },\n {\n 'tfName': 'identical_element_shapes',\n 'name': 'identicalElementShapes',\n 'type': 'bool'\n },\n { 'tfName': 'tensor_array_name', 'name': 'name', 'type': 'string' }\n ]\n },\n {\n 'tfOpName': 'TensorArrayWriteV3',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorArrayId', 'type': 'tensor' },\n { 'start': 1, 'name': 'index', 'type': 'number' },\n { 'start': 2, 'name': 'tensor', 'type': 'tensor' },\n { 'start': 3, 'name': 'flowIn', 'type': 'number' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'TensorArrayReadV3',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorArrayId', 'type': 'tensor' },\n { 'start': 1, 'name': 'index', 'type': 'number' },\n { 'start': 2, 'name': 'flowIn', 'type': 'number' },\n ],\n 'attrs': [{\n 'tfName': 'dtype',\n 'name': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }]\n },\n {\n 'tfOpName': 'TensorArrayGatherV3',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorArrayId', 'type': 'tensor' },\n { 'start': 1, 'name': 'indices', 'type': 'number[]' },\n { 'start': 2, 'name': 'flowIn', 'type': 'number' },\n ],\n 'attrs': [\n { 'tfName': 'dtype', 'name': 'dtype', 'type': 'dtype' },\n { 'tfName': 'element_shape', 'name': 'elementShape', 'type': 'shape' }\n ]\n },\n {\n 'tfOpName': 'TensorArrayScatterV3',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorArrayId', 'type': 'tensor' },\n { 'start': 1, 'name': 'indices', 'type': 'number[]' },\n { 'start': 2, 'name': 'tensor', 'type': 'tensor' },\n 
{ 'start': 3, 'name': 'flowIn', 'type': 'number' },\n ],\n 'attrs': [{ 'tfName': 'T', 'name': 'dtype', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorArrayConcatV3',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorArrayId', 'type': 'tensor' },\n { 'start': 1, 'name': 'flowIn', 'type': 'number' },\n ],\n 'attrs': [\n { 'tfName': 'dtype', 'name': 'dtype', 'type': 'dtype' }, {\n 'tfName': 'element_shape_except0',\n 'name': 'elementShapeExcept0',\n 'type': 'shape',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'TensorArraySplitV3',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorArrayId', 'type': 'tensor' },\n { 'start': 1, 'name': 'tensor', 'type': 'tensor' },\n { 'start': 2, 'name': 'lengths', 'type': 'number[]' },\n { 'start': 3, 'name': 'flowIn', 'type': 'number' },\n ],\n 'attrs': [{ 'tfName': 'T', 'name': 'dtype', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorArraySizeV3',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorArrayId', 'type': 'tensor' },\n { 'start': 1, 'name': 'flowIn', 'type': 'number' }\n ]\n },\n {\n 'tfOpName': 'TensorArrayCloseV3',\n 'category': 'control',\n 'inputs': [{ 'start': 0, 'name': 'tensorArrayId', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'StatelessIf',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'cond', 'type': 'tensor' },\n { 'start': 1, 'end': 0, 'name': 'args', 'type': 'tensors' }\n ],\n 'attrs': [\n { 'tfName': 'then_branch', 'name': 'thenBranch', 'type': 'func' },\n { 'tfName': 'else_branch', 'name': 'elseBranch', 'type': 'func' }\n ]\n },\n {\n 'tfOpName': 'If',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'cond', 'type': 'tensor' },\n { 'start': 1, 'end': 0, 'name': 'args', 'type': 'tensors' }\n ],\n 'attrs': [\n { 'tfName': 'then_branch', 'name': 'thenBranch', 'type': 'func' },\n { 'tfName': 'else_branch', 'name': 'elseBranch', 'type': 'func' }\n ]\n },\n {\n 'tfOpName': 'StatelessWhile',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'end': 0, 'name': 'args', 'type': 'tensors' },\n ],\n 'attrs': [\n { 'tfName': 'cond', 'name': 'cond', 'type': 'func' },\n { 'tfName': 'body', 'name': 'body', 'type': 'func' }\n ]\n },\n {\n 'tfOpName': 'While',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'end': 0, 'name': 'args', 'type': 'tensors' },\n ],\n 'attrs': [\n { 'tfName': 'cond', 'name': 'cond', 'type': 'func' },\n { 'tfName': 'body', 'name': 'body', 'type': 'func' }\n ]\n },\n {\n 'tfOpName': 'TensorListScatter',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensor', 'type': 'tensor' },\n { 'start': 1, 'name': 'indices', 'type': 'number[]' },\n { 'start': 2, 'name': 'elementShape', 'type': 'shape' }\n ],\n 'attrs': [{ 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorListScatterV2',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensor', 'type': 'tensor' },\n { 'start': 1, 'name': 'indices', 'type': 'number[]' },\n { 'start': 2, 'name': 'elementShape', 'type': 'shape' },\n { 'start': 3, 'name': 'numElements', 'type': 'number' },\n ],\n 'attrs': [{ 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorListGather',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorListId', 'type': 'tensor' },\n { 'start': 1, 'name': 'indices', 'type': 'number[]' },\n { 'start': 2, 'name': 'elementShape', 'type': 'shape' },\n ],\n 'attrs': [{ 'tfName': 'element_dtype', 'name': 'elementDType', 
'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorListGetItem',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorListId', 'type': 'tensor' },\n { 'start': 1, 'name': 'index', 'type': 'number' },\n { 'start': 2, 'name': 'elementShape', 'type': 'shape' },\n ],\n 'attrs': [{ 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorListSetItem',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorListId', 'type': 'tensor' },\n { 'start': 1, 'name': 'index', 'type': 'number' },\n { 'start': 2, 'name': 'tensor', 'type': 'tensor' },\n ],\n 'attrs': [{ 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorListReserve',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'elementShape', 'type': 'shape' },\n { 'start': 1, 'name': 'numElements', 'type': 'number' },\n ],\n 'attrs': [{ 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorListFromTensor',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensor', 'type': 'tensor' },\n { 'start': 1, 'name': 'elementShape', 'type': 'shape' }\n ],\n 'attrs': [{ 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorListStack',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorListId', 'type': 'tensor' },\n { 'start': 1, 'name': 'elementShape', 'type': 'shape' },\n ],\n 'attrs': [\n { 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' },\n { 'tfName': 'num_elements', 'name': 'numElements', 'type': 'dtype' }\n ]\n },\n {\n 'tfOpName': 'TensorListSplit',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensor', 'type': 'tensor' },\n { 'start': 1, 'name': 'elementShape', 'type': 'shape' },\n { 'start': 2, 'name': 'lengths', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorListConcat',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorListId', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'element_shape', 'name': 'elementShape', 'type': 'shape' },\n { 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' }\n ]\n },\n {\n 'tfOpName': 'TensorListPopBack',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorListId', 'type': 'tensor' },\n { 'start': 1, 'name': 'elementShape', 'type': 'shape' },\n ],\n 'attrs': [{ 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorListPushBack',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorListId', 'type': 'tensor' },\n { 'start': 1, 'name': 'tensor', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' }\n ]\n }\n];\n//# sourceMappingURL=control.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'AvgPool',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' }, {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'notSupported': true\n },\n { 'tfName': 'ksize', 'name': 'kernelSize', 'type': 'number[]' },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'MaxPool',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' }, {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'notSupported': true\n },\n { 'tfName': 'ksize', 'name': 'kernelSize', 'type': 'number[]' },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'MaxPoolWithArgmax',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' },\n { 'tfName': 'ksize', 'name': 'kernelSize', 'type': 'number[]' }, {\n 'tfName': 'include_batch_in_index',\n 'name': 'includeBatchInIndex',\n 'type': 'bool'\n },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'AvgPool3D',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' }, {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'notSupported': true\n },\n { 'tfName': 'ksize', 'name': 'kernelSize', 'type': 'number[]' },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'MaxPool3D',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' }, {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'notSupported': true\n },\n { 'tfName': 'ksize', 'name': 'kernelSize', 'type': 'number[]' },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Conv1D',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'filter', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'stride', 'name': 'stride', 'type': 'number' },\n { 
'tfName': 'padding', 'name': 'pad', 'type': 'string' }, {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'defaultValue': 'NWC'\n },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }, {\n 'tfName': 'dilation',\n 'name': 'dilation',\n 'type': 'number',\n 'defaultValue': 1\n }\n ]\n },\n {\n 'tfOpName': 'Conv2D',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'filter', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true },\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' },\n { 'tfName': 'useCudnnOnGpu', 'name': 'useCudnnOnGpu', 'type': 'bool' }, {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'defaultValue': 'NHWC'\n },\n {\n 'tfName': 'explicit_paddings',\n 'name': 'explicitPaddings',\n 'type': 'number[]',\n 'defaultValue': []\n },\n { 'tfName': 'dilations', 'name': 'dilations', 'type': 'number[]' }\n ]\n },\n {\n 'tfOpName': '_FusedConv2D',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'filter', 'type': 'tensor' },\n { 'start': 2, end: 0, 'name': 'args', 'type': 'tensors' },\n ],\n 'attrs': [\n { 'tfName': 'num_args', 'name': 'numArgs', 'type': 'number' },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true },\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' },\n {\n 'tfName': 'explicit_paddings',\n 'name': 'explicitPaddings',\n 'type': 'number[]',\n 'defaultValue': []\n },\n {\n 'tfName': 'use_cudnn_on_gpu',\n 'name': 'useCudnnOnGpu',\n 'type': 'bool',\n 'defaultValue': true\n },\n {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'defaultValue': 'NHWC'\n },\n {\n 'tfName': 'dilations',\n 'name': 'dilations',\n 'type': 'number[]',\n 'defaultValue': [1, 1, 1, 1]\n },\n {\n 'tfName': 'fused_ops',\n 'name': 'fusedOps',\n 'type': 'string[]',\n 'defaultValue': []\n },\n {\n 'tfName': 'epsilon',\n 'name': 'epsilon',\n 'type': 'number',\n 'defaultValue': 0.0001\n },\n ]\n },\n {\n 'tfOpName': 'Conv2DBackpropInput',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 2, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'filter', 'type': 'tensor' },\n { 'start': 0, 'name': 'outputShape', 'type': 'number[]' },\n ],\n 'attrs': [\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' },\n {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'notSupported': true\n },\n {\n 'tfName': 'explicit_paddings',\n 'name': 'explicitPaddings',\n 'type': 'number[]',\n 'defaultValue': []\n },\n ]\n },\n {\n 'tfOpName': 'DepthwiseConv2d',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'input', 'type': 'tensor' },\n { 'start': 1, 'name': 'filter', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' }, {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'defaultValue': 'NHWC'\n },\n {\n 'tfName': 'explicit_paddings',\n 'name': 'explicitPaddings',\n 'type': 'number[]',\n 'defaultValue': []\n },\n { 'tfName': 'dilations', 'name': 'dilations', 'type': 'number[]' }\n ]\n },\n {\n 'tfOpName': 
'DepthwiseConv2dNative',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'input', 'type': 'tensor' },\n { 'start': 1, 'name': 'filter', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' }, {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'defaultValue': 'NHWC'\n },\n {\n 'tfName': 'explicit_paddings',\n 'name': 'explicitPaddings',\n 'type': 'number[]',\n 'defaultValue': []\n },\n { 'tfName': 'dilations', 'name': 'dilations', 'type': 'number[]' }\n ]\n },\n {\n 'tfOpName': 'FusedDepthwiseConv2dNative',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'filter', 'type': 'tensor' },\n { 'start': 2, end: 0, 'name': 'args', 'type': 'tensors' },\n ],\n 'attrs': [\n { 'tfName': 'num_args', 'name': 'numArgs', 'type': 'number' },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true },\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' }, {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'defaultValue': 'NHWC'\n },\n {\n 'tfName': 'dilations',\n 'name': 'dilations',\n 'type': 'number[]',\n 'defaultValue': [1, 1, 1, 1]\n },\n {\n 'tfName': 'fused_ops',\n 'name': 'fusedOps',\n 'type': 'string[]',\n 'defaultValue': []\n }\n ]\n },\n {\n 'tfOpName': 'Conv3D',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'filter', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' }, {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'defaultValue': 'NHWC'\n },\n { 'tfName': 'dilations', 'name': 'dilations', 'type': 'number[]' }\n ],\n },\n {\n 'tfOpName': 'Dilation2D',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'filter', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'rates', 'name': 'dilations', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' }\n ]\n }\n];\n//# sourceMappingURL=convolution.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'Fill',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'shape', 'type': 'number[]' },\n { 'start': 1, 'name': 'value', 'type': 'number' },\n ],\n 'attrs': [{ 'tfName': 'T', 'name': 'dtype', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'LinSpace',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'start', 'type': 'number' },\n { 'start': 1, 'name': 'stop', 'type': 'number' },\n { 'start': 2, 'name': 'num', 'type': 'number' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'OneHot',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'indices', 'type': 'tensor' },\n { 'start': 1, 'name': 'depth', 'type': 'number' },\n { 'start': 2, 'name': 'onValue', 'type': 'number', 'defaultValue': 1 },\n { 'start': 3, 'name': 'offValue', 'type': 'number', 'defaultValue': 0 },\n ],\n 'attrs': [\n {\n 'tfName': 'axis',\n 'name': 'axis',\n 'type': 'number',\n 'notSupported': true\n },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Ones',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'shape', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'T', 'name': 'dtype', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'OnesLike',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [{ 'tfName': 'dtype', 'name': 'dtype', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'RandomUniform',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'shape', 'type': 'number[]' },\n ],\n 'attrs': [\n {\n 'tfName': 'minval',\n 'name': 'minval',\n 'type': 'number',\n 'defaultValue': 0\n },\n {\n 'tfName': 'maxval',\n 'name': 'maxval',\n 'type': 'number',\n 'defaultValue': 1\n },\n { 'tfName': 'dtype', 'name': 'dtype', 'type': 'dtype' },\n { 'tfName': 'seed', 'name': 'seed', 'type': 'number', 'defaultValue': 0 }, {\n 'tfName': 'seed2',\n 'name': 'seed2',\n 'type': 'number',\n 'defaultValue': 0,\n 'notSupported': true\n },\n { 'tfName': 'T', 'name': 'T', 'type': 'number', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Range',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'start', 'type': 'number' },\n { 'start': 1, 'name': 'stop', 'type': 'number' },\n { 'start': 2, 'name': 'step', 'type': 'number', 'defaultValue': 0 },\n ],\n 'attrs': [{ 'tfName': 'Tidx', 'name': 'dtype', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TruncatedNormal',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'shape', 'type': 'number[]' },\n ],\n 'attrs': [\n {\n 'tfName': 'means',\n 'name': 'mean',\n 'type': 'number',\n 'defaultValue': 0.0\n },\n {\n 'tfName': 'stddev',\n 'name': 'stdDev',\n 'type': 'number',\n 'defaultValue': 1.0\n },\n { 'tfName': 'seed', 'name': 'seed', 'type': 'number' 
}, {\n 'tfName': 'seed2',\n 'name': 'seed2',\n 'type': 'number',\n 'defaultValue': 0,\n 'notSupported': true\n },\n { 'tfName': 'dtype', 'name': 'dtype', 'type': 'dtype' },\n { 'tfName': 'T', 'name': 'T', 'type': 'number', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Zeros',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'shape', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'T', 'name': 'dtype', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'ZerosLike',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [{ 'tfName': 'T', 'name': 'dtype', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'Multinomial',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'logits', 'type': 'tensor' },\n { 'start': 1, 'name': 'numSamples', 'type': 'number' },\n ],\n 'attrs': [\n { 'tfName': 'seed', 'name': 'seed', 'type': 'number' },\n { 'tfName': 'seed2', 'name': 'seed2', 'type': 'number' },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype' },\n { 'tfName': 'output_dtype', 'name': 'output_dtype', 'type': 'dtype' }\n ]\n }\n];\n//# sourceMappingURL=creation.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'NonMaxSuppressionV2',\n 'category': 'dynamic',\n 'inputs': [\n { 'start': 0, 'name': 'boxes', 'type': 'tensor' },\n { 'start': 1, 'name': 'scores', 'type': 'tensor' },\n { 'start': 2, 'name': 'maxOutputSize', 'type': 'number' },\n { 'start': 3, 'name': 'iouThreshold', 'type': 'number' }\n ]\n },\n {\n 'tfOpName': 'NonMaxSuppressionV3',\n 'category': 'dynamic',\n 'inputs': [\n { 'start': 0, 'name': 'boxes', 'type': 'tensor' },\n { 'start': 1, 'name': 'scores', 'type': 'tensor' },\n { 'start': 2, 'name': 'maxOutputSize', 'type': 'number' },\n { 'start': 3, 'name': 'iouThreshold', 'type': 'number' },\n { 'start': 4, 'name': 'scoreThreshold', 'type': 'number' }\n ]\n },\n {\n 'tfOpName': 'NonMaxSuppressionV4',\n 'category': 'dynamic',\n 'inputs': [\n { 'start': 0, 'name': 'boxes', 'type': 'tensor' },\n { 'start': 1, 'name': 'scores', 'type': 'tensor' },\n { 'start': 2, 'name': 'maxOutputSize', 'type': 'number' },\n { 'start': 3, 'name': 'iouThreshold', 'type': 'number' },\n { 'start': 4, 'name': 'scoreThreshold', 'type': 'number' }\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }, {\n 'tfName': 'T_threshold',\n 'name': 'threshold',\n 'type': 'dtype',\n 'notSupported': true\n },\n {\n 'tfName': 'pad_to_max_output_size',\n 'name': 'padToMaxOutputSize',\n 'type': 'bool'\n }\n ]\n },\n {\n 'tfOpName': 'NonMaxSuppressionV5',\n 'category': 'dynamic',\n 'inputs': [\n { 'start': 0, 'name': 'boxes', 'type': 'tensor' },\n { 'start': 1, 'name': 'scores', 'type': 'tensor' },\n { 'start': 2, 'name': 'maxOutputSize', 'type': 'number' },\n { 'start': 3, 'name': 'iouThreshold', 'type': 
'number' },\n { 'start': 4, 'name': 'scoreThreshold', 'type': 'number' },\n { 'start': 5, 'name': 'softNmsSigma', 'type': 'number' }\n ]\n },\n {\n 'tfOpName': 'Where',\n 'category': 'dynamic',\n 'inputs': [\n { 'start': 0, 'name': 'condition', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'ListDiff',\n 'category': 'dynamic',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'y', 'type': 'tensor' },\n ],\n 'attrs': [{\n 'tfName': 'T',\n 'name': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }]\n }\n];\n//# sourceMappingURL=dynamic.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'TopKV2',\n 'category': 'evaluation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'k', 'type': 'number' },\n ],\n 'attrs': [{ 'tfName': 'sorted', 'name': 'sorted', 'type': 'bool' }]\n },\n {\n 'tfOpName': 'Unique',\n 'category': 'evaluation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n },\n {\n 'tfOpName': 'UniqueV2',\n 'category': 'evaluation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number' },\n ],\n },\n];\n//# sourceMappingURL=evaluation.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'PlaceholderWithDefault',\n 'category': 'graph',\n 'inputs': [\n { 'start': 0, 'name': 'default', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'shape', 'name': 'shape', 'type': 'shape' },\n { 'tfName': 'dtype', 'name': 'dtype', 'type': 'dtype' }\n ]\n },\n {\n 'tfOpName': 'Placeholder',\n 'category': 'graph',\n 'attrs': [\n { 'tfName': 'shape', 'name': 'shape', 'type': 'shape' },\n { 'tfName': 'dtype', 'name': 'dtype', 'type': 'dtype' }\n ]\n },\n { 'tfOpName': 'Const', 'category': 'graph' }, {\n 'tfOpName': 'Identity',\n 'category': 'graph',\n 'inputs': [{ 'start': 0, 'name': 'x', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'IdentityN',\n 'category': 'graph',\n 'inputs': [{ 'start': 0, 'end': 0, 'name': 'x', 'type': 'tensors' }]\n },\n {\n 'tfOpName': 'Snapshot',\n 'category': 'graph',\n 'inputs': [{ 'start': 0, 'name': 'x', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'Rank',\n 'category': 'graph',\n 'inputs': [{ 'start': 0, 'name': 'x', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'Size',\n 'category': 'graph',\n 'inputs': [{ 'start': 0, 'name': 'x', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'Shape',\n 'category': 'graph',\n 'inputs': [{ 'start': 0, 'name': 'x', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'ShapeN',\n 'category': 'graph',\n 'inputs': [{ 'start': 0, 'end': 0, 'name': 'x', 'type': 'tensors' }]\n },\n {\n 'tfOpName': 'Print',\n 'category': 'graph',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'data', 'type': 'tensors' },\n ],\n 'attrs': [\n { 'tfName': 'message', 'name': 'message', 'type': 'string' }, {\n 'tfName': 'first_n',\n 'name': 'firstN',\n 'type': 'number',\n 'notSupported': true\n },\n {\n 'tfName': 'summarize',\n 'name': 'summarize',\n 'type': 'number',\n 'defaultValue': 3\n }\n ]\n },\n { 'tfOpName': 'NoOp', 'category': 'graph', 'inputs': [] }, {\n 'tfOpName': 'StopGradient',\n 'category': 'graph',\n 'inputs': [{ 'start': 0, 'name': 'x', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'FakeQuantWithMinMaxVars',\n 'category': 'graph',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'min', 'name': 'min', 'type': 'number' },\n { 'tfName': 'max', 'name': 'max', 'type': 'number' }\n ]\n }\n];\n//# sourceMappingURL=graph.js.map", "export const json = [\n {\n 'tfOpName': 'HashTable',\n 'category': 'hash_table',\n 'inputs': [],\n 'attrs': [\n { 'tfName': 'shared_name', 'name': 'sharedName', 'type': 'string' },\n {\n 'tfName': 'use_node_name_sharing',\n 'name': 'useNodeNameSharing',\n 'type': 'bool'\n },\n { 'tfName': 'key_dtype', 'name': 'keyDType', 'type': 'dtype' },\n { 'tfName': 'value_dtype', 'name': 'valueDType', 'type': 'dtype' },\n ]\n },\n {\n 'tfOpName': 'HashTableV2',\n 'category': 'hash_table',\n 'inputs': [],\n 'attrs': [\n { 'tfName': 'shared_name', 'name': 'sharedName', 
'type': 'string' },\n {\n 'tfName': 'use_node_name_sharing',\n 'name': 'useNodeNameSharing',\n 'type': 'bool'\n },\n { 'tfName': 'key_dtype', 'name': 'keyDType', 'type': 'dtype' },\n { 'tfName': 'value_dtype', 'name': 'valueDType', 'type': 'dtype' },\n ]\n },\n {\n 'tfOpName': 'LookupTableImport',\n 'category': 'hash_table',\n 'inputs': [\n { 'start': 0, 'name': 'tableHandle', 'type': 'tensor' },\n { 'start': 1, 'name': 'keys', 'type': 'tensor' },\n { 'start': 2, 'name': 'values', 'type': 'tensor' }\n ],\n 'attrs': [\n { 'tfName': 'Tin', 'name': 'tIn', 'type': 'dtype', 'notSupported': true }, {\n 'tfName': 'Tout',\n 'name': 'tOut',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'LookupTableImportV2',\n 'category': 'hash_table',\n 'inputs': [\n { 'start': 0, 'name': 'tableHandle', 'type': 'tensor' },\n { 'start': 1, 'name': 'keys', 'type': 'tensor' },\n { 'start': 2, 'name': 'values', 'type': 'tensor' }\n ],\n 'attrs': [\n { 'tfName': 'Tin', 'name': 'tIn', 'type': 'dtype', 'notSupported': true }, {\n 'tfName': 'Tout',\n 'name': 'tOut',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'LookupTableFind',\n 'category': 'hash_table',\n 'inputs': [\n { 'start': 0, 'name': 'tableHandle', 'type': 'tensor' },\n { 'start': 1, 'name': 'keys', 'type': 'tensor' },\n { 'start': 2, 'name': 'defaultValue', 'type': 'tensor' }\n ],\n 'attrs': [\n { 'tfName': 'Tin', 'name': 'tIn', 'type': 'dtype', 'notSupported': true }, {\n 'tfName': 'Tout',\n 'name': 'tOut',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'LookupTableFindV2',\n 'category': 'hash_table',\n 'inputs': [\n { 'start': 0, 'name': 'tableHandle', 'type': 'tensor' },\n { 'start': 1, 'name': 'keys', 'type': 'tensor' },\n { 'start': 2, 'name': 'defaultValue', 'type': 'tensor' }\n ],\n 'attrs': [\n { 'tfName': 'Tin', 'name': 'tIn', 'type': 'dtype', 'notSupported': true }, {\n 'tfName': 'Tout',\n 'name': 'tOut',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n }\n];\n//# sourceMappingURL=hash_table.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'ResizeBilinear',\n 'category': 'image',\n 'inputs': [\n { 'start': 0, 'name': 'images', 'type': 'tensor' },\n { 'start': 1, 'name': 'size', 'type': 'number[]' },\n ],\n 'attrs': [\n { 'tfName': 'align_corners', 'name': 'alignCorners', 'type': 'bool' },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'ResizeNearestNeighbor',\n 'category': 'image',\n 'inputs': [\n { 'start': 0, 'name': 'images', 'type': 'tensor' },\n { 'start': 1, 'name': 'size', 'type': 'number[]' },\n ],\n 'attrs': [\n { 'tfName': 'align_corners', 'name': 'alignCorners', 'type': 'bool' },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'CropAndResize',\n 'category': 'image',\n 'inputs': [\n { 'start': 0, 'name': 'image', 'type': 'tensor' },\n { 'start': 1, 'name': 'boxes', 'type': 'tensor' },\n { 'start': 2, 'name': 'boxInd', 'type': 'tensor' },\n { 'start': 3, 'name': 'cropSize', 'type': 'number[]' },\n ],\n 'attrs': [\n { 'tfName': 'method', 'name': 'method', 'type': 'string' }, {\n 'tfName': 'extrapolation_value',\n 'name': 'extrapolationValue',\n 'type': 'number'\n }\n ]\n }\n];\n//# sourceMappingURL=image.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'Equal',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'NotEqual',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Greater',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'GreaterEqual',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Less',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'LessEqual',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'LogicalAnd',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'LogicalNot',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'LogicalOr',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Select',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'condition', 'type': 'tensor' },\n { 'start': 1, 'name': 'a', 'type': 'tensor' },\n { 'start': 2, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'SelectV2',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'condition', 'type': 'tensor' },\n { 'start': 1, 'name': 
'a', 'type': 'tensor' },\n { 'start': 2, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [{\n 'tfName': 'T',\n 'name': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }]\n }\n];\n//# sourceMappingURL=logical.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': '_FusedMatMul',\n 'category': 'matrices',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n { 'start': 2, end: 0, 'name': 'args', 'type': 'tensors' },\n ],\n 'attrs': [\n { 'tfName': 'num_args', 'name': 'numArgs', 'type': 'number' }, {\n 'tfName': 'fused_ops',\n 'name': 'fusedOps',\n 'type': 'string[]',\n 'defaultValue': []\n },\n {\n 'tfName': 'epsilon',\n 'name': 'epsilon',\n 'type': 'number',\n 'defaultValue': 0.0001\n },\n {\n 'tfName': 'transpose_a',\n 'name': 'transposeA',\n 'type': 'bool',\n 'defaultValue': false\n },\n {\n 'tfName': 'transpose_b',\n 'name': 'transposeB',\n 'type': 'bool',\n 'defaultValue': false\n },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'MatMul',\n 'category': 'matrices',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n {\n 'tfName': 'transpose_a',\n 'name': 'transposeA',\n 'type': 'bool',\n 'defaultValue': false\n },\n {\n 'tfName': 'transpose_b',\n 'name': 'transposeB',\n 'type': 'bool',\n 'defaultValue': false\n },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'BatchMatMul',\n 'category': 'matrices',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n {\n 'tfName': 'adj_x',\n 'name': 'transposeA',\n 'type': 'bool',\n 'defaultValue': false\n },\n {\n 'tfName': 'adj_y',\n 'name': 'transposeB',\n 'type': 'bool',\n 'defaultValue': false\n },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'BatchMatMulV2',\n 'category': 'matrices',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n {\n 'tfName': 'adj_x',\n 'name': 'transposeA',\n 'type': 'bool',\n 'defaultValue': false\n },\n {\n 'tfName': 'adj_y',\n 'name': 'transposeB',\n 'type': 'bool',\n 'defaultValue': false\n },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Transpose',\n 'category': 'matrices',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'perm', 'type': 'number[]' },\n ],\n 'attrs': [{\n 'tfName': 'T',\n 'name': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }]\n }\n];\n//# sourceMappingURL=matrices.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'FusedBatchNorm',\n 'category': 'normalization',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'scale', 'type': 'tensor' },\n { 'start': 2, 'name': 'offset', 'type': 'tensor' },\n { 'start': 3, 'name': 'mean', 'type': 'tensor' },\n { 'start': 4, 'name': 'variance', 'type': 'tensor' },\n ],\n 'attrs': [\n {\n 'tfName': 'epsilon',\n 'name': 'epsilon',\n 'type': 'number',\n 'defaultValue': 0.001\n },\n {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'FusedBatchNormV2',\n 'category': 'normalization',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'scale', 'type': 'tensor' },\n { 'start': 2, 'name': 'offset', 'type': 'tensor' },\n { 'start': 3, 'name': 'mean', 'type': 'tensor' },\n { 'start': 4, 'name': 'variance', 'type': 'tensor' },\n ],\n 'attrs': [\n {\n 'tfName': 'epsilon',\n 'name': 'epsilon',\n 'type': 'number',\n 'defaultValue': 0.001\n },\n {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'FusedBatchNormV3',\n 'category': 'normalization',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'scale', 'type': 'tensor' },\n { 'start': 2, 'name': 'offset', 'type': 'tensor' },\n { 'start': 3, 'name': 'mean', 'type': 'tensor' },\n { 'start': 4, 'name': 'variance', 'type': 'tensor' },\n ],\n 'attrs': [\n {\n 'tfName': 'epsilon',\n 'name': 'epsilon',\n 'type': 'number',\n 'defaultValue': 0.001\n },\n {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'LRN',\n 'category': 'normalization',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n {\n 'tfName': 'depth_radius',\n 'name': 'radius',\n 'type': 'number',\n 'defaultValue': 5\n },\n { 'tfName': 'bias', 'name': 'bias', 'type': 'number', 'defaultValue': 1.0 },\n {\n 'tfName': 'alpha',\n 'name': 'alpha',\n 'type': 'number',\n 'defaultValue': 1.0\n },\n {\n 'tfName': 'beta',\n 'name': 'beta',\n 'type': 'number',\n 'defaultValue': 0.5\n }\n ]\n },\n {\n 'tfOpName': 'Softmax',\n 'category': 'normalization',\n 'inputs': [{ 'start': 0, 'name': 'x', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'LogSoftmax',\n 'category': 'normalization',\n 'inputs': [{ 'start': 0, 'name': 'x', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'SparseToDense',\n 'category': 'normalization',\n 'inputs': [\n { 'start': 0, 'name': 'sparseIndices', 'type': 'tensor' },\n { 'start': 1, 'name': 'outputShape', 'type': 'number[]' },\n { 'start': 2, 'name': 'sparseValues', 'type': 'tensor' },\n { 'start': 3, 'name': 'defaultValue', 'type': 'tensor' },\n ],\n 'attrs': [{\n 'tfName': 'validate_indices',\n 'name': 
'validateIndices',\n 'type': 'bool',\n 'defaultValue': true,\n 'notSupported': true\n }]\n }\n];\n//# sourceMappingURL=normalization.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'Max',\n 'category': 'reduction',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'keep_dims', 'name': 'keepDims', 'type': 'bool' }]\n },\n {\n 'tfOpName': 'Mean',\n 'category': 'reduction',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'keep_dims', 'name': 'keepDims', 'type': 'bool' }]\n },\n {\n 'tfOpName': 'Min',\n 'category': 'reduction',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'keep_dims', 'name': 'keepDims', 'type': 'bool' }]\n },\n {\n 'tfOpName': 'Sum',\n 'category': 'reduction',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'keep_dims', 'name': 'keepDims', 'type': 'bool' }]\n },\n {\n 'tfOpName': 'All',\n 'category': 'reduction',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'keep_dims', 'name': 'keepDims', 'type': 'bool' }]\n },\n {\n 'tfOpName': 'Any',\n 'category': 'reduction',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'keep_dims', 'name': 'keepDims', 'type': 'bool' }]\n },\n {\n 'tfOpName': 'ArgMax',\n 'category': 'reduction',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number' }\n ]\n },\n {\n 'tfOpName': 'ArgMin',\n 'category': 'reduction',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number' }\n ]\n },\n {\n 'tfOpName': 'Prod',\n 'category': 'reduction',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'keep_dims', 'name': 'keepDims', 'type': 'bool' }]\n },\n {\n 'tfOpName': 'Cumsum',\n 'category': 'reduction',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number' },\n ],\n 'attrs': [\n { 'tfName': 'exclusive', 'name': 'exclusive', 'type': 'bool' },\n { 'tfName': 'reverse', 'name': 'reverse', 'type': 'bool' }\n ]\n }\n];\n//# sourceMappingURL=reduction.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'ConcatV2',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'end': -1, 'name': 'tensors', 'type': 'tensors' },\n { 'start': -1, 'name': 'axis', 'type': 'number' }\n ],\n 'attrs': [{ 'tfName': 'N', 'name': 'n', 'type': 'number', 'defaultValue': 2 }]\n },\n {\n 'tfOpName': 'Concat',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 1, 'end': 0, 'name': 'tensors', 'type': 'tensors' },\n { 'start': 0, 'name': 'axis', 'type': 'number' }\n ],\n 'attrs': [{ 'tfName': 'N', 'name': 'n', 'type': 'number', 'defaultValue': 2 }]\n },\n {\n 'tfOpName': 'GatherV2',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'indices', 'type': 'tensor' },\n { 'start': 2, 'name': 'axis', 'type': 'number', 'defaultValue': 0 }\n ]\n },\n {\n 'tfOpName': 'Gather',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'indices', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'axis', 'name': 'axis', 'type': 'number', 'defaultValue': 0 }, {\n 'tfName': 'validate_indices',\n 'name': 'validateIndices',\n 'type': 'bool',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Reverse',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'dims', 'type': 'bool', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'ReverseV2',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number[]' }\n ]\n },\n {\n 'tfOpName': 'Slice',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'begin', 'type': 'number[]' },\n { 'start': 2, 'name': 'size', 'type': 'number[]' }\n ]\n },\n {\n 'tfOpName': 'StridedSlice',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'begin', 'type': 'number[]' },\n { 'start': 2, 'name': 'end', 'type': 'number[]' },\n { 'start': 3, 'name': 'strides', 'type': 'number[]' },\n ],\n 'attrs': [\n {\n 'tfName': 'begin_mask',\n 'name': 'beginMask',\n 'type': 'number',\n 'defaultValue': 0\n },\n {\n 'tfName': 'end_mask',\n 'name': 'endMask',\n 'type': 'number',\n 'defaultValue': 0\n },\n {\n 'tfName': 'new_axis_mask',\n 'name': 'newAxisMask',\n 'type': 'number',\n 'defaultValue': 0\n },\n {\n 'tfName': 'ellipsis_mask',\n 'name': 'ellipsisMask',\n 'type': 'number',\n 'defaultValue': 0\n },\n {\n 'tfName': 'shrink_axis_mask',\n 'name': 'shrinkAxisMask',\n 'type': 'number',\n 'defaultValue': 0\n }\n ]\n },\n {\n 'tfOpName': 'Pack',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'end': 0, 'name': 'tensors', 'type': 'tensors' },\n ],\n 'attrs': [\n { 'tfName': 'axis', 'name': 'axis', 'type': 'number', 
'defaultValue': 0 }\n ]\n },\n {\n 'tfOpName': 'Unpack',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'tensor', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'axis', 'name': 'axis', 'type': 'number', 'defaultValue': 0 }, {\n 'tfName': 'num',\n 'name': 'num',\n 'type': 'number',\n 'defaultValue': 0,\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Tile',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'reps', 'type': 'number[]' }\n ]\n },\n {\n 'tfOpName': 'Split',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'axis', 'type': 'number', 'defaultValue': 0 },\n { 'start': 1, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [{\n 'tfName': 'num_split',\n 'name': 'numOrSizeSplits',\n 'type': 'number',\n 'defaultValue': 1\n }]\n },\n {\n 'tfOpName': 'SplitV',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'numOrSizeSplits', 'type': 'number[]' },\n { 'start': 2, 'name': 'axis', 'type': 'number', 'defaultValue': 0 }\n ]\n },\n {\n 'tfOpName': 'ScatterNd',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'indices', 'type': 'tensor' },\n { 'start': 1, 'name': 'values', 'type': 'tensor' },\n { 'start': 2, 'name': 'shape', 'type': 'number[]' }\n ]\n },\n {\n 'tfOpName': 'GatherNd',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'indices', 'type': 'tensor' }\n ]\n },\n {\n 'tfOpName': 'SparseToDense',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'sparseIndices', 'type': 'tensor' },\n { 'start': 1, 'name': 'outputShape', 'type': 'number[]' },\n { 'start': 2, 'name': 'sparseValues', 'type': 'tensor' },\n { 'start': 3, 'name': 'defaultValue', 'type': 'tensor' },\n ],\n 'attrs': [{\n 'tfName': 'validate_indices',\n 'name': 'validateIndices',\n 'type': 'bool',\n 'defaultValue': false,\n 'notSupported': true\n }]\n }\n];\n//# sourceMappingURL=slice_join.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'FFT',\n 'category': 'spectral',\n 'inputs': [{ 'start': 0, 'name': 'x', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'IFFT',\n 'category': 'spectral',\n 'inputs': [{ 'start': 0, 'name': 'x', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'RFFT',\n 'category': 'spectral',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' }, {\n 'start': 1,\n 'name': 'fft_length',\n 'type': 'number',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'IRFFT',\n 'category': 'spectral',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' }, {\n 'start': 1,\n 'name': 'fft_length',\n 'type': 'number',\n 'notSupported': true\n }\n ]\n }\n];\n//# sourceMappingURL=spectral.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'Cast',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n {\n 'tfName': 'SrcT',\n 'name': 'sdtype',\n 'type': 'dtype',\n 'notSupported': true\n },\n { 'tfName': 'DstT', 'name': 'dtype', 'type': 'dtype' }\n ]\n },\n {\n 'tfOpName': 'ExpandDims',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number' }\n ]\n },\n {\n 'tfOpName': 'MirrorPad',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'padding', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'mode', 'name': 'mode', 'type': 'string' }]\n },\n {\n 'tfOpName': 'Pad',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'padding', 'type': 'number[]' },\n ],\n 'attrs': [{\n 'tfName': 'constant_value',\n 'name': 'constantValue',\n 'type': 'number',\n 'defaultValue': 0\n }]\n },\n {\n 'tfOpName': 'PadV2',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'padding', 'type': 'number[]' }, {\n 'start': 2,\n 'name': 'constantValue',\n 'type': 'number',\n 'defaultValue': 0\n }\n ]\n },\n {\n 'tfOpName': 'Reshape',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 
'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'shape', 'type': 'number[]' }\n ]\n },\n {\n 'tfOpName': 'Squeeze',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [{\n 'tfName': 'axis',\n 'tfDeprecatedName': 'squeeze_dims',\n 'name': 'axis',\n 'type': 'number[]'\n }]\n },\n {\n 'tfOpName': 'SpaceToBatchND',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'blockShape', 'type': 'number[]' },\n { 'start': 2, 'name': 'paddings', 'type': 'number[]' }\n ]\n },\n {\n 'tfOpName': 'BatchToSpaceND',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'blockShape', 'type': 'number[]' },\n { 'start': 2, 'name': 'crops', 'type': 'number[]' }\n ]\n },\n {\n 'tfOpName': 'DepthToSpace',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'block_size', 'name': 'blockSize', 'type': 'number' },\n { 'tfName': 'data_format', 'name': 'dataFormat', 'type': 'string' }\n ]\n },\n {\n 'tfOpName': 'BroadcastTo',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'shape', 'type': 'number[]' },\n ],\n 'attrs': []\n }\n];\n//# sourceMappingURL=transformation.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from '@tensorflow/tfjs-core';\nimport * as tensorflow from '../data/compiled_api';\nimport { getRegisteredOp } from './custom_op/register';\nimport { getNodeNameAndIndex } from './executors/utils';\nimport * as arithmetic from './op_list/arithmetic';\nimport * as basicMath from './op_list/basic_math';\nimport * as control from './op_list/control';\nimport * as convolution from './op_list/convolution';\nimport * as creation from './op_list/creation';\nimport * as dynamic from './op_list/dynamic';\nimport * as evaluation from './op_list/evaluation';\nimport * as graph from './op_list/graph';\nimport * as hashTable from './op_list/hash_table';\nimport * as image from './op_list/image';\nimport * as logical from './op_list/logical';\nimport * as matrices from './op_list/matrices';\nimport * as normalization from './op_list/normalization';\nimport * as reduction from './op_list/reduction';\nimport * as sliceJoin from './op_list/slice_join';\nimport * as spectral from './op_list/spectral';\nimport * as transformation from './op_list/transformation';\nexport class OperationMapper {\n // Singleton instance for the mapper\n static get Instance() {\n return this._instance || (this._instance = new this());\n }\n // Loads the op mapping from the JSON file.\n constructor() {\n const ops = [\n arithmetic, basicMath, control, convolution, creation, dynamic,\n evaluation, logical, image, graph, 
matrices, normalization, reduction,\n sliceJoin, spectral, transformation, hashTable\n ];\n const mappersJson = [].concat(...ops.map(op => op.json));\n this.opMappers = mappersJson.reduce((map, mapper) => {\n map[mapper.tfOpName] = mapper;\n return map;\n }, {});\n }\n // Converts the model inference graph from Tensorflow GraphDef to local\n // representation for TensorFlow.js API\n transformGraph(graph, signature = {}) {\n const tfNodes = graph.node;\n const placeholders = [];\n const weights = [];\n const initNodes = [];\n const nodes = tfNodes.reduce((map, node) => {\n map[node.name] = this.mapNode(node);\n if (node.op.startsWith('Placeholder')) {\n placeholders.push(map[node.name]);\n }\n else if (node.op === 'Const') {\n weights.push(map[node.name]);\n }\n else if (node.input == null || node.input.length === 0) {\n initNodes.push(map[node.name]);\n }\n return map;\n }, {});\n let inputs = [];\n const outputs = [];\n let inputNodeNameToKey = {};\n let outputNodeNameToKey = {};\n if (signature != null) {\n inputNodeNameToKey = this.mapSignatureEntries(signature.inputs);\n outputNodeNameToKey = this.mapSignatureEntries(signature.outputs);\n }\n const allNodes = Object.keys(nodes);\n allNodes.forEach(key => {\n const node = nodes[key];\n node.inputNames.forEach(name => {\n const [nodeName,] = getNodeNameAndIndex(name);\n node.inputs.push(nodes[nodeName]);\n nodes[nodeName].children.push(node);\n });\n });\n // if signature has not outputs set, add any node that does not have\n // outputs.\n if (Object.keys(outputNodeNameToKey).length === 0) {\n allNodes.forEach(key => {\n const node = nodes[key];\n if (node.children.length === 0) {\n outputs.push(node);\n }\n });\n }\n else {\n Object.keys(outputNodeNameToKey).forEach(name => {\n const [nodeName,] = getNodeNameAndIndex(name);\n const node = nodes[nodeName];\n if (node != null) {\n node.signatureKey = outputNodeNameToKey[name];\n outputs.push(node);\n }\n });\n }\n if (Object.keys(inputNodeNameToKey).length > 0) {\n Object.keys(inputNodeNameToKey).forEach(name => {\n const [nodeName,] = getNodeNameAndIndex(name);\n const node = nodes[nodeName];\n if (node) {\n node.signatureKey = inputNodeNameToKey[name];\n inputs.push(node);\n }\n });\n }\n else {\n inputs = placeholders;\n }\n let functions = {};\n if (graph.library != null && graph.library.function != null) {\n functions = graph.library.function.reduce((functions, func) => {\n functions[func.signature.name] = this.mapFunction(func);\n return functions;\n }, {});\n }\n const result = { nodes, inputs, outputs, weights, placeholders, signature, functions };\n if (initNodes.length > 0) {\n result.initNodes = initNodes;\n }\n return result;\n }\n mapSignatureEntries(entries) {\n return Object.keys(entries || {})\n .reduce((prev, curr) => {\n prev[entries[curr].name] = curr;\n return prev;\n }, {});\n }\n mapNode(node) {\n // Unsupported ops will cause an error at run-time (not parse time), since\n // they may not be used by the actual execution subgraph.\n const mapper = getRegisteredOp(node.op) || this.opMappers[node.op] || {};\n if (node.attr == null) {\n node.attr = {};\n }\n const newNode = {\n name: node.name,\n op: node.op,\n category: mapper.category,\n inputNames: (node.input ||\n []).map(input => input.startsWith('^') ? 
input.substr(1) : input),\n inputs: [],\n children: [],\n inputParams: {},\n attrParams: {},\n rawAttrs: node.attr\n };\n if (mapper.inputs != null) {\n newNode.inputParams =\n mapper.inputs.reduce((map, param) => {\n map[param.name] = {\n type: param.type,\n inputIndexStart: param.start,\n inputIndexEnd: param.end\n };\n return map;\n }, {});\n }\n if (mapper.attrs != null) {\n newNode.attrParams =\n mapper.attrs.reduce((map, param) => {\n const type = param.type;\n let value = undefined;\n switch (param.type) {\n case 'string':\n value = getStringParam(node.attr, param.tfName, param.defaultValue);\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getStringParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'string[]':\n value = getStringArrayParam(node.attr, param.tfName, param.defaultValue);\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getStringArrayParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'number':\n value = getNumberParam(node.attr, param.tfName, (param.defaultValue || 0));\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getNumberParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'number[]':\n value = getNumericArrayParam(node.attr, param.tfName, param.defaultValue);\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getNumericArrayParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'bool':\n value = getBoolParam(node.attr, param.tfName, param.defaultValue);\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getBoolParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'bool[]':\n value = getBoolArrayParam(node.attr, param.tfName, param.defaultValue);\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getBoolArrayParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'shape':\n value = getTensorShapeParam(node.attr, param.tfName, param.defaultValue);\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getTensorShapeParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'shape[]':\n value = getTensorShapeArrayParam(node.attr, param.tfName, param.defaultValue);\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getTensorShapeArrayParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'dtype':\n value = getDtypeParam(node.attr, param.tfName, param.defaultValue);\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getDtypeParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'dtype[]':\n value = getDtypeArrayParam(node.attr, param.tfName, param.defaultValue);\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getDtypeArrayParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'func':\n value = getFuncParam(node.attr, param.tfName, param.defaultValue);\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getFuncParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'tensor':\n case 'tensors':\n break;\n default:\n throw new Error(`Unsupported param type: ${param.type} for op: ${node.op}`);\n }\n map[param.name] = { value, type };\n return map;\n }, {});\n }\n return newNode;\n }\n // map the TFunctionDef to TFJS graph object\n mapFunction(functionDef) {\n const tfNodes = functionDef.nodeDef;\n 
const placeholders = [];\n const weights = [];\n let nodes = {};\n if (tfNodes != null) {\n nodes = tfNodes.reduce((map, node) => {\n map[node.name] = this.mapNode(node);\n if (node.op === 'Const') {\n weights.push(map[node.name]);\n }\n return map;\n }, {});\n }\n const inputs = [];\n const outputs = [];\n functionDef.signature.inputArg.forEach(arg => {\n const [nodeName,] = getNodeNameAndIndex(arg.name);\n const node = {\n name: nodeName,\n op: 'Placeholder',\n inputs: [],\n inputNames: [],\n category: 'graph',\n inputParams: {},\n attrParams: { dtype: { value: parseDtypeParam(arg.type), type: 'dtype' } },\n children: []\n };\n node.signatureKey = arg.name;\n inputs.push(node);\n nodes[nodeName] = node;\n });\n const allNodes = Object.keys(nodes);\n allNodes.forEach(key => {\n const node = nodes[key];\n node.inputNames.forEach(name => {\n const [nodeName,] = getNodeNameAndIndex(name);\n node.inputs.push(nodes[nodeName]);\n nodes[nodeName].children.push(node);\n });\n });\n const returnNodeMap = functionDef.ret;\n functionDef.signature.outputArg.forEach(output => {\n const [nodeName, index] = getNodeNameAndIndex(returnNodeMap[output.name]);\n const node = nodes[nodeName];\n if (node != null) {\n node.defaultOutput = index;\n outputs.push(node);\n }\n });\n const signature = this.mapArgsToSignature(functionDef);\n return { nodes, inputs, outputs, weights, placeholders, signature };\n }\n mapArgsToSignature(functionDef) {\n return {\n methodName: functionDef.signature.name,\n inputs: functionDef.signature.inputArg.reduce((map, arg) => {\n map[arg.name] = this.mapArgToTensorInfo(arg);\n return map;\n }, {}),\n outputs: functionDef.signature.outputArg.reduce((map, arg) => {\n map[arg.name] = this.mapArgToTensorInfo(arg, functionDef.ret);\n return map;\n }, {}),\n };\n }\n mapArgToTensorInfo(arg, nameMap) {\n let name = arg.name;\n if (nameMap != null) {\n name = nameMap[name];\n }\n return { name, dtype: arg.type };\n }\n}\nexport function decodeBase64(text) {\n const global = env().global;\n if (typeof global.atob !== 'undefined') {\n return global.atob(text);\n }\n else if (typeof Buffer !== 'undefined') {\n return new Buffer(text, 'base64').toString();\n }\n else {\n throw new Error('Unable to decode base64 in this environment. ' +\n 'Missing built-in atob() or Buffer()');\n }\n}\nexport function parseStringParam(s, keepCase) {\n const value = Array.isArray(s) ? String.fromCharCode.apply(null, s) : decodeBase64(s);\n return keepCase ? value : value.toLowerCase();\n}\nexport function getStringParam(attrs, name, def, keepCase = false) {\n const param = attrs[name];\n if (param != null) {\n return parseStringParam(param.s, keepCase);\n }\n return def;\n}\nexport function getBoolParam(attrs, name, def) {\n const param = attrs[name];\n return param ? param.b : def;\n}\nexport function getNumberParam(attrs, name, def) {\n const param = attrs[name] || {};\n const value = param['i'] != null ? param['i'] : (param['f'] != null ? param['f'] : def);\n return (typeof value === 'number') ? 
value : parseInt(value, 10);\n}\nexport function parseDtypeParam(value) {\n if (typeof (value) === 'string') {\n // tslint:disable-next-line:no-any\n value = tensorflow.DataType[value];\n }\n switch (value) {\n case tensorflow.DataType.DT_FLOAT:\n return 'float32';\n case tensorflow.DataType.DT_INT32:\n case tensorflow.DataType.DT_INT64:\n case tensorflow.DataType.DT_INT8:\n case tensorflow.DataType.DT_UINT8:\n return 'int32';\n case tensorflow.DataType.DT_BOOL:\n return 'bool';\n case tensorflow.DataType.DT_DOUBLE:\n return 'float32';\n case tensorflow.DataType.DT_STRING:\n return 'string';\n default:\n // Unknown dtype error will happen at runtime (instead of parse time),\n // since these nodes might not be used by the actual subgraph execution.\n return null;\n }\n}\nexport function getFuncParam(attrs, name, def) {\n const param = attrs[name];\n if (param && param.func) {\n return param.func.name;\n }\n return def;\n}\nexport function getDtypeParam(attrs, name, def) {\n const param = attrs[name];\n if (param && param.type) {\n return parseDtypeParam(param.type);\n }\n return def;\n}\nexport function getDtypeArrayParam(attrs, name, def) {\n const param = attrs[name];\n if (param && param.list && param.list.type) {\n return param.list.type.map(v => parseDtypeParam(v));\n }\n return def;\n}\nexport function parseTensorShapeParam(shape) {\n if (shape.unknownRank) {\n return undefined;\n }\n if (shape.dim != null) {\n return shape.dim.map(dim => (typeof dim.size === 'number') ? dim.size : parseInt(dim.size, 10));\n }\n return [];\n}\nexport function getTensorShapeParam(attrs, name, def) {\n const param = attrs[name];\n if (param && param.shape) {\n return parseTensorShapeParam(param.shape);\n }\n return def;\n}\nexport function getNumericArrayParam(attrs, name, def) {\n const param = attrs[name];\n if (param) {\n return ((param.list.f && param.list.f.length ? param.list.f :\n param.list.i) ||\n [])\n .map(v => (typeof v === 'number') ? v : parseInt(v, 10));\n }\n return def;\n}\nexport function getStringArrayParam(attrs, name, def, keepCase = false) {\n const param = attrs[name];\n if (param && param.list && param.list.s) {\n return param.list.s.map((v) => {\n return parseStringParam(v, keepCase);\n });\n }\n return def;\n}\nexport function getTensorShapeArrayParam(attrs, name, def) {\n const param = attrs[name];\n if (param && param.list && param.list.shape) {\n return param.list.shape.map((v) => {\n return parseTensorShapeParam(v);\n });\n }\n return def;\n}\nexport function getBoolArrayParam(attrs, name, def) {\n const param = attrs[name];\n if (param && param.list && param.list.b) {\n return param.list.b;\n }\n return def;\n}\n//# sourceMappingURL=operation_mapper.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getTensor } from '../executors/utils';\nimport { getBoolArrayParam, getBoolParam, getDtypeArrayParam, getDtypeParam, getNumberParam, getNumericArrayParam, getStringArrayParam, getStringParam, getTensorShapeArrayParam, getTensorShapeParam } from '../operation_mapper';\n/**\n * Helper class for lookup inputs and params for nodes in the model graph.\n */\nexport class NodeValueImpl {\n constructor(node, tensorMap, context) {\n this.node = node;\n this.tensorMap = tensorMap;\n this.context = context;\n this.inputs = [];\n this.attrs = {};\n this.inputs = node.inputNames.map(name => this.getInput(name));\n if (node.rawAttrs != null) {\n this.attrs = Object.keys(node.rawAttrs)\n .reduce((attrs, key) => {\n attrs[key] = this.getAttr(key);\n return attrs;\n }, {});\n }\n }\n /**\n * Return the value of the attribute or input param.\n * @param name String: name of attribute or input param.\n */\n getInput(name) {\n return getTensor(name, this.tensorMap, this.context);\n }\n /**\n * Return the value of the attribute or input param.\n * @param name String: name of attribute or input param.\n */\n getAttr(name, defaultValue) {\n const value = this.node.rawAttrs[name];\n if (value.tensor != null) {\n return getTensor(name, this.tensorMap, this.context);\n }\n if (value.i != null || value.f != null) {\n return getNumberParam(this.node.rawAttrs, name, defaultValue);\n }\n if (value.s != null) {\n return getStringParam(this.node.rawAttrs, name, defaultValue);\n }\n if (value.b != null) {\n return getBoolParam(this.node.rawAttrs, name, defaultValue);\n }\n if (value.shape != null) {\n return getTensorShapeParam(this.node.rawAttrs, name, defaultValue);\n }\n if (value.type != null) {\n return getDtypeParam(this.node.rawAttrs, name, defaultValue);\n }\n if (value.list != null) {\n if (value.list.i != null || value.list.f != null) {\n return getNumericArrayParam(this.node.rawAttrs, name, defaultValue);\n }\n if (value.list.s != null) {\n return getStringArrayParam(this.node.rawAttrs, name, defaultValue);\n }\n if (value.list.shape != null) {\n return getTensorShapeArrayParam(this.node.rawAttrs, name, defaultValue);\n }\n if (value.list.b != null) {\n return getBoolArrayParam(this.node.rawAttrs, name, defaultValue);\n }\n if (value.list.type != null) {\n return getDtypeArrayParam(this.node.rawAttrs, name, defaultValue);\n }\n }\n return defaultValue;\n }\n}\n//# sourceMappingURL=node_value_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * This file exports ops used by the converters executors. By default it\n * re-exports all ops. In a custom build this is aliased to a file that will\n * only exports ops for a given model.json.\n */\nexport * from './ops';\n//# sourceMappingURL=ops_for_converter.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// tslint:disable-next-line: no-imports-from-dist\nimport * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter';\nimport { getParamValue } from './utils';\nexport const executeOp = (node, tensorMap, context) => {\n switch (node.op) {\n case 'BiasAdd':\n case 'AddV2':\n case 'Add': {\n return [tfOps.add(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'AddN': {\n return [tfOps.addN(getParamValue('tensors', node, tensorMap, context))];\n }\n case 'FloorMod':\n case 'Mod':\n return [tfOps.mod(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n case 'Mul':\n return [tfOps.mul(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n case 'RealDiv':\n case 'Div': {\n return [tfOps.div(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'DivNoNan': {\n return [tfOps.divNoNan(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'FloorDiv': {\n return [tfOps.floorDiv(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'Sub': {\n return [tfOps.sub(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'Minimum': {\n return [tfOps.minimum(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'Maximum': {\n return [tfOps.maximum(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'Pow': {\n return [tfOps.pow(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'SquaredDifference': {\n return [tfOps.squaredDifference(getParamValue('a', node, 
tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'arithmetic';\n//# sourceMappingURL=arithmetic_executor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// tslint:disable-next-line: no-imports-from-dist\nimport * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter';\nimport { getParamValue, getTensor } from './utils';\nexport const executeOp = (node, tensorMap, context) => {\n switch (node.op) {\n case 'Abs':\n case 'ComplexAbs':\n return [tfOps.abs(getParamValue('x', node, tensorMap, context))];\n case 'Acos':\n return [tfOps.acos(getParamValue('x', node, tensorMap, context))];\n case 'Acosh':\n return [tfOps.acosh(getParamValue('x', node, tensorMap, context))];\n case 'Asin':\n return [tfOps.asin(getParamValue('x', node, tensorMap, context))];\n case 'Asinh':\n return [tfOps.asinh(getParamValue('x', node, tensorMap, context))];\n case 'Atan':\n return [tfOps.atan(getParamValue('x', node, tensorMap, context))];\n case 'Atan2':\n return [tfOps.atan2(getParamValue('x', node, tensorMap, context), getParamValue('y', node, tensorMap, context))];\n case 'Atanh':\n return [tfOps.atanh(getParamValue('x', node, tensorMap, context))];\n case 'Ceil':\n return [tfOps.ceil(getParamValue('x', node, tensorMap, context))];\n case 'Complex':\n return [tfOps.complex(getParamValue('real', node, tensorMap, context), getParamValue('imag', node, tensorMap, context))];\n case 'Cos':\n return [tfOps.cos(getParamValue('x', node, tensorMap, context))];\n case 'Cosh':\n return [tfOps.cosh(getParamValue('x', node, tensorMap, context))];\n case 'Elu':\n return [tfOps.elu(getParamValue('x', node, tensorMap, context))];\n case 'Erf':\n return [tfOps.erf(getParamValue('x', node, tensorMap, context))];\n case 'Exp':\n return [tfOps.exp(getParamValue('x', node, tensorMap, context))];\n case 'Expm1': {\n return [tfOps.expm1(getParamValue('x', node, tensorMap, context))];\n }\n case 'Floor':\n return [tfOps.floor(getParamValue('x', node, tensorMap, context))];\n case 'Log':\n return [tfOps.log(getParamValue('x', node, tensorMap, context))];\n case 'Log1p': {\n return [tfOps.log1p(getParamValue('x', node, tensorMap, context))];\n }\n case 'Imag':\n return [tfOps.imag(getParamValue('x', node, tensorMap, context))];\n case 'Neg':\n return [tfOps.neg(getParamValue('x', node, tensorMap, context))];\n case 'Reciprocal': {\n return [tfOps.reciprocal(getParamValue('x', node, tensorMap, context))];\n }\n case 'Real':\n return [tfOps.real(getParamValue('x', node, tensorMap, context))];\n case 'Relu':\n return [tfOps.relu(getParamValue('x', node, tensorMap, context))];\n case 'Round': {\n return [tfOps.round(getParamValue('x', node, tensorMap, context))];\n }\n case 'Selu':\n return [tfOps.selu(getParamValue('x', node, 
tensorMap, context))];\n case 'Sigmoid':\n return [tfOps.sigmoid(getParamValue('x', node, tensorMap, context))];\n case 'Sin':\n return [tfOps.sin(getParamValue('x', node, tensorMap, context))];\n case 'Sign': {\n return [tfOps.sign(getParamValue('x', node, tensorMap, context))];\n }\n case 'Sinh': {\n return [tfOps.sinh(getParamValue('x', node, tensorMap, context))];\n }\n case 'Softplus': {\n return [tfOps.softplus(getParamValue('x', node, tensorMap, context))];\n }\n case 'Sqrt': {\n return [tfOps.sqrt(getParamValue('x', node, tensorMap, context))];\n }\n case 'Square': {\n return [tfOps.square(getParamValue('x', node, tensorMap, context))];\n }\n case 'Tanh': {\n return [tfOps.tanh(getParamValue('x', node, tensorMap, context))];\n }\n case 'Tan':\n return [tfOps.tan(getParamValue('x', node, tensorMap, context))];\n case 'Relu6':\n case 'ClipByValue':\n return [tfOps.clipByValue(getParamValue('x', node, tensorMap, context), getParamValue('clipValueMin', node, tensorMap, context), getParamValue('clipValueMax', node, tensorMap, context))];\n case 'Rsqrt':\n return [tfOps.rsqrt(getTensor(node.inputNames[0], tensorMap, context))];\n case 'Prod':\n return [tfOps.prod(getParamValue('x', node, tensorMap, context), getParamValue('axes', node, tensorMap, context))];\n case 'LeakyRelu':\n return [tfOps.leakyRelu(getParamValue('x', node, tensorMap, context), getParamValue('alpha', node, tensorMap, context))];\n case 'Prelu':\n return [tfOps.prelu(getParamValue('x', node, tensorMap, context), getParamValue('alpha', node, tensorMap, context))];\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'basic_math';\n//# sourceMappingURL=basic_math_executor.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * This differs from util.assertShapesMatch in that it allows values of\n * negative one, an undefined size of a dimensinon, in a shape to match\n * anything.\n */\nimport { util } from '@tensorflow/tfjs-core';\nexport function assertShapesMatchAllowUndefinedSize(shapeA, shapeB, errorMessagePrefix = '') {\n util.assert(shapesEqualAllowUndefinedSize(shapeA, shapeB), () => errorMessagePrefix + ` Shapes ${shapeA} and ${shapeB} must match`);\n}\nexport function shapesEqualAllowUndefinedSize(n1, n2) {\n if (n1.length !== n2.length) {\n return false;\n }\n for (let i = 0; i < n1.length; i++) {\n if (n1[i] !== -1 && n2[i] !== -1 && n1[i] !== n2[i]) {\n return false;\n }\n }\n return true;\n}\n//# sourceMappingURL=tensor_utils.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { concat, keep, reshape, scalar, slice, stack, tensor, tidy, unstack } from '@tensorflow/tfjs-core';\nimport { assertShapesMatchAllowUndefinedSize } from './tensor_utils';\n/**\n * The TensorArray object keeps an array of Tensors. It\n * allows reading from the array and writing to the array.\n */\nexport class TensorArray {\n constructor(name, dtype, maxSize, elementShape, identicalElementShapes, dynamicSize, clearAfterRead) {\n this.name = name;\n this.dtype = dtype;\n this.maxSize = maxSize;\n this.elementShape = elementShape;\n this.identicalElementShapes = identicalElementShapes;\n this.dynamicSize = dynamicSize;\n this.clearAfterRead = clearAfterRead;\n this.tensors = [];\n this.closed_ = false;\n this.idTensor = scalar(0);\n keep(this.idTensor);\n }\n get id() {\n return this.idTensor.id;\n }\n get closed() {\n return this.closed_;\n }\n /**\n * Dispose the tensors and idTensor and mark the TensoryArray as closed.\n */\n clearAndClose(keepIds) {\n this.tensors.forEach(tensor => {\n if (keepIds == null || !keepIds.has(tensor.tensor.id)) {\n tensor.tensor.dispose();\n }\n });\n this.tensors = [];\n this.closed_ = true;\n this.idTensor.dispose();\n }\n size() {\n return this.tensors.length;\n }\n /**\n * Read the value at location index in the TensorArray.\n * @param index Number the index to read from.\n */\n read(index) {\n if (this.closed_) {\n throw new Error(`TensorArray ${this.name} has already been closed.`);\n }\n if (index < 0 || index >= this.size()) {\n throw new Error(`Tried to read from index ${index}, but array size is: ${this.size()}`);\n }\n const tensorWithState = this.tensors[index];\n if (tensorWithState.cleared) {\n throw new Error(`TensorArray ${this.name}: Could not read index ${index} twice because it was cleared after a previous read ` +\n `(perhaps try setting clear_after_read = false?).`);\n }\n if (this.clearAfterRead) {\n tensorWithState.cleared = true;\n }\n tensorWithState.read = true;\n return tensorWithState.tensor;\n }\n /**\n * Helper method to read multiple tensors from the specified indices.\n */\n readMany(indices) {\n return indices.map(index => this.read(index));\n }\n /**\n * Write value into the index of the TensorArray.\n * @param index number the index to write to.\n * @param tensor\n */\n write(index, tensor) {\n if (this.closed_) {\n throw new Error(`TensorArray ${this.name} has already been closed.`);\n }\n if (index < 0 || !this.dynamicSize && index >= this.maxSize) {\n throw new Error(`Tried to write to index ${index}, but array is not resizeable and size is: ${this.maxSize}`);\n }\n const t = this.tensors[index] || {};\n if (tensor.dtype !== this.dtype) {\n throw new Error(`TensorArray ${this.name}: Could not write to TensorArray index ${index},\n because the value dtype is ${tensor.dtype}, but TensorArray dtype is ${this.dtype}.`);\n }\n // Set the shape for the first 
time write to unknow shape tensor array\n if (this.size() === 0 &&\n (this.elementShape == null || this.elementShape.length === 0)) {\n this.elementShape = tensor.shape;\n }\n assertShapesMatchAllowUndefinedSize(this.elementShape, tensor.shape, `TensorArray ${this.name}: Could not write to TensorArray index ${index}.`);\n if (t.read) {\n throw new Error(`TensorArray ${this.name}: Could not write to TensorArray index ${index}, because it has already been read.`);\n }\n if (t.written) {\n throw new Error(`TensorArray ${this.name}: Could not write to TensorArray index ${index}, because it has already been written.`);\n }\n t.tensor = tensor;\n keep(tensor);\n t.written = true;\n this.tensors[index] = t;\n }\n /**\n * Helper method to write multiple tensors to the specified indices.\n */\n writeMany(indices, tensors) {\n if (indices.length !== tensors.length) {\n throw new Error(`TensorArray ${this.name}: could not write multiple tensors,` +\n `because the index size: ${indices.length} is not the same as tensors size: ${tensors.length}.`);\n }\n indices.forEach((i, index) => this.write(i, tensors[index]));\n }\n /**\n * Return selected values in the TensorArray as a packed Tensor. All of\n * selected values must have been written and their shapes must all match.\n * @param [indices] number[] Optional. Taking values in [0, max_value). If the\n * TensorArray is not dynamic, max_value=size(). If not specified returns\n * all tensors in the original order.\n * @param [dtype]\n */\n gather(indices, dtype) {\n if (!!dtype && dtype !== this.dtype) {\n throw new Error(`TensorArray dtype is ${this.dtype} but gather requested dtype ${dtype}`);\n }\n if (!indices) {\n indices = [];\n for (let i = 0; i < this.size(); i++) {\n indices.push(i);\n }\n }\n else {\n indices = indices.slice(0, this.size());\n }\n if (indices.length === 0) {\n return tensor([], [0].concat(this.elementShape));\n }\n // Read all the PersistentTensors into a vector to keep track of\n // their memory.\n const tensors = this.readMany(indices);\n assertShapesMatchAllowUndefinedSize(this.elementShape, tensors[0].shape, 'TensorArray shape mismatch: ');\n return stack(tensors, 0);\n }\n /**\n * Return the values in the TensorArray as a concatenated Tensor.\n */\n concat(dtype) {\n if (!!dtype && dtype !== this.dtype) {\n throw new Error(`TensorArray dtype is ${this.dtype} but concat requested dtype ${dtype}`);\n }\n if (this.size() === 0) {\n return tensor([], [0].concat(this.elementShape));\n }\n const indices = [];\n for (let i = 0; i < this.size(); i++) {\n indices.push(i);\n }\n // Collect all the tensors from the tensors array.\n const tensors = this.readMany(indices);\n assertShapesMatchAllowUndefinedSize(this.elementShape, tensors[0].shape, `TensorArray shape mismatch: tensor array shape (${this.elementShape}) vs first tensor shape (${tensors[0].shape})`);\n return concat(tensors, 0);\n }\n /**\n * Scatter the values of a Tensor in specific indices of a TensorArray.\n * @param indices nummber[] values in [0, max_value). If the\n * TensorArray is not dynamic, max_value=size().\n * @param tensor Tensor input tensor.\n */\n scatter(indices, tensor) {\n if (tensor.dtype !== this.dtype) {\n throw new Error(`TensorArray dtype is ${this.dtype} but tensor has dtype ${tensor.dtype}`);\n }\n if (indices.length !== tensor.shape[0]) {\n throw new Error(`Expected len(indices) == tensor.shape[0], but saw: ${indices.length} vs. 
${tensor.shape[0]}`);\n }\n const maxIndex = Math.max(...indices);\n if (!this.dynamicSize && maxIndex >= this.maxSize) {\n throw new Error(`Max index must be < array size (${maxIndex} vs. ${this.maxSize})`);\n }\n this.writeMany(indices, unstack(tensor, 0));\n }\n /**\n * Split the values of a Tensor into the TensorArray.\n * @param length number[] with the lengths to use when splitting value along\n * its first dimension.\n * @param tensor Tensor, the tensor to split.\n */\n split(length, tensor) {\n if (tensor.dtype !== this.dtype) {\n throw new Error(`TensorArray dtype is ${this.dtype} but tensor has dtype ${tensor.dtype}`);\n }\n let totalLength = 0;\n const cumulativeLengths = length.map(len => {\n totalLength += len;\n return totalLength;\n });\n if (totalLength !== tensor.shape[0]) {\n throw new Error(`Expected sum of lengths to be equal to\n tensor.shape[0], but sum of lengths is\n ${totalLength}, and tensor's shape is: ${tensor.shape}`);\n }\n if (!this.dynamicSize && length.length !== this.maxSize) {\n throw new Error(`TensorArray's size is not equal to the size of lengths (${this.maxSize} vs. ${length.length}), ` +\n 'and the TensorArray is not marked as dynamically resizeable');\n }\n const elementPerRow = totalLength === 0 ? 0 : tensor.size / totalLength;\n const tensors = [];\n tidy(() => {\n tensor = reshape(tensor, [1, totalLength, elementPerRow]);\n for (let i = 0; i < length.length; ++i) {\n const previousLength = (i === 0) ? 0 : cumulativeLengths[i - 1];\n const indices = [0, previousLength, 0];\n const sizes = [1, length[i], elementPerRow];\n tensors[i] = reshape(slice(tensor, indices, sizes), this.elementShape);\n }\n return tensors;\n });\n const indices = [];\n for (let i = 0; i < length.length; i++) {\n indices[i] = i;\n }\n this.writeMany(indices, tensors);\n }\n}\n//# sourceMappingURL=tensor_array.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { concat, keep, reshape, scalar, slice, stack, tensor, tidy, unstack } from '@tensorflow/tfjs-core';\nimport { assertShapesMatchAllowUndefinedSize } from './tensor_utils';\n/**\n * TensorList stores a container of `tf.Tensor` objects, which are accessible\n * via tensors field.\n *\n * In order to get a copy of the underlying list, use the copy method:\n * ```\n * TensorList b = a.copy();\n * b.tensors().pushBack(t); // This does not modify a.tensors().\n * ```\n *\n * Note that this is not a deep copy: the memory locations of the underlying\n * tensors will still point to the same locations of the corresponding tensors\n * in the original.\n */\nexport class TensorList {\n /**\n *\n * @param tensors list of tensors\n * @param elementShape shape of each tensor\n * @param elementDtype data type of each tensor\n * @param maxNumElements The maximum allowed size of `tensors`. 
Defaults to -1\n * meaning that the size of `tensors` is unbounded.\n */\n constructor(tensors, elementShape, elementDtype, maxNumElements = -1) {\n this.tensors = tensors;\n this.elementShape = elementShape;\n this.elementDtype = elementDtype;\n if (tensors != null) {\n tensors.forEach(tensor => {\n if (elementDtype !== tensor.dtype) {\n throw new Error(`Invalid data types; op elements ${elementDtype}, but list elements ${tensor.dtype}`);\n }\n assertShapesMatchAllowUndefinedSize(elementShape, tensor.shape, 'TensorList shape mismatch: ');\n keep(tensor);\n });\n }\n this.idTensor = scalar(0);\n this.maxNumElements = maxNumElements;\n keep(this.idTensor);\n }\n get id() {\n return this.idTensor.id;\n }\n /**\n * Get a new TensorList containing a copy of the underlying tensor container.\n */\n copy() {\n return new TensorList([...this.tensors], this.elementShape, this.elementDtype);\n }\n /**\n * Dispose the tensors and idTensor and clear the tensor list.\n */\n clearAndClose(keepIds) {\n this.tensors.forEach(tensor => {\n if (keepIds == null || !keepIds.has(tensor.id)) {\n tensor.dispose();\n }\n });\n this.tensors.length = 0;\n this.idTensor.dispose();\n }\n /**\n * The size of the tensors in the tensor list.\n */\n size() {\n return this.tensors.length;\n }\n /**\n * Return a tensor that stacks a list of rank-R tf.Tensors into one rank-(R+1)\n * tf.Tensor.\n * @param elementShape shape of each tensor\n * @param elementDtype data type of each tensor\n * @param numElements the number of elements to stack\n */\n stack(elementShape, elementDtype, numElements = -1) {\n if (elementDtype !== this.elementDtype) {\n throw new Error(`Invalid data types; op elements ${elementDtype}, but list elements ${this.elementDtype}`);\n }\n if (numElements !== -1 && this.tensors.length !== numElements) {\n throw new Error(`Operation expected a list with ${numElements} elements but got a list with ${this.tensors.length} elements.`);\n }\n assertShapesMatchAllowUndefinedSize(elementShape, this.elementShape, 'TensorList shape mismatch: ');\n return tidy(() => {\n const reshapedTensors = this.tensors.map(tensor => reshape(tensor, elementShape));\n return stack(reshapedTensors, 0);\n });\n }\n /**\n * Pop a tensor from the end of the list.\n * @param elementShape shape of the tensor\n * @param elementDtype data type of the tensor\n */\n popBack(elementShape, elementDtype) {\n if (elementDtype !== this.elementDtype) {\n throw new Error(`Invalid data types; op elements ${elementDtype}, but list elements ${this.elementDtype}`);\n }\n if (this.size() === 0) {\n throw new Error('Trying to pop from an empty list.');\n }\n const tensor = this.tensors.pop();\n assertShapesMatchAllowUndefinedSize(tensor.shape, elementShape, 'TensorList shape mismatch: ');\n return reshape(tensor, elementShape);\n }\n /**\n * Push a tensor to the end of the list.\n * @param tensor Tensor to be pushed.\n */\n pushBack(tensor) {\n if (tensor.dtype !== this.elementDtype) {\n throw new Error(`Invalid data types; op elements ${tensor.dtype}, but list elements ${this.elementDtype}`);\n }\n assertShapesMatchAllowUndefinedSize(tensor.shape, this.elementShape, 'TensorList shape mismatch: ');\n if (this.maxNumElements === this.size()) {\n throw new Error(`Trying to push element into a full list.`);\n }\n keep(tensor);\n this.tensors.push(tensor);\n }\n /**\n * Update the size of the list.\n * @param size the new size of the list.\n */\n resize(size) {\n if (size < 0) {\n throw new Error(`TensorListResize expects size to be non-negative. 
Got: ${size}`);\n }\n if (this.maxNumElements !== -1 && size > this.maxNumElements) {\n throw new Error(`TensorListResize input size ${size} is greater maxNumElement ${this.maxNumElements}.`);\n }\n this.tensors.length = size;\n }\n /**\n * Retrieve the element at the provided index\n * @param elementShape shape of the tensor\n * @param elementDtype dtype of the tensor\n * @param elementIndex index of the tensor\n */\n getItem(elementIndex, elementShape, elementDtype) {\n if (elementDtype !== this.elementDtype) {\n throw new Error(`Invalid data types; op elements ${elementDtype}, but list elements ${this.elementDtype}`);\n }\n if (elementIndex < 0 || elementIndex > this.tensors.length) {\n throw new Error(`Trying to access element ${elementIndex} in a list with ${this.tensors.length} elements.`);\n }\n if (this.tensors[elementIndex] == null) {\n throw new Error(`element at index ${elementIndex} is null.`);\n }\n assertShapesMatchAllowUndefinedSize(this.tensors[elementIndex].shape, elementShape, 'TensorList shape mismatch: ');\n return this.tensors[elementIndex];\n }\n /**\n * Set the tensor at the index\n * @param elementIndex index of the tensor\n * @param tensor the tensor to be inserted into the list\n */\n setItem(elementIndex, tensor) {\n if (tensor.dtype !== this.elementDtype) {\n throw new Error(`Invalid data types; op elements ${tensor.dtype}, but list elements ${this.elementDtype}`);\n }\n if (elementIndex < 0 ||\n this.maxNumElements !== -1 && elementIndex >= this.maxNumElements) {\n throw new Error(`Trying to set element ${elementIndex} in a list with max ${this.maxNumElements} elements.`);\n }\n assertShapesMatchAllowUndefinedSize(this.elementShape, tensor.shape, 'TensorList shape mismatch: ');\n keep(tensor);\n this.tensors[elementIndex] = tensor;\n }\n /**\n * Return selected values in the TensorList as a stacked Tensor. 
All of\n * selected values must have been written and their shapes must all match.\n * @param indices indices of tensors to gather\n * @param elementDtype output tensor dtype\n * @param elementShape output tensor element shape\n */\n gather(indices, elementDtype, elementShape) {\n if (elementDtype !== this.elementDtype) {\n throw new Error(`Invalid data types; op elements ${elementDtype}, but list elements ${this.elementDtype}`);\n }\n assertShapesMatchAllowUndefinedSize(this.elementShape, elementShape, 'TensorList shape mismatch: ');\n // When indices is greater than the size of the list, indices beyond the\n // size of the list are ignored.\n indices = indices.slice(0, this.size());\n if (indices.length === 0) {\n return tensor([], [0].concat(this.elementShape));\n }\n return tidy(() => {\n const tensors = indices.map(i => reshape(this.tensors[i], elementShape));\n return stack(tensors, 0);\n });\n }\n /**\n * Return the values in the TensorList as a concatenated Tensor.\n * @param elementDtype output tensor dtype\n * @param elementShape output tensor element shape\n */\n concat(elementDtype, elementShape) {\n if (!!elementDtype && elementDtype !== this.elementDtype) {\n throw new Error(`TensorList dtype is ${this.elementDtype} but concat requested dtype ${elementDtype}`);\n }\n assertShapesMatchAllowUndefinedSize(this.elementShape, elementShape, 'TensorList shape mismatch: ');\n if (this.size() === 0) {\n return tensor([], [0].concat(this.elementShape));\n }\n return tidy(() => {\n const tensors = this.tensors.map(t => reshape(t, elementShape));\n return concat(tensors, 0);\n });\n }\n}\n/**\n * Creates a TensorList which, when stacked, has the value of tensor.\n * @param tensor from tensor\n * @param elementShape output tensor element shape\n */\nexport function fromTensor(tensor, elementShape, elementDtype) {\n const dtype = tensor.dtype;\n if (tensor.shape.length < 1) {\n throw new Error(`Tensor must be at least a vector, but saw shape: ${tensor.shape}`);\n }\n if (tensor.dtype !== elementDtype) {\n throw new Error(`Invalid data types; op elements ${tensor.dtype}, but list elements ${elementDtype}`);\n }\n const outputShape = tensor.shape.slice(1);\n assertShapesMatchAllowUndefinedSize(outputShape, elementShape, 'TensorList shape mismatch: ');\n const tensorList = unstack(tensor);\n return new TensorList(tensorList, elementShape, dtype);\n}\n/**\n * Return a TensorList of the given size with empty elements.\n * @param elementShape the shape of the future elements of the list\n * @param elementDtype the desired type of elements in the list\n * @param numElements the number of elements to reserve\n */\nexport function reserve(elementShape, elementDtype, numElements) {\n return new TensorList([], elementShape, elementDtype, numElements);\n}\n/**\n * Put tensors at specific indices of a stacked tensor into a TensorList.\n * @param indices list of indices on how to scatter the tensor.\n * @param tensor input tensor.\n * @param elementShape the shape of the future elements of the list\n * @param numElements the number of elements to scatter\n */\nexport function scatter(tensor, indices, elementShape, numElements) {\n if (indices.length !== tensor.shape[0]) {\n throw new Error(`Expected len(indices) == tensor.shape[0], but saw: ${indices.length} vs. ${tensor.shape[0]}`);\n }\n const maxIndex = Math.max(...indices);\n if (numElements != null && numElements !== -1 && maxIndex >= numElements) {\n throw new Error(`Max index must be < array size (${maxIndex} vs. 
${numElements})`);\n }\n const list = new TensorList([], elementShape, tensor.dtype, numElements);\n const tensors = unstack(tensor, 0);\n indices.forEach((value, index) => {\n list.setItem(value, tensors[index]);\n });\n return list;\n}\n/**\n * Split the values of a Tensor into a TensorList.\n * @param length the lengths to use when splitting value along\n * its first dimension.\n * @param tensor the tensor to split.\n * @param elementShape the shape of the future elements of the list\n */\nexport function split(tensor, length, elementShape) {\n let totalLength = 0;\n const cumulativeLengths = length.map(len => {\n totalLength += len;\n return totalLength;\n });\n if (totalLength !== tensor.shape[0]) {\n throw new Error(`Expected sum of lengths to be equal to\n tensor.shape[0], but sum of lengths is\n ${totalLength}, and tensor's shape is: ${tensor.shape}`);\n }\n const elementPerRow = totalLength === 0 ? 0 : tensor.size / totalLength;\n const tensors = tidy(() => {\n const tensors = [];\n tensor = reshape(tensor, [1, totalLength, elementPerRow]);\n for (let i = 0; i < length.length; ++i) {\n const previousLength = (i === 0) ? 0 : cumulativeLengths[i - 1];\n const indices = [0, previousLength, 0];\n const sizes = [1, length[i], elementPerRow];\n tensors[i] = reshape(slice(tensor, indices, sizes), elementShape);\n }\n tensor.dispose();\n return tensors;\n });\n const list = new TensorList([], elementShape, tensor.dtype, length.length);\n for (let i = 0; i < tensors.length; i++) {\n list.setItem(i, tensors[i]);\n }\n return list;\n}\n//# sourceMappingURL=tensor_list.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { scalar } from '@tensorflow/tfjs-core';\nimport { TensorArray } from '../../executor/tensor_array';\nimport { fromTensor, reserve, scatter, split } from '../../executor/tensor_list';\nimport { cloneTensor, getParamValue, getTensor } from './utils';\nexport const executeOp = async (node, tensorMap, context) => {\n switch (node.op) {\n case 'If':\n case 'StatelessIf': {\n const thenFunc = getParamValue('thenBranch', node, tensorMap, context);\n const elseFunc = getParamValue('elseBranch', node, tensorMap, context);\n const cond = getParamValue('cond', node, tensorMap, context);\n const args = getParamValue('args', node, tensorMap, context);\n const condValue = await cond.data();\n if (condValue[0]) {\n return context.functionMap[thenFunc].executeFunctionAsync(args, context.tensorArrayMap, context.tensorListMap);\n }\n else {\n return context.functionMap[elseFunc].executeFunctionAsync(args, context.tensorArrayMap, context.tensorListMap);\n }\n }\n case 'While':\n case 'StatelessWhile': {\n const bodyFunc = getParamValue('body', node, tensorMap, context);\n const condFunc = getParamValue('cond', node, tensorMap, context);\n const args = getParamValue('args', node, tensorMap, context);\n // Calculate the 
condition of the loop\n const condResult = (await context.functionMap[condFunc].executeFunctionAsync(args, context.tensorArrayMap, context.tensorListMap));\n const argIds = args.map(tensor => tensor.id);\n let condValue = await condResult[0].data();\n // Dispose the intermediate tensors for condition function\n condResult.forEach(tensor => {\n if (!tensor.kept && argIds.indexOf(tensor.id) === -1) {\n tensor.dispose();\n }\n });\n let result = args;\n while (condValue[0]) {\n // Record the previous result for intermediate tensor tracking\n const origResult = result;\n // Execution the body of the loop\n result = await context.functionMap[bodyFunc].executeFunctionAsync(result, context.tensorArrayMap, context.tensorListMap);\n const resultIds = result.map(tensor => tensor.id);\n // Dispose the intermediate tensor for body function that is not global\n // kept, not input/output of the body function\n origResult.forEach(tensor => {\n if (!tensor.kept && argIds.indexOf(tensor.id) === -1 &&\n resultIds.indexOf(tensor.id) === -1) {\n tensor.dispose();\n }\n });\n // Recalcuate the condition of the loop using the latest results.\n const condResult = (await context.functionMap[condFunc].executeFunctionAsync(result, context.tensorArrayMap, context.tensorListMap));\n condValue = await condResult[0].data();\n // Dispose the intermediate tensors for condition function\n condResult.forEach(tensor => {\n if (!tensor.kept && argIds.indexOf(tensor.id) === -1 &&\n resultIds.indexOf(tensor.id) === -1) {\n tensor.dispose();\n }\n });\n }\n return result;\n }\n case 'LoopCond': {\n const pred = getParamValue('pred', node, tensorMap, context);\n return [cloneTensor(pred)];\n }\n case 'Switch': {\n const pred = getParamValue('pred', node, tensorMap, context);\n let data = getParamValue('data', node, tensorMap, context);\n if (!data.kept) {\n data = cloneTensor(data);\n }\n // Outputs nodes :0 => false, :1 => true\n return (await pred.data())[0] ? 
[undefined, data] : [data, undefined];\n }\n case 'Merge': {\n const inputName = node.inputNames.find(name => getTensor(name, tensorMap, context) !== undefined);\n if (inputName) {\n const data = getTensor(inputName, tensorMap, context);\n return [cloneTensor(data)];\n }\n return undefined;\n }\n case 'Enter': {\n const frameId = getParamValue('frameName', node, tensorMap, context);\n const data = getParamValue('tensor', node, tensorMap, context);\n context.enterFrame(frameId);\n return [cloneTensor(data)];\n }\n case 'Exit': {\n const data = getParamValue('tensor', node, tensorMap, context);\n context.exitFrame();\n return [cloneTensor(data)];\n }\n case 'NextIteration': {\n const data = getParamValue('tensor', node, tensorMap, context);\n context.nextIteration();\n return [cloneTensor(data)];\n }\n case 'TensorArrayV3': {\n const size = getParamValue('size', node, tensorMap, context);\n const dtype = getParamValue('dtype', node, tensorMap, context);\n const elementShape = getParamValue('elementShape', node, tensorMap, context);\n const dynamicSize = getParamValue('dynamicSize', node, tensorMap, context);\n const clearAfterRead = getParamValue('clearAfterRead', node, tensorMap, context);\n const identicalElementShapes = getParamValue('identicalElementShapes', node, tensorMap, context);\n const name = getParamValue('name', node, tensorMap, context);\n const tensorArray = new TensorArray(name, dtype, size, elementShape, identicalElementShapes, dynamicSize, clearAfterRead);\n context.addTensorArray(tensorArray);\n return [tensorArray.idTensor, scalar(1.0)];\n }\n case 'TensorArrayWriteV3': {\n const id = getParamValue('tensorArrayId', node, tensorMap, context);\n const index = getParamValue('index', node, tensorMap, context);\n const writeTensor = getParamValue('tensor', node, tensorMap, context);\n const writeTensorArray = context.getTensorArray(id.id);\n writeTensorArray.write(index, writeTensor);\n return [writeTensorArray.idTensor];\n }\n case 'TensorArrayReadV3': {\n const readId = getParamValue('tensorArrayId', node, tensorMap, context);\n const readIndex = getParamValue('index', node, tensorMap, context);\n const readTensorArray = context.getTensorArray(readId.id);\n return [readTensorArray.read(readIndex)];\n }\n case 'TensorArrayGatherV3': {\n const gatherId = getParamValue('tensorArrayId', node, tensorMap, context);\n const gatherIndices = getParamValue('indices', node, tensorMap, context);\n const gatherDtype = getParamValue('dtype', node, tensorMap, context);\n const gatherTensorArray = context.getTensorArray(gatherId.id);\n return [gatherTensorArray.gather(gatherIndices, gatherDtype)];\n }\n case 'TensorArrayScatterV3': {\n const scatterId = getParamValue('tensorArrayId', node, tensorMap, context);\n const scatterIndices = getParamValue('indices', node, tensorMap, context);\n const scatterTensor = getParamValue('tensor', node, tensorMap, context);\n const scatterTensorArray = context.getTensorArray(scatterId.id);\n scatterTensorArray.scatter(scatterIndices, scatterTensor);\n return [scatterTensorArray.idTensor];\n }\n case 'TensorArrayConcatV3': {\n const concatId = getParamValue('tensorArrayId', node, tensorMap, context);\n const concatTensorArray = context.getTensorArray(concatId.id);\n const concatDtype = getParamValue('dtype', node, tensorMap, context);\n return [concatTensorArray.concat(concatDtype)];\n }\n case 'TensorArraySplitV3': {\n const splitId = getParamValue('tensorArrayId', node, tensorMap, context);\n const splitTensor = getParamValue('tensor', node, tensorMap, 
context);\n const lengths = getParamValue('lengths', node, tensorMap, context);\n const splitTensorArray = context.getTensorArray(splitId.id);\n splitTensorArray.split(lengths, splitTensor);\n return [splitTensorArray.idTensor];\n }\n case 'TensorArraySizeV3': {\n const sizeId = getParamValue('tensorArrayId', node, tensorMap, context);\n const sizeTensorArray = context.getTensorArray(sizeId.id);\n return [scalar(sizeTensorArray.size(), 'int32')];\n }\n case 'TensorArrayCloseV3': {\n const closeId = getParamValue('tensorArrayId', node, tensorMap, context);\n const closeTensorArray = context.getTensorArray(closeId.id);\n closeTensorArray.clearAndClose();\n return [closeTensorArray.idTensor];\n }\n case 'TensorListSetItem': {\n const idTensor = getParamValue('tensorListId', node, tensorMap, context);\n const index = getParamValue('index', node, tensorMap, context);\n const writeTensor = getParamValue('tensor', node, tensorMap, context);\n const tensorList = context.getTensorList(idTensor.id);\n tensorList.setItem(index, writeTensor);\n return [tensorList.idTensor];\n }\n case 'TensorListGetItem': {\n const idTensor = getParamValue('tensorListId', node, tensorMap, context);\n const readIndex = getParamValue('index', node, tensorMap, context);\n const elementShape = getParamValue('elementShape', node, tensorMap, context);\n const elementDType = getParamValue('elementDType', node, tensorMap, context);\n const tensorList = context.getTensorList(idTensor.id);\n return [tensorList.getItem(readIndex, elementShape, elementDType)];\n }\n case 'TensorListScatterV2':\n case 'TensorListScatter': {\n const scatterIndices = getParamValue('indices', node, tensorMap, context);\n const scatterTensor = getParamValue('tensor', node, tensorMap, context);\n const elementShape = getParamValue('elementShape', node, tensorMap, context);\n const numElements = getParamValue('numElements', node, tensorMap, context);\n const tensorList = scatter(scatterTensor, scatterIndices, elementShape, numElements);\n context.addTensorList(tensorList);\n return [tensorList.idTensor];\n }\n case 'TensorListReserve': {\n const elementShape = getParamValue('elementShape', node, tensorMap, context);\n const elementDtype = getParamValue('elementDType', node, tensorMap, context);\n const numElements = getParamValue('numElements', node, tensorMap, context);\n const tensorList = reserve(elementShape, elementDtype, numElements);\n context.addTensorList(tensorList);\n return [tensorList.idTensor];\n }\n case 'TensorListGather': {\n const gatherId = getParamValue('tensorListId', node, tensorMap, context);\n const gatherIndices = getParamValue('indices', node, tensorMap, context);\n const elementShape = getParamValue('elementShape', node, tensorMap, context);\n const elementDtype = getParamValue('elementDType', node, tensorMap, context);\n const tensorList = context.getTensorList(gatherId.id);\n return [tensorList.gather(gatherIndices, elementDtype, elementShape)];\n }\n case 'TensorListStack': {\n const idTensor = getParamValue('tensorListId', node, tensorMap, context);\n const elementShape = getParamValue('elementShape', node, tensorMap, context);\n const elementDtype = getParamValue('elementDType', node, tensorMap, context);\n const numElements = getParamValue('numElements', node, tensorMap, context);\n const tensorList = context.getTensorList(idTensor.id);\n return [tensorList.stack(elementShape, elementDtype, numElements)];\n }\n case 'TensorListFromTensor': {\n const tensor = getParamValue('tensor', node, tensorMap, context);\n const 
elementShape = getParamValue('elementShape', node, tensorMap, context);\n const elementDtype = getParamValue('elementDType', node, tensorMap, context);\n const tensorList = fromTensor(tensor, elementShape, elementDtype);\n context.addTensorList(tensorList);\n return [tensorList.idTensor];\n }\n case 'TensorListConcat': {\n const concatId = getParamValue('tensorListId', node, tensorMap, context);\n const tensorList = context.getTensorList(concatId.id);\n const concatDtype = getParamValue('dtype', node, tensorMap, context);\n const elementShape = getParamValue('elementShape', node, tensorMap, context);\n return [tensorList.concat(concatDtype, elementShape)];\n }\n case 'TensorListPushBack': {\n const idTensor = getParamValue('tensorListId', node, tensorMap, context);\n const writeTensor = getParamValue('tensor', node, tensorMap, context);\n const tensorList = context.getTensorList(idTensor.id);\n tensorList.pushBack(writeTensor);\n return [tensorList.idTensor];\n }\n case 'TensorListPopBack': {\n const idTensor = getParamValue('tensorListId', node, tensorMap, context);\n const elementShape = getParamValue('elementShape', node, tensorMap, context);\n const elementDType = getParamValue('elementDType', node, tensorMap, context);\n const tensorList = context.getTensorList(idTensor.id);\n return [tensorList.popBack(elementShape, elementDType)];\n }\n case 'TensorListSplit': {\n const splitTensor = getParamValue('tensor', node, tensorMap, context);\n const elementShape = getParamValue('elementShape', node, tensorMap, context);\n const lengths = getParamValue('lengths', node, tensorMap, context);\n const tensorList = split(splitTensor, lengths, elementShape);\n context.addTensorList(tensorList);\n return [tensorList.idTensor];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'control';\n//# sourceMappingURL=control_executor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// tslint:disable-next-line: no-imports-from-dist\nimport * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter';\nimport { getPadding, getParamValue } from './utils';\nfunction fusedConvAndDepthWiseParams(node, tensorMap, context) {\n const [extraOp, activationFunc] = getParamValue('fusedOps', node, tensorMap, context);\n const isBiasAdd = extraOp === 'biasadd';\n const isPrelu = activationFunc === 'prelu';\n const isBatchNorm = extraOp === 'fusedbatchnorm';\n const numArgs = getParamValue('numArgs', node, tensorMap, context);\n if (isBiasAdd) {\n if (isPrelu && numArgs !== 2) {\n throw new Error('FusedConv2d and DepthwiseConv2d with BiasAdd and Prelu ' +\n 'must have two extra arguments: bias and alpha.');\n }\n if (!isPrelu && numArgs !== 1) {\n throw new Error('FusedConv2d and DepthwiseConv2d with BiasAdd must have ' +\n 'one extra argument: bias.');\n }\n }\n if (isBatchNorm) {\n throw new Error('FusedConv2d and DepthwiseConv2d with FusedBatchNorm is not supported.');\n }\n const stride = getParamValue('strides', node, tensorMap, context);\n const pad = getPadding(node, tensorMap, context);\n const dataFormat = getParamValue('dataFormat', node, tensorMap, context)\n .toUpperCase();\n const dilations = getParamValue('dilations', node, tensorMap, context);\n const [biasArg, preluArg] = getParamValue('args', node, tensorMap, context);\n return {\n stride,\n pad,\n dataFormat,\n dilations,\n biasArg,\n preluArg,\n activationFunc\n };\n}\nexport const executeOp = (node, tensorMap, context) => {\n switch (node.op) {\n case 'Conv1D': {\n const stride = getParamValue('stride', node, tensorMap, context);\n const pad = getParamValue('pad', node, tensorMap, context);\n const dataFormat = getParamValue('dataFormat', node, tensorMap, context)\n .toUpperCase();\n const dilation = getParamValue('dilation', node, tensorMap, context);\n return [tfOps.conv1d(getParamValue('x', node, tensorMap, context), getParamValue('filter', node, tensorMap, context), stride, pad, dataFormat, dilation)];\n }\n case 'Conv2D': {\n const stride = getParamValue('strides', node, tensorMap, context);\n const pad = getPadding(node, tensorMap, context);\n const dataFormat = getParamValue('dataFormat', node, tensorMap, context)\n .toUpperCase();\n const dilations = getParamValue('dilations', node, tensorMap, context);\n return [tfOps.conv2d(getParamValue('x', node, tensorMap, context), getParamValue('filter', node, tensorMap, context), [stride[1], stride[2]], pad, dataFormat, [dilations[1], dilations[2]])];\n }\n case '_FusedConv2D': {\n const { stride, pad, dataFormat, dilations, biasArg, preluArg, activationFunc } = fusedConvAndDepthWiseParams(node, tensorMap, context);\n return [tfOps.fused.conv2d({\n x: getParamValue('x', node, tensorMap, context),\n filter: getParamValue('filter', node, tensorMap, context),\n strides: [stride[1], stride[2]],\n 
pad: pad,\n dataFormat: dataFormat,\n dilations: [dilations[1], dilations[2]],\n bias: biasArg,\n activation: activationFunc,\n preluActivationWeights: preluArg\n })];\n }\n case 'FusedDepthwiseConv2dNative': {\n const { stride, pad, dataFormat, dilations, biasArg, preluArg, activationFunc } = fusedConvAndDepthWiseParams(node, tensorMap, context);\n return [tfOps.fused.depthwiseConv2d({\n x: getParamValue('x', node, tensorMap, context),\n filter: getParamValue('filter', node, tensorMap, context),\n strides: [stride[1], stride[2]],\n pad: pad,\n dataFormat: dataFormat,\n dilations: [dilations[1], dilations[2]],\n bias: biasArg,\n activation: activationFunc,\n preluActivationWeights: preluArg\n })];\n }\n case 'Conv2DBackpropInput':\n case 'Conv2dTranspose': {\n const shape = getParamValue('outputShape', node, tensorMap, context);\n const stride = getParamValue('strides', node, tensorMap, context);\n const pad = getPadding(node, tensorMap, context);\n return [tfOps.conv2dTranspose(getParamValue('x', node, tensorMap, context), getParamValue('filter', node, tensorMap, context), shape, [stride[1], stride[2]], pad)];\n }\n case 'DepthwiseConv2dNative':\n case 'DepthwiseConv2d': {\n const stride = getParamValue('strides', node, tensorMap, context);\n const pad = getPadding(node, tensorMap, context);\n const dilations = getParamValue('dilations', node, tensorMap, context);\n const dataFormat = getParamValue('dataFormat', node, tensorMap, context)\n .toUpperCase();\n return [tfOps.depthwiseConv2d(getParamValue('input', node, tensorMap, context), getParamValue('filter', node, tensorMap, context), [stride[1], stride[2]], pad, dataFormat, [dilations[1], dilations[2]])];\n }\n case 'Conv3D': {\n const stride = getParamValue('strides', node, tensorMap, context);\n const pad = getParamValue('pad', node, tensorMap, context);\n const dataFormat = getParamValue('dataFormat', node, tensorMap, context)\n .toUpperCase();\n const dilations = getParamValue('dilations', node, tensorMap, context);\n return [tfOps.conv3d(getParamValue('x', node, tensorMap, context), getParamValue('filter', node, tensorMap, context), [stride[1], stride[2], stride[3]], pad, dataFormat, [dilations[1], dilations[2], dilations[3]])];\n }\n case 'AvgPool': {\n const stride = getParamValue('strides', node, tensorMap, context);\n const pad = getParamValue('pad', node, tensorMap, context);\n const kernelSize = getParamValue('kernelSize', node, tensorMap, context);\n return [tfOps.avgPool(getParamValue('x', node, tensorMap, context), [kernelSize[1], kernelSize[2]], [stride[1], stride[2]], pad)];\n }\n case 'MaxPool': {\n const stride = getParamValue('strides', node, tensorMap, context);\n const pad = getParamValue('pad', node, tensorMap, context);\n const kernelSize = getParamValue('kernelSize', node, tensorMap, context);\n return [tfOps.maxPool(getParamValue('x', node, tensorMap, context), [kernelSize[1], kernelSize[2]], [stride[1], stride[2]], pad)];\n }\n case 'MaxPoolWithArgmax': {\n const stride = getParamValue('strides', node, tensorMap, context);\n const pad = getParamValue('pad', node, tensorMap, context);\n const kernelSize = getParamValue('kernelSize', node, tensorMap, context);\n const includeBatchInIndex = getParamValue('includeBatchInIndex', node, tensorMap, context);\n const { result, indexes } = tfOps.maxPoolWithArgmax(getParamValue('x', node, tensorMap, context), [kernelSize[1], kernelSize[2]], [stride[1], stride[2]], pad, includeBatchInIndex);\n return [result, indexes];\n }\n case 'AvgPool3D': {\n const stride = 
getParamValue('strides', node, tensorMap, context);\n const pad = getParamValue('pad', node, tensorMap, context);\n const kernelSize = getParamValue('kernelSize', node, tensorMap, context);\n return [tfOps.avgPool3d(getParamValue('x', node, tensorMap, context), [kernelSize[1], kernelSize[2], kernelSize[3]], [stride[1], stride[2], stride[3]], pad)];\n }\n case 'MaxPool3D': {\n const stride = getParamValue('strides', node, tensorMap, context);\n const pad = getParamValue('pad', node, tensorMap, context);\n const kernelSize = getParamValue('kernelSize', node, tensorMap, context);\n return [tfOps.maxPool3d(getParamValue('x', node, tensorMap, context), [kernelSize[1], kernelSize[2], kernelSize[3]], [stride[1], stride[2], stride[3]], pad)];\n }\n case 'Dilation2D': {\n const strides = getParamValue('strides', node, tensorMap, context);\n const pad = getParamValue('pad', node, tensorMap, context);\n const dilations = getParamValue('dilations', node, tensorMap, context);\n // strides: [1, stride_height, stride_width, 1].\n const strideHeight = strides[1];\n const strideWidth = strides[2];\n // dilations: [1, dilation_height, dilation_width, 1].\n const dilationHeight = dilations[1];\n const dilationWidth = dilations[2];\n return [tfOps.dilation2d(getParamValue('x', node, tensorMap, context), getParamValue('filter', node, tensorMap, context), [strideHeight, strideWidth], pad, [dilationHeight, dilationWidth], 'NHWC' /* dataFormat */)];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'convolution';\n//# sourceMappingURL=convolution_executor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// tslint:disable-next-line: no-imports-from-dist\nimport * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter';\nimport { getParamValue } from './utils';\nexport const executeOp = (node, tensorMap, context) => {\n switch (node.op) {\n case 'Fill': {\n const shape = getParamValue('shape', node, tensorMap, context);\n const dtype = getParamValue('dtype', node, tensorMap, context);\n const value = getParamValue('value', node, tensorMap, context);\n return [tfOps.fill(shape, value, dtype)];\n }\n case 'LinSpace': {\n const start = getParamValue('start', node, tensorMap, context);\n const stop = getParamValue('stop', node, tensorMap, context);\n const num = getParamValue('num', node, tensorMap, context);\n return [tfOps.linspace(start, stop, num)];\n }\n case 'Multinomial': {\n const logits = getParamValue('logits', node, tensorMap, context);\n const numSamples = getParamValue('numSamples', node, tensorMap, context);\n const seed = getParamValue('seed', node, tensorMap, context);\n return [tfOps.multinomial(logits, numSamples, seed)];\n }\n case 'OneHot': {\n const indices = getParamValue('indices', node, tensorMap, context);\n const depth = getParamValue('depth', node, 
tensorMap, context);\n const onValue = getParamValue('onValue', node, tensorMap, context);\n const offValue = getParamValue('offValue', node, tensorMap, context);\n return [tfOps.oneHot(indices, depth, onValue, offValue)];\n }\n case 'Ones': {\n return [tfOps.ones(getParamValue('shape', node, tensorMap, context), getParamValue('dtype', node, tensorMap, context))];\n }\n case 'OnesLike': {\n return [tfOps.onesLike(getParamValue('x', node, tensorMap, context))];\n }\n case 'RandomUniform': {\n return [tfOps.randomUniform(\n // tslint:disable-next-line:no-any\n getParamValue('shape', node, tensorMap, context), getParamValue('minval', node, tensorMap, context), getParamValue('maxval', node, tensorMap, context), getParamValue('dtype', node, tensorMap, context))];\n }\n case 'Range': {\n const start = getParamValue('start', node, tensorMap, context);\n const stop = getParamValue('stop', node, tensorMap, context);\n const step = getParamValue('step', node, tensorMap, context);\n return [tfOps.range(start, stop, step, getParamValue('dtype', node, tensorMap, context))];\n }\n case 'TruncatedNormal': {\n const shape = getParamValue('shape', node, tensorMap, context);\n const mean = getParamValue('mean', node, tensorMap, context);\n const stdDev = getParamValue('stdDev', node, tensorMap, context);\n const seed = getParamValue('seed', node, tensorMap, context);\n return [tfOps.truncatedNormal(shape, mean, stdDev, getParamValue('dtype', node, tensorMap, context), seed)];\n }\n case 'Zeros': {\n return [tfOps.zeros(getParamValue('shape', node, tensorMap, context), getParamValue('dtype', node, tensorMap, context))];\n }\n case 'ZerosLike': {\n return [tfOps.zerosLike(getParamValue('x', node, tensorMap, context))];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'creation';\n//# sourceMappingURL=creation_executor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// tslint:disable-next-line: no-imports-from-dist\nimport * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter';\nimport { getParamValue } from './utils';\nfunction nmsParams(node, tensorMap, context) {\n const boxes = getParamValue('boxes', node, tensorMap, context);\n const scores = getParamValue('scores', node, tensorMap, context);\n const maxOutputSize = getParamValue('maxOutputSize', node, tensorMap, context);\n const iouThreshold = getParamValue('iouThreshold', node, tensorMap, context);\n const scoreThreshold = getParamValue('scoreThreshold', node, tensorMap, context);\n const softNmsSigma = getParamValue('softNmsSigma', node, tensorMap, context);\n return {\n boxes,\n scores,\n maxOutputSize,\n iouThreshold,\n scoreThreshold,\n softNmsSigma\n };\n}\nexport const executeOp = async (node, tensorMap, context) => {\n switch (node.op) {\n case 'NonMaxSuppressionV5': {\n const { boxes, scores, maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma } = nmsParams(node, tensorMap, context);\n const result = await tfOps.image.nonMaxSuppressionWithScoreAsync(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma);\n return [result.selectedIndices, result.selectedScores];\n }\n case 'NonMaxSuppressionV4': {\n const { boxes, scores, maxOutputSize, iouThreshold, scoreThreshold } = nmsParams(node, tensorMap, context);\n const padToMaxOutputSize = getParamValue('padToMaxOutputSize', node, tensorMap, context);\n const result = await tfOps.image.nonMaxSuppressionPaddedAsync(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold, padToMaxOutputSize);\n return [result.selectedIndices, result.validOutputs];\n }\n case 'NonMaxSuppressionV3':\n case 'NonMaxSuppressionV2': {\n const { boxes, scores, maxOutputSize, iouThreshold, scoreThreshold } = nmsParams(node, tensorMap, context);\n return [await tfOps.image.nonMaxSuppressionAsync(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold)];\n }\n case 'Where': {\n const condition = tfOps.cast(getParamValue('condition', node, tensorMap, context), 'bool');\n const result = [await tfOps.whereAsync(condition)];\n condition.dispose();\n return result;\n }\n case 'ListDiff': {\n return tfOps.setdiff1dAsync(getParamValue('x', node, tensorMap, context), getParamValue('y', node, tensorMap, context));\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'dynamic';\n//# sourceMappingURL=dynamic_executor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// tslint:disable-next-line: no-imports-from-dist\nimport * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter';\nimport { getParamValue } from './utils';\nexport const executeOp = (node, tensorMap, context) => {\n switch (node.op) {\n case 'TopKV2': {\n const x = getParamValue('x', node, tensorMap, context);\n const k = getParamValue('k', node, tensorMap, context);\n const sorted = getParamValue('sorted', node, tensorMap, context);\n const result = tfOps.topk(x, k, sorted);\n return [result.values, result.indices];\n }\n case 'Unique': {\n const x = getParamValue('x', node, tensorMap, context);\n const result = tfOps.unique(x);\n return [result.values, result.indices];\n }\n case 'UniqueV2': {\n const x = getParamValue('x', node, tensorMap, context);\n const axis = getParamValue('axis', node, tensorMap, context);\n const result = tfOps.unique(x, axis);\n return [result.values, result.indices];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'evaluation';\n//# sourceMappingURL=evaluation_executor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// tslint:disable-next-line: no-imports-from-dist\nimport * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter';\nimport { cloneTensor, getParamValue, getTensor } from './utils';\nexport const executeOp = (node, tensorMap, context) => {\n switch (node.op) {\n case 'Const': {\n return tensorMap[node.name];\n }\n case 'PlaceholderWithDefault':\n const def = getParamValue('default', node, tensorMap, context);\n return [getTensor(node.name, tensorMap, context) || def];\n case 'Placeholder':\n return [getTensor(node.name, tensorMap, context)];\n case 'Identity':\n case 'StopGradient':\n case 'FakeQuantWithMinMaxVars': { // This op is currently ignored.\n const data = getParamValue('x', node, tensorMap, context);\n return [cloneTensor(data)];\n }\n case 'IdentityN':\n return getParamValue('x', node, tensorMap, context)\n .map((t) => cloneTensor(t));\n case 'Snapshot':\n const snapshot = getParamValue('x', node, tensorMap, context);\n return [cloneTensor(snapshot)];\n case 'Shape':\n return [tfOps.tensor1d(getParamValue('x', node, tensorMap, context).shape, 'int32')];\n case 'ShapeN':\n return getParamValue('x', node, tensorMap, context)\n .map((t) => tfOps.tensor1d(t.shape));\n case 'Size':\n return [tfOps.scalar(getParamValue('x', node, tensorMap, context).size, 'int32')];\n case 'Rank':\n return [tfOps.scalar(getParamValue('x', node, tensorMap, context).rank, 'int32')];\n case 'NoOp':\n return [tfOps.scalar(1)];\n case 'Print':\n const input = getParamValue('x', node, tensorMap, context);\n const data = getParamValue('data', node, tensorMap, context);\n const message = getParamValue('message', node, tensorMap, context);\n const summarize = getParamValue('summarize', node, tensorMap, context);\n console.warn('The graph has a tf.print() operation,' +\n 'usually used for debugging, which slows down performance.');\n console.log(message);\n for (let i = 0; i < data.length; i++) {\n console.log(Array.prototype.slice.call(data[i].dataSync())\n .slice(0, summarize));\n }\n return [input];\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'graph';\n//# sourceMappingURL=graph_executor.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { keep, scalar, stack, tidy, unstack, util } from '@tensorflow/tfjs-core';\n/**\n * Hashtable contains a set of tensors, which can be accessed by key.\n */\nexport class HashTable {\n /**\n * Constructor of HashTable. Creates a hash table.\n *\n * @param keyDType `dtype` of the table keys.\n * @param valueDType `dtype` of the table values.\n */\n constructor(keyDType, valueDType) {\n this.keyDType = keyDType;\n this.valueDType = valueDType;\n this.handle = scalar(0);\n // tslint:disable-next-line: no-any\n this.tensorMap = new Map();\n keep(this.handle);\n }\n get id() {\n return this.handle.id;\n }\n /**\n * Dispose the tensors and handle and clear the hashtable.\n */\n clearAndClose() {\n this.tensorMap.forEach(value => value.dispose());\n this.tensorMap.clear();\n this.handle.dispose();\n }\n /**\n * The number of items in the hash table.\n */\n size() {\n return this.tensorMap.size;\n }\n /**\n * Replaces the contents of the table with the specified keys and values.\n * @param keys Keys to store in the hashtable.\n * @param values Values to store in the hashtable.\n */\n async import(keys, values) {\n this.checkKeyAndValueTensor(keys, values);\n // We only store the primitive values of the keys, this allows lookup\n // to be O(1).\n const $keys = await keys.data();\n // Clear the hashTable before inserting new values.\n this.tensorMap.forEach(value => value.dispose());\n this.tensorMap.clear();\n return tidy(() => {\n const $values = unstack(values);\n const keysLength = $keys.length;\n const valuesLength = $values.length;\n util.assert(keysLength === valuesLength, () => `The number of elements doesn't match, keys has ` +\n `${keysLength} elements, the values has ${valuesLength} ` +\n `elements.`);\n for (let i = 0; i < keysLength; i++) {\n const key = $keys[i];\n const value = $values[i];\n keep(value);\n this.tensorMap.set(key, value);\n }\n return this.handle;\n });\n }\n /**\n * Looks up keys in a hash table, outputs the corresponding values.\n *\n * Performs batch lookups, for every element in the key tensor, `find`\n * stacks the corresponding value into the return tensor.\n *\n * If an element is not present in the table, the given `defaultValue` is\n * used.\n *\n * @param keys Keys to look up. Must have the same type as the keys of the\n * table.\n * @param defaultValue The scalar `defaultValue` is the value output for keys\n * not present in the table. 
It must also be of the same type as the\n * table values.\n */\n async find(keys, defaultValue) {\n this.checkKeyAndValueTensor(keys, defaultValue);\n const $keys = await keys.data();\n return tidy(() => {\n const result = [];\n for (let i = 0; i < $keys.length; i++) {\n const key = $keys[i];\n const value = this.findWithDefault(key, defaultValue);\n result.push(value);\n }\n return stack(result);\n });\n }\n // tslint:disable-next-line: no-any\n findWithDefault(key, defaultValue) {\n const result = this.tensorMap.get(key);\n return result != null ? result : defaultValue;\n }\n checkKeyAndValueTensor(key, value) {\n if (key.dtype !== this.keyDType) {\n throw new Error(`Expect key dtype ${this.keyDType}, but got ` +\n `${key.dtype}`);\n }\n if (value.dtype !== this.valueDType) {\n throw new Error(`Expect value dtype ${this.valueDType}, but got ` +\n `${value.dtype}`);\n }\n }\n}\n//# sourceMappingURL=hash_table.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { HashTable } from '../../executor/hash_table';\nimport { getParamValue } from './utils';\nexport const executeOp = async (node, tensorMap, context, resourceManager) => {\n switch (node.op) {\n case 'HashTable':\n case 'HashTableV2': {\n const keyDType = getParamValue('keyDType', node, tensorMap, context);\n const valueDType = getParamValue('valueDType', node, tensorMap, context);\n const hashTable = new HashTable(keyDType, valueDType);\n resourceManager.addHashTable(node.name, hashTable);\n return [hashTable.handle];\n }\n case 'LookupTableImport':\n case 'LookupTableImportV2': {\n const handle = getParamValue('tableHandle', node, tensorMap, context, resourceManager);\n const keys = getParamValue('keys', node, tensorMap, context);\n const values = getParamValue('values', node, tensorMap, context);\n const hashTable = resourceManager.getHashTableById(handle.id);\n return [await hashTable.import(keys, values)];\n }\n case 'LookupTableFind':\n case 'LookupTableFindV2': {\n const handle = getParamValue('tableHandle', node, tensorMap, context, resourceManager);\n const keys = getParamValue('keys', node, tensorMap, context);\n const defaultValue = getParamValue('defaultValue', node, tensorMap, context);\n const hashTable = resourceManager.getHashTableById(handle.id);\n return [await hashTable.find(keys, defaultValue)];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'hash_table';\n//# sourceMappingURL=hash_table_executor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// tslint:disable-next-line: no-imports-from-dist\nimport * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter';\nimport { getParamValue } from './utils';\nexport const executeOp = (node, tensorMap, context) => {\n switch (node.op) {\n case 'ResizeBilinear': {\n const images = getParamValue('images', node, tensorMap, context);\n const size = getParamValue('size', node, tensorMap, context);\n const alignCorners = getParamValue('alignCorners', node, tensorMap, context);\n return [tfOps.image.resizeBilinear(images, [size[0], size[1]], alignCorners)];\n }\n case 'ResizeNearestNeighbor': {\n const images = getParamValue('images', node, tensorMap, context);\n const size = getParamValue('size', node, tensorMap, context);\n const alignCorners = getParamValue('alignCorners', node, tensorMap, context);\n return [tfOps.image.resizeNearestNeighbor(images, [size[0], size[1]], alignCorners)];\n }\n case 'CropAndResize': {\n const image = getParamValue('image', node, tensorMap, context);\n const boxes = getParamValue('boxes', node, tensorMap, context);\n const boxInd = getParamValue('boxInd', node, tensorMap, context);\n const cropSize = getParamValue('cropSize', node, tensorMap, context);\n const method = getParamValue('method', node, tensorMap, context);\n const extrapolationValue = getParamValue('extrapolationValue', node, tensorMap, context);\n return [tfOps.image.cropAndResize(image, boxes, boxInd, cropSize, method, extrapolationValue)];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'image';\n//# sourceMappingURL=image_executor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// tslint:disable-next-line: no-imports-from-dist\nimport * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter';\nimport { getParamValue } from './utils';\nexport const executeOp = (node, tensorMap, context) => {\n switch (node.op) {\n case 'Equal': {\n return [tfOps.equal(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'NotEqual': {\n return [tfOps.notEqual(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'Greater': {\n return [tfOps.greater(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'GreaterEqual': {\n return [tfOps.greaterEqual(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'Less': {\n return [tfOps.less(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'LessEqual': {\n return [tfOps.lessEqual(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'LogicalAnd': {\n return [tfOps.logicalAnd(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'LogicalNot': {\n return [tfOps.logicalNot(getParamValue('a', node, tensorMap, context))];\n }\n case 'LogicalOr': {\n return [tfOps.logicalOr(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'Select':\n case 'SelectV2': {\n return [tfOps.where(getParamValue('condition', node, tensorMap, context), getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'logical';\n//# sourceMappingURL=logical_executor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// tslint:disable-next-line: no-imports-from-dist\nimport * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter';\nimport { getParamValue } from './utils';\nexport const executeOp = (node, tensorMap, context) => {\n switch (node.op) {\n case 'BatchMatMul':\n case 'BatchMatMulV2':\n case 'MatMul':\n return [tfOps.matMul(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context), getParamValue('transposeA', node, tensorMap, context), getParamValue('transposeB', node, tensorMap, context))];\n case 'Transpose':\n return [tfOps.transpose(getParamValue('x', node, tensorMap, context), getParamValue('perm', node, tensorMap, context))];\n case '_FusedMatMul':\n const [extraOp, activationFunc] = getParamValue('fusedOps', node, tensorMap, context);\n const isBiasAdd = extraOp === 'biasadd';\n const isPrelu = activationFunc === 'prelu';\n const numArgs = getParamValue('numArgs', node, tensorMap, context);\n if (isBiasAdd) {\n if (isPrelu && numArgs !== 2) {\n throw new Error('Fused MatMul with BiasAdd and Prelu must have two ' +\n 'extra arguments: bias and alpha.');\n }\n if (!isPrelu && numArgs !== 1) {\n throw new Error('Fused MatMul with BiasAdd must have one extra argument: bias.');\n }\n }\n const [biasArg, preluArg] = getParamValue('args', node, tensorMap, context);\n return [tfOps.fused.matMul({\n a: getParamValue('a', node, tensorMap, context),\n b: getParamValue('b', node, tensorMap, context),\n transposeA: getParamValue('transposeA', node, tensorMap, context),\n transposeB: getParamValue('transposeB', node, tensorMap, context),\n bias: biasArg,\n activation: activationFunc,\n preluActivationWeights: preluArg\n })];\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'matrices';\n//# sourceMappingURL=matrices_executor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// tslint:disable-next-line: no-imports-from-dist\nimport * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter';\nimport { getParamValue } from './utils';\nexport const executeOp = (node, tensorMap, context) => {\n switch (node.op) {\n case 'FusedBatchNorm':\n case 'FusedBatchNormV2': {\n return [tfOps.batchNorm(getParamValue('x', node, tensorMap, context), getParamValue('mean', node, tensorMap, context), getParamValue('variance', node, tensorMap, context), getParamValue('offset', node, tensorMap, context), getParamValue('scale', node, tensorMap, context), getParamValue('epsilon', node, tensorMap, context))];\n }\n case 'FusedBatchNormV3': {\n return [tfOps.batchNorm(getParamValue('x', node, tensorMap, context), getParamValue('mean', node, tensorMap, context), getParamValue('variance', node, tensorMap, context), getParamValue('offset', node, tensorMap, context), getParamValue('scale', node, tensorMap, context), getParamValue('epsilon', node, tensorMap, context))];\n }\n case 'LRN': {\n return [tfOps.localResponseNormalization(getParamValue('x', node, tensorMap, context), getParamValue('radius', node, tensorMap, context), getParamValue('bias', node, tensorMap, context), getParamValue('alpha', node, tensorMap, context), getParamValue('beta', node, tensorMap, context))];\n }\n case 'Softmax': {\n return [tfOps.softmax(getParamValue('x', node, tensorMap, context))];\n }\n case 'LogSoftmax': {\n return [tfOps.logSoftmax(getParamValue('x', node, tensorMap, context))];\n }\n case 'SparseToDense': {\n return [tfOps.sparseToDense(getParamValue('sparseIndices', node, tensorMap, context), getParamValue('outputShape', node, tensorMap, context), getParamValue('sparseValues', node, tensorMap, context), getParamValue('defaultValue', node, tensorMap, context))];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'normalization';\n//# sourceMappingURL=normalization_executor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// tslint:disable-next-line: no-imports-from-dist\nimport * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter';\nimport { getParamValue } from './utils';\nexport const executeOp = (node, tensorMap, context) => {\n switch (node.op) {\n case 'Max': {\n const axis = getParamValue('axis', node, tensorMap, context);\n const keepDims = getParamValue('keepDims', node, tensorMap, context);\n return [tfOps.max(getParamValue('x', node, tensorMap, context), axis, keepDims)];\n }\n case 'Mean': {\n const axis = getParamValue('axis', node, tensorMap, context);\n const keepDims = getParamValue('keepDims', node, tensorMap, context);\n return [tfOps.mean(getParamValue('x', node, tensorMap, context), axis, keepDims)];\n }\n case 'Min': {\n const axis = getParamValue('axis', node, tensorMap, context);\n const keepDims = getParamValue('keepDims', node, tensorMap, context);\n return [tfOps.min(getParamValue('x', node, tensorMap, context), axis, keepDims)];\n }\n case 'Sum': {\n const axis = getParamValue('axis', node, tensorMap, context);\n const keepDims = getParamValue('keepDims', node, tensorMap, context);\n return [tfOps.sum(getParamValue('x', node, tensorMap, context), axis, keepDims)];\n }\n case 'All': {\n const axis = getParamValue('axis', node, tensorMap, context);\n const keepDims = getParamValue('keepDims', node, tensorMap, context);\n return [tfOps.all(getParamValue('x', node, tensorMap, context), axis, keepDims)];\n }\n case 'Any': {\n const axis = getParamValue('axis', node, tensorMap, context);\n const keepDims = getParamValue('keepDims', node, tensorMap, context);\n return [tfOps.any(getParamValue('x', node, tensorMap, context), axis, keepDims)];\n }\n case 'ArgMax': {\n const axis = getParamValue('axis', node, tensorMap, context);\n return [tfOps.argMax(getParamValue('x', node, tensorMap, context), axis)];\n }\n case 'ArgMin': {\n const axis = getParamValue('axis', node, tensorMap, context);\n return [tfOps.argMin(getParamValue('x', node, tensorMap, context), axis)];\n }\n case 'Prod': {\n const axis = getParamValue('axis', node, tensorMap, context);\n const keepDims = getParamValue('keepDims', node, tensorMap, context);\n return [tfOps.prod(getParamValue('x', node, tensorMap, context), axis, keepDims)];\n }\n case 'Cumsum': {\n const axis = getParamValue('axis', node, tensorMap, context);\n const exclusive = getParamValue('exclusive', node, tensorMap, context);\n const reverse = getParamValue('reverse', node, tensorMap, context);\n return [tfOps.cumsum(getParamValue('x', node, tensorMap, context), axis, exclusive, reverse)];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'reduction';\n//# sourceMappingURL=reduction_executor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { tidy, util } from '@tensorflow/tfjs-core';\n// tslint:disable-next-line: no-imports-from-dist\nimport * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter';\nimport { getParamValue } from './utils';\nexport const executeOp = (node, tensorMap, context) => {\n switch (node.op) {\n case 'ConcatV2':\n case 'Concat': {\n const n = getParamValue('n', node, tensorMap, context);\n const axis = getParamValue('axis', node, tensorMap, context);\n let inputs = getParamValue('tensors', node, tensorMap, context);\n inputs = inputs.slice(0, n);\n return [tfOps.concat(inputs, axis)];\n }\n case 'GatherV2':\n case 'Gather': {\n const axis = getParamValue('axis', node, tensorMap, context);\n const input = getParamValue('x', node, tensorMap, context);\n const indices = getParamValue('indices', node, tensorMap, context);\n return [tfOps.gather(input, tfOps.cast(indices, 'int32'), axis)];\n }\n case 'ReverseV2':\n case 'Reverse': {\n const axis = getParamValue('axis', node, tensorMap, context);\n const input = getParamValue('x', node, tensorMap, context);\n return [tfOps.reverse(input, axis)];\n }\n case 'Slice': {\n // tslint:disable-next-line:no-any\n const begin = getParamValue('begin', node, tensorMap, context);\n // tslint:disable-next-line:no-any\n const size = getParamValue('size', node, tensorMap, context);\n return [tfOps.slice(getParamValue('x', node, tensorMap, context), begin, size)];\n }\n case 'StridedSlice': {\n const begin = getParamValue('begin', node, tensorMap, context);\n const end = getParamValue('end', node, tensorMap, context);\n const strides = getParamValue('strides', node, tensorMap, context);\n const beginMask = getParamValue('beginMask', node, tensorMap, context);\n const endMask = getParamValue('endMask', node, tensorMap, context);\n const ellipsisMask = getParamValue('ellipsisMask', node, tensorMap, context);\n const newAxisMask = getParamValue('newAxisMask', node, tensorMap, context);\n const shrinkAxisMask = getParamValue('shrinkAxisMask', node, tensorMap, context);\n const tensor = getParamValue('x', node, tensorMap, context);\n return [tfOps.stridedSlice(tensor, begin, end, strides, beginMask, endMask, ellipsisMask, newAxisMask, shrinkAxisMask)];\n }\n case 'Pack': {\n return tidy(() => {\n const axis = getParamValue('axis', node, tensorMap, context);\n const tensors = getParamValue('tensors', node, tensorMap, context);\n // Reshape the tensors to the first tensor's shape if they don't\n // match.\n const shape = tensors[0].shape;\n const squeezedShape = tfOps.squeeze(tensors[0]).shape;\n const mapped = tensors.map(tensor => {\n const sameShape = util.arraysEqual(tensor.shape, shape);\n if (!sameShape &&\n !util.arraysEqual(tfOps.squeeze(tensor).shape, squeezedShape)) {\n throw new Error('the input tensors shape does not match');\n }\n return sameShape ? 
tensor : tfOps.reshape(tensor, shape);\n });\n return [tfOps.stack(mapped, axis)];\n });\n }\n case 'Unpack': {\n const axis = getParamValue('axis', node, tensorMap, context);\n const tensor = getParamValue('tensor', node, tensorMap, context);\n return tfOps.unstack(tensor, axis);\n }\n case 'Tile': {\n const reps = getParamValue('reps', node, tensorMap, context);\n return [tfOps.tile(getParamValue('x', node, tensorMap, context), reps)];\n }\n case 'Split':\n case 'SplitV': {\n const axis = getParamValue('axis', node, tensorMap, context);\n const numOrSizeSplits = getParamValue('numOrSizeSplits', node, tensorMap, context);\n const tensor = getParamValue('x', node, tensorMap, context);\n return tfOps.split(tensor, numOrSizeSplits, axis);\n }\n case 'ScatterNd': {\n const indices = getParamValue('indices', node, tensorMap, context);\n const values = getParamValue('values', node, tensorMap, context);\n const shape = getParamValue('shape', node, tensorMap, context);\n return [tfOps.scatterND(indices, values, shape)];\n }\n case 'GatherNd': {\n const x = getParamValue('x', node, tensorMap, context);\n const indices = getParamValue('indices', node, tensorMap, context);\n return [tfOps.gatherND(x, indices)];\n }\n case 'SparseToDense': {\n const indices = getParamValue('sparseIndices', node, tensorMap, context);\n const shape = getParamValue('outputShape', node, tensorMap, context);\n const sparseValues = getParamValue('sparseValues', node, tensorMap, context);\n const defaultValue = getParamValue('defaultValue', node, tensorMap, context);\n return [tfOps.sparseToDense(indices, sparseValues, shape, sparseValues.dtype === defaultValue.dtype ?\n defaultValue :\n tfOps.cast(defaultValue, sparseValues.dtype))];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'slice_join';\n//# sourceMappingURL=slice_join_executor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// tslint:disable-next-line: no-imports-from-dist\nimport * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter';\nimport { getParamValue } from './utils';\nexport const executeOp = (node, tensorMap, context) => {\n switch (node.op) {\n case 'FFT': {\n return [tfOps.fft(getParamValue('x', node, tensorMap, context))];\n }\n case 'IFFT': {\n return [tfOps.ifft(getParamValue('x', node, tensorMap, context))];\n }\n case 'RFFT': {\n return [tfOps.rfft(getParamValue('x', node, tensorMap, context))];\n }\n case 'IRFFT': {\n return [tfOps.irfft(getParamValue('x', node, tensorMap, context))];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'spectral';\n//# sourceMappingURL=spectral_executor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// tslint:disable-next-line: no-imports-from-dist\nimport * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter';\nimport { getParamValue } from './utils';\nexport const executeOp = (node, tensorMap, context) => {\n switch (node.op) {\n case 'Cast': {\n return [tfOps.cast(getParamValue('x', node, tensorMap, context), getParamValue('dtype', node, tensorMap, context))];\n }\n case 'ExpandDims': {\n const axis = getParamValue('axis', node, tensorMap, context);\n return [tfOps.expandDims(getParamValue('x', node, tensorMap, context), axis)];\n }\n case 'Squeeze': {\n const axis = getParamValue('axis', node, tensorMap, context);\n return [tfOps.squeeze(getParamValue('x', node, tensorMap, context), axis)];\n }\n case 'Reshape': {\n return [tfOps.reshape(getParamValue('x', node, tensorMap, context), getParamValue('shape', node, tensorMap, context))];\n }\n case 'MirrorPad': {\n return [tfOps.mirrorPad(getParamValue('x', node, tensorMap, context), getParamValue('padding', node, tensorMap, context), getParamValue('mode', node, tensorMap, context))];\n }\n case 'PadV2':\n case 'Pad': {\n return [tfOps.pad(getParamValue('x', node, tensorMap, context), getParamValue('padding', node, tensorMap, context), getParamValue('constantValue', node, tensorMap, context))];\n }\n case 'SpaceToBatchND': {\n const blockShape = getParamValue('blockShape', node, tensorMap, context);\n const paddings = getParamValue('paddings', node, tensorMap, context);\n return [tfOps.spaceToBatchND(getParamValue('x', node, tensorMap, context), blockShape, paddings)];\n }\n case 'BatchToSpaceND': {\n const blockShape = getParamValue('blockShape', node, tensorMap, context);\n const crops = getParamValue('crops', node, tensorMap, context);\n return [tfOps.batchToSpaceND(getParamValue('x', node, tensorMap, context), blockShape, crops)];\n }\n case 'DepthToSpace': {\n const blockSize = getParamValue('blockSize', node, tensorMap, context);\n const dataFormat = getParamValue('dataFormat', node, tensorMap, context).toUpperCase();\n return [tfOps.depthToSpace(getParamValue('x', node, tensorMap, context), blockSize, dataFormat)];\n }\n case 'BroadcastTo': {\n return [tfOps.broadcastTo(getParamValue('x', node, tensorMap, context), getParamValue('shape', node, tensorMap, context))];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'transformation';\n//# sourceMappingURL=transformation_executor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { NodeValueImpl } from './custom_op/node_value_impl';\nimport { getRegisteredOp } from './custom_op/register';\nimport * as arithmetic from './executors/arithmetic_executor';\nimport * as basicMath from './executors/basic_math_executor';\nimport * as control from './executors/control_executor';\nimport * as convolution from './executors/convolution_executor';\nimport * as creation from './executors/creation_executor';\nimport * as dynamic from './executors/dynamic_executor';\nimport * as evaluation from './executors/evaluation_executor';\nimport * as graph from './executors/graph_executor';\nimport * as hashTable from './executors/hash_table_executor';\nimport * as image from './executors/image_executor';\nimport * as logical from './executors/logical_executor';\nimport * as matrices from './executors/matrices_executor';\nimport * as normalization from './executors/normalization_executor';\nimport * as reduction from './executors/reduction_executor';\nimport * as sliceJoin from './executors/slice_join_executor';\nimport * as spectral from './executors/spectral_executor';\nimport * as transformation from './executors/transformation_executor';\n/**\n * Executes the op defined by the node object.\n * @param node\n * @param tensorMap contains tensors for executed nodes and weights\n * @param context contains tensors and information for running the current node.\n * @param resourceManager Optional. 
Contains global resources of the model.\n */\nexport function executeOp(node, tensorMap, context, resourceManager) {\n const value = ((node, tensorMap, context) => {\n switch (node.category) {\n case 'arithmetic':\n return tfc.tidy(() => arithmetic.executeOp(node, tensorMap, context));\n case 'basic_math':\n return tfc.tidy(() => basicMath.executeOp(node, tensorMap, context));\n case 'control':\n return control.executeOp(node, tensorMap, context);\n case 'convolution':\n return tfc.tidy(() => convolution.executeOp(node, tensorMap, context));\n case 'creation':\n return tfc.tidy(() => creation.executeOp(node, tensorMap, context));\n case 'dynamic':\n return dynamic.executeOp(node, tensorMap, context);\n case 'evaluation':\n return tfc.tidy(() => evaluation.executeOp(node, tensorMap, context));\n case 'image':\n return tfc.tidy(() => image.executeOp(node, tensorMap, context));\n case 'graph':\n return tfc.tidy(() => graph.executeOp(node, tensorMap, context));\n case 'logical':\n return tfc.tidy(() => logical.executeOp(node, tensorMap, context));\n case 'matrices':\n return tfc.tidy(() => matrices.executeOp(node, tensorMap, context));\n case 'normalization':\n return tfc.tidy(() => normalization.executeOp(node, tensorMap, context));\n case 'reduction':\n return tfc.tidy(() => reduction.executeOp(node, tensorMap, context));\n case 'slice_join':\n return tfc.tidy(() => sliceJoin.executeOp(node, tensorMap, context));\n case 'spectral':\n return tfc.tidy(() => spectral.executeOp(node, tensorMap, context));\n case 'transformation':\n return tfc.tidy(() => transformation.executeOp(node, tensorMap, context));\n case 'hash_table':\n return hashTable.executeOp(node, tensorMap, context, resourceManager);\n case 'custom':\n const opMapper = getRegisteredOp(node.op);\n if (opMapper && opMapper.customExecutor) {\n return opMapper.customExecutor(new NodeValueImpl(node, tensorMap, context));\n }\n else {\n throw TypeError(`Custom op ${node.op} is not registered.`);\n }\n default:\n throw TypeError(`Unknown op '${node.op}'. File an issue at ` +\n `https://github.com/tensorflow/tfjs/issues so we can add it` +\n `, or register a custom execution with tf.registerOp()`);\n }\n })(node, tensorMap, context);\n if (tfc.util.isPromise(value)) {\n return value.then((data) => [].concat(data));\n }\n return [].concat(value);\n}\n//# sourceMappingURL=operation_executor.js.map", "/**\n * ExecutionContext captures the runtime environment of the node. 
It keeps\n * track of the current frame and iteration for the control flow ops.\n *\n * For example, typical Dynamic RNN model may contain loops, for which\n * TensorFlow will generate graphs with Enter/Exit nodes to control the\n * current execution frame, and NextIteration Nodes for iteration id increment.\n * For model with branch logic, TensorFLow will generate Switch/Merge ops.\n */\nexport class ExecutionContext {\n constructor(weightMap = {}, tensorArrayMap = {}, tensorListMap = {}, functionMap = {}) {\n this.weightMap = weightMap;\n this.tensorArrayMap = tensorArrayMap;\n this.tensorListMap = tensorListMap;\n this.functionMap = functionMap;\n this.rootContext = { id: 0, frameName: '', iterationId: 0 };\n this.contexts = [this.rootContext];\n this.lastId = 0;\n this.generateCurrentContextIds();\n }\n newFrame(id, frameName) {\n return { id, frameName, iterationId: 0 };\n }\n /**\n * Set the current context\n * @param contexts: ExecutionContextInfo[] the current path of execution\n * frames\n */\n set currentContext(contexts) {\n if (this.contexts !== contexts) {\n this.contexts = contexts;\n this.generateCurrentContextIds();\n }\n }\n get currentContext() {\n return this.contexts;\n }\n /**\n * Returns the current context in string format.\n */\n get currentContextId() {\n return this._currentContextIds[0];\n }\n /**\n * Returns the current context and all parent contexts in string format.\n * This allow access to the nodes in the current and parent frames.\n */\n get currentContextIds() {\n return this._currentContextIds;\n }\n generateCurrentContextIds() {\n const names = [];\n for (let i = 0; i < this.contexts.length - 1; i++) {\n const contexts = this.contexts.slice(0, this.contexts.length - i);\n names.push(this.contextIdforContexts(contexts));\n }\n names.push('');\n this._currentContextIds = names;\n }\n contextIdforContexts(contexts) {\n return contexts ?\n contexts\n .map(context => (context.id === 0 && context.iterationId === 0) ?\n '' :\n `${context.frameName}-${context.iterationId}`)\n .join('/') :\n '';\n }\n /**\n * Enter a new frame, a new context is pushed on the current context list.\n * @param frameId new frame id\n */\n enterFrame(frameId) {\n if (this.contexts) {\n this.lastId++;\n this.contexts = this.contexts.slice();\n this.contexts.push(this.newFrame(this.lastId, frameId));\n this._currentContextIds.unshift(this.contextIdforContexts(this.contexts));\n }\n }\n /**\n * Exit the current frame, the last context is removed from the current\n * context list.\n */\n exitFrame() {\n if (this.contexts && this.contexts.length > 1) {\n this.contexts = this.contexts.slice();\n this.contexts.splice(-1);\n this.currentContextIds.shift();\n }\n else {\n throw new Error('Cannot exit frame, the context is empty');\n }\n }\n /**\n * Enter the next iteration of a loop, the iteration id of last context is\n * increased.\n */\n nextIteration() {\n if (this.contexts && this.contexts.length > 0) {\n this.contexts = this.contexts.slice();\n this.lastId++;\n const context = Object.assign({}, this.contexts[this.contexts.length - 1]);\n context.iterationId += 1;\n context.id = this.lastId;\n this.contexts.splice(-1, 1, context);\n this._currentContextIds.splice(0, 1, this.contextIdforContexts(this.contexts));\n }\n else {\n throw new Error('Cannot increase frame iteration, the context is empty');\n }\n }\n getWeight(name) {\n return this.weightMap[name];\n }\n addTensorArray(tensorArray) {\n this.tensorArrayMap[tensorArray.id] = tensorArray;\n }\n getTensorArray(id) {\n return 
this.tensorArrayMap[id];\n }\n addTensorList(tensorList) {\n this.tensorListMap[tensorList.id] = tensorList;\n }\n getTensorList(id) {\n return this.tensorListMap[id];\n }\n dispose(keepIds) {\n for (const key in this.tensorArrayMap) {\n this.tensorArrayMap[key].clearAndClose(keepIds);\n }\n for (const key in this.tensorListMap) {\n this.tensorListMap[key].clearAndClose(keepIds);\n }\n }\n}\n//# sourceMappingURL=execution_context.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { parseNodeName } from '../operations/executors/utils';\n/**\n * Given graph inputs and desired outputs, find the minimal set of nodes\n * to execute in order to compute the outputs. In addition return other useful\n * info such:\n * - Missing inputs needed to compute the output.\n * - Whether the subgraph contains dynamic ops (control flow, dynamic shape).\n * - Alternative inputs in order to avoid async (dynamic op) execution.\n */\nexport function getExecutionSubgraph(inputs, outputs, weightMap, initNodes) {\n const usedNodes = new Set();\n const missingInputs = [];\n let dynamicNode = null;\n let syncInputs = null;\n // Start with the outputs, going backwards and find all the nodes that are\n // needed to compute those outputs.\n const seen = new Set();\n const inputNodeNames = Object.keys(inputs).map(name => parseNodeName(name)[0]);\n let initNodeNames = [];\n if (initNodes != null) {\n initNodeNames = initNodes.map(node => parseNodeName(node.name)[0]);\n }\n const frontier = [...outputs];\n while (frontier.length > 0) {\n const node = frontier.pop();\n if (isControlFlow(node) || isDynamicShape(node) || isHashTable(node)) {\n if (dynamicNode == null) {\n dynamicNode = node;\n syncInputs = dynamicNode.children.map(child => child.name)\n .filter(name => usedNodes.has(name));\n }\n }\n usedNodes.add(node.name);\n // Weights are dead end since we already have their values.\n if (weightMap[node.name] != null) {\n continue;\n }\n // This node is a dead end since it's one of the user-provided inputs.\n if (inputNodeNames.indexOf(node.name) !== -1) {\n continue;\n }\n // This node is a dead end since it doesn't have any inputs.\n if (initNodeNames.indexOf(node.name) !== -1) {\n continue;\n }\n if (node.inputs.length === 0) {\n missingInputs.push(node.name);\n continue;\n }\n node.inputs.forEach(input => {\n // Don't add to the frontier if it is already there.\n if (seen.has(input.name)) {\n return;\n }\n seen.add(input.name);\n frontier.push(input);\n });\n }\n return { inputs, outputs, usedNodes, missingInputs, dynamicNode, syncInputs };\n}\n/**\n * Given the execution info, return a list of nodes in topological order that\n * need to be executed to compute the output.\n */\nexport function getNodesInTopologicalOrder(graph, weightMap, executionInfo) {\n const { usedNodes, inputs } = executionInfo;\n const frontier = [];\n const inputNodes = 
Object.keys(inputs)\n .map(name => parseNodeName(name)[0])\n .map(name => graph.nodes[name]);\n const initNodes = graph.initNodes;\n inputNodes.forEach(input => {\n if (usedNodes.has(input.name)) {\n frontier.push(input);\n }\n });\n graph.weights.forEach(weight => {\n if (usedNodes.has(weight.name)) {\n frontier.push(weight);\n }\n });\n if (initNodes != null) {\n initNodes.forEach(node => {\n if (usedNodes.has(node.name)) {\n frontier.push(node);\n }\n });\n }\n const seen = new Set();\n const orderedNodes = [];\n while (frontier.length > 0) {\n const node = frontier.pop();\n seen.add(node.name);\n if (!weightMap[node.name]) {\n orderedNodes.push(node);\n }\n node.children.forEach(child => {\n if (!seen.has(child.name) && usedNodes.has(child.name) &&\n child.inputs.every(input => seen.has(input.name))) {\n frontier.push(child);\n }\n });\n }\n return orderedNodes;\n}\nconst CONTROL_FLOW_OPS = [\n 'Switch', 'Merge', 'Enter', 'Exit', 'NextIteration', 'StatelessIf',\n 'StatelessWhile', 'if', 'While'\n];\nconst DYNAMIC_SHAPE_OPS = [\n 'NonMaxSuppressionV2', 'NonMaxSuppressionV3', 'NonMaxSuppressionV5', 'Where'\n];\nconst HASH_TABLE_OPS = [\n 'HashTable', 'HashTableV2', 'LookupTableImport', 'LookupTableImportV2',\n 'LookupTableFind', 'LookupTableFindV2'\n];\nexport function isControlFlow(node) {\n return CONTROL_FLOW_OPS.indexOf(node.op) >= 0;\n}\nexport function isDynamicShape(node) {\n return DYNAMIC_SHAPE_OPS.indexOf(node.op) >= 0;\n}\nexport function isHashTable(node) {\n return HASH_TABLE_OPS.indexOf(node.op) >= 0;\n}\n//# sourceMappingURL=model_analysis.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { tidy, util } from '@tensorflow/tfjs-core';\nimport { getNodeNameAndIndex, getParamValue, getTensor, getTensorsForCurrentContenxt, parseNodeName } from '../operations/executors/utils';\nimport { executeOp } from '../operations/operation_executor';\nimport { ExecutionContext } from './execution_context';\nimport { getExecutionSubgraph, getNodesInTopologicalOrder, isControlFlow } from './model_analysis';\nexport class GraphExecutor {\n /**\n *\n * @param graph Graph the model or function graph to be executed.\n * @param parent When building function exector you need to set the parent\n * executor. 
Since the weights and function executor maps are set at parant\n * level, that function executor can access the function maps and weight maps\n * through the parent.\n */\n constructor(graph, parent) {\n this.graph = graph;\n this.parent = parent;\n this.compiledMap = new Map();\n this._weightMap = {};\n this.SEPERATOR = ',';\n this._functions = {};\n this._functionExecutorMap = {};\n this._outputs = graph.outputs;\n this._inputs = graph.inputs;\n this._initNodes = graph.initNodes;\n this._signature = graph.signature;\n this._functions = graph.functions;\n // create sub-graph executors\n if (graph.functions != null) {\n Object.keys(graph.functions).forEach(name => {\n this._functionExecutorMap[name] =\n new GraphExecutor(graph.functions[name], this);\n });\n }\n }\n get weightIds() {\n return this.parent ? this.parent.weightIds : this._weightIds;\n }\n get functionExecutorMap() {\n return this.parent ? this.parent.functionExecutorMap :\n this._functionExecutorMap;\n }\n get weightMap() {\n return this.parent ? this.parent.weightMap : this._weightMap;\n }\n set weightMap(weightMap) {\n const weightIds = Object.keys(weightMap).map(key => weightMap[key].map(tensor => tensor.id));\n this._weightIds = [].concat(...weightIds);\n this._weightMap = weightMap;\n }\n /**\n * Set `ResourceManager` shared by executors of a model.\n * @param resourceManager: `ResourceManager` of the `GraphModel`.\n */\n set resourceManager(resourceManager) {\n this._resourceManager = resourceManager;\n }\n get inputs() {\n return this._inputs.map(node => {\n return {\n name: node.name,\n shape: node.attrParams['shape'] ?\n node.attrParams['shape'].value :\n undefined,\n dtype: node.attrParams['dtype'] ?\n node.attrParams['dtype'].value :\n undefined\n };\n });\n }\n get outputs() {\n return this._outputs.map(node => {\n return {\n name: node.name,\n shape: node.attrParams['shape'] ?\n node.attrParams['shape'].value :\n undefined,\n dtype: node.attrParams['dtype'] ?\n node.attrParams['dtype'].value :\n undefined\n };\n });\n }\n get inputNodes() {\n return this._inputs.map(node => node.signatureKey || node.name);\n }\n get outputNodes() {\n return this._outputs.map((node) => {\n const name = node.signatureKey || node.name;\n return node.defaultOutput ? (`${name}:${node.defaultOutput}`) : name;\n });\n }\n get functions() {\n return Object.keys(this._functions).reduce((map, key) => {\n map[key] = this._functions[key].signature;\n return map;\n }, {});\n }\n getCompilationKey(inputs, outputs) {\n const sortedInputs = inputs.map(node => node.name).sort();\n const sortedOutputs = outputs.map(node => node.name).sort();\n return sortedInputs.join(this.SEPERATOR) + '--' +\n sortedOutputs.join(this.SEPERATOR);\n }\n /**\n * Compiles the inference graph and returns the minimal set of nodes that are\n * required for execution, in the correct execution order.\n */\n compile(inputs, outputs) {\n const executionInfo = getExecutionSubgraph(inputs, outputs, this.weightMap, this._initNodes);\n const { missingInputs, dynamicNode, syncInputs } = executionInfo;\n if (dynamicNode != null) {\n throw new Error(`This execution contains the node '${dynamicNode.name}', which has ` +\n `the dynamic op '${dynamicNode.op}'. Please use ` +\n `model.executeAsync() instead. 
Alternatively, to avoid the ` +\n `dynamic ops, specify the inputs [${syncInputs}]`);\n }\n if (missingInputs.length > 0) {\n const outNames = outputs.map(n => n.name);\n const inNames = Object.keys(inputs);\n throw new Error(`Cannot compute the outputs [${outNames}] from the provided inputs ` +\n `[${inNames}]. Missing the following inputs: [${missingInputs}]`);\n }\n return getNodesInTopologicalOrder(this.graph, this.weightMap, executionInfo);\n }\n /**\n * Executes the inference for given input tensors.\n * @param inputs Tensor map for the model inputs, keyed by the input node\n * names.\n * @param outputs Optional. output node name from the Tensorflow model, if\n * no outputs are specified, the default outputs of the model would be used.\n * You can inspect intermediate nodes of the model by adding them to the\n * outputs array.\n */\n execute(inputs, outputs) {\n inputs = this.mapInputs(inputs);\n const names = Object.keys(inputs).sort();\n this.checkInputs(inputs);\n this.checkInputShapeAndType(inputs);\n outputs = this.mapOutputs(outputs);\n this.checkOutputs(outputs);\n const inputNodes = names.map(name => this.graph.nodes[parseNodeName(name)[0]]);\n const outputNodeNames = outputs.map(name => parseNodeName(name)[0]);\n let outputNodes = outputNodeNames.map(name => this.graph.nodes[name]);\n // If no outputs are specified, then use the default outputs of the model.\n if (outputNodes.length === 0) {\n outputNodes = this._outputs;\n }\n const compilationKey = this.getCompilationKey(inputNodes, outputNodes);\n // Do nothing if the compiled graph cache contains the input.\n let orderedNodes = this.compiledMap.get(compilationKey);\n if (orderedNodes == null) {\n orderedNodes = this.compile(inputs, outputNodes);\n this.compiledMap.set(compilationKey, orderedNodes);\n }\n const tensorArrayMap = {};\n const tensorListMap = {};\n return tidy(() => {\n const context = new ExecutionContext(this.weightMap, tensorArrayMap, tensorListMap, this.functionExecutorMap);\n const tensorsMap = Object.assign({}, this.weightMap);\n Object.keys(inputs).forEach(name => {\n const [nodeName, index] = parseNodeName(name);\n const tensors = [];\n tensors[index] = inputs[name];\n tensorsMap[nodeName] = tensors;\n });\n const tensorsToKeep = this.getFrozenTensorIds(tensorsMap);\n const intermediateTensorConsumerCount = {};\n for (let i = 0; i < orderedNodes.length; i++) {\n const node = orderedNodes[i];\n if (!tensorsMap[node.name]) {\n const tensors = executeOp(node, tensorsMap, context, this._resourceManager);\n if (util.isPromise(tensors)) {\n throw new Error(`The execution of the op '${node.op}' returned a promise. 
` +\n `Please use model.executeAsync() instead.`);\n }\n tensorsMap[node.name] = tensors;\n this.checkTensorForDisposal(node.name, node, tensorsMap, context, tensorsToKeep, outputNodeNames, intermediateTensorConsumerCount);\n }\n }\n // dispose the context for the root executor\n if (this.parent == null) {\n context.dispose(tensorsToKeep);\n }\n return outputs.map(name => getTensor(name, tensorsMap, context));\n });\n }\n getFrozenTensorIds(tensorMap) {\n const ids = [].concat.apply([], Object.keys(tensorMap)\n .map(key => tensorMap[key])\n .map(tensors => tensors.map(tensor => tensor.id)));\n return new Set(ids);\n }\n checkTensorForDisposal(nodeName, node, tensorMap, context, tensorsToKeep, outputNames, intermediateTensorConsumerCount) {\n // Skip output nodes and any control flow nodes, since its dependency is\n // tricky to track correctly.\n if (node.category === 'control' || outputNames.indexOf(nodeName) !== -1) {\n return;\n }\n tensorMap[nodeName].forEach(tensor => {\n if (tensor != null) {\n intermediateTensorConsumerCount[tensor.id] =\n (intermediateTensorConsumerCount[tensor.id] || 0) +\n node.children.length;\n }\n });\n node.inputs.forEach(input => {\n // Skip any control flow nodes, since its dependency is tricky to track\n // correctly.\n if (input.category !== 'control') {\n const tensors = getTensorsForCurrentContenxt(input.name, tensorMap, context);\n if (tensors != null) {\n tensors.forEach(tensor => {\n if (tensor && !tensorsToKeep.has(tensor.id)) {\n const count = intermediateTensorConsumerCount[tensor.id];\n if (count === 1) {\n tensor.dispose();\n delete intermediateTensorConsumerCount[tensor.id];\n }\n else if (count != null) {\n // only intermediate nodes has count set, inputs and weights are\n // not.\n intermediateTensorConsumerCount[tensor.id]--;\n }\n }\n });\n }\n }\n });\n }\n /**\n * Executes the inference for given input tensors in Async fashion.\n * @param inputs Tensor map for the model inputs, keyed by the input node\n * names.\n * @param outputs output node name from the Tensorflow model, if no outputs\n * are specified, the default outputs of the model would be used. You can\n * inspect intermediate nodes of the model by adding them to the outputs\n * array.\n */\n async executeAsync(inputs, outputs) {\n return this._executeAsync(inputs, outputs);\n }\n /**\n * Executes the inference for given input tensors in Async fashion.\n * @param inputs Tensor map for the model inputs, keyed by the input node\n * names.\n * @param outputs Optional. output node name from the Tensorflow model,\n * if no outputs are specified, the default outputs of the model would be\n * used. You can inspect intermediate nodes of the model by adding them to the\n * outputs array.\n * @param isFunctionExecution Optional. Flag for executing a function.\n * @param tensorArrayMap Optional, global TensorArray map by id. Used for\n * function execution.\n * @param tensorArrayMap Optinal global TensorList map by id. 
Used for\n * function execution.\n */\n async _executeAsync(inputs, outputs, isFunctionExecution = false, tensorArrayMap = {}, tensorListMap = {}) {\n if (!isFunctionExecution) {\n inputs = this.mapInputs(inputs);\n this.checkInputs(inputs);\n this.checkInputShapeAndType(inputs);\n outputs = this.mapOutputs(outputs);\n this.checkOutputs(outputs);\n }\n const context = new ExecutionContext(this.weightMap, tensorArrayMap, tensorListMap, this.functionExecutorMap);\n // Graph with control flow op requires runtime evaluation of the execution\n // order, while without control flow the execution order is pre-determined\n // in the compile method.\n const tensorMap = await this.executeWithControlFlow(inputs, context, outputs, isFunctionExecution);\n const results = outputs.map(name => getTensor(name, tensorMap, context));\n // dispose all the intermediate tensors\n const outputIds = results.map(t => t.id);\n const inputIds = Object.keys(inputs).map(name => inputs[name].id);\n const keepIds = new Set([...outputIds, ...inputIds, ...this.weightIds]);\n Object.keys(tensorMap).forEach(key => {\n const tensorArray = tensorMap[key];\n tensorArray.forEach(tensor => {\n if (tensor && !tensor.isDisposed && !keepIds.has(tensor.id)) {\n tensor.dispose();\n }\n });\n });\n // dispose the context for the root executor\n if (this.parent == null) {\n context.dispose(keepIds);\n }\n return results;\n }\n async executeFunctionAsync(inputs, tensorArrayMap, tensorListMap) {\n const mappedInputs = inputs.reduce((map, tensor, index) => {\n map[this.inputs[index].name] = tensor;\n return map;\n }, {});\n return this._executeAsync(mappedInputs, this.outputNodes, true, tensorArrayMap, tensorListMap);\n }\n /**\n * When there are control flow nodes in the graph, the graph execution use\n * ExecutionContext to keep track of the frames and loop iterators.\n * @param inputs placeholder tensors for the graph.\n * @param context the execution context object for current execution.\n * @param outputNames Optional. output node name from the Tensorflow model,\n * if no outputs are specified, the default outputs of the model would be\n * used. 
You can inspect intermediate nodes of the model by adding them to the\n * outputs array.\n * @param isFunctionExecution Flag for executing a function.\n */\n async executeWithControlFlow(inputs, context, outputNames, isFunctionExecution) {\n const names = Object.keys(inputs);\n const inputNodes = names.map(name => this.graph.nodes[parseNodeName(name)[0]]);\n const outputNodeNames = outputNames.map(name => parseNodeName(name)[0]);\n let outputNodes = outputNodeNames.map(name => this.graph.nodes[name]);\n // If no outputs are specified, then use the default outputs of the model.\n if (outputNodes.length === 0) {\n outputNodes = this._outputs;\n }\n const { usedNodes, missingInputs, dynamicNode, syncInputs } = getExecutionSubgraph(inputs, outputNodes, this.weightMap, this._initNodes);\n // First nodes to execute include inputNodes, weights, and initNodes.\n const stack = [\n ...inputNodes, ...this.graph.weights, ...(this._initNodes || [])\n ].map(node => {\n return { node, contexts: context.currentContext };\n });\n const tensorsMap = Object.assign({}, this.weightMap);\n Object.keys(inputs).forEach(name => {\n const [nodeName, index] = parseNodeName(name);\n const tensors = [];\n tensors[index] = inputs[name];\n tensorsMap[nodeName] = tensors;\n });\n const intermediateTensorConsumerCount = {};\n const tensorsToKeep = this.getFrozenTensorIds(tensorsMap);\n const added = {};\n while (stack.length > 0) {\n const promises = this.processStack(inputNodes, stack, context, tensorsMap, added, tensorsToKeep, outputNodeNames, intermediateTensorConsumerCount, usedNodes);\n await Promise.all(promises);\n }\n if (dynamicNode == null && !isFunctionExecution) {\n console.warn(`This model execution did not contain any nodes with control flow ` +\n `or dynamic output shapes. You can use model.execute() instead.`);\n }\n const missingOutputs = outputNodes\n .filter(node => !isControlFlow(node) &&\n !getTensor(node.name, tensorsMap, context))\n .map(node => node.name);\n if (missingOutputs.length > 0) {\n let alternativeMsg = '';\n if (dynamicNode != null) {\n alternativeMsg =\n `Alternatively, to avoid the dynamic ops, use model.execute() ` +\n `and specify the inputs [${syncInputs}]`;\n }\n throw new Error(`Cannot compute the outputs [${missingOutputs}] from the provided ` +\n `inputs [${names}]. Consider providing the following inputs: ` +\n `[${missingInputs}]. 
${alternativeMsg}`);\n }\n return tensorsMap;\n }\n processStack(inputNodes, stack, context, tensorMap, added, tensorsToKeep, outputNames, intermediateTensorConsumerCount, usedNodes) {\n const promises = [];\n while (stack.length > 0) {\n const item = stack.pop();\n context.currentContext = item.contexts;\n let nodeName = '';\n // The tensor of the Enter op with isConstant set should be set\n // in the parent scope, so it will be available as constant for the\n // whole loop.\n if (item.node.op === 'Enter' &&\n getParamValue('isConstant', item.node, tensorMap, context)) {\n [nodeName] = getNodeNameAndIndex(item.node.name, context);\n }\n // only process nodes that are not in the tensorMap yet, this include\n // inputNodes and internal initNodes.\n if (tensorMap[item.node.name] == null) {\n const tensors = executeOp(item.node, tensorMap, context, this._resourceManager);\n if (!nodeName) {\n [nodeName] = getNodeNameAndIndex(item.node.name, context);\n }\n const currentContext = context.currentContext;\n if (util.isPromise(tensors)) {\n promises.push(tensors.then(t => {\n tensorMap[nodeName] = t;\n context.currentContext = currentContext;\n this.checkTensorForDisposal(nodeName, item.node, tensorMap, context, tensorsToKeep, outputNames, intermediateTensorConsumerCount);\n this.processChildNodes(item.node, stack, context, tensorMap, added, usedNodes);\n return t;\n }));\n }\n else {\n tensorMap[nodeName] = tensors;\n this.checkTensorForDisposal(nodeName, item.node, tensorMap, context, tensorsToKeep, outputNames, intermediateTensorConsumerCount);\n this.processChildNodes(item.node, stack, context, tensorMap, added, usedNodes);\n }\n }\n else {\n this.processChildNodes(item.node, stack, context, tensorMap, added, usedNodes);\n }\n }\n return promises;\n }\n processChildNodes(node, stack, context, tensorMap, added, usedNodes) {\n node.children.forEach((childNode) => {\n const [nodeName,] = getNodeNameAndIndex(childNode.name, context);\n if (added[nodeName] || !usedNodes.has(childNode.name)) {\n return;\n }\n // Merge op can be pushed if any of its inputs has value.\n if (childNode.op === 'Merge') {\n if (childNode.inputNames.some(name => {\n return !!getTensor(name, tensorMap, context);\n })) {\n added[nodeName] = true;\n stack.push({ contexts: context.currentContext, node: childNode });\n }\n }\n else // Otherwise all inputs must to have value.\n if (childNode.inputNames.every(name => {\n return !!getTensor(name, tensorMap, context);\n })) {\n added[nodeName] = true;\n stack.push({ contexts: context.currentContext, node: childNode });\n }\n });\n }\n /**\n * Releases the memory used by the weight tensors.\n */\n dispose() {\n Object.keys(this.weightMap)\n .forEach(key => this.weightMap[key].forEach(tensor => tensor.dispose()));\n }\n checkInputShapeAndType(inputs) {\n Object.keys(inputs).forEach(name => {\n const input = inputs[name];\n const [nodeName,] = parseNodeName(name);\n const node = this.graph.nodes[nodeName];\n if (node.attrParams['shape'] && node.attrParams['shape'].value) {\n const shape = node.attrParams['shape'].value;\n const match = shape.length === input.shape.length &&\n input.shape.every((dim, index) => shape[index] === -1 || shape[index] === dim);\n util.assert(match, () => `The shape of dict['${node.name}'] provided in ` +\n `model.execute(dict) must be [${shape}], but was ` +\n `[${input.shape}]`);\n }\n if (node.attrParams['dtype'] && node.attrParams['dtype'].value) {\n util.assert(input.dtype === node.attrParams['dtype'].value, () => `The dtype of dict['${node.name}'] 
provided in ` +\n `model.execute(dict) must be ` +\n `${node.attrParams['dtype'].value}, but was ${input.dtype}`);\n }\n });\n }\n mapInputs(inputs) {\n const result = {};\n for (const inputName in inputs) {\n if (this._signature != null && this._signature.inputs != null &&\n this._signature.inputs[inputName] != null) {\n const tensor = this._signature.inputs[inputName];\n result[tensor.name] = inputs[inputName];\n }\n else {\n result[inputName] = inputs[inputName];\n }\n }\n return result;\n }\n checkInputs(inputs) {\n const notInGraph = Object.keys(inputs).filter(name => {\n const [nodeName] = parseNodeName(name);\n return this.graph.nodes[nodeName] == null;\n });\n if (notInGraph.length > 0) {\n throw new Error(`The dict provided in model.execute(dict) has ` +\n `keys: [${notInGraph}] that are not part of graph`);\n }\n }\n mapOutputs(outputs) {\n return outputs.map(name => {\n if (this._signature != null && this._signature.outputs != null &&\n this._signature.outputs[name] != null) {\n const tensor = this._signature.outputs[name];\n return tensor.name;\n }\n return name;\n }, {});\n }\n checkOutputs(outputs) {\n outputs.forEach(name => {\n const [normalizedName] = parseNodeName(name);\n if (!this.graph.nodes[normalizedName]) {\n throw new Error(`The output '${name}' is not found in the graph`);\n }\n });\n }\n}\n//# sourceMappingURL=graph_executor.js.map", "/**\n * Contains global resources of a model.\n */\nexport class ResourceManager {\n constructor(hashTableNameToHandle = {}, hashTableMap = {}) {\n this.hashTableNameToHandle = hashTableNameToHandle;\n this.hashTableMap = hashTableMap;\n }\n /**\n * Register a `HashTable` in the resource manager.\n *\n * The `HashTable` can be retrieved by `resourceManager.getHashTableById`,\n * where id is the table handle tensor's id.\n *\n * @param name Op node name that creates the `HashTable`.\n * @param hashTable The `HashTable` to be added to resource manager.\n */\n addHashTable(name, hashTable) {\n this.hashTableNameToHandle[name] = hashTable.handle;\n this.hashTableMap[hashTable.id] = hashTable;\n }\n /**\n * Get the table handle by node name.\n * @param name Op node name that creates the `HashTable`. This name is also\n * used in the inputs list of lookup and import `HashTable` ops.\n */\n getHashTableHandleByName(name) {\n return this.hashTableNameToHandle[name];\n }\n /**\n * Get the actual `HashTable` by its handle tensor's id.\n * @param id The id of the handle tensor.\n */\n getHashTableById(id) {\n return this.hashTableMap[id];\n }\n /**\n * Dispose `ResourceManager`, including its hashTables and tensors in them.\n */\n dispose() {\n for (const key in this.hashTableMap) {\n this.hashTableMap[key].clearAndClose();\n delete this.hashTableMap[key];\n }\n for (const name in this.hashTableNameToHandle) {\n this.hashTableNameToHandle[name].dispose();\n delete this.hashTableNameToHandle[name];\n }\n }\n}\n//# sourceMappingURL=resource_manager.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { io, Tensor } from '@tensorflow/tfjs-core';\nimport { OperationMapper } from '../operations/operation_mapper';\nimport { GraphExecutor } from './graph_executor';\nimport { ResourceManager } from './resource_manager';\nexport const TFHUB_SEARCH_PARAM = '?tfjs-format=file';\nexport const DEFAULT_MODEL_NAME = 'model.json';\n/**\n * A `tf.GraphModel` is a directed, acyclic graph built from a\n * SavedModel GraphDef and allows inference execution.\n *\n * A `tf.GraphModel` can only be created by loading from a model converted from\n * a [TensorFlow SavedModel](https://www.tensorflow.org/guide/saved_model) using\n * the command line converter tool and loaded via `tf.loadGraphModel`.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\nexport class GraphModel {\n /**\n * @param modelUrl url for the model, or an `io.IOHandler`.\n * @param weightManifestUrl url for the weight file generated by\n * scripts/convert.py script.\n * @param requestOption options for Request, which allows to send credentials\n * and custom headers.\n * @param onProgress Optional, progress callback function, fired periodically\n * before the load is completed.\n */\n constructor(modelUrl, loadOptions = {}) {\n this.modelUrl = modelUrl;\n this.loadOptions = loadOptions;\n this.version = 'n/a';\n if (loadOptions == null) {\n this.loadOptions = {};\n }\n this.resourceManager = new ResourceManager();\n }\n // Returns the version information for the tensorflow model GraphDef.\n get modelVersion() {\n return this.version;\n }\n get inputNodes() {\n return this.executor.inputNodes;\n }\n get outputNodes() {\n return this.executor.outputNodes;\n }\n get inputs() {\n return this.executor.inputs;\n }\n get outputs() {\n return this.executor.outputs;\n }\n get weights() {\n return this.executor.weightMap;\n }\n findIOHandler() {\n const path = this.modelUrl;\n if (path.load != null) {\n // Path is an IO Handler.\n this.handler = path;\n }\n else if (this.loadOptions.requestInit != null) {\n this.handler = io.browserHTTPRequest(path, this.loadOptions);\n }\n else {\n const handlers = io.getLoadHandlers(path, this.loadOptions);\n if (handlers.length === 0) {\n // For backward compatibility: if no load handler can be found,\n // assume it is a relative http path.\n handlers.push(io.browserHTTPRequest(path, this.loadOptions));\n }\n else if (handlers.length > 1) {\n throw new Error(`Found more than one (${handlers.length}) load handlers for ` +\n `URL '${[path]}'`);\n }\n this.handler = handlers[0];\n }\n }\n /**\n * Loads the model and weight files, construct the in memory weight map and\n * compile the inference graph.\n */\n async load() {\n this.findIOHandler();\n if (this.handler.load == null) {\n throw new Error('Cannot proceed with model loading because the IOHandler provided ' +\n 'does not have the `load` method implemented.');\n }\n const artifacts 
= await this.handler.load();\n return this.loadSync(artifacts);\n }\n /**\n * Synchronously construct the in memory weight map and\n * compile the inference graph. Also initialize hashtable if any.\n *\n * @doc {heading: 'Models', subheading: 'Classes', ignoreCI: true}\n */\n loadSync(artifacts) {\n this.artifacts = artifacts;\n const graph = this.artifacts.modelTopology;\n let signature = {};\n if (this.artifacts.userDefinedMetadata != null) {\n signature = // tslint:disable-next-line:no-any\n this.artifacts.userDefinedMetadata.signature;\n }\n this.version = `${graph.versions.producer}.${graph.versions.minConsumer}`;\n const weightMap = io.decodeWeights(this.artifacts.weightData, this.artifacts.weightSpecs);\n this.executor = new GraphExecutor(OperationMapper.Instance.transformGraph(graph, signature));\n this.executor.weightMap = this.convertTensorMapToTensorsMap(weightMap);\n // Attach a model-level resourceManager to each executor to share resources,\n // such as `HashTable`.\n this.executor.resourceManager = this.resourceManager;\n if (artifacts.modelInitializer != null) {\n const initializer = OperationMapper.Instance.transformGraph(artifacts.modelInitializer);\n this.initializer = new GraphExecutor(initializer);\n this.initializer.weightMap = this.executor.weightMap;\n // Attach a model-level resourceManager to the initializer, the\n // hashTables created from when executing the initializer will be stored\n // in the resourceManager.\n this.initializer.resourceManager = this.resourceManager;\n this.initializer.executeAsync({}, []);\n }\n return true;\n }\n /**\n * Save the configuration and/or weights of the GraphModel.\n *\n * An `IOHandler` is an object that has a `save` method of the proper\n * signature defined. The `save` method manages the storing or\n * transmission of serialized data (\"artifacts\") that represent the\n * model's topology and weights onto or via a specific medium, such as\n * file downloads, local storage, IndexedDB in the web browser and HTTP\n * requests to a server. TensorFlow.js provides `IOHandler`\n * implementations for a number of frequently used saving mediums, such as\n * `tf.io.browserDownloads` and `tf.io.browserLocalStorage`. 
See `tf.io`\n * for more details.\n *\n * This method also allows you to refer to certain types of `IOHandler`s\n * as URL-like string shortcuts, such as 'localstorage://' and\n * 'indexeddb://'.\n *\n * Example 1: Save `model`'s topology and weights to browser [local\n * storage](https://developer.mozilla.org/en-US/docs/Web/API/Window/localStorage);\n * then load it back.\n *\n * ```js\n * const modelUrl =\n * 'https://storage.googleapis.com/tfjs-models/savedmodel/mobilenet_v2_1.0_224/model.json';\n * const model = await tf.loadGraphModel(modelUrl);\n * const zeros = tf.zeros([1, 224, 224, 3]);\n * model.predict(zeros).print();\n *\n * const saveResults = await model.save('localstorage://my-model-1');\n *\n * const loadedModel = await tf.loadGraphModel('localstorage://my-model-1');\n * console.log('Prediction from loaded model:');\n * model.predict(zeros).print();\n * ```\n *\n * @param handlerOrURL An instance of `IOHandler` or a URL-like,\n * scheme-based string shortcut for `IOHandler`.\n * @param config Options for saving the model.\n * @returns A `Promise` of `SaveResult`, which summarizes the result of\n * the saving, such as byte sizes of the saved artifacts for the model's\n * topology and weight values.\n *\n * @doc {heading: 'Models', subheading: 'Classes', ignoreCI: true}\n */\n async save(handlerOrURL, config) {\n if (typeof handlerOrURL === 'string') {\n const handlers = io.getSaveHandlers(handlerOrURL);\n if (handlers.length === 0) {\n throw new Error(`Cannot find any save handlers for URL '${handlerOrURL}'`);\n }\n else if (handlers.length > 1) {\n throw new Error(`Found more than one (${handlers.length}) save handlers for ` +\n `URL '${handlerOrURL}'`);\n }\n handlerOrURL = handlers[0];\n }\n if (handlerOrURL.save == null) {\n throw new Error('GraphModel.save() cannot proceed because the IOHandler ' +\n 'provided does not have the `save` attribute defined.');\n }\n return handlerOrURL.save(this.artifacts);\n }\n /**\n * Execute the inference for the input tensors.\n *\n * @param input The input tensors, when there is single input for the model,\n * inputs param should be a `tf.Tensor`. For models with mutliple inputs,\n * inputs params should be in either `tf.Tensor`[] if the input order is\n * fixed, or otherwise NamedTensorMap format.\n *\n * For model with multiple inputs, we recommend you use NamedTensorMap as the\n * input type, if you use `tf.Tensor`[], the order of the array needs to\n * follow the\n * order of inputNodes array. @see {@link GraphModel.inputNodes}\n *\n * You can also feed any intermediate nodes using the NamedTensorMap as the\n * input type. For example, given the graph\n * InputNode => Intermediate => OutputNode,\n * you can execute the subgraph Intermediate => OutputNode by calling\n * model.execute('IntermediateNode' : tf.tensor(...));\n *\n * This is useful for models that uses tf.dynamic_rnn, where the intermediate\n * state needs to be fed manually.\n *\n * For batch inference execution, the tensors for each input need to be\n * concatenated together. For example with mobilenet, the required input shape\n * is [1, 244, 244, 3], which represents the [batch, height, width, channel].\n * If we are provide a batched data of 100 images, the input tensor should be\n * in the shape of [100, 244, 244, 3].\n *\n * @param config Prediction configuration for specifying the batch size and\n * output node names. Currently the batch size option is ignored for graph\n * model.\n *\n * @returns Inference result tensors. 
The output would be single `tf.Tensor`\n * if model has single output node, otherwise Tensor[] or NamedTensorMap[]\n * will be returned for model with multiple outputs.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n predict(inputs, config) {\n return this.execute(inputs, this.outputNodes);\n }\n normalizeInputs(inputs) {\n if (!(inputs instanceof Tensor) && !Array.isArray(inputs)) {\n // The input is already a NamedTensorMap.\n return inputs;\n }\n inputs = Array.isArray(inputs) ? inputs : [inputs];\n if (inputs.length !== this.inputNodes.length) {\n throw new Error('Input tensor count mismatch,' +\n `the graph model has ${this.inputNodes.length} placeholders, ` +\n `while there are ${inputs.length} input tensors.`);\n }\n return this.inputNodes.reduce((map, inputName, i) => {\n map[inputName] = inputs[i];\n return map;\n }, {});\n }\n normalizeOutputs(outputs) {\n outputs = outputs || this.outputNodes;\n return !Array.isArray(outputs) ? [outputs] : outputs;\n }\n /**\n * Executes inference for the model for given input tensors.\n * @param inputs tensor, tensor array or tensor map of the inputs for the\n * model, keyed by the input node names.\n * @param outputs output node name from the Tensorflow model, if no\n * outputs are specified, the default outputs of the model would be used.\n * You can inspect intermediate nodes of the model by adding them to the\n * outputs array.\n *\n * @returns A single tensor if provided with a single output or no outputs\n * are provided and there is only one default output, otherwise return a\n * tensor array. The order of the tensor array is the same as the outputs\n * if provided, otherwise the order of outputNodes attribute of the model.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n execute(inputs, outputs) {\n inputs = this.normalizeInputs(inputs);\n outputs = this.normalizeOutputs(outputs);\n const result = this.executor.execute(inputs, outputs);\n return result.length > 1 ? result : result[0];\n }\n /**\n * Executes inference for the model for given input tensors in async\n * fashion, use this method when your model contains control flow ops.\n * @param inputs tensor, tensor array or tensor map of the inputs for the\n * model, keyed by the input node names.\n * @param outputs output node name from the Tensorflow model, if no outputs\n * are specified, the default outputs of the model would be used. You can\n * inspect intermediate nodes of the model by adding them to the outputs\n * array.\n *\n * @returns A Promise of single tensor if provided with a single output or\n * no outputs are provided and there is only one default output, otherwise\n * return a tensor map.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n async executeAsync(inputs, outputs) {\n inputs = this.normalizeInputs(inputs);\n outputs = this.normalizeOutputs(outputs);\n const result = await this.executor.executeAsync(inputs, outputs);\n return result.length > 1 ? 
result : result[0];\n }\n convertTensorMapToTensorsMap(map) {\n return Object.keys(map).reduce((newMap, key) => {\n newMap[key] = [map[key]];\n return newMap;\n }, {});\n }\n /**\n * Releases the memory used by the weight tensors and resourceManager.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n dispose() {\n this.executor.dispose();\n if (this.initializer) {\n this.initializer.dispose();\n }\n this.resourceManager.dispose();\n }\n}\n/**\n * Load a graph model given a URL to the model definition.\n *\n * Example of loading MobileNetV2 from a URL and making a prediction with a\n * zeros input:\n *\n * ```js\n * const modelUrl =\n * 'https://storage.googleapis.com/tfjs-models/savedmodel/mobilenet_v2_1.0_224/model.json';\n * const model = await tf.loadGraphModel(modelUrl);\n * const zeros = tf.zeros([1, 224, 224, 3]);\n * model.predict(zeros).print();\n * ```\n *\n * Example of loading MobileNetV2 from a TF Hub URL and making a prediction with\n * a zeros input:\n *\n * ```js\n * const modelUrl =\n * 'https://tfhub.dev/google/imagenet/mobilenet_v2_140_224/classification/2';\n * const model = await tf.loadGraphModel(modelUrl, {fromTFHub: true});\n * const zeros = tf.zeros([1, 224, 224, 3]);\n * model.predict(zeros).print();\n * ```\n * @param modelUrl The url or an `io.IOHandler` that loads the model.\n * @param options Options for the HTTP request, which allows to send credentials\n * and custom headers.\n *\n * @doc {heading: 'Models', subheading: 'Loading'}\n */\nexport async function loadGraphModel(modelUrl, options = {}) {\n if (modelUrl == null) {\n throw new Error('modelUrl in loadGraphModel() cannot be null. Please provide a url ' +\n 'or an IOHandler that loads the model');\n }\n if (options == null) {\n options = {};\n }\n if (options.fromTFHub) {\n if (modelUrl.load == null) {\n if (!modelUrl.endsWith('/')) {\n modelUrl = modelUrl + '/';\n }\n modelUrl = `${modelUrl}${DEFAULT_MODEL_NAME}${TFHUB_SEARCH_PARAM}`;\n }\n }\n const model = new GraphModel(modelUrl, options);\n await model.load();\n return model;\n}\n//# sourceMappingURL=graph_model.js.map", "/** @license See the LICENSE file. */\n// This code is auto-generated, do not modify this file!\nconst version = '2.7.0';\nexport { version };\n//# sourceMappingURL=version.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport { GraphModel, loadGraphModel } from './executor/graph_model';\nexport { deregisterOp, registerOp } from './operations/custom_op/register';\nexport { version as version_converter } from './version';\n//# sourceMappingURL=index.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport * as tf from '@tensorflow/tfjs-core';\n/**\n * Apply a mapping function to a nested structure in a recursive manner.\n *\n * The result of the mapping is an object with the same nested structure (i.e.,\n * of arrays and dicts) as the input, except that some subtrees are replaced,\n * according to the results of the mapping function.\n *\n * Mappings are memoized. Thus, if the nested structure contains the same\n * object in multiple positions, the output will contain the same mapped object\n * in those positions. Cycles are not supported, however.\n *\n * @param input: The object to which to apply the mapping function.\n * @param mapFn: A function that expects a single node of the object tree, and\n * returns a `DeepMapResult`. The `DeepMapResult` either provides a\n * replacement value for that node (i.e., replacing the subtree), or indicates\n * that the node should be processed recursively.\n */\nexport function deepMap(input, mapFn) {\n return deepMapInternal(input, mapFn);\n}\n/**\n * @param seen: A Map of known object mappings (i.e., memoized results of\n * `mapFn()`)\n * @param containedIn: An set containing objects on the reference path currently\n * being processed (used to detect cycles).\n */\nfunction deepMapInternal(input, mapFn, seen = new Map(), containedIn = new Set()) {\n if (input == null) {\n return null;\n }\n if (containedIn.has(input)) {\n throw new Error('Circular references are not supported.');\n }\n if (seen.has(input)) {\n return seen.get(input);\n }\n const result = mapFn(input);\n if (result.recurse && result.value !== null) {\n throw new Error('A deep map function may not return both a value and recurse=true.');\n }\n if (!result.recurse) {\n seen.set(input, result.value);\n return result.value;\n }\n else if (isIterable(input)) {\n // tslint:disable-next-line:no-any\n const mappedIterable = Array.isArray(input) ? [] : {};\n containedIn.add(input);\n for (const k in input) {\n const child = input[k];\n const childResult = deepMapInternal(child, mapFn, seen, containedIn);\n mappedIterable[k] = childResult;\n }\n containedIn.delete(input);\n return mappedIterable;\n }\n else {\n throw new Error(`Can't recurse into non-iterable type: ${input}`);\n }\n}\n// TODO(soergel, kangyizhang) Reconsider naming of deepZip() to avoid confusion\n// with zip()\n/**\n * Zip nested structures together in a recursive manner.\n *\n * This has the effect of transposing or pivoting data, e.g. converting it from\n * a row-major representation to a column-major representation.\n *\n * For example, `deepZip([{a: 1, b: 2}, {a: 3, b: 4}])` returns\n * `{a: [1, 3], b: [2, 4]}`.\n *\n * The inputs should all have the same nested structure (i.e., of arrays and\n * dicts). 
The result is a single object with the same nested structure, where\n * the leaves are arrays collecting the values of the inputs at that location\n * (or, optionally, the result of a custom function applied to those arrays).\n *\n * @param inputs: An array of the objects to zip together.\n * @param zipFn: (optional) A function that expects an array of elements at a\n * single node of the object tree, and returns a `DeepMapResult`. The\n * `DeepMapResult` either provides a result value for that node (i.e.,\n * representing the subtree), or indicates that the node should be processed\n * recursively. The default zipFn recurses as far as possible and places\n * arrays at the leaves.\n */\nexport function deepZip(inputs, zipFn = zipToList) {\n return deepZipInternal(inputs, zipFn);\n}\n/**\n * @param containedIn: An set containing objects on the reference path currently\n * being processed (used to detect cycles).\n */\nfunction deepZipInternal(inputs, zipFn, containedIn = new Set()) {\n // The recursion follows the structure of input 0; it's assumed that all the\n // other inputs have the same structure.\n const input = inputs[0];\n if (containedIn.has(input)) {\n throw new Error('Circular references are not supported.');\n }\n const result = zipFn(inputs);\n if (result.recurse && result.value !== null) {\n throw new Error('A deep zip function may not return both a value and recurse=true.');\n }\n if (!result.recurse) {\n return result.value;\n }\n else if (isIterable(input)) {\n // tslint:disable-next-line:no-any\n const mappedIterable = Array.isArray(input) ? [] : {};\n containedIn.add(input);\n for (const k in input) {\n const children = inputs.map(x => x[k]);\n const childResult = deepZipInternal(children, zipFn, containedIn);\n mappedIterable[k] = childResult;\n }\n containedIn.delete(input);\n return mappedIterable;\n }\n else {\n throw new Error(`Can't recurse into non-iterable type: ${input}`);\n }\n}\n// tslint:disable-next-line:no-any\nexport function zipToList(x) {\n if (x === null) {\n return null;\n }\n // TODO(soergel): validate array type?\n if (isIterable(x[0])) {\n return { value: null, recurse: true };\n }\n else {\n return { value: x, recurse: false };\n }\n}\n/**\n * Apply an async mapping function to a nested structure in a recursive manner.\n *\n * This first creates a nested structure of Promises, and then awaits all of\n * those, resulting in a single Promise for a resolved nested structure.\n *\n * The result of the mapping is an object with the same nested structure (i.e.,\n * of arrays and dicts) as the input, except that some subtrees are replaced,\n * according to the results of the mapping function.\n *\n * Mappings are memoized. Thus, if the nested structure contains the same\n * object in multiple positions, the output will contain the same mapped object\n * in those positions. Cycles are not supported, however.\n *\n * @param input: The object to which to apply the mapping function.\n * @param mapFn: A function that expects a single node of the object tree, and\n * returns a `DeepMapAsyncResult`. The `DeepMapAsyncResult` either provides\n * a `Promise` for a replacement value for that node (i.e., replacing the\n * subtree), or indicates that the node should be processed recursively. 
Note\n * that the decision whether or not to recurse must be made immediately; only\n * the mapped value may be promised.\n */\nexport async function deepMapAndAwaitAll(input, mapFn) {\n const seen = new Map();\n // First do a normal deepMap, collecting Promises in 'seen' as a side effect.\n deepMapInternal(input, mapFn, seen);\n // Replace the Promises in 'seen' in place.\n // Note TypeScript provides no async map iteration, and regular map iteration\n // is broken too, so sadly we have to do Array.from() to make it work.\n // (There's no advantage to Promise.all(), and that would be tricky anyway.)\n for (const key of Array.from(seen.keys())) {\n const value = seen.get(key);\n if (tf.util.isPromise(value)) {\n const mappedValue = await value;\n seen.set(key, mappedValue);\n }\n }\n // Normal deepMap again, this time filling in the resolved values.\n // It's unfortunate that we have to do two passes.\n // TODO(soergel): test performance and think harder about a fast solution.\n const result = deepMapInternal(input, mapFn, seen);\n return result;\n}\n/**\n * Determine whether the argument is iterable.\n *\n * @returns true if the argument is an array or any non-Tensor object.\n */\n// tslint:disable-next-line:no-any\nexport function isIterable(obj) {\n return obj != null && (!ArrayBuffer.isView(obj)) &&\n (Array.isArray(obj) ||\n (typeof obj === 'object' && !(obj instanceof tf.Tensor)));\n}\n/**\n * Determine whether the argument can be converted to Tensor.\n *\n * Tensors, primitives, arrays, and TypedArrays all qualify; anything else does\n * not.\n *\n * @returns true if the argument can be converted to Tensor.\n */\n// tslint:disable-next-line:no-any\nexport function canTensorify(obj) {\n return obj == null || isPrimitive(obj) || Array.isArray(obj) ||\n (typeof obj === 'object' && (obj instanceof tf.Tensor)) ||\n tf.util.isTypedArray(obj);\n}\n/**\n * Returns true if the given `value` is a primitive type. Otherwise returns\n * false. This is equivalant to node util.isPrimitive\n */\nfunction isPrimitive(value) {\n return (value === null ||\n (typeof value !== 'object' && typeof value !== 'function'));\n}\n//# sourceMappingURL=deep_map.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport * as tf from '@tensorflow/tfjs-core';\nimport { deepMap, isIterable } from './deep_map';\nexport function deepClone(container) {\n return deepMap(container, cloneIfTensor);\n}\n// tslint:disable-next-line: no-any\nfunction cloneIfTensor(item) {\n if (item instanceof tf.Tensor) {\n return ({ value: item.clone(), recurse: false });\n }\n else if (isIterable(item)) {\n return { value: null, recurse: true };\n }\n else {\n return { value: item, recurse: false };\n }\n}\n//# sourceMappingURL=deep_clone.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\n/**\n * A ring buffer, providing O(1) FIFO, LIFO, and related operations.\n */\nexport class RingBuffer {\n /**\n * Constructs a `RingBuffer`.\n * @param capacity The number of items that the buffer can accomodate.\n */\n constructor(capacity) {\n this.capacity = capacity;\n // Note we store the indices in the range 0 <= index < 2*capacity.\n // This allows us to distinguish the full from the empty case.\n // See https://www.snellman.net/blog/archive/2016-12-13-ring-buffers/\n this.begin = 0; // inclusive\n this.end = 0; // exclusive\n if (capacity == null) {\n throw new RangeError('Can\\'t create a ring buffer of unknown capacity.');\n }\n if (capacity < 1) {\n throw new RangeError('Can\\'t create ring buffer of capacity < 1.');\n }\n this.data = new Array(capacity);\n this.doubledCapacity = 2 * capacity;\n }\n /**\n * Map any index into the range 0 <= index < 2*capacity.\n */\n wrap(index) {\n // don't trust % on negative numbers\n while (index < 0) {\n index += this.doubledCapacity;\n }\n return index % this.doubledCapacity;\n }\n get(index) {\n if (index < 0) {\n throw new RangeError('Can\\'t get item at a negative index.');\n }\n return this.data[index % this.capacity];\n }\n set(index, value) {\n if (index < 0) {\n throw new RangeError('Can\\'t set item at a negative index.');\n }\n this.data[index % this.capacity] = value;\n }\n /**\n * Returns the current number of items in the buffer.\n */\n length() {\n let length = this.end - this.begin;\n if (length < 0) {\n length = this.doubledCapacity + length;\n }\n return length;\n }\n /**\n * Reports whether the buffer is full.\n * @returns true if the number of items in the buffer equals its capacity, and\n * false otherwise.\n */\n isFull() {\n return this.length() === this.capacity;\n }\n /**\n * Reports whether the buffer is empty.\n * @returns true if the number of items in the buffer equals zero, and\n * false otherwise.\n */\n isEmpty() {\n return this.length() === 0;\n }\n /**\n * Adds an item to the end of the buffer.\n */\n push(value) {\n if (this.isFull()) {\n throw new RangeError('Ring buffer is full.');\n }\n this.set(this.end, value);\n this.end = this.wrap(this.end + 1);\n }\n /**\n * Adds many items to the end of the buffer, in order.\n */\n pushAll(values) {\n for (const value of values) {\n this.push(value);\n }\n }\n /**\n * Removes and returns the last item in the buffer.\n */\n pop() {\n if (this.isEmpty()) {\n throw new RangeError('Ring buffer is empty.');\n }\n this.end = this.wrap(this.end - 1);\n const result = this.get(this.end);\n this.set(this.end, undefined);\n return result;\n }\n /**\n * Adds an item to the beginning of the buffer.\n */\n unshift(value) {\n if (this.isFull()) {\n throw new RangeError('Ring buffer is full.');\n }\n this.begin = this.wrap(this.begin - 1);\n this.set(this.begin, value);\n }\n /**\n * Removes and returns the 
first item in the buffer.\n */\n shift() {\n if (this.isEmpty()) {\n throw new RangeError('Ring buffer is empty.');\n }\n const result = this.get(this.begin);\n this.set(this.begin, undefined);\n this.begin = this.wrap(this.begin + 1);\n return result;\n }\n /**\n * Removes and returns a specific item in the buffer, and moves the last item\n * to the vacated slot. This is useful for implementing a shuffling stream.\n * Note that this operation necessarily scrambles the original order.\n *\n * @param relativeIndex: the index of the item to remove, relative to the\n * first item in the buffer (e.g., hiding the ring nature of the underlying\n * storage).\n */\n shuffleExcise(relativeIndex) {\n if (this.isEmpty()) {\n throw new RangeError('Ring buffer is empty.');\n }\n const index = this.wrap(this.begin + relativeIndex);\n const result = this.get(index);\n this.set(index, this.pop());\n return result;\n }\n}\n//# sourceMappingURL=ring_buffer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { RingBuffer } from './ring_buffer';\nexport class GrowingRingBuffer extends RingBuffer {\n /**\n * Constructs a `GrowingRingBuffer`.\n */\n constructor() {\n super(GrowingRingBuffer.INITIAL_CAPACITY);\n }\n isFull() {\n return false;\n }\n push(value) {\n if (super.isFull()) {\n this.expand();\n }\n super.push(value);\n }\n unshift(value) {\n if (super.isFull()) {\n this.expand();\n }\n super.unshift(value);\n }\n /**\n * Doubles the capacity of the buffer.\n */\n expand() {\n const newCapacity = this.capacity * 2;\n const newData = new Array(newCapacity);\n const len = this.length();\n // Rotate the buffer to start at index 0 again, since we can't just\n // allocate more space at the end.\n for (let i = 0; i < len; i++) {\n newData[i] = this.get(this.wrap(this.begin + i));\n }\n this.data = newData;\n this.capacity = newCapacity;\n this.doubledCapacity = 2 * this.capacity;\n this.begin = 0;\n this.end = len;\n }\n}\nGrowingRingBuffer.INITIAL_CAPACITY = 32;\n//# sourceMappingURL=growing_ring_buffer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport * as tf from '@tensorflow/tfjs-core';\nimport * as seedrandom from 'seedrandom';\nimport { deepClone } from '../util/deep_clone';\nimport { deepMapAndAwaitAll, deepZip, zipToList } from '../util/deep_map';\nimport { GrowingRingBuffer } from '../util/growing_ring_buffer';\nimport { RingBuffer } from '../util/ring_buffer';\n// Here we implement a simple asynchronous iterator.\n// This lets us avoid using either third-party stream libraries or\n// recent TypeScript language support requiring polyfills.\n/**\n * Create a `LazyIterator` from an array of items.\n */\nexport function iteratorFromItems(items) {\n return new ArrayIterator(items);\n}\n/**\n * Create a `LazyIterator` of incrementing integers.\n */\nexport function iteratorFromIncrementing(start) {\n let i = start;\n return iteratorFromFunction(() => ({ value: i++, done: false }));\n}\n/**\n * Create a `LazyIterator` from a function.\n *\n * ```js\n * let i = -1;\n * const func = () =>\n * ++i < 5 ? {value: i, done: false} : {value: null, done: true};\n * const iter = tf.data.iteratorFromFunction(func);\n * await iter.forEachAsync(e => console.log(e));\n * ```\n *\n * @param func A function that produces data on each call.\n */\nexport function iteratorFromFunction(func) {\n return new FunctionCallIterator(func);\n}\n/**\n * Create a `LazyIterator` by concatenating underlying streams, which are\n * themselves provided as a stream.\n *\n * This can also be thought of as a \"stream flatten\" operation.\n *\n * @param baseIterators A stream of streams to be concatenated.\n * @param baseErrorHandler An optional function that can intercept `Error`s\n * raised during a `next()` call on the base stream. This function can decide\n * whether the error should be propagated, whether the error should be\n * ignored, or whether the base stream should be terminated.\n */\nexport function iteratorFromConcatenated(baseIterators, baseErrorHandler) {\n return new ChainedIterator(baseIterators, baseErrorHandler);\n}\n/**\n * Create a `LazyIterator` by concatenating streams produced by calling a\n * stream-generating function a given number of times.\n *\n * Since a `LazyIterator` is read-once, it cannot be repeated, but this\n * function can be used to achieve a similar effect:\n *\n * LazyIterator.ofConcatenatedFunction(() => new MyIterator(), 6);\n *\n * @param iteratorFunc: A function that produces a new stream on each call.\n * @param count: The number of times to call the function.\n * @param baseErrorHandler An optional function that can intercept `Error`s\n * raised during a `next()` call on the base stream. 
This function can decide\n * whether the error should be propagated, whether the error should be\n * ignored, or whether the base stream should be terminated.\n */\nexport function iteratorFromConcatenatedFunction(iteratorFunc, count, baseErrorHandler) {\n return iteratorFromConcatenated(iteratorFromFunction(iteratorFunc).take(count), baseErrorHandler);\n}\n/**\n * Create a `LazyIterator` by zipping together an array, dict, or nested\n * structure of `LazyIterator`s (and perhaps additional constants).\n *\n * The underlying streams must provide elements in a consistent order such\n * that they correspond.\n *\n * Typically, the underlying streams should have the same number of\n * elements. If they do not, the behavior is determined by the\n * `mismatchMode` argument.\n *\n * The nested structure of the `iterators` argument determines the\n * structure of elements in the resulting iterator.\n *\n * @param iterators: An array or object containing LazyIterators at the\n * leaves.\n * @param mismatchMode: Determines what to do when one underlying iterator\n * is exhausted before the others. `ZipMismatchMode.FAIL` (the default)\n * causes an error to be thrown in this case. `ZipMismatchMode.SHORTEST`\n * causes the zipped iterator to terminate with the furst underlying\n * streams, so elements remaining on the longer streams are ignored.\n * `ZipMismatchMode.LONGEST` causes the zipped stream to continue, filling\n * in nulls for the exhausted streams, until all streams are exhausted.\n */\nexport function iteratorFromZipped(iterators, mismatchMode = ZipMismatchMode.FAIL) {\n return new ZipIterator(iterators, mismatchMode);\n}\n/**\n * An asynchronous iterator, providing lazy access to a potentially\n * unbounded stream of elements.\n *\n * Iterator can be obtained from a dataset:\n * `const iter = await dataset.iterator();`\n */\nexport class LazyIterator {\n /**\n * Collect all remaining elements of a bounded stream into an array.\n * Obviously this will succeed only for small streams that fit in memory.\n * Useful for testing.\n *\n * @returns A Promise for an array of stream elements, which will resolve\n * when the stream is exhausted.\n */\n async toArray() {\n const result = [];\n let x = await this.next();\n while (!x.done) {\n result.push(x.value);\n x = await this.next();\n }\n return result;\n }\n /**\n * Collect all elements of this dataset into an array with prefetching 100\n * elements. This is useful for testing, because the prefetch changes the\n * order in which the Promises are resolved along the processing pipeline.\n * This may help expose bugs where results are dependent on the order of\n * Promise resolution rather than on the logical order of the stream (i.e.,\n * due to hidden mutable state).\n *\n * @returns A Promise for an array of stream elements, which will resolve\n * when the stream is exhausted.\n */\n async toArrayForTest() {\n const stream = this.prefetch(100);\n const result = [];\n let x = await stream.next();\n while (!x.done) {\n result.push(x.value);\n x = await stream.next();\n }\n return result;\n }\n /**\n * Draw items from the stream until it is exhausted.\n *\n * This can be useful when the stream has side effects but no output. 
In\n * that case, calling this function guarantees that the stream will be\n * fully processed.\n */\n async resolveFully() {\n let x = await this.next();\n while (!x.done) {\n x = await this.next();\n }\n }\n /**\n * Draw items from the stream until it is exhausted, or a predicate fails.\n *\n * This can be useful when the stream has side effects but no output. In\n * that case, calling this function guarantees that the stream will be\n * fully processed.\n */\n async resolveWhile(predicate) {\n let x = await this.next();\n let shouldContinue = predicate(x.value);\n while ((!x.done) && shouldContinue) {\n x = await this.next();\n shouldContinue = predicate(x.value);\n }\n }\n /**\n * Handles errors thrown on this stream using a provided handler function.\n *\n * @param handler A function that handles any `Error` thrown during a `next()`\n * call and returns true if the stream should continue (dropping the failed\n * call) or false if the stream should quietly terminate. If the handler\n * itself throws (or rethrows) an `Error`, that will be propagated.\n *\n * @returns A `LazyIterator` of elements passed through from upstream,\n * possibly filtering or terminating on upstream `next()` calls that\n * throw an `Error`.\n */\n handleErrors(handler) {\n return new ErrorHandlingLazyIterator(this, handler);\n }\n // TODO(soergel): Implement reduce() etc.\n /**\n * Filters this stream according to `predicate`.\n *\n * @param predicate A function mapping a stream element to a boolean or a\n * `Promise` for one.\n *\n * @returns A `LazyIterator` of elements for which the predicate was true.\n */\n filter(predicate) {\n return new FilterIterator(this, predicate);\n }\n /**\n * Maps this stream through a 1-to-1 transform.\n *\n * @param transform A function mapping a stream element to a transformed\n * element.\n *\n * @returns A `LazyIterator` of transformed elements.\n */\n map(transform) {\n return new MapIterator(this, transform);\n }\n /**\n * Maps this stream through an async 1-to-1 transform.\n *\n * @param transform A function mapping a stream element to a `Promise` for a\n * transformed stream element.\n *\n * @returns A `LazyIterator` of transformed elements.\n */\n mapAsync(transform) {\n return new AsyncMapIterator(this, transform);\n }\n /**\n * Maps this stream through a 1-to-1 transform, forcing serial execution.\n *\n * @param transform A function mapping a stream element to a transformed\n * element.\n *\n * @returns A `LazyIterator` of transformed elements.\n */\n serialMapAsync(transform) {\n return new AsyncMapIterator(this, transform).serial();\n }\n /**\n * Maps this stream through a 1-to-many transform.\n *\n * @param transform A function mapping a stream element to an array of\n * transformed elements.\n *\n * @returns A `DataStream` of transformed elements.\n */\n flatmap(transform) {\n return new FlatmapIterator(this, transform);\n }\n /**\n * Apply a function to every element of the stream.\n *\n * @param f A function to apply to each stream element.\n */\n async forEachAsync(f) {\n return this.map(f).resolveFully();\n }\n /**\n * Apply a function to every element of the stream, forcing serial execution.\n *\n * @param f A function to apply to each stream element. 
Should return 'true'\n * to indicate that the stream should continue, or 'false' to cause it to\n * terminate.\n */\n async serialForEach(f) {\n return this.serialMapAsync(f).resolveWhile(x => (x === true));\n }\n /**\n * Groups elements into batches, represented as arrays of elements.\n *\n * We can think of the elements of this iterator as 'rows' (even if they are\n * nested structures). By the same token, consecutive values for a given\n * key within the elements form a 'column'. This matches the usual sense of\n * 'row' and 'column' when processing tabular data (e.g., parsing a CSV).\n *\n * Thus, \"Row-major\" means that the resulting batch is simply a collection of\n * rows: `[row1, row2, row3, ...]`. This is contrast to the column-major\n * form, which is needed for vectorized computation.\n *\n * @param batchSize The number of elements desired per batch.\n * @param smallLastBatch Whether to emit the final batch when it has fewer\n * than batchSize elements. Default true.\n * @returns A `LazyIterator` of batches of elements, represented as arrays\n * of the original element type.\n */\n rowMajorBatch(batchSize, smallLastBatch = true) {\n return new RowMajorBatchIterator(this, batchSize, smallLastBatch);\n }\n /**\n * Groups elements into batches, represented in column-major form.\n *\n * We can think of the elements of this iterator as 'rows' (even if they are\n * nested structures). By the same token, consecutive values for a given\n * key within the elements form a 'column'. This matches the usual sense of\n * 'row' and 'column' when processing tabular data (e.g., parsing a CSV).\n *\n * Thus, \"column-major\" means that the resulting batch is a (potentially\n * nested) structure representing the columns. Each column entry, then,\n * contains a collection of the values found in that column for a range of\n * input elements. This representation allows for vectorized computation, in\n * contrast to the row-major form.\n *\n * The inputs should all have the same nested structure (i.e., of arrays and\n * dicts). The result is a single object with the same nested structure,\n * where the leaves are arrays collecting the values of the inputs at that\n * location (or, optionally, the result of a custom function applied to those\n * arrays).\n *\n * @param batchSize The number of elements desired per batch.\n * @param smallLastBatch Whether to emit the final batch when it has fewer\n * than batchSize elements. Default true.\n * @param zipFn: (optional) A function that expects an array of elements at a\n * single node of the object tree, and returns a `DeepMapResult`. The\n * `DeepMapResult` either provides a result value for that node (i.e.,\n * representing the subtree), or indicates that the node should be processed\n * recursively. 
The default zipFn recurses as far as possible and places\n * arrays at the leaves.\n * @returns A `LazyIterator` of batches of elements, represented as an object\n * with collections at the leaves.\n */\n columnMajorBatch(batchSize, smallLastBatch = true, \n // tslint:disable-next-line:no-any\n zipFn = zipToList) {\n // First collect the desired number of input elements as a row-major batch.\n const rowBatches = this.rowMajorBatch(batchSize, smallLastBatch);\n // Now 'rotate' or 'pivot' the data, collecting all values from each column\n // in the batch (i.e., for each key within the elements) into an array.\n return rowBatches.map(x => deepZip(x, zipFn));\n }\n /**\n * Concatenate this `LazyIterator` with another.\n *\n * @param iterator A `LazyIterator` to be concatenated onto this one.\n * @param baseErrorHandler An optional function that can intercept `Error`s\n * raised during a `next()` call on the base stream. This function can\n * decide whether the error should be propagated, whether the error should\n * be ignored, or whether the base stream should be terminated.\n * @returns A `LazyIterator`.\n */\n concatenate(iterator, baseErrorHandler) {\n return new ChainedIterator(iteratorFromItems([this, iterator]), baseErrorHandler);\n }\n /**\n * Limits this stream to return at most `count` items.\n *\n * @param count The maximum number of items to provide from the stream. If\n * a negative or undefined value is given, the entire stream is returned\n * unaltered.\n */\n take(count) {\n if (count < 0 || count == null) {\n return this;\n }\n return new TakeIterator(this, count);\n }\n /**\n * Skips the first `count` items in this stream.\n *\n * @param count The number of items to skip. If a negative or undefined\n * value is given, the entire stream is returned unaltered.\n */\n skip(count) {\n if (count < 0 || count == null) {\n return this;\n }\n return new SkipIterator(this, count);\n }\n /**\n * Prefetch the first `bufferSize` items in this stream.\n *\n * Note this prefetches Promises, but makes no guarantees about when those\n * Promises resolve.\n *\n * @param bufferSize: An integer specifying the number of elements to be\n * prefetched.\n */\n prefetch(bufferSize) {\n return new PrefetchIterator(this, bufferSize);\n }\n // TODO(soergel): deep sharded shuffle, where supported\n /**\n * Randomly shuffles the elements of this stream.\n *\n * @param bufferSize: An integer specifying the number of elements from\n * this stream from which the new stream will sample.\n * @param seed: (Optional.) An integer specifying the random seed that\n * will be used to create the distribution.\n */\n shuffle(windowSize, seed) {\n return new ShuffleIterator(this, windowSize, seed);\n }\n /**\n * Force an iterator to execute serially: each next() call will await the\n * prior one, so that they cannot execute concurrently.\n */\n serial() {\n return new SerialIterator(this);\n }\n}\n// ============================================================================\n// The following private classes serve to implement the chainable methods\n// on LazyIterator. 
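To make the row-major vs. column-major distinction above concrete, here is a small usage sketch against the public `tf.data` API; it assumes `@tensorflow/tfjs` is installed and a module context with top-level await.

```js
import * as tf from '@tensorflow/tfjs';

const rows = [{a: 1, b: 10}, {a: 2, b: 20}, {a: 3, b: 30}, {a: 4, b: 40}];

// Row-major grouping would simply be arrays of elements:
// [[{a:1, b:10}, {a:2, b:20}], [{a:3, b:30}, {a:4, b:40}]]

// Column-major batching, which is what Dataset.batch() produces, instead
// collects the values per key, so each batch is ready for vectorized ops:
await tf.data.array(rows).batch(2).forEachAsync((batch) => {
  batch.a.print(); // Tensor [1, 2],  then [3, 4]
  batch.b.print(); // Tensor [10, 20], then [30, 40]
});
```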
Unfortunately they can't be placed in separate files,\n// due to resulting trouble with circular imports.\n// ============================================================================\n// Iterators that just extend LazyIterator directly\n// ============================================================================\nclass ArrayIterator extends LazyIterator {\n constructor(items) {\n super();\n this.items = items;\n this.trav = 0;\n }\n summary() {\n return `Array of ${this.items.length} items`;\n }\n async next() {\n if (this.trav >= this.items.length) {\n return { value: null, done: true };\n }\n const item = this.items[this.trav];\n this.trav++;\n return { value: deepClone(item), done: false };\n }\n}\nclass FunctionCallIterator extends LazyIterator {\n constructor(nextFn) {\n super();\n this.nextFn = nextFn;\n }\n summary() {\n return `Function call`;\n }\n async next() {\n try {\n return this.nextFn();\n }\n catch (e) {\n // Modify the error message but leave the stack trace intact\n e.message =\n `Error thrown while iterating through a dataset: ${e.message}`;\n throw e;\n }\n }\n}\nclass SerialIterator extends LazyIterator {\n constructor(upstream) {\n super();\n this.upstream = upstream;\n this.lastRead = Promise.resolve({ value: null, done: false });\n }\n summary() {\n return `${this.upstream.summary()} -> Serial`;\n }\n async next() {\n // This sets this.lastRead to a new Promise right away, as opposed to\n // saying `await this.lastRead; this.lastRead = this.serialNext();` which\n // would not work because this.nextRead would be updated only after the\n // promise resolves.\n this.lastRead = this.lastRead.then(() => this.serialNext());\n return this.lastRead;\n }\n async serialNext() {\n return this.upstream.next();\n }\n}\nclass SkipIterator extends LazyIterator {\n constructor(upstream, maxCount) {\n super();\n this.upstream = upstream;\n this.maxCount = maxCount;\n // Local state that should not be clobbered by out-of-order execution.\n this.count = 0;\n this.lastRead = Promise.resolve({ value: null, done: false });\n }\n summary() {\n return `${this.upstream.summary()} -> Skip`;\n }\n async next() {\n // This sets this.lastRead to a new Promise right away, as opposed to\n // saying `await this.lastRead; this.lastRead = this.serialNext();` which\n // would not work because this.nextRead would be updated only after the\n // promise resolves.\n this.lastRead = this.lastRead.then(() => this.serialNext());\n return this.lastRead;\n }\n async serialNext() {\n // TODO(soergel): consider tradeoffs of reading in parallel, eg.\n // collecting next() promises in an Array and then waiting for\n // Promise.all() of those. Benefit: pseudo-parallel execution. 
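The `this.lastRead = this.lastRead.then(() => this.serialNext())` idiom used by `SerialIterator`, `SkipIterator`, and the other serialized iterators below guarantees that the `serialNext()` bodies never overlap, even when callers invoke `next()` several times without awaiting. A stripped-down sketch of just that idiom, with hypothetical names:

```js
// Hypothetical sketch of the serialization idiom: each next() chains onto
// the previous read's promise, so serialNext() bodies run strictly in order.
class SerializedCounter {
  constructor() {
    this.i = 0;
    this.lastRead = Promise.resolve(null);
  }
  next() {
    // Chain immediately; do NOT `await this.lastRead` first, or two
    // concurrent next() calls could chain onto the same stale promise.
    this.lastRead = this.lastRead.then(() => this.serialNext());
    return this.lastRead;
  }
  async serialNext() {
    const value = this.i++;
    // Simulate slow async work that would otherwise interleave.
    await new Promise((res) => setTimeout(res, Math.random() * 10));
    return value;
  }
}

const c = new SerializedCounter();
Promise.all([c.next(), c.next(), c.next()]).then(console.log); // [0, 1, 2]
```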
Drawback:\n // maybe delayed GC.\n while (this.count++ < this.maxCount) {\n const skipped = await this.upstream.next();\n // short-circuit if upstream is already empty\n if (skipped.done) {\n return skipped;\n }\n tf.dispose(skipped.value);\n }\n return this.upstream.next();\n }\n}\nclass TakeIterator extends LazyIterator {\n constructor(upstream, maxCount) {\n super();\n this.upstream = upstream;\n this.maxCount = maxCount;\n this.count = 0;\n }\n summary() {\n return `${this.upstream.summary()} -> Take`;\n }\n async next() {\n if (this.count++ >= this.maxCount) {\n return { value: null, done: true };\n }\n return this.upstream.next();\n }\n}\n// Note this batch just groups items into row-wise element arrays.\n// Rotating these to a column-wise representation happens only at the dataset\n// level.\nclass RowMajorBatchIterator extends LazyIterator {\n constructor(upstream, batchSize, enableSmallLastBatch = true) {\n super();\n this.upstream = upstream;\n this.batchSize = batchSize;\n this.enableSmallLastBatch = enableSmallLastBatch;\n this.lastRead = Promise.resolve({ value: null, done: false });\n }\n summary() {\n return `${this.upstream.summary()} -> RowMajorBatch`;\n }\n async next() {\n // This sets this.lastRead to a new Promise right away, as opposed to\n // saying `await this.lastRead; this.lastRead = this.serialNext();` which\n // would not work because this.nextRead would be updated only after the\n // promise resolves.\n this.lastRead = this.lastRead.then(() => this.serialNext());\n return this.lastRead;\n }\n async serialNext() {\n const batch = [];\n while (batch.length < this.batchSize) {\n const item = await this.upstream.next();\n if (item.done) {\n if (this.enableSmallLastBatch && batch.length > 0) {\n return { value: batch, done: false };\n }\n return { value: null, done: true };\n }\n batch.push(item.value);\n }\n return { value: batch, done: false };\n }\n}\nclass FilterIterator extends LazyIterator {\n constructor(upstream, predicate) {\n super();\n this.upstream = upstream;\n this.predicate = predicate;\n this.lastRead = Promise.resolve({ value: null, done: false });\n }\n summary() {\n return `${this.upstream.summary()} -> Filter`;\n }\n async next() {\n // This sets this.lastRead to a new Promise right away, as opposed to\n // saying `await this.lastRead; this.lastRead = this.serialNext();` which\n // would not work because this.nextRead would be updated only after the\n // promise resolves.\n this.lastRead = this.lastRead.then(() => this.serialNext());\n return this.lastRead;\n }\n async serialNext() {\n while (true) {\n const item = await this.upstream.next();\n if (item.done || this.predicate(item.value)) {\n return item;\n }\n tf.dispose(item.value);\n }\n }\n}\nclass MapIterator extends LazyIterator {\n constructor(upstream, transform) {\n super();\n this.upstream = upstream;\n this.transform = transform;\n }\n summary() {\n return `${this.upstream.summary()} -> Map`;\n }\n async next() {\n const item = await this.upstream.next();\n if (item.done) {\n return { value: null, done: true };\n }\n const inputTensors = tf.tensor_util.getTensorsInContainer(item.value);\n // Careful: the transform may mutate the item in place.\n // That's why we have to remember the input Tensors above, and then\n // below dispose only those that were not passed through to the output.\n // Note too that the transform function is responsible for tidying\n // any intermediate Tensors. 
Here we are concerned only about the\n // inputs.\n const mapped = this.transform(item.value);\n const outputTensors = tf.tensor_util.getTensorsInContainer(mapped);\n // TODO(soergel) faster intersection\n // TODO(soergel) move to tf.disposeExcept(in, out)?\n for (const t of inputTensors) {\n if (!tf.tensor_util.isTensorInList(t, outputTensors)) {\n t.dispose();\n }\n }\n return { value: mapped, done: false };\n }\n}\nclass ErrorHandlingLazyIterator extends LazyIterator {\n constructor(upstream, handler) {\n super();\n this.upstream = upstream;\n this.handler = handler;\n this.count = 0;\n this.lastRead = Promise.resolve({ value: null, done: false });\n }\n summary() {\n return `${this.upstream.summary()} -> handleErrors`;\n }\n async next() {\n // This sets this.lastRead to a new Promise right away, as opposed to\n // saying `await this.lastRead; this.lastRead = this.serialNext();` which\n // would not work because this.nextRead would be updated only after the\n // promise resolves.\n this.lastRead = this.lastRead.then(() => this.serialNext());\n return this.lastRead;\n }\n async serialNext() {\n while (true) {\n try {\n return await this.upstream.next();\n }\n catch (e) {\n if (!this.handler(e)) {\n return { value: null, done: true };\n }\n // If the handler returns true, loop and fetch the next upstream item.\n // If the upstream iterator throws an endless stream of errors, and if\n // the handler says to ignore them, then we loop forever here. That is\n // the correct behavior-- it's up to the handler to decide when to stop.\n }\n }\n }\n}\nclass AsyncMapIterator extends LazyIterator {\n constructor(upstream, transform) {\n super();\n this.upstream = upstream;\n this.transform = transform;\n }\n summary() {\n return `${this.upstream.summary()} -> AsyncMap`;\n }\n async next() {\n const item = await this.upstream.next();\n if (item.done) {\n return { value: null, done: true };\n }\n const inputTensors = tf.tensor_util.getTensorsInContainer(item.value);\n // Careful: the transform may mutate the item in place.\n // That's why we have to remember the input Tensors above, and then\n // below dispose only those that were not passed through to the output.\n // Note too that the transform function is responsible for tidying\n // any intermediate Tensors. Here we are concerned only about the\n // inputs.\n const mapped = await this.transform(item.value);\n const outputTensors = tf.tensor_util.getTensorsInContainer(mapped);\n // TODO(soergel) faster intersection\n // TODO(soergel) move to tf.disposeExcept(in, out)?\n for (const t of inputTensors) {\n if (!tf.tensor_util.isTensorInList(t, outputTensors)) {\n t.dispose();\n }\n }\n return { value: mapped, done: false };\n }\n}\n// Iterators that maintain a queue of pending items\n// ============================================================================\n/**\n * A base class for transforming streams that operate by maintaining an\n * output queue of elements that are ready to return via next(). 
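At the `Dataset` level (shown further below), the synchronous `map()` wraps the user transform in `tf.tidy()`, while `mapAsync()` cannot, so intermediate tensors created in an async transform are the caller's responsibility. A usage sketch of that difference, assuming `@tensorflow/tfjs` and top-level await:

```js
import * as tf from '@tensorflow/tfjs';

const ds = tf.data.array([1, 2, 3]);

// Synchronous map: the intermediate tf.scalar(x) is cleaned up by the
// tf.tidy() wrapper that Dataset.map() applies around the transform.
const squared = ds.map((x) => tf.scalar(x).square());

// Async map: no tidy() is possible around an async function, so dispose
// intermediates explicitly (or wrap the synchronous parts in tf.tidy()).
const delayed = ds.mapAsync(async (x) => {
  const t = tf.scalar(x);
  const out = t.square();
  t.dispose(); // the intermediate would otherwise leak
  return out;
});

await squared.forEachAsync((t) => t.print());
await delayed.forEachAsync((t) => t.print());
```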
This is\n * commonly required when the transformation is 1-to-many: A call to next()\n * may trigger a call to the underlying stream, which will produce many\n * mapped elements of this stream-- of which we need to return only one, so\n * we have to queue the rest.\n */\nexport class OneToManyIterator extends LazyIterator {\n constructor() {\n super();\n this.outputQueue = new GrowingRingBuffer();\n this.lastRead = Promise.resolve({ value: null, done: false });\n }\n async next() {\n // This sets this.lastRead to a new Promise right away, as opposed to\n // saying `await this.lastRead; this.lastRead = this.serialNext();` which\n // would not work because this.nextRead would be updated only after the\n // promise resolves.\n this.lastRead = this.lastRead.then(() => this.serialNext());\n return this.lastRead;\n }\n async serialNext() {\n // Fetch so that the queue contains at least one item if possible.\n // If the upstream source is exhausted, AND there are no items left in\n // the output queue, then this stream is also exhausted.\n while (this.outputQueue.length() === 0) {\n // TODO(soergel): consider parallel reads.\n if (!await this.pump()) {\n return { value: null, done: true };\n }\n }\n return { value: this.outputQueue.shift(), done: false };\n }\n}\nclass FlatmapIterator extends OneToManyIterator {\n constructor(upstream, transform) {\n super();\n this.upstream = upstream;\n this.transform = transform;\n }\n summary() {\n return `${this.upstream.summary()} -> Flatmap`;\n }\n async pump() {\n const item = await this.upstream.next();\n if (item.done) {\n return false;\n }\n const inputTensors = tf.tensor_util.getTensorsInContainer(item.value);\n // Careful: the transform may mutate the item in place.\n // that's why we have to remember the input Tensors above, and then\n // below dispose only those that were not passed through to the output.\n // Note too that the transform function is responsible for tidying any\n // intermediate Tensors. Here we are concerned only about the inputs.\n const mappedArray = this.transform(item.value);\n const outputTensors = tf.tensor_util.getTensorsInContainer(mappedArray);\n this.outputQueue.pushAll(mappedArray);\n // TODO(soergel) faster intersection, and deduplicate outputTensors\n // TODO(soergel) move to tf.disposeExcept(in, out)?\n for (const t of inputTensors) {\n if (!tf.tensor_util.isTensorInList(t, outputTensors)) {\n t.dispose();\n }\n }\n return true;\n }\n}\n/**\n * Provides a `LazyIterator` that concatenates a stream of underlying\n * streams.\n *\n * Doing this in a concurrency-safe way requires some trickery. 
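The output-queue pattern described above, where `next()` drains a queue that `pump()` refills one upstream element at a time, can be illustrated with a minimal hypothetical splitter that is independent of the tfjs-data classes:

```js
// Hypothetical sketch of the OneToManyIterator pattern: next() drains an
// output queue, and pump() refills it from upstream one item at a time.
function makeSplitter(upstreamItems) {
  const queue = [];
  let i = 0;
  const pump = () => {
    if (i >= upstreamItems.length) return false; // upstream exhausted
    // One upstream element may yield many output elements.
    queue.push(...upstreamItems[i++].split(' '));
    return true;
  };
  return {
    async next() {
      while (queue.length === 0) {
        if (!pump()) return {value: null, done: true};
      }
      return {value: queue.shift(), done: false};
    },
  };
}

const it = makeSplitter(['a b', 'c d e']);
for (let r = await it.next(); !r.done; r = await it.next()) {
  console.log(r.value); // 'a', 'b', 'c', 'd', 'e'
}
```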
In\n * particular, we want this stream to return the elements from the\n * underlying streams in the correct order according to when next() was\n * called, even if the resulting Promises resolve in a different order.\n */\nexport class ChainedIterator extends LazyIterator {\n constructor(iterators, baseErrorHandler) {\n super();\n this.baseErrorHandler = baseErrorHandler;\n // Strict Promise execution order:\n // a next() call may not even begin until the previous one completes.\n this.lastRead = null;\n // Local state that should not be clobbered by out-of-order execution.\n this.iterator = null;\n this.moreIterators = iterators;\n }\n summary() {\n const upstreamSummaries = 'TODO: fill in upstream of chained summaries';\n return `${upstreamSummaries} -> Chained`;\n }\n async next() {\n this.lastRead = this.readFromChain(this.lastRead);\n return this.lastRead;\n }\n async readFromChain(lastRead) {\n // Must await on the previous read since the previous read may have advanced\n // the stream of streams, from which we need to read.\n // This is unfortunate since we can't parallelize reads. Which means\n // prefetching of chained streams is a no-op.\n // One solution is to prefetch immediately upstream of this.\n await lastRead;\n if (this.iterator == null) {\n const iteratorResult = await this.moreIterators.next();\n if (iteratorResult.done) {\n // No more streams to stream from.\n return { value: null, done: true };\n }\n this.iterator = iteratorResult.value;\n if (this.baseErrorHandler != null) {\n this.iterator = this.iterator.handleErrors(this.baseErrorHandler);\n }\n }\n const itemResult = await this.iterator.next();\n if (itemResult.done) {\n this.iterator = null;\n return this.readFromChain(lastRead);\n }\n return itemResult;\n }\n}\nexport var ZipMismatchMode;\n(function (ZipMismatchMode) {\n ZipMismatchMode[ZipMismatchMode[\"FAIL\"] = 0] = \"FAIL\";\n ZipMismatchMode[ZipMismatchMode[\"SHORTEST\"] = 1] = \"SHORTEST\";\n ZipMismatchMode[ZipMismatchMode[\"LONGEST\"] = 2] = \"LONGEST\"; // use nulls for exhausted streams; use up the longest stream.\n})(ZipMismatchMode || (ZipMismatchMode = {}));\n/**\n * Provides a `LazyIterator` that zips together an array, dict, or nested\n * structure of `LazyIterator`s (and perhaps additional constants).\n *\n * The underlying streams must provide elements in a consistent order such\n * that they correspond.\n *\n * Typically, the underlying streams should have the same number of\n * elements. If they do not, the behavior is determined by the\n * `mismatchMode` argument.\n *\n * The nested structure of the `iterators` argument determines the\n * structure of elements in the resulting iterator.\n *\n * Doing this in a concurrency-safe way requires some trickery. In\n * particular, we want this stream to return the elements from the\n * underlying streams in the correct order according to when next() was\n * called, even if the resulting Promises resolve in a different order.\n *\n * @param iterators: An array or object containing LazyIterators at the\n * leaves.\n * @param mismatchMode: Determines what to do when one underlying iterator\n * is exhausted before the others. `ZipMismatchMode.FAIL` (the default)\n * causes an error to be thrown in this case. 
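At the public API level this mismatch handling surfaces through `tf.data.zip()`, which (as the `zip()` implementation further below shows) passes `ZipMismatchMode.SHORTEST`. A usage sketch assuming `@tensorflow/tfjs` and top-level await:

```js
import * as tf from '@tensorflow/tfjs';

const short = tf.data.array([1, 2]);
const long = tf.data.array([10, 20, 30]);

// tf.data.zip() uses SHORTEST: the zipped dataset ends when `short` ends,
// so the trailing 30 from `long` is never emitted.
await tf.data.zip([short, long]).forEachAsync((e) => console.log(e));
// [1, 10]
// [2, 20]
```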
`ZipMismatchMode.SHORTEST`\n * causes the zipped iterator to terminate with the furst underlying\n * streams, so elements remaining on the longer streams are ignored.\n * `ZipMismatchMode.LONGEST` causes the zipped stream to continue, filling\n * in nulls for the exhausted streams, until all streams are exhausted.\n */\nclass ZipIterator extends LazyIterator {\n constructor(iterators, mismatchMode = ZipMismatchMode.FAIL) {\n super();\n this.iterators = iterators;\n this.mismatchMode = mismatchMode;\n this.count = 0;\n this.currentPromise = null;\n }\n summary() {\n const upstreamSummaries = 'TODO: fill in upstream of zip summaries';\n return `{${upstreamSummaries}} -> Zip`;\n }\n async nextState(afterState) {\n // This chaining ensures that the underlying next() are not even called\n // before the previous ones have resolved.\n await afterState;\n // Collect underlying iterator \"done\" signals as a side effect in\n // getNext()\n let numIterators = 0;\n let iteratorsDone = 0;\n function getNext(container) {\n if (container instanceof LazyIterator) {\n const result = container.next();\n return {\n value: result.then(x => {\n numIterators++;\n if (x.done) {\n iteratorsDone++;\n }\n return x.value;\n }),\n recurse: false\n };\n }\n else {\n return { value: null, recurse: true };\n }\n }\n const mapped = await deepMapAndAwaitAll(this.iterators, getNext);\n if (numIterators === iteratorsDone) {\n // The streams have all ended.\n return { value: null, done: true };\n }\n if (iteratorsDone > 0) {\n switch (this.mismatchMode) {\n case ZipMismatchMode.FAIL:\n throw new Error('Zipped streams should have the same length. ' +\n `Mismatched at element ${this.count}.`);\n case ZipMismatchMode.SHORTEST:\n return { value: null, done: true };\n case ZipMismatchMode.LONGEST:\n default:\n // Continue. The exhausted streams already produced value: null.\n }\n }\n this.count++;\n return { value: mapped, done: false };\n }\n async next() {\n this.currentPromise = this.nextState(this.currentPromise);\n return this.currentPromise;\n }\n}\n// Iterators that maintain a ring buffer of pending promises\n// ============================================================================\n/**\n * A stream that prefetches a given number of items from an upstream source,\n * returning them in FIFO order.\n *\n * Note this prefetches Promises, but makes no guarantees about when those\n * Promises resolve.\n */\nexport class PrefetchIterator extends LazyIterator {\n constructor(upstream, bufferSize) {\n super();\n this.upstream = upstream;\n this.bufferSize = bufferSize;\n this.buffer = new RingBuffer(bufferSize);\n }\n summary() {\n return `${this.upstream.summary()} -> Prefetch`;\n }\n /**\n * Refill the prefetch buffer. Returns only after the buffer is full, or\n * the upstream source is exhausted.\n */\n refill() {\n while (!this.buffer.isFull()) {\n const v = this.upstream.next();\n this.buffer.push(v);\n }\n }\n next() {\n this.refill();\n // This shift will never throw an error because the buffer is always\n // full after a refill. If the stream is exhausted, the buffer will be\n // full of Promises that will resolve to the end-of-stream signal.\n return this.buffer.shift();\n }\n}\n/**\n * A stream that performs a sliding-window random shuffle on an upstream\n * source. This is like a `PrefetchIterator` except that the items are\n * returned in randomized order. 
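In user code, windowed shuffling and prefetching are usually combined on a `Dataset`; the window size bounds how well mixed the output can be, and the prefetch depth bounds how much work is kept in flight. A usage sketch assuming `@tensorflow/tfjs` and top-level await:

```js
import * as tf from '@tensorflow/tfjs';

// Shuffle samples from a sliding window of 4 elements (a larger window
// mixes better), batch, then keep up to 2 batches' worth of work queued.
const ds = tf.data.array([1, 2, 3, 4, 5, 6, 7, 8])
  .shuffle(4, 'seed') // windowSize, optional seed for reproducibility
  .batch(2)
  .prefetch(2);

await ds.forEachAsync((t) => t.print()); // e.g. [3, 1], [2, 5], ...
```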
Mixing naturally improves as the buffer\n * size increases.\n */\nexport class ShuffleIterator extends PrefetchIterator {\n constructor(upstream, windowSize, seed) {\n super(upstream, windowSize);\n this.upstream = upstream;\n this.windowSize = windowSize;\n // Local state that should not be clobbered by out-of-order execution.\n this.upstreamExhausted = false;\n this.random = seedrandom.alea(seed || tf.util.now().toString());\n this.lastRead = Promise.resolve({ value: null, done: false });\n }\n async next() {\n // This sets this.lastRead to a new Promise right away, as opposed to\n // saying `await this.lastRead; this.lastRead = this.serialNext();` which\n // would not work because this.nextRead would be updated only after the\n // promise resolves.\n this.lastRead = this.lastRead.then(() => this.serialNext());\n return this.lastRead;\n }\n randomInt(max) {\n return Math.floor(this.random() * max);\n }\n chooseIndex() {\n return this.randomInt(this.buffer.length());\n }\n async serialNext() {\n // TODO(soergel): consider performance\n if (!this.upstreamExhausted) {\n this.refill();\n }\n while (!this.buffer.isEmpty()) {\n const chosenIndex = this.chooseIndex();\n const result = await this.buffer.shuffleExcise(chosenIndex);\n if (result.done) {\n this.upstreamExhausted = true;\n }\n else {\n this.refill();\n return result;\n }\n }\n return { value: null, done: true };\n }\n}\n//# sourceMappingURL=lazy_iterator.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport * as tf from '@tensorflow/tfjs-core';\nimport * as seedrandom from 'seedrandom';\nimport { iteratorFromConcatenated, iteratorFromFunction, iteratorFromItems, iteratorFromZipped, ZipMismatchMode } from './iterators/lazy_iterator';\nimport { canTensorify, deepMapAndAwaitAll, isIterable } from './util/deep_map';\n// TODO(soergel): consider vectorized operations within the pipeline.\n/**\n * Represents a potentially large list of independent data elements (typically\n * 'samples' or 'examples').\n *\n * A 'data example' may be a primitive, an array, a map from string keys to\n * values, or any nested structure of these.\n *\n * A `Dataset` represents an ordered collection of elements, together with a\n * chain of transformations to be performed on those elements. Each\n * transformation is a method of `Dataset` that returns another `Dataset`, so\n * these may be chained, e.g.\n * `const processedDataset = rawDataset.filter(...).map(...).batch(...)`.\n *\n * Data loading and transformation is done in a lazy, streaming fashion. The\n * dataset may be iterated over multiple times; each iteration starts the data\n * loading anew and recapitulates the transformations.\n *\n * A `Dataset` is typically processed as a stream of unbatched examples --i.e.,\n * its transformations are applied one example at a time. 
Batching produces a\n * new `Dataset` where each element is a batch. Batching should usually come\n * last in a pipeline, because data transformations are easier to express on a\n * per-example basis than on a per-batch basis.\n *\n * The following code examples are calling `await dataset.forEachAsync(...)` to\n * iterate once over the entire dataset in order to print out the data.\n *\n * @doc {heading: 'Data', subheading: 'Classes', namespace: 'data'}\n */\nexport class Dataset {\n constructor() {\n this.size = null;\n }\n // TODO(soergel): Make Datasets report whether repeated iterator() calls\n // produce the same result (e.g., reading from a file) or different results\n // (e.g., from the webcam). Currently we don't make this distinction but it\n // could be important for the user to know.\n // abstract isDeterministic(): boolean;\n /**\n * Groups elements into batches.\n *\n * It is assumed that each of the incoming dataset elements has the same\n * structure-- i.e. the same set of keys at each location in an object\n * hierarchy. For each key, the resulting `Dataset` provides a batched\n * element collecting all of the incoming values for that key.\n *\n * * Incoming primitives are grouped into a 1-D Tensor.\n * * Incoming Tensors are grouped into a new Tensor where the 0'th axis is\n * the batch dimension.\n * * Incoming arrays are converted to Tensor and then batched.\n * * A nested array is interpreted as an n-D Tensor, so the batched result\n * has n+1 dimensions.\n * * An array that cannot be converted to Tensor produces an error.\n *\n * If an array should not be batched as a unit, it should first be converted\n * to an object with integer keys.\n *\n * Here are a few examples:\n *\n * Batch a dataset of numbers:\n * ```js\n * const a = tf.data.array([1, 2, 3, 4, 5, 6, 7, 8]).batch(4);\n * await a.forEachAsync(e => e.print());\n * ```\n *\n * Batch a dataset of arrays:\n * ```js\n * const b = tf.data.array([[1], [2], [3], [4], [5], [6], [7], [8]]).batch(4);\n * await b.forEachAsync(e => e.print());\n * ```\n *\n * Batch a dataset of objects:\n * ```js\n * const c = tf.data.array([{a: 1, b: 11}, {a: 2, b: 12}, {a: 3, b: 13},\n * {a: 4, b: 14}, {a: 5, b: 15}, {a: 6, b: 16}, {a: 7, b: 17},\n * {a: 8, b: 18}]).batch(4);\n * await c.forEachAsync(e => {\n * console.log('{');\n * for(var key in e) {\n * console.log(key+':');\n * e[key].print();\n * }\n * console.log('}');\n * })\n * ```\n *\n * @param batchSize The number of elements desired per batch.\n * @param smallLastBatch Whether to emit the final batch when it has fewer\n * than batchSize elements. 
Default true.\n * @returns A `Dataset`, from which a stream of batches can be obtained.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n batch(batchSize, smallLastBatch = true) {\n const base = this;\n tf.util.assert(batchSize > 0, () => `batchSize needs to be positive, but it is\n ${batchSize}`);\n let size;\n if (this.size === Infinity || this.size == null) {\n // If the size of this dataset is infinity or null, the new size keeps the\n // same.\n size = this.size;\n }\n else if (smallLastBatch) {\n // If the size of this dataset is known and include small last batch, the\n // new size is full batch count plus last batch.\n size = Math.ceil(this.size / batchSize);\n }\n else {\n // If the size of this dataset is known and not include small last batch,\n // the new size is full batch count.\n size = Math.floor(this.size / batchSize);\n }\n return datasetFromIteratorFn(async () => {\n return (await base.iterator())\n .columnMajorBatch(batchSize, smallLastBatch, deepBatchConcat);\n }, size);\n }\n /**\n * Concatenates this `Dataset` with another.\n *\n * ```js\n * const a = tf.data.array([1, 2, 3]);\n * const b = tf.data.array([4, 5, 6]);\n * const c = a.concatenate(b);\n * await c.forEachAsync(e => console.log(e));\n * ```\n *\n * @param dataset A `Dataset` to be concatenated onto this one.\n * @returns A `Dataset`.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n concatenate(dataset) {\n const base = this;\n let size;\n if (this.size === Infinity || dataset.size === Infinity) {\n // If the size of any of these two dataset is infinity, new size is\n // infinity.\n size = Infinity;\n }\n else if (this.size != null && dataset.size != null) {\n // If the size of both datasets are known and not infinity, new size is\n // sum the size of these two datasets.\n size = this.size + dataset.size;\n }\n else {\n // If neither of these two datasets has infinite size and any of these two\n // datasets' size is null, the new size is null.\n size = null;\n }\n return datasetFromIteratorFn(async () => (await base.iterator()).concatenate(await dataset.iterator()), size);\n }\n /**\n * Filters this dataset according to `predicate`.\n *\n * ```js\n * const a = tf.data.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10])\n * .filter(x => x%2 === 0);\n * await a.forEachAsync(e => console.log(e));\n * ```\n *\n * @param predicate A function mapping a dataset element to a boolean or a\n * `Promise` for one.\n *\n * @returns A `Dataset` of elements for which the predicate was true.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n filter(predicate) {\n const base = this;\n let size;\n if (this.size === Infinity) {\n // If the size of this dataset is infinity, new size is infinity\n size = Infinity;\n }\n else {\n // If this dataset has limited elements, new size is null because it might\n // exhausted randomly.\n size = null;\n }\n return datasetFromIteratorFn(async () => {\n return (await base.iterator()).filter(x => tf.tidy(() => predicate(x)));\n }, size);\n }\n /**\n * Apply a function to every element of the dataset.\n *\n * After the function is applied to a dataset element, any Tensors contained\n * within that element are disposed.\n *\n * ```js\n * const a = tf.data.array([1, 2, 3]);\n * await a.forEachAsync(e => console.log(e));\n * ```\n *\n * @param f A function to apply to each dataset element.\n * @returns A `Promise` that resolves after all elements have been processed.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n async forEachAsync(f) {\n return (await 
this.iterator()).forEachAsync(f);\n }\n /**\n * Maps this dataset through a 1-to-1 transform.\n *\n * ```js\n * const a = tf.data.array([1, 2, 3]).map(x => x*x);\n * await a.forEachAsync(e => console.log(e));\n * ```\n *\n * @param transform A function mapping a dataset element to a transformed\n * dataset element.\n *\n * @returns A `Dataset` of transformed elements.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n map(transform) {\n const base = this;\n return datasetFromIteratorFn(async () => {\n return (await base.iterator()).map(x => tf.tidy(() => transform(x)));\n }, this.size);\n }\n /**\n * Maps this dataset through an async 1-to-1 transform.\n *\n * ```js\n * const a =\n * tf.data.array([1, 2, 3]).mapAsync(x => new Promise(function(resolve){\n * setTimeout(() => {\n * resolve(x * x);\n * }, Math.random()*1000 + 500);\n * }));\n * console.log(await a.toArray());\n * ```\n *\n * @param transform A function mapping a dataset element to a `Promise` for a\n * transformed dataset element. This transform is responsible for disposing\n * any intermediate `Tensor`s, i.e. by wrapping its computation in\n * `tf.tidy()`; that cannot be automated here (as it is in the synchronous\n * `map()` case).\n *\n * @returns A `Dataset` of transformed elements.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n mapAsync(transform) {\n const base = this;\n return datasetFromIteratorFn(async () => {\n return (await base.iterator()).mapAsync(transform);\n }, this.size);\n }\n /**\n * Creates a `Dataset` that prefetches elements from this dataset.\n *\n * @param bufferSize: An integer specifying the number of elements to be\n * prefetched.\n * @returns A `Dataset`.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n prefetch(bufferSize) {\n if (bufferSize == null) {\n throw new RangeError('`Dataset.prefetch()` requires bufferSize to be specified.');\n }\n const base = this;\n return datasetFromIteratorFn(async () => (await base.iterator()).prefetch(bufferSize), this.size);\n }\n /**\n * Repeats this dataset `count` times.\n *\n * NOTE: If this dataset is a function of global state (e.g. a random number\n * generator), then different repetitions may produce different elements.\n *\n * ```js\n * const a = tf.data.array([1, 2, 3]).repeat(3);\n * await a.forEachAsync(e => console.log(e));\n * ```\n *\n * @param count: (Optional) An integer, representing the number of times\n * the dataset should be repeated. The default behavior (if `count` is\n * `undefined` or negative) is for the dataset be repeated indefinitely.\n * @returns A `Dataset`.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n repeat(count) {\n const base = this;\n let size;\n if (this.size != null && count > 0) {\n // If this dataset has size and count is positive, new size is current\n // size multiply count. 
This also covers the case that current size is\n // infinity.\n size = this.size * count;\n }\n else if (count === 0) {\n // If count is 0, new size is 0.\n size = 0;\n }\n else if (this.size != null && (count === undefined || count < 0)) {\n // If this dataset has size and count is undefined or negative, the\n // dataset will be repeated indefinitely and new size is infinity.\n size = Infinity;\n }\n else {\n // If the size of this dataset is null, the new dataset's size is null.\n size = null;\n }\n return datasetFromIteratorFn(async () => {\n const iteratorIterator = iteratorFromFunction(async () => ({ value: await base.iterator(), done: false }));\n return iteratorFromConcatenated(iteratorIterator.take(count));\n }, size);\n }\n /**\n * Creates a `Dataset` that skips `count` initial elements from this dataset.\n *\n * ```js\n * const a = tf.data.array([1, 2, 3, 4, 5, 6]).skip(3);\n * await a.forEachAsync(e => console.log(e));\n * ```\n *\n * @param count: The number of elements of this dataset that should be skipped\n * to form the new dataset. If `count` is greater than the size of this\n * dataset, the new dataset will contain no elements. If `count`\n * is `undefined` or negative, skips the entire dataset.\n *\n * @returns A `Dataset`.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n skip(count) {\n const base = this;\n let size;\n if (this.size != null && count >= 0 && this.size >= count) {\n // If the size of this dataset is greater than count, the new dataset's\n // size is current size minus skipped size.This also covers the case that\n // current size is infinity.\n size = this.size - count;\n }\n else if (this.size != null &&\n (this.size < count || count === undefined || count < 0)) {\n // If the size of this dataset is smaller than count, or count is\n // undefined or negative, skips the entire dataset and the new size is 0.\n size = 0;\n }\n else {\n // If the size of this dataset is null, the new dataset's size is null.\n size = null;\n }\n return datasetFromIteratorFn(async () => (await base.iterator()).skip(count), size);\n }\n /**\n * Pseudorandomly shuffles the elements of this dataset. This is done in a\n * streaming manner, by sampling from a given number of prefetched elements.\n *\n * ```js\n * const a = tf.data.array([1, 2, 3, 4, 5, 6]).shuffle(3);\n * await a.forEachAsync(e => console.log(e));\n * ```\n *\n * @param bufferSize: An integer specifying the number of elements from this\n * dataset from which the new dataset will sample.\n * @param seed: (Optional) An integer specifying the random seed that will\n * be used to create the distribution.\n * @param reshuffleEachIteration: (Optional) A boolean, which if true\n * indicates that the dataset should be pseudorandomly reshuffled each time\n * it is iterated over. If false, elements will be returned in the same\n * shuffled order on each iteration. (Defaults to `true`.)\n * @returns A `Dataset`.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n shuffle(bufferSize, seed, reshuffleEachIteration = true) {\n if (bufferSize == null || bufferSize < 0) {\n if (this.size == null) {\n throw new RangeError('`Dataset.shuffle()` requires bufferSize to be specified.');\n }\n else {\n throw new RangeError('`Dataset.shuffle()` requires bufferSize to be specified. 
' +\n 'If your data fits in main memory (for regular JS objects), ' +\n 'and/or GPU memory (for `tf.Tensor`s), consider setting ' +\n `bufferSize to the dataset size (${this.size} elements)`);\n }\n }\n const base = this;\n const random = seedrandom.alea(seed || tf.util.now().toString());\n return datasetFromIteratorFn(async () => {\n let seed2 = random.int32();\n if (reshuffleEachIteration) {\n seed2 += random.int32();\n }\n return (await base.iterator()).shuffle(bufferSize, seed2.toString());\n }, this.size);\n }\n /**\n * Creates a `Dataset` with at most `count` initial elements from this\n * dataset.\n *\n * ```js\n * const a = tf.data.array([1, 2, 3, 4, 5, 6]).take(3);\n * await a.forEachAsync(e => console.log(e));\n * ```\n *\n * @param count: The number of elements of this dataset that should be taken\n * to form the new dataset. If `count` is `undefined` or negative, or if\n * `count` is greater than the size of this dataset, the new dataset will\n * contain all elements of this dataset.\n * @returns A `Dataset`.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n take(count) {\n const base = this;\n let size;\n if (this.size != null && this.size > count) {\n // If the size of this dataset is greater than count, the new dataset's\n // size is count.\n size = count;\n }\n else if (this.size != null && this.size <= count) {\n // If the size of this dataset is equal or smaller than count, the new\n // dataset's size is the size of this dataset.\n size = this.size;\n }\n else {\n // If the size of this dataset is null, the new dataset's size is null.\n size = null;\n }\n return datasetFromIteratorFn(async () => (await base.iterator()).take(count), size);\n }\n /**\n * Collect all elements of this dataset into an array.\n *\n * Obviously this will succeed only for small datasets that fit in memory.\n * Useful for testing and generally should be avoided if possible.\n *\n * ```js\n * const a = tf.data.array([1, 2, 3, 4, 5, 6]);\n * console.log(await a.toArray());\n * ```\n *\n * @returns A Promise for an array of elements, which will resolve\n * when a new stream has been obtained and fully consumed.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n async toArray() {\n if (this.size === Infinity) {\n throw new Error('Can not convert infinite data stream to array.');\n }\n return (await this.iterator()).toArray();\n }\n /**\n * Collect all elements of this dataset into an array with prefetching 100\n * elements. This is useful for testing, because the prefetch changes the\n * order in which the Promises are resolved along the processing pipeline.\n * This may help expose bugs where results are dependent on the order of\n * Promise resolution rather than on the logical order of the stream (i.e.,\n * due to hidden mutable state).\n *\n * @returns A Promise for an array of elements, which will resolve\n * when a new stream has been obtained and fully consumed.\n */\n async toArrayForTest() {\n if (this.size === Infinity) {\n throw new Error('Can not convert infinite data stream to array.');\n }\n return (await this.iterator()).toArrayForTest();\n }\n}\n// TODO(soergel): deep sharded shuffle, where supported\nDataset.MAX_BUFFER_SIZE = 10000;\n/**\n * Create a `Dataset` defined by a provided iterator() function.\n *\n * ```js\n * let i = -1;\n * const func = () =>\n * ++i < 5 ? 
{value: i, done: false} : {value: null, done: true};\n * const iter = tf.data.iteratorFromFunction(func);\n * const ds = tf.data.datasetFromIteratorFn(iter);\n * await ds.forEachAsync(e => console.log(e));\n * ```\n */\nexport function datasetFromIteratorFn(iteratorFn, size = null) {\n return new class extends Dataset {\n constructor() {\n super(...arguments);\n this.size = size;\n }\n /*\n * Provide a new stream of elements. Note this will also start new streams\n * from any underlying `Dataset`s.\n */\n async iterator() {\n return iteratorFn();\n }\n }();\n}\n/**\n * Create a `Dataset` from an array of elements.\n *\n * Create a Dataset from an array of objects:\n * ```js\n * const a = tf.data.array([{'item': 1}, {'item': 2}, {'item': 3}]);\n * await a.forEachAsync(e => console.log(e));\n * ```\n *\n * Create a Dataset from an array of numbers:\n * ```js\n * const a = tf.data.array([4, 5, 6]);\n * await a.forEachAsync(e => console.log(e));\n * ```\n * @param items An array of elements that will be parsed as items in a dataset.\n *\n * @doc {heading: 'Data', subheading: 'Creation', namespace: 'data'}\n */\nexport function array(items) {\n return datasetFromIteratorFn(async () => iteratorFromItems(items), items.length);\n}\n/**\n * Create a `Dataset` by zipping together an array, dict, or nested\n * structure of `Dataset`s (and perhaps additional constants).\n * The underlying datasets must provide elements in a consistent order such that\n * they correspond.\n *\n * The number of elements in the resulting dataset is the same as the size of\n * the smallest dataset in datasets.\n *\n * The nested structure of the `datasets` argument determines the\n * structure of elements in the resulting iterator.\n *\n * Note this means that, given an array of two datasets that produce dict\n * elements, the result is a dataset that produces elements that are arrays\n * of two dicts:\n *\n * Zip an array of datasets:\n * ```js\n * console.log('Zip two datasets of objects:');\n * const ds1 = tf.data.array([{a: 1}, {a: 2}, {a: 3}]);\n * const ds2 = tf.data.array([{b: 4}, {b: 5}, {b: 6}]);\n * const ds3 = tf.data.zip([ds1, ds2]);\n * await ds3.forEachAsync(e => console.log(JSON.stringify(e)));\n *\n * // If the goal is to merge the dicts in order to produce elements like\n * // {a: ..., b: ...}, this requires a second step such as:\n * console.log('Merge the objects:');\n * const ds4 = ds3.map(x => {return {a: x[0].a, b: x[1].b}});\n * await ds4.forEachAsync(e => console.log(e));\n * ```\n *\n * Zip a dict of datasets:\n * ```js\n * const a = tf.data.array([{a: 1}, {a: 2}, {a: 3}]);\n * const b = tf.data.array([{b: 4}, {b: 5}, {b: 6}]);\n * const c = tf.data.zip({c: a, d: b});\n * await c.forEachAsync(e => console.log(JSON.stringify(e)));\n * ```\n *\n * @doc {heading: 'Data', subheading: 'Operations', namespace: 'data'}\n */\nexport function zip(datasets) {\n // manually type-check the argument for JS users\n if (!isIterable(datasets)) {\n throw new Error('The argument to zip() must be an object or array.');\n }\n let size;\n if (Array.isArray(datasets)) {\n for (let i = 0; i < datasets.length; i++) {\n size = size == null ? datasets[i].size :\n Math.min(size, datasets[i].size);\n }\n }\n else if (datasets instanceof Object) {\n for (const ds in datasets) {\n size = size == null ? 
datasets[ds].size :\n Math.min(size, datasets[ds].size);\n }\n }\n return datasetFromIteratorFn(async () => {\n const streams = await deepMapAndAwaitAll(datasets, d => {\n if (d instanceof Dataset) {\n return { value: d.iterator(), recurse: false };\n }\n else if (isIterable(d)) {\n return { value: null, recurse: true };\n }\n else {\n throw new Error('Leaves of the structure passed to zip() must be Datasets, ' +\n 'not primitives.');\n }\n });\n return iteratorFromZipped(streams, ZipMismatchMode.SHORTEST);\n }, size);\n}\n/**\n * A zip function for use with deepZip, passed via the columnMajorBatch call.\n *\n * Accepts an array of identically-structured nested elements and either batches\n * them (if they are primitives, numeric arrays, or Tensors) or requests\n * recursion (if not).\n */\n// tslint:disable-next-line:no-any\nfunction deepBatchConcat(rows) {\n if (rows === null) {\n return null;\n }\n // use the first item to decide whether to recurse or batch here.\n const exampleRow = rows[0];\n if (canTensorify(exampleRow)) {\n // rows is an array of primitives, Tensors, or arrays. Batch them.\n const value = batchConcat(rows);\n return { value, recurse: false };\n }\n // the example row is an object, so recurse into it.\n return { value: null, recurse: true };\n}\n/**\n * Assembles a list of same-shaped numbers, number arrays, or Tensors\n * into a single new Tensor where axis 0 is the batch dimension.\n */\nfunction batchConcat(arrays) {\n if (arrays.length === 0) {\n // We can't return an empty Tensor because we don't know the element shape.\n throw new Error('Can\\'t make a batch of zero elements.');\n }\n if (arrays[0] instanceof tf.Tensor) {\n // Input is an array of Tensors\n return tf.stack(arrays);\n }\n else {\n // Input is a possibly-nested array of numbers.\n return tf.tensor(arrays);\n }\n}\n//# sourceMappingURL=dataset.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { Dataset } from '../dataset';\n/**\n * Represents a potentially large collection of text lines.\n *\n * The results are not batched.\n */\nexport class TextLineDataset extends Dataset {\n /**\n * Create a `TextLineDataset`.\n *\n * @param input A `DataSource` providing a chunked, UTF8-encoded byte stream.\n */\n constructor(input) {\n super();\n this.input = input;\n }\n async iterator() {\n const inputIterator = await this.input.iterator();\n const utf8Iterator = inputIterator.decodeUTF8();\n const lineIterator = utf8Iterator.split('\\n').map(line => {\n // Windows/DOS format text file has extra line breaker at the end of line.\n if (line.endsWith('\\r')) {\n line = line.slice(0, -1);\n }\n return line;\n });\n return lineIterator;\n }\n}\n//# sourceMappingURL=text_line_dataset.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nimport { Dataset } from '../dataset';\nimport { TextLineDataset } from './text_line_dataset';\nconst CODE_QUOTE = '\"';\nconst STATE_OUT = Symbol('out');\nconst STATE_FIELD = Symbol('field');\nconst STATE_QUOTE = Symbol('quote');\nconst STATE_QUOTE_AFTER_QUOTE = Symbol('quoteafterquote');\nconst STATE_WITHIN_QUOTE_IN_QUOTE = Symbol('quoteinquote');\n/**\n * Represents a potentially large collection of delimited text records.\n *\n * The produced `TensorContainer`s each contain one key-value pair for\n * every column of the table. When a field is empty in the incoming data, the\n * resulting value is `undefined`, or throw error if it is required. Values\n * that can be parsed as numbers are emitted as type `number`, other values\n * are parsed as `string`.\n *\n * The results are not batched.\n *\n * @doc {heading: 'Data', subheading: 'Classes', namespace: 'data'}\n */\nexport class CSVDataset extends Dataset {\n /**\n * Create a `CSVDataset`.\n *\n * @param input A `DataSource` providing a chunked, UTF8-encoded byte stream.\n * @param csvConfig (Optional) A CSVConfig object that contains configurations\n * of reading and decoding from CSV file(s).\n *\n * hasHeader: (Optional) A boolean value that indicates whether the first\n * row of provided CSV file is a header line with column names, and should\n * not be included in the data. Defaults to `true`.\n *\n * columnNames: (Optional) A list of strings that corresponds to\n * the CSV column names, in order. If provided, it ignores the column\n * names inferred from the header row. If not provided, infers the column\n * names from the first row of the records. If hasHeader is false and\n * columnNames is not provided, this method throws an error.\n *\n * columnConfigs: (Optional) A dictionary whose key is column names, value\n * is an object stating if this column is required, column's data type,\n * default value, and if this column is label. If provided, keys must\n * correspond to names provided in columnNames or inferred from the file\n * header lines. If isLabel is true any column, returns an array of two\n * items: the first item is a dict of features key/value pairs, the second\n * item is a dict of labels key/value pairs. If no feature is marked as\n * label, returns a dict of features only.\n *\n * configuredColumnsOnly (Optional) If true, only columns provided in\n * columnConfigs will be parsed and provided during iteration.\n *\n * delimiter (Optional) The string used to parse each line of the input\n * file. 
Defaults to `,`.\n */\n constructor(input, csvConfig) {\n super();\n this.input = input;\n this.hasHeader = true;\n this.fullColumnNames = null;\n this.columnNamesValidated = false;\n this.columnConfigs = null;\n this.configuredColumnsOnly = false;\n this.delimiter = ',';\n this.delimWhitespace = false;\n this.base = new TextLineDataset(input);\n if (!csvConfig) {\n csvConfig = {};\n }\n this.hasHeader = csvConfig.hasHeader === false ? false : true;\n this.fullColumnNames = csvConfig.columnNames;\n this.columnConfigs = csvConfig.columnConfigs;\n this.configuredColumnsOnly = csvConfig.configuredColumnsOnly;\n if (csvConfig.delimWhitespace) {\n util.assert(csvConfig.delimiter == null, () => 'Delimiter should not be provided when delimWhitespace is true.');\n this.delimWhitespace = true;\n this.delimiter = ' ';\n }\n else {\n this.delimiter = csvConfig.delimiter ? csvConfig.delimiter : ',';\n }\n }\n /**\n * Returns column names of the csv dataset. If `configuredColumnsOnly` is\n * true, return column names in `columnConfigs`. If `configuredColumnsOnly` is\n * false and `columnNames` is provided, `columnNames`. If\n * `configuredColumnsOnly` is false and `columnNames` is not provided, return\n * all column names parsed from the csv file. For example usage please go to\n * `tf.data.csv`.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n async columnNames() {\n if (!this.columnNamesValidated) {\n await this.setColumnNames();\n }\n return this.configuredColumnsOnly ? Object.keys(this.columnConfigs) :\n this.fullColumnNames;\n }\n /* 1) If `columnNames` is provided as string[], use this string[] as output\n * keys in corresponding order. The length must match the number of inferred\n * columns if `hasHeader` is true .\n * 2) If `columnNames` is not provided, parse header line as `columnNames` if\n * hasHeader is true. 
If `hasHeader` is false, throw an error.\n * 3) If `columnConfigs` is provided, all the keys in `columnConfigs` must\n * exist in parsed `columnNames`.\n */\n async setColumnNames() {\n const columnNamesFromFile = await this.maybeReadHeaderLine();\n if (!this.fullColumnNames && !columnNamesFromFile) {\n // Throw an error if columnNames is not provided and no header line.\n throw new Error('Column names must be provided if there is no header line.');\n }\n else if (this.fullColumnNames && columnNamesFromFile) {\n // Check provided columnNames match header line.\n util.assert(columnNamesFromFile.length === this.fullColumnNames.length, () => 'The length of provided columnNames (' +\n this.fullColumnNames.length.toString() +\n ') does not match the length of the header line read from ' +\n 'file (' + columnNamesFromFile.length.toString() + ').');\n }\n if (!this.fullColumnNames) {\n this.fullColumnNames = columnNamesFromFile;\n }\n // Check if there are duplicate column names.\n const counts = this.fullColumnNames.reduce((countAcc, name) => {\n countAcc[name] = (countAcc[name] + 1) || 1;\n return countAcc;\n }, {});\n const duplicateNames = Object.keys(counts).filter((name) => (counts[name] > 1));\n util.assert(duplicateNames.length === 0, () => 'Duplicate column names found: ' + duplicateNames.toString());\n // Check if keys in columnConfigs match columnNames.\n if (this.columnConfigs) {\n for (const key of Object.keys(this.columnConfigs)) {\n const index = this.fullColumnNames.indexOf(key);\n if (index === -1) {\n throw new Error('The key \"' + key +\n '\" provided in columnConfigs does not match any of the column ' +\n 'names (' + this.fullColumnNames.toString() + ').');\n }\n }\n }\n this.columnNamesValidated = true;\n }\n async maybeReadHeaderLine() {\n if (this.hasHeader) {\n const iter = await this.base.iterator();\n const firstElement = await iter.next();\n if (firstElement.done) {\n throw new Error('No data was found for CSV parsing.');\n }\n const firstLine = firstElement.value;\n const headers = this.parseRow(firstLine, false);\n return headers;\n }\n else {\n return null;\n }\n }\n async iterator() {\n if (!this.columnNamesValidated) {\n await this.setColumnNames();\n }\n let lines = await this.base.iterator();\n if (this.hasHeader) {\n // We previously read the first line to get the columnNames.\n // Now that we're providing data, skip it.\n lines = lines.skip(1);\n }\n return lines.map(x => this.makeDataElement(x));\n }\n makeDataElement(line) {\n const values = this.parseRow(line);\n const features = {};\n const labels = {};\n for (let i = 0; i < this.fullColumnNames.length; i++) {\n const key = this.fullColumnNames[i];\n const config = this.columnConfigs ? this.columnConfigs[key] : null;\n if (this.configuredColumnsOnly && !config) {\n // This column is not selected.\n continue;\n }\n else {\n const value = values[i];\n let parsedValue = null;\n if (value === '') {\n // If default value is provided, use it. 
If default value is not\n // provided, set as undefined.\n if (config && config.default !== undefined) {\n parsedValue = config.default;\n }\n else if (config && (config.required || config.isLabel)) {\n throw new Error(`Required column ${key} is empty in this line: ${line}`);\n }\n else {\n parsedValue = undefined;\n }\n }\n else {\n // A value is present, so parse it based on type\n const valueAsNum = Number(value);\n if (isNaN(valueAsNum)) {\n // The value is a string and this column is declared as boolean\n // in config, parse it as boolean.\n if (config && config.dtype === 'bool') {\n parsedValue = this.getBoolean(value);\n }\n else {\n // Set value as string\n parsedValue = value;\n }\n }\n else if (!config || !config.dtype) {\n // If this value is a number and no type config is provided, return\n // it as number.\n parsedValue = valueAsNum;\n }\n else {\n // If this value is a number and data type is provided, parse it\n // according to provided data type.\n switch (config.dtype) {\n case 'float32':\n parsedValue = valueAsNum;\n break;\n case 'int32':\n parsedValue = Math.floor(valueAsNum);\n break;\n case 'bool':\n parsedValue = this.getBoolean(value);\n break;\n default:\n parsedValue = valueAsNum;\n }\n }\n }\n // Check if this column is label.\n (config && config.isLabel) ? labels[key] = parsedValue :\n features[key] = parsedValue;\n }\n }\n // If label exists, return an object of features and labels as {xs:features,\n // ys:labels}, otherwise return features only.\n if (Object.keys(labels).length === 0) {\n return features;\n }\n else {\n return { xs: features, ys: labels };\n }\n }\n getBoolean(value) {\n if (value === '1' || value.toLowerCase() === 'true') {\n return 1;\n }\n else {\n return 0;\n }\n }\n // adapted from https://beta.observablehq.com/@mbostock/streaming-csv\n parseRow(line, validateElementCount = true) {\n const result = [];\n let readOffset = 0;\n const readLength = line.length;\n let currentState = STATE_OUT;\n // Goes through the line to parse quote.\n for (let i = 0; i < readLength; i++) {\n switch (currentState) {\n // Before enter a new field\n case STATE_OUT:\n switch (line.charAt(i)) {\n // Enter a quoted field\n case CODE_QUOTE:\n readOffset = i + 1;\n currentState = STATE_QUOTE;\n break;\n // Read an empty field\n case this.delimiter:\n readOffset = i + 1;\n // If delimiter is white space and configured to collapse\n // multiple white spaces, ignore this white space.\n if (this.delimiter === ' ' && this.delimWhitespace) {\n break;\n }\n result.push('');\n currentState = STATE_OUT;\n break;\n // Enter an unquoted field\n default:\n currentState = STATE_FIELD;\n readOffset = i;\n break;\n }\n break;\n // In an unquoted field\n case STATE_FIELD:\n switch (line.charAt(i)) {\n // Exit an unquoted field, add it to result\n case this.delimiter:\n result.push(line.substring(readOffset, i));\n currentState = STATE_OUT;\n readOffset = i + 1;\n break;\n default:\n }\n break;\n // In a quoted field\n case STATE_QUOTE:\n switch (line.charAt(i)) {\n // Read a quote after a quote\n case CODE_QUOTE:\n currentState = STATE_QUOTE_AFTER_QUOTE;\n break;\n default:\n }\n break;\n // This state means it's right after a second quote in a field\n case STATE_QUOTE_AFTER_QUOTE:\n switch (line.charAt(i)) {\n // Finished a quoted field\n case this.delimiter:\n result.push(line.substring(readOffset, i - 1));\n currentState = STATE_OUT;\n readOffset = i + 1;\n break;\n // Finished a quoted part in a quoted field\n case CODE_QUOTE:\n currentState = STATE_QUOTE;\n break;\n // In a 
quoted part in a quoted field\n default:\n currentState = STATE_WITHIN_QUOTE_IN_QUOTE;\n break;\n }\n break;\n case STATE_WITHIN_QUOTE_IN_QUOTE:\n switch (line.charAt(i)) {\n // Exit a quoted part in a quoted field\n case CODE_QUOTE:\n currentState = STATE_QUOTE;\n break;\n default:\n }\n break;\n default:\n }\n }\n // Adds last item based on if it is quoted.\n if (currentState === STATE_QUOTE_AFTER_QUOTE) {\n result.push(line.substring(readOffset, readLength - 1));\n }\n else {\n result.push(line.substring(readOffset));\n }\n // Check if each row has the same number of elements as column names.\n if (validateElementCount && result.length !== this.fullColumnNames.length) {\n throw new Error(`Invalid row in csv file. Should have ${this.fullColumnNames.length} elements in a row, but got ${result}`);\n }\n return result;\n }\n}\n// TODO(soergel): add more basic datasets for parity with tf.data\n// tf.data.FixedLengthRecordDataset()\n// tf.data.TFRecordDataset()\n//# sourceMappingURL=csv_dataset.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { env, tensor, util } from '@tensorflow/tfjs-core';\nimport { LazyIterator } from './lazy_iterator';\n/**\n * Provide a stream of tensors from microphone audio stream. The tensors are\n * representing audio data as frequency-domain spectrogram generated with\n * browser's native FFT. Tensors representing time-domain waveform is available\n * based on configuration. Only works in browser environment.\n */\nexport class MicrophoneIterator extends LazyIterator {\n constructor(microphoneConfig) {\n super();\n this.microphoneConfig = microphoneConfig;\n this.isClosed = false;\n this.fftSize = microphoneConfig.fftSize || 1024;\n const fftSizeLog2 = Math.log2(this.fftSize);\n if (this.fftSize < 0 || fftSizeLog2 < 4 || fftSizeLog2 > 14 ||\n !Number.isInteger(fftSizeLog2)) {\n throw new Error(`Invalid fftSize: it must be a power of 2 between ` +\n `2 to 4 and 2 to 14, but got ${this.fftSize}`);\n }\n this.numFrames = microphoneConfig.numFramesPerSpectrogram || 43;\n this.sampleRateHz = microphoneConfig.sampleRateHz;\n this.columnTruncateLength =\n microphoneConfig.columnTruncateLength || this.fftSize;\n this.audioTrackConstraints = microphoneConfig.audioTrackConstraints;\n this.smoothingTimeConstant = microphoneConfig.smoothingTimeConstant || 0;\n this.includeSpectrogram =\n microphoneConfig.includeSpectrogram === false ? false : true;\n this.includeWaveform =\n microphoneConfig.includeWaveform === true ? true : false;\n if (!this.includeSpectrogram && !this.includeWaveform) {\n throw new Error('Both includeSpectrogram and includeWaveform are false. 
' +\n 'At least one type of data should be returned.');\n }\n }\n summary() {\n return `microphone`;\n }\n // Construct a MicrophoneIterator and start the audio stream.\n static async create(microphoneConfig = {}) {\n if (env().get('IS_NODE')) {\n throw new Error('microphone API is only supported in browser environment.');\n }\n const microphoneIterator = new MicrophoneIterator(microphoneConfig);\n // Call async function start() to initialize the audio stream.\n await microphoneIterator.start();\n return microphoneIterator;\n }\n // Start the audio stream and FFT.\n async start() {\n try {\n this.stream = await navigator.mediaDevices.getUserMedia({\n audio: this.audioTrackConstraints == null ? true :\n this.audioTrackConstraints,\n video: false\n });\n }\n catch (e) {\n throw new Error(`Error thrown while initializing video stream: ${e.message}`);\n }\n if (!this.stream) {\n throw new Error('Could not obtain audio from microphone.');\n }\n const ctxConstructor = \n // tslint:disable-next-line:no-any\n window.AudioContext || window.webkitAudioContext;\n this.audioContext = new ctxConstructor();\n if (!this.sampleRateHz) {\n // If sample rate is not provided, use the available sample rate on\n // device.\n this.sampleRateHz = this.audioContext.sampleRate;\n }\n else if (this.audioContext.sampleRate !== this.sampleRateHz) {\n throw new Error(`Mismatch in sampling rate: ` +\n `Expected: ${this.sampleRateHz}; ` +\n `Actual: ${this.audioContext.sampleRate}`);\n }\n const streamSource = this.audioContext.createMediaStreamSource(this.stream);\n this.analyser = this.audioContext.createAnalyser();\n this.analyser.fftSize = this.fftSize * 2;\n this.analyser.smoothingTimeConstant = this.smoothingTimeConstant;\n streamSource.connect(this.analyser);\n this.freqData = new Float32Array(this.fftSize);\n this.timeData = new Float32Array(this.fftSize);\n return;\n }\n async next() {\n if (this.isClosed) {\n return { value: null, done: true };\n }\n let spectrogramTensor;\n let waveformTensor;\n const audioDataQueue = await this.getAudioData();\n if (this.includeSpectrogram) {\n const freqData = this.flattenQueue(audioDataQueue.freqDataQueue);\n spectrogramTensor = this.getTensorFromAudioDataArray(freqData, [this.numFrames, this.columnTruncateLength, 1]);\n }\n if (this.includeWaveform) {\n const timeData = this.flattenQueue(audioDataQueue.timeDataQueue);\n waveformTensor = this.getTensorFromAudioDataArray(timeData, [this.numFrames * this.fftSize, 1]);\n }\n return {\n value: { 'spectrogram': spectrogramTensor, 'waveform': waveformTensor },\n done: false\n };\n }\n // Capture one result from the audio stream, and extract the value from\n // iterator.next() result.\n async capture() {\n return (await this.next()).value;\n }\n async getAudioData() {\n const freqDataQueue = [];\n const timeDataQueue = [];\n let currentFrames = 0;\n return new Promise(resolve => {\n const intervalID = setInterval(() => {\n if (this.includeSpectrogram) {\n this.analyser.getFloatFrequencyData(this.freqData);\n // If the audio stream is initializing, return empty queue.\n if (this.freqData[0] === -Infinity) {\n resolve({ freqDataQueue, timeDataQueue });\n }\n freqDataQueue.push(this.freqData.slice(0, this.columnTruncateLength));\n }\n if (this.includeWaveform) {\n this.analyser.getFloatTimeDomainData(this.timeData);\n timeDataQueue.push(this.timeData.slice());\n }\n // Clean interval and return when all frames have been collected\n if (++currentFrames === this.numFrames) {\n clearInterval(intervalID);\n resolve({ freqDataQueue, 
timeDataQueue });\n }\n }, this.fftSize / this.sampleRateHz * 1e3);\n });\n }\n // Stop the audio stream and pause the iterator.\n stop() {\n if (!this.isClosed) {\n this.isClosed = true;\n this.analyser.disconnect();\n this.audioContext.close();\n if (this.stream != null && this.stream.getTracks().length > 0) {\n this.stream.getTracks()[0].stop();\n }\n }\n }\n // Override toArray() function to prevent collecting.\n toArray() {\n throw new Error('Can not convert infinite audio stream to array.');\n }\n // Return audio sampling rate in Hz\n getSampleRate() {\n return this.sampleRateHz;\n }\n flattenQueue(queue) {\n const frameSize = queue[0].length;\n const freqData = new Float32Array(queue.length * frameSize);\n queue.forEach((data, i) => freqData.set(data, i * frameSize));\n return freqData;\n }\n getTensorFromAudioDataArray(freqData, shape) {\n const vals = new Float32Array(util.sizeFromShape(shape));\n // If the data is less than the output shape, the rest is padded with zeros.\n vals.set(freqData, vals.length - freqData.length);\n return tensor(vals, shape);\n }\n}\n//# sourceMappingURL=microphone_iterator.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { browser, env, image, tensor1d, tensor2d, tidy, util } from '@tensorflow/tfjs-core';\nimport { LazyIterator } from './lazy_iterator';\n/**\n * Provide a stream of image tensors from webcam video stream. 
Only works in\n * browser environment.\n */\nexport class WebcamIterator extends LazyIterator {\n constructor(webcamVideoElement, webcamConfig) {\n super();\n this.webcamVideoElement = webcamVideoElement;\n this.webcamConfig = webcamConfig;\n this.isClosed = true;\n this.resize = false;\n if (this.needToResize()) {\n this.resize = true;\n this.cropSize =\n [this.webcamConfig.resizeHeight, this.webcamConfig.resizeWidth];\n this.cropBoxInd = tensor1d([0], 'int32');\n if (this.webcamConfig.centerCrop) {\n // Calculate the box based on resizing shape.\n const widthCroppingRatio = this.webcamConfig.resizeWidth * 1.0 / this.webcamVideoElement.width;\n const heightCroppingRatio = this.webcamConfig.resizeHeight * 1.0 /\n this.webcamVideoElement.height;\n const widthCropStart = (1 - widthCroppingRatio) / 2;\n const heightCropStart = (1 - heightCroppingRatio) / 2;\n const widthCropEnd = widthCropStart + widthCroppingRatio;\n const heightCropEnd = heightCroppingRatio + heightCropStart;\n this.cropBox = tensor2d([heightCropStart, widthCropStart, heightCropEnd, widthCropEnd], [1, 4]);\n }\n else {\n this.cropBox = tensor2d([0, 0, 1, 1], [1, 4]);\n }\n }\n }\n summary() {\n return `webcam`;\n }\n // Construct a WebcamIterator and start it's video stream.\n static async create(webcamVideoElement, webcamConfig = {}) {\n if (env().get('IS_NODE')) {\n throw new Error('tf.data.webcam is only supported in browser environment.');\n }\n if (!webcamVideoElement) {\n // If webcam video element is not provided, create a hidden video element\n // with provided width and height.\n webcamVideoElement = document.createElement('video');\n if (!webcamConfig.resizeWidth || !webcamConfig.resizeHeight) {\n throw new Error('Please provide webcam video element, or resizeWidth and ' +\n 'resizeHeight to create a hidden video element.');\n }\n webcamVideoElement.width = webcamConfig.resizeWidth;\n webcamVideoElement.height = webcamConfig.resizeHeight;\n }\n const webcamIterator = new WebcamIterator(webcamVideoElement, webcamConfig);\n // Call async function to initialize the video stream.\n await webcamIterator.start();\n return webcamIterator;\n }\n // Async function to start video stream.\n async start() {\n if (this.webcamConfig.facingMode) {\n util.assert((this.webcamConfig.facingMode === 'user') ||\n (this.webcamConfig.facingMode === 'environment'), () => `Invalid webcam facing mode: ${this.webcamConfig.facingMode}. 
` +\n `Please provide 'user' or 'environment'`);\n }\n try {\n this.stream = await navigator.mediaDevices.getUserMedia({\n video: {\n deviceId: this.webcamConfig.deviceId,\n facingMode: this.webcamConfig.facingMode ?\n this.webcamConfig.facingMode :\n 'user',\n width: this.webcamVideoElement.width,\n height: this.webcamVideoElement.height\n }\n });\n }\n catch (e) {\n // Modify the error message but leave the stack trace intact\n e.message = `Error thrown while initializing video stream: ${e.message}`;\n throw e;\n }\n if (!this.stream) {\n throw new Error('Could not obtain video from webcam.');\n }\n // Older browsers may not have srcObject\n try {\n this.webcamVideoElement.srcObject = this.stream;\n }\n catch (error) {\n console.log(error);\n this.webcamVideoElement.src = window.URL.createObjectURL(this.stream);\n }\n // Start the webcam video stream\n this.webcamVideoElement.play();\n this.isClosed = false;\n return new Promise(resolve => {\n // Add event listener to make sure the webcam has been fully initialized.\n this.webcamVideoElement.onloadedmetadata = () => {\n resolve();\n };\n });\n }\n async next() {\n if (this.isClosed) {\n return { value: null, done: true };\n }\n let img;\n try {\n img = browser.fromPixels(this.webcamVideoElement);\n }\n catch (e) {\n throw new Error(`Error thrown converting video to pixels: ${JSON.stringify(e)}`);\n }\n if (this.resize) {\n try {\n return { value: this.cropAndResizeFrame(img), done: false };\n }\n catch (e) {\n throw new Error(`Error thrown cropping the video: ${e.message}`);\n }\n finally {\n img.dispose();\n }\n }\n else {\n return { value: img, done: false };\n }\n }\n needToResize() {\n // If resizeWidth and resizeHeight are provided, and different from the\n // width and height of original HTMLVideoElement, then resizing and cropping\n // is required.\n if (this.webcamConfig.resizeWidth && this.webcamConfig.resizeHeight &&\n (this.webcamVideoElement.width !== this.webcamConfig.resizeWidth ||\n this.webcamVideoElement.height !== this.webcamConfig.resizeHeight)) {\n return true;\n }\n return false;\n }\n // Cropping and resizing each frame based on config\n cropAndResizeFrame(img) {\n return tidy(() => {\n const expandedImage = img.toFloat().expandDims(0);\n let resizedImage;\n resizedImage = image.cropAndResize(expandedImage, this.cropBox, this.cropBoxInd, this.cropSize, 'bilinear');\n // Extract image from batch cropping.\n const shape = resizedImage.shape;\n return resizedImage.reshape(shape.slice(1));\n });\n }\n // Capture one frame from the video stream, and extract the value from\n // iterator.next() result.\n async capture() {\n return (await this.next()).value;\n }\n // Stop the video stream and pause webcam iterator.\n stop() {\n const tracks = this.stream.getTracks();\n tracks.forEach(track => track.stop());\n try {\n this.webcamVideoElement.srcObject = null;\n }\n catch (error) {\n console.log(error);\n this.webcamVideoElement.src = null;\n }\n this.isClosed = true;\n }\n // Override toArray() function to prevent collecting.\n toArray() {\n throw new Error('Can not convert infinite video stream to array.');\n }\n}\n//# sourceMappingURL=webcam_iterator.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\n/**\n * Represents a data source readable as a stream of binary data chunks.\n *\n * Because `Dataset`s can be read repeatedly (via `Dataset.iterator()`), this\n * provides a means to repeatedly create streams from the underlying data\n * sources.\n */\nexport class DataSource {\n}\n// TODO(soergel): consider convenience factory functions here\n// in combination with chainable source->dataset above, e.g.:\n// tf.data.url(...).asCsvDataset().shuffle().batch()\n//# sourceMappingURL=datasource.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { LazyIterator, OneToManyIterator } from './lazy_iterator';\nexport class StringIterator extends LazyIterator {\n /**\n * Splits a string stream on a given separator.\n *\n * It is assumed that the incoming chunk boundaries have no semantic meaning,\n * so conceptually the incoming stream is treated simply as the concatenation\n * of its elements.\n *\n * The outgoing stream provides chunks corresponding to the results of the\n * standard string split() operation (even if such a chunk spanned incoming\n * chunks). The separators are not included.\n *\n * A typical usage is to split a text file (represented as a stream with\n * arbitrary chunk boundaries) into lines.\n *\n * @param upstream A readable stream of strings that can be treated as\n * concatenated.\n * @param separator A character to split on.\n */\n split(separator) {\n return new SplitIterator(this, separator);\n }\n}\n// ============================================================================\n// The following private classes serve to implement the chainable methods\n// on StringIterator. 
Unfortunately they can't be placed in separate files, due\n// to resulting trouble with circular imports.\n// ============================================================================\n// We wanted multiple inheritance, e.g.\n// class SplitIterator extends QueueIterator, StringIterator\n// but the TypeScript mixin approach is a bit hacky, so we take this adapter\n// approach instead.\nclass SplitIterator extends StringIterator {\n constructor(upstream, separator) {\n super();\n this.upstream = upstream;\n this.impl = new SplitIteratorImpl(upstream, separator);\n }\n summary() {\n return this.impl.summary();\n }\n async next() {\n return this.impl.next();\n }\n}\nclass SplitIteratorImpl extends OneToManyIterator {\n constructor(upstream, separator) {\n super();\n this.upstream = upstream;\n this.separator = separator;\n // A partial string at the end of an upstream chunk\n this.carryover = '';\n }\n summary() {\n return `${this.upstream.summary()} -> Split('${this.separator}')`;\n }\n async pump() {\n const chunkResult = await this.upstream.next();\n if (chunkResult.done) {\n if (this.carryover === '') {\n return false;\n }\n // Pretend that the pump succeeded in order to emit the small last batch.\n // The next pump() call will actually fail.\n this.outputQueue.push(this.carryover);\n this.carryover = '';\n return true;\n }\n const lines = chunkResult.value.split(this.separator);\n // Note the behavior: \" ab \".split(' ') === ['', 'ab', '']\n // Thus the carryover may be '' if the separator falls on a chunk\n // boundary; this produces the correct result.\n lines[0] = this.carryover + lines[0];\n for (const line of lines.slice(0, -1)) {\n this.outputQueue.push(line);\n }\n this.carryover = lines[lines.length - 1];\n return true;\n }\n}\n//# sourceMappingURL=string_iterator.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { env } from '@tensorflow/tfjs-core';\nimport { LazyIterator, OneToManyIterator } from './lazy_iterator';\nimport { StringIterator } from './string_iterator';\nexport class ByteChunkIterator extends LazyIterator {\n /**\n * Decode a stream of UTF8-encoded byte arrays to a stream of strings.\n *\n * The byte arrays producetd from the ByteChunkIterator on which this is\n * called will be interpreted as concatenated. No assumptions are made about\n * the boundaries of the incoming chunks, so a multi-byte UTF8 encoding of a\n * character may span the boundary between chunks. This naturally happens,\n * for instance, when reading fixed-size byte arrays from a file.\n */\n decodeUTF8() {\n return new Utf8Iterator(this);\n }\n}\n// ============================================================================\n// The following private classes serve to implement the chainable methods\n// on ByteChunkIterator. 
Unfortunately they can't be placed in separate files,\n// due to resulting trouble with circular imports.\n// ============================================================================\n// We wanted multiple inheritance, e.g.\n// class Utf8Iterator extends QueueIterator, StringIterator\n// but the TypeScript mixin approach is a bit hacky, so we take this adapter\n// approach instead.\nclass Utf8Iterator extends StringIterator {\n constructor(upstream) {\n super();\n this.upstream = upstream;\n this.impl = new Utf8IteratorImpl(upstream);\n }\n summary() {\n return this.impl.summary();\n }\n async next() {\n return this.impl.next();\n }\n}\n/**\n * Decode a stream of UTF8-encoded byte arrays to a stream of strings.\n *\n * This is tricky because the incoming byte array boundaries may disrupt a\n * multi-byte UTF8 character. Thus any incomplete character data at the end of\n * a chunk must be carried over and prepended to the next chunk before\n * decoding. Luckily with native decoder, TextDecoder in browser and\n * string_decoder in node, byte array boundaries are handled automatically.\n *\n * In the context of an input pipeline for machine learning, UTF8 decoding is\n * needed to parse text files containing training examples or prediction\n * requests (e.g., formatted as CSV or JSON). We cannot use the built-in\n * decoding provided by FileReader.readAsText() because here we are in a\n * streaming context, which FileReader does not support.\n *\n * @param upstream A `LazyIterator` of `Uint8Arrays` containing UTF8-encoded\n * text, which should be interpreted as concatenated. No assumptions are\n * made about the boundaries of the incoming chunks, so a multi-byte UTF8\n * encoding of a character may span the boundary between chunks. This\n * naturally happens, for instance, when reading fixed-size byte arrays from a\n * file.\n */\nclass Utf8IteratorImpl extends OneToManyIterator {\n constructor(upstream) {\n super();\n this.upstream = upstream;\n if (env().get('IS_BROWSER')) {\n this.decoder = new TextDecoder('utf-8');\n }\n else {\n // tslint:disable-next-line:no-require-imports\n const { StringDecoder } = require('string_decoder');\n this.decoder = new StringDecoder('utf8');\n }\n }\n summary() {\n return `${this.upstream.summary()} -> Utf8`;\n }\n async pump() {\n const chunkResult = await this.upstream.next();\n let chunk;\n if (chunkResult.done) {\n return false;\n }\n else {\n chunk = chunkResult.value;\n }\n let text;\n if (env().get('IS_BROWSER')) {\n text = this.decoder.decode(chunk, { stream: true });\n }\n else {\n text = this.decoder.write(Buffer.from(chunk.buffer));\n }\n this.outputQueue.push(text);\n return true;\n }\n}\n//# sourceMappingURL=byte_chunk_iterator.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\n// inspired by https://github.com/maxogden/filereader-stream\nimport { env, util } from '@tensorflow/tfjs-core';\nimport { ByteChunkIterator } from './byte_chunk_iterator';\n/**\n * Provide a stream of chunks from a File, Blob, or Uint8Array.\n * @param file The source File, Blob or Uint8Array.\n * @param options Optional settings controlling file reading.\n * @returns a lazy Iterator of Uint8Arrays containing sequential chunks of the\n * input File, Blob or Uint8Array.\n */\nexport class FileChunkIterator extends ByteChunkIterator {\n constructor(file, options = {}) {\n super();\n this.file = file;\n this.options = options;\n util.assert((file instanceof Uint8Array) ||\n (env().get('IS_BROWSER') ?\n (file instanceof File || file instanceof Blob) :\n false), () => 'FileChunkIterator only supports File, Blob and Uint8Array ' +\n 'right now.');\n this.offset = options.offset || 0;\n // default 1MB chunk has tolerable perf on large files\n this.chunkSize = options.chunkSize || 1024 * 1024;\n }\n summary() {\n return `FileChunks ${this.file}`;\n }\n async next() {\n if (this.offset >= ((this.file instanceof Uint8Array) ?\n this.file.byteLength :\n this.file.size)) {\n return { value: null, done: true };\n }\n const chunk = new Promise((resolve, reject) => {\n const end = this.offset + this.chunkSize;\n if (this.file instanceof Uint8Array) {\n // Note if end > this.uint8Array.byteLength, we just get a small last\n // chunk.\n resolve(new Uint8Array(this.file.slice(this.offset, end)));\n }\n else {\n // This branch assumes that this.file type is File or Blob, which\n // means it is in the browser environment.\n // TODO(soergel): is this a performance issue?\n const fileReader = new FileReader();\n fileReader.onload = (event) => {\n let data = fileReader.result;\n // Not sure we can trust the return type of\n // FileReader.readAsArrayBuffer See e.g.\n // https://github.com/node-file-api/FileReader/issues/2\n if (data instanceof ArrayBuffer) {\n data = new Uint8Array(data);\n }\n if (!(data instanceof Uint8Array)) {\n return reject(new TypeError('FileReader returned unknown type.'));\n }\n resolve(data);\n };\n fileReader.onabort = (event) => {\n return reject(new Error('Aborted'));\n };\n fileReader.onerror = (event) => {\n return reject(new Error(event.type));\n };\n // TODO(soergel): better handle onabort, onerror\n // Note if end > this.file.size, we just get a small last chunk.\n const slice = this.file.slice(this.offset, end);\n // We can't use readAsText here (even if we know the file is text)\n // because the slice boundary may fall within a multi-byte character.\n fileReader.readAsArrayBuffer(slice);\n }\n this.offset = end;\n });\n return { value: (await chunk), done: false };\n }\n}\n//# sourceMappingURL=file_chunk_iterator.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nimport { FileChunkIterator } from './file_chunk_iterator';\n/**\n * Provide a stream of chunks from a URL.\n *\n * Note this class first downloads the entire file into memory before providing\n * the first element from the stream. This is because the Fetch API does not\n * yet reliably provide a reader stream for the response body.\n */\nexport async function urlChunkIterator(url, options = {}) {\n let urlString;\n let requestInit;\n if ((typeof url) === 'string') {\n urlString = url;\n }\n else {\n urlString = url.url;\n requestInit = getRequestInitFromRequest(url);\n }\n const response = await util.fetch(urlString, requestInit);\n if (response.ok) {\n const uint8Array = new Uint8Array(await response.arrayBuffer());\n return new FileChunkIterator(uint8Array, options);\n }\n else {\n throw new Error(response.statusText);\n }\n}\n// Generate RequestInit from Request to match tf.util.fetch signature.\nconst getRequestInitFromRequest = (request) => {\n const init = {\n method: request.method,\n headers: request.headers,\n body: request.body,\n mode: request.mode,\n credentials: request.credentials,\n cache: request.cache,\n redirect: request.redirect,\n referrer: request.referrer,\n integrity: request.integrity,\n };\n return init;\n};\n//# sourceMappingURL=url_chunk_iterator.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\n// Skip tslint any type check cause this method is aiming to check type of\n// input.\n// tslint:disable-next-line:no-any\nexport function isLocalPath(source) {\n return (typeof source === 'string') && source.substr(0, 7) === 'file://';\n}\n//# sourceMappingURL=source_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { env } from '@tensorflow/tfjs-core';\nimport { DataSource } from '../datasource';\nimport { FileChunkIterator } from '../iterators/file_chunk_iterator';\nimport { isLocalPath } from '../util/source_util';\n/**\n * Represents a file, blob, or Uint8Array readable as a stream of binary data\n * chunks.\n */\nexport class FileDataSource extends DataSource {\n /**\n * Create a `FileDataSource`.\n *\n * @param input Local file path, or `File`/`Blob`/`Uint8Array` object to\n * read. Local file only works in node environment.\n * @param options Options passed to the underlying `FileChunkIterator`s,\n * such as {chunksize: 1024}.\n */\n constructor(input, options = {}) {\n super();\n this.input = input;\n this.options = options;\n }\n async iterator() {\n if (isLocalPath(this.input) && env().get('IS_NODE')) {\n // tslint:disable-next-line:no-require-imports\n const fs = require('fs');\n this.input = fs.readFileSync(this.input.substr(7));\n }\n // TODO(kangyizhang): Add LocalFileChunkIterator to split local streaming\n // with file in browser.\n return new FileChunkIterator(this.input, this.options);\n }\n}\n//# sourceMappingURL=file_data_source.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { DataSource } from '../datasource';\nimport { urlChunkIterator } from '../iterators/url_chunk_iterator';\nimport { isLocalPath } from '../util/source_util';\nimport { FileDataSource } from './file_data_source';\n/*\n * Represents a URL readable as a stream of binary data chunks.\n */\nexport class URLDataSource extends DataSource {\n /**\n * Create a `URLDataSource`.\n *\n * @param url A source URL string, or a `Request` object.\n * @param options Options passed to the underlying `FileChunkIterator`s,\n * such as {chunksize: 1024}.\n */\n constructor(url, fileOptions = {}) {\n super();\n this.url = url;\n this.fileOptions = fileOptions;\n }\n // TODO(soergel): provide appropriate caching options. Currently this\n // will download the URL anew for each call to iterator(). Since we have\n // to treat the downloaded file as a blob/buffer anyway, we may as well retain\n // it-- but that raises GC issues. 
Also we may want a persistent disk cache.\n async iterator() {\n if (isLocalPath(this.url)) {\n return (new FileDataSource(this.url, this.fileOptions))\n .iterator();\n }\n else {\n return urlChunkIterator(this.url, this.fileOptions);\n }\n }\n}\n//# sourceMappingURL=url_data_source.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { datasetFromIteratorFn } from './dataset';\nimport { CSVDataset } from './datasets/csv_dataset';\nimport { iteratorFromFunction } from './iterators/lazy_iterator';\nimport { MicrophoneIterator } from './iterators/microphone_iterator';\nimport { WebcamIterator } from './iterators/webcam_iterator';\nimport { URLDataSource } from './sources/url_data_source';\n/**\n * Create a `CSVDataset` by reading and decoding CSV file(s) from provided URL\n * or local path if it's in Node environment.\n *\n * Note: If isLabel in columnConfigs is `true` for at least one column, the\n * element in returned `CSVDataset` will be an object of\n * `{xs:features, ys:labels}`: xs is a dict of features key/value pairs, ys\n * is a dict of labels key/value pairs. If no column is marked as label,\n * returns a dict of features only.\n *\n * ```js\n * const csvUrl =\n * 'https://storage.googleapis.com/tfjs-examples/multivariate-linear-regression/data/boston-housing-train.csv';\n *\n * async function run() {\n * // We want to predict the column \"medv\", which represents a median value of\n * // a home (in $1000s), so we mark it as a label.\n * const csvDataset = tf.data.csv(\n * csvUrl, {\n * columnConfigs: {\n * medv: {\n * isLabel: true\n * }\n * }\n * });\n *\n * // Number of features is the number of column names minus one for the label\n * // column.\n * const numOfFeatures = (await csvDataset.columnNames()).length - 1;\n *\n * // Prepare the Dataset for training.\n * const flattenedDataset =\n * csvDataset\n * .map(({xs, ys}) =>\n * {\n * // Convert xs(features) and ys(labels) from object form (keyed by\n * // column name) to array form.\n * return {xs:Object.values(xs), ys:Object.values(ys)};\n * })\n * .batch(10);\n *\n * // Define the model.\n * const model = tf.sequential();\n * model.add(tf.layers.dense({\n * inputShape: [numOfFeatures],\n * units: 1\n * }));\n * model.compile({\n * optimizer: tf.train.sgd(0.000001),\n * loss: 'meanSquaredError'\n * });\n *\n * // Fit the model using the prepared Dataset\n * return model.fitDataset(flattenedDataset, {\n * epochs: 10,\n * callbacks: {\n * onEpochEnd: async (epoch, logs) => {\n * console.log(epoch + ':' + logs.loss);\n * }\n * }\n * });\n * }\n *\n * await run();\n * ```\n *\n * @param source URL or local path to get CSV file. 
If it's a local path, it\n * must have prefix `file://` and it only works in node environment.\n * @param csvConfig (Optional) A CSVConfig object that contains configurations\n * of reading and decoding from CSV file(s).\n *\n * @doc {\n * heading: 'Data',\n * subheading: 'Creation',\n * namespace: 'data',\n * configParamIndices: [1]\n * }\n */\nexport function csv(source, csvConfig = {}) {\n return new CSVDataset(new URLDataSource(source), csvConfig);\n}\n/**\n * Create a `Dataset` that produces each element by calling a provided function.\n *\n * Note that repeated iterations over this `Dataset` may produce different\n * results, because the function will be called anew for each element of each\n * iteration.\n *\n * Also, beware that the sequence of calls to this function may be out of order\n * in time with respect to the logical order of the Dataset. This is due to the\n * asynchronous lazy nature of stream processing, and depends on downstream\n * transformations (e.g. .shuffle()). If the provided function is pure, this is\n * no problem, but if it is a closure over a mutable state (e.g., a traversal\n * pointer), then the order of the produced elements may be scrambled.\n *\n * ```js\n * let i = -1;\n * const func = () =>\n * ++i < 5 ? {value: i, done: false} : {value: null, done: true};\n * const ds = tf.data.func(func);\n * await ds.forEachAsync(e => console.log(e));\n * ```\n *\n * @param f A function that produces one data element on each call.\n */\nexport function func(f) {\n const iter = iteratorFromFunction(f);\n return datasetFromIteratorFn(async () => iter);\n}\n/**\n * Create a `Dataset` that produces each element from provided JavaScript\n * generator, which is a function*\n * (https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Iterators_and_Generators#Generator_functions),\n * or a function that returns an\n * iterator\n * (https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Iterators_and_Generators#Generator_functions).\n *\n * The returned iterator should have `.next()` function that returns element in\n * format of `{value: TensorContainer, done:boolean}`.\n *\n * Example of creating a dataset from an iterator factory:\n * ```js\n * function makeIterator() {\n * const numElements = 10;\n * let index = 0;\n *\n * const iterator = {\n * next: () => {\n * let result;\n * if (index < numElements) {\n * result = {value: index, done: false};\n * index++;\n * return result;\n * }\n * return {value: index, done: true};\n * }\n * };\n * return iterator;\n * }\n * const ds = tf.data.generator(makeIterator);\n * await ds.forEachAsync(e => console.log(e));\n * ```\n *\n * Example of creating a dataset from a generator:\n * ```js\n * function* dataGenerator() {\n * const numElements = 10;\n * let index = 0;\n * while (index < numElements) {\n * const x = index;\n * index++;\n * yield x;\n * }\n * }\n *\n * const ds = tf.data.generator(dataGenerator);\n * await ds.forEachAsync(e => console.log(e));\n * ```\n *\n * @param generator A Javascript generator function that returns a JavaScript\n * iterator.\n *\n * @doc {\n * heading: 'Data',\n * subheading: 'Creation',\n * namespace: 'data',\n * configParamIndices: [1]\n * }\n */\nexport function generator(generator) {\n return datasetFromIteratorFn(async () => {\n const gen = await generator();\n return iteratorFromFunction(() => gen.next());\n });\n}\n/**\n * Create an iterator that generate `Tensor`s from webcam video stream. 
This API\n * only works in Browser environment when the device has webcam.\n *\n * Note: this code snippet only works when the device has a webcam. It will\n * request permission to open the webcam when running.\n * ```js\n * const videoElement = document.createElement('video');\n * videoElement.width = 100;\n * videoElement.height = 100;\n * const cam = await tf.data.webcam(videoElement);\n * const img = await cam.capture();\n * img.print();\n * cam.stop();\n * ```\n *\n * @param webcamVideoElement A `HTMLVideoElement` used to play video from\n * webcam. If this element is not provided, a hidden `HTMLVideoElement` will\n * be created. In that case, `resizeWidth` and `resizeHeight` must be\n * provided to set the generated tensor shape.\n * @param webcamConfig A `WebcamConfig` object that contains configurations of\n * reading and manipulating data from webcam video stream.\n *\n * @doc {\n * heading: 'Data',\n * subheading: 'Creation',\n * namespace: 'data',\n * ignoreCI: true\n * }\n */\nexport async function webcam(webcamVideoElement, webcamConfig) {\n return WebcamIterator.create(webcamVideoElement, webcamConfig);\n}\n/**\n * Create an iterator that generate frequency-domain spectrogram `Tensor`s from\n * microphone audio stream with browser's native FFT. This API only works in\n * browser environment when the device has microphone.\n *\n * Note: this code snippet only works when the device has a microphone. It will\n * request permission to open the microphone when running.\n * ```js\n * const mic = await tf.data.microphone({\n * fftSize: 1024,\n * columnTruncateLength: 232,\n * numFramesPerSpectrogram: 43,\n * sampleRateHz:44100,\n * includeSpectrogram: true,\n * includeWaveform: true\n * });\n * const audioData = await mic.capture();\n * const spectrogramTensor = audioData.spectrogram;\n * spectrogramTensor.print();\n * const waveformTensor = audioData.waveform;\n * waveformTensor.print();\n * mic.stop();\n * ```\n *\n * @param microphoneConfig A `MicrophoneConfig` object that contains\n * configurations of reading audio data from microphone.\n *\n * @doc {\n * heading: 'Data',\n * subheading: 'Creation',\n * namespace: 'data',\n * ignoreCI: true\n * }\n */\nexport async function microphone(microphoneConfig) {\n return MicrophoneIterator.create(microphoneConfig);\n}\n//# sourceMappingURL=readers.js.map", "/** @license See the LICENSE file. */\n// This code is auto-generated, do not modify this file!\nconst version = '2.7.0';\nexport { version };\n//# sourceMappingURL=version.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport { array, Dataset, zip } from './dataset';\nexport { CSVDataset } from './datasets/csv_dataset';\nexport { TextLineDataset } from './datasets/text_line_dataset';\nexport { csv, func, generator, microphone, webcam } from './readers';\nexport { FileDataSource } from './sources/file_data_source';\nexport { URLDataSource } from './sources/url_data_source';\nexport { version as version_data } from './version';\n//# sourceMappingURL=index.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nexport function assertNotComplex(tensor, opName) {\n if (!Array.isArray(tensor)) {\n tensor = [tensor];\n }\n tensor.forEach(t => {\n if (t != null) {\n util.assert(t.dtype !== 'complex64', () => `${opName} does not support complex64 tensors in the CPU backend.`);\n }\n });\n}\n//# sourceMappingURL=cpu_util.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as tf from '@tensorflow/tfjs-core';\nimport { backend_util, DataStorage, engine, env, kernel_impls, KernelBackend, max, slice_util, TensorBuffer, upcastType, util } from '@tensorflow/tfjs-core';\nconst nonMaxSuppressionV3Impl = kernel_impls.nonMaxSuppressionV3Impl;\nconst split = kernel_impls.split;\nconst tile = kernel_impls.tile;\nconst topkImpl = kernel_impls.topkImpl;\nconst whereImpl = kernel_impls.whereImpl;\nimport * as seedrandom from 'seedrandom';\nimport { assertNotComplex } from './cpu_util';\nexport class MathBackendCPU extends KernelBackend {\n constructor() {\n super();\n this.blockSize = 48;\n this.firstUse = true;\n this.data = new DataStorage(this, engine());\n }\n write(values, shape, dtype) {\n if (this.firstUse) {\n this.firstUse = false;\n if (env().get('IS_NODE')) {\n backend_util.warn('\\n============================\\n' +\n 'Hi there \uD83D\uDC4B. Looks like you are running TensorFlow.js in ' +\n 'Node.js. To speed things up dramatically, install our node ' +\n 'backend, which binds to TensorFlow C++, by running ' +\n 'npm i @tensorflow/tfjs-node, ' +\n 'or npm i @tensorflow/tfjs-node-gpu if you have CUDA. ' +\n 'Then call require(\\'@tensorflow/tfjs-node\\'); (-gpu ' +\n 'suffix for CUDA) at the start of your program. ' +\n 'Visit https://github.com/tensorflow/tfjs-node for more details.' +\n '\\n============================');\n }\n }\n const dataId = {};\n this.data.set(dataId, { values, dtype, refCount: 1 });\n return dataId;\n }\n /**\n * Create a data bucket in cpu backend.\n * @param shape Shape of the `TensorInfo`.\n * @param dtype DType of the `TensorInfo`.\n * @param values The value of the `TensorInfo` stored as a flattened array.\n */\n makeTensorInfo(shape, dtype, values) {\n let outId;\n if (dtype === 'string' && values != null && values.length > 0 &&\n util.isString(values[0])) {\n const encodedValues = values.map(d => util.encodeString(d));\n outId = this.write(encodedValues, shape, dtype);\n }\n else {\n outId = this.write(values, shape, dtype);\n }\n return { dataId: outId, shape, dtype };\n }\n /** Increase refCount of a `TensorData`. */\n incRef(dataId) {\n const tensorData = this.data.get(dataId);\n tensorData.refCount++;\n }\n /** Decrease refCount of a `TensorData`. 
*/\n decRef(dataId) {\n if (this.data.has(dataId)) {\n const tensorData = this.data.get(dataId);\n tensorData.refCount--;\n }\n }\n move(dataId, values, shape, dtype) {\n this.data.set(dataId, { values, dtype, refCount: 1 });\n }\n numDataIds() {\n return this.data.numDataIds();\n }\n async read(dataId) {\n return this.readSync(dataId);\n }\n readSync(dataId) {\n const { dtype, complexTensorInfos } = this.data.get(dataId);\n if (dtype === 'complex64') {\n const realValues = this.readSync(complexTensorInfos.real.dataId);\n const imagValues = this.readSync(complexTensorInfos.imag.dataId);\n return backend_util.mergeRealAndImagArrays(realValues, imagValues);\n }\n return this.data.get(dataId).values;\n }\n bufferSync(t) {\n const data = this.readSync(t.dataId);\n let decodedData = data;\n if (t.dtype === 'string') {\n try {\n // Decode the bytes into string.\n decodedData = data.map(d => util.decodeString(d));\n }\n catch (_a) {\n throw new Error('Failed to decode encoded string bytes into utf-8');\n }\n }\n return tf.buffer(t.shape, t.dtype, decodedData);\n }\n makeOutput(values, shape, dtype) {\n const dataId = this.write(values, shape, dtype);\n return engine().makeTensorFromDataId(dataId, shape, dtype, this);\n }\n disposeData(dataId) {\n if (this.data.has(dataId)) {\n const { complexTensorInfos } = this.data.get(dataId);\n if (complexTensorInfos != null) {\n this.disposeData(complexTensorInfos.real.dataId);\n this.disposeData(complexTensorInfos.imag.dataId);\n }\n this.data.delete(dataId);\n }\n }\n disposeIntermediateTensorInfo(tensorInfo) {\n const dataId = tensorInfo.dataId;\n if (this.data.has(dataId)) {\n const tensorData = this.data.get(dataId);\n tensorData.refCount--;\n if (tensorData.refCount < 1) {\n this.disposeData(dataId);\n }\n }\n }\n async time(f) {\n const start = util.now();\n f();\n const kernelMs = util.now() - start;\n return { kernelMs };\n }\n memory() {\n return {\n // Unreliable due to automatic gc. The numbers above are cumulative.\n unreliable: true,\n reasons: ['The reported memory is an upper bound. 
Due to automatic garbage ' +\n 'collection, the true allocated memory may be less.']\n };\n }\n stridedSlice(x, begin, end, strides) {\n assertNotComplex(x, 'stridedSlice');\n const outShape = slice_util.computeOutShape(begin, end, strides);\n if (outShape.some(axis => axis === 0)) {\n return tf.tensor([], outShape);\n }\n const buffer = tf.buffer(outShape, x.dtype);\n const xBuf = this.bufferSync(x);\n for (let i = 0; i < buffer.size; i++) {\n const loc = buffer.indexToLoc(i);\n const newLoc = new Array(loc.length);\n for (let j = 0; j < newLoc.length; j++) {\n newLoc[j] = loc[j] * strides[j] + begin[j];\n }\n buffer.set(xBuf.get(...newLoc), ...loc);\n }\n return buffer.toTensor();\n }\n diag(x) {\n const xVals = this.readSync(x.dataId);\n const buffer = tf.buffer([x.size, x.size], x.dtype);\n const vals = buffer.values;\n for (let i = 0; i < xVals.length; i++) {\n vals[i * x.size + i] = xVals[i];\n }\n return buffer.toTensor();\n }\n unstack(x, axis) {\n const num = x.shape[axis];\n const outShape = new Array(x.rank - 1);\n let outIndex = 0;\n for (let i = 0; i < x.rank; i++) {\n if (i !== axis) {\n outShape[outIndex++] = x.shape[i];\n }\n }\n const begin = new Array(x.rank).fill(0);\n const size = x.shape.slice();\n size[axis] = 1;\n const res = new Array(num);\n for (let i = 0; i < res.length; i++) {\n begin[axis] = i;\n res[i] = tf.slice(x, begin, size).reshape(outShape);\n }\n return res;\n }\n reverse(x, axis) {\n assertNotComplex(x, 'reverse');\n const buffer = tf.buffer(x.shape, x.dtype);\n const xBuf = this.bufferSync(x);\n for (let i = 0; i < buffer.size; i++) {\n const outLoc = buffer.indexToLoc(i);\n const inLoc = outLoc.slice();\n axis.forEach(ax => inLoc[ax] = x.shape[ax] - 1 - inLoc[ax]);\n buffer.set(xBuf.get(...inLoc), ...outLoc);\n }\n return buffer.toTensor();\n }\n neg(x) {\n assertNotComplex(x, 'neg');\n // TODO(lina128): Use mul directly once neg is modularized.\n return tf.mul(tf.scalar(-1), x);\n }\n addN(tensors) {\n assertNotComplex(tensors, 'addN');\n const vals = tensors.map(t => this.readSync(t.dataId));\n const result = tf.buffer(tensors[0].shape, tensors[0].dtype);\n const resultVals = result.values;\n for (let i = 0; i < tensors.length; i++) {\n const currVals = vals[i];\n for (let j = 0; j < resultVals.length; j++) {\n resultVals[j] += currVals[j];\n }\n }\n return result.toTensor();\n }\n softmax(logits, dim) {\n const axes = util.parseAxisParam([dim], logits.shape);\n // TODO(annxingyuan): Call maxImpl rather than op as part of softmax kernel\n // modularization.\n const maxLogit = max(logits, axes);\n const expandedShape = backend_util.expandShapeToKeepDim(maxLogit.shape, axes);\n // TODO(lina128): Use sub directly once softmax is modularized.\n const a = tf.sub(logits, maxLogit.reshape(expandedShape));\n const b = tf.exp(a);\n const sumExp = this.sum(b, axes).reshape(expandedShape);\n // TODO(annxingyuan): Call divImpl rather than op as part of softmax\n // kernel modularization.\n return tf.div(b, sumExp);\n }\n pow(a, b) {\n assertNotComplex([a, b], 'pow');\n return this.broadcastedBinaryOp(a, b, a.dtype, (aValue, bValue) => Math.pow(aValue, bValue));\n }\n floorDiv(a, b) {\n assertNotComplex([a, b], 'floorDiv');\n const op = (a, b) => Math.floor(a / b);\n const outputDtype = 'int32';\n return this.broadcastedBinaryOp(a, b, outputDtype, op);\n }\n sum(x, axes) {\n assertNotComplex(x, 'sum');\n backend_util.assertAxesAreInnerMostDims('sum', axes, x.rank);\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const 
resultDtype = upcastType(x.dtype, 'int32');\n const result = tf.zeros(outShape, resultDtype);\n const reduceSize = util.sizeFromShape(reduceShape);\n const vals = this.readSync(result.dataId);\n const aVals = this.readSync(x.dataId);\n for (let i = 0; i < vals.length; ++i) {\n const offset = i * reduceSize;\n let sum = 0;\n for (let j = 0; j < reduceSize; ++j) {\n sum += aVals[offset + j];\n }\n vals[i] = sum;\n }\n return result;\n }\n prod(x, axes) {\n assertNotComplex(x, 'sum');\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const resultDtype = upcastType(x.dtype, 'int32');\n const result = tf.zeros(outShape, resultDtype);\n const reduceSize = util.sizeFromShape(reduceShape);\n const vals = this.readSync(result.dataId);\n const aVals = this.readSync(x.dataId);\n for (let i = 0; i < vals.length; ++i) {\n const offset = i * reduceSize;\n let prod = 1;\n for (let j = 0; j < reduceSize; ++j) {\n prod *= aVals[offset + j];\n }\n vals[i] = prod;\n }\n return result;\n }\n unsortedSegmentSum(x, segmentIds, numSegments) {\n assertNotComplex(x, 'unsortedSegmentSum');\n const res = [];\n // Reshape the segment id's so that they can be broadcast with\n // x. The new shape should be [segmentIds.shape, 1, ..., 1]\n const numIters = x.rank - segmentIds.rank;\n for (let i = 0; i < numIters; ++i) {\n segmentIds = segmentIds.expandDims(i + 1);\n }\n for (let i = 0; i < numSegments; ++i) {\n const segmentId = tf.scalar(i, 'int32');\n const mask = tf.equal(segmentId, segmentIds).asType('float32');\n const sum = mask.mul(x).sum(0);\n res.push(sum);\n }\n return tf.stack(res);\n }\n argMin(x, axis) {\n assertNotComplex(x, 'argMin');\n const axes = [axis];\n backend_util.assertAxesAreInnerMostDims('argMin', axes, x.rank);\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const result = tf.zeros(outShape, 'int32');\n const reduceSize = util.sizeFromShape(reduceShape);\n const vals = this.readSync(result.dataId);\n const aVals = this.readSync(x.dataId);\n for (let i = 0; i < vals.length; ++i) {\n const offset = i * reduceSize;\n let min = aVals[offset];\n let minIndex = 0;\n for (let j = 0; j < reduceSize; ++j) {\n const value = aVals[offset + j];\n if (value < min) {\n min = value;\n minIndex = j;\n }\n }\n vals[i] = minIndex;\n }\n return result;\n }\n argMax(x, axis) {\n assertNotComplex(x, 'argMax');\n const axes = [axis];\n backend_util.assertAxesAreInnerMostDims('argMax', axes, x.rank);\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const result = tf.zeros(outShape, 'int32');\n const reduceSize = util.sizeFromShape(reduceShape);\n const vals = this.readSync(result.dataId);\n const aVals = this.readSync(x.dataId);\n for (let i = 0; i < vals.length; ++i) {\n const offset = i * reduceSize;\n let max = aVals[offset];\n let maxIndex = 0;\n for (let j = 0; j < reduceSize; ++j) {\n const value = aVals[offset + j];\n if (value > max) {\n max = value;\n maxIndex = j;\n }\n }\n vals[i] = maxIndex;\n }\n return result;\n }\n cumsum(x, axis, exclusive, reverse) {\n assertNotComplex(x, 'cumsum');\n if (axis !== x.rank - 1) {\n throw new Error(`backend.cumsum in CPU expects an inner-most axis=${x.rank - 1} ` +\n `but got axis=${axis}`);\n }\n const resultDtype = upcastType(x.dtype, 'int32');\n const result = tf.zeros(x.shape, resultDtype);\n const vals = this.readSync(result.dataId);\n const aVals = this.readSync(x.dataId);\n const finalDim = x.shape[x.rank - 1];\n const indexAdjuster = 
reverse ?\n (i, j) => i + finalDim - j - 1 :\n (i, j) => i + j;\n for (let i = 0; i < aVals.length; i += finalDim) {\n for (let j = 0; j < finalDim; j++) {\n const idx = indexAdjuster(i, j);\n if (j === 0) {\n vals[idx] = exclusive ? 0 : aVals[idx];\n }\n else {\n const prevIdx = indexAdjuster(i, j - 1);\n vals[idx] = exclusive ? aVals[prevIdx] + vals[prevIdx] :\n aVals[idx] + vals[prevIdx];\n }\n }\n }\n return result;\n }\n equal(a, b) {\n assertNotComplex([a, b], 'equal');\n return this.broadcastedBinaryOp(a, b, 'bool', (aVal, bVal) => {\n return (aVal === bVal) ? 1 : 0;\n });\n }\n notEqual(a, b) {\n assertNotComplex([a, b], 'notEqual');\n return this.broadcastedBinaryOp(a, b, 'bool', (aVal, bVal) => {\n return (aVal !== bVal) ? 1 : 0;\n });\n }\n less(a, b) {\n assertNotComplex([a, b], 'less');\n return this.broadcastedBinaryOp(a, b, 'bool', (aVal, bVal) => {\n return (aVal < bVal) ? 1 : 0;\n });\n }\n lessEqual(a, b) {\n assertNotComplex([a, b], 'lessEqual');\n return this.broadcastedBinaryOp(a, b, 'bool', (aVal, bVal) => {\n return (aVal <= bVal) ? 1 : 0;\n });\n }\n greater(a, b) {\n assertNotComplex([a, b], 'greater');\n return this.broadcastedBinaryOp(a, b, 'bool', (aVal, bVal) => {\n return (aVal > bVal) ? 1 : 0;\n });\n }\n greaterEqual(a, b) {\n assertNotComplex([a, b], 'greaterEqual');\n return this.broadcastedBinaryOp(a, b, 'bool', (aVal, bVal) => {\n return (aVal >= bVal) ? 1 : 0;\n });\n }\n logicalAnd(a, b) {\n assertNotComplex([a, b], 'logicalAnd');\n return this.broadcastedBinaryOp(a, b, 'bool', (aVal, bVal) => {\n return aVal && bVal;\n });\n }\n logicalOr(a, b) {\n assertNotComplex([a, b], 'logicalOr');\n return this.broadcastedBinaryOp(a, b, 'bool', (aVal, bVal) => {\n return aVal || bVal;\n });\n }\n select(condition, a, b) {\n assertNotComplex([condition, a, b], 'select');\n const values = this.readSync(condition.dataId);\n const aValues = this.readSync(a.dataId);\n const bValues = this.readSync(b.dataId);\n const result = tf.zeros(a.shape, upcastType(a.dtype, b.dtype));\n const newValues = this.readSync(result.dataId);\n let index = 0;\n const offset = condition.rank === 0 || condition.rank > 1 || a.rank === 1 ?\n 1 :\n util.sizeFromShape(a.shape.slice(1));\n for (let i = 0; i < values.length; i++) {\n for (let j = 0; j < offset; j++) {\n if (values[i] === 1) {\n newValues[index++] = aValues[i];\n }\n else {\n newValues[index++] = bValues[i];\n }\n }\n }\n return result;\n }\n where(condition) {\n assertNotComplex([condition], 'where');\n const condVals = this.readSync(condition.dataId);\n return whereImpl(condition.shape, condVals);\n }\n topk(x, k, sorted) {\n assertNotComplex(x, 'topk');\n const xVals = this.readSync(x.dataId);\n return topkImpl(xVals, x.shape, x.dtype, k, sorted);\n }\n min(x, axes) {\n assertNotComplex(x, 'min');\n backend_util.assertAxesAreInnerMostDims('min', axes, x.rank);\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const result = tf.zeros(outShape, x.dtype);\n const reduceSize = util.sizeFromShape(reduceShape);\n const vals = this.readSync(result.dataId);\n const aVals = this.readSync(x.dataId);\n for (let i = 0; i < vals.length; ++i) {\n const offset = i * reduceSize;\n let min = aVals[offset];\n for (let j = 0; j < reduceSize; ++j) {\n const value = aVals[offset + j];\n if (value < min) {\n min = value;\n }\n }\n vals[i] = min;\n }\n return result;\n }\n minimum(a, b) {\n assertNotComplex([a, b], 'minimum');\n return this.broadcastedBinaryOp(a, b, a.dtype, (aVal, bVal) => Math.min(aVal, 
bVal));\n }\n mod(a, b) {\n assertNotComplex([a, b], 'mod');\n return this.broadcastedBinaryOp(a, b, a.dtype, (aVal, bVal) => {\n const rem = aVal % bVal;\n if ((aVal < 0 && bVal < 0) || (aVal >= 0 && bVal >= 0)) {\n return rem;\n }\n else {\n return (rem + bVal) % bVal;\n }\n });\n }\n maximum(a, b) {\n assertNotComplex([a, b], 'maximum');\n return this.broadcastedBinaryOp(a, b, a.dtype, (aVal, bVal) => Math.max(aVal, bVal));\n }\n all(x, axes) {\n assertNotComplex(x, 'all');\n backend_util.assertAxesAreInnerMostDims('all', axes, x.rank);\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const result = tf.zeros(outShape, x.dtype);\n const reduceSize = util.sizeFromShape(reduceShape);\n const vals = this.readSync(result.dataId);\n const aVals = this.readSync(x.dataId);\n for (let i = 0; i < vals.length; ++i) {\n const offset = i * reduceSize;\n let all = aVals[offset];\n for (let j = 0; j < reduceSize; ++j) {\n const value = aVals[offset + j];\n all = all && value;\n }\n vals[i] = all;\n }\n return result;\n }\n any(x, axes) {\n assertNotComplex(x, 'any');\n backend_util.assertAxesAreInnerMostDims('any', axes, x.rank);\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const result = tf.zeros(outShape, x.dtype);\n const reduceSize = util.sizeFromShape(reduceShape);\n const vals = this.readSync(result.dataId);\n const aVals = this.readSync(x.dataId);\n for (let i = 0; i < vals.length; ++i) {\n const offset = i * reduceSize;\n let anyVal = aVals[offset];\n for (let j = 0; j < reduceSize; ++j) {\n const value = aVals[offset + j];\n anyVal = anyVal || value;\n }\n vals[i] = anyVal;\n }\n return result;\n }\n squaredDifference(a, b) {\n assertNotComplex([a, b], 'squaredDifference');\n return this.broadcastedBinaryOp(a, b, a.dtype, (aVal, bVal) => {\n const diff = aVal - bVal;\n return diff * diff;\n });\n }\n eluDer(dy, y) {\n assertNotComplex([dy, y], 'eluDer');\n const resultValues = new Float32Array(y.size);\n const values = this.readSync(y.dataId);\n const dyValues = this.readSync(dy.dataId);\n for (let i = 0; i < values.length; ++i) {\n const v = values[i];\n if (v >= 1) {\n resultValues[i] = dyValues[i];\n }\n else {\n resultValues[i] = dyValues[i] * (v + 1);\n }\n }\n return this.makeOutput(resultValues, y.shape, 'float32');\n }\n atan2(a, b) {\n assertNotComplex([a, b], 'atan2');\n return this.broadcastedBinaryOp(a, b, a.dtype, (aValue, bValue) => Math.atan2(aValue, bValue));\n }\n tile(x, reps) {\n assertNotComplex(x, 'tile');\n return tile(this.bufferSync(x), reps);\n }\n gather(x, indices, axis) {\n assertNotComplex([x, indices], 'gather');\n const newShape = x.shape.slice();\n const indicesValues = this.readSync(indices.dataId);\n newShape[axis] = indicesValues.length;\n const result = tf.buffer(newShape, x.dtype);\n const xBuf = this.bufferSync(x);\n for (let i = 0; i < result.size; ++i) {\n const newLoc = result.indexToLoc(i);\n const originalLoc = newLoc.slice();\n originalLoc[axis] = indicesValues[newLoc[axis]];\n const originalIndex = xBuf.locToIndex(originalLoc);\n result.values[i] = xBuf.values[originalIndex];\n }\n return result.toTensor();\n }\n batchToSpaceND(x, blockShape, crops) {\n assertNotComplex([x], 'batchToSpaceND');\n const prod = blockShape.reduce((a, b) => a * b);\n const reshaped = backend_util.getReshaped(x.shape, blockShape, prod);\n const permuted = backend_util.getPermuted(reshaped.length, blockShape.length);\n const reshapedPermuted = backend_util.getReshapedPermuted(x.shape, 
blockShape, prod);\n const sliceBeginCoords = backend_util.getSliceBeginCoords(crops, blockShape.length);\n const sliceSize = backend_util.getSliceSize(reshapedPermuted, crops, blockShape.length);\n return tf.transpose(x.reshape(reshaped), permuted)\n .reshape(reshapedPermuted)\n .slice(sliceBeginCoords, sliceSize);\n }\n pool3d(x, convInfo, poolType) {\n assertNotComplex(x, 'pool3d');\n const strideDepth = convInfo.strideDepth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationDepth = convInfo.dilationDepth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterDepth = convInfo.effectiveFilterDepth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padFront = convInfo.padInfo.front;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n const initialValue = (poolType === 'max' ? Number.NEGATIVE_INFINITY :\n Number.POSITIVE_INFINITY);\n const xValues = this.readSync(x.dataId);\n const output = tf.buffer(convInfo.outShape, x.dtype);\n const outputVals = output.values;\n const outputBatchStrides = convInfo.outShape[1] * convInfo.outShape[2] *\n convInfo.outShape[3] * convInfo.outShape[4];\n const outputDepthStrides = convInfo.outShape[2] * convInfo.outShape[3] * convInfo.outShape[4];\n const outputRowStrides = convInfo.outShape[3] * convInfo.outShape[4];\n const outputColStrides = convInfo.outShape[4];\n for (let batch = 0; batch < convInfo.batchSize; ++batch) {\n const outputBatchOffset = batch * outputBatchStrides;\n const inputBatchOffset = batch * x.strides[0];\n for (let channel = 0; channel < convInfo.inChannels; ++channel) {\n for (let yDepth = 0; yDepth < convInfo.outDepth; ++yDepth) {\n const xDepthCorner = yDepth * strideDepth - padFront;\n let xDepthMin = xDepthCorner;\n while (xDepthMin < 0) {\n xDepthMin += dilationDepth;\n }\n const xDepthMax = Math.min(convInfo.inDepth, effectiveFilterDepth + xDepthCorner);\n const outputDepthOffset = outputBatchOffset + yDepth * outputDepthStrides;\n for (let yRow = 0; yRow < convInfo.outHeight; ++yRow) {\n const xRowCorner = yRow * strideHeight - padTop;\n let xRowMin = xRowCorner;\n while (xRowMin < 0) {\n xRowMin += dilationHeight;\n }\n const xRowMax = Math.min(convInfo.inHeight, effectiveFilterHeight + xRowCorner);\n const outputRowOffset = outputDepthOffset + yRow * outputRowStrides;\n for (let yCol = 0; yCol < convInfo.outWidth; ++yCol) {\n const xColCorner = yCol * strideWidth - padLeft;\n let xColMin = xColCorner;\n while (xColMin < 0) {\n xColMin += dilationWidth;\n }\n const xColMax = Math.min(convInfo.inWidth, effectiveFilterWidth + xColCorner);\n // Shader code begins\n const outputColOffset = outputRowOffset + yCol * outputColStrides;\n let minMaxValue = initialValue;\n let avgValue = 0;\n let count = 0;\n for (let xDepth = xDepthMin; xDepth < xDepthMax; xDepth += dilationDepth) {\n const xDepthOffset = inputBatchOffset + xDepth * x.strides[1];\n for (let xRow = xRowMin; xRow < xRowMax; xRow += dilationHeight) {\n const xRowOffset = xDepthOffset + xRow * x.strides[2];\n for (let xCol = xColMin; xCol < xColMax; xCol += dilationWidth) {\n const xColOffset = xRowOffset + xCol * x.strides[3];\n const pixel = xValues[xColOffset + channel];\n if ((poolType === 'max' && pixel > minMaxValue)) {\n minMaxValue = pixel;\n }\n else if (poolType === 'avg') {\n avgValue += pixel;\n count++;\n }\n if 
(isNaN(minMaxValue)) {\n break;\n }\n }\n if (isNaN(minMaxValue)) {\n break;\n }\n }\n if (isNaN(minMaxValue)) {\n break;\n }\n }\n const outputOffset = outputColOffset + channel;\n outputVals[outputOffset] =\n poolType === 'avg' ? avgValue / count : minMaxValue;\n }\n }\n }\n }\n }\n return output.toTensor();\n }\n avgPool3d(x, convInfo) {\n assertNotComplex(x, 'avgPool3d');\n return this.pool3d(x, convInfo, 'avg').toFloat();\n }\n avgPool3dBackprop(dy, x, convInfo) {\n assertNotComplex([dy, x], 'avgPool3dBackprop');\n const strideDepth = convInfo.strideDepth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const filterDepth = convInfo.filterDepth;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const dilationDepth = convInfo.dilationDepth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterDepth = convInfo.effectiveFilterDepth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padFront = effectiveFilterDepth - 1 - convInfo.padInfo.front;\n const padLeft = effectiveFilterWidth - 1 - convInfo.padInfo.left;\n const padTop = effectiveFilterHeight - 1 - convInfo.padInfo.top;\n const dx = tf.buffer(x.shape, 'float32');\n const avgMultiplier = 1 / (filterDepth * filterHeight * filterWidth);\n const dyBuf = this.bufferSync(dy);\n for (let batch = 0; batch < convInfo.batchSize; ++batch) {\n for (let channel = 0; channel < convInfo.inChannels; ++channel) {\n for (let dxDepth = 0; dxDepth < convInfo.inDepth; ++dxDepth) {\n for (let dxRow = 0; dxRow < convInfo.inHeight; ++dxRow) {\n for (let dxCol = 0; dxCol < convInfo.inWidth; ++dxCol) {\n // Shader code begins.\n const dyDepthCorner = dxDepth - padFront;\n const dyRowCorner = dxRow - padTop;\n const dyColCorner = dxCol - padLeft;\n let dotProd = 0;\n for (let wDepth = 0; wDepth < effectiveFilterDepth; wDepth += dilationDepth) {\n const dyDepth = (dyDepthCorner + wDepth) / strideDepth;\n if (dyDepth < 0 || dyDepth >= convInfo.outDepth ||\n Math.floor(dyDepth) !== dyDepth) {\n continue;\n }\n for (let wRow = 0; wRow < effectiveFilterHeight; wRow += dilationHeight) {\n const dyRow = (dyRowCorner + wRow) / strideHeight;\n if (dyRow < 0 || dyRow >= convInfo.outHeight ||\n Math.floor(dyRow) !== dyRow) {\n continue;\n }\n for (let wCol = 0; wCol < effectiveFilterWidth; wCol += dilationWidth) {\n const dyCol = (dyColCorner + wCol) / strideWidth;\n if (dyCol < 0 || dyCol >= convInfo.outWidth ||\n Math.floor(dyCol) !== dyCol) {\n continue;\n }\n const pixel = dyBuf.get(batch, dyDepth, dyRow, dyCol, channel);\n dotProd += pixel;\n }\n }\n }\n dx.set(dotProd * avgMultiplier, batch, dxDepth, dxRow, dxCol, channel);\n }\n }\n }\n }\n }\n return dx.toTensor();\n }\n maxPool3d(x, convInfo) {\n assertNotComplex(x, 'maxPool3d');\n return this.pool3d(x, convInfo, 'max').toFloat();\n }\n maxPool3dPositions(x, convInfo) {\n const maxPositions = tf.buffer(convInfo.outShape, 'int32');\n const strideDepth = convInfo.strideDepth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationDepth = convInfo.dilationDepth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterDepth = convInfo.effectiveFilterDepth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = 
convInfo.effectiveFilterWidth;\n const padFront = convInfo.padInfo.front;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n const xBuf = this.bufferSync(x);\n for (let batch = 0; batch < convInfo.batchSize; ++batch) {\n for (let channel = 0; channel < convInfo.inChannels; ++channel) {\n for (let yDepth = 0; yDepth < convInfo.outDepth; ++yDepth) {\n const xDepthCorner = yDepth * strideDepth - padFront;\n let xDepthMin = xDepthCorner;\n while (xDepthMin < 0) {\n xDepthMin += dilationDepth;\n }\n const xDepthMax = Math.min(convInfo.inDepth, effectiveFilterDepth + xDepthCorner);\n for (let yRow = 0; yRow < convInfo.outHeight; ++yRow) {\n const xRowCorner = yRow * strideHeight - padTop;\n let xRowMin = xRowCorner;\n while (xRowMin < 0) {\n xRowMin += dilationHeight;\n }\n const xRowMax = Math.min(convInfo.inHeight, effectiveFilterHeight + xRowCorner);\n for (let yCol = 0; yCol < convInfo.outWidth; ++yCol) {\n const xColCorner = yCol * strideWidth - padLeft;\n let xColMin = xColCorner;\n while (xColMin < 0) {\n xColMin += dilationWidth;\n }\n const xColMax = Math.min(convInfo.inWidth, effectiveFilterWidth + xColCorner);\n // Shader code begins\n let maxValue = Number.NEGATIVE_INFINITY;\n let maxPosition = -1;\n for (let xDepth = xDepthMin; xDepth < xDepthMax; xDepth += dilationDepth) {\n const wDepth = xDepth - xDepthCorner;\n for (let xRow = xRowMin; xRow < xRowMax; xRow += dilationHeight) {\n const wRow = xRow - xRowCorner;\n for (let xCol = xColMin; xCol < xColMax; xCol += dilationWidth) {\n const wCol = xCol - xColCorner;\n const pixel = xBuf.get(batch, xDepth, xRow, xCol, channel);\n if (pixel >= maxValue) {\n maxValue = pixel;\n maxPosition = wDepth * effectiveFilterHeight *\n effectiveFilterWidth +\n wRow * effectiveFilterHeight + wCol;\n }\n }\n }\n }\n maxPositions.set(maxPosition, batch, yDepth, yRow, yCol, channel);\n }\n }\n }\n }\n }\n return maxPositions.toTensor();\n }\n maxPool3dBackprop(dy, x, y, convInfo) {\n assertNotComplex([x, y], 'maxPool3dBackprop');\n const maxPositions = this.maxPool3dPositions(x, convInfo);\n const strideDepth = convInfo.strideDepth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationDepth = convInfo.dilationDepth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterDepth = convInfo.effectiveFilterDepth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padFront = effectiveFilterDepth - 1 - convInfo.padInfo.front;\n const padLeft = effectiveFilterWidth - 1 - convInfo.padInfo.left;\n const padTop = effectiveFilterHeight - 1 - convInfo.padInfo.top;\n const dx = tf.buffer(x.shape, 'float32');\n const maxPosBuf = this.bufferSync(maxPositions);\n const dyBuf = this.bufferSync(dy);\n for (let batch = 0; batch < convInfo.batchSize; ++batch) {\n for (let channel = 0; channel < convInfo.inChannels; ++channel) {\n for (let dxDepth = 0; dxDepth < convInfo.inDepth; ++dxDepth) {\n for (let dxRow = 0; dxRow < convInfo.inHeight; ++dxRow) {\n for (let dxCol = 0; dxCol < convInfo.inWidth; ++dxCol) {\n // Shader code begins\n const dyDepthCorner = dxDepth - padFront;\n const dyRowCorner = dxRow - padTop;\n const dyColCorner = dxCol - padLeft;\n let dotProd = 0;\n for (let wDepth = 0; wDepth < effectiveFilterDepth; wDepth += dilationDepth) {\n const dyDepth = (dyDepthCorner + wDepth) / strideDepth;\n if (dyDepth < 0 || dyDepth >= 
convInfo.outDepth ||\n Math.floor(dyDepth) !== dyDepth) {\n continue;\n }\n for (let wRow = 0; wRow < effectiveFilterHeight; wRow += dilationHeight) {\n const dyRow = (dyRowCorner + wRow) / strideHeight;\n if (dyRow < 0 || dyRow >= convInfo.outHeight ||\n Math.floor(dyRow) !== dyRow) {\n continue;\n }\n for (let wCol = 0; wCol < effectiveFilterWidth; wCol += dilationWidth) {\n const dyCol = (dyColCorner + wCol) / strideWidth;\n if (dyCol < 0 || dyCol >= convInfo.outWidth ||\n Math.floor(dyCol) !== dyCol) {\n continue;\n }\n const maxPos = effectiveFilterDepth *\n effectiveFilterHeight * effectiveFilterWidth -\n 1 -\n maxPosBuf.get(batch, dyDepth, dyRow, dyCol, channel);\n const curPos = wDepth * effectiveFilterHeight * effectiveFilterWidth +\n wRow * effectiveFilterWidth + wCol;\n const mask = maxPos === curPos ? 1 : 0;\n if (mask === 0) {\n continue;\n }\n const pixel = dyBuf.get(batch, dyDepth, dyRow, dyCol, channel);\n dotProd += pixel * mask;\n }\n }\n }\n dx.set(dotProd, batch, dxDepth, dxRow, dxCol, channel);\n }\n }\n }\n }\n }\n return dx.toTensor();\n }\n resizeBilinear(x, newHeight, newWidth, alignCorners) {\n assertNotComplex(x, 'resizeBilinear');\n const [batch, oldHeight, oldWidth, numChannels] = x.shape;\n const xValues = this.readSync(x.dataId);\n const result = new Float32Array(util.sizeFromShape([batch, newHeight, newWidth, numChannels]));\n const effectiveInputSize = [\n (alignCorners && newHeight > 1) ? oldHeight - 1 : oldHeight,\n (alignCorners && newWidth > 1) ? oldWidth - 1 : oldWidth\n ];\n const effectiveOutputSize = [\n (alignCorners && newHeight > 1) ? newHeight - 1 : newHeight,\n (alignCorners && newWidth > 1) ? newWidth - 1 : newWidth\n ];\n let outputIdx = 0;\n const effectiveRowSizeRatio = effectiveInputSize[0] / effectiveOutputSize[0];\n const effectiveColSizeRatio = effectiveInputSize[1] / effectiveOutputSize[1];\n for (let b = 0; b < batch; b++) {\n for (let r = 0; r < newHeight; r++) {\n const sourceFracRow = effectiveRowSizeRatio * r;\n const sourceRowFloor = Math.floor(sourceFracRow);\n const rowFrac = sourceFracRow - sourceRowFloor;\n const sourceRowCeil = Math.min(oldHeight - 1, Math.ceil(sourceFracRow));\n const topRowOffset = b * x.strides[0] + sourceRowFloor * x.strides[1];\n const botRowOffset = b * x.strides[0] + sourceRowCeil * x.strides[1];\n for (let c = 0; c < newWidth; c++) {\n const sourceFracCol = effectiveColSizeRatio * c;\n const sourceColFloor = Math.floor(sourceFracCol);\n const colFrac = sourceFracCol - sourceColFloor;\n const sourceColCeil = Math.min(oldWidth - 1, Math.ceil(sourceFracCol));\n const topLeftOffest = topRowOffset + sourceColFloor * x.strides[2];\n const botLeftOffset = botRowOffset + sourceColFloor * x.strides[2];\n const topRightOffset = topRowOffset + sourceColCeil * x.strides[2];\n const botRightOffest = botRowOffset + sourceColCeil * x.strides[2];\n for (let d = 0; d < numChannels; d++) {\n // Begin shader.\n // Compute the fractional index of the source.\n const topLeft = xValues[topLeftOffest + d];\n const bottomLeft = xValues[botLeftOffset + d];\n const topRight = xValues[topRightOffset + d];\n const bottomRight = xValues[botRightOffest + d];\n const top = topLeft + (topRight - topLeft) * colFrac;\n const bottom = bottomLeft + (bottomRight - bottomLeft) * colFrac;\n const newValue = top + (bottom - top) * rowFrac;\n result[outputIdx++] = newValue;\n }\n }\n }\n }\n return tf.tensor(result, [batch, newHeight, newWidth, numChannels]);\n }\n resizeBilinearBackprop(dy, x, alignCorners) {\n assertNotComplex([dy, x], 
'resizeBilinearBackprop');\n const [batch, xHeight, xWidth, depth] = x.shape;\n const [, yHeight, yWidth] = dy.shape;\n const output = new Float32Array(batch * xHeight * xWidth * depth);\n // In the backwards pass, we want to find the pixels that were generated\n // for each pixel in the input image the forward pass and add the\n // corresponding coefficient from dy to the gradient (with some\n // interpolation).\n const effectiveXSize = [\n (alignCorners && yHeight > 1) ? xHeight - 1 : xHeight,\n (alignCorners && yWidth > 1) ? xWidth - 1 : xWidth\n ];\n const effectiveYSize = [\n (alignCorners && yHeight > 1) ? yHeight - 1 : yHeight,\n (alignCorners && yWidth > 1) ? yWidth - 1 : yWidth\n ];\n const heightScale = effectiveXSize[0] / effectiveYSize[0];\n const widthScale = effectiveXSize[1] / effectiveYSize[1];\n // Reference implementation\n // tslint:disable-next-line:max-line-length\n // https://github.com/tensorflow/tensorflow/blob/3039375c86a5bbc9610c7725dcaa95d635f87ba2/tensorflow/core/kernels/resize_bilinear_op.cc#L275\n const dyValues = this.readSync(dy.dataId);\n let offset = 0;\n for (let b = 0; b < batch; b++) {\n const bOffset = b * x.strides[0];\n for (let r = 0; r < yHeight; r++) {\n const dxR = r * heightScale;\n const topDxRIndex = Math.floor(dxR);\n const bottomDxRIndex = Math.min(Math.ceil(dxR), xHeight - 1);\n const topDxROffset = bOffset + topDxRIndex * x.strides[1];\n const bottomDxROffset = bOffset + bottomDxRIndex * x.strides[1];\n const dxRLerp = dxR - topDxRIndex;\n const inverseDxRLerp = 1.0 - dxRLerp;\n for (let c = 0; c < yWidth; c++) {\n const dxC = c * widthScale;\n const leftDxCIndex = Math.floor(dxC);\n const rightDxCIndex = Math.min(Math.ceil(dxC), xWidth - 1);\n const dxCLerp = dxC - leftDxCIndex;\n const inverseDxCLerp = 1.0 - dxCLerp;\n const topLeftRCOffset = topDxROffset + leftDxCIndex * x.strides[2];\n const topRightRCOffset = topDxROffset + rightDxCIndex * x.strides[2];\n const bottomLeftRCOffset = bottomDxROffset + leftDxCIndex * x.strides[2];\n const bottomRightRCOffset = bottomDxROffset + rightDxCIndex * x.strides[2];\n const inverseDxRLerpTimesInverseDxCLerp = inverseDxRLerp * inverseDxCLerp;\n const inverseDxRLerpTimesDxCLerp = inverseDxRLerp * dxCLerp;\n const dxRLerpTimesInverseDxCLerp = dxRLerp * inverseDxCLerp;\n const dxRLerpTimesDxCLerp = dxRLerp * dxCLerp;\n for (let d = 0; d < depth; d++) {\n const dyVal = dyValues[offset++];\n output[topLeftRCOffset + d] +=\n dyVal * inverseDxRLerpTimesInverseDxCLerp;\n output[topRightRCOffset + d] += dyVal * inverseDxRLerpTimesDxCLerp;\n output[bottomLeftRCOffset + d] +=\n dyVal * dxRLerpTimesInverseDxCLerp;\n output[bottomRightRCOffset + d] += dyVal * dxRLerpTimesDxCLerp;\n }\n }\n }\n }\n return tf.tensor4d(output, [batch, xWidth, xHeight, depth], x.dtype);\n }\n resizeNearestNeighbor(x, newHeight, newWidth, alignCorners) {\n assertNotComplex(x, 'resizeNearestNeighbor');\n const [batch, oldHeight, oldWidth, numChannels] = x.shape;\n const xValues = this.readSync(x.dataId);\n const output = new Float32Array(batch * newHeight * newWidth * numChannels);\n const effectiveInputSize = [\n (alignCorners && newHeight > 1) ? oldHeight - 1 : oldHeight,\n (alignCorners && newWidth > 1) ? oldWidth - 1 : oldWidth\n ];\n const effectiveOutputSize = [\n (alignCorners && newHeight > 1) ? newHeight - 1 : newHeight,\n (alignCorners && newWidth > 1) ? 
newWidth - 1 : newWidth\n ];\n const effectiveRowSizeRatio = effectiveInputSize[0] / effectiveOutputSize[0];\n const effectiveColSizeRatio = effectiveInputSize[1] / effectiveOutputSize[1];\n let outputOffset = 0;\n for (let b = 0; b < batch; b++) {\n const batchOffset = b * x.strides[0];\n for (let r = 0; r < newHeight; r++) {\n const sourceFracRow = effectiveRowSizeRatio * r;\n const sourceNearestRow = Math.min(oldHeight - 1, alignCorners ? Math.round(sourceFracRow) :\n Math.floor(sourceFracRow));\n const rowOffset = batchOffset + sourceNearestRow * x.strides[1];\n for (let c = 0; c < newWidth; c++) {\n const sourceFracCol = effectiveColSizeRatio * c;\n const sourceNearestCol = Math.min(oldWidth - 1, alignCorners ? Math.round(sourceFracCol) :\n Math.floor(sourceFracCol));\n const colOffset = rowOffset + sourceNearestCol * x.strides[2];\n for (let d = 0; d < numChannels; d++) {\n // Begin shader.\n // Compute the fractional index of the source.\n const newVal = xValues[colOffset + d];\n output[outputOffset++] = newVal;\n }\n }\n }\n }\n return tf.tensor(output, [batch, newHeight, newWidth, numChannels], x.dtype);\n }\n resizeNearestNeighborBackprop(dy, x, alignCorners) {\n assertNotComplex([dy, x], 'resizeNearestNeighborBackprop');\n const [batch, xHeight, xWidth, depth] = x.shape;\n const [, yHeight, yWidth] = dy.shape;\n const output = new Float32Array(batch * xHeight * xWidth * depth);\n const dyValues = this.readSync(dy.dataId);\n // In the backwards pass, we want to find the pixels that were generated\n // for each pixel in the input image the forward pass\n const effectiveXSize = [\n (alignCorners && yHeight > 1) ? xHeight - 1 : xHeight,\n (alignCorners && yWidth > 1) ? xWidth - 1 : xWidth\n ];\n const effectiveYSize = [\n (alignCorners && yHeight > 1) ? yHeight - 1 : yHeight,\n (alignCorners && yWidth > 1) ? yWidth - 1 : yWidth\n ];\n const heightScale = effectiveXSize[0] / effectiveYSize[0];\n const widthScale = effectiveXSize[1] / effectiveYSize[1];\n const invHeightScale = 1 / heightScale;\n const invWidthScale = 1 / widthScale;\n // This defines the size of the window of values around a particular\n // index in dy that we want to search for contributions to dx.\n const winHeight = (Math.ceil(invHeightScale) * 2) + 2;\n const winWidth = (Math.ceil(invWidthScale) * 2) + 2;\n // Loop over the output space.\n for (let b = 0; b < batch; b++) {\n const batchOffset = b * x.strides[0];\n for (let r = 0; r < xHeight; r++) {\n const rowOffset = batchOffset + r * x.strides[1];\n // Compute bounds for where in dy we will look\n const startRLerp = Math.floor(r * invHeightScale);\n const startDyR = Math.floor(startRLerp - (winHeight / 2));\n for (let c = 0; c < xWidth; c++) {\n const colOffset = rowOffset + c * x.strides[2];\n // Compute bounds for where in dy we will look\n const startCLerp = Math.floor(c * invWidthScale);\n const startDyC = Math.floor(startCLerp - (winWidth / 2));\n for (let d = 0; d < depth; d++) {\n let accum = 0;\n // loop over dy\n for (let dyRIndex = 0; dyRIndex < winHeight; dyRIndex++) {\n const dyR = dyRIndex + startDyR;\n // Guard against the window exceeding the bounds of dy\n if (dyR < 0 || dyR >= yHeight) {\n continue;\n }\n const dyROffset = batchOffset + dyR * dy.strides[1];\n const sourceFracRow = dyR * heightScale;\n const sourceNearestRow = Math.min(xHeight - 1, alignCorners ? 
Math.round(sourceFracRow) :\n Math.floor(sourceFracRow));\n if (r !== sourceNearestRow) {\n continue;\n }\n for (let dyCIndex = 0; dyCIndex < winWidth; dyCIndex++) {\n const dyC = dyCIndex + startDyC;\n // Guard against the window exceeding the bounds of dy\n if (dyC < 0 || dyC >= yWidth) {\n continue;\n }\n const dyCOffset = dyROffset + dyC * dy.strides[2];\n const sourceFracCol = dyC * widthScale;\n const sourceNearestCol = Math.min(xWidth - 1, alignCorners ? Math.round(sourceFracCol) :\n Math.floor(sourceFracCol));\n if (c === sourceNearestCol) {\n accum += dyValues[dyCOffset + d];\n }\n }\n }\n output[colOffset + d] = accum;\n }\n }\n }\n }\n return tf.tensor4d(output, x.shape, x.dtype);\n }\n localResponseNormalization4D(x, depthRadius, bias, alpha, beta) {\n assertNotComplex(x, 'localResponseNormalization4D');\n const channels = x.shape[3];\n const maxD = channels - 1;\n const xValues = this.readSync(x.dataId);\n const size = x.size;\n const result = new Float32Array(size);\n function sumAcrossChannels(offset) {\n const currentChannel = offset % channels;\n let beginSumOffset = offset - currentChannel + Math.max(0, currentChannel - depthRadius);\n const endSumOffset = offset - currentChannel +\n Math.min(currentChannel + depthRadius, maxD);\n let sum = 0.0;\n for (; beginSumOffset <= endSumOffset; beginSumOffset++) {\n const z = xValues[beginSumOffset];\n sum += z * z;\n }\n return sum;\n }\n for (let offset = 0; offset < size; offset++) {\n const sum = sumAcrossChannels(offset);\n const val = xValues[offset] * Math.pow(bias + alpha * sum, -beta);\n result[offset] = val;\n }\n return tf.tensor4d(result, x.shape);\n }\n LRNGrad(dy, inputImage, outputImage, depthRadius, bias, alpha, beta) {\n assertNotComplex(dy, 'LRNGrad');\n const channels = dy.shape[3];\n const dyValues = this.readSync(dy.dataId);\n const inputImageValues = this.readSync(inputImage.dataId);\n const outputImageValues = this.readSync(outputImage.dataId);\n const result = new Float32Array(dy.size);\n const size = dy.size;\n for (let offset = 0; offset < size; offset++) {\n const currentChannel = offset % channels;\n const depthBegin = (offset - currentChannel) + Math.max(0, currentChannel - depthRadius);\n const depthEnd = (offset - currentChannel) +\n Math.min(channels, currentChannel + depthRadius + 1);\n let norm = 0;\n for (let k = depthBegin; k < depthEnd; k++) {\n norm += Math.pow(inputImageValues[k], 2);\n }\n norm = alpha * norm + bias;\n for (let k = depthBegin; k < depthEnd; k++) {\n let dyi = -2 * alpha * beta * inputImageValues[k] *\n outputImageValues[offset] / norm;\n if (offset === k) {\n dyi += Math.pow(norm, -beta);\n }\n dyi *= dyValues[offset];\n result[k] += dyi;\n }\n }\n return tf.tensor4d(result, dy.shape);\n }\n multinomial(logits, normalized, numSamples, seed) {\n assertNotComplex(logits, 'multinomial');\n const probabilities = normalized ? logits : tf.softmax(logits);\n const batchSize = probabilities.shape[0];\n const numEvents = probabilities.shape[1];\n const res = tf.zeros([batchSize, numSamples], 'int32');\n const resVals = this.readSync(res.dataId);\n const probVals = this.readSync(probabilities.dataId);\n for (let b = 0; b < batchSize; ++b) {\n const offset = b * numEvents;\n // The cdf won't include the last event. 
It will be implicit if no other\n // event happened.\n const cdf = new Float32Array(numEvents - 1);\n cdf[0] = probVals[offset];\n for (let event = 1; event < cdf.length; ++event) {\n cdf[event] = cdf[event - 1] + probVals[offset + event];\n }\n const random = seedrandom.alea(seed.toString());\n const outOffset = b * numSamples;\n for (let sampleId = 0; sampleId < numSamples; ++sampleId) {\n const r = random();\n // Assume last event happened by default.\n resVals[outOffset + sampleId] = cdf.length;\n for (let event = 0; event < cdf.length; event++) {\n if (r < cdf[event]) {\n resVals[outOffset + sampleId] = event;\n break;\n }\n }\n }\n }\n return res;\n }\n oneHot(indices, depth, onValue, offValue) {\n assertNotComplex(indices, 'oneHot');\n const res = new Float32Array(indices.size * depth);\n res.fill(offValue);\n const indicesVal = this.readSync(indices.dataId);\n for (let event = 0; event < indices.size; ++event) {\n if (indicesVal[event] >= 0 && indicesVal[event] < depth) {\n res[event * depth + indicesVal[event]] = onValue;\n }\n }\n return tf.tensor2d(res, [indices.size, depth], 'int32');\n }\n nonMaxSuppression(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold) {\n assertNotComplex(boxes, 'nonMaxSuppression');\n const boxesVals = this.readSync(boxes.dataId);\n const scoresVals = this.readSync(scores.dataId);\n return nonMaxSuppressionV3Impl(boxesVals, scoresVals, maxOutputSize, iouThreshold, scoreThreshold);\n }\n depthToSpace(x, blockSize, dataFormat) {\n util.assert(dataFormat === 'NHWC', () => `Only NHWC dataFormat supported on CPU for depthToSpace. Got ${dataFormat}`);\n util.assert(blockSize > 1, () => `blockSize should be > 1 for depthToSpace, but was: ${blockSize}`);\n const batchSize = x.shape[0];\n const inputHeight = x.shape[1];\n const inputWidth = x.shape[2];\n const inputDepth = x.shape[3];\n const outputHeight = inputHeight * blockSize;\n const outputWidth = inputWidth * blockSize;\n const outputDepth = inputDepth / (blockSize * blockSize);\n const xValues = this.readSync(x.dataId);\n const result = new Float32Array(batchSize * outputHeight * outputWidth * outputDepth);\n let outputIdx = 0;\n for (let b = 0; b < batchSize; ++b) {\n for (let h = 0; h < outputHeight; ++h) {\n const inH = Math.floor(h / blockSize);\n const offsetH = (h % blockSize);\n for (let w = 0; w < outputWidth; ++w) {\n const inW = Math.floor(w / blockSize);\n const offsetW = (w % blockSize);\n const offsetD = (offsetH * blockSize + offsetW) * outputDepth;\n for (let d = 0; d < outputDepth; ++d) {\n const inD = d + offsetD;\n const inputIdx = inD + inputDepth * (inW + inputWidth * (inH + inputHeight * b));\n result[outputIdx++] = xValues[inputIdx];\n }\n }\n }\n }\n return tf.tensor4d(result, [batchSize, outputHeight, outputWidth, outputDepth]);\n }\n broadcastedBinaryOp(a, b, dtype, op) {\n const newShape = backend_util.assertAndGetBroadcastShape(a.shape, b.shape);\n const result = tf.buffer(newShape, dtype);\n const aVals = this.readSync(a.dataId);\n const bVals = this.readSync(b.dataId);\n const aBroadcastDims = backend_util.getBroadcastDims(a.shape, newShape);\n const bBroadcastDims = backend_util.getBroadcastDims(b.shape, newShape);\n const resVals = result.values;\n if (aBroadcastDims.length + bBroadcastDims.length === 0) {\n for (let i = 0; i < resVals.length; ++i) {\n resVals[i] = op(aVals[i % aVals.length], bVals[i % bVals.length]);\n }\n }\n else {\n const aBuf = this.bufferSync(a);\n const bBuf = this.bufferSync(b);\n for (let i = 0; i < resVals.length; ++i) {\n const loc = 
result.indexToLoc(i);\n const aLoc = loc.slice(-a.rank);\n aBroadcastDims.forEach(d => aLoc[d] = 0);\n const aIndex = aBuf.locToIndex(aLoc);\n const bLoc = loc.slice(-b.rank);\n bBroadcastDims.forEach(d => bLoc[d] = 0);\n const bIndex = bBuf.locToIndex(bLoc);\n resVals[i] = op(aVals[aIndex], bVals[bIndex]);\n }\n }\n return result.toTensor();\n }\n split(x, sizeSplits, axis) {\n return split(x, sizeSplits, axis);\n }\n dispose() { }\n floatPrecision() {\n return 32;\n }\n /** Returns the smallest representable number. */\n epsilon() {\n return super.epsilon();\n }\n cropAndResize(images, boxes, boxIndex, cropSize, method, extrapolationValue) {\n const [batch, imageHeight, imageWidth, numChannels] = images.shape;\n const numBoxes = boxes.shape[0];\n const [cropHeight, cropWidth] = cropSize;\n const output = tf.buffer([numBoxes, cropHeight, cropWidth, numChannels], 'float32');\n const boxVals = this.readSync(boxes.dataId);\n const boxIndVals = this.readSync(boxIndex.dataId);\n const imageVals = this.readSync(images.dataId);\n const inStride = images.strides; // to calculate flat indexes into image\n const outStride = output.strides; // to calculate flat indexes into output\n // Reference implementation\n // tslint:disable-next-line:max-line-length\n // https://github.com/tensorflow/tensorflow/blob/master/tensorflow/core/kernels/crop_and_resize_op.cc\n for (let b = 0; b < numBoxes; b++) {\n const startInd = b * 4;\n const y1 = boxVals[startInd];\n const x1 = boxVals[startInd + 1];\n const y2 = boxVals[startInd + 2];\n const x2 = boxVals[startInd + 3];\n const bInd = boxIndVals[b];\n if (bInd >= batch) {\n continue;\n }\n const heightScale = (cropHeight > 1) ?\n (y2 - y1) * (imageHeight - 1) / (cropHeight - 1) :\n 0;\n const widthScale = (cropWidth > 1) ? 
(x2 - x1) * (imageWidth - 1) / (cropWidth - 1) : 0;\n for (let y = 0; y < cropHeight; y++) {\n const yInd = (cropHeight > 1) ?\n y1 * (imageHeight - 1) + y * (heightScale) :\n 0.5 * (y1 + y2) * (imageHeight - 1);\n if (yInd < 0 || yInd > imageHeight - 1) {\n for (let x = 0; x < cropWidth; x++) {\n for (let c = 0; c < numChannels; c++) {\n const ind = c + x * outStride[2] + y * outStride[1] + b * outStride[0];\n output.values[ind] = extrapolationValue;\n }\n }\n continue;\n }\n if (method === 'bilinear') {\n const topInd = Math.floor(yInd);\n const bottomInd = Math.ceil(yInd);\n const yLerp = yInd - topInd;\n for (let x = 0; x < cropWidth; x++) {\n const xInd = (cropWidth > 1) ?\n x1 * (imageWidth - 1) + x * widthScale :\n 0.5 * (x1 + x2) * (imageWidth - 1);\n if (xInd < 0 || xInd > imageWidth - 1) {\n for (let c = 0; c < numChannels; c++) {\n const ind = c + x * outStride[2] + y * outStride[1] + b * outStride[0];\n output.values[ind] = extrapolationValue;\n }\n continue;\n }\n const leftInd = Math.floor(xInd);\n const rightInd = Math.ceil(xInd);\n const xLerp = xInd - leftInd;\n for (let c = 0; c < numChannels; c++) {\n let ind = c + leftInd * inStride[2] + topInd * inStride[1] +\n bInd * inStride[0];\n const topLeft = imageVals[ind];\n ind = c + rightInd * inStride[2] + topInd * inStride[1] +\n bInd * inStride[0];\n const topRight = imageVals[ind];\n ind = c + leftInd * inStride[2] + bottomInd * inStride[1] +\n bInd * inStride[0];\n const bottomLeft = imageVals[ind];\n ind = c + rightInd * inStride[2] + bottomInd * inStride[1] +\n bInd * inStride[0];\n const bottomRight = imageVals[ind];\n const top = topLeft + (topRight - topLeft) * xLerp;\n const bottom = bottomLeft + (bottomRight - bottomLeft) * xLerp;\n ind = c + x * outStride[2] + y * outStride[1] + b * outStride[0];\n output.values[ind] = top + ((bottom - top) * yLerp);\n }\n }\n }\n else { // method == \"nearest\"\n for (let x = 0; x < cropWidth; ++x) {\n const xInd = (cropWidth > 1) ?\n x1 * (imageWidth - 1) + x * widthScale :\n 0.5 * (x1 + x2) * (imageWidth - 1);\n if (xInd < 0 || xInd > imageWidth - 1) {\n for (let c = 0; c < numChannels; c++) {\n const ind = c + x * outStride[2] + y * outStride[1] + b * outStride[0];\n output.values[ind] = extrapolationValue;\n }\n continue;\n }\n const closestX = Math.round(xInd);\n const closestY = Math.round(yInd);\n for (let c = 0; c < numChannels; c++) {\n const inInd = c + closestX * inStride[2] +\n closestY * inStride[1] + bInd * inStride[0];\n const outInd = c + x * outStride[2] + y * outStride[1] + b * outStride[0];\n output.values[outInd] = imageVals[inInd];\n }\n }\n }\n }\n }\n return output.toTensor();\n }\n sparseToDense(sparseIndices, sparseValues, outputShape, defaultValue) {\n const { sliceRank, numUpdates, sliceSize, strides, outputSize } = backend_util.calculateShapes(sparseValues, sparseIndices, outputShape);\n const sumDupeIndices = false;\n return this.scatter(sparseIndices, sparseValues, outputShape, outputSize, sliceSize, numUpdates, sliceRank, strides, defaultValue, sumDupeIndices);\n }\n gatherND(x, indices) {\n const indicesShape = indices.shape;\n const sliceRank = indicesShape[indicesShape.length - 1];\n const [resultShape, numSlices, sliceSize, strides] = backend_util.prepareAndValidate(x, indices);\n if (numSlices === 0) {\n return tf.tensor([], resultShape, x.dtype);\n }\n const buffer = new TensorBuffer([numSlices, sliceSize], x.dtype);\n const indicesData = this.readSync(indices.dataId);\n const xData = this.readSync(x.dataId);\n for (let i = 0; i < numSlices; 
i++) {\n const index = [];\n let flattenIndex = 0;\n for (let j = 0; j < sliceRank; j++) {\n const dim = indicesData[i * sliceRank + j];\n flattenIndex += dim * strides[j];\n index.push(dim);\n }\n if (flattenIndex < 0 || flattenIndex >= x.size / sliceSize) {\n throw new Error(`Invalid indices: ${index} does not index into ${x.shape}`);\n }\n for (let k = 0; k < sliceSize; k++) {\n buffer.values[i * sliceSize + k] = xData[flattenIndex * sliceSize + k];\n }\n }\n return buffer.toTensor().reshape(resultShape);\n }\n scatterND(indices, updates, shape) {\n const { sliceRank, numUpdates, sliceSize, strides, outputSize } = backend_util.calculateShapes(updates, indices, shape);\n const defaultValue = tf.scalar(0);\n const sumDupeIndices = true;\n return this.scatter(indices, updates, shape, outputSize, sliceSize, numUpdates, sliceRank, strides, defaultValue, sumDupeIndices);\n }\n onesLike(x) {\n if (x.dtype === 'string') {\n throw new Error('onesLike is not supported for string tensors');\n }\n else {\n // TODO(lina128): Use fill kernel directly once this kernel is\n // modularized.\n return tf.fill(x.shape, 1, x.dtype);\n }\n }\n zerosLike(x) {\n const values = util.getArrayFromDType(x.dtype, util.sizeFromShape(x.shape));\n return this.makeOutput(values, x.shape, x.dtype);\n }\n linspace(start, stop, num) {\n return backend_util.linspaceImpl(start, stop, num);\n }\n scatter(indices, updates, shape, outputSize, sliceSize, numUpdates, sliceRank, strides, defaultValue, sumDupeIndices) {\n const flattenShape = [outputSize / sliceSize, sliceSize];\n const indicesData = this.readSync(indices.dataId);\n const updatesData = this.readSync(updates.dataId);\n if (outputSize === 0) {\n return tf.tensor([], shape, updates.dtype);\n }\n const buffer = new TensorBuffer(flattenShape, updates.dtype);\n buffer.values.fill(this.readSync(defaultValue.dataId)[0]);\n for (let i = 0; i < numUpdates; i++) {\n const index = [];\n let flattenIndex = 0;\n for (let j = 0; j < sliceRank; j++) {\n const dim = indicesData[i * sliceRank + j];\n index.push(dim);\n flattenIndex += dim * strides[j];\n }\n if (flattenIndex < 0 || flattenIndex >= outputSize / sliceSize) {\n throw new Error(`Invalid indices: ${index} does not index into ${shape}`);\n }\n for (let k = 0; k < sliceSize; k++) {\n if (sumDupeIndices) {\n buffer.values[flattenIndex * sliceSize + k] +=\n updatesData[i * sliceSize + k];\n }\n else {\n buffer.values[flattenIndex * sliceSize + k] = updates.rank === 0 ?\n updatesData[0] :\n updatesData[i * sliceSize + k];\n }\n }\n }\n return buffer.toTensor().reshape(shape);\n }\n}\n//# sourceMappingURL=backend_cpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Abs, util } from '@tensorflow/tfjs-core';\nexport function simpleAbsImpl(vals) {\n const resultValues = new Float32Array(vals.length);\n for (let i = 0; i < vals.length; ++i) {\n resultValues[i] = Math.abs(vals[i]);\n }\n return resultValues;\n}\nexport const abs = (args) => {\n const { x } = args.inputs;\n const cpuBackend = args.backend;\n let resultValues = new Float32Array(util.sizeFromShape(x.shape));\n if (x.dtype !== 'complex64') {\n const values = cpuBackend.data.get(x.dataId).values;\n resultValues = simpleAbsImpl(values);\n }\n else {\n const complexVals = cpuBackend.data.get(x.dataId);\n const real = complexVals.complexTensorInfos.real;\n const imag = complexVals.complexTensorInfos.imag;\n const realVals = cpuBackend.data.get(real.dataId).values;\n const imagVals = cpuBackend.data.get(imag.dataId).values;\n for (let i = 0; i < realVals.length; i++) {\n const real = realVals[i];\n const imag = imagVals[i];\n resultValues[i] = Math.hypot(real, imag);\n }\n }\n return cpuBackend.makeOutput(resultValues, x.shape, 'float32');\n};\nexport const absConfig = {\n kernelName: Abs,\n backendName: 'cpu',\n kernelFunc: abs,\n};\n//# sourceMappingURL=Abs.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, util } from '@tensorflow/tfjs-core';\n/**\n * Template that creates implementation for binary ops. 
Supports broadcast.\n */\nexport function createSimpleBinaryKernelImpl(op) {\n return (aShape, bShape, aVals, bVals, dtype) => {\n const newShape = backend_util.assertAndGetBroadcastShape(aShape, bShape);\n const resultRank = newShape.length;\n const resultStrides = util.computeStrides(newShape);\n const resultSize = util.sizeFromShape(newShape);\n const result = util.getTypedArrayFromDType(dtype, resultSize);\n const aRank = aShape.length;\n const bRank = bShape.length;\n const aStrides = util.computeStrides(aShape);\n const bStrides = util.computeStrides(bShape);\n const aBroadcastDims = backend_util.getBroadcastDims(aShape, newShape);\n const bBroadcastDims = backend_util.getBroadcastDims(bShape, newShape);\n if (aBroadcastDims.length + bBroadcastDims.length === 0) {\n for (let i = 0; i < result.length; ++i) {\n result[i] = op(aVals[i % aVals.length], bVals[i % bVals.length]);\n }\n }\n else {\n for (let i = 0; i < result.length; ++i) {\n const loc = util.indexToLoc(i, resultRank, resultStrides);\n const aLoc = loc.slice(-aRank);\n aBroadcastDims.forEach(d => aLoc[d] = 0);\n const aIndex = util.locToIndex(aLoc, aRank, aStrides);\n const bLoc = loc.slice(-bRank);\n bBroadcastDims.forEach(d => bLoc[d] = 0);\n const bIndex = util.locToIndex(bLoc, bRank, bStrides);\n result[i] = op(aVals[aIndex], bVals[bIndex]);\n }\n }\n return [result, newShape];\n };\n}\n//# sourceMappingURL=binary_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Complex } from '@tensorflow/tfjs-core';\nexport function complex(args) {\n const { inputs, backend } = args;\n const { real, imag } = inputs;\n const realVals = backend.data.get(real.dataId).values;\n const imagVals = backend.data.get(imag.dataId).values;\n const complexInfo = backend.makeTensorInfo(real.shape, 'complex64');\n const complex = backend.data.get(complexInfo.dataId);\n // The complex tensor owns the underlying real and imag tensorInfos, only the\n // complex tensor tracks refCount, when complexData is disposed the\n // underlying tensorData will be disposed.\n complex.complexTensorInfos = {\n real: backend.makeTensorInfo(real.shape, 'float32', realVals),\n imag: backend.makeTensorInfo(imag.shape, 'float32', imagVals)\n };\n return complexInfo;\n}\nexport const complexConfig = {\n kernelName: Complex,\n backendName: 'cpu',\n kernelFunc: complex\n};\n//# sourceMappingURL=Complex.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Identity } from '@tensorflow/tfjs-core';\nexport function identity(args) {\n const { inputs, backend } = args;\n const { x } = inputs;\n backend.incRef(x.dataId);\n return { dataId: x.dataId, shape: x.shape, dtype: x.dtype };\n}\nexport const identityConfig = {\n kernelName: Identity,\n backendName: 'cpu',\n kernelFunc: identity\n};\n//# sourceMappingURL=Identity.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Real } from '@tensorflow/tfjs-core';\nexport function real(args) {\n const { inputs, backend } = args;\n const { input } = inputs;\n const real = backend.data.get(input.dataId).complexTensorInfos.real;\n const realVal = backend.data.get(real.dataId).values;\n // When complex tensor is disposed, its underlying parts will be disposed too.\n // Make new tensor out of the real value of the complex. This makes sure the\n // value is still accessible even if complex tensor is disposed.\n return backend.makeTensorInfo(real.shape, real.dtype, realVal);\n}\nexport const realConfig = {\n kernelName: Real,\n backendName: 'cpu',\n kernelFunc: real\n};\n//# sourceMappingURL=Real.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as tf from '@tensorflow/tfjs-core';\nimport { Cast, util } from '@tensorflow/tfjs-core';\nimport { createSimpleBinaryKernelImpl } from '../utils/binary_impl';\nimport { complex } from './Complex';\nimport { identity } from './Identity';\nimport { real } from './Real';\nexport function cast(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n const { dtype } = attrs;\n // Casting to complex64.\n if (dtype === 'complex64') {\n if (x.dtype === 'complex64') {\n return identity({ inputs: { x }, backend });\n }\n // TODO(lina128): Import kernel function once zeros is modularized.\n const zerosTensor = tf.zeros(x.shape);\n const floatX = cast({ inputs: { x }, backend, attrs: { dtype: 'float32' } });\n const result = complex({ inputs: { real: floatX, imag: zerosTensor }, backend });\n zerosTensor.dispose();\n backend.disposeIntermediateTensorInfo(floatX);\n return result;\n }\n // Casting from complex64\n if (x.dtype === 'complex64') {\n const realPart = real({ inputs: { input: x }, backend });\n const result = cast({ inputs: { x: realPart }, backend, attrs: { dtype } });\n backend.disposeIntermediateTensorInfo(realPart);\n return result;\n }\n if (!util.hasEncodingLoss(x.dtype, dtype)) {\n // We don't change the underlying data, since we cast to higher\n // precision.\n const result = identity({ inputs: { x }, backend });\n return { dataId: result.dataId, shape: result.shape, dtype };\n }\n if (dtype === 'int32') {\n const values = backend.data.get(x.dataId).values;\n const resultValues = Int32Array.from(values);\n return backend.makeTensorInfo(x.shape, 'int32', resultValues);\n }\n if (dtype === 'bool') {\n // This is essentially the result of notEqual(x, 0). We avoid using\n // kernel notEqual to avoid circular dependency, i.e. binary_utils ->\n // cast -> notEqual -> binary_utils.\n const xVals = backend.data.get(x.dataId).values;\n const zero = util.toTypedArray([0], x.dtype);\n const [resultData, resultShape] = createSimpleBinaryKernelImpl((a, b) => (a !== b) ? 1 : 0)(x.shape, [], xVals, zero, 'bool');\n return backend.makeTensorInfo(resultShape, 'bool', resultData);\n }\n throw new Error(`Error in Cast: failed to cast ${x.dtype} to ${dtype}`);\n}\nexport const castConfig = {\n kernelName: Cast,\n backendName: 'cpu',\n kernelFunc: cast\n};\n//# sourceMappingURL=Cast.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
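The bool branch of the Cast kernel above reuses the binary template to compute notEqual(x, 0) rather than importing the NotEqual kernel. Stripped of the broadcasting machinery, the per-element effect is just the following (hypothetical standalone helper, not the backend API):

function castToBool(values) {
  // bool cast == notEqual(x, 0): anything non-zero becomes 1
  const out = new Uint8Array(values.length);
  for (let i = 0; i < values.length; i++) out[i] = values[i] !== 0 ? 1 : 0;
  return out;
}
// castToBool(new Float32Array([0, 2.5, -1, 0])) -> Uint8Array [0, 1, 1, 0]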
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { cast } from '../kernels/Cast';\nimport { complex } from '../kernels/Complex';\n/**\n * Template that creates a `KernelFunc` for binary ops.\n * @param name Kernel name.\n * @param binaryKernelImpl A `SimpleBinaryKernelImpl` for the kernel.\n * @param binaryKernelComplexImpl Optional. If exists, represents a\n * `ComplexBinaryKernelImpl` for the kernel, will be used when input dtype\n * is `complex64`.\n * @param dtype Optional. If set, the result has this dtype. Otherwise, the\n * result has the same dtype as the first input. This is mainly used in\n * comparison kernels, such as Equal, Less, Greater, etc.\n */\nexport function binaryKernelFunc(name, simpleImpl, complexImpl, dtype) {\n if (complexImpl == null) {\n return ({ inputs, backend }) => {\n const { a, b } = inputs;\n const cpuBackend = backend;\n assertNotComplex([a, b], name);\n const aVals = cpuBackend.data.get(a.dataId).values;\n const bVals = cpuBackend.data.get(b.dataId).values;\n const $dtype = dtype || a.dtype;\n const [resultData, resultShape] = simpleImpl(a.shape, b.shape, aVals, bVals, $dtype);\n return cpuBackend.makeTensorInfo(resultShape, $dtype, resultData);\n };\n }\n return ({ inputs, backend }) => {\n const { a, b } = inputs;\n const cpuBackend = backend;\n if (a.dtype === 'complex64' || b.dtype === 'complex64') {\n const $aComplex = cast({ inputs: { x: a }, backend: cpuBackend, attrs: { dtype: 'complex64' } });\n const $aComplexVals = cpuBackend.data.get($aComplex.dataId);\n const aReal = $aComplexVals.complexTensorInfos.real;\n const aImag = $aComplexVals.complexTensorInfos.imag;\n const aRealVals = cpuBackend.data.get(aReal.dataId).values;\n const aImagVals = cpuBackend.data.get(aImag.dataId).values;\n const $bComplex = cast({ inputs: { x: b }, backend: cpuBackend, attrs: { dtype: 'complex64' } });\n const $bComplexVals = cpuBackend.data.get($bComplex.dataId);\n const bReal = $bComplexVals.complexTensorInfos.real;\n const bImag = $bComplexVals.complexTensorInfos.imag;\n const bRealVals = cpuBackend.data.get(bReal.dataId).values;\n const bImagVals = cpuBackend.data.get(bImag.dataId).values;\n const [resultRealData, resultImagData, resultShape] = complexImpl(a.shape, b.shape, aRealVals, aImagVals, bRealVals, bImagVals);\n const resultReal = cpuBackend.makeTensorInfo(resultShape, 'float32', resultRealData);\n const resultImag = cpuBackend.makeTensorInfo(resultShape, 'float32', resultImagData);\n const result = complex({ inputs: { real: resultReal, imag: resultImag }, backend: cpuBackend });\n cpuBackend.disposeIntermediateTensorInfo($aComplex);\n cpuBackend.disposeIntermediateTensorInfo($bComplex);\n cpuBackend.disposeIntermediateTensorInfo(resultReal);\n cpuBackend.disposeIntermediateTensorInfo(resultImag);\n return result;\n }\n else 
{\n const aVals = cpuBackend.data.get(a.dataId).values;\n const bVals = cpuBackend.data.get(b.dataId).values;\n const $dtype = dtype || a.dtype;\n const [resultData, resultShape] = simpleImpl(a.shape, b.shape, aVals, bVals, $dtype);\n return cpuBackend.makeTensorInfo(resultShape, $dtype, resultData);\n }\n };\n}\n/**\n * Template that creates the complex type implementation for binary ops.\n * Supports broadcast.\n */\nexport function createComplexBinaryKernelImpl(op) {\n return (aShape, bShape, aRealVals, aImagVals, bRealVals, bImagVals) => {\n const resultShape = backend_util.assertAndGetBroadcastShape(aShape, bShape);\n const resultSize = util.sizeFromShape(resultShape);\n const resultRank = resultShape.length;\n const resultStrides = util.computeStrides(resultShape);\n const resultRealVals = util.getTypedArrayFromDType('float32', resultSize);\n const resultImagVals = util.getTypedArrayFromDType('float32', resultSize);\n const aBroadcastDims = backend_util.getBroadcastDims(aShape, resultShape);\n const bBroadcastDims = backend_util.getBroadcastDims(bShape, resultShape);\n const aVals = backend_util.mergeRealAndImagArrays(aRealVals, aImagVals);\n const bVals = backend_util.mergeRealAndImagArrays(bRealVals, bImagVals);\n const aRank = aShape.length;\n const aStrides = util.computeStrides(aShape);\n const bRank = bShape.length;\n const bStrides = util.computeStrides(bShape);\n if (aBroadcastDims.length + bBroadcastDims.length === 0) {\n for (let i = 0; i < resultRealVals.length; i++) {\n const aIdx = i % aVals.length;\n const bIdx = i % bVals.length;\n const result = op(aVals[aIdx * 2], aVals[aIdx * 2 + 1], bVals[bIdx * 2], bVals[bIdx * 2 + 1]);\n resultRealVals[i] = result.real;\n resultImagVals[i] = result.imag;\n }\n }\n else {\n for (let i = 0; i < resultRealVals.length; i++) {\n const loc = util.indexToLoc(i, resultRank, resultStrides);\n const aLoc = loc.slice(-aRank);\n aBroadcastDims.forEach(d => aLoc[d] = 0);\n const aIndex = util.locToIndex(aLoc, aRank, aStrides);\n const bLoc = loc.slice(-bRank);\n bBroadcastDims.forEach(d => bLoc[d] = 0);\n const bIndex = util.locToIndex(bLoc, bRank, bStrides);\n const opResult = op(aVals[aIndex * 2], aVals[aIndex * 2 + 1], bVals[bIndex * 2], bVals[bIndex * 2 + 1]);\n resultRealVals[i] = opResult.real;\n resultImagVals[i] = opResult.imag;\n }\n }\n return [resultRealVals, resultImagVals, resultShape];\n };\n}\n//# sourceMappingURL=kernel_utils.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
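For complex inputs, binaryKernelFunc above casts both operands to complex64, merges each into an interleaved [re, im, re, im, ...] array, and applies an op that returns {real, imag} per pair. A minimal sketch of that inner loop without broadcasting, over plain interleaved arrays (illustrative names only, not the exported implementation):

function complexElementwise(aInterleaved, bInterleaved, op) {
  const n = aInterleaved.length / 2;
  const outReal = new Float32Array(n);
  const outImag = new Float32Array(n);
  for (let i = 0; i < n; i++) {
    const r = op(aInterleaved[2 * i], aInterleaved[2 * i + 1],
                 bInterleaved[2 * i], bInterleaved[2 * i + 1]);
    outReal[i] = r.real;
    outImag[i] = r.imag;
  }
  return [outReal, outImag];
}
// complex multiply: (a + bi)(c + di) = (ac - bd) + (ad + bc)i
const mulOp = (aR, aI, bR, bI) => ({ real: aR * bR - aI * bI, imag: aR * bI + aI * bR });
// complexElementwise(new Float32Array([1, 2]), new Float32Array([3, 4]), mulOp)
// -> [Float32Array [-5], Float32Array [10]]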
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Add } from '@tensorflow/tfjs-core';\nimport { createSimpleBinaryKernelImpl } from '../utils/binary_impl';\nimport { binaryKernelFunc, createComplexBinaryKernelImpl } from '../utils/kernel_utils';\nexport const addImpl = createSimpleBinaryKernelImpl(((a, b) => a + b));\nexport const addComplexImpl = createComplexBinaryKernelImpl(((aReal, aImag, bReal, bImag) => {\n return { real: aReal + bReal, imag: aImag + bImag };\n}));\nexport const add = binaryKernelFunc(Add, addImpl, addComplexImpl);\nexport const addConfig = {\n kernelName: Add,\n backendName: 'cpu',\n kernelFunc: add\n};\n//# sourceMappingURL=Add.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\n/**\n * Template that creates implementation for unary op.\n */\nexport function createSimpleUnaryImpl(op) {\n return (values, dtype, attrs) => {\n const newValues = util.getTypedArrayFromDType(dtype, values.length);\n for (let i = 0; i < values.length; ++i) {\n newValues[i] = op(values[i], attrs);\n }\n return newValues;\n };\n}\n//# sourceMappingURL=unary_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\n/**\n * Template that creates a `KernelFunc` for unary ops.\n * @param name Kernel name.\n * @param op A `SimpleUnaryOperation` for the kernel.\n * @param dtype Optional. If set, the result has this dtype. Otherwise, the\n * result has the same dtype as the input. 
This is mainly used in certain\n * kernels that return bool type, such as isFinite, isInf, etc.\n */\nexport function unaryKernelFunc(name, op, dtype) {\n return ({ inputs, attrs, backend }) => {\n const { x } = inputs;\n assertNotComplex(x, name);\n if (x.dtype === 'string' || dtype === 'string') {\n throw new Error('unaryKernelFunc does not support string input/output');\n }\n const cpuBackend = backend;\n const values = cpuBackend.data.get(x.dataId).values;\n const xSize = util.sizeFromShape(x.shape);\n const $dtype = dtype || x.dtype;\n const newValues = util.getArrayFromDType($dtype, xSize);\n for (let i = 0; i < xSize; ++i) {\n newValues[i] = op(values[i], attrs);\n }\n return cpuBackend.makeTensorInfo(x.shape, $dtype, newValues);\n };\n}\n/**\n * Template that creates a `KernelFunc` for unary ops from the given\n * `SimpleUnaryImpl`..\n * @param name Kernel name.\n * @param unaryImpl A `SimpleUnaryImpl` that implements the op.\n * @param dtype Optional. If set, the result has this dtype. Otherwise, the\n * result has the same dtype as the input. This is mainly used in certain\n * kernels that return bool type, such as isFinite, isInf, etc.\n */\nexport function unaryKernelFuncFromImpl(name, unaryImpl, dtype) {\n return ({ inputs, attrs, backend }) => {\n const { x } = inputs;\n assertNotComplex(x, name);\n if (x.dtype === 'string' || dtype === 'string') {\n throw new Error('unaryKernelFunc does not support string input/output');\n }\n const cpuBackend = backend;\n const values = cpuBackend.data.get(x.dataId).values;\n const $dtype = dtype || x.dtype;\n const newValues = unaryImpl(values, $dtype, attrs);\n return cpuBackend.makeTensorInfo(x.shape, $dtype, newValues);\n };\n}\n//# sourceMappingURL=unary_utils.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Ceil } from '@tensorflow/tfjs-core';\nimport { createSimpleUnaryImpl } from '../utils/unary_impl';\nimport { unaryKernelFuncFromImpl } from '../utils/unary_utils';\nexport const ceilImpl = createSimpleUnaryImpl((xi) => Math.ceil(xi));\nexport const ceil = unaryKernelFuncFromImpl(Ceil, ceilImpl);\nexport const ceilConfig = {\n kernelName: Ceil,\n backendName: 'cpu',\n kernelFunc: ceil,\n};\n//# sourceMappingURL=Ceil.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
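Both unary templates above reduce to the same idea: allocate an output buffer of the target dtype and apply op(value, attrs) element by element. A minimal standalone version, shown with Math.ceil as the op (a sketch under those assumptions, not the exported unaryKernelFunc):

function simpleUnaryImpl(op) {
  return (values, attrs) => {
    const out = new Float32Array(values.length);
    for (let i = 0; i < values.length; i++) out[i] = op(values[i], attrs);
    return out;
  };
}
const ceilImplSketch = simpleUnaryImpl((x) => Math.ceil(x));
// ceilImplSketch(new Float32Array([0.2, 1.5, 2.1])) -> Float32Array [1, 2, 3]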
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Exp } from '@tensorflow/tfjs-core';\nimport { createSimpleUnaryImpl } from '../utils/unary_impl';\nimport { unaryKernelFuncFromImpl } from '../utils/unary_utils';\nexport const expImpl = createSimpleUnaryImpl((xi) => Math.exp(xi));\nexport const exp = unaryKernelFuncFromImpl(Exp, expImpl);\nexport const expConfig = {\n kernelName: Exp,\n backendName: 'cpu',\n kernelFunc: exp,\n};\n//# sourceMappingURL=Exp.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Expm1 } from '@tensorflow/tfjs-core';\nimport { createSimpleUnaryImpl } from '../utils/unary_impl';\nimport { unaryKernelFuncFromImpl } from '../utils/unary_utils';\nexport const expm1Impl = createSimpleUnaryImpl((xi) => Math.expm1(xi));\nexport const expm1 = unaryKernelFuncFromImpl(Expm1, expm1Impl);\nexport const expm1Config = {\n kernelName: Expm1,\n backendName: 'cpu',\n kernelFunc: expm1,\n};\n//# sourceMappingURL=Expm1.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Floor } from '@tensorflow/tfjs-core';\nimport { createSimpleUnaryImpl } from '../utils/unary_impl';\nimport { unaryKernelFuncFromImpl } from '../utils/unary_utils';\nexport const floorImpl = createSimpleUnaryImpl((xi) => Math.floor(xi));\nexport const floor = unaryKernelFuncFromImpl(Floor, floorImpl);\nexport const floorConfig = {\n kernelName: Floor,\n backendName: 'cpu',\n kernelFunc: floor,\n};\n//# sourceMappingURL=Floor.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Log } from '@tensorflow/tfjs-core';\nimport { createSimpleUnaryImpl } from '../utils/unary_impl';\nimport { unaryKernelFuncFromImpl } from '../utils/unary_utils';\nexport const logImpl = createSimpleUnaryImpl((xi) => Math.log(xi));\nexport const log = unaryKernelFuncFromImpl(Log, logImpl);\nexport const logConfig = {\n kernelName: Log,\n backendName: 'cpu',\n kernelFunc: log,\n};\n//# sourceMappingURL=Log.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nexport function maxImpl(aVals, reduceSize, outShape, dtype) {\n const vals = util.getTypedArrayFromDType(dtype, util.sizeFromShape(outShape));\n for (let i = 0; i < vals.length; ++i) {\n const offset = i * reduceSize;\n let max = aVals[offset];\n for (let j = 0; j < reduceSize; ++j) {\n const value = aVals[offset + j];\n if (value > max) {\n max = value;\n }\n }\n vals[i] = max;\n }\n return vals;\n}\n//# sourceMappingURL=Max_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
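maxImpl above assumes the reduced axis is innermost, so the input can be read as [outSize, reduceSize] in row-major order and each contiguous run scanned for its maximum. A standalone sketch of that reduction (illustrative name, not the shared export):

function reduceMaxLastAxis(vals, reduceSize) {
  const out = new Float32Array(vals.length / reduceSize);
  for (let i = 0; i < out.length; i++) {
    // each output element scans one contiguous run of reduceSize values
    let max = vals[i * reduceSize];
    for (let j = 1; j < reduceSize; j++) max = Math.max(max, vals[i * reduceSize + j]);
    out[i] = max;
  }
  return out;
}
// reduceMaxLastAxis(new Float32Array([1, 5, 2, 7, 0, 3]), 3) -> Float32Array [5, 7]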
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Multiply } from '@tensorflow/tfjs-core';\nimport { createSimpleBinaryKernelImpl } from '../utils/binary_impl';\nimport { binaryKernelFunc, createComplexBinaryKernelImpl } from '../utils/kernel_utils';\nexport const multiplyImpl = createSimpleBinaryKernelImpl(((aValue, bValue) => aValue * bValue));\nexport const multiplyComplexImpl = createComplexBinaryKernelImpl(((aReal, aImag, bReal, bImag) => {\n return {\n real: aReal * bReal - aImag * bImag,\n imag: aReal * bImag + aImag * bReal\n };\n}));\nexport const multiply = binaryKernelFunc(Multiply, multiplyImpl, multiplyComplexImpl);\nexport const multiplyConfig = {\n kernelName: Multiply,\n backendName: 'cpu',\n kernelFunc: multiply\n};\n//# sourceMappingURL=Multiply.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { NotEqual } from '@tensorflow/tfjs-core';\nimport { createSimpleBinaryKernelImpl } from '../utils/binary_impl';\nimport { binaryKernelFunc } from '../utils/kernel_utils';\nexport const notEqualImpl = createSimpleBinaryKernelImpl(((a, b) => (a !== b) ? 1 : 0));\nexport const notEqual = binaryKernelFunc(NotEqual, notEqualImpl, null /* complexOp */, 'bool');\nexport const notEqualConfig = {\n kernelName: NotEqual,\n backendName: 'cpu',\n kernelFunc: notEqual\n};\n//# sourceMappingURL=NotEqual.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Rsqrt } from '@tensorflow/tfjs-core';\nimport { createSimpleUnaryImpl } from '../utils/unary_impl';\nimport { unaryKernelFuncFromImpl } from '../utils/unary_utils';\nexport const rsqrtImpl = createSimpleUnaryImpl((xi) => 1 / Math.sqrt(xi));\nexport const rsqrt = unaryKernelFuncFromImpl(Rsqrt, rsqrtImpl);\nexport const rsqrtConfig = {\n kernelName: Rsqrt,\n backendName: 'cpu',\n kernelFunc: rsqrt,\n};\n//# sourceMappingURL=Rsqrt.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Slice, slice_util, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function sliceImpl(vals, begin, size, shape, dtype) {\n const isContinous = slice_util.isSliceContinous(shape, begin, size);\n const length = util.sizeFromShape(size);\n const xStrides = util.computeStrides(shape);\n if (isContinous) {\n const flatOffset = slice_util.computeFlatOffset(begin, xStrides);\n return vals.subarray(flatOffset, flatOffset + length);\n }\n const outVals = util.getTypedArrayFromDType(dtype, length);\n for (let i = 0; i < length; ++i) {\n const rank = size.length;\n const strides = util.computeStrides(size);\n const loc = util.indexToLoc(i, rank, strides);\n const xLoc = loc.map((idx, j) => idx + begin[j]);\n const xIndex = util.locToIndex(xLoc, shape.length, xStrides);\n outVals[i] = vals[xIndex];\n }\n return outVals;\n}\nexport function slice(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n const { begin, size } = attrs;\n assertNotComplex(x, 'slice');\n const [$begin, $size] = slice_util.parseSliceParams(x, begin, size);\n slice_util.assertParamsValid(x, $begin, $size);\n const vals = backend.data.get(x.dataId).values;\n const outVals = sliceImpl(vals, $begin, $size, x.shape, x.dtype);\n return backend.makeTensorInfo($size, x.dtype, outVals);\n}\nexport const sliceConfig = {\n kernelName: Slice,\n backendName: 'cpu',\n kernelFunc: slice\n};\n//# sourceMappingURL=Slice.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
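sliceImpl above distinguishes a contiguous slice, which can be returned as a subarray view of the existing values, from the general case, which gathers element by element via index arithmetic. A minimal sketch of the contiguous fast path for row-major data (hypothetical helper, not the kernel's signature):

function sliceRows(vals, shape, beginRow, numRows) {
  const rowSize = shape[1];
  // whole rows of a row-major matrix are contiguous, so a single subarray
  // (a view, no copy) covers the requested slice
  return vals.subarray(beginRow * rowSize, (beginRow + numRows) * rowSize);
}
// sliceRows(new Float32Array([1, 2, 3, 4, 5, 6]), [3, 2], 1, 2) -> Float32Array [3, 4, 5, 6]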
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { SquaredDifference } from '@tensorflow/tfjs-core';\nimport { createSimpleBinaryKernelImpl } from '../utils/binary_impl';\nimport { binaryKernelFunc } from '../utils/kernel_utils';\nexport const squaredDifferenceImpl = createSimpleBinaryKernelImpl(((a, b) => {\n const diff = a - b;\n return diff * diff;\n}));\nexport const squaredDifference = binaryKernelFunc(SquaredDifference, squaredDifferenceImpl);\nexport const squaredDifferenceConfig = {\n kernelName: SquaredDifference,\n backendName: 'cpu',\n kernelFunc: squaredDifference\n};\n//# sourceMappingURL=SquaredDifference.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sub } from '@tensorflow/tfjs-core';\nimport { createSimpleBinaryKernelImpl } from '../utils/binary_impl';\nimport { binaryKernelFunc, createComplexBinaryKernelImpl } from '../utils/kernel_utils';\nexport const subImpl = createSimpleBinaryKernelImpl(((aValue, bValue) => aValue - bValue));\nexport const subComplexImpl = createComplexBinaryKernelImpl(((aReal, aImag, bReal, bImag) => {\n return { real: aReal - bReal, imag: aImag - bImag };\n}));\nexport const sub = binaryKernelFunc(Sub, subImpl, subComplexImpl);\nexport const subConfig = {\n kernelName: Sub,\n backendName: 'cpu',\n kernelFunc: sub\n};\n//# sourceMappingURL=Sub.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nexport function transposeImpl(xVals, xShape, dtype, perm, newShape) {\n const xRank = xShape.length;\n const xSize = util.sizeFromShape(xShape);\n const xStrides = util.computeStrides(xShape);\n const newStrides = util.computeStrides(newShape);\n const result = util.getTypedArrayFromDType(dtype, util.sizeFromShape(newShape));\n for (let i = 0; i < xSize; ++i) {\n const loc = util.indexToLoc(i, xRank, xStrides);\n // Permute location.\n const newLoc = new Array(loc.length);\n for (let i = 0; i < newLoc.length; i++) {\n newLoc[i] = loc[perm[i]];\n }\n const newIndex = util.locToIndex(newLoc, xRank, newStrides);\n result[newIndex] = xVals[i];\n }\n return result;\n}\n//# sourceMappingURL=Transpose_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { TensorBuffer, util } from '@tensorflow/tfjs-core';\nexport function uniqueImpl(values, axis, shape, dtype) {\n // Normalize and validate axis.\n const $axis = util.parseAxisParam(axis, shape)[0];\n // Calculate the new shape that is suitable for extracting data along the\n // given axis.\n //\n // The rank is 3.\n // The size of the 1st dimension is the size of all the axes < the given axis.\n // The size of the 2nd dimension is the same as the size of the given axis.\n // The size of the 3rd dimension is the size of all the axes > the given axis.\n //\n // For example, for a 4D tensor with shape=[2, 3, 5, 4] and axis=2, the\n // newShape would be: [2*3, 5, 4].\n //\n // Note that this is not the final output shape. This will be the shape for an\n // intermediate TensorBuffer (see inputBuffer below) to allow us to extract\n // values along the given axis. To demonstrate how it works, consider the\n // following example:\n //\n // Input: a 3D tensor, with shape [1, 2, 3]\n // [\n // [\n // [1,2,3],\n // [4,5,6]\n // ]\n // ]\n // Axis: 2 (the last axis).\n // Along axis 2, we expect to extract 3 tensors: [1,4], [2,5], [3,6].\n //\n // For this example, newShape would be: [2, 3, 1], where 2 is calculated from\n // 1*2. 
The re-shaped data would look like:\n //\n // [\n // [\n // [1], [2], [3]\n // ],\n // [\n // [4], [5], [6]\n // ]\n // ]\n //\n // Then, we can construct a 3-level nested loop by the following dimension\n // order to extract the values along the axis (dimension1):\n // i: dimension1 // 0,1,2 (newShape[1])\n // m: dimension0 // 0,1 (newShape[0])\n // n: dimension2 // 0 (newShape[2])\n //\n // m, i, n\n // ---------\n // Iteration 0: data at [0, 0, 0] => \"1\"\n // Iteration 1: data at [1, 0, 0] => \"4\"\n // We got [1,4].\n // Iteration 2: data at [0, 1, 0] => \"2\"\n // Iteration 3: data at [1, 1, 0] => \"5\"\n // We got [2,5].\n // Iteration 4: data at [0, 2, 0] => \"3\"\n // Iteration 5: data at [1, 2, 0] => \"6\"\n // We got [3,6].\n const newShape = [1, shape[0], 1];\n for (let i = 0; i < $axis; i++) {\n newShape[0] *= shape[i];\n }\n newShape[1] = shape[$axis];\n for (let i = $axis + 1; i < shape.length; i++) {\n newShape[2] *= shape[i];\n }\n // A map from unique elements (their string representations) to their values\n // in \"indices\" (below).\n const uniqueElements = {};\n // The indices of each unique element in the original tensor along the given\n // axis. It is 1D and has the same size as the given axis.\n const indices = new Int32Array(shape[$axis]);\n // Create a buffer so we can easily extract value at a given location.\n const inputBuffer = new TensorBuffer(newShape, dtype, values);\n // The indices along the given axis that have unique elements. This is a\n // de-duped version of \"indices\" above.\n const uniqueIndices = [];\n const is1DTensor = newShape[0] === 1 && newShape[2] === 1;\n for (let i = 0; i < shape[$axis]; i++) {\n // Extract values along the axis.\n let element;\n if (is1DTensor) {\n // Fast path for 1D tensor input.\n element = values[i].toString();\n }\n else {\n const axisValues = [];\n for (let m = 0; m < newShape[0]; m++) {\n for (let n = 0; n < newShape[2]; n++) {\n axisValues.push(inputBuffer.get(m, i, n));\n }\n }\n element = axisValues.join(',');\n }\n // Dedup and update various indices.\n if (uniqueElements[element] !== undefined) {\n indices[i] = uniqueElements[element];\n }\n else {\n const uniqueIndex = Object.keys(uniqueElements).length;\n uniqueElements[element] = uniqueIndex;\n indices[i] = uniqueIndex;\n uniqueIndices.push(i);\n }\n }\n // Now we know where each of the unique elements are located along the axis\n // (uniqueIndices). Extract them from input buffer and store them in the\n // output buffer.\n const outputTmpShape = newShape.slice();\n outputTmpShape[1] = Object.keys(uniqueElements).length;\n const outputBuffer = new TensorBuffer(outputTmpShape, dtype);\n uniqueIndices.forEach((uniqueElementIndex, i) => {\n for (let m = 0; m < newShape[0]; m++) {\n for (let n = 0; n < newShape[2]; n++) {\n outputBuffer.set(inputBuffer.get(m, uniqueElementIndex, n), m, i, n);\n }\n }\n });\n // The output shape can be calculated from the input shape with the size of\n // the given axis replaced by the number of unique elements along that axis.\n const outputShape = shape.slice();\n outputShape[$axis] = outputTmpShape[1];\n return {\n outputValues: outputBuffer.values,\n outputShape,\n indices,\n };\n}\n//# sourceMappingURL=Unique_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
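uniqueImpl above reshapes the input to rank 3 so the target axis sits in the middle, then dedups slices along that axis by joining their values into a string key. The same idea for the common case of unique rows (axis 0 of a row-major 2D matrix), as a standalone sketch with illustrative names:

function uniqueRows(vals, numRows, rowSize) {
  const seen = new Map();              // row key -> index into the unique output
  const indices = new Int32Array(numRows);
  const uniqueRowStarts = [];
  for (let r = 0; r < numRows; r++) {
    const key = Array.from(vals.subarray(r * rowSize, (r + 1) * rowSize)).join(',');
    if (seen.has(key)) {
      indices[r] = seen.get(key);
    } else {
      const id = seen.size;            // next unique slot
      seen.set(key, id);
      indices[r] = id;
      uniqueRowStarts.push(r);
    }
  }
  // copy only the first occurrence of each distinct row into the output
  const out = new Float32Array(uniqueRowStarts.length * rowSize);
  uniqueRowStarts.forEach((r, i) =>
    out.set(vals.subarray(r * rowSize, (r + 1) * rowSize), i * rowSize));
  return { outputValues: out, indices };
}
// uniqueRows(new Float32Array([1, 2, 3, 4, 1, 2]), 3, 2)
// -> { outputValues: Float32Array [1, 2, 3, 4], indices: Int32Array [0, 1, 0] }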
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// Shared functionality among backends.\nexport { simpleAbsImpl } from './kernels/Abs';\nexport { addImpl } from './kernels/Add';\nexport { ceilImpl } from './kernels/Ceil';\nexport { expImpl } from './kernels/Exp';\nexport { expm1Impl } from './kernels/Expm1';\nexport { floorImpl } from './kernels/Floor';\nexport { logImpl } from './kernels/Log';\nexport { maxImpl } from './kernels/Max_impl';\nexport { multiplyImpl } from './kernels/Multiply';\nexport { notEqualImpl } from './kernels/NotEqual';\nexport { rsqrtImpl } from './kernels/Rsqrt';\nexport { sliceImpl } from './kernels/Slice';\nexport { squaredDifferenceImpl } from './kernels/SquaredDifference';\nexport { subImpl } from './kernels/Sub';\nexport { transposeImpl } from './kernels/Transpose_impl';\nexport { uniqueImpl } from './kernels/Unique_impl';\n//# sourceMappingURL=shared.js.map", "/** @license See the LICENSE file. */\n// This code is auto-generated, do not modify this file!\nconst version = '2.7.0';\nexport { version };\n//# sourceMappingURL=version.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/*\n * base.ts contains all the exports from tfjs-backend-cpu\n * without auto-kernel registration\n */\nimport { registerBackend } from '@tensorflow/tfjs-core';\nimport { MathBackendCPU } from './backend_cpu';\nimport * as shared from './shared';\nexport { MathBackendCPU } from './backend_cpu';\nexport { version as version_cpu } from './version';\nexport { shared };\n// Side effects for default initialization of MathBackendCPU\nregisterBackend('cpu', () => new MathBackendCPU(), 1 /* priority */);\n//# sourceMappingURL=base.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Elu } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const elu = unaryKernelFunc(Elu, (xi) => xi >= 0 ? xi : (Math.exp(xi) - 1));\nexport const eluConfig = {\n kernelName: Elu,\n backendName: 'cpu',\n kernelFunc: elu,\n};\n//# sourceMappingURL=Elu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Prelu } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { createSimpleBinaryKernelImpl } from '../utils/binary_impl';\nconst preluImpl = createSimpleBinaryKernelImpl((xValue, aValue) => xValue < 0 ? aValue * xValue : xValue);\nexport function prelu(args) {\n const { inputs, backend } = args;\n const { x, alpha } = inputs;\n assertNotComplex([x, alpha], 'prelu');\n const aVals = backend.data.get(x.dataId).values;\n const bVals = backend.data.get(alpha.dataId).values;\n const [resultData, resultShape] = preluImpl(x.shape, alpha.shape, aVals, bVals, x.dtype);\n return backend.makeTensorInfo(resultShape, x.dtype, resultData);\n}\nexport const preluConfig = {\n kernelName: Prelu,\n backendName: 'cpu',\n kernelFunc: prelu,\n};\n//# sourceMappingURL=Prelu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Relu } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const relu = unaryKernelFunc(Relu, (xi) => Math.max(0, xi));\nexport const reluConfig = {\n kernelName: Relu,\n backendName: 'cpu',\n kernelFunc: relu,\n};\n//# sourceMappingURL=Relu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Relu6 } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const relu6 = unaryKernelFunc(Relu6, (xi) => Math.min(Math.max(0, xi), 6));\nexport const relu6Config = {\n kernelName: Relu6,\n backendName: 'cpu',\n kernelFunc: relu6,\n};\n//# sourceMappingURL=Relu6.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { elu } from '../kernels/Elu';\nimport { identity } from '../kernels/Identity';\nimport { prelu } from '../kernels/Prelu';\nimport { relu } from '../kernels/Relu';\nimport { relu6 } from '../kernels/Relu6';\nexport function applyActivation(backend, x, activation, preluActivationWeights) {\n if (activation === 'linear') {\n return identity({ inputs: { x }, backend });\n }\n else if (activation === 'relu') {\n return relu({ inputs: { x }, backend });\n }\n else if (activation === 'elu') {\n return elu({ inputs: { x }, backend });\n }\n else if (activation === 'relu6') {\n return relu6({ inputs: { x }, backend });\n }\n else if (activation === 'prelu') {\n return prelu({ inputs: { x, alpha: preluActivationWeights }, backend });\n }\n throw new Error(`Activation ${activation} has not been implemented for the CPU backend.`);\n}\n//# sourceMappingURL=fused_utils.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Reshape, util } from '@tensorflow/tfjs-core';\nexport function reshape(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n const { shape } = attrs;\n const xSize = util.sizeFromShape(x.shape);\n const $shape = util.inferFromImplicitShape(shape, xSize);\n const $xSize = util.sizeFromShape($shape);\n util.assert(xSize === $xSize, () => `The new shape (${$shape}) has ${$xSize} elements and the old ` +\n `shape (${x.shape}) has ${xSize} elements. The new shape and old ` +\n `shape must have the same number of elements.`);\n backend.incRef(x.dataId);\n const xData = backend.data.get(x.dataId);\n if (xData.complexTensorInfos != null) {\n const real = xData.complexTensorInfos.real;\n const imag = xData.complexTensorInfos.imag;\n real.shape = $shape;\n imag.shape = $shape;\n }\n return { dataId: x.dataId, shape: $shape, dtype: x.dtype };\n}\nexport const reshapeConfig = {\n kernelName: Reshape,\n backendName: 'cpu',\n kernelFunc: reshape\n};\n//# sourceMappingURL=Reshape.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { BatchMatMul, buffer, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { reshape } from './Reshape';\nexport function batchMatMul(args) {\n const { inputs, backend, attrs } = args;\n const { a, b } = inputs;\n const { transposeA, transposeB } = attrs;\n assertNotComplex([a, b], 'matMul');\n const aRank = a.shape.length;\n const bRank = b.shape.length;\n const innerShapeA = transposeA ? a.shape[aRank - 2] : a.shape[aRank - 1];\n const innerShapeB = transposeB ? b.shape[bRank - 1] : b.shape[bRank - 2];\n const outerShapeA = transposeA ? a.shape[aRank - 1] : a.shape[aRank - 2];\n const outerShapeB = transposeB ? b.shape[bRank - 2] : b.shape[bRank - 1];\n const outerDimsA = a.shape.slice(0, -2);\n const outerDimsB = b.shape.slice(0, -2);\n const batchDimA = util.sizeFromShape(outerDimsA);\n const batchDimB = util.sizeFromShape(outerDimsB);\n const batchDimsCompatible = batchDimA === batchDimB || batchDimA === 1 || batchDimB === 1;\n util.assert(aRank >= 2 && bRank >= 2 && batchDimsCompatible, () => `Error in matMul: the input batch dimensions must either be the ` +\n `same or at least one input batch dimension must be 1. Got input ` +\n `batch dimensions of (${outerDimsA}) and (${outerDimsB}).`);\n const outShapeOuterDims = batchDimA > batchDimB ? a.shape.slice(0, -2) : b.shape.slice(0, -2);\n const outShape = outShapeOuterDims.concat([outerShapeA, outerShapeB]);\n util.assert(innerShapeA === innerShapeB, () => `Error in matMul: inner shapes (${innerShapeA}) and (` +\n `${innerShapeB}) of Tensors with shapes ${a.shape} and ` +\n `${b.shape} and transposeA=${transposeA}` +\n ` and transposeB=${transposeB} must match.`);\n const a3dShape = transposeA ? [batchDimA, innerShapeA, outerShapeA] :\n [batchDimA, outerShapeA, innerShapeA];\n const b3dShape = transposeB ? [batchDimB, outerShapeB, innerShapeB] :\n [batchDimB, innerShapeB, outerShapeB];\n // The rest of the implementation is designed to operate on rank-3 tensors\n const a3d = reshape({ inputs: { x: a }, backend, attrs: { shape: a3dShape } });\n const b3d = reshape({ inputs: { x: b }, backend, attrs: { shape: b3dShape } });\n const sharedDim = transposeA ? a3d.shape[1] : a3d.shape[2];\n const leftDim = transposeA ? a3d.shape[2] : a3d.shape[1];\n const rightDim = transposeB ? 
b3d.shape[1] : b3d.shape[2];\n const batchDim = Math.max(batchDimA, batchDimB);\n const a3dValues = backend.data.get(a3d.dataId).values;\n const b3dValues = backend.data.get(b3d.dataId).values;\n const a3dStrides = util.computeStrides(a3d.shape);\n const b3dStrides = util.computeStrides(b3d.shape);\n const [aBatch, aOuterStep, aInnerStep] = transposeA ?\n [a3dStrides[0], 1, a3dStrides[1]] :\n [a3dStrides[0], a3dStrides[1], 1];\n const [bInnerStep, bOuterStep, bBatch] = transposeB ?\n [1, b3dStrides[1], b3dStrides[0]] :\n [b3dStrides[1], 1, b3dStrides[0]];\n const size = leftDim * rightDim;\n const result = buffer([batchDim, leftDim, rightDim], a3d.dtype);\n const resVals = result.values;\n const blockSize = backend.blockSize;\n for (let bi = 0; bi < batchDim; bi++) {\n for (let i0 = 0; i0 < leftDim; i0 += blockSize) {\n for (let j0 = 0; j0 < rightDim; j0 += blockSize) {\n for (let k0 = 0; k0 < sharedDim; k0 += blockSize) {\n // for when blockSize doesn't evenly divide the input\n const iBlock = Math.min(i0 + blockSize, leftDim);\n const jBlock = Math.min(j0 + blockSize, rightDim);\n const kBlock = Math.min(k0 + blockSize, sharedDim);\n for (let i = i0; i < iBlock; i++) {\n for (let j = j0; j < jBlock; j++) {\n let sum = 0.0;\n for (let k = k0; k < kBlock; k++) {\n const batchOffsetA = Math.min(bi, batchDimA - 1) * aBatch;\n const batchOffsetB = Math.min(bi, batchDimB - 1) * bBatch;\n const aVal = a3dValues[batchOffsetA + i * aOuterStep + k * aInnerStep];\n const bVal = b3dValues[k * bInnerStep + j * bOuterStep + batchOffsetB];\n sum += aVal * bVal;\n }\n resVals[bi * size + (i * rightDim + j)] += sum;\n }\n }\n }\n }\n }\n }\n backend.disposeIntermediateTensorInfo(a3d);\n backend.disposeIntermediateTensorInfo(b3d);\n // set correct shape on output.\n return backend.makeTensorInfo(outShape, result.dtype, result.values);\n}\nexport const batchMatMulConfig = {\n kernelName: BatchMatMul,\n backendName: 'cpu',\n kernelFunc: batchMatMul,\n};\n//# sourceMappingURL=BatchMatMul.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
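batchMatMul above tiles the i/j/k loops by backend.blockSize so each tile's operands stay cache-resident, clamping the last partial tile when blockSize does not divide a dimension. The same blocking reduced to a single [m,k] x [k,n] product over flat row-major arrays (a sketch, not the kernel itself; blockSize here is an assumed default):

function matmulBlocked(a, b, m, k, n, blockSize = 48) {
  const out = new Float32Array(m * n);
  for (let i0 = 0; i0 < m; i0 += blockSize) {
    for (let j0 = 0; j0 < n; j0 += blockSize) {
      for (let k0 = 0; k0 < k; k0 += blockSize) {
        // clamp the block edges for the last, possibly partial, tile
        const iEnd = Math.min(i0 + blockSize, m);
        const jEnd = Math.min(j0 + blockSize, n);
        const kEnd = Math.min(k0 + blockSize, k);
        for (let i = i0; i < iEnd; i++) {
          for (let j = j0; j < jEnd; j++) {
            let sum = 0;
            for (let kk = k0; kk < kEnd; kk++) sum += a[i * k + kk] * b[kk * n + j];
            out[i * n + j] += sum;       // accumulate partial sums across k-tiles
          }
        }
      }
    }
  }
  return out;
}
// matmulBlocked(new Float32Array([1, 2, 3, 4]), new Float32Array([5, 6, 7, 8]), 2, 2, 2)
// -> Float32Array [19, 22, 43, 50]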
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { _FusedMatMul } from '@tensorflow/tfjs-core';\nimport { applyActivation } from '../utils/fused_utils';\nimport { add } from './Add';\nimport { batchMatMul } from './BatchMatMul';\nexport function _fusedMatMul(args) {\n const { inputs, backend, attrs } = args;\n const { a, b, bias, preluActivationWeights } = inputs;\n const { transposeA, transposeB, activation } = attrs;\n let current;\n let addRes;\n let activationRes;\n const intermediates = [];\n const matMulRes = batchMatMul({ inputs: { a, b }, attrs: { transposeA, transposeB }, backend });\n current = matMulRes;\n if (bias) {\n addRes = add({ inputs: { a: current, b: bias }, backend });\n intermediates.push(current);\n current = addRes;\n }\n if (activation) {\n activationRes =\n applyActivation(backend, current, activation, preluActivationWeights);\n intermediates.push(current);\n current = activationRes;\n }\n for (const i of intermediates) {\n backend.disposeIntermediateTensorInfo(i);\n }\n return current;\n}\nexport const _fusedMatMulConfig = {\n kernelName: _FusedMatMul,\n backendName: 'cpu',\n kernelFunc: _fusedMatMul,\n};\n//# sourceMappingURL=_FusedMatMul.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Acos } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const acos = unaryKernelFunc(Acos, (xi) => Math.acos(xi));\nexport const acosConfig = {\n kernelName: Acos,\n backendName: 'cpu',\n kernelFunc: acos,\n};\n//# sourceMappingURL=Acos.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Acosh } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const acosh = unaryKernelFunc(Acosh, (xi) => Math.acosh(xi));\nexport const acoshConfig = {\n kernelName: Acosh,\n backendName: 'cpu',\n kernelFunc: acosh,\n};\n//# sourceMappingURL=Acosh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Asin } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const asin = unaryKernelFunc(Asin, (xi) => Math.asin(xi));\nexport const asinConfig = {\n kernelName: Asin,\n backendName: 'cpu',\n kernelFunc: asin,\n};\n//# sourceMappingURL=Asin.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Asinh } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const asinh = unaryKernelFunc(Asinh, (xi) => Math.asinh(xi));\nexport const asinhConfig = {\n kernelName: Asinh,\n backendName: 'cpu',\n kernelFunc: asinh,\n};\n//# sourceMappingURL=Asinh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Atan } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const atan = unaryKernelFunc(Atan, (xi) => Math.atan(xi));\nexport const atanConfig = {\n kernelName: Atan,\n backendName: 'cpu',\n kernelFunc: atan,\n};\n//# sourceMappingURL=Atan.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Atanh } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const atanh = unaryKernelFunc(Atanh, (xi) => Math.atanh(xi));\nexport const atanhConfig = {\n kernelName: Atanh,\n backendName: 'cpu',\n kernelFunc: atanh,\n};\n//# sourceMappingURL=Atanh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { buffer } from '@tensorflow/tfjs-core';\nexport function pool(xValues, xShape, dtype, strides, convInfo, poolType) {\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n const initialValue = (poolType === 'max' ? 
Number.NEGATIVE_INFINITY :\n Number.POSITIVE_INFINITY);\n const output = buffer(convInfo.outShape, dtype);\n const outputVals = output.values;\n const outputBatchStrides = convInfo.outShape[1] * convInfo.outShape[2] * convInfo.outShape[3];\n const outputRowStrides = convInfo.outShape[2] * convInfo.outShape[3];\n const outputColStrides = convInfo.outShape[3];\n for (let b = 0; b < convInfo.batchSize; ++b) {\n const outputBatchOffset = b * outputBatchStrides;\n const inputBatchOffset = b * strides[0];\n for (let d = 0; d < convInfo.inChannels; ++d) {\n for (let yR = 0; yR < convInfo.outHeight; ++yR) {\n const xRCorner = yR * strideHeight - padTop;\n const xRMin = Math.max(0, xRCorner);\n const xRMax = Math.min(convInfo.inHeight, effectiveFilterHeight + xRCorner);\n const outputRowOffset = outputBatchOffset + yR * outputRowStrides;\n for (let yC = 0; yC < convInfo.outWidth; ++yC) {\n const xCCorner = yC * strideWidth - padLeft;\n const xCMin = Math.max(0, xCCorner);\n const xCMax = Math.min(convInfo.inWidth, effectiveFilterWidth + xCCorner);\n let minMaxValue = initialValue;\n let avgValue = 0;\n let count = 0;\n for (let xR = xRMin; xR < xRMax; xR += dilationHeight) {\n const xROffset = inputBatchOffset + xR * strides[1];\n for (let xC = xCMin; xC < xCMax; xC += dilationWidth) {\n const xCOffset = xROffset + xC * strides[2];\n const pixel = xValues[xCOffset + d];\n if ((poolType === 'max' && pixel > minMaxValue)) {\n minMaxValue = pixel;\n }\n else if (poolType === 'avg') {\n avgValue += pixel;\n count++;\n }\n }\n if (isNaN(minMaxValue)) {\n break;\n }\n }\n const outputOffset = outputRowOffset + yC * outputColStrides + d;\n outputVals[outputOffset] =\n poolType === 'avg' ? avgValue / count : minMaxValue;\n }\n }\n }\n }\n return output;\n}\nexport function maxPoolPositions(xValues, xShape, dtype, convInfo, flattenPositions = false, includeBatchInIndex = false) {\n const maxPositions = buffer(convInfo.outShape, 'int32');\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n const xBuf = buffer(xShape, dtype, xValues);\n for (let b = 0; b < convInfo.batchSize; ++b) {\n for (let d = 0; d < convInfo.inChannels; ++d) {\n for (let yR = 0; yR < convInfo.outHeight; ++yR) {\n const xRCorner = yR * strideHeight - padTop;\n let xRMin = xRCorner;\n while (xRMin < 0) {\n xRMin += dilationHeight;\n }\n // const xRMin = Math.max(0, xRCorner);\n const xRMax = Math.min(convInfo.inHeight, effectiveFilterHeight + xRCorner);\n for (let yC = 0; yC < convInfo.outWidth; ++yC) {\n const xCCorner = yC * strideWidth - padLeft;\n let xCMin = xCCorner;\n while (xCMin < 0) {\n xCMin += dilationWidth;\n }\n const xCMax = Math.min(convInfo.inWidth, effectiveFilterWidth + xCCorner);\n let maxValue = Number.NEGATIVE_INFINITY;\n let maxPosition = -1;\n for (let xR = xRMin; xR < xRMax; xR += dilationHeight) {\n const wR = xR - xRCorner;\n for (let xC = xCMin; xC < xCMax; xC += dilationWidth) {\n const wC = xC - xCCorner;\n const pixel = xBuf.get(b, xR, xC, d);\n if (pixel > maxValue) {\n maxValue = pixel;\n if (flattenPositions) {\n maxPosition = includeBatchInIndex ?\n ((b * convInfo.inHeight + xR) * convInfo.inWidth + xC) *\n convInfo.inChannels +\n d :\n (xR * convInfo.inWidth + xC) * 
convInfo.inChannels + d;\n }\n else {\n maxPosition = wR * effectiveFilterWidth + wC;\n }\n }\n }\n }\n maxPositions.set(maxPosition, b, yR, yC, d);\n }\n }\n }\n }\n return maxPositions;\n}\n//# sourceMappingURL=pool_utils.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { AvgPool, backend_util, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { pool } from '../utils/pool_utils';\nimport { identity } from './Identity';\nexport function avgPool(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n assertNotComplex(x, 'avgPool');\n const { filterSize, strides, pad, dimRoundingMode } = attrs;\n const dilations = 1;\n util.assert(backend_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in avgPool: Either strides or dilations must be 1. ' +\n `Got strides ${strides} and dilations '${dilations}'`);\n const convInfo = backend_util.computePool2DInfo(x.shape, filterSize, strides, dilations, pad, dimRoundingMode);\n let res;\n if (convInfo.filterWidth === 1 && convInfo.filterHeight === 1 &&\n util.arraysEqual(convInfo.inShape, convInfo.outShape)) {\n res = identity({ inputs: { x }, backend });\n }\n else {\n const xValues = backend.data.get(x.dataId).values;\n const strides = util.computeStrides(x.shape);\n const buffer = pool(xValues, x.shape, x.dtype, strides, convInfo, 'avg');\n res = backend.makeTensorInfo(convInfo.outShape, x.dtype, buffer.values);\n }\n return res;\n}\nexport const avgPoolConfig = {\n kernelName: AvgPool,\n backendName: 'cpu',\n kernelFunc: avgPool\n};\n//# sourceMappingURL=AvgPool.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { AvgPoolBackprop, backend_util, buffer } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function avgPoolBackprop(args) {\n const { inputs, backend, attrs } = args;\n const { dy, input } = inputs;\n const x = input;\n assertNotComplex([dy, input], 'avgPoolBackprop');\n const { filterSize, strides, pad } = attrs;\n const convInfo = backend_util.computePool2DInfo(x.shape, filterSize, strides, 1 /* dilations */, pad);\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padLeft = effectiveFilterWidth - 1 - convInfo.padInfo.left;\n const padTop = effectiveFilterHeight - 1 - convInfo.padInfo.top;\n const dx = buffer(x.shape, 'float32');\n const avgMultiplier = 1 / (filterHeight * filterWidth);\n const dyData = backend.data.get(dy.dataId).values;\n const dyBuf = buffer(dy.shape, 'float32', dyData);\n for (let b = 0; b < convInfo.batchSize; ++b) {\n for (let d = 0; d < convInfo.inChannels; ++d) {\n for (let dxR = 0; dxR < convInfo.inHeight; ++dxR) {\n for (let dxC = 0; dxC < convInfo.inWidth; ++dxC) {\n // Shader code begins.\n const dyRCorner = dxR - padTop;\n const dyCCorner = dxC - padLeft;\n let dotProd = 0;\n for (let wR = 0; wR < effectiveFilterHeight; wR += dilationHeight) {\n const dyR = (dyRCorner + wR) / strideHeight;\n if (dyR < 0 || dyR >= convInfo.outHeight ||\n Math.floor(dyR) !== dyR) {\n continue;\n }\n for (let wC = 0; wC < effectiveFilterWidth; wC += dilationWidth) {\n const dyC = (dyCCorner + wC) / strideWidth;\n if (dyC < 0 || dyC >= convInfo.outWidth ||\n Math.floor(dyC) !== dyC) {\n continue;\n }\n const pixel = dyBuf.get(b, dyR, dyC, d);\n dotProd += pixel;\n }\n }\n dx.set(dotProd * avgMultiplier, b, dxR, dxC, d);\n }\n }\n }\n }\n return backend.makeTensorInfo(dx.shape, dx.dtype, dx.values);\n}\nexport const avgPoolBackpropConfig = {\n kernelName: AvgPoolBackprop,\n backendName: 'cpu',\n kernelFunc: avgPoolBackprop\n};\n//# sourceMappingURL=AvgPoolBackprop.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { FusedBatchNorm, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function batchNorm(args) {\n const { inputs, backend, attrs } = args;\n const { x, scale, offset, mean, variance } = inputs;\n util.assert(mean.shape.length === variance.shape.length, () => 'Batch normalization gradient requires mean and variance to have ' +\n 'equal ranks.');\n util.assert(offset == null || mean.shape.length === offset.shape.length, () => 'Batch normalization gradient requires mean and offset to have ' +\n 'equal ranks.');\n util.assert(scale == null || mean.shape.length === scale.shape.length, () => 'Batch normalization gradient requires mean and scale to have ' +\n 'equal ranks.');\n assertNotComplex([x, mean, variance, scale, offset], 'batchNorm');\n let { varianceEpsilon } = attrs;\n if (varianceEpsilon == null) {\n varianceEpsilon = 0.001;\n }\n const xVals = backend.data.get(x.dataId).values;\n const mVals = backend.data.get(mean.dataId).values;\n const varVals = backend.data.get(variance.dataId).values;\n const sVals = scale ? backend.data.get(scale.dataId).values :\n new Float32Array([1]);\n const offVals = offset ?\n backend.data.get(offset.dataId).values :\n new Float32Array([0]);\n const outVals = new Float32Array(xVals.length);\n const offValsLength = offVals.length;\n const sValsLength = sVals.length;\n const varValsLength = varVals.length;\n const mValsLength = mVals.length;\n let offi = 0;\n let mi = 0;\n let si = 0;\n let vi = 0;\n for (let i = 0; i < xVals.length; ++i) {\n outVals[i] = offVals[offi++] +\n (xVals[i] - mVals[mi++]) * sVals[si++] /\n Math.sqrt(varVals[vi++] + varianceEpsilon);\n if (offi >= offValsLength) {\n offi = 0;\n }\n if (mi >= mValsLength) {\n mi = 0;\n }\n if (si >= sValsLength) {\n si = 0;\n }\n if (vi >= varValsLength) {\n vi = 0;\n }\n }\n return backend.makeTensorInfo(x.shape, x.dtype, outVals);\n}\nexport const batchNormConfig = {\n kernelName: FusedBatchNorm,\n backendName: 'cpu',\n kernelFunc: batchNorm,\n};\n//# sourceMappingURL=BatchNorm.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ClipByValue } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const clip = unaryKernelFunc(ClipByValue, (xi, attrs) => {\n const clipAttrs = attrs;\n if (xi > clipAttrs.clipValueMax) {\n return clipAttrs.clipValueMax;\n }\n return xi < clipAttrs.clipValueMin ? clipAttrs.clipValueMin : xi;\n});\nexport const clipConfig = {\n kernelName: ClipByValue,\n backendName: 'cpu',\n kernelFunc: clip,\n};\n//# sourceMappingURL=Clip.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Imag } from '@tensorflow/tfjs-core';\nexport function imag(args) {\n const { inputs, backend } = args;\n const { input } = inputs;\n const imag = backend.data.get(input.dataId).complexTensorInfos.imag;\n const imagVal = backend.data.get(imag.dataId).values;\n // When complex tensor is disposed, its underlying parts will be disposed too.\n // Make new tensor out of the imag value of the complex. This makes sure the\n // value is still accessible even if complex tensor is disposed.\n return backend.makeTensorInfo(imag.shape, imag.dtype, imagVal);\n}\nexport const imagConfig = {\n kernelName: Imag,\n backendName: 'cpu',\n kernelFunc: imag\n};\n//# sourceMappingURL=Imag.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Concat, util } from '@tensorflow/tfjs-core';\nimport { complex } from './Complex';\nimport { imag } from './Imag';\nimport { real } from './Real';\nimport { reshape } from './Reshape';\nexport function concat(args) {\n const { inputs, backend, attrs } = args;\n const { axis } = attrs;\n const $axis = util.parseAxisParam(axis, inputs[0].shape)[0];\n let outShape = backend_util.computeOutShape(inputs.map(t => t.shape), $axis);\n if (util.sizeFromShape(outShape) === 0) {\n return backend.makeTensorInfo(outShape, inputs[0].dtype, []);\n }\n // Keep only non-empty tensors (ignore tensors with 0 in their shape).\n const $inputs = inputs.filter(t => util.sizeFromShape(t.shape) > 0);\n if ($inputs.length === 1) {\n return $inputs[0];\n }\n const shapes = $inputs.map(t => t.shape);\n backend_util.assertParamsConsistent(shapes, $axis);\n if ($inputs[0].dtype === 'complex64') {\n const reals = $inputs.map((t) => real({ inputs: { input: t }, backend }));\n const imags = $inputs.map((t) => imag({ inputs: { input: t }, backend }));\n const realConcated = concat({ inputs: reals, backend, attrs: { axis: $axis } });\n const imagConcated = concat({ inputs: imags, backend, attrs: { axis: $axis } });\n const result = complex({ inputs: { real: realConcated, imag: imagConcated }, backend });\n reals.forEach(r => backend.disposeIntermediateTensorInfo(r));\n imags.forEach(i => backend.disposeIntermediateTensorInfo(i));\n backend.disposeIntermediateTensorInfo(realConcated);\n backend.disposeIntermediateTensorInfo(imagConcated);\n return result;\n }\n // Any concat of n-dimensional tensors across any axis can be reduced to\n // a concatenation of two-dimensional tensors across the axis 1 by first\n // partitioning the axes of the original tensors into those less than the\n // axis to be concatenated and the rest. 
Then reshape the tensors\n // into a two-dimensional tensor by collapsing these two sets of axes and\n // concatenate the resulting matrices across the axis 1, finally reshaping\n // the result to have the proper shape.\n const inputs2D = $inputs.map(t => {\n const innerSize = util.sizeFromShape(t.shape.slice($axis));\n const shape = [-1, innerSize];\n return reshape({ inputs: { x: t }, backend, attrs: { shape } });\n });\n // Concats 2d tensors along axis=1.\n outShape =\n backend_util.computeOutShape(inputs2D.map(t => t.shape), 1 /* axis */);\n const outVals = util.getTypedArrayFromDType($inputs[0].dtype, util.sizeFromShape(outShape));\n if (inputs2D[0].shape[0] === 1) {\n // Use built-in TypedArray.set() method for speed.\n let offset = 0;\n inputs2D.forEach(t => {\n const val = backend.data.get(t.dataId).values;\n const size = util.sizeFromShape(t.shape);\n outVals.set(val, offset);\n offset += size;\n });\n }\n else {\n let colOffset = 0;\n inputs2D.forEach(t => {\n const tVals = backend.data.get(t.dataId).values;\n let tIdx = 0;\n for (let row = 0; row < t.shape[0]; ++row) {\n const resIdx = row * outShape[1] + colOffset;\n for (let col = 0; col < t.shape[1]; ++col) {\n outVals[resIdx + col] = tVals[tIdx++];\n }\n }\n colOffset += t.shape[1];\n });\n }\n const finalOutShape = backend_util.computeOutShape($inputs.map(t => t.shape), $axis);\n const outInfo = backend.makeTensorInfo(finalOutShape, inputs[0].dtype, outVals);\n inputs2D.forEach(t => backend.disposeIntermediateTensorInfo(t));\n return outInfo;\n}\nexport const concatConfig = {\n kernelName: Concat,\n backendName: 'cpu',\n kernelFunc: concat\n};\n//# sourceMappingURL=Concat.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Conv2D, TensorBuffer, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function conv2D(args) {\n const { inputs, backend, attrs } = args;\n const { x, filter } = inputs;\n const { strides, pad, dataFormat, dilations, dimRoundingMode } = attrs;\n assertNotComplex([x, filter], 'conv2d');\n const $dataFormat = backend_util.convertConv2DDataFormat(dataFormat);\n const convInfo = backend_util.computeConv2DInfo(x.shape, filter.shape, strides, dilations, pad, dimRoundingMode, false /* depthwise */, $dataFormat);\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const padLeft = convInfo.padInfo.left;\n const padTop = convInfo.padInfo.top;\n const isChannelsLast = convInfo.dataFormat === 'channelsLast';\n const y = new TensorBuffer(convInfo.outShape, x.dtype);\n const xStrides = util.computeStrides(x.shape);\n const filterStrides = util.computeStrides(filter.shape);\n const xBatchStride = xStrides[0];\n const xRowStride = isChannelsLast ? 
xStrides[1] : xStrides[2];\n const xColStride = isChannelsLast ? xStrides[2] : 1;\n const xChannelStride = isChannelsLast ? 1 : xStrides[1];\n const yBatchStride = y.strides[0];\n const yRowStride = isChannelsLast ? y.strides[1] : y.strides[2];\n const yColStride = isChannelsLast ? y.strides[2] : 1;\n const yChannelStride = isChannelsLast ? 1 : y.strides[1];\n const xVals = backend.data.get(x.dataId).values;\n const wVals = backend.data.get(filter.dataId).values;\n const yVals = y.values;\n for (let b = 0; b < convInfo.batchSize; ++b) {\n const xOffset1 = b * xBatchStride;\n const yOffset1 = b * yBatchStride;\n for (let yR = 0; yR < convInfo.outHeight; ++yR) {\n const yOffset2 = yOffset1 + yR * yRowStride;\n const xRCorner = yR * convInfo.strideHeight - padTop;\n for (let wR = 0; wR < filterHeight; ++wR) {\n const xR = xRCorner + wR * dilationHeight;\n if (xR < 0 || xR >= convInfo.inHeight) {\n continue;\n }\n const wOffset1 = wR * filterStrides[0];\n const xOffset2 = xOffset1 + xR * xRowStride;\n for (let yC = 0; yC < convInfo.outWidth; ++yC) {\n const yOffset3 = yOffset2 + yC * yColStride;\n const xCCorner = yC * convInfo.strideWidth - padLeft;\n for (let wC = 0; wC < filterWidth; ++wC) {\n const xC = xCCorner + wC * dilationWidth;\n if (xC < 0 || xC >= convInfo.inWidth) {\n continue;\n }\n const wOffset2 = wOffset1 + wC * filterStrides[1];\n const xOffset3 = xOffset2 + xC * xColStride;\n let wOffset3 = wOffset2;\n for (let d1 = 0; d1 < convInfo.inChannels; ++d1) {\n const xVal = xVals[xOffset3 + d1 * xChannelStride];\n for (let d2 = 0; d2 < convInfo.outChannels; ++d2) {\n yVals[yOffset3 + d2 * yChannelStride] +=\n xVal * wVals[wOffset3 + d2];\n }\n wOffset3 += convInfo.outChannels;\n }\n }\n }\n }\n }\n }\n return backend.makeTensorInfo(y.shape, y.dtype, yVals);\n}\nexport const conv2DConfig = {\n kernelName: Conv2D,\n backendName: 'cpu',\n kernelFunc: conv2D\n};\n//# sourceMappingURL=Conv2D.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Conv2DBackpropFilter, TensorBuffer } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function conv2DBackpropFilter(args) {\n const { inputs, backend, attrs } = args;\n const { x, dy } = inputs;\n const { strides, pad, dataFormat, dimRoundingMode, filterShape } = attrs;\n assertNotComplex([x, dy], 'conv2dBackpropFilter');\n const $dataFormat = backend_util.convertConv2DDataFormat(dataFormat);\n const convInfo = backend_util.computeConv2DInfo(x.shape, filterShape, strides, 1 /* dilations */, pad, dimRoundingMode, false /* depthwise */, $dataFormat);\n const { strideHeight, strideWidth, filterHeight, filterWidth } = convInfo;\n const isChannelsLast = convInfo.dataFormat === 'channelsLast';\n const dW = new TensorBuffer(convInfo.filterShape, 'float32');\n const leftPad = convInfo.padInfo.left;\n const topPad = convInfo.padInfo.top;\n const xVals = backend.data.get(x.dataId).values;\n const dyVals = backend.data.get(dy.dataId).values;\n const xBuf = new TensorBuffer(x.shape, x.dtype, xVals);\n const dyBuf = new TensorBuffer(dy.shape, dy.dtype, dyVals);\n for (let wR = 0; wR < filterHeight; ++wR) {\n const yRMin = Math.max(0, Math.ceil((topPad - wR) / strideHeight));\n const yRMax = Math.min(convInfo.outHeight, (convInfo.inHeight + topPad - wR) / strideHeight);\n for (let wC = 0; wC < filterWidth; ++wC) {\n const yCMin = Math.max(0, Math.ceil((leftPad - wC) / strideWidth));\n const yCMax = Math.min(convInfo.outWidth, (convInfo.inWidth + leftPad - wC) / strideWidth);\n for (let d1 = 0; d1 < convInfo.inChannels; ++d1) {\n for (let d2 = 0; d2 < convInfo.outChannels; ++d2) {\n let dotProd = 0;\n for (let b = 0; b < convInfo.batchSize; ++b) {\n for (let yR = yRMin; yR < yRMax; ++yR) {\n const xR = wR + yR * strideHeight - topPad;\n for (let yC = yCMin; yC < yCMax; ++yC) {\n const xC = wC + yC * strideWidth - leftPad;\n if (isChannelsLast) {\n dotProd += xBuf.get(b, xR, xC, d1) *\n dyBuf.get(b, yR, yC, d2);\n }\n else {\n dotProd += xBuf.get(b, d1, xR, xC) *\n dyBuf.get(b, d2, yR, yC);\n }\n }\n }\n }\n dW.set(dotProd, wR, wC, d1, d2);\n }\n }\n }\n }\n return backend.makeTensorInfo(dW.shape, dW.dtype, dW.values);\n}\nexport const conv2DBackpropFilterConfig = {\n kernelName: Conv2DBackpropFilter,\n backendName: 'cpu',\n kernelFunc: conv2DBackpropFilter\n};\n//# sourceMappingURL=Conv2DBackpropFilter.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Conv2DBackpropInput, TensorBuffer, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function conv2DBackpropInput(args) {\n const { inputs, backend, attrs } = args;\n const { dy, filter } = inputs;\n const { inputShape, strides, pad, dataFormat, dimRoundingMode } = attrs;\n assertNotComplex([dy, filter], 'conv2dBackpropInput');\n const filterStrides = util.computeStrides(filter.shape);\n const dyStrides = util.computeStrides(dy.shape);\n let $dataFormat = backend_util.convertConv2DDataFormat(dataFormat);\n const convInfo = backend_util.computeConv2DInfo(inputShape, filter.shape, strides, 1 /* dilations */, pad, dimRoundingMode, false, $dataFormat);\n const dx = new TensorBuffer(convInfo.inShape, 'float32');\n const dxValues = dx.values;\n const dyValues = backend.data.get(dy.dataId).values;\n const fltValues = backend.data.get(filter.dataId).values;\n const [fltS0, fltS1, fltS2] = filterStrides;\n const { batchSize, filterHeight, filterWidth, inChannels, inHeight, inWidth, outChannels, outHeight, outWidth, strideHeight, strideWidth } = convInfo;\n $dataFormat = convInfo.dataFormat;\n const topPad = filterHeight - 1 - convInfo.padInfo.top;\n const leftPad = filterWidth - 1 - convInfo.padInfo.left;\n const isChannelsLast = $dataFormat === 'channelsLast';\n const xBatchStride = dx.strides[0];\n const xRowStride = isChannelsLast ? dx.strides[1] : dx.strides[2];\n const xColStride = isChannelsLast ? dx.strides[2] : 1;\n const xChannelStride = isChannelsLast ? 1 : dx.strides[1];\n const yBatchStride = dyStrides[0];\n const yRowStride = isChannelsLast ? dyStrides[1] : dyStrides[2];\n const yColStride = isChannelsLast ? dyStrides[2] : 1;\n const yChannelStride = isChannelsLast ? 
1 : dyStrides[1];\n for (let b = 0; b < batchSize; ++b) {\n for (let d1 = 0; d1 < inChannels; ++d1) {\n for (let xR = 0; xR < inHeight; ++xR) {\n const xRCorner = xR - topPad;\n const xRMin = Math.max(0, Math.ceil(xRCorner / strideHeight));\n const yRMax = Math.min(outHeight, (filterHeight + xRCorner) / strideHeight);\n for (let xC = 0; xC < inWidth; ++xC) {\n const xCCorner = xC - leftPad;\n const xCMin = Math.max(0, Math.ceil(xCCorner / strideWidth));\n const yCMax = Math.min(outWidth, (filterWidth + xCCorner) / strideWidth);\n let dotProd = 0;\n for (let yR = xRMin; yR < yRMax; ++yR) {\n const wR = yR * strideHeight - xRCorner;\n for (let yC = xCMin; yC < yCMax; ++yC) {\n const wC = yC * strideWidth - xCCorner;\n const dyOffset = yBatchStride * b + yRowStride * yR + yColStride * yC;\n const fltOffset = fltS0 * (filterHeight - 1 - wR) +\n fltS1 * (filterWidth - 1 - wC) + fltS2 * d1;\n for (let d2 = 0; d2 < outChannels; ++d2) {\n const pixel = dyValues[dyOffset + yChannelStride * d2];\n const weight = fltValues[fltOffset + d2];\n dotProd += pixel * weight;\n }\n }\n }\n const dxOffset = xBatchStride * b + xRowStride * xR +\n xColStride * xC + xChannelStride * d1;\n dxValues[dxOffset] = dotProd;\n }\n }\n }\n }\n return backend.makeTensorInfo(dx.shape, dx.dtype, dx.values);\n}\nexport const conv2DBackpropInputConfig = {\n kernelName: Conv2DBackpropInput,\n backendName: 'cpu',\n kernelFunc: conv2DBackpropInput\n};\n//# sourceMappingURL=Conv2DBackpropInput.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Conv3D, TensorBuffer, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function conv3D(args) {\n const { inputs, backend, attrs } = args;\n const { x, filter } = inputs;\n const { strides, pad, dilations } = attrs;\n assertNotComplex([x, filter], 'conv3d');\n const convInfo = backend_util.computeConv3DInfo(x.shape, filter.shape, strides, dilations, pad);\n const { filterDepth, filterHeight, filterWidth, dilationDepth, dilationHeight, dilationWidth, padInfo } = convInfo;\n const padFront = padInfo.front;\n const padLeft = padInfo.left;\n const padTop = padInfo.top;\n const y = new TensorBuffer(convInfo.outShape, x.dtype);\n const xVals = backend.data.get(x.dataId).values;\n const wVals = backend.data.get(filter.dataId).values;\n const yVals = y.values;\n const xStrides = util.computeStrides(x.shape);\n const filterStrides = util.computeStrides(filter.shape);\n for (let b = 0; b < convInfo.batchSize; ++b) {\n const xOffset1 = b * xStrides[0];\n const yOffset1 = b * y.strides[0];\n for (let yF = 0; yF < convInfo.outDepth; ++yF) {\n const yOffset2 = yOffset1 + yF * y.strides[1];\n const xFCorner = yF * convInfo.strideDepth - padFront;\n for (let wF = 0; wF < filterDepth; ++wF) {\n const xF = xFCorner + wF * dilationDepth;\n if (xF < 0 || xF >= 
convInfo.inDepth) {\n continue;\n }\n const wOffset1 = wF * filterStrides[0];\n const xOffset2 = xOffset1 + xF * xStrides[1];\n for (let yR = 0; yR < convInfo.outHeight; ++yR) {\n const yOffset3 = yOffset2 + yR * y.strides[2];\n const xRCorner = yR * convInfo.strideHeight - padTop;\n for (let wR = 0; wR < filterHeight; ++wR) {\n const xR = xRCorner + wR * dilationHeight;\n if (xR < 0 || xR >= convInfo.inHeight) {\n continue;\n }\n const wOffset2 = wOffset1 + wR * filterStrides[1];\n const xOffset3 = xOffset2 + xR * xStrides[2];\n for (let yC = 0; yC < convInfo.outWidth; ++yC) {\n const yOffset4 = yOffset3 + yC * convInfo.outChannels;\n const xCCorner = yC * convInfo.strideWidth - padLeft;\n for (let wC = 0; wC < filterWidth; ++wC) {\n const xC = xCCorner + wC * dilationWidth;\n if (xC < 0 || xC >= convInfo.inWidth) {\n continue;\n }\n const wOffset3 = wOffset2 + wC * filterStrides[2];\n const xOffset4 = xOffset3 + xC * convInfo.inChannels;\n let wOffset4 = wOffset3;\n for (let d1 = 0; d1 < convInfo.inChannels; ++d1) {\n const xVal = xVals[xOffset4 + d1];\n for (let d2 = 0; d2 < convInfo.outChannels; ++d2) {\n yVals[yOffset4 + d2] += xVal * wVals[wOffset4 + d2];\n }\n wOffset4 += convInfo.outChannels;\n }\n }\n }\n }\n }\n }\n }\n }\n return backend.makeTensorInfo(y.shape, y.dtype, y.values);\n}\nexport const conv3DConfig = {\n kernelName: Conv3D,\n backendName: 'cpu',\n kernelFunc: conv3D\n};\n//# sourceMappingURL=Conv3D.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Conv3DBackpropFilterV2, TensorBuffer, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function conv3DBackpropFilterV2(args) {\n const { inputs, backend, attrs } = args;\n const { x, dy } = inputs;\n const { strides, pad, filterShape } = attrs;\n assertNotComplex([x, dy], 'conv3dBackpropFilterV2');\n const xStrides = util.computeStrides(x.shape);\n const dyStrides = util.computeStrides(dy.shape);\n const convInfo = backend_util.computeConv3DInfo(x.shape, filterShape, strides, 1 /* dilations */, pad);\n const strideDepth = convInfo.strideDepth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const filterDepth = convInfo.filterDepth;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const dw = new TensorBuffer(convInfo.filterShape, 'float32');\n const dwValues = dw.values;\n const [dwS0, dwS1, dwS2, dwS3] = dw.strides;\n const dyValues = backend.data.get(dy.dataId).values;\n const [dyS0, dyS1, dyS2, dyS3] = dyStrides;\n const xValues = backend.data.get(x.dataId).values;\n const [xS0, xS1, xS2, xS3] = xStrides;\n const frontPad = convInfo.padInfo.front;\n const leftPad = convInfo.padInfo.left;\n const topPad = convInfo.padInfo.top;\n for (let wF = 0; wF < filterDepth; ++wF) {\n const yFMin = Math.max(0, 
Math.ceil((frontPad - wF) / strideDepth));\n const yFMax = Math.min(convInfo.outDepth, (convInfo.inDepth + frontPad - wF) / strideDepth);\n const wOffset1 = wF * dwS0;\n for (let wR = 0; wR < filterHeight; ++wR) {\n const yRMin = Math.max(0, Math.ceil((topPad - wR) / strideHeight));\n const yRMax = Math.min(convInfo.outHeight, (convInfo.inHeight + topPad - wR) / strideHeight);\n const wOffset2 = wR * dwS1 + wOffset1;\n for (let wC = 0; wC < filterWidth; ++wC) {\n const yCMin = Math.max(0, Math.ceil((leftPad - wC) / strideWidth));\n const yCMax = Math.min(convInfo.outWidth, (convInfo.inWidth + leftPad - wC) / strideWidth);\n const wOffset3 = wC * dwS2 + wOffset2;\n for (let d1 = 0; d1 < convInfo.inChannels; ++d1) {\n const wOffset4 = d1 * dwS3 + wOffset3;\n for (let d2 = 0; d2 < convInfo.outChannels; ++d2) {\n let dotProd = 0;\n for (let b = 0; b < convInfo.batchSize; ++b) {\n const xOffset1 = b * xS0;\n const yOffset1 = b * dyS0;\n for (let yF = yFMin; yF < yFMax; ++yF) {\n const xF = wF + yF * strideDepth - frontPad;\n const xOffset2 = xF * xS1 + xOffset1;\n const yOffset2 = yF * dyS1 + yOffset1;\n for (let yR = yRMin; yR < yRMax; ++yR) {\n const xR = wR + yR * strideHeight - topPad;\n const xOffset3 = xR * xS2 + xOffset2;\n const yOffset3 = yR * dyS2 + yOffset2;\n for (let yC = yCMin; yC < yCMax; ++yC) {\n const xC = wC + yC * strideWidth - leftPad;\n const xOffset4 = xC * xS3 + xOffset3;\n const yOffset4 = yC * dyS3 + yOffset3;\n dotProd += xValues[xOffset4 + d1] * dyValues[yOffset4 + d2];\n }\n }\n }\n }\n dwValues[wOffset4 + d2] = dotProd;\n }\n }\n }\n }\n }\n return backend.makeTensorInfo(dw.shape, dw.dtype, dw.values);\n}\nexport const conv3DBackpropFilterV2Config = {\n kernelName: Conv3DBackpropFilterV2,\n backendName: 'cpu',\n kernelFunc: conv3DBackpropFilterV2\n};\n//# sourceMappingURL=Conv3DBackpropFilterV2.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Conv3DBackpropInputV2, TensorBuffer, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function conv3DBackpropInputV2(args) {\n const { inputs, backend, attrs } = args;\n const { dy, filter } = inputs;\n const { pad, strides, inputShape } = attrs;\n assertNotComplex([dy], 'conv3dBackpropInputV2');\n const dyStrides = util.computeStrides(dy.shape);\n const filterStrides = util.computeStrides(filter.shape);\n const convInfo = backend_util.computeConv3DInfo(inputShape, filter.shape, strides, 1 /* dilations */, pad);\n const dx = new TensorBuffer(convInfo.inShape, 'float32');\n const dxValues = dx.values;\n const [dxS0, dxS1, dxS2, dxS3] = dx.strides;\n const dyValues = backend.data.get(dy.dataId).values;\n const [dyS0, dyS1, dyS2, dyS3] = dyStrides;\n const fltValues = backend.data.get(filter.dataId).values;\n const [fltS0, fltS1, fltS2, fltS3] = filterStrides;\n const { batchSize, filterDepth, filterHeight, filterWidth, inChannels, inDepth, inHeight, inWidth, outChannels, outDepth, outHeight, outWidth, strideDepth, strideHeight, strideWidth } = convInfo;\n const frontPad = filterDepth - 1 - convInfo.padInfo.front;\n const topPad = filterHeight - 1 - convInfo.padInfo.top;\n const leftPad = filterWidth - 1 - convInfo.padInfo.left;\n for (let b = 0; b < batchSize; ++b) {\n for (let d1 = 0; d1 < inChannels; ++d1) {\n // Frames of depth\n for (let xF = 0; xF < inDepth; ++xF) {\n const xFCorner = xF - frontPad;\n const xFMin = Math.max(0, Math.ceil(xFCorner / strideDepth));\n const yFMax = Math.min(outDepth, (filterDepth + xFCorner) / strideDepth);\n // Rows as per standard 2d matrix notation\n for (let xR = 0; xR < inHeight; ++xR) {\n const xRCorner = xR - topPad;\n const xRMin = Math.max(0, Math.ceil(xRCorner / strideHeight));\n const yRMax = Math.min(outHeight, (filterHeight + xRCorner) / strideHeight);\n // Columns as per standard 2d matrix notation\n for (let xC = 0; xC < inWidth; ++xC) {\n const xCCorner = xC - leftPad;\n const xCMin = Math.max(0, Math.ceil(xCCorner / strideWidth));\n const yCMax = Math.min(outWidth, (filterWidth + xCCorner) / strideWidth);\n let dotProd = 0;\n for (let yF = xFMin; yF < yFMax; ++yF) {\n const wF = yF * strideDepth - xFCorner;\n for (let yR = xRMin; yR < yRMax; ++yR) {\n const wR = yR * strideHeight - xRCorner;\n for (let yC = xCMin; yC < yCMax; ++yC) {\n const wC = yC * strideWidth - xCCorner;\n const dyOffset = dyS0 * b + dyS1 * yF + dyS2 * yR + dyS3 * yC;\n const fltOffset = fltS0 * (filterDepth - 1 - wF) +\n fltS1 * (filterHeight - 1 - wR) +\n fltS2 * (filterWidth - 1 - wC) + fltS3 * d1;\n for (let d2 = 0; d2 < outChannels; ++d2) {\n const pixel = dyValues[dyOffset + d2];\n const weight = fltValues[fltOffset + d2];\n dotProd += pixel * weight;\n }\n }\n }\n }\n dxValues[dxS0 * b + dxS1 * xF + dxS2 * xR + dxS3 * xC + d1] =\n 
dotProd;\n }\n }\n }\n }\n }\n return backend.makeTensorInfo(dx.shape, dx.dtype, dx.values);\n}\nexport const conv3DBackpropInputV2Config = {\n kernelName: Conv3DBackpropInputV2,\n backendName: 'cpu',\n kernelFunc: conv3DBackpropInputV2\n};\n//# sourceMappingURL=Conv3DBackpropInputV2.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Cos } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const cos = unaryKernelFunc(Cos, (xi) => Math.cos(xi));\nexport const cosConfig = {\n kernelName: Cos,\n backendName: 'cpu',\n kernelFunc: cos,\n};\n//# sourceMappingURL=Cos.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Cosh } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const cosh = unaryKernelFunc(Cosh, (xi) => Math.cosh(xi));\nexport const coshConfig = {\n kernelName: Cosh,\n backendName: 'cpu',\n kernelFunc: cosh,\n};\n//# sourceMappingURL=Cosh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, DepthwiseConv2dNative, TensorBuffer, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function depthwiseConv2dNative(args) {\n const { inputs, backend, attrs } = args;\n const { x, filter } = inputs;\n const { strides, pad, dilations, dimRoundingMode } = attrs;\n assertNotComplex([x, filter], 'depthwiseConv2DNative');\n const xStrides = util.computeStrides(x.shape);\n const filterStrides = util.computeStrides(filter.shape);\n let $dilations = dilations;\n if ($dilations == null) {\n $dilations = [1, 1];\n }\n util.assert(backend_util.eitherStridesOrDilationsAreOne(strides, $dilations), () => 'Error in depthwiseConv2d: Either strides or dilations must be ' +\n `1. Got strides ${strides} and dilations '${$dilations}'`);\n const convInfo = backend_util.computeConv2DInfo(x.shape, filter.shape, strides, $dilations, pad, dimRoundingMode, true /* depthwise */);\n const { filterHeight, filterWidth, dilationHeight, dilationWidth, padInfo } = convInfo;\n const padLeft = padInfo.left;\n const padTop = padInfo.top;\n const chMul = convInfo.outChannels / convInfo.inChannels;\n const y = new TensorBuffer(convInfo.outShape, x.dtype);\n const xVals = backend.data.get(x.dataId).values;\n const wVals = backend.data.get(filter.dataId).values;\n const yVals = y.values;\n for (let b = 0; b < convInfo.batchSize; ++b) {\n const xOffset1 = b * xStrides[0];\n const yOffset1 = b * y.strides[0];\n for (let yR = 0; yR < convInfo.outHeight; ++yR) {\n const yOffset2 = yOffset1 + yR * y.strides[1];\n const xRCorner = yR * convInfo.strideHeight - padLeft;\n for (let wR = 0; wR < filterHeight; ++wR) {\n const xR = xRCorner + wR * dilationHeight;\n if (xR < 0 || xR >= convInfo.inHeight) {\n continue;\n }\n const wOffset1 = wR * filterStrides[0];\n const xOffset2 = xOffset1 + xR * xStrides[1];\n for (let yC = 0; yC < convInfo.outWidth; ++yC) {\n const yOffset3 = yOffset2 + yC * y.strides[2];\n const xCCorner = yC * convInfo.strideWidth - padTop;\n for (let wC = 0; wC < filterWidth; ++wC) {\n const xC = xCCorner + wC * dilationWidth;\n if (xC < 0 || xC >= convInfo.inWidth) {\n continue;\n }\n const wOffset2 = wOffset1 + wC * filterStrides[1];\n const xOffset3 = xOffset2 + xC * convInfo.inChannels;\n let yOffset4 = yOffset3;\n let wOffset3 = wOffset2;\n for (let d1 = 0; d1 < convInfo.inChannels; ++d1) {\n const xVal = xVals[xOffset3 + d1];\n for (let q = 0; q < chMul; ++q) {\n yVals[yOffset4 + q] += xVal * wVals[wOffset3 + q];\n }\n yOffset4 += chMul;\n wOffset3 += chMul;\n }\n }\n }\n }\n }\n }\n return backend.makeTensorInfo(y.shape, y.dtype, y.values);\n}\nexport const depthwiseConv2dNativeConfig = {\n kernelName: DepthwiseConv2dNative,\n backendName: 'cpu',\n kernelFunc: depthwiseConv2dNative\n};\n//# sourceMappingURL=DepthwiseConv2dNative.js.map", "/**\n * @license\n * Copyright 2020 Google 
LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, DepthwiseConv2dNativeBackpropFilter, TensorBuffer } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function depthwiseConv2dNativeBackpropFilter(args) {\n const { inputs, backend, attrs } = args;\n const { x, dy } = inputs;\n const { strides, dilations, pad, dimRoundingMode, filterShape } = attrs;\n assertNotComplex([x, dy], 'depthwiseConv2dNativeBackpropFilter');\n const convInfo = backend_util.computeConv2DInfo(x.shape, filterShape, strides, dilations, pad, dimRoundingMode, true /* depthwise */);\n const { strideHeight, strideWidth, filterHeight, filterWidth } = convInfo;\n const dW = new TensorBuffer(convInfo.filterShape, 'float32');\n const leftPad = convInfo.padInfo.left;\n const topPad = convInfo.padInfo.top;\n const chMul = convInfo.outChannels / convInfo.inChannels;\n const xVals = backend.data.get(x.dataId).values;\n const xBuf = new TensorBuffer(x.shape, x.dtype, xVals);\n const dyVals = backend.data.get(dy.dataId).values;\n const dyBuf = new TensorBuffer(dy.shape, dy.dtype, dyVals);\n for (let wR = 0; wR < filterHeight; ++wR) {\n const yRMin = Math.max(0, Math.ceil((topPad - wR) / strideHeight));\n const yRMax = Math.min(convInfo.outHeight, (convInfo.inHeight + topPad - wR) / strideHeight);\n for (let wC = 0; wC < filterWidth; ++wC) {\n const yCMin = Math.max(0, Math.ceil((leftPad - wC) / strideWidth));\n const yCMax = Math.min(convInfo.outWidth, (convInfo.inWidth + leftPad - wC) / strideWidth);\n for (let d2 = 0; d2 < convInfo.outChannels; ++d2) {\n const d1 = Math.trunc(d2 / chMul);\n const dm = d2 % chMul;\n let dotProd = 0;\n for (let b = 0; b < convInfo.batchSize; ++b) {\n for (let yR = yRMin; yR < yRMax; ++yR) {\n const xR = wR + yR * strideHeight - topPad;\n for (let yC = yCMin; yC < yCMax; ++yC) {\n const xC = wC + yC * strideWidth - leftPad;\n dotProd += xBuf.get(b, xR, xC, d1) *\n dyBuf.get(b, yR, yC, d2);\n }\n }\n }\n dW.set(dotProd, wR, wC, d1, dm);\n }\n }\n }\n return backend.makeTensorInfo(dW.shape, dW.dtype, dW.values);\n}\nexport const depthwiseConv2dNativeBackpropFilterConfig = {\n kernelName: DepthwiseConv2dNativeBackpropFilter,\n backendName: 'cpu',\n kernelFunc: depthwiseConv2dNativeBackpropFilter\n};\n//# sourceMappingURL=DepthwiseConv2dNativeBackpropFilter.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, DepthwiseConv2dNativeBackpropInput, TensorBuffer, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function depthwiseConv2dNativeBackpropInput(args) {\n const { inputs, backend, attrs } = args;\n const { dy, filter } = inputs;\n const { strides, dilations, pad, dimRoundingMode, inputShape } = attrs;\n assertNotComplex([dy, filter], 'depthwiseConv2DNativeBackpropInput');\n const dyStrides = util.computeStrides(dy.shape);\n const filterStrides = util.computeStrides(filter.shape);\n const convInfo = backend_util.computeConv2DInfo(inputShape, filter.shape, strides, dilations, pad, dimRoundingMode, true /* depthwise */);\n const dx = new TensorBuffer(convInfo.inShape, 'float32');\n const dxValues = dx.values;\n const [dxS0, dxS1, dxS2] = dx.strides;\n const dyValues = backend.data.get(dy.dataId).values;\n const [dyS0, dyS1, dyS2] = dyStrides;\n const fltValues = backend.data.get(filter.dataId).values;\n const [fltS0, fltS1, fltS2] = filterStrides;\n const { batchSize, filterHeight, filterWidth, inChannels, inHeight, inWidth, outChannels, outHeight, outWidth, strideHeight, strideWidth } = convInfo;\n const topPad = filterHeight - 1 - convInfo.padInfo.top;\n const leftPad = filterWidth - 1 - convInfo.padInfo.left;\n const chMul = outChannels / inChannels;\n for (let b = 0; b < batchSize; ++b) {\n for (let d1 = 0; d1 < inChannels; ++d1) {\n for (let xR = 0; xR < inHeight; ++xR) {\n const xRCorner = xR - topPad;\n const xRMin = Math.max(0, Math.ceil(xRCorner / strideHeight));\n const yRMax = Math.min(outHeight, (filterHeight + xRCorner) / strideHeight);\n for (let xC = 0; xC < inWidth; ++xC) {\n const xCCorner = xC - leftPad;\n const xCMin = Math.max(0, Math.ceil(xCCorner / strideWidth));\n const yCMax = Math.min(outWidth, (filterWidth + xCCorner) / strideWidth);\n let dotProd = 0;\n for (let yR = xRMin; yR < yRMax; ++yR) {\n const wR = yR * strideHeight - xRCorner;\n for (let yC = xCMin; yC < yCMax; ++yC) {\n const wC = yC * strideWidth - xCCorner;\n const dyOffset = dyS0 * b + dyS1 * yR + dyS2 * yC;\n const fltOffset = fltS0 * (filterHeight - 1 - wR) +\n fltS1 * (filterWidth - 1 - wC) + fltS2 * d1;\n for (let dm = 0; dm < chMul; ++dm) {\n const d2 = d1 * chMul + dm;\n const pixel = dyValues[dyOffset + d2];\n const weight = fltValues[fltOffset + dm];\n dotProd += pixel * weight;\n }\n }\n }\n dxValues[dxS0 * b + dxS1 * xR + dxS2 * xC + d1] = dotProd;\n }\n }\n }\n }\n return backend.makeTensorInfo(dx.shape, dx.dtype, dx.values);\n}\nexport const depthwiseConv2dNativeBackpropInputConfig = {\n kernelName: DepthwiseConv2dNativeBackpropInput,\n backendName: 'cpu',\n kernelFunc: depthwiseConv2dNativeBackpropInput\n};\n//# sourceMappingURL=DepthwiseConv2dNativeBackpropInput.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Dilation2D, util } from '@tensorflow/tfjs-core';\nexport const dilation2dConfig = {\n kernelName: Dilation2D,\n backendName: 'cpu',\n kernelFunc: ({ inputs, backend, attrs }) => {\n const { x, filter } = inputs;\n const { strides, pad, dilations } = attrs;\n const cpuBackend = backend;\n const xVals = cpuBackend.data.get(x.dataId).values;\n const xRank = x.shape.length;\n const filterVals = cpuBackend.data.get(filter.dataId).values;\n const filterRank = filter.shape.length;\n const { batchSize, inHeight, inWidth, inChannels, outHeight, outWidth, padInfo, strideHeight, strideWidth, filterHeight, filterWidth, dilationHeight, dilationWidth, outShape } = backend_util.computeDilation2DInfo(x.shape, filter.shape, strides, pad, 'NHWC' /* dataFormat */, dilations);\n const outSize = util.sizeFromShape(outShape);\n const outRank = outShape.length;\n const outputVals = util.getArrayFromDType(x.dtype, outSize);\n // Upsampling the input by fill in `dilation size - 1` values between each\n // input value.\n // This implementation follows the TF c++ implementation:\n // https://github.com/tensorflow/tensorflow/blob/d9a3a849edc198e90172bc58eb293de457f9d986/tensorflow/core/kernels/dilation_ops.cc\n for (let b = 0; b < batchSize; ++b) {\n for (let hOut = 0; hOut < outHeight; ++hOut) {\n const hBeg = hOut * strideHeight - padInfo.top;\n for (let wOut = 0; wOut < outWidth; ++wOut) {\n const wBeg = wOut * strideWidth - padInfo.left;\n for (let d = 0; d < inChannels; ++d) {\n let curVal = Number.MIN_SAFE_INTEGER;\n for (let h = 0; h < filterHeight; ++h) {\n const hIn = hBeg + h * dilationHeight;\n if (hIn >= 0 && hIn < inHeight) {\n for (let w = 0; w < filterWidth; ++w) {\n const wIn = wBeg + w * dilationWidth;\n if (wIn >= 0 && wIn < inWidth) {\n const xIndex = util.locToIndex([b, hIn, wIn, d], xRank, util.computeStrides(x.shape));\n const filterIndex = util.locToIndex([h, w, d], filterRank, util.computeStrides(filter.shape));\n const val = xVals[xIndex] + filterVals[filterIndex];\n if (val > curVal) {\n curVal = val;\n }\n }\n }\n }\n }\n const outputIndex = util.locToIndex([b, hOut, wOut, d], outRank, util.computeStrides(outShape));\n outputVals[outputIndex] = curVal;\n }\n }\n }\n }\n const dataId = cpuBackend.write(util.toTypedArray(outputVals, x.dtype), outShape, x.dtype);\n return { dataId, shape: outShape, dtype: x.dtype };\n }\n};\n//# sourceMappingURL=Dilation2D.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Dilation2DBackpropFilter, util } from '@tensorflow/tfjs-core';\nexport const dilation2dBackpropFilterConfig = {\n kernelName: Dilation2DBackpropFilter,\n backendName: 'cpu',\n kernelFunc: ({ inputs, backend, attrs }) => {\n const { x, filter, dy } = inputs;\n const { strides, pad, dilations } = attrs;\n const cpuBackend = backend;\n const $x = util.toNestedArray(x.shape, cpuBackend.data.get(x.dataId).values);\n const $filter = util.toNestedArray(filter.shape, cpuBackend.data.get(filter.dataId).values);\n const { batchSize, inHeight, inWidth, inChannels, outHeight, outWidth, padInfo, strideHeight, strideWidth, filterHeight, filterWidth, dilationHeight, dilationWidth, outShape } = backend_util.computeDilation2DInfo(x.shape, filter.shape, strides, pad, 'NHWC' /* dataFormat */, dilations);\n util.assert(dy.rank === outShape.length, () => `Error in ${Dilation2DBackpropFilter}, dy ` +\n `must have the same rank as output ${outShape.length}, but got ` +\n `${dy.rank}`);\n const $dy = util.toNestedArray(outShape, cpuBackend.data.get(dy.dataId).values);\n // The computed filter gradients has the same dimensions as the filter:\n // [filterHeight, filterWidth, depth]\n const gradients = util.makeZerosNestedTypedArray(filter.shape, filter.dtype);\n // In the case of multiple argmax branches, we only back-propagate along the\n // last branch, i.e., the one with largest value of `h * filter_cols + w`,\n // similarly to the max-pooling backward routines.\n // This implementation follows the TF c++ implementation:\n // https://github.com/tensorflow/tensorflow/blob/d9a3a849edc198e90172bc58eb293de457f9d986/tensorflow/core/kernels/dilation_ops.cc\n for (let b = 0; b < batchSize; ++b) {\n for (let hOut = 0; hOut < outHeight; ++hOut) {\n const hBeg = hOut * strideHeight - padInfo.top;\n for (let wOut = 0; wOut < outWidth; ++wOut) {\n const wBeg = wOut * strideWidth - padInfo.left;\n for (let d = 0; d < inChannels; ++d) {\n let curVal = Number.MIN_SAFE_INTEGER;\n let hMax = 0;\n let wMax = 0;\n for (let h = 0; h < filterHeight; ++h) {\n const hIn = hBeg + h * dilationHeight;\n if (hIn >= 0 && hIn < inHeight) {\n for (let w = 0; w < filterWidth; ++w) {\n const wIn = wBeg + w * dilationWidth;\n if (wIn >= 0 && wIn < inWidth) {\n const val = $x[b][hIn][wIn][d] + $filter[h][w][d];\n if (val > curVal) {\n curVal = val;\n hMax = h;\n wMax = w;\n }\n }\n }\n }\n }\n gradients[hMax][wMax][d] += $dy[b][hOut][wOut][d];\n }\n }\n }\n }\n const dataId = cpuBackend.write(util.toTypedArray(gradients, x.dtype), filter.shape, filter.dtype);\n return { dataId, shape: filter.shape, dtype: filter.dtype };\n }\n};\n//# sourceMappingURL=Dilation2DBackpropFilter.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Dilation2DBackpropInput, util } from '@tensorflow/tfjs-core';\nexport const dilation2dBackpropInputConfig = {\n kernelName: Dilation2DBackpropInput,\n backendName: 'cpu',\n kernelFunc: ({ inputs, backend, attrs }) => {\n const { x, filter, dy } = inputs;\n const { strides, pad, dilations } = attrs;\n const cpuBackend = backend;\n const $x = util.toNestedArray(x.shape, cpuBackend.data.get(x.dataId).values);\n const $filter = util.toNestedArray(filter.shape, cpuBackend.data.get(filter.dataId).values);\n const { batchSize, inHeight, inWidth, inChannels, outHeight, outWidth, padInfo, strideHeight, strideWidth, filterHeight, filterWidth, dilationHeight, dilationWidth, outShape } = backend_util.computeDilation2DInfo(x.shape, filter.shape, strides, pad, 'NHWC' /* dataFormat */, dilations);\n util.assert(dy.rank === outShape.length, () => `Error in ${Dilation2DBackpropInput}, dy ` +\n `must have the same rank as output ${outShape.length}, but got ` +\n `${dy.rank}`);\n const $dy = util.toNestedArray(outShape, cpuBackend.data.get(dy.dataId).values);\n // The computed gradients has the same dimensions as the input:\n // [batch, inputHeight, inputCols, inChannel]\n const gradients = util.makeZerosNestedTypedArray(x.shape, x.dtype);\n // In the case of multiple argmax branches, we only back-propagate along the\n // last branch, i.e., the one with largest value of `h * filter_cols + w`,\n // similarly to the max-pooling backward routines.\n // This implementation follows the TF c++ implementation:\n // https://github.com/tensorflow/tensorflow/blob/d9a3a849edc198e90172bc58eb293de457f9d986/tensorflow/core/kernels/dilation_ops.cc\n for (let b = 0; b < batchSize; ++b) {\n for (let hOut = 0; hOut < outHeight; ++hOut) {\n const hBeg = hOut * strideHeight - padInfo.top;\n for (let wOut = 0; wOut < outWidth; ++wOut) {\n const wBeg = wOut * strideWidth - padInfo.left;\n for (let d = 0; d < inChannels; ++d) {\n let curVal = Number.MIN_SAFE_INTEGER;\n let hInMax = (hBeg < 0) ? 0 : hBeg;\n let wInMax = (wBeg < 0) ? 0 : wBeg;\n for (let h = 0; h < filterHeight; ++h) {\n const hIn = hBeg + h * dilationHeight;\n if (hIn >= 0 && hIn < inHeight) {\n for (let w = 0; w < filterWidth; ++w) {\n const wIn = wBeg + w * dilationWidth;\n if (wIn >= 0 && wIn < inWidth) {\n const val = $x[b][hIn][wIn][d] + $filter[h][w][d];\n if (val > curVal) {\n curVal = val;\n hInMax = hIn;\n wInMax = wIn;\n }\n }\n }\n }\n }\n gradients[b][hInMax][wInMax][d] += $dy[b][hOut][wOut][d];\n }\n }\n }\n }\n const dataId = cpuBackend.write(util.toTypedArray(gradients, x.dtype), x.shape, x.dtype);\n return { dataId, shape: x.shape, dtype: x.dtype };\n }\n};\n//# sourceMappingURL=Dilation2DBackpropInput.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Div } from '@tensorflow/tfjs-core';\nimport { createSimpleBinaryKernelImpl } from '../utils/binary_impl';\nimport { binaryKernelFunc } from '../utils/kernel_utils';\nexport const divImpl = createSimpleBinaryKernelImpl((a, b) => a / b);\nexport const div = binaryKernelFunc(Div, divImpl);\nexport const divConfig = {\n kernelName: Div,\n backendName: 'cpu',\n kernelFunc: div\n};\n//# sourceMappingURL=Div.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Erf } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nconst p = backend_util.ERF_P;\nconst a1 = backend_util.ERF_A1;\nconst a2 = backend_util.ERF_A2;\nconst a3 = backend_util.ERF_A3;\nconst a4 = backend_util.ERF_A4;\nconst a5 = backend_util.ERF_A5;\nexport const erf = unaryKernelFunc(Erf, (xi) => {\n const sign = Math.sign(xi);\n const v = Math.abs(xi);\n const t = 1.0 / (1.0 + p * v);\n return sign *\n (1.0 -\n (((((a5 * t + a4) * t) + a3) * t + a2) * t + a1) * t *\n Math.exp(-v * v));\n});\nexport const erfConfig = {\n kernelName: Erf,\n backendName: 'cpu',\n kernelFunc: erf,\n};\n//# sourceMappingURL=Erf.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, util } from '@tensorflow/tfjs-core';\nimport { add } from '../kernels/Add';\nimport { complex } from '../kernels/Complex';\nimport { concat } from '../kernels/Concat';\nimport { divConfig } from '../kernels/Div';\nimport { identity } from '../kernels/Identity';\nimport { imag } from '../kernels/Imag';\nimport { multiply } from '../kernels/Multiply';\nimport { real } from '../kernels/Real';\nimport { slice } from '../kernels/Slice';\nimport { sub } from '../kernels/Sub';\n/**\n * Calculate FFT of inner most elements of batch tensor.\n */\nexport function fftBatch(input, inverse, cpuBackend) {\n const inputShape = input.shape;\n const batch = inputShape[0];\n const innerDim = inputShape[1];\n const inputVals = cpuBackend.data.get(input.dataId);\n const real2D = inputVals.complexTensorInfos.real;\n const imag2D = inputVals.complexTensorInfos.imag;\n // Collects real and imaginary values separately.\n const resultShape = [batch, innerDim];\n const resultSize = util.sizeFromShape(resultShape);\n const resultReal = util.getTypedArrayFromDType('float32', resultSize);\n const resultImag = util.getTypedArrayFromDType('float32', resultSize);\n for (let b = 0; b < batch; b++) {\n // TODO: Support slice ops for complex type.\n const r = slice({\n inputs: { x: real2D },\n backend: cpuBackend,\n attrs: { begin: [b, 0], size: [1, innerDim] }\n });\n const i = slice({\n inputs: { x: imag2D },\n backend: cpuBackend,\n attrs: { begin: [b, 0], size: [1, innerDim] }\n });\n const input = complex({ inputs: { real: r, imag: i }, backend: cpuBackend });\n // Run FFT by batch element.\n const { real, imag } = fftImpl(input, inverse, cpuBackend);\n const res = backend_util.mergeRealAndImagArrays(real, imag);\n for (let d = 0; d < innerDim; d++) {\n const c = backend_util.getComplexWithIndex(res, d);\n resultReal[b * innerDim + d] = c.real;\n resultImag[b * innerDim + d] = c.imag;\n }\n cpuBackend.disposeIntermediateTensorInfo(r);\n cpuBackend.disposeIntermediateTensorInfo(i);\n cpuBackend.disposeIntermediateTensorInfo(input);\n }\n const $realInfo = cpuBackend.makeTensorInfo(resultShape, 'float32', resultReal);\n const $imagInfo = cpuBackend.makeTensorInfo(resultShape, 'float32', resultImag);\n const result = complex({ inputs: { real: $realInfo, imag: $imagInfo }, backend: cpuBackend });\n cpuBackend.disposeIntermediateTensorInfo($realInfo);\n cpuBackend.disposeIntermediateTensorInfo($imagInfo);\n return result;\n}\nexport function fftImpl(input, inverse, cpuBackend) {\n const inputSize = util.sizeFromShape(input.shape);\n const inputVals = cpuBackend.data.get(input.dataId);\n const realVals = cpuBackend.data.get(inputVals.complexTensorInfos.real.dataId).values;\n const imagVals = cpuBackend.data.get(inputVals.complexTensorInfos.imag.dataId).values;\n if (isExponentOf2(inputSize)) {\n const result = fftRadix2(realVals, imagVals, 
inputSize, inverse, cpuBackend);\n const resultShape = [input.shape[0], input.shape[1]];\n if (inverse) {\n const realInfo = cpuBackend.makeTensorInfo(resultShape, 'float32', result.real);\n const imagInfo = cpuBackend.makeTensorInfo(resultShape, 'float32', result.imag);\n const sizeInfo = cpuBackend.makeTensorInfo([], 'float32', util.createScalarValue(inputSize, 'float32'));\n const sizeInfoCopy = identity({ inputs: { x: sizeInfo }, backend: cpuBackend });\n const divRealInfo = divConfig.kernelFunc({ inputs: { a: realInfo, b: sizeInfo }, backend: cpuBackend });\n const divImagInfo = divConfig.kernelFunc({ inputs: { a: imagInfo, b: sizeInfoCopy }, backend: cpuBackend });\n const divRealVals = cpuBackend.data.get(divRealInfo.dataId).values;\n const divImagVals = cpuBackend.data.get(divImagInfo.dataId).values;\n cpuBackend.disposeIntermediateTensorInfo(realInfo);\n cpuBackend.disposeIntermediateTensorInfo(imagInfo);\n cpuBackend.disposeIntermediateTensorInfo(sizeInfo);\n cpuBackend.disposeIntermediateTensorInfo(sizeInfoCopy);\n cpuBackend.disposeIntermediateTensorInfo(divRealInfo);\n cpuBackend.disposeIntermediateTensorInfo(divImagInfo);\n return { real: divRealVals, imag: divImagVals };\n }\n return result;\n }\n else {\n const data = backend_util.mergeRealAndImagArrays(realVals, imagVals);\n const rawOutput = fourierTransformByMatmul(data, inputSize, inverse);\n return backend_util.splitRealAndImagArrays(rawOutput);\n }\n}\nfunction isExponentOf2(size) {\n return (size & size - 1) === 0;\n}\n// FFT using Cooley-Tukey algorithm on radix 2 dimensional input.\nfunction fftRadix2(realVals, imagVals, size, inverse, cpuBackend) {\n if (size === 1) {\n return { real: realVals, imag: imagVals };\n }\n const data = backend_util.mergeRealAndImagArrays(realVals, imagVals);\n const half = size / 2;\n const evenComplex = backend_util.complexWithEvenIndex(data);\n const evenRealVals = evenComplex.real;\n const evenImagVals = evenComplex.imag;\n const evenShape = [evenRealVals.length];\n const evenRealInfo = cpuBackend.makeTensorInfo(evenShape, 'float32', evenRealVals);\n const evenImagInfo = cpuBackend.makeTensorInfo(evenShape, 'float32', evenImagVals);\n const evenTensorInfo = complex({ inputs: { real: evenRealInfo, imag: evenImagInfo }, backend: cpuBackend });\n const oddComplex = backend_util.complexWithOddIndex(data);\n const oddRealVals = oddComplex.real;\n const oddImagVals = oddComplex.imag;\n const oddShape = [oddRealVals.length];\n const oddRealInfo = cpuBackend.makeTensorInfo(oddShape, 'float32', oddRealVals);\n const oddImagInfo = cpuBackend.makeTensorInfo(oddShape, 'float32', oddImagVals);\n const oddTensorInfo = complex({ inputs: { real: oddRealInfo, imag: oddImagInfo }, backend: cpuBackend });\n // Recursive call for half part of original input.\n const $evenComplex = fftRadix2(evenRealVals, evenImagVals, half, inverse, cpuBackend);\n const $evenRealVals = $evenComplex.real;\n const $evenImagVals = $evenComplex.imag;\n const $evenShape = [$evenRealVals.length];\n const $evenRealInfo = cpuBackend.makeTensorInfo($evenShape, 'float32', $evenRealVals);\n const $evenImagInfo = cpuBackend.makeTensorInfo($evenShape, 'float32', $evenImagVals);\n const $evenTensorInfo = complex({\n inputs: { real: $evenRealInfo, imag: $evenImagInfo },\n backend: cpuBackend\n });\n const $oddComplex = fftRadix2(oddRealVals, oddImagVals, half, inverse, cpuBackend);\n const $oddRealVals = $oddComplex.real;\n const $oddImagVals = $oddComplex.imag;\n const $oddShape = [$oddRealVals.length];\n const $oddRealInfo = 
cpuBackend.makeTensorInfo($oddShape, 'float32', $oddRealVals);\n const $oddImagInfo = cpuBackend.makeTensorInfo($oddShape, 'float32', $oddImagVals);\n const $oddTensorInfo = complex({ inputs: { real: $oddRealInfo, imag: $oddImagInfo }, backend: cpuBackend });\n const e = backend_util.exponents(size, inverse);\n const eShape = [e.real.length];\n const eRealInfo = cpuBackend.makeTensorInfo(eShape, 'float32', e.real);\n const eImagInfo = cpuBackend.makeTensorInfo(eShape, 'float32', e.imag);\n const complexInfo = complex({ inputs: { real: eRealInfo, imag: eImagInfo }, backend: cpuBackend });\n const exponentInfo = multiply({ inputs: { a: complexInfo, b: $oddTensorInfo }, backend: cpuBackend });\n const addPart = add({\n inputs: { a: $evenTensorInfo, b: exponentInfo },\n backend: cpuBackend\n });\n const subPart = sub({\n inputs: { a: $evenTensorInfo, b: exponentInfo },\n backend: cpuBackend\n });\n const addPartReal = real({ inputs: { input: addPart }, backend: cpuBackend });\n const subPartReal = real({ inputs: { input: subPart }, backend: cpuBackend });\n const addPartImag = imag({ inputs: { input: addPart }, backend: cpuBackend });\n const subPartImag = imag({ inputs: { input: subPart }, backend: cpuBackend });\n const $real = concat({\n inputs: [addPartReal, subPartReal],\n backend: cpuBackend,\n attrs: { axis: 0 }\n });\n const $imag = concat({\n inputs: [addPartImag, subPartImag],\n backend: cpuBackend,\n attrs: { axis: 0 }\n });\n const $realVals = cpuBackend.data.get($real.dataId).values;\n const $imagVals = cpuBackend.data.get($imag.dataId).values;\n cpuBackend.disposeIntermediateTensorInfo(evenRealInfo);\n cpuBackend.disposeIntermediateTensorInfo(evenImagInfo);\n cpuBackend.disposeIntermediateTensorInfo(evenTensorInfo);\n cpuBackend.disposeIntermediateTensorInfo(oddRealInfo);\n cpuBackend.disposeIntermediateTensorInfo(oddImagInfo);\n cpuBackend.disposeIntermediateTensorInfo(oddTensorInfo);\n cpuBackend.disposeIntermediateTensorInfo($evenRealInfo);\n cpuBackend.disposeIntermediateTensorInfo($evenImagInfo);\n cpuBackend.disposeIntermediateTensorInfo($evenTensorInfo);\n cpuBackend.disposeIntermediateTensorInfo($oddRealInfo);\n cpuBackend.disposeIntermediateTensorInfo($oddImagInfo);\n cpuBackend.disposeIntermediateTensorInfo($oddTensorInfo);\n cpuBackend.disposeIntermediateTensorInfo(eRealInfo);\n cpuBackend.disposeIntermediateTensorInfo(eImagInfo);\n cpuBackend.disposeIntermediateTensorInfo(complexInfo);\n cpuBackend.disposeIntermediateTensorInfo(exponentInfo);\n cpuBackend.disposeIntermediateTensorInfo(addPart);\n cpuBackend.disposeIntermediateTensorInfo(subPart);\n cpuBackend.disposeIntermediateTensorInfo(addPartReal);\n cpuBackend.disposeIntermediateTensorInfo(addPartImag);\n cpuBackend.disposeIntermediateTensorInfo(subPartReal);\n cpuBackend.disposeIntermediateTensorInfo(subPartImag);\n cpuBackend.disposeIntermediateTensorInfo($real);\n cpuBackend.disposeIntermediateTensorInfo($imag);\n return { real: $realVals, imag: $imagVals };\n}\n// Calculate fourier transform by multplying sinusoid matrix.\nfunction fourierTransformByMatmul(data, size, inverse) {\n const ret = new Float32Array(size * 2);\n // TODO: Use matmul instead once it supports complex64 type.\n for (let r = 0; r < size; r++) {\n let real = 0.0;\n let imag = 0.0;\n for (let c = 0; c < size; c++) {\n const e = backend_util.exponent(r * c, size, inverse);\n const term = backend_util.getComplexWithIndex(data, c);\n real += term.real * e.real - term.imag * e.imag;\n imag += term.real * e.imag + term.imag * e.real;\n }\n if 
(inverse) {\n real /= size;\n imag /= size;\n }\n backend_util.assignToTypedArray(ret, real, imag, r);\n }\n return ret;\n}\n//# sourceMappingURL=fft_utils.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { FFT, util } from '@tensorflow/tfjs-core';\nimport { fftBatch } from '../utils/fft_utils';\nimport { reshape } from './Reshape';\nexport function fft(args) {\n const { inputs, backend } = args;\n const { input } = inputs;\n const inputSize = util.sizeFromShape(input.shape);\n // Collapse all outer dimensions to a single batch dimension.\n const innerDimensionSize = input.shape[input.shape.length - 1];\n const batch = inputSize / innerDimensionSize;\n const input2D = reshape({\n inputs: { x: input },\n backend,\n attrs: { shape: [batch, innerDimensionSize] }\n });\n const result = fftBatch(input2D, false, backend);\n const resultReshaped = reshape({ inputs: { x: result }, backend, attrs: { shape: input.shape } });\n backend.disposeIntermediateTensorInfo(input2D);\n backend.disposeIntermediateTensorInfo(result);\n return resultReshaped;\n}\nexport const fftConfig = {\n kernelName: FFT,\n backendName: 'cpu',\n kernelFunc: fft\n};\n//# sourceMappingURL=FFT.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Fill, util } from '@tensorflow/tfjs-core';\nexport function fill(args) {\n const { backend, attrs } = args;\n const { shape, value, dtype } = attrs;\n const $dtype = dtype || util.inferDtype(value);\n const values = util.getArrayFromDType($dtype, util.sizeFromShape(shape));\n fillValues(values, value, $dtype);\n return backend.makeTensorInfo(shape, $dtype, values);\n}\nexport const fillConfig = {\n kernelName: Fill,\n backendName: 'cpu',\n kernelFunc: fill\n};\nfunction fillValues(values, value, dtype) {\n if (dtype === 'string') {\n values.fill(value);\n }\n else {\n values.fill(value);\n }\n}\n//# sourceMappingURL=Fill.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { FlipLeftRight, util } from '@tensorflow/tfjs-core';\nexport const flipLeftRightConfig = {\n kernelName: FlipLeftRight,\n backendName: 'cpu',\n kernelFunc: ({ inputs, attrs, backend }) => {\n const { image } = inputs;\n const cpuBackend = backend;\n const output = util.getTypedArrayFromDType(image.dtype, util.sizeFromShape(image.shape));\n const [batch, imageHeight, imageWidth, numChannels] = image.shape;\n const imageVals = cpuBackend.data.get(image.dataId).values;\n for (let batchIdx = 0; batchIdx < batch; batchIdx++) {\n const batchOffset = batchIdx * imageWidth * imageHeight * numChannels;\n for (let row = 0; row < imageHeight; row++) {\n const rowOffset = row * (imageWidth * numChannels);\n for (let col = 0; col < imageWidth; col++) {\n const colOffset = col * numChannels;\n for (let channel = 0; channel < numChannels; channel++) {\n const coords = [batch, row, col, channel];\n const x = coords[2];\n const coordX = Math.round(imageWidth - x);\n const outIdx = batchOffset + rowOffset + colOffset + channel;\n let outputValue = imageVals[outIdx];\n // If the coordinate position falls within the image boundaries...\n if (coordX >= 0 && coordX < imageWidth) {\n // set the output to the image value at the coordinate position.\n const rotatedColOffset = coordX * numChannels;\n const imageIdx = batchOffset + rowOffset + rotatedColOffset + channel;\n outputValue = imageVals[imageIdx];\n }\n output[outIdx] = outputValue;\n }\n }\n }\n }\n const dataId = cpuBackend.write(output, image.shape, image.dtype);\n return { dataId, shape: image.shape, dtype: image.dtype };\n }\n};\n//# sourceMappingURL=FlipLeftRight.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { FusedConv2D } from '@tensorflow/tfjs-core';\nimport { applyActivation } from '../utils/fused_utils';\nimport { add } from './Add';\nimport { conv2D } from './Conv2D';\nexport function fusedConv2D(args) {\n const { inputs, backend, attrs } = args;\n const { x, filter, bias, preluActivationWeights } = inputs;\n const { strides, pad, dataFormat, dilations, dimRoundingMode, activation } = attrs;\n let result = conv2D({\n inputs: { x, filter },\n backend,\n attrs: { strides, pad, dataFormat, dilations, dimRoundingMode }\n });\n if (bias) {\n const resultOld = result;\n result = add({ inputs: { a: result, b: bias }, backend });\n backend.disposeIntermediateTensorInfo(resultOld);\n }\n if (activation) {\n const resultOld = result;\n result =\n applyActivation(backend, result, activation, preluActivationWeights);\n backend.disposeIntermediateTensorInfo(resultOld);\n }\n return result;\n}\nexport const fusedConv2DConfig = {\n kernelName: FusedConv2D,\n backendName: 'cpu',\n kernelFunc: fusedConv2D\n};\n//# sourceMappingURL=FusedConv2D.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { FusedDepthwiseConv2D } from '@tensorflow/tfjs-core';\nimport { applyActivation } from '../utils/fused_utils';\nimport { add } from './Add';\nimport { depthwiseConv2dNative } from './DepthwiseConv2dNative';\nexport function fusedDepthwiseConv2D(args) {\n const { inputs, backend, attrs } = args;\n const { x, filter, bias, preluActivationWeights } = inputs;\n const { strides, pad, dataFormat, dilations, dimRoundingMode, activation } = attrs;\n let result = depthwiseConv2dNative({\n inputs: { x, filter },\n backend,\n attrs: { strides, pad, dataFormat, dilations, dimRoundingMode }\n });\n if (bias) {\n const oldResult = result;\n result = add({ inputs: { a: result, b: bias }, backend });\n backend.disposeIntermediateTensorInfo(oldResult);\n }\n if (activation) {\n const oldResult = result;\n result =\n applyActivation(backend, result, activation, preluActivationWeights);\n backend.disposeIntermediateTensorInfo(oldResult);\n }\n return result;\n}\nexport const fusedDepthwiseConv2DConfig = {\n kernelName: FusedDepthwiseConv2D,\n backendName: 'cpu',\n kernelFunc: fusedDepthwiseConv2D\n};\n//# sourceMappingURL=FusedDepthwiseConv2D.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { IFFT, util } from '@tensorflow/tfjs-core';\nimport { fftBatch } from '../utils/fft_utils';\nimport { reshape } from './Reshape';\nexport function ifft(args) {\n const { inputs, backend } = args;\n const { input } = inputs;\n const inputSize = util.sizeFromShape(input.shape);\n // Collapse all outer dimensions to a single batch dimension.\n const innerDimensionSize = input.shape[input.shape.length - 1];\n const batch = inputSize / innerDimensionSize;\n const input2D = reshape({\n inputs: { x: input },\n backend,\n attrs: { shape: [batch, innerDimensionSize] }\n });\n const result = fftBatch(input2D, true, backend);\n const resultReshaped = reshape({ inputs: { x: result }, backend, attrs: { shape: input.shape } });\n backend.disposeIntermediateTensorInfo(input2D);\n backend.disposeIntermediateTensorInfo(result);\n return resultReshaped;\n}\nexport const ifftConfig = {\n kernelName: IFFT,\n backendName: 'cpu',\n kernelFunc: ifft\n};\n//# sourceMappingURL=IFFT.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { IsFinite } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const isFinite = unaryKernelFunc(IsFinite, (xi) => Number.isFinite(xi) ? 1 : 0, 'bool');\nexport const isFiniteConfig = {\n kernelName: IsFinite,\n backendName: 'cpu',\n kernelFunc: isFinite,\n};\n//# sourceMappingURL=IsFinite.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { IsInf } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const isInf = unaryKernelFunc(IsInf, (xi) => Math.abs(xi) === Infinity ? 1 : 0, 'bool');\nexport const isInfConfig = {\n kernelName: IsInf,\n backendName: 'cpu',\n kernelFunc: isInf,\n};\n//# sourceMappingURL=IsInf.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { IsNan } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const isNaN = unaryKernelFunc(IsNan, (xi) => Number.isNaN(xi) ? 1 : 0, 'bool');\nexport const isNaNConfig = {\n kernelName: IsNan,\n backendName: 'cpu',\n kernelFunc: isNaN,\n};\n//# sourceMappingURL=IsNaN.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Log1p } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const log1p = unaryKernelFunc(Log1p, (xi) => Math.log1p(xi));\nexport const log1pConfig = {\n kernelName: Log1p,\n backendName: 'cpu',\n kernelFunc: log1p,\n};\n//# sourceMappingURL=Log1p.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { LogicalNot } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const logicalNot = unaryKernelFunc(LogicalNot, (xi) => xi ? 0 : 1, 'bool');\nexport const logicalNotConfig = {\n kernelName: LogicalNot,\n backendName: 'cpu',\n kernelFunc: logicalNot,\n};\n//# sourceMappingURL=LogicalNot.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Max } from '@tensorflow/tfjs-core';\nimport { backend_util } from '@tensorflow/tfjs-core';\nimport { util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { maxImpl } from './Max_impl';\nimport { transposeImpl } from './Transpose_impl';\nexport const maxConfig = {\n kernelName: Max,\n backendName: 'cpu',\n kernelFunc: ({ inputs, attrs, backend }) => {\n const { x } = inputs;\n const { reductionIndices, keepDims } = attrs;\n const cpuBackend = backend;\n let xShape = x.shape;\n const xRank = xShape.length;\n const origAxes = util.parseAxisParam(reductionIndices, xShape);\n let axes = origAxes;\n const permutedAxes = backend_util.getAxesPermutation(axes, xRank);\n let xVals = cpuBackend.data.get(x.dataId).values;\n if (permutedAxes != null) {\n const newShape = new Array(xRank);\n for (let i = 0; i < newShape.length; i++) {\n newShape[i] = xShape[permutedAxes[i]];\n }\n xVals = transposeImpl(xVals, xShape, x.dtype, permutedAxes, newShape);\n axes = backend_util.getInnerMostAxes(axes.length, xRank);\n xShape = newShape;\n }\n assertNotComplex(x, 'max');\n backend_util.assertAxesAreInnerMostDims('max', axes, xRank);\n const [maxOutShape, reduceShape] = backend_util.computeOutAndReduceShapes(xShape, axes);\n const reduceSize = util.sizeFromShape(reduceShape);\n const result = maxImpl(xVals, reduceSize, maxOutShape, x.dtype);\n const dataId = cpuBackend.write(result, maxOutShape, x.dtype);\n let outShape = maxOutShape;\n if (keepDims) {\n // reshape\n const newShape = backend_util.expandShapeToKeepDim(maxOutShape, origAxes);\n outShape = newShape;\n }\n return { dataId, shape: outShape, dtype: x.dtype };\n }\n};\n//# sourceMappingURL=Max.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, MaxPool, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { pool } from '../utils/pool_utils';\nimport { identity } from './Identity';\nexport function maxPool(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n assertNotComplex(x, 'maxPool');\n const { filterSize, strides, pad, dimRoundingMode } = attrs;\n const dilations = 1;\n util.assert(backend_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in maxPool: Either strides or dilations must be 1. ' +\n `Got strides ${strides} and dilations '${dilations}'`);\n const convInfo = backend_util.computePool2DInfo(x.shape, filterSize, strides, dilations, pad, dimRoundingMode);\n let res;\n if (convInfo.filterWidth === 1 && convInfo.filterHeight === 1 &&\n util.arraysEqual(convInfo.inShape, convInfo.outShape)) {\n res = identity({ inputs: { x }, backend });\n }\n else {\n const xValues = backend.data.get(x.dataId).values;\n const strides = util.computeStrides(x.shape);\n const buffer = pool(xValues, x.shape, x.dtype, strides, convInfo, 'max');\n res = backend.makeTensorInfo(convInfo.outShape, x.dtype, buffer.values);\n }\n return res;\n}\nexport const maxPoolConfig = {\n kernelName: MaxPool,\n backendName: 'cpu',\n kernelFunc: maxPool\n};\n//# sourceMappingURL=MaxPool.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, buffer, MaxPoolBackprop } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { maxPoolPositions } from '../utils/pool_utils';\nexport function maxPoolBackprop(args) {\n const { inputs, backend, attrs } = args;\n const { dy, input, output } = inputs;\n const x = input;\n assertNotComplex([input, output], 'maxPoolBackprop');\n const { filterSize, strides, pad, dimRoundingMode } = attrs;\n const convInfo = backend_util.computePool2DInfo(x.shape, filterSize, strides, 1 /* dilations */, pad, dimRoundingMode);\n const xValues = backend.data.get(x.dataId).values;\n const maxPosBuf = buffer(convInfo.outShape, x.dtype, maxPoolPositions(xValues, x.shape, x.dtype, convInfo).values);\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padLeft = effectiveFilterWidth - 1 - convInfo.padInfo.left;\n const padTop = effectiveFilterHeight - 1 - convInfo.padInfo.top;\n const dx = buffer(x.shape, 'float32');\n const dyData = backend.data.get(dy.dataId).values;\n const dyBuf = buffer(dy.shape, 'float32', dyData);\n for (let b = 0; b < convInfo.batchSize; ++b) {\n for (let d = 0; d < convInfo.inChannels; ++d) {\n for (let dxR = 0; dxR < convInfo.inHeight; ++dxR) {\n for (let dxC = 0; dxC < convInfo.inWidth; ++dxC) {\n // Shader code begins.\n const dyRCorner = dxR - padTop;\n const dyCCorner = dxC - padLeft;\n let dotProd = 0;\n for (let wR = 0; wR < effectiveFilterHeight; wR += dilationHeight) {\n const dyR = (dyRCorner + wR) / strideHeight;\n if (dyR < 0 || dyR >= convInfo.outHeight ||\n Math.floor(dyR) !== dyR) {\n continue;\n }\n for (let wC = 0; wC < effectiveFilterWidth; wC += dilationWidth) {\n const dyC = (dyCCorner + wC) / strideWidth;\n if (dyC < 0 || dyC >= convInfo.outWidth ||\n Math.floor(dyC) !== dyC) {\n continue;\n }\n const maxPos = effectiveFilterHeight * effectiveFilterWidth - 1 -\n maxPosBuf.get(b, dyR, dyC, d);\n const curPos = wR * effectiveFilterWidth + wC;\n const mask = maxPos === curPos ? 1 : 0;\n if (mask === 0) {\n continue;\n }\n const pixel = dyBuf.get(b, dyR, dyC, d);\n dotProd += pixel * mask;\n }\n }\n dx.set(dotProd, b, dxR, dxC, d);\n }\n }\n }\n }\n return backend.makeTensorInfo(dx.shape, dx.dtype, dx.values);\n}\nexport const maxPoolBackpropConfig = {\n kernelName: MaxPoolBackprop,\n backendName: 'cpu',\n kernelFunc: maxPoolBackprop\n};\n//# sourceMappingURL=MaxPoolBackprop.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nimport { maxPoolPositions, pool } from '../utils/pool_utils';\nexport function maxPoolWithArgmaxImpl(xValues, xShape, dtype, includeBatchInIndex, convInfo) {\n const strides = util.computeStrides(xShape);\n const maxPools = pool(xValues, xShape, dtype, strides, convInfo, 'max');\n const maxPositions = maxPoolPositions(xValues, xShape, dtype, convInfo, true, includeBatchInIndex);\n return [maxPools.values, maxPositions.values];\n}\n//# sourceMappingURL=MaxPoolWithArgmax_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { MaxPoolWithArgmax } from '@tensorflow/tfjs-core';\nimport { backend_util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { maxPoolWithArgmaxImpl } from './MaxPoolWithArgmax_impl';\nexport const maxPoolWithArgmaxConfig = {\n kernelName: MaxPoolWithArgmax,\n backendName: 'cpu',\n kernelFunc: ({ inputs, attrs, backend }) => {\n const { x } = inputs;\n const { filterSize, strides, pad, includeBatchInIndex } = attrs;\n const cpuBackend = backend;\n assertNotComplex(x, 'MaxPoolWithArgmax');\n const values = cpuBackend.data.get(x.dataId).values;\n const convInfo = backend_util.computePool2DInfo(x.shape, filterSize, strides, [1, 1], pad);\n const [pooled, indexes] = maxPoolWithArgmaxImpl(values, x.shape, x.dtype, includeBatchInIndex, convInfo);\n const pooledDataId = cpuBackend.write(pooled, convInfo.outShape, x.dtype);\n const indexesDataId = cpuBackend.write(indexes, convInfo.outShape, x.dtype);\n return [\n { dataId: pooledDataId, shape: convInfo.outShape, dtype: x.dtype },\n { dataId: indexesDataId, shape: convInfo.outShape, dtype: 'int32' }\n ];\n }\n};\n//# sourceMappingURL=MaxPoolWithArgmax.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { MirrorPad, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function mirrorPad(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n const { paddings, mode } = attrs;\n assertNotComplex(x, 'mirrorPad');\n const outShape = paddings.map((p, i) => p[0] /* beforePad */ + x.shape[i] + p[1] /* afterPad */);\n const start = paddings.map(p => p[0]);\n const end = paddings.map((p, i) => p[0] + x.shape[i]);\n const offset = mode === 'reflect' ? 0 : 1;\n const xVals = backend.data.get(x.dataId).values;\n const xRank = x.shape.length;\n const xStrides = util.computeStrides(x.shape);\n const resultSize = util.sizeFromShape(outShape);\n const resultRank = outShape.length;\n const resultStrides = util.computeStrides(outShape);\n const resVals = util.getTypedArrayFromDType(x.dtype, resultSize);\n for (let i = 0; i < resultSize; i++) {\n let coords = util.indexToLoc(i, resultRank, resultStrides);\n for (let i = 0; i < resultRank; i++) {\n if (coords[i] < start[i]) {\n coords[i] = start[i] * 2 - coords[i] - offset;\n }\n else if (coords[i] >= end[i]) {\n coords[i] = (end[i] - 1) * 2 - coords[i] + offset;\n }\n }\n coords = coords.map((c, i) => c - start[i]);\n const inIndex = util.locToIndex(coords, xRank, xStrides);\n resVals[i] = xVals[inIndex];\n }\n const outId = backend.write(resVals, outShape, x.dtype);\n return { dataId: outId, shape: outShape, dtype: x.dtype };\n}\nexport const mirrorPadConfig = {\n kernelName: MirrorPad,\n backendName: 'cpu',\n kernelFunc: mirrorPad\n};\n//# sourceMappingURL=MirrorPad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { NonMaxSuppressionV4 } from '@tensorflow/tfjs-core';\nimport { kernel_impls } from '@tensorflow/tfjs-core';\nconst nonMaxSuppressionV4Impl = kernel_impls.nonMaxSuppressionV4Impl;\nimport { assertNotComplex } from '../cpu_util';\nexport const nonMaxSuppressionV4Config = {\n kernelName: NonMaxSuppressionV4,\n backendName: 'cpu',\n kernelFunc: ({ inputs, backend, attrs }) => {\n const { boxes, scores } = inputs;\n const { maxOutputSize, iouThreshold, scoreThreshold, padToMaxOutputSize } = attrs;\n const cpuBackend = backend;\n assertNotComplex(boxes, 'NonMaxSuppressionPadded');\n const boxesVals = cpuBackend.data.get(boxes.dataId).values;\n const scoresVals = cpuBackend.data.get(scores.dataId).values;\n const { selectedIndices, validOutputs } = nonMaxSuppressionV4Impl(boxesVals, scoresVals, maxOutputSize, iouThreshold, scoreThreshold, padToMaxOutputSize);\n return [selectedIndices, validOutputs];\n }\n};\n//# sourceMappingURL=NonMaxSuppressionV4.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { NonMaxSuppressionV5 } from '@tensorflow/tfjs-core';\nimport { kernel_impls } from '@tensorflow/tfjs-core';\nconst nonMaxSuppressionV5Impl = kernel_impls.nonMaxSuppressionV5Impl;\nimport { assertNotComplex } from '../cpu_util';\nexport const nonMaxSuppressionV5Config = {\n kernelName: NonMaxSuppressionV5,\n backendName: 'cpu',\n kernelFunc: ({ inputs, backend, attrs }) => {\n const { boxes, scores } = inputs;\n const { maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma } = attrs;\n const cpuBackend = backend;\n assertNotComplex(boxes, 'NonMaxSuppressionWithScore');\n const boxesVals = cpuBackend.data.get(boxes.dataId).values;\n const scoresVals = cpuBackend.data.get(scores.dataId).values;\n const maxOutputSizeVal = maxOutputSize;\n const iouThresholdVal = iouThreshold;\n const scoreThresholdVal = scoreThreshold;\n const softNmsSigmaVal = softNmsSigma;\n const { selectedIndices, selectedScores } = nonMaxSuppressionV5Impl(boxesVals, scoresVals, maxOutputSizeVal, iouThresholdVal, scoreThresholdVal, softNmsSigmaVal);\n return [selectedIndices, selectedScores];\n }\n};\n//# sourceMappingURL=NonMaxSuppressionV5.js.map", "/**\n * @license\n * Copyright 
2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { PadV2, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function padV2(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n const { paddings, constantValue } = attrs;\n assertNotComplex(x, 'pad');\n const outShape = paddings.map((p, i) => p[0] /* beforePad */ + x.shape[i] + p[1] /* afterPad */);\n const start = paddings.map(p => p[0]);\n const xVals = backend.data.get(x.dataId).values;\n const xSize = util.sizeFromShape(x.shape);\n const xRank = x.shape.length;\n const xStrides = util.computeStrides(x.shape);\n const resultSize = util.sizeFromShape(outShape);\n const resultRank = outShape.length;\n const resultStrides = util.computeStrides(outShape);\n const resVals = util.getTypedArrayFromDType(x.dtype, resultSize);\n if (constantValue !== 0) {\n resVals.fill(constantValue);\n }\n for (let i = 0; i < xSize; i++) {\n const coords = util.indexToLoc(i, xRank, xStrides);\n const outCoords = coords.map((c, i) => c + start[i]);\n const outIndex = util.locToIndex(outCoords, resultRank, resultStrides);\n resVals[outIndex] = xVals[i];\n }\n const outId = backend.write(resVals, outShape, x.dtype);\n return { dataId: outId, shape: outShape, dtype: x.dtype };\n}\nexport const padV2Config = {\n kernelName: PadV2,\n backendName: 'cpu',\n kernelFunc: padV2\n};\n//# sourceMappingURL=PadV2.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Reciprocal } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const reciprocal = unaryKernelFunc(Reciprocal, (xi) => 1 / xi);\nexport const reciprocalConfig = {\n kernelName: Reciprocal,\n backendName: 'cpu',\n kernelFunc: reciprocal,\n};\n//# sourceMappingURL=Reciprocal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, RotateWithOffset, util } from '@tensorflow/tfjs-core';\nexport const rotateWithOffsetConfig = {\n kernelName: RotateWithOffset,\n backendName: 'cpu',\n kernelFunc: ({ inputs, attrs, backend }) => {\n const { image } = inputs;\n const { radians, fillValue, center } = attrs;\n const cpuBackend = backend;\n const output = util.getTypedArrayFromDType(image.dtype, util.sizeFromShape(image.shape));\n const [batch, imageHeight, imageWidth, numChannels] = image.shape;\n const [centerX, centerY] = backend_util.getImageCenter(center, imageHeight, imageWidth);\n const fullOpacityValue = 255;\n const sinFactor = Math.sin(radians);\n const cosFactor = Math.cos(radians);\n const imageVals = cpuBackend.data.get(image.dataId).values;\n for (let batchIdx = 0; batchIdx < batch; batchIdx++) {\n const batchOffset = batchIdx * imageWidth * imageHeight * numChannels;\n for (let row = 0; row < imageHeight; row++) {\n const rowOffset = row * (imageWidth * numChannels);\n for (let col = 0; col < imageWidth; col++) {\n const colOffset = col * numChannels;\n for (let channel = 0; channel < numChannels; channel++) {\n const coords = [batch, row, col, channel];\n const x = coords[2];\n const y = coords[1];\n // coordX/coordY are the result of rotating and translating x/y.\n let coordX = (x - centerX) * cosFactor - (y - centerY) * sinFactor;\n let coordY = (x - centerX) * sinFactor + (y - centerY) * cosFactor;\n coordX = Math.round(coordX + centerX);\n coordY = Math.round(coordY + centerY);\n let outputValue = fillValue;\n if (typeof fillValue !== 'number') {\n if (channel === 3) {\n outputValue = fullOpacityValue;\n }\n else {\n outputValue = fillValue[channel];\n }\n }\n // If the coordinate position falls within the image boundaries...\n if (coordX >= 0 && coordX < imageWidth && coordY >= 0 &&\n coordY < imageHeight) {\n // set the output to the image value at the coordinate position.\n const rotatedRowOffset = coordY * (imageWidth * numChannels);\n const rotatedColOffset = coordX * numChannels;\n const imageIdx = batchOffset + rotatedRowOffset + rotatedColOffset + channel;\n outputValue = imageVals[imageIdx];\n }\n const outIdx = batchOffset + rowOffset + colOffset + channel;\n output[outIdx] = outputValue;\n }\n }\n }\n }\n const dataId = cpuBackend.write(output, image.shape, image.dtype);\n return { dataId, shape: image.shape, dtype: image.dtype };\n }\n};\n//# sourceMappingURL=RotateWithOffset.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Round } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const round = unaryKernelFunc(Round, (xi) => {\n // The algorithm is based on banker's rounding.\n const base = Math.floor(xi);\n if (xi - base < 0.5) {\n return Math.floor(xi);\n }\n else if (xi - base > 0.5) {\n return Math.ceil(xi);\n }\n else {\n if (base % 2.0 === 0.0) {\n return base;\n }\n else {\n return base + 1.0;\n }\n }\n});\nexport const roundConfig = {\n kernelName: Round,\n backendName: 'cpu',\n kernelFunc: round,\n};\n//# sourceMappingURL=Round.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Selu } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nconst scaleAlpha = backend_util.SELU_SCALEALPHA;\nconst scale = backend_util.SELU_SCALE;\nexport const selu = unaryKernelFunc(Selu, (xi) => {\n if (xi >= 0) {\n return scale * xi;\n }\n else {\n return scaleAlpha * (Math.exp(xi) - 1);\n }\n});\nexport const seluConfig = {\n kernelName: Selu,\n backendName: 'cpu',\n kernelFunc: selu,\n};\n//# sourceMappingURL=Selu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sigmoid } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const sigmoid = unaryKernelFunc(Sigmoid, (xi) => 1 / (1 + Math.exp(-xi)));\nexport const sigmoidConfig = {\n kernelName: Sigmoid,\n backendName: 'cpu',\n kernelFunc: sigmoid,\n};\n//# sourceMappingURL=Sigmoid.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sign } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const sign = unaryKernelFunc(Sign, (xi) => {\n if (xi < 0) {\n return -1;\n }\n else if (xi > 0) {\n return 1;\n }\n else {\n return 0;\n }\n});\nexport const signConfig = {\n kernelName: Sign,\n backendName: 'cpu',\n kernelFunc: sign,\n};\n//# sourceMappingURL=Sign.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sin } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const sin = unaryKernelFunc(Sin, (xi) => Math.sin(xi));\nexport const sinConfig = {\n kernelName: Sin,\n backendName: 'cpu',\n kernelFunc: sin,\n};\n//# sourceMappingURL=Sin.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sinh } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const sinh = unaryKernelFunc(Sinh, (xi) => Math.sinh(xi));\nexport const sinhConfig = {\n kernelName: Sinh,\n backendName: 'cpu',\n kernelFunc: sinh,\n};\n//# sourceMappingURL=Sinh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Softplus } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\n// mirrors the implementation of tf.nn.softplus: https://goo.gl/vkcvwX\n// epsilon is the difference between 1.0 and the next representable float.\n// For a single precision 32 bit float this should be 2^-23, see:\n// https://math.byu.edu/~schow/work/IEEEFloatingPoint.htm\nconst epsilon = 1.1920928955078125e-7;\nconst threshold = Math.log(epsilon) + 2.0;\nexport const softplus = unaryKernelFunc(Softplus, (xi) => {\n // Value above which exp(x) may overflow, but softplus(x) == x\n // is within machine epsilon.\n const tooLarge = xi > -threshold;\n // Value below which exp(x) may underflow, but softplus(x) == exp(x)\n // is within machine epsilon.\n const tooSmall = xi < threshold;\n const expX = Math.exp(xi);\n let result;\n if (tooSmall) {\n result = expX;\n }\n else if (tooLarge) {\n result = xi;\n }\n else {\n result = Math.log(1.0 + expX);\n }\n return result;\n});\nexport const softplusConfig = {\n kernelName: Softplus,\n backendName: 'cpu',\n kernelFunc: softplus,\n};\n//# sourceMappingURL=Softplus.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Transpose } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { transposeImpl } from './Transpose_impl';\nexport function transpose(args) {\n const { inputs, attrs, backend } = args;\n const { x } = inputs;\n const { perm } = attrs;\n assertNotComplex(x, 'transpose');\n const xRank = x.shape.length;\n const newShape = new Array(xRank);\n for (let i = 0; i < newShape.length; i++) {\n newShape[i] = x.shape[perm[i]];\n }\n const values = backend.data.get(x.dataId).values;\n const result = transposeImpl(values, x.shape, x.dtype, perm, newShape);\n const dataId = backend.write(result, newShape, x.dtype);\n return { dataId, shape: newShape, dtype: x.dtype };\n}\nexport const transposeConfig = {\n kernelName: Transpose,\n backendName: 'cpu',\n kernelFunc: transpose\n};\n//# sourceMappingURL=Transpose.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, SpaceToBatchND, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { padV2Config } from './PadV2';\nimport { reshape } from './Reshape';\nimport { transpose } from './Transpose';\nexport function spaceToBatchND(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n const { blockShape, paddings } = attrs;\n assertNotComplex([x], 'spaceToBatchND');\n const prod = util.sizeFromShape(blockShape);\n const completePaddings = [[0, 0]];\n completePaddings.push(...paddings);\n for (let i = 1 + blockShape.length; i < x.shape.length; ++i) {\n completePaddings.push([0, 0]);\n }\n const paddedX = padV2Config.kernelFunc({\n inputs: { x },\n backend,\n attrs: { paddings: completePaddings, constantValue: 0 }\n });\n const reshapedPaddedShape = backend_util.getReshaped(paddedX.shape, blockShape, prod, false);\n const permutedReshapedPaddedPermutation = backend_util.getPermuted(reshapedPaddedShape.length, blockShape.length, false);\n const flattenShape = backend_util.getReshapedPermuted(paddedX.shape, blockShape, prod, false);\n const reshapeInputs = { x: paddedX };\n const reshapeAttrs = { shape: reshapedPaddedShape };\n const paddedXReshaped = reshape({ inputs: reshapeInputs, backend, attrs: reshapeAttrs 
});\n const transposeInputs = { x: paddedXReshaped };\n const transposeAttrs = { perm: permutedReshapedPaddedPermutation };\n const paddedXT = transpose({ inputs: transposeInputs, backend, attrs: transposeAttrs });\n const resultReshapeInputs = { x: paddedXT };\n const resultReshapeAttrs = { shape: flattenShape };\n const result = reshape({ inputs: resultReshapeInputs, backend, attrs: resultReshapeAttrs });\n backend.disposeIntermediateTensorInfo(paddedX);\n backend.disposeIntermediateTensorInfo(paddedXReshaped);\n backend.disposeIntermediateTensorInfo(paddedXT);\n return result;\n}\nexport const spaceToBatchNDConfig = {\n kernelName: SpaceToBatchND,\n backendName: 'cpu',\n kernelFunc: spaceToBatchND\n};\n//# sourceMappingURL=SpaceToBatchND.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sqrt } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const sqrt = unaryKernelFunc(Sqrt, (xi) => Math.sqrt(xi));\nexport const sqrtConfig = {\n kernelName: Sqrt,\n backendName: 'cpu',\n kernelFunc: sqrt,\n};\n//# sourceMappingURL=Sqrt.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Square } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport const squareConfig = {\n kernelName: Square,\n backendName: 'cpu',\n kernelFunc: ({ inputs, backend }) => {\n const { x } = inputs;\n const cpuBackend = backend;\n assertNotComplex(x, 'square');\n const values = cpuBackend.data.get(x.dataId).values;\n const newValues = new Float32Array(values.length);\n for (let i = 0; i < values.length; ++i) {\n const value = values[i];\n newValues[i] = value * value;\n }\n const dataId = cpuBackend.write(newValues, x.shape, x.dtype);\n return { dataId, shape: x.shape, dtype: x.dtype };\n }\n};\n//# sourceMappingURL=Square.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Step } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const step = unaryKernelFunc(Step, (xi, attrs) => {\n const stepAttrs = attrs;\n if (isNaN(xi)) {\n return NaN;\n }\n else {\n return xi > 0 ? 1 : stepAttrs.alpha;\n }\n});\nexport const stepConfig = {\n kernelName: Step,\n backendName: 'cpu',\n kernelFunc: step,\n};\n//# sourceMappingURL=Step.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Tan } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const tan = unaryKernelFunc(Tan, (xi) => Math.tan(xi));\nexport const tanConfig = {\n kernelName: Tan,\n backendName: 'cpu',\n kernelFunc: tan,\n};\n//# sourceMappingURL=Tan.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Tanh } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const tanh = unaryKernelFunc(Tanh, (xi) => Math.tanh(xi));\nexport const tanhConfig = {\n kernelName: Tanh,\n backendName: 'cpu',\n kernelFunc: tanh,\n};\n//# sourceMappingURL=Tanh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Unique } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { uniqueImpl } from './Unique_impl';\nexport function unique(args) {\n const { inputs, attrs, backend } = args;\n const { axis } = attrs;\n const { x } = inputs;\n assertNotComplex(x, 'unique');\n const values = backend.data.get(x.dataId).values;\n const { outputValues, outputShape, indices } = uniqueImpl(values, axis, x.shape, x.dtype);\n return [\n backend.makeTensorInfo(outputShape, x.dtype, outputValues),\n backend.makeTensorInfo([indices.length], 'int32', indices),\n ];\n}\nexport const uniqueConfig = {\n kernelName: Unique,\n backendName: 'cpu',\n kernelFunc: unique,\n};\n//# sourceMappingURL=Unique.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// We explicitly import the modular kernels so they get registered in the\n// global registry when we compile the library. 
A modular build would replace\n// the contents of this file and import only the kernels that are needed.\nimport { registerKernel } from '@tensorflow/tfjs-core';\nimport { _fusedMatMulConfig } from './kernels/_FusedMatMul';\nimport { absConfig } from './kernels/Abs';\nimport { acosConfig } from './kernels/Acos';\nimport { acoshConfig } from './kernels/Acosh';\nimport { addConfig } from './kernels/Add';\nimport { asinConfig } from './kernels/Asin';\nimport { asinhConfig } from './kernels/Asinh';\nimport { atanConfig } from './kernels/Atan';\nimport { atanhConfig } from './kernels/Atanh';\nimport { avgPoolConfig } from './kernels/AvgPool';\nimport { avgPoolBackpropConfig } from './kernels/AvgPoolBackprop';\nimport { batchMatMulConfig } from './kernels/BatchMatMul';\nimport { batchNormConfig } from './kernels/BatchNorm';\nimport { castConfig } from './kernels/Cast';\nimport { ceilConfig } from './kernels/Ceil';\nimport { clipConfig } from './kernels/Clip';\nimport { complexConfig } from './kernels/Complex';\nimport { concatConfig } from './kernels/Concat';\nimport { conv2DConfig } from './kernels/Conv2D';\nimport { conv2DBackpropFilterConfig } from './kernels/Conv2DBackpropFilter';\nimport { conv2DBackpropInputConfig } from './kernels/Conv2DBackpropInput';\nimport { conv3DConfig } from './kernels/Conv3D';\nimport { conv3DBackpropFilterV2Config } from './kernels/Conv3DBackpropFilterV2';\nimport { conv3DBackpropInputV2Config } from './kernels/Conv3DBackpropInputV2';\nimport { cosConfig } from './kernels/Cos';\nimport { coshConfig } from './kernels/Cosh';\nimport { depthwiseConv2dNativeConfig } from './kernels/DepthwiseConv2dNative';\nimport { depthwiseConv2dNativeBackpropFilterConfig } from './kernels/DepthwiseConv2dNativeBackpropFilter';\nimport { depthwiseConv2dNativeBackpropInputConfig } from './kernels/DepthwiseConv2dNativeBackpropInput';\nimport { dilation2dConfig } from './kernels/Dilation2D';\nimport { dilation2dBackpropFilterConfig } from './kernels/Dilation2DBackpropFilter';\nimport { dilation2dBackpropInputConfig } from './kernels/Dilation2DBackpropInput';\nimport { divConfig } from './kernels/Div';\nimport { eluConfig } from './kernels/Elu';\nimport { erfConfig } from './kernels/Erf';\nimport { expConfig } from './kernels/Exp';\nimport { expm1Config } from './kernels/Expm1';\nimport { fftConfig } from './kernels/FFT';\nimport { fillConfig } from './kernels/Fill';\nimport { flipLeftRightConfig } from './kernels/FlipLeftRight';\nimport { floorConfig } from './kernels/Floor';\nimport { fusedConv2DConfig } from './kernels/FusedConv2D';\nimport { fusedDepthwiseConv2DConfig } from './kernels/FusedDepthwiseConv2D';\nimport { identityConfig } from './kernels/Identity';\nimport { ifftConfig } from './kernels/IFFT';\nimport { imagConfig } from './kernels/Imag';\nimport { isFiniteConfig } from './kernels/IsFinite';\nimport { isInfConfig } from './kernels/IsInf';\nimport { isNaNConfig } from './kernels/IsNaN';\nimport { logConfig } from './kernels/Log';\nimport { log1pConfig } from './kernels/Log1p';\nimport { logicalNotConfig } from './kernels/LogicalNot';\nimport { maxConfig } from './kernels/Max';\nimport { maxPoolConfig } from './kernels/MaxPool';\nimport { maxPoolBackpropConfig } from './kernels/MaxPoolBackprop';\nimport { maxPoolWithArgmaxConfig } from './kernels/MaxPoolWithArgmax';\nimport { mirrorPadConfig } from './kernels/MirrorPad';\nimport { multiplyConfig } from './kernels/Multiply';\nimport { nonMaxSuppressionV4Config } from './kernels/NonMaxSuppressionV4';\nimport { 
nonMaxSuppressionV5Config } from './kernels/NonMaxSuppressionV5';\nimport { notEqualConfig } from './kernels/NotEqual';\nimport { padV2Config } from './kernels/PadV2';\nimport { preluConfig } from './kernels/Prelu';\nimport { realConfig } from './kernels/Real';\nimport { reciprocalConfig } from './kernels/Reciprocal';\nimport { reluConfig } from './kernels/Relu';\nimport { relu6Config } from './kernels/Relu6';\nimport { reshapeConfig } from './kernels/Reshape';\nimport { rotateWithOffsetConfig } from './kernels/RotateWithOffset';\nimport { roundConfig } from './kernels/Round';\nimport { rsqrtConfig } from './kernels/Rsqrt';\nimport { seluConfig } from './kernels/Selu';\nimport { sigmoidConfig } from './kernels/Sigmoid';\nimport { signConfig } from './kernels/Sign';\nimport { sinConfig } from './kernels/Sin';\nimport { sinhConfig } from './kernels/Sinh';\nimport { sliceConfig } from './kernels/Slice';\nimport { softplusConfig } from './kernels/Softplus';\nimport { spaceToBatchNDConfig } from './kernels/SpaceToBatchND';\nimport { sqrtConfig } from './kernels/Sqrt';\nimport { squareConfig } from './kernels/Square';\nimport { squaredDifferenceConfig } from './kernels/SquaredDifference';\nimport { stepConfig } from './kernels/Step';\nimport { subConfig } from './kernels/Sub';\nimport { tanConfig } from './kernels/Tan';\nimport { tanhConfig } from './kernels/Tanh';\nimport { transposeConfig } from './kernels/Transpose';\nimport { uniqueConfig } from './kernels/Unique';\n// List all kernel configs here\nconst kernelConfigs = [\n _fusedMatMulConfig,\n absConfig,\n acosConfig,\n acoshConfig,\n addConfig,\n asinConfig,\n asinhConfig,\n atanConfig,\n atanhConfig,\n avgPoolConfig,\n avgPoolBackpropConfig,\n batchMatMulConfig,\n batchNormConfig,\n castConfig,\n ceilConfig,\n clipConfig,\n complexConfig,\n concatConfig,\n conv2DBackpropFilterConfig,\n conv2DBackpropInputConfig,\n conv2DConfig,\n conv3DBackpropFilterV2Config,\n conv3DBackpropInputV2Config,\n conv3DConfig,\n cosConfig,\n coshConfig,\n depthwiseConv2dNativeConfig,\n depthwiseConv2dNativeBackpropFilterConfig,\n depthwiseConv2dNativeBackpropInputConfig,\n dilation2dConfig,\n dilation2dBackpropInputConfig,\n dilation2dBackpropFilterConfig,\n divConfig,\n eluConfig,\n erfConfig,\n expConfig,\n expm1Config,\n fftConfig,\n fillConfig,\n flipLeftRightConfig,\n floorConfig,\n fusedConv2DConfig,\n fusedDepthwiseConv2DConfig,\n identityConfig,\n ifftConfig,\n imagConfig,\n isFiniteConfig,\n isInfConfig,\n isNaNConfig,\n logConfig,\n log1pConfig,\n logicalNotConfig,\n maxPoolConfig,\n maxPoolBackpropConfig,\n maxPoolWithArgmaxConfig,\n maxConfig,\n mirrorPadConfig,\n multiplyConfig,\n nonMaxSuppressionV4Config,\n nonMaxSuppressionV5Config,\n notEqualConfig,\n padV2Config,\n preluConfig,\n realConfig,\n reciprocalConfig,\n reluConfig,\n relu6Config,\n reshapeConfig,\n rotateWithOffsetConfig,\n roundConfig,\n rsqrtConfig,\n seluConfig,\n sigmoidConfig,\n signConfig,\n sinConfig,\n sinhConfig,\n sliceConfig,\n softplusConfig,\n spaceToBatchNDConfig,\n sqrtConfig,\n squareConfig,\n squaredDifferenceConfig,\n stepConfig,\n subConfig,\n tanConfig,\n tanhConfig,\n transposeConfig,\n uniqueConfig,\n];\nfor (const kernelConfig of kernelConfigs) {\n registerKernel(kernelConfig);\n}\n//# sourceMappingURL=register_all_kernels.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// All exports from this package should be in base.\nexport * from './base';\nimport './register_all_kernels';\n//# sourceMappingURL=index.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nconst contexts = {};\nconst WEBGL_ATTRIBUTES = {\n alpha: false,\n antialias: false,\n premultipliedAlpha: false,\n preserveDrawingBuffer: false,\n depth: false,\n stencil: false,\n failIfMajorPerformanceCaveat: true\n};\nexport function clearWebGLContext(webGLVersion) {\n delete contexts[webGLVersion];\n}\nexport function setWebGLContext(webGLVersion, gl) {\n contexts[webGLVersion] = gl;\n}\nexport function getWebGLContext(webGLVersion) {\n if (!(webGLVersion in contexts)) {\n const newCtx = getWebGLRenderingContext(webGLVersion);\n if (newCtx !== null) {\n contexts[webGLVersion] = newCtx;\n }\n else {\n console.log('Could not get context for WebGL version', webGLVersion);\n return null;\n }\n }\n const gl = contexts[webGLVersion];\n if (gl.isContextLost()) {\n delete contexts[webGLVersion];\n return getWebGLContext(webGLVersion);\n }\n gl.disable(gl.DEPTH_TEST);\n gl.disable(gl.STENCIL_TEST);\n gl.disable(gl.BLEND);\n gl.disable(gl.DITHER);\n gl.disable(gl.POLYGON_OFFSET_FILL);\n gl.disable(gl.SAMPLE_COVERAGE);\n gl.enable(gl.SCISSOR_TEST);\n gl.enable(gl.CULL_FACE);\n gl.cullFace(gl.BACK);\n return contexts[webGLVersion];\n}\nfunction createCanvas(webGLVersion) {\n if (typeof OffscreenCanvas !== 'undefined' && webGLVersion === 2) {\n return new OffscreenCanvas(300, 150);\n }\n else if (typeof document !== 'undefined') {\n return document.createElement('canvas');\n }\n else {\n throw new Error('Cannot create a canvas in this context');\n }\n}\nfunction getWebGLRenderingContext(webGLVersion) {\n if (webGLVersion !== 1 && webGLVersion !== 2) {\n throw new Error('Cannot get WebGL rendering context, WebGL is disabled.');\n }\n const canvas = createCanvas(webGLVersion);\n canvas.addEventListener('webglcontextlost', (ev) => {\n ev.preventDefault();\n delete contexts[webGLVersion];\n }, false);\n if (webGLVersion === 1) {\n return (canvas.getContext('webgl', WEBGL_ATTRIBUTES) ||\n canvas.getContext('experimental-webgl', WEBGL_ATTRIBUTES));\n }\n return 
canvas.getContext('webgl2', WEBGL_ATTRIBUTES);\n}\n//# sourceMappingURL=canvas_util.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env, util } from '@tensorflow/tfjs-core';\nexport var PackingScheme;\n(function (PackingScheme) {\n /**\n * All values in a single texel are densely packed without any constraints.\n *\n * This is how the shader encodes a tensor with shape = [2, 3, 4]\n * (indices are [batch, row, col]).\n *\n * 000|001 010|011 020|021\n * ------- ------- -------\n * 002|003 012|013 022|023\n *\n * 100|101 110|111 120|121\n * ------- ------- -------\n * 102|103 112|113 122|123\n *\n */\n PackingScheme[PackingScheme[\"DENSE\"] = 0] = \"DENSE\";\n /**\n * Single texels contain only values from the same batch, and from adjacent\n * rows and columns.\n *\n * This is how the shader encodes a tensor with shape = [2, 3, 5]\n * (indices are [batch, row, col]).\n *\n * 000|001 002|003 004|xxx 020|021 022|023 024|xxx\n * ------- ------- ------- ------- ------- -------\n * 010|011 012|013 014|xxx xxx|xxx xxx|xxx xxx|xxx\n *\n * 100|101 102|103 104|xxx 120|121 122|123 124|xxx\n * ------- ------- ------- ------- ------- -------\n * 110|111 112|113 114|xxx xxx|xxx xxx|xxx xxx|xxx\n *\n */\n PackingScheme[PackingScheme[\"SHARED_BATCH\"] = 1] = \"SHARED_BATCH\";\n})(PackingScheme || (PackingScheme = {}));\nexport var TextureUsage;\n(function (TextureUsage) {\n TextureUsage[TextureUsage[\"RENDER\"] = 0] = \"RENDER\";\n TextureUsage[TextureUsage[\"UPLOAD\"] = 1] = \"UPLOAD\";\n TextureUsage[TextureUsage[\"PIXELS\"] = 2] = \"PIXELS\";\n TextureUsage[TextureUsage[\"DOWNLOAD\"] = 3] = \"DOWNLOAD\";\n})(TextureUsage || (TextureUsage = {}));\nexport var PhysicalTextureType;\n(function (PhysicalTextureType) {\n PhysicalTextureType[PhysicalTextureType[\"UNPACKED_FLOAT16\"] = 0] = \"UNPACKED_FLOAT16\";\n PhysicalTextureType[PhysicalTextureType[\"UNPACKED_FLOAT32\"] = 1] = \"UNPACKED_FLOAT32\";\n PhysicalTextureType[PhysicalTextureType[\"PACKED_4X1_UNSIGNED_BYTE\"] = 2] = \"PACKED_4X1_UNSIGNED_BYTE\";\n PhysicalTextureType[PhysicalTextureType[\"PACKED_2X2_FLOAT32\"] = 3] = \"PACKED_2X2_FLOAT32\";\n PhysicalTextureType[PhysicalTextureType[\"PACKED_2X2_FLOAT16\"] = 4] = \"PACKED_2X2_FLOAT16\";\n})(PhysicalTextureType || (PhysicalTextureType = {}));\nexport function getUnpackedMatrixTextureShapeWidthHeight(rows, columns) {\n return [columns, rows];\n}\nexport function getUnpackedArraySizeFromMatrixSize(matrixSize, channelsPerTexture) {\n return matrixSize * channelsPerTexture;\n}\nexport function getColorMatrixTextureShapeWidthHeight(rows, columns) {\n return [columns * 4, rows];\n}\n/**\n * Get shape for densely packed RGBA texture.\n */\nexport function getDenseTexShape(shape) {\n const size = util.sizeFromShape(shape);\n const texelsNeeded = Math.ceil(size / 4);\n return util.sizeToSquarishShape(texelsNeeded);\n}\nexport 
function getMatrixSizeFromUnpackedArraySize(unpackedSize, channelsPerTexture) {\n if (unpackedSize % channelsPerTexture !== 0) {\n throw new Error(`unpackedSize (${unpackedSize}) must be a multiple of ` +\n `${channelsPerTexture}`);\n }\n return unpackedSize / channelsPerTexture;\n}\nexport function decodeMatrixFromUnpackedColorRGBAArray(unpackedArray, matrix, channels) {\n const requiredSize = unpackedArray.length * channels / 4;\n if (matrix.length < requiredSize) {\n throw new Error(`matrix length (${matrix.length}) must be >= ${requiredSize}`);\n }\n let dst = 0;\n for (let src = 0; src < unpackedArray.length; src += 4) {\n for (let c = 0; c < channels; c++) {\n matrix[dst++] = unpackedArray[src + c];\n }\n }\n}\nexport function getPackedMatrixTextureShapeWidthHeight(rows, columns) {\n return [\n Math.max(1, Math.ceil(columns / 2)), Math.max(1, Math.ceil(rows / 2))\n ];\n}\nexport function getPackedRGBAArraySizeFromMatrixShape(rows, columns) {\n const [w, h] = getPackedMatrixTextureShapeWidthHeight(rows, columns);\n return w * h * 4;\n}\nexport function getTextureConfig(\n// tslint:disable-next-line:no-any\ngl, textureHalfFloatExtension) {\n // tslint:disable-next-line:no-any\n const glany = gl;\n let internalFormatFloat;\n let internalFormatHalfFloat;\n let internalFormatPackedHalfFloat;\n let internalFormatPackedFloat;\n let textureFormatFloat;\n let downloadTextureFormat;\n let downloadUnpackNumChannels;\n let defaultNumChannels;\n let textureTypeHalfFloat;\n let textureTypeFloat;\n if (env().getNumber('WEBGL_VERSION') === 2) {\n internalFormatFloat = glany.R32F;\n internalFormatHalfFloat = glany.R16F;\n internalFormatPackedHalfFloat = glany.RGBA16F;\n internalFormatPackedFloat = glany.RGBA32F;\n textureFormatFloat = glany.RED;\n downloadUnpackNumChannels = 4;\n defaultNumChannels = 1;\n textureTypeHalfFloat = glany.HALF_FLOAT;\n textureTypeFloat = glany.FLOAT;\n }\n else {\n internalFormatFloat = gl.RGBA;\n internalFormatHalfFloat = gl.RGBA;\n internalFormatPackedHalfFloat = gl.RGBA;\n internalFormatPackedFloat = glany.RGBA;\n textureFormatFloat = gl.RGBA;\n downloadUnpackNumChannels = 4;\n defaultNumChannels = 4;\n textureTypeHalfFloat = textureHalfFloatExtension != null ?\n textureHalfFloatExtension.HALF_FLOAT_OES :\n null;\n textureTypeFloat = gl.FLOAT;\n }\n downloadTextureFormat = gl.RGBA;\n return {\n internalFormatFloat,\n internalFormatHalfFloat,\n internalFormatPackedHalfFloat,\n internalFormatPackedFloat,\n textureFormatFloat,\n downloadTextureFormat,\n downloadUnpackNumChannels,\n defaultNumChannels,\n textureTypeHalfFloat,\n textureTypeFloat\n };\n}\n//# sourceMappingURL=tex_util.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env, util } from '@tensorflow/tfjs-core';\nimport { getWebGLContext } from './canvas_util';\nimport { getTextureConfig } from './tex_util';\nexport function callAndCheck(gl, func) {\n const returnValue = func();\n if (env().getBool('DEBUG')) {\n checkWebGLError(gl);\n }\n return returnValue;\n}\nfunction checkWebGLError(gl) {\n const error = gl.getError();\n if (error !== gl.NO_ERROR) {\n throw new Error('WebGL Error: ' + getWebGLErrorMessage(gl, error));\n }\n}\n// https://en.wikipedia.org/wiki/Half-precision_floating-point_format\nconst MIN_FLOAT16 = 5.96e-8;\nconst MAX_FLOAT16 = 65504;\nexport function canBeRepresented(num) {\n if (env().getBool('WEBGL_RENDER_FLOAT32_ENABLED') || num === 0 ||\n (MIN_FLOAT16 < Math.abs(num) && Math.abs(num) < MAX_FLOAT16)) {\n return true;\n }\n return false;\n}\nexport function getWebGLErrorMessage(gl, status) {\n switch (status) {\n case gl.NO_ERROR:\n return 'NO_ERROR';\n case gl.INVALID_ENUM:\n return 'INVALID_ENUM';\n case gl.INVALID_VALUE:\n return 'INVALID_VALUE';\n case gl.INVALID_OPERATION:\n return 'INVALID_OPERATION';\n case gl.INVALID_FRAMEBUFFER_OPERATION:\n return 'INVALID_FRAMEBUFFER_OPERATION';\n case gl.OUT_OF_MEMORY:\n return 'OUT_OF_MEMORY';\n case gl.CONTEXT_LOST_WEBGL:\n return 'CONTEXT_LOST_WEBGL';\n default:\n return `Unknown error code ${status}`;\n }\n}\nexport function getExtensionOrThrow(gl, extensionName) {\n return throwIfNull(gl, () => gl.getExtension(extensionName), 'Extension \"' + extensionName + '\" not supported on this browser.');\n}\nexport function createVertexShader(gl, vertexShaderSource) {\n const vertexShader = throwIfNull(gl, () => gl.createShader(gl.VERTEX_SHADER), 'Unable to create vertex WebGLShader.');\n callAndCheck(gl, () => gl.shaderSource(vertexShader, vertexShaderSource));\n callAndCheck(gl, () => gl.compileShader(vertexShader));\n if (gl.getShaderParameter(vertexShader, gl.COMPILE_STATUS) === false) {\n console.log(gl.getShaderInfoLog(vertexShader));\n throw new Error('Failed to compile vertex shader.');\n }\n return vertexShader;\n}\nexport function createFragmentShader(gl, fragmentShaderSource) {\n const fragmentShader = throwIfNull(gl, () => gl.createShader(gl.FRAGMENT_SHADER), 'Unable to create fragment WebGLShader.');\n callAndCheck(gl, () => gl.shaderSource(fragmentShader, fragmentShaderSource));\n callAndCheck(gl, () => gl.compileShader(fragmentShader));\n if (gl.getShaderParameter(fragmentShader, gl.COMPILE_STATUS) === false) {\n logShaderSourceAndInfoLog(fragmentShaderSource, gl.getShaderInfoLog(fragmentShader));\n throw new Error('Failed to compile fragment shader.');\n }\n return fragmentShader;\n}\nconst lineNumberRegex = /ERROR: [0-9]+:([0-9]+):/g;\nfunction logShaderSourceAndInfoLog(shaderSource, shaderInfoLog) {\n const lineNumberRegexResult = lineNumberRegex.exec(shaderInfoLog);\n if (lineNumberRegexResult == null) 
{\n console.log(`Couldn't parse line number in error: ${shaderInfoLog}`);\n console.log(shaderSource);\n return;\n }\n const lineNumber = +lineNumberRegexResult[1];\n const shaderLines = shaderSource.split('\\n');\n const pad = shaderLines.length.toString().length + 2;\n const linesWithLineNumbers = shaderLines.map((line, lineNumber) => util.rightPad((lineNumber + 1).toString(), pad) + line);\n let maxLineLength = 0;\n for (let i = 0; i < linesWithLineNumbers.length; i++) {\n maxLineLength = Math.max(linesWithLineNumbers[i].length, maxLineLength);\n }\n const beforeErrorLines = linesWithLineNumbers.slice(0, lineNumber - 1);\n const errorLine = linesWithLineNumbers.slice(lineNumber - 1, lineNumber);\n const afterErrorLines = linesWithLineNumbers.slice(lineNumber);\n console.log(beforeErrorLines.join('\\n'));\n console.log(shaderInfoLog.split('\\n')[0]);\n console.log(`%c ${util.rightPad(errorLine[0], maxLineLength)}`, 'border:1px solid red; background-color:#e3d2d2; color:#a61717');\n console.log(afterErrorLines.join('\\n'));\n}\nexport function createProgram(gl) {\n return throwIfNull(gl, () => gl.createProgram(), 'Unable to create WebGLProgram.');\n}\nexport function linkProgram(gl, program) {\n callAndCheck(gl, () => gl.linkProgram(program));\n if (gl.getProgramParameter(program, gl.LINK_STATUS) === false) {\n console.log(gl.getProgramInfoLog(program));\n throw new Error('Failed to link vertex and fragment shaders.');\n }\n}\nexport function validateProgram(gl, program) {\n callAndCheck(gl, () => gl.validateProgram(program));\n if (gl.getProgramParameter(program, gl.VALIDATE_STATUS) === false) {\n console.log(gl.getProgramInfoLog(program));\n throw new Error('Shader program validation failed.');\n }\n}\nexport function createStaticVertexBuffer(gl, data) {\n const buffer = throwIfNull(gl, () => gl.createBuffer(), 'Unable to create WebGLBuffer');\n callAndCheck(gl, () => gl.bindBuffer(gl.ARRAY_BUFFER, buffer));\n callAndCheck(gl, () => gl.bufferData(gl.ARRAY_BUFFER, data, gl.STATIC_DRAW));\n return buffer;\n}\nexport function createStaticIndexBuffer(gl, data) {\n const buffer = throwIfNull(gl, () => gl.createBuffer(), 'Unable to create WebGLBuffer');\n callAndCheck(gl, () => gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, buffer));\n callAndCheck(gl, () => gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, data, gl.STATIC_DRAW));\n return buffer;\n}\nexport function getNumChannels() {\n if (env().getNumber('WEBGL_VERSION') === 2) {\n return 1;\n }\n return 4;\n}\nexport function createTexture(gl) {\n return throwIfNull(gl, () => gl.createTexture(), 'Unable to create WebGLTexture.');\n}\nexport function validateTextureSize(width, height) {\n const maxTextureSize = env().getNumber('WEBGL_MAX_TEXTURE_SIZE');\n if ((width <= 0) || (height <= 0)) {\n const requested = `[${width}x${height}]`;\n throw new Error('Requested texture size ' + requested + ' is invalid.');\n }\n if ((width > maxTextureSize) || (height > maxTextureSize)) {\n const requested = `[${width}x${height}]`;\n const max = `[${maxTextureSize}x${maxTextureSize}]`;\n throw new Error('Requested texture size ' + requested +\n ' greater than WebGL maximum on this browser / GPU ' + max + '.');\n }\n}\nexport function createFramebuffer(gl) {\n return throwIfNull(gl, () => gl.createFramebuffer(), 'Unable to create WebGLFramebuffer.');\n}\nexport function bindVertexBufferToProgramAttribute(gl, program, attribute, buffer, arrayEntriesPerItem, itemStrideInBytes, itemOffsetInBytes) {\n const loc = gl.getAttribLocation(program, attribute);\n if (loc === -1) {\n 
// The GPU compiler decided to strip out this attribute because it's unused,\n // thus no need to bind.\n return false;\n }\n callAndCheck(gl, () => gl.bindBuffer(gl.ARRAY_BUFFER, buffer));\n callAndCheck(gl, () => gl.vertexAttribPointer(loc, arrayEntriesPerItem, gl.FLOAT, false, itemStrideInBytes, itemOffsetInBytes));\n callAndCheck(gl, () => gl.enableVertexAttribArray(loc));\n return true;\n}\nexport function bindTextureUnit(gl, texture, textureUnit) {\n validateTextureUnit(gl, textureUnit);\n callAndCheck(gl, () => gl.activeTexture(gl.TEXTURE0 + textureUnit));\n callAndCheck(gl, () => gl.bindTexture(gl.TEXTURE_2D, texture));\n}\nexport function unbindTextureUnit(gl, textureUnit) {\n validateTextureUnit(gl, textureUnit);\n callAndCheck(gl, () => gl.activeTexture(gl.TEXTURE0 + textureUnit));\n callAndCheck(gl, () => gl.bindTexture(gl.TEXTURE_2D, null));\n}\nexport function getProgramUniformLocationOrThrow(gl, program, uniformName) {\n return throwIfNull(gl, () => gl.getUniformLocation(program, uniformName), 'uniform \"' + uniformName + '\" not present in program.');\n}\nexport function getProgramUniformLocation(gl, program, uniformName) {\n return gl.getUniformLocation(program, uniformName);\n}\nexport function bindTextureToProgramUniformSampler(gl, texture, uniformSamplerLocation, textureUnit) {\n callAndCheck(gl, () => bindTextureUnit(gl, texture, textureUnit));\n callAndCheck(gl, () => gl.uniform1i(uniformSamplerLocation, textureUnit));\n}\nexport function bindCanvasToFramebuffer(gl) {\n callAndCheck(gl, () => gl.bindFramebuffer(gl.FRAMEBUFFER, null));\n callAndCheck(gl, () => gl.viewport(0, 0, gl.canvas.width, gl.canvas.height));\n callAndCheck(gl, () => gl.scissor(0, 0, gl.canvas.width, gl.canvas.height));\n}\nexport function bindColorTextureToFramebuffer(gl, texture, framebuffer) {\n callAndCheck(gl, () => gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer));\n callAndCheck(gl, () => gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0));\n}\nexport function unbindColorTextureFromFramebuffer(gl, framebuffer) {\n callAndCheck(gl, () => gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer));\n callAndCheck(gl, () => gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, null, 0));\n}\nexport function validateFramebuffer(gl) {\n const status = gl.checkFramebufferStatus(gl.FRAMEBUFFER);\n if (status !== gl.FRAMEBUFFER_COMPLETE) {\n throw new Error('Error binding framebuffer: ' + getFramebufferErrorMessage(gl, status));\n }\n}\nexport function getFramebufferErrorMessage(gl, status) {\n switch (status) {\n case gl.FRAMEBUFFER_INCOMPLETE_ATTACHMENT:\n return 'FRAMEBUFFER_INCOMPLETE_ATTACHMENT';\n case gl.FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT:\n return 'FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT';\n case gl.FRAMEBUFFER_INCOMPLETE_DIMENSIONS:\n return 'FRAMEBUFFER_INCOMPLETE_DIMENSIONS';\n case gl.FRAMEBUFFER_UNSUPPORTED:\n return 'FRAMEBUFFER_UNSUPPORTED';\n default:\n return `unknown error ${status}`;\n }\n}\nfunction throwIfNull(gl, returnTOrNull, failureMessage) {\n const tOrNull = callAndCheck(gl, () => returnTOrNull());\n if (tOrNull == null) {\n throw new Error(failureMessage);\n }\n return tOrNull;\n}\nfunction validateTextureUnit(gl, textureUnit) {\n const maxTextureUnit = gl.MAX_COMBINED_TEXTURE_IMAGE_UNITS - 1;\n const glTextureUnit = textureUnit + gl.TEXTURE0;\n if (glTextureUnit < gl.TEXTURE0 || glTextureUnit > maxTextureUnit) {\n const textureUnitRange = `[gl.TEXTURE0, gl.TEXTURE${maxTextureUnit}]`;\n throw new 
Error(`textureUnit must be in ${textureUnitRange}.`);\n }\n}\nexport function getBatchDim(shape, dimsToSkip = 2) {\n return util.sizeFromShape(shape.slice(0, shape.length - dimsToSkip));\n}\nexport function getRowsCols(shape) {\n if (shape.length === 0) {\n throw Error('Cannot get rows and columns of an empty shape array.');\n }\n return [\n shape.length > 1 ? shape[shape.length - 2] : 1, shape[shape.length - 1]\n ];\n}\nexport function getShapeAs3D(shape) {\n let shapeAs3D = [1, 1, 1];\n const isScalar = shape.length === 0 || (shape.length === 1 && shape[0] === 1);\n if (!isScalar) {\n shapeAs3D =\n [getBatchDim(shape), ...getRowsCols(shape)];\n }\n return shapeAs3D;\n}\nexport function getTextureShapeFromLogicalShape(logShape, isPacked = false) {\n let maxTexSize = env().getNumber('WEBGL_MAX_TEXTURE_SIZE');\n if (isPacked) {\n maxTexSize = maxTexSize * 2;\n // This logic ensures we accurately count the number of packed texels needed\n // to accommodate the tensor. We can only pack values in the same texel if\n // they are from adjacent pairs of rows/cols within the same batch. So if a\n // tensor has 3 rows, we pretend it has 4 rows in order to account for the\n // fact that the texels containing the third row are half empty.\n logShape = logShape.map((d, i) => i >= logShape.length - 2 ?\n util.nearestLargerEven(logShape[i]) :\n logShape[i]);\n // Packed texture height is at least 2 (the channel height of a single\n // texel).\n if (logShape.length === 1) {\n logShape = [2, logShape[0]];\n }\n }\n // If logical shape is 2, we don't squeeze, since we want to match physical.\n if (logShape.length !== 2) {\n const squeezeResult = util.squeezeShape(logShape);\n logShape = squeezeResult.newShape;\n }\n let size = util.sizeFromShape(logShape);\n if (logShape.length <= 1 && size <= maxTexSize) {\n return [1, size];\n }\n else if (logShape.length === 2 && logShape[0] <= maxTexSize &&\n logShape[1] <= maxTexSize) {\n return logShape;\n }\n else if (logShape.length === 3 && logShape[0] * logShape[1] <= maxTexSize &&\n logShape[2] <= maxTexSize) {\n return [logShape[0] * logShape[1], logShape[2]];\n }\n else if (logShape.length === 3 && logShape[0] <= maxTexSize &&\n logShape[1] * logShape[2] <= maxTexSize) {\n return [logShape[0], logShape[1] * logShape[2]];\n }\n else if (logShape.length === 4 &&\n logShape[0] * logShape[1] * logShape[2] <= maxTexSize &&\n logShape[3] <= maxTexSize) {\n return [logShape[0] * logShape[1] * logShape[2], logShape[3]];\n }\n else if (logShape.length === 4 && logShape[0] <= maxTexSize &&\n logShape[1] * logShape[2] * logShape[3] <= maxTexSize) {\n return [logShape[0], logShape[1] * logShape[2] * logShape[3]];\n }\n else {\n if (isPacked) {\n // For packed textures size equals the number of channels required to\n // accommodate the texture data. However in order to squarify such that\n // inner dimensions stay even, we rewrite size to equal the number of\n // texels. 
Then in the return statement we rehydrate the squarified\n // dimensions to channel units.\n const batchDim = getBatchDim(logShape);\n let rows = 2, cols = 2;\n if (logShape.length) {\n [rows, cols] = getRowsCols(logShape);\n }\n size = batchDim * (rows / 2) * (cols / 2);\n return util.sizeToSquarishShape(size).map(d => d * 2);\n }\n return util.sizeToSquarishShape(size);\n }\n}\nfunction isEven(n) {\n return n % 2 === 0;\n}\n/**\n * This determines whether reshaping a packed texture requires rearranging\n * the data within the texture, assuming 2x2 packing.\n */\nexport function isReshapeFree(shape1, shape2) {\n shape1 = shape1.slice(-2);\n shape2 = shape2.slice(-2);\n if (util.arraysEqual(shape1, shape2)) {\n return true;\n }\n if (!shape1.length || !shape2.length) { // One of the shapes is a scalar.\n return true;\n }\n if (shape1[0] === 0 || shape1[1] === 0 || shape2[0] === 0 ||\n shape2[1] === 0) {\n return true;\n }\n if (shape1.length !== shape2.length) { // One of the shapes is a vector.\n const shape1Cols = shape1.slice(-1)[0];\n const shape2Cols = shape2.slice(-1)[0];\n if (shape1Cols === shape2Cols) {\n return true;\n }\n if (isEven(shape1Cols) && isEven(shape2Cols) &&\n (shape1[0] === 1 || shape2[0] === 1)) {\n return true;\n }\n }\n return shape1[1] === shape2[1] && isEven(shape1[0]) && isEven(shape2[0]);\n}\n// We cache webgl params because the environment gets reset between\n// unit tests and we don't want to constantly query the WebGLContext for\n// MAX_TEXTURE_SIZE.\nlet MAX_TEXTURE_SIZE;\nlet MAX_TEXTURES_IN_SHADER;\nexport function getWebGLMaxTextureSize(webGLVersion) {\n if (MAX_TEXTURE_SIZE == null) {\n const gl = getWebGLContext(webGLVersion);\n MAX_TEXTURE_SIZE = gl.getParameter(gl.MAX_TEXTURE_SIZE);\n }\n return MAX_TEXTURE_SIZE;\n}\nexport function resetMaxTextureSize() {\n MAX_TEXTURE_SIZE = null;\n}\nexport function resetMaxTexturesInShader() {\n MAX_TEXTURES_IN_SHADER = null;\n}\nexport function getMaxTexturesInShader(webGLVersion) {\n if (MAX_TEXTURES_IN_SHADER == null) {\n const gl = getWebGLContext(webGLVersion);\n MAX_TEXTURES_IN_SHADER = gl.getParameter(gl.MAX_TEXTURE_IMAGE_UNITS);\n }\n // We cap at 16 to avoid spurious runtime \"memory exhausted\" error.\n return Math.min(16, MAX_TEXTURES_IN_SHADER);\n}\nexport function getWebGLDisjointQueryTimerVersion(webGLVersion) {\n if (webGLVersion === 0) {\n return 0;\n }\n let queryTimerVersion;\n const gl = getWebGLContext(webGLVersion);\n if (hasExtension(gl, 'EXT_disjoint_timer_query_webgl2') &&\n webGLVersion === 2) {\n queryTimerVersion = 2;\n }\n else if (hasExtension(gl, 'EXT_disjoint_timer_query')) {\n queryTimerVersion = 1;\n }\n else {\n queryTimerVersion = 0;\n }\n return queryTimerVersion;\n}\nexport function hasExtension(gl, extensionName) {\n const ext = gl.getExtension(extensionName);\n return ext != null;\n}\nexport function isWebGLVersionEnabled(webGLVersion) {\n try {\n const gl = getWebGLContext(webGLVersion);\n if (gl != null) {\n return true;\n }\n }\n catch (e) {\n console.log('Error when getting WebGL context: ', e);\n return false;\n }\n return false;\n}\nexport function isCapableOfRenderingToFloatTexture(webGLVersion) {\n if (webGLVersion === 0) {\n return false;\n }\n const gl = getWebGLContext(webGLVersion);\n if (webGLVersion === 1) {\n if (!hasExtension(gl, 'OES_texture_float')) {\n return false;\n }\n }\n else {\n if (!hasExtension(gl, 'EXT_color_buffer_float')) {\n return false;\n }\n }\n const isFrameBufferComplete = createFloatTextureAndBindToFramebuffer(gl);\n return 
isFrameBufferComplete;\n}\n/**\n * Check if we can download values from a float/half-float texture.\n *\n * Note that for performance reasons we use binding a texture to a framebuffer\n * as a proxy for ability to download float values later using readPixels. The\n * texture params of this texture will not match those in readPixels exactly\n * but if we are unable to bind some kind of float texture to the frameBuffer\n * then we definitely will not be able to read float values from it.\n */\nexport function isDownloadFloatTextureEnabled(webGLVersion) {\n if (webGLVersion === 0) {\n return false;\n }\n const gl = getWebGLContext(webGLVersion);\n if (webGLVersion === 1) {\n if (!hasExtension(gl, 'OES_texture_float')) {\n return false;\n }\n if (!hasExtension(gl, 'WEBGL_color_buffer_float')) {\n return false;\n }\n }\n else {\n if (hasExtension(gl, 'EXT_color_buffer_float')) {\n return createFloatTextureAndBindToFramebuffer(gl);\n }\n const COLOR_BUFFER_HALF_FLOAT = 'EXT_color_buffer_half_float';\n if (hasExtension(gl, COLOR_BUFFER_HALF_FLOAT)) {\n const textureHalfFloatExtension = gl.getExtension(COLOR_BUFFER_HALF_FLOAT);\n return createHalfFloatTextureAndBindToFramebuffer(gl, textureHalfFloatExtension);\n }\n return false;\n }\n const isFrameBufferComplete = createFloatTextureAndBindToFramebuffer(gl);\n return isFrameBufferComplete;\n}\nfunction createFloatTextureAndBindToFramebuffer(gl) {\n const texConfig = getTextureConfig(gl);\n const texture = gl.createTexture();\n gl.bindTexture(gl.TEXTURE_2D, texture);\n const width = 1;\n const height = 1;\n gl.texImage2D(gl.TEXTURE_2D, 0, texConfig.internalFormatFloat, width, height, 0, texConfig.textureFormatFloat, texConfig.textureTypeFloat, null);\n const frameBuffer = gl.createFramebuffer();\n gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer);\n gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);\n const isFrameBufferComplete = gl.checkFramebufferStatus(gl.FRAMEBUFFER) === gl.FRAMEBUFFER_COMPLETE;\n gl.bindTexture(gl.TEXTURE_2D, null);\n gl.bindFramebuffer(gl.FRAMEBUFFER, null);\n gl.deleteTexture(texture);\n gl.deleteFramebuffer(frameBuffer);\n return isFrameBufferComplete;\n}\nfunction createHalfFloatTextureAndBindToFramebuffer(\n// tslint:disable-next-line:no-any\ngl, textureHalfFloatExtension) {\n const texConfig = getTextureConfig(gl, textureHalfFloatExtension);\n const texture = gl.createTexture();\n gl.bindTexture(gl.TEXTURE_2D, texture);\n const width = 1;\n const height = 1;\n gl.texImage2D(gl.TEXTURE_2D, 0, texConfig.internalFormatHalfFloat, width, height, 0, texConfig.textureFormatFloat, texConfig.textureTypeHalfFloat, null);\n const frameBuffer = gl.createFramebuffer();\n gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer);\n gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);\n const isFrameBufferComplete = gl.checkFramebufferStatus(gl.FRAMEBUFFER) === gl.FRAMEBUFFER_COMPLETE;\n gl.bindTexture(gl.TEXTURE_2D, null);\n gl.bindFramebuffer(gl.FRAMEBUFFER, null);\n gl.deleteTexture(texture);\n gl.deleteFramebuffer(frameBuffer);\n return isFrameBufferComplete;\n}\nexport function isWebGLFenceEnabled(webGLVersion) {\n if (webGLVersion !== 2) {\n return false;\n }\n const gl = getWebGLContext(webGLVersion);\n // tslint:disable-next-line:no-any\n const isEnabled = gl.fenceSync != null;\n return isEnabled;\n}\nexport function assertNotComplex(tensor, opName) {\n if (!Array.isArray(tensor)) {\n tensor = [tensor];\n }\n tensor.forEach(t => {\n if (t != null) {\n 
util.assert(t.dtype !== 'complex64', () => `${opName} does not support complex64 tensors ` +\n 'in the WebGL backend.');\n }\n });\n}\n//# sourceMappingURL=webgl_util.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { device_util, env } from '@tensorflow/tfjs-core';\nimport { getMaxTexturesInShader, getWebGLDisjointQueryTimerVersion, getWebGLMaxTextureSize, isCapableOfRenderingToFloatTexture, isDownloadFloatTextureEnabled, isWebGLFenceEnabled, isWebGLVersionEnabled } from './webgl_util';\nconst ENV = env();\n/**\n * This file contains WebGL-specific flag registrations.\n */\n/**\n * True if WebGL is supported.\n */\nENV.registerFlag('HAS_WEBGL', () => ENV.getNumber('WEBGL_VERSION') > 0);\n/** 0: No WebGL, 1: WebGL 1.0, 2: WebGL 2.0. */\nENV.registerFlag('WEBGL_VERSION', () => {\n if (isWebGLVersionEnabled(2)) {\n return 2;\n }\n else if (isWebGLVersionEnabled(1)) {\n return 1;\n }\n return 0;\n});\n/** Whether to check for numerical representation problems. */\nENV.registerFlag('WEBGL_CHECK_NUMERICAL_PROBLEMS', () => false);\nENV.registerFlag('WEBGL_BUFFER_SUPPORTED', () => ENV.get('WEBGL_VERSION') === 2);\n/** Whether the WebGL backend will sometimes forward ops to the CPU. */\nENV.registerFlag('WEBGL_CPU_FORWARD', () => true);\n/** Whether the WebGL backend will always use f16 textures for rendering. */\nENV.registerFlag('WEBGL_FORCE_F16_TEXTURES', () => false);\n/** Whether to turn all packing related flags on. */\nENV.registerFlag('WEBGL_PACK', () => ENV.getBool('HAS_WEBGL'));\n/** Whether we will pack the batchnormalization op. */\nENV.registerFlag('WEBGL_PACK_NORMALIZATION', () => ENV.getBool('WEBGL_PACK'));\n/** Whether we will pack the clip op. */\nENV.registerFlag('WEBGL_PACK_CLIP', () => ENV.getBool('WEBGL_PACK'));\n/** Whether we will pack the depthwise conv op. */\n// TODO: https://github.com/tensorflow/tfjs/issues/1679\nENV.registerFlag('WEBGL_PACK_DEPTHWISECONV', () => false);\n/** Whether we will pack binary ops. */\nENV.registerFlag('WEBGL_PACK_BINARY_OPERATIONS', () => ENV.getBool('WEBGL_PACK'));\n/** Whether we will pack unary ops. */\nENV.registerFlag('WEBGL_PACK_UNARY_OPERATIONS', () => ENV.getBool('WEBGL_PACK'));\n/** Whether we will pack array ops. */\nENV.registerFlag('WEBGL_PACK_ARRAY_OPERATIONS', () => ENV.getBool('WEBGL_PACK'));\n/** Whether we will pack image ops. */\nENV.registerFlag('WEBGL_PACK_IMAGE_OPERATIONS', () => ENV.getBool('WEBGL_PACK'));\n/** Whether we will pack reduce ops. */\nENV.registerFlag('WEBGL_PACK_REDUCE', () => ENV.getBool('WEBGL_PACK'));\n/** Whether packed WebGL kernels lazily unpack their outputs. */\nENV.registerFlag('WEBGL_LAZILY_UNPACK', () => ENV.getBool('WEBGL_PACK'));\n/** Whether we will use the im2col algorithm to speed up convolutions. */\nENV.registerFlag('WEBGL_CONV_IM2COL', () => ENV.getBool('WEBGL_PACK'));\n/** The maximum texture dimension. 
*/\nENV.registerFlag('WEBGL_MAX_TEXTURE_SIZE', () => getWebGLMaxTextureSize(ENV.getNumber('WEBGL_VERSION')));\n/** The maximum texture dimension. */\nENV.registerFlag('WEBGL_MAX_TEXTURES_IN_SHADER', () => getMaxTexturesInShader(ENV.getNumber('WEBGL_VERSION')));\n/**\n * The disjoint_query_timer extension version.\n * 0: disabled, 1: EXT_disjoint_timer_query, 2:\n * EXT_disjoint_timer_query_webgl2.\n * In Firefox with WebGL 2.0,\n * EXT_disjoint_timer_query_webgl2 is not available, so we must use the\n * WebGL 1.0 extension.\n */\nENV.registerFlag('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION', () => {\n const webGLVersion = ENV.getNumber('WEBGL_VERSION');\n if (webGLVersion === 0) {\n return 0;\n }\n return getWebGLDisjointQueryTimerVersion(webGLVersion);\n});\n/**\n * Whether the timer object from the disjoint_query_timer extension gives\n * timing information that is reliable.\n */\nENV.registerFlag('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_RELIABLE', () => ENV.getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION') > 0 &&\n !device_util.isMobile());\n/**\n * Whether the device is physically capable of rendering to float32 textures.\n */\nENV.registerFlag('WEBGL_RENDER_FLOAT32_CAPABLE', () => isCapableOfRenderingToFloatTexture(ENV.getNumber('WEBGL_VERSION')));\n/**\n * Whether rendering to float32 textures is enabled. If disabled, renders to\n * float16 textures.\n */\nENV.registerFlag('WEBGL_RENDER_FLOAT32_ENABLED', () => {\n return ENV.getBool('WEBGL_FORCE_F16_TEXTURES') ?\n false :\n ENV.getBool('WEBGL_RENDER_FLOAT32_CAPABLE');\n});\n/**\n * Whether downloading float textures is enabled (16 or 32 bit). If disabled,\n * uses IEEE 754 encoding of the float32 values to 4 uint8 when downloading.\n */\nENV.registerFlag('WEBGL_DOWNLOAD_FLOAT_ENABLED', () => isDownloadFloatTextureEnabled(ENV.getNumber('WEBGL_VERSION')));\n/** Whether the fence API is available. */\nENV.registerFlag('WEBGL_FENCE_API_ENABLED', () => isWebGLFenceEnabled(ENV.getNumber('WEBGL_VERSION')));\n/**\n * Tensors with size <= than this will be uploaded as uniforms, not textures.\n */\nENV.registerFlag('WEBGL_SIZE_UPLOAD_UNIFORM', () => {\n // Use uniform uploads only when 32bit floats are supported. In\n // 16bit\n // environments there are problems with comparing a 16bit texture value\n // with a 32bit uniform value.\n const useUniforms = ENV.getBool('WEBGL_RENDER_FLOAT32_ENABLED');\n return useUniforms ? 4 : 0;\n});\n/**\n * If the total number of bytes allocated on the GPU is greater than this\n * number, we will aggressively delete textures upon disposal with\n * gl.deleteMatrixTexture, rather than making them available for reuse.\n *\n * Default value -1 indicates that we will never aggressively delete textures.\n */\nENV.registerFlag('WEBGL_DELETE_TEXTURE_THRESHOLD', () => {\n return -1;\n}, threshold => {\n if (threshold < 0 && threshold !== -1) {\n throw new Error(`WEBGL_DELETE_TEXTURE_THRESHOLD must be -1 (indicating never ` +\n `delete) or at least 0, but got ${threshold}.`);\n }\n});\n//# sourceMappingURL=flags_webgl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// Import shared functionality from tfjs-backend-cpu without triggering\n// side effects.\n// tslint:disable-next-line: no-imports-from-dist\nimport * as shared from '@tensorflow/tfjs-backend-cpu/dist/shared';\nconst { simpleAbsImpl: simpleAbsImplCPU, addImpl: addImplCPU, ceilImpl: ceilImplCPU, expImpl: expImplCPU, expm1Impl: expm1ImplCPU, floorImpl: floorImplCPU, logImpl: logImplCPU, maxImpl: maxImplCPU, multiplyImpl: multiplyImplCPU, rsqrtImpl: rsqrtImplCPU, sliceImpl: sliceImplCPU, subImpl: subImplCPU, transposeImpl: transposeImplCPU, uniqueImpl: uniqueImplCPU, } = shared;\nexport { simpleAbsImplCPU, addImplCPU, ceilImplCPU, expImplCPU, expm1ImplCPU, logImplCPU, multiplyImplCPU, sliceImplCPU, subImplCPU, floorImplCPU, maxImplCPU, rsqrtImplCPU, transposeImplCPU, uniqueImplCPU, };\n//# sourceMappingURL=shared.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class AddNProgram {\n constructor(outputShape, shapes) {\n this.outputShape = [];\n this.outputShape = outputShape;\n this.variableNames = shapes.map((_, i) => `T${i}`);\n const snippets = [];\n // Get target elements from every input tensor.\n this.variableNames.forEach(variable => {\n snippets.push(`float v${variable} = get${variable}AtOutCoords();`);\n });\n // Calculate the sum of all elements.\n const operation = this.variableNames\n .map(variable => {\n return `v${variable}`;\n })\n .join(' + ');\n this.userCode = `\n void main() {\n ${snippets.join('\\n ')}\n\n float result = ${operation};\n setOutput(result);\n }\n `;\n }\n}\n//# sourceMappingURL=addn_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class AddNPackedProgram {\n constructor(outputShape, shapes) {\n this.outputShape = [];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = outputShape;\n this.variableNames = shapes.map((_, i) => `T${i}`);\n const snippets = [];\n // Get target elements from every input tensor.\n this.variableNames.forEach(variable => {\n snippets.push(`vec4 v${variable} = get${variable}AtOutCoords();`);\n });\n // Calculate the sum of all elements.\n const operation = this.variableNames\n .map(variable => {\n return `v${variable}`;\n })\n .join(' + ');\n this.userCode = `\n void main() {\n ${snippets.join('\\n ')}\n\n vec4 result = ${operation};\n setOutput(result);\n }\n `;\n }\n}\n//# sourceMappingURL=addn_packed_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class ArgMinMaxProgram {\n constructor(reduceInfo, op, firstPass) {\n this.variableNames = ['A'];\n const { windowSize, batchSize, outSize } = reduceInfo;\n if (!firstPass) {\n this.variableNames.push('bestIndicesA');\n }\n this.outputShape = [batchSize, outSize];\n const compOp = (op === 'max') ? '>' : '<';\n const indexSnippet = firstPass ?\n 'inOffset + i;' :\n 'round(getBestIndicesA(batch, inOffset + i));';\n this.userCode = `\n void main() {\n ivec2 coords = getOutputCoords();\n int batch = coords[0];\n int outIdx = coords[1];\n int inOffset = outIdx * ${windowSize};\n\n int bestIndex = inOffset;\n float bestValue = getA(batch, bestIndex);\n\n for (int i = 0; i < ${windowSize}; i++) {\n int inIdx = ${indexSnippet};\n float candidate = getA(batch, inIdx);\n if (candidate ${compOp} bestValue) {\n bestValue = candidate;\n bestIndex = inIdx;\n }\n }\n setOutput(float(bestIndex));\n }\n `;\n }\n}\n//# sourceMappingURL=argminmax_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport function getVecChannels(name, rank) {\n return ['x', 'y', 'z', 'w', 'u', 'v'].slice(0, rank).map(d => `${name}.${d}`);\n}\nexport function getChannels(name, rank) {\n if (rank === 1) {\n return [name];\n }\n return getVecChannels(name, rank);\n}\nexport function getSourceCoords(rank, dims) {\n if (rank === 1) {\n return 'rc';\n }\n let coords = '';\n for (let i = 0; i < rank; i++) {\n coords += dims[i];\n if (i < rank - 1) {\n coords += ',';\n }\n }\n return coords;\n}\n//# sourceMappingURL=packing_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from '@tensorflow/tfjs-core';\nexport function getGlslDifferences() {\n let version;\n let attribute;\n let varyingVs;\n let varyingFs;\n let texture2D;\n let output;\n let defineOutput;\n let defineSpecialNaN;\n let defineSpecialInf;\n let defineRound;\n if (env().getNumber('WEBGL_VERSION') === 2) {\n version = '#version 300 es';\n attribute = 'in';\n varyingVs = 'out';\n varyingFs = 'in';\n texture2D = 'texture';\n output = 'outputColor';\n defineOutput = 'out vec4 outputColor;';\n // Use custom isnan definition to work across differences between\n // implementations on various platforms. While this should happen in ANGLE\n // we still see differences between android and windows (on chrome) when\n // using isnan directly.\n defineSpecialNaN = `\n bool isnan_custom(float val) {\n return (val > 0.0 || val < 0.0) ? 
false : val != 0.0;\n }\n\n bvec4 isnan_custom(vec4 val) {\n return bvec4(isnan_custom(val.x),\n isnan_custom(val.y), isnan_custom(val.z), isnan_custom(val.w));\n }\n\n #define isnan(value) isnan_custom(value)\n `;\n // In webgl 2 we do not need to specify a custom isinf so there is no\n // need for a special INFINITY constant.\n defineSpecialInf = ``;\n defineRound = `\n #define round(value) newRound(value)\n int newRound(float value) {\n return int(floor(value + 0.5));\n }\n\n ivec4 newRound(vec4 value) {\n return ivec4(floor(value + vec4(0.5)));\n }\n `;\n }\n else {\n version = '';\n attribute = 'attribute';\n varyingVs = 'varying';\n varyingFs = 'varying';\n texture2D = 'texture2D';\n output = 'gl_FragColor';\n defineOutput = '';\n // WebGL1 has no built in isnan so we define one here.\n defineSpecialNaN = `\n #define isnan(value) isnan_custom(value)\n bool isnan_custom(float val) {\n return (val > 0. || val < 1. || val == 0.) ? false : true;\n }\n bvec4 isnan_custom(vec4 val) {\n return bvec4(isnan(val.x), isnan(val.y), isnan(val.z), isnan(val.w));\n }\n `;\n defineSpecialInf = `\n uniform float INFINITY;\n\n bool isinf(float val) {\n return abs(val) == INFINITY;\n }\n bvec4 isinf(vec4 val) {\n return equal(abs(val), vec4(INFINITY));\n }\n `;\n defineRound = `\n int round(float value) {\n return int(floor(value + 0.5));\n }\n\n ivec4 round(vec4 value) {\n return ivec4(floor(value + vec4(0.5)));\n }\n `;\n }\n return {\n version,\n attribute,\n varyingVs,\n varyingFs,\n texture2D,\n output,\n defineOutput,\n defineSpecialNaN,\n defineSpecialInf,\n defineRound\n };\n}\n//# sourceMappingURL=glsl_version.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\n/**\n * Produces GLSL code that derives logical coordinates from a flat\n * index. The code performs integer division with each stride and decrements\n * the index until the index equals the final dimension coordinate.\n */\nexport function getLogicalCoordinatesFromFlatIndex(coords, shape, index = 'index') {\n const strides = util.computeStrides(shape);\n return strides\n .map((stride, i) => {\n const line1 = `int ${coords[i]} = ${index} / ${stride}`;\n const line2 = i === strides.length - 1 ?\n `int ${coords[i + 1]} = ${index} - ${coords[i]} * ${stride}` :\n `index -= ${coords[i]} * ${stride}`;\n return `${line1}; ${line2};`;\n })\n .join('');\n}\nfunction buildVec(x) {\n if (x.length === 1) {\n return `${x[0]}`;\n }\n return `vec${x.length}(${x.join(',')})`;\n}\n/**\n * Produces GLSL code that computes the dot product of the input x and y\n * vectors. 
Handles splitting inputs into increments of vec4s when necessary.\n */\nexport function dotify(x, y) {\n if (x.length !== y.length) {\n throw new Error(`Vectors to be dotted must be of the same length -` +\n `got ${x.length} and ${y.length}`);\n }\n const slices = [];\n const nearestVec4 = Math.floor(x.length / 4);\n const nearestVec4Remainder = x.length % 4;\n for (let i = 0; i < nearestVec4; i++) {\n const xSlice = x.slice(i * 4, i * 4 + 4);\n const ySlice = y.slice(i * 4, i * 4 + 4);\n slices.push(`${buildVec(xSlice)}, ${buildVec(ySlice)}`);\n }\n if (nearestVec4Remainder !== 0) {\n let xSlice = x.slice(nearestVec4 * 4);\n let ySlice = y.slice(nearestVec4 * 4);\n if (xSlice.length === 1) {\n xSlice = xSlice.map(d => `float(${d})`);\n ySlice = ySlice.map(d => `float(${d})`);\n }\n slices.push(`${buildVec(xSlice)}, ${buildVec(ySlice)}`);\n }\n return slices.map((d, i) => `dot(${d})`).join('+');\n}\n/**\n * Produces GLSL that computes the flat index from 3D coordinates.\n */\nexport function getFlatIndexFrom3D(shape) {\n const strides = util.computeStrides(shape).map(d => d.toString());\n return `\n int getFlatIndex(ivec3 coords) {\n return coords.x * ${strides[0]} + coords.y * ${strides[1]} + coords.z;\n }\n`;\n}\nexport const ENCODE_FLOAT_SNIPPET = `\n const float FLOAT_MAX = 1.70141184e38;\n const float FLOAT_MIN = 1.17549435e-38;\n\n lowp vec4 encode_float(highp float v) {\n if (isnan(v)) {\n return vec4(255, 255, 255, 255);\n }\n\n highp float av = abs(v);\n\n if(av < FLOAT_MIN) {\n return vec4(0.0, 0.0, 0.0, 0.0);\n } else if(v > FLOAT_MAX) {\n return vec4(0.0, 0.0, 128.0, 127.0) / 255.0;\n } else if(v < -FLOAT_MAX) {\n return vec4(0.0, 0.0, 128.0, 255.0) / 255.0;\n }\n\n highp vec4 c = vec4(0,0,0,0);\n\n highp float e = floor(log2(av));\n highp float m = exp2(fract(log2(av))) - 1.0;\n\n c[2] = floor(128.0 * m);\n m -= c[2] / 128.0;\n c[1] = floor(32768.0 * m);\n m -= c[1] / 32768.0;\n c[0] = floor(8388608.0 * m);\n\n highp float ebias = e + 127.0;\n c[3] = floor(ebias / 2.0);\n ebias -= c[3] * 2.0;\n c[2] += floor(ebias) * 128.0;\n\n c[3] += 128.0 * step(0.0, -v);\n\n return c / 255.0;\n }\n`;\n//# sourceMappingURL=shader_compiler_util.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, util } from '@tensorflow/tfjs-core';\nconst { getBroadcastDims } = backend_util;\nimport { getGlslDifferences } from './glsl_version';\nimport * as shader_util from './shader_compiler_util';\nexport function makeShader(inputsInfo, outputShape, userCode, usesPackedTextures) {\n const prefixSnippets = [];\n inputsInfo.forEach(x => {\n const size = util.sizeFromShape(x.shapeInfo.logicalShape);\n // Snippet when we decided to upload the values as uniform.\n if (x.shapeInfo.isUniform) {\n prefixSnippets.push(`uniform float ${x.name}${size > 1 ? 
`[${size}]` : ''};`);\n }\n else {\n prefixSnippets.push(`uniform sampler2D ${x.name};`);\n prefixSnippets.push(`uniform int offset${x.name};`);\n }\n });\n const inputPrefixSnippet = prefixSnippets.join('\\n');\n const inputSamplingSnippet = inputsInfo\n .map(x => getInputSamplingSnippet(x, outputShape, usesPackedTextures))\n .join('\\n');\n const outTexShape = outputShape.texShape;\n const glsl = getGlslDifferences();\n const floatTextureSampleSnippet = getFloatTextureSampleSnippet(glsl);\n let outputSamplingSnippet;\n let floatTextureSetOutputSnippet;\n let shaderPrefix = getShaderPrefix(glsl);\n if (outputShape.isPacked) {\n outputSamplingSnippet =\n getPackedOutputSamplingSnippet(outputShape.logicalShape, outTexShape);\n floatTextureSetOutputSnippet = getFloatTextureSetRGBASnippet(glsl);\n }\n else {\n outputSamplingSnippet =\n getOutputSamplingSnippet(outputShape.logicalShape, outTexShape);\n floatTextureSetOutputSnippet = getFloatTextureSetRSnippet(glsl);\n }\n if (usesPackedTextures) {\n shaderPrefix += SHADER_PACKED_PREFIX;\n }\n const source = [\n shaderPrefix, floatTextureSampleSnippet, floatTextureSetOutputSnippet,\n inputPrefixSnippet, outputSamplingSnippet, inputSamplingSnippet, userCode\n ].join('\\n');\n return source;\n}\nfunction getSamplerFromInInfo(inInfo) {\n const shape = inInfo.shapeInfo.logicalShape;\n switch (shape.length) {\n case 0:\n return getSamplerScalar(inInfo);\n case 1:\n return getSampler1D(inInfo);\n case 2:\n return getSampler2D(inInfo);\n case 3:\n return getSampler3D(inInfo);\n case 4:\n return getSampler4D(inInfo);\n case 5:\n return getSampler5D(inInfo);\n case 6:\n return getSampler6D(inInfo);\n default:\n throw new Error(`${shape.length}-D input sampling` +\n ` is not yet supported`);\n }\n}\nfunction getPackedSamplerFromInInfo(inInfo) {\n const shape = inInfo.shapeInfo.logicalShape;\n switch (shape.length) {\n case 0:\n return getPackedSamplerScalar(inInfo);\n case 1:\n return getPackedSampler1D(inInfo);\n case 2:\n return getPackedSampler2D(inInfo);\n case 3:\n return getPackedSampler3D(inInfo);\n default:\n return getPackedSamplerND(inInfo);\n }\n}\nfunction getInputSamplingSnippet(inInfo, outShapeInfo, usesPackedTextures = false) {\n let res = '';\n if (usesPackedTextures) {\n res += getPackedSamplerFromInInfo(inInfo);\n }\n else {\n res += getSamplerFromInInfo(inInfo);\n }\n const inShape = inInfo.shapeInfo.logicalShape;\n const outShape = outShapeInfo.logicalShape;\n if (inShape.length <= outShape.length) {\n if (usesPackedTextures) {\n res += getPackedSamplerAtOutputCoords(inInfo, outShapeInfo);\n }\n else {\n res += getSamplerAtOutputCoords(inInfo, outShapeInfo);\n }\n }\n return res;\n}\nfunction getPackedOutputSamplingSnippet(outShape, outTexShape) {\n switch (outShape.length) {\n case 0:\n return getOutputScalarCoords();\n case 1:\n return getOutputPacked1DCoords(outShape, outTexShape);\n case 2:\n return getOutputPacked2DCoords(outShape, outTexShape);\n case 3:\n return getOutputPacked3DCoords(outShape, outTexShape);\n default:\n return getOutputPackedNDCoords(outShape, outTexShape);\n }\n}\nfunction getOutputSamplingSnippet(outShape, outTexShape) {\n switch (outShape.length) {\n case 0:\n return getOutputScalarCoords();\n case 1:\n return getOutput1DCoords(outShape, outTexShape);\n case 2:\n return getOutput2DCoords(outShape, outTexShape);\n case 3:\n return getOutput3DCoords(outShape, outTexShape);\n case 4:\n return getOutput4DCoords(outShape, outTexShape);\n case 5:\n return getOutput5DCoords(outShape, outTexShape);\n case 6:\n 
return getOutput6DCoords(outShape, outTexShape);\n default:\n throw new Error(`${outShape.length}-D output sampling is not yet supported`);\n }\n}\nfunction getFloatTextureSampleSnippet(glsl) {\n return `\n float sampleTexture(sampler2D textureSampler, vec2 uv) {\n return ${glsl.texture2D}(textureSampler, uv).r;\n }\n `;\n}\nfunction getFloatTextureSetRSnippet(glsl) {\n return `\n void setOutput(float val) {\n ${glsl.output} = vec4(val, 0, 0, 0);\n }\n `;\n}\nfunction getFloatTextureSetRGBASnippet(glsl) {\n return `\n void setOutput(vec4 val) {\n ${glsl.output} = val;\n }\n `;\n}\nfunction getShaderPrefix(glsl) {\n const SHADER_PREFIX = `${glsl.version}\n precision highp float;\n precision highp int;\n precision highp sampler2D;\n ${glsl.varyingFs} vec2 resultUV;\n ${glsl.defineOutput}\n const vec2 halfCR = vec2(0.5, 0.5);\n\n struct ivec5\n {\n int x;\n int y;\n int z;\n int w;\n int u;\n };\n\n struct ivec6\n {\n int x;\n int y;\n int z;\n int w;\n int u;\n int v;\n };\n\n uniform float NAN;\n ${glsl.defineSpecialNaN}\n ${glsl.defineSpecialInf}\n ${glsl.defineRound}\n\n int imod(int x, int y) {\n return x - y * (x / y);\n }\n\n int idiv(int a, int b, float sign) {\n int res = a / b;\n int mod = imod(a, b);\n if (sign < 0. && mod != 0) {\n res -= 1;\n }\n return res;\n }\n\n //Based on the work of Dave Hoskins\n //https://www.shadertoy.com/view/4djSRW\n #define HASHSCALE1 443.8975\n float random(float seed){\n vec2 p = resultUV * seed;\n vec3 p3 = fract(vec3(p.xyx) * HASHSCALE1);\n p3 += dot(p3, p3.yzx + 19.19);\n return fract((p3.x + p3.y) * p3.z);\n }\n\n ${SAMPLE_1D_SNIPPET}\n ${SAMPLE_2D_SNIPPET}\n ${SAMPLE_3D_SNIPPET}\n `;\n return SHADER_PREFIX;\n}\nconst SAMPLE_1D_SNIPPET = `\nvec2 uvFromFlat(int texNumR, int texNumC, int index) {\n int texR = index / texNumC;\n int texC = index - texR * texNumC;\n return (vec2(texC, texR) + halfCR) / vec2(texNumC, texNumR);\n}\nvec2 packedUVfrom1D(int texNumR, int texNumC, int index) {\n int texelIndex = index / 2;\n int texR = texelIndex / texNumC;\n int texC = texelIndex - texR * texNumC;\n return (vec2(texC, texR) + halfCR) / vec2(texNumC, texNumR);\n}\n`;\nconst SAMPLE_2D_SNIPPET = `\nvec2 packedUVfrom2D(int texelsInLogicalRow, int texNumR,\n int texNumC, int row, int col) {\n int texelIndex = (row / 2) * texelsInLogicalRow + (col / 2);\n int texR = texelIndex / texNumC;\n int texC = texelIndex - texR * texNumC;\n return (vec2(texC, texR) + halfCR) / vec2(texNumC, texNumR);\n}\n`;\nconst SAMPLE_3D_SNIPPET = `\nvec2 packedUVfrom3D(int texNumR, int texNumC,\n int texelsInBatch, int texelsInLogicalRow, int b,\n int row, int col) {\n int index = b * texelsInBatch + (row / 2) * texelsInLogicalRow + (col / 2);\n int texR = index / texNumC;\n int texC = index - texR * texNumC;\n return (vec2(texC, texR) + halfCR) / vec2(texNumC, texNumR);\n}\n`;\nconst SHADER_PACKED_PREFIX = `\n float getChannel(vec4 frag, vec2 innerDims) {\n vec2 modCoord = mod(innerDims, 2.);\n return modCoord.x == 0. ?\n (modCoord.y == 0. ? frag.r : frag.g) :\n (modCoord.y == 0. ? frag.b : frag.a);\n }\n float getChannel(vec4 frag, int dim) {\n float modCoord = mod(float(dim), 2.);\n return modCoord == 0. ? 
frag.r : frag.g;\n }\n`;\nfunction getOutputScalarCoords() {\n return `\n int getOutputCoords() {\n return 0;\n }\n `;\n}\nfunction getOutputPacked1DCoords(shape, texShape) {\n const packedTexShape = [Math.ceil(texShape[0] / 2), Math.ceil(texShape[1] / 2)];\n if (packedTexShape[0] === 1) {\n return `\n int getOutputCoords() {\n return 2 * int(resultUV.x * ${packedTexShape[1]}.0);\n }\n `;\n }\n if (packedTexShape[1] === 1) {\n return `\n int getOutputCoords() {\n return 2 * int(resultUV.y * ${packedTexShape[0]}.0);\n }\n `;\n }\n return `\n int getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${packedTexShape[0]}, ${packedTexShape[1]}));\n return 2 * (resTexRC.x * ${packedTexShape[1]} + resTexRC.y);\n }\n `;\n}\nfunction getOutput1DCoords(shape, texShape) {\n if (texShape[0] === 1) {\n return `\n int getOutputCoords() {\n return int(resultUV.x * ${texShape[1]}.0);\n }\n `;\n }\n if (texShape[1] === 1) {\n return `\n int getOutputCoords() {\n return int(resultUV.y * ${texShape[0]}.0);\n }\n `;\n }\n return `\n int getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${texShape[0]}, ${texShape[1]}));\n return resTexRC.x * ${texShape[1]} + resTexRC.y;\n }\n `;\n}\nfunction getOutputPacked3DCoords(shape, texShape) {\n const packedTexShape = [Math.ceil(texShape[0] / 2), Math.ceil(texShape[1] / 2)];\n const texelsInLogicalRow = Math.ceil(shape[2] / 2);\n const texelsInBatch = texelsInLogicalRow * Math.ceil(shape[1] / 2);\n return `\n ivec3 getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${packedTexShape[0]}, ${packedTexShape[1]}));\n int index = resTexRC.x * ${packedTexShape[1]} + resTexRC.y;\n\n int b = index / ${texelsInBatch};\n index -= b * ${texelsInBatch};\n\n int r = 2 * (index / ${texelsInLogicalRow});\n int c = imod(index, ${texelsInLogicalRow}) * 2;\n\n return ivec3(b, r, c);\n }\n `;\n}\nfunction getOutput3DCoords(shape, texShape) {\n const coordsFromIndexSnippet = shader_util.getLogicalCoordinatesFromFlatIndex(['r', 'c', 'd'], shape);\n return `\n ivec3 getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = resTexRC.x * ${texShape[1]} + resTexRC.y;\n ${coordsFromIndexSnippet}\n return ivec3(r, c, d);\n }\n `;\n}\nfunction getOutputPackedNDCoords(shape, texShape) {\n const packedTexShape = [Math.ceil(texShape[0] / 2), Math.ceil(texShape[1] / 2)];\n const texelsInLogicalRow = Math.ceil(shape[shape.length - 1] / 2);\n const texelsInBatch = texelsInLogicalRow * Math.ceil(shape[shape.length - 2] / 2);\n let texelsInBatchN = texelsInBatch;\n let batches = ``;\n let coords = 'b, r, c';\n for (let b = 2; b < shape.length - 1; b++) {\n texelsInBatchN *= shape[shape.length - b - 1];\n batches = `\n int b${b} = index / ${texelsInBatchN};\n index -= b${b} * ${texelsInBatchN};\n ` + batches;\n coords = `b${b}, ` + coords;\n }\n return `\n ivec${shape.length} getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${packedTexShape[0]}, ${packedTexShape[1]}));\n int index = resTexRC.x * ${packedTexShape[1]} + resTexRC.y;\n\n ${batches}\n\n int b = index / ${texelsInBatch};\n index -= b * ${texelsInBatch};\n\n int r = 2 * (index / ${texelsInLogicalRow});\n int c = imod(index, ${texelsInLogicalRow}) * 2;\n\n return ivec${shape.length}(${coords});\n }\n `;\n}\nfunction getOutput4DCoords(shape, texShape) {\n const coordsFromIndexSnippet = shader_util.getLogicalCoordinatesFromFlatIndex(['r', 'c', 'd', 'd2'], shape);\n return `\n ivec4 getOutputCoords() {\n ivec2 resTexRC = 
ivec2(resultUV.yx *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = resTexRC.x * ${texShape[1]} + resTexRC.y;\n ${coordsFromIndexSnippet}\n return ivec4(r, c, d, d2);\n }\n `;\n}\nfunction getOutput5DCoords(shape, texShape) {\n const coordsFromIndexSnippet = shader_util.getLogicalCoordinatesFromFlatIndex(['r', 'c', 'd', 'd2', 'd3'], shape);\n return `\n ivec5 getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx * vec2(${texShape[0]},\n ${texShape[1]}));\n\n int index = resTexRC.x * ${texShape[1]} + resTexRC.y;\n\n ${coordsFromIndexSnippet}\n\n ivec5 outShape = ivec5(r, c, d, d2, d3);\n return outShape;\n }\n `;\n}\nfunction getOutput6DCoords(shape, texShape) {\n const coordsFromIndexSnippet = shader_util.getLogicalCoordinatesFromFlatIndex(['r', 'c', 'd', 'd2', 'd3', 'd4'], shape);\n return `\n ivec6 getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = resTexRC.x * ${texShape[1]} + resTexRC.y;\n\n ${coordsFromIndexSnippet}\n\n ivec6 result = ivec6(r, c, d, d2, d3, d4);\n return result;\n }\n `;\n}\nfunction getOutputPacked2DCoords(shape, texShape) {\n const packedTexShape = [Math.ceil(texShape[0] / 2), Math.ceil(texShape[1] / 2)];\n if (util.arraysEqual(shape, texShape)) {\n return `\n ivec2 getOutputCoords() {\n return 2 * ivec2(resultUV.yx * vec2(${packedTexShape[0]}, ${packedTexShape[1]}));\n }\n `;\n }\n // texels needed to accommodate a logical row\n const texelsInLogicalRow = Math.ceil(shape[1] / 2);\n /**\n * getOutputCoords\n *\n * resTexRC: The rows and columns of the texels. If you move over one\n * texel to the right in the packed texture, you are moving over one column\n * (not two).\n *\n * index: The texel index\n */\n return `\n ivec2 getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${packedTexShape[0]}, ${packedTexShape[1]}));\n\n int index = resTexRC.x * ${packedTexShape[1]} + resTexRC.y;\n int r = 2 * (index / ${texelsInLogicalRow});\n int c = imod(index, ${texelsInLogicalRow}) * 2;\n\n return ivec2(r, c);\n }\n `;\n}\nfunction getOutput2DCoords(shape, texShape) {\n if (util.arraysEqual(shape, texShape)) {\n return `\n ivec2 getOutputCoords() {\n return ivec2(resultUV.yx * vec2(${texShape[0]}, ${texShape[1]}));\n }\n `;\n }\n if (shape[1] === 1) {\n return `\n ivec2 getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = resTexRC.x * ${texShape[1]} + resTexRC.y;\n return ivec2(index, 0);\n }\n `;\n }\n if (shape[0] === 1) {\n return `\n ivec2 getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = resTexRC.x * ${texShape[1]} + resTexRC.y;\n return ivec2(0, index);\n }\n `;\n }\n return `\n ivec2 getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = resTexRC.x * ${texShape[1]} + resTexRC.y;\n int r = index / ${shape[1]};\n int c = index - r * ${shape[1]};\n return ivec2(r, c);\n }\n `;\n}\nfunction getFlatOffsetUniformName(texName) {\n return `offset${texName}`;\n}\nfunction getPackedSamplerScalar(inputInfo) {\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n const glsl = getGlslDifferences();\n return `\n vec4 ${funcName}() {\n return ${glsl.texture2D}(${texName}, halfCR);\n }\n `;\n}\nfunction getSamplerScalar(inputInfo) {\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n if 
(inputInfo.shapeInfo.isUniform) {\n return `float ${funcName}() {return ${texName};}`;\n }\n const [texNumR, texNumC] = inputInfo.shapeInfo.texShape;\n if (texNumR === 1 && texNumC === 1) {\n return `\n float ${funcName}() {\n return sampleTexture(${texName}, halfCR);\n }\n `;\n }\n const [tNumR, tNumC] = inputInfo.shapeInfo.texShape;\n const offset = getFlatOffsetUniformName(texName);\n return `\n float ${funcName}() {\n vec2 uv = uvFromFlat(${tNumR}, ${tNumC}, ${offset});\n return sampleTexture(${texName}, uv);\n }\n `;\n}\nfunction getPackedSampler1D(inputInfo) {\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n const texShape = inputInfo.shapeInfo.texShape;\n const packedTexShape = [Math.ceil(texShape[0] / 2), Math.ceil(texShape[1] / 2)];\n const glsl = getGlslDifferences();\n return `\n vec4 ${funcName}(int index) {\n vec2 uv = packedUVfrom1D(\n ${packedTexShape[0]}, ${packedTexShape[1]}, index);\n return ${glsl.texture2D}(${texName}, uv);\n }\n `;\n}\nfunction getSampler1D(inputInfo) {\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n if (inputInfo.shapeInfo.isUniform) {\n // Uniform arrays will be less than 65505 (no risk of float16 overflow).\n return `\n float ${funcName}(int index) {\n ${getUniformSampler(inputInfo)}\n }\n `;\n }\n const texShape = inputInfo.shapeInfo.texShape;\n const tNumR = texShape[0];\n const tNumC = texShape[1];\n if (tNumC === 1 && tNumR === 1) {\n return `\n float ${funcName}(int index) {\n return sampleTexture(${texName}, halfCR);\n }\n `;\n }\n const offset = getFlatOffsetUniformName(texName);\n if (tNumC === 1) {\n return `\n float ${funcName}(int index) {\n vec2 uv = vec2(0.5, (float(index + ${offset}) + 0.5) / ${tNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n if (tNumR === 1) {\n return `\n float ${funcName}(int index) {\n vec2 uv = vec2((float(index + ${offset}) + 0.5) / ${tNumC}.0, 0.5);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n return `\n float ${funcName}(int index) {\n vec2 uv = uvFromFlat(${tNumR}, ${tNumC}, index + ${offset});\n return sampleTexture(${texName}, uv);\n }\n `;\n}\nfunction getPackedSampler2D(inputInfo) {\n const shape = inputInfo.shapeInfo.logicalShape;\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n const texShape = inputInfo.shapeInfo.texShape;\n const texNumR = texShape[0];\n const texNumC = texShape[1];\n const glsl = getGlslDifferences();\n if (texShape != null && util.arraysEqual(shape, texShape)) {\n return `\n vec4 ${funcName}(int row, int col) {\n vec2 uv = (vec2(col, row) + halfCR) / vec2(${texNumC}.0, ${texNumR}.0);\n\n return ${glsl.texture2D}(${texName}, uv);\n }\n `;\n }\n const packedTexShape = [Math.ceil(texShape[0] / 2), Math.ceil(texShape[1] / 2)];\n const valuesPerRow = Math.ceil(shape[1] / 2);\n return `\n vec4 ${funcName}(int row, int col) {\n vec2 uv = packedUVfrom2D(${valuesPerRow}, ${packedTexShape[0]}, ${packedTexShape[1]}, row, col);\n return ${glsl.texture2D}(${texName}, uv);\n }\n `;\n}\nfunction getSampler2D(inputInfo) {\n const shape = inputInfo.shapeInfo.logicalShape;\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n const texShape = inputInfo.shapeInfo.texShape;\n if (texShape != null && util.arraysEqual(shape, texShape)) {\n const texNumR = texShape[0];\n const texNumC = texShape[1];\n return `\n float 
${funcName}(int row, int col) {\n vec2 uv = (vec2(col, row) + halfCR) / vec2(${texNumC}.0, ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n const { newShape, keptDims } = util.squeezeShape(shape);\n const squeezedShape = newShape;\n if (squeezedShape.length < shape.length) {\n const newInputInfo = squeezeInputInfo(inputInfo, squeezedShape);\n const params = ['row', 'col'];\n return `\n ${getSamplerFromInInfo(newInputInfo)}\n float ${funcName}(int row, int col) {\n return ${funcName}(${getSqueezedParams(params, keptDims)});\n }\n `;\n }\n if (inputInfo.shapeInfo.isUniform) {\n // Uniform arrays will be less than 65505 (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col) {\n int index = round(dot(vec2(row, col), vec2(${shape[1]}, 1)));\n ${getUniformSampler(inputInfo)}\n }\n `;\n }\n const texNumR = texShape[0];\n const texNumC = texShape[1];\n const offset = getFlatOffsetUniformName(texName);\n if (texNumC === 1) {\n // index is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col) {\n float index = dot(vec3(row, col, ${offset}), vec3(${shape[1]}, 1, 1));\n vec2 uv = vec2(0.5, (index + 0.5) / ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n if (texNumR === 1) {\n // index is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col) {\n float index = dot(vec3(row, col, ${offset}), vec3(${shape[1]}, 1, 1));\n vec2 uv = vec2((index + 0.5) / ${texNumC}.0, 0.5);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n return `\n float ${funcName}(int row, int col) {\n // Explicitly use integer operations as dot() only works on floats.\n int index = row * ${shape[1]} + col + ${offset};\n vec2 uv = uvFromFlat(${texNumR}, ${texNumC}, index);\n return sampleTexture(${texName}, uv);\n }\n`;\n}\nfunction getPackedSampler3D(inputInfo) {\n const shape = inputInfo.shapeInfo.logicalShape;\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n const texShape = inputInfo.shapeInfo.texShape;\n const packedTexShape = [Math.ceil(texShape[0] / 2), Math.ceil(texShape[1] / 2)];\n if (shape[0] === 1) {\n const squeezedShape = shape.slice(1);\n const keptDims = [1, 2];\n const newInputInfo = squeezeInputInfo(inputInfo, squeezedShape);\n const params = ['b', 'row', 'col'];\n return `\n ${getPackedSamplerFromInInfo(newInputInfo)}\n vec4 ${funcName}(int b, int row, int col) {\n return ${funcName}(${getSqueezedParams(params, keptDims)});\n }\n `;\n }\n const texNumR = packedTexShape[0];\n const texNumC = packedTexShape[1];\n const valuesPerRow = Math.ceil(shape[2] / 2);\n const texelsInBatch = valuesPerRow * Math.ceil(shape[1] / 2);\n const glsl = getGlslDifferences();\n return `\n vec4 ${funcName}(int b, int row, int col) {\n vec2 uv = packedUVfrom3D(\n ${texNumR}, ${texNumC}, ${texelsInBatch}, ${valuesPerRow}, b, row, col);\n return ${glsl.texture2D}(${texName}, uv);\n }\n `;\n}\nfunction getSampler3D(inputInfo) {\n const shape = inputInfo.shapeInfo.logicalShape;\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n const stride0 = shape[1] * shape[2];\n const stride1 = shape[2];\n const { newShape, keptDims } = util.squeezeShape(shape);\n const squeezedShape = newShape;\n if (squeezedShape.length < shape.length) {\n const newInputInfo = squeezeInputInfo(inputInfo, squeezedShape);\n const params = ['row', 'col', 'depth'];\n return `\n 
${getSamplerFromInInfo(newInputInfo)}\n float ${funcName}(int row, int col, int depth) {\n return ${funcName}(${getSqueezedParams(params, keptDims)});\n }\n `;\n }\n if (inputInfo.shapeInfo.isUniform) {\n // Uniform arrays will be less than 65505 (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth) {\n int index = round(dot(vec3(row, col, depth),\n vec3(${stride0}, ${stride1}, 1)));\n ${getUniformSampler(inputInfo)}\n }\n `;\n }\n const texShape = inputInfo.shapeInfo.texShape;\n const texNumR = texShape[0];\n const texNumC = texShape[1];\n const flatOffset = inputInfo.shapeInfo.flatOffset;\n if (texNumC === stride0 && flatOffset == null) {\n // texC is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth) {\n float texR = float(row);\n float texC = dot(vec2(col, depth), vec2(${stride1}, 1));\n vec2 uv = (vec2(texC, texR) + halfCR) /\n vec2(${texNumC}.0, ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n if (texNumC === stride1 && flatOffset == null) {\n // texR is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth) {\n float texR = dot(vec2(row, col), vec2(${shape[1]}, 1));\n float texC = float(depth);\n vec2 uv = (vec2(texC, texR) + halfCR) / vec2(${texNumC}.0, ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n const offset = getFlatOffsetUniformName(texName);\n return `\n float ${funcName}(int row, int col, int depth) {\n // Explicitly use integer operations as dot() only works on floats.\n int index = row * ${stride0} + col * ${stride1} + depth + ${offset};\n vec2 uv = uvFromFlat(${texNumR}, ${texNumC}, index);\n return sampleTexture(${texName}, uv);\n }\n `;\n}\nfunction getPackedSamplerND(inputInfo) {\n const shape = inputInfo.shapeInfo.logicalShape;\n const rank = shape.length;\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n const texShape = inputInfo.shapeInfo.texShape;\n const packedTexShape = [Math.ceil(texShape[0] / 2), Math.ceil(texShape[1] / 2)];\n const texNumR = packedTexShape[0];\n const texNumC = packedTexShape[1];\n const valuesPerRow = Math.ceil(shape[rank - 1] / 2);\n let texelsInBatch = valuesPerRow * Math.ceil(shape[rank - 2] / 2);\n let params = `int b, int row, int col`;\n let index = `b * ${texelsInBatch} + (row / 2) * ${valuesPerRow} + (col / 2)`;\n for (let b = 2; b < rank - 1; b++) {\n params = `int b${b}, ` + params;\n texelsInBatch *= shape[rank - b - 1];\n index = `b${b} * ${texelsInBatch} + ` + index;\n }\n const glsl = getGlslDifferences();\n return `\n vec4 ${funcName}(${params}) {\n int index = ${index};\n int texR = index / ${texNumC};\n int texC = index - texR * ${texNumC};\n vec2 uv = (vec2(texC, texR) + halfCR) / vec2(${texNumC}, ${texNumR});\n return ${glsl.texture2D}(${texName}, uv);\n }\n `;\n}\nfunction getSampler4D(inputInfo) {\n const shape = inputInfo.shapeInfo.logicalShape;\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n const stride2 = shape[3];\n const stride1 = shape[2] * stride2;\n const stride0 = shape[1] * stride1;\n const { newShape, keptDims } = util.squeezeShape(shape);\n if (newShape.length < shape.length) {\n const newInputInfo = squeezeInputInfo(inputInfo, newShape);\n const params = ['row', 'col', 'depth', 'depth2'];\n return `\n ${getSamplerFromInInfo(newInputInfo)}\n float ${funcName}(int row, int 
[Generated build output: escaped, concatenated source strings (source-map `sourcesContent`) for the TensorFlow.js WebGL backend kernels bundled with the library. This span covers the remainder of shader_compiler.js (GLSL sampler generation for 4D/5D/6D tensors, uniform-array sampling, packed and unpacked `get...AtOutCoords` helpers, `getCoordsDataType`, input-info squeezing), followed by argminmax_packed_gpu.js, avg_pool_backprop_gpu.js, binaryop_gpu.js, binaryop_packed_gpu.js (NaN-aware INT_DIV, POW, MAX, MIN, MOD and comparison snippets), clip_gpu.js, clip_packed_gpu.js, complex_abs_gpu.js, conv_backprop_gpu.js, conv_backprop_gpu_depthwise.js, conv_gpu.js, conv_gpu_depthwise.js, conv_packed_gpu_depthwise.js, crop_and_resize_gpu.js, and the start of the CumSumProgram source, each module preceded by its Apache-2.0 license header.]
'end + pow2' : 'end - pow2');\n }\n this.userCode = `\n uniform float index;\n void main() {\n ${getCoordsDataType(rank)} coords = getOutputCoords();\n int end = ${getFinalCoord(rank, 'coords')};\n float val = ${val};\n int pow2 = int(pow(2.0, index));\n if (${condition}) {\n int idx = ${idxString};\n ${getFinalCoord(rank, 'coords')} = idx;\n val += getX(${getCoords(rank, 'coords')});\n }\n setOutput(val);\n }\n `;\n }\n getCustomSetupFunc(index) {\n return (gpgpu, webGLProgram) => {\n if (this.index == null) {\n this.index = gpgpu.getUniformLocation(webGLProgram, 'index');\n }\n gpgpu.gl.uniform1f(this.index, index);\n };\n }\n}\nfunction getCoords(rank, name) {\n if (rank === 1) {\n return `${name}`;\n }\n else if (rank === 2) {\n return `${name}.x, ${name}.y`;\n }\n else if (rank === 3) {\n return `${name}.x, ${name}.y, ${name}.z`;\n }\n else if (rank === 4) {\n return `${name}.x, ${name}.y, ${name}.z, ${name}.w`;\n }\n else {\n throw Error(`Cumulative sum for rank ${rank} is not yet supported`);\n }\n}\nfunction getFinalCoord(rank, name) {\n if (rank === 1) {\n return `${name}`;\n }\n else if (rank === 2) {\n return `${name}.y`;\n }\n else if (rank === 3) {\n return `${name}.z`;\n }\n else if (rank === 4) {\n return `${name}.w`;\n }\n else {\n throw Error(`Cumulative sum for rank ${rank} is not yet supported`);\n }\n}\n//# sourceMappingURL=cumsum_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getGlslDifferences } from './glsl_version';\nimport * as shader_util from './shader_compiler_util';\nimport { getDenseTexShape, PackingScheme } from './tex_util';\nexport class DecodeMatrixProgram {\n constructor(outputShape) {\n this.variableNames = ['A'];\n this.packedInputs = false;\n this.packedOutput = true;\n this.outPackingScheme = PackingScheme.DENSE;\n const texShape = getDenseTexShape(outputShape);\n const glsl = getGlslDifferences();\n this.outputShape = outputShape;\n this.userCode = `\n ivec3 outCoordsFromFlatIndex(int index) {\n ${shader_util.getLogicalCoordinatesFromFlatIndex(['r', 'c', 'd'], outputShape)}\n return ivec3(r, c, d);\n }\n\n void main() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = 4 * (resTexRC.x * ${texShape[1]} + resTexRC.y);\n\n vec4 result = vec4(0.);\n\n for (int i=0; i<4; i++) {\n int flatIndex = index + i;\n ivec3 rc = outCoordsFromFlatIndex(flatIndex);\n result[i] = getA(rc.x, rc.y, rc.z);\n }\n\n ${glsl.output} = result;\n }\n `;\n }\n}\n//# sourceMappingURL=decode_matrix_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getGlslDifferences } from './glsl_version';\nimport * as shader_util from './shader_compiler_util';\nimport { getDenseTexShape, PackingScheme } from './tex_util';\nexport class DecodeMatrixPackedProgram {\n constructor(outputShape) {\n this.variableNames = ['A'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outPackingScheme = PackingScheme.DENSE;\n const texShape = getDenseTexShape(outputShape);\n const glsl = getGlslDifferences();\n this.outputShape = outputShape;\n this.userCode = `\n ivec3 outCoordsFromFlatIndex(int index) {\n ${shader_util.getLogicalCoordinatesFromFlatIndex(['r', 'c', 'd'], outputShape)}\n return ivec3(r, c, d);\n }\n\n void main() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = 4 * (resTexRC.x * ${texShape[1]} + resTexRC.y);\n\n vec4 result = vec4(0.);\n\n for (int i=0; i<4; i++) {\n int flatIndex = index + i;\n ivec3 rc = outCoordsFromFlatIndex(flatIndex);\n result[i] = getChannel(getA(rc.x, rc.y, rc.z), vec2(rc.y, rc.z));\n }\n\n ${glsl.output} = result;\n }\n `;\n }\n}\n//# sourceMappingURL=decode_matrix_packed_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class DepthToSpaceProgram {\n constructor(outputShape, blockSize, dataFormat) {\n this.variableNames = ['x'];\n this.outputShape = [];\n this.outputShape = outputShape;\n this.blockSize = blockSize;\n this.dataFormat = dataFormat;\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int h = ${this.getHeightCoordString()};\n int w = ${this.getWidthCoordString()};\n int d = ${this.getDepthCoordString()};\n\n int in_h = h / ${blockSize};\n int offset_h = imod(h, ${blockSize});\n int in_w = w / ${blockSize};\n int offset_w = imod(w, ${blockSize});\n int offset_d = (offset_h * ${blockSize} + offset_w) *\n ${this.getOutputDepthSize()};\n int in_d = d + offset_d;\n\n float result = ${this.getInputSamplingString()};\n setOutput(result);\n }\n `;\n }\n getHeightCoordString() {\n if (this.dataFormat === 'NHWC') {\n return `coords[1]`;\n }\n else {\n return `coords[2]`;\n }\n }\n getWidthCoordString() {\n if (this.dataFormat === 'NHWC') {\n return `coords[2]`;\n }\n else {\n return `coords[3]`;\n }\n }\n getDepthCoordString() {\n if (this.dataFormat === 'NHWC') {\n return `coords[3]`;\n }\n else {\n return `coords[1]`;\n }\n }\n getOutputDepthSize() {\n if (this.dataFormat === 'NHWC') {\n return this.outputShape[3];\n }\n else {\n return this.outputShape[1];\n }\n }\n getInputSamplingString() {\n if (this.dataFormat === 'NHWC') {\n return `getX(b, in_h, in_w, in_d)`;\n }\n else {\n return `getX(b, in_d, in_h, in_w)`;\n }\n }\n}\n//# sourceMappingURL=depth_to_space_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class DiagProgram {\n constructor(size) {\n this.variableNames = ['X'];\n this.outputShape = [size, size];\n this.userCode = `\n void main() {\n ivec2 coords = getOutputCoords();\n float val = coords[0] == coords[1] ? getX(coords[0]) : 0.0;\n setOutput(val);\n }\n `;\n }\n}\n//# sourceMappingURL=diag_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getGlslDifferences } from './glsl_version';\nimport { ENCODE_FLOAT_SNIPPET } from './shader_compiler_util';\nimport { TextureUsage } from './tex_util';\nexport class EncodeFloatProgram {\n constructor(outputShape) {\n this.variableNames = ['A'];\n this.outTexUsage = TextureUsage.DOWNLOAD;\n const glsl = getGlslDifferences();\n this.outputShape = outputShape;\n this.userCode = `\n ${ENCODE_FLOAT_SNIPPET}\n\n void main() {\n float x = getAAtOutCoords();\n ${glsl.output} = encode_float(x);\n }\n `;\n }\n}\n//# sourceMappingURL=encode_float_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getGlslDifferences } from './glsl_version';\nimport { ENCODE_FLOAT_SNIPPET } from './shader_compiler_util';\nimport { TextureUsage } from './tex_util';\nexport class EncodeFloatPackedProgram {\n constructor(outputShape) {\n this.variableNames = ['A'];\n this.packedInputs = true;\n this.packedOutput = false;\n this.outTexUsage = TextureUsage.DOWNLOAD;\n const glsl = getGlslDifferences();\n this.outputShape = outputShape;\n this.userCode = `\n ${ENCODE_FLOAT_SNIPPET}\n\n void main() {\n ivec3 coords = getOutputCoords();\n float x = getChannel(getAAtOutCoords(), vec2(coords.y, coords.z));\n ${glsl.output} = encode_float(x);\n }\n `;\n }\n}\n//# sourceMappingURL=encode_float_packed_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getGlslDifferences } from './glsl_version';\nimport * as shader_util from './shader_compiler_util';\nexport class EncodeMatrixProgram {\n constructor(outputShape, texShape, inputIsUnsignedByte = false) {\n this.variableNames = ['A'];\n const glsl = getGlslDifferences();\n const [height, width] = texShape;\n this.outputShape = outputShape;\n let output = `result`;\n if (inputIsUnsignedByte) {\n output = `floor(result * 255. + 0.5)`;\n }\n this.userCode = `\n ${shader_util.getFlatIndexFrom3D(outputShape)}\n\n void main() {\n ivec3 coords = getOutputCoords();\n\n int flatIndex = getFlatIndex(coords);\n int offset = imod(flatIndex, 4);\n\n flatIndex = idiv(flatIndex, 4, 1.);\n\n int r = flatIndex / ${width};\n int c = imod(flatIndex, ${width});\n vec2 uv = (vec2(c, r) + halfCR) / vec2(${width}.0, ${height}.0);\n vec4 values = ${glsl.texture2D}(A, uv);\n\n float result;\n\n if(offset == 0) {\n result = values[0];\n } else if(offset == 1) {\n result = values[1];\n } else if(offset == 2) {\n result = values[2];\n } else {\n result = values[3];\n }\n\n ${glsl.output} = vec4(${output}, 0., 0., 0.);\n }\n `;\n }\n}\n//# sourceMappingURL=encode_matrix_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getGlslDifferences } from './glsl_version';\nimport * as shader_util from './shader_compiler_util';\n/*\nThis is how the shader encodes a tensor with shape = [2, 3, 5]\n(indices are [batch, row, col]).\n\n000|001 002|003 004|xxx 020|021 022|023 024|xxx\n------- ------- ------- ------- ------- -------\n010|011 012|013 014|xxx xxx|xxx xxx|xxx xxx|xxx\n\n100|101 102|103 104|xxx 120|121 122|123 124|xxx\n------- ------- ------- ------- ------- -------\n110|111 112|113 114|xxx xxx|xxx xxx|xxx xxx|xxx\n\nSingle texels contain only values from the same batch, and from adjacent rows\nand columns.\n */\nexport class EncodeMatrixPackedProgram {\n constructor(outputShape, texShape, inputIsUnsignedByte = false) {\n this.variableNames = ['A'];\n this.packedInputs = false;\n this.packedOutput = true;\n const glsl = getGlslDifferences();\n const [height, width] = texShape;\n this.outputShape = outputShape;\n let mainLoop = '';\n let output = 'result';\n if (inputIsUnsignedByte) {\n output = 'floor(result * 255. + 0.5)';\n }\n for (let row = 0; row <= 1; row++) {\n for (let col = 0; col <= 1; col++) {\n const channel = row * 2 + col;\n mainLoop += `\n localCoords = coords;\n if(localCoords[2] + ${col} < ${outputShape[2]}) {\n localCoords[2] += ${col};\n if(localCoords[1] + ${row} < ${outputShape[1]}) {\n localCoords[1] += ${row};\n\n flatIndex = getFlatIndex(localCoords);\n offset = imod(flatIndex, 4);\n\n flatIndex = idiv(flatIndex, 4, 1.);\n\n r = flatIndex / ${width};\n c = imod(flatIndex, ${width});\n uv = (vec2(c, r) + halfCR) / vec2(${width}.0, ${height}.0);\n values = ${glsl.texture2D}(A, uv);\n\n if(offset == 0) {\n result[${channel}] = values[0];\n } else if(offset == 1) {\n result[${channel}] = values[1];\n } else if(offset == 2) {\n result[${channel}] = values[2];\n } else {\n result[${channel}] = values[3];\n }\n }\n }\n `;\n }\n }\n this.userCode = `\n ${shader_util.getFlatIndexFrom3D(outputShape)}\n\n void main() {\n ivec3 coords = getOutputCoords();\n\n vec4 result = vec4(0.);\n int flatIndex, r, c, offset;\n ivec3 localCoords;\n vec2 uv;\n vec4 values;\n\n ${mainLoop}\n\n ${glsl.output} = ${output};\n }\n `;\n }\n}\n//# sourceMappingURL=encode_matrix_packed_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class FillProgram {\n constructor(shape, value) {\n this.outputShape = [];\n this.variableNames = ['x'];\n this.outputShape = shape;\n this.userCode = `\n uniform float value;\n void main() {\n // Input can be obtained from uniform value.\n setOutput(value);\n }\n `;\n }\n getCustomSetupFunc(value) {\n return (gpgpu, webGLProgram) => {\n if (this.valueLoc == null) {\n this.valueLoc = gpgpu.getUniformLocationNoThrow(webGLProgram, 'value');\n }\n gpgpu.gl.uniform1f(this.valueLoc, value);\n };\n }\n}\n//# sourceMappingURL=fill_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getCoordsDataType } from './shader_compiler';\nexport class GatherProgram {\n constructor(aShape, indicesLength, axis) {\n this.variableNames = ['A', 'indices'];\n const outputShape = aShape.slice();\n outputShape[axis] = indicesLength;\n this.outputShape = outputShape;\n this.rank = outputShape.length;\n const dtype = getCoordsDataType(this.rank);\n const sourceCoords = getSourceCoords(aShape, axis);\n this.userCode = `\n void main() {\n ${dtype} resRC = getOutputCoords();\n setOutput(getA(${sourceCoords}));\n }\n `;\n }\n}\nfunction getSourceCoords(aShape, axis) {\n const rank = aShape.length;\n if (rank > 4) {\n throw Error(`Gather for rank ${rank} is not yet supported`);\n }\n if (rank === 1) {\n return `int(getIndices(resRC))`;\n }\n const currentCoords = ['resRC.x', 'resRC.y', 'resRC.z', 'resRC.w'];\n const sourceCoords = [];\n for (let i = 0; i < aShape.length; i++) {\n if (i === axis) {\n sourceCoords.push(`int(getIndices(${currentCoords[i]}))`);\n }\n else {\n sourceCoords.push(`${currentCoords[i]}`);\n }\n }\n return sourceCoords.join();\n}\n//# sourceMappingURL=gather_gpu.js.map", "import { getCoordsDataType } from './shader_compiler';\nexport class GatherNDProgram {\n constructor(sliceDim, strides, shape) {\n this.sliceDim = sliceDim;\n this.strides = strides;\n this.variableNames = ['x', 'indices'];\n this.outputShape = shape;\n const stridesType = getCoordsDataType(strides.length);\n const dtype = getCoordsDataType(shape.length);\n const strideString = this.sliceDim > 1 ? 
'strides[j]' : 'strides';\n this.userCode = `\n ${stridesType} strides = ${stridesType}(${this.strides});\n void main() {\n ${dtype} coords = getOutputCoords();\n int flattenIndex = 0;\n for (int j = 0; j < ${this.sliceDim}; j++) {\n int index = round(getIndices(coords[0], j));\n flattenIndex += index * ${strideString};\n }\n setOutput(getX(flattenIndex, coords[1]));\n }\n `;\n }\n}\n//# sourceMappingURL=gather_nd_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getGlslDifferences } from './glsl_version';\nimport * as tex_util from './tex_util';\nimport * as webgl_util from './webgl_util';\nexport function createVertexShader(gl) {\n const glsl = getGlslDifferences();\n const vertexShaderSource = `${glsl.version}\n precision highp float;\n ${glsl.attribute} vec3 clipSpacePos;\n ${glsl.attribute} vec2 uv;\n ${glsl.varyingVs} vec2 resultUV;\n\n void main() {\n gl_Position = vec4(clipSpacePos, 1);\n resultUV = uv;\n }`;\n return webgl_util.createVertexShader(gl, vertexShaderSource);\n}\nexport function createVertexBuffer(gl) {\n // [x y z u v] * [upper-left, lower-left, upper-right, lower-right]\n const vertexArray = new Float32Array([-1, 1, 0, 0, 1, -1, -1, 0, 0, 0, 1, 1, 0, 1, 1, 1, -1, 0, 1, 0]);\n return webgl_util.createStaticVertexBuffer(gl, vertexArray);\n}\nexport function createIndexBuffer(gl) {\n // OpenGL (and WebGL) have \"CCW == front\" winding\n const triangleVertexIndices = new Uint16Array([0, 1, 2, 2, 1, 3]);\n return webgl_util.createStaticIndexBuffer(gl, triangleVertexIndices);\n}\nfunction createAndConfigureTexture(gl, width, height, internalFormat, textureFormat, textureType) {\n webgl_util.validateTextureSize(width, height);\n const texture = webgl_util.createTexture(gl);\n const tex2d = gl.TEXTURE_2D;\n webgl_util.callAndCheck(gl, () => gl.bindTexture(tex2d, texture));\n webgl_util.callAndCheck(gl, () => gl.texParameteri(tex2d, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE));\n webgl_util.callAndCheck(gl, () => gl.texParameteri(tex2d, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE));\n webgl_util.callAndCheck(gl, () => gl.texParameteri(tex2d, gl.TEXTURE_MIN_FILTER, gl.NEAREST));\n webgl_util.callAndCheck(gl, () => gl.texParameteri(tex2d, gl.TEXTURE_MAG_FILTER, gl.NEAREST));\n webgl_util.callAndCheck(gl, () => gl.texImage2D(tex2d, 0, internalFormat, width, height, 0, textureFormat, textureType, null));\n webgl_util.callAndCheck(gl, () => gl.bindTexture(gl.TEXTURE_2D, null));\n return texture;\n}\nexport function getInternalFormatForFloat32MatrixTexture(textureConfig) {\n return textureConfig.internalFormatFloat;\n}\nexport function createFloat32MatrixTexture(gl, rows, columns, textureConfig) {\n const [width, height] = tex_util.getUnpackedMatrixTextureShapeWidthHeight(rows, columns);\n return createAndConfigureTexture(gl, width, height, getInternalFormatForFloat32MatrixTexture(textureConfig), 
textureConfig.textureFormatFloat, gl.FLOAT);\n}\nexport function getInternalFormatForFloat16MatrixTexture(textureConfig) {\n return textureConfig.internalFormatHalfFloat;\n}\nexport function createFloat16MatrixTexture(gl, rows, columns, textureConfig) {\n const [width, height] = tex_util.getUnpackedMatrixTextureShapeWidthHeight(rows, columns);\n return createAndConfigureTexture(gl, width, height, getInternalFormatForFloat16MatrixTexture(textureConfig), textureConfig.textureFormatFloat, textureConfig.textureTypeHalfFloat);\n}\nexport function getInternalFormatForUnsignedBytesMatrixTexture(textureConfig) {\n return textureConfig.downloadTextureFormat;\n}\nexport function createUnsignedBytesMatrixTexture(gl, rows, columns, textureConfig) {\n const [width, height] = tex_util.getUnpackedMatrixTextureShapeWidthHeight(rows, columns);\n return createAndConfigureTexture(gl, width, height, getInternalFormatForUnsignedBytesMatrixTexture(textureConfig), gl.RGBA, gl.UNSIGNED_BYTE);\n}\nexport function getInternalFormatForPackedMatrixTexture(textureConfig) {\n return textureConfig.internalFormatPackedFloat;\n}\nexport function createPackedMatrixTexture(gl, rows, columns, textureConfig) {\n const [width, height] = tex_util.getPackedMatrixTextureShapeWidthHeight(rows, columns);\n return createAndConfigureTexture(gl, width, height, getInternalFormatForPackedMatrixTexture(textureConfig), gl.RGBA, gl.FLOAT);\n}\nexport function getInternalFormatForFloat16PackedMatrixTexture(textureConfig) {\n return textureConfig.internalFormatPackedHalfFloat;\n}\nexport function createFloat16PackedMatrixTexture(gl, rows, columns, textureConfig) {\n const [width, height] = tex_util.getPackedMatrixTextureShapeWidthHeight(rows, columns);\n return createAndConfigureTexture(gl, width, height, getInternalFormatForFloat16PackedMatrixTexture(textureConfig), gl.RGBA, textureConfig.textureTypeHalfFloat);\n}\nexport function bindVertexProgramAttributeStreams(gl, program, vertexBuffer) {\n const posOffset = 0; // x is the first buffer element\n const uvOffset = 3 * 4; // uv comes after [x y z]\n const stride = (3 * 4) + (2 * 4); // xyz + uv, each entry is 4-byte float.\n webgl_util.callAndCheck(gl, () => gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer));\n const success = webgl_util.bindVertexBufferToProgramAttribute(gl, program, 'clipSpacePos', vertexBuffer, 3, stride, posOffset);\n return success &&\n webgl_util.bindVertexBufferToProgramAttribute(gl, program, 'uv', vertexBuffer, 2, stride, uvOffset);\n}\nexport function uploadDenseMatrixToTexture(gl, texture, width, height, data, textureConfig) {\n webgl_util.callAndCheck(gl, () => gl.bindTexture(gl.TEXTURE_2D, texture));\n let dataForUpload, texelDataType, internalFormat;\n if (data instanceof Uint8Array) {\n dataForUpload = new Uint8Array(width * height * 4);\n texelDataType = gl.UNSIGNED_BYTE;\n internalFormat = gl.RGBA;\n }\n else {\n dataForUpload = new Float32Array(width * height * 4);\n texelDataType = gl.FLOAT;\n internalFormat = textureConfig.internalFormatPackedFloat;\n }\n dataForUpload.set(data);\n webgl_util.callAndCheck(gl, () => gl.texImage2D(gl.TEXTURE_2D, 0, internalFormat, width, height, 0, gl.RGBA, texelDataType, dataForUpload));\n webgl_util.callAndCheck(gl, () => gl.bindTexture(gl.TEXTURE_2D, null));\n}\nexport function uploadPixelDataToTexture(gl, texture, pixels) {\n webgl_util.callAndCheck(gl, () => gl.bindTexture(gl.TEXTURE_2D, texture));\n if (pixels.data instanceof Uint8Array) {\n webgl_util.callAndCheck(gl, () => gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 
pixels.width, pixels.height, 0, gl.RGBA, gl.UNSIGNED_BYTE, pixels.data));\n }\n else {\n webgl_util.callAndCheck(gl, () => gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, pixels));\n }\n webgl_util.callAndCheck(gl, () => gl.bindTexture(gl.TEXTURE_2D, null));\n}\nexport function createBufferFromOutputTexture(gl2, rows, columns, textureConfig) {\n // Create and bind the buffer.\n const buffer = gl2.createBuffer();\n webgl_util.callAndCheck(gl2, () => gl2.bindBuffer(gl2.PIXEL_PACK_BUFFER, buffer));\n // Initialize the buffer to the size of the texture in bytes.\n const bytesPerFloat = 4;\n const valuesPerTexel = 4;\n const bufferSizeBytes = bytesPerFloat * valuesPerTexel * rows * columns;\n webgl_util.callAndCheck(gl2, () => gl2.bufferData(gl2.PIXEL_PACK_BUFFER, bufferSizeBytes, gl2.STREAM_READ));\n // Enqueue a command on the GPU command queue to copy of texture into the\n // buffer.\n webgl_util.callAndCheck(gl2, () => gl2.readPixels(0, 0, columns, rows, gl2.RGBA, gl2.FLOAT, 0));\n webgl_util.callAndCheck(gl2, () => gl2.bindBuffer(gl2.PIXEL_PACK_BUFFER, null));\n return buffer;\n}\nexport function downloadFloat32MatrixFromBuffer(gl, buffer, size) {\n const gl2 = gl;\n const downloadTarget = new Float32Array(size);\n gl2.bindBuffer(gl2.PIXEL_PACK_BUFFER, buffer);\n gl2.getBufferSubData(gl2.PIXEL_PACK_BUFFER, 0, downloadTarget);\n gl2.bindBuffer(gl2.PIXEL_PACK_BUFFER, null);\n return downloadTarget;\n}\nexport function downloadByteEncodedFloatMatrixFromOutputTexture(gl, rows, columns, textureConfig) {\n const [w, h] = tex_util.getUnpackedMatrixTextureShapeWidthHeight(rows, columns);\n const numChannels = 4;\n const downloadTarget = new Uint8Array(tex_util.getUnpackedArraySizeFromMatrixSize(rows * columns, numChannels));\n webgl_util.callAndCheck(gl, () => gl.readPixels(0, 0, w, h, textureConfig.downloadTextureFormat, gl.UNSIGNED_BYTE, downloadTarget));\n // By wrapping the buffer in a Float32Array, we use native browser IEEE 754\n // decoding of the 4 bytes that back each 32 bit float.\n return new Float32Array(downloadTarget.buffer);\n}\nexport function downloadPackedMatrixFromBuffer(gl, buffer, batch, rows, cols, physicalRows, physicalCols, textureConfig) {\n const gl2 = gl;\n const downloadTarget = new Float32Array(tex_util.getPackedRGBAArraySizeFromMatrixShape(physicalRows, physicalCols));\n gl2.bindBuffer(gl2.PIXEL_PACK_BUFFER, buffer);\n gl2.getBufferSubData(gl2.PIXEL_PACK_BUFFER, 0, downloadTarget);\n gl2.bindBuffer(gl2.PIXEL_PACK_BUFFER, null);\n return downloadTarget;\n}\nexport function downloadMatrixFromPackedOutputTexture(gl, physicalRows, physicalCols) {\n const packedRGBA = new Float32Array(physicalRows * physicalCols * 4);\n webgl_util.callAndCheck(gl, () => gl.readPixels(0, 0, physicalCols, physicalRows, gl.RGBA, gl.FLOAT, packedRGBA));\n return packedRGBA;\n}\n//# sourceMappingURL=gpgpu_util.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env, util } from '@tensorflow/tfjs-core';\nimport { getWebGLContext, setWebGLContext } from './canvas_util';\nimport * as gpgpu_util from './gpgpu_util';\nimport * as tex_util from './tex_util';\nimport * as webgl_util from './webgl_util';\nexport class GPGPUContext {\n constructor(gl) {\n this.outputTexture = null;\n this.program = null;\n this.disposed = false;\n this.vertexAttrsAreBound = false;\n this.itemsToPoll = [];\n const glVersion = env().getNumber('WEBGL_VERSION');\n if (gl != null) {\n this.gl = gl;\n setWebGLContext(glVersion, gl);\n }\n else {\n this.gl = getWebGLContext(glVersion);\n }\n // WebGL 2.0 enables texture floats without an extension.\n let COLOR_BUFFER_FLOAT = 'WEBGL_color_buffer_float';\n const COLOR_BUFFER_HALF_FLOAT = 'EXT_color_buffer_half_float';\n if (env().getNumber('WEBGL_VERSION') === 1) {\n const TEXTURE_FLOAT = 'OES_texture_float';\n const TEXTURE_HALF_FLOAT = 'OES_texture_half_float';\n this.textureFloatExtension =\n webgl_util.getExtensionOrThrow(this.gl, TEXTURE_FLOAT);\n if (webgl_util.hasExtension(this.gl, TEXTURE_HALF_FLOAT)) {\n this.textureHalfFloatExtension =\n webgl_util.getExtensionOrThrow(this.gl, TEXTURE_HALF_FLOAT);\n }\n else if (env().get('WEBGL_FORCE_F16_TEXTURES')) {\n throw new Error('GL context does not support half float textures, yet the ' +\n 'environment flag WEBGL_FORCE_F16_TEXTURES is set to true.');\n }\n this.colorBufferFloatExtension = this.gl.getExtension(COLOR_BUFFER_FLOAT);\n if (webgl_util.hasExtension(this.gl, COLOR_BUFFER_HALF_FLOAT)) {\n this.colorBufferHalfFloatExtension =\n webgl_util.getExtensionOrThrow(this.gl, COLOR_BUFFER_HALF_FLOAT);\n }\n else if (env().get('WEBGL_FORCE_F16_TEXTURES')) {\n throw new Error('GL context does not support color renderable half floats, yet ' +\n 'the environment flag WEBGL_FORCE_F16_TEXTURES is set to true.');\n }\n }\n else {\n COLOR_BUFFER_FLOAT = 'EXT_color_buffer_float';\n if (webgl_util.hasExtension(this.gl, COLOR_BUFFER_FLOAT)) {\n this.colorBufferFloatExtension =\n this.gl.getExtension(COLOR_BUFFER_FLOAT);\n }\n else if (webgl_util.hasExtension(this.gl, COLOR_BUFFER_HALF_FLOAT)) {\n this.colorBufferHalfFloatExtension =\n this.gl.getExtension(COLOR_BUFFER_HALF_FLOAT);\n }\n else {\n throw new Error('GL context does not support color renderable floats');\n }\n }\n this.vertexBuffer = gpgpu_util.createVertexBuffer(this.gl);\n this.indexBuffer = gpgpu_util.createIndexBuffer(this.gl);\n this.framebuffer = webgl_util.createFramebuffer(this.gl);\n this.textureConfig =\n tex_util.getTextureConfig(this.gl, this.textureHalfFloatExtension);\n }\n get debug() {\n return env().getBool('DEBUG');\n }\n dispose() {\n if (this.disposed) {\n return;\n }\n if (this.program != null) {\n console.warn('Disposing a GPGPUContext that still has a bound WebGLProgram.' 
+\n ' This is probably a resource leak, delete the program with ' +\n 'GPGPUContext.deleteProgram before disposing.');\n }\n if (this.outputTexture != null) {\n console.warn('Disposing a GPGPUContext that still has a bound output matrix ' +\n 'texture. This is probably a resource leak, delete the output ' +\n 'matrix texture with GPGPUContext.deleteMatrixTexture before ' +\n 'disposing.');\n }\n const gl = this.gl;\n webgl_util.callAndCheck(gl, () => gl.finish());\n webgl_util.callAndCheck(gl, () => gl.bindFramebuffer(gl.FRAMEBUFFER, null));\n webgl_util.callAndCheck(gl, () => gl.deleteFramebuffer(this.framebuffer));\n webgl_util.callAndCheck(gl, () => gl.bindBuffer(gl.ARRAY_BUFFER, null));\n webgl_util.callAndCheck(gl, () => gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null));\n webgl_util.callAndCheck(gl, () => gl.deleteBuffer(this.indexBuffer));\n this.disposed = true;\n }\n createFloat32MatrixTexture(rows, columns) {\n this.throwIfDisposed();\n return gpgpu_util.createFloat32MatrixTexture(this.gl, rows, columns, this.textureConfig);\n }\n createFloat16MatrixTexture(rows, columns) {\n this.throwIfDisposed();\n return gpgpu_util.createFloat16MatrixTexture(this.gl, rows, columns, this.textureConfig);\n }\n createUnsignedBytesMatrixTexture(rows, columns) {\n this.throwIfDisposed();\n return gpgpu_util.createUnsignedBytesMatrixTexture(this.gl, rows, columns, this.textureConfig);\n }\n uploadPixelDataToTexture(texture, pixels) {\n this.throwIfDisposed();\n gpgpu_util.uploadPixelDataToTexture(this.gl, texture, pixels);\n }\n uploadDenseMatrixToTexture(texture, width, height, data) {\n this.throwIfDisposed();\n gpgpu_util.uploadDenseMatrixToTexture(this.gl, texture, width, height, data, this.textureConfig);\n }\n createFloat16PackedMatrixTexture(rows, columns) {\n this.throwIfDisposed();\n return gpgpu_util.createFloat16PackedMatrixTexture(this.gl, rows, columns, this.textureConfig);\n }\n createPackedMatrixTexture(rows, columns) {\n this.throwIfDisposed();\n return gpgpu_util.createPackedMatrixTexture(this.gl, rows, columns, this.textureConfig);\n }\n deleteMatrixTexture(texture) {\n this.throwIfDisposed();\n if (this.outputTexture === texture) {\n webgl_util.unbindColorTextureFromFramebuffer(this.gl, this.framebuffer);\n this.outputTexture = null;\n }\n webgl_util.callAndCheck(this.gl, () => this.gl.deleteTexture(texture));\n }\n downloadByteEncodedFloatMatrixFromOutputTexture(texture, rows, columns) {\n return this.downloadMatrixDriver(texture, () => gpgpu_util.downloadByteEncodedFloatMatrixFromOutputTexture(this.gl, rows, columns, this.textureConfig));\n }\n downloadPackedMatrixFromBuffer(buffer, batch, rows, columns, physicalRows, physicalCols) {\n return gpgpu_util.downloadPackedMatrixFromBuffer(this.gl, buffer, batch, rows, columns, physicalRows, physicalCols, this.textureConfig);\n }\n downloadFloat32MatrixFromBuffer(buffer, size) {\n return gpgpu_util.downloadFloat32MatrixFromBuffer(this.gl, buffer, size);\n }\n createBufferFromTexture(texture, rows, columns) {\n this.bindTextureToFrameBuffer(texture);\n const result = gpgpu_util.createBufferFromOutputTexture(this.gl, rows, columns, this.textureConfig);\n this.unbindTextureToFrameBuffer();\n return result;\n }\n createAndWaitForFence() {\n const fenceContext = this.createFence(this.gl);\n return this.pollFence(fenceContext);\n }\n createFence(gl) {\n let query;\n let isFencePassed;\n if (env().getBool('WEBGL_FENCE_API_ENABLED')) {\n const gl2 = gl;\n const sync = gl2.fenceSync(gl2.SYNC_GPU_COMMANDS_COMPLETE, 0);\n gl.flush();\n 
isFencePassed = () => {\n const status = gl2.clientWaitSync(sync, 0, 0);\n return status === gl2.ALREADY_SIGNALED ||\n status === gl2.CONDITION_SATISFIED;\n };\n query = sync;\n }\n else if (env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION') > 0) {\n query = this.beginQuery();\n this.endQuery();\n isFencePassed = () => this.isQueryAvailable(query, env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION'));\n }\n else {\n // If we have no way to fence, return true immediately. This will fire in\n // WebGL 1.0 when there is no disjoint query timer. In this case, because\n // the fence passes immediately, we'll immediately ask for a download of\n // the texture, which will cause the UI thread to hang.\n isFencePassed = () => true;\n }\n return { query, isFencePassed };\n }\n downloadMatrixFromPackedTexture(texture, physicalRows, physicalCols) {\n return this.downloadMatrixDriver(texture, () => gpgpu_util.downloadMatrixFromPackedOutputTexture(this.gl, physicalRows, physicalCols));\n }\n createProgram(fragmentShaderSource) {\n this.throwIfDisposed();\n const gl = this.gl;\n const fragmentShader = webgl_util.createFragmentShader(gl, fragmentShaderSource);\n const vertexShader = gpgpu_util.createVertexShader(gl);\n const program = webgl_util.createProgram(gl);\n webgl_util.callAndCheck(gl, () => gl.attachShader(program, vertexShader));\n webgl_util.callAndCheck(gl, () => gl.attachShader(program, fragmentShader));\n webgl_util.linkProgram(gl, program);\n if (this.debug) {\n webgl_util.validateProgram(gl, program);\n }\n if (!this.vertexAttrsAreBound) {\n this.setProgram(program);\n this.vertexAttrsAreBound = gpgpu_util.bindVertexProgramAttributeStreams(gl, this.program, this.vertexBuffer);\n }\n return program;\n }\n deleteProgram(program) {\n this.throwIfDisposed();\n if (program === this.program) {\n this.program = null;\n }\n if (program != null) {\n webgl_util.callAndCheck(this.gl, () => this.gl.deleteProgram(program));\n }\n }\n setProgram(program) {\n this.throwIfDisposed();\n this.program = program;\n if ((this.program != null) && this.debug) {\n webgl_util.validateProgram(this.gl, this.program);\n }\n webgl_util.callAndCheck(this.gl, () => this.gl.useProgram(program));\n }\n getUniformLocation(program, uniformName, shouldThrow = true) {\n this.throwIfDisposed();\n if (shouldThrow) {\n return webgl_util.getProgramUniformLocationOrThrow(this.gl, program, uniformName);\n }\n else {\n return webgl_util.getProgramUniformLocation(this.gl, program, uniformName);\n }\n }\n getAttributeLocation(program, attribute) {\n this.throwIfDisposed();\n return webgl_util.callAndCheck(this.gl, () => this.gl.getAttribLocation(program, attribute));\n }\n getUniformLocationNoThrow(program, uniformName) {\n this.throwIfDisposed();\n return this.gl.getUniformLocation(program, uniformName);\n }\n setInputMatrixTexture(inputMatrixTexture, uniformLocation, textureUnit) {\n this.throwIfDisposed();\n this.throwIfNoProgram();\n webgl_util.bindTextureToProgramUniformSampler(this.gl, inputMatrixTexture, uniformLocation, textureUnit);\n }\n setOutputMatrixTexture(outputMatrixTexture, rows, columns) {\n this.setOutputMatrixTextureDriver(outputMatrixTexture, columns, rows);\n }\n setOutputPackedMatrixTexture(outputPackedMatrixTexture, rows, columns) {\n this.throwIfDisposed();\n const [width, height] = tex_util.getPackedMatrixTextureShapeWidthHeight(rows, columns);\n this.setOutputMatrixTextureDriver(outputPackedMatrixTexture, width, height);\n }\n setOutputMatrixWriteRegion(startRow, numRows, startColumn, 
numColumns) {\n this.setOutputMatrixWriteRegionDriver(startColumn, startRow, numColumns, numRows);\n }\n setOutputPackedMatrixWriteRegion(startRow, numRows, startColumn, numColumns) {\n throw new Error('setOutputPackedMatrixWriteRegion not implemented.');\n }\n debugValidate() {\n if (this.program != null) {\n webgl_util.validateProgram(this.gl, this.program);\n }\n webgl_util.validateFramebuffer(this.gl);\n }\n executeProgram() {\n this.throwIfDisposed();\n this.throwIfNoProgram();\n const gl = this.gl;\n if (this.debug) {\n this.debugValidate();\n }\n webgl_util.callAndCheck(gl, () => gl.drawElements(gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0));\n }\n blockUntilAllProgramsCompleted() {\n this.throwIfDisposed();\n webgl_util.callAndCheck(this.gl, () => this.gl.finish());\n }\n getQueryTimerExtension() {\n if (this.disjointQueryTimerExtension == null) {\n this.disjointQueryTimerExtension =\n webgl_util.getExtensionOrThrow(this.gl, env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION') === 2 ?\n 'EXT_disjoint_timer_query_webgl2' :\n 'EXT_disjoint_timer_query');\n }\n return this.disjointQueryTimerExtension;\n }\n getQueryTimerExtensionWebGL2() {\n return this.getQueryTimerExtension();\n }\n getQueryTimerExtensionWebGL1() {\n return this.getQueryTimerExtension();\n }\n beginQuery() {\n if (env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION') === 2) {\n const gl2 = this.gl;\n const ext = this.getQueryTimerExtensionWebGL2();\n const query = gl2.createQuery();\n gl2.beginQuery(ext.TIME_ELAPSED_EXT, query);\n return query;\n }\n const ext = this.getQueryTimerExtensionWebGL1();\n const query = ext.createQueryEXT();\n ext.beginQueryEXT(ext.TIME_ELAPSED_EXT, query);\n return query;\n }\n endQuery() {\n if (env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION') === 2) {\n const gl2 = this.gl;\n const ext = this.getQueryTimerExtensionWebGL2();\n gl2.endQuery(ext.TIME_ELAPSED_EXT);\n return;\n }\n const ext = this.getQueryTimerExtensionWebGL1();\n ext.endQueryEXT(ext.TIME_ELAPSED_EXT);\n }\n async waitForQueryAndGetTime(query) {\n await util.repeatedTry(() => this.disposed || // while testing contexts are created / disposed\n // in rapid succession, so without this check we\n // may poll for the query timer indefinitely\n this.isQueryAvailable(query, env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION')));\n return this.getQueryTime(query, env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION'));\n }\n getQueryTime(query, queryTimerVersion) {\n if (queryTimerVersion === 0) {\n return null;\n }\n if (queryTimerVersion === 2) {\n const gl2 = this.gl;\n const timeElapsedNanos = gl2.getQueryParameter(query, gl2.QUERY_RESULT);\n // Return milliseconds.\n return timeElapsedNanos / 1000000;\n }\n else {\n const ext = this.getQueryTimerExtensionWebGL1();\n const timeElapsedNanos = ext.getQueryObjectEXT(query, ext.QUERY_RESULT_EXT);\n // Return milliseconds.\n return timeElapsedNanos / 1000000;\n }\n }\n isQueryAvailable(query, queryTimerVersion) {\n if (queryTimerVersion === 0) {\n return true;\n }\n if (queryTimerVersion === 2) {\n const gl2 = this.gl;\n const ext = this.getQueryTimerExtensionWebGL2();\n const available = gl2.getQueryParameter(query, gl2.QUERY_RESULT_AVAILABLE);\n if (this.disjoint == null) {\n this.disjoint = this.gl.getParameter(ext.GPU_DISJOINT_EXT);\n }\n return available && !this.disjoint;\n }\n else {\n const ext = this.getQueryTimerExtensionWebGL1();\n const available = ext.getQueryObjectEXT(query, ext.QUERY_RESULT_AVAILABLE_EXT);\n if 
(this.disjoint == null) {\n this.disjoint = this.gl.getParameter(ext.GPU_DISJOINT_EXT);\n }\n return available && !this.disjoint;\n }\n }\n pollFence(fenceContext) {\n return new Promise(resolve => {\n this.addItemToPoll(() => fenceContext.isFencePassed(), () => resolve());\n });\n }\n pollItems() {\n // Find the last query that has finished.\n const index = linearSearchLastTrue(this.itemsToPoll.map(x => x.isDoneFn));\n for (let i = 0; i <= index; ++i) {\n const { resolveFn } = this.itemsToPoll[i];\n resolveFn();\n }\n this.itemsToPoll = this.itemsToPoll.slice(index + 1);\n }\n addItemToPoll(isDoneFn, resolveFn) {\n this.itemsToPoll.push({ isDoneFn, resolveFn });\n if (this.itemsToPoll.length > 1) {\n // We already have a running loop that polls.\n return;\n }\n // Start a new loop that polls.\n util.repeatedTry(() => {\n this.pollItems();\n // End the loop if no more items to poll.\n return this.itemsToPoll.length === 0;\n });\n }\n bindTextureToFrameBuffer(texture) {\n this.throwIfDisposed();\n webgl_util.bindColorTextureToFramebuffer(this.gl, texture, this.framebuffer);\n if (this.debug) {\n webgl_util.validateFramebuffer(this.gl);\n }\n }\n unbindTextureToFrameBuffer() {\n if (this.outputTexture != null) {\n webgl_util.bindColorTextureToFramebuffer(this.gl, this.outputTexture, this.framebuffer);\n if (this.debug) {\n webgl_util.validateFramebuffer(this.gl);\n }\n }\n else {\n webgl_util.unbindColorTextureFromFramebuffer(this.gl, this.framebuffer);\n }\n }\n downloadMatrixDriver(texture, downloadAndDecode) {\n this.bindTextureToFrameBuffer(texture);\n const result = downloadAndDecode();\n this.unbindTextureToFrameBuffer();\n return result;\n }\n setOutputMatrixTextureDriver(outputMatrixTextureMaybePacked, width, height) {\n this.throwIfDisposed();\n const gl = this.gl;\n webgl_util.bindColorTextureToFramebuffer(gl, outputMatrixTextureMaybePacked, this.framebuffer);\n if (this.debug) {\n webgl_util.validateFramebuffer(gl);\n }\n this.outputTexture = outputMatrixTextureMaybePacked;\n webgl_util.callAndCheck(gl, () => gl.viewport(0, 0, width, height));\n webgl_util.callAndCheck(gl, () => gl.scissor(0, 0, width, height));\n }\n setOutputMatrixWriteRegionDriver(x, y, width, height) {\n this.throwIfDisposed();\n webgl_util.callAndCheck(this.gl, () => this.gl.scissor(x, y, width, height));\n }\n throwIfDisposed() {\n if (this.disposed) {\n throw new Error('Attempted to use disposed GPGPUContext.');\n }\n }\n throwIfNoProgram() {\n if (this.program == null) {\n throw new Error('No GPU program is currently set.');\n }\n }\n}\n/**\n * Finds the index of the last true element using linear search.\n * Note: We can't do binary search because Chrome expects us to explicitly\n * test all fences before download:\n * https://github.com/tensorflow/tfjs/issues/1145\n */\nexport function linearSearchLastTrue(arr) {\n let i = 0;\n for (; i < arr.length; ++i) {\n const isDone = arr[i]();\n if (!isDone) {\n break;\n }\n }\n return i - 1;\n}\n//# sourceMappingURL=gpgpu_context.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env, util } from '@tensorflow/tfjs-core';\nimport * as shader_compiler from './shader_compiler';\nexport function compileProgram(gpgpu, program, inputs, output) {\n const userCode = program.userCode;\n const inputInfos = inputs.map((input, i) => {\n const shapeInfo = {\n logicalShape: input.shape,\n texShape: input.isUniform ? null : input.texData.texShape,\n isUniform: input.isUniform,\n isPacked: input.isUniform ? false : input.texData.isPacked,\n flatOffset: null\n };\n if (input.texData != null && input.texData.slice != null &&\n input.texData.slice.flatOffset > 0) {\n shapeInfo.flatOffset = input.texData.slice.flatOffset;\n }\n return { name: program.variableNames[i], shapeInfo };\n });\n const inShapeInfos = inputInfos.map(x => x.shapeInfo);\n const outShapeInfo = {\n logicalShape: output.shape,\n texShape: output.texData.texShape,\n isUniform: false,\n isPacked: output.texData.isPacked,\n flatOffset: null\n };\n const source = shader_compiler.makeShader(inputInfos, outShapeInfo, userCode, program.packedInputs);\n const webGLProgram = gpgpu.createProgram(source);\n // Add special uniforms (NAN, INFINITY)\n let infLoc = null;\n const nanLoc = gpgpu.getUniformLocation(webGLProgram, 'NAN', false);\n if (env().getNumber('WEBGL_VERSION') === 1) {\n infLoc = gpgpu.getUniformLocation(webGLProgram, 'INFINITY', false);\n }\n // Add user-defined uniforms\n const uniformLocations = {};\n for (let i = 0; i < program.variableNames.length; i++) {\n const varName = program.variableNames[i];\n const shouldThrow = false;\n uniformLocations[varName] =\n gpgpu.getUniformLocation(webGLProgram, varName, shouldThrow);\n uniformLocations[`offset${varName}`] =\n gpgpu.getUniformLocation(webGLProgram, `offset${varName}`, shouldThrow);\n }\n return {\n program,\n source,\n webGLProgram,\n uniformLocations,\n inShapeInfos,\n outShapeInfo,\n infLoc,\n nanLoc,\n };\n}\nfunction validateBinaryAndProgram(shapeInfos, inputs) {\n if (shapeInfos.length !== inputs.length) {\n throw Error(`Binary was compiled with ${shapeInfos.length} inputs, but ` +\n `was executed with ${inputs.length} inputs`);\n }\n shapeInfos.forEach((s, i) => {\n const shapeA = s.logicalShape;\n const input = inputs[i];\n const shapeB = input.shape;\n if (!util.arraysEqual(shapeA, shapeB)) {\n throw Error(`Binary was compiled with different shapes than ` +\n `the current args. Shapes ${shapeA} and ${shapeB} must match`);\n }\n // The input is uploaded as uniform.\n if (s.isUniform && input.isUniform) {\n return;\n }\n const texShapeA = s.texShape;\n const texShapeB = input.isUniform ? null : input.texData.texShape;\n if (!util.arraysEqual(texShapeA, texShapeB)) {\n throw Error(`Binary was compiled with different texture shapes than the` +\n ` current args. 
Shape ${texShapeA} and ${texShapeB} must match`);\n }\n });\n}\nexport function runProgram(gpgpu, binary, inputs, output, customSetup) {\n validateBinaryAndProgram(binary.inShapeInfos, inputs);\n validateBinaryAndProgram([binary.outShapeInfo], [output]);\n const outTex = output.texData.texture;\n const outTexShape = output.texData.texShape;\n if (output.texData.isPacked) {\n gpgpu.setOutputPackedMatrixTexture(outTex, outTexShape[0], outTexShape[1]);\n }\n else {\n gpgpu.setOutputMatrixTexture(outTex, outTexShape[0], outTexShape[1]);\n }\n gpgpu.setProgram(binary.webGLProgram);\n // Set special uniforms (NAN, INFINITY)\n if (env().getNumber('WEBGL_VERSION') === 1) {\n if (binary.infLoc !== null) {\n gpgpu.gl.uniform1f(binary.infLoc, Infinity);\n }\n }\n if (binary.nanLoc !== null) {\n gpgpu.gl.uniform1f(binary.nanLoc, NaN);\n }\n // Set user-defined inputs\n inputs.forEach((input, i) => {\n const varName = binary.program.variableNames[i];\n const varLoc = binary.uniformLocations[varName];\n const varOffsetLoc = binary.uniformLocations[`offset${varName}`];\n if (varLoc == null) {\n // The compiler inferred that this variable is not used in this shader.\n return;\n }\n if (input.isUniform) {\n // Upload the values of the tensor as uniform.\n if (util.sizeFromShape(input.shape) < 2) {\n gpgpu.gl.uniform1f(varLoc, input.uniformValues[0]);\n }\n else {\n let vals = input.uniformValues;\n if (!(vals instanceof Float32Array)) {\n vals = new Float32Array(vals);\n }\n gpgpu.gl.uniform1fv(varLoc, vals);\n }\n return;\n }\n // If the input was sliced, upload the flat offset index.\n if (input.texData.slice != null && varOffsetLoc != null) {\n gpgpu.gl.uniform1i(varOffsetLoc, input.texData.slice.flatOffset);\n }\n gpgpu.setInputMatrixTexture(input.texData.texture, varLoc, i);\n });\n if (customSetup != null) {\n customSetup(gpgpu, binary.webGLProgram);\n }\n gpgpu.executeProgram();\n}\nexport function makeShaderKey(program, inputs, output) {\n let keyInputs = '';\n inputs.concat(output).forEach(x => {\n const hasOffset = x.texData != null && x.texData.slice != null &&\n x.texData.slice.flatOffset > 0;\n const texShape = x.isUniform ? 'uniform' : x.texData.texShape;\n keyInputs += `${x.shape}_${texShape}_${hasOffset}`;\n });\n const keyUserCode = program.userCode;\n let key = program.constructor.name;\n // Fast string concat. See https://jsperf.com/string-concatenation/14.\n key += '_' + keyInputs + '_' + keyUserCode;\n return key;\n}\n//# sourceMappingURL=gpgpu_math.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getGlslDifferences } from './glsl_version';\nexport class Im2ColPackedProgram {\n constructor(outputShape, inputShape, convInfo) {\n this.variableNames = ['A'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = outputShape;\n const { filterWidth, inChannels, strideWidth, strideHeight, padInfo, outWidth, dilationWidth, dilationHeight, dataFormat } = convInfo;\n const { left, top } = padInfo;\n const itemsPerBlockRow = inChannels * filterWidth;\n const glsl = getGlslDifferences();\n const isChannelsLast = dataFormat === 'channelsLast';\n const rowDim = isChannelsLast ? 0 : 1;\n const colDim = isChannelsLast ? 1 : 2;\n let unrolled = ``;\n for (let row = 0; row <= 1; row++) {\n for (let col = 0; col <= 1; col++) {\n unrolled += `\n blockIndex = rc.y + ${col};\n pos = rc.x + ${row};\n\n if(blockIndex < ${outputShape[1]} && pos < ${outputShape[0]}) {\n offsetY = int(blockIndex / (${outWidth})) * ${strideHeight} - ${top};\n d0 = offsetY + ${dilationHeight} * (pos / ${itemsPerBlockRow});\n\n if(d0 < ${inputShape[rowDim]} && d0 >= 0) {\n\n offsetX = int(mod(float(blockIndex), ${outWidth}.) * ${strideWidth}. - ${left}.);\n d1 = offsetX + ${dilationWidth} * (int(mod(float(pos), ${itemsPerBlockRow}.) / ${inChannels}.));\n\n if(d1 < ${inputShape[colDim]} && d1 >= 0) {\n\n ch = int(mod(float(pos), ${inChannels}.));\n\n if (${isChannelsLast}) {\n innerDims = vec2(d1, ch);\n result[${row * 2 + col}] = getChannel(\n getA(d0, int(innerDims.x),\n int(innerDims.y)), innerDims);\n } else {\n innerDims = vec2(d0, d1);\n result[${row * 2 + col}] = getChannel(\n getA(ch, int(innerDims.x),\n int(innerDims.y)), innerDims);\n }\n }\n }\n }\n `;\n }\n }\n this.userCode = `\n void main() {\n ivec2 rc = getOutputCoords();\n\n vec4 result = vec4(0);\n\n int blockIndex, pos, offsetY, d0, offsetX, d1, ch;\n vec2 innerDims;\n\n ${unrolled}\n\n ${glsl.output} = result;\n }\n `;\n }\n}\n//# sourceMappingURL=im2col_packed_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class LRNProgram {\n constructor(xShape, radius, bias, alpha, beta) {\n this.variableNames = ['x'];\n this.outputShape = [];\n const rad = radius;\n const maxD = xShape[3] - 1;\n this.outputShape = xShape;\n // optimize pow(bias + alpha * sum, -beta)\n // src: https://github.com/tensorflow/tensorflow/..\n // blob/26033a1644a9c4a5fbe3170ab2e864b6a4ccd4ca/..\n // tensorflow/core/kernels/mkl_lrn_op.cc#L320\n let powOperator;\n const basis = `float(${bias}) + float(${alpha}) * sum`;\n if (beta === 0.5) {\n powOperator = `inversesqrt(${basis})`;\n }\n else if (beta === 1.0) {\n powOperator = `1.0/(${basis})`;\n }\n else {\n powOperator = `exp(log(${basis}) * float(-${beta}));`;\n }\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int r = coords[1];\n int c = coords[2];\n int d = coords[3];\n float x = getX(b, r, c, d);\n float sum = 0.0;\n for (int j = -${rad}; j <= ${rad}; j++) {\n int idx = d + j;\n if (idx >= 0 && idx <= ${maxD}) {\n float z = getX(b, r, c, idx);\n sum += z * z;\n }\n }\n float val = x * ${powOperator};\n setOutput(val);\n }\n `;\n }\n}\n//# sourceMappingURL=lrn_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class LRNGradProgram {\n constructor(inputShape, depthRadius, bias, alpha, beta) {\n this.variableNames = ['inputImage', 'outputImage', 'dy'];\n this.outputShape = [];\n this.outputShape = inputShape;\n this.depth = inputShape[3];\n this.depthRadius = depthRadius;\n this.bias = bias;\n this.alpha = alpha;\n this.beta = beta;\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int r = coords[1];\n int c = coords[2];\n\n float result = 0.0;\n for (int d = 0; d < ${this.depth}; ++d) {\n int depthBegin = int(max(0.0, float(d - ${depthRadius})));\n int depthEnd = int(min(float(${this.depth}),\n float(d + ${depthRadius} + 1)));\n\n const int MIN_DEPTH_BEGIN = 0;\n const int MAX_DEPTH_END = ${this.depth};\n\n float norm = 0.0;\n for (int k = MIN_DEPTH_BEGIN; k < MAX_DEPTH_END; ++k) {\n if (k < depthBegin){\n continue;\n }\n else if (k >= depthBegin && k < depthEnd) {\n norm += getInputImage(b, r, c, k) * getInputImage(b, r, c, k);\n }\n else {\n break;\n }\n }\n\n norm = float(${alpha}) * norm + float(${bias});\n\n for(int k = MIN_DEPTH_BEGIN; k < MAX_DEPTH_END; ++k){\n if (k < depthBegin){\n continue;\n }\n else if (k >= depthBegin && k < depthEnd){\n float dyi = -2.0 * float(${alpha})\n * float(${beta})\n * getInputImage(b ,r ,c, k) * getOutputImage(b, r, c, d)\n / norm;\n if (k == d) {\n dyi += pow(norm, -1.0 * ${beta});\n }\n if (k == coords[3]) {\n dyi *= getDy(b, r, c, d);\n result += dyi;\n }\n }\n else {\n break;\n }\n }\n }\n setOutput(result);\n }\n `;\n }\n}\n//# sourceMappingURL=lrn_grad_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class LRNPackedProgram {\n constructor(xShape, radius, bias, alpha, beta) {\n this.variableNames = ['x'];\n this.outputShape = [];\n this.packedInputs = true;\n this.packedOutput = true;\n const rad = radius;\n const maxD = xShape[3] - 1;\n this.outputShape = xShape;\n // optimize pow(bias + alpha * sum, -beta)\n // src: https://github.com/tensorflow/tensorflow/..\n // blob/26033a1644a9c4a5fbe3170ab2e864b6a4ccd4ca/..\n // tensorflow/core/kernels/mkl_lrn_op.cc#L320\n let powOperator;\n const basis = `float(${bias}) + float(${alpha}) * sum`;\n if (beta === 0.5) {\n powOperator = `inversesqrt(${basis})`;\n }\n else if (beta === 1.0) {\n powOperator = `1.0/(${basis})`;\n }\n else {\n powOperator = `exp(log(${basis}) * float(-${beta}));`;\n }\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords.x;\n int r = coords.y;\n int c = coords.z;\n int d = coords.w;\n\n bool hasNextCol = d < ${this.outputShape[3]};\n bool hasNextRow = c < ${this.outputShape[2]};\n\n vec4 sum = vec4(0.);\n vec4 xFragAtOutputCoords = getX(b, r, c, d);\n\n vec4 xAtOutputCoords = vec4(\n getChannel(xFragAtOutputCoords, vec2(c, d)),\n hasNextCol ?\n getChannel(xFragAtOutputCoords, vec2(c, d + 1)) : 0.0,\n hasNextRow ?\n getChannel(xFragAtOutputCoords , vec2(c + 1, d)) : 0.0,\n (hasNextRow && hasNextCol) ?\n getChannel(xFragAtOutputCoords, vec2(c + 1, d + 1)) : 0.0\n );\n\n int firstChannel = d - ${rad};\n vec2 cache = vec2(0.);\n if(firstChannel >= 0){\n vec4 firstChannelFrag = getX(b, r, c, firstChannel);\n cache.x = getChannel(firstChannelFrag, vec2(c, firstChannel));\n if(hasNextRow){\n cache.y = getChannel(firstChannelFrag, vec2(c + 1, firstChannel));\n }\n }\n\n ivec2 depth = ivec2(d, d + 1);\n for (int j = - ${rad}; j <= ${rad}; j++) {\n ivec2 idx = depth + j;\n bvec2 aboveLowerBound = greaterThanEqual(idx, ivec2(0));\n bvec2 belowUpperBound = lessThanEqual(idx, ivec2(${maxD}));\n\n bool depthInRange = aboveLowerBound.x && belowUpperBound.x;\n bool depthPlusOneInRange = aboveLowerBound.y && belowUpperBound.y;\n\n if(depthInRange || depthPlusOneInRange){\n vec4 z = vec4(0.);\n vec4 xFragAtCurrentDepth;\n z.xz = cache.xy;\n if(depthPlusOneInRange && hasNextCol){\n xFragAtCurrentDepth = idx.y != d ?\n getX(b, r, c, idx.y) : xFragAtOutputCoords;\n z.y = getChannel(xFragAtCurrentDepth, vec2(c, idx.y));\n if(hasNextRow){\n z.w = getChannel(xFragAtCurrentDepth, vec2(c + 1, idx.y));\n }\n }\n cache.xy = z.yw;\n sum += z * z;\n }\n }\n vec4 result = xAtOutputCoords * ${powOperator};\n setOutput(result);\n }\n `;\n }\n}\n//# sourceMappingURL=lrn_packed_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class MaxPool2DBackpropProgram {\n constructor(convInfo) {\n this.variableNames = ['dy', 'maxPos'];\n this.outputShape = convInfo.inShape;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationHeight = convInfo.dilationHeight;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padTop = effectiveFilterHeight - 1 - convInfo.padInfo.top;\n const padLeft = effectiveFilterWidth - 1 - convInfo.padInfo.left;\n const lastIndex = effectiveFilterHeight * effectiveFilterWidth - 1;\n this.userCode = `\n const ivec2 pads = ivec2(${padTop}, ${padLeft});\n\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int d = coords[3];\n\n ivec2 dyRCCorner = coords.yz - pads;\n int dyRCorner = dyRCCorner.x;\n int dyCCorner = dyRCCorner.y;\n\n // Convolve dy(?, ?, d) with pos mask(:, :, d) to get dx(xR, xC, d).\n // ? = to be determined. : = across all values in that axis.\n float dotProd = 0.0;\n for (int wR = 0; wR < ${effectiveFilterHeight};\n wR += ${dilationHeight}) {\n float dyR = float(dyRCorner + wR) / ${strideHeight}.0;\n\n if (dyR < 0.0 || dyR >= ${convInfo.outHeight}.0 || fract(dyR) > 0.0) {\n continue;\n }\n int idyR = int(dyR);\n\n for (int wC = 0; wC < ${effectiveFilterWidth}; wC++) {\n float dyC = float(dyCCorner + wC) / ${strideWidth}.0;\n\n if (dyC < 0.0 || dyC >= ${convInfo.outWidth}.0 ||\n fract(dyC) > 0.0) {\n continue;\n }\n int idyC = int(dyC);\n\n float dyValue = getDy(b, idyR, idyC, d);\n int maxPosValue = ${lastIndex} - int(getMaxPos(b, idyR, idyC, d));\n\n // Get the current value, check it against the value from the\n // position matrix.\n int curPosValue = wR * ${effectiveFilterWidth} + wC;\n float mask = float(maxPosValue == curPosValue ? 
1.0 : 0.0);\n\n dotProd += dyValue * mask;\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\nexport class MaxPool3DBackpropProgram {\n constructor(convInfo) {\n this.variableNames = ['dy', 'maxPos'];\n this.outputShape = convInfo.inShape;\n const strideDepth = convInfo.strideDepth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationDepth = convInfo.dilationDepth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterDepth = convInfo.effectiveFilterDepth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padFront = effectiveFilterDepth - 1 - convInfo.padInfo.front;\n const padTop = effectiveFilterHeight - 1 - convInfo.padInfo.top;\n const padLeft = effectiveFilterWidth - 1 - convInfo.padInfo.left;\n const lastIndex = effectiveFilterDepth * effectiveFilterHeight * effectiveFilterWidth - 1;\n this.userCode = `\n const ivec3 pads = ivec3(${padFront}, ${padTop}, ${padLeft});\n\n void main() {\n ivec5 coords = getOutputCoords();\n int batch = coords.x;\n int ch = coords.u;\n\n ivec3 dyCorner = ivec3(coords.y, coords.z, coords.w) - pads;\n int dyDCorner = dyCorner.x;\n int dyRCorner = dyCorner.y;\n int dyCCorner = dyCorner.z;\n\n // Convolve dy(?, ?, ?, ch) with pos mask(:, :, :, d) to get\n // dx(xD, xR, xC, ch).\n // ? = to be determined. : = across all values in that axis.\n float dotProd = 0.0;\n\n for (int wD = 0; wD < ${effectiveFilterDepth};\n wD += ${dilationDepth}) {\n float dyD = float(dyDCorner + wD) / ${strideDepth}.0;\n\n if (dyD < 0.0 || dyD >= ${convInfo.outDepth}.0 || fract(dyD) > 0.0) {\n continue;\n }\n int idyD = int(dyD);\n\n for (int wR = 0; wR < ${effectiveFilterHeight};\n wR += ${dilationHeight}) {\n float dyR = float(dyRCorner + wR) / ${strideHeight}.0;\n\n if (dyR < 0.0 || dyR >= ${convInfo.outHeight}.0 ||\n fract(dyR) > 0.0) {\n continue;\n }\n int idyR = int(dyR);\n\n for (int wC = 0; wC < ${effectiveFilterWidth};\n wC += ${dilationWidth}) {\n float dyC = float(dyCCorner + wC) / ${strideWidth}.0;\n\n if (dyC < 0.0 || dyC >= ${convInfo.outWidth}.0 ||\n fract(dyC) > 0.0) {\n continue;\n }\n int idyC = int(dyC);\n\n float dyValue = getDy(batch, idyD, idyR, idyC, ch);\n int maxPosValue = ${lastIndex} -\n int(getMaxPos(batch, idyD, idyR, idyC, ch));\n\n // Get the current value, check it against the value from the\n // position matrix.\n int curPosValue =\n wD * ${effectiveFilterHeight} * ${effectiveFilterWidth} +\n wR * ${effectiveFilterWidth} + wC;\n float mask = float(maxPosValue == curPosValue ? 1.0 : 0.0);\n\n dotProd += dyValue * mask;\n }\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\n//# sourceMappingURL=max_pool_backprop_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class MatMulPackedProgram {\n constructor(aShape, bShape, outputShape, transposeA = false, transposeB = false, addBias = false, activation = null, hasPreluActivation = false) {\n this.variableNames = ['matrixA', 'matrixB'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = outputShape;\n const sharedDim = transposeA ? aShape[1] : aShape[2];\n const sharedDimensionPacked = Math.ceil(sharedDim / 2);\n const aSample = transposeA ? 'i * 2, rc.y' : 'rc.y, i * 2';\n const bSample = transposeB ? 'rc.z, i * 2' : 'i * 2, rc.z';\n const aSwizzle = transposeA ? ['a.xxyy', 'a.zzww'] : ['a.xxzz', 'a.yyww'];\n const bSwizzle = transposeB ? ['b.xzxz', 'b.ywyw'] : ['b.xyxy', 'b.zwzw'];\n let activationSnippet = '', applyActivationSnippet = '';\n if (activation) {\n if (hasPreluActivation) {\n activationSnippet = `vec4 activation(vec4 a) {\n vec4 b = getPreluActivationWeightsAtOutCoords();\n ${activation}\n }`;\n }\n else {\n activationSnippet = `vec4 activation(vec4 x) {\n ${activation}\n }`;\n }\n applyActivationSnippet = `result = activation(result);`;\n }\n const addBiasSnippet = addBias ? 'result += getBiasAtOutCoords();' : '';\n if (addBias) {\n this.variableNames.push('bias');\n }\n if (hasPreluActivation) {\n this.variableNames.push('preluActivationWeights');\n }\n let batchASnippet = 'rc.x';\n let batchBSnippet = 'rc.x';\n if (aShape[0] < bShape[0]) {\n batchASnippet = `int(min(float(rc.x), ${aShape[0] - 1}.))`;\n }\n else if (bShape[0] < aShape[0]) {\n batchBSnippet = `int(min(float(rc.x), ${bShape[0] - 1}.))`;\n }\n this.userCode = `\n ${activationSnippet}\n\n const float sharedDimension = ${sharedDimensionPacked}.0;\n\n vec4 dot2x2ARowBCol(ivec3 rc) {\n vec4 result = vec4(0);\n for (int i = 0; i < ${sharedDimensionPacked}; i++) {\n int batchA = ${batchASnippet};\n int batchB = ${batchBSnippet};\n vec4 a = getMatrixA(batchA, ${aSample});\n vec4 b = getMatrixB(batchB, ${bSample});\n\n // These swizzled products need to be separately added.\n // See: https://github.com/tensorflow/tfjs/issues/1735\n result += (${aSwizzle[0]} * ${bSwizzle[0]});\n result += (${aSwizzle[1]} * ${bSwizzle[1]});\n }\n return result;\n }\n\n void main() {\n ivec3 rc = getOutputCoords();\n vec4 result = dot2x2ARowBCol(rc);\n\n ${addBiasSnippet}\n\n ${applyActivationSnippet}\n\n setOutput(result);\n }\n `;\n }\n}\n//# sourceMappingURL=mulmat_packed_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class MultinomialProgram {\n constructor(batchSize, numOutcomes, numSamples) {\n this.variableNames = ['probs'];\n this.outputShape = [batchSize, numSamples];\n this.userCode = `\n uniform float seed;\n\n void main() {\n ivec2 coords = getOutputCoords();\n int batch = coords[0];\n\n float r = random(seed);\n float cdf = 0.0;\n\n for (int i = 0; i < ${numOutcomes - 1}; i++) {\n cdf += getProbs(batch, i);\n\n if (r < cdf) {\n setOutput(float(i));\n return;\n }\n }\n\n // If no other event happened, last event happened.\n setOutput(float(${numOutcomes - 1}));\n }\n `;\n }\n getCustomSetupFunc(seed) {\n return (gpgpu, webGLProgram) => {\n if (this.seedLoc == null) {\n this.seedLoc = gpgpu.getUniformLocation(webGLProgram, 'seed');\n }\n gpgpu.gl.uniform1f(this.seedLoc, seed);\n };\n }\n}\n//# sourceMappingURL=multinomial_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class OneHotProgram {\n constructor(numIndices, depth, onValue, offValue) {\n this.variableNames = ['indices'];\n this.outputShape = [numIndices, depth];\n this.userCode = `\n void main() {\n ivec2 coords = getOutputCoords();\n int index = round(getIndices(coords.x));\n setOutput(mix(float(${offValue}), float(${onValue}),\n float(index == coords.y)));\n }\n `;\n }\n}\n//# sourceMappingURL=onehot_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getChannels } from './packing_util';\nimport { getCoordsDataType } from './shader_compiler';\nexport class PackProgram {\n constructor(outputShape) {\n this.variableNames = ['A'];\n this.packedInputs = false;\n this.packedOutput = true;\n // Only input / output 3D tensors.\n this.outputShape = outputShape;\n const rank = outputShape.length;\n if (rank === 0) {\n this.userCode = `\n void main() {\n setOutput(vec4(getA(), 0., 0., 0.));\n }\n `;\n }\n else {\n const channels = getChannels('rc', rank);\n const dtype = getCoordsDataType(rank);\n const outOfBoundsCondition = getOutOfBoundsCondition(rank, outputShape, channels);\n const setup = getSetup(rank, outputShape[outputShape.length - 1], outputShape[outputShape.length - 2], channels);\n const output = getOutput(outputShape, channels);\n this.userCode = `\n void main() {\n ${dtype} rc = getOutputCoords();\n\n if(${outOfBoundsCondition}) {\n setOutput(vec4(0));\n } else {\n ${setup}\n\n setOutput(vec4(${output}));\n }\n }\n `;\n }\n }\n}\nfunction getSourceCoordsArr(rank, dims) {\n const coords = [];\n for (let row = 0; row <= 1; row++) {\n for (let col = 0; col <= 1; col++) {\n let coord = `${row === 0 ? 'r' : 'rp1'}, ${col === 0 ? 'c' : 'cp1'}`;\n for (let d = 2; d < rank; d++) {\n coord = `${dims[dims.length - 1 - d]},` + coord;\n }\n coords.push(coord);\n }\n }\n return coords;\n}\nfunction getOutOfBoundsCondition(rank, shape, dims) {\n if (rank === 1) {\n return `rc > ${shape[0]}`;\n }\n let cond = '';\n for (let i = rank - 2; i < rank; i++) {\n cond += `${dims[i]} >= ${shape[i]}`;\n if (i < rank - 1) {\n cond += '||';\n }\n }\n return cond;\n}\nfunction getSetup(rank, cols, rows, dims) {\n if (rank === 1) {\n return '';\n }\n const innerDims = dims.slice(-2);\n return `\n int r = ${innerDims[0]};\n int c = ${innerDims[1]};\n int rp1 = r + 1;\n int cp1 = c + 1;\n\n bool cEdge = cp1 >= ${cols};\n bool rEdge = rp1 >= ${rows};\n `;\n}\nfunction getOutput(shape, dims) {\n const rank = shape.length;\n const sourceCoords = getSourceCoordsArr(rank, dims);\n if (rank === 1) {\n return `getA(rc),\n rc + 1 >= ${shape[0]} ? 0. : getA(rc + 1),\n 0, 0`;\n }\n return `getA(${sourceCoords[0]}),\n cEdge ? 0. : getA(${sourceCoords[1]}),\n rEdge ? 0. : getA(${sourceCoords[2]}),\n rEdge || cEdge ? 0. : getA(${sourceCoords[3]})`;\n}\n//# sourceMappingURL=pack_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getCoordsDataType } from './shader_compiler';\nexport class PadProgram {\n constructor(xShape, paddings, constantValue) {\n this.variableNames = ['x'];\n this.outputShape = paddings.map((p, i) => p[0] /* beforePad */ + xShape[i] + p[1] /* afterPad */);\n const rank = xShape.length;\n const type = getCoordsDataType(rank);\n const start = paddings.map(p => p[0]).join(',');\n const end = paddings.map((p, i) => p[0] + xShape[i]).join(',');\n const unpackedCoords = ['coords[0]', 'coords[1]', 'coords[2]', 'coords[3]'].slice(0, rank);\n if (rank === 1) {\n this.userCode = `\n int start = ${start};\n int end = ${end};\n\n void main() {\n int outC = getOutputCoords();\n if (outC < start || outC >= end) {\n setOutput(float(${constantValue}));\n } else {\n setOutput(getX(outC - start));\n }\n }\n `;\n return;\n }\n this.userCode = `\n ${type} start = ${type}(${start});\n ${type} end = ${type}(${end});\n\n void main() {\n ${type} outC = getOutputCoords();\n if (any(lessThan(outC, start)) || any(greaterThanEqual(outC, end))) {\n setOutput(float(${constantValue}));\n } else {\n ${type} coords = outC - start;\n setOutput(getX(${unpackedCoords}));\n }\n }\n `;\n }\n}\n//# sourceMappingURL=pad_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getChannels } from './packing_util';\nimport { getCoordsDataType } from './shader_compiler';\nexport class PadPackedProgram {\n constructor(xShape, paddings, constantValue) {\n this.variableNames = ['x'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = paddings.map((p, i) => p[0] /* beforePad */ + xShape[i] + p[1] /* afterPad */);\n const rank = xShape.length;\n const dtype = getCoordsDataType(rank);\n const start = paddings.map(p => p[0]).join(',');\n const end = paddings.map((p, i) => p[0] + xShape[i]).join(',');\n const coords = getChannels('rc', rank);\n const source = getChannels('source', rank);\n const cLimit = `${coords[rank - 1]} < ${this.outputShape[rank - 1]}`;\n const innerDims = rank === 1 ? 'source' : `vec2(${source.slice(-2).join()})`;\n const componentSetup = [\n `${dtype} rc = outputLoc;`, `${coords[rank - 1]} += 1;\n if(${cLimit}) {\n `,\n rank === 1 ? 
'' : `}\n rc = outputLoc;\n ${coords[rank - 2]} += 1;\n if(${coords[rank - 2]} < ${this.outputShape[rank - 2]}) {`,\n rank === 1 ? '' : ` ${coords[rank - 1]} += 1;\n if(${cLimit}) {`\n ];\n const paddingArea = rank === 1 ?\n 'rc < start || rc >= end' :\n 'any(lessThan(rc, start)) || any(greaterThanEqual(rc, end))';\n let mainLoop = '';\n for (let i = 0, j = rank === 1 ? 2 : 4; i < j; i++) {\n mainLoop += `\n ${componentSetup[i]}\n if (${paddingArea}) {\n result[${i}] = float(${constantValue});\n } else {\n ${dtype} source = rc - start;\n result[${i}] = getChannel(getX(${source.join()}), ${innerDims});\n }\n `;\n }\n mainLoop += (rank === 1 ? `} ` : `}}`);\n this.userCode = `\n const ${dtype} start = ${dtype}(${start});\n const ${dtype} end = ${dtype}(${end});\n\n void main() {\n ${dtype} outputLoc = getOutputCoords();\n vec4 result = vec4(0.);\n ${mainLoop}\n setOutput(result);\n }\n `;\n }\n}\n//# sourceMappingURL=pad_packed_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class Pool2DProgram {\n constructor(convInfo, poolType, computePositions, flattenPositions = false, includeBatchInIndex = false) {\n this.variableNames = ['x'];\n if (poolType === 'avg' && computePositions) {\n throw new Error('Cannot compute positions for average pool.');\n }\n const filterWidth = convInfo.filterWidth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n this.outputShape = convInfo.outShape;\n const isAvgPool = poolType === 'avg';\n const batchFlattenPositionStr = `((batch * ${convInfo.inHeight} + xR) * ${convInfo.inWidth} + xC) * ${convInfo.inChannels} + d`;\n const flattenPositionStr = `(xR * ${convInfo.inWidth} + xC) * ${convInfo.inChannels} + d`;\n let initializationValue = '0.0';\n if (!isAvgPool) {\n // WebGL on Firefox Linux can't compile 1/0 so we do 1/eps.\n initializationValue = '-1.0 / 1e-20';\n }\n if (computePositions) {\n const compareOp = '>=';\n this.userCode = `\n const ivec2 strides = ivec2(${strideHeight}, ${strideWidth});\n const ivec2 pads = ivec2(${padTop}, ${padLeft});\n\n void main() {\n ivec4 coords = getOutputCoords();\n int batch = coords[0];\n int d = coords[3];\n\n ivec2 xRCCorner = coords.yz * strides - pads;\n int xRCorner = xRCCorner.x;\n int xCCorner = xRCCorner.y;\n\n // max/min x(?, ?, d) to get y(yR, yC, d).\n // ? 
= to be determined\n float minMaxValue = 0.0;\n float minMaxValueFound = 0.0;\n int minMaxPosition = 0;\n float avgValue = 0.0;\n\n for (int wR = 0; wR < ${effectiveFilterHeight};\n wR += ${dilationHeight}) {\n int xR = xRCorner + wR;\n\n if (xR < 0 || xR >= ${convInfo.inHeight}) {\n continue;\n }\n\n for (int wC = 0; wC < ${effectiveFilterWidth};\n wC += ${dilationWidth}) {\n int xC = xCCorner + wC;\n\n if (xC < 0 || xC >= ${convInfo.inWidth}) {\n continue;\n }\n\n float value = getX(batch, xR, xC, d);\n\n // If a min / max value has already been found, use it. If not,\n // use the current value.\n float currMinMaxValue = mix(\n value, minMaxValue, minMaxValueFound);\n if (value ${compareOp} currMinMaxValue) {\n minMaxValue = value;\n minMaxValueFound = 1.0;\n minMaxPosition = ${flattenPositions ? (includeBatchInIndex ? batchFlattenPositionStr :\n flattenPositionStr) :\n `wR * ${effectiveFilterWidth} + wC`};\n }\n }\n }\n setOutput(float(minMaxPosition));\n }\n `;\n return;\n }\n const compareOp = 'max';\n let returnValue = `${poolType}(${poolType}(${poolType}(` +\n 'minMaxValue[0], minMaxValue[1]), minMaxValue[2]), minMaxValue[3])';\n if (poolType === 'avg') {\n returnValue = `avgValue / count`;\n }\n const filterWidthNearestVec4 = Math.floor(filterWidth / 4) * 4;\n const filterWidthVec4Remainder = filterWidth % 4;\n const updateSnippet = `\n if (${isAvgPool}) {\n avgValue += dot(values, ones);\n } else {\n minMaxValue = ${compareOp}(values, minMaxValue);\n }\n `;\n this.userCode = `\n const ivec2 strides = ivec2(${strideHeight}, ${strideWidth});\n const ivec2 pads = ivec2(${padTop}, ${padLeft});\n const float initializationValue = ${initializationValue};\n const vec4 ones = vec4(1.0, 1.0, 1.0, 1.0);\n\n float count = 0.0;\n\n float getValue(int batch, int xR, int xC, int d) {\n if (xC < 0 || xC >= ${convInfo.inWidth}) {\n return initializationValue;\n }\n count += 1.0;\n return getX(batch, xR, xC, d);\n }\n\n void main() {\n ivec4 coords = getOutputCoords();\n int batch = coords[0];\n int d = coords[3];\n\n ivec2 xRCCorner = coords.yz * strides - pads;\n int xRCorner = xRCCorner.x;\n int xCCorner = xRCCorner.y;\n\n // max/min x(?, ?, d) to get y(yR, yC, d).\n // ? 
= to be determined\n vec4 minMaxValue = vec4(${initializationValue});\n float avgValue = 0.0;\n count = 0.0;\n\n for (int wR = 0; wR < ${effectiveFilterHeight};\n wR += ${dilationHeight}) {\n int xR = xRCorner + wR;\n\n if (xR < 0 || xR >= ${convInfo.inHeight}) {\n continue;\n }\n\n for (int wC = 0; wC < ${filterWidthNearestVec4}; wC += 4) {\n int xC = xCCorner + wC * ${dilationWidth};\n\n vec4 values = vec4(\n getValue(batch, xR, xC, d),\n getValue(batch, xR, xC + ${dilationWidth}, d),\n getValue(batch, xR, xC + 2 * ${dilationWidth}, d),\n getValue(batch, xR, xC + 3 * ${dilationWidth}, d)\n );\n\n ${updateSnippet}\n }\n\n int xC = xCCorner + ${filterWidthNearestVec4};\n if (${filterWidthVec4Remainder === 1}) {\n vec4 values = vec4(\n getValue(batch, xR, xC, d),\n initializationValue,\n initializationValue,\n initializationValue\n );\n\n ${updateSnippet}\n } else if (${filterWidthVec4Remainder === 2}) {\n vec4 values = vec4(\n getValue(batch, xR, xC, d),\n getValue(batch, xR, xC + ${dilationWidth}, d),\n initializationValue,\n initializationValue\n );\n\n ${updateSnippet}\n } else if (${filterWidthVec4Remainder === 3}) {\n vec4 values = vec4(\n getValue(batch, xR, xC, d),\n getValue(batch, xR, xC + ${dilationWidth}, d),\n getValue(batch, xR, xC + 2 * ${dilationWidth}, d),\n initializationValue\n );\n\n ${updateSnippet}\n }\n }\n setOutput(${returnValue});\n }\n `;\n }\n}\nexport class Pool3DProgram {\n constructor(convInfo, poolType, computePositions, flattenPositions = false, includeBatchInIndex = false) {\n this.variableNames = ['x'];\n if (poolType === 'avg' && computePositions) {\n throw new Error('Cannot compute positions for average pool.');\n }\n const filterWidth = convInfo.filterWidth;\n const strideDepth = convInfo.strideDepth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationDepth = convInfo.dilationDepth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterDepth = convInfo.effectiveFilterDepth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padFront = convInfo.padInfo.front;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n this.outputShape = convInfo.outShape;\n const isAvgPool = poolType === 'avg';\n let initializationValue = '0.0';\n if (!isAvgPool) {\n // WebGL on Firefox Linux can't compile 1/0 so we do 1/eps.\n initializationValue = '-1.0 / 1e-20';\n }\n if (computePositions) {\n const compareOp = '>=';\n this.userCode = `\n const ivec3 strides =\n ivec3(${strideDepth}, ${strideHeight}, ${strideWidth});\n const ivec3 pads = ivec3(${padFront}, ${padTop}, ${padLeft});\n\n void main() {\n ivec5 coords = getOutputCoords();\n int batch = coords.x;\n int ch = coords.u;\n\n ivec3 xCorner = ivec3(coords.y, coords.z, coords.w) * strides - pads;\n int xDCorner = xCorner.x;\n int xRCorner = xCorner.y;\n int xCCorner = xCorner.z;\n\n // max/min x(?, ?, ?, ch) to get y(yD, yR, yC, ch).\n // ? 
= to be determined\n float minMaxValue = 0.0;\n float minMaxValueFound = 0.0;\n int minMaxPosition = 0;\n\n for (int wD = 0; wD < ${effectiveFilterDepth};\n wD += ${dilationDepth}) {\n int xD = xDCorner + wD;\n\n if (xD < 0 || xD >= ${convInfo.inDepth}) {\n continue;\n }\n\n for (int wR = 0; wR < ${effectiveFilterHeight};\n wR += ${dilationHeight}) {\n int xR = xRCorner + wR;\n\n if (xR < 0 || xR >= ${convInfo.inHeight}) {\n continue;\n }\n\n for (int wC = 0; wC < ${effectiveFilterWidth};\n wC += ${dilationWidth}) {\n int xC = xCCorner + wC;\n\n if (xC < 0 || xC >= ${convInfo.inWidth}) {\n continue;\n }\n\n float value = getX(batch, xD, xR, xC, ch);\n\n // If a min / max value has already been found, use it. If not,\n // use the current value.\n float currMinMaxValue = mix(\n value, minMaxValue, minMaxValueFound);\n if (value ${compareOp} currMinMaxValue) {\n minMaxValue = value;\n minMaxValueFound = 1.0;\n minMaxPosition = ${flattenPositions ?\n (includeBatchInIndex ?\n `(((batch * ${convInfo.inDepth} + xD) * ${convInfo.inHeight} + xR) * ${convInfo.inWidth} + xC) * ${convInfo.inChannels} + ch` :\n `((xD * ${convInfo.inHeight} + xR) * ${convInfo.inWidth} + xC) * ${convInfo.inChannels} + ch`) :\n `wD * ${effectiveFilterHeight} * ${effectiveFilterWidth} +\n wR * ${effectiveFilterWidth} + wC`};\n }\n }\n }\n }\n setOutput(float(minMaxPosition));\n }\n `;\n return;\n }\n const compareOp = 'max';\n let returnValue = `${poolType}(${poolType}(${poolType}(` +\n 'minMaxValue[0], minMaxValue[1]), minMaxValue[2]), minMaxValue[3])';\n if (poolType === 'avg') {\n returnValue = `avgValue / count`;\n }\n const filterWidthNearestVec4 = Math.floor(filterWidth / 4) * 4;\n const filterWidthVec4Remainder = filterWidth % 4;\n const updateSnippet = `\n if (${isAvgPool}) {\n avgValue += dot(values, ones);\n } else {\n minMaxValue = ${compareOp}(values, minMaxValue);\n }\n `;\n this.userCode = `\n const ivec3 strides =\n ivec3(${strideDepth}, ${strideHeight}, ${strideWidth});\n const ivec3 pads = ivec3(${padFront}, ${padTop}, ${padLeft});\n const float initializationValue = ${initializationValue};\n const vec4 ones = vec4(1.0, 1.0, 1.0, 1.0);\n\n float count = 0.0;\n\n float getValue(int batch, int xD, int xR, int xC, int ch) {\n if (xC < 0 || xC >= ${convInfo.inWidth}) {\n return initializationValue;\n }\n count += 1.0;\n return getX(batch, xD, xR, xC, ch);\n }\n\n void main() {\n ivec5 coords = getOutputCoords();\n int batch = coords.x;\n int ch = coords.u;\n\n ivec3 xCorner = ivec3(coords.y, coords.z, coords.w) * strides - pads;\n int xDCorner = xCorner.x;\n int xRCorner = xCorner.y;\n int xCCorner = xCorner.z;\n\n // max/min x(?, ?, ?, d) to get y(yD, yR, yC, ch).\n // ? 
= to be determined\n vec4 minMaxValue = vec4(${initializationValue});\n float avgValue = 0.0;\n count = 0.0;\n\n for (int wD = 0; wD < ${effectiveFilterDepth};\n wD += ${dilationDepth}) {\n int xD = xDCorner + wD;\n\n if (xD < 0 || xD >= ${convInfo.inDepth}) {\n continue;\n }\n\n for (int wR = 0; wR < ${effectiveFilterHeight};\n wR += ${dilationHeight}) {\n int xR = xRCorner + wR;\n\n if (xR < 0 || xR >= ${convInfo.inHeight}) {\n continue;\n }\n\n for (int wC = 0; wC < ${filterWidthNearestVec4}; wC += 4) {\n int xC = xCCorner + wC * ${dilationWidth};\n\n vec4 values = vec4(\n getValue(batch, xD, xR, xC, ch),\n getValue(batch, xD, xR, xC + ${dilationWidth}, ch),\n getValue(batch, xD, xR, xC + 2 * ${dilationWidth}, ch),\n getValue(batch, xD, xR, xC + 3 * ${dilationWidth}, ch)\n );\n\n ${updateSnippet}\n }\n\n int xC = xCCorner + ${filterWidthNearestVec4};\n if (${filterWidthVec4Remainder === 1}) {\n vec4 values = vec4(\n getValue(batch, xD, xR, xC, ch),\n initializationValue,\n initializationValue,\n initializationValue\n );\n\n ${updateSnippet}\n } else if (${filterWidthVec4Remainder === 2}) {\n vec4 values = vec4(\n getValue(batch, xD, xR, xC, ch),\n getValue(batch, xD, xR, xC + ${dilationWidth}, ch),\n initializationValue,\n initializationValue\n );\n\n ${updateSnippet}\n } else if (${filterWidthVec4Remainder === 3}) {\n vec4 values = vec4(\n getValue(batch, xD, xR, xC, ch),\n getValue(batch, xD, xR, xC + ${dilationWidth}, ch),\n getValue(batch, xD, xR, xC + 2 * ${dilationWidth}, ch),\n initializationValue\n );\n\n ${updateSnippet}\n }\n }\n setOutput(${returnValue});\n }\n }\n `;\n }\n}\n//# sourceMappingURL=pool_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class ReduceProgram {\n constructor(reduceInfo, reduceType) {\n this.variableNames = ['x'];\n const { windowSize, batchSize, inSize, outSize } = reduceInfo;\n this.outputShape = [batchSize, outSize];\n let initializationValue = '0.0';\n let compareOp = ``;\n if (reduceType === 'prod') {\n initializationValue = '1.0';\n }\n else if (reduceType === 'min') {\n // WebGL on Firefox Linux can't compile 1/0 so we do 1/eps.\n initializationValue = '1.0 / 1e-20';\n compareOp = `min`;\n }\n else if (reduceType === 'max') {\n // WebGL on Firefox Linux can't compile 1/0 so we do 1/eps.\n initializationValue = '-1.0 / 1e-20';\n compareOp = `max`;\n }\n let returnValue = `${reduceType}(${reduceType}(${reduceType}(` +\n 'minMaxValue[0], minMaxValue[1]), minMaxValue[2]), minMaxValue[3])';\n if (reduceType === 'sum') {\n returnValue = `sumValue`;\n }\n else if (reduceType === 'prod') {\n returnValue = `prodValue`;\n }\n else if (reduceType === 'all') {\n returnValue = `allValue`;\n }\n else if (reduceType === 'any') {\n returnValue = `anyValue`;\n }\n const windowSizeNearestVec4 = Math.floor(windowSize / 4) * 4;\n const windowSizeVec4Remainder = windowSize % 4;\n 
let updateSnippet = `\n if (${reduceType === 'sum'}) {\n sumValue += dot(values, ones);\n } else if (${reduceType === 'prod'}) {\n vec2 tmp = vec2(values[0], values[1]) * vec2(values[2], values[3]);\n prodValue *= tmp[0] * tmp[1];\n } else {\n minMaxValue = ${compareOp}(values, minMaxValue);\n }\n `;\n let vecType = `vec4`;\n if (reduceType === 'all') {\n initializationValue = '1.0';\n updateSnippet = `\n bool reducedAllValue = all(values);\n float floatedReducedAllValue = float(reducedAllValue);\n allValue = float(allValue >= 1.0 && floatedReducedAllValue >= 1.0);\n `;\n vecType = `bvec4`;\n }\n else if (reduceType === 'any') {\n initializationValue = '0.0';\n updateSnippet = `\n bool reducedAnyValue = any(values);\n float floatedReducedAnyValue = float(reducedAnyValue);\n anyValue = float(anyValue >= 1.0 || floatedReducedAnyValue >= 1.0);\n `;\n vecType = `bvec4`;\n }\n let checkOutOfBounds = '';\n if (inSize % windowSize > 0) {\n checkOutOfBounds = `\n if (inIdx < 0 || inIdx >= ${inSize}) {\n return initializationValue;\n }\n `;\n }\n this.userCode = `\n const float initializationValue = ${initializationValue};\n const vec4 ones = vec4(1.0, 1.0, 1.0, 1.0);\n\n float getValue(int batch, int inIdx) {\n ${checkOutOfBounds}\n return getX(batch, inIdx);\n }\n\n void main() {\n ivec2 coords = getOutputCoords();\n int batch = coords[0];\n int outIdx = coords[1];\n int inOffset = outIdx * ${windowSize};\n\n vec4 minMaxValue = vec4(${initializationValue});\n float prodValue = 1.0;\n float sumValue = 0.0;\n float allValue = 1.0;\n float anyValue = 0.0;\n\n for (int i = 0; i < ${windowSizeNearestVec4}; i += 4) {\n int inIdx = inOffset + i;\n ${vecType} values = ${vecType}(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1),\n getValue(batch, inIdx + 2),\n getValue(batch, inIdx + 3)\n );\n\n ${updateSnippet}\n }\n\n int inIdx = inOffset + ${windowSizeNearestVec4};\n if (${windowSizeVec4Remainder === 1}) {\n ${vecType} values = ${vecType}(\n getValue(batch, inIdx),\n initializationValue,\n initializationValue,\n initializationValue\n );\n\n ${updateSnippet}\n } else if (${windowSizeVec4Remainder === 2}) {\n ${vecType} values = ${vecType}(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1),\n initializationValue,\n initializationValue\n );\n\n ${updateSnippet}\n } else if (${windowSizeVec4Remainder === 3}) {\n ${vecType} values = ${vecType}(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1),\n getValue(batch, inIdx + 2),\n initializationValue\n );\n\n ${updateSnippet}\n }\n setOutput(${returnValue});\n }\n `;\n }\n}\n//# sourceMappingURL=reduce_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as shader_util from './shader_compiler_util';\nexport class ReshapePackedProgram {\n constructor(outputShape, inputShape) {\n this.variableNames = ['A'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = outputShape;\n let mainLoop = ``;\n for (let i = 0; i < 4; i++) {\n let thisRC = `thisRC = rc;`;\n if (i % 2 === 1) {\n thisRC += `thisRC.z += 1;`;\n }\n if (i > 1) {\n thisRC += `thisRC.y += 1;`;\n }\n mainLoop += `\n ${thisRC}\n ${i > 0 ? `if(thisRC.y < rows && thisRC.z < cols){` : ''}\n int flatIndex = getFlatIndex(thisRC);\n\n ivec3 inputRC = inputCoordsFromReshapedOutCoords(flatIndex);\n vec2 inputRCInnerDims = vec2(float(inputRC.y),float(inputRC.z));\n\n result[${i}] =\n getChannel(getA(inputRC.x, inputRC.y, inputRC.z), inputRCInnerDims);\n ${i > 0 ? '}' : ''}\n `;\n }\n this.userCode = `\n ${getReshapedInputCoords(inputShape)}\n ${shader_util.getFlatIndexFrom3D(outputShape)}\n\n void main() {\n ivec3 rc = getOutputCoords();\n\n vec4 result = vec4(0.);\n\n ivec3 thisRC;\n int rows = ${outputShape[1]};\n int cols = ${outputShape[2]};\n\n ${mainLoop}\n\n setOutput(result);\n }\n `;\n }\n}\nfunction getReshapedInputCoords(shape) {\n const coordsFromIndexSnippet = shader_util.getLogicalCoordinatesFromFlatIndex(['r', 'c', 'd'], shape);\n return `\n ivec3 inputCoordsFromReshapedOutCoords(int index) {\n ${coordsFromIndexSnippet}\n return ivec3(r, c, d);\n }\n `;\n}\n//# sourceMappingURL=reshape_packed_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class ResizeBilinearBackpropProgram {\n constructor(dy, x, alignCorners) {\n this.variableNames = ['dy'];\n this.outputShape = [];\n this.outputShape = x.shape;\n const [, xHeight, xWidth,] = x.shape;\n const [, yHeight, yWidth] = dy.shape;\n // In the backwards pass, we want to find the pixels that were generated for\n // each pixel in the input image the forward pass and add the corresponding\n // coefficient from dy to the gradient (with some interpolation).\n const effectiveXSize = [\n (alignCorners && yHeight > 1) ? xHeight - 1 : xHeight,\n (alignCorners && yWidth > 1) ? xWidth - 1 : xWidth\n ];\n const effectiveYSize = [\n (alignCorners && yHeight > 1) ? 
yHeight - 1 : yHeight,\n (alignCorners && yWidth > 1) ? yWidth - 1 : yWidth\n ];\n const heightScale = effectiveXSize[0] / effectiveYSize[0];\n const widthScale = effectiveXSize[1] / effectiveYSize[1];\n const invHeightScale = 1 / heightScale;\n const invWidthScale = 1 / widthScale;\n // This defines the size of the window of values around a particular\n // index in dy that we want to search for contributions to dx.\n const winHeight = (Math.ceil(invHeightScale) * 2) + 2;\n const winWidth = (Math.ceil(invWidthScale) * 2) + 2;\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int d = coords[3];\n int r = coords[1];\n int c = coords[2];\n\n float accumulator = 0.0;\n\n const float heightScale = float(${heightScale});\n const float widthScale = float(${widthScale});\n\n const float invHeightScale = float(${invHeightScale});\n const float invWidthScale = float(${invWidthScale});\n\n const int winHeight = int(${winHeight});\n const int winWidth = int(${winWidth});\n\n // Compute bounds for where in dy we will look\n float startRLerp = floor(float(r) * invHeightScale);\n int startDyR = int(startRLerp - float(winHeight / 2));\n\n float startCLerp = floor(float(c) * invWidthScale);\n int startDyC = int(startCLerp - float(winWidth / 2));\n\n // Loop over dy\n for (int dyROffset = 0; dyROffset < winHeight; dyROffset++) {\n int dyR = dyROffset + startDyR;\n\n // Guard against the window exceeding the bounds of dy\n if (dyR < 0 || dyR >= ${yHeight}) {\n continue;\n }\n\n for (int dyCOffset = 0; dyCOffset < winWidth; dyCOffset++) {\n int dyC = dyCOffset + startDyC;\n\n // Guard against the window exceeding the bounds of dy\n if (dyC < 0 || dyC >= ${yWidth}) {\n continue;\n }\n\n float dxR = float(dyR) * heightScale;\n int topDxRIndex = int(floor(dxR));\n int bottomDxRIndex = int(min(ceil(dxR), ${xHeight - 1}.0));\n float dxRLerp = dxR - float(topDxRIndex);\n float inverseDxRLerp = 1.0 - dxRLerp;\n\n float dxC = float(dyC) * widthScale;\n int leftDxCIndex = int(floor(dxC));\n int rightDxCIndex = int(min(ceil(dxC), ${xWidth - 1}.0));\n float dxCLerp = dxC - float(leftDxCIndex);\n float inverseDxCLerp = 1.0 - dxCLerp;\n\n if (r == topDxRIndex && c == leftDxCIndex) {\n // topLeft\n accumulator +=\n getDy(b, dyR, dyC, d) * inverseDxRLerp * inverseDxCLerp;\n }\n\n if (r == topDxRIndex && c == rightDxCIndex) {\n // topRight\n accumulator += getDy(b, dyR, dyC, d) * inverseDxRLerp * dxCLerp;\n }\n\n if (r == bottomDxRIndex && c == leftDxCIndex) {\n // bottomLeft\n accumulator += getDy(b, dyR, dyC, d) * dxRLerp * inverseDxCLerp;\n }\n\n if (r == bottomDxRIndex && c == rightDxCIndex) {\n // bottomRight\n accumulator += getDy(b, dyR, dyC, d) * dxRLerp * dxCLerp;\n }\n }\n }\n // End loop over dy\n\n setOutput(accumulator);\n }\n `;\n }\n}\n//# sourceMappingURL=resize_bilinear_backprop_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class ResizeBilinearProgram {\n constructor(inputShape, newHeight, newWidth, alignCorners) {\n this.variableNames = ['A'];\n this.outputShape = [];\n const [batch, oldHeight, oldWidth, depth] = inputShape;\n this.outputShape = [batch, newHeight, newWidth, depth];\n const effectiveInSize = [\n (alignCorners && newHeight > 1) ? oldHeight - 1 : oldHeight,\n (alignCorners && newWidth > 1) ? oldWidth - 1 : oldWidth\n ];\n const effectiveOutSize = [\n (alignCorners && newHeight > 1) ? newHeight - 1 : newHeight,\n (alignCorners && newWidth > 1) ? newWidth - 1 : newWidth\n ];\n this.userCode = `\n const vec2 effectiveInputOverOutputRatioRC = vec2(\n ${effectiveInSize[0] / effectiveOutSize[0]},\n ${effectiveInSize[1] / effectiveOutSize[1]});\n const vec2 inputShapeRC = vec2(${oldHeight}.0, ${oldWidth}.0);\n\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int d = coords[3];\n ivec2 yRC = coords.yz;\n\n // Fractional source index.\n vec2 sourceFracIndexRC = vec2(yRC) * effectiveInputOverOutputRatioRC;\n\n // Compute the four integer indices.\n ivec2 sourceFloorRC = ivec2(sourceFracIndexRC);\n ivec2 sourceCeilRC = ivec2(\n min(inputShapeRC - 1.0, ceil(sourceFracIndexRC)));\n\n float topLeft = getA(b, sourceFloorRC.x, sourceFloorRC.y, d);\n float bottomLeft = getA(b, sourceCeilRC.x, sourceFloorRC.y, d);\n float topRight = getA(b, sourceFloorRC.x, sourceCeilRC.y, d);\n float bottomRight = getA(b, sourceCeilRC.x, sourceCeilRC.y, d);\n\n vec2 fracRC = sourceFracIndexRC - vec2(sourceFloorRC);\n\n float top = topLeft + (topRight - topLeft) * fracRC.y;\n float bottom = bottomLeft + (bottomRight - bottomLeft) * fracRC.y;\n float newValue = top + (bottom - top) * fracRC.x;\n\n setOutput(newValue);\n }\n `;\n }\n}\n//# sourceMappingURL=resize_bilinear_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class ResizeBilinearPackedProgram {\n constructor(inputShape, newHeight, newWidth, alignCorners) {\n this.variableNames = ['A'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = [];\n const [batch, oldHeight, oldWidth, depth] = inputShape;\n this.outputShape = [batch, newHeight, newWidth, depth];\n const effectiveInSize = [\n (alignCorners && newHeight > 1) ? oldHeight - 1 : oldHeight,\n (alignCorners && newWidth > 1) ? oldWidth - 1 : oldWidth\n ];\n const effectiveOutSize = [\n (alignCorners && newHeight > 1) ? newHeight - 1 : newHeight,\n (alignCorners && newWidth > 1) ? newWidth - 1 : newWidth\n ];\n this.userCode = `\n const vec3 effectiveInputOverOutputRatioRC = vec3(\n ${effectiveInSize[0] / effectiveOutSize[0]},\n ${effectiveInSize[1] / effectiveOutSize[1]},\n ${effectiveInSize[1] / effectiveOutSize[1]});\n const vec3 inputShapeRC = vec3(${oldHeight}.0, ${oldWidth}.0,\n ${oldWidth}.0);\n\n float getAValue(int b, int r, int c, int d) {\n return getChannel(getA(b, r, c, d), vec2(c, d));\n }\n\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int d = coords[3];\n // Calculate values for next column in yRC.z.\n ivec3 yRC = coords.yzz + ivec3(0, 0, 1);\n\n // Fractional source index.\n vec3 sourceFracIndexRC = vec3(yRC) * effectiveInputOverOutputRatioRC;\n\n // Compute the four integer indices.\n ivec3 sourceFloorRC = ivec3(sourceFracIndexRC);\n ivec3 sourceCeilRC = ivec3(\n min(inputShapeRC - 1.0, ceil(sourceFracIndexRC)));\n\n // Should we calculate next column and row elements in 2x2 packed cell.\n bool hasNextCol = d < ${depth - 1};\n bool hasNextRow = coords.z < ${newWidth - 1};\n\n // In parallel, construct four corners for all four components in\n // packed 2x2 cell.\n vec4 topLeft = vec4(\n getAValue(b, sourceFloorRC.x, sourceFloorRC.y, d),\n hasNextCol ? getAValue(b, sourceFloorRC.x, sourceFloorRC.y, d + 1)\n : 0.0,\n hasNextRow ? getAValue(b, sourceFloorRC.x, sourceFloorRC.z, d)\n : 0.0,\n (hasNextRow && hasNextCol) ?\n getAValue(b, sourceFloorRC.x, sourceFloorRC.z, d + 1) : 0.0);\n\n vec4 bottomLeft = vec4(\n getAValue(b, sourceCeilRC.x, sourceFloorRC.y, d),\n hasNextCol ? getAValue(b, sourceCeilRC.x, sourceFloorRC.y, d + 1)\n : 0.0,\n hasNextRow ? getAValue(b, sourceCeilRC.x, sourceFloorRC.z, d)\n : 0.0,\n (hasNextRow && hasNextCol) ?\n getAValue(b, sourceCeilRC.x, sourceFloorRC.z, d + 1) : 0.0);\n\n vec4 topRight = vec4(\n getAValue(b, sourceFloorRC.x, sourceCeilRC.y, d),\n hasNextCol ? getAValue(b, sourceFloorRC.x, sourceCeilRC.y, d + 1)\n : 0.0,\n hasNextRow ? getAValue(b, sourceFloorRC.x, sourceCeilRC.z, d)\n : 0.0,\n (hasNextRow && hasNextCol) ?\n getAValue(b, sourceFloorRC.x, sourceCeilRC.z, d + 1) : 0.0);\n\n vec4 bottomRight = vec4(\n getAValue(b, sourceCeilRC.x, sourceCeilRC.y, d),\n hasNextCol ? 
getAValue(b, sourceCeilRC.x, sourceCeilRC.y, d + 1)\n : 0.0,\n hasNextRow ? getAValue(b, sourceCeilRC.x, sourceCeilRC.z, d)\n : 0.0,\n (hasNextRow && hasNextCol) ?\n getAValue(b, sourceCeilRC.x, sourceCeilRC.z, d + 1) : 0.0);\n\n vec3 fracRC = sourceFracIndexRC - vec3(sourceFloorRC);\n\n vec4 top = mix(topLeft, topRight, fracRC.yyzz);\n vec4 bottom = mix(bottomLeft, bottomRight, fracRC.yyzz);\n vec4 newValue = mix(top, bottom, fracRC.x);\n\n setOutput(newValue);\n }\n `;\n }\n}\n//# sourceMappingURL=resize_bilinear_packed_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class ResizeNearestNeigborBackpropProgram {\n constructor(dy, x, alignCorners) {\n this.variableNames = ['dy'];\n this.outputShape = [];\n this.outputShape = x.shape;\n const [, xHeight, xWidth,] = x.shape;\n const [, yHeight, yWidth] = dy.shape;\n // In the backwards pass, we want to find the pixels that were generated for\n // each pixel in the input image the forward pass and add the corresponding\n // coefficient from dy to the gradient (with some interpolation).\n const effectiveXSize = [\n (alignCorners && yHeight > 1) ? xHeight - 1 : xHeight,\n (alignCorners && yWidth > 1) ? xWidth - 1 : xWidth\n ];\n const effectiveYSize = [\n (alignCorners && yHeight > 1) ? yHeight - 1 : yHeight,\n (alignCorners && yWidth > 1) ? 
yWidth - 1 : yWidth\n ];\n const heightScale = effectiveXSize[0] / effectiveYSize[0];\n const widthScale = effectiveXSize[1] / effectiveYSize[1];\n const invHeightScale = 1 / heightScale;\n const invWidthScale = 1 / widthScale;\n // This defines the size of the window of values around a particular\n // index in dy that we want to search for contributions to dx.\n const winHeight = (Math.ceil(invHeightScale) * 2) + 2;\n const winWidth = (Math.ceil(invWidthScale) * 2) + 2;\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int d = coords[3];\n int r = coords[1];\n int c = coords[2];\n\n float accumulator = 0.0;\n\n const float heightScale = float(${heightScale});\n const float widthScale = float(${widthScale});\n\n const float invHeightScale = float(${invHeightScale});\n const float invWidthScale = float(${invWidthScale});\n\n const int winHeight = int(${winHeight});\n const int winWidth = int(${winWidth});\n\n // Compute bounds for where in dy we will look\n float startRLerp = floor(float(r) * invHeightScale);\n int startDyR = int(floor(startRLerp - float(winHeight / 2)));\n\n float startCLerp = floor(float(c) * invWidthScale);\n int startDyC = int(floor(startCLerp - float(winWidth / 2)));\n\n // Loop over dy\n for (int dyROffset = 0; dyROffset < winHeight; dyROffset++) {\n int dyR = dyROffset + startDyR;\n\n // Guard against the window exceeding the bounds of dy\n if (dyR < 0 || dyR >= ${yHeight}) {\n continue;\n }\n\n for (int dyCOffset = 0; dyCOffset < winWidth; dyCOffset++) {\n int dyC = dyCOffset + startDyC;\n\n // Guard against the window exceeding the bounds of dy\n if (dyC < 0 || dyC >= ${yWidth}) {\n continue;\n }\n\n float sourceFracRow =\n float(${effectiveXSize[0]}) *\n (float(dyR) / float(${effectiveYSize[0]}));\n\n float sourceFracCol =\n float(${effectiveXSize[1]}) *\n (float(dyC) / float(${effectiveYSize[1]}));\n\n int sourceNearestRow = int(min(\n float(int(${xHeight}) - 1),\n ${alignCorners} ? float(round(sourceFracRow)) :\n float(floor(sourceFracRow))));\n\n int sourceNearestCol = int(min(\n float(int(${xWidth}) - 1),\n ${alignCorners} ? float(round(sourceFracCol)) :\n float(floor(sourceFracCol))));\n\n if (r == sourceNearestRow && c == sourceNearestCol) {\n accumulator += getDy(b, dyR, dyC, d);\n }\n }\n }\n // End loop over dy\n\n setOutput(accumulator);\n }\n `;\n }\n}\n//# sourceMappingURL=resize_nearest_neighbor_backprop_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class ResizeNearestNeighborProgram {\n constructor(inputShape, newHeight, newWidth, alignCorners) {\n this.variableNames = ['A'];\n this.outputShape = [];\n const [batch, oldHeight, oldWidth, depth] = inputShape;\n this.outputShape = [batch, newHeight, newWidth, depth];\n const effectiveInSize = [\n (alignCorners && newHeight > 1) ? 
oldHeight - 1 : oldHeight,\n (alignCorners && newWidth > 1) ? oldWidth - 1 : oldWidth\n ];\n const effectiveOutSize = [\n (alignCorners && newHeight > 1) ? newHeight - 1 : newHeight,\n (alignCorners && newWidth > 1) ? newWidth - 1 : newWidth\n ];\n // When align corners is false, we rounds the value with floor.\n const roundBase = alignCorners ? '0.5' : '0.0';\n this.userCode = `\n const vec2 effectiveInputOverOutputRatioRC = vec2(\n ${effectiveInSize[0] / effectiveOutSize[0]},\n ${effectiveInSize[1] / effectiveOutSize[1]});\n const vec2 inputShapeRC = vec2(${oldHeight}.0, ${oldWidth}.0);\n\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int d = coords[3];\n ivec2 yRC = coords.yz;\n\n // Fractional source index.\n vec2 sourceFracIndexRC = vec2(yRC) * effectiveInputOverOutputRatioRC;\n\n // Compute the coordinators of nearest neighbor point.\n ivec2 sourceNearestRC = ivec2(\n min(inputShapeRC - 1.0, floor(sourceFracIndexRC + ${roundBase})));\n\n float newValue = getA(b, sourceNearestRC.x, sourceNearestRC.y, d);\n\n setOutput(newValue);\n }\n `;\n }\n}\n//# sourceMappingURL=resize_nearest_neighbor_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getCoordsDataType } from './shader_compiler';\nexport class ReverseProgram {\n constructor(xShape, axis) {\n this.variableNames = ['x'];\n const rank = xShape.length;\n if (rank > 4) {\n throw new Error(`WebGL backend: Reverse of rank-${rank} tensor is not yet supported`);\n }\n this.outputShape = xShape;\n if (rank === 1) {\n this.userCode = `\n void main() {\n int coord = getOutputCoords();\n setOutput(getX(${xShape[0]} - coord - 1));\n }\n `;\n return;\n }\n const getInCoord = (i) => {\n if (axis.indexOf(i) !== -1 && xShape[i] !== 1) {\n return `${xShape[i]} - coords[${i}] - 1`;\n }\n return `coords[${i}]`;\n };\n const inCoords = xShape.map((_, i) => getInCoord(i)).join(',');\n const type = getCoordsDataType(rank);\n this.userCode = `\n void main() {\n ${type} coords = getOutputCoords();\n setOutput(getX(${inCoords}));\n }\n `;\n }\n}\n//# sourceMappingURL=reverse_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getChannels } from './packing_util';\nimport { getCoordsDataType } from './shader_compiler';\nexport class ReversePackedProgram {\n constructor(xShape, axis) {\n this.variableNames = ['x'];\n this.packedInputs = true;\n this.packedOutput = true;\n const rank = xShape.length;\n if (rank > 4) {\n throw new Error(`WebGL backend: Reverse of rank-${rank} tensor is not yet supported`);\n }\n this.outputShape = xShape;\n const channels = getChannels('rc', rank);\n const nextColumn = `${channels[rank - 1]} + 1 < ${this.outputShape[rank - 1]}`;\n const nextRow = `${channels[rank - 2]} + 1 < ${this.outputShape[rank - 2]}`;\n const type = getCoordsDataType(rank);\n if (rank === 1) {\n this.userCode = `\n void main(){\n int rc = getOutputCoords();\n vec4 result = vec4(0.);\n result.r = getChannel(getX(${xShape[0]} - rc - 1),\n ${xShape[0]} - rc - 1);\n if(${nextColumn}){\n result.g = getChannel(getX(${xShape[0]} - (rc + 1) - 1),\n ${xShape[0]} - (rc + 1) - 1);\n }\n setOutput(result);\n }\n `;\n }\n else {\n this.userCode = `\n void main() {\n ${type} rc = getOutputCoords();\n vec4 result = vec4(0.);\n result.r = ${getR(channels.slice())};\n if(${nextColumn}){\n result.g = ${getG(channels.slice())};\n }\n if(${nextRow}) {\n result.b = ${getB(channels.slice())};\n if(${nextColumn}) {\n result.a = ${getA(channels.slice())};\n }\n }\n setOutput(result);\n }\n `;\n }\n function getR(channels) {\n return getChannel(channels);\n }\n function getG(channels) {\n channels[rank - 1] = '(' + channels[rank - 1] + ` + 1)`;\n return getChannel(channels);\n }\n function getB(channels) {\n channels[rank - 2] = '(' + channels[rank - 2] + ` + 1)`;\n return getChannel(channels);\n }\n function getA(channels) {\n channels[rank - 1] = '(' + channels[rank - 1] + ` + 1)`;\n channels[rank - 2] = '(' + channels[rank - 2] + ` + 1)`;\n return getChannel(channels);\n }\n function getChannel(channels) {\n const inCoordsArray = xShape.map((_, i) => getInCoord(i, channels));\n const inCoords = inCoordsArray.join(',');\n const innerDims = inCoordsArray.slice(-2).join(',');\n return `getChannel(getX(${inCoords}), vec2(${innerDims}))`;\n }\n function getInCoord(i, channels1) {\n if (axis.indexOf(i) !== -1 && xShape[i] !== 1) {\n return `${xShape[i]} - ${channels1[i]} - 1`;\n }\n else {\n return `${channels1[i]}`;\n }\n }\n }\n}\n//# sourceMappingURL=reverse_packed_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getCoordsDataType } from './shader_compiler';\nexport class ScatterProgram {\n constructor(updateSize, sliceDim, indicesRank, updatesRank, strides, shape, summingDupeIndex = true) {\n this.variableNames = ['updates', 'indices', 'defaultValue'];\n this.outputShape = shape;\n const stridesType = getCoordsDataType(strides.length);\n const dtype = getCoordsDataType(shape.length);\n let indicesString = '';\n if (indicesRank === 1) {\n indicesString = 'i';\n }\n else if (indicesRank === 2) {\n indicesString = 'i, j';\n }\n const indicesSnippet = `getIndices(${indicesString})`;\n let updatesString = '';\n if (updatesRank === 1) {\n updatesString = 'i';\n }\n else if (updatesRank === 2) {\n updatesString = 'i, coords[1]';\n }\n const updatesSnippet = `getUpdates(${updatesString})`;\n const strideString = sliceDim > 1 ? 'strides[j]' : 'strides';\n this.userCode = `\n ${stridesType} strides = ${stridesType}(${strides});\n\n void main() {\n ${dtype} coords = getOutputCoords();\n float sum = 0.0;\n bool found = false;\n for (int i = 0; i < ${updateSize}; i++) {\n int flattenedIndex = 0;\n for (int j = 0; j < ${sliceDim}; j++) {\n int index = round(${indicesSnippet});\n flattenedIndex += index * ${strideString};\n }\n if (flattenedIndex == coords[0]) {\n sum += ${updatesSnippet};\n found = true;\n }\n }\n setOutput(mix(getDefaultValue(), sum, float(found)));\n }\n `;\n }\n}\n//# sourceMappingURL=scatter_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class SegmentOpProgram {\n constructor(segOpInfo, segOpType) {\n this.variableNames = ['x', 'segmentIds'];\n const windowSize = segOpInfo.windowSize;\n const batchSize = segOpInfo.batchSize;\n const inSize = segOpInfo.inSize;\n const numSegments = segOpInfo.numSegments;\n const outSize = numSegments * Math.ceil(inSize / windowSize);\n this.outputShape = [batchSize, outSize];\n const initializationValue = '0.0';\n const returnValue = `sumValue`;\n const windowSizeNearestVec4 = Math.floor(windowSize / 4) * 4;\n const windowSizeVec4Remainder = windowSize % 4;\n const updateSnippet = `\n sumValue += dot(values, segFilter);\n `;\n let checkValueOutOfBounds = '';\n if (inSize % windowSize > 0) {\n checkValueOutOfBounds = `\n if (inIdx < 0 || inIdx >= ${inSize}) {\n return initializationValue;\n }\n `;\n }\n let checkSegmentIdOutOfBounds = '';\n if (inSize % windowSize > 0) {\n checkSegmentIdOutOfBounds = `\n if (inIdx < 0 || inIdx >= ${inSize}) {\n return -1.0;\n }\n `;\n }\n this.userCode = `\n const float initializationValue = ${initializationValue};\n\n float getValue(int batch, int inIdx) {\n ${checkValueOutOfBounds}\n return getX(batch, inIdx);\n }\n\n float getSegmentIdAtIndex(int inIdx) {\n ${checkSegmentIdOutOfBounds}\n return getSegmentIds(inIdx);\n }\n\n void main() {\n ivec2 coords = getOutputCoords();\n int batch = coords[0];\n int outIdx = coords[1];\n int inOffset = int(floor(float(outIdx) / float(\n ${numSegments})) * float(${windowSize}));\n int currentSeg = int(mod(float(outIdx), float(${numSegments})));\n\n float sumValue = 0.0;\n\n for (int i = 0; i < ${windowSizeNearestVec4}; i += 4) {\n int inIdx = inOffset + i;\n vec4 values = vec4(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1),\n getValue(batch, inIdx + 2),\n getValue(batch, inIdx + 3)\n );\n\n vec4 segFilter = vec4(\n int(getSegmentIdAtIndex(inIdx)) == currentSeg ? 1 : 0,\n int(getSegmentIdAtIndex(inIdx + 1)) == currentSeg ? 1 : 0,\n int(getSegmentIdAtIndex(inIdx + 2)) == currentSeg ? 1 : 0,\n int(getSegmentIdAtIndex(inIdx + 3)) == currentSeg ? 1 : 0\n );\n\n ${updateSnippet}\n }\n\n int inIdx = inOffset + ${windowSizeNearestVec4};\n if (${windowSizeVec4Remainder === 1}) {\n vec4 values = vec4(\n getValue(batch, inIdx),\n initializationValue,\n initializationValue,\n initializationValue\n );\n\n int inIdxSeg = int(getSegmentIdAtIndex(inIdx));\n\n vec4 segFilter = vec4(\n int(getSegmentIdAtIndex(inIdx)) == currentSeg ? 1 : 0,\n 0,\n 0,\n 0\n );\n\n ${updateSnippet}\n } else if (${windowSizeVec4Remainder === 2}) {\n vec4 values = vec4(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1),\n initializationValue,\n initializationValue\n );\n\n vec4 segFilter = vec4(\n int(getSegmentIdAtIndex(inIdx)) == currentSeg ? 1 : 0,\n int(getSegmentIdAtIndex(inIdx + 1)) == currentSeg ? 
1 : 0,\n 0,\n 0\n );\n\n ${updateSnippet}\n } else if (${windowSizeVec4Remainder === 3}) {\n vec4 values = vec4(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1),\n getValue(batch, inIdx + 2),\n initializationValue\n );\n\n vec4 segFilter = vec4(\n int(getSegmentIdAtIndex(inIdx)) == currentSeg ? 1 : 0,\n int(getSegmentIdAtIndex(inIdx + 1)) == currentSeg ? 1 : 0,\n int(getSegmentIdAtIndex(inIdx + 2)) == currentSeg ? 1 : 0,\n 0\n );\n\n ${updateSnippet}\n }\n setOutput(${returnValue});\n }\n `;\n }\n}\n//# sourceMappingURL=segment_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getCoordsDataType } from './shader_compiler';\nexport class SelectProgram {\n constructor(cRank, shape, rank) {\n this.variableNames = ['c', 'a', 'b'];\n this.outputShape = shape;\n let cCoords;\n let abCoords;\n if (rank > 4) {\n throw Error(`Where for rank ${rank} is not yet supported`);\n }\n if (rank === 1) {\n abCoords = `resRC`;\n cCoords = `resRC`;\n }\n else {\n const currentCoords = ['resRC.x', 'resRC.y', 'resRC.z', 'resRC.w'];\n const cCoordVars = [];\n const abCoordVars = [];\n for (let i = 0; i < shape.length; i++) {\n abCoordVars.push(`${currentCoords[i]}`);\n if (i < cRank) {\n cCoordVars.push(`${currentCoords[i]}`);\n }\n }\n cCoords = cCoordVars.join();\n abCoords = abCoordVars.join();\n }\n const dtype = getCoordsDataType(rank);\n this.userCode = `\n void main() {\n ${dtype} resRC = getOutputCoords();\n float cVal = getC(${cCoords});\n if (cVal >= 1.0) {\n setOutput(getA(${abCoords}));\n } else {\n setOutput(getB(${abCoords}));\n }\n }\n `;\n }\n}\n//# sourceMappingURL=select_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getCoordsDataType } from './shader_compiler';\nexport class SliceProgram {\n constructor(destSize) {\n this.variableNames = ['source'];\n this.outputShape = destSize;\n this.rank = destSize.length;\n const dtype = getCoordsDataType(this.rank);\n const uniformPart = `uniform int start[${this.rank}];`;\n const sourceCoords = getCoords(this.rank);\n let body;\n const coordSum = destSize.map((_, i) => {\n return `sourceLoc.${coords[i]} = start[${i}] + coords.${coords[i]};`;\n });\n body = `\n ${dtype} sourceLoc;\n ${dtype} coords = getOutputCoords();\n ${coordSum.join('\\n')}\n `;\n this.userCode = `\n ${uniformPart}\n void main() {\n ${body}\n setOutput(getSource(${sourceCoords}));\n }\n `;\n }\n getCustomSetupFunc(start) {\n if (start.length !== this.rank) {\n throw Error(`The rank (${this.rank}) of the program must match the ` +\n `length of start (${start.length})`);\n }\n return (gpgpu, webGLProgram) => {\n if (this.startLoc == null) {\n this.startLoc = gpgpu.getUniformLocationNoThrow(webGLProgram, 'start');\n if (this.startLoc == null) {\n // This means the compiler has optimized and realized it doesn't need\n // the uniform.\n return;\n }\n }\n gpgpu.gl.uniform1iv(this.startLoc, start);\n };\n }\n}\nconst coords = ['x', 'y', 'z', 'w', 'u', 'v'];\nfunction getCoords(rank) {\n if (rank === 1) {\n return 'sourceLoc';\n }\n else if (rank <= 6) {\n return coords.slice(0, rank).map(x => 'sourceLoc.' + x).join(',');\n }\n else {\n throw Error(`Slicing for rank ${rank} is not yet supported`);\n }\n}\n//# sourceMappingURL=slice_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getChannels } from './packing_util';\nimport { getCoordsDataType } from './shader_compiler';\nexport class SlicePackedProgram {\n constructor(destSize) {\n this.variableNames = ['source'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = destSize;\n this.rank = destSize.length;\n const dtype = getCoordsDataType(this.rank);\n const coords = getChannels('coords', this.rank);\n const sourceLoc = getChannels('sourceLoc', this.rank);\n const innerDims = this.rank === 1 ? 
'sourceLoc' : `vec2(${sourceLoc.slice(-2).join()})`;\n const getChannel = `getChannel(getSource(${sourceLoc.join()}), ${innerDims})`;\n const upperRow = `\n result.x = ${getChannel};\n if (++${coords[this.rank - 1]} < ${destSize[this.rank - 1]}) {\n ++${sourceLoc[this.rank - 1]};\n result.y = ${getChannel};\n --${sourceLoc[this.rank - 1]};\n }\n `;\n const lowerRow = this.rank === 1 ? '' : `\n --${coords[this.rank - 1]};\n if (++${coords[this.rank - 2]} < ${destSize[this.rank - 2]}) {\n ++${sourceLoc[this.rank - 2]};\n result.z = ${getChannel};\n if (++${coords[this.rank - 1]} < ${destSize[this.rank - 1]}) {\n ++${sourceLoc[this.rank - 1]};\n result.w = ${getChannel};\n }\n }\n `;\n const sourceLocSetup = this.rank <= 4 ?\n `sourceLoc = coords +\n ${dtype}(${destSize.map((_, i) => `start[${i}]`).join()});` :\n destSize.map((_, i) => `${sourceLoc[i]} = ${coords[i]} + start[${i}];`)\n .join('\\n');\n this.userCode = `\n uniform int start[${this.rank}];\n void main() {\n ${dtype} coords = getOutputCoords();\n ${dtype} sourceLoc;\n ${sourceLocSetup}\n vec4 result = vec4(0.);\n ${upperRow}\n ${lowerRow}\n setOutput(result);\n }\n `;\n }\n getCustomSetupFunc(start) {\n if (start.length !== this.rank) {\n throw Error(`The rank (${this.rank}) of the program must match the ` +\n `length of start (${start.length})`);\n }\n return (gpgpu, webGLProgram) => {\n if (this.startLoc == null) {\n this.startLoc = gpgpu.getUniformLocationNoThrow(webGLProgram, 'start');\n if (this.startLoc == null) {\n // This means the compiler has optimized and realized it doesn't need\n // the uniform.\n return;\n }\n }\n gpgpu.gl.uniform1iv(this.startLoc, start);\n };\n }\n}\n//# sourceMappingURL=slice_packed_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getCoordsDataType } from './shader_compiler';\nexport class StridedSliceProgram {\n constructor(begin, strides, size) {\n this.variableNames = ['x'];\n this.outputShape = size;\n const rank = size.length;\n const inputDtype = getCoordsDataType(size.length);\n const dtype = getCoordsDataType(size.length);\n let newCoords = '';\n if (rank === 1) {\n newCoords = 'coords * strides + begin';\n }\n else {\n let outputAxis = 0;\n newCoords =\n size.map((_, i) => {\n outputAxis++;\n return size.length === 1 ?\n `coords * strides[${i}] + begin[${i}]` :\n `coords[${outputAxis - 1}] * strides[${i}] + begin[${i}]`;\n })\n .join(',');\n }\n this.userCode = `\n ${inputDtype} begin = ${inputDtype}(${begin});\n ${inputDtype} strides = ${inputDtype}(${strides});\n\n void main() {\n ${dtype} coords = getOutputCoords();\n setOutput(getX(${newCoords}));\n }\n `;\n }\n}\n//# sourceMappingURL=strided_slice_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from '@tensorflow/tfjs-core';\nimport { getInternalFormatForFloat16MatrixTexture, getInternalFormatForFloat16PackedMatrixTexture, getInternalFormatForFloat32MatrixTexture, getInternalFormatForPackedMatrixTexture, getInternalFormatForUnsignedBytesMatrixTexture } from './gpgpu_util';\nimport { getPackedMatrixTextureShapeWidthHeight, getUnpackedMatrixTextureShapeWidthHeight, PhysicalTextureType, TextureUsage } from './tex_util';\nexport class TextureManager {\n constructor(gpgpu) {\n this.gpgpu = gpgpu;\n this.numUsedTextures = 0;\n this.numFreeTextures = 0;\n this._numBytesAllocated = 0;\n this._numBytesFree = 0; // How many bytes that have been allocated\n // are available for reuse.\n this.freeTextures = {};\n this.logEnabled = false;\n this.usedTextures = {};\n }\n acquireTexture(shapeRC, usage, isPacked) {\n const physicalTexType = getPhysicalFromLogicalTextureType(usage, isPacked);\n const shapeKey = getKeyFromTextureShape(shapeRC, physicalTexType, isPacked);\n if (!(shapeKey in this.freeTextures)) {\n this.freeTextures[shapeKey] = [];\n }\n if (!(shapeKey in this.usedTextures)) {\n this.usedTextures[shapeKey] = [];\n }\n const texBytes = computeBytes(shapeRC, physicalTexType, this.gpgpu.gl, this.gpgpu.textureConfig, isPacked);\n if (this.freeTextures[shapeKey].length > 0) {\n this.numFreeTextures--;\n this.numUsedTextures++;\n this._numBytesFree -= texBytes;\n this.log();\n const newTexture = this.freeTextures[shapeKey].shift();\n this.usedTextures[shapeKey].push(newTexture);\n return newTexture;\n }\n let newTexture;\n if (physicalTexType === PhysicalTextureType.PACKED_2X2_FLOAT32) {\n newTexture = this.gpgpu.createPackedMatrixTexture(shapeRC[0], shapeRC[1]);\n }\n else if (physicalTexType === PhysicalTextureType.PACKED_2X2_FLOAT16) {\n newTexture =\n this.gpgpu.createFloat16PackedMatrixTexture(shapeRC[0], shapeRC[1]);\n }\n else if (physicalTexType === PhysicalTextureType.UNPACKED_FLOAT32) {\n newTexture =\n this.gpgpu.createFloat32MatrixTexture(shapeRC[0], shapeRC[1]);\n }\n else if (physicalTexType === PhysicalTextureType.UNPACKED_FLOAT16) {\n newTexture =\n this.gpgpu.createFloat16MatrixTexture(shapeRC[0], shapeRC[1]);\n }\n else if (physicalTexType === PhysicalTextureType.PACKED_4X1_UNSIGNED_BYTE) {\n newTexture =\n this.gpgpu.createUnsignedBytesMatrixTexture(shapeRC[0], shapeRC[1]);\n }\n this.usedTextures[shapeKey].push(newTexture);\n this.numUsedTextures++;\n this._numBytesAllocated += texBytes;\n this.log();\n return newTexture;\n }\n releaseTexture(texture, shape, logicalTexType, isPacked) {\n if (this.freeTextures == null) {\n // Already disposed.\n return;\n }\n const physicalTexType = getPhysicalFromLogicalTextureType(logicalTexType, isPacked);\n const shapeKey = getKeyFromTextureShape(shape, physicalTexType, isPacked);\n if (!(shapeKey in this.freeTextures)) {\n this.freeTextures[shapeKey] = 
[];\n }\n const texBytes = computeBytes(shape, physicalTexType, this.gpgpu.gl, this.gpgpu.textureConfig, isPacked);\n const deleteTexThreshold = env().get('WEBGL_DELETE_TEXTURE_THRESHOLD');\n if (deleteTexThreshold !== -1 &&\n this._numBytesAllocated > deleteTexThreshold) {\n this.gpgpu.deleteMatrixTexture(texture);\n this._numBytesAllocated -= texBytes;\n }\n else {\n this.freeTextures[shapeKey].push(texture);\n this.numFreeTextures++;\n this._numBytesFree += texBytes;\n }\n this.numUsedTextures--;\n const texList = this.usedTextures[shapeKey];\n const texIndex = texList.indexOf(texture);\n if (texIndex < 0) {\n throw new Error('Cannot release a texture that was never provided by this ' +\n 'texture manager');\n }\n texList.splice(texIndex, 1);\n this.log();\n }\n log() {\n if (!this.logEnabled) {\n return;\n }\n const total = this.numFreeTextures + this.numUsedTextures;\n console.log('Free/Used', `${this.numFreeTextures} / ${this.numUsedTextures}`, `(${total})`);\n const freeRatio = this._numBytesFree / this._numBytesAllocated;\n console.log(`Bytes allocated: ${this._numBytesAllocated}`);\n console.log(`Bytes unused: ${this._numBytesFree} (${Math.round(100 * freeRatio)}%)`);\n }\n get numBytesAllocated() {\n return this._numBytesAllocated;\n }\n get numBytesFree() {\n return this._numBytesFree;\n }\n getNumUsedTextures() {\n return this.numUsedTextures;\n }\n getNumFreeTextures() {\n return this.numFreeTextures;\n }\n dispose() {\n if (this.freeTextures == null) {\n // Already disposed.\n return;\n }\n for (const texShape in this.freeTextures) {\n this.freeTextures[texShape].forEach(tex => {\n this.gpgpu.deleteMatrixTexture(tex);\n });\n }\n for (const texShape in this.usedTextures) {\n this.usedTextures[texShape].forEach(tex => {\n this.gpgpu.deleteMatrixTexture(tex);\n });\n }\n this.freeTextures = null;\n this.usedTextures = null;\n this.numUsedTextures = 0;\n this.numFreeTextures = 0;\n this._numBytesAllocated = 0;\n this._numBytesFree = 0;\n }\n}\nfunction numBytesForInternalFormat(gl, internalFormat) {\n // tslint:disable-next-line:no-any\n const glany = gl;\n if (internalFormat === glany.R32F) {\n return 4;\n }\n else if (internalFormat === glany.R16F) {\n return 2;\n }\n else if (internalFormat === glany.RGBA32F) {\n return 16;\n }\n else if (internalFormat === gl.RGBA) {\n return 16;\n }\n else if (internalFormat === glany.RGBA16F) {\n return 8;\n }\n throw new Error(`Unknown internal format ${internalFormat}`);\n}\nexport function computeBytes(shape, physicalTexType, gl, textureConfig, isPacked) {\n // It is not possible to infer packed status from the texture type because\n // depending on the textureConfig, different texture types may resolve to the\n // same internal format (e.g. in WebGL1, the internal format for\n // UNPACKED_FLOAT16 textures is gl.RGBA). 
Therefore we pass in `isPacked`\n // explicitly.\n const internalFormat = internalFormatForPhysicalTexType(physicalTexType, textureConfig);\n let numElements;\n if (isPacked) {\n const [packedWidth, packedHeight] = getPackedMatrixTextureShapeWidthHeight(shape[0], shape[1]);\n numElements = packedWidth * packedHeight;\n }\n else {\n const [width, height] = getUnpackedMatrixTextureShapeWidthHeight(shape[0], shape[1]);\n numElements = width * height;\n }\n const bytesPerElement = numBytesForInternalFormat(gl, internalFormat);\n return numElements * bytesPerElement;\n}\nfunction internalFormatForPhysicalTexType(physicalTexType, textureConfig) {\n switch (physicalTexType) {\n case PhysicalTextureType.PACKED_2X2_FLOAT32:\n return getInternalFormatForPackedMatrixTexture(textureConfig);\n case PhysicalTextureType.PACKED_2X2_FLOAT16:\n return getInternalFormatForFloat16PackedMatrixTexture(textureConfig);\n case PhysicalTextureType.UNPACKED_FLOAT32:\n return getInternalFormatForFloat32MatrixTexture(textureConfig);\n case PhysicalTextureType.UNPACKED_FLOAT16:\n return getInternalFormatForFloat16MatrixTexture(textureConfig);\n case PhysicalTextureType.PACKED_4X1_UNSIGNED_BYTE:\n return getInternalFormatForUnsignedBytesMatrixTexture(textureConfig);\n default:\n throw new Error(`Unknown physical texture type ${physicalTexType}`);\n }\n}\nfunction getPhysicalTextureForRendering(isPacked) {\n if (env().getBool('WEBGL_RENDER_FLOAT32_ENABLED')) {\n if (isPacked) {\n return PhysicalTextureType.PACKED_2X2_FLOAT32;\n }\n return PhysicalTextureType.UNPACKED_FLOAT32;\n }\n if (isPacked) {\n return PhysicalTextureType.PACKED_2X2_FLOAT16;\n }\n return PhysicalTextureType.UNPACKED_FLOAT16;\n}\nfunction getPhysicalFromLogicalTextureType(logicalTexType, isPacked) {\n if (logicalTexType === TextureUsage.UPLOAD) {\n return PhysicalTextureType.PACKED_2X2_FLOAT32;\n }\n else if (logicalTexType === TextureUsage.RENDER || logicalTexType == null) {\n return getPhysicalTextureForRendering(isPacked);\n }\n else if (logicalTexType === TextureUsage.DOWNLOAD ||\n logicalTexType === TextureUsage.PIXELS) {\n return PhysicalTextureType.PACKED_4X1_UNSIGNED_BYTE;\n }\n throw new Error(`Unknown logical texture type ${logicalTexType}`);\n}\nfunction getKeyFromTextureShape(shapeRowsCol, physicalTexType, isPacked) {\n return `${shapeRowsCol[0]}_${shapeRowsCol[1]}_${physicalTexType}_${isPacked}`;\n}\n//# sourceMappingURL=texture_manager.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getCoordsDataType } from './shader_compiler';\nexport class TileProgram {\n constructor(aShape, reps) {\n this.variableNames = ['A'];\n const outputShape = new Array(aShape.length);\n for (let i = 0; i < outputShape.length; i++) {\n outputShape[i] = aShape[i] * reps[i];\n }\n this.outputShape = outputShape;\n this.rank = outputShape.length;\n const dtype = getCoordsDataType(this.rank);\n const sourceCoords = getSourceCoords(aShape);\n this.userCode = `\n void main() {\n ${dtype} resRC = getOutputCoords();\n setOutput(getA(${sourceCoords}));\n }\n `;\n }\n}\nfunction getSourceCoords(aShape) {\n const rank = aShape.length;\n if (rank > 5) {\n throw Error(`Tile for rank ${rank} is not yet supported`);\n }\n if (rank === 1) {\n return `imod(resRC, ${aShape[0]})`;\n }\n const currentCoords = ['resRC.x', 'resRC.y', 'resRC.z', 'resRC.w', 'resRC.u'];\n const sourceCoords = [];\n for (let i = 0; i < aShape.length; i++) {\n sourceCoords.push(`imod(${currentCoords[i]}, ${aShape[i]})`);\n }\n return sourceCoords.join();\n}\n//# sourceMappingURL=tile_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util } from '@tensorflow/tfjs-core';\nexport class UnaryOpProgram {\n constructor(aShape, opSnippet) {\n this.variableNames = ['A'];\n this.outputShape = aShape;\n this.userCode = `\n float unaryOperation(float x) {\n ${opSnippet}\n }\n\n void main() {\n float x = getAAtOutCoords();\n float y = unaryOperation(x);\n\n setOutput(y);\n }\n `;\n }\n}\nconst CHECK_NAN_SNIPPET = `if (isnan(x)) return x;`;\nexport const LINEAR = `return x;`;\nexport const ABS = `return abs(x);`;\nexport const RELU = CHECK_NAN_SNIPPET + `\n return (x < 0.0) ? 0.0 : x;\n`;\nexport const RELU6 = CHECK_NAN_SNIPPET + `\n return (x < 0.0) ? 0.0 : min(6.0, x);\n`;\nexport const ELU = `return (x >= 0.0) ? x : (exp(x) - 1.0);`;\nexport const SELU = `\n // Stable and Attracting Fixed Point (0, 1) for Normalized Weights.\n // see: https://arxiv.org/abs/1706.02515\n float scaleAlpha = ${backend_util.SELU_SCALEALPHA};\n float scale = ${backend_util.SELU_SCALE};\n return (x >= 0.0) ? 
scale * x : scaleAlpha * (exp(x) - 1.0);\n`;\nexport function STEP(alpha = 0.0) {\n return CHECK_NAN_SNIPPET + `\n return x > 0.0 ? 1.0 : float(${alpha});\n `;\n}\nexport const NEG = `return -x;`;\nexport const CEIL = `return ceil(x);`;\nexport const FLOOR = `return floor(x);`;\nexport const SIGN = `\n if (isnan(x)) { return 0.0; }\n return sign(x);\n`;\nexport const IS_NAN = `return float(isnan(x));`;\nexport const IS_INF = `return float(isinf(x));`;\nexport const IS_FINITE = `return float(!isnan(x) && !isinf(x));`;\nexport const ROUND = `\n // OpenGL ES does not support round function.\n // The algorithm is based on banker's rounding.\n float base = floor(x);\n if ((x - base) < 0.5) {\n return floor(x);\n } else if ((x - base) > 0.5) {\n return ceil(x);\n } else {\n if (mod(base, 2.0) == 0.0) {\n return base;\n } else {\n return base + 1.0;\n }\n }\n`;\nexport const EXP = `return exp(x);`;\nexport const EXPM1 = `return exp(x) - 1.0;`;\nexport const LOG = `if (x < 0.0) return NAN;\n return log(x);`;\nexport const LOG1P = `return log(1.0 + x);`;\nexport const SQRT = `return sqrt(x);`;\nexport const RSQRT = `return inversesqrt(x);`;\nexport const SIGMOID = `return 1.0 / (1.0 + exp(-1.0 * x));`;\n/**\n * mirrors the implementation of tf.nn.softplus: https://goo.gl/vkcvwX\n *\n * epsilon is the difference between 1.0 and the next representable\n * float. For a single precision 32 bit float this should be 2^-23, see:\n * https://math.byu.edu/~schow/work/IEEEFloatingPoint.htm\n *\n * too_large = (x > -threshold) is value above which exp(x) may overflow\n * but softplus(x) == x is within machine epsilon\n *\n * too_small = (x < threshold) is value below which exp(x) may underflow,\n * but softplus(x) == exp(x) is within machine epsilon.\n */\nexport const SOFTPLUS = `\n float epsilon = 1.1920928955078125e-7;\n float threshold = log(epsilon) + 2.0;\n\n bool too_large = x > -threshold;\n bool too_small = x < threshold;\n\n float result;\n float exp_x = exp(x);\n\n if (too_large){\n result = x;\n }\n else if (too_small){\n result = exp_x;\n }\n else{\n result = log(exp_x + 1.0);\n }\n return result;\n`;\nexport const ASIN = CHECK_NAN_SNIPPET + `\n if (abs(x) > 1.) {\n return NAN;\n }\n return asin(x);\n`;\nexport const ACOS = CHECK_NAN_SNIPPET + `\n if (abs(x) > 1.) 
{\n return NAN;\n }\n return acos(x);\n`;\nexport const ATAN = CHECK_NAN_SNIPPET + `\n return atan(x);\n`;\nexport const SINH = `\n float e2x = exp(x);\n return (e2x - 1.0 / e2x) / 2.0;\n`;\nexport const COSH = `\n float e2x = exp(-x);\n return (e2x + 1.0 / e2x) / 2.0;\n`;\nexport const TANH = `\n float e2x = exp(-2.0 * abs(x));\n return sign(x) * (1.0 - e2x) / (1.0 + e2x);\n`;\nexport const ASINH = CHECK_NAN_SNIPPET + `return log(x + sqrt(x * x + 1.0));`;\nexport const ACOSH = CHECK_NAN_SNIPPET + `\n if (x < 1.0) return NAN;\n return log(x + sqrt(x * x - 1.0));`;\nexport const ATANH = CHECK_NAN_SNIPPET + `\n if ((x < -1.0) || (x > 1.0)) return NAN;\n return (log(1.0 + x) - log(1.0 - x)) / 2.0;`;\nexport const ERF = `\n // Error function is calculated approximately with elementary function.\n // See \"Handbook of Mathematical Functions with Formulas,\n // Graphs, and Mathematical Tables\", Abramowitz and Stegun.\n float p = ${backend_util.ERF_P};\n float a1 = ${backend_util.ERF_A1};\n float a2 = ${backend_util.ERF_A2};\n float a3 = ${backend_util.ERF_A3};\n float a4 = ${backend_util.ERF_A4};\n float a5 = ${backend_util.ERF_A5};\n\n float sign = sign(x);\n x = abs(x);\n float t = 1.0 / (1.0 + p * x);\n return sign * (1.0 - (((((a5*t + a4)*t) + a3)*t + a2)*t + a1)*t*exp(-x*x));\n`;\nexport const RECIPROCAL = `return 1.0 / x;`;\nexport const LOGICAL_NOT = `return float(!(x >= 1.0));`;\nexport const CLONE = 'return x;';\n//# sourceMappingURL=unaryop_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const LINEAR = `return x;`;\nexport const LOG = `\n vec4 result = log(x);\n vec4 isNaN = vec4(lessThan(x, vec4(0.0)));\n result.r = isNaN.r == 1.0 ? NAN : result.r;\n result.g = isNaN.g == 1.0 ? NAN : result.g;\n result.b = isNaN.b == 1.0 ? NAN : result.b;\n result.a = isNaN.a == 1.0 ? NAN : result.a;\n\n return result;\n`;\nexport const RELU = `\n vec4 result = x * vec4(greaterThanEqual(x, vec4(0.0)));\n bvec4 isNaN = isnan(x);\n\n result.r = isNaN.r ? x.r : result.r;\n result.g = isNaN.g ? x.g : result.g;\n result.b = isNaN.b ? x.b : result.b;\n result.a = isNaN.a ? x.a : result.a;\n\n return result;\n`;\nexport const RELU6 = `\n vec4 result = min(x, vec4(6.)) * vec4(greaterThanEqual(x, vec4(0.0)));\n bvec4 isNaN = isnan(x);\n\n result.r = isNaN.r ? x.r : result.r;\n result.g = isNaN.g ? x.g : result.g;\n result.b = isNaN.b ? x.b : result.b;\n result.a = isNaN.a ? x.a : result.a;\n\n return result;\n`;\nexport const ELU = `\n vec4 result;\n\n result.r = (x.r >= 0.0) ? x.r : (exp(x.r) - 1.0);\n result.g = (x.g >= 0.0) ? x.g : (exp(x.g) - 1.0);\n result.b = (x.b >= 0.0) ? x.b : (exp(x.b) - 1.0);\n result.a = (x.a >= 0.0) ? 
x.a : (exp(x.a) - 1.0);\n\n return result;\n`;\nexport class UnaryOpPackedProgram {\n constructor(aShape, opSnippet) {\n this.variableNames = ['A'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = aShape;\n this.userCode = `\n vec4 unaryOperation(vec4 x) {\n ${opSnippet}\n }\n\n void main() {\n vec4 x = getAAtOutCoords();\n vec4 y = unaryOperation(x);\n\n setOutput(y);\n }\n `;\n }\n}\n//# sourceMappingURL=unaryop_packed_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getChannels, getSourceCoords } from './packing_util';\nimport { getCoordsDataType } from './shader_compiler';\nexport class UnpackProgram {\n constructor(outputShape) {\n this.variableNames = ['A'];\n this.packedInputs = true;\n this.packedOutput = false;\n this.outputShape = outputShape;\n const rank = outputShape.length;\n const channels = getChannels('rc', rank);\n const dtype = getCoordsDataType(rank);\n const sourceCoords = getSourceCoords(rank, channels);\n const innerDims = channels.slice(-2);\n const coords = rank <= 1 ? 'rc' : `vec2(${innerDims.join(',')})`;\n this.userCode = `\n void main() {\n ${dtype} rc = getOutputCoords();\n vec4 packedInput = getA(${sourceCoords});\n\n setOutput(getChannel(packedInput, ${coords}));\n }\n `;\n }\n}\n//# sourceMappingURL=unpack_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// Import webgl flags.\nimport './flags_webgl';\nimport * as tf from '@tensorflow/tfjs-core';\nimport { div, engine, env, max, range, reshape, scalar, softmax, tensor, tidy, transpose } from '@tensorflow/tfjs-core';\nimport { backend_util, buffer, kernel_impls, slice_util, util } from '@tensorflow/tfjs-core';\nimport { DataStorage, KernelBackend, upcastType } from '@tensorflow/tfjs-core';\nimport { ceilImplCPU, expImplCPU, expm1ImplCPU, floorImplCPU, logImplCPU, rsqrtImplCPU, simpleAbsImplCPU, sliceImplCPU } from './kernel_utils/shared';\nconst { segment_util } = backend_util;\nconst split = kernel_impls.split;\nconst tile = kernel_impls.tile;\nconst topkImpl = kernel_impls.topkImpl;\nconst whereImpl = kernel_impls.whereImpl;\nimport { AddNProgram } from './addn_gpu';\nimport { AddNPackedProgram } from './addn_packed_gpu';\nimport { ArgMinMaxProgram } from './argminmax_gpu';\nimport { ArgMinMaxPackedProgram } from './argminmax_packed_gpu';\nimport { AvgPool3DBackpropProgram } from './avg_pool_backprop_gpu';\nimport * as binaryop_gpu from './binaryop_gpu';\nimport { BinaryOpProgram } from './binaryop_gpu';\nimport * as binaryop_packed_gpu from './binaryop_packed_gpu';\nimport { BinaryOpPackedProgram } from './binaryop_packed_gpu';\nimport { getWebGLContext } from './canvas_util';\nimport { ClipProgram } from './clip_gpu';\nimport { ClipPackedProgram } from './clip_packed_gpu';\nimport { ComplexAbsProgram } from './complex_abs_gpu';\nimport { Conv2DDerFilterProgram, Conv2DDerInputProgram, Conv3DDerFilterProgram, Conv3DDerInputProgram } from './conv_backprop_gpu';\nimport { DepthwiseConv2DDerFilterProgram, DepthwiseConv2DDerInputProgram } from './conv_backprop_gpu_depthwise';\nimport { Conv2DProgram, Conv3DProgram } from './conv_gpu';\nimport { DepthwiseConv2DProgram } from './conv_gpu_depthwise';\nimport { DepthwiseConvPacked2DProgram } from './conv_packed_gpu_depthwise';\nimport { CropAndResizeProgram } from './crop_and_resize_gpu';\nimport { CumSumProgram } from './cumsum_gpu';\nimport { DecodeMatrixProgram } from './decode_matrix_gpu';\nimport { DecodeMatrixPackedProgram } from './decode_matrix_packed_gpu';\nimport { DepthToSpaceProgram } from './depth_to_space_gpu';\nimport { DiagProgram } from './diag_gpu';\nimport { EncodeFloatProgram } from './encode_float_gpu';\nimport { EncodeFloatPackedProgram } from './encode_float_packed_gpu';\nimport { EncodeMatrixProgram } from './encode_matrix_gpu';\nimport { EncodeMatrixPackedProgram } from './encode_matrix_packed_gpu';\nimport { FillProgram } from './fill_gpu';\nimport { GatherProgram } from './gather_gpu';\nimport { GatherNDProgram } from './gather_nd_gpu';\nimport { GPGPUContext } from './gpgpu_context';\nimport * as gpgpu_math from './gpgpu_math';\nimport { Im2ColPackedProgram } from './im2col_packed_gpu';\nimport { LRNProgram } from './lrn_gpu';\nimport { LRNGradProgram } 
from './lrn_grad_gpu';\nimport { LRNPackedProgram } from './lrn_packed_gpu';\nimport { MaxPool3DBackpropProgram } from './max_pool_backprop_gpu';\nimport { MatMulPackedProgram } from './mulmat_packed_gpu';\nimport { MultinomialProgram } from './multinomial_gpu';\nimport { OneHotProgram } from './onehot_gpu';\nimport { PackProgram } from './pack_gpu';\nimport { PadProgram } from './pad_gpu';\nimport { PadPackedProgram } from './pad_packed_gpu';\nimport { Pool3DProgram } from './pool_gpu';\nimport { ReduceProgram } from './reduce_gpu';\nimport { ReshapePackedProgram } from './reshape_packed_gpu';\nimport { ResizeBilinearBackpropProgram } from './resize_bilinear_backprop_gpu';\nimport { ResizeBilinearProgram } from './resize_bilinear_gpu';\nimport { ResizeBilinearPackedProgram } from './resize_bilinear_packed_gpu';\nimport { ResizeNearestNeigborBackpropProgram } from './resize_nearest_neighbor_backprop_gpu';\nimport { ResizeNearestNeighborProgram } from './resize_nearest_neighbor_gpu';\nimport { ReverseProgram } from './reverse_gpu';\nimport { ReversePackedProgram } from './reverse_packed_gpu';\nimport { ScatterProgram } from './scatter_gpu';\nimport { SegmentOpProgram } from './segment_gpu';\nimport { SelectProgram } from './select_gpu';\nimport { SliceProgram } from './slice_gpu';\nimport { SlicePackedProgram } from './slice_packed_gpu';\nimport { StridedSliceProgram } from './strided_slice_gpu';\nimport * as tex_util from './tex_util';\nimport { TextureUsage } from './tex_util';\nimport { TextureManager } from './texture_manager';\nimport { TileProgram } from './tile_gpu';\nimport * as unary_op from './unaryop_gpu';\nimport { UnaryOpProgram } from './unaryop_gpu';\nimport * as unary_packed_op from './unaryop_packed_gpu';\nimport { UnaryOpPackedProgram } from './unaryop_packed_gpu';\nimport { UnpackProgram } from './unpack_gpu';\nimport * as webgl_util from './webgl_util';\nexport const EPSILON_FLOAT32 = 1e-7;\nexport const EPSILON_FLOAT16 = 1e-4;\nconst binaryCaches = {};\nexport function getBinaryCache(webGLVersion) {\n if (webGLVersion in binaryCaches) {\n return binaryCaches[webGLVersion];\n }\n binaryCaches[webGLVersion] = {};\n return binaryCaches[webGLVersion];\n}\nfunction mapActivationToShaderProgram(activation, packed = false) {\n if (activation === 'linear') {\n if (packed) {\n return unary_packed_op.LINEAR;\n }\n return unary_op.LINEAR;\n }\n else if (activation === 'relu') {\n if (packed) {\n return unary_packed_op.RELU;\n }\n return unary_op.RELU;\n }\n else if (activation === 'elu') {\n if (packed) {\n return unary_packed_op.ELU;\n }\n return unary_op.ELU;\n }\n else if (activation === 'relu6') {\n if (packed) {\n return unary_packed_op.RELU6;\n }\n return unary_op.RELU6;\n }\n else if (activation === 'prelu') {\n if (packed) {\n return binaryop_packed_gpu.PRELU;\n }\n return binaryop_gpu.PRELU;\n }\n throw new Error(`Activation ${activation} has not been implemented for the WebGL backend.`);\n}\n// Empirically determined constant used to determine size threshold for handing\n// off execution to the CPU.\nconst CPU_HANDOFF_SIZE_THRESHOLD = 128;\n// Empirically determined constant used to decide the number of MB on GPU\n// before we warn about high memory use. 
The MB are this constant * screen area\n// * dpi / 1024 / 1024.\nconst BEFORE_PAGING_CONSTANT = 600;\nfunction numMBBeforeWarning() {\n if (env().global.screen == null) {\n return 1024; // 1 GB.\n }\n return (env().global.screen.height * env().global.screen.width *\n window.devicePixelRatio) *\n BEFORE_PAGING_CONSTANT / 1024 / 1024;\n}\n// Empirically determined minimal shared dimension in matmul before we forward\n// to a.mul(b).sum() in order to take advantage of GPU parallelism. See\n// https://github.com/tensorflow/tfjs-core/pull/1379 for benchmarks.\nexport const MATMUL_SHARED_DIM_THRESHOLD = 1000;\nexport class MathBackendWebGL extends KernelBackend {\n constructor(gpgpu) {\n super();\n // Maps data ids that have a pending read operation, to list of subscribers.\n this.pendingRead = new WeakMap();\n // List of data ids that are scheduled for disposal, but are waiting on a\n // pending read operation.\n this.pendingDisposal = new WeakSet();\n // Used to count the number of 'shallow' sliced tensors that point to the\n // same data id.\n this.dataRefCount = new WeakMap();\n this.numBytesInGPU = 0;\n // Accumulated time spent (including blocking) in uploading data to webgl.\n this.uploadWaitMs = 0;\n // Accumulated time spent (including blocking in downloading data from webgl.\n this.downloadWaitMs = 0;\n this.warnedAboutMemory = false;\n this.warnedAboutCPUBackend = false;\n this.pendingDeletes = 0;\n this.disposed = false;\n if (!env().getBool('HAS_WEBGL')) {\n throw new Error('WebGL is not supported on this device');\n }\n if (gpgpu == null) {\n const gl = getWebGLContext(env().getNumber('WEBGL_VERSION'));\n this.binaryCache = getBinaryCache(env().getNumber('WEBGL_VERSION'));\n this.gpgpu = new GPGPUContext(gl);\n this.canvas = gl.canvas;\n this.gpgpuCreatedLocally = true;\n }\n else {\n this.gpgpu = gpgpu;\n this.binaryCache = {};\n this.gpgpuCreatedLocally = false;\n this.canvas = gpgpu.gl.canvas;\n }\n this.textureManager = new TextureManager(this.gpgpu);\n this.numMBBeforeWarning = numMBBeforeWarning();\n this.texData = new DataStorage(this, engine());\n }\n numDataIds() {\n return this.texData.numDataIds() +\n (this.cpuBackend ? this.cpuBackend.numDataIds() : 0) -\n this.pendingDeletes;\n }\n write(values, shape, dtype) {\n if (env().getBool('WEBGL_CHECK_NUMERICAL_PROBLEMS') ||\n env().getBool('DEBUG')) {\n this.checkNumericalProblems(values);\n }\n if (dtype === 'complex64' && values != null) {\n throw new Error(`Cannot write to a complex64 dtype. ` +\n `Please use tf.complex(real, imag).`);\n }\n const dataId = {};\n this.texData.set(dataId, {\n shape,\n dtype,\n values,\n usage: TextureUsage.UPLOAD,\n refCount: 1,\n complexParentRefCount: 0\n });\n return dataId;\n }\n /** Increase refCount of a `TextureData`. */\n incRef(dataId) {\n const texData = this.texData.get(dataId);\n texData.refCount++;\n }\n /** Decrease refCount of a `TextureData`. */\n decRef(dataId) {\n if (this.texData.has(dataId)) {\n const texData = this.texData.get(dataId);\n texData.refCount--;\n }\n }\n move(dataId, values, shape, dtype) {\n if (env().getBool('DEBUG')) {\n this.checkNumericalProblems(values);\n }\n if (dtype === 'complex64') {\n throw new Error(`Cannot write to a complex64 dtype. 
` +\n `Please use tf.complex(real, imag).`);\n }\n this.texData.set(dataId, {\n shape,\n dtype,\n values,\n usage: TextureUsage.UPLOAD,\n refCount: 1,\n complexParentRefCount: 0\n });\n }\n disposeIntermediateTensorInfo(tensorInfo) {\n const dataId = tensorInfo.dataId;\n if (this.texData.has(dataId)) {\n const textureData = this.texData.get(dataId);\n textureData.refCount--;\n if (textureData.refCount < 1) {\n this.disposeData(dataId);\n }\n }\n }\n readSync(dataId) {\n const texData = this.texData.get(dataId);\n const { values, dtype, complexTensorInfos, slice, shape, isPacked } = texData;\n // The presence of `slice` indicates this tensor is a shallow slice of a\n // different tensor, and is using that original tensor's texture. Run\n // `clone` in order to copy that texture and read from it.\n if (slice != null) {\n let program;\n if (isPacked) {\n program = new UnaryOpPackedProgram(shape, unary_op.CLONE);\n }\n else {\n program = new UnaryOpProgram(shape, unary_op.CLONE);\n }\n const res = this.runWebGLProgram(program, [{ dataId, shape, dtype }], dtype);\n const data = this.readSync(res.dataId);\n this.disposeIntermediateTensorInfo(res);\n return data;\n }\n if (values != null) {\n return this.convertAndCacheOnCPU(dataId);\n }\n if (dtype === 'string') {\n return values;\n }\n const shouldTimeProgram = this.activeTimers != null;\n let start;\n if (shouldTimeProgram) {\n start = util.now();\n }\n let result;\n if (dtype === 'complex64') {\n const realValues = this.readSync(complexTensorInfos.real.dataId);\n const imagValues = this.readSync(complexTensorInfos.imag.dataId);\n result = backend_util.mergeRealAndImagArrays(realValues, imagValues);\n }\n else {\n result = this.getValuesFromTexture(dataId);\n }\n if (shouldTimeProgram) {\n this.downloadWaitMs += util.now() - start;\n }\n return this.convertAndCacheOnCPU(dataId, result);\n }\n async read(dataId) {\n if (this.pendingRead.has(dataId)) {\n const subscribers = this.pendingRead.get(dataId);\n return new Promise(resolve => subscribers.push(resolve));\n }\n const texData = this.texData.get(dataId);\n const { values, shape, slice, dtype, complexTensorInfos, isPacked } = texData;\n // The presence of `slice` indicates this tensor is a shallow slice of a\n // different tensor, and is using that original tensor's texture. 
Run\n // `clone` in order to copy that texture and read from it.\n if (slice != null) {\n let program;\n if (isPacked) {\n program = new UnaryOpPackedProgram(shape, unary_op.CLONE);\n }\n else {\n program = new UnaryOpProgram(shape, unary_op.CLONE);\n }\n const res = this.runWebGLProgram(program, [{ dataId, shape, dtype }], dtype);\n const data = this.read(res.dataId);\n this.disposeIntermediateTensorInfo(res);\n return data;\n }\n if (values != null) {\n return this.convertAndCacheOnCPU(dataId);\n }\n if (!env().getBool('WEBGL_DOWNLOAD_FLOAT_ENABLED') &&\n env().getNumber('WEBGL_VERSION') === 2) {\n throw new Error(`tensor.data() with WEBGL_DOWNLOAD_FLOAT_ENABLED=false and ` +\n `WEBGL_VERSION=2 not yet supported.`);\n }\n let buffer = null;\n let tmpDownloadTarget;\n if (dtype !== 'complex64' && env().get('WEBGL_BUFFER_SUPPORTED')) {\n // Possibly copy the texture into a buffer before inserting a fence.\n tmpDownloadTarget = this.decode(dataId);\n const tmpData = this.texData.get(tmpDownloadTarget.dataId);\n buffer = this.gpgpu.createBufferFromTexture(tmpData.texture, ...tex_util.getDenseTexShape(shape));\n }\n this.pendingRead.set(dataId, []);\n if (dtype !== 'complex64') {\n // Create a fence and wait for it to resolve.\n await this.gpgpu.createAndWaitForFence();\n }\n // Download the values from the GPU.\n let vals;\n if (dtype === 'complex64') {\n const ps = await Promise.all([\n this.read(complexTensorInfos.real.dataId),\n this.read(complexTensorInfos.imag.dataId)\n ]);\n const realValues = ps[0];\n const imagValues = ps[1];\n vals = backend_util.mergeRealAndImagArrays(realValues, imagValues);\n }\n else if (buffer == null) {\n vals = this.getValuesFromTexture(dataId);\n }\n else {\n const size = util.sizeFromShape(shape);\n vals = this.gpgpu.downloadFloat32MatrixFromBuffer(buffer, size);\n }\n if (tmpDownloadTarget != null) {\n this.disposeIntermediateTensorInfo(tmpDownloadTarget);\n }\n const dTypeVals = this.convertAndCacheOnCPU(dataId, vals);\n const subscribers = this.pendingRead.get(dataId);\n this.pendingRead.delete(dataId);\n // Notify all pending reads.\n subscribers.forEach(resolve => resolve(dTypeVals));\n if (this.pendingDisposal.has(dataId)) {\n this.pendingDisposal.delete(dataId);\n this.disposeData(dataId);\n this.pendingDeletes--;\n }\n return dTypeVals;\n }\n checkNumericalProblems(values) {\n if (values == null) {\n return;\n }\n for (let i = 0; i < values.length; i++) {\n const num = values[i];\n if (!webgl_util.canBeRepresented(num)) {\n if (env().getBool('WEBGL_RENDER_FLOAT32_CAPABLE')) {\n throw Error(`The value ${num} cannot be represented with your ` +\n `current settings. Consider enabling float32 rendering: ` +\n `'tf.env().set('WEBGL_RENDER_FLOAT32_ENABLED', true);'`);\n }\n throw Error(`The value ${num} cannot be represented on this device.`);\n }\n }\n }\n getValuesFromTexture(dataId) {\n const { shape, dtype, isPacked } = this.texData.get(dataId);\n const size = util.sizeFromShape(shape);\n if (env().getBool('WEBGL_DOWNLOAD_FLOAT_ENABLED')) {\n const tmpTarget = this.decode(dataId);\n const tmpData = this.texData.get(tmpTarget.dataId);\n const vals = this.gpgpu\n .downloadMatrixFromPackedTexture(tmpData.texture, ...tex_util.getDenseTexShape(shape))\n .subarray(0, size);\n this.disposeIntermediateTensorInfo(tmpTarget);\n return vals;\n }\n const shouldUsePackedProgram = env().getBool('WEBGL_PACK') && isPacked === true;\n const outputShape = shouldUsePackedProgram ? 
webgl_util.getShapeAs3D(shape) : shape;\n const program = shouldUsePackedProgram ?\n new EncodeFloatPackedProgram(outputShape) :\n new EncodeFloatProgram(outputShape);\n const output = this.runWebGLProgram(program, [{ shape: outputShape, dtype, dataId }], 'float32');\n const tmpData = this.texData.get(output.dataId);\n const vals = this.gpgpu\n .downloadByteEncodedFloatMatrixFromOutputTexture(tmpData.texture, tmpData.texShape[0], tmpData.texShape[1])\n .subarray(0, size);\n this.disposeIntermediateTensorInfo(output);\n return vals;\n }\n async time(f) {\n const oldActiveTimers = this.activeTimers;\n const newActiveTimers = [];\n let outerMostTime = false;\n if (this.programTimersStack == null) {\n this.programTimersStack = newActiveTimers;\n outerMostTime = true;\n }\n else {\n this.activeTimers.push(newActiveTimers);\n }\n this.activeTimers = newActiveTimers;\n f();\n // needing to split these up because util.flatten only accepts certain types\n const flattenedActiveTimerQueries = util.flatten(this.activeTimers.map((d) => d.query))\n .filter(d => d != null);\n const flattenedActiveTimerNames = util.flatten(this.activeTimers.map((d) => d.name))\n .filter(d => d != null);\n this.activeTimers = oldActiveTimers;\n if (outerMostTime) {\n this.programTimersStack = null;\n }\n const res = {\n uploadWaitMs: this.uploadWaitMs,\n downloadWaitMs: this.downloadWaitMs,\n kernelMs: null,\n wallMs: null // will be filled by the engine\n };\n if (env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_RELIABLE') > 0) {\n const kernelMs = await Promise.all(flattenedActiveTimerQueries);\n res['kernelMs'] = util.sum(kernelMs);\n res['getExtraProfileInfo'] = () => kernelMs.map((d, i) => ({ name: flattenedActiveTimerNames[i], ms: d }))\n .map(d => `${d.name}: ${d.ms}`)\n .join(', ');\n }\n else {\n res['kernelMs'] = {\n error: 'WebGL query timers are not supported in this environment.'\n };\n }\n this.uploadWaitMs = 0;\n this.downloadWaitMs = 0;\n return res;\n }\n memory() {\n return {\n unreliable: false,\n numBytesInGPU: this.numBytesInGPU,\n numBytesInGPUAllocated: this.textureManager.numBytesAllocated,\n numBytesInGPUFree: this.textureManager.numBytesFree\n };\n }\n startTimer() {\n if (env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_RELIABLE') > 0) {\n return this.gpgpu.beginQuery();\n }\n return { startMs: util.now(), endMs: null };\n }\n endTimer(query) {\n if (env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_RELIABLE') > 0) {\n this.gpgpu.endQuery();\n return query;\n }\n query.endMs = util.now();\n return query;\n }\n async getQueryTime(query) {\n if (env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_RELIABLE') > 0) {\n return this.gpgpu.waitForQueryAndGetTime(query);\n }\n const timerQuery = query;\n return timerQuery.endMs - timerQuery.startMs;\n }\n disposeData(dataId) {\n if (this.pendingDisposal.has(dataId)) {\n return;\n }\n if (this.pendingRead.has(dataId)) {\n this.pendingDisposal.add(dataId);\n this.pendingDeletes++;\n return;\n }\n // No-op if already disposed.\n if (!this.texData.has(dataId)) {\n return;\n }\n // Trying to dispose a textureData that has a 'kept' refCount, e.g. trying\n // to dispose a tensor whose data bucket is shared with a complex tensor. 
In\n // this case we are removing a reference to the textureData, but we\n // shouldn't actually dispose the texture.\n if (this.texData.get(dataId).complexParentRefCount > 0) {\n this.texData.get(dataId).refCount--;\n return;\n }\n this.releaseGPUData(dataId);\n const { complexTensorInfos } = this.texData.get(dataId);\n if (complexTensorInfos != null) {\n this.texData.get(complexTensorInfos.real.dataId).complexParentRefCount--;\n this.disposeIntermediateTensorInfo(complexTensorInfos.real);\n this.texData.get(complexTensorInfos.imag.dataId).complexParentRefCount--;\n this.disposeIntermediateTensorInfo(complexTensorInfos.imag);\n }\n this.texData.delete(dataId);\n }\n releaseGPUData(dataId) {\n const { texture, dtype, texShape, usage, isPacked, slice } = this.texData.get(dataId);\n const key = slice && slice.origDataId || dataId;\n const refCount = this.dataRefCount.get(key);\n if (refCount > 1) {\n this.dataRefCount.set(key, refCount - 1);\n }\n else {\n this.dataRefCount.delete(key);\n if (texture != null) {\n this.numBytesInGPU -= this.computeBytes(texShape, dtype);\n this.textureManager.releaseTexture(texture, texShape, usage, isPacked);\n }\n }\n const texData = this.texData.get(dataId);\n texData.texture = null;\n texData.texShape = null;\n texData.isPacked = false;\n texData.slice = null;\n }\n getTexture(dataId) {\n this.uploadToGPU(dataId);\n return this.texData.get(dataId).texture;\n }\n /**\n * Returns internal information for the specific data bucket. Used in unit\n * tests.\n */\n getDataInfo(dataId) {\n return this.texData.get(dataId);\n }\n getCPUBackend() {\n if (!env().getBool('WEBGL_CPU_FORWARD')) {\n return null;\n }\n if (this.cpuBackend == null) {\n this.cpuBackend = engine().findBackend('cpu');\n }\n return this.cpuBackend;\n }\n /*\n Tests whether all the inputs to an op are small and on the CPU. This heuristic\n determines when it would be faster to execute a kernel on the CPU. WebGL\n kernels opt into running this check and forwarding when appropriate.\n TODO(https://github.com/tensorflow/tfjs/issues/872): Develop a more\n sustainable strategy for optimizing backend execution of ops.\n */\n shouldExecuteOnCPU(inputs, sizeThreshold = CPU_HANDOFF_SIZE_THRESHOLD) {\n const cpuBackend = this.getCPUBackend();\n if (!this.warnedAboutCPUBackend && cpuBackend == null) {\n console.warn('Your application contains ops that are small enough to be ' +\n 'executed on the CPU backend, however the CPU backend cannot ' +\n 'be found. 
Consider importing the CPU backend ' +\n '(@tensorflow/tfjs-backend-cpu) for better performance.');\n this.warnedAboutCPUBackend = true;\n }\n return cpuBackend != null &&\n inputs.every(input => this.texData.get(input.dataId).texture == null &&\n util.sizeFromShape(input.shape) < sizeThreshold);\n }\n getGPGPUContext() {\n return this.gpgpu;\n }\n slice(x, begin, size) {\n if (this.shouldExecuteOnCPU([x])) {\n const outValues = sliceImplCPU(this.texData.get(x.dataId).values, begin, size, x.shape, x.dtype);\n return this.makeOutput(size, x.dtype, outValues);\n }\n // Short-circuit computation if the slice is zero-sized.\n if (util.sizeFromShape(size) === 0) {\n return tensor([], size, x.dtype);\n }\n const { isPacked } = this.texData.get(x.dataId);\n const isContinous = slice_util.isSliceContinous(x.shape, begin, size);\n if (isPacked || !isContinous) {\n const program = env().getBool('WEBGL_PACK_ARRAY_OPERATIONS') ?\n new SlicePackedProgram(size) :\n new SliceProgram(size);\n const customSetup = program.getCustomSetupFunc(begin);\n return this.compileAndRun(program, [x], null, customSetup);\n }\n this.uploadToGPU(x.dataId);\n return this.shallowSlice(x, begin, size);\n }\n shallowSlice(x, begin, size) {\n const xTexData = this.texData.get(x.dataId);\n const t = this.makeOutput(size, x.dtype);\n const newTexData = this.texData.get(t.dataId);\n // Copy texture data from the original tensor.\n Object.assign(newTexData, xTexData);\n newTexData.shape = size;\n newTexData.dtype = x.dtype;\n let flatOffset = slice_util.computeFlatOffset(begin, x.strides);\n if (xTexData.slice) {\n // We are slicing an already sliced tensor, so we have to accumulate\n // the offset.\n flatOffset += xTexData.slice.flatOffset;\n }\n newTexData.slice = {\n flatOffset,\n // Point to the original dataId, which is used to do ref counting.\n origDataId: xTexData.slice && xTexData.slice.origDataId || x.dataId\n };\n // Increase the ref count for that data bucket.\n const refCount = this.dataRefCount.get(newTexData.slice.origDataId) || 1;\n this.dataRefCount.set(newTexData.slice.origDataId, refCount + 1);\n return t;\n }\n stridedSlice(x, begin, end, strides) {\n const cpuRes = this.tryRunOnCpuOrThrow([x], () => this.cpuBackend.stridedSlice(x, begin, end, strides));\n if (cpuRes) {\n return cpuRes;\n }\n const outShape = slice_util.computeOutShape(begin, end, strides);\n if (outShape.some(axis => axis === 0)) {\n return tensor([], outShape);\n }\n const program = new StridedSliceProgram(begin, strides, outShape);\n return this.compileAndRun(program, [x]);\n }\n reverse(x, axis) {\n const program = env().getBool('WEBGL_PACK_ARRAY_OPERATIONS') ?\n new ReversePackedProgram(x.shape, axis) :\n new ReverseProgram(x.shape, axis);\n return this.compileAndRun(program, [x]);\n }\n neg(x) {\n const cpuRes = this.tryRunOnCpuOrThrow([x], () => this.cpuBackend.neg(x));\n if (cpuRes) {\n return cpuRes;\n }\n if (env().getBool('WEBGL_PACK_UNARY_OPERATIONS')) {\n return this.packedUnaryOp(x, unary_op.NEG, x.dtype);\n }\n const program = new UnaryOpProgram(x.shape, unary_op.NEG);\n return this.compileAndRun(program, [x]);\n }\n batchMatMul(a, b, transposeA, transposeB) {\n const outerShapeA = transposeA ? a.shape[2] : a.shape[1];\n const outerShapeB = transposeB ? b.shape[1] : b.shape[2];\n const sharedDim = transposeA ? 
a.shape[1] : a.shape[2];\n const batch = Math.max(a.shape[0], b.shape[0]);\n // Since the matrices are vectors, it is faster to call mul().sum()\n // because sum() is O(sqrt(N)) due to divide-and-conquer.\n if ((outerShapeA === 1 || outerShapeB === 1) &&\n sharedDim > MATMUL_SHARED_DIM_THRESHOLD) {\n if (transposeA) {\n a = transpose(a, [0, 2, 1]);\n }\n if (transposeB) {\n b = transpose(b, [0, 2, 1]);\n }\n const a3D = outerShapeB === 1 ? a : a.as3D(batch, sharedDim, 1);\n const axis = outerShapeB === 1 ? 2 : 1;\n const b3D = outerShapeB === 1 ? b.as3D(batch, 1, sharedDim) : b;\n // TODO(annxingyuan): Call multiply directly as part of batchMatMul\n // modularization.\n const product = tf.mul(a3D, b3D);\n return product.sum(axis, true /* keepDims */);\n }\n const dtype = upcastType(a.dtype, b.dtype);\n const program = new MatMulPackedProgram(a.shape, b.shape, [batch, outerShapeA, outerShapeB], transposeA, transposeB);\n return this.compileAndRun(program, [a, b], dtype);\n }\n fusedBatchMatMul({ a, b, transposeA, transposeB, bias, activation, preluActivationWeights }) {\n const outerShapeA = transposeA ? a.shape[2] : a.shape[1];\n const outerShapeB = transposeB ? b.shape[1] : b.shape[2];\n const batch = Math.max(a.shape[0], b.shape[0]);\n const dtype = upcastType(a.dtype, b.dtype);\n const hasBias = bias != null;\n const hasPreluActivationWeights = preluActivationWeights != null;\n const fusedActivation = activation ? mapActivationToShaderProgram(activation, true) : null;\n const program = new MatMulPackedProgram(a.shape, b.shape, [batch, outerShapeA, outerShapeB], transposeA, transposeB, hasBias, fusedActivation, hasPreluActivationWeights);\n const inputs = [a, b];\n if (bias) {\n inputs.push(bias);\n }\n if (preluActivationWeights) {\n inputs.push(preluActivationWeights);\n }\n return this.compileAndRun(program, inputs, dtype);\n }\n localResponseNormalization4D(x, radius, bias, alpha, beta) {\n const program = env().getBool('WEBGL_PACK_NORMALIZATION') ?\n new LRNPackedProgram(x.shape, radius, bias, alpha, beta) :\n new LRNProgram(x.shape, radius, bias, alpha, beta);\n return this.compileAndRun(program, [x]);\n }\n LRNGrad(dy, inputImage, outputImage, depthRadius, bias, alpha, beta) {\n const program = new LRNGradProgram(inputImage.shape, depthRadius, bias, alpha, beta);\n return this.compileAndRun(program, [inputImage, outputImage, dy]);\n }\n tile(x, reps) {\n if (x.dtype === 'string') {\n const data = this.readSync(x.dataId);\n const decodedData = data.map(d => util.decodeString(d));\n const buf = buffer(x.shape, x.dtype, decodedData);\n return tile(buf, reps);\n }\n const program = new TileProgram(x.shape, reps);\n return this.compileAndRun(program, [x]);\n }\n pad(x, paddings, constantValue) {\n const program = env().getBool('WEBGL_PACK_ARRAY_OPERATIONS') ?\n new PadPackedProgram(x.shape, paddings, constantValue) :\n new PadProgram(x.shape, paddings, constantValue);\n return this.compileAndRun(program, [x]);\n }\n gather(x, indices, axis) {\n const cpuRes = this.tryRunOnCpuOrThrow([x, indices], () => this.cpuBackend.gather(x, indices, axis));\n if (cpuRes) {\n return cpuRes;\n }\n const program = new GatherProgram(x.shape, indices.size, axis);\n return this.compileAndRun(program, [x, indices]);\n }\n batchToSpaceND(x, blockShape, crops) {\n util.assert(x.rank <= 4, () => 'batchToSpaceND for rank > 4 with a WebGL backend not ' +\n 'implemented yet');\n const prod = blockShape.reduce((a, b) => a * b);\n const reshaped = backend_util.getReshaped(x.shape, blockShape, prod);\n const 
permuted = backend_util.getPermuted(reshaped.length, blockShape.length);\n const reshapedPermuted = backend_util.getReshapedPermuted(x.shape, blockShape, prod);\n const sliceBeginCoords = backend_util.getSliceBeginCoords(crops, blockShape.length);\n const sliceSize = backend_util.getSliceSize(reshapedPermuted, crops, blockShape.length);\n return transpose(x.reshape(reshaped), permuted)\n .reshape(reshapedPermuted)\n .slice(sliceBeginCoords, sliceSize);\n }\n spaceToBatchND(x, blockShape, paddings) {\n util.assert(x.rank <= 4, () => 'spaceToBatchND for rank > 4 with a WebGL backend not ' +\n 'implemented yet');\n const prod = blockShape.reduce((a, b) => a * b);\n const completePaddings = [[0, 0]];\n completePaddings.push(...paddings);\n for (let i = 1 + blockShape.length; i < x.shape.length; ++i) {\n completePaddings.push([0, 0]);\n }\n const paddedX = x.pad(completePaddings);\n const reshapedPaddedShape = backend_util.getReshaped(paddedX.shape, blockShape, prod, false);\n const permutedReshapedPaddedPermutation = backend_util.getPermuted(reshapedPaddedShape.length, blockShape.length, false);\n const flattenShape = backend_util.getReshapedPermuted(paddedX.shape, blockShape, prod, false);\n const paddedXT = transpose(paddedX.reshape(reshapedPaddedShape), permutedReshapedPaddedPermutation);\n return reshape(paddedXT, flattenShape);\n }\n reduce(x, reduceType, dtype) {\n const batchSize = x.shape[0];\n const inSize = x.shape[1];\n const windowSize = backend_util.computeOptimalWindowSize(inSize);\n const outSize = Math.ceil(inSize / windowSize);\n const reduceInfo = { windowSize, inSize, batchSize, outSize };\n const program = new ReduceProgram(reduceInfo, reduceType);\n const output = this.compileAndRun(program, [x], dtype);\n // No need to run another GPGPU program.\n if (output.shape[1] === 1) {\n return output;\n }\n return this.reduce(output, reduceType, dtype);\n }\n argReduce(x, reduceType, bestIndicesA = null) {\n let batchSize = x.shape[0];\n let inSize = x.shape[1];\n if (bestIndicesA != null) {\n batchSize = bestIndicesA.shape[0];\n inSize = bestIndicesA.shape[1];\n }\n const windowSize = backend_util.computeOptimalWindowSize(inSize);\n const reduceInfo = {\n windowSize,\n inSize,\n batchSize,\n outSize: Math.ceil(inSize / windowSize)\n };\n const program = new ArgMinMaxProgram(reduceInfo, reduceType, bestIndicesA == null);\n const inputs = [x];\n if (bestIndicesA != null) {\n inputs.push(bestIndicesA);\n }\n const output = this.compileAndRun(program, inputs, 'int32');\n // No need to run another GPGPU program.\n if (output.shape[1] === 1) {\n return output;\n }\n return this.argReduce(x, reduceType, output);\n }\n argReducePacked(x, reduceType, bestIndicesA = null) {\n const inShape = bestIndicesA != null ? bestIndicesA.shape : x.shape;\n const inSize = inShape[inShape.length - 1];\n const windowSize = backend_util.computeOptimalWindowSize(inSize);\n const program = new ArgMinMaxPackedProgram(inShape, windowSize, reduceType, bestIndicesA == null);\n const inputs = bestIndicesA == null ? 
[x] : [x, bestIndicesA];\n const output = this.compileAndRun(program, inputs, 'int32');\n if (output.rank === x.rank) {\n return this.argReducePacked(x, reduceType, output);\n }\n return output;\n }\n sum(x, axes) {\n backend_util.assertAxesAreInnerMostDims('sum', axes, x.rank);\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const inSize = util.sizeFromShape(reduceShape);\n const a2D = x.as2D(-1, inSize);\n const outputDType = tf.sumOutType(x.dtype);\n return this.reduce(a2D, 'sum', outputDType).reshape(outShape);\n }\n prod(x, axes) {\n const cpuRes = this.tryRunOnCpuOrThrow([x], () => this.cpuBackend.prod(x, axes));\n if (cpuRes) {\n return cpuRes;\n }\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const inSize = util.sizeFromShape(reduceShape);\n const a2D = x.as2D(-1, inSize);\n const outputDType = tf.sumOutType(x.dtype);\n return this.reduce(a2D, 'prod', outputDType).reshape(outShape);\n }\n unsortedSegmentSum(x, segmentIds, numSegments) {\n let axis = 0;\n const permutation = backend_util.getAxesPermutation([axis], x.rank);\n let permutedX = x;\n if (permutation != null) {\n permutedX = transpose(x, permutation);\n axis = backend_util.getInnerMostAxes(1, x.rank)[0];\n }\n const outShape = segment_util.computeOutShape(permutedX.shape, axis, numSegments);\n const inSize = util.sizeFromShape([permutedX.shape[axis]]);\n const a2D = permutedX.as2D(-1, inSize);\n const outputDType = tf.sumOutType(x.dtype);\n let result = this.segOpCompute(a2D, 'unsortedSegmentSum', segmentIds, outputDType, numSegments)\n .reshape(outShape);\n if (permutation != null) {\n result =\n transpose(result, backend_util.getUndoAxesPermutation(permutation));\n }\n return result;\n }\n segOpCompute(x, segOpType, segmentIds, dtype, numSegments) {\n const batchSize = x.shape[0];\n const inSize = x.shape[1];\n const windowSize = segment_util.segOpComputeOptimalWindowSize(inSize, numSegments);\n const segOpInfo = { windowSize, inSize, batchSize, numSegments };\n const program = new SegmentOpProgram(segOpInfo, segOpType);\n const output = this.compileAndRun(program, [x, segmentIds], dtype);\n // No need to run another GPGPU program.\n if (output.shape[1] === numSegments) {\n return output;\n }\n segmentIds = range(0, numSegments).tile([inSize / windowSize]);\n return this.segOpCompute(output, segOpType, segmentIds, dtype, numSegments);\n }\n argMinMaxReduce(x, axis, reduceType) {\n const axes = [axis];\n backend_util.assertAxesAreInnerMostDims('arg' + reduceType.charAt(0).toUpperCase() + reduceType.slice(1), axes, x.rank);\n if (!env().getBool('WEBGL_PACK_REDUCE') || x.rank <= 2) {\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const inSize = util.sizeFromShape(reduceShape);\n const a2D = x.as2D(-1, inSize);\n return this.argReduce(a2D, reduceType).reshape(outShape);\n }\n return this.argReducePacked(x, reduceType);\n }\n argMin(x, axis) {\n return this.argMinMaxReduce(x, axis, 'min');\n }\n argMax(x, axis) {\n return this.argMinMaxReduce(x, axis, 'max');\n }\n cumsum(x, axis, exclusive, reverse) {\n if (axis !== x.rank - 1) {\n throw new Error(`WebGL cumsum shader expects an inner-most axis=${x.rank - 1} ` +\n `but got axis=${axis}`);\n }\n const size = x.shape[axis];\n let result = x;\n // Use cumsum parallel algorithm, ref:\n // https://developer.nvidia.com/gpugems/gpugems3/part-vi-gpu-computing/chapter-39-parallel-prefix-sum-scan-cuda\n for (let i = 0; i <= Math.ceil(Math.log2(size)) - 
1; i++) {\n const program = new CumSumProgram(x.shape, false, reverse);\n const customSetup = program.getCustomSetupFunc(i);\n const prevResult = result;\n result = this.compileAndRun(program, [result], result.dtype, customSetup);\n prevResult.dispose();\n }\n // For exclusive cumsum, shift the end result in the direction of sum and\n // add 0 to the front index.\n if (exclusive) {\n const program = new CumSumProgram(x.shape, exclusive, reverse);\n const prevResult = result;\n result = this.compileAndRun(program, [result]);\n prevResult.dispose();\n }\n return result;\n }\n equal(a, b) {\n if (env().getBool('WEBGL_PACK_BINARY_OPERATIONS')) {\n return this.packedBinaryOp(a, b, binaryop_packed_gpu.EQUAL, 'bool');\n }\n const program = new BinaryOpProgram(binaryop_gpu.EQUAL, a.shape, b.shape);\n return this.compileAndRun(program, [a, b], 'bool');\n }\n less(a, b) {\n const cpuRes = this.tryRunOnCpuOrThrow([a, b], () => this.cpuBackend.less(a, b));\n if (cpuRes) {\n return cpuRes;\n }\n if (env().getBool('WEBGL_PACK_BINARY_OPERATIONS')) {\n return this.packedBinaryOp(a, b, binaryop_packed_gpu.LESS, 'bool');\n }\n const program = new BinaryOpProgram(binaryop_gpu.LESS, a.shape, b.shape);\n return this.compileAndRun(program, [a, b], 'bool');\n }\n lessEqual(a, b) {\n if (env().getBool('WEBGL_PACK_BINARY_OPERATIONS')) {\n return this.packedBinaryOp(a, b, binaryop_packed_gpu.LESS_EQUAL, 'bool');\n }\n const program = new BinaryOpProgram(binaryop_gpu.LESS_EQUAL, a.shape, b.shape);\n return this.compileAndRun(program, [a, b], 'bool');\n }\n greater(a, b) {\n const cpuRes = this.tryRunOnCpuOrThrow([a, b], () => this.cpuBackend.greater(a, b));\n if (cpuRes) {\n return cpuRes;\n }\n if (env().getBool('WEBGL_PACK_BINARY_OPERATIONS')) {\n return this.packedBinaryOp(a, b, binaryop_packed_gpu.GREATER, 'bool');\n }\n const program = new BinaryOpProgram(binaryop_gpu.GREATER, a.shape, b.shape);\n return this.compileAndRun(program, [a, b], 'bool');\n }\n greaterEqual(a, b) {\n if (env().getBool('WEBGL_PACK_BINARY_OPERATIONS')) {\n return this.packedBinaryOp(a, b, binaryop_packed_gpu.GREATER_EQUAL, 'bool');\n }\n const program = new BinaryOpProgram(binaryop_gpu.GREATER_EQUAL, a.shape, b.shape);\n return this.compileAndRun(program, [a, b], 'bool');\n }\n logicalNot(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.LOGICAL_NOT);\n return this.compileAndRun(program, [x]);\n }\n logicalAnd(a, b) {\n if (env().getBool('WEBGL_PACK_BINARY_OPERATIONS')) {\n return this.packedBinaryOp(a, b, binaryop_packed_gpu.LOGICAL_AND, 'bool');\n }\n const program = new BinaryOpProgram(binaryop_gpu.LOGICAL_AND, a.shape, b.shape);\n return this.compileAndRun(program, [a, b], 'bool');\n }\n logicalOr(a, b) {\n if (env().getBool('WEBGL_PACK_BINARY_OPERATIONS')) {\n return this.packedBinaryOp(a, b, binaryop_packed_gpu.LOGICAL_OR, 'bool');\n }\n const program = new BinaryOpProgram(binaryop_gpu.LOGICAL_OR, a.shape, b.shape);\n return this.compileAndRun(program, [a, b], 'bool');\n }\n select(condition, a, b) {\n const program = new SelectProgram(condition.rank, a.shape, a.rank);\n return this.compileAndRun(program, [condition, a, b], upcastType(a.dtype, b.dtype));\n }\n where(condition) {\n backend_util.warn('tf.where() in webgl locks the UI thread. 
' +\n 'Call tf.whereAsync() instead');\n const condVals = condition.dataSync();\n return whereImpl(condition.shape, condVals);\n }\n topk(x, k, sorted) {\n const xVals = x.dataSync();\n return topkImpl(xVals, x.shape, x.dtype, k, sorted);\n }\n min(x, axes) {\n backend_util.assertAxesAreInnerMostDims('min', axes, x.rank);\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const inSize = util.sizeFromShape(reduceShape);\n const a2D = x.as2D(-1, inSize);\n return this.reduce(a2D, 'min', a2D.dtype).reshape(outShape);\n }\n minimum(a, b) {\n const cpuRes = this.tryRunOnCpuOrThrow([a, b], () => this.cpuBackend.minimum(a, b));\n if (cpuRes) {\n return cpuRes;\n }\n const program = env().getBool('WEBGL_PACK_BINARY_OPERATIONS') ?\n new BinaryOpPackedProgram(binaryop_packed_gpu.MIN, a.shape, b.shape) :\n new BinaryOpProgram(binaryop_gpu.MIN, a.shape, b.shape);\n return this.compileAndRun(program, [a, b]);\n }\n mod(a, b) {\n const program = env().getBool('WEBGL_PACK_BINARY_OPERATIONS') ?\n new BinaryOpPackedProgram(binaryop_packed_gpu.MOD, a.shape, b.shape) :\n new BinaryOpProgram(binaryop_gpu.MOD, a.shape, b.shape);\n return this.compileAndRun(program, [a, b]);\n }\n maximum(a, b) {\n const cpuRes = this.tryRunOnCpuOrThrow([a, b], () => this.cpuBackend.maximum(a, b));\n if (cpuRes) {\n return cpuRes;\n }\n const program = env().getBool('WEBGL_PACK_BINARY_OPERATIONS') ?\n new BinaryOpPackedProgram(binaryop_packed_gpu.MAX, a.shape, b.shape) :\n new BinaryOpProgram(binaryop_gpu.MAX, a.shape, b.shape);\n return this.compileAndRun(program, [a, b]);\n }\n all(x, axes) {\n backend_util.assertAxesAreInnerMostDims('all', axes, x.rank);\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const inSize = util.sizeFromShape(reduceShape);\n const a2D = x.as2D(-1, inSize);\n return this.reduce(a2D, 'all', a2D.dtype).reshape(outShape);\n }\n any(x, axes) {\n backend_util.assertAxesAreInnerMostDims('any', axes, x.rank);\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const inSize = util.sizeFromShape(reduceShape);\n const a2D = x.as2D(-1, inSize);\n return this.reduce(a2D, 'any', a2D.dtype).reshape(outShape);\n }\n floorDiv(a, b) {\n const op = binaryop_gpu.INT_DIV;\n const outputDtype = 'int32';\n if (env().getBool('WEBGL_PACK_BINARY_OPERATIONS')) {\n return this.packedBinaryOp(a, b, binaryop_packed_gpu.INT_DIV, outputDtype);\n }\n const program = new BinaryOpProgram(op, a.shape, b.shape);\n return this.compileAndRun(program, [a, b], outputDtype);\n }\n packedUnaryOp(x, op, dtype) {\n const program = new UnaryOpPackedProgram(x.shape, op);\n return this.compileAndRun(program, [x], dtype);\n }\n packedBinaryOp(a, b, op, dtype, checkOutOfBounds = false) {\n const program = new BinaryOpPackedProgram(op, a.shape, b.shape, checkOutOfBounds);\n return this.compileAndRun(program, [a, b], dtype);\n }\n // Returns a TensorInfo with the complex shape and the dataId of the\n // underlying part. 
We need to do this because a reshaped complex tensor is\n // not reflected in its parts.\n makeComplexComponentTensorInfo(complexTensor, complexPart) {\n return {\n dataId: complexPart.dataId,\n dtype: complexPart.dtype,\n shape: complexTensor.shape\n };\n }\n addN(tensors) {\n if (tensors.length === 1) {\n return tensors[0];\n }\n // Limit the number of uploaded textures for optimization.\n if (tensors.length > env().get('WEBGL_MAX_TEXTURES_IN_SHADER')) {\n const midIndex = Math.floor(tensors.length / 2);\n const leftSide = this.addN(tensors.slice(0, midIndex));\n const rightSide = this.addN(tensors.slice(midIndex));\n return this.addN([leftSide, rightSide]);\n }\n const dtype = tensors.map(t => t.dtype).reduce((d1, d2) => upcastType(d1, d2));\n const shapes = tensors.map(t => t.shape);\n // We can make sure shapes are identical in op level.\n const usePackedOp = env().getBool('WEBGL_PACK');\n const program = usePackedOp ?\n new AddNPackedProgram(tensors[0].shape, shapes) :\n new AddNProgram(tensors[0].shape, shapes);\n return this.compileAndRun(program, tensors, dtype);\n }\n pow(a, b) {\n const usePackedOp = env().getBool('WEBGL_PACK_BINARY_OPERATIONS');\n const program = usePackedOp ?\n new BinaryOpPackedProgram(binaryop_packed_gpu.POW, a.shape, b.shape) :\n new BinaryOpProgram(binaryop_gpu.POW, a.shape, b.shape);\n const dtype = upcastType(a.dtype, b.dtype);\n return this.compileAndRun(program, [a, b], dtype);\n }\n ceil(x) {\n if (this.shouldExecuteOnCPU([x])) {\n const outValues = ceilImplCPU(this.texData.get(x.dataId).values, x.dtype);\n return this.makeOutput(x.shape, x.dtype, outValues);\n }\n if (env().getBool('WEBGL_PACK_UNARY_OPERATIONS')) {\n return this.packedUnaryOp(x, unary_op.CEIL, x.dtype);\n }\n const program = new UnaryOpProgram(x.shape, unary_op.CEIL);\n return this.compileAndRun(program, [x]);\n }\n floor(x) {\n if (this.shouldExecuteOnCPU([x])) {\n const outValues = floorImplCPU(this.texData.get(x.dataId).values, x.dtype);\n return this.makeOutput(x.shape, x.dtype, outValues);\n }\n if (env().getBool('WEBGL_PACK_UNARY_OPERATIONS')) {\n return this.packedUnaryOp(x, unary_op.FLOOR, x.dtype);\n }\n const program = new UnaryOpProgram(x.shape, unary_op.FLOOR);\n return this.compileAndRun(program, [x]);\n }\n sign(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.SIGN);\n return this.compileAndRun(program, [x]);\n }\n isNaN(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.IS_NAN);\n return this.compileAndRun(program, [x], 'bool');\n }\n isInf(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.IS_INF);\n return this.compileAndRun(program, [x], 'bool');\n }\n isFinite(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.IS_FINITE);\n return this.compileAndRun(program, [x], 'bool');\n }\n round(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.ROUND);\n return this.compileAndRun(program, [x]);\n }\n exp(x) {\n if (this.shouldExecuteOnCPU([x])) {\n const outValues = expImplCPU(this.texData.get(x.dataId).values, x.dtype);\n return this.makeOutput(x.shape, x.dtype, outValues);\n }\n if (env().getBool('WEBGL_PACK_UNARY_OPERATIONS')) {\n return this.packedUnaryOp(x, unary_op.EXP, x.dtype);\n }\n const program = new UnaryOpProgram(x.shape, unary_op.EXP);\n return this.compileAndRun(program, [x]);\n }\n expm1(x) {\n if (this.shouldExecuteOnCPU([x])) {\n const outValues = expm1ImplCPU(this.texData.get(x.dataId).values, x.dtype);\n return this.makeOutput(x.shape, x.dtype, outValues);\n }\n if 
(env().getBool('WEBGL_PACK_UNARY_OPERATIONS')) {\n return this.packedUnaryOp(x, unary_op.EXPM1, x.dtype);\n }\n const program = new UnaryOpProgram(x.shape, unary_op.EXPM1);\n return this.compileAndRun(program, [x]);\n }\n softmax(logits, dim) {\n const axes = util.parseAxisParam([dim], logits.shape);\n // TODO(annxingyuan): Call maxImpl rather than op as part of softmax kernel\n // modularization.\n const maxLogit = max(logits, axes);\n const expandedShape = backend_util.expandShapeToKeepDim(maxLogit.shape, axes);\n // TODO(annxingyuan): Call sub directly as part of softmax kernel\n // modularization.\n const a = tf.sub(logits, maxLogit.reshape(expandedShape));\n const b = this.exp(a);\n const sumExp = this.sum(b, axes).reshape(expandedShape);\n // TODO(annxingyuan): Call divImpl rather than op as part of softmax kernel\n // modularization.\n return div(b, sumExp);\n }\n log(x) {\n if (this.shouldExecuteOnCPU([x])) {\n const outValues = logImplCPU(this.texData.get(x.dataId).values, x.dtype);\n return this.makeOutput(x.shape, x.dtype, outValues);\n }\n if (env().getBool('WEBGL_PACK_UNARY_OPERATIONS')) {\n return this.packedUnaryOp(x, unary_packed_op.LOG, x.dtype);\n }\n const program = new UnaryOpProgram(x.shape, unary_op.LOG);\n return this.compileAndRun(program, [x]);\n }\n log1p(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.LOG1P);\n return this.compileAndRun(program, [x]);\n }\n sqrt(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.SQRT);\n return this.compileAndRun(program, [x]);\n }\n rsqrt(x) {\n if (this.shouldExecuteOnCPU([x])) {\n const outValues = rsqrtImplCPU(this.texData.get(x.dataId).values, x.dtype);\n return this.makeOutput(x.shape, x.dtype, outValues);\n }\n const program = new UnaryOpProgram(x.shape, unary_op.RSQRT);\n return this.compileAndRun(program, [x]);\n }\n reciprocal(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.RECIPROCAL);\n return this.compileAndRun(program, [x]);\n }\n relu(x) {\n let program;\n if (env().getBool('WEBGL_PACK')) {\n program = new UnaryOpPackedProgram(x.shape, unary_packed_op.RELU);\n }\n else {\n program = new UnaryOpProgram(x.shape, unary_op.RELU);\n }\n return this.compileAndRun(program, [x]);\n }\n relu6(x) {\n let program;\n if (env().getBool('WEBGL_PACK')) {\n program = new UnaryOpPackedProgram(x.shape, unary_packed_op.RELU6);\n }\n else {\n program = new UnaryOpProgram(x.shape, unary_op.RELU6);\n }\n return this.compileAndRun(program, [x]);\n }\n prelu(x, alpha) {\n const program = env().getBool('WEBGL_PACK_BINARY_OPERATIONS') ?\n new BinaryOpPackedProgram(binaryop_packed_gpu.PRELU, x.shape, alpha.shape) :\n new BinaryOpProgram(binaryop_gpu.PRELU, x.shape, alpha.shape);\n return this.compileAndRun(program, [x, alpha]);\n }\n elu(x) {\n if (env().getBool('WEBGL_PACK_UNARY_OPERATIONS')) {\n return this.packedUnaryOp(x, unary_packed_op.ELU, x.dtype);\n }\n const program = new UnaryOpProgram(x.shape, unary_op.ELU);\n return this.compileAndRun(program, [x]);\n }\n eluDer(dy, y) {\n const program = env().getBool('WEBGL_PACK_BINARY_OPERATIONS') ?\n new BinaryOpPackedProgram(binaryop_packed_gpu.ELU_DER, dy.shape, y.shape) :\n new BinaryOpProgram(binaryop_gpu.ELU_DER, dy.shape, y.shape);\n return this.compileAndRun(program, [dy, y]);\n }\n selu(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.SELU);\n return this.compileAndRun(program, [x]);\n }\n clip(x, min, max) {\n let program;\n if (env().getBool('WEBGL_PACK_CLIP')) {\n program = new ClipPackedProgram(x.shape);\n }\n else {\n program = new 
ClipProgram(x.shape);\n }\n const customSetup = program.getCustomSetupFunc(min, max);\n return this.compileAndRun(program, [x], null, customSetup);\n }\n abs(x) {\n // TODO: handle cases when x is complex.\n if (this.shouldExecuteOnCPU([x]) && x.dtype !== 'complex64') {\n const outValues = simpleAbsImplCPU(this.texData.get(x.dataId).values);\n return this.makeOutput(x.shape, x.dtype, outValues);\n }\n if (env().getBool('WEBGL_PACK_UNARY_OPERATIONS')) {\n return this.packedUnaryOp(x, unary_op.ABS, x.dtype);\n }\n const program = new UnaryOpProgram(x.shape, unary_op.ABS);\n return this.compileAndRun(program, [x]);\n }\n complexAbs(x) {\n const xData = this.texData.get(x.dataId);\n const program = new ComplexAbsProgram(x.shape);\n const inputs = [\n this.makeComplexComponentTensorInfo(x, xData.complexTensorInfos.real),\n this.makeComplexComponentTensorInfo(x, xData.complexTensorInfos.imag),\n ];\n return this.compileAndRun(program, inputs);\n }\n sigmoid(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.SIGMOID);\n return this.compileAndRun(program, [x]);\n }\n softplus(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.SOFTPLUS);\n return this.compileAndRun(program, [x]);\n }\n asin(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.ASIN);\n return this.compileAndRun(program, [x]);\n }\n acos(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.ACOS);\n return this.compileAndRun(program, [x]);\n }\n atan(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.ATAN);\n return this.compileAndRun(program, [x]);\n }\n sinh(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.SINH);\n return this.compileAndRun(program, [x]);\n }\n cosh(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.COSH);\n return this.compileAndRun(program, [x]);\n }\n tanh(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.TANH);\n return this.compileAndRun(program, [x]);\n }\n asinh(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.ASINH);\n return this.compileAndRun(program, [x]);\n }\n acosh(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.ACOSH);\n return this.compileAndRun(program, [x]);\n }\n atanh(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.ATANH);\n return this.compileAndRun(program, [x]);\n }\n erf(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.ERF);\n return this.compileAndRun(program, [x]);\n }\n step(x, alpha) {\n const program = new UnaryOpProgram(x.shape, unary_op.STEP(alpha));\n return this.compileAndRun(program, [x]);\n }\n conv2dByMatMul(x, filter, convInfo, bias, activation, preluActivationWeights) {\n // Reshapes conv2D input to 2D tensors, uses matMul and then reshape the\n // result from 2D to 4D.\n const xShape = x.shape;\n const xTexData = this.texData.get(x.dataId);\n const sharedMatMulDim = convInfo.inChannels;\n const outerShapeX = xShape[0] * xShape[1] * xShape[2];\n const outerShapeFilter = convInfo.outChannels;\n const isChannelsLast = convInfo.dataFormat === 'channelsLast';\n const transposeA = false;\n const transposeB = false;\n // TODO: Once reduction ops are packed, batchMatMul will always be packed\n // and we can remove this condition.\n const batchMatMulWillBeUnpacked = (outerShapeX === 1 || outerShapeFilter === 1) &&\n sharedMatMulDim > MATMUL_SHARED_DIM_THRESHOLD;\n const reshapeWillBeExpensive = xShape[2] % 2 !== 0 && !!xTexData.isPacked;\n if (batchMatMulWillBeUnpacked || !env().getBool('WEBGL_LAZILY_UNPACK') ||\n !env().getBool('WEBGL_PACK_BINARY_OPERATIONS') ||\n 
!reshapeWillBeExpensive) {\n const targetShape = isChannelsLast ? xShape[0] * xShape[1] * xShape[2] :\n xShape[0] * xShape[2] * xShape[3];\n const xReshaped = reshape(x, [1, targetShape, convInfo.inChannels]);\n const filterReshaped = reshape(filter, [1, convInfo.inChannels, convInfo.outChannels]);\n const result = this.fusedBatchMatMul({\n a: xReshaped,\n b: filterReshaped,\n transposeA,\n transposeB,\n bias,\n activation,\n preluActivationWeights\n });\n return reshape(result, convInfo.outShape);\n }\n // Following optimization is specific to packed |x| with odd row count\n // (For example, in channelLast mode, 'row count' refers to x.shape[2]):\n // we avoid expensive packed 2x2 reshape by padding row count to next,\n // even number. When x.shape[2] is odd, the result of packed batchMatMul is\n // the same (has the same texture layout and and values in the texture) as\n // it is for even x.shape[2] + 1. We make the odd-rows tensor to look like\n // even-rows tensor before the operation and, after the batchMatMul,\n // fix the even-rows result to have odd number of rows.\n const targetShape = isChannelsLast ?\n xShape[0] * xShape[1] * (xShape[2] + 1) :\n xShape[0] * xShape[2] * (xShape[3] + 1);\n const xReshaped = {\n dataId: x.dataId,\n shape: [1, targetShape, convInfo.inChannels],\n dtype: x.dtype\n };\n // xTexData.shape gets referenced from GPGPUBinary.inShapeInfos.\n // Decrementing row count, after batchMatMul->...->compileProgram leads to\n // invalid row count within the reference in GPGPUBinary.inShapeInfos.\n // Alternative fix would be to provide a copy to GPGPUBinary.inShapeInfos\n // in compileProgram method, but that would affect compilation of all\n // programs - instead, provide a copy here, with even row count, before\n // calling batchMatMul->...->compileProgram and after that, the original\n // xTexData.shape is restored.\n const originalXTexDataShape = xTexData.shape;\n xTexData.shape = xTexData.shape.slice();\n xTexData.shape[xTexData.shape.length - 2]++;\n util.assert(webgl_util.isReshapeFree(xTexData.shape, xReshaped.shape), () => `packed reshape ${xTexData.shape} to ${xReshaped.shape} isn't free`);\n const filterReshaped = reshape(filter, [1, convInfo.inChannels, convInfo.outChannels]);\n const pointwiseConv = this.fusedBatchMatMul({\n a: xReshaped,\n b: filterReshaped,\n transposeA,\n transposeB,\n bias,\n activation,\n preluActivationWeights\n });\n const pointwiseConvTexData = this.texData.get(pointwiseConv.dataId);\n util.assert(pointwiseConvTexData.isPacked, () => 'batchMatMul result is expected to be packed');\n // Restore the input shape to original.\n xTexData.shape = originalXTexDataShape;\n // Set the output shape - there is no need for expensive reshape as data\n // layout is already correct.\n pointwiseConvTexData.shape = convInfo.outShape;\n return engine().makeTensorFromDataId(pointwiseConv.dataId, convInfo.outShape, pointwiseConv.dtype);\n }\n conv2dWithIm2Row(x, filter, convInfo, bias, activation, preluActivationWeights) {\n // Rearranges conv2d input so each block to be convolved over forms the\n // column of a new matrix with shape [filterWidth * filterHeight *\n // inChannels, outHeight * outWidth]. The filter is also rearranged so each\n // output channel forms a row of a new matrix with shape [outChannels,\n // filterWidth * filterHeight * inChannels]. 
The convolution is then\n // computed by multiplying these matrices and reshaping the result.\n const { filterWidth, filterHeight, inChannels, outWidth, outHeight, dataFormat } = convInfo;\n const isChannelsLast = dataFormat === 'channelsLast';\n const sharedDim = filterWidth * filterHeight * inChannels;\n const numCols = outHeight * outWidth;\n const x2ColShape = [sharedDim, numCols];\n const transposeA = true;\n const transposeB = false;\n const xSqueezed = x.squeeze([0]);\n const w2Row = filter.reshape([1, sharedDim, -1]);\n const im2ColProgram = new Im2ColPackedProgram(x2ColShape, xSqueezed.shape, convInfo);\n const im2Col = this.compileAndRun(im2ColProgram, [xSqueezed]).reshape([\n 1, x2ColShape[0], x2ColShape[1]\n ]);\n const hasBias = bias != null;\n const hasPreluActivationWeights = preluActivationWeights != null;\n const fusedActivation = activation ? mapActivationToShaderProgram(activation, true) : null;\n const matmulProgram = new MatMulPackedProgram(im2Col.shape, w2Row.shape, [1, numCols, convInfo.outChannels], transposeA, transposeB, hasBias, fusedActivation, hasPreluActivationWeights);\n const inputs = [im2Col, w2Row];\n if (bias) {\n inputs.push(bias);\n }\n if (hasPreluActivationWeights) {\n inputs.push(preluActivationWeights);\n }\n const product = this.compileAndRun(matmulProgram, inputs);\n if (isChannelsLast) {\n return product.reshape([1, outHeight, outWidth, convInfo.outChannels]);\n }\n else {\n return product.reshape([1, convInfo.outChannels, outHeight, outWidth]);\n }\n }\n fusedConv2d({ input, filter, convInfo, bias, activation, preluActivationWeights }) {\n if (convInfo.filterHeight === 1 && convInfo.filterWidth === 1 &&\n convInfo.dilationHeight === 1 && convInfo.dilationWidth === 1 &&\n convInfo.strideHeight === 1 && convInfo.strideWidth === 1 &&\n (convInfo.padInfo.type === 'SAME' ||\n convInfo.padInfo.type === 'VALID')) {\n return this.conv2dByMatMul(input, filter, convInfo, bias, activation, preluActivationWeights);\n }\n if (env().getBool('WEBGL_CONV_IM2COL') && input.shape[0] === 1) {\n return this.conv2dWithIm2Row(input, filter, convInfo, bias, activation, preluActivationWeights);\n }\n const hasBias = bias != null;\n const hasPreluActivationWeights = preluActivationWeights != null;\n const fusedActivation = activation ? 
mapActivationToShaderProgram(activation, false) : null;\n const program = new Conv2DProgram(convInfo, hasBias, fusedActivation, hasPreluActivationWeights);\n const inputs = [input, filter];\n if (bias) {\n inputs.push(bias);\n }\n if (preluActivationWeights) {\n inputs.push(preluActivationWeights);\n }\n return this.compileAndRun(program, inputs);\n }\n conv2d(x, filter, convInfo) {\n if (convInfo.filterHeight === 1 && convInfo.filterWidth === 1 &&\n convInfo.dilationHeight === 1 && convInfo.dilationWidth === 1 &&\n convInfo.strideHeight === 1 && convInfo.strideWidth === 1 &&\n (convInfo.padInfo.type === 'SAME' ||\n convInfo.padInfo.type === 'VALID')) {\n return this.conv2dByMatMul(x, filter, convInfo);\n }\n if (env().getBool('WEBGL_CONV_IM2COL') && x.shape[0] === 1) {\n return this.conv2dWithIm2Row(x, filter, convInfo);\n }\n const program = new Conv2DProgram(convInfo);\n return this.compileAndRun(program, [x, filter]);\n }\n conv2dDerInput(dy, filter, convInfo) {\n const program = new Conv2DDerInputProgram(convInfo);\n return this.compileAndRun(program, [dy, filter]);\n }\n conv2dDerFilter(x, dy, convInfo) {\n const program = new Conv2DDerFilterProgram(convInfo);\n return this.compileAndRun(program, [x, dy]);\n }\n fusedDepthwiseConv2D({ input, filter, convInfo, bias, activation, preluActivationWeights }) {\n const shouldPackDepthwiseConv = env().getBool('WEBGL_PACK_DEPTHWISECONV') &&\n convInfo.strideWidth <= 2 &&\n convInfo.outChannels / convInfo.inChannels === 1;\n const fusedActivation = activation ?\n mapActivationToShaderProgram(activation, shouldPackDepthwiseConv) :\n null;\n const inputs = [input, filter];\n const hasBias = bias != null;\n const hasPreluActivationWeights = preluActivationWeights != null;\n if (hasBias) {\n inputs.push(bias);\n }\n if (hasPreluActivationWeights) {\n inputs.push(preluActivationWeights);\n }\n let program;\n if (shouldPackDepthwiseConv) {\n program = new DepthwiseConvPacked2DProgram(convInfo, hasBias, fusedActivation, hasPreluActivationWeights);\n return this.compileAndRun(program, inputs);\n }\n program = new DepthwiseConv2DProgram(convInfo, hasBias, fusedActivation, hasPreluActivationWeights);\n return this.compileAndRun(program, inputs);\n }\n depthwiseConv2D(x, filter, convInfo) {\n let program;\n if (env().getBool('WEBGL_PACK_DEPTHWISECONV') &&\n convInfo.strideWidth <= 2 &&\n convInfo.outChannels / convInfo.inChannels === 1) {\n program = new DepthwiseConvPacked2DProgram(convInfo);\n return this.compileAndRun(program, [x, filter]);\n }\n program = new DepthwiseConv2DProgram(convInfo);\n return this.compileAndRun(program, [x, filter]);\n }\n depthwiseConv2DDerInput(dy, filter, convInfo) {\n const program = new DepthwiseConv2DDerInputProgram(convInfo);\n return this.compileAndRun(program, [dy, filter]);\n }\n depthwiseConv2DDerFilter(x, dy, convInfo) {\n const program = new DepthwiseConv2DDerFilterProgram(convInfo);\n return this.compileAndRun(program, [x, dy]);\n }\n conv3d(x, filter, convInfo) {\n const program = new Conv3DProgram(convInfo);\n return this.compileAndRun(program, [x, filter]);\n }\n conv3dDerInput(dy, filter, convInfo) {\n const program = new Conv3DDerInputProgram(convInfo);\n return this.compileAndRun(program, [dy, filter]);\n }\n conv3dDerFilter(x, dy, convInfo) {\n const program = new Conv3DDerFilterProgram(convInfo);\n return this.compileAndRun(program, [x, dy]);\n }\n unstack(x, axis) {\n const num = x.shape[axis];\n const outShape = new Array(x.rank - 1);\n let outIndex = 0;\n for (let i = 0; i < x.rank; i++) {\n if (i 
!== axis) {\n outShape[outIndex++] = x.shape[i];\n }\n }\n const begin = new Array(x.rank).fill(0);\n const size = x.shape.slice();\n size[axis] = 1;\n const res = new Array(num);\n for (let i = 0; i < res.length; i++) {\n begin[axis] = i;\n res[i] = this.slice(x, begin, size).reshape(outShape);\n }\n return res;\n }\n avgPool3d(x, convInfo) {\n const program = new Pool3DProgram(convInfo, 'avg', false);\n return this.compileAndRun(program, [x], 'float32');\n }\n avgPool3dBackprop(dy, x, convInfo) {\n const avgPool3dBackpropProgram = new AvgPool3DBackpropProgram(convInfo);\n return this.compileAndRun(avgPool3dBackpropProgram, [dy], x.dtype);\n }\n maxPool3d(x, convInfo) {\n const program = new Pool3DProgram(convInfo, 'max', false);\n return this.compileAndRun(program, [x], 'float32');\n }\n maxPool3dBackprop(dy, x, y, convInfo) {\n const getPositions = true;\n const maxPool3dPositionsProgram = new Pool3DProgram(convInfo, 'max', getPositions);\n const maxPool3dPositions = this.compileAndRun(maxPool3dPositionsProgram, [x]);\n const maxPool3dBackPropProgram = new MaxPool3DBackpropProgram(convInfo);\n const result = this.compileAndRun(maxPool3dBackPropProgram, [dy, maxPool3dPositions], x.dtype);\n maxPool3dPositions.dispose();\n return result;\n }\n resizeBilinear(x, newHeight, newWidth, alignCorners) {\n const program = env().getBool('WEBGL_PACK_IMAGE_OPERATIONS') ?\n new ResizeBilinearPackedProgram(x.shape, newHeight, newWidth, alignCorners) :\n new ResizeBilinearProgram(x.shape, newHeight, newWidth, alignCorners);\n return this.compileAndRun(program, [x], 'float32');\n }\n resizeBilinearBackprop(dy, x, alignCorners) {\n const program = new ResizeBilinearBackpropProgram(dy, x, alignCorners);\n return this.compileAndRun(program, [dy]);\n }\n resizeNearestNeighbor(x, newHeight, newWidth, alignCorners) {\n const program = new ResizeNearestNeighborProgram(x.shape, newHeight, newWidth, alignCorners);\n return this.compileAndRun(program, [x]);\n }\n resizeNearestNeighborBackprop(dy, x, alignCorners) {\n const program = new ResizeNearestNeigborBackpropProgram(dy, x, alignCorners);\n return this.compileAndRun(program, [dy]);\n }\n multinomial(logits, normalized, numSamples, seed) {\n const probs = normalized ? logits : softmax(logits);\n const batchSize = probs.shape[0];\n const numOutcomes = probs.shape[1];\n const program = new MultinomialProgram(batchSize, numOutcomes, numSamples);\n const customSetup = program.getCustomSetupFunc(seed);\n return this.compileAndRun(program, [probs], 'int32', customSetup);\n }\n oneHot(indices, depth, onValue, offValue) {\n const program = new OneHotProgram(indices.size, depth, onValue, offValue);\n return this.compileAndRun(program, [indices]);\n }\n diag(x) {\n const program = new DiagProgram(x.size);\n return this.compileAndRun(program, [x]);\n }\n cropAndResize(image, boxes, boxIndex, cropSize, method, extrapolationValue) {\n const program = new CropAndResizeProgram(image.shape, boxes.shape, cropSize, method, extrapolationValue);\n return this.compileAndRun(program, [image, boxes, boxIndex], 'float32');\n }\n depthToSpace(x, blockSize, dataFormat) {\n util.assert(blockSize > 1, () => `blockSize should be > 1 for depthToSpace, but was: ${blockSize}`);\n const batchSize = x.shape[0];\n const inputHeight = (dataFormat === 'NHWC') ? x.shape[1] : x.shape[2];\n const inputWidth = (dataFormat === 'NHWC') ? x.shape[2] : x.shape[3];\n const inputDepth = (dataFormat === 'NHWC') ? 
x.shape[3] : x.shape[1];\n const outputHeight = inputHeight * blockSize;\n const outputWidth = inputWidth * blockSize;\n const outputDepth = inputDepth / (blockSize * blockSize);\n const outputShape = (dataFormat === 'NHWC') ?\n [batchSize, outputHeight, outputWidth, outputDepth] :\n [batchSize, outputDepth, outputHeight, outputWidth];\n const program = new DepthToSpaceProgram(outputShape, blockSize, dataFormat);\n return this.compileAndRun(program, [x]);\n }\n split(x, sizeSplits, axis) {\n return split(x, sizeSplits, axis);\n }\n scatterND(indices, updates, shape) {\n const { sliceRank, numUpdates, sliceSize, strides, outputSize } = backend_util.calculateShapes(updates, indices, shape);\n const flattenShape = [outputSize / sliceSize, sliceSize];\n const flattenIndices = indices.reshape([numUpdates, sliceRank]);\n const flattenX = updates.reshape([numUpdates, sliceSize]);\n if (outputSize === 0) {\n return backend_util.reshapeTensor(tensor([]), shape);\n }\n const defaultValue = scalar(0);\n const program = new ScatterProgram(numUpdates, sliceRank, flattenIndices.rank, flattenX.rank, strides, flattenShape);\n const res = this.compileAndRun(program, [flattenX, flattenIndices, defaultValue]);\n return res.reshape(shape);\n }\n sparseToDense(sparseIndices, sparseValues, outputShape, defaultValue) {\n const { sliceRank, numUpdates, strides, outputSize } = backend_util.calculateShapes(sparseValues, sparseIndices, outputShape);\n const sumDupeIndices = false;\n const program = new ScatterProgram(numUpdates, sliceRank, sparseIndices.rank, sparseValues.rank, strides, [outputSize, 1], sumDupeIndices);\n const res = this.compileAndRun(program, [sparseValues, sparseIndices, defaultValue]);\n return res.reshape(outputShape);\n }\n gatherND(x, indices) {\n const indicesShape = indices.shape;\n const sliceRank = indicesShape[indicesShape.length - 1];\n const [resultShape, numSlices, sliceSize, strides] = backend_util.prepareAndValidate(x, indices);\n const flattenIndices = indices.reshape([numSlices, sliceRank]);\n const flattenX = x.reshape([x.size / sliceSize, sliceSize]);\n const program = new GatherNDProgram(sliceRank, strides, [numSlices, sliceSize]);\n const res = this.compileAndRun(program, [flattenX, flattenIndices]);\n return res.reshape(resultShape);\n }\n fill(shape, value, dtype) {\n dtype = dtype || util.inferDtype(value);\n if (dtype === 'string') {\n // String type should be handled in CPU memory.\n const values = util.getArrayFromDType(dtype, util.sizeFromShape(shape));\n values.fill(value);\n return engine().makeTensor(values, shape, dtype, this);\n }\n else {\n const program = new FillProgram(shape, value);\n const customSetup = program.getCustomSetupFunc(value);\n return this.compileAndRun(program, [], dtype, customSetup);\n }\n }\n onesLike(x) {\n if (x.dtype === 'string') {\n throw new Error('onesLike is not supported under string dtype');\n }\n else {\n // TODO(cais, smilkov): Add WebGL shader for onesLike:\n // https://github.com/tensorflow/tfjs/issues/1293\n return this.fill(x.shape, 1, x.dtype);\n }\n }\n zerosLike(x) {\n return this.fill(x.shape, x.dtype === 'string' ? 
'' : 0, x.dtype);\n }\n linspace(start, stop, num) {\n // TODO: Use CPU implementation due to the precision problem in Safari.\n return backend_util.linspaceImpl(start, stop, num);\n }\n makeTensorInfo(shape, dtype, values) {\n const dataId = this.write(values, shape, dtype);\n this.texData.get(dataId).usage = null;\n return { dataId, shape, dtype };\n }\n makeOutput(shape, dtype, values) {\n const { dataId } = this.makeTensorInfo(shape, dtype, values);\n return engine().makeTensorFromDataId(dataId, shape, dtype, this);\n }\n unpackTensor(input) {\n const program = new UnpackProgram(input.shape);\n return this.runWebGLProgram(program, [input], input.dtype);\n }\n packTensor(input) {\n const program = new PackProgram(input.shape);\n const preventEagerUnpackingOutput = true;\n return this.runWebGLProgram(program, [input], input.dtype, null /* customSetup */, preventEagerUnpackingOutput);\n }\n packedReshape(input, afterShape) {\n const input3DShape = [\n webgl_util.getBatchDim(input.shape),\n ...webgl_util.getRowsCols(input.shape)\n ];\n const input3D = {\n dtype: input.dtype,\n shape: input3DShape,\n dataId: input.dataId\n };\n const afterShapeAs3D = [\n webgl_util.getBatchDim(afterShape), ...webgl_util.getRowsCols(afterShape)\n ];\n const program = new ReshapePackedProgram(afterShapeAs3D, input3DShape);\n const preventEagerUnpackingOfOutput = true;\n const output = this.runWebGLProgram(program, [input3D], input.dtype, null /* customSetup */, preventEagerUnpackingOfOutput);\n return { dataId: output.dataId, shape: afterShape, dtype: output.dtype };\n }\n decode(dataId) {\n const texData = this.texData.get(dataId);\n const { isPacked, shape, dtype } = texData;\n const shapeAs3D = webgl_util.getShapeAs3D(shape);\n let program;\n if (isPacked) {\n program = new DecodeMatrixPackedProgram(shapeAs3D);\n }\n else {\n program = new DecodeMatrixProgram(shapeAs3D);\n }\n const preventEagerUnpackingOfOutput = true;\n const out = this.runWebGLProgram(program, [{ shape: shapeAs3D, dtype, dataId }], dtype, null /* customSetup */, preventEagerUnpackingOfOutput);\n return { dtype, shape, dataId: out.dataId };\n }\n runWebGLProgram(program, inputs, outputDtype, customSetup, preventEagerUnpackingOfOutput = false) {\n const output = this.makeTensorInfo(program.outputShape, outputDtype);\n const outData = this.texData.get(output.dataId);\n if (program.packedOutput) {\n outData.isPacked = true;\n }\n if (program.outPackingScheme === tex_util.PackingScheme.DENSE) {\n const texelShape = tex_util.getDenseTexShape(program.outputShape);\n // For a densely packed output, we explicitly set texShape\n // so it doesn't get assigned later according to our typical packing\n // scheme wherein a single texel can only contain values from adjacent\n // rows/cols.\n outData.texShape = texelShape.map(d => d * 2);\n }\n if (program.outTexUsage != null) {\n outData.usage = program.outTexUsage;\n }\n if (util.sizeFromShape(output.shape) === 0) {\n // Short-circuit the computation since the result is empty (has 0 in its\n // shape).\n outData.values =\n util.getTypedArrayFromDType(output.dtype, 0);\n return output;\n }\n const dataToDispose = [];\n const inputsData = inputs.map(input => {\n if (input.dtype === 'complex64') {\n throw new Error(`GPGPUProgram does not support complex64 input. 
For complex64 ` +\n `dtypes, please separate the program into real and imaginary ` +\n `parts.`);\n }\n let texData = this.texData.get(input.dataId);\n if (texData.texture == null) {\n if (!program.packedInputs &&\n util.sizeFromShape(input.shape) <=\n env().getNumber('WEBGL_SIZE_UPLOAD_UNIFORM')) {\n // Upload small tensors that live on the CPU as uniforms, not as\n // textures. Do this only when the environment supports 32bit floats\n // due to problems when comparing 16bit floats with 32bit floats.\n // TODO(https://github.com/tensorflow/tfjs/issues/821): Make it\n // possible for packed shaders to sample from uniforms.\n return {\n shape: input.shape,\n texData: null,\n isUniform: true,\n uniformValues: texData.values\n };\n }\n // This ensures that if a packed program's inputs have not yet been\n // uploaded to the GPU, they get uploaded as packed right off the bat.\n if (program.packedInputs) {\n texData.isPacked = true;\n texData.shape = input.shape;\n }\n }\n else if (!!texData.isPacked !== !!program.packedInputs) {\n input = texData.isPacked ? this.unpackTensor(input) :\n this.packTensor(input);\n dataToDispose.push(input);\n texData = this.texData.get(input.dataId);\n }\n else if (texData.isPacked &&\n !webgl_util.isReshapeFree(texData.shape, input.shape)) {\n // This is a special case where a texture exists for a tensor\n // but the shapes are incompatible (due to packing constraints) because\n // the tensor did not have a chance to go through the packed reshape\n // shader. This only happens when we reshape the *same* tensor to form\n // *distinct* inputs to an op, e.g. dotting a vector with itself. This\n // case will disappear once packed uploading is the default.\n const savedInput = input;\n const targetShape = input.shape;\n input.shape = texData.shape;\n input = this.packedReshape(input, targetShape);\n dataToDispose.push(input);\n texData = this.texData.get(input.dataId);\n savedInput.shape = targetShape;\n }\n this.uploadToGPU(input.dataId);\n return { shape: input.shape, texData, isUniform: false };\n });\n this.uploadToGPU(output.dataId);\n const outputData = { shape: output.shape, texData: outData, isUniform: false };\n const key = gpgpu_math.makeShaderKey(program, inputsData, outputData);\n const binary = this.getAndSaveBinary(key, () => {\n return gpgpu_math.compileProgram(this.gpgpu, program, inputsData, outputData);\n });\n const shouldTimeProgram = this.activeTimers != null;\n let query;\n if (shouldTimeProgram) {\n query = this.startTimer();\n }\n gpgpu_math.runProgram(this.gpgpu, binary, inputsData, outputData, customSetup);\n dataToDispose.forEach(info => this.disposeIntermediateTensorInfo(info));\n if (shouldTimeProgram) {\n query = this.endTimer(query);\n this.activeTimers.push({ name: program.constructor.name, query: this.getQueryTime(query) });\n }\n if (!env().getBool('WEBGL_LAZILY_UNPACK') && outData.isPacked &&\n preventEagerUnpackingOfOutput === false) {\n const unpacked = this.unpackTensor(output);\n this.disposeIntermediateTensorInfo(output);\n return unpacked;\n }\n return output;\n }\n compileAndRun(program, inputs, outputDtype, customSetup, preventEagerUnpackingOfOutput = false) {\n outputDtype = outputDtype || inputs[0].dtype;\n const outInfo = this.runWebGLProgram(program, inputs, outputDtype, customSetup, preventEagerUnpackingOfOutput);\n return engine().makeTensorFromDataId(outInfo.dataId, outInfo.shape, outInfo.dtype);\n }\n getAndSaveBinary(key, getBinary) {\n if (!(key in this.binaryCache)) {\n this.binaryCache[key] = getBinary();\n }\n 
return this.binaryCache[key];\n }\n getTextureManager() {\n return this.textureManager;\n }\n dispose() {\n if (this.disposed) {\n return;\n }\n // Avoid disposing the compiled webgl programs during unit testing because\n // it slows down test execution.\n if (!env().getBool('IS_TEST')) {\n const allKeys = Object.keys(this.binaryCache);\n allKeys.forEach(key => {\n this.gpgpu.deleteProgram(this.binaryCache[key].webGLProgram);\n delete this.binaryCache[key];\n });\n }\n this.textureManager.dispose();\n if (this.canvas != null &&\n (typeof (HTMLCanvasElement) !== 'undefined' &&\n this.canvas instanceof HTMLCanvasElement)) {\n this.canvas.remove();\n }\n else {\n this.canvas = null;\n }\n if (this.gpgpuCreatedLocally) {\n this.gpgpu.program = null;\n this.gpgpu.dispose();\n }\n this.disposed = true;\n }\n floatPrecision() {\n if (this.floatPrecisionValue == null) {\n this.floatPrecisionValue = tidy(() => {\n if (!env().get('WEBGL_RENDER_FLOAT32_ENABLED')) {\n // Momentarily switching DEBUG flag to false so we don't throw an\n // error trying to upload a small value.\n const debugFlag = env().getBool('DEBUG');\n env().set('DEBUG', false);\n const underflowCheckValue = this.abs(scalar(1e-8)).dataSync()[0];\n env().set('DEBUG', debugFlag);\n if (underflowCheckValue > 0) {\n return 32;\n }\n }\n return 16;\n });\n }\n return this.floatPrecisionValue;\n }\n /** Returns the smallest representable number. */\n epsilon() {\n return this.floatPrecision() === 32 ? EPSILON_FLOAT32 : EPSILON_FLOAT16;\n }\n uploadToGPU(dataId) {\n const texData = this.texData.get(dataId);\n const { shape, dtype, values, texture, usage, isPacked } = texData;\n if (texture != null) {\n // Array is already on GPU. No-op.\n return;\n }\n const shouldTimeProgram = this.activeTimers != null;\n let start;\n if (shouldTimeProgram) {\n start = util.now();\n }\n let texShape = texData.texShape;\n if (texShape == null) {\n texShape = webgl_util.getTextureShapeFromLogicalShape(shape, isPacked);\n texData.texShape = texShape;\n }\n if (values != null) {\n const shapeAs3D = webgl_util.getShapeAs3D(shape);\n let program;\n let width = texShape[1], height = texShape[0];\n const isByteArray = values instanceof Uint8Array;\n if (isPacked) {\n [width, height] = tex_util.getPackedMatrixTextureShapeWidthHeight(texShape[0], texShape[1]);\n program = new EncodeMatrixPackedProgram(shapeAs3D, [height, width], isByteArray);\n }\n else {\n program =\n new EncodeMatrixProgram(shapeAs3D, [height, width], isByteArray);\n }\n const tempDenseInputHandle = this.makeTensorInfo([height, width], dtype);\n if (isByteArray) {\n this.texData.get(tempDenseInputHandle.dataId).usage =\n TextureUsage.PIXELS;\n }\n else {\n this.texData.get(tempDenseInputHandle.dataId).usage =\n TextureUsage.UPLOAD;\n }\n this.gpgpu.uploadDenseMatrixToTexture(this.getTexture(tempDenseInputHandle.dataId), width, height, values);\n // We want the output to remain packed regardless of the value of\n // WEBGL_PACK.\n const preventEagerUnpacking = true;\n const encodedOutputTarget = this.runWebGLProgram(program, [tempDenseInputHandle], dtype, null, preventEagerUnpacking);\n // Have the original texture assume the identity of the encoded output.\n const outputTexData = this.texData.get(encodedOutputTarget.dataId);\n texData.texture = outputTexData.texture;\n texData.texShape = outputTexData.texShape;\n texData.isPacked = outputTexData.isPacked;\n texData.usage = outputTexData.usage;\n this.disposeIntermediateTensorInfo(tempDenseInputHandle);\n 
this.texData.delete(encodedOutputTarget.dataId);\n // Once uploaded, don't store the values on cpu.\n texData.values = null;\n if (shouldTimeProgram) {\n this.uploadWaitMs += util.now() - start;\n }\n }\n else {\n const newTexture = this.acquireTexture(texShape, usage, dtype, isPacked);\n texData.texture = newTexture;\n }\n }\n convertAndCacheOnCPU(dataId, float32Values) {\n const texData = this.texData.get(dataId);\n const { dtype } = texData;\n this.releaseGPUData(dataId);\n if (float32Values != null) {\n texData.values = float32ToTypedArray(float32Values, dtype);\n }\n return texData.values;\n }\n acquireTexture(texShape, texType, dtype, isPacked) {\n this.numBytesInGPU += this.computeBytes(texShape, dtype);\n if (!this.warnedAboutMemory &&\n this.numBytesInGPU > this.numMBBeforeWarning * 1024 * 1024) {\n const mb = (this.numBytesInGPU / 1024 / 1024).toFixed(2);\n this.warnedAboutMemory = true;\n console.warn(`High memory usage in GPU: ${mb} MB, ` +\n `most likely due to a memory leak`);\n }\n return this.textureManager.acquireTexture(texShape, texType, isPacked);\n }\n computeBytes(shape, dtype) {\n return shape[0] * shape[1] * util.bytesPerElement(dtype);\n }\n tryRunOnCpuOrThrow(inputs, fn) {\n if (this.shouldExecuteOnCPU(inputs)) {\n try {\n return fn();\n }\n catch (e) {\n if (env().getBool('IS_TEST')) {\n throw new Error('CPU forwarding failed');\n }\n }\n }\n return null;\n }\n}\nfunction float32ToTypedArray(a, dtype) {\n if (dtype === 'float32' || dtype === 'complex64') {\n return a;\n }\n else if (dtype === 'int32' || dtype === 'bool') {\n const result = (dtype === 'int32') ? new Int32Array(a.length) :\n new Uint8Array(a.length);\n for (let i = 0; i < result.length; ++i) {\n result[i] = Math.round(a[i]);\n }\n return result;\n }\n else {\n throw new Error(`Unknown dtype ${dtype}`);\n }\n}\n//# sourceMappingURL=backend_webgl.js.map", "/** @license See the LICENSE file. */\n// This code is auto-generated, do not modify this file!\nconst version = '2.7.0';\nexport { version };\n//# sourceMappingURL=version.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from '@tensorflow/tfjs-core';\nimport * as gpgpu_util from './gpgpu_util';\nimport * as webgl_util from './webgl_util';\nexport { MathBackendWebGL } from './backend_webgl';\nexport { setWebGLContext } from './canvas_util';\nexport { GPGPUContext } from './gpgpu_context';\n// WebGL specific utils.\nexport { gpgpu_util, webgl_util };\n/**\n * Enforce use of half precision textures if available on the platform.\n *\n * @doc {heading: 'Environment', namespace: 'webgl'}\n */\nexport function forceHalfFloat() {\n env().set('WEBGL_FORCE_F16_TEXTURES', true);\n}\n//# sourceMappingURL=webgl.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// base.ts is the webgl backend without auto kernel registration.\nimport { device_util, registerBackend } from '@tensorflow/tfjs-core';\nimport { MathBackendWebGL } from './backend_webgl';\nexport { version as version_webgl } from './version';\nif (device_util.isBrowser()) {\n registerBackend('webgl', () => new MathBackendWebGL(), 2 /* priority */);\n}\n// Export webgl utilities\nexport * from './webgl';\n// Export forceHalfFlost under webgl namespace for the union bundle.\nimport { forceHalfFloat } from './webgl';\nexport const webgl = { forceHalfFloat };\n//# sourceMappingURL=base.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Identity } from '@tensorflow/tfjs-core';\nexport function identity(args) {\n const { inputs, backend } = args;\n const { x } = inputs;\n backend.incRef(x.dataId);\n return { dataId: x.dataId, shape: x.shape, dtype: x.dtype };\n}\nexport const identityConfig = {\n kernelName: Identity,\n backendName: 'webgl',\n kernelFunc: identity\n};\n//# sourceMappingURL=Identity.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Complex } from '@tensorflow/tfjs-core';\nimport { identity } from './Identity';\n/**\n * In WebGL data is stored in GPU textures which can't be efficiently copied, so\n * complex tensors share data with their real and imaginary components. 
Complex\n * tensors increment the `complexParentRefCount` properties of the underlying\n * data buckets to prevent them from being disposed, as the engine's disposal\n * logic does not account for data sharing by complex tensors.\n *\n * When a complex tensor is disposed, it will explicitly decrease the\n * `complexParentRefCount` properties of its underlying components.\n */\nexport function complex(args) {\n const { inputs, backend } = args;\n const { real, imag } = inputs;\n const complexInfo = backend.makeTensorInfo(real.shape, 'complex64');\n const complex = backend.texData.get(complexInfo.dataId);\n const realTensorInfo = identity({ inputs: { x: real }, backend });\n const realData = backend.texData.get(realTensorInfo.dataId);\n realData.complexParentRefCount++;\n const imagTensorInfo = identity({ inputs: { x: imag }, backend });\n const imagData = backend.texData.get(imagTensorInfo.dataId);\n imagData.complexParentRefCount++;\n complex.complexTensorInfos = { real: realTensorInfo, imag: imagTensorInfo };\n return complexInfo;\n}\nexport const complexConfig = {\n kernelName: Complex,\n backendName: 'webgl',\n kernelFunc: complex\n};\n//# sourceMappingURL=Complex.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env, upcastType } from '@tensorflow/tfjs-core';\nimport { BinaryOpProgram } from '../binaryop_gpu';\nimport { BinaryOpPackedProgram } from '../binaryop_packed_gpu';\nimport { complex } from '../kernels/Complex';\nimport { UnaryOpProgram } from '../unaryop_gpu';\nexport const CHECK_NAN_SNIPPET_UNARY = `if (isnan(x)) return x;`;\nexport const CHECK_NAN_SNIPPET_BINARY = `\n if (isnan(a)) return a;\n if (isnan(b)) return b;\n`;\nexport const CHECK_NAN_SNIPPET_BINARY_PACKED = `\n result.r = isNaN.r > 0. ? NAN : result.r;\n result.g = isNaN.g > 0. ? NAN : result.g;\n result.b = isNaN.b > 0. ? NAN : result.b;\n result.a = isNaN.a > 0. ? NAN : result.a;\n`;\n/**\n * Template that creates a `KernelFunc` for unary ops.\n * @param opSnippets Op snippet to create `UnaryOpProgram`.\n */\nexport function unaryKernelFunc(opSnippet) {\n return ({ inputs, backend }) => {\n const { x } = inputs;\n const webglBackend = backend;\n const program = new UnaryOpProgram(x.shape, opSnippet);\n return webglBackend.runWebGLProgram(program, [x], x.dtype);\n };\n}\n/**\n * Template that creates a `KernelFunc` for binary ops.\n * @param opSnippet Op snippet to create `BinaryOpProgram`.\n * @param packedOpSnippet Op snippet to create `BinaryOpPackedProgram`.\n * @param checkOutOfBoundsForPackedProgram Whether to set checkOutOfBounds=true\n * when creating BinaryOpPackedProgram.\n * @param dtype Optional. If set, the result has this dtype. Otherwise, the\n * result has the same dtype as the first input. 
This is mainly used in\n * comparison kernels, such as Equal, Less, Greater, etc.\n */\nexport function binaryKernelFunc({ opSnippet, packedOpSnippet, checkOutOfBounds = false, supportsComplex = false, cpuKernelImpl, dtype }) {\n return ({ inputs, backend }) => {\n const { a, b } = inputs;\n const webglBackend = backend;\n if (supportsComplex && a.dtype === 'complex64') {\n const aData = webglBackend.texData.get(a.dataId);\n const bData = webglBackend.texData.get(b.dataId);\n const [real, imag] = [\n [aData.complexTensorInfos.real, bData.complexTensorInfos.real],\n [aData.complexTensorInfos.imag, bData.complexTensorInfos.imag]\n ].map(complexParts => {\n const [aPart, bPart] = complexParts;\n const aHandle = {\n dataId: aPart.dataId,\n dtype: aPart.dtype,\n shape: a.shape\n };\n const bHandle = {\n dataId: bPart.dataId,\n dtype: bPart.dtype,\n shape: b.shape\n };\n const program = new BinaryOpProgram(opSnippet, a.shape, b.shape);\n return webglBackend.runWebGLProgram(program, [aHandle, bHandle], upcastType(aPart.dtype, bPart.dtype));\n });\n const complexOutput = complex({ inputs: { real, imag }, backend: webglBackend });\n webglBackend.disposeIntermediateTensorInfo(real);\n webglBackend.disposeIntermediateTensorInfo(imag);\n // TODO(annxingyuan): Implement CPU forwarding for complex inputs.\n return complexOutput;\n }\n const $dtype = dtype || upcastType(a.dtype, b.dtype);\n if (webglBackend.shouldExecuteOnCPU([a, b]) && cpuKernelImpl != null) {\n const aData = webglBackend.texData.get(a.dataId);\n const bData = webglBackend.texData.get(b.dataId);\n const [outValues, outShape] = cpuKernelImpl(a.shape, b.shape, aData.values, bData.values, $dtype);\n const out = webglBackend.makeTensorInfo(outShape, $dtype);\n const outData = webglBackend.texData.get(out.dataId);\n outData.values = outValues;\n return out;\n }\n const shouldUsePackedProgram = env().getBool('WEBGL_PACK_BINARY_OPERATIONS') &&\n packedOpSnippet != null;\n let program;\n if (shouldUsePackedProgram) {\n program = new BinaryOpPackedProgram(packedOpSnippet, a.shape, b.shape, checkOutOfBounds);\n }\n else {\n program = new BinaryOpProgram(opSnippet, a.shape, b.shape);\n }\n return webglBackend.runWebGLProgram(program, [a, b], $dtype);\n };\n}\n//# sourceMappingURL=kernel_funcs_utils.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Add } from '@tensorflow/tfjs-core';\nimport { binaryKernelFunc } from '../kernel_utils/kernel_funcs_utils';\nimport { addImplCPU as cpuAdd } from '../kernel_utils/shared';\nconst ADD = 'return a + b;';\nexport const addKernelFunc = binaryKernelFunc({\n opSnippet: ADD,\n packedOpSnippet: ADD,\n supportsComplex: true,\n cpuKernelImpl: cpuAdd\n});\nexport const addConfig = {\n kernelName: Add,\n backendName: 'webgl',\n kernelFunc: addKernelFunc\n};\n//# sourceMappingURL=Add.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Atan2 } from '@tensorflow/tfjs-core';\nimport { binaryKernelFunc, CHECK_NAN_SNIPPET_BINARY, CHECK_NAN_SNIPPET_BINARY_PACKED } from '../kernel_utils/kernel_funcs_utils';\nconst ATAN2 = CHECK_NAN_SNIPPET_BINARY + `\n return atan(a, b);\n`;\nconst ATAN2_PACKED = `\n vec4 result = atan(a, b);\n vec4 isNaN = min(vec4(isnan(a)) + vec4(isnan(b)), vec4(1.0));\n ` +\n CHECK_NAN_SNIPPET_BINARY_PACKED + `\n return result;\n`;\nexport const atan2 = binaryKernelFunc({ opSnippet: ATAN2, packedOpSnippet: ATAN2_PACKED });\nexport const atan2Config = {\n kernelName: Atan2,\n backendName: 'webgl',\n kernelFunc: atan2,\n};\n//# sourceMappingURL=Atan2.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { AvgPool, backend_util, util } from '@tensorflow/tfjs-core';\nimport { Pool2DProgram } from '../pool_gpu';\nimport { assertNotComplex } from '../webgl_util';\nimport { identity } from './Identity';\nexport function avgPool(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n assertNotComplex(x, 'avgPool');\n const { filterSize, strides, pad, dimRoundingMode } = attrs;\n const dilations = 1;\n util.assert(backend_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in avgPool: Either strides or dilations must be 1. ' +\n `Got strides ${strides} and dilations '${dilations}'`);\n const convInfo = backend_util.computePool2DInfo(x.shape, filterSize, strides, dilations, pad, dimRoundingMode);\n if (convInfo.filterWidth === 1 && convInfo.filterHeight === 1 &&\n util.arraysEqual(convInfo.inShape, convInfo.outShape)) {\n return identity({ inputs: { x }, backend });\n }\n const avgPoolProgram = new Pool2DProgram(convInfo, 'avg', false);\n return backend.runWebGLProgram(avgPoolProgram, [x], 'float32');\n}\nexport const avgPoolConfig = {\n kernelName: AvgPool,\n backendName: 'webgl',\n kernelFunc: avgPool\n};\n//# sourceMappingURL=AvgPool.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { AvgPoolBackprop, backend_util } from '@tensorflow/tfjs-core';\nimport { AvgPool2DBackpropProgram } from '../avg_pool_backprop_gpu';\nimport { assertNotComplex } from '../webgl_util';\nexport function avgPoolBackprop(args) {\n const { inputs, backend, attrs } = args;\n const { dy, input } = inputs;\n const x = input;\n assertNotComplex([dy, input], 'avgPoolBackprop');\n const { filterSize, strides, pad } = attrs;\n const convInfo = backend_util.computePool2DInfo(x.shape, filterSize, strides, 1 /* dilations */, pad);\n const avgPoolBackpropProgram = new AvgPool2DBackpropProgram(convInfo);\n return backend.runWebGLProgram(avgPoolBackpropProgram, [dy], x.dtype);\n}\nexport const avgPoolBackpropConfig = {\n kernelName: AvgPoolBackprop,\n backendName: 'webgl',\n kernelFunc: avgPoolBackprop\n};\n//# sourceMappingURL=AvgPoolBackprop.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util } from '@tensorflow/tfjs-core';\nexport class BatchNormProgram {\n constructor(xShape, meanShape, varianceShape, offsetShape, scaleShape, varianceEpsilon) {\n this.outputShape = [];\n this.variableNames = ['x', 'mean', 'variance'];\n backend_util.assertAndGetBroadcastShape(xShape, meanShape);\n backend_util.assertAndGetBroadcastShape(xShape, varianceShape);\n let offsetSnippet = '0.0';\n if (offsetShape != null) {\n backend_util.assertAndGetBroadcastShape(xShape, offsetShape);\n this.variableNames.push('offset');\n offsetSnippet = 'getOffsetAtOutCoords()';\n }\n let scaleSnippet = '1.0';\n if (scaleShape != null) {\n backend_util.assertAndGetBroadcastShape(xShape, scaleShape);\n this.variableNames.push('scale');\n scaleSnippet = 'getScaleAtOutCoords()';\n }\n this.outputShape = xShape;\n this.userCode = `\n void main() {\n float x = getXAtOutCoords();\n float mean = getMeanAtOutCoords();\n float variance = getVarianceAtOutCoords();\n float offset = ${offsetSnippet};\n float scale = ${scaleSnippet};\n float inv = scale * inversesqrt(variance + float(${varianceEpsilon}));\n setOutput(dot(vec3(x, -mean, offset), vec3(inv, inv, 1)));\n }\n `;\n }\n}\n//# sourceMappingURL=batchnorm_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util } from '@tensorflow/tfjs-core';\nexport class BatchNormPackedProgram {\n constructor(xShape, meanShape, varianceShape, offsetShape, scaleShape, varianceEpsilon) {\n this.packedInputs = true;\n this.packedOutput = true;\n this.variableNames = ['x', 'mean', 'variance'];\n backend_util.assertAndGetBroadcastShape(xShape, meanShape);\n backend_util.assertAndGetBroadcastShape(xShape, varianceShape);\n let offsetSnippet = 'vec4(0.0)';\n if (offsetShape != null) {\n backend_util.assertAndGetBroadcastShape(xShape, offsetShape);\n this.variableNames.push('offset');\n offsetSnippet = 'getOffsetAtOutCoords()';\n }\n let scaleSnippet = 'vec4(1.0)';\n if (scaleShape != null) {\n backend_util.assertAndGetBroadcastShape(xShape, scaleShape);\n this.variableNames.push('scale');\n scaleSnippet = 'getScaleAtOutCoords()';\n }\n this.outputShape = xShape;\n this.userCode = `\n void main() {\n vec4 offset = ${offsetSnippet};\n vec4 scale = ${scaleSnippet};\n\n vec4 x = getXAtOutCoords();\n vec4 mean = getMeanAtOutCoords();\n vec4 variance = getVarianceAtOutCoords();\n\n vec4 inv = scale * inversesqrt(variance + vec4(${varianceEpsilon}));\n\n setOutput((x - mean) * inv + offset);\n }\n `;\n }\n}\n//# sourceMappingURL=batchnorm_packed_gpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env, FusedBatchNorm, util } from '@tensorflow/tfjs-core';\nimport { BatchNormProgram } from '../batchnorm_gpu';\nimport { BatchNormPackedProgram } from '../batchnorm_packed_gpu';\nexport const batchNorm = ({ inputs, backend, attrs }) => {\n const { x, mean, variance, offset, scale } = inputs;\n util.assert(mean.shape.length === variance.shape.length, () => 'Batch normalization gradient requires mean and variance to have ' +\n 'equal ranks.');\n util.assert(offset == null || mean.shape.length === offset.shape.length, () => 'Batch normalization gradient requires mean and offset to have ' +\n 'equal ranks.');\n util.assert(scale == null || mean.shape.length === scale.shape.length, () => 'Batch normalization gradient requires mean and scale to have ' +\n 'equal ranks.');\n let { varianceEpsilon } = attrs;\n if (varianceEpsilon == null) {\n varianceEpsilon = 0.001;\n }\n const finalInputs = [x, mean, variance];\n let offsetShape = null;\n if (offset != null) {\n offsetShape = offset.shape;\n finalInputs.push(offset);\n }\n let scaleShape = null;\n if (scale != null) {\n scaleShape = scale.shape;\n finalInputs.push(scale);\n }\n const program = env().getBool('WEBGL_PACK_NORMALIZATION') ?\n new BatchNormPackedProgram(x.shape, mean.shape, variance.shape, offsetShape, scaleShape, varianceEpsilon) :\n new BatchNormProgram(x.shape, mean.shape, variance.shape, offsetShape, scaleShape, varianceEpsilon);\n const output = backend.runWebGLProgram(program, finalInputs, finalInputs[0].dtype);\n return output;\n};\nexport const batchNormConfig = {\n kernelName: FusedBatchNorm,\n backendName: 'webgl',\n kernelFunc: batchNorm,\n};\n//# sourceMappingURL=BatchNorm.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { NotEqual } from '@tensorflow/tfjs-core';\nimport { binaryKernelFunc } from '../kernel_utils/kernel_funcs_utils';\nconst NOT_EQUAL = `return float(a != b);`;\nexport const notEqual = binaryKernelFunc({ opSnippet: NOT_EQUAL, dtype: 'bool' });\nexport const notEqualConfig = {\n kernelName: NotEqual,\n backendName: 'webgl',\n kernelFunc: notEqual,\n};\n//# sourceMappingURL=NotEqual.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Real } from '@tensorflow/tfjs-core';\nimport { identity } from './Identity';\nexport function real(args) {\n const { inputs, backend } = args;\n const { input } = inputs;\n const inputData = backend.texData.get(input.dataId);\n return identity({ inputs: { x: inputData.complexTensorInfos.real }, backend });\n}\nexport const realConfig = {\n kernelName: Real,\n backendName: 'webgl',\n kernelFunc: real\n};\n//# sourceMappingURL=Real.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { UnaryOpProgram } from '../unaryop_gpu';\nconst TO_INT = `return float(int(x));`;\nexport function int(input, backend) {\n const program = new UnaryOpProgram(input.shape, TO_INT);\n const output = backend.runWebGLProgram(program, [input], 'int32');\n return { dataId: output.dataId, shape: output.shape, dtype: output.dtype };\n}\n//# sourceMappingURL=int.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as tf from '@tensorflow/tfjs-core';\nimport { Cast, util } from '@tensorflow/tfjs-core';\nimport { complex } from './Complex';\nimport { identity } from './Identity';\nimport { notEqual } from './NotEqual';\nimport { real } from './Real';\nimport { int } from '../kernel_utils/int';\nexport function cast(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n const { dtype } = attrs;\n // Casting to complex64.\n if (dtype === 'complex64') {\n if (x.dtype === 'complex64') {\n return identity({ inputs: { x }, backend });\n }\n // TODO(annxingyuan): Import kernel function once zeros is modularized.\n const zerosTensor = tf.zeros(x.shape);\n const floatX = cast({ inputs: { x }, backend, attrs: { dtype: 'float32' } });\n const result = complex({ inputs: { real: floatX, imag: zerosTensor }, backend });\n zerosTensor.dispose();\n backend.disposeIntermediateTensorInfo(floatX);\n return result;\n }\n // Casting from complex64\n if (x.dtype === 'complex64') {\n const realPart = real({ inputs: { input: x }, backend });\n const result = cast({ inputs: { x: realPart }, backend, attrs: { dtype } });\n backend.disposeIntermediateTensorInfo(realPart);\n return result;\n }\n if (!util.hasEncodingLoss(x.dtype, dtype)) {\n // We don't change the underlying data, since we cast to higher\n // precision.\n const result = identity({ inputs: { x }, backend });\n return { dataId: result.dataId, shape: result.shape, dtype };\n }\n if (dtype === 'int32') {\n return int(x, backend);\n }\n if (dtype === 'bool') {\n const zerosTensorInfo = backend.makeTensorInfo([], 'bool', util.getTypedArrayFromDType('bool', 1));\n const binaryInputs = { a: x, b: zerosTensorInfo };\n const result = notEqual({ inputs: binaryInputs, backend });\n backend.disposeIntermediateTensorInfo(zerosTensorInfo);\n return result;\n }\n throw new Error(`Error in Cast: failed to cast ${x.dtype} to ${dtype}`);\n}\nexport const castConfig = {\n kernelName: Cast,\n backendName: 'webgl',\n kernelFunc: cast\n};\n//# sourceMappingURL=Cast.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util } from '@tensorflow/tfjs-core';\nexport class ConcatProgram {\n // Concats 2d tensors along axis=1. See comments in MathBackendWebGL.concat().\n constructor(shapes) {\n this.outputShape = [];\n this.outputShape = backend_util.computeOutShape(shapes, 1 /* axis */);\n this.variableNames = shapes.map((_, i) => `T${i}`);\n const offsets = new Array(shapes.length - 1);\n offsets[0] = shapes[0][1];\n for (let i = 1; i < offsets.length; i++) {\n offsets[i] = offsets[i - 1] + shapes[i][1];\n }\n const snippets = [`if (yC < ${offsets[0]}) setOutput(getT0(yR, yC));`];\n for (let i = 1; i < offsets.length; i++) {\n const shift = offsets[i - 1];\n snippets.push(`else if (yC < ${offsets[i]}) ` +\n `setOutput(getT${i}(yR, yC-${shift}));`);\n }\n const lastIndex = offsets.length;\n const lastShift = offsets[offsets.length - 1];\n snippets.push(`else setOutput(getT${lastIndex}(yR, yC-${lastShift}));`);\n this.userCode = `\n void main() {\n ivec2 coords = getOutputCoords();\n int yR = coords.x;\n int yC = coords.y;\n\n ${snippets.join('\\n ')}\n }\n `;\n }\n}\n//# sourceMappingURL=concat_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util } from '@tensorflow/tfjs-core';\nimport { getChannels } from './packing_util';\nimport { getCoordsDataType } from './shader_compiler';\nexport class ConcatPackedProgram {\n constructor(shapes, axis) {\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = [];\n this.outputShape = backend_util.computeOutShape(shapes, axis);\n const shape = this.outputShape;\n const rank = shape.length;\n const dtype = getCoordsDataType(rank);\n const coords = getChannels('coords', rank);\n const channels = ['x', 'y', 'z', 'w', 'u', 'v'].slice(0, rank);\n this.variableNames = shapes.map((_, i) => `T${i}`);\n const offsets = new Array(shapes.length - 1);\n offsets[0] = shapes[0][axis];\n for (let i = 1; i < offsets.length; i++) {\n offsets[i] = offsets[i - 1] + shapes[i][axis];\n }\n const channel = channels[axis];\n const lastChannels = channels.slice(-2);\n const allChannels = channels.join();\n let getValueSnippet = `if (${channel} < ${offsets[0]}) {\n return getChannel(\n getT0(${allChannels}), vec2(${lastChannels.join()}));\n }`;\n for (let i = 1; i < offsets.length; i++) {\n const shift = offsets[i - 1];\n // Note: the >= comparison below may seem unnecessary given the check\n // above but is needed to workaround branch execution issues on some\n // devices. 
It makes all the conditions exclusive without relying on\n // execution order.\n getValueSnippet += `\n if (${channel} < ${offsets[i]} && ${channel} >= ${offsets[i - 1]}) {\n return getChannel(\n getT${i}(${shiftedChannels(channels, channel, shift)}),\n vec2(${shiftedChannels(lastChannels, channel, shift)}));\n }`;\n }\n const lastIndex = offsets.length;\n const shift = offsets[offsets.length - 1];\n getValueSnippet += `\n return getChannel(\n getT${lastIndex}(${shiftedChannels(channels, channel, shift)}),\n vec2(${shiftedChannels(lastChannels, channel, shift)}));`;\n this.userCode = `\n float getValue(${channels.map(x => 'int ' + x)}) {\n ${getValueSnippet}\n }\n\n void main() {\n ${dtype} coords = getOutputCoords();\n vec4 result = vec4(getValue(${coords}), 0., 0., 0.);\n\n ${coords[rank - 1]} = ${coords[rank - 1]} + 1;\n if (${coords[rank - 1]} < ${shape[rank - 1]}) {\n result.g = getValue(${coords});\n }\n\n ${coords[rank - 2]} = ${coords[rank - 2]} + 1;\n if (${coords[rank - 2]} < ${shape[rank - 2]}) {\n result.a = getValue(${coords});\n }\n\n ${coords[rank - 1]} = ${coords[rank - 1]} - 1;\n if (${coords[rank - 2]} < ${shape[rank - 2]} &&\n ${coords[rank - 1]} < ${shape[rank - 1]}) {\n result.b = getValue(${coords});\n }\n setOutput(result);\n }\n `;\n }\n}\n/**\n * Return an expression for coordinates into a vector where a given channel\n * will be offset by [shift].\n *\n * @param channels the channels to consider\n * @param channel the channel we want shifted\n * @param shift the amount to subtract from the channel.\n *\n * @returns a string of the form 'x, y-[shift], z' where any one channel can\n * have the shift applied.\n */\nfunction shiftedChannels(channels, channel, shift) {\n const channelIdx = channels.indexOf(channel);\n const res = channels.map((c, idx) => {\n if (idx === channelIdx) {\n return `${c} - ${shift}`;\n }\n else {\n return c;\n }\n });\n return res.join();\n}\n//# sourceMappingURL=concat_packed_gpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Imag } from '@tensorflow/tfjs-core';\nimport { identity } from './Identity';\nexport function imag(args) {\n const { inputs, backend } = args;\n const { input } = inputs;\n const inputData = backend.texData.get(input.dataId);\n return identity({ inputs: { x: inputData.complexTensorInfos.imag }, backend });\n}\nexport const imagConfig = {\n kernelName: Imag,\n backendName: 'webgl',\n kernelFunc: imag\n};\n//# sourceMappingURL=Imag.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ReshapePackedProgram } from '../reshape_packed_gpu';\nimport { getBatchDim, getRowsCols } from '../webgl_util';\nexport function packedReshape(input, afterShape, backend) {\n const input3DShape = [getBatchDim(input.shape),\n ...getRowsCols(input.shape)];\n const input3D = {\n dtype: input.dtype,\n shape: input3DShape,\n dataId: input.dataId\n };\n const afterShapeAs3D = [getBatchDim(afterShape),\n ...getRowsCols(afterShape)];\n const program = new ReshapePackedProgram(afterShapeAs3D, input3DShape);\n const preventEagerUnpackingOfOutput = true;\n const output = backend.runWebGLProgram(program, [input3D], input.dtype, null /* customSetup */, preventEagerUnpackingOfOutput);\n return { dataId: output.dataId, shape: afterShape, dtype: output.dtype };\n}\n//# sourceMappingURL=reshape.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Reshape, util } from '@tensorflow/tfjs-core';\nimport { packedReshape } from '../kernel_utils/reshape';\nimport { isReshapeFree } from '../webgl_util';\nexport function reshape(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n const { shape } = attrs;\n const webglBackend = backend;\n const xSize = util.sizeFromShape(x.shape);\n const $shape = util.inferFromImplicitShape(shape, xSize);\n const $xSize = util.sizeFromShape($shape);\n util.assert(xSize === $xSize, () => `The new shape (${$shape}) has ${$xSize} elements and the old ` +\n `shape (${x.shape}) has ${xSize} elements. The new shape and old ` +\n `shape must have the same number of elements.`);\n const xTexData = webglBackend.texData.get(x.dataId);\n if (xTexData.isPacked && !isReshapeFree(x.shape, $shape) &&\n !(xTexData.texture !== null && isReshapeFree(xTexData.shape, $shape))) {\n return packedReshape(x, $shape, webglBackend);\n }\n webglBackend.incRef(x.dataId);\n return { dataId: x.dataId, shape: $shape, dtype: x.dtype };\n}\nexport const reshapeConfig = {\n kernelName: Reshape,\n backendName: 'webgl',\n kernelFunc: reshape\n};\n//# sourceMappingURL=Reshape.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, env, util } from '@tensorflow/tfjs-core';\nimport { ConcatProgram } from '../concat_gpu';\nimport { ConcatPackedProgram } from '../concat_packed_gpu';\nimport { complex } from './Complex';\nimport { imag } from './Imag';\nimport { real } from './Real';\nimport { reshape } from './Reshape';\nexport function concatImpl(inputs, axis, backend) {\n const dtype = inputs[0].dtype;\n if (dtype === 'complex64') {\n const reals = inputs.map((t) => real({ inputs: { input: t }, backend }));\n const imags = inputs.map((t) => imag({ inputs: { input: t }, backend }));\n const realConcated = concatImpl(reals, axis, backend);\n const imagConcated = concatImpl(imags, axis, backend);\n const result = complex({ inputs: { real: realConcated, imag: imagConcated }, backend });\n reals.forEach(r => backend.disposeIntermediateTensorInfo(r));\n imags.forEach(i => backend.disposeIntermediateTensorInfo(i));\n backend.disposeIntermediateTensorInfo(realConcated);\n backend.disposeIntermediateTensorInfo(imagConcated);\n return result;\n }\n if (inputs.length > env().getNumber('WEBGL_MAX_TEXTURES_IN_SHADER')) {\n const midIndex = Math.floor(inputs.length / 2);\n const leftSide = concatImpl(inputs.slice(0, midIndex), axis, backend);\n const rightSide = concatImpl(inputs.slice(midIndex), axis, backend);\n const result = concatImpl([leftSide, rightSide], axis, backend);\n backend.disposeIntermediateTensorInfo(leftSide);\n backend.disposeIntermediateTensorInfo(rightSide);\n return result;\n }\n if (env().getBool('WEBGL_PACK_ARRAY_OPERATIONS') &&\n inputs[0].shape.length > 1) {\n const program = new ConcatPackedProgram(inputs.map(t => t.shape), axis);\n return backend.runWebGLProgram(program, inputs, dtype);\n }\n // Any concat of n-dimensional tensors across any axis can be reduced to\n // a concatenation of two-dimensional tensors across the axis 1 by first\n // partitioning the axes of the original tensors into those less than the\n // axis to be concatenated and the rest. 
Then reshape the tensors\n // into a two-dimensional tensor by collapsing these two sets of axes and\n // concatenate the resulting matrices across the axis 1, finally reshaping\n // the result to have the proper shape.\n const outShape = backend_util.computeOutShape(inputs.map(t => t.shape), axis);\n const tensors2D = inputs.map(x => reshape({\n inputs: { x },\n attrs: { shape: [-1, util.sizeFromShape(x.shape.slice(axis))] },\n backend\n }));\n const program = new ConcatProgram(tensors2D.map(t => t.shape));\n const result = backend.runWebGLProgram(program, tensors2D, dtype);\n tensors2D.forEach(r => backend.disposeIntermediateTensorInfo(r));\n const reshapedResult = reshape({ inputs: { x: result }, attrs: { shape: outShape }, backend });\n backend.disposeIntermediateTensorInfo(result);\n return reshapedResult;\n}\n//# sourceMappingURL=Concat_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Concat, util } from '@tensorflow/tfjs-core';\nimport { concatImpl } from './Concat_impl';\nexport function concat(args) {\n const { inputs, backend, attrs } = args;\n const { axis } = attrs;\n const $axis = util.parseAxisParam(axis, inputs[0].shape)[0];\n const outShape = backend_util.computeOutShape(inputs.map(t => t.shape), $axis);\n if (util.sizeFromShape(outShape) === 0) {\n return backend.makeTensorInfo(outShape, inputs[0].dtype, []);\n }\n // Keep only non-empty tensors (ignore tensors with 0 in their shape).\n const $inputs = inputs.filter(t => util.sizeFromShape(t.shape) > 0);\n if ($inputs.length === 1) {\n return $inputs[0];\n }\n const shapes = $inputs.map(t => t.shape);\n backend_util.assertParamsConsistent(shapes, $axis);\n return concatImpl($inputs, $axis, backend);\n}\nexport const concatConfig = {\n kernelName: Concat,\n backendName: 'webgl',\n kernelFunc: concat\n};\n//# sourceMappingURL=Concat.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Cos } from '@tensorflow/tfjs-core';\nimport { CHECK_NAN_SNIPPET_UNARY, unaryKernelFunc } from '../kernel_utils/kernel_funcs_utils';\nconst COS = CHECK_NAN_SNIPPET_UNARY + `\n return cos(x);\n`;\nexport const cos = unaryKernelFunc(COS);\nexport const cosConfig = {\n kernelName: Cos,\n backendName: 'webgl',\n kernelFunc: cos,\n};\n//# sourceMappingURL=Cos.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Div } from '@tensorflow/tfjs-core';\nimport { binaryKernelFunc } from '../kernel_utils/kernel_funcs_utils';\n// Without the equality check div produces 0.9999 for a = b, which when\n// floored can cause errors.\nconst DIV = `\nif (a == b) {\n return 1.0;\n};\nreturn a / b;`;\n// We do the same as in ./binaryop_gpu, with vec4 and ivec4.\n// On Linux, the vectorized implementation produces NaNs when a and b are 0.\nconst DIV_PACKED = `\n // vec4 one = vec4(equal(a, b));\n // return one + (vec4(1.0) - one) * a / b;\n vec4 result = a / b;\n if(a.x == b.x) {\n result.x = 1.;\n }\n if(a.y == b.y) {\n result.y = 1.;\n }\n if(a.z == b.z) {\n result.z = 1.;\n }\n if(a.w == b.w) {\n result.w = 1.;\n }\n\n return result;\n`;\nexport const div = binaryKernelFunc({ opSnippet: DIV, packedOpSnippet: DIV_PACKED, checkOutOfBounds: true });\nexport const divConfig = {\n kernelName: Div,\n backendName: 'webgl',\n kernelFunc: div,\n};\n//# sourceMappingURL=Div.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class FFTProgram {\n constructor(component, inputShape, inverse) {\n this.variableNames = ['real', 'imag'];\n const innerDim = inputShape[1];\n this.outputShape = inputShape;\n const exponentMultiplierSnippet = inverse ? `2.0 * ${Math.PI}` : `-2.0 * ${Math.PI}`;\n const resultDenominator = inverse ? `${innerDim}.0` : '1.0';\n let opString;\n if (component === 'real') {\n opString = 'return real * expR - imag * expI;';\n }\n else if (component === 'imag') {\n opString = 'return real * expI + imag * expR;';\n }\n else {\n throw new Error(`FFT component must be either \"real\" or \"imag\", got ${component}.`);\n }\n this.userCode = `\n const float exponentMultiplier = ${exponentMultiplierSnippet};\n\n float unaryOpComplex(float real, float expR, float imag, float expI) {\n ${opString}\n }\n\n float mulMatDFT(int batch, int index) {\n float indexRatio = float(index) / float(${innerDim});\n float exponentMultiplierTimesIndexRatio =\n exponentMultiplier * indexRatio;\n\n float result = 0.0;\n\n for (int i = 0; i < ${innerDim}; i++) {\n // x = (-2|2 * PI / N) * index * i;\n float x = exponentMultiplierTimesIndexRatio * float(i);\n float expR = cos(x);\n float expI = sin(x);\n float real = getReal(batch, i);\n float imag = getImag(batch, i);\n\n result +=\n unaryOpComplex(real, expR, imag, expI) / ${resultDenominator};\n }\n\n return result;\n }\n\n void main() {\n ivec2 coords = getOutputCoords();\n setOutput(mulMatDFT(coords[0], coords[1]));\n }\n `;\n }\n}\n//# sourceMappingURL=fft_gpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nimport { FFTProgram } from '../fft_gpu';\nimport { complex } from './Complex';\nimport { reshape } from './Reshape';\nexport function fftImpl(x, inverse, backend) {\n const xData = backend.texData.get(x.dataId);\n const inputSize = util.sizeFromShape(x.shape);\n // Collapse all outer dimensions to a single batch dimension.\n const innerDimensionSize = x.shape[x.shape.length - 1];\n const batch = inputSize / innerDimensionSize;\n const input2D = reshape({ inputs: { x }, backend, attrs: { shape: [batch, innerDimensionSize] } });\n const xShape = input2D.shape;\n const realProgram = new FFTProgram('real', xShape, inverse);\n const imagProgram = new FFTProgram('imag', xShape, inverse);\n const inputs = [\n {\n dataId: xData.complexTensorInfos.real.dataId,\n dtype: xData.complexTensorInfos.real.dtype,\n shape: xShape\n },\n {\n dataId: xData.complexTensorInfos.imag.dataId,\n dtype: xData.complexTensorInfos.imag.dtype,\n shape: xShape\n }\n ];\n const realPart = backend.runWebGLProgram(realProgram, inputs, 'float32');\n const imagPart = backend.runWebGLProgram(imagProgram, inputs, 'float32');\n const complexOutput = complex({ inputs: { real: realPart, imag: imagPart }, backend });\n backend.disposeIntermediateTensorInfo(realPart);\n backend.disposeIntermediateTensorInfo(imagPart);\n const complexOutputReshaped = reshape({ inputs: { x: complexOutput }, backend, attrs: { shape: x.shape } });\n backend.disposeIntermediateTensorInfo(complexOutputReshaped);\n return complexOutputReshaped;\n}\n//# sourceMappingURL=FFT_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { FFT } from '@tensorflow/tfjs-core';\nimport { fftImpl } from './FFT_impl';\nexport function fft(args) {\n const { inputs, backend } = args;\n const { input } = inputs;\n return fftImpl(input, false /* inverse */, backend);\n}\nexport const fftConfig = {\n kernelName: FFT,\n backendName: 'webgl',\n kernelFunc: fft\n};\n//# sourceMappingURL=FFT.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class FlipLeftRightProgram {\n constructor(imageShape) {\n this.variableNames = ['Image'];\n this.outputShape = [];\n const imageWidth = imageShape[2];\n this.outputShape = imageShape;\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int x = coords[2];\n\n int coordX = ${imageWidth} - x;\n float outputValue;\n if(coordX >= 0 && coordX < ${imageWidth}) {\n outputValue = getImage(coords[0], coords[1], coordX, coords[3]);\n } else {\n outputValue = getImage(coords[0], coords[1], coords[2], coords[3]);\n }\n setOutput(outputValue);\n }\n `;\n }\n}\n//# sourceMappingURL=flip_left_right_gpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { FlipLeftRight } from '@tensorflow/tfjs-core';\nimport { FlipLeftRightProgram } from '../flip_left_right_gpu';\nexport const flipLeftRightConfig = {\n kernelName: FlipLeftRight,\n backendName: 'webgl',\n kernelFunc: ({ inputs, backend }) => {\n const { image } = inputs;\n const webglBackend = backend;\n const program = new FlipLeftRightProgram(image.shape);\n const output = webglBackend.runWebGLProgram(program, [image], image.dtype);\n return output;\n }\n};\n//# sourceMappingURL=FlipLeftRight.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getGlslDifferences } from '../../glsl_version';\nexport class FromPixelsProgram {\n constructor(outputShape) {\n this.variableNames = ['A'];\n const glsl = getGlslDifferences();\n const [height, width,] = outputShape;\n this.outputShape = outputShape;\n this.userCode = `\n void main() {\n ivec3 coords = getOutputCoords();\n int texR = coords[0];\n int texC = coords[1];\n int depth = coords[2];\n vec2 uv = (vec2(texC, texR) + halfCR) / vec2(${width}.0, ${height}.0);\n\n vec4 values = ${glsl.texture2D}(A, uv);\n float value;\n if (depth == 0) {\n value = values.r;\n } else if (depth == 1) {\n value = values.g;\n } else if (depth == 2) {\n value = values.b;\n } else if (depth == 3) {\n value = values.a;\n }\n\n setOutput(floor(value * 255.0 + 0.5));\n }\n `;\n }\n}\n//# sourceMappingURL=from_pixels_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getGlslDifferences } from '../../glsl_version';\nexport class FromPixelsPackedProgram {\n constructor(outputShape) {\n this.variableNames = ['A'];\n this.packedInputs = false;\n this.packedOutput = true;\n const glsl = getGlslDifferences();\n const [height, width,] = outputShape;\n this.outputShape = outputShape;\n this.userCode = `\n void main() {\n ivec3 coords = getOutputCoords();\n int texR = coords[0];\n int texC = coords[1];\n int depth = coords[2];\n\n vec4 result = vec4(0.);\n\n for(int row=0; row<=1; row++) {\n for(int col=0; col<=1; col++) {\n texC = coords[1] + row;\n depth = coords[2] + col;\n\n vec2 uv = (vec2(texC, texR) + halfCR) /\n vec2(${width}.0, ${height}.0);\n vec4 values = ${glsl.texture2D}(A, uv);\n float value;\n if (depth == 0) {\n value = values.r;\n } else if (depth == 1) {\n value = values.g;\n } else if (depth == 2) {\n value = values.b;\n } else if (depth == 3) {\n value = values.a;\n }\n\n result[row * 2 + col] = floor(value * 255.0 + 0.5);\n }\n }\n\n ${glsl.output} = result;\n }\n `;\n }\n}\n//# sourceMappingURL=from_pixels_packed_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from '@tensorflow/tfjs-core';\nimport { FromPixels } from '@tensorflow/tfjs-core';\nimport { TextureUsage } from '../tex_util';\nimport { FromPixelsProgram } from './FromPixels_utils/from_pixels_gpu';\nimport { FromPixelsPackedProgram } from './FromPixels_utils/from_pixels_packed_gpu';\nexport const fromPixelsConfig = {\n kernelName: FromPixels,\n backendName: 'webgl',\n kernelFunc: fromPixels,\n};\nlet fromPixels2DContext;\nfunction fromPixels(args) {\n const { inputs, backend, attrs } = args;\n let { pixels } = inputs;\n const { numChannels } = attrs;\n const isVideo = typeof (HTMLVideoElement) !== 'undefined' &&\n pixels instanceof HTMLVideoElement;\n const isImage = typeof (HTMLImageElement) !== 'undefined' &&\n pixels instanceof HTMLImageElement;\n const [width, height] = isVideo ?\n [\n pixels.videoWidth,\n pixels.videoHeight\n ] :\n [pixels.width, pixels.height];\n const texShape = [height, width];\n const outShape = [height, width, numChannels];\n if (isImage || isVideo) {\n if (fromPixels2DContext == null) {\n fromPixels2DContext = document.createElement('canvas').getContext('2d');\n }\n fromPixels2DContext.canvas.width = width;\n fromPixels2DContext.canvas.height = height;\n fromPixels2DContext.drawImage(pixels, 0, 0, width, height);\n pixels = fromPixels2DContext.canvas;\n }\n const tempPixelHandle = backend.makeTensorInfo(texShape, 'int32');\n // This is a byte texture with pixels.\n backend.texData.get(tempPixelHandle.dataId).usage = TextureUsage.PIXELS;\n backend.gpgpu.uploadPixelDataToTexture(backend.getTexture(tempPixelHandle.dataId), pixels);\n const program = env().getBool('WEBGL_PACK') ?\n new FromPixelsPackedProgram(outShape) :\n new FromPixelsProgram(outShape);\n const res = backend.runWebGLProgram(program, [tempPixelHandle], 'int32');\n backend.disposeData(tempPixelHandle.dataId);\n return res;\n}\n//# sourceMappingURL=FromPixels.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { IFFT } from '@tensorflow/tfjs-core';\nimport { fftImpl } from './FFT_impl';\nexport function ifft(args) {\n const { inputs, backend } = args;\n const { input } = inputs;\n return fftImpl(input, true /* inverse */, backend);\n}\nexport const ifftConfig = {\n kernelName: IFFT,\n backendName: 'webgl',\n kernelFunc: ifft\n};\n//# sourceMappingURL=IFFT.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nexport class MeanProgram {\n constructor(reduceInfo, divisor) {\n this.variableNames = ['x'];\n const { windowSize, batchSize, inSize, outSize } = reduceInfo;\n this.outputShape = [batchSize, outSize];\n const windowSizeNearestVec4 = Math.floor(windowSize / 4) * 4;\n const windowSizeVec4Remainder = windowSize % 4;\n let updateSnippet = `sumValue += dot(values, ones);`;\n if (divisor != null) {\n const denominator = 1 / divisor;\n updateSnippet = `sumValue += dot(values * ${util.isInt(denominator) ? 
denominator.toPrecision(2) :\n denominator}, ones);`;\n }\n let checkOutOfBounds = '';\n if (inSize % windowSize > 0) {\n checkOutOfBounds = `\n if (inIdx < 0 || inIdx >= ${inSize}) {\n return 0.0;\n }\n `;\n }\n this.userCode = `\n const vec4 ones = vec4(1.0, 1.0, 1.0, 1.0);\n\n float getValue(int batch, int inIdx) {\n ${checkOutOfBounds}\n return getX(batch, inIdx);\n }\n\n void main() {\n ivec2 coords = getOutputCoords();\n int batch = coords[0];\n int outIdx = coords[1];\n int inOffset = outIdx * ${windowSize};\n\n float sumValue = 0.0;\n\n for (int i = 0; i < ${windowSizeNearestVec4}; i += 4) {\n int inIdx = inOffset + i;\n vec4 values = vec4(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1),\n getValue(batch, inIdx + 2),\n getValue(batch, inIdx + 3)\n );\n\n ${updateSnippet}\n }\n\n int inIdx = inOffset + ${windowSizeNearestVec4};\n if (${windowSizeVec4Remainder === 1}) {\n vec4 values = vec4(getValue(batch, inIdx), 0.0, 0.0, 0.0);\n\n ${updateSnippet}\n } else if (${windowSizeVec4Remainder === 2}) {\n vec4 values = vec4(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1), 0.0, 0.0);\n\n ${updateSnippet}\n } else if (${windowSizeVec4Remainder === 3}) {\n vec4 values = vec4(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1),\n getValue(batch, inIdx + 2), 0.0);\n\n ${updateSnippet}\n }\n setOutput(sumValue);\n }\n `;\n }\n}\n//# sourceMappingURL=mean_gpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util } from '@tensorflow/tfjs-core';\nimport { MeanProgram } from '../mean_gpu';\nimport { ReduceProgram } from '../reduce_gpu';\n// Returns an array of configuration objects that describe each stage of the\n// reduction.\nfunction getReductionStages(inShape) {\n const stages = [];\n while (stages.length === 0 || stages[stages.length - 1].outSize !== 1) {\n const outSize = stages.length ? 
stages[stages.length - 1].outSize : inShape[1];\n const windowSize = backend_util.computeOptimalWindowSize(outSize);\n stages.push({\n inSize: outSize,\n windowSize,\n outSize: Math.ceil(outSize / windowSize)\n });\n }\n return stages;\n}\nexport function reduce(x, dtype, reductionType, backend) {\n const reductionStages = getReductionStages(x.shape);\n let result = x;\n for (let i = 0; i < reductionStages.length; i++) {\n const { inSize, windowSize, outSize } = reductionStages[i];\n let program;\n let previousResult;\n if (reductionType === 'mean') {\n program = i === 0 ?\n new MeanProgram({ windowSize, inSize, batchSize: x.shape[0], outSize }, inSize) :\n new MeanProgram({ windowSize, inSize, batchSize: x.shape[0], outSize });\n }\n else {\n program = new ReduceProgram({ windowSize, inSize, batchSize: x.shape[0], outSize }, reductionType);\n }\n previousResult = result;\n result = backend.runWebGLProgram(program, [result], dtype);\n if (previousResult.dataId !== x.dataId) {\n backend.disposeIntermediateTensorInfo(previousResult);\n }\n }\n return result;\n}\n//# sourceMappingURL=reduce.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nimport { reduce } from '../kernel_utils/reduce';\nimport { reshape } from '../kernels/Reshape';\nexport function maxImpl(x, reduceShape, outShape, backend) {\n const inSize = util.sizeFromShape(reduceShape);\n const xSize = util.sizeFromShape(x.shape);\n const batchSize = xSize / inSize;\n const reshapedInput = reshape({ inputs: { x }, attrs: { shape: [batchSize, inSize] }, backend });\n const reduced = reduce(reshapedInput, x.dtype, 'max', backend);\n const reshapedOutput = reshape({ inputs: { x: reduced }, attrs: { shape: outShape }, backend });\n backend.disposeIntermediateTensorInfo(reshapedInput);\n backend.disposeIntermediateTensorInfo(reduced);\n return reshapedOutput;\n}\n//# sourceMappingURL=Max_impl.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getCoordsDataType } from './shader_compiler';\nexport class TransposeProgram {\n constructor(aShape, newDim) {\n this.variableNames = ['A'];\n const outputShape = new Array(aShape.length);\n for (let i = 0; i < outputShape.length; i++) {\n outputShape[i] = aShape[newDim[i]];\n }\n this.outputShape = outputShape;\n this.rank = outputShape.length;\n const dtype = getCoordsDataType(this.rank);\n const switched = getSwitchedCoords(newDim);\n this.userCode = `\n void main() {\n ${dtype} resRC = getOutputCoords();\n setOutput(getA(${switched}));\n }\n `;\n }\n}\nfunction getSwitchedCoords(newDim) {\n const rank = newDim.length;\n if (rank > 6) {\n throw Error(`Transpose for rank ${rank} is not yet supported`);\n }\n const originalOrder = ['resRC.x', 'resRC.y', 'resRC.z', 'resRC.w', 'resRC.u', 'resRC.v'];\n const switchedCoords = new Array(rank);\n for (let i = 0; i < newDim.length; i++) {\n switchedCoords[newDim[i]] = originalOrder[i];\n }\n return switchedCoords.join();\n}\n//# sourceMappingURL=transpose_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getVecChannels } from './packing_util';\nimport { getCoordsDataType } from './shader_compiler';\nexport class TransposePackedProgram {\n constructor(aShape, newDim) {\n this.variableNames = ['A'];\n this.packedInputs = true;\n this.packedOutput = true;\n const outputShape = new Array(aShape.length);\n for (let i = 0; i < outputShape.length; i++) {\n outputShape[i] = aShape[newDim[i]];\n }\n this.outputShape = outputShape;\n this.rank = outputShape.length;\n if (this.rank > 6) {\n throw Error(`Packed transpose for rank ${this.rank} is not yet supported.`);\n }\n const dtype = getCoordsDataType(this.rank);\n const outputOrder = getVecChannels('rc', this.rank);\n const switchedOrder = new Array(this.rank);\n for (let i = 0; i < newDim.length; i++) {\n switchedOrder[newDim[i]] = outputOrder[i];\n }\n const innerDims = `vec2(${switchedOrder.slice(-2).join()})`;\n const nextColumn = `++${outputOrder[this.rank - 1]} < ${outputShape[this.rank - 1]}`;\n const getc = `getChannel(getA(${switchedOrder.join()}), ${innerDims})`;\n this.userCode = `\n void main() {\n ${dtype} rc = getOutputCoords();\n vec4 
result = vec4(0.);\n result[0] = ${getc};\n if(${nextColumn}) {\n result[1] = ${getc};\n }\n --${outputOrder[this.rank - 1]};\n if(++${outputOrder[this.rank - 2]} < ${outputShape[this.rank - 2]}) {\n result[2] = ${getc};\n if(${nextColumn}) {\n result[3] = ${getc};\n }\n }\n setOutput(result);\n }\n `;\n }\n}\n//# sourceMappingURL=transpose_packed_gpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from '@tensorflow/tfjs-core';\nimport { transposeImplCPU } from '../kernel_utils/shared';\nimport { TransposeProgram } from '../transpose_gpu';\nimport { TransposePackedProgram } from '../transpose_packed_gpu';\nexport function transposeImpl(x, perm, backend) {\n const program = env().getBool('WEBGL_PACK_ARRAY_OPERATIONS') ?\n new TransposePackedProgram(x.shape, perm) :\n new TransposeProgram(x.shape, perm);\n return backend.runWebGLProgram(program, [x], x.dtype);\n}\nexport { transposeImplCPU };\n//# sourceMappingURL=Transpose_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Max } from '@tensorflow/tfjs-core';\nimport { backend_util, util } from '@tensorflow/tfjs-core';\nimport { maxImplCPU } from '../kernel_utils/shared';\nimport { maxImpl } from './Max_impl';\nimport { transposeImpl, transposeImplCPU } from './Transpose_impl';\nexport const maxConfig = {\n kernelName: Max,\n backendName: 'webgl',\n kernelFunc: ({ inputs, attrs, backend }) => {\n const { x } = inputs;\n const { reductionIndices, keepDims } = attrs;\n const webglBackend = backend;\n const xRank = x.shape.length;\n const origAxes = util.parseAxisParam(reductionIndices, x.shape);\n let axes = origAxes;\n const permutedAxes = backend_util.getAxesPermutation(axes, xRank);\n const maxInputIsTransposed = permutedAxes != null;\n const shouldExecuteOnCPU = webglBackend.shouldExecuteOnCPU([x]);\n let maxInput = x;\n if (maxInputIsTransposed) {\n if (shouldExecuteOnCPU) {\n const xTexData = webglBackend.texData.get(maxInput.dataId);\n const values = xTexData.values;\n const newShape = new Array(xRank);\n for (let i = 0; i < newShape.length; i++) {\n newShape[i] = x.shape[permutedAxes[i]];\n }\n const maxInputValues = transposeImplCPU(values, x.shape, x.dtype, 
permutedAxes, newShape);\n maxInput = webglBackend.makeTensorInfo(newShape, x.dtype);\n const maxInputData = webglBackend.texData.get(maxInput.dataId);\n maxInputData.values = maxInputValues;\n }\n else {\n maxInput = transposeImpl(x, permutedAxes, webglBackend);\n }\n axes = backend_util.getInnerMostAxes(axes.length, xRank);\n }\n backend_util.assertAxesAreInnerMostDims('max', axes, xRank);\n const [maxOutShape, reduceShape] = backend_util.computeOutAndReduceShapes(maxInput.shape, axes);\n let outShape = maxOutShape;\n if (keepDims) {\n // rather than reshape at the end, set the target shape here.\n outShape = backend_util.expandShapeToKeepDim(maxOutShape, origAxes);\n }\n let out;\n if (shouldExecuteOnCPU) {\n const xTexData = webglBackend.texData.get(maxInput.dataId);\n const values = xTexData.values;\n const outValues = maxImplCPU(values, util.sizeFromShape(reduceShape), outShape, x.dtype);\n out = webglBackend.makeTensorInfo(outShape, x.dtype);\n const outData = webglBackend.texData.get(out.dataId);\n outData.values = outValues;\n }\n else {\n out = maxImpl(maxInput, reduceShape, outShape, webglBackend);\n }\n if (maxInputIsTransposed) {\n webglBackend.disposeIntermediateTensorInfo(maxInput);\n }\n return out;\n }\n};\n//# sourceMappingURL=Max.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, MaxPool, util } from '@tensorflow/tfjs-core';\nimport { Pool2DProgram } from '../pool_gpu';\nimport { assertNotComplex } from '../webgl_util';\nimport { identity } from './Identity';\nexport function maxPool(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n assertNotComplex(x, 'maxPool');\n const { filterSize, strides, pad, dimRoundingMode } = attrs;\n const dilations = 1;\n util.assert(backend_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in maxPool: Either strides or dilations must be 1. ' +\n `Got strides ${strides} and dilations '${dilations}'`);\n const convInfo = backend_util.computePool2DInfo(x.shape, filterSize, strides, dilations, pad, dimRoundingMode);\n if (convInfo.filterWidth === 1 && convInfo.filterHeight === 1 &&\n util.arraysEqual(convInfo.inShape, convInfo.outShape)) {\n return identity({ inputs: { x }, backend });\n }\n const maxPoolProgram = new Pool2DProgram(convInfo, 'max', false);\n return backend.runWebGLProgram(maxPoolProgram, [x], x.dtype);\n}\nexport const maxPoolConfig = {\n kernelName: MaxPool,\n backendName: 'webgl',\n kernelFunc: maxPool\n};\n//# sourceMappingURL=MaxPool.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, MaxPoolBackprop } from '@tensorflow/tfjs-core';\nimport { MaxPool2DBackpropProgram } from '../max_pool_backprop_gpu';\nimport { Pool2DProgram } from '../pool_gpu';\nimport { assertNotComplex } from '../webgl_util';\nexport function maxPoolBackprop(args) {\n const { inputs, backend, attrs } = args;\n const { dy, input, output } = inputs;\n const x = input;\n assertNotComplex([input, output], 'maxPoolBackprop');\n const { filterSize, strides, pad, dimRoundingMode } = attrs;\n const convInfo = backend_util.computePool2DInfo(x.shape, filterSize, strides, 1 /* dilations */, pad, dimRoundingMode);\n const getPositions = true;\n const maxPoolPositionsProgram = new Pool2DProgram(convInfo, 'max', getPositions);\n const maxPoolPositions = backend.runWebGLProgram(maxPoolPositionsProgram, [x], x.dtype);\n const maxPoolBackPropProgram = new MaxPool2DBackpropProgram(convInfo);\n const result = backend.runWebGLProgram(maxPoolBackPropProgram, [dy, maxPoolPositions], x.dtype);\n backend.disposeIntermediateTensorInfo(maxPoolPositions);\n return result;\n}\nexport const maxPoolBackpropConfig = {\n kernelName: MaxPoolBackprop,\n backendName: 'webgl',\n kernelFunc: maxPoolBackprop\n};\n//# sourceMappingURL=MaxPoolBackprop.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Pool2DProgram } from '../pool_gpu';\nexport function maxPoolWithArgmaxImpl(x, includeBatchInIndex, convInfo, backend) {\n let program = new Pool2DProgram(convInfo, 'max', false);\n const poolOutput = backend.runWebGLProgram(program, [x], 'float32');\n program = new Pool2DProgram(convInfo, 'max', true, true, includeBatchInIndex);\n const indexOutput = backend.runWebGLProgram(program, [x], 'float32');\n return [poolOutput, indexOutput];\n}\n//# sourceMappingURL=MaxPoolWithArgmax_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { MaxPoolWithArgmax } from '@tensorflow/tfjs-core';\nimport { backend_util, util } from '@tensorflow/tfjs-core';\nimport { maxPoolWithArgmaxImpl } from './MaxPoolWithArgmax_impl';\nexport const maxPoolWithArgmaxConfig = {\n kernelName: MaxPoolWithArgmax,\n backendName: 'webgl',\n kernelFunc: ({ inputs, attrs, backend }) => {\n const { x } = inputs;\n const { filterSize, strides, pad, includeBatchInIndex } = attrs;\n const webglBackend = backend;\n util.assert(x.shape.length === 4, () => `Error in maxPool: input must be rank 4 but got rank ${x.shape.length}.`);\n const dilations = [1, 1];\n util.assert(backend_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in maxPool: Either strides or dilations must be 1. ' +\n `Got strides ${strides} and dilations '${dilations}'`);\n const convInfo = backend_util.computePool2DInfo(x.shape, filterSize, strides, dilations, pad);\n const [result, indexes] = maxPoolWithArgmaxImpl(x, includeBatchInIndex, convInfo, webglBackend);\n return [result, indexes];\n }\n};\n//# sourceMappingURL=MaxPoolWithArgmax.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nimport { reduce } from '../kernel_utils/reduce';\nimport { reshape } from '../kernels/Reshape';\nexport function meanImpl(x, reduceShape, outShape, backend) {\n const inSize = util.sizeFromShape(reduceShape);\n const xSize = util.sizeFromShape(x.shape);\n const batchSize = xSize / inSize;\n const reshapedInput = reshape({ inputs: { x }, attrs: { shape: [batchSize, inSize] }, backend });\n const reduced = reduce(reshapedInput, 'float32', 'mean', backend);\n const reshapedOutput = reshape({ inputs: { x: reduced }, attrs: { shape: outShape }, backend });\n backend.disposeIntermediateTensorInfo(reshapedInput);\n backend.disposeIntermediateTensorInfo(reduced);\n return reshapedOutput;\n}\n//# sourceMappingURL=Mean_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Mean, util } from '@tensorflow/tfjs-core';\nimport { meanImpl } from './Mean_impl';\nimport { transposeImpl, transposeImplCPU } from './Transpose_impl';\nexport const meanConfig = {\n kernelName: Mean,\n backendName: 'webgl',\n kernelFunc: ({ inputs, attrs, backend }) => {\n const { x } = inputs;\n const { keepDims, axis } = attrs;\n const webglBackend = backend;\n const xRank = x.shape.length;\n const origAxes = util.parseAxisParam(axis, x.shape);\n let axes = origAxes;\n const permutedAxes = backend_util.getAxesPermutation(axes, xRank);\n const meanInputIsTransposed = permutedAxes != null;\n const shouldExecuteOnCPU = webglBackend.shouldExecuteOnCPU([x]);\n const intermediates = [];\n let meanInput = x;\n if (meanInputIsTransposed) {\n if (shouldExecuteOnCPU) {\n const xTexData = webglBackend.texData.get(meanInput.dataId);\n const values = xTexData.values;\n const newShape = new Array(xRank);\n for (let i = 0; i < newShape.length; i++) {\n newShape[i] = x.shape[permutedAxes[i]];\n }\n const meanInputValues = transposeImplCPU(values, x.shape, x.dtype, permutedAxes, newShape);\n meanInput = webglBackend.makeTensorInfo(newShape, x.dtype);\n const meanInputData = webglBackend.texData.get(meanInput.dataId);\n meanInputData.values = meanInputValues;\n }\n else {\n meanInput = transposeImpl(x, permutedAxes, webglBackend);\n }\n intermediates.push(meanInput);\n axes = backend_util.getInnerMostAxes(axes.length, xRank);\n }\n backend_util.assertAxesAreInnerMostDims('sum', axes, xRank);\n const [meanOutShape, reduceShape] = backend_util.computeOutAndReduceShapes(meanInput.shape, axes);\n let outShape = meanOutShape;\n if (keepDims) {\n // rather than reshape at the end, set the target shape here.\n outShape = backend_util.expandShapeToKeepDim(meanOutShape, origAxes);\n }\n const out = meanImpl(meanInput, reduceShape, outShape, webglBackend);\n for (const i of intermediates) {\n webglBackend.disposeIntermediateTensorInfo(i);\n }\n return out;\n }\n};\n//# sourceMappingURL=Mean.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getCoordsDataType } from './shader_compiler';\nexport class MirrorPadProgram {\n constructor(xShape, paddings, mode) {\n this.variableNames = ['x'];\n this.outputShape = paddings.map((p, i) => p[0] /* beforePad */ + xShape[i] + p[1] /* afterPad */);\n const rank = xShape.length;\n const dtype = getCoordsDataType(rank);\n const start = paddings.map(p => p[0]).join(',');\n const end = paddings.map((p, i) => p[0] + xShape[i]).join(',');\n const unpackedCoords = ['coords[0]', 'coords[1]', 'coords[2]', 'coords[3]'].slice(0, rank);\n const offset = mode === 'reflect' ? 0 : 1;\n if (rank === 1) {\n this.userCode = `\n int start = ${start};\n int end = ${end};\n\n void main() {\n int outC = getOutputCoords();\n if (outC < start) {\n outC = start * 2 - outC - ${offset};\n } else if(outC >= end) {\n outC = (end - 1) * 2 - outC + ${offset};\n }\n setOutput(getX(outC - start));\n }\n `;\n return;\n }\n this.userCode = `\n ${dtype} start = ${dtype}(${start});\n ${dtype} end = ${dtype}(${end});\n\n void main() {\n ${dtype} outC = getOutputCoords();\n for (int i = 0; i < ${rank}; i++) {\n if (outC[i] < start[i]) {\n outC[i] = start[i] * 2 - outC[i] - ${offset};\n } else if(outC[i] >= end[i]) {\n outC[i] = (end[i] - 1) * 2 - outC[i] + ${offset};\n }\n }\n ${dtype} coords = outC - start;\n setOutput(getX(${unpackedCoords}));\n }\n `;\n }\n}\n//# sourceMappingURL=mirror_pad_gpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getChannels } from './packing_util';\nimport { getCoordsDataType } from './shader_compiler';\n/**\n * Example shader code for\n * `mirrorPad(tf.tensor1d([1, 2, 3], 'int32'), [[2, 2]], 'reflect')`\n * ```\n * const int start = int(2);\n * const int end = int(5);\n *\n * void main() {\n * int outputLoc = getOutputCoords();\n * vec4 result = vec4(0.);\n *\n * int rc = outputLoc;\n *\n * int source = rc;\n * if (source < start) {\n * source = start * 2 - source - 0;\n * } else if (source >= end) {\n * source = (end - 1) * 2 - source + 0;\n * }\n * source -= start;\n *\n * result[0] = getChannel(getX(source), source);\n * rc += 1;\n * if(rc < 6) {\n * int source = rc;\n * if (source < start) {\n * source = start * 2 - source - 0;\n * } else if (source >= end) {\n * source = (end - 1) * 2 - source + 0;\n * }\n * source -= start;\n *\n * result[1] = getChannel(getX(source), source);\n * }\n *\n * setOutput(result);\n * }\n * ```\n */\nexport class MirrorPadPackedProgram {\n constructor(xShape, paddings, mode) {\n this.variableNames = ['x'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = paddings.map((p, i) => p[0] /* beforePad */ + xShape[i] + p[1] /* afterPad */);\n const rank = xShape.length;\n const dtype = getCoordsDataType(rank);\n const start = paddings.map(p => p[0]).join(',');\n const end = paddings.map((p, i) => p[0] + xShape[i]).join(',');\n const coords = getChannels('rc', rank);\n const source = getChannels('source', rank);\n const cLimit = `${coords[rank - 1]} < ${this.outputShape[rank - 1]}`;\n const innerDims = rank === 1 ? 'source' : `vec2(${source.slice(-2).join()})`;\n const offset = mode === 'reflect' ? 
0 : 1;\n let mainLoop = '';\n if (rank === 1) {\n const padSetup = `\n ${dtype} source = rc;\n if (source < start) {\n source = start * 2 - source - ${offset};\n } else if (source >= end) {\n source = (end - 1) * 2 - source + ${offset};\n }\n source -= start;\n `;\n mainLoop = `\n ${dtype} rc = outputLoc;\n ${padSetup}\n result[0] = getChannel(getX(${source.join()}), ${innerDims});\n ${coords[rank - 1]} += 1;\n if(${cLimit}) {\n ${padSetup}\n result[1] = getChannel(getX(${source.join()}), ${innerDims});\n }\n `;\n }\n else {\n const padSetup = `\n ${dtype} source = rc;\n ${dtype} lt = ${dtype}(lessThan(source, start));\n ${dtype} gte = ${dtype}(greaterThanEqual(source, end));\n ${dtype} orig = 1 - (lt + gte);\n source = orig * source +\n lt * (start * 2 - source - ${offset}) +\n gte * ((end - 1) * 2 - source + ${offset});\n source -= start;\n `;\n mainLoop = `\n ${dtype} rc = outputLoc;\n ${padSetup}\n result[0] = getChannel(getX(${source.join()}), ${innerDims});\n ${coords[rank - 1]} += 1;\n if(${cLimit}) {\n ${padSetup}\n result[1] = getChannel(getX(${source.join()}), ${innerDims});\n }\n rc = outputLoc;\n ${coords[rank - 2]} += 1;\n if(${coords[rank - 2]} < ${this.outputShape[rank - 2]}) {\n ${padSetup}\n result[2] = getChannel(getX(${source.join()}), ${innerDims});\n ${coords[rank - 1]} += 1;\n if(${cLimit}) {\n ${padSetup}\n result[3] = getChannel(getX(${source.join()}), ${innerDims});\n }\n }\n `;\n }\n this.userCode = `\n const ${dtype} start = ${dtype}(${start});\n const ${dtype} end = ${dtype}(${end});\n\n void main() {\n ${dtype} outputLoc = getOutputCoords();\n vec4 result = vec4(0.);\n ${mainLoop}\n setOutput(result);\n }\n `;\n }\n}\n//# sourceMappingURL=mirror_pad_packed_gpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env, MirrorPad } from '@tensorflow/tfjs-core';\nimport { MirrorPadProgram } from '../mirror_pad_gpu';\nimport { MirrorPadPackedProgram } from '../mirror_pad_packed_gpu';\nexport const mirrorPadKernelFunc = ({ inputs, backend, attrs }) => {\n const { x } = inputs;\n const { paddings, mode } = attrs;\n const program = env().getBool('WEBGL_PACK_ARRAY_OPERATIONS') ?\n new MirrorPadPackedProgram(x.shape, paddings, mode) :\n new MirrorPadProgram(x.shape, paddings, mode);\n const output = backend.runWebGLProgram(program, [x], x.dtype);\n return output;\n};\nexport const mirrorPadConfig = {\n kernelName: MirrorPad,\n backendName: 'webgl',\n kernelFunc: mirrorPadKernelFunc,\n};\n//# sourceMappingURL=MirrorPad.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util } from '@tensorflow/tfjs-core';\n// (Ar + Ai)(Br + Bi) =\n// ArBr + ArBi + AiBr + AiBi = ArBr - AB + ArBi + AiBr\n// Yr = ArBr - AB\n// Yi = ArBi + AiBr\nexport const COMPLEX_MULTIPLY = {\n REAL: 'return areal * breal - aimag * bimag;',\n IMAG: 'return areal * bimag + aimag * breal;'\n};\nexport class BinaryOpComplexProgram {\n constructor(op, aShape, bShape) {\n this.variableNames = ['AReal', 'AImag', 'BReal', 'BImag'];\n this.outputShape = backend_util.assertAndGetBroadcastShape(aShape, bShape);\n this.userCode = `\n float binaryOpComplex(\n float areal, float aimag, float breal, float bimag) {\n ${op}\n }\n\n void main() {\n float areal = getARealAtOutCoords();\n float aimag = getAImagAtOutCoords();\n float breal = getBRealAtOutCoords();\n float bimag = getBImagAtOutCoords();\n setOutput(binaryOpComplex(areal, aimag, breal, bimag));\n }\n `;\n }\n}\n//# sourceMappingURL=binaryop_complex_gpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, env, Multiply } from '@tensorflow/tfjs-core';\nimport * as binaryop_complex_gpu from '../binaryop_complex_gpu';\nimport { BinaryOpComplexProgram } from '../binaryop_complex_gpu';\nimport { BinaryOpProgram } from '../binaryop_gpu';\nimport { BinaryOpPackedProgram } from '../binaryop_packed_gpu';\nimport { multiplyImplCPU as cpuMultiply } from '../kernel_utils/shared';\nimport { complex } from './Complex';\nconst MUL = 'return a * b;';\nexport function multiply(args) {\n const { inputs, backend } = args;\n const { a, b } = inputs;\n const dtype = backend_util.upcastType(a.dtype, b.dtype);\n if (a.dtype === 'complex64') {\n const aData = backend.texData.get(a.dataId);\n const bData = backend.texData.get(b.dataId);\n const realProgram = new BinaryOpComplexProgram(binaryop_complex_gpu.COMPLEX_MULTIPLY.REAL, a.shape, b.shape);\n const imagProgram = new BinaryOpComplexProgram(binaryop_complex_gpu.COMPLEX_MULTIPLY.IMAG, a.shape, b.shape);\n const inputs = [\n {\n dataId: aData.complexTensorInfos.real.dataId,\n dtype: aData.complexTensorInfos.real.dtype,\n shape: a.shape\n },\n {\n dataId: aData.complexTensorInfos.imag.dataId,\n dtype: aData.complexTensorInfos.imag.dtype,\n shape: 
a.shape\n },\n {\n dataId: bData.complexTensorInfos.real.dataId,\n dtype: bData.complexTensorInfos.real.dtype,\n shape: b.shape\n },\n {\n dataId: bData.complexTensorInfos.imag.dataId,\n dtype: bData.complexTensorInfos.imag.dtype,\n shape: b.shape\n }\n ];\n const realPart = backend.runWebGLProgram(realProgram, inputs, 'float32');\n const imagPart = backend.runWebGLProgram(imagProgram, inputs, 'float32');\n const complexOutput = complex({ inputs: { real: realPart, imag: imagPart }, backend });\n backend.disposeIntermediateTensorInfo(realPart);\n backend.disposeIntermediateTensorInfo(imagPart);\n // TODO(annxingyuan): CPU forwarding for complex inputs.\n return complexOutput;\n }\n if (backend.shouldExecuteOnCPU([a, b])) {\n const aData = backend.texData.get(a.dataId);\n const bData = backend.texData.get(b.dataId);\n const [outValues, outShape] = cpuMultiply(a.shape, b.shape, aData.values, bData.values, dtype);\n const out = backend.makeTensorInfo(outShape, dtype);\n const outData = backend.texData.get(out.dataId);\n outData.values = outValues;\n return out;\n }\n let program;\n if (env().getBool('WEBGL_PACK_BINARY_OPERATIONS')) {\n program = new BinaryOpPackedProgram(MUL, a.shape, b.shape);\n }\n else {\n program = new BinaryOpProgram(MUL, a.shape, b.shape);\n }\n return backend.runWebGLProgram(program, [a, b], dtype);\n}\nexport const multiplyConfig = {\n kernelName: Multiply,\n backendName: 'webgl',\n kernelFunc: multiply\n};\n//# sourceMappingURL=Multiply.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, kernel_impls, NonMaxSuppressionV3 } from '@tensorflow/tfjs-core';\nexport const nonMaxSuppressionV3Config = {\n kernelName: NonMaxSuppressionV3,\n backendName: 'webgl',\n kernelFunc: ({ inputs, backend, attrs }) => {\n backend_util.warn('tf.nonMaxSuppression() in webgl locks the UI thread. ' +\n 'Call tf.nonMaxSuppressionAsync() instead');\n const { boxes, scores } = inputs;\n const { maxOutputSize, iouThreshold, scoreThreshold } = attrs;\n const gpuBackend = backend;\n const boxesVals = gpuBackend.readSync(boxes.dataId);\n const scoresVals = gpuBackend.readSync(scores.dataId);\n const maxOutputSizeVal = maxOutputSize;\n const iouThresholdVal = iouThreshold;\n const scoreThresholdVal = scoreThreshold;\n return kernel_impls.nonMaxSuppressionV3Impl(boxesVals, scoresVals, maxOutputSizeVal, iouThresholdVal, scoreThresholdVal);\n }\n};\n//# sourceMappingURL=NonMaxSuppressionV3.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, kernel_impls, NonMaxSuppressionV4 } from '@tensorflow/tfjs-core';\nconst nonMaxSuppressionV4Impl = kernel_impls.nonMaxSuppressionV4Impl;\nexport const nonMaxSuppressionV4Config = {\n kernelName: NonMaxSuppressionV4,\n backendName: 'webgl',\n kernelFunc: ({ inputs, backend, attrs }) => {\n backend_util.warn('tf.nonMaxSuppression() in webgl locks the UI thread. ' +\n 'Call tf.nonMaxSuppressionAsync() instead');\n const { boxes, scores } = inputs;\n const { maxOutputSize, iouThreshold, scoreThreshold, padToMaxOutputSize } = attrs;\n const gpuBackend = backend;\n const boxesVals = gpuBackend.readSync(boxes.dataId);\n const scoresVals = gpuBackend.readSync(scores.dataId);\n const { selectedIndices, validOutputs } = nonMaxSuppressionV4Impl(boxesVals, scoresVals, maxOutputSize, iouThreshold, scoreThreshold, padToMaxOutputSize);\n return [selectedIndices, validOutputs];\n }\n};\n//# sourceMappingURL=NonMaxSuppressionV4.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, kernel_impls } from '@tensorflow/tfjs-core';\nimport { NonMaxSuppressionV5 } from '@tensorflow/tfjs-core';\nconst nonMaxSuppressionV5Impl = kernel_impls.nonMaxSuppressionV5Impl;\nexport const nonMaxSuppressionV5Config = {\n kernelName: NonMaxSuppressionV5,\n backendName: 'webgl',\n kernelFunc: ({ inputs, backend, attrs }) => {\n backend_util.warn('tf.nonMaxSuppression() in webgl locks the UI thread. 
' +\n 'Call tf.nonMaxSuppressionAsync() instead');\n const { boxes, scores } = inputs;\n const { maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma } = attrs;\n const gpuBackend = backend;\n const boxesVals = gpuBackend.readSync(boxes.dataId);\n const scoresVals = gpuBackend.readSync(scores.dataId);\n const maxOutputSizeVal = maxOutputSize;\n const iouThresholdVal = iouThreshold;\n const scoreThresholdVal = scoreThreshold;\n const softNmsSigmaVal = softNmsSigma;\n const { selectedIndices, selectedScores } = nonMaxSuppressionV5Impl(boxesVals, scoresVals, maxOutputSizeVal, iouThresholdVal, scoreThresholdVal, softNmsSigmaVal);\n return [selectedIndices, selectedScores];\n }\n};\n//# sourceMappingURL=NonMaxSuppressionV5.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util } from '@tensorflow/tfjs-core';\nexport class RotateProgram {\n constructor(imageShape, radians, fillValue, center) {\n this.variableNames = ['Image'];\n this.outputShape = [];\n const imageHeight = imageShape[1];\n const imageWidth = imageShape[2];\n const sinFactor = Math.sin(radians).toFixed(3);\n const cosFactor = Math.cos(radians).toFixed(3);\n this.outputShape = imageShape;\n const [centerX, centerY] = backend_util.getImageCenter(center, imageHeight, imageWidth);\n const centerXString = centerX.toFixed(3);\n const centerYString = centerY.toFixed(3);\n let fillSnippet = '';\n if (typeof fillValue === 'number') {\n fillSnippet = `float outputValue = ${fillValue.toFixed(2)};`;\n }\n else {\n fillSnippet = `\n vec3 fill = vec3(${fillValue.join(',')});\n float outputValue = fill[coords[3]];`;\n }\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int x = coords[2];\n int y = coords[1];\n float coordXFloat = (float(x) - ${centerXString}) * ${cosFactor} - (float(y) - ${centerYString}) * ${sinFactor};\n float coordYFloat = (float(x) - ${centerXString}) * ${sinFactor} + (float(y) - ${centerYString}) * ${cosFactor};\n int coordX = int(round(coordXFloat + ${centerXString}));\n int coordY = int(round(coordYFloat + ${centerYString}));\n ${fillSnippet}\n if(coordX >= 0 && coordX < ${imageWidth} && coordY >= 0 && coordY < ${imageHeight}) {\n outputValue = getImage(coords[0], coordY, coordX, coords[3]);\n }\n setOutput(outputValue);\n }\n `;\n }\n}\n//# sourceMappingURL=rotate_gpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { RotateWithOffset } from '@tensorflow/tfjs-core';\nimport { RotateProgram } from '../rotate_gpu';\nexport const rotateWithOffsetConfig = {\n kernelName: RotateWithOffset,\n backendName: 'webgl',\n kernelFunc: ({ inputs, attrs, backend }) => {\n const { image } = inputs;\n const { radians, fillValue, center } = attrs;\n const webglBackend = backend;\n const program = new RotateProgram(image.shape, radians, fillValue, center);\n const output = webglBackend.runWebGLProgram(program, [image], image.dtype);\n return output;\n }\n};\n//# sourceMappingURL=RotateWithOffset.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sin } from '@tensorflow/tfjs-core';\nimport { CHECK_NAN_SNIPPET_UNARY, unaryKernelFunc } from '../kernel_utils/kernel_funcs_utils';\nconst SIN = CHECK_NAN_SNIPPET_UNARY + `\n return sin(x);\n`;\nexport const sin = unaryKernelFunc(SIN);\nexport const sinConfig = {\n kernelName: Sin,\n backendName: 'webgl',\n kernelFunc: sin,\n};\n//# sourceMappingURL=Sin.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Square } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../kernel_utils/kernel_funcs_utils';\nconst SQUARE = `return x * x;`;\nexport const square = unaryKernelFunc(SQUARE);\nexport const squareConfig = {\n kernelName: Square,\n backendName: 'webgl',\n kernelFunc: square,\n};\n//# sourceMappingURL=Square.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { SquaredDifference } from '@tensorflow/tfjs-core';\nimport { binaryKernelFunc } from '../kernel_utils/kernel_funcs_utils';\nconst SQUARED_DIFFERENCE = 'return (a - b) * (a - b);';\nexport const squaredDifference = binaryKernelFunc({ opSnippet: SQUARED_DIFFERENCE, packedOpSnippet: SQUARED_DIFFERENCE });\nexport const squaredDifferenceConfig = {\n kernelName: SquaredDifference,\n backendName: 'webgl',\n kernelFunc: squaredDifference,\n};\n//# sourceMappingURL=SquaredDifference.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sub } from '@tensorflow/tfjs-core';\nimport { binaryKernelFunc } from '../kernel_utils/kernel_funcs_utils';\nimport { subImplCPU as cpuSub } from '../kernel_utils/shared';\nconst SUB = 'return a - b;';\nexport const subKernelFunc = binaryKernelFunc({\n opSnippet: SUB,\n packedOpSnippet: SUB,\n supportsComplex: true,\n cpuKernelImpl: cpuSub\n});\nexport const subConfig = {\n kernelName: Sub,\n backendName: 'webgl',\n kernelFunc: subKernelFunc\n};\n//# sourceMappingURL=Sub.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Tan } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../kernel_utils/kernel_funcs_utils';\nconst TAN = `return tan(x);`;\nexport const tan = unaryKernelFunc(TAN);\nexport const tanConfig = {\n kernelName: Tan,\n backendName: 'webgl',\n kernelFunc: tan,\n};\n//# sourceMappingURL=Tan.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Transpose } from '@tensorflow/tfjs-core';\nimport { transposeImpl } from './Transpose_impl';\nimport { transposeImplCPU as cpuTranspose } from './Transpose_impl';\nexport const transposeConfig = {\n kernelName: Transpose,\n backendName: 'webgl',\n kernelFunc: ({ inputs, attrs, backend }) => {\n const { x } = inputs;\n const { perm } = attrs;\n const webglBackend = backend;\n const xRank = x.shape.length;\n const newShape = new Array(xRank);\n for (let i = 0; i < newShape.length; i++) {\n newShape[i] = x.shape[perm[i]];\n }\n let out;\n if (webglBackend.shouldExecuteOnCPU([x])) {\n const xTexData = webglBackend.texData.get(x.dataId);\n const values = xTexData.values;\n const outValues = cpuTranspose(values, x.shape, x.dtype, perm, newShape);\n out = webglBackend.makeTensorInfo(newShape, x.dtype);\n const outData = webglBackend.texData.get(out.dataId);\n outData.values = outValues;\n }\n else {\n out = transposeImpl(x, perm, webglBackend);\n }\n return out;\n }\n};\n//# sourceMappingURL=Transpose.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Unique } from '@tensorflow/tfjs-core';\nimport { uniqueImplCPU } from '../kernel_utils/shared';\nimport { assertNotComplex } from '../webgl_util';\nexport function unique(args) {\n const { inputs, attrs, backend } = args;\n const { axis } = attrs;\n const { x } = inputs;\n assertNotComplex(x, 'unique');\n // For now, always forward calculation to the CPU backend.\n console.warn('WARNING: ', 'UI might be locked temporarily as data is being downloaded');\n const values = backend.readSync(x.dataId);\n const { outputValues, outputShape, indices } = uniqueImplCPU(values, axis, x.shape, x.dtype);\n return [\n backend.makeTensorInfo(outputShape, x.dtype, outputValues),\n backend.makeTensorInfo([indices.length], 'int32', indices),\n ];\n}\nexport const uniqueConfig = {\n kernelName: Unique,\n backendName: 'webgl',\n kernelFunc: unique,\n};\n//# sourceMappingURL=Unique.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { registerKernel } from '@tensorflow/tfjs-core';\nimport { addConfig } from './kernels/Add';\nimport { atan2Config } from './kernels/Atan2';\nimport { avgPoolConfig } from './kernels/AvgPool';\nimport { avgPoolBackpropConfig } from './kernels/AvgPoolBackprop';\nimport { batchNormConfig } from './kernels/BatchNorm';\nimport { castConfig } from './kernels/Cast';\nimport { complexConfig } from './kernels/Complex';\nimport { concatConfig } from './kernels/Concat';\nimport { cosConfig } from './kernels/Cos';\nimport { divConfig } from './kernels/Div';\nimport { fftConfig } from './kernels/FFT';\nimport { flipLeftRightConfig } from './kernels/FlipLeftRight';\nimport { fromPixelsConfig } from './kernels/FromPixels';\nimport { identityConfig } from './kernels/Identity';\nimport { ifftConfig } from './kernels/IFFT';\nimport { imagConfig } from './kernels/Imag';\nimport { maxConfig } from './kernels/Max';\nimport { maxPoolConfig } from './kernels/MaxPool';\nimport { maxPoolBackpropConfig } from './kernels/MaxPoolBackprop';\nimport { maxPoolWithArgmaxConfig } from './kernels/MaxPoolWithArgmax';\nimport { meanConfig } from './kernels/Mean';\nimport { mirrorPadConfig } from './kernels/MirrorPad';\nimport { multiplyConfig } from './kernels/Multiply';\nimport { nonMaxSuppressionV3Config } from './kernels/NonMaxSuppressionV3';\nimport { nonMaxSuppressionV4Config } from './kernels/NonMaxSuppressionV4';\nimport { nonMaxSuppressionV5Config } from './kernels/NonMaxSuppressionV5';\nimport { notEqualConfig } from './kernels/NotEqual';\nimport { realConfig } from './kernels/Real';\nimport { reshapeConfig } from './kernels/Reshape';\nimport { rotateWithOffsetConfig } from './kernels/RotateWithOffset';\nimport { sinConfig } from './kernels/Sin';\nimport { squareConfig } from './kernels/Square';\nimport { squaredDifferenceConfig } from './kernels/SquaredDifference';\nimport { subConfig } from './kernels/Sub';\nimport { tanConfig } from './kernels/Tan';\nimport { transposeConfig } from './kernels/Transpose';\nimport { uniqueConfig } from './kernels/Unique';\n// List all kernel configs here\nconst kernelConfigs = [\n addConfig,\n atan2Config,\n avgPoolConfig,\n avgPoolBackpropConfig,\n batchNormConfig,\n castConfig,\n complexConfig,\n concatConfig,\n cosConfig,\n divConfig,\n fftConfig,\n flipLeftRightConfig,\n fromPixelsConfig,\n identityConfig,\n ifftConfig,\n imagConfig,\n maxConfig,\n maxPoolConfig,\n maxPoolBackpropConfig,\n maxPoolWithArgmaxConfig,\n meanConfig,\n mirrorPadConfig,\n multiplyConfig,\n nonMaxSuppressionV3Config,\n nonMaxSuppressionV4Config,\n nonMaxSuppressionV5Config,\n notEqualConfig,\n realConfig,\n reshapeConfig,\n rotateWithOffsetConfig,\n sinConfig,\n squareConfig,\n subConfig,\n squaredDifferenceConfig,\n tanConfig,\n transposeConfig,\n uniqueConfig,\n];\nfor (const kernelConfig of kernelConfigs) {\n 
registerKernel(kernelConfig);\n}\n//# sourceMappingURL=register_all_kernels.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// All exports from this package should be in base.\nexport * from './base';\nimport './register_all_kernels';\n//# sourceMappingURL=index.js.map", "/** @license See the LICENSE file. */\n\n// This code is auto-generated, do not modify this file!\nconst version = '2.7.0';\nexport {version};\n", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nexport * from '@tensorflow/tfjs-core';\nexport * from '@tensorflow/tfjs-layers';\nexport * from '@tensorflow/tfjs-converter';\n\n// Export data api as tf.data\nimport * as data from '@tensorflow/tfjs-data';\nexport {data};\n\n// Import and register backends.\nimport '@tensorflow/tfjs-backend-cpu';\nimport '@tensorflow/tfjs-backend-webgl';\n\n// Import versions of all sub-packages.\nimport {version_core} from '@tensorflow/tfjs-core';\nimport {version_cpu} from '@tensorflow/tfjs-backend-cpu';\nimport {version_webgl} from '@tensorflow/tfjs-backend-webgl';\nimport {version_data} from '@tensorflow/tfjs-data';\nimport {version_layers} from '@tensorflow/tfjs-layers';\nimport {version_converter} from '@tensorflow/tfjs-converter';\nimport {version as version_union} from './version';\n\nexport const version = {\n 'tfjs-core': version_core,\n 'tfjs-backend-cpu': version_cpu,\n 'tfjs-backend-webgl': version_webgl,\n 'tfjs-data': version_data,\n 'tfjs-layers': version_layers,\n 'tfjs-converter': version_converter,\n 'tfjs': version_union\n};\n", "", "", "", "\nvar WasmBackendModuleThreadedSimd = (function() {\n var _scriptDir = typeof document !== 'undefined' && document.currentScript ? 
document.currentScript.src : undefined;\n if (typeof __filename !== 'undefined') _scriptDir = _scriptDir || __filename;\n return (\nfunction(WasmBackendModuleThreadedSimd) {\n WasmBackendModuleThreadedSimd = WasmBackendModuleThreadedSimd || {};\n\nfunction GROWABLE_HEAP_I8(){if(wasmMemory.buffer!=buffer){updateGlobalBufferAndViews(wasmMemory.buffer)}return HEAP8}function GROWABLE_HEAP_U8(){if(wasmMemory.buffer!=buffer){updateGlobalBufferAndViews(wasmMemory.buffer)}return HEAPU8}function GROWABLE_HEAP_I32(){if(wasmMemory.buffer!=buffer){updateGlobalBufferAndViews(wasmMemory.buffer)}return HEAP32}function GROWABLE_HEAP_U32(){if(wasmMemory.buffer!=buffer){updateGlobalBufferAndViews(wasmMemory.buffer)}return HEAPU32}function GROWABLE_HEAP_F64(){if(wasmMemory.buffer!=buffer){updateGlobalBufferAndViews(wasmMemory.buffer)}return HEAPF64}var Module=typeof WasmBackendModuleThreadedSimd!==\"undefined\"?WasmBackendModuleThreadedSimd:{};var moduleOverrides={};var key;for(key in Module){if(Module.hasOwnProperty(key)){moduleOverrides[key]=Module[key]}}var arguments_=[];var thisProgram=\"./this.program\";var quit_=function(status,toThrow){throw toThrow};var ENVIRONMENT_IS_WEB=false;var ENVIRONMENT_IS_WORKER=false;var ENVIRONMENT_IS_NODE=false;var ENVIRONMENT_IS_SHELL=false;ENVIRONMENT_IS_WEB=typeof window===\"object\";ENVIRONMENT_IS_WORKER=typeof importScripts===\"function\";ENVIRONMENT_IS_NODE=typeof process===\"object\"&&typeof process.versions===\"object\"&&typeof process.versions.node===\"string\";ENVIRONMENT_IS_SHELL=!ENVIRONMENT_IS_WEB&&!ENVIRONMENT_IS_NODE&&!ENVIRONMENT_IS_WORKER;var ENVIRONMENT_IS_PTHREAD=Module[\"ENVIRONMENT_IS_PTHREAD\"]||false;if(ENVIRONMENT_IS_PTHREAD){buffer=Module[\"buffer\"];DYNAMIC_BASE=Module[\"DYNAMIC_BASE\"];DYNAMICTOP_PTR=Module[\"DYNAMICTOP_PTR\"]}var scriptDirectory=\"\";function locateFile(path){if(Module[\"locateFile\"]){return Module[\"locateFile\"](path,scriptDirectory)}return scriptDirectory+path}var read_,readAsync,readBinary,setWindowTitle;var nodeFS;var nodePath;if(ENVIRONMENT_IS_NODE){if(ENVIRONMENT_IS_WORKER){scriptDirectory=require(\"path\").dirname(scriptDirectory)+\"/\"}else{scriptDirectory=__dirname+\"/\"}read_=function shell_read(filename,binary){if(!nodeFS)nodeFS=require(\"fs\");if(!nodePath)nodePath=require(\"path\");filename=nodePath[\"normalize\"](filename);return nodeFS[\"readFileSync\"](filename,binary?null:\"utf8\")};readBinary=function readBinary(filename){var ret=read_(filename,true);if(!ret.buffer){ret=new Uint8Array(ret)}assert(ret.buffer);return ret};if(process[\"argv\"].length>1){thisProgram=process[\"argv\"][1].replace(/\\\\/g,\"/\")}arguments_=process[\"argv\"].slice(2);process[\"on\"](\"uncaughtException\",function(ex){if(!(ex instanceof ExitStatus)){throw ex}});process[\"on\"](\"unhandledRejection\",abort);quit_=function(status){process[\"exit\"](status)};Module[\"inspect\"]=function(){return\"[Emscripten Module object]\"};var nodeWorkerThreads;try{nodeWorkerThreads=require(\"worker_threads\")}catch(e){console.error('The \"worker_threads\" module is not supported in this node.js build - perhaps a newer version is needed?');throw e}Worker=nodeWorkerThreads.Worker}else if(ENVIRONMENT_IS_SHELL){if(typeof read!=\"undefined\"){read_=function shell_read(f){return read(f)}}readBinary=function readBinary(f){var data;if(typeof readbuffer===\"function\"){return new Uint8Array(readbuffer(f))}data=read(f,\"binary\");assert(typeof data===\"object\");return data};if(typeof scriptArgs!=\"undefined\"){arguments_=scriptArgs}else if(typeof 
arguments!=\"undefined\"){arguments_=arguments}if(typeof quit===\"function\"){quit_=function(status){quit(status)}}if(typeof print!==\"undefined\"){if(typeof console===\"undefined\")console={};console.log=print;console.warn=console.error=typeof printErr!==\"undefined\"?printErr:print}}else if(ENVIRONMENT_IS_WEB||ENVIRONMENT_IS_WORKER){if(ENVIRONMENT_IS_WORKER){scriptDirectory=self.location.href}else if(document.currentScript){scriptDirectory=document.currentScript.src}if(_scriptDir){scriptDirectory=_scriptDir}if(scriptDirectory.indexOf(\"blob:\")!==0){scriptDirectory=scriptDirectory.substr(0,scriptDirectory.lastIndexOf(\"/\")+1)}else{scriptDirectory=\"\"}if(ENVIRONMENT_IS_NODE){read_=function shell_read(filename,binary){if(!nodeFS)nodeFS=require(\"fs\");if(!nodePath)nodePath=require(\"path\");filename=nodePath[\"normalize\"](filename);return nodeFS[\"readFileSync\"](filename,binary?null:\"utf8\")};readBinary=function readBinary(filename){var ret=read_(filename,true);if(!ret.buffer){ret=new Uint8Array(ret)}assert(ret.buffer);return ret}}else{read_=function shell_read(url){var xhr=new XMLHttpRequest;xhr.open(\"GET\",url,false);xhr.send(null);return xhr.responseText};if(ENVIRONMENT_IS_WORKER){readBinary=function readBinary(url){var xhr=new XMLHttpRequest;xhr.open(\"GET\",url,false);xhr.responseType=\"arraybuffer\";xhr.send(null);return new Uint8Array(xhr.response)}}readAsync=function readAsync(url,onload,onerror){var xhr=new XMLHttpRequest;xhr.open(\"GET\",url,true);xhr.responseType=\"arraybuffer\";xhr.onload=function xhr_onload(){if(xhr.status==200||xhr.status==0&&xhr.response){onload(xhr.response);return}onerror()};xhr.onerror=onerror;xhr.send(null)}}setWindowTitle=function(title){document.title=title}}else{}if(ENVIRONMENT_IS_NODE){if(typeof performance===\"undefined\"){performance=require(\"perf_hooks\").performance}}var out=Module[\"print\"]||console.log.bind(console);var err=Module[\"printErr\"]||console.warn.bind(console);for(key in moduleOverrides){if(moduleOverrides.hasOwnProperty(key)){Module[key]=moduleOverrides[key]}}moduleOverrides=null;if(Module[\"arguments\"])arguments_=Module[\"arguments\"];if(Module[\"thisProgram\"])thisProgram=Module[\"thisProgram\"];if(Module[\"quit\"])quit_=Module[\"quit\"];var Atomics_load=Atomics.load;var Atomics_store=Atomics.store;var Atomics_compareExchange=Atomics.compareExchange;var wasmBinary;if(Module[\"wasmBinary\"])wasmBinary=Module[\"wasmBinary\"];var noExitRuntime;if(Module[\"noExitRuntime\"])noExitRuntime=Module[\"noExitRuntime\"];if(typeof WebAssembly!==\"object\"){err(\"no native wasm support detected\")}var wasmMemory;var wasmTable=new WebAssembly.Table({\"initial\":165,\"maximum\":165+0,\"element\":\"anyfunc\"});var wasmModule;var threadInfoStruct=0;var selfThreadId=0;var ABORT=false;var EXITSTATUS=0;function assert(condition,text){if(!condition){abort(\"Assertion failed: \"+text)}}function getCFunc(ident){var func=Module[\"_\"+ident];assert(func,\"Cannot call unknown function \"+ident+\", make sure it is exported\");return func}function ccall(ident,returnType,argTypes,args,opts){var toC={\"string\":function(str){var ret=0;if(str!==null&&str!==undefined&&str!==0){var len=(str.length<<2)+1;ret=stackAlloc(len);stringToUTF8(str,ret,len)}return ret},\"array\":function(arr){var ret=stackAlloc(arr.length);writeArrayToMemory(arr,ret);return ret}};function convertReturnValue(ret){if(returnType===\"string\")return UTF8ToString(ret);if(returnType===\"boolean\")return Boolean(ret);return ret}var func=getCFunc(ident);var cArgs=[];var 
stack=0;if(args){for(var i=0;i=endIdx)){var u0=heap[idx++];if(!u0)return str;if(!(u0&128)){str+=String.fromCharCode(u0);continue}var u1=heap[idx++]&63;if((u0&224)==192){str+=String.fromCharCode((u0&31)<<6|u1);continue}var u2=heap[idx++]&63;if((u0&240)==224){u0=(u0&15)<<12|u1<<6|u2}else{u0=(u0&7)<<18|u1<<12|u2<<6|heap[idx++]&63}if(u0<65536){str+=String.fromCharCode(u0)}else{var ch=u0-65536;str+=String.fromCharCode(55296|ch>>10,56320|ch&1023)}}return str}function UTF8ToString(ptr,maxBytesToRead){return ptr?UTF8ArrayToString(GROWABLE_HEAP_U8(),ptr,maxBytesToRead):\"\"}function stringToUTF8Array(str,heap,outIdx,maxBytesToWrite){if(!(maxBytesToWrite>0))return 0;var startIdx=outIdx;var endIdx=outIdx+maxBytesToWrite-1;for(var i=0;i=55296&&u<=57343){var u1=str.charCodeAt(++i);u=65536+((u&1023)<<10)|u1&1023}if(u<=127){if(outIdx>=endIdx)break;heap[outIdx++]=u}else if(u<=2047){if(outIdx+1>=endIdx)break;heap[outIdx++]=192|u>>6;heap[outIdx++]=128|u&63}else if(u<=65535){if(outIdx+2>=endIdx)break;heap[outIdx++]=224|u>>12;heap[outIdx++]=128|u>>6&63;heap[outIdx++]=128|u&63}else{if(outIdx+3>=endIdx)break;heap[outIdx++]=240|u>>18;heap[outIdx++]=128|u>>12&63;heap[outIdx++]=128|u>>6&63;heap[outIdx++]=128|u&63}}heap[outIdx]=0;return outIdx-startIdx}function stringToUTF8(str,outPtr,maxBytesToWrite){return stringToUTF8Array(str,GROWABLE_HEAP_U8(),outPtr,maxBytesToWrite)}function lengthBytesUTF8(str){var len=0;for(var i=0;i=55296&&u<=57343)u=65536+((u&1023)<<10)|str.charCodeAt(++i)&1023;if(u<=127)++len;else if(u<=2047)len+=2;else if(u<=65535)len+=3;else len+=4}return len}function writeArrayToMemory(array,buffer){GROWABLE_HEAP_I8().set(array,buffer)}var WASM_PAGE_SIZE=65536;function alignUp(x,multiple){if(x%multiple>0){x+=multiple-x%multiple}return x}var buffer,HEAP8,HEAPU8,HEAP16,HEAPU16,HEAP32,HEAPU32,HEAPF32,HEAPF64;function updateGlobalBufferAndViews(buf){buffer=buf;Module[\"HEAP8\"]=HEAP8=new Int8Array(buf);Module[\"HEAP16\"]=HEAP16=new Int16Array(buf);Module[\"HEAP32\"]=HEAP32=new Int32Array(buf);Module[\"HEAPU8\"]=HEAPU8=new Uint8Array(buf);Module[\"HEAPU16\"]=HEAPU16=new Uint16Array(buf);Module[\"HEAPU32\"]=HEAPU32=new Uint32Array(buf);Module[\"HEAPF32\"]=HEAPF32=new Float32Array(buf);Module[\"HEAPF64\"]=HEAPF64=new Float64Array(buf)}var STACK_BASE=5256384,STACKTOP=STACK_BASE,STACK_MAX=13504,DYNAMIC_BASE=5256384,DYNAMICTOP_PTR=12576;if(ENVIRONMENT_IS_PTHREAD){}var INITIAL_INITIAL_MEMORY=Module[\"INITIAL_MEMORY\"]||16777216;if(ENVIRONMENT_IS_PTHREAD){wasmMemory=Module[\"wasmMemory\"];buffer=Module[\"buffer\"]}else{if(Module[\"wasmMemory\"]){wasmMemory=Module[\"wasmMemory\"]}else{wasmMemory=new WebAssembly.Memory({\"initial\":INITIAL_INITIAL_MEMORY/WASM_PAGE_SIZE,\"maximum\":2147483648/WASM_PAGE_SIZE,\"shared\":true});if(!(wasmMemory.buffer instanceof SharedArrayBuffer)){err(\"requested a shared WebAssembly.Memory but the returned buffer is not a SharedArrayBuffer, indicating that while the browser has SharedArrayBuffer it does not have WebAssembly threads support - you may need to set a flag\");if(ENVIRONMENT_IS_NODE){console.log(\"(on node you may need: --experimental-wasm-threads --experimental-wasm-bulk-memory and also use a recent version)\")}throw Error(\"bad memory\")}}}if(wasmMemory){buffer=wasmMemory.buffer}INITIAL_INITIAL_MEMORY=buffer.byteLength;updateGlobalBufferAndViews(buffer);if(!ENVIRONMENT_IS_PTHREAD){GROWABLE_HEAP_I32()[DYNAMICTOP_PTR>>2]=DYNAMIC_BASE}function callRuntimeCallbacks(callbacks){while(callbacks.length>0){var callback=callbacks.shift();if(typeof 
callback==\"function\"){callback(Module);continue}var func=callback.func;if(typeof func===\"number\"){if(callback.arg===undefined){Module[\"dynCall_v\"](func)}else{Module[\"dynCall_vi\"](func,callback.arg)}}else{func(callback.arg===undefined?null:callback.arg)}}}var __ATPRERUN__=[];var __ATINIT__=[];var __ATMAIN__=[];var __ATEXIT__=[];var __ATPOSTRUN__=[];var runtimeInitialized=false;if(ENVIRONMENT_IS_PTHREAD)runtimeInitialized=true;function preRun(){if(ENVIRONMENT_IS_PTHREAD)return;if(Module[\"preRun\"]){if(typeof Module[\"preRun\"]==\"function\")Module[\"preRun\"]=[Module[\"preRun\"]];while(Module[\"preRun\"].length){addOnPreRun(Module[\"preRun\"].shift())}}callRuntimeCallbacks(__ATPRERUN__)}function initRuntime(){runtimeInitialized=true;callRuntimeCallbacks(__ATINIT__)}function preMain(){if(ENVIRONMENT_IS_PTHREAD)return;callRuntimeCallbacks(__ATMAIN__)}function postRun(){if(ENVIRONMENT_IS_PTHREAD)return;if(Module[\"postRun\"]){if(typeof Module[\"postRun\"]==\"function\")Module[\"postRun\"]=[Module[\"postRun\"]];while(Module[\"postRun\"].length){addOnPostRun(Module[\"postRun\"].shift())}}callRuntimeCallbacks(__ATPOSTRUN__)}function addOnPreRun(cb){__ATPRERUN__.unshift(cb)}function addOnPostRun(cb){__ATPOSTRUN__.unshift(cb)}var Math_ceil=Math.ceil;var Math_floor=Math.floor;var runDependencies=0;var runDependencyWatcher=null;var dependenciesFulfilled=null;function addRunDependency(id){assert(!ENVIRONMENT_IS_PTHREAD,\"addRunDependency cannot be used in a pthread worker\");runDependencies++;if(Module[\"monitorRunDependencies\"]){Module[\"monitorRunDependencies\"](runDependencies)}}function removeRunDependency(id){runDependencies--;if(Module[\"monitorRunDependencies\"]){Module[\"monitorRunDependencies\"](runDependencies)}if(runDependencies==0){if(runDependencyWatcher!==null){clearInterval(runDependencyWatcher);runDependencyWatcher=null}if(dependenciesFulfilled){var callback=dependenciesFulfilled;dependenciesFulfilled=null;callback()}}}Module[\"preloadedImages\"]={};Module[\"preloadedAudios\"]={};function abort(what){if(Module[\"onAbort\"]){Module[\"onAbort\"](what)}if(ENVIRONMENT_IS_PTHREAD)console.error(\"Pthread aborting at \"+(new Error).stack);what+=\"\";out(what);err(what);ABORT=true;EXITSTATUS=1;what=\"abort(\"+what+\"). 
Build with -s ASSERTIONS=1 for more info.\";throw new WebAssembly.RuntimeError(what)}function hasPrefix(str,prefix){return String.prototype.startsWith?str.startsWith(prefix):str.indexOf(prefix)===0}var dataURIPrefix=\"data:application/octet-stream;base64,\";function isDataURI(filename){return hasPrefix(filename,dataURIPrefix)}var fileURIPrefix=\"file://\";function isFileURI(filename){return hasPrefix(filename,fileURIPrefix)}var wasmBinaryFile=\"tfjs-backend-wasm-threaded-simd.wasm\";if(!isDataURI(wasmBinaryFile)){wasmBinaryFile=locateFile(wasmBinaryFile)}function getBinary(){try{if(wasmBinary){return new Uint8Array(wasmBinary)}if(readBinary){return readBinary(wasmBinaryFile)}else{throw\"both async and sync fetching of the wasm failed\"}}catch(err){abort(err)}}function getBinaryPromise(){if(!wasmBinary&&(ENVIRONMENT_IS_WEB||ENVIRONMENT_IS_WORKER)&&typeof fetch===\"function\"&&!isFileURI(wasmBinaryFile)){return fetch(wasmBinaryFile,{credentials:\"same-origin\"}).then(function(response){if(!response[\"ok\"]){throw\"failed to load wasm binary file at '\"+wasmBinaryFile+\"'\"}return response[\"arrayBuffer\"]()}).catch(function(){return getBinary()})}return new Promise(function(resolve,reject){resolve(getBinary())})}function createWasm(){var info={\"a\":asmLibraryArg};function receiveInstance(instance,module){var exports=instance.exports;Module[\"asm\"]=exports;wasmModule=module;if(!ENVIRONMENT_IS_PTHREAD){var numWorkersToLoad=PThread.unusedWorkers.length;PThread.unusedWorkers.forEach(function(w){PThread.loadWasmModuleToWorker(w,function(){if(!--numWorkersToLoad)removeRunDependency(\"wasm-instantiate\")})})}}if(!ENVIRONMENT_IS_PTHREAD){addRunDependency(\"wasm-instantiate\")}function receiveInstantiatedSource(output){receiveInstance(output[\"instance\"],output[\"module\"])}function instantiateArrayBuffer(receiver){return getBinaryPromise().then(function(binary){return WebAssembly.instantiate(binary,info)}).then(receiver,function(reason){err(\"failed to asynchronously prepare wasm: \"+reason);abort(reason)})}function instantiateAsync(){if(!wasmBinary&&typeof WebAssembly.instantiateStreaming===\"function\"&&!isDataURI(wasmBinaryFile)&&!isFileURI(wasmBinaryFile)&&typeof fetch===\"function\"){fetch(wasmBinaryFile,{credentials:\"same-origin\"}).then(function(response){var result=WebAssembly.instantiateStreaming(response,info);return result.then(receiveInstantiatedSource,function(reason){err(\"wasm streaming compile failed: \"+reason);err(\"falling back to ArrayBuffer instantiation\");instantiateArrayBuffer(receiveInstantiatedSource)})})}else{return instantiateArrayBuffer(receiveInstantiatedSource)}}if(Module[\"instantiateWasm\"]){try{var exports=Module[\"instantiateWasm\"](info,receiveInstance);return exports}catch(e){err(\"Module.instantiateWasm callback failed with error: \"+e);return false}}instantiateAsync();return{}}var ASM_CONSTS={};function initPthreadsJS(){PThread.initRuntime()}if(!ENVIRONMENT_IS_PTHREAD)__ATINIT__.push({func:function(){___wasm_call_ctors()}});var __pthread_ptr=0;var __pthread_is_main_runtime_thread=0;var __pthread_is_main_browser_thread=0;function __register_pthread_ptr(pthreadPtr,isMainBrowserThread,isMainRuntimeThread){pthreadPtr=pthreadPtr|0;isMainBrowserThread=isMainBrowserThread|0;isMainRuntimeThread=isMainRuntimeThread|0;__pthread_ptr=pthreadPtr;__pthread_is_main_browser_thread=isMainBrowserThread;__pthread_is_main_runtime_thread=isMainRuntimeThread}Module[\"__register_pthread_ptr\"]=__register_pthread_ptr;var 
ERRNO_CODES={EPERM:63,ENOENT:44,ESRCH:71,EINTR:27,EIO:29,ENXIO:60,E2BIG:1,ENOEXEC:45,EBADF:8,ECHILD:12,EAGAIN:6,EWOULDBLOCK:6,ENOMEM:48,EACCES:2,EFAULT:21,ENOTBLK:105,EBUSY:10,EEXIST:20,EXDEV:75,ENODEV:43,ENOTDIR:54,EISDIR:31,EINVAL:28,ENFILE:41,EMFILE:33,ENOTTY:59,ETXTBSY:74,EFBIG:22,ENOSPC:51,ESPIPE:70,EROFS:69,EMLINK:34,EPIPE:64,EDOM:18,ERANGE:68,ENOMSG:49,EIDRM:24,ECHRNG:106,EL2NSYNC:156,EL3HLT:107,EL3RST:108,ELNRNG:109,EUNATCH:110,ENOCSI:111,EL2HLT:112,EDEADLK:16,ENOLCK:46,EBADE:113,EBADR:114,EXFULL:115,ENOANO:104,EBADRQC:103,EBADSLT:102,EDEADLOCK:16,EBFONT:101,ENOSTR:100,ENODATA:116,ETIME:117,ENOSR:118,ENONET:119,ENOPKG:120,EREMOTE:121,ENOLINK:47,EADV:122,ESRMNT:123,ECOMM:124,EPROTO:65,EMULTIHOP:36,EDOTDOT:125,EBADMSG:9,ENOTUNIQ:126,EBADFD:127,EREMCHG:128,ELIBACC:129,ELIBBAD:130,ELIBSCN:131,ELIBMAX:132,ELIBEXEC:133,ENOSYS:52,ENOTEMPTY:55,ENAMETOOLONG:37,ELOOP:32,EOPNOTSUPP:138,EPFNOSUPPORT:139,ECONNRESET:15,ENOBUFS:42,EAFNOSUPPORT:5,EPROTOTYPE:67,ENOTSOCK:57,ENOPROTOOPT:50,ESHUTDOWN:140,ECONNREFUSED:14,EADDRINUSE:3,ECONNABORTED:13,ENETUNREACH:40,ENETDOWN:38,ETIMEDOUT:73,EHOSTDOWN:142,EHOSTUNREACH:23,EINPROGRESS:26,EALREADY:7,EDESTADDRREQ:17,EMSGSIZE:35,EPROTONOSUPPORT:66,ESOCKTNOSUPPORT:137,EADDRNOTAVAIL:4,ENETRESET:39,EISCONN:30,ENOTCONN:53,ETOOMANYREFS:141,EUSERS:136,EDQUOT:19,ESTALE:72,ENOTSUP:138,ENOMEDIUM:148,EILSEQ:25,EOVERFLOW:61,ECANCELED:11,ENOTRECOVERABLE:56,EOWNERDEAD:62,ESTRPIPE:135};var __main_thread_futex_wait_address=13488;function _emscripten_futex_wake(addr,count){if(addr<=0||addr>GROWABLE_HEAP_I8().length||addr&3!=0||count<0)return-28;if(count==0)return 0;if(count>=2147483647)count=Infinity;var mainThreadWaitAddress=Atomics.load(GROWABLE_HEAP_I32(),__main_thread_futex_wait_address>>2);var mainThreadWoken=0;if(mainThreadWaitAddress==addr){var loadedAddr=Atomics.compareExchange(GROWABLE_HEAP_I32(),__main_thread_futex_wait_address>>2,mainThreadWaitAddress,0);if(loadedAddr==mainThreadWaitAddress){--count;mainThreadWoken=1;if(count<=0)return 1}}var ret=Atomics.notify(GROWABLE_HEAP_I32(),addr>>2,count);if(ret>=0)return ret+mainThreadWoken;throw\"Atomics.notify returned an unexpected value \"+ret}Module[\"_emscripten_futex_wake\"]=_emscripten_futex_wake;function __kill_thread(pthread_ptr){if(ENVIRONMENT_IS_PTHREAD)throw\"Internal Error! _kill_thread() can only ever be called from main application thread!\";if(!pthread_ptr)throw\"Internal Error! Null pthread_ptr in _kill_thread!\";GROWABLE_HEAP_I32()[pthread_ptr+12>>2]=0;var pthread=PThread.pthreads[pthread_ptr];pthread.worker.terminate();PThread.freeThreadData(pthread);PThread.runningWorkers.splice(PThread.runningWorkers.indexOf(pthread.worker),1);pthread.worker.pthread=undefined}function __cancel_thread(pthread_ptr){if(ENVIRONMENT_IS_PTHREAD)throw\"Internal Error! _cancel_thread() can only ever be called from main application thread!\";if(!pthread_ptr)throw\"Internal Error! Null pthread_ptr in _cancel_thread!\";var pthread=PThread.pthreads[pthread_ptr];pthread.worker.postMessage({\"cmd\":\"cancel\"})}function __cleanup_thread(pthread_ptr){if(ENVIRONMENT_IS_PTHREAD)throw\"Internal Error! _cleanup_thread() can only ever be called from main application thread!\";if(!pthread_ptr)throw\"Internal Error! 
Null pthread_ptr in _cleanup_thread!\";GROWABLE_HEAP_I32()[pthread_ptr+12>>2]=0;var pthread=PThread.pthreads[pthread_ptr];if(pthread){var worker=pthread.worker;PThread.returnWorkerToPool(worker)}}var PThread={MAIN_THREAD_ID:1,mainThreadInfo:{schedPolicy:0,schedPrio:0},unusedWorkers:[],runningWorkers:[],initRuntime:function(){__register_pthread_ptr(PThread.mainThreadBlock,!ENVIRONMENT_IS_WORKER,1);_emscripten_register_main_browser_thread_id(PThread.mainThreadBlock)},initMainThreadBlock:function(){var pthreadPoolSize=8;for(var i=0;i>2]=PThread.mainThreadBlock;var headPtr=PThread.mainThreadBlock+156;GROWABLE_HEAP_I32()[headPtr>>2]=headPtr;var tlsMemory=12976;for(var i=0;i<128;++i)GROWABLE_HEAP_U32()[tlsMemory/4+i]=0;Atomics.store(GROWABLE_HEAP_U32(),PThread.mainThreadBlock+104>>2,tlsMemory);Atomics.store(GROWABLE_HEAP_U32(),PThread.mainThreadBlock+40>>2,PThread.mainThreadBlock);Atomics.store(GROWABLE_HEAP_U32(),PThread.mainThreadBlock+44>>2,42)},initWorker:function(){},pthreads:{},exitHandlers:null,setThreadStatus:function(){},runExitHandlers:function(){if(PThread.exitHandlers!==null){while(PThread.exitHandlers.length>0){PThread.exitHandlers.pop()()}PThread.exitHandlers=null}if(ENVIRONMENT_IS_PTHREAD&&threadInfoStruct)___pthread_tsd_run_dtors()},threadExit:function(exitCode){var tb=_pthread_self();if(tb){Atomics.store(GROWABLE_HEAP_U32(),tb+4>>2,exitCode);Atomics.store(GROWABLE_HEAP_U32(),tb+0>>2,1);Atomics.store(GROWABLE_HEAP_U32(),tb+60>>2,1);Atomics.store(GROWABLE_HEAP_U32(),tb+64>>2,0);PThread.runExitHandlers();_emscripten_futex_wake(tb+0,2147483647);__register_pthread_ptr(0,0,0);threadInfoStruct=0;if(ENVIRONMENT_IS_PTHREAD){postMessage({\"cmd\":\"exit\"})}}},threadCancel:function(){PThread.runExitHandlers();Atomics.store(GROWABLE_HEAP_U32(),threadInfoStruct+4>>2,-1);Atomics.store(GROWABLE_HEAP_U32(),threadInfoStruct+0>>2,1);_emscripten_futex_wake(threadInfoStruct+0,2147483647);threadInfoStruct=selfThreadId=0;__register_pthread_ptr(0,0,0);postMessage({\"cmd\":\"cancelDone\"})},terminateAllThreads:function(){for(var t in PThread.pthreads){var pthread=PThread.pthreads[t];if(pthread&&pthread.worker){PThread.returnWorkerToPool(pthread.worker)}}PThread.pthreads={};for(var i=0;i>2];GROWABLE_HEAP_I32()[pthread.threadInfoStruct+104>>2]=0;_free(tlsMemory);_free(pthread.threadInfoStruct)}pthread.threadInfoStruct=0;if(pthread.allocatedOwnStack&&pthread.stackBase)_free(pthread.stackBase);pthread.stackBase=0;if(pthread.worker)pthread.worker.pthread=null},returnWorkerToPool:function(worker){delete PThread.pthreads[worker.pthread.thread];PThread.unusedWorkers.push(worker);PThread.runningWorkers.splice(PThread.runningWorkers.indexOf(worker),1);PThread.freeThreadData(worker.pthread);worker.pthread=undefined},receiveObjectTransfer:function(data){},loadWasmModuleToWorker:function(worker,onFinishedLoading){worker.onmessage=function(e){var d=e[\"data\"];var cmd=d[\"cmd\"];if(worker.pthread)PThread.currentProxiedOperationCallerThread=worker.pthread.threadInfoStruct;if(d[\"targetThread\"]&&d[\"targetThread\"]!=_pthread_self()){var thread=PThread.pthreads[d.targetThread];if(thread){thread.worker.postMessage(e.data,d[\"transferList\"])}else{console.error('Internal error! 
Worker sent a message \"'+cmd+'\" to target pthread '+d[\"targetThread\"]+\", but that thread no longer exists!\")}PThread.currentProxiedOperationCallerThread=undefined;return}if(cmd===\"processQueuedMainThreadWork\"){_emscripten_main_thread_process_queued_calls()}else if(cmd===\"spawnThread\"){__spawn_thread(e.data)}else if(cmd===\"cleanupThread\"){__cleanup_thread(d[\"thread\"])}else if(cmd===\"killThread\"){__kill_thread(d[\"thread\"])}else if(cmd===\"cancelThread\"){__cancel_thread(d[\"thread\"])}else if(cmd===\"loaded\"){worker.loaded=true;if(onFinishedLoading)onFinishedLoading(worker);if(worker.runPthread){worker.runPthread();delete worker.runPthread}}else if(cmd===\"print\"){out(\"Thread \"+d[\"threadId\"]+\": \"+d[\"text\"])}else if(cmd===\"printErr\"){err(\"Thread \"+d[\"threadId\"]+\": \"+d[\"text\"])}else if(cmd===\"alert\"){alert(\"Thread \"+d[\"threadId\"]+\": \"+d[\"text\"])}else if(cmd===\"exit\"){var detached=worker.pthread&&Atomics.load(GROWABLE_HEAP_U32(),worker.pthread.thread+68>>2);if(detached){PThread.returnWorkerToPool(worker)}}else if(cmd===\"cancelDone\"){PThread.returnWorkerToPool(worker)}else if(cmd===\"objectTransfer\"){PThread.receiveObjectTransfer(e.data)}else if(e.data.target===\"setimmediate\"){worker.postMessage(e.data)}else{err(\"worker sent an unknown command \"+cmd)}PThread.currentProxiedOperationCallerThread=undefined};worker.onerror=function(e){err(\"pthread sent an error! \"+e.filename+\":\"+e.lineno+\": \"+e.message)};if(ENVIRONMENT_IS_NODE){worker.on(\"message\",function(data){worker.onmessage({data:data})});worker.on(\"error\",function(data){worker.onerror(data)});worker.on(\"exit\",function(data){console.log(\"worker exited - TODO: update the worker queue?\")})}worker.postMessage({\"cmd\":\"load\",\"urlOrBlob\":Module[\"mainScriptUrlOrBlob\"]||_scriptDir,\"wasmMemory\":wasmMemory,\"wasmModule\":wasmModule,\"DYNAMIC_BASE\":DYNAMIC_BASE,\"DYNAMICTOP_PTR\":DYNAMICTOP_PTR})},allocateUnusedWorker:function(){var pthreadMainJs=locateFile(\"tfjs-backend-wasm-threaded-simd.worker.js\");PThread.unusedWorkers.push(new Worker(pthreadMainJs))},getNewWorker:function(){if(PThread.unusedWorkers.length==0){PThread.allocateUnusedWorker();PThread.loadWasmModuleToWorker(PThread.unusedWorkers[0])}if(PThread.unusedWorkers.length>0)return PThread.unusedWorkers.pop();else return null},busySpinWait:function(msecs){var t=performance.now()+msecs;while(performance.now()>2]=value;return value}function _atexit(func,arg){if(ENVIRONMENT_IS_PTHREAD)return _emscripten_proxy_to_main_thread_js(1,1,func,arg);__ATEXIT__.unshift({func:func,arg:arg})}function __emscripten_notify_thread_queue(targetThreadId,mainThreadId){if(targetThreadId==mainThreadId){postMessage({\"cmd\":\"processQueuedMainThreadWork\"})}else if(ENVIRONMENT_IS_PTHREAD){postMessage({\"targetThread\":targetThreadId,\"cmd\":\"processThreadQueue\"})}else{var pthread=PThread.pthreads[targetThreadId];var worker=pthread&&pthread.worker;if(!worker){return}worker.postMessage({\"cmd\":\"processThreadQueue\"})}return 1}function _abort(){abort()}function _emscripten_conditional_set_current_thread_status(expectedStatus,newStatus){expectedStatus=expectedStatus|0;newStatus=newStatus|0}function _emscripten_futex_wait(addr,val,timeout){if(addr<=0||addr>GROWABLE_HEAP_I8().length||addr&3!=0)return-28;if(ENVIRONMENT_IS_WORKER){var ret=Atomics.wait(GROWABLE_HEAP_I32(),addr>>2,val,timeout);if(ret===\"timed-out\")return-73;if(ret===\"not-equal\")return-6;if(ret===\"ok\")return 0;throw\"Atomics.wait returned an unexpected value 
\"+ret}else{var loadedVal=Atomics.load(GROWABLE_HEAP_I32(),addr>>2);if(val!=loadedVal)return-6;var tNow=performance.now();var tEnd=tNow+timeout;Atomics.store(GROWABLE_HEAP_I32(),__main_thread_futex_wait_address>>2,addr);var ourWaitAddress=addr;while(addr==ourWaitAddress){tNow=performance.now();if(tNow>tEnd){return-73}_emscripten_main_thread_process_queued_calls();addr=Atomics.load(GROWABLE_HEAP_I32(),__main_thread_futex_wait_address>>2)}return 0}}function _emscripten_is_main_browser_thread(){return __pthread_is_main_browser_thread|0}function _emscripten_is_main_runtime_thread(){return __pthread_is_main_runtime_thread|0}function _emscripten_memcpy_big(dest,src,num){GROWABLE_HEAP_U8().copyWithin(dest,src,src+num)}function _emscripten_num_logical_cores(){return navigator[\"hardwareConcurrency\"]}function _emscripten_proxy_to_main_thread_js(index,sync){var numCallArgs=arguments.length-2;var stack=stackSave();var args=stackAlloc(numCallArgs*8);var b=args>>3;for(var i=0;i>3]);buf+=8}else{buf=buf+3&~3;args.push(GROWABLE_HEAP_I32()[buf>>2]);buf+=4}}return args}function _emscripten_receive_on_main_thread_js(index,numCallArgs,args){_emscripten_receive_on_main_thread_js_callArgs.length=numCallArgs;var b=args>>3;for(var i=0;i>>16);updateGlobalBufferAndViews(wasmMemory.buffer);return 1}catch(e){}}function _emscripten_resize_heap(requestedSize){requestedSize=requestedSize>>>0;var oldSize=_emscripten_get_heap_size();if(requestedSize<=oldSize){return false}var PAGE_MULTIPLE=65536;var maxHeapSize=2147483648;if(requestedSize>maxHeapSize){return false}var minHeapSize=16777216;for(var cutDown=1;cutDown<=4;cutDown*=2){var overGrownHeapSize=oldSize*(1+.2/cutDown);overGrownHeapSize=Math.min(overGrownHeapSize,requestedSize+100663296);var newSize=Math.min(maxHeapSize,alignUp(Math.max(minHeapSize,requestedSize,overGrownHeapSize),PAGE_MULTIPLE));var replacement=emscripten_realloc_buffer(newSize);if(replacement){return true}}return false}var JSEvents={keyEvent:0,mouseEvent:0,wheelEvent:0,uiEvent:0,focusEvent:0,deviceOrientationEvent:0,deviceMotionEvent:0,fullscreenChangeEvent:0,pointerlockChangeEvent:0,visibilityChangeEvent:0,touchEvent:0,previousFullscreenElement:null,previousScreenX:null,previousScreenY:null,removeEventListenersRegistered:false,removeAllEventListeners:function(){for(var i=JSEvents.eventHandlers.length-1;i>=0;--i){JSEvents._removeHandler(i)}JSEvents.eventHandlers=[];JSEvents.deferredCalls=[]},registerRemoveEventListeners:function(){if(!JSEvents.removeEventListenersRegistered){__ATEXIT__.push(JSEvents.removeAllEventListeners);JSEvents.removeEventListenersRegistered=true}},deferredCalls:[],deferCall:function(targetFunction,precedence,argsList){function arraysHaveEqualContent(arrA,arrB){if(arrA.length!=arrB.length)return false;for(var i in arrA){if(arrA[i]!=arrB[i])return false}return true}for(var i in JSEvents.deferredCalls){var call=JSEvents.deferredCalls[i];if(call.targetFunction==targetFunction&&arraysHaveEqualContent(call.argsList,argsList)){return}}JSEvents.deferredCalls.push({targetFunction:targetFunction,precedence:precedence,argsList:argsList});JSEvents.deferredCalls.sort(function(x,y){return x.precedence>2]=eventTypeId;GROWABLE_HEAP_I32()[varargs+4>>2]=eventData;GROWABLE_HEAP_I32()[varargs+8>>2]=userData;_emscripten_async_queue_on_thread_(targetThread,637534208,eventHandlerFunc,eventData,varargs);stackRestore(stackTop)},getTargetThreadForEventCallback:function(targetThread){switch(targetThread){case 1:return 0;case 2:return PThread.currentProxiedOperationCallerThread;default:return 
targetThread}},getNodeNameForTarget:function(target){if(!target)return\"\";if(target==window)return\"#window\";if(target==screen)return\"#screen\";return target&&target.nodeName?target.nodeName:\"\"},fullscreenEnabled:function(){return document.fullscreenEnabled||document.webkitFullscreenEnabled}};function stringToNewUTF8(jsString){var length=lengthBytesUTF8(jsString)+1;var cString=_malloc(length);stringToUTF8(jsString,cString,length);return cString}function _emscripten_set_offscreencanvas_size_on_target_thread_js(targetThread,targetCanvas,width,height){var stackTop=stackSave();var varargs=stackAlloc(12);var targetCanvasPtr=0;if(targetCanvas){targetCanvasPtr=stringToNewUTF8(targetCanvas)}GROWABLE_HEAP_I32()[varargs>>2]=targetCanvasPtr;GROWABLE_HEAP_I32()[varargs+4>>2]=width;GROWABLE_HEAP_I32()[varargs+8>>2]=height;_emscripten_async_queue_on_thread_(targetThread,657457152,0,targetCanvasPtr,varargs);stackRestore(stackTop)}function _emscripten_set_offscreencanvas_size_on_target_thread(targetThread,targetCanvas,width,height){targetCanvas=targetCanvas?UTF8ToString(targetCanvas):\"\";_emscripten_set_offscreencanvas_size_on_target_thread_js(targetThread,targetCanvas,width,height)}function __maybeCStringToJsString(cString){return cString>2?UTF8ToString(cString):cString}var specialHTMLTargets=[0,typeof document!==\"undefined\"?document:0,typeof window!==\"undefined\"?window:0];function __findEventTarget(target){target=__maybeCStringToJsString(target);var domElement=specialHTMLTargets[target]||(typeof document!==\"undefined\"?document.querySelector(target):undefined);return domElement}function __findCanvasEventTarget(target){return __findEventTarget(target)}function _emscripten_set_canvas_element_size_calling_thread(target,width,height){var canvas=__findCanvasEventTarget(target);if(!canvas)return-4;if(canvas.canvasSharedPtr){GROWABLE_HEAP_I32()[canvas.canvasSharedPtr>>2]=width;GROWABLE_HEAP_I32()[canvas.canvasSharedPtr+4>>2]=height}if(canvas.offscreenCanvas||!canvas.controlTransferredOffscreen){if(canvas.offscreenCanvas)canvas=canvas.offscreenCanvas;var autoResizeViewport=false;if(canvas.GLctxObject&&canvas.GLctxObject.GLctx){var prevViewport=canvas.GLctxObject.GLctx.getParameter(2978);autoResizeViewport=prevViewport[0]===0&&prevViewport[1]===0&&prevViewport[2]===canvas.width&&prevViewport[3]===canvas.height}canvas.width=width;canvas.height=height;if(autoResizeViewport){canvas.GLctxObject.GLctx.viewport(0,0,width,height)}}else if(canvas.canvasSharedPtr){var targetThread=GROWABLE_HEAP_I32()[canvas.canvasSharedPtr+8>>2];_emscripten_set_offscreencanvas_size_on_target_thread(targetThread,target,width,height);return 1}else{return-4}return 0}function _emscripten_set_canvas_element_size_main_thread(target,width,height){if(ENVIRONMENT_IS_PTHREAD)return _emscripten_proxy_to_main_thread_js(2,1,target,width,height);return _emscripten_set_canvas_element_size_calling_thread(target,width,height)}function _emscripten_set_canvas_element_size(target,width,height){var canvas=__findCanvasEventTarget(target);if(canvas){return _emscripten_set_canvas_element_size_calling_thread(target,width,height)}else{return _emscripten_set_canvas_element_size_main_thread(target,width,height)}}function _emscripten_set_current_thread_status(newStatus){newStatus=newStatus|0}function _emscripten_set_thread_name(threadId,name){threadId=threadId|0;name=name|0}function __webgl_enable_ANGLE_instanced_arrays(ctx){var 
ext=ctx.getExtension(\"ANGLE_instanced_arrays\");if(ext){ctx[\"vertexAttribDivisor\"]=function(index,divisor){ext[\"vertexAttribDivisorANGLE\"](index,divisor)};ctx[\"drawArraysInstanced\"]=function(mode,first,count,primcount){ext[\"drawArraysInstancedANGLE\"](mode,first,count,primcount)};ctx[\"drawElementsInstanced\"]=function(mode,count,type,indices,primcount){ext[\"drawElementsInstancedANGLE\"](mode,count,type,indices,primcount)};return 1}}function __webgl_enable_OES_vertex_array_object(ctx){var ext=ctx.getExtension(\"OES_vertex_array_object\");if(ext){ctx[\"createVertexArray\"]=function(){return ext[\"createVertexArrayOES\"]()};ctx[\"deleteVertexArray\"]=function(vao){ext[\"deleteVertexArrayOES\"](vao)};ctx[\"bindVertexArray\"]=function(vao){ext[\"bindVertexArrayOES\"](vao)};ctx[\"isVertexArray\"]=function(vao){return ext[\"isVertexArrayOES\"](vao)};return 1}}function __webgl_enable_WEBGL_draw_buffers(ctx){var ext=ctx.getExtension(\"WEBGL_draw_buffers\");if(ext){ctx[\"drawBuffers\"]=function(n,bufs){ext[\"drawBuffersWEBGL\"](n,bufs)};return 1}}var GL={counter:1,lastError:0,buffers:[],mappedBuffers:{},programs:[],framebuffers:[],renderbuffers:[],textures:[],uniforms:[],shaders:[],vaos:[],contexts:{},currentContext:null,offscreenCanvases:{},timerQueriesEXT:[],programInfos:{},stringCache:{},unpackAlignment:4,init:function(){var miniTempFloatBuffer=new Float32Array(GL.MINI_TEMP_BUFFER_SIZE);for(var i=0;i>2]:-1;source+=UTF8ToString(GROWABLE_HEAP_I32()[string+i*4>>2],len<0?undefined:len)}return source},createContext:function(canvas,webGLContextAttributes){var ctx=canvas.getContext(\"webgl\",webGLContextAttributes);if(!ctx)return 0;var handle=GL.registerContext(ctx,webGLContextAttributes);return handle},registerContext:function(ctx,webGLContextAttributes){var handle=_malloc(8);GROWABLE_HEAP_I32()[handle+4>>2]=_pthread_self();var context={handle:handle,attributes:webGLContextAttributes,version:webGLContextAttributes.majorVersion,GLctx:ctx};if(ctx.canvas)ctx.canvas.GLctxObject=context;GL.contexts[handle]=context;if(typeof webGLContextAttributes.enableExtensionsByDefault===\"undefined\"||webGLContextAttributes.enableExtensionsByDefault){GL.initExtensions(context)}return handle},makeContextCurrent:function(contextHandle){GL.currentContext=GL.contexts[contextHandle];Module.ctx=GLctx=GL.currentContext&&GL.currentContext.GLctx;return!(contextHandle&&!GLctx)},getContext:function(contextHandle){return GL.contexts[contextHandle]},deleteContext:function(contextHandle){if(GL.currentContext===GL.contexts[contextHandle])GL.currentContext=null;if(typeof JSEvents===\"object\")JSEvents.removeAllHandlersOnTarget(GL.contexts[contextHandle].GLctx.canvas);if(GL.contexts[contextHandle]&&GL.contexts[contextHandle].GLctx.canvas)GL.contexts[contextHandle].GLctx.canvas.GLctxObject=undefined;_free(GL.contexts[contextHandle].handle);GL.contexts[contextHandle]=null},initExtensions:function(context){if(!context)context=GL.currentContext;if(context.initExtensionsDone)return;context.initExtensionsDone=true;var GLctx=context.GLctx;__webgl_enable_ANGLE_instanced_arrays(GLctx);__webgl_enable_OES_vertex_array_object(GLctx);__webgl_enable_WEBGL_draw_buffers(GLctx);GLctx.disjointTimerQueryExt=GLctx.getExtension(\"EXT_disjoint_timer_query\");var 
automaticallyEnabledExtensions=[\"OES_texture_float\",\"OES_texture_half_float\",\"OES_standard_derivatives\",\"OES_vertex_array_object\",\"WEBGL_compressed_texture_s3tc\",\"WEBGL_depth_texture\",\"OES_element_index_uint\",\"EXT_texture_filter_anisotropic\",\"EXT_frag_depth\",\"WEBGL_draw_buffers\",\"ANGLE_instanced_arrays\",\"OES_texture_float_linear\",\"OES_texture_half_float_linear\",\"EXT_blend_minmax\",\"EXT_shader_texture_lod\",\"EXT_texture_norm16\",\"WEBGL_compressed_texture_pvrtc\",\"EXT_color_buffer_half_float\",\"WEBGL_color_buffer_float\",\"EXT_sRGB\",\"WEBGL_compressed_texture_etc1\",\"EXT_disjoint_timer_query\",\"WEBGL_compressed_texture_etc\",\"WEBGL_compressed_texture_astc\",\"EXT_color_buffer_float\",\"WEBGL_compressed_texture_s3tc_srgb\",\"EXT_disjoint_timer_query_webgl2\",\"WEBKIT_WEBGL_compressed_texture_pvrtc\"];var exts=GLctx.getSupportedExtensions()||[];exts.forEach(function(ext){if(automaticallyEnabledExtensions.indexOf(ext)!=-1){GLctx.getExtension(ext)}})},populateUniformTable:function(program){var p=GL.programs[program];var ptable=GL.programInfos[program]={uniforms:{},maxUniformLength:0,maxAttributeLength:-1,maxUniformBlockNameLength:-1};var utable=ptable.uniforms;var numUniforms=GLctx.getProgramParameter(p,35718);for(var i=0;i>2;contextAttributes[\"alpha\"]=!!GROWABLE_HEAP_I32()[a+(0>>2)];contextAttributes[\"depth\"]=!!GROWABLE_HEAP_I32()[a+(4>>2)];contextAttributes[\"stencil\"]=!!GROWABLE_HEAP_I32()[a+(8>>2)];contextAttributes[\"antialias\"]=!!GROWABLE_HEAP_I32()[a+(12>>2)];contextAttributes[\"premultipliedAlpha\"]=!!GROWABLE_HEAP_I32()[a+(16>>2)];contextAttributes[\"preserveDrawingBuffer\"]=!!GROWABLE_HEAP_I32()[a+(20>>2)];var powerPreference=GROWABLE_HEAP_I32()[a+(24>>2)];contextAttributes[\"powerPreference\"]=__emscripten_webgl_power_preferences[powerPreference];contextAttributes[\"failIfMajorPerformanceCaveat\"]=!!GROWABLE_HEAP_I32()[a+(28>>2)];contextAttributes.majorVersion=GROWABLE_HEAP_I32()[a+(32>>2)];contextAttributes.minorVersion=GROWABLE_HEAP_I32()[a+(36>>2)];contextAttributes.enableExtensionsByDefault=GROWABLE_HEAP_I32()[a+(40>>2)];contextAttributes.explicitSwapControl=GROWABLE_HEAP_I32()[a+(44>>2)];contextAttributes.proxyContextToMainThread=GROWABLE_HEAP_I32()[a+(48>>2)];contextAttributes.renderViaOffscreenBackBuffer=GROWABLE_HEAP_I32()[a+(52>>2)];var canvas=__findCanvasEventTarget(target);if(!canvas){return-4}if(contextAttributes.explicitSwapControl){return-1}var contextHandle=GL.createContext(canvas,contextAttributes);return contextHandle}function _emscripten_webgl_create_context(a0,a1){return _emscripten_webgl_do_create_context(a0,a1)}var PATH={splitPath:function(filename){var splitPathRe=/^(\\/?|)([\\s\\S]*?)((?:\\.{1,2}|[^\\/]+?|)(\\.[^.\\/]*|))(?:[\\/]*)$/;return splitPathRe.exec(filename).slice(1)},normalizeArray:function(parts,allowAboveRoot){var up=0;for(var i=parts.length-1;i>=0;i--){var last=parts[i];if(last===\".\"){parts.splice(i,1)}else if(last===\"..\"){parts.splice(i,1);up++}else if(up){parts.splice(i,1);up--}}if(allowAboveRoot){for(;up;up--){parts.unshift(\"..\")}}return parts},normalize:function(path){var isAbsolute=path.charAt(0)===\"/\",trailingSlash=path.substr(-1)===\"/\";path=PATH.normalizeArray(path.split(\"/\").filter(function(p){return!!p}),!isAbsolute).join(\"/\");if(!path&&!isAbsolute){path=\".\"}if(path&&trailingSlash){path+=\"/\"}return(isAbsolute?\"/\":\"\")+path},dirname:function(path){var result=PATH.splitPath(path),root=result[0],dir=result[1];if(!root&&!dir){return\".\"}if(dir){dir=dir.substr(0,dir.length-1)}return 
root+dir},basename:function(path){if(path===\"/\")return\"/\";var lastSlash=path.lastIndexOf(\"/\");if(lastSlash===-1)return path;return path.substr(lastSlash+1)},extname:function(path){return PATH.splitPath(path)[3]},join:function(){var paths=Array.prototype.slice.call(arguments,0);return PATH.normalize(paths.join(\"/\"))},join2:function(l,r){return PATH.normalize(l+\"/\"+r)}};var SYSCALLS={mappings:{},buffers:[null,[],[]],printChar:function(stream,curr){var buffer=SYSCALLS.buffers[stream];if(curr===0||curr===10){(stream===1?out:err)(UTF8ArrayToString(buffer,0));buffer.length=0}else{buffer.push(curr)}},varargs:undefined,get:function(){SYSCALLS.varargs+=4;var ret=GROWABLE_HEAP_I32()[SYSCALLS.varargs-4>>2];return ret},getStr:function(ptr){var ret=UTF8ToString(ptr);return ret},get64:function(low,high){return low}};function _fd_close(fd){if(ENVIRONMENT_IS_PTHREAD)return _emscripten_proxy_to_main_thread_js(3,1,fd);return 0}function _fd_seek(fd,offset_low,offset_high,whence,newOffset){if(ENVIRONMENT_IS_PTHREAD)return _emscripten_proxy_to_main_thread_js(4,1,fd,offset_low,offset_high,whence,newOffset)}function _fd_write(fd,iov,iovcnt,pnum){if(ENVIRONMENT_IS_PTHREAD)return _emscripten_proxy_to_main_thread_js(5,1,fd,iov,iovcnt,pnum);var num=0;for(var i=0;i>2];var len=GROWABLE_HEAP_I32()[iov+(i*8+4)>>2];for(var j=0;j>2]=num;return 0}function _pthread_cleanup_pop(execute){var routine=PThread.exitHandlers.pop();if(execute)routine()}function _pthread_cleanup_push(routine,arg){if(PThread.exitHandlers===null){PThread.exitHandlers=[]}PThread.exitHandlers.push(function(){dynCall_vi(routine,arg)})}function __spawn_thread(threadParams){if(ENVIRONMENT_IS_PTHREAD)throw\"Internal Error! _spawn_thread() can only ever be called from main application thread!\";var worker=PThread.getNewWorker();if(worker.pthread!==undefined)throw\"Internal error!\";if(!threadParams.pthread_ptr)throw\"Internal error, no pthread ptr!\";PThread.runningWorkers.push(worker);var tlsMemory=_malloc(128*4);for(var i=0;i<128;++i){GROWABLE_HEAP_I32()[tlsMemory+i*4>>2]=0}var stackHigh=threadParams.stackBase+threadParams.stackSize;var pthread=PThread.pthreads[threadParams.pthread_ptr]={worker:worker,stackBase:threadParams.stackBase,stackSize:threadParams.stackSize,allocatedOwnStack:threadParams.allocatedOwnStack,thread:threadParams.pthread_ptr,threadInfoStruct:threadParams.pthread_ptr};var tis=pthread.threadInfoStruct>>2;Atomics.store(GROWABLE_HEAP_U32(),tis+(0>>2),0);Atomics.store(GROWABLE_HEAP_U32(),tis+(4>>2),0);Atomics.store(GROWABLE_HEAP_U32(),tis+(8>>2),0);Atomics.store(GROWABLE_HEAP_U32(),tis+(68>>2),threadParams.detached);Atomics.store(GROWABLE_HEAP_U32(),tis+(104>>2),tlsMemory);Atomics.store(GROWABLE_HEAP_U32(),tis+(48>>2),0);Atomics.store(GROWABLE_HEAP_U32(),tis+(40>>2),pthread.threadInfoStruct);Atomics.store(GROWABLE_HEAP_U32(),tis+(44>>2),42);Atomics.store(GROWABLE_HEAP_U32(),tis+(108>>2),threadParams.stackSize);Atomics.store(GROWABLE_HEAP_U32(),tis+(84>>2),threadParams.stackSize);Atomics.store(GROWABLE_HEAP_U32(),tis+(80>>2),stackHigh);Atomics.store(GROWABLE_HEAP_U32(),tis+(108+8>>2),stackHigh);Atomics.store(GROWABLE_HEAP_U32(),tis+(108+12>>2),threadParams.detached);Atomics.store(GROWABLE_HEAP_U32(),tis+(108+20>>2),threadParams.schedPolicy);Atomics.store(GROWABLE_HEAP_U32(),tis+(108+24>>2),threadParams.schedPrio);var global_libc=_emscripten_get_global_libc();var global_locale=global_libc+40;Atomics.store(GROWABLE_HEAP_U32(),tis+(176>>2),global_locale);worker.pthread=pthread;var 
msg={\"cmd\":\"run\",\"start_routine\":threadParams.startRoutine,\"arg\":threadParams.arg,\"threadInfoStruct\":threadParams.pthread_ptr,\"selfThreadId\":threadParams.pthread_ptr,\"parentThreadId\":threadParams.parent_pthread_ptr,\"stackBase\":threadParams.stackBase,\"stackSize\":threadParams.stackSize};worker.runPthread=function(){msg.time=performance.now();worker.postMessage(msg,threadParams.transferList)};if(worker.loaded){worker.runPthread();delete worker.runPthread}}function _pthread_getschedparam(thread,policy,schedparam){if(!policy&&!schedparam)return ERRNO_CODES.EINVAL;if(!thread){err(\"pthread_getschedparam called with a null thread pointer!\");return ERRNO_CODES.ESRCH}var self=GROWABLE_HEAP_I32()[thread+12>>2];if(self!==thread){err(\"pthread_getschedparam attempted on thread \"+thread+\", which does not point to a valid thread, or does not exist anymore!\");return ERRNO_CODES.ESRCH}var schedPolicy=Atomics.load(GROWABLE_HEAP_U32(),thread+108+20>>2);var schedPrio=Atomics.load(GROWABLE_HEAP_U32(),thread+108+24>>2);if(policy)GROWABLE_HEAP_I32()[policy>>2]=schedPolicy;if(schedparam)GROWABLE_HEAP_I32()[schedparam>>2]=schedPrio;return 0}function _pthread_self(){return __pthread_ptr|0}Module[\"_pthread_self\"]=_pthread_self;function _pthread_create(pthread_ptr,attr,start_routine,arg){if(typeof SharedArrayBuffer===\"undefined\"){err(\"Current environment does not support SharedArrayBuffer, pthreads are not available!\");return 6}if(!pthread_ptr){err(\"pthread_create called with a null thread pointer!\");return 28}var transferList=[];var error=0;if(ENVIRONMENT_IS_PTHREAD&&(transferList.length===0||error)){return _emscripten_sync_run_in_main_thread_4(687865856,pthread_ptr,attr,start_routine,arg)}if(error)return error;var stackSize=0;var stackBase=0;var detached=0;var schedPolicy=0;var schedPrio=0;if(attr){stackSize=GROWABLE_HEAP_I32()[attr>>2];stackSize+=81920;stackBase=GROWABLE_HEAP_I32()[attr+8>>2];detached=GROWABLE_HEAP_I32()[attr+12>>2]!==0;var inheritSched=GROWABLE_HEAP_I32()[attr+16>>2]===0;if(inheritSched){var prevSchedPolicy=GROWABLE_HEAP_I32()[attr+20>>2];var prevSchedPrio=GROWABLE_HEAP_I32()[attr+24>>2];var parentThreadPtr=PThread.currentProxiedOperationCallerThread?PThread.currentProxiedOperationCallerThread:_pthread_self();_pthread_getschedparam(parentThreadPtr,attr+20,attr+24);schedPolicy=GROWABLE_HEAP_I32()[attr+20>>2];schedPrio=GROWABLE_HEAP_I32()[attr+24>>2];GROWABLE_HEAP_I32()[attr+20>>2]=prevSchedPolicy;GROWABLE_HEAP_I32()[attr+24>>2]=prevSchedPrio}else{schedPolicy=GROWABLE_HEAP_I32()[attr+20>>2];schedPrio=GROWABLE_HEAP_I32()[attr+24>>2]}}else{stackSize=2097152}var allocatedOwnStack=stackBase==0;if(allocatedOwnStack){stackBase=_memalign(16,stackSize)}else{stackBase-=stackSize;assert(stackBase>0)}var threadInfoStruct=_malloc(232);for(var i=0;i<232>>2;++i)GROWABLE_HEAP_U32()[(threadInfoStruct>>2)+i]=0;GROWABLE_HEAP_I32()[pthread_ptr>>2]=threadInfoStruct;GROWABLE_HEAP_I32()[threadInfoStruct+12>>2]=threadInfoStruct;var headPtr=threadInfoStruct+156;GROWABLE_HEAP_I32()[headPtr>>2]=headPtr;var threadParams={stackBase:stackBase,stackSize:stackSize,allocatedOwnStack:allocatedOwnStack,schedPolicy:schedPolicy,schedPrio:schedPrio,detached:detached,startRoutine:start_routine,pthread_ptr:threadInfoStruct,parent_pthread_ptr:_pthread_self(),arg:arg,transferList:transferList};if(ENVIRONMENT_IS_PTHREAD){threadParams.cmd=\"spawnThread\";postMessage(threadParams,transferList)}else{__spawn_thread(threadParams)}return 0}function _roundf(d){d=+d;return d>=+0?+Math_floor(d+ 
+.5):+Math_ceil(d-+.5)}function _sysconf(name){if(ENVIRONMENT_IS_PTHREAD)return _emscripten_proxy_to_main_thread_js(6,1,name);switch(name){case 30:return 16384;case 85:var maxHeapSize=2147483648;return maxHeapSize/16384;case 132:case 133:case 12:case 137:case 138:case 15:case 235:case 16:case 17:case 18:case 19:case 20:case 149:case 13:case 10:case 236:case 153:case 9:case 21:case 22:case 159:case 154:case 14:case 77:case 78:case 139:case 80:case 81:case 82:case 68:case 67:case 164:case 11:case 29:case 47:case 48:case 95:case 52:case 51:case 46:case 79:return 200809;case 27:case 246:case 127:case 128:case 23:case 24:case 160:case 161:case 181:case 182:case 242:case 183:case 184:case 243:case 244:case 245:case 165:case 178:case 179:case 49:case 50:case 168:case 169:case 175:case 170:case 171:case 172:case 97:case 76:case 32:case 173:case 35:return-1;case 176:case 177:case 7:case 155:case 8:case 157:case 125:case 126:case 92:case 93:case 129:case 130:case 131:case 94:case 91:return 1;case 74:case 60:case 69:case 70:case 4:return 1024;case 31:case 42:case 72:return 32;case 87:case 26:case 33:return 2147483647;case 34:case 1:return 47839;case 38:case 36:return 99;case 43:case 37:return 2048;case 0:return 2097152;case 3:return 65536;case 28:return 32768;case 44:return 32767;case 75:return 16384;case 39:return 1e3;case 89:return 700;case 71:return 256;case 40:return 255;case 2:return 100;case 180:return 64;case 25:return 20;case 5:return 16;case 6:return 6;case 73:return 4;case 84:{if(typeof navigator===\"object\")return navigator[\"hardwareConcurrency\"]||1;return 1}}setErrNo(28);return-1}if(!ENVIRONMENT_IS_PTHREAD)PThread.initMainThreadBlock();else PThread.initWorker();var GLctx;GL.init();var proxiedFunctionTable=[null,_atexit,_emscripten_set_canvas_element_size_main_thread,_fd_close,_fd_seek,_fd_write,_sysconf];var asmLibraryArg={\"e\":___assert_fail,\"r\":___call_main,\"w\":__emscripten_notify_thread_queue,\"a\":_abort,\"l\":_emscripten_conditional_set_current_thread_status,\"d\":_emscripten_futex_wait,\"c\":_emscripten_futex_wake,\"h\":_emscripten_get_now,\"g\":_emscripten_is_main_browser_thread,\"x\":_emscripten_is_main_runtime_thread,\"q\":_emscripten_memcpy_big,\"B\":_emscripten_num_logical_cores,\"t\":_emscripten_receive_on_main_thread_js,\"A\":_emscripten_resize_heap,\"u\":_emscripten_set_canvas_element_size,\"k\":_emscripten_set_current_thread_status,\"s\":_emscripten_set_thread_name,\"v\":_emscripten_webgl_create_context,\"m\":_fd_close,\"o\":_fd_seek,\"i\":_fd_write,\"p\":initPthreadsJS,\"memory\":wasmMemory||Module[\"wasmMemory\"],\"y\":_pthread_cleanup_pop,\"z\":_pthread_cleanup_push,\"j\":_pthread_create,\"b\":_pthread_self,\"f\":_roundf,\"n\":_sysconf,\"table\":wasmTable};var asm=createWasm();Module[\"asm\"]=asm;var ___wasm_call_ctors=Module[\"___wasm_call_ctors\"]=function(){return(___wasm_call_ctors=Module[\"___wasm_call_ctors\"]=Module[\"asm\"][\"C\"]).apply(null,arguments)};var _init=Module[\"_init\"]=function(){return(_init=Module[\"_init\"]=Module[\"asm\"][\"D\"]).apply(null,arguments)};var _register_tensor=Module[\"_register_tensor\"]=function(){return(_register_tensor=Module[\"_register_tensor\"]=Module[\"asm\"][\"E\"]).apply(null,arguments)};var _dispose_data=Module[\"_dispose_data\"]=function(){return(_dispose_data=Module[\"_dispose_data\"]=Module[\"asm\"][\"F\"]).apply(null,arguments)};var _dispose=Module[\"_dispose\"]=function(){return(_dispose=Module[\"_dispose\"]=Module[\"asm\"][\"G\"]).apply(null,arguments)};var 
_Abs=Module[\"_Abs\"]=function(){return(_Abs=Module[\"_Abs\"]=Module[\"asm\"][\"H\"]).apply(null,arguments)};var _Add=Module[\"_Add\"]=function(){return(_Add=Module[\"_Add\"]=Module[\"asm\"][\"I\"]).apply(null,arguments)};var _AddN=Module[\"_AddN\"]=function(){return(_AddN=Module[\"_AddN\"]=Module[\"asm\"][\"J\"]).apply(null,arguments)};var _ArgMax=Module[\"_ArgMax\"]=function(){return(_ArgMax=Module[\"_ArgMax\"]=Module[\"asm\"][\"K\"]).apply(null,arguments)};var _AvgPool=Module[\"_AvgPool\"]=function(){return(_AvgPool=Module[\"_AvgPool\"]=Module[\"asm\"][\"L\"]).apply(null,arguments)};var _BatchMatMul=Module[\"_BatchMatMul\"]=function(){return(_BatchMatMul=Module[\"_BatchMatMul\"]=Module[\"asm\"][\"M\"]).apply(null,arguments)};var _ClipByValue=Module[\"_ClipByValue\"]=function(){return(_ClipByValue=Module[\"_ClipByValue\"]=Module[\"asm\"][\"N\"]).apply(null,arguments)};var _Conv2D=Module[\"_Conv2D\"]=function(){return(_Conv2D=Module[\"_Conv2D\"]=Module[\"asm\"][\"O\"]).apply(null,arguments)};var _Conv2DBackpropInput=Module[\"_Conv2DBackpropInput\"]=function(){return(_Conv2DBackpropInput=Module[\"_Conv2DBackpropInput\"]=Module[\"asm\"][\"P\"]).apply(null,arguments)};var _Cos=Module[\"_Cos\"]=function(){return(_Cos=Module[\"_Cos\"]=Module[\"asm\"][\"Q\"]).apply(null,arguments)};var _CropAndResize=Module[\"_CropAndResize\"]=function(){return(_CropAndResize=Module[\"_CropAndResize\"]=Module[\"asm\"][\"R\"]).apply(null,arguments)};var _Cumsum=Module[\"_Cumsum\"]=function(){return(_Cumsum=Module[\"_Cumsum\"]=Module[\"asm\"][\"S\"]).apply(null,arguments)};var _DepthToSpace=Module[\"_DepthToSpace\"]=function(){return(_DepthToSpace=Module[\"_DepthToSpace\"]=Module[\"asm\"][\"T\"]).apply(null,arguments)};var _DepthwiseConv2dNative=Module[\"_DepthwiseConv2dNative\"]=function(){return(_DepthwiseConv2dNative=Module[\"_DepthwiseConv2dNative\"]=Module[\"asm\"][\"U\"]).apply(null,arguments)};var _Div=Module[\"_Div\"]=function(){return(_Div=Module[\"_Div\"]=Module[\"asm\"][\"V\"]).apply(null,arguments)};var _Equal=Module[\"_Equal\"]=function(){return(_Equal=Module[\"_Equal\"]=Module[\"asm\"][\"W\"]).apply(null,arguments)};var _Exp=Module[\"_Exp\"]=function(){return(_Exp=Module[\"_Exp\"]=Module[\"asm\"][\"X\"]).apply(null,arguments)};var _FlipLeftRight=Module[\"_FlipLeftRight\"]=function(){return(_FlipLeftRight=Module[\"_FlipLeftRight\"]=Module[\"asm\"][\"Y\"]).apply(null,arguments)};var _FloorDiv=Module[\"_FloorDiv\"]=function(){return(_FloorDiv=Module[\"_FloorDiv\"]=Module[\"asm\"][\"Z\"]).apply(null,arguments)};var _FusedBatchNorm=Module[\"_FusedBatchNorm\"]=function(){return(_FusedBatchNorm=Module[\"_FusedBatchNorm\"]=Module[\"asm\"][\"_\"]).apply(null,arguments)};var _FusedConv2D=Module[\"_FusedConv2D\"]=function(){return(_FusedConv2D=Module[\"_FusedConv2D\"]=Module[\"asm\"][\"$\"]).apply(null,arguments)};var _FusedDepthwiseConv2D=Module[\"_FusedDepthwiseConv2D\"]=function(){return(_FusedDepthwiseConv2D=Module[\"_FusedDepthwiseConv2D\"]=Module[\"asm\"][\"aa\"]).apply(null,arguments)};var _Gather=Module[\"_Gather\"]=function(){return(_Gather=Module[\"_Gather\"]=Module[\"asm\"][\"ba\"]).apply(null,arguments)};var _GatherNd=Module[\"_GatherNd\"]=function(){return(_GatherNd=Module[\"_GatherNd\"]=Module[\"asm\"][\"ca\"]).apply(null,arguments)};var _Greater=Module[\"_Greater\"]=function(){return(_Greater=Module[\"_Greater\"]=Module[\"asm\"][\"da\"]).apply(null,arguments)};var 
_GreaterEqual=Module[\"_GreaterEqual\"]=function(){return(_GreaterEqual=Module[\"_GreaterEqual\"]=Module[\"asm\"][\"ea\"]).apply(null,arguments)};var _Less=Module[\"_Less\"]=function(){return(_Less=Module[\"_Less\"]=Module[\"asm\"][\"fa\"]).apply(null,arguments)};var _LessEqual=Module[\"_LessEqual\"]=function(){return(_LessEqual=Module[\"_LessEqual\"]=Module[\"asm\"][\"ga\"]).apply(null,arguments)};var _Log=Module[\"_Log\"]=function(){return(_Log=Module[\"_Log\"]=Module[\"asm\"][\"ha\"]).apply(null,arguments)};var _LogicalAnd=Module[\"_LogicalAnd\"]=function(){return(_LogicalAnd=Module[\"_LogicalAnd\"]=Module[\"asm\"][\"ia\"]).apply(null,arguments)};var _Max=Module[\"_Max\"]=function(){return(_Max=Module[\"_Max\"]=Module[\"asm\"][\"ja\"]).apply(null,arguments)};var _MaxPool=Module[\"_MaxPool\"]=function(){return(_MaxPool=Module[\"_MaxPool\"]=Module[\"asm\"][\"ka\"]).apply(null,arguments)};var _Maximum=Module[\"_Maximum\"]=function(){return(_Maximum=Module[\"_Maximum\"]=Module[\"asm\"][\"la\"]).apply(null,arguments)};var _Min=Module[\"_Min\"]=function(){return(_Min=Module[\"_Min\"]=Module[\"asm\"][\"ma\"]).apply(null,arguments)};var _Minimum=Module[\"_Minimum\"]=function(){return(_Minimum=Module[\"_Minimum\"]=Module[\"asm\"][\"na\"]).apply(null,arguments)};var _Multiply=Module[\"_Multiply\"]=function(){return(_Multiply=Module[\"_Multiply\"]=Module[\"asm\"][\"oa\"]).apply(null,arguments)};var _Negate=Module[\"_Negate\"]=function(){return(_Negate=Module[\"_Negate\"]=Module[\"asm\"][\"pa\"]).apply(null,arguments)};var _NonMaxSuppressionV3=Module[\"_NonMaxSuppressionV3\"]=function(){return(_NonMaxSuppressionV3=Module[\"_NonMaxSuppressionV3\"]=Module[\"asm\"][\"qa\"]).apply(null,arguments)};var _NonMaxSuppressionV4=Module[\"_NonMaxSuppressionV4\"]=function(){return(_NonMaxSuppressionV4=Module[\"_NonMaxSuppressionV4\"]=Module[\"asm\"][\"ra\"]).apply(null,arguments)};var _NonMaxSuppressionV5=Module[\"_NonMaxSuppressionV5\"]=function(){return(_NonMaxSuppressionV5=Module[\"_NonMaxSuppressionV5\"]=Module[\"asm\"][\"sa\"]).apply(null,arguments)};var _NotEqual=Module[\"_NotEqual\"]=function(){return(_NotEqual=Module[\"_NotEqual\"]=Module[\"asm\"][\"ta\"]).apply(null,arguments)};var _OneHot=Module[\"_OneHot\"]=function(){return(_OneHot=Module[\"_OneHot\"]=Module[\"asm\"][\"ua\"]).apply(null,arguments)};var _PadV2=Module[\"_PadV2\"]=function(){return(_PadV2=Module[\"_PadV2\"]=Module[\"asm\"][\"va\"]).apply(null,arguments)};var _Pow=Module[\"_Pow\"]=function(){return(_Pow=Module[\"_Pow\"]=Module[\"asm\"][\"wa\"]).apply(null,arguments)};var _Prelu=Module[\"_Prelu\"]=function(){return(_Prelu=Module[\"_Prelu\"]=Module[\"asm\"][\"xa\"]).apply(null,arguments)};var _Relu=Module[\"_Relu\"]=function(){return(_Relu=Module[\"_Relu\"]=Module[\"asm\"][\"ya\"]).apply(null,arguments)};var _Relu6=Module[\"_Relu6\"]=function(){return(_Relu6=Module[\"_Relu6\"]=Module[\"asm\"][\"za\"]).apply(null,arguments)};var _ResizeBilinear=Module[\"_ResizeBilinear\"]=function(){return(_ResizeBilinear=Module[\"_ResizeBilinear\"]=Module[\"asm\"][\"Aa\"]).apply(null,arguments)};var _Reverse=Module[\"_Reverse\"]=function(){return(_Reverse=Module[\"_Reverse\"]=Module[\"asm\"][\"Ba\"]).apply(null,arguments)};var _RotateWithOffset=Module[\"_RotateWithOffset\"]=function(){return(_RotateWithOffset=Module[\"_RotateWithOffset\"]=Module[\"asm\"][\"Ca\"]).apply(null,arguments)};var _Rsqrt=Module[\"_Rsqrt\"]=function(){return(_Rsqrt=Module[\"_Rsqrt\"]=Module[\"asm\"][\"Da\"]).apply(null,arguments)};var 
_ScatterNd=Module[\"_ScatterNd\"]=function(){return(_ScatterNd=Module[\"_ScatterNd\"]=Module[\"asm\"][\"Ea\"]).apply(null,arguments)};var _SelectV2=Module[\"_SelectV2\"]=function(){return(_SelectV2=Module[\"_SelectV2\"]=Module[\"asm\"][\"Fa\"]).apply(null,arguments)};var _Sigmoid=Module[\"_Sigmoid\"]=function(){return(_Sigmoid=Module[\"_Sigmoid\"]=Module[\"asm\"][\"Ga\"]).apply(null,arguments)};var _Sin=Module[\"_Sin\"]=function(){return(_Sin=Module[\"_Sin\"]=Module[\"asm\"][\"Ha\"]).apply(null,arguments)};var _Softmax=Module[\"_Softmax\"]=function(){return(_Softmax=Module[\"_Softmax\"]=Module[\"asm\"][\"Ia\"]).apply(null,arguments)};var _Sqrt=Module[\"_Sqrt\"]=function(){return(_Sqrt=Module[\"_Sqrt\"]=Module[\"asm\"][\"Ja\"]).apply(null,arguments)};var _Square=Module[\"_Square\"]=function(){return(_Square=Module[\"_Square\"]=Module[\"asm\"][\"Ka\"]).apply(null,arguments)};var _SquaredDifference=Module[\"_SquaredDifference\"]=function(){return(_SquaredDifference=Module[\"_SquaredDifference\"]=Module[\"asm\"][\"La\"]).apply(null,arguments)};var _StridedSlice=Module[\"_StridedSlice\"]=function(){return(_StridedSlice=Module[\"_StridedSlice\"]=Module[\"asm\"][\"Ma\"]).apply(null,arguments)};var _Sub=Module[\"_Sub\"]=function(){return(_Sub=Module[\"_Sub\"]=Module[\"asm\"][\"Na\"]).apply(null,arguments)};var _Sum=Module[\"_Sum\"]=function(){return(_Sum=Module[\"_Sum\"]=Module[\"asm\"][\"Oa\"]).apply(null,arguments)};var _Tanh=Module[\"_Tanh\"]=function(){return(_Tanh=Module[\"_Tanh\"]=Module[\"asm\"][\"Pa\"]).apply(null,arguments)};var _Tile=Module[\"_Tile\"]=function(){return(_Tile=Module[\"_Tile\"]=Module[\"asm\"][\"Qa\"]).apply(null,arguments)};var _Transpose=Module[\"_Transpose\"]=function(){return(_Transpose=Module[\"_Transpose\"]=Module[\"asm\"][\"Ra\"]).apply(null,arguments)};var __FusedMatMul=Module[\"__FusedMatMul\"]=function(){return(__FusedMatMul=Module[\"__FusedMatMul\"]=Module[\"asm\"][\"Sa\"]).apply(null,arguments)};var _malloc=Module[\"_malloc\"]=function(){return(_malloc=Module[\"_malloc\"]=Module[\"asm\"][\"Ta\"]).apply(null,arguments)};var _free=Module[\"_free\"]=function(){return(_free=Module[\"_free\"]=Module[\"asm\"][\"Ua\"]).apply(null,arguments)};var _emscripten_get_global_libc=Module[\"_emscripten_get_global_libc\"]=function(){return(_emscripten_get_global_libc=Module[\"_emscripten_get_global_libc\"]=Module[\"asm\"][\"Va\"]).apply(null,arguments)};var ___errno_location=Module[\"___errno_location\"]=function(){return(___errno_location=Module[\"___errno_location\"]=Module[\"asm\"][\"Wa\"]).apply(null,arguments)};var ___em_js__initPthreadsJS=Module[\"___em_js__initPthreadsJS\"]=function(){return(___em_js__initPthreadsJS=Module[\"___em_js__initPthreadsJS\"]=Module[\"asm\"][\"Xa\"]).apply(null,arguments)};var _memalign=Module[\"_memalign\"]=function(){return(_memalign=Module[\"_memalign\"]=Module[\"asm\"][\"Ya\"]).apply(null,arguments)};var ___pthread_tsd_run_dtors=Module[\"___pthread_tsd_run_dtors\"]=function(){return(___pthread_tsd_run_dtors=Module[\"___pthread_tsd_run_dtors\"]=Module[\"asm\"][\"Za\"]).apply(null,arguments)};var _emscripten_main_thread_process_queued_calls=Module[\"_emscripten_main_thread_process_queued_calls\"]=function(){return(_emscripten_main_thread_process_queued_calls=Module[\"_emscripten_main_thread_process_queued_calls\"]=Module[\"asm\"][\"_a\"]).apply(null,arguments)};var 
_emscripten_current_thread_process_queued_calls=Module[\"_emscripten_current_thread_process_queued_calls\"]=function(){return(_emscripten_current_thread_process_queued_calls=Module[\"_emscripten_current_thread_process_queued_calls\"]=Module[\"asm\"][\"$a\"]).apply(null,arguments)};var _emscripten_register_main_browser_thread_id=Module[\"_emscripten_register_main_browser_thread_id\"]=function(){return(_emscripten_register_main_browser_thread_id=Module[\"_emscripten_register_main_browser_thread_id\"]=Module[\"asm\"][\"ab\"]).apply(null,arguments)};var _emscripten_main_browser_thread_id=Module[\"_emscripten_main_browser_thread_id\"]=function(){return(_emscripten_main_browser_thread_id=Module[\"_emscripten_main_browser_thread_id\"]=Module[\"asm\"][\"bb\"]).apply(null,arguments)};var _emscripten_async_run_in_main_thread=Module[\"_emscripten_async_run_in_main_thread\"]=function(){return(_emscripten_async_run_in_main_thread=Module[\"_emscripten_async_run_in_main_thread\"]=Module[\"asm\"][\"cb\"]).apply(null,arguments)};var _emscripten_sync_run_in_main_thread=Module[\"_emscripten_sync_run_in_main_thread\"]=function(){return(_emscripten_sync_run_in_main_thread=Module[\"_emscripten_sync_run_in_main_thread\"]=Module[\"asm\"][\"db\"]).apply(null,arguments)};var _emscripten_sync_run_in_main_thread_0=Module[\"_emscripten_sync_run_in_main_thread_0\"]=function(){return(_emscripten_sync_run_in_main_thread_0=Module[\"_emscripten_sync_run_in_main_thread_0\"]=Module[\"asm\"][\"eb\"]).apply(null,arguments)};var _emscripten_sync_run_in_main_thread_1=Module[\"_emscripten_sync_run_in_main_thread_1\"]=function(){return(_emscripten_sync_run_in_main_thread_1=Module[\"_emscripten_sync_run_in_main_thread_1\"]=Module[\"asm\"][\"fb\"]).apply(null,arguments)};var _emscripten_sync_run_in_main_thread_2=Module[\"_emscripten_sync_run_in_main_thread_2\"]=function(){return(_emscripten_sync_run_in_main_thread_2=Module[\"_emscripten_sync_run_in_main_thread_2\"]=Module[\"asm\"][\"gb\"]).apply(null,arguments)};var _emscripten_sync_run_in_main_thread_xprintf_varargs=Module[\"_emscripten_sync_run_in_main_thread_xprintf_varargs\"]=function(){return(_emscripten_sync_run_in_main_thread_xprintf_varargs=Module[\"_emscripten_sync_run_in_main_thread_xprintf_varargs\"]=Module[\"asm\"][\"hb\"]).apply(null,arguments)};var _emscripten_sync_run_in_main_thread_3=Module[\"_emscripten_sync_run_in_main_thread_3\"]=function(){return(_emscripten_sync_run_in_main_thread_3=Module[\"_emscripten_sync_run_in_main_thread_3\"]=Module[\"asm\"][\"ib\"]).apply(null,arguments)};var _emscripten_sync_run_in_main_thread_4=Module[\"_emscripten_sync_run_in_main_thread_4\"]=function(){return(_emscripten_sync_run_in_main_thread_4=Module[\"_emscripten_sync_run_in_main_thread_4\"]=Module[\"asm\"][\"jb\"]).apply(null,arguments)};var _emscripten_sync_run_in_main_thread_5=Module[\"_emscripten_sync_run_in_main_thread_5\"]=function(){return(_emscripten_sync_run_in_main_thread_5=Module[\"_emscripten_sync_run_in_main_thread_5\"]=Module[\"asm\"][\"kb\"]).apply(null,arguments)};var _emscripten_sync_run_in_main_thread_6=Module[\"_emscripten_sync_run_in_main_thread_6\"]=function(){return(_emscripten_sync_run_in_main_thread_6=Module[\"_emscripten_sync_run_in_main_thread_6\"]=Module[\"asm\"][\"lb\"]).apply(null,arguments)};var _emscripten_sync_run_in_main_thread_7=Module[\"_emscripten_sync_run_in_main_thread_7\"]=function(){return(_emscripten_sync_run_in_main_thread_7=Module[\"_emscripten_sync_run_in_main_thread_7\"]=Module[\"asm\"][\"mb\"]).apply(null,arguments)};var 
_emscripten_run_in_main_runtime_thread_js=Module[\"_emscripten_run_in_main_runtime_thread_js\"]=function(){return(_emscripten_run_in_main_runtime_thread_js=Module[\"_emscripten_run_in_main_runtime_thread_js\"]=Module[\"asm\"][\"nb\"]).apply(null,arguments)};var _emscripten_async_queue_on_thread_=Module[\"_emscripten_async_queue_on_thread_\"]=function(){return(_emscripten_async_queue_on_thread_=Module[\"_emscripten_async_queue_on_thread_\"]=Module[\"asm\"][\"ob\"]).apply(null,arguments)};var _emscripten_tls_init=Module[\"_emscripten_tls_init\"]=function(){return(_emscripten_tls_init=Module[\"_emscripten_tls_init\"]=Module[\"asm\"][\"pb\"]).apply(null,arguments)};var stackSave=Module[\"stackSave\"]=function(){return(stackSave=Module[\"stackSave\"]=Module[\"asm\"][\"qb\"]).apply(null,arguments)};var stackAlloc=Module[\"stackAlloc\"]=function(){return(stackAlloc=Module[\"stackAlloc\"]=Module[\"asm\"][\"rb\"]).apply(null,arguments)};var stackRestore=Module[\"stackRestore\"]=function(){return(stackRestore=Module[\"stackRestore\"]=Module[\"asm\"][\"sb\"]).apply(null,arguments)};var dynCall_vi=Module[\"dynCall_vi\"]=function(){return(dynCall_vi=Module[\"dynCall_vi\"]=Module[\"asm\"][\"tb\"]).apply(null,arguments)};var dynCall_v=Module[\"dynCall_v\"]=function(){return(dynCall_v=Module[\"dynCall_v\"]=Module[\"asm\"][\"ub\"]).apply(null,arguments)};var dynCall_ii=Module[\"dynCall_ii\"]=function(){return(dynCall_ii=Module[\"dynCall_ii\"]=Module[\"asm\"][\"vb\"]).apply(null,arguments)};Module[\"asm\"]=asm;Module[\"cwrap\"]=cwrap;Module[\"PThread\"]=PThread;Module[\"PThread\"]=PThread;Module[\"_pthread_self\"]=_pthread_self;Module[\"wasmMemory\"]=wasmMemory;Module[\"ExitStatus\"]=ExitStatus;var calledRun;Module[\"then\"]=function(func){if(calledRun){func(Module)}else{var old=Module[\"onRuntimeInitialized\"];Module[\"onRuntimeInitialized\"]=function(){if(old)old();func(Module)}}return Module};function ExitStatus(status){this.name=\"ExitStatus\";this.message=\"Program terminated with exit(\"+status+\")\";this.status=status}dependenciesFulfilled=function runCaller(){if(!calledRun)run();if(!calledRun)dependenciesFulfilled=runCaller};function run(args){args=args||arguments_;if(runDependencies>0){return}preRun();if(runDependencies>0)return;function doRun(){if(calledRun)return;calledRun=true;Module[\"calledRun\"]=true;if(ABORT)return;initRuntime();preMain();if(Module[\"onRuntimeInitialized\"])Module[\"onRuntimeInitialized\"]();postRun()}if(Module[\"setStatus\"]){Module[\"setStatus\"](\"Running...\");setTimeout(function(){setTimeout(function(){Module[\"setStatus\"](\"\")},1);doRun()},1)}else{doRun()}}Module[\"run\"]=run;if(Module[\"preInit\"]){if(typeof Module[\"preInit\"]==\"function\")Module[\"preInit\"]=[Module[\"preInit\"]];while(Module[\"preInit\"].length>0){Module[\"preInit\"].pop()()}}if(!ENVIRONMENT_IS_PTHREAD)noExitRuntime=true;if(!ENVIRONMENT_IS_PTHREAD)run();\n\n\n return WasmBackendModuleThreadedSimd\n}\n);\n})();\nif (typeof exports === 'object' && typeof module === 'object')\n module.exports = WasmBackendModuleThreadedSimd;\n else if (typeof define === 'function' && define['amd'])\n define([], function() { return WasmBackendModuleThreadedSimd; });\n else if (typeof exports === 'object')\n exports[\"WasmBackendModuleThreadedSimd\"] = WasmBackendModuleThreadedSimd;\n ", "\nvar WasmBackendModule = (function() {\n var _scriptDir = typeof document !== 'undefined' && document.currentScript ? 
document.currentScript.src : undefined;\n if (typeof __filename !== 'undefined') _scriptDir = _scriptDir || __filename;\n return (\nfunction(WasmBackendModule) {\n WasmBackendModule = WasmBackendModule || {};\n\nvar Module=typeof WasmBackendModule!==\"undefined\"?WasmBackendModule:{};var moduleOverrides={};var key;for(key in Module){if(Module.hasOwnProperty(key)){moduleOverrides[key]=Module[key]}}var arguments_=[];var thisProgram=\"./this.program\";var quit_=function(status,toThrow){throw toThrow};var ENVIRONMENT_IS_WEB=false;var ENVIRONMENT_IS_WORKER=false;var ENVIRONMENT_IS_NODE=false;var ENVIRONMENT_IS_SHELL=false;ENVIRONMENT_IS_WEB=typeof window===\"object\";ENVIRONMENT_IS_WORKER=typeof importScripts===\"function\";ENVIRONMENT_IS_NODE=typeof process===\"object\"&&typeof process.versions===\"object\"&&typeof process.versions.node===\"string\";ENVIRONMENT_IS_SHELL=!ENVIRONMENT_IS_WEB&&!ENVIRONMENT_IS_NODE&&!ENVIRONMENT_IS_WORKER;var scriptDirectory=\"\";function locateFile(path){if(Module[\"locateFile\"]){return Module[\"locateFile\"](path,scriptDirectory)}return scriptDirectory+path}var read_,readAsync,readBinary,setWindowTitle;var nodeFS;var nodePath;if(ENVIRONMENT_IS_NODE){if(ENVIRONMENT_IS_WORKER){scriptDirectory=require(\"path\").dirname(scriptDirectory)+\"/\"}else{scriptDirectory=__dirname+\"/\"}read_=function shell_read(filename,binary){if(!nodeFS)nodeFS=require(\"fs\");if(!nodePath)nodePath=require(\"path\");filename=nodePath[\"normalize\"](filename);return nodeFS[\"readFileSync\"](filename,binary?null:\"utf8\")};readBinary=function readBinary(filename){var ret=read_(filename,true);if(!ret.buffer){ret=new Uint8Array(ret)}assert(ret.buffer);return ret};if(process[\"argv\"].length>1){thisProgram=process[\"argv\"][1].replace(/\\\\/g,\"/\")}arguments_=process[\"argv\"].slice(2);process[\"on\"](\"uncaughtException\",function(ex){if(!(ex instanceof ExitStatus)){throw ex}});process[\"on\"](\"unhandledRejection\",abort);quit_=function(status){process[\"exit\"](status)};Module[\"inspect\"]=function(){return\"[Emscripten Module object]\"}}else if(ENVIRONMENT_IS_SHELL){if(typeof read!=\"undefined\"){read_=function shell_read(f){return read(f)}}readBinary=function readBinary(f){var data;if(typeof readbuffer===\"function\"){return new Uint8Array(readbuffer(f))}data=read(f,\"binary\");assert(typeof data===\"object\");return data};if(typeof scriptArgs!=\"undefined\"){arguments_=scriptArgs}else if(typeof arguments!=\"undefined\"){arguments_=arguments}if(typeof quit===\"function\"){quit_=function(status){quit(status)}}if(typeof print!==\"undefined\"){if(typeof console===\"undefined\")console={};console.log=print;console.warn=console.error=typeof printErr!==\"undefined\"?printErr:print}}else if(ENVIRONMENT_IS_WEB||ENVIRONMENT_IS_WORKER){if(ENVIRONMENT_IS_WORKER){scriptDirectory=self.location.href}else if(document.currentScript){scriptDirectory=document.currentScript.src}if(_scriptDir){scriptDirectory=_scriptDir}if(scriptDirectory.indexOf(\"blob:\")!==0){scriptDirectory=scriptDirectory.substr(0,scriptDirectory.lastIndexOf(\"/\")+1)}else{scriptDirectory=\"\"}{read_=function shell_read(url){var xhr=new XMLHttpRequest;xhr.open(\"GET\",url,false);xhr.send(null);return xhr.responseText};if(ENVIRONMENT_IS_WORKER){readBinary=function readBinary(url){var xhr=new XMLHttpRequest;xhr.open(\"GET\",url,false);xhr.responseType=\"arraybuffer\";xhr.send(null);return new Uint8Array(xhr.response)}}readAsync=function readAsync(url,onload,onerror){var xhr=new 
XMLHttpRequest;xhr.open(\"GET\",url,true);xhr.responseType=\"arraybuffer\";xhr.onload=function xhr_onload(){if(xhr.status==200||xhr.status==0&&xhr.response){onload(xhr.response);return}onerror()};xhr.onerror=onerror;xhr.send(null)}}setWindowTitle=function(title){document.title=title}}else{}var out=Module[\"print\"]||console.log.bind(console);var err=Module[\"printErr\"]||console.warn.bind(console);for(key in moduleOverrides){if(moduleOverrides.hasOwnProperty(key)){Module[key]=moduleOverrides[key]}}moduleOverrides=null;if(Module[\"arguments\"])arguments_=Module[\"arguments\"];if(Module[\"thisProgram\"])thisProgram=Module[\"thisProgram\"];if(Module[\"quit\"])quit_=Module[\"quit\"];var wasmBinary;if(Module[\"wasmBinary\"])wasmBinary=Module[\"wasmBinary\"];var noExitRuntime;if(Module[\"noExitRuntime\"])noExitRuntime=Module[\"noExitRuntime\"];if(typeof WebAssembly!==\"object\"){err(\"no native wasm support detected\")}var wasmMemory;var wasmTable=new WebAssembly.Table({\"initial\":147,\"maximum\":147+0,\"element\":\"anyfunc\"});var ABORT=false;var EXITSTATUS=0;function assert(condition,text){if(!condition){abort(\"Assertion failed: \"+text)}}function getCFunc(ident){var func=Module[\"_\"+ident];assert(func,\"Cannot call unknown function \"+ident+\", make sure it is exported\");return func}function ccall(ident,returnType,argTypes,args,opts){var toC={\"string\":function(str){var ret=0;if(str!==null&&str!==undefined&&str!==0){var len=(str.length<<2)+1;ret=stackAlloc(len);stringToUTF8(str,ret,len)}return ret},\"array\":function(arr){var ret=stackAlloc(arr.length);writeArrayToMemory(arr,ret);return ret}};function convertReturnValue(ret){if(returnType===\"string\")return UTF8ToString(ret);if(returnType===\"boolean\")return Boolean(ret);return ret}var func=getCFunc(ident);var cArgs=[];var stack=0;if(args){for(var i=0;i=endIdx))++endPtr;if(endPtr-idx>16&&heap.subarray&&UTF8Decoder){return UTF8Decoder.decode(heap.subarray(idx,endPtr))}else{var str=\"\";while(idx>10,56320|ch&1023)}}}return str}function UTF8ToString(ptr,maxBytesToRead){return ptr?UTF8ArrayToString(HEAPU8,ptr,maxBytesToRead):\"\"}function stringToUTF8Array(str,heap,outIdx,maxBytesToWrite){if(!(maxBytesToWrite>0))return 0;var startIdx=outIdx;var endIdx=outIdx+maxBytesToWrite-1;for(var i=0;i=55296&&u<=57343){var u1=str.charCodeAt(++i);u=65536+((u&1023)<<10)|u1&1023}if(u<=127){if(outIdx>=endIdx)break;heap[outIdx++]=u}else if(u<=2047){if(outIdx+1>=endIdx)break;heap[outIdx++]=192|u>>6;heap[outIdx++]=128|u&63}else if(u<=65535){if(outIdx+2>=endIdx)break;heap[outIdx++]=224|u>>12;heap[outIdx++]=128|u>>6&63;heap[outIdx++]=128|u&63}else{if(outIdx+3>=endIdx)break;heap[outIdx++]=240|u>>18;heap[outIdx++]=128|u>>12&63;heap[outIdx++]=128|u>>6&63;heap[outIdx++]=128|u&63}}heap[outIdx]=0;return outIdx-startIdx}function stringToUTF8(str,outPtr,maxBytesToWrite){return stringToUTF8Array(str,HEAPU8,outPtr,maxBytesToWrite)}function writeArrayToMemory(array,buffer){HEAP8.set(array,buffer)}var buffer,HEAP8,HEAPU8,HEAP16,HEAPU16,HEAP32,HEAPU32,HEAPF32,HEAPF64;function updateGlobalBufferAndViews(buf){buffer=buf;Module[\"HEAP8\"]=HEAP8=new Int8Array(buf);Module[\"HEAP16\"]=HEAP16=new Int16Array(buf);Module[\"HEAP32\"]=HEAP32=new Int32Array(buf);Module[\"HEAPU8\"]=HEAPU8=new Uint8Array(buf);Module[\"HEAPU16\"]=HEAPU16=new Uint16Array(buf);Module[\"HEAPU32\"]=HEAPU32=new Uint32Array(buf);Module[\"HEAPF32\"]=HEAPF32=new Float32Array(buf);Module[\"HEAPF64\"]=HEAPF64=new Float64Array(buf)}var INITIAL_INITIAL_MEMORY=Module[\"INITIAL_MEMORY\"]||16777216;function 
callRuntimeCallbacks(callbacks){while(callbacks.length>0){var callback=callbacks.shift();if(typeof callback==\"function\"){callback(Module);continue}var func=callback.func;if(typeof func===\"number\"){if(callback.arg===undefined){Module[\"dynCall_v\"](func)}else{Module[\"dynCall_vi\"](func,callback.arg)}}else{func(callback.arg===undefined?null:callback.arg)}}}var __ATPRERUN__=[];var __ATINIT__=[];var __ATMAIN__=[];var __ATPOSTRUN__=[];var runtimeInitialized=false;var runtimeExited=false;function preRun(){if(Module[\"preRun\"]){if(typeof Module[\"preRun\"]==\"function\")Module[\"preRun\"]=[Module[\"preRun\"]];while(Module[\"preRun\"].length){addOnPreRun(Module[\"preRun\"].shift())}}callRuntimeCallbacks(__ATPRERUN__)}function initRuntime(){runtimeInitialized=true;callRuntimeCallbacks(__ATINIT__)}function preMain(){callRuntimeCallbacks(__ATMAIN__)}function exitRuntime(){runtimeExited=true}function postRun(){if(Module[\"postRun\"]){if(typeof Module[\"postRun\"]==\"function\")Module[\"postRun\"]=[Module[\"postRun\"]];while(Module[\"postRun\"].length){addOnPostRun(Module[\"postRun\"].shift())}}callRuntimeCallbacks(__ATPOSTRUN__)}function addOnPreRun(cb){__ATPRERUN__.unshift(cb)}function addOnPostRun(cb){__ATPOSTRUN__.unshift(cb)}var Math_ceil=Math.ceil;var Math_floor=Math.floor;var runDependencies=0;var runDependencyWatcher=null;var dependenciesFulfilled=null;function addRunDependency(id){runDependencies++;if(Module[\"monitorRunDependencies\"]){Module[\"monitorRunDependencies\"](runDependencies)}}function removeRunDependency(id){runDependencies--;if(Module[\"monitorRunDependencies\"]){Module[\"monitorRunDependencies\"](runDependencies)}if(runDependencies==0){if(runDependencyWatcher!==null){clearInterval(runDependencyWatcher);runDependencyWatcher=null}if(dependenciesFulfilled){var callback=dependenciesFulfilled;dependenciesFulfilled=null;callback()}}}Module[\"preloadedImages\"]={};Module[\"preloadedAudios\"]={};function abort(what){if(Module[\"onAbort\"]){Module[\"onAbort\"](what)}what+=\"\";out(what);err(what);ABORT=true;EXITSTATUS=1;what=\"abort(\"+what+\"). 
Build with -s ASSERTIONS=1 for more info.\";throw new WebAssembly.RuntimeError(what)}function hasPrefix(str,prefix){return String.prototype.startsWith?str.startsWith(prefix):str.indexOf(prefix)===0}var dataURIPrefix=\"data:application/octet-stream;base64,\";function isDataURI(filename){return hasPrefix(filename,dataURIPrefix)}var fileURIPrefix=\"file://\";function isFileURI(filename){return hasPrefix(filename,fileURIPrefix)}var wasmBinaryFile=\"tfjs-backend-wasm.wasm\";if(!isDataURI(wasmBinaryFile)){wasmBinaryFile=locateFile(wasmBinaryFile)}function getBinary(){try{if(wasmBinary){return new Uint8Array(wasmBinary)}if(readBinary){return readBinary(wasmBinaryFile)}else{throw\"both async and sync fetching of the wasm failed\"}}catch(err){abort(err)}}function getBinaryPromise(){if(!wasmBinary&&(ENVIRONMENT_IS_WEB||ENVIRONMENT_IS_WORKER)&&typeof fetch===\"function\"&&!isFileURI(wasmBinaryFile)){return fetch(wasmBinaryFile,{credentials:\"same-origin\"}).then(function(response){if(!response[\"ok\"]){throw\"failed to load wasm binary file at '\"+wasmBinaryFile+\"'\"}return response[\"arrayBuffer\"]()}).catch(function(){return getBinary()})}return new Promise(function(resolve,reject){resolve(getBinary())})}function createWasm(){var info={\"env\":asmLibraryArg,\"wasi_snapshot_preview1\":asmLibraryArg};function receiveInstance(instance,module){var exports=instance.exports;Module[\"asm\"]=exports;wasmMemory=exports[\"memory\"];updateGlobalBufferAndViews(wasmMemory.buffer);removeRunDependency(\"wasm-instantiate\")}addRunDependency(\"wasm-instantiate\");function receiveInstantiatedSource(output){receiveInstance(output[\"instance\"])}function instantiateArrayBuffer(receiver){return getBinaryPromise().then(function(binary){return WebAssembly.instantiate(binary,info)}).then(receiver,function(reason){err(\"failed to asynchronously prepare wasm: \"+reason);abort(reason)})}function instantiateAsync(){if(!wasmBinary&&typeof WebAssembly.instantiateStreaming===\"function\"&&!isDataURI(wasmBinaryFile)&&!isFileURI(wasmBinaryFile)&&typeof fetch===\"function\"){fetch(wasmBinaryFile,{credentials:\"same-origin\"}).then(function(response){var result=WebAssembly.instantiateStreaming(response,info);return result.then(receiveInstantiatedSource,function(reason){err(\"wasm streaming compile failed: \"+reason);err(\"falling back to ArrayBuffer instantiation\");instantiateArrayBuffer(receiveInstantiatedSource)})})}else{return instantiateArrayBuffer(receiveInstantiatedSource)}}if(Module[\"instantiateWasm\"]){try{var exports=Module[\"instantiateWasm\"](info,receiveInstance);return exports}catch(e){err(\"Module.instantiateWasm callback failed with error: \"+e);return false}}instantiateAsync();return{}}__ATINIT__.push();function _emscripten_notify_memory_growth(memoryIndex){updateGlobalBufferAndViews(wasmMemory.buffer)}var PATH={splitPath:function(filename){var splitPathRe=/^(\\/?|)([\\s\\S]*?)((?:\\.{1,2}|[^\\/]+?|)(\\.[^.\\/]*|))(?:[\\/]*)$/;return splitPathRe.exec(filename).slice(1)},normalizeArray:function(parts,allowAboveRoot){var up=0;for(var i=parts.length-1;i>=0;i--){var last=parts[i];if(last===\".\"){parts.splice(i,1)}else if(last===\"..\"){parts.splice(i,1);up++}else if(up){parts.splice(i,1);up--}}if(allowAboveRoot){for(;up;up--){parts.unshift(\"..\")}}return parts},normalize:function(path){var 
isAbsolute=path.charAt(0)===\"/\",trailingSlash=path.substr(-1)===\"/\";path=PATH.normalizeArray(path.split(\"/\").filter(function(p){return!!p}),!isAbsolute).join(\"/\");if(!path&&!isAbsolute){path=\".\"}if(path&&trailingSlash){path+=\"/\"}return(isAbsolute?\"/\":\"\")+path},dirname:function(path){var result=PATH.splitPath(path),root=result[0],dir=result[1];if(!root&&!dir){return\".\"}if(dir){dir=dir.substr(0,dir.length-1)}return root+dir},basename:function(path){if(path===\"/\")return\"/\";var lastSlash=path.lastIndexOf(\"/\");if(lastSlash===-1)return path;return path.substr(lastSlash+1)},extname:function(path){return PATH.splitPath(path)[3]},join:function(){var paths=Array.prototype.slice.call(arguments,0);return PATH.normalize(paths.join(\"/\"))},join2:function(l,r){return PATH.normalize(l+\"/\"+r)}};var SYSCALLS={mappings:{},buffers:[null,[],[]],printChar:function(stream,curr){var buffer=SYSCALLS.buffers[stream];if(curr===0||curr===10){(stream===1?out:err)(UTF8ArrayToString(buffer,0));buffer.length=0}else{buffer.push(curr)}},varargs:undefined,get:function(){SYSCALLS.varargs+=4;var ret=HEAP32[SYSCALLS.varargs-4>>2];return ret},getStr:function(ptr){var ret=UTF8ToString(ptr);return ret},get64:function(low,high){return low}};function _fd_close(fd){return 0}function _fd_seek(fd,offset_low,offset_high,whence,newOffset){}function _fd_write(fd,iov,iovcnt,pnum){var num=0;for(var i=0;i>2];var len=HEAP32[iov+(i*8+4)>>2];for(var j=0;j>2]=num;return 0}function _exit(status){exit(status)}function _proc_exit(code){_exit(code)}function _roundf(d){d=+d;return d>=+0?+Math_floor(d+ +.5):+Math_ceil(d-+.5)}var asmLibraryArg={\"emscripten_notify_memory_growth\":_emscripten_notify_memory_growth,\"fd_close\":_fd_close,\"fd_seek\":_fd_seek,\"fd_write\":_fd_write,\"proc_exit\":_proc_exit,\"roundf\":_roundf};var asm=createWasm();Module[\"asm\"]=asm;var _init=Module[\"_init\"]=function(){return(_init=Module[\"_init\"]=Module[\"asm\"][\"init\"]).apply(null,arguments)};var _register_tensor=Module[\"_register_tensor\"]=function(){return(_register_tensor=Module[\"_register_tensor\"]=Module[\"asm\"][\"register_tensor\"]).apply(null,arguments)};var _dispose_data=Module[\"_dispose_data\"]=function(){return(_dispose_data=Module[\"_dispose_data\"]=Module[\"asm\"][\"dispose_data\"]).apply(null,arguments)};var _dispose=Module[\"_dispose\"]=function(){return(_dispose=Module[\"_dispose\"]=Module[\"asm\"][\"dispose\"]).apply(null,arguments)};var _Abs=Module[\"_Abs\"]=function(){return(_Abs=Module[\"_Abs\"]=Module[\"asm\"][\"Abs\"]).apply(null,arguments)};var _Add=Module[\"_Add\"]=function(){return(_Add=Module[\"_Add\"]=Module[\"asm\"][\"Add\"]).apply(null,arguments)};var _AddN=Module[\"_AddN\"]=function(){return(_AddN=Module[\"_AddN\"]=Module[\"asm\"][\"AddN\"]).apply(null,arguments)};var _ArgMax=Module[\"_ArgMax\"]=function(){return(_ArgMax=Module[\"_ArgMax\"]=Module[\"asm\"][\"ArgMax\"]).apply(null,arguments)};var _AvgPool=Module[\"_AvgPool\"]=function(){return(_AvgPool=Module[\"_AvgPool\"]=Module[\"asm\"][\"AvgPool\"]).apply(null,arguments)};var _BatchMatMul=Module[\"_BatchMatMul\"]=function(){return(_BatchMatMul=Module[\"_BatchMatMul\"]=Module[\"asm\"][\"BatchMatMul\"]).apply(null,arguments)};var _ClipByValue=Module[\"_ClipByValue\"]=function(){return(_ClipByValue=Module[\"_ClipByValue\"]=Module[\"asm\"][\"ClipByValue\"]).apply(null,arguments)};var _Conv2D=Module[\"_Conv2D\"]=function(){return(_Conv2D=Module[\"_Conv2D\"]=Module[\"asm\"][\"Conv2D\"]).apply(null,arguments)};var 
_Conv2DBackpropInput=Module[\"_Conv2DBackpropInput\"]=function(){return(_Conv2DBackpropInput=Module[\"_Conv2DBackpropInput\"]=Module[\"asm\"][\"Conv2DBackpropInput\"]).apply(null,arguments)};var _Cos=Module[\"_Cos\"]=function(){return(_Cos=Module[\"_Cos\"]=Module[\"asm\"][\"Cos\"]).apply(null,arguments)};var _CropAndResize=Module[\"_CropAndResize\"]=function(){return(_CropAndResize=Module[\"_CropAndResize\"]=Module[\"asm\"][\"CropAndResize\"]).apply(null,arguments)};var _Cumsum=Module[\"_Cumsum\"]=function(){return(_Cumsum=Module[\"_Cumsum\"]=Module[\"asm\"][\"Cumsum\"]).apply(null,arguments)};var _DepthToSpace=Module[\"_DepthToSpace\"]=function(){return(_DepthToSpace=Module[\"_DepthToSpace\"]=Module[\"asm\"][\"DepthToSpace\"]).apply(null,arguments)};var _DepthwiseConv2dNative=Module[\"_DepthwiseConv2dNative\"]=function(){return(_DepthwiseConv2dNative=Module[\"_DepthwiseConv2dNative\"]=Module[\"asm\"][\"DepthwiseConv2dNative\"]).apply(null,arguments)};var _Div=Module[\"_Div\"]=function(){return(_Div=Module[\"_Div\"]=Module[\"asm\"][\"Div\"]).apply(null,arguments)};var _Equal=Module[\"_Equal\"]=function(){return(_Equal=Module[\"_Equal\"]=Module[\"asm\"][\"Equal\"]).apply(null,arguments)};var _Exp=Module[\"_Exp\"]=function(){return(_Exp=Module[\"_Exp\"]=Module[\"asm\"][\"Exp\"]).apply(null,arguments)};var _FlipLeftRight=Module[\"_FlipLeftRight\"]=function(){return(_FlipLeftRight=Module[\"_FlipLeftRight\"]=Module[\"asm\"][\"FlipLeftRight\"]).apply(null,arguments)};var _FloorDiv=Module[\"_FloorDiv\"]=function(){return(_FloorDiv=Module[\"_FloorDiv\"]=Module[\"asm\"][\"FloorDiv\"]).apply(null,arguments)};var _FusedBatchNorm=Module[\"_FusedBatchNorm\"]=function(){return(_FusedBatchNorm=Module[\"_FusedBatchNorm\"]=Module[\"asm\"][\"FusedBatchNorm\"]).apply(null,arguments)};var _FusedConv2D=Module[\"_FusedConv2D\"]=function(){return(_FusedConv2D=Module[\"_FusedConv2D\"]=Module[\"asm\"][\"FusedConv2D\"]).apply(null,arguments)};var _FusedDepthwiseConv2D=Module[\"_FusedDepthwiseConv2D\"]=function(){return(_FusedDepthwiseConv2D=Module[\"_FusedDepthwiseConv2D\"]=Module[\"asm\"][\"FusedDepthwiseConv2D\"]).apply(null,arguments)};var _Gather=Module[\"_Gather\"]=function(){return(_Gather=Module[\"_Gather\"]=Module[\"asm\"][\"Gather\"]).apply(null,arguments)};var _GatherNd=Module[\"_GatherNd\"]=function(){return(_GatherNd=Module[\"_GatherNd\"]=Module[\"asm\"][\"GatherNd\"]).apply(null,arguments)};var _Greater=Module[\"_Greater\"]=function(){return(_Greater=Module[\"_Greater\"]=Module[\"asm\"][\"Greater\"]).apply(null,arguments)};var _GreaterEqual=Module[\"_GreaterEqual\"]=function(){return(_GreaterEqual=Module[\"_GreaterEqual\"]=Module[\"asm\"][\"GreaterEqual\"]).apply(null,arguments)};var _Less=Module[\"_Less\"]=function(){return(_Less=Module[\"_Less\"]=Module[\"asm\"][\"Less\"]).apply(null,arguments)};var _LessEqual=Module[\"_LessEqual\"]=function(){return(_LessEqual=Module[\"_LessEqual\"]=Module[\"asm\"][\"LessEqual\"]).apply(null,arguments)};var _Log=Module[\"_Log\"]=function(){return(_Log=Module[\"_Log\"]=Module[\"asm\"][\"Log\"]).apply(null,arguments)};var _LogicalAnd=Module[\"_LogicalAnd\"]=function(){return(_LogicalAnd=Module[\"_LogicalAnd\"]=Module[\"asm\"][\"LogicalAnd\"]).apply(null,arguments)};var _Max=Module[\"_Max\"]=function(){return(_Max=Module[\"_Max\"]=Module[\"asm\"][\"Max\"]).apply(null,arguments)};var _MaxPool=Module[\"_MaxPool\"]=function(){return(_MaxPool=Module[\"_MaxPool\"]=Module[\"asm\"][\"MaxPool\"]).apply(null,arguments)};var 
_Maximum=Module[\"_Maximum\"]=function(){return(_Maximum=Module[\"_Maximum\"]=Module[\"asm\"][\"Maximum\"]).apply(null,arguments)};var _Min=Module[\"_Min\"]=function(){return(_Min=Module[\"_Min\"]=Module[\"asm\"][\"Min\"]).apply(null,arguments)};var _Minimum=Module[\"_Minimum\"]=function(){return(_Minimum=Module[\"_Minimum\"]=Module[\"asm\"][\"Minimum\"]).apply(null,arguments)};var _Multiply=Module[\"_Multiply\"]=function(){return(_Multiply=Module[\"_Multiply\"]=Module[\"asm\"][\"Multiply\"]).apply(null,arguments)};var _Negate=Module[\"_Negate\"]=function(){return(_Negate=Module[\"_Negate\"]=Module[\"asm\"][\"Negate\"]).apply(null,arguments)};var _NonMaxSuppressionV3=Module[\"_NonMaxSuppressionV3\"]=function(){return(_NonMaxSuppressionV3=Module[\"_NonMaxSuppressionV3\"]=Module[\"asm\"][\"NonMaxSuppressionV3\"]).apply(null,arguments)};var _NonMaxSuppressionV4=Module[\"_NonMaxSuppressionV4\"]=function(){return(_NonMaxSuppressionV4=Module[\"_NonMaxSuppressionV4\"]=Module[\"asm\"][\"NonMaxSuppressionV4\"]).apply(null,arguments)};var _NonMaxSuppressionV5=Module[\"_NonMaxSuppressionV5\"]=function(){return(_NonMaxSuppressionV5=Module[\"_NonMaxSuppressionV5\"]=Module[\"asm\"][\"NonMaxSuppressionV5\"]).apply(null,arguments)};var _NotEqual=Module[\"_NotEqual\"]=function(){return(_NotEqual=Module[\"_NotEqual\"]=Module[\"asm\"][\"NotEqual\"]).apply(null,arguments)};var _OneHot=Module[\"_OneHot\"]=function(){return(_OneHot=Module[\"_OneHot\"]=Module[\"asm\"][\"OneHot\"]).apply(null,arguments)};var _PadV2=Module[\"_PadV2\"]=function(){return(_PadV2=Module[\"_PadV2\"]=Module[\"asm\"][\"PadV2\"]).apply(null,arguments)};var _Pow=Module[\"_Pow\"]=function(){return(_Pow=Module[\"_Pow\"]=Module[\"asm\"][\"Pow\"]).apply(null,arguments)};var _Prelu=Module[\"_Prelu\"]=function(){return(_Prelu=Module[\"_Prelu\"]=Module[\"asm\"][\"Prelu\"]).apply(null,arguments)};var _Relu=Module[\"_Relu\"]=function(){return(_Relu=Module[\"_Relu\"]=Module[\"asm\"][\"Relu\"]).apply(null,arguments)};var _Relu6=Module[\"_Relu6\"]=function(){return(_Relu6=Module[\"_Relu6\"]=Module[\"asm\"][\"Relu6\"]).apply(null,arguments)};var _ResizeBilinear=Module[\"_ResizeBilinear\"]=function(){return(_ResizeBilinear=Module[\"_ResizeBilinear\"]=Module[\"asm\"][\"ResizeBilinear\"]).apply(null,arguments)};var _Reverse=Module[\"_Reverse\"]=function(){return(_Reverse=Module[\"_Reverse\"]=Module[\"asm\"][\"Reverse\"]).apply(null,arguments)};var _RotateWithOffset=Module[\"_RotateWithOffset\"]=function(){return(_RotateWithOffset=Module[\"_RotateWithOffset\"]=Module[\"asm\"][\"RotateWithOffset\"]).apply(null,arguments)};var _Rsqrt=Module[\"_Rsqrt\"]=function(){return(_Rsqrt=Module[\"_Rsqrt\"]=Module[\"asm\"][\"Rsqrt\"]).apply(null,arguments)};var _ScatterNd=Module[\"_ScatterNd\"]=function(){return(_ScatterNd=Module[\"_ScatterNd\"]=Module[\"asm\"][\"ScatterNd\"]).apply(null,arguments)};var _SelectV2=Module[\"_SelectV2\"]=function(){return(_SelectV2=Module[\"_SelectV2\"]=Module[\"asm\"][\"SelectV2\"]).apply(null,arguments)};var _Sigmoid=Module[\"_Sigmoid\"]=function(){return(_Sigmoid=Module[\"_Sigmoid\"]=Module[\"asm\"][\"Sigmoid\"]).apply(null,arguments)};var _Sin=Module[\"_Sin\"]=function(){return(_Sin=Module[\"_Sin\"]=Module[\"asm\"][\"Sin\"]).apply(null,arguments)};var _Softmax=Module[\"_Softmax\"]=function(){return(_Softmax=Module[\"_Softmax\"]=Module[\"asm\"][\"Softmax\"]).apply(null,arguments)};var _Sqrt=Module[\"_Sqrt\"]=function(){return(_Sqrt=Module[\"_Sqrt\"]=Module[\"asm\"][\"Sqrt\"]).apply(null,arguments)};var 
_Square=Module[\"_Square\"]=function(){return(_Square=Module[\"_Square\"]=Module[\"asm\"][\"Square\"]).apply(null,arguments)};var _SquaredDifference=Module[\"_SquaredDifference\"]=function(){return(_SquaredDifference=Module[\"_SquaredDifference\"]=Module[\"asm\"][\"SquaredDifference\"]).apply(null,arguments)};var _StridedSlice=Module[\"_StridedSlice\"]=function(){return(_StridedSlice=Module[\"_StridedSlice\"]=Module[\"asm\"][\"StridedSlice\"]).apply(null,arguments)};var _Sub=Module[\"_Sub\"]=function(){return(_Sub=Module[\"_Sub\"]=Module[\"asm\"][\"Sub\"]).apply(null,arguments)};var _Sum=Module[\"_Sum\"]=function(){return(_Sum=Module[\"_Sum\"]=Module[\"asm\"][\"Sum\"]).apply(null,arguments)};var _Tanh=Module[\"_Tanh\"]=function(){return(_Tanh=Module[\"_Tanh\"]=Module[\"asm\"][\"Tanh\"]).apply(null,arguments)};var _Tile=Module[\"_Tile\"]=function(){return(_Tile=Module[\"_Tile\"]=Module[\"asm\"][\"Tile\"]).apply(null,arguments)};var _Transpose=Module[\"_Transpose\"]=function(){return(_Transpose=Module[\"_Transpose\"]=Module[\"asm\"][\"Transpose\"]).apply(null,arguments)};var __FusedMatMul=Module[\"__FusedMatMul\"]=function(){return(__FusedMatMul=Module[\"__FusedMatMul\"]=Module[\"asm\"][\"_FusedMatMul\"]).apply(null,arguments)};var _malloc=Module[\"_malloc\"]=function(){return(_malloc=Module[\"_malloc\"]=Module[\"asm\"][\"malloc\"]).apply(null,arguments)};var _free=Module[\"_free\"]=function(){return(_free=Module[\"_free\"]=Module[\"asm\"][\"free\"]).apply(null,arguments)};var __start=Module[\"__start\"]=function(){return(__start=Module[\"__start\"]=Module[\"asm\"][\"_start\"]).apply(null,arguments)};var stackSave=Module[\"stackSave\"]=function(){return(stackSave=Module[\"stackSave\"]=Module[\"asm\"][\"stackSave\"]).apply(null,arguments)};var stackAlloc=Module[\"stackAlloc\"]=function(){return(stackAlloc=Module[\"stackAlloc\"]=Module[\"asm\"][\"stackAlloc\"]).apply(null,arguments)};var stackRestore=Module[\"stackRestore\"]=function(){return(stackRestore=Module[\"stackRestore\"]=Module[\"asm\"][\"stackRestore\"]).apply(null,arguments)};Module[\"asm\"]=asm;Module[\"cwrap\"]=cwrap;var calledRun;Module[\"then\"]=function(func){if(calledRun){func(Module)}else{var old=Module[\"onRuntimeInitialized\"];Module[\"onRuntimeInitialized\"]=function(){if(old)old();func(Module)}}return Module};function ExitStatus(status){this.name=\"ExitStatus\";this.message=\"Program terminated with exit(\"+status+\")\";this.status=status}var calledMain=false;dependenciesFulfilled=function runCaller(){if(!calledRun)run();if(!calledRun)dependenciesFulfilled=runCaller};function callMain(args){var entryFunction=Module[\"__start\"];try{entryFunction();var ret=0;exit(ret,true)}catch(e){if(e instanceof ExitStatus){return}else if(e==\"unwind\"){noExitRuntime=true;return}else{var toLog=e;if(e&&typeof e===\"object\"&&e.stack){toLog=[e,e.stack]}err(\"exception thrown: \"+toLog);quit_(1,e)}}finally{calledMain=true}}function run(args){args=args||arguments_;if(runDependencies>0){return}preRun();if(runDependencies>0)return;function doRun(){if(calledRun)return;calledRun=true;Module[\"calledRun\"]=true;if(ABORT)return;initRuntime();preMain();if(Module[\"onRuntimeInitialized\"])Module[\"onRuntimeInitialized\"]();if(shouldRunNow)callMain(args);postRun()}if(Module[\"setStatus\"]){Module[\"setStatus\"](\"Running...\");setTimeout(function(){setTimeout(function(){Module[\"setStatus\"](\"\")},1);doRun()},1)}else{doRun()}}Module[\"run\"]=run;function 
exit(status,implicit){if(implicit&&noExitRuntime&&status===0){return}if(noExitRuntime){}else{ABORT=true;EXITSTATUS=status;exitRuntime();if(Module[\"onExit\"])Module[\"onExit\"](status)}quit_(status,new ExitStatus(status))}if(Module[\"preInit\"]){if(typeof Module[\"preInit\"]==\"function\")Module[\"preInit\"]=[Module[\"preInit\"]];while(Module[\"preInit\"].length>0){Module[\"preInit\"].pop()()}}var shouldRunNow=true;if(Module[\"noInitialRun\"])shouldRunNow=false;noExitRuntime=true;run();\n\n\n return WasmBackendModule\n}\n);\n})();\nif (typeof exports === 'object' && typeof module === 'object')\n module.exports = WasmBackendModule;\n else if (typeof define === 'function' && define['amd'])\n define([], function() { return WasmBackendModule; });\n else if (typeof exports === 'object')\n exports[\"WasmBackendModule\"] = WasmBackendModule;\n ", "import { tf, loadGraphModel } from '../tf.js';\n\nconst NUM_LANDMARKS = 6;\n\nfunction generateAnchors(inputSize) {\n const spec = { strides: [inputSize / 16, inputSize / 8], anchors: [2, 6] };\n const anchors = [];\n for (let i = 0; i < spec.strides.length; i++) {\n const stride = spec.strides[i];\n const gridRows = Math.floor((inputSize + stride - 1) / stride);\n const gridCols = Math.floor((inputSize + stride - 1) / stride);\n const anchorsNum = spec.anchors[i];\n for (let gridY = 0; gridY < gridRows; gridY++) {\n const anchorY = stride * (gridY + 0.5);\n for (let gridX = 0; gridX < gridCols; gridX++) {\n const anchorX = stride * (gridX + 0.5);\n for (let n = 0; n < anchorsNum; n++) {\n anchors.push([anchorX, anchorY]);\n }\n }\n }\n }\n return anchors;\n}\n\nconst disposeBox = (box) => {\n box.startEndTensor.dispose();\n box.startPoint.dispose();\n box.endPoint.dispose();\n};\n\nconst createBox = (startEndTensor) => ({\n startEndTensor,\n startPoint: tf.slice(startEndTensor, [0, 0], [-1, 2]),\n endPoint: tf.slice(startEndTensor, [0, 2], [-1, 2]),\n});\n\nconst scaleBox = (box, factors) => {\n const starts = tf.mul(box.startPoint, factors);\n const ends = tf.mul(box.endPoint, factors);\n const newCoordinates = tf.concat2d([starts, ends], 1);\n return createBox(newCoordinates);\n};\n\nfunction decodeBounds(boxOutputs, anchors, inputSize) {\n const boxStarts = tf.slice(boxOutputs, [0, 1], [-1, 2]);\n const centers = tf.add(boxStarts, anchors);\n const boxSizes = tf.slice(boxOutputs, [0, 3], [-1, 2]);\n const boxSizesNormalized = tf.div(boxSizes, inputSize);\n const centersNormalized = tf.div(centers, inputSize);\n const halfBoxSize = tf.div(boxSizesNormalized, 2);\n const starts = tf.sub(centersNormalized, halfBoxSize);\n const ends = tf.add(centersNormalized, halfBoxSize);\n const startNormalized = tf.mul(starts, inputSize);\n const endNormalized = tf.mul(ends, inputSize);\n const concatAxis = 1;\n return tf.concat2d([startNormalized, endNormalized], concatAxis);\n}\n\nfunction scaleBoxFromPrediction(face, scaleFactor) {\n return tf.tidy(() => {\n const box = face['box'] ? 
face['box'] : face;\n return scaleBox(box, scaleFactor).startEndTensor.squeeze();\n });\n}\n\nclass BlazeFaceModel {\n constructor(model, config) {\n this.blazeFaceModel = model;\n this.width = config.detector.inputSize;\n this.height = config.detector.inputSize;\n this.anchorsData = generateAnchors(config.detector.inputSize);\n this.anchors = tf.tensor2d(this.anchorsData);\n this.inputSize = tf.tensor1d([this.width, this.height]);\n this.config = config;\n this.scaleFaces = 0.8;\n }\n\n async getBoundingBoxes(inputImage) {\n // sanity check on input\n if ((!inputImage) || (inputImage.isDisposedInternal) || (inputImage.shape.length !== 4) || (inputImage.shape[1] < 1) || (inputImage.shape[2] < 1)) return null;\n const [detectedOutputs, boxes, scores] = tf.tidy(() => {\n const resizedImage = inputImage.resizeBilinear([this.width, this.height]);\n // const normalizedImage = tf.mul(tf.sub(resizedImage.div(255), 0.5), 2);\n const normalizedImage = tf.sub(resizedImage.div(127.5), 1);\n const batchedPrediction = this.blazeFaceModel.predict(normalizedImage);\n let prediction;\n // are we using tfhub or pinto converted model?\n if (Array.isArray(batchedPrediction)) {\n const sorted = batchedPrediction.sort((a, b) => a.size - b.size);\n const concat384 = tf.concat([sorted[0], sorted[2]], 2); // dim: 384, 1 + 16\n const concat512 = tf.concat([sorted[1], sorted[3]], 2); // dim: 512, 1 + 16\n const concat = tf.concat([concat512, concat384], 1);\n prediction = concat.squeeze(0);\n } else {\n prediction = batchedPrediction.squeeze(); // when using tfhub model\n }\n const decodedBounds = decodeBounds(prediction, this.anchors, this.inputSize);\n const logits = tf.slice(prediction, [0, 0], [-1, 1]);\n const scoresOut = tf.sigmoid(logits).squeeze();\n return [prediction, decodedBounds, scoresOut];\n });\n const boxIndicesTensor = await tf.image.nonMaxSuppressionAsync(boxes, scores, this.config.detector.maxFaces, this.config.detector.iouThreshold, this.config.detector.scoreThreshold);\n const boxIndices = boxIndicesTensor.arraySync();\n boxIndicesTensor.dispose();\n const boundingBoxesMap = boxIndices.map((boxIndex) => tf.slice(boxes, [boxIndex, 0], [1, -1]));\n const boundingBoxes = boundingBoxesMap.map((boundingBox) => {\n const vals = boundingBox.arraySync();\n boundingBox.dispose();\n return vals;\n });\n\n const scoresVal = scores.dataSync();\n const annotatedBoxes = [];\n for (const i in boundingBoxes) {\n const boxIndex = boxIndices[i];\n const confidence = scoresVal[boxIndex];\n if (confidence > this.config.detector.minConfidence) {\n const box = createBox(boundingBoxes[i]);\n const anchor = this.anchorsData[boxIndex];\n const landmarks = tf.tidy(() => tf.slice(detectedOutputs, [boxIndex, NUM_LANDMARKS - 1], [1, -1]).squeeze().reshape([NUM_LANDMARKS, -1]));\n annotatedBoxes.push({ box, landmarks, anchor, confidence });\n }\n }\n detectedOutputs.dispose();\n boxes.dispose();\n scores.dispose();\n detectedOutputs.dispose();\n return {\n boxes: annotatedBoxes,\n scaleFactor: [inputImage.shape[2] / this.width, inputImage.shape[1] / this.height],\n };\n }\n\n async estimateFaces(input) {\n const { boxes, scaleFactor } = await this.getBoundingBoxes(input);\n const faces = [];\n for (const face of boxes) {\n const landmarkData = face.landmarks.arraySync();\n const scaledBox = scaleBoxFromPrediction(face, scaleFactor);\n const boxData = scaleBox.arraySync();\n const probabilityData = face.probability.arraySync();\n const anchor = face.anchor;\n const [scaleFactorX, scaleFactorY] = scaleFactor;\n const 
scaledLandmarks = landmarkData\n .map((landmark) => ([\n (landmark[0] + anchor[0]) * scaleFactorX,\n (landmark[1] + anchor[1]) * scaleFactorY,\n ]));\n const normalizedFace = {\n topLeft: boxData.slice(0, 2),\n bottomRight: boxData.slice(2),\n landmarks: scaledLandmarks,\n probability: probabilityData,\n };\n disposeBox(face.box);\n face.landmarks.dispose();\n face.probability.dispose();\n scaledBox.dispose();\n faces.push(normalizedFace);\n }\n return faces;\n }\n}\n\nasync function load(config) {\n const blazeface = await loadGraphModel(config.detector.modelPath, { fromTFHub: config.detector.modelPath.includes('tfhub.dev') });\n const model = new BlazeFaceModel(blazeface, config);\n // eslint-disable-next-line no-console\n console.log(`Human: load model: ${config.detector.modelPath.match(/\\/(.*)\\./)[1]}`);\n return model;\n}\n\nexports.load = load;\nexports.BlazeFaceModel = BlazeFaceModel;\nexports.disposeBox = disposeBox;\n", "exports.MESH_ANNOTATIONS = {\n silhouette: [\n 10, 338, 297, 332, 284, 251, 389, 356, 454, 323, 361, 288,\n 397, 365, 379, 378, 400, 377, 152, 148, 176, 149, 150, 136,\n 172, 58, 132, 93, 234, 127, 162, 21, 54, 103, 67, 109,\n ],\n lipsUpperOuter: [61, 185, 40, 39, 37, 0, 267, 269, 270, 409, 291],\n lipsLowerOuter: [146, 91, 181, 84, 17, 314, 405, 321, 375, 291],\n lipsUpperInner: [78, 191, 80, 81, 82, 13, 312, 311, 310, 415, 308],\n lipsLowerInner: [78, 95, 88, 178, 87, 14, 317, 402, 318, 324, 308],\n rightEyeUpper0: [246, 161, 160, 159, 158, 157, 173],\n rightEyeLower0: [33, 7, 163, 144, 145, 153, 154, 155, 133],\n rightEyeUpper1: [247, 30, 29, 27, 28, 56, 190],\n rightEyeLower1: [130, 25, 110, 24, 23, 22, 26, 112, 243],\n rightEyeUpper2: [113, 225, 224, 223, 222, 221, 189],\n rightEyeLower2: [226, 31, 228, 229, 230, 231, 232, 233, 244],\n rightEyeLower3: [143, 111, 117, 118, 119, 120, 121, 128, 245],\n rightEyebrowUpper: [156, 70, 63, 105, 66, 107, 55, 193],\n rightEyebrowLower: [35, 124, 46, 53, 52, 65],\n rightEyeIris: [473, 474, 475, 476, 477],\n leftEyeUpper0: [466, 388, 387, 386, 385, 384, 398],\n leftEyeLower0: [263, 249, 390, 373, 374, 380, 381, 382, 362],\n leftEyeUpper1: [467, 260, 259, 257, 258, 286, 414],\n leftEyeLower1: [359, 255, 339, 254, 253, 252, 256, 341, 463],\n leftEyeUpper2: [342, 445, 444, 443, 442, 441, 413],\n leftEyeLower2: [446, 261, 448, 449, 450, 451, 452, 453, 464],\n leftEyeLower3: [372, 340, 346, 347, 348, 349, 350, 357, 465],\n leftEyebrowUpper: [383, 300, 293, 334, 296, 336, 285, 417],\n leftEyebrowLower: [265, 353, 276, 283, 282, 295],\n leftEyeIris: [468, 469, 470, 471, 472],\n midwayBetweenEyes: [168],\n noseTip: [1],\n noseBottom: [2],\n noseRightCorner: [98],\n noseLeftCorner: [327],\n rightCheek: [205],\n leftCheek: [425],\n};\nexports.MESH_TO_IRIS_INDICES_MAP = [ // A mapping from facemesh model keypoints to iris model keypoints.\n { key: 'EyeUpper0', indices: [9, 10, 11, 12, 13, 14, 15] },\n { key: 'EyeUpper1', indices: [25, 26, 27, 28, 29, 30, 31] },\n { key: 'EyeUpper2', indices: [41, 42, 43, 44, 45, 46, 47] },\n { key: 'EyeLower0', indices: [0, 1, 2, 3, 4, 5, 6, 7, 8] },\n { key: 'EyeLower1', indices: [16, 17, 18, 19, 20, 21, 22, 23, 24] },\n { key: 'EyeLower2', indices: [32, 33, 34, 35, 36, 37, 38, 39, 40] },\n { key: 'EyeLower3', indices: [54, 55, 56, 57, 58, 59, 60, 61, 62] },\n { key: 'EyebrowUpper', indices: [63, 64, 65, 66, 67, 68, 69, 70] },\n { key: 'EyebrowLower', indices: [48, 49, 50, 51, 52, 53] },\n];\n", "import { tf } from '../tf.js';\n\nfunction scaleBoxCoordinates(box, factor) {\n const startPoint = 
[box.startPoint[0] * factor[0], box.startPoint[1] * factor[1]];\n const endPoint = [box.endPoint[0] * factor[0], box.endPoint[1] * factor[1]];\n return { startPoint, endPoint };\n}\nexports.scaleBoxCoordinates = scaleBoxCoordinates;\n\nfunction getBoxSize(box) {\n return [\n Math.abs(box.endPoint[0] - box.startPoint[0]),\n Math.abs(box.endPoint[1] - box.startPoint[1]),\n ];\n}\nexports.getBoxSize = getBoxSize;\n\nfunction getBoxCenter(box) {\n return [\n box.startPoint[0] + (box.endPoint[0] - box.startPoint[0]) / 2,\n box.startPoint[1] + (box.endPoint[1] - box.startPoint[1]) / 2,\n ];\n}\nexports.getBoxCenter = getBoxCenter;\n\nfunction cutBoxFromImageAndResize(box, image, cropSize) {\n const h = image.shape[1];\n const w = image.shape[2];\n const boxes = [[\n box.startPoint[1] / h, box.startPoint[0] / w, box.endPoint[1] / h,\n box.endPoint[0] / w,\n ]];\n return tf.image.cropAndResize(image, boxes, [0], cropSize);\n}\nexports.cutBoxFromImageAndResize = cutBoxFromImageAndResize;\n\nfunction enlargeBox(box, factor = 1.5) {\n const center = getBoxCenter(box);\n const size = getBoxSize(box);\n const newHalfSize = [factor * size[0] / 2, factor * size[1] / 2];\n const startPoint = [center[0] - newHalfSize[0], center[1] - newHalfSize[1]];\n const endPoint = [center[0] + newHalfSize[0], center[1] + newHalfSize[1]];\n return { startPoint, endPoint, landmarks: box.landmarks };\n}\nexports.enlargeBox = enlargeBox;\n\nfunction squarifyBox(box) {\n const centers = getBoxCenter(box);\n const size = getBoxSize(box);\n const maxEdge = Math.max(...size);\n const halfSize = maxEdge / 2;\n const startPoint = [centers[0] - halfSize, centers[1] - halfSize];\n const endPoint = [centers[0] + halfSize, centers[1] + halfSize];\n return { startPoint, endPoint, landmarks: box.landmarks };\n}\nexports.squarifyBox = squarifyBox;\n", "exports.IDENTITY_MATRIX = [[1, 0, 0], [0, 1, 0], [0, 0, 1]];\n/**\n * Normalizes the provided angle to the range -pi to pi.\n * @param angle The angle in radians to be normalized.\n */\nfunction normalizeRadians(angle) {\n return angle - 2 * Math.PI * Math.floor((angle + Math.PI) / (2 * Math.PI));\n}\nexports.normalizeRadians = normalizeRadians;\n/**\n * Computes the angle of rotation between two anchor points.\n * @param point1 First anchor point\n * @param point2 Second anchor point\n */\nfunction computeRotation(point1, point2) {\n const radians = Math.PI / 2 - Math.atan2(-(point2[1] - point1[1]), point2[0] - point1[0]);\n return normalizeRadians(radians);\n}\nexports.computeRotation = computeRotation;\nfunction radToDegrees(rad) {\n return rad * 180 / Math.PI;\n}\nexports.radToDegrees = radToDegrees;\nfunction buildTranslationMatrix(x, y) {\n return [[1, 0, x], [0, 1, y], [0, 0, 1]];\n}\nfunction dot(v1, v2) {\n let product = 0;\n for (let i = 0; i < v1.length; i++) {\n product += v1[i] * v2[i];\n }\n return product;\n}\nexports.dot = dot;\nfunction getColumnFrom2DArr(arr, columnIndex) {\n const column = [];\n for (let i = 0; i < arr.length; i++) {\n column.push(arr[i][columnIndex]);\n }\n return column;\n}\nexports.getColumnFrom2DArr = getColumnFrom2DArr;\nfunction multiplyTransformMatrices(mat1, mat2) {\n const product = [];\n const size = mat1.length;\n for (let row = 0; row < size; row++) {\n product.push([]);\n for (let col = 0; col < size; col++) {\n product[row].push(dot(mat1[row], getColumnFrom2DArr(mat2, col)));\n }\n }\n return product;\n}\nfunction buildRotationMatrix(rotation, center) {\n const cosA = Math.cos(rotation);\n const sinA = Math.sin(rotation);\n const 
rotationMatrix = [[cosA, -sinA, 0], [sinA, cosA, 0], [0, 0, 1]];\n const translationMatrix = buildTranslationMatrix(center[0], center[1]);\n const translationTimesRotation = multiplyTransformMatrices(translationMatrix, rotationMatrix);\n const negativeTranslationMatrix = buildTranslationMatrix(-center[0], -center[1]);\n return multiplyTransformMatrices(translationTimesRotation, negativeTranslationMatrix);\n}\nexports.buildRotationMatrix = buildRotationMatrix;\nfunction invertTransformMatrix(matrix) {\n const rotationComponent = [[matrix[0][0], matrix[1][0]], [matrix[0][1], matrix[1][1]]];\n const translationComponent = [matrix[0][2], matrix[1][2]];\n const invertedTranslation = [\n -dot(rotationComponent[0], translationComponent),\n -dot(rotationComponent[1], translationComponent),\n ];\n return [\n rotationComponent[0].concat(invertedTranslation[0]),\n rotationComponent[1].concat(invertedTranslation[1]),\n [0, 0, 1],\n ];\n}\nexports.invertTransformMatrix = invertTransformMatrix;\nfunction rotatePoint(homogeneousCoordinate, rotationMatrix) {\n return [\n dot(homogeneousCoordinate, rotationMatrix[0]),\n dot(homogeneousCoordinate, rotationMatrix[1]),\n ];\n}\nexports.rotatePoint = rotatePoint;\nfunction xyDistanceBetweenPoints(a, b) {\n return Math.sqrt(((a[0] - b[0]) ** 2) + ((a[1] - b[1]) ** 2));\n}\nexports.xyDistanceBetweenPoints = xyDistanceBetweenPoints;\n", "/* eslint-disable class-methods-use-this */\nimport { tf } from '../tf.js';\nimport * as bounding from './box';\nimport * as keypoints from './keypoints';\nimport * as util from './util';\n\nconst LANDMARKS_COUNT = 468;\nconst MESH_MOUTH_INDEX = 13;\nconst MESH_KEYPOINTS_LINE_OF_SYMMETRY_INDICES = [MESH_MOUTH_INDEX, keypoints.MESH_ANNOTATIONS['midwayBetweenEyes'][0]];\nconst BLAZEFACE_MOUTH_INDEX = 3;\nconst BLAZEFACE_NOSE_INDEX = 2;\nconst BLAZEFACE_KEYPOINTS_LINE_OF_SYMMETRY_INDICES = [BLAZEFACE_MOUTH_INDEX, BLAZEFACE_NOSE_INDEX];\nconst LEFT_EYE_OUTLINE = keypoints.MESH_ANNOTATIONS['leftEyeLower0'];\nconst LEFT_EYE_BOUNDS = [LEFT_EYE_OUTLINE[0], LEFT_EYE_OUTLINE[LEFT_EYE_OUTLINE.length - 1]];\nconst RIGHT_EYE_OUTLINE = keypoints.MESH_ANNOTATIONS['rightEyeLower0'];\nconst RIGHT_EYE_BOUNDS = [RIGHT_EYE_OUTLINE[0], RIGHT_EYE_OUTLINE[RIGHT_EYE_OUTLINE.length - 1]];\nconst IRIS_UPPER_CENTER_INDEX = 3;\nconst IRIS_LOWER_CENTER_INDEX = 4;\nconst IRIS_IRIS_INDEX = 71;\nconst IRIS_NUM_COORDINATES = 76;\n\n// Replace the raw coordinates returned by facemesh with refined iris model coordinates. Update the z coordinate to be an average of the original and the new. 
This produces the best visual effect.\nfunction replaceRawCoordinates(rawCoords, newCoords, prefix, keys) {\n for (let i = 0; i < keypoints.MESH_TO_IRIS_INDICES_MAP.length; i++) {\n const { key, indices } = keypoints.MESH_TO_IRIS_INDICES_MAP[i];\n const originalIndices = keypoints.MESH_ANNOTATIONS[`${prefix}${key}`];\n const shouldReplaceAllKeys = keys == null;\n if (shouldReplaceAllKeys || keys.includes(key)) {\n for (let j = 0; j < indices.length; j++) {\n const index = indices[j];\n rawCoords[originalIndices[j]] = [\n newCoords[index][0], newCoords[index][1],\n (newCoords[index][2] + rawCoords[originalIndices[j]][2]) / 2,\n ];\n }\n }\n }\n}\n// The Pipeline coordinates between the bounding box and skeleton models.\nclass Pipeline {\n constructor(boundingBoxDetector, meshDetector, irisModel, config) {\n // An array of facial bounding boxes.\n this.storedBoxes = [];\n this.runsWithoutFaceDetector = 0;\n this.boundingBoxDetector = boundingBoxDetector;\n this.meshDetector = meshDetector;\n this.irisModel = irisModel;\n this.meshWidth = config.mesh.inputSize;\n this.meshHeight = config.mesh.inputSize;\n this.irisSize = config.iris.inputSize;\n this.irisEnlarge = 2.3;\n this.skipped = 1000;\n this.detectedFaces = 0;\n }\n\n transformRawCoords(rawCoords, box, angle, rotationMatrix) {\n const boxSize = bounding.getBoxSize({ startPoint: box.startPoint, endPoint: box.endPoint });\n const scaleFactor = [boxSize[0] / this.meshWidth, boxSize[1] / this.meshHeight];\n const coordsScaled = rawCoords.map((coord) => ([\n scaleFactor[0] * (coord[0] - this.meshWidth / 2),\n scaleFactor[1] * (coord[1] - this.meshHeight / 2), coord[2],\n ]));\n const coordsRotationMatrix = util.buildRotationMatrix(angle, [0, 0]);\n const coordsRotated = coordsScaled.map((coord) => ([...util.rotatePoint(coord, coordsRotationMatrix), coord[2]]));\n const inverseRotationMatrix = util.invertTransformMatrix(rotationMatrix);\n const boxCenter = [...bounding.getBoxCenter({ startPoint: box.startPoint, endPoint: box.endPoint }), 1];\n const originalBoxCenter = [\n util.dot(boxCenter, inverseRotationMatrix[0]),\n util.dot(boxCenter, inverseRotationMatrix[1]),\n ];\n return coordsRotated.map((coord) => ([\n coord[0] + originalBoxCenter[0],\n coord[1] + originalBoxCenter[1], coord[2],\n ]));\n }\n\n getLeftToRightEyeDepthDifference(rawCoords) {\n const leftEyeZ = rawCoords[LEFT_EYE_BOUNDS[0]][2];\n const rightEyeZ = rawCoords[RIGHT_EYE_BOUNDS[0]][2];\n return leftEyeZ - rightEyeZ;\n }\n\n // Returns a box describing a cropped region around the eye fit for passing to the iris model.\n getEyeBox(rawCoords, face, eyeInnerCornerIndex, eyeOuterCornerIndex, flip = false) {\n const box = bounding.squarifyBox(bounding.enlargeBox(this.calculateLandmarksBoundingBox([rawCoords[eyeInnerCornerIndex], rawCoords[eyeOuterCornerIndex]]), this.irisEnlarge));\n const boxSize = bounding.getBoxSize(box);\n let crop = tf.image.cropAndResize(face, [[\n box.startPoint[1] / this.meshHeight,\n box.startPoint[0] / this.meshWidth, box.endPoint[1] / this.meshHeight,\n box.endPoint[0] / this.meshWidth,\n ]], [0], [this.irisSize, this.irisSize]);\n if (flip) {\n crop = tf.image.flipLeftRight(crop);\n }\n return { box, boxSize, crop };\n }\n\n // Given a cropped image of an eye, returns the coordinates of the contours surrounding the eye and the iris.\n getEyeCoords(eyeData, eyeBox, eyeBoxSize, flip = false) {\n const eyeRawCoords = [];\n for (let i = 0; i < IRIS_NUM_COORDINATES; i++) {\n const x = eyeData[i * 3];\n const y = eyeData[i * 3 + 1];\n const z = eyeData[i 
* 3 + 2];\n eyeRawCoords.push([\n (flip\n ? (1 - (x / this.irisSize))\n : (x / this.irisSize)) * eyeBoxSize[0] + eyeBox.startPoint[0],\n (y / this.irisSize) * eyeBoxSize[1] + eyeBox.startPoint[1], z,\n ]);\n }\n return { rawCoords: eyeRawCoords, iris: eyeRawCoords.slice(IRIS_IRIS_INDEX) };\n }\n\n // The z-coordinates returned for the iris are unreliable, so we take the z values from the surrounding keypoints.\n getAdjustedIrisCoords(rawCoords, irisCoords, direction) {\n const upperCenterZ = rawCoords[keypoints.MESH_ANNOTATIONS[`${direction}EyeUpper0`][IRIS_UPPER_CENTER_INDEX]][2];\n const lowerCenterZ = rawCoords[keypoints.MESH_ANNOTATIONS[`${direction}EyeLower0`][IRIS_LOWER_CENTER_INDEX]][2];\n const averageZ = (upperCenterZ + lowerCenterZ) / 2;\n // Iris indices: 0: center | 1: right | 2: above | 3: left | 4: below\n return irisCoords.map((coord, i) => {\n let z = averageZ;\n if (i === 2) {\n z = upperCenterZ;\n } else if (i === 4) {\n z = lowerCenterZ;\n }\n return [coord[0], coord[1], z];\n });\n }\n\n async predict(input, config) {\n this.skipped++;\n let useFreshBox = false;\n // run new detector every skipFrames unless we only want box to start with\n let detector;\n if ((this.skipped > config.detector.skipFrames) || !config.mesh.enabled) {\n detector = await this.boundingBoxDetector.getBoundingBoxes(input);\n // don't reset on test image\n if ((input.shape[1] !== 255) && (input.shape[2] !== 255)) this.skipped = 0;\n }\n\n // if detector result count doesn't match current working set, use it to reset current working set\n if (detector && detector.boxes && (detector.boxes.length > 0) && (!config.mesh.enabled || (detector.boxes.length !== this.detectedFaces) && (this.detectedFaces !== config.detector.maxFaces))) {\n this.storedBoxes = [];\n this.detectedFaces = 0;\n for (const possible of detector.boxes) {\n this.storedBoxes.push({ startPoint: possible.box.startPoint.dataSync(), endPoint: possible.box.endPoint.dataSync(), landmarks: possible.landmarks, confidence: possible.confidence });\n }\n if (this.storedBoxes.length > 0) useFreshBox = true;\n }\n\n if (useFreshBox) {\n if (!detector || !detector.boxes || (detector.boxes.length === 0)) {\n this.storedBoxes = [];\n this.detectedFaces = 0;\n return null;\n }\n for (const i in this.storedBoxes) {\n const scaledBox = bounding.scaleBoxCoordinates({ startPoint: this.storedBoxes[i].startPoint, endPoint: this.storedBoxes[i].endPoint }, detector.scaleFactor);\n const enlargedBox = bounding.enlargeBox(scaledBox);\n const landmarks = this.storedBoxes[i].landmarks.arraySync();\n const confidence = this.storedBoxes[i].confidence;\n this.storedBoxes[i] = { ...enlargedBox, confidence, landmarks };\n }\n this.runsWithoutFaceDetector = 0;\n }\n if (detector && detector.boxes) {\n detector.boxes.forEach((prediction) => {\n prediction.box.startPoint.dispose();\n prediction.box.endPoint.dispose();\n prediction.landmarks.dispose();\n });\n }\n\n // console.log(this.skipped, config.detector.skipFrames, this.detectedFaces, config.detector.maxFaces, detector?.boxes.length, this.storedBoxes.length);\n\n let results = tf.tidy(() => this.storedBoxes.map((box, i) => {\n let angle = 0;\n // The facial bounding box landmarks could come either from blazeface (if we are using a fresh box), or from the mesh model (if we are reusing an old box).\n const boxLandmarksFromMeshModel = box.landmarks.length >= LANDMARKS_COUNT;\n let [indexOfMouth, indexOfForehead] = MESH_KEYPOINTS_LINE_OF_SYMMETRY_INDICES;\n if (boxLandmarksFromMeshModel === false) {\n [indexOfMouth, 
indexOfForehead] = BLAZEFACE_KEYPOINTS_LINE_OF_SYMMETRY_INDICES;\n }\n angle = util.computeRotation(box.landmarks[indexOfMouth], box.landmarks[indexOfForehead]);\n const faceCenter = bounding.getBoxCenter({ startPoint: box.startPoint, endPoint: box.endPoint });\n const faceCenterNormalized = [faceCenter[0] / input.shape[2], faceCenter[1] / input.shape[1]];\n let rotatedImage = input;\n let rotationMatrix = util.IDENTITY_MATRIX;\n if (angle !== 0) {\n rotatedImage = tf.image.rotateWithOffset(input, angle, 0, faceCenterNormalized);\n rotationMatrix = util.buildRotationMatrix(-angle, faceCenter);\n }\n const boxCPU = { startPoint: box.startPoint, endPoint: box.endPoint };\n const face = bounding.cutBoxFromImageAndResize(boxCPU, rotatedImage, [this.meshHeight, this.meshWidth]).div(255);\n\n // if we're not going to produce mesh, don't spend time with further processing\n if (!config.mesh.enabled) {\n const prediction = {\n coords: null,\n box,\n faceConfidence: null,\n confidence: box.confidence,\n image: face,\n };\n return prediction;\n }\n\n // The first returned tensor represents facial contours, which are included in the coordinates.\n const [, confidence, coords] = this.meshDetector.predict(face);\n const confidenceVal = confidence.dataSync()[0];\n confidence.dispose();\n if (confidenceVal < config.detector.minConfidence) {\n coords.dispose();\n return null;\n }\n const coordsReshaped = tf.reshape(coords, [-1, 3]);\n let rawCoords = coordsReshaped.arraySync();\n if (config.iris.enabled) {\n const { box: leftEyeBox, boxSize: leftEyeBoxSize, crop: leftEyeCrop } = this.getEyeBox(rawCoords, face, LEFT_EYE_BOUNDS[0], LEFT_EYE_BOUNDS[1], true);\n const { box: rightEyeBox, boxSize: rightEyeBoxSize, crop: rightEyeCrop } = this.getEyeBox(rawCoords, face, RIGHT_EYE_BOUNDS[0], RIGHT_EYE_BOUNDS[1]);\n const eyePredictions = (this.irisModel.predict(tf.concat([leftEyeCrop, rightEyeCrop])));\n const eyePredictionsData = eyePredictions.dataSync();\n eyePredictions.dispose();\n const leftEyeData = eyePredictionsData.slice(0, IRIS_NUM_COORDINATES * 3);\n const { rawCoords: leftEyeRawCoords, iris: leftIrisRawCoords } = this.getEyeCoords(leftEyeData, leftEyeBox, leftEyeBoxSize, true);\n const rightEyeData = eyePredictionsData.slice(IRIS_NUM_COORDINATES * 3);\n const { rawCoords: rightEyeRawCoords, iris: rightIrisRawCoords } = this.getEyeCoords(rightEyeData, rightEyeBox, rightEyeBoxSize);\n const leftToRightEyeDepthDifference = this.getLeftToRightEyeDepthDifference(rawCoords);\n if (Math.abs(leftToRightEyeDepthDifference) < 30) { // User is looking straight ahead.\n replaceRawCoordinates(rawCoords, leftEyeRawCoords, 'left');\n replaceRawCoordinates(rawCoords, rightEyeRawCoords, 'right');\n // If the user is looking to the left or to the right, the iris coordinates tend to diverge too much from the mesh coordinates for them to be merged. 
So we only update a single contour line above and below the eye.\n } else if (leftToRightEyeDepthDifference < 1) { // User is looking towards the right.\n replaceRawCoordinates(rawCoords, leftEyeRawCoords, 'left', ['EyeUpper0', 'EyeLower0']);\n } else { // User is looking towards the left.\n replaceRawCoordinates(rawCoords, rightEyeRawCoords, 'right', ['EyeUpper0', 'EyeLower0']);\n }\n const adjustedLeftIrisCoords = this.getAdjustedIrisCoords(rawCoords, leftIrisRawCoords, 'left');\n const adjustedRightIrisCoords = this.getAdjustedIrisCoords(rawCoords, rightIrisRawCoords, 'right');\n rawCoords = rawCoords.concat(adjustedLeftIrisCoords).concat(adjustedRightIrisCoords);\n }\n const transformedCoordsData = this.transformRawCoords(rawCoords, box, angle, rotationMatrix);\n tf.dispose(rawCoords);\n const landmarksBox = bounding.enlargeBox(this.calculateLandmarksBoundingBox(transformedCoordsData));\n const transformedCoords = tf.tensor2d(transformedCoordsData);\n const prediction = {\n coords: transformedCoords,\n box: landmarksBox,\n faceConfidence: confidenceVal,\n confidence: box.confidence,\n image: face,\n };\n this.storedBoxes[i] = { ...landmarksBox, landmarks: transformedCoords.arraySync(), confidence: box.confidence, faceConfidence: confidenceVal };\n return prediction;\n }));\n results = results.filter((a) => a !== null);\n this.detectedFaces = results.length;\n return results;\n }\n\n calculateLandmarksBoundingBox(landmarks) {\n const xs = landmarks.map((d) => d[0]);\n const ys = landmarks.map((d) => d[1]);\n const startPoint = [Math.min(...xs), Math.min(...ys)];\n const endPoint = [Math.max(...xs), Math.max(...ys)];\n return { startPoint, endPoint, landmarks };\n }\n}\nexports.Pipeline = Pipeline;\n", "exports.UV_COORDS = [\n [0.499976992607117, 0.652534008026123],\n [0.500025987625122, 0.547487020492554],\n [0.499974012374878, 0.602371990680695],\n [0.482113003730774, 0.471979022026062],\n [0.500150978565216, 0.527155995368958],\n [0.499909996986389, 0.498252987861633],\n [0.499523013830185, 0.40106201171875],\n [0.289712011814117, 0.380764007568359],\n [0.499954998493195, 0.312398016452789],\n [0.499987006187439, 0.269918978214264],\n [0.500023007392883, 0.107050001621246],\n [0.500023007392883, 0.666234016418457],\n [0.5000159740448, 0.679224014282227],\n [0.500023007392883, 0.692348003387451],\n [0.499976992607117, 0.695277988910675],\n [0.499976992607117, 0.70593398809433],\n [0.499976992607117, 0.719385027885437],\n [0.499976992607117, 0.737019002437592],\n [0.499967992305756, 0.781370997428894],\n [0.499816000461578, 0.562981009483337],\n [0.473773002624512, 0.573909997940063],\n [0.104906998574734, 0.254140973091125],\n [0.365929991006851, 0.409575998783112],\n [0.338757991790771, 0.41302502155304],\n [0.311120003461838, 0.409460008144379],\n [0.274657994508743, 0.389131009578705],\n [0.393361985683441, 0.403706014156342],\n [0.345234006643295, 0.344011008739471],\n [0.370094001293182, 0.346076011657715],\n [0.319321990013123, 0.347265005111694],\n [0.297903001308441, 0.353591024875641],\n [0.24779200553894, 0.410809993743896],\n [0.396889001131058, 0.842755019664764],\n [0.280097991228104, 0.375599980354309],\n [0.106310002505779, 0.399955987930298],\n [0.2099249958992, 0.391353011131287],\n [0.355807989835739, 0.534406006336212],\n [0.471751004457474, 0.65040397644043],\n [0.474155008792877, 0.680191993713379],\n [0.439785003662109, 0.657229006290436],\n [0.414617002010345, 0.66654098033905],\n [0.450374007225037, 0.680860996246338],\n [0.428770989179611, 0.682690978050232],\n 
[0.374971002340317, 0.727805018424988],\n [0.486716985702515, 0.547628998756409],\n [0.485300987958908, 0.527395009994507],\n [0.257764995098114, 0.314490020275116],\n [0.401223003864288, 0.455172002315521],\n [0.429818987846375, 0.548614978790283],\n [0.421351999044418, 0.533740997314453],\n [0.276895999908447, 0.532056987285614],\n [0.483370006084442, 0.499586999416351],\n [0.33721199631691, 0.282882988452911],\n [0.296391993761063, 0.293242990970612],\n [0.169294998049736, 0.193813979625702],\n [0.447580009698868, 0.302609980106354],\n [0.392390012741089, 0.353887975215912],\n [0.354490011930466, 0.696784019470215],\n [0.067304998636246, 0.730105042457581],\n [0.442739009857178, 0.572826027870178],\n [0.457098007202148, 0.584792017936707],\n [0.381974011659622, 0.694710969924927],\n [0.392388999462128, 0.694203019142151],\n [0.277076005935669, 0.271932005882263],\n [0.422551989555359, 0.563233017921448],\n [0.385919004678726, 0.281364023685455],\n [0.383103013038635, 0.255840003490448],\n [0.331431001424789, 0.119714021682739],\n [0.229923993349075, 0.232002973556519],\n [0.364500999450684, 0.189113974571228],\n [0.229622006416321, 0.299540996551514],\n [0.173287004232407, 0.278747975826263],\n [0.472878992557526, 0.666198015213013],\n [0.446828007698059, 0.668527007102966],\n [0.422762006521225, 0.673889994621277],\n [0.445307999849319, 0.580065965652466],\n [0.388103008270264, 0.693961024284363],\n [0.403039008378983, 0.706539988517761],\n [0.403629004955292, 0.693953037261963],\n [0.460041999816895, 0.557139039039612],\n [0.431158006191254, 0.692366003990173],\n [0.452181994915009, 0.692366003990173],\n [0.475387006998062, 0.692366003990173],\n [0.465828001499176, 0.779190003871918],\n [0.472328990697861, 0.736225962638855],\n [0.473087012767792, 0.717857003211975],\n [0.473122000694275, 0.704625964164734],\n [0.473033010959625, 0.695277988910675],\n [0.427942007780075, 0.695277988910675],\n [0.426479011774063, 0.703539967536926],\n [0.423162013292313, 0.711845993995667],\n [0.4183090031147, 0.720062971115112],\n [0.390094995498657, 0.639572978019714],\n [0.013953999616206, 0.560034036636353],\n [0.499913990497589, 0.58014702796936],\n [0.413199990987778, 0.69539999961853],\n [0.409626007080078, 0.701822996139526],\n [0.468080013990402, 0.601534962654114],\n [0.422728985548019, 0.585985004901886],\n [0.463079988956451, 0.593783974647522],\n [0.37211999297142, 0.47341400384903],\n [0.334562003612518, 0.496073007583618],\n [0.411671012639999, 0.546965003013611],\n [0.242175996303558, 0.14767599105835],\n [0.290776997804642, 0.201445996761322],\n [0.327338010072708, 0.256527006626129],\n [0.399509996175766, 0.748921036720276],\n [0.441727995872498, 0.261676013469696],\n [0.429764986038208, 0.187834024429321],\n [0.412198007106781, 0.108901023864746],\n [0.288955003023148, 0.398952007293701],\n [0.218936994671822, 0.435410976409912],\n [0.41278201341629, 0.398970007896423],\n [0.257135003805161, 0.355440020561218],\n [0.427684992551804, 0.437960982322693],\n [0.448339998722076, 0.536936044692993],\n [0.178560003638268, 0.45755398273468],\n [0.247308000922203, 0.457193970680237],\n [0.286267012357712, 0.467674970626831],\n [0.332827985286713, 0.460712015628815],\n [0.368755996227264, 0.447206974029541],\n [0.398963987827301, 0.432654976844788],\n [0.476410001516342, 0.405806005001068],\n [0.189241006970406, 0.523923993110657],\n [0.228962004184723, 0.348950982093811],\n [0.490725994110107, 0.562400996685028],\n [0.404670000076294, 0.485132992267609],\n [0.019469000399113, 
0.401564002037048],\n [0.426243007183075, 0.420431017875671],\n [0.396993011236191, 0.548797011375427],\n [0.266469985246658, 0.376977026462555],\n [0.439121007919312, 0.51895797252655],\n [0.032313998788595, 0.644356966018677],\n [0.419054001569748, 0.387154996395111],\n [0.462783008813858, 0.505746960639954],\n [0.238978996872902, 0.779744982719421],\n [0.198220998048782, 0.831938028335571],\n [0.107550002634525, 0.540755033493042],\n [0.183610007166862, 0.740257024765015],\n [0.134409993886948, 0.333683013916016],\n [0.385764002799988, 0.883153975009918],\n [0.490967005491257, 0.579378008842468],\n [0.382384985685349, 0.508572995662689],\n [0.174399003386497, 0.397670984268188],\n [0.318785011768341, 0.39623498916626],\n [0.343364000320435, 0.400596976280212],\n [0.396100014448166, 0.710216999053955],\n [0.187885001301765, 0.588537991046906],\n [0.430987000465393, 0.944064974784851],\n [0.318993002176285, 0.898285031318665],\n [0.266247987747192, 0.869701027870178],\n [0.500023007392883, 0.190576016902924],\n [0.499976992607117, 0.954452991485596],\n [0.366169989109039, 0.398822009563446],\n [0.393207013607025, 0.39553701877594],\n [0.410373002290726, 0.391080021858215],\n [0.194993004202843, 0.342101991176605],\n [0.388664990663528, 0.362284004688263],\n [0.365961998701096, 0.355970978736877],\n [0.343364000320435, 0.355356991291046],\n [0.318785011768341, 0.35834002494812],\n [0.301414996385574, 0.363156020641327],\n [0.058132998645306, 0.319076001644135],\n [0.301414996385574, 0.387449026107788],\n [0.499987989664078, 0.618434011936188],\n [0.415838003158569, 0.624195992946625],\n [0.445681989192963, 0.566076993942261],\n [0.465844005346298, 0.620640993118286],\n [0.49992299079895, 0.351523995399475],\n [0.288718998432159, 0.819945991039276],\n [0.335278987884521, 0.852819979190826],\n [0.440512001514435, 0.902418971061707],\n [0.128294005990028, 0.791940987110138],\n [0.408771991729736, 0.373893976211548],\n [0.455606997013092, 0.451801002025604],\n [0.499877005815506, 0.908990025520325],\n [0.375436991453171, 0.924192011356354],\n [0.11421000212431, 0.615022003650665],\n [0.448662012815475, 0.695277988910675],\n [0.4480200111866, 0.704632043838501],\n [0.447111994028091, 0.715808033943176],\n [0.444831997156143, 0.730794012546539],\n [0.430011987686157, 0.766808986663818],\n [0.406787008047104, 0.685672998428345],\n [0.400738000869751, 0.681069016456604],\n [0.392399996519089, 0.677703022956848],\n [0.367855995893478, 0.663918972015381],\n [0.247923001646996, 0.601333022117615],\n [0.452769994735718, 0.420849978923798],\n [0.43639200925827, 0.359887003898621],\n [0.416164010763168, 0.368713974952698],\n [0.413385987281799, 0.692366003990173],\n [0.228018000721931, 0.683571994304657],\n [0.468268007040024, 0.352671027183533],\n [0.411361992359161, 0.804327011108398],\n [0.499989002943039, 0.469825029373169],\n [0.479153990745544, 0.442654013633728],\n [0.499974012374878, 0.439637005329132],\n [0.432112008333206, 0.493588984012604],\n [0.499886006116867, 0.866917014122009],\n [0.49991300702095, 0.821729004383087],\n [0.456548988819122, 0.819200992584229],\n [0.344549000263214, 0.745438992977142],\n [0.37890899181366, 0.574010014533997],\n [0.374292999505997, 0.780184984207153],\n [0.319687992334366, 0.570737957954407],\n [0.357154995203018, 0.604269981384277],\n [0.295284003019333, 0.621580958366394],\n [0.447750002145767, 0.862477004528046],\n [0.410986006259918, 0.508723020553589],\n [0.31395098567009, 0.775308012962341],\n [0.354128003120422, 0.812552988529205],\n 
[0.324548006057739, 0.703992962837219],\n [0.189096003770828, 0.646299958229065],\n [0.279776990413666, 0.71465802192688],\n [0.1338230073452, 0.682700991630554],\n [0.336768001317978, 0.644733011722565],\n [0.429883986711502, 0.466521978378296],\n [0.455527991056442, 0.548622965812683],\n [0.437114000320435, 0.558896005153656],\n [0.467287987470627, 0.529924988746643],\n [0.414712011814117, 0.335219979286194],\n [0.37704598903656, 0.322777986526489],\n [0.344107985496521, 0.320150971412659],\n [0.312875986099243, 0.32233202457428],\n [0.283526003360748, 0.333190023899078],\n [0.241245999932289, 0.382785975933075],\n [0.102986000478268, 0.468762993812561],\n [0.267612010240555, 0.424560010433197],\n [0.297879010438919, 0.433175981044769],\n [0.333433985710144, 0.433878004550934],\n [0.366427004337311, 0.426115989685059],\n [0.396012008190155, 0.416696012020111],\n [0.420121014118195, 0.41022801399231],\n [0.007561000064015, 0.480777025222778],\n [0.432949006557465, 0.569517970085144],\n [0.458638995885849, 0.479089021682739],\n [0.473466008901596, 0.545744001865387],\n [0.476087987422943, 0.563830018043518],\n [0.468472003936768, 0.555056989192963],\n [0.433990985155106, 0.582361996173859],\n [0.483518004417419, 0.562983989715576],\n [0.482482999563217, 0.57784903049469],\n [0.42645001411438, 0.389798998832703],\n [0.438998997211456, 0.39649498462677],\n [0.450067013502121, 0.400434017181396],\n [0.289712011814117, 0.368252992630005],\n [0.276670008897781, 0.363372981548309],\n [0.517862021923065, 0.471948027610779],\n [0.710287988185883, 0.380764007568359],\n [0.526226997375488, 0.573909997940063],\n [0.895093023777008, 0.254140973091125],\n [0.634069979190826, 0.409575998783112],\n [0.661242008209229, 0.41302502155304],\n [0.688880026340485, 0.409460008144379],\n [0.725341975688934, 0.389131009578705],\n [0.606630027294159, 0.40370500087738],\n [0.654766023159027, 0.344011008739471],\n [0.629905998706818, 0.346076011657715],\n [0.680678009986877, 0.347265005111694],\n [0.702096998691559, 0.353591024875641],\n [0.75221198797226, 0.410804986953735],\n [0.602918028831482, 0.842862963676453],\n [0.719901978969574, 0.375599980354309],\n [0.893692970275879, 0.399959981441498],\n [0.790081977844238, 0.391354024410248],\n [0.643998026847839, 0.534487962722778],\n [0.528249025344849, 0.65040397644043],\n [0.525849997997284, 0.680191040039062],\n [0.560214996337891, 0.657229006290436],\n [0.585384011268616, 0.66654098033905],\n [0.549625992774963, 0.680860996246338],\n [0.57122802734375, 0.682691991329193],\n [0.624852001667023, 0.72809898853302],\n [0.513050019741058, 0.547281980514526],\n [0.51509702205658, 0.527251958847046],\n [0.742246985435486, 0.314507007598877],\n [0.598631024360657, 0.454979002475739],\n [0.570338010787964, 0.548575043678284],\n [0.578631997108459, 0.533622980117798],\n [0.723087012767792, 0.532054007053375],\n [0.516445994377136, 0.499638974666595],\n [0.662801027297974, 0.282917976379395],\n [0.70362401008606, 0.293271005153656],\n [0.830704987049103, 0.193813979625702],\n [0.552385985851288, 0.302568018436432],\n [0.607609987258911, 0.353887975215912],\n [0.645429015159607, 0.696707010269165],\n [0.932694971561432, 0.730105042457581],\n [0.557260990142822, 0.572826027870178],\n [0.542901992797852, 0.584792017936707],\n [0.6180260181427, 0.694710969924927],\n [0.607590973377228, 0.694203019142151],\n [0.722943007946014, 0.271963000297546],\n [0.577413976192474, 0.563166975975037],\n [0.614082992076874, 0.281386971473694],\n [0.616907000541687, 0.255886018276215],\n 
[0.668509006500244, 0.119913995265961],\n [0.770092010498047, 0.232020974159241],\n [0.635536015033722, 0.189248979091644],\n [0.77039098739624, 0.299556016921997],\n [0.826722025871277, 0.278755009174347],\n [0.527121007442474, 0.666198015213013],\n [0.553171992301941, 0.668527007102966],\n [0.577238023281097, 0.673889994621277],\n [0.554691970348358, 0.580065965652466],\n [0.611896991729736, 0.693961024284363],\n [0.59696102142334, 0.706539988517761],\n [0.596370995044708, 0.693953037261963],\n [0.539958000183105, 0.557139039039612],\n [0.568841993808746, 0.692366003990173],\n [0.547818005084991, 0.692366003990173],\n [0.52461302280426, 0.692366003990173],\n [0.534089982509613, 0.779141008853912],\n [0.527670979499817, 0.736225962638855],\n [0.526912987232208, 0.717857003211975],\n [0.526877999305725, 0.704625964164734],\n [0.526966989040375, 0.695277988910675],\n [0.572058022022247, 0.695277988910675],\n [0.573521018028259, 0.703539967536926],\n [0.57683801651001, 0.711845993995667],\n [0.581691026687622, 0.720062971115112],\n [0.609944999217987, 0.639909982681274],\n [0.986046016216278, 0.560034036636353],\n [0.5867999792099, 0.69539999961853],\n [0.590372025966644, 0.701822996139526],\n [0.531915009021759, 0.601536989212036],\n [0.577268004417419, 0.585934996604919],\n [0.536915004253387, 0.593786001205444],\n [0.627542972564697, 0.473352015018463],\n [0.665585994720459, 0.495950996875763],\n [0.588353991508484, 0.546862006187439],\n [0.757824003696442, 0.14767599105835],\n [0.709249973297119, 0.201507985591888],\n [0.672684013843536, 0.256581008434296],\n [0.600408971309662, 0.74900496006012],\n [0.55826598405838, 0.261672019958496],\n [0.570303976535797, 0.187870979309082],\n [0.588165998458862, 0.109044015407562],\n [0.711045026779175, 0.398952007293701],\n [0.781069993972778, 0.435405015945435],\n [0.587247014045715, 0.398931980133057],\n [0.742869973182678, 0.355445981025696],\n [0.572156012058258, 0.437651991844177],\n [0.55186802148819, 0.536570012569427],\n [0.821442008018494, 0.457556009292603],\n [0.752701997756958, 0.457181990146637],\n [0.71375697851181, 0.467626988887787],\n [0.66711300611496, 0.460672974586487],\n [0.631101012229919, 0.447153985500336],\n [0.6008620262146, 0.432473003864288],\n [0.523481011390686, 0.405627012252808],\n [0.810747981071472, 0.523926019668579],\n [0.771045982837677, 0.348959028720856],\n [0.509127020835876, 0.562718033790588],\n [0.595292985439301, 0.485023975372314],\n [0.980530977249146, 0.401564002037048],\n [0.573499977588654, 0.420000016689301],\n [0.602994978427887, 0.548687994480133],\n [0.733529984951019, 0.376977026462555],\n [0.560611009597778, 0.519016981124878],\n [0.967685997486115, 0.644356966018677],\n [0.580985009670258, 0.387160003185272],\n [0.537728011608124, 0.505385041236877],\n [0.760966002941132, 0.779752969741821],\n [0.801778972148895, 0.831938028335571],\n [0.892440974712372, 0.54076099395752],\n [0.816350996494293, 0.740260004997253],\n [0.865594983100891, 0.333687007427216],\n [0.614073991775513, 0.883246004581451],\n [0.508952975273132, 0.579437971115112],\n [0.617941975593567, 0.508316040039062],\n [0.825608015060425, 0.397674977779388],\n [0.681214988231659, 0.39623498916626],\n [0.656635999679565, 0.400596976280212],\n [0.603900015354156, 0.710216999053955],\n [0.81208598613739, 0.588539004325867],\n [0.56801301240921, 0.944564998149872],\n [0.681007981300354, 0.898285031318665],\n [0.733752012252808, 0.869701027870178],\n [0.633830010890961, 0.398822009563446],\n [0.606792986392975, 0.39553701877594],\n 
[0.589659988880157, 0.391062021255493],\n [0.805015981197357, 0.342108011245728],\n [0.611334979534149, 0.362284004688263],\n [0.634037971496582, 0.355970978736877],\n [0.656635999679565, 0.355356991291046],\n [0.681214988231659, 0.35834002494812],\n [0.698584973812103, 0.363156020641327],\n [0.941866993904114, 0.319076001644135],\n [0.698584973812103, 0.387449026107788],\n [0.584177017211914, 0.624107003211975],\n [0.554318010807037, 0.566076993942261],\n [0.534153997898102, 0.62064003944397],\n [0.711217999458313, 0.819975018501282],\n [0.664629995822906, 0.852871000766754],\n [0.559099972248077, 0.902631998062134],\n [0.871706008911133, 0.791940987110138],\n [0.591234028339386, 0.373893976211548],\n [0.544341027736664, 0.451583981513977],\n [0.624562978744507, 0.924192011356354],\n [0.88577002286911, 0.615028977394104],\n [0.551338016986847, 0.695277988910675],\n [0.551980018615723, 0.704632043838501],\n [0.552887976169586, 0.715808033943176],\n [0.555167973041534, 0.730794012546539],\n [0.569944024085999, 0.767035007476807],\n [0.593203008174896, 0.685675978660583],\n [0.599261999130249, 0.681069016456604],\n [0.607599973678589, 0.677703022956848],\n [0.631937980651855, 0.663500010967255],\n [0.752032995223999, 0.601315021514893],\n [0.547226011753082, 0.420395016670227],\n [0.563543975353241, 0.359827995300293],\n [0.583841025829315, 0.368713974952698],\n [0.586614012718201, 0.692366003990173],\n [0.771915018558502, 0.683578014373779],\n [0.531597018241882, 0.352482974529266],\n [0.588370978832245, 0.804440975189209],\n [0.52079701423645, 0.442565023899078],\n [0.567984998226166, 0.493479013442993],\n [0.543282985687256, 0.819254994392395],\n [0.655317008495331, 0.745514988899231],\n [0.621008992195129, 0.574018001556396],\n [0.625559985637665, 0.78031200170517],\n [0.680198013782501, 0.570719003677368],\n [0.64276397228241, 0.604337990283966],\n [0.704662978649139, 0.621529996395111],\n [0.552012026309967, 0.862591981887817],\n [0.589071989059448, 0.508637011051178],\n [0.685944974422455, 0.775357007980347],\n [0.645735025405884, 0.812640011310577],\n [0.675342977046967, 0.703978002071381],\n [0.810858011245728, 0.646304965019226],\n [0.72012197971344, 0.714666962623596],\n [0.866151988506317, 0.682704985141754],\n [0.663187026977539, 0.644596993923187],\n [0.570082008838654, 0.466325998306274],\n [0.544561982154846, 0.548375964164734],\n [0.562758982181549, 0.558784961700439],\n [0.531987011432648, 0.530140042304993],\n [0.585271000862122, 0.335177004337311],\n [0.622952997684479, 0.32277899980545],\n [0.655896008014679, 0.320163011550903],\n [0.687132000923157, 0.322345972061157],\n [0.716481983661652, 0.333200991153717],\n [0.758756995201111, 0.382786989212036],\n [0.897013008594513, 0.468769013881683],\n [0.732392013072968, 0.424547016620636],\n [0.70211398601532, 0.433162987232208],\n [0.66652500629425, 0.433866024017334],\n [0.633504986763, 0.426087975502014],\n [0.603875994682312, 0.416586995124817],\n [0.579657971858978, 0.409945011138916],\n [0.992439985275269, 0.480777025222778],\n [0.567192018032074, 0.569419980049133],\n [0.54136598110199, 0.478899002075195],\n [0.526564002037048, 0.546118021011353],\n [0.523913025856018, 0.563830018043518],\n [0.531529009342194, 0.555056989192963],\n [0.566035985946655, 0.582329034805298],\n [0.51631098985672, 0.563053965568542],\n [0.5174720287323, 0.577877044677734],\n [0.573594987392426, 0.389806985855103],\n [0.560697972774506, 0.395331978797913],\n [0.549755990505219, 0.399751007556915],\n [0.710287988185883, 0.368252992630005],\n 
[0.723330020904541, 0.363372981548309],\n];\n", "import { tf, loadGraphModel } from '../tf.js';\nimport * as blazeface from './blazeface';\nimport * as keypoints from './keypoints';\nimport * as pipe from './facepipeline';\nimport * as uv_coords from './uvcoords';\nimport * as triangulation from './triangulation';\n\nclass MediaPipeFaceMesh {\n constructor(blazeFace, blazeMeshModel, irisModel, config) {\n this.pipeline = new pipe.Pipeline(blazeFace, blazeMeshModel, irisModel, config);\n if (config) this.config = config;\n }\n\n async estimateFaces(input, config) {\n if (config) this.config = config;\n const predictions = await this.pipeline.predict(input, config);\n const results = [];\n for (const prediction of (predictions || [])) {\n // guard against disposed tensors on long running operations such as pause in middle of processing\n if (prediction.isDisposedInternal) continue;\n const mesh = prediction.coords ? prediction.coords.arraySync() : null;\n const annotations = {};\n if (mesh && mesh.length > 0) {\n for (const key in keypoints.MESH_ANNOTATIONS) {\n if (this.config.iris.enabled || key.includes('Iris') === false) {\n annotations[key] = keypoints.MESH_ANNOTATIONS[key].map((index) => mesh[index]);\n }\n }\n }\n results.push({\n confidence: prediction.confidence || 0,\n box: prediction.box ? [prediction.box.startPoint[0], prediction.box.startPoint[1], prediction.box.endPoint[0] - prediction.box.startPoint[0], prediction.box.endPoint[1] - prediction.box.startPoint[1]] : 0,\n mesh,\n annotations,\n image: prediction.image ? tf.clone(prediction.image) : null,\n });\n if (prediction.coords) prediction.coords.dispose();\n if (prediction.image) prediction.image.dispose();\n }\n return results;\n }\n}\n\nasync function load(config) {\n const models = await Promise.all([\n blazeface.load(config),\n loadGraphModel(config.mesh.modelPath, { fromTFHub: config.mesh.modelPath.includes('tfhub.dev') }),\n loadGraphModel(config.iris.modelPath, { fromTFHub: config.iris.modelPath.includes('tfhub.dev') }),\n ]);\n const faceMesh = new MediaPipeFaceMesh(models[0], models[1], models[2], config);\n // eslint-disable-next-line no-console\n console.log(`Human: load model: ${config.mesh.modelPath.match(/\\/(.*)\\./)[1]}`);\n // eslint-disable-next-line no-console\n console.log(`Human: load model: ${config.iris.modelPath.match(/\\/(.*)\\./)[1]}`);\n return faceMesh;\n}\n\nexports.load = load;\nexports.MediaPipeFaceMesh = MediaPipeFaceMesh;\nexports.uv_coords = uv_coords;\nexports.triangulation = triangulation.default;\n", "const profileData = {};\n\nfunction profile(name, data) {\n if (!data || !data.kernels) return;\n const maxResults = 5;\n const time = data.kernels\n .filter((a) => a.kernelTimeMs > 0)\n .reduce((a, b) => a += b.kernelTimeMs, 0);\n const slowest = data.kernels\n .map((a, i) => { a.id = i; return a; })\n .filter((a) => a.kernelTimeMs > 0)\n .sort((a, b) => b.kernelTimeMs - a.kernelTimeMs);\n const largest = data.kernels\n .map((a, i) => { a.id = i; return a; })\n .filter((a) => a.totalBytesSnapshot > 0)\n .sort((a, b) => b.totalBytesSnapshot - a.totalBytesSnapshot);\n if (slowest.length > maxResults) slowest.length = maxResults;\n if (largest.length > maxResults) largest.length = maxResults;\n const res = { newBytes: data.newBytes, newTensors: data.newTensors, peakBytes: data.peakBytes, numKernelOps: data.kernels.length, timeKernelOps: time, slowestKernelOps: slowest, largestKernelOps: largest };\n profileData[name] = res;\n // eslint-disable-next-line no-console\n console.log('Human 
profiler', name, res);\n}\n\nexports.run = profile;\n", "import { tf, loadGraphModel } from '../tf.js';\nimport * as profile from '../profile.js';\n\nconst models = {};\nlet last = { age: 0 };\nlet frame = Number.MAX_SAFE_INTEGER;\n\n// tuning values\nconst zoom = [0, 0]; // 0..1 meaning 0%..100%\n\nasync function load(config) {\n if (!models.age) {\n models.age = await loadGraphModel(config.face.age.modelPath);\n // eslint-disable-next-line no-console\n console.log(`Human: load model: ${config.face.age.modelPath.match(/\\/(.*)\\./)[1]}`);\n }\n return models.age;\n}\n\nasync function predict(image, config) {\n if ((frame < config.face.age.skipFrames) && last.age && (last.age > 0)) {\n frame += 1;\n return last;\n }\n frame = 0;\n return new Promise(async (resolve) => {\n const box = [[\n (image.shape[1] * zoom[0]) / image.shape[1],\n (image.shape[2] * zoom[1]) / image.shape[2],\n (image.shape[1] - (image.shape[1] * zoom[0])) / image.shape[1],\n (image.shape[2] - (image.shape[2] * zoom[1])) / image.shape[2],\n ]];\n const resize = tf.image.cropAndResize(image, box, [0], [config.face.age.inputSize, config.face.age.inputSize]);\n // const resize = tf.image.resizeBilinear(image, [config.face.age.inputSize, config.face.age.inputSize], false);\n const enhance = tf.mul(resize, [255.0]);\n tf.dispose(resize);\n\n let ageT;\n const obj = {};\n\n if (!config.profile) {\n if (config.face.age.enabled) ageT = await models.age.predict(enhance);\n } else {\n const profileAge = config.face.age.enabled ? await tf.profile(() => models.age.predict(enhance)) : {};\n ageT = profileAge.result.clone();\n profileAge.result.dispose();\n profile.run('age', profileAge);\n }\n enhance.dispose();\n\n if (ageT) {\n const data = ageT.dataSync();\n obj.age = Math.trunc(10 * data[0]) / 10;\n }\n ageT.dispose();\n\n last = obj;\n resolve(obj);\n });\n}\n\nexports.predict = predict;\nexports.load = load;\n", "import { tf, loadGraphModel } from '../tf.js';\nimport * as profile from '../profile.js';\n\nconst models = {};\nlet last = { gender: '' };\nlet frame = Number.MAX_SAFE_INTEGER;\nlet alternative = false;\n\n// tuning values\nconst zoom = [0, 0]; // 0..1 meaning 0%..100%\nconst rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when converting to grayscale\n\nasync function load(config) {\n if (!models.gender) {\n models.gender = await loadGraphModel(config.face.gender.modelPath);\n alternative = models.gender.inputs[0].shape[3] === 1;\n // eslint-disable-next-line no-console\n console.log(`Human: load model: ${config.face.gender.modelPath.match(/\\/(.*)\\./)[1]}`);\n }\n return models.gender;\n}\n\nasync function predict(image, config) {\n if ((frame < config.face.gender.skipFrames) && last.gender !== '') {\n frame += 1;\n return last;\n }\n frame = 0;\n return new Promise(async (resolve) => {\n const box = [[\n (image.shape[1] * zoom[0]) / image.shape[1],\n (image.shape[2] * zoom[1]) / image.shape[2],\n (image.shape[1] - (image.shape[1] * zoom[0])) / image.shape[1],\n (image.shape[2] - (image.shape[2] * zoom[1])) / image.shape[2],\n ]];\n const resize = tf.image.cropAndResize(image, box, [0], [config.face.gender.inputSize, config.face.gender.inputSize]);\n let enhance;\n if (alternative) {\n enhance = tf.tidy(() => {\n const [red, green, blue] = tf.split(resize, 3, 3);\n const redNorm = tf.mul(red, rgb[0]);\n const greenNorm = tf.mul(green, rgb[1]);\n const blueNorm = tf.mul(blue, rgb[2]);\n const grayscale = tf.addN([redNorm, greenNorm, blueNorm]);\n return grayscale.sub(0.5).mul(2);\n });\n } else 
{\n enhance = tf.mul(resize, [255.0]);\n }\n // const resize = tf.image.resizeBilinear(image, [config.face.age.inputSize, config.face.age.inputSize], false);\n tf.dispose(resize);\n\n let genderT;\n const obj = {};\n\n if (!config.profile) {\n if (config.face.gender.enabled) genderT = await models.gender.predict(enhance);\n } else {\n const profileGender = config.face.gender.enabled ? await tf.profile(() => models.gender.predict(enhance)) : {};\n genderT = profileGender.result.clone();\n profileGender.result.dispose();\n profile.run('gender', profileGender);\n }\n enhance.dispose();\n\n if (genderT) {\n const data = genderT.dataSync();\n if (alternative) {\n // returns two values 0..1, bigger one is prediction\n const confidence = Math.trunc(100 * Math.abs(data[0] - data[1])) / 100;\n if (confidence > config.face.gender.minConfidence) {\n obj.gender = data[0] > data[1] ? 'female' : 'male';\n obj.confidence = confidence;\n }\n } else {\n // returns one value 0..1, .5 is prediction threshold\n const confidence = Math.trunc(200 * Math.abs((data[0] - 0.5))) / 100;\n if (confidence > config.face.gender.minConfidence) {\n obj.gender = data[0] <= 0.5 ? 'female' : 'male';\n obj.confidence = Math.min(0.99, confidence);\n }\n }\n }\n genderT.dispose();\n\n last = obj;\n resolve(obj);\n });\n}\n\nexports.predict = predict;\nexports.load = load;\n", "import { tf, loadGraphModel } from '../tf.js';\nimport * as profile from '../profile.js';\n\nconst annotations = ['angry', 'disgust', 'fear', 'happy', 'sad', 'surpise', 'neutral'];\nconst models = {};\nlet last = [];\nlet frame = Number.MAX_SAFE_INTEGER;\n\n// tuning values\nconst zoom = [0, 0]; // 0..1 meaning 0%..100%\nconst rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when converting to grayscale\nconst scale = 1; // score multiplication factor\n\nasync function load(config) {\n if (!models.emotion) {\n models.emotion = await loadGraphModel(config.face.emotion.modelPath);\n // eslint-disable-next-line no-console\n console.log(`Human: load model: ${config.face.emotion.modelPath.match(/\\/(.*)\\./)[1]}`);\n }\n return models.emotion;\n}\n\nasync function predict(image, config) {\n if ((frame < config.face.emotion.skipFrames) && (last.length > 0)) {\n frame += 1;\n return last;\n }\n frame = 0;\n return new Promise(async (resolve) => {\n const box = [[\n (image.shape[1] * zoom[0]) / image.shape[1],\n (image.shape[2] * zoom[1]) / image.shape[2],\n (image.shape[1] - (image.shape[1] * zoom[0])) / image.shape[1],\n (image.shape[2] - (image.shape[2] * zoom[1])) / image.shape[2],\n ]];\n const resize = tf.image.cropAndResize(image, box, [0], [config.face.emotion.inputSize, config.face.emotion.inputSize]);\n // const resize = tf.image.resizeBilinear(image, [config.face.emotion.inputSize, config.face.emotion.inputSize], false);\n const [red, green, blue] = tf.split(resize, 3, 3);\n resize.dispose();\n // weighted rgb to grayscale: https://www.mathworks.com/help/matlab/ref/rgb2gray.html\n const redNorm = tf.mul(red, rgb[0]);\n const greenNorm = tf.mul(green, rgb[1]);\n const blueNorm = tf.mul(blue, rgb[2]);\n red.dispose();\n green.dispose();\n blue.dispose();\n const grayscale = tf.addN([redNorm, greenNorm, blueNorm]);\n redNorm.dispose();\n greenNorm.dispose();\n blueNorm.dispose();\n const normalize = tf.tidy(() => grayscale.sub(0.5).mul(2));\n grayscale.dispose();\n const obj = [];\n if (config.face.emotion.enabled) {\n let data;\n if (!config.profile) {\n const emotionT = await models.emotion.predict(normalize);\n data = 
emotionT.dataSync();\n tf.dispose(emotionT);\n } else {\n const profileData = await tf.profile(() => models.emotion.predict(normalize));\n data = profileData.result.dataSync();\n profileData.result.dispose();\n profile.run('emotion', profileData);\n }\n for (let i = 0; i < data.length; i++) {\n if (scale * data[i] > config.face.emotion.minConfidence) obj.push({ score: Math.min(0.99, Math.trunc(100 * scale * data[i]) / 100), emotion: annotations[i] });\n }\n obj.sort((a, b) => b.score - a.score);\n }\n normalize.dispose();\n last = obj;\n resolve(obj);\n });\n}\n\nexports.predict = predict;\nexports.load = load;\n", "import { tf } from '../tf.js';\n\nclass BaseModel {\n constructor(model, outputStride) {\n this.model = model;\n this.outputStride = outputStride;\n }\n\n predict(input) {\n return tf.tidy(() => {\n const asFloat = this.preprocessInput(input.toFloat());\n const asBatch = asFloat.expandDims(0);\n const results = this.model.predict(asBatch);\n const results3d = results.map((y) => y.squeeze([0]));\n const namedResults = this.nameOutputResults(results3d);\n return {\n heatmapScores: namedResults.heatmap.sigmoid(),\n offsets: namedResults.offsets,\n displacementFwd: namedResults.displacementFwd,\n displacementBwd: namedResults.displacementBwd,\n };\n });\n }\n\n /**\n * Releases the CPU and GPU memory allocated by the model.\n */\n dispose() {\n this.model.dispose();\n }\n}\nexports.BaseModel = BaseModel;\n", "import { tf } from '../tf.js';\nimport * as modelBase from './modelBase';\n\nclass MobileNet extends modelBase.BaseModel {\n // eslint-disable-next-line class-methods-use-this\n preprocessInput(input) {\n // Normalize the pixels [0, 255] to be between [-1, 1].\n return tf.tidy(() => tf.div(input, 127.5).sub(1.0));\n }\n\n // eslint-disable-next-line class-methods-use-this\n nameOutputResults(results) {\n const [offsets, heatmap, displacementFwd, displacementBwd] = results;\n return { offsets, heatmap, displacementFwd, displacementBwd };\n }\n}\nexports.MobileNet = MobileNet;\n", "// algorithm based on Coursera Lecture from Algorithms, Part 1: https://www.coursera.org/learn/algorithms-part1/lecture/ZjoSM/heapsort\nfunction half(k) {\n return Math.floor(k / 2);\n}\nclass MaxHeap {\n constructor(maxSize, getElementValue) {\n this.priorityQueue = new Array(maxSize);\n this.numberOfElements = -1;\n this.getElementValue = getElementValue;\n }\n\n enqueue(x) {\n this.priorityQueue[++this.numberOfElements] = x;\n this.swim(this.numberOfElements);\n }\n\n dequeue() {\n const max = this.priorityQueue[0];\n this.exchange(0, this.numberOfElements--);\n this.sink(0);\n this.priorityQueue[this.numberOfElements + 1] = null;\n return max;\n }\n\n empty() {\n return this.numberOfElements === -1;\n }\n\n size() {\n return this.numberOfElements + 1;\n }\n\n all() {\n return this.priorityQueue.slice(0, this.numberOfElements + 1);\n }\n\n max() {\n return this.priorityQueue[0];\n }\n\n swim(k) {\n while (k > 0 && this.less(half(k), k)) {\n this.exchange(k, half(k));\n k = half(k);\n }\n }\n\n sink(k) {\n while (2 * k <= this.numberOfElements) {\n let j = 2 * k;\n if (j < this.numberOfElements && this.less(j, j + 1)) j++;\n if (!this.less(k, j)) break;\n this.exchange(k, j);\n k = j;\n }\n }\n\n getValueAt(i) {\n return this.getElementValue(this.priorityQueue[i]);\n }\n\n less(i, j) {\n return this.getValueAt(i) < this.getValueAt(j);\n }\n\n exchange(i, j) {\n const t = this.priorityQueue[i];\n this.priorityQueue[i] = this.priorityQueue[j];\n this.priorityQueue[j] = t;\n }\n}\nexports.MaxHeap = 
MaxHeap;\n", "import * as heapSort from './heapSort';\n\nfunction scoreIsMaximumInLocalWindow(keypointId, score, heatmapY, heatmapX, localMaximumRadius, scores) {\n const [height, width] = scores.shape;\n let localMaximum = true;\n const yStart = Math.max(heatmapY - localMaximumRadius, 0);\n const yEnd = Math.min(heatmapY + localMaximumRadius + 1, height);\n for (let yCurrent = yStart; yCurrent < yEnd; ++yCurrent) {\n const xStart = Math.max(heatmapX - localMaximumRadius, 0);\n const xEnd = Math.min(heatmapX + localMaximumRadius + 1, width);\n for (let xCurrent = xStart; xCurrent < xEnd; ++xCurrent) {\n if (scores.get(yCurrent, xCurrent, keypointId) > score) {\n localMaximum = false;\n break;\n }\n }\n if (!localMaximum) {\n break;\n }\n }\n return localMaximum;\n}\n/**\n * Builds a priority queue with part candidate positions for a specific image in\n * the batch. For this we find all local maxima in the score maps with score\n * values above a threshold. We create a single priority queue across all parts.\n */\nfunction buildPartWithScoreQueue(scoreThreshold, localMaximumRadius, scores) {\n const [height, width, numKeypoints] = scores.shape;\n const queue = new heapSort.MaxHeap(height * width * numKeypoints, ({ score }) => score);\n for (let heatmapY = 0; heatmapY < height; ++heatmapY) {\n for (let heatmapX = 0; heatmapX < width; ++heatmapX) {\n for (let keypointId = 0; keypointId < numKeypoints; ++keypointId) {\n const score = scores.get(heatmapY, heatmapX, keypointId);\n // Only consider parts with score greater or equal to threshold as root candidates.\n if (score < scoreThreshold) continue;\n // Only consider keypoints whose score is maximum in a local window.\n if (scoreIsMaximumInLocalWindow(keypointId, score, heatmapY, heatmapX, localMaximumRadius, scores)) {\n queue.enqueue({ score, part: { heatmapY, heatmapX, id: keypointId } });\n }\n }\n }\n }\n return queue;\n}\nexports.buildPartWithScoreQueue = buildPartWithScoreQueue;\n", "exports.partNames = [\n 'nose', 'leftEye', 'rightEye', 'leftEar', 'rightEar', 'leftShoulder',\n 'rightShoulder', 'leftElbow', 'rightElbow', 'leftWrist', 'rightWrist',\n 'leftHip', 'rightHip', 'leftKnee', 'rightKnee', 'leftAnkle', 'rightAnkle',\n];\nexports.NUM_KEYPOINTS = exports.partNames.length;\nexports.partIds = exports.partNames.reduce((result, jointName, i) => {\n result[jointName] = i;\n return result;\n}, {});\nconst connectedPartNames = [\n ['leftHip', 'leftShoulder'], ['leftElbow', 'leftShoulder'],\n ['leftElbow', 'leftWrist'], ['leftHip', 'leftKnee'],\n ['leftKnee', 'leftAnkle'], ['rightHip', 'rightShoulder'],\n ['rightElbow', 'rightShoulder'], ['rightElbow', 'rightWrist'],\n ['rightHip', 'rightKnee'], ['rightKnee', 'rightAnkle'],\n ['leftShoulder', 'rightShoulder'], ['leftHip', 'rightHip'],\n];\n/*\n * Define the skeleton. This defines the parent->child relationships of our\n * tree. 
Arbitrarily this defines the nose as the root of the tree, however\n * since we will infer the displacement for both parent->child and\n * child->parent, we can define the tree root as any node.\n */\nexports.poseChain = [\n ['nose', 'leftEye'], ['leftEye', 'leftEar'], ['nose', 'rightEye'],\n ['rightEye', 'rightEar'], ['nose', 'leftShoulder'],\n ['leftShoulder', 'leftElbow'], ['leftElbow', 'leftWrist'],\n ['leftShoulder', 'leftHip'], ['leftHip', 'leftKnee'],\n ['leftKnee', 'leftAnkle'], ['nose', 'rightShoulder'],\n ['rightShoulder', 'rightElbow'], ['rightElbow', 'rightWrist'],\n ['rightShoulder', 'rightHip'], ['rightHip', 'rightKnee'],\n ['rightKnee', 'rightAnkle'],\n];\nexports.connectedPartIndices = connectedPartNames.map(([jointNameA, jointNameB]) => ([exports.partIds[jointNameA], exports.partIds[jointNameB]]));\nexports.partChannels = [\n 'left_face',\n 'right_face',\n 'right_upper_leg_front',\n 'right_lower_leg_back',\n 'right_upper_leg_back',\n 'left_lower_leg_front',\n 'left_upper_leg_front',\n 'left_upper_leg_back',\n 'left_lower_leg_back',\n 'right_feet',\n 'right_lower_leg_front',\n 'left_feet',\n 'torso_front',\n 'torso_back',\n 'right_upper_arm_front',\n 'right_upper_arm_back',\n 'right_lower_arm_back',\n 'left_lower_arm_front',\n 'left_upper_arm_front',\n 'left_upper_arm_back',\n 'left_lower_arm_back',\n 'right_hand',\n 'right_lower_arm_front',\n 'left_hand',\n];\n", "import * as kpt from './keypoints';\n\nfunction getOffsetPoint(y, x, keypoint, offsets) {\n return {\n y: offsets.get(y, x, keypoint),\n x: offsets.get(y, x, keypoint + kpt.NUM_KEYPOINTS),\n };\n}\nexports.getOffsetPoint = getOffsetPoint;\n\nfunction getImageCoords(part, outputStride, offsets) {\n const { heatmapY, heatmapX, id: keypoint } = part;\n const { y, x } = getOffsetPoint(heatmapY, heatmapX, keypoint, offsets);\n return {\n x: part.heatmapX * outputStride + x,\n y: part.heatmapY * outputStride + y,\n };\n}\nexports.getImageCoords = getImageCoords;\n\nfunction fillArray(element, size) {\n const result = new Array(size);\n for (let i = 0; i < size; i++) {\n result[i] = element;\n }\n return result;\n}\nexports.fillArray = fillArray;\n\nfunction clamp(a, min, max) {\n if (a < min) return min;\n if (a > max) return max;\n return a;\n}\nexports.clamp = clamp;\n\nfunction squaredDistance(y1, x1, y2, x2) {\n const dy = y2 - y1;\n const dx = x2 - x1;\n return dy * dy + dx * dx;\n}\nexports.squaredDistance = squaredDistance;\n\nfunction addVectors(a, b) {\n return { x: a.x + b.x, y: a.y + b.y };\n}\nexports.addVectors = addVectors;\n\nfunction clampVector(a, min, max) {\n return { y: clamp(a.y, min, max), x: clamp(a.x, min, max) };\n}\nexports.clampVector = clampVector;\n", "import * as keypoints from './keypoints';\nimport * as vectors from './vectors';\n\nconst parentChildrenTuples = keypoints.poseChain.map(([parentJoinName, childJoinName]) => ([keypoints.partIds[parentJoinName], keypoints.partIds[childJoinName]]));\nconst parentToChildEdges = parentChildrenTuples.map(([, childJointId]) => childJointId);\nconst childToParentEdges = parentChildrenTuples.map(([parentJointId]) => parentJointId);\nfunction getDisplacement(edgeId, point, displacements) {\n const numEdges = displacements.shape[2] / 2;\n return {\n y: displacements.get(point.y, point.x, edgeId),\n x: displacements.get(point.y, point.x, numEdges + edgeId),\n };\n}\nfunction getStridedIndexNearPoint(point, outputStride, height, width) {\n return {\n y: vectors.clamp(Math.round(point.y / outputStride), 0, height - 1),\n x: 
vectors.clamp(Math.round(point.x / outputStride), 0, width - 1),\n };\n}\n/**\n * We get a new keypoint along the `edgeId` for the pose instance, assuming\n * that the position of the `idSource` part is already known. For this, we\n * follow the displacement vector from the source to target part (stored in\n * the `i`-t channel of the displacement tensor). The displaced keypoint\n * vector is refined using the offset vector by `offsetRefineStep` times.\n */\nfunction traverseToTargetKeypoint(edgeId, sourceKeypoint, targetKeypointId, scoresBuffer, offsets, outputStride, displacements, offsetRefineStep = 2) {\n const [height, width] = scoresBuffer.shape;\n // Nearest neighbor interpolation for the source->target displacements.\n const sourceKeypointIndices = getStridedIndexNearPoint(sourceKeypoint.position, outputStride, height, width);\n const displacement = getDisplacement(edgeId, sourceKeypointIndices, displacements);\n const displacedPoint = vectors.addVectors(sourceKeypoint.position, displacement);\n let targetKeypoint = displacedPoint;\n for (let i = 0; i < offsetRefineStep; i++) {\n const targetKeypointIndices = getStridedIndexNearPoint(targetKeypoint, outputStride, height, width);\n const offsetPoint = vectors.getOffsetPoint(targetKeypointIndices.y, targetKeypointIndices.x, targetKeypointId, offsets);\n targetKeypoint = vectors.addVectors({\n x: targetKeypointIndices.x * outputStride,\n y: targetKeypointIndices.y * outputStride,\n }, { x: offsetPoint.x, y: offsetPoint.y });\n }\n const targetKeyPointIndices = getStridedIndexNearPoint(targetKeypoint, outputStride, height, width);\n const score = scoresBuffer.get(targetKeyPointIndices.y, targetKeyPointIndices.x, targetKeypointId);\n return { position: targetKeypoint, part: keypoints.partNames[targetKeypointId], score };\n}\n/**\n * Follows the displacement fields to decode the full pose of the object\n * instance given the position of a part that acts as root.\n *\n * @return An array of decoded keypoints and their scores for a single pose\n */\nfunction decodePose(root, scores, offsets, outputStride, displacementsFwd, displacementsBwd) {\n const numParts = scores.shape[2];\n const numEdges = parentToChildEdges.length;\n const instanceKeypoints = new Array(numParts);\n // Start a new detection instance at the position of the root.\n const { part: rootPart, score: rootScore } = root;\n const rootPoint = vectors.getImageCoords(rootPart, outputStride, offsets);\n instanceKeypoints[rootPart.id] = {\n score: rootScore,\n part: keypoints.partNames[rootPart.id],\n position: rootPoint,\n };\n // Decode the part positions upwards in the tree, following the backward\n // displacements.\n for (let edge = numEdges - 1; edge >= 0; --edge) {\n const sourceKeypointId = parentToChildEdges[edge];\n const targetKeypointId = childToParentEdges[edge];\n if (instanceKeypoints[sourceKeypointId] && !instanceKeypoints[targetKeypointId]) {\n instanceKeypoints[targetKeypointId] = traverseToTargetKeypoint(edge, instanceKeypoints[sourceKeypointId], targetKeypointId, scores, offsets, outputStride, displacementsBwd);\n }\n }\n // Decode the part positions downwards in the tree, following the forward\n // displacements.\n for (let edge = 0; edge < numEdges; ++edge) {\n const sourceKeypointId = childToParentEdges[edge];\n const targetKeypointId = parentToChildEdges[edge];\n if (instanceKeypoints[sourceKeypointId] && !instanceKeypoints[targetKeypointId]) {\n instanceKeypoints[targetKeypointId] = traverseToTargetKeypoint(edge, instanceKeypoints[sourceKeypointId], 
targetKeypointId, scores, offsets, outputStride, displacementsFwd);\n }\n }\n return instanceKeypoints;\n}\nexports.decodePose = decodePose;\n", "import * as buildParts from './buildParts';\nimport * as decodePose from './decodePose';\nimport * as vectors from './vectors';\n\nfunction withinNmsRadiusOfCorrespondingPoint(poses, squaredNmsRadius, { x, y }, keypointId) {\n return poses.some(({ keypoints }) => {\n const correspondingKeypoint = keypoints[keypointId].position;\n return vectors.squaredDistance(y, x, correspondingKeypoint.y, correspondingKeypoint.x) <= squaredNmsRadius;\n });\n}\n/* Score the newly proposed object instance without taking into account\n * the scores of the parts that overlap with any previously detected\n * instance.\n */\nfunction getInstanceScore(existingPoses, squaredNmsRadius, instanceKeypoints) {\n const notOverlappedKeypointScores = instanceKeypoints.reduce((result, { position, score }, keypointId) => {\n if (!withinNmsRadiusOfCorrespondingPoint(existingPoses, squaredNmsRadius, position, keypointId)) {\n result += score;\n }\n return result;\n }, 0.0);\n return notOverlappedKeypointScores / instanceKeypoints.length;\n}\n// A point (y, x) is considered as root part candidate if its score is a\n// maximum in a window |y - y'| <= kLocalMaximumRadius, |x - x'| <=\n// kLocalMaximumRadius.\nconst kLocalMaximumRadius = 1;\n/**\n * Detects multiple poses and finds their parts from part scores and\n * displacement vectors. It returns up to `maxDetections` object instance\n * detections in decreasing root score order. It works as follows: We first\n * create a priority queue with local part score maxima above\n * `scoreThreshold`, considering all parts at the same time. Then we\n * iteratively pull the top element of the queue (in decreasing score order)\n * and treat it as a root candidate for a new object instance. To avoid\n * duplicate detections, we reject the root candidate if it is within a disk\n * of `nmsRadius` pixels from the corresponding part of a previously detected\n * instance, which is a form of part-based non-maximum suppression (NMS). If\n * the root candidate passes the NMS check, we start a new object instance\n * detection, treating the corresponding part as root and finding the\n * positions of the remaining parts by following the displacement vectors\n * along the tree-structured part graph. We assign to the newly detected\n * instance a score equal to the sum of scores of its parts which have not\n * been claimed by a previous instance (i.e., those at least `nmsRadius`\n * pixels away from the corresponding part of all previously detected\n * instances), divided by the total number of parts `numParts`.\n *\n * @param heatmapScores 3-D tensor with shape `[height, width, numParts]`.\n * The value of heatmapScores[y, x, k]` is the score of placing the `k`-th\n * object part at position `(y, x)`.\n *\n * @param offsets 3-D tensor with shape `[height, width, numParts * 2]`.\n * The value of [offsets[y, x, k], offsets[y, x, k + numParts]]` is the\n * short range offset vector of the `k`-th object part at heatmap\n * position `(y, x)`.\n *\n * @param displacementsFwd 3-D tensor of shape\n * `[height, width, 2 * num_edges]`, where `num_edges = num_parts - 1` is the\n * number of edges (parent-child pairs) in the tree. 
It contains the forward\n * displacements between consecutive part from the root towards the leaves.\n *\n * @param displacementsBwd 3-D tensor of shape\n * `[height, width, 2 * num_edges]`, where `num_edges = num_parts - 1` is the\n * number of edges (parent-child pairs) in the tree. It contains the backward\n * displacements between consecutive part from the root towards the leaves.\n *\n * @param outputStride The output stride that was used when feed-forwarding\n * through the PoseNet model. Must be 32, 16, or 8.\n *\n * @param maxPoseDetections Maximum number of returned instance detections per\n * image.\n *\n * @param scoreThreshold Only return instance detections that have root part\n * score greater or equal to this value. Defaults to 0.5.\n *\n * @param nmsRadius Non-maximum suppression part distance. It needs to be\n * strictly positive. Two parts suppress each other if they are less than\n * `nmsRadius` pixels away. Defaults to 20.\n *\n * @return An array of poses and their scores, each containing keypoints and\n * the corresponding keypoint scores.\n */\nfunction decodeMultiplePoses(scoresBuffer, offsetsBuffer, displacementsFwdBuffer, displacementsBwdBuffer, outputStride, maxPoseDetections, scoreThreshold = 0.5, nmsRadius = 20) {\n const poses = [];\n const queue = buildParts.buildPartWithScoreQueue(scoreThreshold, kLocalMaximumRadius, scoresBuffer);\n const squaredNmsRadius = nmsRadius * nmsRadius;\n // Generate at most maxDetections object instances per image in\n // decreasing root part score order.\n while (poses.length < maxPoseDetections && !queue.empty()) {\n // The top element in the queue is the next root candidate.\n const root = queue.dequeue();\n // Part-based non-maximum suppression: We reject a root candidate if it\n // is within a disk of `nmsRadius` pixels from the corresponding part of\n // a previously detected instance.\n const rootImageCoords = vectors.getImageCoords(root.part, outputStride, offsetsBuffer);\n if (withinNmsRadiusOfCorrespondingPoint(poses, squaredNmsRadius, rootImageCoords, root.part.id)) continue;\n // Start a new detection instance at the position of the root.\n const keypoints = decodePose.decodePose(root, scoresBuffer, offsetsBuffer, outputStride, displacementsFwdBuffer, displacementsBwdBuffer);\n const score = getInstanceScore(poses, squaredNmsRadius, keypoints);\n poses.push({ keypoints, score });\n }\n return poses;\n}\nexports.decodeMultiplePoses = decodeMultiplePoses;\n", "import * as kpt from './keypoints';\n\nfunction eitherPointDoesntMeetConfidence(a, b, minConfidence) {\n return (a < minConfidence || b < minConfidence);\n}\n\nfunction getAdjacentKeyPoints(keypoints, minConfidence) {\n return kpt.connectedPartIndices.reduce((result, [leftJoint, rightJoint]) => {\n if (eitherPointDoesntMeetConfidence(keypoints[leftJoint].score, keypoints[rightJoint].score, minConfidence)) {\n return result;\n }\n result.push([keypoints[leftJoint], keypoints[rightJoint]]);\n return result;\n }, []);\n}\nexports.getAdjacentKeyPoints = getAdjacentKeyPoints;\n\nconst { NEGATIVE_INFINITY, POSITIVE_INFINITY } = Number;\nfunction getBoundingBox(keypoints) {\n return keypoints.reduce(({ maxX, maxY, minX, minY }, { position: { x, y } }) => ({\n maxX: Math.max(maxX, x),\n maxY: Math.max(maxY, y),\n minX: Math.min(minX, x),\n minY: Math.min(minY, y),\n }), {\n maxX: NEGATIVE_INFINITY,\n maxY: NEGATIVE_INFINITY,\n minX: POSITIVE_INFINITY,\n minY: POSITIVE_INFINITY,\n });\n}\nexports.getBoundingBox = getBoundingBox;\n\nfunction getBoundingBoxPoints(keypoints) 
{\n const { minX, minY, maxX, maxY } = getBoundingBox(keypoints);\n return [{ x: minX, y: minY }, { x: maxX, y: minY }, { x: maxX, y: maxY }, { x: minX, y: maxY }];\n}\nexports.getBoundingBoxPoints = getBoundingBoxPoints;\n\nasync function toTensorBuffers3D(tensors) {\n return Promise.all(tensors.map((tensor) => tensor.buffer()));\n}\nexports.toTensorBuffers3D = toTensorBuffers3D;\n\nfunction scalePose(pose, scaleY, scaleX) {\n return {\n score: pose.score,\n keypoints: pose.keypoints.map(({ score, part, position }) => ({\n score,\n part,\n position: { x: position.x * scaleX, y: position.y * scaleY },\n })),\n };\n}\nexports.scalePose = scalePose;\n\nfunction resizeTo(image, [targetH, targetW]) {\n const input = image.squeeze(0);\n const resized = input.resizeBilinear([targetH, targetW]);\n input.dispose();\n return resized;\n}\nexports.resizeTo = resizeTo;\n\nfunction scaleAndFlipPoses(poses, [height, width], [inputResolutionHeight, inputResolutionWidth]) {\n const scaledPoses = poses.map((pose) => scalePose(pose, height / inputResolutionHeight, width / inputResolutionWidth));\n return scaledPoses;\n}\nexports.scaleAndFlipPoses = scaleAndFlipPoses;\n", "import { loadGraphModel } from '../tf.js';\nimport * as modelMobileNet from './modelMobileNet';\nimport * as decodeMultiple from './decodeMultiple';\nimport * as util from './util';\n\nclass PoseNet {\n constructor(net) {\n this.baseModel = net;\n this.outputStride = 16;\n }\n\n async estimatePoses(input, config) {\n return new Promise(async (resolve) => {\n const height = input.shape[1];\n const width = input.shape[2];\n const resized = util.resizeTo(input, [config.body.inputSize, config.body.inputSize]);\n const res = this.baseModel.predict(resized);\n const allTensorBuffers = await util.toTensorBuffers3D([res.heatmapScores, res.offsets, res.displacementFwd, res.displacementBwd]);\n const scoresBuffer = allTensorBuffers[0];\n const offsetsBuffer = allTensorBuffers[1];\n const displacementsFwdBuffer = allTensorBuffers[2];\n const displacementsBwdBuffer = allTensorBuffers[3];\n const poses = await decodeMultiple.decodeMultiplePoses(scoresBuffer, offsetsBuffer, displacementsFwdBuffer, displacementsBwdBuffer, this.outputStride, config.body.maxDetections, config.body.scoreThreshold, config.body.nmsRadius);\n const resultPoses = util.scaleAndFlipPoses(poses, [height, width], [config.body.inputSize, config.body.inputSize]);\n res.heatmapScores.dispose();\n res.offsets.dispose();\n res.displacementFwd.dispose();\n res.displacementBwd.dispose();\n resized.dispose();\n resolve(resultPoses);\n });\n }\n\n dispose() {\n this.baseModel.dispose();\n }\n}\nexports.PoseNet = PoseNet;\n\nasync function load(config) {\n const graphModel = await loadGraphModel(config.body.modelPath);\n const mobilenet = new modelMobileNet.MobileNet(graphModel, this.outputStride);\n // eslint-disable-next-line no-console\n console.log(`Human: load model: ${config.body.modelPath.match(/\\/(.*)\\./)[1]}`);\n return new PoseNet(mobilenet);\n}\nexports.load = load;\n", "import * as modelMobileNet from './modelMobileNet';\nimport * as modelPoseNet from './modelPoseNet';\nimport * as decodeMultiple from './decodeMultiple';\nimport * as keypoints from './keypoints';\nimport * as util from './util';\n\nexports.load = modelPoseNet.load;\nexports.PoseNet = modelPoseNet.PoseNet;\n\nexports.MobileNet = modelMobileNet.MobileNet;\nexports.decodeMultiplePoses = decodeMultiple.decodeMultiplePoses;\nexports.partChannels = keypoints.partChannels;\nexports.partIds = 
keypoints.partIds;\nexports.partNames = keypoints.partNames;\nexports.poseChain = keypoints.poseChain;\nexports.getAdjacentKeyPoints = util.getAdjacentKeyPoints;\nexports.getBoundingBox = util.getBoundingBox;\nexports.getBoundingBoxPoints = util.getBoundingBoxPoints;\nexports.scaleAndFlipPoses = util.scaleAndFlipPoses;\nexports.scalePose = util.scalePose;\n", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport { tf } from '../tf.js';\nimport * as box from './box';\n\nclass HandDetector {\n constructor(model, inputSize, anchorsAnnotated) {\n this.model = model;\n this.anchors = anchorsAnnotated.map((anchor) => [anchor.x_center, anchor.y_center]);\n this.anchorsTensor = tf.tensor2d(this.anchors);\n this.inputSizeTensor = tf.tensor1d([inputSize, inputSize]);\n this.doubleInputSizeTensor = tf.tensor1d([inputSize * 2, inputSize * 2]);\n }\n\n normalizeBoxes(boxes) {\n return tf.tidy(() => {\n const boxOffsets = tf.slice(boxes, [0, 0], [-1, 2]);\n const boxSizes = tf.slice(boxes, [0, 2], [-1, 2]);\n const boxCenterPoints = tf.add(tf.div(boxOffsets, this.inputSizeTensor), this.anchorsTensor);\n const halfBoxSizes = tf.div(boxSizes, this.doubleInputSizeTensor);\n const startPoints = tf.mul(tf.sub(boxCenterPoints, halfBoxSizes), this.inputSizeTensor);\n const endPoints = tf.mul(tf.add(boxCenterPoints, halfBoxSizes), this.inputSizeTensor);\n return tf.concat2d([startPoints, endPoints], 1);\n });\n }\n\n normalizeLandmarks(rawPalmLandmarks, index) {\n return tf.tidy(() => {\n const landmarks = tf.add(tf.div(rawPalmLandmarks.reshape([-1, 7, 2]), this.inputSizeTensor), this.anchors[index]);\n return tf.mul(landmarks, this.inputSizeTensor);\n });\n }\n\n async getBoxes(input, config) {\n const batched = this.model.predict(input);\n const predictions = batched.squeeze();\n batched.dispose();\n const scores = tf.tidy(() => tf.sigmoid(tf.slice(predictions, [0, 0], [-1, 1])).squeeze());\n const scoresVal = scores.dataSync();\n const rawBoxes = tf.slice(predictions, [0, 1], [-1, 4]);\n const boxes = this.normalizeBoxes(rawBoxes);\n rawBoxes.dispose();\n const filteredT = await tf.image.nonMaxSuppressionAsync(boxes, scores, config.maxHands, config.iouThreshold, config.scoreThreshold);\n const filtered = filteredT.arraySync();\n\n scores.dispose();\n filteredT.dispose();\n const hands = [];\n for (const boxIndex of filtered) {\n if (scoresVal[boxIndex] >= config.minConfidence) {\n const matchingBox = tf.slice(boxes, [boxIndex, 0], [1, -1]);\n const rawPalmLandmarks = tf.slice(predictions, [boxIndex, 5], [1, 14]);\n const palmLandmarks = tf.tidy(() => this.normalizeLandmarks(rawPalmLandmarks, boxIndex).reshape([-1, 2]));\n rawPalmLandmarks.dispose();\n hands.push({ box: matchingBox, palmLandmarks, confidence: scoresVal[boxIndex] });\n }\n }\n predictions.dispose();\n boxes.dispose();\n return hands;\n }\n\n async estimateHandBounds(input, 
config) {\n const inputHeight = input.shape[1];\n const inputWidth = input.shape[2];\n const image = tf.tidy(() => input.resizeBilinear([config.inputSize, config.inputSize]).div(127.5).sub(1));\n const predictions = await this.getBoxes(image, config);\n image.dispose();\n if (!predictions || predictions.length === 0) return null;\n const hands = [];\n for (const prediction of predictions) {\n const boxes = prediction.box.dataSync();\n const startPoint = boxes.slice(0, 2);\n const endPoint = boxes.slice(2, 4);\n const palmLandmarks = prediction.palmLandmarks.arraySync();\n prediction.box.dispose();\n prediction.palmLandmarks.dispose();\n hands.push(box.scaleBoxCoordinates({ startPoint, endPoint, palmLandmarks, confidence: prediction.confidence }, [inputWidth / config.inputSize, inputHeight / config.inputSize]));\n }\n return hands;\n }\n}\nexports.HandDetector = HandDetector;\n", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport { tf } from '../tf.js';\nimport * as box from './box';\nimport * as util from './util';\n\nconst PALM_BOX_SHIFT_VECTOR = [0, -0.4];\nconst PALM_BOX_ENLARGE_FACTOR = 3;\nconst HAND_BOX_SHIFT_VECTOR = [0, -0.1]; // move detected hand box by x,y to ease landmark detection\nconst HAND_BOX_ENLARGE_FACTOR = 1.65; // increased from model default 1.65;\nconst PALM_LANDMARK_IDS = [0, 5, 9, 13, 17, 1, 2];\nconst PALM_LANDMARKS_INDEX_OF_PALM_BASE = 0;\nconst PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE = 2;\n\nclass HandPipeline {\n constructor(boundingBoxDetector, meshDetector, inputSize) {\n this.boxDetector = boundingBoxDetector;\n this.meshDetector = meshDetector;\n this.inputSize = inputSize;\n this.storedBoxes = [];\n this.skipped = 1000;\n this.detectedHands = 0;\n }\n\n getBoxForPalmLandmarks(palmLandmarks, rotationMatrix) {\n const rotatedPalmLandmarks = palmLandmarks.map((coord) => {\n const homogeneousCoordinate = [...coord, 1];\n return util.rotatePoint(homogeneousCoordinate, rotationMatrix);\n });\n const boxAroundPalm = this.calculateLandmarksBoundingBox(rotatedPalmLandmarks);\n return box.enlargeBox(box.squarifyBox(box.shiftBox(boxAroundPalm, PALM_BOX_SHIFT_VECTOR)), PALM_BOX_ENLARGE_FACTOR);\n }\n\n getBoxForHandLandmarks(landmarks) {\n const boundingBox = this.calculateLandmarksBoundingBox(landmarks);\n const boxAroundHand = box.enlargeBox(box.squarifyBox(box.shiftBox(boundingBox, HAND_BOX_SHIFT_VECTOR)), HAND_BOX_ENLARGE_FACTOR);\n const palmLandmarks = [];\n for (let i = 0; i < PALM_LANDMARK_IDS.length; i++) {\n palmLandmarks.push(landmarks[PALM_LANDMARK_IDS[i]].slice(0, 2));\n }\n boxAroundHand.palmLandmarks = palmLandmarks;\n return boxAroundHand;\n }\n\n transformRawCoords(rawCoords, box2, angle, rotationMatrix) {\n const boxSize = box.getBoxSize(box2);\n const scaleFactor = [boxSize[0] / this.inputSize, boxSize[1] / this.inputSize];\n const coordsScaled = rawCoords.map((coord) => [\n 
scaleFactor[0] * (coord[0] - this.inputSize / 2),\n scaleFactor[1] * (coord[1] - this.inputSize / 2),\n coord[2],\n ]);\n const coordsRotationMatrix = util.buildRotationMatrix(angle, [0, 0]);\n const coordsRotated = coordsScaled.map((coord) => {\n const rotated = util.rotatePoint(coord, coordsRotationMatrix);\n return [...rotated, coord[2]];\n });\n const inverseRotationMatrix = util.invertTransformMatrix(rotationMatrix);\n const boxCenter = [...box.getBoxCenter(box2), 1];\n const originalBoxCenter = [\n util.dot(boxCenter, inverseRotationMatrix[0]),\n util.dot(boxCenter, inverseRotationMatrix[1]),\n ];\n return coordsRotated.map((coord) => [\n coord[0] + originalBoxCenter[0],\n coord[1] + originalBoxCenter[1],\n coord[2],\n ]);\n }\n\n async estimateHands(image, config) {\n this.skipped++;\n let useFreshBox = false;\n\n // run new detector every skipFrames unless we only want box to start with\n let boxes;\n if ((this.skipped > config.skipFrames) || !config.landmarks) {\n boxes = await this.boxDetector.estimateHandBounds(image, config);\n // don't reset on test image\n if ((image.shape[1] !== 255) && (image.shape[2] !== 255)) this.skipped = 0;\n }\n\n // if detector result count doesn't match current working set, use it to reset current working set\n if (boxes && (boxes.length > 0) && ((boxes.length !== this.detectedHands) && (this.detectedHands !== config.maxHands) || !config.landmarks)) {\n this.storedBoxes = [];\n this.detectedHands = 0;\n for (const possible of boxes) this.storedBoxes.push(possible);\n if (this.storedBoxes.length > 0) useFreshBox = true;\n }\n const hands = [];\n // console.log(`skipped: ${this.skipped} max: ${config.maxHands} detected: ${this.detectedHands} stored: ${this.storedBoxes.length} new: ${boxes?.length}`);\n\n // go through working set of boxes\n for (const i in this.storedBoxes) {\n const currentBox = this.storedBoxes[i];\n if (!currentBox) continue;\n if (config.landmarks) {\n const angle = util.computeRotation(currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_PALM_BASE], currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE]);\n const palmCenter = box.getBoxCenter(currentBox);\n const palmCenterNormalized = [palmCenter[0] / image.shape[2], palmCenter[1] / image.shape[1]];\n const rotatedImage = tf.image.rotateWithOffset(image, angle, 0, palmCenterNormalized);\n const rotationMatrix = util.buildRotationMatrix(-angle, palmCenter);\n const newBox = useFreshBox ? 
this.getBoxForPalmLandmarks(currentBox.palmLandmarks, rotationMatrix) : currentBox;\n const croppedInput = box.cutBoxFromImageAndResize(newBox, rotatedImage, [this.inputSize, this.inputSize]);\n const handImage = croppedInput.div(255);\n croppedInput.dispose();\n rotatedImage.dispose();\n const [confidence, keypoints] = await this.meshDetector.predict(handImage);\n handImage.dispose();\n const confidenceValue = confidence.dataSync()[0];\n confidence.dispose();\n if (confidenceValue >= config.minConfidence) {\n const keypointsReshaped = tf.reshape(keypoints, [-1, 3]);\n const rawCoords = keypointsReshaped.arraySync();\n keypoints.dispose();\n keypointsReshaped.dispose();\n const coords = this.transformRawCoords(rawCoords, newBox, angle, rotationMatrix);\n const nextBoundingBox = this.getBoxForHandLandmarks(coords);\n this.storedBoxes[i] = nextBoundingBox;\n const result = {\n landmarks: coords,\n confidence: confidenceValue,\n box: {\n topLeft: nextBoundingBox.startPoint,\n bottomRight: nextBoundingBox.endPoint,\n },\n };\n hands.push(result);\n } else {\n this.storedBoxes[i] = null;\n }\n keypoints.dispose();\n } else {\n const enlarged = box.enlargeBox(box.squarifyBox(box.shiftBox(currentBox, HAND_BOX_SHIFT_VECTOR)), HAND_BOX_ENLARGE_FACTOR);\n const result = {\n confidence: currentBox.confidence,\n box: {\n topLeft: enlarged.startPoint,\n bottomRight: enlarged.endPoint,\n },\n };\n hands.push(result);\n }\n }\n this.storedBoxes = this.storedBoxes.filter((a) => a !== null);\n this.detectedHands = hands.length;\n return hands;\n }\n\n // eslint-disable-next-line class-methods-use-this\n calculateLandmarksBoundingBox(landmarks) {\n const xs = landmarks.map((d) => d[0]);\n const ys = landmarks.map((d) => d[1]);\n const startPoint = [Math.min(...xs), Math.min(...ys)];\n const endPoint = [Math.max(...xs), Math.max(...ys)];\n return { startPoint, endPoint };\n }\n}\n\nexports.HandPipeline = HandPipeline;\n", "exports.anchors = [\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.015625,\n },\n {\n w: 
1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n 
y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.046875,\n },\n {\n w: 
1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.046875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n 
y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.078125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.109375,\n },\n {\n w: 
1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.109375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n 
y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.140625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.171875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.171875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.171875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.171875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.171875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.171875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.171875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.171875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.171875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.171875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.171875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.171875,\n },\n {\n w: 
[ anchor data elided: the inlined array continues with identical { w: 1, h: 1, x_center, y_center } entries, two duplicate anchors per cell of a 32x32 grid, with centers stepping by 1/32 starting at 0.015625; this fragment covers the grid rows with y_center 0.171875 through 0.640625 ]
y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.671875,\n },\n {\n w: 
1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n 
y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.703125,\n },\n {\n w: 
1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n 
y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.765625,\n },\n {\n w: 
1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n 
y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.828125,\n },\n {\n w: 
1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n 
y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.859375,\n },\n {\n w: 
1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n 
y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.890625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.921875,\n },\n {\n w: 
1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.921875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n 
y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.953125,\n },\n {\n w: 
1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.953125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n 
y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.984375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.03125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.09375,\n },\n {\n w: 1,\n 
h: 1,\n x_center: 0.15625,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.09375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 
1,\n x_center: 0.78125,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.15625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.21875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 
1,\n x_center: 0.46875,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.28125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.34375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 
1,\n x_center: 0.09375,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.40625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 
1,\n x_center: 0.78125,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.46875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.53125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 
1,\n x_center: 0.40625,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.59375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.65625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 
1,\n x_center: 0.09375,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.71875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 
1,\n x_center: 0.71875,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.78125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.84375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 
1,\n x_center: 0.40625,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.90625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.03125,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.09375,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.15625,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.21875,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.28125,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.34375,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.40625,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.46875,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.53125,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.59375,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.65625,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.71875,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.78125,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.84375,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.90625,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.96875,\n y_center: 0.96875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n 
x_center: 0.0625,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.0625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.1875,\n },\n 
{\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.1875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n 
y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.3125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n 
x_center: 0.4375,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.4375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.5625,\n },\n 
{\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.5625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n 
y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.6875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n 
x_center: 0.8125,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.9375,\n 
},\n];\n", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// https://storage.googleapis.com/tfjs-models/demos/handpose/index.html\n\nimport { loadGraphModel } from '../tf.js';\nimport * as handdetector from './handdetector';\nimport * as pipeline from './handpipeline';\nimport * as anchors from './anchors';\n\nconst MESH_ANNOTATIONS = {\n thumb: [1, 2, 3, 4],\n indexFinger: [5, 6, 7, 8],\n middleFinger: [9, 10, 11, 12],\n ringFinger: [13, 14, 15, 16],\n pinky: [17, 18, 19, 20],\n palmBase: [0],\n};\n\nclass HandPose {\n constructor(pipe) {\n this.pipeline = pipe;\n }\n\n static getAnnotations() {\n return MESH_ANNOTATIONS;\n }\n\n async estimateHands(input, config) {\n const predictions = await this.pipeline.estimateHands(input, config);\n if (!predictions) return [];\n const hands = [];\n for (const prediction of predictions) {\n const annotations = {};\n if (prediction.landmarks) {\n for (const key of Object.keys(MESH_ANNOTATIONS)) {\n annotations[key] = MESH_ANNOTATIONS[key].map((index) => prediction.landmarks[index]);\n }\n }\n hands.push({\n confidence: prediction.confidence,\n box: prediction.box ? 
[\n prediction.box.topLeft[0],\n prediction.box.topLeft[1],\n prediction.box.bottomRight[0] - prediction.box.topLeft[0],\n prediction.box.bottomRight[1] - prediction.box.topLeft[1],\n ] : 0,\n landmarks: prediction.landmarks,\n annotations,\n });\n }\n return hands;\n }\n}\nexports.HandPose = HandPose;\n\nasync function load(config) {\n const [handDetectorModel, handPoseModel] = await Promise.all([\n loadGraphModel(config.detector.modelPath, { fromTFHub: config.detector.modelPath.includes('tfhub.dev') }),\n loadGraphModel(config.skeleton.modelPath, { fromTFHub: config.skeleton.modelPath.includes('tfhub.dev') }),\n ]);\n const detector = new handdetector.HandDetector(handDetectorModel, config.inputSize, anchors.anchors);\n const pipe = new pipeline.HandPipeline(detector, handPoseModel, config.inputSize);\n const handpose = new HandPose(pipe);\n // eslint-disable-next-line no-console\n console.log(`Human: load model: ${config.detector.modelPath.match(/\\/(.*)\\./)[1]}`);\n // eslint-disable-next-line no-console\n console.log(`Human: load model: ${config.skeleton.modelPath.match(/\\/(.*)\\./)[1]}`);\n return handpose;\n}\nexports.load = load;\n", "exports.body = (res) => {\n if (!res) return [];\n const gestures = [];\n for (const pose of res) {\n // raising hands\n const leftWrist = pose.keypoints.find((a) => (a.part === 'leftWrist'));\n const rightWrist = pose.keypoints.find((a) => (a.part === 'rightWrist'));\n const nose = pose.keypoints.find((a) => (a.part === 'nose'));\n if (nose && leftWrist && rightWrist && (leftWrist.position.y < nose.position.y) && (rightWrist.position.y < nose.position.y)) gestures.push('i give up');\n else if (nose && leftWrist && (leftWrist.position.y < nose.position.y)) gestures.push('raise left hand');\n else if (nose && rightWrist && (rightWrist.position.y < nose.position.y)) gestures.push('raise right hand');\n\n // leaning\n const leftShoulder = pose.keypoints.find((a) => (a.part === 'leftShoulder'));\n const rightShoulder = pose.keypoints.find((a) => (a.part === 'rightShoulder'));\n if (leftShoulder && rightShoulder) gestures.push(`leaning ${(leftShoulder.position.y > rightShoulder.position.y) ? 'left' : 'right'}`);\n }\n return gestures;\n};\n\nexports.face = (res) => {\n if (!res) return [];\n const gestures = [];\n for (const face of res) {\n // if (face.annotations['rightCheek'] && face.annotations['leftCheek'] && (face.annotations['rightCheek'].length > 0) && (face.annotations['leftCheek'].length > 0)) {\n // gestures.push(`facing ${((face.annotations['rightCheek'][0][2] > 0) || (face.annotations['leftCheek'][0][2] < 0)) ? 'right' : 'left'}`);\n // }\n if (face.mesh && face.mesh.length > 0) {\n const eyeFacing = face.mesh[35][2] - face.mesh[263][2];\n if (Math.abs(eyeFacing) < 10) gestures.push('facing camera');\n else gestures.push(`facing ${eyeFacing < 0 ? 
'right' : 'left'}`);\n const openLeft = Math.abs(face.mesh[374][1] - face.mesh[386][1]) / Math.abs(face.mesh[443][1] - face.mesh[450][1]); // center of eye inner lid y coord div center of wider eye border y coord\n if (openLeft < 0.2) gestures.push('blink left eye');\n const openRight = Math.abs(face.mesh[145][1] - face.mesh[159][1]) / Math.abs(face.mesh[223][1] - face.mesh[230][1]); // center of eye inner lid y coord div center of wider eye border y coord\n if (openRight < 0.2) gestures.push('blink right eye');\n const mouthOpen = Math.min(100, 500 * Math.abs(face.mesh[13][1] - face.mesh[14][1]) / Math.abs(face.mesh[10][1] - face.mesh[152][1]));\n if (mouthOpen > 10) gestures.push(`mouth ${Math.trunc(mouthOpen)}% open`);\n const chinDepth = face.mesh[152][2];\n if (Math.abs(chinDepth) > 10) gestures.push(`head ${chinDepth < 0 ? 'up' : 'down'}`);\n }\n }\n return gestures;\n};\n\nexports.hand = (res) => {\n if (!res) return [];\n const gestures = [];\n for (const hand of res) {\n const fingers = [];\n for (const [finger, pos] of Object.entries(hand['annotations'])) {\n if (finger !== 'palmBase') fingers.push({ name: finger.toLowerCase(), position: pos[0] }); // get tip of each finger\n }\n if (fingers && fingers.length > 0) {\n const closest = fingers.reduce((best, a) => (best.position[2] < a.position[2] ? best : a));\n const highest = fingers.reduce((best, a) => (best.position[1] < a.position[1] ? best : a));\n gestures.push(`${closest.name} forward ${highest.name} up`);\n }\n }\n return gestures;\n};\n", "/* eslint-disable no-use-before-define */\n/*\nWebGLImageFilter - MIT Licensed\n2013, Dominic Szablewski - phoboslab.org\n\n*/\n\nconst WebGLProgram = function (gl, vertexSource, fragmentSource) {\n const _collect = function (source, prefix, collection) {\n const r = new RegExp('\\\\b' + prefix + ' \\\\w+ (\\\\w+)', 'ig');\n source.replace(r, (match, name) => {\n collection[name] = 0;\n return match;\n });\n };\n\n const _compile = function (source, type) {\n const shader = gl.createShader(type);\n gl.shaderSource(shader, source);\n gl.compileShader(shader);\n\n if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {\n throw new Error('Filter: GL compile failed', gl.getShaderInfoLog(shader));\n }\n return shader;\n };\n\n this.uniform = {};\n this.attribute = {};\n\n const _vsh = _compile(vertexSource, gl.VERTEX_SHADER);\n const _fsh = _compile(fragmentSource, gl.FRAGMENT_SHADER);\n\n this.id = gl.createProgram();\n gl.attachShader(this.id, _vsh);\n gl.attachShader(this.id, _fsh);\n gl.linkProgram(this.id);\n\n if (!gl.getProgramParameter(this.id, gl.LINK_STATUS)) {\n throw new Error('Filter: GL link failed', gl.getProgramInfoLog(this.id));\n }\n\n gl.useProgram(this.id);\n\n // Collect attributes\n _collect(vertexSource, 'attribute', this.attribute);\n for (const a in this.attribute) {\n this.attribute[a] = gl.getAttribLocation(this.id, a);\n }\n\n // Collect uniforms\n _collect(vertexSource, 'uniform', this.uniform);\n _collect(fragmentSource, 'uniform', this.uniform);\n for (const u in this.uniform) {\n this.uniform[u] = gl.getUniformLocation(this.id, u);\n }\n};\n\nconst WebGLImageFilter = function (params) {\n if (!params) params = { };\n let _drawCount = 0;\n let _sourceTexture = null;\n let _lastInChain = false;\n let _currentFramebufferIndex = -1;\n let _tempFramebuffers = [null, null];\n let _filterChain = [];\n let _width = -1;\n let _height = -1;\n let _vertexBuffer = null;\n let _currentProgram = null;\n const _canvas = params.canvas || document.createElement('canvas');\n\n 
// key is the shader program source, value is the compiled program\n const _shaderProgramCache = { };\n\n const gl = _canvas.getContext('webgl');\n if (!gl) throw new Error('Filter: getContext() failed');\n\n this.addFilter = function (name) {\n // eslint-disable-next-line prefer-rest-params\n const args = Array.prototype.slice.call(arguments, 1);\n const filter = _filter[name];\n\n _filterChain.push({ func: filter, args });\n };\n\n this.reset = function () {\n _filterChain = [];\n };\n\n this.apply = function (image) {\n _resize(image.width, image.height);\n _drawCount = 0;\n\n // Create the texture for the input image if we haven't yet\n if (!_sourceTexture) _sourceTexture = gl.createTexture();\n gl.bindTexture(gl.TEXTURE_2D, _sourceTexture);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);\n gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);\n\n // No filters? Just draw\n if (_filterChain.length === 0) {\n // const program = _compileShader(SHADER.FRAGMENT_IDENTITY);\n _draw();\n return _canvas;\n }\n\n for (let i = 0; i < _filterChain.length; i++) {\n _lastInChain = (i === _filterChain.length - 1);\n const f = _filterChain[i];\n f.func.apply(this, f.args || []);\n }\n\n return _canvas;\n };\n\n const _resize = function (width, height) {\n // Same width/height? Nothing to do here\n if (width === _width && height === _height) { return; }\n\n _canvas.width = width;\n _width = width;\n _canvas.height = height;\n _height = height;\n\n // Create the context if we don't have it yet\n if (!_vertexBuffer) {\n // Create the vertex buffer for the two triangles [x, y, u, v] * 6\n const vertices = new Float32Array([\n -1, -1, 0, 1, 1, -1, 1, 1, -1, 1, 0, 0,\n -1, 1, 0, 0, 1, -1, 1, 1, 1, 1, 1, 0,\n ]);\n // eslint-disable-next-line no-unused-expressions\n (_vertexBuffer = gl.createBuffer(), gl.bindBuffer(gl.ARRAY_BUFFER, _vertexBuffer));\n gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW);\n\n // Note sure if this is a good idea; at least it makes texture loading\n // in Ejecta instant.\n gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, true);\n }\n\n gl.viewport(0, 0, _width, _height);\n\n // Delete old temp framebuffers\n _tempFramebuffers = [null, null];\n };\n\n const _getTempFramebuffer = function (index) {\n _tempFramebuffers[index] = _tempFramebuffers[index]\n || _createFramebufferTexture(_width, _height);\n\n return _tempFramebuffers[index];\n };\n\n const _createFramebufferTexture = function (width, height) {\n const fbo = gl.createFramebuffer();\n gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);\n\n const renderbuffer = gl.createRenderbuffer();\n gl.bindRenderbuffer(gl.RENDERBUFFER, renderbuffer);\n\n const texture = gl.createTexture();\n gl.bindTexture(gl.TEXTURE_2D, texture);\n gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);\n\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);\n\n gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);\n\n gl.bindTexture(gl.TEXTURE_2D, null);\n gl.bindFramebuffer(gl.FRAMEBUFFER, null);\n\n 
return { fbo, texture };\n };\n\n const _draw = function (flags) {\n let source = null;\n let target = null;\n let flipY = false;\n\n // Set up the source\n if (_drawCount === 0) {\n // First draw call - use the source texture\n source = _sourceTexture;\n } else {\n // All following draw calls use the temp buffer last drawn to\n source = _getTempFramebuffer(_currentFramebufferIndex).texture;\n }\n _drawCount++;\n\n // Set up the target\n if (_lastInChain && !(flags & DRAW.INTERMEDIATE)) {\n // Last filter in our chain - draw directly to the WebGL Canvas. We may\n // also have to flip the image vertically now\n target = null;\n flipY = _drawCount % 2 === 0;\n } else {\n // Intermediate draw call - get a temp buffer to draw to\n _currentFramebufferIndex = (_currentFramebufferIndex + 1) % 2;\n target = _getTempFramebuffer(_currentFramebufferIndex).fbo;\n }\n\n // Bind the source and target and draw the two triangles\n gl.bindTexture(gl.TEXTURE_2D, source);\n gl.bindFramebuffer(gl.FRAMEBUFFER, target);\n\n gl.uniform1f(_currentProgram.uniform.flipY, (flipY ? -1 : 1));\n gl.drawArrays(gl.TRIANGLES, 0, 6);\n };\n\n const _compileShader = function (fragmentSource) {\n if (_shaderProgramCache[fragmentSource]) {\n _currentProgram = _shaderProgramCache[fragmentSource];\n gl.useProgram(_currentProgram.id);\n return _currentProgram;\n }\n\n // Compile shaders\n _currentProgram = new WebGLProgram(gl, SHADER.VERTEX_IDENTITY, fragmentSource);\n\n const floatSize = Float32Array.BYTES_PER_ELEMENT;\n const vertSize = 4 * floatSize;\n gl.enableVertexAttribArray(_currentProgram.attribute.pos);\n gl.vertexAttribPointer(_currentProgram.attribute.pos, 2, gl.FLOAT, false, vertSize, 0 * floatSize);\n gl.enableVertexAttribArray(_currentProgram.attribute.uv);\n gl.vertexAttribPointer(_currentProgram.attribute.uv, 2, gl.FLOAT, false, vertSize, 2 * floatSize);\n\n _shaderProgramCache[fragmentSource] = _currentProgram;\n return _currentProgram;\n };\n\n let DRAW = { INTERMEDIATE: 1 };\n\n let SHADER = {};\n SHADER.VERTEX_IDENTITY = [\n 'precision highp float;',\n 'attribute vec2 pos;',\n 'attribute vec2 uv;',\n 'varying vec2 vUv;',\n 'uniform float flipY;',\n\n 'void main(void) {',\n 'vUv = uv;',\n 'gl_Position = vec4(pos.x, pos.y*flipY, 0.0, 1.);',\n '}',\n ].join('\\n');\n\n SHADER.FRAGMENT_IDENTITY = [\n 'precision highp float;',\n 'varying vec2 vUv;',\n 'uniform sampler2D texture;',\n\n 'void main(void) {',\n 'gl_FragColor = texture2D(texture, vUv);',\n '}',\n ].join('\\n');\n\n let _filter = {};\n\n // -------------------------------------------------------------------------\n // Color Matrix Filter\n\n _filter.colorMatrix = function (matrix) {\n // Create a Float32 Array and normalize the offset component to 0-1\n const m = new Float32Array(matrix);\n m[4] /= 255;\n m[9] /= 255;\n m[14] /= 255;\n m[19] /= 255;\n\n // Can we ignore the alpha value? Makes things a bit faster.\n const shader = (m[18] === 1 && m[3] === 0 && m[8] === 0 && m[13] === 0 && m[15] === 0 && m[16] === 0 && m[17] === 0 && m[19] === 0)\n ? 
_filter.colorMatrix.SHADER.WITHOUT_ALPHA\n : _filter.colorMatrix.SHADER.WITH_ALPHA;\n\n const program = _compileShader(shader);\n gl.uniform1fv(program.uniform.m, m);\n _draw();\n };\n\n _filter.colorMatrix.SHADER = {};\n _filter.colorMatrix.SHADER.WITH_ALPHA = [\n 'precision highp float;',\n 'varying vec2 vUv;',\n 'uniform sampler2D texture;',\n 'uniform float m[20];',\n\n 'void main(void) {',\n 'vec4 c = texture2D(texture, vUv);',\n 'gl_FragColor.r = m[0] * c.r + m[1] * c.g + m[2] * c.b + m[3] * c.a + m[4];',\n 'gl_FragColor.g = m[5] * c.r + m[6] * c.g + m[7] * c.b + m[8] * c.a + m[9];',\n 'gl_FragColor.b = m[10] * c.r + m[11] * c.g + m[12] * c.b + m[13] * c.a + m[14];',\n 'gl_FragColor.a = m[15] * c.r + m[16] * c.g + m[17] * c.b + m[18] * c.a + m[19];',\n '}',\n ].join('\\n');\n _filter.colorMatrix.SHADER.WITHOUT_ALPHA = [\n 'precision highp float;',\n 'varying vec2 vUv;',\n 'uniform sampler2D texture;',\n 'uniform float m[20];',\n\n 'void main(void) {',\n 'vec4 c = texture2D(texture, vUv);',\n 'gl_FragColor.r = m[0] * c.r + m[1] * c.g + m[2] * c.b + m[4];',\n 'gl_FragColor.g = m[5] * c.r + m[6] * c.g + m[7] * c.b + m[9];',\n 'gl_FragColor.b = m[10] * c.r + m[11] * c.g + m[12] * c.b + m[14];',\n 'gl_FragColor.a = c.a;',\n '}',\n ].join('\\n');\n\n _filter.brightness = function (brightness) {\n const b = (brightness || 0) + 1;\n _filter.colorMatrix([\n b, 0, 0, 0, 0,\n 0, b, 0, 0, 0,\n 0, 0, b, 0, 0,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.saturation = function (amount) {\n const x = (amount || 0) * 2 / 3 + 1;\n const y = ((x - 1) * -0.5);\n _filter.colorMatrix([\n x, y, y, 0, 0,\n y, x, y, 0, 0,\n y, y, x, 0, 0,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.desaturate = function () {\n _filter.saturation(-1);\n };\n\n _filter.contrast = function (amount) {\n const v = (amount || 0) + 1;\n const o = -128 * (v - 1);\n\n _filter.colorMatrix([\n v, 0, 0, 0, o,\n 0, v, 0, 0, o,\n 0, 0, v, 0, o,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.negative = function () {\n _filter.contrast(-2);\n };\n\n _filter.hue = function (rotation) {\n rotation = (rotation || 0) / 180 * Math.PI;\n const cos = Math.cos(rotation);\n const sin = Math.sin(rotation);\n const lumR = 0.213;\n const lumG = 0.715;\n const lumB = 0.072;\n\n _filter.colorMatrix([\n lumR + cos * (1 - lumR) + sin * (-lumR), lumG + cos * (-lumG) + sin * (-lumG), lumB + cos * (-lumB) + sin * (1 - lumB), 0, 0,\n lumR + cos * (-lumR) + sin * (0.143), lumG + cos * (1 - lumG) + sin * (0.140), lumB + cos * (-lumB) + sin * (-0.283), 0, 0,\n lumR + cos * (-lumR) + sin * (-(1 - lumR)), lumG + cos * (-lumG) + sin * (lumG), lumB + cos * (1 - lumB) + sin * (lumB), 0, 0,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.desaturateLuminance = function () {\n _filter.colorMatrix([\n 0.2764723, 0.9297080, 0.0938197, 0, -37.1,\n 0.2764723, 0.9297080, 0.0938197, 0, -37.1,\n 0.2764723, 0.9297080, 0.0938197, 0, -37.1,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.sepia = function () {\n _filter.colorMatrix([\n 0.393, 0.7689999, 0.18899999, 0, 0,\n 0.349, 0.6859999, 0.16799999, 0, 0,\n 0.272, 0.5339999, 0.13099999, 0, 0,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.brownie = function () {\n _filter.colorMatrix([\n 0.5997023498159715, 0.34553243048391263, -0.2708298674538042, 0, 47.43192855600873,\n -0.037703249837783157, 0.8609577587992641, 0.15059552388459913, 0, -36.96841498319127,\n 0.24113635128153335, -0.07441037908422492, 0.44972182064877153, 0, -7.562075277591283,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.vintagePinhole = function () {\n _filter.colorMatrix([\n 0.6279345635605994, 
0.3202183420819367, -0.03965408211312453, 0, 9.651285835294123,\n 0.02578397704808868, 0.6441188644374771, 0.03259127616149294, 0, 7.462829176470591,\n 0.0466055556782719, -0.0851232987247891, 0.5241648018700465, 0, 5.159190588235296,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.kodachrome = function () {\n _filter.colorMatrix([\n 1.1285582396593525, -0.3967382283601348, -0.03992559172921793, 0, 63.72958762196502,\n -0.16404339962244616, 1.0835251566291304, -0.05498805115633132, 0, 24.732407896706203,\n -0.16786010706155763, -0.5603416277695248, 1.6014850761964943, 0, 35.62982807460946,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.technicolor = function () {\n _filter.colorMatrix([\n 1.9125277891456083, -0.8545344976951645, -0.09155508482755585, 0, 11.793603434377337,\n -0.3087833385928097, 1.7658908555458428, -0.10601743074722245, 0, -70.35205161461398,\n -0.231103377548616, -0.7501899197440212, 1.847597816108189, 0, 30.950940869491138,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.polaroid = function () {\n _filter.colorMatrix([\n 1.438, -0.062, -0.062, 0, 0,\n -0.122, 1.378, -0.122, 0, 0,\n -0.016, -0.016, 1.483, 0, 0,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.shiftToBGR = function () {\n _filter.colorMatrix([\n 0, 0, 1, 0, 0,\n 0, 1, 0, 0, 0,\n 1, 0, 0, 0, 0,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n // -------------------------------------------------------------------------\n // Convolution Filter\n\n _filter.convolution = function (matrix) {\n const m = new Float32Array(matrix);\n const pixelSizeX = 1 / _width;\n const pixelSizeY = 1 / _height;\n\n const program = _compileShader(_filter.convolution.SHADER);\n gl.uniform1fv(program.uniform.m, m);\n gl.uniform2f(program.uniform.px, pixelSizeX, pixelSizeY);\n _draw();\n };\n\n _filter.convolution.SHADER = [\n 'precision highp float;',\n 'varying vec2 vUv;',\n 'uniform sampler2D texture;',\n 'uniform vec2 px;',\n 'uniform float m[9];',\n\n 'void main(void) {',\n 'vec4 c11 = texture2D(texture, vUv - px);', // top left\n 'vec4 c12 = texture2D(texture, vec2(vUv.x, vUv.y - px.y));', // top center\n 'vec4 c13 = texture2D(texture, vec2(vUv.x + px.x, vUv.y - px.y));', // top right\n\n 'vec4 c21 = texture2D(texture, vec2(vUv.x - px.x, vUv.y) );', // mid left\n 'vec4 c22 = texture2D(texture, vUv);', // mid center\n 'vec4 c23 = texture2D(texture, vec2(vUv.x + px.x, vUv.y) );', // mid right\n\n 'vec4 c31 = texture2D(texture, vec2(vUv.x - px.x, vUv.y + px.y) );', // bottom left\n 'vec4 c32 = texture2D(texture, vec2(vUv.x, vUv.y + px.y) );', // bottom center\n 'vec4 c33 = texture2D(texture, vUv + px );', // bottom right\n\n 'gl_FragColor = ',\n 'c11 * m[0] + c12 * m[1] + c22 * m[2] +',\n 'c21 * m[3] + c22 * m[4] + c23 * m[5] +',\n 'c31 * m[6] + c32 * m[7] + c33 * m[8];',\n 'gl_FragColor.a = c22.a;',\n '}',\n ].join('\\n');\n\n _filter.detectEdges = function () {\n _filter.convolution.call(this, [\n 0, 1, 0,\n 1, -4, 1,\n 0, 1, 0,\n ]);\n };\n\n _filter.sobelX = function () {\n _filter.convolution.call(this, [\n -1, 0, 1,\n -2, 0, 2,\n -1, 0, 1,\n ]);\n };\n\n _filter.sobelY = function () {\n _filter.convolution.call(this, [\n -1, -2, -1,\n 0, 0, 0,\n 1, 2, 1,\n ]);\n };\n\n _filter.sharpen = function (amount) {\n const a = amount || 1;\n _filter.convolution.call(this, [\n 0, -1 * a, 0,\n -1 * a, 1 + 4 * a, -1 * a,\n 0, -1 * a, 0,\n ]);\n };\n\n _filter.emboss = function (size) {\n const s = size || 1;\n _filter.convolution.call(this, [\n -2 * s, -1 * s, 0,\n -1 * s, 1, 1 * s,\n 0, 1 * s, 2 * s,\n ]);\n };\n\n // 
-------------------------------------------------------------------------\n // Blur Filter\n\n _filter.blur = function (size) {\n const blurSizeX = (size / 7) / _width;\n const blurSizeY = (size / 7) / _height;\n\n const program = _compileShader(_filter.blur.SHADER);\n\n // Vertical\n gl.uniform2f(program.uniform.px, 0, blurSizeY);\n _draw(DRAW.INTERMEDIATE);\n\n // Horizontal\n gl.uniform2f(program.uniform.px, blurSizeX, 0);\n _draw();\n };\n\n _filter.blur.SHADER = [\n 'precision highp float;',\n 'varying vec2 vUv;',\n 'uniform sampler2D texture;',\n 'uniform vec2 px;',\n\n 'void main(void) {',\n 'gl_FragColor = vec4(0.0);',\n 'gl_FragColor += texture2D(texture, vUv + vec2(-7.0*px.x, -7.0*px.y))*0.0044299121055113265;',\n 'gl_FragColor += texture2D(texture, vUv + vec2(-6.0*px.x, -6.0*px.y))*0.00895781211794;',\n 'gl_FragColor += texture2D(texture, vUv + vec2(-5.0*px.x, -5.0*px.y))*0.0215963866053;',\n 'gl_FragColor += texture2D(texture, vUv + vec2(-4.0*px.x, -4.0*px.y))*0.0443683338718;',\n 'gl_FragColor += texture2D(texture, vUv + vec2(-3.0*px.x, -3.0*px.y))*0.0776744219933;',\n 'gl_FragColor += texture2D(texture, vUv + vec2(-2.0*px.x, -2.0*px.y))*0.115876621105;',\n 'gl_FragColor += texture2D(texture, vUv + vec2(-1.0*px.x, -1.0*px.y))*0.147308056121;',\n 'gl_FragColor += texture2D(texture, vUv )*0.159576912161;',\n 'gl_FragColor += texture2D(texture, vUv + vec2( 1.0*px.x, 1.0*px.y))*0.147308056121;',\n 'gl_FragColor += texture2D(texture, vUv + vec2( 2.0*px.x, 2.0*px.y))*0.115876621105;',\n 'gl_FragColor += texture2D(texture, vUv + vec2( 3.0*px.x, 3.0*px.y))*0.0776744219933;',\n 'gl_FragColor += texture2D(texture, vUv + vec2( 4.0*px.x, 4.0*px.y))*0.0443683338718;',\n 'gl_FragColor += texture2D(texture, vUv + vec2( 5.0*px.x, 5.0*px.y))*0.0215963866053;',\n 'gl_FragColor += texture2D(texture, vUv + vec2( 6.0*px.x, 6.0*px.y))*0.00895781211794;',\n 'gl_FragColor += texture2D(texture, vUv + vec2( 7.0*px.x, 7.0*px.y))*0.0044299121055113265;',\n '}',\n ].join('\\n');\n\n // -------------------------------------------------------------------------\n // Pixelate Filter\n\n _filter.pixelate = function (size) {\n const blurSizeX = (size) / _width;\n const blurSizeY = (size) / _height;\n\n const program = _compileShader(_filter.pixelate.SHADER);\n\n // Horizontal\n gl.uniform2f(program.uniform.size, blurSizeX, blurSizeY);\n _draw();\n };\n\n _filter.pixelate.SHADER = [\n 'precision highp float;',\n 'varying vec2 vUv;',\n 'uniform vec2 size;',\n 'uniform sampler2D texture;',\n\n 'vec2 pixelate(vec2 coord, vec2 size) {',\n 'return floor( coord / size ) * size;',\n '}',\n\n 'void main(void) {',\n 'gl_FragColor = vec4(0.0);',\n 'vec2 coord = pixelate(vUv, size);',\n 'gl_FragColor += texture2D(texture, coord);',\n '}',\n ].join('\\n');\n};\n\nexports.Canvas = WebGLImageFilter;\n", "import { tf } from './tf.js';\nimport * as fxImage from './imagefx.js';\n\n// internal temp canvases\nlet inCanvas = null;\nlet outCanvas = null;\n\n// process input image and return tensor\n// input can be tensor, imagedata, htmlimageelement, htmlvideoelement\n// input is resized and run through imagefx filter\nfunction process(input, config) {\n let tensor;\n if (input instanceof tf.Tensor) {\n tensor = tf.clone(input);\n } else {\n const originalWidth = input.naturalWidth || input.videoWidth || input.width || (input.shape && (input.shape[1] > 0));\n const originalHeight = input.naturalHeight || input.videoHeight || input.height || (input.shape && (input.shape[2] > 0));\n let targetWidth = originalWidth;\n let targetHeight 
= originalHeight;\n if (config.filter.width > 0) targetWidth = config.filter.width;\n else if (config.filter.height > 0) targetWidth = originalWidth * (config.filter.height / originalHeight);\n if (config.filter.height > 0) targetHeight = config.filter.height;\n else if (config.filter.width > 0) targetHeight = originalHeight * (config.filter.width / originalWidth);\n if (!inCanvas || (inCanvas.width !== targetWidth) || (inCanvas.height !== targetHeight)) {\n inCanvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement('canvas');\n if (inCanvas.width !== targetWidth) inCanvas.width = targetWidth;\n if (inCanvas.height !== targetHeight) inCanvas.height = targetHeight;\n }\n const ctx = inCanvas.getContext('2d');\n if (input instanceof ImageData) ctx.putImageData(input, 0, 0);\n else ctx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas.width, inCanvas.height);\n if (config.filter.enabled) {\n if (!this.fx || !outCanvas || (inCanvas.width !== outCanvas.width) || (inCanvas.height !== outCanvas.height)) {\n outCanvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(inCanvas.width, inCanvas.height) : document.createElement('canvas');\n if (outCanvas.width !== inCanvas.width) outCanvas.width = inCanvas.width;\n if (outCanvas.height !== inCanvas.height) outCanvas.height = inCanvas.height;\n this.fx = tf.ENV.flags.IS_BROWSER ? new fxImage.Canvas({ canvas: outCanvas }) : null; // && (typeof document !== 'undefined')\n }\n this.fx.reset();\n this.fx.addFilter('brightness', config.filter.brightness); // must have at least one filter enabled\n if (config.filter.contrast !== 0) this.fx.addFilter('contrast', config.filter.contrast);\n if (config.filter.sharpness !== 0) this.fx.addFilter('sharpen', config.filter.sharpness);\n if (config.filter.blur !== 0) this.fx.addFilter('blur', config.filter.blur);\n if (config.filter.saturation !== 0) this.fx.addFilter('saturation', config.filter.saturation);\n if (config.filter.hue !== 0) this.fx.addFilter('hue', config.filter.hue);\n if (config.filter.negative) this.fx.addFilter('negative');\n if (config.filter.sepia) this.fx.addFilter('sepia');\n if (config.filter.vintage) this.fx.addFilter('brownie');\n if (config.filter.sepia) this.fx.addFilter('sepia');\n if (config.filter.kodachrome) this.fx.addFilter('kodachrome');\n if (config.filter.technicolor) this.fx.addFilter('technicolor');\n if (config.filter.polaroid) this.fx.addFilter('polaroid');\n if (config.filter.pixelate !== 0) this.fx.addFilter('pixelate', config.filter.pixelate);\n this.fx.apply(inCanvas);\n // read pixel data\n // const gl = outCanvas.getContext('webgl');\n const gl = false;\n if (gl) {\n const glBuffer = new Uint8Array(outCanvas.width * outCanvas.height * 4);\n const pixBuffer = new Uint8Array(outCanvas.width * outCanvas.height * 3);\n gl.readPixels(0, 0, outCanvas.width, outCanvas.height, gl.RGBA, gl.UNSIGNED_BYTE, glBuffer);\n // gl returns rbga while we only need rgb, so discarding alpha channel\n // gl returns starting point as lower left, so need to invert vertical\n let i = 0;\n for (let y = outCanvas.height - 1; y >= 0; y--) {\n for (let x = 0; x < outCanvas.width; x++) {\n const index = (x + y * outCanvas.width) * 4;\n pixBuffer[i++] = glBuffer[index + 0];\n pixBuffer[i++] = glBuffer[index + 1];\n pixBuffer[i++] = glBuffer[index + 2];\n }\n }\n outCanvas.data = pixBuffer;\n }\n } else {\n outCanvas = inCanvas;\n }\n let pixels;\n if (outCanvas.data) {\n const shape = [outCanvas.height, 
outCanvas.width, 3];\n pixels = tf.tensor3d(outCanvas.data, shape, 'int32');\n } else if ((config.backend === 'webgl') || (outCanvas instanceof ImageData)) {\n // tf kernel-optimized method to get imagedata, also if input is imagedata, just use it\n pixels = tf.browser.fromPixels(outCanvas);\n } else {\n // cpu and wasm kernel does not implement efficient fromPixels method nor we can use canvas as-is, so we do a silly one more canvas\n const tempCanvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement('canvas');\n tempCanvas.width = targetWidth;\n tempCanvas.height = targetHeight;\n const tempCtx = tempCanvas.getContext('2d');\n tempCtx.drawImage(outCanvas, 0, 0);\n const data = tempCtx.getImageData(0, 0, targetWidth, targetHeight);\n pixels = tf.browser.fromPixels(data);\n }\n const casted = pixels.toFloat();\n tensor = casted.expandDims(0);\n pixels.dispose();\n casted.dispose();\n }\n return { tensor, canvas: config.filter.return ? outCanvas : null };\n}\n\nexports.process = process;\n", "// custom: bundle 3.4M\n/*\nimport * as tf from '../../../dev-clone/tfjs/tfjs/dist/tf.esnext.js';\nimport { setWasmPaths } from '@tensorflow/tfjs-backend-wasm/dist/index.js';\n\nconst loadGraphModel = tf.loadGraphModel;\nexport { tf, setWasmPaths, loadGraphModel };\n*/\n\n// monolithic: bundle 3.4M\nimport * as tf from '@tensorflow/tfjs/dist/tf.es2017.js';\nimport { setWasmPaths } from '@tensorflow/tfjs-backend-wasm/dist/index.js';\n\nconst loadGraphModel = tf.loadGraphModel;\nexport { tf, setWasmPaths, loadGraphModel };\n\n// modular: bundle 4.2M\n/*\nimport * as tf from '@tensorflow/tfjs-core/dist/tf-core.es2017.js';\nimport { loadGraphModel } from '@tensorflow/tfjs-converter/dist/tf-converter.es2017.js';\nimport * as tfCPU from '@tensorflow/tfjs-backend-cpu/dist/tf-backend-cpu.es2017.js';\nimport * as tfWebGL from '@tensorflow/tfjs-backend-webgl/dist/tf-backend-webgl.es2017.js';\nimport { setWasmPaths, version_wasm } from '@tensorflow/tfjs-backend-wasm/dist/index.js';\n\nconst version = { core: tf.version, cpu: tfCPU.version_cpu, webgl: tfWebGL.version_webgl, wasm: version_wasm };\n\nexport { tf, setWasmPaths, loadGraphModel, version };\n*/\n", null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, "export const wasmWorkerContents = 'var threadInfoStruct=0;var selfThreadId=0;var parentThreadId=0;var Module={};function threadPrintErr(){var text=Array.prototype.slice.call(arguments).join(\" 
\");console.error(text)}function threadAlert(){var text=Array.prototype.slice.call(arguments).join(\" \");postMessage({cmd:\"alert\",text:text,threadId:selfThreadId})}var err=threadPrintErr;this.alert=threadAlert;Module[\"instantiateWasm\"]=function(info,receiveInstance){var instance=new WebAssembly.Instance(Module[\"wasmModule\"],info);Module[\"wasmModule\"]=null;receiveInstance(instance);return instance.exports};this.onmessage=function(e){try{if(e.data.cmd===\"load\"){Module[\"DYNAMIC_BASE\"]=e.data.DYNAMIC_BASE;Module[\"DYNAMICTOP_PTR\"]=e.data.DYNAMICTOP_PTR;Module[\"wasmModule\"]=e.data.wasmModule;Module[\"wasmMemory\"]=e.data.wasmMemory;Module[\"buffer\"]=Module[\"wasmMemory\"].buffer;Module[\"ENVIRONMENT_IS_PTHREAD\"]=true;if(typeof e.data.urlOrBlob===\"string\"){importScripts(e.data.urlOrBlob)}else{var objectUrl=URL.createObjectURL(e.data.urlOrBlob);importScripts(objectUrl);URL.revokeObjectURL(objectUrl)}Module=WasmBackendModuleThreadedSimd(Module);postMessage({\"cmd\":\"loaded\"})}else if(e.data.cmd===\"objectTransfer\"){Module[\"PThread\"].receiveObjectTransfer(e.data)}else if(e.data.cmd===\"run\"){Module[\"__performance_now_clock_drift\"]=performance.now()-e.data.time;threadInfoStruct=e.data.threadInfoStruct;Module[\"__register_pthread_ptr\"](threadInfoStruct,0,0);selfThreadId=e.data.selfThreadId;parentThreadId=e.data.parentThreadId;var max=e.data.stackBase;var top=e.data.stackBase+e.data.stackSize;Module[\"establishStackSpace\"](top,max);Module[\"_emscripten_tls_init\"]();Module[\"PThread\"].receiveObjectTransfer(e.data);Module[\"PThread\"].setThreadStatus(Module[\"_pthread_self\"](),1);try{var result=Module[\"dynCall_ii\"](e.data.start_routine,e.data.arg);if(!Module[\"getNoExitRuntime\"]())Module[\"PThread\"].threadExit(result)}catch(ex){if(ex===\"Canceled!\"){Module[\"PThread\"].threadCancel()}else if(ex!=\"unwind\"){Atomics.store(Module[\"HEAPU32\"],threadInfoStruct+4>>2,ex instanceof Module[\"ExitStatus\"]?ex.status:-2);Atomics.store(Module[\"HEAPU32\"],threadInfoStruct+0>>2,1);Module[\"_emscripten_futex_wake\"](threadInfoStruct+0,2147483647);if(!(ex instanceof Module[\"ExitStatus\"]))throw ex}}}else if(e.data.cmd===\"cancel\"){if(threadInfoStruct){Module[\"PThread\"].threadCancel()}}else if(e.data.target===\"setimmediate\"){}else if(e.data.cmd===\"processThreadQueue\"){if(threadInfoStruct){Module[\"_emscripten_current_thread_process_queued_calls\"]()}}else{err(\"worker.js received unknown command \"+e.data.cmd);err(e.data)}}catch(ex){err(\"worker.js onmessage() captured an uncaught exception: \"+ex);if(ex.stack)err(ex.stack);throw ex}};if(typeof process===\"object\"&&typeof process.versions===\"object\"&&typeof process.versions.node===\"string\"){self={location:{href:__filename}};var onmessage=this.onmessage;var nodeWorkerThreads=require(\"worker_threads\");Worker=nodeWorkerThreads.Worker;var parentPort=nodeWorkerThreads.parentPort;parentPort.on(\"message\",function(data){onmessage({data:data})});var nodeFS=require(\"fs\");var nodeRead=function(filename){return nodeFS.readFileSync(filename,\"utf8\")};function globalEval(x){global.require=require;global.Module=Module;eval.call(null,x)}importScripts=function(f){globalEval(nodeRead(f))};postMessage=function(msg){parentPort.postMessage(msg)};if(typeof performance===\"undefined\"){performance={now:function(){return Date.now()}}}}';", null, null, null, null, "// custom: bundle 3.4M\n/*\nimport * as tf from '../../../dev-clone/tfjs/tfjs/dist/tf.esnext.js';\nimport { setWasmPaths } from 
'@tensorflow/tfjs-backend-wasm/dist/index.js';\n\nconst loadGraphModel = tf.loadGraphModel;\nexport { tf, setWasmPaths, loadGraphModel };\n*/\n\n// monolithic: bundle 3.4M\nimport * as tf from '@tensorflow/tfjs/dist/tf.es2017.js';\nimport { setWasmPaths } from '@tensorflow/tfjs-backend-wasm/dist/index.js';\n\nconst loadGraphModel = tf.loadGraphModel;\nexport { tf, setWasmPaths, loadGraphModel };\n\n// modular: bundle 4.2M\n/*\nimport * as tf from '@tensorflow/tfjs-core/dist/tf-core.es2017.js';\nimport { loadGraphModel } from '@tensorflow/tfjs-converter/dist/tf-converter.es2017.js';\nimport * as tfCPU from '@tensorflow/tfjs-backend-cpu/dist/tf-backend-cpu.es2017.js';\nimport * as tfWebGL from '@tensorflow/tfjs-backend-webgl/dist/tf-backend-webgl.es2017.js';\nimport { setWasmPaths, version_wasm } from '@tensorflow/tfjs-backend-wasm/dist/index.js';\n\nconst version = { core: tf.version, cpu: tfCPU.version_cpu, webgl: tfWebGL.version_webgl, wasm: version_wasm };\n\nexport { tf, setWasmPaths, loadGraphModel, version };\n*/\n", "export default [\n 127, 34, 139, 11, 0, 37, 232, 231, 120, 72, 37, 39, 128, 121, 47, 232, 121,\n 128, 104, 69, 67, 175, 171, 148, 157, 154, 155, 118, 50, 101, 73, 39, 40, 9,\n 151, 108, 48, 115, 131, 194, 204, 211, 74, 40, 185, 80, 42, 183, 40, 92,\n 186, 230, 229, 118, 202, 212, 214, 83, 18, 17, 76, 61, 146, 160, 29, 30, 56,\n 157, 173, 106, 204, 194, 135, 214, 192, 203, 165, 98, 21, 71, 68, 51, 45, 4,\n 144, 24, 23, 77, 146, 91, 205, 50, 187, 201, 200, 18, 91, 106, 182, 90, 91,\n 181, 85, 84, 17, 206, 203, 36, 148, 171, 140, 92, 40, 39, 193, 189, 244,\n 159, 158, 28, 247, 246, 161, 236, 3, 196, 54, 68, 104, 193, 168, 8, 117,\n 228, 31, 189, 193, 55, 98, 97, 99, 126, 47, 100, 166, 79, 218, 155, 154, 26,\n 209, 49, 131, 135, 136, 150, 47, 126, 217, 223, 52, 53, 45, 51, 134, 211,\n 170, 140, 67, 69, 108, 43, 106, 91, 230, 119, 120, 226, 130, 247, 63, 53,\n 52, 238, 20, 242, 46, 70, 156, 78, 62, 96, 46, 53, 63, 143, 34, 227, 173,\n 155, 133, 123, 117, 111, 44, 125, 19, 236, 134, 51, 216, 206, 205, 154, 153,\n 22, 39, 37, 167, 200, 201, 208, 36, 142, 100, 57, 212, 202, 20, 60, 99, 28,\n 158, 157, 35, 226, 113, 160, 159, 27, 204, 202, 210, 113, 225, 46, 43, 202,\n 204, 62, 76, 77, 137, 123, 116, 41, 38, 72, 203, 129, 142, 64, 98, 240, 49,\n 102, 64, 41, 73, 74, 212, 216, 207, 42, 74, 184, 169, 170, 211, 170, 149,\n 176, 105, 66, 69, 122, 6, 168, 123, 147, 187, 96, 77, 90, 65, 55, 107, 89,\n 90, 180, 101, 100, 120, 63, 105, 104, 93, 137, 227, 15, 86, 85, 129, 102,\n 49, 14, 87, 86, 55, 8, 9, 100, 47, 121, 145, 23, 22, 88, 89, 179, 6, 122,\n 196, 88, 95, 96, 138, 172, 136, 215, 58, 172, 115, 48, 219, 42, 80, 81, 195,\n 3, 51, 43, 146, 61, 171, 175, 199, 81, 82, 38, 53, 46, 225, 144, 163, 110,\n 246, 33, 7, 52, 65, 66, 229, 228, 117, 34, 127, 234, 107, 108, 69, 109, 108,\n 151, 48, 64, 235, 62, 78, 191, 129, 209, 126, 111, 35, 143, 163, 161, 246,\n 117, 123, 50, 222, 65, 52, 19, 125, 141, 221, 55, 65, 3, 195, 197, 25, 7,\n 33, 220, 237, 44, 70, 71, 139, 122, 193, 245, 247, 130, 33, 71, 21, 162,\n 153, 158, 159, 170, 169, 150, 188, 174, 196, 216, 186, 92, 144, 160, 161, 2,\n 97, 167, 141, 125, 241, 164, 167, 37, 72, 38, 12, 145, 159, 160, 38, 82, 13,\n 63, 68, 71, 226, 35, 111, 158, 153, 154, 101, 50, 205, 206, 92, 165, 209,\n 198, 217, 165, 167, 97, 220, 115, 218, 133, 112, 243, 239, 238, 241, 214,\n 135, 169, 190, 173, 133, 171, 208, 32, 125, 44, 237, 86, 87, 178, 85, 86,\n 179, 84, 85, 180, 83, 84, 181, 201, 83, 182, 137, 93, 132, 76, 62, 183, 61,\n 76, 184, 57, 61, 185, 212, 
57, 186, 214, 207, 187, 34, 143, 156, 79, 239,\n 237, 123, 137, 177, 44, 1, 4, 201, 194, 32, 64, 102, 129, 213, 215, 138, 59,\n 166, 219, 242, 99, 97, 2, 94, 141, 75, 59, 235, 24, 110, 228, 25, 130, 226,\n 23, 24, 229, 22, 23, 230, 26, 22, 231, 112, 26, 232, 189, 190, 243, 221, 56,\n 190, 28, 56, 221, 27, 28, 222, 29, 27, 223, 30, 29, 224, 247, 30, 225, 238,\n 79, 20, 166, 59, 75, 60, 75, 240, 147, 177, 215, 20, 79, 166, 187, 147, 213,\n 112, 233, 244, 233, 128, 245, 128, 114, 188, 114, 217, 174, 131, 115, 220,\n 217, 198, 236, 198, 131, 134, 177, 132, 58, 143, 35, 124, 110, 163, 7, 228,\n 110, 25, 356, 389, 368, 11, 302, 267, 452, 350, 349, 302, 303, 269, 357,\n 343, 277, 452, 453, 357, 333, 332, 297, 175, 152, 377, 384, 398, 382, 347,\n 348, 330, 303, 304, 270, 9, 336, 337, 278, 279, 360, 418, 262, 431, 304,\n 408, 409, 310, 415, 407, 270, 409, 410, 450, 348, 347, 422, 430, 434, 313,\n 314, 17, 306, 307, 375, 387, 388, 260, 286, 414, 398, 335, 406, 418, 364,\n 367, 416, 423, 358, 327, 251, 284, 298, 281, 5, 4, 373, 374, 253, 307, 320,\n 321, 425, 427, 411, 421, 313, 18, 321, 405, 406, 320, 404, 405, 315, 16, 17,\n 426, 425, 266, 377, 400, 369, 322, 391, 269, 417, 465, 464, 386, 257, 258,\n 466, 260, 388, 456, 399, 419, 284, 332, 333, 417, 285, 8, 346, 340, 261,\n 413, 441, 285, 327, 460, 328, 355, 371, 329, 392, 439, 438, 382, 341, 256,\n 429, 420, 360, 364, 394, 379, 277, 343, 437, 443, 444, 283, 275, 440, 363,\n 431, 262, 369, 297, 338, 337, 273, 375, 321, 450, 451, 349, 446, 342, 467,\n 293, 334, 282, 458, 461, 462, 276, 353, 383, 308, 324, 325, 276, 300, 293,\n 372, 345, 447, 382, 398, 362, 352, 345, 340, 274, 1, 19, 456, 248, 281, 436,\n 427, 425, 381, 256, 252, 269, 391, 393, 200, 199, 428, 266, 330, 329, 287,\n 273, 422, 250, 462, 328, 258, 286, 384, 265, 353, 342, 387, 259, 257, 424,\n 431, 430, 342, 353, 276, 273, 335, 424, 292, 325, 307, 366, 447, 345, 271,\n 303, 302, 423, 266, 371, 294, 455, 460, 279, 278, 294, 271, 272, 304, 432,\n 434, 427, 272, 407, 408, 394, 430, 431, 395, 369, 400, 334, 333, 299, 351,\n 417, 168, 352, 280, 411, 325, 319, 320, 295, 296, 336, 319, 403, 404, 330,\n 348, 349, 293, 298, 333, 323, 454, 447, 15, 16, 315, 358, 429, 279, 14, 15,\n 316, 285, 336, 9, 329, 349, 350, 374, 380, 252, 318, 402, 403, 6, 197, 419,\n 318, 319, 325, 367, 364, 365, 435, 367, 397, 344, 438, 439, 272, 271, 311,\n 195, 5, 281, 273, 287, 291, 396, 428, 199, 311, 271, 268, 283, 444, 445,\n 373, 254, 339, 263, 466, 249, 282, 334, 296, 449, 347, 346, 264, 447, 454,\n 336, 296, 299, 338, 10, 151, 278, 439, 455, 292, 407, 415, 358, 371, 355,\n 340, 345, 372, 390, 249, 466, 346, 347, 280, 442, 443, 282, 19, 94, 370,\n 441, 442, 295, 248, 419, 197, 263, 255, 359, 440, 275, 274, 300, 383, 368,\n 351, 412, 465, 263, 467, 466, 301, 368, 389, 380, 374, 386, 395, 378, 379,\n 412, 351, 419, 436, 426, 322, 373, 390, 388, 2, 164, 393, 370, 462, 461,\n 164, 0, 267, 302, 11, 12, 374, 373, 387, 268, 12, 13, 293, 300, 301, 446,\n 261, 340, 385, 384, 381, 330, 266, 425, 426, 423, 391, 429, 355, 437, 391,\n 327, 326, 440, 457, 438, 341, 382, 362, 459, 457, 461, 434, 430, 394, 414,\n 463, 362, 396, 369, 262, 354, 461, 457, 316, 403, 402, 315, 404, 403, 314,\n 405, 404, 313, 406, 405, 421, 418, 406, 366, 401, 361, 306, 408, 407, 291,\n 409, 408, 287, 410, 409, 432, 436, 410, 434, 416, 411, 264, 368, 383, 309,\n 438, 457, 352, 376, 401, 274, 275, 4, 421, 428, 262, 294, 327, 358, 433,\n 416, 367, 289, 455, 439, 462, 370, 326, 2, 326, 370, 305, 460, 455, 254,\n 449, 448, 255, 261, 446, 253, 450, 449, 
252, 451, 450, 256, 452, 451, 341,\n 453, 452, 413, 464, 463, 441, 413, 414, 258, 442, 441, 257, 443, 442, 259,\n 444, 443, 260, 445, 444, 467, 342, 445, 459, 458, 250, 289, 392, 290, 290,\n 328, 460, 376, 433, 435, 250, 290, 392, 411, 416, 433, 341, 463, 464, 453,\n 464, 465, 357, 465, 412, 343, 412, 399, 360, 363, 440, 437, 399, 456, 420,\n 456, 363, 401, 435, 288, 372, 383, 353, 339, 255, 249, 448, 261, 255, 133,\n 243, 190, 133, 155, 112, 33, 246, 247, 33, 130, 25, 398, 384, 286, 362, 398,\n 414, 362, 463, 341, 263, 359, 467, 263, 249, 255, 466, 467, 260, 75, 60,\n 166, 238, 239, 79, 162, 127, 139, 72, 11, 37, 121, 232, 120, 73, 72, 39,\n 114, 128, 47, 233, 232, 128, 103, 104, 67, 152, 175, 148, 173, 157, 155,\n 119, 118, 101, 74, 73, 40, 107, 9, 108, 49, 48, 131, 32, 194, 211, 184, 74,\n 185, 191, 80, 183, 185, 40, 186, 119, 230, 118, 210, 202, 214, 84, 83, 17,\n 77, 76, 146, 161, 160, 30, 190, 56, 173, 182, 106, 194, 138, 135, 192, 129,\n 203, 98, 54, 21, 68, 5, 51, 4, 145, 144, 23, 90, 77, 91, 207, 205, 187, 83,\n 201, 18, 181, 91, 182, 180, 90, 181, 16, 85, 17, 205, 206, 36, 176, 148,\n 140, 165, 92, 39, 245, 193, 244, 27, 159, 28, 30, 247, 161, 174, 236, 196,\n 103, 54, 104, 55, 193, 8, 111, 117, 31, 221, 189, 55, 240, 98, 99, 142, 126,\n 100, 219, 166, 218, 112, 155, 26, 198, 209, 131, 169, 135, 150, 114, 47,\n 217, 224, 223, 53, 220, 45, 134, 32, 211, 140, 109, 67, 108, 146, 43, 91,\n 231, 230, 120, 113, 226, 247, 105, 63, 52, 241, 238, 242, 124, 46, 156, 95,\n 78, 96, 70, 46, 63, 116, 143, 227, 116, 123, 111, 1, 44, 19, 3, 236, 51,\n 207, 216, 205, 26, 154, 22, 165, 39, 167, 199, 200, 208, 101, 36, 100, 43,\n 57, 202, 242, 20, 99, 56, 28, 157, 124, 35, 113, 29, 160, 27, 211, 204, 210,\n 124, 113, 46, 106, 43, 204, 96, 62, 77, 227, 137, 116, 73, 41, 72, 36, 203,\n 142, 235, 64, 240, 48, 49, 64, 42, 41, 74, 214, 212, 207, 183, 42, 184, 210,\n 169, 211, 140, 170, 176, 104, 105, 69, 193, 122, 168, 50, 123, 187, 89, 96,\n 90, 66, 65, 107, 179, 89, 180, 119, 101, 120, 68, 63, 104, 234, 93, 227, 16,\n 15, 85, 209, 129, 49, 15, 14, 86, 107, 55, 9, 120, 100, 121, 153, 145, 22,\n 178, 88, 179, 197, 6, 196, 89, 88, 96, 135, 138, 136, 138, 215, 172, 218,\n 115, 219, 41, 42, 81, 5, 195, 51, 57, 43, 61, 208, 171, 199, 41, 81, 38,\n 224, 53, 225, 24, 144, 110, 105, 52, 66, 118, 229, 117, 227, 34, 234, 66,\n 107, 69, 10, 109, 151, 219, 48, 235, 183, 62, 191, 142, 129, 126, 116, 111,\n 143, 7, 163, 246, 118, 117, 50, 223, 222, 52, 94, 19, 141, 222, 221, 65,\n 196, 3, 197, 45, 220, 44, 156, 70, 139, 188, 122, 245, 139, 71, 162, 145,\n 153, 159, 149, 170, 150, 122, 188, 196, 206, 216, 92, 163, 144, 161, 164, 2,\n 167, 242, 141, 241, 0, 164, 37, 11, 72, 12, 144, 145, 160, 12, 38, 13, 70,\n 63, 71, 31, 226, 111, 157, 158, 154, 36, 101, 205, 203, 206, 165, 126, 209,\n 217, 98, 165, 97, 237, 220, 218, 237, 239, 241, 210, 214, 169, 140, 171, 32,\n 241, 125, 237, 179, 86, 178, 180, 85, 179, 181, 84, 180, 182, 83, 181, 194,\n 201, 182, 177, 137, 132, 184, 76, 183, 185, 61, 184, 186, 57, 185, 216, 212,\n 186, 192, 214, 187, 139, 34, 156, 218, 79, 237, 147, 123, 177, 45, 44, 4,\n 208, 201, 32, 98, 64, 129, 192, 213, 138, 235, 59, 219, 141, 242, 97, 97, 2,\n 141, 240, 75, 235, 229, 24, 228, 31, 25, 226, 230, 23, 229, 231, 22, 230,\n 232, 26, 231, 233, 112, 232, 244, 189, 243, 189, 221, 190, 222, 28, 221,\n 223, 27, 222, 224, 29, 223, 225, 30, 224, 113, 247, 225, 99, 60, 240, 213,\n 147, 215, 60, 20, 166, 192, 187, 213, 243, 112, 244, 244, 233, 245, 245,\n 128, 188, 188, 114, 174, 134, 131, 220, 174, 217, 
236, 236, 198, 134, 215,\n 177, 58, 156, 143, 124, 25, 110, 7, 31, 228, 25, 264, 356, 368, 0, 11, 267,\n 451, 452, 349, 267, 302, 269, 350, 357, 277, 350, 452, 357, 299, 333, 297,\n 396, 175, 377, 381, 384, 382, 280, 347, 330, 269, 303, 270, 151, 9, 337,\n 344, 278, 360, 424, 418, 431, 270, 304, 409, 272, 310, 407, 322, 270, 410,\n 449, 450, 347, 432, 422, 434, 18, 313, 17, 291, 306, 375, 259, 387, 260,\n 424, 335, 418, 434, 364, 416, 391, 423, 327, 301, 251, 298, 275, 281, 4,\n 254, 373, 253, 375, 307, 321, 280, 425, 411, 200, 421, 18, 335, 321, 406,\n 321, 320, 405, 314, 315, 17, 423, 426, 266, 396, 377, 369, 270, 322, 269,\n 413, 417, 464, 385, 386, 258, 248, 456, 419, 298, 284, 333, 168, 417, 8,\n 448, 346, 261, 417, 413, 285, 326, 327, 328, 277, 355, 329, 309, 392, 438,\n 381, 382, 256, 279, 429, 360, 365, 364, 379, 355, 277, 437, 282, 443, 283,\n 281, 275, 363, 395, 431, 369, 299, 297, 337, 335, 273, 321, 348, 450, 349,\n 359, 446, 467, 283, 293, 282, 250, 458, 462, 300, 276, 383, 292, 308, 325,\n 283, 276, 293, 264, 372, 447, 346, 352, 340, 354, 274, 19, 363, 456, 281,\n 426, 436, 425, 380, 381, 252, 267, 269, 393, 421, 200, 428, 371, 266, 329,\n 432, 287, 422, 290, 250, 328, 385, 258, 384, 446, 265, 342, 386, 387, 257,\n 422, 424, 430, 445, 342, 276, 422, 273, 424, 306, 292, 307, 352, 366, 345,\n 268, 271, 302, 358, 423, 371, 327, 294, 460, 331, 279, 294, 303, 271, 304,\n 436, 432, 427, 304, 272, 408, 395, 394, 431, 378, 395, 400, 296, 334, 299,\n 6, 351, 168, 376, 352, 411, 307, 325, 320, 285, 295, 336, 320, 319, 404,\n 329, 330, 349, 334, 293, 333, 366, 323, 447, 316, 15, 315, 331, 358, 279,\n 317, 14, 316, 8, 285, 9, 277, 329, 350, 253, 374, 252, 319, 318, 403, 351,\n 6, 419, 324, 318, 325, 397, 367, 365, 288, 435, 397, 278, 344, 439, 310,\n 272, 311, 248, 195, 281, 375, 273, 291, 175, 396, 199, 312, 311, 268, 276,\n 283, 445, 390, 373, 339, 295, 282, 296, 448, 449, 346, 356, 264, 454, 337,\n 336, 299, 337, 338, 151, 294, 278, 455, 308, 292, 415, 429, 358, 355, 265,\n 340, 372, 388, 390, 466, 352, 346, 280, 295, 442, 282, 354, 19, 370, 285,\n 441, 295, 195, 248, 197, 457, 440, 274, 301, 300, 368, 417, 351, 465, 251,\n 301, 389, 385, 380, 386, 394, 395, 379, 399, 412, 419, 410, 436, 322, 387,\n 373, 388, 326, 2, 393, 354, 370, 461, 393, 164, 267, 268, 302, 12, 386, 374,\n 387, 312, 268, 13, 298, 293, 301, 265, 446, 340, 380, 385, 381, 280, 330,\n 425, 322, 426, 391, 420, 429, 437, 393, 391, 326, 344, 440, 438, 458, 459,\n 461, 364, 434, 394, 428, 396, 262, 274, 354, 457, 317, 316, 402, 316, 315,\n 403, 315, 314, 404, 314, 313, 405, 313, 421, 406, 323, 366, 361, 292, 306,\n 407, 306, 291, 408, 291, 287, 409, 287, 432, 410, 427, 434, 411, 372, 264,\n 383, 459, 309, 457, 366, 352, 401, 1, 274, 4, 418, 421, 262, 331, 294, 358,\n 435, 433, 367, 392, 289, 439, 328, 462, 326, 94, 2, 370, 289, 305, 455, 339,\n 254, 448, 359, 255, 446, 254, 253, 449, 253, 252, 450, 252, 256, 451, 256,\n 341, 452, 414, 413, 463, 286, 441, 414, 286, 258, 441, 258, 257, 442, 257,\n 259, 443, 259, 260, 444, 260, 467, 445, 309, 459, 250, 305, 289, 290, 305,\n 290, 460, 401, 376, 435, 309, 250, 392, 376, 411, 433, 453, 341, 464, 357,\n 453, 465, 343, 357, 412, 437, 343, 399, 344, 360, 440, 420, 437, 456, 360,\n 420, 363, 361, 401, 288, 265, 372, 353, 390, 339, 249, 339, 448, 255];\n", "import { tf, setWasmPaths } from './tf.js';\nimport * as facemesh from './face/facemesh.js';\nimport * as age from './age/age.js';\nimport * as gender from './gender/gender.js';\nimport * as emotion from 
'./emotion/emotion.js';\nimport * as posenet from './body/posenet.js';\nimport * as handpose from './hand/handpose.js';\nimport * as gesture from './gesture.js';\nimport * as image from './image.js';\nimport * as profile from './profile.js';\nimport * as config from '../config.js';\nimport * as app from '../package.json';\n\n// static config override for non-video detection\nconst disableSkipFrames = {\n face: { detector: { skipFrames: 0 }, age: { skipFrames: 0 }, gender: { skipFrames: 0 }, emotion: { skipFrames: 0 } }, hand: { skipFrames: 0 },\n};\n\n// helper function: gets elapsed time on both browser and nodejs\nconst now = () => {\n if (typeof performance !== 'undefined') return performance.now();\n return parseInt(Number(process.hrtime.bigint()) / 1000 / 1000);\n};\n\n// helper function: perform deep merge of multiple objects so it allows full inheriance with overrides\nfunction mergeDeep(...objects) {\n const isObject = (obj) => obj && typeof obj === 'object';\n return objects.reduce((prev, obj) => {\n Object.keys(obj || {}).forEach((key) => {\n const pVal = prev[key];\n const oVal = obj[key];\n if (Array.isArray(pVal) && Array.isArray(oVal)) {\n prev[key] = pVal.concat(...oVal);\n } else if (isObject(pVal) && isObject(oVal)) {\n prev[key] = mergeDeep(pVal, oVal);\n } else {\n prev[key] = oVal;\n }\n });\n return prev;\n }, {});\n}\n\nclass Human {\n constructor(userConfig = {}) {\n this.tf = tf;\n this.version = app.version;\n this.config = mergeDeep(config.default, userConfig);\n this.fx = null;\n this.state = 'idle';\n this.numTensors = 0;\n this.analyzeMemoryLeaks = false;\n this.checkSanity = false;\n this.firstRun = true;\n this.perf = {};\n // object that contains all initialized models\n this.models = {\n facemesh: null,\n posenet: null,\n handpose: null,\n iris: null,\n age: null,\n gender: null,\n emotion: null,\n };\n // export raw access to underlying models\n this.facemesh = facemesh;\n this.age = age;\n this.gender = gender;\n this.emotion = emotion;\n this.body = posenet;\n this.hand = handpose;\n }\n\n // helper function: wrapper around console output\n log(...msg) {\n // eslint-disable-next-line no-console\n if (msg && this.config.console) console.log('Human:', ...msg);\n }\n\n profile() {\n if (this.config.profile) return profile.data;\n return {};\n }\n\n // helper function: measure tensor leak\n analyze(...msg) {\n if (!this.analyzeMemoryLeaks) return;\n const current = tf.engine().state.numTensors;\n const previous = this.numTensors;\n this.numTensors = current;\n const leaked = current - previous;\n if (leaked !== 0) this.log(...msg, leaked);\n }\n\n // quick sanity check on inputs\n sanity(input) {\n if (!this.checkSanity) return null;\n if (!input) return 'input is not defined';\n if (tf.ENV.flags.IS_NODE && !(input instanceof tf.Tensor)) {\n return 'input must be a tensor';\n }\n try {\n tf.getBackend();\n } catch {\n return 'backend not loaded';\n }\n return null;\n }\n\n // preload models, not explicitly required as it's done automatically on first use\n async load(userConfig) {\n this.state = 'load';\n const timeStamp = now();\n if (userConfig) this.config = mergeDeep(this.config, userConfig);\n\n if (this.firstRun) {\n this.checkBackend(true);\n this.log(`version: ${this.version} TensorFlow/JS version: ${tf.version_core}`);\n this.log('configuration:', this.config);\n this.log('flags:', tf.ENV.flags);\n this.firstRun = false;\n }\n if (this.config.async) {\n [\n this.models.facemesh,\n this.models.age,\n this.models.gender,\n this.models.emotion,\n 
this.models.posenet,\n this.models.handpose,\n ] = await Promise.all([\n this.models.facemesh || (this.config.face.enabled ? facemesh.load(this.config.face) : null),\n this.models.age || ((this.config.face.enabled && this.config.face.age.enabled) ? age.load(this.config) : null),\n this.models.gender || ((this.config.face.enabled && this.config.face.gender.enabled) ? gender.load(this.config) : null),\n this.models.emotion || ((this.config.face.enabled && this.config.face.emotion.enabled) ? emotion.load(this.config) : null),\n this.models.posenet || (this.config.body.enabled ? posenet.load(this.config) : null),\n this.models.handpose || (this.config.hand.enabled ? handpose.load(this.config.hand) : null),\n ]);\n } else {\n if (this.config.face.enabled && !this.models.facemesh) this.models.facemesh = await facemesh.load(this.config.face);\n if (this.config.face.enabled && this.config.face.age.enabled && !this.models.age) this.models.age = await age.load(this.config);\n if (this.config.face.enabled && this.config.face.gender.enabled && !this.models.gender) this.models.gender = await gender.load(this.config);\n if (this.config.face.enabled && this.config.face.emotion.enabled && !this.models.emotion) this.models.emotion = await emotion.load(this.config);\n if (this.config.body.enabled && !this.models.posenet) this.models.posenet = await posenet.load(this.config);\n if (this.config.hand.enabled && !this.models.handpose) this.models.handpose = await handpose.load(this.config.hand);\n }\n const current = Math.trunc(now() - timeStamp);\n if (current > (this.perf.load || 0)) this.perf.load = current;\n }\n\n // check if backend needs initialization if it changed\n async checkBackend(force) {\n const timeStamp = now();\n if (this.config.backend && (this.config.backend !== '') && force || (tf.getBackend() !== this.config.backend)) {\n this.state = 'backend';\n /* force backend reload\n if (this.config.backend in tf.engine().registry) {\n const backendFactory = tf.findBackendFactory(this.config.backend);\n tf.removeBackend(this.config.backend);\n tf.registerBackend(this.config.backend, backendFactory);\n } else {\n this.log('Backend not registred:', this.config.backend);\n }\n */\n\n this.log('setting backend:', this.config.backend);\n\n if (this.config.backend === 'wasm') {\n this.log('settings wasm path:', this.config.wasmPath);\n setWasmPaths(this.config.wasmPath);\n const simd = await tf.env().getAsync('WASM_HAS_SIMD_SUPPORT');\n if (!simd) this.log('warning: wasm simd support is not enabled');\n }\n\n await tf.setBackend(this.config.backend);\n tf.enableProdMode();\n /* debug mode is really too mcuh\n tf.enableDebugMode();\n */\n if (this.config.backend === 'webgl') {\n if (this.config.deallocate) {\n this.log('changing webgl: WEBGL_DELETE_TEXTURE_THRESHOLD:', this.config.deallocate);\n tf.ENV.set('WEBGL_DELETE_TEXTURE_THRESHOLD', this.config.deallocate ? 
0 : -1);\n }\n // tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true);\n tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', true);\n }\n await tf.ready();\n }\n const current = Math.trunc(now() - timeStamp);\n if (current > (this.perf.backend || 0)) this.perf.backend = current;\n }\n\n async detectFace(input) {\n // run facemesh, includes blazeface and iris\n // eslint-disable-next-line no-async-promise-executor\n let timeStamp;\n let ageRes;\n let genderRes;\n let emotionRes;\n const faceRes = [];\n this.state = 'run:face';\n timeStamp = now();\n const faces = await this.models.facemesh.estimateFaces(input, this.config.face);\n this.perf.face = Math.trunc(now() - timeStamp);\n for (const face of faces) {\n this.analyze('Get Face');\n // is something went wrong, skip the face\n if (!face.image || face.image.isDisposedInternal) {\n this.log('Face object is disposed:', face.image);\n continue;\n }\n // run age, inherits face from blazeface\n this.analyze('Start Age:');\n if (this.config.async) {\n ageRes = this.config.face.age.enabled ? age.predict(face.image, this.config) : {};\n } else {\n this.state = 'run:age';\n timeStamp = now();\n ageRes = this.config.face.age.enabled ? await age.predict(face.image, this.config) : {};\n this.perf.age = Math.trunc(now() - timeStamp);\n }\n\n // run gender, inherits face from blazeface\n this.analyze('Start Gender:');\n if (this.config.async) {\n genderRes = this.config.face.gender.enabled ? gender.predict(face.image, this.config) : {};\n } else {\n this.state = 'run:gender';\n timeStamp = now();\n genderRes = this.config.face.gender.enabled ? await gender.predict(face.image, this.config) : {};\n this.perf.gender = Math.trunc(now() - timeStamp);\n }\n // run emotion, inherits face from blazeface\n this.analyze('Start Emotion:');\n if (this.config.async) {\n emotionRes = this.config.face.emotion.enabled ? emotion.predict(face.image, this.config) : {};\n } else {\n this.state = 'run:emotion';\n timeStamp = now();\n emotionRes = this.config.face.emotion.enabled ? await emotion.predict(face.image, this.config) : {};\n this.perf.emotion = Math.trunc(now() - timeStamp);\n }\n this.analyze('End Emotion:');\n\n // if async wait for results\n if (this.config.async) {\n [ageRes, genderRes, emotionRes] = await Promise.all([ageRes, genderRes, emotionRes]);\n }\n\n this.analyze('Finish Face:');\n // dont need face anymore\n face.image.dispose();\n\n // calculate iris distance\n // iris: array[ center, left, top, right, bottom]\n const irisSize = (face.annotations.leftEyeIris && face.annotations.rightEyeIris)\n /* average human iris size is 11.7mm */\n ? 11.7 * Math.max(Math.abs(face.annotations.leftEyeIris[3][0] - face.annotations.leftEyeIris[1][0]), Math.abs(face.annotations.rightEyeIris[4][1] - face.annotations.rightEyeIris[2][1]))\n : 0;\n\n // combine results\n faceRes.push({\n confidence: face.confidence,\n box: face.box,\n mesh: face.mesh,\n annotations: face.annotations,\n age: ageRes.age,\n gender: genderRes.gender,\n genderConfidence: genderRes.confidence,\n emotion: emotionRes,\n iris: (irisSize !== 0) ? 
Math.trunc(irisSize) / 100 : 0,\n });\n this.analyze('End Face');\n }\n this.analyze('End FaceMesh:');\n if (this.config.async) {\n if (this.perf.face) delete this.perf.face;\n if (this.perf.age) delete this.perf.age;\n if (this.perf.gender) delete this.perf.gender;\n if (this.perf.emotion) delete this.perf.emotion;\n }\n return faceRes;\n }\n\n async image(input, userConfig = {}) {\n this.state = 'image';\n this.config = mergeDeep(this.config, userConfig);\n const process = image.process(input, this.config);\n process.tensor.dispose();\n return process.canvas;\n }\n\n // main detect function\n async detect(input, userConfig = {}) {\n this.state = 'config';\n let timeStamp;\n\n // update configuration\n this.config = mergeDeep(this.config, userConfig);\n if (!this.config.videoOptimized) this.config = mergeDeep(this.config, disableSkipFrames);\n\n // sanity checks\n this.state = 'check';\n const error = this.sanity(input);\n if (error) {\n this.log(error, input);\n return { error };\n }\n\n // detection happens inside a promise\n return new Promise(async (resolve) => {\n let poseRes;\n let handRes;\n let faceRes;\n\n const timeStart = now();\n\n // configure backend\n await this.checkBackend();\n\n // load models if enabled\n await this.load();\n\n if (this.config.scoped) tf.engine().startScope();\n this.analyze('Start Scope:');\n\n timeStamp = now();\n const process = image.process(input, this.config);\n this.perf.image = Math.trunc(now() - timeStamp);\n this.analyze('Get Image:');\n\n // run face detection followed by all models that rely on face bounding box: face mesh, age, gender, emotion\n if (this.config.async) {\n faceRes = this.config.face.enabled ? this.detectFace(process.tensor) : [];\n if (this.perf.face) delete this.perf.face;\n } else {\n this.state = 'run:face';\n timeStamp = now();\n faceRes = this.config.face.enabled ? await this.detectFace(process.tensor) : [];\n this.perf.face = Math.trunc(now() - timeStamp);\n }\n\n // run posenet\n this.analyze('Start Body:');\n if (this.config.async) {\n poseRes = this.config.body.enabled ? this.models.posenet.estimatePoses(process.tensor, this.config) : [];\n if (this.perf.body) delete this.perf.body;\n } else {\n this.state = 'run:body';\n timeStamp = now();\n poseRes = this.config.body.enabled ? await this.models.posenet.estimatePoses(process.tensor, this.config) : [];\n this.perf.body = Math.trunc(now() - timeStamp);\n }\n this.analyze('End Body:');\n\n // run handpose\n this.analyze('Start Hand:');\n if (this.config.async) {\n handRes = this.config.hand.enabled ? this.models.handpose.estimateHands(process.tensor, this.config.hand) : [];\n if (this.perf.hand) delete this.perf.hand;\n } else {\n this.state = 'run:hand';\n timeStamp = now();\n handRes = this.config.hand.enabled ? 
await this.models.handpose.estimateHands(process.tensor, this.config.hand) : [];\n this.perf.hand = Math.trunc(now() - timeStamp);\n }\n // this.analyze('End Hand:');\n\n // if async wait for results\n if (this.config.async) {\n [faceRes, poseRes, handRes] = await Promise.all([faceRes, poseRes, handRes]);\n }\n process.tensor.dispose();\n\n if (this.config.scoped) tf.engine().endScope();\n this.analyze('End Scope:');\n\n let gestureRes = [];\n if (this.config.gesture.enabled) {\n timeStamp = now();\n gestureRes = { face: gesture.face(faceRes), body: gesture.body(poseRes), hand: gesture.hand(handRes) };\n if (!this.config.async) this.perf.gesture = Math.trunc(now() - timeStamp);\n else if (this.perf.gesture) delete this.perf.gesture;\n }\n\n this.perf.total = Math.trunc(now() - timeStart);\n this.state = 'idle';\n resolve({ face: faceRes, body: poseRes, hand: handRes, gesture: gestureRes, performance: this.perf, canvas: process.canvas });\n });\n }\n\n async warmup(userConfig) {\n const warmup = new ImageData(255, 255);\n await this.detect(warmup, userConfig);\n this.log('warmed up');\n }\n}\n\nexport { Human as default };\n", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { tf } from '../tf.js';\n\nfunction getBoxSize(box) {\n return [\n Math.abs(box.endPoint[0] - box.startPoint[0]),\n Math.abs(box.endPoint[1] - box.startPoint[1]),\n ];\n}\nfunction getBoxCenter(box) {\n return [\n box.startPoint[0] + (box.endPoint[0] - box.startPoint[0]) / 2,\n box.startPoint[1] + (box.endPoint[1] - box.startPoint[1]) / 2,\n ];\n}\nfunction cutBoxFromImageAndResize(box, image, cropSize) {\n const h = image.shape[1];\n const w = image.shape[2];\n const boxes = [[\n box.startPoint[1] / h,\n box.startPoint[0] / w,\n box.endPoint[1] / h,\n box.endPoint[0] / w,\n ]];\n return tf.image.cropAndResize(image, boxes, [0], cropSize);\n}\nfunction scaleBoxCoordinates(box, factor) {\n const startPoint = [box.startPoint[0] * factor[0], box.startPoint[1] * factor[1]];\n const endPoint = [box.endPoint[0] * factor[0], box.endPoint[1] * factor[1]];\n const palmLandmarks = box.palmLandmarks.map((coord) => {\n const scaledCoord = [coord[0] * factor[0], coord[1] * factor[1]];\n return scaledCoord;\n });\n return { startPoint, endPoint, palmLandmarks, confidence: box.confidence };\n}\nfunction enlargeBox(box, factor = 1.5) {\n const center = getBoxCenter(box);\n const size = getBoxSize(box);\n const newHalfSize = [factor * size[0] / 2, factor * size[1] / 2];\n const startPoint = [center[0] - newHalfSize[0], center[1] - newHalfSize[1]];\n const endPoint = [center[0] + newHalfSize[0], center[1] + newHalfSize[1]];\n return { startPoint, endPoint, palmLandmarks: box.palmLandmarks };\n}\nfunction squarifyBox(box) {\n const centers = getBoxCenter(box);\n const size = getBoxSize(box);\n const maxEdge = Math.max(...size);\n const halfSize = maxEdge / 2;\n const 
startPoint = [centers[0] - halfSize, centers[1] - halfSize];\n const endPoint = [centers[0] + halfSize, centers[1] + halfSize];\n return { startPoint, endPoint, palmLandmarks: box.palmLandmarks };\n}\nfunction shiftBox(box, shiftFactor) {\n const boxSize = [\n box.endPoint[0] - box.startPoint[0],\n box.endPoint[1] - box.startPoint[1],\n ];\n const shiftVector = [boxSize[0] * shiftFactor[0], boxSize[1] * shiftFactor[1]];\n const startPoint = [box.startPoint[0] + shiftVector[0], box.startPoint[1] + shiftVector[1]];\n const endPoint = [box.endPoint[0] + shiftVector[0], box.endPoint[1] + shiftVector[1]];\n return { startPoint, endPoint, palmLandmarks: box.palmLandmarks };\n}\nexport {\n cutBoxFromImageAndResize,\n enlargeBox,\n getBoxCenter,\n getBoxSize,\n scaleBoxCoordinates,\n shiftBox,\n squarifyBox,\n};\n", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nfunction normalizeRadians(angle) {\n return angle - 2 * Math.PI * Math.floor((angle + Math.PI) / (2 * Math.PI));\n}\nfunction computeRotation(point1, point2) {\n const radians = Math.PI / 2 - Math.atan2(-(point2[1] - point1[1]), point2[0] - point1[0]);\n return normalizeRadians(radians);\n}\nconst buildTranslationMatrix = (x, y) => [[1, 0, x], [0, 1, y], [0, 0, 1]];\nfunction dot(v1, v2) {\n let product = 0;\n for (let i = 0; i < v1.length; i++) {\n product += v1[i] * v2[i];\n }\n return product;\n}\nfunction getColumnFrom2DArr(arr, columnIndex) {\n const column = [];\n for (let i = 0; i < arr.length; i++) {\n column.push(arr[i][columnIndex]);\n }\n return column;\n}\nfunction multiplyTransformMatrices(mat1, mat2) {\n const product = [];\n const size = mat1.length;\n for (let row = 0; row < size; row++) {\n product.push([]);\n for (let col = 0; col < size; col++) {\n product[row].push(dot(mat1[row], getColumnFrom2DArr(mat2, col)));\n }\n }\n return product;\n}\nfunction buildRotationMatrix(rotation, center) {\n const cosA = Math.cos(rotation);\n const sinA = Math.sin(rotation);\n const rotationMatrix = [[cosA, -sinA, 0], [sinA, cosA, 0], [0, 0, 1]];\n const translationMatrix = buildTranslationMatrix(center[0], center[1]);\n const translationTimesRotation = multiplyTransformMatrices(translationMatrix, rotationMatrix);\n const negativeTranslationMatrix = buildTranslationMatrix(-center[0], -center[1]);\n return multiplyTransformMatrices(translationTimesRotation, negativeTranslationMatrix);\n}\nfunction invertTransformMatrix(matrix) {\n const rotationComponent = [[matrix[0][0], matrix[1][0]], [matrix[0][1], matrix[1][1]]];\n const translationComponent = [matrix[0][2], matrix[1][2]];\n const invertedTranslation = [\n -dot(rotationComponent[0], translationComponent),\n -dot(rotationComponent[1], translationComponent),\n ];\n return [\n rotationComponent[0].concat(invertedTranslation[0]),\n rotationComponent[1].concat(invertedTranslation[1]),\n [0, 0, 1],\n ];\n}\nfunction 
rotatePoint(homogeneousCoordinate, rotationMatrix) {\n return [\n dot(homogeneousCoordinate, rotationMatrix[0]),\n dot(homogeneousCoordinate, rotationMatrix[1]),\n ];\n}\nexport {\n buildRotationMatrix,\n computeRotation,\n dot,\n getColumnFrom2DArr,\n invertTransformMatrix,\n normalizeRadians,\n rotatePoint,\n};\n", "import { tf, setWasmPaths } from './tf.js';\nimport * as facemesh from './face/facemesh.js';\nimport * as age from './age/age.js';\nimport * as gender from './gender/gender.js';\nimport * as emotion from './emotion/emotion.js';\nimport * as posenet from './body/posenet.js';\nimport * as handpose from './hand/handpose.js';\nimport * as gesture from './gesture.js';\nimport * as image from './image.js';\nimport * as profile from './profile.js';\nimport * as config from '../config.js';\nimport * as app from '../package.json';\n\n// static config override for non-video detection\nconst disableSkipFrames = {\n face: { detector: { skipFrames: 0 }, age: { skipFrames: 0 }, gender: { skipFrames: 0 }, emotion: { skipFrames: 0 } }, hand: { skipFrames: 0 },\n};\n\n// helper function: gets elapsed time on both browser and nodejs\nconst now = () => {\n if (typeof performance !== 'undefined') return performance.now();\n return parseInt(Number(process.hrtime.bigint()) / 1000 / 1000);\n};\n\n// helper function: perform deep merge of multiple objects so it allows full inheriance with overrides\nfunction mergeDeep(...objects) {\n const isObject = (obj) => obj && typeof obj === 'object';\n return objects.reduce((prev, obj) => {\n Object.keys(obj || {}).forEach((key) => {\n const pVal = prev[key];\n const oVal = obj[key];\n if (Array.isArray(pVal) && Array.isArray(oVal)) {\n prev[key] = pVal.concat(...oVal);\n } else if (isObject(pVal) && isObject(oVal)) {\n prev[key] = mergeDeep(pVal, oVal);\n } else {\n prev[key] = oVal;\n }\n });\n return prev;\n }, {});\n}\n\nclass Human {\n constructor(userConfig = {}) {\n this.tf = tf;\n this.version = app.version;\n this.config = mergeDeep(config.default, userConfig);\n this.fx = null;\n this.state = 'idle';\n this.numTensors = 0;\n this.analyzeMemoryLeaks = false;\n this.checkSanity = false;\n this.firstRun = true;\n this.perf = {};\n // object that contains all initialized models\n this.models = {\n facemesh: null,\n posenet: null,\n handpose: null,\n iris: null,\n age: null,\n gender: null,\n emotion: null,\n };\n // export raw access to underlying models\n this.facemesh = facemesh;\n this.age = age;\n this.gender = gender;\n this.emotion = emotion;\n this.body = posenet;\n this.hand = handpose;\n }\n\n // helper function: wrapper around console output\n log(...msg) {\n // eslint-disable-next-line no-console\n if (msg && this.config.console) console.log('Human:', ...msg);\n }\n\n profile() {\n if (this.config.profile) return profile.data;\n return {};\n }\n\n // helper function: measure tensor leak\n analyze(...msg) {\n if (!this.analyzeMemoryLeaks) return;\n const current = tf.engine().state.numTensors;\n const previous = this.numTensors;\n this.numTensors = current;\n const leaked = current - previous;\n if (leaked !== 0) this.log(...msg, leaked);\n }\n\n // quick sanity check on inputs\n sanity(input) {\n if (!this.checkSanity) return null;\n if (!input) return 'input is not defined';\n if (tf.ENV.flags.IS_NODE && !(input instanceof tf.Tensor)) {\n return 'input must be a tensor';\n }\n try {\n tf.getBackend();\n } catch {\n return 'backend not loaded';\n }\n return null;\n }\n\n // preload models, not explicitly required as it's done automatically on 
first use\n async load(userConfig) {\n this.state = 'load';\n const timeStamp = now();\n if (userConfig) this.config = mergeDeep(this.config, userConfig);\n\n if (this.firstRun) {\n this.checkBackend(true);\n this.log(`version: ${this.version} TensorFlow/JS version: ${tf.version_core}`);\n this.log('configuration:', this.config);\n this.log('flags:', tf.ENV.flags);\n this.firstRun = false;\n }\n if (this.config.async) {\n [\n this.models.facemesh,\n this.models.age,\n this.models.gender,\n this.models.emotion,\n this.models.posenet,\n this.models.handpose,\n ] = await Promise.all([\n this.models.facemesh || (this.config.face.enabled ? facemesh.load(this.config.face) : null),\n this.models.age || ((this.config.face.enabled && this.config.face.age.enabled) ? age.load(this.config) : null),\n this.models.gender || ((this.config.face.enabled && this.config.face.gender.enabled) ? gender.load(this.config) : null),\n this.models.emotion || ((this.config.face.enabled && this.config.face.emotion.enabled) ? emotion.load(this.config) : null),\n this.models.posenet || (this.config.body.enabled ? posenet.load(this.config) : null),\n this.models.handpose || (this.config.hand.enabled ? handpose.load(this.config.hand) : null),\n ]);\n } else {\n if (this.config.face.enabled && !this.models.facemesh) this.models.facemesh = await facemesh.load(this.config.face);\n if (this.config.face.enabled && this.config.face.age.enabled && !this.models.age) this.models.age = await age.load(this.config);\n if (this.config.face.enabled && this.config.face.gender.enabled && !this.models.gender) this.models.gender = await gender.load(this.config);\n if (this.config.face.enabled && this.config.face.emotion.enabled && !this.models.emotion) this.models.emotion = await emotion.load(this.config);\n if (this.config.body.enabled && !this.models.posenet) this.models.posenet = await posenet.load(this.config);\n if (this.config.hand.enabled && !this.models.handpose) this.models.handpose = await handpose.load(this.config.hand);\n }\n const current = Math.trunc(now() - timeStamp);\n if (current > (this.perf.load || 0)) this.perf.load = current;\n }\n\n // check if backend needs initialization if it changed\n async checkBackend(force) {\n const timeStamp = now();\n if (this.config.backend && (this.config.backend !== '') && force || (tf.getBackend() !== this.config.backend)) {\n this.state = 'backend';\n /* force backend reload\n if (this.config.backend in tf.engine().registry) {\n const backendFactory = tf.findBackendFactory(this.config.backend);\n tf.removeBackend(this.config.backend);\n tf.registerBackend(this.config.backend, backendFactory);\n } else {\n this.log('Backend not registred:', this.config.backend);\n }\n */\n\n this.log('setting backend:', this.config.backend);\n\n if (this.config.backend === 'wasm') {\n this.log('settings wasm path:', this.config.wasmPath);\n setWasmPaths(this.config.wasmPath);\n const simd = await tf.env().getAsync('WASM_HAS_SIMD_SUPPORT');\n if (!simd) this.log('warning: wasm simd support is not enabled');\n }\n\n await tf.setBackend(this.config.backend);\n tf.enableProdMode();\n /* debug mode is really too mcuh\n tf.enableDebugMode();\n */\n if (this.config.backend === 'webgl') {\n if (this.config.deallocate) {\n this.log('changing webgl: WEBGL_DELETE_TEXTURE_THRESHOLD:', this.config.deallocate);\n tf.ENV.set('WEBGL_DELETE_TEXTURE_THRESHOLD', this.config.deallocate ? 
0 : -1);\n }\n // tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true);\n tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', true);\n }\n await tf.ready();\n }\n const current = Math.trunc(now() - timeStamp);\n if (current > (this.perf.backend || 0)) this.perf.backend = current;\n }\n\n async detectFace(input) {\n // run facemesh, includes blazeface and iris\n // eslint-disable-next-line no-async-promise-executor\n let timeStamp;\n let ageRes;\n let genderRes;\n let emotionRes;\n const faceRes = [];\n this.state = 'run:face';\n timeStamp = now();\n const faces = await this.models.facemesh.estimateFaces(input, this.config.face);\n this.perf.face = Math.trunc(now() - timeStamp);\n for (const face of faces) {\n this.analyze('Get Face');\n // is something went wrong, skip the face\n if (!face.image || face.image.isDisposedInternal) {\n this.log('Face object is disposed:', face.image);\n continue;\n }\n // run age, inherits face from blazeface\n this.analyze('Start Age:');\n if (this.config.async) {\n ageRes = this.config.face.age.enabled ? age.predict(face.image, this.config) : {};\n } else {\n this.state = 'run:age';\n timeStamp = now();\n ageRes = this.config.face.age.enabled ? await age.predict(face.image, this.config) : {};\n this.perf.age = Math.trunc(now() - timeStamp);\n }\n\n // run gender, inherits face from blazeface\n this.analyze('Start Gender:');\n if (this.config.async) {\n genderRes = this.config.face.gender.enabled ? gender.predict(face.image, this.config) : {};\n } else {\n this.state = 'run:gender';\n timeStamp = now();\n genderRes = this.config.face.gender.enabled ? await gender.predict(face.image, this.config) : {};\n this.perf.gender = Math.trunc(now() - timeStamp);\n }\n // run emotion, inherits face from blazeface\n this.analyze('Start Emotion:');\n if (this.config.async) {\n emotionRes = this.config.face.emotion.enabled ? emotion.predict(face.image, this.config) : {};\n } else {\n this.state = 'run:emotion';\n timeStamp = now();\n emotionRes = this.config.face.emotion.enabled ? await emotion.predict(face.image, this.config) : {};\n this.perf.emotion = Math.trunc(now() - timeStamp);\n }\n this.analyze('End Emotion:');\n\n // if async wait for results\n if (this.config.async) {\n [ageRes, genderRes, emotionRes] = await Promise.all([ageRes, genderRes, emotionRes]);\n }\n\n this.analyze('Finish Face:');\n // dont need face anymore\n face.image.dispose();\n\n // calculate iris distance\n // iris: array[ center, left, top, right, bottom]\n const irisSize = (face.annotations.leftEyeIris && face.annotations.rightEyeIris)\n /* average human iris size is 11.7mm */\n ? 11.7 * Math.max(Math.abs(face.annotations.leftEyeIris[3][0] - face.annotations.leftEyeIris[1][0]), Math.abs(face.annotations.rightEyeIris[4][1] - face.annotations.rightEyeIris[2][1]))\n : 0;\n\n // combine results\n faceRes.push({\n confidence: face.confidence,\n box: face.box,\n mesh: face.mesh,\n annotations: face.annotations,\n age: ageRes.age,\n gender: genderRes.gender,\n genderConfidence: genderRes.confidence,\n emotion: emotionRes,\n iris: (irisSize !== 0) ? 
Math.trunc(irisSize) / 100 : 0,\n });\n this.analyze('End Face');\n }\n this.analyze('End FaceMesh:');\n if (this.config.async) {\n if (this.perf.face) delete this.perf.face;\n if (this.perf.age) delete this.perf.age;\n if (this.perf.gender) delete this.perf.gender;\n if (this.perf.emotion) delete this.perf.emotion;\n }\n return faceRes;\n }\n\n async image(input, userConfig = {}) {\n this.state = 'image';\n this.config = mergeDeep(this.config, userConfig);\n const process = image.process(input, this.config);\n process.tensor.dispose();\n return process.canvas;\n }\n\n // main detect function\n async detect(input, userConfig = {}) {\n this.state = 'config';\n let timeStamp;\n\n // update configuration\n this.config = mergeDeep(this.config, userConfig);\n if (!this.config.videoOptimized) this.config = mergeDeep(this.config, disableSkipFrames);\n\n // sanity checks\n this.state = 'check';\n const error = this.sanity(input);\n if (error) {\n this.log(error, input);\n return { error };\n }\n\n // detection happens inside a promise\n return new Promise(async (resolve) => {\n let poseRes;\n let handRes;\n let faceRes;\n\n const timeStart = now();\n\n // configure backend\n await this.checkBackend();\n\n // load models if enabled\n await this.load();\n\n if (this.config.scoped) tf.engine().startScope();\n this.analyze('Start Scope:');\n\n timeStamp = now();\n const process = image.process(input, this.config);\n this.perf.image = Math.trunc(now() - timeStamp);\n this.analyze('Get Image:');\n\n // run face detection followed by all models that rely on face bounding box: face mesh, age, gender, emotion\n if (this.config.async) {\n faceRes = this.config.face.enabled ? this.detectFace(process.tensor) : [];\n if (this.perf.face) delete this.perf.face;\n } else {\n this.state = 'run:face';\n timeStamp = now();\n faceRes = this.config.face.enabled ? await this.detectFace(process.tensor) : [];\n this.perf.face = Math.trunc(now() - timeStamp);\n }\n\n // run posenet\n this.analyze('Start Body:');\n if (this.config.async) {\n poseRes = this.config.body.enabled ? this.models.posenet.estimatePoses(process.tensor, this.config) : [];\n if (this.perf.body) delete this.perf.body;\n } else {\n this.state = 'run:body';\n timeStamp = now();\n poseRes = this.config.body.enabled ? await this.models.posenet.estimatePoses(process.tensor, this.config) : [];\n this.perf.body = Math.trunc(now() - timeStamp);\n }\n this.analyze('End Body:');\n\n // run handpose\n this.analyze('Start Hand:');\n if (this.config.async) {\n handRes = this.config.hand.enabled ? this.models.handpose.estimateHands(process.tensor, this.config.hand) : [];\n if (this.perf.hand) delete this.perf.hand;\n } else {\n this.state = 'run:hand';\n timeStamp = now();\n handRes = this.config.hand.enabled ? 
await this.models.handpose.estimateHands(process.tensor, this.config.hand) : [];\n this.perf.hand = Math.trunc(now() - timeStamp);\n }\n // this.analyze('End Hand:');\n\n // if async wait for results\n if (this.config.async) {\n [faceRes, poseRes, handRes] = await Promise.all([faceRes, poseRes, handRes]);\n }\n process.tensor.dispose();\n\n if (this.config.scoped) tf.engine().endScope();\n this.analyze('End Scope:');\n\n let gestureRes = [];\n if (this.config.gesture.enabled) {\n timeStamp = now();\n gestureRes = { face: gesture.face(faceRes), body: gesture.body(poseRes), hand: gesture.hand(handRes) };\n if (!this.config.async) this.perf.gesture = Math.trunc(now() - timeStamp);\n else if (this.perf.gesture) delete this.perf.gesture;\n }\n\n this.perf.total = Math.trunc(now() - timeStart);\n this.state = 'idle';\n resolve({ face: faceRes, body: poseRes, hand: handRes, gesture: gestureRes, performance: this.perf, canvas: process.canvas });\n });\n }\n\n async warmup(userConfig) {\n const warmup = new ImageData(255, 255);\n await this.detect(warmup, userConfig);\n this.log('warmed up');\n }\n}\n\nexport { Human as default };\n", "/* eslint-disable indent */\n/* eslint-disable no-multi-spaces */\n\nexport default {\n backend: 'webgl', // select tfjs backend to use\n wasmPath: '../assets/', // path for wasm binaries\n // only used for backend: wasm\n console: true, // enable debugging output to console\n async: true, // execute enabled models in parallel\n // this disables per-model performance data but\n // slightly increases performance\n // cannot be used if profiling is enabled\n profile: false, // enable tfjs profiling\n // this has significant performance impact\n // only enable for debugging purposes\n // currently only implemented for age,gender,emotion models\n deallocate: false, // aggresively deallocate gpu memory after each usage\n // only valid for webgl backend and only during first call\n // cannot be changed unless library is reloaded\n // this has significant performance impact\n // only enable on low-memory devices\n scoped: false, // enable scoped runs\n // some models *may* have memory leaks,\n // this wrapps everything in a local scope at a cost of performance\n // typically not needed\n videoOptimized: true, // perform additional optimizations when input is video,\n // must be disabled for images\n // basically this skips object box boundary detection for every n frames\n // while maintaining in-box detection since objects cannot move that fast\n\n filter: {\n enabled: true, // enable image pre-processing filters\n width: 0, // resize input width\n height: 0, // resize input height\n // if both width and height are set to 0, there is no resizing\n // if just one is set, second one is scaled automatically\n // if both are set, values are used as-is\n return: true, // return processed canvas imagedata in result\n brightness: 0, // range: -1 (darken) to 1 (lighten)\n contrast: 0, // range: -1 (reduce contrast) to 1 (increase contrast)\n sharpness: 0, // range: 0 (no sharpening) to 1 (maximum sharpening)\n blur: 0, // range: 0 (no blur) to N (blur radius in pixels)\n saturation: 0, // range: -1 (reduce saturation) to 1 (increase saturation)\n hue: 0, // range: 0 (no change) to 360 (hue rotation in degrees)\n negative: false, // image negative\n sepia: false, // image sepia colors\n vintage: false, // image vintage colors\n kodachrome: false, // image kodachrome colors\n technicolor: false, // image technicolor colors\n polaroid: false, // image polaroid camera effect\n 
pixelate: 0, // range: 0 (no pixelate) to N (number of pixels to pixelate)\n },\n\n gesture: {\n enabled: true, // enable simple gesture recognition\n },\n\n face: {\n enabled: true, // controls if specified modul is enabled\n // face.enabled is required for all face models:\n // detector, mesh, iris, age, gender, emotion\n // (note: module is not loaded until it is required)\n detector: {\n modelPath: '../models/blazeface-back.json', // can be 'front' or 'back'.\n // 'front' is optimized for large faces\n // such as front-facing camera and\n // 'back' is optimized for distanct faces.\n inputSize: 256, // fixed value: 128 for front and 256 for 'back'\n maxFaces: 10, // maximum number of faces detected in the input\n // should be set to the minimum number for performance\n skipFrames: 15, // how many frames to go without re-running the face bounding box detector\n // only used for video inputs\n // e.g., if model is running st 25 FPS, we can re-use existing bounding\n // box for updated face analysis as the head probably hasn't moved much\n // in short time (10 * 1/25 = 0.25 sec)\n minConfidence: 0.5, // threshold for discarding a prediction\n iouThreshold: 0.2, // threshold for deciding whether boxes overlap too much in\n // non-maximum suppression (0.1 means drop if overlap 10%)\n scoreThreshold: 0.5, // threshold for deciding when to remove boxes based on score\n // in non-maximum suppression,\n // this is applied on detection objects only and before minConfidence\n },\n\n mesh: {\n enabled: true,\n modelPath: '../models/facemesh.json',\n inputSize: 192, // fixed value\n },\n\n iris: {\n enabled: true,\n modelPath: '../models/iris.json',\n inputSize: 64, // fixed value\n },\n\n age: {\n enabled: true,\n modelPath: '../models/age-ssrnet-imdb.json', // can be 'age-ssrnet-imdb' or 'age-ssrnet-wiki'\n // which determines training set for model\n inputSize: 64, // fixed value\n skipFrames: 15, // how many frames to go without re-running the detector\n // only used for video inputs\n },\n\n gender: {\n enabled: true,\n minConfidence: 0.1, // threshold for discarding a prediction\n modelPath: '../models/gender-ssrnet-imdb.json', // can be 'gender', 'gender-ssrnet-imdb' or 'gender-ssrnet-wiki'\n inputSize: 64, // fixed value\n skipFrames: 15, // how many frames to go without re-running the detector\n // only used for video inputs\n },\n\n emotion: {\n enabled: true,\n inputSize: 64, // fixed value\n minConfidence: 0.2, // threshold for discarding a prediction\n skipFrames: 15, // how many frames to go without re-running the detector\n modelPath: '../models/emotion-large.json', // can be 'mini', 'large'\n },\n },\n\n body: {\n enabled: true,\n modelPath: '../models/posenet.json',\n inputSize: 257, // fixed value\n maxDetections: 10, // maximum number of people detected in the input\n // should be set to the minimum number for performance\n scoreThreshold: 0.8, // threshold for deciding when to remove boxes based on score\n // in non-maximum suppression\n nmsRadius: 20, // radius for deciding points are too close in non-maximum suppression\n },\n\n hand: {\n enabled: true,\n inputSize: 256, // fixed value\n skipFrames: 15, // how many frames to go without re-running the hand bounding box detector\n // only used for video inputs\n // e.g., if model is running st 25 FPS, we can re-use existing bounding\n // box for updated hand skeleton analysis as the hand probably\n // hasn't moved much in short time (10 * 1/25 = 0.25 sec)\n minConfidence: 0.5, // threshold for discarding a prediction\n 
iouThreshold: 0.1, // threshold for deciding whether boxes overlap too much\n // in non-maximum suppression\n scoreThreshold: 0.8, // threshold for deciding when to remove boxes based on\n // score in non-maximum suppression\n maxHands: 1, // maximum number of hands detected in the input\n // should be set to the minimum number for performance\n landmarks: true, // detect hand landmarks or just hand boundary box\n detector: {\n modelPath: '../models/handdetect.json',\n },\n skeleton: {\n modelPath: '../models/handskeleton.json',\n },\n },\n};\n", "import { tf, setWasmPaths } from './tf.js';\nimport * as facemesh from './face/facemesh.js';\nimport * as age from './age/age.js';\nimport * as gender from './gender/gender.js';\nimport * as emotion from './emotion/emotion.js';\nimport * as posenet from './body/posenet.js';\nimport * as handpose from './hand/handpose.js';\nimport * as gesture from './gesture.js';\nimport * as image from './image.js';\nimport * as profile from './profile.js';\nimport * as config from '../config.js';\nimport * as app from '../package.json';\n\n// static config override for non-video detection\nconst disableSkipFrames = {\n face: { detector: { skipFrames: 0 }, age: { skipFrames: 0 }, gender: { skipFrames: 0 }, emotion: { skipFrames: 0 } }, hand: { skipFrames: 0 },\n};\n\n// helper function: gets elapsed time on both browser and nodejs\nconst now = () => {\n if (typeof performance !== 'undefined') return performance.now();\n return parseInt(Number(process.hrtime.bigint()) / 1000 / 1000);\n};\n\n// helper function: perform deep merge of multiple objects so it allows full inheriance with overrides\nfunction mergeDeep(...objects) {\n const isObject = (obj) => obj && typeof obj === 'object';\n return objects.reduce((prev, obj) => {\n Object.keys(obj || {}).forEach((key) => {\n const pVal = prev[key];\n const oVal = obj[key];\n if (Array.isArray(pVal) && Array.isArray(oVal)) {\n prev[key] = pVal.concat(...oVal);\n } else if (isObject(pVal) && isObject(oVal)) {\n prev[key] = mergeDeep(pVal, oVal);\n } else {\n prev[key] = oVal;\n }\n });\n return prev;\n }, {});\n}\n\nclass Human {\n constructor(userConfig = {}) {\n this.tf = tf;\n this.version = app.version;\n this.config = mergeDeep(config.default, userConfig);\n this.fx = null;\n this.state = 'idle';\n this.numTensors = 0;\n this.analyzeMemoryLeaks = false;\n this.checkSanity = false;\n this.firstRun = true;\n this.perf = {};\n // object that contains all initialized models\n this.models = {\n facemesh: null,\n posenet: null,\n handpose: null,\n iris: null,\n age: null,\n gender: null,\n emotion: null,\n };\n // export raw access to underlying models\n this.facemesh = facemesh;\n this.age = age;\n this.gender = gender;\n this.emotion = emotion;\n this.body = posenet;\n this.hand = handpose;\n }\n\n // helper function: wrapper around console output\n log(...msg) {\n // eslint-disable-next-line no-console\n if (msg && this.config.console) console.log('Human:', ...msg);\n }\n\n profile() {\n if (this.config.profile) return profile.data;\n return {};\n }\n\n // helper function: measure tensor leak\n analyze(...msg) {\n if (!this.analyzeMemoryLeaks) return;\n const current = tf.engine().state.numTensors;\n const previous = this.numTensors;\n this.numTensors = current;\n const leaked = current - previous;\n if (leaked !== 0) this.log(...msg, leaked);\n }\n\n // quick sanity check on inputs\n sanity(input) {\n if (!this.checkSanity) return null;\n if (!input) return 'input is not defined';\n if (tf.ENV.flags.IS_NODE && !(input 
instanceof tf.Tensor)) {\n return 'input must be a tensor';\n }\n try {\n tf.getBackend();\n } catch {\n return 'backend not loaded';\n }\n return null;\n }\n\n // preload models, not explicitly required as it's done automatically on first use\n async load(userConfig) {\n this.state = 'load';\n const timeStamp = now();\n if (userConfig) this.config = mergeDeep(this.config, userConfig);\n\n if (this.firstRun) {\n this.checkBackend(true);\n this.log(`version: ${this.version} TensorFlow/JS version: ${tf.version_core}`);\n this.log('configuration:', this.config);\n this.log('flags:', tf.ENV.flags);\n this.firstRun = false;\n }\n if (this.config.async) {\n [\n this.models.facemesh,\n this.models.age,\n this.models.gender,\n this.models.emotion,\n this.models.posenet,\n this.models.handpose,\n ] = await Promise.all([\n this.models.facemesh || (this.config.face.enabled ? facemesh.load(this.config.face) : null),\n this.models.age || ((this.config.face.enabled && this.config.face.age.enabled) ? age.load(this.config) : null),\n this.models.gender || ((this.config.face.enabled && this.config.face.gender.enabled) ? gender.load(this.config) : null),\n this.models.emotion || ((this.config.face.enabled && this.config.face.emotion.enabled) ? emotion.load(this.config) : null),\n this.models.posenet || (this.config.body.enabled ? posenet.load(this.config) : null),\n this.models.handpose || (this.config.hand.enabled ? handpose.load(this.config.hand) : null),\n ]);\n } else {\n if (this.config.face.enabled && !this.models.facemesh) this.models.facemesh = await facemesh.load(this.config.face);\n if (this.config.face.enabled && this.config.face.age.enabled && !this.models.age) this.models.age = await age.load(this.config);\n if (this.config.face.enabled && this.config.face.gender.enabled && !this.models.gender) this.models.gender = await gender.load(this.config);\n if (this.config.face.enabled && this.config.face.emotion.enabled && !this.models.emotion) this.models.emotion = await emotion.load(this.config);\n if (this.config.body.enabled && !this.models.posenet) this.models.posenet = await posenet.load(this.config);\n if (this.config.hand.enabled && !this.models.handpose) this.models.handpose = await handpose.load(this.config.hand);\n }\n const current = Math.trunc(now() - timeStamp);\n if (current > (this.perf.load || 0)) this.perf.load = current;\n }\n\n // check if backend needs initialization if it changed\n async checkBackend(force) {\n const timeStamp = now();\n if (this.config.backend && (this.config.backend !== '') && force || (tf.getBackend() !== this.config.backend)) {\n this.state = 'backend';\n /* force backend reload\n if (this.config.backend in tf.engine().registry) {\n const backendFactory = tf.findBackendFactory(this.config.backend);\n tf.removeBackend(this.config.backend);\n tf.registerBackend(this.config.backend, backendFactory);\n } else {\n this.log('Backend not registred:', this.config.backend);\n }\n */\n\n this.log('setting backend:', this.config.backend);\n\n if (this.config.backend === 'wasm') {\n this.log('settings wasm path:', this.config.wasmPath);\n setWasmPaths(this.config.wasmPath);\n const simd = await tf.env().getAsync('WASM_HAS_SIMD_SUPPORT');\n if (!simd) this.log('warning: wasm simd support is not enabled');\n }\n\n await tf.setBackend(this.config.backend);\n tf.enableProdMode();\n /* debug mode is really too mcuh\n tf.enableDebugMode();\n */\n if (this.config.backend === 'webgl') {\n if (this.config.deallocate) {\n this.log('changing webgl: WEBGL_DELETE_TEXTURE_THRESHOLD:', 
this.config.deallocate);\n tf.ENV.set('WEBGL_DELETE_TEXTURE_THRESHOLD', this.config.deallocate ? 0 : -1);\n }\n // tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true);\n tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', true);\n }\n await tf.ready();\n }\n const current = Math.trunc(now() - timeStamp);\n if (current > (this.perf.backend || 0)) this.perf.backend = current;\n }\n\n async detectFace(input) {\n // run facemesh, includes blazeface and iris\n // eslint-disable-next-line no-async-promise-executor\n let timeStamp;\n let ageRes;\n let genderRes;\n let emotionRes;\n const faceRes = [];\n this.state = 'run:face';\n timeStamp = now();\n const faces = await this.models.facemesh.estimateFaces(input, this.config.face);\n this.perf.face = Math.trunc(now() - timeStamp);\n for (const face of faces) {\n this.analyze('Get Face');\n // is something went wrong, skip the face\n if (!face.image || face.image.isDisposedInternal) {\n this.log('Face object is disposed:', face.image);\n continue;\n }\n // run age, inherits face from blazeface\n this.analyze('Start Age:');\n if (this.config.async) {\n ageRes = this.config.face.age.enabled ? age.predict(face.image, this.config) : {};\n } else {\n this.state = 'run:age';\n timeStamp = now();\n ageRes = this.config.face.age.enabled ? await age.predict(face.image, this.config) : {};\n this.perf.age = Math.trunc(now() - timeStamp);\n }\n\n // run gender, inherits face from blazeface\n this.analyze('Start Gender:');\n if (this.config.async) {\n genderRes = this.config.face.gender.enabled ? gender.predict(face.image, this.config) : {};\n } else {\n this.state = 'run:gender';\n timeStamp = now();\n genderRes = this.config.face.gender.enabled ? await gender.predict(face.image, this.config) : {};\n this.perf.gender = Math.trunc(now() - timeStamp);\n }\n // run emotion, inherits face from blazeface\n this.analyze('Start Emotion:');\n if (this.config.async) {\n emotionRes = this.config.face.emotion.enabled ? emotion.predict(face.image, this.config) : {};\n } else {\n this.state = 'run:emotion';\n timeStamp = now();\n emotionRes = this.config.face.emotion.enabled ? await emotion.predict(face.image, this.config) : {};\n this.perf.emotion = Math.trunc(now() - timeStamp);\n }\n this.analyze('End Emotion:');\n\n // if async wait for results\n if (this.config.async) {\n [ageRes, genderRes, emotionRes] = await Promise.all([ageRes, genderRes, emotionRes]);\n }\n\n this.analyze('Finish Face:');\n // dont need face anymore\n face.image.dispose();\n\n // calculate iris distance\n // iris: array[ center, left, top, right, bottom]\n const irisSize = (face.annotations.leftEyeIris && face.annotations.rightEyeIris)\n /* average human iris size is 11.7mm */\n ? 11.7 * Math.max(Math.abs(face.annotations.leftEyeIris[3][0] - face.annotations.leftEyeIris[1][0]), Math.abs(face.annotations.rightEyeIris[4][1] - face.annotations.rightEyeIris[2][1]))\n : 0;\n\n // combine results\n faceRes.push({\n confidence: face.confidence,\n box: face.box,\n mesh: face.mesh,\n annotations: face.annotations,\n age: ageRes.age,\n gender: genderRes.gender,\n genderConfidence: genderRes.confidence,\n emotion: emotionRes,\n iris: (irisSize !== 0) ? 
Math.trunc(irisSize) / 100 : 0,\n });\n this.analyze('End Face');\n }\n this.analyze('End FaceMesh:');\n if (this.config.async) {\n if (this.perf.face) delete this.perf.face;\n if (this.perf.age) delete this.perf.age;\n if (this.perf.gender) delete this.perf.gender;\n if (this.perf.emotion) delete this.perf.emotion;\n }\n return faceRes;\n }\n\n async image(input, userConfig = {}) {\n this.state = 'image';\n this.config = mergeDeep(this.config, userConfig);\n const process = image.process(input, this.config);\n process.tensor.dispose();\n return process.canvas;\n }\n\n // main detect function\n async detect(input, userConfig = {}) {\n this.state = 'config';\n let timeStamp;\n\n // update configuration\n this.config = mergeDeep(this.config, userConfig);\n if (!this.config.videoOptimized) this.config = mergeDeep(this.config, disableSkipFrames);\n\n // sanity checks\n this.state = 'check';\n const error = this.sanity(input);\n if (error) {\n this.log(error, input);\n return { error };\n }\n\n // detection happens inside a promise\n return new Promise(async (resolve) => {\n let poseRes;\n let handRes;\n let faceRes;\n\n const timeStart = now();\n\n // configure backend\n await this.checkBackend();\n\n // load models if enabled\n await this.load();\n\n if (this.config.scoped) tf.engine().startScope();\n this.analyze('Start Scope:');\n\n timeStamp = now();\n const process = image.process(input, this.config);\n this.perf.image = Math.trunc(now() - timeStamp);\n this.analyze('Get Image:');\n\n // run face detection followed by all models that rely on face bounding box: face mesh, age, gender, emotion\n if (this.config.async) {\n faceRes = this.config.face.enabled ? this.detectFace(process.tensor) : [];\n if (this.perf.face) delete this.perf.face;\n } else {\n this.state = 'run:face';\n timeStamp = now();\n faceRes = this.config.face.enabled ? await this.detectFace(process.tensor) : [];\n this.perf.face = Math.trunc(now() - timeStamp);\n }\n\n // run posenet\n this.analyze('Start Body:');\n if (this.config.async) {\n poseRes = this.config.body.enabled ? this.models.posenet.estimatePoses(process.tensor, this.config) : [];\n if (this.perf.body) delete this.perf.body;\n } else {\n this.state = 'run:body';\n timeStamp = now();\n poseRes = this.config.body.enabled ? await this.models.posenet.estimatePoses(process.tensor, this.config) : [];\n this.perf.body = Math.trunc(now() - timeStamp);\n }\n this.analyze('End Body:');\n\n // run handpose\n this.analyze('Start Hand:');\n if (this.config.async) {\n handRes = this.config.hand.enabled ? this.models.handpose.estimateHands(process.tensor, this.config.hand) : [];\n if (this.perf.hand) delete this.perf.hand;\n } else {\n this.state = 'run:hand';\n timeStamp = now();\n handRes = this.config.hand.enabled ? 
await this.models.handpose.estimateHands(process.tensor, this.config.hand) : [];\n this.perf.hand = Math.trunc(now() - timeStamp);\n }\n // this.analyze('End Hand:');\n\n // if async wait for results\n if (this.config.async) {\n [faceRes, poseRes, handRes] = await Promise.all([faceRes, poseRes, handRes]);\n }\n process.tensor.dispose();\n\n if (this.config.scoped) tf.engine().endScope();\n this.analyze('End Scope:');\n\n let gestureRes = [];\n if (this.config.gesture.enabled) {\n timeStamp = now();\n gestureRes = { face: gesture.face(faceRes), body: gesture.body(poseRes), hand: gesture.hand(handRes) };\n if (!this.config.async) this.perf.gesture = Math.trunc(now() - timeStamp);\n else if (this.perf.gesture) delete this.perf.gesture;\n }\n\n this.perf.total = Math.trunc(now() - timeStart);\n this.state = 'idle';\n resolve({ face: faceRes, body: poseRes, hand: handRes, gesture: gestureRes, performance: this.perf, canvas: process.canvas });\n });\n }\n\n async warmup(userConfig) {\n const warmup = new ImageData(255, 255);\n await this.detect(warmup, userConfig);\n this.log('warmed up');\n }\n}\n\nexport { Human as default };\n", "async function drawGesture(result, canvas, ui) {\n if (!result) return;\n const ctx = canvas.getContext('2d');\n ctx.font = ui.baseFont;\n ctx.fillStyle = ui.baseLabel;\n let i = 1;\n for (const [key, val] of Object.entries(result)) {\n if (val.length > 0) {\n const label = `${key}: ${val.join(', ')}`;\n ctx.fillText(label, 6, i * (ui.baseLineHeight + 24));\n i += 1;\n }\n }\n}\n\nasync function drawFace(result, canvas, ui, triangulation) {\n if (!result) return;\n const ctx = canvas.getContext('2d');\n for (const face of result) {\n ctx.font = ui.baseFont;\n ctx.strokeStyle = ui.baseColor;\n ctx.fillStyle = ui.baseColor;\n ctx.lineWidth = ui.baseLineWidth;\n ctx.beginPath();\n if (ui.drawBoxes) {\n ctx.rect(face.box[0], face.box[1], face.box[2], face.box[3]);\n }\n // silly hack since fillText does not suport new line\n const labels = [];\n // labels.push(`${Math.trunc(100 * face.confidence)}% face`);\n if (face.genderConfidence) labels.push(`${Math.trunc(100 * face.genderConfidence)}% ${face.gender || ''}`);\n if (face.age) labels.push(`age: ${face.age || ''}`);\n if (face.iris) labels.push(`iris: ${face.iris}`);\n if (face.emotion && face.emotion.length > 0) {\n const emotion = face.emotion.map((a) => `${Math.trunc(100 * a.score)}% ${a.emotion}`);\n labels.push(emotion.join(' '));\n }\n ctx.fillStyle = ui.baseLabel;\n for (const i in labels) ctx.fillText(labels[i], face.box[0] + 8, face.box[1] + 24 + ((i + 1) * ui.baseLineHeight));\n ctx.stroke();\n ctx.lineWidth = 1;\n if (face.mesh) {\n if (ui.drawPoints) {\n for (const point of face.mesh) {\n ctx.fillStyle = ui.useDepth ? `rgba(${127.5 + (2 * point[2])}, ${127.5 - (2 * point[2])}, 255, 0.5)` : ui.baseColor;\n ctx.beginPath();\n ctx.arc(point[0], point[1], 2, 0, 2 * Math.PI);\n ctx.fill();\n }\n }\n if (ui.drawPolygons) {\n for (let i = 0; i < triangulation.length / 3; i++) {\n const points = [\n triangulation[i * 3 + 0],\n triangulation[i * 3 + 1],\n triangulation[i * 3 + 2],\n ].map((index) => face.mesh[index]);\n const path = new Path2D();\n path.moveTo(points[0][0], points[0][1]);\n for (const point of points) {\n path.lineTo(point[0], point[1]);\n }\n path.closePath();\n ctx.strokeStyle = ui.useDepth ? `rgba(${127.5 + (2 * points[0][2])}, ${127.5 - (2 * points[0][2])}, 255, 0.3)` : ui.baseColor;\n ctx.stroke(path);\n if (ui.fillPolygons) {\n ctx.fillStyle = ui.useDepth ? 
`rgba(${127.5 + (2 * points[0][2])}, ${127.5 - (2 * points[0][2])}, 255, 0.3)` : ui.baseColor;\n ctx.fill(path);\n }\n }\n // iris: array[center, left, top, right, bottom]\n if (face.annotations && face.annotations.leftEyeIris) {\n ctx.strokeStyle = ui.useDepth ? 'rgba(255, 200, 255, 0.3)' : ui.baseColor;\n ctx.beginPath();\n const sizeX = Math.abs(face.annotations.leftEyeIris[3][0] - face.annotations.leftEyeIris[1][0]) / 2;\n const sizeY = Math.abs(face.annotations.leftEyeIris[4][1] - face.annotations.leftEyeIris[2][1]) / 2;\n ctx.ellipse(face.annotations.leftEyeIris[0][0], face.annotations.leftEyeIris[0][1], sizeX, sizeY, 0, 0, 2 * Math.PI);\n ctx.stroke();\n if (ui.fillPolygons) {\n ctx.fillStyle = ui.useDepth ? 'rgba(255, 255, 200, 0.3)' : ui.baseColor;\n ctx.fill();\n }\n }\n if (face.annotations && face.annotations.rightEyeIris) {\n ctx.strokeStyle = ui.useDepth ? 'rgba(255, 200, 255, 0.3)' : ui.baseColor;\n ctx.beginPath();\n const sizeX = Math.abs(face.annotations.rightEyeIris[3][0] - face.annotations.rightEyeIris[1][0]) / 2;\n const sizeY = Math.abs(face.annotations.rightEyeIris[4][1] - face.annotations.rightEyeIris[2][1]) / 2;\n ctx.ellipse(face.annotations.rightEyeIris[0][0], face.annotations.rightEyeIris[0][1], sizeX, sizeY, 0, 0, 2 * Math.PI);\n ctx.stroke();\n if (ui.fillPolygons) {\n ctx.fillStyle = ui.useDepth ? 'rgba(255, 255, 200, 0.3)' : ui.baseColor;\n ctx.fill();\n }\n }\n }\n }\n }\n}\n\nconst lastDrawnPose = [];\nasync function drawBody(result, canvas, ui) {\n if (!result) return;\n const ctx = canvas.getContext('2d');\n ctx.lineJoin = 'round';\n for (const i in result) {\n if (!lastDrawnPose[i] && ui.buffered) lastDrawnPose[i] = { ...result[i] };\n ctx.fillStyle = ui.baseColor;\n ctx.strokeStyle = ui.baseColor;\n ctx.font = ui.baseFont;\n ctx.lineWidth = ui.baseLineWidth;\n if (ui.drawPoints) {\n for (const pt in result[i].keypoints) {\n ctx.beginPath();\n if (ui.buffered) {\n lastDrawnPose[i].keypoints[pt].position.x = (lastDrawnPose[i].keypoints[pt].position.x + result[i].keypoints[pt].position.x) / 2;\n lastDrawnPose[i].keypoints[pt].position.y = (lastDrawnPose[i].keypoints[pt].position.y + result[i].keypoints[pt].position.y) / 2;\n ctx.arc(lastDrawnPose[i].keypoints[pt].position.x, lastDrawnPose[i].keypoints[pt].position.y, 2, 0, 2 * Math.PI);\n } else {\n ctx.arc(result[i].keypoints[pt].position.x, result[i].keypoints[pt].position.y, 2, 0, 2 * Math.PI);\n }\n ctx.fill();\n }\n }\n if (ui.drawPolygons) {\n const path = new Path2D();\n let part;\n // torso\n part = result[i].keypoints.find((a) => a.part === 'leftShoulder');\n path.moveTo(part.position.x, part.position.y);\n part = result[i].keypoints.find((a) => a.part === 'rightShoulder');\n path.lineTo(part.position.x, part.position.y);\n part = result[i].keypoints.find((a) => a.part === 'rightHip');\n path.lineTo(part.position.x, part.position.y);\n part = result[i].keypoints.find((a) => a.part === 'leftHip');\n path.lineTo(part.position.x, part.position.y);\n part = result[i].keypoints.find((a) => a.part === 'leftShoulder');\n path.lineTo(part.position.x, part.position.y);\n // legs\n part = result[i].keypoints.find((a) => a.part === 'leftHip');\n path.moveTo(part.position.x, part.position.y);\n part = result[i].keypoints.find((a) => a.part === 'leftKnee');\n path.lineTo(part.position.x, part.position.y);\n part = result[i].keypoints.find((a) => a.part === 'leftAnkle');\n path.lineTo(part.position.x, part.position.y);\n part = result[i].keypoints.find((a) => a.part === 'rightHip');\n 
path.moveTo(part.position.x, part.position.y);\n part = result[i].keypoints.find((a) => a.part === 'rightKnee');\n path.lineTo(part.position.x, part.position.y);\n part = result[i].keypoints.find((a) => a.part === 'rightAnkle');\n path.lineTo(part.position.x, part.position.y);\n // arms\n part = result[i].keypoints.find((a) => a.part === 'rightShoulder');\n path.moveTo(part.position.x, part.position.y);\n part = result[i].keypoints.find((a) => a.part === 'leftShoulder');\n path.lineTo(part.position.x, part.position.y);\n part = result[i].keypoints.find((a) => a.part === 'leftElbow');\n path.lineTo(part.position.x, part.position.y);\n part = result[i].keypoints.find((a) => a.part === 'leftWrist');\n path.lineTo(part.position.x, part.position.y);\n // arms\n part = result[i].keypoints.find((a) => a.part === 'leftShoulder');\n path.moveTo(part.position.x, part.position.y);\n part = result[i].keypoints.find((a) => a.part === 'rightShoulder');\n path.lineTo(part.position.x, part.position.y);\n part = result[i].keypoints.find((a) => a.part === 'rightElbow');\n path.lineTo(part.position.x, part.position.y);\n part = result[i].keypoints.find((a) => a.part === 'rightWrist');\n path.lineTo(part.position.x, part.position.y);\n // draw all\n ctx.stroke(path);\n }\n }\n}\n\nasync function drawHand(result, canvas, ui) {\n if (!result) return;\n const ctx = canvas.getContext('2d');\n ctx.lineJoin = 'round';\n for (const hand of result) {\n ctx.font = ui.baseFont;\n ctx.lineWidth = ui.baseLineWidth;\n if (ui.drawBoxes) {\n ctx.lineWidth = ui.baseLineWidth;\n ctx.beginPath();\n ctx.strokeStyle = ui.baseColor;\n ctx.fillStyle = ui.baseColor;\n ctx.rect(hand.box[0], hand.box[1], hand.box[2], hand.box[3]);\n ctx.fillStyle = ui.baseLabel;\n ctx.fillText('hand', hand.box[0] + 2, hand.box[1] + 22, hand.box[2]);\n ctx.stroke();\n }\n if (ui.drawPoints) {\n if (hand.landmarks && hand.landmarks.length > 0) {\n for (const point of hand.landmarks) {\n ctx.fillStyle = ui.useDepth ? `rgba(${127.5 + (2 * point[2])}, ${127.5 - (2 * point[2])}, 255, 0.5)` : ui.baseColor;\n ctx.beginPath();\n ctx.arc(point[0], point[1], 2, 0, 2 * Math.PI);\n ctx.fill();\n }\n }\n }\n if (ui.drawPolygons) {\n const addPart = (part) => {\n if (!part) return;\n for (let i = 0; i < part.length; i++) {\n ctx.lineWidth = ui.baseLineWidth;\n ctx.beginPath();\n ctx.strokeStyle = ui.useDepth ? `rgba(${127.5 + (2 * part[i][2])}, ${127.5 - (2 * part[i][2])}, 255, 0.5)` : ui.baseColor;\n ctx.moveTo(part[i > 0 ? i - 1 : 0][0], part[i > 0 ? 
i - 1 : 0][1]);\n ctx.lineTo(part[i][0], part[i][1]);\n ctx.stroke();\n }\n };\n addPart(hand.annotations.indexFinger);\n addPart(hand.annotations.middleFinger);\n addPart(hand.annotations.ringFinger);\n addPart(hand.annotations.pinky);\n addPart(hand.annotations.thumb);\n // addPart(hand.annotations.palmBase);\n }\n }\n}\n\nconst draw = {\n face: drawFace,\n body: drawBody,\n hand: drawHand,\n gesture: drawGesture,\n};\n\nexport default draw;\n", "let instance = 0;\nlet CSScreated = false;\n\nlet theme = {\n background: 'darkslategray',\n hover: 'lightgray',\n itemBackground: 'black',\n itemColor: 'white',\n buttonBackground: 'lightblue',\n buttonHover: 'lightgreen',\n checkboxOn: 'lightgreen',\n checkboxOff: 'lightcoral',\n rangeBackground: 'lightblue',\n rangeLabel: 'white',\n chartColor: 'lightblue',\n};\n\nfunction createCSS() {\n if (CSScreated) return;\n const css = `\n :root { --rounded: 0.2rem; }\n .menu { position: absolute; top: 0rem; right: 0; width: fit-content; padding: 0 0.8rem 0 0.8rem; line-height: 1.8rem; z-index: 10; max-height: calc(100% - 4rem);\n box-shadow: 0 0 8px dimgrey; background: ${theme.background}; border-radius: var(--rounded); border-color: black; border-style: solid; border-width: thin; }\n\n .menu:hover { box-shadow: 0 0 8px ${theme.hover}; }\n .menu-container { display: block; max-height: 100vh; }\n .menu-container-fadeout { max-height: 0; overflow: hidden; transition: max-height, 0.5s ease; }\n .menu-container-fadein { max-height: 100vh; overflow: hidden; transition: max-height, 0.5s ease; }\n .menu-item { display: flex; white-space: nowrap; padding: 0.2rem; width: max-content; cursor: default; }\n .menu-title { text-align: right; cursor: pointer; }\n .menu-hr { margin: 0.2rem; border: 1px solid rgba(0, 0, 0, 0.5) }\n .menu-label { padding: 0; font-weight: 800; }\n\n .menu-list { margin-right: 0.8rem; }\n select:focus { outline: none; }\n .menu-list-item { background: ${theme.itemBackground}; color: ${theme.itemColor}; border: none; padding: 0.2rem; font-family: inherit;\n font-variant: inherit; border-radius: var(--rounded); font-weight: 800; }\n\n .menu-chart-title { padding: 0; font-size: 0.8rem; font-weight: 800; align-items: center}\n .menu-chart-canvas { background: transparent; margin: 0.2rem 0 0.2rem 0.6rem; }\n \n .menu-button { border: 0; background: ${theme.buttonBackground}; width: 100%; padding: 8px; margin: 8px 0 8px 0; cursor: pointer; box-shadow: 4px 4px 4px 0 dimgrey;\n border-radius: var(--rounded); justify-content: center; font-family: inherit; font-variant: inherit; font-size: 1rem; font-weight: 800; }\n .menu-button:hover { background: ${theme.buttonHover}; box-shadow: 4px 4px 4px 0 black; }\n .menu-button:focus { outline: none; }\n\n .menu-checkbox { width: 2.8rem; height: 1rem; background: ${theme.itemBackground}; margin: 0.5rem 0.8rem 0 0; position: relative; border-radius: var(--rounded); }\n .menu-checkbox:after { content: 'OFF'; color: ${theme.checkboxOff}; position: absolute; right: 0.2rem; top: -0.4rem; font-weight: 800; font-size: 0.5rem; }\n .menu-checkbox:before { content: 'ON'; color: ${theme.checkboxOn}; position: absolute; left: 0.3rem; top: -0.4rem; font-weight: 800; font-size: 0.5rem; }\n .menu-checkbox-label { width: 1.3rem; height: 0.8rem; cursor: pointer; position: absolute; top: 0.1rem; left: 0.1rem; z-index: 1; background: ${theme.checkboxOff};\n border-radius: var(--rounded); transition: left 0.6s ease; }\n\n input[type=checkbox] { visibility: hidden; }\n input[type=checkbox]:checked + label { left: 1.4rem; 
background: ${theme.checkboxOn}; }\n\n .menu-range { margin: 0 0.8rem 0 0; width: 5rem; background: transparent; color: ${theme.rangeBackground}; }\n .menu-range:before { color: ${theme.rangeLabel}; margin: 0 0.4rem 0 0; font-weight: 800; font-size: 0.6rem; position: relative; top: 0.3rem; content: attr(value); }\n\n input[type=range] { -webkit-appearance: none; }\n input[type=range]::-webkit-slider-runnable-track { width: 100%; height: 1rem; cursor: pointer; background: ${theme.itemBackground}; border-radius: var(--rounded); border: 1px; }\n input[type=range]::-moz-range-track { width: 100%; height: 1rem; cursor: pointer; background: ${theme.itemBackground}; border-radius: var(--rounded); border: 1px; }\n input[type=range]::-webkit-slider-thumb { border: 1px solid #000000; margin-top: 0.05rem; height: 0.9rem; width: 1.5rem; border-radius: var(--rounded); background: ${theme.rangeBackground}; cursor: pointer; -webkit-appearance: none; }\n input[type=range]::-moz-range-thumb { border: 1px solid #000000; margin-top: 0.05rem; height: 0.9rem; width: 1.5rem; border-radius: var(--rounded); background: ${theme.rangeBackground}; cursor: pointer; -webkit-appearance: none; }\n\n .svg-background { fill:darkslategrey; cursor:pointer; opacity: 0.6; }\n .svg-foreground { fill:white; cursor:pointer; opacity: 0.8; }\n `;\n const el = document.createElement('style');\n el.innerHTML = css;\n document.getElementsByTagName('head')[0].appendChild(el);\n CSScreated = true;\n}\n\nclass Menu {\n constructor(parent, title, position, userTheme) {\n if (userTheme) theme = { ...theme, ...userTheme };\n createCSS();\n this.createMenu(parent, title, position);\n this.id = 0;\n this.instance = instance;\n instance++;\n this._maxFPS = 0;\n this.hidden = 0;\n }\n\n createMenu(parent, title = '', position = { top: null, left: null, bottom: null, right: null }) {\n this.menu = document.createElement('div');\n this.menu.id = `menu-${instance}`;\n this.menu.className = 'menu';\n if (position) {\n if (position.top) this.menu.style.top = position.top;\n if (position.bottom) this.menu.style.bottom = position.bottom;\n if (position.left) this.menu.style.left = position.left;\n if (position.right) this.menu.style.right = position.right;\n }\n\n this.container = document.createElement('div');\n this.container.id = `menu-container-${instance}`;\n this.container.className = 'menu-container menu-container-fadein';\n\n // set menu title with pulldown arrow\n const elTitle = document.createElement('div');\n elTitle.className = 'menu-title';\n elTitle.id = `menu-title-${instance}`;\n const svg = `\n \n \n `;\n elTitle.innerHTML = `${title}${svg}`;\n this.menu.appendChild(elTitle);\n elTitle.addEventListener('click', () => {\n this.container.classList.toggle('menu-container-fadeout');\n this.container.classList.toggle('menu-container-fadein');\n this.menu.style.borderStyle = this.container.classList.contains('menu-container-fadeout') ? 
'none' : 'solid';\n });\n\n this.menu.appendChild(this.container);\n if (typeof parent === 'object') parent.appendChild(this.menu);\n else document.getElementById(parent).appendChild(this.menu);\n }\n\n get newID() {\n this.id++;\n return `menu-${this.instance}-${this.id}`;\n }\n\n get ID() {\n return `menu-${this.instance}-${this.id}`;\n }\n\n get width() {\n return this.menu.offsetWidth;\n }\n\n get height() {\n return this.menu.offsetHeight;\n }\n\n hide() {\n if (this.container.classList.contains('menu-container-fadein')) {\n this.container.classList.toggle('menu-container-fadeout');\n this.container.classList.toggle('menu-container-fadein');\n }\n }\n\n visible() {\n return (this.container.classList.contains('menu-container-fadein'));\n }\n\n toggle(evt) {\n this.container.classList.toggle('menu-container-fadeout');\n this.container.classList.toggle('menu-container-fadein');\n if (this.container.classList.contains('menu-container-fadein') && evt) {\n const x = evt.x || (evt.touches && evt.touches[0] ? evt.touches[0].pageX : null);\n const y = evt.y || (evt.touches && evt.touches[0] ? evt.touches[0].pageY : null);\n if (x) this.menu.style.left = `${x - 105}px`;\n if (y) this.menu.style.top = '5.5rem'; // `${evt.y + 55}px`;\n if (this.menu.offsetLeft < 0) this.menu.style.left = 0;\n if ((this.menu.offsetLeft + this.menu.offsetWidth) > window.innerWidth) {\n this.menu.style.left = null;\n this.menu.style.right = 0;\n }\n this.menu.style.borderStyle = 'solid';\n } else {\n this.menu.style.borderStyle = 'none';\n }\n }\n\n addTitle(title) {\n const el = document.createElement('div');\n el.className = 'menu-title';\n el.id = this.newID;\n el.innerHTML = title;\n this.menu.appendChild(el);\n el.addEventListener('click', () => {\n this.hidden = !this.hidden;\n const all = document.getElementsByClassName('menu');\n for (const item of all) {\n item.style.display = this.hidden ? 'none' : 'block';\n }\n });\n return el;\n }\n\n addLabel(title) {\n const el = document.createElement('div');\n el.className = 'menu-item menu-label';\n el.id = this.newID;\n el.innerHTML = title;\n this.container.appendChild(el);\n return el;\n }\n\n addBool(title, object, variable, callback) {\n const el = document.createElement('div');\n el.className = 'menu-item';\n el.innerHTML = `${title}`;\n this.container.appendChild(el);\n el.addEventListener('change', (evt) => {\n object[variable] = evt.target.checked;\n if (callback) callback(evt.target.checked);\n });\n return el;\n }\n\n async addList(title, items, selected, callback) {\n const el = document.createElement('div');\n el.className = 'menu-item';\n let options = '';\n for (const item of items) {\n const def = item === selected ? 'selected' : '';\n options += `${item} `;\n }\n el.innerHTML = `${title}`;\n el.style.fontFamily = document.body.style.fontFamily;\n el.style.fontSize = document.body.style.fontSize;\n el.style.fontVariant = document.body.style.fontVariant;\n this.container.appendChild(el);\n el.addEventListener('change', (evt) => {\n if (callback) callback(items[evt.target.selectedIndex]);\n });\n return el;\n }\n\n addRange(title, object, variable, min, max, step, callback) {\n const el = document.createElement('div');\n el.className = 'menu-item';\n el.innerHTML = `${title}`;\n this.container.appendChild(el);\n el.addEventListener('change', (evt) => {\n object[variable] = parseInt(evt.target.value) === parseFloat(evt.target.value) ? 
parseInt(evt.target.value) : parseFloat(evt.target.value);\n evt.target.setAttribute('value', evt.target.value);\n if (callback) callback(evt.target.value);\n });\n el.input = el.children[0];\n return el;\n }\n\n addHTML(html) {\n const el = document.createElement('div');\n el.className = 'menu-item';\n el.id = this.newID;\n if (html) el.innerHTML = html;\n this.container.appendChild(el);\n return el;\n }\n\n addButton(titleOn, titleOff, callback) {\n const el = document.createElement('button');\n el.className = 'menu-item menu-button';\n el.style.fontFamily = document.body.style.fontFamily;\n el.style.fontSize = document.body.style.fontSize;\n el.style.fontVariant = document.body.style.fontVariant;\n el.type = 'button';\n el.id = this.newID;\n el.innerText = titleOn;\n this.container.appendChild(el);\n el.addEventListener('click', () => {\n if (el.innerText === titleOn) el.innerText = titleOff;\n else el.innerText = titleOn;\n if (callback) callback(el.innerText !== titleOn);\n });\n return el;\n }\n\n addValue(title, val, suffix = '') {\n const el = document.createElement('div');\n el.className = 'menu-item';\n el.id = `menu-val-${title}`;\n el.innerText = `${title}: ${val}${suffix}`;\n this.container.appendChild(el);\n return el;\n }\n\n // eslint-disable-next-line class-methods-use-this\n updateValue(title, val, suffix = '') {\n const el = document.getElementById(`menu-val-${title}`);\n if (el) el.innerText = `${title}: ${val}${suffix}`;\n else this.addValue(title, val);\n }\n\n addChart(title, id, width = 200, height = 40, color) {\n if (color) theme.chartColor = color;\n const el = document.createElement('div');\n el.className = 'menu-item menu-chart-title';\n el.id = this.newID;\n el.innerHTML = `${title} `;\n this.container.appendChild(el);\n return el;\n }\n\n // eslint-disable-next-line class-methods-use-this\n async updateChart(id, values) {\n if (!values || (values.length === 0)) return;\n const canvas = document.getElementById(`menu-canvas-${id}`);\n if (!canvas) return;\n const ctx = canvas.getContext('2d');\n ctx.fillStyle = theme.background;\n ctx.fillRect(0, 0, canvas.width, canvas.height);\n const width = canvas.width / values.length;\n const max = 1 + Math.max(...values);\n const height = canvas.height / max;\n for (const i in values) {\n const gradient = ctx.createLinearGradient(0, (max - values[i]) * height, 0, 0);\n gradient.addColorStop(0.1, theme.chartColor);\n gradient.addColorStop(0.4, theme.background);\n ctx.fillStyle = gradient;\n ctx.fillRect(i * width, 0, width - 4, canvas.height);\n ctx.fillStyle = theme.background;\n ctx.font = `${width / 1.5}px \"Segoe UI\"`;\n ctx.fillText(Math.round(values[i]), i * width + 1, canvas.height - 1, width - 1);\n }\n }\n}\n\nexport default Menu;\n", "import Human from '../dist/human.esm.js';\nimport draw from './draw.js';\nimport Menu from './menu.js';\n\nconst userConfig = {}; // add any user configuration overrides\n\nconst human = new Human(userConfig);\n\n// ui options\nconst ui = {\n baseColor: 'rgba(173, 216, 230, 0.3)', // 'lightblue' with light alpha channel\n baseBackground: 'rgba(50, 50, 50, 1)', // 'grey'\n baseLabel: 'rgba(173, 216, 230, 0.9)', // 'lightblue' with dark alpha channel\n baseFontProto: 'small-caps {size} \"Segoe UI\"',\n baseLineWidth: 12,\n baseLineHeightProto: 2,\n crop: true,\n columns: 2,\n busy: false,\n facing: true,\n useWorker: false,\n worker: 'demo/worker.js',\n samples: ['../assets/sample6.jpg', '../assets/sample1.jpg', '../assets/sample4.jpg', '../assets/sample5.jpg', 
'../assets/sample3.jpg', '../assets/sample2.jpg'],\n drawBoxes: true,\n drawPoints: false,\n drawPolygons: true,\n fillPolygons: false,\n useDepth: true,\n console: true,\n maxFPSframes: 10,\n modelsPreload: true,\n modelsWarmup: true,\n menuWidth: 0,\n menuHeight: 0,\n camera: {},\n fps: [],\n buffered: true,\n bufferedFPSTarget: 24,\n drawThread: null,\n framesDraw: 0,\n framesDetect: 0,\n};\n\n// global variables\nlet menu;\nlet menuFX;\nlet worker;\nlet lastDetectedResult = {};\n\n// helper function: translates json to human readable string\nfunction str(...msg) {\n if (!Array.isArray(msg)) return msg;\n let line = '';\n for (const entry of msg) {\n if (typeof entry === 'object') line += JSON.stringify(entry).replace(/{|}|\"|\\[|\\]/g, '').replace(/,/g, ', ');\n else line += entry;\n }\n return line;\n}\n\n// helper function: wrapper around console output\nconst log = (...msg) => {\n // eslint-disable-next-line no-console\n if (ui.console) console.log(...msg);\n};\n\nconst status = (msg) => {\n // eslint-disable-next-line no-console\n document.getElementById('status').innerText = msg;\n};\n\n// draws processed results and starts processing of a next frame\nasync function drawResults(input) {\n const result = lastDetectedResult;\n const canvas = document.getElementById('canvas');\n\n // update fps data\n // const elapsed = performance.now() - timeStamp;\n ui.fps.push(1000 / result.performance.total);\n if (ui.fps.length > ui.maxFPSframes) ui.fps.shift();\n\n // enable for continous performance monitoring\n // console.log(result.performance);\n\n // draw fps chart\n await menu.updateChart('FPS', ui.fps);\n\n // get updated canvas\n result.canvas = await human.image(input, userConfig);\n\n // draw image from video\n const ctx = canvas.getContext('2d');\n ctx.fillStyle = ui.baseBackground;\n ctx.fillRect(0, 0, canvas.width, canvas.height);\n if (result.canvas) {\n if (result.canvas.width !== canvas.width) canvas.width = result.canvas.width;\n if (result.canvas.height !== canvas.height) canvas.height = result.canvas.height;\n ctx.drawImage(result.canvas, 0, 0, result.canvas.width, result.canvas.height, 0, 0, result.canvas.width, result.canvas.height);\n } else {\n ctx.drawImage(input, 0, 0, input.width, input.height, 0, 0, canvas.width, canvas.height);\n }\n // draw all results\n await draw.face(result.face, canvas, ui, human.facemesh.triangulation);\n await draw.body(result.body, canvas, ui);\n await draw.hand(result.hand, canvas, ui);\n await draw.gesture(result.gesture, canvas, ui);\n // update log\n const engine = human.tf.engine();\n const gpu = engine.backendInstance ? `gpu: ${(engine.backendInstance.numBytesInGPU ? engine.backendInstance.numBytesInGPU : 0).toLocaleString()} bytes` : '';\n const memory = `system: ${engine.state.numBytes.toLocaleString()} bytes ${gpu} | tensors: ${engine.state.numTensors.toLocaleString()}`;\n const processing = result.canvas ? `processing: ${result.canvas.width} x ${result.canvas.height}` : '';\n const avg = Math.trunc(10 * ui.fps.reduce((a, b) => a + b) / ui.fps.length) / 10;\n const warning = (ui.fps.length > 5) && (avg < 5) ? 
'warning: your performance is low: try switching to higher performance backend, lowering resolution or disabling some models ' : '';\n document.getElementById('log').innerHTML = `\n video: ${ui.camera.name} | facing: ${ui.camera.facing} | resolution: ${ui.camera.width} x ${ui.camera.height} ${processing} \n backend: ${human.tf.getBackend()} | ${memory} \n performance: ${str(result.performance)} FPS:${avg} \n ${warning}\n `;\n\n ui.framesDraw++;\n ui.lastFrame = performance.now();\n // if buffered, immediate loop but limit frame rate although it's going to run slower as JS is singlethreaded\n if (ui.buffered && !ui.drawThread) ui.drawThread = setInterval(() => drawResults(input, canvas), 1000 / ui.bufferedFPSTarget);\n // stop buffering\n if (!ui.buffered && ui.drawThread) {\n clearTimeout(ui.drawThread);\n ui.drawThread = null;\n }\n}\n\n// setup webcam\nasync function setupCamera() {\n if (ui.busy) return null;\n ui.busy = true;\n const video = document.getElementById('video');\n const canvas = document.getElementById('canvas');\n const output = document.getElementById('log');\n const live = video.srcObject ? ((video.srcObject.getVideoTracks()[0].readyState === 'live') && (video.readyState > 2) && (!video.paused)) : false;\n let msg = '';\n status('setting up camera');\n // setup webcam. note that navigator.mediaDevices requires that page is accessed via https\n if (!navigator.mediaDevices) {\n msg = 'camera access not supported';\n output.innerText += `\\n${msg}`;\n log(msg);\n status(msg);\n return null;\n }\n let stream;\n const constraints = {\n audio: false,\n video: {\n facingMode: ui.facing ? 'user' : 'environment',\n resizeMode: ui.crop ? 'crop-and-scale' : 'none',\n width: { ideal: window.innerWidth },\n height: { ideal: window.innerHeight },\n },\n };\n try {\n // if (window.innerWidth > window.innerHeight) constraints.video.width = { ideal: window.innerWidth };\n // else constraints.video.height = { ideal: window.innerHeight };\n stream = await navigator.mediaDevices.getUserMedia(constraints);\n } catch (err) {\n if (err.name === 'PermissionDeniedError') msg = 'camera permission denied';\n else if (err.name === 'SourceUnavailableError') msg = 'camera not available';\n else msg = 'camera error';\n output.innerText += `\\n${msg}`;\n status(msg);\n log(err);\n }\n if (stream) video.srcObject = stream;\n else return null;\n const track = stream.getVideoTracks()[0];\n const settings = track.getSettings();\n // log('camera constraints:', constraints, 'window:', { width: window.innerWidth, height: window.innerHeight }, 'settings:', settings, 'track:', track);\n ui.camera = { name: track.label?.toLowerCase(), width: settings.width, height: settings.height, facing: settings.facingMode === 'user' ? 'front' : 'back' };\n return new Promise((resolve) => {\n video.onloadeddata = async () => {\n video.width = video.videoWidth;\n video.height = video.videoHeight;\n canvas.width = video.width;\n canvas.height = video.height;\n canvas.style.width = canvas.width > canvas.height ? '100vw' : '';\n canvas.style.height = canvas.width > canvas.height ? 
'' : '100vh';\n ui.menuWidth.input.setAttribute('value', video.width);\n ui.menuHeight.input.setAttribute('value', video.height);\n // silly font resizing for paint-on-canvas since viewport can be zoomed\n const size = 14 + (6 * canvas.width / window.innerWidth);\n ui.baseFont = ui.baseFontProto.replace(/{size}/, `${size}px`);\n if (live) video.play();\n ui.busy = false;\n // do once more because onresize events can be delayed or skipped\n // if (video.width > window.innerWidth) await setupCamera();\n status('');\n resolve(video);\n };\n });\n}\n\n// wrapper for worker.postmessage that creates worker if one does not exist\nfunction webWorker(input, image, canvas) {\n if (!worker) {\n // create new webworker and add event handler only once\n log('creating worker thread');\n worker = new Worker(ui.worker, { type: 'module' });\n worker.warned = false;\n // after receiving message from webworker, parse&draw results and send new frame for processing\n worker.addEventListener('message', (msg) => {\n if (!worker.warned) {\n log('warning: cannot transfer canvas from worked thread');\n log('warning: image will not show filter effects');\n worker.warned = true;\n }\n lastDetectedResult = msg.data.result;\n ui.framesDetect++;\n if (!ui.drawThread) drawResults(input);\n // eslint-disable-next-line no-use-before-define\n requestAnimationFrame(() => runHumanDetect(input, canvas));\n });\n }\n // pass image data as arraybuffer to worker by reference to avoid copy\n worker.postMessage({ image: image.data.buffer, width: canvas.width, height: canvas.height }, [image.data.buffer]);\n}\n\n// main processing function when input is webcam, can use direct invocation or web worker\nfunction runHumanDetect(input, canvas) {\n // if live video\n const live = input.srcObject && (input.srcObject.getVideoTracks()[0].readyState === 'live') && (input.readyState > 2) && (!input.paused);\n if (!live && input.srcObject) {\n // stop ui refresh\n if (ui.drawThread) clearTimeout(ui.drawThread);\n ui.drawThread = null;\n // if we want to continue and camera not ready, retry in 0.5sec, else just give up\n if (input.paused) log('camera paused');\n else if ((input.srcObject.getVideoTracks()[0].readyState === 'live') && (input.readyState <= 2)) setTimeout(() => runHumanDetect(input, canvas), 500);\n else log(`camera not ready: track state: ${input.srcObject?.getVideoTracks()[0].readyState} stream state: ${input.readyState}`);\n clearTimeout(ui.drawThread);\n ui.drawThread = null;\n log('frame statistics: drawn:', ui.framesDraw, 'detected:', ui.framesDetect);\n return;\n }\n status('');\n if (ui.useWorker) {\n // get image data from video as we cannot send html objects to webworker\n const offscreen = new OffscreenCanvas(canvas.width, canvas.height);\n const ctx = offscreen.getContext('2d');\n ctx.drawImage(input, 0, 0, input.width, input.height, 0, 0, canvas.width, canvas.height);\n const data = ctx.getImageData(0, 0, canvas.width, canvas.height);\n // perform detection in worker\n webWorker(input, data, canvas, userConfig);\n } else {\n human.detect(input, userConfig).then((result) => {\n if (result.error) log(result.error);\n else {\n lastDetectedResult = result;\n if (!ui.drawThread) drawResults(input);\n ui.framesDetect++;\n requestAnimationFrame(() => runHumanDetect(input, canvas));\n }\n });\n }\n}\n\n// main processing function when input is image, can use direct invocation or web worker\nasync function processImage(input) {\n return new Promise((resolve) => {\n const image = new Image();\n image.onload = async () => {\n 
log('Processing image:', image.src);\n const canvas = document.getElementById('canvas');\n image.width = image.naturalWidth;\n image.height = image.naturalHeight;\n canvas.width = human.config.filter.width && human.config.filter.width > 0 ? human.config.filter.width : image.naturalWidth;\n canvas.height = human.config.filter.height && human.config.filter.height > 0 ? human.config.filter.height : image.naturalHeight;\n const result = await human.detect(image, userConfig);\n drawResults(image, result, canvas);\n const thumb = document.createElement('canvas');\n thumb.className = 'thumbnail';\n thumb.width = window.innerWidth / (ui.columns + 0.1);\n thumb.height = canvas.height / (window.innerWidth / thumb.width);\n const ctx = thumb.getContext('2d');\n ctx.drawImage(canvas, 0, 0, canvas.width, canvas.height, 0, 0, thumb.width, thumb.height);\n document.getElementById('samples-container').appendChild(thumb);\n image.src = '';\n resolve(true);\n };\n image.src = input;\n });\n}\n\n// just initialize everything and call main function\nasync function detectVideo() {\n human.config.videoOptimized = true;\n document.getElementById('samples-container').style.display = 'none';\n document.getElementById('canvas').style.display = 'block';\n const video = document.getElementById('video');\n const canvas = document.getElementById('canvas');\n ui.baseLineHeight = ui.baseLineHeightProto;\n if ((video.srcObject !== null) && !video.paused) {\n document.getElementById('play').style.display = 'block';\n status('paused');\n video.pause();\n } else {\n await setupCamera();\n document.getElementById('play').style.display = 'none';\n status('');\n video.play();\n }\n runHumanDetect(video, canvas);\n}\n\n// just initialize everything and call main function\nasync function detectSampleImages() {\n document.getElementById('play').style.display = 'none';\n human.config.videoOptimized = false;\n const size = 12 + Math.trunc(12 * ui.columns * window.innerWidth / document.body.clientWidth);\n ui.baseFont = ui.baseFontProto.replace(/{size}/, `${size}px`);\n ui.baseLineHeight = ui.baseLineHeightProto * ui.columns;\n document.getElementById('canvas').style.display = 'none';\n document.getElementById('samples-container').style.display = 'block';\n log('Running detection of sample images');\n status('processing images');\n document.getElementById('samples-container').innerHTML = '';\n for (const sample of ui.samples) await processImage(sample);\n status('');\n}\n\nfunction setupMenu() {\n menu = new Menu(document.body, '', { top: '1rem', right: '1rem' });\n const btn = menu.addButton('start video', 'pause video', () => detectVideo());\n menu.addButton('process images', 'process images', () => detectSampleImages());\n document.getElementById('play').addEventListener('click', () => btn.click());\n\n menu.addHTML(' ');\n menu.addList('backend', ['cpu', 'webgl', 'wasm'], human.config.backend, (val) => human.config.backend = val);\n menu.addBool('async operations', human.config, 'async', (val) => human.config.async = val);\n menu.addBool('enable profiler', human.config, 'profile', (val) => human.config.profile = val);\n menu.addBool('memory shield', human.config, 'deallocate', (val) => human.config.deallocate = val);\n menu.addBool('use web worker', ui, 'useWorker');\n menu.addHTML(' ');\n menu.addLabel('enabled models');\n menu.addBool('face detect', human.config.face, 'enabled');\n menu.addBool('face mesh', human.config.face.mesh, 'enabled');\n menu.addBool('face iris', human.config.face.iris, 'enabled');\n menu.addBool('face 
age', human.config.face.age, 'enabled');\n menu.addBool('face gender', human.config.face.gender, 'enabled');\n menu.addBool('face emotion', human.config.face.emotion, 'enabled');\n menu.addBool('body pose', human.config.body, 'enabled');\n menu.addBool('hand pose', human.config.hand, 'enabled');\n menu.addBool('gesture analysis', human.config.gesture, 'enabled');\n\n menu.addHTML(' ');\n menu.addLabel('model parameters');\n menu.addRange('max objects', human.config.face.detector, 'maxFaces', 1, 50, 1, (val) => {\n human.config.face.detector.maxFaces = parseInt(val);\n human.config.body.maxDetections = parseInt(val);\n human.config.hand.maxHands = parseInt(val);\n });\n menu.addRange('skip frames', human.config.face.detector, 'skipFrames', 0, 50, 1, (val) => {\n human.config.face.detector.skipFrames = parseInt(val);\n human.config.face.emotion.skipFrames = parseInt(val);\n human.config.face.age.skipFrames = parseInt(val);\n human.config.hand.skipFrames = parseInt(val);\n });\n menu.addRange('min confidence', human.config.face.detector, 'minConfidence', 0.0, 1.0, 0.05, (val) => {\n human.config.face.detector.minConfidence = parseFloat(val);\n human.config.face.gender.minConfidence = parseFloat(val);\n human.config.face.emotion.minConfidence = parseFloat(val);\n human.config.hand.minConfidence = parseFloat(val);\n });\n menu.addRange('score threshold', human.config.face.detector, 'scoreThreshold', 0.1, 1.0, 0.05, (val) => {\n human.config.face.detector.scoreThreshold = parseFloat(val);\n human.config.hand.scoreThreshold = parseFloat(val);\n human.config.body.scoreThreshold = parseFloat(val);\n });\n menu.addRange('overlap', human.config.face.detector, 'iouThreshold', 0.1, 1.0, 0.05, (val) => {\n human.config.face.detector.iouThreshold = parseFloat(val);\n human.config.hand.iouThreshold = parseFloat(val);\n });\n\n menu.addHTML(' ');\n menu.addChart('FPS', 'FPS');\n\n menuFX = new Menu(document.body, '', { top: '1rem', right: '18rem' });\n menuFX.addLabel('ui options');\n menuFX.addBool('buffered output', ui, 'buffered', (val) => ui.buffered = val);\n menuFX.addBool('crop & scale', ui, 'crop', () => setupCamera());\n menuFX.addBool('camera front/back', ui, 'facing', () => setupCamera());\n menuFX.addBool('use 3D depth', ui, 'useDepth');\n menuFX.addBool('draw boxes', ui, 'drawBoxes');\n menuFX.addBool('draw polygons', ui, 'drawPolygons');\n menuFX.addBool('Fill Polygons', ui, 'fillPolygons');\n menuFX.addBool('draw points', ui, 'drawPoints');\n menuFX.addHTML(' ');\n menuFX.addLabel('image processing');\n menuFX.addBool('enabled', human.config.filter, 'enabled');\n ui.menuWidth = menuFX.addRange('image width', human.config.filter, 'width', 0, 3840, 10, (val) => human.config.filter.width = parseInt(val));\n ui.menuHeight = menuFX.addRange('image height', human.config.filter, 'height', 0, 2160, 10, (val) => human.config.filter.height = parseInt(val));\n menuFX.addRange('brightness', human.config.filter, 'brightness', -1.0, 1.0, 0.05, (val) => human.config.filter.brightness = parseFloat(val));\n menuFX.addRange('contrast', human.config.filter, 'contrast', -1.0, 1.0, 0.05, (val) => human.config.filter.contrast = parseFloat(val));\n menuFX.addRange('sharpness', human.config.filter, 'sharpness', 0, 1.0, 0.05, (val) => human.config.filter.sharpness = parseFloat(val));\n menuFX.addRange('blur', human.config.filter, 'blur', 0, 20, 1, (val) => human.config.filter.blur = parseInt(val));\n menuFX.addRange('saturation', human.config.filter, 'saturation', -1.0, 1.0, 0.05, (val) => 
human.config.filter.saturation = parseFloat(val));\n menuFX.addRange('hue', human.config.filter, 'hue', 0, 360, 5, (val) => human.config.filter.hue = parseInt(val));\n menuFX.addRange('pixelate', human.config.filter, 'pixelate', 0, 32, 1, (val) => human.config.filter.pixelate = parseInt(val));\n menuFX.addBool('negative', human.config.filter, 'negative');\n menuFX.addBool('sepia', human.config.filter, 'sepia');\n menuFX.addBool('vintage', human.config.filter, 'vintage');\n menuFX.addBool('kodachrome', human.config.filter, 'kodachrome');\n menuFX.addBool('technicolor', human.config.filter, 'technicolor');\n menuFX.addBool('polaroid', human.config.filter, 'polaroid');\n}\n\nasync function main() {\n log('Human: demo starting ...');\n setupMenu();\n document.getElementById('log').innerText = `Human: version ${human.version} TensorFlow/JS: version ${human.tf.version_core}`;\n // human.tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true);\n // this is not required, just pre-loads all models\n if (ui.modelsPreload) {\n status('loading');\n await human.load(userConfig);\n }\n // this is not required, just pre-warms all models for faster initial inference\n if (ui.modelsWarmup) {\n status('initializing');\n await human.warmup(userConfig);\n }\n status('human: ready');\n document.getElementById('loader').style.display = 'none';\n document.getElementById('play').style.display = 'block';\n}\n\nwindow.onload = main;\nwindow.onresize = setupCamera;\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,sBAAA,WAAA;AAAA;AAGA,mBAAgB;AAIf,QAAI,OAAO,SAAS;AAAe,aAAO;;AAC1C,QAAI,OAAO,WAAW;AAAe,aAAO;;AAC5C,QAAI,OAAO,YAAW;AAAe,aAAO;;AAC5C,UAAM,IAAI,MAAM;;AAGjB,gBAAa;AAEb,SAAO,UAAU,UAAU,QAAO;AAGlC,MAAI,QAAO;AACV,YAAQ,UAAU,QAAO,MAAM,KAAK;;AAGrC,UAAQ,UAAU,QAAO;AACzB,UAAQ,UAAU,QAAO;AACzB,UAAQ,WAAW,QAAO;;ACxB1B,0BAAA,WAAA;AACA,gBAAa;AACb,gBAAa,QAAO;AAGpB;AACE,oBAAgB;AACd,UAAI,OAAO,IAAI;;;AAGnB,MAAI,QAAO,QAAQ,QAAO,SAAS,QAAO,eAAe,QAAO;AAC9D,WAAO,UAAU;;AAGjB,cAAU,SAAQ;AAClB,YAAQ,SAAS;;AAGnB;AACE,WAAO,QAAO,KAAK,kBAAkB;;AAIvC,YAAU,SAAQ;AAElB,aAAW,OAAO;AAChB,QAAI,OAAO,QAAQ;AACjB,YAAM,IAAI,UAAU;;AAEtB,WAAO,QAAO,KAAK,kBAAkB;;AAGvC,aAAW,QAAQ;AACjB,QAAI,OAAO,SAAS;AAClB,YAAM,IAAI,UAAU;;AAEtB,cAAU,QAAO;AACjB,QAAI,UAAS;AACX,UAAI,OAAO,aAAa;AACtB,YAAI,KAAK,OAAM;;AAEf,YAAI,KAAK;;;AAGX,UAAI,KAAK;;AAEX,WAAO;;AAGT,aAAW,cAAc;AACvB,QAAI,OAAO,SAAS;AAClB,YAAM,IAAI,UAAU;;AAEtB,WAAO,QAAO;;AAGhB,aAAW,kBAAkB;AAC3B,QAAI,OAAO,SAAS;AAClB,YAAM,IAAI,UAAU;;AAEtB,WAAO,QAAO,WAAW;;;AC5D3B,6BAAA,WAAA;AAqBA;AAIA,gBAAa,sBAAuB;AAGpC,mBAAiB,QAAO,cAAc;AACpC,eAAW,KAAK;AAChB,YAAQ,YAAY,SAAS;WACtB;WAAW;WAAY;WAAa;WAAa;WAAc;WAAc;WAAY;WAAa;WAAe;WAAgB;AACxI,eAAO;;AAEP,eAAO;;;AAIb;AACE,QAAI,CAAC;AAAK,aAAO;AACjB;AACA,WAAO;AACL,cAAQ;aACD;aACA;AACH,iBAAO;aACJ;aACA;aACA;aACA;AACH,iBAAO;aACJ;aACA;AACH,iBAAO;aACJ;aACA;aACA;AACH,iBAAO;;AAEP,cAAI;AAAS;AACb,gBAAO,MAAK,KAAK;AACjB,oBAAU;;;;AAOlB;AACE,eAAW,mBAAmB;AAC9B,QAAI,OAAO,SAAS,YAAa,SAAO,eAAe,cAAc,CAAC,WAAW;AAAO,YAAM,IAAI,MAAM,uBAAuB;AAC/H,WAAO,QAAQ;;AAMjB,UAAQ,gBAAgB;AACxB;AACE,SAAK,WAAW,kBAAkB;AAClC;AACA,YAAQ,KAAK;WACN;AACH,aAAK,OAAO;AACZ,aAAK,MAAM;AACX,aAAK;AACL;WACG;AACH,aAAK,WAAW;AAChB,aAAK;AACL;WACG;AACH,aAAK,OAAO;AACZ,aAAK,MAAM;AACX,aAAK;AACL;;AAEA,aAAK,QAAQ;AACb,aAAK,MAAM;AACX;;AAEJ,SAAK,WAAW;AAChB,SAAK,YAAY;AACjB,SAAK,WAAW,QAAO,YAAY;;AAGrC,gBAAc,UAAU,QAAQ;AAC9B,QAAI,IAAI,WAAW;AAAG,aAAO;AAC7B;AACA;AACA,QAAI,KAAK;AACP,UAAI,KAAK,SAAS;AAClB,UAAI,MAAM;AAAW,eAAO;AAC5B,UAAI,KAAK;AACT,WAAK,WAAW;;AAEhB,UAAI;;AAEN,QAAI,IAAI,IAAI;AAAQ,aAAO,IAAI,IAAI,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK;AACtE,WAAO,KAAK;;AAGd,gBAAc,UAAU,MAAM;AAG9B,gBAAc,UAAU,OAAO;AAG/B,gBAAc,UAAU,WAAW;AACjC,QAAI,KAAK,YAAY,IAAI;AACvB,UAAI,KAAK,KAAK,UAAU,KAAK,YAAY,KAAK,UAAU,GAAG,KAAK;AAChE,aAAO,KAAK,SAAS,SAAS,KAAK,UAAU,GAAG,KAAK;;AAEvD,QAAI,KAAK,KAAK,UAAU,KAAK,YAAY,KAAK,UAAU,GAAG,IAAI;AAC/D,SAAK,YAAY,IAAI;;AAKvB;AACE,QAAI,QAAQ;AAAM,aAAO;aAAW,QAAQ,MAAM;AAAM,aAAO;aAAW,QAAQ,MAAM;AAAM,aAAO;aAAW,QAAQ,MAAM;AAAM,aAAO;AAC3I,WAAO,QAAQ,MAAM,IAAO,KAAK;;AAMnC;AACE,YAAQ,IAAI,SAAS;AACrB,QAAI,IAAI;AAAG,aAAO;AAClB,aAAS,cAAc,IAAI;AAC3B,QAAI,MAAM;AACR,UAAI,KAAK;AAAG,cAAK,WAAW,KAAK;AACjC,aAAO;;AAET,QAAI,EAAE,IAAI,KAAK,OAAO;AAAI,aAAO;AACjC,SAAK,cAAc,IAAI;AACvB,QAAI,MAAM;AACR,UAAI,KAAK;AAAG,cAAK,WAAW,KAAK;AACjC,aAAO;;AAET,QAAI,EAAE,IAAI,KAAK,OAAO;AAAI,aAAO;AACjC,SAAK,cAAc,IAAI;AACvB,QAAI,MAAM;AACR,UAAI,KAAK;AACP,YAAI,OAAO;AAAG,eAAK;;AAAO,gBAAK,WAAW,KAAK;;AAEjD,aAAO;;AAET,WAAO;;AAWT;AACE,QAAK,KAAI,KAAK,SAAU;AACtB,YAAK,WAAW;AAChB,aAAO;;AAET,QAAI,MAAK,WAAW,KAAK,IAAI,SAAS;AACpC,UAAK,KAAI,KAAK,SAAU;AACtB,cAAK,WAAW;AAChB,eAAO;;AAET,UAAI,MAAK,WAAW,KAAK,IAAI,SAAS;AACpC,YAAK,KAAI,KAAK,SAAU;AACtB,gBAAK,WAAW;AAChB,iBAAO;;;;;AAOf;AACE,YAAQ,KAAK,YAAY,KAAK;AAC9B,YAAQ,oBAAoB,MAAM,KAAK;AACvC,QAAI,MAAM;AAAW,aAAO;AAC5B,QAAI,KAAK,YAAY,IAAI;AACvB,UAAI,KAAK,KAAK,UAAU,GAAG,GAAG,KAAK;AACnC,aAAO,KAAK,SAAS,SAAS,KAAK,UAAU,GAAG,KAAK;;AAEvD,QAAI,KAAK,KAAK,UAAU,GAAG,GAAG,IAAI;AAClC,SAAK,YAAY,IAAI;;AAMvB;AACE,gBAAY,oBAAoB,MAAM,KAAK;AAC3C,QAAI,CAAC,KAAK;AAAU,aAAO,IAAI,SAAS,QAAQ;AAChD,SAAK,YAAY;AACjB,cAAU,IAAI,SAAU,SAAQ,KAAK;AACrC,QAAI,KAAK,KAAK,UAA
U,GAAG;AAC3B,WAAO,IAAI,SAAS,QAAQ,GAAG;;AAKjC;AACE,YAAQ,OAAO,IAAI,SAAS,KAAK,MAAM,OAAO;AAC9C,QAAI,KAAK;AAAU,aAAO,IAAI;AAC9B,WAAO;;AAOT;AACE,QAAK,KAAI,SAAS,KAAK,MAAM;AAC3B,cAAQ,IAAI,SAAS,WAAW;AAChC,UAAI;AACF,gBAAQ,EAAE,WAAW,EAAE,SAAS;AAChC,YAAI,KAAK,SAAU,KAAK;AACtB,eAAK,WAAW;AAChB,eAAK,YAAY;AACjB,eAAK,SAAS,KAAK,IAAI,IAAI,SAAS;AACpC,eAAK,SAAS,KAAK,IAAI,IAAI,SAAS;AACpC,iBAAO,EAAE,MAAM,GAAG;;;AAGtB,aAAO;;AAET,SAAK,WAAW;AAChB,SAAK,YAAY;AACjB,SAAK,SAAS,KAAK,IAAI,IAAI,SAAS;AACpC,WAAO,IAAI,SAAS,WAAW,GAAG,IAAI,SAAS;;AAKjD;AACE,YAAQ,OAAO,IAAI,SAAS,KAAK,MAAM,OAAO;AAC9C,QAAI,KAAK;AACP,gBAAU,KAAK,YAAY,KAAK;AAChC,aAAO,IAAI,KAAK,SAAS,SAAS,WAAW,GAAG;;AAElD,WAAO;;AAGT;AACE,YAAS,KAAI,SAAS,KAAK;AAC3B,QAAI,MAAM;AAAG,aAAO,IAAI,SAAS,UAAU;AAC3C,SAAK,WAAW,IAAI;AACpB,SAAK,YAAY;AACjB,QAAI,MAAM;AACR,WAAK,SAAS,KAAK,IAAI,IAAI,SAAS;;AAEpC,WAAK,SAAS,KAAK,IAAI,IAAI,SAAS;AACpC,WAAK,SAAS,KAAK,IAAI,IAAI,SAAS;;AAEtC,WAAO,IAAI,SAAS,UAAU,GAAG,IAAI,SAAS;;AAGhD;AACE,YAAQ,OAAO,IAAI,SAAS,KAAK,MAAM,OAAO;AAC9C,QAAI,KAAK;AAAU,aAAO,IAAI,KAAK,SAAS,SAAS,UAAU,GAAG,IAAI,KAAK;AAC3E,WAAO;;AAIT;AACE,WAAO,IAAI,SAAS,KAAK;;AAG3B;AACE,WAAO,OAAO,IAAI,SAAS,KAAK,MAAM,OAAO;;;;;;;;;;;;;;;;;;;;;;;;ACtS/C;;;;;;;;;;;;;;;;AAgBO,6BAAwB;AACxB,6BAAwB;;MAG3B;AACI,aAAK,UAAU;AACf,aAAK,YAAY;AACjB,aAAK,OAAO,IAAI;AAChB,aAAK,eAAe;;MAExB;AACI,YAAI,CAAC,KAAK,KAAK,IAAI;AACf,eAAK,UAAU,SAAS,KAAK,SAAS;;AAE1C,eAAO,KAAK,KAAK,IAAI;;MAEzB;AACI,aAAK;AACL,aAAK,KAAK,IAAI,QAAQ;;MAE1B;AACI,eAAO,KAAK,KAAK,IAAI;;MAEzB;AACI,aAAK;AACL,eAAO,KAAK,KAAK,OAAO;;MAE5B;AACI,eAAO,KAAK;;;;MAUhB;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAG7B;AACI,eAAO,mBAAkB;;MAG7B;AACI,eAAO,KAAK,qBAAqB,KAAK,mBAAkB;;MAE5D;AACI,eAAO,mBAAkB;;MAE7B,kBAAmB,GAAG,GAAG,YAAY,YAAY,MAAM,yBAAY;AAC/D,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB
;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B,aAAc,eAAO,QAAQ,UAAU,MAAM,yBAAY;AACrD,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B,sBAAuB,eAAO,QAAQ,UAAU,MAAM,yBAAY;AAC9D,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAG7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;;AAGjC;AACI,YAAM,IAAI,MAAM,IAAI;;ACvfxB;;;;;;;;;;;;;;;;AA8BO;AACH,oBAAc,OAAM;AACpB,iBAAW;AACX,mBAAY;AAEZ,aAAO,UAAU;AAEb,iBAAS,KAAK,WAAW,UAAW;AAEpC;AAEA,eAAO,OAAM;AACb,eAAM,WAAW,OAAM;AACvB,eAAM,UAAS;;;AAIhB;AACH,aAAO,KAAK,IAAI,MAAK,KAAK,IAAI,GAAG;;AAE9B;AACH,aAAO,MAAM,MAAM,IAAI,MAAM,MAAM;;AAEhC;AACH,iBAAU;AACV,mBAAa,GAAG,IAAI,IAAI,QAAQ;AAC5B,gBAAO,IAAI;;AAEf,aAAO;;AASJ;AACH,gBAAU,KAAK;AACf,aAAQ,IAAI,IAAM,KAAI,KAAK;;AAGxB;AACH,mBAAa;AACb,mBAAa,GAAG,IAAI,EAAE,QAAQ;AAC1B,qBAAa,OAAO,EAAE,MAAM,OAAO,EAAE;AACrC,kBAAU,OAAO;;AAErB,aAAO;;AAiBJ;AACH,UAAI,CAAC;AACD,cAAM,IAAI,MAAM,OAAO,QAAQ,WAAW,MAAM;;;AAGjD,qEAAgE;AACnE,cAAO,aAAY,QAAQ,SAAS,MAAM,qBAAqB,WAAW,cAAc;;AAErF;AACH,cAAO,KAAK,MAAM,MAAM;;AAqBrB,oCAA+B,qBAAqB;AACvD,UAAI,UAAU;AACV,iBAAS;;AAEb,UAAI,MAAM,QAAQ,QAAQ,cAAa,QAAQ,CAAC;AAC5C,qBAAa,GAAG,IAAI,IAAI,QAAQ,EAAE;AAC9B,mBAAQ,IAAI,IAAI,QAAQ;;;AAI5B,eAAO,KAAK;;AAEhB,aAAO;;AAaJ;AACH,UAAI,MAAM,WAAW;AAEjB,eAAO;;AAEX,iBAAW,MAAM;AACjB,mBAAa,GAAG,IAAI,MAAM,QAAQ;AAC9B,gBAAQ,MAAM;;AAElB,aAAO;;AAEJ;AACH,aAAO,MAAM,WAAW;;AAErB;AACH,UAAI,OAAO;AACP,eAAO;;AAEX,UAAI,MAAM,QAAQ,MAAM;AACpB,eAAO;;AAEX,UAAI,GAAG,WAAW,GAAG;AACjB,eAAO;;AAEX,mBAAa,GAAG,IAAI,GAAG,QAAQ;AAC3B,YAAI,GAAG,OAAO,GAAG;AACb,iBAAO;;;AAGf,aAAO;;AAEJ;AACH,aAAO,IAAI,MAAM;;AAEd;AAEH,UAAI,KAAK,QAAQ;AAEb,eAAO,KAAK,KAAK;;AAErB,UAAI,MAAM;AACN,eAAO;iBAEF,MAAM;AACX,eAAO;;AAGP,oBAAY,KAAK,IAAI,IAAI;AACzB,eAAQ,OAAM,KAAM,OAAM;;;AAG3B;AACH,oBAAc,KAAK,KAAK,KAAK,KAAK;AAClC,aAAO,CAAC,OAAO,KAAK,KAAK,OAAO;;AAc7B;AACH,8BAAwB,IAAI,YAAY;AACxC,mBAAa,GAAG,IAAI,GAAG,EAAE;AACrB,wBAAgB,KAAK;;AAEzB,eAAQ;AACR,aAAO;;AAEJ;AACH,UAAI,QAAQ,EAAE;AACV,eAAO;;AAEX,aAAO,IAAI,IAAI,OAAO,OAAO,EAAE;;AAE5B,6CAAwC,aAAa;AACxD,aAAO,IAAI,QAAQ;AACf,uBAAe;AACf,sBAAc;AACV,cAAI;AACA;AACA;;AAEJ;AACA,8BAAoB,QAAQ;AAC5B,cAAI,cAAc,QAAQ,YAAY;AAClC;AACA;;AAEJ,qBAAW,OAAO;;AAEtB;;;AAYD;AACH,sBAAgB;AAChB,wBAAkB;AAClB,mBAAa,GAAG,IAAI,MAAM,QAAQ,EAAE;AAChC,YAAI,MAAM,MAAM;AACZ,uBAAa,MAAM;mBAEd,MAAM,OAAO;AAClB,cAAI,gBAAgB
;AAChB,kBAAM,MAAM,yDACW,uBAAuB;;AAElD,wBAAc;mBAET,MAAM,KAAK;AAChB,gBAAM,MAAM,gCAAgC,MAAM,aAAa;;;AAGvE,UAAI,gBAAgB;AAChB,YAAI,OAAO,KAAK,SAAS;AACrB,gBAAM,MAAM,QAAQ,yCAAyC;;AAEjE,eAAO;;AAEX,UAAI,cAAc;AACd,cAAM,MAAM,qCAAqC;;AAGrD,UAAI,OAAO,cAAc;AACrB,cAAM,MAAM,wDACD,UAAU;;AAEzB,uBAAiB,MAAM;AACvB,eAAS,eAAe,OAAO;AAC/B,aAAO;;AAEJ;AACH,mBAAa,MAAM;AAEnB,aAAO,QAAQ,OAAO,MAAM,IAAI,UAAU,KAAK,GAAG,OAAO;AAEzD,cAAO,KAAK,MAAM,QAAM,MAAM,CAAC,QAAQ,KAAK,OAAO,MAAM,+CAA+C,SAAS,sBACjG;AAEhB,cAAO,KAAK,MAAM,QAAM,OAAM,MAAM,MAAM,0DAC1B;AAEhB,aAAO,KAAK,IAAI,OAAK,IAAI,IAAI,OAAO,IAAI;;AAGrC;AACH,uBAAiB;AACjB,uBAAiB;AACjB,2BAAqB,QAAQ,QAAQ,MAAM,QAAQ,SAAS,KAAK,WAAW;AAC5E,mBAAc,QAAQ,QAAQ,eAC1B,OACA,gBAAe,MAAM,OAAO;AAChC,cAAQ;AACR,mBAAa,GAAG,IAAI,MAAM,QAAQ,EAAE;AAChC,YAAI,QAAQ;AACR,cAAI,KAAK,OAAO,KAAK,MAAM,OAAO;AAC9B,kBAAM,IAAI,MAAM,sBAAsB,oBAAoB,MAAM;;AAEpE,cAAK,MAAK,MAAM,QAAQ,KAAK,KAAK,MAAM,MAAM,OAAO;AACjD,qBAAS,KAAK,MAAM;AACpB,qBAAS,KAAK;;AAElB,cAAI,KAAK,MAAM;AACX;;;AAGR,YAAI,MAAM,OAAO;AACb,mBAAS,KAAK,MAAM;AACpB,mBAAS,KAAK;;;AAGtB,aAAO,CAAE,UAAU;;AAEhB;AACH,mBAAa;AACb,UAAI,SAAS,QAAQ,UAAU;AAC3B,iBAAS,IAAI,aAAa;iBAErB,UAAU;AACf,iBAAS,IAAI,WAAW;iBAEnB,UAAU;AACf,iBAAS,IAAI,WAAW;;AAGxB,cAAM,IAAI,MAAM,qBAAqB;;AAEzC,aAAO;;AAEJ;AACH,mBAAa;AACb,UAAI,SAAS,QAAQ,UAAU;AAC3B,iBAAS,IAAI,aAAa;iBAErB,UAAU;AACf,iBAAS,IAAI,WAAW;iBAEnB,UAAU;AACf,iBAAS,IAAI,WAAW;iBAEnB,UAAU;AACf,iBAAS,IAAI,MAAM;;AAGnB,cAAM,IAAI,MAAM,qBAAqB;;AAEzC,aAAO;;AAEJ;AACH,mBAAa,GAAG,IAAI,KAAK,QAAQ;AAC7B,oBAAY,KAAK;AACjB,YAAI,MAAM,QAAQ,CAAC,SAAS;AACxB,gBAAM,MAAM,oBAAoB,iCAAiC;;;;AAKtE;AACH,aAAO,UAAU,UAAU,UAAU,eAAe,UAAU,aAC1D,UAAU,WAAW,UAAU;;AAMhC;AACH,UAAI,YAAY;AACZ,eAAO;;AAEX,UAAI,YAAY,aAAa,YAAY;AACrC,eAAO;;AAEX,UAAI,YAAY,WAAW,YAAY,aAAa,YAAY;AAC5D,eAAO;;AAEX,UAAI,YAAY,UAAU,YAAY;AAClC,eAAO;;AAEX,aAAO;;AAEJ;AACH,aAAO,aAAa,gBAAgB,aAAa,cAC7C,aAAa;;AAEd;AACH,UAAI,UAAU,aAAa,UAAU;AACjC,eAAO;iBAEF,UAAU;AACf,eAAO;iBAEF,UAAU;AACf,eAAO;;AAGP,cAAM,IAAI,MAAM,iBAAiB;;;AASlC;AACH,UAAI,OAAO;AACP,eAAO;;AAEX,kBAAY;AACZ,UAAI,QAAQ,OAAK,SAAS,EAAE;AAC5B,aAAO;;AAGJ;AACH,aAAO,OAAO,UAAU,YAAY,iBAAiB;;AAElD;AACH,aAAO,OAAO,UAAU;;AAErB;AACH,aAAO,OAAO,UAAU;;AAErB;AACH,UAAI,MAAM,QAAQ;AACd,eAAO,YAAW,OAAO;;AAE7B,UAAI,kBAAkB;AAClB,eAAO;iBAEF,kBAAkB,cAAc,kBAAkB;AACvD,eAAO;iBAEF,UAAS;AACd,eAAO;iBAEF,UAAS;AACd,eAAO;iBAEF,WAAU;AACf,eAAO;;AAEX,aAAO;;AAEJ;AACH,aAAO,CAAC,CAAE,MAAK,EAAE,eAAe,EAAE,QAAQ,EAAE;;AAEzC;AACH,mBAAa,OAAO,IAAI,MAAM,EAAE;AAC5B,YAAI,OAAO,MAAM;AACb,iBAAO;;;AAGf,aAAO;;AAEJ;AACH,mBAAa,MAAM;AACnB,UAAI,OAAO;AACP,eAAO;;AAIX,sBAAgB,IAAI,MAAM,OAAO;AACjC,cAAQ,OAAO,KAAK,MAAM,OAAO;AACjC,mBAAa,OAAO,GAAG,KAAK,GAAG,EAAE;AAC7B,gBAAQ,KAAK,QAAQ,IAAI,KAAK,MAAM,IAAI;;AAE5C,aAAO;;AAEX;AACI,kBAAY,IAAI;AAChB,UAAI,MAAM,WAAW;AACjB,kBAAU,MAAM;AAChB,qBAAa,GAAG,IAAI,GAAG;AACnB,cAAI,KAAK,EAAE,SAAS;;;AAIxB,kBAAU,MAAM;AAChB,qBAAa,MAAM,MAAM;AACzB,oBAAY,KAAK,OAAO,YAAY,MAAM;AAC1C,qBAAa,GAAG,IAAI,GAAG;AACnB,cAAI,KAAK,mBAAkB,SAAS,IAAI,KAAK,MAAM;;;AAG3D,aAAO;;AAGJ;AACH,UAAI,MAAM,WAAW;AAEjB,eAAO,EAAE;;AAEb,mBAAa,MAAM,OAAO,YAAY,MAAM;AAC5C,UAAI,SAAS;AAET,eAAO;;AAEX,UAAI,SAAS,EAAE;AACX,cAAM,IAAI,MAAM,IAAI,wCAAwC,EAAE;;AAElE,aAAO,mBAAkB,GAAG,OAAO;;AAEhC;AACH,qBAAc,qBAAoB,MAAM;AACxC,mBAAa,GAAG,IAAI,OAAM,QAAQ;AAC9B,eAAM,KAAK;;AAEf,aAAO;;AAEJ;AACH,UAAI,SAAS,QAAQ,UAAU,aAAa,UAAU;AAClD,eAAO,IAAI,aAAa;iBAEnB,UAAU;AACf,eAAO,IAAI,WAAW;iBAEjB,UAAU;AACf,eAAO,IAAI,WAAW;;AAGtB,cAAM,IAAI,MAAM,qBAAqB;;;AAQtC;AACH,mBAAa,MAAM,OAAO,gBAAgB,OAAO,MAAM;AACvD,UAAI,SAAS,QAAQ,UAAU;AAC3B,eAAO,eAAc,OAAO,IAAI,aAAa;iBAExC,UAAU;AACf,eAAO,eAAc,OAAO,IAAI,WAAW;iBAEtC,UAAU;AACf,eAAO,eAAc,OAAO,IAAI,WAAW;;AAG3C,cAAM,IAAI,MAAM,qBAAqB;;;AAGtC;AACH,YAAM,QAAQ;AACV,gBA
AO,OAAO,UAAU,YAAY,WAAW,GAAG,MAAM,0EAC1C;;;AAWf;AACH,UAAI,SAAS;AACT,eAAO;iBAEF,SAAS;AACd,eAAO,KAAK;;AAEhB,mBAAY,KAAK,KAAK,SAAS;AAC/B,mBAAa,GAAG,IAAI,KAAK,SAAS,GAAG,EAAE;AACnC,kBAAS,QAAQ,KAAK,KAAK;;AAE/B,aAAO;;AAUJ;AACH,UAAI,SAAS;AACT,eAAO;iBAEF,SAAS;AACd,eAAO,CAAC;;AAEZ,mBAAa,IAAI,MAAM;AACvB,mBAAa,GAAG,IAAI,KAAK,SAAS,GAAG,EAAE;AACnC,aAAK,KAAK,KAAK,MAAM,SAAQ,QAAQ;AACrC,kBAAS,KAAK,KAAK,QAAQ;;AAE/B,WAAK,KAAK,SAAS,KAAK;AACxB,aAAO;;AAOJ;AAOH,aAAO,UAAU,OAAO,QAAQ,OAAO,OAAO,SAAS;;ACrnB3D;;;;;;;;;;;;;;;;AAkBA,uCAAkC;;MAU9B;AACI,aAAK,SAAS;AACd,aAAK,QAAQ;AACb,aAAK,eAAe;AACpB,aAAK,WAAW;AAChB,aAAK;;MAET;AACI,YAAI,KAAK,YAAY;AACjB,kBAAQ,KAAK,YAAY,KAAK,oEACO;;AAEzC,aAAK,eAAe;AACpB,aAAK,WAAW;;MAEpB;AACI,aAAK,aAAa,YAAY,CAAE,cAAc;AAG9C,YAAI,KAAK,SAAS,aAAa;AAC3B,4BAAkB,KAAK,SAAS;AAChC,kBAAQ,KAAK,qCAAqC,aAAa;AAC/D,eAAK,IAAI,UAAU;;;YAGrB;AACF,YAAI,YAAY,KAAK;AACjB,iBAAO,KAAK,MAAM;;AAEtB,aAAK,MAAM,YAAY,MAAM,KAAK,aAAa;AAC/C,eAAO,KAAK,MAAM;;MAEtB;AACI,YAAI,YAAY,KAAK;AACjB,iBAAO,KAAK,MAAM;;AAEtB,0BAAkB,KAAK,aAAa;AACpC,YAAI,WAAU;AACV,gBAAM,IAAI,MAAM,QAAQ;;AAG5B,aAAK,MAAM,YAAY;AACvB,eAAO,KAAK,MAAM;;MAEtB;AACI,eAAO,KAAK,IAAI;;MAEpB;AACI,eAAO,KAAK,IAAI;;MAEpB;AACI,eAAO,KAAK;;UAGZ;AACA,eAAO,KAAK;;MAEhB;AACI,YAAI,KAAK,aAAa,aAAa;AAC/B,gBAAM,IAAI,MAAM,mBAAmB;;AAEvC,aAAK,MAAM,YAAY;AACvB,YAAI,KAAK,aAAa,UAAU,WAAW;AACvC,eAAK,aAAa,UAAU,QAAQ;;;MAG5C;AACI,YAAI,KAAK,aAAa,aAAa;AAC/B,gBAAM,IAAI,MAAM,yBAAyB;;AAE7C,eAAO,KAAK,aAAa,UAAU;;MAEvC;AACI,aAAK,QAAQ,OAAO,OAAO,IAAI;;MAEnC;AACI,aAAK,QAAQ;AACb,aAAK,WAAW;AAChB,aAAK;;MAET;AACI,YAAI,OAAO,KAAK,WAAW,eACvB,OAAO,KAAK,OAAO,aAAa,eAChC,OAAO,KAAK,OAAO,SAAS,WAAW;AACvC;;AAEJ,0BAAkB,gBAAe,KAAK,OAAO,SAAS;AACtD,YAAI,8BAA6B;AAC7B,4BAAkB,UAAU,4BAA2B,MAAM;AAC7D,oBAAU,QAAQ;AACd,iCAAqB,SAAS,MAAM;AACpC,iBAAK,SAAS,OAAO,YAAW,KAAK;;;;;AAK9C;AACH,qBAAe;AACf,kBAAY,QAAQ,+BAA+B;AAC/C,qBAAY,QAAQ,EAAE,IAAI,EAAE;AAC5B,eAAO,EAAE,KAAK;;AAElB,aAAO;;AAEX;AACI,aAAO,mBAAmB,SAAS,mBAAmB,SAAS;;AAEnE;AACI,cAAQ,MAAM;AACd,UAAI,UAAU,UAAU,UAAU;AAC9B,eAAO,UAAU;iBAEZ,GAAG,CAAC,YAAY;AACrB,eAAO,CAAC;;AAEZ,YAAM,IAAI,MAAM,oCAAoC,kBAAkB;;AAUnE;AACH,aAAO,SAAA;;AAEA,aAAA,MAAM;AACV;AACH,eAAA,MAAM;;AC9JV;;;;;;;;;;;;;;;;AAoBA;AAEO;AACH,UAAI,oBAAmB;AAEnB;AACA,YAAI,OAAQ,WAAY;AACpB,eAAK;mBAEA,OAAQ,WAAY;AACzB,eAAK;mBAEA,OAAQ,YAAa;AAC1B,eAAK;mBAEA,OAAQ,SAAU;AACvB,eAAK;;AAGL,gBAAM,IAAI,MAAM;;AAEpB,2BAAkB;;AAEtB,aAAO;;AAGX;AACI,iBAAW;AACX,UAAI,GAAG,cAAc;AACjB,WAAG,aAAa,IAAI;;AAExB,aAAO,GAAG;;AASP;AACH,wBAAkB;AAClB,UAAI,UAAU,IAAI;AACd,eAAO,UAAU,IAAI;;AAGrB,0BAAkB;AAClB,kBAAU,IAAI,KAAK;AACnB,eAAO,UAAU,IAAI;;;ACpEjB,iBAAO;AACP,iBAAQ;AACR,kBAAS;AACT,iBAAO;AACP,kBAAQ;AACR,gBAAO;AACP,gBAAO;AACP,oBAAU;AACV,mBAAU;AACV,iBAAQ;AACR,kBAAS;AACT,iBAAQ;AACR,kBAAS;AACT,kBAAS;AACT,qBAAW;AACX,4BAAmB;AACnB,sBAAa;AACb,8BAAqB;AACrB,yBAAe;AACf,2BAAkB;AAClB,wBAAe;AACf,kBAAQ;AACR,iBAAQ;AACR,yBAAe;AACf,qBAAW;AACX,oBAAU;AACV,oBAAU;AACV,iCAAwB;AACxB,iCAAuB;AACvB,mBAAU;AACV,mCAA0B;AAC1B,kCAAyB;AACzB,iBAAO;AACP,iBAAQ;AACR,oBAAU;AACV,2BAAiB;AACjB,0BAAgB;AAChB,mCAAyB;AACzB,gDAAuC;AACvC,+CAAsC;AACtC,iBAAQ;AACR,uBAAc;AACd,oCAA2B;AAC3B,qCAA4B;AAC5B,iBAAO;AACP,iBAAO;AACP,oBAAW;AACX,gBAAO;AACP,mBAAS;AACT,iBAAO;AACP,kBAAS;AACT,gBAAO;AACP,kBAAQ;AACR,2BAAiB;AACjB,kBAAS;AACT,sBAAY;AACZ,4BAAkB;AAClB,sBAAY;AACZ,sBAAY;AACZ,qBAAW;AACX,0BAAgB;AAChB,sBAAY;AACZ,iBAAQ;AACR,iBAAQ;AACR,qBAAY;AACZ,kBAAS;AACT,kBAAS;AACT,kBAAQ;AACR,uBAAa;AACb,qBAAY;AACZ,iBAAO;AACP,kBAAS;AACT,wBAAc;AACd,uBAAc;AACd,sBAAa;AACb,uBAAc;AACd,gBAAO;AACP,wBAAe;AACf,iBAAO;AACP,qBAAW;AACX,qBAAW;AACX,4BAAmB;AACnB,sBAAa;AACb,8BAAqB;AACrB,8BAAqB;AACrB,iBAAQ;AACR,iBAAO;AACP,qBAAW;AACX,sBAAa;AACb,gBAAO;AACP,sBAAY;AACZ,oBAAU;AACV,sB
AAY;AACZ,iCAAuB;AACvB,iCAAuB;AACvB,iCAAuB;AACvB,sBAAY;AACZ,oBAAU;AACV,mBAAS;AACT,iBAAQ;AACR,iBAAO;AACP,mBAAS;AACT,iBAAQ;AACR,kBAAS;AACT,iBAAQ;AACR,uBAAc;AACd,kBAAQ;AACR,qBAAW;AACX,kCAAyB;AACzB,sCAA6B;AAC7B,4BAAkB;AAClB,+BAAsB;AACtB,mBAAS;AACT,qBAAW;AACX,kBAAS;AACT,mBAAS;AACT,uBAAa;AACb,sBAAY;AACZ,iBAAQ;AACR,mBAAS;AACT,iBAAO;AACP,iBAAQ;AACR,iBAAQ;AACR,qBAAW;AACX,qBAAY;AACZ,kBAAQ;AACR,iBAAO;AACP,2BAAkB;AAClB,oBAAU;AACV,qBAAW;AACX,+BAAqB;AACrB,oBAAU;AACV,iBAAO;AACP,0BAAiB;AACjB,0BAAgB;AAChB,gBAAO;AACP,kBAAQ;AACR,kBAAQ;AACR,iBAAQ;AACR,uBAAa;AACb,mBAAU;AACV,oBAAU;AACV,+BAAsB;AACtB,uBAAa;AAIb,kBAAQ;AACR,uBAAc;AACd,8BAAoB;AACpB,0BAAgB;AAChB,yBAAe;AACf,kCAAwB;ACxJpC;;;;;;;;;;;;;;;;AAkBA,4BAAuB,WAAU,kBAAkB,MAAM,IAAI;AAC7D,0BAAqB,WAAU,gBAAgB,MAAM,IAAI;AAOlD;AACH,kBAAY,SAAQ,YAAY;AAChC,aAAO,gBAAe,IAAI;;AAMvB;AACH,aAAO,cAAa,IAAI;;AAErB;AACH,iBAAW,gBAAe;AAC1B,qBAAe;AACf,aAAO;AACH,eAAQ,MAAM,SAAU,GAAG;AAC3B,YAAI;AACA;;AAEJ,+BAAsB;AACtB,2BAAmB,IAAI,MAAM;AAC7B,YAAI,aAAY;AACZ,iBAAO,KAAK;;;AAGpB,aAAO;;AAaJ;AACH,aAAQ,YAAY,eAAgB;AACpC,kBAAY,SAAQ,YAAY;AAChC,UAAI,gBAAe,IAAI;AACnB,gBAAQ,KAAK,eAAe,4BACpB;;AAEZ,sBAAe,IAAI,KAAK;;AAUrB;AACH,aAAQ,cAAe;AACvB,UAAI,cAAa,IAAI;AAGjB,YAAI,OAAM,QAAQ;AACd,kBAAQ,KAAK,gCAAgC;;;AAGrD,oBAAa,IAAI,YAAY;;AAS1B;AACH,kBAAY,SAAQ,YAAY;AAChC,UAAI,CAAC,gBAAe,IAAI;AACpB,cAAM,IAAI,MAAM,eAAe,4BACvB;;AAEZ,sBAAe,OAAO;;AAGnB;AACH,UAAI,CAAC,cAAa,IAAI;AAClB,cAAM,IAAI,MAAM,iBAAiB;;AAErC,oBAAa,OAAO;;AAQjB;AACH,sBAAgB,sBAAqB;AACrC,cAAQ,QAAQ;AACZ,gCAAwB,OAAO,OAAO,IAAI,cAAc,CAAE,aAAa;AACvE,wBAAe;;;AAGvB;AACI,aAAO,GAAG,eAAe;;AChI7B;;;;;;;;;;;;;;;;AAsBO;AACH,UAAI,UAAU;AACV,eAAO,cAAa;;AAExB,aAAO,cAAa,CAAC,QAAQ;;AAEjC;AACI,aAAQ,aAAa,gBAAgB,UAAU,aAC1C,aAAa,cAAc,UAAU,WACrC,aAAa,cAAc,UAAU;;AAEvC;AACH,UAAI,UAAU;AACV,cAAM,IAAI,MAAM;;AAEpB,UAAI,MAAM,QAAQ;AACd,YAAI,SAAa;;AAErB,UAAI,OAAM,QAAQ;AACd,kCAA8B,GAAG;;AAErC,UAAI,oBAAmB,GAAG;AACtB,eAAO;;AAEX,UAAI,SAAS,QAAQ,UAAU,aAAa,UAAU;AAClD,eAAO,IAAI,aAAa;iBAEnB,UAAU;AACf,eAAO,IAAI,WAAW;iBAEjB,UAAU;AACf,qBAAa,IAAI,WAAW,EAAE;AAC9B,qBAAa,GAAG,IAAI,KAAK,QAAQ,EAAE;AAC/B,cAAI,KAAK,MAAM,EAAE,QAAQ;AACrB,iBAAK,KAAK;;;AAGlB,eAAO;;AAGP,cAAM,IAAI,MAAM,qBAAqB;;;AActC;AACH,aAAO,OAAM,SAAS;;AAkBnB;AACH,aAAO,OAAM,SAAS,MAAM,MAAM;;AAU/B,yCAAoC;AACvC,iBAAW,YAAY;AACvB,aAAO,OAAM,SAAS,OAAO,GAAG;;AAU7B,6CAAwC;AAC3C,iBAAW,YAAY;AACvB,aAAO,OAAM,SAAS,OAAO,OAAO;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACxHxC;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,eAAe;AACpB,aAAK,SAAS;AACd,YAAI,UAAU;AACV,eAAK,SAAS,IAAI;;;MAG1B;AACI;AACA,oCAA4B;AACxB,oBAAU;;AAEd,sBAAc,KAAK,aAAa,KAAK;AACrC,qBAAa,GAAG,IAAI,QAAQ,QAAQ;AAChC,yBAAe,QAAQ;AAGvB,iBAAO,OAAO,KAAK;AACf,uCAA0B,YAAY,OAAO,OAAO;;;AAG5D,8BAAsB;UAClB;UACA;UACA;UACA,QAAQ,MAAM,KAAK,YAAU,OAAO;UACpC,WAAW,MAAM,KAAK,YAAU,OAAO,uBAAuB,OAC1D,OAAO,wBACP;;AAER,eAAO;;MAEX;AACI,eAAQ,YAAY,SAAS,QAAQ,QAAQ,aAAc;AAC3D,gBAAQ,QAAQ;AACZ,kBAAQ,IAAI,CAAC,OAAO,QAAQ,QAAQ,YAAY,KAAK;AACjD,iBAAK,OAAO,iBAAiB,YAAY,QAAQ,eAAe,IAAI,eAAe,IAAI,QAAQ,eAAe;;;;;AAKvH;AACH,UAAI,UAAU;AAEV,eAAO;;AAEX,mBAAa,GAAG,IAAI,KAAK,QAAQ;AAC7B,oBAAY,KAAK;AACjB,YAAI,MAAM,QAAQ,CAAC,SAAS;AAExB,kBAAQ,KAAK,SAAS,yBAAyB;AAC/C,iBAAO;;;AAGf,aAAO;;;MAGP;AACI,sBAAa,OAAO,WAAW,WAAW,UAAc,GAAG,YAAY,KACnE,OAAO;AACX,2BAAmB,UAAc,MAAM;AACvC,qBAAa,OAAO;AACpB,qBAAa,OAAO;AACpB,sBAAc,UAAc,OAAO,MAAM,YAAY;AACrD,qCAA6B;AAC7B,4BAAmB;AACf,yBAAc,OAAO;AACrB,cAAI,UAAS;AAGT,+BAAmB,OAAM,SAAS,OAAO;AACzC,8BAAkB,WAAW;AAC7B,sCACI,GAAG,UAAS,cAAc,YAAY,IAAI,aAAa;;;AAGnE,gBAAQ,IAAI,KAAK,gBAAiB,WAAW,SAAS,WAAY,UAAW,4BAA6B,aAAa,oBAAoB,aAAa,cAAc,iBAAiB,gBAAgB;;;AC9F/M;;;;;;;;;;;;;;;;AAyBO;AAGH,2BAAqB;AACrB,yBAAmB;AACnB,mBAAa,GAAG,IAAI,GAAG,QAAQ;AAC3B,qBAAa,G
AAG,GAAG,MAAM;;AAE7B,mBAAa,GAAG,IAAI,MAAK,QAAQ;AAC7B,qBAAa,MAAK;AAClB,2BAAmB,KAAK;AACxB,gCAAwB;AACpB,yBAAc,WAAW;AACzB,8BAAoB;AACpB,uBAAa,GAAG,IAAI,GAAG,QAAQ;AAC3B,gBAAI,aAAa,OAAM;AACnB,mBAAK,QAAQ,QAAQ,YAAU,aAAa,OAAO,MAAM;AACzD,8BAAgB;AAChB,yBAAW,KAAK,MAAM;AACtB;;;AAGR,cAAI;AACA;;;;AAKZ,6BAAuB;AACvB,qBAAe,EAAE,MAAM;AACvB,uBAAiB;AACjB,mBAAa,MAAK,SAAS,GAAG,KAAK,GAAG;AAClC,qBAAa,MAAK;AAClB,2BAAmB,KAAK;AAExB,qBAAa,GAAG,IAAI,KAAK,QAAQ,QAAQ;AACrC,cAAI,eAAe,KAAK,QAAQ,GAAG;AAC/B,oCAAwB;AACpB,6BAAe,WAAW,WAAW,MAAM;AAC3C,uBAAS,KAAK,MAAM;;AAExB;;;;AAKZ,2BAAqB;AACrB,mBAAa,GAAG,IAAI,MAAK,QAAQ;AAC7B,qBAAa,MAAK;AAClB,YAAI,WAAW,KAAK,OAAO,SAAS,KAAK;AAErC,+BAAqB;AACrB,kCAAwB,KAAK;AACzB,8BAAkB,KAAK,OAAO;AAC9B,gBAAI,aAAa,UAAU;AACvB,2BAAa,aAAa;;;AAIlC,6BAAmB,OAAO,OAAO,IAAI;AACrC,qBAAW,SAAS;AACpB,qBAAW,UAAU,KAAK;AAC1B,uBAAa,KAAK;;;AAG1B,aAAO;;AASJ;AAEH,mBAAa,aAAa,SAAS,GAAG,KAAK,GAAG;AAC1C,qBAAa,aAAa;AAC1B,oBAAY;AACZ,aAAK,QAAQ,QAAQ;AACjB,6BAAmB,6BAA6B,EAAE;AAClD,cAAI,cAAc;AACd,gBAAI,KAAK;;AAKT,gBAAI,KAAK;;;AAGjB,YAAI,KAAK,YAAY;AACjB,gBAAM,IAAI,MAAM,4DACL,KAAK;;AAGpB,+BAAuB,KAAK,SAAS;AACrC,gCAAwB,KAAK;AACzB,cAAI,CAAE,cAAa;AACf,kBAAM,IAAI,MAAM,iCAAiC,yCACf,OAAO,KAAK;;AAGlD,qBAAW,MAAK,MAAM,eAAe;AACrC,cAAI,GAAG,UAAU;AACb,kBAAM,IAAI,MAAM,4BAA4B,KAAK,qCAC1C,iDAAiD,GAAG;;AAE/D,oBAAU,KAAK,OAAO;AACtB,cAAI,CAAC,aAAiB,GAAG,OAAO,EAAE;AAC9B,kBAAM,IAAI,MAAM,4BAA4B,KAAK,sCACzC,yBAAyB,GAAG,wDACL,EAAE;;AAErC,cAAI,6BAA6B,EAAE,OAAO;AACtC,yCAA6B,EAAE,MAAM;;AAGrC,gCAAoB,6BAA6B,EAAE;AACnD,yCAA6B,EAAE,MAAM,KAAI,aAAa;AACtD,wBAAY;;;;;AChJ5B;;;;;;;;;;;;;;;;AAkBA,mCAA8B;AAE9B,wCAAmC;AAEnC,mCAA8B;AACvB;AACH,sBAAgB,gBAAe;AAC/B,wBAAkB,yBAAwB,MAAM,OAAO,OAAO;AAC9D,mBAAa,MAAM;AACnB,wBAAkB,mBAAkB,MAAM,OAAO,OAAO,SAAS;AACjE,oBAAc,CAAC;AACf,UAAI;AACA,cAAM,KAAK,YAAY;AACvB,cAAM,KAAK,WAAW;AACtB,cAAM,KAAK,aAAa;AACxB,cAAM,KAAK;;AAEf,YAAM,KAAK,UAAU,IAAI,OAAK,SAAS,GAAG,KAAK;AAC/C,aAAO,MAAM,KAAK;;AAEtB;AACI,gBAAU,eAAc;AACxB,sBAAgB,QAAQ,QAAQ,SAAS;AACzC,wBAAkB,IAAI,MAAM,SAAS,KAAK;AAC1C,mBAAa,MAAM;AACnB,6BAAuB,UAAU,cAAc,qBAAoB,QAAQ;AAC3E,UAAI,OAAO;AACP,uBAAe,GAAG,MAAM,IAAI,SAAS;AACjC,yBAAe,MAAM;AACrB,uBAAa,GAAG,IAAI,SAAS;AACzB,sBAAU,KAAK,KAAK,IAAI,UAAU,IAAI,aAAY,eAAe,SAAS,IAAI,GAAG,OAAO;;;;AAIpG,aAAO;;AAEX;AACI;AACA,UAAI,MAAM,QAAQ;AACd,iBAAS,GAAG,WAAW,IAAI,GAAG,QAAQ,8BAC/B,WAAW,IAAI,GAAG,QAAQ;iBAE5B,UAAS;AACd,iBAAS,IAAI;iBAER,UAAU;AACf,iBAAS,iBAAgB;;AAGzB,iBAAS,WAAW,IAAI,QAAQ,yBAAwB;;AAE5D,aAAO,UAAS,QAAQ;;AAE5B;AACI,aAAO,MAAM,IAAI,UAAU;;AAE/B,iFAA4E;AACxE,gCAA0B,UAAU,cAAc,IAAI;AACtD,mBAAa,MAAM;AACnB,mBAAa,MAAM;AACnB,UAAI,SAAS;AACT,YAAI,UAAU;AACV,+BAAqB,qBAAoB;AACzC,iBAAO,CAAC,aAAY,aAAa,IAAI,GAAG;;AAE5C,YAAI,UAAU;AACV,iBAAO,CAAC,iBAAgB,KAAK;;AAEjC,eAAO,CAAC,KAAK,GAAG;;AAEpB,UAAI,SAAS;AACT,YAAI,OAAO;AACP,gCAAsB,8BAA6B;AACnD,0BAAgB,MAAM,KAAK,KAAK,MAAM,GAAG;AACzC,yBAAe,MAAM,KAAK,KAAK,MAAO,QAAO,+BAA8B,mBAAmB,OAAO;AACrG,cAAI,UAAU;AACV,wBAAY,qBAAoB;AAChC,uBAAW,qBAAoB;;AAEnC,iBAAO;YACH,MACI,UAAU,IAAI,UAAU,aAAY,GAAG,UAAU,IAAI,QAChD,KAAK,QACV,YACA,SACK,IAAI,UAAU,aAAY,GAAG,UAAU,OAAO,8BAA6B,IAAI,QAC/E,KAAK,QACV;;;AAGZ,4BAAoB,UAAU,cAAc,qBAAoB,QAC5D,MAAM,KAAK;AACf,eAAO;UACH,MACI,YAAY,IAAI,UAAU,aAAY,GAAG,UAAU,IAAI,QAClD,KAAK,QACV;;;AAIZ,uBAAiB,MAAM,MAAM;AAC7B,yBAAmB,QAAQ,MAAM;AACjC,qBAAe,QAAQ,KAAK;AAC5B,oBAAc;AACd,UAAI,OAAO;AACP,qBAAa,GAAG,IAAI,6BAA4B;AAC5C,wBAAc,IAAI;AAClB,sBAAY,QAAQ;AACpB,gBAAM,KAAK,GAAG,mBAAkB,KAAK,MAAM,OAAO,MAAM,UAAU,OAAO,YAAY,WAAW;;AAEpG,cAAM,KAAK;AACX,qBAAa,OAAO,6BAA4B,IAAI,MAAM;AACtD,wBAAc,IAAI;AAClB,sBAAY,QAAQ;AACpB,gBAAM,KAAK,GAAG,mBAAkB,KAAK,MAAM,OAAO,MAAM,UAAU,OAAO,YAAY,WAAW,MAAM,OAAO;;;AAIjH,qBAAa,GAAG,IAAI,MAAM;AACtB,wBAAc,IAAI;AAClB,sBAAY,QAAQ;AA
CpB,gBAAM,KAAK,GAAG,mBAAkB,KAAK,MAAM,OAAO,MAAM,UAAU,OAAO,YAAY,WAAW,MAAM,OAAO;;;AAGrH,kBAAY,SAAS,IAAI,MAAM;AAC/B,YAAM,KAAK,MAAM,MAAM,KAAK;AAC5B,mBAAa,GAAG,IAAI,MAAM,SAAS,GAAG;AAClC,cAAM,KAAK,MAAM,MAAM,KAAK;;AAEhC,uBAAiB;AACjB,mBAAa,GAAG,IAAI,MAAM;AACtB,sBAAc;;AAElB,YAAM,MAAM,SAAS,KACjB,MAAM,MAAM,MAAM,SAAS,KAAK,MAAO,UAAS,KAAK;AACzD,aAAO;;AAEX;AACI,4BAAsB;AACtB,mBAAa,GAAG,IAAI,KAAK,QAAQ,KAAK;AAClC,sBAAc,KAAK,CAAC,KAAK,IAAI,KAAK,IAAI;;AAE1C,aAAO;;AChKX;;;;;;;;;;;;;;;;;MA4BI;AACI,aAAK,QAAQ;AACb,aAAK,QAAQ,MAAM;AACnB,aAAK,OAAO,eAAmB;AAC/B,YAAI,UAAU;AACV,oBAAU,OAAO;AACjB,kBAAY,MAAM,KAAK,MAAM,MAAM,qBAAqB,qDAC1B,KAAK;;AAEvC,YAAI,UAAU;AACV,gBAAM,IAAI,MAAM;;AAIpB,aAAK,SAAS,UAAU,mBAAuB,OAAO,KAAK;AAC3D,aAAK,UAAU,gBAAe;;MAUlC;AACI,YAAI,KAAK,WAAW;AAChB,iBAAO,CAAC;;AAEZ,gBAAY,KAAK,WAAW,KAAK,MAAM,MAAM,uCAAuC,KAAK,gCAClE,KAAK;AAC5B,uBAAc,KAAK,WAAW;AAC9B,aAAK,OAAO,UAAS;;MASzB;AACI,YAAI,KAAK,WAAW;AAChB,iBAAO,CAAC;;AAEZ,gBAAQ;AACR,0BAAkB;AACd,cAAI,MAAM,KAAK,OAAO,KAAK,MAAM;AAC7B,wBAAY,qCAAqC,wBAC3B,KAAK;AAC3B,kBAAM,IAAI,MAAM;;AAEpB;;AAEJ,qBAAY,KAAK,KAAK,SAAS;AAC/B,sBAAa,GAAG,KAAI,KAAK,SAAS,GAAG,EAAE;AACnC,oBAAS,KAAK,QAAQ,MAAK,KAAK;;AAEpC,eAAO,KAAK,OAAO;;MAEvB;AACI,YAAI,KAAK,SAAS;AACd,iBAAO;mBAEF,KAAK,SAAS;AACnB,iBAAO,KAAK;;AAEhB,qBAAY,KAAK,KAAK,SAAS;AAC/B,qBAAa,GAAG,IAAI,KAAK,SAAS,GAAG,EAAE;AACnC,oBAAS,KAAK,QAAQ,KAAK,KAAK;;AAEpC,eAAO;;MAEX;AACI,YAAI,KAAK,SAAS;AACd,iBAAO;mBAEF,KAAK,SAAS;AACnB,iBAAO,CAAC;;AAEZ,qBAAa,IAAI,MAAM,KAAK,MAAM;AAClC,qBAAa,GAAG,IAAI,KAAK,SAAS,GAAG,EAAE;AACnC,eAAK,KAAK,KAAK,MAAM,SAAQ,KAAK,QAAQ;AAC1C,oBAAS,KAAK,KAAK,KAAK,QAAQ;;AAEpC,aAAK,KAAK,SAAS,KAAK;AACxB,eAAO;;UAEP;AACA,eAAO,KAAK,MAAM;;MAOtB;AACI,eAAO,aAAY,WAAW,KAAK,QAAQ,KAAK,OAAO,KAAK;;;AAIpE,qBAAgB;AAEhB,qBAAgB;AAEhB,gCAA2B;AAI3B,KAAC;AAMM;AACH,mBAAY;;AAOT;AACH,mBAAY;;AAMT;AACH,8BAAuB;;;MAWvB;AAEI,aAAK,OAAO;AACZ,aAAK,qBAAqB;AAC1B,aAAK,QAAQ,MAAM;AACnB,aAAK,QAAQ,SAAS;AACtB,aAAK,OAAO,eAAmB;AAC/B,aAAK,UAAU,gBAAe;AAC9B,aAAK,SAAS;AACd,aAAK,KAAK;AACV,aAAK,WAAY,KAAK,OAAO,IAAI,KAAK,KAAK,aAAa;;UAExD;AACA,eAAO,KAAK,MAAM;;YAOhB;AACF,qBAAa,MAAM,KAAK;AACxB,eAAO,WAAU,OAAO,KAAK,OAAO,KAAK,OAAO;;MAMpD;AACI,eAAO,WAAU,OAAO,KAAK,OAAO,KAAK,OAAO,KAAK;;YAQnD;AACF,qBAAa,MAAM,KAAK;AACxB,eAAO,eAAc,KAAK,OAAO;;MAQrC;AACI,eAAO,eAAc,KAAK,OAAO,KAAK;;YAQpC;AACF,aAAK;AACL,sBAAa,aAAY,KAAK,KAAK;AACnC,YAAI,KAAK,UAAU;AACf,wBAAc,MAAM;AACpB;AACI,mBAAO,MAAM,IAAI,OAAK,cAAkB;;AAGxC,kBAAM,IAAI,MAAM;;;AAIxB,eAAO;;MAQX;AACI,aAAK;AACL,sBAAa,aAAY,SAAS,KAAK;AACvC,YAAI,KAAK,UAAU;AACf;AACI,mBAAO,MAAK,IAAI,OAAK,cAAkB;;AAGvC,kBAAM,IAAI,MAAM;;;AAIxB,eAAO;;YAGL;AACF,aAAK;AACL,sBAAa,MAAM,aAAY,KAAK,KAAK;AACzC,YAAI,KAAK,UAAU;AACf,iBAAO;;AAGP,iBAAO,IAAI,WAAW,MAAK;;;MAQnC;AACI,YAAI,KAAK;AACL;;AAEJ,qBAAY,cAAc;AAC1B,aAAK,qBAAqB;;UAE1B;AACA,eAAO,KAAK;;MAEhB;AACI,YAAI,KAAK;AACL,gBAAM,IAAI,MAAM;;;MAWxB,gBAAgB;AACZ,eAAO,WAAU,MAAM,MAAM;;MAMjC;AACI,aAAK;AACL,eAAO,WAAU,MAAM;;MAO3B,mBAAmB;AACf,qBAAa,KAAK;AAClB,eAAO,gBAAe,MAAM,KAAK,OAAO,KAAK,OAAO;;MAExD;AACI,aAAK;AACL,eAAO,WAAU,KAAK,MAAM;;MAEhC,qBAAqB;AACjB,aAAK;AACL,eAAO,aAAY,aAAa,MAAM,WAAW,MAAM;;;AAG/D,WAAO,eAAe,SAAQ,OAAO,aAAa;MAC9C,OAAO;AAMH,eAAO,CAAC,CAAC,aAAY,UAAS,QAAQ,QAAQ,UAAS,YAAY,QAC/D,UAAS,mBAAmB;;;4BAQV;MAC1B;AACI,cAAM,aAAa,OAAO,aAAa,OAAO,aAAa,QAAQ;AACnE,aAAK,YAAY;AACjB,aAAK,OAAO;;MAUhB;AACI,YAAI,SAAS,UAAU,KAAK;AACxB,gBAAM,IAAI,MAAM,2BAA2B,SAAS,8BAC7B,KAAK;;AAEhC,YAAI,CAAC,aAAiB,SAAS,OAAO,KAAK;AACvC,gBAAM,IAAI,MAAM,2BAA2B,SAAS,8BAC7B,KAAK;;AAEhC,qBAAY,cAAc;AAC1B,aAAK,SAAS,SAAS;AACvB,qBAAY,OAAO,MAAM;;MAE7B;AACI,qBAAY,gBAAgB;AAC5B,aAAK,qBAAqB;;;AAGlC,WAAO,eAAe,WAAU,OAAO,aAAa;MAChD,OAAO;AACH,eAAO,qBAAoB,WAAU,UAAS,UAAU,QACpD,UAAS,kBAAkB;;;AC5XvC;;;;
;;;;;;;;;;;;AAiBC,IAAA;AACG,YAAK,QAAQ;AACb,YAAK,QAAQ;AACb,YAAK,QAAQ;AACb,YAAK,QAAQ;AACb,YAAK,QAAQ;AACb,YAAK,QAAQ;AACb,YAAK,QAAQ;OACd,SAAA,QAAS,UAAA,OAAO;AAGnB;AACC,IAAA;AACG,yBAAkB,aAAa;AAC/B,yBAAkB,WAAW;AAC7B,yBAAkB,UAAU;AAC5B,yBAAkB,eAAe;OAClC,sBAAsB,sBAAoB;AAC7C;AACC,IAAA;AACG,wBAAiB,aAAa;AAC9B,wBAAiB,WAAW;AAC5B,wBAAiB,UAAU;AAC3B,wBAAiB,eAAe;OACjC,qBAAqB,qBAAmB;AAC3C;AACC,IAAA;AACG,2BAAoB,aAAa;AACjC,2BAAoB,WAAW;AAC/B,2BAAoB,UAAU;AAC9B,2BAAoB,eAAe;OACpC,wBAAwB,wBAAsB;AACjD;AACC,IAAA;AACG,6BAAsB,aAAa;AACnC,6BAAsB,WAAW;AACjC,6BAAsB,UAAU;AAChC,6BAAsB,eAAe;OACtC,0BAA0B,0BAAwB;AACrD,2BAAsB;MAClB,SAAW;MACX,OAAS;MACT,MAAQ;MACR,WAAa;;AAEV;AACH,UAAI,UAAU,YAAY,UAAU;AAChC,YAAI,UAAU,YAAY,UAAU;AAChC,iBAAO;;AAEX,cAAM,IAAI,MAAM,kBAAkB,cAAc;;AAEpD,aAAO,eAAc,OAAO;;AAGzB;AACH,aAAO,YAAW,MAAM;;ACzE5B;;;;;;;;;;;;;;;;AAmBO;AACH,UAAI,EAAE,UAAU,EAAE;AACd,eAAO,CAAC,GAAG;;AAEf,oBAAc,YAAW,EAAE,OAAO,EAAE;AACpC,aAAO,CAAC,EAAE,KAAK,QAAQ,EAAE,KAAK;;AAE3B;AACH,cAAO,EAAE,UAAU,EAAE,OAAO,MAAM,2BAA2B,EAAE,qBAChD,EAAE;;AAEd;AACH,aAAO,WAAW,KAAK,OAAK,EAAE,OAAO,QAAO;;AAczC;AACH,mBAAa;AACb,mBAAa,IAAI;AACjB,2BAAoB,QAAQ,MAAM;AAClC,aAAO;;AAEX;AACI,UAAI,aAAa;AACb;;AAEJ,UAAI,qBAAqB;AACrB,aAAK,KAAK;AACV;;AAEJ,UAAI,CAAC,YAAW;AACZ;;AAGJ,uBAAiB;AACjB,sBAAgB;AACZ,oBAAY,SAAS;AACrB,YAAI,CAAC,KAAK,IAAI;AACV,eAAK,IAAI;AACT,+BAAoB,KAAK,MAAM;;;;AAK3C;AACI,aAAO,MAAM,QAAQ,QAAQ,OAAO,QAAQ;;;;;;;;;AC1EhD;;;;;;;;;;;;;;;;;MA4BI;AAEI,aAAK,sBAAsB;AAC3B,aAAK,iBAAiB;AACtB,aAAK,WAAW;AAChB,aAAK,aAAa;AAClB,aAAK,mBAAmB;AACxB,aAAK,iBAAiB;AAItB,aAAK,gBAAgB;AAGrB,aAAK,cAAc;AACnB,aAAK,aAAa;AAKlB,aAAK,oBAAoB;AACzB,aAAK,cAAc;AACnB,aAAK,aAAa,IAAI;AACtB,aAAK,YAAY;AACjB,aAAK,gBAAgB,CAAE,UAAU,GAAG,YAAY,GAAG,WAAW,GAAG,SAAS,IAAI,QAAQ;;MAE1F;AACI,mCAA2B,KAAK;AAC5B,eAAK,oBAAoB,cAAc;;;;;MAK/C;AACI,aAAK,MAAM;AACX,aAAK,WAAW;AAChB,aAAK,kBAAkB;AACvB,aAAK,uBAAuB;AAC5B,aAAK,QAAQ,IAAI;;YAEf;AACF,YAAI,KAAK,sBAAsB;AAC3B,iBAAO,KAAK,mBAAmB,KAAK;;;AAExC,YAAI,KAAK,mBAAmB;AACxB;;AAEJ,+BAAuB,KAAK;AAC5B,qBAAa,GAAG,IAAI,eAAe,QAAQ;AACvC,8BAAoB,eAAe;AACnC,0BAAgB,MAAM,KAAK,kBAAkB,aAAa;AAC1D,cAAI;AACA,kBAAM,KAAK,WAAW;AACtB;;;AAGR,cAAM,IAAI,MAAM;;UAGhB;AACA,YAAI,KAAK,sBAAsB;AAC3B,gBAAM,IAAI,MAAM,YAAY,KAAK;;AAIrC,YAAI,KAAK,mBAAmB;AACxB,iBAAQ,MAAM,aAAc,KAAK;AACjC,cAAI;AACA,kBAAM,IAAI,MAAM,iCAAiC;;AAIrD,eAAK,WAAW;;AAEpB,eAAO,KAAK;;MAEhB;AACI,eAAO,OAAO,KAAK,KAAK;;MAE5B;AACI,YAAI,CAAE,gBAAe,KAAK;AAGtB,cAAI,eAAe,KAAK;AACpB,mBAAQ,aAAc,KAAK,kBAAkB;AAC7C,gBAAI;AAEA,qBAAO;;;AAIX,mBAAO;;;AAGf,eAAO,KAAK,SAAS;;MAEzB;AACI,YAAI,CAAE,gBAAe,KAAK;AACtB,iBAAO;;AAEX,eAAO,KAAK,gBAAgB,aAAa;;MAE7C,iDAAiD;AAC7C,YAAI,eAAe,KAAK;AACpB,kBAAQ,KAAK,GAAG;AAEhB,iBAAO;;AAEX,aAAK,gBAAgB,eAAe,CAAE,SAAS;AAC/C,eAAO;;YAEL;AACF,YAAI,KAAK,gBAAgB,gBAAgB;AACrC,gBAAM,IAAI,MAAM,iBAAiB;;AAErC,aAAK,cAAc;AACnB,YAAI,KAAK,SAAS,gBAAgB;AAC9B,eAAK,kBAAkB;AACvB,iBAAQ,SAAS,aAAc,KAAK,kBAAkB;AACtD,yBAAe,YAAY,MAAM,UAAU;AAC3C,cAAI,CAAC;AACD,mBAAO;;;AAGf,aAAK,kBAAkB,KAAK,SAAS;AACrC,aAAK;AAEL,aAAK,WAAW,IAAI,UAAS,KAAK;AAClC,eAAO;;MAEX;AACI,wBAAgB,sBAAqB,KAAK;AAC1C,gBAAQ,QAAQ;AACZ,cAAI,OAAO,aAAa;AACpB,mBAAO,UAAU,KAAK;;;;MAIlC;AACI,wBAAgB,sBAAqB;AACrC,gBAAQ,QAAQ;AACZ,cAAI,OAAO,eAAe;AACtB,mBAAO,YAAY,KAAK,SAAS;;;;MAU7C;AACI,qCAA6B,KAAK,gBAAgB;AAClD,YAAI,wBAAwB;AACxB,gBAAM,IAAI,MAAM,6BAA6B;;AAEjD;AACI,2BAAgB,qBAAqB;AAMrC,cAAI,YAAW,CAAE,qBAAmB,mBAC7B,OAAO,SAAQ,SAAS;AAC3B,8BAAkB,EAAE,KAAK;AACzB,4BAAgB,SACX,KAAK;AAEN,kBAAI,YAAY,KAAK;AACjB,uBAAO;;AAEX,mBAAK,SAAS,eAAe;AAC7B,mBAAK,qBAAqB;AAC1B,qBAAO;eAEN,MAAM;AAEP,kBAAI,YAAY,KAAK;AACjB,uBAAO;;AAEX,mBAAK,qBAAqB;AAC1B,sBAAQ,KAAK,6BAA6B;AAC1C,sBAAQ,KAAK,IAAI,SAAS,IAAI;AAC9B,qBAAO;;AAEX,iBAAK,qBAAqB;AAC1B,mBAAO,CAAE,SAA
M,IAAI,QAAQ;;AAErC,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,OAAO;AACvB,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,QAAO;;AAE7D,mBAAS,IAAG,CAAE;ACtE1B;;;;;;;;;;;;;;;;AAgCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,kBAAU,SAAQ,KAAK;AACvB,aAAK,CAAC;AACN,eAAO;SACR,QAAQ,MAAiB;;AAEpB,mBAAQ,IAAG,CAAE;ACzCzB;;;;;;;;;;;;;;;;AAyCA;AACI,0BAAoB,iBAAgB,YAAY,cAAc;AAC9D,0BAAoB,iBAAgB,YAAY,cAAc;AAC9D,wBAAkB,iBAAgB,UAAU,YAAY;AACxD,oBAAc,iBAAgB,OAAM,QAAQ;AAC5C,iBAAW,iBAAgB,GAAG,KAAK;AACnC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,uBAAiB,QAAO,CAAC,OAAO,KAAK;AACrC,uBAAiB,OAAO,UAAU;AAClC,kBAAY,MAAI,UAAU;AAE1B,wBAAkB,IAAI,MAAM;AAC5B,wBAAkB,IAAI,MAAM,KAAK;AACjC,wBAAkB,CAAC,WAAW;AAC9B,gBAAU,OAAM,KAAK,CAAC,GAAG,IAAI;AAC7B,gBAAU,OAAM,KAAK,CAAC,GAAG,YAAY;AACrC,gBAAU,OAAM,KAAK,CAAC,GAAG,YAAY,IAAI;AACzC,gBAAU,OAAM,KAAK,CAAC,GAAG,YAAY,IAAI;AACzC,mBAAa,MAAI,KAAI,SAAQ,IAAI,OAAK,KAAK,KAAI,IAAI,SAAQ,MAAI,aAAa;AAC5E,mBAAa,KAAI,OAAK,OAAO,SAAQ;AACrC,aAAO,CAAC,MAAM;;AAEN,0BAAiB,IAAG,CAAE;AC/DlC;;;;;;;;;;;;;;;;AAqEA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,oBAAa,WAAW,OAAO,UAAU,IAAI;AAC7C,cAAY,GAAG,QAAQ,IAAI,WAAW,QAAQ,MAAM,iBAAiB,GAAG,+CAA+C,WAAW;AAClI,cAAY,MAAM,WAAW,WAAW,QAAQ,MAAM,mBAAmB,MAAM,oDAAoD,WAAW;AAC9I,cAAY,GAAG,MAAM,KAAK,UAAS,GAAG,MAAM,yBAAyB,GAAG,MAAM,wEAC5C,WAAW,KAAK,cAAc;AAChE,sBAAgB;AACZ,eAAO,SAAQ,eAAe,IAAI,YAAY;;AAElD,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,YAAY;AAC5B,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAqB,gBAAgB;;AAE1E,2BAAkB,IAAG,CAAE;AClF5B;AACH;AACA,UAAI,EAAE,SAAS,KAAK,EAAE,SAAS;AAC3B,cAAM,SAAQ,GAAG,CAAC,GAAG,GAAG,GAAG,EAAE;iBAExB,EAAE,SAAS;AAChB,cAAM,SAAQ,GAAG,CAAC,GAAG,GAAG,EAAE,MAAM,IAAI,EAAE,MAAM;iBAEvC,EAAE,SAAS;AAChB,cAAM,SAAQ,GAAG,CAAC,GAAG,EAAE,MAAM,IAAI,EAAE,MAAM,IAAI,EAAE,MAAM;;AAGrD,cAAM;;AAEV,aAAO;;ACfX;;;;;;;;;;;;;;;;AAiDA;AACI,UAAI,mBAAmB;AACnB,0BAAkB;;AAEtB,iBAAW,iBAAgB,GAAG,KAAK;AACnC,oBAAc,iBAAgB,OAAM,QAAQ;AAC5C,wBAAkB,iBAAgB,WAAU,YAAY;AACxD;AACA,UAAI,UAAS;AACT,iBAAS,iBAAgB,QAAO,SAAS;;AAE7C;AACA,UAAI,UAAU;AACV,kBAAU,iBAAgB,QAAQ,UAAU;;AAEhD,cAAY,MAAM,SAAS,UAAU,MAAM,MAAM;AAEjD,cAAY,WAAW,QAAQ,MAAM,SAAS,QAAQ,MAAM,MAAM;AAElE,cAAY,UAAU,QAAQ,MAAM,SAAS,OAAO,MAAM,MAAM;AAEhE,kBAAY,MAAM;AAClB,sBAAgB;AACZ,aAAK,CAAC,KAAK,OAAO,WAAW;AAC7B,eAAO,SAAQ,UAAU,KAAK,SAAS,QAAQ,SAAS,YAAY,SAAS,UAAU,SAAS,SAAS;;AAE7G,qBAAe;QACX,GAAG;QACH,OAAO;QACP,QAAQ;QACR,MAAM;QACN,UAAU;;AAEd,oBAAc,CAAE;AAChB,kBAAY,QAAO,cAAc,SAAS,QAAQ,MAAqB,iBAAgB;AACvF,aAAO,SAAQ,KAAK,GAAG;;AAE3B;AACI,UAAI,KAAK;AACL,eAAO;;AAEX,UAAI,EAAE,SAAS;AAEX,eAAO,SAAQ,GAAG,CAAC,EAAE;iBAEhB,EAAE,SAAS;AAChB,eAAO;iBAEF,EAAE,SAAS;AAEhB,eAAO,SAAQ,GAAG,CAAC,GAAG,GAAG,EAAE,MAAM,IAAI,EAAE,MAAM;iBAExC,EAAE,SAAS;AAEhB,eAAO,SAAQ,GAAG,CAAC,GAAG,EAAE,MAAM,IAAI,EAAE,MAAM,IAAI,EAAE,MAAM;;AAE1D,aAAO;;AAEC,sBAAa,IAAG,CAAE;AC5F9B;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,oBAAc,iBAAgB,OAAM,QAAQ;AAC5C,wBAAkB,iBAAgB,WAAU,YAAY;AACxD;AACA,UAAI,UAAS;AACT,iBAAS,iBAAgB,QAAO,SAAS;;AAE7C;AACA,UAAI,UAAU;AACV,kBAAU,iBAAgB,QAAQ,UAAU;;AAEhD,cAAY,GAAG,SAAS,GAAG,MAAM,uDAC1B,GAAG;AACV,cAAY,MAAM,SAAS,KAAK,MAAM,SAAS,GAAG,MAAM,oEACxC,MAAM;AACtB,cAAY,UAAU,SAAS,KAAK,UAAU,SAAS,GAAG,MAAM,wEAC5C,UAAU;AAC9B,UAAI,UAAU;AACV,gBAAY,OAAO,SAAS,KAAK,OAAO,SAAS,GAAG,MAAM,qEACtC,OAAO;;AAE/B,UAAI,WAAW;AACX,gBAAY,QAAQ,SAAS,KAAK,QAAQ,SAAS,GAAG,MAAM,sEACxC,QAAQ;;AAEhC,aAAO,UAAU,IAAI,OAAO,WAAW,SAAS,QAAQ;;AAEhD,wBAAe,IAAG,CAAE;AC5BhC;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,oBAAc,iBAAgB,OAAM,QAAQ;AAC5C,wBAAkB,iBAAgB,WAAU,YAAY;AACxD;AACA,UAAI,UAAS;AACT,iBAAS,iBAAgB,QAAO,SAAS;;AAE7C;AACA,UAAI,UAAU;AACV,kBAAU,iBAAgB,QAAQ,UAAU;;AAEhD,cAAY,GAAG,SAAS,GAAG,MAAM,uDAC1B,GAAG;AACV,cAAY,MAAM,SAAS,KAAK,MAAM,SAAS,GAAG,MAAM,oEACxC,MAAM;AACtB,cAAY,UAAU,SAAS,KAAK,UAAU,SAAS,GA
AG,MAAM,wEAC5C,UAAU;AAC9B,UAAI,UAAU;AACV,gBAAY,OAAO,SAAS,KAAK,OAAO,SAAS,GAAG,MAAM,qEACtC,OAAO;;AAE/B,UAAI,WAAW;AACX,gBAAY,QAAQ,SAAS,KAAK,QAAQ,SAAS,GAAG,MAAM,sEACxC,QAAQ;;AAEhC,aAAO,UAAU,IAAI,OAAO,WAAW,SAAS,QAAQ;;AAEhD,wBAAe,IAAG,CAAE;AC5BhC;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,oBAAc,iBAAgB,OAAM,QAAQ;AAC5C,wBAAkB,iBAAgB,WAAU,YAAY;AACxD;AACA,UAAI,UAAS;AACT,iBAAS,iBAAgB,QAAO,SAAS;;AAE7C;AACA,UAAI,UAAU;AACV,kBAAU,iBAAgB,QAAQ,UAAU;;AAEhD,cAAY,GAAG,SAAS,GAAG,MAAM,uDAC1B,GAAG;AACV,cAAY,MAAM,SAAS,KAAK,MAAM,SAAS,GAAG,MAAM,oEACxC,MAAM;AACtB,cAAY,UAAU,SAAS,KAAK,UAAU,SAAS,GAAG,MAAM,wEAC5C,UAAU;AAC9B,UAAI,UAAU;AACV,gBAAY,OAAO,SAAS,KAAK,OAAO,SAAS,GAAG,MAAM,qEACtC,OAAO;;AAE/B,UAAI,WAAW;AACX,gBAAY,QAAQ,SAAS,KAAK,QAAQ,SAAS,GAAG,MAAM,sEACxC,QAAQ;;AAEhC,aAAO,UAAU,IAAI,OAAO,WAAW,SAAS,QAAQ;;AAEhD,wBAAe,IAAG,CAAE;AC3ChC;;;;;;;;;;;;;;;;AAoCA;AACI,mBAAY,iBAAgB,GAAG,eAAe;AAC9C,qBAAe,OAAM;AACrB,UAAI,MAAM,KAAK,OAAK,CAAE,KAAI,MAAM,IAAI,MAAM;AACtC,cAAM,IAAI,MAAM,2CAA2C;;AAE/D,UAAI,MAAM,SAAS,OAAM;AACrB,cAAM,IAAI,MAAM,+BAA+B,MAAM,uBAAuB,OAAM;;AAEtF,UAAI,MAAM,SAAS,OAAM;AACrB,yBAAiB,OAAM,MAAM;AAC7B,eAAO,SAAS,SAAS,MAAM;AAC3B,mBAAS,QAAQ;;AAErB,iBAAQ,SAAQ,QAAO;;AAE3B,yBAAmB,OAAM;AACzB,mBAAa,MAAM,KAAK;AACxB,mBAAa,MAAM,SAAS,GAAG,KAAK,GAAG;AACnC,YAAI,WAAW,OAAO,MAAM;AACxB,eAAK,KAAK;mBAEL,OAAM,MAAM,OAAO;AACxB,gBAAM,IAAI,MAAM,mBAAmB,mCAAmC;;;AAG9E,mBAAa,KAAK,IAAI,UAAU,IAAI,IAAI,IAAI,IAAI,OAAO,OAAK,KAAK;AACjE,UAAI,KAAK,WAAW;AAChB,eAAO,MAAM;;AAEjB,sBAAgB,cAAa,SAAQ,KAAK,QAAO;AACjD,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,OAAO;AACvB,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,aAAa;;AAEnE,wBAAe,IAAG,CAAE;ACvEhC;;;;;;;;;;;;;;;;AAgCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc,cAAW,SAAQ,KAAK,KAAK,QAAQ,MAAiB;;AAE1E,iBAAQ,IAAG,CAAE;ACrCzB;;;;;;;;;;;;;;;;AAmCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,cAAa,gBAAgB,cAAe,MAAM,uBAAuB,oDACvC;AAClC,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,cAAc;AAC9B,aAAO,QAAO,cAAc;AACxB,oBAAY,SAAQ,KAAK,IAAI,cAAc;AAC3C,aAAK,CAAC;AACN,eAAO;SACR,QAAQ,MAAiB,cAAa;;AAEjC,wBAAe,IAAG,CAAE;AClChC;AACI,aAAO,QAAO,SAAS;;AAEf,qBAAY,IAAG,CAAE;ACa7B;AACI,aAAO,QAAO,SAAS;;AAEf,qBAAY,IAAG,CAAE;ACC7B;AACI,aAAO,QAAO,SAAS;;AAEf,qBAAY,IAAG,CAAE;AC1B7B;AACI,aAAO,QAAO,SAAS;;AAEf,qBAAY,IAAG,CAAE;ACb7B;;;;;;;;;;;;;;;;AAwDA,4DAAuD,oBAAoB,CAAC,GAAG;AAC3E,iBAAW,iBAAgB,GAAG,KAAK;AACnC,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,gBAAU;AACV,yBAAmB;AACnB,UAAI,GAAG,SAAS;AACZ,uBAAe;AACf,cAAM,SAAQ,IAAI,CAAC,GAAG,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM;;AAE7D,cAAY,IAAI,SAAS,GAAG,MAAM,uDAAuD,IAAI;AAC7F,cAAY,QAAQ,SAAS,GAAG,MAAM,wDAC/B,QAAQ;AACf,UAAI,mBAAmB;AACnB,gBAAY,OAAW,OAAM,MAAM,uEACZ,+BAA+B;;AAE1D,sBAAgB,eAAe,SAAS,IAAI,MAAM,KAAK,IAAI,MAAM;AACjE,cAAY,YAAY,QAAQ,MAAM,IAAI,MAAM,oCAAoC,8CACtD,QAAQ,MAAM;AAC5C,cAAY,gCAAyC,SAAS,YAAY,MAAM,uEAC7D,0BAA0B;AAC7C,sBAAgB;AACZ,4BAAoB,yBAAkC;AACtD,yBAAiB,mBAA4B,IAAI,OAAO,QAAQ,OAAO,SAAS,WAAW,MAAK,iBAAiB,OAAO;AACxH,qBAAY,SAAQ,OAAO,KAAK,SAAS;AACzC,aAAK,CAAC,KAAK;AACX,eAAO;;AAEX,qBAAe,CAAE,GAAG,KAAK,QAAQ;AACjC,oBAAc,CAAE,SAAS,KAAA,MAAK,YAAY,WAAW;AACrD,kBAAY,QAAO,cAAc,SAAS,QAAQ,MAAiB,SAAQ;AAC3E,UAAI;AACA,eAAO,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAE/D,aAAO;;AAEC,oBAAU,IAAG,CAAE;ACzD3B,2DAAsD,kBAAkB;AACpE,iBAAW,iBAAgB,GAAG,KAAK;AACnC,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,gBAAU;AACV,yBAAmB;AACnB,UAAI,GAAG,SAAS;AACZ,uBAAe;AACf,cAAM,SAAQ,IAAI,CAAC,GAAG,GAAG,MAAM,IAAI,GAAG,MAAM;;AAEhD,cAAY,IAAI,SAAS,GAAG,MAAM,uDAAuD,IAAI;AAC7F,cAAY,QAAQ,SAAS,GAAG,MAAM,wDAC/B,QAAQ;AACf,UAAI,mBAAmB;AACnB,gBAAY,OAAW,OAAM,MAAM,uEACZ,+BAA+B;;AAE1D,cAAY,IAAI,MAAM,OAAO,QAAQ,MAAM,IAAI,MAAM,oCAAoC,IAAI,MAAM,yCACrE,QAAQ,MAAM;AAC5C,cAAY,gCAAyC,QAAQ,WAAW,MAAM,oEAC5D,w
BAAwB;AAC1C,cAAY,eAAe,OAAO,MAAM,sCAAsC;AAC9E,uBAAiB,SAAQ,SAAS,CAAC,GAAG,QAAQ,MAAM,IAAI,QAAQ,MAAM,IAAI,QAAQ,MAAM;AACxF,sBAAgB,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,GAAG,IAAI,MAAM,IAAI,IAAI,MAAM;AACvE,sBAAgB,CAAC,GAAG;AACpB,wBAAkB,CAAC,GAAG;AACtB,+BAAyB;AACzB,kBAAY,QAAO,SAAS,UAAU,SAAS,MAAK,kBAAkB,WAAW;AACjF,UAAI;AACA,eAAO,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM;;AAEjD,aAAO,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAEnD,mBAAU,IAAG,CAAE;ACnE3B;;;;;;;;;;;;;;;;AA+CA,kFAA6E;AACzE,cAAY,OAAO,WAAW,GAAG,MAAM,MAAM,sBACrC,OAAO,2BAA2B,GAAG;AAC7C,qBAAe;AACf,iBAAW;AACX,yBAAmB;AACnB,UAAI,GAAG,SAAS;AACZ,uBAAe;AACf,eAAO,SAAQ,IAAI,CAAC,GAAG,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM;AAC1D,mBAAW,CAAC,GAAG,OAAO,IAAI,OAAO,IAAI,OAAO;;AAEhD,cAAY,SAAS,WAAW,GAAG,MAAM,qEAClC,SAAS;AAChB,cAAY,KAAK,SAAS,GAAG,MAAM,4DACvB,KAAK;AACjB,cAAY,OAAO,SAAS,GAAG,MAAM,gEACzB,OAAO;AACnB,sBAAgB,eAAe,SAAS,SAAS,KAAK,SAAS;AAC/D,uBAAiB,eAAe,SAAS,KAAK,MAAM,KAAK,KAAK,MAAM;AACpE,cAAY,YAAY,OAAO,MAAM,IAAI,MAAM,4CAA4C,8CACvD,OAAO,MAAM;AACjD,cAAY,aAAa,OAAO,MAAM,IAAI,MAAM,6CAA6C,gDACxD,OAAO,MAAM;AAClD,UAAI,mBAAmB;AACnB,gBAAY,OAAW,OAAM,MAAM,+EACZ,+BAA+B;;AAE1D,sBAAgB;AACZ,0BAAkB;AAClB,4BAAoB,yBAAkC;AACtD,yBAAiB,mBAA4B,UAAU,OAAO,OAAO,SAAS,WAAW,MAAK,iBAAiB,OAAO;AACtH,qBAAY,SAAQ,eAAe,MAAM,QAAQ;AACjD,aAAK,CAAC,MAAM;AACZ,eAAO;;AAEX,qBAAe,CAAE,IAAI,MAAM;AAC3B,oBAAc,CAAE,SAAS,KAAA,MAAK,YAAY,iBAAiB,YAAY;AACvE,kBAAY,QAAO,cAAc,SAAS,QAAQ,MAAiB,sBAAqB;AACxF,UAAI;AACA,eAAO,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAE/D,aAAO;;AAEJ,iCAA4B,IAAG,CAAE;AClExC;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,aAAO,qBAAoB,aAAa,IAAI,SAAS,SAAS,MAAK,QAAQ;;AAEnE,4BAAmB,IAAG,CAAE;AC7BpC;;;;;;;;;;;;;;;;AAwDA,4DAAuD,qBAAqB,CAAC,GAAG,GAAG;AAC/E,iBAAW,iBAAgB,GAAG,KAAK;AACnC,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,gBAAU;AACV,yBAAmB;AACnB,UAAI,GAAG,SAAS;AACZ,uBAAe;AACf,cAAM,SAAQ,IAAI,CAAC,GAAG,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM;;AAE1E,cAAY,IAAI,SAAS,GAAG,MAAM,uDAAuD,IAAI;AAC7F,cAAY,QAAQ,SAAS,GAAG,MAAM,wDAC/B,QAAQ;AACf,cAAY,IAAI,MAAM,OAAO,QAAQ,MAAM,IAAI,MAAM,oCAAoC,IAAI,MAAM,yCACrE,QAAQ,MAAM;AAC5C,cAAY,gCAA+B,SAAS,YAAY,MAAM,uEACnD,0BAA0B;AAC7C,cAAY,eAAe,SAAS,MAAM,sCAAsC;AAChF,sBAAgB;AACZ,yBAAiB,mBAA4B,IAAI,OAAO,QAAQ,OAAO,SAAS,WAAW;AAC3F,qBAAY,SAAQ,OAAO,KAAK,SAAS;AACzC,aAAK,CAAC,KAAK;AACX,eAAO;;AAEX,qBAAe,CAAE,GAAG,KAAK,QAAQ;AACjC,oBAAc,CAAE,SAAS,KAAA,MAAK,YAAY;AAC1C,kBAAY,QAAO,cAAc,SAAS,QAAQ,MAAiB,QAAQ;AAC3E,UAAI;AACA,eAAO,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAE7E,aAAO;;AAEC,mBAAU,IAAG,CAAE;ACvF3B;;;;;;;;;;;;;;;;AAwCA;AACI,cAAY,OAAO,WAAW,GAAG,MAAM,MAAM,sBACrC,OAAO,2BAA2B,GAAG;AAC7C,qBAAe;AACf,iBAAW;AACX,yBAAmB;AACnB,UAAI,GAAG,SAAS;AACZ,uBAAe;AACf,eAAO,SAAQ,IAAI,CAAC,GAAG,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM;AACvE,mBAAW,CAAC,GAAG,OAAO,IAAI,OAAO,IAAI,OAAO,IAAI,OAAO;;AAE3D,sBAAgB,SAAS;AACzB,uBAAiB,KAAK,MAAM;AAC5B,cAAY,SAAS,WAAW,GAAG,MAAM,qEAClC,SAAS;AAChB,cAAY,KAAK,SAAS,GAAG,MAAM,4DACvB,KAAK;AACjB,cAAY,OAAO,SAAS,GAAG,MAAM,gEACzB,OAAO;AACnB,cAAY,YAAY,OAAO,MAAM,IAAI,MAAM,4CAA4C,8CACvD,OAAO,MAAM;AACjD,cAAY,aAAa,OAAO,MAAM,IAAI,MAAM,6CAA6C,gDACxD,OAAO,MAAM;AAClD,sBAAgB;AACZ,0BAAkB;AAClB,yBAAiB,mBAA4B,UAAU,OAAO,OAAO,SAAS,WAAW;AACzF,eAAO,SAAQ,eAAe,MAAM,QAAQ;;AAEhD,qBAAe,CAAE,IAAI,MAAM;AAC3B,oBAAc,CAAE,KAAA,MAAK,SAAS,YAAY;AAC1C,kBAAY,QAAO,cAAc,SAAS,QAAQ,MAAM,uBAAuB;AAC/E,UAAI;AACA,eAAO,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAE7E,aAAO;;AAEJ,gCAA4B,IAAG,CAAE;ACtDxC;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,aAAO,oBAAoB,aAAa,IAAI,SAAS,SAAS;;AAEtD,
4BAAmB,IAAG,CAAE;AC3BpC;;;;;;;;;;;;;;;;AAgCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,oBAAY,SAAQ,IAAI;AACxB,aAAK,CAAC;AACN,eAAO;SACR,QAAQ,MAAiB;;AAEpB,gBAAO,IAAG,CAAE;ACzCxB;;;;;;;;;;;;;;;;AAgCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,oBAAY,SAAQ,KAAK;AACzB,aAAK,CAAC;AACN,eAAO;SACR,QAAQ,MAAiB;;AAEpB,iBAAQ,IAAG,CAAE;ACzCzB;;;;;;;;;;;;;;;;AA6CA,+BAA2B,eAAe,kBAAiB;AACvD,iBAAW,iBAAgB,GAAG,KAAK;AACnC,sBAAgB;AACZ,4BAAoB,oBAAmB,CAAC,OAAO,GAAG;AAClD,wBAAgB;AAChB,YAAI,eAAe;AACf,sBAAY,WAAU,IAAI;;AAE9B,6BAAqB,kBAAiB,GAAG,GAAG,MAAM;AAClD,oBAAY,SAAQ,OAAO,WAAW,cAAc,WAAW;AAC/D,aAAK,CAAC;AACN,YAAI,eAAe;AACf,qCAA2B,wBAAuB;AAClD,kBAAQ,WAAU,OAAO;;AAE7B,eAAO;;AAEX,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,MAAM,WAAW,SAAA;AACjC,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,SAAQ;;AAE9D,oBAAU,IAAG,CAAE;AClE3B;;;;;;;;;;;;;;;;AA0DA,sDAAkD;AAC9C,iBAAW,iBAAgB,GAAG,KAAK;AACnC,0BAAqB,eAAe,SAAU,GAAG,MAAM,KAAK,GAAG,MAAM;AACrE,yBAAoB,eAAe,SAAU,GAAG,MAAM,KAAK,GAAG,MAAM;AACpE,yBAAoB,eAAe,SAAU,GAAG,MAAM,KAAK,GAAG,MAAM;AACpE,cAAY,cAAc,aAAa,GAAG,MAAM;MAC9C,mBAAmB;MACnB,GAAG;AACL,cAAY,aAAa,aAAa,GAAG,MAAM;MAC7C,kBAAkB;UACd,GAAG;AACT,cAAa,aAAc,aAAY,eAAe,GAAI,MAAM,8CAA8C,YAAY,oBAAoB,gDAAgD,GAAG;AACjM,sBAAgB,cAAW,SAAQ,aAAa,IAAI,WAAW;AAC/D,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,WAAW;AAC3B,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAqB,eAAc;;AAExE,0BAAgB,IAAG,CAAE;AC3EjC;;;;;;;;;;;;;;;;AAqEA,qEAAgE,oBAAoB,CAAC,GAAG;AACpF,iBAAW,iBAAgB,GAAG,KAAK;AACnC,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,gBAAU;AACV,yBAAmB;AACnB,UAAI,GAAG,SAAS;AACZ,uBAAe;AACf,cAAM,SAAQ,IAAI,CAAC,GAAG,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM;;AAE7D,cAAY,IAAI,SAAS,GAAG,MAAM,gEACtB,IAAI;AAChB,cAAY,QAAQ,SAAS,GAAG,MAAM,iEAC/B,QAAQ;AACf,cAAY,IAAI,MAAM,OAAO,QAAQ,MAAM,IAAI,MAAM,uDAC7C,IAAI,MAAM,qDACJ,QAAQ,MAAM;AAC5B,UAAI,mBAAmB;AACnB,gBAAY,OAAW,OAAM,MAAM,gFACZ,+BAA+B;;AAE1D,sBAAgB;AACZ,YAAI,aAAa;AACb,sBAAY,CAAC,GAAG;;AAEpB,gBAAY,gCAAyC,SAAS,YAAY,MAAM,gFAC1D,0BAA0B;AAChD,yBAAiB,mBAA4B,IAAI,OAAO,QAAQ,OAAO,SAAS,WAAW,MAAK,iBAAiB;AACjH,qBAAY,SAAQ,gBAAgB,KAAK,SAAS;AAClD,aAAK,CAAC,KAAK;AACX,eAAO;;AAEX,qBAAe,CAAE,GAAG,KAAK,QAAQ;AACjC,oBAAc,CAAE,SAAS,KAAA,MAAK,YAAY,WAAW;AACrD,kBAAY,QAAO,cAAc,SAAS,QAAQ,MAAiB,wBAAuB;AAC1F,UAAI;AACA,eAAO,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAE/D,aAAO;;AAEC,6BAAmB,IAAG,CAAE;AC5GpC;;;;;;;;;;;;;;;;AA0CA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,sBAAgB;AACZ,qBAAa,SAAQ,IAAI,CAAC,GAAG;AAC7B,uBAAe,SAAQ,KAAK;AAC5B,yBAAiB,CAAC,GAAG,EAAE,OAAO,GAAG,EAAE;AACnC,eAAO,SAAQ,QAAQ;;AAE3B,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB;;AAEtD,iBAAQ,IAAG,CAAE;ACrDzB;;;;;;;;;;;;;;;;AAqDA,+DAA0D,CAAC,GAAG,iBAAiB;AAC3E,iBAAW,iBAAgB,GAAG,KAAK;AACnC,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,cAAY,GAAG,SAAS,KAAK,GAAG,SAAS,GAAG,MAAM,gEAC3C,GAAG;AACV,cAAY,QAAQ,SAAS,GAAG,MAAM,4DAC/B,QAAQ;AACf,cAAY,eAAe,QAAQ,MAAM,gFACZ;AAC7B,gBAAU;AACV,yBAAmB;AACnB,UAAI,GAAG,SAAS;AACZ,cAAM,SAAQ,IAAI,CAAC,GAAG,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM;AACzD,uBAAe;;AAEnB,qBAAe,CAAE,GAAG,KAAK,QAAQ;AACjC,oBAAc,CAAE,SAAS,KAAA,MAAK;AAC9B,kBAAY,QAAO,UAAU,YAAY,QAAQ;AACjD,UAAI;AACA,eAAO,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAE/D,aAAO;;AAEC,uBAAc,IAAG,CAAE;AC5E/B;;;;;;;;;;;;;;;;AAyBO;AACH,qBAAe,QAAQ;AACvB,mBAAa;AACb,mBAAa,GAAG,IAAI,QAAQ;AACxB,oBAAY,SAAS,IAAI;AACzB,kBAAU,QAAQ,QAAQ;AAC1B,kBAAU,SAAS,SAAS,SAAS,IAAI,MAAM;AAC/C,YAAI,IAAI,KAAK,MAAM;AACf,eAAK,QAAQ;;;AAGrB,aAAO;;AAMJ;AACH,qBAAe;AACf,mBAAa,GAAG,IAAI,SAAS,QAAQ;AACjC,sBAAc,QAAQ,QAAQ,SAAS,IAAI;AAC3C,wBAAgB,SAAS,SAAS,IAAI;AACtC,uBAAe,SAAS;AACxB,YAAI,SAAS,QAAS,UAAU,KAAK,SAAS;AAC1C,iBAAO,QAAQ;;;AAGvB,aAAO;;AAEJ;AACH,qBAAe;A
ACf,gBAAU,KAAK,IAAI,OAAO,QAAQ,OAAO;AACzC,mBAAa,GAAG,IAAI,GAAG;AACnB,gBAAQ,OAAO,OAAO,SAAS,IAAI;AACnC,YAAI,KAAK;AACL,cAAI;;AAER,gBAAQ,OAAO,OAAO,SAAS,IAAI;AACnC,YAAI,KAAK;AACL,cAAI;;AAER,YAAI,MAAM;AACN,iBAAO,QAAQ;mBAEV,MAAM;AACX,iBAAO,QAAQ;mBAEV,MAAM;AACX,yBAAe,wDACR,cAAc;AACrB,gBAAM,MAAM;;AAGZ,iBAAO,QAAQ;;;AAGvB,aAAO;;ACjFX;;;;;;;;;;;;;;;;AAqCA;AACI,eAAS,iBAAgB,GAAG,KAAK;AACjC,eAAS,iBAAgB,GAAG,KAAK;AACjC,OAAC,IAAI,MAAM,gBAAe,IAAI;AAC9B,kCAA2B,GAAG,OAAO,GAAG;AACxC,sBAAgB,cAAW,SAAQ,MAAM,IAAI;AAC7C,qBAAe,CAAE,GAAG,IAAI,GAAG;AAC3B,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAM;;AAE3C,kBAAS,IAAG,CAAE;AC9C1B;;;;;;;;;;;;;;;;AA8CA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,yBAAmB,iBAAgB,WAAW,aAAa,SAAS;AAIpE,6BAAuB,4BAA2B,GAAG,OAAO,GAAG;AAC/D,4BAAsB,YAAY,IAAI;AACtC,4BAAsB,YAAY,IAAI;AACtC,UAAI,WAAW,SAAS;AAGpB,gBAAO,WAAW,MAAM,OAAO,GAAG,MAAM,IAAI,MAAM;;AAEtD,UAAI,WAAW,SAAS;AAEpB,2BAAkB,WAAW,OAAO,cAAc,OAAO;;AAE7D,sBAAgB;AACZ,oBAAY,SAAQ,OAAO,YAAY,eAAe;AACtD,aAAK,CAAC;AACN,eAAO;;AAEX,qBAAe;QACX,WAAW;QACX,GAAG;QACH,GAAG;;AAEP,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAqB;;AAE1D,kBAAS,IAAG,CAAE;AC7E1B;;;;;;;;;;;;;;;;AAiCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc,cAAW,SAAQ,UAAU,KAAK,QAAQ,MAAiB;;AAE/E,uBAAa,IAAG,CAAE;ACtC9B;;;;;;;;;;;;;;;;AAqDA;AAEI,eAAS,iBAAgB,GAAG,KAAK;AACjC,eAAS,iBAAgB,GAAG,KAAK;AACjC,OAAC,IAAI,MAAM,gBAAe,IAAI;AAC9B,wBAAkB,IAAI,IAAI;AAC1B,qBAAc,WAAU;AACxB,0BAAoB,MAAM,IAAI;AAC9B,aAAO,MAAM,aAAa,QAAO;;AAEzB,qBAAY,IAAG,CAAE;AC/D7B;;;;;;;;;;;;;;;;AAsCA;AACI,kBAAY,iBAAgB,IAAI,MAAM;AACtC,kBAAY,iBAAgB,IAAI,MAAM;AACtC,cAAa,KAAI,SAAS,KAAK,IAAI,SAAS,MAAO,KAAI,SAAS,KAAK,IAAI,SAAS,IAAI,MAAM,+DACrF,IAAI,YAAY,IAAI;AAC3B,sBAAiB,IAAI,SAAS,IAAI,IAAI,OAAO,IAAI,MAAM;AACvD,sBAAiB,IAAI,SAAS,IAAI,IAAI,OAAO,IAAI,MAAM;AACvD,cAAY,YAAY,SAAS,MAAM,gEAChC,eAAe;AACtB,UAAI,IAAI,SAAS,KAAK,IAAI,SAAS;AAC/B,qBAAa,SAAQ,KAAK,CAAC,GAAG;AAC9B,qBAAa,SAAQ,KAAK,CAAC,IAAI;AAC/B,qBAAa,OAAO,MAAM;AAC1B,eAAO,SAAQ,MAAM;iBAEhB,IAAI,SAAS,KAAK,IAAI,SAAS;AACpC,qBAAa,SAAQ,KAAK,CAAC,GAAG;AAC9B,qBAAa,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM;AACnD,qBAAa,OAAO,MAAM;AAC1B,eAAO,SAAQ,MAAM,CAAC,KAAK;iBAEtB,IAAI,SAAS,KAAK,IAAI,SAAS;AACpC,qBAAa,SAAQ,KAAK,CAAC,IAAI;AAC/B,qBAAa,OAAO,KAAK;AACzB,eAAO,SAAQ,MAAM,CAAC,KAAK;;AAG3B,qBAAa,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM;AACnD,qBAAa,OAAO,KAAK;AACzB,eAAO;;;AAGH,iBAAO,IAAG,CAAE;ACtExB;;;;;;;;;;;;;;;;AAgCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,sBAAgB;AACZ,kBAAU,SAAQ,IAAI;AACtB,aAAK,CAAC;AACN,eAAO;;AAEX,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB;;AAEtD,iBAAO,IAAG,CAAE,MAAA;AC1CxB;;;;;;;;;;;;;;;;AAmCA;AACI,eAAS,iBAAgB,GAAG,KAAK;AACjC,cAAY,GAAG,UAAU,WAAW,GAAG,UAAU,WAAW,MAAM;AAClE,UAAI,GAAG,UAAU;AACb,aAAK,MAAK,IAAI;;AAElB,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,oBAAY,SAAQ,IAAI;AACxB,aAAK,CAAC;AACN,eAAO;SACR,QAAQ,MAAiB;;AAEpB,gBAAO,IAAG,CAAE;AChDxB;;;;;;;;;;;;;;;;AAgCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,oBAAY,SAAQ,IAAI;AACxB,aAAK,CAAC;AACN,eAAO;SACR,QAAQ,MAAiB;;AAEpB,gBAAO,IAAG,CAAE;ACzCxB;;;;;;;;;;;;;;;;AAoCA,mCAA+B;AAC3B,sBAAgB;AAChB,iBAAW,iBAAgB,GAAG,KAAK,cAAc;AACjD,cAAY,QAAQ,GAAG,MAAM,MAAM;AACnC,uBAAiB,GAAG,MAAM;AAC1B,UAAI,OAAO;AAEP,gBAAY,CAAE,IAAG,OAAO,MAAM,MAAM,MAAM,iCAAiC,CAAE,IAAG,OAAO,OAAO,GAAG;AACjG,eAAO,GAAG,OAAO,OAAO;;AAE5B,eAAS,OAAO,MAAM,GAAG;AACzB,aAAO,SAAQ,IAAI;;AAEX,uBAAc,IAAG,CAAE;ACjD/B;;;;;;;;;;;;;;;;AAiCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,oBAAY,SAAQ,MAAM;AAC1B,aAAK,CAAC;AACN,eAAO;SACR,QAAQ,MAAiB;;AAEpB,kBAAS,IAAG,CAAE;AC1C1B;;;;;;;;;;;;;;;;AA8CA;AACI,sBAAgB;AAChB,iBAAW,iBAAgB,GAAG,KAAK,Q
AAQ;AAC3C,cAAY,GAAG,SAAS,KAAK,QAAQ,MAAM,qCAAqC,GAAG,kCAClD;AACjC,sBAAgB;AACZ,oBAAY,SAAQ,KAAK,IAAI;AAC7B,aAAK,CAAC;AACN,eAAO;;AAEX,2BAAqB,CAAC;AACtB,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE;AAChB,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,OAAM,OAAO;;AAEnE,kBAAQ,IAAG,CAAE;AC7DzB;;;;;;;;;;;;;;;;AAmCA,2DAAuD;AACnD,UAAI,cAAc;AACd,qBAAa;;AAEjB,mBAAa,QAAO,CAAC,SAAS,aAAa;AAC3C,gBAAU,WAAW,aAAa,UAAU;AAC5C,mBAAa,GAAG,IAAI,GAAG,EAAE;AACrB,aAAK,IAAI,GAAG,GAAG;;AAEnB,kBAAY,SAAQ,KAAK,YAAY,CAAC,SAAS;AAC/C,UAAI,cAAc;AACd,eAAO;;AAGP,YAAI,WAAW,WAAW;AACtB,iBAAO,MAAK,WAAW,KAAK,IAAI,CAAC,WAAW,IAAI,GAAG;mBAE9C,WAAW,WAAW;AAE3B,iBAAO,MAAK,WAAW,WAAW,KAAK,IAAI,IAAI,CAAC,WAAW,IAAI,WAAW,IAAI,GAAG;mBAE5E,WAAW,WAAW;AAE3B,iBAAO,MAAK,WAAW,WAAW,WAAW,KAAK,IAAI,IAAI,IAAI;YAC1D,WAAW;YAAI,WAAW;YAAI,WAAW;YAAI;YAAG;;;AAIpD,gBAAM,IAAI,MAAM,qEAEiB,WAAW;;;;AAI5C,gBAAO,IAAG,CAAE;ACrExB;;;;;;;;;;;;;;;;AAgCA;AACI,oBAAc,CAAE,OAAO,OAAO;AAC9B,aAAO,QAAO,cAAc,cAAW,SAAQ,KAAK,OAAO,OAAO,QAAQ,IAAI,MAAM,OAAM;;AClC9F;;;;;;;;;;;;;;;;AAgCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc,cAAW,SAAQ,MAAM,KAAK,QAAQ,MAAiB;;AAE3E,kBAAS,IAAG,CAAE;ACrC1B;;;;;;;;;;;;;;;;AAqBO,mCAA8B;AAC9B;AACH,UAAI,UAAU;AACV,eAAO;;AAEX,aAAO,gBAAe,QAAQ,KAAK,MAAM,KAAK,KAAK;;AC1BvD;;;;;;;;;;;;;;;;AAkBO;AACH,iBAAW;AACX;AACA,UAAI,UAAU;AACV,cAAM;AACN,eAAO;;AAGP,cAAM,gBAAe,QAAQ,KAAK,MAAM,KAAK,KAAK;;AAEtD,aAAO,CAAC;AACJ,YAAI,MAAM,eAAe,QAAQ;AAC7B,iBAAO;;AAGP,gBAAM,gBAAe,QAAQ,MAAM;;;AAG3C,aAAO;;AAEJ;AACH,uBAAiB;AACjB,mBAAa,OAAO;AACpB,qBAAe,GAAG,MAAM,MAAM;AAC1B,YAAI,QAAQ;AACR,mBAAS,KAAK,OAAO;;AAGrB,mBAAS,KAAK;;;AAGtB,aAAO;;AAEJ;AACH,sBAAgB,EAAE,MAAM;AACxB,0BAAoB;AACpB,sBAAgB;AAChB,sBAAgB;AAChB,mBAAa,GAAG,IAAI,MAAM;AACtB,oBAAY,KAAK,EAAE,MAAM;AACzB,qBAAa,EAAE,MAAM;;AAEzB,mBAAa,GAAG,IAAI,QAAQ,MAAM;AAC9B,oBAAY,KAAK,QAAQ,MAAM;;AAEnC,mBAAa,OAAO,GAAG,IAAI,EAAE,MAAM;AAC/B,oBAAY,KAAK,EAAE,MAAM;AACzB,qBAAa,EAAE,MAAM;;AAEzB,aAAO,CAAE,WAAW,WAAW,SAAS;;;;;;;;ACnE5C;;;;;;;;;;;;;;;;AA6CA,wCAAoC;AAChC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,uBAAiB,iBAAgB,SAAS,WAAW,UAAU;AAC/D,qBAAe,CAAE,GAAG,IAAI,SAAS;AACjC,oBAAc,CAAE;AAChB,sBAAgB;AACZ,2BAAmB,gBAAe,MAAM,GAAG,OAAO;AAClD,0BAAkB,0BAAyB,IAAI,UAAU;AACzD,oBAAY,SAAQ,OAAO,IAAI,SAAQ,UAAU,CAAC,SAAS,QAAQ;AACnE,aAAK,CAAC,IAAI;AACV,eAAO,SAAQ,KAAK,UAAU;;AAElC,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,WAAU;;AAEhE,mBAAU,IAAG,CAAE;AC3D3B;;;;;;;;;;;;;;;;AAqCA;AACI,eAAS,iBAAgB,GAAG,KAAK;AACjC,eAAS,iBAAgB,GAAG,KAAK;AACjC,OAAC,IAAI,MAAM,gBAAe,IAAI;AAC9B,kCAA2B,GAAG,OAAO,GAAG;AACxC,sBAAgB,cAAW,SAAQ,QAAQ,IAAI;AAC/C,qBAAe,CAAE,GAAG,IAAI,GAAG;AAC3B,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB;;AAEtD,oBAAW,IAAG,CAAE;AC9C5B;;;;;;;;;;;;;;;;AAqCA;AACI,eAAS,iBAAgB,GAAG,KAAK;AACjC,eAAS,iBAAgB,GAAG,KAAK;AACjC,OAAC,IAAI,MAAM,gBAAe,IAAI;AAC9B,kCAA2B,GAAG,OAAO,GAAG;AACxC,sBAAgB;AACZ,oBAAY,SAAQ,aAAa,IAAI;AACrC,aAAK,CAAC,IAAI;AACV,eAAO;;AAEX,qBAAe,CAAE,GAAG,IAAI,GAAG;AAC3B,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB;;AAEtD,yBAAgB,IAAG,CAAE;AClDjC;;;;;;;;;;;;;;;;AAkCA;AACI,qBAAe,iBAAgB,QAAO,SAAS;AAC/C,sBAAgB;AACZ,eAAO,SAAQ,KAAK;;AAExB,qBAAe,CAAE,OAAO;AACxB,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAqB;;AAE1D,iBAAQ,IAAG,CAAE;AC1CzB;;;;;;;;;;;;;;;;AAgCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc,cAAa,SAAQ,SAAS,KAAK,QAAQ,MAAiB;;AAEhF,uBAAY,IAAG,CAAE;ACrC7B;;;;;;;;;;;;;;;;AAgCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc,cAAa,SAAQ,MAAM,KAAK,QAAQ,MAAiB;;AAE7E,kBAAS,IAAG,CAAE;ACrC1B;;;;;;;;;;;;;;;;AAgCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc,cAAW,SAAQ,MAAM,KAAK,QAAQ,MAAiB;;AAE3E,oBAAS,IAAG,CAAE;ACrC1B;;;;;;;;;;;;;;;;AAkDA;AACI,eAAS,iBAAgB,GAAG,KAAK;AACjC,eAAS,iBAAgB,GAA
G,KAAK;AACjC,OAAC,IAAI,MAAM,gBAAe,IAAI;AAC9B,UAAI,GAAG,UAAU;AACb,aAAK,MAAK,IAAI;AACd,aAAK,MAAK,IAAI;;AAElB,kCAA2B,GAAG,OAAO,GAAG;AACxC,sBAAgB;AACZ,oBAAY,SAAQ,QAAQ,IAAI;AAChC,aAAK,CAAC,IAAI;AACV,eAAO;;AAEX,qBAAe,CAAE,GAAG,IAAI,GAAG;AAC3B,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAqB;;AAE1D,oBAAW,IAAG,CAAE;ACnE5B;;;;;;;;;;;;;;;;AAiCO;AACH,UAAM,eAAa,UAAU,UAAU,YAAa,MAAM,QAAQ,WAC9D,UAAU;AACV,cAAM,IAAI,MAAM;;AAGpB,UAAI,UAAU,YAAY,cAAa,UACnC,CAAE,kBAAiB;AACnB,cAAM,IAAI,MAAM;;AAGpB,oBAAc;AACd,4BAAsB;AACtB,aAAO,YAAW,OAAO,OAAO,eAAe;;AC9CnD;;;;;;;;;;;;;;;;AAsCA,mCAA+B;AAC3B,iBAAW,iBAAgB,GAAG,KAAK;AACnC,aAAO,QAAQ,KAAI,QAAO,QAAQ,KAAK;;AAE/B,sBAAa,IAAG,CAAE;AC1C9B;;;;;;;;;;;;;;;;AAoCA;AACI,eAAS,iBAAgB,GAAG,KAAK;AACjC,eAAS,iBAAgB,GAAG,KAAK;AACjC,OAAC,IAAI,MAAM,gBAAe,IAAI;AAC9B,kCAA2B,GAAG,OAAO,GAAG;AACxC,sBAAgB,cAAW,SAAQ,KAAK,IAAI;AAC5C,qBAAe,CAAE,GAAG,IAAI,GAAG;AAC3B,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB;;AAEtD,iBAAQ,IAAG,CAAE;AC7CzB;;;;;;;;;;;;;;;;AAqCA;AACI,eAAS,iBAAgB,GAAG,KAAK;AACjC,eAAS,iBAAgB,GAAG,KAAK;AACjC,OAAC,IAAI,MAAM,gBAAe,IAAI;AAC9B,kCAA2B,GAAG,OAAO,GAAG;AACxC,sBAAgB;AACZ,oBAAY,SAAQ,UAAU,IAAI;AAClC,aAAK,CAAC,IAAI;AACV,eAAO;;AAEX,qBAAe,CAAE,GAAG,IAAI,GAAG;AAC3B,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB;;AAEtD,sBAAa,IAAG,CAAE;AClD9B;;;;;;;;;;;;;;;;AA8BO;AACH,UAAI,OAAO;AACP,cAAM,IAAI,MAAM;;AAEpB,oBAAc,CAAE,OAAO,MAAM;AAC7B,aAAO,QAAO,cAAc,cAAW,SAAQ,SAAS,OAAO,MAAM,MAAM,IAAiB,MAAiB,UAAU;;ACnC3H;;;;;;;;;;;;;;;;AAqCA,0DAAsD,UAAU,WAAW,UAAU;AACjF,iBAAW,iBAAgB,GAAG,KAAK;AACnC,cAAY,GAAG,SAAS,KAAK,GAAG,SAAS,GAAG,MAAM;sBAChC,GAAG;AACrB,cAAY,OAAW,cAAc,MAAM,2FACR;AACnC,gBAAU;AACV,yBAAmB;AACnB,UAAI,GAAG,SAAS;AACZ,uBAAe;AACf,cAAM,SAAQ,IAAI,CAAC,GAAG,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM;;AAE7D,sBAAgB;AACZ,kBAAU,SAAQ,6BAA6B,KAAK,aAAa,MAAM,OAAO;AAC9E,aAAK,CAAC,KAAK;AACX,eAAO;;AAEX,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,aAAa,MAAM,OAAO;AAC1C,kBAAY,QAAO,cAAc,SAAS,QAAQ,MAAiB,KAAK;AACxE,UAAI;AACA,eAAO,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAG3D,eAAO;;;AAGH,uCAA8B,IAAG,CAAE;AChE/C;;;;;;;;;;;;;;;;AAgCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,oBAAY,SAAQ,IAAI;AACxB,aAAK,CAAC;AACN,eAAO;SACR,QAAQ,MAAiB;;AAEpB,kBAAO,IAAG,CAAE;ACzCxB;;;;;;;;;;;;;;;;AAiCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,oBAAY,SAAQ,MAAM;AAC1B,aAAK,CAAC;AACN,eAAO;SACR,QAAQ,MAAiB;;AAEpB,kBAAS,IAAG,CAAE;AC1C1B;;;;;;;;;;;;;;;;AAsDA;AACI,cAAY,YAAgB,IAAI,MAAM;AACtC,aAAO;AAEH,mBAAW,iBAAgB,GAAG,KAAK,WAAW;AAC9C,oBAAa,MAAM,OAAQ,iBAAgB,IAAI,MAAM,aAAa;AAClE,eAAO,QAAO,KAAK;AACf,iBAAQ,OAAO,iBAAU,QAAO,UAAU,MAAM,EAAE,KAAK,CAAC,KAAK;AAC7D,cAAI,OAAO;AACP,+BAAuB,MAAM,OAAO,IAAI,OAAO;;AAGnD,qBAAW;AACX,iBAAO,OAAM;;;;AAiCzB;AACI,cAAY,YAAgB,IAAI,MAAM;AACtC,aAAO;AACH,gBAAY,MAAM,QAAQ,OAAO,MAAM;AAGvC,sBAAc,qBAAqB,MAAM,QAAQ,YAAY;AAC7D,oBAAa,MAAM,OAAQ,iBAAgB,IAAI,MAAM,cAAc;AACnE,eAAO,QAAO,KAAK;AACf,iBAAQ,OAAO,iBAAU,QAAO,UAAU,MAAM,EAAE,GAAG,QAAQ,OAAO;AACpE,cAAI,OAAO;AACP,+BAAuB,MAAM,OAAO,IAAI,OAAO;;AAGnD,qBAAW;AACX,iBAAO;;;;AA6BnB;AACI,cAAY,YAAgB,IAAI,MAAM;AACtC,aAAO;AACH,gBAAY,aAAa,SAAQ,MAAM;AACvC,gBAAY,MAAM,QAAQ,cAAc,SAAQ,MAAM;AACtD,eAAQ,eAAO,SAAU,QAAO,UAAU,MAAM,EAAE,IAAI,CAAC,IAAI;AAC3D,mBAAW;AACX,eAAO,CAAE,MAAM,OAAM,IAAI;;;AAkCjC;AACI,cAAY,YAAgB,IAAI,MAAM;AACtC,aAAO;AACH,gBAAY,MAAM,QAAQ,SAAS,KAAK,MAAM,SAAO,eAAe,UAAS,MAAM;AAEnF,gBAAY,MAAM,QAAQ,cAAc,SAAQ,MAAM;AACtD,oBAAY,QAAO,UAAU,MAAM,EAAE,GAAG,OAAO,MAAM;AACrD,YAAI,MAAM;AACN,6BAAuB,IAAI,MAAM,OAAO,GAAG,OAAO;;AAGtD,mBAAW,IAAI;AACf,eAAO;;;AAiCf;AACI,cAAY,YAAgB,IAAI,MAAM;AACtC,cAAY,WAAW,QACnB,MAAM,QAAQ,YAAY,QAAQ,MAAM,OAAK,aAAa,YAAW,MAAM;AAE/E,+BAAyB,WAAW;AACpC,UAAI,CAAC;AAED,kBAA
U;AACV,8BAAsB,QAAO;AACzB,kBAAQ,KAAK,QAAO,oBAAoB;;;AAGhD,oCAA8B,mBAAmB,QAAQ,OAAO,eAAY,CAAC,UAAS,aAAa;AAEnG,+BAAyB,QAAQ;AACjC,gBAAU,QAAQ,OAAO,eAAY,UAAS;AAC9C,cAAY,QAAQ,SAAS,GAAG,MAAM,gGACD;AAErC,+BAAyB;AACzB,aAAQ,OAAO,iBAAU,QAAO,UAAU,GAAG,SAAS,MAAM;AAC5D,cAAY,OAAM,KAAK,OAAK,KAAK,OAAO,MAAM;AAG9C,cAAY,MAAM,SAAS,GAAG,MAAM,iFACb,MAAM;AAC7B,yBAAmB;AACnB,cAAQ,QAAQ;AACZ,YAAI,OAAM,MAAM;AACZ,qBAAW,EAAE,QAAQ,OAAM;;;AAGnC,UAAI,yBAAyB;AAGzB,8BAAsB,QAAQ,OAAK,WAAW,EAAE,QAAQ;;AAE5D,aAAO,CAAE,OAAO,OAAO;;AA0C3B;AACI,aAAO,QAAO,WAAW;;AAE7B;AACI,+BAAyB,OAAM,OAAO,OAAK,KAAK,MAAM;AACtD,UAAI,mBAAmB;AACnB,cAAM,IAAI,MAAM;;;;AC5TxB;;;;;;;;;;;;;;;;AAiCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc,cAAW,SAAQ,IAAI,KAAK,QAAQ,MAAiB;;AAEzE,gBAAO,IAAG,CAAE;ACtCxB;;;;;;;;;;;;;;;;AAgCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,oBAAY,SAAQ,SAAS;AAC7B,aAAK,CAAC;AACN,eAAO;SACR,QAAQ,MAAiB;;AAEpB,qBAAY,IAAG,CAAE;ACzC7B;;;;;;;;;;;;;;;;AAoCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AAInC,uBAAiB,WAAW;AAIxB,sBAAc,IAAI,SAAS,IAAI;AAC/B,yBAAiB;AACb,uBAAa,KAAI,IAAI,SAAQ,IAAI;AACjC,iBAAO;;AAEX,eAAO,CAAE,OAAO;;AAEpB,aAAO,SAAS;;AAER,uBAAc,IAAG,CAAE;ACtD/B;;;;;;;;;;;;;;;;AAqDA,4BAAwB,iBAAiB;AACrC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,sBAAgB;AACZ,yBAAiB,gBAAoB,MAAM,GAAG;AAC9C,mBAAW;AACX,6BAAqB,oBAA6B,MAAM,GAAG;AAC3D,uBAAe;AACf,YAAI,gBAAgB;AAChB,qBAAW,WAAU,IAAI;AACzB,iBAAO,kBAA2B,KAAK,QAAQ,SAAS;;AAE5D,kBAAU,SAAQ,IAAI,UAAU;AAChC,YAAI,gBAAgB;AAChB,mBAAS;;AAEb,kBAAU;AACV,YAAI;AACA,gCAAsB,sBAA+B,IAAI,OAAO,gBAAoB,MAAM,GAAG;AAC7F,gBAAM,SAAQ,KAAK;AACnB,YAAE;;AAEN,aAAK,CAAC,IAAI;AACV,eAAO;;AAEX,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,kBAAkB,MAAM;AACxC,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAqB,MAAK;;AAE/D,iBAAO,IAAG,CAAE;ACjFxB;;;;;;;;;;;;;;;;AA4CA;AACI,eAAS,iBAAgB,GAAG,KAAK;AACjC,eAAS,iBAAgB,GAAG,KAAK;AACjC,OAAC,IAAI,MAAM,gBAAe,IAAI;AAC9B,sBAAgB;AACZ,oBAAY,SAAQ,SAAS,IAAI;AACjC,aAAK,CAAC,IAAI;AACV,eAAO;;AAEX,qBAAe,CAAE,GAAG,IAAI,GAAG;AAC3B,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB;;AAEtD,gBAAO,IAAG,CAAE;ACxDxB;;;;;;;;;;;;;;;;AAuDA,6BAAwB,iBAAiB;AACrC,eAAS,iBAAgB,GAAG,KAAK;AACjC,UAAI,GAAG,UAAU;AACb,aAAK,MAAK,IAAI;;AAElB,sBAAgB;AACZ,aAAK,CAAC;AACN,qBAAa,gBAAe,MAAM,GAAG;AACrC,4BAAoB,oBAAmB,MAAM,GAAG;AAChD,4BAAoB;AACpB,wBAAgB;AAChB,YAAI,eAAe;AACf,sBAAY,WAAU,IAAI;AAC1B,0BAAgB,kBAAiB,cAAc,QAAQ,GAAG;;AAE9D,oBAAY,SAAQ,IAAI,WAAW;AACnC,YAAI;AACA,2BAAiB,sBAAqB,MAAM,OAAO;AACnD,kBAAQ,SAAQ,OAAO;;AAE3B,eAAO;;AAEX,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,MAAM;AACtB,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,MAAK;;AAE3D,kBAAO,IAAG,CAAE,MAAA;ACjFxB;;;;;;;;;;;;;;;;AA+CA,wCAAoC;AAChC,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,UAAI,SAAS;AACT,eAAO,QAAQ,OAAO;;AAE1B,UAAI,SAAS,QAAQ,OAAO;AACxB,cAAM,MAAM,gFACW,QAAQ,qBAAqB;;AAExD,sBAAgB;AACZ,yBAAiB;AACjB,qBAAa,KAAI,QAAQ,MAAM;AAC/B,wBAAgB,IAAI,QAAQ;AAC5B,sBAAc,IAAI,MAAK,SAAS,YAAY,MAAI,MAAI,IAAI,UAAU,MAAM;AACxE,aAAK,CAAC;AACN,eAAO;;AAEX,qBAAe,CAAE,QAAQ;AACzB,oBAAc,CAAE;AAChB,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,YAAY;;AAElE,uBAAc,IAAG,CAAE;ACpE/B;;;;;;;;;;;;;;;;AAwDA,kCAA8B,iBAAiB;AAC3C,iBAAW,iBAAgB,GAAG,KAAK;AACnC,mBAAa,gBAAe,MAAM,GAAG;AACrC,mBAAa,KAAI,IAAI,MAAM;AAC3B,gBAAU,IAAI,IAAI;AAClB,gBAAU,IAAI;AACd,gBAAU,MAAI,GAAG;AACjB,gBAAU,MAAI;AACd,kBAAY,MAAI,SAAQ,MAAM,EAAE,QAAQ;AACxC,UAAI;AACA,yBAAiB,sBAAqB,IAAI,OAAO;AACjD,eAAO,SAAQ,KAAK;;AAExB,aAAO;;AAEC,sBAAa,IAAG,CAAE;ACvE9B;;;;;;;;;;;;;;;;AAoCA;AACI,iBAAW,iBAAgB,GAAG,KAAK,cAAc;AACjD,iBAAW,iBAAgB,GAAG,KAAK,cAAc;AACjD,kCAA2B,GAAG,OAAO,GAAG;AACxC,qBAAe,CAAE,GAAG,IAAI,GAAG;AAC3B,aAAO,QAAO,cAAc,cAAW,SAAQ,WAAW,IAAI,KAAK,QAAQ,MAAiB;;AAEpF,uBAAc,IAAG,CAAE;AC3C/B;;;;;;;;;;;;;;;;AAiCA;AACI,iBAAW,iBAAgB,GAAG,KAAK,cAAc;AACjD,q
BAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc,cAAW,SAAQ,WAAW,KAAK,QAAQ,MAAiB;;AAEhF,uBAAc,IAAG,CAAE;ACtC/B;;;;;;;;;;;;;;;;AAmCA;AACI,iBAAW,iBAAgB,GAAG,KAAK,aAAa;AAChD,iBAAW,iBAAgB,GAAG,KAAK,aAAa;AAChD,kCAA2B,GAAG,OAAO,GAAG;AACxC,qBAAe,CAAE,GAAG,IAAI,GAAG;AAC3B,aAAO,QAAO,cAAc,cAAW,SAAQ,UAAU,IAAI,KAAK,QAAQ,MAAiB;;AAEnF,sBAAa,IAAG,CAAE;AC1C9B;;;;;;;;;;;;;;;;AAqCA;AACI,iBAAW,iBAAgB,GAAG,KAAK,cAAc;AACjD,iBAAW,iBAAgB,GAAG,KAAK,cAAc;AACjD,kCAA2B,GAAG,OAAO,GAAG;AAExC,aAAO,WAAW,UAAU,GAAG,IAAI,WAAW,WAAW,GAAG;;AAEpD,uBAAc,IAAG,CAAE;AC5C/B;;;;;;;;;;;;;;;;AAiDA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,wBAAkB;AAClB,gBAAU;AACV,yBAAmB;AACnB,UAAI,GAAG,SAAS;AACZ,uBAAe;AACf,cAAM,SAAQ,IAAI,CAAC,GAAG,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM;;AAE7D,cAAY,IAAI,SAAS,GAAG,MAAM,uDAAuD,IAAI;AAC7F,cAAY,gCAAyC,SAAS,YAAY,MAAM,wEAC7D,0BAA0B;AAC7C,UAAI,mBAAmB;AACnB,gBAAY,OAAW,OAAM,MAAM,wEACZ,+BAA+B;;AAE1D,sBAAgB;AACZ,yBAAiB,mBAA4B,IAAI,OAAO,YAAY,SAAS,GAAmB,MAAK;AACrG;AACA,YAAI,SAAS,gBAAgB,KAAK,SAAS,iBAAiB,KACxD,aAAiB,SAAS,SAAS,SAAS;AAC5C,cAAI,IAAI;;AAGR,cAAI,SAAQ,QAAQ,KAAK;;AAE7B,aAAK,CAAC,KAAK;AACX,eAAO;;AAEX,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,YAAY,SAAS,KAAA,MAAK;AAC1C,kBAAY,QAAO,cAAc,SAAS,QAAQ,MAAiB,UAAS;AAC5E,UAAI;AACA,eAAO,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAE/D,aAAO;;AAEC,qBAAW,IAAG,CAAE;ACtF5B;;;;;;;;;;;;;;;;AAoEA,wCAAoC,CAAC,GAAG,GAAG,iDAAgD;AACvF,UAAI,aAAa;AACb,oBAAY,CAAC,GAAG,GAAG;;AAGnB,yBAAgB;;AAGpB,iBAAW,iBAAgB,GAAG,KAAK;AACnC,gBAAU;AACV,yBAAmB;AACnB,UAAI,GAAG,SAAS;AACZ,uBAAe;AACf,cAAM,SAAQ,IAAI,CAAC,GAAG,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM;;AAE1E,cAAY,IAAI,SAAS,GAAG,MAAM,qDAAqD,IAAI;AAC3F,cAAY,eAAe,SAAS,MAAM,gFACb;AAC7B,cAAY,gCAAyC,SAAS,YAAY,MAAM,0EAC7D,0BAA0B;AAC7C,UAAI,mBAAmB;AACnB,gBAAY,OAAW,OAAM,MAAM,0EACZ,+BAA+B;;AAE1D,sBAAgB;AACZ,YAAI,aAAa;AACb,sBAAY,CAAC,GAAG,GAAG;;AAEvB,yBAAiB,mBAA4B,IAAI,OAAO,YAAY,SAAS,WAAW,MAAK,iBAAiB;AAC9G,kBAAU,SAAQ,UAAU,KAAK;AACjC,aAAK,CAAC,KAAK;AACX,eAAO;;AAEX,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,YAAY,SAAS,KAAA,MAAK,iBAAiB,YAAY;AACvE,kBAAY,QAAO,cAAc,SAAS,QAAQ,MAAiB,WAAW;AAC9E,UAAI;AACA,eAAO,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAE7E,aAAO;;AAEC,sBAAa,IAAG,CAAE;AC7G9B;;;;;;;;;;;;;;;;AAqDA,oFAA+E;AAC3E,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,YAAY,SAAS,KAAA,MAAK;AAC1C,qBAAe,QAAO,UAAU,mBAAmB,QAAQ;AAC3D,aAAO,CAAE,QAAQ,OAAO,IAAI,SAAS,OAAO;;AAEpC,8BAAqB,IAAG,CAAE;AC5DtC;;;;;;;;;;;;;;;;AAgCO,mCAA8B;AACjC,UAAI,UAAU;AACV,sBAAa,OAAM,OAAO;AAC1B,sBAAa,OAAM,OAAO;AAC1B,eAAO,SAAQ,OAAM;;AAEzB,qBAAe,qBAAoB,eAAc,QAAQ;AACzD,aAAO,QAAO,WAAW,QAAQ,OAAO;;ACvC5C;;;;;;;;;;;;;;;;AAiCO,mCAA6B;AAChC,UAAI,UAAU;AACV,sBAAa,OAAK,OAAO;AACzB,sBAAa,OAAM,OAAO;AAC1B,eAAO,SAAQ,OAAM;;AAEzB,qBAAe,oBAAmB,eAAc,QAAQ;AACxD,aAAO,QAAO,WAAW,QAAQ,OAAO;;ACxC5C;;;;;;;;;;;;;;;;AA2DA,6BAAyB,iBAAiB;AACtC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,mBAAa,gBAAe,MAAM,GAAG;AACrC,qBAAe,2BAA0B,GAAG,OAAO;AACnD,0BAAoB,OAAO;AAC3B,yBAAmB,eAAc;AACjC,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,MAAM;AACtB,sBAAgB;AACZ,iCAAyB,QAAO;AAEhC,wBAAgB,iBAAiB,UAAU,GAAG,QAC1C,KACA,MAAK,IAAI,iBAAiB;AAC9B,oBAAY,IAAI,SAAS;AACzB,eAAO,MAAI,KAAK,MAAM;;AAI1B,uBAAiB,WAAW;AACxB,sBAAc,QAAO,cAAc,SAAS,QAAQ,MAAiB,MAAM;AAC3E,yBAAiB;AACb,kCAAwB,GAAE,MAAM;AAChC,eAAK,QAAQ;AACT,4BAAgB,SAAQ;;AAE5B,6BAAmB,SAAQ,IAAI;AAC/B,uBAAa,IAAI,KAAI,YAAY,OAAK,GAAE,OAAO,aAAa;AAC5D,iBAAO;;AAEX,eAAO,CAAE,OAAO;;AAEpB,aAAO,SAAS;;AAER,iBAAQ,IAAG,CAAE;ACxDzB,4BAAwB,iBAAiB;AACrC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,sBAAgB;AACZ,yBAAiB,gBAAe,MAAM,GAAG;AACzC,mBAAW;AACX,6BAAqB,oBAA6B,MAAM,GAAG;AAC3D,uBAAe;AACf,YAAI,gBAAgB;AAChB,qBAAW,WAAU,IAAI;AACzB,iBAAO,kBAA2B,KA
AK,QAAQ,GAAG;;AAEtD,kBAAU,SAAQ,IAAI,UAAU;AAChC,YAAI,gBAAgB;AAChB,mBAAS;;AAEb,kBAAU;AACV,YAAI;AACA,gCAAsB,sBAA+B,IAAI,OAAO;AAChE,gBAAM,SAAQ,GAAG;AACjB,YAAE;;AAEN,aAAK,CAAC,IAAI;AACV,eAAO;;AAEX,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,MAAM;AACtB,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAqB,MAAK;;AAE/D,iBAAO,IAAG,CAAE;ACjExB;;;;;;;;;;;;;;;;AAkDA;AACI,eAAS,iBAAgB,GAAG,KAAK;AACjC,eAAS,iBAAgB,GAAG,KAAK;AACjC,OAAC,IAAI,MAAM,gBAAe,IAAI;AAC9B,UAAI,GAAG,UAAU;AACb,aAAK,MAAK,IAAI;AACd,aAAK,MAAK,IAAI;;AAElB,kCAA2B,GAAG,OAAO,GAAG;AACxC,sBAAgB;AACZ,oBAAY,SAAQ,QAAQ,IAAI;AAChC,aAAK,CAAC,IAAI;AACV,eAAO;;AAEX,qBAAe,CAAE,GAAG,IAAI,GAAG;AAC3B,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAqB;;AAE1D,oBAAW,IAAG,CAAE;ACnE5B;;;;;;;;;;;;;;;;AA8CA;AACI,cAAY,SAAS,aAAa,SAAS,aAAa,MAAM,+DACnD;AACX,iBAAW,iBAAgB,GAAG,KAAK;AACnC,UAAI,GAAG,SAAS;AACZ,cAAM,IAAI,MAAM;;AAGpB,cAAY,SAAS,WAAW,GAAG,MAAM,MAAM,wCAAwC,GAAG,aAC/E,SAAS;AACpB,0BAAoB,SAAS,YAAY,IAAI;AAC7C,mBAAa,GAAG,IAAI,GAAG,MAAM;AACzB,gBAAY,SAAS,GAAG,WAAW,GAAG,MAAM;AAC5C,gBAAY,SAAS,GAAG,MAAM,KAAK,SAAS,GAAG,MAAM,GAAG,MAAM,KAAK,eAC/D,SAAS,GAAG,MAAM,KAAK,SAAS,GAAG,MAAM,GAAG,MAAM,KAAK,aAAa,MAAM,wBAAwB,wCAC5F,GAAG,MAAM,KAAK,iDACX,GAAG;;AAEpB,oBAAc,CAAE,UAAU;AAC1B,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,UAAU,WAAW,QAAQ;;AAEnC,sBAAa,IAAG,CAAE;ACpE9B;;;;;;;;;;;;;;;;AAiDA;AACI,eAAS,iBAAgB,GAAG,KAAK;AACjC,eAAS,iBAAgB,GAAG,KAAK;AACjC,OAAC,IAAI,MAAM,gBAAe,IAAI;AAC9B,sBAAgB;AACZ,oBAAY,SAAQ,IAAI,IAAI;AAC5B,aAAK,CAAC,IAAI;AACV,eAAO;;AAEX,qBAAe,CAAE,GAAG,IAAI,GAAG;AAC3B,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAqB;;AAE1D,gBAAO,IAAG,CAAE;AC7DxB;;;;;;;;;;;;;;;;AA+BA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,oBAAc;AACd,2BAAqB,CAAC;AACtB,4BAAsB;AACtB,aAAO,QAAO,cAAc;AACxB,aAAK,CAAC;AACN,eAAO,SAAQ,OAAO;SACvB,CAAE,GAAG,KAAM,MAAiB,UAAU,OAAO,cAAc;;AAEtD,mBAAU,IAAG,CAAE;ACzC3B;;;;;;;;;;;;;;;;AAuCA,gCAA4B,iBAAiB;AACzC,UAAI,iBAAgB,GAAG,KAAK;AAC5B,mBAAa,gBAAe,MAAM,EAAE;AACpC,oBAAc,KAAK,GAAG,MAAM;AAC5B,0BAAoB,MAAM;AAC1B,UAAI,CAAC;AACD,wBAAgB,sBAAqB,MAAM,OAAO;;AAEtD,yBAAmB,OAAO,IAAI,MAAK,GAAG,YAAY,SAAQ,OAAO;AACjE,wBAAiB,KAAK,YAAY,MAAM;AACxC,aAAO,CAAE,MAAM,OAAO,UAAA;;AAEd,oBAAW,IAAG,CAAE;ACjC5B;AACI,oBAAc,iBAAgB,OAAM,QAAQ;AAC5C,iBAAW,qBAAqB,GAAG,KAAK;AACxC,iBAAW,qBAAqB,GAAG,KAAK;AACxC,mBAAY;AACZ,wBAAkB;AAClB,mBAAa,GAAG,IAAI,UAAU,QAAQ;AAClC,uBAAe,UAAU,GAAG,QAAO,GAAG,IAAI,GAAG;AAC7C,kBAAU,KAAK,OAAO;AACtB,kBAAU,KAAK,OAAO;AACtB,iBAAQ,OAAO;;AAEnB,mBAAa;AACb,mBAAa;AACb,mBAAa,GAAG,IAAI,UAAU,QAAQ,KAAK;AACvC,aAAK,KAAK,UAAU;AACpB,aAAK,KAAK,UAAU,IAAI;;AAE5B,aAAO,CAAC,MAAM;;AAEN,yBAAgB,IAAG,CAAE;ACtCjC;;;;;;;;;;;;;;;;AAwCA,iEAA6D;AACzD,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,0BAAoB,QAAQ;AAC5B,uBAAiB,QAAQ;AACzB,UAAI,cAAc;AACd,cAAM,IAAI,MAAM,+DACT;;AAEX,UAAI,WAAW;AACX,cAAM,IAAI,MAAM,gDAAgD;;AAEpE,aAAO,QAAQ,KAAK;AACpB,uBAAiB,aAAa,IAAI,SAAQ,SAAS,CAAC,GAAG,OAAO;AAC9D,kBAAY,QAAO,cAAc,cAAW,SAAQ,YAAY,UAAU,YAAY,YAAY,OAAO,CAAE;AAE3G,aAAO,aAAa,IAAI,SAAQ,KAAK,CAAC,IAAI,SAAS;;AAE3C,wBAAe,IAAG,CAAE;ACzDhC;;;;;;;;;;;;;;;;AAoCA;AACI,eAAS,iBAAgB,GAAG,KAAK;AACjC,eAAS,iBAAgB,GAAG,KAAK;AACjC,OAAC,IAAI,MAAM,gBAAe,IAAI;AAC9B,kCAA2B,GAAG,OAAO,GAAG;AACxC,sBAAgB,cAAa,SAAQ,SAAS,IAAI;AAClD,qBAAe,CAAE,GAAG,IAAI,GAAG;AAC3B,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB;;AAEtD,qBAAY,IAAG,CAAE;AC7C7B;;;;;;;;;;;;;;;;AAmCA;AACI,qBAAe,iBAAgB,QAAO,SAAS;AAC/C,sBAAgB;AACZ,eAAO,SAAQ,KAAK;;AAExB,qBAAe,CAAE,OAAO;AACxB,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAqB;;AAE1D,iBAAQ,IAAG,CAAE;AC3CzB;;;;;;;;;;;;;;;;AAoCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,sBAAgB;AACZ,YAAI,GAAG,UAAU;AACb,oBAAU,UAAS,KAAK;AACxB,oBAAU,WAAU,KAAK;AACzB,iBAAO,SAAQ,GAAG;;AAEtB,eAAO,SAAQ,SAAS;;AAE5B,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB;;AAEtD,sBAAY,IAAG,CAAE;AC9B7B;AACI,kBAAY,iBAAgB,
IAAI,MAAM;AACtC,kBAAY,iBAAgB,IAAI,MAAM;AACtC,cAAY,IAAI,SAAS,KAAK,IAAI,SAAS,GAAG,MAAM,+DAC7C,IAAI,YAAY,IAAI;AAC3B,mBAAa,SAAQ,KAAK,CAAC,IAAI;AAC/B,mBAAa,SAAQ,KAAK,CAAC,GAAG;AAC9B,aAAO,OAAO,MAAM;;AAEZ,yBAAgB,IAAG,CAAE;AC5BjC;;;;;;;;;;;;;;;;AA6CA,+CAA2C;AACvC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,UAAI,GAAG,SAAS;AACZ,cAAM,IAAI,MAAM;;AAEpB,sBAAgB;AACZ,aAAK,CAAC;AACN,eAAO,SAAQ,IAAI,IAAI,UAAU;;AAErC,oBAAc,CAAE,UAAU;AAC1B,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,QAAO;;AAE7D,iBAAO,IAAG,CAAE;ACpDxB,iDAA6C;AACzC,cAAO,SAAS,WAAW,GAAG,MAAM;AACpC,aAAO,KAAI,GAAG,CAAC,WAAW;;AAElB,kBAAS,IAAG,CAAE;ACJ1B,iDAA6C;AACzC,cAAO,SAAS,WAAW,KAAK,SAAS,GAAG,WAAW,KACnD,SAAS,GAAG,WAAW,GAAG,MAAM;AACpC,aAAO,KAAI,GAAG,UAAU;;AAEhB,kBAAS,IAAG,CAAE;ACL1B,iDAA6C;AACzC,cAAO,SAAS,WAAW,KAAK,SAAS,GAAG,WAAW,KACnD,SAAS,GAAG,WAAW,KAAK,SAAS,GAAG,WAAW,GAAG,MAAM;AAChE,aAAO,KAAI,GAAG,UAAU;;AAEhB,kBAAS,IAAG,CAAE;ACL1B,iDAA6C;AACzC,cAAO,SAAS,WAAW,KAAK,SAAS,GAAG,WAAW,KACnD,SAAS,GAAG,WAAW,KAAK,SAAS,GAAG,WAAW,KACnD,SAAS,GAAG,WAAW,GAAG,MAAM;AACpC,aAAO,KAAI,GAAG,UAAU;;AAEhB,kBAAS,IAAG,CAAE;ACZ1B;;;;;;;;;;;;;;;;AAqEA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,cAAY,GAAG,QAAQ,IAAI,WAAW,QAAQ,MAAM,cAAc,GAAG,sCAAsC,WAAW;AACtH,cAAY,SAAS,WAAW,WAAW,QAAQ,MAAM,qBAAqB,SAAS,wCAAwC,WAAW;AAC1I,cAAY,GAAG,MAAM,OAAO;AACxB,YAAI,IAAI,KAAK,KAAK,WAAW;AACzB,iBAAO,KACD,KAAI,SAAS,IAAI,GAAG,KAAK,SAAS,IAAI,GAAG,MACvC,WAAW,IAAI,OACf;;AAEZ,eAAO;SACR,OAAO,MAAM,4BAA4B,GAAG,MAAM,MAAM,oBAAoB,SAAS,+CAA+C,WAAW;AAClJ,sBAAgB,cAAW,SAAQ,eAAe,IAAI,YAAY;AAClE,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,YAAY;AAC5B,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAqB,gBAAgB;;AAE1E,2BAAkB,IAAG,CAAE;ACvFnC;;;;;;;;;;;;;;;;AAmDA;AACI,UAAI,aAAa;AACb,oBAAY,CAAC,GAAG;;AAEpB,UAAI,WAAW;AACX,kBAAU;;AAEd,UAAI,SAAQ;AACR,eAAM;;AAEV,iBAAW,iBAAgB,QAAO,KAAK;AACvC,gBAAU;AACV,yBAAmB;AACnB,UAAI,GAAG,SAAS;AACZ,uBAAe;AACf,cAAM,SAAQ,IAAI,CAAC,GAAG,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM;;AAE7D,cAAY,gCAAyC,SAAS,YAAY,MAAM,qEAC7D,0BAA0B;AAC7C,uBAAiB,mBAA4B,IAAI,OAAO,aAAa,SAAS,WAAW;AACzF,uBAAiB,CAAC,SAAS,gBAAgB,SAAS;AAKpD;AACA,UAAI,SAAQ;AACR,sBAAc,6BAA6B,CAAC,SAAS,cAAc,SAAS,cAAc;;AAG1F,sBAAc,CAAC,CAAC,GAAG,IAAI,CAAC,GAAG;;AAE/B,4BAAsB,SAAS,OAAO,KAAK,SAAS,OAAO;AAC3D,+CAAyC,6BAA6B,CAAC,SAAS,UAAU,SAAS,UAAU,UAAU;AACvH,2BAAqB,gBAAgB,OAAM;AAC3C,yBAAmB,gBAAgB,MAAM,eAAe,KAAK,UAAU;AACvE,wBAAkB,gBAAgB,QAC9B,MAAM,SAAQ,YAAY,aAAa,SAAS,gBAChD,MAAM,SAAQ,YAAY,aAAa,SAAS;AACpD,gBAAU;AACV,kBAAY,gBAAgB,IAAI,eAAe,GAAG,UAAU;AAC5D,UAAI;AACA,eAAO,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAE/D,aAAO;;AAKX;AACI,uBAAiB,YAAY,IAAI,OAAK,EAAE;AACxC,yBAAmB,YAAY,IAAI,OAAK,EAAE;AAC1C,6BAAuB,WAAW,OAAO,UAAU;AACnD,0BAAoB,WAAW,IAAI,UAAW,KAAI,eAAe,KAAK,KAAK;AAC3E,qBAAe,WAAW,IAAI,UAAU,IAAI,YAAY;AACxD,uBAAiB,WAAW,IAAI,UAAU,CAAC,SAAS,IAAI,OAAO;AAC/D,oBAAc,WAAW,IAAI,UAAU,CAAC,GAAG,YAAY;AACvD,aAAO,CAAC,UAAU;;AAKtB;AAGI,iCAA2B,YAAY,IAAI;AACvC,eAAO,IAAK,KAAI,KAAM,UAAS,KAAK;;AAExC,4BAAsB,mBAAmB,IAAI,OAAK,IAAI;AAGtD,4BAAsB,cAAc,IAAI,OAAK,KAAK,MAAM,IAAI;AAC5D,0BAAoB,cAAc,IAAI,UAAU,IAAI,cAAc;AAClE,aAAO,cAAc,IAAI;AACrB,eAAO,CAAC,cAAc,IAAI,YAAY;;;AAGlC,iBAAQ,IAAG,CAAE;AChIzB;;;;;;;;;;;;;;;;AAiDA;AACI,kBAAY,iBAAgB,OAAM,QAAQ;AAC1C,iBAAW,iBAAgB,MAAK,OAAO;AACvC,OAAC,OAAO,QAAQ,gBAAe,OAAO;AACtC,qBAAe,CAAE,GAAG,OAAO,GAAG;AAC9B,sBAAgB;AACZ,kBAAU,SAAQ,IAAI,OAAO;AAC7B,aAAK,CAAC,OAAO,MAAM;AACnB,eAAO;;AAEX,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAqB;;AAE1D,gBAAO,IAAG,CAAE;AC7DxB;;;;;;;;;;;;;;;;AAoCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,iBAAgB,OAAO,SAAS;AAC/C,sBAAgB;AACZ,oBAAY,SAAQ,MAAM,IAAI;AAC9B,aAAK,CAAC,IAAI;AACV,eAAO;;AAEX,qBAAe,CAAE,GAAG,IAAI,OAAO;AAC/B,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB;;AAEtD,mBAAS,IAAG,CAAE,Q
AAA;AC/C1B;;;;;;;;;;;;;;;;AAuDA,6BAAyB,iBAAiB;AACtC,eAAS,iBAAgB,GAAG,KAAK;AACjC,UAAI,GAAG,UAAU;AAEb,aAAK,MAAK,IAAI;;AAElB,sBAAgB;AACZ,qBAAa,gBAAe,MAAM,GAAG;AACrC,4BAAoB,oBAAmB,MAAM,GAAG;AAChD,4BAAoB;AACpB,wBAAgB;AAChB,YAAI,eAAe;AACf,sBAAY,WAAU,IAAI;AAC1B,0BAAgB,kBAAiB,cAAc,QAAQ,GAAG;;AAE9D,oBAAY,SAAQ,KAAK,WAAW;AACpC,YAAI;AACA,2BAAiB,sBAAqB,MAAM,OAAO;AACnD,kBAAQ,SAAQ,OAAO;;AAE3B,eAAO;;AAEX,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,MAAM;AACtB,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,MAAM;;AAE5D,iBAAQ,IAAG,CAAE;ACjFzB;;;;;;;;;;;;;;;;AA4BA;AACI,mBAAa,eAAc;AAC3B,mBAAa;AACb,UAAI,SAAS,QAAQ,UAAU;AAC3B,iBAAS,IAAI,aAAa;iBAErB,UAAU;AACf,iBAAS,IAAI,WAAW;iBAEnB,UAAU;AACf,iBAAS,IAAI,WAAW;;AAGxB,cAAM,IAAI,MAAM,qBAAqB;;AAEzC,mBAAa,GAAG,IAAI,MAAM;AACtB,eAAO,KAAK;;AAEhB,aAAO,QAAO,WAAW,QAAQ,OAAO;;AAEhC,iBAAQ,IAAG,CAAE;;;;;;;;;;;;;;;ACrBxB,MAAA;AAED;AACE,mBAAS,aAAa;AAEtB,aAAG,OAAO;AACR,oBAAQ,UAAU,GAAG,KAAK,GAAG,IAAI;AACjC,eAAG,KAAK,GAAG;AACX,eAAG,KAAK,GAAG;AACX,mBAAO,GAAG,KAAK,IAAK,IAAG,IAAI,IAAI;;AAIjC,aAAG,IAAI;AACP,aAAG,KAAK,KAAK;AACb,aAAG,KAAK,KAAK;AACb,aAAG,KAAK,KAAK;AACb,aAAG,MAAM,KAAK;AACd,cAAI,GAAG,KAAK;AAAK,eAAG,MAAM;;AAC1B,aAAG,MAAM,KAAK;AACd,cAAI,GAAG,KAAK;AAAK,eAAG,MAAM;;AAC1B,aAAG,MAAM,KAAK;AACd,cAAI,GAAG,KAAK;AAAK,eAAG,MAAM;;AAC1B,iBAAO;;AAGT;AACE,YAAE,IAAI,EAAE;AACR,YAAE,KAAK,EAAE;AACT,YAAE,KAAK,EAAE;AACT,YAAE,KAAK,EAAE;AACT,iBAAO;;AAGT;AACE,mBAAS,IAAI,KAAK,eACN,QAAQ,KAAK,cACd,GAAG;AACd,eAAK,QAAQ;AAAa,mBAAQ,GAAG,SAAS,aAAe;;AAC7D,eAAK,SAAS;AACZ,mBAAO,SAAU,UAAS,UAAW,KAAK;;AAE5C,eAAK,QAAQ;AACb,cAAI;AACF,gBAAI,OAAO,SAAU;AAAU,mBAAK,OAAO;AAC3C,iBAAK,QAAQ;AAAa,qBAAO,KAAK,IAAI;;;AAE5C,iBAAO;;AAGT;AACE,kBAAQ;AAER,qBAAW;AACT,oBAAO,MAAK;AACZ,yBAAa,GAAG,IAAI,MAAK,QAAQ;AAC/B,mBAAK,MAAK,WAAW;AACrB,sBAAQ,sBAAsB;AAC9B,kBAAI,MAAM;AACV,mBAAK;AACL,mBAAK;AACL,kBAAI,MAAM;AACV,mBAAK;AACL,mBAAK,IAAI;;AAEX,mBAAQ,OAAM,KAAK;;AAGrB,iBAAO;;AAIT,YAAI,WAAU,QAAO;AACnB,kBAAO,UAAU;mBACR,WAAU,QAAO;AAC1B,kBAAO;AAAa,mBAAO;;;AAE3B,eAAK,OAAO;;SAIZ,gBAC+B,SAC9B;;;AC3GF,MAAA;AAED;AACE,mBAAS,gBAAgB;AAEzB,aAAG,IAAI;AACP,aAAG,IAAI;AACP,aAAG,IAAI;AACP,aAAG,IAAI;AAGP,aAAG,OAAO;AACR,oBAAQ,GAAG,IAAK,GAAG,KAAK;AACxB,eAAG,IAAI,GAAG;AACV,eAAG,IAAI,GAAG;AACV,eAAG,IAAI,GAAG;AACV,mBAAO,GAAG,KAAM,GAAG,MAAM,KAAM,IAAK,MAAM;;AAG5C,cAAI,SAAU,QAAO;AAEnB,eAAG,IAAI;;AAGP,uBAAW;;AAIb,uBAAa,GAAG,IAAI,QAAQ,SAAS,IAAI;AACvC,eAAG,KAAK,QAAQ,WAAW,KAAK;AAChC,eAAG;;;AAIP;AACE,YAAE,IAAI,EAAE;AACR,YAAE,IAAI,EAAE;AACR,YAAE,IAAI,EAAE;AACR,YAAE,IAAI,EAAE;AACR,iBAAO;;AAGT;AACE,mBAAS,IAAI,OAAO,eACR,QAAQ,KAAK,cACd;AAAa,mBAAQ,IAAG,WAAW,KAAK;;AACnD,eAAK,SAAS;AACZ;AACE,wBAAU,GAAG,WAAW,UACb,IAAG,WAAW,KAAK,qBAChB,OAAM,OAAQ,MAAK;qBAC1B,WAAW;AACpB,mBAAO;;AAET,eAAK,QAAQ,GAAG;AAChB,eAAK,QAAQ;AACb,cAAI;AACF,gBAAI,OAAO,SAAU;AAAU,mBAAK,OAAO;AAC3C,iBAAK,QAAQ;AAAa,qBAAO,KAAK,IAAI;;;AAE5C,iBAAO;;AAGT,YAAI,WAAU,QAAO;AACnB,kBAAO,UAAU;mBACR,WAAU,QAAO;AAC1B,kBAAO;AAAa,mBAAO;;;AAE3B,eAAK,SAAS;;SAId,gBAC+B,SAC9B;;;AC1EF,MAAA;AAED;AACE,mBAAS,gBAAgB;AAGzB,aAAG,OAAO;AACR,oBAAS,GAAG,IAAK,GAAG,MAAM;AAC1B,eAAG,IAAI,GAAG;AAAG,eAAG,IAAI,GAAG;AAAG,eAAG,IAAI,GAAG;AAAG,eAAG,IAAI,GAAG;AACjD,mBAAQ,IAAG,IAAK,GAAG,IAAI,SAAS,KAC5B,IAAG,IAAK,GAAG,IAAK,GAAG,KAAK,IAAO,KAAK,KAAK,MAAO;;AAGtD,aAAG,IAAI;AACP,aAAG,IAAI;AACP,aAAG,IAAI;AACP,aAAG,IAAI;AACP,aAAG,IAAI;AAEP,cAAI,SAAU,QAAO;AAEnB,eAAG,IAAI;;AAGP,uBAAW;;AAIb,uBAAa,GAAG,IAAI,QAAQ,SAAS,IAAI;AACvC,eAAG,KAAK,QAAQ,WAAW,KAAK;AAChC,gBAAI,KAAK,QAAQ;AACf,iBAAG,IAAI,GAAG,KAAK,KAAK,GAAG,MAAM;;AAE/B,eAAG;;;AAIP;AACE,YAAE,IAAI,EAAE;AACR,YAAE,IAAI,EAAE;AACR,YAAE,IAAI,EAAE;AACR,YAAE,IAAI,EAAE;AACR,YAAE,IAAI,EAAE;AACR,YAAE,IAAI,EAAE;AACR,iBAAO;;AAGT;AACE,mBAAS,IAAI,OAAO,eACR,QAAQ,KAAK,cACd;AAA
a,mBAAQ,IAAG,WAAW,KAAK;;AACnD,eAAK,SAAS;AACZ;AACE,wBAAU,GAAG,WAAW,UACb,IAAG,WAAW,KAAK,qBAChB,OAAM,OAAQ,MAAK;qBAC1B,WAAW;AACpB,mBAAO;;AAET,eAAK,QAAQ,GAAG;AAChB,eAAK,QAAQ;AACb,cAAI;AACF,gBAAI,OAAO,SAAU;AAAU,mBAAK,OAAO;AAC3C,iBAAK,QAAQ;AAAa,qBAAO,KAAK,IAAI;;;AAE5C,iBAAO;;AAGT,YAAI,WAAU,QAAO;AACnB,kBAAO,UAAU;mBACR,WAAU,QAAO;AAC1B,kBAAO;AAAa,mBAAO;;;AAE3B,eAAK,SAAS;;SAId,gBAC+B,SAC9B;;;AC7EF,MAAA;AAED;AACE,mBAAS;AAGT,aAAG,OAAO;AAER,oBAAQ,GAAG,OAAO,GAAG;AACrB,gBAAI,EAAE;AAAI,iBAAM,MAAM;AAAI,gBAAI,IAAK,KAAK;AACxC,gBAAI,EAAG,IAAI,IAAK;AAAI,iBAAK,IAAK,MAAM;AACpC,gBAAI,EAAG,IAAI,IAAK;AAAI,iBAAK,IAAK,MAAM;AACpC,gBAAI,EAAG,IAAI,IAAK;AAAI,iBAAK,IAAK,KAAK;AACnC,gBAAI,EAAG,IAAI,IAAK;AAAI,gBAAI,IAAK,KAAK;AAAK,iBAAK,IAAK,KAAK;AACtD,cAAE,KAAK;AACP,eAAG,IAAK,IAAI,IAAK;AACjB,mBAAO;;AAGT;AACE,0BAAc;AAEd,gBAAI,UAAU,SAAO;AAEnB,kBAAI,EAAE,KAAK;;AAGX,sBAAO,KAAK;AACZ,mBAAK,IAAI,GAAG,IAAI,MAAK,QAAQ,EAAE;AAC7B,kBAAE,IAAI,KAAM,EAAE,IAAI,MAAM,KACnB,MAAK,WAAW,KAAK,EAAG,IAAI,IAAK,MAAM;;;AAIhD,mBAAO,EAAE,SAAS;AAAG,gBAAE,KAAK;AAC5B,iBAAK,IAAI,GAAG,IAAI,KAAK,EAAE,OAAO,GAAG,EAAE;AAAE;AACrC,gBAAI,KAAK;AAAG,kBAAI,EAAE,KAAK;;AAAS,kBAAI,EAAE;AAEtC,gBAAG,IAAI;AACP,gBAAG,IAAI;AAGP,iBAAK,IAAI,KAAK,IAAI,GAAG,EAAE;AACrB,kBAAG;;;AAIP,gBAAK,IAAI;;AAGX;AACE,YAAE,IAAI,EAAE,EAAE;AACV,YAAE,IAAI,EAAE;AACR,iBAAO;;AAGT;AACE,cAAI,QAAQ;AAAM,mBAAO,CAAE,IAAI;AAC/B,mBAAS,IAAI,OAAO,eACR,QAAQ,KAAK,cACd;AAAa,mBAAQ,IAAG,WAAW,KAAK;;AACnD,eAAK,SAAS;AACZ;AACE,wBAAU,GAAG,WAAW,UACb,IAAG,WAAW,KAAK,qBAChB,OAAM,OAAQ,MAAK;qBAC1B,WAAW;AACpB,mBAAO;;AAET,eAAK,QAAQ,GAAG;AAChB,eAAK,QAAQ;AACb,cAAI;AACF,gBAAI,MAAM;AAAG,mBAAK,OAAO;AACzB,iBAAK,QAAQ;AAAa,qBAAO,KAAK,IAAI;;;AAE5C,iBAAO;;AAGT,YAAI,WAAU,QAAO;AACnB,kBAAO,UAAU;mBACR,WAAU,QAAO;AAC1B,kBAAO;AAAa,mBAAO;;;AAE3B,eAAK,YAAY;;SAIjB,gBAC+B,SAC9B;;;ACrEF,MAAA;AAED;AACE,mBAAS;AAGT,aAAG,OAAO;AACR,oBAAQ,GAAG,OACH,GAAG,OAAO,GAAG;AAErB,eAAG,IAAI,IAAK,IAAI,aAAc;AAE9B,gBAAI,EAAG,IAAI,KAAM;AACjB,gBAAI,EAAE,IAAM,IAAI,IAAK;AACrB,iBAAK,KAAK;AACV,iBAAK,KAAK;AACV,iBAAK,MAAM;AACX,iBAAK,MAAM;AAEX,gBAAI,EAAE,KAAK,IAAI;AACf,eAAG,IAAI;AAEP,mBAAQ,IAAK,KAAK,MAAM,MAAQ;;AAGlC;AACE,mCAAuB,YAAY;AACnC,gBAAI,UAAU,SAAO;AAEnB,kBAAI;AACJ,sBAAO;;AAGP,sBAAO,QAAO;AACd,kBAAI;AACJ,sBAAQ,KAAK,IAAI,OAAO,MAAK;;AAG/B,iBAAK,IAAI,GAAG,IAAI,KAAK,IAAI,OAAO,EAAE;AAEhC,kBAAI;AAAM,qBAAK,MAAK,WAAY,KAAI,MAAM,MAAK;AAE/C,kBAAI,MAAM;AAAG,oBAAI;AACjB,mBAAK,KAAK;AACV,mBAAK,MAAM;AACX,mBAAK,KAAK;AACV,mBAAK,MAAM;AACX,kBAAI,KAAK;AACP,oBAAK,IAAI,aAAc;AACvB,oBAAK,EAAE,IAAI,QAAS,IAAI;AACxB,oBAAU,KAAL,IAAU,IAAI,IAAI;;;AAI3B,gBAAI,KAAK;AACP,gBAAG,UAAQ,MAAK,UAAU,KAAK,OAAO;;AAKxC,gBAAI;AACJ,iBAAK,IAAI,IAAI,KAAK,IAAI,GAAG,EAAE;AACzB,kBAAI,EAAG,IAAI,KAAM;AACjB,kBAAI,EAAE,IAAM,IAAI,IAAK;AACrB,mBAAK,KAAK;AACV,mBAAK,KAAK;AACV,mBAAK,MAAM;AACX,mBAAK,MAAM;AACX,gBAAE,KAAK,IAAI;;AAGb,gBAAG,IAAI;AACP,gBAAG,IAAI;AACP,gBAAG,IAAI;;AAGT,gBAAK,IAAI;;AAGX;AACE,YAAE,IAAI,EAAE;AACR,YAAE,IAAI,EAAE;AACR,YAAE,IAAI,EAAE,EAAE;AACV,iBAAO;;AACR;AAED;AACE,cAAI,QAAQ;AAAM,mBAAO,CAAE,IAAI;AAC/B,mBAAS,IAAI,OAAO,eACR,QAAQ,KAAK,cACd;AAAa,mBAAQ,IAAG,WAAW,KAAK;;AACnD,eAAK,SAAS;AACZ;AACE,wBAAU,GAAG,WAAW,UACb,IAAG,WAAW,KAAK,qBAChB,OAAM,OAAQ,MAAK;qBAC1B,WAAW;AACpB,mBAAO;;AAET,eAAK,QAAQ,GAAG;AAChB,eAAK,QAAQ;AACb,cAAI;AACF,gBAAI,MAAM;AAAG,mBAAK,OAAO;AACzB,iBAAK,QAAQ;AAAa,qBAAO,KAAK,IAAI;;;AAE5C,iBAAO;;AAGT,YAAI,WAAU,QAAO;AACnB,kBAAO,UAAU;mBACR,WAAU,QAAO;AAC1B,kBAAO;AAAa,mBAAO;;;AAE3B,eAAK,UAAU;;SAIf,gBAC+B,SAC9B;;;AC5IF,MAAA;AAED;AACE,mBAAS,gBAAgB;AAGzB,aAAG,OAAO;AACR,oBAAQ,GAAG,OAAO,GAAG,OAAO,GAAG,OAAO,GAAG;AACzC,gBAAK,KAAK,KAAO,MAAM,IAAK;AAC5B,gBAAK,IAAI,IAAK;AACd,gBAAK,KAAK,KAAO,MAAM,IAAK;AAC5B,gBAAK,IAAI,IAAK;AACd,eAA
G,IAAI,IAAK,KAAK,KAAO,MAAM,KAAM;AACpC,eAAG,IAAI,IAAK,IAAI,IAAK;AACrB,eAAG,IAAK,KAAK,KAAO,MAAM,KAAM;AAChC,mBAAO,GAAG,IAAK,IAAI,IAAK;;AAmB1B,aAAG,IAAI;AACP,aAAG,IAAI;AACP,aAAG,IAAI,aAAa;AACpB,aAAG,IAAI;AAEP,cAAI,SAAS,KAAK,MAAM;AAEtB,eAAG,IAAK,OAAO,aAAe;AAC9B,eAAG,IAAI,OAAO;;AAGd,uBAAW;;AAIb,uBAAa,GAAG,IAAI,QAAQ,SAAS,IAAI;AACvC,eAAG,KAAK,QAAQ,WAAW,KAAK;AAChC,eAAG;;;AAIP;AACE,YAAE,IAAI,EAAE;AACR,YAAE,IAAI,EAAE;AACR,YAAE,IAAI,EAAE;AACR,YAAE,IAAI,EAAE;AACR,iBAAO;;AACR;AAED;AACE,mBAAS,IAAI,OAAO,eACR,QAAQ,KAAK,cACd;AAAa,mBAAQ,IAAG,WAAW,KAAK;;AACnD,eAAK,SAAS;AACZ;AACE,wBAAU,GAAG,WAAW,UACb,IAAG,WAAW,KAAK,qBAChB,OAAM,OAAQ,MAAK;qBAC1B,WAAW;AACpB,mBAAO;;AAET,eAAK,QAAQ,GAAG;AAChB,eAAK,QAAQ;AACb,cAAI;AACF,gBAAI,OAAO,SAAU;AAAU,mBAAK,OAAO;AAC3C,iBAAK,QAAQ;AAAa,qBAAO,KAAK,IAAI;;;AAE5C,iBAAO;;AAGT,YAAI,WAAU,QAAO;AACnB,kBAAO,UAAU;mBACR,WAAU,QAAO;AAC1B,kBAAO;AAAa,mBAAO;;;AAE3B,eAAK,SAAS;;SAId,gBAC+B,SAC9B;;;AC3EF,MAAA;AAID,sBAAa,cACD,cACC,YACA,cACC,uBACG,MAAK,IAAI,OAAO,wBACd,MAAK,IAAI,GAAG,oBAChB,eAAe,UACnB,QAAQ;AAOnB;AACE,oBAAU;AACV,oBAAW,WAAW,OAAQ,CAAE,SAAS,QAAU,WAAW;AAG9D,0BAAgB,OAAO,SACrB,QAAQ,UAAU,CAAC,MAAM,SAAS,UACjC,QAAQ,OAAQ,aAAa,MAAM,IAAI;AAG1C,qBAAW,IAAI,KAAK;AAIpB,qBAAW;AACT,oBAAQ,KAAK,EAAE,aACP,gBACA;AACR,mBAAO,IAAI;AACT,kBAAK,KAAI,KAAK;AACd,mBAAK;AACL,kBAAI,KAAK,EAAE;;AAEb,mBAAO,KAAK;AACV,mBAAK;AACL,mBAAK;AACL,qBAAO;;AAET,mBAAQ,KAAI,KAAK;;AAGnB,eAAK,QAAQ;AAAa,mBAAO,KAAK,EAAE,KAAK;;AAC7C,eAAK,QAAQ;AAAa,mBAAO,KAAK,EAAE,KAAK;;AAC7C,eAAK,SAAS;AAGd,iBAAO,SAAS,KAAK,IAAI;AAGzB,iBAAQ,SAAQ,QAAQ,YACpB;AACE,gBAAI;AAEF,kBAAI,MAAM;AAAK,qBAAK,OAAO;;AAE3B,oBAAK,QAAQ;AAAa,uBAAO,KAAK,MAAM;;;AAK9C,gBAAI;AAAgB,oBAAK,WAAW;AAAM,qBAAO;;AAI5C,qBAAO;aAElB,MACA,WACA,YAAY,UAAU,QAAQ,SAAU,QAAQ,OAChD,QAAQ;;AAEV,cAAK,SAAS,WAAW;AAYzB;AACE,0BAAgB,IAAI,aACX,UAAU,OAAO,GAAG,IAAI,GAAG,IAAI,OAAO,GAAG,IAAI;AAGtD,cAAI,CAAC;AAAU,kBAAM,CAAC;;AAGtB,iBAAO,IAAI;AACT,cAAE,KAAK;;AAET,eAAK,IAAI,GAAG,IAAI,OAAO;AACrB,cAAE,KAAK,EAAE,IAAI,OAAQ,IAAI,IAAI,IAAI,UAAW,KAAI,EAAE;AAClD,cAAE,KAAK;;AAIR,UAAA,IAAG,IAAI;AAEN,wBAAW,QACH,GAAG,QAAO,GAAG,QAAO,GAAG;AAC/B,mBAAO;AACL,mBAAI,GAAE,KAAI,OAAQ,KAAI;AACtB,kBAAI,IAAI,QAAQ,GAAE,OAAS,IAAE,MAAK,GAAE,KAAI,OAAQ,KAAI,OAAQ,IAAE,MAAK;;AAErE,eAAG,IAAI;AAAG,eAAG,IAAI;AACjB,mBAAO;aAIN;;AAOL;AACE,YAAE,IAAI,EAAE;AACR,YAAE,IAAI,EAAE;AACR,YAAE,IAAI,EAAE,EAAE;AACV,iBAAO;;AACR;AAMD;AACE,uBAAa,UAAW,OAAO;AAC/B,cAAI,SAAS,OAAO;AAClB,iBAAK,QAAQ;AACX;AAAM,uBAAO,KAAK,SAAQ,IAAI,OAAO,QAAQ;;;;;AAGjD,iBAAQ,OAAO,SAAS,SAAS,OAAO,WAAW,MAAM,MAAM;;AAQjE;AACE,2BAAiB,OAAO,eAAe;AACvC,iBAAO,IAAI,WAAW;AACpB,gBAAI,OAAO,KACT,OAAS,UAAS,IAAI,OAAO,KAAK,MAAM,WAAW,WAAW;;AAElE,iBAAO,SAAS;;AAQlB;AACE;AACE;AACA,gBAAI,cAAe,OAAM,WAAW;AAElC,oBAAM,IAAI;;AAEV,oBAAM,IAAI,WAAW;AACpB,cAAA,SAAO,UAAU,QAAO,UAAU,gBAAgB;;AAErD,mBAAO,SAAS;;AAEhB,2BAAc,QAAO,qBACP,YAAW,SAAQ;AACjC,mBAAO,CAAC,CAAC,IAAI,QAAM,SAAQ,SAAS,QAAO,QAAQ,SAAS;;;AAQhE;AACE,iBAAO,OAAO,aAAa,MAAM,GAAG;;AAUtC,eAAO,MAAK,UAAU;AAMtB,YAAmC,QAAO;AACxC,kBAAA,UAAiB;AAEjB;AACE,yBAAa;;;mBAEL;AACV,UAAA,SAAO;AAAa,mBAAO;;;SAK3B,IACA;;ACjMF,eAAG,OAAO;AACV,eAAG,SAAS;AACZ,eAAG,SAAS;AACZ,eAAG,YAAY;AACf,eAAG,UAAU;AACb,eAAG,SAAS;AAEZ,uBAAiB;;AC3DjB;;;;;;;;;;;;;;;;;MAoBI;AACI,aAAK,OAAO;AACZ,aAAK,SAAS;AACd,aAAK,QAAQ;AACb,aAAK,UAAU;AACf,aAAK,YAAY;AACjB,YAAI,KAAK;AACL,eAAK,QAAQ,KAAK,OAAO,KAAK,SAAS;AACvC,eAAK,QAAQ,KAAK,OAAO,KAAK,SAAS;;AAE3C,0BAAkB,OAAO,OAAO,KAAK;AACrC,aAAK,SAAS,aAAgB,UAAU;;MAG5C;AACI,YAAI,CAAC,MAAM,KAAK;AACZ,wBAAc,KAAK;AACnB,eAAK,UAAU;AACf,iBAAO;;AAEX;AACA,sBAAc;AACd,eAAO,CAAC;AACJ;AACA;AACI,iBAAK,IAAI,KAAK,WAAW;AACzB,iBAAK,IAAI,KAAK,WAAW;AACzB,gBAAI,KAAK,KAAK,KAAK;mBACd,KAAK,KAAK,MAAM;AACzB,uBAAY,KAAK,KAAK,KAA
(Remaining hunks: regenerated build artifacts, i.e. minified bundle and source-map mappings data with no reviewable source changes.)
AAO,CAAE,MAAA,OAAM,MAAA;;AAMZ;AACH,kBAAY,KAAK,MAAM,SAAQ,SAAS;AACxC,oBAAa,IAAI,aAAa;AAC9B,oBAAa,IAAI,aAAa;AAC9B,mBAAa,GAAG,IAAI,SAAQ,QAAQ,KAAK;AACrC,cAAK,KAAK,MAAM,IAAI,MAAM,SAAQ;AAClC,cAAK,KAAK,MAAM,IAAI,MAAM,SAAQ,IAAI;;AAE1C,aAAO,CAAE,MAAA,OAAM,MAAA;;AAOZ;AACH,oBAAa,SAAQ,SAAQ;AAC7B,oBAAa,SAAQ,SAAQ,IAAI;AACjC,aAAO,CAAE,MAAA,OAAM,MAAA;;AAQZ;AACH,YAAK,SAAQ,KAAK;AAClB,YAAK,SAAQ,IAAI,KAAK;;AAKnB;AACH,oBAAa,IAAI,aAAa,IAAI;AAClC,oBAAa,IAAI,aAAa,IAAI;AAClC,mBAAa,GAAG,IAAI,KAAK,KAAK,IAAI,IAAI;AAClC,kBAAW,WAAU,IAAI,MAAM,KAAK,KAAM,KAAI;AAC9C,cAAK,KAAK,KAAK,IAAI;AACnB,cAAK,KAAK,KAAK,IAAI;;AAEvB,aAAO,CAAE,MAAA,OAAM,MAAA;;AAKZ;AACH,gBAAW,WAAU,IAAI,MAAM,KAAK,KAAM,KAAI;AAC9C,oBAAa,KAAK,IAAI;AACtB,oBAAa,KAAK,IAAI;AACtB,aAAO,CAAE,MAAA,OAAM,MAAA;;ACrInB;;;;;;;;;;;;;;;;AA4CO;AACH,UAAI,UAAU;AACV,YAAI,EAAE,UAAU;AACZ,iBAAO,EAAE;;AAEb,4BAAoB,OAAM,EAAE;AAC5B,uBAAe,MAAK,GAAG;AACvB,uBAAe,SAAQ,QAAQ,QAAQ;AACvC,oBAAY;AACZ,eAAO;AACP,eAAO;;AAEX,UAAI,CAAC,iBAAgB,EAAE,OAAO;AAG1B,eAAO,QAAO,qBAAqB,EAAE,QAAQ,EAAE,OAAO;;AAE1D,UAAI,EAAE,UAAU;AACZ,sBAAa,SAAQ,KAAK;AAC1B,uBAAe,MAAK,OAAM;AAC1B,cAAK;AACL,eAAO;;AAEX,UAAI,UAAU;AACV,eAAO,SAAQ,IAAI;iBAEd,UAAU;AACf,qBAAa,QAAO,GAAG,EAAE;AACzB,uBAAe,SAAQ,SAAS,GAAG;AACnC,aAAK;AACL,eAAO;;AAGP,cAAM,IAAI,MAAM,iCAAiC,EAAE,YAAY;;;AAGhE;AACH,aAAO,QAAO,qBAAqB,EAAE,QAAQ,OAAO,EAAE;;AAEnD;AACH,oBAAc,QAAO,SAAU,OAAM;AACrC,qBAAe,qBAAoB,KAAK;AACxC,aAAO,KAAK;AACZ,mBAAa,GAAG,IAAI,OAAO,QAAQ;AAC/B,eAAO,KAAK,OAAO,IAAI,KAAK;;AAEhC,aAAO,UAAS,QAAQ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AC1F5B;;;;;;;;;;;;;;;;AAqBO;AACH,oBAAc,IAAI,MAAM,EAAE,MAAM,KAAK;AACrC,mBAAa,EAAE,MAAM;AACrB,aAAO,WAAW,IAAI;AAClB,0BAAkB,CAAC,GAAG;AACtB,kBAAU,QAAQ;AAClB,uBAAe,OAAM,GAAG,OAAO;AAC/B,cAAM,SAAS;AACf,eAAO;;;AC7Bf;;;;;;;;;;;;;;;;AAqBO;AACH,uBAAiB,IAAI,MAAM,KAAK;AAChC,mBAAa,GAAG,IAAI,SAAS,QAAQ;AACjC,iBAAS,KAAK,KAAK,MAAM,KAAK,KAAK;;AAEvC,qBAAe,QAAO,UAAU,KAAK;AACrC,mBAAa,GAAG,IAAI,OAAO,OAAO,QAAQ,EAAE;AACxC,uBAAe,OAAO,WAAW;AACjC,4BAAoB,IAAI,MAAM,KAAK;AACnC,qBAAa,GAAG,IAAI,YAAY,QAAQ;AACpC,sBAAY,KAAK,OAAO,KAAK,KAAK,MAAM;;AAE5C,8BAAsB,KAAK,WAAW;AACtC,eAAO,OAAO,KAAK,KAAK,OAAO;;AAEnC,aAAO,OAAO;;ACpClB;;;;;;;;;;;;;;;;AAmBO;AAEH,sBAAgB,OAAO,OAAO,SAAS;AACvC,4BAAsB,CAAC,EAAE,SAAS,SAAS;AAC3C,0BAAoB,wBAAuB,QAAQ,QAAQ;AAC3D,6BAAuB,wBAAuB,SAAS,QAAQ;AAC/D,mBAAa,GAAG,IAAI,OAAO;AACvB,uBAAe,IAAI;AACnB,qBAAa,EAAE,SAAS,QAAQ,SAAS;AACzC,0BAAkB;AAClB,qBAAa,GAAG,IAAI,KAAK,QAAQ;AAC7B,oBAAU,KAAK,CAAE,OAAO,KAAK,IAAI,OAAO;;AAE5C,kBAAU,KAAK,WAAU,GAAE,QAAQ,EAAE;AACrC,0BAAkB,IAAI;AACtB,yBAAiB,YAAY,SAAS,WAAW,YAAY;AAC7D,4BAAoB,eAAe,SAAS,WAAW,YAAY;AACnE,qBAAa,GAAG,IAAI,GAAG;AACnB,mBAAS,KAAK,UAAU,GAAG;AAC3B,sBAAY,KAAK,UAAU,GAAG;;;AAKtC,0BAAoB,OAAO;AAC3B,kBAAY,YAAY,SAAS,KAAK;AACtC,aAAO;QACH,QAAO,aAAa,aAAa;QACjC,QAAO,gBAAgB,aAAa;;;AC/C5C;;;;;;;;;;;;;;;;;;;;;;;;;;ACAA;;;;;;;;;;;;;;;;ACAA;;;;;;;;;;;;;;;;AAoBO,0BAAsB;MACzB,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,KAAI,IAAI,MAAK,MAAK,GAAG,YAAY;;;ACzB3D;;;;;;;;;;;;;;;;AAwBO,2BAAuB;MAC1B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO;UACH,GAAG;AACC,sBAAU,OAAO,MAAK,GAAG;AACzB,sBAAU,KAAK,IAAI,QAAO,IAAI;AAC9B,mBAAO,IAAI,IAAI,IAAI;;;;;ACjCnC;;;;;;;;;;;;;;;;AAsBO,4BAAwB;MAC3B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO;UACH,GAAG;AACC,sBAAU,KAAK,IAAI,OAAO,MAAK,GAAG,aAAa;AAC/C,mBAAO,IAAI,IAAI;;;;;AC9B/B;;;;;;;;;;;;;;;;AAoBO,0BAAsB;MACzB,YAAY;MACZ,cAAc,CAAC,KAAK;MACpB,UAAU;AACN,uBAAe;AACf,yBAAiB,4BAA0C,EAAE,OAAO,EAAE;AACtE,qBAAa;AACT,oBAAU;AACV,6BAAmB,kBAAgC,EAAE,OAAO;AAC5D,cAAI,WAAW,SAAS;AACpB,kBAAM,MAAI,KAAK;;AAEnB,iBAAO,SAAQ,KAAK,EAAE;;AAE1B,qBAAa;AACT,oBAAU;AACV,6BAAmB,kBAA
gC,EAAE,OAAO;AAC5D,cAAI,WAAW,SAAS;AACpB,kBAAM,MAAI,KAAK;;AAEnB,iBAAO,SAAQ,KAAK,EAAE;;AAE1B,eAAO,CAAE,GAAG,MAAM,GAAG;;;AC1C7B;;;;;;;;;;;;;;;;AAiBO,2BAAuB;MAC1B,YAAY;MACZ,eAAe;MACf,UAAU;AACN,qBAAa;AACb,cAAM,QAAQ;AACV,eAAK,KAAK,MAAM,GAAG;;AAEvB,eAAO;;;ACzBf;;;;;;;;;;;;;;;;AAkBO,6BAAyB;MAC5B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,WAAU;;;ACvBpC;;;;;;;;;;;;;;;;AAkBO,6BAAyB;MAC5B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,WAAU;;;ACvBpC;;;;;;;;;;;;;;;;AAuBO,2BAAuB;MAC1B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,IAAI,IAAI,KAAK,IAAI,QAAO,IAAI,OAAO,MAAK,GAAG;;;AC5BrE;;;;;;;;;;;;;;;;AAuBO,4BAAwB;MAC3B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO;UACH,GAAG;AACC,sBAAU,KAAK,MAAI,QAAO,IAAI,OAAO,MAAK,GAAG;AAC7C,mBAAO,IAAI,IAAI;;;;;AC/B/B;;;;;;;;;;;;;;;;AAyBO,4BAAwB;MAC3B,YAAY;MACZ,cAAc,CAAC,KAAK;MACpB,UAAU;AACN,uBAAe;AACf,yBAAiB,4BAA2B,EAAE,OAAO,EAAE;AACvD,qBAAa;AACT,oBAAU,MAAI,OAAO,IAAI,OAAO;AAChC,oBAAU,KAAI,IAAI,IAAI,GAAG;AACzB,6BAAmB,kBAAiB,EAAE,OAAO;AAC7C,cAAI,WAAW,SAAS;AACpB,kBAAM,MAAI,KAAK;;AAEnB,iBAAO,SAAQ,KAAK,EAAE;;AAE1B,qBAAa;AACT,oBAAU,MAAI,OAAO,IAAI,OAAO;AAChC,oBAAU,IAAI,KAAI,IAAI,IAAI,GAAG;AAC7B,6BAAmB,kBAAiB,EAAE,OAAO;AAC7C,cAAI,WAAW,SAAS;AACpB,kBAAM,MAAI,KAAK;;AAEnB,iBAAO,SAAQ,KAAK,EAAE;;AAE1B,eAAO,CAAE,GAAG,MAAM,GAAG;;;ACjD7B;;;;;;;;;;;;;;;;AAqBO,2BAAuB;MAC1B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,IAAI,IAAI,MAAI,OAAO,MAAK,GAAG,aAAa;;;AC1BlE;;;;;;;;;;;;;;;;AAsBO,4BAAwB;MAC3B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,IAAI,IAAI,IAAI,QAAO,IAAI,OAAO,MAAK,GAAG;;;AC3BhE;;;;;;;;;;;;;;;;AAoDA,6EAAwE,CAAC,GAAG,GAAG;AAC3E,kBAAY,iBAAgB,IAAI,MAAM;AACtC,qBAAe,iBAAgB,QAAO,SAAS;AAC/C,iBAAW;AACX,oBAAc;AACd,yBAAmB;AACnB,UAAI,OAAO,SAAS;AAChB,uBAAe;AACf,eAAO,SAAQ,KAAK,CAAC,GAAG,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;AAC5E,kBAAU,SAAQ,QAAQ;UACtB;UAAG,OAAO,MAAM;UAAI,OAAO,MAAM;UAAI,OAAO,MAAM;UAAI,OAAO,MAAM;;;AAG3E,cAAY,KAAK,SAAS,GAAG,MAAM,8DAC5B,KAAK;AACZ,cAAY,QAAQ,SAAS,GAAG,MAAM,iEAC/B,QAAQ;AACf,cAAY,gCAAyC,SAAS,YAAY,MAAM,kFAClD,0BAA0B;AACxD,UAAI,mBAAmB;AACnB,gBAAY,OAAW,OAAM,MAAM,kFACL,+BAA+B;;AAEjE,sBAAgB;AACZ,yBAAiB,mBAA4B,QAAQ,OAAO,YAAY,SAAS,WAAW,MAAK;AACjG,eAAO,SAAQ,kBAAkB,MAAM,SAAS;;AAEpD,qBAAe,CAAE,IAAI,MAAM,OAAO;AAClC,oBAAc,CAAE,YAAY,SAAS,WAAW,KAAA,MAAK;AACrD,kBAAY,QAAO,cAAc,SAAS,QAAQ,MAAiB,mBAAmB;AACtF,UAAI;AACA,eAAO,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAE7E,aAAO;;AAEJ,8BAA0B,IAAG,CAAE;ACvFtC;;;;;;;;;;;;;;;;AAkBO,gCAA4B;MAC/B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAQ,YAAY,SAAS,WAAW,WAAK,mBAAoB;AACjE,2BAAmB,aAAa,OAAO,CAAC,GAAG,GAAG,KAAK;AACnD,eAAO;UACH,GAAG,MAAM,kBAAkB,IAAI,GAAG,YAAY,SAAS,YAAY,MAAK;;;;AC1BpF;;;;;;;;;;;;;;;;AAuCA;AACI,kBAAY,iBAAgB,IAAI,MAAM;AACtC,qBAAe,iBAAgB,QAAO,SAAS;AAC/C,cAAY,OAAO,SAAS,IAAI,MAAM,MAAM,kBAAkB,OAAO,oCAAoC,IAAI;AAC7G,oBAAc;AACd,iBAAW;AACX,yBAAmB;AACnB,UAAI,OAAO,SAAS;AAChB,uBAAe;AACf,kBACI,SAAQ,QAAQ,CAAC,GAAG,OAAO,MAAM,IAAI,OAAO,MAAM,IAAI,OAAO,MAAM;AACvE,eAAO,SAAQ,KAAK,CAAC,GAAG,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAElE,cAAY,KAAK,SAAS,GAAG,MAAM,4DAC5B,KAAK;AACZ,cAAY,QAAQ,SAAS,GAAG,MAAM,+DAC/B,QAAQ;AACf,sBAAgB;AACZ,yBAAiB,mBAA4B,QAAQ,OAAO,YAAY,SAAS,GAAmB;AACpG,eAAO,SAAQ,gBAAgB,MAAM,SAAS;;AAElD,qBAAe,CAAE,IAAI,MAAM,OAAO;AAClC,oBAAc,CAAE,YAAY,SAAS,KAAA;AACrC,kBAAY,QAAO,cAAc,SAAS,QAAQ,MAAM,iBAAiB;AACzE,UAAI;AACA,eAAO,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAE/D,aAAO;;AAEJ,4BAAwB,IAAG,CAAE;ACpEpC;;;;;;;;;;;;;;;;AAkBO,8BAA0B;MAC7B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;A
ACZ,eAAQ,YAAY,SAAS,aAAQ;AACrC,eAAO;UACH,GAAG,MAAM,gBAAgB,IAAI,GAAG,YAAY,SAAS;;;;ACzBjE;;;;;;;;;;;;;;;;AAkBO,kCAA8B;MACjC,YAAY;MACZ,cAAc,CAAC,KAAK;MACpB,UAAU;AACN,uBAAe;AACf,eAAQ,YAAY,cAAe;AACnC,YAAI,CAAC,cAAc,CAAC;AAChB,iBAAO;YACH,GAAG,MAAM,OAAO,IAAI,GAAG,OAAO;YAC9B,GAAG,MAAM,OAAO,GAAG,IAAI,MAAM;;mBAG5B,CAAC,cAAc;AACpB,iBAAO;YACH,GAAG,MAAM,OAAO,IAAI,GAAG,OAAO;YAC9B,GAAG,MAAM,OAAO,IAAI,GAAG,MAAM;;mBAG5B,cAAc,CAAC;AACpB,iBAAO;YACH,GAAG,MAAM,OAAO,GAAG,IAAI,OAAO;YAC9B,GAAG,MAAM,OAAO,GAAG,IAAI,OAAO;;;AAIlC,iBAAO;YACH,GAAG,MAAM,OAAO,GAAG,IAAI,MAAM;YAC7B,GAAG,MAAM,OAAO,IAAI,GAAG,MAAM;;;;;AC7C7C;;;;;;;;;;;;;;;;AAkBO,qCAAiC;MACpC,YAAY;MACZ,UAAU;AACN,eAAQ,YAAY,SAAU;AAC9B,eAAO,CAAE,GAAG,MAAM,eAAe,IAAI,YAAY;;;ACtBzD;;;;;;;;;;;;;;;;AAkBO,kCAA8B;MACjC,YAAY;MACZ,UAAU;AACN,iCAAyB;AACzB,2BAAmB,iBAAiB;AACpC,4BAAoB,iBAAiB;AACrC,qBAAa,MAAM,KAAK;AACxB,qBAAa,WAAW,SAAS,GAAG,KAAK,GAAG;AACxC,cAAI,WAAW,OAAO,YAAY;AAC9B,iBAAK,KAAK;qBAEL,WAAW,OAAO;AACvB,kBAAM,IAAI,MAAM,mBAAmB,uCAAuC;;;AAGlF,qBAAa;AACb,qBAAa,GAAG,IAAI,KAAK,QAAQ;AAC7B,cAAI,KAAK,KAAK;AACV,iBAAK,KAAK;;;AAGlB,eAAO,CAAE,GAAG,MAAM,MAAI,IAAI,MAAM;;;ACvCxC;;;;;;;;;;;;;;;;AAiBO,2BAAuB;MAC1B,YAAY;MACZ,UAAU;AACN,eAAO,CAAE,GAAG,MAAM,GAAG;;;ACpB7B;;;;;;;;;;;;;;;;AAkBO,2BAAuB;MAC1B,YAAY;MACZ,UAAU;AAEN,eAAO,CAAE,GAAG,MAAM,WAAU;;;ACtBpC;;;;;;;;;;;;;;;;AAsBO,kCAA8B;MACjC,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAQ,cAAc,gBAAiB;AACvC,eAAO;UACH,GAAG,MAAM,MAAM,WAAW,aAAa,GAAG,eAAe,UAAU,GAAG,gBAAgB,IAAI,WAAU;;;;AC7BhH;;;;;;;;;;;;;;;;AAmBO,6BAAyB;MAC5B,YAAY;MACZ,eAAe;MACf,UAAU;AACN,uBAAe,MAAM,IAAI,OAAK,EAAE;AAChC,eAAQ,QAAS;AACjB,sBAAc,gBAAe,MAAM,MAAM,GAAG,OAAO;AACnD,2BAAmB,OAAO,IAAI,OAAK,EAAE;AACrC,2BAAmB,OAAM,IAAI,YAAY;AACzC,eAAO,WAAW,IAAI,OAAK,MAAM;;;AC5BzC;;;;;;;;;;;;;;;;AAqBO,6BAAyB;MAC5B,YAAY;MACZ,cAAc,CAAC,KAAK;MACpB,UAAU;AACN,+BAAuB;AACvB,eAAQ,WAAW,SAAS,WAAK,cAAe;AAChD,gBAAY,mBAA4B,YAAY,MAAM,iHACA;AAC1D,eAAO;UACH,GAAG,MAAM,qBAAoB,IAAI,OAAO,IAAI,SAAS,SAAS,MAAK;UACnE,QAAQ,MAAM,qBAAqB,KAAK,IAAI,QAAQ,OAAO,SAAS,MAAK;;;;AC/BrF;;;;;;;;;;;;;;;;AAmBO,0CAAsC;MACzC,YAAY;MACZ,cAAc,CAAC,MAAM;MACrB,UAAU;AACN,6BAAqB;AACrB,eAAQ,SAAS,WAAK,YAAY,mBAAoB;AACtD,eAAO;UACH,IAAI,MAAM,QAAO,KAAK,QAAQ,SAAS,MAAK,YAAY,GAAmB;UAC3E,QAAQ,MAAM,qBAAqB,KAAK,IAAI,OAAO,OAAO,SAAS,MAAK,YAAY;;;;AC3BhG;;;;;;;;;;;;;;;;AAsCA;AACI,gBAAU;AACV,UAAI,EAAE,SAAS;AACX,cAAM,SAAQ,GAAG,CAAC,GAAG,EAAE,MAAM,IAAI,EAAE,MAAM,IAAI,EAAE,MAAM,IAAI,EAAE,MAAM;;AAErE,iBAAW;AACX,UAAI,KAAK,SAAS;AACd,eAAO,SAAQ,IAAI,CAAC,GAAG,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM;;AAE3E,cAAY,IAAI,SAAS,GAAG,MAAM,iEAC3B,IAAI;AACX,cAAY,KAAK,SAAS,GAAG,MAAM,8DAC5B,KAAK;AACZ,cAAY,YAAY,WAAW,GAAG,MAAM,mEACrC;AACP,cAAY,IAAI,MAAM,OAAO,YAAY,IAAI,MAAM,4CAA4C,IAAI,MAAM,yCACrE,YAAY;AAChD,cAAY,KAAK,MAAM,OAAO,YAAY,IAAI,MAAM,0CAA0C,KAAK,MAAM,2CACnE,YAAY;AAClD,sBAAgB;AACZ,0BAAkB;AAClB,yBAAiB,mBAA4B,IAAI,OAAO,aAAa,SAAS,WAAW;AACzF,eAAO,SAAQ,gBAAgB,KAAK,MAAM;;AAE9C,qBAAe,CAAE,GAAG,KAAK,IAAI;AAC7B,oBAAc,CAAE,SAAS,KAAA,MAAK;AAC9B,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAM,wBAAwB;;AAExE,iCAA6B,IAAG,CAAE;AClEzC;;;;;;;;;;;;;;;;AAqBO,6BAAyB;MAC5B,YAAY;MACZ,cAAc,CAAC,KAAK;MACpB,UAAU;AACN,eAAQ,WAAW,SAAS,aAAQ;AACpC,gBAAY,mBAAkB,YAAY,MAAM,iHACM;AACtD,+BAAuB;AACvB,eAAO;UACH,GAAG,MAAM,oBAAoB,IAAI,OAAO,IAAI,SAAS,SAAS;UAC9D,QAAQ,MAAM,qBAAqB,KAAK,IAAI,QAAQ,OAAO,SAAS;;;;AC/BhF;;;;;;;;;;;;;;;;AAqBO,0BAAsB;MACzB,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,KAAI,IAAI,IAAI,MAAK,GAAG,cAAc;;;AC1B5D;;;;;;;;;;;;;;;;AAoBO,2BAAuB;MAC1B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,KAAI,KAAK,MAAK,GAAG,aAAa;;;ACzBxD;;;;;;;;;;;;;;;;AAoBO,6BAAyB;MAC5B,YA
AY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAQ,MAAM,WAAW,qBAAY;AACrC,eAAO;UACH,GAAG;AACC,gCAAoB,oBAAmB,CAAC,OAAO,EAAE;AACjD,sBAAU,QAAO,IAAI,MAAM,WAAW,CAAC;AACvC,gBAAI,eAAe;AACf,oBAAM,WAAU,KAAK;;AAEzB,mBAAO;;;;;ACjCvB;;;;;;;;;;;;;;;;AAqBO,4CAAwC;MAC3C,YAAY;MACZ,cAAc,CAAC,KAAK;MACpB,UAAU;AACN,eAAQ,WAAW,SAAS,WAAK,mBAAoB;AACrD,2BAAmB,aAAa,OAAO,CAAC,GAAG,KAAK;AAChD,gBAAY,mBAA4B,aAAa,MAAM,mHAEnD;AACR,4BAAoB;AACpB,gBAAY,EAAE,SAAS,GAAG,MAAM,kFACJ,EAAE;AAC9B,gBAAY,OAAO,SAAS,GAAG,MAAM,mFACT,OAAO;AACnC,gBAAY,EAAE,MAAM,OAAO,OAAO,MAAM,IAAI,MAAM,mEACjC,EAAE,MAAM,qDACR,OAAO,MAAM;AAC9B,gBAAY,gCAAyC,SAAS,aAAa,MAAM,6FACxC,0BACjC;AACR,YAAI,mBAAmB;AACnB,kBAAY,OAAW,OAAM,MAAM,gFACZ,+BAA+B;;AAE1D,eAAO;UACH,GAAG,MAAM,mCAAmC,EAAE,OAAO,IAAI,QAAQ,SAAS,MAAK,WAAW;UAC1F,QAAQ,MAAM,oCAAoC,GAAG,IAAI,OAAO,OAAO,SAAS,MAAK,WAAW;;;;AC/C5G;;;;;;;;;;;;;;;;AAkBO,iCAA6B;MAChC,YAAY;MACZ,cAAc,CAAC,KAAK;MACpB,UAAU;AACN,4BAAoB;AACpB,4BAAoB,CAAE,GAAG,QAAQ;AACjC,6BAAqB,CAAE,GAAG,QAAQ;AAClC,eAAO;UACH,GAAG,MAAM,QAAO,UAAU,yBAAyB,aAAa;UAChE,QAAQ,MAAM,QAAO,UAAU,0BAA0B,cAAc;;;;AC3BnF;;;;;;;;;;;;;;;;AAyBO,0BAAsB;MACzB,YAAY;MACZ,cAAc,CAAC,KAAK;MACpB,UAAU;AACN,uBAAe;AACf,yBAAiB,4BAA0C,EAAE,OAAO,EAAE;AACtE,qBAAa;AACT,sBAAY,IAAI,IAAI,MAAK,GAAG;AAC5B,6BAAmB,kBAAgC,EAAE,OAAO;AAC5D,cAAI,WAAW,SAAS;AACpB,mBAAO,SAAQ,MAAI,KAAK,aAAa,EAAE;;AAE3C,iBAAO;;AAEX,qBAAa;AACT,oBAAU,KAAI,IAAI,MAAK,GAAG;AAC1B,6BAAmB,kBAAgC,EAAE,OAAO;AAC5D,cAAI,WAAW,SAAS;AACpB,kBAAM,SAAQ,MAAI,KAAK,aAAa,EAAE;;AAE1C,sBAAY,OAAO;AACnB,iBAAO,IAAI,IAAI,KAAK,MAAK,KAAK;;AAElC,eAAO,CAAE,GAAG,MAAM,GAAG;;;AChD7B;;;;;;;;;;;;;;;;AAkBO,0BAAsB;MACzB,YAAY;MACZ,eAAe,CAAC;MAChB,UAAU;AACN,oBAAY;AACZ,mCAA2B;AACvB,iBAAO,SAAQ,OAAO,IAAI;;AAE9B,uBAAe,CAAE,IAAI;AACrB,eAAO;UACH,GAAG,MAAM,QAAO,cAAc,oBAAoB,QAAQ,MAAiB;;;;AC5BvF;;;;;;;;;;;;;;;;AAqBO,0BAAsB;MACzB,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,kBAAU,KAAI,IAAI,IAAI,OAAO,MAAM,IAAI,KAAK,KAAK,KAAK;AACtD,eAAO,CAAE,GAAG,MAAM,KAAI,IAAI;;;AC3BlC;;;;;;;;;;;;;;;;AAkBO,0BAAsB;MACzB,YAAY;MACZ,eAAe,CAAC;MAChB,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,KAAI,IAAI;;;ACvBlC;;;;;;;;;;;;;;;;AAmBO,4BAAwB;MAC3B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,KAAI,IAAI,IAAI;;;ACxBtC;;;;;;;;;;;;;;;;AAkBO,4BAAwB;MAC3B,YAAY;MACZ,UAAU;AACN,eAAO,CAAE,GAAG,MAAM,WAAU;;;ACrBpC;;;;;;;;;;;;;;;;AAyBO,+BAA2B;MAC9B,YAAY;MACZ,cAAc,CAAC,KAAK;MACpB,UAAU;AACN,uBAAe;AACf,yBAAiB,4BAA2B,EAAE,OAAO,EAAE;AACvD,qBAAa;AACT,sBAAY,IAAI,IAAI,MAAK,GAAG;AAC5B,6BAAmB,kBAAiB,EAAE,OAAO;AAC7C,cAAI,WAAW,SAAS;AACpB,mBAAO,SAAQ,MAAI,KAAK,aAAa,EAAE;;AAE3C,iBAAO;;AAEX,qBAAa;AACT,oBAAU,KAAI,IAAI,MAAK,GAAG;AAC1B,6BAAmB,kBAAiB,EAAE,OAAO;AAC7C,cAAI,WAAW,SAAS;AACpB,kBAAM,SAAQ,MAAI,KAAK,aAAa,EAAE;;AAE1C,sBAAY,OAAO;AACnB,iBAAO,IAAI,IAAI,KAAK,MAAK,KAAK;;AAElC,eAAO,CAAE,GAAG,MAAM,GAAG;;;AChD7B;;;;;;;;;;;;;;;;AA0BO,qCAAiC;MACpC,YAAY;MACZ,cAAc,CAAC,KAAK,QAAQ,YAAY;MACxC,UAAU;AACN,eAAQ,mBAAoB;AAC5B,8CAAmC;AACnC,2BAAmB,UAAS,OAAO,QAAO,KAAK;AAC/C,8BAAsB,kBAAiB,MAAK,OAAO,EAAE;AACrD,0BAAkB;AAClB,YAAI,MAAK,SAAS;AACd,uBAAa,GAAG,IAAI,EAAE,MAAM,SAAS,GAAG,EAAE;AACtC,sBAAU,KAAK,EAAE,MAAM;;AAE3B,oBAAU,KAAK;;AAEnB,2BAAmB,IAAI,GAAG;AAC1B,kCAA0B,KAAI,IAAI;AAClC,oCAA4B,MAAM,MAAI,WAAU,QAAO;AACvD,+BAAuB,KAAI,KAAI,KAAI,qBAAqB,sBAAsB,sBAAsB,QAAO;AAC3G,qBAAa;AACT,cAAI,MAAK,SAAS;AACd,mBAAO,SAAQ,KAAI,KAAI,IAAI,MAAK,SAAQ,qBAAqB,CAAC,GAAG,GAAG,GAAG,MAAK,MAAM,MAAM,aAAa,aAAa,EAAE;;AAGpH,mBAAO,SAAQ,KAAI,KAAI,IAAI,sBAAsB,aAAa,EAAE;;;AAGxE,wBAAgB;AACZ,wBAAc,KAAI,KAAI,qBAAqB,QAAO,MAAM;AACxD,cAAI,MAAK,SAAS;AACd,sBAAU,MAAI,SAAS;;AAE3B,iBAAO,SAAQ,SAAS,MAAK;;AAEjC,4BAAoB;AAChB,4BAAkB,KAAI,KAAI,gBAAgB,aAAa;AACvD,cAAI,MAAK,SAAS;AACd,0BAAc,MAA
I,aAAa;;AAEnC,iBAAO,SAAQ,aAAa,MAAK;;AAErC,yBAAiB;AACb,wCAA8B,KAAI,YAAY;AAC9C,yBAAe,KAAI,IAAI;AACvB,cAAI,MAAK,SAAS;AACd,uBAAW,MAAI,UAAU;;AAE7B,iBAAO,SAAQ,UAAU,MAAK;;AAElC,0BAAkB;AACd,0BAAgB;AAChB,cAAI,MAAK,SAAS;AACd,wBAAY,MAAI,WAAW;;AAE/B,iBAAO,SAAQ,WAAW,MAAK;;AAEnC,eAAO;UACH,GAAG;UACH,MAAM;UACN,UAAU;UACV,OAAO;UACP,QAAQ;;;;ACvFpB;;;;;;;;;;;;;;;;AAsBO,6BAAyB;MAC5B,YAAY;MACZ,cAAc,CAAC,KAAK;MACpB,UAAU;AACN,6BAAqB;AACrB,eAAQ,QAAS;AACjB,2BAAmB,gBAAe,MAAM,EAAE,OAAO;AACjD,qBAAa;AACT,8BAAoB,EAAE;AACtB,8BAAoB,QAAQ;AAC5B,6BAAmB,YAAY,MAAM,GAAG;AACxC,4BAAkB,WAAW;AAC7B,6BAAmB,YAAY,MAAM,MAAM,YAAY,QAAQ,MAAM;AACrE,4BAAkB,WAAW;AAC7B,mCAAyB,WAAW,GAAG;AACvC,mCAAyB,WAAW,YAAY,GAAG,YAAY,IAAI;AACnE,8BAAoB,YAAY,CAAC,YAAY,CAAC,cAAc;AAC5D,yBAAe,SAAQ,IAAI;AAC3B,kCAAwB,SAAQ,SAAS,CAAC;AAC1C,gCAAsB,YAAY,CAAC,CAAC,YAAY,kBAAkB;AAClE,kCAAwB,WAAU,QAAQ;AAC1C,2BAAiB,mBAAmB,iBAAiB,iBAAiB,EAAE,MAAM;AAC9E,sCAA4B,wBAAuB;AACnD,uBAAa,WAAU,YAAY;AACnC,iBAAO;;AAEX,eAAO,CAAE,GAAG,MAAM,SAAS,MAAM;;;AAGzC;AACI,qBAAe;AACf,mBAAa,OAAO,IAAI,MAAM,EAAE;AAC5B,eAAO,KAAK;;AAEhB,aAAO;;AAEX;AACI,qBAAe;AACf,mBAAa,GAAG,IAAI,OAAO,QAAQ,EAAE;AACjC,qBAAa,GAAG,IAAI,OAAO,GAAG,QAAQ,EAAE;AACpC,iBAAO,KAAK,OAAO,GAAG;;;AAG9B,aAAO;;ACjEX;;;;;;;;;;;;;;;;AAkBO,mCAA+B;MAClC,YAAY;MACZ,cAAc,CAAC,KAAK;MACpB,UAAU;AACN,uBAAe;AACf,eAAO,CAAE,GAAG,MAAM,WAAU,IAAI,GAAG,MAAM,WAAU;;;ACvB3D;;;;;;;;;;;;;;;;AAkBO,+BAA2B;MAC9B,YAAY;MACZ,UAAU;AACN,eAAO,CAAE,GAAG,MAAM,MAAK,IAAI;;;ACrBnC;;;;;;;;;;;;;;;;AAkBO,+BAA2B;MAC9B,YAAY;MACZ,UAAU;AAGN,eAAO,CAAE,GAAG,MAAM,WAAU;;;ACvBpC;;;;;;;;;;;;;;;;AAkBO,4BAAwB;MAC3B,YAAY;MACZ,UAAU;AAGN,eAAO,CAAE,GAAG,MAAM,WAAU;;;ACvBpC;;;;;;;;;;;;;;;;AAkBO,4BAAwB;MAC3B,YAAY;MACZ,UAAU;AAGN,eAAO,CAAE,GAAG,MAAM,WAAU;;;ACvBpC;;;;;;;;;;;;;;;;AAmBO,4BAAwB;MAC3B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,IAAI,IAAI,MAAI,GAAG;;;ACxBzC;;;;;;;;;;;;;;;;AAmBO,0BAAsB;MACzB,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,IAAI,IAAI,MAAK,GAAG;;;ACxB1C;;;;;;;;;;;;;;;;AAqBO,iCAA6B;MAChC,YAAY;MACZ,cAAc;MACd,eAAe,CAAC;MAChB,UAAU;AACN,wBAAgB;AAChB,eAAQ,QAAS;AACjB,eAAO;UACH,QAAQ;AACJ,6BAAiB;AACjB,6BAAgB,IAAI;AACpB,mBAAO,IAAI,IAAI,KAAI,MAAI,IAAI,MAAM,WAAW;;;;;AChC5D;;;;;;;;;;;;;;;;AAmBA,yEAAqE,UAAU,WAAW,UAAU;AAChG,sBAAgB,cAAW,SAAQ,QAAQ,IAAI,GAAG,GAAG,aAAa,MAAM,OAAO;AAC/E,qBAAe,CAAE,GAAG,GAAG;AACvB,oBAAc,CAAE,aAAa,MAAM,OAAO;AAC1C,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,aAAa;;AAExE,+CAA2C,IAAG,CAAE;ACzBvD;;;;;;;;;;;;;;;;AAkBO,0BAAsB;MACzB,YAAY;MACZ,cAAc,CAAC;MACf,eAAe,CAAC;MAChB,UAAU;AACN,uBAAe;AACf,eAAQ,aAAa,MAAM,OAAO,QAAS;AAC3C,eAAO;UACH,GAAG,MAAM,mCAAmC,GAAG,GAAG,IAAI,aAAa,MAAM,OAAO;;;;AC1B5F;;;;;;;;;;;;;;;;AAwBO;AACH,UAAI,EAAE,OAAO,MAAM;AACf,YAAI,SAAQ,GAAG,sBAA+B,EAAE,OAAO;;AAE3D,UAAI,GAAG,OAAO,MAAM;AAChB,aAAK,SAAQ,IAAI,sBAA+B,GAAG,OAAO;;AAE9D,aAAO;QACH,GAAG;AACC,qBAAW,KAAI,IAAI,MAAK,MAAM,OAAO,IAAI,GAAG;AAC5C,iBAAO;;;;AClCnB;;;;;;;;;;;;;;;;AAmBO,0BAAsB;MACzB,YAAY;MACZ,cAAc,CAAC;MACf,eAAe,CAAC;MAChB,UAAU;AACN,yBAAiB;AACjB,eAAQ,oBAAqB;AAC7B,kBAAU,MAAM;AAChB,kBAAU,MAAM;AAChB,yBAAiB,gBAAoB,kBAAkB,EAAE;AACzD,wBAAgB,iBAAiB,IAAI,GAAG,GAAG;AAC3C,eAAO;UACH,GAAG;AACC,mBAAO,QAAQ;;;;;AChC/B;;;;;;;;;;;;;;;;AAqBO,8BAA0B;MAC7B,YAAY;MACZ,cAAc,CAAC,KAAK;MACpB,UAAU;AACN,uBAAe;AACf,qBAAa,MAAM,KAAI,IAAI,MAAK,aAAa,GAAG,IAAI;AACpD,qBAAa,MAAM,KAAI,IAAI,MAAK,KAAK,GAAG,IAAI;AAC5C,eAAO,CAAE,GAAG,MAAM,GAAG;;;AC5B7B;;;;;;;;;;;;;;;;AAsDA,qFAAgF,CAAC,GAAG,GAAG;AACnF,kBAAY,iBAAgB,IAAI,MAAM;AACtC,qBAAe,iBAAgB,QAAO,SAAS;AAC/C,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,iBAAW;AACX,oBAAc;AACd,qBAAe;AACf,yBAAmB;AACnB,UAAI,OAAO,SAAS;AAChB,uBAAe;AACf,eAAO,SAAQ,KAAK,CAAC,GAAG,IAAI,MAAM,IAAI,IAAI,MAAM,IA
AI,IAAI,MAAM,IAAI,IAAI,MAAM;AAC5E,kBAAU,SAAQ,QAAQ;UACtB;UAAG,OAAO,MAAM;UAAI,OAAO,MAAM;UAAI,OAAO,MAAM;UAAI,OAAO,MAAM;;AAEvE,mBAAW,SAAQ,SAAS;UACxB;UAAG,QAAQ,MAAM;UAAI,QAAQ,MAAM;UAAI,QAAQ,MAAM;UAAI,QAAQ,MAAM;;;AAG/E,cAAY,KAAK,SAAS,GAAG,MAAM,8DAC5B,KAAK;AACZ,cAAY,QAAQ,SAAS,GAAG,MAAM,iEAC/B,QAAQ;AACf,cAAY,SAAS,SAAS,GAAG,MAAM,kEAChC,SAAS;AAChB,cAAY,gCAAyC,SAAS,YAAY,MAAM,kFAClD,0BAA0B;AACxD,UAAI,mBAAmB;AACnB,gBAAY,OAAW,OAAM,MAAM,kFACL,+BAA+B;;AAEjE,sBAAgB;AACZ,yBAAiB,mBAA4B,QAAQ,OAAO,YAAY,SAAS,WAAW,MAAK;AACjG,eAAO,SAAQ,kBAAkB,MAAM,SAAS,UAAU;;AAE9D,qBAAe,CAAE,IAAI,MAAM,OAAO,SAAS,QAAQ;AACnD,oBAAc,CAAE,YAAY,SAAS,WAAW,KAAA,MAAK;AACrD,kBAAY,QAAO,cAAc,SAAS,QAAQ,MAAiB,mBAAmB;AACtF,UAAI;AACA,eAAO,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAE7E,aAAO;;AAEJ,8BAA0B,IAAG,CAAE;AChGtC;;;;;;;;;;;;;;;;AAkBO,gCAA4B;MAC/B,YAAY;MACZ,cAAc,CAAC;MACf,eAAe,CAAC;MAChB,UAAU;AACN,uBAAe;AACf,eAAQ,YAAY,SAAS,WAAW,WAAK,mBAAoB;AACjE,2BAAmB,aAAa,OAAO,CAAC,GAAG,GAAG,KAAK;AACnD,eAAO;UACH,GAAG,MAAM,kBAAkB,IAAI,GAAG,GAAG,YAAY,SAAS,YAAY,MAAK;;;;AC3BvF;;;;;;;;;;;;;;;;AA2CA;AACI,kBAAY,iBAAgB,IAAI,MAAM;AACtC,qBAAe,iBAAgB,QAAO,SAAS;AAC/C,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,cAAY,OAAO,SAAS,IAAI,MAAM,MAAM,kBAAkB,OAAO,oCAC7D,IAAI;AACZ,cAAY,IAAI,SAAS,GAAG,MAAM,4DAC3B,IAAI;AACX,cAAY,OAAO,SAAS,GAAG,MAAM,+DAC9B,OAAO;AACd,UAAI,mBAAmB;AACnB,gBAAY,OAAW,OAAM,MAAM,gFACZ,+BAA+B;;AAE1D,sBAAgB;AACZ,yBAAiB,mBAA4B,OAAO,OAAO,YAAY,SAAS,GAAmB,MAAK;AACxG,eAAO,SAAQ,gBAAgB,KAAK,QAAQ,SAAS;;AAEzD,qBAAe,CAAE,IAAI,KAAK,OAAO,QAAQ,QAAQ;AACjD,oBAAc,CAAE,YAAY,SAAS,KAAA,MAAK;AAC1C,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAM,iBAAiB;;AAEjE,4BAAwB,IAAG,CAAE;ACjEpC;;;;;;;;;;;;;;;;AAkBO,8BAA0B;MAC7B,YAAY;MACZ,cAAc,CAAC;MACf,eAAe,CAAC;MAChB,UAAU;AACN,uBAAe;AACf,eAAQ,YAAY,SAAS,aAAQ;AACrC,eAAO;UACH,GAAG,MAAM,gBAAgB,IAAI,GAAG,GAAG,YAAY,SAAS;;;;AC1BpE;;;;;;;;;;;;;;;;AAmBO,0BAAsB;MACzB,YAAY;MACZ,cAAc,CAAC;MACf,eAAe,CAAC;MAChB,UAAU;AACN,yBAAiB;AACjB,eAAQ,QAAS;AACjB,uBAAe;AACf,yBAAiB,gBAAoB,MAAM,EAAE;AAC7C,wBAAgB,iBAAiB,IAAI,GAAG,GAAG;AAC3C,eAAO;UACH,GAAG;AACC,mBAAO,QAAQ;;;;;AC/B/B;;;;;;;;;;;;;;;;AAqBO,8BAA0B;MAC7B,YAAY;MACZ,cAAc,CAAC,KAAK;MACpB,UAAU;AACN,uBAAe;AACf,qBAAa,MAAM,KAAI,IAAI,MAAK,UAAU,GAAG,IAAI;AACjD,qBAAa,MAAM,KAAI,IAAI,MAAK,QAAQ,GAAG,IAAI;AAC/C,eAAO,CAAE,GAAG,MAAM,GAAG;;;AC5B7B;;;;;;;;;;;;;;;;AAkBO,gCAA4B;MAC/B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AAGN,kBAAU,MAAM;AAChB,eAAQ,YAAa;AACrB,sBAAc,SAAS,IAAI,QAAK,GAAE;AAClC,eAAO,CAAE,GAAG,MAAM,OAAM,IAAI,OAAO,EAAE;;;AC3B7C;;;;;;;;;;;;;;;;AAwBO,0BAAsB;MACzB,YAAY;MACZ,cAAc,CAAC,KAAK;MACpB,UAAU;AACN,uBAAe;AACf,yBAAiB,4BAA2B,EAAE,OAAO,EAAE;AACvD,qBAAa;AACT,6BAAmB,kBAAiB,EAAE,OAAO;AAC7C,cAAI,WAAW,SAAS;AACpB,mBAAO,SAAQ,MAAI,IAAI,aAAa,EAAE;;AAE1C,iBAAO;;AAEX,qBAAa;AACT,sBAAY,KAAI,IAAI,IAAI,MAAM,IAAI,GAAG;AACrC,6BAAmB,kBAAiB,EAAE,OAAO;AAC7C,cAAI,WAAW,SAAS;AACpB,mBAAO,SAAQ,MAAI,KAAK,aAAa,EAAE;;AAE3C,iBAAO;;AAEX,eAAO,CAAE,GAAG,MAAM,GAAG;;;AC7C7B;;;;;;;;;;;;;;;;AAsBO,+BAA2B;MAC9B,YAAY;MACZ,cAAc,CAAC,KAAK;MACpB,UAAU;AACN,uBAAe;AACf,yBAAiB,4BAA2B,EAAE,OAAO,EAAE;AACvD,qBAAa;AACT,sBAAY,KAAI,IAAI,MAAK,GAAG;AAC5B,6BAAmB,kBAAiB,EAAE,OAAO;AAC7C,cAAI,WAAW,SAAS;AACpB,mBAAO,SAAQ,MAAI,KAAK,aAAa,EAAE;;AAE3C,iBAAO;;AAEX,qBAAa;AACT,sBAAY,KAAI,IAAI,MAAK,GAAG;AAC5B,6BAAmB,kBAAiB,EAAE,OAAO;AAC7C,cAAI,WAAW,SAAS;AACpB,mBAAO,SAAQ,MAAI,KAAK,aAAa,EAAE;;AAE3C,iBAAO;;AAEX,eAAO,CAAE,GAAG,MAAM,GAAG;;;AC5C7B;;;;;;;;;;;;;;;;AAkBO,6BAAyB;MAC5B,YAAY;MACZ,UAAU;AACN,eAAO,CAAE,GAAG,MAAM,IAAI;;;ACrB9B;;;;;;;;;;;;;;;;AAkBO,6BAAyB;MAC5B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,wBAAgB,MAAM;AACtB,eAAO,CAAE,SAAS,MAAM,OAAM,QAAQ,OAAO;;;ACvBrD;;;;;;;;;;;;;;;;AAkBO,+BAA2B;MAC9B,YAAY;MAC
Z,UAAU;AACN,eAAO,CAAE,GAAG,MAAM,WAAU;;;ACrBpC;;;;;;;;;;;;;;;;AAkBO,4BAAwB;MAC3B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AAGN,kBAAU,MAAM;AAChB,eAAQ,YAAa;AACrB,sBAAc,SAAS,IAAI,QAAK,GAAE;AAClC,eAAO,CAAE,GAAG,MAAM,OAAM,IAAI,OAAO,EAAE;;;AC3B7C;;;;;;;;;;;;;;;;AA6BO,0BAAsB;MACzB,YAAY;MACZ,cAAc,CAAC,KAAK;MACpB,eAAe,CAAC;MAChB,UAAU;AACN,0BAAkB;AAClB,sBAAa;AACb,qBAAY;AACZ,yBAAiB,4BAA0C,MAAK,OAAO,KAAI;AAC3E,wBAAgB;AACZ,2BAAiB,MAAK,MAAK;AAC3B,oBAAU,KAAI,IAAI,KAAI,UAAU,IAAI,OAAM,IAAI,UAAU,QAAO;AAC/D,6BAAmB,kBAAgC,MAAK,OAAO;AAC/D,cAAI,WAAW,SAAS;AACpB,kBAAM,MAAI,KAAK;;AAEnB,iBAAO,SAAQ,KAAK,MAAK;;AAE7B,uBAAe;AACX,4BAAkB,QAAQ,OAAM;AAChC,0BAAgB,MAAM,WAAW,MAAI,QAAO,WAAU;AACtD,oBAAU,KAAI,IAAI,KAAI,GAAG;AACzB,6BAAmB,kBAAgC,KAAI,OAAO;AAC9D,cAAI,WAAW,SAAS;AACpB,kBAAM,MAAI,KAAK;;AAEnB,iBAAO,SAAQ,KAAK,KAAI;;AAE5B,eAAO,CAAE,GAAG,SAAS,GAAG;;;ACzDhC;;;;;;;;;;;;;;;;AAwBO,4BAAwB;MAC3B,YAAY;MACZ,cAAc,CAAC,KAAK;MACpB,UAAU;AACN,2BAAmB;AACnB,qBAAa,QAAQ,GAAG;AACxB,eAAO;UACH,GAAG,MAAM,MAAM,MAAM,IAAI,KAAI,IAAI;UACjC,OAAO;AACH,sBAAU,MAAM,MAAM,WAAU,KAAK,KAAI,IAAI;AAC7C,+BAAmB,kBAAiB,MAAM,OAAO,GAAG;AACpD,gBAAI,WAAW,SAAS;AACpB,oBAAM,MAAI,KAAK;;AAEnB,mBAAO,SAAQ,KAAK,MAAM;;;;;ACtC1C;;;;;;;;;;;;;;;;AAoBO,iCAA6B;MAChC,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,IAAI,IAAI,IAAI,OAAO;;;ACzB7C;;;;;;;;;;;;;;;;AAqBO,4BAAwB;MAC3B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,qBAAa,KAAI,UAAU,GAAG,IAAI,MAAK;AACvC,eAAO,CAAE,GAAG,MAAM,KAAI,IAAI,MAAK,MAAM;;;AC3B7C;;;;;;;;;;;;;;;;AAoBO,2BAAuB;MAC1B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,KAAI,IAAI,MAAK,MAAK,IAAI;;;ACzBhD;;;;;;;;;;;;;;;;AAkBO,8BAA0B;MAC7B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,SAAQ,IAAI,EAAE;;;ACvBxC;;;;;;;;;;;;;;;;AAkBO,qCAAiC;MACpC,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,yBAAiB;AACjB,mCAA2B;AACvB,iBAAQ,gBAAiB;AACzB,iBAAO,SAAQ,uBAAuB,IAAI,QAAQ;;AAEtD,uBAAe,CAAE;AACjB,0BAAkB,MAAM,QAAO,cAAc,oBAAoB,QAAQ,MAAqB,oBAAoB;AAClH,eAAO,CAAE,QAAQ;;;AC7BzB;;;;;;;;;;;;;;;;AAkBO,4CAAwC;MAC3C,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,yBAAiB;AACjB,mCAA2B;AACvB,iBAAQ,gBAAiB;AACzB,iBAAO,SAAQ,8BAA8B,IAAI,QAAQ;;AAE7D,uBAAe,CAAE;AACjB,0BAAkB,MAAM,QAAO,cAAc,oBAAoB,QAAQ,MAAqB,2BAA2B;AACzH,eAAO,CAAE,QAAQ;;;AC7BzB;;;;;;;;;;;;;;;;AAmBO,8BAA0B;MAC7B,YAAY;MACZ,UAAU;AACN,eAAQ,QAAS;AACjB,qBAAa,gBAAe,MAAM,GAAG;AACrC,eAAO,CAAE,GAAG,MAAM,SAAQ,IAAI;;;ACxBtC;;;;;;;;;;;;;;;;AAkBO,4BAAwB;MAC3B,YAAY;MACZ,UAAU;AAGN,eAAO,CAAE,GAAG,MAAM,WAAU;;;ACvBpC;;;;;;;;;;;;;;;;AAqBO,4BAAwB;MAC3B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,IAAI,IAAI,IAAI,KAAI,IAAI,GAAG,MAAM;;;AC1BvD;;;;;;;;;;;;;;;;AAqBO,mCAA+B;MAClC,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,4BAAoB;AACpB,eAAO;UAGH,WAAW,MAAM,MAAK,WAAU,YAAY;UAC5C,GAAG,MAAM,KAAI,IAAI,MAAK,WAAW,GAAG;UACpC,GAAG,MAAM,KAAI,IAAI,MAAK,WAAW,YAAY,GAAG;;;;AC/B5D;;;;;;;;;;;;;;;;AAwBO,2BAAuB;MAC1B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO;UACH,GAAG;AACC,yBAAa,QAAQ,GAAG,QAAO;AAC/B,gCAAmB,QAAO;AAC1B,2BAAc,QAAO;AACrB,uCAA2B,KAAI,IAAI;AACnC,qCAAyB,KAAI,KAAI,IAAI,cAAa,IAAI,MAAK,GAAG;AAC9D,mBAAO,MAAM,MAAM,oBAAoB;;;;;ACpCvD;;;;;;;;;;;;;;;;AAoBO,8BAA0B;MAC7B,YAAY;MACZ,eAAe,CAAC;MAChB,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,KAAI,IAAI,KAAI,GAAG,IAAI,QAAO,IAAI;;;ACzBxD;;;;;;;;;;;;;;;;AAkBO,2BAAuB;MAC1B,YAAY;MACZ,UAAU;AACN,eAAO,CAAE,GAAG,MAAM,WAAU;;;ACrBpC;;;;;;;;;;;;;;;;AAoBO,0BAAsB;MACzB,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,KAAI,IAAI,MAAK,GAAG,aAAa;;;ACzBvD;;;;;;;;;;;;;;;;AAoBO,2BAAuB;MAC1B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,KAAI,KAAK,MAAK,GAAG,aAAa;;;ACzBxD;;;;;;;;;;;;;;;;A
AmBO,4BAAwB;MAC3B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAQ,OAAO,QAAS;AACxB,2BAAmB,EAAE;AACrB,gCAAwB,kBAAiB,GAAG,OAAO;AAMnD,yBAAiB;AACjB,qBAAa,GAAG,IAAI,GAAG,MAAM;AACzB,mBAAS,KAAK,CAAC,OAAO,IAAI,WAAW,KAAK,OAAO,KAAK,MAAM;;AAEhE,eAAO,CAAE,GAAG,MAAM,KAAI,IAAI;;;ACpClC;;;;;;;;;;;;;;;;AAoBO,8BAA0B;MAC7B,YAAY;MACZ,eAAe,CAAC;MAChB,UAAU;AACN,oBAAY;AACZ,eAAQ,OAAQ;AAChB,yBAAiB;AACjB,yBAAiB,KAAI,IAAI;AACzB,eAAO;UACH,QAAQ,MAAM,IAAI,UAAU,KAAI,MAAI,UAAU,CAAC,MAAM,WAAW;;;;AC7B5E;;;;;;;;;;;;;;;;AAmBO,+BAA2B;MAC9B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,KAAI,IAAI,SAAQ;;;ACxB1C;;;;;;;;;;;;;;;;AAkBO,qCAAiC;MACpC,YAAY;MACZ,UAAU;AACN,eAAQ,YAAY,YAAa;AACjC,eAAO,CAAE,GAAG,MAAM,eAAe,IAAI,YAAY;;;ACtBzD;;;;;;;;;;;;;;;;AAkBO,6BAAyB;MAC5B,YAAY;MACZ,UAAU;AACN,eAAQ,QAAS;AACjB,eAAO,CAAE,GAAG,MAAM,QAAO,IAAI;;;ACtBrC;;;;;;;;;;;;;;;;AAqBO,2BAAuB;MAC1B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,IAAI,IAAI,KAAI,KAAK,MAAK,GAAG,aAAa;;;AC1BhE;;;;;;;;;;;;;;;;AAmBO,6BAAyB;MAC5B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,KAAI,IAAI,KAAI,MAAK,GAAG,YAAY;;;ACxB1D;;;;;;;;;;;;;;;;AAoBO,wCAAoC;MACvC,YAAY;MACZ,cAAc,CAAC,KAAK;MACpB,UAAU;AACN,uBAAe;AACf,oBAAY,QAAO;AACnB,qBAAa,MAAM,KAAI,IAAI,KAAI,KAAK,IAAI,GAAG;AAC3C,qBAAa,MAAM,KAAI,IAAI,KAAI,KAAK,IAAI,GAAG;AAC3C,eAAO,CAAE,GAAG,MAAM,GAAG;;;AC5B7B;;;;;;;;;;;;;;;;AAkBO,2BAAuB;MAC1B,YAAY;MACZ,UAAU;AAGN,eAAO,CAAE,GAAG,MAAM,WAAU;;;ACvBpC;;;;;;;;;;;;;;;;AAqBO,0BAAsB;MACzB,YAAY;MACZ,cAAc,CAAC,KAAK;MACpB,UAAU;AACN,uBAAe;AACf,yBAAiB,4BAA0C,EAAE,OAAO,EAAE;AACtE,qBAAa;AACT,oBAAU;AACV,6BAAmB,kBAAgC,EAAE,OAAO;AAC5D,cAAI,WAAW,SAAS;AACpB,kBAAM,MAAI,KAAK;;AAEnB,iBAAO,SAAQ,KAAK,EAAE;;AAE1B,qBAAa;AACT,oBAAU;AACV,6BAAmB,kBAAgC,EAAE,OAAO;AAC5D,cAAI,WAAW,SAAS;AACpB,kBAAM,MAAI,KAAK;;AAEnB,iBAAO,SAAQ,IAAI,MAAM,EAAE;;AAE/B,eAAO,CAAE,GAAG,MAAM,GAAG;;;AC3C7B;;;;;;;;;;;;;;;;AAqBO,0BAAsB;MACzB,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,gCAAwB,EAAE,MAAM;AAChC,eAAQ,QAAS;AACjB,qBAAa,gBAAe,MAAM,EAAE;AACpC,aAAK,QAAQ;AACT,0BAAgB,SAAQ;;AAE5B,2BAAmB,SAAQ,IAAI;AAC/B,qBAAa,KAAI,YAAY,OAAK,EAAE,OAAO;AAC3C,eAAO,CAAE,GAAG,MAAM;;;AClC1B;;;;;;;;;;;;;;;;AAoBO,0BAAsB;MACzB,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,IAAI,IAAI,OAAO,IAAI;;;ACzB7C;;;;;;;;;;;;;;;;AAqBO,2BAAuB;MAC1B,YAAY;MACZ,eAAe,CAAC;MAChB,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,KAAI,IAAI,QAAO,IAAI,OAAO,KAAK;;;AC1BzD;;;;;;;;;;;;;;;;AAoBO,2BAAuB;MAC1B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAQ,QAAS;AACjB,qBAAa;AACT,sBAAY,WAAU;AAGtB,cAAI,EAAE,SAAS;AACX,yBAAa,GAAG,IAAI,KAAK,IAAI,EAAE;AAC3B,sBAAQ,MAAI,OAAO,OAAM,IAAI,CAAC,IAAI,EAAE,MAAM,KAAK,CAAC,EAAE,MAAM;;qBAGvD,EAAE,SAAS;AAChB,yBAAa,GAAG,IAAI,KAAK,IAAI,EAAE;AAC3B,2BAAa,GAAG,IAAI,KAAK,IAAI,EAAE;AAC3B,wBAAQ,MAAI,OAAO,OAAM,IAAI,CAAC,IAAI,EAAE,MAAM,IAAI,IAAI,EAAE,MAAM,KAAK;kBAC3D,EAAE,MAAM;kBAAI,EAAE,MAAM;;;;qBAK3B,EAAE,SAAS;AAChB,yBAAa,GAAG,IAAI,KAAK,IAAI,EAAE;AAC3B,2BAAa,GAAG,IAAI,KAAK,IAAI,EAAE;AAC3B,6BAAa,GAAG,IAAI,KAAK,IAAI,EAAE;AAC3B,0BACI,MAAI,OAAO,OAAM,IAAI,CAAC,IAAI,EAAE,MAAM,IAAI,IAAI,EAAE,MAAM,IAAI,IAAI,EAAE,MAAM,KAAK,CAAC,EAAE,MAAM,IAAI,EAAE,MAAM,IAAI,EAAE,MAAM;;;;qBAKnH,EAAE,SAAS;AAChB,yBAAa,GAAG,IAAI,KAAK,IAAI,EAAE;AAC3B,2BAAa,GAAG,IAAI,KAAK,IAAI,EAAE;AAC3B,6BAAa,GAAG,IAAI,KAAK,IAAI,EAAE;AAC3B,+BAAa,GAAG,IAAI,KAAK,IAAI,EAAE;AAC3B,4BACI,MAAI,OAAO,OAAM,IAAI;sBACjB,IAAI,EAAE,MAAM;sBAAI,IAAI,EAAE,MAAM;sBAAI,IAAI,EAAE,MAAM;sBAC5C,IAAI,EAAE,MAAM;uBACb,CAAC,EAAE,MAAM,IAAI,EAAE,MAAM,IAAI,EAAE,MAAM,IAAI,EAAE,MAAM;;;;;;AAOpE,kBAAM,IAAI,MAAM,2DACT,EAAE;;AAEb,iBAAO;;AAEX,eAAO,CAAE,GAAG;;;AC3EpB;;;;;;;;;;;;;;;;AAmBO,g
CAA4B;MAC/B,YAAY;MACZ,UAAU;AACN,+BAAuB;AACvB,eAAQ,QAAS;AACjB,yBAAiB,wBAAiC;AAClD,eAAO,CAAE,GAAG,MAAM,WAAU,IAAI;;;ACzBxC;;;;;;;;;;;;;;;;AAkBO,6BAAyB;MAC5B,YAAY;MACZ,UAAU;AACN,4BAAoB;AACpB,eAAQ,QAAS;AACjB,eAAO,CAAE,OAAO,MAAM,MAAM,IAAI;;;ACvBxC;;;;;;;;;;;;;;;;AA0BO,yCAAqC;MACxC,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,6BAAqB;AACrB,qBAAa;AACT,iBAAO,oBAAoB,IAAI;;AAEnC,eAAO,CAAE,GAAG;;;AAGpB;AAII,iCAA2B,QAAQ,SAAS,WAAU;AACtD,uBAAiB,OAAO,GAAG;AAC3B,uBAAiB,aAAa,SAAS,QAAO,GAAG;AACjD,uBAAiB,SAAS,OAAO,WAAW;AAC5C,mBAAa,GAAG,IAAI,UAAU,EAAE;AAC5B,qBAAa,WAAW,YAAY,IAAI;;AAE5C,mBAAa,WAAW,YAAY,OAAK,SAAS,OAAO;AACzD,wBAAkB,WAAU;AAC5B,aAAO,MAAM,YAAY,UAAU;;AClDvC;;;;;;;;;;;;;;;;AAkBO,gCAA4B;MAC/B,YAAY;MACZ,UAAU;AACN,eAAO,CAAE,GAAG,MAAM,WAAU;;;ACrBpC;;;;;;;;;;;;;;;;AAiHA,wBAAoB;MAChB;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;;AAEJ,iCAA6B;AACzB,uBAAiB;;ACvNrB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,IAAI;;ACrBf;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,KAAK;;ACrBhB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,QAAQ;AACrB,WAAK;AACL,aAAO,MAAM;;ACrBjB;;;;;;;;;;;;;;;;AAsBA,YAAO,UAAU,YAAY;AACzB,WAAK;AACL,aAAO,UAAU,MAAM;;ACxB3B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,MAAI,MAAM;;ACpBrB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,IAAI,MAAM,MAAM;;ACpB3B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,IAAI,MAAM,MAAM;;ACpB3B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,SAAS;AACtB,WAAK;AACL,aAAO,OAAO,MAAM;;ACpBxB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,SAAS;AACtB,WAAK;AACL,aAAO,OAAO,MAAM;;ACpBxB;;;;;;;;;;;;;;;;AAsBA,YAAO,UAAU,WAAW;AACxB,WAAK;AACL,cAAO,KAAK,SAAS,GAAG,MAAM;AAC9B,aAAO,SAAQ,MAAM;;ACzBzB;;;;;;;;;;;;;;;;AA0BA,YAAO,UAAU,SAAS;AACtB,WAAK;AACL,aAAO,MAAK,MAAM;;AC5BtB;;;;;;;;;;;;;;;;AAqBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,SAAQ,MAAM,CAAC,KAAK;;ACvB/B;;;;;;;;;;;;;;;;AAyBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,SAAQ,MAAM,CAAC,MAAM;;AC3BhC;;;;;;;;;;;;;;;;AA0BA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,SAAQ,MAAM,CAAC,MAAM,SAAS;;AC5BzC;;;;;;;;;;;;;;;;AA2BA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,SAAQ,MAAM,CAAC,MAAM,SAAS,OAAO;;AC7BhD;;;;;;;;;;;;;;;;AA6BA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,SAAQ,MAAM,CAAC,MAAM,SAAS,OAAO,QAAQ;;AC/BxD;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,KAAK;;ACrBhB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,QAAQ;AACrB,WAAK;AACL,aAAO,MAAM;;ACrBjB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,KAAK;;ACrBhB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,QAAQ;AACrB,WAAK;AACL,aAAO,MAAM,MAAM;;ACpBvB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,QAAQ;AACrB,WAAK;AACL,aAAO,MAAM;;ACrBjB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,UAAU;AACvB,WAAK;AACL,aAAO,SAAQ,MAAM,YAAY,SAAS,MAAK;;ACpBnD;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,iBAAiB;AAC9B,WAAK;AACL,aAAO,eAAe,MAAM,YAAY;;ACpB5C;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,YAAY;AACzB,WAAK;AACL,aAAO,UAAU,MAAM,OAAM,WAAU,QAAQ,QAAO;;ACpB1D;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,cAAc;AAC3B,WAAK;AACL,aAAO,YAAY,MAAM;;ACpB7B;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,MAAK,MAAM;;ACrBtB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,KAAK;;ACrBhB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,cAAc;AAC3B,WAAK;AACL,aAAO,YAAY,MAAM,MAAK;;ACrBlC;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,SAAS;AAC
tB,WAAK;AACL,UAAI,aAAa;AACb,YAAI,CAAC;;AAET,aAAO,QAAO,CAAC,MAAM,GAAG,IAAI;;ACvBhC;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,SAAS;AACtB,WAAK;AACL,aAAO,OAAO,MAAM,QAAQ,QAAQ,MAAK,YAAY,UAAU;;ACpBnE;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,kBAAkB;AAC/B,WAAK;AACL,aAAO,gBAAgB,MAAM,QAAQ,aAAa,SAAS,MAAK;;ACpBpE;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,SAAS;AACtB,WAAK;AACL,aAAO,QAAO,MAAM,QAAQ,SAAS,MAAK,YAAY,WAAW;;ACpBrE;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,IAAI;;ACrBf;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,KAAK;;ACrBhB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,SAAS;AACtB,WAAK;AACL,aAAO,QAAO,MAAM,MAAM,WAAW;;ACpBzC;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,eAAe;AAC5B,WAAK;AACL,aAAO,cAAa,MAAM,WAAW;;ACpBzC;;;;;;;;;;;;;;;;AAsBA,YAAO,UAAU,kBAAkB;AAC/B,uBAAgB;AAChB,WAAK;AACL,aAAO,iBAAgB,MAAM,QAAQ,SAAS,MAAK,YAAY,WAAW;;ACzB9E;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,kBAAkB;AAC/B,WAAK;AACL,aAAO,iBAAgB,MAAM,QAAQ,SAAS,MAAK,YAAY,WAAW;;ACpB9E;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,aAAa;AAC1B,WAAK;AACL,aAAO,WAAW,MAAM,QAAQ,SAAS,MAAK,WAAW;;ACpB7D;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,WAAW;AACxB,WAAK;AACL,aAAO,SAAS,MAAM;;ACpB1B;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,YAAY;AACzB,WAAK;AACL,aAAO,UAAU,MAAM;;ACrB3B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,IAAI,MAAM;;ACpBrB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,KAAI,MAAM;;ACpBrB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,KAAI;;ACpBf;;;;;;;;;;;;;;;;AAsBA,YAAO,UAAU,cAAc;AAC3B,WAAK;AACL,aAAO,YAAY,MAAM;;ACxB7B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,QAAQ;AACrB,WAAK;AACL,aAAO,MAAM,MAAM;;ACpBvB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,IAAI;;ACrBf;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,IAAI;;ACrBf;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,aAAa;AAC1B,WAAK;AACL,aAAO,WAAW,MAAM;;ACpB5B;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,QAAQ;AACrB,WAAK;AACL,aAAO,MAAM;;ACrBjB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,IAAI;;ACrBf;;;;;;;;;;;;;;;;AAqBA,YAAO,UAAU,UAAU;AACvB,WAAK;AACL,aAAO,SAAQ,MAAM,CAAC,KAAK;;ACvB/B;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,QAAQ;AACrB,WAAK;AACL,aAAO,MAAM;;ACrBjB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,WAAW;AACxB,WAAK;AACL,aAAO,SAAS,MAAM;;ACpB1B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,SAAS;AACtB,WAAK;AACL,aAAO,OAAO,MAAM,SAAS;;ACpBjC;;;;;;;;;;;;;;;;AAsBA,YAAO,UAAU,qBAAqB;AAClC,WAAK;AACL,aAAO,mBAAmB,MAAM;;ACxBpC;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,eAAe;AAC5B,WAAK;AACL,aAAO,aAAa,MAAM;;ACpB9B;;;;;;;;;;;;;;;;AAsBA,YAAO,UAAU,gBAAgB;AAC7B,WAAK;AACL,aAAO,cAAc,MAAM;;ACxB/B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,UAAU;AACvB,WAAK;AACL,aAAO,QAAQ,MAAM;;ACpBzB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,KAAK;;ACrBhB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,QAAQ;AACrB,WAAK;AACL,aAAO,MAAM;;ACrBjB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,WAAW;AACxB,WAAK;AACL,aAAO,WAAS;;ACrBpB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,QAAQ;AACrB,WAAK;AACL,aAAO,MAAM;;ACrBjB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,QAAQ;AACrB,WAAK;AACL,aAAO,QAAM;;ACrBjB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,YAAY;AACzB,WAAK;AACL,aAAO,UAAU,MAAM;;ACpB3B;;;;;;;;;;;;;;;;AAsBA,YAAO,UAAU,kBAAkB;AAC/B,WAAK;AACL,aAAO,gBAAgB,MAAM;;ACxBjC;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,YAAY;AACzB,WAAK;AACL,aAAO,UAAU,MAAM;;ACpB3B;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,aAAa;AAC1B,WAAK;AACL,aAAO,WAAW,MAAM;;ACrB5B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,KAAK,MAAM;;ACpBtB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,6BAA6B;AAC1C,WAAK;AACL,aAAO,2BAA2B,MAAM,aAAa,MAAM,OAAO;;ACpBtE;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,aAAa;AAC1B,WAAK;AACL,aAAO,WAAW;;ACrBtB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,aAAa;AAC1B,WAAK;AACL,aAAO,WAAW,MAAM;;ACrB5B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,YAAY;AACzB,WAAK;AACL,aAAO,UAAU,MAAM,MAAM;;ACpBjC;;;;;;;;;;;;;;;;AAmB
A,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,MAAI;;ACrBf;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,QAAQ;AACrB,WAAK;AACL,aAAO,MAAM;;ACrBjB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,aAAa;AAC1B,WAAK;AACL,aAAO,WAAW,MAAM;;ACpB5B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,aAAa;AAC1B,WAAK;AACL,aAAO,WAAW;;ACpBtB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,YAAY;AACzB,WAAK;AACL,aAAO,UAAU,MAAM;;ACpB3B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,aAAa;AAC1B,WAAK;AACL,aAAO,WAAW,MAAM;;ACpB5B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,SAAS;AACtB,WAAK;AACL,aAAO,OAAO,MAAM,GAAG,YAAY;;ACpBvC;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,UAAU;AACvB,WAAK;AACL,aAAO,SAAQ,MAAM,YAAY,SAAS,MAAK;;ACpBnD;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,KAAI,MAAM,MAAM;;ACpB3B;;;;;;;;;;;;;;;;AAsBA,YAAO,UAAU,gBAAgB;AAC7B,WAAK;AACL,aAAO,cAAc,MAAM;;ACxB/B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,UAAU;AACvB,WAAK;AACL,aAAO,QAAQ,MAAM;;ACpBzB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,KAAK,MAAM,MAAM;;ACpB5B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,KAAI,MAAM,MAAM;;ACpB3B;;;;;;;;;;;;;;;;AAsBA,YAAO,UAAU,gBAAgB;AAC7B,WAAK;AACL,aAAO,cAAc,MAAM;;ACxB/B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,UAAU;AACvB,WAAK;AACL,aAAO,QAAQ,MAAM;;ACpBzB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,YAAY;AACzB,WAAK;AACL,aAAO,UAAU,MAAM,UAAU;;ACpBrC;;;;;;;;;;;;;;;;AAsBA,YAAO,UAAU,YAAY;AACzB,WAAK;AACL,aAAO,UAAU,MAAM;;ACxB3B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,IAAI,MAAM;;ACpBrB;;;;;;;;;;;;;;;;AAsBA,YAAO,UAAU,YAAY;AACzB,WAAK;AACL,aAAO,UAAU,MAAM;;ACxB3B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,KAAI,MAAM;;ACpBrB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,IAAI;;ACrBf;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,KAAK,MAAM,KAAK,MAAM;;ACrBjC;;;;;;;;;;;;;;;;AAsBA,YAAO,UAAU,iBAAiB;AAC9B,WAAK;AACL,aAAO,eAAe,MAAM;;ACxBhC;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,WAAW;AACxB,WAAK;AACL,aAAO,SAAS,MAAM;;ACpB1B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,SAAS,0BAA2B,cAAc;AAC/D,WAAK;AACL,aAAO,QAAO,MAAM,OAAO,SAAS;;ACpBxC;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,WAAW;AACxB,WAAK;AACL,aAAO,UAAS;;ACrBpB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,KAAI,MAAM,UAAU;;ACpB/B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,KAAK,MAAM,aAAa,aAAa,SAAS,cAAc;;ACpBvE;;;;;;;;;;;;;;;;AAsBA,YAAO,UAAU,YAAY;AACzB,WAAK;AACL,aAAO,UAAU,MAAM;;ACxB3B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,IAAI,MAAM;;ACpBrB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,QAAQ;AACrB,WAAK;AACL,aAAO,OAAM,MAAM;;ACpBvB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,KAAK,MAAM,MAAM;;ACpB5B;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,aAAa;AAC1B,WAAK;AACL,aAAO,WAAW;;ACrBtB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,MAAK;;ACpBhB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,QAAQ;AACrB,WAAK;AACL,aAAO,OAAM;;ACpBjB;;;;;;;;;;;;;;;;AAyBA,YAAO,UAAU,YAAY;AACzB,WAAK;AACL,aAAO,SAAQ,MAAM,EAAE;;AC3B3B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,UAAU;AACvB,WAAK;AACL,aAAO,SAAQ,MAAM;;ACpBzB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,iBAAiB;AAC9B,WAAK;AACL,aAAO,gBAAe,MAAM,YAAY;;ACpB5C;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,wBAAwB;AACrC,WAAK;AACL,aAAO,sBAAsB,MAAM,YAAY;;ACpBnD;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,UAAU;AACvB,WAAK;AACL,aAAO,SAAQ,MAAM;;ACpBzB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,KAAK;;ACrBhB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,QAAQ;AACrB,WAAK;AACL,aAAO,MAAM;;ACrBjB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,QAAQ;AACrB,WAAK;AACL,aAAO,MAAM;;ACrBjB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,KAAK;;ACpBhB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,kBAAkB;AAC/B,WAAK;AACL,aAAO,gBAAgB,MAAM,iBAAiB,iBAAiB,SAAS,MAAK,UAAU;;ACpB3F;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,UAAU;AACvB,WAAK;AACL,aAAO,SAAQ;;ACrBnB;;;;;;;;;;;;;;;;AAmBA,YA
AO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,KAAK;;ACrBhB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,IAAI;;ACrBf;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,KAAK;;ACrBhB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,QAAQ;AACrB,WAAK;AACL,aAAO,OAAM,MAAM,OAAO;;ACrB9B;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,UAAU;AACvB,WAAK;AACL,aAAO,SAAQ,MAAM;;ACrBzB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,WAAW;AACxB,WAAK;AACL,aAAO,SAAS;;ACrBpB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,iBAAiB;AAC9B,WAAK;AACL,aAAO,eAAe,MAAM,YAAY;;ACpB5C;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,QAAQ;AACrB,WAAK;AACL,aAAO,OAAM,MAAM,iBAAiB;;ACpBxC;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,KAAK;;ACrBhB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,SAAS;AACtB,WAAK;AACL,aAAO,OAAO;;ACrBlB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,oBAAoB;AACjC,WAAK;AACL,aAAO,kBAAkB,MAAM;;ACpBnC;;;;;;;;;;;;;;;;AAsBA,YAAO,UAAU,0BAA0B;AACvC,WAAK;AACL,aAAO,wBAAwB,MAAM;;ACxBzC;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,UAAU;AACvB,WAAK;AACL,aAAO,QAAQ,MAAM;;ACpBzB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,QAAQ;AACrB,WAAK;AACL,iCAA2B,aAAa,UAAS,CAAC,MAAM,KAAK,CAAC,MAAM,GAAG;AACvE,aAAO,MAAM,oBAAoB;;ACrBrC;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,MAAK,MAAM;;ACrBtB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,eAAe;AAC5B,WAAK;AACL,aAAO,cAAa,MAAM,OAAO,KAAK,SAAS,WAAW,SAAS,cAAc,aAAa;;ACrBlG;;;;;;;;;;;;;;;;AAsBA,YAAO,UAAU,YAAY;AACzB,WAAK;AACL,aAAO,UAAU,MAAM;;ACxB3B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,IAAI,MAAM;;ACpBrB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,MAAI,MAAM,MAAM;;ACpB3B;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,IAAI;;ACrBf;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,OAAK;;ACrBhB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,MAAK,MAAM;;ACpBtB;;;;;;;;;;;;;;;;AAuBA,YAAO,UAAU,SAAS;AACtB,WAAK;AACL,aAAO,MAAK,MAAM;;ACzBtB;;;;;;;;;;;;;;;;AAuBA,YAAO,UAAU,UAAU;AACvB,WAAK;AACL,aAAO,MAAK,MAAM;;ACzBtB;;;;;;;;;;;;;;;;AAuBA,YAAO,UAAU,QAAQ;AACrB,WAAK;AACL,aAAO,MAAK,MAAM;;ACzBtB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,KAAK,MAAM,GAAG;;ACpBzB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,YAAY;AACzB,WAAK;AACL,aAAO,WAAU,MAAM;;ACpB3B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,SAAS;AACtB,WAAK;AACL,aAAO,OAAO,MAAM;;ACpBxB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,qBAAqB;AAClC,WAAK;AACL,aAAO,mBAAmB,MAAM,YAAY;;ACpBhD;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,UAAU;AACvB,WAAK;AACL,aAAO,QAAQ,MAAM;;ACpBzB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,QAAQ;AACrB,WAAK;AACL,aAAO,MAAM,WAAW,MAAM;;ACpBlC;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,YAAY;AACzB,WAAK;AACL,aAAO,WAAU;;ACrBrB;;;;;;;;;;;;;;;;ACAA;;;;;;;;;;;;;;;;ACAA;;;;;;;;;AAUA;AAIO;AACH,UAAI,YAAY;AACZ,mBAAW,WAAU;;AAEzB,aAAO;;AAMJ;AACH,iBAAW;;AAKR;AACH,aAAO;;AC/BX;;;;;;;;;iCAqBoC;MAChC;AACI,cAAM;AAEN,eAAO,eAAe,MAAM,eAAe;;;+BAMjB;MAC9B;AACI,cAAM;AAEN,eAAO,eAAe,MAAM,aAAa;;;6BAMjB;MAC5B;AACI,cAAM;AAEN,eAAO,eAAe,MAAM,WAAW;;;sCAMN;MACrC;AACI,cAAM;AAEN,eAAO,eAAe,MAAM,oBAAoB;;;iCAMpB;MAChC;AACI,cAAM;AAEN,eAAO,eAAe,MAAM,eAAe;;;6BAMnB;MAC5B;AACI,cAAM;AAEN,eAAO,eAAe,MAAM,WAAW;;;AC3E/C;;;;;;;;;AAkBO;AACH,UAAI,MAAM,QAAQ;AAEd,uBAAe;AACf,qBAAa,GAAG,IAAI,WAAW;AAC3B,qBAAW,SAAS,OAAO;;AAE/B,eAAO;;AAGP,yBAAiB,IAAI,MAAM;AAC3B,iBAAS,KAAK;AACd,eAAO;;;AAGR;AACH,UAAI,CAAC;AACD,cAAM,IAAI,eAAe;;;AAM1B;AACH,oBAAc;AACd,yBAAmB;AACf,YAAI,SAAS;AACT;;;AAGR,aAAO;;AAOJ;AACH,UAAI,GAAG,WAAW;AACd,eAAO,GAAG;;AAEd,aAAO;;AAWJ;AACH,UAAI,MAAM,QAAQ;AACd,eAAO;;AAEX,aAAO,CAAC;;AAML;AACH,yBAAmB,OAAO;AAC1B,mBAAa;AACb,wBAAkB;AACd,YAAI,IAAI,MAAM;AACV,gBAAM,IAAI,WAAW,UAAU;;AAEnC,YAAI,WAAW;AACX,mBAAS,SAAS;;AAEtB,iBAAS,GAAG,SAAS,KAAK,IAAI,IAAI;;AAEtC,aAAO;;AAMJ;AACH,2BAAqB,KAAK,QAAQ,wBAAwB;AAC1D,uBAAiB,aAAa,QAAQ,mBAAmB,SAAS;AAKlE,UAAI,SAAS,
OAAO;AAChB,eAAO;;AAEX,aAAO,YAAY;;AAEhB;AAEH,UAAI,WAAW,UAAU;AACrB,eAAO;;AAGX,UAAI,WAAW,QAAQ,SAAS;AAC5B,eAAO;;AAEX,aAAO,WAAW,QAAQ,eAAe,WAAW,GAAG;;AAG3D,iCAA6B;AACtB;AACH,UAAI,cAAa,QAAQ,cAAa;AAClC,eAAO;;AAEX,mBAAa;AACb,WAAK,eAAe,UAAS;AAC7B,WAAK,YAAY,UAAS;AAC1B,aAAO;;AAaX;AACI,UAAI,WAAU,QAAQ,OAAO,YAAW;AACpC;iBAEK,MAAM,QAAQ;AACnB,gBAAO,QAAQ,gBAAc,8BAA8B;;AAG3D,uBAAe,OAAO,KAAK;AAC3B,4BAAoB;AAChB,wBAAc,QAAO;AACrB,cAAI,SAAS,QAAQ,OAAO,UAAU;AAClC,gBAAI,CAAC,MAAM,QAAQ,UAAU,MAAM,YAAY,aAC3C,OAAO,MAAM,aAAa;AAC1B,sBAAO,SAAS,MAAM;;AAGtB,4CAA8B;;;;;;AAmB3C,gEAA4D,oBAAoB,0BAA0B,2BAA2B;AAExI,UAAI,OAAO,eAAe;AACtB,6BAAqB;AACrB;AACA,YAAI,gBAAgB;AAChB,eAAK,cAAc;mBAEd,gBAAgB;AACrB,eAAK,uBAAuB;;AAG5B,eAAK,cAAc;AACnB,cAAI,MAAM;AACN,kBAAM,IAAI,WAAW,WAAW,wBAAwB;SAE1C;gBAGO;;;AAM7B,eAAO;;AAIP,wBAAe;AACf,YAAI,QAAO,gBAAgB,QAAQ,QAAO,aAAa;AACnD,gBAAM,IAAI,WAAW,GAAG,gDACjB,KAAK,UAAU;;;AAG1B,0BAAkB,QAAO;AACzB;AACA,YAAI,aAAa;AACb,WAAC,KAAK,cAAc,cAAc;mBAE7B,aAAa;AAClB,WAAC,KAAK,cAAc,uBAAuB;mBAEtC,aAAa;AAClB,WAAC,KAAK,cAAc,cAAc;;AAEtC,YAAI,OAAO;AACP,gBAAM,IAAI,WAAW,WAAW,wBAAwB;SAE1C;gBAGO;;AAKzB,YAAI,cAAc;AAMd,wCAA8B;AAC9B,4BAAkB,OAAO,KAAK;AAC1B,kCAAsB,OAAO,uBAAuB;;AAExD,4BAAkB,OAAO,KAAK;AAC1B,kCAAsB,OAAO,cAAc;;AAG/C,+BAAqB,QAAO;AAC5B,uBAAa,mBAAmB;AAChC,sCAA4B,OAAO,OAAO,IAAI;AAC9C,4BAAkB,OAAO,KAAK;AAC1B,mCAAuB,OAAO,cAAc;;AAEhD,wCAA8B,QAAO;AACrC,4BAAkB,WAAW,KAAK,QAAO,WAAW,eAAe;AACnE,mCAAyB,OAAO,OAAO,IAAI;AAC3C,iBAAO;;AAMP,sCAA4B,OAAO,OAAO,IAAI;AAC9C,4BAAkB,OAAO,KAAK;AAC1B,mCAAuB,OAAO,cAAc;;AAKhD,4BAAkB,IAAI,IAAI,QAAO;AACjC,mCAAyB,OAAO,OAAO,IAAI;AAC3C,iBAAO;;;;AASZ;AACH,aAAQ,IAAI,IAAK,KAAO,IAAI,IAAK,IAAI;;AAOlC;AACH,aAAO,KAAK,cAAc,GAAG;;AAO1B;AACH,cAAQ;aACC;AACD,iBAAO;;AAEP,gBAAM,IAAI,WAAW,kBAAkB;;;AAS5C;AACH,UAAI,MAAM,QAAQ,MAAM;AACpB,eAAO,OAAO;;AAElB,UAAI,GAAG,WAAW,GAAG;AACjB,eAAO;;AAEX,mBAAa,GAAG,IAAI,GAAG,QAAQ,EAAE;AAC7B,YAAI,GAAG,OAAO,GAAG;AACb,iBAAO;;;AAGf,aAAO;;AAOJ;AACH,UAAI,MAAM;AACN,eAAO;;AAEX,kBAAY;AAEZ,sBAAgB;AACZ,YAAI,IAAI,QAAQ,OAAO;AACnB,cAAI,KAAK;;;AAGjB,aAAO;;AAQJ;AACH,UAAI,OAAO;AACP,cAAM,IAAI,WAAW,yBAAyB,KAAK,UAAU;;AAEjE,wBAAkB;AACd,YAAI,IAAI,eAAe;AACnB,iBAAO;;;AAGf,aAAO;;AASJ;AACH,UAAI,SAAS;AACT;;AAEJ,UAAI,OAAO,QAAQ,SAAS;AACxB,cAAM,IAAI,WAAW,GAAG,wBAAwB,4BAA4B;;;AAkB7E,kEAA8D,eAAe;AAChF,eAAO,aAAa;AACpB,eAAO,aAAa;AACpB,aAAQ,MAAM,QAAQ,MAAM,EAAE,UAAU,aAAa,EAAE,UAAU,aAC7D,EAAE,MAAM,OAAK,OAAO,MAAM;;AAU3B;AACH,UAAI,MAAM,QAAQ;AACd,gBAAY,MAAM,SAAS,GAAG,MAAM,GAAG;AACvC,cAAM,QAAQ,UAAU,sBAAsB,GAAG,WAAW,IAAI,QAAQ;;AAGxE,gBAAY,OAAO,UAAU,UAAU,QAAQ,GAAG,MAAM,YAAY,0CAC7D,uBAAuB;;;AAc/B;AACH,UAAI,UAAU;AACV,eAAO;iBAEF,MAAM,QAAQ;AACnB,eAAO,MAAM,MAAM,IAAI,OAAK,uBAAuB,IAAI,KAAK,OAAO;iBAE9D,OAAO,UAAU;AACtB,eAAO,IAAI;;AAGX,eAAO,GAAG;;;AAYX;AACH,qBAAe;AACf;AACA,iBAAW;AACP,sBAAY;AACZ,YAAI,QAAM,WAAW;AACjB,iBAAO;;AAEX,mBAAW;AACX,qBAAa,EAAE,GAAG;AAClB,eAAO;;AAEX,aAAO;;AAQJ;AACH,UAAI,mBAAmB;AACnB,eAAO;;AAEX,UAAI,mBAAmB;AACnB,eAAO;;AAEX,UAAI,mBAAmB;AACnB,eAAO;;AAEX,aAAO;;AAiBJ;AACH,eAAO,cAAc,SAAS,GAAG;AACjC,2BAAqB;AACjB,iBAAO,MAAM,QAAQ,SAAS;AAC9B,iBAAO,OAAO,SAAS,GAAG;;AAE9B,aAAO,cAAc,OAAO;AACxB,YAAI,SAAS,WAAW;AACpB,iBAAO,OAAO,IAAI,WAAS,CAAC;;AAEhC,eAAO,OACF,IAAI;AACL,iBAAO,SAAS,IAAI,eAAe,CAAC,GAAG,WAAW;WAEjD,OAAO;AACR,iBAAO,iBAAiB,OAAO;WAChC;SACJ;;AClgBP;;;;;;;;;AAiBA;AACI,aAAO,KAAK,MAAM,KAAS,MAAQ,KAAQ,GAAG,IAAI,MAAM;;6BAW5B;MAC5B;AACI,eAAO;;;0BAGc;MACzB;AACI;AACA,aAAK,kBAAkB;AACvB,aAAK,cAAc;AACnB,aAAK,WACD,KAAK,YAAY,OAAO,KAAK,WAAW,KAAK;AACjD,aAAK,OAAO,KAAK,QAAQ,OAAO,KAAK,OAAO,KAAK;;MAErD;AACI,eAAO,KAAK;AACR,wBAAc,YAAY,GAAG,KAAK;AAClC,0BAAgB,YAAgB,OAAO,GAAG,KAAK;AAC/C,iBAAO,KAAQ,GAAG,IAAQ,SAAS,MAAQ,WAAW;;;MAG9D;AACI,eAAO,CAAE,UAAU,KAAK,UAAU,M
AAM,KAAK;;;AAIrD,YAAQ,YAAY;AACpB,kBAA4B;2BACE;MAC1B;AACI;AACA,aAAK,cAAc;AACnB,aAAK,OAAO,KAAK,QAAQ,OAAO,KAAK,OAAO,KAAK;;MAErD;AACI,eAAO,KAAK,MAAM,IAAQ,GAAG,MAAQ,WAAW,YAAY,GAAG,KAAK;;MAExE;AACI,eAAO,CAAE,MAAM,KAAK;;;AAI5B,aAAS,YAAY;AACrB,kBAA4B;yBACA;MACxB;AACI,eAAO,MAAS;;;AAIxB,WAAO,YAAY;AACnB,kBAA4B;6BACI;MAC5B;AACI;AACA,aAAK,kBAAkB;AACvB,aAAK,kBAAkB;AACvB,aAAK,cAAc;AACnB,aAAK,cAAc;AACnB,aAAK,WACD,KAAK,YAAY,OAAO,KAAK,WAAW,KAAK;AACjD,aAAK,WACD,KAAK,YAAY,OAAO,KAAK,WAAW,KAAK;AACjD,aAAK,OAAO,KAAK,QAAQ,OAAO,KAAK,OAAO,KAAK;AACjD,aAAK,OAAO,KAAK,QAAQ,OAAO,KAAK,OAAO,KAAK;;MAErD;AACI,eAAO,KAAK;AACR,wBAAc,YAAY,GAAG,KAAK;AAClC,0BAAgB,MAAQ,KAAQ,KAAK,MAAM,YAAgB,OAAO,KAAK,UAAU,KAAK,YAAY,KAAQ,IAAM,KAAK,MAAM;AAC3H,iBAAO,KAAQ,GAAG,IAAQ,SAAS,MAAQ,WAAW;;;MAG9D;AACI,eAAO;UACH,UAAU,KAAK;UACf,UAAU,KAAK;UACf,MAAM,KAAK;UACX,MAAM,KAAK;;;;AAKvB,eAAW,YAAY;AACvB,kBAA4B;AAGrB,sDAAkD;MACrD,SAAW;MACX,YAAc;MACd,QAAU;MACV,UAAY;;AAET;AACH,aAAO,qBAAqB;;AAEzB,4DAAuD;AAC1D,aAAO,uBAAuB,SAAQ,iBAA+B,SAAS,cAAc,eAAe;;AAExG;AACH,UAAI,cAAc;AACd,eAAO;;AAEX,UAAI,OAAO,eAAe;AACtB,0BAAkB,cAAc,4CAC5B,0CAA0C,cAC1C;AACJ,wBAAe,CAAE,WAAW,QAAQ;AACpC,eAAO,sBAAsB;iBAExB,sBAAsB;AAC3B,eAAO;;AAGP,eAAO,sBAAsB;;;AC/IrC;;;;;;;;;AAwBO;AACH,aAAO,IAAI,QAAQ;;AAOhB;AACH,aAAO,IAAI,SAAS;;AAOjB;AACH,aAAO,IAAI;;AAGR;AACH,aAAO,IAAI,WAAW;;;;;;;;;AC7C1B;;;;;;;;;AASO,qCAAiC,CAAC,iBAAiB;AACnD,sCAAkC,CAAC,SAAS,QAAQ;AACpD,mCAA+B,CAAC,OAAO;AACvC,4CAAwC,CAAC,OAAO,OAAO,UAAU;AACjE,sCAAkC,CAAC;ACb1C;;;;;;;;;AAiBA,oBAAgB,IAAI;AACb;AACH,gCAA0B,0BAA0B,cAAc;;AAE/D;AACH,gCAA0B,2BAA2B,eAAe;;AAEjE;AACH,gCAA0B,wBAAwB,YAAY;;AAElE,4BAAwB;AACxB,8BAA0B;AAInB;AACH,sBAAgB,KAAK;AACrB;AACI,oBAAY;AACZ,wBAAgB;AAChB,eAAO;;AAGP,wBAAgB;AAChB,cAAM;;;AAMd;AACI,UAAI,gBAAgB,WAAW;AAC3B,eAAO;;AAGP,eAAO,gBAAgB,KAAK,qBAAqB;;;AAQlD;AACH,UAAI,CAAC,kBAAkB;AACnB,cAAM,IAAI,MAAM,+BAAgC,aAAa;;AAEjE,aAAO,2BAA2B;;AAY/B;AACH,UAAI,CAAC,kBAAkB;AACnB,cAAM,IAAI,MAAM,+BAAgC,aAAa;;AAEjE,UAAI,CAAC,QAAQ,IAAI;AACb,gBAAQ,IAAI,YAAY;;AAE5B,qBAAc,QAAQ,IAAI;AAC1B,cAAQ,IAAI,YAAY,QAAQ,IAAI,cAAc;AAClD,UAAI,SAAQ;AACR,uBAAe,GAAG,cAAc;AAGhC,gBAAQ,IAAI,QAAQ;AACpB,eAAO;;AAGP,eAAO;;;AAGf,4BAAwB,IAAI,OAAO;AAM5B;AACH,aAAO,CAAC,CAAC,KAAK,MAAM;;ACvGxB;;;;;;;;;AAyBO;AACH,aAAO,MAAM,SAAS,EAAE,YAAY;;AASjC;AACH,UAAI,SAAS;AACT,gBAAQ;;AAEZ,UAAI,OAAO;AACP,cAAM,OAAM;;AAEhB,kBAAW;AACX,mBAAa,OAAO,IAAI,KAAK,EAAE;AAC3B,iBAAQ,OAAM;;AAElB,aAAO;;AAOX;AACI,eAAQ,MAAM,QAAQ,UAAS,IAAI,aAAa,UAAS;AACzD,aAAO,UAAS;;AAOb;AACH,aAAO,KAAQ,UAAU,SAAQ,WAAW;;AAOzC;AACH,aAAO,KAAQ,UAAU,SAAQ,WAAW;;AAOzC;AACH,aAAO,MAAQ,UAAU,SAAQ,WAAW;;AAOzC;AACH,aAAO,MAAI,UAAS,OAAM;;AAOvB;AACH,uBAAiB,IAAQ,UAAU,SAAQ,QAAO,OAAK;AACvD,wBAAkB,MAAQ,KAAQ,UAAU,WAAW,WAAW;AAClE,aAAO,YAAY,OAAM;;AAOtB;AACH,0BAAoB,OAAM,QAAQ,KAAK,UAAU,IAAI;AACrD,qBAAe,KAAK,MAAO,aAAY,SAAS,KAAK;AACrD,sBAAgB,KAAK,KAAM,aAAY,SAAS,KAAK;AACrD,UAAI,WAAW;AACX,eAAO,YAAY;;AAEvB,aAAQ,aAAY,UAAU,YAAY,YAAY;;AASnD;AACH,UAAI,MAAM;AACN,cAAM,IAAI,WAAW,QAAQ,iBAAiB;;AAElD,kBAAY;AACZ,mBAAa,OAAO,IAAI,KAAK,EAAE;AAC3B,YAAI,KAAK;;AAEb,aAAO;;AChIX;;;;;;;;;AAqBA,oBAAc;AACP;AACH,iBAAe;AACf,kBAAU;;AAEP;AACH,aAAO;;AASJ;AACH,aAAO;;AAOJ;AACH,oBAAc,EAAE;AAChB,UAAI,MAAM,SAAS;AACf,eAAO,MAAM,OAAO,UAAU,IAAI;;AAIlC,eAAO;;;AASR;AACH,aAAO,EAAE,OAAO;;AAQb,oCAA8B;AACjC,uBAAiB,EAAE,MAAM;AACzB,UAAI,OAAO;AACP,eAAO,SAAS,SAAS,OAAO;;AAEpC,eAAS,OAAO,MAAM,GAAG;AACzB,aAAO,EAAE,QAAQ;;AAad;AACH,aAAO,KAAK;AACR,YAAI,EAAE,MAAM,WAAW;AACnB,gBAAM,IAAI,WAAW,yDACT,EAAE,MAAM;;AAExB,kBAAU,aAAW,GAAG;AACxB,eAAO,OAAK,GAAG,CAAC,GAAG,GAAG;;;AAQvB;AACH,uBAAiB,CAAC,UAAqB,EAAE;AACzC,aAAO,EAAE,QAAQ;;AAUd;AACH,UAAI,EAAE,QAAQ;AACV,cAAM,IAAI,WAAW,wDAAwD,EAAE;;AAEnF,uBAAiB,CAAC,EAAE,MAAM,IAAI,UAAq
AM,QAAQ;AACd,YAAI,MAAK,WAAW,MAAM;AACtB,gBAAM,IAAI,WAAW,6BAA6B,oIAEP,MAAM,qCACzB,MAAK;;AAEjC,iBAAS;;AAGT,YAAI,MAAM,SAAS;AACf,gBAAM,IAAI,WAAW,qBAAqB,MAAM,UAAU,kFAEnD,KAAK,UAAU,MAAK;;AAE/B,iBAAS,CAAC;;AAEd,UAAI,UAAU;AACV,qBAAa,GAAG,IAAI,MAAM,QAAQ,EAAE;AAChC,cAAI,OAAO,MAAM;AACb;;AAEJ,yBAAc,OAAO;AACrB,cAAI,OAAM,MAAM,WAAW,OAAO,GAAG;AACjC,kBAAM,IAAI,WAAW,uBAAuB,6BAA6B,MAAM,cAChE,OAAO,GAAG,iDACZ,KAAK,UAAU,OAAM;;AAEtC,uBAAa,GAAG,IAAI,OAAO,GAAG,QAAQ,EAAE;AACpC,gBAAI,MAAM,KAAK,CAAC;AACZ;;AAEJ,wBAAY,OAAM,MAAM;AACxB,2BAAe,OAAO,GAAG;AACzB,gBAAI,UAAU;AACV,kBAAI,WAAW;AACX,sBAAM,IAAI,WAAW,uBAAuB,6BACrC,MAAM,oBAAoB,KAAK,UAAU,OAAO,gCAC3B,KAAK,UAAU,OAAM;;;;;;;AAoBlE;AACH,UAAI,WAAW,QAAQ,MAAM,QAAQ,YAAY,QAAQ,WAAW;AAChE,eAAO,YAAY,IAAI,UAAQ;;AAEnC;AACA,UAAI,OAAO,YAAY,YAAY,OAAO,YAAY;AAClD,yBAAiB,CAAC;iBAEb,MAAM,QAAQ,YAAY,OAAO,YAAY;AAClD,yBAAiB;;AAGjB,cAAM,IAAI,UAAU,kGACsB;;AAE9C,UAAI,MAAM,QAAQ;AAEd,eAAO,YAAY,IAAI,UAAQ;;AAI/B,8BAAsB;AACtB,2BAAmB;AACf,8BAAoB,eAAe,eAAe,QAAQ,eAAe,QAAQ;AACjF,cAAI,CAAC,MAAM,QAAQ;AACf,4BAAgB,CAAC;;AAErB,wBAAc,KAAK;;AAEvB,eAAO;;;AAGf,qCAAiC;8BAaA;MAC7B;AACI,cAAM;AACN,aAAK,aAAa;;MAqCtB,yCAAyC,QAAQ;AAC7C,YAAI,CAAC,KAAK;AACN,gBAAM,IAAI,WAAW;;AAIzB,qBAAa,MAAM,YAAY,WAAW;;MAY9C;AACI,YAAI,KAAK,QAAQ;AACb,eAAK,OAAO;;AAEhB,aAAK,OAAO,KAAK;AACjB,YAAI,OAAO,KAAK,cAAc;AAC1B,eAAK,aAAa,aAAwB,KAAK;AAC/C,eAAK,mBAAmB;;AAGxB,cAAI,CAAE,MAAK,qBAAqB;AAC5B,kBAAM,IAAI,WAAW;;AAEzB,eAAK,aAAa,KAAK;AACvB,eAAK,mBAAmB;;AAK5B,4BAAoB;AACpB,YAAI,CAAC,MAAM,QAAQ,KAAK,SAAS,OAAO,KAAK,SAAS,YAClD,OAAO,KAAK,SAAS;AACrB,eAAK,OAAO,KAAK;AACjB,6BAAmB,KAAK;AACpB,gBAAI,KAAK,YAAY,QAAQ,UAAU;AACnC,oBAAM,IAAI,WAAW,sCAAsC,4CAClB,KAAK;;;AAGtD,6BAAmB,KAAK;AACpB,gBAAI,KAAK,KAAK,SAAS;AACnB,sBAAQ,KAAK,WAAW,gIAED;;AAE3B,0BAAc,KAAK,IAAW,KAAK,KAAK;;mBAGvC,MAAM,QAAQ,KAAK;AACxB,cAAI,KAAK,KAAK,WAAW,KAAK,QAAQ;AAClC,kBAAM,IAAI,WAAW,2FACc,KAAK,QAAQ,yCACrB,KAAK;;AAEpC,4BAAkB,KAAK;AACvB,0BAAgB,UAAU,IAAI,OAAK,IAAW;;AAG9C,+BAAqB,IAAW,KAAK;AACrC,eAAK,QAAQ,QAAQ;AACjB,0BAAc,KAAK;;;AAG3B,aAAK,gBAAgB;AACrB,aAAK,kBAAkB;AACvB,aAAK,mBAAmB;AACxB,aAAK,cAAc;AACnB,qBAAa,GAAG,IAAI,KAAK,QAAQ,QAAQ,EAAE;AAEvC,wBAAc,KAAK,qBAAqB;AACxC,uBAAa,KAAK,YAAY;AAC9B,eAAK,gBAAgB,KAAK;AAC1B,eAAK,iBAAiB,KAAK;AAC3B,eAAK,YAAY,KAAK,KAAK,cAAc;;AAI7C,kCAA0B;AAE1B,aAAK,UAAU,KAAK;AAEpB,aAAK,eAAe,CAAC;AACrB,aAAK,iBAAiB;AAKtB,kBAAU,QAAQ;AACd,uBAAa,GAAG,IAAI,KAAK,QAAQ,QAAQ,EAAE;AACvC,gBAAI,kBAAkB,QAAQ,OAAO;AACjC;;AAIJ,iCAAqB,KAAK,cAAc;AACxC,gBAAI,KAAK,QAAQ,SAAS;AACtB,mBAAK,eAAe,KAAK,CAAC,cAAc;AACxC,mBAAK,aAAa,KAAK,KAAK,YAAY,KAAK;;;;AAMzD,8BAAsB,eAAe,KAAK,SAAS,KAAK;AAKxD,6BAAqB;AACjB,cAAI,KAAK,YAAY,SAAS;AAC1B,yBAAa,KAAK,YAAY,eAAe,MAAM;;AAEvD,eAAK,aAAa,KAAK;AACvB,eAAK,eAAe,KAAK,CAAC,cAAc;;AAE5C,kBAAU,UAAU;AAChB,uBAAa,GAAG,IAAI,KAAK,QAAQ,QAAQ,EAAE;AACvC,gBAAI,kBAAkB,QAAQ,OAAO;AACjC;;AAEJ,kCAAsB,cAAc;AAGpC,kCAAsB;AAClB,uCAAyB;AACzB;AACA;AACA;AAEA,mCAAqB;AACjB,oBAAI,OAAO,WAAW,YAClB,CAAC,YAAY,OAAO,gBAAgB,MAAM,QAAQ,YAC9C;AACJ,sCAAoB,KAAK,qBAAqB;AAC9C,sBAAI,YAAY,YAAY,SAAS,OAAO,KACxC,KAAK,cAAc,OAAO;AAE1B,wBAAI,CAAC,YAAY,OAAO,QAAQ,YAAY;AACxC,8BAAQ;+BAEH,CAAC,gBAAgB,MAAM,QAAQ,YAAY;AAChD,8BAAQ;;6BAGP,KAAK,cAAc,OACxB;AAGA,wBAAI,CAAC,YAAY,OAAO,QAAQ,YAAY;AACxC,8BAAQ;+BAEH,CAAC,gBAAgB,MAAM,QAAQ,YAAY;AAChD,8BAAQ;;;AAKZ,wBAAI,CAAC,YAAY,OAAO,QAAQ,YAAY;AACxC,8BAAQ;+BAEH,CAAC,gBAAgB,MAAM,QAAQ,YAAY;AAChD,8BAAQ;;;AAGhB;AACA,sBAAI,CAAC,YAAY,OAAO,QAAQ,YAAY;AACxC,6BAAS;6BAEJ,CAAC,gBAAgB,MAAM,QAAQ,YAAY;AAChD,6BAAS;;AAGb,qCAAmB;AACnB,+BAAa,mBAAmB;;AAGhC,mCAAiB,MAAY;AAE7B,qCAAmB;AACnB,+BACI,mBAAmB,oBAA4B;;AAGvD;AACA,0BAAU,YAAY;AAClB,iCAAe;;AAEnB,6BAAa,GAAG,YAAY;;;AAGpC,0BAAc;;;AAMtB,aAAK,4BAA4B,KAAK;;MAW1C;AACI,YAAI,KAAK,6BAA6B;A
AClC;;AAEJ,YAAI,KAAK,iBAAiB,WACtB,KAAK,0BAA0B;AAC/B,kBAAQ,KAAK;;;MAoCrB,sBAAsB;AAClB,0BAAkB,KAAK,aAAa,OAAO,KAAK,KAAK;AACrD,uBAAe;AAGf,+BAAuB;AACvB,iCAAyB,KAAK,sBAAsB,GAAG,GAAG,gBAAgB;AAC1E;AAGI,sBAAY,iBAAiB,GAAG,OAAO,iBAAiB;AACxD,eAAK;AACL,oBAAU,KAAK;AACf,2BAAiB,KAAK,SAAS,GAAG,KAAK,WAAW,KAAK,SAAS,KAAK;AACrE,iBAAO,iBAAiB;;AAGxB,4BAAkB,iBAAiB,IAAI;AACvC,4BAAkB,iBAAiB,IAAI;;;YAyBzC;AACF,aAAK;AACL,eAAO,gBAAgB,MAAM,SAAS;;MAY1C,mDAAmD;AAC/C;AACA,YAAI,SAAS;AACT,uBAAa;AACb,cAAI,aAAa;AACb,kBAAM,IAAI,WAAW,MAAM,yEACJ;;mBAGtB,OAAO;AACZ,cAAI,MAAM,QAAQ;AACd,yBAAa,IAAI,GAAG,MAAM;;AAG1B,yBAAa,IAAI,MAAM;;;AAI3B,gBAAM,IAAI,WAAW,yDACd;;AAEX,eAAO;;MASX;AACI,YAAI,MAAM,QAAQ,YAAY,QAAQ,WAAW;AAC7C,gBAAM,IAAI,WAAW;;AAEzB,+BAAuB,MAAM,QAAQ;AACrC,4BAAqB,iBAAiB,UAAU,CAAC;AACjD,sCAA8B,KAAK,wBAAwB;AAE3D,yBAAiB,IAAI;AACrB,YAAI,kBAAkB;AAClB,mBAAS,CAAC;;AAEd,YAAI,MAAM,QAAQ;AACd,cAAI,OAAO,WAAW,KAAK,OAAO;AAC9B,kBAAM,IAAI,WAAW,kCAAkC,OAAO,8DAEtD,KAAK,OAAO;;AAExB,uBAAa,GAAG,IAAI,KAAK,OAAO,QAAQ,EAAE;AACtC,qBAAS,IAAI,KAAK,OAAO,IAAI,OAAO;;;AAIxC,+BAAoB,KAAK;AACrB,gCAAoB,OAAO,OAAM;AACjC,gBAAI,eAAe;AACf,oBAAM,IAAI,WAAW,8CAA8C,OAAM;;AAE7E,qBAAS,IAAI,QAAO;;;AAI5B,+BAAuB,QAAQ,uBAAuB;AACtD,eAAO,iBAAiB,iBAAiB,eAAe;;MAK5D;AACI,sCAA8B,aAAa,MAAM,oBAAoB;AACrE,+BAAuB,oBAAoB;AAC3C,4BAAoB,KAAK;AACrB,+BAAqB,MAAM,QAAQ,MAAM,UAAU,MAAM,SAAS,CAAC,MAAM;AACzE,mCAAyB,aAAa,IAAI,YAAU,OAAO;AAC3D,uBAAa,GAAG,IAAI,oBAAoB,QAAQ,EAAE;AAC9C,2BAAc,iBAAiB,QAAQ,oBAAoB;AAC3D,gBAAI,WAAU;AACV,oCAAsB,KAAK,aAAa;AACxC;;AAEJ,gBAAI,qBAAqB;AACrB;;;AAGR,cAAI,qBAAqB;AACrB;;;AAGR,YAAI,mBAAmB;AACnB,iCAAuB;AACvB,gCAAsB,QAAQ;AAC1B,gBAAI,WAAU;AACV,6BAAe,KAAK,oBAAoB;;;AAGhD,gBAAM,IAAI,WAAW,mDACd,KAAK,UAAU;;AAE1B,eAAO;;MAeX,6BAA6B,cAAc;AACvC,eAAO,KAAS;AACZ,6BAAmB,KAAK,gBAAgB;AACxC,cAAI;AACA,kBAAM,IAAI,oBAAoB;;AAMlC,0BAAgB,YAAY,YAAY;AACxC,8BAAoB,KAAK,QAAQ,IAAI,YAAU;AAE/C,gCAAsB,GAAG,aAAa,QAAQ,QAAQ,EAAE;AACpD,8BAAkB,KAAS;AACvB,iCAAmB,QAAQ,YAAY;AACvC,+BAAiB,QAAQ,YAAY;AAGrC,+BAAiB,YAAY,KAAK,YAAY;AAE9C,4BAAc;AACd,kBAAI,MAAM,QAAQ;AACd,6BAAa,GAAG,IAAI,SAAS,QAAQ,EAAE;AACnC,wBAAM,KAAK,CAAE,KAAK,KAAK,OAAO,IAAI,OAAO,SAAS;;;AAItD,sBAAM,KAAK,CAAE,KAAK,KAAK,OAAO,IAAI,OAAO;;AAE7C,+BAAiB,IAAI,SAAS;AAC9B,qBAAO,QAAQ,KAAK,SAAS;;AAEjC,sBAAU,QAAQ,iBAAiB,YAAY,GAAG,KAAK;;AAE3D,iBAAO,iBAAiB,YAAY,IAAI,cAAW,QAAW,UAAS;;;MA8B/E,kBAAkB;AACd,gCAAwB,2BAA2B;AACnD,uBAAe,iBAAiB,KAAK,YAAY,KAAK,iBAAiB;AACvE;AAKI,4BAAkB,KAAK,aAAa,OAAO,KAAK,KAAK;AACrD,yBAAe;AACf,iBAAO,KAAK,YAAY,iBAAiB;;AAGzC,4BAAkB,iBAAiB;;;MAkB3C;AACI,uBAAe,GAAG,KAAK,YAAY,KAAK,iBAAiB;AAGzD,0BAAmB,OAAM,QAAQ,KAAK,EAAE,KAAK,GAAG,MAAM;AACtD,eAAO,KAAK,YAAY,GAAG;;MAE/B,6CAA6C;AAEzC,YAAI,KAAK,cAAc;AACnB,gBAAM,IAAI,aAAa;;AAG3B,6BAAqB;AACrB,qBAAa,GAAG,IAAI,KAAK,iBAAiB,QAAQ,EAAE;AAChD,8BAAoB,KAAK,iBAAiB;AAC1C,yBAAe,KAAK,YAAY;AAChC,cAAI,WAAW;AACX,yBAAa,KAAK,YAAY,MAAM,GAAG,YAAY,SAAS,GAAG,OAAO,CAAC;;AAIvE,yBAAa,KAAK;;;AAG1B,YAAI,qBAAqB,GAAG,KAAK,gBAAgB,KAAK,iBAAiB,OAAO;AAC9E,YAAI,qBAAqB,GAAG,KAAK,iBAAiB,cAAc,OAAO;AAEvE,0BAAkB,GAAG,GAAG;AAExB,wCAAgC,GAAG,KAAK,aAAa,KAAK;AAC1D,YAAI,KAAK,YAAY,aAAa,QAAQ,YAAY;AAClD,cAAI,EAAE,GAAG,MAAM,KAAK,cAAc;AAC9B,kBAAM,IAAI,WAAW,mHAEd,qBAAqB,EAAE,GAAG,MAAM;;;AAG/C,eAAO,CAAC,GAAG;;YAET,sEAAsE;AACxE,yCAAiC,KAAK,sBAAsB,GAAG,GAAG,gBAAgB;AAElF,YAAI,gBAAgB;AAChB,gBAAM,IAAI,MAAM;;AAEpB,oCAA4B;AAC5B,YAAI,eAAe;AACf,+BAAqB,wBAAwB,aAAa,KAAK;AAC/D,kCAAwB;AACxB,uBAAa,GAAG,IAAI,aAAa,QAAQ,EAAE;AACvC,kCAAsB,KAAK,MAAM,mBAAmB,WAAW,IAAI,MAAM,aAAa;;;AAI9F,eAAO,CAAC,YAAY,YAAY;;MAapC,sCAAsC;AAClC,eAAO,KAAS;AACZ,6BAAmB,KAAK,gBAAgB,KAAK,WAAW,OAAO;AAC/D,uBAAa;AACb,cAAI,UAAU;AACV,kBAAM,IAAI,oBAAoB;;AAGlC,cAAI,SAAS;AACT,kBAAM,IAAI,oBAAoB;;AAG9B,4BAAgB,YAAY,YAAY;
AACxC,+BAAmB,UAAS,QAAM,GAAG;AACrC,kCAAsB,GAAG,aAAa,QAAQ,QAAQ,EAAE;AACpD,iCAAmB,QAAQ,YAAY;AACvC,+BAAiB,QAAQ,YAAY;AACrC,+BAAiB,oBAAsB,YAAY,YAAY,WAAW;AAG1E,+BAAiB,qBAAqB,KAAK;AAC3C,gCAAkB,EAAE;AACpB,kBAAI,eAAe;AACf,6BAAa,GAAG,IAAI,UAAU,QAAQ,EAAE;AACpC,uBAAK,KAAK,QAAO;;;AAGzB,2BAAa,GAAG,IAAI,UAAU,QAAQ,EAAE;AACpC,iCAAiB,UAAU;AAC3B,qBAAK,KACD,MAAQ,KAAK,IAAI,KAAQ,WAAW,YAAY;;;AAG5D,yBAAa,GAAG,IAAI,KAAK,QAAQ,EAAE;AAC/B,mBAAK,KAAK,IAAQ,KAAK,IAAI;;;AAGnC,iBAAO;;;MAGf;AACI,0BAAkB,KAAK;AAGvB,iCAAyB;AACzB,qBAAa,GAAG,IAAI,UAAU,QAAQ,EAAE;AACpC,wBAAc,UAAU;AACxB,yBAAe;AACf,cAAI,MAAM,WAAW,SAAS;AAC1B,6BAAiB,MAAM,UAAU,MAAM,GAAG,IAAI;AAC9C,wBAAY,IAAI;;AAEpB,2BAAiB,KAAK;;AAE1B,eAAO;;MAYX;AACI,eAAO;AACH,6BAAmB;AACnB,yBAAe,MAAK,MAAM,GAAG,KAAK,OAAO;AACzC,0BAAgB,MAAK,MAAM,KAAK,OAAO,QAAQ,KAAK,OAAO,SAAS,KAAK,QAAQ;AACjF,gCAAsB,MAAK,MAAM,KAAK,OAAO,SAAS,KAAK,QAAQ,QAAQ,KAAK,OAAO,SAAS,KAAK,QAAQ,SAAS;AACtH,gCAAsB;AAItB,oCAA0B;AACtB,0BAAc;AACd,yBAAa,GAAG,IAAI,KAAK,OAAO,QAAQ,EAAE;AACtC,oBAAM,KAAK,CAAE,KAAK,KAAK,OAAO,IAAI,OAAO,OAAO;;AAEpD,6BAAiB,IAAI,SAAS;AAC9B,4BAAgB,QAAQ,KAAK,SAAS,UAAU,CAAE,UAAY;AAG9D;AACA,yBAAa,GAAG,IAAI,KAAK,cAAc,QAAQ,EAAE;AAC7C,mCAAqB,KAAK,cAAc;AACxC,yBAAW,aAAa,QAAQ,IAAI,QAAQ;AAC5C,kBAAI,cAAc,MAAM;AACpB,uBAAO,sBAAoB,MAAM,cAAc;;AAGnD,+BAAiB,KAAS;AAE1B,yBAAW,KAAK;AAChB,kBAAI,MAAM;AACN,4BAAY;;AAGZ,4BAAY,MAAQ,WAAW;;;AAMvC,yBAAa,GAAG,IAAI,KAAK,eAAe,QAAQ,EAAE;AAC9C;AACA,kBAAI,KAAK,QAAQ,SAAS,KAAK,IAAI,KAAK,QAAQ;AAC5C,iCAAiB,WAAW;;AAG5B,+BAAe,KAAK,eAAe,GAAG;AACtC,oCAAoB,KAAK,eAAe,GAAG;AAC3C,iCACI,KAAS,OAAO,QAAQ,cAAc,QAAQ;;AAEtD,mBAAS;AAET,4BAAc,KAAK;;AAEvB,wBAAY,KAAS;AAErB,iBAAK,kBAAkB,QAAQ;AAC3B,0BAAY,MAAQ,WAAW;;AAEnC,mBAAO;;AAEX,4BAAkB,KAAK,0BAA0B,IAAI,WAAS,MAAM;AACpE,6BAAmB;AACnB,iCAAuB,KAAK,WAAW,SAAS,mBAAmB,YAAY;AAC/E,iBAAO,CAAC,gBAAgB,OAAO;;;MAQvC;AACI,aAAK,eAAe;AAChB,iBAAO,KAAS;AACZ,+BAAmB;AACnB;AACA,2BAAe,MAAK,MAAM,GAAG,KAAK,OAAO;AACzC,4BAAgB,MAAK,MAAM,KAAK,OAAO,QAAQ,KAAK,OAAO,SAAS,KAAK,QAAQ;AACjF,0BAAc;AACd,yBAAa,GAAG,IAAI,KAAK,OAAO,QAAQ,EAAE;AACtC,oBAAM,KAAK,CAAE,KAAK,KAAK,OAAO,IAAI,OAAO,OAAO;;AAEpD,6BAAiB,IAAI,SAAS;AAC9B,4BAAgB,QAAQ,KAAK,SAAS;AAEtC,yBAAa,GAAG,IAAI,KAAK,cAAc,QAAQ,EAAE;AAC7C,mCAAqB,KAAK,cAAc;AAGxC,2BAAa,KAAS,aAAa,QAAQ,IAAI,QAAQ;AACvD,kBAAI,MAAM;AACN,4BAAY;;AAGZ,4BAAY,MAAQ,WAAW;;AAEnC,yBAAW,KAAK;;AAGpB,yBAAa,GAAG,IAAI,KAAK,eAAe,QAAQ,EAAE;AAC9C,6BAAe,KAAK,eAAe,GAAG;AACtC,kCAAoB,KAAK,eAAe,GAAG;AAE3C,iCAAmB,KAAS,OAAO,QAAQ,cAAc,QAAQ;AACjE,yBAAW,KAAK;;AAEpB,mBAAO;;;;YAsCb,iBAAiB;AACnB,eAAO,WAAW,MAAM,GAAG,GAAG;;YAyB5B;AACF,eAAO,WAAW,MAAM,SAAS;;YAyB/B;AAGF,+BAAuB,MAAM,KAAK,oBAAoB,GAAG;AACzD,uBAAe,eAAe;AAC9B,wBAAgB,eAAe;AAC/B,8BAAsB,KAAK;AAC3B,wBAAe,cAAc,OAAO,OAAO;AAC3C,2BAAmB;AACnB,2BAAmB;AACf,oBAAU,MAAM,KAAK;AACrB,qBAAW,KAAK,EAAE;;AAEtB,gBAAY;AACZ,eAAO,iBAAiB;;MAW5B;AACI,6BAAqB;AACrB,8BAAsB,WAAU,QAAQ,QAAO;AAC/C,wBAAgB,gBAAgB,KAAK,mBAAmB,KAAK;AAC7D,6BAAqB,KAAK,WAAW;AACrC,qBAAa,GAAG,IAAI,QAAQ,QAAQ,EAAE;AAClC,cAAI,iBAAiB,CAAC,QAAQ,GAAG;AAE7B;;AAEJ,uBAAa,KAAK,CAAE,MAAM,QAAQ,GAAG,cAAc,QAAQ,aAAa;;AAE5E,eAAO;;UAgCP;AACA,aAAK,gBAAgB;;UAErB;AACA,eAAO,KAAK;;UAEZ;AACA,eAAO,KAAK;;UAEZ;AACA,YAAI,KAAK,eAAe;AACpB,eAAK,aAAa;AAClB,eAAK,mBAAmB;;;MAGhC;AACI,uBAAe,MAAM;AACrB,YAAI,OAAO,yBAAyB,KAAK,KAAK,aAAa,QACvD,KAAK;AACL,mDAAyC,SAAa;AACtD,eAAK,WAAW;AAChB,iBAAO,wBACH,mCAAmC,SAAa;;AAExD,eAAO;;MAEX;AACI;AACA,YAAI,OAAO,KAAK,SAAS;AACrB,sBAAY,YAAY,KAAK;mBAExB,MAAM,QAAQ,KAAK;AACxB,6BAAmB,KAAK;AACpB,gBAAI,OAAO,SAAS;AAChB,oBAAM,IAAI,MAAM;;;AAGxB,sBAAY,KAAK,KAAK,IAAI,UAAQ,YAAY;;AAG9C,8BAAoB,OAAO,KAAK,KAAK;AACrC,sBAAY;AACZ,0BAAe,KAAK;AACpB,mCAAyB;AACrB,gBAAI,OAAO,QAAO,gBAAgB;AAC9B,wBAAU,cACN,YAAY,QAAO;;AAGvB,oBAAM,IAAI,MAAM;;
;;AAI5B,eAAO;;MAEX;AACI,YAAI,OAAO,KAAK,YAAY,YACxB,OAAO,KAAK,YAAY;AACxB,iBAAO,CAAC,YAAY,oBAA4B,KAAK;mBAEhD,MAAM,QAAQ,KAAK;AACxB,iBAAO,KAAK,QAAQ,IAAI,YAAU,YAAY,oBAA4B;;AAG1E,qCAA2B;AAC3B,4BAAkB,KAAK;AACnB,+BAAmB,OACf,YAAY,oBAA4B,KAAK,QAAQ;;AAE7D,iBAAO;;;MAGf;AACI,eAAO;UACH,MAAM,KAAK;UACX,SAAS,KAAK;UACd,kBAAkB;YACd,YAAY,KAAK,UAAU;YAC3B,QAAQ,KAAK,UAAU;;;;MAOnC;AACI,YAAI,eAAe,oBAAoB;AACnC,gBAAM,IAAI,MAAM;;AAEpB,YAAI,eAAe,gBAAgB;AAC/B,gBAAM,IAAI,MAAM;;AAEpB,YAAI,eAAe,sBAAsB;AACrC,gBAAM,IAAI,MAAM;;AAEpB,yBAAiB,oBAAoB,eAAe;AACpD,0BAAkB,YAAY;AAC9B;AACA,YAAI,OAAO,eAAe,SAAS;AAC/B,iBAAO,YAAY,eAAe;mBAE7B,MAAM,QAAQ,eAAe;AAClC,iBAAO,eAAe,KAAK,IAAI,eAAa,YAAY;mBAEnD,eAAe,QAAQ;AAC5B,iBAAO;AACP,4BAAkB,eAAe;AAC7B,iBAAK,OAAO,YAAY,eAAe,KAAK;;;AAGpD;AACA,YAAI,MAAM,QAAQ,eAAe;AAC7B,oBAAU,eAAe,QAAQ,IAAI,YAAU,YAAY;mBAEtD,eAAe,WAAW;AAC/B,oBAAU;AACV,4BAAkB,eAAe;AAC7B,oBAAQ,OAAO,YAAY,eAAe,QAAQ;;;AAG1D,aAAK,QAAQ,CAAE,MAAM,SAAS;;YAmF5B;AACF,YAAI,OAAO,iBAAiB;AACxB,2BAAiB,gBAAmB;AACpC,cAAI,SAAS,WAAW;AACpB,kBAAM,IAAI,WAAW,0CAA0C;qBAE1D,SAAS,SAAS;AACvB,kBAAM,IAAI,WAAW,wBAAwB,SAAS,kCAC1C;;AAEhB,yBAAe,SAAS;;AAE5B,YAAI,aAAa,QAAQ;AACrB,gBAAM,IAAI,WAAW;;AAGzB,mCAA2B,MAAM,cAAiB,KAAK,gBAAgB;AACvE,6BAAqB;AACrB,0BAAkB;AAClB,4BAAoB,KAAK,OAAO,WAAW;AAC3C,+BAAuB;UACnB,eAAe;UACf,QAAQ;UACR,aAAa,8BAA8B;UAC3C,aAAa;;AAEjB,iCAAyB,WAAU,OAAO,QAAQ,QAAO;AACzD,YAAI,oBAAoB,KAAK,aAAa;AACtC,yBAAe,iBAAiB,KAAK;AACrC,6BAAmB;AACnB,iBAAQ,2BAA2B,+BAAgC,MAAM,cAAiB,MAAM,KAAK,UAAU,cAAc;AAC7H,6BAAmB,MAAM,KAAK,GAAG;AACjC,6BAAmB,OAAO,wBAA2B,CAAC,mBAAmB,MAAM;;AAEnF,YAAI,KAAK,uBAAuB;AAE5B,4BAAkB;AAClB,mCAAyB,KAAK,qBAAqB,KAAK,MAAM;AAC9D,yBAAe,sBAAsB,KAAK;;AAE9C,uBAAe,aAAa,mBAAmB;AAC/C,uBAAe,cAAc,mBAAmB;AAChD,eAAO,aAAa,KAAK;;MAU7B;AACI,iCAAyB,qBAAqB,KAAK;AACnD,aAAK,sBAAsB;;MAa/B;AACI,eAAO,KAAK;;;AAMpB,gBAAY,YAAY;AACxB,kBAA4B;6BAQI;;AAEhC,eAAW,YAAY;AACvB,kBAA4B;ACzkD5B;;;;;;;;;AAiDO;AACH,UAAI,CAAE,oBAAmB;AACrB,gCAAwB,CAAE,eAAe;;AAE7C,8BAAwB;AACxB,0BAAoB,sBAAsB;AAC1C,UAAI,cAAc,mBAAmB;AAMjC,wBAAgB,cAAc;;AAElC,uBAAiB,oBAAoB;AACrC,qBAAc,YAAY,UAAU;AACpC,UAAI,sBAAsB,mBAAmB;AAIzC,6BAAqB,MAAM,YAAe,sBAAsB,iBAAiB,sBAAsB,YAAY,OAAM,QAAQ,IAAI,YAAU,OAAO;AAEtJ,mCAA2B;AAC3B,6BAAqB,OAAM;AACvB,6BAAmB,OAAO,gBACtB,aAAa,OAAO;;AAE5B,eAAM,YAAY;AAElB,gBAAQ;;AAEZ,aAAO;;AA0FJ;AACH,UAAI,WAAW;AACX,kBAAU;;AAEd,UAAI,OAAO,oBAAoB;AAC3B,yBAAiB,gBAAmB,iBAAiB;AACrD,YAAI,SAAS,WAAW;AAKpB,mBAAS,KAAK,mBAAsB,iBAAiB;mBAEhD,SAAS,SAAS;AACvB,gBAAM,IAAI,WAAW,wBAAwB,SAAS,kCAC1C;;AAEhB,0BAAkB,SAAS;;AAE/B,aAAO,6BAA6B,iBAAiB,QAAW;;AAY7D;AACH,UAAI,WAAW;AACX,kBAAU;;AAEd,UAAI,QAAQ,QAAQ;AAChB,cAAM,IAAI,WAAW;;AAGzB,wBAAkB,MAAM,QAAQ;AAChC,0BAAoB,UAAU;AAC9B,UAAI,cAAc,mBAAmB;AACjC,wBAAgB,cAAc;;AAElC,qBAAe,QAAQ,UAAU,OAAO,OAAO,QAAQ;AAMvD,6BAAuB,UAAU,cAAc,QAAQ,UAAU,eAAe,QAAQ;AACxF,qBAAc,YAAY,oBAAoB,gBAAgB,eAAe;AAC7E,6BAAuB,UAAU;AACjC,UAAI,kBAAkB;AAClB,eAAM,mBAAmB;;AAE7B,UAAI,UAAU,uBAAuB;AACjC,eAAM,uBAAuB,UAAU;;AAG3C,UAAI,UAAU,cAAc;AAExB,YAAI,UAAU,eAAe;AACzB,gBAAM,IAAI,WAAW;;AAGzB,eAAQ,cAAc,oBAAqB,+BAA+B,UAAU,YAAY,UAAU;AAC1G,eAAM,YAAY,cAAc;AAChC,YAAI,OAAM,aAAa,QAAQ,iBAAiB,SAAS;AACrD,gBAAM,OAAM,UAAU,WAAW;;AAGrC,gBAAQ;AACR,gBAAQ,iBAAiB,IAAI,OAAK,EAAE;;AAExC,aAAO;;AAEX;AACI,0BAAoB,cAAiB,SAAQ;AAC7C,2BAAqB;AACrB,+BAAyB;AACzB,YAAM,QAAQ;AACV,YAAI,KAAK,UAAU;AACf,2BAAiB,KAAK,CAAE,MAAM,KAAK,MAAM,QAAQ,YAAY,KAAK;;AAGlE,uBAAa,KAAK,QAAQ,YAAY,KAAK;;;AAGnD,aAAO,CAAE,cAAc;;6BA4BK;MAC5B;AACI,cAAM,CAAE,QAAQ,IAAI,SAAS;AAC7B,eAAO,QAAQ;AACf,aAAK,YAAY;AACjB,aAAK,QAAQ;AAEb,aAAK,OAAQ,KAAK,QAAQ,OAAQ,KAAK,OAAO,OAAO;AAErD,YAAI,KAAK,UAAU;AACf,8BAAoB,KAAK;AACrB,iBAAK,IAAI;;;;MAMrB;AACI,sBAAc,MAAM,aAAa,GAAG,cAAc,GAAG;AACrD,YAAI,MAAM,KAAK,OAAK,IAAI;AACpB,gBAAM
,IAAI,WAAW,kDACd,MAAM,0BACN,MAAM,aAAa,GAAG,aAAa,GAAG;;;MAwBrD;AACI,qCAA6B,iBAAiB,cAAc,iBAAiB;AAC7E;AACA,YAAI;AACA,uBAAa;AACb,cAAI,WAAW,QAAQ,WAAW;AAC9B,kBAAM,IAAI,WAAW;;AAKzB,cAAI,WAAW,OAAO,WAAW;AAC7B,kBAAM,IAAI,WAAW;;;AAM7B,YAAI,KAAK,QAAQ,WAAW;AAExB,cAAI,MAAM,aAAa,WAAW;AAE9B,gBAAI,MAAM,mBAAmB;AACzB,oBAAM,IAAI,WAAW;;AAIzB,sBAAU,MAAM;cACZ,YAAY,MAAM;cAClB,OAAO,MAAM;cACb,MAAM,MAAM,OAAO;;AAIvB,kBAAM,MAAM;;AAEhB,cAAI;AACA,iBAAK,UAAU,WAAW;AAC1B,iBAAK,SAAS,WAAW;;AAGzB,gBAAI,MAAM,aAAa,WAAW;AAC9B,oBAAM,IAAI,WAAW,gHACuC,MAAM,kBACjD,MAAM,aAAa;;AAGxC,gBAAI,MAAM,aAAa,GAAG,cAAc,WAAW;AAC/C,oBAAM,IAAI,WAAW;;AAKzB,iBAAK,WAAW;AAChB,iBAAK,UAAU,CAAC,MAAM,aAAa,GAAG,cAAc;AACpD,iBAAK,SAAS,gBAAgB,KAAK,QAAQ;;AAE/C,eAAK,eAAe;AAKpB,cAAI,KAAK;YACL,eAAe;YACf,eAAe;YACf,aAAa;YACb,eAAe;YACf,cAAc,KAAK;YACnB,eAAe,KAAK;YAEpB,YAAY,aAA2B,MAAM,KAAK,OAAO;YACzD,aAAa,CAAC;YACd,aAAa,KAAK,OAAO,IAAI,OAAK,EAAE;YACpC,cAAc,KAAK,QAAQ,GAAG;;;AAIlC,+BAAqB,MAAM,MAAM,KAAK,QAAQ;AAC9C,cAAI,MAAM,QAAQ;AACd,kBAAM,IAAI,UAAU;;AAKxB,eAAK,WAAW;AAChB,eAAK,UAAU,CAAC;AAEhB,eAAK,aAAa,GAAG,gBAAgB,KAAK;AAC1C,eAAK,aAAa,GAAG,eAAe,CAAC,KAAK,QAAQ,GAAG;;AAEzD,aAAK,OAAO,KAAK;AACjB,aAAK,QAAQ;;MAOjB;AACI,YAAI,KAAK,OAAO,WAAW;AACvB,gBAAM,IAAI,UAAU;;AAExB,aAAK,OAAO;AACZ,YAAI,KAAK,OAAO,WAAW;AACvB,eAAK,UAAU;AACf,eAAK,eAAe;AACpB,eAAK,gBAAgB;;AAGrB,iCAAuB,KAAK,OAAO,SAAS;AAC5C,eAAK,OAAO,gBAAgB,gBAAgB;AAC5C,eAAK,UAAU,CAAC,KAAK,OAAO,gBAAgB;AAE5C,eAAK,aAAa,GAAG,gBAAgB,KAAK;AAC1C,eAAK,aAAa,GAAG,eAAe,CAAC,KAAK,QAAQ,GAAG;;;MAG7D;AACI,YAAI,KAAK,SAAS;AACd,eAAK;;AAET,eAAO,KAAK,MAAM,KAAK,QAAQ;;MAEnC;AAGI,2BAAmB;AACnB,YAAI,KAAK,OAAO,WAAW,KAAK,KAAK,QAAQ,WAAW;AACpD,gBAAM,IAAI,UAAU;;AAIxB,aAAK,QAAQ,IAAI,YAAY;UACzB,QAAQ,KAAK;UACb,SAAS,KAAK,QAAQ;UACtB,MAAM,KAAK,OAAO;;AAEtB,aAAK,MAAM,YAAY,KAAK;AAE5B,aAAK,kBAAkB,KAAK,MAAM;AAElC,aAAK,cAAc,KAAK,MAAM;AAC9B,aAAK,yBAAyB,KAAK,MAAM;AACzC,aAAK,2BAA2B,KAAK,MAAM;AAC3C,aAAK,eAAe,KAAK,MAAM;AAC/B,aAAK,0BAA0B,KAAK,MAAM;AAC1C,aAAK,4BAA4B,KAAK,MAAM;AAC5C,aAAK,eAAe,KAAK,MAAM;AAC/B,aAAK,iBAAiB,KAAK,MAAM;AACjC,aAAK,cAAc,KAAK,MAAM;AAC9B,aAAK,aAAa,KAAK,MAAM;AAG7B,aAAK,QAAQ;;MAEjB;AACI,YAAI,CAAC,KAAK;AACN,eAAK;;AAET,eAAO,MAAM;;MAgCjB,yCAAyC,QAAQ;AAC7C,YAAI,CAAC,KAAK;AACN,eAAK;;AAET,cAAM,QAAQ,YAAY,WAAW;;MAQzC;AACI,YAAI,KAAK,SAAS;AACd,eAAK;;AAET,aAAK,MAAM,WAAW;;MAkC1B,sBAAsB;AAClB,YAAI,CAAC,KAAK;AACN,gBAAM,IAAI,aAAa;;AAE3B,eAAO,KAAK,MAAM,SAAS,GAAG,GAAG;;YAwB/B;AACF,YAAI,CAAC,KAAK;AACN,gBAAM,IAAI,aAAa;;AAE3B,eAAO,KAAK,MAAM,gBAAgB,SAAS;;MA6B/C,kBAAkB;AACd,YAAI,KAAK,SAAS;AACd,eAAK;;AAET,eAAO,KAAK,MAAM,QAAQ,GAAG;;MASjC;AACI,YAAI,KAAK,SAAS;AACd,eAAK;;AAET,eAAO,KAAK,MAAM,eAAe;;MAOrC;AACI,aAAK;AACL,aAAK,MAAM,QAAQ;AACnB,aAAK,aAAa,KAAK,MAAM;AAE7B,aAAK,mBAAmB,KAAK,MAAM;AACnC,aAAK,OAAO,KAAK,MAAM;AACvB,aAAK,UAAU,KAAK,MAAM;AAG1B,aAAK,iBAAiB,KAAK,MAAM;AACjC,aAAK,eAAe,KAAK,MAAM;;UAG/B;AACA,eAAO,KAAK,SAAS,OAAO,SAAY,KAAK,MAAM;;UAEnD;AACA,aAAK,MAAM,YAAY;;YAiCrB,iBAAiB;AACnB,YAAI,CAAC,KAAK;AACN,gBAAM,IAAI,aAAa;;AAG3B,eAAO,KAAK,MAAM,IAAI,GAAG,GAAG;;YAuF1B;AACF,YAAI,CAAC,KAAK;AACN,gBAAM,IAAI,aAAa;;AAG3B,eAAO,KAAK,MAAM,WAAW,SAAS;;YAyBpC;AACF,eAAO,KAAK,MAAM,aAAa,GAAG;;aAI/B,yCAAwC,qBAAqB;AAChE;AACA,+BAAuB;AACvB,YAAI,mBAAkB;AAClB,cAAI,CAAE,SAAO,GAAG,aAAa,SACzB,QAAO,GAAG,iBAAiB;AAC3B,kBAAM,IAAI,WAAW;;AAEzB,wBAAc;;AAGd,kBAAY,QAAO,aAAa,MAAM,MAAM;AAE5C,wBAAc,QAAO;AACrB,iBAAO,QAAO;AACd,6BAAmB;;AAEvB,uBAAc,IAAI,IAAI;AACtB,YAAI,CAAE,mBAAiB;AACnB,gBAAM,IAAI,oBAAoB,yDAAyD;;AAE3F,2BAAmB;AACf,iCAAsB;AACtB,wBAAc,YAAY,MAAM,gBAAe;AAC/C,cAAI;AACA,kBAAM,6BAA6B;;AAEvC,iBAAM,IAAI;;AAEd,eAAO;;UA8BP;AAGA,YAAI,KAAK,SAAS;AACd,gBAAM,IAAI,WAAW;;AAGzB,aAAK,MAAM,eAAe;;UAE1B;AACA,YAAI,KAAK,SAAS;AACd,gBAAM,IAAI,WAA
W;;AAGzB,eAAO,KAAK,MAAM;;MAItB;AAKI,uBAAe;AACf,4BAAoB,KAAK;AACrB,uBAAa;AACb,eAAK,eAAe,MAAM;AAC1B,eAAK,YAAY,MAAM;AACvB,iBAAO,KAAK;;AAEhB,eAAO,CAAE,MAAM,KAAK,MAAM;;;AAIlC,eAAW,YAAY;AACvB,kBAA4B;ACn6B5B;;;;;;;;;AAyDO;AACH,aAAO,IAAI,YAAY;;AA8DpB;AACH,aAAO,IAAI,WAAW;;AA6FnB;AACH,UAAI,WAAW;AACX,kBAAU;;AAEd,aAAO,wBAAwB,iBAAiB;;AAyB7C;AACH,aAAO,MAAM;;AAEV;AACH,kCAA4B,4BAA4B,gBAAgB;;ACvP5E;;;;;;;;;6BAqBgC;MAC5B;AACI,eAAO;;;wBAOU;MAQrB,iBAAiB;AACb,eAAO,MAAM,GAAG;;;AAIxB,UAAI,YAAY;AAChB,kBAA4B;yBAQF;MACtB;AACI,eAAO,KAAS;;;AAIxB,WAAK,YAAY;AACjB,kBAA4B;yBAIF;MACtB;AACI,eAAO,MAAS;;;AAIxB,WAAK,YAAY;AACjB,kBAA4B;0BAID;MACvB;AACI,eAAO,KAAK,MAAM,QAAY,GAAK,MAAS;;;AAIpD,YAAM,YAAY;AAClB,kBAA4B;yBAEA;MACxB;AACI,eAAO;;;AAIf,WAAO,YAAY;AACnB,kBAA4B;4BAIC;MACzB;AACI,eAAO,SAAY;;;AAI3B,cAAQ,YAAY;AACpB,kBAA4B;8BAIK;MAC7B;AACI,eAAO,YAAc;;;AAI7B,gBAAY,YAAY;AACxB,kBAA4B;6BAIE;MAC1B;AACI,eAAO,SAAa;;;AAI5B,eAAS,YAAY;AACrB,kBAA4B;2BAIE;MAC1B;AACI,eAAO,SAAW;;;AAI1B,aAAS,YAAY;AACrB,kBAA4B;yBAIF;MACtB;AACI,eAAO,OAAS;;;AAIxB,WAAK,YAAY;AACjB,kBAA4B;4BAIC;MAazB,gBAAiB;AACb,eAAO,SAAY,GAAG;;;AAI9B,cAAQ,YAAY;AACpB,kBAA4B;+BAII;MAc5B,gBAAiB;AACb,eAAO,WAAe,GAAG;;;AAIjC,iBAAW,YAAY;AACvB,kBAA4B;wBAID;MAQvB,iBAAiB;AACb,eAAO,KAAK,MAAM,SAAY,EAAE,IAAI,QAAQ,IAAI;;;AAIxD,UAAM,YAAY;AAClB,kBAA4B;AACrB;AACH,aAAO,YAAW;;AAEf,4DAAuD;AAC1D,aAAO,uBAAuB,SAAQ,iBAA+B,SAAS,cAAc,eAAe;;AAExG;AACH,UAAI,cAAc;AACd,wBAAe;AACf,gBAAO,eAAe;AACtB,gBAAO,YAAY;AACnB,eAAO,sBAAsB;;AAEjC,UAAI,OAAO,eAAe;AACtB,wBAAe;AACf,gBAAO,eAAe;AACtB,gBAAO,YAAY;AACnB,eAAO,sBAAsB;iBAExB,sBAAsB;AAC3B,eAAO;;AAGP,eAAO,sBAAsB;;;AC1OrC;;;;;;;;;AAcA;AACI,UAAI,QAAQ,QAAQ,OAAO,SAAS;AAChC,cAAM,IAAI,MAAM,yFACa;;;8BAMJ;;uBAEP;MACtB;AACI;AACA,yBAAiB;AACjB,aAAK,KAAK,QAAQ,QAAQ,KAAK,MAAM,OAAO,OAAO,KAAK;AACxD,aAAK,KAAK,QAAQ,QAAQ,KAAK,MAAM,OAAO,OAAO,KAAK;AACxD,aAAK,QAAQ,KAAK,OAAO;AACzB,aAAK,QAAQ,KAAK,OAAO;;MAM7B;AACI,eAAO,KAAK;AACR,+BAAqB,OAAM,CAAC;AAC5B,cAAI,KAAK;AACL,6BAAiB,MAAI,gBAAgB,MAAI,KAAQ,KAAK,IAAI,IAAI;;AAElE,cAAI,KAAK;AACL,6BACI,MAAI,gBAAgB,MAAI,KAAQ,KAAK,IAAI,SAAS;;AAE1D,iBAAO,eAAe;;;MAG9B;AACI,eAAO,CAAE,IAAM,KAAK,IAAI,IAAM,KAAK;;aAGhC;AACH,eAAO,IAAI,IAAI,CAAE,IAAI,QAAO,OAAO,IAAI,QAAO;;;AAItD,SAAK,YAAY;AACjB,kBAA4B;AACrB;AACH,uBAAiB;AACjB,aAAO,IAAI,KAAK,CAAE,IAAI,QAAQ,OAAO,KAAK,KAAK,MAAM,IAAI;;AAEtD;AACH,uBAAiB;AACjB,aAAO,IAAI,KAAK,CAAE,IAAI,QAAQ,OAAO,KAAK,KAAK,MAAM,IAAI;;AAGtD,uDAAmD;MACtD,MAAQ;;AAEL;AACH,aAAO,qBAAqB;;AAEzB,6DAAwD;AAC3D,aAAO,uBAAuB,SAAQ,iBAA+B,SAAS,cAAc,eAAe;;AAExG;AACH,UAAI,cAAc;AACd,eAAO;;AAEX,UAAI,OAAO,eAAe;AACtB,0BAAkB,cAAc,6CAC5B,2CAA2C,cAC3C;AACJ,wBAAe,CAAE,WAAW,QAAQ;AACpC,eAAO,uBAAuB;iBAEzB,sBAAsB;AAC3B,eAAO;;AAGP,eAAO,uBAAuB;;;AC/FtC;;;;;;;;;uBAqB0B;MACtB;AACI,cAAM,QAAQ,OAAO,KAAK;AAC1B,aAAK,kBAAkB;AACvB,YAAI,QAAQ;AACR,eAAK,WAAW,KAAK;;;MAG7B;AACI,iBAAS,oBAAoB;AAC7B,qBAAa,MAAK;AAClB,YAAI,KAAK,YAAY;AACjB,mBAAS,YAAY,QAAQ,GAAG,KAAK;;AAEzC,eAAO;;MAEX;AACI,eAAO;;MAEX;AACI,wBAAe,CAAE,UAAU,KAAK;AAChC,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;AAIf,SAAK,YAAY;AACjB,kBAA4B;4BACG;MAC3B;AACI,cAAM,QAAQ,OAAO,KAAK;AAC1B,aAAK,gBAAgB;AACrB,YAAI,QAAQ;AACR,iBAAO;;AAEX,aAAK,QAAQ,KAAK,SAAS,OAAO,KAAK,gBAAgB,KAAK;;MAEhE;AACI,kBAAU,oBAAoB;AAC9B,eAAO,UAAU,GAAG,KAAK;;MAE7B;AACI,eAAO;;MAEX;AACI,wBAAe,CAAE,OAAO,KAAK;AAC7B,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;AAIf,cAAU,YAAY;AACtB,kBAA4B;wBACD;MACvB;AACI,cAAM,QAAQ,OAAO,KAAK;AAC1B,aAAK,4BAA4B;AACjC,YAAI,QAAQ;AACR,iBAAO;;AAEX,aAAK,kBAAkB;AACvB,aAAK,mBACD,eAAe,KAAK,oBAAoB,KAAK;AACjD,aAAK,mBAAmB,eAAe,KAAK;AAC5C,aAAK,kBAAkB,cAAc,KAAK;AAC1C,YAAI,KAAK,cAAc;AACnB,eAAK,aAAa;mBAEb,MAAM,QAAQ,KAAK;AACxB,eAAK,aAAa,KAAK;mBAElB,OAAO,KAAK,eAAe;AAChC,eA
AK,aAAa,CAAC,KAAK;;AAGxB,gBAAM,IAAI,WAAW,sEACN,KAAK;;;MAG5B;AACI,qBAAa,mBAAmB;AAChC,2BAAmB,WAAW,MAAM;AACpC,YAAI,KAAK,cAAc;AACnB,0BAAgB,KAAK;AACjB,uBAAW,IAAI,KAAK;;;AAG5B,aAAK,QAAQ,KAAK,UAAU,SAAS,YAAY,WAAW,KAAK,kBAAkB,KAAK,kBAAkB,MAAM,KAAK;AAErH,qBAAa;AACb,YAAI,KAAK,cAAc;AACnB,uBAAa,GAAG,IAAI,WAAW,QAAQ,EAAE;AACrC,iBAAK,KAAK,WAAW;;;AAG7B,aAAK,YAAY,CAAC,IAAI,UAAU;UACxB,MAAM,WAAW;UACjB;;AAER,aAAK,QAAQ;;MAEjB;AACI,iBAAS,oBAAoB;AAC7B,eAAO,OAAM,QAAQ,KAAK,MAAM;;MAEpC;AACI,wBAAe;UACX,kBAAkB,qBAAqB,KAAK;UAC5C,kBAAkB,qBAAqB,KAAK;UAC5C,iBAAiB,oBAAoB,KAAK;UAC1C,YAAY,KAAK;;AAErB,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;AAIf,UAAM,YAAY;AAClB,kBAA4B;sBACH;MACrB;AACI,cAAM,QAAQ,OAAO,KAAK;AAC1B,aAAK,gBAAgB;AACrB,YAAI,QAAQ;AACR,iBAAO;;AAEX,YAAI,KAAK,SAAS,QAAQ,KAAK,UAAU,KAAK;AAC1C,gBAAM,IAAI,oBAAoB,4BAA4B,KAAK;;AAGnE,aAAK,QAAQ,KAAK,SAAS,OAAO,KAAK,gBAAgB,KAAK;;MAEhE;AACI,kBAAU,oBAAoB;AAC9B,eAAO,KAAI;;MAEf;AACI,eAAO;;MAEX;AACI,wBAAe,CAAE,OAAO,KAAK;AAC7B,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;AAIf,QAAI,YAAY;AAChB,kBAA4B;kCACS;MACjC;AACI,cAAM,QAAQ,OAAO,KAAK;AAC1B,aAAK,gBAAgB;AACrB,YAAI,QAAQ;AACR,iBAAO;;AAEX,aAAK,QAAQ,KAAK,SAAS,OAAO,KAAK,gBAAgB,KAAK;;MAEhE;AACI,kBAAU,oBAAoB;AAC9B,eAAO,EAAE,IAAI,OAAK,EAAE,QAAQ,KAAK,QAAQ;;MAE7C;AACI,eAAO;;MAEX;AACI,wBAAe,CAAE,OAAO,KAAK;AAC7B,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;AAIf,oBAAgB,YAAY;AAC5B,kBAA4B;4BACC;MACzB;AACI,cAAM,QAAQ,OAAO,KAAK;AAC1B,aAAK,eAAe;AACpB,YAAI,QAAQ;AACR,iBAAO;;AAEX,aAAK,UAAU,IAAI,YAAoB;AACvC,aAAK,OAAO,KAAK,QAAQ,OAAO,KAAK,eAAe,KAAK;;MAE7D;AACI,kBAAU,oBAAoB;AAC9B,eAAO,KAAK,QAAQ,GAAG,KAAK;;MAEhC;AACI,eAAO;;MAEX;AACI,wBAAe,CAAE,MAAM,KAAK;AAC5B,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;AAIf,cAAQ,YAAY;AACpB,kBAA4B;ACjO5B;;;;;;;;;AAmBO;AACH,UAAI,OAAO,UAAU;AACjB,eAAO,aAAa,OAAO;;AAG3B,YAAI,MAAM,WAAW;AACjB,gBAAM,IAAI,WAAW,OAAO,gDAAgD,yBAC1D,MAAM;;AAE5B,qBAAa,GAAG,IAAI,GAAG,EAAE;AACrB,8BAAoB,MAAM;AAC1B,cAAI,CAAC,UAAU;AACX,kBAAM,IAAI,WAAW,OAAO,gDAAgD,yBAChD,KAAK,UAAU,yCAChB;;;AAGnC,eAAO;;;AAWR,mFAA+E;AAClF,UAAI,eAAe;AACf,eAAO;;AAEX,gCAA0B,aAAc,cAAa,KAAM,YAAW;AACtE;AACA,UAAI,YAAY;AACZ,uBAAe;;AAGf,uBAAe,cAAc,oBAAoB;;AAErD,aAAO,KAAK,MAAO,gBAAe,SAAS,KAAK;;AAE7C;AACH,UAAI,WAAW;AACX,eAAO;;AAEX,UAAI,YAAY;AACZ,kBAAU,UAAU,aAAa,MAAI,CAAC,aAAa,YAAY;iBAE1D,YAAY;AACjB,kBAAU,UAAU;;AAGpB,cAAM,IAAI,WAAW,2BAA2B;;AAEpD,aAAO;;AC1EX;;;;;;;;;AA+BO;AAEH,aAAO,KAAK;AACR,wBAAgB;AAChB,YAAI,eAAe;AACf,iBAAO,WAAc,GAAG,CAAC,GAAG,GAAG,GAAG;;AAGlC,iBAAO;;;;AASZ;AACH,aAAO,KAAK;AACR,wBAAgB;AAChB,YAAI,eAAe;AACf,iBAAO,WAAc,GAAG,CAAC,GAAG,GAAG,GAAG,GAAG;;AAGrC,iBAAO;;;;AAoBZ,uDAAmD,aAAa,oCAAoC;AACvG,aAAO,KAAK;AACR,YAAI,cAAc;AACd,uBAAa;;AAEjB,wBAAgB;AAEhB,YAAI,EAAE,MAAM,WAAW;AACnB,gBAAM,IAAI,WAAW,+DACd,EAAE,MAAM;;AAEnB,YAAI,OAAO,MAAM,WAAW;AACxB,gBAAM,IAAI,WAAW,iEACd,OAAO,MAAM;;AAExB,YAAI,QAAQ,QAAQ,KAAK,MAAM,WAAW;AACtC,gBAAM,IAAI,WAAW,+DACd,OAAO,MAAM;;AAGxB,YAAI,eAAe;AACf,cAAI,WAAc,GAAG,CAAC,GAAG,GAAG;;AAEhC,YAAI,YAAY;AACZ,gBAAM,IAAI,oBAAoB;;AAGlC,gBAAQ,OAAW,GAAG,QAAQ,SAAS,YAAY,SAAS,SAAS,SAAS,OAAO;AACrF,YAAI,QAAQ;AACR,cAAI,QAAU,GAAG;;AAErB,eAAO;;;AAeR,2CAAqC,aAAa,oCAAoC;AACzF,aAAO,KAAK;AACR,wBAAgB;AAChB,eAAO,eAAe,GAAG,QAAQ,MAAM,SAAS,SAAS,YAAY;;;AAatE,2CAAqC,CAAC,GAAG,cAAc;AAC1D,aAAO,KAAK;AACR,wBAAgB;AAChB,eAAO,yBAAyB,GAAG,QAAQ,MAAM,SAAS,SAAS,YAAY;;;AAQhF,iEAA6D,CAAC,GAAG,cAAc,iDAAgD;AAClI,aAAO,KAAK;AACR,YAAI,cAAc;AACd,uBAAa;;AAEjB,wBAAgB;AAChB,YAAI,EAAE,SAAS,KAAK,EAAE,SAAS;AAC3B,gBAAM,IAAI,WAAW,6EACD,EAAE;;AAE1B,YAAI,OAAO,SAAS,KAAK,OAAO,SAAS;AACrC,gBAAM,IAAI,WAAW,8EACD,EAAE;;AAE1B,gBAAQ,sBAAsB,GAAG;AACjC,YAAI,YAAY;AACZ,gBAAM,IAAI,oBAAoB;;AAGlC,YAAI,SAAiB;UACjB,GAAG;UACH,QAAQ;UACR;UACA
,KAAK,YAAY,SAAS,SAAS;UACnC,WAAW;UACX,YAAY;UACZ;UACA,YAAA;;AAEJ,YAAI,eAAe;AACf,cAAI,WAAc,GAAG,CAAC,GAAG,GAAG,GAAG;;AAEnC,eAAO;;;AAaR,2CAAqC,CAAC,GAAG,GAAG,cAAc;AAC7D,aAAO,KAAK;AACR,wBAAgB;AAChB,eAAO,eAAe,GAAG,QAAQ,MAAM,SAAS,SAAS,YAAY;;;AAQtE,uDAAmD,CAAC,GAAG,GAAG,cAAc;AAC3E,aAAO,KAAK;AACR,YAAI,cAAc;AACd,uBAAa;;AAEjB,wBAAgB;AAChB,YAAI,EAAE,SAAS,KAAK,EAAE,SAAS;AAC3B,gBAAM,IAAI,WAAW,mEACd,EAAE;;AAEb,YAAI,OAAO,SAAS,KAAK,OAAO,SAAS;AACrC,gBAAM,IAAI,WAAW,oEACd,EAAE;;AAEb,gBAAQ,sBAAsB,GAAG;AACjC,YAAI,YAAY;AACZ,gBAAM,IAAI,oBAAoB;;AAGlC,YAAI,OAAW,GAAG,QAAQ,SAAS,YAAY,SAAS,SAAS,SAAS,SAAS;AACnF,YAAI,QAAQ;AACR,cAAI,QAAU,GAAG;;AAErB,YAAI,eAAe;AACf,cAAI,WAAc,GAAG,CAAC,GAAG,GAAG,GAAG,GAAG;;AAEtC,eAAO;;;2BAMe;MAC1B;AACI,cAAM;AACN,aAAK,OAAO;AACZ,aAAK,6BAA6B;AAClC,aAAK,2BAA2B;AAChC,iBAAS,WAAW;AACpB,aAAK,OAAO;AACZ,8BAAoC,KAAK,MAAM;AAC/C,YAAI,KAAK,SAAS,KAAK,KAAK,SAAS,KAAK,KAAK,SAAS;AACpD,gBAAM,IAAI,oBAAoB,qDAAqD,KAAK;;AAG5F,aAAK,aAAa,eAAe,KAAK,YAAY,MAAM;AACxD,aAAK,UAAU,eAAe,KAAK,WAAW,OAAO,IAAI,KAAK,SAAS,MAAM;AAC7E,aAAK,UAAU,KAAK,WAAW,OAAO,UAAU,KAAK;AACrD,yBAAiB,KAAK;AACtB,aAAK,aACD,KAAK,cAAc,OAAO,iBAAiB,KAAK;AACpD,wBAAgB,KAAK;AACrB,aAAK,aAAa,cAAc,KAAK;AACrC,aAAK,UAAU,KAAK,WAAW,OAAO,OAAO,KAAK;AAClD,aAAK,kBACD,eAAe,KAAK,mBAAmB,KAAK;AAChD,aAAK,iBAAiB,cAAc,KAAK;AACzC,aAAK,kBAAkB,eAAe,KAAK;AAC3C,aAAK,sBAAsB,eAAe,KAAK;AAC/C,aAAK,eAAe,eAAe,KAAK,gBAAgB,OAAO,IAAI,KAAK,cAAc,MAAM;AAC5F,YAAI,KAAK,SAAS,KACb,OAAM,QAAQ,KAAK,iBAAiB,KAAK,aAAa,WAAW;AAClE,gBAAM,IAAI,WAAW,iGAEd,KAAK,UAAU,KAAK;mBAEtB,KAAK,SAAS;AACnB,cAAI,OAAO,KAAK,iBAAiB;AAC7B,iBAAK,eAAe,CAAC,KAAK,cAAc,KAAK;qBAExC,KAAK,aAAa,WAAW;AAClC,kBAAM,IAAI,WAAW,0FACY,KAAK,UAAU,KAAK;;mBAGpD,KAAK,SAAS;AACnB,cAAI,OAAO,KAAK,iBAAiB;AAC7B,iBAAK,eACD,CAAC,KAAK,cAAc,KAAK,cAAc,KAAK;qBAE3C,KAAK,aAAa,WAAW;AAClC,kBAAM,IAAI,WAAW,4FACY,KAAK,UAAU,KAAK;;;;aAI1D;AAEH,iBAAqB,gBAAgB,MAAM;AAC3C,YAAI,OAAO,KAAK,eAAe,YAC3B,CAAC,wBAAsC,KAAK,YAAY,UAAU,GAAG;AACrE,gBAAM,IAAI,WAAW,oGACkB,KAAK,UAAU,KAAK;;;MAGnE;AACI,wBAAe;UACX,YAAY,KAAK;UACjB,SAAS,KAAK;UACd,SAAS,KAAK;UACd,YAAY,KAAK;UACjB,cAAc,KAAK;UACnB,YAAY,oBAAoB,KAAK;UACrC,SAAS,KAAK;UACd,iBAAiB,qBAAqB,KAAK;UAC3C,iBAAiB,qBAAqB,KAAK;UAC3C,qBAAqB,qBAAqB,KAAK;UAC/C,gBAAgB,oBAAoB,KAAK;;AAE7C,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;uBAOW;MACtB;AACI,cAAM,MAAM;AACZ,aAAK,SAAS;AACd,aAAK,WAAW;AAChB,aAAK,UAAU,KAAK;AACpB,8BAAoC,KAAK,SAAS;AAClD,aAAK,oBAAoB,eAAe,KAAK,qBAAqB,KAAK;AACvE,aAAK,mBAAmB,cAAc,KAAK;AAC3C,aAAK,oBAAoB,eAAe,KAAK;;MAEjD;AACI,qBAAa,mBAAmB;AAChC,4BAAoB,KAAK,eAAe,kBAAkB,IAAI,WAAW,SAAS;AAClF,YAAI,WAAW,gBAAgB;AAC3B,gBAAM,IAAI,WAAW,+DACR,WAAW;;AAE5B,yBAAiB,WAAW;AAC5B,4BAAoB,KAAK,WAAW,OAAO,CAAC,UAAU,KAAK;AAC3D,aAAK,SAAS,KAAK,UAAU,UAAU,aAAa,MAAM,KAAK,mBAAmB,KAAK,mBAAmB,MAAM,KAAK;AACrH,YAAI,KAAK;AACL,eAAK,OAAO,KAAK,UAAU,QAAQ,CAAC,KAAK,UAAU,MAAM,KAAK,iBAAiB,KAAK,iBAAiB,MAAM,KAAK;;AAEpH,aAAK,YAAY,CAAC,CAAE,MAAM,KAAK,OAAO,GAAG,MAAM,EAAG,cAAc;AAChE,aAAK,QAAQ;;MAEjB;AACI,eAAO,KAAK;AACR,mBAAS,oBAAoB;AAC7B;AACA,4BAAkB,KAAK,QAAQ,OAAO,OAAO,KAAK,KAAK;AACvD,sCAA4B,2BAAyC,KAAK,WAAW;AACrF,cAAI,uBAAuB,QAAQ,KAAK,SAAS;AAC7C,sBAAU,yBAAyB,QAAQ,KAAK,OAAO,QAAQ,WAAW,KAAK,SAAS,KAAK,SAAS,KAAK,YAAY,KAAK,cAAc;;AAG1I,gBAAI,KAAK,SAAS;AACd,wBAAU,eAAe,QAAQ,KAAK,OAAO,QAAQ,WAAW,KAAK,QAAQ,IAAI,KAAK,SAAS,KAAK,YAAY,KAAK,aAAa;uBAE7H,KAAK,SAAS;AAEnB,wBAAU,yBAAyB,QAAQ,KAAK,OAAO,QAAQ,WAAW,KAAK,SAAS,KAAK,SAAS,KAAK,YAAY,KAAK;uBAEvH,KAAK,SAAS;AACnB,wBAAU,eAAe,QAAQ,KAAK,OAAO,QAAQ,WAAW,KAAK,SAAS,KAAK,SAAS,KAAK,YAAY,KAAK;;AAGlH,oBAAM,IAAI,oBAAoB;;AAElC,gBAAI,KAAK,cAAc;AACnB,wBAAU,KAAK,WAAW,MAAM;;;AAGxC,iBAAO;;;MAGf;AACI,qBAAa,mBAAmB;AAChC,yBAAiB;AACjB,sBAAe,KAAK,eAAe,iBAC/B,WAAW,MAAM,GAAG,WAAW
,SAAS,KACxC,WAAW,MAAM;AACrB,qBAAa,GAAG,IAAI,MAAM,QAAQ,EAAE;AAChC,yBAAe,iBAAiB,MAAM,IAAI,KAAK,WAAW,IAAI,KAAK,SAAS,KAAK,QAAQ,IAAI,OAAO,KAAK,iBAAiB,WAAW,KAAK,eACtI,KAAK,aAAa;AACtB,mBAAS,KAAK;;AAElB,0BAAkB,CAAC,WAAW;AAC9B,YAAI,KAAK,eAAe;AACpB,wBAAc,YAAY,OAAO;AACjC,sBAAY,KAAK,KAAK;;AAGtB,sBAAY,KAAK,KAAK;AACtB,wBAAc,YAAY,OAAO;;AAErC,eAAO;;MAEX;AACI,wBAAe;UACX,SAAS,KAAK;UACd,mBAAmB,qBAAqB,KAAK;UAC7C,mBAAmB,qBAAqB,KAAK;UAC7C,kBAAkB,oBAAoB,KAAK;;AAE/C,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;aAEJ;AAEH,YAAI,CAAE,cAAa,SAAS,OAAO,KAAK,YAAY,YAChD,KAAK,UAAU;AACf,gBAAM,IAAI,WAAW,0EACN,KAAK,UAAU,KAAK;;;;2BAInB;MACxB;AACI,cAAM,GAAG;AACT,iBAAO,WAAW;;MAEtB;AACI,wBAAe,MAAM;AACrB,eAAO,QAAO;AACd,eAAO;;aAEJ;AAEH,YAAK,OAAO,KAAK,eAAe,YAC5B,CAAC,wBAAsC,KAAK,YAAY,UAAU,GAAG;AACrE,gBAAM,IAAI,WAAW,8FACc,KAAK,UAAU,KAAK;;;;AAKnE,aAAO,YAAY;AACnB,kBAA4B;2BACA;MACxB;AACI,cAAM,GAAG;AACT,iBAAO,WAAW;;MAEtB;AACI,wBAAe,MAAM;AACrB,eAAO,QAAO;AACd,eAAO;;aAEJ;AAEH,YAAI,OAAO,KAAK,eAAe;AAC3B,cAAI,CAAE,OAAM,QAAQ,KAAK,eACpB,MAAK,WAAW,WAAW,KAAK,KAAK,WAAW,WAAW;AAC5D,kBAAM,IAAI,WAAW,2FAC0B,KAAK,UAAU,KAAK;;;;;AAMnF,aAAO,YAAY;AACnB,kBAA4B;kCACS;MACjC;AACI,cAAM;AACN,aAAK,YAAY,CAAC,IAAI,UAAU,CAAE,MAAM;AACxC,YAAI,KAAK,YAAY,UAAU,KAAK,YAAY;AAC5C,gBAAM,IAAI,WAAW,uGACyB,KAAK;;;MAG3D;AACI,qBAAa,mBAAmB;AAChC,YAAI,WAAW,WAAW;AACtB,gBAAM,IAAI,WAAW,qDACjB,KAAK,UAAU;;AAEvB,4BAAoB,KAAK,eAAe,kBAAkB,IAAI,WAAW,SAAS;AAClF,YAAI,WAAW,gBAAgB;AAC3B,gBAAM,IAAI,WAAW;;AAGzB,yBAAiB,WAAW;AAC5B,4BAAoB,KAAK,WAAW,OAAO,CAAC,KAAK,SAAS;AAC1D,aAAK,SAAS,KAAK,UAAU,UAAU,aAAa,WAAW,KAAK,mBAAmB,KAAK,mBAAmB,MAAM,KAAK;AAC1H,YAAI,KAAK;AACL,eAAK,OAAO,KAAK,UAAU,QAAQ,CAAC,KAAK,UAAU,WAAW,KAAK,iBAAiB,KAAK,iBAAiB,MAAM,KAAK;;AAGzH,aAAK,YACD,CAAC,IAAI,UAAU,CAAE,MAAM,GAAG,MAAM,EAAG,cAAc;AACrD,aAAK,QAAQ;;MAEjB;AACI,eAAO,KAAS;AACZ,uBAAY,oBAAoB;AAChC,cAAI,OAAM,MAAM,WAAW;AACvB,kBAAM,IAAI,WAAW,2FACY,OAAM,MAAM;;AAEjD,6BAAmB,OAAM;AACzB,4BAAkB,WAAW;AAC7B;AACA;AACA,cAAI,KAAK,eAAe;AACpB,oBAAQ;AACR,oBAAQ;;AAGR,oBAAQ;AACR,oBAAQ;;AAEZ,yBAAe,WAAW;AAC1B,wBAAc,WAAW;AACzB,0BAAgB,KAAK,WAAW;AAChC,0BAAgB,KAAK,WAAW;AAChC,0BAAgB,KAAK,QAAQ;AAC7B,0BAAgB,KAAK,QAAQ;AAE7B,4BAAkB,aAAa,QAAQ,SAAS,SAAS,KAAK;AAC9D,2BAAiB,aAAa,OAAO,SAAS,SAAS,KAAK;AAK5D,8BAAoB,CAAC,WAAW,WAAW,UAAU,KAAK;AAC1D,cAAI,KAAK,eAAe;AACpB,qBAAQ,WAAc,QAAO,CAAC,GAAG,GAAG,GAAG;;AAE3C,wBAAc,gBAAoB,QAAO,KAAK,OAAO,QAAQ,aAAa,KAAK,SAAS,KAAK;AAC7F,cAAI,KAAK,eAAe;AACpB,sBAAU,WAAc,SAAS,CAAC,GAAG,GAAG,GAAG;;AAE/C,cAAI,KAAK,QAAQ;AACb,sBACI,QAAU,SAAS,KAAK,KAAK,QAAQ,KAAK;;AAElD,cAAI,KAAK,cAAc;AACnB,sBAAU,KAAK,WAAW,MAAM;;AAEpC,iBAAO;;;MAGf;AACI,qBAAa,mBAAmB;AAChC,4BAAoB,WAAW;AAC/B;AACA;AACA;AACA,YAAI,KAAK,eAAe;AACpB,wBAAc;AACd,uBAAa;AACb,sBAAY;;AAGZ,wBAAc;AACd,uBAAa;AACb,sBAAY;;AAEhB,wBAAgB,KAAK,WAAW;AAChC,wBAAgB,KAAK,WAAW;AAChC,wBAAgB,KAAK,QAAQ;AAC7B,wBAAgB,KAAK,QAAQ;AAC7B,oBAAY,eAAe,KAAK;AAChC,oBAAY,cACR,aAAa,YAAY,aAAa,SAAS,SAAS,KAAK;AACjE,oBAAY,aACR,aAAa,YAAY,YAAY,SAAS,SAAS,KAAK;AAChE,eAAO;;MAEX;AACI,wBAAe,MAAM;AACrB,eAAO,QAAO;AACd,eAAO;;;AAIf,oBAAgB,YAAY;AAC5B,kBAA4B;gCACO;MAC/B;AACI,cAAM,MAAM;AACZ,aAAK,gCAAgC;AACrC,aAAK,gCAAgC;AACrC,aAAK,kBAAkB;AACvB,aAAK,kBAAkB;AACvB,YAAI,QAAO,WAAW;AAClB,gBAAM,IAAI,WAAW;;AAGzB,YAAI,QAAO,qBAAqB,QAAQ,QAAO,qBAAqB,QAChE,QAAO,oBAAoB;AAC3B,gBAAM,IAAI,WAAW;;AAKzB,YAAI,QAAO,WAAW,QAAQ,QAAO,YAAY,UAC7C,QAAO,YAAY;AACnB,gBAAM,IAAI,WAAW,gBAAgB,KAAK,uEACF,KAAK,UAAU,QAAO;;AAElE,aAAK,kBACD,QAAO,mBAAmB,OAAO,IAAI,QAAO;AAChD,aAAK,uBAAuB,eAAe,QAAO,wBAAwB,KAAK;AAC/E,aAAK,uBAAuB,eAAe,QAAO;AAClD,aAAK,sBAAsB,cAAc,QAAO;AAChD,aAAK,uBAAuB,eAAe,QAAO,wBAAwB,KAAK;AAC/E,aAAK,uBAAuB,eAAe,QAAO;AAClD,aAAK,sBAAsB,cAAc,QAAO;;MAEpD;AACI,qBAAa,mBAAmB;AAChC,YAAI,WAAW,
SAAS,KAAK,OAAO;AAChC,gBAAM,IAAI,WAAW,0BAA0B,KAAK,0BAC7C,KAAK,OAAO,gCACZ,KAAK,UAAU;;AAE1B,4BAAoB,KAAK,eAAe,kBAAkB,IAAI,WAAW,SAAS;AAClF,YAAI,WAAW,gBAAgB,QAAQ,WAAW,eAAe;AAC7D,gBAAM,IAAI,WAAW,oEACJ,KAAK,UAAU,WAAW;;AAE/C,yBAAiB,WAAW;AAC5B,qCAA6B,KAAK,WAAW,OAAO,CAAC,UAAU,KAAK;AACpE,qCAA6B;AAC7B,qBAAa,GAAG,IAAI,KAAK,MAAM,EAAE;AAC7B,+BAAqB,KAAK;;AAE9B,6BAAqB,KAAK,WAAW,KAAK,iBAAiB,KAAK;AAChE,0BAAkB;AAClB,aAAK,kBAAkB,KAAK,UAAU,oBAAoB,sBAAsB,WAAW,KAAK,sBAAsB,KAAK,sBAAsB,WAAW,KAAK;AACjK,aAAK,kBAAkB,KAAK,UAAU,oBAAoB,sBAAsB,WAAW,KAAK,sBAAsB,KAAK,sBAAsB,WAAW,KAAK;AACjK,YAAI,KAAK;AACL,eAAK,OAAO,KAAK,UAAU,QAAQ,CAAC,KAAK,UAAU,WAAW,KAAK,iBAAiB,KAAK,iBAAiB,WAAW,KAAK;;AAG1H,eAAK,OAAO;;AAEhB,aAAK,YACD,CAAC,IAAI,UAAU,CAAE,MAAM,KAAK,OAAO,GAAG,MAAM,EAAG,cAAc;AACjE,aAAK,QAAQ;;MAEjB;AACI,eAAO,KAAK;AACR,mBAAS,oBAAoB;AAC7B;AACA,cAAI,KAAK,SAAS;AACd,kBAAM,IAAI,oBAAoB;qBAEzB,KAAK,SAAS;AACnB,gBAAI,KAAK,eAAe;AACpB,uBAAS,WAAc,QAAQ,CAAC,GAAG,GAAG,GAAG;;AAE7C,qBAAS,gBAAoB,QAAQ,KAAK,gBAAgB,QAAQ,KAAK,gBAAgB,QAAQ,KAAK,SAAS,KAAK,SAAS,KAAK,cAAc;;AAElJ,cAAI,KAAK;AACL,qBAAS,QAAU,QAAQ,KAAK,KAAK,QAAQ,KAAK;;AAEtD,cAAI,KAAK,cAAc;AACnB,qBAAS,KAAK,WAAW,MAAM;;AAEnC,cAAI,KAAK,eAAe;AACpB,qBAAS,WAAc,QAAQ,CAAC,GAAG,GAAG,GAAG;;AAE7C,iBAAO;;;MAGf;AACI,wBAAe,MAAM;AACrB,eAAO,QAAO;AACd,eAAO,QAAO;AACd,eAAO,QAAO;AACd,eAAO,QAAO;AACd,gBAAO,0BACH,qBAAqB,KAAK;AAC9B,gBAAO,0BACH,qBAAqB,KAAK;AAC9B,gBAAO,0BACH,qBAAqB,KAAK;AAC9B,gBAAO,0BACH,qBAAqB,KAAK;AAC9B,gBAAO,yBACH,oBAAoB,KAAK;AAC7B,gBAAO,yBACH,oBAAoB,KAAK;AAC7B,eAAO;;;AAIf,kBAAc,YAAY;kCACW;MACjC;AACI,cAAM,GAAG;;;AAIjB,oBAAgB,YAAY;AAC5B,kBAA4B;yBACA;MACxB;AACI,cAAM,GAAG;AACT,eAAO,WAAW;AAClB,aAAK,YAAY,CAAC,CAAE,MAAM;;MAE9B;AACI,wBAAe,MAAM;AACrB,eAAO,QAAO;AACd,eAAO,QAAO;AACd,eAAO;;aAEJ;AAEH,YAAI,OAAO,KAAK,eAAe,YAC3B,CAAC,wBAAsC,KAAK,YAAY,UAAU,GAAG;AACrE,gBAAM,IAAI,WAAW,yFACS,KAAK,UAAU,KAAK;;;;AAK9D,WAAO,YAAY;AACnB,kBAA4B;6BACI;MAC5B;AACI,cAAM;AACN,YAAI,OAAO,KAAK,aAAa;AACzB,eAAK,WACD,CAAC,CAAC,KAAK,UAAU,KAAK,WAAW,CAAC,KAAK,UAAU,KAAK;mBAErD,OAAO,KAAK,SAAS,OAAO;AACjC,eAAK,WAAW;YACZ,CAAC,KAAK,SAAS,IAAI,KAAK,SAAS;YACjC,CAAC,KAAK,SAAS,IAAI,KAAK,SAAS;;;AAIrC,eAAK,WAAW,KAAK;;AAEzB,aAAK,aACD,KAAK,eAAe,SAAY,iBAAiB,KAAK;AAC1D,aAAK,YAAY,CAAC,CAAE,MAAM;;MAE9B;AACI,YAAI,KAAK,eAAe;AACpB,iBAAO;YACH,WAAW;YAAI,WAAW;YAC1B,WAAW,KAAK,KAAK,SAAS,GAAG,KAAK,KAAK,SAAS,GAAG;YACvD,WAAW,KAAK,KAAK,SAAS,GAAG,KAAK,KAAK,SAAS,GAAG;;;AAI3D,iBAAO;YACH,WAAW;YACX,WAAW,KAAK,KAAK,SAAS,GAAG,KAAK,KAAK,SAAS,GAAG;YACvD,WAAW,KAAK,KAAK,SAAS,GAAG,KAAK,KAAK,SAAS,GAAG;YAAI,WAAW;;;;MAIlF;AACI,eAAO,KAAK;AACR,mBAAS,oBAAoB;AAC7B,cAAI,KAAK,eAAe;AACpB,4BAAgB,eAAiB,QAAQ,KAAK,SAAS,GAAG,IAAI,OAAO,MAAM,KAAK,KAAK,SAAS,GAAG,KAAK,KAAK,SAAS,GAAG,IAAI;AAC3H,mBAAO,eAAiB,SAAS,KAAK,SAAS,GAAG,IAAI,OAAO,MAAM,KAAK,KAAK,SAAS,GAAG,KAAK,KAAK,SAAS,GAAG,IAAI;;AAGnH,4BAAgB,eAAiB,QAAQ,KAAK,SAAS,GAAG,IAAI,OAAO,MAAM,KAAK,KAAK,SAAS,GAAG,KAAK,KAAK,SAAS,GAAG,IAAI;AAC3H,mBAAO,eAAiB,SAAS,KAAK,SAAS,GAAG,IAAI,OAAO,MAAM,KAAK,KAAK,SAAS,GAAG,KAAK,KAAK,SAAS,GAAG,IAAI;;;;MAI/H;AACI,wBAAe,CAAE,UAAU,KAAK,UAAU,YAAY,KAAK;AAC3D,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;AAIf,eAAW,YAAY;AACvB,kBAA4B;+BACM;MAC9B;AACI,cAAM;AACN,aAAK,eAAe,CAAC,GAAG;AACxB,aAAK,YAAY,CAAC,CAAE,MAAM;AAC1B,aAAK,OAAO,KAAK,QAAQ,OAAO,KAAK,eAAe,KAAK;AACzD,aAAK,aACD,KAAK,cAAc,OAAO,iBAAiB,KAAK;;MAExD;AACI,YAAI,KAAK,eAAe;AACpB,yBAAe,WAAW,MAAM,OAAO,OAAO,KAAK,KAAK,KAAK,WAAW;AACxE,wBAAc,WAAW,MAAM,OAAO,OAAO,KAAK,KAAK,KAAK,WAAW;AACvE,iBAAO,CAAC,WAAW,IAAI,WAAW,IAAI,QAAQ;;AAG9C,yBAAe,WAAW,MAAM,OAAO,OAAO,KAAK,KAAK,KAAK,WAAW;AACxE,wBAAc,WAAW,MAAM,OAAO,OAAO,KAAK,KAAK,KAAK,WAAW;AACvE,iBAAO,CAAC,WAAW,IAAI,QAAQ,OAAO,WAAW;;;MAGzD;AACI,eAAO,KAAS;
AACZ,uBAAY,oBAAoB;AAChC,6BAAmB,OAAM;AACzB,cAAI,KAAK,eAAe;AACpB,qBAAQ,WAAc,QAAO,CAAC,GAAG,GAAG,GAAG;AACvC,2BAAe,KAAK,KAAK,KAAK,WAAW;AACzC,0BAAc,KAAK,KAAK,KAAK,WAAW;AACxC,4BAAgB,OAAM,sBAAsB,CAAC,QAAQ;AACrD,mBAAO,WAAc,SAAS,CAAC,GAAG,GAAG,GAAG;;AAGxC,2BAAe,KAAK,KAAK,KAAK,WAAW;AACzC,0BAAc,KAAK,KAAK,KAAK,WAAW;AACxC,mBAAO,OAAM,sBAAsB,CAAC,QAAQ;;;;MAIxD;AACI,wBAAe,CAAE,MAAM,KAAK,MAAM,YAAY,KAAK;AACnD,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;AAIf,iBAAa,YAAY;AACzB,kBAA4B;ACn0B5B;;;;;;;;;AAoCO,6DAAuD,CAAC,GAAG,cAAc;AAC5E,aAAO,KAAK;AACR,YAAI,cAAc;AACd,uBAAa;;AAEjB,wBAAgB;AAChB,gBAAQ,sBAAsB,GAAG;AACjC,YAAI,EAAE,SAAS;AACX,gBAAM,IAAI,WAAW,mEACd,EAAE;;AAEb,YAAI,gBAAgB,SAAS;AACzB,gBAAM,IAAI,WAAW,yDACd,gBAAgB;;AAE3B,YAAI,iBAAoB,GAAG,iBAAiB,SAAS,YAAY,SAAS,SAAS,SAAS,QAAQ;AACpG,YAAI,eAAe;AACf,cAAI,WAAc,GAAG,CAAC,GAAG,GAAG,GAAG;;AAEnC,eAAO;;;kCAGsB;MACjC;AACI,cAAM,GAAG;AACT,aAAK,kBAAkB;AACvB,aAAK,kBACD,KAAK,mBAAmB,OAAO,IAAI,KAAK;AAC5C,aAAK,uBAAuB,eAAe,KAAK,wBAAwB,KAAK;AAC7E,aAAK,sBAAsB,cAAc,KAAK;AAC9C,aAAK,uBAAuB,eAAe,KAAK;;MAEpD;AACI,qBAAa,mBAAmB;AAChC,YAAI,WAAW,SAAS;AACpB,gBAAM,IAAI,WAAW,uEACQ,KAAK,UAAU;;AAEhD,4BAAoB,KAAK,eAAe,kBAAkB,IAAI;AAC9D,YAAI,WAAW,gBAAgB,QAAQ,WAAW,eAAe;AAC7D,gBAAM,IAAI,WAAW,yFACU,WAAW;;AAE9C,yBAAiB,WAAW;AAC5B,qCAA6B;UACzB,KAAK,WAAW;UAAI,KAAK,WAAW;UAAI;UAAU,KAAK;;AAE3D,aAAK,kBAAkB,KAAK,UAAU,oBAAoB,sBAAsB,MAAM,KAAK,sBAAsB,KAAK,sBAAsB,MAAM,KAAK;AACvJ,YAAI,KAAK;AACL,eAAK,OAAO,KAAK,UAAU,QAAQ,CAAC,WAAW,KAAK,kBAAkB,MAAM,KAAK,iBAAiB,KAAK,iBAAiB,MAAM,KAAK;;AAGnI,eAAK,OAAO;;AAEhB,aAAK,QAAQ;;MAEjB;AACI,eAAO,KAAK;AACR,mBAAS,oBAAoB;AAC7B,wBAAc,kBAAgB,QAAQ,KAAK,gBAAgB,QAAQ,KAAK,SAAS,KAAK,SAAS,KAAK,YAAY;AAEhH,cAAI,KAAK;AACL,sBAAU,QAAU,SAAS,KAAK,KAAK,QAAQ,KAAK;;AAExD,cAAI,KAAK,cAAc;AACnB,sBAAU,KAAK,WAAW,MAAM;;AAEpC,iBAAO;;;MAGf;AACI,qBAAa,mBAAmB;AAChC,qBAAa,KAAK,eAAe,kBAAkB,WAAW,KAAK,WAAW;AAC9E,qBAAa,KAAK,eAAe,kBAAkB,WAAW,KAAK,WAAW;AAC9E,2BAAmB,KAAK,eAAe,kBACnC,WAAW,KAAK,KAAK,kBACrB,WAAW,KAAK,KAAK;AACzB,wBAAgB,iBAAiB,MAAM,KAAK,WAAW,IAAI,KAAK,SAAS,KAAK,QAAQ;AACtF,wBAAgB,iBAAiB,MAAM,KAAK,WAAW,IAAI,KAAK,SAAS,KAAK,QAAQ;AACtF,YAAI,KAAK,eAAe;AACpB,iBAAO,CAAC,WAAW,IAAI,YAAY,SAAS;;AAI5C,iBAAO,CAAC,WAAW,IAAI,SAAS,SAAS;;;MAGjD;AACI,wBAAe,MAAM;AACrB,gBAAO,qBAAqB,KAAK;AACjC,gBAAO,0BACH,qBAAqB,KAAK;AAC9B,gBAAO,0BACH,qBAAqB,KAAK;AAC9B,gBAAO,yBACH,oBAAoB,KAAK;AAC7B,eAAO;;;AAIf,oBAAgB,YAAY;AAC5B,kBAA4B;ACzI5B;;;;;;;;;AAkDO;AACH,UAAI,MAAM,QAAQ;AACd,YAAI,gBAAgB,QAAQ,aAAa;AACrC,gBAAM,IAAI,WAAW;;AAGzB,YAAI,gBAAgB;AAChB,sBAAY,OAAO,MAAM,OAAO,SAAS,cAAc,OAAO;AAC9D,mBAAS,OAAO,MAAM,GAAG,OAAO,SAAS;;AAE7C,YAAI,OAAO,SAAS;AAChB,yBAAe,OAAO,MAAM,GAAG,OAAO;;AAE1C,iBAAS,OAAO;;AAEpB;AACI,YAAI,KAAK,QAAQ,MAAM,QAAQ;AAC3B,iBAAO;;AAGP,iBAAO,CAAC;;;AAGhB,qBAAe,aAAa;AAC5B,kBAAY,aAAa;AACzB,aAAO,CAAE,QAAQ,cAAc;;AA6C5B,oEAAgE,iCAAiC,4BAA4B;AAChI,aAAO,KAAS;AACZ,qBAAa,OAAO,MAAM;AAC1B,YAAI,OAAO;AACP,gBAAM,IAAI,WAAW,uCAAuC;;AAIhE,qBAAa,CAAC,GAAG,GAAG,OAAO,QAAiB,GAAG;AAC/C,iBAAS,WAAc,QAAQ;AAC/B,YAAI,aAAa;AACb,gBAAM,IAAI,oBAAoB;;AAIlC,YAAI;AACA,kBAAQ,KAAK;;AAGjB,YAAI,QAAQ;AACR,iBAAO,KAAK,OAAO,QAAQ,OAAO;AAClC,cAAI,KAAK,SAAS,OAAO;AACrB,mBAAO,WAAe,MAAM;;AAEhC,iBAAO,WAAc,MAAM;;AAE/B,YAAI;AACA,mBAAS,SAAY,QAAQ;AAC7B,cAAI,QAAQ;AACR,mBAAO,SAAY,MAAM;;;AAYjC,+BAAuB;AACvB;AACA,qBAAa;AACb,0BAAkB,OAAO,MAAM;AAC/B,8BAAsB,QAAY;AAClC;AACA,YAAI,QAAQ;AACR,yBAAe,QAAY;;AAE/B,qBAAa,GAAG,IAAI,WAAW,EAAE;AAC7B,+BAAqB,cAAc;AACnC,8BAAoB,KAAS,MAAM,aAAa,cAAc;AAC9D,cAAI,QAAQ;AACR,yBAAa,YAAY;AACzB,qBAAS,YAAY;;AAGrB,kCAAsB,KAAS;AAC3B,+BAAiB,aAAa;AAC9B,kCAAoB,UAAa,UAAU,IAAI;AAE/C,6BAAe,YAAY,GAAG,IAAI,UAAU,IAAI,OAAO,GAAG,IAAI;AAC9D,gCAAkB,OAAO,IAAI;AACzB,uBAAO,YAAY,GAAG,GAAG,IAAI,U
AAU,IAAI,MAAM,IAAI;;AAEzD,qBAAO,CAAE,QAAQ;;AAErB,yBAAa,cAAc;AAC3B,qBAAS,cAAc;;AAE3B,cAAI;AACA,2BAAe,KAAK;;;AAG5B;AACA,YAAI;AACA,uBAAa;AACb,oBAAU,MAAU,gBAAgB;;AAExC,eAAO,CAAC,YAAY,SAAS;;;sBAGZ;MACrB;AACI,cAAM;AACN;AACA,YAAI,KAAK,QAAQ;AACb,gBAAM,IAAI,WAAW;mBAEhB,MAAM,QAAQ,KAAK;AACxB,iBAAO,IAAI,gBAAgB,CAAE,OAAO,KAAK;;AAGzC,iBAAO,KAAK;;AAEhB,YAAI,KAAK,aAAa;AAClB,gBAAM,IAAI,WAAW;;AAGzB,aAAK,OAAO;AACZ,aAAK,kBACD,KAAK,mBAAmB,OAAO,QAAQ,KAAK;AAChD,aAAK,cAAc,KAAK,eAAe,OAAO,QAAQ,KAAK;AAC3D,aAAK,cAAc,KAAK,eAAe,OAAO,QAAQ,KAAK;AAC3D,aAAK,YAAY,KAAK,YAAY,OAAO,QAAQ,KAAK;AACtD,aAAK,SAAS,KAAK,UAAU,OAAO,QAAQ,KAAK;AACjD,aAAK,kBAAkB;AACvB,aAAK,YAAY,CAAC,IAAI,UAAU,CAAE,MAAM;AACxC,aAAK,YAAY;AACjB,aAAK,UAAU;AAEf,aAAK,eAAe;AAGpB,aAAK,aAAa;;MAItB;AACI,YAAI,KAAK,WAAW;AAChB,4BAAkB,MAAM,QAAQ,KAAK,KAAK,aAAa,KAAK,KAAK,UAAU,SAAS;AACpF,iBAAO,QAAiB,GAAG,WAAW,IAAI,OAAK;;AAG/C,iBAAO,KAAK;;;MAKpB;AACI,aAAK,UAAU;;MAEnB;AACI,YAAI,gBAAgB;AAChB,uBAAa,WAAW;;AAE5B,qBAAa;AAEb,wBAAgB,KAAK,KAAK;AAC1B,YAAI,CAAC,MAAM,QAAQ;AACf,sBAAY,CAAC;;AAEjB,0BAAkB,UAAU;AAC5B;AACA,YAAI,KAAK;AACL,wBAAc,CAAC,WAAW,IAAI,WAAW,IAAI;;AAG7C,wBAAc,CAAC,WAAW,IAAI;;AAElC,YAAI,KAAK;AACL,6BAAmB;AACnB,4BAAkB;AACd,uBAAW,KAAK,CAAC,WAAW,IAAI;;AAEpC,iBAAO,CAAC,aAAa,OAAO;;AAG5B,iBAAO;;;MAGf;AACI,eAAO,KAAS;AACZ,cAAI,MAAM,QAAQ;AACd,mBAAO,KAAK;;AAEhB,6BAAmB,KAAK,kBAAkB,OAAO;AACjD,cAAI,KAAK;AACL,8BAAkB,KAAK,OAAO,IAAI,OAAK;AACvC,mBAAO,CAAC,YAAY,OAAO;;AAG3B,mBAAO;;;;UAUf;AACA,YAAI,KAAK,WAAW;AAChB,4BAAkB,MAAM,QAAQ,KAAK,KAAK,aAAa,KAAK,KAAK,UAAU,SAAS;AACpF,yBAAe;AACf,uBAAa,GAAG,IAAI,WAAW,EAAE;AAC7B,mBAAO,KAAK;;AAEhB,iBAAO;;AAGP,iBAAO,KAAK;;;UAGhB;AACA,aAAK,UAAU;;MAEnB;AAGI,8BAAsB;AACtB,YAAI,KAAK,gBAAgB;AACrB,gBAAM,IAAI,oBAAoB;;AAElC,YAAI,gBAAgB;AAChB,uBAAa,WAAW;;AAE5B,qBAAa;AACb,0BAAkB,KAAK,WAAW,WAAW,KAAK;AAClD,yBAAiB,WAAW,MAAM;AAClC,aAAK,UAAU,KAAK,IAAI,UAAU,CAAE,OAAO,CAAC,WAAW,MAAM,GAAG;AAGhE,+BAAuB,CAAC,WAAW,IAAI,OAAO,WAAW,MAAM;AAC/D,YAAI,iBAAiB;AACjB,gBAAM,IAAI,oBAAoB;;AAG9B,eAAK,KAAK,MAAM;;AAGpB;AACA,YAAI,MAAM,QAAQ,KAAK,KAAK;AACxB,sBAAY,KAAK,KAAK;;AAGtB,sBAAY,CAAC,KAAK,KAAK;;AAE3B,YAAI,KAAK,aAAa;AAClB,cAAI,CAAC,aAAiB,KAAK,UAAU,IAAI,UAAQ,KAAK,MAAM,KAAK,MAAM,SAAS,KAAK;AACjF,kBAAM,IAAI,WAAW,6FACqB,KAAK,wCACd,KAAK,KAAK;;;AAI/C,eAAK,YACD,UAAU,IAAI,SAAO,IAAI,UAAU,CAAE,OAAO,CAAC,MAAM;;AAE3D,YAAI,KAAK;AACL,eAAK;;;MAoBb,+BAA+B;AAC3B,aAAK;AACD,cAAI,CAAC,KAAK;AACN,kBAAM,IAAI,eAAe;;AAE7B,4BAAkB,KAAK,UAAU,GAAG,MAAM;AAC1C,cAAI,aAAa;AACb,kBAAM,IAAI,WAAW;;AAQzB,cAAI,KAAK,WAAW;AAChB,gBAAI,MAAM,QAAQ,KAAK,KAAK;AACxB,mBAAK,UACD,KAAK,KAAK,UAAU,IAAI,SAAO,OAAU,CAAC,WAAW;;AAGzD,mBAAK,UAAU,CAAC,OAAU,CAAC,WAAW,KAAK,KAAK;;qBAG/C,UAAU;AAEf,oBAAY,KAAK;AAEjB,gBAAI,KAAK,cAAc;AACnB,sBAAY,KAAK;AACjB,mBAAK,aAAa;;AAEtB,gBAAI,MAAM,QAAQ,KAAK,KAAK;AACxB,mBAAK,UACD,KAAK,KAAK,UAAU,IAAI,SAAO,OAAU,CAAC,WAAW;;AAGzD,mBAAK,QAAQ,KAAK,OAAU,CAAC,WAAW,KAAK,KAAK;;;AAItD,gBAAI,CAAC,MAAM,QAAQ;AACf,uBAAS,CAAC;;AAEd,gBAAI,OAAO,WAAW,KAAK,QAAQ;AAC/B,oBAAM,IAAI,WAAW,SAAS,KAAK,gBAAgB,KAAK,QAAQ,oCACzC,OAAO,0CACb;;AAErB,gBAAI,aAAa;AAKb,mBAAK,WAAW,KAAK,KAAK,QAAQ;;AAGlC,sBAAY,KAAK;;AAErB,8BAAiB,GAAG,SAAQ,KAAK,QAAQ,QAAQ,EAAE;AAC/C,4BAAc,OAAO;AACrB,0BAAY,MAAM,QAAQ,KAAK,KAAK,aAChC,KAAK,KAAK,UAAU,UACpB,KAAK,KAAK;AACd,oCAAsB,CAAC,WAAW;AAClC,kBAAI,CAAC,aAAiB,MAAM,OAAO;AAC/B,sBAAM,IAAI,WAAW,SAAS,qCAAoC,KAAK,wBACjD,iCAAiC,MAAM;;AAEjE,mBAAK,QAAQ,UAAS;;;AAG9B,eAAK,UAAU,KAAK,QAAQ,IAAI,WAAS,KAAS,MAAM;;;MAGhE;AAEI,2BAAmB,UAAU,OAAO,OAAO,OAAO;AAClD,wBAAgB,UAAU,OAAO,OAAO,OAAO;AAC/C,YAAI,UAAU;AACV,mBAAS;;AAEb,6BAAqB,gBAAgB,QAAQ,cAAc,WAAW,KAAK;AAC3E,iBAAS,aAAa;AACtB,uBAAe,aAAa;AAC5B,oBAAY,aAAa;AAIzB,+BAAuB;AACvB,8BAAsB;AACtB,YAAI,gBAAgB;AAChB,iBAAO,kBAAk
B;AACzB,6BAAmB,iBAAiB,OAAO;AAC3C,eAAK,YAAY;AACjB,8BAAoB;AAChB,iBAAK,UAAU,KAAK,IAAI,UAAU,CAAE,OAAO,MAAM;;AAKrD,4BAAkB,gBAAgB,OAAO,KAAK;;AAElD,YAAI,aAAa;AACb,iBAAO,eAAe;AACtB,6BAAmB,iBAAiB,OAAO;AAE3C,eAAK,eAAe,UAAU;;AAElC,yBAAiB,iBAAiB,cAAc;AAChD,YAAI;AAEA,4BAAkB,CAAC,QAAQ,OAAO;AAClC,gCAAsB,KAAK,UAAU,OAAO;AAE5C,oCAA0B,KAAK;AAC/B,eAAK,YAAY;AACjB,yBAAe,MAAM,MAAM,WAAW;AACtC,eAAK,YAAY;AACjB,iBAAO;;AAGP,iBAAO,MAAM,MAAM,QAAQ;;;MAInC;AAII,eAAO,KAAK;AACR,uBAAa,UAAU,OAAO,OAAO,OAAO;AAC5C,2BAAiB,UAAU,OAAO,OAAO,OAAO;AAChD,6BAAmB,UAAU,OAAO,OAAO,OAAO;AAClD,mBAAS,oBAAoB;AAC7B,cAAI,gBAAgB;AAChB,gBAAI,KAAK;AACL,6BAAe,KAAK;;AAGpB,6BAAe,KAAK,gBAAgB;;;AAG5C,4BAAkB,MAAM,QAAQ,KAAK,KAAK,aAAa,KAAK,KAAK,UAAU,SAAS;AACpF,cAAI,aAAa,WAAW;AACxB,kBAAM,IAAI,WAAW,iBAAiB,qCAC/B,aAAa;;AAExB,cAAI,KAAK;AACL,oBAAQ,KAAK;;AAEjB,iCAAuB,CAAE;AAEzB,wBAAa;AAGT,6BAAgB,KAAK,KAAK,KAAK,CAAC,SAAQ,OAAO,UAAS;AAExD,mBAAO,CAAC,SAAQ,IAAI,SAAQ,MAAM;;AAGtC,6BAAmB,IAAI,OAAM,QAAQ,cAAc,KAAK,aAAa,MAAM,MAAM,KAAK,QAAQ,KAAK;AACnG,6BAAmB,WAAW;AAC9B,0BAAgB,WAAW;AAC3B,yBAAe,WAAW;AAC1B,cAAI,KAAK;AACL,iBAAK,YAAY,QAAQ;;AAE7B,yBAAe,KAAK,kBAAkB,UAAU;AAEhD,cAAI,KAAK;AACL,mBAAO,CAAC,QAAQ,OAAO;;AAGvB,mBAAO;;;;MAInB;AACI,eAAO,KAAK;AAGR,6BAAmB,OAAU,OAAO;AAEpC,yBAAe,MAAQ,cAAc,CAAC,GAAG;AACzC,yBAAe,aAAa;AAC5B,cAAI,MAAM,QAAQ,KAAK,KAAK;AACxB,mBAAO,KAAK,KAAK,UAAU,IAAI,SAAO,MAAM,IAAI,OAAO,cAAc,CAAC,GAAG,QAAQ;;AAGjF,mBAAO,KAAK,KAAK,YAAY,IACzB,CAAC,OAAO,cAAc,CAAC,GAAG,KAAK,KAAK,eACpC,CAAC;;;;UAIb;AACA,YAAI,CAAC,KAAK;AACN,iBAAO;;AAGX,eAAO,KAAK,KAAK;;UAEjB;AAEA,YAAI,CAAC,KAAK;AACN,iBAAO,KAAK,KAAK;;AAErB,eAAO,KAAK,KAAK;;MAErB;AACI,cAAM,6BAA6B;AACnC,YAAI,KAAK,QAAQ;AACb,eAAK,KAAK,6BAA6B;;;MAG/C;AACI,2BAAmB,MAAM;AACzB,wBAAe;UACX,iBAAiB,KAAK;UACtB,aAAa,KAAK;UAClB,aAAa,KAAK;UAClB,UAAU,KAAK;UACf,QAAQ,KAAK;;AAEjB,YAAI,KAAK,gBAAgB;AACrB,kBAAO,kBAAkB,KAAK;;AAElC,2BAAmB,KAAK,KAAK;AAC7B,YAAI,KAAK,mBAAmB,IAAI;AAC5B,kBAAO,UAAU;YACb,WAAa,KAAK,KAAK;YACvB,QAAU;;;AAIlB,eAAO,OAAO,OAAO,IAAI,YAAY,YAAY;;aAG9C,yCAAwC;AAC3C,2BAAmB,QAAO;AAC1B,qBAAa,YAAY,YAAY;AACrC,eAAO,IAAI,IAAI,OAAO,OAAO,SAAQ,CAAE;;;AAI/C,QAAI,YAAY;AAChB,kBAA4B;0BASC;;gCAEM;MAC/B;AACI,cAAM;AACN,aAAK,qBAAqB;AAC1B,aAAK,6BAA6B;AAClC,aAAK,gCAAgC;AACrC,aAAK,2BAA2B;AAChC,aAAK,QAAQ,KAAK;AAClB,8BAAsB,KAAK,OAAO;AAClC,aAAK,aAAa,cAAc,KAAK,cAAc,OAAO,KAAK,qBAAqB,KAAK;AACzF,aAAK,UAAU,KAAK,WAAW,OAAO,OAAO,KAAK;AAClD,aAAK,oBAAoB,eAAe,KAAK,qBAAqB,KAAK;AACvE,aAAK,uBAAuB,eAAe,KAAK,wBAAwB,KAAK;AAC7E,aAAK,kBACD,eAAe,KAAK,mBAAmB,KAAK;AAChD,aAAK,oBAAoB,eAAe,KAAK;AAC7C,aAAK,uBAAuB,eAAe,KAAK;AAChD,aAAK,kBAAkB,eAAe,KAAK;AAC3C,aAAK,mBAAmB,cAAc,KAAK;AAC3C,aAAK,sBAAsB,cAAc,KAAK;AAC9C,aAAK,iBAAiB,cAAc,KAAK;AACzC,aAAK,UAAU,MAAe,CAAC,GAAG,MAAe,CAAC,GAAG,KAAK,WAAW,OAAO,IAAI,KAAK;AACrF,aAAK,mBAAmB,MAAe;UACnC;UACA,MAAe,CAAC,GAAG,KAAK,oBAAoB,OAAO,IAAI,KAAK;;AAEhE,aAAK,YAAY,KAAK;AACtB,aAAK,cAAc;AACnB,aAAK,uBAAuB;;MAEhC;AACI,qBAAa,mBAAmB;AAEhC,aAAK,SAAS,KAAK,UAAU,UAAU,CAAC,WAAW,WAAW,SAAS,IAAI,KAAK,QAAQ,MAAM,KAAK,mBAAmB,KAAK,mBAAmB,MAAM,KAAK;AACzJ,aAAK,kBAAkB,KAAK,UAAU,oBAAoB,CAAC,KAAK,OAAO,KAAK,QAAQ,MAAM,KAAK,sBAAsB,KAAK,sBAAsB,MAAM,KAAK;AAC3J,YAAI,KAAK;AACL,eAAK,OAAO,KAAK,UAAU,QAAQ,CAAC,KAAK,QAAQ,MAAM,KAAK,iBAAiB,KAAK,iBAAiB,MAAM,KAAK;;AAG9G,eAAK,OAAO;;AAEhB,aAAK,QAAQ;;MAQjB;AACI,eAAO,KAAK;AACR,mBAAS;AACT,cAAI,OAAO,WAAW;AAClB,kBAAM,IAAI,WAAW,8CAA8C,OAAO;;AAE9E,2BAAiB,OAAO;AACxB,mBAAS,OAAO;AAChB,2BAAiB,OAAO,eAAe,OAAO,QAAQ,OAAO;AAC7D,cAAI,IAAI,KAAK,WAAW,KAAK,UAAU,KAAK,KAAK,eAAe;AAC5D,iBAAK,cAAc,oBAAoB;cACnC,MAAM,MAAM,UAAa;cACzB,MAAM,KAAK;cACX;;;AAGR,cAAI,IAAI,KAAK,oBAAoB,KAAK,mBAAmB,KACrD,KAAK,wBAAwB;AAC7B,iBAAK,uBAAuB,oBAAoB;cAC5C,MAAM,MAAM,UAAa;cACzB,MAAM,KAAK;cACX;;;AAG
R;AACA,yBAAe,KAAK;AACpB,4BAAkB,KAAK;AACvB,cAAI,UAAU;AACV,gBAAI,MAAM,KAAQ,QAAQ,SAAS,KAAK,OAAO;;AAG/C,gBAAI,MAAM,QAAQ,KAAK,OAAO;;AAElC,cAAI,KAAK,QAAQ;AACb,gBAAI,QAAU,GAAG,KAAK,KAAK;;AAE/B,cAAI,aAAa;AACb,yBAAa,KAAQ,YAAY;;AAErC,uBAAa,MAAQ,GAAG,MAAM,YAAY,KAAK,gBAAgB;AAC/D,cAAI,KAAK,cAAc;AACnB,qBAAS,KAAK,WAAW,MAAM;;AAGnC,iBAAO,CAAC,QAAQ;;;MAGxB;AACI,2BAAmB,MAAM;AACzB,wBAAe;UACX,OAAO,KAAK;UACZ,YAAY,oBAAoB,KAAK;UACrC,SAAS,KAAK;UACd,mBAAmB,qBAAqB,KAAK;UAC7C,sBAAsB,qBAAqB,KAAK;UAChD,iBAAiB,qBAAqB,KAAK;UAC3C,mBAAmB,qBAAqB,KAAK;UAC7C,sBAAsB,qBAAqB,KAAK;UAChD,iBAAiB,qBAAqB,KAAK;UAC3C,qBAAqB,qBAAqB,KAAK;UAC/C,kBAAkB,oBAAoB,KAAK;UAC3C,qBAAqB,oBAAoB,KAAK;UAC9C,gBAAgB,oBAAoB,KAAK;UACzC,SAAS,KAAK;UACd,kBAAkB,KAAK;;AAE3B,eAAO,OAAO,OAAO,IAAI,YAAY;;;AAI7C,kBAAc,YAAY;AAC1B,kBAA4B;4BACG;MAC3B;AACI,aAAK,OAAO,IAAI,cAAc;AAC9B,cAAM;;MAGV;AACI,eAAO,KAAK;AACR,cAAI,KAAK,KAAK,eAAe;AACzB,oBAAY,KAAK,KAAK;AACtB,iBAAK,KAAK,cAAc;;AAE5B,cAAI,KAAK,KAAK,wBAAwB;AAClC,oBAAY,KAAK,KAAK;AACtB,iBAAK,KAAK,uBAAuB;;AAErC,uBAAa,UAAU,OAAO,OAAO,OAAO;AAC5C,2BAAiB,UAAU,OAAO,OAAO,OAAO;AAChD,+BAAqB,UAAU,OAAO,OAAO,OAAO;AACpD,iBAAO,MAAM,KAAK,QAAQ,CAAE,MAAM,UAAU;;;aAI7C;AACH,eAAO,IAAI,IAAI;;;AAIvB,cAAU,YAAY;AACtB,kBAA4B;0BACC;MACzB;AACI,cAAM;AACN,aAAK,qBAAqB;AAC1B,aAAK,+BAA+B;AACpC,aAAK,6BAA6B;AAClC,aAAK,gCAAgC;AACrC,aAAK,2BAA2B;AAChC,YAAI,KAAK;AACL,gBAAM,IAAI,WAAW;;AAEzB,aAAK,QAAQ,KAAK;AAClB,8BAAsB,KAAK,OAAO;AAClC,aAAK,aAAa,cAAc,KAAK,eAAe,SAAY,KAAK,qBACjE,KAAK;AACT,aAAK,sBAAsB,cAAc,KAAK,wBAAwB,SAClE,KAAK,+BACL,KAAK;AACT,aAAK,UAAU,KAAK,WAAW,OAAO,OAAO,KAAK;AAClD,aAAK,oBAAoB,eAAe,KAAK,qBAAqB,KAAK;AACvE,aAAK,uBAAuB,eAAe,KAAK,wBAAwB,KAAK;AAC7E,aAAK,kBACD,eAAe,KAAK,mBAAmB,KAAK;AAChD,aAAK,oBAAoB,eAAe,KAAK;AAC7C,aAAK,uBAAuB,eAAe,KAAK;AAChD,aAAK,kBAAkB,eAAe,KAAK;AAC3C,aAAK,mBAAmB,cAAc,KAAK;AAC3C,aAAK,sBAAsB,cAAc,KAAK;AAC9C,aAAK,iBAAiB,cAAc,KAAK;AACzC,aAAK,UAAU,MAAe,CAAC,GAAG,MAAe,CAAC,GAAG,KAAK,WAAW,OAAO,IAAI,KAAK;AACrF,aAAK,mBAAmB,MAAe;UACnC;UACA,MAAe,CAAC,GAAG,KAAK,oBAAoB,OAAO,IAAI,KAAK;;AAEhE,aAAK,iBAAiB,KAAK;AAC3B,aAAK,YAAY,KAAK;AACtB,aAAK,cAAc;AACnB,aAAK,uBAAuB;;MAEhC;AACI,qBAAa,mBAAmB;AAChC,yBAAiB,WAAW,WAAW,SAAS;AAChD,aAAK,SAAS,KAAK,UAAU,UAAU,CAAC,UAAU,KAAK,QAAQ,IAAI,MAAM,KAAK,mBAAmB,KAAK,mBAAmB,MAAM,KAAK;AACpI,aAAK,kBAAkB,KAAK,UAAU,oBAAoB,CAAC,KAAK,OAAO,KAAK,QAAQ,IAAI,MAAM,KAAK,sBAAsB,KAAK,sBAAsB,MAAM,KAAK;AAC/J,YAAI,KAAK;AACL,eAAK,OAAO,KAAK,UAAU,QAAQ,CAAC,KAAK,QAAQ,IAAI,MAAM,KAAK,iBAAiB,KAAK,iBAAiB,MAAM,KAAK;;AAGlH,eAAK,OAAO;;AAIhB,aAAK,QAAQ;;MAEjB;AACI,eAAO,KAAK;AACR,mBAAS;AACT,cAAI,OAAO,WAAW;AAClB,kBAAM,IAAI,WAAW,uDACd,OAAO;;AAElB,2BAAiB,OAAO,eAAe,OAAO,QAAQ,OAAO;AAC7D,yBAAe,OAAO;AACtB,mBAAS,OAAO;AAIhB,cAAI,IAAI,KAAK,WAAW,KAAK,UAAU,KAAK,KAAK,eAAe;AAC5D,iBAAK,cAAc,oBAAoB;cACnC,MAAM,MAAM,UAAa;cACzB,MAAM,KAAK;cACX;cACA,OAAO;;;AAGf,cAAI,IAAI,KAAK,oBAAoB,KAAK,mBAAmB,KACrD,KAAK,wBAAwB;AAC7B,iBAAK,uBAAuB,oBAAoB;cAC5C,MAAM,MAAM,UAAa;cACzB,MAAM,KAAK;cACX;cACA,OAAO;;;AAGf,yBAAe,KAAK;AACpB,4BAAkB,KAAK;AACvB;AACA;AACA;AACA,cAAI,IAAI,KAAK,WAAW,KAAK,UAAU;AACnC,qBAAS,KAAQ,QAAQ,OAAO;;AAEpC,wBAAc,MAAM,QAAQ,KAAK,OAAO;AACxC,cAAI,KAAK;AACL,sBAAU,QAAU,SAAS,KAAK,KAAK;;AAE3C,cAAI,IAAI,KAAK,oBAAoB,KAAK,mBAAmB;AACrD,uBAAW,KAAQ,UAAU,UAAU;;AAE3C,uCAA6B,KAAK,gBAAgB;AAClD,6BAAmB,OAAU,sBAAsB,CAAC,IAAI,KAAK,OAAO,KAAK,QAAQ,qBAAqB,OAAO;AAC7G,8BAAoB,MAAM,UAAU;AACpC,+BAAqB,OAAU,SAAS,GAAG,QAAQ,OAAO;AAC1D,2CAAiC,OAAU,aAAa,GAAG,YAAY,OAAO;AAC9E,cAAI,KAAK,oBAAoB,MAAM,MAAQ,IAAI;AAC/C,cAAI,KAAK,oBAAoB,MAAM,MAAQ,IAAI;AAC/C,6BAAmB,MAAM,KAAQ,GAAG,WAAW;AAC/C,eAAK,KAAK,WAAW,MAAM,MAAQ,IAAI;AACvC,oBAAU,MAAQ,KAAQ,GAAG,WAAW,KAAQ,MAAQ,GAAG,IAAQ,KAAK;AAExE,iBAAO,CAAC,GAAG;;;MAGnB;AACI,2BAAmB,MAAM;A
AQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;;;;;;AC5dzB,mBAAa;MAChB;QACI,UAAY;QACZ,UAAY;QACZ,QAAU,CAAC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;;MAErD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;UACtC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;;;MAG9C;QACI,UAAY;QACZ,UAAY;QACZ,QAAU,CAAC,CAAE,OAAS,GAAG,KAAO,GAAG,MAAQ,WAAW,MAAQ;;MAElE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;UACnE,CAAE,QAAU,cAAc,MAAQ,aAAa,MAAQ;UACvD,CAAE,QAAU,eAAe,MAAQ,cAAc,MAAQ;;;MAGjE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;;QAE1C,OAAS;UACL,CAAE,QAAU,SAAS,MAAQ,SAAS,MAAQ;UAC9C,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;UAC7D,CAAE,QAAU,gBAAgB,MAAQ,eAAe,MAAQ;UAC3D,CAAE,QAAU,oBAAoB,MAAQ,kBAAkB,MAAQ;UAClE;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;;UAEZ,CAAE,QAAU,qBAAqB,MAAQ,QAAQ,MAAQ;;;MAGjE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,iBAAiB,MAAQ;UAC/C,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,iBAAiB,MAAQ;UAC/C,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS,CAAC;UACF,QAAU;UACV,MAAQ;UACR,MAAQ;UACR,cAAgB;;;MAG5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,iBAAiB,MAAQ;UAC/C,CAAE,OAAS,GAAG,MAAQ,WAAW,MAAQ;UACzC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,SAAS,MAAQ,SAAS,MAAQ;UAC9C,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;;;MAGrE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,iBAAiB,MAAQ;UAC/C,CAAE,OAAS,GAAG,MAAQ,WAAW,MAAQ;UACzC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS,CAAC,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ;;MAExD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,iBAAiB,MAAQ;UAC/C,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,SAAS,MAAQ,SAAS,MAAQ;UAAW;YACrD,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;MAI5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,iBAAiB,MAAQ;UAC/C,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,WAAW,MAAQ;UACzC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS,CAAC,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ;;MAExD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,iBAAiB,MAAQ;UAC/C,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;;MAGhD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU,CAAC,CAAE,OAAS,GAAG,MAAQ,iBAAiB,MAAQ;;MAE9D;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;UACtC,CAAE,OAAS,GAAG,KAAO,GAAG,MAAQ,QAAQ,MAAQ;;QAEpD,OAAS;UACL,CAAE,QAAU,eAAe,MAAQ,cAAc,MAAQ;UACzD,CAAE,QAAU,eAAe,MAAQ,cAAc,MAAQ;;;MAGjE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;UACtC,CAAE,OAAS,GAAG,KAAO,GAAG,MAAQ,QAAQ,MAAQ;;QAEpD,OAAS;UACL,CAAE,QAAU,eAAe,MAAQ,cAAc,MAAQ;UACzD,CAAE,QAAU,eAAe,MAAQ,cAAc,MAAQ;;;MAGjE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,KAAO,GAAG,MAAQ,QAAQ,MAAQ;;QAEpD,OAAS;UACL,CAAE,QAAU,QAAQ,MAAQ,QAAQ,MAAQ;UAC5C,CAAE,QAAU,QAAQ,MAAQ,QAAQ,MAAQ;;;MAGpD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,KAAO,GAAG,MAAQ,QAAQ,MAAQ;;QAEpD,OAAS;UACL,CAAE,QAAU,QAAQ,MAAQ,QAAQ,MAAQ;UAC5C,CAAE,QAAU,
QAAQ,MAAQ,QAAQ,MAAQ;;;MAGpD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,WAAW,MAAQ;UACzC,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;;QAElD,OAAS,CAAC,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;;MAE3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,WAAW,MAAQ;UACzC,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;UAC9C,CAAE,OAAS,GAAG,MAAQ,eAAe,MAAQ;;QAEjD,OAAS,CAAC,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;;MAE3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;UAC9C,CAAE,OAAS,GAAG,MAAQ,WAAW,MAAQ;UACzC,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;;QAElD,OAAS,CAAC,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;;MAE3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;UAC9C,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;;QAElD,OAAS,CAAC,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;;MAE3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;UAC9C,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS,CAAC,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;;MAE3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;UAC9C,CAAE,OAAS,GAAG,MAAQ,eAAe,MAAQ;;QAEjD,OAAS,CAAC,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;;MAE3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;;QAElD,OAAS,CAAC,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;;MAE3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;UAC9C,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;;QAElD,OAAS;UACL,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;UAC7D,CAAE,QAAU,gBAAgB,MAAQ,eAAe,MAAQ;;;MAGnE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;UAC9C,CAAE,OAAS,GAAG,MAAQ,WAAW,MAAQ;;QAE7C,OAAS,CAAC,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;;MAE3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;;QAElD,OAAS;UACL,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;UAC7D,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;;;MAGrE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;UAC9C,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;;QAElD,OAAS,CAAC,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;;MAE3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;UAC9C,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;;;;;;;;ACvUzE;;;;;;;;;;;;;;;;AAgBO,mBAAa;MAChB;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAClD,CAAE,QAAU,WAAW,MAAQ,OAAO,MAAQ;UAAY;YACtD,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,SAAS,MAAQ,cAAc,MAAQ;UACnD,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAClD,CAAE,QAAU,WAAW,MAAQ,OAAO,MAAQ;UAAY;YACtD,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,SAAS,MAAQ,cAAc,MAAQ;UACnD,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAClD,CAAE,QAAU,WAAW,MAAQ,OAAO,MAAQ;UAC9C,CAAE,QAAU,SAAS,MAAQ,cAAc,MAAQ;UAAc;YAC7D,QAAU;YACV,MAAQ;YACR,MAAQ;;UAEZ,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAClD,CAAE,QAAU,WAAW,MAAQ,OAAO,MAAQ;UAAY;YACtD,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,SAAS,MAAQ,cAAc,MAAQ;UACnD,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAC
lD,CAAE,QAAU,WAAW,MAAQ,OAAO,MAAQ;UAAY;YACtD,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,SAAS,MAAQ,cAAc,MAAQ;UACnD,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,UAAU,MAAQ,UAAU,MAAQ;UAChD,CAAE,QAAU,WAAW,MAAQ,OAAO,MAAQ;UAAY;YACtD,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;UAAQ;YACvE,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;MAI5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;UACnE,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAClD,CAAE,QAAU,WAAW,MAAQ,OAAO,MAAQ;UAC9C,CAAE,QAAU,iBAAiB,MAAQ,iBAAiB,MAAQ;UAAU;YACpE,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,aAAa,MAAQ,aAAa,MAAQ;;;MAG9D;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,KAAK,GAAG,MAAQ,QAAQ,MAAQ;;QAElD,OAAS;UACL,CAAE,QAAU,YAAY,MAAQ,WAAW,MAAQ;UACnD,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;UACnE,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAClD,CAAE,QAAU,WAAW,MAAQ,OAAO,MAAQ;UAC9C;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB,CAAC,GAAG,GAAG,GAAG;;UAE9B;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;MAI5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,eAAe,MAAQ;;QAEjD,OAAS;UACL,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAClD,CAAE,QAAU,WAAW,MAAQ,OAAO,MAAQ;UAC9C;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;MAI5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAClD,CAAE,QAAU,WAAW,MAAQ,OAAO,MAAQ;UAAY;YACtD,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,aAAa,MAAQ,aAAa,MAAQ;;;MAG9D;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAClD,CAAE,QAAU,WAAW,MAAQ,OAAO,MAAQ;UAAY;YACtD,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,aAAa,MAAQ,aAAa,MAAQ;;;MAG9D;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,KAAK,GAAG,MAAQ,QAAQ,MAAQ;;QAElD,OAAS;UACL,CAAE,QAAU,YAAY,MAAQ,WAAW,MAAQ;UACnD,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;UACnE,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAClD,CAAE,QAAU,WAAW,MAAQ,OAAO,MAAQ;UAAY;YACtD,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB,CAAC,GAAG,GAAG,GAAG;;UAE9B;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;MAI5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAClD,CAAE,QAAU,WAAW,MAAQ,OAAO,MAAQ;UAAY;YACtD,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,aAAa,MAAQ,aAAa,MAAQ;;;MAG9D;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAClD,CAAE,QAAU,SAAS,MAAQ,aAAa,MAAQ;UAClD,CAAE,QAAU,WAAW,MAAQ,OAAO,MAAQ;;;;;;;;ACnV1D;;;;;;;;;;;;;;;;AAgBO,mBAAa;MAChB;QACI,UAA
Y;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;;QAE3C,OAAS,CAAC,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ;;MAExD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;UACtC,CAAE,OAAS,GAAG,MAAQ,OAAO,MAAQ;;QAEzC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,WAAW,MAAQ;UACzC,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,WAAW,MAAQ,UAAU,cAAgB;UACnE,CAAE,OAAS,GAAG,MAAQ,YAAY,MAAQ,UAAU,cAAgB;;QAExE,OAAS;UACL;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;;QAE3C,OAAS,CAAC,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ;;MAExD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS,CAAC,CAAE,QAAU,SAAS,MAAQ,SAAS,MAAQ;;MAE5D;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;;QAE3C,OAAS;UACL;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,SAAS,MAAQ,SAAS,MAAQ;UAC9C,CAAE,QAAU,QAAQ,MAAQ,QAAQ,MAAQ,UAAU,cAAgB;UAAK;YACvE,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;YAChB,cAAgB;;UAEpB,CAAE,QAAU,KAAK,MAAQ,KAAK,MAAQ,UAAU,cAAgB;;;MAGxE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;UACtC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ,UAAU,cAAgB;;QAEpE,OAAS,CAAC,CAAE,QAAU,QAAQ,MAAQ,SAAS,MAAQ;;MAE3D;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;;QAE3C,OAAS;UACL;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,QAAQ,MAAQ,QAAQ,MAAQ;UAAY;YACpD,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;YAChB,cAAgB;;UAEpB,CAAE,QAAU,SAAS,MAAQ,SAAS,MAAQ;UAC9C,CAAE,QAAU,KAAK,MAAQ,KAAK,MAAQ,UAAU,cAAgB;;;MAGxE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;;QAE3C,OAAS,CAAC,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ;;MAExD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS,CAAC,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ;;MAExD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,cAAc,MAAQ;;QAEhD,OAAS;UACL,CAAE,QAAU,QAAQ,MAAQ,QAAQ,MAAQ;UAC5C,CAAE,QAAU,SAAS,MAAQ,SAAS,MAAQ;UAC9C,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ;UAC1C,CAAE,QAAU,gBAAgB,MAAQ,gBAAgB,MAAQ;;;;;;;;AC1KxE;;;;;;;;;;;;;;;;AAgBO,mBAAa;MAChB;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,iBAAiB,MAAQ;UAC/C,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;;;MAGtD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,iBAAiB,MAAQ;UAC/C,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;UAC9C,CAAE,OAAS,GAAG,MAAQ,kBAAkB,MAAQ;;;MAGxD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,iBAAiB,MAAQ;UAC/C,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;UAC9C,CAAE,OAAS,GAAG,MAAQ,kBAAkB,MAAQ;;QAEpD,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;UAAQ;YACvE,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;;;;MAIpB;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,iBAAiB,MAAQ;UAC/C,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;UAC9C,CAAE,OAAS,GAAG,MAAQ,kBAAkB,MAAQ;UAChD,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;;;MAGtD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,aAAa,MAAQ;;QAE/C,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,U
AAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS,CAAC;UACF,QAAU;UACV,MAAQ;UACR,MAAQ;UACR,cAAgB;;;;;;;;AC/FhC;;;;;;;;;;;;;;;;AAgBO,mBAAa;MAChB;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS,CAAC,CAAE,QAAU,UAAU,MAAQ,UAAU,MAAQ;;MAE9D;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;;MAG3C;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;;;;;;;;ACtClD;;;;;;;;;;;;;;;;AAgBO,mBAAa;MAChB;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,WAAW,MAAQ;;QAE7C,OAAS;UACL,CAAE,QAAU,SAAS,MAAQ,SAAS,MAAQ;UAC9C,CAAE,QAAU,SAAS,MAAQ,SAAS,MAAQ;;;MAGtD;QACI,UAAY;QACZ,UAAY;QACZ,OAAS;UACL,CAAE,QAAU,SAAS,MAAQ,SAAS,MAAQ;UAC9C,CAAE,QAAU,SAAS,MAAQ,SAAS,MAAQ;;;MAGtD,CAAE,UAAY,SAAS,UAAY;MAAW;QAC1C,UAAY;QACZ,UAAY;QACZ,QAAU,CAAC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;MAElD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU,CAAC,CAAE,OAAS,GAAG,KAAO,GAAG,MAAQ,KAAK,MAAQ;;MAE5D;QACI,UAAY;QACZ,UAAY;QACZ,QAAU,CAAC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;MAElD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU,CAAC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;MAElD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU,CAAC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;MAElD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU,CAAC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;MAElD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU,CAAC,CAAE,OAAS,GAAG,KAAO,GAAG,MAAQ,KAAK,MAAQ;;MAE5D;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;;QAE1C,OAAS;UACL,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAAY;YAC1D,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;MAI5B,CAAE,UAAY,QAAQ,UAAY,SAAS,QAAU;MAAM;QACvD,UAAY;QACZ,UAAY;QACZ,QAAU,CAAC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;MAElD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,OAAO,MAAQ,OAAO,MAAQ;UAC1C,CAAE,QAAU,OAAO,MAAQ,OAAO,MAAQ;;;;;;;;AC1G/C,mBAAa;MAChB;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;QACV,OAAS;UACL,CAAE,QAAU,eAAe,MAAQ,cAAc,MAAQ;UACzD;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;;UAEZ,CAAE,QAAU,aAAa,MAAQ,YAAY,MAAQ;UACrD,CAAE,QAAU,eAAe,MAAQ,cAAc,MAAQ;;;MAGjE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;QACV,OAAS;UACL,CAAE,QAAU,eAAe,MAAQ,cAAc,MAAQ;UACzD;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;;UAEZ,CAAE,QAAU,aAAa,MAAQ,YAAY,MAAQ;UACrD,CAAE,QAAU,eAAe,MAAQ,cAAc,MAAQ;;;MAGjE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,eAAe,MAAQ;UAC7C,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;UACtC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,OAAO,MAAQ,OAAO,MAAQ,SAAS,cAAgB;UAAQ;YACvE,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;MAI5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,eAAe,MAAQ;UAC7C,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;UACtC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,OAAO,MAAQ,OAAO,MAAQ,SAAS,cAAgB;UAAQ;YACvE,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;MAI5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,eAAe,MAAQ;UAC7C,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;UACtC,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;;QAElD,OAAS;UACL,CAAE,QAAU,OAAO,MAAQ,OAAO,MAAQ,SAAS,cAAgB;UAAQ;YACvE,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;MAI5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,eAAe,MAAQ;UAC7C,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;UACtC,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;;QAElD,OAAS;UACL,CAAE,QAAU,OAAO,MAAQ,OAAO,MAAQ,SAAS,cAAgB;UAAQ;YACvE,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;;;;;;AC/FhC;;;;;;;;;;;;;;;;AAgBO,mBAAa;MAChB;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;;QAE1C,OAAS;UACL,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;UAC7D,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI
,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;;QAE1C,OAAS;UACL,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;UAC7D,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,YAAY,MAAQ;;QAE9C,OAAS;UACL,CAAE,QAAU,UAAU,MAAQ,UAAU,MAAQ;UAAY;YACxD,QAAU;YACV,MAAQ;YACR,MAAQ;;;;;;;;;ACtDxB;;;;;;;;;;;;;;;;AAgBO,mBAAa;MAChB;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,aAAa,MAAQ;UAC3C,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,aAAa,MAAQ;UAC3C,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS,CAAC;UACF,QAAU;UACV,MAAQ;UACR,MAAQ;UACR,cAAgB;;;;;;;;AC3IhC;;;;;;;;;;;;;;;;AAgBO,mBAAa;MAChB;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,KAAK,GAAG,MAAQ,QAAQ,MAAQ;;QAElD,OAAS;UACL,CAAE,QAAU,YAAY,MAAQ,WAAW,MAAQ;UAAY;YAC3D,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YAC
I,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;;QAE1C,OAAS,CAAC;UACF,QAAU;UACV,MAAQ;UACR,MAAQ;UACR,cAAgB;;;;;;;;ACrIhC;;;;;;;;;;;;;;;;AAgBO,mBAAa;MAChB;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;UACtC,CAAE,OAAS,GAAG,MAAQ,YAAY,MAAQ;;QAE9C,OAAS;UACL;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;MAI5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;UACtC,CAAE,OAAS,GAAG,MAAQ,YAAY,MAAQ;;QAE9C,OAAS;UACL;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;MAI5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;UACtC,CAAE,OAAS,GAAG,MAAQ,YAAY,MAAQ;;QAE9C,OAAS;UACL;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;MAI5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,QAAQ,MAAQ,QAAQ,MAAQ,UAAU,cAAgB;UACtE;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;MAI5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU,CAAC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;MAElD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU,CAAC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;MAElD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,iBAAiB,MAAQ;UAC/C,CAAE,OAAS,GAAG,MAAQ,eAAe,MAAQ;UAC7C,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;UAC9C,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;;QAElD,OAAS,CAAC;UACF,QAAU;UACV,MAAQ;UACR,MAAQ;UACR,cAAgB;UAChB,cAAgB;;;;;;;;AChJhC;;;;;;;;;;;;;;;;AAgBO,mBAAa;MAChB;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;;QAE1C,OAAS,CAAC,CAAE,QAAU,aAAa,MAAQ,YAAY,MAAQ;;MAEnE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;;QAE1C,OAAS,CAAC,CAAE,QAAU,aAAa,MAAQ,YAAY,MAAQ;;MAEnE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;;QAE1C,OAAS,CAAC,CAAE,QAAU,aAAa,MAAQ,YAAY,MAAQ;;MAEnE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;;QAE1C,OAAS,CAAC,CAAE,QAAU,aAAa,MAAQ,YAAY,MAAQ;;MAEnE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;;QAE1C,OAAS,CAAC,CAAE,QAAU,aAAa,MAAQ,YAAY,MAAQ;;MAEnE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;;QAE1C,OAAS,CAAC,CAAE,QAAU,aAAa,MAAQ,YAAY,MAAQ;;MAEnE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;;;MAG9C;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;;;MAG9C;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;;QAE1C,OAAS,CAAC,CAAE,QAAU,aAAa,MAAQ,YAAY,MAAQ;;MAEnE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;;QAE1C,OAAS;UACL,CAAE,QAAU,aAAa,MAAQ,aAAa,MAAQ;UACtD,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;;;;;;;;ACzG9D;;;;;;;;;;;;;;;;AAgBO,mBAAa;MAChB;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN
,CAAE,OAAS,GAAG,KAAO,IAAI,MAAQ,WAAW,MAAQ;UACpD,CAAE,OAAS,IAAI,MAAQ,QAAQ,MAAQ;;QAE3C,OAAS,CAAC,CAAE,QAAU,KAAK,MAAQ,KAAK,MAAQ,UAAU,cAAgB;;MAE9E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,KAAO,GAAG,MAAQ,WAAW,MAAQ;UACnD,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;;QAE1C,OAAS,CAAC,CAAE,QAAU,KAAK,MAAQ,KAAK,MAAQ,UAAU,cAAgB;;MAE9E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,WAAW,MAAQ;UACzC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ,UAAU,cAAgB;;;MAGxE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,WAAW,MAAQ;;QAE7C,OAAS;UACL,CAAE,QAAU,QAAQ,MAAQ,QAAQ,MAAQ,UAAU,cAAgB;UAAK;YACvE,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;MAI5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ,QAAQ,cAAgB;;;MAGtE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;;;MAG9C;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;;;MAG9C;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,OAAO,MAAQ;UACrC,CAAE,OAAS,GAAG,MAAQ,WAAW,MAAQ;;QAE7C,OAAS;UACL;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;MAI5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,KAAO,GAAG,MAAQ,WAAW,MAAQ;;QAEvD,OAAS;UACL,CAAE,QAAU,QAAQ,MAAQ,QAAQ,MAAQ,UAAU,cAAgB;;;MAG9E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,QAAQ,MAAQ,QAAQ,MAAQ,UAAU,cAAgB;UAAK;YACvE,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;YAChB,cAAgB;;;;MAI5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;;;MAG9C;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ,UAAU,cAAgB;UAChE,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS,CAAC;UACF,QAAU;UACV,MAAQ;UACR,MAAQ;UACR,cAAgB;;;MAG5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,mBAAmB,MAAQ;UACjD,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ,UAAU,cAAgB;;;MAGxE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,WAAW,MAAQ;UACzC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;;;MAG/C;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,WAAW,MAAQ;;;MAGjD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,iBAAiB,MAAQ;UAC/C,CAAE,OAAS,GAAG,MAAQ,eAAe,MAAQ;UAC7C,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;UAC9C,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;;QAElD,OAAS,CAAC;UACF,QAAU;UACV,MAAQ;UACR,MAAQ;UACR,cAAgB;UAChB,cAAgB;;;;;;;;ACvNhC;;;;;;;;;;;;;;;;AAgBO,mBAAa;MAChB;QACI,UAAY;QACZ,UAAY;QACZ,QAAU,CAAC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;MAElD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU,CAAC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;MAElD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UAAY;YAC3C,OAAS;YACT,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;MAI5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UAAY;YAC3C,OAAS;YACT,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;;;;;;AC/ChC;;;;;;;;;;;;;;;;AAgBO,mBAAa;MAChB;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,QAAQ,MAAQ,SAAS,MAAQ;;;MAGrD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;;;MAG9C;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;U
ACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,WAAW,MAAQ;;QAE7C,OAAS,CAAC,CAAE,QAAU,QAAQ,MAAQ,QAAQ,MAAQ;;MAE1D;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,WAAW,MAAQ;;QAE7C,OAAS,CAAC;UACF,QAAU;UACV,MAAQ;UACR,MAAQ;UACR,cAAgB;;;MAG5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,WAAW,MAAQ;UAAc;YACnD,OAAS;YACT,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;MAI5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;;;MAG/C;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS,CAAC;UACF,QAAU;UACV,kBAAoB;UACpB,MAAQ;UACR,MAAQ;;;MAGpB;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,cAAc,MAAQ;UAC5C,CAAE,OAAS,GAAG,MAAQ,YAAY,MAAQ;;;MAGlD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,cAAc,MAAQ;UAC5C,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;;;MAG/C;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,cAAc,MAAQ,aAAa,MAAQ;UACvD,CAAE,QAAU,eAAe,MAAQ,cAAc,MAAQ;;;MAGjE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;;QAE3C,OAAS;;;;;;;ACtIjB;;;;;;;;;;;;;;;;;iBAuCe;AACP,eAAO,KAAK,aAAc,MAAK,YAAY,IAAI;;MAGnD;AACI,oBAAY;UACR;UAAY;UAAW;UAAS;UAAa;UAAU;UACvD;UAAY;UAAS;UAAO;UAAO;UAAU;UAAe;UAC5D;UAAW;UAAU;UAAgB;;AAEzC,4BAAoB,GAAG,OAAO,GAAG,IAAI,IAAI,SAAM,IAAG;AAClD,aAAK,YAAY,YAAY,OAAO;AAChC,cAAI,OAAO,YAAY;AACvB,iBAAO;WACR;;MAIP,mCAAkC;AAC9B,wBAAgB,OAAM;AACtB,6BAAqB;AACrB,wBAAgB;AAChB,0BAAkB;AAClB,sBAAc,QAAQ,OAAO;AACzB,cAAI,KAAK,QAAQ,KAAK,QAAQ;AAC9B,cAAI,KAAK,GAAG,WAAW;AACnB,yBAAa,KAAK,IAAI,KAAK;qBAEtB,KAAK,OAAO;AACjB,oBAAQ,KAAK,IAAI,KAAK;qBAEjB,KAAK,SAAS,QAAQ,KAAK,MAAM,WAAW;AACjD,sBAAU,KAAK,IAAI,KAAK;;AAE5B,iBAAO;WACR;AACH,qBAAa;AACb,wBAAgB;AAChB,iCAAyB;AACzB,kCAA0B;AAC1B,YAAI,aAAa;AACb,+BAAqB,KAAK,oBAAoB,UAAU;AACxD,gCAAsB,KAAK,oBAAoB,UAAU;;AAE7D,yBAAiB,OAAO,KAAK;AAC7B,iBAAS,QAAQ;AACb,uBAAa,MAAM;AACnB,eAAK,WAAW,QAAQ;AACpB,+BAAoB,oBAAoB;AACxC,iBAAK,OAAO,KAAK,MAAM;AACvB,kBAAM,UAAU,SAAS,KAAK;;;AAKtC,YAAI,OAAO,KAAK,qBAAqB,WAAW;AAC5C,mBAAS,QAAQ;AACb,yBAAa,MAAM;AACnB,gBAAI,KAAK,SAAS,WAAW;AACzB,sBAAQ,KAAK;;;;AAKrB,iBAAO,KAAK,qBAAqB,QAAQ;AACrC,+BAAoB,oBAAoB;AACxC,yBAAa,MAAM;AACnB,gBAAI,QAAQ;AACR,mBAAK,eAAe,oBAAoB;AACxC,sBAAQ,KAAK;;;;AAIzB,YAAI,OAAO,KAAK,oBAAoB,SAAS;AACzC,iBAAO,KAAK,oBAAoB,QAAQ;AACpC,+BAAoB,oBAAoB;AACxC,yBAAa,MAAM;AACnB,gBAAI;AACA,mBAAK,eAAe,mBAAmB;AACvC,qBAAO,KAAK;;;;AAKpB,mBAAS;;AAEb,wBAAgB;AAChB,YAAI,OAAM,WAAW,QAAQ,OAAM,QAAQ,YAAY;AACnD,sBAAY,OAAM,QAAQ,SAAS,OAAO;AACtC,uBAAU,MAAK,UAAU,QAAQ,KAAK,YAAY;AAClD,mBAAO;aACR;;AAEP,uBAAe,CAAE,OAAO,QAAQ,SAAS,SAAS,cAAc,WAAW;AAC3E,YAAI,UAAU,SAAS;AACnB,iBAAO,YAAY;;AAEvB,eAAO;;MAEX;AACI,eAAO,OAAO,KAAK,WAAW,IACzB,OAAO;AACR,eAAK,QAAQ,MAAM,QAAQ;AAC3B,iBAAO;WACR;;MAEP;AAGI,uBAAe,gBAAgB,KAAK,OAAO,KAAK,UAAU,KAAK,OAAO;AACtE,YAAI,KAAK,QAAQ;AACb,eAAK,OAAO;;AAEhB,wBAAgB;UACZ,MAAM,KAAK;UACX,IAAI,KAAK;UACT,UAAU,OAAO;UACjB,YAAa,MAAK,SACd,IAAI,IAAI,YAAS,OAAM,WAAW,OAAO,OAAM,OAAO,KAAK;UAC/D,QAAQ;UACR,UAAU;UACV,aAAa;UACb,YAAY;UACZ,UAAU,KAAK;;AAEnB,YAAI,OAAO,UAAU;AACjB,kBAAQ,cACJ,OAAO,OAAO,OAAO;AACjB,gBAAI,MAAM,QAAQ;cACd,MAAM,MAAM;cACZ,iBAAiB,MAAM;cACvB,eAAe,MAAM;;AAEzB,mBAAO;aACR;;AAEX,YAAI,OAAO,SAAS;AAChB,kBAAQ,aACJ,OAAO,MAAM,OAAO;AAChB,yBAAa,MAAM;AACnB,wBAAY;AACZ,oBAAQ,MAAM;mBACL;AACD,wBAAQ,eAAe,KAAK,MAAM,MAAM,QAAQ,MAAM;AACtD,oBAAI,UAAU,UAAa,CAAC,CAAC,MAAM;AAC/B,0BAAQ,eAAe,KAAK,MAAM,MAAM,kBAAkB,MAAM;;AAEpE;mBACC;AACD,wBAAQ,oBAAoB,KAAK,MAAM,MAAM,QAAQ,MAAM;AAC3D,oBAAI,UAA
U,UAAa,CAAC,CAAC,MAAM;AAC/B,0BAAQ,oBAAoB,KAAK,MAAM,MAAM,kBAAkB,MAAM;;AAEzE;mBACC;AACD,wBAAQ,eAAe,KAAK,MAAM,MAAM,QAAS,MAAM,gBAAgB;AACvE,oBAAI,UAAU,UAAa,CAAC,CAAC,MAAM;AAC/B,0BAAQ,eAAe,KAAK,MAAM,MAAM,kBAAkB,MAAM;;AAEpE;mBACC;AACD,wBAAQ,qBAAqB,KAAK,MAAM,MAAM,QAAQ,MAAM;AAC5D,oBAAI,UAAU,UAAa,CAAC,CAAC,MAAM;AAC/B,0BAAQ,qBAAqB,KAAK,MAAM,MAAM,kBAAkB,MAAM;;AAE1E;mBACC;AACD,wBAAQ,aAAa,KAAK,MAAM,MAAM,QAAQ,MAAM;AACpD,oBAAI,UAAU,UAAa,CAAC,CAAC,MAAM;AAC/B,0BAAQ,aAAa,KAAK,MAAM,MAAM,kBAAkB,MAAM;;AAElE;mBACC;AACD,wBAAQ,kBAAkB,KAAK,MAAM,MAAM,QAAQ,MAAM;AACzD,oBAAI,UAAU,UAAa,CAAC,CAAC,MAAM;AAC/B,0BAAQ,kBAAkB,KAAK,MAAM,MAAM,kBAAkB,MAAM;;AAEvE;mBACC;AACD,wBAAQ,oBAAoB,KAAK,MAAM,MAAM,QAAQ,MAAM;AAC3D,oBAAI,UAAU,UAAa,CAAC,CAAC,MAAM;AAC/B,0BAAQ,oBAAoB,KAAK,MAAM,MAAM,kBAAkB,MAAM;;AAEzE;mBACC;AACD,wBAAQ,yBAAyB,KAAK,MAAM,MAAM,QAAQ,MAAM;AAChE,oBAAI,UAAU,UAAa,CAAC,CAAC,MAAM;AAC/B,0BAAQ,yBAAyB,KAAK,MAAM,MAAM,kBAAkB,MAAM;;AAE9E;mBACC;AACD,wBAAQ,cAAc,KAAK,MAAM,MAAM,QAAQ,MAAM;AACrD,oBAAI,UAAU,UAAa,CAAC,CAAC,MAAM;AAC/B,0BAAQ,cAAc,KAAK,MAAM,MAAM,kBAAkB,MAAM;;AAEnE;mBACC;AACD,wBAAQ,mBAAmB,KAAK,MAAM,MAAM,QAAQ,MAAM;AAC1D,oBAAI,UAAU,UAAa,CAAC,CAAC,MAAM;AAC/B,0BAAQ,mBAAmB,KAAK,MAAM,MAAM,kBAAkB,MAAM;;AAExE;mBACC;AACD,wBAAQ,aAAa,KAAK,MAAM,MAAM,QAAQ,MAAM;AACpD,oBAAI,UAAU,UAAa,CAAC,CAAC,MAAM;AAC/B,0BAAQ,aAAa,KAAK,MAAM,MAAM,kBAAkB,MAAM;;AAElE;mBACC;mBACA;AACD;;AAEA,sBAAM,IAAI,MAAM,2BAA2B,MAAM,gBAAgB,KAAK;;AAE9E,gBAAI,MAAM,QAAQ,CAAE,OAAO;AAC3B,mBAAO;aACR;;AAEX,eAAO;;MAGX;AACI,wBAAgB,YAAY;AAC5B,6BAAqB;AACrB,wBAAgB;AAChB,oBAAY;AACZ,YAAI,WAAW;AACX,kBAAQ,QAAQ,OAAO;AACnB,gBAAI,KAAK,QAAQ,KAAK,QAAQ;AAC9B,gBAAI,KAAK,OAAO;AACZ,sBAAQ,KAAK,IAAI,KAAK;;AAE1B,mBAAO;aACR;;AAEP,uBAAe;AACf,wBAAgB;AAChB,oBAAY,UAAU,SAAS,QAAQ;AACnC,6BAAoB,oBAAoB,IAAI;AAC5C,uBAAa;YACT,MAAM;YACN,IAAI;YACJ,QAAQ;YACR,YAAY;YACZ,UAAU;YACV,aAAa;YACb,YAAY,CAAE,OAAO,CAAE,OAAO,gBAAgB,IAAI,OAAO,MAAM;YAC/D,UAAU;;AAEd,eAAK,eAAe,IAAI;AACxB,iBAAO,KAAK;AACZ,gBAAM,YAAY;;AAEtB,yBAAiB,OAAO,KAAK;AAC7B,iBAAS,QAAQ;AACb,uBAAa,MAAM;AACnB,eAAK,WAAW,QAAQ;AACpB,+BAAoB,oBAAoB;AACxC,iBAAK,OAAO,KAAK,MAAM;AACvB,kBAAM,UAAU,SAAS,KAAK;;;AAGtC,8BAAsB,YAAY;AAClC,oBAAY,UAAU,UAAU,QAAQ;AACpC,qCAA0B,oBAAoB,cAAc,OAAO;AACnE,uBAAa,MAAM;AACnB,cAAI,QAAQ;AACR,iBAAK,gBAAgB;AACrB,oBAAQ,KAAK;;;AAGrB,0BAAkB,KAAK,mBAAmB;AAC1C,eAAO,CAAE,OAAO,QAAQ,SAAS,SAAS,cAAc;;MAE5D;AACI,eAAO;UACH,YAAY,YAAY,UAAU;UAClC,QAAQ,YAAY,UAAU,SAAS,OAAO;AAC1C,gBAAI,IAAI,QAAQ,KAAK,mBAAmB;AACxC,mBAAO;aACR;UACH,SAAS,YAAY,UAAU,UAAU,OAAO;AAC5C,gBAAI,IAAI,QAAQ,KAAK,mBAAmB,KAAK,YAAY;AACzD,mBAAO;aACR;;;MAGX;AACI,mBAAW,IAAI;AACf,YAAI,YAAW;AACX,iBAAO,SAAQ;;AAEnB,eAAO,CAAE,MAAM,OAAO,IAAI;;;AAG3B;AACH,sBAAe,OAAM;AACrB,UAAI,OAAO,QAAO,SAAS;AACvB,eAAO,QAAO,KAAK;iBAEd,OAAO,WAAW;AACvB,eAAO,IAAI,OAAO,MAAM,UAAU;;AAGlC,cAAM,IAAI,MAAM;;;AAIjB;AACH,oBAAc,MAAM,QAAQ,KAAK,OAAO,aAAa,MAAM,MAAM,KAAK,aAAa;AACnF,aAAO,WAAW,QAAQ,MAAM;;AAE7B,yDAAqD;AACxD,oBAAc,MAAM;AACpB,UAAI,SAAS;AACT,eAAO,iBAAiB,MAAM,GAAG;;AAErC,aAAO;;AAEJ;AACH,oBAAc,MAAM;AACpB,aAAO,QAAQ,MAAM,IAAI;;AAEtB;AACH,oBAAc,MAAM,SAAS;AAC7B,oBAAc,MAAM,QAAQ,OAAO,MAAM,OAAQ,MAAM,QAAQ,OAAO,MAAM,OAAO;AACnF,aAAQ,OAAO,UAAU,WAAY,QAAQ,SAAS,OAAO;;AAE1D;AACH,UAAI,OAAQ,UAAW;AAEnB,gBAAQ,SAAoB;;AAEhC,cAAQ;aACC,SAAoB;AACrB,iBAAO;aACN,SAAoB;aACpB,SAAoB;aACpB,SAAoB;aACpB,SAAoB;AACrB,iBAAO;aACN,SAAoB;AACrB,iBAAO;aACN,SAAoB;AACrB,iBAAO;aACN,SAAoB;AACrB,iBAAO;;AAIP,iBAAO;;;AAGZ;AACH,oBAAc,MAAM;AACpB,UAAI,SAAS,MAAM;AACf,eAAO,MAAM,KAAK;;AAEtB,aAAO;;AAEJ;AACH,oBAAc,MAAM;AACpB,UAAI,SAAS,MAAM;AACf,eAAO,gBAAgB,MAAM;;AAEjC,aAAO;;AAEJ;AACH,oBAAc,MAAM;AACpB,UAAI,SAAS,MAAM,QAAQ,MAAM,KAAK;AAClC,eAAO,MAAM,KAAK,KAAK,IAAI,OAAK,gBAAgB;;AAEpD,aAAO;;AAEJ;AACH,U
AAI,MAAM;AACN,eAAO;;AAEX,UAAI,MAAM,OAAO;AACb,eAAO,MAAM,IAAI,IAAI,SAAQ,OAAO,IAAI,SAAS,WAAY,IAAI,OAAO,SAAS,IAAI,MAAM;;AAE/F,aAAO;;AAEJ;AACH,oBAAc,MAAM;AACpB,UAAI,SAAS,MAAM;AACf,eAAO,sBAAsB,MAAM;;AAEvC,aAAO;;AAEJ;AACH,oBAAc,MAAM;AACpB,UAAI;AACA,eAAS,QAAM,KAAK,KAAK,MAAM,KAAK,EAAE,SAAS,MAAM,KAAK,IACtD,MAAM,KAAK,MACX,IACC,IAAI,OAAM,OAAO,MAAM,WAAY,IAAI,SAAS,GAAG;;AAE5D,aAAO;;AAEJ,8DAA0D;AAC7D,oBAAc,MAAM;AACpB,UAAI,SAAS,MAAM,QAAQ,MAAM,KAAK;AAClC,eAAO,MAAM,KAAK,EAAE,IAAI;AACpB,iBAAO,iBAAiB,GAAG;;;AAGnC,aAAO;;AAEJ;AACH,oBAAc,MAAM;AACpB,UAAI,SAAS,MAAM,QAAQ,MAAM,KAAK;AAClC,eAAO,MAAM,KAAK,MAAM,IAAI;AACxB,iBAAO,sBAAsB;;;AAGrC,aAAO;;AAEJ;AACH,oBAAc,MAAM;AACpB,UAAI,SAAS,MAAM,QAAQ,MAAM,KAAK;AAClC,eAAO,MAAM,KAAK;;AAEtB,aAAO;;AC9cX;;;;;;;;;;;;;;;;;MAsBI;AACI,aAAK,OAAO;AACZ,aAAK,YAAY;AACjB,aAAK,UAAU;AACf,aAAK,SAAS;AACd,aAAK,QAAQ;AACb,aAAK,SAAS,KAAK,WAAW,IAAI,UAAQ,KAAK,SAAS;AACxD,YAAI,KAAK,YAAY;AACjB,eAAK,QAAQ,OAAO,KAAK,KAAK,UACzB,OAAO;AACR,kBAAM,OAAO,KAAK,QAAQ;AAC1B,mBAAO;aACR;;;MAOX;AACI,eAAO,UAAU,MAAM,KAAK,WAAW,KAAK;;MAMhD;AACI,sBAAc,KAAK,KAAK,SAAS;AACjC,YAAI,MAAM,UAAU;AAChB,iBAAO,UAAU,MAAM,KAAK,WAAW,KAAK;;AAEhD,YAAI,MAAM,KAAK,QAAQ,MAAM,KAAK;AAC9B,iBAAO,eAAe,KAAK,KAAK,UAAU,MAAM;;AAEpD,YAAI,MAAM,KAAK;AACX,iBAAO,eAAe,KAAK,KAAK,UAAU,MAAM;;AAEpD,YAAI,MAAM,KAAK;AACX,iBAAO,aAAa,KAAK,KAAK,UAAU,MAAM;;AAElD,YAAI,MAAM,SAAS;AACf,iBAAO,oBAAoB,KAAK,KAAK,UAAU,MAAM;;AAEzD,YAAI,MAAM,QAAQ;AACd,iBAAO,cAAc,KAAK,KAAK,UAAU,MAAM;;AAEnD,YAAI,MAAM,QAAQ;AACd,cAAI,MAAM,KAAK,KAAK,QAAQ,MAAM,KAAK,KAAK;AACxC,mBAAO,qBAAqB,KAAK,KAAK,UAAU,MAAM;;AAE1D,cAAI,MAAM,KAAK,KAAK;AAChB,mBAAO,oBAAoB,KAAK,KAAK,UAAU,MAAM;;AAEzD,cAAI,MAAM,KAAK,SAAS;AACpB,mBAAO,yBAAyB,KAAK,KAAK,UAAU,MAAM;;AAE9D,cAAI,MAAM,KAAK,KAAK;AAChB,mBAAO,kBAAkB,KAAK,KAAK,UAAU,MAAM;;AAEvD,cAAI,MAAM,KAAK,QAAQ;AACnB,mBAAO,mBAAmB,KAAK,KAAK,UAAU,MAAM;;;AAG5D,eAAO;;;ACrFf;;;;;;;;;;;;;;;;ACAA;;;;;;;;;;;;;;;;AAmBO,sBAAkB;AACrB,cAAQ,KAAK;aACJ;aACA;aACA;AACD,iBAAO,CAAC,MAAU,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,KAAK,MAAM,WAAW;;aAEnG;AACD,iBAAO,CAAC,KAAW,cAAc,WAAW,MAAM,WAAW;;aAE5D;aACA;AACD,iBAAO,CAAC,IAAU,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,KAAK,MAAM,WAAW;aACnG;AACD,iBAAO,CAAC,KAAU,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,KAAK,MAAM,WAAW;aACnG;aACA;AACD,iBAAO,CAAC,IAAU,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,KAAK,MAAM,WAAW;;aAEnG;AACD,iBAAO,CAAC,SAAe,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,KAAK,MAAM,WAAW;;aAExG;AACD,iBAAO,CAAC,SAAe,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,KAAK,MAAM,WAAW;;aAExG;AACD,iBAAO,CAAC,IAAU,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,KAAK,MAAM,WAAW;;aAEnG;AACD,iBAAO,CAAC,QAAc,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,KAAK,MAAM,WAAW;;aAEvG;AACD,iBAAO,CAAC,QAAc,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,KAAK,MAAM,WAAW;;aAEvG;AACD,iBAAO,CAAC,IAAU,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,KAAK,MAAM,WAAW;;aAEnG;AACD,iBAAO,CAAC,kBAAwB,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,KAAK,MAAM,WAAW;;;AAGlH,gBAAM,UAAU,aAAa,KAAK;;;AAGvC,qBAAiB;AC/DxB;;;;;;;;;;;;;;;;AAmBO,wBAAkB;AACrB,cAAQ,KAAK;aACJ;aACA;AACD,iBAAO,CAAC,IAAU,cAAc,KAAK,MAAM,WAAW;aACrD;AACD,iBAAO,CAAC,KAAW,cAAc,KAAK,MAAM,WAAW;aACtD;AACD,iBAAO,CAAC,MAAY,cAAc,KAAK,MAAM,WAAW;aACvD;AACD,iBAAO,CAAC,KAAW,cAAc,KAAK,MAAM,WAAW;aACtD;AACD,iBAAO,CAAC,MAAY,cAAc,KAAK,MAAM,WAAW;aACvD;AACD,iBAAO,CAAC,KAAW,cAAc,KAAK,MAAM,WAAW;aACtD;AACD,iBAAO,CAAC,MAAY,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,KAAK,MAAM,WAAW;aACrG;AACD,iBAAO,CAAC,MAAY,cAAc,KAAK,MAAM,WAAW;aACvD;AACD,iBAAO,CAAC,KAAW,cAAc,KAAK,MAAM,WAAW;aACtD;AACD,iBAAO,CAAC,SAAc,cAAc,QAAQ,MAAM,WAAW,UAAU,cAAc,QAAQ,MAAM,WAAW;aAC7G;AACD,iBAAO,CAAC,IAAU,cAAc,KAAK,MAAM,WAAW;aACrD;AACD,iBAAO,CAAC,KAAW,cAAc,KAAK,MAAM,WAAW;aACtD;AACD,iBAAO,CAAC,KAAU,cAAc,KAAK,MAAM,WAAW;aACrD;AACD,iBAAO,CAAC,IAAU,cAAc,KAAK,MAAM,WAAW;aACrD;AACD,
iBAAO,CAAC,IAAU,cAAc,KAAK,MAAM,WAAW;aACrD;AACD,iBAAO,CAAC,MAAY,cAAc,KAAK,MAAM,WAAW;;aAEvD;AACD,iBAAO,CAAC,MAAY,cAAc,KAAK,MAAM,WAAW;aACvD;AACD,iBAAO,CAAC,MAAU,cAAc,KAAK,MAAM,WAAW;aACrD;AACD,iBAAO,CAAC,MAAY,cAAc,KAAK,MAAM,WAAW;;aAEvD;AACD,iBAAO,CAAC,KAAW,cAAc,KAAK,MAAM,WAAW;aACtD;AACD,iBAAO,CAAC,IAAU,cAAc,KAAK,MAAM,WAAW;aACrD;AACD,iBAAO,CAAC,WAAiB,cAAc,KAAK,MAAM,WAAW;;aAE5D;AACD,iBAAO,CAAC,KAAW,cAAc,KAAK,MAAM,WAAW;aACtD;AACD,iBAAO,CAAC,MAAW,cAAc,KAAK,MAAM,WAAW;aACtD;AACD,iBAAO,CAAC,MAAY,cAAc,KAAK,MAAM,WAAW;;aAEvD;AACD,iBAAO,CAAC,KAAW,cAAc,KAAK,MAAM,WAAW;aACtD;AACD,iBAAO,CAAC,SAAc,cAAc,KAAK,MAAM,WAAW;aACzD;AACD,iBAAO,CAAC,IAAU,cAAc,KAAK,MAAM,WAAW;aACrD;AACD,iBAAO,CAAC,KAAW,cAAc,KAAK,MAAM,WAAW;;aAEtD;AACD,iBAAO,CAAC,KAAW,cAAc,KAAK,MAAM,WAAW;;aAEtD;AACD,iBAAO,CAAC,SAAe,cAAc,KAAK,MAAM,WAAW;;aAE1D;AACD,iBAAO,CAAC,KAAW,cAAc,KAAK,MAAM,WAAW;;aAEtD;AACD,iBAAO,CAAC,OAAa,cAAc,KAAK,MAAM,WAAW;;aAExD;AACD,iBAAO,CAAC,OAAW,cAAc,KAAK,MAAM,WAAW;;aAEtD;AACD,iBAAO,CAAC,IAAU,cAAc,KAAK,MAAM,WAAW;aACrD;aACA;AACD,iBAAO,CAAC,YAAkB,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,gBAAgB,MAAM,WAAW,UAAU,cAAc,gBAAgB,MAAM,WAAW;aAC/K;AACD,iBAAO,CAAC,MAAY,UAAU,KAAK,WAAW,IAAI,WAAW;aAC5D;AACD,iBAAO,CAAC,KAAW,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,QAAQ,MAAM,WAAW;aACvG;AACD,iBAAO,CAAC,UAAgB,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,SAAS,MAAM,WAAW;aAC7G;AACD,iBAAO,CAAC,OAAY,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,SAAS,MAAM,WAAW;;AAE1G,gBAAM,UAAU,aAAa,KAAK;;;AAGvC,uBAAiB;ACrHxB;;;;;;;;;;;;;;;;AAsBO,sFAAkF;AACrF,cAAY,8BAA8B,QAAQ,SAAS,MAAM,qBAAqB,WAAW,cAAc;;AAE5G;AACH,UAAI,GAAG,WAAW,GAAG;AACjB,eAAO;;AAEX,mBAAa,GAAG,IAAI,GAAG,QAAQ;AAC3B,YAAI,GAAG,OAAO,MAAM,GAAG,OAAO,MAAM,GAAG,OAAO,GAAG;AAC7C,iBAAO;;;AAGf,aAAO;;AClCX;;;;;;;;;;;;;;;;;MAuBI;AACI,aAAK,OAAO;AACZ,aAAK,QAAQ;AACb,aAAK,UAAU;AACf,aAAK,eAAe;AACpB,aAAK,yBAAyB;AAC9B,aAAK,cAAc;AACnB,aAAK,iBAAiB;AACtB,aAAK,UAAU;AACf,aAAK,UAAU;AACf,aAAK,WAAW,QAAO;AACvB,aAAK,KAAK;;UAEV;AACA,eAAO,KAAK,SAAS;;UAErB;AACA,eAAO,KAAK;;MAKhB;AACI,aAAK,QAAQ,QAAQ;AACjB,cAAI,WAAW,QAAQ,CAAC,QAAQ,IAAI,QAAO,OAAO;AAC9C,oBAAO,OAAO;;;AAGtB,aAAK,UAAU;AACf,aAAK,UAAU;AACf,aAAK,SAAS;;MAElB;AACI,eAAO,KAAK,QAAQ;;MAMxB;AACI,YAAI,KAAK;AACL,gBAAM,IAAI,MAAM,eAAe,KAAK;;AAExC,YAAI,SAAQ,KAAK,UAAS,KAAK;AAC3B,gBAAM,IAAI,MAAM,4BAA4B,8BAA6B,KAAK;;AAElF,gCAAwB,KAAK,QAAQ;AACrC,YAAI,gBAAgB;AAChB,gBAAM,IAAI,MAAM,eAAe,KAAK,8BAA8B;;AAGtE,YAAI,KAAK;AACL,0BAAgB,UAAU;;AAE9B,wBAAgB,OAAO;AACvB,eAAO,gBAAgB;;MAK3B;AACI,eAAO,QAAQ,IAAI,YAAS,KAAK,KAAK;;MAO1C;AACI,YAAI,KAAK;AACL,gBAAM,IAAI,MAAM,eAAe,KAAK;;AAExC,YAAI,SAAQ,KAAK,CAAC,KAAK,eAAe,UAAS,KAAK;AAChD,gBAAM,IAAI,MAAM,2BAA2B,oDAAmD,KAAK;;AAEvG,kBAAU,KAAK,QAAQ,WAAU;AACjC,YAAI,QAAO,UAAU,KAAK;AACtB,gBAAM,IAAI,MAAM,eAAe,KAAK,8CAA8C;uCACvD,QAAO,mCAAmC,KAAK;;AAG9E,YAAI,KAAK,WAAW,KACf,MAAK,gBAAgB,QAAQ,KAAK,aAAa,WAAW;AAC3D,eAAK,eAAe,QAAO;;AAE/B,4CAAoC,KAAK,cAAc,QAAO,OAAO,eAAe,KAAK,8CAA8C;AACvI,YAAI,EAAE;AACF,gBAAM,IAAI,MAAM,eAAe,KAAK,8CAA8C;;AAEtF,YAAI,EAAE;AACF,gBAAM,IAAI,MAAM,eAAe,KAAK,8CAA8C;;AAEtF,UAAE,SAAS;AACX,aAAK;AACL,UAAE,UAAU;AACZ,aAAK,QAAQ,UAAS;;MAK1B;AACI,YAAI,QAAQ,WAAW,QAAQ;AAC3B,gBAAM,IAAI,MAAM,eAAe,KAAK,kEACL,QAAQ,2CAA2C,QAAQ;;AAE9F,gBAAQ,QAAQ,eAAc,KAAK,MAAM,GAAG,QAAQ;;MAUxD;AACI,YAAI,CAAC,CAAC,SAAS,UAAU,KAAK;AAC1B,gBAAM,IAAI,MAAM,wBAAwB,KAAK,oCAAoC;;AAErF,YAAI,CAAC;AACD,oBAAU;AACV,uBAAa,GAAG,IAAI,KAAK,QAAQ;AAC7B,oBAAQ,KAAK;;;AAIjB,oBAAU,QAAQ,MAAM,GAAG,KAAK;;AAEpC,YAAI,QAAQ,WAAW;AACnB,iBAAO,QAAO,IAAI,CAAC,GAAG,OAAO,KAAK;;AAItC,wBAAgB,KAAK,SAAS;AAC9B,4CAAoC,KAAK,cAAc,QAAQ,GAAG,OAAO;AACzE,eAAO,MAAM,SAAS;;MAK1B;AACI,YAAI,CAAC,CAAC,SAAS,UAAU,KAAK;AAC1B,gBAAM,IAAI,MAAM,wBAAwB,KAAK,oCAAoC;;AAErF,YAAI,KAAK,WAAW;AAChB,iBAAO,QAAO,IAAI,CAAC,GAAG,O
AAO,KAAK;;AAEtC,wBAAgB;AAChB,qBAAa,GAAG,IAAI,KAAK,QAAQ;AAC7B,kBAAQ,KAAK;;AAGjB,wBAAgB,KAAK,SAAS;AAC9B,4CAAoC,KAAK,cAAc,QAAQ,GAAG,OAAO,mDAAmD,KAAK,wCAAwC,QAAQ,GAAG;AACpL,eAAO,QAAO,SAAS;;MAQ3B;AACI,YAAI,QAAO,UAAU,KAAK;AACtB,gBAAM,IAAI,MAAM,wBAAwB,KAAK,8BAA8B,QAAO;;AAEtF,YAAI,QAAQ,WAAW,QAAO,MAAM;AAChC,gBAAM,IAAI,MAAM,sDAAsD,QAAQ,cAAc,QAAO,MAAM;;AAE7G,yBAAiB,KAAK,IAAI,GAAG;AAC7B,YAAI,CAAC,KAAK,eAAe,YAAY,KAAK;AACtC,gBAAM,IAAI,MAAM,mCAAmC,iBAAiB,KAAK;;AAE7E,aAAK,UAAU,SAAS,QAAQ,SAAQ;;MAQ5C;AACI,YAAI,QAAO,UAAU,KAAK;AACtB,gBAAM,IAAI,MAAM,wBAAwB,KAAK,8BAA8B,QAAO;;AAEtF,0BAAkB;AAClB,kCAA0B,OAAO,IAAI;AACjC,yBAAe;AACf,iBAAO;;AAEX,YAAI,gBAAgB,QAAO,MAAM;AAC7B,gBAAM,IAAI,MAAM;;UAElB,uCAAuC,QAAO;;AAEhD,YAAI,CAAC,KAAK,eAAe,OAAO,WAAW,KAAK;AAC5C,gBAAM,IAAI,MAAM,2DAA2D,KAAK,eAAe,OAAO;;AAG1G,8BAAsB,gBAAgB,IAAI,IAAI,QAAO,OAAO;AAC5D,wBAAgB;AAChB,aAAK;AACD,oBAAS,SAAQ,SAAQ,CAAC,GAAG,aAAa;AAC1C,uBAAa,GAAG,IAAI,OAAO,QAAQ,EAAE;AACjC,mCAAwB,MAAM,IAAK,IAAI,kBAAkB,IAAI;AAC7D,6BAAgB,CAAC,GAAG,gBAAgB;AACpC,0BAAc,CAAC,GAAG,OAAO,IAAI;AAC7B,oBAAQ,KAAK,SAAQ,OAAM,SAAQ,UAAS,QAAQ,KAAK;;AAE7D,iBAAO;;AAEX,wBAAgB;AAChB,qBAAa,GAAG,IAAI,OAAO,QAAQ;AAC/B,kBAAQ,KAAK;;AAEjB,aAAK,UAAU,SAAS;;;AC9OhC;;;;;;;;;;;;;;;;;MAyCI,kEAAkE;AAC9D,aAAK,UAAU;AACf,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,YAAI,WAAW;AACX,kBAAQ,QAAQ;AACZ,gBAAI,iBAAiB,QAAO;AACxB,oBAAM,IAAI,MAAM,mCAAmC,mCAAmC,QAAO;;AAEjG,gDAAoC,cAAc,QAAO,OAAO;AAChE,iBAAK;;;AAGb,aAAK,WAAW,QAAO;AACvB,aAAK,iBAAiB;AACtB,aAAK,KAAK;;UAEV;AACA,eAAO,KAAK,SAAS;;MAKzB;AACI,eAAO,IAAI,WAAW,CAAC,GAAG,KAAK,UAAU,KAAK,cAAc,KAAK;;MAKrE;AACI,aAAK,QAAQ,QAAQ;AACjB,cAAI,WAAW,QAAQ,CAAC,QAAQ,IAAI,QAAO;AACvC,oBAAO;;;AAGf,aAAK,QAAQ,SAAS;AACtB,aAAK,SAAS;;MAKlB;AACI,eAAO,KAAK,QAAQ;;MASxB,gDAAgD;AAC5C,YAAI,iBAAiB,KAAK;AACtB,gBAAM,IAAI,MAAM,mCAAmC,mCAAmC,KAAK;;AAE/F,YAAI,gBAAgB,MAAM,KAAK,QAAQ,WAAW;AAC9C,gBAAM,IAAI,MAAM,kCAAkC,4CAA4C,KAAK,QAAQ;;AAE/G,4CAAoC,cAAc,KAAK,cAAc;AACrE,eAAO,KAAK;AACR,kCAAwB,KAAK,QAAQ,IAAI,aAAU,SAAQ,SAAQ;AACnE,iBAAO,MAAM,iBAAiB;;;MAQtC;AACI,YAAI,iBAAiB,KAAK;AACtB,gBAAM,IAAI,MAAM,mCAAmC,mCAAmC,KAAK;;AAE/F,YAAI,KAAK,WAAW;AAChB,gBAAM,IAAI,MAAM;;AAEpB,wBAAe,KAAK,QAAQ;AAC5B,4CAAoC,QAAO,OAAO,cAAc;AAChE,eAAO,SAAQ,SAAQ;;MAM3B;AACI,YAAI,QAAO,UAAU,KAAK;AACtB,gBAAM,IAAI,MAAM,mCAAmC,QAAO,4BAA4B,KAAK;;AAE/F,4CAAoC,QAAO,OAAO,KAAK,cAAc;AACrE,YAAI,KAAK,mBAAmB,KAAK;AAC7B,gBAAM,IAAI,MAAM;;AAEpB,aAAK;AACL,aAAK,QAAQ,KAAK;;MAMtB;AACI,YAAI,OAAO;AACP,gBAAM,IAAI,MAAM,0DAA0D;;AAE9E,YAAI,KAAK,mBAAmB,MAAM,OAAO,KAAK;AAC1C,gBAAM,IAAI,MAAM,+BAA+B,iCAAiC,KAAK;;AAEzF,aAAK,QAAQ,SAAS;;MAQ1B;AACI,YAAI,iBAAiB,KAAK;AACtB,gBAAM,IAAI,MAAM,mCAAmC,mCAAmC,KAAK;;AAE/F,YAAI,eAAe,KAAK,eAAe,KAAK,QAAQ;AAChD,gBAAM,IAAI,MAAM,4BAA4B,+BAA+B,KAAK,QAAQ;;AAE5F,YAAI,KAAK,QAAQ,iBAAiB;AAC9B,gBAAM,IAAI,MAAM,oBAAoB;;AAExC,4CAAoC,KAAK,QAAQ,cAAc,OAAO,cAAc;AACpF,eAAO,KAAK,QAAQ;;MAOxB;AACI,YAAI,QAAO,UAAU,KAAK;AACtB,gBAAM,IAAI,MAAM,mCAAmC,QAAO,4BAA4B,KAAK;;AAE/F,YAAI,eAAe,KACf,KAAK,mBAAmB,MAAM,gBAAgB,KAAK;AACnD,gBAAM,IAAI,MAAM,yBAAyB,mCAAmC,KAAK;;AAErF,4CAAoC,KAAK,cAAc,QAAO,OAAO;AACrE,aAAK;AACL,aAAK,QAAQ,gBAAgB;;MASjC;AACI,YAAI,iBAAiB,KAAK;AACtB,gBAAM,IAAI,MAAM,mCAAmC,mCAAmC,KAAK;;AAE/F,4CAAoC,KAAK,cAAc,cAAc;AAGrE,kBAAU,QAAQ,MAAM,GAAG,KAAK;AAChC,YAAI,QAAQ,WAAW;AACnB,iBAAO,QAAO,IAAI,CAAC,GAAG,OAAO,KAAK;;AAEtC,eAAO,KAAK;AACR,0BAAgB,QAAQ,IAAI,OAAK,SAAQ,KAAK,QAAQ,IAAI;AAC1D,iBAAO,MAAM,SAAS;;;MAQ9B;AACI,YAAI,CAAC,CAAC,gBAAgB,iBAAiB,KAAK;AACxC,gBAAM,IAAI,MAAM,uBAAuB,KAAK,2CAA2C;;AAE3F,4CAAoC,KAAK,cAAc,cAAc;AACrE,YAAI,KAAK,WAAW;AAChB,iBAAO,QAAO,IAAI,CAAC,GAAG,OAAO,KAAK;;AAEtC,eAAO,KAAK;AACR,0BAAgB,KAAK,QAAQ,IAAI,OAAK,SAAQ,GAAG;AACjD,iBAAO,QAAO,SAAS;;;;AAS5B;AACH,oBAAc,QAAO;AACrB
,UAAI,QAAO,MAAM,SAAS;AACtB,cAAM,IAAI,MAAM,oDAAoD,QAAO;;AAE/E,UAAI,QAAO,UAAU;AACjB,cAAM,IAAI,MAAM,mCAAmC,QAAO,4BAA4B;;AAE1F,0BAAoB,QAAO,MAAM,MAAM;AACvC,0CAAoC,aAAa,cAAc;AAC/D,yBAAmB,QAAQ;AAC3B,aAAO,IAAI,WAAW,YAAY,cAAc;;AAQ7C;AACH,aAAO,IAAI,WAAW,IAAI,cAAc,cAAc;;AASnD;AACH,UAAI,QAAQ,WAAW,QAAO,MAAM;AAChC,cAAM,IAAI,MAAM,sDAAsD,QAAQ,cAAc,QAAO,MAAM;;AAE7G,uBAAiB,KAAK,IAAI,GAAG;AAC7B,UAAI,eAAe,QAAQ,gBAAgB,MAAM,YAAY;AACzD,cAAM,IAAI,MAAM,mCAAmC,iBAAiB;;AAExE,mBAAa,IAAI,WAAW,IAAI,cAAc,QAAO,OAAO;AAC5D,sBAAgB,QAAQ,SAAQ;AAChC,cAAQ,QAAQ;AACZ,aAAK,QAAQ,OAAO,QAAQ;;AAEhC,aAAO;;AASJ;AACH,wBAAkB;AAClB,gCAA0B,OAAO,IAAI;AACjC,uBAAe;AACf,eAAO;;AAEX,UAAI,gBAAgB,QAAO,MAAM;AAC7B,cAAM,IAAI,MAAM;;UAEd,uCAAuC,QAAO;;AAEpD,4BAAsB,gBAAgB,IAAI,IAAI,QAAO,OAAO;AAC5D,sBAAgB,KAAK;AACjB,yBAAgB;AAChB,kBAAS,SAAQ,SAAQ,CAAC,GAAG,aAAa;AAC1C,qBAAa,GAAG,IAAI,OAAO,QAAQ,EAAE;AACjC,iCAAwB,MAAM,IAAK,IAAI,kBAAkB,IAAI;AAC7D,0BAAgB,CAAC,GAAG,gBAAgB;AACpC,wBAAc,CAAC,GAAG,OAAO,IAAI;AAC7B,mBAAQ,KAAK,SAAQ,OAAM,SAAQ,SAAS,QAAQ;;AAExD,gBAAO;AACP,eAAO;;AAEX,mBAAa,IAAI,WAAW,IAAI,cAAc,QAAO,OAAO,OAAO;AACnE,mBAAa,GAAG,IAAI,QAAQ,QAAQ;AAChC,aAAK,QAAQ,GAAG,QAAQ;;AAE5B,aAAO;;ACvTX;;;;;;;;;;;;;;;;AAoBO,wBAAkB;AACrB,cAAQ,KAAK;aACJ;aACA;AACD,2BAAiB,cAAc,cAAc,MAAM,WAAW;AAC9D,2BAAiB,cAAc,cAAc,MAAM,WAAW;AAC9D,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,4BAAkB,MAAM,KAAK;AAC7B,cAAI,UAAU;AACV,mBAAO,QAAQ,YAAY,UAAU,qBAAqB,MAAM,QAAQ,gBAAgB,QAAQ;;AAGhG,mBAAO,QAAQ,YAAY,UAAU,qBAAqB,MAAM,QAAQ,gBAAgB,QAAQ;;;aAGnG;aACA;AACD,2BAAiB,cAAc,QAAQ,MAAM,WAAW;AACxD,2BAAiB,cAAc,QAAQ,MAAM,WAAW;AACxD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AAEpD,6BAAoB,MAAM,QAAQ,YAAY,UAAU,qBAAqB,MAAM,QAAQ,gBAAgB,QAAQ;AACnH,yBAAe,KAAK,IAAI,aAAU,QAAO;AACzC,0BAAgB,MAAM,WAAW,GAAG;AAEpC,qBAAW,QAAQ;AACf,gBAAI,CAAC,QAAO,QAAQ,OAAO,QAAQ,QAAO,QAAQ;AAC9C,sBAAO;;;AAGf,uBAAa;AACb,iBAAO,UAAU;AAEb,+BAAmB;AAEnB,qBAAS,MAAM,QAAQ,YAAY,UAAU,qBAAqB,QAAQ,QAAQ,gBAAgB,QAAQ;AAC1G,8BAAkB,OAAO,IAAI,aAAU,QAAO;AAG9C,uBAAW,QAAQ;AACf,kBAAI,CAAC,QAAO,QAAQ,OAAO,QAAQ,QAAO,QAAQ,MAC9C,UAAU,QAAQ,QAAO,QAAQ;AACjC,wBAAO;;;AAIf,gCAAoB,MAAM,QAAQ,YAAY,UAAU,qBAAqB,QAAQ,QAAQ,gBAAgB,QAAQ;AACrH,wBAAY,MAAM,YAAW,GAAG;AAEhC,wBAAW,QAAQ;AACf,kBAAI,CAAC,QAAO,QAAQ,OAAO,QAAQ,QAAO,QAAQ,MAC9C,UAAU,QAAQ,QAAO,QAAQ;AACjC,wBAAO;;;;AAInB,iBAAO;;aAEN;AACD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,iBAAO,CAAC,YAAY;;aAEnB;AACD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,sBAAW,cAAc,QAAQ,MAAM,WAAW;AAClD,cAAI,CAAC,MAAK;AACN,oBAAO,YAAY;;AAGvB,iBAAQ,OAAM,KAAK,QAAQ,KAAK,CAAC,QAAW,SAAQ,CAAC,OAAM;;aAE1D;AACD,4BAAkB,KAAK,WAAW,KAAK,UAAQ,UAAU,MAAM,WAAW,aAAa;AACvF,cAAI;AACA,0BAAa,UAAU,WAAW,WAAW;AAC7C,mBAAO,CAAC,YAAY;;AAExB,iBAAO;;aAEN;AACD,0BAAgB,cAAc,aAAa,MAAM,WAAW;AAC5D,wBAAa,cAAc,UAAU,MAAM,WAAW;AACtD,kBAAQ,WAAW;AACnB,iBAAO,CAAC,YAAY;;aAEnB;AACD,wBAAa,cAAc,UAAU,MAAM,WAAW;AACtD,kBAAQ;AACR,iBAAO,CAAC,YAAY;;aAEnB;AACD,wBAAa,cAAc,UAAU,MAAM,WAAW;AACtD,kBAAQ;AACR,iBAAO,CAAC,YAAY;;aAEnB;AACD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,wBAAc,cAAc,SAAS,MAAM,WAAW;AACtD,+BAAqB,cAAc,gBAAgB,MAAM,WAAW;AACpE,8BAAoB,cAAc,eAAe,MAAM,WAAW;AAClE,iCAAuB,cAAc,kBAAkB,MAAM,WAAW;AACxE,yCAA+B,cAAc,0BAA0B,MAAM,WAAW;AACxF,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,8BAAoB,IAAI,YAAY,MAAM,OAAO,MAAM,cAAc,wBAAwB,aAAa;AAC1G,kBAAQ,eAAe;AACvB,iBAAO,CAAC,YAAY,UAAU,QAAO;;aAEpC;AACD,qBAAW,cAAc,iBAAiB,MAAM,WAAW;AAC3D,yBAAc,cAAc,SAAS,MAAM,WAAW;AACtD,8BAAoB,cAAc,UAAU,MAAM,WAAW;AAC7D,mCAAyB,QAAQ,eAAe,GAAG;AACnD,2BAAiB,MAAM,QAAO;AAC9B,iBAAO,CAAC,iBAAiB;;aAExB;AACD,yBAAe,cAAc,iBAAiB,MAAM,WAAW;AAC/D,4BAAkB,cAAc,SAAS,MAAM,WAAW;AAC1D,kCAAwB,QAAQ,eAAe,OAAO;AACtD,iBAAO,CAAC,gBAAgB,KAAK;;aAE5B;AACD,2BAAiB,cAAc,iBAAiB,MAAM,WAAW;AACjE,gCAAsB,cAAc,WAAW,MAAM,WAAW;AAChE,
8BAAoB,cAAc,SAAS,MAAM,WAAW;AAC5D,oCAA0B,QAAQ,eAAe,SAAS;AAC1D,iBAAO,CAAC,kBAAkB,OAAO,eAAe;;aAE/C;AACD,4BAAkB,cAAc,iBAAiB,MAAM,WAAW;AAClE,iCAAuB,cAAc,WAAW,MAAM,WAAW;AACjE,gCAAsB,cAAc,UAAU,MAAM,WAAW;AAC/D,qCAA2B,QAAQ,eAAe,UAAU;AAC5D,6BAAmB,QAAQ,gBAAgB;AAC3C,iBAAO,CAAC,mBAAmB;;aAE1B;AACD,2BAAiB,cAAc,iBAAiB,MAAM,WAAW;AACjE,oCAA0B,QAAQ,eAAe,SAAS;AAC1D,8BAAoB,cAAc,SAAS,MAAM,WAAW;AAC5D,iBAAO,CAAC,kBAAkB,OAAO;;aAEhC;AACD,0BAAgB,cAAc,iBAAiB,MAAM,WAAW;AAChE,8BAAoB,cAAc,UAAU,MAAM,WAAW;AAC7D,0BAAgB,cAAc,WAAW,MAAM,WAAW;AAC1D,mCAAyB,QAAQ,eAAe,QAAQ;AACxD,2BAAiB,MAAM,SAAS;AAChC,iBAAO,CAAC,iBAAiB;;aAExB;AACD,yBAAe,cAAc,iBAAiB,MAAM,WAAW;AAC/D,kCAAwB,QAAQ,eAAe,OAAO;AACtD,iBAAO,CAAC,QAAO,gBAAgB,QAAQ;;aAEtC;AACD,0BAAgB,cAAc,iBAAiB,MAAM,WAAW;AAChE,mCAAyB,QAAQ,eAAe,QAAQ;AACxD,2BAAiB;AACjB,iBAAO,CAAC,iBAAiB;;aAExB;AACD,2BAAiB,cAAc,gBAAgB,MAAM,WAAW;AAChE,yBAAc,cAAc,SAAS,MAAM,WAAW;AACtD,8BAAoB,cAAc,UAAU,MAAM,WAAW;AAC7D,6BAAmB,QAAQ,cAAc,SAAS;AAClD,qBAAW,QAAQ,QAAO;AAC1B,iBAAO,CAAC,WAAW;;aAElB;AACD,2BAAiB,cAAc,gBAAgB,MAAM,WAAW;AAChE,4BAAkB,cAAc,SAAS,MAAM,WAAW;AAC1D,+BAAqB,cAAc,gBAAgB,MAAM,WAAW;AACpE,+BAAqB,cAAc,gBAAgB,MAAM,WAAW;AACpE,6BAAmB,QAAQ,cAAc,SAAS;AAClD,iBAAO,CAAC,WAAW,QAAQ,WAAW,cAAc;;aAEnD;aACA;AACD,iCAAuB,cAAc,WAAW,MAAM,WAAW;AACjE,gCAAsB,cAAc,UAAU,MAAM,WAAW;AAC/D,+BAAqB,cAAc,gBAAgB,MAAM,WAAW;AACpE,8BAAoB,cAAc,eAAe,MAAM,WAAW;AAClE,6BAAmB,QAAQ,eAAe,gBAAgB,cAAc;AACxE,kBAAQ,cAAc;AACtB,iBAAO,CAAC,WAAW;;aAElB;AACD,+BAAqB,cAAc,gBAAgB,MAAM,WAAW;AACpE,+BAAqB,cAAc,gBAAgB,MAAM,WAAW;AACpE,8BAAoB,cAAc,eAAe,MAAM,WAAW;AAClE,6BAAmB,QAAQ,cAAc,cAAc;AACvD,kBAAQ,cAAc;AACtB,iBAAO,CAAC,WAAW;;aAElB;AACD,2BAAiB,cAAc,gBAAgB,MAAM,WAAW;AAChE,gCAAsB,cAAc,WAAW,MAAM,WAAW;AAChE,+BAAqB,cAAc,gBAAgB,MAAM,WAAW;AACpE,+BAAqB,cAAc,gBAAgB,MAAM,WAAW;AACpE,6BAAmB,QAAQ,cAAc,SAAS;AAClD,iBAAO,CAAC,WAAW,OAAO,eAAe,cAAc;;aAEtD;AACD,2BAAiB,cAAc,gBAAgB,MAAM,WAAW;AAChE,+BAAqB,cAAc,gBAAgB,MAAM,WAAW;AACpE,+BAAqB,cAAc,gBAAgB,MAAM,WAAW;AACpE,8BAAoB,cAAc,eAAe,MAAM,WAAW;AAClE,6BAAmB,QAAQ,cAAc,SAAS;AAClD,iBAAO,CAAC,WAAW,MAAM,cAAc,cAAc;;aAEpD;AACD,0BAAe,cAAc,UAAU,MAAM,WAAW;AACxD,+BAAqB,cAAc,gBAAgB,MAAM,WAAW;AACpE,+BAAqB,cAAc,gBAAgB,MAAM,WAAW;AACpE,6BAAmB,WAAW,SAAQ,cAAc;AACpD,kBAAQ,cAAc;AACtB,iBAAO,CAAC,WAAW;;aAElB;AACD,2BAAiB,cAAc,gBAAgB,MAAM,WAAW;AAChE,6BAAmB,QAAQ,cAAc,SAAS;AAClD,8BAAoB,cAAc,SAAS,MAAM,WAAW;AAC5D,+BAAqB,cAAc,gBAAgB,MAAM,WAAW;AACpE,iBAAO,CAAC,WAAW,OAAO,aAAa;;aAEtC;AACD,2BAAiB,cAAc,gBAAgB,MAAM,WAAW;AAChE,8BAAoB,cAAc,UAAU,MAAM,WAAW;AAC7D,6BAAmB,QAAQ,cAAc,SAAS;AAClD,qBAAW,SAAS;AACpB,iBAAO,CAAC,WAAW;;aAElB;AACD,2BAAiB,cAAc,gBAAgB,MAAM,WAAW;AAChE,+BAAqB,cAAc,gBAAgB,MAAM,WAAW;AACpE,+BAAqB,cAAc,gBAAgB,MAAM,WAAW;AACpE,6BAAmB,QAAQ,cAAc,SAAS;AAClD,iBAAO,CAAC,WAAW,QAAQ,cAAc;;aAExC;AACD,8BAAoB,cAAc,UAAU,MAAM,WAAW;AAC7D,+BAAqB,cAAc,gBAAgB,MAAM,WAAW;AACpE,0BAAgB,cAAc,WAAW,MAAM,WAAW;AAC1D,6BAAmB,QAAM,aAAa,SAAS;AAC/C,kBAAQ,cAAc;AACtB,iBAAO,CAAC,WAAW;;;AAGnB,gBAAM,UAAU,aAAa,KAAK;;;AAGvC,uBAAiB;ACjRxB;;;;;;;;;;;;;;;;AAmBA;AACI,wCAAkC,cAAc,YAAY,MAAM,WAAW;AAC7E,wBAAkB,YAAY;AAC9B,sBAAgB,mBAAmB;AACnC,0BAAoB,YAAY;AAChC,sBAAgB,cAAc,WAAW,MAAM,WAAW;AAC1D,UAAI;AACA,YAAI,WAAW,YAAY;AACvB,gBAAM,IAAI,MAAM;;AAGpB,YAAI,CAAC,WAAW,YAAY;AACxB,gBAAM,IAAI,MAAM;;;AAIxB,UAAI;AACA,cAAM,IAAI,MAAM;;AAEpB,qBAAe,cAAc,WAAW,MAAM,WAAW;AACzD,mBAAY,WAAW,MAAM,WAAW;AACxC,yBAAmB,cAAc,cAAc,MAAM,WAAW,SAC3D;AACL,wBAAkB,cAAc,aAAa,MAAM,WAAW;AAC9D,kCAA4B,cAAc,QAAQ,MAAM,WAAW;AACnE,aAAO;QACH;QACA,KAAA;QACA;QACA;QACA;QACA;QACA;;;AAGD,wBAAkB;AACrB,cAAQ,KAAK;aACJ;AACD,yBAAe,cAAc,UAAU,MAAM,WAAW;AACxD,uBAAY,cAAc,OAAO,MAAM,WAAW;AAClD,6BAAmB,cAAc,cAAc,MAAM,WAAW,SAC3D;AACL,2BAAiB,cAAc,YAAY,MAAM,WAAW;AAC5D,iBAAO,CAAC,OA
Aa,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,UAAU,MAAM,WAAW,UAAU,QAAQ,MAAK,YAAY;;aAE9I;AACD,yBAAe,cAAc,WAAW,MAAM,WAAW;AACzD,uBAAY,WAAW,MAAM,WAAW;AACxC,6BAAmB,cAAc,cAAc,MAAM,WAAW,SAC3D;AACL,4BAAkB,cAAc,aAAa,MAAM,WAAW;AAC9D,iBAAO,CAAC,QAAa,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,UAAU,MAAM,WAAW,UAAU,CAAC,OAAO,IAAI,OAAO,KAAK,MAAK,YAAY,CAAC,UAAU,IAAI,UAAU;;aAEvL;AACD,iBAAQ,QAAQ,WAAK,YAAY,WAAW,SAAS,UAAU,kBAAmB,4BAA4B,MAAM,WAAW;AAC/H,iBAAO,CAAC,SAAmB;YACnB,GAAG,cAAc,KAAK,MAAM,WAAW;YACvC,QAAQ,cAAc,UAAU,MAAM,WAAW;YACjD,SAAS,CAAC,OAAO,IAAI,OAAO;YAC5B,KAAK;YACL;YACA,WAAW,CAAC,UAAU,IAAI,UAAU;YACpC,MAAM;YACN,YAAY;YACZ,wBAAwB;;;aAG/B;AACD,iBAAQ,QAAQ,WAAK,YAAY,WAAW,SAAS,UAAU,kBAAmB,4BAA4B,MAAM,WAAW;AAC/H,iBAAO,CAAC,kBAA4B;YAC5B,GAAG,cAAc,KAAK,MAAM,WAAW;YACvC,QAAQ,cAAc,UAAU,MAAM,WAAW;YACjD,SAAS,CAAC,OAAO,IAAI,OAAO;YAC5B,KAAK;YACL;YACA,WAAW,CAAC,UAAU,IAAI,UAAU;YACpC,MAAM;YACN,YAAY;YACZ,wBAAwB;;;aAG/B;aACA;AACD,wBAAc,cAAc,eAAe,MAAM,WAAW;AAC5D,yBAAe,cAAc,WAAW,MAAM,WAAW;AACzD,uBAAY,WAAW,MAAM,WAAW;AACxC,iBAAO,CAAC,gBAAsB,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,UAAU,MAAM,WAAW,UAAU,OAAO,CAAC,OAAO,IAAI,OAAO,KAAK;;aAE7J;aACA;AACD,yBAAe,cAAc,WAAW,MAAM,WAAW;AACzD,uBAAY,WAAW,MAAM,WAAW;AACxC,4BAAkB,cAAc,aAAa,MAAM,WAAW;AAC9D,6BAAmB,cAAc,cAAc,MAAM,WAAW,SAC3D;AACL,iBAAO,CAAC,iBAAsB,cAAc,SAAS,MAAM,WAAW,UAAU,cAAc,UAAU,MAAM,WAAW,UAAU,CAAC,OAAO,IAAI,OAAO,KAAK,MAAK,YAAY,CAAC,UAAU,IAAI,UAAU;;aAEpM;AACD,yBAAe,cAAc,WAAW,MAAM,WAAW;AACzD,uBAAY,cAAc,OAAO,MAAM,WAAW;AAClD,6BAAmB,cAAc,cAAc,MAAM,WAAW,SAC3D;AACL,4BAAkB,cAAc,aAAa,MAAM,WAAW;AAC9D,iBAAO,CAAC,OAAa,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,UAAU,MAAM,WAAW,UAAU,CAAC,OAAO,IAAI,OAAO,IAAI,OAAO,KAAK,MAAK,YAAY,CAAC,UAAU,IAAI,UAAU,IAAI,UAAU;;aAEhN;AACD,yBAAe,cAAc,WAAW,MAAM,WAAW;AACzD,uBAAY,cAAc,OAAO,MAAM,WAAW;AAClD,6BAAmB,cAAc,cAAc,MAAM,WAAW;AAChE,iBAAO,CAAC,SAAc,cAAc,KAAK,MAAM,WAAW,UAAU,CAAC,WAAW,IAAI,WAAW,KAAK,CAAC,OAAO,IAAI,OAAO,KAAK;;aAE3H;AACD,yBAAe,cAAc,WAAW,MAAM,WAAW;AACzD,uBAAY,cAAc,OAAO,MAAM,WAAW;AAClD,6BAAmB,cAAc,cAAc,MAAM,WAAW;AAChE,iBAAO,CAAC,SAAc,cAAc,KAAK,MAAM,WAAW,UAAU,CAAC,WAAW,IAAI,WAAW,KAAK,CAAC,OAAO,IAAI,OAAO,KAAK;;aAE3H;AACD,yBAAe,cAAc,WAAW,MAAM,WAAW;AACzD,uBAAY,cAAc,OAAO,MAAM,WAAW;AAClD,6BAAmB,cAAc,cAAc,MAAM,WAAW;AAChE,sCAA4B,cAAc,uBAAuB,MAAM,WAAW;AAClF,iBAAQ,QAAQ,WAAY,kBAAwB,cAAc,KAAK,MAAM,WAAW,UAAU,CAAC,WAAW,IAAI,WAAW,KAAK,CAAC,OAAO,IAAI,OAAO,KAAK,MAAK;AAC/J,iBAAO,CAAC,QAAQ;;aAEf;AACD,yBAAe,cAAc,WAAW,MAAM,WAAW;AACzD,uBAAY,cAAc,OAAO,MAAM,WAAW;AAClD,6BAAmB,cAAc,cAAc,MAAM,WAAW;AAChE,iBAAO,CAAC,UAAgB,cAAc,KAAK,MAAM,WAAW,UAAU,CAAC,WAAW,IAAI,WAAW,IAAI,WAAW,KAAK,CAAC,OAAO,IAAI,OAAO,IAAI,OAAO,KAAK;;aAEvJ;AACD,yBAAe,cAAc,WAAW,MAAM,WAAW;AACzD,uBAAY,cAAc,OAAO,MAAM,WAAW;AAClD,6BAAmB,cAAc,cAAc,MAAM,WAAW;AAChE,iBAAO,CAAC,UAAgB,cAAc,KAAK,MAAM,WAAW,UAAU,CAAC,WAAW,IAAI,WAAW,IAAI,WAAW,KAAK,CAAC,OAAO,IAAI,OAAO,IAAI,OAAO,KAAK;;aAEvJ;AACD,0BAAgB,cAAc,WAAW,MAAM,WAAW;AAC1D,uBAAY,cAAc,OAAO,MAAM,WAAW;AAClD,4BAAkB,cAAc,aAAa,MAAM,WAAW;AAE9D,+BAAqB,QAAQ;AAC7B,8BAAoB,QAAQ;AAE5B,iCAAuB,UAAU;AACjC,gCAAsB,UAAU;AAChC,iBAAO,CAAC,WAAiB,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,UAAU,MAAM,WAAW,UAAU,CAAC,cAAc,cAAc,MAAK,CAAC,gBAAgB,gBAAgB;;;AAG7L,gBAAM,UAAU,aAAa,KAAK;;;AAGvC,uBAAiB;AC5KxB;;;;;;;;;;;;;;;;AAmBO,wBAAkB;AACrB,cAAQ,KAAK;aACJ;AACD,wBAAc,cAAc,SAAS,MAAM,WAAW;AACtD,wBAAc,cAAc,SAAS,MAAM,WAAW;AACtD,wBAAc,cAAc,SAAS,MAAM,WAAW;AACtD,iBAAO,CAAC,MAAW,OAAO,OAAO;;aAEhC;AACD,wBAAc,cAAc,SAAS,MAAM,WAAW;AACtD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,sBAAY,cAAc,OAAO,MAAM,WAAW;AAClD,iBAAO,CAAC,SAAe,OAAO,MAAM;;aAEnC;AACD,yBAAe,cAAc,UAAU,MAAM,WAAW;AACxD,6BAAmB,cAAc,cAAc,MAAM,WAAW;AAChE,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,iBAAO,CAAC,YAAkB,QAAQ,YAAY;;aAE7C;AACD,0BAAgB,cAAc,WAAW
,MAAM,WAAW;AAC1D,wBAAc,cAAc,SAAS,MAAM,WAAW;AACtD,0BAAgB,cAAc,WAAW,MAAM,WAAW;AAC1D,2BAAiB,cAAc,YAAY,MAAM,WAAW;AAC5D,iBAAO,CAAC,QAAa,SAAS,OAAO,SAAS;;aAE7C;AACD,iBAAO,CAAC,OAAW,cAAc,SAAS,MAAM,WAAW,UAAU,cAAc,SAAS,MAAM,WAAW;;aAE5G;AACD,iBAAO,CAAC,UAAe,cAAc,KAAK,MAAM,WAAW;;aAE1D;AACD,iBAAO,CAAC,cAEJ,cAAc,SAAS,MAAM,WAAW,UAAU,cAAc,UAAU,MAAM,WAAW,UAAU,cAAc,UAAU,MAAM,WAAW,UAAU,cAAc,SAAS,MAAM,WAAW;;aAEnM;AACD,wBAAc,cAAc,SAAS,MAAM,WAAW;AACtD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,wBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,iBAAO,CAAC,MAAY,OAAO,MAAM,OAAM,cAAc,SAAS,MAAM,WAAW;;aAE9E;AACD,wBAAc,cAAc,SAAS,MAAM,WAAW;AACtD,wBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,yBAAe,cAAc,UAAU,MAAM,WAAW;AACxD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,iBAAO,CAAC,gBAAsB,OAAO,OAAM,QAAQ,cAAc,SAAS,MAAM,WAAW,UAAU;;aAEpG;AACD,iBAAO,CAAC,OAAY,cAAc,SAAS,MAAM,WAAW,UAAU,cAAc,SAAS,MAAM,WAAW;;aAE7G;AACD,iBAAO,CAAC,WAAgB,cAAc,KAAK,MAAM,WAAW;;;AAG5D,gBAAM,UAAU,aAAa,KAAK;;;AAGvC,uBAAiB;AChFxB;;;;;;;;;;;;;;;;AAmBA;AACI,oBAAc,cAAc,SAAS,MAAM,WAAW;AACtD,qBAAe,cAAc,UAAU,MAAM,WAAW;AACxD,4BAAsB,cAAc,iBAAiB,MAAM,WAAW;AACtE,2BAAqB,cAAc,gBAAgB,MAAM,WAAW;AACpE,6BAAuB,cAAc,kBAAkB,MAAM,WAAW;AACxE,2BAAqB,cAAc,gBAAgB,MAAM,WAAW;AACpE,aAAO;QACH;QACA;QACA;QACA;QACA;QACA;;;AAGD,wBAAkB;AACrB,cAAQ,KAAK;aACJ;AACD,iBAAQ,OAAO,QAAQ,eAAe,cAAc,gBAAgB,gBAAiB,UAAU,MAAM,WAAW;AAChH,yBAAe,MAAM,OAAY,gCAAgC,OAAO,QAAQ,eAAe,cAAc,gBAAgB;AAC7H,iBAAO,CAAC,OAAO,iBAAiB,OAAO;;aAEtC;AACD,iBAAQ,OAAO,QAAQ,eAAe,cAAc,kBAAmB,UAAU,MAAM,WAAW;AAClG,qCAA2B,cAAc,sBAAsB,MAAM,WAAW;AAChF,yBAAe,MAAM,OAAY,6BAA6B,OAAO,QAAQ,eAAe,cAAc,gBAAgB;AAC1H,iBAAO,CAAC,OAAO,iBAAiB,OAAO;;aAEtC;aACA;AACD,iBAAQ,OAAO,QAAQ,eAAe,cAAc,kBAAmB,UAAU,MAAM,WAAW;AAClG,iBAAO,CAAC,MAAM,OAAY,uBAAuB,OAAO,QAAQ,eAAe,cAAc;;aAE5F;AACD,4BAAkB,MAAW,cAAc,aAAa,MAAM,WAAW,UAAU;AACnF,yBAAe,CAAC,MAAM,WAAiB;AACvC,oBAAU;AACV,iBAAO;;aAEN;AACD,iBAAO,eAAqB,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,KAAK,MAAM,WAAW;;;AAG9G,gBAAM,UAAU,aAAa,KAAK;;;AAGvC,uBAAiB;AClExB;;;;;;;;;;;;;;;;AAmBO,wBAAkB;AACrB,cAAQ,KAAK;aACJ;AACD,oBAAU,cAAc,KAAK,MAAM,WAAW;AAC9C,oBAAU,cAAc,KAAK,MAAM,WAAW;AAC9C,yBAAe,cAAc,UAAU,MAAM,WAAW;AACxD,yBAAe,KAAW,GAAG,GAAG;AAChC,iBAAO,CAAC,OAAO,QAAQ,OAAO;;aAE7B;AACD,oBAAU,cAAc,KAAK,MAAM,WAAW;AAC9C,yBAAe,OAAa;AAC5B,iBAAO,CAAC,OAAO,QAAQ,OAAO;;aAE7B;AACD,oBAAU,cAAc,KAAK,MAAM,WAAW;AAC9C,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,yBAAe,OAAa,GAAG;AAC/B,iBAAO,CAAC,OAAO,QAAQ,OAAO;;;AAG9B,gBAAM,UAAU,aAAa,KAAK;;;AAGvC,uBAAiB;AC3CxB;;;;;;;;;;;;;;;;AAmBO,wBAAkB;AACrB,cAAQ,KAAK;aACJ;AACD,iBAAO,UAAU,KAAK;;aAErB;AACD,sBAAY,cAAc,WAAW,MAAM,WAAW;AACtD,iBAAO,CAAC,UAAU,KAAK,MAAM,WAAW,YAAY;aACnD;AACD,iBAAO,CAAC,UAAU,KAAK,MAAM,WAAW;aACvC;aACA;aACA;AACD,wBAAa,cAAc,KAAK,MAAM,WAAW;AACjD,iBAAO,CAAC,YAAY;;aAEnB;AACD,iBAAO,cAAc,KAAK,MAAM,WAAW,SACtC,IAAI,OAAO,YAAY;aAC3B;AACD,2BAAiB,cAAc,KAAK,MAAM,WAAW;AACrD,iBAAO,CAAC,YAAY;aACnB;AACD,iBAAO,CAAC,UAAe,cAAc,KAAK,MAAM,WAAW,SAAS,OAAO;aAC1E;AACD,iBAAO,cAAc,KAAK,MAAM,WAAW,SACtC,IAAI,OAAO,UAAe,EAAE;aAChC;AACD,iBAAO,CAAC,QAAa,cAAc,KAAK,MAAM,WAAW,SAAS,MAAM;aACvE;AACD,iBAAO,CAAC,QAAa,cAAc,KAAK,MAAM,WAAW,SAAS,MAAM;aACvE;AACD,iBAAO,CAAC,QAAa;aACpB;AACD,yBAAc,cAAc,KAAK,MAAM,WAAW;AAClD,wBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,0BAAgB,cAAc,WAAW,MAAM,WAAW;AAC1D,4BAAkB,cAAc,aAAa,MAAM,WAAW;AAC9D,kBAAQ,KAAK;AAEb,kBAAQ,IAAI;AACZ,uBAAa,GAAG,IAAI,MAAK,QAAQ;AAC7B,oBAAQ,IAAI,MAAM,UAAU,MAAM,KAAK,MAAK,GAAG,YAC1C,MAAM,GAAG;;AAElB,iBAAO,CAAC;;AAER,gBAAM,UAAU,aAAa,KAAK;;;AAGvC,uBAAiB;ACrExB;;;;;;;;;;;;;;;;;MA2BI;AACI,aAAK,WAAW;AAChB,aAAK,aAAa;AAClB,aAAK,SAAS,QAAO;AAErB,aAAK,YAAY,IAAI;AACrB,aAAK,KAAK;;UAEV;AACA,eAAO,KAAK,OAAO;;MAKvB;AACI,aAAK,UAAU,QAAQ,WAAS,MAAM;AACtC,aAAK,UAAU;AACf,aAAK,OAAO;;MAKhB;AACI,eAAO,KAAK,
UAAU;;YAOpB;AACF,aAAK,uBAAuB,MAAM;AAGlC,sBAAc,MAAM,KAAK;AAEzB,aAAK,UAAU,QAAQ,WAAS,MAAM;AACtC,aAAK,UAAU;AACf,eAAO,KAAK;AACR,0BAAgB,QAAQ;AACxB,6BAAmB,MAAM;AACzB,+BAAqB,QAAQ;AAC7B,kBAAY,eAAe,cAAc,MAAM,kDACxC,uCAAuC;AAE9C,uBAAa,GAAG,IAAI,YAAY;AAC5B,wBAAY,MAAM;AAClB,0BAAc,QAAQ;AACtB,iBAAK;AACL,iBAAK,UAAU,IAAI,KAAK;;AAE5B,iBAAO,KAAK;;;YAkBd;AACF,aAAK,uBAAuB,MAAM;AAClC,sBAAc,MAAM,KAAK;AACzB,eAAO,KAAK;AACR,yBAAe;AACf,uBAAa,GAAG,IAAI,MAAM,QAAQ;AAC9B,wBAAY,MAAM;AAClB,0BAAc,KAAK,gBAAgB,KAAK;AACxC,mBAAO,KAAK;;AAEhB,iBAAO,MAAM;;;MAIrB;AACI,uBAAe,KAAK,UAAU,IAAI;AAClC,eAAO,UAAU,OAAO,SAAS;;MAErC;AACI,YAAI,IAAI,UAAU,KAAK;AACnB,gBAAM,IAAI,MAAM,oBAAoB,KAAK,qBAClC,IAAI;;AAEf,YAAI,MAAM,UAAU,KAAK;AACrB,gBAAM,IAAI,MAAM,sBAAsB,KAAK,uBACpC,MAAM;;;;ACzHzB;;;;;;;;;;;;;;;;AAkBO,wBAAkB;AACrB,cAAQ,KAAK;aACJ;aACA;AACD,2BAAiB,cAAc,YAAY,MAAM,WAAW;AAC5D,6BAAmB,cAAc,cAAc,MAAM,WAAW;AAChE,6BAAkB,IAAI,UAAU,UAAU;AAC1C,0BAAgB,aAAa,KAAK,MAAM;AACxC,iBAAO,CAAC,WAAU;;aAEjB;aACA;AACD,yBAAe,cAAc,eAAe,MAAM,WAAW,SAAS;AACtE,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,yBAAe,cAAc,UAAU,MAAM,WAAW;AACxD,6BAAkB,gBAAgB,iBAAiB,OAAO;AAC1D,iBAAO,CAAC,MAAM,WAAU,OAAO,MAAM;;aAEpC;aACA;AACD,yBAAe,cAAc,eAAe,MAAM,WAAW,SAAS;AACtE,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,+BAAqB,cAAc,gBAAgB,MAAM,WAAW;AACpE,6BAAkB,gBAAgB,iBAAiB,OAAO;AAC1D,iBAAO,CAAC,MAAM,WAAU,KAAK,MAAM;;;AAGnC,gBAAM,UAAU,aAAa,KAAK;;;AAGvC,uBAAiB;AChDxB;;;;;;;;;;;;;;;;AAmBO,wBAAkB;AACrB,cAAQ,KAAK;aACJ;AACD,yBAAe,cAAc,UAAU,MAAM,WAAW;AACxD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,+BAAqB,cAAc,gBAAgB,MAAM,WAAW;AACpE,iBAAO,CAAC,OAAY,eAAe,QAAQ,CAAC,KAAK,IAAI,KAAK,KAAK;;aAE9D;AACD,yBAAe,cAAc,UAAU,MAAM,WAAW;AACxD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,+BAAqB,cAAc,gBAAgB,MAAM,WAAW;AACpE,iBAAO,CAAC,OAAY,sBAAsB,QAAQ,CAAC,KAAK,IAAI,KAAK,KAAK;;aAErE;AACD,2BAAc,cAAc,SAAS,MAAM,WAAW;AACtD,wBAAc,cAAc,SAAS,MAAM,WAAW;AACtD,yBAAe,cAAc,UAAU,MAAM,WAAW;AACxD,2BAAiB,cAAc,YAAY,MAAM,WAAW;AAC5D,yBAAe,cAAc,UAAU,MAAM,WAAW;AACxD,qCAA2B,cAAc,sBAAsB,MAAM,WAAW;AAChF,iBAAO,CAAC,OAAY,cAAc,UAAO,OAAO,QAAQ,UAAU,QAAQ;;;AAG1E,gBAAM,UAAU,aAAa,KAAK;;;AAGvC,uBAAiB;AC9CxB;;;;;;;;;;;;;;;;AAmBO,wBAAkB;AACrB,cAAQ,KAAK;aACJ;AACD,iBAAO,CAAC,MAAY,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,KAAK,MAAM,WAAW;;aAErG;AACD,iBAAO,CAAC,SAAe,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,KAAK,MAAM,WAAW;;aAExG;AACD,iBAAO,CAAC,QAAc,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,KAAK,MAAM,WAAW;;aAEvG;AACD,iBAAO,CAAC,aAAmB,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,KAAK,MAAM,WAAW;;aAE5G;AACD,iBAAO,CAAC,KAAW,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,KAAK,MAAM,WAAW;;aAEpG;AACD,iBAAO,CAAC,UAAgB,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,KAAK,MAAM,WAAW;;aAEzG;AACD,iBAAO,CAAC,WAAiB,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,KAAK,MAAM,WAAW;;aAE1G;AACD,iBAAO,CAAC,WAAiB,cAAc,KAAK,MAAM,WAAW;;aAE5D;AACD,iBAAO,CAAC,UAAgB,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,KAAK,MAAM,WAAW;;aAEzG;aACA;AACD,iBAAO,CAAC,MAAY,cAAc,aAAa,MAAM,WAAW,UAAU,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,KAAK,MAAM,WAAW;;;AAG5J,gBAAM,UAAU,aAAa,KAAK;;;AAGvC,uBAAiB;ACxDxB;;;;;;;;;;;;;;;;AAmBO,wBAAkB;AACrB,cAAQ,KAAK;aACJ;aACA;aACA;AACD,iBAAO,CAAC,OAAa,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,cAAc,MAAM,WAAW,UAAU,cAAc,cAAc,MAAM,WAAW;aACpN;AACD,iBAAO,CAAC,WAAgB,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,QAAQ,MAAM,WAAW;aAC5G;AACD,4CAAkC,cAAc,YAAY,MAAM,WAAW;AAC7E,4BAAkB,YAAY;AAC9B,0BAAgB,mBAAmB;AACnC,0BAAgB,cAAc,WAAW,MAAM,WAAW;AAC1D,cAAI;AACA,gBAAI,WAAW,YAAY;AACvB,oBAAM,IAAI,MAAM;;AAGpB,gBAAI,CAAC,WAAW,YAAY;AACxB,oBAAM,IAAI,MAAM;;;AAGxB,sCAA4B,cAAc,QAAQ,MAAM,WAAW;AACnE,iBAAO,CAAC,SAAmB;YACnB,GAAG,cAAc,KAAK,MAAM,WAAW;YACvC,GAAG,cAAc,KAAK,MAAM,WAAW;YACvC,YAAY,cAAc,cAAc,MAAM,WAAW;YACzD,YAAY,cAAc,cAAc,MAAM,WAAW;YACzD,MAAM;YACN,YAAY;YACZ,wBAAwB;;;AAGhC,gBAAM,UAAU,a
AAa,KAAK;;;AAGvC,uBAAiB;ACvDxB;;;;;;;;;;;;;;;;AAmBO,wBAAkB;AACrB,cAAQ,KAAK;aACJ;aACA;AACD,iBAAO,CAAC,UAAgB,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,QAAQ,MAAM,WAAW,UAAU,cAAc,YAAY,MAAM,WAAW,UAAU,cAAc,UAAU,MAAM,WAAW,UAAU,cAAc,SAAS,MAAM,WAAW,UAAU,cAAc,WAAW,MAAM,WAAW;;aAE1T;AACD,iBAAO,CAAC,UAAgB,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,QAAQ,MAAM,WAAW,UAAU,cAAc,YAAY,MAAM,WAAW,UAAU,cAAc,UAAU,MAAM,WAAW,UAAU,cAAc,SAAS,MAAM,WAAW,UAAU,cAAc,WAAW,MAAM,WAAW;;aAE1T;AACD,iBAAO,CAAC,2BAAiC,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,UAAU,MAAM,WAAW,UAAU,cAAc,QAAQ,MAAM,WAAW,UAAU,cAAc,SAAS,MAAM,WAAW,UAAU,cAAc,QAAQ,MAAM,WAAW;;aAEnR;AACD,iBAAO,CAAC,SAAc,cAAc,KAAK,MAAM,WAAW;;aAEzD;AACD,iBAAO,CAAC,WAAiB,cAAc,KAAK,MAAM,WAAW;;aAE5D;AACD,iBAAO,CAAC,cAAoB,cAAc,iBAAiB,MAAM,WAAW,UAAU,cAAc,eAAe,MAAM,WAAW,UAAU,cAAc,gBAAgB,MAAM,WAAW,UAAU,cAAc,gBAAgB,MAAM,WAAW;;;AAGtP,gBAAM,UAAU,aAAa,KAAK;;;AAGvC,uBAAiB;AC5CxB;;;;;;;;;;;;;;;;AAmBO,wBAAkB;AACrB,cAAQ,KAAK;aACJ;AACD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,2BAAiB,cAAc,YAAY,MAAM,WAAW;AAC5D,iBAAO,CAAC,KAAU,cAAc,KAAK,MAAM,WAAW,UAAU,MAAM;;aAErE;AACD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,2BAAiB,cAAc,YAAY,MAAM,WAAW;AAC5D,iBAAO,CAAC,KAAW,cAAc,KAAK,MAAM,WAAW,UAAU,MAAM;;aAEtE;AACD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,2BAAiB,cAAc,YAAY,MAAM,WAAW;AAC5D,iBAAO,CAAC,KAAU,cAAc,KAAK,MAAM,WAAW,UAAU,MAAM;;aAErE;AACD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,2BAAiB,cAAc,YAAY,MAAM,WAAW;AAC5D,iBAAO,CAAC,MAAU,cAAc,KAAK,MAAM,WAAW,UAAU,MAAM;;aAErE;AACD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,2BAAiB,cAAc,YAAY,MAAM,WAAW;AAC5D,iBAAO,CAAC,IAAU,cAAc,KAAK,MAAM,WAAW,UAAU,MAAM;;aAErE;AACD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,2BAAiB,cAAc,YAAY,MAAM,WAAW;AAC5D,iBAAO,CAAC,IAAU,cAAc,KAAK,MAAM,WAAW,UAAU,MAAM;;aAErE;AACD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,iBAAO,CAAC,OAAa,cAAc,KAAK,MAAM,WAAW,UAAU;;aAElE;AACD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,iBAAO,CAAC,OAAa,cAAc,KAAK,MAAM,WAAW,UAAU;;aAElE;AACD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,2BAAiB,cAAc,YAAY,MAAM,WAAW;AAC5D,iBAAO,CAAC,KAAW,cAAc,KAAK,MAAM,WAAW,UAAU,MAAM;;aAEtE;AACD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,4BAAkB,cAAc,aAAa,MAAM,WAAW;AAC9D,2BAAgB,cAAc,WAAW,MAAM,WAAW;AAC1D,iBAAO,CAAC,QAAa,cAAc,KAAK,MAAM,WAAW,UAAU,MAAM,WAAW;;;AAGpF,gBAAM,UAAU,aAAa,KAAK;;;AAGvC,uBAAiB;AC1ExB;;;;;;;;;;;;;;;;AAoBO,wBAAkB;AACrB,cAAQ,KAAK;aACJ;aACA;AACD,oBAAU,cAAc,KAAK,MAAM,WAAW;AAC9C,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,uBAAa,cAAc,WAAW,MAAM,WAAW;AACvD,mBAAS,OAAO,MAAM,GAAG;AACzB,iBAAO,CAAC,QAAa,QAAQ;;aAE5B;aACA;AACD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,yBAAc,cAAc,KAAK,MAAM,WAAW;AAClD,0BAAgB,cAAc,WAAW,MAAM,WAAW;AAC1D,iBAAO,CAAC,OAAa,QAAO,MAAW,SAAS,UAAU;;aAEzD;aACA;AACD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,yBAAc,cAAc,KAAK,MAAM,WAAW;AAClD,iBAAO,CAAC,SAAc,QAAO;;aAE5B;AAED,wBAAc,cAAc,SAAS,MAAM,WAAW;AAEtD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,iBAAO,CAAC,OAAY,cAAc,KAAK,MAAM,WAAW,UAAU,OAAO;;aAExE;AACD,wBAAc,cAAc,SAAS,MAAM,WAAW;AACtD,sBAAY,cAAc,OAAO,MAAM,WAAW;AAClD,0BAAgB,cAAc,WAAW,MAAM,WAAW;AAC1D,4BAAkB,cAAc,aAAa,MAAM,WAAW;AAC9D,0BAAgB,cAAc,WAAW,MAAM,WAAW;AAC1D,+BAAqB,cAAc,gBAAgB,MAAM,WAAW;AACpE,8BAAoB,cAAc,eAAe,MAAM,WAAW;AAClE,iCAAuB,cAAc,kBAAkB,MAAM,WAAW;AACxE,0BAAe,cAAc,KAAK,MAAM,WAAW;AACnD,iBAAO,CAAC,cAAmB,SAAQ,OAAO,KAAK,SAAS,WAAW,SAAS,cAAc,aAAa;;aAEtG;AACD,iBAAO,KAAK;AACR,yBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,4BAAgB,cAAc,WAAW,MAAM,WAAW;AAG1D,0BAAc,QAAQ,GAAG;AACzB,kCAAsB,QAAc,QAAQ,IAAI;AAChD,2BAAe,QAAQ,IAAI;AACvB,gCAAkB,aAAiB,QAAO,OAAO;AACjD,kBAAI,CAAC,aACD,CAAC,aAAiB,QAAc,SAAQ,OAAO;AAC/C,sBAAM,IAAI,MAAM;;AAEpB,qBAAO,YAAY,UAAS,SAAc,SAAQ;;AAEtD,mBAAO,CAAC,MAAY,QAAQ;;;aAG/B;AACD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,0BAAe,cAAc,UAAU,MAAM,WAAW;AACxD,iBAAO,QAAc,SAAQ;;aAE5B;AACD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,iBAAO,CAAC,MAAW,
cAAc,KAAK,MAAM,WAAW,UAAU;;aAEhE;aACA;AACD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,kCAAwB,cAAc,mBAAmB,MAAM,WAAW;AAC1E,0BAAe,cAAc,KAAK,MAAM,WAAW;AACnD,iBAAO,OAAY,SAAQ,iBAAiB;;aAE3C;AACD,0BAAgB,cAAc,WAAW,MAAM,WAAW;AAC1D,yBAAe,cAAc,UAAU,MAAM,WAAW;AACxD,wBAAc,cAAc,SAAS,MAAM,WAAW;AACtD,iBAAO,CAAC,UAAgB,SAAS,QAAQ;;aAExC;AACD,oBAAU,cAAc,KAAK,MAAM,WAAW;AAC9C,0BAAgB,cAAc,WAAW,MAAM,WAAW;AAC1D,iBAAO,CAAC,SAAe,GAAG;;aAEzB;AACD,0BAAgB,cAAc,iBAAiB,MAAM,WAAW;AAChE,wBAAc,cAAc,eAAe,MAAM,WAAW;AAC5D,+BAAqB,cAAc,gBAAgB,MAAM,WAAW;AACpE,+BAAqB,cAAc,gBAAgB,MAAM,WAAW;AACpE,iBAAO,CAAC,cAAoB,SAAS,cAAc,OAAO,aAAa,UAAU,aAAa,QACtF,eACA,MAAW,cAAc,aAAa;;;AAG9C,gBAAM,UAAU,aAAa,KAAK;;;AAGvC,uBAAiB;ACzHxB;;;;;;;;;;;;;;;;AAmBO,wBAAkB;AACrB,cAAQ,KAAK;aACJ;AACD,iBAAO,CAAC,IAAU,cAAc,KAAK,MAAM,WAAW;;aAErD;AACD,iBAAO,CAAC,KAAW,cAAc,KAAK,MAAM,WAAW;;aAEtD;AACD,iBAAO,CAAC,KAAW,cAAc,KAAK,MAAM,WAAW;;aAEtD;AACD,iBAAO,CAAC,MAAY,cAAc,KAAK,MAAM,WAAW;;;AAGxD,gBAAM,UAAU,aAAa,KAAK;;;AAGvC,uBAAiB;ACrCxB;;;;;;;;;;;;;;;;AAmBO,wBAAkB;AACrB,cAAQ,KAAK;aACJ;AACD,iBAAO,CAAC,MAAW,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,SAAS,MAAM,WAAW;;aAExG;AACD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,iBAAO,CAAC,WAAiB,cAAc,KAAK,MAAM,WAAW,UAAU;;aAEtE;AACD,uBAAa,cAAc,QAAQ,MAAM,WAAW;AACpD,iBAAO,CAAC,QAAc,cAAc,KAAK,MAAM,WAAW,UAAU;;aAEnE;AACD,iBAAO,CAAC,SAAc,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,SAAS,MAAM,WAAW;;aAE3G;AACD,iBAAO,CAAC,UAAgB,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,WAAW,MAAM,WAAW,UAAU,cAAc,QAAQ,MAAM,WAAW;;aAEhK;aACA;AACD,iBAAO,CAAC,KAAU,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,WAAW,MAAM,WAAW,UAAU,cAAc,iBAAiB,MAAM,WAAW;;aAEnK;AACD,6BAAmB,cAAc,cAAc,MAAM,WAAW;AAChE,2BAAiB,cAAc,YAAY,MAAM,WAAW;AAC5D,iBAAO,CAAC,eAAqB,cAAc,KAAK,MAAM,WAAW,UAAU,YAAY;;aAEtF;AACD,6BAAmB,cAAc,cAAc,MAAM,WAAW;AAChE,wBAAc,cAAc,SAAS,MAAM,WAAW;AACtD,iBAAO,CAAC,eAAqB,cAAc,KAAK,MAAM,WAAW,UAAU,YAAY;;aAEtF;AACD,4BAAkB,cAAc,aAAa,MAAM,WAAW;AAC9D,6BAAmB,cAAc,cAAc,MAAM,WAAW,SAAS;AACzE,iBAAO,CAAC,cAAmB,cAAc,KAAK,MAAM,WAAW,UAAU,WAAW;;aAEnF;AACD,iBAAO,CAAC,YAAkB,cAAc,KAAK,MAAM,WAAW,UAAU,cAAc,SAAS,MAAM,WAAW;;;AAGhH,gBAAM,UAAU,aAAa,KAAK;;;AAGvC,uBAAiB;AChExB;;;;;;;;;;;;;;;;AA2CO;AACH,oBAAe;AACX,gBAAQ,MAAK;eACJ;AACD,mBAAO,KAAS,MAAM,UAAqB,OAAM,YAAW;eAC3D;AACD,mBAAO,KAAS,MAAM,YAAoB,OAAM,YAAW;eAC1D;AACD,mBAAO,YAAkB,OAAM,YAAW;eACzC;AACD,mBAAO,KAAS,MAAM,YAAsB,OAAM,YAAW;eAC5D;AACD,mBAAO,KAAS,MAAM,YAAmB,OAAM,YAAW;eACzD;AACD,mBAAO,YAAkB,OAAM,YAAW;eACzC;AACD,mBAAO,KAAS,MAAM,YAAqB,OAAM,YAAW;eAC3D;AACD,mBAAO,KAAS,MAAM,YAAgB,OAAM,YAAW;eACtD;AACD,mBAAO,KAAS,MAAM,YAAgB,OAAM,YAAW;eACtD;AACD,mBAAO,KAAS,MAAM,YAAkB,OAAM,YAAW;eACxD;AACD,mBAAO,KAAS,MAAM,YAAmB,OAAM,YAAW;eACzD;AACD,mBAAO,KAAS,MAAM,YAAwB,OAAM,YAAW;eAC9D;AACD,mBAAO,KAAS,MAAM,YAAoB,OAAM,YAAW;eAC1D;AACD,mBAAO,KAAS,MAAM,YAAoB,OAAM,YAAW;eAC1D;AACD,mBAAO,KAAS,MAAM,YAAmB,OAAM,YAAW;eACzD;AACD,mBAAO,KAAS,MAAM,YAAyB,OAAM,YAAW;eAC/D;AACD,mBAAO,YAAoB,OAAM,YAAW,UAAS;eACpD;AACD,6BAAiB,gBAAgB,MAAK;AACtC,gBAAI,YAAY,SAAS;AACrB,qBAAO,SAAS,eAAe,IAAI,cAAc,OAAM,YAAW;;AAGlE,oBAAM,UAAU,aAAa,MAAK;;;AAGtC,kBAAM,UAAU,eAAe,MAAK;;SAI7C,MAAM,WAAW;AACpB,UAAI,WAAmB;AACnB,eAAO,MAAM,KAAK,WAAU,GAAG,OAAO;;AAE1C,aAAO,GAAG,OAAO;;;MCvFjB,wBAAwB,qBAAqB,oBAAoB,kBAAkB;AAC/E,aAAK,YAAY;AACjB,aAAK,iBAAiB;AACtB,aAAK,gBAAgB;AACrB,aAAK,cAAc;AACnB,aAAK,cAAc,CAAE,IAAI,GAAG,WAAW,IAAI,aAAa;AACxD,aAAK,WAAW,CAAC,KAAK;AACtB,aAAK,SAAS;AACd,aAAK;;MAET;AACI,eAAO,CAAE,IAAI,WAAW,aAAa;;UAOrC;AACA,YAAI,KAAK,aAAa;AAClB,eAAK,WAAW;AAChB,eAAK;;;UAGT;AACA,eAAO,KAAK;;UAKZ;AACA,eAAO,KAAK,mBAAmB;;UAM/B;AACA,eAAO,KAAK;;MAEhB;AACI,sBAAc;AACd,qBAAa,GAAG,IAAI,KAAK,SAAS,SAAS,GAAG;AAC1C,4BAAiB,KAAK,SAAS,MAAM,GAAG,KAAK,SAAS,SAAS;AAC/D,gBAAM,KAAK,KAAK,qBAAqB;;AAEzC,cAAM,KAAK;AACX,aAAK,qBAAqB;;
MAE9B;AACI,eAAO,YACH,UACK,IAAI,aAAY,QAAQ,OAAO,KAAK,QAAQ,gBAAgB,IAC7D,KACA,GAAG,QAAQ,aAAa,QAAQ,eAC/B,KAAK,OACV;;MAMR;AACI,YAAI,KAAK;AACL,eAAK;AACL,eAAK,WAAW,KAAK,SAAS;AAC9B,eAAK,SAAS,KAAK,KAAK,SAAS,KAAK,QAAQ;AAC9C,eAAK,mBAAmB,QAAQ,KAAK,qBAAqB,KAAK;;;MAOvE;AACI,YAAI,KAAK,YAAY,KAAK,SAAS,SAAS;AACxC,eAAK,WAAW,KAAK,SAAS;AAC9B,eAAK,SAAS,OAAO;AACrB,eAAK,kBAAkB;;AAGvB,gBAAM,IAAI,MAAM;;;MAOxB;AACI,YAAI,KAAK,YAAY,KAAK,SAAS,SAAS;AACxC,eAAK,WAAW,KAAK,SAAS;AAC9B,eAAK;AACL,0BAAgB,OAAO,OAAO,IAAI,KAAK,SAAS,KAAK,SAAS,SAAS;AACvE,kBAAQ,eAAe;AACvB,kBAAQ,KAAK,KAAK;AAClB,eAAK,SAAS,OAAO,IAAI,GAAG;AAC5B,eAAK,mBAAmB,OAAO,GAAG,GAAG,KAAK,qBAAqB,KAAK;;AAGpE,gBAAM,IAAI,MAAM;;;MAGxB;AACI,eAAO,KAAK,UAAU;;MAE1B;AACI,aAAK,eAAe,YAAY,MAAM;;MAE1C;AACI,eAAO,KAAK,eAAe;;MAE/B;AACI,aAAK,cAAc,WAAW,MAAM;;MAExC;AACI,eAAO,KAAK,cAAc;;MAE9B;AACI,0BAAkB,KAAK;AACnB,eAAK,eAAe,KAAK,cAAc;;AAE3C,0BAAkB,KAAK;AACnB,eAAK,cAAc,KAAK,cAAc;;;;ACpIlD;;;;;;;;;;;;;;;;AAyBO;AACH,wBAAkB,IAAI;AACtB,4BAAsB;AACtB,wBAAkB;AAClB,uBAAiB;AAGjB,mBAAa,IAAI;AACjB,6BAAuB,OAAO,KAAK,QAAQ,IAAI,UAAQ,cAAc,MAAM;AAC3E,0BAAoB;AACpB,UAAI,aAAa;AACb,wBAAgB,UAAU,IAAI,UAAQ,cAAc,KAAK,MAAM;;AAEnE,uBAAiB,CAAC,GAAG;AACrB,aAAO,SAAS,SAAS;AACrB,qBAAa,SAAS;AACtB,YAAI,cAAc,SAAS,eAAe,SAAS,YAAY;AAC3D,cAAI,eAAe;AACf,0BAAc;AACd,yBAAa,YAAY,SAAS,IAAI,WAAS,MAAM,MAChD,OAAO,UAAQ,UAAU,IAAI;;;AAG1C,kBAAU,IAAI,KAAK;AAEnB,YAAI,UAAU,KAAK,SAAS;AACxB;;AAGJ,YAAI,eAAe,QAAQ,KAAK,UAAU;AACtC;;AAGJ,YAAI,cAAc,QAAQ,KAAK,UAAU;AACrC;;AAEJ,YAAI,KAAK,OAAO,WAAW;AACvB,wBAAc,KAAK,KAAK;AACxB;;AAEJ,aAAK,OAAO,QAAQ;AAEhB,cAAI,KAAK,IAAI,OAAM;AACf;;AAEJ,eAAK,IAAI,OAAM;AACf,mBAAS,KAAK;;;AAGtB,aAAO,CAAE,QAAQ,SAAS,WAAW,eAAe,aAAa;;AAM9D;AACH,aAAQ,WAAW,UAAW;AAC9B,uBAAiB;AACjB,yBAAmB,OAAO,KAAK,QAC1B,IAAI,UAAQ,cAAc,MAAM,IAChC,IAAI,UAAQ,OAAM,MAAM;AAC7B,wBAAkB,OAAM;AACxB,iBAAW,QAAQ;AACf,YAAI,UAAU,IAAI,OAAM;AACpB,mBAAS,KAAK;;;AAGtB,aAAM,QAAQ,QAAQ;AAClB,YAAI,UAAU,IAAI,OAAO;AACrB,mBAAS,KAAK;;;AAGtB,UAAI,aAAa;AACb,kBAAU,QAAQ;AACd,cAAI,UAAU,IAAI,KAAK;AACnB,qBAAS,KAAK;;;;AAI1B,mBAAa,IAAI;AACjB,2BAAqB;AACrB,aAAO,SAAS,SAAS;AACrB,qBAAa,SAAS;AACtB,aAAK,IAAI,KAAK;AACd,YAAI,CAAC,UAAU,KAAK;AAChB,uBAAa,KAAK;;AAEtB,aAAK,SAAS,QAAQ;AAClB,cAAI,CAAC,KAAK,IAAI,MAAM,SAAS,UAAU,IAAI,MAAM,SAC7C,MAAM,OAAO,MAAM,YAAS,KAAK,IAAI,OAAM;AAC3C,qBAAS,KAAK;;;;AAI1B,aAAO;;AAEX,6BAAyB;MACrB;MAAU;MAAS;MAAS;MAAQ;MAAiB;MACrD;MAAkB;MAAM;;AAE5B,8BAA0B;MACtB;MAAuB;MAAuB;MAAuB;;AAEzE,2BAAuB;MACnB;MAAa;MAAe;MAAqB;MACjD;MAAmB;;AAEhB;AACH,aAAO,iBAAiB,QAAQ,KAAK,OAAO;;AAEzC;AACH,aAAO,kBAAkB,QAAQ,KAAK,OAAO;;AAE1C;AACH,aAAO,eAAe,QAAQ,KAAK,OAAO;;AC3I9C;;;;;;;;;;;;;;;;;MA8BI;AACI,aAAK,QAAQ;AACb,aAAK,SAAS;AACd,aAAK,cAAc,IAAI;AACvB,aAAK,aAAa;AAClB,aAAK,YAAY;AACjB,aAAK,aAAa;AAClB,aAAK,uBAAuB;AAC5B,aAAK,WAAW,OAAM;AACtB,aAAK,UAAU,OAAM;AACrB,aAAK,aAAa,OAAM;AACxB,aAAK,aAAa,OAAM;AACxB,aAAK,aAAa,OAAM;AAExB,YAAI,OAAM,aAAa;AACnB,iBAAO,KAAK,OAAM,WAAW,QAAQ;AACjC,iBAAK,qBAAqB,QACtB,IAAI,cAAc,OAAM,UAAU,OAAO;;;;UAIrD;AACA,eAAO,KAAK,SAAS,KAAK,OAAO,YAAY,KAAK;;UAElD;AACA,eAAO,KAAK,SAAS,KAAK,OAAO,sBAC7B,KAAK;;UAET;AACA,eAAO,KAAK,SAAS,KAAK,OAAO,YAAY,KAAK;;UAElD;AACA,0BAAkB,OAAO,KAAK,WAAW,IAAI,SAAO,UAAU,KAAK,IAAI,aAAU,QAAO;AACxF,aAAK,aAAa,GAAG,OAAO,GAAG;AAC/B,aAAK,aAAa;;UAMlB;AACA,aAAK,mBAAmB;;UAExB;AACA,eAAO,KAAK,QAAQ,IAAI;AACpB,iBAAO;YACH,MAAM,KAAK;YACX,OAAO,KAAK,WAAW,WACnB,KAAK,WAAW,SAAS,QACzB;YACJ,OAAO,KAAK,WAAW,WACnB,KAAK,WAAW,SAAS,QACzB;;;;UAIZ;AACA,eAAO,KAAK,SAAS,IAAI;AACrB,iBAAO;YACH,MAAM,KAAK;YACX,OAAO,KAAK,WAAW,WACnB,KAAK,WAAW,SAAS,QACzB;YACJ,OAAO,KAAK,WAAW,WACnB,KAAK,WAAW,SAAS,QACzB;;;;UAIZ;AACA,eAAO,KAAK,QAAQ,IAAI,UAAQ,KAAK,gBAAgB,KAAK;;UAE1D;AACA,eAAO,KAAK,SAAS,IAAI;AACrB,uBAAa,KAAK,gBAAgB,KAA
K;AACvC,iBAAO,KAAK,gBAAiB,GAAG,QAAQ,KAAK,kBAAmB;;;UAGpE;AACA,eAAO,OAAO,KAAK,KAAK,YAAY,OAAO;AACvC,cAAI,OAAO,KAAK,WAAW,KAAK;AAChC,iBAAO;WACR;;MAEP;AACI,6BAAqB,OAAO,IAAI,UAAQ,KAAK,MAAM;AACnD,8BAAsB,QAAQ,IAAI,UAAQ,KAAK,MAAM;AACrD,eAAO,aAAa,KAAK,KAAK,aAAa,OACvC,cAAc,KAAK,KAAK;;MAMhC;AACI,8BAAsB,qBAAqB,QAAQ,SAAS,KAAK,WAAW,KAAK;AACjF,eAAQ,eAAe,aAAa,cAAe;AACnD,YAAI,eAAe;AACf,gBAAM,IAAI,MAAM,qCAAqC,YAAY,oCAC1C,YAAY,8GAEK;;AAE5C,YAAI,cAAc,SAAS;AACvB,2BAAiB,QAAQ,IAAI,OAAK,EAAE;AACpC,0BAAgB,OAAO,KAAK;AAC5B,gBAAM,IAAI,MAAM,+BAA+B,uCACvC,4CAA4C;;AAExD,eAAO,2BAA2B,KAAK,OAAO,KAAK,WAAW;;MAWlE;AACI,iBAAS,KAAK,UAAU;AACxB,sBAAc,OAAO,KAAK,QAAQ;AAClC,aAAK,YAAY;AACjB,aAAK,uBAAuB;AAC5B,kBAAU,KAAK,WAAW;AAC1B,aAAK,aAAa;AAClB,2BAAmB,MAAM,IAAI,UAAQ,KAAK,MAAM,MAAM,cAAc,MAAM;AAC1E,gCAAwB,QAAQ,IAAI,UAAQ,cAAc,MAAM;AAChE,0BAAkB,gBAAgB,IAAI,UAAQ,KAAK,MAAM,MAAM;AAE/D,YAAI,YAAY,WAAW;AACvB,wBAAc,KAAK;;AAEvB,+BAAuB,KAAK,kBAAkB,YAAY;AAE1D,2BAAmB,KAAK,YAAY,IAAI;AACxC,YAAI,gBAAgB;AAChB,yBAAe,KAAK,QAAQ,QAAQ;AACpC,eAAK,YAAY,IAAI,gBAAgB;;AAEzC,+BAAuB;AACvB,8BAAsB;AACtB,eAAO,KAAK;AACR,0BAAgB,IAAI,iBAAiB,KAAK,WAAW,gBAAgB,eAAe,KAAK;AACzF,6BAAmB,OAAO,OAAO,IAAI,KAAK;AAC1C,iBAAO,KAAK,QAAQ,QAAQ;AACxB,uCAA0B,cAAc;AACxC,4BAAgB;AAChB,oBAAQ,UAAS,OAAO;AACxB,uBAAW,YAAY;;AAE3B,gCAAsB,KAAK,mBAAmB;AAC9C,kDAAwC;AACxC,uBAAa,GAAG,IAAI,aAAa,QAAQ;AACrC,yBAAa,aAAa;AAC1B,gBAAI,CAAC,WAAW,KAAK;AACjB,8BAAgB,YAAU,MAAM,YAAY,SAAS,KAAK;AAC1D,kBAAI,WAAe;AACf,sBAAM,IAAI,MAAM,4BAA4B,KAAK;;AAGrD,yBAAW,KAAK,QAAQ;AACxB,mBAAK,uBAAuB,KAAK,MAAM,MAAM,YAAY,SAAS,eAAe,iBAAiB;;;AAI1G,cAAI,KAAK,UAAU;AACf,oBAAQ,QAAQ;;AAEpB,iBAAO,QAAQ,IAAI,UAAQ,UAAU,MAAM,YAAY;;;MAG/D;AACI,oBAAY,GAAG,OAAO,MAAM,IAAI,OAAO,KAAK,WACvC,IAAI,SAAO,UAAU,MACrB,IAAI,aAAW,QAAQ,IAAI,aAAU,QAAO;AACjD,eAAO,IAAI,IAAI;;MAEnB;AAGI,YAAI,KAAK,aAAa,aAAa,YAAY,QAAQ,cAAc;AACjE;;AAEJ,kBAAU,UAAU,QAAQ;AACxB,cAAI,WAAU;AACV,4CAAgC,QAAO,MAClC,iCAAgC,QAAO,OAAO,KAC3C,KAAK,SAAS;;;AAG9B,aAAK,OAAO,QAAQ;AAGhB,cAAI,OAAM,aAAa;AACnB,4BAAgB,6BAA6B,OAAM,MAAM,WAAW;AACpE,gBAAI,WAAW;AACX,sBAAQ,QAAQ;AACZ,oBAAI,WAAU,CAAC,cAAc,IAAI,QAAO;AACpC,iCAAc,gCAAgC,QAAO;AACrD,sBAAI,WAAU;AACV,4BAAO;AACP,2BAAO,gCAAgC,QAAO;6BAEzC,UAAS;AAGd,oDAAgC,QAAO;;;;;;;;YAiB7D;AACF,eAAO,KAAK,cAAc,QAAQ;;YAgBhC,qDAAqD,wBAAwB,oBAAoB;AACnG,YAAI,CAAC;AACD,mBAAS,KAAK,UAAU;AACxB,eAAK,YAAY;AACjB,eAAK,uBAAuB;AAC5B,oBAAU,KAAK,WAAW;AAC1B,eAAK,aAAa;;AAEtB,wBAAgB,IAAI,iBAAiB,KAAK,WAAW,gBAAgB,eAAe,KAAK;AAIzF,0BAAkB,MAAM,KAAK,uBAAuB,QAAQ,SAAS,SAAS;AAC9E,wBAAgB,QAAQ,IAAI,UAAQ,UAAU,MAAM,WAAW;AAE/D,0BAAkB,QAAQ,IAAI,OAAK,EAAE;AACrC,yBAAiB,OAAO,KAAK,QAAQ,IAAI,UAAQ,OAAO,MAAM;AAC9D,wBAAgB,IAAI,IAAI,CAAC,GAAG,WAAW,GAAG,UAAU,GAAG,KAAK;AAC5D,eAAO,KAAK,WAAW,QAAQ;AAC3B,8BAAoB,UAAU;AAC9B,sBAAY,QAAQ;AAChB,gBAAI,WAAU,CAAC,QAAO,cAAc,CAAC,QAAQ,IAAI,QAAO;AACpD,sBAAO;;;;AAKnB,YAAI,KAAK,UAAU;AACf,kBAAQ,QAAQ;;AAEpB,eAAO;;YAEL;AACF,6BAAqB,OAAO,OAAO;AAC/B,cAAI,KAAK,OAAO,QAAO,QAAQ;AAC/B,iBAAO;WACR;AACH,eAAO,KAAK,cAAc,cAAc,KAAK,aAAa,MAAM,gBAAgB;;YAa9E;AACF,sBAAc,OAAO,KAAK;AAC1B,2BAAmB,MAAM,IAAI,UAAQ,KAAK,MAAM,MAAM,cAAc,MAAM;AAC1E,gCAAwB,YAAY,IAAI,UAAQ,cAAc,MAAM;AACpE,0BAAkB,gBAAgB,IAAI,UAAQ,KAAK,MAAM,MAAM;AAE/D,YAAI,YAAY,WAAW;AACvB,wBAAc,KAAK;;AAEvB,eAAQ,WAAW,eAAe,aAAa,cAAe,qBAAqB,QAAQ,aAAa,KAAK,WAAW,KAAK;AAE7H,uBAAc;UACV,GAAG;UAAY,GAAG,KAAK,MAAM;UAAS,GAAI,KAAK,cAAc;UAC/D,IAAI;AACF,iBAAO,CAAE,MAAM,UAAU,QAAQ;;AAErC,2BAAmB,OAAO,OAAO,IAAI,KAAK;AAC1C,eAAO,KAAK,QAAQ,QAAQ;AACxB,qCAA0B,cAAc;AACxC,0BAAgB;AAChB,kBAAQ,UAAS,OAAO;AACxB,qBAAW,YAAY;;AAE3B,gDAAwC;AACxC,8BAAsB,KAAK,mBAAmB;AAC9C,sBAAc;AACd,eAAO,OAAM,SAAS;AAClB,2BAAiB,KAAK,aAAa,YAAY,QAAO,SAAS,YAAY,OAAO,eAAe,iBAAiB,iCAAiC;AACnJ,gBAAM,QAAQ,IAAI;;AAEtB,YAAI,eAA
e,QAAQ,CAAC;AACxB,kBAAQ,KAAK;;AAGjB,+BAAuB,YAClB,OAAO,UAAQ,CAAC,cAAc,SAC/B,CAAC,UAAU,KAAK,MAAM,YAAY,UACjC,IAAI,UAAQ,KAAK;AACtB,YAAI,eAAe,SAAS;AACxB,+BAAqB;AACrB,cAAI,eAAe;AACf,6BACI,wFAC+B;;AAEvC,gBAAM,IAAI,MAAM,+BAA+B,6CAChC,qDACP,mBAAmB;;AAE/B,eAAO;;MAEX;AACI,yBAAiB;AACjB,eAAO,OAAM,SAAS;AAClB,uBAAa,OAAM;AACnB,kBAAQ,iBAAiB,KAAK;AAC9B,yBAAe;AAIf,cAAI,KAAK,KAAK,OAAO,WACjB,cAAc,cAAc,KAAK,MAAM,WAAW;AAClD,aAAC,YAAY,oBAAoB,KAAK,KAAK,MAAM;;AAIrD,cAAI,UAAU,KAAK,KAAK,SAAS;AAC7B,4BAAgB,YAAU,KAAK,MAAM,WAAW,SAAS,KAAK;AAC9D,gBAAI,CAAC;AACD,eAAC,YAAY,oBAAoB,KAAK,KAAK,MAAM;;AAErD,mCAAuB,QAAQ;AAC/B,gBAAI,WAAe;AACf,uBAAS,KAAK,QAAQ,KAAK;AACvB,0BAAU,YAAY;AACtB,wBAAQ,iBAAiB;AACzB,qBAAK,uBAAuB,UAAU,KAAK,MAAM,WAAW,SAAS,eAAe,aAAa;AACjG,qBAAK,kBAAkB,KAAK,MAAM,QAAO,SAAS,WAAW,OAAO;AACpE,uBAAO;;;AAIX,wBAAU,YAAY;AACtB,mBAAK,uBAAuB,UAAU,KAAK,MAAM,WAAW,SAAS,eAAe,aAAa;AACjG,mBAAK,kBAAkB,KAAK,MAAM,QAAO,SAAS,WAAW,OAAO;;;AAIxE,iBAAK,kBAAkB,KAAK,MAAM,QAAO,SAAS,WAAW,OAAO;;;AAG5E,eAAO;;MAEX;AACI,aAAK,SAAS,QAAQ;AAClB,6BAAoB,oBAAoB,UAAU,MAAM;AACxD,cAAI,MAAM,aAAa,CAAC,UAAU,IAAI,UAAU;AAC5C;;AAGJ,cAAI,UAAU,OAAO;AACjB,gBAAI,UAAU,WAAW,KAAK;AAC1B,qBAAO,CAAC,CAAC,UAAU,MAAM,WAAW;;AAEpC,oBAAM,YAAY;AAClB,qBAAM,KAAK,CAAE,UAAU,QAAQ,gBAAgB,MAAM;;qBAIxD,UAAU,WAAW,MAAM;AAC5B,mBAAO,CAAC,CAAC,UAAU,MAAM,WAAW;;AAEpC,kBAAM,YAAY;AAClB,mBAAM,KAAK,CAAE,UAAU,QAAQ,gBAAgB,MAAM;;;;MAOjE;AACI,eAAO,KAAK,KAAK,WACZ,QAAQ,SAAO,KAAK,UAAU,KAAK,QAAQ,aAAU,QAAO;;MAErE;AACI,eAAO,KAAK,QAAQ,QAAQ;AACxB,yBAAc,OAAO;AACrB,6BAAoB,cAAc;AAClC,uBAAa,KAAK,MAAM,MAAM;AAC9B,cAAI,KAAK,WAAW,YAAY,KAAK,WAAW,SAAS;AACrD,0BAAc,KAAK,WAAW,SAAS;AACvC,0BAAc,MAAM,WAAW,OAAM,MAAM,UACvC,OAAM,MAAM,MAAM,iBAAgB,MAAM,YAAW,MAAM,MAAM,YAAW;AAC9E,oBAAY,OAAO,MAAM,sBAAsB,KAAK,mDAChB,oBAC5B,OAAM;;AAElB,cAAI,KAAK,WAAW,YAAY,KAAK,WAAW,SAAS;AACrD,oBAAY,OAAM,UAAU,KAAK,WAAW,SAAS,OAAO,MAAM,sBAAsB,KAAK,kDAEtF,KAAK,WAAW,SAAS,kBAAkB,OAAM;;;;MAIpE;AACI,uBAAe;AACf,gCAAwB;AACpB,cAAI,KAAK,cAAc,QAAQ,KAAK,WAAW,UAAU,QACrD,KAAK,WAAW,OAAO,cAAc;AACrC,4BAAe,KAAK,WAAW,OAAO;AACtC,mBAAO,QAAO,QAAQ,OAAO;;AAG7B,mBAAO,aAAa,OAAO;;;AAGnC,eAAO;;MAEX;AACI,2BAAmB,OAAO,KAAK,QAAQ,OAAO;AAC1C,6BAAmB,cAAc;AACjC,iBAAO,KAAK,MAAM,MAAM,aAAa;;AAEzC,YAAI,WAAW,SAAS;AACpB,gBAAM,IAAI,MAAM,uDACF;;;MAGtB;AACI,eAAO,QAAQ,IAAI;AACf,cAAI,KAAK,cAAc,QAAQ,KAAK,WAAW,WAAW,QACtD,KAAK,WAAW,QAAQ,SAAS;AACjC,4BAAe,KAAK,WAAW,QAAQ;AACvC,mBAAO,QAAO;;AAElB,iBAAO;WACR;;MAEP;AACI,gBAAQ,QAAQ;AACZ,mCAAyB,cAAc;AACvC,cAAI,CAAC,KAAK,MAAM,MAAM;AAClB,kBAAM,IAAI,MAAM,eAAe;;;;;;MCpf3C,oCAAoC,mBAAmB;AACnD,aAAK,wBAAwB;AAC7B,aAAK,eAAe;;MAWxB;AACI,aAAK,sBAAsB,QAAQ,WAAU;AAC7C,aAAK,aAAa,WAAU,MAAM;;MAOtC;AACI,eAAO,KAAK,sBAAsB;;MAMtC;AACI,eAAO,KAAK,aAAa;;MAK7B;AACI,0BAAkB,KAAK;AACnB,eAAK,aAAa,KAAK;AACvB,iBAAO,KAAK,aAAa;;AAE7B,2BAAmB,KAAK;AACpB,eAAK,sBAAsB,MAAM;AACjC,iBAAO,KAAK,sBAAsB;;;;AC9C9C;;;;;;;;;;;;;;;;AAoBO,+BAA2B;AAC3B,+BAA2B;;MAqB9B,oCAAoC;AAChC,aAAK,WAAW;AAChB,aAAK,cAAc;AACnB,aAAK,UAAU;AACf,YAAI,eAAe;AACf,eAAK,cAAc;;AAEvB,aAAK,kBAAkB,IAAI;;UAG3B;AACA,eAAO,KAAK;;UAEZ;AACA,eAAO,KAAK,SAAS;;UAErB;AACA,eAAO,KAAK,SAAS;;UAErB;AACA,eAAO,KAAK,SAAS;;UAErB;AACA,eAAO,KAAK,SAAS;;UAErB;AACA,eAAO,KAAK,SAAS;;MAEzB;AACI,qBAAa,KAAK;AAClB,YAAI,KAAK,QAAQ;AAEb,eAAK,UAAU;mBAEV,KAAK,YAAY,eAAe;AACrC,eAAK,UAAU,mBAAsB,MAAM,KAAK;;AAGhD,2BAAiB,gBAAmB,MAAM,KAAK;AAC/C,cAAI,SAAS,WAAW;AAGpB,qBAAS,KAAK,mBAAsB,MAAM,KAAK;qBAE1C,SAAS,SAAS;AACvB,kBAAM,IAAI,MAAM,wBAAwB,SAAS,kCACrC,CAAC;;AAEjB,eAAK,UAAU,SAAS;;;YAO1B;AACF,aAAK;AACL,YAAI,KAAK,QAAQ,QAAQ;AACrB,gBAAM,IAAI,MAAM;;AAGpB,0BAAkB,MAAM,KAAK,QAAQ;AACrC,eAAO,KAAK,SAAS;;MAQzB;AACI,aAAK,YAAY;AACjB,uBAAc,KAAK,UAAU;AAC7B,wBAAgB;AAChB,YAAI,KAAK,UAAU,uBAAuB;AACtC,sBACI,K
AAK,UAAU,oBAAoB;;AAE3C,aAAK,UAAU,GAAG,OAAM,SAAS,YAAY,OAAM,SAAS;AAC5D,0BAAkB,cAAiB,KAAK,UAAU,YAAY,KAAK,UAAU;AAC7E,aAAK,WAAW,IAAI,cAAc,gBAAgB,SAAS,eAAe,QAAO;AACjF,aAAK,SAAS,YAAY,KAAK,6BAA6B;AAG5D,aAAK,SAAS,kBAAkB,KAAK;AACrC,YAAI,UAAU,oBAAoB;AAC9B,8BAAoB,gBAAgB,SAAS,eAAe,UAAU;AACtE,eAAK,cAAc,IAAI,cAAc;AACrC,eAAK,YAAY,YAAY,KAAK,SAAS;AAI3C,eAAK,YAAY,kBAAkB,KAAK;AACxC,eAAK,YAAY,aAAa,IAAI;;AAEtC,eAAO;;YA8CL;AACF,YAAI,OAAO,iBAAiB;AACxB,2BAAiB,gBAAmB;AACpC,cAAI,SAAS,WAAW;AACpB,kBAAM,IAAI,MAAM,0CAA0C;qBAErD,SAAS,SAAS;AACvB,kBAAM,IAAI,MAAM,wBAAwB,SAAS,kCACrC;;AAEhB,yBAAe,SAAS;;AAE5B,YAAI,aAAa,QAAQ;AACrB,gBAAM,IAAI,MAAM;;AAGpB,eAAO,aAAa,KAAK,KAAK;;MAwClC;AACI,eAAO,KAAK,QAAQ,QAAQ,KAAK;;MAErC;AACI,YAAI,CAAE,mBAAkB,YAAW,CAAC,MAAM,QAAQ;AAE9C,iBAAO;;AAEX,iBAAS,MAAM,QAAQ,UAAU,SAAS,CAAC;AAC3C,YAAI,OAAO,WAAW,KAAK,WAAW;AAClC,gBAAM,IAAI,MAAM,mDACW,KAAK,WAAW,wCACpB,OAAO;;AAElC,eAAO,KAAK,WAAW,OAAO;AAC1B,cAAI,aAAa,OAAO;AACxB,iBAAO;WACR;;MAEP;AACI,kBAAU,WAAW,KAAK;AAC1B,eAAO,CAAC,MAAM,QAAQ,WAAW,CAAC,WAAW;;MAkBjD;AACI,iBAAS,KAAK,gBAAgB;AAC9B,kBAAU,KAAK,iBAAiB;AAChC,uBAAe,KAAK,SAAS,QAAQ,QAAQ;AAC7C,eAAO,OAAO,SAAS,IAAI,SAAS,OAAO;;YAkBzC;AACF,iBAAS,KAAK,gBAAgB;AAC9B,kBAAU,KAAK,iBAAiB;AAChC,uBAAe,MAAM,KAAK,SAAS,aAAa,QAAQ;AACxD,eAAO,OAAO,SAAS,IAAI,SAAS,OAAO;;MAE/C;AACI,eAAO,OAAO,KAAK,KAAK,OAAO;AAC3B,iBAAO,OAAO,CAAC,IAAI;AACnB,iBAAO;WACR;;MAOP;AACI,aAAK,SAAS;AACd,YAAI,KAAK;AACL,eAAK,YAAY;;AAErB,aAAK,gBAAgB;;;AAiCtB,uDAAkD;AACrD,UAAI,YAAY;AACZ,cAAM,IAAI,MAAM;;AAGpB,UAAI,WAAW;AACX,kBAAU;;AAEd,UAAI,QAAQ;AACR,YAAI,SAAS,QAAQ;AACjB,cAAI,CAAC,SAAS,SAAS;AACnB,uBAAW,WAAW;;AAE1B,qBAAW,GAAG,WAAW,qBAAqB;;;AAGtD,qBAAc,IAAI,WAAW,UAAU;AACvC,YAAM,OAAM;AACZ,aAAO;;ACrXX;AAEK,sBAAW;ACFhB;;;;;;;;;;;;;;;;ACAA;;;;;;;;;;;;;;;;;AAmCO;AACH,aAAO,gBAAgB,QAAO;;AAQlC,mDAA8C,IAAI,qBAAqB,IAAI;AACvE,UAAI,UAAS;AACT,eAAO;;AAEX,UAAI,YAAY,IAAI;AAChB,cAAM,IAAI,MAAM;;AAEpB,UAAI,KAAK,IAAI;AACT,eAAO,KAAK,IAAI;;AAEpB,qBAAe,MAAM;AACrB,UAAI,OAAO,WAAW,OAAO,UAAU;AACnC,cAAM,IAAI,MAAM;;AAEpB,UAAI,CAAC,OAAO;AACR,aAAK,IAAI,QAAO,OAAO;AACvB,eAAO,OAAO;iBAET,aAAW;AAEhB,+BAAuB,MAAM,QAAQ,UAAS,KAAK;AACnD,oBAAY,IAAI;AAChB,wBAAgB;AACZ,wBAAc,OAAM;AACpB,8BAAoB,gBAAgB,OAAO,OAAO,MAAM;AACxD,yBAAe,KAAK;;AAExB,oBAAY,OAAO;AACnB,eAAO;;AAGP,cAAM,IAAI,MAAM,yCAAyC;;;AA2B1D,qCAAiC;AACpC,aAAO,gBAAgB,QAAQ;;AAMnC,0DAAsD,IAAI;AAGtD,qBAAc,OAAO;AACrB,UAAI,YAAY,IAAI;AAChB,cAAM,IAAI,MAAM;;AAEpB,qBAAe,MAAM;AACrB,UAAI,OAAO,WAAW,OAAO,UAAU;AACnC,cAAM,IAAI,MAAM;;AAEpB,UAAI,CAAC,OAAO;AACR,eAAO,OAAO;iBAET,aAAW;AAEhB,+BAAuB,MAAM,QAAQ,UAAS,KAAK;AACnD,oBAAY,IAAI;AAChB,wBAAgB;AACZ,2BAAiB,OAAO,IAAI,OAAK,EAAE;AACnC,8BAAoB,gBAAgB,UAAU,OAAO;AACrD,yBAAe,KAAK;;AAExB,oBAAY,OAAO;AACnB,eAAO;;AAGP,cAAM,IAAI,MAAM,yCAAyC;;;AAI1D;AACH,UAAI,MAAM;AACN,eAAO;;AAGX,UAAI,aAAW,EAAE;AACb,eAAO,CAAE,OAAO,MAAM,SAAS;;AAG/B,eAAO,CAAE,OAAO,GAAG,SAAS;;;AAyB7B;AACH,mBAAa,IAAI;AAEjB,sBAAgB,QAAO,OAAO;AAK9B,wBAAkB,MAAM,KAAK,KAAK;AAC9B,sBAAc,KAAK,IAAI;AACvB,YAAI,WAAkB;AAClB,8BAAoB,MAAM;AAC1B,eAAK,IAAI,KAAK;;;AAMtB,qBAAe,gBAAgB,QAAO,OAAO;AAC7C,aAAO;;AAQJ;AACH,aAAO,OAAO,QAAS,CAAC,YAAY,OAAO,QACtC,OAAM,QAAQ,QACV,OAAO,QAAQ,YAAY,CAAE,gBAAe;;AAWlD;AACH,aAAO,OAAO,QAAQ,YAAY,QAAQ,MAAM,QAAQ,QACnD,OAAO,QAAQ,YAAa,eAAe,WAC5C,cAAqB;;AAM7B;AACI,aAAQ,UAAU,QACb,OAAO,UAAU,YAAY,OAAO,UAAU;;AClOvD;;;;;;;;;;;;;;;;;AAmBO;AACH,aAAO,QAAQ,WAAW;;AAG9B;AACI,UAAI,gBAAgB;AAChB,eAAQ,CAAE,OAAO,KAAK,SAAS,SAAS;iBAEnC,aAAW;AAChB,eAAO,CAAE,OAAO,MAAM,SAAS;;AAG/B,eAAO,CAAE,OAAO,MAAM,SAAS;;;AC/BvC;;;;;;;;;;;;;;;;;;MAyBI;AACI,aAAK,WAAW;AAIhB,aAAK,QAAQ;AACb,aAAK,MAAM;AACX,YAAI,YAAY;AACZ,gBAAM,IAAI,WAAW;;AAEzB,YAAI,WAAW;AACX,gBAAM,IAAI,WAAW;;AAEzB,aAAK,OAAO,IAAI,MAAM;AACtB,aAAK,kBAAkB,IA
AI;;MAK/B;AAEI,eAAO,SAAQ;AACX,oBAAS,KAAK;;AAElB,eAAO,SAAQ,KAAK;;MAExB;AACI,YAAI,SAAQ;AACR,gBAAM,IAAI,WAAW;;AAEzB,eAAO,KAAK,KAAK,SAAQ,KAAK;;MAElC;AACI,YAAI,SAAQ;AACR,gBAAM,IAAI,WAAW;;AAEzB,aAAK,KAAK,SAAQ,KAAK,YAAY;;MAKvC;AACI,qBAAa,KAAK,MAAM,KAAK;AAC7B,YAAI,SAAS;AACT,mBAAS,KAAK,kBAAkB;;AAEpC,eAAO;;MAOX;AACI,eAAO,KAAK,aAAa,KAAK;;MAOlC;AACI,eAAO,KAAK,aAAa;;MAK7B;AACI,YAAI,KAAK;AACL,gBAAM,IAAI,WAAW;;AAEzB,aAAK,IAAI,KAAK,KAAK;AACnB,aAAK,MAAM,KAAK,KAAK,KAAK,MAAM;;MAKpC;AACI,4BAAoB;AAChB,eAAK,KAAK;;;MAMlB;AACI,YAAI,KAAK;AACL,gBAAM,IAAI,WAAW;;AAEzB,aAAK,MAAM,KAAK,KAAK,KAAK,MAAM;AAChC,uBAAe,KAAK,IAAI,KAAK;AAC7B,aAAK,IAAI,KAAK,KAAK;AACnB,eAAO;;MAKX;AACI,YAAI,KAAK;AACL,gBAAM,IAAI,WAAW;;AAEzB,aAAK,QAAQ,KAAK,KAAK,KAAK,QAAQ;AACpC,aAAK,IAAI,KAAK,OAAO;;MAKzB;AACI,YAAI,KAAK;AACL,gBAAM,IAAI,WAAW;;AAEzB,uBAAe,KAAK,IAAI,KAAK;AAC7B,aAAK,IAAI,KAAK,OAAO;AACrB,aAAK,QAAQ,KAAK,KAAK,KAAK,QAAQ;AACpC,eAAO;;MAWX;AACI,YAAI,KAAK;AACL,gBAAM,IAAI,WAAW;;AAEzB,uBAAc,KAAK,KAAK,KAAK,QAAQ;AACrC,uBAAe,KAAK,IAAI;AACxB,aAAK,IAAI,QAAO,KAAK;AACrB,eAAO;;;AC7Jf;;;;;;;;;;;;;;;;;oCAkBuC;MAInC;AACI,cAAM,kBAAkB;;MAE5B;AACI,eAAO;;MAEX;AACI,YAAI,MAAM;AACN,eAAK;;AAET,cAAM,KAAK;;MAEf;AACI,YAAI,MAAM;AACN,eAAK;;AAET,cAAM,QAAQ;;MAKlB;AACI,4BAAoB,KAAK,WAAW;AACpC,wBAAgB,IAAI,MAAM;AAC1B,oBAAY,KAAK;AAGjB,qBAAa,GAAG,IAAI,KAAK;AACrB,kBAAQ,KAAK,KAAK,IAAI,KAAK,KAAK,KAAK,QAAQ;;AAEjD,aAAK,OAAO;AACZ,aAAK,WAAW;AAChB,aAAK,kBAAkB,IAAI,KAAK;AAChC,aAAK,QAAQ;AACb,aAAK,MAAM;;;AAGnB,sBAAkB,mBAAmB;AC3DrC;;;;;;;;;;;;;;;;;AA6BO;AACH,aAAO,IAAI,cAAc;;AAKtB;AACH,cAAQ;AACR,aAAO,qBAAqB,MAAO,EAAE,OAAO,KAAK,MAAM;;AAepD;AACH,aAAO,IAAI,qBAAqB;;AAc7B;AACH,aAAO,IAAI,gBAAgB,eAAe;;AAkBvC;AACH,aAAO,yBAAyB,qBAAqB,cAAc,KAAK,SAAQ;;AA0B7E,0DAAsD,gBAAgB;AACzE,aAAO,IAAI,YAAY,WAAW;;;YAkB5B;AACF,uBAAe;AACf,gBAAQ,MAAM,KAAK;AACnB,eAAO,CAAC,EAAE;AACN,iBAAO,KAAK,EAAE;AACd,cAAI,MAAM,KAAK;;AAEnB,eAAO;;YAaL;AACF,uBAAe,KAAK,SAAS;AAC7B,uBAAe;AACf,gBAAQ,MAAM,OAAO;AACrB,eAAO,CAAC,EAAE;AACN,iBAAO,KAAK,EAAE;AACd,cAAI,MAAM,OAAO;;AAErB,eAAO;;YASL;AACF,gBAAQ,MAAM,KAAK;AACnB,eAAO,CAAC,EAAE;AACN,cAAI,MAAM,KAAK;;;YAUjB;AACF,gBAAQ,MAAM,KAAK;AACnB,6BAAqB,UAAU,EAAE;AACjC,eAAQ,CAAC,EAAE,QAAS;AAChB,cAAI,MAAM,KAAK;AACf,2BAAiB,UAAU,EAAE;;;MAerC;AACI,eAAO,IAAI,0BAA0B,MAAM;;MAW/C;AACI,eAAO,IAAI,eAAe,MAAM;;MAUpC;AACI,eAAO,IAAI,YAAY,MAAM;;MAUjC;AACI,eAAO,IAAI,iBAAiB,MAAM;;MAUtC;AACI,eAAO,IAAI,iBAAiB,MAAM,WAAW;;MAUjD;AACI,eAAO,IAAI,gBAAgB,MAAM;;YAO/B;AACF,eAAO,KAAK,IAAI,GAAG;;YASjB;AACF,eAAO,KAAK,eAAe,GAAG,aAAa,OAAM,MAAM;;MAoB3D,0CAA0C;AACtC,eAAO,IAAI,sBAAsB,MAAM,WAAW;;MAkCtD,6CAA6C,cAErC;AAEJ,2BAAmB,KAAK,cAAc,WAAW;AAGjD,eAAO,WAAW,IAAI,OAAK,QAAQ,GAAG;;MAY1C;AACI,eAAO,IAAI,gBAAgB,kBAAkB,CAAC,MAAM,YAAY;;MASpE;AACI,YAAI,SAAQ,KAAK,UAAS;AACtB,iBAAO;;AAEX,eAAO,IAAI,aAAa,MAAM;;MAQlC;AACI,YAAI,SAAQ,KAAK,UAAS;AACtB,iBAAO;;AAEX,eAAO,IAAI,aAAa,MAAM;;MAWlC;AACI,eAAO,IAAI,iBAAiB,MAAM;;MAWtC;AACI,eAAO,IAAI,gBAAgB,MAAM,YAAY;;MAMjD;AACI,eAAO,IAAI,eAAe;;;gCAUN;MACxB;AACI;AACA,aAAK,QAAQ;AACb,aAAK,OAAO;;MAEhB;AACI,eAAO,YAAY,KAAK,MAAM;;YAE5B;AACF,YAAI,KAAK,QAAQ,KAAK,MAAM;AACxB,iBAAO,CAAE,OAAO,MAAM,MAAM;;AAEhC,qBAAa,KAAK,MAAM,KAAK;AAC7B,aAAK;AACL,eAAO,CAAE,OAAO,UAAU,OAAO,MAAM;;;uCAGZ;MAC/B;AACI;AACA,aAAK,SAAS;;MAElB;AACI,eAAO;;YAEL;AACF;AACI,iBAAO,KAAK;;AAIZ,YAAE,UACE,mDAAmD,EAAE;AACzD,gBAAM;;;;iCAIW;MACzB;AACI;AACA,aAAK,WAAW;AAChB,aAAK,WAAW,QAAQ,QAAQ,CAAE,OAAO,MAAM,MAAM;;MAEzD;AACI,eAAO,GAAG,KAAK,SAAS;;YAEtB;AAKF,aAAK,WAAW,KAAK,SAAS,KAAK,MAAM,KAAK;AAC9C,eAAO,KAAK;;YAEV;AACF,eAAO,KAAK,SAAS;;;+BAGF;MACvB;AACI;AACA,aAAK,WAAW;AAChB,aAAK,WAAW;AAEhB,aAAK,QAAQ;AACb,aAAK,WAAW,QAAQ,QAAQ,CAAE,OAAO,MAAM,MAAM;;MAEzD;AACI,eAAO,GAAG,KAAK,SAAS;;YAEtB;AAK
F,aAAK,WAAW,KAAK,SAAS,KAAK,MAAM,KAAK;AAC9C,eAAO,KAAK;;YAEV;AAKF,eAAO,KAAK,UAAU,KAAK;AACvB,0BAAgB,MAAM,KAAK,SAAS;AAEpC,cAAI,QAAQ;AACR,mBAAO;;AAEX,kBAAW,QAAQ;;AAEvB,eAAO,KAAK,SAAS;;;+BAGF;MACvB;AACI;AACA,aAAK,WAAW;AAChB,aAAK,WAAW;AAChB,aAAK,QAAQ;;MAEjB;AACI,eAAO,GAAG,KAAK,SAAS;;YAEtB;AACF,YAAI,KAAK,WAAW,KAAK;AACrB,iBAAO,CAAE,OAAO,MAAM,MAAM;;AAEhC,eAAO,KAAK,SAAS;;;wCAMO;MAChC,wDAAwD;AACpD;AACA,aAAK,WAAW;AAChB,aAAK,YAAY;AACjB,aAAK,uBAAuB;AAC5B,aAAK,WAAW,QAAQ,QAAQ,CAAE,OAAO,MAAM,MAAM;;MAEzD;AACI,eAAO,GAAG,KAAK,SAAS;;YAEtB;AAKF,aAAK,WAAW,KAAK,SAAS,KAAK,MAAM,KAAK;AAC9C,eAAO,KAAK;;YAEV;AACF,sBAAc;AACd,eAAO,MAAM,SAAS,KAAK;AACvB,uBAAa,MAAM,KAAK,SAAS;AACjC,cAAI,KAAK;AACL,gBAAI,KAAK,wBAAwB,MAAM,SAAS;AAC5C,qBAAO,CAAE,OAAO,OAAO,MAAM;;AAEjC,mBAAO,CAAE,OAAO,MAAM,MAAM;;AAEhC,gBAAM,KAAK,KAAK;;AAEpB,eAAO,CAAE,OAAO,OAAO,MAAM;;;iCAGR;MACzB;AACI;AACA,aAAK,WAAW;AAChB,aAAK,YAAY;AACjB,aAAK,WAAW,QAAQ,QAAQ,CAAE,OAAO,MAAM,MAAM;;MAEzD;AACI,eAAO,GAAG,KAAK,SAAS;;YAEtB;AAKF,aAAK,WAAW,KAAK,SAAS,KAAK,MAAM,KAAK;AAC9C,eAAO,KAAK;;YAEV;AACF,eAAO;AACH,uBAAa,MAAM,KAAK,SAAS;AACjC,cAAI,KAAK,QAAQ,KAAK,UAAU,KAAK;AACjC,mBAAO;;AAEX,kBAAW,KAAK;;;;8BAIF;MACtB;AACI;AACA,aAAK,WAAW;AAChB,aAAK,YAAY;;MAErB;AACI,eAAO,GAAG,KAAK,SAAS;;YAEtB;AACF,qBAAa,MAAM,KAAK,SAAS;AACjC,YAAI,KAAK;AACL,iBAAO,CAAE,OAAO,MAAM,MAAM;;AAEhC,6BAAqB,uBAAqC,KAAK;AAO/D,uBAAe,KAAK,UAAU,KAAK;AACnC,8BAAsB,uBAAqC;AAG3D,wBAAgB;AACZ,cAAI,CAAC,eAA8B,GAAG;AAClC,cAAE;;;AAGV,eAAO,CAAE,OAAO,QAAQ,MAAM;;;4CAGE;MACpC;AACI;AACA,aAAK,WAAW;AAChB,aAAK,UAAU;AACf,aAAK,QAAQ;AACb,aAAK,WAAW,QAAQ,QAAQ,CAAE,OAAO,MAAM,MAAM;;MAEzD;AACI,eAAO,GAAG,KAAK,SAAS;;YAEtB;AAKF,aAAK,WAAW,KAAK,SAAS,KAAK,MAAM,KAAK;AAC9C,eAAO,KAAK;;YAEV;AACF,eAAO;AACH;AACI,mBAAO,MAAM,KAAK,SAAS;;AAG3B,gBAAI,CAAC,KAAK,QAAQ;AACd,qBAAO,CAAE,OAAO,MAAM,MAAM;;;;;;mCAUjB;MAC3B;AACI;AACA,aAAK,WAAW;AAChB,aAAK,YAAY;;MAErB;AACI,eAAO,GAAG,KAAK,SAAS;;YAEtB;AACF,qBAAa,MAAM,KAAK,SAAS;AACjC,YAAI,KAAK;AACL,iBAAO,CAAE,OAAO,MAAM,MAAM;;AAEhC,6BAAqB,uBAAqC,KAAK;AAO/D,uBAAe,MAAM,KAAK,UAAU,KAAK;AACzC,8BAAsB,uBAAqC;AAG3D,wBAAgB;AACZ,cAAI,CAAC,eAA8B,GAAG;AAClC,cAAE;;;AAGV,eAAO,CAAE,OAAO,QAAQ,MAAM;;;oCAaC;MACnC;AACI;AACA,aAAK,cAAc,IAAI;AACvB,aAAK,WAAW,QAAQ,QAAQ,CAAE,OAAO,MAAM,MAAM;;YAEnD;AAKF,aAAK,WAAW,KAAK,SAAS,KAAK,MAAM,KAAK;AAC9C,eAAO,KAAK;;YAEV;AAIF,eAAO,KAAK,YAAY,aAAa;AAEjC,cAAI,CAAC,MAAM,KAAK;AACZ,mBAAO,CAAE,OAAO,MAAM,MAAM;;;AAGpC,eAAO,CAAE,OAAO,KAAK,YAAY,SAAS,MAAM;;;kCAG1B;MAC1B;AACI;AACA,aAAK,WAAW;AAChB,aAAK,YAAY;;MAErB;AACI,eAAO,GAAG,KAAK,SAAS;;YAEtB;AACF,qBAAa,MAAM,KAAK,SAAS;AACjC,YAAI,KAAK;AACL,iBAAO;;AAEX,6BAAqB,uBAAqC,KAAK;AAM/D,4BAAoB,KAAK,UAAU,KAAK;AACxC,8BAAsB,uBAAqC;AAC3D,aAAK,YAAY,QAAQ;AAGzB,wBAAgB;AACZ,cAAI,CAAC,eAA8B,GAAG;AAClC,cAAE;;;AAGV,eAAO;;;kCAYsB;MACjC;AACI;AACA,aAAK,mBAAmB;AAGxB,aAAK,WAAW;AAEhB,aAAK,WAAW;AAChB,aAAK,gBAAgB;;MAEzB;AACI,kCAA0B;AAC1B,eAAO,GAAG;;YAER;AACF,aAAK,WAAW,KAAK,cAAc,KAAK;AACxC,eAAO,KAAK;;YAEV;AAMF,cAAM;AACN,YAAI,KAAK,YAAY;AACjB,iCAAuB,MAAM,KAAK,cAAc;AAChD,cAAI,eAAe;AAEf,mBAAO,CAAE,OAAO,MAAM,MAAM;;AAEhC,eAAK,WAAW,eAAe;AAC/B,cAAI,KAAK,oBAAoB;AACzB,iBAAK,WAAW,KAAK,SAAS,aAAa,KAAK;;;AAGxD,2BAAmB,MAAM,KAAK,SAAS;AACvC,YAAI,WAAW;AACX,eAAK,WAAW;AAChB,iBAAO,KAAK,cAAc;;AAE9B,eAAO;;;AAGR;AACN,IAAA;AACG,uBAAgB,iBAAgB,UAAU,KAAK;AAC/C,uBAAgB,iBAAgB,cAAc,KAAK;AACnD,uBAAgB,iBAAgB,aAAa,KAAK;OACnD,mBAAoB,mBAAkB;8BA8Bf;MACtB,sCAAsC,gBAAgB;AAClD;AACA,aAAK,YAAY;AACjB,aAAK,eAAe;AACpB,aAAK,QAAQ;AACb,aAAK,iBAAiB;;MAE1B;AACI,kCAA0B;AAC1B,eAAO,IAAI;;YAET;AAGF,cAAM;AAGN,2BAAmB;AACnB,4BAAoB;AACpB;AACI,cAAI,qBAAqB;AACrB,2BAAe,UAAU;AACzB,mBAAO;cACH,OAAO,OAAO,KAAK;AACf;AACA,oBAAI,EAAE;AACF;;AAEJ,uBAAO,EAAE;;cAEb,SAAS;;;AAIb,mBAAO,CAAE,OA
AO,MAAM,SAAS;;;AAGvC,uBAAe,MAAM,mBAAmB,KAAK,WAAW;AACxD,YAAI,iBAAiB;AAEjB,iBAAO,CAAE,OAAO,MAAM,MAAM;;AAEhC,YAAI,gBAAgB;AAChB,kBAAQ,KAAK;iBACJ,gBAAgB;AACjB,oBAAM,IAAI,MAAM,qEACa,KAAK;iBACjC,gBAAgB;AACjB,qBAAO,CAAE,OAAO,MAAM,MAAM;iBAC3B,gBAAgB;;;;AAK7B,aAAK;AACL,eAAO,CAAE,OAAO,QAAQ,MAAM;;YAE5B;AACF,aAAK,iBAAiB,KAAK,UAAU,KAAK;AAC1C,eAAO,KAAK;;;mCAYkB;MAClC;AACI;AACA,aAAK,WAAW;AAChB,aAAK,aAAa;AAClB,aAAK,SAAS,IAAI,WAAW;;MAEjC;AACI,eAAO,GAAG,KAAK,SAAS;;MAM5B;AACI,eAAO,CAAC,KAAK,OAAO;AAChB,oBAAU,KAAK,SAAS;AACxB,eAAK,OAAO,KAAK;;;MAGzB;AACI,aAAK;AAIL,eAAO,KAAK,OAAO;;;kCASU;MACjC;AACI,cAAM,UAAU;AAChB,aAAK,WAAW;AAChB,aAAK,aAAa;AAElB,aAAK,oBAAoB;AACzB,aAAK,SAAS,aAAgB,QAAQ,OAAc;AACpD,aAAK,WAAW,QAAQ,QAAQ,CAAE,OAAO,MAAM,MAAM;;YAEnD;AAKF,aAAK,WAAW,KAAK,SAAS,KAAK,MAAM,KAAK;AAC9C,eAAO,KAAK;;MAEhB;AACI,eAAO,KAAK,MAAM,KAAK,WAAW;;MAEtC;AACI,eAAO,KAAK,UAAU,KAAK,OAAO;;YAEhC;AAEF,YAAI,CAAC,KAAK;AACN,eAAK;;AAET,eAAO,CAAC,KAAK,OAAO;AAChB,8BAAoB,KAAK;AACzB,yBAAe,MAAM,KAAK,OAAO,cAAc;AAC/C,cAAI,OAAO;AACP,iBAAK,oBAAoB;;AAGzB,iBAAK;AACL,mBAAO;;;AAGf,eAAO,CAAE,OAAO,MAAM,MAAM;;;AC1+BpC;;;;;;;;;;;;;;;;;;MAmDI;AACI,aAAK,OAAO;;MA8DhB,kCAAkC;AAC9B,sBAAa;AACb,gBAAe,YAAY,GAAG,MAAM;QACpC;AACA;AACA,YAAI,KAAK,SAAS,YAAY,KAAK,QAAQ;AAGvC,iBAAO,KAAK;mBAEP;AAGL,iBAAO,KAAK,KAAK,KAAK,OAAO;;AAK7B,iBAAO,KAAK,MAAM,KAAK,OAAO;;AAElC,eAAO,sBAAsB;AACzB,iBAAQ,OAAM,MAAK,YACd,iBAAiB,WAAW,gBAAgB;WAClD;;MAiBP;AACI,sBAAa;AACb;AACA,YAAI,KAAK,SAAS,YAAY,QAAQ,SAAS;AAG3C,iBAAO;mBAEF,KAAK,QAAQ,QAAQ,QAAQ,QAAQ;AAG1C,iBAAO,KAAK,OAAO,QAAQ;;AAK3B,iBAAO;;AAEX,eAAO,sBAAsB,YAAa,OAAM,MAAK,YAAY,YAAY,MAAM,QAAQ,aAAa;;MAkB5G;AACI,sBAAa;AACb;AACA,YAAI,KAAK,SAAS;AAEd,iBAAO;;AAKP,iBAAO;;AAEX,eAAO,sBAAsB;AACzB,iBAAQ,OAAM,MAAK,YAAY,OAAO,OAAK,KAAQ,MAAM,UAAU;WACpE;;YAkBD;AACF,eAAQ,OAAM,KAAK,YAAY,aAAa;;MAiBhD;AACI,sBAAa;AACb,eAAO,sBAAsB;AACzB,iBAAQ,OAAM,MAAK,YAAY,IAAI,OAAK,KAAQ,MAAM,UAAU;WACjE,KAAK;;MAyBZ;AACI,sBAAa;AACb,eAAO,sBAAsB;AACzB,iBAAQ,OAAM,MAAK,YAAY,SAAS;WACzC,KAAK;;MAWZ;AACI,YAAI,cAAc;AACd,gBAAM,IAAI,WAAW;;AAEzB,sBAAa;AACb,eAAO,sBAAsB,YAAa,OAAM,MAAK,YAAY,SAAS,aAAa,KAAK;;MAoBhG;AACI,sBAAa;AACb;AACA,YAAI,KAAK,QAAQ,QAAQ,SAAQ;AAI7B,iBAAO,KAAK,OAAO;mBAEd,WAAU;AAEf,iBAAO;mBAEF,KAAK,QAAQ,QAAS,YAAU,UAAa,SAAQ;AAG1D,iBAAO;;AAIP,iBAAO;;AAEX,eAAO,sBAAsB;AACzB,mCAAyB,qBAAqB,YAAa,EAAE,OAAO,MAAM,MAAK,YAAY,MAAM;AACjG,iBAAO,yBAAyB,iBAAiB,KAAK;WACvD;;MAmBP;AACI,sBAAa;AACb;AACA,YAAI,KAAK,QAAQ,QAAQ,UAAS,KAAK,KAAK,QAAQ;AAIhD,iBAAO,KAAK,OAAO;mBAEd,KAAK,QAAQ,QACjB,MAAK,OAAO,UAAS,WAAU,UAAa,SAAQ;AAGrD,iBAAO;;AAIP,iBAAO;;AAEX,eAAO,sBAAsB,YAAa,OAAM,MAAK,YAAY,KAAK,SAAQ;;MAuBlF,mDAAmD;AAC/C,YAAI,cAAc,QAAQ,aAAa;AACnC,cAAI,KAAK,QAAQ;AACb,kBAAM,IAAI,WAAW;;AAGrB,kBAAM,IAAI,WAAW,mNAGkB,KAAK;;;AAGpD,sBAAa;AACb,uBAAe,aAAgB,QAAQ,OAAc;AACrD,eAAO,sBAAsB;AACzB,sBAAY,OAAO;AACnB,cAAI;AACA,qBAAS,OAAO;;AAEpB,iBAAQ,OAAM,MAAK,YAAY,QAAQ,YAAY,MAAM;WAC1D,KAAK;;MAmBZ;AACI,sBAAa;AACb;AACA,YAAI,KAAK,QAAQ,QAAQ,KAAK,OAAO;AAGjC,iBAAO;mBAEF,KAAK,QAAQ,QAAQ,KAAK,QAAQ;AAGvC,iBAAO,KAAK;;AAIZ,iBAAO;;AAEX,eAAO,sBAAsB,YAAa,OAAM,MAAK,YAAY,KAAK,SAAQ;;YAkB5E;AACF,YAAI,KAAK,SAAS;AACd,gBAAM,IAAI,MAAM;;AAEpB,eAAQ,OAAM,KAAK,YAAY;;YAa7B;AACF,YAAI,KAAK,SAAS;AACd,gBAAM,IAAI,MAAM;;AAEpB,eAAQ,OAAM,KAAK,YAAY;;;AAIvC,YAAQ,kBAAkB;AAanB,sDAAkD;AACrD,aAAO,IAAI,cAAc;QACrB;AACI,gBAAM,GAAG;AACT,eAAK,OAAO;;cAMV;AACF,iBAAO;;;;AAsBZ;AACH,aAAO,sBAAsB,YAAY,kBAAkB,QAAQ,MAAM;;AA2CtE;AAEH,UAAI,CAAC,aAAW;AACZ,cAAM,IAAI,MAAM;;AAEpB;AACA,UAAI,MAAM,QAAQ;AACd,qBAAa,GAAG,IAAI,SAAS,QAAQ;AACjC,iBAAO,QAAQ,OAAO,SAAS,GAAG,OAC9B,KAAK,IAAI,MAAM,SAAS,GAAG;;iBAG9B,oBAAoB;AACzB,yBAAiB;AACb,iBAAO,QAAQ,OAAO,SAAS,IAAI,OAC/B,KAAK,IAAI,MAAM,SAAS,IAAI;;;AAGxC,aAAO,sBAAsB;AACzB,w
BAAgB,MAAM,mBAAmB,UAAU;AAC/C,cAAI,aAAa;AACb,mBAAO,CAAE,OAAO,EAAE,YAAY,SAAS;qBAElC,aAAW;AAChB,mBAAO,CAAE,OAAO,MAAM,SAAS;;AAG/B,kBAAM,IAAI,MAAM;;;AAIxB,eAAO,mBAAmB,SAAS,gBAAgB;SACpD;;AAUP;AACI,UAAI,SAAS;AACT,eAAO;;AAGX,yBAAmB,KAAK;AACxB,UAAI,aAAa;AAEb,sBAAc,YAAY;AAC1B,eAAO,CAAE,OAAO,SAAS;;AAG7B,aAAO,CAAE,OAAO,MAAM,SAAS;;AAMnC;AACI,UAAI,OAAO,WAAW;AAElB,cAAM,IAAI,MAAM;;AAEpB,UAAI,OAAO,cAAc;AAErB,eAAO,MAAS;;AAIhB,eAAO,QAAU;;;AC/oBzB;;;;;;;;;;;;;;;;;kCAuBqC;MAMjC;AACI;AACA,aAAK,QAAQ;;YAEX;AACF,8BAAsB,MAAM,KAAK,MAAM;AACvC,6BAAqB,cAAc;AACnC,6BAAqB,aAAa,MAAM,MAAM,IAAI;AAE9C,cAAI,KAAK,SAAS;AACd,mBAAO,KAAK,MAAM,GAAG;;AAEzB,iBAAO;;AAEX,eAAO;;;AC3Cf;;;;;;;;;;;;;;;;;AAoBA,uBAAmB;AACnB,sBAAkB,OAAO;AACzB,wBAAoB,OAAO;AAC3B,wBAAoB,OAAO;AAC3B,oCAAgC,OAAO;AACvC,wCAAoC,OAAO;6BAcX;MAiC5B;AACI;AACA,aAAK,QAAQ;AACb,aAAK,YAAY;AACjB,aAAK,kBAAkB;AACvB,aAAK,uBAAuB;AAC5B,aAAK,gBAAgB;AACrB,aAAK,wBAAwB;AAC7B,aAAK,YAAY;AACjB,aAAK,kBAAkB;AACvB,aAAK,OAAO,IAAI,gBAAgB;AAChC,YAAI,CAAC;AACD,sBAAY;;AAEhB,aAAK,YAAY,UAAU,cAAc,QAAQ,QAAQ;AACzD,aAAK,kBAAkB,UAAU;AACjC,aAAK,gBAAgB,UAAU;AAC/B,aAAK,wBAAwB,UAAU;AACvC,YAAI,UAAU;AACV,kBAAY,UAAU,aAAa,MAAM,MAAM;AAC/C,eAAK,kBAAkB;AACvB,eAAK,YAAY;;AAGjB,eAAK,YAAY,UAAU,YAAY,UAAU,YAAY;;;YAa/D;AACF,YAAI,CAAC,KAAK;AACN,gBAAM,KAAK;;AAEf,eAAO,KAAK,wBAAwB,OAAO,KAAK,KAAK,iBACjD,KAAK;;YAUP;AACF,oCAA4B,MAAM,KAAK;AACvC,YAAI,CAAC,KAAK,mBAAmB,CAAC;AAE1B,gBAAM,IAAI,MAAM;mBAEX,KAAK,mBAAmB;AAE7B,kBAAY,oBAAoB,WAAW,KAAK,gBAAgB,QAAQ,MAAM,yCAC1E,KAAK,gBAAgB,OAAO,aAC5B,oEACW,oBAAoB,OAAO,aAAa;;AAE3D,YAAI,CAAC,KAAK;AACN,eAAK,kBAAkB;;AAG3B,uBAAe,KAAK,gBAAgB,OAAO;AACvC,mBAAS,QAAS,SAAS,QAAQ,KAAM;AACzC,iBAAO;WACR;AACH,+BAAuB,OAAO,KAAK,QAAQ,OAAO,UAAW,OAAO,QAAQ;AAC5E,gBAAY,eAAe,WAAW,GAAG,MAAM,mCAAmC,eAAe;AAEjG,YAAI,KAAK;AACL,4BAAkB,OAAO,KAAK,KAAK;AAC/B,2BAAc,KAAK,gBAAgB,QAAQ;AAC3C,gBAAI,WAAU;AACV,oBAAM,IAAI,MAAM,cAAc,MAC1B,yEACY,KAAK,gBAAgB,aAAa;;;;AAI9D,aAAK,uBAAuB;;YAE1B;AACF,YAAI,KAAK;AACL,uBAAa,MAAM,KAAK,KAAK;AAC7B,+BAAqB,MAAM,KAAK;AAChC,cAAI,aAAa;AACb,kBAAM,IAAI,MAAM;;AAEpB,4BAAkB,aAAa;AAC/B,0BAAgB,KAAK,SAAS,WAAW;AACzC,iBAAO;;AAGP,iBAAO;;;YAGT;AACF,YAAI,CAAC,KAAK;AACN,gBAAM,KAAK;;AAEf,oBAAY,MAAM,KAAK,KAAK;AAC5B,YAAI,KAAK;AAGL,kBAAQ,MAAM,KAAK;;AAEvB,eAAO,MAAM,IAAI,OAAK,KAAK,gBAAgB;;MAE/C;AACI,uBAAe,KAAK,SAAS;AAC7B,yBAAiB;AACjB,uBAAe;AACf,qBAAa,GAAG,IAAI,KAAK,gBAAgB,QAAQ;AAC7C,sBAAY,KAAK,gBAAgB;AACjC,0BAAe,KAAK,gBAAgB,KAAK,cAAc,OAAO;AAC9D,cAAI,KAAK,yBAAyB,CAAC;AAE/B;;AAGA,0BAAc,OAAO;AACrB,8BAAkB;AAClB,gBAAI,UAAU;AAGV,kBAAI,WAAU,QAAO,YAAY;AAC7B,8BAAc,QAAO;yBAEhB,WAAW,SAAO,YAAY,QAAO;AAC1C,sBAAM,IAAI,MAAM,mBAAmB,8BAA8B;;AAGjE,8BAAc;;;AAKlB,iCAAmB,OAAO;AAC1B,kBAAI,MAAM;AAGN,oBAAI,WAAU,QAAO,UAAU;AAC3B,gCAAc,KAAK,WAAW;;AAI9B,gCAAc;;yBAGb,CAAC,WAAU,CAAC,QAAO;AAGxB,8BAAc;;AAKd,wBAAQ,QAAO;uBACN;AACD,kCAAc;AACd;uBACC;AACD,kCAAc,KAAK,MAAM;AACzB;uBACC;AACD,kCAAc,KAAK,WAAW;AAC9B;;AAEA,kCAAc;;;;AAK7B,uBAAU,QAAO,UAAW,OAAO,OAAO,cACvC,SAAS,OAAO;;;AAK5B,YAAI,OAAO,KAAK,QAAQ,WAAW;AAC/B,iBAAO;;AAGP,iBAAO,CAAE,IAAI,UAAU,IAAI;;;MAGnC;AACI,YAAI,UAAU,OAAO,MAAM,kBAAkB;AACzC,iBAAO;;AAGP,iBAAO;;;MAIf,sCAAsC;AAClC,uBAAe;AACf,yBAAiB;AACjB,2BAAmB,KAAK;AACxB,2BAAmB;AAEnB,qBAAa,GAAG,IAAI,YAAY;AAC5B,kBAAQ;iBAEC;AACD,sBAAQ,KAAK,OAAO;qBAEX;AACD,+BAAa,IAAI;AACjB,iCAAe;AACf;qBAEC,KAAK;AACN,+BAAa,IAAI;AAGjB,sBAAI,KAAK,cAAc,OAAO,KAAK;AAC/B;;AAEJ,yBAAO,KAAK;AACZ,iCAAe;AACf;;AAGA,iCAAe;AACf,+BAAa;AACb;;AAER;iBAEC;AACD,sBAAQ,KAAK,OAAO;qBAEX,KAAK;AACN,yBAAO,KAAK,KAAK,UAAU,YAAY;AACvC,iCAAe;AACf,+BAAa,IAAI;AACjB;;;AAGR;iBAEC;AACD,sBAAQ,KAAK,OAAO;qBAEX;AACD,iCAAe;AACf;;;AAGR;iBAEC;AACD,sBAAQ,KAAK,OAAO;qBAEX,KAAK;AACN,yBAAO,KAAK,KAAK,UAAU,YAAY,IAAI;AAC3C,iCAAe;AACf,+B
AAa,IAAI;AACjB;qBAEC;AACD,iCAAe;AACf;;AAGA,iCAAe;AACf;;AAER;iBACC;AACD,sBAAQ,KAAK,OAAO;qBAEX;AACD,iCAAe;AACf;;;AAGR;;;;AAKZ,YAAI,iBAAiB;AACjB,iBAAO,KAAK,KAAK,UAAU,YAAY,aAAa;;AAGpD,iBAAO,KAAK,KAAK,UAAU;;AAG/B,YAAI,wBAAwB,OAAO,WAAW,KAAK,gBAAgB;AAC/D,gBAAM,IAAI,MAAM,wCAAwC,KAAK,gBAAgB,qCAAqC;;AAEtH,eAAO;;;ACpXf;;;;;;;;;;;;;;;;;qCAyBwC;MACpC;AACI;AACA,aAAK,mBAAmB;AACxB,aAAK,WAAW;AAChB,aAAK,UAAU,iBAAiB,WAAW;AAC3C,4BAAoB,KAAK,KAAK,KAAK;AACnC,YAAI,KAAK,UAAU,KAAK,cAAc,KAAK,cAAc,MACrD,CAAC,OAAO,UAAU;AAClB,gBAAM,IAAI,MAAM,gFACmB,KAAK;;AAE5C,aAAK,YAAY,iBAAiB,2BAA2B;AAC7D,aAAK,eAAe,iBAAiB;AACrC,aAAK,uBACD,iBAAiB,wBAAwB,KAAK;AAClD,aAAK,wBAAwB,iBAAiB;AAC9C,aAAK,wBAAwB,iBAAiB,yBAAyB;AACvE,aAAK,qBACD,iBAAiB,uBAAuB,QAAQ,QAAQ;AAC5D,aAAK,kBACD,iBAAiB,oBAAoB,OAAO,OAAO;AACvD,YAAI,CAAC,KAAK,sBAAsB,CAAC,KAAK;AAClC,gBAAM,IAAI,MAAM;;;MAIxB;AACI,eAAO;;mBAGE,0BAA0B;AACnC,YAAI,OAAM,IAAI;AACV,gBAAM,IAAI,MAAM;;AAEpB,mCAA2B,IAAI,mBAAmB;AAElD,cAAM,mBAAmB;AACzB,eAAO;;YAGL;AACF;AACI,eAAK,SAAS,MAAM,UAAU,aAAa,aAAa;YACpD,OAAO,KAAK,yBAAyB,OAAO,OACxC,KAAK;YACT,OAAO;;;AAIX,gBAAM,IAAI,MAAM,iDAAiD,EAAE;;AAEvE,YAAI,CAAC,KAAK;AACN,gBAAM,IAAI,MAAM;;AAEpB,+BAEA,OAAO,gBAAgB,OAAO;AAC9B,aAAK,eAAe,IAAI;AACxB,YAAI,CAAC,KAAK;AAGN,eAAK,eAAe,KAAK,aAAa;mBAEjC,KAAK,aAAa,eAAe,KAAK;AAC3C,gBAAM,IAAI,MAAM,wCACC,KAAK,yBACP,KAAK,aAAa;;AAErC,6BAAqB,KAAK,aAAa,wBAAwB,KAAK;AACpE,aAAK,WAAW,KAAK,aAAa;AAClC,aAAK,SAAS,UAAU,KAAK,UAAU;AACvC,aAAK,SAAS,wBAAwB,KAAK;AAC3C,qBAAa,QAAQ,KAAK;AAC1B,aAAK,WAAW,IAAI,aAAa,KAAK;AACtC,aAAK,WAAW,IAAI,aAAa,KAAK;AACtC;;YAEE;AACF,YAAI,KAAK;AACL,iBAAO,CAAE,OAAO,MAAM,MAAM;;AAEhC;AACA;AACA,+BAAuB,MAAM,KAAK;AAClC,YAAI,KAAK;AACL,2BAAiB,KAAK,aAAa,eAAe;AAClD,8BAAoB,KAAK,4BAA4B,UAAU,CAAC,KAAK,WAAW,KAAK,sBAAsB;;AAE/G,YAAI,KAAK;AACL,2BAAiB,KAAK,aAAa,eAAe;AAClD,2BAAiB,KAAK,4BAA4B,UAAU,CAAC,KAAK,YAAY,KAAK,SAAS;;AAEhG,eAAO;UACH,OAAO,CAAE,aAAe,mBAAmB,UAAY;UACvD,MAAM;;;YAKR;AACF,eAAQ,OAAM,KAAK,QAAQ;;YAEzB;AACF,8BAAsB;AACtB,8BAAsB;AACtB,4BAAoB;AACpB,eAAO,IAAI,QAAQ;AACf,6BAAmB,YAAY;AAC3B,gBAAI,KAAK;AACL,mBAAK,SAAS,sBAAsB,KAAK;AAEzC,kBAAI,KAAK,SAAS,OAAO;AACrB,wBAAQ,CAAE,eAAe;;AAE7B,4BAAc,KAAK,KAAK,SAAS,MAAM,GAAG,KAAK;;AAEnD,gBAAI,KAAK;AACL,mBAAK,SAAS,uBAAuB,KAAK;AAC1C,4BAAc,KAAK,KAAK,SAAS;;AAGrC,gBAAI,EAAE,kBAAkB,KAAK;AACzB,4BAAc;AACd,sBAAQ,CAAE,eAAe;;aAE9B,KAAK,UAAU,KAAK,eAAe;;;MAI9C;AACI,YAAI,CAAC,KAAK;AACN,eAAK,WAAW;AAChB,eAAK,SAAS;AACd,eAAK,aAAa;AAClB,cAAI,KAAK,UAAU,QAAQ,KAAK,OAAO,YAAY,SAAS;AACxD,iBAAK,OAAO,YAAY,GAAG;;;;MAKvC;AACI,cAAM,IAAI,MAAM;;MAGpB;AACI,eAAO,KAAK;;MAEhB;AACI,0BAAkB,MAAM,GAAG;AAC3B,yBAAiB,IAAI,aAAa,MAAM,SAAS;AACjD,cAAM,QAAQ,cAAa,SAAS,IAAI,OAAM,IAAI;AAClD,eAAO;;MAEX;AACI,qBAAa,IAAI,aAAa,eAAmB;AAEjD,aAAK,IAAI,UAAU,KAAK,SAAS,SAAS;AAC1C,eAAO,QAAO,MAAM;;;ACvL5B;;;;;;;;;;;;;;;;;iCAuBoC;MAChC;AACI;AACA,aAAK,qBAAqB;AAC1B,aAAK,eAAe;AACpB,aAAK,WAAW;AAChB,aAAK,SAAS;AACd,YAAI,KAAK;AACL,eAAK,SAAS;AACd,eAAK,WACD,CAAC,KAAK,aAAa,cAAc,KAAK,aAAa;AACvD,eAAK,aAAa,UAAS,CAAC,IAAI;AAChC,cAAI,KAAK,aAAa;AAElB,uCAA2B,KAAK,aAAa,cAAc,IAAM,KAAK,mBAAmB;AACzF,wCAA4B,KAAK,aAAa,eAAe,IACzD,KAAK,mBAAmB;AAC5B,mCAAwB,KAAI,sBAAsB;AAClD,oCAAyB,KAAI,uBAAuB;AACpD,iCAAqB,iBAAiB;AACtC,kCAAsB,sBAAsB;AAC5C,iBAAK,UAAU,SAAS,CAAC,iBAAiB,gBAAgB,eAAe,eAAe,CAAC,GAAG;;AAG5F,iBAAK,UAAU,SAAS,CAAC,GAAG,GAAG,GAAG,IAAI,CAAC,GAAG;;;;MAItD;AACI,eAAO;;mBAGE,0CAA0C;AACnD,YAAI,OAAM,IAAI;AACV,gBAAM,IAAI,MAAM;;AAEpB,YAAI,CAAC;AAGD,+BAAqB,SAAS,cAAc;AAC5C,cAAI,CAAC,aAAa,eAAe,CAAC,aAAa;AAC3C,kBAAM,IAAI,MAAM;;AAGpB,6BAAmB,QAAQ,aAAa;AACxC,6BAAmB,SAAS,aAAa;;AAE7C,+BAAuB,IAAI,eAAe,oBAAoB;AAE9D,cAAM,eAAe;AACrB,eAAO;;YAGL;AACF,YAAI,KAAK,aAAa;AAClB,kBAAa,KAAK,aAAa,eAAe,UACzC,KAAK,aAAa,eAAe,eAAgB,
MAAM,+BAA+B,KAAK,aAAa;;AAGjH;AACI,eAAK,SAAS,MAAM,UAAU,aAAa,aAAa;YACpD,OAAO;cACH,UAAU,KAAK,aAAa;cAC5B,YAAY,KAAK,aAAa,aAC1B,KAAK,aAAa,aAClB;cACJ,OAAO,KAAK,mBAAmB;cAC/B,QAAQ,KAAK,mBAAmB;;;;AAMxC,YAAE,UAAU,iDAAiD,EAAE;AAC/D,gBAAM;;AAEV,YAAI,CAAC,KAAK;AACN,gBAAM,IAAI,MAAM;;AAGpB;AACI,eAAK,mBAAmB,YAAY,KAAK;;AAGzC,kBAAQ,IAAI;AACZ,eAAK,mBAAmB,MAAM,OAAO,IAAI,gBAAgB,KAAK;;AAGlE,aAAK,mBAAmB;AACxB,aAAK,WAAW;AAChB,eAAO,IAAI,QAAQ;AAEf,eAAK,mBAAmB,mBAAmB;AACvC;;;;YAIN;AACF,YAAI,KAAK;AACL,iBAAO,CAAE,OAAO,MAAM,MAAM;;AAEhC;AACA;AACI,gBAAM,WAAmB,KAAK;;AAG9B,gBAAM,IAAI,MAAM,4CAA4C,KAAK,UAAU;;AAE/E,YAAI,KAAK;AACL;AACI,mBAAO,CAAE,OAAO,KAAK,mBAAmB,MAAM,MAAM;;AAGpD,kBAAM,IAAI,MAAM,oCAAoC,EAAE;;AAGtD,gBAAI;;;AAIR,iBAAO,CAAE,OAAO,KAAK,MAAM;;;MAGnC;AAII,YAAI,KAAK,aAAa,eAAe,KAAK,aAAa,gBAClD,MAAK,mBAAmB,UAAU,KAAK,aAAa,eACjD,KAAK,mBAAmB,WAAW,KAAK,aAAa;AACzD,iBAAO;;AAEX,eAAO;;MAGX;AACI,eAAO,KAAK;AACR,gCAAsB,IAAI,UAAU,WAAW;AAC/C;AACA,yBAAe,OAAM,cAAc,eAAe,KAAK,SAAS,KAAK,YAAY,KAAK,UAAU;AAEhG,wBAAc,aAAa;AAC3B,iBAAO,aAAa,QAAQ,MAAM,MAAM;;;YAK1C;AACF,eAAQ,OAAM,KAAK,QAAQ;;MAG/B;AACI,uBAAe,KAAK,OAAO;AAC3B,eAAO,QAAQ,WAAS,MAAM;AAC9B;AACI,eAAK,mBAAmB,YAAY;;AAGpC,kBAAQ,IAAI;AACZ,eAAK,mBAAmB,MAAM;;AAElC,aAAK,WAAW;;MAGpB;AACI,cAAM,IAAI,MAAM;;;AC5LxB;;;;;;;;;;;;;;;;;;;ACAA;;;;;;;;;;;;;;;;;iCAkBoC;MAmBhC;AACI,eAAO,IAAI,cAAc,MAAM;;;gCAYX;MACxB;AACI;AACA,aAAK,WAAW;AAChB,aAAK,OAAO,IAAI,kBAAkB,UAAU;;MAEhD;AACI,eAAO,KAAK,KAAK;;YAEf;AACF,eAAO,KAAK,KAAK;;;oCAGO;MAC5B;AACI;AACA,aAAK,WAAW;AAChB,aAAK,YAAY;AAEjB,aAAK,YAAY;;MAErB;AACI,eAAO,GAAG,KAAK,SAAS,uBAAuB,KAAK;;YAElD;AACF,4BAAoB,MAAM,KAAK,SAAS;AACxC,YAAI,YAAY;AACZ,cAAI,KAAK,cAAc;AACnB,mBAAO;;AAIX,eAAK,YAAY,KAAK,KAAK;AAC3B,eAAK,YAAY;AACjB,iBAAO;;AAEX,sBAAc,YAAY,MAAM,MAAM,KAAK;AAI3C,cAAM,KAAK,KAAK,YAAY,MAAM;AAClC,2BAAmB,MAAM,MAAM,GAAG;AAC9B,eAAK,YAAY,KAAK;;AAE1B,aAAK,YAAY,MAAM,MAAM,SAAS;AACtC,eAAO;;;AC/Ff;;;;;;;;;;;;;;;;;oCAoBuC;MAUnC;AACI,eAAO,IAAI,aAAa;;;+BAYL;MACvB;AACI;AACA,aAAK,WAAW;AAChB,aAAK,OAAO,IAAI,iBAAiB;;MAErC;AACI,eAAO,KAAK,KAAK;;YAEf;AACF,eAAO,KAAK,KAAK;;;mCAyBM;MAC3B;AACI;AACA,aAAK,WAAW;AAChB,YAAI,OAAM,IAAI;AACV,eAAK,UAAU,IAAI,YAAY;;AAI/B,iBAAQ,iBAAkB;AAC1B,eAAK,UAAU,IAAI,cAAc;;;MAGzC;AACI,eAAO,GAAG,KAAK,SAAS;;YAEtB;AACF,4BAAoB,MAAM,KAAK,SAAS;AACxC;AACA,YAAI,YAAY;AACZ,iBAAO;;AAGP,kBAAQ,YAAY;;AAExB;AACA,YAAI,OAAM,IAAI;AACV,iBAAO,KAAK,QAAQ,OAAO,OAAO,CAAE,QAAQ;;AAG5C,iBAAO,KAAK,QAAQ,MAAM,OAAO,KAAK,MAAM;;AAEhD,aAAK,YAAY,KAAK;AACtB,eAAO;;;AC/Gf;;;;;;;;;;;;;;;;;oCA2BuC;MACnC,4BAA4B;AACxB;AACA,aAAK,OAAO;AACZ,aAAK,UAAU;AACf,gBAAa,gBAAgB,cACxB,QAAM,IAAI,gBACN,gBAAgB,QAAQ,gBAAgB,OACzC,QAAQ,MAAM;AAEtB,aAAK,SAAS,QAAQ,UAAU;AAEhC,aAAK,YAAY,QAAQ,aAAa,OAAO;;MAEjD;AACI,eAAO,cAAc,KAAK;;YAExB;AACF,YAAI,KAAK,UAAY,MAAK,gBAAgB,aACtC,KAAK,KAAK,aACV,KAAK,KAAK;AACV,iBAAO,CAAE,OAAO,MAAM,MAAM;;AAEhC,sBAAc,IAAI,QAAQ;AACtB,sBAAY,KAAK,SAAS,KAAK;AAC/B,cAAI,KAAK,gBAAgB;AAGrB,oBAAQ,IAAI,WAAW,KAAK,KAAK,MAAM,KAAK,QAAQ;;AAMpD,+BAAmB,IAAI;AACvB,uBAAW,SAAS;AAChB,0BAAW,WAAW;AAItB,kBAAI,iBAAgB;AAChB,wBAAO,IAAI,WAAW;;AAE1B,kBAAI,CAAE,kBAAgB;AAClB,uBAAO,OAAO,IAAI,UAAU;;AAEhC,sBAAQ;;AAEZ,uBAAW,UAAU;AACjB,qBAAO,OAAO,IAAI,MAAM;;AAE5B,uBAAW,UAAU;AACjB,qBAAO,OAAO,IAAI,MAAM,MAAM;;AAIlC,2BAAc,KAAK,KAAK,MAAM,KAAK,QAAQ;AAG3C,uBAAW,kBAAkB;;AAEjC,eAAK,SAAS;;AAElB,eAAO,CAAE,OAAQ,MAAM,OAAQ,MAAM;;;AC1F7C;;;;;;;;;;;;;;;;;AA0BO,mDAA+C;AAClD;AACA;AACA,UAAK,OAAO,QAAS;AACjB,oBAAY;;AAGZ,oBAAY,IAAI;AAChB,sBAAc,0BAA0B;;AAE5C,uBAAiB,MAAM,QAAW,WAAW;AAC7C,UAAI,SAAS;AACT,2BAAmB,IAAI,WAAW,MAAM,SAAS;AACjD,eAAO,IAAI,kBAAkB,YAAY;;AAGzC,cAAM,IAAI,MAAM,SAAS;;;AAIjC,sCAAkC;AAC9B,oBAAa;QACT,QAAQ,QAAQ;QAChB,SAAS,QAAQ;QACjB,MAAM,QAAQ;QACd,MAAM,QAAQ;QACd,aAA
[generated source-map mappings (base64-VLQ) omitted: machine-generated bundle artifact, not human-editable]
I,MAAM;AACN,mCAAS;;;;;;AAM7B,oCAAoB,YAAgB,CAAC,GAAG,MAAM,MAAM,IAAI,SAAS,gBAAoB;AACrF,2BAAW,eAAe;;;;;AAK1C,uBAAe,WAAW,MAAM,cAAkB,YAAY,EAAE,QAAQ,UAAU,EAAE;AACpF,eAAO,CAAE,QAAQ,OAAO,UAAU,OAAO,EAAE;;;AClEnD;;;;;;;;;;;;;;;;AAiBO,2CAAuC;MAC1C,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,mBAAS;AAC5B,eAAQ,GAAG,QAAQ,MAAO;AAC1B,eAAQ,SAAS,WAAK,aAAc;AACpC,2BAAmB;AACnB,mBAAW,eAAmB,EAAE,OAAO,WAAW,KAAK,IAAI,EAAE,QAAQ;AACrE,wBAAgB,eAAmB,OAAO,OAAO,WAAW,KAAK,IAAI,OAAO,QAAQ;AACpF,eAAQ,WAAW,UAAU,SAAS,YAAY,WAAW,UAAU,SAAS,cAAc,aAAa,cAAc,aAAa,gBAAgB,eAAe,YAAa,uBAAmC,EAAE,OAAO,OAAO,OAAO,SAAS,MAAK,QAAyB;AACnR,gBAAY,GAAG,SAAS,SAAS,QAAQ,MAAM,YAAY,kEAClB,SAAS,mBAC3C,GAAG;AACV,oBAAY,eAAmB,UAAU,WAAW,KAAK,IAAI,GAAG,QAAQ;AAGxE,2BAAkB,2BAA+B,OAAO,OAAO,OAAO;AAMtE,qBAAa,GAAG,IAAI,WAAW,EAAE;AAC7B,0BAAgB,GAAG,OAAO,WAAW,EAAE;AACnC,yBAAa,OAAO,eAAe,QAAQ;AAC3C,4BAAgB,GAAG,OAAO,UAAU,EAAE;AAClC,2BAAa,OAAO,cAAc,QAAQ;AAC1C,2BAAa,GAAG,IAAI,YAAY,EAAE;AAC9B,6BAAa,OAAO;AACpB,2BAAW;AACX,2BAAW;AACX,6BAAa,GAAG,IAAI,cAAc,EAAE;AAChC,8BAAY,OAAO,IAAI;AACvB,sBAAI,OAAO,KAAK,MAAM;AAClB,iCAAa,GAAG,IAAI,aAAa,EAAE;AAC/B,kCAAY,OAAO,IAAI;AACvB,0BAAI,OAAO,KAAK,MAAM;AAClB,oCAAY,GAAG,GAAG,KAAK,KAAK,KAAK,QAAQ,GAAG,GAAG;AAC/C,4BAAI,MAAM;AACN,mCAAS;AACT,iCAAO;AACP,iCAAO;;;;;;AAM3B,2BAAU,MAAM,MAAM,MAAM,IAAI,GAAG,MAAM,MAAM;;;;;AAK/D,uBAAe,WAAW,MAAM,cAAkB,YAAW,EAAE,QAAQ,OAAO,OAAO,OAAO;AAC5F,eAAO,CAAE,QAAQ,OAAO,OAAO,OAAO,OAAO,OAAO;;;ACtE5D;;;;;;;;;;;;;;;;AAiBO,0CAAsC;MACzC,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,mBAAS;AAC5B,eAAQ,GAAG,QAAQ,MAAO;AAC1B,eAAQ,SAAS,WAAK,aAAc;AACpC,2BAAmB;AACnB,mBAAW,eAAmB,EAAE,OAAO,WAAW,KAAK,IAAI,EAAE,QAAQ;AACrE,wBAAgB,eAAmB,OAAO,OAAO,WAAW,KAAK,IAAI,OAAO,QAAQ;AACpF,eAAQ,WAAW,UAAU,SAAS,YAAY,WAAW,UAAU,SAAS,cAAc,aAAa,cAAc,aAAa,gBAAgB,eAAe,YAAa,uBAAmC,EAAE,OAAO,OAAO,OAAO,SAAS,MAAK,QAAyB;AACnR,gBAAY,GAAG,SAAS,SAAS,QAAQ,MAAM,YAAY,iEAClB,SAAS,mBAC3C,GAAG;AACV,oBAAY,eAAmB,UAAU,WAAW,KAAK,IAAI,GAAG,QAAQ;AAGxE,2BAAkB,2BAA+B,EAAE,OAAO,EAAE;AAM5D,qBAAa,GAAG,IAAI,WAAW,EAAE;AAC7B,0BAAgB,GAAG,OAAO,WAAW,EAAE;AACnC,yBAAa,OAAO,eAAe,QAAQ;AAC3C,4BAAgB,GAAG,OAAO,UAAU,EAAE;AAClC,2BAAa,OAAO,cAAc,QAAQ;AAC1C,2BAAa,GAAG,IAAI,YAAY,EAAE;AAC9B,6BAAa,OAAO;AACpB,6BAAc,OAAO,IAAK,IAAI;AAC9B,6BAAc,OAAO,IAAK,IAAI;AAC9B,6BAAa,GAAG,IAAI,cAAc,EAAE;AAChC,8BAAY,OAAO,IAAI;AACvB,sBAAI,OAAO,KAAK,MAAM;AAClB,iCAAa,GAAG,IAAI,aAAa,EAAE;AAC/B,kCAAY,OAAO,IAAI;AACvB,0BAAI,OAAO,KAAK,MAAM;AAClB,oCAAY,GAAG,GAAG,KAAK,KAAK,KAAK,QAAQ,GAAG,GAAG;AAC/C,4BAAI,MAAM;AACN,mCAAS;AACT,mCAAS;AACT,mCAAS;;;;;;AAM7B,2BAAU,GAAG,QAAQ,QAAQ,MAAM,IAAI,GAAG,MAAM,MAAM;;;;;AAKtE,uBAAe,WAAW,MAAM,cAAkB,YAAW,EAAE,QAAQ,EAAE,OAAO,EAAE;AAClF,eAAO,CAAE,QAAQ,OAAO,EAAE,OAAO,OAAO,EAAE;;;ACtElD;;;;;;;;;;;;;;;;AAmBO,oBAAgB,6BAA6B,UAAU,IAAI;AAC3D,kBAAY,iBAAiB,MAAK;AAClC,uBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY;;ACxBhB;;;;;;;;;;;;;;;;AAkBA,cAAU;AACV,eAAW;AACX,eAAW;AACX,eAAW;AACX,eAAW;AACX,eAAW;AACJ,kBAAY,gBAAgB,KAAK;AACpC,oBAAa,KAAK,KAAK;AACvB,gBAAU,KAAK,IAAI;AACnB,gBAAU,IAAO,KAAM,IAAI;AAC3B,aAAO,QACF,KACQ,SAAK,IAAI,MAAM,IAAK,MAAM,IAAI,MAAM,IAAI,MAAM,IAC/C,KAAK,IAAI,CAAC,IAAI;;AAEvB,sBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY;;ACpChB;;;;;;;;;;;;;;;;AA8BO;AACH,yBAAmB,OAAM;AACzB,oBAAc,WAAW;AACzB,uBAAiB,WAAW;AAC5B,wBAAkB,WAAW,KAAK,IAAI,OAAM;AAC5C,qBAAe,UAAU,mBAAmB;AAC5C,qBAAe,UAAU,mBAAmB;AAE5C,0BAAoB,CAAC,OAAO;AAC5B,yBAAmB,eAAmB;AACtC,yBAAmB,wBAA4B,WAAW;AAC1D,yBAAmB,wBAA4B,WAAW;AAC1D,mBAAa,GAAG,IAAI,OAAO;AAEvB,kBAAU,QAAM;UACZ,QAAQ,CAAE,GAAG;UACb,SAAS;UACT,OAAO,CAAE,OAAO,CAAC,GAAG,IAAI,MAAM,CAAC,GAAG;;AAEtC,kBAAU,QAAM;UACZ,QAAQ,CAAE,GAAG;UACb,SAAS;UACT,OAAO,CAAE,OAAO,CAAC,GAAG,IAAI,MAAM,CAAC,GAAG;;AAEtC,uBAAc,UAAQ,CAAE,QAAQ,CAAE,MAAM,GAAG,MAAM,IAAK,SAAS
;AAE/D,eAAQ,aAAM,eAAS,QAAQ,QAAO,SAAS;AAC/C,oBAAY,wBAAoC,OAAM;AACtD,qBAAa,GAAG,IAAI,UAAU;AAC1B,oBAAU,qBAAiC,KAAK;AAChD,qBAAW,IAAI,WAAW,KAAK,EAAE;AACjC,qBAAW,IAAI,WAAW,KAAK,EAAE;;AAErC,mBAAW,8BAA8B;AACzC,mBAAW,8BAA8B;AACzC,mBAAW,8BAA8B;;AAE7C,wBAAkB,WAAW,eAAe,aAAa,WAAW;AACpE,wBAAkB,WAAW,eAAe,aAAa,WAAW;AACpE,qBAAe,UAAQ,CAAE,QAAQ,CAAE,MAAM,WAAW,MAAM,YAAa,SAAS;AAChF,iBAAW,8BAA8B;AACzC,iBAAW,8BAA8B;AACzC,aAAO;;AAEJ;AACH,wBAAkB,eAAmB,OAAM;AAC3C,wBAAkB,WAAW,KAAK,IAAI,OAAM;AAC5C,uBAAiB,WAAW,KAAK,IAAI,UAAU,mBAAmB,KAAK,QAAQ;AAC/E,uBAAiB,WAAW,KAAK,IAAI,UAAU,mBAAmB,KAAK,QAAQ;AAC/E,UAAI,cAAc;AACd,uBAAe,UAAU,UAAU,UAAU,WAAW,SAAS;AACjE,4BAAoB,CAAC,OAAM,MAAM,IAAI,OAAM,MAAM;AACjD,YAAI;AACA,2BAAiB,WAAW,eAAe,aAAa,WAAW,OAAO;AAC1E,2BAAiB,WAAW,eAAe,aAAa,WAAW,OAAO;AAC1E,2BAAiB,WAAW,eAAe,IAAI,WAAW,mBAAuB,WAAW;AAC5F,+BAAqB,WAAS,CAAE,QAAQ,CAAE,GAAG,WAAY,SAAS;AAClE,8BAAoB,WAAU,WAAW,CAAE,QAAQ,CAAE,GAAG,UAAU,GAAG,WAAY,SAAS;AAC1F,8BAAoB,WAAU,WAAW,CAAE,QAAQ,CAAE,GAAG,UAAU,GAAG,eAAgB,SAAS;AAC9F,8BAAoB,WAAW,KAAK,IAAI,YAAY,QAAQ;AAC5D,8BAAoB,WAAW,KAAK,IAAI,YAAY,QAAQ;AAC5D,qBAAW,8BAA8B;AACzC,qBAAW,8BAA8B;AACzC,qBAAW,8BAA8B;AACzC,qBAAW,8BAA8B;AACzC,qBAAW,8BAA8B;AACzC,qBAAW,8BAA8B;AACzC,iBAAO,CAAE,MAAM,aAAa,MAAM;;AAEtC,eAAO;;AAGP,sBAAa,wBAAoC,UAAU;AAC3D,0BAAkB,yBAAyB,OAAM,WAAW;AAC5D,eAAO,wBAAoC;;;AAGnD;AACI,aAAQ,QAAO,OAAO,OAAO;;AAGjC;AACI,UAAI,SAAS;AACT,eAAO,CAAE,MAAM,UAAU,MAAM;;AAEnC,oBAAa,wBAAoC,UAAU;AAC3D,mBAAa,OAAO;AACpB,0BAAoB,sBAAkC;AACtD,2BAAqB,YAAY;AACjC,2BAAqB,YAAY;AACjC,wBAAkB,CAAC,aAAa;AAChC,2BAAqB,WAAW,eAAe,WAAW,WAAW;AACrE,2BAAqB,WAAW,eAAe,WAAW,WAAW;AACrE,6BAAuB,UAAQ,CAAE,QAAQ,CAAE,MAAM,cAAc,MAAM,eAAgB,SAAS;AAC9F,yBAAmB,qBAAiC;AACpD,0BAAoB,WAAW;AAC/B,0BAAoB,WAAW;AAC/B,uBAAiB,CAAC,YAAY;AAC9B,0BAAoB,WAAW,eAAe,UAAU,WAAW;AACnE,0BAAoB,WAAW,eAAe,UAAU,WAAW;AACnE,4BAAsB,UAAQ,CAAE,QAAQ,CAAE,MAAM,aAAa,MAAM,cAAe,SAAS;AAE3F,2BAAqB,UAAU,cAAc,cAAc,MAAM,SAAS;AAC1E,4BAAsB,aAAa;AACnC,4BAAsB,aAAa;AACnC,yBAAmB,CAAC,cAAc;AAClC,4BAAsB,WAAW,eAAe,YAAY,WAAW;AACvE,4BAAsB,WAAW,eAAe,YAAY,WAAW;AACvE,8BAAwB,UAAQ;QAC5B,QAAQ,CAAE,MAAM,eAAe,MAAM;QACrC,SAAS;;AAEb,0BAAoB,UAAU,aAAa,aAAa,MAAM,SAAS;AACvE,2BAAqB,YAAY;AACjC,2BAAqB,YAAY;AACjC,wBAAkB,CAAC,aAAa;AAChC,2BAAqB,WAAW,eAAe,WAAW,WAAW;AACrE,2BAAqB,WAAW,eAAe,WAAW,WAAW;AACrE,6BAAuB,UAAQ,CAAE,QAAQ,CAAE,MAAM,cAAc,MAAM,eAAgB,SAAS;AAC9F,gBAAU,WAAuB,MAAM;AACvC,qBAAe,CAAC,EAAE,KAAK;AACvB,wBAAkB,WAAW,eAAe,QAAQ,WAAW,EAAE;AACjE,wBAAkB,WAAW,eAAe,QAAQ,WAAW,EAAE;AACjE,0BAAoB,UAAQ,CAAE,QAAQ,CAAE,MAAM,WAAW,MAAM,YAAa,SAAS;AACrF,2BAAqB,WAAS,CAAE,QAAQ,CAAE,GAAG,aAAa,GAAG,iBAAkB,SAAS;AACxF,sBAAgB,MAAI;QAChB,QAAQ,CAAE,GAAG,iBAAiB,GAAG;QACjC,SAAS;;AAEb,sBAAgB,MAAI;QAChB,QAAQ,CAAE,GAAG,iBAAiB,GAAG;QACjC,SAAS;;AAEb,0BAAoB,OAAK,CAAE,QAAQ,CAAE,OAAO,UAAW,SAAS;AAChE,0BAAoB,OAAK,CAAE,QAAQ,CAAE,OAAO,UAAW,SAAS;AAChE,0BAAoB,OAAK,CAAE,QAAQ,CAAE,OAAO,UAAW,SAAS;AAChE,0BAAoB,OAAK,CAAE,QAAQ,CAAE,OAAO,UAAW,SAAS;AAChE,oBAAc,SAAO;QACjB,QAAQ,CAAC,aAAa;QACtB,SAAS;QACT,OAAO,CAAE,MAAM;;AAEnB,oBAAc,SAAO;QACjB,QAAQ,CAAC,aAAa;QACtB,SAAS;QACT,OAAO,CAAE,MAAM;;AAEnB,wBAAkB,WAAW,KAAK,IAAI,MAAM,QAAQ;AACpD,wBAAkB,WAAW,KAAK,IAAI,MAAM,QAAQ;AACpD,iBAAW,8BAA8B;AACzC,iBAAW,8BAA8B;AACzC,iBAAW,8BAA8B;AACzC,iBAAW,8BAA8B;AACzC,iBAAW,8BAA8B;AACzC,iBAAW,8BAA8B;AACzC,iBAAW,8BAA8B;AACzC,iBAAW,8BAA8B;AACzC,iBAAW,8BAA8B;AACzC,iBAAW,8BAA8B;AACzC,iBAAW,8BAA8B;AACzC,iBAAW,8BAA8B;AACzC,iBAAW,8BAA8B;AACzC,iBAAW,8BAA8B;AACzC,iBAAW,8BAA8B;AACzC,iBAAW,8BAA8B;AACzC,iBAAW,8BAA8B;AACzC,iBAAW,8BAA8B;AACzC,iBAAW,8BAA8B;AACzC,iBAAW,8BAA8B;AACzC,iBAAW,8BAA8B;AACzC,iBAAW,8BAA8B;AACzC,iBAAW,8BAA8B;AACzC,iBAAW,8BAA8B;AACzC,aAAO,CAAE,MAAM,WAAW,MAAM;;AAGpC;AACI,kBAAY,IAAI,aAAa,OAAO;AAEp
C,mBAAa,GAAG,IAAI,MAAM;AACtB,oBAAW;AACX,oBAAW;AACX,qBAAa,GAAG,IAAI,MAAM;AACtB,oBAAU,UAAsB,IAAI,GAAG,MAAM;AAC7C,uBAAa,qBAAiC,OAAM;AACpD,mBAAQ,KAAK,OAAO,EAAE,OAAO,KAAK,OAAO,EAAE;AAC3C,mBAAQ,KAAK,OAAO,EAAE,OAAO,KAAK,OAAO,EAAE;;AAE/C,YAAI;AACA,mBAAQ;AACR,mBAAQ;;AAEZ,4BAAgC,KAAK,OAAM,OAAM;;AAErD,aAAO;;AChOX;;;;;;;;;;;;;;;;AAmBO;AACH,aAAQ,QAAQ,qBAAY;AAC5B,aAAQ,iBAAU;AAClB,wBAAkB,eAAmB,OAAM;AAE3C,iCAA2B,OAAM,MAAM,OAAM,MAAM,SAAS;AAC5D,oBAAc,YAAY;AAC1B,sBAAgB,UAAQ;QACpB,QAAQ,CAAE,GAAG;QACb,SAAA;QACA,OAAO,CAAE,OAAO,CAAC,OAAO;;AAE5B,qBAAe,SAAS,SAAS,OAAO;AACxC,6BAAuB,UAAQ,CAAE,QAAQ,CAAE,GAAG,SAAU,SAAA,UAAS,OAAO,CAAE,OAAO,OAAM;AACvF,eAAQ,8BAA8B;AACtC,eAAQ,8BAA8B;AACtC,aAAO;;AAEJ,sBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY;;ACxChB;;;;;;;;;;;;;;;;AAiBO;AACH,aAAQ,mBAAS,SAAU;AAC3B,aAAQ,OAAO,OAAO,SAAU;AAChC,qBAAe,SAAS,YAAgB;AACxC,qBAAe,mBAAuB,QAAQ,eAAmB;AACjE,iBAAW,QAAQ,OAAO;AAC1B,aAAO,SAAQ,eAAe,OAAO,QAAQ;;AAE1C,wBAAmB;MACtB,YAAY;MACZ,aAAa;MACb,YAAY;;AAEhB;AACI,UAAI,UAAU;AACV,eAAO,KAAK;;AAGZ,eAAO,KAAK;;;ACnCpB;;;;;;;;;;;;;;;;AAiBO,iCAA4B;MAC/B,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,OAAO;AAC1B,eAAQ,iBAAU;AAClB,2BAAmB;AACnB,uBAAe,wBAA4B,OAAM,OAAO,eAAmB,OAAM;AACjF,8DAAsD,OAAM;AAC5D,0BAAkB,WAAW,KAAK,IAAI,OAAM,QAAQ;AACpD,4BAAoB,GAAG,WAAW,OAAO;AACrC,8BAAoB,WAAW,aAAa,cAAc;AAC1D,yBAAe,GAAG,MAAM,aAAa;AACjC,8BAAkB,MAAO,cAAa;AACtC,2BAAe,GAAG,MAAM,YAAY;AAChC,gCAAkB,MAAM;AACxB,iCAAmB,GAAG,UAAU,aAAa;AACzC,gCAAe,CAAC,OAAO,KAAK,KAAK;AACjC,0BAAU,QAAO;AACjB,+BAAe,KAAK,MAAM,aAAa;AACvC,+BAAe,cAAc,YAAY,YAAY;AACrD,kCAAkB,UAAU;AAE5B,oBAAI,UAAU,KAAK,SAAS;AAExB,2CAAyB,SAAS;AAClC,mCAAiB,cAAc,YAAY,mBAAmB;AAC9D,gCAAc,UAAU;;AAE5B,uBAAO,UAAU;;;;;AAKjC,uBAAe,WAAW,MAAM,QAAQ,OAAM,OAAO,OAAM;AAC3D,eAAO,CAAE,QAAQ,OAAO,OAAM,OAAO,OAAO,OAAM;;;ACnD1D;;;;;;;;;;;;;;;;AAoBO;AACH,aAAQ,QAAQ,mBAAS,SAAU;AACnC,aAAQ,GAAG,QAAQ,MAAM,0BAA2B;AACpD,aAAQ,SAAS,WAAK,YAAY,WAAW,iBAAiB,2BAAe;AAC7E,mBAAa,OAAO;QAChB,QAAQ,CAAE,GAAG;QACb,SAAA;QACA,OAAO,CAAE,SAAS,KAAA,MAAK,YAAY,WAAW;;AAElD,UAAI;AACA,0BAAkB;AAClB,iBAAS,MAAI,CAAE,QAAQ,CAAE,GAAG,QAAQ,GAAG,OAAQ,SAAA;AAC/C,iBAAQ,8BAA8B;;AAE1C,UAAI;AACA,0BAAkB;AAClB,iBACI,kBAAgB,UAAS,QAAQ,aAAY;AACjD,iBAAQ,8BAA8B;;AAE1C,aAAO;;AAEJ,+BAA0B;MAC7B,YAAY;MACZ,aAAa;MACb,YAAY;;AC7ChB;;;;;;;;;;;;;;;;AAoBO;AACH,aAAQ,QAAQ,mBAAS,SAAU;AACnC,aAAQ,GAAG,QAAQ,MAAM,0BAA2B;AACpD,aAAQ,SAAS,WAAK,YAAY,WAAW,iBAAiB,2BAAe;AAC7E,mBAAa,sBAAsB;QAC/B,QAAQ,CAAE,GAAG;QACb,SAAA;QACA,OAAO,CAAE,SAAS,KAAA,MAAK,YAAY,WAAW;;AAElD,UAAI;AACA,0BAAkB;AAClB,iBAAS,MAAI,CAAE,QAAQ,CAAE,GAAG,QAAQ,GAAG,OAAQ,SAAA;AAC/C,iBAAQ,8BAA8B;;AAE1C,UAAI;AACA,0BAAkB;AAClB,iBACI,kBAAgB,UAAS,QAAQ,aAAY;AACjD,iBAAQ,8BAA8B;;AAE1C,aAAO;;AAEJ,wCAAmC;MACtC,YAAY;MACZ,aAAa;MACb,YAAY;;AC7ChB;;;;;;;;;;;;;;;;AAmBO;AACH,aAAQ,QAAQ,qBAAY;AAC5B,aAAQ,iBAAU;AAClB,wBAAkB,eAAmB,OAAM;AAE3C,iCAA2B,OAAM,MAAM,OAAM,MAAM,SAAS;AAC5D,oBAAc,YAAY;AAC1B,sBAAgB,UAAQ;QACpB,QAAQ,CAAE,GAAG;QACb,SAAA;QACA,OAAO,CAAE,OAAO,CAAC,OAAO;;AAE5B,qBAAe,SAAS,SAAS,MAAM;AACvC,6BAAuB,UAAQ,CAAE,QAAQ,CAAE,GAAG,SAAU,SAAA,UAAS,OAAO,CAAE,OAAO,OAAM;AACvF,eAAQ,8BAA8B;AACtC,eAAQ,8BAA8B;AACtC,aAAO;;AAEJ,uBAAmB;MACtB,YAAY;MACZ,aAAa;MACb,YAAY;;ACxChB;;;;;;;;;;;;;;;;AAkBO,uBAAiB,gBAAgB,UAAU,QAAQ,OAAO,SAAS,MAAM,IAAI,GAAG;AAChF,2BAAuB;MAC1B,YAAY;MACZ,aAAa;MACb,YAAY;;ACtBhB;;;;;;;;;;;;;;;;AAkBO,oBAAc,gBAAgB,OAAO,QAAQ,KAAK,IAAI,QAAQ,WAAW,IAAI,GAAG;AAChF,wBAAoB;MACvB,YAAY;MACZ,aAAa;MACb,YAAY;;ACtBhB;;;;;;;;;;;;;;;;AAkBO,oBAAc,gBAAgB,OAAO,QAAQ,OAAO,MAAM,MAAM,IAAI,GAAG;AACvE,wBAAoB;MACvB,YAAY;MACZ,aAAa;MACb,YAAY;;ACtBhB;;;;;;;;;;;;;;;;AAkBO,oBAAc,gBAAgB,OAAO,QAAQ,KAAK,MAAM;AACxD,wBAAoB;MACvB,YAAY;MACZ,aAAa;MACb,YAAY;;ACtBhB;;;;;;;;;;;;;;;;AAkBO,yBAAmB,gBAAgB,YAAY,QA
AQ,KAAK,IAAI,GAAG;AACnE,6BAAyB;MAC5B,YAAY;MACZ,aAAa;MACb,YAAY;;ACtBhB;;;;;;;;;;;;;;;;AAsBO,uBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,OAAO;AAC1B,eAAQ,KAAM;AACd,eAAQ,kBAAkB,YAAa;AACvC,2BAAmB;AACnB,qBAAa,EAAE;AACf,sBAAc,OAAO;AACrB,yBAAiB,gBAAoB,kBAAkB;AACvD,mBAAW;AACX,6BAAqB,oBAAgC,MAAM;AAC3D,oBAAY,WAAW,KAAK,IAAI,EAAE,QAAQ;AAC1C,YAAI,gBAAgB;AAChB,2BAAiB,IAAI,MAAM;AAC3B,uBAAa,GAAG,IAAI,SAAS,QAAQ;AACjC,qBAAS,KAAK,OAAO,aAAa;;AAEtC,kBAAQ,cAAc,OAAO,QAAQ,EAAE,OAAO,cAAc;AAC5D,iBAAO,kBAA8B,KAAK,QAAQ;AAClD,mBAAS;;AAEb,yBAAiB,GAAG;AACpB,oCAAwC,OAAO,MAAM;AACrD,2CAAmC,2BAAuC,QAAQ;AAClF,2BAAmB,eAAmB;AACtC,uBAAe,QAAQ,OAAO,YAAY,aAAa,EAAE;AACzD,uBAAe,WAAW,MAAM,QAAQ,aAAa,EAAE;AACvD,uBAAe;AACf,YAAI;AAEA,2BAAiB,sBAAkC,aAAa;AAChE,qBAAW;;AAEf,eAAO,CAAE,QAAQ,OAAO,UAAU,OAAO,EAAE;;;ACxDnD;;;;;;;;;;;;;;;;AAoBO;AACH,aAAQ,QAAQ,mBAAS,SAAU;AACnC,aAAQ,KAAM;AACd,uBAAiB,GAAG;AACpB,aAAQ,YAAY,SAAS,WAAK,mBAAoB;AACtD,wBAAkB;AAClB,cAAY,gCAA4C,SAAS,YAAY,MAAM,wEAChE,0BAA0B;AAC7C,uBAAiB,mBAA+B,EAAE,OAAO,YAAY,SAAS,WAAW,MAAK;AAC9F;AACA,UAAI,SAAS,gBAAgB,KAAK,SAAS,iBAAiB,KACxD,aAAiB,SAAS,SAAS,SAAS;AAC5C,cAAM,WAAS,CAAE,QAAQ,CAAE,IAAK,SAAA;;AAGhC,wBAAgB,SAAQ,KAAK,IAAI,EAAE,QAAQ;AAC3C,yBAAgB,gBAAoB,EAAE;AACtC,wBAAe,OAAK,SAAS,EAAE,OAAO,EAAE,OAAO,UAAS,UAAU;AAClE,cAAM,SAAQ,eAAe,SAAS,UAAU,EAAE,OAAO,QAAO;;AAEpE,aAAO;;AAEJ,2BAAsB;MACzB,YAAY;MACZ,aAAa;MACb,YAAY;;AC7ChB;;;;;;;;;;;;;;;;AAmBO;AACH,aAAQ,QAAQ,mBAAS,SAAU;AACnC,aAAQ,IAAI,eAAO,UAAW;AAC9B,gBAAU;AACV,uBAAiB,CAAC,QAAO,SAAS;AAClC,aAAQ,YAAY,SAAS,WAAK,mBAAoB;AACtD,uBAAiB,mBAA+B,EAAE,OAAO,YAAY,SAAS,GAAmB,MAAK;AACtG,sBAAgB,SAAQ,KAAK,IAAI,EAAE,QAAQ;AAC3C,wBAAkB,QAAO,SAAS,UAAU,EAAE,OAAO,iBAAiB,SAAS,EAAE,OAAO,EAAE,OAAO,UAAU;AAC3G,2BAAqB,SAAS;AAC9B,0BAAoB,SAAS;AAC7B,6BAAuB,SAAS;AAChC,4BAAsB,SAAS;AAC/B,oCAA8B,SAAS;AACvC,mCAA6B,SAAS;AACtC,sBAAgB,uBAAuB,IAAI,SAAS,QAAQ;AAC5D,qBAAe,wBAAwB,IAAI,SAAS,QAAQ;AAC5D,iBAAW,QAAO,EAAE,OAAO;AAC3B,qBAAe,SAAQ,KAAK,IAAI,GAAG,QAAQ;AAC3C,oBAAc,QAAO,GAAG,OAAO,WAAW;AAC1C,mBAAa,GAAG,IAAI,SAAS,WAAW,EAAE;AACtC,qBAAa,GAAG,IAAI,SAAS,YAAY,EAAE;AACvC,yBAAe,GAAG,MAAM,SAAS,UAAU,EAAE;AACzC,2BAAe,GAAG,MAAM,SAAS,SAAS,EAAE;AAExC,gCAAkB,MAAM;AACxB,gCAAkB,MAAM;AACxB,4BAAc;AACd,4BAAc,GAAG,KAAK,uBAAuB,MAAM;AAC/C,4BAAa,aAAY,MAAM;AAC/B,oBAAI,MAAM,KAAK,OAAO,SAAS,aAC3B,KAAK,MAAM,SAAS;AACpB;;AAEJ,8BAAc,GAAG,KAAK,sBAAsB,MAAM;AAC9C,8BAAa,aAAY,MAAM;AAC/B,sBAAI,MAAM,KAAK,OAAO,SAAS,YAC3B,KAAK,MAAM,SAAS;AACpB;;AAEJ,iCAAe,wBAAwB,uBAAuB,IAC1D,UAAU,IAAI,GAAG,KAAK,KAAK;AAC/B,iCAAe,KAAK,uBAAuB;AAC3C,+BAAa,WAAW,SAAS,IAAI;AACrC,sBAAI,SAAS;AACT;;AAEJ,gCAAc,MAAM,IAAI,GAAG,KAAK,KAAK;AACrC,6BAAW,QAAQ;;;AAG3B,iBAAG,IAAI,SAAS,GAAG,KAAK,KAAK;;;;;AAK7C,aAAO,SAAQ,eAAe,GAAG,OAAO,GAAG,OAAO,GAAG;;AAElD,kCAA8B;MACjC,YAAY;MACZ,aAAa;MACb,YAAY;;AChFhB;;;;;;;;;;;;;;;;AAkBO;AACH,sBAAgB,gBAAoB;AACpC,uBAAiB,OAAK,SAAS,QAAQ,OAAO,SAAS,UAAU;AACjE,2BAAqB,iBAAiB,SAAS,QAAQ,OAAO,UAAU,MAAM;AAC9E,aAAO,CAAC,SAAS,QAAQ,aAAa;;ACtB1C;;;;;;;;;;;;;;;;AAoBO,oCAAgC;MACnC,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,OAAO;AAC1B,eAAQ,KAAM;AACd,eAAQ,YAAY,SAAS,WAAK,uBAAwB;AAC1D,2BAAmB;AACnB,yBAAiB,GAAG;AACpB,uBAAe,WAAW,KAAK,IAAI,EAAE,QAAQ;AAC7C,yBAAiB,mBAA+B,EAAE,OAAO,YAAY,SAAS,CAAC,GAAG,IAAI;AACtF,kCAA0B,sBAAsB,QAAQ,EAAE,OAAO,EAAE,OAAO,qBAAqB;AAC/F,6BAAqB,WAAW,MAAM,QAAQ,SAAS,UAAU,EAAE;AACnE,8BAAsB,WAAW,MAAM,SAAS,SAAS,UAAU,EAAE;AACrE,eAAO;UACH,CAAE,QAAQ,cAAc,OAAO,SAAS,UAAU,OAAO,EAAE;UAC3D,CAAE,QAAQ,eAAe,OAAO,SAAS,UAAU,OAAO;;;;ACnCtE;;;;;;;;;;;;;;;;AAkBO;AACH,aAAQ,QAAQ,mBAAS,SAAU;AACnC,aAAQ,KAAM;AACd,aAAQ,UAAU,QAAS;AAC3B,uBAAiB,GAAG;AACpB,uBAAiB,SAAS,IAAI,WAAU,GAAE,KAAqB,EAAE,MAAM,KAAK,GAAE;AAC9E,oBAAc,SAAS,IAAI,QAAK,GAAE;AAClC,kBAAY,SAAS,IAAI,WAAU,GAAE,
KAAK,EAAE,MAAM;AAClD,qBAAe,SAAS,YAAY,IAAI;AACxC,oBAAc,SAAQ,KAAK,IAAI,EAAE,QAAQ;AACzC,oBAAc,EAAE,MAAM;AACtB,uBAAiB,gBAAoB,EAAE;AACvC,yBAAmB,eAAmB;AACtC,yBAAmB,SAAS;AAC5B,4BAAsB,gBAAoB;AAC1C,sBAAgB,wBAA4B,EAAE,OAAO;AACrD,mBAAa,GAAG,IAAI,YAAY;AAC5B,sBAAa,YAAgB,GAAG,YAAY;AAC5C,sBAAa,GAAG,KAAI,YAAY;AAC5B,cAAI,QAAO,MAAK,MAAM;AAClB,oBAAO,MAAK,MAAM,MAAK,IAAI,QAAO,MAAK;qBAElC,QAAO,OAAM,IAAI;AACtB,oBAAO,MAAM,KAAI,MAAK,KAAK,IAAI,QAAO,MAAK;;;AAGnD,kBAAS,QAAO,IAAI,WAAU,IAAI,MAAM;AACxC,wBAAgB,YAAgB,SAAQ,OAAO;AAC/C,gBAAQ,KAAK,MAAM;;AAEvB,oBAAc,SAAQ,MAAM,SAAS,UAAU,EAAE;AACjD,aAAO,CAAE,QAAQ,OAAO,OAAO,UAAU,OAAO,EAAE;;AAE/C,4BAAwB;MAC3B,YAAY;MACZ,aAAa;MACb,YAAY;;ACtDhB;;;;;;;;;;;;;;;;AAkBA,sCAAgC;AAEzB,uCAAkC;MACrC,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,mBAAS;AAC5B,eAAQ,OAAO,UAAW;AAC1B,eAAQ,eAAe,cAAc,gBAAgB,sBAAuB;AAC5E,2BAAmB;AACnB,yBAAiB,OAAO;AACxB,0BAAkB,WAAW,KAAK,IAAI,MAAM,QAAQ;AACpD,2BAAmB,WAAW,KAAK,IAAI,OAAO,QAAQ;AACtD,eAAQ,iBAAiB,gBAAiB,0BAAwB,WAAW,YAAY,eAAe,cAAc,gBAAgB;AACtI,eAAO,CAAC,iBAAiB;;;AC/BjC;;;;;;;;;;;;;;;;AAkBA,sCAAgC;AAEzB,uCAAkC;MACrC,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,mBAAS;AAC5B,eAAQ,OAAO,UAAW;AAC1B,eAAQ,eAAe,cAAc,gBAAgB,gBAAiB;AACtE,2BAAmB;AACnB,yBAAiB,OAAO;AACxB,0BAAkB,WAAW,KAAK,IAAI,MAAM,QAAQ;AACpD,2BAAmB,WAAW,KAAK,IAAI,OAAO,QAAQ;AACtD,iCAAyB;AACzB,gCAAwB;AACxB,kCAA0B;AAC1B,gCAAwB;AACxB,eAAQ,iBAAiB,kBAAmB,0BAAwB,WAAW,YAAY,kBAAkB,iBAAiB,mBAAmB;AACjJ,eAAO,CAAC,iBAAiB;;;ACnCjC;;;;;;;;;;;;;;;;AAkBO;AACH,aAAQ,QAAQ,mBAAS,SAAU;AACnC,aAAQ,KAAM;AACd,aAAQ,UAAU,iBAAkB;AACpC,uBAAiB,GAAG;AACpB,uBAAiB,SAAS,IAAI,WAAU,GAAE,KAAqB,EAAE,MAAM,KAAK,GAAE;AAC9E,oBAAc,SAAS,IAAI,QAAK,GAAE;AAClC,oBAAc,SAAQ,KAAK,IAAI,EAAE,QAAQ;AACzC,oBAAc,eAAmB,EAAE;AACnC,oBAAc,EAAE,MAAM;AACtB,uBAAiB,gBAAoB,EAAE;AACvC,yBAAmB,eAAmB;AACtC,yBAAmB,SAAS;AAC5B,4BAAsB,gBAAoB;AAC1C,sBAAgB,wBAA4B,EAAE,OAAO;AACrD,UAAI,kBAAkB;AAClB,gBAAQ,KAAK;;AAEjB,mBAAa,GAAG,IAAI,OAAO;AACvB,wBAAe,YAAgB,GAAG,OAAO;AACzC,0BAAkB,QAAO,IAAI,WAAU,IAAI,MAAM;AACjD,yBAAiB,YAAgB,WAAW,YAAY;AACxD,gBAAQ,YAAY,MAAM;;AAE9B,oBAAc,SAAQ,MAAM,SAAS,UAAU,EAAE;AACjD,aAAO,CAAE,QAAQ,OAAO,OAAO,UAAU,OAAO,EAAE;;AAE/C,yBAAoB;MACvB,YAAY;MACZ,aAAa;MACb,YAAY;;AChDhB;;;;;;;;;;;;;;;;AAkBO,yBAAmB,gBAAgB,YAAY,QAAQ,IAAI;AAC3D,6BAAyB;MAC5B,YAAY;MACZ,aAAa;MACb,YAAY;;ACtBhB;;;;;;;;;;;;;;;;AAiBO,oCAA+B;MAClC,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,OAAO;AAC1B,eAAQ,iBAAU;AAClB,eAAQ,SAAS,WAAW,UAAW;AACvC,2BAAmB;AACnB,uBAAe,wBAA4B,OAAM,OAAO,eAAmB,OAAM;AACjF,8DAAsD,OAAM;AAC5D,mCAA2B,gBAA4B,QAAQ,aAAa;AAC5E,iCAAyB;AACzB,0BAAkB,KAAK,IAAI;AAC3B,0BAAkB,KAAK,IAAI;AAC3B,0BAAkB,WAAW,KAAK,IAAI,OAAM,QAAQ;AACpD,4BAAoB,GAAG,WAAW,OAAO;AACrC,8BAAoB,WAAW,aAAa,cAAc;AAC1D,yBAAe,GAAG,MAAM,aAAa;AACjC,8BAAkB,MAAO,cAAa;AACtC,2BAAe,GAAG,MAAM,YAAY;AAChC,gCAAkB,MAAM;AACxB,iCAAmB,GAAG,UAAU,aAAa;AACzC,gCAAe,CAAC,OAAO,KAAK,KAAK;AACjC,0BAAU,QAAO;AACjB,0BAAU,QAAO;AAEjB,6BAAc,KAAI,WAAW,YAAa,KAAI,WAAW;AACzD,6BAAc,KAAI,WAAW,YAAa,KAAI,WAAW;AACzD,yBAAS,KAAK,MAAM,SAAS;AAC7B,yBAAS,KAAK,MAAM,SAAS;AAC7B,kCAAkB;AAClB,oBAAI,OAAO,cAAc;AACrB,sBAAI,YAAY;AACZ,kCAAc;;AAGd,kCAAc,UAAU;;;AAIhC,oBAAI,UAAU,KAAK,SAAS,cAAc,UAAU,KAChD,SAAS;AAET,2CAAyB,SAAU,cAAa;AAChD,2CAAyB,SAAS;AAClC,mCAAiB,cAAc,mBAAmB,mBAAmB;AACrE,gCAAc,UAAU;;AAE5B,+BAAe,cAAc,YAAY,YAAY;AACrD,uBAAO,UAAU;;;;;AAKjC,uBAAe,WAAW,MAAM,QAAQ,OAAM,OAAO,OAAM;AAC3D,eAAO,CAAE,QAAQ,OAAO,OAAM,OAAO,OAAO,OAAM;;;ACvE1D;;;;;;;;;;;;;;;;AAkBO,oBAAc,gBAAgB,OAAO;AAExC,oBAAa,KAAK,MAAM;AACxB,UAAI,KAAK,QAAO;AACZ,eAAO,KAAK,MAAM;iBAEb,KAAK,QAAO;AACjB,eAAO,KAAK,KAAK;;AAGjB,YAAI,QAAO,MAAQ;AACf,iBAAO;;AAGP,iBAAO,QAAO;;;;AAInB,wBAAoB;MACvB,YAAY;MACZ,aAAa;MACb,YAAY;;ACvChB;;;;;;;;;;;;;;;;AAkBA,uBAAmB;AACnB,kBAAc;AACP,mBAAa,g
BAAgB,MAAM;AACtC,UAAI,MAAM;AACN,eAAO,QAAQ;;AAGf,eAAO,aAAc,MAAK,IAAI,MAAM;;;AAGrC,uBAAmB;MACtB,YAAY;MACZ,aAAa;MACb,YAAY;;AC/BhB;;;;;;;;;;;;;;;;AAkBO,sBAAgB,gBAAgB,UAAS,QAAQ,IAAK,KAAI,KAAK,IAAI,CAAC;AACpE,2BAAsB;MACzB,YAAY;MACZ,aAAa;MACb,YAAY;;ACtBhB;;;;;;;;;;;;;;;;AAkBO,mBAAa,gBAAgB,MAAM;AACtC,UAAI,KAAK;AACL,eAAO;iBAEF,KAAK;AACV,eAAO;;AAGP,eAAO;;;AAGR,uBAAmB;MACtB,YAAY;MACZ,aAAa;MACb,YAAY;;AChChB;;;;;;;;;;;;;;;;AAkBO,kBAAY,gBAAgB,MAAK,QAAQ,KAAK,IAAI;AAClD,uBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY;;ACtBhB;;;;;;;;;;;;;;;;AAkBO,mBAAa,gBAAgB,MAAM,QAAQ,KAAK,KAAK;AACrD,uBAAmB;MACtB,YAAY;MACZ,aAAa;MACb,YAAY;;ACtBhB;;;;;;;;;;;;;;;;AAsBA,sBAAgB;AAChB,sBAAkB,KAAK,IAAI,aAAW;AAC/B,uBAAiB,gBAAgB,UAAU;AAG9C,uBAAiB,KAAK,CAAC;AAGvB,uBAAiB,KAAK;AACtB,mBAAa,KAAK,IAAI;AACtB;AACA,UAAI;AACA,iBAAS;iBAEJ;AACL,iBAAS;;AAGT,iBAAS,KAAK,IAAI,IAAM;;AAE5B,aAAO;;AAEJ,2BAAuB;MAC1B,YAAY;MACZ,aAAa;MACb,YAAY;;AC/ChB;;;;;;;;;;;;;;;;AAmBO;AACH,aAAQ,QAAQ,OAAO,qBAAY;AACnC,aAAQ,KAAM;AACd,aAAQ,QAAS;AACjB,uBAAiB,GAAG;AACpB,oBAAc,EAAE,MAAM;AACtB,uBAAiB,IAAI,MAAM;AAC3B,mBAAa,GAAG,IAAI,SAAS,QAAQ;AACjC,iBAAS,KAAK,EAAE,MAAM,KAAK;;AAE/B,qBAAe,SAAQ,KAAK,IAAI,EAAE,QAAQ;AAC1C,qBAAe,cAAc,QAAQ,EAAE,OAAO,EAAE,OAAO,MAAM;AAC7D,qBAAe,SAAQ,MAAM,QAAQ,UAAU,EAAE;AACjD,aAAO,CAAE,QAAQ,OAAO,UAAU,OAAO,EAAE;;AAExC,6BAAwB;MAC3B,YAAY;MACZ,aAAa;MACb,YAAY;;ACrChB;;;;;;;;;;;;;;;;AAqBO;AACH,aAAQ,QAAQ,mBAAS,SAAU;AACnC,aAAQ,KAAM;AACd,aAAQ,YAAY,YAAa;AACjC,uBAAiB,CAAC,IAAI;AACtB,oBAAa,eAAmB;AAChC,+BAAyB,CAAC,CAAC,GAAG;AAC9B,uBAAiB,KAAK,GAAG;AACzB,mBAAa,IAAI,WAAW,QAAQ,IAAI,EAAE,MAAM,QAAQ,EAAE;AACtD,yBAAiB,KAAK,CAAC,GAAG;;AAE9B,sBAAgB,aAAY,WAAW;QACnC,QAAQ,CAAE;QACV,SAAA;QACA,OAAO,CAAE,UAAU,kBAAkB,eAAe;;AAExD,kCAA4B,aAAyB,QAAQ,OAAO,YAAY,OAAM;AACtF,gDAA0C,aAAyB,oBAAoB,QAAQ,WAAW,QAAQ;AAClH,2BAAqB,qBAAiC,QAAQ,OAAO,YAAY,OAAM;AACvF,4BAAsB,CAAE,GAAG;AAC3B,2BAAqB,CAAE,OAAO;AAC9B,8BAAwB,UAAQ,CAAE,QAAQ,eAAe,SAAA,UAAS,OAAO;AACzE,8BAAwB,CAAE,GAAG;AAC7B,6BAAuB,CAAE,MAAM;AAC/B,uBAAiB,YAAU,CAAE,QAAQ,iBAAiB,SAAA,UAAS,OAAO;AACtE,kCAA4B,CAAE,GAAG;AACjC,iCAA2B,CAAE,OAAO;AACpC,qBAAe,UAAQ,CAAE,QAAQ,qBAAqB,SAAA,UAAS,OAAO;AACtE,eAAQ,8BAA8B;AACtC,eAAQ,8BAA8B;AACtC,eAAQ,8BAA8B;AACtC,aAAO;;AAEJ,iCAA6B;MAChC,YAAY;MACZ,aAAa;MACb,YAAY;;ACzDhB;;;;;;;;;;;;;;;;AAkBO,mBAAa,gBAAgB,OAAM,QAAQ,KAAK,KAAK;AACrD,wBAAmB;MACtB,YAAY;MACZ,aAAa;MACb,YAAY;;ACtBhB;;;;;;;;;;;;;;;;AAkBO,0BAAqB;MACxB,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ;AACnB,eAAQ,KAAM;AACd,2BAAmB;AACnB,yBAAiB,GAAG;AACpB,uBAAe,WAAW,KAAK,IAAI,EAAE,QAAQ;AAC7C,0BAAkB,IAAI,aAAa,OAAO;AAC1C,qBAAa,GAAG,IAAI,OAAO,QAAQ,EAAE;AACjC,wBAAc,OAAO;AACrB,oBAAU,KAAK,QAAQ;;AAE3B,uBAAe,WAAW,MAAM,WAAW,EAAE,OAAO,EAAE;AACtD,eAAO,CAAE,QAAQ,OAAO,EAAE,OAAO,OAAO,EAAE;;;AChClD;;;;;;;;;;;;;;;;AAkBO,mBAAa,gBAAgB,OAAM;AACtC,wBAAkB;AAClB,UAAI,MAAM;AACN,eAAO;;AAGP,eAAO,KAAK,IAAI,IAAI,UAAU;;;AAG/B,uBAAmB;MACtB,YAAY;MACZ,aAAa;MACb,YAAY;;AC9BhB;;;;;;;;;;;;;;;;AAkBO,kBAAY,gBAAgB,KAAK,QAAQ,KAAK,IAAI;AAClD,sBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY;;ACtBhB;;;;;;;;;;;;;;;;AAkBO,mBAAa,gBAAgB,OAAM,QAAQ,KAAK,KAAK;AACrD,wBAAmB;MACtB,YAAY;MACZ,aAAa;MACb,YAAY;;ACtBhB;;;;;;;;;;;;;;;;AAmBO;AACH,aAAQ,QAAQ,OAAO,qBAAY;AACnC,aAAQ,QAAS;AACjB,aAAQ,KAAM;AACd,uBAAiB,GAAG;AACpB,qBAAe,SAAQ,KAAK,IAAI,EAAE,QAAQ;AAC1C,aAAQ,cAAc,aAAa,WAAY,WAAW,QAAQ,MAAM,EAAE,OAAO,EAAE;AACnF,aAAO;QACH,SAAQ,eAAe,aAAa,EAAE,OAAO;QAC7C,SAAQ,eAAe,CAAC,QAAQ,SAAS,SAAS;;;AAGnD,yBAAqB;MACxB,YAAY;MACZ,aAAa;MACb,YAAY;;AClChB;;;;;;;;;;;;;;;;AA6GA,2BAAsB;MAClB;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;
MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;;AAEJ,+BAA2B;AACvB,sBAAe;;ACxMnB;;;;;;;;;;;;;;;;ACAA;;;;;;;;;;;;;;;;AAgBA,qBAAiB;AACjB,6BAAyB;MACrB,OAAO;MACP,WAAW;MACX,oBAAoB;MACpB,uBAAuB;MACvB,OAAO;MACP,SAAS;MACT,8BAA8B;;AAE3B;AACH,aAAO,SAAS;;AAEb;AACH,eAAS,gBAAgB;;AAEtB;AACH,UAAI,CAAE,iBAAgB;AAClB,uBAAe,yBAAyB;AACxC,YAAI,WAAW;AACX,mBAAS,gBAAgB;;AAGzB,kBAAQ,IAAI,2CAA2C;AACvD,iBAAO;;;AAGf,iBAAW,SAAS;AACpB,UAAI,GAAG;AACH,eAAO,SAAS;AAChB,eAAO,gBAAgB;;AAE3B,SAAG,QAAQ,GAAG;AACd,SAAG,QAAQ,GAAG;AACd,SAAG,QAAQ,GAAG;AACd,SAAG,QAAQ,GAAG;AACd,SAAG,QAAQ,GAAG;AACd,SAAG,QAAQ,GAAG;AACd,SAAG,OAAO,GAAG;AACb,SAAG,OAAO,GAAG;AACb,SAAG,SAAS,GAAG;AACf,aAAO,SAAS;;AAEpB;AACI,UAAI,OAAO,oBAAoB,eAAe,iBAAiB;AAC3D,eAAO,IAAI,gBAAgB,KAAK;iBAE3B,OAAO,aAAa;AACzB,eAAO,SAAS,cAAc;;AAG9B,cAAM,IAAI,MAAM;;;AAGxB;AACI,UAAI,iBAAiB,KAAK,iBAAiB;AACvC,cAAM,IAAI,MAAM;;AAEpB,qBAAe,aAAa;AAC5B,aAAO,iBAAiB,oBAAoB;AACxC,WAAG;AACH,eAAO,SAAS;SACjB;AACH,UAAI,iBAAiB;AACjB,eAAQ,OAAO,WAAW,SAAS,qBAC/B,OAAO,WAAW,sBAAsB;;AAEhD,aAAO,OAAO,WAAW,UAAU;;ACnFvC;;;;;;;;;;;;;;;;AAiBO;AACN,IAAA;AAgBG,qBAAc,eAAc,WAAW,KAAK;AAiB5C,qBAAc,eAAc,kBAAkB,KAAK;OACpD,iBAAkB,iBAAgB;AAC9B;AACN,IAAA;AACG,oBAAa,cAAa,YAAY,KAAK;AAC3C,oBAAa,cAAa,YAAY,KAAK;AAC3C,oBAAa,cAAa,YAAY,KAAK;AAC3C,oBAAa,cAAa,cAAc,KAAK;OAC9C,gBAAiB,gBAAe;AAC5B;AACN,IAAA;AACG,2BAAoB,qBAAoB,sBAAsB,KAAK;AACnE,2BAAoB,qBAAoB,sBAAsB,KAAK;AACnE,2BAAoB,qBAAoB,8BAA8B,KAAK;AAC3E,2BAAoB,qBAAoB,wBAAwB,KAAK;AACrE,2BAAoB,qBAAoB,wBAAwB,KAAK;OACtE,uBAAwB,uBAAsB;AAC1C;AACH,aAAO,CAAC,SAAS;;AAEd;AACH,aAAO,aAAa;;AAEjB;AACH,aAAO,CAAC,UAAU,GAAG;;AAKlB;AACH,mBAAa,eAAmB;AAChC,2BAAqB,KAAK,KAAK,OAAO;AACtC,aAAO,qBAAyB;;AAE7B;AACH,UAAI,eAAe,uBAAuB;AACtC,cAAM,IAAI,MAAM,iBAAiB,uCAC1B;;AAEX,aAAO,eAAe;;AAEnB;AACH,2BAAqB,cAAc,SAAS,WAAW;AACvD,UAAI,OAAO,SAAS;AAChB,cAAM,IAAI,MAAM,kBAAkB,OAAO,sBAAsB;;AAEnE,gBAAU;AACV,qBAAe,GAAG,MAAM,cAAc,QAAQ,OAAO;AACjD,qBAAa,GAAG,IAAI,UAAU;AAC1B,iBAAO,SAAS,cAAc,MAAM;;;;AAIzC;AACH,aAAO;QACH,KAAK,IAAI,GAAG,KAAK,KAAK,UAAU;QAAK,KAAK,IAAI,GAAG,KAAK,KAAK,OAAO;;;AAGnE;AACH,qBAAe,uCAAuC,MAAM;AAC5D,aAAO,IAAI,IAAI;;AAEZ;AAIH,oBAAc;AACd;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,UAAI,OAAM,UAAU,qBAAqB;AACrC,8BAAsB,MAAM;AAC5B,kCAA0B,MAAM;AAChC,wCAAgC,MAAM;AACtC,oCAA4B,MAAM;AAClC,6BAAqB,MAAM;AAC3B,oCAA4B;AAC5B,6BAAqB;AACrB,+BAAuB,MAAM;AAC7B,2BAAmB,MAAM;;AAGzB,8BAAsB,GAAG;AACzB,kCAA0B,GAAG;AAC7B,wCAAgC,GAAG;AACnC,oCAA4B,MAAM;AAClC,6BAAqB,GAAG;AACxB,oCAA4B;AAC5B,6BAAqB;AACrB,+BAAuB,6BAA6B,OAChD,0BAA0B,iBAC1B;AACJ,2BAAmB,GAAG;;AAE1B,8BAAwB,GAAG;AAC3B,aAAO;QACH;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;;;ACnKR;;;;;;;;;;;;;;;;AAmBO;AACH,0BAAoB;AACpB,UAAI,OAAM,QAAQ;AACd,wBAAgB;;AAEpB,aAAO;;AAEX;AACI,oBAAc,GAAG;AACjB,UAAI,UAAU,GAAG;AACb,cAAM,IAAI,MAAM,kBAAkB,qBAAqB,IAAI;;;AAInE,wBAAoB;AACpB,wBAAoB;AACb;AACH,UAAI,OAAM,QAAQ,mCAAmC,QAAQ,KACxD,cAAc,KAAK,IAAI,QAAQ,KAAK,IAAI,OAAO;AAChD,eAAO;;AAEX,aAAO;;AAEJ;AACH,cAAQ;aACC,GAAG;AACJ,iBAAO;aACN,GAAG;AACJ,iBAAO;aACN,GAAG;AACJ,iBAAO;aACN,GAAG;AACJ,iBAAO;aACN,GAAG;AACJ,iBAAO;aACN,GAAG;AACJ,iBAAO;aACN,GAAG;AACJ,iBAAO;;AAEP,iBAAO,sBAAsB;;;AAGlC;AACH,aAAO,YAAY,IAAI,MAAM,GAAG,aAAa,gBAAgB,gBAAgB,gBAAgB;;AAE1F;AACH,2BAAqB,YAAY,IAAI,MAAM,GAAG,aAAa,GAAG,gBAAgB;AAC9E,mBAAa,IAAI,MAAM,GAAG,aAAa,cAAc;AACrD,mBAAa,IAAI,MAAM,GAAG,cAAc;AACxC,UAAI,GAAG,mBAAmB,cAAc,GAAG,oBAAoB;AAC3D,gBAAQ,IAAI,GAAG,iBAAiB;AAChC,cAAM,IAAI,MAAM;;AAEpB,aAAO;;AAEJ;AACH,6BAAuB,YAAY,IAAI,MAAM,GAAG,aAAa,GAAG,kBAAkB;AAClF,mBAAa,I
AAI,MAAM,GAAG,aAAa,gBAAgB;AACvD,mBAAa,IAAI,MAAM,GAAG,cAAc;AACxC,UAAI,GAAG,mBAAmB,gBAAgB,GAAG,oBAAoB;AAC7D,kCAA0B,sBAAsB,GAAG,iBAAiB;AACpE,cAAM,IAAI,MAAM;;AAEpB,aAAO;;AAEX,4BAAwB;AACxB;AACI,oCAA8B,gBAAgB,KAAK;AACnD,UAAI,yBAAyB;AACzB,gBAAQ,IAAI,wCAAwC;AACpD,gBAAQ,IAAI;AACZ;;AAEJ,yBAAmB,CAAC,sBAAsB;AAC1C,0BAAoB,aAAa,MAAM;AACvC,mBAAY,YAAY,OAAO,WAAW,SAAS;AACnD,mCAA6B,YAAY,IAAI,uBAAsB,UAAe,eAAa,GAAG,YAAY,QAAO;AACrH,0BAAoB;AACpB,mBAAa,GAAG,IAAI,qBAAqB,QAAQ;AAC7C,wBAAgB,KAAK,IAAI,qBAAqB,GAAG,QAAQ;;AAE7D,+BAAyB,qBAAqB,MAAM,GAAG,aAAa;AACpE,wBAAkB,qBAAqB,MAAM,aAAa,GAAG;AAC7D,8BAAwB,qBAAqB,MAAM;AACnD,cAAQ,IAAI,iBAAiB,KAAK;AAClC,cAAQ,IAAI,cAAc,MAAM,MAAM;AACtC,cAAQ,IAAI,MAAM,UAAc,UAAU,IAAI,kBAAkB;AAChE,cAAQ,IAAI,gBAAgB,KAAK;;AAE9B;AACH,aAAO,YAAY,IAAI,MAAM,GAAG,iBAAiB;;AAE9C;AACH,mBAAa,IAAI,MAAM,GAAG,YAAY;AACtC,UAAI,GAAG,oBAAoB,SAAS,GAAG,iBAAiB;AACpD,gBAAQ,IAAI,GAAG,kBAAkB;AACjC,cAAM,IAAI,MAAM;;;AAGjB;AACH,mBAAa,IAAI,MAAM,GAAG,gBAAgB;AAC1C,UAAI,GAAG,oBAAoB,SAAS,GAAG,qBAAqB;AACxD,gBAAQ,IAAI,GAAG,kBAAkB;AACjC,cAAM,IAAI,MAAM;;;AAGjB;AACH,sBAAe,YAAY,IAAI,MAAM,GAAG,gBAAgB;AACxD,mBAAa,IAAI,MAAM,GAAG,WAAW,GAAG,cAAc;AACtD,mBAAa,IAAI,MAAM,GAAG,WAAW,GAAG,cAAc,OAAM,GAAG;AAC/D,aAAO;;AAEJ;AACH,sBAAe,YAAY,IAAI,MAAM,GAAG,gBAAgB;AACxD,mBAAa,IAAI,MAAM,GAAG,WAAW,GAAG,sBAAsB;AAC9D,mBAAa,IAAI,MAAM,GAAG,WAAW,GAAG,sBAAsB,OAAM,GAAG;AACvE,aAAO;;AAEJ;AACH,UAAI,OAAM,UAAU,qBAAqB;AACrC,eAAO;;AAEX,aAAO;;AAEJ;AACH,aAAO,YAAY,IAAI,MAAM,GAAG,iBAAiB;;AAE9C;AACH,6BAAuB,OAAM,UAAU;AACvC,UAAK,SAAS,KAAO,UAAU;AAC3B,0BAAkB,IAAI,SAAS;AAC/B,cAAM,IAAI,MAAM,4BAA4B,YAAY;;AAE5D,UAAK,QAAQ,kBAAoB,SAAS;AACtC,0BAAkB,IAAI,SAAS;AAC/B,qBAAY,IAAI,kBAAkB;AAClC,cAAM,IAAI,MAAM,4BAA4B,YACxC,uDAAuD,OAAM;;;AAGlE;AACH,aAAO,YAAY,IAAI,MAAM,GAAG,qBAAqB;;AAElD;AACH,kBAAY,GAAG,kBAAkB,SAAS;AAC1C,UAAI,QAAQ;AAGR,eAAO;;AAEX,mBAAa,IAAI,MAAM,GAAG,WAAW,GAAG,cAAc;AACtD,mBAAa,IAAI,MAAM,GAAG,oBAAoB,KAAK,qBAAqB,GAAG,OAAO,OAAO,mBAAmB;AAC5G,mBAAa,IAAI,MAAM,GAAG,wBAAwB;AAClD,aAAO;;AAEJ;AACH,0BAAoB,IAAI;AACxB,mBAAa,IAAI,MAAM,GAAG,cAAc,GAAG,WAAW;AACtD,mBAAa,IAAI,MAAM,GAAG,YAAY,GAAG,YAAY;;AAElD;AACH,0BAAoB,IAAI;AACxB,mBAAa,IAAI,MAAM,GAAG,cAAc,GAAG,WAAW;AACtD,mBAAa,IAAI,MAAM,GAAG,YAAY,GAAG,YAAY;;AAElD;AACH,aAAO,YAAY,IAAI,MAAM,GAAG,mBAAmB,SAAS,cAAc,cAAc,cAAc;;AAEnG;AACH,aAAO,GAAG,mBAAmB,SAAS;;AAEnC;AACH,mBAAa,IAAI,MAAM,gBAAgB,IAAI,SAAS;AACpD,mBAAa,IAAI,MAAM,GAAG,UAAU,wBAAwB;;AAEzD;AACH,mBAAa,IAAI,MAAM,GAAG,gBAAgB,GAAG,aAAa;AAC1D,mBAAa,IAAI,MAAM,GAAG,SAAS,GAAG,GAAG,GAAG,OAAO,OAAO,GAAG,OAAO;AACpE,mBAAa,IAAI,MAAM,GAAG,QAAQ,GAAG,GAAG,GAAG,OAAO,OAAO,GAAG,OAAO;;AAEhE;AACH,mBAAa,IAAI,MAAM,GAAG,gBAAgB,GAAG,aAAa;AAC1D,mBAAa,IAAI,MAAM,GAAG,qBAAqB,GAAG,aAAa,GAAG,mBAAmB,GAAG,YAAY,SAAS;;AAE1G;AACH,mBAAa,IAAI,MAAM,GAAG,gBAAgB,GAAG,aAAa;AAC1D,mBAAa,IAAI,MAAM,GAAG,qBAAqB,GAAG,aAAa,GAAG,mBAAmB,GAAG,YAAY,MAAM;;AAEvG;AACH,sBAAe,GAAG,uBAAuB,GAAG;AAC5C,UAAI,YAAW,GAAG;AACd,cAAM,IAAI,MAAM,gCAAgC,2BAA2B,IAAI;;;AAGhF;AACH,cAAQ;aACC,GAAG;AACJ,iBAAO;aACN,GAAG;AACJ,iBAAO;aACN,GAAG;AACJ,iBAAO;aACN,GAAG;AACJ,iBAAO;;AAEP,iBAAO,iBAAiB;;;AAGpC;AACI,sBAAgB,aAAa,IAAI,MAAM;AACvC,UAAI,WAAW;AACX,cAAM,IAAI,MAAM;;AAEpB,aAAO;;AAEX;AACI,6BAAuB,GAAG,mCAAmC;AAC7D,4BAAsB,cAAc,GAAG;AACvC,UAAI,gBAAgB,GAAG,YAAY,gBAAgB;AAC/C,iCAAyB,2BAA2B;AACpD,cAAM,IAAI,MAAM,0BAA0B;;;AAG3C,6CAAyC;AAC5C,aAAO,eAAmB,MAAM,MAAM,GAAG,MAAM,SAAS;;AAErD;AACH,UAAI,MAAM,WAAW;AACjB,cAAM,MAAM;;AAEhB,aAAO;QACH,MAAM,SAAS,IAAI,MAAM,MAAM,SAAS,KAAK;QAAG,MAAM,MAAM,SAAS;;;AAGtE;AACH,sBAAgB,CAAC,GAAG,GAAG;AACvB,uBAAiB,MAAM,WAAW,KAAM,MAAM,WAAW,KAAK,MAAM,OAAO;AAC3E,UAAI,CAAC;AACD,oBACI,CAAC,YAAY,QAAQ,GAAG,YAAY;;AAE5C,aAAO;;AAEJ,kEAA8D;AACjE,uBAAiB,OAAM,UAAU;AACjC,UAAI;AACA,qB
AAa,aAAa;AAM1B,mBAAW,SAAS,IAAI,UAAU,KAAK,SAAS,SAAS,IACrD,mBAAuB,SAAS,MAChC,SAAS;AAGb,YAAI,SAAS,WAAW;AACpB,qBAAW,CAAC,GAAG,SAAS;;;AAIhC,UAAI,SAAS,WAAW;AACpB,8BAAsB,cAAkB;AACxC,mBAAW,cAAc;;AAE7B,iBAAW,eAAmB;AAC9B,UAAI,SAAS,UAAU,KAAK,QAAQ;AAChC,eAAO,CAAC,GAAG;iBAEN,SAAS,WAAW,KAAK,SAAS,MAAM,cAC7C,SAAS,MAAM;AACf,eAAO;iBAEF,SAAS,WAAW,KAAK,SAAS,KAAK,SAAS,MAAM,cAC3D,SAAS,MAAM;AACf,eAAO,CAAC,SAAS,KAAK,SAAS,IAAI,SAAS;iBAEvC,SAAS,WAAW,KAAK,SAAS,MAAM,cAC7C,SAAS,KAAK,SAAS,MAAM;AAC7B,eAAO,CAAC,SAAS,IAAI,SAAS,KAAK,SAAS;iBAEvC,SAAS,WAAW,KACzB,SAAS,KAAK,SAAS,KAAK,SAAS,MAAM,cAC3C,SAAS,MAAM;AACf,eAAO,CAAC,SAAS,KAAK,SAAS,KAAK,SAAS,IAAI,SAAS;iBAErD,SAAS,WAAW,KAAK,SAAS,MAAM,cAC7C,SAAS,KAAK,SAAS,KAAK,SAAS,MAAM;AAC3C,eAAO,CAAC,SAAS,IAAI,SAAS,KAAK,SAAS,KAAK,SAAS;;AAG1D,YAAI;AAMA,2BAAiB,YAAY;AAC7B,qBAAW,UAAU;AACrB,cAAI,SAAS;AACT,aAAC,MAAM,QAAQ,YAAY;;AAE/B,iBAAO,WAAY,QAAO,KAAM,QAAO;AACvC,iBAAO,qBAAyB,MAAM,IAAI,OAAK,IAAI;;AAEvD,eAAO,qBAAyB;;;AAGxC;AACI,aAAO,IAAI,MAAM;;AAMd;AACH,eAAS,OAAO,MAAM;AACtB,eAAS,OAAO,MAAM;AACtB,UAAI,aAAiB,QAAQ;AACzB,eAAO;;AAEX,UAAI,CAAC,OAAO,UAAU,CAAC,OAAO;AAC1B,eAAO;;AAEX,UAAI,OAAO,OAAO,KAAK,OAAO,OAAO,KAAK,OAAO,OAAO,KACpD,OAAO,OAAO;AACd,eAAO;;AAEX,UAAI,OAAO,WAAW,OAAO;AACzB,2BAAmB,OAAO,MAAM,IAAI;AACpC,2BAAmB,OAAO,MAAM,IAAI;AACpC,YAAI,eAAe;AACf,iBAAO;;AAEX,YAAI,OAAO,eAAe,OAAO,eAC5B,QAAO,OAAO,KAAK,OAAO,OAAO;AAClC,iBAAO;;;AAGf,aAAO,OAAO,OAAO,OAAO,MAAM,OAAO,OAAO,OAAO,OAAO,OAAO;;AAKzE;AACA;AACO;AACH,UAAI,oBAAoB;AACpB,mBAAW,gBAAgB;AAC3B,2BAAmB,GAAG,aAAa,GAAG;;AAE1C,aAAO;;AAEJ;AACH,yBAAmB;;AAEhB;AACH,+BAAyB;;AAEtB;AACH,UAAI,0BAA0B;AAC1B,mBAAW,gBAAgB;AAC3B,iCAAyB,GAAG,aAAa,GAAG;;AAGhD,aAAO,KAAK,IAAI,IAAI;;AAEjB;AACH,UAAI,iBAAiB;AACjB,eAAO;;AAEX;AACA,iBAAW,gBAAgB;AAC3B,UAAI,aAAa,IAAI,sCACjB,iBAAiB;AACjB,4BAAoB;iBAEf,aAAa,IAAI;AACtB,4BAAoB;;AAGpB,4BAAoB;;AAExB,aAAO;;AAEJ;AACH,kBAAY,GAAG,aAAa;AAC5B,aAAO,OAAO;;AAEX;AACH;AACI,mBAAW,gBAAgB;AAC3B,YAAI,MAAM;AACN,iBAAO;;;AAIX,gBAAQ,IAAI,sCAAsC;AAClD,eAAO;;AAEX,aAAO;;AAEJ;AACH,UAAI,iBAAiB;AACjB,eAAO;;AAEX,iBAAW,gBAAgB;AAC3B,UAAI,iBAAiB;AACjB,YAAI,CAAC,aAAa,IAAI;AAClB,iBAAO;;;AAIX,YAAI,CAAC,aAAa,IAAI;AAClB,iBAAO;;;AAGf,oCAA8B,uCAAuC;AACrE,aAAO;;AAWJ;AACH,UAAI,iBAAiB;AACjB,eAAO;;AAEX,iBAAW,gBAAgB;AAC3B,UAAI,iBAAiB;AACjB,YAAI,CAAC,aAAa,IAAI;AAClB,iBAAO;;AAEX,YAAI,CAAC,aAAa,IAAI;AAClB,iBAAO;;;AAIX,YAAI,aAAa,IAAI;AACjB,iBAAO,uCAAuC;;AAElD,wCAAgC;AAChC,YAAI,aAAa,IAAI;AACjB,4CAAkC,GAAG,aAAa;AAClD,iBAAO,2CAA2C,IAAI;;AAE1D,eAAO;;AAEX,oCAA8B,uCAAuC;AACrE,aAAO;;AAEX;AACI,wBAAkB,iBAAiB;AACnC,sBAAgB,GAAG;AACnB,SAAG,YAAY,GAAG,YAAY;AAC9B,oBAAc;AACd,qBAAe;AACf,SAAG,WAAW,GAAG,YAAY,GAAG,UAAU,qBAAqB,OAAO,QAAQ,GAAG,UAAU,oBAAoB,UAAU,kBAAkB;AAC3I,0BAAoB,GAAG;AACvB,SAAG,gBAAgB,GAAG,aAAa;AACnC,SAAG,qBAAqB,GAAG,aAAa,GAAG,mBAAmB,GAAG,YAAY,SAAS;AACtF,oCAA8B,GAAG,uBAAuB,GAAG,iBAAiB,GAAG;AAC/E,SAAG,YAAY,GAAG,YAAY;AAC9B,SAAG,gBAAgB,GAAG,aAAa;AACnC,SAAG,cAAc;AACjB,SAAG,kBAAkB;AACrB,aAAO;;AAEX;AAGI,wBAAkB,iBAAiB,IAAI;AACvC,sBAAgB,GAAG;AACnB,SAAG,YAAY,GAAG,YAAY;AAC9B,oBAAc;AACd,qBAAe;AACf,SAAG,WAAW,GAAG,YAAY,GAAG,UAAU,yBAAyB,OAAO,QAAQ,GAAG,UAAU,oBAAoB,UAAU,sBAAsB;AACnJ,0BAAoB,GAAG;AACvB,SAAG,gBAAgB,GAAG,aAAa;AACnC,SAAG,qBAAqB,GAAG,aAAa,GAAG,mBAAmB,GAAG,YAAY,SAAS;AACtF,oCAA8B,GAAG,uBAAuB,GAAG,iBAAiB,GAAG;AAC/E,SAAG,YAAY,GAAG,YAAY;AAC9B,SAAG,gBAAgB,GAAG,aAAa;AACnC,SAAG,cAAc;AACjB,SAAG,kBAAkB;AACrB,aAAO;;AAEJ;AACH,UAAI,iBAAiB;AACjB,eAAO;;AAEX,iBAAW,gBAAgB;AAE3B,wBAAkB,GAAG,aAAa;AAClC,aAAO;;AAEJ;AACH,UAAI,CAAC,MAAM,QAAQ;AACf,kBAAS,CAAC;;AAEd,cAAO,QAAQ;AACX,YAAI,KAAK;AACL,kBAAY,EAAE,UAAU,aAAa,MAAM,GAAG;;;;AChhB1D;;;;;;;;;;;;;;;;AAkBA,kBAAY;AAOZ,UAAI,aAAa,aAAa,MAAM,MAAI,UAAU,mBAAmB;AAErE,UAAI,aAAa,i
BAAiB;AAC9B,UAAI,sBAAsB;AACtB,eAAO;iBAEF,sBAAsB;AAC3B,eAAO;;AAEX,aAAO;;AAGX,UAAI,aAAa,kCAAkC,MAAM;AACzD,UAAI,aAAa,0BAA0B,MAAM,MAAI,IAAI,qBAAqB;AAE9E,UAAI,aAAa,qBAAqB,MAAM;AAE5C,UAAI,aAAa,4BAA4B,MAAM;AAEnD,UAAI,aAAa,cAAc,MAAM,MAAI,QAAQ;AAEjD,UAAI,aAAa,4BAA4B,MAAM,MAAI,QAAQ;AAE/D,UAAI,aAAa,mBAAmB,MAAM,MAAI,QAAQ;AAGtD,UAAI,aAAa,4BAA4B,MAAM;AAEnD,UAAI,aAAa,gCAAgC,MAAM,MAAI,QAAQ;AAEnE,UAAI,aAAa,+BAA+B,MAAM,MAAI,QAAQ;AAElE,UAAI,aAAa,+BAA+B,MAAM,MAAI,QAAQ;AAElE,UAAI,aAAa,+BAA+B,MAAM,MAAI,QAAQ;AAElE,UAAI,aAAa,qBAAqB,MAAM,MAAI,QAAQ;AAExD,UAAI,aAAa,uBAAuB,MAAM,MAAI,QAAQ;AAE1D,UAAI,aAAa,qBAAqB,MAAM,MAAI,QAAQ;AAExD,UAAI,aAAa,0BAA0B,MAAM,uBAAuB,MAAI,UAAU;AAEtF,UAAI,aAAa,gCAAgC,MAAM,uBAAuB,MAAI,UAAU;AAS5F,UAAI,aAAa,gDAAgD;AAC7D,2BAAqB,MAAI,UAAU;AACnC,UAAI,iBAAiB;AACjB,eAAO;;AAEX,aAAO,kCAAkC;;AAM7C,UAAI,aAAa,iDAAiD,MAAM,MAAI,UAAU,kDAAkD,KACpI,CAAC;AAIL,UAAI,aAAa,gCAAgC,MAAM,mCAAmC,MAAI,UAAU;AAKxG,UAAI,aAAa,gCAAgC;AAC7C,aAAO,MAAI,QAAQ,8BACf,QACA,MAAI,QAAQ;;AAMpB,UAAI,aAAa,gCAAgC,MAAM,8BAA8B,MAAI,UAAU;AAEnG,UAAI,aAAa,2BAA2B,MAAM,oBAAoB,MAAI,UAAU;AAIpF,UAAI,aAAa,6BAA6B;AAK1C,0BAAoB,MAAI,QAAQ;AAChC,aAAO,cAAc,IAAI;;AAS7B,UAAI,aAAa,kCAAkC;AAC/C,aAAO;OACR;AACC,UAAI,aAAY,KAAK,eAAc;AAC/B,cAAM,IAAI,MAAM,8FACsB;;;ACtI9C;;;;;;;;;;;;;;;;AAoBA,WAAQ,iCAAiC,qBAAqB,uBAAuB,qBAAqB,yBAAyB,yBAAyB,qBAAqB,qBAAqB,+BAA+B,yBAAyB,yBAAyB,qBAAqB,iCAAiC,6BAA+B;ACpB5W;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,cAAc;AACnB,aAAK,cAAc;AACnB,aAAK,gBAAgB,OAAO,IAAI,UAAU,IAAI;AAC9C,yBAAiB;AAEjB,aAAK,cAAc,QAAQ;AACvB,mBAAS,KAAK,UAAU,kBAAiB;;AAG7C,4BAAkB,KAAK,cAClB,IAAI;AACL,iBAAO,IAAI;WAEV,KAAK;AACV,aAAK,WAAW;;UAEd,SAAS,KAAK;;yBAEC;;;;;;ACpCzB;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,cAAc;AACnB,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,aAAK,cAAc;AACnB,aAAK,gBAAgB,OAAO,IAAI,UAAU,IAAI;AAC9C,yBAAiB;AAEjB,aAAK,cAAc,QAAQ;AACvB,mBAAS,KAAK,SAAS,kBAAiB;;AAG5C,4BAAkB,KAAK,cAClB,IAAI;AACL,iBAAO,IAAI;WAEV,KAAK;AACV,aAAK,WAAW;;UAEd,SAAS,KAAK;;wBAEA;;;;;;ACtCxB;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC;AACtB,eAAQ,YAAY,WAAW,WAAY;AAC3C,YAAI,CAAC;AACD,eAAK,cAAc,KAAK;;AAE5B,aAAK,cAAc,CAAC,WAAW;AAC/B,uBAAgB,QAAO,QAAS,MAAM;AACtC,6BAAqB,YACjB,kBACA;AACJ,aAAK,WAAW;;;;;kCAKU;;;;;8BAKJ;wBACN;;0BAEE;;;;;;;;;;ACzC1B;;;;;;;;;;;;;;;;AAgBO;AACH,aAAO,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK,MAAM,GAAG,MAAM,IAAI,OAAK,GAAG,QAAQ;;AAEtE;AACH,UAAI,SAAS;AACT,eAAO,CAAC;;AAEZ,aAAO,eAAe,MAAM;;AAEzB;AACH,UAAI,SAAS;AACT,eAAO;;AAEX,oBAAa;AACb,mBAAa,GAAG,IAAI,MAAM;AACtB,mBAAU,KAAK;AACf,YAAI,IAAI,OAAO;AACX,qBAAU;;;AAGlB,aAAO;;ACpCX;;;;;;;;;;;;;;;;AAiBO;AACH;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,UAAI,OAAM,UAAU,qBAAqB;AACrC,mBAAU;AACV,oBAAY;AACZ,oBAAY;AACZ,oBAAY;AACZ,oBAAY;AACZ,iBAAS;AACT,uBAAe;AAKf,2BAAmB;;;;;;;;;;;;AAcnB,2BAAmB;AACnB,sBAAc;;;;;;;;;;;AAYd,mBAAU;AACV,oBAAY;AACZ,oBAAY;AACZ,oBAAY;AACZ,oBAAY;AACZ,iBAAS;AACT,uBAAe;AAEf,2BAAmB;;;;;;;;;AASnB,2BAAmB;;;;;;;;;;AAUnB,sBAAc;;;;;;;;;;AAUlB,aAAO;QACH,SAAA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;;;AClHR;;;;;;;;;;;;;;;;AAsBO,yEAAmE;AACtE,sBAAgB,gBAAoB;AACpC,aAAO,QACF,IAAI;AACL,sBAAc,OAAO,QAAO,QAAQ,YAAW;AAC/C,sBAAc,MAAM,QAAQ,SAAS,IACjC,OAAO,QAAO,IAAI,QAAQ,YAAW,QAAO,QAAQ,WACpD,YAAY,QAAO,QAAQ;AAC/B,eAAO,GAAG,UAAU;SAEnB,KAAK;;AAEd;AACI,UAAI,EAAE,WAAW;AACb,eAAO,GAAG,EAAE;;AAEhB,aAAO,MAAM,EAAE,UAAU,EAAE,KAAK;;AAM7B;AACH,UAAI,EAAE,WAAW,EAAE;AACf,cAAM,IAAI,MAAM,wDACL,EAAE,cAAc,EAAE;;AAEjC,qBAAe;AACf,0BAAoB,KAAK,MAAM,EAAE,SAAS;AAC1C,mCAA6B,EAAE,SAAS;AACxC,mBAAa,GAAG,IAAI,aAAa;AAC7B,uBAAe,EAAE,MAAM,IAAI,GAAG,IAAI,IAAI;AACtC,uBAAe,EAAE,MAAM,IAAI,GAAG,IAAI,IAAI;AACtC,eAAO,KAAK,GAAG,SAAS,YAAY,SAAS;;AAEjD,UAAI,yBAAyB;AACzB,qBAAa,EAAE,MAAM,cAAc;AACnC,qBAAa,EAAE,MAAM,cAA
c;AACnC,YAAI,OAAO,WAAW;AAClB,mBAAS,OAAO,IAAI,OAAK,SAAS;AAClC,mBAAS,OAAO,IAAI,OAAK,SAAS;;AAEtC,eAAO,KAAK,GAAG,SAAS,YAAY,SAAS;;AAEjD,aAAO,OAAO,IAAI,UAAU,OAAO,MAAM,KAAK;;AAK3C;AACH,sBAAgB,gBAAoB,OAAO,IAAI,OAAK,EAAE;AACtD,aAAO;;wBAEa,QAAQ,mBAAmB,QAAQ;;;;AAIpD,iCAA6B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AC/EpC;;;;;;;;;;;;;;;;AAiBA,WAAM,wCAAuB;AAGtB;AACH,6BAAuB;AACvB,iBAAW,QAAQ;AACf,qBAAa,eAAmB,EAAE,UAAU;AAE5C,YAAI,EAAE,UAAU;AACZ,yBAAe,KAAK,iBAAiB,EAAE,OAAO,OAAO,IAAI,IAAI,UAAU;;AAGvE,yBAAe,KAAK,qBAAqB,EAAE;AAC3C,yBAAe,KAAK,qBAAqB,EAAE;;;AAGnD,iCAA2B,eAAe,KAAK;AAC/C,mCAA6B,WACxB,IAAI,OAAK,wBAAwB,GAAG,aAAa,qBACjD,KAAK;AACV,0BAAoB,YAAY;AAChC,mBAAa;AACb,wCAAkC,6BAA6B;AAC/D;AACA;AACA,yBAAmB,gBAAgB;AACnC,UAAI,YAAY;AACZ,gCACI,+BAA+B,YAAY,cAAc;AAC7D,uCAA+B,8BAA8B;;AAG7D,gCACI,yBAAyB,YAAY,cAAc;AACvD,uCAA+B,2BAA2B;;AAE9D,UAAI;AACA,wBAAgB;;AAEpB,qBAAe;QACX;QAAc;QAA2B;QACzC;QAAoB;QAAuB;QAAsB;QACnE,KAAK;AACP,aAAO;;AAEX;AACI,oBAAc,OAAO,UAAU;AAC/B,cAAQ,MAAM;aACL;AACD,iBAAO,iBAAiB;aACvB;AACD,iBAAO,aAAa;aACnB;AACD,iBAAO,aAAa;aACnB;AACD,iBAAO,aAAa;aACnB;AACD,iBAAO,aAAa;aACnB;AACD,iBAAO,aAAa;aACnB;AACD,iBAAO,aAAa;;AAEpB,gBAAM,IAAI,MAAM,GAAG,MAAM;;;AAIrC;AACI,oBAAc,OAAO,UAAU;AAC/B,cAAQ,MAAM;aACL;AACD,iBAAO,uBAAuB;aAC7B;AACD,iBAAO,mBAAmB;aACzB;AACD,iBAAO,mBAAmB;aACzB;AACD,iBAAO,mBAAmB;;AAE1B,iBAAO,mBAAmB;;;AAGtC,gFAA4E;AACxE,gBAAU;AACV,UAAI;AACA,eAAO,2BAA2B;;AAGlC,eAAO,qBAAqB;;AAEhC,sBAAgB,OAAO,UAAU;AACjC,uBAAiB,aAAa;AAC9B,UAAI,QAAQ,UAAU,SAAS;AAC3B,YAAI;AACA,iBAAO,+BAA+B,QAAQ;;AAG9C,iBAAO,yBAAyB,QAAQ;;;AAGhD,aAAO;;AAEX;AACI,cAAQ,SAAS;aACR;AACD,iBAAO;aACN;AACD,iBAAO,wBAAwB,UAAU;aACxC;AACD,iBAAO,wBAAwB,UAAU;aACxC;AACD,iBAAO,wBAAwB,UAAU;;AAEzC,iBAAO,wBAAwB,UAAU;;;AAGrD;AACI,cAAQ,SAAS;aACR;AACD,iBAAO;aACN;AACD,iBAAO,kBAAkB,UAAU;aAClC;AACD,iBAAO,kBAAkB,UAAU;aAClC;AACD,iBAAO,kBAAkB,UAAU;aAClC;AACD,iBAAO,kBAAkB,UAAU;aAClC;AACD,iBAAO,kBAAkB,UAAU;aAClC;AACD,iBAAO,kBAAkB,UAAU;;AAEnC,gBAAM,IAAI,MAAM,GAAG,SAAS;;;AAGxC;AACI,aAAO;;eAEI,KAAK;;;;AAIpB;AACI,aAAO;;QAEH,KAAK;;;;AAIb;AACI,aAAO;;QAEH,KAAK;;;;AAIb;AACI,4BAAsB,GAAG,KAAK;;;;MAI5B,KAAK;MACL,KAAK;;;;;;;;;;;;;;;;;;;;;;;MAuBL,KAAK;MACL,KAAK;MACL,KAAK;;;;;;;;;;;;;;;;;;;;;;;;;MAyBL;MACA;MACA;;AAEF,aAAO;;AAEX,8BAA0B;;;;;;;;;;;;;AAa1B,8BAA0B;;;;;;;;;AAS1B,8BAA0B;;;;;;;;;;AAU1B,iCAA6B;;;;;;;;;;;;AAY7B;AACI,aAAO;;;;;;AAMX;AACI,6BAAuB,CAAC,KAAK,KAAK,SAAS,KAAK,IAAI,KAAK,KAAK,SAAS,KAAK;AAC5E,UAAI,eAAe,OAAO;AACtB,eAAO;;sCAEuB,eAAe;;;;AAIjD,UAAI,eAAe,OAAO;AACtB,eAAO;;sCAEuB,eAAe;;;;AAIjD,aAAO;;;oCAGyB,eAAe,OAAO,eAAe;iCACxC,eAAe;;;;AAIhD;AACI,UAAI,SAAS,OAAO;AAChB,eAAO;;kCAEmB,SAAS;;;;AAIvC,UAAI,SAAS,OAAO;AAChB,eAAO;;kCAEmB,SAAS;;;;AAIvC,aAAO;;;oCAGyB,SAAS,OAAO,SAAS;4BACjC,SAAS;;;;AAIrC;AACI,6BAAuB,CAAC,KAAK,KAAK,SAAS,KAAK,IAAI,KAAK,KAAK,SAAS,KAAK;AAC5E,iCAA2B,KAAK,KAAK,MAAM,KAAK;AAChD,4BAAsB,qBAAqB,KAAK,KAAK,MAAM,KAAK;AAChE,aAAO;;;oCAGyB,eAAe,OAAO,eAAe;iCACxC,eAAe;;wBAExB;qBACH;;6BAEQ;4BACD;;;;;;AAM5B;AACI,qCAA+B,mCAA+C,CAAC,KAAK,KAAK,MAAM;AAC/F,aAAO;;;oCAGyB,SAAS,OAAO,SAAS;iCAC5B,SAAS;QAClC;;;;;AAKR;AACI,6BAAuB,CAAC,KAAK,KAAK,SAAS,KAAK,IAAI,KAAK,KAAK,SAAS,KAAK;AAC5E,iCAA2B,KAAK,KAAK,MAAM,MAAM,SAAS,KAAK;AAC/D,4BAAsB,qBAAqB,KAAK,KAAK,MAAM,MAAM,SAAS,KAAK;AAC/E,2BAAqB;AACrB,oBAAc;AACd,oBAAa;AACb,mBAAa,GAAG,IAAI,MAAM,SAAS,GAAG;AAClC,0BAAkB,MAAM,MAAM,SAAS,IAAI;AAC3C,kBAAU;aACL,eAAe;kBACV,OAAO;QACjB;AACA,kBAAS,IAAI,QAAQ;;AAEzB,aAAO;UACD,MAAM;;oCAEoB,eAAe,OAAO,eAAe;iCACxC,eAAe;;QAExC;;wBAEgB;qBACH;;6BAEQ;4BACD;;mBAET,MAAM,UAAU;;;;AAInC;AACI,qCAA+B,mCAA+C,CAAC,KAAK,KAAK,KAAK,OAAO;AACrG,aAAO;;;eAGI,SAAS,OAAO,SAAS;iCACP,SAAS;QAClC;;;;;AAKR;AACI,qCAA+B,mCAA+C,CAAC,KAAK,KAAK,KAAK,MAAM,OAAO;AAC3G,aAA
O;;kDAEuC,SAAS;+BAC5B,SAAS;;iCAEP,SAAS;;QAElC;;;;;;;AAOR;AACI,qCAA+B,mCAA+C,CAAC,KAAK,KAAK,KAAK,MAAM,MAAM,OAAO;AACjH,aAAO;;;eAGI,SAAS,OAAO,SAAS;iCACP,SAAS;;QAElC;;;;;;;AAOR;AACI,6BAAuB,CAAC,KAAK,KAAK,SAAS,KAAK,IAAI,KAAK,KAAK,SAAS,KAAK;AAC5E,UAAI,aAAiB,OAAO;AACxB,eAAO;;8CAE+B,eAAe,OAAO,eAAe;;;;AAK/E,iCAA2B,KAAK,KAAK,MAAM,KAAK;AAUhD,aAAO;;;oCAGyB,eAAe,OAAO,eAAe;;iCAExC,eAAe;6BACnB;4BACD;;;;;;AAM5B;AACI,UAAI,aAAiB,OAAO;AACxB,eAAO;;0CAE2B,SAAS,OAAO,SAAS;;;;AAI/D,UAAI,MAAM,OAAO;AACb,eAAO;;;sCAGuB,SAAS,OAAO,SAAS;mCAC5B,SAAS;;;;;AAKxC,UAAI,MAAM,OAAO;AACb,eAAO;;;sCAGuB,SAAS,OAAO,SAAS;mCAC5B,SAAS;;;;;AAKxC,aAAO;;;oCAGyB,SAAS,OAAO,SAAS;iCAC5B,SAAS;wBAClB,MAAM;4BACF,MAAM;;;;;AAKlC;AACI,aAAO,SAAS;;AAEpB;AACI,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,mBAAa;AACb,aAAO;WACA;eACI,KAAK,aAAa;;;;AAIjC;AACI,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,UAAI,UAAU,UAAU;AACpB,eAAO,SAAS,sBAAsB;;AAE1C,iCAA2B,UAAU,UAAU;AAC/C,UAAI,YAAY,KAAK,YAAY;AAC7B,eAAO;cACD;+BACiB;;;;AAI3B,6BAAuB,UAAU,UAAU;AAC3C,qBAAe,yBAAyB;AACxC,aAAO;YACC;6BACiB,UAAU,UAAU;6BACpB;;;;AAI7B;AACI,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,uBAAiB,UAAU,UAAU;AACrC,6BAAuB,CAAC,KAAK,KAAK,SAAS,KAAK,IAAI,KAAK,KAAK,SAAS,KAAK;AAC5E,mBAAa;AACb,aAAO;WACA;;UAED,eAAe,OAAO,eAAe;eAChC,KAAK,aAAa;;;;AAIjC;AACI,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,UAAI,UAAU,UAAU;AAEpB,eAAO;cACD;UACJ,kBAAkB;;;;AAIxB,uBAAiB,UAAU,UAAU;AACrC,oBAAc,SAAS;AACvB,oBAAc,SAAS;AACvB,UAAI,UAAU,KAAK,UAAU;AACzB,eAAO;cACD;+BACiB;;;;AAI3B,qBAAe,yBAAyB;AACxC,UAAI,UAAU;AACV,eAAO;cACD;6CAC+B,oBAAoB;+BAClC;;;;AAI3B,UAAI,UAAU;AACV,eAAO;cACD;wCAC0B,oBAAoB;+BAC7B;;;;AAI3B,aAAO;YACC;6BACiB,UAAU,kBAAkB;6BAC5B;;;;AAI7B;AACI,oBAAc,UAAU,UAAU;AAClC,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,uBAAiB,UAAU,UAAU;AACrC,sBAAgB,SAAS;AACzB,sBAAgB,SAAS;AACzB,mBAAa;AACb,UAAI,YAAY,QAAQ,aAAiB,OAAO;AAC5C,eAAO;aACF;qDACwC,cAAc;;iBAElD,KAAK,aAAa;;;;AAI/B,6BAAuB,CAAC,KAAK,KAAK,SAAS,KAAK,IAAI,KAAK,KAAK,SAAS,KAAK;AAC5E,2BAAqB,KAAK,KAAK,MAAM,KAAK;AAC1C,aAAO;WACA;iCACsB,iBAAiB,eAAe,OAAO,eAAe;eACxE,KAAK,aAAa;;;;AAIjC;AACI,oBAAc,UAAU,UAAU;AAClC,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,uBAAiB,UAAU,UAAU;AACrC,UAAI,YAAY,QAAQ,aAAiB,OAAO;AAC5C,yBAAgB,SAAS;AACzB,yBAAgB,SAAS;AACzB,eAAO;YACH;mDACuC,eAAc;6BACpC;;;;AAIzB,aAAQ,UAAU,YAAa,cAAkB;AACjD,4BAAsB;AACtB,UAAI,cAAc,SAAS,MAAM;AAC7B,6BAAqB,iBAAiB,WAAW;AACjD,uBAAe,CAAC,OAAO;AACvB,eAAO;QACP,qBAAqB;cACf;iBACG,YAAY,kBAAkB,QAAQ;;;;AAInD,UAAI,UAAU,UAAU;AAEpB,eAAO;cACD;qDACuC,MAAM;UACjD,kBAAkB;;;;AAIxB,sBAAgB,SAAS;AACzB,sBAAgB,SAAS;AACzB,qBAAe,yBAAyB;AACxC,UAAI,YAAY;AAEZ,eAAO;YACH;yCAC6B,iBAAiB,MAAM;4CACpB;6BACf;;;;AAIzB,UAAI,YAAY;AAEZ,eAAO;YACH;yCAC6B,iBAAiB,MAAM;uCACzB;6BACV;;;;AAIzB,aAAO;UACD;;wBAEc,MAAM,cAAc;2BACjB,YAAY;2BACZ;;;;AAI3B;AACI,oBAAc,UAAU,UAAU;AAClC,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,uBAAiB,UAAU,UAAU;AACrC,6BAAuB,CAAC,KAAK,KAAK,SAAS,KAAK,IAAI,KAAK,KAAK,SAAS,KAAK;AAC5E,UAAI,MAAM,OAAO;AACb,8BAAsB,MAAM,MAAM;AAClC,yBAAiB,CAAC,GAAG;AACrB,6BAAqB,iBAAiB,WAAW;AACjD,uBAAe,CAAC,KAAK,OAAO;AAC5B,eAAO;UACL,2BAA2B;eACtB;mBACI,YAAY,kBAAkB,QAAQ;;;;AAIrD,sBAAgB,eAAe;AAC/B,sBAAgB,eAAe;AAC/B,2BAAqB,KAAK,KAAK,MAAM,KAAK;AAC1C,4BAAsB,eAAe,KAAK,KAAK,MAAM,KAAK;AAC1D,mBAAa;AACb,aAAO;WACA;;UAED,YAAY,YAAY,kBAAkB;eACrC,KAAK,aAAa;;;;AAIjC;AACI,oBAAc,UAAU,UAAU;AAClC,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,sBAAgB,MAAM,KAAK,MAAM;AACjC,sBAAgB,MAAM;AACtB,aAAQ,UAAU,YAAa,cAAkB;AACjD,4BAAsB;AACtB,UAAI,cAAc,
SAAS,MAAM;AAC7B,6BAAqB,iBAAiB,WAAW;AACjD,uBAAe,CAAC,OAAO,OAAO;AAC9B,eAAO;UACL,qBAAqB;gBACf;mBACG,YAAY,kBAAkB,QAAQ;;;;AAIrD,UAAI,UAAU,UAAU;AAEpB,eAAO;cACD;;iCAEmB,YAAY;UACnC,kBAAkB;;;;AAIxB,uBAAiB,UAAU,UAAU;AACrC,sBAAgB,SAAS;AACzB,sBAAgB,SAAS;AACzB,yBAAmB,UAAU,UAAU;AACvC,UAAI,YAAY,WAAW,cAAc;AAErC,eAAO;gBACC;;oDAEoC;;4BAExB,cAAc;iCACT;;;;AAI7B,UAAI,YAAY,WAAW,cAAc;AAErC,eAAO;YACH;8CACkC,MAAM;;qDAEC,cAAc;6BACtC;;;;AAIzB,qBAAe,yBAAyB;AACxC,aAAO;cACG;;4BAEc,mBAAmB,qBAAqB;+BACrC,YAAY;+BACZ;;;;AAI/B;AACI,oBAAc,UAAU,UAAU;AAClC,mBAAa,MAAM;AACnB,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,uBAAiB,UAAU,UAAU;AACrC,6BAAuB,CAAC,KAAK,KAAK,SAAS,KAAK,IAAI,KAAK,KAAK,SAAS,KAAK;AAC5E,sBAAgB,eAAe;AAC/B,sBAAgB,eAAe;AAC/B,2BAAqB,KAAK,KAAK,MAAM,OAAO,KAAK;AACjD,0BAAoB,eAAe,KAAK,KAAK,MAAM,OAAO,KAAK;AAC/D,mBAAa;AACb,mBAAY,OAAO,+BAA+B;AAClD,mBAAa,GAAG,IAAI,OAAO,GAAG;AAC1B,iBAAS,QAAQ,QAAQ;AACzB,yBAAiB,MAAM,OAAO,IAAI;AAClC,iBAAQ,IAAI,OAAO,qBAAqB;;AAE5C,mBAAa;AACb,aAAO;WACA,YAAY;oBACH;2BACO;kCACO;qDACmB,YAAY;eAClD,KAAK,aAAa;;;;AAIjC;AACI,oBAAc,UAAU,UAAU;AAClC,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,sBAAgB,MAAM;AACtB,sBAAgB,MAAM,KAAK;AAC3B,sBAAgB,MAAM,KAAK;AAC3B,aAAQ,UAAU,YAAa,cAAkB;AACjD,UAAI,SAAS,SAAS,MAAM;AACxB,6BAAqB,iBAAiB,WAAW;AACjD,uBAAe,CAAC,OAAO,OAAO,SAAS;AACvC,eAAO;QACP,qBAAqB;cACf;iBACG,YAAY,kBAAkB,QAAQ;;;;AAInD,UAAI,UAAU,UAAU;AAEpB,eAAO;cACD;;iCAEmB,YAAY,YAAY;UAC/C,kBAAkB;;;;AAIxB,yBAAmB,UAAU,UAAU;AACvC,uBAAiB,UAAU,UAAU;AACrC,sBAAgB,SAAS;AACzB,sBAAgB,SAAS;AACzB,UAAI,YAAY,WAAW,cAAc;AAErC,eAAO;cACD;;;;uBAIS,YAAY;;0BAET,cAAc;+BACT;;;;AAI3B,UAAI,YAAY,WAAW,cAAc;AAErC,eAAO;cACD;;gCAEkB,MAAM,KAAK,MAAM,OAAO,MAAM;;;yBAGrC,cAAc;+BACR;;;;AAI3B,qBAAe,yBAAyB;AACxC,aAAO;YACC;;0BAEc,mBAAmB;oBACzB;6BACS,YAAY,oBAAoB;6BAChC;;;;AAI7B;AACI,oBAAc,UAAU,UAAU;AAClC,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,sBAAgB,MAAM;AACtB,sBAAgB,MAAM,KAAK;AAC3B,sBAAgB,MAAM,KAAK;AAC3B,sBAAgB,MAAM,KAAK;AAC3B,aAAQ,UAAU,YAAa,cAAkB;AACjD,UAAI,SAAS,SAAS,MAAM;AACxB,6BAAqB,iBAAiB,WAAW;AACjD,uBAAe,CAAC,OAAO,OAAO,SAAS,UAAU;AACjD,eAAO;QACP,qBAAqB;cACf;iBACG,YAAY,kBAAkB,QAAQ;;;;AAInD,UAAI,UAAU,UAAU;AAEpB,eAAO;cACD;;;iBAGG,YAAY,YAAY,YAAY;;UAE3C,kBAAkB;;;;AAIxB,yBAAmB,UAAU,UAAU;AACvC,uBAAiB,UAAU,UAAU;AACrC,sBAAgB,SAAS;AACzB,sBAAgB,SAAS;AACzB,UAAI,YAAY,WAAW,cAAc;AAErC,eAAO;cACD;;;gCAGkB,YAAY,YAAY;;0BAE9B,cAAc;+BACT;;;;AAI3B,UAAI,YAAY,WAAW,cAAc;AAErC,eAAO;cACD;;;iBAGG,MAAM,KAAK,MAAM,KAAK,MAAM;iBAC5B,MAAM,KAAK,MAAM,OAAO,MAAM;;;yBAGtB,cAAc;+BACR;;;;AAI3B,qBAAe,yBAAyB;AACxC,aAAO;YACC;;0BAEc,mBAAmB,qBAAqB;qBAC7C,sBAAsB;6BACd,YAAY;6BACZ;;;;AAI7B;AACI,oBAAc,UAAU,UAAU;AAClC,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,aAAQ,UAAU,YAAa,cAAkB;AACjD,UAAI,SAAS,SAAS,MAAM;AACxB,6BAAqB,iBAAiB,WAAW;AACjD,uBAAe,CAAC,OAAO,OAAO,SAAS,UAAU,UAAU;AAC3D,eAAO;QACP,qBAAqB;cACf;;iBAEG,YAAY,kBAAkB,QAAQ;;;;AAInD,sBAAgB,MAAM;AACtB,sBAAgB,MAAM,KAAK;AAC3B,sBAAgB,MAAM,KAAK;AAC3B,sBAAgB,MAAM,KAAK;AAC3B,sBAAgB,MAAM,KAAK;AAC3B,UAAI,UAAU,UAAU;AAEpB,eAAO;cACD;;;;iBAIG,YAAY,YAAY,YAAY;;;mBAGlC;UACT,kBAAkB;;;;AAIxB,yBAAmB,UAAU,UAAU;AACvC,uBAAiB,UAAU,UAAU;AACrC,sBAAgB,SAAS;AACzB,sBAAgB,SAAS;AACzB,UAAI,YAAY,WAAW,cAAc;AAErC,eAAO;cACD;;;;iBAIG,YAAY,YAAY,YAAY;;;0BAG3B,cAAc;+BACT;;;;AAI3B,UAAI,YAAY,WAAW,cAAc;AAErC,eAAO;cACD;;;iBAGG,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM;iBACvC,MAAM,KAAK,MAAM,KAAK,MAAM;iBAC5B,MAAM,KAAK,MAAM;iBACjB,MAAM;;;yBAGE,cAAc;+BACR;;;;AAI3B,qBAAe,yBAAyB;AACxC,aAAO;YACC;;;0BAGc,mBAAmB,qBAAqB;qBAC7C,sBAAsB,sBAAsB;6BACpC,YAAY;6BACZ;;;;AAI7B;AACI,sBAAgB,UAAU;AAC1B,qBAAe,eAAmB,UAAU,UAAU;AACtD,UAAI,SAA
S;AACT,eAAO,UAAU;;AAErB,aAAO;0BACe;;iBAET;;;;;AAKjB;AACI,sBAAgB,UAAU;AAC1B,6BAAuB,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACvE,uBAAiB,QAAQ,iBAAiB;AAC1C,qBAAe,UAAU,UAAU,aAAa;AAChD,sBAAgB,aAAa,aAAa;AAC1C,4BAAsB,mBAAiB,UAAU,UAAU,cAAc,aAAa;AACtF,mBAAa,kBAAkB;AAC/B,uBAAiB,UAAU;AAC3B;AACA,qBAAe,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK;AACzC,UAAI,WAAW;AACX,wBAAgB;iBAEX,UAAU,KAAK,cAAc,UAAU;AAC5C,wBAAgB;;AAGhB,wBACI,cAAc,IAAI,OAAK,UAAU,OAAO,IAAI,kBACvC,KAAK;;AAElB,kCAA4B;AAC5B,UAAI,UAAU,KAAK,SAAS;AACxB,gCAAwB;;AAGxB,gCAAwB,UAAU,UAAU,aACvC,IAAI,UAAU,UAAU,OAAO,IAAI,aACnC,KAAK;;AAEd,mBAAa;AACb,qBAAe,eAAmB,UAAU,UAAU;AACtD,4BAAsB,WAAW;AACjC,sBAAgB,eAAmB,aAAa;AAChD,6BAAuB,YAAY;AACnC,UAAI,WAAW,KAAK,CAAC,iBAAiB,CAAC;AACnC,iBAAS;;;iBAIJ,iBAAiB,CAAC;AACvB,YAAI,YAAY;AACZ,mBAAS;;;;AAKT,mBAAS;;;;iBAKR,cAAc;AACnB,qBAAa,SAAS;AACtB,qBAAa,SAAS;AACtB,YAAI,cAAc,QAAQ,QAAQ,MAAM,cAAc,QAAQ,QAAQ;AAClE,mBAAS;mBAEJ,cAAc,QAAQ,QAAQ;AACnC,mBAAS;mBAGJ,cAAc,QAAQ,QAAQ;AACnC,mBAAS;;;AAGjB,aAAO;WACA;QACH;QACA;8BACsB,kBAAkB;QACxC;;;;AAIR;AACI,sBAAgB,UAAU;AAC1B,6BAAuB,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACvE,uBAAiB,QAAQ,iBAAiB;AAC1C,0BAAoB,aAAa;AACjC,yBAAmB,UAAU,UAAU;AACvC,qBAAe,UAAU,UAAU,aAAa;AAChD,sBAAgB,aAAa,aAAa;AAC1C,UAAI,CAAC,UAAU,UAAU,aAAa,WAAW,WAC7C,UAAU,UAAU,cAAc,QAClC,aAAiB,YAAY;AAC7B,eAAO;cACD;+BACiB;;;;AAI3B,mBAAa,kBAAkB;AAC/B,4BAAsB,mBAAiB,UAAU,UAAU,cAAc,aAAa;AACtF,uBAAiB,UAAU;AAC3B;AACA,qBAAe,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK;AACzC,UAAI,WAAW;AACX,wBAAgB;iBAEX,UAAU,KAAK,cAAc,UAAU;AAC5C,wBAAgB;;AAGhB,wBACI,cAAc,IAAI,OAAK,UAAU,OAAO,IAAI,kBACvC,KAAK;;AAElB,kCAA4B;AAC5B,UAAI,UAAU,KAAK,SAAS;AACxB,gCAAwB;;AAGxB,gCAAwB,UAAU,UAAU,aACvC,IAAI,UAAU,UAAU,OAAO,IAAI,aACnC,KAAK;;AAEd,aAAO;YACC;QACJ;QACA;kBACU,kBAAkB;;;;AAI7B;AACH,UAAI,QAAQ;AACR,eAAO;iBAEF,SAAS;AACd,eAAO;iBAEF,SAAS;AACd,eAAO;iBAEF,SAAS;AACd,eAAO;iBAEF,SAAS;AACd,eAAO;iBAEF,SAAS;AACd,eAAO;;AAGP,cAAM,MAAM,gBAAgB;;;AAIpC;AAEI,2BAAqB,KAAK,MAAM,KAAK,UAAU;AAC/C,mBAAa,UAAU,eAAe;AACtC,aAAO;;AAEX;AACI,aAAO,SAAS,IAAI,OAAK,OAAO,IAAI,KAAK;;AC3sC7C;;;;;;;;;;;;;;;;;MAoBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,gBAAY,MAAM,SAAS,GAAG,MAAM,aAAa,IAAG,OAAO,GAAG,gBAC1D,IAAG,MAAM;AACb,uBAAe,MAAM,MAAM,SAAS;AACpC,wBAAgB,KAAK,KAAK,SAAS;AACnC,aAAK,cAAc,MAAM,MAAM,GAAG;AAClC,YAAI,UAAU;AACV,eAAK,YAAY,KAAK;;AAE1B,YAAI,CAAC;AACD,eAAK,cAAc,KAAK;;AAE5B,yBAAiB,KAAK;AACtB,qBAAa,SAAS;AACtB,sBAAc,kBAAkB;AAChC,wBAAe,YAAY,UAAU;AACrC;AACA;AACA,YAAI,YAAY;AACZ,uBAAa,OAAO;AACpB,iCAAuB,kBAAkB;AACzC,2BAAiB;UACnB,+BAA+B,kBAAkB,QAAO;YACtD,QAAO,OAAO;UAChB,+BAA+B,kBAAkB,QAAO;YACtD,QAAO,OAAO;UAChB,+BAA+B,kBAAkB,QAAO;YACtD,QAAO,OAAO;UAChB,+BAA+B,kBAAkB,QAAO;YACtD,QAAO,OAAO;;AAGd,uBAAa;AACb,2BAAiB;UACnB;YACE,QAAO,OAAO;UAChB;YACE,QAAO,OAAO;UAChB;YACE,QAAO,OAAO;UAChB;YACE,QAAO,OAAO;;AAElB,yBAAiB,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK,MAAM,GAAG;AACzD,0BAAkB,MAAM,SAAS,aAAa;AAC9C,4BAAoB,SAAS,IAAI,OAAK,SAAS;AAC/C,2BAAmB,YAAY,cAAc,aAAa,GAAG,OAAO;AACpE,2BAAmB,YAAY,cAAc,aAAa,GAAG,OAAO;AACpE,2BAAmB,YAAY,cAAc,aAAa,GAAG,OAAO;AACpE,2BAAmB,YAAY,cAAc,aAAa,GAAG,OAAO;AACpE,uBAAgB,QAAO,QAAS,gBAAgB;AAChD,kCAA0B,YAAY,KAAK;sDACG,WAAW;sDACX,WAAW;sDACX,WAAW;sDACX,WAAW;AACzD,2BAAmB;0BACD,WAAW;uCACE,WAAW;uCACX,WAAW;qDACG,WAAW;AACxD,8CAAsC,YAAY,KAAK;qCAC1B,YAAY;4CACL,SAAS;iDACJ,SAAS,MAAM,IAAI;;AAE5D,aAAK,WAAW;0BACE,YAAY;iCACL,SAAS;sCACJ,SAAS,MAAM,IAAI;;QAEjD;;UAEE;4BACkB,QAAO,OAAO,QAAQ,SAAS,OAAO,KAAK;4BAC3C,QAAO,OAAO,QAAQ,SAAS,OAAO,KAAK;UAC7D;yCAC+B,wBAAwB;sBAC3C,wBAAwB,gBAAgB;;;2BAGnC;;8BAEG;;YAElB;6BACiB;;;mBAGV;;;;;;;;;;;;;;AChHnB;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc,SAAS;AAC5B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,
O,KAAK,cAAc,SAAS,CAAC,GAAG,IAAI;;MAE/C;AACI,YAAI,KAAK,mBAAmB,CAAC;AACzB,4BAAkB,YAAY,KAAK,QAAQ,IAAI,EAAE,QAAQ,QAAQ,EAAE;AACnE,iBAAO,KAAK,WAAW,EAAE,OAAO,EAAE,OAAO;;AAE7C,YAAI,OAAM,QAAQ;AACd,iBAAO,KAAK,cAAc,GAAG,MAAe,EAAE;;AAElD,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,YAAI,KAAK,mBAAmB,CAAC;AACzB,4BAAkB,aAAa,KAAK,QAAQ,IAAI,EAAE,QAAQ,QAAQ,EAAE;AACpE,iBAAO,KAAK,WAAW,EAAE,OAAO,EAAE,OAAO;;AAE7C,YAAI,OAAM,QAAQ;AACd,iBAAO,KAAK,cAAc,GAAG,OAAgB,EAAE;;AAEnD,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC,IAAI;;MAE5C;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC,IAAI;;MAE5C;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC,IAAI;;MAE5C;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,YAAI,KAAK,mBAAmB,CAAC;AACzB,4BAAkB,WAAW,KAAK,QAAQ,IAAI,EAAE,QAAQ,QAAQ,EAAE;AAClE,iBAAO,KAAK,WAAW,EAAE,OAAO,EAAE,OAAO;;AAE7C,YAAI,OAAM,QAAQ;AACd,iBAAO,KAAK,cAAc,GAAG,KAAc,EAAE;;AAEjD,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,YAAI,KAAK,mBAAmB,CAAC;AACzB,4BAAkB,aAAa,KAAK,QAAQ,IAAI,EAAE,QAAQ,QAAQ,EAAE;AACpE,iBAAO,KAAK,WAAW,EAAE,OAAO,EAAE,OAAO;;AAE7C,YAAI,OAAM,QAAQ;AACd,iBAAO,KAAK,cAAc,GAAG,OAAgB,EAAE;;AAEnD,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,qBAAa,gBAAoB,CAAC,MAAM,OAAO;AAG/C,yBAAiB,KAAI,QAAQ;AAC7B,8BAAsB,sBAAkC,SAAS,OAAO;AAGxE,kBAAU,IAAO,QAAQ,SAAS,QAAQ;AAC1C,kBAAU,KAAK,IAAI;AACnB,uBAAe,KAAK,IAAI,GAAG,MAAM,QAAQ;AAGzC,eAAO,IAAI,GAAG;;MAElB;AACI,YAAI,KAAK,mBAAmB,CAAC;AACzB,4BAAkB,WAAW,KAAK,QAAQ,IAAI,EAAE,QAAQ,QAAQ,EAAE;AAClE,iBAAO,KAAK,WAAW,EAAE,OAAO,EAAE,OAAO;;AAE7C,YAAI,OAAM,QAAQ;AACd,iBAAO,KAAK,cAAc,GAAG,OAAqB,EAAE;;AAExD,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,YAAI,KAAK,mBAAmB,CAAC;AACzB,4BAAkB,aAAa,KAAK,QAAQ,IAAI,EAAE,QAAQ,QAAQ,EAAE;AACpE,iBAAO,KAAK,WAAW,EAAE,OAAO,EAAE,OAAO;;AAE7C,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI;AACA,YAAI,OAAM,QAAQ;AACd,oBAAU,IAAI,qBAAqB,EAAE,OAAO;;AAG5C,oBAAU,IAAI,eAAe,EAAE,OAAO;;AAE1C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI;AACA,YAAI,OAAM,QAAQ;AACd,oBAAU,IAAI,qBAAqB,EAAE,OAAO;;AAG5C,oBAAU,IAAI,eAAe,EAAE,OAAO;;AAE1C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,OAAM,QAAQ,kCAC1B,IAAI,sBAAsB,SAA2B,EAAE,OAAO,MAAM,SACpE,IAAI,gBAAgB,OAAoB,EAAE,OAAO,MAAM;AAC3D,eAAO,KAAK,cAAc,SAAS,CAAC,GAAG;;MAE3C;AACI,YAAI,OAAM,QAAQ;AACd,iBAAO,KAAK,cAAc,GAAG,OAAqB,EAAE;;AAExD,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,OAAM,QAAQ,kCAC1B,IAAI,sBAAsB,WAA6B,GAAG,OAAO,EAAE,SACnE,IAAI,gBAAgB,SAAsB,GAAG,OAAO,EAAE;AAC1D,eAAO,KAAK,cAAc,SAAS,CAAC,IAAI;;MAE5C;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI;AACA,YAAI,OAAM,QAAQ;AACd,oBAAU,IAAI,kBAAkB,EAAE;;AAGlC,oBAAU,IAAI,YAAY,EAAE;;AAEhC,4BAAoB,QAAQ,mBAAmB,MAAK;AACpD,eAAO,KAAK,cAAc,SAAS,CAAC,IAAI,MAAM;;MAElD;AAEI,YAAI,KAAK,mBAAmB,CAAC,OAAO,EAAE,UAAU;AAC5C,4BAAkB,iBAAiB,KAAK,QAAQ,IAAI,EAAE,QAAQ;AAC9D,iBAAO,KAAK,WAAW,EAAE,OAAO,EAAE,OAAO;;AAE7C,YAAI,OAAM,QAAQ;AACd,iBAAO,KAAK,cAAc,GAAG,KAAc,EAAE;;AAEjD,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,sBAAc,KAAK,QAAQ,IAAI,EAAE;AACjC,wBAAgB,IAAI,kBAAkB,EAAE;AACxC,uBAAe;UACX,KAAK,+BAA+B,GAAG,MAAM,mBAAmB;UAChE,
KAAK,+BAA+B,GAAG,MAAM,mBAAmB;;AAEpE,eAAO,KAAK,cAAc,SAAS;;MAEvC;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO,KAAc;AAC1D,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AAGI,uBAAe,EAAE;AACjB,yBAAiB,KAAK,QAAQ,IAAI,EAAE;AACpC,gCAAwB,SAAS;AACjC,4BAAoB,OAAO,KAAK,OAAO,KAAK,OAAO;AACnD,iCAAyB,SAAS;AAClC,+BAAuB,SAAS,eAAe;AAC/C,2BAAmB;AACnB,2BAAmB;AAGnB,0CAAmC,iBAAgB,KAAK,qBAAqB,MACzE,kBAAkB;AACtB,uCAA+B,OAAO,KAAK,MAAM,KAAK,CAAC,CAAC,SAAS;AACjE,YAAI,6BAA6B,CAAC,OAAM,QAAQ,0BAC5C,CAAC,OAAM,QAAQ,mCACf,CAAC;AACD,+BAAoB,iBAAiB,OAAO,KAAK,OAAO,KAAK,OAAO,KAChE,OAAO,KAAK,OAAO,KAAK,OAAO;AACnC,6BAAkB,SAAQ,GAAG,CAAC,GAAG,cAAa,SAAS;AACvD,kCAAuB,SAAQ,QAAQ,CAAC,GAAG,SAAS,YAAY,SAAS;AACzE,yBAAe,KAAK,iBAAiB;YACjC,GAAG;YACH,GAAG;YACH;YACA;YACA;YACA,YAAA;YACA;;AAEJ,iBAAO,SAAQ,QAAQ,SAAS;;AAUpC,4BAAoB,iBAChB,OAAO,KAAK,OAAO,KAAM,QAAO,KAAK,KACrC,OAAO,KAAK,OAAO,KAAM,QAAO,KAAK;AACzC,0BAAkB;UACd,QAAQ,EAAE;UACV,OAAO,CAAC,GAAG,aAAa,SAAS;UACjC,OAAO,EAAE;;AAUb,sCAA8B,SAAS;AACvC,iBAAS,QAAQ,SAAS,MAAM;AAChC,iBAAS,MAAM,SAAS,MAAM,SAAS;AACvC,gBAAY,cAAyB,SAAS,OAAO,UAAU,QAAQ,MAAM,kBAAkB,SAAS,YAAY,UAAU;AAC9H,+BAAuB,SAAQ,QAAQ,CAAC,GAAG,SAAS,YAAY,SAAS;AACzE,8BAAsB,KAAK,iBAAiB;UACxC,GAAG;UACH,GAAG;UACH;UACA;UACA;UACA,YAAA;UACA;;AAEJ,qCAA6B,KAAK,QAAQ,IAAI,cAAc;AAC5D,gBAAY,qBAAqB,UAAU,MAAM;AAEjD,iBAAS,QAAQ;AAGjB,6BAAqB,QAAQ,SAAS;AACtC,eAAO,WAAS,qBAAqB,cAAc,QAAQ,SAAS,UAAU,cAAc;;MAEhG;AAOI,eAAQ,aAAa,cAAc,YAAY,UAAU,WAAW,cAAe;AACnF,+BAAuB,eAAe;AACtC,0BAAkB,cAAc,eAAe;AAC/C,wBAAgB,YAAY;AAC5B,2BAAmB,CAAC,WAAW;AAC/B,2BAAmB;AACnB,2BAAmB;AACnB,0BAAkB,EAAE,QAAQ,CAAC;AAC7B,sBAAc,OAAO,QAAQ,CAAC,GAAG,WAAW;AAC5C,8BAAsB,IAAI,oBAAoB,YAAY,UAAU,OAAO;AAC3E,uBAAe,KAAK,cAAc,eAAe,CAAC,YAAY,QAAQ;UAClE;UAAG,WAAW;UAAI,WAAW;;AAEjC,wBAAgB,QAAQ;AACxB,0CAAkC,0BAA0B;AAC5D,gCAAwB,cAAa,6BAA6B,aAAY,QAAQ;AACtF,8BAAsB,IAAI,oBAAoB,OAAO,OAAO,MAAM,OAAO,CAAC,GAAG,SAAS,SAAS,cAAc,YAAY,YAAY,SAAS,iBAAiB;AAC/J,uBAAe,CAAC,QAAQ;AACxB,YAAI;AACA,iBAAO,KAAK;;AAEhB,YAAI;AACA,iBAAO,KAAK;;AAEhB,wBAAgB,KAAK,cAAc,eAAe;AAClD,YAAI;AACA,iBAAO,QAAQ,QAAQ,CAAC,GAAG,WAAW,UAAU,SAAS;;AAGzD,iBAAO,QAAQ,QAAQ,CAAC,GAAG,SAAS,aAAa,WAAW;;;MAGpE,aAAc,eAAO,QAAQ,UAAU,MAAM,yBAAY;AACrD,YAAI,SAAS,iBAAiB,KAAK,SAAS,gBAAgB,KACxD,SAAS,mBAAmB,KAAK,SAAS,kBAAkB,KAC5D,SAAS,iBAAiB,KAAK,SAAS,gBAAgB,KACvD,UAAS,QAAQ,SAAS,UACvB,SAAS,QAAQ,SAAS;AAC9B,iBAAO,KAAK,eAAe,QAAO,QAAQ,UAAU,MAAM,aAAY;;AAE1E,YAAI,OAAM,QAAQ,wBAAwB,OAAM,MAAM,OAAO;AACzD,iBAAO,KAAK,iBAAiB,QAAO,QAAQ,UAAU,MAAM,aAAY;;AAE5E,wBAAgB,QAAQ;AACxB,0CAAkC,0BAA0B;AAC5D,gCAAwB,cAAa,6BAA6B,aAAY,SAAS;AACvF,wBAAgB,IAAI,cAAc,UAAU,SAAS,iBAAiB;AACtE,uBAAe,CAAC,QAAO;AACvB,YAAI;AACA,iBAAO,KAAK;;AAEhB,YAAI;AACA,iBAAO,KAAK;;AAEhB,eAAO,KAAK,cAAc,SAAS;;MAEvC;AACI,YAAI,SAAS,iBAAiB,KAAK,SAAS,gBAAgB,KACxD,SAAS,mBAAmB,KAAK,SAAS,kBAAkB,KAC5D,SAAS,iBAAiB,KAAK,SAAS,gBAAgB,KACvD,UAAS,QAAQ,SAAS,UACvB,SAAS
,QAAQ,SAAS;AAC9B,iBAAO,KAAK,eAAe,GAAG,QAAQ;;AAE1C,YAAI,OAAM,QAAQ,wBAAwB,EAAE,MAAM,OAAO;AACrD,iBAAO,KAAK,iBAAiB,GAAG,QAAQ;;AAE5C,wBAAgB,IAAI,cAAc;AAClC,eAAO,KAAK,cAAc,SAAS,CAAC,GAAG;;MAE3C;AACI,wBAAgB,IAAI,sBAAsB;AAC1C,eAAO,KAAK,cAAc,SAAS,CAAC,IAAI;;MAE5C;AACI,wBAAgB,IAAI,uBAAuB;AAC3C,eAAO,KAAK,cAAc,SAAS,CAAC,GAAG;;MAE3C,sBAAuB,eAAO,QAAQ,UAAU,MAAM,yBAAY;AAC9D,wCAAgC,OAAM,QAAQ,+BAC1C,SAAS,eAAe,KACxB,SAAS,cAAc,SAAS,eAAe;AACnD,gCAAwB,cACpB,6BAA6B,aAAY,2BACzC;AACJ,uBAAe,CAAC,QAAO;AACvB,wBAAgB,QAAQ;AACxB,0CAAkC,0BAA0B;AAC5D,YAAI;AACA,iBAAO,KAAK;;AAEhB,YAAI;AACA,iBAAO,KAAK;;AAEhB;AACA,YAAI;AACA,oBAAU,IAAI,6BAA6B,UAAU,SAAS,iBAAiB;AAC/E,iBAAO,KAAK,cAAc,SAAS;;AAEvC,kBAAU,IAAI,uBAAuB,UAAU,SAAS,iBAAiB;AACzE,eAAO,KAAK,cAAc,SAAS;;MAEvC;AACI;AACA,YAAI,OAAM,QAAQ,+BACd,SAAS,eAAe,KACxB,SAAS,cAAc,SAAS,eAAe;AAC/C,oBAAU,IAAI,6BAA6B;AAC3C,iBAAO,KAAK,cAAc,SAAS,CAAC,GAAG;;AAE3C,kBAAU,IAAI,uBAAuB;AACrC,eAAO,KAAK,cAAc,SAAS,CAAC,GAAG;;MAE3C;AACI,wBAAgB,IAAI,+BAA+B;AACnD,eAAO,KAAK,cAAc,SAAS,CAAC,IAAI;;MAE5C;AACI,wBAAgB,IAAI,gCAAgC;AACpD,eAAO,KAAK,cAAc,SAAS,CAAC,GAAG;;MAE3C;AACI,wBAAgB,IAAI,cAAc;AAClC,eAAO,KAAK,cAAc,SAAS,CAAC,GAAG;;MAE3C;AACI,wBAAgB,IAAI,sBAAsB;AAC1C,eAAO,KAAK,cAAc,SAAS,CAAC,IAAI;;MAE5C;AACI,wBAAgB,IAAI,uBAAuB;AAC3C,eAAO,KAAK,cAAc,SAAS,CAAC,GAAG;;MAE3C;AACI,oBAAY,EAAE,MAAM;AACpB,yBAAiB,IAAI,MAAM,EAAE,OAAO;AACpC,uBAAe;AACf,qBAAa,GAAG,IAAI,EAAE,MAAM;AACxB,cAAI,MAAM;AACN,qBAAS,cAAc,EAAE,MAAM;;;AAGvC,sBAAc,IAAI,MAAM,EAAE,MAAM,KAAK;AACrC,qBAAa,EAAE,MAAM;AACrB,aAAK,QAAQ;AACb,oBAAY,IAAI,MAAM;AACtB,qBAAa,GAAG,IAAI,IAAI,QAAQ;AAC5B,gBAAM,QAAQ;AACd,cAAI,KAAK,KAAK,MAAM,GAAG,OAAO,MAAM,QAAQ;;AAEhD,eAAO;;MAEX;AACI,wBAAgB,IAAI,cAAc,UAAU,OAAO;AACnD,eAAO,KAAK,cAAc,SAAS,CAAC,IAAI;;MAE5C;AACI,yCAAiC,IAAI,yBAAyB;AAC9D,eAAO,KAAK,cAAc,0BAA0B,CAAC,KAAK,EAAE;;MAEhE;AACI,wBAAgB,IAAI,cAAc,UAAU,OAAO;AACnD,eAAO,KAAK,cAAc,SAAS,CAAC,IAAI;;MAE5C;AACI,6BAAqB;AACrB,0CAAkC,IAAI,cAAc,UAAU,OAAO;AACrE,mCAA2B,KAAK,cAAc,2BAA2B,CAAC;AAC1E,yCAAiC,IAAI,yBAAyB;AAC9D,uBAAe,KAAK,cAAc,0BAA0B,CAAC,IAAI,qBAAqB,EAAE;AACxF,2BAAmB;AACnB,eAAO;;MAEX;AACI,wBAAgB,OAAM,QAAQ,iCAC1B,IAAI,4BAA4B,EAAE,OAAO,WAAW,UAAU,gBAC9D,IAAI,sBAAsB,EAAE,OAAO,WAAW,UAAU;AAC5D,eAAO,KAAK,cAAc,SAAS,CAAC,IAAI;;MAE5C;AACI,wBAAgB,IAAI,8BAA8B,IAAI,GAAG;AACzD,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,6BAA6B,EAAE,OAAO,WAAW,UAAU;AAC/E,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,oCAAoC,IAAI,GAAG;AAC/D,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,sBAAc,aAAa,SAAS,SAAQ;AAC5C,0BAAkB,MAAM,MAAM;AAC9B,4BAAoB,MAAM,MAAM;AAChC,wBAAgB,IAAI,mBAAmB,WAAW,aAAa;AAC/D,4BAAoB,QAAQ,mBAAmB;AAC/C,eAAO,KAAK,cAAc,SAAS,CAAC,QAAQ,SAAS;;MAEzD;AACI,wBAAgB,IAAI,cAAc,QAAQ,MAAM,OAAO,SAAS;AAChE,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,YAAY,EAAE;AAClC,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,qBAAqB,OAAM,OAAO,MAAM,OAAO,UAAU,QAAQ;AACrF,eAAO,KAAK,cAAc,SAAS,CAAC,QAAO,OAAO,WAAW;;MAEjE;AACI,gBAAY,YAAY,GAAG,MAAM,sDAAsD;AACvF,0BAAkB,EAAE,MAAM;AAC1B,4BAAqB,eAAe,SAAU,EAAE,MAAM,KAAK,EAAE,MAAM;AACnE,2BAAoB,eAAe,SAAU,EAAE,MAAM,KAAK,EAAE,MAAM;AAClE,2BAAoB,eAAe,SAAU,EAAE,MAAM,KAAK,EAAE,MAAM;AAClE,6BAAqB,cAAc;AACnC,4BAAoB,aAAa;AACjC,4BAAoB,aAAc,aAAY;AAC9C,4BAAqB,eAAe,SAChC,CAAC,WAAW,cAAc,aAAa,eACvC,CAAC,WAAW,aAAa,cAAc;AAC3C,wBAAgB,IAAI,oBAAoB,aAAa,WAAW;AAChE,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,eAAO,QAAM,GAAG,YAAY;;MAEhC;AACI,eAAQ,WAAW,YAAY,WAAW,SAAS,cAAe,iBAA6B,SAAS,SAAS;AACjH,6BAAqB,CAAC,aAAa,WAAW;AAC9C,+BAAuB,QAAQ,QAAQ,CAAC,YAAY;AACpD,yBAAiB,QAAQ,QAAQ,CAAC,YAAY;AAC9C,YAAI,eAAe;AACf,iBAAO,eAA2B,QAAO,KAAK;;AAElD,6BAAqB,QAAO;AAC5B,wBAAgB,IAAI,eAAe,YAAY,WAAW,eAAe,MAAM,SAAS,MAAM,SAAS;AACvG,oBAAY,KAAK,cAAc,SAAS,CAAC,UA
AU,gBAAgB;AACnE,eAAO,IAAI,QAAQ;;MAEvB;AACI,eAAQ,WAAW,YAAY,SAAS,cAAe,iBAA6B,cAAc,eAAe;AACjH,+BAAuB;AACvB,wBAAgB,IAAI,eAAe,YAAY,WAAW,cAAc,MAAM,aAAa,MAAM,SAAS,CAAC,YAAY,IAAI;AAC3H,oBAAY,KAAK,cAAc,SAAS,CAAC,cAAc,eAAe;AACtE,eAAO,IAAI,QAAQ;;MAEvB;AACI,6BAAqB,QAAQ;AAC7B,0BAAkB,aAAa,aAAa,SAAS;AACrD,6DAAqD,oBAAgC,GAAG;AACxF,+BAAuB,QAAQ,QAAQ,CAAC,WAAW;AACnD,yBAAiB,EAAE,QAAQ,CAAC,EAAE,OAAO,WAAW;AAChD,wBAAgB,IAAI,gBAAgB,WAAW,SAAS,CAAC,WAAW;AACpE,oBAAY,KAAK,cAAc,SAAS,CAAC,UAAU;AACnD,eAAO,IAAI,QAAQ;;MAEvB;AACI,gBAAQ,SAAS,YAAgB;AACjC,YAAI,UAAU;AAEV,yBAAe,mBAAuB,OAAO,eAAmB;AAChE,iBAAO,KAAK;AACZ,iBAAO,WAAS,WAAW,QAAQ,OAAO,OAAO;;AAGjD,0BAAgB,IAAI,YAAY,OAAO;AACvC,8BAAoB,QAAQ,mBAAmB;AAC/C,iBAAO,KAAK,cAAc,SAAS,IAAI,OAAO;;;MAGtD;AACI,YAAI,EAAE,UAAU;AACZ,gBAAM,IAAI,MAAM;;AAKhB,iBAAO,KAAK,KAAK,EAAE,OAAO,GAAG,EAAE;;;MAGvC;AACI,eAAO,KAAK,KAAK,EAAE,OAAO,EAAE,UAAU,WAAW,KAAK,GAAG,EAAE;;MAE/D;AAEI,eAAO,cAA0B,OAAO,MAAM;;MAElD;AACI,uBAAe,KAAK,MAAM,QAAQ,OAAO;AACzC,aAAK,QAAQ,IAAI,QAAQ,QAAQ;AACjC,eAAO,CAAE,QAAQ,OAAO;;MAE5B;AACI,eAAQ,UAAW,KAAK,eAAe,OAAO,OAAO;AACrD,eAAO,WAAS,qBAAqB,QAAQ,OAAO,OAAO;;MAE/D;AACI,wBAAgB,IAAI,cAAc,OAAM;AACxC,eAAO,KAAK,gBAAgB,SAAS,CAAC,SAAQ,OAAM;;MAExD;AACI,wBAAgB,IAAI,YAAY,OAAM;AACtC,4CAAoC;AACpC,eAAO,KAAK,gBAAgB,SAAS,CAAC,SAAQ,OAAM,OAAO,MAAwB;;MAEvF;AACI,6BAAqB;UACjB,YAAuB,OAAM;UAC7B,GAAG,YAAuB,OAAM;;AAEpC,wBAAgB;UACZ,OAAO,OAAM;UACb,OAAO;UACP,QAAQ,OAAM;;AAElB,+BAAuB;UACnB,YAAuB;UAAa,GAAG,YAAuB;;AAElE,wBAAgB,IAAI,qBAAqB,gBAAgB;AACzD,8CAAsC;AACtC,uBAAe,KAAK,gBAAgB,SAAS,CAAC,UAAU,OAAM,OAAO,MAAwB;AAC7F,eAAO,CAAE,QAAQ,OAAO,QAAQ,OAAO,YAAY,OAAO,OAAO;;MAErE;AACI,wBAAgB,KAAK,QAAQ,IAAI;AACjC,eAAQ,UAAU,OAAO,SAAU;AACnC,0BAAkB,aAAwB;AAC1C;AACA,YAAI;AACA,oBAAU,IAAI,0BAA0B;;AAGxC,oBAAU,IAAI,oBAAoB;;AAEtC,8CAAsC;AACtC,oBAAY,KAAK,gBAAgB,SAAS,CAAC,CAAE,OAAO,WAAW,OAAO,UAAW,OAAO,MAAwB;AAChH,eAAO,CAAE,OAAO,OAAO,QAAQ,IAAI;;MAEvC,2FAA2F;AACvF,uBAAe,KAAK,eAAe,QAAQ,aAAa;AACxD,wBAAgB,KAAK,QAAQ,IAAI,OAAO;AACxC,YAAI,QAAQ;AACR,kBAAQ,WAAW;;AAEvB,YAAI,QAAQ,qBAAqB,cAAuB;AACpD,6BAAmB,iBAA0B,QAAQ;AAKrD,kBAAQ,WAAW,WAAW,IAAI,OAAK,IAAI;;AAE/C,YAAI,QAAQ,eAAe;AACvB,kBAAQ,QAAQ,QAAQ;;AAE5B,YAAI,eAAmB,OAAO,WAAW;AAGrC,kBAAQ,SACJ,wBAA4B,OAAO,OAAO;AAC9C,iBAAO;;AAEX,8BAAsB;AACtB,2BAAmB,OAAO,IAAI;AAC1B,cAAI,OAAM,UAAU;AAChB,kBAAM,IAAI,MAAM;;AAIpB,wBAAc,KAAK,QAAQ,IAAI,OAAM;AACrC,cAAI,QAAQ,WAAW;AACnB,gBAAI,CAAC,QAAQ,gBACT,eAAmB,OAAM,UACrB,OAAM,UAAU;AAMpB,qBAAO;gBACH,OAAO,OAAM;gBACb,SAAS;gBACT,WAAW;gBACX,eAAe,QAAQ;;;AAK/B,gBAAI,QAAQ;AACR,sBAAQ,WAAW;AACnB,sBAAQ,QAAQ,OAAM;;qBAGrB,CAAC,CAAC,QAAQ,aAAa,CAAC,CAAC,QAAQ;AACtC,qBAAQ,QAAQ,WAAW,KAAK,aAAa,UACzC,KAAK,WAAW;AACpB,0BAAc,KAAK;AACnB,sBAAU,KAAK,QAAQ,IAAI,OAAM;qBAE5B,QAAQ,YACb,CAAC,cAAyB,QAAQ,OAAO,OAAM;AAO/C,+BAAmB;AACnB,gCAAoB,OAAM;AAC1B,mBAAM,QAAQ,QAAQ;AACtB,qBAAQ,KAAK,cAAc,QAAO;AAClC,0BAAc,KAAK;AACnB,sBAAU,KAAK,QAAQ,IAAI,OAAM;AACjC,uBAAW,QAAQ;;AAEvB,eAAK,YAAY,OAAM;AACvB,iBAAO,CAAE,OAAO,OAAM,OAAO,SAAS,WAAW;;AAErD,aAAK,YAAY,OAAO;AACxB,2BAAmB,CAAE,OAAO,OAAO,OAAO,SAAS,SAAS,WAAW;AACvE,oBAAY,cAAyB,SAAS,YAAY;AAC1D,uBAAe,KAAK,iBAAiB,KAAK;AACtC,iBAAO,eAA0B,KAAK,OAAO,SAAS,YAAY;;AAEtE,kCAA0B,KAAK,gBAAgB;AAC/C;AACA,YAAI;AACA,kBAAQ,KAAK;;AAEjB,mBAAsB,KAAK,OAAO,QAAQ,YAAY,YAAY;AAClE,sBAAc,QAAQ,UAAQ,KAAK,8BAA8B;AACjE,YAAI;AACA,kBAAQ,KAAK,SAAS;AACtB,eAAK,aAAa,KAAK,CAAE,MAAM,QAAQ,YAAY,MAAM,OAAO,KAAK,aAAa;;AAEtF,YAAI,CAAC,OAAM,QAAQ,0BAA0B,QAAQ,YACjD,kCAAkC;AAClC,2BAAiB,KAAK,aAAa;AACnC,eAAK,8BAA8B;AACnC,iBAAO;;AAEX,eAAO;;MAEX,yFAAyF;AACrF,sBAAc,eAAe,OAAO,GAAG;AACvC,wBAAgB,KAAK,gBAAgB,SAAS,QAAQ,aAAa,aAAa;AAChF,eAAO,WAAS,qBAAqB,QAAQ,QAAQ,QAAQ,OAAO,QAAQ;;MAEhF;AACI,YAAI,CAAE,QAAO,KAAK;AACd,eAAK,YAAY,OAAO;;AAE5B,eAAO,KAAK,
YAAY;;MAE5B;AACI,eAAO,KAAK;;MAEhB;AACI,YAAI,KAAK;AACL;;AAIJ,YAAI,CAAC,OAAM,QAAQ;AACf,0BAAgB,OAAO,KAAK,KAAK;AACjC,kBAAQ,QAAQ;AACZ,iBAAK,MAAM,cAAc,KAAK,YAAY,KAAK;AAC/C,mBAAO,KAAK,YAAY;;;AAGhC,aAAK,eAAe;AACpB,YAAI,KAAK,UAAU,QACd,QAAQ,sBAAuB,eAC5B,KAAK,kBAAkB;AAC3B,eAAK,OAAO;;AAGZ,eAAK,SAAS;;AAElB,YAAI,KAAK;AACL,eAAK,MAAM,UAAU;AACrB,eAAK,MAAM;;AAEf,aAAK,WAAW;;MAEpB;AACI,YAAI,KAAK,uBAAuB;AAC5B,eAAK,sBAAsB,KAAK;AAC5B,gBAAI,CAAC,OAAM,IAAI;AAGX,gCAAkB,OAAM,QAAQ;AAChC,qBAAM,IAAI,SAAS;AACnB,0CAA4B,KAAK,IAAI,QAAO,OAAO,WAAW;AAC9D,qBAAM,IAAI,SAAS;AACnB,kBAAI,sBAAsB;AACtB,uBAAO;;;AAGf,mBAAO;;;AAGf,eAAO,KAAK;;MAGhB;AACI,eAAO,KAAK,qBAAqB,KAAK,oBAAkB;;MAE5D;AACI,wBAAgB,KAAK,QAAQ,IAAI;AACjC,eAAQ,OAAO,OAAO,QAAQ,SAAS,OAAO,YAAa;AAC3D,YAAI,WAAW;AAEX;;AAEJ,kCAA0B,KAAK,gBAAgB;AAC/C;AACA,YAAI;AACA,kBAAQ;;AAEZ,uBAAe,QAAQ;AACvB,YAAI,YAAY;AACZ,qBAAW,gCAA2C,OAAO;AAC7D,kBAAQ,WAAW;;AAEvB,YAAI,UAAU;AACV,4BAAkB,aAAwB;AAC1C;AACA,sBAAY,SAAS,aAAa,SAAS;AAC3C,8BAAoB,kBAAkB;AACtC,cAAI;AACA,aAAC,OAAO,UAAU,uCAAgD,SAAS,IAAI,SAAS;AACxF,sBAAU,IAAI,0BAA0B,WAAW,CAAC,QAAQ,QAAQ;;AAGpE,sBACI,IAAI,oBAAoB,WAAW,CAAC,QAAQ,QAAQ;;AAE5D,uCAA6B,KAAK,eAAe,CAAC,QAAQ,QAAQ;AAClE,cAAI;AACA,iBAAK,QAAQ,IAAI,qBAAqB,QAAQ,QAC1C,aAAa;;AAGjB,iBAAK,QAAQ,IAAI,qBAAqB,QAAQ,QAC1C,aAAa;;AAErB,eAAK,MAAM,2BAA2B,KAAK,WAAW,qBAAqB,SAAS,OAAO,QAAQ;AAGnG,wCAA8B;AAC9B,sCAA4B,KAAK,gBAAgB,SAAS,CAAC,uBAAuB,OAAO,MAAM;AAE/F,gCAAsB,KAAK,QAAQ,IAAI,oBAAoB;AAC3D,kBAAQ,UAAU,cAAc;AAChC,kBAAQ,WAAW,cAAc;AACjC,kBAAQ,WAAW,cAAc;AACjC,kBAAQ,QAAQ,cAAc;AAC9B,eAAK,8BAA8B;AACnC,eAAK,QAAQ,OAAO,oBAAoB;AAExC,kBAAQ,SAAS;AACjB,cAAI;AACA,iBAAK,gBAAgB,SAAa;;;AAItC,6BAAmB,KAAK,eAAe,UAAU,OAAO,OAAO;AAC/D,kBAAQ,UAAU;;;MAG1B;AACI,wBAAgB,KAAK,QAAQ,IAAI;AACjC,eAAQ,SAAU;AAClB,aAAK,eAAe;AACpB,YAAI,iBAAiB;AACjB,kBAAQ,SAAS,oBAAoB,eAAe;;AAExD,eAAO,QAAQ;;MAEnB;AACI,aAAK,iBAAiB,KAAK,aAAa,UAAU;AAClD,YAAI,CAAC,KAAK,qBACN,KAAK,gBAAgB,KAAK,qBAAqB,OAAO;AACtD,qBAAY,MAAK,gBAAgB,OAAO,MAAM,QAAQ;AACtD,eAAK,oBAAoB;AACzB,kBAAQ,KAAK,6BAA6B;;AAG9C,eAAO,KAAK,eAAe,eAAe,UAAU,SAAS;;MAEjE;AACI,eAAO,MAAM,KAAK,MAAM,KAAK,iBAAqB;;MAEtD;AACI,YAAI,KAAK,mBAAmB;AACxB;AACI,mBAAO;;AAGP,gBAAI,OAAM,QAAQ;AACd,oBAAM,IAAI,MAAM;;;;AAI5B,eAAO;;;AAGf;AACI,UAAI,UAAU,aAAa,UAAU;AACjC,eAAO;iBAEF,UAAU,WAAW,UAAU;AACpC,uBAAgB,UAAU,UAAW,IAAI,WAAW,EAAE,UAClD,IAAI,WAAW,EAAE;AACrB,qBAAa,GAAG,IAAI,OAAO,QAAQ,EAAE;AACjC,iBAAO,KAAK,KAAK,MAAM,EAAE;;AAE7B,eAAO;;AAGP,cAAM,IAAI,MAAM,iBAAiB;;;ACjgEzC;AAEA,sBAAgB;ACFhB;;;;;;;;;;;;;;;;AA6BO;AACH,aAAM,IAAI,4BAA4B;;AC9B1C;;;;;;;;;;;;;;;;AAoBA,QAAI;AACA,uBAAgB,SAAS,MAAM,IAAI,oBAAoB;;AAMpD,kBAAc,CAAE;AC3BvB;;;;;;;;;;;;;;;;AAiBO;AACH,aAAQ,QAAQ,qBAAY;AAC5B,aAAQ,KAAM;AACd,eAAQ,OAAO,EAAE;AACjB,aAAO,CAAE,QAAQ,EAAE,QAAQ,OAAO,EAAE,OAAO,OAAO,EAAE;;AAEjD,6BAAuB;MAC1B,YAAY;MACZ,aAAa;MACb,YAAY;;AC1BhB;;;;;;;;;;;;;;;;AA4BO;AACH,aAAQ,QAAQ,qBAAY;AAC5B,aAAQ,aAAM,eAAS;AACvB,0BAAoB,SAAQ,eAAe,MAAK,OAAO;AACvD,uBAAgB,SAAQ,QAAQ,IAAI,YAAY;AAChD,6BAAuB,WAAS,CAAE,QAAQ,CAAE,GAAG,QAAQ,SAAA;AACvD,uBAAiB,SAAQ,QAAQ,IAAI,eAAe;AACpD,eAAS;AACT,6BAAuB,WAAS,CAAE,QAAQ,CAAE,GAAG,QAAQ,SAAA;AACvD,uBAAiB,SAAQ,QAAQ,IAAI,eAAe;AACpD,eAAS;AACT,eAAQ,qBAAqB,CAAE,MAAM,gBAAgB,MAAM;AAC3D,aAAO;;AAEJ,4BAAsB;MACzB,YAAY;MACZ,aAAa;MACb,YAAY;;AC7ChB;;;;;;;;;;;;;;;;AAqBO,oCAAgC;AAChC,qCAAiC;;;;AAIjC,4CAAwC;;;;;;AAUxC;AACH,aAAO,EAAG,QAAQ;AACd,eAAQ,KAAM;AACd,6BAAqB;AACrB,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,aAAa,gBAAgB,SAAS,CAAC,IAAI,EAAE;;;AAarD,iCAA4B,WAAW,iBAAiB,mBAAmB,OAAO,kBAAkB,OAAO,eAAe;AAC7H,aAAO,EAAG,QAAQ;AACd,eAAQ,GAAG,KAAM;AACjB,6BAAqB;AACrB,YAAI,mBAAmB,EAAE,UAAU;AAC/B,wBAAc,aAAa,QAAQ,IAAI,EAAE;AACzC,wBAAc,aAAa,QAAQ,IAAI,EAAE;AACzC,iCAAqB;YACjB,CAAC,MAAM,mBAAmB,MAAM,MAA
M,mBAAmB;YACzD,CAAC,MAAM,mBAAmB,MAAM,MAAM,mBAAmB;YAC3D,IAAI;AACF,mCAAuB;AACvB,4BAAgB;cACZ,QAAQ,MAAM;cACd,OAAO,MAAM;cACb,OAAO,EAAE;;AAEb,4BAAgB;cACZ,QAAQ,MAAM;cACd,OAAO,MAAM;cACb,OAAO,EAAE;;AAEb,6BAAgB,IAAI,gBAAgB,WAAW,EAAE,OAAO,EAAE;AAC1D,mBAAO,aAAa,gBAAgB,UAAS,CAAC,SAAS,UAAU,YAAW,MAAM,OAAO,MAAM;;AAEnG,gCAAsB,UAAQ,CAAE,QAAQ,CAAE,MAAA,OAAM,MAAA,QAAQ,SAAS;AACjE,uBAAa,8BAA8B;AAC3C,uBAAa,8BAA8B;AAE3C,iBAAO;;AAEX,uBAAe,SAAS,YAAW,EAAE,OAAO,EAAE;AAC9C,YAAI,aAAa,mBAAmB,CAAC,GAAG,OAAO,iBAAiB;AAC5D,wBAAc,aAAa,QAAQ,IAAI,EAAE;AACzC,wBAAc,aAAa,QAAQ,IAAI,EAAE;AACzC,wCAA8B,cAAc,EAAE,OAAO,EAAE,OAAO,MAAM,QAAQ,MAAM,QAAQ;AAC1F,sBAAY,aAAa,eAAe,UAAU;AAClD,0BAAgB,aAAa,QAAQ,IAAI,IAAI;AAC7C,kBAAQ,SAAS;AACjB,iBAAO;;AAEX,uCAA+B,OAAM,QAAQ,mCACzC,mBAAmB;AACvB;AACA,YAAI;AACA,oBAAU,IAAI,sBAAsB,iBAAiB,EAAE,OAAO,EAAE,OAAO;;AAGvE,oBAAU,IAAI,gBAAgB,WAAW,EAAE,OAAO,EAAE;;AAExD,eAAO,aAAa,gBAAgB,SAAS,CAAC,GAAG,IAAI;;;ACxG7D;;;;;;;;;;;;;;;;AAmBA,gBAAY;AACL,0BAAsB,mBAAiB;MAC1C,WAAW;MACX,iBAAiB;MACjB,iBAAiB;MACjB,eAAe;;AAEZ,wBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY;;AC7BhB;;;;;;;;;;;;;;;;AAkBA,kBAAc,2BAA2B;;;AAGzC,yBAAqB;;;MAIjB,kCAAkC;;;AAG/B,oBAAc,mBAAiB,CAAE,WAAW,OAAO,iBAAiB;AACpE,wBAAoB;MACvB,YAAY;MACZ,aAAa;MACb,YAAY;;AChChB;;;;;;;;;;;;;;;;AAoBO;AACH,aAAQ,QAAQ,mBAAS,SAAU;AACnC,aAAQ,KAAM;AACd,yBAAiB,GAAG;AACpB,aAAQ,YAAY,SAAS,WAAK,mBAAoB;AACtD,wBAAkB;AAClB,cAAY,gCAA4C,SAAS,YAAY,MAAM,wEAChE,0BAA0B;AAC7C,uBAAiB,mBAA+B,EAAE,OAAO,YAAY,SAAS,WAAW,MAAK;AAC9F,UAAI,SAAS,gBAAgB,KAAK,SAAS,iBAAiB,KACxD,aAAiB,SAAS,SAAS,SAAS;AAC5C,eAAO,WAAS,CAAE,QAAQ,CAAE,IAAK,SAAA;;AAErC,6BAAuB,IAAI,cAAc,UAAU,OAAO;AAC1D,aAAO,SAAQ,gBAAgB,gBAAgB,CAAC,IAAI;;AAEjD,4BAAsB;MACzB,YAAY;MACZ,aAAa;MACb,YAAY;;ACvChB;;;;;;;;;;;;;;;;AAmBO;AACH,aAAQ,QAAQ,mBAAS,SAAU;AACnC,aAAQ,IAAI,iBAAU;AACtB,gBAAU;AACV,yBAAiB,CAAC,IAAI,SAAQ;AAC9B,aAAQ,YAAY,SAAS,aAAQ;AACrC,uBAAiB,mBAA+B,EAAE,OAAO,YAAY,SAAS,GAAmB;AACjG,qCAA+B,IAAI,yBAAyB;AAC5D,aAAO,SAAQ,gBAAgB,wBAAwB,CAAC,KAAK,EAAE;;AAE5D,oCAA8B;MACjC,YAAY;MACZ,aAAa;MACb,YAAY;;AChChB;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,cAAc;AACnB,aAAK,gBAAgB,CAAC,KAAK,QAAQ;AACnC,oCAAwC,QAAQ;AAChD,oCAAwC,QAAQ;AAChD,4BAAoB;AACpB,YAAI,eAAe;AACf,sCAAwC,QAAQ;AAChD,eAAK,cAAc,KAAK;AACxB,0BAAgB;;AAEpB,2BAAmB;AACnB,YAAI,cAAc;AACd,sCAAwC,QAAQ;AAChD,eAAK,cAAc,KAAK;AACxB,yBAAe;;AAEnB,aAAK,cAAc;AACnB,aAAK,WAAW;;;;;yBAKC;wBACD;2DACmC;;;;;;AC3C3D;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,aAAK,gBAAgB,CAAC,KAAK,QAAQ;AACnC,oCAAwC,QAAQ;AAChD,oCAAwC,QAAQ;AAChD,4BAAoB;AACpB,YAAI,eAAe;AACf,sCAAwC,QAAQ;AAChD,eAAK,cAAc,KAAK;AACxB,0BAAgB;;AAEpB,2BAAmB;AACnB,YAAI,cAAc;AACd,sCAAwC,QAAQ;AAChD,eAAK,cAAc,KAAK;AACxB,yBAAe;;AAEnB,aAAK,cAAc;AACnB,aAAK,WAAW;;wBAEA;uBACD;;;;;;yDAMkC;;;;;;;AC9CzD;;;;;;;;;;;;;;;;AAmBO,wBAAkB,EAAG,QAAQ,mBAAS;AACzC,aAAQ,GAAG,aAAM,qBAAU,QAAQ,iBAAU;AAC7C,cAAY,MAAK,MAAM,WAAW,UAAS,MAAM,QAAQ,MAAM;AAE/D,cAAY,UAAU,QAAQ,MAAK,MAAM,WAAW,OAAO,MAAM,QAAQ,MAAM;AAE/E,cAAY,UAAS,QAAQ,MAAK,MAAM,WAAW,OAAM,MAAM,QAAQ,MAAM;AAE7E,WAAM,mBAAoB;AAC1B,UAAI,mBAAmB;AACnB,0BAAkB;;AAEtB,0BAAoB,CAAC,GAAG,OAAM;AAC9B,wBAAkB;AAClB,UAAI,UAAU;AACV,sBAAc,OAAO;AACrB,oBAAY,KAAK;;AAErB,uBAAiB;AACjB,UAAI,UAAS;AACT,qBAAa,OAAM;AACnB,oBAAY,KAAK;;AAErB,sBAAgB,OAAM,QAAQ,8BAC1B,IAAI,uBAAuB,EAAE,OAAO,MAAK,OAAO,UAAS,OAAO,aAAa,YAAY,mBACzF,IAAI,iBAAiB,EAAE,OAAO,MAAK,OAAO,UAAS,OAAO,aAAa,YAAY;AACvF,qBAAe,SAAQ,gBAAgB,SAAS,aAAa,YAAY,GAAG;AAC5E,aAAO;;AAEJ,8BAAwB;MAC3B,YAAY;MACZ,aAAa;MACb,YAAY;;ACnDhB;;;;;;;;;;;;;;;;AAkBA,wBAAkB;AACX,uBAAiB,mBAAiB,CAAE,WAAW,aAAW,OAAO;AACjE,6BAAuB;MAC1B,YAAY;MACZ,aAAa;MACb,YAAY;;ACvBhB;;;;;;;;;;;;;;;;AAkBO;AACH,aAAQ,QAAQ,qBAAY;AAC5B,aAAQ,iBAAU;AAClB,wBAAkB,SAAQ,QAAQ,IAAI,OAAM;
AAC5C,aAAO,WAAS,CAAE,QAAQ,CAAE,GAAG,UAAU,mBAAmB,OAAQ,SAAA;;AAEjE,yBAAmB;MACtB,YAAY;MACZ,aAAa;MACb,YAAY;;AC3BhB;;;;;;;;;;;;;;;;AAiBA,mBAAe;AACR;AACH,sBAAgB,IAAI,eAAe,OAAM,OAAO;AAChD,qBAAe,SAAQ,gBAAgB,SAAS,CAAC,SAAQ;AACzD,aAAO,CAAE,QAAQ,OAAO,QAAQ,OAAO,OAAO,OAAO,OAAO,OAAO;;ACrBvE;;;;;;;;;;;;;;;;AAuBO;AACH,aAAQ,QAAQ,mBAAS,SAAU;AACnC,aAAQ,KAAM;AACd,aAAQ,SAAU;AAElB,UAAI,UAAU;AACV,YAAI,EAAE,UAAU;AACZ,iBAAO,WAAS,CAAE,QAAQ,CAAE,IAAK,SAAA;;AAGrC,4BAAoB,OAAS,EAAE;AAC/B,uBAAe,OAAK,CAAE,QAAQ,CAAE,IAAK,SAAA,UAAS,OAAO,CAAE,OAAO;AAC9D,uBAAe,UAAQ,CAAE,QAAQ,CAAE,MAAM,QAAQ,MAAM,cAAe,SAAA;AACtE,oBAAY;AACZ,iBAAQ,8BAA8B;AACtC,eAAO;;AAGX,UAAI,EAAE,UAAU;AACZ,yBAAiB,OAAK,CAAE,QAAQ,CAAE,OAAO,IAAK,SAAA;AAC9C,uBAAe,OAAK,CAAE,QAAQ,CAAE,GAAG,WAAY,SAAA,UAAS,OAAO,CAAE;AACjE,iBAAQ,8BAA8B;AACtC,eAAO;;AAEX,UAAI,CAAC,iBAAqB,EAAE,OAAO;AAG/B,uBAAe,WAAS,CAAE,QAAQ,CAAE,IAAK,SAAA;AACzC,eAAO,CAAE,QAAQ,OAAO,QAAQ,OAAO,OAAO,OAAO;;AAEzD,UAAI,UAAU;AACV,eAAO,IAAI,GAAG;;AAElB,UAAI,UAAU;AACV,gCAAwB,SAAQ,eAAe,IAAI,QAAQ,wBAA4B,QAAQ;AAC/F,6BAAqB,CAAE,GAAG,GAAG,GAAG;AAChC,uBAAe,WAAS,CAAE,QAAQ,cAAc,SAAA;AAChD,iBAAQ,8BAA8B;AACtC,eAAO;;AAEX,YAAM,IAAI,MAAM,iCAAiC,EAAE,YAAY;;AAE5D,yBAAmB;MACtB,YAAY;MACZ,aAAa;MACb,YAAY;;ACpEhB;;;;;;;;;;;;;;;;;MAmBI;AACI,aAAK,cAAc;AACnB,aAAK,cAAc,kBAA6B,QAAQ;AACxD,aAAK,gBAAgB,OAAO,IAAI,UAAU,IAAI;AAC9C,wBAAgB,IAAI,MAAM,OAAO,SAAS;AAC1C,gBAAQ,KAAK,OAAO,GAAG;AACvB,qBAAa,GAAG,IAAI,QAAQ,QAAQ;AAChC,kBAAQ,KAAK,QAAQ,IAAI,KAAK,OAAO,GAAG;;AAE5C,yBAAiB,CAAC,YAAY,QAAQ;AACtC,qBAAa,GAAG,IAAI,QAAQ,QAAQ;AAChC,wBAAc,QAAQ,IAAI;AAC1B,mBAAS,KAAK,iBAAiB,QAAQ,qBAClB,YAAY;;AAErC,0BAAkB,QAAQ;AAC1B,0BAAkB,QAAQ,QAAQ,SAAS;AAC3C,iBAAS,KAAK,sBAAsB,oBAAoB;AACxD,aAAK,WAAW;;;;;;UAMd,SAAS,KAAK;;;;;AC3CxB;;;;;;;;;;;;;;;;;MAoBI;AACI,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,aAAK,cAAc;AACnB,aAAK,cAAc,kBAA6B,QAAQ;AACxD,sBAAc,KAAK;AACnB,qBAAa,MAAM;AACnB,sBAAc,kBAAkB;AAChC,wBAAe,YAAY,UAAU;AACrC,yBAAiB,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK,MAAM,GAAG;AACzD,aAAK,gBAAgB,OAAO,IAAI,UAAU,IAAI;AAC9C,wBAAgB,IAAI,MAAM,OAAO,SAAS;AAC1C,gBAAQ,KAAK,OAAO,GAAG;AACvB,qBAAa,GAAG,IAAI,QAAQ,QAAQ;AAChC,kBAAQ,KAAK,QAAQ,IAAI,KAAK,OAAO,GAAG;;AAE5C,wBAAgB,SAAS;AACzB,6BAAqB,SAAS,MAAM;AACpC,4BAAoB,SAAS;AAC7B,8BAAsB,OAAO,aAAa,QAAQ;;oBAEtC,sBAAsB,aAAa;;AAE/C,qBAAa,GAAG,IAAI,QAAQ,QAAQ;AAChC,yBAAc,QAAQ,IAAI;AAK1B,6BAAmB;cACjB,aAAa,QAAQ,UAAU,cAAc,QAAQ,IAAI;;kBAErD,KAAK,gBAAgB,UAAU,SAAS;mBACvC,gBAAgB,cAAc,SAAS;;;AAGlD,0BAAkB,QAAQ;AAC1B,sBAAc,QAAQ,QAAQ,SAAS;AACvC,2BAAmB;;gBAEX,aAAa,gBAAgB,UAAU,SAAS;iBAC/C,gBAAgB,cAAc,SAAS;AAChD,aAAK,WAAW;uBACD,SAAS,IAAI,OAAK,SAAS;UACxC;;;;UAIA;sCAC4B;;UAE5B,QAAO,OAAO,QAAQ,QAAO,OAAO;cAChC,QAAO,OAAO,QAAQ,MAAM,OAAO;gCACjB;;;UAGtB,QAAO,OAAO,QAAQ,QAAO,OAAO;cAChC,QAAO,OAAO,QAAQ,MAAM,OAAO;gCACjB;;;UAGtB,QAAO,OAAO,QAAQ,QAAO,OAAO;cAChC,QAAO,OAAO,QAAQ,MAAM,OAAO;cACnC,QAAO,OAAO,QAAQ,MAAM,OAAO;gCACjB;;;;;;;AAkBhC;AACI,yBAAmB,SAAS,QAAQ;AACpC,kBAAY,SAAS,IAAI;AACrB,YAAI,QAAQ;AACR,iBAAO,GAAG,OAAO;;AAGjB,iBAAO;;;AAGf,aAAO,IAAI;;AChHf;;;;;;;;;;;;;;;;AAkBO;AACH,aAAQ,QAAQ,qBAAY;AAC5B,aAAQ,iBAAU;AAClB,wBAAkB,SAAQ,QAAQ,IAAI,OAAM;AAC5C,aAAO,WAAS,CAAE,QAAQ,CAAE,GAAG,UAAU,mBAAmB,OAAQ,SAAA;;AAEjE,yBAAmB;MACtB,YAAY;MACZ,aAAa;MACb,YAAY;;AC3BhB;;;;;;;;;;;;;;;;AAkBO;AACH,2BAAqB;QAAC,YAAY,OAAM;QACpC,GAAG,YAAY,OAAM;;AACzB,sBAAgB;QACZ,OAAO,OAAM;QACb,OAAO;QACP,QAAQ,OAAM;;AAElB,6BAAuB;QAAC,YAAY;QAChC,GAAG,YAAY;;AACnB,sBAAgB,IAAI,qBAAqB,gBAAgB;AACzD,4CAAsC;AACtC,qBAAe,SAAQ,gBAAgB,SAAS,CAAC,UAAU,OAAM,OAAO,MAAwB;AAChG,aAAO,CAAE,QAAQ,OAAO,QAAQ,OAAO,YAAY,OAAO,OAAO;;AC/BrE;;;;;;;;;;;;;;;;AAmBO;AACH,aAAQ,QAAQ,mBAAS,SAAU;AACnC,aAAQ,KAAM;AACd,aAAQ,SAAU;AAClB,2BAAqB;AACrB,oBAAc,eAAmB,EAAE;AACnC,qBAAe,wBAA4B,OAAO;
AAClD,qBAAe,eAAmB;AAClC,cAAY,UAAU,QAAQ,MAAM,kBAAkB,eAAe,sCACvD,EAAE,cAAc;AAE9B,uBAAiB,aAAa,QAAQ,IAAI,EAAE;AAC5C,UAAI,SAAS,YAAY,CAAC,cAAc,EAAE,OAAO,WAC7C,CAAE,UAAS,YAAY,QAAQ,cAAc,SAAS,OAAO;AAC7D,eAAO,cAAc,GAAG,QAAQ;;AAEpC,mBAAa,OAAO,EAAE;AACtB,aAAO,CAAE,QAAQ,EAAE,QAAQ,OAAO,QAAQ,OAAO,EAAE;;AAEhD,4BAAsB;MACzB,YAAY;MACZ,aAAa;MACb,YAAY;;ACzChB;;;;;;;;;;;;;;;;AAuBO;AACH,oBAAc,OAAO,GAAG;AACxB,UAAI,UAAU;AACV,sBAAc,OAAO,IAAI,OAAO,OAAK,CAAE,QAAQ,CAAE,OAAO,IAAK,SAAA;AAC7D,sBAAc,OAAO,IAAI,OAAO,OAAK,CAAE,QAAQ,CAAE,OAAO,IAAK,SAAA;AAC7D,6BAAqB,WAAW,OAAO,MAAM;AAC7C,6BAAqB,WAAW,OAAO,MAAM;AAC7C,wBAAe,UAAQ,CAAE,QAAQ,CAAE,MAAM,cAAc,MAAM,eAAgB,SAAA;AAC7E,cAAM,QAAQ,OAAK,SAAQ,8BAA8B;AACzD,cAAM,QAAQ,OAAK,SAAQ,8BAA8B;AACzD,iBAAQ,8BAA8B;AACtC,iBAAQ,8BAA8B;AACtC,eAAO;;AAEX,UAAI,OAAO,SAAS,OAAM,UAAU;AAChC,yBAAiB,KAAK,MAAM,OAAO,SAAS;AAC5C,yBAAiB,WAAW,OAAO,MAAM,GAAG,WAAW,MAAM;AAC7D,0BAAkB,WAAW,OAAO,MAAM,WAAW,MAAM;AAC3D,wBAAe,WAAW,CAAC,UAAU,YAAY,MAAM;AACvD,iBAAQ,8BAA8B;AACtC,iBAAQ,8BAA8B;AACtC,eAAO;;AAEX,UAAI,OAAM,QAAQ,kCACd,OAAO,GAAG,MAAM,SAAS;AACzB,yBAAgB,IAAI,oBAAoB,OAAO,IAAI,OAAK,EAAE,QAAQ;AAClE,eAAO,SAAQ,gBAAgB,UAAS,QAAQ;;AASpD,uBAAiB,kBAA6B,OAAO,IAAI,OAAK,EAAE,QAAQ;AACxE,wBAAkB,OAAO,IAAI,OAAK,UAAQ;QACtC,QAAQ,CAAE;QACV,OAAO,CAAE,OAAO,CAAC,IAAI,eAAmB,EAAE,MAAM,MAAM;QACtD,SAAA;;AAEJ,sBAAgB,IAAI,cAAc,UAAU,IAAI,OAAK,EAAE;AACvD,qBAAe,SAAQ,gBAAgB,SAAS,WAAW;AAC3D,gBAAU,QAAQ,OAAK,SAAQ,8BAA8B;AAC7D,6BAAuB,UAAQ,CAAE,QAAQ,CAAE,GAAG,SAAU,OAAO,CAAE,OAAO,WAAY,SAAA;AACpF,eAAQ,8BAA8B;AACtC,aAAO;;ACrEX;;;;;;;;;;;;;;;;AAkBO;AACH,aAAQ,QAAQ,mBAAS,SAAU;AACnC,aAAQ,QAAS;AACjB,oBAAc,gBAAoB,MAAM,OAAO,GAAG,OAAO;AACzD,uBAAiB,kBAA6B,OAAO,IAAI,OAAK,EAAE,QAAQ;AACxE,UAAI,eAAmB,cAAc;AACjC,eAAO,SAAQ,eAAe,UAAU,OAAO,GAAG,OAAO;;AAG7D,sBAAgB,OAAO,OAAO,OAAK,eAAmB,EAAE,SAAS;AACjE,UAAI,QAAQ,WAAW;AACnB,eAAO,QAAQ;;AAEnB,qBAAe,QAAQ,IAAI,OAAK,EAAE;AAClC,8BAAoC,QAAQ;AAC5C,aAAO,WAAW,SAAS,OAAO;;AAE/B,2BAAqB;MACxB,YAAY;MACZ,aAAa;MACb,YAAY;;ACtChB;;;;;;;;;;;;;;;;AAkBA,gBAAY,0BAA0B;;;AAG/B,kBAAY,kBAAgB;AAC5B,wBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY;;ACzBhB;;;;;;;;;;;;;;;;AAoBA,gBAAY;;;;;AAOZ,uBAAmB;;;;;;;;;;;;;;;;;;;AAmBZ,kBAAY,mBAAiB,CAAE,WAAW,KAAK,iBAAiB,YAAY,kBAAkB;AAC9F,wBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY;;AClDhB;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC,QAAQ;AAC9B,yBAAiB,WAAW;AAC5B,aAAK,cAAc;AACnB,0CAAkC,UAAU,SAAS,KAAK,OAAO,UAAU,KAAK;AAChF,kCAA0B,UAAU,GAAG,eAAe;AACtD;AACA,YAAI,cAAc;AACd,qBAAW;mBAEN,cAAc;AACnB,qBAAW;;AAGX,gBAAM,IAAI,MAAM,sDAAsD;;AAE1E,aAAK,WAAW;yCACiB;;;UAG/B;;;;kDAIwC;;;;;;8BAMpB;;;;;;;;;yDAS2B;;;;;;;;;;;;;ACxDzD;;;;;;;;;;;;;;;;AAoBO;AACH,oBAAc,SAAQ,QAAQ,IAAI,EAAE;AACpC,wBAAkB,eAAmB,EAAE;AAEvC,iCAA2B,EAAE,MAAM,EAAE,MAAM,SAAS;AACpD,oBAAc,YAAY;AAC1B,sBAAgB,UAAQ,CAAE,QAAQ,CAAE,IAAK,SAAA,UAAS,OAAO,CAAE,OAAO,CAAC,OAAO;AAC1E,qBAAe,QAAQ;AACvB,0BAAoB,IAAI,WAAW,QAAQ,QAAQ;AACnD,0BAAoB,IAAI,WAAW,QAAQ,QAAQ;AACnD,qBAAe;QACX;UACI,QAAQ,MAAM,mBAAmB,KAAK;UACtC,OAAO,MAAM,mBAAmB,KAAK;UACrC,OAAO;;QAEX;UACI,QAAQ,MAAM,mBAAmB,KAAK;UACtC,OAAO,MAAM,mBAAmB,KAAK;UACrC,OAAO;;;AAGf,uBAAiB,SAAQ,gBAAgB,aAAa,QAAQ;AAC9D,uBAAiB,SAAQ,gBAAgB,aAAa,QAAQ;AAC9D,4BAAsB,UAAQ,CAAE,QAAQ,CAAE,MAAM,UAAU,MAAM,WAAY,SAAA;AAC5E,eAAQ,8BAA8B;AACtC,eAAQ,8BAA8B;AACtC,oCAA8B,UAAQ,CAAE,QAAQ,CAAE,GAAG,gBAAiB,SAAA,UAAS,OAAO,CAAE,OAAO,EAAE;AACjG,eAAQ,8BAA8B;AACtC,aAAO;;ACjDX;;;;;;;;;;;;;;;;AAkBO;AACH,aAAQ,QAAQ,qBAAY;AAC5B,aAAQ,iBAAU;AAClB,aAAO,UAAQ,QAAO,OAAqB;;AAExC,wBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY;;AC1BhB;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc;AACnB,2BAAmB,WAAW;AAC9B,aAAK,cAAc;AACnB,aAAK,WAAW;;;;;yBAKC;;uCAEc;;;;;;;;;;AC7BvC;;;;;;;;;;;;;;;;AAkBO,kCAA4B;MAC/B,YAAY;MACZ,aAAa;M
ACb,YAAY,EAAG,QAAQ;AACnB,eAAQ,iBAAU;AAClB,6BAAqB;AACrB,wBAAgB,IAAI,qBAAqB,OAAM;AAC/C,uBAAe,aAAa,gBAAgB,SAAS,CAAC,SAAQ,OAAM;AACpE,eAAO;;;AC1Bf;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,gBAAgB,CAAC;AACtB,qBAAa;AACb,gCAAyB;AACzB,aAAK,cAAc;AACnB,aAAK,WAAW;;;;;;uDAM+B,YAAY;;wBAE3C,KAAK;;;;;;;;;;;;;;;;;AC/B7B;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,qBAAa;AACb,gCAAyB;AACzB,aAAK,cAAc;AACnB,aAAK,WAAW;;;;;;;;;;;;;;;8BAeM,YAAY;4BACd,KAAK;;;;;;;;;;;;;;;;UAgBvB,KAAK;;;;;ACzDf;;;;;;;;;;;;;;;;AAqBO,6BAAyB;MAC5B,YAAY;MACZ,aAAa;MACb,YAAY;;AAEhB;AACA;AACI,aAAQ,QAAQ,mBAAS,SAAU;AACnC,WAAM,UAAW;AACjB,aAAQ,eAAgB;AACxB,sBAAgB,OAAQ,qBAAsB,eAC1C,kBAAkB;AACtB,sBAAgB,OAAQ,qBAAsB,eAC1C,kBAAkB;AACtB,8BAAwB,UACpB;QACI,OAAO;QACP,OAAO;UAEX,CAAC,OAAO,OAAO,OAAO;AAC1B,uBAAiB,CAAC,QAAQ;AAC1B,uBAAiB,CAAC,QAAQ,OAAO;AACjC,UAAI,WAAW;AACX,YAAI,yBAAuB;AACvB,kCAAsB,SAAS,cAAc,UAAU,WAAW;;AAEtE,8BAAoB,OAAO,QAAQ;AACnC,8BAAoB,OAAO,SAAS;AACpC,8BAAoB,UAAU,QAAQ,GAAG,GAAG,OAAO;AACnD,iBAAS,sBAAoB;;AAEjC,8BAAwB,SAAQ,eAAe,UAAU;AAEzD,eAAQ,QAAQ,IAAI,gBAAgB,QAAQ,QAAQ,aAAa;AACjE,eAAQ,MAAM,yBAAyB,SAAQ,WAAW,gBAAgB,SAAS;AACnF,sBAAgB,OAAM,QAAQ,gBAC1B,IAAI,wBAAwB,YAC5B,IAAI,kBAAkB;AAC1B,kBAAY,SAAQ,gBAAgB,SAAS,CAAC,kBAAkB;AAChE,eAAQ,YAAY,gBAAgB;AACpC,aAAO;;AC7DX;;;;;;;;;;;;;;;;AAkBO;AACH,aAAQ,QAAQ,qBAAY;AAC5B,aAAQ,iBAAU;AAClB,aAAO,UAAQ,QAAO,MAAoB;;AAEvC,yBAAmB;MACtB,YAAY;MACZ,aAAa;MACb,YAAY;;AC1BhB;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,gBAAgB,CAAC;AACtB,eAAQ,YAAY,WAAW,QAAQ,WAAY;AACnD,aAAK,cAAc,CAAC,WAAW;AAC/B,sCAA8B,KAAK,MAAM,aAAa,KAAK;AAC3D,wCAAgC,aAAa;AAC7C,4BAAoB;AACpB,YAAI,WAAW;AACX,8BAAoB,IAAI;AACxB,0BAAgB,4BAA4B,OAAW,eAAe,YAAY,YAAY,KAC1F;;AAER,+BAAuB;AACvB,YAAI,SAAS,aAAa;AACtB,6BAAmB;oCACK;;;;;AAK5B,aAAK,WAAW;;;;UAId;;;;;;;;kCAQwB;;;;8BAIJ;;;;;;;;;YASlB;;;iCAGqB;cACnB,4BAA4B;;;YAG9B;qBACS,4BAA4B;;;;;YAKrC;qBACS,4BAA4B;;;;;;YAMrC;;;;;;;ACnFZ;;;;;;;;;;;;;;;;AAqBA;AACI,qBAAe;AACf,aAAO,OAAO,WAAW,KAAK,OAAO,OAAO,SAAS,GAAG,YAAY;AAChE,wBAAgB,OAAO,SAAS,OAAO,OAAO,SAAS,GAAG,UAAU,QAAQ;AAC5E,2BAAmB,0BAAsC;AACzD,eAAO,KAAK;UACR,QAAQ;UACR;UACA,SAAS,KAAK,KAAK,UAAU;;;AAGrC,aAAO;;AAEJ;AACH,8BAAwB,mBAAmB,EAAE;AAC7C,mBAAa;AACb,mBAAa,GAAG,IAAI,gBAAgB,QAAQ;AACxC,eAAQ,QAAQ,YAAY,WAAY,gBAAgB;AACxD;AACA;AACA,YAAI,kBAAkB;AAClB,oBAAU,MAAM,IACZ,IAAI,YAAY,CAAE,YAAY,QAAQ,WAAW,EAAE,MAAM,IAAI,UAAW,UACxE,IAAI,YAAY,CAAE,YAAY,QAAQ,WAAW,EAAE,MAAM,IAAI;;AAGjE,oBAAU,IAAI,cAAc,CAAE,YAAY,QAAQ,WAAW,EAAE,MAAM,IAAI,UAAW;;AAExF,yBAAiB;AACjB,iBAAS,SAAQ,gBAAgB,SAAS,CAAC,SAAS;AACpD,YAAI,eAAe,WAAW,EAAE;AAC5B,mBAAQ,8BAA8B;;;AAG9C,aAAO;;ACvDX;;;;;;;;;;;;;;;;AAmBO;AACH,qBAAe,eAAmB;AAClC,oBAAc,eAAmB,EAAE;AACnC,wBAAkB,QAAQ;AAC1B,4BAAsB,UAAQ,CAAE,QAAQ,CAAE,IAAK,OAAO,CAAE,OAAO,CAAC,WAAW,UAAW,SAAA;AACtF,sBAAgB,OAAO,eAAe,EAAE,OAAO,OAAO;AACtD,6BAAuB,UAAQ,CAAE,QAAQ,CAAE,GAAG,UAAW,OAAO,CAAE,OAAO,WAAY,SAAA;AACrF,eAAQ,8BAA8B;AACtC,eAAQ,8BAA8B;AACtC,aAAO;;AC5BX;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,gBAAgB,CAAC;AACtB,4BAAoB,IAAI,MAAM,OAAO;AACrC,qBAAa,GAAG,IAAI,YAAY,QAAQ;AACpC,sBAAY,KAAK,OAAO,OAAO;;AAEnC,aAAK,cAAc;AACnB,aAAK,OAAO,YAAY;AACxB,sBAAc,kBAAkB,KAAK;AACrC,yBAAiB,kBAAkB;AACnC,aAAK,WAAW;;QAEhB;uBACe;;;;;AAKvB;AACI,mBAAa,OAAO;AACpB,UAAI,OAAO;AACP,cAAM,MAAM,sBAAsB;;AAEtC,4BAAsB,CAAC,WAAW,WAAW,WAAW,WAAW,WAAW;AAC9E,6BAAuB,IAAI,MAAM;AACjC,mBAAa,GAAG,IAAI,OAAO,QAAQ;AAC/B,uBAAe,OAAO,MAAM,cAAc;;AAE9C,aAAO,eAAe;;AC9C1B;;;;;;;;;;;;;;;;;MAmBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,4BAAoB,IAAI,MAAM,OAAO;AACrC,qBAAa,GAAG,IAAI,YAAY,QAAQ;AACpC,sBAAY,KAAK,OAAO,OAAO;;AAEnC,aAAK,cAAc;AACnB,aAAK,OAAO,YAAY;AACxB,YAAI,KAAK,OAAO;AACZ,gBAAM,MAAM,6BAA6B,KAAK;;AAElD,sBAAc,kBAAkB,KAAK;AACrC,4
BAAoB,eAAe,MAAM,KAAK;AAC9C,8BAAsB,IAAI,MAAM,KAAK;AACrC,qBAAa,GAAG,IAAI,OAAO,QAAQ;AAC/B,wBAAc,OAAO,MAAM,YAAY;;AAE3C,0BAAkB,QAAQ,cAAc,MAAM,IAAI;AAClD,2BAAmB,KAAK,YAAY,KAAK,OAAO,QAAQ,YAAY,KAAK,OAAO;AAChF,qBAAa,mBAAmB,cAAc,YAAY;AAC1D,aAAK,WAAW;;QAEhB;;oBAEY;WACT;sBACW;;UAEZ,YAAY,KAAK,OAAO;aACrB,YAAY,KAAK,OAAO,QAAQ,YAAY,KAAK,OAAO;sBAC/C;aACT;wBACW;;;;;;;;ACrDxB;;;;;;;;;;;;;;;;AAoBO;AACH,sBAAgB,OAAM,QAAQ,iCAC1B,IAAI,uBAAuB,EAAE,OAAO,QACpC,IAAI,iBAAiB,EAAE,OAAO;AAClC,aAAO,SAAQ,gBAAgB,SAAS,CAAC,IAAI,EAAE;;ACxBnD;;;;;;;;;;;;;;;;AAqBO,wBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,OAAO;AAC1B,eAAQ,KAAM;AACd,eAAQ,kBAAkB,YAAa;AACvC,6BAAqB;AACrB,sBAAc,EAAE,MAAM;AACtB,yBAAiB,gBAAoB,kBAAkB,EAAE;AACzD,mBAAW;AACX,6BAAqB,oBAAgC,MAAM;AAC3D,qCAA6B,gBAAgB;AAC7C,mCAA2B,aAAa,mBAAmB,CAAC;AAC5D,uBAAe;AACf,YAAI;AACA,cAAI;AACA,6BAAiB,aAAa,QAAQ,IAAI,SAAS;AACnD,2BAAe,SAAS;AACxB,6BAAiB,IAAI,MAAM;AAC3B,yBAAa,GAAG,IAAI,SAAS,QAAQ;AACjC,uBAAS,KAAK,EAAE,MAAM,aAAa;;AAEvC,mCAAuB,iBAAiB,QAAQ,EAAE,OAAO,EAAE,OAAO,cAAc;AAChF,uBAAW,aAAa,eAAe,UAAU,EAAE;AACnD,iCAAqB,aAAa,QAAQ,IAAI,SAAS;AACvD,yBAAa,SAAS;;AAGtB,uBAAW,gBAAc,GAAG,cAAc;;AAE9C,iBAAO,kBAA8B,KAAK,QAAQ;;AAEtD,oCAAwC,OAAO,MAAM;AACrD,2CAAmC,2BAAuC,SAAS,OAAO;AAC1F,uBAAe;AACf,YAAI;AAEA,qBAAW,sBAAkC,aAAa;;AAE9D;AACA,YAAI;AACA,2BAAiB,aAAa,QAAQ,IAAI,SAAS;AACnD,yBAAe,SAAS;AACxB,4BAAkB,WAAW,QAAQ,eAAmB,cAAc,UAAU,EAAE;AAClF,gBAAM,aAAa,eAAe,UAAU,EAAE;AAC9C,0BAAgB,aAAa,QAAQ,IAAI,IAAI;AAC7C,kBAAQ,SAAS;;AAGjB,gBAAM,UAAQ,UAAU,aAAa,UAAU;;AAEnD,YAAI;AACA,uBAAa,8BAA8B;;AAE/C,eAAO;;;AC3Ef;;;;;;;;;;;;;;;;AAoBO;AACH,aAAQ,QAAQ,mBAAS,SAAU;AACnC,aAAQ,KAAM;AACd,yBAAiB,GAAG;AACpB,aAAQ,YAAY,SAAS,WAAK,mBAAoB;AACtD,wBAAkB;AAClB,cAAY,gCAA4C,SAAS,YAAY,MAAM,wEAChE,0BAA0B;AAC7C,uBAAiB,mBAA+B,EAAE,OAAO,YAAY,SAAS,WAAW,MAAK;AAC9F,UAAI,SAAS,gBAAgB,KAAK,SAAS,iBAAiB,KACxD,aAAiB,SAAS,SAAS,SAAS;AAC5C,eAAO,WAAS,CAAE,QAAQ,CAAE,IAAK,SAAA;;AAErC,6BAAuB,IAAI,cAAc,UAAU,OAAO;AAC1D,aAAO,SAAQ,gBAAgB,gBAAgB,CAAC,IAAI,EAAE;;AAEnD,4BAAsB;MACzB,YAAY;MACZ,aAAa;MACb,YAAY;;ACvChB;;;;;;;;;;;;;;;;AAoBO;AACH,aAAQ,QAAQ,mBAAS,SAAU;AACnC,aAAQ,IAAI,eAAO,UAAW;AAC9B,gBAAU;AACV,yBAAiB,CAAC,QAAO,SAAS;AAClC,aAAQ,YAAY,SAAS,WAAK,mBAAoB;AACtD,uBAAiB,mBAA+B,EAAE,OAAO,YAAY,SAAS,GAAmB,MAAK;AACtG,2BAAqB;AACrB,sCAAgC,IAAI,cAAc,UAAU,OAAO;AACnE,gCAAyB,SAAQ,gBAAgB,yBAAyB,CAAC,IAAI,EAAE;AACjF,qCAA+B,IAAI,yBAAyB;AAC5D,qBAAe,SAAQ,gBAAgB,wBAAwB,CAAC,IAAI,oBAAmB,EAAE;AACzF,eAAQ,8BAA8B;AACtC,aAAO;;AAEJ,oCAA8B;MACjC,YAAY;MACZ,aAAa;MACb,YAAY;;ACtChB;;;;;;;;;;;;;;;;AAiBO;AACH,oBAAc,IAAI,cAAc,UAAU,OAAO;AACjD,yBAAmB,SAAQ,gBAAgB,SAAS,CAAC,IAAI;AACzD,gBAAU,IAAI,cAAc,UAAU,OAAO,MAAM,MAAM;AACzD,0BAAoB,SAAQ,gBAAgB,SAAS,CAAC,IAAI;AAC1D,aAAO,CAAC,YAAY;;ACtBxB;;;;;;;;;;;;;;;;AAmBO,sCAAgC;MACnC,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,OAAO;AAC1B,eAAQ,KAAM;AACd,eAAQ,YAAY,SAAS,WAAK,uBAAwB;AAC1D,6BAAqB;AACrB,gBAAY,EAAE,MAAM,WAAW,GAAG,MAAM,uDAAuD,EAAE,MAAM;AACvG,0BAAkB,CAAC,GAAG;AACtB,gBAAY,gCAA4C,SAAS,YAAY,MAAM,wEAChE,0BAA0B;AAC7C,yBAAiB,mBAA+B,EAAE,OAAO,YAAY,SAAS,WAAW;AACzF,kCAA0B,wBAAsB,GAAG,qBAAqB,UAAU;AAClF,eAAO,CAAC,QAAQ;;;AChCxB;;;;;;;;;;;;;;;;AAmBO;AACH,qBAAe,eAAmB;AAClC,oBAAc,eAAmB,EAAE;AACnC,wBAAkB,QAAQ;AAC1B,4BAAsB,UAAQ,CAAE,QAAQ,CAAE,IAAK,OAAO,CAAE,OAAO,CAAC,WAAW,UAAW,SAAA;AACtF,sBAAgB,OAAO,eAAe,WAAW,QAAQ;AACzD,6BAAuB,UAAQ,CAAE,QAAQ,CAAE,GAAG,UAAW,OAAO,CAAE,OAAO,WAAY,SAAA;AACrF,eAAQ,8BAA8B;AACtC,eAAQ,8BAA8B;AACtC,aAAO;;AC5BX;;;;;;;;;;;;;;;;AAmBO,uBAAmB;MACtB,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,OAAO;AAC1B,eAAQ,KAAM;AACd,eAAQ,UAAU,QAAS;AAC3B,6BAAqB;AACrB,sBAAc,EAAE,MAAM;AACtB,yBAAiB,gBAAoB,MAAM,EAAE;AAC7C,mBAAW;AACX,6BAAqB,oBAAgC,MAAM;AAC3D,sCAA8B,gBAAgB;AAC9C,mCAA2B,aAAa,mBAA
mB,CAAC;AAC5D,8BAAsB;AACtB,wBAAgB;AAChB,YAAI;AACA,cAAI;AACA,6BAAiB,aAAa,QAAQ,IAAI,UAAU;AACpD,2BAAe,SAAS;AACxB,6BAAiB,IAAI,MAAM;AAC3B,yBAAa,GAAG,IAAI,SAAS,QAAQ;AACjC,uBAAS,KAAK,EAAE,MAAM,aAAa;;AAEvC,oCAAwB,iBAAiB,QAAQ,EAAE,OAAO,EAAE,OAAO,cAAc;AACjF,wBAAY,aAAa,eAAe,UAAU,EAAE;AACpD,kCAAsB,aAAa,QAAQ,IAAI,UAAU;AACzD,0BAAc,SAAS;;AAGvB,wBAAY,gBAAc,GAAG,cAAc;;AAE/C,wBAAc,KAAK;AACnB,iBAAO,kBAA8B,KAAK,QAAQ;;AAEtD,oCAAwC,OAAO,MAAM;AACrD,4CAAoC,2BAAuC,UAAU,OAAO;AAC5F,uBAAe;AACf,YAAI;AAEA,qBAAW,sBAAkC,cAAc;;AAE/D,oBAAY,SAAS,WAAW,aAAa,UAAU;AACvD,wBAAgB;AACZ,uBAAa,8BAA8B;;AAE/C,eAAO;;;AChEf;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc,SAAS,IAAI,WAAU,GAAE,KAAqB,OAAO,KAAK,GAAE;AAC/E,qBAAa,OAAO;AACpB,sBAAc,kBAAkB;AAChC,sBAAc,SAAS,IAAI,QAAK,GAAE,IAAI,KAAK;AAC3C,oBAAY,SAAS,IAAI,WAAU,GAAE,KAAK,OAAO,IAAI,KAAK;AAC1D,+BAAuB,CAAC,aAAa,aAAa,aAAa,aAAa,MAAM,GAAG;AACrF,uBAAe,SAAS,YAAY,IAAI;AACxC,YAAI,SAAS;AACT,eAAK,WAAW;sBACN;oBACF;;;;;wCAKoB;;4CAEI;;;;;AAKhC;;AAEJ,aAAK,WAAW;QAChB,iBAAiB,SAAS;QAC1B,eAAe,SAAS;;;UAGtB;8BACoB;;iDAEmB;;qDAEI;;;UAG3C;yBACe;;;;;AC1DzB;;;;;;;;;;;;;;;;;MA0DI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,aAAK,cAAc,SAAS,IAAI,WAAU,GAAE,KAAqB,OAAO,KAAK,GAAE;AAC/E,qBAAa,OAAO;AACpB,sBAAc,kBAAkB;AAChC,sBAAc,SAAS,IAAI,QAAK,GAAE,IAAI,KAAK;AAC3C,oBAAY,SAAS,IAAI,WAAU,GAAE,KAAK,OAAO,IAAI,KAAK;AAC1D,wBAAe,YAAY,MAAM;AACjC,uBAAe,YAAY,UAAU;AACrC,uBAAe,GAAG,QAAO,OAAO,QAAQ,KAAK,YAAY,OAAO;AAChE,0BAAkB,SAAS,IAAI,WAAW,QAAQ,OAAO,MAAM,IAAI;AACnE,uBAAe,SAAS,YAAY,IAAI;AACxC,uBAAe;AACf,YAAI,SAAS;AACT,2BAAiB;UACnB;;0CAEgC;;8CAEI;;;;AAIlC,qBAAW;UACb;UACA;sCAC4B,OAAO,YAAY;UAC/C,QAAO,OAAO;aACX;YACD;wCAC4B,OAAO,YAAY;;;;AAK/C,2BAAiB;UACnB;UACA,cAAc;UACd,eAAe;UACf;;6CAEmC;kDACK;;;AAGtC,qBAAW;UACb;UACA;sCAC4B,OAAO,YAAY;UAC/C,QAAO,OAAO;aACX;YACD;wCAC4B,OAAO,YAAY;;;UAGjD,QAAO,OAAO;aACX,QAAO,OAAO,QAAQ,KAAK,YAAY,OAAO;YAC/C;wCAC4B,OAAO,YAAY;YAC/C,QAAO,OAAO;eACX;cACD;0CAC4B,OAAO,YAAY;;;;;AAKrD,aAAK,WAAW;cACV,iBAAiB,SAAS;cAC1B,eAAe,SAAS;;;UAG5B;;UAEA;;;;;;ACtIV;;;;;;;;;;;;;;;;AAmBO,gCAA4B,EAAG,QAAQ,mBAAS;AACnD,aAAQ,KAAM;AACd,aAAQ,UAAU,QAAS;AAC3B,sBAAgB,OAAM,QAAQ,iCAC1B,IAAI,uBAAuB,EAAE,OAAO,UAAU,QAC9C,IAAI,iBAAiB,EAAE,OAAO,UAAU;AAC5C,qBAAe,SAAQ,gBAAgB,SAAS,CAAC,IAAI,EAAE;AACvD,aAAO;;AAEJ,8BAAwB;MAC3B,YAAY;MACZ,aAAa;MACb,YAAY;;AC/BhB;;;;;;;;;;;;;;;;AAqBO,6BAAyB;MAC5B,MAAM;MACN,MAAM;;;MAGN;AACI,aAAK,gBAAgB,CAAC,SAAS,SAAS,SAAS;AACjD,aAAK,cAAc,4BAAwC,QAAQ;AACnE,aAAK,WAAW;;;UAGd;;;;;;;;;;;;;AChCV;;;;;;;;;;;;;;;;AAuBA,gBAAY;AACL;AACH,aAAQ,QAAQ,qBAAY;AAC5B,aAAQ,GAAG,KAAM;AACjB,oBAAc,YAAwB,EAAE,OAAO,EAAE;AACjD,UAAI,EAAE,UAAU;AACZ,sBAAc,SAAQ,QAAQ,IAAI,EAAE;AACpC,sBAAc,SAAQ,QAAQ,IAAI,EAAE;AACpC,4BAAoB,IAAI,uBAAuB,iBAAsC,MAAM,EAAE,OAAO,EAAE;AACtG,4BAAoB,IAAI,uBAAuB,iBAAsC,MAAM,EAAE,OAAO,EAAE;AACtG,wBAAe;UACX;YACI,QAAQ,MAAM,mBAAmB,KAAK;YACtC,OAAO,MAAM,mBAAmB,KAAK;YACrC,OAAO,EAAE;;UAEb;YACI,QAAQ,MAAM,mBAAmB,KAAK;YACtC,OAAO,MAAM,mBAAmB,KAAK;YACrC,OAAO,EAAE;;UAEb;YACI,QAAQ,MAAM,mBAAmB,KAAK;YACtC,OAAO,MAAM,mBAAmB,KAAK;YACrC,OAAO,EAAE;;UAEb;YACI,QAAQ,MAAM,mBAAmB,KAAK;YACtC,OAAO,MAAM,mBAAmB,KAAK;YACrC,OAAO,EAAE;;;AAGjB,yBAAiB,SAAQ,gBAAgB,aAAa,SAAQ;AAC9D,yBAAiB,SAAQ,gBAAgB,aAAa,SAAQ;AAC9D,8BAAsB,UAAQ,CAAE,QAAQ,CAAE,MAAM,UAAU,MAAM,WAAY,SAAA;AAC5E,iBAAQ,8BAA8B;AACtC,iBAAQ,8BAA8B;AAEtC,eAAO;;AAEX,UAAI,SAAQ,mBAAmB,CAAC,GAAG;AAC/B,sBAAc,SAAQ,QAAQ,IAAI,EAAE;AACpC,sBAAc,SAAQ,QAAQ,IAAI,EAAE;AACpC,sCAA8B,gBAAY,EAAE,OAAO,EAAE,OAAO,MAAM,QAAQ,MAAM,QAAQ;AACxF,oBAAY,SAAQ,eAAe,UAAU;AAC7C,wBAAgB,SAAQ,QAAQ,IAAI,IAAI;AACxC,gBAAQ,SAAS;AACjB,eAAO;;AAEX;AACA,UAAI,OAAM,QAAQ;AACd,kBAAU,IAAI,sBAAsB,KAAK,EAAE,OAAO,EAAE;;AAGpD,kBAAU,IAAI,gBAAgB,KAAK
,EAAE,OAAO,EAAE;;AAElD,aAAO,SAAQ,gBAAgB,SAAS,CAAC,GAAG,IAAI;;AAE7C,6BAAuB;MAC1B,YAAY;MACZ,aAAa;MACb,YAAY;;ACpFhB;;;;;;;;;;;;;;;;AAiBO,uCAAkC;MACrC,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,mBAAS;AAC5B,cAAkB;AAElB,eAAQ,OAAO,UAAW;AAC1B,eAAQ,eAAe,cAAc,kBAAmB;AACxD,2BAAmB;AACnB,0BAAkB,WAAW,SAAS,MAAM;AAC5C,2BAAmB,WAAW,SAAS,OAAO;AAC9C,iCAAyB;AACzB,gCAAwB;AACxB,kCAA0B;AAC1B,eAAO,wBAAqC,WAAW,YAAY,kBAAkB,iBAAiB;;;AC/B9G;;;;;;;;;;;;;;;;AAiBA,sCAAgC;AACzB,wCAAkC;MACrC,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,mBAAS;AAC5B,cAAkB;AAElB,eAAQ,OAAO,UAAW;AAC1B,eAAQ,eAAe,cAAc,gBAAgB,sBAAuB;AAC5E,2BAAmB;AACnB,0BAAkB,WAAW,SAAS,MAAM;AAC5C,2BAAmB,WAAW,SAAS,OAAO;AAC9C,eAAQ,iBAAiB,gBAAiB,0BAAwB,WAAW,YAAY,eAAe,cAAc,gBAAgB;AACtI,eAAO,CAAC,iBAAiB;;;AC9BjC;;;;;;;;;;;;;;;;AAkBA,sCAAgC;AACzB,wCAAkC;MACrC,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,mBAAS;AAC5B,cAAkB;AAElB,eAAQ,OAAO,UAAW;AAC1B,eAAQ,eAAe,cAAc,gBAAgB,gBAAiB;AACtE,2BAAmB;AACnB,0BAAkB,WAAW,SAAS,MAAM;AAC5C,2BAAmB,WAAW,SAAS,OAAO;AAC9C,iCAAyB;AACzB,gCAAwB;AACxB,kCAA0B;AAC1B,gCAAwB;AACxB,eAAQ,iBAAiB,kBAAmB,0BAAwB,WAAW,YAAY,kBAAkB,iBAAiB,mBAAmB;AACjJ,eAAO,CAAC,iBAAiB;;;ACnCjC;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc;AACnB,4BAAoB,WAAW;AAC/B,2BAAmB,WAAW;AAC9B,0BAAkB,KAAK,IAAI,SAAS,QAAQ;AAC5C,0BAAkB,KAAK,IAAI,SAAS,QAAQ;AAC5C,aAAK,cAAc;AACnB,mCAA2B,gBAA4B,QAAQ,aAAa;AAC5E,8BAAsB,QAAQ,QAAQ;AACtC,8BAAsB,QAAQ,QAAQ;AACtC,0BAAkB;AAClB,YAAI,OAAO,cAAc;AACrB,wBAAc,uBAAuB,UAAU,QAAQ;;AAGvD,wBAAc;2BACC,UAAU,KAAK;;;AAGlC,aAAK,WAAW;;;;;4CAKoB,oBAAoB,2BAA2B,oBAAoB;4CACnE,oBAAoB,2BAA2B,oBAAoB;iDAC9D;iDACA;YACrC;uCAC2B,yCAAyC;;;;;;;;AChDhF;;;;;;;;;;;;;;;;AAkBO,qCAA+B;MAClC,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,OAAO;AAC1B,eAAQ,iBAAU;AAClB,eAAQ,SAAS,WAAW,UAAW;AACvC,6BAAqB;AACrB,wBAAgB,IAAI,cAAc,OAAM,OAAO,SAAS,WAAW;AACnE,uBAAe,aAAa,gBAAgB,SAAS,CAAC,SAAQ,OAAM;AACpE,eAAO;;;AC3Bf;;;;;;;;;;;;;;;;AAkBA,gBAAY,0BAA0B;;;AAG/B,kBAAY,kBAAgB;AAC5B,wBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY;;ACzBhB;;;;;;;;;;;;;;;;AAkBA,mBAAe;AACR,qBAAe,kBAAgB;AAC/B,2BAAqB;MACxB,YAAY;MACZ,aAAa;MACb,YAAY;;ACvBhB;;;;;;;;;;;;;;;;AAkBA,iCAA2B;AACpB,gCAA0B,mBAAiB,CAAE,WAAW,sBAAoB,iBAAiB;AAC7F,sCAAgC;MACnC,YAAY;MACZ,aAAa;MACb,YAAY;;ACvBhB;;;;;;;;;;;;;;;;AAmBA,gBAAY;AACL,0BAAsB,mBAAiB;MAC1C,WAAW;MACX,iBAAiB;MACjB,iBAAiB;MACjB,eAAe;;AAEZ,wBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY;;AC7BhB;;;;;;;;;;;;;;;;AAkBA,gBAAY;AACL,kBAAY,kBAAgB;AAC5B,wBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY;;ACvBhB;;;;;;;;;;;;;;;;AAmBO,8BAAwB;MAC3B,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,OAAO;AAC1B,eAAQ,KAAM;AACd,eAAQ,QAAS;AACjB,6BAAqB;AACrB,sBAAc,EAAE,MAAM;AACtB,yBAAiB,IAAI,MAAM;AAC3B,qBAAa,GAAG,IAAI,SAAS,QAAQ;AACjC,mBAAS,KAAK,EAAE,MAAM,KAAK;;AAE/B;AACA,YAAI,aAAa,mBAAmB,CAAC;AACjC,2BAAiB,aAAa,QAAQ,IAAI,EAAE;AAC5C,yBAAe,SAAS;AACxB,4BAAkB,iBAAa,QAAQ,EAAE,OAAO,EAAE,OAAO,MAAM;AAC/D,gBAAM,aAAa,eAAe,UAAU,EAAE;AAC9C,0BAAgB,aAAa,QAAQ,IAAI,IAAI;AAC7C,kBAAQ,SAAS;;AAGjB,gBAAM,gBAAc,GAAG,MAAM;;AAEjC,eAAO;;;AC3Cf;;;;;;;;;;;;;;;;AAmBO;AACH,aAAQ,QAAQ,OAAO,qBAAY;AACnC,aAAQ,QAAS;AACjB,aAAQ,KAAM;AACd,yBAAiB,GAAG;AAEpB,cAAQ,KAAK,aAAa;AAC1B,qBAAe,SAAQ,SAAS,EAAE;AAClC,aAAQ,cAAc,aAAa,WAAY,cAAc,QAAQ,MAAM,EAAE,OAAO,EAAE;AACtF,aAAO;QACH,SAAQ,eAAe,aAAa,EAAE,OAAO;QAC7C,SAAQ,eAAe,CAAC,QAAQ,SAAS,SAAS;;;AAGnD,2BAAqB;MACxB,YAAY;MACZ,aAAa;MACb,YAAY;;ACpChB;;;;;;;;;;;;;;;;AAuDA,4BAAsB;MAClB;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;;AAEJ,+BAA2B;AACvB,sBAAe;;AC/FnB;;;;;;;;;;;;;;;;ACAA;AAGA,sBAAgB;ACHhB;;;;;;;;;;;;;;;;sBAsCuB;MACrB,aAAa;MACb,oBAAoB;MACpB,sBAAsB;MACtB,aAAa;MACb,
eAAe;MACf,kBAAkB;MAClB,MAAQ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AC7CV,mBAAA,WAAA;;ACAA,6BAAA,WAAA;;ACAA,yBAAA,WAAA;;ACAA,8CAAA,WAAA;AACA,sCAAqC;AACnC,qBAAiB,OAAO,aAAa,eAAe,SAAS,gBAAgB,SAAS,cAAc,MAAM;AAC1G,QAAI,OAAO,eAAe;AAAa,mBAAa,cAAc;AAClE,WACF;AACE,uCAAgC,kCAAiC;AAEnE;AAA4B,YAAG,WAAW,UAAQ;AAAQ,qCAA2B,WAAW;;AAAQ,eAAO;;AAAM;AAA4B,YAAG,WAAW,UAAQ;AAAQ,qCAA2B,WAAW;;AAAQ,eAAO;;AAAO;AAA6B,YAAG,WAAW,UAAQ;AAAQ,qCAA2B,WAAW;;AAAQ,eAAO;;AAAO;AAA6B,YAAG,WAAW,UAAQ;AAAQ,qCAA2B,WAAW;;AAAQ,eAAO;;AAAQ;AAA6B,YAAG,WAAW,UAAQ;AAAQ,qCAA2B,WAAW;;AAAQ,eAAO;;AAAQ,mBAAW,OAAO,mCAAgC,cAAY,iCAA8B;AAAG,4BAAoB;AAAG;AAAQ,WAAI,OAAO;AAAQ,YAAG,OAAO,eAAe;AAAM,0BAAgB,OAAK,OAAO;;;AAAM,uBAAe;AAAG,wBAAgB;AAAiB,kBAAU;AAAyB,cAAM;;AAAS,+BAAuB;AAAM,kCAA0B;AAAM,gCAAwB;AAAM,iCAAyB;AAAM,2BAAmB,OAAO,WAAS;AAAS,8BAAsB,OAAO,kBAAgB;AAAW,4BAAoB,OAAO,YAAU,YAAU,OAAO,QAAQ,aAAW,YAAU,OAAO,QAAQ,SAAS,SAAO;AAAS,6BAAqB,CAAC,sBAAoB,CAAC,uBAAqB,CAAC;AAAsB,mCAA2B,OAAO,6BAA2B;AAAM,UAAG;AAAwB,kBAAO,OAAO;AAAU,uBAAa,OAAO;AAAgB,yBAAe,OAAO;;AAAkB,4BAAoB;AAAG;AAA0B,YAAG,OAAO;AAAe,iBAAO,OAAO,cAAc,MAAK;;AAAiB,eAAO,kBAAgB;;AAAK;AAA8C;AAAW;AAAa,UAAG;AAAqB,YAAG;AAAuB,4BAAgB,eAAgB,QAAQ,mBAAiB;;AAAS,4BAAgB,YAAU;;AAAI,gBAAM;AAAqC,cAAG,CAAC;AAAO,qBAAO;AAAc,cAAG,CAAC;AAAS,uBAAS;AAAgB,qBAAS,SAAS,aAAa;AAAU,iBAAO,OAAO,gBAAgB,UAAS,SAAO,OAAK;;AAAS,qBAAW;AAA8B,oBAAQ,MAAM,UAAS;AAAM,cAAG,CAAC,IAAI;AAAQ,kBAAI,IAAI,WAAW;;AAAK,kBAAO,IAAI;AAAQ,iBAAO;;AAAK,YAAG,QAAQ,QAAQ,SAAO;AAAG,wBAAY,QAAQ,QAAQ,GAAG,QAAQ,OAAM;;AAAK,qBAAW,QAAQ,QAAQ,MAAM;AAAG,gBAAQ,MAAM,qBAAoB;AAAa,cAAG,CAAE,eAAc;AAAa,kBAAM;;;AAAM,gBAAQ,MAAM,sBAAqB;AAAO,gBAAM;AAAiB,kBAAQ,QAAQ;;AAAS,eAAO,aAAW;AAAW,iBAAM;;AAA8B;AAAsB;AAAI,8BAAkB;;AAAmC,kBAAQ,MAAM;AAA2G,gBAAM;;AAAE,iBAAO,kBAAkB;iBAAe;AAAsB,YAAG,OAAO,QAAM;AAAa,kBAAM;AAAuB,mBAAO,KAAK;;;AAAI,qBAAW;AAAuB;AAAS,cAAG,OAAO,eAAa;AAAY,mBAAO,IAAI,WAAW,WAAW;;AAAI,kBAAK,KAAK,GAAE;AAAU,kBAAO,OAAO,UAAO;AAAU,iBAAO;;AAAM,YAAG,OAAO,cAAY;AAAa,uBAAW;mBAAmB,OAAO,aAAW;AAAa,uBAAW;;AAAU,YAAG,OAAO,SAAO;AAAY,kBAAM;AAAiB,iBAAK;;;AAAS,YAAG,OAAO,UAAQ;AAAa,cAAG,OAAO,YAAU;AAAY,sBAAQ;AAAG,kBAAQ,MAAI;AAAM,kBAAQ,OAAK,QAAQ,QAAM,OAAO,aAAW,cAAY,WAAS;;iBAAe,sBAAoB;AAAuB,YAAG;AAAuB,4BAAgB,KAAK,SAAS;mBAAa,SAAS;AAAe,4BAAgB,SAAS,cAAc;;AAAI,YAAG;AAAY,4BAAgB;;AAAW,YAAG,gBAAgB,QAAQ,aAAW;AAAG,4BAAgB,gBAAgB,OAAO,GAAE,gBAAgB,YAAY,OAAK;;AAAQ,4BAAgB;;AAAG,YAAG;AAAqB,kBAAM;AAAqC,gBAAG,CAAC;AAAO,uBAAO;AAAc,gBAAG,CAAC;AAAS,yBAAS;AAAgB,uBAAS,SAAS,aAAa;AAAU,mBAAO,OAAO,gBAAgB,UAAS,SAAO,OAAK;;AAAS,uBAAW;AAA8B,sBAAQ,MAAM,UAAS;AAAM,gBAAG,CAAC,IAAI;AAAQ,oBAAI,IAAI,WAAW;;AAAK,oBAAO,IAAI;AAAQ,mBAAO;;;AAAU,kBAAM;AAAyB,sBAAQ,IAAI;AAAe,gBAAI,KAAK,OAAM,KAAI;AAAO,gBAAI,KAAK;AAAM,mBAAO,IAAI;;AAAc,cAAG;AAAuB,yBAAW;AAAyB,wBAAQ,IAAI;AAAe,kBAAI,KAAK,OAAM,KAAI;AAAO,kBAAI,eAAa;AAAc,kBAAI,KAAK;AAAM,qBAAO,IAAI,WAAW,IAAI;;;AAAW,sBAAU;AAAuC,sBAAQ,IAAI;AAAe,gBAAI,KAAK,OAAM,KAAI;AAAM,gBAAI,eAAa;AAAc,gBAAI,SAAO;AAAsB,kBAAG,IAAI,UAAQ,OAAK,IAAI,UAAQ,KAAG,IAAI;AAAU,uBAAO,IAAI;AAAU;;AAAO;;AAAW,gBAAI,UAAQ;AAAQ,gBAAI,KAAK;;;AAAO,yBAAe;AAAgB,mBAAS,QAAM;;;;AAAa,UAAG;AAAqB,YAAG,OAAO,gBAAc;AAAa,wBAAY,qBAAsB;;;AAAa,gBAAQ,OAAO,YAAU,QAAQ,IAAI,KAAK;AAAS,gBAAQ,OAAO,eAAa,QAAQ,KAAK,KAAK;AAAS,WAAI,OAAO;AAAiB,YAAG,gBAAgB,eAAe;AAAM,iBAAO,OAAK,gBA
AgB;;;AAAM,wBAAgB;AAAK,UAAG,OAAO;AAAa,qBAAW,OAAO;AAAa,UAAG,OAAO;AAAe,sBAAY,OAAO;AAAe,UAAG,OAAO;AAAQ,gBAAM,OAAO;AAAQ,yBAAiB,QAAQ;AAAK,0BAAkB,QAAQ;AAAM,oCAA4B,QAAQ;AAAgB;AAAe,UAAG,OAAO;AAAc,qBAAW,OAAO;AAAc;AAAkB,UAAG,OAAO;AAAiB,wBAAc,OAAO;AAAiB,UAAG,OAAO,gBAAc;AAAU,YAAI;;AAAmC;AAAe,sBAAc,IAAI,YAAY,MAAM,CAAC,SAAU,KAAI,SAAU,MAAI,GAAE,SAAU;AAAY;AAAe,6BAAqB;AAAE,yBAAiB;AAAE,kBAAU;AAAM,uBAAe;AAAE;AAAgC,YAAG,CAAC;AAAW,gBAAM,uBAAqB;;;AAAO;AAAyB,mBAAS,OAAO,MAAI;AAAO,gBAAO,MAAK,kCAAgC,QAAM;AAA8B,eAAO;;AAAK;AAAoD,kBAAQ,CAAC,QAAS;AAAc,qBAAQ;AAAE,cAAG,SAAM,QAAM,SAAM,UAAW,SAAM;AAAG,sBAAS,MAAI,UAAQ,KAAG;AAAE,mBAAI,WAAW;AAAK,yBAAa,MAAI,MAAI;;AAAK,iBAAO;WAAK,OAAQ;AAAc,qBAAQ,WAAW,IAAI;AAAQ,6BAAmB,KAAI;AAAK,iBAAO;;AAAM;AAAiC,cAAG,eAAa;AAAS,mBAAO,aAAa;AAAK,cAAG,eAAa;AAAU,mBAAO,QAAQ;AAAK,iBAAO;;AAAI,mBAAS,SAAS;AAAO,oBAAU;AAAG,oBAAU;AAAE,YAAG;AAAM,uBAAU,GAAE,IAAE,KAAK,QAAO;AAAK,4BAAc,IAAI,SAAS;AAAI,gBAAG;AAAW,kBAAG,UAAQ;AAAE,wBAAM;AAAY,oBAAM,KAAG,UAAU,KAAK;;AAAS,oBAAM,KAAG,KAAK;;;;AAAK,kBAAQ,KAAK,MAAM,MAAK;AAAO,cAAI,mBAAmB;AAAK,YAAG,UAAQ;AAAE,uBAAa;AAAO,eAAO;;AAAI;AAA+C,mBAAS,YAAU;AAAG,0BAAgB,SAAS,MAAM;AAAe,iBAAO,SAAO;;AAAW,yBAAe,eAAa;AAAS,YAAG,cAAY,eAAa,CAAC;AAAM,iBAAO,SAAS;;AAAO,eAAO;AAAW,iBAAO,MAAM,OAAM,YAAW,UAAS,WAAU;;;AAAO;AAAoD,qBAAW,MAAI;AAAe,mBAAQ;AAAG,eAAM,CAAE,QAAK;AAAS,mBAAO,KAAK;AAAO,cAAG,CAAC;AAAG,mBAAO;AAAI,cAAG,CAAE,MAAG;AAAM,oBAAK,OAAO,aAAa;AAAI;;AAAS,mBAAO,KAAK,SAAO;AAAG,cAAI,MAAG,QAAM;AAAK,oBAAK,OAAO,aAAc,MAAG,OAAK,IAAE;AAAI;;AAAS,mBAAO,KAAK,SAAO;AAAG,cAAI,MAAG,QAAM;AAAK,iBAAI,MAAG,OAAK,KAAG,MAAI,IAAE;;AAAQ,iBAAI,MAAG,MAAI,KAAG,MAAI,KAAG,MAAI,IAAE,KAAK,SAAO;;AAAG,cAAG,KAAG;AAAO,oBAAK,OAAO,aAAa;;AAAS,qBAAO,KAAG;AAAM,oBAAK,OAAO,aAAa,QAAM,MAAI,IAAG,QAAM,KAAG;;;AAAO,eAAO;;AAAI;AAA0C,eAAO,MAAI,kBAAkB,oBAAmB,KAAI,kBAAgB;;AAAG;AAA4D,YAAG,CAAE,mBAAgB;AAAG,iBAAO;AAAE,uBAAa;AAAO,qBAAW,SAAO,kBAAgB;AAAE,qBAAU,GAAE,IAAE,KAAI,QAAO,EAAE;AAAG,kBAAM,KAAI,WAAW;AAAG,cAAG,KAAG,SAAO,KAAG;AAAO,qBAAO,KAAI,WAAW,EAAE;AAAG,gBAAE,QAAQ,MAAE,SAAO,MAAI,KAAG;;AAAK,cAAG,KAAG;AAAK,gBAAG,UAAQ;AAAO;AAAM,iBAAK,YAAU;qBAAU,KAAG;AAAM,gBAAG,SAAO,KAAG;AAAO;AAAM,iBAAK,YAAU,MAAI,KAAG;AAAE,iBAAK,YAAU,MAAI,IAAE;qBAAW,KAAG;AAAO,gBAAG,SAAO,KAAG;AAAO;AAAM,iBAAK,YAAU,MAAI,KAAG;AAAG,iBAAK,YAAU,MAAI,KAAG,IAAE;AAAG,iBAAK,YAAU,MAAI,IAAE;;AAAQ,gBAAG,SAAO,KAAG;AAAO;AAAM,iBAAK,YAAU,MAAI,KAAG;AAAG,iBAAK,YAAU,MAAI,KAAG,KAAG;AAAG,iBAAK,YAAU,MAAI,KAAG,IAAE;AAAG,iBAAK,YAAU,MAAI,IAAE;;;AAAI,aAAK,UAAQ;AAAE,eAAO,SAAO;;AAAS;AAAkD,eAAO,kBAAkB,MAAI,oBAAmB,QAAO;;AAAiB;AAA8B,kBAAQ;AAAE,qBAAU,GAAE,IAAE,KAAI,QAAO,EAAE;AAAG,kBAAM,KAAI,WAAW;AAAG,cAAG,KAAG,SAAO,KAAG;AAAM,gBAAE,QAAQ,MAAE,SAAO,MAAI,KAAI,WAAW,EAAE,KAAG;AAAK,cAAG,KAAG;AAAI,cAAE;mBAAY,KAAG;AAAK,mBAAK;mBAAU,KAAG;AAAM,mBAAK;;AAAO,mBAAK;;AAAE,eAAO;;AAAI;AAA0C,2BAAmB,IAAI,OAAM;;AAAQ,2BAAmB;AAAM;AAA6B,YAAG,IAAE,WAAS;AAAG,eAAG,WAAS,IAAE;;AAAS,eAAO;;AAAE;AAAsE;AAAyC,kBAAO;AAAI,eAAO,WAAS,QAAM,IAAI,UAAU;AAAK,eAAO,YAAU,SAAO,IAAI,WAAW;AAAK,eAAO,YAAU,SAAO,IAAI,WAAW;AAAK,eAAO,YAAU,SAAO,IAAI,WAAW;AAAK,eAAO,aAAW,UAAQ,IAAI,YAAY;AAAK,eAAO,aAAW,UAAQ,IAAI,YAAY;AAAK,eAAO,aAAW,UAAQ,IAAI,aAAa;AAAK,eAAO,aAAW,UAAQ,IAAI,aAAa;;AAAK,uBAAe,oBAAiB,wBAAqB,sBAAmB,0BAAuB;AAAM,UAAG;;AAAyB,mCAA2B,OAAO,qBAAmB;AAAS,UAAG;AAAwB,qBAAW,OAAO;AAAc,kBAAO,OAAO;;AAAe,YAAG,OAAO;AAAe,uBAAW,OAAO;;AAAmB,uBAAW,IAAI,YAAY,OAAO,CAAC,SAAU,yBAAuB,gBAAe,SAAU,aAAW,gBAAe,QAAS;AAAO,cAAG,CAAE,YAAW,kBAAkB;AAAoB,gBAAI;AAA+N,gBAAG;AAAqB,sBAAQ,IAAI;;AAAqH,kBAAM,MAAM;;;;AAAgB,UAAG;AAAY,kBAAO,WAAW;;AAAO,+BAAuB,QAAO;AAAW,iCAA2B;AAAQ,UAAG,CAAC;AAAwB,4BAAoB,kBAAgB,KAAG;;AAAa;AAAyC,eAAM,UAAU,SAAO;AAAG,yBAAa,UAAU;AAAQ,cAAG,OAAO,YAAU;AAAY,qBAAS;AAAQ;;AAAS,qBAAS,SAAS;AAAK,cAAG,
OAAO,SAAO;AAAU,gBAAG,SAAS,QAAM;AAAW,qBAAO,aAAa;;AAAW,qBAAO,cAAc,MAAK,SAAS;;;AAAW,iBAAK,SAAS,QAAM,SAAU,OAAK,SAAS;;;;AAAO,yBAAiB;AAAG,uBAAe;AAAG,uBAAe;AAAG,uBAAe;AAAG,0BAAkB;AAAG,+BAAuB;AAAM,UAAG;AAAuB,6BAAmB;AAAK;AAAkB,YAAG;AAAuB;AAAO,YAAG,OAAO;AAAW,cAAG,OAAO,OAAO,aAAW;AAAW,mBAAO,YAAU,CAAC,OAAO;AAAW,iBAAM,OAAO,UAAU;AAAQ,wBAAY,OAAO,UAAU;;;AAAU,6BAAqB;;AAAc;AAAuB,6BAAmB;AAAK,6BAAqB;;AAAY;AAAmB,YAAG;AAAuB;AAAO,6BAAqB;;AAAY;AAAmB,YAAG;AAAuB;AAAO,YAAG,OAAO;AAAY,cAAG,OAAO,OAAO,cAAY;AAAW,mBAAO,aAAW,CAAC,OAAO;AAAY,iBAAM,OAAO,WAAW;AAAQ,yBAAa,OAAO,WAAW;;;AAAU,6BAAqB;;AAAe;AAAyB,qBAAa,QAAQ;;AAAI;AAA0B,sBAAc,QAAQ;;AAAI,sBAAc,KAAK;AAAK,uBAAe,KAAK;AAAM,4BAAoB;AAAE,iCAAyB;AAAK,kCAA0B;AAAK;AAA8B,gBAAO,CAAC,wBAAuB;AAAuD;AAAkB,YAAG,OAAO;AAA2B,iBAAO,0BAA0B;;;AAAkB;AAAiC;AAAkB,YAAG,OAAO;AAA2B,iBAAO,0BAA0B;;AAAiB,YAAG,mBAAiB;AAAG,cAAG,yBAAuB;AAAM,0BAAc;AAAsB,mCAAqB;;AAAK,cAAG;AAAuB,2BAAa;AAAsB,oCAAsB;AAAK;;;;AAAa,aAAO,qBAAmB;AAAG,aAAO,qBAAmB;AAAG;AAAqB,YAAG,OAAO;AAAY,iBAAO,WAAW;;AAAM,YAAG;AAAuB,kBAAQ,MAAM,yBAAwB,IAAI,QAAO;AAAO,gBAAM;AAAG,YAAI;AAAM,YAAI;AAAM,gBAAM;AAAK,qBAAW;AAAE,eAAK,WAAS,OAAK;AAA+C,cAAM,IAAI,YAAY,aAAa;;AAAM;AAA+B,eAAO,OAAO,UAAU,aAAW,KAAI,WAAW,UAAQ,KAAI,QAAQ,YAAU;;AAAE,0BAAkB;AAAwC;AAA6B,eAAO,UAAU,UAAS;;AAAe,0BAAkB;AAAU;AAA6B,eAAO,UAAU,UAAS;;AAAe,2BAAmB;AAAuC,UAAG,CAAC,UAAU;AAAiB,yBAAe,WAAW;;AAAgB;AAAqB;AAAI,cAAG;AAAY,mBAAO,IAAI,WAAW;;AAAY,cAAG;AAAY,mBAAO,WAAW;;AAAqB,kBAAK;;;AAA8D,gBAAM;;;AAAM;AAA4B,YAAG,CAAC,cAAa,uBAAoB,0BAAwB,OAAO,UAAQ,cAAY,CAAC,UAAU;AAAiB,iBAAO,MAAM,gBAAe,CAAC,aAAY,gBAAgB,KAAK;AAAmB,gBAAG,CAAC,SAAS;AAAO,oBAAK,yCAAuC,iBAAe;;AAAI,mBAAO,SAAS;aAAmB,MAAM;AAAW,mBAAO;;;AAAc,eAAO,IAAI,QAAQ;AAAyB,kBAAQ;;;AAAe;AAAsB,mBAAS,CAAC,GAAI;AAAe;AAA0C,yBAAY,UAAS;AAAQ,iBAAO,SAAO;AAAQ,uBAAW;AAAO,cAAG,CAAC;AAAwB,mCAAqB,QAAQ,cAAc;AAAO,oBAAQ,cAAc,QAAQ;AAAY,sBAAQ,uBAAuB,GAAE;AAAW,oBAAG,CAAC,EAAE;AAAiB,sCAAoB;;;;;AAAyB,YAAG,CAAC;AAAwB,2BAAiB;;AAAoB;AAA2C,0BAAgB,OAAO,aAAY,OAAO;;AAAW;AAA0C,iBAAO,mBAAmB,KAAK;AAAiB,mBAAO,YAAY,YAAY,QAAO;aAAQ,KAAK,UAAS;AAAiB,gBAAI,4CAA0C;AAAQ,kBAAM;;;AAAU;AAA4B,cAAG,CAAC,cAAY,OAAO,YAAY,yBAAuB,cAAY,CAAC,UAAU,mBAAiB,CAAC,UAAU,mBAAiB,OAAO,UAAQ;AAAY,kBAAM,gBAAe,CAAC,aAAY,gBAAgB,KAAK;AAAmB,2BAAW,YAAY,qBAAqB,UAAS;AAAM,qBAAO,OAAO,KAAK,2BAA0B;AAAiB,oBAAI,oCAAkC;AAAQ,oBAAI;AAA6C,uCAAuB;;;;AAAoC,mBAAO,uBAAuB;;;AAA4B,YAAG,OAAO;AAAoB;AAAI,2BAAY,OAAO,mBAAmB,MAAK;AAAiB,mBAAO;;AAAiB,gBAAI,wDAAsD;AAAG,mBAAO;;;AAAO;AAAmB,eAAM;;AAAG,uBAAe;AAAG;AAA0B,gBAAQ;;AAAc,UAAG,CAAC;AAAuB,mBAAW,KAAK,CAAC,MAAK;AAAW;;AAAwB,0BAAkB;AAAE,6CAAqC;AAAE,6CAAqC;AAAE;AAAoF,qBAAW,aAAW;AAAE,8BAAoB,sBAAoB;AAAE,8BAAoB,sBAAoB;AAAE,wBAAc;AAAW,2CAAiC;AAAoB,2CAAiC;;AAAoB,aAAO,4BAA0B;AAAuB,wBAAgB,CAAC,OAAM,IAAG,QAAO,IAAG,OAAM,IAAG,OAAM,IAAG,KAAI,IAAG,OAAM,IAAG,OAAM,GAAE,SAAQ,IAAG,OAAM,GAAE,QAAO,IAAG,QAAO,GAAE,aAAY,GAAE,QAAO,IAAG,QAAO,GAAE,QAAO,IAAG,SAAQ,KAAI,OAAM,IAAG,QAAO,IAAG,OAAM,IAAG,QAAO,IAAG,SAAQ,IAAG,QAAO,IAAG,QAAO,IAAG,QAAO,IAAG,QAAO,IAAG,QAAO,IAAG,SAAQ,IAAG,OAAM,IAAG,QAAO,IAAG,QAAO,IAAG,OAAM,IAAG,QAAO,IAAG,OAAM,IAAG,MAAK,IAAG,QAAO,IAAG,QAAO,IAAG,OAAM,IAAG,QAAO,KAAI,UAAS,KAAI,QAAO,KAAI,QAAO,KAAI,QAAO,KAAI,SAAQ,KAAI,QAAO,KAAI,QAAO,KAAI,SAAQ,IAAG,QAAO,IAAG,OAAM,KAAI,OAAM,KAAI,QAAO,KAAI,QAAO,KAAI,SAAQ,KAAI,SAAQ,KAAI,WAAU,IAAG,QAAO,KAAI,QAAO,KAAI,SAAQ,KAAI,OAAM,KAAI,OAAM,KAAI,QAAO,KAAI,QAAO,KAAI,SAAQ,KAAI,SAAQ,IAAG,MAAK,KAAI,QAAO,KAAI,OAAM,KAAI,QAAO,IAAG,WAAU,IAAG,SAAQ,KAAI,SAAQ,GAAE,UAAS,KAAI,QAAO,KAAI,SAAQ,KAAI,SAAQ,KAAI,SAAQ,KAAI,SAAQ,KAAI,SAAQ,KAAI,UAAS,KAAI,QAAO,IAAG,WAAU,IAAG,cAAa,IAAG,OAAM,IAAG,YAAW,KAAI,cAAa,KAAI,YAAW,IAAG,SAAQ,IAAG,cAAa,GAAE,YAAW,IAAG,UAAS,IAAG,aAAY,IAAG,WAAU,KAAI,cAAa,IAAG,YAAW,GAAE,cAAa,IAAG
,aAAY,IAAG,UAAS,IAAG,WAAU,IAAG,WAAU,KAAI,cAAa,IAAG,aAAY,IAAG,UAAS,GAAE,cAAa,IAAG,UAAS,IAAG,iBAAgB,IAAG,iBAAgB,KAAI,eAAc,GAAE,WAAU,IAAG,SAAQ,IAAG,UAAS,IAAG,cAAa,KAAI,QAAO,KAAI,QAAO,IAAG,QAAO,IAAG,SAAQ,KAAI,WAAU,KAAI,QAAO,IAAG,WAAU,IAAG,WAAU,IAAG,iBAAgB,IAAG,YAAW,IAAG,UAAS;AAAK,6CAAqC;AAAM;AAA4C,YAAG,QAAM,KAAG,OAAK,mBAAmB,UAAQ,OAAK,QAAM,QAAM;AAAE,iBAAM;AAAI,YAAG,SAAO;AAAE,iBAAO;AAAE,YAAG,SAAO;AAAW,kBAAM;AAAS,oCAA0B,QAAQ,KAAK,qBAAoB,oCAAkC;AAAG,8BAAoB;AAAE,YAAG,yBAAuB;AAAM,2BAAe,QAAQ,gBAAgB,qBAAoB,oCAAkC,GAAE,uBAAsB;AAAG,cAAG,cAAY;AAAuB,cAAE;AAAM,8BAAgB;AAAE,gBAAG,SAAO;AAAE,qBAAO;;;AAAG,kBAAQ,QAAQ,OAAO,qBAAoB,QAAM,GAAE;AAAO,YAAG,OAAK;AAAE,iBAAO,MAAI;AAAgB,cAAK,iDAA+C;;AAAI,aAAO,4BAA0B;AAAuB;AAAoC,YAAG;AAAuB,gBAAK;AAAuF,YAAG,CAAC;AAAY,gBAAK;AAAoD,4BAAoB,cAAY,MAAI,KAAG;AAAE,sBAAY,QAAQ,SAAS;AAAa,gBAAQ,OAAO;AAAY,gBAAQ,eAAe;AAAS,gBAAQ,eAAe,OAAO,QAAQ,eAAe,QAAQ,QAAQ,SAAQ;AAAG,gBAAQ,OAAO,UAAQ;;AAAU;AAAsC,YAAG;AAAuB,gBAAK;AAAyF,YAAG,CAAC;AAAY,gBAAK;AAAsD,sBAAY,QAAQ,SAAS;AAAa,gBAAQ,OAAO,YAAY,CAAC,KAAM;;AAAW;AAAuC,YAAG;AAAuB,gBAAK;AAA0F,YAAG,CAAC;AAAY,gBAAK;AAAuD,4BAAoB,cAAY,MAAI,KAAG;AAAE,sBAAY,QAAQ,SAAS;AAAa,YAAG;AAAS,wBAAW,QAAQ;AAAO,kBAAQ,mBAAmB;;;AAAS,oBAAY,CAAC,gBAAe,GAAE,gBAAe,CAAC,aAAY,GAAE,WAAU,IAAG,eAAc,IAAG,gBAAe,IAAG,aAAY;AAAW,+BAAuB,QAAQ,iBAAgB,CAAC,uBAAsB;AAAG,oDAA4C,QAAQ;SAAkB,qBAAoB;AAAW,8BAAoB;AAAE,qBAAU,GAAE,IAAE,iBAAgB,EAAE;AAAG,kBAAQ;;AAAuB,gBAAQ,kBAAgB;AAAM,qBAAU,GAAE,IAAE,MAAI,GAAE,EAAE;AAAE,8BAAoB,QAAQ,kBAAgB,IAAE,KAAG;AAAE,4BAAoB,QAAQ,kBAAgB,MAAI,KAAG,QAAQ;AAAgB,sBAAY,QAAQ,kBAAgB;AAAI,4BAAoB,WAAS,KAAG;AAAQ,wBAAc;AAAM,qBAAU,GAAE,IAAE,KAAI,EAAE;AAAE,8BAAoB,YAAU,IAAE,KAAG;AAAE,gBAAQ,MAAM,qBAAoB,QAAQ,kBAAgB,OAAK,GAAE;AAAW,gBAAQ,MAAM,qBAAoB,QAAQ,kBAAgB,MAAI,GAAE,QAAQ;AAAiB,gBAAQ,MAAM,qBAAoB,QAAQ,kBAAgB,MAAI,GAAE;SAAK,YAAW;SAAa,UAAS,IAAG,cAAa,MAAK,iBAAgB;SAAa,iBAAgB;AAAW,YAAG,QAAQ,iBAAe;AAAM,iBAAM,QAAQ,aAAa,SAAO;AAAG,oBAAQ,aAAa;;AAAQ,kBAAQ,eAAa;;AAAK,YAAG,0BAAwB;AAAiB;SAA4B,YAAW;AAAmB,iBAAO;AAAgB,YAAG;AAAI,kBAAQ,MAAM,qBAAoB,KAAG,KAAG,GAAE;AAAU,kBAAQ,MAAM,qBAAoB,KAAG,KAAG,GAAE;AAAG,kBAAQ,MAAM,qBAAoB,KAAG,MAAI,GAAE;AAAG,kBAAQ,MAAM,qBAAoB,KAAG,MAAI,GAAE;AAAG,kBAAQ;AAAkB,iCAAuB,KAAG,GAAE;AAAY,iCAAuB,GAAE,GAAE;AAAG,6BAAiB;AAAE,cAAG;AAAwB,wBAAY,CAAC,KAAM;;;SAAY,cAAa;AAAW,gBAAQ;AAAkB,gBAAQ,MAAM,qBAAoB,mBAAiB,KAAG,GAAE;AAAI,gBAAQ,MAAM,qBAAoB,mBAAiB,KAAG,GAAE;AAAG,+BAAuB,mBAAiB,GAAE;AAAY,2BAAiB,eAAa;AAAE,+BAAuB,GAAE,GAAE;AAAG,oBAAY,CAAC,KAAM;SAAgB,qBAAoB;AAAW,sBAAa,QAAQ;AAAU,wBAAY,QAAQ,SAAS;AAAG,cAAG,WAAS,QAAQ;AAAQ,oBAAQ,mBAAmB,QAAQ;;;AAAS,gBAAQ,WAAS;AAAG,qBAAU,GAAE,IAAE,QAAQ,cAAc,QAAO,EAAE;AAAG,wBAAW,QAAQ,cAAc;AAAG,kBAAO;;AAAY,gBAAQ,gBAAc;AAAG,qBAAU,GAAE,IAAE,QAAQ,eAAe,QAAO,EAAE;AAAG,wBAAW,QAAQ,eAAe;AAAG,wBAAY,QAAO;AAAQ,kBAAQ,eAAe;AAAS,kBAAO;;AAAY,gBAAQ,iBAAe;SAAI,gBAAe;AAAkB,YAAG,CAAC;AAAQ;AAAO,YAAG,QAAQ;AAAkB,0BAAc,oBAAoB,QAAQ,mBAAiB,OAAK;AAAG,8BAAoB,QAAQ,mBAAiB,OAAK,KAAG;AAAE,gBAAM;AAAW,gBAAM,QAAQ;;AAAkB,gBAAQ,mBAAiB;AAAE,YAAG,QAAQ,qBAAmB,QAAQ;AAAU,gBAAM,QAAQ;AAAW,gBAAQ,YAAU;AAAE,YAAG,QAAQ;AAAO,kBAAQ,OAAO,UAAQ;SAAM,oBAAmB;AAAiB,eAAO,QAAQ,SAAS,QAAO,QAAQ;AAAQ,gBAAQ,cAAc,KAAK;AAAQ,gBAAQ,eAAe,OAAO,QAAQ,eAAe,QAAQ,UAAQ;AAAG,gBAAQ,eAAe,QAAO;AAAS,gBAAO,UAAQ;SAAW,uBAAsB;SAAiB,wBAAuB;AAAmC,gBAAO,YAAU;AAAY,kBAAM,EAAE;AAAQ,oBAAQ,EAAE;AAAO,cAAG,QAAO;AAAQ,oBAAQ,sCAAoC,QAAO,QAAQ;AAAiB,cAAG,EAAE,mBAAiB,EAAE,mBAAiB;AAAiB,yBAAW,QAAQ,SAAS,EAAE;AAAc,gBAAG;AAAQ,qBAAO,OAAO,YAAY,EAAE,MAAK,EAAE;;AAAsB,sBAAQ,MAAM,4CAA0C,MAAI,yBAAuB,EAAE,kBAAgB;;AAAuC,oBAAQ,sCAAoC;AAAU;;AAAO,cAAG,QAAM;AAA+B;qBAAuD,QAAM;AAAe,2BAAe,EAAE;qBAAc,QAAM;AAAiB,6BAAiB,EAAE;qBAAmB,QAAM;AAAc,0BAAc,EAAE;qBAAmB,QAAM;AAAgB,4BAAgB,EAAE;qBAAmB,QAAM;AA
AU,oBAAO,SAAO;AAAK,gBAAG;AAAkB,gCAAkB;AAAQ,gBAAG,QAAO;AAAY,sBAAO;AAAa,qBAAO,QAAO;;qBAAoB,QAAM;AAAS,gBAAI,YAAU,EAAE,cAAY,OAAK,EAAE;qBAAiB,QAAM;AAAY,gBAAI,YAAU,EAAE,cAAY,OAAK,EAAE;qBAAiB,QAAM;AAAS,kBAAM,YAAU,EAAE,cAAY,OAAK,EAAE;qBAAiB,QAAM;AAAQ,2BAAa,QAAO,WAAS,QAAQ,KAAK,qBAAoB,QAAO,QAAQ,SAAO,MAAI;AAAG,gBAAG;AAAU,sBAAQ,mBAAmB;;qBAAiB,QAAM;AAAc,oBAAQ,mBAAmB;qBAAgB,QAAM;AAAkB,oBAAQ,sBAAsB,EAAE;qBAAc,EAAE,KAAK,WAAS;AAAgB,oBAAO,YAAY,EAAE;;AAAW,gBAAI,oCAAkC;;AAAK,kBAAQ,sCAAoC;;AAAW,gBAAO,UAAQ;AAAY,cAAI,4BAA0B,EAAE,WAAS,MAAI,EAAE,SAAO,OAAK,EAAE;;AAAU,YAAG;AAAqB,kBAAO,GAAG,WAAU;AAAe,oBAAO,UAAU,CAAC,MAAK;;AAAS,kBAAO,GAAG,SAAQ;AAAe,oBAAO,QAAQ;;AAAQ,kBAAO,GAAG,QAAO;AAAe,oBAAQ,IAAI;;;AAAoD,gBAAO,YAAY,CAAC,KAAM,QAAO,WAAY,OAAO,0BAAwB,YAAW,YAAwB,YAAwB,cAA4B;SAAmC,sBAAqB;AAAW,4BAAkB,WAAW;AAA6C,gBAAQ,cAAc,KAAK,IAAI,OAAO;SAAiB,cAAa;AAAW,YAAG,QAAQ,cAAc,UAAQ;AAAG,kBAAQ;AAAuB,kBAAQ,uBAAuB,QAAQ,cAAc;;AAAI,YAAG,QAAQ,cAAc,SAAO;AAAE,iBAAO,QAAQ,cAAc;;AAAW,iBAAO;SAAM,cAAa;AAAgB,gBAAM,YAAY,QAAM;AAAM,eAAM,YAAY,QAAM;;;AAAO;AAAgD,qBAAW,WAAS;AAAS,oBAAU;AAAS,qBAAa;;AAAU,aAAO,yBAAuB;AAAoB;AAA4B,eAAO;;AAAc,aAAO,sBAAoB;AAAiB;AAAsD,cAAM,uBAAqB,aAAa,aAAW,WAAS,CAAC,WAAS,aAAa,YAAU,oBAAmB,MAAK,OAAK,aAAa,QAAM;;AAAqB;AAAiC,yBAAe,MAAM,MAAK;;AAAM;AAAwB,UAAG;AAAqB,8BAAoB;AAAW,kBAAM,QAAQ;AAAY,iBAAO,EAAE,KAAG,MAAI,EAAE,KAAG;;iBAAa;AAAwB,8BAAoB;AAAW,iBAAO,YAAY,QAAM,OAAO;;iBAA0C,OAAO,YAAU;AAAa,8BAAoB;;AAAa,8BAAoB;AAAW,iBAAO,YAAY;;AAAO;AAAyB,4BAAoB,uBAAqB,KAAG;AAAM,eAAO;;AAAM;AAA2B,YAAG;AAAuB,iBAAO,oCAAoC,GAAE,GAAE,MAAK;AAAK,mBAAW,QAAQ,CAAC,MAAU;;AAAU;AAAuE,YAAG,kBAAgB;AAAc,sBAAY,CAAC,KAAM;mBAAwC;AAAwB,sBAAY,CAAC,cAAe,gBAAe,KAAM;;AAA4B,wBAAY,QAAQ,SAAS;AAAgB,wBAAW,WAAS,QAAQ;AAAO,cAAG,CAAC;AAAQ;;AAAO,kBAAO,YAAY,CAAC,KAAM;;AAAuB,eAAO;;AAAE;AAAkB;;AAAQ;AAAqF,yBAAe,iBAAe;AAAE,oBAAU,YAAU;;AAAE;AAAkD,YAAG,QAAM,KAAG,OAAK,mBAAmB,UAAQ,OAAK;AAAK,iBAAM;AAAI,YAAG;AAAuB,oBAAQ,QAAQ,KAAK,qBAAoB,QAAM,GAAE,KAAI;AAAS,cAAG,QAAM;AAAY,mBAAM;AAAI,cAAG,QAAM;AAAY,mBAAM;AAAG,cAAG,QAAM;AAAK,mBAAO;AAAE,gBAAK,+CAA6C;;AAAS,0BAAc,QAAQ,KAAK,qBAAoB,QAAM;AAAG,cAAG,OAAK;AAAU,mBAAM;AAAG,qBAAS,YAAY;AAAM,qBAAS,OAAK;AAAQ,kBAAQ,MAAM,qBAAoB,oCAAkC,GAAE;AAAM,+BAAmB;AAAK,iBAAM,QAAM;AAAgB,mBAAK,YAAY;AAAM,gBAAG,OAAK;AAAM,qBAAM;;AAAI;AAA+C,mBAAK,QAAQ,KAAK,qBAAoB,oCAAkC;;AAAG,iBAAO;;;AAAG;AAA8C,eAAO,mCAAiC;;AAAE;AAA8C,eAAO,mCAAiC;;AAAE;AAA8C,2BAAmB,WAAW,MAAK,KAAI,MAAI;;AAAK;AAAyC,eAAO,UAAU;;AAAuB;AAAyD,0BAAgB,UAAU,SAAO;AAAE,oBAAU;AAAY,mBAAS,WAAW,cAAY;AAAG,gBAAM,QAAM;AAAE,qBAAU,GAAE,IAAE,aAAY;AAAK,8BAAoB,IAAE,KAAG,UAAU,IAAE;;AAAG,kBAAQ,0CAA0C,OAAM,aAAY,MAAK;AAAM,qBAAa;AAAO,eAAO;;AAAI,2DAAmD;AAAG;AAAsC,YAAG,CAAC,iBAAiB;AAAO,2BAAiB,QAAM;;AAAG,mBAAS,iBAAiB;AAAM,aAAK,SAAO;AAAE;AAAO,eAAM,KAAG,mBAAmB;AAAW,cAAG,OAAK,OAAK,OAAK;AAAK,kBAAI,MAAI,IAAE,CAAC;AAAE,iBAAK,KAAK,oBAAoB,OAAK;AAAI,mBAAK;;AAAO,kBAAI,MAAI,IAAE,CAAC;AAAE,iBAAK,KAAK,oBAAoB,OAAK;AAAI,mBAAK;;;AAAG,eAAO;;AAAK;AAAuE,uDAA+C,SAAO;AAAY,gBAAM,QAAM;AAAE,qBAAU,GAAE,IAAE,aAAY;AAAK,yDAA+C,KAAG,oBAAoB,IAAE;;AAAG,2BAAiB,QAAM;AAAE,mBAAS,CAAC,eAAa,qBAAqB,SAAO,WAAW,CAAC,QAAM;AAAG,YAAG;AAAc,uBAAW,+CAA+C;AAAG,0BAAc,+CAA+C;AAAG,0BAAc,iBAAiB,QAAO;AAAW,iBAAO,KAAK,MAAM,MAAK;;AAAW,eAAO,KAAK,MAAM,MAAK;;AAAgD;AAAqC,eAAO,mBAAmB;;AAAO;AAAyC;AAAI,qBAAW,KAAK,OAAK,QAAO,aAAW,UAAQ;AAAI,qCAA2B,WAAW;AAAQ,iBAAO;;;;AAAa;AAAgD,wBAAc,kBAAgB;AAAE,sBAAY;AAA4B,YAAG,iBAAe;AAAS,iBAAO;;AAAM,4BAAkB;AAAM,0BAAgB;AAAW,YAAG,gBAAc;AAAa,iBAAO;;AAAM,0BAAgB;AAAS,2BAAgB,GAAE,WAAS,GAAE,WAAS;AAAG,kCAAsB,UAAS,KAAE,MAAG;AAAS,8BAAkB,KAAK,IAAI,mBAAkB,gBAAc;AAAW,wBAAY,KAAK,IAAI,aAAY,QAAQ,KAAK,IAAI,aAAY,eAAc,oBAAmB;AAAgB,4BAAgB,0BAA0B;AAAS,cAAG;AAAa,mBAAO;;;AAAM,eAAO;;AAAM,qBAAa,CAAC,UAAS,GAA
E,YAAW,GAAE,YAAW,GAAE,SAAQ,GAAE,YAAW,GAAE,wBAAuB,GAAE,mBAAkB,GAAE,uBAAsB,GAAE,wBAAuB,GAAE,uBAAsB,GAAE,YAAW,GAAE,2BAA0B,MAAK,iBAAgB,MAAK,iBAAgB,MAAK,gCAA+B,OAAM,yBAAwB;AAAW,qBAAU,SAAS,cAAc,SAAO,GAAE,KAAG,GAAE,EAAE;AAAG,mBAAS,eAAe;;AAAG,iBAAS,gBAAc;AAAG,iBAAS,gBAAc;SAAI,8BAA6B;AAAW,YAAG,CAAC,SAAS;AAAgC,qBAAW,KAAK,SAAS;AAAyB,mBAAS,iCAA+B;;SAAO,eAAc,IAAG,WAAU;AAA6C;AAA2C,cAAG,KAAK,UAAQ,KAAK;AAAO,mBAAO;AAAM,yBAAa;AAAM,gBAAG,KAAK,OAAI,KAAK;AAAG,qBAAO;;AAAM,iBAAO;;AAAK,sBAAa,SAAS;AAAe,qBAAS,SAAS,cAAc;AAAG,cAAG,KAAK,kBAAgB,kBAAgB,uBAAuB,KAAK,UAAS;AAAW;;;AAAQ,iBAAS,cAAc,KAAK,CAAC,gBAA8B,YAAsB;AAAoB,iBAAS,cAAc,KAAK;AAAc,iBAAO,EAAE,aAAW,EAAE;;SAAc,qBAAoB;AAAyB,qBAAU,GAAE,IAAE,SAAS,cAAc,QAAO,EAAE;AAAG,cAAG,SAAS,cAAc,GAAG,kBAAgB;AAAgB,qBAAS,cAAc,OAAO,GAAE;AAAG,cAAE;;;SAAK,gCAA+B;AAAW,eAAO,SAAS,kBAAgB,SAAS,oBAAoB;SAAqB,kBAAiB;AAAW,YAAG,CAAC,SAAS;AAAkC;;AAAO,qBAAU,GAAE,IAAE,SAAS,cAAc,QAAO,EAAE;AAAG,qBAAS,SAAS,cAAc;AAAG,mBAAS,cAAc,OAAO,GAAE;AAAG,YAAE;AAAE,eAAK,eAAe,MAAM,MAAK,KAAK;;SAAY,gBAAe,GAAE,qBAAoB,MAAK,eAAc,IAAG,2BAA0B;AAAiC,qBAAU,GAAE,IAAE,SAAS,cAAc,QAAO,EAAE;AAAG,cAAG,SAAS,cAAc,GAAG,UAAQ,UAAS,EAAC,mBAAiB,mBAAiB,SAAS,cAAc,GAAG;AAAkB,qBAAS,eAAe;;;SAAQ,gBAAe;AAAY,gBAAM,SAAS,cAAc;AAAG,UAAE,OAAO,oBAAoB,EAAE,iBAAgB,EAAE,mBAAkB,EAAE;AAAY,iBAAS,cAAc,OAAO,GAAE;SAAI,yBAAwB;AAAuB,6BAAmB;AAA+B,YAAE,SAAS;AAAe,mBAAS,sBAAoB;AAAa,mBAAS;AAAmB,uBAAa,YAAY;AAAO,mBAAS;AAAmB,YAAE,SAAS;;AAAgB,YAAG,aAAa;AAAc,uBAAa,oBAAkB;AAAe,uBAAa,OAAO,iBAAiB,aAAa,iBAAgB,gBAAe,aAAa;AAAY,mBAAS,cAAc,KAAK;AAAc,mBAAS;;AAAoC,uBAAU,GAAE,IAAE,SAAS,cAAc,QAAO,EAAE;AAAG,gBAAG,SAAS,cAAc,GAAG,UAAQ,aAAa,UAAQ,SAAS,cAAc,GAAG,mBAAiB,aAAa;AAAiB,uBAAS,eAAe;;;;SAAS,gCAA+B;AAAuE,uBAAa;AAAY,sBAAY,WAAW;AAAI,4BAAoB,WAAS,KAAG;AAAY,4BAAoB,UAAQ,KAAG,KAAG;AAAU,4BAAoB,UAAQ,KAAG,KAAG;AAAS,2CAAmC,cAAa,WAAU,kBAAiB,WAAU;AAAS,qBAAa;SAAW,iCAAgC;AAAuB,gBAAO;eAAmB;AAAE,mBAAO;eAAO;AAAE,mBAAO,QAAQ;;AAA4C,mBAAO;;SAAe,sBAAqB;AAAiB,YAAG,CAAC;AAAO,iBAAM;AAAG,YAAG,UAAQ;AAAO,iBAAM;AAAU,YAAG,UAAQ;AAAO,iBAAM;AAAU,eAAO,UAAQ,OAAO,WAAS,OAAO,WAAS;SAAI,mBAAkB;AAAW,eAAO,SAAS,qBAAmB,SAAS;;AAA0B;AAAmC,qBAAW,gBAAgB,YAAU;AAAE,sBAAY,QAAQ;AAAQ,qBAAa,UAAS,SAAQ;AAAQ,eAAO;;AAAQ;AAA0G,uBAAa;AAAY,sBAAY,WAAW;AAAI,8BAAoB;AAAE,YAAG;AAAc,4BAAgB,gBAAgB;;AAAc,4BAAoB,WAAS,KAAG;AAAgB,4BAAoB,UAAQ,KAAG,KAAG;AAAM,4BAAoB,UAAQ,KAAG,KAAG;AAAO,2CAAmC,cAAa,WAAU,GAAE,iBAAgB;AAAS,qBAAa;;AAAU;AAAuG,uBAAa,eAAa,aAAa,gBAAc;AAAG,iEAAyD,cAAa,cAAa,OAAM;;AAAQ;AAA2C,eAAO,UAAQ,IAAE,aAAa,WAAS;;AAAQ,+BAAuB,CAAC,GAAE,OAAO,aAAW,cAAY,WAAS,GAAE,OAAO,WAAS,cAAY,SAAO;AAAG;AAAmC,iBAAO,yBAAyB;AAAQ,yBAAe,mBAAmB,WAAU,QAAO,aAAW,cAAY,SAAS,cAAc,UAAQ;AAAW,eAAO;;AAAW;AAAyC,eAAO,kBAAkB;;AAAQ;AAAiF,qBAAW,wBAAwB;AAAQ,YAAG,CAAC;AAAO,iBAAM;AAAG,YAAG,OAAO;AAAiB,8BAAoB,OAAO,mBAAiB,KAAG;AAAM,8BAAoB,OAAO,kBAAgB,KAAG,KAAG;;AAAO,YAAG,OAAO,mBAAiB,CAAC,OAAO;AAA6B,cAAG,OAAO;AAAgB,qBAAO,OAAO;AAAgB,mCAAuB;AAAM,cAAG,OAAO,eAAa,OAAO,YAAY;AAAO,+BAAiB,OAAO,YAAY,MAAM,aAAa;AAAM,iCAAmB,aAAa,OAAK,KAAG,aAAa,OAAK,KAAG,aAAa,OAAK,OAAO,SAAO,aAAa,OAAK,OAAO;;AAAO,iBAAO,QAAM;AAAM,iBAAO,SAAO;AAAO,cAAG;AAAoB,mBAAO,YAAY,MAAM,SAAS,GAAE,GAAE,OAAM;;mBAAiB,OAAO;AAAiB,6BAAiB,oBAAoB,OAAO,kBAAgB,KAAG;AAAG,gEAAsD,cAAa,QAAO,OAAM;AAAQ,iBAAO;;AAAO,iBAAM;;AAAG,eAAO;;AAAE;AAA8E,YAAG;AAAuB,iBAAO,oCAAoC,GAAE,GAAE,QAAO,OAAM;AAAQ,eAAO,mDAAmD,QAAO,OAAM;;AAAQ;AAAkE,qBAAW,wBAAwB;AAAQ,YAAG;AAAQ,iBAAO,mDAAmD,QAAO,OAAM;;AAAa,iBAAO,gDAAgD,QAAO,OAAM;;;AAAS;AAA0D,oBAAU,YAAU;;AAAE;AAAoD,mBAAS,WAAS;AAAE,eAAK,OAAK;;AAAE;AAAoD,kBAAQ,IAAI,aAAa;AAA0B,YAAG;AAAK,cAAI,yBAAuB;AAAwB,gBAAI,4BAA4B,OAAM;;AAAU,cAAI,yBAAuB;AAAqC,gBAAI,4BAA4B,MAAK,OAAM,OAAM;;AAAY,cAAI,2BAAyB;AAA4C,gBAAI,8BAA8B,MAAK,OAAM,MAAK,SAAQ;;AAAY,iB
AAO;;;AAAG;AAAqD,kBAAQ,IAAI,aAAa;AAA2B,YAAG;AAAK,cAAI,uBAAqB;AAAW,mBAAO,IAAI;;AAA2B,cAAI,uBAAqB;AAAc,gBAAI,wBAAwB;;AAAM,cAAI,qBAAmB;AAAc,gBAAI,sBAAsB;;AAAM,cAAI,mBAAiB;AAAc,mBAAO,IAAI,oBAAoB;;AAAM,iBAAO;;;AAAG;AAAgD,kBAAQ,IAAI,aAAa;AAAsB,YAAG;AAAK,cAAI,iBAAe;AAAiB,gBAAI,oBAAoB,GAAE;;AAAO,iBAAO;;;AAAG,eAAO,CAAC,SAAQ,GAAE,WAAU,GAAE,SAAQ,IAAG,eAAc,IAAG,UAAS,IAAG,cAAa,IAAG,eAAc,IAAG,UAAS,IAAG,UAAS,IAAG,SAAQ,IAAG,MAAK,IAAG,UAAS,IAAG,gBAAe,MAAK,mBAAkB,IAAG,iBAAgB,IAAG,cAAa,IAAG,aAAY,IAAG,iBAAgB,GAAE,MAAK;AAAW,kCAAwB,IAAI,aAAa,GAAG;AAAuB,qBAAU,GAAE,IAAE,GAAG,uBAAsB;AAAK,aAAG,yBAAyB,KAAG,oBAAoB,SAAS,GAAE,IAAE;;AAAG,gCAAsB,IAAI,WAAW,GAAG;AAAuB,qBAAU,GAAE,IAAE,GAAG,uBAAsB;AAAK,aAAG,uBAAuB,KAAG,kBAAkB,SAAS,GAAE,IAAE;;SAAK,aAAY;AAAgC,YAAG,CAAC,GAAG;AAAW,aAAG,YAAU;;SAAY,UAAS;AAAgB,kBAAQ,GAAG;AAAU,qBAAU,MAAM,QAAO,IAAE,KAAI;AAAK,gBAAM,KAAG;;AAAK,eAAO;SAAK,uBAAsB,KAAI,0BAAyB,CAAC,IAAG,wBAAuB,CAAC,IAAG,WAAU;AAAqC,qBAAW;AAAG,qBAAU,GAAE,IAAE,OAAM,EAAE;AAAG,oBAAQ,SAAO,oBAAoB,SAAO,IAAE,KAAG,KAAG;AAAG,oBAAQ,aAAa,oBAAoB,SAAO,IAAE,KAAG,IAAG,MAAI,IAAE,SAAU;;AAAK,eAAO;SAAQ,eAAc;AAAwC,kBAAQ,OAAO,WAAW,SAAQ;AAAwB,YAAG,CAAC;AAAI,iBAAO;AAAE,qBAAW,GAAG,gBAAgB,KAAI;AAAwB,eAAO;SAAQ,iBAAgB;AAAqC,qBAAW,QAAQ;AAAG,4BAAoB,SAAO,KAAG,KAAG;AAAgB,sBAAY,CAAC,QAAc,YAAW,wBAAuB,SAAQ,uBAAuB,cAAa,OAAM;AAAK,YAAG,IAAI;AAAO,cAAI,OAAO,cAAY;AAAQ,WAAG,SAAS,UAAQ;AAAQ,YAAG,OAAO,uBAAuB,8BAA4B,eAAa,uBAAuB;AAA2B,aAAG,eAAe;;AAAS,eAAO;SAAQ,oBAAmB;AAAwB,WAAG,iBAAe,GAAG,SAAS;AAAe,eAAO,MAAI,QAAM,GAAG,kBAAgB,GAAG,eAAe;AAAM,eAAM,CAAE,kBAAe,CAAC;SAAQ,YAAW;AAAwB,eAAO,GAAG,SAAS;SAAgB,eAAc;AAAwB,YAAG,GAAG,mBAAiB,GAAG,SAAS;AAAe,aAAG,iBAAe;AAAK,YAAG,OAAO,aAAW;AAAS,mBAAS,0BAA0B,GAAG,SAAS,eAAe,MAAM;AAAQ,YAAG,GAAG,SAAS,kBAAgB,GAAG,SAAS,eAAe,MAAM;AAAO,aAAG,SAAS,eAAe,MAAM,OAAO,cAAY;AAAU,cAAM,GAAG,SAAS,eAAe;AAAQ,WAAG,SAAS,iBAAe;SAAM,gBAAe;AAAkB,YAAG,CAAC;AAAQ,oBAAQ,GAAG;AAAe,YAAG,QAAQ;AAAmB;AAAO,gBAAQ,qBAAmB;AAAK,qBAAU,QAAQ;AAAM,8CAAsC;AAAO,+CAAuC;AAAO,0CAAkC;AAAO,eAAM,wBAAsB,OAAM,aAAa;AAA4B,6CAAmC,CAAC,qBAAoB,0BAAyB,4BAA2B,2BAA0B,iCAAgC,uBAAsB,0BAAyB,kCAAiC,kBAAiB,sBAAqB,0BAAyB,4BAA2B,iCAAgC,oBAAmB,0BAAyB,sBAAqB,kCAAiC,+BAA8B,4BAA2B,YAAW,iCAAgC,4BAA2B,gCAA+B,iCAAgC,0BAAyB,sCAAqC,mCAAkC;AAAyC,mBAAS,OAAM,4BAA0B;AAAG,aAAK,QAAQ;AAAc,cAAG,+BAA+B,QAAQ,QAAM;AAAI,mBAAM,aAAa;;;SAAS,sBAAqB;AAAkB,gBAAM,GAAG,SAAS;AAAS,qBAAW,GAAG,aAAa,WAAS,CAAC,UAAS,IAAG,kBAAiB,GAAE,oBAAmB,IAAG,2BAA0B;AAAI,qBAAW,OAAO;AAAS,0BAAgB,MAAM,oBAAoB,GAAE;AAAO,qBAAU,GAAE,IAAE,aAAY,EAAE;AAAG,kBAAM,MAAM,iBAAiB,GAAE;AAAG,qBAAS,EAAE;AAAK,iBAAO,mBAAiB,KAAK,IAAI,OAAO,kBAAiB,KAAK,SAAO;AAAG,cAAG,KAAK,MAAM,OAAK;AAAK,mBAAK,KAAK,MAAM,GAAE,KAAK,YAAY;;AAAM,oBAAQ,MAAM,mBAAmB,GAAE;AAAM,cAAG;AAAK,qBAAO,GAAG,SAAS,GAAG;AAAU,mBAAO,QAAM,CAAC,EAAE,MAAK;AAAI,eAAG,SAAS,MAAI;AAAI,yBAAU,GAAE,IAAE,EAAE,MAAK,EAAE;AAAG,sBAAM,OAAK,MAAI,IAAE;AAAI,oBAAI,MAAM,mBAAmB,GAAE;AAAG,mBAAG,GAAG,SAAS,GAAG;AAAU,iBAAG,SAAS,MAAI;;;;;AAAS,iDAAyC,CAAC,WAAU,aAAY;AAAoB;AAAgE,gCAAsB;AAAG,gBAAM,cAAY;AAAE,0BAAkB,WAAS,CAAC,CAAC,oBAAoB,IAAG,MAAG;AAAI,0BAAkB,WAAS,CAAC,CAAC,oBAAoB,IAAG,MAAG;AAAI,0BAAkB,aAAW,CAAC,CAAC,oBAAoB,IAAG,MAAG;AAAI,0BAAkB,eAAa,CAAC,CAAC,oBAAoB,IAAG,OAAI;AAAI,0BAAkB,wBAAsB,CAAC,CAAC,oBAAoB,IAAG,OAAI;AAAI,0BAAkB,2BAAyB,CAAC,CAAC,oBAAoB,IAAG,OAAI;AAAI,8BAAoB,oBAAoB,IAAG,OAAI;AAAI,0BAAkB,qBAAmB,qCAAqC;AAAiB,0BAAkB,kCAAgC,CAAC,CAAC,oBAAoB,IAAG,OAAI;AAAI,0BAAkB,eAAa,oBAAoB,IAAG,OAAI;AAAI,0BAAkB,eAAa,oBAAoB,IAAG,OAAI;AAAI,0BAAkB,4BAA0B,oBAAoB,IAAG,OAAI;AAAI,0BAAkB,sBAAoB,oBAAoB,IAAG,OAAI;AAAI,0BAAkB,2BAAyB,oBAAoB,IAAG,OAAI;AAAI,0BAAkB,+BAA6B,oBAAoB,IAAG,OAAI;AAAI,qBAAW,wBAAwB;AAAQ,YAAG,CAAC;AAAQ,iBAAM;;AAAG,YAAG,kBAAkB;AAAqB,iBAAM;;AAAG,4BAAkB
,GAAG,cAAc,QAAO;AAAmB,eAAO;;AAAc;AAAiD,eAAO,oCAAoC,IAAG;;AAAI,iBAAS,CAAC,WAAU;AAAmB,0BAAgB;AAAgE,eAAO,YAAY,KAAK,UAAU,MAAM;SAAI,gBAAe;AAA+B,iBAAO;AAAE,qBAAU,MAAM,SAAO,GAAE,KAAG,GAAE;AAAK,qBAAS,MAAM;AAAG,cAAG,SAAO;AAAK,kBAAM,OAAO,GAAE;qBAAW,SAAO;AAAM,kBAAM,OAAO,GAAE;AAAG;qBAAa;AAAI,kBAAM,OAAO,GAAE;AAAG;;;AAAM,YAAG;AAAgB,iBAAK,IAAG;AAAM,kBAAM,QAAQ;;;AAAO,eAAO;SAAO,WAAU;AAAe,yBAAe,KAAK,OAAO,OAAK,qBAAkB,KAAK,OAAO,QAAM;AAAI,eAAK,KAAK,eAAe,KAAK,MAAM,KAAK,OAAO;AAAY,iBAAM,CAAC,CAAC;YAAI,CAAC,YAAY,KAAK;AAAK,YAAG,CAAC,QAAM,CAAC;AAAY,iBAAK;;AAAI,YAAG,QAAM;AAAe,kBAAM;;AAAI,eAAO,cAAW,MAAI,MAAI;SAAM,SAAQ;AAAe,qBAAW,KAAK,UAAU,cAAW,OAAO,UAAO,OAAO;AAAG,YAAG,CAAC,QAAM,CAAC;AAAK,iBAAM;;AAAI,YAAG;AAAK,gBAAI,IAAI,OAAO,GAAE,IAAI,SAAO;;AAAG,eAAO,OAAK;SAAK,UAAS;AAAe,YAAG,SAAO;AAAI,iBAAM;AAAI,wBAAc,KAAK,YAAY;AAAK,YAAG,cAAY;AAAG,iBAAO;AAAK,eAAO,KAAK,OAAO,YAAU;SAAI,SAAQ;AAAe,eAAO,KAAK,UAAU,MAAM;SAAI,MAAK;AAAW,oBAAU,MAAM,UAAU,MAAM,KAAK,WAAU;AAAG,eAAO,KAAK,UAAU,MAAM,KAAK;SAAO,OAAM;AAAc,eAAO,KAAK,UAAU,IAAE,MAAI;;AAAK,qBAAa,CAAC,UAAS,IAAG,SAAQ,CAAC,MAAK,IAAG,KAAI,WAAU;AAAsB,sBAAW,SAAS,QAAQ;AAAQ,YAAG,SAAO,KAAG,SAAO;AAAK,UAAA,YAAS,IAAE,MAAI,KAAK,kBAAkB,SAAO;AAAI,kBAAO,SAAO;;AAAO,kBAAO,KAAK;;SAAQ,SAAQ,QAAU,KAAI;AAAW,iBAAS,WAAS;AAAE,kBAAQ,oBAAoB,SAAS,UAAQ,KAAG;AAAG,eAAO;SAAK,QAAO;AAAc,kBAAQ,aAAa;AAAK,eAAO;SAAK,OAAM;AAAmB,eAAO;;AAAM;AAAuB,YAAG;AAAuB,iBAAO,oCAAoC,GAAE,GAAE;AAAI,eAAO;;AAAE;AAA8D,YAAG;AAAuB,iBAAO,oCAAoC,GAAE,GAAE,IAAG,YAAW,aAAY,QAAO;;AAAW;AAAuC,YAAG;AAAuB,iBAAO,oCAAoC,GAAE,GAAE,IAAG,KAAI,QAAO;AAAM,kBAAQ;AAAE,qBAAU,GAAE,IAAE,QAAO;AAAK,oBAAQ,oBAAoB,MAAI,IAAE,KAAG;AAAG,oBAAQ,oBAAoB,MAAK,KAAE,IAAE,MAAI;AAAG,uBAAU,GAAE,IAAE,KAAI;AAAK,qBAAS,UAAU,IAAG,mBAAmB,MAAI;;AAAI,iBAAK;;AAAI,4BAAoB,QAAM,KAAG;AAAI,eAAO;;AAAE;AAAuC,sBAAY,QAAQ,aAAa;AAAM,YAAG;AAAQ;;AAAU;AAA4C,YAAG,QAAQ,iBAAe;AAAM,kBAAQ,eAAa;;AAAG,gBAAQ,aAAa,KAAK;AAAW,qBAAW,SAAQ;;;AAAO;AAAsC,YAAG;AAAuB,gBAAK;AAAwF,sBAAW,QAAQ;AAAe,YAAG,QAAO,YAAU;AAAU,gBAAK;AAAkB,YAAG,CAAC,aAAa;AAAY,gBAAK;AAAkC,gBAAQ,eAAe,KAAK;AAAQ,wBAAc,QAAQ,MAAI;AAAG,qBAAU,GAAE,IAAE,KAAI,EAAE;AAAG,8BAAoB,YAAU,IAAE,KAAG,KAAG;;AAAE,wBAAc,aAAa,YAAU,aAAa;AAAU,sBAAY,QAAQ,SAAS,aAAa,eAAa,CAAC,iBAAc,WAAU,aAAa,WAAU,WAAU,aAAa,WAAU,mBAAkB,aAAa,mBAAkB,QAAO,aAAa,aAAY,kBAAiB,aAAa;AAAa,kBAAQ,QAAQ,oBAAkB;AAAE,gBAAQ,MAAM,qBAAoB,MAAK,MAAG,IAAG;AAAG,gBAAQ,MAAM,qBAAoB,MAAK,MAAG,IAAG;AAAG,gBAAQ,MAAM,qBAAoB,MAAK,MAAG,IAAG;AAAG,gBAAQ,MAAM,qBAAoB,MAAK,OAAI,IAAG,aAAa;AAAU,gBAAQ,MAAM,qBAAoB,MAAK,QAAK,IAAG;AAAW,gBAAQ,MAAM,qBAAoB,MAAK,OAAI,IAAG;AAAG,gBAAQ,MAAM,qBAAoB,MAAK,OAAI,IAAG,QAAQ;AAAkB,gBAAQ,MAAM,qBAAoB,MAAK,OAAI,IAAG;AAAI,gBAAQ,MAAM,qBAAoB,MAAK,QAAK,IAAG,aAAa;AAAW,gBAAQ,MAAM,qBAAoB,MAAK,OAAI,IAAG,aAAa;AAAW,gBAAQ,MAAM,qBAAoB,MAAK,OAAI,IAAG;AAAW,gBAAQ,MAAM,qBAAoB,MAAK,OAAI,KAAG,IAAG;AAAW,gBAAQ,MAAM,qBAAoB,MAAK,OAAI,MAAI,IAAG,aAAa;AAAU,gBAAQ,MAAM,qBAAoB,MAAK,OAAI,MAAI,IAAG,aAAa;AAAa,gBAAQ,MAAM,qBAAoB,MAAK,OAAI,MAAI,IAAG,aAAa;AAAW,0BAAgB;AAA8B,4BAAkB,cAAY;AAAG,gBAAQ,MAAM,qBAAoB,MAAK,QAAK,IAAG;AAAe,gBAAO,UAAQ;AAAQ,kBAAQ,CAAC,KAAM,OAAM,eAAgB,aAAa,cAAa,KAAM,aAAa,KAAI,kBAAmB,aAAa,aAAY,cAAe,aAAa,aAAY,gBAAiB,aAAa,oBAAmB,WAAY,aAAa,WAAU,WAAY,aAAa;AAAW,gBAAO,aAAW;AAAW,cAAI,OAAK,YAAY;AAAM,kBAAO,YAAY,KAAI,aAAa;;AAAe,YAAG,QAAO;AAAQ,kBAAO;AAAa,iBAAO,QAAO;;;AAAY;AAA0D,YAAG,CAAC,UAAQ,CAAC;AAAW,iBAAO,YAAY;AAAO,YAAG,CAAC;AAAQ,cAAI;AAA4D,iBAAO,YAAY;;AAAM,oBAAS,oBAAoB,SAAO,MAAI;AAAG,YAAG,UAAO;AAAQ,cAAI,+CAA6C,SAAO;AAAwE,iBAAO,YAAY;;AAAM,0BAAgB,QAAQ,KAAK,qBAAoB,SAAO,MAAI,MAAI;AAAG,wBAAc,QAAQ,KAAK,qBAAoB,SAAO,MAAI,MAAI;AAAG,YAAG;AAAO,8BAAoB,UAAQ,KAAG;AAAY,YAAG;AAAW,8BAAoB,cAAY,KAAG;AAAU,eAAO;;AAAE;AAAyB,eAAO,gBAAc;;AAAE,aAAO,mBAAiB;AAAc;AAA6D,YAA
G,OAAO,sBAAoB;AAAa,cAAI;AAAuF,iBAAO;;AAAE,YAAG,CAAC;AAAa,cAAI;AAAqD,iBAAO;;AAAG,2BAAiB;AAAG,oBAAU;AAAE,YAAG,0BAAyB,cAAa,WAAS,KAAG;AAAQ,iBAAO,sCAAsC,WAAU,aAAY,MAAK,eAAc;;AAAK,YAAG;AAAM,iBAAO;AAAM,wBAAc;AAAE,wBAAc;AAAE,uBAAa;AAAE,0BAAgB;AAAE,wBAAc;AAAE,YAAG;AAAM,sBAAU,oBAAoB,QAAM;AAAG,uBAAW;AAAM,sBAAU,oBAAoB,OAAK,KAAG;AAAG,qBAAS,oBAAoB,OAAK,MAAI,OAAK;AAAE,6BAAiB,oBAAoB,OAAK,MAAI,OAAK;AAAE,cAAG;AAAc,kCAAoB,oBAAoB,OAAK,MAAI;AAAG,gCAAkB,oBAAoB,OAAK,MAAI;AAAG,kCAAoB,QAAQ,sCAAoC,QAAQ,sCAAoC;AAAgB,mCAAuB,iBAAgB,OAAK,IAAG,OAAK;AAAI,0BAAY,oBAAoB,OAAK,MAAI;AAAG,wBAAU,oBAAoB,OAAK,MAAI;AAAG,gCAAoB,OAAK,MAAI,KAAG;AAAgB,gCAAoB,OAAK,MAAI,KAAG;;AAAmB,0BAAY,oBAAoB,OAAK,MAAI;AAAG,wBAAU,oBAAoB,OAAK,MAAI;;;AAAS,sBAAU;;AAAQ,gCAAsB,aAAW;AAAE,YAAG;AAAmB,sBAAU,UAAU,IAAG;;AAAgB,uBAAW;AAAU,kBAAO,YAAU;;AAAG,gCAAqB,QAAQ;AAAK,qBAAU,GAAE,IAAE,OAAK,GAAE,EAAE;AAAE,8BAAqB,sBAAkB,KAAG,KAAG;AAAE,4BAAoB,eAAa,KAAG;AAAiB,4BAAoB,oBAAiB,MAAI,KAAG;AAAiB,sBAAY,oBAAiB;AAAI,4BAAoB,WAAS,KAAG;AAAQ,2BAAiB,CAAC,WAAoB,WAAoB,mBAAoC,aAAwB,WAAoB,UAAkB,cAAa,eAAc,aAAY,mBAAiB,oBAAmB,iBAAgB,KAAQ;AAA2B,YAAG;AAAwB,uBAAa,MAAI;AAAc,sBAAY,cAAa;;AAAmB,yBAAe;;AAAc,eAAO;;AAAE;AAAoB,YAAE,CAAC;AAAE,eAAO,KAAG,IAAG,CAAC,WAAW,IAAG,OAAK,CAAC,UAAU,IAAE;;AAAK;AAAwB,YAAG;AAAuB,iBAAO,oCAAoC,GAAE,GAAE;AAAM,gBAAO;eAAW;AAAG,mBAAO;eAAW;AAAG,8BAAgB;AAAW,mBAAO,cAAY;eAAW;eAAS;eAAS;eAAQ;eAAS;eAAS;eAAQ;eAAS;eAAQ;eAAQ;eAAQ;eAAQ;eAAQ;eAAS;eAAQ;eAAQ;eAAS;eAAS;eAAO;eAAQ;eAAQ;eAAS;eAAS;eAAQ;eAAQ;eAAQ;eAAS;eAAQ;eAAQ;eAAQ;eAAQ;eAAQ;eAAS;eAAQ;eAAQ;eAAQ;eAAQ;eAAQ;eAAQ;eAAQ;eAAQ;AAAG,mBAAO;eAAY;eAAQ;eAAS;eAAS;eAAS;eAAQ;eAAQ;eAAS;eAAS;eAAS;eAAS;eAAS;eAAS;eAAS;eAAS;eAAS;eAAS;eAAS;eAAS;eAAS;eAAQ;eAAQ;eAAS;eAAS;eAAS;eAAS;eAAS;eAAS;eAAQ;eAAQ;eAAQ;eAAS;AAAG,mBAAM;eAAQ;eAAS;eAAS;eAAO;eAAS;eAAO;eAAS;eAAS;eAAS;eAAQ;eAAQ;eAAS;eAAS;eAAS;eAAQ;AAAG,mBAAO;eAAO;eAAQ;eAAQ;eAAQ;eAAQ;AAAE,mBAAO;eAAU;eAAQ;eAAQ;AAAG,mBAAO;eAAQ;eAAQ;eAAQ;AAAG,mBAAO;eAAgB;eAAQ;AAAE,mBAAO;eAAW;eAAQ;AAAG,mBAAO;eAAQ;eAAQ;AAAG,mBAAO;eAAU;AAAE,mBAAO;eAAa;AAAE,mBAAO;eAAW;AAAG,mBAAO;eAAW;AAAG,mBAAO;eAAW;AAAG,mBAAO;eAAW;AAAG,mBAAO;eAAS;AAAG,mBAAO;eAAS;AAAG,mBAAO;eAAS;AAAG,mBAAO;eAAS;AAAE,mBAAO;eAAS;AAAI,mBAAO;eAAQ;AAAG,mBAAO;eAAQ;AAAE,mBAAO;eAAQ;AAAE,mBAAO;eAAO;AAAG,mBAAO;eAAO;AAAI,gBAAG,OAAO,cAAY;AAAS,qBAAO,UAAU,0BAAwB;AAAE,mBAAO;;;AAAG,iBAAS;AAAI,eAAM;;AAAG,UAAG,CAAC;AAAuB,gBAAQ;;AAA2B,gBAAQ;AAAa;AAAU,SAAG;AAAO,iCAAyB,CAAC,MAAK,SAAQ,iDAAgD,WAAU,UAAS,WAAU;AAAU,0BAAkB,CAAC,GAAI,gBAAe,GAAI,cAAa,GAAI,kCAAiC,GAAI,QAAO,GAAI,mDAAkD,GAAI,wBAAuB,GAAI,wBAAuB,GAAI,qBAAoB,GAAI,oCAAmC,GAAI,oCAAmC,GAAI,wBAAuB,GAAI,+BAA8B,GAAI,uCAAsC,GAAI,yBAAwB,GAAI,qCAAoC,GAAI,uCAAsC,GAAI,6BAA4B,GAAI,kCAAiC,GAAI,WAAU,GAAI,UAAS,GAAI,WAAU,GAAI,gBAAe,QAAS,cAAY,OAAO,eAAc,GAAI,sBAAqB,GAAI,uBAAsB,GAAI,iBAAgB,GAAI,eAAc,GAAI,SAAQ,GAAI,UAAS,OAAQ;AAAW,gBAAQ;AAAa,aAAO,SAAO;AAAI,+BAAuB,OAAO,wBAAsB;AAAW,eAAO,sBAAmB,OAAO,wBAAsB,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,kBAAU,OAAO,WAAS;AAAW,eAAO,SAAM,OAAO,WAAS,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,6BAAqB,OAAO,sBAAoB;AAAW,eAAO,oBAAiB,OAAO,sBAAoB,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,0BAAkB,OAAO,mBAAiB;AAAW,eAAO,iBAAc,OAAO,mBAAiB,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,qBAAa,OAAO,cAAY;AAAW,eAAO,YAAS,OAAO,cAAY,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,kBAAU,OAAO,WAAS;AAAW,eAAO,SAAM,OAAO,WAAS,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,oBAAY,OAAO,aAAW;AAAW,eAAO,WAAQ,OAAO,aAAW,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,qBAAa,OAAO,cAAY;AAAW,eAAO,YAAS,OAAO,cAAY,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,yBAAiB,OAAO,kBAAgB;AAAW,eAAO,gBAAa,OAAO,kBAAgB,OAAO,OAAO,MAAM,MA
AM,MAAK;;AAAY,yBAAiB,OAAO,kBAAgB;AAAW,eAAO,gBAAa,OAAO,kBAAgB,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,oBAAY,OAAO,aAAW;AAAW,eAAO,WAAQ,OAAO,aAAW,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,iCAAyB,OAAO,0BAAwB;AAAW,eAAO,wBAAqB,OAAO,0BAAwB,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,2BAAmB,OAAO,oBAAkB;AAAW,eAAO,kBAAe,OAAO,oBAAkB,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,oBAAY,OAAO,aAAW;AAAW,eAAO,WAAQ,OAAO,aAAW,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,0BAAkB,OAAO,mBAAiB;AAAW,eAAO,iBAAc,OAAO,mBAAiB,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,mCAA2B,OAAO,4BAA0B;AAAW,eAAO,0BAAuB,OAAO,4BAA0B,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,mBAAW,OAAO,YAAU;AAAW,eAAO,UAAO,OAAO,YAAU,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,2BAAmB,OAAO,oBAAkB;AAAW,eAAO,kBAAe,OAAO,oBAAkB,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,sBAAc,OAAO,eAAa;AAAW,eAAO,aAAU,OAAO,eAAa,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,4BAAoB,OAAO,qBAAmB;AAAW,eAAO,mBAAgB,OAAO,qBAAmB,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,yBAAiB,OAAO,kBAAgB;AAAW,eAAO,gBAAa,OAAO,kBAAgB,OAAO,OAAO,MAAM,MAAM,MAAK;;AAAY,kCAA0B,OAAO,2BAAyB;AAAW,eAAO,yBAAsB,OAAO,2BAAyB,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,oBAAY,OAAO,aAAW;AAAW,eAAO,WAAQ,OAAO,aAAW,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,sBAAc,OAAO,eAAa;AAAW,eAAO,aAAU,OAAO,eAAa,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,qBAAa,OAAO,cAAY;AAAW,eAAO,YAAS,OAAO,cAAY,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,0BAAkB,OAAO,mBAAiB;AAAW,eAAO,iBAAc,OAAO,mBAAiB,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,kBAAU,OAAO,WAAS;AAAW,eAAO,SAAM,OAAO,WAAS,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,uBAAe,OAAO,gBAAc;AAAW,eAAO,cAAW,OAAO,gBAAc,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,wBAAgB,OAAO,iBAAe;AAAW,eAAO,eAAY,OAAO,iBAAe,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,qBAAa,OAAO,cAAY;AAAW,eAAO,YAAS,OAAO,cAAY,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,qBAAa,OAAO,cAAY;AAAW,eAAO,YAAS,OAAO,cAAY,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,qBAAa,OAAO,cAAY;AAAW,eAAO,YAAS,OAAO,cAAY,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,sBAAc,OAAO,eAAa;AAAW,eAAO,aAAU,OAAO,eAAa,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,oBAAY,OAAO,aAAW;AAAW,eAAO,WAAQ,OAAO,aAAW,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,iCAAyB,OAAO,0BAAwB;AAAW,eAAO,wBAAqB,OAAO,0BAAwB,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,iCAAyB,OAAO,0BAAwB;AAAW,eAAO,wBAAqB,OAAO,0BAAwB,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,iCAAyB,OAAO,0BAAwB;AAAW,eAAO,wBAAqB,OAAO,0BAAwB,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,sBAAc,OAAO,eAAa;AAAW,eAAO,aAAU,OAAO,eAAa,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,oBAAY,OAAO,aAAW;AAAW,eAAO,WAAQ,OAAO,aAAW,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,mBAAW,OAAO,YAAU;AAAW,eAAO,UAAO,OAAO,YAAU,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,mBAAW,OAAO,YAAU;AAAW,eAAO,UAAO,OAAO,YAAU,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,kBAAU,OAAO,WAAS;AAAW,eAAO,SAAM,OAAO,WAAS,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,mBAAW,OAAO,YAAU;AAAW,eAAO,UAAO,OAAO,YAAU,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,4BAAoB,OAAO,qBAAmB;AAAW,eAAO,mBAAgB,OAAO,qBAAmB,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,qBAAa,OAAO,cAAY;AAAW,eAAO,YAAS,OAAO,cAAY,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,8BAAsB,OAAO,uBAAqB;AAAW,eAAO,qBAAkB,OAAO,uBAAqB,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,mBAAW,OAAO,YAAU;AAAW,eAAO,UAAO,OAAO,YAAU,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,uBAAe,OAAO,gBAAc;AAAW,eAAO,cAAW,OAAO,gBAAc,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,sBAAc,OAAO,eAAa;AAAW,eAAO,aAAU,OAAO,eAAa,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,qBAAa,OAAO,cAAY;AAAW,eAAO,YAAS,OAAO,cAAY,OAAO,OAAO,
OAAO,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,qBAAa,OAAO,cAAY;AAAW,eAAO,YAAS,OAAO,cAAY,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,kBAAU,OAAO,WAAS;AAAW,eAAO,SAAM,OAAO,WAAS,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,oBAAY,OAAO,aAAW;AAAW,eAAO,WAAQ,OAAO,aAAW,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,+BAAuB,OAAO,wBAAsB;AAAW,eAAO,sBAAmB,OAAO,wBAAsB,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,0BAAkB,OAAO,mBAAiB;AAAW,eAAO,iBAAc,OAAO,mBAAiB,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,kBAAU,OAAO,WAAS;AAAW,eAAO,SAAM,OAAO,WAAS,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,kBAAU,OAAO,WAAS;AAAW,eAAO,SAAM,OAAO,WAAS,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,uBAAe,OAAO,gBAAc;AAAW,eAAO,cAAW,OAAO,gBAAc,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,0BAAkB,OAAO,mBAAiB;AAAW,eAAO,iBAAc,OAAO,mBAAiB,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,oBAAY,OAAO,aAAW;AAAW,eAAO,WAAQ,OAAO,aAAW,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,kBAAU,OAAO,WAAS;AAAW,eAAO,SAAM,OAAO,WAAS,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,wCAAgC,OAAO,iCAA+B;AAAW,eAAO,+BAA4B,OAAO,iCAA+B,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,8BAAsB,OAAO,uBAAqB;AAAW,eAAO,qBAAkB,OAAO,uBAAqB,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,qCAA6B,OAAO,8BAA4B;AAAW,eAAO,4BAAyB,OAAO,8BAA4B,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,sBAAc,OAAO,eAAa;AAAW,eAAO,aAAU,OAAO,eAAa,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,qCAA6B,OAAO,8BAA4B;AAAW,eAAO,4BAAyB,OAAO,8BAA4B,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,yDAAiD,OAAO,kDAAgD;AAAW,eAAO,gDAA6C,OAAO,kDAAgD,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,4DAAoD,OAAO,qDAAmD;AAAW,eAAO,mDAAgD,OAAO,qDAAmD,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,wDAAgD,OAAO,iDAA+C;AAAW,eAAO,+CAA4C,OAAO,iDAA+C,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,+CAAuC,OAAO,wCAAsC;AAAW,eAAO,sCAAmC,OAAO,wCAAsC,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,iDAAyC,OAAO,0CAAwC;AAAW,eAAO,wCAAqC,OAAO,0CAAwC,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,gDAAwC,OAAO,yCAAuC;AAAW,eAAO,uCAAoC,OAAO,yCAAuC,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,kDAA0C,OAAO,2CAAyC;AAAW,eAAO,yCAAsC,OAAO,2CAAyC,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,kDAA0C,OAAO,2CAAyC;AAAW,eAAO,yCAAsC,OAAO,2CAAyC,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,kDAA0C,OAAO,2CAAyC;AAAW,eAAO,yCAAsC,OAAO,2CAAyC,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,gEAAwD,OAAO,yDAAuD;AAAW,eAAO,uDAAoD,OAAO,yDAAuD,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,kDAA0C,OAAO,2CAAyC;AAAW,eAAO,yCAAsC,OAAO,2CAAyC,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,kDAA0C,OAAO,2CAAyC;AAAW,eAAO,yCAAsC,OAAO,2CAAyC,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,kDAA0C,OAAO,2CAAyC;AAAW,eAAO,yCAAsC,OAAO,2CAAyC,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,kDAA0C,OAAO,2CAAyC;AAAW,eAAO,yCAAsC,OAAO,2CAAyC,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,kDAA0C,OAAO,2CAAyC;AAAW,eAAO,yCAAsC,OAAO,2CAAyC,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,sDAA8C,OAAO,+CAA6C;AAAW,eAAO,6CAA0C,OAAO,+CAA6C,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,+CAAuC,OAAO,wCAAsC;AAAW,eAAO,sCAAmC,OAAO,wCAAsC,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,iCAAyB,OAAO,0BAAwB;AAAW,eAAO,wBAAqB,OAAO,0BAAwB,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,sBAAc,OAAO,eAAa;AAAW,eAAO,aAAU,OAAO,eAAa,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,uBAAe,OAAO,gBAAc;AAAW,eAAO,cAAW,OAAO,gBAAc,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,yBAAiB,OAAO,kBAAgB;AAAW,eAAO,gBAAa,OAAO,kBAAgB,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,uBAAe,OAAO,gBAAc;AAAW,eAAO,cAAW,OAAO,gBAAc,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,sBAAc,OAAO,eAAa;AAAW,eAAO,aAAU,OAAO,eAAa,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,uBAAe,OAAO,gBAAc;AAAW,eAAO,cAAW,OAAO,gBAAc,OAAO,OAAO,OAAO,MAAM,MAAK;;AAAY,aAAO,SAAO;AAAI,aAAO,WAAS;AAAM,aAAO,aAAW;AAAQ,aAAO,aAAW;AAAQ,aAAO,mBAAiB;AAAc,aAAO,gBAAc;AAAW,aAAO,gBAAc;AAAW;AAAc,aAAO,UAAQ;AAAe,YAAG;AAAW,eAAK;;AAAa,oBAAQ,OAAO;AAAwB,iBAAO,0BAAwB;AAAW,gBAAG;AAAI;AAAM,iBAAK;;;AAAS,eAAO;;AAAQ;AAA4B,aAAK,OAAK;AAAa,aAAK,
UAAQ,kCAAgC,UAAO;AAAI,aAAK,SAAO;;AAAO,8BAAsB;AAAqB,YAAG,CAAC;AAAU;AAAM,YAAG,CAAC;AAAU,kCAAsB;;AAAW;AAAmB,eAAK,QAAM;AAAW,YAAG,kBAAgB;AAAG;;AAAO;AAAS,YAAG,kBAAgB;AAAE;AAAO;AAAiB,cAAG;AAAU;AAAO,sBAAU;AAAK,iBAAO,eAAa;AAAK,cAAG;AAAM;AAAO;AAAc;AAAU,cAAG,OAAO;AAAwB,mBAAO;AAA0B;;AAAU,YAAG,OAAO;AAAc,iBAAO,aAAa;AAAc,qBAAW;AAAW,uBAAW;AAAW,qBAAO,aAAa;eAAK;AAAG;aAAS;;AAAQ;;;AAAS,aAAO,SAAO;AAAI,UAAG,OAAO;AAAY,YAAG,OAAO,OAAO,cAAY;AAAW,iBAAO,aAAW,CAAC,OAAO;AAAY,eAAM,OAAO,WAAW,SAAO;AAAG,iBAAO,WAAW;;;AAAS,UAAG,CAAC;AAAuB,wBAAc;AAAK,UAAG,CAAC;AAAuB;AAGx1tE,aAAO;;;AAIT,MAAI,OAAO,YAAY,YAAY,OAAO,WAAW;AAC/C,WAAO,UAAU;WACV,OAAO,WAAW,cAAc,OAAO;AAC9C,WAAO,IAAI;AAAa,aAAO;;WACxB,OAAO,YAAY;AAC1B,YAAQ,mCAAmC;;ACpBjD,gCAAA,WAAA;AACA,0BAAyB;AACvB,qBAAiB,OAAO,aAAa,eAAe,SAAS,gBAAgB,SAAS,cAAc,MAAM;AAC1G,QAAI,OAAO,eAAe;AAAa,mBAAa,cAAc;AAClE,WACF;AACE,2BAAoB,sBAAqB;AAE3C,mBAAW,OAAO,uBAAoB,cAAY,qBAAkB;AAAG,4BAAoB;AAAG;AAAQ,WAAI,OAAO;AAAQ,YAAG,OAAO,eAAe;AAAM,0BAAgB,OAAK,OAAO;;;AAAM,uBAAe;AAAG,wBAAgB;AAAiB,kBAAU;AAAyB,cAAM;;AAAS,+BAAuB;AAAM,kCAA0B;AAAM,gCAAwB;AAAM,iCAAyB;AAAM,2BAAmB,OAAO,WAAS;AAAS,8BAAsB,OAAO,kBAAgB;AAAW,4BAAoB,OAAO,YAAU,YAAU,OAAO,QAAQ,aAAW,YAAU,OAAO,QAAQ,SAAS,SAAO;AAAS,6BAAqB,CAAC,sBAAoB,CAAC,uBAAqB,CAAC;AAAsB,4BAAoB;AAAG;AAA0B,YAAG,OAAO;AAAe,iBAAO,OAAO,cAAc,MAAK;;AAAiB,eAAO,kBAAgB;;AAAK;AAA8C;AAAW;AAAa,UAAG;AAAqB,YAAG;AAAuB,4BAAgB,eAAgB,QAAQ,mBAAiB;;AAAS,4BAAgB,YAAU;;AAAI,gBAAM;AAAqC,cAAG,CAAC;AAAO,qBAAO;AAAc,cAAG,CAAC;AAAS,uBAAS;AAAgB,qBAAS,SAAS,aAAa;AAAU,iBAAO,OAAO,gBAAgB,UAAS,SAAO,OAAK;;AAAS,qBAAW;AAA8B,oBAAQ,MAAM,UAAS;AAAM,cAAG,CAAC,IAAI;AAAQ,kBAAI,IAAI,WAAW;;AAAK,kBAAO,IAAI;AAAQ,iBAAO;;AAAK,YAAG,QAAQ,QAAQ,SAAO;AAAG,wBAAY,QAAQ,QAAQ,GAAG,QAAQ,OAAM;;AAAK,qBAAW,QAAQ,QAAQ,MAAM;AAAG,gBAAQ,MAAM,qBAAoB;AAAa,cAAG,CAAE,eAAc;AAAa,kBAAM;;;AAAM,gBAAQ,MAAM,sBAAqB;AAAO,gBAAM;AAAiB,kBAAQ,QAAQ;;AAAS,eAAO,aAAW;AAAW,iBAAM;;iBAAsC;AAAsB,YAAG,OAAO,QAAM;AAAa,kBAAM;AAAuB,mBAAO,KAAK;;;AAAI,qBAAW;AAAuB;AAAS,cAAG,OAAO,eAAa;AAAY,mBAAO,IAAI,WAAW,WAAW;;AAAI,kBAAK,KAAK,GAAE;AAAU,kBAAO,OAAO,UAAO;AAAU,iBAAO;;AAAM,YAAG,OAAO,cAAY;AAAa,uBAAW;mBAAmB,OAAO,aAAW;AAAa,uBAAW;;AAAU,YAAG,OAAO,SAAO;AAAY,kBAAM;AAAiB,iBAAK;;;AAAS,YAAG,OAAO,UAAQ;AAAa,cAAG,OAAO,YAAU;AAAY,sBAAQ;AAAG,kBAAQ,MAAI;AAAM,kBAAQ,OAAK,QAAQ,QAAM,OAAO,aAAW,cAAY,WAAS;;iBAAe,sBAAoB;AAAuB,YAAG;AAAuB,4BAAgB,KAAK,SAAS;mBAAa,SAAS;AAAe,4BAAgB,SAAS,cAAc;;AAAI,YAAG;AAAY,4BAAgB;;AAAW,YAAG,gBAAgB,QAAQ,aAAW;AAAG,4BAAgB,gBAAgB,OAAO,GAAE,gBAAgB,YAAY,OAAK;;AAAQ,4BAAgB;;AAAG;AAAC,kBAAM;AAAyB,sBAAQ,IAAI;AAAe,gBAAI,KAAK,OAAM,KAAI;AAAO,gBAAI,KAAK;AAAM,mBAAO,IAAI;;AAAc,cAAG;AAAuB,yBAAW;AAAyB,wBAAQ,IAAI;AAAe,kBAAI,KAAK,OAAM,KAAI;AAAO,kBAAI,eAAa;AAAc,kBAAI,KAAK;AAAM,qBAAO,IAAI,WAAW,IAAI;;;AAAW,sBAAU;AAAuC,sBAAQ,IAAI;AAAe,gBAAI,KAAK,OAAM,KAAI;AAAM,gBAAI,eAAa;AAAc,gBAAI,SAAO;AAAsB,kBAAG,IAAI,UAAQ,OAAK,IAAI,UAAQ,KAAG,IAAI;AAAU,uBAAO,IAAI;AAAU;;AAAO;;AAAW,gBAAI,UAAQ;AAAQ,gBAAI,KAAK;;;AAAO,yBAAe;AAAgB,mBAAS,QAAM;;;;AAAa,gBAAQ,OAAO,YAAU,QAAQ,IAAI,KAAK;AAAS,gBAAQ,OAAO,eAAa,QAAQ,KAAK,KAAK;AAAS,WAAI,OAAO;AAAiB,YAAG,gBAAgB,eAAe;AAAM,iBAAO,OAAK,gBAAgB;;;AAAM,wBAAgB;AAAK,UAAG,OAAO;AAAa,qBAAW,OAAO;AAAa,UAAG,OAAO;AAAe,sBAAY,OAAO;AAAe,UAAG,OAAO;AAAQ,gBAAM,OAAO;AAAQ;AAAe,UAAG,OAAO;AAAc,qBAAW,OAAO;AAAc;AAAkB,UAAG,OAAO;AAAiB,wBAAc,OAAO;AAAiB,UAAG,OAAO,gBAAc;AAAU,YAAI;;AAAmC;AAAe,sBAAc,IAAI,YAAY,MAAM,CAAC,SAAU,KAAI,SAAU,MAAI,GAAE,SAAU;AAAY,kBAAU;AAAM,uBAAe;AAAE;AAAgC,YAAG,CAAC;AAAW,gBAAM,uBAAqB;;;AAAO;AAAyB,mBAAS,OAAO,MAAI;AAAO,gBAAO,MAAK,kCAAgC,QAAM;AAA8B,eAAO;;AAAK;AAAoD,kBAAQ,CAAC,QAAS;AAAc,qBAAQ;AAAE,cAAG,SAAM,QAAM,SAAM,UAAW,SAAM;AAAG,sBAAS,MAAI,UAAQ,KAAG;AAAE,mBAAI,WAAW;AAAK,yBAAa,MAAI,MAAI;;AAAK,iBAAO;WAAK,OAAQ;AAAc
,qBAAQ,WAAW,IAAI;AAAQ,6BAAmB,KAAI;AAAK,iBAAO;;AAAM;AAAiC,cAAG,eAAa;AAAS,mBAAO,aAAa;AAAK,cAAG,eAAa;AAAU,mBAAO,QAAQ;AAAK,iBAAO;;AAAI,mBAAS,SAAS;AAAO,oBAAU;AAAG,oBAAU;AAAE,YAAG;AAAM,uBAAU,GAAE,IAAE,KAAK,QAAO;AAAK,4BAAc,IAAI,SAAS;AAAI,gBAAG;AAAW,kBAAG,UAAQ;AAAE,wBAAM;AAAY,oBAAM,KAAG,UAAU,KAAK;;AAAS,oBAAM,KAAG,KAAK;;;;AAAK,kBAAQ,KAAK,MAAM,MAAK;AAAO,cAAI,mBAAmB;AAAK,YAAG,UAAQ;AAAE,uBAAa;AAAO,eAAO;;AAAI;AAA+C,mBAAS,YAAU;AAAG,0BAAgB,SAAS,MAAM;AAAe,iBAAO,SAAO;;AAAW,yBAAe,eAAa;AAAS,YAAG,cAAY,eAAa,CAAC;AAAM,iBAAO,SAAS;;AAAO,eAAO;AAAW,iBAAO,MAAM,OAAM,YAAW,UAAS,WAAU;;;AAAO,wBAAgB,OAAO,gBAAc,cAAY,IAAI,YAAY,UAAQ;AAAU;AAAoD,qBAAW,MAAI;AAAe,qBAAW;AAAI,eAAM,KAAK,WAAS,CAAE,WAAQ;AAAQ,YAAE;AAAO,YAAG,SAAO,MAAI,MAAI,KAAK,YAAU;AAAa,iBAAO,YAAY,OAAO,KAAK,SAAS,KAAI;;AAAc,qBAAQ;AAAG,iBAAM,MAAI;AAAQ,qBAAO,KAAK;AAAO,gBAAG,CAAE,MAAG;AAAM,sBAAK,OAAO,aAAa;AAAI;;AAAS,qBAAO,KAAK,SAAO;AAAG,gBAAI,MAAG,QAAM;AAAK,sBAAK,OAAO,aAAc,MAAG,OAAK,IAAE;AAAI;;AAAS,qBAAO,KAAK,SAAO;AAAG,gBAAI,MAAG,QAAM;AAAK,mBAAI,MAAG,OAAK,KAAG,MAAI,IAAE;;AAAQ,mBAAI,MAAG,MAAI,KAAG,MAAI,KAAG,MAAI,IAAE,KAAK,SAAO;;AAAG,gBAAG,KAAG;AAAO,sBAAK,OAAO,aAAa;;AAAS,uBAAO,KAAG;AAAM,sBAAK,OAAO,aAAa,QAAM,MAAI,IAAG,QAAM,KAAG;;;;AAAQ,eAAO;;AAAI;AAA0C,eAAO,MAAI,kBAAkB,QAAO,KAAI,kBAAgB;;AAAG;AAA4D,YAAG,CAAE,mBAAgB;AAAG,iBAAO;AAAE,uBAAa;AAAO,qBAAW,SAAO,kBAAgB;AAAE,qBAAU,GAAE,IAAE,KAAI,QAAO,EAAE;AAAG,kBAAM,KAAI,WAAW;AAAG,cAAG,KAAG,SAAO,KAAG;AAAO,qBAAO,KAAI,WAAW,EAAE;AAAG,gBAAE,QAAQ,MAAE,SAAO,MAAI,KAAG;;AAAK,cAAG,KAAG;AAAK,gBAAG,UAAQ;AAAO;AAAM,iBAAK,YAAU;qBAAU,KAAG;AAAM,gBAAG,SAAO,KAAG;AAAO;AAAM,iBAAK,YAAU,MAAI,KAAG;AAAE,iBAAK,YAAU,MAAI,IAAE;qBAAW,KAAG;AAAO,gBAAG,SAAO,KAAG;AAAO;AAAM,iBAAK,YAAU,MAAI,KAAG;AAAG,iBAAK,YAAU,MAAI,KAAG,IAAE;AAAG,iBAAK,YAAU,MAAI,IAAE;;AAAQ,gBAAG,SAAO,KAAG;AAAO;AAAM,iBAAK,YAAU,MAAI,KAAG;AAAG,iBAAK,YAAU,MAAI,KAAG,KAAG;AAAG,iBAAK,YAAU,MAAI,KAAG,IAAE;AAAG,iBAAK,YAAU,MAAI,IAAE;;;AAAI,aAAK,UAAQ;AAAE,eAAO,SAAO;;AAAS;AAAkD,eAAO,kBAAkB,MAAI,QAAO,QAAO;;AAAiB;AAA0C,cAAM,IAAI,OAAM;;AAAQ;AAAsE;AAAyC,kBAAO;AAAI,eAAO,WAAS,QAAM,IAAI,UAAU;AAAK,eAAO,YAAU,SAAO,IAAI,WAAW;AAAK,eAAO,YAAU,SAAO,IAAI,WAAW;AAAK,eAAO,YAAU,SAAO,IAAI,WAAW;AAAK,eAAO,aAAW,UAAQ,IAAI,YAAY;AAAK,eAAO,aAAW,UAAQ,IAAI,YAAY;AAAK,eAAO,aAAW,UAAQ,IAAI,aAAa;AAAK,eAAO,aAAW,UAAQ,IAAI,aAAa;;AAAK,mCAA2B,OAAO,qBAAmB;AAAS;AAAyC,eAAM,UAAU,SAAO;AAAG,yBAAa,UAAU;AAAQ,cAAG,OAAO,YAAU;AAAY,qBAAS;AAAQ;;AAAS,qBAAS,SAAS;AAAK,cAAG,OAAO,SAAO;AAAU,gBAAG,SAAS,QAAM;AAAW,qBAAO,aAAa;;AAAW,qBAAO,cAAc,MAAK,SAAS;;;AAAW,iBAAK,SAAS,QAAM,SAAU,OAAK,SAAS;;;;AAAO,yBAAiB;AAAG,uBAAe;AAAG,uBAAe;AAAG,0BAAkB;AAAG,+BAAuB;AAAM,0BAAkB;AAAM;AAAkB,YAAG,OAAO;AAAW,cAAG,OAAO,OAAO,aAAW;AAAW,mBAAO,YAAU,CAAC,OAAO;AAAW,iBAAM,OAAO,UAAU;AAAQ,wBAAY,OAAO,UAAU;;;AAAU,6BAAqB;;AAAc;AAAuB,6BAAmB;AAAK,6BAAqB;;AAAY;AAAmB,6BAAqB;;AAAY;AAAuB,wBAAc;;AAAK;AAAmB,YAAG,OAAO;AAAY,cAAG,OAAO,OAAO,cAAY;AAAW,mBAAO,aAAW,CAAC,OAAO;AAAY,iBAAM,OAAO,WAAW;AAAQ,yBAAa,OAAO,WAAW;;;AAAU,6BAAqB;;AAAe;AAAyB,qBAAa,QAAQ;;AAAI;AAA0B,sBAAc,QAAQ;;AAAI,sBAAc,KAAK;AAAK,uBAAe,KAAK;AAAM,4BAAoB;AAAE,iCAAyB;AAAK,kCAA0B;AAAK;AAA8B;AAAkB,YAAG,OAAO;AAA2B,iBAAO,0BAA0B;;;AAAkB;AAAiC;AAAkB,YAAG,OAAO;AAA2B,iBAAO,0BAA0B;;AAAiB,YAAG,mBAAiB;AAAG,cAAG,yBAAuB;AAAM,0BAAc;AAAsB,mCAAqB;;AAAK,cAAG;AAAuB,2BAAa;AAAsB,oCAAsB;AAAK;;;;AAAa,aAAO,qBAAmB;AAAG,aAAO,qBAAmB;AAAG;AAAqB,YAAG,OAAO;AAAY,iBAAO,WAAW;;AAAM,gBAAM;AAAG,YAAI;AAAM,YAAI;AAAM,gBAAM;AAAK,qBAAW;AAAE,eAAK,WAAS,OAAK;AAA+C,cAAM,IAAI,YAAY,aAAa;;AAAM;AAA+B,eAAO,OAAO,UAAU,aAAW,KAAI,WAAW,UAAQ,KAAI,QAAQ,YAAU;;AAAE,0BAAkB;AAAwC;AAA6B,eAAO,UAAU,UAAS;;AAAe,0BAAkB;AAAU;AAA6B,eAAO,UAAU,UAAS;;AAAe,2BAAmB;AAAyB,UAAG,CAAC,UAAU;AAAiB,yBAAe,WAAW;;AAAgB;AAAqB;AAAI,cAAG;AAAY,mBAAO,
IAAI,WAAW;;AAAY,cAAG;AAAY,mBAAO,WAAW;;AAAqB,kBAAK;;;AAA8D,gBAAM;;;AAAM;AAA4B,YAAG,CAAC,cAAa,uBAAoB,0BAAwB,OAAO,UAAQ,cAAY,CAAC,UAAU;AAAiB,iBAAO,MAAM,gBAAe,CAAC,aAAY,gBAAgB,KAAK;AAAmB,gBAAG,CAAC,SAAS;AAAO,oBAAK,yCAAuC,iBAAe;;AAAI,mBAAO,SAAS;aAAmB,MAAM;AAAW,mBAAO;;;AAAc,eAAO,IAAI,QAAQ;AAAyB,kBAAQ;;;AAAe;AAAsB,mBAAS,CAAC,KAAM,eAAc,wBAAyB;AAAe;AAA0C,yBAAY,UAAS;AAAQ,iBAAO,SAAO;AAAQ,uBAAW,SAAQ;AAAU,qCAA2B,WAAW;AAAQ,8BAAoB;;AAAoB,yBAAiB;AAAoB;AAA2C,0BAAgB,OAAO;;AAAa;AAA0C,iBAAO,mBAAmB,KAAK;AAAiB,mBAAO,YAAY,YAAY,QAAO;aAAQ,KAAK,UAAS;AAAiB,gBAAI,4CAA0C;AAAQ,kBAAM;;;AAAU;AAA4B,cAAG,CAAC,cAAY,OAAO,YAAY,yBAAuB,cAAY,CAAC,UAAU,mBAAiB,CAAC,UAAU,mBAAiB,OAAO,UAAQ;AAAY,kBAAM,gBAAe,CAAC,aAAY,gBAAgB,KAAK;AAAmB,2BAAW,YAAY,qBAAqB,UAAS;AAAM,qBAAO,OAAO,KAAK,2BAA0B;AAAiB,oBAAI,oCAAkC;AAAQ,oBAAI;AAA6C,uCAAuB;;;;AAAoC,mBAAO,uBAAuB;;;AAA4B,YAAG,OAAO;AAAoB;AAAI,2BAAY,OAAO,mBAAmB,MAAK;AAAiB,mBAAO;;AAAiB,gBAAI,wDAAsD;AAAG,mBAAO;;;AAAO;AAAmB,eAAM;;AAAG,iBAAW;AAAO;AAAuD,mCAA2B,WAAW;;AAAQ,iBAAS,CAAC,WAAU;AAAmB,0BAAgB;AAAgE,eAAO,YAAY,KAAK,UAAU,MAAM;SAAI,gBAAe;AAA+B,iBAAO;AAAE,qBAAU,MAAM,SAAO,GAAE,KAAG,GAAE;AAAK,qBAAS,MAAM;AAAG,cAAG,SAAO;AAAK,kBAAM,OAAO,GAAE;qBAAW,SAAO;AAAM,kBAAM,OAAO,GAAE;AAAG;qBAAa;AAAI,kBAAM,OAAO,GAAE;AAAG;;;AAAM,YAAG;AAAgB,iBAAK,IAAG;AAAM,kBAAM,QAAQ;;;AAAO,eAAO;SAAO,WAAU;AAAe,yBAAe,KAAK,OAAO,OAAK,qBAAkB,KAAK,OAAO,QAAM;AAAI,eAAK,KAAK,eAAe,KAAK,MAAM,KAAK,OAAO;AAAY,iBAAM,CAAC,CAAC;YAAI,CAAC,YAAY,KAAK;AAAK,YAAG,CAAC,QAAM,CAAC;AAAY,iBAAK;;AAAI,YAAG,QAAM;AAAe,kBAAM;;AAAI,eAAO,cAAW,MAAI,MAAI;SAAM,SAAQ;AAAe,qBAAW,KAAK,UAAU,cAAW,OAAO,UAAO,OAAO;AAAG,YAAG,CAAC,QAAM,CAAC;AAAK,iBAAM;;AAAI,YAAG;AAAK,gBAAI,IAAI,OAAO,GAAE,IAAI,SAAO;;AAAG,eAAO,OAAK;SAAK,UAAS;AAAe,YAAG,SAAO;AAAI,iBAAM;AAAI,wBAAc,KAAK,YAAY;AAAK,YAAG,cAAY;AAAG,iBAAO;AAAK,eAAO,KAAK,OAAO,YAAU;SAAI,SAAQ;AAAe,eAAO,KAAK,UAAU,MAAM;SAAI,MAAK;AAAW,oBAAU,MAAM,UAAU,MAAM,KAAK,WAAU;AAAG,eAAO,KAAK,UAAU,MAAM,KAAK;SAAO,OAAM;AAAc,eAAO,KAAK,UAAU,IAAE,MAAI;;AAAK,qBAAa,CAAC,UAAS,IAAG,SAAQ,CAAC,MAAK,IAAG,KAAI,WAAU;AAAsB,sBAAW,SAAS,QAAQ;AAAQ,YAAG,SAAO,KAAG,SAAO;AAAK,UAAA,YAAS,IAAE,MAAI,KAAK,kBAAkB,SAAO;AAAI,kBAAO,SAAO;;AAAO,kBAAO,KAAK;;SAAQ,SAAQ,QAAU,KAAI;AAAW,iBAAS,WAAS;AAAE,kBAAQ,OAAO,SAAS,UAAQ,KAAG;AAAG,eAAO;SAAK,QAAO;AAAc,kBAAQ,aAAa;AAAK,eAAO;SAAK,OAAM;AAAmB,eAAO;;AAAM;AAAuB,eAAO;;AAAE;;AAA+D;AAAuC,kBAAQ;AAAE,qBAAU,GAAE,IAAE,QAAO;AAAK,oBAAQ,OAAO,MAAI,IAAE,KAAG;AAAG,oBAAQ,OAAO,MAAK,KAAE,IAAE,MAAI;AAAG,uBAAU,GAAE,IAAE,KAAI;AAAK,qBAAS,UAAU,IAAG,OAAO,MAAI;;AAAI,iBAAK;;AAAI,eAAO,QAAM,KAAG;AAAI,eAAO;;AAAE;AAAuB,aAAK;;AAAQ;AAA0B,cAAM;;AAAM;AAAoB,YAAE,CAAC;AAAE,eAAO,KAAG,IAAG,CAAC,WAAW,IAAG,OAAK,CAAC,UAAU,IAAE;;AAAK,0BAAkB,CAAC,iCAAkC,kCAAiC,UAAW,WAAU,SAAU,UAAS,UAAW,WAAU,WAAY,YAAW,QAAS;AAAS,gBAAQ;AAAa,aAAO,SAAO;AAAI,kBAAU,OAAO,WAAS;AAAW,eAAO,SAAM,OAAO,WAAS,OAAO,OAAO,SAAS,MAAM,MAAK;;AAAY,6BAAqB,OAAO,sBAAoB;AAAW,eAAO,oBAAiB,OAAO,sBAAoB,OAAO,OAAO,oBAAoB,MAAM,MAAK;;AAAY,0BAAkB,OAAO,mBAAiB;AAAW,eAAO,iBAAc,OAAO,mBAAiB,OAAO,OAAO,iBAAiB,MAAM,MAAK;;AAAY,qBAAa,OAAO,cAAY;AAAW,eAAO,YAAS,OAAO,cAAY,OAAO,OAAO,YAAY,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,QAAQ,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,QAAQ,MAAM,MAAK;;AAAY,kBAAU,OAAO,WAAS;AAAW,eAAO,SAAM,OAAO,WAAS,OAAO,OAAO,SAAS,MAAM,MAAK;;AAAY,oBAAY,OAAO,aAAW;AAAW,eAAO,WAAQ,OAAO,aAAW,OAAO,OAAO,WAAW,MAAM,MAAK;;AAAY,qBAAa,OAAO,cAAY;AAAW,eAAO,YAAS,OAAO,cAAY,OAAO,OAAO,YAAY,MAAM,MAAK;;AAAY,yBAAiB,OAAO,kBAAgB;AAAW,eAAO,gBAAa,OAAO,kBAAgB,OAAO,OAAO,gBAAgB,MAAM,MAAK;;AAAY,yBAAiB,OAAO,kBAAgB;AAAW,eAAO,gBAAa,OAAO,kBAAgB,OAAO,OAAO,gBAAgB,MAAM,MAAK;;AAAY,oBAAY,OAAO,aAAW;AAAW,eAAO,WAAQ,OAAO,aAAW,OAAO,OAAO,WAAW,MAAM
,MAAK;;AAAY,iCAAyB,OAAO,0BAAwB;AAAW,eAAO,wBAAqB,OAAO,0BAAwB,OAAO,OAAO,wBAAwB,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,QAAQ,MAAM,MAAK;;AAAY,2BAAmB,OAAO,oBAAkB;AAAW,eAAO,kBAAe,OAAO,oBAAkB,OAAO,OAAO,kBAAkB,MAAM,MAAK;;AAAY,oBAAY,OAAO,aAAW;AAAW,eAAO,WAAQ,OAAO,aAAW,OAAO,OAAO,WAAW,MAAM,MAAK;;AAAY,0BAAkB,OAAO,mBAAiB;AAAW,eAAO,iBAAc,OAAO,mBAAiB,OAAO,OAAO,iBAAiB,MAAM,MAAK;;AAAY,mCAA2B,OAAO,4BAA0B;AAAW,eAAO,0BAAuB,OAAO,4BAA0B,OAAO,OAAO,0BAA0B,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,QAAQ,MAAM,MAAK;;AAAY,mBAAW,OAAO,YAAU;AAAW,eAAO,UAAO,OAAO,YAAU,OAAO,OAAO,UAAU,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,QAAQ,MAAM,MAAK;;AAAY,2BAAmB,OAAO,oBAAkB;AAAW,eAAO,kBAAe,OAAO,oBAAkB,OAAO,OAAO,kBAAkB,MAAM,MAAK;;AAAY,sBAAc,OAAO,eAAa;AAAW,eAAO,aAAU,OAAO,eAAa,OAAO,OAAO,aAAa,MAAM,MAAK;;AAAY,4BAAoB,OAAO,qBAAmB;AAAW,eAAO,mBAAgB,OAAO,qBAAmB,OAAO,OAAO,mBAAmB,MAAM,MAAK;;AAAY,yBAAiB,OAAO,kBAAgB;AAAW,eAAO,gBAAa,OAAO,kBAAgB,OAAO,OAAO,gBAAgB,MAAM,MAAK;;AAAY,kCAA0B,OAAO,2BAAyB;AAAW,eAAO,yBAAsB,OAAO,2BAAyB,OAAO,OAAO,yBAAyB,MAAM,MAAK;;AAAY,oBAAY,OAAO,aAAW;AAAW,eAAO,WAAQ,OAAO,aAAW,OAAO,OAAO,WAAW,MAAM,MAAK;;AAAY,sBAAc,OAAO,eAAa;AAAW,eAAO,aAAU,OAAO,eAAa,OAAO,OAAO,aAAa,MAAM,MAAK;;AAAY,qBAAa,OAAO,cAAY;AAAW,eAAO,YAAS,OAAO,cAAY,OAAO,OAAO,YAAY,MAAM,MAAK;;AAAY,0BAAkB,OAAO,mBAAiB;AAAW,eAAO,iBAAc,OAAO,mBAAiB,OAAO,OAAO,iBAAiB,MAAM,MAAK;;AAAY,kBAAU,OAAO,WAAS;AAAW,eAAO,SAAM,OAAO,WAAS,OAAO,OAAO,SAAS,MAAM,MAAK;;AAAY,uBAAe,OAAO,gBAAc;AAAW,eAAO,cAAW,OAAO,gBAAc,OAAO,OAAO,cAAc,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,QAAQ,MAAM,MAAK;;AAAY,wBAAgB,OAAO,iBAAe;AAAW,eAAO,eAAY,OAAO,iBAAe,OAAO,OAAO,eAAe,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,QAAQ,MAAM,MAAK;;AAAY,qBAAa,OAAO,cAAY;AAAW,eAAO,YAAS,OAAO,cAAY,OAAO,OAAO,YAAY,MAAM,MAAK;;AAAY,qBAAa,OAAO,cAAY;AAAW,eAAO,YAAS,OAAO,cAAY,OAAO,OAAO,YAAY,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,QAAQ,MAAM,MAAK;;AAAY,qBAAa,OAAO,cAAY;AAAW,eAAO,YAAS,OAAO,cAAY,OAAO,OAAO,YAAY,MAAM,MAAK;;AAAY,sBAAc,OAAO,eAAa;AAAW,eAAO,aAAU,OAAO,eAAa,OAAO,OAAO,aAAa,MAAM,MAAK;;AAAY,oBAAY,OAAO,aAAW;AAAW,eAAO,WAAQ,OAAO,aAAW,OAAO,OAAO,WAAW,MAAM,MAAK;;AAAY,iCAAyB,OAAO,0BAAwB;AAAW,eAAO,wBAAqB,OAAO,0BAAwB,OAAO,OAAO,wBAAwB,MAAM,MAAK;;AAAY,iCAAyB,OAAO,0BAAwB;AAAW,eAAO,wBAAqB,OAAO,0BAAwB,OAAO,OAAO,wBAAwB,MAAM,MAAK;;AAAY,iCAAyB,OAAO,0BAAwB;AAAW,eAAO,wBAAqB,OAAO,0BAAwB,OAAO,OAAO,wBAAwB,MAAM,MAAK;;AAAY,sBAAc,OAAO,eAAa;AAAW,eAAO,aAAU,OAAO,eAAa,OAAO,OAAO,aAAa,MAAM,MAAK;;AAAY,oBAAY,OAAO,aAAW;AAAW,eAAO,WAAQ,OAAO,aAAW,OAAO,OAAO,WAAW,MAAM,MAAK;;AAAY,mBAAW,OAAO,YAAU;AAAW,eAAO,UAAO,OAAO,YAAU,OAAO,OAAO,UAAU,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,QAAQ,MAAM,MAAK;;AAAY,mBAAW,OAAO,YAAU;AAAW,eAAO,UAAO,OAAO,YAAU,OAAO,OAAO,UAAU,MAAM,MAAK;;AAAY,kBAAU,OAAO,WAAS;AAAW,eAAO,SAAM,OAAO,WAAS,OAAO,OAAO,SAAS,MAAM,MAAK;;AAAY,mBAAW,OAAO,YAAU;AAAW,eAAO,UAAO,OAAO,YAAU,OAAO,OAAO,UAAU,MAAM,MAAK;;AAAY,4BAAoB,OAAO,qBAAmB;AAAW,eAAO,mBAAgB,OAAO,qBAAmB,OAAO,OAAO,mBAAmB,MAAM,MAAK;;AAAY,qBAAa,OAAO,cAAY;AAAW,eAAO,YAAS,OAAO,cAAY,OAAO,OAAO,YAAY,MAAM,MAAK;;AAAY,8BAAsB,OAAO,uBAAqB;AAAW,eAAO,qBAAkB,OAAO,uBAAqB,OAAO,OAAO,qBAAqB,MAAM,MAAK;;AAAY,mBAAW,OAAO,YAAU;AAAW,eAAO,UAAO,OAAO,YAAU,OAAO,OAAO,UAAU,MAAM,MAAK;;AAAY,uBAAe,OAAO,gBAAc;AAAW,eAAO,cAAW,OAAO,gBAAc,OAAO,OAAO,cAAc,MAAM,MAAK;;AAAY,sBAAc,OAAO,eAAa;AAAW,eAAO,aAAU,OAAO,eAAa,OAAO,OAAO,aAAa,MAAM,MAAK;;AAAY,qBAAa,OAAO,cAAY;AAAW,eAAO,YAAS,OAAO,cAAY,OAAO,OAAO,YAAY,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,QAAQ,MAAM,MAAK;;AAAY,qBAAa,OAAO,cAAY;AAAW,eAAO,YAAS,
OAAO,cAAY,OAAO,OAAO,YAAY,MAAM,MAAK;;AAAY,kBAAU,OAAO,WAAS;AAAW,eAAO,SAAM,OAAO,WAAS,OAAO,OAAO,SAAS,MAAM,MAAK;;AAAY,oBAAY,OAAO,aAAW;AAAW,eAAO,WAAQ,OAAO,aAAW,OAAO,OAAO,WAAW,MAAM,MAAK;;AAAY,+BAAuB,OAAO,wBAAsB;AAAW,eAAO,sBAAmB,OAAO,wBAAsB,OAAO,OAAO,sBAAsB,MAAM,MAAK;;AAAY,0BAAkB,OAAO,mBAAiB;AAAW,eAAO,iBAAc,OAAO,mBAAiB,OAAO,OAAO,iBAAiB,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,QAAQ,MAAM,MAAK;;AAAY,iBAAS,OAAO,UAAQ;AAAW,eAAO,QAAK,OAAO,UAAQ,OAAO,OAAO,QAAQ,MAAM,MAAK;;AAAY,kBAAU,OAAO,WAAS;AAAW,eAAO,SAAM,OAAO,WAAS,OAAO,OAAO,SAAS,MAAM,MAAK;;AAAY,kBAAU,OAAO,WAAS;AAAW,eAAO,SAAM,OAAO,WAAS,OAAO,OAAO,SAAS,MAAM,MAAK;;AAAY,uBAAe,OAAO,gBAAc;AAAW,eAAO,cAAW,OAAO,gBAAc,OAAO,OAAO,cAAc,MAAM,MAAK;;AAAY,0BAAkB,OAAO,mBAAiB;AAAW,eAAO,iBAAc,OAAO,mBAAiB,OAAO,OAAO,iBAAiB,MAAM,MAAK;;AAAY,oBAAY,OAAO,aAAW;AAAW,eAAO,WAAQ,OAAO,aAAW,OAAO,OAAO,WAAW,MAAM,MAAK;;AAAY,kBAAU,OAAO,WAAS;AAAW,eAAO,SAAM,OAAO,WAAS,OAAO,OAAO,SAAS,MAAM,MAAK;;AAAY,oBAAY,OAAO,aAAW;AAAW,eAAO,WAAQ,OAAO,aAAW,OAAO,OAAO,WAAW,MAAM,MAAK;;AAAY,sBAAc,OAAO,eAAa;AAAW,eAAO,aAAU,OAAO,eAAa,OAAO,OAAO,cAAc,MAAM,MAAK;;AAAY,uBAAe,OAAO,gBAAc;AAAW,eAAO,cAAW,OAAO,gBAAc,OAAO,OAAO,eAAe,MAAM,MAAK;;AAAY,yBAAiB,OAAO,kBAAgB;AAAW,eAAO,gBAAa,OAAO,kBAAgB,OAAO,OAAO,iBAAiB,MAAM,MAAK;;AAAY,aAAO,SAAO;AAAI,aAAO,WAAS;AAAM;AAAc,aAAO,UAAQ;AAAe,YAAG;AAAW,eAAK;;AAAa,oBAAQ,OAAO;AAAwB,iBAAO,0BAAwB;AAAW,gBAAG;AAAI;AAAM,iBAAK;;;AAAS,eAAO;;AAAQ;AAA4B,aAAK,OAAK;AAAa,aAAK,UAAQ,kCAAgC,UAAO;AAAI,aAAK,SAAO;;AAAO,uBAAe;AAAM,8BAAsB;AAAqB,YAAG,CAAC;AAAU;AAAM,YAAG,CAAC;AAAU,kCAAsB;;AAAW;AAAwB,4BAAkB,OAAO;AAAW;AAAI;AAAgB,oBAAQ;AAAE,eAAK,KAAI;;AAAe,cAAG,aAAa;AAAY;qBAAe,KAAG;AAAU,4BAAc;AAAK;;AAAY,wBAAU;AAAE,gBAAG,KAAG,OAAO,MAAI,YAAU,EAAE;AAAO,sBAAM,CAAC,GAAE,EAAE;;AAAO,gBAAI,uBAAqB;AAAO,kBAAM,GAAE;;;AAAY,uBAAW;;;AAAM;AAAmB,eAAK,QAAM;AAAW,YAAG,kBAAgB;AAAG;;AAAO;AAAS,YAAG,kBAAgB;AAAE;AAAO;AAAiB,cAAG;AAAU;AAAO,sBAAU;AAAK,iBAAO,eAAa;AAAK,cAAG;AAAM;AAAO;AAAc;AAAU,cAAG,OAAO;AAAwB,mBAAO;AAA0B,cAAG;AAAa,qBAAS;AAAM;;AAAU,YAAG,OAAO;AAAc,iBAAO,aAAa;AAAc,qBAAW;AAAW,uBAAW;AAAW,qBAAO,aAAa;eAAK;AAAG;aAAS;;AAAQ;;;AAAS,aAAO,SAAO;AAAI;AAA+B,YAAG,YAAU,iBAAe,YAAS;AAAG;;AAAO,YAAG;;AAAqB,kBAAM;AAAK,uBAAW;AAAO;AAAc,cAAG,OAAO;AAAU,mBAAO,UAAU;;AAAQ,cAAM,SAAO,IAAI,WAAW;;AAAS,UAAG,OAAO;AAAY,YAAG,OAAO,OAAO,cAAY;AAAW,iBAAO,aAAW,CAAC,OAAO;AAAY,eAAM,OAAO,WAAW,SAAO;AAAG,iBAAO,WAAW;;;AAAS,yBAAiB;AAAK,UAAG,OAAO;AAAgB,uBAAa;AAAM,sBAAc;AAAK;AAGp30B,aAAO;;;AAIT,MAAI,OAAO,YAAY,YAAY,OAAO,WAAW;AAC/C,WAAO,UAAU;WACV,OAAO,WAAW,cAAc,OAAO;AAC9C,WAAO,IAAI;AAAa,aAAO;;WACxB,OAAO,YAAY;AAC1B,YAAQ,uBAAuB;;ACpBrC,wBAAA,WAAA;AAEA,wBAAsB;AAEtB;AACE,iBAAa,CAAE,SAAS,CAAC,YAAY,IAAI,YAAY,IAAI,SAAS,CAAC,GAAG;AACtE,oBAAgB;AAChB,iBAAa,GAAG,IAAI,KAAK,QAAQ,QAAQ;AACvC,qBAAe,KAAK,QAAQ;AAC5B,uBAAiB,KAAK,MAAO,aAAY,SAAS,KAAK;AACvD,uBAAiB,KAAK,MAAO,aAAY,SAAS,KAAK;AACvD,yBAAmB,KAAK,QAAQ;AAChC,uBAAiB,GAAG,QAAQ,UAAU;AACpC,wBAAgB,SAAU,SAAQ;AAClC,yBAAiB,GAAG,QAAQ,UAAU;AACpC,0BAAgB,SAAU,SAAQ;AAClC,uBAAa,GAAG,IAAI,YAAY;AAC9B,oBAAQ,KAAK,CAAC,SAAS;;;;;AAK/B,WAAO;;AAGT,qBAAmB;AACjB,QAAI,eAAe;AACnB,QAAI,WAAW;AACf,QAAI,SAAS;;AAGf,oBAAkB,oBAAqB;IACrC;IACA,YAAY,GAAG,MAAM,gBAAgB,CAAC,GAAG,IAAI,CAAC,IAAI;IAClD,UAAU,GAAG,MAAM,gBAAgB,CAAC,GAAG,IAAI,CAAC,IAAI;;AAGlD,mBAAiB;AACf,mBAAe,GAAG,IAAI,IAAI,YAAY;AACtC,iBAAa,GAAG,IAAI,IAAI,UAAU;AAClC,2BAAuB,GAAG,SAAS,CAAC,QAAQ,OAAO;AACnD,WAAO,UAAU;;AAGnB;AACE,sBAAkB,GAAG,MAAM,YAAY,CAAC,GAAG,IAAI,CAAC,IAAI;AACpD,oBAAgB,GAAG,IAAI,WAAW;AAClC,qBAAiB,GAAG,MAAM,YAAY,CAAC,GAAG,IAAI,CAAC,IAAI;AACnD,+BAA2B,GAAG,IAAI,UAAU;AAC5C,8BAA0B,GAAG,IAAI,SAAS;AAC1C,wBAAoB,GAAG,IAAI,oBAAoB;AAC/C,mBAAe,GAAG,IAAI,mBAAmB;AACzC,iBAAa,GAAG,IAAI,mBAAmB;AACvC,4BAAwB,
GAAG,IAAI,QAAQ;AACvC,0BAAsB,GAAG,IAAI,MAAM;AACnC,uBAAmB;AACnB,WAAO,GAAG,SAAS,CAAC,iBAAiB,gBAAgB;;AAGvD;AACE,WAAO,GAAG,KAAK;AACb,kBAAY,MAAK,SAAS,MAAK,SAAS;AACxC,aAAO,SAAS,KAAK,aAAa,eAAe;;;;IAKnD;AACE,WAAK,iBAAiB;AACtB,WAAK,QAAQ,QAAO,SAAS;AAC7B,WAAK,SAAS,QAAO,SAAS;AAC9B,WAAK,cAAc,gBAAgB,QAAO,SAAS;AACnD,WAAK,UAAU,GAAG,SAAS,KAAK;AAChC,WAAK,YAAY,GAAG,SAAS,CAAC,KAAK,OAAO,KAAK;AAC/C,WAAK,SAAS;AACd,WAAK,aAAa;;UAGd;AAEJ,UAAK,CAAC,cAAgB,WAAW,sBAAwB,WAAW,MAAM,WAAW,KAAO,WAAW,MAAM,KAAK,KAAO,WAAW,MAAM,KAAK;AAAI,eAAO;AAC1J,+CAAyC,GAAG,KAAK;AAC/C,6BAAqB,WAAW,eAAe,CAAC,KAAK,OAAO,KAAK;AAEjE,gCAAwB,GAAG,IAAI,aAAa,IAAI,QAAQ;AACxD,kCAA0B,KAAK,eAAe,QAAQ;AACtD;AAEA,YAAI,MAAM,QAAQ;AAChB,yBAAe,kBAAkB,KAAK,UAAU,EAAE,OAAO,EAAE;AAC3D,4BAAkB,GAAG,OAAO,CAAC,OAAO,IAAI,OAAO,KAAK;AACpD,4BAAkB,GAAG,OAAO,CAAC,OAAO,IAAI,OAAO,KAAK;AACpD,0BAAe,GAAG,OAAO,CAAC,WAAW,YAAY;AACjD,uBAAa,QAAO,QAAQ;;AAE5B,uBAAa,kBAAkB;;AAEjC,8BAAsB,aAAa,YAAY,KAAK,SAAS,KAAK;AAClE,uBAAe,GAAG,MAAM,YAAY,CAAC,GAAG,IAAI,CAAC,IAAI;AACjD,0BAAkB,GAAG,QAAQ,QAAQ;AACrC,eAAO,CAAC,YAAY,eAAe;;AAErC,+BAAyB,MAAM,GAAG,MAAM,uBAAuB,OAAO,QAAQ,KAAK,OAAO,SAAS,UAAU,KAAK,OAAO,SAAS,cAAc,KAAK,OAAO,SAAS;AACrK,yBAAmB,iBAAiB;AACpC,uBAAiB;AACjB,+BAAyB,WAAW,IAAI,cAAc,GAAG,MAAM,OAAO,CAAC,UAAU,IAAI,CAAC,GAAG;AACzF,4BAAsB,iBAAiB,IAAI;AACzC,qBAAa,YAAY;AACzB,oBAAY;AACZ,eAAO;;AAGT,wBAAkB,OAAO;AACzB,6BAAuB;AACvB,sBAAgB;AACd,yBAAiB,WAAW;AAC5B,2BAAmB,UAAU;AAC7B,YAAI,aAAa,KAAK,OAAO,SAAS;AACpC,sBAAY,UAAU,cAAc;AACpC,yBAAe,KAAK,YAAY;AAChC,4BAAkB,GAAG,KAAK,MAAM,GAAG,MAAM,iBAAiB,CAAC,UAAU,gBAAgB,IAAI,CAAC,GAAG,KAAK,UAAU,QAAQ,CAAC,eAAe;AACpI,yBAAe,KAAK,CAAE,KAAK,WAAW,QAAQ;;;AAGlD,sBAAgB;AAChB,YAAM;AACN,aAAO;AACP,sBAAgB;AAChB,aAAO;QACL,OAAO;QACP,aAAa,CAAC,WAAW,MAAM,KAAK,KAAK,OAAO,WAAW,MAAM,KAAK,KAAK;;;UAIzE;AACJ,aAAQ,OAAO,eAAgB,MAAM,KAAK,iBAAiB;AAC3D,oBAAc;AACd,0BAAmB;AACjB,6BAAqB,MAAK,UAAU;AACpC,0BAAkB,uBAAuB,OAAM;AAC/C,wBAAgB,SAAS;AACzB,gCAAwB,MAAK,YAAY;AACzC,uBAAe,MAAK;AACpB,6CAAqC;AACrC,gCAAwB,aACrB,IAAI,cAAe;UACjB,UAAS,KAAK,OAAO,MAAM;UAC3B,UAAS,KAAK,OAAO,MAAM;;AAEhC,+BAAuB;UACrB,SAAS,QAAQ,MAAM,GAAG;UAC1B,aAAa,QAAQ,MAAM;UAC3B,WAAW;UACX,aAAa;;AAEf,mBAAW,MAAK;AAChB,cAAK,UAAU;AACf,cAAK,YAAY;AACjB,kBAAU;AACV,cAAM,KAAK;;AAEb,aAAO;;;AAIX;AACE,sBAAkB,MAAM,eAAe,QAAO,SAAS,WAAW,CAAE,WAAW,QAAO,SAAS,UAAU,SAAS;AAClH,kBAAc,IAAI,eAAe,WAAW;AAE5C,YAAQ,IAAI,sBAAsB,QAAO,SAAS,UAAU,MAAM,YAAY;AAC9E,WAAO;;AAGT,UAAQ,OAAO;AACf,UAAQ,iBAAiB;AACzB,UAAQ,aAAa;;AC/KrB,wBAAA,WAAA;AAAA,UAAQ,mBAAmB;IACzB,YAAY;MACV;MAAI;MAAK;MAAK;MAAK;MAAK;MAAK;MAAK;MAAK;MAAK;MAAK;MAAK;MACtD;MAAK;MAAK;MAAK;MAAK;MAAK;MAAK;MAAK;MAAK;MAAK;MAAK;MAAK;MACvD;MAAK;MAAI;MAAK;MAAI;MAAK;MAAK;MAAK;MAAI;MAAI;MAAK;MAAI;;IAEpD,gBAAgB,CAAC,IAAI,KAAK,IAAI,IAAI,IAAI,GAAG,KAAK,KAAK,KAAK,KAAK;IAC7D,gBAAgB,CAAC,KAAK,IAAI,KAAK,IAAI,IAAI,KAAK,KAAK,KAAK,KAAK;IAC3D,gBAAgB,CAAC,IAAI,KAAK,IAAI,IAAI,IAAI,IAAI,KAAK,KAAK,KAAK,KAAK;IAC9D,gBAAgB,CAAC,IAAI,IAAI,IAAI,KAAK,IAAI,IAAI,KAAK,KAAK,KAAK,KAAK;IAC9D,gBAAgB,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK;IAC/C,gBAAgB,CAAC,IAAI,GAAG,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK;IACtD,gBAAgB,CAAC,KAAK,IAAI,IAAI,IAAI,IAAI,IAAI;IAC1C,gBAAgB,CAAC,KAAK,IAAI,KAAK,IAAI,IAAI,IAAI,IAAI,KAAK;IACpD,gBAAgB,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK;IAC/C,gBAAgB,CAAC,KAAK,IAAI,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK;IACxD,gBAAgB,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK;IACzD,mBAAmB,CAAC,KAAK,IAAI,IAAI,KAAK,IAAI,KAAK,IAAI;IACnD,mBAAmB,CAAC,IAAI,KAAK,IAAI,IAAI,IAAI;IACzC,cAAc,CAAC,KAAK,KAAK,KAAK,KAAK;IACnC,eAAe,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK;IAC9C,eAAe,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK;IACxD,eAAe,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK;IAC9C,eAAe,CAAC,KAAK,KAAK,KAAK,KAAK,K
AAK,KAAK,KAAK,KAAK;IACxD,eAAe,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK;IAC9C,eAAe,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK;IACxD,eAAe,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK;IACxD,kBAAkB,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK;IACtD,kBAAkB,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK;IAC5C,aAAa,CAAC,KAAK,KAAK,KAAK,KAAK;IAClC,mBAAmB,CAAC;IACpB,SAAS,CAAC;IACV,YAAY,CAAC;IACb,iBAAiB,CAAC;IAClB,gBAAgB,CAAC;IACjB,YAAY,CAAC;IACb,WAAW,CAAC;;AAEd,UAAQ,2BAA2B;IACjC,CAAE,KAAK,aAAa,SAAS,CAAC,GAAG,IAAI,IAAI,IAAI,IAAI,IAAI;IACrD,CAAE,KAAK,aAAa,SAAS,CAAC,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI;IACtD,CAAE,KAAK,aAAa,SAAS,CAAC,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI;IACtD,CAAE,KAAK,aAAa,SAAS,CAAC,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG;IACtD,CAAE,KAAK,aAAa,SAAS,CAAC,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI;IAC9D,CAAE,KAAK,aAAa,SAAS,CAAC,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI;IAC9D,CAAE,KAAK,aAAa,SAAS,CAAC,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI;IAC9D,CAAE,KAAK,gBAAgB,SAAS,CAAC,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI;IAC7D,CAAE,KAAK,gBAAgB,SAAS,CAAC,IAAI,IAAI,IAAI,IAAI,IAAI;;;AC/CvD,kBAAA,WAAA;AAEA;AACE,uBAAmB,CAAC,IAAI,WAAW,KAAK,OAAO,IAAI,IAAI,WAAW,KAAK,OAAO;AAC9E,qBAAiB,CAAC,IAAI,SAAS,KAAK,OAAO,IAAI,IAAI,SAAS,KAAK,OAAO;AACxE,WAAO,CAAE,YAAY;;AAEvB,UAAQ,sBAAsB;AAE9B;AACE,WAAO;MACL,KAAK,IAAI,IAAI,SAAS,KAAK,IAAI,WAAW;MAC1C,KAAK,IAAI,IAAI,SAAS,KAAK,IAAI,WAAW;;;AAG9C,UAAQ,aAAa;AAErB;AACE,WAAO;MACL,IAAI,WAAW,KAAM,KAAI,SAAS,KAAK,IAAI,WAAW,MAAM;MAC5D,IAAI,WAAW,KAAM,KAAI,SAAS,KAAK,IAAI,WAAW,MAAM;;;AAGhE,UAAQ,eAAe;AAEvB;AACE,cAAU,OAAM,MAAM;AACtB,cAAU,OAAM,MAAM;AACtB,kBAAc,CAAC;MACb,IAAI,WAAW,KAAK;MAAG,IAAI,WAAW,KAAK;MAAG,IAAI,SAAS,KAAK;MAChE,IAAI,SAAS,KAAK;;AAEpB,WAAO,GAAG,MAAM,cAAc,QAAO,OAAO,CAAC,IAAI;;AAEnD,UAAQ,2BAA2B;AAEnC,qCAAkC;AAChC,mBAAe,cAAa;AAC5B,iBAAa,YAAW;AACxB,wBAAoB,CAAC,SAAS,KAAK,KAAK,GAAG,SAAS,KAAK,KAAK;AAC9D,uBAAmB,CAAC,OAAO,KAAK,YAAY,IAAI,OAAO,KAAK,YAAY;AACxE,qBAAiB,CAAC,OAAO,KAAK,YAAY,IAAI,OAAO,KAAK,YAAY;AACtE,WAAO,CAAE,YAAY,UAAU,WAAW,IAAI;;AAEhD,UAAQ,aAAa;AAErB;AACE,oBAAgB,cAAa;AAC7B,iBAAa,YAAW;AACxB,oBAAgB,KAAK,IAAI,GAAG;AAC5B,qBAAiB,UAAU;AAC3B,uBAAmB,CAAC,QAAQ,KAAK,UAAU,QAAQ,KAAK;AACxD,qBAAiB,CAAC,QAAQ,KAAK,UAAU,QAAQ,KAAK;AACtD,WAAO,CAAE,YAAY,UAAU,WAAW,IAAI;;AAEhD,UAAQ,cAAc;;ACvDtB,mBAAA,WAAA;AAAA,UAAQ,kBAAkB,CAAC,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,GAAG;AAKxD;AACE,WAAO,QAAQ,IAAI,KAAK,KAAK,KAAK,MAAO,SAAQ,KAAK,MAAO,KAAI,KAAK;;AAExE,UAAQ,mBAAmB;AAM3B;AACE,oBAAgB,KAAK,KAAK,IAAI,KAAK,MAAM,CAAE,QAAO,KAAK,OAAO,KAAK,OAAO,KAAK,OAAO;AACtF,WAAO,kBAAiB;;AAE1B,UAAQ,kBAAkB;AAC1B;AACE,WAAO,MAAM,MAAM,KAAK;;AAE1B,UAAQ,eAAe;AACvB;AACE,WAAO,CAAC,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,GAAG;;AAEvC;AACE,kBAAc;AACd,iBAAa,GAAG,IAAI,GAAG,QAAQ;AAC7B,iBAAW,GAAG,KAAK,GAAG;;AAExB,WAAO;;AAET,UAAQ,MAAM;AACd;AACE,mBAAe;AACf,iBAAa,GAAG,IAAI,IAAI,QAAQ;AAC9B,aAAO,KAAK,IAAI,GAAG;;AAErB,WAAO;;AAET,UAAQ,qBAAqB;AAC7B;AACE,oBAAgB;AAChB,iBAAa,KAAK;AAClB,mBAAe,GAAG,MAAM,MAAM;AAC5B,cAAQ,KAAK;AACb,qBAAe,GAAG,MAAM,MAAM;AAC5B,gBAAQ,KAAK,KAAK,KAAI,KAAK,MAAM,oBAAmB,MAAM;;;AAG9D,WAAO;;AAET;AACE,iBAAa,KAAK,IAAI;AACtB,iBAAa,KAAK,IAAI;AACtB,2BAAuB,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,MAAM,MAAM,IAAI,CAAC,GAAG,GAAG;AAClE,8BAA0B,wBAAuB,OAAO,IAAI,OAAO;AACnE,qCAAiC,2BAA0B,mBAAmB;AAC9E,sCAAkC,wBAAuB,CAAC,OAAO,IAAI,CAAC,OAAO;AAC7E,WAAO,2BAA0B,0BAA0B;;AAE7D,UAAQ,sBAAsB;AAC9B;AACE,8BAA0B,CAAC,CAAC,OAAO,GAAG,IAAI,OAAO,GAAG,KAAK,CAAC,OAAO,GAAG,IAAI,OAAO,GAAG;AAClF,iCAA6B,CAAC,OAAO,GAAG,IAAI,OAAO,GAAG;AACtD,gCAA4B;MAC1B,CAAC,KAAI,kBAAkB,IAAI;MAC3B,CAAC,KAAI,kBAAkB,IAAI;;AAE7B,WAAO;MACL,kBAAkB,GAAG,OAAO,oBAAoB;MAChD,kBAAkB,GAAG,OAAO,oBAAoB;MAChD,CAAC,GAAG,GAAG;;;AAGX,UAAQ,
[generated source map mappings for the bundled dist output]
AAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG
;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MA
CV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ
;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MA
CH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,
UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAA
U;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,
GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAA
G;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;M
ACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAE
Z;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;M
ACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH
,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UA
AU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE,GAAG;MACH,GAAG;MACH,UAAU;MACV,UAAU;;IAEZ;MACE
AQ;QAAO;QAAQ;QAAG;QAC1B;QAAQ;QAAQ;QAAO;QAAG;QAC1B;QAAG;QAAG;QAAG;QAAG;;;AAIhB,YAAQ,aAAa;AACnB,cAAQ,YAAY;QAClB;QAAG;QAAG;QAAG;QAAG;QACZ;QAAG;QAAG;QAAG;QAAG;QACZ;QAAG;QAAG;QAAG;QAAG;QACZ;QAAG;QAAG;QAAG;QAAG;;;AAOhB,YAAQ,cAAc;AACpB,gBAAU,IAAI,aAAa;AAC3B,yBAAmB,IAAI;AACvB,yBAAmB,IAAI;AAEvB,sBAAgB,eAAe,QAAQ,YAAY;AACnD,SAAG,WAAW,QAAQ,QAAQ,GAAG;AACjC,SAAG,UAAU,QAAQ,QAAQ,IAAI,YAAY;AAC7C;;AAGF,YAAQ,YAAY,SAAS;MAC3B;MACA;MACA;MACA;MACA;MAEA;MACA;MACA;MACA;MAEA;MACA;MACA;MAEA;MACA;MACA;MAEA;MACA;MACA;MACA;MACA;MACA;MACA,KAAK;AAEP,YAAQ,cAAc;AACpB,cAAQ,YAAY,KAAK,MAAM;QAC7B;QAAG;QAAG;QACN;QAAG;QAAI;QACP;QAAG;QAAG;;;AAIV,YAAQ,SAAS;AACf,cAAQ,YAAY,KAAK,MAAM;QAC7B;QAAI;QAAG;QACP;QAAI;QAAG;QACP;QAAI;QAAG;;;AAIX,YAAQ,SAAS;AACf,cAAQ,YAAY,KAAK,MAAM;QAC7B;QAAI;QAAI;QACR;QAAG;QAAG;QACN;QAAG;QAAG;;;AAIV,YAAQ,UAAU;AAChB,gBAAU,UAAU;AACpB,cAAQ,YAAY,KAAK,MAAM;QAC7B;QAAG,KAAK;QAAG;QACX,KAAK;QAAG,IAAI,IAAI;QAAG,KAAK;QACxB;QAAG,KAAK;QAAG;;;AAIf,YAAQ,SAAS;AACf,gBAAU,QAAQ;AAClB,cAAQ,YAAY,KAAK,MAAM;QAC7B,KAAK;QAAG,KAAK;QAAG;QAChB,KAAK;QAAG;QAAG,IAAI;QACf;QAAG,IAAI;QAAG,IAAI;;;AAOlB,YAAQ,OAAO;AACb,wBAAmB,OAAO,IAAK;AAC/B,wBAAmB,OAAO,IAAK;AAE/B,sBAAgB,eAAe,QAAQ,KAAK;AAG5C,SAAG,UAAU,QAAQ,QAAQ,IAAI,GAAG;AACpC,YAAM,KAAK;AAGX,SAAG,UAAU,QAAQ,QAAQ,IAAI,WAAW;AAC5C;;AAGF,YAAQ,KAAK,SAAS;MACpB;MACA;MACA;MACA;MAEA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA,KAAK;AAKP,YAAQ,WAAW;AACjB,wBAAmB,OAAQ;AAC3B,wBAAmB,OAAQ;AAE3B,sBAAgB,eAAe,QAAQ,SAAS;AAGhD,SAAG,UAAU,QAAQ,QAAQ,MAAM,WAAW;AAC9C;;AAGF,YAAQ,SAAS,SAAS;MACxB;MACA;MACA;MACA;MAEA;MACA;MACA;MAEA;MACA;MACA;MACA;MACA;MACA,KAAK;;AAGT,UAAQ,SAAS;;AC7lBjB,oBAAA,WAAA;AACA,kBAAyB,WAAA;AAGzB,iBAAe;AACf,kBAAgB;AAKhB;AACE;AACA,QAAI,iBAAiB,GAAG;AACtB,gBAAS,GAAG,MAAM;;AAElB,4BAAsB,MAAM,gBAAgB,MAAM,cAAc,MAAM,SAAU,MAAM,SAAU,MAAM,MAAM,KAAK;AACjH,6BAAuB,MAAM,iBAAiB,MAAM,eAAe,MAAM,UAAW,MAAM,SAAU,MAAM,MAAM,KAAK;AACrH,wBAAkB;AAClB,yBAAmB;AACnB,UAAI,QAAO,OAAO,QAAQ;AAAG,sBAAc,QAAO,OAAO;eAChD,QAAO,OAAO,SAAS;AAAG,sBAAc,gBAAiB,SAAO,OAAO,SAAS;AACzF,UAAI,QAAO,OAAO,SAAS;AAAG,uBAAe,QAAO,OAAO;eAClD,QAAO,OAAO,QAAQ;AAAG,uBAAe,iBAAkB,SAAO,OAAO,QAAQ;AACzF,UAAI,CAAC,YAAa,SAAS,UAAU,eAAiB,SAAS,WAAW;AACxE,mBAAY,OAAO,oBAAoB,cAAe,IAAI,gBAAgB,aAAa,gBAAgB,SAAS,cAAc;AAC9H,YAAI,SAAS,UAAU;AAAa,mBAAS,QAAQ;AACrD,YAAI,SAAS,WAAW;AAAc,mBAAS,SAAS;;AAE1D,kBAAY,SAAS,WAAW;AAChC,UAAI,iBAAiB;AAAW,YAAI,aAAa,OAAO,GAAG;;AACtD,YAAI,UAAU,OAAO,GAAG,GAAG,eAAe,gBAAgB,GAAG,GAAG,SAAS,OAAO,SAAS;AAC9F,UAAI,QAAO,OAAO;AAChB,YAAI,CAAC,KAAK,MAAM,CAAC,aAAc,SAAS,UAAU,UAAU,SAAW,SAAS,WAAW,UAAU;AACnG,sBAAa,OAAO,oBAAoB,cAAe,IAAI,gBAAgB,SAAS,OAAO,SAAS,UAAU,SAAS,cAAc;AACrI,cAAI,UAAU,UAAU,SAAS;AAAO,sBAAU,QAAQ,SAAS;AACnE,cAAI,UAAU,WAAW,SAAS;AAAQ,sBAAU,SAAS,SAAS;AACtE,eAAK,KAAK,GAAG,IAAI,MAAM,aAAa,IAAY,QAAA,OAAO,CAAE,QAAQ,cAAe;;AAElF,aAAK,GAAG;AACR,aAAK,GAAG,UAAU,cAAc,QAAO,OAAO;AAC9C,YAAI,QAAO,OAAO,aAAa;AAAG,eAAK,GAAG,UAAU,YAAY,QAAO,OAAO;AAC9E,YAAI,QAAO,OAAO,cAAc;AAAG,eAAK,GAAG,UAAU,WAAW,QAAO,OAAO;AAC9E,YAAI,QAAO,OAAO,SAAS;AAAG,eAAK,GAAG,UAAU,QAAQ,QAAO,OAAO;AACtE,YAAI,QAAO,OAAO,eAAe;AAAG,eAAK,GAAG,UAAU,cAAc,QAAO,OAAO;AAClF,YAAI,QAAO,OAAO,QAAQ;AAAG,eAAK,GAAG,UAAU,OAAO,QAAO,OAAO;AACpE,YAAI,QAAO,OAAO;AAAU,eAAK,GAAG,UAAU;AAC9C,YAAI,QAAO,OAAO;AAAO,eAAK,GAAG,UAAU;AAC3C,YAAI,QAAO,OAAO;AAAS,eAAK,GAAG,UAAU;AAC7C,YAAI,QAAO,OAAO;AAAO,eAAK,GAAG,UAAU;AAC3C,YAAI,QAAO,OAAO;AAAY,eAAK,GAAG,UAAU;AAChD,YAAI,QAAO,OAAO;AAAa,eAAK,GAAG,UAAU;AACjD,YAAI,QAAO,OAAO;AAAU,eAAK,GAAG,UAAU;AAC9C,YAAI,QAAO,OAAO,aAAa;AAAG,eAAK,GAAG,UAAU,YAAY,QAAO,OAAO;AAC9E,aAAK,GAAG,MAAM;AAGd,mBAAW;AACX,YAAI;AACF,2BAAiB,IAAI,WAAW,UAAU,QAAQ,UAAU,SAAS;AACrE,4BAAkB,IAAI,WAAW,UAAU,Q
AAQ,UAAU,SAAS;AACtE,aAAG,WAAW,GAAG,GAAG,UAAU,OAAO,UAAU,QAAQ,GAAG,MAAM,GAAG,eAAe;AAGlF,kBAAQ;AACR,uBAAa,UAAU,SAAS,GAAG,KAAK,GAAG;AACzC,yBAAa,GAAG,IAAI,UAAU,OAAO;AACnC,4BAAe,KAAI,IAAI,UAAU,SAAS;AAC1C,wBAAU,OAAO,SAAS,QAAQ;AAClC,wBAAU,OAAO,SAAS,QAAQ;AAClC,wBAAU,OAAO,SAAS,QAAQ;;;AAGtC,oBAAU,OAAO;;;AAGnB,oBAAY;;AAEd;AACA,UAAI,UAAU;AACZ,sBAAc,CAAC,UAAU,QAAQ,UAAU,OAAO;AAClD,iBAAS,GAAG,SAAS,UAAU,MAAM,OAAO;iBAClC,QAAO,YAAY,WAAa,qBAAqB;AAE/D,iBAAS,GAAG,QAAQ,WAAW;;AAG/B,2BAAoB,OAAO,oBAAoB,cAAe,IAAI,gBAAgB,aAAa,gBAAgB,SAAS,cAAc;AACtI,mBAAW,QAAQ;AACnB,mBAAW,SAAS;AACpB,wBAAgB,WAAW,WAAW;AACtC,gBAAQ,UAAU,WAAW,GAAG;AAChC,sBAAa,QAAQ,aAAa,GAAG,GAAG,aAAa;AACrD,iBAAS,GAAG,QAAQ,WAAW;;AAEjC,qBAAe,OAAO;AACtB,gBAAS,OAAO,WAAW;AAC3B,aAAO;AACP,aAAO;;AAET,WAAO,CAAE,QAAA,SAAQ,QAAQ,QAAO,OAAO,SAAS,YAAY;;AAG9D,UAAQ,UAAU;;AC5FlB,WAAoB,WAAA;ACVpB;;;;;;;;;;;;;;;;AAsBO,wBAAwB;AACxB,wBAAwB;;EA0B7B;AAAoB,SAAA,UAAA;AAAgC,SAAA,YAAA;AAH5C,SAAA,OAAO,IAAI;AACX,SAAA,eAAe;;EAIvB;AACE,QAAI,CAAC,KAAK,KAAK,IAAI;AACjB,WAAK,UAAU,SAAS,KAAK,SAAS;;AAExC,WAAO,KAAK,KAAK,IAAI;;EAGvB;AACE,SAAK;AACL,SAAK,KAAK,IAAI,QAAQ;;EAGxB;AACE,WAAO,KAAK,KAAK,IAAI;;EAGvB;AACE,SAAK;AACL,WAAO,KAAK,KAAK,OAAO;;EAG1B;AACE,WAAO,KAAK;;;;EAwBd;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AAEE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,KAAK,qBAAqB,KAAK,kBAAkB;;EAG1D;AAGE,WAAO,kBAAkB;;EAG3B,kBACK,GAAG,GAAG,YAAY,YAAY,MAAM,YAAY;AAEnD,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AAEE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AAEE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B,aACK,OAAO,QAAQ,UAAU,MAAM,YAAY;AAE9C,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AAEE,WAA
O,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B,sBACK,OAAO,QAAQ,UAAU,MAAM,YAAY;AAE9C,WAAO,kBAAkB;;EAG3B;AAEE,WAAO,kBAAkB;;EAE3B;AAEE,WAAO,kBAAkB;;EAE3B;AAEE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AAEE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AAEE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AAEE,WAAO,kBAAkB;;EAG3B;AAEE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AAEE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AAEE,WAAO,kBAAkB;;EAG3B;AAEE,WAAO,kBAAkB;;EAG3B;AAEE,WAAO,kBAAkB;;EAG3B;AAGE,WAAO,kBAAkB;;EAG3B;AAEE,WAAO,kBAAkB;;EAG3B;AAGE,WAAO,kBAAkB;;EAG3B;AAEE,WAAO,kBAAkB;;EAG3B;AAIE,WAAO,kBAAkB;;EAG3B;AAGE,WAAO,kBAAkB;;EAG3B;AAGE,WAAO,kBAAkB;;EAG3B;AAGE,WAAO,kBAAkB;;EAG3B;AAEE,WAAO,kBAAkB;;EAG3B;AAEE,WAAO,kBAAkB;;EAG3B;AAGE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AAIE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAI3B;AACE,WAAO,kBAAkB;;EAG3B;AAGE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AAEE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;;AAI7B;AACE,QAAM,IAAI,MACN,IAAI;;ACzpBV;;;;;;;;;;;;;;;;AAiCM;AAEJ,gBAAc,MAAM;AACpB,aAAW;AACX,cAAY;AAEZ,SAAO,UAAU;AAEf,YAAS,KAAK,WAAW,UAAW;AAEpC;AAEA,WAAO,MAAM;AACb,UAAM,WAAW,MAAM;AACvB,UAAM,SAAS;;;AAKb;AACJ,SAAO,KAAK,IAAI,MAAK,KAAK,IAAI,GAAG;;AAG7B;AACJ,SAAO,MAAM,MAAM,IAAI,MAAM,MAAM;;AAG/B;AACJ,aAAU;AACV,eAAa,GAAG,IAAI,IAAI,QAAQ;AAC9B,YAAO,IAAI;;AAEb,SAAO;;AAUH;AACJ,YAAU,KAAK;AACf,SAAQ,IAAI,IAAM,KAAI,KAAK;;AAIvB;AACJ,eAAa;AACb,eAAa,GAAG,IAAI,EAAE,QAAQ;AAC5B,iBAAa,OAAO,EAAE,MAAM,OAAO,EAAE;AACrC,cAAU,OAAO;;AAEnB,SAAO;;AAkBH;AACJ,MAAI,CAAC;AACH,UAAM,IAAI,MAAM,OAAO,QAAQ,WAAW,MAAM;;;AAI9C,gEACuD;AAC3D,SACI,YAAY,QAAQ,SACpB,MAAM,qBAAqB,WAAW,cAAc;;AAGpD;AACJ,SACI,KAAK,MACL,MAAM;;AAsBN,+BAEsC,qBAAqB;AAC/D,MAAI,UAAU;AACZ,aAAS;;AAEX,MAAI,MAAM,QAAQ,QAAQ,aAAa,QAAQ,CAAC;AAC9C,iBAAa,GAAG,IAAI,IAAI,QAAQ,EAAE;AAChC,cAAQ,IAAI,IAAI,QAAQ;;;AAG1B,WAAO,KAAK;;AAEd,SAAO;;AAcH;AACJ,MAAI,MAAM,WAAW;AAEnB,WAAO;;AAET,aAAW,MAAM;AACjB,eAAa,GAAG,IAAI,MAAM,QAAQ;AAChC,YAAQ,MAAM;;AAEhB,SAAO;;AAGH;AACJ,SAAO,MAAM,WAAW;;AAGpB;AACJ,MAAI,OAAO;AACT,WAAO;;AAET,MAAI,MAAM,QAAQ,MAAM;AACtB,WAAO;;AAGT,MAAI,GAAG,WAAW,GAAG;AACnB,WAAO;;AAET,eAAa,GAAG,IAAI,GAAG,QAAQ;AAC7B,QAAI,GAAG,OAAO,GAAG;AACf,aAAO;;;AAGX,SAAO;;AAGH;AACJ,SAAO,IAAI,MAAM;;AAGb;AAEJ,MAAK,KAAa,QAAQ;AAExB,WAAQ,KAAa,KAAK;;AAE5B,MAAI,MAAM;AACR,WAAO;aACE,MAAM;AACf,WAAO;;AAEP,gBAAY,KAAK,IAAI,IAAI;AACzB,WAAQ,OAAM,KAAM,OAAM;;;AAIxB;AACJ,gBAAc,KAAK,KAAK,KAAK,KAAK;AAClC,SAAO,CAAC,OAAO,KAAK,KAAK,OAAO;;AAe5B;AACJ,0BAAwB,IAAI,YAAY;AACxC,eAAa,GAAG,IAAI,GAAG,EAAE;AACvB,oBAAgB,KAAK;;AAEvB,UAAQ;AACR,SAAO;;AAGH;AACJ,MAAI,QAAQ,EAAE;AACZ,WAAO;;AAET,SAAO,IAAI,IAAI,OAAO,OAAO,EAAE;;AAG3B,wCACgC,aAAqB;AAEzD,SAAO,IAAI,QAAc;AACvB,mBAAe;AAEf,kBAAc;AACZ,UAAI;AACF;AACA;;AAGF;AAEA,0BAAoB,QAAQ;AAE5B,UAAI,cAAc,QAAQ,YAAY;AACpC;AACA;;AAEF,iBAAW,OAAO;;AAGpB;;;AAaE;AAEJ,kBAAgB;AAChB,oBAAkB;AAElB,eAAa,GAAG,IAAI,MAAM,QAAQ,EAAE;AAClC,QAAI,MAAM,MAAM;AACd,mBAAa,MAAM;eACV,MAAM,OAAO;AACtB,UAAI,gBAAgB;AAClB,cAAM,MACF,yDACmB,uBAAuB;;AAEhD,oBAAc;eACL,MAAM,KAAK;AACpB,YAAM,MAAM,gCAAgC,MAAM,aAAa;;;AAInE,MAAI,gBAAgB;AAClB,QAAI,OAAO,KAAK,SAAS;AACvB,YAAM,MAAM,QAAQ,yCAAyC;;AAE/D,WAAO;;AAGT,MAAI,cAAc;AAChB,UAAM,MACF,qCAAqC;;AAG3C,MAAI,OAAO,cAAc;AACvB,UAAM,MACF,wDACO,UAAU;;AAGvB,mBAAiB,MAAM;AACvB,WAAS,eAAe,OAAO;AAC/B,SAAO;;AAGH;AAEJ,eAAa,MAAM;AAGnB,SAAO,QAAQ,OAAO
,MAAM,IAAI,UAAU,KAAK,GAAG,OAAO;AAGzD,SACI,KAAK,MAAM,QAAM,MAAM,CAAC,QAAQ,KAAK,OACrC,MACI,+CAA+C,SAAS,sBAC5C;AAGpB,SACI,KAAK,MAAM,QAAM,MAAM,MACvB,MAAM,0DACU;AAGpB,SAAO,KAAK,IAAI,OAAK,IAAI,IAAI,OAAO,IAAI;;AAIpC;AAEJ,mBAA2B;AAC3B,mBAA2B;AAC3B,uBAAqB,QAAQ,QAAQ,MAAM,QAAQ,SAAS,KAAK,WAAW;AAC5E,eAAc,QAAQ,QAAQ,eAC1B,OACA,eAAe,MAAM,OAAO;AAChC,UAAQ;AACR,eAAa,GAAG,IAAI,MAAM,QAAQ,EAAE;AAClC,QAAI,QAAQ;AACV,UAAI,KAAK,OAAO,KAAK,MAAM,OAAO;AAChC,cAAM,IAAI,MACN,sBAAsB,oBAAoB,MAAM;;AAEtD,UAAK,MAAK,MAAM,QAAQ,KAAK,KAAK,MAAM,MAAM,OAAO;AACnD,iBAAS,KAAK,MAAM;AACpB,iBAAS,KAAK;;AAEhB,UAAI,KAAK,MAAM;AACb;;;AAGJ,QAAI,MAAM,OAAO;AACf,eAAS,KAAK,MAAM;AACpB,eAAS,KAAK;;;AAGlB,SAAO,CAAC,UAAU;;AAGd;AAEJ,eAAa;AACb,MAAI,SAAS,QAAQ,UAAU;AAC7B,aAAS,IAAI,aAAa;aACjB,UAAU;AACnB,aAAS,IAAI,WAAW;aACf,UAAU;AACnB,aAAS,IAAI,WAAW;;AAExB,UAAM,IAAI,MAAM,qBAAqB;;AAEvC,SAAO;;AAGH;AAEJ,eAAa;AACb,MAAI,SAAS,QAAQ,UAAU;AAC7B,aAAS,IAAI,aAAa;aACjB,UAAU;AACnB,aAAS,IAAI,WAAW;aACf,UAAU;AACnB,aAAS,IAAI,WAAW;aACf,UAAU;AACnB,aAAS,IAAI,MAAgB;;AAE7B,UAAM,IAAI,MAAM,qBAAqB;;AAEvC,SAAO;;AAGH;AAEJ,eAAa,GAAG,IAAI,KAAK,QAAQ;AAC/B,gBAAY,KAAK;AACjB,QAAI,MAAM,QAAQ,CAAC,SAAS;AAC1B,YAAM,MAAM,oBAAoB,iCAAiC;;;;AAMjE;AACJ,SAAO,UAAU,UAAU,UAAU,eAAe,UAAU,aAC1D,UAAU,WAAW,UAAU;;AAO/B;AACJ,MAAI,YAAY;AACd,WAAO;;AAET,MAAI,YAAY,aAAa,YAAY;AACvC,WAAO;;AAET,MAAI,YAAY,WAAW,YAAY,aAAa,YAAY;AAC9D,WAAO;;AAET,MAAI,YAAY,UAAU,YAAY;AACpC,WAAO;;AAET,SAAO;;AAGH;AACJ,SAAO,aAAa,gBAAgB,aAAa,cAC7C,aAAa;;AAGb;AACJ,MAAI,UAAU,aAAa,UAAU;AACnC,WAAO;aACE,UAAU;AACnB,WAAO;aACE,UAAU;AACnB,WAAO;;AAEP,UAAM,IAAI,MAAM,iBAAiB;;;AAU/B;AACJ,MAAI,OAAO;AACT,WAAO;;AAET,cAAY;AACZ,MAAI,QAAQ,OAAK,SAAS,EAAE;AAC5B,SAAO;;AAIH;AACJ,SAAO,OAAO,UAAU,YAAY,iBAAiB;;AAGjD;AACJ,SAAO,OAAO,UAAU;;AAGpB;AACJ,SAAO,OAAO,UAAU;;AAGpB;AACJ,MAAI,MAAM,QAAQ;AAChB,WAAO,WAAW,OAAO;;AAE3B,MAAI,kBAAkB;AACpB,WAAO;aACE,kBAAkB,cAAc,kBAAkB;AAC3D,WAAO;aACE,SAAS;AAClB,WAAO;aACE,SAAS;AAClB,WAAO;aACE,UAAU;AACnB,WAAO;;AAET,SAAO;;AAGH;AACJ,SAAO,CAAC,CAAE,MAAK,EAAE,eAAe,EAAE,QAAQ,EAAE;;AAGxC;AACJ,eAAa,OAAO,IAAI,MAAM,EAAE;AAC9B,QAAI,OAAO,MAAM;AACf,aAAO;;;AAGX,SAAO;;AAGH;AACJ,eAAa,MAAM;AACnB,MAAI,OAAO;AACT,WAAO;;AAKT,kBAAgB,IAAI,MAAM,OAAO;AACjC,UAAQ,OAAO,KAAK,MAAM,OAAO;AACjC,eAAa,OAAO,GAAG,KAAK,GAAG,EAAE;AAC/B,YAAQ,KAAK,QAAQ,IAAI,KAAK,MAAM,IAAI;;AAE1C,SAAO;;AAGT;AACE,cAAY,IAAI;AAChB,MAAI,MAAM,WAAW;AACnB,cAAU,MAAM;AAChB,iBAAa,GAAG,IAAI,GAAG;AACrB,UAAI,KAAK,EAAE,SAAS;;;AAGtB,cAAU,MAAM;AAChB,iBAAa,MAAM,MAAM;AACzB,gBAAY,KAAK,OAAO,YAAY,MAAM;AAC1C,iBAAa,GAAG,IAAI,GAAG;AACrB,UAAI,KAAK,kBAAkB,SAAS,IAAI,KAAK,MAAM;;;AAGvD,SAAO;;AAIH;AACJ,MAAI,MAAM,WAAW;AAEnB,WAAO,EAAE;;AAEX,eAAa,MAAM,OAAO,YAAY,MAAM;AAC5C,MAAI,SAAS;AAEX,WAAO;;AAET,MAAI,SAAS,EAAE;AACb,UAAM,IAAI,MAAM,IAAI,wCAAwC,EAAE;;AAGhE,SAAO,kBAAkB,GAAG,OAAO;;AAG/B;AAEJ,gBAAc,oBAAoB,MAAM;AACxC,eAAa,GAAG,IAAI,MAAM,QAAQ;AAChC,UAAM,KAAK;;AAEb,SAAO;;AAGH;AAEJ,MAAI,SAAS,QAAQ,UAAU,aAAa,UAAU;AACpD,WAAO,IAAI,aAAa;aACf,UAAU;AACnB,WAAO,IAAI,WAAW;aACb,UAAU;AACnB,WAAO,IAAI,WAAW;;AAEtB,UAAM,IAAI,MAAM,qBAAqB;;;AASnC;AAEJ,eAAa,MAAM,OAAO,gBAAgB,OAAO,MAAM;AACvD,MAAI,SAAS,QAAQ,UAAU;AAC7B,WAAO,cAAc,OAAO,IAAI,aAAa;aACpC,UAAU;AACnB,WAAO,cAAc,OAAO,IAAI,WAAW;aAClC,UAAU;AACnB,WAAO,cAAc,OAAO,IAAI,WAAW;;AAE3C,UAAM,IAAI,MAAM,qBAAqB;;;AAInC;AACJ,QAAM,QAAQ;AACZ,WACI,OAAO,UAAU,YAAY,WAAW,GACxC,MACI,0EACU;;;AAYhB;AAEJ,MAAI,SAAS;AACX,WAAO;aACE,SAAS;AAClB,WAAO,KAAK;;AAEd,cAAY,KAAK,KAAK,SAAS;AAC/B,eAAa,GAAG,IAAI,KAAK,SAAS,GAAG,EAAE;AACrC,aAAS,QAAQ,KAAK,KAAK;;AAE7B,SAAO;;AAWH;AAEJ,MAAI,SAAS;AACX,WAAO;aACE,SAAS;AAClB,WAAO,CAAC;;AAEV,eAAuB,IAAI,MAAM;AACjC,eAAa,GAAG,IAAI,KAAK,SAAS,GAAG,EAAE;AACrC,SAAK,KAAK,KAAK,MAAM,QAAQ,QAAQ;AACrC,aAAS,KAAK,KAAK,QAAQ;;AAE7B,OA
AK,KAAK,SAAS,KAAK;AACxB,SAAO;;AAQH;AAOJ,SAAO,UAAU,OAAO,QAAQ,OAAO,OAAO,SAAS;;AC1rBzD;;;;;;;;;;;;;;;;AAqBA,kCAAkC;;EA6BhC;AAAmB,SAAA,SAAA;AATX,SAAA,QAAe;AACf,SAAA,eAAwD;AAExD,SAAA,WAAkB;AAOxB,SAAK;;EAGP;AACE,QAAI,KAAK,YAAY;AACnB,cAAQ,KACJ,YAAY,KAAK,oEACgB;;AAEvC,SAAK,eAAe;AACpB,SAAK,WAAW;;EAGlB;AAGE,SAAK,aAAa,YAAY,CAAC,cAAc;AAI7C,QAAI,KAAK,SAAS,aAAa;AAC7B,wBAAkB,KAAK,SAAS;AAChC,cAAQ,KACJ,qCAAqC,aAAa;AACtD,WAAK,IAAI,UAAU;;;QAIjB;AACJ,QAAI,YAAY,KAAK;AACnB,aAAO,KAAK,MAAM;;AAGpB,SAAK,MAAM,YAAY,MAAM,KAAK,aAAa;AAC/C,WAAO,KAAK,MAAM;;EAGpB;AACE,QAAI,YAAY,KAAK;AACnB,aAAO,KAAK,MAAM;;AAGpB,sBAAkB,KAAK,aAAa;AACpC,QAAI,UAAU;AACZ,YAAM,IAAI,MACN,QAAQ;;AAId,SAAK,MAAM,YAAY;AAEvB,WAAO,KAAK,MAAM;;EAGpB;AACE,WAAO,KAAK,IAAI;;EAGlB;AACE,WAAO,KAAK,IAAI;;EAGlB;AACE,WAAO,KAAK;;MAGV;AACF,WAAO,KAAK;;EAGd;AACE,QAAI,KAAK,aAAa,aAAa;AACjC,YAAM,IAAI,MACN,mBAAmB;;AAEzB,SAAK,MAAM,YAAY;AACvB,QAAI,KAAK,aAAa,UAAU,WAAW;AACzC,WAAK,aAAa,UAAU,QAAQ;;;EAIhC;AACN,QAAI,KAAK,aAAa,aAAa;AACjC,YAAM,IAAI,MACN,yBAAyB;;AAE/B,WAAO,KAAK,aAAa,UAAU;;EAGrC;AACE,SAAK,QAAQ,OAAO,OAAO,IAAI;;EAGjC;AACE,SAAK,QAAQ;AACb,SAAK,WAAW;AAChB,SAAK;;EAGC;AACN,QAAI,OAAO,KAAK,WAAW,eACvB,OAAO,KAAK,OAAO,aAAa,eAChC,OAAO,KAAK,OAAO,SAAS,WAAW;AACzC;;AAGF,sBAAkB,eAAe,KAAK,OAAO,SAAS;AACtD,QAAI,6BAA6B;AAC/B,wBAAkB,UAAU,2BAA2B,MAAM;AAC7D,gBAAU,QAAQ;AAChB,6BAAqB,SAAS,MAAM;AACpC,aAAK,SAAS,OAAO,WAAW,KAAK;;;;;AAMvC;AACJ,iBAAe;AACf,cAAY,QAAQ,+BAA+B;AACjD,gBAAY,QAAQ,EAAE,IAAI,EAAE;AAC5B,WAAO,EAAE,KAAK;;AAEhB,SAAO;;AAGT;AAEE,SAAO,mBAAmB,SAAS,mBAAmB,SAAS;;AAGjE;AACE,UAAQ,MAAM;AACd,MAAI,UAAU,UAAU,UAAU;AAChC,WAAO,UAAU;aACR,GAAG,CAAE,YAAY;AAC1B,WAAO,CAAC;;AAEV,QAAM,IAAI,MACN,oCAAoC,kBAAkB;;AAWtD;AACJ,SAAO;;AAGF,UAAuB;AACxB;AACJ,QAAM;;AC/MR;;;;;;;;;;;;;;;;AAqBA;AAEM;AACJ,MAAI,mBAAmB;AAErB;AACA,QAAI,OAAQ,WAAY;AACtB,WAAK;eACI,OAAQ,WAAY;AAC7B,WAAK;eACI,OAAQ,YAAa;AAC9B,WAAK;eACI,OAAQ,SAAU;AAC3B,WAAK;;AAEL,YAAM,IAAI,MAAM;;AAElB,sBAAkB;;AAEpB,SAAO;;AAIT;AACE,aAAW;AACX,MAAI,GAAG,cAAc;AACnB,OAAG,aAAa,IAAI;;AAEtB,SAAO,GAAG;;AAUN;AACJ,oBAAkB;AAClB,MAAI,UAAU,IAAI;AAChB,WAAO,UAAU,IAAI;;AAErB,sBAAkB;AAClB,cAAU,IAAI,KAAK;AACnB,WAAO,UAAU,IAAI;;;ACzClB,YAAY;ACSZ,YAAY;AAGZ,aAAa;ACiBb,eAAe;AC2Bf,gBAAgB;ACsChB,oBAAoB;ACuBpB,aAAa;ACSb,oBAAoB;AAOpB,gBAAgB;AAGhB,eAAe;AAMf,eAAe;ACoBf,4BAA4B;ACoC5B,YAAY;ACMZ,eAAe;AAQf,sBAAsB;AAStB,qBAAqB;AAOrB,8BAA8B;ACsD9B,YAAY;AAGZ,YAAY;ACSZ,cAAc;AAGd,YAAY;ACSZ,aAAa;AAOb,sBAAsB;ACMtB,iBAAiB;AAGjB,uBAAuB;AAOvB,iBAAiB;AAMjB,iBAAiB;AAGjB,gBAAgB;AAGhB,qBAAqB;AAGrB,iBAAiB;ACkBjB,aAAa;AAGb,kBAAkB;ACSlB,YAAY;ACMZ,mBAAmB;ACiCnB,YAAY;AAOZ,gBAAgB;AAGhB,gBAAgB;ACyDhB,YAAY;AAOZ,gBAAgB;ACahB,iBAAiB;AAGjB,eAAe;AAGf,iBAAiB;AAGjB,4BAA4B;AAS5B,4BAA4B;AAU5B,4BAA4B;AAU5B,iBAAiB;AAGjB,eAAe;AAQf,cAAc;ACUd,YAAY;AAGZ,cAAc;ACwBd,aAAa;AAGb,gBAAgB;ACiBhB,uBAAuB;ACUvB,cAAc;AAGd,gBAAgB;ACShB,cAAc;AAGd,kBAAkB;AAMlB,iBAAiB;ACMjB,cAAc;AAMd,YAAY;ACSZ,gBAAgB;ACMhB,aAAa;AAGb,YAAY;ACcZ,eAAe;AAOf,gBAAgB;AAMhB,0BAA0B;AAG1B,eAAe;AAGf,YAAY;ACUZ,qBAAqB;ACgBrB,aAAa;AAGb,aAAa;ACab,kBAAkB;ACclB,eAAe;ACaf,kBAAkB;AAMlB,aAAa;ACeb,yBAAyB;AAQzB,qBAAqB;AAerB,oBAAoB;AAgBpB,6BAA6B;ACt1BpC;;;;;;;;;;;;;;;;AAuBA,uBACI,UAAU,kBAAkB,MAAM,IAAI;AAC1C,qBACI,UAAU,gBAAgB,MAAM,IAAI;AAoElC;AAEJ,cAAY,QAAQ,YAAY;AAChC,SAAO,eAAe,IAAI;;AAOtB;AACJ,SAAO,aAAa,IAAI;;AAGpB;AACJ,aAAW,eAAe;AAC1B,iBAA+B;AAE/B,SAAO;AACL,WAAO,MAAM,SAAS,GAAG;AACzB,QAAI;AACF;;AAEF,2BAAsB;AACtB,uBAAoB,IAAI,MAAM;AAC9B,QAAI,aAAY;AACd,aAAO,KAAK;;;AAGhB,SAAO;;AAcH;AACJ,SAAO,YAAY,eAAe;AAClC,cAAY,QAAQ,YAAY;AAChC,MAAI,eAAe,IAAI;AACrB,YAAQ,KACJ,eAAe,4BACX;;AAEV,iBAAe,IAAI,KAAK;;ACmE1B;AACE,SAAO,GAAG,eAAe;;ACrN3B,qBAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACAA;;;;;;
;;;;;;;;;;AAyBM;AAEJ,MAAI,UAAU;AACZ,WAAO,aAAa;;AAGtB,SAAO,aAAa,CAAC,QAAQ;;AAG/B;AACE,SAAQ,aAAa,gBAAgB,UAAU,aAC1C,aAAa,cAAc,UAAU,WACrC,aAAa,cAAc,UAAU;;AAGtC;AACJ,MAAI,UAAU;AACZ,UAAM,IAAI,MAAM;;AAElB,MAAI,MAAM,QAAQ;AAChB,QAAS,QAAQ;;AAGnB,MAAI,MAAM,QAAQ;AACX,6BAAyB,GAAe;;AAE/C,MAAI,mBAAmB,GAAG;AACxB,WAAO;;AAET,MAAI,SAAS,QAAQ,UAAU,aAAa,UAAU;AACpD,WAAO,IAAI,aAAa;aACf,UAAU;AACnB,WAAO,IAAI,WAAW;aACb,UAAU;AACnB,iBAAa,IAAI,WAAY,EAAe;AAC5C,iBAAa,GAAG,IAAI,KAAK,QAAQ,EAAE;AACjC,UAAI,KAAK,MAAO,EAAe,QAAQ;AACrC,aAAK,KAAK;;;AAGd,WAAO;;AAEP,UAAM,IAAI,MAAM,qBAAqB;;;AAenC;AACJ,SAAO,MAAM,SAAS;;AAmBlB;AAEJ,SAAO,MAAM,SAAS,MAAM,MAAM;;AAW9B,oCAA6C;AACjD,aAAW,YAAY;AACvB,SAAO,MAAM,SAAS,OAAO,GAAG;;AAW5B,wCAAqD;AACzD,aAAW,YAAY;AACvB,SAAO,MAAM,SAAS,OAAO,OAAO;;AClItC;;;;;;;;;;;;;;;;;EAgCE;AAAoB,SAAA,eAAA;AAAoC,SAAA,SAAA;AACtD,QAAI,UAAU;AACZ,WAAK,SAAS,IAAI;;;EAItB;AAEE;AACA,gCAA4B;AAC1B,gBAAU;;AAEZ,kBAAc,KAAK,aAAa,KAAK;AAErC,iBAAa,GAAG,IAAI,QAAQ,QAAQ;AAClC,qBAAe,QAAQ;AAGvB,aAAO,OAAO,KAAK;AACjB,kCAA0B,YAAY,OAAO,OAAO;;;AAIxD,0BAAsB;MACpB;MACA;MACA;MACA,QAAQ,MAAM,KAAK,YAAU,OAAO;MACpC,WAAW,MAAM,KACb,YAAU,OAAO,uBAAuB,OACpC,OAAO,wBACP;;AAEV,WAAO;;EAGT;AACE,WAAO,YAAY,SAAS,QAAQ,QAAQ,aAAa;AAEzD,YAAQ,QAAQ;AACd,cAAQ,IAAI,CAAC,OAAO,QAAQ,QAAQ,YAAY,KAAK;AACnD,aAAK,OAAO,iBACR,YAAY,QAAQ,eAAe,IAAI,eAAe,IAAI,QAC1D,eAAe;;;;;AAMrB;AAEJ,MAAI,UAAU;AAEZ,WAAO;;AAET,eAAa,GAAG,IAAI,KAAK,QAAQ;AAC/B,gBAAY,KAAK;AACjB,QAAI,MAAM,QAAQ,CAAC,SAAS;AAE1B,cAAQ,KAAK,SAAS,yBAAyB;AAC/C,aAAO;;;AAGX,SAAO;;;EAIP;AAIE,iBAAa,OAAO,WAAW,WAAgB,SAAS,GAAG,YAAY,KAC7B,OAAO;AACjD,uBAAwB,SAAS,MAAM;AACvC,iBAAa,OAAO;AACpB,iBAAa,OAAO;AACpB,kBAAmB,SAAS,OAAO,MAAM,YAAY;AACrD,iCAA6B;AAE7B,wBAAmB;AACjB,oBAAc,OAAO;AACrB,UAAI,SAAS;AAGX,2BAAmB,MAAM,SAAS,OAAO;AACzC,0BAAkB,WAAW;AAC7B,kCACI,GAAG,UAAS,cAAc,YAAY,IAAI,aAAa;;;AAI/D,YAAQ,IACJ,KAAK,gBAAiB,UAAW,SAAS,WAAY,UAClD,4BAA6B,aACjC,oBAAoB,aAAa,cAAc,iBAC/C,gBAAgB;;;AC/HxB;;;;;;;;;;;;;;;;AA2CM;AAIJ,uBAAoD;AACpD,qBAAgD;AAChD,eAAa,GAAG,IAAI,GAAG,QAAQ;AAC7B,iBAAa,GAAG,GAAG,MAAM;;AAG3B,eAAa,GAAG,IAAI,MAAK,QAAQ;AAC/B,iBAAa,MAAK;AAClB,uBAAmB,KAAK;AACxB,4BAAwB;AACtB,oBAAc,WAAW;AAEzB,0BAAoB;AACpB,mBAAa,GAAG,IAAI,GAAG,QAAQ;AAC7B,YAAI,aAAa,MAAM;AACrB,eAAK,QAAQ,QAAQ,YAAU,aAAa,OAAO,MAAM;AACzD,0BAAgB;AAChB,qBAAW,KAAK,MAAM;AACtB;;;AAIJ,UAAI;AACF;;;;AAMN,yBAAsD;AACtD,iBAAe,EAAE,MAAM;AACvB,mBAA8C;AAE9C,eAAa,MAAK,SAAS,GAAG,KAAK,GAAG;AACpC,iBAAa,MAAK;AAClB,uBAAmB,KAAK;AAGxB,iBAAa,GAAG,IAAI,KAAK,QAAQ,QAAQ;AACvC,UAAI,eAAe,KAAK,QAAQ,GAAG;AACjC,gCAAwB;AACtB,yBAAe,WAAW,WAAW,MAAM;AAC3C,mBAAS,KAAK,MAAM;;AAEtB;;;;AAMN,uBAAiC;AACjC,eAAa,GAAG,IAAI,MAAK,QAAQ;AAC/B,iBAAa,MAAK;AAElB,QAAI,WAAW,KAAK,OAAO,SAAS,KAAK;AAEvC,2BAAoD;AACpD,8BAAwB,KAAK;AAC3B,0BAAkB,KAAK,OAAO;AAC9B,YAAI,aAAa,UAAU;AACzB,uBAAa,aAAa;;;AAK9B,yBAAmB,OAAO,OAAO,IAAI;AACrC,iBAAW,SAAS;AACpB,iBAAW,UAAU,KAAK;AAE1B,mBAAa,KAAK;;;AAItB,SAAO;;AAUH;AAKJ,eAAa,aAAa,SAAS,GAAG,KAAK,GAAG;AAC5C,iBAAa,aAAa;AAE1B,gBAAsB;AACtB,SAAK,QAAQ,QAAQ;AACnB,yBAAmB,6BAA6B,EAAE;AAClD,UAAI,cAAc;AAChB,YAAI,KAAK;;AAIT,YAAI,KAAK;;;AAIb,QAAI,KAAK,YAAY;AACnB,YAAM,IAAI,MACN,4DACO,KAAK;;AAIlB,2BAAuB,KAAK,SAAS;AAErC,4BAAwB,KAAK;AAC3B,UAAI,CAAE,cAAa;AACjB,cAAM,IAAI,MACN,iCAAiC,yCACH,OAAO,KAAK;;AAIhD,iBAAW,KAAK,MAAM,eAAe;AACrC,UAAI,GAAG,UAAU;AACf,cAAM,IAAI,MACN,4BACI,KAAK,qCACN,iDAAiD,GAAG;;AAE7D,gBAAU,KAAK,OAAO;AACtB,UAAI,CAAM,YAAY,GAAG,OAAO,EAAE;AAChC,cAAM,IAAI,MACN,4BACI,KAAK,sCACL,yBAAyB,GAAG,wDACL,EAAE;;AAGnC,UAAI,6BAA6B,EAAE,OAAO;AACxC,qCAA6B,EAAE,MAAM;;AAErC,4BAAoB,6BAA6B,EAAE;AACnD,qCAA6B,EAAE,MAAM,KAAI,aAAa;AACtD,oBAAY;;;;;AC5LpB;;;;;;;;;;;;;;;;AAqBA,8BAA8B;AAE9B,mCAAmC;AAEnC,8BAA8B;AAExB;AAGJ,kBAAgB,eAAe;AAC/B,oBAAkB,wBAAwB,MAAM,OAAO
,OAAO;AAC9D,eAAa,MAAM;AACnB,oBAAkB,kBAAkB,MAAM,OAAO,OAAO,SAAS;AACjE,gBAAc,CAAC;AACf,MAAI;AACF,UAAM,KAAK,YAAY;AACvB,UAAM,KAAK,WAAW;AACtB,UAAM,KAAK,aAAa;AACxB,UAAM,KAAK;;AAEb,QAAM,KAAK,UAAU,IAAI,OAAK,SAAS,GAAG,KAAK;AAC/C,SAAO,MAAM,KAAK;;AAGpB;AAGE,YAAU,cAAc;AACxB,kBAAgB,QAAQ,QAAQ,SAAS;AACzC,oBAAkB,IAAI,MAAM,SAAS,KAAK;AAC1C,eAAa,MAAM;AACnB,yBACI,UAAU,cAAc,oBAAoB,QAAQ;AAExD,MAAI,OAAO;AACT,mBAAe,GAAG,MAAM,IAAI,SAAS;AACnC,qBAAe,MAAM;AACrB,mBAAa,GAAG,IAAI,SAAS;AAC3B,kBAAU,KAAK,KAAK,IAChB,UAAU,IACV,YAAY,eAAe,SAAS,IAAI,GAAG,OAAO;;;;AAI5D,SAAO;;AAGT;AAEE;AACA,MAAI,MAAM,QAAQ;AAChB,aAAS,GAAG,WAAW,IAAI,GAAG,QAAQ,6BAC/B,WAAW,IAAI,GAAG,QAAQ;aACxB,SAAS;AAClB,aAAS,IAAI;aACJ,UAAU;AACnB,aAAS,gBAAgB;;AAEzB,aAAS,WAAW,IAAI,QAAQ,wBAAwB;;AAG1D,SAAO,SAAS,QAAQ;;AAG1B;AACE,SAAO,MAAM,IAAI,UAAU;;AAG7B,4EAEqD;AACnD,4BAA0B,UAAU,cAAc,IAAI;AAEtD,eAAa,MAAM;AACnB,eAAa,MAAM;AACnB,MAAI,SAAS;AACX,QAAI,UAAU;AACZ,2BAAqB,oBAAoB;AACzC,aAAO,CAAC,YAAY,aAAa,IAAI,GAAG;;AAE1C,QAAI,UAAU;AACZ,aAAO,CAAC,gBAAgB,KAAK;;AAE/B,WAAO,CAAC,KAAK,GAAG;;AAGlB,MAAI,SAAS;AACX,QAAI,OAAO;AACT,4BAAsB,6BAA6B;AAEnD,sBAAgB,MAAM,KAClB,KAAK,MAAM,GAAG;AAClB,qBAAe,MAAM,KAAqC,KAAK,MAC1D,QAAO,8BAA8B,mBACtC,OAAO;AACX,UAAI,UAAU;AACZ,oBAAY,oBAAoB;AAChC,mBAAW,oBAAoB;;AAEjC,aAAO;QACL,MACA,UAAU,IAAI,UAAU,YAAY,GAAG,UAAU,IAAI,QAChD,KAAK,QACV,YACA,SACK,IACG,UAAU,YACN,GAAG,UAAU,OAAO,6BAA6B,IAAI,QAC5D,KAAK,QACV;;;AAGJ,wBACI,UAAU,cAAc,oBAAoB,QACpB,MAAM,KAAoB;AAEtD,WAAO;MACL,MACA,YAAY,IAAI,UAAU,YAAY,GAAG,UAAU,IAAI,QAClD,KAAK,QACV;;;AAKJ,mBAAiB,MAAM,MAAM;AAC7B,qBAAmB,QAAQ,MAAM;AACjC,iBAAe,QAAQ,KAAK;AAC5B,gBAAwB;AACxB,MAAI,OAAO;AACT,iBAAa,GAAG,IAAI,4BAA4B;AAC9C,oBAAc,IAAI;AAClB,kBAAY,QAAQ;AACpB,YAAM,KAAK,GAAG,kBACV,KAAK,MAAM,OAAO,MAAM,UAAU,OAAO,YAAY,WACrD;;AAEN,UAAM,KAAK;AACX,iBAAa,OAAO,4BAA4B,IAAI,MAAM;AACxD,oBAAc,IAAI;AAClB,kBAAY,QAAQ;AACpB,YAAM,KAAK,GAAG,kBACV,KAAK,MAAM,OAAO,MAAM,UAAU,OAAO,YAAY,WACrD,MAAM,OAAO;;;AAGnB,iBAAa,GAAG,IAAI,MAAM;AACxB,oBAAc,IAAI;AAClB,kBAAY,QAAQ;AACpB,YAAM,KAAK,GAAG,kBACV,KAAK,MAAM,OAAO,MAAM,UAAU,OAAO,YAAY,WACrD,MAAM,OAAO;;;AAGrB,cAAY,SAAS,IAAI,MAAM;AAC/B,QAAM,KAAK,MAAM,MAAM,KAAK;AAC5B,eAAa,GAAG,IAAI,MAAM,SAAS,GAAG;AACpC,UAAM,KAAK,MAAM,MAAM,KAAK;;AAE9B,mBAAiB;AACjB,eAAa,GAAG,IAAI,MAAM;AACxB,kBAAc;;AAEhB,QAAM,MAAM,SAAS,KACjB,MAAM,MAAM,MAAM,SAAS,KAAK,MAAO,UAAS,KAAK;AACzD,SAAO;;AAGT;AAEE,wBAA+C;AAC/C,eAAa,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,kBAAc,KAAK,CAAC,KAAK,IAAI,KAAK,IAAI;;AAExC,SAAO;;ACnMT;;;;;;;;;;;;;;;;;EA6CE;AAAuC,SAAA,QAAA;AACrC,SAAK,QAAQ,MAAM;AACnB,SAAK,OAAY,cAAc;AAE/B,QAAI,UAAU;AACZ,gBAAU,OAAO;AACZ,aACD,MAAM,KAAK,MACX,MAAM,qBAAqB,qDACG,KAAK;;AAEzC,QAAI,UAAU;AACZ,YAAM,IAAI,MACN;;AAIN,SAAK,SAAS,UAAe,kBAAkB,OAAO,KAAK;AAC3D,SAAK,UAAU,eAAe;;EAWhC;AACE,QAAI,KAAK,WAAW;AAClB,aAAO,CAAC;;AAEL,WACD,KAAK,WAAW,KAAK,MACrB,MAAM,uCAAuC,KAAK,gCAC3B,KAAK;AAEhC,kBAAc,KAAK,WAAW;AAC9B,SAAK,OAAO,SAAS;;EAUvB;AACE,QAAI,KAAK,WAAW;AAClB,aAAO,CAAC;;AAEV,YAAQ;AACR,sBAAkB;AAChB,UAAI,MAAM,KAAK,OAAO,KAAK,MAAM;AAC/B,oBAAY,qCAAqC,wBAC3B,KAAK;AAC3B,cAAM,IAAI,MAAM;;AAElB;;AAEF,gBAAY,KAAK,KAAK,SAAS;AAC/B,kBAAa,GAAG,KAAI,KAAK,SAAS,GAAG,EAAE;AACrC,eAAS,KAAK,QAAQ,MAAK,KAAK;;AAElC,WAAO,KAAK,OAAO;;EAGrB;AACE,QAAI,KAAK,SAAS;AAChB,aAAO;eACE,KAAK,SAAS;AACvB,aAAO,KAAK;;AAEd,gBAAY,KAAK,KAAK,SAAS;AAC/B,iBAAa,GAAG,IAAI,KAAK,SAAS,GAAG,EAAE;AACrC,eAAS,KAAK,QAAQ,KAAK,KAAK;;AAElC,WAAO;;EAGT;AACE,QAAI,KAAK,SAAS;AAChB,aAAO;eACE,KAAK,SAAS;AACvB,aAAO,CAAC;;AAEV,iBAAuB,IAAI,MAAM,KAAK,MAAM;AAC5C,iBAAa,GAAG,IAAI,KAAK,SAAS,GAAG,EAAE;AACrC,WAAK,KAAK,KAAK,MAAM,QAAQ,KAAK,QAAQ;AAC1C,eAAS,KAAK,KAAK,KAAK,QAAQ;;AAElC,SAAK,KAAK,SAAS,KAAK;AACxB,WAAO;;MAGL;AACF,WAAO,KAAK,MAAM;;EAQpB;AACE,WAAO,YAAY,WAAW,KAAK,QAAQ,KA
AK,OAAO,KAAK;;;AAiChE,gBAAqC;AAErC,gBAA2B;AAE3B,2BAAkD;ACW5C;AACJ,cAAY;;ACgBR;AACJ,yBAAuB;;;EAoDvB;AAXA,SAAA,OAAO;AAoIG,SAAA,qBAAqB;AAxH7B,SAAK,QAAQ,MAAM;AACnB,SAAK,QAAQ,SAAS;AACtB,SAAK,OAAY,cAAc;AAC/B,SAAK,UAAU,eAAe;AAC9B,SAAK,SAAS;AACd,SAAK,KAAK;AACV,SAAK,WAAY,KAAK,OAAO,IAAI,KAAK,KAAK,aAAa;;MAGtD;AACF,WAAO,KAAK,MAAM;;QAQd;AACJ,iBAAa,MAAM,KAAK;AACxB,WAAO,UAAU,OAAO,KAAK,OAAO,KAAK,OAAY;;EAOvD;AACE,WAAO,UAAU,OAAO,KAAK,OAAO,KAAK,OAAY,KAAK;;QAStD;AACJ,iBAAa,MAAM,KAAK;AACxB,WAAO,cAAc,KAAK,OAAO;;EASnC;AACE,WAAO,cAAc,KAAK,OAAO,KAAK;;QASlC;AACJ,SAAK;AACL,kBAAa,YAAY,KAAK,KAAK;AACnC,QAAI,KAAK,UAAU;AACjB,oBAAc,MAAM;AACpB;AACE,eAAO,MAAM,IAAI,OAAU,aAAa;;AAExC,cAAM,IAAI,MACN;;;AAIR,WAAO;;EAST;AACE,SAAK;AACL,kBAAa,YAAY,SAAS,KAAK;AACvC,QAAI,KAAK,UAAU;AACjB;AACE,eAAQ,MAAsB,IAAI,OAAU,aAAa;;AAGzD,cAAM,IAAI,MACN;;;AAIR,WAAO;;QAIH;AACJ,SAAK;AACL,kBAAa,MAAM,YAAY,KAAK,KAAK;AACzC,QAAI,KAAK,UAAU;AACjB,aAAO;;AAEP,aAAO,IAAI,WAAY,MAAoB;;;EAS/C;AACE,QAAI,KAAK;AACP;;AAEF,gBAAY,cAAc;AAC1B,SAAK,qBAAqB;;MAIxB;AACF,WAAO,KAAK;;EAGd;AACE,QAAI,KAAK;AACP,YAAM,IAAI,MAAM;;;EAYpB,gBAAgB;AACd,WAAO,UAAU,MAAM,MAAM;;EAO/B;AACE,SAAK;AACL,WAAO,UAAU,MAAM;;EAQzB,mBAAmB;AACjB,iBAAa,KAAK;AAClB,WAAO,eAAe,MAAM,KAAK,OAAO,KAAK,OAAO;;EAGtD;AACE,SAAK;AACL,WAAO,UAAU,KAAK,MAAW;;EAEnC,qBAAqB;AACnB,SAAK;AACL,WAAO,YAAY,aAAa,MAAM,WAAW,MAAM;;;AAI3D,OAAO,eAAe,QAAQ,OAAO,aAAa;EAChD,OAAO;AAML,WAAO,CAAC,CAAC,aAAY,UAAS,QAAQ,QAAQ,UAAS,YAAY,QAC/D,UAAS,mBAAmB;;;uBAoCiB;EAGnD;AAGE,UACI,aAAa,OAAO,aAAa,OAAO,aAAa,QAAQ;AAH/B,SAAA,YAAA;AAIlC,SAAK,OAAO;;EAWd;AACE,QAAI,SAAS,UAAU,KAAK;AAC1B,YAAM,IAAI,MACN,2BAA2B,SAAS,8BACjB,KAAK;;AAE9B,QAAI,CAAM,YAAY,SAAS,OAAO,KAAK;AACzC,YAAM,IAAI,MACN,2BAA2B,SAAS,8BACjB,KAAK;;AAE9B,gBAAY,cAAc;AAC1B,SAAK,SAAS,SAAS;AACvB,gBAAY,OAAO,MAAM;;EAG3B;AACE,gBAAY,gBAAgB;AAC5B,SAAK,qBAAqB;;;AAI9B,OAAO,eAAe,UAAU,OAAO,aAAa;EAClD,OAAO;AACL,WAAO,qBAAoB,UAAU,UAAS,UAAU,QACpD,UAAS,kBAAkB;;;ACnhBnC;;;;;;;;;;;;;;;;AAgEA;AAAA,AAAA;AACE,QAAA,QAAA;AACA,QAAA,QAAA;AACA,QAAA,QAAA;AACA,QAAA,QAAA;AACA,QAAA,QAAA;AACA,QAAA,QAAA;AACA,QAAA,QAAA;GAPU,QAAA,QAAI;AAqBhB;AAAA,AAAA;AACE,qBAAA,aAAA;AACA,qBAAA,WAAA;AACA,qBAAA,UAAA;AACA,qBAAA,eAAA;GAJG,qBAAA,qBAAiB;AAOtB;AAAA,AAAA;AACE,oBAAA,aAAA;AACA,oBAAA,WAAA;AACA,oBAAA,UAAA;AACA,oBAAA,eAAA;GAJG,oBAAA,oBAAgB;AAOrB;AAAA,AAAA;AACE,uBAAA,aAAA;AACA,uBAAA,WAAA;AACA,uBAAA,UAAA;AACA,uBAAA,eAAA;GAJG,uBAAA,uBAAmB;AAOxB;AAAA,AAAA;AACE,yBAAA,aAAA;AACA,yBAAA,WAAA;AACA,yBAAA,UAAA;AACA,yBAAA,eAAA;GAJG,yBAAA,yBAAqB;AAO1B,sBAAsB;EACpB,SAAW;EACX,OAAS;EACT,MAAQ;EACR,WAAa;;AAGT;AACJ,MAAI,UAAU,YAAY,UAAU;AAClC,QAAI,UAAU,YAAY,UAAU;AAClC,aAAO;;AAET,UAAM,IAAI,MAAM,kBAAkB,cAAc;;AAElD,SAAO,cAAc,OAAO;;AC/H9B;;;;;;;;;;;;;;;;AAsBM;AACJ,MAAI,EAAE,UAAU,EAAE;AAChB,WAAO,CAAC,GAAG;;AAEb,gBAAc,WAAW,EAAE,OAAO,EAAE;AACpC,SAAO,CAAC,EAAE,KAAK,QAAQ,EAAE,KAAK;;AC0B1B;AACJ,eAAuB;AACvB,eAAa,IAAI;AACjB,sBAAoB,QAAQ,MAAM;AAClC,SAAO;;AAGT;AAEE,MAAI,aAAa;AACf;;AAEF,MAAI,qBAAqB;AACvB,SAAK,KAAK;AACV;;AAEF,MAAI,CAAC,WAAW;AACd;;AAGF,mBAAiB;AACjB,kBAAgB;AACd,gBAAY,SAAS;AACrB,QAAI,CAAC,KAAK,IAAI;AACZ,WAAK,IAAI;AACT,0BAAoB,KAAK,MAAM;;;;AAMrC;AACE,SAAO,MAAM,QAAQ,QAAQ,OAAO,QAAQ;;ACrF9C;;;;;;;;;;;;;;;;;EAmFA;AAEE,SAAA,sBAAwC;AAExC,SAAA,iBAAiB;AACjB,SAAA,WAAW;AACX,SAAA,aAAa;AACb,SAAA,mBAAmB;AACnB,SAAA,iBAAiB;AAMjB,SAAA,gBAAgB;AAGhB,SAAA,cAAc;AAId,SAAA,aAA2B;AAK3B,SAAA,oBAA8B;AAC9B,SAAA,cAAc;AAEd,SAAA,aAAa,IAAI;AAQjB,SAAA,YAAY;AACZ,SAAA,gBACI,CAAC,UAAU,GAAG,YAAY,GAAG,WAAW,GAAG,SAAS,IAAI,QAAQ;;EAEpE;AACE,+BAA2B,KAAK;AAC9B,WAAK,oBAAoB,cAAc;;;;;EAqB3C;AAAmB,SAAA,MAAA;AAbnB,SAAA,WAA0C;AAC1C,SAAA,kBAKI;AAKI,SAAA,uBAAuB;AAG7B,SAAK,QAAQ,IAAI;;QAGb;AACJ,QAAI,KAAK,sBAAsB;AAC7B,aAAO,KAAK,mBAAm
B,KAAK;;;AAEtC,QAAI,KAAK,mBAAmB;AAC1B;;AAEF,2BAAuB,KAAK;AAE5B,iBAAa,GAAG,IAAI,eAAe,QAAQ;AACzC,0BAAoB,eAAe;AACnC,sBAAgB,MAAM,KAAK,kBAAkB,aAAa;AAC1D,UAAI;AACF,cAAM,KAAK,WAAW;AACtB;;;AAIJ,UAAM,IAAI,MACN;;MAIF;AACF,QAAI,KAAK,sBAAsB;AAC7B,YAAM,IAAI,MACN,YAAY,KAAK;;AAIvB,QAAI,KAAK,mBAAmB;AAC1B,aAAO,MAAM,aAAa,KAAK;AAC/B,UAAI;AACF,cAAM,IAAI,MACN,iCAAiC;;AAIvC,WAAK,WAAW;;AAElB,WAAO,KAAK;;EAGd;AACE,WAAO,OAAO,KAAK,KAAK;;EAG1B;AACE,QAAI,CAAE,gBAAe,KAAK;AAGxB,UAAI,eAAe,KAAK;AACtB,eAAO,aAAa,KAAK,kBAAkB;AAC3C,YAAI;AAEF,iBAAO;;;AAGT,eAAO;;;AAGX,WAAO,KAAK,SAAS;;EAGvB;AAEE,QAAI,CAAE,gBAAe,KAAK;AACxB,aAAO;;AAET,WAAO,KAAK,gBAAgB,aAAa;;EAG3C,iDAGe;AACb,QAAI,eAAe,KAAK;AACtB,cAAQ,KACJ,GAAG;AAEP,aAAO;;AAET,SAAK,gBAAgB,eAAe,CAAC,SAAS;AAC9C,WAAO;;QAGH;AACJ,QAAI,KAAK,gBAAgB,gBAAgB;AACvC,YAAM,IAAI,MAAM,iBAAiB;;AAEnC,SAAK,cAAc;AACnB,QAAI,KAAK,SAAS,gBAAgB;AAChC,WAAK,kBAAkB;AACvB,aAAO,SAAS,aAAa,KAAK,kBAAkB;AACpD,qBAAe,YAAY,MAAM,UAAU;AAC3C,UAAI,CAAC;AACH,eAAO;;;AAGX,SAAK,kBAAkB,KAAK,SAAS;AACrC,SAAK;AAEL,SAAK,WAAW,IAAI,SAAS,KAAK;AAElC,WAAO;;EAGD;AACN,oBAAgB,qBAAqB,KAAK;AAC1C,YAAQ,QAAQ;AACd,UAAI,OAAO,aAAa;AACtB,eAAO,UAAU,KAAK;;;;EAKpB;AACN,oBAAgB,qBAAqB;AACrC,YAAQ,QAAQ;AACd,UAAI,OAAO,eAAe;AACxB,eAAO,YAAY,KAAK,SAAS;;;;EAW/B;AAEN,iCAA6B,KAAK,gBAAgB;AAClD,QAAI,wBAAwB;AAC1B,YAAM,IAAI,MACN,6BAA6B;;AAGnC;AACE,uBAAgB,qBAAqB;AAMrC,UAAI,YAAW,CAAE,qBAAmB,kBAC7B,OAAO,SAAQ,SAAS;AAC7B,0BAAkB,EAAE,KAAK;AACzB,wBACI,SACK,KAAK;AAEJ,cAAI,YAAY,KAAK;AACnB,mBAAO;;AAET,eAAK,SAAS,eAAe;AAC7B,eAAK,qBAAqB;AAC1B,iBAAO;WAER,MAAM;AAEL,cAAI,YAAY,KAAK;AACnB,mBAAO;;AAET,eAAK,qBAAqB;AAC1B,kBAAQ,KACJ,6BAA6B;AACjC,kBAAQ,KAAK,IAAI,SAAS,IAAI;AAC9B,iBAAO;;AAEjB,aAAK,qBAAqB;AAC1B,eAAO,CAAC,SAAS,WAAW;;AAE5B,aAAK,SAAS,eAAe;AAC7B,eAAO,CAAC,SAAS,MAAM,WAAW;;;AAGpC,cAAQ,KAAK,6BAA6B;AAC1C,cAAQ,KAAK,IAAI,SAAS,IAAI;AAC9B,aAAO,CAAC,SAAS,OAAO,WAAW;;;EAIvC;AACE,QAAI,CAAE,gBAAe,KAAK;AACxB,YAAM,IAAI,MAAM,GAAG;;AAErB,QAAI,KAAK,gBAAgB,eAAe,KAAK,sBAAsB;AAGjE,WAAK;;AAGP,QAAI,eAAe,KAAK;AACtB,WAAK,yBAAyB;AAC9B,WAAK,SAAS,aAAa;AAC3B,aAAO,KAAK,SAAS;;AAGvB,WAAO,KAAK,gBAAgB;AAG5B,QAAI,KAAK,gBAAgB;AACvB,WAAK,qBAAqB;AAC1B,WAAK,cAAc;AACnB,WAAK,kBAAkB;;;EAInB;AACN,QAAI,OAAO,KAAK,KAAK,iBAAiB,WAAW;AAC/C,YAAM,IAAI,MAAM;;AAElB,WAAO,OAAO,KAAK,KAAK,iBAAiB,KAAK;AAE5C,aAAO,KAAK,gBAAgB,GAAG,WAC3B,KAAK,gBAAgB,GAAG;;;EAIxB;AAEN,2BAAuB,KAAK;AAE5B,iBAAa,GAAG,IAAI,eAAe,QAAQ;AACzC,0BAAoB,eAAe;AACnC,aAAO,SAAS,aAAa,KAAK,kBAAkB;AACpD,UAAI,aAAa;AACf,eAAO,CAAC,MAAM,aAAa;;;AAG/B,UAAM,IAAI,MACN;;EAIN;AACE,iBAAa,KAAK,MAAM,WAAW,IAAI;AACvC,uBAAmB,KAAK;AACxB,mBAAe,KAAK,SAAS;AAG7B,eAAW,YAAY;AACvB,SAAK,UAAU;AACf,aAAQ,KAAK,QAAQ,QAAQ,KAAK,OAAO,KAAK;AAC9C,QAAI,KAAK;AAGP,WAAK,MAAM,kBAAkB,KAAK,MAAM,kBAAkB,SAAS;;;EAIvE;AAEE,eAAmB;AACnB,QAAI,MAAM;AAER,UAAI,OAAO,aAAa;AACtB,cAAM,IAAI,MAAM;;AAElB,WAAK;;AAGL,UAAI,OAAO,aAAa,YAAY,CAAE,qBAAoB;AACxD,cAAM,IAAI,MACN;;AAGN,UAAI,OAAO,OAAO;AAChB,cAAM,IAAI,MACN;;AAGN,aAAO;;AAIT;AACA,WAAO,KAAK,UACR,MAAM,KAAK,WAAW,OAAO,MAAM,KAAK,SAAS,SAAS;AACxD,eAAS;AACT,UAAI,kBAAkB;AACpB,gBAAQ,MAAM;;AAEhB,aAAO;;;EAIP;AACN;AACA;AACE,kBAAY;AACZ;AACA,aAAO;;AAEP;AACA,YAAM;;;EAKF;AACN,WAAO,OAAO;;EAIR;AACN,WAAO,OAAO;;EAYR;AACN,cAAU,KAAK,qBAAqB,EAAE,QAAQ,EAAE,OAAO,EAAE;AACzD,mBAAe,CAAC;AAChB,iBAAa,QAAiB;MAC5B,GAAG;AACD,sBAAc;AACd,2BAAmB,CAAC,GAAG;AACvB,sBAAc,CAAC;AAEf,eAAO,OAAO,cACV,cAAW,SAAQ,KAAK,IAAI,QAC5B,YAAoC,MAAiB,MACrD;;;AAGR,kBAAwB;AACxB,SAAK,YAAY,KAAK,MAAM,YAAY,MAAM,QAAQ,CAAC,IAAI,MAAM,OAAO;AACxE,WAAO;;EAgBT;AAGE,wBAA0B;AAC1B,0BAA4B;AAI5B,WAAO,KAAK,cACR,aAAa,QAAQ,eAAe,YAAY,OAAO,cACvD;;EAGE;AACN,WAAO,KAAK,IAAI,QAAQ;;EAGlB;AAGN,4BAAwB,KAAK,QAAQ;AAGrC,2BAAuB;AACvB,aAAS,QAAQ;AAGf,0BAAqB,KAAK,UAAU,cAAc,IAAI;;
AAQxD,qBACI,KAAK,MAAM,kBAAkB,KAAK,MAAM,kBAAkB,SAAS;AACvE,0BACI,kBAAkB,mBAAmB,mBAAmB;AAC5D,QAAI,gBAAgB;AAClB,YAAM,IAAI,MACN,YAAY,KAAK,6CACb,0CAA0C;;;EAQtD;AAKE;AACA,gBAAsB;AACtB,qBAAiB,KAAK;AACtB,QAAI,cAAc;AAChB,mBACI,KAAK,MAAM,eAAe,OAAO,KAAK,MAAM,YAAY,OAAO;;AAGrE,8BAA0B,KAAK,MAAM;AACrC,+BAA2B,KAAK,MAAM;AAEtC,QAAI,KAAK;AACP,WAAK,MAAM,kBAAkB,KAAK;;AAGpC;AACA,mBAAe,UAAU,YAAY,KAAK;AAC1C;AACA,QAAI,UAAU;AACZ,oBAAa;AACX,iCAAyB,KAAK,QAAQ;AACtC,cAAM,OAAO,WAAW,CAAC,QAAQ,OAAO,SAAS,KAAK;AACtD,yBAAiB,MAAM,QAAQ,OAAO,MAAM,CAAC;AAC7C,YAAI,KAAK;AACP,eAAK,sBAAsB,YAAY,kBAAkB;;AAE3D,2BAAmB,SAAS,IACxB,EAAE,QAAQ,OAAO,WACb,KAAK,qBAAqB,QAAQ,OAAO;AAMjD,YAAI;AACF,8BACI,KAAK,sBAAsB,YAAY,QAAQ;AACnD,cAAI,iBAAiB;AAKnB,gBAAI,iBAAiB;AACnB,8BAAgB;;AAElB,+BAAmB,WAAW,OAAO,UAAU,cAAc;AAC7D,4BAAiB,iBAAgB,IAAI,QAAQ,OAAO;;AAEtD,kBAAQ,KAAK,2BAA2B;;AAE1C,eAAO;;;AAGT,uBAA+B;AAI7B,YAAI,CAAC;AACH;;AAEF,gBAAQ,QAAQ,IAAI,aAAU,KAAK,KAAK,KAAK,MAAM;;AAGrD,oBAAa;AACX,iCAAyB,KAAK,QAAQ;AACtC,cAAM,KAAK,KAAK,MAAM,YAAY,KAAK,SAAS;AAChD,qBAAc,MAAM,QAAQ,OAAO,MAAM,CAAC;AAC1C,YAAI,KAAK;AACP,eAAK,sBAAsB,YAAY,kBAAkB;;AAE3D,eAAO;;;AAKX;AACA,SAAK,UACD,MAAM,KAAK,MAAM,eAAe,MAAM,KAAK,MAAM,eAAe;AAC9D,UAAI,CAAC,KAAK,IAAI,QAAQ,YAAY,CAAC,KAAK,MAAM;AAC5C,kBAAU;;AAEV,wBAAgB,KAAK,SAAS,cAC1B,YAAY,QAAQ,MAAM;AAC9B,YAAI,KAAK,IAAI,QAAQ;AACnB,eAAK,SAAS,iBAAiB;;AAEjC,kBAAU,cAAc;;;AAIhC,QAAI;AACF,WAAK,YACD,YAAY,QAAQ,SAAS,eAAe,OAAO;;AAGzD,QAAI,KAAK,MAAM;AACb,WAAK,MAAM,cAAc,QAAQ,KAAK;QACpC,MAAM;QACN,YAAY,KAAK,MAAM,WAAW;QAClC,oBAAoB,KAAK,MAAM;QAC/B,cAAc,KAAK,MAAM,aAAa;QACtC,sBAAsB,KAAK,MAAM;QACjC,aAAa,OAAO,KAAK,QAAQ,IAC7B,SAAO,OAAO,QAAQ,OAAO,OAAO,KAAK,QAAQ;QACrD,cAAc,QAAQ,IAAI,UAAQ,KAAK;QACvC,cAAc,cAAc;QAC5B,WAAW,cAAc;;;AAG7B,WAAQ,MAAM,QAAQ,OAAO,UAAU,QAAQ;;EAQzC;AACN,kBAAc,QAAQ,IAAI,aAAU,KAAK,KAAK,KAAK,MAAM;AACzD,WAAO;;EAaD;AAGN,uBAAmB,YAAY;AAC/B,QAAI,cAAc;AAChB,2BAA+B,WAAW,gBAAgB;AAC1D,4BAAiC,WAAW,iBAAiB;AAI7D;AACA,UAAI,WAAW;AACR,eACD,MAAM,QAAQ,SACd,MAAM;AAEV,6BAAqB,OAAO,KAAK,QAAQ,IAAI,SAAS,OAAO;;AAE7D,6BAAqB,aAAa,IAAI,eAAe,OAAO;;AAG9D,kCACI,QAAQ,OAAO,UAAU,cAAc;AAE3C,aAAO,mBAAmB,OAAO;;AAInC,WAAO;;EAQT;AAGE,QAAI,UAAU;AACZ,YAAM,IAAI,MAAM;;AAElB,YAAQ,SAAS;AACjB,eAAU,YAAW,KAAK;AAC1B,sBAAkB;AAClB,QAAI,UAAU,YAAiB,SAAS,OAAO;AAC7C,oBAAe,OAAoB,IAAI,OAAU,aAAa;;AAEhE,mBAAe,SAAQ,MAAM,aAAa,OAAO;AACjD,cAAU,IAAI,OAAO,OAAO,OAAO,QAAQ,KAAK;AAChD,SAAK,OAAO,GAAG;AAGf,QAAI,UAAU;AACZ,mBAAa,KAAK,MAAM,WAAW,IAAI;AACvC,uBAAiB,qBAAqB;AACtC,WAAK,MAAM,YAAY,WAAW,KAAK;AACvC,WAAK,QAAQ;;AAEf,WAAO;;EAQT;AAGE,YAAQ,SAAS;AACjB,cAAU,IAAI,OAAO,OAAO,OAAO,QAAQ,KAAK;AAChD,SAAK,OAAO,GAAG;AACf,WAAO;;EAGT,uCACsC;AAEpC,WAAO,QAAQ,KAAK,iBAAiB;AACrC,QAAI,SAAS,QAAQ,UAAU,aAAa;AAC1C,qBAAe,aAAa,KAAK;;AAEnC,cAAU,IAAI,SAAS,cAAc,WAAW,MAAM,KAAK;AAC3D,QAAI,KAAK,MAAM,oBAAoB,EAAE,SAAS;AAC5C,YAAM,IAAI,MAAM,sBAAsB,EAAE;;AAE1C,SAAK,MAAM,oBAAoB,EAAE,QAAQ;AACzC,SAAK,OAAO,GAAG,KAAK;AACpB,WAAO;;EAGT;AACE,qBAAiB,KAAK,MAAM,WAAW,IAAI,EAAE,UACzC,KAAK,MAAM,WAAW,IAAI,EAAE,QAAQ,WACpC;AACJ,SAAK,MAAM;AACX,QAAI,EAAE,UAAU;AACd,WAAK,MAAM;;AAEb,QAAI,aAAa;AACf,WAAK,MAAM;AAIX,kBAAY;AACZ,UAAI,EAAE,UAAU,eAAe,EAAE,UAAU;AACzC,gBAAQ,EAAE,OAAY,gBAAgB,EAAE;;AAE1C,WAAK,MAAM,WAAW,IAAI,EAAE,QAAQ;QAClC,SAAS,YAAW,KAAK;QACzB,OAAO,EAAE;QACT,OAAO,EAAE;QACT;QACA,UAAU;;AAEZ,WAAK,MAAM,YAAY;;AAGzB,SAAK,MAAM,WAAW,IAAI,EAAE,QAAQ;AAEpC,QAAI,CAAE,cAAa;AACjB,WAAK,MAAM;;;EAIf;AACE,QAAI,CAAC,KAAK,MAAM,WAAW,IAAI,EAAE;AAC/B;;AAGF,SAAK,MAAM;AACX,QAAI,EAAE,UAAU;AACd,WAAK,MAAM;;AAEb,iBAAa,KAAK,MAAM,WAAW,IAAI,EAAE;AACzC,qBAAiB,KAAK;AAEtB,QAAI,YAAY;AAGd,UAAI,EAAE,UAAU;AACd,aAAK,MAAM,YAAY,KAAK;;AAE9B,WAAK,MAAM;AAEX,WAAK,QAAQ,YAAY,EAAE;AAC3B,WAAK,MAAM,WAAW,OAAO,EAAE;;AAE/B,WAAK,MAAM,WAAW,I
AAI,EAAE,QAAQ;;;EAOxC;AACE,0BAAsB,KAAK,MAAM;AAC/B,gBAAU,KAAK,MAAM,oBAAoB;AACzC,WAAK,gBAAgB;;;EAIzB;AACE,SAAK,cAAc;AACnB,QAAI,KAAK,MAAM,oBAAoB,EAAE,SAAS;AAC5C,aAAO,KAAK,MAAM,oBAAoB,EAAE;;;EAI5C;AACE,iBAAa,KAAK,QAAQ;AAC1B,SAAK,aAAa,KAAK,MAAM;AAC7B,SAAK,iBAAiB,KAAK,MAAM;AACjC,SAAK,WAAW,KAAK,MAAM;AAC3B,QAAI,KAAK,MAAM,mBAAmB;AAChC,WAAK,aAAa;AAClB,UAAI,KAAK,WAAW;AAClB,aAAK,UAAU;;AAEjB,WAAK,QAAQ,KACT;;AAGN,WAAO;;QAGH;AAEJ,SAAK,MAAM,YAAY;AAEvB,uBAAmB,KAAK,MAAM;AAC9B,4BAAwB,KAAK,MAAM;AAEnC,SAAK,MAAM,cAAc,UAAU;AACnC,SAAK,MAAM,cAAc,SAAS,MAAM;AAExC,SAAK,MAAM,YAAY;AAEvB,SAAK,MAAM,cAAc,YAAY,KAAK,IACtC,GAAG,KAAK,MAAM,cAAc,QAAQ,IAAI,OAAK,EAAE;AACnD,SAAK,MAAM,cAAc,WAAW,KAAK,MAAM,WAAW;AAC1D,SAAK,MAAM,cAAc,aACrB,KAAK,MAAM,aAAa;AAC5B,yBAAqB,KAAK,MAAM,cAAc;AAC5C,aAAO,eAAe,MAAM,OAAO;AACnC,aAAO,YAAY,MAAM,OAAO;;AAElC,WAAO,KAAK,MAAM;;EAGpB;AACE,WAAO,KAAK,MAAM,gBAAgB,KAAK,KAAK,MAAM,gBAAgB;;EAG5D;AAGN,qBACI,CAAC,IAAI,KAAK,MAAM,kBAAkB,YAAY,QAAQ,SAAS;AAEnE,uBAAmB,YAAY;AAC/B,QAAI,cAAc;AAChB,sBAAgB,WAAW;;AAE7B,QAAI,iBAAiB;AACnB,eAAS,WAAW;AAGlB,cAAM,IAAI,IAAI;AACZ,cAAI,MAAM;AACR,2BAAe,QAAQ;AACvB,yBAAkB,oBAAoB,OAAO,MAAM,OAAO;AAC1D,mBAAO,KAAK,WAAW,MAAM,OAAO,OAAO,OAAO;;AAEpD,iBAAO;;AAIT,eAAO,cAAc,IAAI,SAAS,IAAI,MAAM,IAAI,IAAI,OAAO;;;AAG/D,SAAK,MAAM,WAAW,KAAK;;EAG7B;AACE,WAAO,OAAO;AACd,WAAO;;EAGD;AACN,QAAI,KAAK,MAAM,kBAAkB;AAC/B,WAAK,MAAM,aAAa;;AAE1B,SAAK,MAAM;;EAGL;AACN,SAAK,MAAM;;EAOb;AACE,sBAA8B;MAC5B,OAAO;MACP,MAAM;MACN,IAAI,KAAK,MAAM;;AAEjB,QAAI;AACF,gBAAU,OAAO;;AAEnB,SAAK,MAAM,WAAW,KAAK;AAC3B,SAAK,MAAM,cAAc;;EAO3B;AACE,mCAA+B,sBAAsB;AACrD,sCACI,IAAI,IAAI,uBAAuB,IAAI,OAAK,EAAE;AAG9C,iBAAa,GAAG,IAAI,KAAK,MAAM,YAAY,MAAM,QAAQ;AACvD,sBAAe,KAAK,MAAM,YAAY,MAAM;AAC5C,UAAI,CAAC,QAAO,QAAQ,CAAC,0BAA0B,IAAI,QAAO;AACxD,gBAAO;;;AAIX,qBAAiB,KAAK,MAAM,WAAW;AACvC,SAAK,MAAM,cAAc,KAAK,MAAM,WAAW,WAAW,IACtD,OACA,KAAK,MAAM,WAAW,KAAK,MAAM,WAAW,SAAS;AAGzD,2BAAuB,QAAQ;AAG7B,UAAI,CAAC,QAAO,QAAQ,QAAO,YAAY,SAAS;AAC9C,aAAK,MAAM;;;;EAWjB,wCAEuB;AAChB,WACD,GAAG,SAAS,GAAG,MAAM;AACzB,QAAI,MAAM,QAAQ,GAAG,UAAU;AAC7B,YAAM,IAAI,MAAM,0CAA0C,GAAG;;AAG/D,cAAU,KAAK,UACX,MAAM,KAAK,aAAa,MAAM,KAAK,WACnC,MAAM,KAAK,KAAK,WAAW;AAE1B,WACD,aAAa,QACb,MAAM;AAEV,yBAAqB,qBAAqB,KAAK,MAAM,YAAY,IAAI;AACrE,QAAI,CAAC,oBAAoB,aAAa,WAAW,KAAK,GAAG,SAAS;AAChE,YAAM,IAAI,MACN;;AAKN,WAAO,KAAK,KAAK,YAAY;AAC3B,qCAA6D;AAC7D,6BAAuB,EAAE,MAAO,MAAM,OAAQ,KAAK,EAAE,SAAS;AAG9D,6BACI,wBAAwB,cAExB,QAAK,KAAK,KAAK,KAEf;AACJ,oBAAc,GAAG,IAAI,OAAK,uBAAuB,EAAE;AAEnD,UAAI,KAAK,MAAM,kBAAkB;AAG/B,aAAK,MAAM,WAAW,QAAQ;AAC5B,gCAAqB,KAAK;AACxB,oBAAO;;;AAGX,aAAK,MAAM,aAAa;;AAE1B,aAAO,CAAC,OAAO,GAAG;;;EAItB;AAEO,WACI,WAAW,IAChB,MAAM;AACV,WAAO;AACA,aACD,OAAO,MAAM,OAAK,aAAa,SAC/B,MAAM;AAGV;AAIA,uBAAiC;AACjC,aAAO,QAAQ;AACb,iBAAS,KAAK;;AAEhB,aAAO,KAAK,cACR;AACE,cAAM,EAAE,GAAG,CAAC,GAAG,QAAQ;AAClB,eACD,IAAI,iBAAiB,QACrB,MAAM;AAEL,eACI,WAAW,IAAI,WACpB,MAAM;AAEV,eAAO,IAAI;SAEb,UACA;AACE,wBAAgB,IAAI,SAAS,IAAI;AACjC,sBACI,MAAM,QAAQ,WAAW,UAAU,CAAC;AACnC,eACD,MAAM,WAAW,OAAO,QACxB,MAAM;AAGL,eACD,MAAM,MAAM,OAAK,aAAa,SAC9B,MAAM;AAGV,wBAA+C;AAC/C,cAAM,QAAQ;AACZ,kBAAQ,KAAK,MAAM;;AAErB,eAAO;;;;EAKjB;AAEE,iBAAa,KAAK,MAAM,WAAW,IAAI;AACvC,WAAO,KAAK,QAAQ,SAAS;;EAE/B;AAEE,iBAAa,KAAK,MAAM,WAAW,IAAI;AACvC,WAAO,KAAK,QAAQ,KAAK;;QAGrB;AACJ,kBAAc;AACd,uBAAmB,MAAM,KAAK,QAAQ,KAAK;AAC3C,eAAW,SAAS,QAAQ;AAC5B,WAAO;;EASD;AACN,QAAI,KAAK,MAAM,eAAe;AAC5B,aAAO,UAAU,KAAK,MAAM,YAAY;AACxC,WAAK,MAAM,YAAY,MAAM,KAAK;;AAGpC,WAAO;;MAGL;AACF,WAAO,KAAK,MAAM;;EAOpB;AAEE,SAAK;AAEL,SAAK,MAAM;AACX,SAAK,IAAI;AACT,SAAK,QAAQ,IAAI;AAEjB,8BAA0B,KAAK;AAC7B,WAAK,yBAAyB;AAC9B,WAAK,SAAS,aAAa;AAC3B,aAAO,KAAK,SAAS;;AAEvB,SAAK,cAAc;AACnB,SAAK,kBAAkB;AACvB,SAAK,qBAAqB;;;AA/s
Bb,OAAA,eAAe;AAKf,OAAA,iBAAiB;AA8sBlC;AACE,iBAAe,mBAAmB,cAAc,QAAQ;AACxD,SAAO,OAAO,WAAW,QAAQ,OAAO;;AAGpC;AACJ,aAAW;AACX,MAAI,GAAG,aAAa;AAClB,yBAAoB,IAAI,YAAY;AACpC,OAAG,YAAY,IAAI,OAAO;;AAE5B,uBAAqB,GAAG,UAAU;AAIlC,mBAAiB,MAAM,GAAG;AAC1B,SAAO,GAAG;;AAGL,eAAe;AAQhB;AAEJ,iBAAe,CAAC,GAAG;AACnB,SAAO,OAAO,cAAc;AAC1B,gBAAY,SAAQ,IAAI,GAAG;AAC3B,SAAK,CAAC,GAAG;AACT,WAAO;KACN,QAAgC,MAAqB;;ACzrC1D;;;;;;;;;;;;;;;;AAuBM;AACJ,kBAA4B;AAE5B,MAAI,aAAa;AACf,WAAO,UAAU,WAAW,KAAK,CAAC,IAAI;;AAExC,MAAI,CAAC,MAAM,QAAQ;AACjB,WAAO;;AAET,gBAAwB;AAExB,SAAO,MAAM,QAAQ,cACd,aAAa,cAAc,UAAU;AAC1C,UAAM,KAAK,UAAU;AACrB,gBAAY,UAAU;;AAExB,MAAI,MAAM,QAAQ,QACd,MAAM,QAAQ;AAChB,+BAA2B,KAAK,OAAO;;AAGzC,SAAO;;AAGT;AAEE,YAAU,WAAW;AACrB,MAAI,CAAE,MAAM,QAAQ,QAAS,CAAC,aAAa;AACzC,WACI,MAAM,WAAW,GACjB,MAAM,eAAe,QAAQ,KAAK,+DACU,MAAM;AACtD;;AAEF,SACI,MAAM,SAAS,GACf,MAAM,eAAe,QAAQ,KAAK,oDACR,IAAI;AAClC,SACI,IAAI,WAAW,MAAM,IACrB,MAAM,eAAe,QAAQ,KAAK,sBAAsB,MAAM,wBACrC,IAAI;AACjC,mBAAiB,MAAM,MAAM;AAC7B,eAAa,GAAG,IAAI,IAAI,QAAQ,EAAE;AAChC,+BAA2B,IAAI,IAAI,UAAU,QAAQ,OAAO;;;AAIhE;AAGE,MAAI,iBAAiB;AACnB;;AAEF,MAAI,kBAAkB,aAAa,kBAAkB,eACjD,kBAAkB,aAAa,gBAAgB;AACjD,UAAM,IAAI,MACN,aAAa,uBAAuB,yBAC9B,iCAAiC;;;AAIzC,kEAEiC;AACrC,MAAI,aAAa;AACf,gBAAY,cAAc,EAAE,OAAO,SAAS;AAC5C,WAAO;;AAET,sBAAoB,WAAW;AAG/B,MAAI,kBAAkB,YAClB,CAAC,QAAQ,SAAS,WAAW,QAAQ,iBAAiB;AACxD,oBAAgB;;AAElB,cAAY,cAAc,eAAe,SAAS;AAElD,MAAK,KAAK,QACL,CAAC,aAAa,MAAM,CAAC,MAAM,QAAQ,MAAM,OAAO,MAAM,YACtD,OAAO,MAAM,aAAa,OAAO,MAAM;AAC1C,iBAAa,KAAK,OAAO,SAAU,EAAS,YAAY;AACxD,UAAM,IAAI,MACN,aAAa,uBAAuB,0DACF;;AAExC,wBAAsB,WAAW,GAAG;AACpC,MAAI,CAAC,aAAa,MAAM,CAAC,MAAM,QAAQ;AACrC,QAAI,CAAC;;AAEP,yBAAuB;AACvB,iBAAe,kBAAkB,WAC7B,aAAa,GAAG,iBAChB,QAAQ,GAAe,IAAI;AAC/B,SAAO,OAAO,WAAW,QAAQ,eAAe;;ACrHlD;;;;;;;;;;;;;;;;AAmBO,wBAAwB;AAOzB;AACJ,eAAa,OAAO,KAAK;AACzB,MAAI,KAAK,WAAW;AAClB,UAAM,IAAI,MACN,yGAEG,KAAK;;AAGd,eAAa,KAAK;AAClB,aAAW,EAAE;AAGb,MAAI,OAAO,SAAS;AAClB,aAAS,OAAO,UAAU,GAAG,OAAO,SAAS;;AAI/C,WAAS,SAAS;AAGlB,aAAW;AACT,WAAO,WAAW;AAClB;AACE,qBAAe,GAAG,GAAG;AACrB,UAAI,UAAU;AACZ,gBAAQ,MAAM;;AAEhB,aAAO,SAAS;AAChB,aAAO;;AAEP,aAAO,SAAS;AAChB,YAAM;;;AAGV,SAAO,eAAe,IAAI,QAAQ,CAAC,OAAO,QAAQ,cAAc;AAGhE,SAAO;;AChET;;;;;;;;;;;;;;;;AA8CA;AACE,gBAAc,gBAAgB,MAAM,QAAQ;AAC5C,gBAAc,gBAAgB,MAAM,QAAQ;AACvC,oBACD,MAAM,OAAO,MAAM,OACnB,yBAAyB,MAAM,aAAa,MAAM;AAGtD,kBAAqC;AACnC,WAAO,SAAQ,QAAQ,OAAO;;AAEhC,iBAA8B,CAAC,MAAM,OAAO,MAAM;AAClD,SAAO,OAAO,cACH,SAAS,QAAgC,MACzC;;AAGN,gBAAgB,GAAG,CAAC;AC/D3B;;;;;;;;;;;;;;;;AAwBM;AAGJ,MAAI,SAAS;AACX,YAAQ,WAAW;;AAErB,MAAI,UAAU;AACZ,UAAM,IAAI,MACN;;AAGN,MAAI,CAAC,aAAa,WAAW,CAAC,MAAM,QAAQ,WACxC,OAAO,WAAW,YAAY,OAAO,WAAW,aAChD,OAAO,WAAW;AACpB,UAAM,IAAI,MACN;;AAGN,MAAI,SAAS;AACX,uCAAmC;AAEnC,yBAAqB,cAAc;AACnC,yBAAqB,cAAc;AACnC,WACI,iBAAiB,cACjB,MACI,iCAAiC,kCAC9B,+BAA+B;AAE1C,iBAAa,GAAG,IAAI,cAAc,QAAQ,EAAE;AAC1C,uBAAiB,cAAc;AAC/B,gCAA0B,MAAM,cAAc,SAAS,IACnD,aAAa,cAAc,MAAM,MAAM,MACvC;AACJ,aACI,cAAc,OAAO,MAAM,MAAM,CAAC,mBAClC,MAAM,gDACE,qDACM;;;AAItB,MAAI,CAAC,aAAa,WAAW,CAAC,MAAM,QAAQ;AAC1C,aAAS,CAAC;;AAGZ,UAAQ,SAAS;AACjB,WAAS,UAAU,WACf,aAAa,QAAQ,SACrB,QAAQ,QAAoB,IAAI;AACpC,SAAO,OAAO,WAAW,QAAsB,OAAO;;AC1ExD;;;;;;;;;;;;;;;;AA+CM,+BAC6B;AAEjC,UAAQ,SAAS;AACZ,qCAAmC;AACxC,SAAO,IAAI,aAAmB,OAAO,OAAO;;ACpD9C;;;;;;;;;;;;;;;;AAuCA;AACE,aAAW,gBAAgB,GAAG,KAAK;AAGnC,MAAI,CAAM,aAAa;AACrB,UAAM,IAAI,MAAM,mCAAmC;;AAErD,MAAI,UAAU,YAAY,GAAG,UAAU,YACnC,UAAU,YAAY,GAAG,UAAU;AACrC,UAAM,IAAI,MAAM;;AAGlB,iBAA2B,CAAC,GAAG;AAC/B,gBAAyB,CAAC;AAE1B,SAAO,OAAO,cACV,cAAW,SAAQ,KAAK,IAAI,QAAQ,QACpC,MAAiB,MAAM;;AAGtB,aAAa,GAAG,CAAC;AC3DxB;;;;;;;;;;;;;;;;AAuDA;AAEE,aAAW,gBAAgB,GAAG,KAAK,WAAW;AAE9C,iBAA8B,CAAC,GAAG;AAClC,gBAA4B,CA
AC;AAC7B,kBACiB;AACf,YAAa,uBAAuB,OAAO,GAAG;AACzC,WACD,GAAG,SAAc,cAAc,QAC/B,MAAM;AACV,SAAK,CAAC;AACN,WAAO,SAAQ,QAAQ,IAAI;;AAE7B,SAAO,OAAO,cACV,SAAS,QAAgC,MAAiB,SAC1D;;AAEC,gBAAgB,GAAG,CAAC;AC1E3B;;;;;;;;;;;;;;;;AA+CA;AACE,aAAW,gBAAgB,GAAG,KAAK;AAEnC,MAAI,QAAQ;AACV,WAAO,GAAG,MAAM,IAAI,UAAU,GAAG;;AAE9B,SACD,GAAG,SAAS,KAAK,QACjB,MAAM,qCAAqC,GAAG,kCACb;AACrC,OAAK,QAAQ;AACN,WACD,QAAQ,KAAK,OAAO,GAAG,MACvB,MAAM,+CAA+C,GAAG,OAAO,aAC/C;;AAGtB,MAAI,GAAG,QAAQ;AACb,WAAO,GAAG;;AAGZ,iBAAgC,CAAC,GAAG;AACpC,gBAA8B,CAAC;AAE/B,SAAO,OAAO,cACV,cAAW,SAAQ,UAAU,IAAI,OAAO,QACxC,MAAqB,WAAW;;AAG/B,kBAAkB,GAAG,CAAC;AC3D7B,+BAAA;;;;ACUM;AAEJ,MAAI,QAAO,OAAO;AAChB,UAAM,IAAI,MACN,4EACqB,QAAO;;AAElC,MAAI,QAAQ,OAAO;AACjB,UAAM,IAAI,MACN,8EACqB,QAAQ;;AAEnC,MAAI,QAAQ,UAAU;AACpB,UAAM,IAAI,MACN,yEACsB,QAAQ;;AAEpC,MAAI,QAAQ,MAAM,QAAQ,OAAO,KAAK,QAAO;AAC3C,UAAM,IAAI,MACN,iEACG,QAAQ,MAAM,QAAQ,OAAO,UAAU,QAAO;;AAGvD,MAAI,QAAO,SAAS;AAClB,UAAM,IAAI,MACN,mEACiB,QAAO;;AAG9B,uBAAqB,QAAQ;AAC7B,oBAAkB,aAAa,aAAa,SAAS;AAIrD,gBAAc;AACd,eAAa,GAAG,IAAI,aAAa,SAAS,GAAG,EAAE;AAC7C,eAAW,aAAa;;AAG1B,qBAAmB,QAAO;AAE1B,sBAAoB,aAAa;AACjC,cAAY;AAEZ,kBAAgB;AAChB,eAAa,WAAW,IAAI,QAAO,MAAM,EAAE;AACzC,iBAAa,WAAW;AACxB,gBAAY,KAAK,WAAW;;AAG9B,kBACI;IAAC,GAAG,eAAe,QAAO,OAAO,IAAI,YAAU,SAAS;IACvD;IAAG,MAAM,GAAG;AAEjB,SAAO,CAAC,aAAa,SAAS,WAAW;;AC/D3C,gCAAA;;;;;;ACQM;AAEJ,mBAAkB,QAAQ,OAAO,IAAK,QAAQ,MAAM,QAAQ,OAAO,KAAK;AACxE,mBAAkB,QAAQ,OAAO,IAAK,QAAQ,OAAO,IAAI;AAEzD,qBAAmB,6FACyB,QAAQ,yBAC5B,QAAQ,iBAAiB,oBAC9B,2BAA2B;AAE9C,MAAI,QAAQ,OAAO;AACjB,UAAM,IAAI,MAAM,aAAa,kBAAkB;;AAEjD,MAAI,MAAM,SAAS,WAAY,SAAQ,OAAO;AAC5C,UAAM,IAAI,MACN,aACA,0BAA0B,WAAY,SAAQ,OAAO;;AAE3D,MAAI,QAAQ,SAAS,WAAW,MAAM,SAAS;AAC7C,UAAM,IAAI,MACN,aAAa,mBAAmB,WAAW,MAAM,SAAS;;AAEhE,eAAa,GAAG,IAAI,UAAU,EAAE;AAC9B,QAAI,QAAQ,MAAM,OAAO,QAAQ,MAAM;AACrC,YAAM,IAAI,MACN,aACA,kBAAkB,OAAO,QAAQ,MAAM,wBAAwB,OAC3D,QAAQ,MAAM;;;AAG1B,eAAa,GAAG,IAAI,QAAQ,OAAO,UAAU,EAAE;AAC7C,QAAI,QAAQ,MAAM,IAAI,cAAc,MAAM,IAAI;AAC5C,YAAM,IAAI,MACN,aACA,kBAAkB,IAAI,cAClB,QAAQ,MAAM,IAAI,uBAAuB,IAAI,cAC7C,MAAM,IAAI;;;;AAmBlB;AAEJ,MAAI,QAAQ,OAAO;AACjB,UAAM,IAAI,MACN,+EACqB,QAAQ;;AAEnC,MAAI,QAAQ,OAAO;AACjB,UAAM,IAAI,MACN,+EACqB,QAAQ;;AAEnC,MAAI,QAAQ,UAAU;AACpB,UAAM,IAAI,MAAM,0DACZ,QAAQ;;AAEd,MAAI,MAAM,SAAS;AACjB,UAAM,IAAI,MACN,6DAA6D;;AAGnE,MAAI,MAAM,WAAW;AACnB,QAAI,QAAQ,SAAS;AACnB,YAAM,IAAI,MAAM,sDACZ,QAAQ;;AAEd,QAAI,QAAQ,SAAS;AACnB,YAAM,IAAI,MAAM,sDACZ,QAAQ;;;AAIhB,sBAAoB,OAAO,SAAS;;AAYhC;AAIJ,sBAAoB,QAAQ,MAAM;AAClC,oBAAmB,cAAc,IAAK,QAAQ,MAAM,cAAc,KAAK;AAKvE,kBAAgB,MAAM;AAEtB,kBAAgB;AAChB,eAAa,WAAW,IAAI,SAAS,EAAE;AACrC,iBAAa,MAAM;;AAGrB,uBAAsB,YAAY,IAAK,IAAI;AAC3C,qBAAmB,cAAc,QAAQ,SAAS;AAElD,kBAAgB,CAAC,GAAG,eAAe,MAAM,MAAM,GAAG,aAAa;AAC/D,qBAAmB,cAAc;AACjC,SAAO,CAAC,WAAW,YAAY,WAAW,SAAS;;ACnJrD,2BAAA;;;;;;;;;;;;;;;;ACAA;;;;;;;;;;;;;;;;AAoBM;AAEJ,oBAAkB,MAAM,MAAM;AACzB,SACD,cAAc,MAAM,QACpB,MAAM,iBAAiB,+BAA+B,2CAClB;AACnC,SACD,cAAc,KAAK,QACnB,MAAM,iBAAiB,8BAA8B,0CACjB;AAExC,eAAa,GAAG,IAAI,WAAW,EAAE;AAC1B,WACD,MAAM,KAAK,KAAK,MAAM,MAAM,MAAM,IAClC,MAAM,iBAAiB,qBAAqB,aAAa,OACjD,MAAM,KAAK,KAAK,kCAAkC,OAChD,MAAM,MAAM;;;AAKxB;AACJ,eAAa;AACb,aAAW;AACX,SAAO,OAAO;AACZ,QAAI,OAAO;AACT,WAAK,KAAK;;AAEZ,YAAQ;AACR;;AAEF,SAAO;;AAIH;AAEJ,eAAa;AACb,kBAAgB,GAAG,OAAO,MAAM,QAAQ;AACtC,SAAK,QAAQ,KAAK,KAAM,KAAI,QAAQ,MAAM,SAAS,QAAQ;;AAE7D,SAAO;;AAKH;AAGJ,qBAAmB,CAAC,GAAG;AACvB,eAAa,WAAW,QAAQ,IAAI,WAAW,QAAQ;AACrD,eAAW,KAAK;;AAElB,eAAa,GAAG,IAAI,eAAe;AACjC,QAAI,MAAM;AACR,iBAAW,0BAA0B;;AAErC,iBAAW,OACP,wBAAwB,GACxB;AACJ,iBAAW;;;AAGf,SAAO;;AAGT;AAGE,MAAI,kBAAkB;AACpB,WAAO;;AAGT,SAAO,iBAAkB,iBAAgB;;AAG3C;AACE,qBAAmB;AACnB,eAAa,GAAG,IAAI,eAAe;AACjC,eAAW,KAAK,yBAAyB;;AAE3C,SAA
O;;AAIH;AAKJ,oBAAkB,WAAW;AAC7B,wBAAsB,IAAI,MAAM,4BACZ,IAAI,MAAM,gCACN,IAAI,MAAM;AAClC,MAAI,aAAa,UAAU,sBAAsB;AAC/C,sBAAkB,aAAa;AAI/B,0BAAsB,sBAAsB;AAC5C,sBAAkB,2BACd,WAAW,WAAW,eAAe,OAAO;AAChD,oBAAgB,0BACZ,SAAS,WAAW,eAAe,KAAK;AAC5C,wBACI,sBAAsB,SAAS,WAAW,eAAe;;AAE7D,oBAAgB,GAAG,OAAO,WAAW;AACnC,sBAAgB,QAAQ,aACpB,WAAW,OAAO,SAAS,YAAY,MAAM;AACjD,oBAAc,QACV,YAAY,SAAS,KAAK,SAAS,YAAY,MAAM;AACzD,wBAAkB,QAAQ,eAAe,SAAS,MAAM;;;AAI5D,SAAO;IACL,OAAO;IACP,KAAK;IACL,SAAS;;;AAMP;AAGJ,qBAAmB,CAAC,GAAG;AACvB,qBAAmB,cAAc,eAAe;AAEhD,kBAAgB,GAAG,OAAO,WAAW,QAAQ;AAC3C,QAAI,WAAW,QAAQ,QAAQ;AAC7B,iBAAW,QAAQ;;AAEnB,2BACI,gBAAgB,wBAAwB,eAAe;AAC3D,0BAAoB,cAAc;AAClC,UAAI,YAAY,KAAK;AACnB,wBAAgB;;AAGlB,iBAAW,QAAQ;;;AAGvB,SAAO;;AAKH;AAGJ,qBAAmB,CAAC,GAAG;AACvB,qBAAmB,cAAc,eAAe;AAEhD,kBAAgB,GAAG,OAAO,WAAW,QAAQ;AAC3C,QAAI,WAAW,QAAQ,QAAQ;AAC7B,iBAAW,QAAQ,OAAO;;AAE1B,2BACI,gBAAgB,wBAAwB,eAAe;AAC3D,0BAAoB,YAAY;AAChC,UAAI,UAAU,KAAK;AACjB,wBAAgB,OAAO;;AAEzB,iBAAW,QAAQ;;;AAIvB,eAAa,GAAG,IAAI,WAAW,QAAQ;AAErC,qBAAiB,WAAW;AAC5B,QAAI,WAAW,KAAK;AAClB,iBAAW,MAAM;;AAEnB,eAAW,KAAU,MAAM,GAAG,WAAW,IAAI,WAAW;;AAE1D,SAAO;;AAGH;AAEJ,eAAa,QAAQ;AACrB,MAAI,eAAgB,KAAK,QAAS,UAAU;AAC1C,aAAS;;AAGX,SAAO;;AAGH;AAIJ,cAAY,aAAa;AACzB,iBAAe,QAAQ,SAAS;AAIhC,MAAI,YAAY,KAAK,QAAQ,eAAe,KAAK,QAAQ,SAAS;AAChE,QAAI,SAAS;AAIX,cAAQ,OAAO;;AAGf,cAAQ,OAAO;;;AAKnB,mBAAiB,WAAW;AAC5B,MAAI,QAAQ;AACV,aAAS;;AAIX,UAAa,MAAM,GAAG,OAAO,WAAW;AAExC,SAAO;;AAGH;AAIJ,aAAW,YAAY;AACvB,iBAAe,QAAQ,SAAS;AAIhC,MAAI,UAAW,KAAK,QAAS,eAAgB,KAAK,QAAS,QAAQ;AACjE,QAAI,SAAS;AAGX,aAAO,OAAO;;AAGd,aAAO,OAAO;;;AAKlB,mBAAiB,WAAW;AAC5B,MAAI,OAAO;AACT,YAAQ;;AAMV,MAAI,SAAS;AAEX,WAAY,MAAM,GAAG,MAAM;;AAG3B,WAAY,MAAM,IAAI,MAAM,WAAW;;AAGzC,SAAO;;AAOH;AAGJ,wBAAsB,KAAK;AAC3B,eAAa,GAAG,IAAI,KAAK,QAAQ;AAC/B,QAAI,KAAK,KAAK;AACZ,wBAAkB;AAClB;;;AAIJ,eAAa,kBAAkB,GAAG,IAAI,KAAK,QAAQ;AACjD,QAAI,MAAM,KAAK,KAAK,KAAK,OAAO,MAAM;AACpC,aAAO;;;AAGX,SAAO;;AAGH;AACJ,mBAAiB,MAAM,SAAS,IAAI,MAAM,MAAM,SAAS,KAAK;AAC9D,eAAa,GAAG,IAAI,MAAM,SAAS,GAAG;AACpC,kBAAc,MAAM,KAAK,QAAQ;;AAEnC,SAAO;;AAGH;AAGJ;AACA,gBAAc,EAAE,MAAM;AACtB,MAAI,OAAO,UAAU;AACnB,aAAS,CAAC,OAAO,GAAG,IAAI,MAAM,QAAQ,GAAG,KAAK;aACrC,MAAM,SAAS;AACxB,aAAS,MAAM,OAAO,IAAI,MAAM,QAAQ,MAAM,QAAQ,KAAK;;AAE3D,aAAS,MAAM;;AAEjB,SAAO,QAAQ;AACR,WACD,MAAM,IAAI,MAAM;;AAEtB;AACA,MAAI,QAAQ;AACV,YAAQ,IAAI,MAAM,OAAO,KAAK;aACrB,OAAO,SAAS;AACzB,YAAQ,CAAC,MAAM,GAAG,IAAI,MAAM,QAAQ,GAAG,KAAK;aACnC,KAAK,SAAS;AACvB,YAAQ,KAAK,OAAO,IAAI,MAAM,QAAQ,KAAK,QAAQ,KAAK;;AAExD,YAAQ;;AAEV,UAAQ,MAAM,IAAI;AAChB,QAAI,KAAK;AACP,aAAO;;AAEF,aACD,MAAM,IACN,MAAM,qDACC,mCAAmC;AAC9C,aAAO,EAAE,MAAM,KAAK,OAAO;;;AAG/B,SAAO,CAAC,QAAQ;;ACjWlB;;;;;;;;;;;;;;;;AC6DM;AACJ,MAAI,MAAM,QAAQ;AAChB,YAAQ,KACJ,MAAM;;;AAId,wBAAwB;ACgBlB;AACJ,SAAO;;AC8QH,mDAES;AACb,SAAO,OAAO,gBAAgB,MAAM,SAAS;;ACtW/C;;;;;;;;;;;;;;;;AAoDA;AACE,WAAS,gBAAgB,GAAG,KAAK;AACjC,WAAS,gBAAgB,GAAG,KAAK;AACjC,GAAC,IAAI,MAAM,eAAe,IAAI;AAE9B,kBAAqC;AACnC,gBAAY,SAAQ,SAAS,IAAI;AACjC,SAAK,CAAC,IAAI;AACV,WAAO;;AAET,iBAA+B,CAAC,GAAG,IAAI,GAAG;AAE1C,SAAO,OAAO,cACH,SAAS,QAAgC,MACzC;;AAEN,YAAY,GAAG,CAAC;ACpEvB;;;;;;;;;;;;;;;;AAuBM;AACJ,eAAa,GAAG,IAAI,KAAK,QAAQ,EAAE;AACjC,QAAI,KAAK,KAAK,SAAS,IAAI,OAAO,OAAO,IAAI;AAC3C,aAAO;;;AAGX,SAAO;;AAGH;AAEJ,eAAa,UAAU,SAAS,UAAU;AAC1C,cAAY;AACZ,eAAa;AACb,kBAAgB;AACd,iBAAe,GAAG,MAAM,MAAM;AAC9B,QAAI,KAAK,QAAQ,SAAS;AACxB,UAAI,KAAK,UAAU;;AAEnB,UAAI,KAAK,UAAU;;;AAGvB,SAAO;;AAGH;AAEJ,mBAAiB;AACjB,eAAa,OAAO;AACpB,iBAAe,GAAG,MAAM,MAAM;AAC5B,QAAI,KAAK,QAAQ,SAAS;AACxB,eAAS,KAAK,OAAO;;;AAGzB,sBAAoB,KAAK,IAAI,SAAO,OAAO;AAC3C,SAAO,CAAC,UAAU;;AAGd;AAEJ,yBAAuB,KAAK,IAAI,OAAK;AACrC,SAAO,iBAAiB,OAAO,gBAAgB;;AAG3C;AAEC,SACD,qBAAqB,MAAM,OAC3B,MAAM,GAAG,uDACO,iBAAiB
CvE;EAAI;EAAI;EAAK;EAAK;EAAK;EAAI;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACvE;EAAK;EAAI;EAAI;EAAI;EAAI;EAAG;EAAI;EAAG;EAAK;EAAK;EAAI;EAAI;EAAI;EAAI;EAAK;EAAK;EAAK;EACxE;EAAK;EAAI;EAAK;EAAI;EAAK;EAAK;EAAI;EAAK;EAAI;EAAI;EAAI;EAAK;EAAK;EAAI;EAAK;EACpE;EAAK;EAAK;EAAI;EAAI;EAAK;EAAK;EAAK;EAAI;EAAK;EAAI;EAAI;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAI;EAAK;EAAI;EAAK;EAAG;EAAK;EAAK;EAAI;EAAK;EAAK;EAAI;EAAK;EAAI;EAAI;EAAK;EACxE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACrE;EAAK;EAAK;EAAK;EAAI;EAAK;EAAI;EAAK;EAAI;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAI;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAI;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EACxE;EAAI;EAAI;EAAI;EAAI;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAG;EAAI;EAAI;EAAG;EAAK;EACrE;EAAK;EAAK;EAAK;EAAI;EAAK;EAAI;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EACvE;EAAI;EAAK;EAAK;EAAI;EAAI;EAAI;EAAI;EAAK;EAAK;EAAI;EAAK;EAAI;EAAK;EAAI;EAAK;EAAK;EACxE;EAAK;EAAK;EAAI;EAAK;EAAI;EAAK;EAAI;EAAI;EAAI;EAAK;EAAK;EAAK;EAAI;EAAI;EAAI;EAAI;EACvE;EAAK;EAAK;EAAI;EAAK;EAAI;EAAI;EAAI;EAAI;EAAI;EAAI;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EACxE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAI;EACxE;EAAI;EAAI;EAAI;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAI;EAAI;EAAK;EAAK;EAAI;EAAK;EACzE;EAAI;EAAI;EAAK;EAAK;EAAI;EAAI;EAAI;EAAI;EAAK;EAAI;EAAG;EAAK;EAAK;EAAK;EAAK;EAAK;EACvE;EAAK;EAAI;EAAK;EAAK;EAAG;EAAK;EAAI;EAAI;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACrE;EAAK;EAAK;EAAI;EAAI;EAAI;EAAG;EAAK;EAAI;EAAI;EAAI;EAAI;EAAK;EAAK;EAAK;EAAI;EAAI;EACrE;EAAK;EAAI;EAAK;EAAI;EAAK;EAAK;EAAK;EAAI;EAAI;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EACtE;EAAK;EAAI;EAAI;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EACvE;EAAK;EAAG;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAI;EAAI;EAAI;EAAK;EAAK;EAAK;EACrE;EAAK;EAAG;EAAK;EAAI;EAAK;EAAI;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EACrE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAC1E;EAAK;EAAK;EAAK;EAAK;EAAG;EAAK;EAAI;EAAI;EAAI;EAAI;EAAK;EAAK;EAAK;EAAI;EAAI;EAAI;EACvE;EAAI;EAAI;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACvE;EAAK;EAAI;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACzE;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAI;EAAK;EAAK;EAAI;EAAK;EAAK;EAAI;EAAK;EACvE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAI;EAAK;EAAK;EAAI;EAAK;EAAK;EACxE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAI;EAAI;EACvE;EAAK;EAAK;EAAI;EAAI;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAAI;EAAI;EAC1E;EAAK;EAAK;EAAI;EAAK;EAAK;EAAI;EAAK;EAAI;EAAI;EAAK;EAAK;EAAI;EAAK;EAAK;EAAI;EACrE;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EACpE;EAAK;EAAI;EAAK;EAAK;EAAI;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAI;EAAI;EAAK;EACtE;EAAK;EAAK;EAAI;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACpE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAI;EAAK;EAAK;EAAK;EAAI;EAAK;EAAG;EAAI;EAAK;EAAI;EAAK;EAAK;EAAK;EAAG;EAAI;EACvE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAG;EACpE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EACpE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EACrE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACrE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAA
K;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EACrE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAG;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACpE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EACrE;EAAK;EAAI;EAAK;EAAG;EAAK;EAAG;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAG;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACpE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EACrE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAG;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAK;EACxE;EAAK;EAAK;EAAK;EAAI;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACrE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAG;EAAK;EAAG;EAAK;EAAK;EAAK;EAAK;EAAK;EACvE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAI;EAAG;EAAK;EAAK;EAAK;EAAK;EACxE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EACtE;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;EAAK;;ACvKnE,iBAA0B,WAAA;AAC1B,YAAqB,WAAA;AACrB,eAAwB,WAAA;AACxB,gBAAyB,WAAA;AACzB,gBAAyB,WAAA;ACLzB;;;;;;;;;;;;;;;;AAkBA;AACE,SAAO;IACL,KAAK,IAAI,IAAI,SAAS,KAAK,IAAI,WAAW;IAC1C,KAAK,IAAI,IAAI,SAAS,KAAK,IAAI,WAAW;;;AAG9C;AACE,SAAO;IACL,IAAI,WAAW,KAAM,KAAI,SAAS,KAAK,IAAI,WAAW,MAAM;IAC5D,IAAI,WAAW,KAAM,KAAI,SAAS,KAAK,IAAI,WAAW,MAAM;;;AAGhE;AACE,YAAU,OAAM,MAAM;AACtB,YAAU,OAAM,MAAM;AACtB,gBAAc,CAAC;IACb,IAAI,WAAW,KAAK;IACpB,IAAI,WAAW,KAAK;IACpB,IAAI,SAAS,KAAK;IAClB,IAAI,SAAS,KAAK;;AAEpB,SAAO,GAAG,MAAM,cAAc,QAAO,OAAO,CAAC,IAAI;;AAEnD;AACE,qBAAmB,CAAC,IAAI,WAAW,KAAK,OAAO,IAAI,IAAI,WAAW,KAAK,OAAO;AAC9E,mBAAiB,CAAC,IAAI,SAAS,KAAK,OAAO,IAAI,IAAI,SAAS,KAAK,OAAO;AACxE,wBAAsB,IAAI,cAAc,IAAI;AAC1C,wBAAoB,CAAC,MAAM,KAAK,OAAO,IAAI,MAAM,KAAK,OAAO;AAC7D,WAAO;;AAET,SAAO,CAAE,YAAY,UAAU,eAAe,YAAY,IAAI;;AAEhE,kCAAkC;AAChC,iBAAe,aAAa;AAC5B,eAAa,WAAW;AACxB,sBAAoB,CAAC,SAAS,KAAK,KAAK,GAAG,SAAS,KAAK,KAAK;AAC9D,qBAAmB,CAAC,OAAO,KAAK,YAAY,IAAI,OAAO,KAAK,YAAY;AAC
xE,mBAAiB,CAAC,OAAO,KAAK,YAAY,IAAI,OAAO,KAAK,YAAY;AACtE,SAAO,CAAE,YAAY,UAAU,eAAe,IAAI;;AAEpD;AACE,kBAAgB,aAAa;AAC7B,eAAa,WAAW;AACxB,kBAAgB,KAAK,IAAI,GAAG;AAC5B,mBAAiB,UAAU;AAC3B,qBAAmB,CAAC,QAAQ,KAAK,UAAU,QAAQ,KAAK;AACxD,mBAAiB,CAAC,QAAQ,KAAK,UAAU,QAAQ,KAAK;AACtD,SAAO,CAAE,YAAY,UAAU,eAAe,IAAI;;AAEpD;AACE,kBAAgB;IACd,IAAI,SAAS,KAAK,IAAI,WAAW;IACjC,IAAI,SAAS,KAAK,IAAI,WAAW;;AAEnC,sBAAoB,CAAC,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,YAAY;AAC3E,qBAAmB,CAAC,IAAI,WAAW,KAAK,YAAY,IAAI,IAAI,WAAW,KAAK,YAAY;AACxF,mBAAiB,CAAC,IAAI,SAAS,KAAK,YAAY,IAAI,IAAI,SAAS,KAAK,YAAY;AAClF,SAAO,CAAE,YAAY,UAAU,eAAe,IAAI;;AC3EpD;;;;;;;;;;;;;;;;AAgBA;AACE,SAAO,QAAQ,IAAI,KAAK,KAAK,KAAK,MAAO,SAAQ,KAAK,MAAO,KAAI,KAAK;;AAExE;AACE,kBAAgB,KAAK,KAAK,IAAI,KAAK,MAAM,CAAE,QAAO,KAAK,OAAO,KAAK,OAAO,KAAK,OAAO;AACtF,SAAO,iBAAiB;;AAE1B,+BAA+B,UAAU,CAAC,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,GAAG;AACvE;AACE,gBAAc;AACd,eAAa,GAAG,IAAI,GAAG,QAAQ;AAC7B,eAAW,GAAG,KAAK,GAAG;;AAExB,SAAO;;AAET;AACE,iBAAe;AACf,eAAa,GAAG,IAAI,IAAI,QAAQ;AAC9B,WAAO,KAAK,IAAI,GAAG;;AAErB,SAAO;;AAET;AACE,kBAAgB;AAChB,eAAa,KAAK;AAClB,iBAAe,GAAG,MAAM,MAAM;AAC5B,YAAQ,KAAK;AACb,mBAAe,GAAG,MAAM,MAAM;AAC5B,cAAQ,KAAK,KAAK,IAAI,KAAK,MAAM,mBAAmB,MAAM;;;AAG9D,SAAO;;AAET;AACE,eAAa,KAAK,IAAI;AACtB,eAAa,KAAK,IAAI;AACtB,yBAAuB,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,MAAM,MAAM,IAAI,CAAC,GAAG,GAAG;AAClE,4BAA0B,uBAAuB,OAAO,IAAI,OAAO;AACnE,mCAAiC,0BAA0B,mBAAmB;AAC9E,oCAAkC,uBAAuB,CAAC,OAAO,IAAI,CAAC,OAAO;AAC7E,SAAO,0BAA0B,0BAA0B;;AAE7D;AACE,4BAA0B,CAAC,CAAC,OAAO,GAAG,IAAI,OAAO,GAAG,KAAK,CAAC,OAAO,GAAG,IAAI,OAAO,GAAG;AAClF,+BAA6B,CAAC,OAAO,GAAG,IAAI,OAAO,GAAG;AACtD,8BAA4B;IAC1B,CAAC,IAAI,kBAAkB,IAAI;IAC3B,CAAC,IAAI,kBAAkB,IAAI;;AAE7B,SAAO;IACL,kBAAkB,GAAG,OAAO,oBAAoB;IAChD,kBAAkB,GAAG,OAAO,oBAAoB;IAChD,CAAC,GAAG,GAAG;;;AAGX;AACE,SAAO;IACL,IAAI,uBAAuB,eAAe;IAC1C,IAAI,uBAAuB,eAAe;;;ACpE9C,iBAA0B,WAAA;AAC1B,gBAAyB,WAAA;AACzB,cAAuB,WAAA;AACvB,gBAAyB,WAAA;ACNzB,qBAAe;EACb,SAAS;EACT,UAAU;EAEV,SAAS;EACT,OAAO;EAIP,SAAS;EAIT,YAAY;EAKZ,QAAQ;EAIR,gBAAgB;EAKhB,QAAQ;IACN,SAAS;IACT,OAAO;IACP,QAAQ;IAIR,QAAQ;IACR,YAAY;IACZ,UAAU;IACV,WAAW;IACX,MAAM;IACN,YAAY;IACZ,KAAK;IACL,UAAU;IACV,OAAO;IACP,SAAS;IACT,YAAY;IACZ,aAAa;IACb,UAAU;IACV,UAAU;;EAGZ,SAAS;IACP,SAAS;;EAGX,MAAM;IACJ,SAAS;IAIT,UAAU;MACR,WAAW;MAIX,WAAW;MACX,UAAU;MAEV,YAAY;MAKZ,eAAe;MACf,cAAc;MAEd,gBAAgB;;IAKlB,MAAM;MACJ,SAAS;MACT,WAAW;MACX,WAAW;;IAGb,MAAM;MACJ,SAAS;MACT,WAAW;MACX,WAAW;;IAGb,KAAK;MACH,SAAS;MACT,WAAW;MAEX,WAAW;MACX,YAAY;;IAId,QAAQ;MACN,SAAS;MACT,eAAe;MACf,WAAW;MACX,WAAW;MACX,YAAY;;IAId,SAAS;MACP,SAAS;MACT,WAAW;MACX,eAAe;MACf,YAAY;MACZ,WAAW;;;EAIf,MAAM;IACJ,SAAS;IACT,WAAW;IACX,WAAW;IACX,eAAe;IAEf,gBAAgB;IAEhB,WAAW;;EAGb,MAAM;IACJ,SAAS;IACT,WAAW;IACX,YAAY;IAKZ,eAAe;IACf,cAAc;IAEd,gBAAgB;IAEhB,UAAU;IAEV,WAAW;IACX,UAAU;MACR,WAAW;;IAEb,UAAU;MACR,WAAW;;;;;AC3IjB,0BAA0B;EACxB,MAAM,CAAE,UAAU,CAAE,YAAY,IAAK,KAAK,CAAE,YAAY,IAAK,QAAQ,CAAE,YAAY,IAAK,SAAS,CAAE,YAAY;EAAO,MAAM,CAAE,YAAY;;AAI5I,aAAY;AACV,MAAI,OAAO,gBAAgB;AAAa,WAAO,YAAY;AAC3D,SAAO,SAAS,OAAO,QAAQ,OAAO,YAAY,MAAO;;AAI3D;AACE,mBAAiB,SAAS,OAAO,OAAO,QAAQ;AAChD,SAAO,QAAQ,OAAO;AACpB,WAAO,KAAK,OAAO,IAAI,QAAQ;AAC7B,mBAAa,KAAK;AAClB,mBAAa,IAAI;AACjB,UAAI,MAAM,QAAQ,SAAS,MAAM,QAAQ;AACvC,aAAK,OAAO,KAAK,OAAO,GAAG;iBAClB,SAAS,SAAS,SAAS;AACpC,aAAK,OAAO,UAAU,MAAM;;AAE5B,aAAK,OAAO;;;AAGhB,WAAO;KACN;;;EAIH,0BAAyB;AACvB,SAAK,KAAK;AACV,SAAK,UAAc;AACnB,SAAK,SAAS,UAAiB,gBAAS;AACxC,SAAK,KAAK;AACV,SAAK,QAAQ;AACb,SAAK,aAAa;AAClB,SAAK,qBAAqB;AAC1B,SAAK,cAAc;AACnB,SAAK,WAAW;AAChB,SAAK,OAAO;AAEZ,SAAK,SAAS;MACZ,UAAU;MACV,SAAS;MACT,UAAU;MACV,MAAM;MACN,KAAK;MACL,QAAQ;MACR,SAAS;;AAGX,SAAK,WAAW;AAChB,SAAK,MAAM;AACX,SAAK
,SAAS;AACd,SAAK,UAAU;AACf,SAAK,OAAO;AACZ,SAAK,OAAO;;EAId;AAEE,QAAI,OAAO,KAAK,OAAO;AAAS,cAAQ,IAAI,UAAU,GAAG;;EAG3D;AACE,QAAI,KAAK,OAAO;AAAS,aAAe,QAAA;AACxC,WAAO;;EAIT;AACE,QAAI,CAAC,KAAK;AAAoB;AAC9B,oBAAgB,GAAG,SAAS,MAAM;AAClC,qBAAiB,KAAK;AACtB,SAAK,aAAa;AAClB,mBAAe,UAAU;AACzB,QAAI,WAAW;AAAG,WAAK,IAAI,GAAG,KAAK;;EAIrC;AACE,QAAI,CAAC,KAAK;AAAa,aAAO;AAC9B,QAAI,CAAC;AAAO,aAAO;AACnB,QAAI,GAAG,IAAI,MAAM,WAAW,CAAE,kBAAiB,GAAG;AAChD,aAAO;;AAET;AACE,SAAG;;AAEH,aAAO;;AAET,WAAO;;QAIH;AACJ,SAAK,QAAQ;AACb,sBAAkB;AAClB,QAAI;AAAY,WAAK,SAAS,UAAU,KAAK,QAAQ;AAErD,QAAI,KAAK;AACP,WAAK,aAAa;AAClB,WAAK,IAAI,YAAY,KAAK,kCAAkC,GAAG;AAC/D,WAAK,IAAI,kBAAkB,KAAK;AAChC,WAAK,IAAI,UAAU,GAAG,IAAI;AAC1B,WAAK,WAAW;;AAElB,QAAI,KAAK,OAAO;AACd;QACE,KAAK,OAAO;QACZ,KAAK,OAAO;QACZ,KAAK,OAAO;QACZ,KAAK,OAAO;QACZ,KAAK,OAAO;QACZ,KAAK,OAAO;UACV,MAAM,QAAQ,IAAI;QACpB,KAAK,OAAO,YAAa,MAAK,OAAO,KAAK,UAAmB,SAAA,KAAK,KAAK,OAAO,QAAQ;QACtF,KAAK,OAAO,OAAS,MAAK,OAAO,KAAK,WAAW,KAAK,OAAO,KAAK,IAAI,UAAe,IAAA,KAAK,KAAK,UAAU;QACzG,KAAK,OAAO,UAAY,MAAK,OAAO,KAAK,WAAW,KAAK,OAAO,KAAK,OAAO,UAAkB,OAAA,KAAK,KAAK,UAAU;QAClH,KAAK,OAAO,WAAa,MAAK,OAAO,KAAK,WAAW,KAAK,OAAO,KAAK,QAAQ,UAAmB,QAAA,KAAK,KAAK,UAAU;QACrH,KAAK,OAAO,WAAY,MAAK,OAAO,KAAK,UAAkB,QAAA,KAAK,KAAK,UAAU;QAC/E,KAAK,OAAO,YAAa,MAAK,OAAO,KAAK,UAAmB,SAAA,KAAK,KAAK,OAAO,QAAQ;;;AAGxF,UAAI,KAAK,OAAO,KAAK,WAAW,CAAC,KAAK,OAAO;AAAU,aAAK,OAAO,WAAW,MAAe,SAAA,KAAK,KAAK,OAAO;AAC9G,UAAI,KAAK,OAAO,KAAK,WAAW,KAAK,OAAO,KAAK,IAAI,WAAW,CAAC,KAAK,OAAO;AAAK,aAAK,OAAO,MAAM,MAAU,IAAA,KAAK,KAAK;AACxH,UAAI,KAAK,OAAO,KAAK,WAAW,KAAK,OAAO,KAAK,OAAO,WAAW,CAAC,KAAK,OAAO;AAAQ,aAAK,OAAO,SAAS,MAAa,OAAA,KAAK,KAAK;AACpI,UAAI,KAAK,OAAO,KAAK,WAAW,KAAK,OAAO,KAAK,QAAQ,WAAW,CAAC,KAAK,OAAO;AAAS,aAAK,OAAO,UAAU,MAAc,QAAA,KAAK,KAAK;AACxI,UAAI,KAAK,OAAO,KAAK,WAAW,CAAC,KAAK,OAAO;AAAS,aAAK,OAAO,UAAU,MAAc,QAAA,KAAK,KAAK;AACpG,UAAI,KAAK,OAAO,KAAK,WAAW,CAAC,KAAK,OAAO;AAAU,aAAK,OAAO,WAAW,MAAe,SAAA,KAAK,KAAK,OAAO;;AAEhH,oBAAgB,KAAK,MAAM,SAAQ;AACnC,QAAI,UAAW,MAAK,KAAK,QAAQ;AAAI,WAAK,KAAK,OAAO;;QAIlD;AACJ,sBAAkB;AAClB,QAAI,KAAK,OAAO,WAAY,KAAK,OAAO,YAAY,MAAO,SAAU,GAAG,iBAAiB,KAAK,OAAO;AACnG,WAAK,QAAQ;AAWb,WAAK,IAAI,oBAAoB,KAAK,OAAO;AAEzC,UAAI,KAAK,OAAO,YAAY;AAC1B,aAAK,IAAI,uBAAuB,KAAK,OAAO;AAC5C,qBAAa,KAAK,OAAO;AACzB,qBAAa,MAAM,GAAG,MAAM,SAAS;AACrC,YAAI,CAAC;AAAM,eAAK,IAAI;;AAGtB,YAAM,GAAG,WAAW,KAAK,OAAO;AAChC,SAAG;AAIH,UAAI,KAAK,OAAO,YAAY;AAC1B,YAAI,KAAK,OAAO;AACd,eAAK,IAAI,mDAAmD,KAAK,OAAO;AACxE,aAAG,IAAI,IAAI,kCAAkC,KAAK,OAAO,aAAa,IAAI;;AAG5E,WAAG,IAAI,IAAI,4BAA4B;;AAEzC,YAAM,GAAG;;AAEX,oBAAgB,KAAK,MAAM,SAAQ;AACnC,QAAI,UAAW,MAAK,KAAK,WAAW;AAAI,WAAK,KAAK,UAAU;;QAGxD;AAGJ;AACA;AACA;AACA;AACA,oBAAgB;AAChB,SAAK,QAAQ;AACb,gBAAY;AACZ,kBAAc,MAAM,KAAK,OAAO,SAAS,cAAc,OAAO,KAAK,OAAO;AAC1E,SAAK,KAAK,OAAO,KAAK,MAAM,SAAQ;AACpC,wBAAmB;AACjB,WAAK,QAAQ;AAEb,UAAI,CAAC,MAAK,SAAS,MAAK,MAAM;AAC5B,aAAK,IAAI,4BAA4B,MAAK;AAC1C;;AAGF,WAAK,QAAQ;AACb,UAAI,KAAK,OAAO;AACd,iBAAS,KAAK,OAAO,KAAK,IAAI,UAAc,IAAA,QAAQ,MAAK,OAAO,KAAK,UAAU;;AAE/E,aAAK,QAAQ;AACb,oBAAY;AACZ,iBAAS,KAAK,OAAO,KAAK,IAAI,UAAU,MAAU,IAAA,QAAQ,MAAK,OAAO,KAAK,UAAU;AACrF,aAAK,KAAK,MAAM,KAAK,MAAM,SAAQ;;AAIrC,WAAK,QAAQ;AACb,UAAI,KAAK,OAAO;AACd,oBAAY,KAAK,OAAO,KAAK,OAAO,UAAiB,OAAA,QAAQ,MAAK,OAAO,KAAK,UAAU;;AAExF,aAAK,QAAQ;AACb,oBAAY;AACZ,oBAAY,KAAK,OAAO,KAAK,OAAO,UAAU,MAAa,OAAA,QAAQ,MAAK,OAAO,KAAK,UAAU;AAC9F,aAAK,KAAK,SAAS,KAAK,MAAM,SAAQ;;AAGxC,WAAK,QAAQ;AACb,UAAI,KAAK,OAAO;AACd,qBAAa,KAAK,OAAO,KAAK,QAAQ,UAAkB,QAAA,QAAQ,MAAK,OAAO,KAAK,UAAU;;AAE3F,aAAK,QAAQ;AACb,oBAAY;AACZ,qBAAa,KAAK,OAAO,KAAK,QAAQ,UAAU,MAAc,QAAA,QAAQ,MAAK,OAAO,KAAK,UAAU;AACjG,aAAK,KAAK,UAAU,KAAK,MAAM,SAAQ;;AAEzC,WAAK,QAAQ;AAGb,UAAI,KAAK,OAAO;AACd,SAA
C,QAAQ,WAAW,cAAc,MAAM,QAAQ,IAAI,CAAC,QAAQ,WAAW;;AAG1E,WAAK,QAAQ;AAEb,YAAK,MAAM;AAIX,uBAAkB,MAAK,YAAY,eAAe,MAAK,YAAY,eAE/D,OAAO,KAAK,IAAI,KAAK,IAAI,MAAK,YAAY,YAAY,GAAG,KAAK,MAAK,YAAY,YAAY,GAAG,KAAK,KAAK,IAAI,MAAK,YAAY,aAAa,GAAG,KAAK,MAAK,YAAY,aAAa,GAAG,OACnL;AAGJ,cAAQ,KAAK;QACX,YAAY,MAAK;QACjB,KAAK,MAAK;QACV,MAAM,MAAK;QACX,aAAa,MAAK;QAClB,KAAK,OAAO;QACZ,QAAQ,UAAU;QAClB,kBAAkB,UAAU;QAC5B,SAAS;QACT,MAAO,aAAa,IAAK,KAAK,MAAM,YAAY,MAAM;;AAExD,WAAK,QAAQ;;AAEf,SAAK,QAAQ;AACb,QAAI,KAAK,OAAO;AACd,UAAI,KAAK,KAAK;AAAM,eAAO,KAAK,KAAK;AACrC,UAAI,KAAK,KAAK;AAAK,eAAO,KAAK,KAAK;AACpC,UAAI,KAAK,KAAK;AAAQ,eAAO,KAAK,KAAK;AACvC,UAAI,KAAK,KAAK;AAAS,eAAO,KAAK,KAAK;;AAE1C,WAAO;;QAGH,2BAA0B;AAC9B,SAAK,QAAQ;AACb,SAAK,SAAS,UAAU,KAAK,QAAQ;AACrC,qBAAsB,MAAA,QAAQ,OAAO,KAAK;AAC1C,aAAQ,OAAO;AACf,WAAO,SAAQ;;QAIX,4BAA2B;AAC/B,SAAK,QAAQ;AACb;AAGA,SAAK,SAAS,UAAU,KAAK,QAAQ;AACrC,QAAI,CAAC,KAAK,OAAO;AAAgB,WAAK,SAAS,UAAU,KAAK,QAAQ;AAGtE,SAAK,QAAQ;AACb,kBAAc,KAAK,OAAO;AAC1B,QAAI;AACF,WAAK,IAAI,OAAO;AAChB,aAAO,CAAE;;AAIX,WAAO,IAAI,QAAQ;AACjB;AACA;AACA;AAEA,wBAAkB;AAGlB,YAAM,KAAK;AAGX,YAAM,KAAK;AAEX,UAAI,KAAK,OAAO;AAAQ,WAAG,SAAS;AACpC,WAAK,QAAQ;AAEb,kBAAY;AACZ,uBAAsB,MAAA,QAAQ,OAAO,KAAK;AAC1C,WAAK,KAAK,QAAQ,KAAK,MAAM,SAAQ;AACrC,WAAK,QAAQ;AAGb,UAAI,KAAK,OAAO;AACd,kBAAU,KAAK,OAAO,KAAK,UAAU,KAAK,WAAW,SAAQ,UAAU;AACvE,YAAI,KAAK,KAAK;AAAM,iBAAO,KAAK,KAAK;;AAErC,aAAK,QAAQ;AACb,oBAAY;AACZ,kBAAU,KAAK,OAAO,KAAK,UAAU,MAAM,KAAK,WAAW,SAAQ,UAAU;AAC7E,aAAK,KAAK,OAAO,KAAK,MAAM,SAAQ;;AAItC,WAAK,QAAQ;AACb,UAAI,KAAK,OAAO;AACd,kBAAU,KAAK,OAAO,KAAK,UAAU,KAAK,OAAO,QAAQ,cAAc,SAAQ,QAAQ,KAAK,UAAU;AACtG,YAAI,KAAK,KAAK;AAAM,iBAAO,KAAK,KAAK;;AAErC,aAAK,QAAQ;AACb,oBAAY;AACZ,kBAAU,KAAK,OAAO,KAAK,UAAU,MAAM,KAAK,OAAO,QAAQ,cAAc,SAAQ,QAAQ,KAAK,UAAU;AAC5G,aAAK,KAAK,OAAO,KAAK,MAAM,SAAQ;;AAEtC,WAAK,QAAQ;AAGb,WAAK,QAAQ;AACb,UAAI,KAAK,OAAO;AACd,kBAAU,KAAK,OAAO,KAAK,UAAU,KAAK,OAAO,SAAS,cAAc,SAAQ,QAAQ,KAAK,OAAO,QAAQ;AAC5G,YAAI,KAAK,KAAK;AAAM,iBAAO,KAAK,KAAK;;AAErC,aAAK,QAAQ;AACb,oBAAY;AACZ,kBAAU,KAAK,OAAO,KAAK,UAAU,MAAM,KAAK,OAAO,SAAS,cAAc,SAAQ,QAAQ,KAAK,OAAO,QAAQ;AAClH,aAAK,KAAK,OAAO,KAAK,MAAM,SAAQ;;AAKtC,UAAI,KAAK,OAAO;AACd,SAAC,SAAS,SAAS,WAAW,MAAM,QAAQ,IAAI,CAAC,SAAS,SAAS;;AAErE,eAAQ,OAAO;AAEf,UAAI,KAAK,OAAO;AAAQ,WAAG,SAAS;AACpC,WAAK,QAAQ;AAEb,uBAAiB;AACjB,UAAI,KAAK,OAAO,QAAQ;AACtB,oBAAY;AACZ,qBAAa,CAAE,MAAc,QAAA,KAAK,UAAU,MAAc,QAAA,KAAK,UAAU,MAAc,QAAA,KAAK;AAC5F,YAAI,CAAC,KAAK,OAAO;AAAO,eAAK,KAAK,UAAU,KAAK,MAAM,SAAQ;iBACtD,KAAK,KAAK;AAAS,iBAAO,KAAK,KAAK;;AAG/C,WAAK,KAAK,QAAQ,KAAK,MAAM,SAAQ;AACrC,WAAK,QAAQ;AACb,cAAQ,CAAE,MAAM,SAAS,MAAM,SAAS,MAAM,SAAS,SAAS,YAAY,aAAa,KAAK,MAAM,QAAQ,SAAQ;;;QAIlH;AACJ,mBAAe,IAAI,UAAU,KAAK;AAClC,UAAM,KAAK,OAAO,QAAQ;AAC1B,SAAK,IAAI;;;;;AC5Yb,2BAA2B;AACzB,MAAI,CAAC;AAAQ;AACb,cAAY,OAAO,WAAW;AAC9B,MAAI,OAAO,IAAG;AACd,MAAI,YAAY,IAAG;AACnB,UAAQ;AACR,2BAAyB,OAAO,QAAQ;AACtC,QAAI,IAAI,SAAS;AACf,oBAAc,GAAG,QAAQ,IAAI,KAAK;AAClC,UAAI,SAAS,OAAO,GAAG,IAAK,KAAG,iBAAiB;AAChD,WAAK;AAAA;AAAA;AAAA;AAKX;AACE,MAAI,CAAC;AAAQ;AACb,cAAY,OAAO,WAAW;AAC9B,qBAAmB;AACjB,QAAI,OAAO,IAAG;AACd,QAAI,cAAc,IAAG;AACrB,QAAI,YAAY,IAAG;AACnB,QAAI,YAAY,IAAG;AACnB,QAAI;AACJ,QAAI,IAAG;AACL,UAAI,KAAK,KAAK,IAAI,IAAI,KAAK,IAAI,IAAI,KAAK,IAAI,IAAI,KAAK,IAAI;AAAA;AAG3D,mBAAe;AAEf,QAAI,KAAK;AAAkB,aAAO,KAAK,GAAG,KAAK,MAAM,MAAM,KAAK,sBAAsB,KAAK,UAAU;AACrG,QAAI,KAAK;AAAK,aAAO,KAAK,QAAQ,KAAK,OAAO;AAC9C,QAAI,KAAK;AAAM,aAAO,KAAK,SAAS,KAAK;AACzC,QAAI,KAAK,WAAW,KAAK,QAAQ,SAAS;AACxC,uBAAgB,KAAK,QAAQ,IAAI,OAAO,GAAG,KAAK,MAAM,MAAM,EAAE,WAAW,EAAE;AAC3E,aAAO,KAAK,SAAQ,KAAK;AAAA;AAE3B,QAAI,YAAY,IAAG;AACnB,oBAAgB;AAAQ,UAAI,SAAS,OAAO,IAAI,KAAK,IAAI,KAAK,GAAG,KAAK,IAAI,KAAK,KAAO,KAAI,KAAK,IAAG;AAClG,QAAI;
AACJ,QAAI,YAAY;AAChB,QAAI,KAAK;AACP,UAAI,IAAG;AACL,4BAAoB,KAAK;AACvB,cAAI,YAAY,IAAG,WAAW,QAAQ,QAAS,IAAI,MAAM,OAAQ,QAAS,IAAI,MAAM,kBAAmB,IAAG;AAC1G,cAAI;AACJ,cAAI,IAAI,MAAM,IAAI,MAAM,IAAI,GAAG,GAAG,IAAI,KAAK;AAC3C,cAAI;AAAA;AAAA;AAGR,UAAI,IAAG;AACL,qBAAa,GAAG,IAAI,cAAc,SAAS,GAAG;AAC5C,yBAAe;AAAA,YACb,cAAc,IAAI,IAAI;AAAA,YACtB,cAAc,IAAI,IAAI;AAAA,YACtB,cAAc,IAAI,IAAI;AAAA,YACtB,IAAI,WAAW,KAAK,KAAK;AAC3B,uBAAa,IAAI;AACjB,eAAK,OAAO,OAAO,GAAG,IAAI,OAAO,GAAG;AACpC,8BAAoB;AAClB,iBAAK,OAAO,MAAM,IAAI,MAAM;AAAA;AAE9B,eAAK;AACL,cAAI,cAAc,IAAG,WAAW,QAAQ,QAAS,IAAI,OAAO,GAAG,OAAQ,QAAS,IAAI,OAAO,GAAG,kBAAmB,IAAG;AACpH,cAAI,OAAO;AACX,cAAI,IAAG;AACL,gBAAI,YAAY,IAAG,WAAW,QAAQ,QAAS,IAAI,OAAO,GAAG,OAAQ,QAAS,IAAI,OAAO,GAAG,kBAAmB,IAAG;AAClH,gBAAI,KAAK;AAAA;AAAA;AAIb,YAAI,KAAK,eAAe,KAAK,YAAY;AACvC,cAAI,cAAc,IAAG,WAAW,6BAA6B,IAAG;AAChE,cAAI;AACJ,wBAAc,KAAK,IAAI,KAAK,YAAY,YAAY,GAAG,KAAK,KAAK,YAAY,YAAY,GAAG,MAAM;AAClG,wBAAc,KAAK,IAAI,KAAK,YAAY,YAAY,GAAG,KAAK,KAAK,YAAY,YAAY,GAAG,MAAM;AAClG,cAAI,QAAQ,KAAK,YAAY,YAAY,GAAG,IAAI,KAAK,YAAY,YAAY,GAAG,IAAI,OAAO,OAAO,GAAG,GAAG,IAAI,KAAK;AACjH,cAAI;AACJ,cAAI,IAAG;AACL,gBAAI,YAAY,IAAG,WAAW,6BAA6B,IAAG;AAC9D,gBAAI;AAAA;AAAA;AAGR,YAAI,KAAK,eAAe,KAAK,YAAY;AACvC,cAAI,cAAc,IAAG,WAAW,6BAA6B,IAAG;AAChE,cAAI;AACJ,wBAAc,KAAK,IAAI,KAAK,YAAY,aAAa,GAAG,KAAK,KAAK,YAAY,aAAa,GAAG,MAAM;AACpG,wBAAc,KAAK,IAAI,KAAK,YAAY,aAAa,GAAG,KAAK,KAAK,YAAY,aAAa,GAAG,MAAM;AACpG,cAAI,QAAQ,KAAK,YAAY,aAAa,GAAG,IAAI,KAAK,YAAY,aAAa,GAAG,IAAI,OAAO,OAAO,GAAG,GAAG,IAAI,KAAK;AACnH,cAAI;AACJ,cAAI,IAAG;AACL,gBAAI,YAAY,IAAG,WAAW,6BAA6B,IAAG;AAC9D,gBAAI;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAQhB,sBAAsB;AACtB;AACE,MAAI,CAAC;AAAQ;AACb,cAAY,OAAO,WAAW;AAC9B,MAAI,WAAW;AACf,kBAAgB;AACd,QAAI,CAAC,cAAc,MAAM,IAAG;AAAU,oBAAc,KAAK,IAAK,OAAO;AACrE,QAAI,YAAY,IAAG;AACnB,QAAI,cAAc,IAAG;AACrB,QAAI,OAAO,IAAG;AACd,QAAI,YAAY,IAAG;AACnB,QAAI,IAAG;AACL,uBAAiB,OAAO,GAAG;AACzB,YAAI;AACJ,YAAI,IAAG;AACL,wBAAc,GAAG,UAAU,IAAI,SAAS,IAAK,eAAc,GAAG,UAAU,IAAI,SAAS,IAAI,OAAO,GAAG,UAAU,IAAI,SAAS,KAAK;AAC/H,wBAAc,GAAG,UAAU,IAAI,SAAS,IAAK,eAAc,GAAG,UAAU,IAAI,SAAS,IAAI,OAAO,GAAG,UAAU,IAAI,SAAS,KAAK;AAC/H,cAAI,IAAI,cAAc,GAAG,UAAU,IAAI,SAAS,GAAG,cAAc,GAAG,UAAU,IAAI,SAAS,GAAG,GAAG,GAAG,IAAI,KAAK;AAAA;AAE7G,cAAI,IAAI,OAAO,GAAG,UAAU,IAAI,SAAS,GAAG,OAAO,GAAG,UAAU,IAAI,SAAS,GAAG,GAAG,GAAG,IAAI,KAAK;AAAA;AAEjG,YAAI;AAAA;AAAA;AAGR,QAAI,IAAG;AACL,mBAAa,IAAI;AACjB;AAEA,aAAO,OAAO,GAAG,UAAU,KAAK,OAAO,EAAE,SAAS;AAClD,WAAK,OAAO,KAAK,SAAS,GAAG,KAAK,SAAS;AAC3C,aAAO,OAAO,GAAG,UAAU,KAAK,OAAO,EAAE,SAAS;AAClD,WAAK,OAAO,KAAK,SAAS,GAAG,KAAK,SAAS;AAC3C,aAAO,OAAO,GAAG,UAAU,KAAK,OAAO,EAAE,SAAS;AAClD,WAAK,OAAO,KAAK,SAAS,GAAG,KAAK,SAAS;AAC3C,aAAO,OAAO,GAAG,UAAU,KAAK,OAAO,EAAE,SAAS;AAClD,WAAK,OAAO,KAAK,SAAS,GAAG,KAAK,SAAS;AAC3C,aAAO,OAAO,GAAG,UAAU,KAAK,OAAO,EAAE,SAAS;AAClD,WAAK,OAAO,KAAK,SAAS,GAAG,KAAK,SAAS;AAE3C,aAAO,OAAO,GAAG,UAAU,KAAK,OAAO,EAAE,SAAS;AAClD,WAAK,OAAO,KAAK,SAAS,GAAG,KAAK,SAAS;AAC3C,aAAO,OAAO,GAAG,UAAU,KAAK,OAAO,EAAE,SAAS;AAClD,WAAK,OAAO,KAAK,SAAS,GAAG,KAAK,SAAS;AAC3C,aAAO,OAAO,GAAG,UAAU,KAAK,OAAO,EAAE,SAAS;AAClD,WAAK,OAAO,KAAK,SAAS,GAAG,KAAK,SAAS;AAC3C,aAAO,OAAO,GAAG,UAAU,KAAK,OAAO,EAAE,SAAS;AAClD,WAAK,OAAO,KAAK,SAAS,GAAG,KAAK,SAAS;AAC3C,aAAO,OAAO,GAAG,UAAU,KAAK,OAAO,EAAE,SAAS;AAClD,WAAK,OAAO,KAAK,SAAS,GAAG,KAAK,SAAS;AAC3C,aAAO,OAAO,GAAG,UAAU,KAAK,OAAO,EAAE,SAAS;AAClD,WAAK,OAAO,KAAK,SAAS,GAAG,KAAK,SAAS;AAE3C,aAAO,OAAO,GAAG,UAAU,KAAK,OAAO,EAAE,SAAS;AAClD,WAAK,OAAO,KAAK,SAAS,GAAG,KAAK,SAAS;AAC3C,aAAO,OAAO,GAAG,UAAU,KAAK,OAAO,EAAE,SAAS;AAClD,WAAK,OAAO,KAAK,SAAS,GAAG,KAAK,SAAS;AAC3C,aAAO,OAAO,GAAG,UAAU,KAAK,OAAO,EAAE,SAAS;AAClD,WAAK,OAAO,KAAK,SAAS,GAAG,KAAK,SAAS;AAC3C,aAAO,OAAO,
GAAG,UAAU,KAAK,OAAO,EAAE,SAAS;AAClD,WAAK,OAAO,KAAK,SAAS,GAAG,KAAK,SAAS;AAE3C,aAAO,OAAO,GAAG,UAAU,KAAK,OAAO,EAAE,SAAS;AAClD,WAAK,OAAO,KAAK,SAAS,GAAG,KAAK,SAAS;AAC3C,aAAO,OAAO,GAAG,UAAU,KAAK,OAAO,EAAE,SAAS;AAClD,WAAK,OAAO,KAAK,SAAS,GAAG,KAAK,SAAS;AAC3C,aAAO,OAAO,GAAG,UAAU,KAAK,OAAO,EAAE,SAAS;AAClD,WAAK,OAAO,KAAK,SAAS,GAAG,KAAK,SAAS;AAC3C,aAAO,OAAO,GAAG,UAAU,KAAK,OAAO,EAAE,SAAS;AAClD,WAAK,OAAO,KAAK,SAAS,GAAG,KAAK,SAAS;AAE3C,UAAI,OAAO;AAAA;AAAA;AAAA;AAKjB;AACE,MAAI,CAAC;AAAQ;AACb,cAAY,OAAO,WAAW;AAC9B,MAAI,WAAW;AACf,qBAAmB;AACjB,QAAI,OAAO,IAAG;AACd,QAAI,YAAY,IAAG;AACnB,QAAI,IAAG;AACL,UAAI,YAAY,IAAG;AACnB,UAAI;AACJ,UAAI,cAAc,IAAG;AACrB,UAAI,YAAY,IAAG;AACnB,UAAI,KAAK,KAAK,IAAI,IAAI,KAAK,IAAI,IAAI,KAAK,IAAI,IAAI,KAAK,IAAI;AACzD,UAAI,YAAY,IAAG;AACnB,UAAI,SAAS,QAAQ,KAAK,IAAI,KAAK,GAAG,KAAK,IAAI,KAAK,IAAI,KAAK,IAAI;AACjE,UAAI;AAAA;AAEN,QAAI,IAAG;AACL,UAAI,KAAK,aAAa,KAAK,UAAU,SAAS;AAC5C,4BAAoB,KAAK;AACvB,cAAI,YAAY,IAAG,WAAW,QAAQ,QAAS,IAAI,MAAM,OAAQ,QAAS,IAAI,MAAM,kBAAmB,IAAG;AAC1G,cAAI;AACJ,cAAI,IAAI,MAAM,IAAI,MAAM,IAAI,GAAG,GAAG,IAAI,KAAK;AAC3C,cAAI;AAAA;AAAA;AAAA;AAIV,QAAI,IAAG;AACL,sBAAgB;AACd,YAAI,CAAC;AAAM;AACX,qBAAa,GAAG,IAAI,KAAK,QAAQ;AAC/B,cAAI,YAAY,IAAG;AACnB,cAAI;AACJ,cAAI,cAAc,IAAG,WAAW,QAAQ,QAAS,IAAI,KAAK,GAAG,OAAQ,QAAS,IAAI,KAAK,GAAG,kBAAmB,IAAG;AAChH,cAAI,OAAO,KAAK,IAAI,IAAI,IAAI,IAAI,GAAG,IAAI,KAAK,IAAI,IAAI,IAAI,IAAI,GAAG;AAC/D,cAAI,OAAO,KAAK,GAAG,IAAI,KAAK,GAAG;AAC/B,cAAI;AAAA;AAAA;AAGR,cAAQ,KAAK,YAAY;AACzB,cAAQ,KAAK,YAAY;AACzB,cAAQ,KAAK,YAAY;AACzB,cAAQ,KAAK,YAAY;AACzB,cAAQ,KAAK,YAAY;AAAA;AAAA;AAAA;AAM/B,aAAa;AAAA,EACX,MAAM;AAAA,EACN,MAAM;AAAA,EACN,MAAM;AAAA,EACN,SAAS;AAAA;AAGX,mBAAe;;;ACvOf,IAAI,WAAW;AACf,iBAAiB;AAEjB,YAAY;AAAA,EACV,YAAY;AAAA,EACZ,OAAO;AAAA,EACP,gBAAgB;AAAA,EAChB,WAAW;AAAA,EACX,kBAAkB;AAAA,EAClB,aAAa;AAAA,EACb,YAAY;AAAA,EACZ,aAAa;AAAA,EACb,iBAAiB;AAAA,EACjB,YAAY;AAAA,EACZ,YAAY;AAAA;AAGd;AACE,MAAI;AAAY;AAChB,cAAY;AAAA;AAAA;AAAA,qDAGuC,MAAM;AAAA;AAAA,sCAErB,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kCAWV,MAAM,0BAA0B,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0CAM9B,MAAM;AAAA;AAAA,qCAEX,MAAM;AAAA;AAAA;AAAA,8DAGmB,MAAM;AAAA,kDAClB,MAAM;AAAA,kDACN,MAAM;AAAA,kJAC0F,MAAM;AAAA;AAAA;AAAA;AAAA,qEAInF,MAAM;AAAA;AAAA,qFAEU,MAAM;AAAA,gCAC3D,MAAM;AAAA;AAAA;AAAA,+GAGyE,MAAM;AAAA,kGACnB,MAAM;AAAA,wKACgE,MAAM;AAAA,oKACV,MAAM;AAAA;AAAA;AAAA;AAAA;AAKxK,aAAW,SAAS,cAAc;AAClC,KAAG,YAAY;AACf,WAAS,qBAAqB,QAAQ,GAAG,YAAY;AACrD,eAAa;AAAA;AAtEf;AAAA,EA0EE;AACE,QAAI;AAAW,cAAQ,IAAK,UAAU;AACtC;AACA,SAAK,WAAW,QAAQ,OAAO;AAC/B,SAAK,KAAK;AACV,SAAK,WAAW;AAChB;AACA,SAAK,UAAU;AACf,SAAK,SAAS;AAAA;AAAA,EAGhB,2BAA2B,eAAe,CAAE,KAAK,MAAM,MAAM,MAAM,QAAQ,MAAM,OAAO;AACtF,SAAK,OAAO,SAAS,cAAc;AACnC,SAAK,KAAK,KAAK,QAAQ;AACvB,SAAK,KAAK,YAAY;AACtB,QAAI;AACF,UAAI,SAAS;AAAK,aAAK,KAAK,MAAM,MAAM,SAAS;AACjD,UAAI,SAAS;AAAQ,aAAK,KAAK,MAAM,SAAS,SAAS;AACvD,UAAI,SAAS;AAAM,aAAK,KAAK,MAAM,OAAO,SAAS;AACnD,UAAI,SAAS;AAAO,aAAK,KAAK,MAAM,QAAQ,SAAS;AAAA;AAGvD,SAAK,YAAY,SAAS,cAAc;AACxC,SAAK,UAAU,KAAK,kBAAkB;AACtC,SAAK,UAAU,YAAY;AAG3B,oBAAgB,SAAS,cAAc;AACvC,YAAQ,YAAY;AACpB,YAAQ,KAAK,cAAc;AAC3B,gBAAY;AAAA;AAAA;AAAA;AAIZ,YAAQ,YAAY,GAAG,QAAQ;AAC/B,SAAK,KAAK,YAAY;AACtB,YAAQ,iBAAiB,SAAS;AAChC,WAAK,UAAU,UAAU,OAAO;AAChC,WAAK,UAAU,UAAU,OAAO;AAChC,WAAK,KAAK,MAAM,cAAc,KAAK,UAAU,UAAU,SAAS,4BAA4B,SAAS;AAAA;AAGvG,SAAK,KAAK,YAAY,KAAK;AAC3B,QAAI,OAAO,WAAW;AAAU,aAAO,YAAY,KAAK;AAAA;AACnD,eAAS,eAAe,QAAQ,YAAY,KAAK;AAAA;AAAA,MAGpD;AACF,SAAK;AACL,WAAO,QAAQ,KAAK,YAAY,KAAK;AAAA;AAAA,MAGnC;AACF,WAAO,QAAQ,KAAK,YAAY,KAAK;AAAA;AAAA,MAGnC;AACF,WAAO,KAAK,KAAK;AAAA;AAAA,MAGf;AACF,WAAO,KAAK,KAAK;AAAA;AAAA,EAGnB;AACE,QAAI,KAAK,UAAU,UAAU,SAAS;AACpC,WAAK,UAAU,
UAAU,OAAO;AAChC,WAAK,UAAU,UAAU,OAAO;AAAA;AAAA;AAAA,EAIpC;AACE,WAAQ,KAAK,UAAU,UAAU,SAAS;AAAA;AAAA,EAG5C;AACE,SAAK,UAAU,UAAU,OAAO;AAChC,SAAK,UAAU,UAAU,OAAO;AAChC,QAAI,KAAK,UAAU,UAAU,SAAS,4BAA4B;AAChE,gBAAU,IAAI,KAAM,KAAI,WAAW,IAAI,QAAQ,KAAK,IAAI,QAAQ,GAAG,QAAQ;AAC3E,gBAAU,IAAI,KAAM,KAAI,WAAW,IAAI,QAAQ,KAAK,IAAI,QAAQ,GAAG,QAAQ;AAC3E,UAAI;AAAG,aAAK,KAAK,MAAM,OAAO,GAAG,IAAI;AACrC,UAAI;AAAG,aAAK,KAAK,MAAM,MAAM;AAC7B,UAAI,KAAK,KAAK,aAAa;AAAG,aAAK,KAAK,MAAM,OAAO;AACrD,UAAK,KAAK,KAAK,aAAa,KAAK,KAAK,cAAe,OAAO;AAC1D,aAAK,KAAK,MAAM,OAAO;AACvB,aAAK,KAAK,MAAM,QAAQ;AAAA;AAE1B,WAAK,KAAK,MAAM,cAAc;AAAA;AAE9B,WAAK,KAAK,MAAM,cAAc;AAAA;AAAA;AAAA,EAIlC;AACE,eAAW,SAAS,cAAc;AAClC,OAAG,YAAY;AACf,OAAG,KAAK,KAAK;AACb,OAAG,YAAY;AACf,SAAK,KAAK,YAAY;AACtB,OAAG,iBAAiB,SAAS;AAC3B,WAAK,SAAS,CAAC,KAAK;AACpB,kBAAY,SAAS,uBAAuB;AAC5C,yBAAmB;AACjB,aAAK,MAAM,UAAU,KAAK,SAAS,SAAS;AAAA;AAAA;AAGhD,WAAO;AAAA;AAAA,EAGT;AACE,eAAW,SAAS,cAAc;AAClC,OAAG,YAAY;AACf,OAAG,KAAK,KAAK;AACb,OAAG,YAAY;AACf,SAAK,UAAU,YAAY;AAC3B,WAAO;AAAA;AAAA,EAGT;AACE,eAAW,SAAS,cAAc;AAClC,OAAG,YAAY;AACf,OAAG,YAAY,+EAA+E,KAAK,UAAU,OAAO,YAAY,YAAY,+CAA+C,KAAK,qBAAqB;AACrN,SAAK,UAAU,YAAY;AAC3B,OAAG,iBAAiB,UAAU;AAC5B,aAAO,YAAY,IAAI,OAAO;AAC9B,UAAI;AAAU,iBAAS,IAAI,OAAO;AAAA;AAEpC,WAAO;AAAA;AAAA,QAGH;AACJ,eAAW,SAAS,cAAc;AAClC,OAAG,YAAY;AACf,kBAAc;AACd,uBAAmB;AACjB,kBAAY,SAAS,WAAW,aAAa;AAC7C,iBAAW,kBAAkB,SAAS,OAAO;AAAA;AAE/C,OAAG,YAAY,wCAAwC,KAAK,8BAA8B,+BAA+B,KAAK,qBAAqB;AACnJ,OAAG,MAAM,aAAa,SAAS,KAAK,MAAM;AAC1C,OAAG,MAAM,WAAW,SAAS,KAAK,MAAM;AACxC,OAAG,MAAM,cAAc,SAAS,KAAK,MAAM;AAC3C,SAAK,UAAU,YAAY;AAC3B,OAAG,iBAAiB,UAAU;AAC5B,UAAI;AAAU,iBAAS,MAAM,IAAI,OAAO;AAAA;AAE1C,WAAO;AAAA;AAAA,EAGT;AACE,eAAW,SAAS,cAAc;AAClC,OAAG,YAAY;AACf,OAAG,YAAY,8CAA8C,KAAK,eAAe,cAAa,eAAc,iBAAgB,OAAO,cAAc;AACjJ,SAAK,UAAU,YAAY;AAC3B,OAAG,iBAAiB,UAAU;AAC5B,aAAO,YAAY,SAAS,IAAI,OAAO,WAAW,WAAW,IAAI,OAAO,SAAS,SAAS,IAAI,OAAO,SAAS,WAAW,IAAI,OAAO;AACpI,UAAI,OAAO,aAAa,SAAS,IAAI,OAAO;AAC5C,UAAI;AAAU,iBAAS,IAAI,OAAO;AAAA;AAEpC,OAAG,QAAQ,GAAG,SAAS;AACvB,WAAO;AAAA;AAAA,EAGT;AACE,eAAW,SAAS,cAAc;AAClC,OAAG,YAAY;AACf,OAAG,KAAK,KAAK;AACb,QAAI;AAAM,SAAG,YAAY;AACzB,SAAK,UAAU,YAAY;AAC3B,WAAO;AAAA;AAAA,EAGT;AACE,eAAW,SAAS,cAAc;AAClC,OAAG,YAAY;AACf,OAAG,MAAM,aAAa,SAAS,KAAK,MAAM;AAC1C,OAAG,MAAM,WAAW,SAAS,KAAK,MAAM;AACxC,OAAG,MAAM,cAAc,SAAS,KAAK,MAAM;AAC3C,OAAG,OAAO;AACV,OAAG,KAAK,KAAK;AACb,OAAG,YAAY;AACf,SAAK,UAAU,YAAY;AAC3B,OAAG,iBAAiB,SAAS;AAC3B,UAAI,GAAG,cAAc;AAAS,WAAG,YAAY;AAAA;AACxC,WAAG,YAAY;AACpB,UAAI;AAAU,iBAAS,GAAG,cAAc;AAAA;AAE1C,WAAO;AAAA;AAAA,EAGT,8BAA8B;AAC5B,eAAW,SAAS,cAAc;AAClC,OAAG,YAAY;AACf,OAAG,KAAK,YAAY;AACpB,OAAG,YAAY,GAAG,UAAU,MAAM;AAClC,SAAK,UAAU,YAAY;AAC3B,WAAO;AAAA;AAAA,EAIT,iCAAiC;AAC/B,eAAW,SAAS,eAAe,YAAY;AAC/C,QAAI;AAAI,SAAG,YAAY,GAAG,UAAU,MAAM;AAAA;AACrC,WAAK,SAAS,OAAO;AAAA;AAAA,EAG5B,4BAA4B,cAAc;AACxC,QAAI;AAAO,YAAM,aAAa;AAC9B,eAAW,SAAS,cAAc;AAClC,OAAG,YAAY;AACf,OAAG,KAAK,KAAK;AACb,OAAG,YAAY,eAAe,MAAM,cAAc,uCAAuC,wCAAwC,oBAAoB;AACrJ,SAAK,UAAU,YAAY;AAC3B,WAAO;AAAA;AAAA,QAIH;AACJ,QAAI,CAAC,UAAW,OAAO,WAAW;AAAI;AACtC,mBAAe,SAAS,eAAe,eAAe;AACtD,QAAI,CAAC;AAAQ;AACb,gBAAY,OAAO,WAAW;AAC9B,QAAI,YAAY,MAAM;AACtB,QAAI,SAAS,GAAG,GAAG,OAAO,OAAO,OAAO;AACxC,kBAAc,OAAO,QAAQ,OAAO;AACpC,iBAAY,IAAI,KAAK,IAAI,GAAG;AAC5B,mBAAe,OAAO,SAAS;AAC/B,oBAAgB;AACd,uBAAiB,IAAI,qBAAqB,GAAI,QAAM,OAAO,MAAM,QAAQ,GAAG;AAC5E,eAAS,aAAa,KAAK,MAAM;AACjC,eAAS,aAAa,KAAK,MAAM;AACjC,UAAI,YAAY;AAChB,UAAI,SAAS,IAAI,OAAO,GAAG,QAAQ,GAAG,OAAO;AAC7C,UAAI,YAAY,MAAM;AACtB,UAAI,OAAO,GAAG,QAAQ;AACtB,UAAI,SAAS,KAAK,MAAM,OAAO,KAAK,IAAI,QAAQ,GAAG,OAAO,SAAS,GAAG,QAAQ;AAAA;AAAA;AAAA;AAKpF,mBAAe;;;ACvTf,MAAM,aAAa;AAEnB,cAAc,IAAI,MAAM;AAGxB,WAAW;AAAA,E
ACT,WAAW;AAAA,EACX,gBAAgB;AAAA,EAChB,WAAW;AAAA,EACX,eAAe;AAAA,EACf,eAAe;AAAA,EACf,qBAAqB;AAAA,EACrB,MAAM;AAAA,EACN,SAAS;AAAA,EACT,MAAM;AAAA,EACN,QAAQ;AAAA,EACR,WAAW;AAAA,EACX,QAAQ;AAAA,EACR,SAAS,CAAC,yBAAyB,yBAAyB,yBAAyB,yBAAyB,yBAAyB;AAAA,EACvI,WAAW;AAAA,EACX,YAAY;AAAA,EACZ,cAAc;AAAA,EACd,cAAc;AAAA,EACd,UAAU;AAAA,EACV,SAAS;AAAA,EACT,cAAc;AAAA,EACd,eAAe;AAAA,EACf,cAAc;AAAA,EACd,WAAW;AAAA,EACX,YAAY;AAAA,EACZ,QAAQ;AAAA,EACR,KAAK;AAAA,EACL,UAAU;AAAA,EACV,mBAAmB;AAAA,EACnB,YAAY;AAAA,EACZ,YAAY;AAAA,EACZ,cAAc;AAAA;AAIhB;AACA;AACA;AACA,yBAAyB;AAGzB;AACE,MAAI,CAAC,MAAM,QAAQ;AAAM,WAAO;AAChC,aAAW;AACX,sBAAoB;AAClB,QAAI,OAAO,UAAU;AAAU,cAAQ,KAAK,UAAU,OAAO,QAAQ,gBAAgB,IAAI,QAAQ,MAAM;AAAA;AAClG,cAAQ;AAAA;AAEf,SAAO;AAAA;AAIT,aAAY;AAEV,MAAI,GAAG;AAAS,YAAQ,IAAI,GAAG;AAAA;AAGjC,eAAe;AAEb,WAAS,eAAe,UAAU,YAAY;AAAA;AAIhD;AACE,iBAAe;AACf,iBAAe,SAAS,eAAe;AAIvC,KAAG,IAAI,KAAK,MAAO,OAAO,YAAY;AACtC,MAAI,GAAG,IAAI,SAAS,GAAG;AAAc,OAAG,IAAI;AAM5C,QAAM,MAAK,YAAY,OAAO,GAAG;AAGjC,SAAO,SAAS,MAAM,MAAM,MAAM,OAAO;AAGzC,cAAY,OAAO,WAAW;AAC9B,MAAI,YAAY,GAAG;AACnB,MAAI,SAAS,GAAG,GAAG,OAAO,OAAO,OAAO;AACxC,MAAI,OAAO;AACT,QAAI,OAAO,OAAO,UAAU,OAAO;AAAO,aAAO,QAAQ,OAAO,OAAO;AACvE,QAAI,OAAO,OAAO,WAAW,OAAO;AAAQ,aAAO,SAAS,OAAO,OAAO;AAC1E,QAAI,UAAU,OAAO,QAAQ,GAAG,GAAG,OAAO,OAAO,OAAO,OAAO,OAAO,QAAQ,GAAG,GAAG,OAAO,OAAO,OAAO,OAAO,OAAO;AAAA;AAEvH,QAAI,UAAU,OAAO,GAAG,GAAG,MAAM,OAAO,MAAM,QAAQ,GAAG,GAAG,OAAO,OAAO,OAAO;AAAA;AAGnF,QAAM,aAAK,KAAK,OAAO,MAAM,QAAQ,IAAI,MAAM,SAAS;AACxD,QAAM,aAAK,KAAK,OAAO,MAAM,QAAQ;AACrC,QAAM,aAAK,KAAK,OAAO,MAAM,QAAQ;AACrC,QAAM,aAAK,QAAQ,OAAO,SAAS,QAAQ;AAE3C,iBAAe,MAAM,GAAG;AACxB,cAAY,OAAO,kBAAkB,QAAS,QAAO,gBAAgB,gBAAgB,OAAO,gBAAgB,gBAAgB,GAAG,2BAA2B;AAC1J,iBAAe,WAAW,OAAO,MAAM,SAAS,0BAA0B,kBAAkB,OAAO,MAAM,WAAW;AACpH,qBAAmB,OAAO,SAAS,eAAe,OAAO,OAAO,WAAW,OAAO,OAAO,WAAW;AACpG,cAAY,KAAK,MAAM,KAAK,GAAG,IAAI,OAAO,UAAU,IAAI,KAAK,GAAG,IAAI,UAAU;AAC9E,kBAAiB,GAAG,IAAI,SAAS,KAAO,MAAM,IAAK,gKAAgK;AACnN,WAAS,eAAe,OAAO,YAAY;AAAA,aAChC,GAAG,OAAO,kBAAkB,GAAG,OAAO,wBAAwB,GAAG,OAAO,WAAW,GAAG,OAAO,UAAU;AAAA,eACrG,MAAM,GAAG,kBAAkB;AAAA,mBACvB,IAAI,OAAO,oBAAoB;AAAA,MAC5C;AAAA;AAGJ,KAAG;AACH,KAAG,YAAY,YAAY;AAE3B,MAAI,GAAG,YAAY,CAAC,GAAG;AAAY,OAAG,aAAa,YAAY,MAAM,YAAY,OAAO,SAAS,MAAO,GAAG;AAE3G,MAAI,CAAC,GAAG,YAAY,GAAG;AACrB,iBAAa,GAAG;AAChB,OAAG,aAAa;AAAA;AAAA;AAKpB;AApIA;AAqIE,MAAI,GAAG;AAAM,WAAO;AACpB,KAAG,OAAO;AACV,gBAAc,SAAS,eAAe;AACtC,iBAAe,SAAS,eAAe;AACvC,iBAAe,SAAS,eAAe;AACvC,eAAa,MAAM,YAAc,MAAM,UAAU,iBAAiB,GAAG,eAAe,UAAY,MAAM,aAAa,KAAO,CAAC,MAAM,SAAW;AAC5I,YAAU;AACV,SAAO;AAEP,MAAI,CAAC,UAAU;AACb,UAAM;AACN,WAAO,aAAa;AAAA,EAAK;AACzB,SAAI;AACJ,WAAO;AACP,WAAO;AAAA;AAET;AACA,sBAAoB;AAAA,IAClB,OAAO;AAAA,IACP,OAAO;AAAA,MACL,YAAY,GAAG,SAAS,SAAS;AAAA,MACjC,YAAY,GAAG,OAAO,mBAAmB;AAAA,MACzC,OAAO,CAAE,OAAO,OAAO;AAAA,MACvB,QAAQ,CAAE,OAAO,OAAO;AAAA;AAAA;AAG5B;AAGE,aAAS,MAAM,UAAU,aAAa,aAAa;AAAA;AAEnD,QAAI,IAAI,SAAS;AAAyB,YAAM;AAAA,aACvC,IAAI,SAAS;AAA0B,YAAM;AAAA;AACjD,YAAM;AACX,WAAO,aAAa;AAAA,EAAK;AACzB,WAAO;AACP,SAAI;AAAA;AAEN,MAAI;AAAQ,UAAM,YAAY;AAAA;AACzB,WAAO;AACZ,gBAAc,OAAO,iBAAiB;AACtC,mBAAiB,MAAM;AAEvB,KAAG,SAAS,CAAE,MAAM,YAAM,UAAN,mBAAa,eAAe,OAAO,SAAS,OAAO,QAAQ,SAAS,QAAQ,QAAQ,SAAS,eAAe,SAAS,UAAU;AACnJ,SAAO,IAAI,QAAQ;AACjB,UAAM,eAAe;AACnB,YAAM,QAAQ,MAAM;AACpB,YAAM,SAAS,MAAM;AACrB,aAAO,QAAQ,MAAM;AACrB,aAAO,SAAS,MAAM;AACtB,aAAO,MAAM,QAAQ,OAAO,QAAQ,OAAO,SAAS,UAAU;AAC9D,aAAO,MAAM,SAAS,OAAO,QAAQ,OAAO,SAAS,KAAK;AAC1D,SAAG,UAAU,MAAM,aAAa,SAAS,MAAM;AAC/C,SAAG,WAAW,MAAM,aAAa,SAAS,MAAM;AAEhD,mBAAa,KAAM,IAAI,OAAO,QAAQ,OAAO;AAC7C,SAAG,WAAW,GAAG,cAAc,QAAQ,UAAU,GAAG;AACpD,UAAI;AAAM,cAAM;AAChB,SAAG,OAAO;AAGV,aAAO;AACP,cAAQ;AAAA;AAAA;AAAA;AAMd;AACE,MAAI,CAAC
;AAEH,SAAI;AACJ,aAAS,IAAI,OAAO,GAAG,QAAQ,CAAE,MAAM;AACvC,WAAO,SAAS;AAEhB,WAAO,iBAAiB,WAAW;AACjC,UAAI,CAAC,OAAO;AACV,aAAI;AACJ,aAAI;AACJ,eAAO,SAAS;AAAA;AAElB,2BAAqB,IAAI,KAAK;AAC9B,SAAG;AACH,UAAI,CAAC,GAAG;AAAY,oBAAY;AAEhC,4BAAsB,MAAM,eAAe,OAAO;AAAA;AAAA;AAItD,SAAO,YAAY,CAAE,OAAO,OAAM,KAAK,QAAQ,OAAO,OAAO,OAAO,QAAQ,OAAO,SAAU,CAAC,OAAM,KAAK;AAAA;AAI3G;AAlOA;AAoOE,eAAa,MAAM,aAAc,MAAM,UAAU,iBAAiB,GAAG,eAAe,UAAY,MAAM,aAAa,KAAO,CAAC,MAAM;AACjI,MAAI,CAAC,QAAQ,MAAM;AAEjB,QAAI,GAAG;AAAY,mBAAa,GAAG;AACnC,OAAG,aAAa;AAEhB,QAAI,MAAM;AAAQ,WAAI;AAAA,aACZ,MAAM,UAAU,iBAAiB,GAAG,eAAe,UAAY,MAAM,cAAc;AAAI,iBAAW,MAAM,eAAe,OAAO,SAAS;AAAA;AAC5I,WAAI,kCAAkC,YAAM,cAAN,mBAAiB,iBAAiB,GAAG,4BAA4B,MAAM;AAClH,iBAAa,GAAG;AAChB,OAAG,aAAa;AAChB,SAAI,4BAA4B,GAAG,YAAY,aAAa,GAAG;AAC/D;AAAA;AAEF,SAAO;AACP,MAAI,GAAG;AAEL,sBAAkB,IAAI,gBAAgB,OAAO,OAAO,OAAO;AAC3D,gBAAY,UAAU,WAAW;AACjC,QAAI,UAAU,OAAO,GAAG,GAAG,MAAM,OAAO,MAAM,QAAQ,GAAG,GAAG,OAAO,OAAO,OAAO;AACjF,iBAAa,IAAI,aAAa,GAAG,GAAG,OAAO,OAAO,OAAO;AAEzD,cAAU,OAAO,MAAM,QAAQ;AAAA;AAE/B,UAAM,OAAO,OAAO,YAAY,KAAK;AACnC,UAAI,OAAO;AAAO,aAAI,OAAO;AAAA;AAE3B,6BAAqB;AACrB,YAAI,CAAC,GAAG;AAAY,sBAAY;AAChC,WAAG;AACH,8BAAsB,MAAM,eAAe,OAAO;AAAA;AAAA;AAAA;AAAA;AAO1D;AACE,SAAO,IAAI,QAAQ;AACjB,mBAAc,IAAI;AAClB,WAAM,SAAS;AACb,WAAI,qBAAqB,OAAM;AAC/B,qBAAe,SAAS,eAAe;AACvC,aAAM,QAAQ,OAAM;AACpB,aAAM,SAAS,OAAM;AACrB,aAAO,QAAQ,MAAM,OAAO,OAAO,SAAS,MAAM,OAAO,OAAO,QAAQ,IAAI,MAAM,OAAO,OAAO,QAAQ,OAAM;AAC9G,aAAO,SAAS,MAAM,OAAO,OAAO,UAAU,MAAM,OAAO,OAAO,SAAS,IAAI,MAAM,OAAO,OAAO,SAAS,OAAM;AAClH,qBAAe,MAAM,MAAM,OAAO,QAAO;AACzC,kBAAY,QAAO,QAAQ;AAC3B,oBAAc,SAAS,cAAc;AACrC,YAAM,YAAY;AAClB,YAAM,QAAQ,OAAO,aAAc,IAAG,UAAU;AAChD,YAAM,SAAS,OAAO,SAAU,QAAO,aAAa,MAAM;AAC1D,kBAAY,MAAM,WAAW;AAC7B,UAAI,UAAU,QAAQ,GAAG,GAAG,OAAO,OAAO,OAAO,QAAQ,GAAG,GAAG,MAAM,OAAO,MAAM;AAClF,eAAS,eAAe,qBAAqB,YAAY;AACzD,aAAM,MAAM;AACZ,cAAQ;AAAA;AAEV,WAAM,MAAM;AAAA;AAAA;AAKhB;AACE,QAAM,OAAO,iBAAiB;AAC9B,WAAS,eAAe,qBAAqB,MAAM,UAAU;AAC7D,WAAS,eAAe,UAAU,MAAM,UAAU;AAClD,gBAAc,SAAS,eAAe;AACtC,iBAAe,SAAS,eAAe;AACvC,KAAG,iBAAiB,GAAG;AACvB,MAAK,MAAM,cAAc,QAAS,CAAC,MAAM;AACvC,aAAS,eAAe,QAAQ,MAAM,UAAU;AAChD,WAAO;AACP,UAAM;AAAA;AAEN,UAAM;AACN,aAAS,eAAe,QAAQ,MAAM,UAAU;AAChD,WAAO;AACP,UAAM;AAAA;AAER,iBAAe,OAAO;AAAA;AAIxB;AACE,WAAS,eAAe,QAAQ,MAAM,UAAU;AAChD,QAAM,OAAO,iBAAiB;AAC9B,eAAa,KAAK,KAAK,MAAM,KAAK,GAAG,UAAU,OAAO,aAAa,SAAS,KAAK;AACjF,KAAG,WAAW,GAAG,cAAc,QAAQ,UAAU,GAAG;AACpD,KAAG,iBAAiB,GAAG,sBAAsB,GAAG;AAChD,WAAS,eAAe,UAAU,MAAM,UAAU;AAClD,WAAS,eAAe,qBAAqB,MAAM,UAAU;AAC7D,OAAI;AACJ,SAAO;AACP,WAAS,eAAe,qBAAqB,YAAY;AACzD,uBAAqB,GAAG;AAAS,UAAM,aAAa;AACpD,SAAO;AAAA;AAGT;AACE,UAAO,IAAI,aAAK,SAAS,MAAM,IAAI,CAAE,KAAK,QAAQ,OAAO;AACzD,cAAY,MAAK,UAAU,eAAe,eAAe,MAAM;AAC/D,QAAK,UAAU,kBAAkB,kBAAkB,MAAM;AACzD,WAAS,eAAe,QAAQ,iBAAiB,SAAS,MAAM,IAAI;AAEpE,QAAK,QAAQ;AACb,QAAK,QAAQ,WAAW,CAAC,OAAO,SAAS,SAAS,MAAM,OAAO,SAAS,SAAS,MAAM,OAAO,UAAU;AACxG,QAAK,QAAQ,oBAAoB,MAAM,QAAQ,SAAS,SAAS,MAAM,OAAO,QAAQ;AACtF,QAAK,QAAQ,mBAAmB,MAAM,QAAQ,WAAW,SAAS,MAAM,OAAO,UAAU;AACzF,QAAK,QAAQ,iBAAiB,MAAM,QAAQ,cAAc,SAAS,MAAM,OAAO,aAAa;AAC7F,QAAK,QAAQ,kBAAkB,IAAI;AACnC,QAAK,QAAQ;AACb,QAAK,SAAS;AACd,QAAK,QAAQ,eAAe,MAAM,OAAO,MAAM;AAC/C,QAAK,QAAQ,aAAa,MAAM,OAAO,KAAK,MAAM;AAClD,QAAK,QAAQ,aAAa,MAAM,OAAO,KAAK,MAAM;AAClD,QAAK,QAAQ,YAAY,MAAM,OAAO,KAAK,KAAK;AAChD,QAAK,QAAQ,eAAe,MAAM,OAAO,KAAK,QAAQ;AACtD,QAAK,QAAQ,gBAAgB,MAAM,OAAO,KAAK,SAAS;AACxD,QAAK,QAAQ,aAAa,MAAM,OAAO,MAAM;AAC7C,QAAK,QAAQ,aAAa,MAAM,OAAO,MAAM;AAC7C,QAAK,QAAQ,oBAAoB,MAAM,OAAO,SAAS;AAEvD,QAAK,QAAQ;AACb,QAAK,SAAS;AACd,QAAK,SAAS,eAAe,MAAM,OAAO,KAAK,UAAU,YAAY,GAAG,IAAI,GAAG;AAC7E,UAAM,OAAO,KAAK,SAAS,WAAW,SAAS;AAC/C,UAAM,OAAO,KAAK,gBAAgB,SAAS;AAC3C,U
AAM,OAAO,KAAK,WAAW,SAAS;AAAA;AAExC,QAAK,SAAS,eAAe,MAAM,OAAO,KAAK,UAAU,cAAc,GAAG,IAAI,GAAG;AAC/E,UAAM,OAAO,KAAK,SAAS,aAAa,SAAS;AACjD,UAAM,OAAO,KAAK,QAAQ,aAAa,SAAS;AAChD,UAAM,OAAO,KAAK,IAAI,aAAa,SAAS;AAC5C,UAAM,OAAO,KAAK,aAAa,SAAS;AAAA;AAE1C,QAAK,SAAS,kBAAkB,MAAM,OAAO,KAAK,UAAU,iBAAiB,GAAK,GAAK,MAAM;AAC3F,UAAM,OAAO,KAAK,SAAS,gBAAgB,WAAW;AACtD,UAAM,OAAO,KAAK,OAAO,gBAAgB,WAAW;AACpD,UAAM,OAAO,KAAK,QAAQ,gBAAgB,WAAW;AACrD,UAAM,OAAO,KAAK,gBAAgB,WAAW;AAAA;AAE/C,QAAK,SAAS,mBAAmB,MAAM,OAAO,KAAK,UAAU,kBAAkB,KAAK,GAAK,MAAM;AAC7F,UAAM,OAAO,KAAK,SAAS,iBAAiB,WAAW;AACvD,UAAM,OAAO,KAAK,iBAAiB,WAAW;AAC9C,UAAM,OAAO,KAAK,iBAAiB,WAAW;AAAA;AAEhD,QAAK,SAAS,WAAW,MAAM,OAAO,KAAK,UAAU,gBAAgB,KAAK,GAAK,MAAM;AACnF,UAAM,OAAO,KAAK,SAAS,eAAe,WAAW;AACrD,UAAM,OAAO,KAAK,eAAe,WAAW;AAAA;AAG9C,QAAK,QAAQ;AACb,QAAK,SAAS,OAAO;AAErB,WAAS,IAAI,aAAK,SAAS,MAAM,IAAI,CAAE,KAAK,QAAQ,OAAO;AAC3D,SAAO,SAAS;AAChB,SAAO,QAAQ,mBAAmB,IAAI,YAAY,SAAS,GAAG,WAAW;AACzE,SAAO,QAAQ,gBAAgB,IAAI,QAAQ,MAAM;AACjD,SAAO,QAAQ,qBAAqB,IAAI,UAAU,MAAM;AACxD,SAAO,QAAQ,gBAAgB,IAAI;AACnC,SAAO,QAAQ,cAAc,IAAI;AACjC,SAAO,QAAQ,iBAAiB,IAAI;AACpC,SAAO,QAAQ,iBAAiB,IAAI;AACpC,SAAO,QAAQ,eAAe,IAAI;AAClC,SAAO,QAAQ;AACf,SAAO,SAAS;AAChB,SAAO,QAAQ,WAAW,MAAM,OAAO,QAAQ;AAC/C,KAAG,YAAY,OAAO,SAAS,eAAe,MAAM,OAAO,QAAQ,SAAS,GAAG,MAAM,IAAI,SAAS,MAAM,OAAO,OAAO,QAAQ,SAAS;AACvI,KAAG,aAAa,OAAO,SAAS,gBAAgB,MAAM,OAAO,QAAQ,UAAU,GAAG,MAAM,IAAI,SAAS,MAAM,OAAO,OAAO,SAAS,SAAS;AAC3I,SAAO,SAAS,cAAc,MAAM,OAAO,QAAQ,cAAc,IAAM,GAAK,MAAM,SAAS,MAAM,OAAO,OAAO,aAAa,WAAW;AACvI,SAAO,SAAS,YAAY,MAAM,OAAO,QAAQ,YAAY,IAAM,GAAK,MAAM,SAAS,MAAM,OAAO,OAAO,WAAW,WAAW;AACjI,SAAO,SAAS,aAAa,MAAM,OAAO,QAAQ,aAAa,GAAG,GAAK,MAAM,SAAS,MAAM,OAAO,OAAO,YAAY,WAAW;AACjI,SAAO,SAAS,QAAQ,MAAM,OAAO,QAAQ,QAAQ,GAAG,IAAI,GAAG,SAAS,MAAM,OAAO,OAAO,OAAO,SAAS;AAC5G,SAAO,SAAS,cAAc,MAAM,OAAO,QAAQ,cAAc,IAAM,GAAK,MAAM,SAAS,MAAM,OAAO,OAAO,aAAa,WAAW;AACvI,SAAO,SAAS,OAAO,MAAM,OAAO,QAAQ,OAAO,GAAG,KAAK,GAAG,SAAS,MAAM,OAAO,OAAO,MAAM,SAAS;AAC1G,SAAO,SAAS,YAAY,MAAM,OAAO,QAAQ,YAAY,GAAG,IAAI,GAAG,SAAS,MAAM,OAAO,OAAO,WAAW,SAAS;AACxH,SAAO,QAAQ,YAAY,MAAM,OAAO,QAAQ;AAChD,SAAO,QAAQ,SAAS,MAAM,OAAO,QAAQ;AAC7C,SAAO,QAAQ,WAAW,MAAM,OAAO,QAAQ;AAC/C,SAAO,QAAQ,cAAc,MAAM,OAAO,QAAQ;AAClD,SAAO,QAAQ,eAAe,MAAM,OAAO,QAAQ;AACnD,SAAO,QAAQ,YAAY,MAAM,OAAO,QAAQ;AAAA;AAGlD;AACE,OAAI;AACJ;AACA,WAAS,eAAe,OAAO,YAAY,kBAAkB,MAAM,kCAAkC,MAAM,GAAG;AAG9G,MAAI,GAAG;AACL,WAAO;AACP,UAAM,MAAM,KAAK;AAAA;AAGnB,MAAI,GAAG;AACL,WAAO;AACP,UAAM,MAAM,OAAO;AAAA;AAErB,SAAO;AACP,WAAS,eAAe,UAAU,MAAM,UAAU;AAClD,WAAS,eAAe,QAAQ,MAAM,UAAU;AAAA;AAGlD,OAAO,SAAS;AAChB,OAAO,WAAW;",
"names": []
}
diff --git a/dist/demo-browser-index.json b/dist/demo-browser-index.json
index 1946181f..7fc45e39 100644
--- a/dist/demo-browser-index.json
+++ b/dist/demo-browser-index.json
@@ -1,7 +1,7 @@
{
"inputs": {
"demo/browser.js": {
- "bytes": 18822,
+ "bytes": 19806,
"imports": [
{
"path": "dist/human.esm.js"
@@ -15,7 +15,7 @@
]
},
"demo/draw.js": {
- "bytes": 9119,
+ "bytes": 9814,
"imports": []
},
"demo/menu.js": {
@@ -23,7 +23,7 @@
"imports": []
},
"dist/human.esm.js": {
- "bytes": 3443254,
+ "bytes": 3443493,
"imports": []
}
},
@@ -31,25 +31,25 @@
"dist/demo-browser-index.js.map": {
"imports": [],
"inputs": {},
- "bytes": 5410764
+ "bytes": 5414325
},
"dist/demo-browser-index.js": {
"imports": [],
"inputs": {
"dist/human.esm.js": {
- "bytesInOutput": 3432571
+ "bytesInOutput": 3432788
},
"demo/draw.js": {
- "bytesInOutput": 8898
+ "bytesInOutput": 9599
},
"demo/menu.js": {
"bytesInOutput": 13813
},
"demo/browser.js": {
- "bytesInOutput": 16535
+ "bytesInOutput": 17362
}
},
- "bytes": 3471939
+ "bytes": 3473684
}
}
}
diff --git a/dist/human.esm.js b/dist/human.esm.js
index cc169e38..fcdf9fc4 100644
--- a/dist/human.esm.js
+++ b/dist/human.esm.js
@@ -98525,6 +98525,13 @@ class Human {
}
return faceRes;
}
+ async image(input, userConfig = {}) {
+ this.state = "image";
+ this.config = mergeDeep(this.config, userConfig);
+ const process3 = image.process(input, this.config);
+ process3.tensor.dispose();
+ return process3.canvas;
+ }
async detect(input, userConfig = {}) {
this.state = "config";
let timeStamp;
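
For reference, a minimal usage sketch of the `image()` method introduced in the hunk above. This is not part of the diff; it assumes a browser page with a `<video id="video">` element, an already-constructed `Human` instance named `human`, and that `image()` accepts the same input element types as `detect()` — treat the element id and the empty config object as illustrative placeholders.

async function showProcessedFrame() {
  // grab the input element to pre-process (assumed to exist on the page)
  const input = document.getElementById('video');
  // image() merges the optional user config into the instance config,
  // runs only the image pre-processing step, disposes the intermediate
  // tensor and returns the processed canvas
  const processedCanvas = await human.image(input, {});
  // display the returned canvas; a real app would more likely draw it
  // into an existing output canvas instead
  document.body.appendChild(processedCanvas);
}
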
diff --git a/dist/human.esm.js.map b/dist/human.esm.js.map
index 0fe144e4..1005bf8b 100644
--- a/dist/human.esm.js.map
+++ b/dist/human.esm.js.map
@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../node_modules/node-fetch/browser.js", "../node_modules/safe-buffer/index.js", "../node_modules/string_decoder/lib/string_decoder.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/backends/backend.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/util_base.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/environment.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/global_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/kernel_names.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/kernel_registry.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/profiler.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/tape.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/tensor_format.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/tensor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/types.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/tensor_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/engine.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/device_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/flags.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/tensor_util_env.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/operation.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/complex.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/tensor_ops_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/tensor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/io/types.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/io/io_utils.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/io/router_registry.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/io/indexed_db.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/io/local_storage.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/io/model_management.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/platforms/platform_browser.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/platforms/platform_node.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/buffer.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/cast.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/clone.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/print.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/base_side_effects.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/io/browser_files.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/io/progress.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/io/weights_loader.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/io/http.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/io/passthrough.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/io/io.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/reshape.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/mat_mul.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/one_hot.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/transpose.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/confusion_matrix.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/math.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/tensor3d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/browser.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/gather_nd_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/scatter_nd_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/slice_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/serialization.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/test_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/version.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/globals.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/add.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/floorDiv.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/div.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/mul.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/abs.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/acos.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/acosh.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/add_n.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/axis_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/all.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/any.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/arg_max.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/arg_min.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/asin.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/asinh.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/atan.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/atan2.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/atanh.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/conv_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/avg_pool.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/avg_pool_3d.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/concat_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/concat.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/sigmoid.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/slice.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/tanh.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/basic_lstm_cell.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/batch_to_space_nd.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/batchnorm_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/batchnorm.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/batchnorm2d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/batchnorm3d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/batchnorm4d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/broadcast_to.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/ceil.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/clip_by_value.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/concat_1d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/concat_2d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/concat_3d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/concat_4d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/conv2d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/conv1d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/conv2d_backprop_input.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/conv2d_transpose.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/conv3d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/conv3d_backprop_input.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/conv3d_transpose.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/cos.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/cosh.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/cumsum.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/depth_to_space.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/depthwise_conv2d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/diag.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/dilation2d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/broadcast_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/equal.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/where.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/zeros_like.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/div_no_nan.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/dot.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/elu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/erf.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/exp.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/expand_dims.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/expm1.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/tile.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/eye.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/fill.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/floor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/reduce_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/segment_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/gather.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/greater.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/greater_equal.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/imag.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/is_finite.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/is_inf.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/is_nan.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/maximum.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/scalar.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/leaky_relu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/less.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/less_equal.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/linspace.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/local_response_normalization.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/log.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/log1p.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/neg.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/softplus.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/log_sigmoid.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/max.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/sub.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/sum.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/log_softmax.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/log_sum_exp.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/logical_and.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/logical_not.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/logical_or.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/logical_xor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/max_pool.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/max_pool_3d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/max_pool_with_argmax.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/zeros.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/ones.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/mean.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/min.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/minimum.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/mirror_pad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/mod.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/square.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/moments.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/multi_rnn_cell.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/multinomial.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/not_equal.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/real.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/ones_like.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/outer_product.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/pad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/pad1d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/pad2d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/pad3d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/pad4d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/space_to_batch_nd.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/pool.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/pow.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/prelu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/prod.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/rand.js", "../node_modules/@tensorflow/tfjs/node_modules/seedrandom/lib/alea.js", "../node_modules/@tensorflow/tfjs/node_modules/seedrandom/lib/xor128.js", "../node_modules/@tensorflow/tfjs/node_modules/seedrandom/lib/xorwow.js", "../node_modules/@tensorflow/tfjs/node_modules/seedrandom/lib/xorshift7.js", "../node_modules/@tensorflow/tfjs/node_modules/seedrandom/lib/xor4096.js", "../node_modules/@tensorflow/tfjs/node_modules/seedrandom/lib/tychei.js", "../node_modules/@tensorflow/tfjs/node_modules/seedrandom/seedrandom.js", "../node_modules/@tensorflow/tfjs/node_modules/seedrandom/index.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/rand_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/random_gamma.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/random_normal.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/random_uniform.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/tensor1d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/range.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/reciprocal.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/relu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/relu6.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/reverse.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/reverse_1d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/reverse_2d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/reverse_3d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/reverse_4d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/round.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/rsqrt.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/selu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/separable_conv2d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/setdiff1d_async.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/sign.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/sin.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/sinh.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/slice1d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/slice2d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/slice3d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/slice4d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/softmax.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/spectral/fft.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/spectral/ifft.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/spectral/irfft.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/split_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/split.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/spectral/rfft.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/sqrt.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/squared_difference.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/squeeze.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/stack.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/step.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/strided_slice.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/tan.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/tensor2d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/tensor4d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/tensor5d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/tensor6d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/topk.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/truncated_normal.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/unique.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/unsorted_segment_sum.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/unstack.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/variable.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/backends/where_impl.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/where_async.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/boolean_mask.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/compare.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/binary_ops.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/norm.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/moving_average.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/scatter_nd.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/sparse_to_dense_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/sparse_to_dense.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/gather_nd.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/dropout_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/dropout.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/signal_ops_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/in_top_k.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/conv2d_backprop_filter.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/fused_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/fused/conv2d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/depthwise_conv2d_native_backprop_filter.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/depthwise_conv2d_native_backprop_input.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/fused/depthwise_conv2d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/fused/mat_mul.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/fused_ops.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/signal/hamming_window.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/signal/hann_window.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/signal/frame.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/signal/stft.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/image/crop_and_resize.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/image/flip_left_right.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/image/rotate_with_offset.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/nonmax_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/image/non_max_suppression.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/backends/array_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/backends/non_max_suppression_impl.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/image/non_max_suppression_async.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/image/non_max_suppression_with_score.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/image/non_max_suppression_with_score_async.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/image/non_max_suppression_padded.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/image/non_max_suppression_padded_async.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/image/resize_bilinear.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/image/resize_nearest_neighbor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/linalg/band_part.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/linalg/gram_schmidt.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/linalg/qr.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/loss_ops_utils.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/losses/compute_weighted_loss.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/losses/absolute_difference.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/losses/cosine_distance.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/losses/hinge_loss.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/losses/huber_loss.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/losses/log_loss.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/losses/mean_squared_error.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/losses/sigmoid_cross_entropy.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/losses/softmax_cross_entropy.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/ops.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/optimizers/optimizer.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/optimizers/adadelta_optimizer.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/optimizers/adagrad_optimizer.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/optimizers/adam_optimizer.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/optimizers/adamax_optimizer.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/optimizers/sgd_optimizer.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/optimizers/momentum_optimizer.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/optimizers/rmsprop_optimizer.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/optimizers/optimizer_constructors.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/train.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/browser_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/rotate_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/array_ops_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/selu_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/erf_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/log.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/backends/complex_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/backends/backend_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/backends/split_shared.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/backends/tile_impl.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/backends/topk_impl.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/backends/kernel_impls.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/base.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Abs_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Acos_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Acosh_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Add_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/AddN_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/ArgMax_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/ArgMin_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Asin_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Asinh_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Atan2_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Atan_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Atanh_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/avg_pool_3d_backprop.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/AvgPool3D_grad.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/avg_pool_backprop.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/AvgPool_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/BatchMatMul_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/BatchToSpaceND_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/BroadcastTo_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Cast_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Ceil_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/ClipByValue_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Concat_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Conv2D_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Conv2DBackpropInput_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/conv3d_backprop_filter.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Conv3D_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Cos_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Cosh_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Cumsum_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/DepthwiseConv2dNative_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Dilation2D_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Div_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Elu_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Erf_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Exp_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Expm1_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Floor_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/FloorDiv_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/FusedBatchNorm_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/GatherV2_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/GreaterEqual_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Identity_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/IsFinite_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/IsInf_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/IsNan_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Log1p_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Log_grad.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/LogSoftmax_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/local_response_normalization_backprop.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/LRN_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/min_max_grad_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Max_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Maximum_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/max_pool_3d_backprop.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/MaxPool3D_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/max_pool_backprop.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/MaxPool_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Min_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Minimum_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/MirrorPad_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Mod_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Multiply_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Negate_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/OneHot_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/OnesLike_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/PadV2_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Pow_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Prelu_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Reciprocal_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Relu6_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Relu_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Reshape_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/ResizeBilinear_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/ResizeNearestNeighbor_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Reverse_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Round_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Rsqrt_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/SelectV2_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Selu_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Sigmoid_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Sign_grad.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Sin_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Sinh_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Slice_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Softmax_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Softplus_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/SpaceToBatchND_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/SplitV_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Sqrt_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Square_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/SquaredDifference_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Step_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Sub_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Sum_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Tan_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Tanh_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Tile_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Transpose_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/Unpack_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/UnsortedSegmentSum_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/gradients/ZerosLike_grad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/register_all_gradients.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/abs.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/acos.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/acosh.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/add_strict.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/add.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/all.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/any.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/arg_max.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/arg_min.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/as_scalar.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/as_type.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/as1d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/as2d.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/as3d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/as4d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/as5d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/asin.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/asinh.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/atan.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/atan2.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/atanh.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/avg_pool.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/batch_to_space_nd.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/batchnorm.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/broadcast_to.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/cast.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/ceil.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/clip_by_value.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/concat.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/conv1d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/conv2d_transpose.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/conv2d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/cos.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/cosh.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/cumsum.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/depth_to_space.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/depthwise_conv2D_deprecated.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/depthwise_conv2d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/dilation2d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/div_no_nan.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/div_strict.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/div.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/dot.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/elu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/equal_strict.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/equal.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/erf.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/exp.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/expand_dims.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/expm1.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/fft.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/flatten.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/floor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/floorDiv.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/gather.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/greater_equal_strict.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/greater_equal.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/greater_strict.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/greater.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/ifft.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/irfft.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/is_finite.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/is_inf.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/is_nan.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/leaky_relu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/less_equal_strict.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/less_equal.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/less_strict.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/less.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/local_response_normalization.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/log_sigmoid.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/log_softmax.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/log_sum_exp.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/log.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/log1p.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/logical_and.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/logical_not.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/logical_or.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/logical_xor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/mat_mul.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/max_pool.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/max.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/maximum_strict.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/maximum.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/mean.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/min.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/minimum_strict.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/minimum.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/mirror_pad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/mod_strict.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/mod.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/mul_strict.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/mul.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/neg.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/norm.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/not_equal_strict.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/not_equal.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/one_hot.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/ones_like.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/pad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/pool.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/pow_strict.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/pow.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/prelu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/prod.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/reciprocal.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/relu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/relu6.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/reshape_as.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/reshape.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/resize_bilinear.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/resize_nearest_neighbor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/reverse.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/rfft.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/round.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/rsqrt.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/selu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/separable_conv2d.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/sigmoid.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/sign.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/sin.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/sinh.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/slice.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/softmax.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/softplus.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/space_to_batch_nd.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/split.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/sqrt.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/square.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/squared_difference.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/squared_difference_strict.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/squeeze.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/stack.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/step.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/strided_slice.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/sub_strict.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/sub.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/sum.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/tan.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/tanh.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/tile.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/to_bool.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/to_float.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/to_int.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/topk.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/transpose.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/unique.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/unsorted_segment_sum.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/unstack.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/where.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/zeros_like.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/public/chained_ops/register_all_chained_ops.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/index.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/backend/common.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/errors.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/utils/generic_utils.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/constraints.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/exports_constraints.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/keras_format/common.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/common.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/utils/math_utils.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/backend/tfjs_backend.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/keras_format/initializer_config.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/initializers.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/exports_initializers.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/backend/state.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/utils/types_utils.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/utils/variable_utils.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/variables.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/engine/topology.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/engine/input_layer.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/logs.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/base_callbacks.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/layers/serialization.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/losses.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/metrics.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/optimizers.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/user_defined_metadata.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/utils/layer_utils.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/utils/serialization_utils.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/version.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/engine/executor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/engine/container.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/engine/training_utils.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/engine/training_dataset.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/engine/training_tensors.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/engine/training.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/models.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/exports.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/activations.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/regularizers.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/layers/advanced_activations.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/utils/conv_utils.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/layers/convolutional.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/layers/convolutional_depthwise.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/layers/recurrent.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/layers/convolutional_recurrent.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/layers/core.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/layers/embeddings.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/layers/merge.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/layers/noise.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/layers/normalization.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/layers/padding.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/layers/pooling.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/layers/wrappers.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/exports_layers.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/exports_metrics.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/exports_models.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/exports_regularizers.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/callbacks.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-layers/dist/index.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/data/compiled_api.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/custom_op/register.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/executors/utils.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/op_list/arithmetic.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/op_list/basic_math.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/op_list/control.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/op_list/convolution.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/op_list/creation.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/op_list/dynamic.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/op_list/evaluation.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/op_list/graph.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/op_list/hash_table.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/op_list/image.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/op_list/logical.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/op_list/matrices.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/op_list/normalization.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/op_list/reduction.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/op_list/slice_join.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/op_list/spectral.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/op_list/transformation.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/operation_mapper.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/custom_op/node_value_impl.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-core/dist/ops/ops_for_converter.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/executors/arithmetic_executor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/executors/basic_math_executor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/executor/tensor_utils.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/executor/tensor_array.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/executor/tensor_list.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/executors/control_executor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/executors/convolution_executor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/executors/creation_executor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/executors/dynamic_executor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/executors/evaluation_executor.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/executors/graph_executor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/executor/hash_table.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/executors/hash_table_executor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/executors/image_executor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/executors/logical_executor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/executors/matrices_executor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/executors/normalization_executor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/executors/reduction_executor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/executors/slice_join_executor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/executors/spectral_executor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/executors/transformation_executor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/operations/operation_executor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/executor/execution_context.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/executor/model_analysis.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/executor/graph_executor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/executor/resource_manager.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/executor/graph_model.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/version.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-converter/dist/index.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/util/deep_map.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/util/deep_clone.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/util/ring_buffer.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/util/growing_ring_buffer.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/iterators/lazy_iterator.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/dataset.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/datasets/text_line_dataset.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/datasets/csv_dataset.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/iterators/microphone_iterator.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/iterators/webcam_iterator.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/datasource.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/iterators/string_iterator.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/iterators/byte_chunk_iterator.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/iterators/file_chunk_iterator.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/iterators/url_chunk_iterator.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/util/source_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/sources/file_data_source.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/sources/url_data_source.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/readers.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/version.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-data/dist/index.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/cpu_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/backend_cpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Abs.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/utils/binary_impl.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Complex.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Identity.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Real.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Cast.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/utils/kernel_utils.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Add.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/utils/unary_impl.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/utils/unary_utils.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Ceil.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Exp.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Expm1.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Floor.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Log.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Max_impl.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Multiply.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/NotEqual.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Rsqrt.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Slice.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/SquaredDifference.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Sub.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Transpose_impl.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Unique_impl.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/shared.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/version.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/base.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Elu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Prelu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Relu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Relu6.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/utils/fused_utils.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Reshape.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/BatchMatMul.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/_FusedMatMul.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Acos.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Acosh.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Asin.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Asinh.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Atan.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Atanh.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/utils/pool_utils.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/AvgPool.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/AvgPoolBackprop.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/BatchNorm.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Clip.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Imag.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Concat.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Conv2D.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Conv2DBackpropFilter.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Conv2DBackpropInput.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Conv3D.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Conv3DBackpropFilterV2.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Conv3DBackpropInputV2.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Cos.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Cosh.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/DepthwiseConv2dNative.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/DepthwiseConv2dNativeBackpropFilter.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/DepthwiseConv2dNativeBackpropInput.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Dilation2D.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Dilation2DBackpropFilter.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Dilation2DBackpropInput.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Div.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Erf.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/utils/fft_utils.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/FFT.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Fill.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/FlipLeftRight.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/FusedConv2D.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/FusedDepthwiseConv2D.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/IFFT.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/IsFinite.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/IsInf.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/IsNaN.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Log1p.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/LogicalNot.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Max.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/MaxPool.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/MaxPoolBackprop.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/MaxPoolWithArgmax_impl.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/MaxPoolWithArgmax.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/MirrorPad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/NonMaxSuppressionV4.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/NonMaxSuppressionV5.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/PadV2.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Reciprocal.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/RotateWithOffset.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Round.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Selu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Sigmoid.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Sign.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Sin.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Sinh.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Softplus.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Transpose.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/SpaceToBatchND.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Sqrt.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Square.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Step.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Tan.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Tanh.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/kernels/Unique.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/register_all_kernels.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-cpu/dist/index.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/canvas_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/tex_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/webgl_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/flags_webgl.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernel_utils/shared.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/addn_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/addn_packed_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/argminmax_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/packing_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/glsl_version.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/shader_compiler_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/shader_compiler.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/argminmax_packed_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/avg_pool_backprop_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/binaryop_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/binaryop_packed_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/clip_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/clip_packed_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/complex_abs_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/conv_backprop_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/conv_backprop_gpu_depthwise.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/conv_gpu.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/conv_gpu_depthwise.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/conv_packed_gpu_depthwise.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/crop_and_resize_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/cumsum_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/decode_matrix_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/decode_matrix_packed_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/depth_to_space_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/diag_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/encode_float_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/encode_float_packed_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/encode_matrix_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/encode_matrix_packed_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/fill_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/gather_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/gather_nd_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/gpgpu_util.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/gpgpu_context.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/gpgpu_math.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/im2col_packed_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/lrn_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/lrn_grad_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/lrn_packed_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/max_pool_backprop_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/mulmat_packed_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/multinomial_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/onehot_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/pack_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/pad_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/pad_packed_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/pool_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/reduce_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/reshape_packed_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/resize_bilinear_backprop_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/resize_bilinear_gpu.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/resize_bilinear_packed_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/resize_nearest_neighbor_backprop_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/resize_nearest_neighbor_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/reverse_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/reverse_packed_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/scatter_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/segment_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/select_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/slice_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/slice_packed_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/strided_slice_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/texture_manager.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/tile_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/unaryop_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/unaryop_packed_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/unpack_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/backend_webgl.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/version.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/webgl.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/base.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Identity.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Complex.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernel_utils/kernel_funcs_utils.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Add.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Atan2.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/AvgPool.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/AvgPoolBackprop.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/batchnorm_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/batchnorm_packed_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/BatchNorm.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/NotEqual.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Real.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernel_utils/int.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Cast.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/concat_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/concat_packed_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Imag.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernel_utils/reshape.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Reshape.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Concat_impl.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Concat.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Cos.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Div.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/fft_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/FFT_impl.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/FFT.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/flip_left_right_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/FlipLeftRight.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/FromPixels_utils/from_pixels_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/FromPixels_utils/from_pixels_packed_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/FromPixels.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/IFFT.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/mean_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernel_utils/reduce.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Max_impl.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/transpose_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/transpose_packed_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Transpose_impl.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Max.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/MaxPool.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/MaxPoolBackprop.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/MaxPoolWithArgmax_impl.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/MaxPoolWithArgmax.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Mean_impl.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Mean.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/mirror_pad_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/mirror_pad_packed_gpu.js", 
"../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/MirrorPad.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/binaryop_complex_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Multiply.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/NonMaxSuppressionV3.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/NonMaxSuppressionV4.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/NonMaxSuppressionV5.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/rotate_gpu.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/RotateWithOffset.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Sin.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Square.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/SquaredDifference.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Sub.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Tan.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Transpose.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/kernels/Unique.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/register_all_kernels.js", "../node_modules/@tensorflow/tfjs/node_modules/@tensorflow/tfjs-backend-webgl/dist/index.js", "../node_modules/@tensorflow/tfjs/src/version.ts", "../node_modules/@tensorflow/tfjs/src/index.ts", "empty:path", "empty:worker_threads", "empty:perf_hooks", "../node_modules/@tensorflow/tfjs-backend-wasm/wasm-out/tfjs-backend-wasm-threaded-simd.js", "../node_modules/@tensorflow/tfjs-backend-wasm/wasm-out/tfjs-backend-wasm.js", "../src/face/blazeface.js", "../src/face/keypoints.js", "../src/face/box.js", "../src/face/util.js", "../src/face/facepipeline.js", "../src/face/uvcoords.js", "../src/face/facemesh.js", "../src/profile.js", "../src/age/age.js", "../src/gender/gender.js", "../src/emotion/emotion.js", "../src/body/modelBase.js", "../src/body/modelMobileNet.js", "../src/body/heapSort.js", "../src/body/buildParts.js", "../src/body/keypoints.js", "../src/body/vectors.js", "../src/body/decodePose.js", "../src/body/decodeMultiple.js", "../src/body/util.js", "../src/body/modelPoseNet.js", "../src/body/posenet.js", "../src/hand/handdetector.js", "../src/hand/handpipeline.js", "../src/hand/anchors.js", "../src/hand/handpose.js", "../src/gesture.js", "../src/imagefx.js", "../src/image.js", "../src/tf.js", "../node_modules/@tensorflow/tfjs-core/src/backends/backend.ts", "../node_modules/@tensorflow/tfjs-core/src/util_base.ts", "../node_modules/@tensorflow/tfjs-core/src/environment.ts", "../node_modules/@tensorflow/tfjs-core/src/global_util.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", 
"../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_names.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_registry.ts", "../node_modules/@tensorflow/tfjs-core/src/kernel_registry.ts", "../node_modules/@tensorflow/tfjs-core/src/util.ts", "../node_modules/@tensorflow/tfjs-core/src/util.ts", "../node_modules/@tensorflow/tfjs-core/src/profiler.ts", "../node_modules/@tensorflow/tfjs-core/src/tape.ts", "../node_modules/@tensorflow/tfjs-core/src/tensor_format.ts", "../node_modules/@tensorflow/tfjs-core/src/tensor.ts", "../node_modules/@tensorflow/tfjs-core/src/tensor.ts", "../node_modules/@tensorflow/tfjs-core/src/tensor.ts", "../node_modules/@tensorflow/tfjs-core/src/types.ts", "../node_modules/@tensorflow/tfjs-core/src/tensor_util.ts", "../node_modules/@tensorflow/tfjs-core/src/tensor_util.ts", "../node_modules/@tensorflow/tfjs-core/src/engine.ts", "../node_modules/@tensorflow/tfjs-core/src/tensor_util_env.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/operation.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/complex.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/tensor_ops_util.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/buffer.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/cast.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/reshape.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/transpose.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/gather_nd_util.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/gather_nd_util.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/scatter_nd_util.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/scatter_nd_util.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/slice_util.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/slice_util.ts", "../node_modules/@tensorflow/tfjs-core/src/globals.ts", 
"../node_modules/@tensorflow/tfjs-core/src/globals.ts", "../node_modules/@tensorflow/tfjs-core/src/globals.ts", "../node_modules/@tensorflow/tfjs-core/src/globals.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/mul.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/axis_util.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/conv_util.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/concat_util.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/broadcast_util.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/elu.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/segment_util.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/reduce_util.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/segment_util.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/scalar.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/sum.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/zeros.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/prelu.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/tensor1d.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/relu.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/relu6.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/split_util.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/step.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/fused_util.ts", "../node_modules/@tensorflow/tfjs-core/src/backends/backend_util.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/rotate_util.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/array_ops_util.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/selu_util.ts", "../node_modules/@tensorflow/tfjs-core/src/ops/erf_util.ts", "../node_modules/@tensorflow/tfjs-core/src/log.ts", "../node_modules/@tensorflow/tfjs-core/src/backends/complex_util.ts", "../node_modules/@tensorflow/tfjs-core/src/backends/backend_util.ts", "../node_modules/@tensorflow/tfjs-core/src/backends/backend_util.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/types.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/_FusedMatMul.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/unary_kernel.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Abs.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/binary_kernel.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Add.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/AddN.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Identity.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Transpose.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/kernel_utils.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/ArgMax.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/AvgPool.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Reshape.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/BatchMatMul.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Cast.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/ClipByValue.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Concat.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Conv2D.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Conv2DBackpropInput.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Cos.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/CropAndResize.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Cumsum.ts", 
"../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/DepthToSpace.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/DepthwiseConv2dNative.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Div.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Equal.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Exp.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Fill.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/FlipLeftRight.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/FloorDiv.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/FusedBatchNorm.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/FusedConv2D.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/FusedDepthwiseConv2D.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/GatherNd.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/GatherV2.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Greater.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/GreaterEqual.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Less.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/LessEqual.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Log.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/LogicalAnd.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Max.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Maximum.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/MaxPool.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Min.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Minimum.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Multiply.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Negate.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/NonMaxSuppression_util.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/NonMaxSuppressionV3.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/NonMaxSuppressionV4.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/NonMaxSuppressionV5.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/NotEqual.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/OneHot.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/OnesLike.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/PadV2.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Pow.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Prelu.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Relu.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Relu6.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/ResizeBilinear.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Reverse.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/RotateWithOffset.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Rsqrt.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/ScatterNd.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/SelectV2.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Sigmoid.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Sin.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Slice.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Softmax.ts", 
"../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Split.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Sqrt.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Square.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/SquaredDifference.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/StridedSlice.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Sub.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Sum.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Tanh.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Tile.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/Unpack.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/kernels/ZerosLike.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/register_all_kernels.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/flags_wasm.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/backend_wasm.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/wasm-out/tfjs-backend-wasm-threaded-simd.worker.js", "../node_modules/@tensorflow/tfjs-backend-wasm/src/backend_wasm.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/backend_wasm.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/base.ts", "../node_modules/@tensorflow/tfjs-backend-wasm/src/index.ts", "../src/tf.js", "../src/face/triangulation.js", "../src/human.js", "../src/hand/box.js", "../src/hand/util.js", "../src/human.js", "../config.js", "../src/human.js"],
- "sourcesContent": ["\"use strict\";\n\n// ref: https://github.com/tc39/proposal-global\nvar getGlobal = function () {\n\t// the only reliable means to get the global object is\n\t// `Function('return this')()`\n\t// However, this causes CSP violations in Chrome apps.\n\tif (typeof self !== 'undefined') { return self; }\n\tif (typeof window !== 'undefined') { return window; }\n\tif (typeof global !== 'undefined') { return global; }\n\tthrow new Error('unable to locate global object');\n}\n\nvar global = getGlobal();\n\nmodule.exports = exports = global.fetch;\n\n// Needed for TypeScript and Webpack.\nif (global.fetch) {\n\texports.default = global.fetch.bind(global);\n}\n\nexports.Headers = global.Headers;\nexports.Request = global.Request;\nexports.Response = global.Response;", "/* eslint-disable node/no-deprecated-api */\nvar buffer = require('buffer')\nvar Buffer = buffer.Buffer\n\n// alternative to using Object.keys for old browsers\nfunction copyProps (src, dst) {\n for (var key in src) {\n dst[key] = src[key]\n }\n}\nif (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) {\n module.exports = buffer\n} else {\n // Copy properties from require('buffer')\n copyProps(buffer, exports)\n exports.Buffer = SafeBuffer\n}\n\nfunction SafeBuffer (arg, encodingOrOffset, length) {\n return Buffer(arg, encodingOrOffset, length)\n}\n\n// Copy static methods from Buffer\ncopyProps(Buffer, SafeBuffer)\n\nSafeBuffer.from = function (arg, encodingOrOffset, length) {\n if (typeof arg === 'number') {\n throw new TypeError('Argument must not be a number')\n }\n return Buffer(arg, encodingOrOffset, length)\n}\n\nSafeBuffer.alloc = function (size, fill, encoding) {\n if (typeof size !== 'number') {\n throw new TypeError('Argument must be a number')\n }\n var buf = Buffer(size)\n if (fill !== undefined) {\n if (typeof encoding === 'string') {\n buf.fill(fill, encoding)\n } else {\n buf.fill(fill)\n }\n } else {\n buf.fill(0)\n }\n return buf\n}\n\nSafeBuffer.allocUnsafe = function (size) {\n if (typeof size !== 'number') {\n throw new TypeError('Argument must be a number')\n }\n return Buffer(size)\n}\n\nSafeBuffer.allocUnsafeSlow = function (size) {\n if (typeof size !== 'number') {\n throw new TypeError('Argument must be a number')\n }\n return buffer.SlowBuffer(size)\n}\n", "// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n'use strict';\n\n/**/\n\nvar Buffer = require('safe-buffer').Buffer;\n/* */\n\nvar isEncoding = Buffer.isEncoding || function (encoding) {\n encoding = '' + encoding;\n switch (encoding && encoding.toLowerCase()) {\n case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw':\n return true;\n default:\n return false;\n }\n};\n\nfunction _normalizeEncoding(enc) {\n if (!enc) return 'utf8';\n var retried;\n while (true) {\n switch (enc) {\n case 'utf8':\n case 'utf-8':\n return 'utf8';\n case 'ucs2':\n case 'ucs-2':\n case 'utf16le':\n case 'utf-16le':\n return 'utf16le';\n case 'latin1':\n case 'binary':\n return 'latin1';\n case 'base64':\n case 'ascii':\n case 'hex':\n return enc;\n default:\n if (retried) return; // undefined\n enc = ('' + enc).toLowerCase();\n retried = true;\n }\n }\n};\n\n// Do not cache `Buffer.isEncoding` when checking encoding names as some\n// modules monkey-patch it to support additional encodings\nfunction normalizeEncoding(enc) {\n var nenc = _normalizeEncoding(enc);\n if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc);\n return nenc || enc;\n}\n\n// StringDecoder provides an interface for efficiently splitting a series of\n// buffers into a series of JS strings without breaking apart multi-byte\n// characters.\nexports.StringDecoder = StringDecoder;\nfunction StringDecoder(encoding) {\n this.encoding = normalizeEncoding(encoding);\n var nb;\n switch (this.encoding) {\n case 'utf16le':\n this.text = utf16Text;\n this.end = utf16End;\n nb = 4;\n break;\n case 'utf8':\n this.fillLast = utf8FillLast;\n nb = 4;\n break;\n case 'base64':\n this.text = base64Text;\n this.end = base64End;\n nb = 3;\n break;\n default:\n this.write = simpleWrite;\n this.end = simpleEnd;\n return;\n }\n this.lastNeed = 0;\n this.lastTotal = 0;\n this.lastChar = Buffer.allocUnsafe(nb);\n}\n\nStringDecoder.prototype.write = function (buf) {\n if (buf.length === 0) return '';\n var r;\n var i;\n if (this.lastNeed) {\n r = this.fillLast(buf);\n if (r === undefined) return '';\n i = this.lastNeed;\n this.lastNeed = 0;\n } else {\n i = 0;\n }\n if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i);\n return r || '';\n};\n\nStringDecoder.prototype.end = utf8End;\n\n// Returns only complete characters in a Buffer\nStringDecoder.prototype.text = utf8Text;\n\n// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer\nStringDecoder.prototype.fillLast = function (buf) {\n if (this.lastNeed <= buf.length) {\n buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed);\n return this.lastChar.toString(this.encoding, 0, this.lastTotal);\n }\n buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length);\n this.lastNeed -= buf.length;\n};\n\n// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a\n// continuation byte. If an invalid byte is detected, -2 is returned.\nfunction utf8CheckByte(byte) {\n if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4;\n return byte >> 6 === 0x02 ? 
-1 : -2;\n}\n\n// Checks at most 3 bytes at the end of a Buffer in order to detect an\n// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4)\n// needed to complete the UTF-8 character (if applicable) are returned.\nfunction utf8CheckIncomplete(self, buf, i) {\n var j = buf.length - 1;\n if (j < i) return 0;\n var nb = utf8CheckByte(buf[j]);\n if (nb >= 0) {\n if (nb > 0) self.lastNeed = nb - 1;\n return nb;\n }\n if (--j < i || nb === -2) return 0;\n nb = utf8CheckByte(buf[j]);\n if (nb >= 0) {\n if (nb > 0) self.lastNeed = nb - 2;\n return nb;\n }\n if (--j < i || nb === -2) return 0;\n nb = utf8CheckByte(buf[j]);\n if (nb >= 0) {\n if (nb > 0) {\n if (nb === 2) nb = 0;else self.lastNeed = nb - 3;\n }\n return nb;\n }\n return 0;\n}\n\n// Validates as many continuation bytes for a multi-byte UTF-8 character as\n// needed or are available. If we see a non-continuation byte where we expect\n// one, we \"replace\" the validated continuation bytes we've seen so far with\n// a single UTF-8 replacement character ('\\ufffd'), to match v8's UTF-8 decoding\n// behavior. The continuation byte check is included three times in the case\n// where all of the continuation bytes for a character exist in the same buffer.\n// It is also done this way as a slight performance increase instead of using a\n// loop.\nfunction utf8CheckExtraBytes(self, buf, p) {\n if ((buf[0] & 0xC0) !== 0x80) {\n self.lastNeed = 0;\n return '\\ufffd';\n }\n if (self.lastNeed > 1 && buf.length > 1) {\n if ((buf[1] & 0xC0) !== 0x80) {\n self.lastNeed = 1;\n return '\\ufffd';\n }\n if (self.lastNeed > 2 && buf.length > 2) {\n if ((buf[2] & 0xC0) !== 0x80) {\n self.lastNeed = 2;\n return '\\ufffd';\n }\n }\n }\n}\n\n// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer.\nfunction utf8FillLast(buf) {\n var p = this.lastTotal - this.lastNeed;\n var r = utf8CheckExtraBytes(this, buf, p);\n if (r !== undefined) return r;\n if (this.lastNeed <= buf.length) {\n buf.copy(this.lastChar, p, 0, this.lastNeed);\n return this.lastChar.toString(this.encoding, 0, this.lastTotal);\n }\n buf.copy(this.lastChar, p, 0, buf.length);\n this.lastNeed -= buf.length;\n}\n\n// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a\n// partial character, the character's bytes are buffered until the required\n// number of bytes are available.\nfunction utf8Text(buf, i) {\n var total = utf8CheckIncomplete(this, buf, i);\n if (!this.lastNeed) return buf.toString('utf8', i);\n this.lastTotal = total;\n var end = buf.length - (total - this.lastNeed);\n buf.copy(this.lastChar, 0, end);\n return buf.toString('utf8', i, end);\n}\n\n// For UTF-8, a replacement character is added when ending on a partial\n// character.\nfunction utf8End(buf) {\n var r = buf && buf.length ? this.write(buf) : '';\n if (this.lastNeed) return r + '\\ufffd';\n return r;\n}\n\n// UTF-16LE typically needs two bytes per character, but even if we have an even\n// number of bytes available, we need to check if we end on a leading/high\n// surrogate. 
In that case, we need to wait for the next two bytes in order to\n// decode the last character properly.\nfunction utf16Text(buf, i) {\n if ((buf.length - i) % 2 === 0) {\n var r = buf.toString('utf16le', i);\n if (r) {\n var c = r.charCodeAt(r.length - 1);\n if (c >= 0xD800 && c <= 0xDBFF) {\n this.lastNeed = 2;\n this.lastTotal = 4;\n this.lastChar[0] = buf[buf.length - 2];\n this.lastChar[1] = buf[buf.length - 1];\n return r.slice(0, -1);\n }\n }\n return r;\n }\n this.lastNeed = 1;\n this.lastTotal = 2;\n this.lastChar[0] = buf[buf.length - 1];\n return buf.toString('utf16le', i, buf.length - 1);\n}\n\n// For UTF-16LE we do not explicitly append special replacement characters if we\n// end on a partial character, we simply let v8 handle that.\nfunction utf16End(buf) {\n var r = buf && buf.length ? this.write(buf) : '';\n if (this.lastNeed) {\n var end = this.lastTotal - this.lastNeed;\n return r + this.lastChar.toString('utf16le', 0, end);\n }\n return r;\n}\n\nfunction base64Text(buf, i) {\n var n = (buf.length - i) % 3;\n if (n === 0) return buf.toString('base64', i);\n this.lastNeed = 3 - n;\n this.lastTotal = 3;\n if (n === 1) {\n this.lastChar[0] = buf[buf.length - 1];\n } else {\n this.lastChar[0] = buf[buf.length - 2];\n this.lastChar[1] = buf[buf.length - 1];\n }\n return buf.toString('base64', i, buf.length - n);\n}\n\nfunction base64End(buf) {\n var r = buf && buf.length ? this.write(buf) : '';\n if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed);\n return r;\n}\n\n// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex)\nfunction simpleWrite(buf) {\n return buf.toString(this.encoding);\n}\n\nfunction simpleEnd(buf) {\n return buf && buf.length ? this.write(buf) : '';\n}", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const EPSILON_FLOAT32 = 1e-7;\nexport const EPSILON_FLOAT16 = 1e-4;\n/** Convenient class for storing tensor-related data. */\nexport class DataStorage {\n constructor(backend, dataMover) {\n this.backend = backend;\n this.dataMover = dataMover;\n this.data = new WeakMap();\n this.dataIdsCount = 0;\n }\n get(dataId) {\n if (!this.data.has(dataId)) {\n this.dataMover.moveData(this.backend, dataId);\n }\n return this.data.get(dataId);\n }\n set(dataId, value) {\n this.dataIdsCount++;\n this.data.set(dataId, value);\n }\n has(dataId) {\n return this.data.has(dataId);\n }\n delete(dataId) {\n this.dataIdsCount--;\n return this.data.delete(dataId);\n }\n numDataIds() {\n return this.dataIdsCount;\n }\n}\n/**\n * The interface that defines the kernels that should be implemented when\n * adding a new backend. 
New backends don't need to implement every one of the\n * methods, this can be done gradually (throw an error for unimplemented\n * methods).\n */\nexport class KernelBackend {\n time(f) {\n return notYetImplemented('time');\n }\n read(dataId) {\n return notYetImplemented('read');\n }\n readSync(dataId) {\n return notYetImplemented('readSync');\n }\n numDataIds() {\n return notYetImplemented('numDataIds');\n }\n disposeData(dataId) {\n return notYetImplemented('disposeData');\n }\n write(values, shape, dtype) {\n return notYetImplemented('write');\n }\n move(dataId, values, shape, dtype) {\n return notYetImplemented('move');\n }\n memory() {\n return notYetImplemented('memory');\n }\n /** Returns the highest precision for floats in bits (e.g. 16 or 32) */\n floatPrecision() {\n return notYetImplemented('floatPrecision');\n }\n /** Returns the smallest representable number. */\n epsilon() {\n return this.floatPrecision() === 32 ? EPSILON_FLOAT32 : EPSILON_FLOAT16;\n }\n batchMatMul(a, b, transposeA, transposeB) {\n return notYetImplemented('batchMatMul');\n }\n fusedBatchMatMul({ a, b, transposeA, transposeB, bias, activation, preluActivationWeights }) {\n return notYetImplemented('fusedBatchMatMul');\n }\n slice(x, begin, size) {\n return notYetImplemented('slice');\n }\n stridedSlice(x, begin, end, strides) {\n return notYetImplemented('stridedSlice');\n }\n unstack(x, axis) {\n return notYetImplemented('unstack');\n }\n reverse(a, axis) {\n return notYetImplemented('reverse');\n }\n concat(tensors, axis) {\n return notYetImplemented('concat');\n }\n neg(a) {\n return notYetImplemented('neg');\n }\n add(a, b) {\n return notYetImplemented('add');\n }\n addN(tensors) {\n return notYetImplemented('addN');\n }\n subtract(a, b) {\n return notYetImplemented('subtract');\n }\n multiply(a, b) {\n return notYetImplemented('multiply');\n }\n realDivide(a, b) {\n return notYetImplemented('realDivide');\n }\n floorDiv(a, b) {\n return notYetImplemented('floorDiv');\n }\n sum(x, axes) {\n return notYetImplemented('sum');\n }\n prod(x, axes) {\n return notYetImplemented('prod');\n }\n unsortedSegmentSum(x, segmentIds, numSegments) {\n return notYetImplemented('unsortedSegmentSum');\n }\n argMin(x, axis) {\n return notYetImplemented('argMin');\n }\n argMax(x, axis) {\n return notYetImplemented('argMax');\n }\n equal(a, b) {\n return notYetImplemented('equal');\n }\n notEqual(a, b) {\n return notYetImplemented('notEqual');\n }\n less(a, b) {\n return notYetImplemented('less');\n }\n lessEqual(a, b) {\n return notYetImplemented('lessEqual');\n }\n greater(a, b) {\n return notYetImplemented('greater');\n }\n greaterEqual(a, b) {\n return notYetImplemented('greaterEqual');\n }\n logicalNot(a) {\n return notYetImplemented('logicalNot');\n }\n logicalAnd(a, b) {\n return notYetImplemented('logicalAnd');\n }\n logicalOr(a, b) {\n return notYetImplemented('logicalOr');\n }\n where(condition) {\n return notYetImplemented('where');\n }\n select(condition, a, b) {\n return notYetImplemented('select');\n }\n topk(x, k, sorted) {\n return notYetImplemented('topk');\n }\n min(x, axes) {\n return notYetImplemented('min');\n }\n minimum(a, b) {\n return notYetImplemented('minimum');\n }\n mod(a, b) {\n return notYetImplemented('mod');\n }\n max(x, axes) {\n return notYetImplemented('max');\n }\n maximum(a, b) {\n return notYetImplemented('maximum');\n }\n all(x, axes) {\n return notYetImplemented('all');\n }\n any(x, axes) {\n return notYetImplemented('any');\n }\n squaredDifference(a, b) {\n return 
notYetImplemented('squaredDifference');\n }\n ceil(x) {\n return notYetImplemented('ceil');\n }\n floor(x) {\n return notYetImplemented('floor');\n }\n round(x) {\n return notYetImplemented('round');\n }\n sign(x) {\n return notYetImplemented('sign');\n }\n isNaN(x) {\n return notYetImplemented('isNaN');\n }\n isInf(x) {\n return notYetImplemented('isInf');\n }\n isFinite(x) {\n return notYetImplemented('isFinite');\n }\n pow(a, b) {\n return notYetImplemented('pow');\n }\n exp(x) {\n return notYetImplemented('exp');\n }\n expm1(x) {\n return notYetImplemented('expm1');\n }\n softmax(x, dim) {\n return notYetImplemented('softmax');\n }\n log(x) {\n return notYetImplemented('log');\n }\n log1p(x) {\n return notYetImplemented('log1p');\n }\n sqrt(x) {\n return notYetImplemented('sqrt');\n }\n rsqrt(x) {\n return notYetImplemented('rsqrt');\n }\n square(x) {\n return notYetImplemented('square');\n }\n reciprocal(x) {\n return notYetImplemented('reciprocal');\n }\n relu(x) {\n return notYetImplemented('relu');\n }\n relu6(x) {\n return notYetImplemented('relu6');\n }\n prelu(x, a) {\n return notYetImplemented('prelu');\n }\n elu(x) {\n return notYetImplemented('elu');\n }\n eluDer(dy, y) {\n return notYetImplemented('eluDer');\n }\n selu(x) {\n return notYetImplemented('selu');\n }\n int(x) {\n return notYetImplemented('int');\n }\n clip(x, min, max) {\n return notYetImplemented('clip');\n }\n abs(x) {\n return notYetImplemented('abs');\n }\n complexAbs(x) {\n return notYetImplemented('complexAbs');\n }\n sigmoid(x) {\n return notYetImplemented('sigmoid');\n }\n softplus(x) {\n return notYetImplemented('softplus');\n }\n sin(x) {\n return notYetImplemented('sin');\n }\n cos(x) {\n return notYetImplemented('cos');\n }\n tan(x) {\n return notYetImplemented('tan');\n }\n asin(x) {\n return notYetImplemented('asin');\n }\n acos(x) {\n return notYetImplemented('acos');\n }\n atan(x) {\n return notYetImplemented('atan');\n }\n atan2(a, b) {\n return notYetImplemented('atan2');\n }\n sinh(x) {\n return notYetImplemented('sinh');\n }\n cosh(x) {\n return notYetImplemented('cosh');\n }\n tanh(x) {\n return notYetImplemented('tanh');\n }\n asinh(x) {\n return notYetImplemented('asinh');\n }\n acosh(x) {\n return notYetImplemented('acosh');\n }\n atanh(x) {\n return notYetImplemented('atanh');\n }\n erf(x) {\n return notYetImplemented('erf');\n }\n step(x, alpha) {\n return notYetImplemented('step');\n }\n fusedConv2d({ input, filter, convInfo, bias, activation, preluActivationWeights }) {\n return notYetImplemented('fusedConv2d');\n }\n conv2d(x, filter, convInfo) {\n return notYetImplemented('conv2d');\n }\n conv2dDerInput(dy, filter, convInfo) {\n return notYetImplemented('conv2dDerInput');\n }\n conv2dDerFilter(x, dY, convInfo) {\n return notYetImplemented('conv2dDerFilter');\n }\n fusedDepthwiseConv2D({ input, filter, convInfo, bias, activation, preluActivationWeights }) {\n return notYetImplemented('fusedDepthwiseConv2D');\n }\n depthwiseConv2D(input, filter, convInfo) {\n return notYetImplemented('depthwiseConv2D');\n }\n depthwiseConv2DDerInput(dy, filter, convInfo) {\n return notYetImplemented('depthwiseConv2DDerInput');\n }\n depthwiseConv2DDerFilter(x, dY, convInfo) {\n return notYetImplemented('depthwiseConv2DDerFilter');\n }\n conv3d(x, filter, convInfo) {\n return notYetImplemented('conv3d');\n }\n conv3dDerInput(dy, filter, convInfo) {\n return notYetImplemented('conv3dDerInput');\n }\n conv3dDerFilter(x, dY, convInfo) {\n return notYetImplemented('conv3dDerFilter');\n }\n maxPool(x, 
convInfo) {\n return notYetImplemented('maxPool');\n }\n maxPoolBackprop(dy, x, y, convInfo) {\n return notYetImplemented('maxPoolBackprop');\n }\n avgPool(x, convInfo) {\n return notYetImplemented('avgPool');\n }\n avgPoolBackprop(dy, x, convInfo) {\n return notYetImplemented('avgPoolBackprop');\n }\n avgPool3d(x, convInfo) {\n return notYetImplemented('avgPool3d');\n }\n avgPool3dBackprop(dy, x, convInfo) {\n return notYetImplemented('avgPool3dBackprop');\n }\n maxPool3d(x, convInfo) {\n return notYetImplemented('maxPool3d');\n }\n maxPool3dBackprop(dy, x, y, convInfo) {\n return notYetImplemented('maxPool3dBackprop');\n }\n reshape(x, shape) {\n return notYetImplemented('reshape');\n }\n cast(x, dtype) {\n return notYetImplemented('cast');\n }\n tile(x, reps) {\n return notYetImplemented('tile');\n }\n pad(x, paddings, constantValue) {\n return notYetImplemented('pad');\n }\n transpose(x, perm) {\n return notYetImplemented('transpose');\n }\n gather(x, indices, axis) {\n return notYetImplemented('gather');\n }\n gatherND(x, indices) {\n return notYetImplemented('gatherND');\n }\n scatterND(indices, updates, shape) {\n return notYetImplemented('scatterND');\n }\n batchToSpaceND(x, blockShape, crops) {\n return notYetImplemented('batchToSpaceND');\n }\n spaceToBatchND(x, blockShape, paddings) {\n return notYetImplemented('spaceToBatchND');\n }\n resizeBilinear(x, newHeight, newWidth, alignCorners) {\n return notYetImplemented('resizeBilinear');\n }\n resizeBilinearBackprop(dy, x, alignCorners) {\n return notYetImplemented('resizeBilinearBackprop');\n }\n resizeNearestNeighbor(x, newHEight, newWidth, alignCorners) {\n return notYetImplemented('resizeNearestNeighbor');\n }\n resizeNearestNeighborBackprop(dy, x, alignCorners) {\n return notYetImplemented('resizeNearestNeighborBackprop');\n }\n batchNorm(x, mean, variance, offset, scale, varianceEpsilon) {\n return notYetImplemented('batchNorm');\n }\n localResponseNormalization4D(x, radius, bias, alpha, beta) {\n return notYetImplemented('localResponseNormalization4D');\n }\n LRNGrad(dy, inputImage, outputImage, radius, bias, alpha, beta) {\n return notYetImplemented('LRNGrad');\n }\n multinomial(logits, normalized, numSamples, seed) {\n return notYetImplemented('multinomial');\n }\n oneHot(indices, depth, onValue, offValue) {\n return notYetImplemented('oneHot');\n }\n cumsum(x, axis, exclusive, reverse) {\n return notYetImplemented('cumsum');\n }\n nonMaxSuppression(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold) {\n return notYetImplemented('nonMaxSuppression');\n }\n fft(x) {\n return notYetImplemented('fft');\n }\n ifft(x) {\n return notYetImplemented('ifft');\n }\n complex(real, imag) {\n return notYetImplemented('complex');\n }\n real(input) {\n return notYetImplemented('real');\n }\n imag(input) {\n return notYetImplemented('imag');\n }\n cropAndResize(image, boxes, boxIndex, cropSize, method, extrapolationValue) {\n return notYetImplemented('cropAndResize');\n }\n depthToSpace(x, blockSize, dataFormat) {\n return notYetImplemented('depthToSpace');\n }\n // Aligns with the \"SplitV\" kernel in TensorFlow.\n split(value, sizeSplits, axis) {\n return notYetImplemented('split');\n }\n sparseToDense(sparseIndices, sparseValues, outputShape, defaultValue) {\n return notYetImplemented('sparseToDense');\n }\n diag(x) {\n return notYetImplemented('diag');\n }\n fill(shape, value, dtype) {\n return notYetImplemented('fill');\n }\n onesLike(x) {\n return notYetImplemented('onesLike');\n }\n zerosLike(x) {\n return 
notYetImplemented('zerosLike');\n }\n linspace(start, stop, num) {\n return notYetImplemented('linspace');\n }\n dispose() {\n return notYetImplemented('dispose');\n }\n}\nfunction notYetImplemented(kernelName) {\n throw new Error(`'${kernelName}' not yet implemented or not found in the registry. ` +\n `This kernel may not be supported by the tfjs backend you have chosen`);\n}\n//# sourceMappingURL=backend.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * Shuffles the array in-place using Fisher-Yates algorithm.\n *\n * ```js\n * const a = [1, 2, 3, 4, 5];\n * tf.util.shuffle(a);\n * console.log(a);\n * ```\n *\n * @param array The array to shuffle in-place.\n *\n * @doc {heading: 'Util', namespace: 'util'}\n */\n// tslint:disable-next-line:no-any\nexport function shuffle(array) {\n let counter = array.length;\n let temp = 0;\n let index = 0;\n // While there are elements in the array\n while (counter > 0) {\n // Pick a random index\n index = (Math.random() * counter) | 0;\n // Decrease counter by 1\n counter--;\n // And swap the last element with it\n temp = array[counter];\n array[counter] = array[index];\n array[index] = temp;\n }\n}\n/** Clamps a value to a specified range. */\nexport function clamp(min, x, max) {\n return Math.max(min, Math.min(x, max));\n}\nexport function nearestLargerEven(val) {\n return val % 2 === 0 ? val : val + 1;\n}\nexport function sum(arr) {\n let sum = 0;\n for (let i = 0; i < arr.length; i++) {\n sum += arr[i];\n }\n return sum;\n}\n/**\n * Returns a sample from a uniform [a, b) distribution.\n *\n * @param a The minimum support (inclusive).\n * @param b The maximum support (exclusive).\n * @return A pseudorandom number on the half-open interval [a,b).\n */\nexport function randUniform(a, b) {\n const r = Math.random();\n return (b * r) + (1 - r) * a;\n}\n/** Returns the squared Euclidean distance between two vectors. */\nexport function distSquared(a, b) {\n let result = 0;\n for (let i = 0; i < a.length; i++) {\n const diff = Number(a[i]) - Number(b[i]);\n result += diff * diff;\n }\n return result;\n}\n/**\n * Asserts that the expression is true. Otherwise throws an error with the\n * provided message.\n *\n * ```js\n * const x = 2;\n * tf.util.assert(x === 2, 'x is not 2');\n * ```\n *\n * @param expr The expression to assert (as a boolean).\n * @param msg A function that returns the message to report when throwing an\n * error. We use a function for performance reasons.\n *\n * @doc {heading: 'Util', namespace: 'util'}\n */\nexport function assert(expr, msg) {\n if (!expr) {\n throw new Error(typeof msg === 'string' ? 
msg : msg());\n }\n}\nexport function assertShapesMatch(shapeA, shapeB, errorMessagePrefix = '') {\n assert(arraysEqual(shapeA, shapeB), () => errorMessagePrefix + ` Shapes ${shapeA} and ${shapeB} must match`);\n}\nexport function assertNonNull(a) {\n assert(a != null, () => `The input to the tensor constructor must be a non-null value.`);\n}\n// NOTE: We explicitly type out what T extends instead of any so that\n// util.flatten on a nested array of number doesn't try to infer T as a\n// number[][], causing us to explicitly type util.flatten().\n/**\n * Flattens an arbitrarily nested array.\n *\n * ```js\n * const a = [[1, 2], [3, 4], [5, [6, [7]]]];\n * const flat = tf.util.flatten(a);\n * console.log(flat);\n * ```\n *\n * @param arr The nested array to flatten.\n * @param result The destination array which holds the elements.\n * @param skipTypedArray If true, avoids flattening the typed arrays. Defaults\n * to false.\n *\n * @doc {heading: 'Util', namespace: 'util'}\n */\nexport function flatten(arr, result = [], skipTypedArray = false) {\n if (result == null) {\n result = [];\n }\n if (Array.isArray(arr) || isTypedArray(arr) && !skipTypedArray) {\n for (let i = 0; i < arr.length; ++i) {\n flatten(arr[i], result, skipTypedArray);\n }\n }\n else {\n result.push(arr);\n }\n return result;\n}\n/**\n * Returns the size (number of elements) of the tensor given its shape.\n *\n * ```js\n * const shape = [3, 4, 2];\n * const size = tf.util.sizeFromShape(shape);\n * console.log(size);\n * ```\n *\n * @doc {heading: 'Util', namespace: 'util'}\n */\nexport function sizeFromShape(shape) {\n if (shape.length === 0) {\n // Scalar.\n return 1;\n }\n let size = shape[0];\n for (let i = 1; i < shape.length; i++) {\n size *= shape[i];\n }\n return size;\n}\nexport function isScalarShape(shape) {\n return shape.length === 0;\n}\nexport function arraysEqual(n1, n2) {\n if (n1 === n2) {\n return true;\n }\n if (n1 == null || n2 == null) {\n return false;\n }\n if (n1.length !== n2.length) {\n return false;\n }\n for (let i = 0; i < n1.length; i++) {\n if (n1[i] !== n2[i]) {\n return false;\n }\n }\n return true;\n}\nexport function isInt(a) {\n return a % 1 === 0;\n}\nexport function tanh(x) {\n // tslint:disable-next-line:no-any\n if (Math.tanh != null) {\n // tslint:disable-next-line:no-any\n return Math.tanh(x);\n }\n if (x === Infinity) {\n return 1;\n }\n else if (x === -Infinity) {\n return -1;\n }\n else {\n const e2x = Math.exp(2 * x);\n return (e2x - 1) / (e2x + 1);\n }\n}\nexport function sizeToSquarishShape(size) {\n const width = Math.ceil(Math.sqrt(size));\n return [width, Math.ceil(size / width)];\n}\n/**\n * Creates a new array with randomized indicies to a given quantity.\n *\n * ```js\n * const randomTen = tf.util.createShuffledIndices(10);\n * console.log(randomTen);\n * ```\n *\n * @param number Quantity of how many shuffled indicies to create.\n *\n * @doc {heading: 'Util', namespace: 'util'}\n */\nexport function createShuffledIndices(n) {\n const shuffledIndices = new Uint32Array(n);\n for (let i = 0; i < n; ++i) {\n shuffledIndices[i] = i;\n }\n shuffle(shuffledIndices);\n return shuffledIndices;\n}\nexport function rightPad(a, size) {\n if (size <= a.length) {\n return a;\n }\n return a + ' '.repeat(size - a.length);\n}\nexport function repeatedTry(checkFn, delayFn = (counter) => 0, maxCounter) {\n return new Promise((resolve, reject) => {\n let tryCount = 0;\n const tryFn = () => {\n if (checkFn()) {\n resolve();\n return;\n }\n tryCount++;\n const nextBackoff = 
delayFn(tryCount);\n if (maxCounter != null && tryCount >= maxCounter) {\n reject();\n return;\n }\n setTimeout(tryFn, nextBackoff);\n };\n tryFn();\n });\n}\n/**\n * Given the full size of the array and a shape that may contain -1 as the\n * implicit dimension, returns the inferred shape where -1 is replaced.\n * E.g. For shape=[2, -1, 3] and size=24, it will return [2, 4, 3].\n *\n * @param shape The shape, which may contain -1 in some dimension.\n * @param size The full size (number of elements) of the array.\n * @return The inferred shape where -1 is replaced with the inferred size.\n */\nexport function inferFromImplicitShape(shape, size) {\n let shapeProd = 1;\n let implicitIdx = -1;\n for (let i = 0; i < shape.length; ++i) {\n if (shape[i] >= 0) {\n shapeProd *= shape[i];\n }\n else if (shape[i] === -1) {\n if (implicitIdx !== -1) {\n throw Error(`Shapes can only have 1 implicit size. ` +\n `Found -1 at dim ${implicitIdx} and dim ${i}`);\n }\n implicitIdx = i;\n }\n else if (shape[i] < 0) {\n throw Error(`Shapes can not be < 0. Found ${shape[i]} at dim ${i}`);\n }\n }\n if (implicitIdx === -1) {\n if (size > 0 && size !== shapeProd) {\n throw Error(`Size(${size}) must match the product of shape ${shape}`);\n }\n return shape;\n }\n if (shapeProd === 0) {\n throw Error(`Cannot infer the missing size in [${shape}] when ` +\n `there are 0 elements`);\n }\n if (size % shapeProd !== 0) {\n throw Error(`The implicit shape can't be a fractional number. ` +\n `Got ${size} / ${shapeProd}`);\n }\n const newShape = shape.slice();\n newShape[implicitIdx] = size / shapeProd;\n return newShape;\n}\nexport function parseAxisParam(axis, shape) {\n const rank = shape.length;\n // Normalize input\n axis = axis == null ? shape.map((s, i) => i) : [].concat(axis);\n // Check for valid range\n assert(axis.every(ax => ax >= -rank && ax < rank), () => `All values in axis param must be in range [-${rank}, ${rank}) but ` +\n `got axis ${axis}`);\n // Check for only integers\n assert(axis.every(ax => isInt(ax)), () => `All values in axis param must be integers but ` +\n `got axis ${axis}`);\n // Handle negative axis.\n return axis.map(a => a < 0 ? rank + a : a);\n}\n/** Reduces the shape by removing all dimensions of shape 1. 
*/\nexport function squeezeShape(shape, axis) {\n const newShape = [];\n const keptDims = [];\n const isEmptyArray = axis != null && Array.isArray(axis) && axis.length === 0;\n const axes = (axis == null || isEmptyArray) ?\n null :\n parseAxisParam(axis, shape).sort();\n let j = 0;\n for (let i = 0; i < shape.length; ++i) {\n if (axes != null) {\n if (axes[j] === i && shape[i] !== 1) {\n throw new Error(`Can't squeeze axis ${i} since its dim '${shape[i]}' is not 1`);\n }\n if ((axes[j] == null || axes[j] > i) && shape[i] === 1) {\n newShape.push(shape[i]);\n keptDims.push(i);\n }\n if (axes[j] <= i) {\n j++;\n }\n }\n if (shape[i] !== 1) {\n newShape.push(shape[i]);\n keptDims.push(i);\n }\n }\n return { newShape, keptDims };\n}\nexport function getTypedArrayFromDType(dtype, size) {\n let values = null;\n if (dtype == null || dtype === 'float32') {\n values = new Float32Array(size);\n }\n else if (dtype === 'int32') {\n values = new Int32Array(size);\n }\n else if (dtype === 'bool') {\n values = new Uint8Array(size);\n }\n else {\n throw new Error(`Unknown data type ${dtype}`);\n }\n return values;\n}\nexport function getArrayFromDType(dtype, size) {\n let values = null;\n if (dtype == null || dtype === 'float32') {\n values = new Float32Array(size);\n }\n else if (dtype === 'int32') {\n values = new Int32Array(size);\n }\n else if (dtype === 'bool') {\n values = new Uint8Array(size);\n }\n else if (dtype === 'string') {\n values = new Array(size);\n }\n else {\n throw new Error(`Unknown data type ${dtype}`);\n }\n return values;\n}\nexport function checkConversionForErrors(vals, dtype) {\n for (let i = 0; i < vals.length; i++) {\n const num = vals[i];\n if (isNaN(num) || !isFinite(num)) {\n throw Error(`A tensor of type ${dtype} being uploaded contains ${num}.`);\n }\n }\n}\n/** Returns true if the dtype is valid. */\nexport function isValidDtype(dtype) {\n return dtype === 'bool' || dtype === 'complex64' || dtype === 'float32' ||\n dtype === 'int32' || dtype === 'string';\n}\n/**\n * Returns true if the new type can't encode the old type without loss of\n * precision.\n */\nexport function hasEncodingLoss(oldType, newType) {\n if (newType === 'complex64') {\n return false;\n }\n if (newType === 'float32' && oldType !== 'complex64') {\n return false;\n }\n if (newType === 'int32' && oldType !== 'float32' && oldType !== 'complex64') {\n return false;\n }\n if (newType === 'bool' && oldType === 'bool') {\n return false;\n }\n return true;\n}\nexport function isTypedArray(a) {\n return a instanceof Float32Array || a instanceof Int32Array ||\n a instanceof Uint8Array;\n}\nexport function bytesPerElement(dtype) {\n if (dtype === 'float32' || dtype === 'int32') {\n return 4;\n }\n else if (dtype === 'complex64') {\n return 8;\n }\n else if (dtype === 'bool') {\n return 1;\n }\n else {\n throw new Error(`Unknown dtype ${dtype}`);\n }\n}\n/**\n * Returns the approximate number of bytes allocated in the string array - 2\n * bytes per character. Computing the exact bytes for a native string in JS is\n * not possible since it depends on the encoding of the html page that serves\n * the website.\n */\nexport function bytesFromStringArray(arr) {\n if (arr == null) {\n return 0;\n }\n let bytes = 0;\n arr.forEach(x => bytes += x.length);\n return bytes;\n}\n/** Returns true if the value is a string. 
*/\nexport function isString(value) {\n return typeof value === 'string' || value instanceof String;\n}\nexport function isBoolean(value) {\n return typeof value === 'boolean';\n}\nexport function isNumber(value) {\n return typeof value === 'number';\n}\nexport function inferDtype(values) {\n if (Array.isArray(values)) {\n return inferDtype(values[0]);\n }\n if (values instanceof Float32Array) {\n return 'float32';\n }\n else if (values instanceof Int32Array || values instanceof Uint8Array) {\n return 'int32';\n }\n else if (isNumber(values)) {\n return 'float32';\n }\n else if (isString(values)) {\n return 'string';\n }\n else if (isBoolean(values)) {\n return 'bool';\n }\n return 'float32';\n}\nexport function isFunction(f) {\n return !!(f && f.constructor && f.call && f.apply);\n}\nexport function nearestDivisor(size, start) {\n for (let i = start; i < size; ++i) {\n if (size % i === 0) {\n return i;\n }\n }\n return size;\n}\nexport function computeStrides(shape) {\n const rank = shape.length;\n if (rank < 2) {\n return [];\n }\n // Last dimension has implicit stride of 1, thus having D-1 (instead of D)\n // strides.\n const strides = new Array(rank - 1);\n strides[rank - 2] = shape[rank - 1];\n for (let i = rank - 3; i >= 0; --i) {\n strides[i] = strides[i + 1] * shape[i + 1];\n }\n return strides;\n}\nfunction createNestedArray(offset, shape, a) {\n const ret = new Array();\n if (shape.length === 1) {\n const d = shape[0];\n for (let i = 0; i < d; i++) {\n ret[i] = a[offset + i];\n }\n }\n else {\n const d = shape[0];\n const rest = shape.slice(1);\n const len = rest.reduce((acc, c) => acc * c);\n for (let i = 0; i < d; i++) {\n ret[i] = createNestedArray(offset + i * len, rest, a);\n }\n }\n return ret;\n}\n// Provide a nested array of TypedArray in given shape.\nexport function toNestedArray(shape, a) {\n if (shape.length === 0) {\n // Scalar type should return a single number.\n return a[0];\n }\n const size = shape.reduce((acc, c) => acc * c);\n if (size === 0) {\n // A tensor with shape zero should be turned into empty list.\n return [];\n }\n if (size !== a.length) {\n throw new Error(`[${shape}] does not match the input size ${a.length}.`);\n }\n return createNestedArray(0, shape, a);\n}\nexport function makeOnesTypedArray(size, dtype) {\n const array = makeZerosTypedArray(size, dtype);\n for (let i = 0; i < array.length; i++) {\n array[i] = 1;\n }\n return array;\n}\nexport function makeZerosTypedArray(size, dtype) {\n if (dtype == null || dtype === 'float32' || dtype === 'complex64') {\n return new Float32Array(size);\n }\n else if (dtype === 'int32') {\n return new Int32Array(size);\n }\n else if (dtype === 'bool') {\n return new Uint8Array(size);\n }\n else {\n throw new Error(`Unknown data type ${dtype}`);\n }\n}\n/**\n * Make nested `TypedArray` filled with zeros.\n * @param shape The shape information for the nested array.\n * @param dtype dtype of the array element.\n */\nexport function makeZerosNestedTypedArray(shape, dtype) {\n const size = shape.reduce((prev, curr) => prev * curr, 1);\n if (dtype == null || dtype === 'float32') {\n return toNestedArray(shape, new Float32Array(size));\n }\n else if (dtype === 'int32') {\n return toNestedArray(shape, new Int32Array(size));\n }\n else if (dtype === 'bool') {\n return toNestedArray(shape, new Uint8Array(size));\n }\n else {\n throw new Error(`Unknown data type ${dtype}`);\n }\n}\nexport function assertNonNegativeIntegerDimensions(shape) {\n shape.forEach(dimSize => {\n assert(Number.isInteger(dimSize) && dimSize >= 0, 
() => `Tensor must have a shape comprised of positive integers but got ` +\n `shape [${shape}].`);\n });\n}\n/**\n * Computes flat index for a given location (multidimentionsal index) in a\n * Tensor/multidimensional array.\n *\n * @param locs Location in the tensor.\n * @param rank Rank of the tensor.\n * @param strides Tensor strides.\n */\nexport function locToIndex(locs, rank, strides) {\n if (rank === 0) {\n return 0;\n }\n else if (rank === 1) {\n return locs[0];\n }\n let index = locs[locs.length - 1];\n for (let i = 0; i < locs.length - 1; ++i) {\n index += strides[i] * locs[i];\n }\n return index;\n}\n/**\n * Computes the location (multidimensional index) in a tensor/multidimentional\n * array for a given flat index.\n *\n * @param index Index in flat array.\n * @param rank Rank of tensor.\n * @param strides Strides of tensor.\n */\nexport function indexToLoc(index, rank, strides) {\n if (rank === 0) {\n return [];\n }\n else if (rank === 1) {\n return [index];\n }\n const locs = new Array(rank);\n for (let i = 0; i < locs.length - 1; ++i) {\n locs[i] = Math.floor(index / strides[i]);\n index -= locs[i] * strides[i];\n }\n locs[locs.length - 1] = index;\n return locs;\n}\n/**\n * This method asserts whether an object is a Promise instance.\n * @param object\n */\n// tslint:disable-next-line: no-any\nexport function isPromise(object) {\n // We chose to not use 'obj instanceOf Promise' for two reasons:\n // 1. It only reliably works for es6 Promise, not other Promise\n // implementations.\n // 2. It doesn't work with framework that uses zone.js. zone.js monkey patch\n // the async calls, so it is possible the obj (patched) is comparing to a\n // pre-patched Promise.\n return object && object.then && typeof object.then === 'function';\n}\n//# sourceMappingURL=util_base.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { isPromise } from './util_base';\n// Expects flags from URL in the format ?tfjsflags=FLAG1:1,FLAG2:true.\nconst TENSORFLOWJS_FLAGS_PREFIX = 'tfjsflags';\n/**\n * The environment contains evaluated flags as well as the registered platform.\n * This is always used as a global singleton and can be retrieved with\n * `tf.env()`.\n *\n * @doc {heading: 'Environment'}\n */\nexport class Environment {\n // tslint:disable-next-line: no-any\n constructor(global) {\n this.global = global;\n this.flags = {};\n this.flagRegistry = {};\n this.urlFlags = {};\n this.populateURLFlags();\n }\n setPlatform(platformName, platform) {\n if (this.platform != null) {\n console.warn(`Platform ${this.platformName} has already been set. ` +\n `Overwriting the platform with ${platform}.`);\n }\n this.platformName = platformName;\n this.platform = platform;\n }\n registerFlag(flagName, evaluationFn, setHook) {\n this.flagRegistry[flagName] = { evaluationFn, setHook };\n // Override the flag value from the URL. 
This has to happen here because the\n // environment is initialized before flags get registered.\n if (this.urlFlags[flagName] != null) {\n const flagValue = this.urlFlags[flagName];\n console.warn(`Setting feature override from URL ${flagName}: ${flagValue}.`);\n this.set(flagName, flagValue);\n }\n }\n async getAsync(flagName) {\n if (flagName in this.flags) {\n return this.flags[flagName];\n }\n this.flags[flagName] = await this.evaluateFlag(flagName);\n return this.flags[flagName];\n }\n get(flagName) {\n if (flagName in this.flags) {\n return this.flags[flagName];\n }\n const flagValue = this.evaluateFlag(flagName);\n if (isPromise(flagValue)) {\n throw new Error(`Flag ${flagName} cannot be synchronously evaluated. ` +\n `Please use getAsync() instead.`);\n }\n this.flags[flagName] = flagValue;\n return this.flags[flagName];\n }\n getNumber(flagName) {\n return this.get(flagName);\n }\n getBool(flagName) {\n return this.get(flagName);\n }\n getFlags() {\n return this.flags;\n }\n // For backwards compatibility.\n get features() {\n return this.flags;\n }\n set(flagName, value) {\n if (this.flagRegistry[flagName] == null) {\n throw new Error(`Cannot set flag ${flagName} as it has not been registered.`);\n }\n this.flags[flagName] = value;\n if (this.flagRegistry[flagName].setHook != null) {\n this.flagRegistry[flagName].setHook(value);\n }\n }\n evaluateFlag(flagName) {\n if (this.flagRegistry[flagName] == null) {\n throw new Error(`Cannot evaluate flag '${flagName}': no evaluation function found.`);\n }\n return this.flagRegistry[flagName].evaluationFn();\n }\n setFlags(flags) {\n this.flags = Object.assign({}, flags);\n }\n reset() {\n this.flags = {};\n this.urlFlags = {};\n this.populateURLFlags();\n }\n populateURLFlags() {\n if (typeof this.global === 'undefined' ||\n typeof this.global.location === 'undefined' ||\n typeof this.global.location.search === 'undefined') {\n return;\n }\n const urlParams = getQueryParams(this.global.location.search);\n if (TENSORFLOWJS_FLAGS_PREFIX in urlParams) {\n const keyValues = urlParams[TENSORFLOWJS_FLAGS_PREFIX].split(',');\n keyValues.forEach(keyValue => {\n const [key, value] = keyValue.split(':');\n this.urlFlags[key] = parseValue(key, value);\n });\n }\n }\n}\nexport function getQueryParams(queryString) {\n const params = {};\n queryString.replace(/[?&]([^=?&]+)(?:=([^&]*))?/g, (s, ...t) => {\n decodeParam(params, t[0], t[1]);\n return t.join('=');\n });\n return params;\n}\nfunction decodeParam(params, name, value) {\n params[decodeURIComponent(name)] = decodeURIComponent(value || '');\n}\nfunction parseValue(flagName, value) {\n value = value.toLowerCase();\n if (value === 'true' || value === 'false') {\n return value === 'true';\n }\n else if (`${+value}` === value) {\n return +value;\n }\n throw new Error(`Could not parse value flag value ${value} for flag ${flagName}.`);\n}\n/**\n * Returns the current environment (a global singleton).\n *\n * The environment object contains the evaluated feature values as well as the\n * active platform.\n *\n * @doc {heading: 'Environment'}\n */\nexport function env() {\n return ENV;\n}\nexport let ENV = null;\nexport function setEnvironmentGlobal(environment) {\n ENV = environment;\n}\n//# sourceMappingURL=environment.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// Note that the identifier globalNameSpace is scoped to this module, but will\n// always resolve to the same global object regardless of how the module is\n// resolved.\n// tslint:disable-next-line:no-any\nlet globalNameSpace;\n// tslint:disable-next-line:no-any\nexport function getGlobalNamespace() {\n if (globalNameSpace == null) {\n // tslint:disable-next-line:no-any\n let ns;\n if (typeof (window) !== 'undefined') {\n ns = window;\n }\n else if (typeof (global) !== 'undefined') {\n ns = global;\n }\n else if (typeof (process) !== 'undefined') {\n ns = process;\n }\n else if (typeof (self) !== 'undefined') {\n ns = self;\n }\n else {\n throw new Error('Could not find a global object');\n }\n globalNameSpace = ns;\n }\n return globalNameSpace;\n}\n// tslint:disable-next-line:no-any\nfunction getGlobalMap() {\n const ns = getGlobalNamespace();\n if (ns._tfGlobals == null) {\n ns._tfGlobals = new Map();\n }\n return ns._tfGlobals;\n}\n/**\n * Returns a globally accessible 'singleton' object.\n *\n * @param key the name of the object\n * @param init a function to initialize to initialize this object\n * the first time it is fetched.\n */\nexport function getGlobal(key, init) {\n const globalMap = getGlobalMap();\n if (globalMap.has(key)) {\n return globalMap.get(key);\n }\n else {\n const singleton = init();\n globalMap.set(key, singleton);\n return globalMap.get(key);\n }\n}\n//# sourceMappingURL=global_util.js.map", "export const Abs = 'Abs';\nexport const Acos = 'Acos';\nexport const Acosh = 'Acosh';\nexport const Add = 'Add';\nexport const AddN = 'AddN';\nexport const All = 'All';\nexport const Any = 'Any';\nexport const ArgMax = 'ArgMax';\nexport const ArgMin = 'ArgMin';\nexport const Asin = 'Asin';\nexport const Asinh = 'Asinh';\nexport const Atan = 'Atan';\nexport const Atanh = 'Atanh';\nexport const Atan2 = 'Atan2';\nexport const AvgPool = 'AvgPool';\nexport const AvgPoolBackprop = 'AvgPoolBackprop';\nexport const AvgPool3D = 'AvgPool3D';\nexport const AvgPool3DBackprop = 'AvgPool3DBackprop';\nexport const BatchMatMul = 'BatchMatMul';\nexport const BatchToSpaceND = 'BatchToSpaceND';\nexport const BroadcastTo = 'BroadcastTo';\nexport const Cast = 'Cast';\nexport const Ceil = 'Ceil';\nexport const ClipByValue = 'ClipByValue';\nexport const Complex = 'Complex';\nexport const Concat = 'Concat';\nexport const Conv2D = 'Conv2D';\nexport const Conv2DBackpropFilter = 'Conv2DBackpropFilter';\nexport const Conv2DBackpropInput = 'Conv2DBackpropInput';\nexport const Conv3D = 'Conv3D';\nexport const Conv3DBackpropFilterV2 = 'Conv3DBackpropFilterV2';\nexport const Conv3DBackpropInputV2 = 'Conv3DBackpropInputV2';\nexport const Cos = 'Cos';\nexport const Cosh = 'Cosh';\nexport const Cumsum = 'Cumsum';\nexport const CropAndResize = 'CropAndResize';\nexport const DepthToSpace = 'DepthToSpace';\nexport const DepthwiseConv2dNative = 
'DepthwiseConv2dNative';\nexport const DepthwiseConv2dNativeBackpropFilter = 'DepthwiseConv2dNativeBackpropFilter';\nexport const DepthwiseConv2dNativeBackpropInput = 'DepthwiseConv2dNativeBackpropInput';\nexport const Diag = 'Diag';\nexport const Dilation2D = 'Dilation2D';\nexport const Dilation2DBackpropInput = 'Dilation2DBackpropInput';\nexport const Dilation2DBackpropFilter = 'Dilation2DBackpropFilter';\nexport const Div = 'Div';\nexport const Elu = 'Elu';\nexport const EluGrad = 'EluGrad';\nexport const Erf = 'Erf';\nexport const Equal = 'Equal';\nexport const Exp = 'Exp';\nexport const Expm1 = 'Expm1';\nexport const FFT = 'FFT';\nexport const Fill = 'Fill';\nexport const FlipLeftRight = 'FlipLeftRight';\nexport const Floor = 'Floor';\nexport const FloorDiv = 'FloorDiv';\nexport const FusedBatchNorm = 'FusedBatchNorm';\nexport const GatherV2 = 'GatherV2';\nexport const GatherNd = 'GatherNd';\nexport const Greater = 'Greater';\nexport const GreaterEqual = 'GreaterEqual';\nexport const Identity = 'Identity';\nexport const IFFT = 'IFFT';\nexport const Imag = 'Imag';\nexport const IsFinite = 'IsFinite';\nexport const IsInf = 'IsInf';\nexport const IsNan = 'IsNan';\nexport const Less = 'Less';\nexport const LessEqual = 'LessEqual';\nexport const LinSpace = 'LinSpace';\nexport const Log = 'Log';\nexport const Log1p = 'Log1p';\nexport const LogicalAnd = 'LogicalAnd';\nexport const LogicalNot = 'LogicalNot';\nexport const LogicalOr = 'LogicalOr';\nexport const LogSoftmax = 'LogSoftmax';\nexport const LRN = 'LRN';\nexport const LRNBackprop = 'LRNBackprop';\nexport const Max = 'Max';\nexport const Maximum = 'Maximum';\nexport const MaxPool = 'MaxPool';\nexport const MaxPoolBackprop = 'MaxPoolBackprop';\nexport const MaxPool3D = 'MaxPool3D';\nexport const MaxPool3DBackprop = 'MaxPool3DBackprop';\nexport const MaxPoolWithArgmax = 'MaxPoolWithArgmax';\nexport const Mean = 'Mean';\nexport const Min = 'Min';\nexport const Minimum = 'Minimum';\nexport const MirrorPad = 'MirrorPad';\nexport const Mod = 'Mod';\nexport const Multiply = 'Multiply';\nexport const Negate = 'Negate';\nexport const NotEqual = 'NotEqual';\nexport const NonMaxSuppressionV3 = 'NonMaxSuppressionV3';\nexport const NonMaxSuppressionV4 = 'NonMaxSuppressionV4';\nexport const NonMaxSuppressionV5 = 'NonMaxSuppressionV5';\nexport const OnesLike = 'OnesLike';\nexport const OneHot = 'OneHot';\nexport const PadV2 = 'PadV2';\nexport const Pool = 'Pool';\nexport const Pow = 'Pow';\nexport const Prelu = 'Prelu';\nexport const Prod = 'Prod';\nexport const Range = 'Range';\nexport const Real = 'Real';\nexport const Reciprocal = 'Reciprocal';\nexport const Relu = 'Relu';\nexport const Reshape = 'Reshape';\nexport const ResizeNearestNeighbor = 'ResizeNearestNeighbor';\nexport const ResizeNearestNeighborGrad = 'ResizeNearestNeighborGrad';\nexport const ResizeBilinear = 'ResizeBilinear';\nexport const ResizeBilinearGrad = 'ResizeBilinearGrad';\nexport const Relu6 = 'Relu6';\nexport const Reverse = 'Reverse';\nexport const Round = 'Round';\nexport const Rsqrt = 'Rsqrt';\nexport const ScatterNd = 'ScatterNd';\nexport const SelectV2 = 'SelectV2';\nexport const Selu = 'Selu';\nexport const Slice = 'Slice';\nexport const Sin = 'Sin';\nexport const Sinh = 'Sinh';\nexport const Sign = 'Sign';\nexport const Sigmoid = 'Sigmoid';\nexport const Softplus = 'Softplus';\nexport const Sqrt = 'Sqrt';\nexport const Sum = 'Sum';\nexport const SpaceToBatchND = 'SpaceToBatchND';\nexport const SplitV = 'SplitV';\nexport const Softmax = 'Softmax';\nexport const 
SquaredDifference = 'SquaredDifference';\nexport const Square = 'Square';\nexport const Sub = 'Sub';\nexport const SparseToDense = 'SparseToDense';\nexport const StridedSlice = 'StridedSlice';\nexport const Tan = 'Tan';\nexport const Tanh = 'Tanh';\nexport const Tile = 'Tile';\nexport const TopK = 'TopK';\nexport const Transpose = 'Transpose';\nexport const Unique = 'Unique';\nexport const Unpack = 'Unpack';\nexport const UnsortedSegmentSum = 'UnsortedSegmentSum';\nexport const ZerosLike = 'ZerosLike';\n/**\n * TensorFlow.js-only kernels\n */\nexport const Step = 'Step';\nexport const FromPixels = 'FromPixels';\nexport const RotateWithOffset = 'RotateWithOffset';\nexport const _FusedMatMul = '_FusedMatMul';\nexport const FusedConv2D = 'FusedConv2D';\nexport const FusedDepthwiseConv2D = 'FusedDepthwiseConv2D';\n//# sourceMappingURL=kernel_names.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from './environment';\nimport { getGlobal } from './global_util';\nconst kernelRegistry = getGlobal('kernelRegistry', () => new Map());\nconst gradRegistry = getGlobal('gradRegistry', () => new Map());\n/**\n * Returns the kernel function (code) associated with the provided names.\n *\n * @param kernelName The official name of the kernel.\n * @param backendName The official name of the backend.\n */\nexport function getKernel(kernelName, backendName) {\n const key = makeKey(kernelName, backendName);\n return kernelRegistry.get(key);\n}\n/**\n * Returns the registered gradient info associated with the provided kernel.\n * @param kernelName The official TF kernel name.\n */\nexport function getGradient(kernelName) {\n return gradRegistry.get(kernelName);\n}\nexport function getKernelsForBackend(backendName) {\n const it = kernelRegistry.entries();\n const result = [];\n while (true) {\n const { done, value } = it.next();\n if (done) {\n break;\n }\n const [key, config] = value;\n const [backend,] = key.split('_');\n if (backend === backendName) {\n result.push(config);\n }\n }\n return result;\n}\n/**\n * Registers the function (forward pass) for the kernel in a global registry.\n *\n * @param config A config object with the following properties:\n * - `kernelName` The official name of the kernel.\n * - `backendName` The official name of the backend.\n * - `kernelFunc` The function to run during the forward pass of the kernel.\n * - `setupFunc` Optional. Gets called once, after the backend initializes.\n * - `disposeFunc` Optional. 
Gets called once, right before the backend is\n * disposed.\n */\nexport function registerKernel(config) {\n const { kernelName, backendName } = config;\n const key = makeKey(kernelName, backendName);\n if (kernelRegistry.has(key)) {\n console.warn(`The kernel '${kernelName}' for backend ` +\n `'${backendName}' is already registered`);\n }\n kernelRegistry.set(key, config);\n}\n/**\n * Registers a gradient function for a given kernel in the global registry,\n * to be used during the back-propagation of that kernel.\n *\n * @param config An object with the following properties:\n * - `kernelName` The name of the kernel that the gradient function is for.\n * - `gradFunc` The function to run during back-propagation.\n */\nexport function registerGradient(config) {\n const { kernelName } = config;\n if (gradRegistry.has(kernelName)) {\n // TODO (yassogba) after 3.0 assess whether we need to keep this gated\n // to debug mode.\n if (env().getBool('DEBUG')) {\n console.warn(`Overriding the gradient for '${kernelName}'`);\n }\n }\n gradRegistry.set(kernelName, config);\n}\n/**\n * Removes the kernel function from the registry.\n *\n * @param kernelName The official name of the kernel.\n * @param backendName The official name of the backend.\n *\n */\nexport function unregisterKernel(kernelName, backendName) {\n const key = makeKey(kernelName, backendName);\n if (!kernelRegistry.has(key)) {\n throw new Error(`The kernel '${kernelName}' for backend ` +\n `'${backendName}' is not registered`);\n }\n kernelRegistry.delete(key);\n}\n/** Removes the registered gradient from the global registry. */\nexport function unregisterGradient(kernelName) {\n if (!gradRegistry.has(kernelName)) {\n throw new Error(`The gradient '${kernelName}' for backend is not registered`);\n }\n gradRegistry.delete(kernelName);\n}\n/**\n * Finds kernels that have already been registered to a backend and re-registers\n * them for a new backend. Useful for registering custom backends.\n * @param registeredBackendName Already registered backend.\n * @param newBackendName New backend.\n */\nexport function copyRegisteredKernels(registeredBackendName, newBackendName) {\n const kernels = getKernelsForBackend(registeredBackendName);\n kernels.forEach(kernelConfig => {\n const newKernelConfig = Object.assign({}, kernelConfig, { backendName: newBackendName });\n registerKernel(newKernelConfig);\n });\n}\nfunction makeKey(kernelName, backendName) {\n return `${backendName}_${kernelName}`;\n}\n//# sourceMappingURL=kernel_registry.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from './environment';\nimport * as base from './util_base';\nexport * from './util_base';\n/**\n * Create typed array for scalar value. 
Used for storing in `DataStorage`.\n */\nexport function createScalarValue(value, dtype) {\n if (dtype === 'string') {\n return encodeString(value);\n }\n return toTypedArray([value], dtype);\n}\nfunction noConversionNeeded(a, dtype) {\n return (a instanceof Float32Array && dtype === 'float32') ||\n (a instanceof Int32Array && dtype === 'int32') ||\n (a instanceof Uint8Array && dtype === 'bool');\n}\nexport function toTypedArray(a, dtype) {\n if (dtype === 'string') {\n throw new Error('Cannot convert a string[] to a TypedArray');\n }\n if (Array.isArray(a)) {\n a = base.flatten(a);\n }\n if (env().getBool('DEBUG')) {\n base.checkConversionForErrors(a, dtype);\n }\n if (noConversionNeeded(a, dtype)) {\n return a;\n }\n if (dtype == null || dtype === 'float32' || dtype === 'complex64') {\n return new Float32Array(a);\n }\n else if (dtype === 'int32') {\n return new Int32Array(a);\n }\n else if (dtype === 'bool') {\n const bool = new Uint8Array(a.length);\n for (let i = 0; i < bool.length; ++i) {\n if (Math.round(a[i]) !== 0) {\n bool[i] = 1;\n }\n }\n return bool;\n }\n else {\n throw new Error(`Unknown data type ${dtype}`);\n }\n}\n/**\n * Returns the current high-resolution time in milliseconds relative to an\n * arbitrary time in the past. It works across different platforms (node.js,\n * browsers).\n *\n * ```js\n * console.log(tf.util.now());\n * ```\n *\n * @doc {heading: 'Util', namespace: 'util'}\n */\nexport function now() {\n return env().platform.now();\n}\n/**\n * Returns a platform-specific implementation of\n * [`fetch`](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API).\n *\n * If `fetch` is defined on the global object (`window`, `process`, etc.),\n * `tf.util.fetch` returns that function.\n *\n * If not, `tf.util.fetch` returns a platform-specific solution.\n *\n * ```js\n * const resource = await tf.util.fetch('https://unpkg.com/@tensorflow/tfjs');\n * // handle response\n * ```\n *\n * @doc {heading: 'Util'}\n */\nexport function fetch(path, requestInits) {\n return env().platform.fetch(path, requestInits);\n}\n/**\n * Encodes the provided string into bytes using the provided encoding scheme.\n *\n * @param s The string to encode.\n * @param encoding The encoding scheme. Defaults to utf-8.\n *\n * @doc {heading: 'Util'}\n */\nexport function encodeString(s, encoding = 'utf-8') {\n encoding = encoding || 'utf-8';\n return env().platform.encode(s, encoding);\n}\n/**\n * Decodes the provided bytes into a string using the provided encoding scheme.\n * @param bytes The bytes to decode.\n *\n * @param encoding The encoding scheme. Defaults to utf-8.\n *\n * @doc {heading: 'Util'}\n */\nexport function decodeString(bytes, encoding = 'utf-8') {\n encoding = encoding || 'utf-8';\n return env().platform.decode(bytes, encoding);\n}\n//# sourceMappingURL=util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as util from './util';\nexport class Profiler {\n constructor(backendTimer, logger) {\n this.backendTimer = backendTimer;\n this.logger = logger;\n if (logger == null) {\n this.logger = new Logger();\n }\n }\n profileKernel(kernelName, inputs, f) {\n let outputs;\n const holdResultWrapperFn = () => {\n outputs = f();\n };\n const timer = this.backendTimer.time(holdResultWrapperFn);\n for (let i = 0; i < outputs.length; i++) {\n const output = outputs[i];\n // Dangling promise here because we don't want to propagate up\n // asynchronicity.\n output.data().then(tensorVals => {\n checkComputationForErrors(tensorVals, output.dtype, kernelName);\n });\n }\n const kernelProfile = {\n kernelName,\n outputs,\n inputs,\n timeMs: timer.then(timing => timing.kernelMs),\n extraInfo: timer.then(timing => timing.getExtraProfileInfo != null ?\n timing.getExtraProfileInfo() :\n '')\n };\n return kernelProfile;\n }\n logKernelProfile(kernelProfile) {\n const { kernelName, outputs, timeMs, inputs, extraInfo } = kernelProfile;\n outputs.forEach(result => {\n Promise.all([result.data(), timeMs, extraInfo]).then(valueContainer => {\n this.logger.logKernelProfile(kernelName, result, valueContainer[0], valueContainer[1], inputs, valueContainer[2]);\n });\n });\n }\n}\nexport function checkComputationForErrors(vals, dtype, kernelName) {\n if (dtype !== 'float32') {\n // Only floating point computations will generate NaN values\n return false;\n }\n for (let i = 0; i < vals.length; i++) {\n const num = vals[i];\n if (isNaN(num) || !isFinite(num)) {\n // Throwing custom exception so behavior is testable.\n console.warn(`Found ${num} in the result of '${kernelName}'`);\n return true;\n }\n }\n return false;\n}\nexport class Logger {\n logKernelProfile(name, result, vals, timeMs, inputs, extraInfo) {\n const time = typeof timeMs === 'number' ? util.rightPad(`${timeMs}ms`, 9) :\n timeMs['error'];\n const paddedName = util.rightPad(name, 25);\n const rank = result.rank;\n const size = result.size;\n const shape = util.rightPad(result.shape.toString(), 14);\n let inputShapesDescription = '';\n for (const name in inputs) {\n const input = inputs[name];\n if (input != null) {\n // The input might be a non-tensor (e.g HTMLImageElement), in which case\n // we claim the output shape as input shape.\n const inputShape = input.shape || result.shape;\n const inputRank = inputShape.length;\n inputShapesDescription +=\n `${name}: ${inputRank}D ${inputRank > 0 ? inputShape : ''} `;\n }\n }\n console.log(`%c${paddedName}\\t%c${time}\\t%c${rank}D ${shape}\\t%c${size}\\t%c${inputShapesDescription}\\t%c${extraInfo}`, 'font-weight:bold', 'color:red', 'color:blue', 'color: orange', 'color: green', 'color: steelblue');\n }\n}\n//# sourceMappingURL=profiler.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as util from './util';\n/**\n * Computes a list of TapeNodes that connect x to y, filtering everything else\n * out and preserving the order of the original tape elements.\n *\n * @param tape The tape elements to filter.\n * @param xs The input Tensors.\n * @param y The output Tensor.\n */\nexport function getFilteredNodesXToY(tape, xs, y) {\n // Forward pass to compute all the nodes and Tensors that are transitively a\n // function of x.\n const tensorsFromX = {};\n const nodesFromX = {};\n for (let i = 0; i < xs.length; i++) {\n tensorsFromX[xs[i].id] = true;\n }\n for (let i = 0; i < tape.length; i++) {\n const node = tape[i];\n const nodeInputs = node.inputs;\n for (const inputName in nodeInputs) {\n const input = nodeInputs[inputName];\n let anyInputFromX = false;\n for (let j = 0; j < xs.length; j++) {\n if (tensorsFromX[input.id]) {\n node.outputs.forEach(output => tensorsFromX[output.id] = true);\n anyInputFromX = true;\n nodesFromX[node.id] = true;\n break;\n }\n }\n if (anyInputFromX) {\n break;\n }\n }\n }\n // Backward pass to find all of the nodes and Tensors that lead to y.\n const tensorsLeadToY = {};\n tensorsLeadToY[y.id] = true;\n const nodesToY = {};\n for (let i = tape.length - 1; i >= 0; i--) {\n const node = tape[i];\n const nodeInputs = node.inputs;\n // If any of the outputs lead to y, mark all of the inputs as leading to y.\n for (let j = 0; j < node.outputs.length; j++) {\n if (tensorsLeadToY[node.outputs[j].id]) {\n for (const inputName in nodeInputs) {\n tensorsLeadToY[nodeInputs[inputName].id] = true;\n nodesToY[node.id] = true;\n }\n break;\n }\n }\n }\n // Return the paths that come from x and lead to y.\n const filteredTape = [];\n for (let i = 0; i < tape.length; i++) {\n const node = tape[i];\n if (nodesFromX[node.id] && nodesToY[node.id]) {\n // Prune the inputs from the node that aren't a function of x.\n const prunedInputs = {};\n for (const inputName in node.inputs) {\n const nodeInput = node.inputs[inputName];\n if (tensorsFromX[nodeInput.id]) {\n prunedInputs[inputName] = nodeInput;\n }\n }\n // Copy the node and overwrite inputsAndArgs to the pruned version.\n const prunedNode = Object.assign({}, node);\n prunedNode.inputs = prunedInputs;\n prunedNode.outputs = node.outputs;\n filteredTape.push(prunedNode);\n }\n }\n return filteredTape;\n}\n/**\n * Backpropagate gradients through the filtered TapeNodes.\n *\n * @param tensorAccumulatedGradientMap A map of Tensor to its gradient. 
This map\n * is mutated by this method.\n * @param filteredTape The filtered TapeNodes to backprop through.\n */\nexport function backpropagateGradients(tensorAccumulatedGradientMap, filteredTape, tidy, add) {\n // Walk the tape backward and keep a map of Tensor to its gradient.\n for (let i = filteredTape.length - 1; i >= 0; i--) {\n const node = filteredTape[i];\n const dys = [];\n node.outputs.forEach(o => {\n const gradTensor = tensorAccumulatedGradientMap[o.id];\n if (gradTensor != null) {\n dys.push(gradTensor);\n }\n else {\n // This particular output is not in the back-propagation subgraph, so it\n // does not affect the final output, thus we put null for its dy.\n dys.push(null);\n }\n });\n if (node.gradient == null) {\n throw new Error(`Cannot compute gradient: gradient function not found ` +\n `for ${node.kernelName}.`);\n }\n // Backprop dy through this node and accumulate gradients over the inputs.\n const inputGradients = node.gradient(dys);\n for (const inputName in node.inputs) {\n if (!(inputName in inputGradients)) {\n throw new Error(`Cannot backprop through input ${inputName}. ` +\n `Available gradients found: ${Object.keys(inputGradients)}.`);\n }\n // Call the gradient function.\n const dx = tidy(() => inputGradients[inputName]());\n if (dx.dtype !== 'float32') {\n throw new Error(`Error in gradient for op ${node.kernelName}. The gradient of input ` +\n `${inputName} must have 'float32' dtype, but has '${dx.dtype}'`);\n }\n const x = node.inputs[inputName];\n if (!util.arraysEqual(dx.shape, x.shape)) {\n throw new Error(`Error in gradient for op ${node.kernelName}. The gradient of input ` +\n `'${inputName}' has shape '${dx.shape}', which does not match ` +\n `the shape of the input '${x.shape}'`);\n }\n if (tensorAccumulatedGradientMap[x.id] == null) {\n tensorAccumulatedGradientMap[x.id] = dx;\n }\n else {\n const curGradient = tensorAccumulatedGradientMap[x.id];\n tensorAccumulatedGradientMap[x.id] = add(curGradient, dx);\n curGradient.dispose();\n }\n }\n }\n}\n//# sourceMappingURL=tape.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { computeStrides, isString, rightPad, sizeFromShape } from './util';\n// Maximum number of values before we decide to show ellipsis.\nconst FORMAT_LIMIT_NUM_VALS = 20;\n// Number of first and last values to show when displaying a, b,...,y, z.\nconst FORMAT_NUM_FIRST_LAST_VALS = 3;\n// Number of significant digits to show.\nconst FORMAT_NUM_SIG_DIGITS = 7;\nexport function tensorToString(vals, shape, dtype, verbose) {\n const strides = computeStrides(shape);\n const padPerCol = computeMaxSizePerColumn(vals, shape, dtype, strides);\n const rank = shape.length;\n const valsLines = subTensorToString(vals, shape, dtype, strides, padPerCol);\n const lines = ['Tensor'];\n if (verbose) {\n lines.push(` dtype: ${dtype}`);\n lines.push(` rank: ${rank}`);\n lines.push(` shape: [${shape}]`);\n lines.push(` values:`);\n }\n lines.push(valsLines.map(l => ' ' + l).join('\\n'));\n return lines.join('\\n');\n}\nfunction computeMaxSizePerColumn(vals, shape, dtype, strides) {\n const n = sizeFromShape(shape);\n const numCols = strides[strides.length - 1];\n const padPerCol = new Array(numCols).fill(0);\n const rank = shape.length;\n const valuesOrTuples = dtype === 'complex64' ? createComplexTuples(vals) : vals;\n if (rank > 1) {\n for (let row = 0; row < n / numCols; row++) {\n const offset = row * numCols;\n for (let j = 0; j < numCols; j++) {\n padPerCol[j] = Math.max(padPerCol[j], valToString(valuesOrTuples[offset + j], 0, dtype).length);\n }\n }\n }\n return padPerCol;\n}\nfunction valToString(val, pad, dtype) {\n let valStr;\n if (Array.isArray(val)) {\n valStr = `${parseFloat(val[0].toFixed(FORMAT_NUM_SIG_DIGITS))} + ` +\n `${parseFloat(val[1].toFixed(FORMAT_NUM_SIG_DIGITS))}j`;\n }\n else if (isString(val)) {\n valStr = `'${val}'`;\n }\n else if (dtype === 'bool') {\n valStr = boolNumToString(val);\n }\n else {\n valStr = parseFloat(val.toFixed(FORMAT_NUM_SIG_DIGITS)).toString();\n }\n return rightPad(valStr, pad);\n}\nfunction boolNumToString(v) {\n return v === 0 ? 'false' : 'true';\n}\nfunction subTensorToString(vals, shape, dtype, strides, padPerCol, isLast = true) {\n const storagePerElement = dtype === 'complex64' ? 
2 : 1;\n const size = shape[0];\n const rank = shape.length;\n if (rank === 0) {\n if (dtype === 'complex64') {\n const complexTuple = createComplexTuples(vals);\n return [valToString(complexTuple[0], 0, dtype)];\n }\n if (dtype === 'bool') {\n return [boolNumToString(vals[0])];\n }\n return [vals[0].toString()];\n }\n if (rank === 1) {\n if (size > FORMAT_LIMIT_NUM_VALS) {\n const firstValsSize = FORMAT_NUM_FIRST_LAST_VALS * storagePerElement;\n let firstVals = Array.from(vals.slice(0, firstValsSize));\n let lastVals = Array.from(vals.slice((size - FORMAT_NUM_FIRST_LAST_VALS) * storagePerElement, size * storagePerElement));\n if (dtype === 'complex64') {\n firstVals = createComplexTuples(firstVals);\n lastVals = createComplexTuples(lastVals);\n }\n return [\n '[' +\n firstVals.map((x, i) => valToString(x, padPerCol[i], dtype))\n .join(', ') +\n ', ..., ' +\n lastVals\n .map((x, i) => valToString(x, padPerCol[size - FORMAT_NUM_FIRST_LAST_VALS + i], dtype))\n .join(', ') +\n ']'\n ];\n }\n const displayVals = dtype === 'complex64' ? createComplexTuples(vals) :\n Array.from(vals);\n return [\n '[' +\n displayVals.map((x, i) => valToString(x, padPerCol[i], dtype))\n .join(', ') +\n ']'\n ];\n }\n // The array is rank 2 or more.\n const subshape = shape.slice(1);\n const substrides = strides.slice(1);\n const stride = strides[0] * storagePerElement;\n const lines = [];\n if (size > FORMAT_LIMIT_NUM_VALS) {\n for (let i = 0; i < FORMAT_NUM_FIRST_LAST_VALS; i++) {\n const start = i * stride;\n const end = start + stride;\n lines.push(...subTensorToString(vals.slice(start, end), subshape, dtype, substrides, padPerCol, false /* isLast */));\n }\n lines.push('...');\n for (let i = size - FORMAT_NUM_FIRST_LAST_VALS; i < size; i++) {\n const start = i * stride;\n const end = start + stride;\n lines.push(...subTensorToString(vals.slice(start, end), subshape, dtype, substrides, padPerCol, i === size - 1 /* isLast */));\n }\n }\n else {\n for (let i = 0; i < size; i++) {\n const start = i * stride;\n const end = start + stride;\n lines.push(...subTensorToString(vals.slice(start, end), subshape, dtype, substrides, padPerCol, i === size - 1 /* isLast */));\n }\n }\n const sep = rank === 2 ? ',' : '';\n lines[0] = '[' + lines[0] + sep;\n for (let i = 1; i < lines.length - 1; i++) {\n lines[i] = ' ' + lines[i] + sep;\n }\n let newLineSep = ',\\n';\n for (let i = 2; i < rank; i++) {\n newLineSep += '\\n';\n }\n lines[lines.length - 1] =\n ' ' + lines[lines.length - 1] + ']' + (isLast ? '' : newLineSep);\n return lines;\n}\nfunction createComplexTuples(vals) {\n const complexTuples = [];\n for (let i = 0; i < vals.length; i += 2) {\n complexTuples.push([vals[i], vals[i + 1]]);\n }\n return complexTuples;\n}\n//# sourceMappingURL=tensor_format.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { tensorToString } from './tensor_format';\nimport * as util from './util';\nimport { computeStrides, toNestedArray } from './util';\n/**\n * A mutable object, similar to `tf.Tensor`, that allows users to set values\n * at locations before converting to an immutable `tf.Tensor`.\n *\n * See `tf.buffer` for creating a tensor buffer.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nexport class TensorBuffer {\n constructor(shape, dtype, values) {\n this.dtype = dtype;\n this.shape = shape.slice();\n this.size = util.sizeFromShape(shape);\n if (values != null) {\n const n = values.length;\n util.assert(n === this.size, () => `Length of values '${n}' does not match the size ` +\n `inferred by the shape '${this.size}'.`);\n }\n if (dtype === 'complex64') {\n throw new Error(`complex64 dtype TensorBuffers are not supported. Please create ` +\n `a TensorBuffer for the real and imaginary parts separately and ` +\n `call tf.complex(real, imag).`);\n }\n this.values = values || util.getArrayFromDType(dtype, this.size);\n this.strides = computeStrides(shape);\n }\n /**\n * Sets a value in the buffer at a given location.\n *\n * @param value The value to set.\n * @param locs The location indices.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\n set(value, ...locs) {\n if (locs.length === 0) {\n locs = [0];\n }\n util.assert(locs.length === this.rank, () => `The number of provided coordinates (${locs.length}) must ` +\n `match the rank (${this.rank})`);\n const index = this.locToIndex(locs);\n this.values[index] = value;\n }\n /**\n * Returns the value in the buffer at the provided location.\n *\n * @param locs The location indices.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\n get(...locs) {\n if (locs.length === 0) {\n locs = [0];\n }\n let i = 0;\n for (const loc of locs) {\n if (loc < 0 || loc >= this.shape[i]) {\n const msg = `Requested out of range element at ${locs}. 
` +\n ` Buffer shape=${this.shape}`;\n throw new Error(msg);\n }\n i++;\n }\n let index = locs[locs.length - 1];\n for (let i = 0; i < locs.length - 1; ++i) {\n index += this.strides[i] * locs[i];\n }\n return this.values[index];\n }\n locToIndex(locs) {\n if (this.rank === 0) {\n return 0;\n }\n else if (this.rank === 1) {\n return locs[0];\n }\n let index = locs[locs.length - 1];\n for (let i = 0; i < locs.length - 1; ++i) {\n index += this.strides[i] * locs[i];\n }\n return index;\n }\n indexToLoc(index) {\n if (this.rank === 0) {\n return [];\n }\n else if (this.rank === 1) {\n return [index];\n }\n const locs = new Array(this.shape.length);\n for (let i = 0; i < locs.length - 1; ++i) {\n locs[i] = Math.floor(index / this.strides[i]);\n index -= locs[i] * this.strides[i];\n }\n locs[locs.length - 1] = index;\n return locs;\n }\n get rank() {\n return this.shape.length;\n }\n /**\n * Creates an immutable `tf.Tensor` object from the buffer.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\n toTensor() {\n return trackerFn().makeTensor(this.values, this.shape, this.dtype);\n }\n}\n// For tracking tensor creation and disposal.\nlet trackerFn = null;\n// Used by chaining methods to call into ops.\nlet opHandler = null;\n// Used to warn about deprecated methods.\nlet deprecationWarningFn = null;\n// This here so that we can use this method on dev branches and keep the\n// functionality at master.\n// tslint:disable-next-line:no-unused-expression\n[deprecationWarningFn];\n/**\n * An external consumer can register itself as the tensor tracker. This way\n * the Tensor class can notify the tracker for every tensor created and\n * disposed.\n */\nexport function setTensorTracker(fn) {\n trackerFn = fn;\n}\n/**\n * An external consumer can register itself as the op handler. This way the\n * Tensor class can have chaining methods that call into ops via the op\n * handler.\n */\nexport function setOpHandler(handler) {\n opHandler = handler;\n}\n/**\n * Sets the deprecation warning function to be used by this file. This way the\n * Tensor class can be a leaf but still use the environment.\n */\nexport function setDeprecationWarningFn(fn) {\n deprecationWarningFn = fn;\n}\n/**\n * A `tf.Tensor` object represents an immutable, multidimensional array of\n * numbers that has a shape and a data type.\n *\n * See `tf.tensor` for details on how to create a `tf.Tensor`.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nexport class Tensor {\n constructor(shape, dtype, dataId, id) {\n /** Whether this tensor has been globally kept. */\n this.kept = false;\n this.isDisposedInternal = false;\n this.shape = shape.slice();\n this.dtype = dtype || 'float32';\n this.size = util.sizeFromShape(shape);\n this.strides = computeStrides(shape);\n this.dataId = dataId;\n this.id = id;\n this.rankType = (this.rank < 5 ? this.rank.toString() : 'higher');\n }\n get rank() {\n return this.shape.length;\n }\n /**\n * Returns a promise of `tf.TensorBuffer` that holds the underlying data.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n async buffer() {\n const vals = await this.data();\n return opHandler.buffer(this.shape, this.dtype, vals);\n }\n /**\n * Returns a `tf.TensorBuffer` that holds the underlying data.\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n bufferSync() {\n return opHandler.buffer(this.shape, this.dtype, this.dataSync());\n }\n /**\n * Returns the tensor data as a nested array. 
The transfer of data is done\n * asynchronously.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n async array() {\n const vals = await this.data();\n return toNestedArray(this.shape, vals);\n }\n /**\n * Returns the tensor data as a nested array. The transfer of data is done\n * synchronously.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n arraySync() {\n return toNestedArray(this.shape, this.dataSync());\n }\n /**\n * Asynchronously downloads the values from the `tf.Tensor`. Returns a\n * promise of `TypedArray` that resolves when the computation has finished.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n async data() {\n this.throwIfDisposed();\n const data = trackerFn().read(this.dataId);\n if (this.dtype === 'string') {\n const bytes = await data;\n try {\n return bytes.map(b => util.decodeString(b));\n }\n catch (_a) {\n throw new Error('Failed to decode the string bytes into utf-8. ' +\n 'To get the original bytes, call tensor.bytes().');\n }\n }\n return data;\n }\n /**\n * Synchronously downloads the values from the `tf.Tensor`. This blocks the\n * UI thread until the values are ready, which can cause performance issues.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n dataSync() {\n this.throwIfDisposed();\n const data = trackerFn().readSync(this.dataId);\n if (this.dtype === 'string') {\n try {\n return data.map(b => util.decodeString(b));\n }\n catch (_a) {\n throw new Error('Failed to decode the string bytes into utf-8. ' +\n 'To get the original bytes, call tensor.bytes().');\n }\n }\n return data;\n }\n /** Returns the underlying bytes of the tensor's data. */\n async bytes() {\n this.throwIfDisposed();\n const data = await trackerFn().read(this.dataId);\n if (this.dtype === 'string') {\n return data;\n }\n else {\n return new Uint8Array(data.buffer);\n }\n }\n /**\n * Disposes `tf.Tensor` from memory.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n dispose() {\n if (this.isDisposed) {\n return;\n }\n trackerFn().disposeTensor(this);\n this.isDisposedInternal = true;\n }\n get isDisposed() {\n return this.isDisposedInternal;\n }\n throwIfDisposed() {\n if (this.isDisposed) {\n throw new Error(`Tensor is disposed.`);\n }\n }\n /**\n * Prints the `tf.Tensor`. See `tf.print` for details.\n *\n * @param verbose Whether to print verbose information about the tensor,\n * including dtype and size.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n print(verbose = false) {\n return opHandler.print(this, verbose);\n }\n /**\n * Returns a copy of the tensor. See `tf.clone` for details.\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n clone() {\n this.throwIfDisposed();\n return opHandler.clone(this);\n }\n /**\n * Returns a human-readable description of the tensor. 
Useful for logging.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n toString(verbose = false) {\n const vals = this.dataSync();\n return tensorToString(vals, this.shape, this.dtype, verbose);\n }\n cast(dtype) {\n this.throwIfDisposed();\n return opHandler.cast(this, dtype);\n }\n variable(trainable = true, name, dtype) {\n this.throwIfDisposed();\n return trackerFn().makeVariable(this, trainable, name, dtype);\n }\n}\nObject.defineProperty(Tensor, Symbol.hasInstance, {\n value: (instance) => {\n // Implementation note: we should use properties of the object that will be\n // defined before the constructor body has finished executing (methods).\n // This is because when this code is transpiled by babel, babel will call\n // classCallCheck before the constructor body is run.\n // See https://github.com/tensorflow/tfjs/issues/3384 for backstory.\n return !!instance && instance.data != null && instance.dataSync != null &&\n instance.throwIfDisposed != null;\n }\n});\n/**\n * A mutable `tf.Tensor`, useful for persisting state, e.g. for training.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nexport class Variable extends Tensor {\n constructor(initialValue, trainable, name, tensorId) {\n super(initialValue.shape, initialValue.dtype, initialValue.dataId, tensorId);\n this.trainable = trainable;\n this.name = name;\n }\n /**\n * Assign a new `tf.Tensor` to this variable. The new `tf.Tensor` must have\n * the same shape and dtype as the old `tf.Tensor`.\n *\n * @param newValue New tensor to be assigned to this variable.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n assign(newValue) {\n if (newValue.dtype !== this.dtype) {\n throw new Error(`dtype of the new value (${newValue.dtype}) and ` +\n `previous value (${this.dtype}) must match`);\n }\n if (!util.arraysEqual(newValue.shape, this.shape)) {\n throw new Error(`shape of the new value (${newValue.shape}) and ` +\n `previous value (${this.shape}) must match`);\n }\n trackerFn().disposeTensor(this);\n this.dataId = newValue.dataId;\n trackerFn().incRef(this, null /* backend */);\n }\n dispose() {\n trackerFn().disposeVariable(this);\n this.isDisposedInternal = true;\n }\n}\nObject.defineProperty(Variable, Symbol.hasInstance, {\n value: (instance) => {\n return instance instanceof Tensor && instance.assign != null &&\n instance.assign instanceof Function;\n }\n});\n//# sourceMappingURL=tensor.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport var Rank;\n(function (Rank) {\n Rank[\"R0\"] = \"R0\";\n Rank[\"R1\"] = \"R1\";\n Rank[\"R2\"] = \"R2\";\n Rank[\"R3\"] = \"R3\";\n Rank[\"R4\"] = \"R4\";\n Rank[\"R5\"] = \"R5\";\n Rank[\"R6\"] = \"R6\";\n})(Rank || (Rank = {}));\n// Looks for upcasting types. 
Used, for example, in operations with mixed dtype\n// inputs.\nvar UpcastInt32AndMap;\n(function (UpcastInt32AndMap) {\n UpcastInt32AndMap[\"float32\"] = \"float32\";\n UpcastInt32AndMap[\"int32\"] = \"int32\";\n UpcastInt32AndMap[\"bool\"] = \"int32\";\n UpcastInt32AndMap[\"complex64\"] = \"complex64\";\n})(UpcastInt32AndMap || (UpcastInt32AndMap = {}));\nvar UpcastBoolAndMap;\n(function (UpcastBoolAndMap) {\n UpcastBoolAndMap[\"float32\"] = \"float32\";\n UpcastBoolAndMap[\"int32\"] = \"int32\";\n UpcastBoolAndMap[\"bool\"] = \"bool\";\n UpcastBoolAndMap[\"complex64\"] = \"complex64\";\n})(UpcastBoolAndMap || (UpcastBoolAndMap = {}));\nvar UpcastFloat32AndMap;\n(function (UpcastFloat32AndMap) {\n UpcastFloat32AndMap[\"float32\"] = \"float32\";\n UpcastFloat32AndMap[\"int32\"] = \"float32\";\n UpcastFloat32AndMap[\"bool\"] = \"float32\";\n UpcastFloat32AndMap[\"complex64\"] = \"complex64\";\n})(UpcastFloat32AndMap || (UpcastFloat32AndMap = {}));\nvar UpcastComplex64AndMap;\n(function (UpcastComplex64AndMap) {\n UpcastComplex64AndMap[\"float32\"] = \"complex64\";\n UpcastComplex64AndMap[\"int32\"] = \"complex64\";\n UpcastComplex64AndMap[\"bool\"] = \"complex64\";\n UpcastComplex64AndMap[\"complex64\"] = \"complex64\";\n})(UpcastComplex64AndMap || (UpcastComplex64AndMap = {}));\nconst upcastTypeMap = {\n 'float32': UpcastFloat32AndMap,\n 'int32': UpcastInt32AndMap,\n 'bool': UpcastBoolAndMap,\n 'complex64': UpcastComplex64AndMap\n};\nexport function upcastType(typeA, typeB) {\n if (typeA === 'string' || typeB === 'string') {\n if (typeA === 'string' && typeB === 'string') {\n return 'string';\n }\n throw new Error(`Can not upcast ${typeA} with ${typeB}`);\n }\n return upcastTypeMap[typeA][typeB];\n}\n/** Returns the output type after summation. */\nexport function sumOutType(type) {\n return upcastType(type, 'int32');\n}\n//# sourceMappingURL=types.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Tensor } from './tensor';\nimport { upcastType } from './types';\nimport { assert } from './util';\nexport function makeTypesMatch(a, b) {\n if (a.dtype === b.dtype) {\n return [a, b];\n }\n const dtype = upcastType(a.dtype, b.dtype);\n return [a.cast(dtype), b.cast(dtype)];\n}\nexport function assertTypesMatch(a, b) {\n assert(a.dtype === b.dtype, () => `The dtypes of the first(${a.dtype}) and` +\n ` second(${b.dtype}) input must match`);\n}\nexport function isTensorInList(tensor, tensorList) {\n return tensorList.some(x => x.id === tensor.id);\n}\n/**\n * Extracts any `Tensor`s found within the provided object.\n *\n * @param container an object that may be a `Tensor` or may directly contain\n * `Tensor`s, such as a `Tensor[]` or `{key: Tensor, ...}`. In general it\n * is safe to pass any object here, except that `Promise`s are not\n * supported.\n * @returns An array of `Tensors` found within the passed object. 
If the\n * argument is simply a `Tensor', a list containing that `Tensor` is\n * returned. If the object is not a `Tensor` or does not\n * contain `Tensors`, an empty list is returned.\n */\nexport function getTensorsInContainer(result) {\n const list = [];\n const seen = new Set();\n walkTensorContainer(result, list, seen);\n return list;\n}\nfunction walkTensorContainer(container, list, seen) {\n if (container == null) {\n return;\n }\n if (container instanceof Tensor) {\n list.push(container);\n return;\n }\n if (!isIterable(container)) {\n return;\n }\n // Iteration over keys works also for arrays.\n const iterable = container;\n for (const k in iterable) {\n const val = iterable[k];\n if (!seen.has(val)) {\n seen.add(val);\n walkTensorContainer(val, list, seen);\n }\n }\n}\n// tslint:disable-next-line:no-any\nfunction isIterable(obj) {\n return Array.isArray(obj) || typeof obj === 'object';\n}\n//# sourceMappingURL=tensor_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { KernelBackend } from './backends/backend';\nimport { Environment, setEnvironmentGlobal } from './environment';\nimport { getGlobalNamespace } from './global_util';\nimport { Add, Cast } from './kernel_names';\nimport { getGradient, getKernel, getKernelsForBackend } from './kernel_registry';\nimport { Profiler } from './profiler';\nimport { backpropagateGradients, getFilteredNodesXToY } from './tape';\nimport { setTensorTracker, Tensor, Variable } from './tensor';\nimport { getTensorsInContainer } from './tensor_util';\nimport * as util from './util';\nimport { bytesFromStringArray, makeOnesTypedArray, now, sizeFromShape } from './util';\nclass EngineState {\n constructor() {\n // Public since optimizers will use it.\n this.registeredVariables = {};\n this.nextTapeNodeId = 0;\n this.numBytes = 0;\n this.numTensors = 0;\n this.numStringTensors = 0;\n this.numDataBuffers = 0;\n // Number of nested tf.grad() statements when computing higher-order\n // gradients. E.g. `1` for first-order gradients and `2` for second-order\n // gradients. Used to track if the tape should be removed after a backprop.\n this.gradientDepth = 0;\n // Number of nested kernel calls. When kernel depth is greater than 1, we turn\n // off the tape.\n this.kernelDepth = 0;\n this.scopeStack = [];\n /**\n * Keeps track of the number of data moves during a kernel execution. 
We\n * maintain a stack since kernels can call other kernels, recursively.\n */\n this.numDataMovesStack = [];\n this.nextScopeId = 0;\n this.tensorInfo = new WeakMap();\n this.profiling = false;\n this.activeProfile = { newBytes: 0, newTensors: 0, peakBytes: 0, kernels: [], result: null };\n }\n dispose() {\n for (const variableName in this.registeredVariables) {\n this.registeredVariables[variableName].dispose();\n }\n }\n}\nexport class Engine {\n constructor(ENV) {\n this.ENV = ENV;\n this.registry = {};\n this.registryFactory = {};\n this.pendingBackendInitId = 0;\n this.state = new EngineState();\n }\n async ready() {\n if (this.pendingBackendInit != null) {\n return this.pendingBackendInit.then(() => { });\n }\n if (this.backendInstance != null) {\n return;\n }\n const sortedBackends = this.getSortedBackends();\n for (let i = 0; i < sortedBackends.length; i++) {\n const backendName = sortedBackends[i];\n const success = await this.initializeBackend(backendName).success;\n if (success) {\n await this.setBackend(backendName);\n return;\n }\n }\n throw new Error(`Could not initialize any backends, all backend initializations ` +\n `failed.`);\n }\n get backend() {\n if (this.pendingBackendInit != null) {\n throw new Error(`Backend '${this.backendName}' has not yet been initialized. Make ` +\n `sure to await tf.ready() or await tf.setBackend() before calling ` +\n `other methods`);\n }\n if (this.backendInstance == null) {\n const { name, asyncInit } = this.initializeBackendsAndReturnBest();\n if (asyncInit) {\n throw new Error(`The highest priority backend '${name}' has not yet been ` +\n `initialized. Make sure to await tf.ready() or ` +\n `await tf.setBackend() before calling other methods`);\n }\n this.setBackend(name);\n }\n return this.backendInstance;\n }\n backendNames() {\n return Object.keys(this.registryFactory);\n }\n findBackend(backendName) {\n if (!(backendName in this.registry)) {\n // If the backend hasn't been initialized but we have a registry entry for\n // it, initialize it and return it.\n if (backendName in this.registryFactory) {\n const { asyncInit } = this.initializeBackend(backendName);\n if (asyncInit) {\n // Backend is not ready yet.\n return null;\n }\n }\n else {\n return null;\n }\n }\n return this.registry[backendName];\n }\n findBackendFactory(backendName) {\n if (!(backendName in this.registryFactory)) {\n return null;\n }\n return this.registryFactory[backendName].factory;\n }\n registerBackend(backendName, factory, priority = 1) {\n if (backendName in this.registryFactory) {\n console.warn(`${backendName} backend was already registered. ` +\n `Reusing existing backend factory.`);\n return false;\n }\n this.registryFactory[backendName] = { factory, priority };\n return true;\n }\n async setBackend(backendName) {\n if (this.registryFactory[backendName] == null) {\n throw new Error(`Backend name '${backendName}' not found in registry`);\n }\n this.backendName = backendName;\n if (this.registry[backendName] == null) {\n this.backendInstance = null;\n const { success, asyncInit } = this.initializeBackend(backendName);\n const result = asyncInit ? 
await success : success;\n if (!result) {\n return false;\n }\n }\n this.backendInstance = this.registry[backendName];\n this.setupRegisteredKernels();\n // Reset the profiler.\n this.profiler = new Profiler(this.backendInstance);\n return true;\n }\n setupRegisteredKernels() {\n const kernels = getKernelsForBackend(this.backendName);\n kernels.forEach(kernel => {\n if (kernel.setupFunc != null) {\n kernel.setupFunc(this.backendInstance);\n }\n });\n }\n disposeRegisteredKernels(backendName) {\n const kernels = getKernelsForBackend(backendName);\n kernels.forEach(kernel => {\n if (kernel.disposeFunc != null) {\n kernel.disposeFunc(this.registry[backendName]);\n }\n });\n }\n /**\n * Initializes a backend by looking up the backend name in the factory\n * registry and calling the factory method. Returns a boolean representing\n * whether the initialization of the backend suceeded. Throws an error if\n * there is no backend in the factory registry.\n */\n initializeBackend(backendName) {\n const registryFactoryEntry = this.registryFactory[backendName];\n if (registryFactoryEntry == null) {\n throw new Error(`Cannot initialize backend ${backendName}, no registration found.`);\n }\n try {\n const backend = registryFactoryEntry.factory();\n /* Test if the factory returns a promise.\n Done in a more liberal way than\n previous 'Promise.resolve(backend)===backend'\n as we needed to account for custom Promise\n implementations (e.g. Angular) */\n if (backend && !(backend instanceof KernelBackend)\n && typeof backend.then === 'function') {\n const promiseId = ++this.pendingBackendInitId;\n const success = backend\n .then(backendInstance => {\n // Outdated promise. Another backend was set in the meantime.\n if (promiseId < this.pendingBackendInitId) {\n return false;\n }\n this.registry[backendName] = backendInstance;\n this.pendingBackendInit = null;\n return true;\n })\n .catch(err => {\n // Outdated promise. Another backend was set in the meantime.\n if (promiseId < this.pendingBackendInitId) {\n return false;\n }\n this.pendingBackendInit = null;\n console.warn(`Initialization of backend ${backendName} failed`);\n console.warn(err.stack || err.message);\n return false;\n });\n this.pendingBackendInit = success;\n return { success, asyncInit: true };\n }\n else {\n this.registry[backendName] = backend;\n return { success: true, asyncInit: false };\n }\n }\n catch (err) {\n console.warn(`Initialization of backend ${backendName} failed`);\n console.warn(err.stack || err.message);\n return { success: false, asyncInit: false };\n }\n }\n removeBackend(backendName) {\n if (!(backendName in this.registryFactory)) {\n throw new Error(`${backendName} backend not found in registry`);\n }\n if (this.backendName === backendName && this.pendingBackendInit != null) {\n // There is a pending promise of the backend we want to remove. 
Make it\n // obsolete.\n this.pendingBackendInitId++;\n }\n if (backendName in this.registry) {\n this.disposeRegisteredKernels(backendName);\n this.registry[backendName].dispose();\n delete this.registry[backendName];\n }\n delete this.registryFactory[backendName];\n // Unset the backend if it is active.\n if (this.backendName === backendName) {\n this.pendingBackendInit = null;\n this.backendName = null;\n this.backendInstance = null;\n }\n }\n getSortedBackends() {\n if (Object.keys(this.registryFactory).length === 0) {\n throw new Error('No backend found in registry.');\n }\n return Object.keys(this.registryFactory).sort((a, b) => {\n // Highest priority comes first.\n return this.registryFactory[b].priority -\n this.registryFactory[a].priority;\n });\n }\n initializeBackendsAndReturnBest() {\n const sortedBackends = this.getSortedBackends();\n for (let i = 0; i < sortedBackends.length; i++) {\n const backendName = sortedBackends[i];\n const { success, asyncInit } = this.initializeBackend(backendName);\n if (asyncInit || success) {\n return { name: backendName, asyncInit };\n }\n }\n throw new Error(`Could not initialize any backends, all backend initializations ` +\n `failed.`);\n }\n moveData(backend, dataId) {\n const info = this.state.tensorInfo.get(dataId);\n const srcBackend = info.backend;\n const values = this.readSync(dataId);\n // Delete the tensor from the old backend and move it to the new\n // backend.\n srcBackend.disposeData(dataId);\n info.backend = backend;\n backend.move(dataId, values, info.shape, info.dtype);\n if (this.shouldCheckForMemLeaks()) {\n // Track the number of moves during a kernel execution to correctly\n // detect memory leaks.\n this.state.numDataMovesStack[this.state.numDataMovesStack.length - 1]++;\n }\n }\n tidy(nameOrFn, fn) {\n let name = null;\n if (fn == null) {\n // Called with only 1 argument.\n if (typeof nameOrFn !== 'function') {\n throw new Error('Please provide a function to tidy()');\n }\n fn = nameOrFn;\n }\n else {\n // Called with 2 arguments.\n if (typeof nameOrFn !== 'string' && !(nameOrFn instanceof String)) {\n throw new Error('When calling with two arguments, the first argument ' +\n 'to tidy() must be a string');\n }\n if (typeof fn !== 'function') {\n throw new Error('When calling with two arguments, the 2nd argument ' +\n 'to tidy() must be a function');\n }\n name = nameOrFn;\n // TODO(nsthorat,smilkov): Do operation logging and performance\n // profiling.\n }\n let result;\n return this.scopedRun(() => this.startScope(name), () => this.endScope(result), () => {\n result = fn();\n if (result instanceof Promise) {\n console.error('Cannot return a Promise inside of tidy.');\n }\n return result;\n });\n }\n scopedRun(start, end, f) {\n start();\n try {\n const res = f();\n end();\n return res;\n }\n catch (ex) {\n end();\n throw ex;\n }\n }\n nextTensorId() {\n return Engine.nextTensorId++;\n }\n nextVariableId() {\n return Engine.nextVariableId++;\n }\n /**\n * This method is called instead of the public-facing tensor.clone() when\n * saving a tensor for backwards pass. 
It makes sure to add the clone\n * operation to the tape regardless of being called inside a kernel\n * execution.\n *\n * This method will go away once all kernels are modularized since we won't\n * need to turn off the tape inside runKernel().\n */\n clone(x) {\n const y = this.makeTensorFromDataId(x.dataId, x.shape, x.dtype);\n const inputs = { x };\n const grad = (dy) => ({\n x: () => {\n const dtype = 'float32';\n const gradInputs = { x: dy };\n const attrs = { dtype };\n return ENGINE.runKernelFunc(backend => backend.cast(dy, dtype), gradInputs, null /* grad */, Cast, attrs);\n }\n });\n const saved = [];\n this.addTapeNode(this.state.activeScope.name, inputs, [y], grad, saved, {});\n return y;\n }\n /**\n * Execute a kernel with the given name and return the output tensor.\n *\n * @param kernelName The name of the kernel to execute.\n * @param inputs A map of input names to tensors.\n * @param attrs A map of attribute names to their values. An attribute is a\n * primitive (non-tensor) input to the kernel.\n * @param inputsToSave A list of tensors, inputs to save for the backprop\n * computation.\n * @param outputsToSave A list of booleans, specifying which output to save\n * for the backprop computation. These are booleans since the output\n * tensors are not visible to the user.\n */\n runKernel(kernelName, inputs, attrs, inputsToSave, outputsToSave) {\n const forwardFunc = null;\n const backwardsFunc = null;\n // Call runKernel as a stop-gap until we modularize all kernels.\n // Once we modularize all kernels, we will remove the existing\n // `runKernelFunc`.\n return this.runKernelFunc(forwardFunc, inputs, backwardsFunc, kernelName, attrs, inputsToSave, outputsToSave);\n }\n shouldCheckForMemLeaks() {\n return this.ENV.getBool('IS_TEST');\n }\n checkKernelForMemLeak(kernelName, numDataIdsBefore, outInfos) {\n const numDataIdsAfter = this.backend.numDataIds();\n // Count the number of data ids associated with the result of the kernel.\n let numOutputDataIds = 0;\n outInfos.forEach(info => {\n // Complex numbers allocate 3 data ids, one for 'real', one for\n // 'imaginary', and one for the container that holds the former two.\n numOutputDataIds += (info.dtype === 'complex64' ? 3 : 1);\n });\n // Account for the number of moves during kernel execution. A \"data move\"\n // can happen in the middle of a kernel execution, placing a new (key,value)\n // pair in the data storage. Since data moves have net zero effect (we\n // always remove the data from the old backend), we have to cancel them out\n // when detecting memory leaks.\n const numMoves = this.state.numDataMovesStack[this.state.numDataMovesStack.length - 1];\n const dataIdsLeaked = numDataIdsAfter - numDataIdsBefore - numOutputDataIds - numMoves;\n if (dataIdsLeaked > 0) {\n throw new Error(`Backend '${this.backendName}' has an internal memory leak ` +\n `(${dataIdsLeaked} data ids) after running '${kernelName}'`);\n }\n }\n /**\n * @deprecated Use `runKernel` for newly added kernels. Keep using this method\n * only for kernels that are not yet fully modularized.\n */\n runKernelFunc(forwardFunc, inputs, backwardsFunc, kernelName, attrs, inputsToSave, outputsToSave) {\n let outputs;\n let saved = [];\n const isTapeOn = this.isTapeOn();\n if (kernelName == null) {\n kernelName =\n this.state.activeScope != null ? 
this.state.activeScope.name : '';\n }\n const startingBytecount = this.state.numBytes;\n const startingNumTensors = this.state.numTensors;\n if (this.shouldCheckForMemLeaks()) {\n this.state.numDataMovesStack.push(0);\n }\n let kernelFunc;\n const kernel = getKernel(kernelName, this.backendName);\n let out;\n if (kernel != null) {\n kernelFunc = () => {\n const numDataIdsBefore = this.backend.numDataIds();\n out = kernel.kernelFunc({ inputs, attrs, backend: this.backend });\n const outInfos = Array.isArray(out) ? out : [out];\n if (this.shouldCheckForMemLeaks()) {\n this.checkKernelForMemLeak(kernelName, numDataIdsBefore, outInfos);\n }\n const outTensors = outInfos.map(({ dataId, shape, dtype }) => this.makeTensorFromDataId(dataId, shape, dtype));\n // Save the inputs and outputs.\n // Do not save unless we are recording to the tape. Otherwise it would\n // cause a mem leak since we would never run backprop, which disposes\n // the kept tensors.\n if (isTapeOn) {\n let tensorsToSave = this.getTensorsForGradient(kernelName, inputs, outTensors);\n if (tensorsToSave == null) {\n // Fallback for ops that call runKernelFunc and pass in\n // inputsToSave and outputsToSave. Currently this is the set of ops\n // with kernel support in the WASM backend. Once those ops and\n // respective gradients are modularised we can remove this path.\n if (outputsToSave == null) {\n outputsToSave = [];\n }\n const outsToSave = outTensors.filter((_, i) => outputsToSave[i]);\n tensorsToSave = (inputsToSave || []).slice().concat(outsToSave);\n }\n saved = this.saveTensorsForBackwardMode(tensorsToSave);\n }\n return outTensors;\n };\n }\n else {\n const saveFunc = (tensors) => {\n // Do not save unless we are recording to the tape. Otherwise it would\n // cause a mem leak since we would never run backprop, which disposes\n // the kept tensors.\n if (!isTapeOn) {\n return;\n }\n saved = tensors.map(tensor => this.keep(this.clone(tensor)));\n };\n kernelFunc = () => {\n const numDataIdsBefore = this.backend.numDataIds();\n out = this.tidy(() => forwardFunc(this.backend, saveFunc));\n const outs = (Array.isArray(out) ? out : [out]);\n if (this.shouldCheckForMemLeaks()) {\n this.checkKernelForMemLeak(kernelName, numDataIdsBefore, outs);\n }\n return outs;\n };\n }\n // Stop recording to a tape when running a kernel.\n let kernelProfile;\n this.scopedRun(() => this.state.kernelDepth++, () => this.state.kernelDepth--, () => {\n if (!this.ENV.getBool('DEBUG') && !this.state.profiling) {\n outputs = kernelFunc();\n }\n else {\n kernelProfile = this.profiler.profileKernel(kernelName, inputs, () => kernelFunc());\n if (this.ENV.getBool('DEBUG')) {\n this.profiler.logKernelProfile(kernelProfile);\n }\n outputs = kernelProfile.outputs;\n }\n });\n if (isTapeOn) {\n this.addTapeNode(kernelName, inputs, outputs, backwardsFunc, saved, attrs);\n }\n if (this.state.profiling) {\n this.state.activeProfile.kernels.push({\n name: kernelName,\n bytesAdded: this.state.numBytes - startingBytecount,\n totalBytesSnapshot: this.state.numBytes,\n tensorsAdded: this.state.numTensors - startingNumTensors,\n totalTensorsSnapshot: this.state.numTensors,\n inputShapes: Object.keys(inputs).map(key => inputs[key] != null ? inputs[key].shape : null),\n outputShapes: outputs.map(item => item.shape),\n kernelTimeMs: kernelProfile.timeMs,\n extraInfo: kernelProfile.extraInfo\n });\n }\n return (Array.isArray(out) ? 
outputs : outputs[0]);\n }\n /**\n * Saves tensors used in forward mode for use in backward mode.\n *\n * @param tensors the list of tensors to save.\n */\n saveTensorsForBackwardMode(tensors) {\n const saved = tensors.map(tensor => this.keep(this.clone(tensor)));\n return saved;\n }\n /**\n * Returns a list of tensors to save for a given gradient calculation.\n *\n * Returns undefined if their is no registered gradient for this kernel in the\n * gradient registry.\n *\n * @param kernelName name of kernel to look up gradient for.\n * @param inputs a map of input tensors.\n * @param outputs an array of output tensors from forward mode of kernel.\n */\n getTensorsForGradient(kernelName, inputs, outputs) {\n const gradConfig = getGradient(kernelName);\n if (gradConfig != null) {\n const inputsToSave = gradConfig.inputsToSave || [];\n const outputsToSave = gradConfig.outputsToSave || [];\n // If saveAllInputs is true, all inputs will be saved. Otherwise, inputs\n // specified in inputsToSave will be saved.\n let inputTensorsToSave;\n if (gradConfig.saveAllInputs) {\n util.assert(Array.isArray(inputs), () => 'saveAllInputs is true, expected inputs to be an array.');\n inputTensorsToSave = Object.keys(inputs).map((key) => inputs[key]);\n }\n else {\n inputTensorsToSave = inputsToSave.map((inputName) => inputs[inputName]);\n }\n const outputTensorsToSave = outputs.filter((_, i) => outputsToSave[i]);\n return inputTensorsToSave.concat(outputTensorsToSave);\n }\n // TODO(yassogba) throw exception here once all runkernelFunc calls with\n // inputsToSave/outputsToSave are removed\n return null;\n }\n /**\n * Internal method used by public APIs for tensor creation. Makes a new\n * tensor with the provided shape, dtype and values. It always\n * creates a new data id and writes the values to the underlying backend.\n */\n makeTensor(values, shape, dtype, backend) {\n if (values == null) {\n throw new Error('Values passed to engine.makeTensor() are null');\n }\n dtype = dtype || 'float32';\n backend = backend || this.backend;\n let backendVals = values;\n if (dtype === 'string' && util.isString(values[0])) {\n backendVals = values.map(d => util.encodeString(d));\n }\n const dataId = backend.write(backendVals, shape, dtype);\n const t = new Tensor(shape, dtype, dataId, this.nextTensorId());\n this.incRef(t, backend);\n // Count bytes for string tensors.\n if (dtype === 'string') {\n const info = this.state.tensorInfo.get(dataId);\n const newBytes = bytesFromStringArray(backendVals);\n this.state.numBytes += newBytes - info.bytes;\n info.bytes = newBytes;\n }\n return t;\n }\n /**\n * Internal method used by backends. Makes a new tensor\n * that is a wrapper around an existing data id. 
It doesn't create\n * a new data id, only increments the ref count used in memory tracking.\n */\n makeTensorFromDataId(dataId, shape, dtype, backend) {\n dtype = dtype || 'float32';\n const t = new Tensor(shape, dtype, dataId, this.nextTensorId());\n this.incRef(t, backend);\n return t;\n }\n makeVariable(initialValue, trainable = true, name, dtype) {\n name = name || this.nextVariableId().toString();\n if (dtype != null && dtype !== initialValue.dtype) {\n initialValue = initialValue.cast(dtype);\n }\n const v = new Variable(initialValue, trainable, name, this.nextTensorId());\n if (this.state.registeredVariables[v.name] != null) {\n throw new Error(`Variable with name ${v.name} was already registered`);\n }\n this.state.registeredVariables[v.name] = v;\n this.incRef(v, this.backend);\n return v;\n }\n incRef(a, backend) {\n const refCount = this.state.tensorInfo.has(a.dataId) ?\n this.state.tensorInfo.get(a.dataId).refCount :\n 0;\n this.state.numTensors++;\n if (a.dtype === 'string') {\n this.state.numStringTensors++;\n }\n if (refCount === 0) {\n this.state.numDataBuffers++;\n // Bytes for complex numbers are counted by their components. Bytes for\n // string tensors are counted when writing values.\n let bytes = 0;\n if (a.dtype !== 'complex64' && a.dtype !== 'string') {\n bytes = a.size * util.bytesPerElement(a.dtype);\n }\n this.state.tensorInfo.set(a.dataId, {\n backend: backend || this.backend,\n dtype: a.dtype,\n shape: a.shape,\n bytes,\n refCount: 0\n });\n this.state.numBytes += bytes;\n }\n this.state.tensorInfo.get(a.dataId).refCount++;\n if (!(a instanceof Variable)) {\n this.track(a);\n }\n }\n disposeTensor(a) {\n if (!this.state.tensorInfo.has(a.dataId)) {\n return;\n }\n this.state.numTensors--;\n if (a.dtype === 'string') {\n this.state.numStringTensors--;\n }\n const info = this.state.tensorInfo.get(a.dataId);\n const refCount = info.refCount;\n if (refCount <= 1) {\n // Don't count bytes for complex numbers as they are counted by their\n // components.\n if (a.dtype !== 'complex64') {\n this.state.numBytes -= info.bytes;\n }\n this.state.numDataBuffers--;\n info.backend.disposeData(a.dataId);\n this.state.tensorInfo.delete(a.dataId);\n }\n else {\n this.state.tensorInfo.get(a.dataId).refCount--;\n }\n // TODO(nsthorat): Construct an error and save the stack trace for\n // debugging when in debug mode. 
Creating a stack trace is too expensive\n // to do unconditionally.\n }\n disposeVariables() {\n for (const varName in this.state.registeredVariables) {\n const v = this.state.registeredVariables[varName];\n this.disposeVariable(v);\n }\n }\n disposeVariable(v) {\n this.disposeTensor(v);\n if (this.state.registeredVariables[v.name] != null) {\n delete this.state.registeredVariables[v.name];\n }\n }\n memory() {\n const info = this.backend.memory();\n info.numTensors = this.state.numTensors;\n info.numDataBuffers = this.state.numDataBuffers;\n info.numBytes = this.state.numBytes;\n if (this.state.numStringTensors > 0) {\n info.unreliable = true;\n if (info.reasons == null) {\n info.reasons = [];\n }\n info.reasons.push('Memory usage by string tensors is approximate ' +\n '(2 bytes per character)');\n }\n return info;\n }\n async profile(query) {\n this.state.profiling = true;\n const startBytes = this.state.numBytes;\n const startNumTensors = this.state.numTensors;\n this.state.activeProfile.kernels = [];\n this.state.activeProfile.result = await query();\n this.state.profiling = false;\n this.state.activeProfile.peakBytes = Math.max(...this.state.activeProfile.kernels.map(d => d.totalBytesSnapshot));\n this.state.activeProfile.newBytes = this.state.numBytes - startBytes;\n this.state.activeProfile.newTensors =\n this.state.numTensors - startNumTensors;\n for (const kernel of this.state.activeProfile.kernels) {\n kernel.kernelTimeMs = await kernel.kernelTimeMs;\n kernel.extraInfo = await kernel.extraInfo;\n }\n return this.state.activeProfile;\n }\n isTapeOn() {\n return this.state.gradientDepth > 0 && this.state.kernelDepth === 0;\n }\n addTapeNode(kernelName, inputs, outputs, gradientsFunc, saved, attrs) {\n const tapeNode = { id: this.state.nextTapeNodeId++, kernelName, inputs, outputs, saved };\n const gradConfig = getGradient(kernelName);\n if (gradConfig != null) {\n gradientsFunc = gradConfig.gradFunc;\n }\n if (gradientsFunc != null) {\n tapeNode.gradient = (dys) => {\n // TODO(smilkov): To optimize back-prop, pass dys that are not used in\n // the backprop graph to the user as null instead of zeros\n dys = dys.map((dy, i) => {\n if (dy == null) {\n const output = outputs[i];\n const vals = util.makeZerosTypedArray(output.size, output.dtype);\n return this.makeTensor(vals, output.shape, output.dtype);\n }\n return dy;\n });\n // Grad functions of ops with single outputs expect a dy, while ops\n // with multiple outputs expect dys (array of dy).\n return gradientsFunc(dys.length > 1 ? dys : dys[0], saved, attrs);\n };\n }\n this.state.activeTape.push(tapeNode);\n }\n keep(result) {\n result.kept = true;\n return result;\n }\n startTape() {\n if (this.state.gradientDepth === 0) {\n this.state.activeTape = [];\n }\n this.state.gradientDepth++;\n }\n endTape() {\n this.state.gradientDepth--;\n }\n /**\n * Start a scope. Use this with endScope() to achieve the same functionality\n * as scope() without the need for a function closure.\n */\n startScope(name) {\n const scopeInfo = {\n track: [],\n name: 'unnamed scope',\n id: this.state.nextScopeId++\n };\n if (name) {\n scopeInfo.name = name;\n }\n this.state.scopeStack.push(scopeInfo);\n this.state.activeScope = scopeInfo;\n }\n /**\n * End a scope. 
Use this with startScope() to achieve the same functionality\n * as scope() without the need for a function closure.\n */\n endScope(result) {\n const tensorsToTrackInParent = getTensorsInContainer(result);\n const tensorsToTrackInParentSet = new Set(tensorsToTrackInParent.map(t => t.id));\n // Dispose the arrays tracked in this scope.\n for (let i = 0; i < this.state.activeScope.track.length; i++) {\n const tensor = this.state.activeScope.track[i];\n if (!tensor.kept && !tensorsToTrackInParentSet.has(tensor.id)) {\n tensor.dispose();\n }\n }\n const oldScope = this.state.scopeStack.pop();\n this.state.activeScope = this.state.scopeStack.length === 0 ?\n null :\n this.state.scopeStack[this.state.scopeStack.length - 1];\n // Track the current result in the parent scope.\n tensorsToTrackInParent.forEach(tensor => {\n // Only track the tensor if was allocated in the inner scope and is not\n // globally kept.\n if (!tensor.kept && tensor.scopeId === oldScope.id) {\n this.track(tensor);\n }\n });\n }\n /**\n * Returns gradients of `f` with respect to each of the `xs`. The gradients\n * returned are of the same length as `xs`, but some might be null if `f`\n * was not a function of that `x`. It also takes optional dy to multiply the\n * gradient, which defaults to `1`.\n */\n gradients(f, xs, dy, allowNoGradients = false) {\n util.assert(xs.length > 0, () => 'gradients() received an empty list of xs.');\n if (dy != null && dy.dtype !== 'float32') {\n throw new Error(`dy must have 'float32' dtype, but has '${dy.dtype}'`);\n }\n const y = this.scopedRun(() => this.startTape(), () => this.endTape(), () => this.tidy('forward', f));\n util.assert(y instanceof Tensor, () => 'The result y returned by f() must be a tensor.');\n // Filter out the nodes that don't connect x => y.\n const filteredTape = getFilteredNodesXToY(this.state.activeTape, xs, y);\n if (!allowNoGradients && filteredTape.length === 0 && xs.length > 0) {\n throw new Error('Cannot compute gradient of y=f(x) with respect to x. Make sure ' +\n 'that the f you passed encloses all operations that lead from x ' +\n 'to y.');\n }\n return this.tidy('backward', () => {\n const accumulatedGradientMap = {};\n accumulatedGradientMap[y.id] = (dy == null) ? ones(y.shape) : dy;\n // Backprop gradients through the filtered nodes.\n backpropagateGradients(accumulatedGradientMap, filteredTape, \n // Pass the tidy function to avoid circular dep with `tape.ts`.\n f => this.tidy(f), \n // Pass an add function to avoide a circular dep with `tape.ts`.\n add);\n const grads = xs.map(x => accumulatedGradientMap[x.id]);\n if (this.state.gradientDepth === 0) {\n // This means that we are not computing higher-order gradients\n // and can clean up the tape.\n this.state.activeTape.forEach(node => {\n for (const tensor of node.saved) {\n tensor.dispose();\n }\n });\n this.state.activeTape = null;\n }\n return { value: y, grads };\n });\n }\n customGrad(f) {\n util.assert(util.isFunction(f), () => 'The f passed in customGrad(f) must be a function.');\n return (...inputs) => {\n util.assert(inputs.every(t => t instanceof Tensor), () => 'The args passed in customGrad(f)(x1, x2,...) 
must all be ' +\n 'tensors');\n let res;\n const inputMap = {};\n inputs.forEach((input, i) => {\n inputMap[i] = input;\n });\n return this.runKernelFunc((_, save) => {\n res = f(...[...inputs, save]);\n util.assert(res.value instanceof Tensor, () => 'The function f passed in customGrad(f) must return an ' +\n 'object where `obj.value` is a tensor');\n util.assert(util.isFunction(res.gradFunc), () => 'The function f passed in customGrad(f) must return an ' +\n 'object where `obj.gradFunc` is a function.');\n return res.value;\n }, inputMap, (dy, saved) => {\n const gradRes = res.gradFunc(dy, saved);\n const grads = Array.isArray(gradRes) ? gradRes : [gradRes];\n util.assert(grads.length === inputs.length, () => 'The function f passed in customGrad(f) must return an ' +\n 'object where `obj.gradFunc` is a function that returns ' +\n 'the same number of tensors as inputs passed to f(...).');\n util.assert(grads.every(t => t instanceof Tensor), () => 'The function f passed in customGrad(f) must return an ' +\n 'object where `obj.gradFunc` is a function that returns ' +\n 'a list of only tensors.');\n const gradMap = {};\n grads.forEach((grad, i) => {\n gradMap[i] = () => grad;\n });\n return gradMap;\n });\n };\n }\n readSync(dataId) {\n // Route the read to the correct backend.\n const info = this.state.tensorInfo.get(dataId);\n return info.backend.readSync(dataId);\n }\n read(dataId) {\n // Route the read to the correct backend.\n const info = this.state.tensorInfo.get(dataId);\n return info.backend.read(dataId);\n }\n async time(query) {\n const start = now();\n const timingInfo = await this.backend.time(query);\n timingInfo.wallMs = now() - start;\n return timingInfo;\n }\n /**\n * Tracks a Tensor in the current scope to be automatically cleaned up\n * when the current scope ends, and returns the value.\n *\n * @param result The Tensor to track in the current scope.\n */\n track(result) {\n if (this.state.activeScope != null) {\n result.scopeId = this.state.activeScope.id;\n this.state.activeScope.track.push(result);\n }\n return result;\n }\n get registeredVariables() {\n return this.state.registeredVariables;\n }\n /**\n * Resets the engine state. 
Removes all backends but does not remove\n * registered backend factories.\n */\n reset() {\n // Make any pending promise obsolete.\n this.pendingBackendInitId++;\n this.state.dispose();\n this.ENV.reset();\n this.state = new EngineState();\n for (const backendName in this.registry) {\n this.disposeRegisteredKernels(backendName);\n this.registry[backendName].dispose();\n delete this.registry[backendName];\n }\n this.backendName = null;\n this.backendInstance = null;\n this.pendingBackendInit = null;\n }\n}\nEngine.nextTensorId = 0;\nEngine.nextVariableId = 0;\nfunction ones(shape) {\n const values = makeOnesTypedArray(sizeFromShape(shape), 'float32');\n return ENGINE.makeTensor(values, shape, 'float32');\n}\nexport function getOrMakeEngine() {\n const ns = getGlobalNamespace();\n if (ns._tfengine == null) {\n const environment = new Environment(ns);\n ns._tfengine = new Engine(environment);\n }\n setEnvironmentGlobal(ns._tfengine.ENV);\n // Tell the current tensor interface that the global engine is responsible\n // for tracking.\n setTensorTracker(() => ns._tfengine);\n return ns._tfengine;\n}\nexport const ENGINE = getOrMakeEngine();\n/**\n * A implementation of the add op for use within engine and tape.\n *\n * This allows us to avoid a circular dependency between add.ts and engine.\n * It is exported to be available in tape tests.\n */\nexport function add(a, b) {\n // We duplicate Add here to avoid a circular dependency with add.ts.\n const inputs = { a, b };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.add(a, b);\n save([a, b]);\n return res;\n }, inputs, null /* gradient */, Add);\n}\n//# sourceMappingURL=engine.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// tslint:disable-next-line:no-any\nfunction _isNavigatorDefined() {\n return typeof navigator !== 'undefined' && navigator != null;\n}\nexport function isMobile() {\n if (_isNavigatorDefined()) {\n // tslint:disable-next-line:no-any\n const a = navigator.userAgent || navigator.vendor || window.opera;\n // tslint:disable-next-line:max-line-length\n return /(android|bb\\d+|meego).+mobile|avantgo|bada\\/|blackberry|blazer|compal|elaine|fennec|hiptop|iemobile|ip(hone|od)|iris|kindle|lge |maemo|midp|mmp|mobile.+firefox|netfront|opera m(ob|in)i|palm( os)?|phone|p(ixi|re)\\/|plucker|pocket|psp|series(4|6)0|symbian|treo|up\\.(browser|link)|vodafone|wap|windows ce|xda|xiino/i\n .test(a) ||\n // tslint:disable-next-line:max-line-length\n /1207|6310|6590|3gso|4thp|50[1-6]i|770s|802s|a wa|abac|ac(er|oo|s\\-)|ai(ko|rn)|al(av|ca|co)|amoi|an(ex|ny|yw)|aptu|ar(ch|go)|as(te|us)|attw|au(di|\\-m|r |s 
)|avan|be(ck|ll|nq)|bi(lb|rd)|bl(ac|az)|br(e|v)w|bumb|bw\\-(n|u)|c55\\/|capi|ccwa|cdm\\-|cell|chtm|cldc|cmd\\-|co(mp|nd)|craw|da(it|ll|ng)|dbte|dc\\-s|devi|dica|dmob|do(c|p)o|ds(12|\\-d)|el(49|ai)|em(l2|ul)|er(ic|k0)|esl8|ez([4-7]0|os|wa|ze)|fetc|fly(\\-|_)|g1 u|g560|gene|gf\\-5|g\\-mo|go(\\.w|od)|gr(ad|un)|haie|hcit|hd\\-(m|p|t)|hei\\-|hi(pt|ta)|hp( i|ip)|hs\\-c|ht(c(\\-| |_|a|g|p|s|t)|tp)|hu(aw|tc)|i\\-(20|go|ma)|i230|iac( |\\-|\\/)|ibro|idea|ig01|ikom|im1k|inno|ipaq|iris|ja(t|v)a|jbro|jemu|jigs|kddi|keji|kgt( |\\/)|klon|kpt |kwc\\-|kyo(c|k)|le(no|xi)|lg( g|\\/(k|l|u)|50|54|\\-[a-w])|libw|lynx|m1\\-w|m3ga|m50\\/|ma(te|ui|xo)|mc(01|21|ca)|m\\-cr|me(rc|ri)|mi(o8|oa|ts)|mmef|mo(01|02|bi|de|do|t(\\-| |o|v)|zz)|mt(50|p1|v )|mwbp|mywa|n10[0-2]|n20[2-3]|n30(0|2)|n50(0|2|5)|n7(0(0|1)|10)|ne((c|m)\\-|on|tf|wf|wg|wt)|nok(6|i)|nzph|o2im|op(ti|wv)|oran|owg1|p800|pan(a|d|t)|pdxg|pg(13|\\-([1-8]|c))|phil|pire|pl(ay|uc)|pn\\-2|po(ck|rt|se)|prox|psio|pt\\-g|qa\\-a|qc(07|12|21|32|60|\\-[2-7]|i\\-)|qtek|r380|r600|raks|rim9|ro(ve|zo)|s55\\/|sa(ge|ma|mm|ms|ny|va)|sc(01|h\\-|oo|p\\-)|sdk\\/|se(c(\\-|0|1)|47|mc|nd|ri)|sgh\\-|shar|sie(\\-|m)|sk\\-0|sl(45|id)|sm(al|ar|b3|it|t5)|so(ft|ny)|sp(01|h\\-|v\\-|v )|sy(01|mb)|t2(18|50)|t6(00|10|18)|ta(gt|lk)|tcl\\-|tdg\\-|tel(i|m)|tim\\-|t\\-mo|to(pl|sh)|ts(70|m\\-|m3|m5)|tx\\-9|up(\\.b|g1|si)|utst|v400|v750|veri|vi(rg|te)|vk(40|5[0-3]|\\-v)|vm40|voda|vulc|vx(52|53|60|61|70|80|81|83|85|98)|w3c(\\-| )|webc|whit|wi(g |nc|nw)|wmlb|wonu|x700|yas\\-|your|zeto|zte\\-/i\n .test(a.substr(0, 4));\n }\n return false;\n}\nexport function isBrowser() {\n return (typeof window !== 'undefined' && window.document != null) ||\n //@ts-ignore\n (typeof WorkerGlobalScope !== 'undefined');\n}\n//# sourceMappingURL=device_util.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport './engine';\nimport * as device_util from './device_util';\nimport { env } from './environment';\nconst ENV = env();\n/**\n * This file contains environment-related flag registrations.\n */\n/** Whether to enable debug mode. */\nENV.registerFlag('DEBUG', () => false, debugValue => {\n if (debugValue) {\n console.warn('Debugging mode is ON. The output of every math call will ' +\n 'be downloaded to CPU and checked for NaNs. ' +\n 'This significantly impacts performance.');\n }\n});\n/** Whether we are in a browser (as versus, say, node.js) environment. */\nENV.registerFlag('IS_BROWSER', () => device_util.isBrowser());\n/** Whether we are in a browser (as versus, say, node.js) environment. */\nENV.registerFlag('IS_NODE', () => (typeof process !== 'undefined') &&\n (typeof process.versions !== 'undefined') &&\n (typeof process.versions.node !== 'undefined'));\n/** Whether this browser is Chrome. 
*/\nENV.registerFlag('IS_CHROME', () => typeof navigator !== 'undefined' && navigator != null &&\n navigator.userAgent != null && /Chrome/.test(navigator.userAgent) &&\n /Google Inc/.test(navigator.vendor));\n/**\n * True when the environment is \"production\" where we disable safety checks\n * to gain performance.\n */\nENV.registerFlag('PROD', () => false);\n/**\n * Whether to do sanity checks when inferring a shape from user-provided\n * values, used when creating a new tensor.\n */\nENV.registerFlag('TENSORLIKE_CHECK_SHAPE_CONSISTENCY', () => ENV.getBool('DEBUG'));\n/** Whether deprecation warnings are enabled. */\nENV.registerFlag('DEPRECATION_WARNINGS_ENABLED', () => true);\n/** True if running unit tests. */\nENV.registerFlag('IS_TEST', () => false);\n//# sourceMappingURL=flags.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from './engine';\nimport { env } from './environment';\nimport { Tensor } from './tensor';\nimport { assert, flatten, inferDtype, isTypedArray, toTypedArray } from './util';\nexport function inferShape(val, dtype) {\n let firstElem = val;\n if (isTypedArray(val)) {\n return dtype === 'string' ? 
[] : [val.length];\n }\n if (!Array.isArray(val)) {\n return []; // Scalar.\n }\n const shape = [];\n while (Array.isArray(firstElem) ||\n isTypedArray(firstElem) && dtype !== 'string') {\n shape.push(firstElem.length);\n firstElem = firstElem[0];\n }\n if (Array.isArray(val) &&\n env().getBool('TENSORLIKE_CHECK_SHAPE_CONSISTENCY')) {\n deepAssertShapeConsistency(val, shape, []);\n }\n return shape;\n}\nfunction deepAssertShapeConsistency(val, shape, indices) {\n indices = indices || [];\n if (!(Array.isArray(val)) && !isTypedArray(val)) {\n assert(shape.length === 0, () => `Element arr[${indices.join('][')}] is a primitive, ` +\n `but should be an array/TypedArray of ${shape[0]} elements`);\n return;\n }\n assert(shape.length > 0, () => `Element arr[${indices.join('][')}] should be a primitive, ` +\n `but is an array of ${val.length} elements`);\n assert(val.length === shape[0], () => `Element arr[${indices.join('][')}] should have ${shape[0]} ` +\n `elements, but has ${val.length} elements`);\n const subShape = shape.slice(1);\n for (let i = 0; i < val.length; ++i) {\n deepAssertShapeConsistency(val[i], subShape, indices.concat(i));\n }\n}\nfunction assertDtype(expectedDtype, actualDType, argName, functionName) {\n if (expectedDtype == null) {\n return;\n }\n if (expectedDtype !== 'numeric' && expectedDtype !== actualDType ||\n expectedDtype === 'numeric' && actualDType === 'string') {\n throw new Error(`Argument '${argName}' passed to '${functionName}' must ` +\n `be ${expectedDtype} tensor, but got ${actualDType} tensor`);\n }\n}\nexport function convertToTensor(x, argName, functionName, parseAsDtype = 'numeric') {\n if (x instanceof Tensor) {\n assertDtype(parseAsDtype, x.dtype, argName, functionName);\n return x;\n }\n let inferredDtype = inferDtype(x);\n // If the user expects a bool/int/float, use that info to update the\n // inferredDtype when it is not a string.\n if (inferredDtype !== 'string' &&\n ['bool', 'int32', 'float32'].indexOf(parseAsDtype) >= 0) {\n inferredDtype = parseAsDtype;\n }\n assertDtype(parseAsDtype, inferredDtype, argName, functionName);\n if ((x == null) ||\n (!isTypedArray(x) && !Array.isArray(x) && typeof x !== 'number' &&\n typeof x !== 'boolean' && typeof x !== 'string')) {\n const type = x == null ? 'null' : x.constructor.name;\n throw new Error(`Argument '${argName}' passed to '${functionName}' must be a ` +\n `Tensor or TensorLike, but got '${type}'`);\n }\n const inferredShape = inferShape(x, inferredDtype);\n if (!isTypedArray(x) && !Array.isArray(x)) {\n x = [x];\n }\n const skipTypedArray = true;\n const values = inferredDtype !== 'string' ?\n toTypedArray(x, inferredDtype) :\n flatten(x, [], skipTypedArray);\n return ENGINE.makeTensor(values, inferredShape, inferredDtype);\n}\nexport function convertToTensorArray(arg, argName, functionName, parseAsDtype = 'numeric') {\n if (!Array.isArray(arg)) {\n throw new Error(`Argument ${argName} passed to ${functionName} must be a ` +\n '`Tensor[]` or `TensorLike[]`');\n }\n const tensors = arg;\n return tensors.map((t, i) => convertToTensor(t, `${argName}[${i}]`, functionName), parseAsDtype);\n}\n//# sourceMappingURL=tensor_util_env.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { isPromise } from '../util';\nexport const OP_SCOPE_SUFFIX = '__op';\n/**\n * Used for wrapping functions that perform math operations on\n * Tensors. The function will be wrapped in a named scope that cleans all\n * memory usage after the function is done.\n */\nexport function op(f) {\n const keys = Object.keys(f);\n if (keys.length !== 1) {\n throw new Error(`Please provide an object with a single key ` +\n `(operation name) mapping to a function. Got an object with ` +\n `${keys.length} keys.`);\n }\n let opName = keys[0];\n const fn = f[opName];\n // Strip the underscore from the end of the function name.\n if (opName.endsWith('_')) {\n opName = opName.substring(0, opName.length - 1);\n }\n // add an __op suffix to distinguish ops from kernels in tf.profile\n opName = opName + OP_SCOPE_SUFFIX;\n // tslint:disable-next-line:no-any\n const f2 = (...args) => {\n ENGINE.startScope(opName);\n try {\n const result = fn(...args);\n if (isPromise(result)) {\n console.error('Cannot return a Promise inside of tidy.');\n }\n ENGINE.endScope(result);\n return result;\n }\n catch (ex) {\n ENGINE.endScope(null);\n throw ex;\n }\n };\n Object.defineProperty(f2, 'name', { value: opName, configurable: true });\n // tslint:disable-next-line:no-any\n return f2;\n}\n//# sourceMappingURL=operation.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Complex } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * Converts two real numbers to a complex number.\n *\n * Given a tensor `real` representing the real part of a complex number, and a\n * tensor `imag` representing the imaginary part of a complex number, this\n * operation returns complex numbers elementwise of the form [r0, i0, r1, i1],\n * where r represents the real part and i represents the imag part.\n *\n * The input tensors real and imag must have the same shape.\n *\n * ```js\n * const real = tf.tensor1d([2.25, 3.25]);\n * const imag = tf.tensor1d([4.75, 5.75]);\n * const complex = tf.complex(real, imag);\n *\n * complex.print();\n * ```\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nfunction complex_(real, imag) {\n const $real = convertToTensor(real, 'real', 'complex');\n const $imag = convertToTensor(imag, 'imag', 'complex');\n util.assertShapesMatch($real.shape, $imag.shape, `real and imag shapes, ${$real.shape} and ${$imag.shape}, ` +\n `must match in call to tf.complex().`);\n const forward = (backend) => {\n return backend.complex($real, $imag);\n };\n const inputs = { real: $real, imag: $imag };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Complex);\n}\nexport const complex = op({ complex_ });\n//# sourceMappingURL=complex.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { assert, assertNonNegativeIntegerDimensions, flatten, inferDtype, isTypedArray, sizeFromShape, toTypedArray } from '../util';\n/** This is shared code across all tensor creation methods. */\nexport function makeTensor(values, shape, inferredShape, dtype) {\n if (dtype == null) {\n dtype = inferDtype(values);\n }\n if (dtype === 'complex64') {\n throw new Error(`Cannot construct a complex64 tensor directly. 
` +\n `Please use tf.complex(real, imag).`);\n }\n if (!isTypedArray(values) && !Array.isArray(values) &&\n typeof values !== 'number' && typeof values !== 'boolean' &&\n typeof values !== 'string') {\n throw new Error('values passed to tensor(values) must be a number/boolean/string or ' +\n 'an array of numbers/booleans/strings, or a TypedArray');\n }\n if (shape != null) {\n assertNonNegativeIntegerDimensions(shape);\n const providedSize = sizeFromShape(shape);\n const inferredSize = sizeFromShape(inferredShape);\n assert(providedSize === inferredSize, () => `Based on the provided shape, [${shape}], the tensor should have ` +\n `${providedSize} values but has ${inferredSize}`);\n for (let i = 0; i < inferredShape.length; ++i) {\n const inferred = inferredShape[i];\n const flatDimsDontMatch = i === inferredShape.length - 1 ?\n inferred !== sizeFromShape(shape.slice(i)) :\n true;\n assert(inferredShape[i] === shape[i] || !flatDimsDontMatch, () => `Error creating a new Tensor. Inferred shape ` +\n `(${inferredShape}) does not match the provided ` +\n `shape (${shape}). `);\n }\n }\n if (!isTypedArray(values) && !Array.isArray(values)) {\n values = [values];\n }\n shape = shape || inferredShape;\n values = dtype !== 'string' ?\n toTypedArray(values, dtype) :\n flatten(values, [], true);\n return ENGINE.makeTensor(values, shape, dtype);\n}\n//# sourceMappingURL=tensor_ops_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { inferShape } from '../tensor_util_env';\nimport { makeTensor } from './tensor_ops_util';\n/**\n * Creates a `tf.Tensor` with the provided values, shape and dtype.\n *\n * ```js\n * // Pass an array of values to create a vector.\n * tf.tensor([1, 2, 3, 4]).print();\n * ```\n *\n * ```js\n * // Pass a nested array of values to make a matrix or a higher\n * // dimensional tensor.\n * tf.tensor([[1, 2], [3, 4]]).print();\n * ```\n *\n * ```js\n * // Pass a flat array and specify a shape yourself.\n * tf.tensor([1, 2, 3, 4], [2, 2]).print();\n * ```\n *\n * @param values The values of the tensor. Can be nested array of numbers,\n * or a flat array, or a `TypedArray`. If the values are strings,\n * they will be encoded as utf-8 and kept as `Uint8Array[]`.\n * @param shape The shape of the tensor. Optional. If not provided,\n * it is inferred from `values`.\n * @param dtype The data type.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function tensor(values, shape, dtype) {\n const inferredShape = inferShape(values, dtype);\n return makeTensor(values, shape, inferredShape, dtype);\n}\n//# sourceMappingURL=tensor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/* Type definitions for exporting and importing of models. */\n/**\n * A map from Tensor dtype to number of bytes per element of the Tensor.\n */\nexport const DTYPE_VALUE_SIZE_MAP = {\n 'float32': 4,\n 'float16': 2,\n 'int32': 4,\n 'uint16': 2,\n 'uint8': 1,\n 'bool': 1,\n 'complex64': 8\n};\n//# sourceMappingURL=types.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { complex } from '../ops/complex';\nimport { tensor } from '../ops/tensor';\nimport { sizeFromShape } from '../util';\nimport { DTYPE_VALUE_SIZE_MAP } from './types';\n/** Number of bytes reserved for the length of the string. (32bit integer). */\nconst NUM_BYTES_STRING_LENGTH = 4;\n/**\n * Encode a map from names to weight values as an ArrayBuffer, along with an\n * `Array` of `WeightsManifestEntry` as specification of the encoded weights.\n *\n * This function does not perform sharding.\n *\n * This function is the reverse of `decodeWeights`.\n *\n * @param tensors A map (\"dict\") from names to tensors.\n * @param group Group to which the weights belong (optional).\n * @returns A `Promise` of\n * - A flat `ArrayBuffer` with all the binary values of the `Tensor`s\n * concatenated.\n * - An `Array` of `WeightManifestEntry`s, carrying information including\n * tensor names, `dtype`s and shapes.\n * @throws Error: on unsupported tensor `dtype`.\n */\nexport async function encodeWeights(tensors, group) {\n // TODO(adarob, cais): Support quantization.\n const specs = [];\n const dataPromises = [];\n const names = Array.isArray(tensors) ?\n tensors.map(tensor => tensor.name) :\n Object.keys(tensors);\n for (let i = 0; i < names.length; ++i) {\n const name = names[i];\n const t = Array.isArray(tensors) ? 
tensors[i].tensor : tensors[name];\n if (t.dtype !== 'float32' && t.dtype !== 'int32' && t.dtype !== 'bool' &&\n t.dtype !== 'string' && t.dtype !== 'complex64') {\n throw new Error(`Unsupported dtype in weight '${name}': ${t.dtype}`);\n }\n const spec = { name, shape: t.shape, dtype: t.dtype };\n if (t.dtype === 'string') {\n const utf8bytes = new Promise(async (resolve) => {\n const vals = await t.bytes();\n const totalNumBytes = vals.reduce((p, c) => p + c.length, 0) +\n NUM_BYTES_STRING_LENGTH * vals.length;\n const bytes = new Uint8Array(totalNumBytes);\n let offset = 0;\n for (let i = 0; i < vals.length; i++) {\n const val = vals[i];\n const bytesOfLength = new Uint8Array(new Uint32Array([val.length]).buffer);\n bytes.set(bytesOfLength, offset);\n offset += NUM_BYTES_STRING_LENGTH;\n bytes.set(val, offset);\n offset += val.length;\n }\n resolve(bytes);\n });\n dataPromises.push(utf8bytes);\n }\n else {\n dataPromises.push(t.data());\n }\n if (group != null) {\n spec.group = group;\n }\n specs.push(spec);\n }\n const tensorValues = await Promise.all(dataPromises);\n return { data: concatenateTypedArrays(tensorValues), specs };\n}\n/**\n * Decode flat ArrayBuffer as weights.\n *\n * This function does not handle sharding.\n *\n * This function is the reverse of `encodeWeights`.\n *\n * @param buffer A flat ArrayBuffer carrying the binary values of the tensors\n * concatenated in the order specified in `specs`.\n * @param specs Specifications of the names, dtypes and shapes of the tensors\n * whose value are encoded by `buffer`.\n * @return A map from tensor name to tensor value, with the names corresponding\n * to names in `specs`.\n * @throws Error, if any of the tensors has unsupported dtype.\n */\nexport function decodeWeights(buffer, specs) {\n // TODO(adarob, cais): Support quantization.\n const out = {};\n let float16Decode;\n let offset = 0;\n for (const spec of specs) {\n const name = spec.name;\n const dtype = spec.dtype;\n const shape = spec.shape;\n const size = sizeFromShape(shape);\n let values;\n if ('quantization' in spec) {\n const quantization = spec.quantization;\n if (quantization.dtype === 'uint8' || quantization.dtype === 'uint16') {\n if (!('min' in quantization && 'scale' in quantization)) {\n throw new Error(`Weight ${spec.name} with quantization ${quantization.dtype} ` +\n `doesn't have corresponding metadata min and scale.`);\n }\n }\n else if (quantization.dtype === 'float16') {\n if (dtype !== 'float32') {\n throw new Error(`Weight ${spec.name} is quantized with ${quantization.dtype} ` +\n `which only supports weights of type float32 not ${dtype}.`);\n }\n }\n else {\n throw new Error(`Weight ${spec.name} has unknown ` +\n `quantization dtype ${quantization.dtype}. 
` +\n `Supported quantization dtypes are: ` +\n `'uint8', 'uint16', and 'float16'.`);\n }\n const quantizationSizeFactor = DTYPE_VALUE_SIZE_MAP[quantization.dtype];\n const byteBuffer = buffer.slice(offset, offset + size * quantizationSizeFactor);\n const quantizedArray = (quantization.dtype === 'uint8') ?\n new Uint8Array(byteBuffer) :\n new Uint16Array(byteBuffer);\n if (dtype === 'float32') {\n if (quantization.dtype === 'uint8' || quantization.dtype === 'uint16') {\n values = new Float32Array(quantizedArray.length);\n for (let i = 0; i < quantizedArray.length; i++) {\n const v = quantizedArray[i];\n values[i] = v * quantization.scale + quantization.min;\n }\n }\n else if (quantization.dtype === 'float16') {\n if (float16Decode === undefined) {\n float16Decode = getFloat16Decoder();\n }\n values = float16Decode(quantizedArray);\n }\n else {\n throw new Error(`Unsupported quantization type ${quantization.dtype} ` +\n `for weight type float32.`);\n }\n }\n else if (dtype === 'int32') {\n if (quantization.dtype !== 'uint8' && quantization.dtype !== 'uint16') {\n throw new Error(`Unsupported quantization type ${quantization.dtype} ` +\n `for weight type int32.`);\n }\n values = new Int32Array(quantizedArray.length);\n for (let i = 0; i < quantizedArray.length; i++) {\n const v = quantizedArray[i];\n values[i] = Math.round(v * quantization.scale + quantization.min);\n }\n }\n else {\n throw new Error(`Unsupported dtype in weight '${name}': ${dtype}`);\n }\n offset += size * quantizationSizeFactor;\n }\n else if (dtype === 'string') {\n const size = sizeFromShape(spec.shape);\n values = [];\n for (let i = 0; i < size; i++) {\n const byteLength = new Uint32Array(buffer.slice(offset, offset + NUM_BYTES_STRING_LENGTH))[0];\n offset += NUM_BYTES_STRING_LENGTH;\n const bytes = new Uint8Array(buffer.slice(offset, offset + byteLength));\n values.push(bytes);\n offset += byteLength;\n }\n }\n else {\n const dtypeFactor = DTYPE_VALUE_SIZE_MAP[dtype];\n const byteBuffer = buffer.slice(offset, offset + size * dtypeFactor);\n if (dtype === 'float32') {\n values = new Float32Array(byteBuffer);\n }\n else if (dtype === 'int32') {\n values = new Int32Array(byteBuffer);\n }\n else if (dtype === 'bool') {\n values = new Uint8Array(byteBuffer);\n }\n else if (dtype === 'complex64') {\n values = new Float32Array(byteBuffer);\n const real = new Float32Array(values.length / 2);\n const image = new Float32Array(values.length / 2);\n for (let i = 0; i < real.length; i++) {\n real[i] = values[i * 2];\n image[i] = values[i * 2 + 1];\n }\n const realTensor = tensor(real, shape, 'float32');\n const imageTensor = tensor(image, shape, 'float32');\n out[name] = complex(realTensor, imageTensor);\n realTensor.dispose();\n imageTensor.dispose();\n }\n else {\n throw new Error(`Unsupported dtype in weight '${name}': ${dtype}`);\n }\n offset += size * dtypeFactor;\n }\n if (dtype !== 'complex64') {\n out[name] = tensor(values, shape, dtype);\n }\n }\n return out;\n}\n/**\n * Concatenate TypedArrays into an ArrayBuffer.\n */\nexport function concatenateTypedArrays(xs) {\n // TODO(adarob, cais): Support quantization.\n if (xs === null) {\n throw new Error(`Invalid input value: ${JSON.stringify(xs)}`);\n }\n let totalByteLength = 0;\n // `normalizedXs` is here for this reason: a `TypedArray`'s `buffer'\n // can have a different byte length from that of the `TypedArray` itself,\n // for example, when the `TypedArray` is created from an offset in an\n // `ArrayBuffer`. 
`normliazedXs` holds `TypedArray`s whose `buffer`s match\n // the `TypedArray` in byte length. If an element of `xs` does not show\n // this property, a new `TypedArray` that satisfy this property will be\n // constructed and pushed into `normalizedXs`.\n const normalizedXs = [];\n xs.forEach((x) => {\n totalByteLength += x.byteLength;\n // tslint:disable:no-any\n normalizedXs.push(x.byteLength === x.buffer.byteLength ? x :\n new x.constructor(x));\n if (!(x instanceof Float32Array || x instanceof Int32Array ||\n x instanceof Uint8Array)) {\n throw new Error(`Unsupported TypedArray subtype: ${x.constructor.name}`);\n }\n // tslint:enable:no-any\n });\n const y = new Uint8Array(totalByteLength);\n let offset = 0;\n normalizedXs.forEach((x) => {\n y.set(new Uint8Array(x.buffer), offset);\n offset += x.byteLength;\n });\n return y.buffer;\n}\n// Use Buffer on Node.js instead of Blob/atob/btoa\nconst useNodeBuffer = typeof Buffer !== 'undefined' &&\n (typeof Blob === 'undefined' || typeof atob === 'undefined' ||\n typeof btoa === 'undefined');\n/**\n * Calculate the byte length of a JavaScript string.\n *\n * Note that a JavaScript string can contain wide characters, therefore the\n * length of the string is not necessarily equal to the byte length.\n *\n * @param str Input string.\n * @returns Byte length.\n */\nexport function stringByteLength(str) {\n if (useNodeBuffer) {\n return Buffer.byteLength(str);\n }\n return new Blob([str]).size;\n}\n/**\n * Encode an ArrayBuffer as a base64 encoded string.\n *\n * @param buffer `ArrayBuffer` to be converted.\n * @returns A string that base64-encodes `buffer`.\n */\nexport function arrayBufferToBase64String(buffer) {\n if (useNodeBuffer) {\n return Buffer.from(buffer).toString('base64');\n }\n const buf = new Uint8Array(buffer);\n let s = '';\n for (let i = 0, l = buf.length; i < l; i++) {\n s += String.fromCharCode(buf[i]);\n }\n return btoa(s);\n}\n/**\n * Decode a base64 string as an ArrayBuffer.\n *\n * @param str Base64 string.\n * @returns Decoded `ArrayBuffer`.\n */\nexport function base64StringToArrayBuffer(str) {\n if (useNodeBuffer) {\n const buf = Buffer.from(str, 'base64');\n return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n }\n const s = atob(str);\n const buffer = new Uint8Array(s.length);\n for (let i = 0; i < s.length; ++i) {\n buffer.set([s.charCodeAt(i)], i);\n }\n return buffer.buffer;\n}\n/**\n * Concatenate a number of ArrayBuffers into one.\n *\n * @param buffers A number of array buffers to concatenate.\n * @returns Result of concatenating `buffers` in order.\n */\nexport function concatenateArrayBuffers(buffers) {\n if (buffers.length === 1) {\n return buffers[0];\n }\n let totalByteLength = 0;\n buffers.forEach((buffer) => {\n totalByteLength += buffer.byteLength;\n });\n const temp = new Uint8Array(totalByteLength);\n let offset = 0;\n buffers.forEach((buffer) => {\n temp.set(new Uint8Array(buffer), offset);\n offset += buffer.byteLength;\n });\n return temp.buffer;\n}\n/**\n * Get the basename of a path.\n *\n * Behaves in a way analogous to Linux's basename command.\n *\n * @param path\n */\nexport function basename(path) {\n const SEPARATOR = '/';\n path = path.trim();\n while (path.endsWith(SEPARATOR)) {\n path = path.slice(0, path.length - 1);\n }\n const items = path.split(SEPARATOR);\n return items[items.length - 1];\n}\n/**\n * Populate ModelArtifactsInfo fields for a model with JSON topology.\n * @param modelArtifacts\n * @returns A ModelArtifactsInfo object.\n */\nexport function 
getModelArtifactsInfoForJSON(modelArtifacts) {\n if (modelArtifacts.modelTopology instanceof ArrayBuffer) {\n throw new Error('Expected JSON model topology, received ArrayBuffer.');\n }\n return {\n dateSaved: new Date(),\n modelTopologyType: 'JSON',\n modelTopologyBytes: modelArtifacts.modelTopology == null ?\n 0 :\n stringByteLength(JSON.stringify(modelArtifacts.modelTopology)),\n weightSpecsBytes: modelArtifacts.weightSpecs == null ?\n 0 :\n stringByteLength(JSON.stringify(modelArtifacts.weightSpecs)),\n weightDataBytes: modelArtifacts.weightData == null ?\n 0 :\n modelArtifacts.weightData.byteLength,\n };\n}\n/**\n * Computes mantisa table for casting Float16 to Float32\n * See http://www.fox-toolkit.org/ftp/fasthalffloatconversion.pdf\n *\n * @returns Uint32Array, 2048 mantissa lookup values.\n */\nfunction computeFloat16MantisaTable() {\n const convertMantissa = (i) => {\n let m = i << 13;\n let e = 0;\n while ((m & 0x00800000) === 0) {\n e -= 0x00800000;\n m <<= 1;\n }\n m &= ~0x00800000;\n e += 0x38800000;\n return m | e;\n };\n const mantisaTable = new Uint32Array(2048);\n mantisaTable[0] = 0;\n for (let i = 1; i < 1024; i++) {\n mantisaTable[i] = convertMantissa(i);\n }\n for (let i = 1024; i < 2048; i++) {\n mantisaTable[i] = 0x38000000 + ((i - 1024) << 13);\n }\n return mantisaTable;\n}\n/**\n * Computes exponent table for casting Float16 to Float32\n * See http://www.fox-toolkit.org/ftp/fasthalffloatconversion.pdf\n *\n * @returns Uint32Array, 64 exponent lookup values.\n */\nfunction computeFloat16ExponentTable() {\n const exponentTable = new Uint32Array(64);\n exponentTable[0] = 0;\n exponentTable[31] = 0x47800000;\n exponentTable[32] = 0x80000000;\n exponentTable[63] = 0xc7800000;\n for (let i = 1; i < 31; i++) {\n exponentTable[i] = i << 23;\n }\n for (let i = 33; i < 63; i++) {\n exponentTable[i] = 0x80000000 + ((i - 32) << 23);\n }\n return exponentTable;\n}\n/**\n * Computes offset table for casting Float16 to Float32\n * See http://www.fox-toolkit.org/ftp/fasthalffloatconversion.pdf\n *\n * @returns Uint32Array, 6d offset values.\n */\nfunction computeFloat16OffsetTable() {\n const offsetTable = new Uint32Array(64);\n for (let i = 0; i < 64; i++) {\n offsetTable[i] = 1024;\n }\n offsetTable[0] = offsetTable[32] = 0;\n return offsetTable;\n}\n/**\n * Retrieve a Float16 decoder which will decode a ByteArray of Float16 values\n * to a Float32Array.\n *\n * @returns Function (buffer: Uint16Array) => Float32Array which decodes\n * the Uint16Array of Float16 bytes to a Float32Array.\n */\nexport function getFloat16Decoder() {\n // Algorithm is based off of\n // http://www.fox-toolkit.org/ftp/fasthalffloatconversion.pdf\n // Cache lookup tables\n const mantisaTable = computeFloat16MantisaTable();\n const exponentTable = computeFloat16ExponentTable();\n const offsetTable = computeFloat16OffsetTable();\n return (quantizedArray) => {\n const buffer = new ArrayBuffer(4 * quantizedArray.length);\n const bufferUint32View = new Uint32Array(buffer);\n for (let index = 0; index < quantizedArray.length; index++) {\n const float16Bits = quantizedArray[index];\n const float32Bits = mantisaTable[offsetTable[float16Bits >> 10] + (float16Bits & 0x3ff)] +\n exponentTable[float16Bits >> 10];\n bufferUint32View[index] = float32Bits;\n }\n return new Float32Array(buffer);\n };\n}\n//# sourceMappingURL=io_utils.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class IORouterRegistry {\n constructor() {\n this.saveRouters = [];\n this.loadRouters = [];\n }\n static getInstance() {\n if (IORouterRegistry.instance == null) {\n IORouterRegistry.instance = new IORouterRegistry();\n }\n return IORouterRegistry.instance;\n }\n /**\n * Register a save-handler router.\n *\n * @param saveRouter A function that maps a URL-like string onto an instance\n * of `IOHandler` with the `save` method defined or `null`.\n */\n static registerSaveRouter(saveRouter) {\n IORouterRegistry.getInstance().saveRouters.push(saveRouter);\n }\n /**\n * Register a load-handler router.\n *\n * @param loadRouter A function that maps a URL-like string onto an instance\n * of `IOHandler` with the `load` method defined or `null`.\n */\n static registerLoadRouter(loadRouter) {\n IORouterRegistry.getInstance().loadRouters.push(loadRouter);\n }\n /**\n * Look up IOHandler for saving, given a URL-like string.\n *\n * @param url\n * @returns If only one match is found, an instance of IOHandler with the\n * `save` method defined. If no match is found, `null`.\n * @throws Error, if more than one match is found.\n */\n static getSaveHandlers(url) {\n return IORouterRegistry.getHandlers(url, 'save');\n }\n /**\n * Look up IOHandler for loading, given a URL-like string.\n *\n * @param url\n * @param loadOptions Optional, custom load options.\n * @returns All valid handlers for `url`, given the currently registered\n * handler routers.\n */\n static getLoadHandlers(url, loadOptions) {\n return IORouterRegistry.getHandlers(url, 'load', loadOptions);\n }\n static getHandlers(url, handlerType, loadOptions) {\n const validHandlers = [];\n const routers = handlerType === 'load' ?\n IORouterRegistry.getInstance().loadRouters :\n IORouterRegistry.getInstance().saveRouters;\n routers.forEach(router => {\n const handler = router(url, loadOptions);\n if (handler !== null) {\n validHandlers.push(handler);\n }\n });\n return validHandlers;\n }\n}\nexport const registerSaveRouter = (loudRouter) => IORouterRegistry.registerSaveRouter(loudRouter);\nexport const registerLoadRouter = (loudRouter) => IORouterRegistry.registerLoadRouter(loudRouter);\nexport const getSaveHandlers = (url) => IORouterRegistry.getSaveHandlers(url);\nexport const getLoadHandlers = (url, loadOptions) => IORouterRegistry.getLoadHandlers(url, loadOptions);\n//# sourceMappingURL=router_registry.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport '../flags';\nimport { env } from '../environment';\nimport { getModelArtifactsInfoForJSON } from './io_utils';\nimport { IORouterRegistry } from './router_registry';\nconst DATABASE_NAME = 'tensorflowjs';\nconst DATABASE_VERSION = 1;\n// Model data and ModelArtifactsInfo (metadata) are stored in two separate\n// stores for efficient access of the list of stored models and their metadata.\n// 1. The object store for model data: topology, weights and weight manifests.\nconst MODEL_STORE_NAME = 'models_store';\n// 2. The object store for ModelArtifactsInfo, including meta-information such\n// as the type of topology (JSON vs binary), byte size of the topology, byte\n// size of the weights, etc.\nconst INFO_STORE_NAME = 'model_info_store';\n/**\n * Delete the entire database for tensorflow.js, including the models store.\n */\nexport async function deleteDatabase() {\n const idbFactory = getIndexedDBFactory();\n return new Promise((resolve, reject) => {\n const deleteRequest = idbFactory.deleteDatabase(DATABASE_NAME);\n deleteRequest.onsuccess = () => resolve();\n deleteRequest.onerror = error => reject(error);\n });\n}\nfunction getIndexedDBFactory() {\n if (!env().getBool('IS_BROWSER')) {\n // TODO(cais): Add more info about what IOHandler subtypes are available.\n // Maybe point to a doc page on the web and/or automatically determine\n // the available IOHandlers and print them in the error message.\n throw new Error('Failed to obtain IndexedDB factory because the current environment' +\n 'is not a web browser.');\n }\n // tslint:disable-next-line:no-any\n const theWindow = typeof window === 'undefined' ? 
self : window;\n const factory = theWindow.indexedDB || theWindow.mozIndexedDB ||\n theWindow.webkitIndexedDB || theWindow.msIndexedDB ||\n theWindow.shimIndexedDB;\n if (factory == null) {\n throw new Error('The current browser does not appear to support IndexedDB.');\n }\n return factory;\n}\nfunction setUpDatabase(openRequest) {\n const db = openRequest.result;\n db.createObjectStore(MODEL_STORE_NAME, { keyPath: 'modelPath' });\n db.createObjectStore(INFO_STORE_NAME, { keyPath: 'modelPath' });\n}\n/**\n * IOHandler subclass: Browser IndexedDB.\n *\n * See the doc string of `browserIndexedDB` for more details.\n */\nexport class BrowserIndexedDB {\n constructor(modelPath) {\n this.indexedDB = getIndexedDBFactory();\n if (modelPath == null || !modelPath) {\n throw new Error('For IndexedDB, modelPath must not be null, undefined or empty.');\n }\n this.modelPath = modelPath;\n }\n async save(modelArtifacts) {\n // TODO(cais): Support saving GraphDef models.\n if (modelArtifacts.modelTopology instanceof ArrayBuffer) {\n throw new Error('BrowserLocalStorage.save() does not support saving model topology ' +\n 'in binary formats yet.');\n }\n return this.databaseAction(this.modelPath, modelArtifacts);\n }\n async load() {\n return this.databaseAction(this.modelPath);\n }\n /**\n * Perform database action to put model artifacts into or read model artifacts\n * from IndexedDB object store.\n *\n * Whether the action is put or get depends on whether `modelArtifacts` is\n * specified. If it is specified, the action will be put; otherwise the action\n * will be get.\n *\n * @param modelPath A unique string path for the model.\n * @param modelArtifacts If specified, it will be the model artifacts to be\n * stored in IndexedDB.\n * @returns A `Promise` of `SaveResult`, if the action is put, or a `Promise`\n * of `ModelArtifacts`, if the action is get.\n */\n databaseAction(modelPath, modelArtifacts) {\n return new Promise((resolve, reject) => {\n const openRequest = this.indexedDB.open(DATABASE_NAME, DATABASE_VERSION);\n openRequest.onupgradeneeded = () => setUpDatabase(openRequest);\n openRequest.onsuccess = () => {\n const db = openRequest.result;\n if (modelArtifacts == null) {\n // Read model out from object store.\n const modelTx = db.transaction(MODEL_STORE_NAME, 'readonly');\n const modelStore = modelTx.objectStore(MODEL_STORE_NAME);\n const getRequest = modelStore.get(this.modelPath);\n getRequest.onsuccess = () => {\n if (getRequest.result == null) {\n db.close();\n return reject(new Error(`Cannot find model with path '${this.modelPath}' ` +\n `in IndexedDB.`));\n }\n else {\n resolve(getRequest.result.modelArtifacts);\n }\n };\n getRequest.onerror = error => {\n db.close();\n return reject(getRequest.error);\n };\n modelTx.oncomplete = () => db.close();\n }\n else {\n // Put model into object store.\n const modelArtifactsInfo = getModelArtifactsInfoForJSON(modelArtifacts);\n // First, put ModelArtifactsInfo into info store.\n const infoTx = db.transaction(INFO_STORE_NAME, 'readwrite');\n let infoStore = infoTx.objectStore(INFO_STORE_NAME);\n const putInfoRequest = infoStore.put({ modelPath: this.modelPath, modelArtifactsInfo });\n let modelTx;\n putInfoRequest.onsuccess = () => {\n // Second, put model data into model store.\n modelTx = db.transaction(MODEL_STORE_NAME, 'readwrite');\n const modelStore = modelTx.objectStore(MODEL_STORE_NAME);\n const putModelRequest = modelStore.put({\n modelPath: this.modelPath,\n modelArtifacts,\n modelArtifactsInfo\n });\n putModelRequest.onsuccess = () => 
resolve({ modelArtifactsInfo });\n putModelRequest.onerror = error => {\n // If the put-model request fails, roll back the info entry as\n // well.\n infoStore = infoTx.objectStore(INFO_STORE_NAME);\n const deleteInfoRequest = infoStore.delete(this.modelPath);\n deleteInfoRequest.onsuccess = () => {\n db.close();\n return reject(putModelRequest.error);\n };\n deleteInfoRequest.onerror = error => {\n db.close();\n return reject(putModelRequest.error);\n };\n };\n };\n putInfoRequest.onerror = error => {\n db.close();\n return reject(putInfoRequest.error);\n };\n infoTx.oncomplete = () => {\n if (modelTx == null) {\n db.close();\n }\n else {\n modelTx.oncomplete = () => db.close();\n }\n };\n }\n };\n openRequest.onerror = error => reject(openRequest.error);\n });\n }\n}\nBrowserIndexedDB.URL_SCHEME = 'indexeddb://';\nexport const indexedDBRouter = (url) => {\n if (!env().getBool('IS_BROWSER')) {\n return null;\n }\n else {\n if (!Array.isArray(url) && url.startsWith(BrowserIndexedDB.URL_SCHEME)) {\n return browserIndexedDB(url.slice(BrowserIndexedDB.URL_SCHEME.length));\n }\n else {\n return null;\n }\n }\n};\nIORouterRegistry.registerSaveRouter(indexedDBRouter);\nIORouterRegistry.registerLoadRouter(indexedDBRouter);\n/**\n * Creates a browser IndexedDB IOHandler for saving and loading models.\n *\n * ```js\n * const model = tf.sequential();\n * model.add(\n * tf.layers.dense({units: 1, inputShape: [100], activation: 'sigmoid'}));\n *\n * const saveResult = await model.save('indexeddb://MyModel'));\n * console.log(saveResult);\n * ```\n *\n * @param modelPath A unique identifier for the model to be saved. Must be a\n * non-empty string.\n * @returns An instance of `BrowserIndexedDB` (sublcass of `IOHandler`),\n * which can be used with, e.g., `tf.Model.save`.\n */\nexport function browserIndexedDB(modelPath) {\n return new BrowserIndexedDB(modelPath);\n}\nfunction maybeStripScheme(key) {\n return key.startsWith(BrowserIndexedDB.URL_SCHEME) ?\n key.slice(BrowserIndexedDB.URL_SCHEME.length) :\n key;\n}\nexport class BrowserIndexedDBManager {\n constructor() {\n this.indexedDB = getIndexedDBFactory();\n }\n async listModels() {\n return new Promise((resolve, reject) => {\n const openRequest = this.indexedDB.open(DATABASE_NAME, DATABASE_VERSION);\n openRequest.onupgradeneeded = () => setUpDatabase(openRequest);\n openRequest.onsuccess = () => {\n const db = openRequest.result;\n const tx = db.transaction(INFO_STORE_NAME, 'readonly');\n const store = tx.objectStore(INFO_STORE_NAME);\n // tslint:disable:max-line-length\n // Need to cast `store` as `any` here because TypeScript's DOM\n // library does not have the `getAll()` method even though the\n // method is supported in the latest version of most mainstream\n // browsers:\n // https://developer.mozilla.org/en-US/docs/Web/API/IDBObjectStore/getAll\n // tslint:enable:max-line-length\n // tslint:disable-next-line:no-any\n const getAllInfoRequest = store.getAll();\n getAllInfoRequest.onsuccess = () => {\n const out = {};\n for (const item of getAllInfoRequest.result) {\n out[item.modelPath] = item.modelArtifactsInfo;\n }\n resolve(out);\n };\n getAllInfoRequest.onerror = error => {\n db.close();\n return reject(getAllInfoRequest.error);\n };\n tx.oncomplete = () => db.close();\n };\n openRequest.onerror = error => reject(openRequest.error);\n });\n }\n async removeModel(path) {\n path = maybeStripScheme(path);\n return new Promise((resolve, reject) => {\n const openRequest = this.indexedDB.open(DATABASE_NAME, DATABASE_VERSION);\n 
openRequest.onupgradeneeded = () => setUpDatabase(openRequest);\n openRequest.onsuccess = () => {\n const db = openRequest.result;\n const infoTx = db.transaction(INFO_STORE_NAME, 'readwrite');\n const infoStore = infoTx.objectStore(INFO_STORE_NAME);\n const getInfoRequest = infoStore.get(path);\n let modelTx;\n getInfoRequest.onsuccess = () => {\n if (getInfoRequest.result == null) {\n db.close();\n return reject(new Error(`Cannot find model with path '${path}' ` +\n `in IndexedDB.`));\n }\n else {\n // First, delete the entry in the info store.\n const deleteInfoRequest = infoStore.delete(path);\n const deleteModelData = () => {\n // Second, delete the entry in the model store.\n modelTx = db.transaction(MODEL_STORE_NAME, 'readwrite');\n const modelStore = modelTx.objectStore(MODEL_STORE_NAME);\n const deleteModelRequest = modelStore.delete(path);\n deleteModelRequest.onsuccess = () => resolve(getInfoRequest.result.modelArtifactsInfo);\n deleteModelRequest.onerror = error => reject(getInfoRequest.error);\n };\n // Proceed with deleting model data regardless of whether deletion\n // of info data succeeds or not.\n deleteInfoRequest.onsuccess = deleteModelData;\n deleteInfoRequest.onerror = error => {\n deleteModelData();\n db.close();\n return reject(getInfoRequest.error);\n };\n }\n };\n getInfoRequest.onerror = error => {\n db.close();\n return reject(getInfoRequest.error);\n };\n infoTx.oncomplete = () => {\n if (modelTx == null) {\n db.close();\n }\n else {\n modelTx.oncomplete = () => db.close();\n }\n };\n };\n openRequest.onerror = error => reject(openRequest.error);\n });\n }\n}\n//# sourceMappingURL=indexed_db.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport '../flags';\nimport { env } from '../environment';\nimport { assert } from '../util';\nimport { arrayBufferToBase64String, base64StringToArrayBuffer, getModelArtifactsInfoForJSON } from './io_utils';\nimport { IORouterRegistry } from './router_registry';\nconst PATH_SEPARATOR = '/';\nconst PATH_PREFIX = 'tensorflowjs_models';\nconst INFO_SUFFIX = 'info';\nconst MODEL_TOPOLOGY_SUFFIX = 'model_topology';\nconst WEIGHT_SPECS_SUFFIX = 'weight_specs';\nconst WEIGHT_DATA_SUFFIX = 'weight_data';\nconst MODEL_METADATA_SUFFIX = 'model_metadata';\n/**\n * Purge all tensorflow.js-saved model artifacts from local storage.\n *\n * @returns Paths of the models purged.\n */\nexport function purgeLocalStorageArtifacts() {\n if (!env().getBool('IS_BROWSER') || typeof window === 'undefined' ||\n typeof window.localStorage === 'undefined') {\n throw new Error('purgeLocalStorageModels() cannot proceed because local storage is ' +\n 'unavailable in the current environment.');\n }\n const LS = window.localStorage;\n const purgedModelPaths = [];\n for (let i = 0; i < LS.length; ++i) {\n const key = LS.key(i);\n const prefix = PATH_PREFIX + PATH_SEPARATOR;\n if 
(key.startsWith(prefix) && key.length > prefix.length) {\n LS.removeItem(key);\n const modelName = getModelPathFromKey(key);\n if (purgedModelPaths.indexOf(modelName) === -1) {\n purgedModelPaths.push(modelName);\n }\n }\n }\n return purgedModelPaths;\n}\nfunction getModelKeys(path) {\n return {\n info: [PATH_PREFIX, path, INFO_SUFFIX].join(PATH_SEPARATOR),\n topology: [PATH_PREFIX, path, MODEL_TOPOLOGY_SUFFIX].join(PATH_SEPARATOR),\n weightSpecs: [PATH_PREFIX, path, WEIGHT_SPECS_SUFFIX].join(PATH_SEPARATOR),\n weightData: [PATH_PREFIX, path, WEIGHT_DATA_SUFFIX].join(PATH_SEPARATOR),\n modelMetadata: [PATH_PREFIX, path, MODEL_METADATA_SUFFIX].join(PATH_SEPARATOR)\n };\n}\n/**\n * Get model path from a local-storage key.\n *\n * E.g., 'tensorflowjs_models/my/model/1/info' --> 'my/model/1'\n *\n * @param key\n */\nfunction getModelPathFromKey(key) {\n const items = key.split(PATH_SEPARATOR);\n if (items.length < 3) {\n throw new Error(`Invalid key format: ${key}`);\n }\n return items.slice(1, items.length - 1).join(PATH_SEPARATOR);\n}\nfunction maybeStripScheme(key) {\n return key.startsWith(BrowserLocalStorage.URL_SCHEME) ?\n key.slice(BrowserLocalStorage.URL_SCHEME.length) :\n key;\n}\n/**\n * IOHandler subclass: Browser Local Storage.\n *\n * See the doc string to `browserLocalStorage` for more details.\n */\nexport class BrowserLocalStorage {\n constructor(modelPath) {\n if (!env().getBool('IS_BROWSER') || typeof window === 'undefined' ||\n typeof window.localStorage === 'undefined') {\n // TODO(cais): Add more info about what IOHandler subtypes are\n // available.\n // Maybe point to a doc page on the web and/or automatically determine\n // the available IOHandlers and print them in the error message.\n throw new Error('The current environment does not support local storage.');\n }\n this.LS = window.localStorage;\n if (modelPath == null || !modelPath) {\n throw new Error('For local storage, modelPath must not be null, undefined or empty.');\n }\n this.modelPath = modelPath;\n this.keys = getModelKeys(this.modelPath);\n }\n /**\n * Save model artifacts to browser local storage.\n *\n * See the documentation to `browserLocalStorage` for details on the saved\n * artifacts.\n *\n * @param modelArtifacts The model artifacts to be stored.\n * @returns An instance of SaveResult.\n */\n async save(modelArtifacts) {\n if (modelArtifacts.modelTopology instanceof ArrayBuffer) {\n throw new Error('BrowserLocalStorage.save() does not support saving model topology ' +\n 'in binary formats yet.');\n }\n else {\n const topology = JSON.stringify(modelArtifacts.modelTopology);\n const weightSpecs = JSON.stringify(modelArtifacts.weightSpecs);\n const modelArtifactsInfo = getModelArtifactsInfoForJSON(modelArtifacts);\n try {\n this.LS.setItem(this.keys.info, JSON.stringify(modelArtifactsInfo));\n this.LS.setItem(this.keys.topology, topology);\n this.LS.setItem(this.keys.weightSpecs, weightSpecs);\n this.LS.setItem(this.keys.weightData, arrayBufferToBase64String(modelArtifacts.weightData));\n this.LS.setItem(this.keys.modelMetadata, JSON.stringify({\n format: modelArtifacts.format,\n generatedBy: modelArtifacts.generatedBy,\n convertedBy: modelArtifacts.convertedBy,\n userDefinedMetadata: modelArtifacts.userDefinedMetadata\n }));\n return { modelArtifactsInfo };\n }\n catch (err) {\n // If saving failed, clean up all items saved so far.\n this.LS.removeItem(this.keys.info);\n this.LS.removeItem(this.keys.topology);\n this.LS.removeItem(this.keys.weightSpecs);\n this.LS.removeItem(this.keys.weightData);\n 
this.LS.removeItem(this.keys.modelMetadata);\n throw new Error(`Failed to save model '${this.modelPath}' to local storage: ` +\n `size quota being exceeded is a possible cause of this failure: ` +\n `modelTopologyBytes=${modelArtifactsInfo.modelTopologyBytes}, ` +\n `weightSpecsBytes=${modelArtifactsInfo.weightSpecsBytes}, ` +\n `weightDataBytes=${modelArtifactsInfo.weightDataBytes}.`);\n }\n }\n }\n /**\n * Load a model from local storage.\n *\n * See the documentation to `browserLocalStorage` for details on the saved\n * artifacts.\n *\n * @returns The loaded model (if loading succeeds).\n */\n async load() {\n const info = JSON.parse(this.LS.getItem(this.keys.info));\n if (info == null) {\n throw new Error(`In local storage, there is no model with name '${this.modelPath}'`);\n }\n if (info.modelTopologyType !== 'JSON') {\n throw new Error('BrowserLocalStorage does not support loading non-JSON model ' +\n 'topology yet.');\n }\n const out = {};\n // Load topology.\n const topology = JSON.parse(this.LS.getItem(this.keys.topology));\n if (topology == null) {\n throw new Error(`In local storage, the topology of model '${this.modelPath}' ` +\n `is missing.`);\n }\n out.modelTopology = topology;\n // Load weight specs.\n const weightSpecs = JSON.parse(this.LS.getItem(this.keys.weightSpecs));\n if (weightSpecs == null) {\n throw new Error(`In local storage, the weight specs of model '${this.modelPath}' ` +\n `are missing.`);\n }\n out.weightSpecs = weightSpecs;\n // Load meta-data fields.\n const metadataString = this.LS.getItem(this.keys.modelMetadata);\n if (metadataString != null) {\n const metadata = JSON.parse(metadataString);\n out.format = metadata['format'];\n out.generatedBy = metadata['generatedBy'];\n out.convertedBy = metadata['convertedBy'];\n out.userDefinedMetadata = metadata['userDefinedMetadata'];\n }\n // Load weight data.\n const weightDataBase64 = this.LS.getItem(this.keys.weightData);\n if (weightDataBase64 == null) {\n throw new Error(`In local storage, the binary weight values of model ` +\n `'${this.modelPath}' are missing.`);\n }\n out.weightData = base64StringToArrayBuffer(weightDataBase64);\n return out;\n }\n}\nBrowserLocalStorage.URL_SCHEME = 'localstorage://';\nexport const localStorageRouter = (url) => {\n if (!env().getBool('IS_BROWSER')) {\n return null;\n }\n else {\n if (!Array.isArray(url) && url.startsWith(BrowserLocalStorage.URL_SCHEME)) {\n return browserLocalStorage(url.slice(BrowserLocalStorage.URL_SCHEME.length));\n }\n else {\n return null;\n }\n }\n};\nIORouterRegistry.registerSaveRouter(localStorageRouter);\nIORouterRegistry.registerLoadRouter(localStorageRouter);\n/**\n * Factory function for local storage IOHandler.\n *\n * This `IOHandler` supports both `save` and `load`.\n *\n * For each model's saved artifacts, four items are saved to local storage.\n * - `${PATH_SEPARATOR}/${modelPath}/info`: Contains meta-info about the\n * model, such as date saved, type of the topology, size in bytes, etc.\n * - `${PATH_SEPARATOR}/${modelPath}/topology`: Model topology. 
For Keras-\n * style models, this is a stringized JSON.\n * - `${PATH_SEPARATOR}/${modelPath}/weight_specs`: Weight specs of the\n * model, can be used to decode the saved binary weight values (see\n * item below).\n * - `${PATH_SEPARATOR}/${modelPath}/weight_data`: Concatenated binary\n * weight values, stored as a base64-encoded string.\n *\n * Saving may throw an `Error` if the total size of the artifacts exceed the\n * browser-specific quota.\n *\n * @param modelPath A unique identifier for the model to be saved. Must be a\n * non-empty string.\n * @returns An instance of `IOHandler`, which can be used with, e.g.,\n * `tf.Model.save`.\n */\nexport function browserLocalStorage(modelPath) {\n return new BrowserLocalStorage(modelPath);\n}\nexport class BrowserLocalStorageManager {\n constructor() {\n assert(env().getBool('IS_BROWSER'), () => 'Current environment is not a web browser');\n assert(typeof window === 'undefined' ||\n typeof window.localStorage !== 'undefined', () => 'Current browser does not appear to support localStorage');\n this.LS = window.localStorage;\n }\n async listModels() {\n const out = {};\n const prefix = PATH_PREFIX + PATH_SEPARATOR;\n const suffix = PATH_SEPARATOR + INFO_SUFFIX;\n for (let i = 0; i < this.LS.length; ++i) {\n const key = this.LS.key(i);\n if (key.startsWith(prefix) && key.endsWith(suffix)) {\n const modelPath = getModelPathFromKey(key);\n out[modelPath] = JSON.parse(this.LS.getItem(key));\n }\n }\n return out;\n }\n async removeModel(path) {\n path = maybeStripScheme(path);\n const keys = getModelKeys(path);\n if (this.LS.getItem(keys.info) == null) {\n throw new Error(`Cannot find model at path '${path}'`);\n }\n const info = JSON.parse(this.LS.getItem(keys.info));\n this.LS.removeItem(keys.info);\n this.LS.removeItem(keys.topology);\n this.LS.removeItem(keys.weightSpecs);\n this.LS.removeItem(keys.weightData);\n return info;\n }\n}\n//# sourceMappingURL=local_storage.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * Classes and functions for model management across multiple storage mediums.\n *\n * Supported client actions:\n * - Listing models on all registered storage mediums.\n * - Remove model by URL from any registered storage mediums, by using URL\n * string.\n * - Moving or copying model from one path to another in the same medium or from\n * one medium to another, by using URL strings.\n */\nimport { assert } from '../util';\nimport { IORouterRegistry } from './router_registry';\nconst URL_SCHEME_SUFFIX = '://';\nexport class ModelStoreManagerRegistry {\n constructor() {\n this.managers = {};\n }\n static getInstance() {\n if (ModelStoreManagerRegistry.instance == null) {\n ModelStoreManagerRegistry.instance = new ModelStoreManagerRegistry();\n }\n return ModelStoreManagerRegistry.instance;\n }\n /**\n * Register a save-handler router.\n *\n * @param saveRouter A function that maps a URL-like string onto an instance\n * of `IOHandler` with the `save` method defined or `null`.\n */\n static registerManager(scheme, manager) {\n assert(scheme != null, () => 'scheme must not be undefined or null.');\n if (scheme.endsWith(URL_SCHEME_SUFFIX)) {\n scheme = scheme.slice(0, scheme.indexOf(URL_SCHEME_SUFFIX));\n }\n assert(scheme.length > 0, () => 'scheme must not be an empty string.');\n const registry = ModelStoreManagerRegistry.getInstance();\n assert(registry.managers[scheme] == null, () => `A model store manager is already registered for scheme '${scheme}'.`);\n registry.managers[scheme] = manager;\n }\n static getManager(scheme) {\n const manager = this.getInstance().managers[scheme];\n if (manager == null) {\n throw new Error(`Cannot find model manager for scheme '${scheme}'`);\n }\n return manager;\n }\n static getSchemes() {\n return Object.keys(this.getInstance().managers);\n }\n}\n/**\n * Helper method for parsing a URL string into a scheme and a path.\n *\n * @param url E.g., 'localstorage://my-model'\n * @returns A dictionary with two fields: scheme and path.\n * Scheme: e.g., 'localstorage' in the example above.\n * Path: e.g., 'my-model' in the example above.\n */\nfunction parseURL(url) {\n if (url.indexOf(URL_SCHEME_SUFFIX) === -1) {\n throw new Error(`The url string provided does not contain a scheme. 
` +\n `Supported schemes are: ` +\n `${ModelStoreManagerRegistry.getSchemes().join(',')}`);\n }\n return {\n scheme: url.split(URL_SCHEME_SUFFIX)[0],\n path: url.split(URL_SCHEME_SUFFIX)[1],\n };\n}\nasync function cloneModelInternal(sourceURL, destURL, deleteSource = false) {\n assert(sourceURL !== destURL, () => `Old path and new path are the same: '${sourceURL}'`);\n const loadHandlers = IORouterRegistry.getLoadHandlers(sourceURL);\n assert(loadHandlers.length > 0, () => `Copying failed because no load handler is found for source URL ${sourceURL}.`);\n assert(loadHandlers.length < 2, () => `Copying failed because more than one (${loadHandlers.length}) ` +\n `load handlers for source URL ${sourceURL}.`);\n const loadHandler = loadHandlers[0];\n const saveHandlers = IORouterRegistry.getSaveHandlers(destURL);\n assert(saveHandlers.length > 0, () => `Copying failed because no save handler is found for destination ` +\n `URL ${destURL}.`);\n assert(saveHandlers.length < 2, () => `Copying failed because more than one (${loadHandlers.length}) ` +\n `save handlers for destination URL ${destURL}.`);\n const saveHandler = saveHandlers[0];\n const sourceScheme = parseURL(sourceURL).scheme;\n const sourcePath = parseURL(sourceURL).path;\n const sameMedium = sourceScheme === parseURL(sourceURL).scheme;\n const modelArtifacts = await loadHandler.load();\n // If moving within the same storage medium, remove the old model as soon as\n // the loading is done. Without doing this, it is possible that the combined\n // size of the two models will cause the cloning to fail.\n if (deleteSource && sameMedium) {\n await ModelStoreManagerRegistry.getManager(sourceScheme)\n .removeModel(sourcePath);\n }\n const saveResult = await saveHandler.save(modelArtifacts);\n // If moving between mediums, the deletion is done after the save succeeds.\n // This guards against the case in which saving to the destination medium\n // fails.\n if (deleteSource && !sameMedium) {\n await ModelStoreManagerRegistry.getManager(sourceScheme)\n .removeModel(sourcePath);\n }\n return saveResult.modelArtifactsInfo;\n}\n/**\n * List all models stored in registered storage mediums.\n *\n * For a web browser environment, the registered mediums are Local Storage and\n * IndexedDB.\n *\n * ```js\n * // First create and save a model.\n * const model = tf.sequential();\n * model.add(tf.layers.dense(\n * {units: 1, inputShape: [10], activation: 'sigmoid'}));\n * await model.save('localstorage://demo/management/model1');\n *\n * // Then list existing models.\n * console.log(JSON.stringify(await tf.io.listModels()));\n *\n * // Delete the model.\n * await tf.io.removeModel('localstorage://demo/management/model1');\n *\n * // List models again.\n * console.log(JSON.stringify(await tf.io.listModels()));\n * ```\n *\n * @returns A `Promise` of a dictionary mapping URLs of existing models to\n * their model artifacts info. URLs include medium-specific schemes, e.g.,\n * 'indexeddb://my/model/1'. 
Model artifacts info include type of the\n * model's topology, byte sizes of the topology, weights, etc.\n *\n * @doc {\n * heading: 'Models',\n * subheading: 'Management',\n * namespace: 'io',\n * ignoreCI: true\n * }\n */\nasync function listModels() {\n const schemes = ModelStoreManagerRegistry.getSchemes();\n const out = {};\n for (const scheme of schemes) {\n const schemeOut = await ModelStoreManagerRegistry.getManager(scheme).listModels();\n for (const path in schemeOut) {\n const url = scheme + URL_SCHEME_SUFFIX + path;\n out[url] = schemeOut[path];\n }\n }\n return out;\n}\n/**\n * Remove a model specified by URL from a reigstered storage medium.\n *\n * ```js\n * // First create and save a model.\n * const model = tf.sequential();\n * model.add(tf.layers.dense(\n * {units: 1, inputShape: [10], activation: 'sigmoid'}));\n * await model.save('localstorage://demo/management/model1');\n *\n * // Then list existing models.\n * console.log(JSON.stringify(await tf.io.listModels()));\n *\n * // Delete the model.\n * await tf.io.removeModel('localstorage://demo/management/model1');\n *\n * // List models again.\n * console.log(JSON.stringify(await tf.io.listModels()));\n * ```\n *\n * @param url A URL to a stored model, with a scheme prefix, e.g.,\n * 'localstorage://my-model-1', 'indexeddb://my/model/2'.\n * @returns ModelArtifactsInfo of the deleted model (if and only if deletion\n * is successful).\n * @throws Error if deletion fails, e.g., if no model exists at `path`.\n *\n * @doc {\n * heading: 'Models',\n * subheading: 'Management',\n * namespace: 'io',\n * ignoreCI: true\n * }\n */\nasync function removeModel(url) {\n const schemeAndPath = parseURL(url);\n const manager = ModelStoreManagerRegistry.getManager(schemeAndPath.scheme);\n return manager.removeModel(schemeAndPath.path);\n}\n/**\n * Copy a model from one URL to another.\n *\n * This function supports:\n *\n * 1. Copying within a storage medium, e.g.,\n * `tf.io.copyModel('localstorage://model-1', 'localstorage://model-2')`\n * 2. 
Copying between two storage mediums, e.g.,\n * `tf.io.copyModel('localstorage://model-1', 'indexeddb://model-1')`\n *\n * ```js\n * // First create and save a model.\n * const model = tf.sequential();\n * model.add(tf.layers.dense(\n * {units: 1, inputShape: [10], activation: 'sigmoid'}));\n * await model.save('localstorage://demo/management/model1');\n *\n * // Then list existing models.\n * console.log(JSON.stringify(await tf.io.listModels()));\n *\n * // Copy the model, from Local Storage to IndexedDB.\n * await tf.io.copyModel(\n * 'localstorage://demo/management/model1',\n * 'indexeddb://demo/management/model1');\n *\n * // List models again.\n * console.log(JSON.stringify(await tf.io.listModels()));\n *\n * // Remove both models.\n * await tf.io.removeModel('localstorage://demo/management/model1');\n * await tf.io.removeModel('indexeddb://demo/management/model1');\n * ```\n *\n * @param sourceURL Source URL of copying.\n * @param destURL Destination URL of copying.\n * @returns ModelArtifactsInfo of the copied model (if and only if copying\n * is successful).\n * @throws Error if copying fails, e.g., if no model exists at `sourceURL`, or\n * if `oldPath` and `newPath` are identical.\n *\n * @doc {\n * heading: 'Models',\n * subheading: 'Management',\n * namespace: 'io',\n * ignoreCI: true\n * }\n */\nasync function copyModel(sourceURL, destURL) {\n const deleteSource = false;\n return cloneModelInternal(sourceURL, destURL, deleteSource);\n}\n/**\n * Move a model from one URL to another.\n *\n * This function supports:\n *\n * 1. Moving within a storage medium, e.g.,\n * `tf.io.moveModel('localstorage://model-1', 'localstorage://model-2')`\n * 2. Moving between two storage mediums, e.g.,\n * `tf.io.moveModel('localstorage://model-1', 'indexeddb://model-1')`\n *\n * ```js\n * // First create and save a model.\n * const model = tf.sequential();\n * model.add(tf.layers.dense(\n * {units: 1, inputShape: [10], activation: 'sigmoid'}));\n * await model.save('localstorage://demo/management/model1');\n *\n * // Then list existing models.\n * console.log(JSON.stringify(await tf.io.listModels()));\n *\n * // Move the model, from Local Storage to IndexedDB.\n * await tf.io.moveModel(\n * 'localstorage://demo/management/model1',\n * 'indexeddb://demo/management/model1');\n *\n * // List models again.\n * console.log(JSON.stringify(await tf.io.listModels()));\n *\n * // Remove the moved model.\n * await tf.io.removeModel('indexeddb://demo/management/model1');\n * ```\n *\n * @param sourceURL Source URL of moving.\n * @param destURL Destination URL of moving.\n * @returns ModelArtifactsInfo of the copied model (if and only if copying\n * is successful).\n * @throws Error if moving fails, e.g., if no model exists at `sourceURL`, or\n * if `oldPath` and `newPath` are identical.\n *\n * @doc {\n * heading: 'Models',\n * subheading: 'Management',\n * namespace: 'io',\n * ignoreCI: true\n * }\n */\nasync function moveModel(sourceURL, destURL) {\n const deleteSource = true;\n return cloneModelInternal(sourceURL, destURL, deleteSource);\n}\nexport { moveModel, copyModel, removeModel, listModels };\n//# sourceMappingURL=model_management.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
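// Editor's note -- illustrative sketch, not part of the bundled sources above: every
// URL accepted by the model-management APIs (listModels, copyModel, moveModel,
// removeModel) is just '<scheme>://<path>'. A hypothetical helper mirroring the
// internal parseURL() split shown earlier:
const splitModelURL = (url) => {
  const [scheme, path] = url.split('://');
  return { scheme, path };
};
// splitModelURL('indexeddb://demo/management/model1')
//   -> { scheme: 'indexeddb', path: 'demo/management/model1' }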
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport '../flags';\nimport { env } from '../environment';\nimport { BrowserIndexedDB, BrowserIndexedDBManager } from '../io/indexed_db';\nimport { BrowserLocalStorage, BrowserLocalStorageManager } from '../io/local_storage';\nimport { ModelStoreManagerRegistry } from '../io/model_management';\nexport class PlatformBrowser {\n fetch(path, init) {\n return fetch(path, init);\n }\n now() {\n return performance.now();\n }\n encode(text, encoding) {\n if (encoding !== 'utf-8' && encoding !== 'utf8') {\n throw new Error(`Browser's encoder only supports utf-8, but got ${encoding}`);\n }\n if (this.textEncoder == null) {\n this.textEncoder = new TextEncoder();\n }\n return this.textEncoder.encode(text);\n }\n decode(bytes, encoding) {\n return new TextDecoder(encoding).decode(bytes);\n }\n}\nif (env().get('IS_BROWSER')) {\n env().setPlatform('browser', new PlatformBrowser());\n // Register LocalStorage IOHandler\n try {\n ModelStoreManagerRegistry.registerManager(BrowserLocalStorage.URL_SCHEME, new BrowserLocalStorageManager());\n }\n catch (err) {\n }\n // Register IndexedDB IOHandler\n try {\n ModelStoreManagerRegistry.registerManager(BrowserIndexedDB.URL_SCHEME, new BrowserIndexedDBManager());\n }\n catch (err) {\n }\n}\n//# sourceMappingURL=platform_browser.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
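// Editor's note -- illustrative sketch: PlatformBrowser.encode()/decode() above are
// thin wrappers over the standard TextEncoder/TextDecoder utf-8 round trip.
const bytes = new TextEncoder().encode('hello tfjs'); // Uint8Array of utf-8 bytes
const text = new TextDecoder('utf-8').decode(bytes);  // 'hello tfjs'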
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from '../environment';\n// We are wrapping this within an object so it can be stubbed by Jasmine.\nexport const getNodeFetch = {\n // tslint:disable-next-line:no-require-imports\n importFetch: () => require('node-fetch')\n};\nlet systemFetch;\n// These getters and setters are for testing so we don't export a mutable\n// variable.\nexport function resetSystemFetch() {\n systemFetch = null;\n}\nexport function setSystemFetch(fetchFn) {\n systemFetch = fetchFn;\n}\nexport function getSystemFetch() {\n return systemFetch;\n}\nexport class PlatformNode {\n constructor() {\n // tslint:disable-next-line:no-require-imports\n this.util = require('util');\n // According to the spec, the built-in encoder can do only UTF-8 encoding.\n // https://developer.mozilla.org/en-US/docs/Web/API/TextEncoder/TextEncoder\n this.textEncoder = new this.util.TextEncoder();\n }\n fetch(path, requestInits) {\n if (env().global.fetch != null) {\n return env().global.fetch(path, requestInits);\n }\n if (systemFetch == null) {\n systemFetch = getNodeFetch.importFetch();\n }\n return systemFetch(path, requestInits);\n }\n now() {\n const time = process.hrtime();\n return time[0] * 1000 + time[1] / 1000000;\n }\n encode(text, encoding) {\n if (encoding !== 'utf-8' && encoding !== 'utf8') {\n throw new Error(`Node built-in encoder only supports utf-8, but got ${encoding}`);\n }\n return this.textEncoder.encode(text);\n }\n decode(bytes, encoding) {\n if (bytes.length === 0) {\n return '';\n }\n return new this.util.TextDecoder(encoding).decode(bytes);\n }\n}\nif (env().get('IS_NODE')) {\n env().setPlatform('node', new PlatformNode());\n}\n//# sourceMappingURL=platform_node.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { TensorBuffer } from '../tensor';\nimport * as util from '../util';\n/**\n * Creates an empty `tf.TensorBuffer` with the specified `shape` and `dtype`.\n *\n * The values are stored in CPU as `TypedArray`. 
Fill the buffer using\n * `buffer.set()`, or by modifying directly `buffer.values`.\n *\n * When done, call `buffer.toTensor()` to get an immutable `tf.Tensor` with\n * those values.\n *\n * ```js\n * // Create a buffer and set values at particular indices.\n * const buffer = tf.buffer([2, 2]);\n * buffer.set(3, 0, 0);\n * buffer.set(5, 1, 0);\n *\n * // Convert the buffer back to a tensor.\n * buffer.toTensor().print();\n * ```\n *\n * @param shape An array of integers defining the output tensor shape.\n * @param dtype The dtype of the buffer. Defaults to 'float32'.\n * @param values The values of the buffer as `TypedArray`. Defaults to\n * zeros.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function buffer(shape, dtype = 'float32', values) {\n dtype = dtype || 'float32';\n util.assertNonNegativeIntegerDimensions(shape);\n return new TensorBuffer(shape, dtype, values);\n}\n//# sourceMappingURL=buffer.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Cast } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * Casts a `tf.Tensor` to a new dtype.\n *\n * ```js\n * const x = tf.tensor1d([1.5, 2.5, 3]);\n * tf.cast(x, 'int32').print();\n * ```\n * @param x The input tensor to be casted.\n * @param dtype The dtype to cast the input tensor to.\n *\n * @doc {heading: 'Tensors', subheading: 'Transformations'}\n */\nfunction cast_(x, dtype) {\n const $x = convertToTensor(x, 'x', 'cast');\n // Sanity checks.\n if (!util.isValidDtype(dtype)) {\n throw new Error(`Failed to cast to unknown dtype ${dtype}`);\n }\n if (dtype === 'string' && $x.dtype !== 'string' ||\n dtype !== 'string' && $x.dtype === 'string') {\n throw new Error('Only strings can be casted to strings');\n }\n const inputs = { x: $x };\n const attrs = { dtype };\n return ENGINE.runKernelFunc(backend => backend.cast($x, dtype), inputs, null /* grad */, Cast, attrs);\n}\nexport const cast = op({ cast_ });\n//# sourceMappingURL=cast.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
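// Editor's note -- illustrative sketch combining the two ops above: fill a
// TensorBuffer, materialize it with toTensor(), then cast the result to int32
// (float values truncate toward zero on cast).
const buf = tf.buffer([2, 2]);
buf.set(1.5, 0, 0);
buf.set(2.7, 1, 1);
tf.cast(buf.toTensor(), 'int32').print(); // [[1, 0], [0, 2]]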
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Identity } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Creates a new tensor with the same values and shape as the specified\n * tensor.\n *\n * ```js\n * const x = tf.tensor([1, 2]);\n *\n * x.clone().print();\n * ```\n *\n * @param x The tensor to clone.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nfunction clone_(x) {\n const $x = convertToTensor(x, 'x', 'clone', null);\n const forward = () => ENGINE.makeTensorFromDataId($x.dataId, $x.shape, $x.dtype);\n const inputs = { x: $x };\n // Note this op is called tf.identity in python. Hence the kernel name used\n // here.\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Identity);\n}\nexport const clone = op({ clone_ });\n//# sourceMappingURL=clone.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * Prints information about the `tf.Tensor` including its data.\n *\n * ```js\n * const verbose = true;\n * tf.tensor2d([1, 2, 3, 4], [2, 2]).print(verbose);\n * ```\n * @param x The tensor to be printed.\n * @param verbose Whether to print verbose information about the ` Tensor`,\n * including dtype and size.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function print(x, verbose = false) {\n console.log(x.toString(verbose));\n}\n//# sourceMappingURL=print.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// Required side effectful code for tfjs-core\n// Set up Engine and ENV\nimport { getOrMakeEngine } from './engine';\ngetOrMakeEngine();\n// Register backend-agnostic flags.\nimport './flags';\n// Register platforms\nimport './platforms/platform_browser';\nimport './platforms/platform_node';\n// Set up OpHandler\nimport { buffer } from './ops/buffer';\nimport { cast } from './ops/cast';\nimport { clone } from './ops/clone';\nimport { print } from './ops/print';\nimport { setOpHandler } from './tensor';\nconst opHandler = {\n buffer,\n cast,\n clone,\n print\n};\nsetOpHandler(opHandler);\n//# sourceMappingURL=base_side_effects.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * IOHandlers related to files, such as browser-triggered file downloads,\n * user-selected files in browser.\n */\nimport '../flags';\nimport { env } from '../environment';\nimport { basename, concatenateArrayBuffers, getModelArtifactsInfoForJSON } from './io_utils';\nimport { IORouterRegistry } from './router_registry';\nconst DEFAULT_FILE_NAME_PREFIX = 'model';\nconst DEFAULT_JSON_EXTENSION_NAME = '.json';\nconst DEFAULT_WEIGHT_DATA_EXTENSION_NAME = '.weights.bin';\nfunction defer(f) {\n return new Promise(resolve => setTimeout(resolve)).then(f);\n}\nexport class BrowserDownloads {\n constructor(fileNamePrefix) {\n if (!env().getBool('IS_BROWSER')) {\n // TODO(cais): Provide info on what IOHandlers are available under the\n // current environment.\n throw new Error('browserDownloads() cannot proceed because the current environment ' +\n 'is not a browser.');\n }\n if (fileNamePrefix.startsWith(BrowserDownloads.URL_SCHEME)) {\n fileNamePrefix = fileNamePrefix.slice(BrowserDownloads.URL_SCHEME.length);\n }\n if (fileNamePrefix == null || fileNamePrefix.length === 0) {\n fileNamePrefix = DEFAULT_FILE_NAME_PREFIX;\n }\n this.modelTopologyFileName = fileNamePrefix + DEFAULT_JSON_EXTENSION_NAME;\n this.weightDataFileName =\n fileNamePrefix + DEFAULT_WEIGHT_DATA_EXTENSION_NAME;\n }\n async save(modelArtifacts) {\n if (typeof (document) === 'undefined') {\n throw new Error('Browser downloads are not supported in ' +\n 'this environment since `document` is not present');\n }\n const 
weightsURL = window.URL.createObjectURL(new Blob([modelArtifacts.weightData], { type: 'application/octet-stream' }));\n if (modelArtifacts.modelTopology instanceof ArrayBuffer) {\n throw new Error('BrowserDownloads.save() does not support saving model topology ' +\n 'in binary formats yet.');\n }\n else {\n const weightsManifest = [{\n paths: ['./' + this.weightDataFileName],\n weights: modelArtifacts.weightSpecs\n }];\n const modelTopologyAndWeightManifest = {\n modelTopology: modelArtifacts.modelTopology,\n format: modelArtifacts.format,\n generatedBy: modelArtifacts.generatedBy,\n convertedBy: modelArtifacts.convertedBy,\n weightsManifest\n };\n const modelTopologyAndWeightManifestURL = window.URL.createObjectURL(new Blob([JSON.stringify(modelTopologyAndWeightManifest)], { type: 'application/json' }));\n // If anchor elements are not provided, create them without attaching them\n // to parents, so that the downloaded file names can be controlled.\n const jsonAnchor = this.jsonAnchor == null ? document.createElement('a') :\n this.jsonAnchor;\n jsonAnchor.download = this.modelTopologyFileName;\n jsonAnchor.href = modelTopologyAndWeightManifestURL;\n // Trigger downloads by evoking a click event on the download anchors.\n // When multiple downloads are started synchronously, Firefox will only\n // save the last one.\n await defer(() => jsonAnchor.dispatchEvent(new MouseEvent('click')));\n if (modelArtifacts.weightData != null) {\n const weightDataAnchor = this.weightDataAnchor == null ?\n document.createElement('a') :\n this.weightDataAnchor;\n weightDataAnchor.download = this.weightDataFileName;\n weightDataAnchor.href = weightsURL;\n await defer(() => weightDataAnchor.dispatchEvent(new MouseEvent('click')));\n }\n return { modelArtifactsInfo: getModelArtifactsInfoForJSON(modelArtifacts) };\n }\n }\n}\nBrowserDownloads.URL_SCHEME = 'downloads://';\nclass BrowserFiles {\n constructor(files) {\n if (files == null || files.length < 1) {\n throw new Error(`When calling browserFiles, at least 1 file is required, ` +\n `but received ${files}`);\n }\n this.files = files;\n }\n async load() {\n const jsonFile = this.files[0];\n const weightFiles = this.files.slice(1);\n return new Promise((resolve, reject) => {\n const jsonReader = new FileReader();\n jsonReader.onload = (event) => {\n // tslint:disable-next-line:no-any\n const modelJSON = JSON.parse(event.target.result);\n const modelTopology = modelJSON.modelTopology;\n if (modelTopology == null) {\n reject(new Error(`modelTopology field is missing from file ${jsonFile.name}`));\n return;\n }\n if (weightFiles.length === 0) {\n resolve({ modelTopology });\n }\n const weightsManifest = modelJSON.weightsManifest;\n if (weightsManifest == null) {\n reject(new Error(`weightManifest field is missing from file ${jsonFile.name}`));\n return;\n }\n let pathToFile;\n try {\n pathToFile =\n this.checkManifestAndWeightFiles(weightsManifest, weightFiles);\n }\n catch (err) {\n reject(err);\n return;\n }\n const weightSpecs = [];\n const paths = [];\n const perFileBuffers = [];\n weightsManifest.forEach(weightsGroup => {\n weightsGroup.paths.forEach(path => {\n paths.push(path);\n perFileBuffers.push(null);\n });\n weightSpecs.push(...weightsGroup.weights);\n });\n weightsManifest.forEach(weightsGroup => {\n weightsGroup.paths.forEach(path => {\n const weightFileReader = new FileReader();\n weightFileReader.onload = (event) => {\n // tslint:disable-next-line:no-any\n const weightData = event.target.result;\n const index = paths.indexOf(path);\n 
perFileBuffers[index] = weightData;\n if (perFileBuffers.indexOf(null) === -1) {\n resolve({\n modelTopology,\n weightSpecs,\n weightData: concatenateArrayBuffers(perFileBuffers),\n format: modelJSON.format,\n generatedBy: modelJSON.generatedBy,\n convertedBy: modelJSON.convertedBy,\n userDefinedMetadata: modelJSON.userDefinedMetadata\n });\n }\n };\n weightFileReader.onerror = error => reject(`Failed to weights data from file of path '${path}'.`);\n weightFileReader.readAsArrayBuffer(pathToFile[path]);\n });\n });\n };\n jsonReader.onerror = error => reject(`Failed to read model topology and weights manifest JSON ` +\n `from file '${jsonFile.name}'. BrowserFiles supports loading ` +\n `Keras-style tf.Model artifacts only.`);\n jsonReader.readAsText(jsonFile);\n });\n }\n /**\n * Check the compatibility between weights manifest and weight files.\n */\n checkManifestAndWeightFiles(manifest, files) {\n const basenames = [];\n const fileNames = files.map(file => basename(file.name));\n const pathToFile = {};\n for (const group of manifest) {\n group.paths.forEach(path => {\n const pathBasename = basename(path);\n if (basenames.indexOf(pathBasename) !== -1) {\n throw new Error(`Duplicate file basename found in weights manifest: ` +\n `'${pathBasename}'`);\n }\n basenames.push(pathBasename);\n if (fileNames.indexOf(pathBasename) === -1) {\n throw new Error(`Weight file with basename '${pathBasename}' is not provided.`);\n }\n else {\n pathToFile[path] = files[fileNames.indexOf(pathBasename)];\n }\n });\n }\n if (basenames.length !== files.length) {\n throw new Error(`Mismatch in the number of files in weights manifest ` +\n `(${basenames.length}) and the number of weight files provided ` +\n `(${files.length}).`);\n }\n return pathToFile;\n }\n}\nexport const browserDownloadsRouter = (url) => {\n if (!env().getBool('IS_BROWSER')) {\n return null;\n }\n else {\n if (!Array.isArray(url) && url.startsWith(BrowserDownloads.URL_SCHEME)) {\n return browserDownloads(url.slice(BrowserDownloads.URL_SCHEME.length));\n }\n else {\n return null;\n }\n }\n};\nIORouterRegistry.registerSaveRouter(browserDownloadsRouter);\n/**\n * Creates an IOHandler that triggers file downloads from the browser.\n *\n * The returned `IOHandler` instance can be used as model exporting methods such\n * as `tf.Model.save` and supports only saving.\n *\n * ```js\n * const model = tf.sequential();\n * model.add(tf.layers.dense(\n * {units: 1, inputShape: [10], activation: 'sigmoid'}));\n * const saveResult = await model.save('downloads://mymodel');\n * // This will trigger downloading of two files:\n * // 'mymodel.json' and 'mymodel.weights.bin'.\n * console.log(saveResult);\n * ```\n *\n * @param fileNamePrefix Prefix name of the files to be downloaded. For use with\n * `tf.Model`, `fileNamePrefix` should follow either of the following two\n * formats:\n * 1. `null` or `undefined`, in which case the default file\n * names will be used:\n * - 'model.json' for the JSON file containing the model topology and\n * weights manifest.\n * - 'model.weights.bin' for the binary file containing the binary weight\n * values.\n * 2. 
A single string or an Array of a single string, as the file name prefix.\n * For example, if `'foo'` is provided, the downloaded JSON\n * file and binary weights file will be named 'foo.json' and\n * 'foo.weights.bin', respectively.\n * @param config Additional configuration for triggering downloads.\n * @returns An instance of `BrowserDownloads` `IOHandler`.\n *\n * @doc {\n * heading: 'Models',\n * subheading: 'Loading',\n * namespace: 'io',\n * ignoreCI: true\n * }\n */\nexport function browserDownloads(fileNamePrefix = 'model') {\n return new BrowserDownloads(fileNamePrefix);\n}\n/**\n * Creates an IOHandler that loads model artifacts from user-selected files.\n *\n * This method can be used for loading from files such as user-selected files\n * in the browser.\n * When used in conjunction with `tf.loadLayersModel`, an instance of\n * `tf.LayersModel` (Keras-style) can be constructed from the loaded artifacts.\n *\n * ```js\n * // Note: This code snippet won't run properly without the actual file input\n * // elements in the HTML DOM.\n *\n * // Suppose there are two HTML file input (` `)\n * // elements.\n * const uploadJSONInput = document.getElementById('upload-json');\n * const uploadWeightsInput = document.getElementById('upload-weights');\n * const model = await tf.loadLayersModel(tf.io.browserFiles(\n * [uploadJSONInput.files[0], uploadWeightsInput.files[0]]));\n * ```\n *\n * @param files `File`s to load from. Currently, this function supports only\n * loading from files that contain Keras-style models (i.e., `tf.Model`s), for\n * which an `Array` of `File`s is expected (in that order):\n * - A JSON file containing the model topology and weight manifest.\n * - Optionally, One or more binary files containing the binary weights.\n * These files must have names that match the paths in the `weightsManifest`\n * contained by the aforementioned JSON file, or errors will be thrown\n * during loading. These weights files have the same format as the ones\n * generated by `tensorflowjs_converter` that comes with the `tensorflowjs`\n * Python PIP package. If no weights files are provided, only the model\n * topology will be loaded from the JSON file above.\n * @returns An instance of `Files` `IOHandler`.\n *\n * @doc {\n * heading: 'Models',\n * subheading: 'Loading',\n * namespace: 'io',\n * ignoreCI: true\n * }\n */\nexport function browserFiles(files) {\n return new BrowserFiles(files);\n}\n//# sourceMappingURL=browser_files.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { assert } from '../util';\n/**\n * Monitor Promise.all progress, fire onProgress callback function.\n *\n * @param promises Promise list going to be monitored\n * @param onProgress Callback function. Fired when a promise resolved.\n * @param startFraction Optional fraction start. Default to 0.\n * @param endFraction Optional fraction end. 
Default to 1.\n */\nexport function monitorPromisesProgress(promises, onProgress, startFraction, endFraction) {\n checkPromises(promises);\n startFraction = startFraction == null ? 0 : startFraction;\n endFraction = endFraction == null ? 1 : endFraction;\n checkFraction(startFraction, endFraction);\n let resolvedPromise = 0;\n const registerMonitor = (promise) => {\n promise.then(value => {\n const fraction = startFraction +\n ++resolvedPromise / promises.length * (endFraction - startFraction);\n // pass fraction as parameter to callback function.\n onProgress(fraction);\n return value;\n });\n return promise;\n };\n function checkPromises(promises) {\n assert(promises != null && Array.isArray(promises) && promises.length > 0, () => 'promises must be a none empty array');\n }\n function checkFraction(startFraction, endFraction) {\n assert(startFraction >= 0 && startFraction <= 1, () => `Progress fraction must be in range [0, 1], but ` +\n `got startFraction ${startFraction}`);\n assert(endFraction >= 0 && endFraction <= 1, () => `Progress fraction must be in range [0, 1], but ` +\n `got endFraction ${endFraction}`);\n assert(endFraction >= startFraction, () => `startFraction must be no more than endFraction, but ` +\n `got startFraction ${startFraction} and endFraction ` +\n `${endFraction}`);\n }\n return Promise.all(promises.map(registerMonitor));\n}\n//# sourceMappingURL=progress.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from '../environment';\nimport * as util from '../util';\nimport { decodeWeights } from './io_utils';\nimport { monitorPromisesProgress } from './progress';\nimport { DTYPE_VALUE_SIZE_MAP } from './types';\n/**\n * Reads binary weights data from a number of URLs.\n *\n * @param fetchURLs URLs to send the HTTP requests at, using `fetch` calls.\n * @param requestOptions RequestInit (options) for the HTTP requests.\n * @param fetchFunc Optional overriding value for the `window.fetch` function.\n * @param onProgress Optional, progress callback function, fired periodically\n * before the load is completed.\n * @returns A `Promise` of an Array of `ArrayBuffer`. The Array has the same\n * length as `fetchURLs`.\n */\nexport async function loadWeightsAsArrayBuffer(fetchURLs, loadOptions) {\n if (loadOptions == null) {\n loadOptions = {};\n }\n const fetchFunc = loadOptions.fetchFunc == null ? 
env().platform.fetch :\n loadOptions.fetchFunc;\n // Create the requests for all of the weights in parallel.\n const requests = fetchURLs.map(fetchURL => fetchFunc(fetchURL, loadOptions.requestInit, { isBinary: true }));\n const fetchStartFraction = 0;\n const fetchEndFraction = 0.5;\n const responses = loadOptions.onProgress == null ?\n await Promise.all(requests) :\n await monitorPromisesProgress(requests, loadOptions.onProgress, fetchStartFraction, fetchEndFraction);\n const bufferPromises = responses.map(response => response.arrayBuffer());\n const bufferStartFraction = 0.5;\n const bufferEndFraction = 1;\n const buffers = loadOptions.onProgress == null ?\n await Promise.all(bufferPromises) :\n await monitorPromisesProgress(bufferPromises, loadOptions.onProgress, bufferStartFraction, bufferEndFraction);\n return buffers;\n}\n/**\n * Reads a weights manifest JSON configuration, fetches the weights and\n * returns them as `Tensor`s.\n *\n * @param manifest The weights manifest JSON.\n * @param filePathPrefix The path prefix for filenames given in the manifest.\n * Defaults to the empty string.\n * @param weightNames The names of the weights to be fetched.\n */\nexport async function loadWeights(manifest, filePathPrefix = '', weightNames, requestInit) {\n // TODO(nsthorat): Groups are currently fetched atomically. If you need a\n // single weight from a group, the whole group will be fetched. At a future\n // date, we should support fetching only the individual shards within a\n // group that are needed to reconstruct the requested weight.\n // TODO(cais): Use `decodeWeights` for implementation.\n const fetchWeights = (fetchUrls) => loadWeightsAsArrayBuffer(fetchUrls, { requestInit });\n const loadWeights = weightsLoaderFactory(fetchWeights);\n return loadWeights(manifest, filePathPrefix, weightNames);\n}\n/**\n * Creates a function, which reads a weights manifest JSON configuration,\n * fetches the weight files using the specified function and returns them as\n * `Tensor`s.\n *\n * ```js\n * // example for creating a nodejs weight loader, which reads the weight files\n * // from disk using fs.readFileSync\n *\n * import * as fs from 'fs'\n *\n * const fetchWeightsFromDisk = (filePaths: string[]) =>\n * filePaths.map(filePath => fs.readFileSync(filePath).buffer)\n *\n * const loadWeights = tf.io.weightsLoaderFactory(fetchWeightsFromDisk)\n *\n * const manifest = JSON.parse(\n * fs.readFileSync('./my_model-weights_manifest').toString()\n * )\n * const weightMap = await loadWeights(manifest, './')\n * ```\n * @param fetchWeightsFunction The function used for fetching the weight files.\n * @returns Weight loading function.\n */\nexport function weightsLoaderFactory(fetchWeightsFunction) {\n return async (manifest, filePathPrefix = '', weightNames) => {\n // Collect all the groups, weights, and their relative offsets to be\n // fetched.\n const groupIndicesToFetchMap = manifest.map(() => false);\n const groupWeightsToFetch = {};\n const weightsFound = weightNames != null ? 
weightNames.map(() => false) : [];\n const allManifestWeightNames = [];\n manifest.forEach((manifestGroupConfig, groupIndex) => {\n let groupOffset = 0;\n manifestGroupConfig.weights.forEach(weightsEntry => {\n const rawDtype = ('quantization' in weightsEntry) ?\n weightsEntry.quantization.dtype :\n weightsEntry.dtype;\n const weightsBytes = DTYPE_VALUE_SIZE_MAP[rawDtype] *\n util.sizeFromShape(weightsEntry.shape);\n const enqueueWeightsForFetchingFn = () => {\n groupIndicesToFetchMap[groupIndex] = true;\n if (groupWeightsToFetch[groupIndex] == null) {\n groupWeightsToFetch[groupIndex] = [];\n }\n groupWeightsToFetch[groupIndex].push({\n manifestEntry: weightsEntry,\n groupOffset,\n sizeBytes: weightsBytes\n });\n };\n if (weightNames != null) {\n weightNames.forEach((weightName, weightIndex) => {\n if (weightName === weightsEntry.name) {\n enqueueWeightsForFetchingFn();\n weightsFound[weightIndex] = true;\n }\n });\n }\n else {\n enqueueWeightsForFetchingFn();\n }\n allManifestWeightNames.push(weightsEntry.name);\n groupOffset += weightsBytes;\n });\n });\n if (!weightsFound.every(found => found)) {\n const weightsNotFound = weightNames.filter((_, i) => !weightsFound[i]);\n throw new Error(`Could not find weights in manifest with names: ` +\n `${weightsNotFound.join(', ')}. \\n` +\n `Manifest JSON has weights with names: ` +\n `${allManifestWeightNames.join(', ')}.`);\n }\n // Convert the one-hot boolean groupId => shouldFetch map to a list of group\n // IDs.\n const groupIndicesToFetch = groupIndicesToFetchMap.reduce((accumulator, shouldFetch, i) => {\n if (shouldFetch) {\n accumulator.push(i);\n }\n return accumulator;\n }, []);\n const fetchUrls = [];\n groupIndicesToFetch.forEach(i => {\n manifest[i].paths.forEach(filepath => {\n const fetchUrl = filePathPrefix +\n (!filePathPrefix.endsWith('/') ? '/' : '') + filepath;\n fetchUrls.push(fetchUrl);\n });\n });\n const buffers = await fetchWeightsFunction(fetchUrls);\n const weightsTensorMap = {};\n let bufferIndexOffset = 0;\n groupIndicesToFetch.forEach(i => {\n const numBuffers = manifest[i].paths.length;\n let groupBytes = 0;\n for (let i = 0; i < numBuffers; i++) {\n groupBytes += buffers[bufferIndexOffset + i].byteLength;\n }\n // Create a buffer for the whole group.\n const groupBuffer = new ArrayBuffer(groupBytes);\n const groupByteBuffer = new Uint8Array(groupBuffer);\n let groupBufferOffset = 0;\n for (let i = 0; i < numBuffers; i++) {\n const buffer = new Uint8Array(buffers[bufferIndexOffset + i]);\n groupByteBuffer.set(buffer, groupBufferOffset);\n groupBufferOffset += buffer.byteLength;\n }\n const weightsEntries = groupWeightsToFetch[i];\n weightsEntries.forEach(weightsEntry => {\n const byteBuffer = groupBuffer.slice(weightsEntry.groupOffset, weightsEntry.groupOffset + weightsEntry.sizeBytes);\n const nameToTensorMap = decodeWeights(byteBuffer, [weightsEntry.manifestEntry]);\n for (const name in nameToTensorMap) {\n weightsTensorMap[name] = nameToTensorMap[name];\n }\n });\n bufferIndexOffset += numBuffers;\n });\n return weightsTensorMap;\n };\n}\n//# sourceMappingURL=weights_loader.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
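// Editor's note -- illustrative sketch (URL and weight name are hypothetical): a
// minimal weights manifest of the shape iterated above, as it would be passed to
// the exported loadWeights() helper; one group, one shard file, one float32 weight.
const manifest = [{
  paths: ['group1-shard1of1.bin'],
  weights: [{ name: 'dense/kernel', shape: [10, 1], dtype: 'float32' }],
}];
// const weightMap = await tf.io.loadWeights(
//     manifest, 'https://example.com/model/', ['dense/kernel']);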
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * IOHandler implementations based on HTTP requests in the web browser.\n *\n * Uses [`fetch`](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API).\n */\nimport { env } from '../environment';\nimport { assert } from '../util';\nimport { concatenateArrayBuffers, getModelArtifactsInfoForJSON } from './io_utils';\nimport { IORouterRegistry } from './router_registry';\nimport { loadWeightsAsArrayBuffer } from './weights_loader';\nconst OCTET_STREAM_MIME_TYPE = 'application/octet-stream';\nconst JSON_TYPE = 'application/json';\nexport class HTTPRequest {\n constructor(path, loadOptions) {\n this.DEFAULT_METHOD = 'POST';\n if (loadOptions == null) {\n loadOptions = {};\n }\n this.weightPathPrefix = loadOptions.weightPathPrefix;\n this.onProgress = loadOptions.onProgress;\n this.weightUrlConverter = loadOptions.weightUrlConverter;\n if (loadOptions.fetchFunc != null) {\n assert(typeof loadOptions.fetchFunc === 'function', () => 'Must pass a function that matches the signature of ' +\n '`fetch` (see ' +\n 'https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API)');\n this.fetch = loadOptions.fetchFunc;\n }\n else {\n this.fetch = env().platform.fetch;\n }\n assert(path != null && path.length > 0, () => 'URL path for http must not be null, undefined or ' +\n 'empty.');\n if (Array.isArray(path)) {\n assert(path.length === 2, () => 'URL paths for http must have a length of 2, ' +\n `(actual length is ${path.length}).`);\n }\n this.path = path;\n if (loadOptions.requestInit != null &&\n loadOptions.requestInit.body != null) {\n throw new Error('requestInit is expected to have no pre-existing body, but has one.');\n }\n this.requestInit = loadOptions.requestInit || {};\n }\n async save(modelArtifacts) {\n if (modelArtifacts.modelTopology instanceof ArrayBuffer) {\n throw new Error('BrowserHTTPRequest.save() does not support saving model topology ' +\n 'in binary formats yet.');\n }\n const init = Object.assign({ method: this.DEFAULT_METHOD }, this.requestInit);\n init.body = new FormData();\n const weightsManifest = [{\n paths: ['./model.weights.bin'],\n weights: modelArtifacts.weightSpecs,\n }];\n const modelTopologyAndWeightManifest = {\n modelTopology: modelArtifacts.modelTopology,\n format: modelArtifacts.format,\n generatedBy: modelArtifacts.generatedBy,\n convertedBy: modelArtifacts.convertedBy,\n userDefinedMetadata: modelArtifacts.userDefinedMetadata,\n weightsManifest\n };\n init.body.append('model.json', new Blob([JSON.stringify(modelTopologyAndWeightManifest)], { type: JSON_TYPE }), 'model.json');\n if (modelArtifacts.weightData != null) {\n init.body.append('model.weights.bin', new Blob([modelArtifacts.weightData], { type: OCTET_STREAM_MIME_TYPE }), 'model.weights.bin');\n }\n const response = await this.fetch(this.path, init);\n if (response.ok) {\n return {\n modelArtifactsInfo: 
getModelArtifactsInfoForJSON(modelArtifacts),\n responses: [response],\n };\n }\n else {\n throw new Error(`BrowserHTTPRequest.save() failed due to HTTP response status ` +\n `${response.status}.`);\n }\n }\n /**\n * Load model artifacts via HTTP request(s).\n *\n * See the documentation to `tf.io.http` for details on the saved\n * artifacts.\n *\n * @returns The loaded model artifacts (if loading succeeds).\n */\n async load() {\n const modelConfigRequest = await this.fetch(this.path, this.requestInit);\n if (!modelConfigRequest.ok) {\n throw new Error(`Request to ${this.path} failed with status code ` +\n `${modelConfigRequest.status}. Please verify this URL points to ` +\n `the model JSON of the model to load.`);\n }\n let modelConfig;\n try {\n modelConfig = await modelConfigRequest.json();\n }\n catch (e) {\n let message = `Failed to parse model JSON of response from ${this.path}.`;\n // TODO(nsthorat): Remove this after some time when we're comfortable that\n // .pb files are mostly gone.\n if (this.path.endsWith('.pb')) {\n message += ' Your path contains a .pb file extension. ' +\n 'Support for .pb models have been removed in TensorFlow.js 1.0 ' +\n 'in favor of .json models. You can re-convert your Python ' +\n 'TensorFlow model using the TensorFlow.js 1.0 conversion scripts ' +\n 'or you can convert your.pb models with the \\'pb2json\\'' +\n 'NPM script in the tensorflow/tfjs-converter repository.';\n }\n else {\n message += ' Please make sure the server is serving valid ' +\n 'JSON for this request.';\n }\n throw new Error(message);\n }\n const modelTopology = modelConfig.modelTopology;\n const weightsManifest = modelConfig.weightsManifest;\n const generatedBy = modelConfig.generatedBy;\n const convertedBy = modelConfig.convertedBy;\n const format = modelConfig.format;\n const userDefinedMetadata = modelConfig.userDefinedMetadata;\n // We do not allow both modelTopology and weightsManifest to be missing.\n if (modelTopology == null && weightsManifest == null) {\n throw new Error(`The JSON from HTTP path ${this.path} contains neither model ` +\n `topology or manifest for weights.`);\n }\n let weightSpecs;\n let weightData;\n if (weightsManifest != null) {\n const results = await this.loadWeights(weightsManifest);\n [weightSpecs, weightData] = results;\n }\n const artifacts = {\n modelTopology,\n weightSpecs,\n weightData,\n userDefinedMetadata,\n generatedBy,\n convertedBy,\n format\n };\n const initializer = modelConfig.modelInitializer;\n if (initializer) {\n artifacts.modelInitializer = initializer;\n }\n return artifacts;\n }\n async loadWeights(weightsManifest) {\n const weightPath = Array.isArray(this.path) ? 
this.path[1] : this.path;\n const [prefix, suffix] = parseUrl(weightPath);\n const pathPrefix = this.weightPathPrefix || prefix;\n const weightSpecs = [];\n for (const entry of weightsManifest) {\n weightSpecs.push(...entry.weights);\n }\n const fetchURLs = [];\n const urlPromises = [];\n for (const weightsGroup of weightsManifest) {\n for (const path of weightsGroup.paths) {\n if (this.weightUrlConverter != null) {\n urlPromises.push(this.weightUrlConverter(path));\n }\n else {\n fetchURLs.push(pathPrefix + path + suffix);\n }\n }\n }\n if (this.weightUrlConverter) {\n fetchURLs.push(...await Promise.all(urlPromises));\n }\n const buffers = await loadWeightsAsArrayBuffer(fetchURLs, {\n requestInit: this.requestInit,\n fetchFunc: this.fetch,\n onProgress: this.onProgress\n });\n return [weightSpecs, concatenateArrayBuffers(buffers)];\n }\n}\nHTTPRequest.URL_SCHEME_REGEX = /^https?:\\/\\//;\n/**\n * Extract the prefix and suffix of the url, where the prefix is the path before\n * the last file, and suffix is the search params after the last file.\n * ```\n * const url = 'http://tfhub.dev/model/1/tensorflowjs_model.pb?tfjs-format=file'\n * [prefix, suffix] = parseUrl(url)\n * // prefix = 'http://tfhub.dev/model/1/'\n * // suffix = '?tfjs-format=file'\n * ```\n * @param url the model url to be parsed.\n */\nexport function parseUrl(url) {\n const lastSlash = url.lastIndexOf('/');\n const lastSearchParam = url.lastIndexOf('?');\n const prefix = url.substring(0, lastSlash);\n const suffix = lastSearchParam > lastSlash ? url.substring(lastSearchParam) : '';\n return [prefix + '/', suffix];\n}\nexport function isHTTPScheme(url) {\n return url.match(HTTPRequest.URL_SCHEME_REGEX) != null;\n}\nexport const httpRouter = (url, loadOptions) => {\n if (typeof fetch === 'undefined' &&\n (loadOptions == null || loadOptions.fetchFunc == null)) {\n // `http` uses `fetch` or `node-fetch`, if one wants to use it in\n // an environment that is not the browser or node they have to setup a\n // global fetch polyfill.\n return null;\n }\n else {\n let isHTTP = true;\n if (Array.isArray(url)) {\n isHTTP = url.every(urlItem => isHTTPScheme(urlItem));\n }\n else {\n isHTTP = isHTTPScheme(url);\n }\n if (isHTTP) {\n return http(url, loadOptions);\n }\n }\n return null;\n};\nIORouterRegistry.registerSaveRouter(httpRouter);\nIORouterRegistry.registerLoadRouter(httpRouter);\n/**\n * Creates an IOHandler subtype that sends model artifacts to HTTP server.\n *\n * An HTTP request of the `multipart/form-data` mime type will be sent to the\n * `path` URL. The form data includes artifacts that represent the topology\n * and/or weights of the model. 
In the case of Keras-style `tf.Model`, two\n * blobs (files) exist in form-data:\n * - A JSON file consisting of `modelTopology` and `weightsManifest`.\n * - A binary weights file consisting of the concatenated weight values.\n * These files are in the same format as the one generated by\n * [tfjs_converter](https://js.tensorflow.org/tutorials/import-keras.html).\n *\n * The following code snippet exemplifies the client-side code that uses this\n * function:\n *\n * ```js\n * const model = tf.sequential();\n * model.add(\n * tf.layers.dense({units: 1, inputShape: [100], activation: 'sigmoid'}));\n *\n * const saveResult = await model.save(tf.io.http(\n * 'http://model-server:5000/upload', {requestInit: {method: 'PUT'}}));\n * console.log(saveResult);\n * ```\n *\n * If the default `POST` method is to be used, without any custom parameters\n * such as headers, you can simply pass an HTTP or HTTPS URL to `model.save`:\n *\n * ```js\n * const saveResult = await model.save('http://model-server:5000/upload');\n * ```\n *\n * The following GitHub Gist\n * https://gist.github.com/dsmilkov/1b6046fd6132d7408d5257b0976f7864\n * implements a server based on [flask](https://github.com/pallets/flask) that\n * can receive the request. Upon receiving the model artifacts via the requst,\n * this particular server reconsistutes instances of [Keras\n * Models](https://keras.io/models/model/) in memory.\n *\n *\n * @param path A URL path to the model.\n * Can be an absolute HTTP path (e.g.,\n * 'http://localhost:8000/model-upload)') or a relative path (e.g.,\n * './model-upload').\n * @param requestInit Request configurations to be used when sending\n * HTTP request to server using `fetch`. It can contain fields such as\n * `method`, `credentials`, `headers`, `mode`, etc. See\n * https://developer.mozilla.org/en-US/docs/Web/API/Request/Request\n * for more information. `requestInit` must not have a body, because the\n * body will be set by TensorFlow.js. File blobs representing the model\n * topology (filename: 'model.json') and the weights of the model (filename:\n * 'model.weights.bin') will be appended to the body. If `requestInit` has a\n * `body`, an Error will be thrown.\n * @param loadOptions Optional configuration for the loading. It includes the\n * following fields:\n * - weightPathPrefix Optional, this specifies the path prefix for weight\n * files, by default this is calculated from the path param.\n * - fetchFunc Optional, custom `fetch` function. E.g., in Node.js,\n * the `fetch` from node-fetch can be used here.\n * - onProgress Optional, progress callback function, fired periodically\n * before the load is completed.\n * @returns An instance of `IOHandler`.\n *\n * @doc {\n * heading: 'Models',\n * subheading: 'Loading',\n * namespace: 'io',\n * ignoreCI: true\n * }\n */\nexport function http(path, loadOptions) {\n return new HTTPRequest(path, loadOptions);\n}\n/**\n * Deprecated. Use `tf.io.http`.\n * @param path\n * @param loadOptions\n */\nexport function browserHTTPRequest(path, loadOptions) {\n return http(path, loadOptions);\n}\n//# sourceMappingURL=http.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
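// Editor's note -- illustrative sketch (URL is hypothetical): loading through the
// HTTP handler above via tf.loadLayersModel, with a progress callback and custom
// request options routed into the handler's requestInit.
const model = await tf.loadLayersModel('https://example.com/model.json', {
  onProgress: (fraction) => console.log(`loaded ${(fraction * 100).toFixed(0)}%`),
  requestInit: { credentials: 'include' },
});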
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nclass PassthroughLoader {\n constructor(modelArtifacts) {\n this.modelArtifacts = modelArtifacts;\n }\n async load() {\n return this.modelArtifacts;\n }\n}\nclass PassthroughSaver {\n constructor(saveHandler) {\n this.saveHandler = saveHandler;\n }\n async save(modelArtifacts) {\n return this.saveHandler(modelArtifacts);\n }\n}\n/**\n * Creates an IOHandler that loads model artifacts from memory.\n *\n * When used in conjunction with `tf.loadLayersModel`, an instance of\n * `tf.LayersModel` (Keras-style) can be constructed from the loaded artifacts.\n *\n * ```js\n * const model = await tf.loadLayersModel(tf.io.fromMemory(\n * modelTopology, weightSpecs, weightData));\n * ```\n *\n * @param modelArtifacts a object containing model topology (i.e., parsed from\n * the JSON format).\n * @param weightSpecs An array of `WeightsManifestEntry` objects describing the\n * names, shapes, types, and quantization of the weight data.\n * @param weightData A single `ArrayBuffer` containing the weight data,\n * concatenated in the order described by the weightSpecs.\n * @param trainingConfig Model training configuration. Optional.\n *\n * @returns A passthrough `IOHandler` that simply loads the provided data.\n */\nexport function fromMemory(modelArtifacts, weightSpecs, weightData, trainingConfig) {\n if (arguments.length === 1) {\n const isModelArtifacts = modelArtifacts.modelTopology != null ||\n modelArtifacts.weightSpecs != null;\n if (isModelArtifacts) {\n return new PassthroughLoader(modelArtifacts);\n }\n else {\n // Legacy support: with only modelTopology.\n // TODO(cais): Remove this deprecated API.\n console.warn('Please call tf.io.fromMemory() with only one argument. ' +\n 'The argument should be of type ModelArtifacts. ' +\n 'The multi-argument signature of tf.io.fromMemory() has been ' +\n 'deprecated and will be removed in a future release.');\n return new PassthroughLoader({ modelTopology: modelArtifacts });\n }\n }\n else {\n // Legacy support.\n // TODO(cais): Remove this deprecated API.\n console.warn('Please call tf.io.fromMemory() with only one argument. ' +\n 'The argument should be of type ModelArtifacts. ' +\n 'The multi-argument signature of tf.io.fromMemory() has been ' +\n 'deprecated and will be removed in a future release.');\n return new PassthroughLoader({\n modelTopology: modelArtifacts,\n weightSpecs,\n weightData,\n trainingConfig\n });\n }\n}\n/**\n * Creates an IOHandler that passes saved model artifacts to a callback.\n *\n * ```js\n * function handleSave(artifacts) {\n * // ... 
do something with the artifacts ...\n * return {modelArtifactsInfo: {...}, ...};\n * }\n *\n * const saveResult = model.save(tf.io.withSaveHandler(handleSave));\n * ```\n *\n * @param saveHandler A function that accepts a `ModelArtifacts` and returns a\n * `SaveResult`.\n */\nexport function withSaveHandler(saveHandler) {\n return new PassthroughSaver(saveHandler);\n}\n//# sourceMappingURL=passthrough.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// Importing local_storage and indexed_db is necessary for the routers to be\n// registered.\nimport './indexed_db';\nimport './local_storage';\nimport { browserFiles } from './browser_files';\nimport { browserHTTPRequest, http, isHTTPScheme } from './http';\nimport { concatenateArrayBuffers, decodeWeights, encodeWeights, getModelArtifactsInfoForJSON } from './io_utils';\nimport { fromMemory, withSaveHandler } from './passthrough';\nimport { getLoadHandlers, getSaveHandlers, registerLoadRouter, registerSaveRouter } from './router_registry';\nimport { loadWeights, weightsLoaderFactory } from './weights_loader';\nexport { copyModel, listModels, moveModel, removeModel } from './model_management';\nexport { browserFiles, browserHTTPRequest, concatenateArrayBuffers, decodeWeights, encodeWeights, fromMemory, getLoadHandlers, getModelArtifactsInfoForJSON, getSaveHandlers, http, isHTTPScheme, loadWeights, registerLoadRouter, registerSaveRouter, weightsLoaderFactory, withSaveHandler };\n//# sourceMappingURL=io.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Reshape } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * Reshapes a `tf.Tensor` to a given shape.\n *\n * Given an input tensor, returns a new tensor with the same values as the\n * input tensor with shape `shape`.\n *\n * If one component of shape is the special value -1, the size of that\n * dimension is computed so that the total size remains constant. In\n * particular, a shape of [-1] flattens into 1-D. 
At most one component of\n * shape can be -1.\n *\n * If shape is 1-D or higher, then the operation returns a tensor with shape\n * shape filled with the values of tensor. In this case, the number of\n * elements implied by shape must be the same as the number of elements in\n * tensor.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4]);\n * x.reshape([2, 2]).print();\n * ```\n *\n * @param x The input tensor to be reshaped.\n * @param shape An array of integers defining the output tensor shape.\n *\n * @doc {heading: 'Tensors', subheading: 'Transformations'}\n */\nfunction reshape_(x, shape) {\n const $x = convertToTensor(x, 'x', 'reshape', null);\n const inputs = { x: $x };\n const attrs = { shape };\n const forward = (backend, save) => {\n shape = util.inferFromImplicitShape(shape, $x.size);\n util.assert($x.size === util.sizeFromShape(shape), () => 'new shape and old shape must have the same number of elements.');\n save([$x]);\n return backend.reshape($x, shape);\n };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Reshape, attrs);\n}\nexport const reshape = op({ reshape_ });\n//# sourceMappingURL=reshape.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { BatchMatMul } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the dot product of two matrices, A * B. These must be matrices.\n *\n * ```js\n * const a = tf.tensor2d([1, 2], [1, 2]);\n * const b = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * a.matMul(b).print(); // or tf.matMul(a, b)\n * ```\n * @param a First matrix in dot product operation.\n * @param b Second matrix in dot product operation.\n * @param transposeA If true, `a` is transposed before multiplication.\n * @param transposeB If true, `b` is transposed before multiplication.\n *\n * @doc {heading: 'Operations', subheading: 'Matrices'}\n */\nfunction matMul_(a, b, transposeA = false, transposeB = false) {\n let $a = convertToTensor(a, 'a', 'matMul');\n let $b = convertToTensor(b, 'b', 'matMul');\n [$a, $b] = makeTypesMatch($a, $b);\n const forward = (backend, save) => {\n save([$a, $b]);\n const innerShapeA = transposeA ? $a.shape[$a.rank - 2] : $a.shape[$a.rank - 1];\n const innerShapeB = transposeB ? $b.shape[$b.rank - 1] : $b.shape[$b.rank - 2];\n const outerShapeA = transposeA ? $a.shape[$a.rank - 1] : $a.shape[$a.rank - 2];\n const outerShapeB = transposeB ? 
$b.shape[$b.rank - 2] : $b.shape[$b.rank - 1];\n const outerDimsA = $a.shape.slice(0, -2);\n const outerDimsB = $b.shape.slice(0, -2);\n const batchDimA = util.sizeFromShape(outerDimsA);\n const batchDimB = util.sizeFromShape(outerDimsB);\n const batchDimsCompatible = batchDimA === batchDimB || batchDimA === 1 || batchDimB === 1;\n util.assert($a.rank >= 2 && $b.rank >= 2 && batchDimsCompatible, () => `Error in matMul: the input batch dimensions must either be the ` +\n `same or at least one input batch dimension must be 1. Got input ` +\n `batch dimensions of (${outerDimsA}) and (${outerDimsB}).`);\n util.assert(innerShapeA === innerShapeB, () => `Error in matMul: inner shapes (${innerShapeA}) and (` +\n `${innerShapeB}) of Tensors with shapes ${$a.shape} and ` +\n `${$b.shape} and transposeA=${transposeA}` +\n ` and transposeB=${transposeB} must match.`);\n const outShapeOuterDims = batchDimA > batchDimB ? outerDimsA : outerDimsB;\n const outShape = outShapeOuterDims.concat([outerShapeA, outerShapeB]);\n const a3D = transposeA ?\n reshape($a, [batchDimA, innerShapeA, outerShapeA]) :\n reshape($a, [batchDimA, outerShapeA, innerShapeA]);\n const b3D = transposeB ?\n reshape($b, [batchDimB, outerShapeB, innerShapeB]) :\n reshape($b, [batchDimB, innerShapeB, outerShapeB]);\n const res3d = backend.batchMatMul(a3D, b3D, transposeA, transposeB);\n return reshape(res3d, outShape);\n };\n const inputs = { a: $a, b: $b };\n const attrs = { transposeA, transposeB };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, BatchMatMul, attrs);\n}\nexport const matMul = op({ matMul_ });\n//# sourceMappingURL=mat_mul.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { OneHot } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Creates a one-hot `tf.Tensor`. The locations represented by `indices` take\n * value `onValue` (defaults to 1), while all other locations take value\n * `offValue` (defaults to 0). 
If `indices` is rank `R`, the output has rank\n * `R+1` with the last axis of size `depth`.\n *\n * ```js\n * tf.oneHot(tf.tensor1d([0, 1], 'int32'), 3).print();\n * ```\n *\n * @param indices `tf.Tensor` of indices with dtype `int32`.\n * @param depth The depth of the one hot dimension.\n * @param onValue A number used to fill in the output when the index matches\n * the location.\n * @param offValue A number used to fill in the output when the index does\n * not match the location.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nfunction oneHot_(indices, depth, onValue = 1, offValue = 0) {\n if (depth < 2) {\n throw new Error(`Error in oneHot: depth must be >=2, but it is ${depth}`);\n }\n const $indices = convertToTensor(indices, 'indices', 'oneHot', 'int32');\n const outShape = [...$indices.shape, depth];\n const forward = (backend, save) => {\n save([$indices]);\n return reshape(backend.oneHot(reshape($indices, [$indices.size]), depth, onValue, offValue), outShape);\n };\n const inputs = { indices: $indices };\n const attrs = { depth, onValue, offValue };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, OneHot, attrs);\n}\nexport const oneHot = op({ oneHot_ });\n//# sourceMappingURL=one_hot.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Transpose } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * Transposes the `tf.Tensor`. Permutes the dimensions according to `perm`.\n *\n * The returned `tf.Tensor`'s dimension `i` will correspond to the input\n * dimension `perm[i]`. If `perm` is not given, it is set to `[n-1...0]`,\n * where `n` is the rank of the input `tf.Tensor`. 
Hence by default, this\n * operation performs a regular matrix transpose on 2-D input `tf.Tensor`s.\n *\n * ```js\n * const a = tf.tensor2d([1, 2, 3, 4, 5, 6], [2, 3]);\n *\n * a.transpose().print(); // or tf.transpose(a)\n * ```\n *\n * @param x The tensor to transpose.\n * @param perm The permutation of the dimensions of a.\n *\n * @doc {heading: 'Operations', subheading: 'Matrices'}\n */\nfunction transpose_(x, perm) {\n const $x = convertToTensor(x, 'x', 'transpose');\n if (perm == null) {\n perm = $x.shape.map((s, i) => i).reverse();\n }\n util.assert($x.rank === perm.length, () => `Error in transpose: rank of input ${$x.rank} ` +\n `must match length of perm ${perm}.`);\n perm.forEach(axis => {\n util.assert(axis >= 0 && axis < $x.rank, () => `All entries in 'perm' must be between 0 and ${$x.rank - 1}` +\n ` but got ${perm}`);\n });\n if ($x.rank <= 1) {\n return $x.clone();\n }\n const inputs = { x: $x };\n const attrs = { perm };\n return ENGINE.runKernelFunc(backend => backend.transpose($x, perm), inputs, null /* gradient */, Transpose, attrs);\n}\nexport const transpose = op({ transpose_ });\n//# sourceMappingURL=transpose.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { cast } from './cast';\nimport { matMul } from './mat_mul';\nimport { oneHot } from './one_hot';\nimport { op } from './operation';\nimport { transpose } from './transpose';\n/**\n * Computes the confusion matrix from true labels and predicted labels.\n *\n * ```js\n * const labels = tf.tensor1d([0, 1, 2, 1, 0], 'int32');\n * const predictions = tf.tensor1d([0, 2, 2, 1, 0], 'int32');\n * const numClasses = 3;\n * const out = tf.math.confusionMatrix(labels, predictions, numClasses);\n * out.print();\n * // Expected output matrix:\n * // [[2, 0, 0],\n * // [0, 1, 1],\n * // [0, 0, 1]]\n * ```\n *\n * @param labels The target labels, assumed to be 0-based integers\n * for the classes. The shape is `[numExamples]`, where\n * `numExamples` is the number of examples included.\n * @param predictions The predicted classes, assumed to be\n * 0-based integers for the classes. Must have the same shape as `labels`.\n * @param numClasses Number of all classes, as an integer.\n * Its value must be larger than the largest element in `labels` and\n * `predictions`.\n * @returns The confusion matrix as a int32-type 2D tensor. 
The value at\n * row `r` and column `c` is the number of times examples of actual class\n * `r` were predicted as class `c`.\n *\n * @doc {heading: 'Operations', subheading: 'Evaluation'}\n */\nexport function confusionMatrix_(labels, predictions, numClasses) {\n const $labels = convertToTensor(labels, 'labels', 'confusionMatrix');\n const $predictions = convertToTensor(predictions, 'predictions', 'confusionMatrix');\n util.assert(numClasses == null || numClasses > 0 && Number.isInteger(numClasses), () => `If provided, numClasses must be a positive integer, ` +\n `but got ${numClasses}`);\n util.assert($labels.rank === 1, () => `Expected the rank of labels to be 1, but got ${$labels.rank}`);\n util.assert($predictions.rank === 1, () => `Expected the rank of predictions to be 1, ` +\n `but got ${$predictions.rank}`);\n util.assert($labels.shape[0] === $predictions.shape[0], () => `Mismatch in the number of examples: ` +\n `${$labels.shape[0]} vs. ${$predictions.shape[0]}. ` +\n `Labels and predictions should have the same number of elements.`);\n util.assert(numClasses > 0 && Number.isInteger(numClasses), () => `numClasses is required to be a positive integer, but got ` +\n `${numClasses}`);\n // TODO(cais): In the future, if oneHot supports tensors inputs for\n // `numClasses`, `confusionMatrix` can make `numClasses` optional.\n const oneHotLabels = oneHot(cast($labels, 'int32'), numClasses);\n const oneHotPredictions = oneHot(cast($predictions, 'int32'), numClasses);\n const oneHotLabelsT = transpose(oneHotLabels);\n const product = matMul(oneHotLabelsT, oneHotPredictions);\n return cast(product, 'int32');\n}\nexport const confusionMatrix = op({ confusionMatrix_ });\n//# sourceMappingURL=confusion_matrix.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * Exports under the tf.math.* namespace.\n */\nimport { confusionMatrix } from './ops/confusion_matrix';\nexport { confusionMatrix };\n//# sourceMappingURL=math.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { inferShape } from '../tensor_util_env';\nimport { assertNonNull } from '../util';\nimport { makeTensor } from './tensor_ops_util';\n/**\n * Creates rank-3 `tf.Tensor` with the provided values, shape and dtype.\n *\n * The same functionality can be achieved with `tf.tensor`, but in general\n * we recommend using `tf.tensor3d` as it makes the code more readable.\n *\n * ```js\n * // Pass a nested array.\n * tf.tensor3d([[[1], [2]], [[3], [4]]]).print();\n * ```\n * ```js\n * // Pass a flat array and specify a shape.\n * tf.tensor3d([1, 2, 3, 4], [2, 2, 1]).print();\n * ```\n *\n * @param values The values of the tensor. Can be nested array of numbers,\n * or a flat array, or a `TypedArray`.\n * @param shape The shape of the tensor. If not provided, it is inferred from\n * `values`.\n * @param dtype The data type.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function tensor3d(values, shape, dtype) {\n assertNonNull(values);\n if (shape != null && shape.length !== 3) {\n throw new Error('tensor3d() requires shape to have three numbers');\n }\n const inferredShape = inferShape(values, dtype);\n if (inferredShape.length !== 3 && inferredShape.length !== 1) {\n throw new Error('tensor3d() requires values to be number[][][] or flat/TypedArray');\n }\n if (inferredShape.length === 1 && shape == null) {\n throw new Error('tensor3d() requires shape to be provided when `values` ' +\n 'are a flat array');\n }\n return makeTensor(values, shape, inferredShape, dtype);\n}\n//# sourceMappingURL=tensor3d.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { FromPixels } from '../kernel_names';\nimport { getKernel } from '../kernel_registry';\nimport { Tensor } from '../tensor';\nimport { convertToTensor } from '../tensor_util_env';\nimport { cast } from './cast';\nimport { op } from './operation';\nimport { tensor3d } from './tensor3d';\nlet fromPixels2DContext;\n/**\n * Creates a `tf.Tensor` from an image.\n *\n * ```js\n * const image = new ImageData(1, 1);\n * image.data[0] = 100;\n * image.data[1] = 150;\n * image.data[2] = 200;\n * image.data[3] = 255;\n *\n * tf.browser.fromPixels(image).print();\n * ```\n *\n * @param pixels The input image to construct the tensor from. The\n * supported image types are all 4-channel. You can also pass in an image\n * object with following attributes:\n * `{data: Uint8Array; width: number; height: number}`\n * @param numChannels The number of channels of the output tensor. A\n * numChannels value less than 4 allows you to ignore channels. Defaults to\n * 3 (ignores alpha channel of input image).\n *\n * @doc {heading: 'Browser', namespace: 'browser', ignoreCI: true}\n */\nfunction fromPixels_(pixels, numChannels = 3) {\n // Sanity checks.\n if (numChannels > 4) {\n throw new Error('Cannot construct Tensor with more than 4 channels from pixels.');\n }\n if (pixels == null) {\n throw new Error('pixels passed to tf.browser.fromPixels() can not be null');\n }\n let isPixelData = false;\n let isImageData = false;\n let isVideo = false;\n let isImage = false;\n let isCanvasLike = false;\n if (pixels.data instanceof Uint8Array) {\n isPixelData = true;\n }\n else if (typeof (ImageData) !== 'undefined' && pixels instanceof ImageData) {\n isImageData = true;\n }\n else if (typeof (HTMLVideoElement) !== 'undefined' &&\n pixels instanceof HTMLVideoElement) {\n isVideo = true;\n }\n else if (typeof (HTMLImageElement) !== 'undefined' &&\n pixels instanceof HTMLImageElement) {\n isImage = true;\n // tslint:disable-next-line: no-any\n }\n else if (pixels.getContext != null) {\n isCanvasLike = true;\n }\n else {\n throw new Error('pixels passed to tf.browser.fromPixels() must be either an ' +\n `HTMLVideoElement, HTMLImageElement, HTMLCanvasElement, ImageData ` +\n `in browser, or OffscreenCanvas, ImageData in webworker` +\n ` or {data: Uint32Array, width: number, height: number}, ` +\n `but was ${pixels.constructor.name}`);\n }\n if (isVideo) {\n const HAVE_CURRENT_DATA_READY_STATE = 2;\n if (isVideo &&\n pixels.readyState <\n HAVE_CURRENT_DATA_READY_STATE) {\n throw new Error('The video element has not loaded data yet. 
Please wait for ' +\n '`loadeddata` event on the element.');\n }\n }\n // If the current backend has 'FromPixels' registered, it has a more\n // efficient way of handling pixel uploads, so we call that.\n const kernel = getKernel(FromPixels, ENGINE.backendName);\n if (kernel != null) {\n const inputs = { pixels };\n const attrs = { numChannels };\n return ENGINE.runKernel(FromPixels, inputs, attrs);\n }\n const [width, height] = isVideo ?\n [\n pixels.videoWidth,\n pixels.videoHeight\n ] :\n [pixels.width, pixels.height];\n let vals;\n if (isCanvasLike) {\n vals =\n // tslint:disable-next-line:no-any\n pixels.getContext('2d').getImageData(0, 0, width, height).data;\n }\n else if (isImageData || isPixelData) {\n vals = pixels.data;\n }\n else if (isImage || isVideo) {\n if (fromPixels2DContext == null) {\n fromPixels2DContext = document.createElement('canvas').getContext('2d');\n }\n fromPixels2DContext.canvas.width = width;\n fromPixels2DContext.canvas.height = height;\n fromPixels2DContext.drawImage(pixels, 0, 0, width, height);\n vals = fromPixels2DContext.getImageData(0, 0, width, height).data;\n }\n let values;\n if (numChannels === 4) {\n values = new Int32Array(vals);\n }\n else {\n const numPixels = width * height;\n values = new Int32Array(numPixels * numChannels);\n for (let i = 0; i < numPixels; i++) {\n for (let channel = 0; channel < numChannels; ++channel) {\n values[i * numChannels + channel] = vals[i * 4 + channel];\n }\n }\n }\n const outShape = [height, width, numChannels];\n return tensor3d(values, outShape, 'int32');\n}\n/**\n * Draws a `tf.Tensor` of pixel values to a byte array or optionally a\n * canvas.\n *\n * When the dtype of the input is 'float32', we assume values in the range\n * [0-1]. Otherwise, when input is 'int32', we assume values in the range\n * [0-255].\n *\n * Returns a promise that resolves when the canvas has been drawn to.\n *\n * @param img A rank-2 or rank-3 tensor. If rank-2, draws grayscale. If\n * rank-3, must have depth of 1, 3 or 4. When depth of 1, draws\n * grayscale. When depth of 3, we draw with the first three components of\n * the depth dimension corresponding to r, g, b and alpha = 1. When depth of\n * 4, all four components of the depth dimension correspond to r, g, b, a.\n * @param canvas The canvas to draw to.\n *\n * @doc {heading: 'Browser', namespace: 'browser'}\n */\nexport async function toPixels(img, canvas) {\n let $img = convertToTensor(img, 'img', 'toPixels');\n if (!(img instanceof Tensor)) {\n // Assume int32 if user passed a native array.\n const originalImgTensor = $img;\n $img = cast(originalImgTensor, 'int32');\n originalImgTensor.dispose();\n }\n if ($img.rank !== 2 && $img.rank !== 3) {\n throw new Error(`toPixels only supports rank 2 or 3 tensors, got rank ${$img.rank}.`);\n }\n const [height, width] = $img.shape.slice(0, 2);\n const depth = $img.rank === 2 ? 1 : $img.shape[2];\n if (depth > 4 || depth === 2) {\n throw new Error(`toPixels only supports depth of size ` +\n `1, 3 or 4 but got ${depth}`);\n }\n if ($img.dtype !== 'float32' && $img.dtype !== 'int32') {\n throw new Error(`Unsupported type for toPixels: ${$img.dtype}.` +\n ` Please use float32 or int32 tensors.`);\n }\n const data = await $img.data();\n const multiplier = $img.dtype === 'float32' ? 
255 : 1;\n const bytes = new Uint8ClampedArray(width * height * 4);\n for (let i = 0; i < height * width; ++i) {\n const rgba = [0, 0, 0, 255];\n for (let d = 0; d < depth; d++) {\n const value = data[i * depth + d];\n if ($img.dtype === 'float32') {\n if (value < 0 || value > 1) {\n throw new Error(`Tensor values for a float32 Tensor must be in the ` +\n `range [0 - 1] but encountered ${value}.`);\n }\n }\n else if ($img.dtype === 'int32') {\n if (value < 0 || value > 255) {\n throw new Error(`Tensor values for a int32 Tensor must be in the ` +\n `range [0 - 255] but encountered ${value}.`);\n }\n }\n if (depth === 1) {\n rgba[0] = value * multiplier;\n rgba[1] = value * multiplier;\n rgba[2] = value * multiplier;\n }\n else {\n rgba[d] = value * multiplier;\n }\n }\n const j = i * 4;\n bytes[j + 0] = Math.round(rgba[0]);\n bytes[j + 1] = Math.round(rgba[1]);\n bytes[j + 2] = Math.round(rgba[2]);\n bytes[j + 3] = Math.round(rgba[3]);\n }\n if (canvas != null) {\n canvas.width = width;\n canvas.height = height;\n const ctx = canvas.getContext('2d');\n const imageData = new ImageData(bytes, width, height);\n ctx.putImageData(imageData, 0, 0);\n }\n if ($img !== img) {\n $img.dispose();\n }\n return bytes;\n}\nexport const fromPixels = op({ fromPixels_ });\n//# sourceMappingURL=browser.js.map", "import { computeStrides } from '../util';\n/**\n * Validate gather nd inputs.\n *\n * @param tensor The tensor contains the source values.\n * @param indices The tensor contains the indices to slice the source.\n *\n * @returns [resultShape, numUpdates, sliceSize, strides]\n */\nexport function prepareAndValidate(tensor, indices) {\n if (tensor.rank < 1) {\n throw new Error('tf.gatherND() expects the input to be rank 1 or higher,' +\n ` but the rank was ${tensor.rank}.`);\n }\n if (indices.rank < 1) {\n throw new Error('tf.gatherND() expects the indices to be rank 1 or higher,' +\n ` but the rank was ${indices.rank}.`);\n }\n if (indices.dtype !== 'int32') {\n throw new Error('tf.gatherND() expects the indices to be int32 type,' +\n ` but the dtype was ${indices.dtype}.`);\n }\n if (indices.shape[indices.rank - 1] > tensor.rank) {\n throw new Error('index innermost dimension length must be <= tensor rank; saw: ' +\n `${indices.shape[indices.rank - 1]} vs. ${tensor.rank}`);\n }\n if (tensor.size === 0) {\n throw new Error('Requested more than 0 entries, but input is empty.' +\n ` Input shape: ${tensor.shape}.`);\n }\n const indicesShape = indices.shape;\n const sliceRank = indicesShape[indicesShape.length - 1];\n // The result shape is\n // indices.shape[:-1] + params.shape[indices.shape[-1]:]\n let nResult = 1;\n for (let i = 0; i < indicesShape.length - 1; ++i) {\n nResult *= indicesShape[i];\n }\n const inputShape = tensor.shape;\n const resultShape = indicesShape.slice();\n resultShape.pop();\n let sliceSize = 1;\n for (let i = sliceRank; i < tensor.rank; ++i) {\n sliceSize *= inputShape[i];\n resultShape.push(inputShape[i]);\n }\n const strides = [...computeStrides(tensor.shape).map(stride => stride / sliceSize),\n 1].slice(0, sliceRank);\n return [resultShape, nResult, sliceSize, strides];\n}\n//# sourceMappingURL=gather_nd_util.js.map", "import { computeStrides, sizeFromShape } from '../util';\n/**\n * Check whether updates.shape = indices.shape[:batchDim] +\n * shape[sliceDim:]\n *\n * @param x The input tensor.\n */\nexport function validateUpdateShape(shape, indices, updates) {\n const sliceDim = (indices.rank > 1) ? 
indices.shape[indices.rank - 1] : 1;\n const batchDim = (indices.rank > 1) ? indices.rank - 1 : 1;\n const shapeError = 'Must have updates.shape = indices.shape[:batchDim] + ' +\n `shape[sliceDim:], got updates.shape: ${updates.shape}` +\n `, indices.shape: ${indices.shape}, shape: ${shape}` +\n `, sliceDim: ${sliceDim}, and batchDim: ${batchDim}.`;\n if (updates.rank < batchDim) {\n throw new Error(shapeError + ` update.rank < ${batchDim}. `);\n }\n if (shape.length < sliceDim + (updates.rank - batchDim)) {\n throw new Error(shapeError +\n ` Output shape length < ${sliceDim + (updates.rank - batchDim)}`);\n }\n if (updates.rank !== batchDim + shape.length - sliceDim) {\n throw new Error(shapeError + ` update.rank != ${batchDim + shape.length - sliceDim}`);\n }\n for (let d = 0; d < batchDim; ++d) {\n if (updates.shape[d] !== indices.shape[d]) {\n throw new Error(shapeError +\n ` updates.shape[${d}] (${updates.shape[d]}) != indices.shape[${d}] (${indices.shape[d]}).`);\n }\n }\n for (let d = 0; d < updates.rank - batchDim; ++d) {\n if (updates.shape[d + batchDim] !== shape[d + sliceDim]) {\n throw new Error(shapeError +\n ` updates.shape[${d + batchDim}] (${updates.shape[d + batchDim]}) != shape[${d + batchDim}] (${shape[d + batchDim]})`);\n }\n }\n}\n/**\n * Validate scatter nd inputs.\n *\n * @param update The tensor contains the update values.\n * @param indices The tensor contains the indices for the update values.\n * @param shape The shape of the output tensor.\n */\nexport function validateInput(updates, indices, shape) {\n if (indices.rank < 1) {\n throw new Error('tf.scatterND() expects the indices to be rank 1 or higher,' +\n ` but the rank was ${indices.rank}.`);\n }\n if (updates.rank < 1) {\n throw new Error('tf.scatterND() expects the updates to be rank 1 or higher,' +\n ` but the rank was ${updates.rank}.`);\n }\n if (indices.dtype !== 'int32') {\n throw new Error(`The dtype of 'indices' should be int32, but got dtype: ${indices.dtype}`);\n }\n if (shape.length < 1) {\n throw new Error(`Output rank must be greater or equal to 1, but got shape: ${shape}`);\n }\n if (shape.length === 0) {\n if (indices.size === 0) {\n throw new Error(`Indices specified for empty output. indices shape: ${indices.shape}`);\n }\n if (updates.size === 0) {\n throw new Error(`Updates specified for empty output. updates shape: ${updates.shape}`);\n }\n }\n validateUpdateShape(shape, indices, updates);\n}\n/**\n * Calculate the shape information for the output.\n *\n * @param update The tensor contains the update values.\n * @param indices The tensor contains the indices for the update values.\n * @param shape The shape of the output tensor.\n *\n * @returns ScatterShapeInfo\n */\nexport function calculateShapes(updates, indices, shape) {\n // Calculate the number of dimensions in indices\n const indicesRank = indices.shape.length;\n const sliceRank = (indicesRank > 1) ? indices.shape[indicesRank - 1] : 1;\n // Calculate the number of elements that make up each slice of our updated\n // tensor. This allows us to work with flattened tensors and copy over whole\n // slices at a time.\n const totalNd = shape.length;\n let sliceSize = 1;\n for (let i = sliceRank; i < totalNd; ++i) {\n sliceSize *= shape[i];\n }\n const safeSliceDim = (sliceRank < 1) ? 
1 : sliceRank;\n const numUpdates = sizeFromShape(indices.shape) / safeSliceDim;\n const strides = [...computeStrides(shape.slice(0, sliceRank)), 1];\n const outputSize = sizeFromShape(shape);\n return { sliceRank, numUpdates, sliceSize, strides, outputSize };\n}\n//# sourceMappingURL=scatter_nd_util.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as util from '../util';\nexport function assertParamsValid(input, begin, size) {\n const inputRank = input.shape.length;\n util.assert(inputRank === begin.length, () => `Error in slice${inputRank}D: Length of begin ${begin} must ` +\n `match the rank of the array (${inputRank}).`);\n util.assert(inputRank === size.length, () => `Error in slice${inputRank}D: Length of size ${size} must ` +\n `match the rank of the array (${inputRank}).`);\n for (let i = 0; i < inputRank; ++i) {\n util.assert(begin[i] + size[i] <= input.shape[i], () => `Error in slice${inputRank}D: begin[${i}] + size[${i}] ` +\n `(${begin[i] + size[i]}) would overflow input.shape[${i}] (${input.shape[i]})`);\n }\n}\n/** Converts a binary mask to an array of axes. Used in stridedSlice(). */\nexport function maskToAxes(mask) {\n const axes = [];\n let axis = 0;\n while (mask > 0) {\n if (mask & 1) {\n axes.push(axis);\n }\n mask /= 2;\n axis++;\n }\n return axes;\n}\n/** Computes the output shape given the strided slice params. */\nexport function computeOutShape(begin, end, strides) {\n const size = [];\n for (let axis = 0; axis < begin.length; axis++) {\n size[axis] = Math.ceil((end[axis] - begin[axis]) / strides[axis]);\n }\n return size;\n}\n// Creates full selection at the elided dimensions. If the dimension matches\n// the ellipsis mask, override the current stride value. 
Otherwise, insert.\nexport function stridesWithElidedDims(strides, ellipsisInsertionIndex, numElidedAxes, inputShape) {\n const newStrides = [...strides];\n for (let i = newStrides.length; i < inputShape.length; i++) {\n newStrides.push(1);\n }\n for (let i = 0; i < numElidedAxes; i++) {\n if (i === 0) {\n newStrides[ellipsisInsertionIndex] = 1;\n }\n else {\n newStrides.splice(ellipsisInsertionIndex, 0 /* num elements to delete */, 1 /* element to add */);\n newStrides.pop();\n }\n }\n return newStrides;\n}\nfunction unnormalizeAxis(ellipsisInsertionIndex, numElidedAxes, normalizedAxis) {\n if (normalizedAxis <= ellipsisInsertionIndex) {\n return normalizedAxis;\n }\n return normalizedAxis - (numElidedAxes - 1);\n}\nfunction getElidedAxes(numElidedAxes, ellipsisInsertionIndex) {\n const elidedAxes = [];\n for (let i = 0; i < numElidedAxes; i++) {\n elidedAxes.push(ellipsisInsertionIndex + i);\n }\n return elidedAxes;\n}\n// Normalize the start, end and strides.\nexport function getNormalizedAxes(inputShape, ellipsisAxes, numInterpolatedAxes, begin, end, strides, beginMask, endMask, ellipsisMask) {\n const inputRank = inputShape.length;\n let normalizedBegin = new Array(inputRank), normalizedEnd = new Array(inputRank), normalizedStrides = new Array(inputRank);\n if (ellipsisAxes.length && numInterpolatedAxes > 0) {\n const fullIndex = ellipsisAxes[0];\n // The ellipsis applies to the masked index as well as any dimensions\n // that are interpolated.\n const numElidedAxes = numInterpolatedAxes + 1;\n normalizedBegin = startIndicesWithElidedDims(beginMask, fullIndex, numElidedAxes, begin, inputShape);\n normalizedEnd = stopIndicesWithElidedDims(endMask, fullIndex, numElidedAxes, end, inputShape);\n normalizedStrides =\n stridesWithElidedDims(strides, fullIndex, numElidedAxes, inputShape);\n }\n else {\n for (let axis = 0; axis < inputRank; axis++) {\n normalizedBegin[axis] = startForAxis(beginMask, begin, strides, inputShape, axis, ellipsisMask);\n normalizedEnd[axis] =\n stopForAxis(endMask, end, strides, inputShape, axis, ellipsisMask);\n normalizedStrides[axis] = stridesForAxis(strides, axis, ellipsisMask);\n }\n }\n return {\n begin: normalizedBegin,\n end: normalizedEnd,\n strides: normalizedStrides\n };\n}\n// Creates full selection at the elided dimensions. If the dimension matches\n// the ellipsis mask, override the current start value. Otherwise, insert.\nexport function startIndicesWithElidedDims(beginMask, ellipsisInsertionIndex, numElidedAxes, originalBegin, inputShape) {\n const newIndices = [...inputShape];\n const elidedAxes = getElidedAxes(numElidedAxes, ellipsisInsertionIndex);\n for (let axis = 0; axis < newIndices.length; axis++) {\n if (elidedAxes.indexOf(axis) > -1) {\n newIndices[axis] = 0;\n }\n else {\n const originalAxis = unnormalizeAxis(ellipsisInsertionIndex, numElidedAxes, axis);\n let originalValue = originalBegin[originalAxis];\n if (beginMask & 1 << originalAxis) {\n originalValue = 0;\n }\n newIndices[axis] = originalValue;\n }\n }\n return newIndices;\n}\n// Creates full selection at the elided dimensions. If the dimension matches\n// the ellipsis mask, override the current stop value. 
Otherwise, insert.\nexport function stopIndicesWithElidedDims(endMask, ellipsisInsertionIndex, numElidedAxes, originalEnd, inputShape) {\n const newIndices = [...inputShape];\n const elidedAxes = getElidedAxes(numElidedAxes, ellipsisInsertionIndex);\n for (let axis = 0; axis < newIndices.length; axis++) {\n if (elidedAxes.indexOf(axis) > -1) {\n newIndices[axis] = Number.MAX_SAFE_INTEGER;\n }\n else {\n const originalAxis = unnormalizeAxis(ellipsisInsertionIndex, numElidedAxes, axis);\n let originalValue = originalEnd[originalAxis];\n if (endMask & 1 << originalAxis) {\n originalValue = Number.MAX_SAFE_INTEGER;\n }\n newIndices[axis] = originalValue;\n }\n }\n for (let i = 0; i < newIndices.length; i++) {\n // Handle negative indices\n const axisSize = inputShape[i];\n if (newIndices[i] < 0) {\n newIndices[i] += axisSize;\n }\n newIndices[i] = util.clamp(0, newIndices[i], inputShape[i]);\n }\n return newIndices;\n}\nexport function stridesForAxis(strides, axis, ellipsisMask) {\n let stride = strides[axis];\n if (ellipsisMask & (1 << axis) || stride == null) {\n stride = 1;\n }\n return stride;\n}\nexport function startForAxis(beginMask, startIndices, strides, inputShape, axis, ellipsisMask) {\n // Begin with the specified index\n let start = startIndices[axis];\n const stride = strides[axis] || 1;\n // Check the axis bit from right of masked axes, or the begin index is not set\n // for the axis.\n if (beginMask & 1 << axis || ellipsisMask & 1 << axis || start == null) {\n if (stride > 0) {\n // Forward iteration - use the first element. These values will get\n // clamped below (Note: We could have set them to 0 and axis_size-1, but\n // use lowest() and max() to maintain symmetry with StopForAxis())\n start = Number.MIN_SAFE_INTEGER;\n }\n else {\n // Backward iteration - use the last element.\n start = Number.MAX_SAFE_INTEGER;\n }\n }\n // Handle negative indices\n const axisSize = inputShape[axis];\n if (start < 0) {\n start += axisSize;\n }\n // Clamping\n start = util.clamp(0, start, axisSize - 1);\n return start;\n}\nexport function stopForAxis(endMask, stopIndices, strides, inputShape, axis, ellipsisMask) {\n // Begin with the specified index\n let stop = stopIndices[axis];\n const stride = strides[axis] || 1;\n // Check the axis bit from right of masked axes, or if the stop index is not\n // set for this axis.\n if (endMask & (1 << axis) || ellipsisMask & (1 << axis) || stop == null) {\n if (stride > 0) {\n // Forward iteration - use the last element. 
These values will get\n // clamped below\n stop = Number.MAX_SAFE_INTEGER;\n }\n else {\n // Backward iteration - use the first element.\n stop = Number.MIN_SAFE_INTEGER;\n }\n }\n // Handle negative indices\n const axisSize = inputShape[axis];\n if (stop < 0) {\n stop += axisSize;\n }\n // Clamping\n // Because the end index points one past the last element, we need slightly\n // different clamping ranges depending on the direction.\n if (stride > 0) {\n // Forward iteration\n stop = util.clamp(0, stop, axisSize);\n }\n else {\n // Backward iteration\n stop = util.clamp(-1, stop, axisSize - 1);\n }\n return stop;\n}\n/**\n * Returns true if the slice occupies a continous set of elements in the\n * 'flat' space.\n */\nexport function isSliceContinous(shape, begin, size) {\n // Index of the first axis that has size > 1.\n let firstNonOneAxis = size.length;\n for (let i = 0; i < size.length; i++) {\n if (size[i] > 1) {\n firstNonOneAxis = i;\n break;\n }\n }\n for (let i = firstNonOneAxis + 1; i < size.length; i++) {\n if (begin[i] > 0 || size[i] !== shape[i]) {\n return false;\n }\n }\n return true;\n}\nexport function computeFlatOffset(begin, strides) {\n let flatOffset = begin.length > 0 ? begin[begin.length - 1] : 1;\n for (let i = 0; i < begin.length - 1; i++) {\n flatOffset += begin[i] * strides[i];\n }\n return flatOffset;\n}\nexport function parseSliceParams(x, begin, size) {\n // The following logic allows for more ergonomic calls.\n let begin_;\n const xRank = x.shape.length;\n if (typeof begin === 'number') {\n begin_ = [begin, ...new Array(xRank - 1).fill(0)];\n }\n else if (begin.length < xRank) {\n begin_ = begin.concat(new Array(xRank - begin.length).fill(0));\n }\n else {\n begin_ = begin.slice();\n }\n begin_.forEach(d => {\n util.assert(d !== -1, () => 'slice() does not support negative begin indexing.');\n });\n let size_;\n if (size == null) {\n size_ = new Array(xRank).fill(-1);\n }\n else if (typeof size === 'number') {\n size_ = [size, ...new Array(xRank - 1).fill(-1)];\n }\n else if (size.length < xRank) {\n size_ = size.concat(new Array(xRank - size.length).fill(-1));\n }\n else {\n size_ = size;\n }\n size_ = size_.map((d, i) => {\n if (d >= 0) {\n return d;\n }\n else {\n util.assert(d === -1, () => `Negative size values should be exactly -1 but got ` +\n `${d} for the slice() size at index ${i}.`);\n return x.shape[i] - begin_[i];\n }\n });\n return [begin_, size_];\n}\n//# sourceMappingURL=slice_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { assert } from './util';\n/**\n * Serializable defines the serialization contract.\n *\n * TFJS requires serializable classes to return their className when asked\n * to avoid issues with minification.\n */\nexport class Serializable {\n /**\n * Return the class name for this class to use in serialization contexts.\n *\n * Generally speaking this will be the same thing that constructor.name\n * would have returned. However, the class name needs to be robust\n * against minification for serialization/deserialization to work properly.\n *\n * There's also places such as initializers.VarianceScaling, where\n * implementation details between different languages led to different\n * class hierarchies and a non-leaf node is used for serialization purposes.\n */\n getClassName() {\n return this.constructor\n .className;\n }\n /**\n * Creates an instance of T from a ConfigDict.\n *\n * This works for most descendants of serializable. A few need to\n * provide special handling.\n * @param cls A Constructor for the class to instantiate.\n * @param config The Configuration for the object.\n */\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls(config);\n }\n}\n/**\n * Maps string keys to class constructors.\n *\n * Used during (de)serialization from the cross-language JSON format, which\n * requires the class name in the serialization format matches the class\n * names as used in Python, should it exist.\n */\nexport class SerializationMap {\n constructor() {\n this.classNameMap = {};\n }\n /**\n * Returns the singleton instance of the map.\n */\n static getMap() {\n if (SerializationMap.instance == null) {\n SerializationMap.instance = new SerializationMap();\n }\n return SerializationMap.instance;\n }\n /**\n * Registers the class as serializable.\n */\n static register(cls) {\n SerializationMap.getMap().classNameMap[cls.className] =\n [cls, cls.fromConfig];\n }\n}\n/**\n * Register a class with the serialization map of TensorFlow.js.\n *\n * This is often used for registering custom Layers, so they can be\n * serialized and deserialized.\n *\n * Example:\n *\n * ```js\n * class MyCustomLayer extends tf.layers.Layer {\n * static className = 'MyCustomLayer';\n *\n * constructor(config) {\n * super(config);\n * }\n * }\n * tf.serialization.registerClass(MyCustomLayer);\n * ```\n *\n * @param cls The class to be registered. 
It must have a public static member\n * called `className` defined and the value must be a non-empty string.\n *\n * @doc {heading: 'Models', subheading: 'Serialization', ignoreCI: true}\n */\nexport function registerClass(cls) {\n assert(cls.className != null, () => `Class being registered does not have the static className ` +\n `property defined.`);\n assert(typeof cls.className === 'string', () => `className is required to be a string, but got type ` +\n typeof cls.className);\n assert(cls.className.length > 0, () => `Class being registered has an empty-string as its className, ` +\n `which is disallowed.`);\n SerializationMap.register(cls);\n}\n//# sourceMappingURL=serialization.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from './engine';\nimport { inferShape } from './tensor_util_env';\nimport { arraysEqual, flatten, isString, isTypedArray } from './util';\nconst TEST_EPSILON_FLOAT32 = 1e-3;\nexport const TEST_EPSILON_FLOAT16 = 1e-1;\nexport function expectArraysClose(actual, expected, epsilon) {\n if (epsilon == null) {\n epsilon = testEpsilon();\n }\n return expectArraysPredicate(actual, expected, (a, b) => areClose(a, b, epsilon));\n}\nexport function testEpsilon() {\n return ENGINE.backend.floatPrecision() === 32 ? TEST_EPSILON_FLOAT32 :\n TEST_EPSILON_FLOAT16;\n}\nfunction expectArraysPredicate(actual, expected, predicate) {\n let checkClassType = true;\n if (isTypedArray(actual) || isTypedArray(expected)) {\n checkClassType = false;\n }\n if (isTypedArray(actual) && isTypedArray(expected)) {\n checkClassType = true;\n }\n if (checkClassType) {\n const aType = actual.constructor.name;\n const bType = expected.constructor.name;\n if (aType !== bType) {\n throw new Error(`Arrays are of different type. Actual: ${aType}. ` +\n `Expected: ${bType}`);\n }\n }\n if (Array.isArray(actual) && Array.isArray(expected)) {\n const actualShape = inferShape(actual);\n const expectedShape = inferShape(expected);\n if (!arraysEqual(actualShape, expectedShape)) {\n throw new Error(`Arrays have different shapes. ` +\n `Actual: [${actualShape}]. Expected: [${expectedShape}]`);\n }\n }\n const actualFlat = isTypedArray(actual) ? 
actual : flatten(actual);\n const expectedFlat = isTypedArray(expected) ?\n expected :\n flatten(expected);\n if (actualFlat.length !== expectedFlat.length) {\n throw new Error(`Arrays have different lengths actual: ${actualFlat.length} vs ` +\n `expected: ${expectedFlat.length}.\\n` +\n `Actual: ${actualFlat}.\\n` +\n `Expected: ${expectedFlat}.`);\n }\n for (let i = 0; i < expectedFlat.length; ++i) {\n const a = actualFlat[i];\n const e = expectedFlat[i];\n if (!predicate(a, e)) {\n throw new Error(`Arrays differ: actual[${i}] = ${a}, expected[${i}] = ${e}.\\n` +\n `Actual: ${actualFlat}.\\n` +\n `Expected: ${expectedFlat}.`);\n }\n }\n}\nexport function expectPromiseToFail(fn, done) {\n fn().then(() => done.fail(), () => done());\n}\nexport function expectArraysEqual(actual, expected) {\n const exp = typeof expected === 'string' || typeof expected === 'number' ||\n typeof expected === 'boolean' ?\n [expected] :\n expected;\n if (isString(actual) || isString(actual[0]) ||\n isString(expected) || isString(expected[0])) {\n // tslint:disable-next-line: triple-equals\n return expectArraysPredicate(actual, exp, (a, b) => a == b);\n }\n return expectArraysPredicate(actual, expected, (a, b) => areClose(a, b, 0));\n}\nexport function expectNumbersClose(a, e, epsilon) {\n if (epsilon == null) {\n epsilon = testEpsilon();\n }\n if (!areClose(a, e, epsilon)) {\n throw new Error(`Numbers differ: actual === ${a}, expected === ${e}`);\n }\n}\nfunction areClose(a, e, epsilon) {\n if (!isFinite(a) && !isFinite(e)) {\n return true;\n }\n if (isNaN(a) || isNaN(e) || Math.abs(a - e) > epsilon) {\n return false;\n }\n return true;\n}\nexport function expectValuesInRange(actual, low, high) {\n for (let i = 0; i < actual.length; i++) {\n if (actual[i] < low || actual[i] > high) {\n throw new Error(`Value out of range:${actual[i]} low: ${low}, high: ${high}`);\n }\n }\n}\nexport function expectArrayBuffersEqual(actual, expected) {\n // Safari & Jasmine don't like comparing ArrayBuffers directly. Wrapping in\n // a Float32Array solves this issue.\n expect(new Float32Array(actual)).toEqual(new Float32Array(expected));\n}\n//# sourceMappingURL=test_util.js.map", "/** @license See the LICENSE file. */\n// This code is auto-generated, do not modify this file!\nconst version = '2.7.0';\nexport { version };\n//# sourceMappingURL=version.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from './engine';\nimport { env } from './environment';\nimport { setDeprecationWarningFn } from './tensor';\nimport { getTensorsInContainer } from './tensor_util';\n/**\n * Enables production mode which disables correctness checks in favor of\n * performance.\n *\n * @doc {heading: 'Environment'}\n */\nexport function enableProdMode() {\n env().set('PROD', true);\n}\n/**\n * Enables debug mode which will log information about all executed kernels:\n * the elapsed time of the kernel execution, as well as the rank, shape, and\n * size of the output tensor.\n *\n * Debug mode will significantly slow down your application as it will\n * download the result of every operation to the CPU. This should not be used in\n * production. Debug mode does not affect the timing information of the kernel\n * execution as we do not measure download time in the kernel execution time.\n *\n * See also: `tf.profile`, `tf.memory`.\n *\n * @doc {heading: 'Environment'}\n */\nexport function enableDebugMode() {\n env().set('DEBUG', true);\n}\n/** Globally disables deprecation warnings */\nexport function disableDeprecationWarnings() {\n env().set('DEPRECATION_WARNINGS_ENABLED', false);\n console.warn(`TensorFlow.js deprecation warnings have been disabled.`);\n}\n/** Warn users about deprecated functionality. */\nexport function deprecationWarn(msg) {\n if (env().getBool('DEPRECATION_WARNINGS_ENABLED')) {\n console.warn(msg + ' You can disable deprecation warnings with ' +\n 'tf.disableDeprecationWarnings().');\n }\n}\nsetDeprecationWarningFn(deprecationWarn);\n/**\n * Dispose all variables kept in backend engine.\n *\n * @doc {heading: 'Environment'}\n */\nexport function disposeVariables() {\n ENGINE.disposeVariables();\n}\n/**\n * It returns the global engine that keeps track of all tensors and backends.\n *\n * @doc {heading: 'Environment'}\n */\nexport function engine() {\n return ENGINE;\n}\n/**\n * Returns memory info at the current time in the program. The result is an\n * object with the following properties:\n *\n * - `numBytes`: Number of bytes allocated (undisposed) at this time.\n * - `numTensors`: Number of unique tensors allocated.\n * - `numDataBuffers`: Number of unique data buffers allocated\n * (undisposed) at this time, which is \u2264 the number of tensors\n * (e.g. `a.reshape(newShape)` makes a new Tensor that shares the same\n * data buffer with `a`).\n * - `unreliable`: True if the memory usage is unreliable. 
See `reasons` when\n * `unreliable` is true.\n * - `reasons`: `string[]`, reasons why the memory is unreliable, present if\n * `unreliable` is true.\n *\n * WebGL Properties:\n * - `numBytesInGPU`: Number of bytes allocated (undisposed) in the GPU only at\n * this time.\n *\n * @doc {heading: 'Performance', subheading: 'Memory'}\n */\nexport function memory() {\n return ENGINE.memory();\n}\n/**\n * Executes the provided function `f()` and returns a promise that resolves\n * with information about the function's memory use:\n * - `newBytes`: the number of new bytes allocated\n * - `newTensors`: the number of new tensors created\n * - `peakBytes`: the peak number of bytes allocated\n * - `kernels`: an array of objects for each kernel involved that reports\n * their input and output shapes, number of bytes used, and number of new\n * tensors created.\n *\n * ```js\n * const profile = await tf.profile(() => {\n * const x = tf.tensor1d([1, 2, 3]);\n * let x2 = x.square();\n * x2.dispose();\n * x2 = x.square();\n * x2.dispose();\n * return x;\n * });\n *\n * console.log(`newBytes: ${profile.newBytes}`);\n * console.log(`newTensors: ${profile.newTensors}`);\n * console.log(`byte usage over all kernels: ${profile.kernels.map(k =>\n * k.totalBytesSnapshot)}`);\n * ```\n *\n *\n * @doc {heading: 'Performance', subheading: 'Profile'}\n */\nexport function profile(f) {\n return ENGINE.profile(f);\n}\n/**\n * Executes the provided function `fn` and after it is executed, cleans up all\n * intermediate tensors allocated by `fn` except those returned by `fn`.\n * `fn` must not return a Promise (async functions not allowed). The returned\n * result can be a complex object.\n *\n * Using this method helps avoid memory leaks. In general, wrap calls to\n * operations in `tf.tidy` for automatic memory cleanup.\n *\n * NOTE: Variables do *not* get cleaned up when inside a tidy(). If you want to\n * dispose variables, please use `tf.disposeVariables` or call dispose()\n * directly on variables.\n *\n * ```js\n * // y = 2 ^ 2 + 1\n * const y = tf.tidy(() => {\n * // a, b, and one will be cleaned up when the tidy ends.\n * const one = tf.scalar(1);\n * const a = tf.scalar(2);\n * const b = a.square();\n *\n * console.log('numTensors (in tidy): ' + tf.memory().numTensors);\n *\n * // The value returned inside the tidy function will return\n * // through the tidy, in this case to the variable y.\n * return b.add(one);\n * });\n *\n * console.log('numTensors (outside tidy): ' + tf.memory().numTensors);\n * y.print();\n * ```\n *\n * @param nameOrFn The name of the closure, or the function to execute.\n * If a name is provided, the 2nd argument should be the function.\n * If debug mode is on, the timing and the memory usage of the function\n * will be tracked and displayed on the console using the provided name.\n * @param fn The function to execute.\n *\n * @doc {heading: 'Performance', subheading: 'Memory'}\n */\nexport function tidy(nameOrFn, fn) {\n return ENGINE.tidy(nameOrFn, fn);\n}\n/**\n * Disposes any `tf.Tensor`s found within the provided object.\n *\n * @param container an object that may be a `tf.Tensor` or may directly\n * contain `tf.Tensor`s, such as a `Tensor[]` or `{key: Tensor, ...}`. If\n * the object is not a `tf.Tensor` or does not contain `Tensors`, nothing\n * happens. 
In general it is safe to pass any object here, except that\n * `Promise`s are not supported.\n *\n * @doc {heading: 'Performance', subheading: 'Memory'}\n */\nexport function dispose(container) {\n const tensors = getTensorsInContainer(container);\n tensors.forEach(tensor => tensor.dispose());\n}\n/**\n * Keeps a `tf.Tensor` generated inside a `tf.tidy` from being disposed\n * automatically.\n *\n * ```js\n * let b;\n * const y = tf.tidy(() => {\n * const one = tf.scalar(1);\n * const a = tf.scalar(2);\n *\n * // b will not be cleaned up by the tidy. a and one will be cleaned up\n * // when the tidy ends.\n * b = tf.keep(a.square());\n *\n * console.log('numTensors (in tidy): ' + tf.memory().numTensors);\n *\n * // The value returned inside the tidy function will return\n * // through the tidy, in this case to the variable y.\n * return b.add(one);\n * });\n *\n * console.log('numTensors (outside tidy): ' + tf.memory().numTensors);\n * console.log('y:');\n * y.print();\n * console.log('b:');\n * b.print();\n * ```\n *\n * @param result The tensor to keep from being disposed.\n *\n * @doc {heading: 'Performance', subheading: 'Memory'}\n */\nexport function keep(result) {\n return ENGINE.keep(result);\n}\n/**\n * Executes `f()` and returns a promise that resolves with timing\n * information.\n *\n * The result is an object with the following properties:\n *\n * - `wallMs`: Wall execution time.\n * - `kernelMs`: Kernel execution time, ignoring data transfer. If using the\n * WebGL backend and the query timer extension is not available, this will\n * return an error object.\n * - On `WebGL` The following additional properties exist:\n * - `uploadWaitMs`: CPU blocking time on texture uploads.\n * - `downloadWaitMs`: CPU blocking time on texture downloads (readPixels).\n *\n * ```js\n * const x = tf.randomNormal([20, 20]);\n * const time = await tf.time(() => x.matMul(x));\n *\n * console.log(`kernelMs: ${time.kernelMs}, wallTimeMs: ${time.wallMs}`);\n * ```\n *\n * @param f The function to execute and time.\n *\n * @doc {heading: 'Performance', subheading: 'Timing'}\n */\nexport function time(f) {\n return ENGINE.time(f);\n}\n/**\n * Sets the backend (cpu, webgl, wasm, etc) responsible for creating tensors and\n * executing operations on those tensors. Returns a promise that resolves\n * to a boolean if the backend initialization was successful.\n *\n * Note this disposes the current backend, if any, as well as any tensors\n * associated with it. A new backend is initialized, even if it is of the\n * same type as the previous one.\n *\n * @param backendName The name of the backend. Currently supports\n * `'webgl'|'cpu'` in the browser, `'tensorflow'` under node.js\n * (requires tfjs-node), and `'wasm'` (requires tfjs-backend-wasm).\n *\n * @doc {heading: 'Backends'}\n */\nexport function setBackend(backendName) {\n return ENGINE.setBackend(backendName);\n}\n/**\n * Returns a promise that resolves when the currently selected backend (or the\n * highest priority one) has initialized. Await this promise when you are using\n * a backend that has async initialization.\n *\n * @doc {heading: 'Backends'}\n */\nexport function ready() {\n return ENGINE.ready();\n}\n/**\n * Returns the current backend name (cpu, webgl, etc). 
The backend is\n * responsible for creating tensors and executing operations on those tensors.\n *\n * @doc {heading: 'Backends'}\n */\nexport function getBackend() {\n return ENGINE.backendName;\n}\n/**\n * Removes a backend and the registered factory.\n *\n * @doc {heading: 'Backends'}\n */\nexport function removeBackend(name) {\n ENGINE.removeBackend(name);\n}\n/**\n * Finds the backend registered under the provided name. Returns null if the\n * name is not in the registry, or the registration hasn't finished yet.\n */\nexport function findBackend(name) {\n return ENGINE.findBackend(name);\n}\n/**\n * Finds the backend factory registered under the provided name. Returns a\n * function that produces a new backend when called. Returns null if the name\n * is not in the registry.\n */\nexport function findBackendFactory(name) {\n return ENGINE.findBackendFactory(name);\n}\n/**\n * Registers a global backend. The registration should happen when importing\n * a module file (e.g. when importing `backend_webgl.ts`), and is used for\n * modular builds (e.g. custom tfjs bundle with only webgl support).\n *\n * @param factory The backend factory function. When called, it should\n * return a backend instance, or a promise of an instance.\n * @param priority The priority of the backend (higher = more important).\n * In case multiple backends are registered, the priority is used to find\n * the best backend. Defaults to 1.\n * @return False if there is already a registered backend under this name, true\n * if not.\n *\n * @doc {heading: 'Backends'}\n */\nexport function registerBackend(name, factory, priority = 1) {\n return ENGINE.registerBackend(name, factory, priority);\n}\n/**\n * Gets the current backend. If no backends have been initialized, this will\n * attempt to initialize the best backend. Will throw an error if the highest\n * priority backend has async initialization, in which case, you should call\n * 'await tf.ready()' before running other code.\n *\n * @doc {heading: 'Backends'}\n */\nexport function backend() {\n return ENGINE.backend;\n}\n/**\n * Sets the global platform.\n *\n * @param platformName The name of this platform.\n * @param platform A platform implementation.\n */\nexport function setPlatform(platformName, platform) {\n env().setPlatform(platformName, platform);\n}\n//# sourceMappingURL=globals.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Add } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Adds two `tf.Tensor`s element-wise, A + B. 
Supports broadcasting.\n *\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3, 4]);\n * const b = tf.tensor1d([10, 20, 30, 40]);\n *\n * a.add(b).print(); // or tf.add(a, b)\n * ```\n *\n * ```js\n * // Broadcast add a with b.\n * const a = tf.scalar(5);\n * const b = tf.tensor1d([10, 20, 30, 40]);\n *\n * a.add(b).print(); // or tf.add(a, b)\n * ```\n * @param a The first `tf.Tensor` to add.\n * @param b The second `tf.Tensor` to add. Must have the same type as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction add_(a, b) {\n let $a = convertToTensor(a, 'a', 'add');\n let $b = convertToTensor(b, 'b', 'add');\n [$a, $b] = makeTypesMatch($a, $b);\n const forward = (backend, save) => {\n const res = backend.add($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Add);\n}\nexport const add = op({ add_ });\n//# sourceMappingURL=add.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { FloorDiv } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Divides two `tf.Tensor`s element-wise, A / B. Supports broadcasting.\n * The result is rounded with floor function.\n *\n *\n * ```js\n * const a = tf.tensor1d([1, 4, 9, 16]);\n * const b = tf.tensor1d([1, 2, 3, 4]);\n *\n * a.floorDiv(b).print(); // or tf.div(a, b)\n * ```\n *\n * ```js\n * // Broadcast div a with b.\n * const a = tf.tensor1d([2, 4, 6, 8]);\n * const b = tf.scalar(2);\n *\n * a.floorDiv(b).print(); // or tf.floorDiv(a, b)\n * ```\n *\n * @param a The first tensor as the numerator.\n * @param b The second tensor as the denominator. Must have the same dtype as\n * `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction floorDiv_(a, b) {\n let $a = convertToTensor(a, 'a', 'floorDiv');\n let $b = convertToTensor(b, 'b', 'floorDiv');\n [$a, $b] = makeTypesMatch($a, $b);\n const forward = (backend, save) => {\n const res = backend.floorDiv($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, FloorDiv);\n}\nexport const floorDiv = op({ floorDiv_ });\n//# sourceMappingURL=floorDiv.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Div } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { floorDiv } from './floorDiv';\nimport { op } from './operation';\n/**\n * Divides two `tf.Tensor`s element-wise, A / B. Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([1, 4, 9, 16]);\n * const b = tf.tensor1d([1, 2, 3, 4]);\n *\n * a.div(b).print(); // or tf.div(a, b)\n * ```\n *\n * ```js\n * // Broadcast div a with b.\n * const a = tf.tensor1d([2, 4, 6, 8]);\n * const b = tf.scalar(2);\n *\n * a.div(b).print(); // or tf.div(a, b)\n * ```\n *\n * @param a The first tensor as the numerator.\n * @param b The second tensor as the denominator. Must have the same dtype as\n * `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction div_(a, b) {\n let $a = convertToTensor(a, 'a', 'div');\n let $b = convertToTensor(b, 'b', 'div');\n [$a, $b] = makeTypesMatch($a, $b);\n if ($a.dtype === 'int32' && $b.dtype === 'int32') {\n return floorDiv($a, $b);\n }\n const forward = (backend, save) => {\n const res = backend.realDivide($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n const attrs = {};\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Div, attrs);\n}\nexport const div = op({ div_ });\n//# sourceMappingURL=div.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Multiply } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Multiplies two `tf.Tensor`s element-wise, A * B. 
Supports broadcasting.\n *\n * We also expose `tf.mulStrict` which has the same signature as this op and\n * asserts that `a` and `b` are the same shape (does not broadcast).\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3, 4]);\n * const b = tf.tensor1d([2, 3, 4, 5]);\n *\n * a.mul(b).print(); // or tf.mul(a, b)\n * ```\n *\n * ```js\n * // Broadcast mul a with b.\n * const a = tf.tensor1d([1, 2, 3, 4]);\n * const b = tf.scalar(5);\n *\n * a.mul(b).print(); // or tf.mul(a, b)\n * ```\n * @param a The first tensor to multiply.\n * @param b The second tensor to multiply. Must have the same dtype as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction mul_(a, b) {\n let $a = convertToTensor(a, 'a', 'mul');\n let $b = convertToTensor(b, 'b', 'mul');\n [$a, $b] = makeTypesMatch($a, $b);\n const forward = (backend, save) => {\n const res = backend.multiply($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Multiply);\n}\nexport const mul = op({ mul_ });\n//# sourceMappingURL=mul.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Abs } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes absolute value element-wise: `abs(x)`\n *\n * ```js\n * const x = tf.tensor1d([-1, 2, -3, 4]);\n *\n * x.abs().print(); // or tf.abs(x)\n * ```\n * @param x The input `tf.Tensor`.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction abs_(x) {\n const $x = convertToTensor(x, 'x', 'abs');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n save([$x]);\n if ($x.dtype === 'complex64') {\n return backend.complexAbs($x);\n }\n return backend.abs($x);\n }, inputs, null /* grad */, Abs);\n}\nexport const abs = op({ abs_ });\n//# sourceMappingURL=abs.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
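The element-wise arithmetic ops above all broadcast, and as the `div_` kernel shows, an int32/int32 division is routed to `floorDiv`; a short illustration (same import assumption):

```js
import * as tf from '@tensorflow/tfjs';

const a = tf.tensor1d([1, 4, 9, 16]);
const s = tf.scalar(2);

a.add(s).print();        // [3, 6, 11, 18]    broadcast add
a.mul(s).print();        // [2, 8, 18, 32]
a.div(s).print();        // [0.5, 2, 4.5, 8]  float division
a.floorDiv(s).print();   // [0, 2, 4, 8]      rounded toward -Infinity
tf.tensor1d([-1, 2, -3]).abs().print();  // [1, 2, 3]

// with int32 inputs, div() falls back to floorDiv(), per the kernel above
const ai = tf.tensor1d([1, 4, 9, 16], 'int32');
ai.div(tf.scalar(2, 'int32')).print();   // [0, 2, 4, 8]
```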
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Acos } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes acos of the input `tf.Tensor` element-wise: `acos(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, .7]);\n *\n * x.acos().print(); // or tf.acos(x)\n * ```\n * @param x The input tensor.\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction acos_(x) {\n const $x = convertToTensor(x, 'x', 'acos');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.acos($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Acos);\n}\nexport const acos = op({ acos_ });\n//# sourceMappingURL=acos.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Acosh } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes the inverse hyperbolic cos of the input `tf.Tensor` element-wise:\n * `acosh(x)`\n *\n * ```js\n * const x = tf.tensor1d([10, 1, 3, 5.7]);\n *\n * x.acosh().print(); // or tf.acosh(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction acosh_(x) {\n const $x = convertToTensor(x, 'x', 'acosh');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.acosh($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Acosh);\n}\nexport const acosh = op({ acosh_ });\n//# sourceMappingURL=acosh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { AddN } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * Adds a list of `tf.Tensor`s element-wise, each with the same shape and dtype.\n *\n * ```js\n * const a = tf.tensor1d([1, 2]);\n * const b = tf.tensor1d([3, 4]);\n * const c = tf.tensor1d([5, 6]);\n *\n * tf.addN([a, b, c]).print();\n * ```\n * @param tensors A list of tensors with the same shape and dtype.\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction addN_(tensors) {\n util.assert(Array.isArray(tensors), () => 'The argument passed to tf.addN() must be a list of tensors');\n util.assert(tensors.length >= 1, () => `Must pass at least one tensor to tf.addN(), but got ` +\n `${tensors.length}`);\n const $tensors = tensors.map((t, i) => convertToTensor(t, `tensors${i}`, 'addN'));\n const firstTensor = $tensors[0];\n $tensors.forEach(t => {\n if (t.dtype !== firstTensor.dtype) {\n throw new Error('All tensors passed to tf.addN() must have the same dtype');\n }\n });\n $tensors.forEach(t => {\n if (!util.arraysEqual(t.shape, firstTensor.shape)) {\n throw new Error('All tensors passed to tf.addN() must have the same shape');\n }\n });\n const forward = (backend, save) => {\n const res = backend.addN($tensors);\n save($tensors);\n return res;\n };\n const inputs = $tensors;\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, AddN);\n}\nexport const addN = op({ addN_ });\n//# sourceMappingURL=add_n.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as util from '../util';\n/**\n * Returns true if the axis specifies the inner most dimensions of the\n * array.\n */\nexport function axesAreInnerMostDims(axes, rank) {\n for (let i = 0; i < axes.length; ++i) {\n if (axes[axes.length - i - 1] !== rank - 1 - i) {\n return false;\n }\n }\n return true;\n}\nexport function combineLocations(outputLoc, reduceLoc, axes) {\n const rank = outputLoc.length + reduceLoc.length;\n const loc = [];\n let outIdx = 0;\n let reduceIdx = 0;\n for (let dim = 0; dim < rank; dim++) {\n if (axes.indexOf(dim) === -1) {\n loc.push(outputLoc[outIdx++]);\n }\n else {\n loc.push(reduceLoc[reduceIdx++]);\n }\n }\n return loc;\n}\nexport function computeOutAndReduceShapes(aShape, axes) {\n const outShape = [];\n const rank = aShape.length;\n for (let dim = 0; dim < rank; dim++) {\n if (axes.indexOf(dim) === -1) {\n outShape.push(aShape[dim]);\n }\n }\n const reduceShape = axes.map(dim => aShape[dim]);\n return [outShape, reduceShape];\n}\nexport function expandShapeToKeepDim(shape, axes) {\n const reduceSubShape = axes.map(x => 1);\n return combineLocations(shape, reduceSubShape, axes);\n}\nexport function assertAxesAreInnerMostDims(msg, axes, rank) {\n util.assert(axesAreInnerMostDims(axes, rank), () => `${msg} supports only inner-most axes for now. ` +\n `Got axes ${axes} and rank-${rank} input.`);\n}\n/**\n * Returns the axes permutation to be used with `tf.transpose`, if such\n * permutation is necessary. Otherwise it returns null. This method is used by\n * operations that operate only on inner-most axes.\n */\nexport function getAxesPermutation(axes, rank) {\n if (axesAreInnerMostDims(axes, rank)) {\n return null;\n }\n const result = [];\n for (let i = 0; i < rank; ++i) {\n if (axes.indexOf(i) === -1) {\n result.push(i);\n }\n }\n axes.forEach(axis => result.push(axis));\n return result;\n}\n/** Returns the axes permutation that undoes the original permutation. */\nexport function getUndoAxesPermutation(axes) {\n return axes.map((axis, i) => [i, axis])\n .sort((a, b) => a[1] - b[1])\n .map(x => x[0]);\n}\nexport function getInnerMostAxes(numAxes, rank) {\n const res = [];\n for (let i = rank - numAxes; i < rank; ++i) {\n res.push(i);\n }\n return res;\n}\n//# sourceMappingURL=axis_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { All } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam } from '../util';\nimport { expandShapeToKeepDim, getAxesPermutation, getInnerMostAxes } from './axis_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { transpose } from './transpose';\n/**\n * Computes the logical and of elements across dimensions of a `tf.Tensor`.\n *\n * Reduces the input along the dimensions given in `axes`. Unless `keepDims`\n * is true, the rank of the `tf.Tensor` is reduced by 1 for each entry in\n * `axes`. If `keepDims` is true, the reduced dimensions are retained with\n * length 1. If `axes` has no entries, all dimensions are reduced, and an\n * `tf.Tensor` with a single element is returned.\n *\n * ```js\n * const x = tf.tensor1d([1, 1, 1], 'bool');\n *\n * x.all().print(); // or tf.all(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 1, 0, 0], [2, 2], 'bool');\n *\n * const axis = 1;\n * x.all(axis).print(); // or tf.all(x, axis)\n * ```\n *\n * @param x The input tensor. Must be of dtype bool.\n * @param axis The dimension(s) to reduce. By default it reduces\n * all dimensions.\n * @param keepDims If true, retains reduced dimensions with size 1.\n *\n * @doc {heading: 'Operations', subheading: 'Reduction'}\n */\nfunction all_(x, axis = null, keepDims = false) {\n let $x = convertToTensor(x, 'x', 'all', 'bool');\n const forward = (backend) => {\n const origAxes = parseAxisParam(axis, $x.shape);\n let axes = origAxes;\n const permutedAxes = getAxesPermutation(axes, $x.rank);\n if (permutedAxes != null) {\n $x = transpose($x, permutedAxes);\n axes = getInnerMostAxes(axes.length, $x.rank);\n }\n const res = backend.all($x, axes);\n if (keepDims) {\n const newShape = expandShapeToKeepDim(res.shape, origAxes);\n return reshape(res, newShape);\n }\n return res;\n };\n const inputs = { x: $x };\n const attrs = { axis, keepDims };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, All, attrs);\n}\nexport const all = op({ all_ });\n//# sourceMappingURL=all.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Any } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam } from '../util';\nimport { expandShapeToKeepDim, getAxesPermutation, getInnerMostAxes } from './axis_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { transpose } from './transpose';\n/**\n * Computes the logical or of elements across dimensions of a `tf.Tensor`.\n *\n * Reduces the input along the dimensions given in `axes`. Unless `keepDims`\n * is true, the rank of the `tf.Tensor` is reduced by 1 for each entry in\n * `axes`. If `keepDims` is true, the reduced dimensions are retained with\n * length 1. If `axes` has no entries, all dimensions are reduced, and an\n * `tf.Tensor` with a single element is returned.\n *\n * ```js\n * const x = tf.tensor1d([1, 1, 1], 'bool');\n *\n * x.any().print(); // or tf.any(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 1, 0, 0], [2, 2], 'bool');\n *\n * const axis = 1;\n * x.any(axis).print(); // or tf.any(x, axis)\n * ```\n *\n * @param x The input tensor. Must be of dtype bool.\n * @param axis The dimension(s) to reduce. By default it reduces\n * all dimensions.\n * @param keepDims If true, retains reduced dimensions with size 1.\n *\n * @doc {heading: 'Operations', subheading: 'Reduction'}\n */\nfunction any_(x, axis = null, keepDims = false) {\n let $x = convertToTensor(x, 'x', 'any', 'bool');\n const forward = (backend) => {\n const origAxes = parseAxisParam(axis, $x.shape);\n let axes = origAxes;\n const permutedAxes = getAxesPermutation(axes, $x.rank);\n if (permutedAxes != null) {\n $x = transpose($x, permutedAxes);\n axes = getInnerMostAxes(axes.length, $x.rank);\n }\n const res = backend.any($x, axes);\n if (keepDims) {\n const newShape = expandShapeToKeepDim(res.shape, origAxes);\n return reshape(res, newShape);\n }\n return res;\n };\n const inputs = { x: $x };\n const attrs = { axis, keepDims };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Any, attrs);\n}\n// tslint:disable-next-line:variable-name\nexport const any = op({ any_ });\n//# sourceMappingURL=any.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. 
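The reduction semantics above (reduce along `axis`, optionally keeping the reduced dimension) can be seen on a 2x2 bool tensor; a brief sketch:

```js
import * as tf from '@tensorflow/tfjs';

const x = tf.tensor2d([1, 1, 0, 0], [2, 2], 'bool');  // [[1, 1], [0, 0]]

x.all().print();         // logical AND over all elements -> false
x.any().print();         // logical OR over all elements  -> true
x.all(1).print();        // per-row AND -> [true, false]
x.any(1, true).print();  // keepDims: result has shape [2, 1]
```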
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { ArgMax } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as axis_util from './axis_util';\nimport { op } from './operation';\nimport { transpose } from './transpose';\n/**\n * Returns the indices of the maximum values along an `axis`.\n *\n * The result has the same shape as `input` with the dimension along `axis`\n * removed.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3]);\n *\n * x.argMax().print(); // or tf.argMax(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 4, 3], [2, 2]);\n *\n * const axis = 1;\n * x.argMax(axis).print(); // or tf.argMax(x, axis)\n * ```\n *\n * @param x The input tensor.\n * @param axis The dimension to reduce. Defaults to 0 (outer-most dimension).\n *\n * @doc {heading: 'Operations', subheading: 'Reduction'}\n */\nfunction argMax_(x, axis = 0) {\n let $x = convertToTensor(x, 'x', 'argMax');\n const forward = (backend, save) => {\n save([$x]);\n let axes = util.parseAxisParam(axis, $x.shape);\n const permutedAxes = axis_util.getAxesPermutation(axes, $x.rank);\n if (permutedAxes != null) {\n $x = transpose($x, permutedAxes);\n axes = axis_util.getInnerMostAxes(axes.length, $x.rank);\n }\n return backend.argMax($x, axes[0]);\n };\n const inputs = { x: $x };\n const attrs = { axis };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, ArgMax, attrs);\n}\nexport const argMax = op({ argMax_ });\n//# sourceMappingURL=arg_max.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { ArgMin } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as axis_util from './axis_util';\nimport { op } from './operation';\nimport { transpose } from './transpose';\n/**\n * Returns the indices of the minimum values along an `axis`.\n *\n * The result has the same shape as `input` with the dimension along `axis`\n * removed.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3]);\n *\n * x.argMin().print(); // or tf.argMin(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 4, 3], [2, 2]);\n *\n * const axis = 1;\n * x.argMin(axis).print(); // or tf.argMin(x, axis)\n * ```\n *\n * @param x The input tensor.\n * @param axis The dimension to reduce. Defaults to 0 (outer-most dimension).\n *\n * @doc {heading: 'Operations', subheading: 'Reduction'}\n */\nfunction argMin_(x, axis = 0) {\n let $x = convertToTensor(x, 'x', 'argMin');\n const forward = (backend, save) => {\n save([$x]);\n if (axis == null) {\n axis = 0;\n }\n let axes = util.parseAxisParam(axis, $x.shape);\n const permutedAxes = axis_util.getAxesPermutation(axes, $x.rank);\n if (permutedAxes != null) {\n $x = transpose($x, permutedAxes);\n axes = axis_util.getInnerMostAxes(axes.length, $x.rank);\n }\n return backend.argMin($x, axes[0]);\n };\n const inputs = { x: $x };\n const attrs = { axis };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, ArgMin, attrs);\n}\nexport const argMin = op({ argMin_ });\n//# sourceMappingURL=arg_min.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
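`argMax`/`argMin` above return indices along a single axis, with that dimension removed from the result shape; for example (same import assumption):

```js
import * as tf from '@tensorflow/tfjs';

const x = tf.tensor2d([1, 2, 4, 3], [2, 2]);  // [[1, 2], [4, 3]]

x.argMax().print();    // along axis 0 (columns) -> [1, 1]
x.argMax(1).print();   // along axis 1 (rows)    -> [1, 0]
x.argMin(1).print();   // along axis 1 (rows)    -> [0, 1]
```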
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Asin } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes asin of the input `tf.Tensor` element-wise: `asin(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, .7]);\n *\n * x.asin().print(); // or tf.asin(x)\n * ```\n * @param x The input tensor.\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction asin_(x) {\n const $x = convertToTensor(x, 'x', 'asin');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.asin($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Asin);\n}\nexport const asin = op({ asin_ });\n//# sourceMappingURL=asin.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Asinh } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes inverse hyperbolic sin of the input `tf.Tensor` element-wise:\n * `asinh(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, .7]);\n *\n * x.asinh().print(); // or tf.asinh(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction asinh_(x) {\n const $x = convertToTensor(x, 'x', 'asinh');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.asinh($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Asinh);\n}\nexport const asinh = op({ asinh_ });\n//# sourceMappingURL=asinh.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Atan } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes atan of the input `tf.Tensor` element-wise: `atan(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, .7]);\n *\n * x.atan().print(); // or tf.atan(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction atan_(x) {\n const $x = convertToTensor(x, 'x', 'atan');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.atan($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Atan);\n}\nexport const atan = op({ atan_ });\n//# sourceMappingURL=atan.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Atan2 } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes arctangent of `tf.Tensor`s a / b element-wise: `atan2(a, b)`.\n * Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([1.0, 1.0, -1.0, .7]);\n * const b = tf.tensor1d([2.0, 13.0, 3.5, .21]);\n *\n * tf.atan2(a, b).print()\n * ```\n *\n * @param a The first tensor.\n * @param b The second tensor. Must have the same dtype as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction atan2_(a, b) {\n let $a = convertToTensor(a, 'a', 'atan2');\n let $b = convertToTensor(b, 'b', 'atan2');\n [$a, $b] = makeTypesMatch($a, $b);\n const forward = (backend, save) => {\n const res = backend.atan2($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Atan2);\n}\nexport const atan2 = op({ atan2_ });\n//# sourceMappingURL=atan2.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
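`atan2` above broadcasts like the binary arithmetic ops and reduces to `atan` when the denominator is 1; a quick check:

```js
import * as tf from '@tensorflow/tfjs';

const y = tf.tensor1d([1, -1]);
const x = tf.scalar(1);        // broadcasts against both elements of y

tf.atan2(y, x).print();        // ~[0.785, -0.785]  (+-pi/4)
y.atan().print();              // same values, since atan2(y, 1) === atan(y)
```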
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Atanh } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes inverse hyperbolic tan of the input `tf.Tensor` element-wise:\n * `atanh(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, .1, -.1, .7]);\n *\n * x.atanh().print(); // or tf.atanh(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction atanh_(x) {\n const $x = convertToTensor(x, 'x', 'atanh');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.atanh($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Atanh);\n}\nexport const atanh = op({ atanh_ });\n//# sourceMappingURL=atanh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as util from '../util';\n/**\n *\n * @param inputShape Input tensor shape is of the following dimensions:\n * `[batch, height, width, inChannels]`.\n * @param filterShape The filter shape is of the following dimensions:\n * `[filterHeight, filterWidth, depth]`.\n * @param strides The strides of the sliding window for each dimension of the\n * input tensor: `[strideHeight, strideWidth]`.\n * If `strides` is a single number,\n * then `strideHeight == strideWidth`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1*1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dataFormat The data format of the input and output data.\n * Defaults to 'NHWC'.\n * @param dilations The dilation rates: `[dilationHeight, dilationWidth]`.\n * Defaults to `[1, 1]`. 
If `dilations` is a single number, then\n * `dilationHeight == dilationWidth`.\n */\nexport function computeDilation2DInfo(inputShape, filterShape, strides, pad, dataFormat = 'NHWC', dilations) {\n // `computerConv2DInfo` require filterShape to be in the dimension of:\n // `[filterHeight, filterWidth, depth, outDepth]`, dilation2d doesn't have\n // outDepth, it should have the same depth as the input.\n // Input shape: [batch, height, width, inChannels]\n const inputChannels = inputShape[3];\n const $filterShape = [...filterShape, inputChannels];\n const $dataFormat = convertConv2DDataFormat(dataFormat);\n return computeConv2DInfo(inputShape, $filterShape, strides, dilations, pad, null /* roundingMode */, null /* depthWise */, $dataFormat);\n}\nexport function computePool2DInfo(inShape, filterSize, strides, dilations, pad, roundingMode, dataFormat = 'channelsLast') {\n const [filterHeight, filterWidth] = parseTupleParam(filterSize);\n let filterShape;\n if (dataFormat === 'channelsLast') {\n filterShape = [filterHeight, filterWidth, inShape[3], inShape[3]];\n }\n else if (dataFormat === 'channelsFirst') {\n filterShape = [filterHeight, filterWidth, inShape[1], inShape[1]];\n }\n else {\n throw new Error(`Unknown dataFormat ${dataFormat}`);\n }\n return computeConv2DInfo(inShape, filterShape, strides, dilations, pad, roundingMode, false, dataFormat);\n}\n/**\n * Computes the information for a forward pass of a pooling3D operation.\n */\nexport function computePool3DInfo(inShape, filterSize, strides, dilations, pad, roundingMode, dataFormat = 'NDHWC') {\n const [filterDepth, filterHeight, filterWidth] = parse3TupleParam(filterSize);\n let filterShape;\n let $dataFormat;\n if (dataFormat === 'NDHWC') {\n $dataFormat = 'channelsLast';\n filterShape =\n [filterDepth, filterHeight, filterWidth, inShape[4], inShape[4]];\n }\n else if (dataFormat === 'NCDHW') {\n $dataFormat = 'channelsFirst';\n filterShape =\n [filterDepth, filterHeight, filterWidth, inShape[1], inShape[1]];\n }\n else {\n throw new Error(`Unknown dataFormat ${dataFormat}`);\n }\n return computeConv3DInfo(inShape, filterShape, strides, dilations, pad, false, $dataFormat, roundingMode);\n}\n/**\n * Computes the information for a forward pass of a convolution/pooling\n * operation.\n */\nexport function computeConv2DInfo(inShape, filterShape, strides, dilations, pad, roundingMode, depthwise = false, dataFormat = 'channelsLast') {\n let [batchSize, inHeight, inWidth, inChannels] = [-1, -1, -1, -1];\n if (dataFormat === 'channelsLast') {\n [batchSize, inHeight, inWidth, inChannels] = inShape;\n }\n else if (dataFormat === 'channelsFirst') {\n [batchSize, inChannels, inHeight, inWidth] = inShape;\n }\n else {\n throw new Error(`Unknown dataFormat ${dataFormat}`);\n }\n const [filterHeight, filterWidth, , filterChannels] = filterShape;\n const [strideHeight, strideWidth] = parseTupleParam(strides);\n const [dilationHeight, dilationWidth] = parseTupleParam(dilations);\n const effectiveFilterHeight = getEffectiveFilterSize(filterHeight, dilationHeight);\n const effectiveFilterWidth = getEffectiveFilterSize(filterWidth, dilationWidth);\n const { padInfo, outHeight, outWidth } = getPadAndOutInfo(pad, inHeight, inWidth, strideHeight, strideWidth, effectiveFilterHeight, effectiveFilterWidth, roundingMode, dataFormat);\n const outChannels = depthwise ? 
filterChannels * inChannels : filterChannels;\n let outShape;\n if (dataFormat === 'channelsFirst') {\n outShape = [batchSize, outChannels, outHeight, outWidth];\n }\n else if (dataFormat === 'channelsLast') {\n outShape = [batchSize, outHeight, outWidth, outChannels];\n }\n return {\n batchSize,\n dataFormat,\n inHeight,\n inWidth,\n inChannels,\n outHeight,\n outWidth,\n outChannels,\n padInfo,\n strideHeight,\n strideWidth,\n filterHeight,\n filterWidth,\n effectiveFilterHeight,\n effectiveFilterWidth,\n dilationHeight,\n dilationWidth,\n inShape,\n outShape,\n filterShape\n };\n}\n/**\n * Computes the information for a forward pass of a 3D convolution/pooling\n * operation.\n */\nexport function computeConv3DInfo(inShape, filterShape, strides, dilations, pad, depthwise = false, dataFormat = 'channelsLast', roundingMode) {\n let [batchSize, inDepth, inHeight, inWidth, inChannels] = [-1, -1, -1, -1, -1];\n if (dataFormat === 'channelsLast') {\n [batchSize, inDepth, inHeight, inWidth, inChannels] = inShape;\n }\n else if (dataFormat === 'channelsFirst') {\n [batchSize, inChannels, inDepth, inHeight, inWidth] = inShape;\n }\n else {\n throw new Error(`Unknown dataFormat ${dataFormat}`);\n }\n const [filterDepth, filterHeight, filterWidth, , filterChannels] = filterShape;\n const [strideDepth, strideHeight, strideWidth] = parse3TupleParam(strides);\n const [dilationDepth, dilationHeight, dilationWidth] = parse3TupleParam(dilations);\n const effectiveFilterDepth = getEffectiveFilterSize(filterDepth, dilationDepth);\n const effectiveFilterHeight = getEffectiveFilterSize(filterHeight, dilationHeight);\n const effectiveFilterWidth = getEffectiveFilterSize(filterWidth, dilationWidth);\n const { padInfo, outDepth, outHeight, outWidth } = get3DPadAndOutInfo(pad, inDepth, inHeight, inWidth, strideDepth, strideHeight, strideWidth, effectiveFilterDepth, effectiveFilterHeight, effectiveFilterWidth, roundingMode);\n const outChannels = depthwise ? filterChannels * inChannels : filterChannels;\n let outShape;\n if (dataFormat === 'channelsFirst') {\n outShape = [batchSize, outChannels, outDepth, outHeight, outWidth];\n }\n else if (dataFormat === 'channelsLast') {\n outShape = [batchSize, outDepth, outHeight, outWidth, outChannels];\n }\n return {\n batchSize,\n dataFormat,\n inDepth,\n inHeight,\n inWidth,\n inChannels,\n outDepth,\n outHeight,\n outWidth,\n outChannels,\n padInfo,\n strideDepth,\n strideHeight,\n strideWidth,\n filterDepth,\n filterHeight,\n filterWidth,\n effectiveFilterDepth,\n effectiveFilterHeight,\n effectiveFilterWidth,\n dilationDepth,\n dilationHeight,\n dilationWidth,\n inShape,\n outShape,\n filterShape\n };\n}\nfunction computeOutputShape2D(inShape, fieldSize, stride, zeroPad, roundingMode) {\n if (zeroPad == null) {\n zeroPad = computeDefaultPad(inShape, fieldSize, stride);\n }\n const inputRows = inShape[0];\n const inputCols = inShape[1];\n const outputRows = conditionalRound((inputRows - fieldSize + 2 * zeroPad) / stride + 1, roundingMode);\n util.assert(util.isInt(outputRows), () => `The output # of rows (${outputRows}) must be an integer. ` +\n `Change the stride and/or zero pad parameters`);\n const outputCols = conditionalRound((inputCols - fieldSize + 2 * zeroPad) / stride + 1, roundingMode);\n util.assert(util.isInt(outputCols), () => `The output # of columns (${outputCols}) must be an integer. 
` +\n `Change the stride and/or zero pad parameters`);\n return [outputRows, outputCols];\n}\nfunction computeOutputShape4D(inShape, fieldSize, outChannels, stride, zeroPad, roundingMode) {\n if (zeroPad == null) {\n zeroPad = computeDefaultPad(inShape, fieldSize, stride);\n }\n const inputDepth = inShape[0];\n const inputRows = inShape[1];\n const inputCols = inShape[2];\n const outputDepths = conditionalRound((inputDepth - fieldSize + 2 * zeroPad) / stride + 1, roundingMode);\n util.assert(util.isInt(outputDepths), () => `The output # of depths (${outputDepths}) must be an integer. ` +\n `Change the stride and/or zero pad parameters`);\n const outputRows = conditionalRound((inputRows - fieldSize + 2 * zeroPad) / stride + 1, roundingMode);\n util.assert(util.isInt(outputRows), () => `The output # of rows (${outputRows}) must be an integer. ` +\n `Change the stride and/or zero pad parameters`);\n const outputCols = conditionalRound((inputCols - fieldSize + 2 * zeroPad) / stride + 1, roundingMode);\n util.assert(util.isInt(outputCols), () => `The output # of columns (${outputCols}) must be an integer. ` +\n `Change the stride and/or zero pad parameters`);\n return [outputDepths, outputRows, outputCols, outChannels];\n}\nexport function computeDefaultPad(inputShape, fieldSize, stride, dilation = 1) {\n const effectiveFieldSize = getEffectiveFilterSize(fieldSize, dilation);\n return Math.floor((inputShape[0] * (stride - 1) - stride + effectiveFieldSize) / 2);\n}\nfunction parseTupleParam(param) {\n if (typeof param === 'number') {\n return [param, param, param];\n }\n if (param.length === 2) {\n return [param[0], param[1], 1];\n }\n return param;\n}\nfunction parse3TupleParam(param) {\n return typeof param === 'number' ? [param, param, param] : param;\n}\n/* See https://www.tensorflow.org/api_docs/python/tf/nn/atrous_conv2d\n * Atrous convolution is equivalent to standard convolution with upsampled\n * filters with effective_filter_height =\n * filter_height + (filter_height - 1) * (dilation - 1)\n * and effective_filter_width =\n * filter_width + (filter_width - 1) * (dilation - 1),\n * produced by inserting dilation - 1 zeros along consecutive elements across\n * the filters' spatial dimensions.\n * When there is a dilation, this converts a filter dimension to the\n * effective filter dimension, so it can be used in a standard convolution.\n */\nfunction getEffectiveFilterSize(filterSize, dilation) {\n if (dilation <= 1) {\n return filterSize;\n }\n return filterSize + (filterSize - 1) * (dilation - 1);\n}\nfunction getPadAndOutInfo(pad, inHeight, inWidth, strideHeight, strideWidth, filterHeight, filterWidth, roundingMode, dataFormat) {\n let padInfo;\n let outHeight;\n let outWidth;\n if (typeof pad === 'number') {\n const padType = (pad === 0) ? 
'VALID' : 'NUMBER';\n padInfo = { top: pad, bottom: pad, left: pad, right: pad, type: padType };\n const outShape = computeOutputShape2D([inHeight, inWidth], filterHeight, strideHeight, pad, roundingMode);\n outHeight = outShape[0];\n outWidth = outShape[1];\n }\n else if (pad === 'same') {\n outHeight = Math.ceil(inHeight / strideHeight);\n outWidth = Math.ceil(inWidth / strideWidth);\n const padAlongHeight = Math.max(0, (outHeight - 1) * strideHeight + filterHeight - inHeight);\n const padAlongWidth = Math.max(0, (outWidth - 1) * strideWidth + filterWidth - inWidth);\n const top = Math.floor(padAlongHeight / 2);\n const bottom = padAlongHeight - top;\n const left = Math.floor(padAlongWidth / 2);\n const right = padAlongWidth - left;\n padInfo = { top, bottom, left, right, type: 'SAME' };\n }\n else if (pad === 'valid') {\n padInfo = { top: 0, bottom: 0, left: 0, right: 0, type: 'VALID' };\n outHeight = Math.ceil((inHeight - filterHeight + 1) / strideHeight);\n outWidth = Math.ceil((inWidth - filterWidth + 1) / strideWidth);\n }\n else if (typeof pad === 'object') {\n const top = dataFormat === 'channelsLast' ? pad[1][0] : pad[2][0];\n const bottom = dataFormat === 'channelsLast' ? pad[1][1] : pad[2][1];\n const left = dataFormat === 'channelsLast' ? pad[2][0] : pad[3][0];\n const right = dataFormat === 'channelsLast' ? pad[2][1] : pad[3][1];\n const padType = (top === 0 && bottom === 0 && left === 0 && right === 0) ?\n 'VALID' :\n 'EXPLICIT';\n padInfo = { top, bottom, left, right, type: padType };\n outHeight = conditionalRound((inHeight - filterHeight + top + bottom) / strideHeight + 1, roundingMode);\n outWidth = conditionalRound((inWidth - filterWidth + left + right) / strideWidth + 1, roundingMode);\n }\n else {\n throw Error(`Unknown padding parameter: ${pad}`);\n }\n return { padInfo, outHeight, outWidth };\n}\nfunction get3DPadAndOutInfo(pad, inDepth, inHeight, inWidth, strideDepth, strideHeight, strideWidth, filterDepth, filterHeight, filterWidth, roundingMode) {\n let padInfo;\n let outDepth;\n let outHeight;\n let outWidth;\n if (typeof pad === 'number') {\n const padType = (pad === 0) ? 
'VALID' : 'NUMBER';\n padInfo = {\n top: pad,\n bottom: pad,\n left: pad,\n right: pad,\n front: pad,\n back: pad,\n type: padType\n };\n const outShape = computeOutputShape4D([inDepth, inHeight, inWidth, 1], filterDepth, 1, strideDepth, pad, roundingMode);\n outDepth = outShape[0];\n outHeight = outShape[1];\n outWidth = outShape[2];\n }\n else if (pad === 'same') {\n outDepth = Math.ceil(inDepth / strideDepth);\n outHeight = Math.ceil(inHeight / strideHeight);\n outWidth = Math.ceil(inWidth / strideWidth);\n const padAlongDepth = (outDepth - 1) * strideDepth + filterDepth - inDepth;\n const padAlongHeight = (outHeight - 1) * strideHeight + filterHeight - inHeight;\n const padAlongWidth = (outWidth - 1) * strideWidth + filterWidth - inWidth;\n const front = Math.floor(padAlongDepth / 2);\n const back = padAlongDepth - front;\n const top = Math.floor(padAlongHeight / 2);\n const bottom = padAlongHeight - top;\n const left = Math.floor(padAlongWidth / 2);\n const right = padAlongWidth - left;\n padInfo = { top, bottom, left, right, front, back, type: 'SAME' };\n }\n else if (pad === 'valid') {\n padInfo = {\n top: 0,\n bottom: 0,\n left: 0,\n right: 0,\n front: 0,\n back: 0,\n type: 'VALID'\n };\n outDepth = Math.ceil((inDepth - filterDepth + 1) / strideDepth);\n outHeight = Math.ceil((inHeight - filterHeight + 1) / strideHeight);\n outWidth = Math.ceil((inWidth - filterWidth + 1) / strideWidth);\n }\n else {\n throw Error(`Unknown padding parameter: ${pad}`);\n }\n return { padInfo, outDepth, outHeight, outWidth };\n}\n/**\n * Rounds a value depending on the rounding mode\n * @param value\n * @param roundingMode\n */\nfunction conditionalRound(value, roundingMode) {\n if (!roundingMode) {\n return value;\n }\n switch (roundingMode) {\n case 'round':\n // used for Caffe Conv\n return Math.round(value);\n case 'ceil':\n // used for Caffe Pool\n return Math.ceil(value);\n case 'floor':\n return Math.floor(value);\n default:\n throw new Error(`Unknown roundingMode ${roundingMode}`);\n }\n}\nexport function tupleValuesAreOne(param) {\n const [dimA, dimB, dimC] = parseTupleParam(param);\n return dimA === 1 && dimB === 1 && dimC === 1;\n}\nexport function eitherStridesOrDilationsAreOne(strides, dilations) {\n return tupleValuesAreOne(strides) || tupleValuesAreOne(dilations);\n}\n/**\n * Convert Conv2D dataFormat from 'NHWC'|'NCHW' to\n * 'channelsLast'|'channelsFirst'\n * @param dataFormat in 'NHWC'|'NCHW' mode\n * @return dataFormat in 'channelsLast'|'channelsFirst' mode\n * @throws unknown dataFormat\n */\nexport function convertConv2DDataFormat(dataFormat) {\n if (dataFormat === 'NHWC') {\n return 'channelsLast';\n }\n else if (dataFormat === 'NCHW') {\n return 'channelsFirst';\n }\n else {\n throw new Error(`Unknown dataFormat ${dataFormat}`);\n }\n}\n//# sourceMappingURL=conv_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
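The padding arithmetic in `getPadAndOutInfo`/`computeOutputShape2D` above reduces to the usual output-size formulas: `'same'` gives `ceil(in / stride)`, `'valid'` gives `ceil((in - effectiveFilter + 1) / stride)`, and a numeric pad gives `(in - effectiveFilter + 2 * pad) / stride + 1` (asserted to be an integer). A hypothetical single-dimension helper mirroring that logic:

```js
// Mirrors the 2D output-size logic above for one spatial dimension.
// `pad` may be 'same', 'valid', or a non-negative integer.
function outSize(inSize, filterSize, stride, pad, dilation = 1) {
  const effective = filterSize + (filterSize - 1) * (dilation - 1);
  if (pad === 'same') return Math.ceil(inSize / stride);
  if (pad === 'valid') return Math.ceil((inSize - effective + 1) / stride);
  return (inSize - effective + 2 * pad) / stride + 1;  // library asserts this is an integer
}

console.log(outSize(224, 3, 2, 'same'));   // 112
console.log(outSize(224, 3, 2, 'valid'));  // 111
console.log(outSize(224, 3, 1, 1));        // 224
```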
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { AvgPool } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { cast } from './cast';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the 2D average pooling of an image.\n *\n * @param x The input tensor, of rank 4 or rank 3 of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is assumed.\n * @param filterSize The filter size: `[filterHeight, filterWidth]`. If\n * `filterSize` is a single number, then `filterHeight == filterWidth`.\n * @param strides The strides of the pooling: `[strideHeight, strideWidth]`. If\n * `strides` is a single number, then `strideHeight == strideWidth`.\n * @param pad The type of padding algorithm:\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. If none is provided, it will not round\n * and error if the output is of fractional size.\n */\nfunction avgPool_(x, filterSize, strides, pad, dimRoundingMode) {\n const $x = convertToTensor(x, 'x', 'avgPool', 'float32');\n const dilations = 1;\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in avgPool: Either strides or dilations must be 1. 
' +\n `Got strides ${strides} and dilations '${dilations}'`);\n let x4D = $x;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n reshapedTo4D = true;\n x4D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2]]);\n }\n util.assert(x4D.rank === 4, () => `Error in avgPool: x must be rank 4 but got rank ${x4D.rank}.`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in avgPool: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const forward = (backend, save) => {\n const convInfo = conv_util.computePool2DInfo(x4D.shape, filterSize, strides, 1 /* dilations */, pad, dimRoundingMode);\n save([x4D]);\n if (convInfo.filterWidth === 1 && convInfo.filterHeight === 1 &&\n util.arraysEqual(convInfo.inShape, convInfo.outShape)) {\n return x4D.clone();\n }\n return backend.avgPool(x4D, convInfo);\n };\n const inputs = { x: x4D };\n const attrs = { filterSize, strides, pad, dimRoundingMode };\n let res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, AvgPool, attrs);\n res = cast(res, $x.dtype);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const avgPool = op({ avgPool_ });\n//# sourceMappingURL=avg_pool.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { deprecationWarn } from '../globals';\nimport { AvgPool3D } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { cast } from './cast';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the 3D average pooling.\n *\n * ```js\n * const x = tf.tensor5d([1, 2, 3, 4, 5, 6, 7, 8], [1, 2, 2, 2, 1]);\n * const result = tf.avgPool3d(x, 2, 1, 'valid');\n * result.print();\n * ```\n *\n * @param x The input tensor, of rank 5 or rank 4 of shape\n * `[batch, depth, height, width, inChannels]`.\n * @param filterSize The filter size:\n * `[filterDepth, filterHeight, filterWidth]`.\n * If `filterSize` is a single number,\n * then `filterDepth == filterHeight == filterWidth`.\n * @param strides The strides of the pooling:\n * `[strideDepth, strideHeight, strideWidth]`.\n * If `strides` is a single number,\n * then `strideDepth == strideHeight == strideWidth`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1*1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is 
a number. If none is provided, it will not round\n * and error if the output is of fractional size.\n * @param dataFormat An optional string from: \"NDHWC\", \"NCDHW\". Defaults to\n * \"NDHWC\". Specify the data format of the input and output data. With the\n * default format \"NDHWC\", the data is stored in the order of: [batch,\n * depth, height, width, channels]. Only \"NDHWC\" is currently supported.\n * @param dilations Deprecated, this field will be gone in v3.0.0.\n * The dilation rates:\n * `[dilationDepth, dilationHeight, dilationWidth]`\n * in which we sample input values across the depth, height and width\n * dimensions in dilated pooling.\n * Defaults to `[1, 1, 1]`. If `dilations` is a single number,\n * then `dilationDepth == dilationHeight == dilationWidth`.\n * If it is greater than 1, then all values of `strides` must be 1.\n *\n * @doc {heading: 'Operations', subheading: 'Convolution'}\n */\nfunction avgPool3d_(x, filterSize, strides, pad, dimRoundingMode, dataFormat = 'NDHWC', dilations) {\n if (dilations == null) {\n dilations = [1, 1, 1];\n }\n else {\n deprecationWarn('dilations is deprecated, this field will be gone in ' +\n 'v3.0.0.');\n }\n const $x = convertToTensor(x, 'x', 'avgPool3d', 'float32');\n let x5D = $x;\n let reshapedTo5D = false;\n if ($x.rank === 4) {\n reshapedTo5D = true;\n x5D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2], $x.shape[3]]);\n }\n util.assert(x5D.rank === 5, () => `Error in avgPool3d: x must be rank 5 but got rank ${x5D.rank}.`);\n util.assert(dataFormat === 'NDHWC', () => `Error in avgPool3d: Only NDHWC is currently supported, ` +\n `but got dataFormat of ${dataFormat}`);\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in avgPool3d: Either strides or dilations must be 1. ' +\n `Got strides ${strides} and dilations '${dilations}'`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in avgPool3d: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const forward = (backend, save) => {\n if (dilations == null) {\n dilations = [1, 1, 1];\n }\n const convInfo = conv_util.computePool3DInfo(x5D.shape, filterSize, strides, dilations, pad, dimRoundingMode, dataFormat);\n save([x5D]);\n return backend.avgPool3d(x5D, convInfo);\n };\n const inputs = { x: x5D };\n const attrs = { filterSize, strides, pad, dimRoundingMode, dataFormat, dilations };\n let res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, AvgPool3D, attrs);\n res = cast(res, x5D.dtype);\n if (reshapedTo5D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3], res.shape[4]]);\n }\n return res;\n}\nexport const avgPool3d = op({ avgPool3d_ });\n//# sourceMappingURL=avg_pool_3d.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
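`tf.avgPool` above has no inline snippet of its own, so here is a minimal usage sketch. It assumes `tf` is the TensorFlow.js namespace, as in the library's own ```js examples; a rank-3 input implies a batch of 1.

```js
// 4x4 single-channel image; rank-3 input is treated as a batch of 1.
const img = tf.tensor3d(
    [1, 2, 3, 4,
     5, 6, 7, 8,
     9, 10, 11, 12,
     13, 14, 15, 16],
    [4, 4, 1]);

// 2x2 window, stride 2, 'valid' padding: each output value is a window mean.
tf.avgPool(img, 2, 2, 'valid').print();
// ~ [[[3.5 ], [5.5 ]],
//    [[11.5], [13.5]]]
```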
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as util from '../util';\nexport function assertParamsConsistent(shapes, axis) {\n const rank = shapes[0].length;\n shapes.forEach((shape, i) => {\n util.assert(shape.length === rank, () => `Error in concat${rank}D: rank of tensors[${i}] must be the same ` +\n `as the rank of the rest (${rank})`);\n });\n util.assert(axis >= 0 && axis < rank, () => `Error in concat${rank}D: axis must be between 0 and ${rank - 1}.`);\n const firstShape = shapes[0];\n shapes.forEach((shape, i) => {\n for (let r = 0; r < rank; r++) {\n util.assert((r === axis) || (shape[r] === firstShape[r]), () => `Error in concat${rank}D: Shape of tensors[${i}] (${shape}) ` +\n `does not match the shape of the rest (${firstShape}) ` +\n `along the non-concatenated axis ${i}.`);\n }\n });\n}\nexport function computeOutShape(shapes, axis) {\n const outputShape = shapes[0].slice();\n for (let i = 1; i < shapes.length; i++) {\n outputShape[axis] += shapes[i][axis];\n }\n return outputShape;\n}\n//# sourceMappingURL=concat_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Concat } from '../kernel_names';\nimport { convertToTensorArray } from '../tensor_util_env';\nimport { assert, parseAxisParam, sizeFromShape } from '../util';\nimport { assertParamsConsistent, computeOutShape } from './concat_util';\nimport { op } from './operation';\nimport { tensor } from './tensor';\n/**\n * Concatenates a list of `tf.Tensor`s along a given axis.\n *\n * The tensors ranks and types must match, and their sizes must match in all\n * dimensions except `axis`.\n *\n * Also available are stricter rank-specific methods that assert that\n * `tensors` are of the given rank:\n * - `tf.concat1d`\n * - `tf.concat2d`\n * - `tf.concat3d`\n * - `tf.concat4d`\n *\n * Except `tf.concat1d` (which does not have axis param), all methods have\n * same signature as this method.\n *\n * ```js\n * const a = tf.tensor1d([1, 2]);\n * const b = tf.tensor1d([3, 4]);\n * a.concat(b).print(); // or a.concat(b)\n * ```\n *\n * ```js\n * const a = tf.tensor1d([1, 2]);\n * const b = tf.tensor1d([3, 4]);\n * const c = tf.tensor1d([5, 6]);\n * tf.concat([a, b, c]).print();\n * ```\n 
*\n * ```js\n * const a = tf.tensor2d([[1, 2], [10, 20]]);\n * const b = tf.tensor2d([[3, 4], [30, 40]]);\n * const axis = 1;\n * tf.concat([a, b], axis).print();\n * ```\n * @param tensors A list of tensors to concatenate.\n * @param axis The axis to concate along. Defaults to 0 (the first dim).\n *\n * @doc {heading: 'Tensors', subheading: 'Slicing and Joining'}\n */\nfunction concat_(tensors, axis = 0) {\n assert(tensors.length >= 1, () => 'Pass at least one tensor to concat');\n let $tensors = convertToTensorArray(tensors, 'tensors', 'concat');\n if ($tensors[0].dtype === 'complex64') {\n $tensors.forEach(tensor => {\n if (tensor.dtype !== 'complex64') {\n throw new Error(`Cannot concatenate complex64 tensors with a tensor\n with dtype ${tensor.dtype}. `);\n }\n });\n }\n const forward = (backend, save) => {\n const $axis = parseAxisParam(axis, $tensors[0].shape)[0];\n const outShape = computeOutShape($tensors.map(t => t.shape), $axis);\n if (sizeFromShape(outShape) === 0) {\n return tensor([], outShape);\n }\n // Keep only non-empty tensors (ignore tensors with 0 in their shape).\n $tensors = $tensors.filter(t => t.size > 0);\n if ($tensors.length === 1) {\n return $tensors[0];\n }\n const shapes = $tensors.map(t => t.shape);\n assertParamsConsistent(shapes, $axis);\n const res = backend.concat($tensors, $axis);\n save($tensors);\n return res;\n };\n const inputs = $tensors;\n const attr = { axis };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Concat, attr);\n}\nexport const concat = op({ concat_ });\n//# sourceMappingURL=concat.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Sigmoid } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes sigmoid element-wise, `1 / (1 + exp(-x))`\n *\n * ```js\n * const x = tf.tensor1d([0, -1, 2, -3]);\n *\n * x.sigmoid().print(); // or tf.sigmoid(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction sigmoid_(x) {\n const $x = convertToTensor(x, 'x', 'sigmoid');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.sigmoid($x);\n save([res]);\n return res;\n }, inputs, null /* grad */, Sigmoid);\n}\nexport const sigmoid = op({ sigmoid_ });\n//# sourceMappingURL=sigmoid.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Slice } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\nimport * as slice_util from './slice_util';\n/**\n * Extracts a slice from a `tf.Tensor` starting at coordinates `begin`\n * and is of size `size`.\n *\n * Also available are stricter rank-specific methods with the same signature\n * as this method that assert that `x` is of the given rank:\n * - `tf.slice1d`\n * - `tf.slice2d`\n * - `tf.slice3d`\n * - `tf.slice4d`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4]);\n *\n * x.slice([1], [2]).print();\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * x.slice([1, 0], [1, 2]).print();\n * ```\n * @param x The input `tf.Tensor` to slice from.\n * @param begin The coordinates to start the slice from. The length can be\n * less than the rank of x - the rest of the axes will have implicit 0 as\n * start. Can also be a single number, in which case it specifies the\n * first axis.\n * @param size The size of the slice. The length can be less than the rank of\n * x - the rest of the axes will have implicit -1. A value of -1 requests\n * the rest of the dimensions in the axis. Can also be a single number,\n * in which case it specifies the size of the first axis.\n *\n * @doc {heading: 'Tensors', subheading: 'Slicing and Joining'}\n */\nfunction slice_(x, begin, size) {\n const $x = convertToTensor(x, 'x', 'slice');\n if ($x.rank === 0) {\n throw new Error('Slicing scalar is not possible');\n }\n const forward = (backend, save) => {\n const [begin_, size_] = slice_util.parseSliceParams($x, begin, size);\n slice_util.assertParamsValid($x, begin_, size_);\n save([$x]);\n return backend.slice($x, begin_, size_);\n };\n const inputs = { x: $x };\n const attrs = { begin, size };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Slice, attrs);\n}\nexport const slice = op({ slice_ });\n//# sourceMappingURL=slice.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
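The `size` argument to `tf.slice` accepts `-1` to mean "the rest of that axis", which the inline examples above do not show. A small sketch, again assuming the `tf` namespace:

```js
const x = tf.tensor2d([[1, 2, 3],
                       [4, 5, 6]]);

// Start at row 0, column 1; -1 keeps everything remaining along each axis.
x.slice([0, 1], [-1, -1]).print();
// [[2, 3],
//  [5, 6]]
```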
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Tanh } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes hyperbolic tangent of the input `tf.Tensor` element-wise: `tanh(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, 70]);\n *\n * x.tanh().print(); // or tf.tanh(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction tanh_(x) {\n const $x = convertToTensor(x, 'x', 'tanh');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const y = backend.tanh($x);\n save([y]);\n return y;\n }, inputs, null /* grad */, Tanh);\n}\nexport const tanh = op({ tanh_ });\n//# sourceMappingURL=tanh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport { add } from './add';\nimport { concat } from './concat';\nimport { matMul } from './mat_mul';\nimport { mul } from './mul';\nimport { op } from './operation';\nimport { sigmoid } from './sigmoid';\nimport { slice } from './slice';\nimport { tanh } from './tanh';\n/**\n * Computes the next state and output of a BasicLSTMCell.\n *\n * Returns `[newC, newH]`.\n *\n * Derived from tf.contrib.rnn.BasicLSTMCell.\n *\n * @param forgetBias Forget bias for the cell.\n * @param lstmKernel The weights for the cell.\n * @param lstmBias The bias for the cell.\n * @param data The input to the cell.\n * @param c Previous cell state.\n * @param h Previous cell output.\n *\n * @doc {heading: 'Operations', subheading: 'RNN'}\n */\nfunction basicLSTMCell_(forgetBias, lstmKernel, lstmBias, data, c, h) {\n const $forgetBias = convertToTensor(forgetBias, 'forgetBias', 'basicLSTMCell');\n const $lstmKernel = convertToTensor(lstmKernel, 'lstmKernel', 'basicLSTMCell');\n const $lstmBias = convertToTensor(lstmBias, 'lstmBias', 'basicLSTMCell');\n const $data = convertToTensor(data, 'data', 'basicLSTMCell');\n const $c = convertToTensor(c, 'c', 'basicLSTMCell');\n const $h = convertToTensor(h, 'h', 'basicLSTMCell');\n const combined = concat([$data, $h], 1);\n const weighted = matMul(combined, $lstmKernel);\n const res = 
add(weighted, $lstmBias);\n // i = input_gate, j = new_input, f = forget_gate, o = output_gate\n const batchSize = res.shape[0];\n const sliceCols = res.shape[1] / 4;\n const sliceSize = [batchSize, sliceCols];\n const i = slice(res, [0, 0], sliceSize);\n const j = slice(res, [0, sliceCols], sliceSize);\n const f = slice(res, [0, sliceCols * 2], sliceSize);\n const o = slice(res, [0, sliceCols * 3], sliceSize);\n const newC = add(mul(sigmoid(i), tanh(j)), mul($c, sigmoid(add($forgetBias, f))));\n const newH = mul(tanh(newC), sigmoid(o));\n return [newC, newH];\n}\nexport const basicLSTMCell = op({ basicLSTMCell_ });\n//# sourceMappingURL=basic_lstm_cell.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { BatchToSpaceND } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * This operation reshapes the \"batch\" dimension 0 into `M + 1` dimensions of\n * shape `blockShape + [batch]`, interleaves these blocks back into the grid\n * defined by the spatial dimensions `[1, ..., M]`, to obtain a result with\n * the same rank as the input. The spatial dimensions of this intermediate\n * result are then optionally cropped according to `crops` to produce the\n * output. This is the reverse of `tf.spaceToBatchND`. See below for a precise\n * description.\n *\n * ```js\n * const x = tf.tensor4d([1, 2, 3, 4], [4, 1, 1, 1]);\n * const blockShape = [2, 2];\n * const crops = [[0, 0], [0, 0]];\n *\n * x.batchToSpaceND(blockShape, crops).print();\n * ```\n *\n * @param x A `tf.Tensor`. N-D with `x.shape` = `[batch] + spatialShape +\n * remainingShape`, where spatialShape has `M` dimensions.\n * @param blockShape A 1-D array. Must have shape `[M]`, all values must\n * be >= 1.\n * @param crops A 2-D array. Must have shape `[M, 2]`, all values must be >= 0.\n * `crops[i] = [cropStart, cropEnd]` specifies the amount to crop from input\n * dimension `i + 1`, which corresponds to spatial dimension `i`. It is required\n * that `cropStart[i] + cropEnd[i] <= blockShape[i] * inputShape[i + 1]`\n *\n * This operation is equivalent to the following steps:\n *\n * 1. Reshape `x` to `reshaped` of shape: `[blockShape[0], ...,\n * blockShape[M-1], batch / prod(blockShape), x.shape[1], ...,\n * x.shape[N-1]]`\n *\n * 2. Permute dimensions of `reshaped`to produce `permuted` of shape `[batch /\n * prod(blockShape),x.shape[1], blockShape[0], ..., x.shape[M],\n * blockShape[M-1],x.shape[M+1], ..., x.shape[N-1]]`\n *\n * 3. Reshape `permuted` to produce `reshapedPermuted` of shape `[batch /\n * prod(blockShape),x.shape[1] * blockShape[0], ..., x.shape[M] *\n * blockShape[M-1],x.shape[M+1], ..., x.shape[N-1]]`\n *\n * 4. 
Crop the start and end of dimensions `[1, ..., M]` of `reshapedPermuted`\n * according to `crops` to produce the output of shape: `[batch /\n * prod(blockShape),x.shape[1] * blockShape[0] - crops[0,0] - crops[0,1],\n * ..., x.shape[M] * blockShape[M-1] - crops[M-1,0] -\n * crops[M-1,1],x.shape[M+1], ..., x.shape[N-1]]`\n *\n * @doc {heading: 'Tensors', subheading: 'Transformations'}\n */\nfunction batchToSpaceND_(x, blockShape, crops) {\n const $x = convertToTensor(x, 'x', 'batchToSpaceND');\n const prod = blockShape.reduce((a, b) => a * b);\n util.assert($x.rank >= 1 + blockShape.length, () => `input rank is ${$x.rank} but should be > than blockShape.length ${blockShape.length}`);\n util.assert(crops.length === blockShape.length, () => `crops.length is ${crops.length} but should be equal to blockShape.length ${blockShape.length}`);\n util.assert($x.shape[0] % prod === 0, () => `input tensor batch is ${$x.shape[0]} but is not divisible by the product of ` +\n `the elements of blockShape ${blockShape.join(' * ')} === ${prod}`);\n const forward = backend => {\n return backend.batchToSpaceND($x, blockShape, crops);\n };\n const inputs = { x: $x };\n const attrs = { blockShape, crops };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, BatchToSpaceND, attrs);\n}\nexport const batchToSpaceND = op({ batchToSpaceND_ });\n//# sourceMappingURL=batch_to_space_nd.js.map", "import { reshape } from './reshape';\nexport function xAs4D(x) {\n let x4D;\n if (x.rank === 0 || x.rank === 1) {\n x4D = reshape(x, [1, 1, 1, x.size]);\n }\n else if (x.rank === 2) {\n x4D = reshape(x, [1, 1, x.shape[0], x.shape[1]]);\n }\n else if (x.rank === 3) {\n x4D = reshape(x, [1, x.shape[0], x.shape[1], x.shape[2]]);\n }\n else {\n x4D = x;\n }\n return x4D;\n}\n//# sourceMappingURL=batchnorm_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
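`basicLSTMCell` above packs the four gates into a single matmul and then slices them back out as equal column blocks before applying the gate nonlinearities. A minimal usage sketch with input size 1 and hidden size 1, assuming the `tf` namespace; the kernel and bias values here are arbitrary placeholders, not trained parameters.

```js
const forgetBias = tf.scalar(1.0);
const lstmKernel = tf.randomNormal([2, 4]); // [inputSize + hiddenSize, 4 * hiddenSize]
const lstmBias = tf.zeros([4]);             // one bias entry per gate block
const data = tf.tensor2d([[0.5]]);          // [batch, inputSize]
const c = tf.zeros([1, 1]);                 // previous cell state
const h = tf.zeros([1, 1]);                 // previous cell output

// Returns the next cell state and output.
const [newC, newH] = tf.basicLSTMCell(forgetBias, lstmKernel, lstmBias, data, c, h);
newC.print();
newH.print();
```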
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { FusedBatchNorm } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { xAs4D } from './batchnorm_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Batch normalization.\n *\n * As described in\n * [http://arxiv.org/abs/1502.03167](http://arxiv.org/abs/1502.03167).\n *\n * Mean, variance, scale, and offset can be of two shapes:\n * - The same shape as the input.\n * - In the common case, the depth dimension is the last dimension of x, so\n * the values would be an `tf.Tensor1D` of shape [depth].\n *\n * Also available are stricter rank-specific methods with the same signature\n * as this method that assert that parameters passed are of given rank\n * - `tf.batchNorm2d`\n * - `tf.batchNorm3d`\n * - `tf.batchNorm4d`\n *\n * @param x The input Tensor.\n * @param mean A mean Tensor.\n * @param variance A variance Tensor.\n * @param offset An offset Tensor.\n * @param scale A scale Tensor.\n * @param varianceEpsilon A small float number to avoid dividing by 0.\n *\n * @doc {heading: 'Operations', subheading: 'Normalization'}\n */\nfunction batchNorm_(x, mean, variance, offset, scale, varianceEpsilon) {\n if (varianceEpsilon == null) {\n varianceEpsilon = 0.001;\n }\n const $x = convertToTensor(x, 'x', 'batchNorm');\n const $mean = convertToTensor(mean, 'mean', 'batchNorm');\n const $variance = convertToTensor(variance, 'variance', 'batchNorm');\n let $scale;\n if (scale != null) {\n $scale = convertToTensor(scale, 'scale', 'batchNorm');\n }\n let $offset;\n if (offset != null) {\n $offset = convertToTensor(offset, 'offset', 'batchNorm');\n }\n util.assert($mean.rank === $variance.rank, () => 'Batch normalization gradient requires mean and variance to have ' +\n 'equal ranks.');\n util.assert($offset == null || $mean.rank === $offset.rank, () => 'Batch normalization gradient requires mean and offset to have ' +\n 'equal ranks.');\n util.assert($scale == null || $mean.rank === $scale.rank, () => 'Batch normalization gradient requires mean and scale to have ' +\n 'equal ranks.');\n const x4D = xAs4D($x);\n const forward = (backend, save) => {\n save([x4D, $mean, $variance, $scale]);\n return backend.batchNorm(x4D, as1DOr4D($mean), as1DOr4D($variance), as1DOr4D($offset), as1DOr4D($scale), varianceEpsilon);\n };\n const inputs = {\n x: x4D,\n scale: $scale,\n offset: $offset,\n mean: $mean,\n variance: $variance\n };\n const attrs = { varianceEpsilon };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* gradient */, FusedBatchNorm, attrs);\n return reshape(res, $x.shape);\n}\nfunction as1DOr4D(x) {\n if (x == null) {\n return null;\n }\n if (x.rank === 0) {\n // tslint:disable-next-line:no-unnecessary-type-assertion\n return reshape(x, [x.size]);\n }\n else if (x.rank === 1) {\n 
return x;\n }\n else if (x.rank === 2) {\n // tslint:disable-next-line:no-unnecessary-type-assertion\n return reshape(x, [1, 1, x.shape[0], x.shape[1]]);\n }\n else if (x.rank === 3) {\n // tslint:disable-next-line:no-unnecessary-type-assertion\n return reshape(x, [1, x.shape[0], x.shape[1], x.shape[2]]);\n }\n return x;\n}\nexport const batchNorm = op({ batchNorm_ });\n//# sourceMappingURL=batchnorm.js.map", "import { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { batchNorm } from './batchnorm';\nimport { op } from './operation';\n/**\n * Batch normalization, strictly for 2D. For the more relaxed version, see\n * `tf.batchNorm`.\n *\n * @param x The input Tensor.\n * @param mean A mean Tensor.\n * @param variance A variance Tensor.\n * @param offset An offset Tensor.\n * @param scale A scale Tensor.\n * @param varianceEpsilon A small float number to avoid dividing by 0.\n */\nfunction batchNorm2d_(x, mean, variance, offset, scale, varianceEpsilon) {\n const $x = convertToTensor(x, 'x', 'batchNorm');\n const $mean = convertToTensor(mean, 'mean', 'batchNorm');\n const $variance = convertToTensor(variance, 'variance', 'batchNorm');\n let $scale;\n if (scale != null) {\n $scale = convertToTensor(scale, 'scale', 'batchNorm');\n }\n let $offset;\n if (offset != null) {\n $offset = convertToTensor(offset, 'offset', 'batchNorm');\n }\n util.assert($x.rank === 2, () => `Error in batchNorm2D: x must be rank 2 but got rank ` +\n `${$x.rank}.`);\n util.assert($mean.rank === 2 || $mean.rank === 1, () => `Error in batchNorm2D: mean must be rank 2 or rank 1 but ` +\n `got rank ${$mean.rank}.`);\n util.assert($variance.rank === 2 || $variance.rank === 1, () => `Error in batchNorm2D: variance must be rank 2 or rank 1 ` +\n `but got rank ${$variance.rank}.`);\n if ($scale != null) {\n util.assert($scale.rank === 2 || $scale.rank === 1, () => `Error in batchNorm2D: scale must be rank 2 or rank 1 ` +\n `but got rank ${$scale.rank}.`);\n }\n if ($offset != null) {\n util.assert($offset.rank === 2 || $offset.rank === 1, () => `Error in batchNorm2D: offset must be rank 2 or rank 1 ` +\n `but got rank ${$offset.rank}.`);\n }\n return batchNorm($x, $mean, $variance, $offset, $scale, varianceEpsilon);\n}\nexport const batchNorm2d = op({ batchNorm2d_ });\n//# sourceMappingURL=batchnorm2d.js.map", "import { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { batchNorm } from './batchnorm';\nimport { op } from './operation';\n/**\n * Batch normalization, strictly for 3D. 
For the more relaxed version, see\n * `tf.batchNorm`.\n *\n * @param x The input Tensor.\n * @param mean A mean Tensor.\n * @param variance A variance Tensor.\n * @param offset An offset Tensor.\n * @param scale A scale Tensor.\n * @param varianceEpsilon A small float number to avoid dividing by 0.\n */\nfunction batchNorm3d_(x, mean, variance, offset, scale, varianceEpsilon) {\n const $x = convertToTensor(x, 'x', 'batchNorm');\n const $mean = convertToTensor(mean, 'mean', 'batchNorm');\n const $variance = convertToTensor(variance, 'variance', 'batchNorm');\n let $scale;\n if (scale != null) {\n $scale = convertToTensor(scale, 'scale', 'batchNorm');\n }\n let $offset;\n if (offset != null) {\n $offset = convertToTensor(offset, 'offset', 'batchNorm');\n }\n util.assert($x.rank === 3, () => `Error in batchNorm3D: x must be rank 3 but got rank ` +\n `${$x.rank}.`);\n util.assert($mean.rank === 3 || $mean.rank === 1, () => `Error in batchNorm3D: mean must be rank 3 or rank 1 but ` +\n `got rank ${$mean.rank}.`);\n util.assert($variance.rank === 3 || $variance.rank === 1, () => `Error in batchNorm3D: variance must be rank 3 or rank 1 ` +\n `but got rank ${$variance.rank}.`);\n if ($scale != null) {\n util.assert($scale.rank === 3 || $scale.rank === 1, () => `Error in batchNorm3D: scale must be rank 3 or rank 1 ` +\n `but got rank ${$scale.rank}.`);\n }\n if ($offset != null) {\n util.assert($offset.rank === 3 || $offset.rank === 1, () => `Error in batchNorm3D: offset must be rank 3 or rank 1 ` +\n `but got rank ${$offset.rank}.`);\n }\n return batchNorm($x, $mean, $variance, $offset, $scale, varianceEpsilon);\n}\nexport const batchNorm3d = op({ batchNorm3d_ });\n//# sourceMappingURL=batchnorm3d.js.map", "import { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { batchNorm } from './batchnorm';\nimport { op } from './operation';\n/**\n * Batch normalization, strictly for 4D. 
For the more relaxed version, see\n * `tf.batchNorm`.\n *\n * @param x The input Tensor.\n * @param mean A mean Tensor.\n * @param variance A variance Tensor.\n * @param offset An offset Tensor.\n * @param scale A scale Tensor.\n * @param varianceEpsilon A small float number to avoid dividing by 0.\n */\nfunction batchNorm4d_(x, mean, variance, offset, scale, varianceEpsilon) {\n const $x = convertToTensor(x, 'x', 'batchNorm');\n const $mean = convertToTensor(mean, 'mean', 'batchNorm');\n const $variance = convertToTensor(variance, 'variance', 'batchNorm');\n let $scale;\n if (scale != null) {\n $scale = convertToTensor(scale, 'scale', 'batchNorm');\n }\n let $offset;\n if (offset != null) {\n $offset = convertToTensor(offset, 'offset', 'batchNorm');\n }\n util.assert($x.rank === 4, () => `Error in batchNorm4D: x must be rank 4 but got rank ` +\n `${$x.rank}.`);\n util.assert($mean.rank === 4 || $mean.rank === 1, () => `Error in batchNorm4D: mean must be rank 4 or rank 1 but ` +\n `got rank ${$mean.rank}.`);\n util.assert($variance.rank === 4 || $variance.rank === 1, () => `Error in batchNorm4D: variance must be rank 4 or rank 1 ` +\n `but got rank ${$variance.rank}.`);\n if ($scale != null) {\n util.assert($scale.rank === 4 || $scale.rank === 1, () => `Error in batchNorm4D: scale must be rank 4 or rank 1 ` +\n `but got rank ${$scale.rank}.`);\n }\n if ($offset != null) {\n util.assert($offset.rank === 4 || $offset.rank === 1, () => `Error in batchNorm4D: offset must be rank 4 or rank 1 ` +\n `but got rank ${$offset.rank}.`);\n }\n return batchNorm($x, $mean, $variance, $offset, $scale, varianceEpsilon);\n}\nexport const batchNorm4d = op({ batchNorm4d_ });\n//# sourceMappingURL=batchnorm4d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { BroadcastTo } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { clone } from './clone';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Broadcast an array to a compatible shape NumPy-style.\n *\n * The tensor's shape is compared to the broadcast shape from end to beginning.\n * Ones are prepended to the tensor's shape until is has the same length as\n * the broadcast shape. If input.shape[i]==shape[i], the (i+1)-th axis is\n * already broadcast-compatible. 
If input.shape[i]==1 and shape[i]==N, then\n * the input tensor is tiled N times along that axis (using tf.tile).\n *\n * @param input The tensor that is to be broadcasted.\n * @param shape The input is to be broadcast to this shape.\n *\n * @doc {heading: 'Tensors', subheading: 'Transformations'}\n */\nfunction broadcastTo_(x, shape) {\n let input = convertToTensor(x, 'broadcastTo', 'x');\n const xShape = input.shape;\n if (shape.some(d => !(d > 0) || d % 1 !== 0)) {\n throw new Error(`broadcastTo(): Invalid broadcast shape [${shape}].`);\n }\n if (shape.length < input.rank) {\n throw new Error(`broadcastTo(): shape.length=${shape.length} < input.rank=${input.rank}.`);\n }\n if (shape.length > input.rank) {\n const newShape = input.shape.slice();\n while (newShape.length < shape.length) {\n newShape.unshift(1);\n }\n input = reshape(input, newShape);\n }\n const inputShape = input.shape;\n const reps = Array.from(shape);\n for (let i = shape.length - 1; i >= 0; i--) {\n if (inputShape[i] === shape[i]) {\n reps[i] = 1;\n }\n else if (input.shape[i] !== 1) {\n throw new Error(`broadcastTo(): [${xShape}] cannot be broadcast to [${shape}].`);\n }\n }\n const axes = reps.map((n, i) => n > 1 ? i : -1).filter(i => i >= 0);\n if (axes.length === 0) {\n return clone(input);\n }\n const forward = (backend) => backend.tile(input, reps);\n const inputs = { x: input };\n const attrs = { shape, inputShape };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, BroadcastTo, attrs);\n}\nexport const broadcastTo = op({ broadcastTo_ });\n//# sourceMappingURL=broadcast_to.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Ceil } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes ceiling of input `tf.Tensor` element-wise: `ceil(x)`\n *\n * ```js\n * const x = tf.tensor1d([.6, 1.1, -3.3]);\n *\n * x.ceil().print(); // or tf.ceil(x)\n * ```\n * @param x The input Tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction ceil_(x) {\n const $x = convertToTensor(x, 'x', 'ceil');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(backend => backend.ceil($x), inputs, null /* grad */, Ceil);\n}\nexport const ceil = op({ ceil_ });\n//# sourceMappingURL=ceil.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
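`tf.batchNorm` above normalizes per channel, roughly `(x - mean) / sqrt(variance + varianceEpsilon)`, then applies `scale` and `offset` if they are provided. A small sketch with per-channel statistics supplied by hand (assuming the `tf` namespace); the printed values are approximate because of the default epsilon of 0.001.

```js
// Two samples, two channels; per-channel mean and variance supplied directly.
const x = tf.tensor2d([[1, 2],
                       [3, 4]]);
const mean = tf.tensor1d([2, 3]);
const variance = tf.tensor1d([1, 1]);

// No offset/scale: output is (x - mean) / sqrt(variance + 0.001).
tf.batchNorm(x, mean, variance).print();
// ~ [[-0.9995, -0.9995],
//    [ 0.9995,  0.9995]]
```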
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { ClipByValue } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * Clips values element-wise. `max(min(x, clipValueMax), clipValueMin)`\n *\n * ```js\n * const x = tf.tensor1d([-1, 2, -3, 4]);\n *\n * x.clipByValue(-2, 3).print(); // or tf.clipByValue(x, -2, 3)\n * ```\n * @param x The input tensor.\n * @param clipValueMin Lower-bound of range to be clipped to.\n * @param clipValueMax Upper-bound of range to be clipped to.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction clipByValue_(x, clipValueMin, clipValueMax) {\n const $x = convertToTensor(x, 'x', 'clipByValue');\n util.assert((clipValueMin <= clipValueMax), () => `Error in clip: min (${clipValueMin}) must be ` +\n `less than or equal to max (${clipValueMax}).`);\n const inputs = { x: $x };\n const attrs = { clipValueMin, clipValueMax };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.clip($x, clipValueMin, clipValueMax);\n save([$x]);\n return res;\n }, inputs, null /* grad */, ClipByValue, attrs);\n}\nexport const clipByValue = op({ clipByValue_ });\n//# sourceMappingURL=clip_by_value.js.map", "import { concat } from './concat';\nimport { op } from './operation';\n/**\n * Concatenates a list of`tf.Tensor1D`s along an axis. See `concat` for details.\n *\n * For example, if:\n * A: shape(3) = |r1, g1, b1|\n * B: shape(2) = |r2, g2|\n * C = tf.concat1d([A, B]) == |r1, g1, b1, r2, g2|\n *\n * @param tensors A list of`tf.Tensor`s to concatenate.\n * @return The concatenated array.\n */\nfunction concat1d_(tensors) {\n return concat(tensors, 0 /* axis */);\n}\nexport const concat1d = op({ concat1d_ });\n//# sourceMappingURL=concat_1d.js.map", "import { concat } from './concat';\nimport { op } from './operation';\n/**\n * Concatenates a list of`tf.Tensor2D`s along an axis. 
See `concat` for details.\n *\n * For example, if:\n * A: shape(2, 3) = | r1, g1, b1 |\n * | r2, g2, b2 |\n *\n * B: shape(2, 3) = | r3, g3, b3 |\n * | r4, g4, b4 |\n *\n * C = tf.concat2d([A, B], axis)\n *\n * if axis = 0:\n * C: shape(4, 3) = | r1, g1, b1 |\n * | r2, g2, b2 |\n * | r3, g3, b3 |\n * | r4, g4, b4 |\n *\n * if axis = 1:\n * C = shape(2, 6) = | r1, g1, b1, r3, g3, b3 |\n * | r2, g2, b2, r4, g4, b4 |\n *\n *\n * @param tensors A list of `tf.Tensor`s to concatenate.\n * @param axis The axis to concatenate along.\n * @return The concatenated array.\n */\nfunction concat2d_(tensors, axis) {\n return concat(tensors, axis);\n}\nexport const concat2d = op({ concat2d_ });\n//# sourceMappingURL=concat_2d.js.map", "import { concat } from './concat';\nimport { op } from './operation';\n/**\n * Concatenates a list of `tf.Tensor3D`s along an axis.\n * See `concat` for details.\n *\n * For example, if:\n * A: shape(2, 1, 3) = | r1, g1, b1 |\n * | r2, g2, b2 |\n *\n * B: shape(2, 1, 3) = | r3, g3, b3 |\n * | r4, g4, b4 |\n *\n * C = tf.concat3d([A, B], axis)\n *\n * if axis = 0:\n * C: shape(4, 1, 3) = | r1, g1, b1 |\n * | r2, g2, b2 |\n * | r3, g3, b3 |\n * | r4, g4, b4 |\n *\n * if axis = 1:\n * C: shape(2, 2, 3) = | r1, g1, b1, r3, g3, b3 |\n * | r2, g2, b2, r4, g4, b4 |\n *\n * if axis = 2:\n * C = shape(2, 1, 6) = | r1, g1, b1, r3, g3, b3 |\n * | r2, g2, b2, r4, g4, b4 |\n *\n * @param tensors A list of`tf.Tensor`s to concatenate.\n * @param axis The axis to concate along.\n * @return The concatenated array.\n */\nfunction concat3d_(tensors, axis) {\n return concat(tensors, axis);\n}\nexport const concat3d = op({ concat3d_ });\n//# sourceMappingURL=concat_3d.js.map", "import { concat } from './concat';\nimport { op } from './operation';\n/**\n * Concatenates a list of `tf.Tensor4D`s along an axis.\n * See `concat` for details.\n *\n * @param tensors A list of `tf.Tensor`s to concatenate.\n * @param axis The axis to concate along.\n * @return The concatenated array.\n */\nfunction concat4d_(tensors, axis) {\n return concat(tensors, axis);\n}\nexport const concat4d = op({ concat4d_ });\n//# sourceMappingURL=concat_4d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Conv2D } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes a 2D convolution over the input x.\n *\n * @param x The input tensor, of rank 4 or rank 3, of shape\n * `[batch, height, width, inChannels]`. 
If rank 3, batch of 1 is\n * assumed.\n * @param filter The filter, rank 4, of shape\n * `[filterHeight, filterWidth, inDepth, outDepth]`.\n * @param strides The strides of the convolution: `[strideHeight,\n * strideWidth]`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dataFormat: An optional string from: \"NHWC\", \"NCHW\". Defaults to\n * \"NHWC\". Specify the data format of the input and output data. With the\n * default format \"NHWC\", the data is stored in the order of: [batch,\n * height, width, channels].\n * @param dilations The dilation rates: `[dilationHeight, dilationWidth]`\n * in which we sample input values across the height and width dimensions\n * in atrous convolution. Defaults to `[1, 1]`. If `dilations` is a single\n * number, then `dilationHeight == dilationWidth`. If it is greater than\n * 1, then all values of `strides` must be 1.\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. If none is provided, it will not round\n * and error if the output is of fractional size.\n *\n * @doc {heading: 'Operations', subheading: 'Convolution'}\n */\nfunction conv2d_(x, filter, strides, pad, dataFormat = 'NHWC', dilations = [1, 1], dimRoundingMode) {\n const $x = convertToTensor(x, 'x', 'conv2d');\n const $filter = convertToTensor(filter, 'filter', 'conv2d');\n let x4D = $x;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n reshapedTo4D = true;\n x4D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2]]);\n }\n util.assert(x4D.rank === 4, () => `Error in conv2d: input must be rank 4, but got rank ${x4D.rank}.`);\n util.assert($filter.rank === 4, () => `Error in conv2d: filter must be rank 4, but got rank ` +\n `${$filter.rank}.`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in conv2d: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const inDepth = dataFormat === 'NHWC' ? x4D.shape[3] : x4D.shape[1];\n util.assert(inDepth === $filter.shape[2], () => `Error in conv2d: depth of input (${inDepth}) must match ` +\n `input depth for filter ${$filter.shape[2]}.`);\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in conv2D: Either strides or dilations must be 1. 
' +\n `Got strides ${strides} and dilations '${dilations}'`);\n const forward = (backend, save) => {\n const $dataFormat = conv_util.convertConv2DDataFormat(dataFormat);\n const convInfo = conv_util.computeConv2DInfo(x4D.shape, $filter.shape, strides, dilations, pad, dimRoundingMode, false, $dataFormat);\n const res = backend.conv2d(x4D, $filter, convInfo);\n save([x4D, $filter]);\n return res;\n };\n const inputs = { x: x4D, filter: $filter };\n const attrs = { strides, pad, dataFormat, dilations, dimRoundingMode };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, Conv2D, attrs);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const conv2d = op({ conv2d_ });\n//# sourceMappingURL=conv2d.js.map", "import { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { conv2d } from './conv2d';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes a 1D convolution over the input x.\n *\n * @param x The input tensor, of rank 3 or rank 2, of shape\n * `[batch, width, inChannels]`. If rank 2, batch of 1 is assumed.\n * @param filter The filter, rank 3, of shape\n * `[filterWidth, inDepth, outDepth]`.\n * @param stride The number of entries by which the filter is moved right at\n * each step.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dataFormat An optional string from \"NWC\", \"NCW\". Defaults to \"NWC\",\n * the data is stored in the order of [batch, in_width, in_channels]. Only\n * \"NWC\" is currently supported.\n * @param dilation The dilation rate in which we sample input values in\n * atrous convolution. Defaults to `1`. If it is greater than 1, then\n * stride must be `1`.\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. If none is provided, it will not round\n * and error if the output is of fractional size.\n *\n * @doc {heading: 'Operations', subheading: 'Convolution'}\n */\nfunction conv1d_(x, filter, stride, pad, dataFormat = 'NWC', dilation = 1, dimRoundingMode) {\n const $x = convertToTensor(x, 'x', 'conv1d');\n const $filter = convertToTensor(filter, 'filter', 'conv1d');\n let x3D = $x;\n let reshapedTo3D = false;\n if ($x.rank === 2) {\n reshapedTo3D = true;\n x3D = reshape($x, [1, $x.shape[0], $x.shape[1]]);\n }\n util.assert(x3D.rank === 3, () => `Error in conv1d: input must be rank 3, but got rank ${x3D.rank}.`);\n util.assert($filter.rank === 3, () => `Error in conv1d: filter must be rank 3, but got rank ` +\n `${$filter.rank}.`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in conv1d: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n util.assert(x3D.shape[2] === $filter.shape[1], () => `Error in conv1d: depth of input (${x3D.shape[2]}) must match ` +\n `input depth for filter ${$filter.shape[1]}.`);\n util.assert(conv_util.eitherStridesOrDilationsAreOne(stride, dilation), () => 'Error in conv1D: Either stride or dilation must be 1. 
' +\n `Got stride ${stride} and dilation '${dilation}'`);\n util.assert(dataFormat === 'NWC', () => `Error in conv1d: got dataFormat of ${dataFormat} but only NWC is currently supported.`);\n const filter4D = reshape($filter, [1, $filter.shape[0], $filter.shape[1], $filter.shape[2]]);\n const input4D = reshape(x3D, [x3D.shape[0], 1, x3D.shape[1], x3D.shape[2]]);\n const strides = [1, stride];\n const dilations = [1, dilation];\n const conv2dDataFormat = 'NHWC';\n const res = conv2d(input4D, filter4D, strides, pad, conv2dDataFormat, dilations, dimRoundingMode);\n if (reshapedTo3D) {\n return reshape(res, [res.shape[2], res.shape[3]]);\n }\n return reshape(res, [res.shape[0], res.shape[2], res.shape[3]]);\n}\nexport const conv1d = op({ conv1d_ });\n//# sourceMappingURL=conv1d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Conv2DBackpropInput } from '../kernel_names';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the derivative of the input of a 2D convolution.\n *\n * @param xShape The shape of the input: [batch, height, width, inDepth].\n * If length of 3, batch of 1 is assumed.\n * @param dy The derivative of the output, of rank 4 or rank 3 of shape\n * `[batch, outHeight, outWidth, outDepth]`. If rank 3, batch of 1 is\n * assumed.\n * @param filter The filter, rank 4, of shape\n * `[filterHeight, filterWidth, inDepth, outDepth]`.\n * @param strides The strides of the convolution: `[strideHeight,\n * strideWidth]`.\n * @param pad The type of padding algorithm used:\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * @param dataFormat: An optional string from: \"NHWC\", \"NCHW\". Defaults to\n * \"NHWC\". Specify the data format of the input and output data. With the\n * default format \"NHWC\", the data is stored in the order of: [batch,\n * height, width, channels].\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. 
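Neither `conv2d` nor `conv1d` above carries an inline snippet, so here is a minimal `tf.conv2d` sketch: a 3x3 single-channel input convolved with a 2x2 all-ones filter, so each output value is simply the sum of a 2x2 window (assuming the `tf` namespace).

```js
// 3x3 single-channel image; rank-3 input is treated as a batch of 1.
const x = tf.tensor3d([1, 2, 3,
                       4, 5, 6,
                       7, 8, 9], [3, 3, 1]);

// Filter shape is [filterHeight, filterWidth, inDepth, outDepth].
const filter = tf.ones([2, 2, 1, 1]);

// Stride 1, 'valid' padding: output is 2x2, each entry a 2x2 window sum.
tf.conv2d(x, filter, 1, 'valid').print();
// [[[12], [16]],
//  [[24], [28]]]
```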
If none is provided, it will not round\n * and error if the output is of fractional size.\n */\nfunction conv2DBackpropInput_(xShape, dy, filter, strides, pad, dataFormat = 'NHWC', dimRoundingMode) {\n util.assert(xShape.length === dy.rank, () => `Length of inShape ` +\n `(${xShape.length}) and rank of dy (${dy.rank}) must match`);\n let xShape4D = xShape;\n let dy4D = dy;\n let reshapedTo4D = false;\n if (dy.rank === 3) {\n reshapedTo4D = true;\n dy4D = reshape(dy, [1, dy.shape[0], dy.shape[1], dy.shape[2]]);\n xShape4D = [1, xShape[0], xShape[1], xShape[2]];\n }\n util.assert(xShape4D.length === 4, () => `Error in conv2dDerInput: inShape must be length 4, but got length ` +\n `${xShape4D.length}.`);\n util.assert(dy4D.rank === 4, () => `Error in conv2dDerInput: dy must be rank 4, but got ` +\n `rank ${dy4D.rank}`);\n util.assert(filter.rank === 4, () => `Error in conv2dDerInput: filter must be rank 4, but got ` +\n `rank ${filter.rank}`);\n const inDepth = dataFormat === 'NHWC' ? xShape4D[3] : xShape4D[1];\n const outDepth = dataFormat === 'NHWC' ? dy4D.shape[3] : dy4D.shape[1];\n util.assert(inDepth === filter.shape[2], () => `Error in conv2dDerInput: depth of input (${inDepth}) must ` +\n `match input depth for filter ${filter.shape[2]}.`);\n util.assert(outDepth === filter.shape[3], () => `Error in conv2dDerInput: depth of output (${outDepth}) must ` +\n `match output depth for filter ${filter.shape[3]}.`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in conv2dDerInput: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const forward = (backend, save) => {\n const dilations = 1;\n const $dataFormat = conv_util.convertConv2DDataFormat(dataFormat);\n const convInfo = conv_util.computeConv2DInfo(xShape4D, filter.shape, strides, dilations, pad, dimRoundingMode, false, $dataFormat);\n const res = backend.conv2dDerInput(dy4D, filter, convInfo);\n save([dy4D, filter]);\n return res;\n };\n const inputs = { dy: dy4D, filter };\n const attrs = { strides, pad, dataFormat, dimRoundingMode, inputShape: xShape4D };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, Conv2DBackpropInput, attrs);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const conv2DBackpropInput = op({ conv2DBackpropInput_ });\n//# sourceMappingURL=conv2d_backprop_input.js.map", "import { convertToTensor } from '../tensor_util_env';\nimport { conv2DBackpropInput } from './conv2d_backprop_input';\nimport { op } from './operation';\n/**\n * Computes the transposed 2D convolution of an image, also known as a\n * deconvolution.\n *\n * @param x The input image, of rank 4 or rank 3, of shape\n * `[batch, height, width, inDepth]`. If rank 3, batch of 1 is assumed.\n * @param filter The filter, rank 4, of shape\n * `[filterHeight, filterWidth, outDepth, inDepth]`.\n * `inDepth` must match `inDepth` in `x`.\n * @param outputShape Output shape, of rank 4 or rank 3:\n * `[batch, height, width, outDepth]`. If rank 3, batch of 1 is assumed.\n * @param strides The strides of the original convolution:\n * `[strideHeight, strideWidth]`.\n * @param pad The type of padding algorithm used in the non-transpose version\n * of the op.\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. 
If none is provided, it will not round\n * and error if the output is of fractional size.\n *\n * @doc {heading: 'Operations', subheading: 'Convolution'}\n */\nfunction conv2dTranspose_(x, filter, outputShape, strides, pad, dimRoundingMode) {\n const $x = convertToTensor(x, 'x', 'conv2dTranspose');\n const $filter = convertToTensor(filter, 'filter', 'conv2dTranspose');\n return conv2DBackpropInput(outputShape, $x, $filter, strides, pad, 'NHWC', dimRoundingMode);\n}\nexport const conv2dTranspose = op({ conv2dTranspose_ });\n//# sourceMappingURL=conv2d_transpose.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Conv3D } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { eitherStridesOrDilationsAreOne } from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes a 3D convolution over the input x.\n *\n * @param x The input tensor, of rank 5 or rank 4, of shape\n * `[batch, depth, height, width, channels]`. If rank 4,\n * batch of 1 is assumed.\n * @param filter The filter, rank 5, of shape\n * `[filterDepth, filterHeight, filterWidth, inChannels, outChannels]`.\n * inChannels must match between input and filter.\n * @param strides The strides of the convolution: `[strideDepth, strideHeight,\n * strideWidth]`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dataFormat: An optional string from: \"NDHWC\", \"NCDHW\". Defaults to\n * \"NDHWC\". Specify the data format of the input and output data. With the\n * default format \"NDHWC\", the data is stored in the order of: [batch,\n * depth, height, width, channels]. Only \"NDHWC\" is currently supported.\n * @param dilations The dilation rates: `[dilationDepth, dilationHeight,\n * dilationWidth]` in which we sample input values across the height\n * and width dimensions in atrous convolution. Defaults to `[1, 1, 1]`.\n * If `dilations` is a single number, then\n * `dilationDepth == dilationHeight == dilationWidth`. 
If it is greater\n * than 1, then all values of `strides` must be 1.\n *\n * @doc {heading: 'Operations', subheading: 'Convolution'}\n */\nfunction conv3d_(x, filter, strides, pad, dataFormat = 'NDHWC', dilations = [1, 1, 1]) {\n const $x = convertToTensor(x, 'x', 'conv3d');\n const $filter = convertToTensor(filter, 'filter', 'conv3d');\n let x5D = $x;\n let reshapedTo5D = false;\n if ($x.rank === 4) {\n reshapedTo5D = true;\n x5D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2], $x.shape[3]]);\n }\n util.assert(x5D.rank === 5, () => `Error in conv3d: input must be rank 5, but got rank ${x5D.rank}.`);\n util.assert($filter.rank === 5, () => `Error in conv3d: filter must be rank 5, but got rank ` +\n `${$filter.rank}.`);\n util.assert(x5D.shape[4] === $filter.shape[3], () => `Error in conv3d: depth of input (${x5D.shape[4]}) must match ` +\n `input depth for filter ${$filter.shape[3]}.`);\n util.assert(eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in conv3D: Either strides or dilations must be 1. ' +\n `Got strides ${strides} and dilations '${dilations}'`);\n util.assert(dataFormat === 'NDHWC', () => `Error in conv3d: got dataFormat of ${dataFormat} but only NDHWC is currently supported.`);\n const forward = (backend, save) => {\n const convInfo = conv_util.computeConv3DInfo(x5D.shape, $filter.shape, strides, dilations, pad);\n const res = backend.conv3d(x5D, $filter, convInfo);\n save([x5D, $filter]);\n return res;\n };\n const inputs = { x: x5D, filter: $filter };\n const attrs = { strides, pad, dataFormat, dilations };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, Conv3D, attrs);\n if (reshapedTo5D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3], res.shape[4]]);\n }\n return res;\n}\nexport const conv3d = op({ conv3d_ });\n//# sourceMappingURL=conv3d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Conv3DBackpropInputV2 } from '../kernel_names';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the derivative of the input of a 3D convolution.\n *\n * @param xShape The shape of the input: [batch, depth, height, width,\n * in_channels]. 
If length of 4, batch of 1 is assumed.\n * @param dy The derivative of the output, of rank 5 or rank 4 of shape\n * `[batch, outDepth, outHeight, outWidth, in_channels]`.\n * If rank 4, batch of 1 is assumed.\n * @param filter The filter, rank 5, of shape\n * `[filterDepth, filterHeight, filterWidth, inDepth, outDepth]`.\n * @param strides The strides of the convolution: `[strideDepth, strideHeight,\n * strideWidth]`.\n * @param pad The type of padding algorithm used:\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n */\nfunction conv3DBackpropInput_(xShape, dy, filter, strides, pad) {\n util.assert(xShape.length === dy.rank, () => `Length of inShape ` +\n `(${xShape.length}) and rank of dy (${dy.rank}) must match`);\n let xShape5D = xShape;\n let dy5D = dy;\n let reshapedTo5D = false;\n if (dy.rank === 4) {\n reshapedTo5D = true;\n dy5D = reshape(dy, [1, dy.shape[0], dy.shape[1], dy.shape[2], dy.shape[3]]);\n xShape5D = [1, xShape[0], xShape[1], xShape[2], xShape[3]];\n }\n const inDepth = xShape5D[4];\n const outDepth = dy5D.shape[4];\n util.assert(xShape5D.length === 5, () => `Error in conv3dDerInput: inShape must be length 5, but got length ` +\n `${xShape5D.length}.`);\n util.assert(dy5D.rank === 5, () => `Error in conv3dDerInput: dy must be rank 5, but got ` +\n `rank ${dy5D.rank}`);\n util.assert(filter.rank === 5, () => `Error in conv3dDerInput: filter must be rank 5, but got ` +\n `rank ${filter.rank}`);\n util.assert(inDepth === filter.shape[3], () => `Error in conv3dDerInput: depth of input (${inDepth}) must ` +\n `match input depth for filter ${filter.shape[3]}.`);\n util.assert(outDepth === filter.shape[4], () => `Error in conv3dDerInput: depth of output (${outDepth}) must ` +\n `match output depth for filter ${filter.shape[4]}.`);\n const forward = backend => {\n const dilations = 1;\n const convInfo = conv_util.computeConv3DInfo(xShape5D, filter.shape, strides, dilations, pad);\n return backend.conv3dDerInput(dy5D, filter, convInfo);\n };\n const inputs = { dy: dy5D, filter };\n const attrs = { pad, strides, inputShape: xShape5D };\n const res = ENGINE.runKernelFunc(forward, inputs, null, Conv3DBackpropInputV2, attrs);\n if (reshapedTo5D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3], res.shape[4]]);\n }\n return res;\n}\nexport const conv3DBackpropInput = op({ conv3DBackpropInput_ });\n//# sourceMappingURL=conv3d_backprop_input.js.map", "import { convertToTensor } from '../tensor_util_env';\nimport { conv3DBackpropInput } from './conv3d_backprop_input';\nimport { op } from './operation';\n/**\n * Computes the transposed 3D convolution of a volume, also known as a\n * deconvolution.\n *\n * @param x The input image, of rank 5 or rank 4, of shape\n * `[batch, depth, height, width, inDepth]`. If rank 4, batch of 1 is assumed.\n * @param filter The filter, rank 4, of shape\n * `[depth, filterHeight, filterWidth, outDepth, inDepth]`.\n * `inDepth` must match `inDepth` in `x`.\n * @param outputShape Output shape, of rank 5 or rank 4:\n * `[batch, depth, height, width, outDepth]`. 
If rank 3, batch of 1 is\n * assumed.\n * @param strides The strides of the original convolution:\n * `[strideDepth, strideHeight, strideWidth]`.\n * @param pad The type of padding algorithm used in the non-transpose version\n * of the op.\n *\n * @doc {heading: 'Operations', subheading: 'Convolution'}\n */\nfunction conv3dTranspose_(x, filter, outputShape, strides, pad) {\n const $x = convertToTensor(x, 'x', 'conv3dTranspose');\n const $filter = convertToTensor(filter, 'filter', 'conv3dTranspose');\n return conv3DBackpropInput(outputShape, $x, $filter, strides, pad);\n}\nexport const conv3dTranspose = op({ conv3dTranspose_ });\n//# sourceMappingURL=conv3d_transpose.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Cos } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes cos of the input `tf.Tensor` element-wise: `cos(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, Math.PI / 2, Math.PI * 3 / 4]);\n *\n * x.cos().print(); // or tf.cos(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction cos_(x) {\n const $x = convertToTensor(x, 'x', 'cos');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.cos($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Cos);\n}\nexport const cos = op({ cos_ });\n//# sourceMappingURL=cos.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Cosh } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes hyperbolic cos of the input `tf.Tensor` element-wise: `cosh(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, .7]);\n *\n * x.cosh().print(); // or tf.cosh(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction cosh_(x) {\n const $x = convertToTensor(x, 'x', 'cosh');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.cosh($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Cosh);\n}\nexport const cosh = op({ cosh_ });\n//# sourceMappingURL=cosh.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Cumsum } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { getAxesPermutation, getInnerMostAxes, getUndoAxesPermutation } from './axis_util';\nimport { op } from './operation';\nimport { transpose } from './transpose';\n/**\n * Computes the cumulative sum of a `tf.Tensor` along `axis`.\n *\n * ```js\n * const x = tf.tensor([1, 2, 3, 4]);\n * x.cumsum().print();\n * ```\n * ```js\n * const x = tf.tensor([[1, 2], [3, 4]]);\n * x.cumsum().print();\n * ```\n *\n * @param x The input tensor to be summed.\n * @param axis The axis along which to sum. Optional. Defaults to 0.\n * @param exclusive Whether to perform exclusive cumulative sum. Optional.\n * Defaults to false. If set to true then the sum of each tensor entry\n * does not include its own value, but only the values previous to it\n * along the specified axis.\n * @param reverse Whether to sum in the opposite direction. 
Optional.\n * Defaults to false.\n *\n * @doc {heading: 'Operations', subheading: 'Scan'}\n */\nfunction cumsum_(x, axis = 0, exclusive = false, reverse = false) {\n const $x = convertToTensor(x, 'x', 'cumsum');\n const forward = (backend, save) => {\n const permutation = getAxesPermutation([axis], $x.rank);\n let permutedX = $x;\n if (permutation != null) {\n permutedX = transpose($x, permutation);\n }\n const permutedAxis = getInnerMostAxes(1, $x.rank)[0];\n let value = backend.cumsum(permutedX, permutedAxis, exclusive, reverse);\n save([$x]);\n if (permutation != null) {\n const reversePermutation = getUndoAxesPermutation(permutation);\n value = transpose(value, reversePermutation);\n }\n return value;\n };\n const inputs = { x: $x };\n const attrs = { axis, exclusive, reverse };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Cumsum, attrs);\n}\nexport const cumsum = op({ cumsum_ });\n//# sourceMappingURL=cumsum.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { DepthToSpace } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * Rearranges data from depth into blocks of spatial data. More specifically,\n * this op outputs a copy of the input tensor where values from the `depth`\n * dimension are moved in spatial blocks to the `height` and `width` dimensions.\n * The attr `blockSize` indicates the input block size and how the data is\n * moved.\n *\n * - Chunks of data of size `blockSize * blockSize` from depth are rearranged\n * into non-overlapping blocks of size `blockSize x blockSize`\n *\n * - The width the output tensor is `inputWidth * blockSize`, whereas the\n * height is `inputHeight * blockSize`\n *\n * - The Y, X coordinates within each block of the output image are determined\n * by the high order component of the input channel index\n *\n * - The depth of the input tensor must be divisible by `blockSize *\n * blockSize`\n *\n * The `dataFormat` attr specifies the layout of the input and output tensors\n * with the following options: \"NHWC\": [ `batch, height, width, channels` ]\n * \"NCHW\": [ `batch, channels, height, width` ]\n *\n * ```js\n * const x = tf.tensor4d([1, 2, 3, 4], [1, 1, 1, 4]);\n * const blockSize = 2;\n * const dataFormat = \"NHWC\";\n *\n * tf.depthToSpace(x, blockSize, dataFormat).print();\n * ```\n *\n * @param x The input tensor of rank 4\n * @param blockSIze An `int` that is `>= 2`. The size of the spatial block\n * @param dataFormat An optional string from: \"NHWC\", \"NCHW\". 
Defaults to \"NHWC\"\n *\n * @doc {heading: 'Tensors', subheading: 'Transformations'}\n */\nfunction depthToSpace_(x, blockSize, dataFormat = 'NHWC') {\n const $x = convertToTensor(x, 'x', 'depthToSpace');\n const inputHeight = (dataFormat === 'NHWC') ? $x.shape[1] : $x.shape[2];\n const inputWidth = (dataFormat === 'NHWC') ? $x.shape[2] : $x.shape[3];\n const inputDepth = (dataFormat === 'NHWC') ? $x.shape[3] : $x.shape[1];\n util.assert(inputHeight * blockSize >= 0, () => `Negative dimension size caused by overflow when multiplying\n ${inputHeight} and ${blockSize} for depthToSpace with input shape\n ${$x.shape}`);\n util.assert(inputWidth * blockSize >= 0, () => `Negative dimension size caused by overflow when multiplying\n ${inputWidth} and ${blockSize} for depthToSpace with input shape\n ${$x.shape}`);\n util.assert((inputDepth % (blockSize * blockSize) === 0), () => `Dimension size must be evenly divisible by ${blockSize * blockSize} but is ${inputDepth} for depthToSpace with input shape ${$x.shape}`);\n const forward = backend => backend.depthToSpace($x, blockSize, dataFormat);\n const inputs = { x: $x };\n const attrs = { blockSize, dataFormat };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, DepthToSpace, attrs);\n}\nexport const depthToSpace = op({ depthToSpace_ });\n//# sourceMappingURL=depth_to_space.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { DepthwiseConv2dNative } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Depthwise 2D convolution.\n *\n * Given a 4D `input` array and a `filter` array of shape\n * `[filterHeight, filterWidth, inChannels, channelMultiplier]` containing\n * `inChannels` convolutional filters of depth 1, this op applies a\n * different filter to each input channel (expanding from 1 channel to\n * `channelMultiplier` channels for each), then concatenates the results\n * together. The output has `inChannels * channelMultiplier` channels.\n *\n * See\n * [https://www.tensorflow.org/api_docs/python/tf/nn/depthwise_conv2d](\n * https://www.tensorflow.org/api_docs/python/tf/nn/depthwise_conv2d)\n * for more details.\n *\n * @param x The input tensor, of rank 4 or rank 3, of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is\n * assumed.\n * @param filter The filter tensor, rank 4, of shape\n * `[filterHeight, filterWidth, inChannels, channelMultiplier]`.\n * @param strides The strides of the convolution: `[strideHeight,\n * strideWidth]`. 
If strides is a single number, then `strideHeight ==\n * strideWidth`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dilations The dilation rates: `[dilationHeight, dilationWidth]`\n * in which we sample input values across the height and width dimensions\n * in atrous convolution. Defaults to `[1, 1]`. If `rate` is a single\n * number, then `dilationHeight == dilationWidth`. If it is greater than\n * 1, then all values of `strides` must be 1.\n * @param dataFormat: An optional string from: \"NHWC\", \"NCHW\". Defaults to\n * \"NHWC\". Specify the data format of the input and output data. With the\n * default format \"NHWC\", the data is stored in the order of: [batch,\n * height, width, channels]. Only \"NHWC\" is currently supported.\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. If none is provided, it will not round\n * and error if the output is of fractional size.\n *\n * @doc {heading: 'Operations', subheading: 'Convolution'}\n */\nfunction depthwiseConv2d_(x, filter, strides, pad, dataFormat = 'NHWC', dilations = [1, 1], dimRoundingMode) {\n const $x = convertToTensor(x, 'x', 'depthwiseConv2d');\n const $filter = convertToTensor(filter, 'filter', 'depthwiseConv2d');\n let x4D = $x;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n reshapedTo4D = true;\n x4D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2]]);\n }\n util.assert(x4D.rank === 4, () => `Error in depthwiseConv2d: input must be rank 4, but got ` +\n `rank ${x4D.rank}.`);\n util.assert($filter.rank === 4, () => `Error in depthwiseConv2d: filter must be rank 4, but got rank ` +\n `${$filter.rank}.`);\n util.assert(x4D.shape[3] === $filter.shape[2], () => `Error in depthwiseConv2d: number of input channels ` +\n `(${x4D.shape[3]}) must match the inChannels dimension in ` +\n `filter ${$filter.shape[2]}.`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in depthwiseConv2d: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const forward = (backend, save) => {\n if (dilations == null) {\n dilations = [1, 1];\n }\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in depthwiseConv2d: Either strides or dilations must be ' +\n `1. Got strides ${strides} and dilations '${dilations}'`);\n const convInfo = conv_util.computeConv2DInfo(x4D.shape, $filter.shape, strides, dilations, pad, dimRoundingMode, true /* depthwise */);\n const res = backend.depthwiseConv2D(x4D, $filter, convInfo);\n save([x4D, $filter]);\n return res;\n };\n const inputs = { x: x4D, filter: $filter };\n const attrs = { strides, pad, dataFormat, dilations, dimRoundingMode };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, DepthwiseConv2dNative, attrs);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const depthwiseConv2d = op({ depthwiseConv2d_ });\n//# sourceMappingURL=depthwise_conv2d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Diag } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Returns a diagonal tensor with a given diagonal values.\n *\n * Given a diagonal, this operation returns a tensor with the diagonal and\n * everything else padded with zeros.\n *\n * Assume the input has dimensions `[D1,..., Dk]`, then the output is a tensor\n * of rank 2k with dimensions `[D1,..., Dk, D1,..., Dk]`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4]);\n *\n * tf.diag(x).print()\n * ```\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4, 5, 6, 6, 8], [4, 2])\n *\n * tf.diag(x).print()\n * ```\n * @param x The input tensor.\n */\nfunction diag_(x) {\n const $x = convertToTensor(x, 'x', 'diag');\n const forward = backend => {\n const flat = reshape($x, [$x.size]);\n const result = backend.diag(flat);\n const outShape = [...x.shape, ...x.shape];\n return reshape(result, outShape);\n };\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Diag);\n}\nexport const diag = op({ diag_ });\n//# sourceMappingURL=diag.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Dilation2D } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the grayscale dilation over the input `x`.\n *\n * @param x The input tensor, rank 3 or rank 4 of shape\n * `[batch, height, width, inChannels]`. 
If rank 3, batch of 1 is assumed.\n * @param filter The filter tensor, rank 3, of shape\n * `[filterHeight, filterWidth, depth]`.\n * @param strides The strides of the sliding window for each dimension of the\n * input tensor: `[strideHeight, strideWidth]`.\n * If `strides` is a single number,\n * then `strideHeight == strideWidth`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1*1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dataFormat Specify the data format of the input and output data.\n * Defaults to 'NHWC'. Only 'NHWC' is currently supported. With the\n * default format \"NHWC\", the data is stored in the order of: [batch,\n * height, width, channels].\n * @param dilations The dilation rates: `[dilationHeight, dilationWidth]`\n * in which we sample input values across the height and width dimensions\n * for atrous morphological dilation. Defaults to `[1, 1]`. If `dilations`\n * is a single number, then `dilationHeight == dilationWidth`. If it is\n * greater than 1, then all values of `strides` must be 1.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction dilation2d_(x, filter, strides, pad, dilations = [1, 1], dataFormat = 'NHWC') {\n const $x = convertToTensor(x, 'x', 'dilation2d');\n const $filter = convertToTensor(filter, 'filter', 'dilation2d');\n util.assert($x.rank === 3 || $x.rank === 4, () => `Error in dilation2d: input must be rank 3 or 4, but got rank ` +\n `${$x.rank}.`);\n util.assert($filter.rank === 3, () => `Error in dilation2d: filter must be rank 3, but got rank ` +\n `${$filter.rank}.`);\n util.assert(dataFormat === 'NHWC', () => `Error in dilation2d: Only NHWC is currently supported, ` +\n `but got dataFormat of ${dataFormat}`);\n let x4D = $x;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n x4D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2]]);\n reshapedTo4D = true;\n }\n const inputs = { x: x4D, filter: $filter };\n const attrs = { strides, pad, dilations };\n const res = ENGINE.runKernel(Dilation2D, inputs, attrs);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const dilation2d = op({ dilation2d_ });\n//# sourceMappingURL=dilation2d.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * Returns the dimensions in the input shape that are broadcasted to\n * produce the provided output shape.\n *\n * The returned dimensions are 0-indexed and sorted. An example:\n * inShape = [4, 1, 3]\n * outShape = [5, 4, 3, 3]\n * result = [1]. 
Dimension 1 (2nd dimension of input) gets broadcasted 1 => 3.\n */\nexport function getBroadcastDims(inShape, outShape) {\n const inRank = inShape.length;\n const dims = [];\n for (let i = 0; i < inRank; i++) {\n const dim = inRank - 1 - i;\n const a = inShape[dim] || 1;\n const b = outShape[outShape.length - 1 - i] || 1;\n if (b > 1 && a === 1) {\n dims.unshift(dim);\n }\n }\n return dims;\n}\n/**\n * Returns the axes in the output space that should be reduced to produce\n * the input space.\n */\nexport function getReductionAxes(inShape, outShape) {\n const result = [];\n for (let i = 0; i < outShape.length; i++) {\n const inDim = inShape[inShape.length - i - 1];\n const outAxis = outShape.length - i - 1;\n const outDim = outShape[outAxis];\n if (inDim == null || (inDim === 1 && outDim > 1)) {\n result.unshift(outAxis);\n }\n }\n return result;\n}\nexport function assertAndGetBroadcastShape(shapeA, shapeB) {\n const result = [];\n const l = Math.max(shapeA.length, shapeB.length);\n for (let i = 0; i < l; i++) {\n let a = shapeA[shapeA.length - i - 1];\n if (a == null) {\n a = 1;\n }\n let b = shapeB[shapeB.length - i - 1];\n if (b == null) {\n b = 1;\n }\n if (a === 1) {\n result.unshift(b);\n }\n else if (b === 1) {\n result.unshift(a);\n }\n else if (a !== b) {\n const errMsg = `Operands could not be broadcast together with shapes ` +\n `${shapeA} and ${shapeB}.`;\n throw Error(errMsg);\n }\n else {\n result.unshift(a);\n }\n }\n return result;\n}\n//# sourceMappingURL=broadcast_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Equal } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { op } from './operation';\n/**\n * Returns the truth value of (a == b) element-wise. Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3]);\n * const b = tf.tensor1d([2, 2, 2]);\n *\n * a.equal(b).print();\n * ```\n *\n * @param a The first input tensor.\n * @param b The second input tensor. Must have the same dtype as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Logical'}\n */\nfunction equal_(a, b) {\n let $a = convertToTensor(a, 'a', 'equal');\n let $b = convertToTensor(b, 'b', 'equal');\n [$a, $b] = makeTypesMatch($a, $b);\n assertAndGetBroadcastShape($a.shape, $b.shape);\n const forward = backend => backend.equal($a, $b);\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null, Equal);\n}\nexport const equal = op({ equal_ });\n//# sourceMappingURL=equal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { SelectV2 } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assert, assertShapesMatch } from '../util';\nimport { broadcastTo } from './broadcast_to';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { op } from './operation';\n/**\n * Returns the elements, either `a` or `b` depending on the `condition`.\n *\n * If the condition is true, select from `a`, otherwise select from `b`.\n *\n * ```js\n * const cond = tf.tensor1d([false, false, true], 'bool');\n * const a = tf.tensor1d([1 , 2, 3]);\n * const b = tf.tensor1d([-1, -2, -3]);\n *\n * a.where(cond, b).print();\n * ```\n *\n * @param condition The input condition. Must be of dtype bool.\n * @param a If `condition` is rank 1, `a` may have a higher rank but\n * its first dimension must match the size of `condition`.\n * @param b A tensor with the same dtype as `a` and with shape that is\n * compatible with `a`.\n * @return A tensor with same dtype as `a` and `b`, and shape that is\n * broadcastable from `a` and `b`.\n *\n * @doc {heading: 'Operations', subheading: 'Logical'}\n */\nfunction where_(condition, a, b) {\n const $a = convertToTensor(a, 'a', 'where');\n const $b = convertToTensor(b, 'b', 'where');\n const $condition = convertToTensor(condition, 'condition', 'where', 'bool');\n // TODO: move this logic to forward function when the broadcastTo op is\n // implemented in WASM.\n // Find the broadcastable shape for $a and $b.\n const broadcastShape = assertAndGetBroadcastShape($a.shape, $b.shape);\n const $broadcastedA = broadcastTo($a, broadcastShape);\n const $broadcastedB = broadcastTo($b, broadcastShape);\n if ($condition.rank === 1) {\n // If condition rank is 1, then the first dimension must match the size of\n // condition.\n assert($condition.shape[0] === $a.shape[0], () => 'The first dimension of `a` must match the size of `condition`.');\n }\n if ($condition.rank !== 1) {\n // A must have the same shape as condition.\n assertShapesMatch($condition.shape, $broadcastedB.shape, 'Error in where: ');\n }\n const forward = (backend, save) => {\n const res = backend.select($condition, $broadcastedA, $broadcastedB);\n save([$condition]);\n return res;\n };\n const inputs = {\n condition: $condition,\n t: $broadcastedA,\n e: $broadcastedB\n };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, SelectV2);\n}\nexport const where = op({ where_ });\n//# sourceMappingURL=where.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { ZerosLike } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Creates a `tf.Tensor` with all elements set to 0 with the same shape as the\n * given tensor.\n *\n * ```js\n * const x = tf.tensor([1, 2]);\n * tf.zerosLike(x).print();\n * ```\n *\n * @param x The tensor of required shape.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nfunction zerosLike_(x) {\n const $x = convertToTensor(x, 'x', 'zerosLike');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(backend => backend.zerosLike($x), inputs, null /* grad */, ZerosLike);\n}\nexport const zerosLike = op({ zerosLike_ });\n//# sourceMappingURL=zeros_like.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { div } from './div';\nimport { equal } from './equal';\nimport { op } from './operation';\nimport { where } from './where';\nimport { zerosLike } from './zeros_like';\n/**\n * Divides two `tf.Tensor`s element-wise, A / B. Supports broadcasting. Return 0\n * if denominator is 0.\n *\n *\n * ```js\n * const a = tf.tensor1d([1, 4, 9, 16]);\n * const b = tf.tensor1d([1, 2, 3, 4]);\n * const c = tf.tensor1d([0, 0, 0, 0]);\n *\n * a.divNoNan(b).print(); // or tf.divNoNan(a, b)\n * a.divNoNan(c).print(); // or tf.divNoNan(a, c)\n * ```\n *\n * ```js\n * // Broadcast div a with b.\n * const a = tf.tensor1d([2, 4, 6, 8]);\n * const b = tf.scalar(2);\n * const c = tf.scalar(0);\n *\n * a.divNoNan(b).print(); // or tf.divNoNan(a, b)\n * a.divNoNan(c).print(); // or tf.divNoNan(a, c)\n * ```\n *\n * @param a The first tensor as the numerator.\n * @param b The second tensor as the denominator. 
Must have the same dtype as\n * `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction divNoNan_(a, b) {\n // TODO: Make this into its own kernel.\n let $a = convertToTensor(a, 'a', 'div');\n let $b = convertToTensor(b, 'b', 'div');\n [$a, $b] = makeTypesMatch($a, $b);\n const divResult = div($a, $b);\n const zeros = zerosLike(divResult);\n const bEqualsZero = equal($b, zeros);\n return where(bEqualsZero, zeros, divResult);\n}\nexport const divNoNan = op({ divNoNan_ });\n//# sourceMappingURL=div_no_nan.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { matMul } from './mat_mul';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the dot product of two matrices and/or vectors, `t1` and `t2`.\n *\n * ```js\n * const a = tf.tensor1d([1, 2]);\n * const b = tf.tensor2d([[1, 2], [3, 4]]);\n * const c = tf.tensor2d([[1, 2, 3], [4, 5, 6]]);\n *\n * a.dot(b).print(); // or tf.dot(a, b)\n * b.dot(a).print();\n * b.dot(c).print();\n * ```\n * @param t1 The first tensor in the dot operation.\n * @param t2 The second tensor in the dot operation.\n *\n * @doc {heading: 'Operations', subheading: 'Matrices'}\n */\nfunction dot_(t1, t2) {\n const $t1 = convertToTensor(t1, 't1', 'dot');\n const $t2 = convertToTensor(t2, 't2', 'dot');\n util.assert(($t1.rank === 1 || $t1.rank === 2) && ($t2.rank === 1 || $t2.rank === 2), () => `Error in dot: inputs must all be rank 1 or 2, but got ranks ` +\n `${$t1.rank} and ${$t2.rank}.`);\n const t1Inner = ($t1.rank === 1 ? $t1.size : $t1.shape[1]);\n const t2Inner = ($t2.rank === 1 ? $t2.size : $t2.shape[0]);\n util.assert(t1Inner === t2Inner, () => `Error in dot: inner dimensions of inputs must match, but got ` +\n `${t1Inner} and ${t2Inner}.`);\n if ($t1.rank === 1 && $t2.rank === 1) {\n const t12D = reshape($t1, [1, -1]);\n const t22D = reshape($t2, [-1, 1]);\n const t1t2 = matMul(t12D, t22D);\n return reshape(t1t2, []);\n }\n else if ($t1.rank === 1 && $t2.rank === 2) {\n const t12D = reshape($t1, [1, -1]);\n const t22D = reshape($t2, [$t2.shape[0], $t2.shape[1]]);\n const t1t2 = matMul(t12D, t22D);\n return reshape(t1t2, [t1t2.size]);\n }\n else if ($t1.rank === 2 && $t2.rank === 1) {\n const t22D = reshape($t2, [-1, 1]);\n const t1t2 = matMul($t1, t22D);\n return reshape(t1t2, [t1t2.size]);\n }\n else {\n const t22D = reshape($t2, [$t2.shape[0], $t2.shape[1]]);\n const t1t2 = matMul($t1, t22D);\n return t1t2;\n }\n}\nexport const dot = op({ dot_ });\n//# sourceMappingURL=dot.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Elu } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes exponential linear element-wise: `x > 0 ? e ^ x - 1 : 0`.\n *\n * ```js\n * const x = tf.tensor1d([-1, 1, -3, 2]);\n *\n * x.elu().print(); // or tf.elu(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction elu_(x) {\n const $x = convertToTensor(x, 'x', 'elu');\n const forward = (backend, save) => {\n const y = backend.elu($x);\n save([y]);\n return y;\n };\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Elu);\n}\nexport const elu = op({ elu_ });\n//# sourceMappingURL=elu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Erf } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { cast } from './cast';\nimport { op } from './operation';\n/**\n * Computes gause error function of the input `tf.Tensor` element-wise:\n * `erf(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, .1, -.1, .7]);\n *\n * x.erf().print(); // or tf.erf(x);\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction erf_(x) {\n let $x = convertToTensor(x, 'x', 'erf');\n util.assert($x.dtype === 'int32' || $x.dtype === 'float32', () => 'Input dtype must be `int32` or `float32`.');\n if ($x.dtype === 'int32') {\n $x = cast($x, 'float32');\n }\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.erf($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Erf);\n}\nexport const erf = op({ erf_ });\n//# sourceMappingURL=erf.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Exp } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes exponential of the input `tf.Tensor` element-wise. `e ^ x`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, -3]);\n *\n * x.exp().print(); // or tf.exp(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction exp_(x) {\n const $x = convertToTensor(x, 'x', 'exp');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.exp($x);\n save([res]);\n return res;\n }, inputs, null /* grad */, Exp);\n}\nexport const exp = op({ exp_ });\n//# sourceMappingURL=exp.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Returns a `tf.Tensor` that has expanded rank, by inserting a dimension\n * into the tensor's shape.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4]);\n * const axis = 1;\n * x.expandDims(axis).print();\n * ```\n *\n * @param x The input tensor whose dimensions to be expanded.\n * @param axis The dimension index at which to insert shape of `1`. Defaults\n * to 0 (the first dimension).\n *\n * @doc {heading: 'Tensors', subheading: 'Transformations'}\n */\nfunction expandDims_(x, axis = 0) {\n const parseAs = null;\n const $x = convertToTensor(x, 'x', 'expandDims', parseAs);\n util.assert(axis <= $x.rank, () => 'Axis must be <= rank of the tensor');\n const newShape = $x.shape.slice();\n if (axis < 0) {\n // Negative value is counted from the tail of rank.\n util.assert(-($x.rank + 1) <= axis, () => `Axis must be in the interval [${-($x.rank + 1)}, ${$x.rank}]`);\n axis = $x.rank + axis + 1;\n }\n newShape.splice(axis, 0, 1);\n return reshape($x, newShape);\n}\nexport const expandDims = op({ expandDims_ });\n//# sourceMappingURL=expand_dims.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Expm1 } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes exponential of the input `tf.Tensor` minus one element-wise.\n * `e ^ x - 1`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, -3]);\n *\n * x.expm1().print(); // or tf.expm1(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction expm1_(x) {\n const $x = convertToTensor(x, 'x', 'expm1');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.expm1($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Expm1);\n}\nexport const expm1 = op({ expm1_ });\n//# sourceMappingURL=expm1.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Tile } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * Construct a tensor by repeating it the number of times given by reps.\n *\n * This operation creates a new tensor by replicating `input` `reps`\n * times. The output tensor's i'th dimension has `input.shape[i] *\n * reps[i]` elements, and the values of `input` are replicated\n * `reps[i]` times along the i'th dimension. 
For example, tiling\n * `[a, b, c, d]` by `[2]` produces `[a, b, c, d, a, b, c, d]`.\n *\n * ```js\n * const a = tf.tensor1d([1, 2]);\n *\n * a.tile([2]).print(); // or a.tile([2])\n * ```\n *\n * ```js\n * const a = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * a.tile([1, 2]).print(); // or a.tile([1, 2])\n * ```\n * @param x The tensor to tile.\n * @param reps Determines the number of replications per dimension.\n *\n * @doc {heading: 'Tensors', subheading: 'Slicing and Joining'}\n */\nfunction tile_(x, reps) {\n const parseAs = null;\n const $x = convertToTensor(x, 'x', 'tile', parseAs);\n util.assert($x.rank === reps.length, () => `Error in transpose: rank of input ${$x.rank} ` +\n `must match length of reps ${reps}.`);\n const forward = (backend, save) => {\n const res = backend.tile($x, reps);\n save([$x]);\n return res;\n };\n const inputsToSave = [$x];\n const inputs = { x: $x };\n const attrs = { reps };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Tile, attrs, inputsToSave);\n}\nexport const tile = op({ tile_ });\n//# sourceMappingURL=tile.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { buffer } from './buffer';\nimport { expandDims } from './expand_dims';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { tile } from './tile';\n/**\n * Create an identity matrix.\n *\n * @param numRows Number of rows.\n * @param numColumns Number of columns. Defaults to `numRows`.\n * @param batchShape If provided, will add the batch shape to the beginning\n * of the shape of the returned `tf.Tensor` by repeating the identity\n * matrix.\n * @param dtype Data type.\n * @returns Identity matrix of the specified size and data type, possibly\n * with batch repetition if `batchShape` is specified.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nfunction eye_(numRows, numColumns, batchShape, dtype = 'float32') {\n if (numColumns == null) {\n numColumns = numRows;\n }\n const buff = buffer([numRows, numColumns], dtype);\n const n = numRows <= numColumns ? 
numRows : numColumns;\n for (let i = 0; i < n; ++i) {\n buff.set(1, i, i);\n }\n const out = reshape(buff.toTensor(), [numRows, numColumns]);\n if (batchShape == null) {\n return out;\n }\n else {\n if (batchShape.length === 1) {\n return tile(expandDims(out, 0), [batchShape[0], 1, 1]);\n }\n else if (batchShape.length === 2) {\n // tslint:disable-next-line:no-unnecessary-type-assertion\n return tile(expandDims(expandDims(out, 0), 0), [batchShape[0], batchShape[1], 1, 1]);\n }\n else if (batchShape.length === 3) {\n // tslint:disable-next-line:no-unnecessary-type-assertion\n return tile(expandDims(expandDims(expandDims(out, 0), 0), 0), [\n batchShape[0], batchShape[1], batchShape[2], 1, 1\n ]);\n }\n else {\n throw new Error(`eye() currently supports only 1D and 2D ` +\n // tslint:disable-next-line:no-any\n `batchShapes, but received ${batchShape.length}D.`);\n }\n }\n}\nexport const eye = op({ eye_ });\n//# sourceMappingURL=eye.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Fill } from '../kernel_names';\n/**\n * Creates a `tf.Tensor` filled with a scalar value.\n *\n * ```js\n * tf.fill([2, 2], 4).print();\n * ```\n *\n * @param shape An array of integers defining the output tensor shape.\n * @param value The scalar value to fill the tensor with.\n * @param dtype The type of an element in the resulting tensor. Defaults to\n * 'float'.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nfunction fill(shape, value, dtype) {\n const attrs = { shape, value, dtype };\n return ENGINE.runKernelFunc(backend => backend.fill(shape, value, dtype), {}, null, Fill, attrs);\n}\nexport { fill };\n//# sourceMappingURL=fill.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Floor } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes floor of input `tf.Tensor` element-wise: `floor(x)`.\n *\n * ```js\n * const x = tf.tensor1d([.6, 1.1, -3.3]);\n *\n * x.floor().print(); // or tf.floor(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction floor_(x) {\n const $x = convertToTensor(x, 'x', 'floor');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(backend => backend.floor($x), inputs, null /* grad */, Floor);\n}\nexport const floor = op({ floor_ });\n//# sourceMappingURL=floor.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * Inputs of size above this threshold will be parallelized by calling multiple\n * shader programs.\n */\nimport { nearestDivisor } from '../util';\nexport const PARALLELIZE_THRESHOLD = 30;\nexport function computeOptimalWindowSize(inSize) {\n if (inSize <= PARALLELIZE_THRESHOLD) {\n return inSize;\n }\n return nearestDivisor(inSize, Math.floor(Math.sqrt(inSize)));\n}\n//# sourceMappingURL=reduce_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { nearestDivisor } from '../util';\nimport { PARALLELIZE_THRESHOLD } from './reduce_util';\nexport function segOpComputeOptimalWindowSize(inSize, numSegments) {\n let done = false;\n let res;\n if (inSize <= PARALLELIZE_THRESHOLD) {\n res = inSize;\n done = true;\n }\n else {\n res = nearestDivisor(inSize, Math.floor(Math.sqrt(inSize)));\n }\n while (!done) {\n if (res > numSegments || res === inSize) {\n done = true;\n }\n else {\n res = nearestDivisor(inSize, res + 1);\n }\n }\n return res;\n}\nexport function computeOutShape(aShape, axis, numSegments) {\n const outShape = [];\n const rank = aShape.length;\n for (let dim = 0; dim < rank; dim++) {\n if (dim !== axis) {\n outShape.push(aShape[dim]);\n }\n else {\n outShape.push(numSegments);\n }\n }\n return outShape;\n}\nexport function collectGatherOpShapeInfo(x, indices, axis) {\n const dimSize = x.shape[axis];\n const outputShape = [];\n let batchSize = 1;\n let sliceSize = 1;\n for (let i = 0; i < axis; i++) {\n outputShape.push(x.shape[i]);\n batchSize *= x.shape[i];\n }\n for (let i = 0; i < indices.rank; i++) {\n outputShape.push(indices.shape[i]);\n }\n for (let i = axis + 1; i < x.rank; i++) {\n outputShape.push(x.shape[i]);\n sliceSize *= x.shape[i];\n }\n return { batchSize, sliceSize, dimSize, outputShape };\n}\n//# sourceMappingURL=segment_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { GatherV2 } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam } from '../util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { collectGatherOpShapeInfo } from './segment_util';\n/**\n * Gather slices from tensor `x`'s axis `axis` according to `indices`.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4]);\n * const indices = tf.tensor1d([1, 3, 3], 'int32');\n *\n * x.gather(indices).print();\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n * const indices = tf.tensor1d([1, 1, 0], 'int32');\n *\n * x.gather(indices).print();\n * ```\n * @param x The input tensor whose slices to be gathered.\n * @param indices The indices of the values to extract.\n * @param axis The axis over which to select values. Defaults to 0.\n *\n * @doc {heading: 'Tensors', subheading: 'Slicing and Joining'}\n */\nfunction gather_(x, indices, axis = 0) {\n const $x = convertToTensor(x, 'x', 'gather');\n const $indices = convertToTensor(indices, 'indices', 'gather', 'int32');\n const inputs = { x: $x, indices: $indices };\n const attrs = { axis };\n const forward = (backend, save) => {\n const parsedAxis = parseAxisParam(axis, $x.shape)[0];\n const shapeInfo = collectGatherOpShapeInfo($x, $indices, parsedAxis);\n const res = backend.gather($x, reshape($indices, [$indices.size]), parsedAxis);\n save([$x, $indices]);\n return reshape(res, shapeInfo.outputShape);\n };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, GatherV2, attrs);\n}\nexport const gather = op({ gather_ });\n//# sourceMappingURL=gather.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Greater } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { op } from './operation';\n/**\n * Returns the truth value of (a > b) element-wise. 
Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3]);\n * const b = tf.tensor1d([2, 2, 2]);\n *\n * a.greater(b).print();\n * ```\n *\n * @param a The first input tensor.\n * @param b The second input tensor. Must have the same dtype as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Logical'}\n */\nfunction greater_(a, b) {\n let $a = convertToTensor(a, 'a', 'greater');\n let $b = convertToTensor(b, 'b', 'greater');\n [$a, $b] = makeTypesMatch($a, $b);\n assertAndGetBroadcastShape($a.shape, $b.shape);\n const forward = backend => backend.greater($a, $b);\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Greater);\n}\nexport const greater = op({ greater_ });\n//# sourceMappingURL=greater.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { GreaterEqual } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { op } from './operation';\n/**\n * Returns the truth value of (a >= b) element-wise. Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3]);\n * const b = tf.tensor1d([2, 2, 2]);\n *\n * a.greaterEqual(b).print();\n * ```\n *\n * @param a The first input tensor.\n * @param b The second input tensor. Must have the same dtype as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Logical'}\n */\nfunction greaterEqual_(a, b) {\n let $a = convertToTensor(a, 'a', 'greaterEqual');\n let $b = convertToTensor(b, 'b', 'greaterEqual');\n [$a, $b] = makeTypesMatch($a, $b);\n assertAndGetBroadcastShape($a.shape, $b.shape);\n const forward = (backend, save) => {\n const res = backend.greaterEqual($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, GreaterEqual);\n}\nexport const greaterEqual = op({ greaterEqual_ });\n//# sourceMappingURL=greater_equal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Imag } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Returns the imaginary part of a complex (or real) tensor.\n *\n * Given a tensor input, this operation returns a tensor of type float that is\n * the imaginary part of each element in input considered as a complex number.\n * If input is real, a tensor of all zeros is returned.\n *\n * ```js\n * const x = tf.complex([-2.25, 3.25], [4.75, 5.75]);\n * tf.imag(x).print();\n * ```\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nfunction imag_(input) {\n const $input = convertToTensor(input, 'input', 'imag');\n const forward = (backend) => {\n return backend.imag($input);\n };\n const inputs = { input: $input };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Imag);\n}\nexport const imag = op({ imag_ });\n//# sourceMappingURL=imag.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { IsFinite } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Returns which elements of x are finite.\n *\n * ```js\n * const x = tf.tensor1d([NaN, Infinity, -Infinity, 0, 1]);\n *\n * x.isFinite().print(); // or tf.isNaN(x)\n * ```\n * @param x The input Tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction isFinite_(x) {\n const $x = convertToTensor(x, 'x', 'isFinite');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend) => backend.isFinite($x), inputs, null /* grad */, IsFinite);\n}\nexport const isFinite = op({ isFinite_ });\n//# sourceMappingURL=is_finite.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { IsInf } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Returns which elements of x are Infinity or -Infinity.\n *\n * ```js\n * const x = tf.tensor1d([NaN, Infinity, -Infinity, 0, 1]);\n *\n * x.isInf().print(); // or tf.isNaN(x)\n * ```\n * @param x The input Tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction isInf_(x) {\n const $x = convertToTensor(x, 'x', 'isInf');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend) => backend.isInf($x), inputs, null /* grad */, IsInf);\n}\nexport const isInf = op({ isInf_ });\n//# sourceMappingURL=is_inf.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { IsNan } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * RReturns which elements of x are NaN.\n *\n * ```js\n * const x = tf.tensor1d([NaN, Infinity, -Infinity, 0, 1]);\n *\n * x.isNaN().print(); // or tf.isNaN(x)\n * ```\n * @param x The input Tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction isNaN_(x) {\n const $x = convertToTensor(x, 'x', 'isNaN');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(backend => backend.isNaN($x), inputs, null /* grad */, IsNan);\n}\nexport const isNaN = op({ isNaN_ });\n//# sourceMappingURL=is_nan.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Maximum } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { cast } from './cast';\nimport { op } from './operation';\n/**\n * Returns the max of a and b (`a > b ? a : b`) element-wise.\n * Supports broadcasting.\n *\n * We also expose `tf.maximumStrict` which has the same signature as this op and\n * asserts that `a` and `b` are the same shape (does not broadcast).\n *\n * ```js\n * const a = tf.tensor1d([1, 4, 3, 16]);\n * const b = tf.tensor1d([1, 2, 9, 4]);\n *\n * a.maximum(b).print(); // or tf.maximum(a, b)\n * ```\n *\n * ```js\n * // Broadcast maximum a with b.\n * const a = tf.tensor1d([2, 4, 6, 8]);\n * const b = tf.scalar(5);\n *\n * a.maximum(b).print(); // or tf.maximum(a, b)\n * ```\n *\n * @param a The first tensor.\n * @param b The second tensor. Must have the same type as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction maximum_(a, b) {\n let $a = convertToTensor(a, 'a', 'maximum');\n let $b = convertToTensor(b, 'b', 'maximum');\n [$a, $b] = makeTypesMatch($a, $b);\n if ($a.dtype === 'bool') {\n $a = cast($a, 'int32');\n $b = cast($b, 'int32');\n }\n assertAndGetBroadcastShape($a.shape, $b.shape);\n const forward = (backend, save) => {\n const res = backend.maximum($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Maximum);\n}\nexport const maximum = op({ maximum_ });\n//# sourceMappingURL=maximum.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { isTypedArray } from '../util';\nimport { makeTensor } from './tensor_ops_util';\n/**\n * Creates rank-0 `tf.Tensor` (scalar) with the provided value and dtype.\n *\n * The same functionality can be achieved with `tf.tensor`, but in general\n * we recommend using `tf.scalar` as it makes the code more readable.\n *\n * ```js\n * tf.scalar(3.14).print();\n * ```\n *\n * @param value The value of the scalar.\n * @param dtype The data type.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function scalar(value, dtype) {\n if (((isTypedArray(value) && dtype !== 'string') || Array.isArray(value)) &&\n dtype !== 'complex64') {\n throw new Error('Error creating a new Scalar: value must be a primitive ' +\n '(number|boolean|string)');\n }\n if (dtype === 'string' && isTypedArray(value) &&\n !(value instanceof Uint8Array)) {\n throw new Error('When making a scalar from encoded string, ' +\n 'the value must be `Uint8Array`.');\n }\n const shape = [];\n const inferredShape = [];\n return makeTensor(value, shape, inferredShape, dtype);\n}\n//# sourceMappingURL=scalar.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport { maximum } from './maximum';\nimport { mul } from './mul';\nimport { op } from './operation';\nimport { scalar } from './scalar';\n/**\n * Computes leaky rectified linear element-wise.\n *\n * See\n * [http://web.stanford.edu/~awni/papers/relu_hybrid_icml2013_final.pdf](\n * http://web.stanford.edu/~awni/papers/relu_hybrid_icml2013_final.pdf)\n *\n * ```js\n * const x = tf.tensor1d([-1, 2, -3, 4]);\n *\n * x.leakyRelu(0.1).print(); // or tf.leakyRelu(x, 0.1)\n * ```\n * @param x The input tensor.\n * @param alpha The scaling factor for negative values, defaults to 0.2.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction leakyRelu_(x, alpha = 0.2) {\n const $x = convertToTensor(x, 'x', 'leakyRelu');\n return maximum(mul(scalar(alpha), $x), $x);\n}\nexport const leakyRelu = op({ leakyRelu_ });\n//# sourceMappingURL=leaky_relu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Less } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { op } from './operation';\n/**\n * Returns the truth value of (a < b) element-wise. Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3]);\n * const b = tf.tensor1d([2, 2, 2]);\n *\n * a.less(b).print();\n * ```\n * @param a The first input tensor.\n * @param b The second input tensor. Must have the same dtype as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Logical'}\n */\nfunction less_(a, b) {\n let $a = convertToTensor(a, 'a', 'less');\n let $b = convertToTensor(b, 'b', 'less');\n [$a, $b] = makeTypesMatch($a, $b);\n assertAndGetBroadcastShape($a.shape, $b.shape);\n const forward = backend => backend.less($a, $b);\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Less);\n}\nexport const less = op({ less_ });\n//# sourceMappingURL=less.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { LessEqual } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { op } from './operation';\n/**\n * Returns the truth value of (a <= b) element-wise. Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3]);\n * const b = tf.tensor1d([2, 2, 2]);\n *\n * a.lessEqual(b).print();\n * ```\n *\n * @param a The first input tensor.\n * @param b The second input tensor. 
Must have the same dtype as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Logical'}\n */\nfunction lessEqual_(a, b) {\n let $a = convertToTensor(a, 'a', 'lessEqual');\n let $b = convertToTensor(b, 'b', 'lessEqual');\n [$a, $b] = makeTypesMatch($a, $b);\n assertAndGetBroadcastShape($a.shape, $b.shape);\n const forward = (backend, save) => {\n const res = backend.lessEqual($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, LessEqual);\n}\nexport const lessEqual = op({ lessEqual_ });\n//# sourceMappingURL=less_equal.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { LinSpace } from '../kernel_names';\n/**\n * Return an evenly spaced sequence of numbers over the given interval.\n *\n * ```js\n * tf.linspace(0, 9, 10).print();\n * ```\n * @param start The start value of the sequence.\n * @param stop The end value of the sequence.\n * @param num The number of values to generate.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function linspace(start, stop, num) {\n if (num <= 0) {\n throw new Error('The number of values should be positive.');\n }\n const attrs = { start, stop, num };\n return ENGINE.runKernelFunc(backend => backend.linspace(start, stop, num), {} /* inputs */, null /* grad */, LinSpace, attrs);\n}\n//# sourceMappingURL=linspace.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { LRN } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Normalizes the activation of a local neighborhood across or within\n * channels.\n *\n * @param x The input tensor. 
The 4-D input tensor is treated as a 3-D array\n * of 1D vectors (along the last dimension), and each vector is\n * normalized independently.\n * @param depthRadius The number of adjacent channels in the 1D normalization\n * window.\n * @param bias A constant bias term for the basis.\n * @param alpha A scale factor, usually positive.\n * @param beta An exponent.\n *\n * @doc {heading: 'Operations', subheading: 'Normalization'}\n */\nfunction localResponseNormalization_(x, depthRadius = 5, bias = 1, alpha = 1, beta = 0.5) {\n const $x = convertToTensor(x, 'x', 'localResponseNormalization');\n util.assert($x.rank === 4 || $x.rank === 3, () => `Error in localResponseNormalization: x must be rank 3 or 4 but got\n rank ${$x.rank}.`);\n util.assert(util.isInt(depthRadius), () => `Error in localResponseNormalization: depthRadius must be an ` +\n `integer but got depthRadius ${depthRadius}.`);\n let x4D = $x;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n reshapedTo4D = true;\n x4D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2]]);\n }\n const forward = (backend, save) => {\n const y = backend.localResponseNormalization4D(x4D, depthRadius, bias, alpha, beta);\n save([x4D, y]);\n return y;\n };\n const inputs = { x: x4D };\n const attrs = { depthRadius, bias, alpha, beta };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, LRN, attrs);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n else {\n return res;\n }\n}\nexport const localResponseNormalization = op({ localResponseNormalization_ });\n//# sourceMappingURL=local_response_normalization.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Log } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes natural logarithm of the input `tf.Tensor` element-wise: `ln(x)`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, Math.E]);\n *\n * x.log().print(); // or tf.log(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction log_(x) {\n const $x = convertToTensor(x, 'x', 'log');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.log($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Log);\n}\nexport const log = op({ log_ });\n//# sourceMappingURL=log.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Log1p } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes natural logarithm of the input `tf.Tensor` plus one\n * element-wise: `ln(1 + x)`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, Math.E - 1]);\n *\n * x.log1p().print(); // or tf.log1p(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction log1p_(x) {\n const $x = convertToTensor(x, 'x', 'log1p');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.log1p($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Log1p);\n}\nexport const log1p = op({ log1p_ });\n//# sourceMappingURL=log1p.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from './engine';\nimport { Tensor, Variable } from './tensor';\nimport { convertToTensor, convertToTensorArray } from './tensor_util_env';\nimport * as util from './util';\n/**\n * Provided `f(x)`, returns another function `g(x, dy?)`, which gives the\n * gradient of `f(x)` with respect to `x`.\n *\n * If `dy` is provided, the gradient of `f(x).mul(dy).sum()` with respect to\n * `x` is computed instead. `f(x)` must take a single tensor `x` and return a\n * single tensor `y`. 
If `f()` takes multiple inputs, use `tf.grads` instead.\n *\n * ```js\n * // f(x) = x ^ 2\n * const f = x => x.square();\n * // f'(x) = 2x\n * const g = tf.grad(f);\n *\n * const x = tf.tensor1d([2, 3]);\n * g(x).print();\n * ```\n *\n * ```js\n * // f(x) = x ^ 3\n * const f = x => x.pow(tf.scalar(3, 'int32'));\n * // f'(x) = 3x ^ 2\n * const g = tf.grad(f);\n * // f''(x) = 6x\n * const gg = tf.grad(g);\n *\n * const x = tf.tensor1d([2, 3]);\n * gg(x).print();\n * ```\n *\n * @param f The function f(x), to compute gradient for.\n *\n * @doc {heading: 'Training', subheading: 'Gradients'}\n */\nfunction grad(f) {\n util.assert(util.isFunction(f), () => 'The f passed in grad(f) must be a function');\n return (x, dy) => {\n // x can be of any dtype, thus null as the last argument.\n const $x = convertToTensor(x, 'x', 'tf.grad', null);\n const $dy = (dy != null) ? convertToTensor(dy, 'dy', 'tf.grad') : null;\n return ENGINE.tidy(() => {\n const { value, grads } = ENGINE.gradients(() => f($x), [$x], $dy);\n if ($dy != null) {\n util.assertShapesMatch(value.shape, $dy.shape, 'The shape of dy passed in grad(f)(x, dy) must match the shape ' +\n 'returned by f(x)');\n }\n checkGrads(grads);\n return grads[0];\n });\n };\n}\n/**\n * Provided `f(x1, x2,...)`, returns another function `g([x1, x2,...], dy?)`,\n * which gives an array of gradients of `f()` with respect to each input\n * [`x1`,`x2`,...].\n *\n * If `dy` is passed when calling `g()`, the gradient of\n * `f(x1,...).mul(dy).sum()` with respect to each input is computed instead.\n * The provided `f` must take one or more tensors and return a single tensor\n * `y`. If `f()` takes a single input, we recommend using `tf.grad` instead.\n *\n * ```js\n * // f(a, b) = a * b\n * const f = (a, b) => a.mul(b);\n * // df / da = b, df / db = a\n * const g = tf.grads(f);\n *\n * const a = tf.tensor1d([2, 3]);\n * const b = tf.tensor1d([-2, -3]);\n * const [da, db] = g([a, b]);\n * console.log('da');\n * da.print();\n * console.log('db');\n * db.print();\n * ```\n *\n * @param f The function `f(x1, x2,...)` to compute gradients for.\n *\n * @doc {heading: 'Training', subheading: 'Gradients'}\n */\nfunction grads(f) {\n util.assert(util.isFunction(f), () => 'The f passed in grads(f) must be a function');\n return (args, dy) => {\n util.assert(Array.isArray(args), () => 'The args passed in grads(f)(args) must be an array ' +\n 'of `Tensor`s or `TensorLike`s');\n // args can be of any dtype, thus null as the last argument.\n const $args = convertToTensorArray(args, 'args', 'tf.grads', null);\n const $dy = (dy != null) ? convertToTensor(dy, 'dy', 'tf.grads') : null;\n return ENGINE.tidy(() => {\n const { value, grads } = ENGINE.gradients(() => f(...$args), $args, $dy);\n if ($dy != null) {\n util.assertShapesMatch(value.shape, $dy.shape, 'The shape of dy passed in grads(f)([x1,...], dy) must ' +\n 'match the shape returned by f([x1,...])');\n }\n checkGrads(grads);\n return grads;\n });\n };\n}\n/**\n * Like `tf.grad`, but also returns the value of `f()`. 
Useful when `f()`\n * returns a metric you want to show.\n *\n * The result is a rich object with the following properties:\n * - grad: The gradient of `f(x)` w.r.t `x` (result of `tf.grad`).\n * - value: The value returned by `f(x)`.\n *\n * ```js\n * // f(x) = x ^ 2\n * const f = x => x.square();\n * // f'(x) = 2x\n * const g = tf.valueAndGrad(f);\n *\n * const x = tf.tensor1d([2, 3]);\n * const {value, grad} = g(x);\n *\n * console.log('value');\n * value.print();\n * console.log('grad');\n * grad.print();\n * ```\n *\n * @doc {heading: 'Training', subheading: 'Gradients'}\n */\nfunction valueAndGrad(f) {\n util.assert(util.isFunction(f), () => 'The f passed in valueAndGrad(f) must be a function');\n return (x, dy) => {\n util.assert(x instanceof Tensor, () => 'The x passed in valueAndGrad(f)(x) must be a tensor');\n util.assert(dy == null || dy instanceof Tensor, () => 'The dy passed in valueAndGrad(f)(x, dy) must be a tensor');\n const { grads, value } = ENGINE.gradients(() => f(x), [x], dy);\n checkGrads(grads);\n return { grad: grads[0], value };\n };\n}\n/**\n * Like `tf.grads`, but returns also the value of `f()`. Useful when `f()`\n * returns a metric you want to show.\n *\n * The result is a rich object with the following properties:\n * - grads: The gradients of `f()` w.r.t each input (result of `tf.grads`).\n * - value: The value returned by `f(x)`.\n *\n * ```js\n * // f(a, b) = a * b\n * const f = (a, b) => a.mul(b);\n * // df/da = b, df/db = a\n * const g = tf.valueAndGrads(f);\n *\n * const a = tf.tensor1d([2, 3]);\n * const b = tf.tensor1d([-2, -3]);\n * const {value, grads} = g([a, b]);\n *\n * const [da, db] = grads;\n *\n * console.log('value');\n * value.print();\n *\n * console.log('da');\n * da.print();\n * console.log('db');\n * db.print();\n * ```\n *\n * @doc {heading: 'Training', subheading: 'Gradients'}\n */\nfunction valueAndGrads(f) {\n util.assert(util.isFunction(f), () => 'The f passed in valueAndGrads(f) must be a function');\n return (args, dy) => {\n util.assert(Array.isArray(args) && args.every(arg => arg instanceof Tensor), () => 'The args passed in valueAndGrads(f)(args) must be array of ' +\n 'tensors');\n util.assert(dy == null || dy instanceof Tensor, () => 'The dy passed in valueAndGrads(f)(args, dy) must be a tensor');\n const res = ENGINE.gradients(() => f(...args), args, dy);\n if (dy != null) {\n util.assertShapesMatch(res.value.shape, dy.shape, 'The shape of dy passed in valueAndGrads(f)([x1,...], dy) must ' +\n 'match the shape returned by f([x1,...])');\n }\n checkGrads(res.grads);\n return res;\n };\n}\n/**\n * Computes and returns the gradient of f(x) with respect to the list of\n * trainable variables provided by `varList`. If no list is provided, it\n * defaults to all trainable variables.\n *\n * ```js\n * const a = tf.variable(tf.tensor1d([3, 4]));\n * const b = tf.variable(tf.tensor1d([5, 6]));\n * const x = tf.tensor1d([1, 2]);\n *\n * // f(a, b) = a * x ^ 2 + b * x\n * const f = () => a.mul(x.square()).add(b.mul(x)).sum();\n * // df/da = x ^ 2, df/db = x\n * const {value, grads} = tf.variableGrads(f);\n *\n * Object.keys(grads).forEach(varName => grads[varName].print());\n * ```\n *\n * @param f The function to execute. f() should return a scalar.\n * @param varList The list of variables to compute the gradients with respect\n * to. 
Defaults to all trainable variables.\n * @returns An object with the following keys and values:\n * - `value`: The value of the function `f`.\n * - `grads`: A map from the names of the variables to the gradients.\n * If the `varList` argument is provided explicitly and contains a subset of\n * non-trainable variables, this map in the return value will contain keys\n * that map the names of the non-trainable variables to `null`.\n *\n * @doc {heading: 'Training', subheading: 'Gradients'}\n */\nfunction variableGrads(f, varList) {\n util.assert(util.isFunction(f), () => 'The f passed in variableGrads(f) must be a function');\n util.assert(varList == null ||\n Array.isArray(varList) && varList.every(v => v instanceof Variable), () => 'The varList passed in variableGrads(f, varList) must be an array ' +\n 'of variables');\n const specifiedVarList = varList != null;\n if (!specifiedVarList) {\n // Get all of the trainable variables.\n varList = [];\n for (const varName in ENGINE.registeredVariables) {\n varList.push(ENGINE.registeredVariables[varName]);\n }\n }\n const specifiedNonTrainable = specifiedVarList ? varList.filter(variable => !variable.trainable) : null;\n // Prune non-trainable variables.\n const originalVarCount = varList.length;\n varList = varList.filter(variable => variable.trainable);\n util.assert(varList.length > 0, () => `variableGrads() expects at least one of the input variables to ` +\n `be trainable, but none of the ${originalVarCount} variables is ` +\n `trainable.`);\n const allowNoGradients = true;\n const { value, grads } = ENGINE.gradients(f, varList, null, allowNoGradients);\n util.assert(grads.some(g => g != null), () => 'Cannot find a connection between any variable and the result of ' +\n 'the loss function y=f(x). Please make sure the operations that ' +\n 'use variables are inside the function f passed to minimize().');\n util.assert(value.rank === 0, () => `The f passed in variableGrads(f) must return a scalar, but it ` +\n `returned a rank-${value.rank} tensor`);\n const namedGrads = {};\n varList.forEach((v, i) => {\n if (grads[i] != null) {\n namedGrads[v.name] = grads[i];\n }\n });\n if (specifiedNonTrainable != null) {\n // If varList is explicitly provided and contains non-trainable values,\n // add them to the returned gradients with `null` values.\n specifiedNonTrainable.forEach(v => namedGrads[v.name] = null);\n }\n return { value, grads: namedGrads };\n}\n/**\n * Overrides the gradient computation of a function `f`.\n *\n * Takes a function\n * `f(...inputs, save) => {value: Tensor, gradFunc: (dy, saved) => Tensor[]}`\n * and returns another function `g(...inputs)` which takes the same inputs as\n * `f`. When called, `g` returns `f().value`. In backward mode, custom gradients\n * with respect to each input of `f` are computed using `f().gradFunc`.\n *\n * The `save` function passsed to `f` should be used for saving tensors needed\n * in the gradient. 
And the `saved` passed to the `gradFunc` is a\n * `NamedTensorMap`, which contains those saved tensor.\n *\n * ```js\n * const customOp = tf.customGrad((x, save) => {\n * // Save x to make sure it's available later for the gradient.\n * save([x]);\n * // Override gradient of our custom x ^ 2 op to be dy * abs(x);\n * return {\n * value: x.square(),\n * // Note `saved.x` which points to the `x` we saved earlier.\n * gradFunc: (dy, saved) => [dy.mul(saved[0].abs())]\n * };\n * });\n *\n * const x = tf.tensor1d([-1, -2, 3]);\n * const dx = tf.grad(x => customOp(x));\n *\n * console.log(`f(x):`);\n * customOp(x).print();\n * console.log(`f'(x):`);\n * dx(x).print();\n * ```\n *\n * @param f The function to evaluate in forward mode, which should return\n * `{value: Tensor, gradFunc: (dy, saved) => Tensor[]}`, where `gradFunc`\n * returns the custom gradients of `f` with respect to its inputs.\n *\n * @doc {heading: 'Training', subheading: 'Gradients'}\n */\nfunction customGrad(f) {\n return ENGINE.customGrad(f);\n}\nfunction checkGrads(grads) {\n const numNullGradients = grads.filter(g => g == null).length;\n if (numNullGradients > 0) {\n throw new Error(`Cannot compute gradient of y=f(x) with respect to x. Make sure that\n the f you passed encloses all operations that lead from x to y.`);\n }\n}\nexport { customGrad, variableGrads, valueAndGrad, valueAndGrads, grad, grads, };\n//# sourceMappingURL=gradients.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Negate } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes `-1 * x` element-wise.\n *\n * ```js\n * const x = tf.tensor2d([1, 2, -2, 0], [2, 2]);\n *\n * x.neg().print(); // or tf.neg(x)\n * ```\n *\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction neg_(x) {\n const $x = convertToTensor(x, 'x', 'neg');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(backend => backend.neg($x), inputs, null /* grad */, Negate);\n}\nexport const neg = op({ neg_ });\n//# sourceMappingURL=neg.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Softplus } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes softplus of the input `tf.Tensor` element-wise: `log(exp(x) + 1)`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, .7]);\n *\n * x.softplus().print(); // or tf.softplus(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction softplus_(x) {\n const $x = convertToTensor(x, 'x', 'softplus');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.softplus($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Softplus);\n}\nexport const softplus = op({ softplus_ });\n//# sourceMappingURL=softplus.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { customGrad } from '../gradients';\nimport { convertToTensor } from '../tensor_util_env';\nimport { mul } from './mul';\nimport { neg } from './neg';\nimport { op } from './operation';\nimport { sigmoid } from './sigmoid';\nimport { softplus } from './softplus';\n/**\n * Computes log sigmoid of the input `tf.Tensor` element-wise:\n * `logSigmoid(x)`. For numerical stability, we use `-tf.softplus(-x)`.\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, .7]);\n *\n * x.logSigmoid().print(); // or tf.logSigmoid(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction logSigmoid_(x) {\n const $x = convertToTensor(x, 'x', 'logSigmoid');\n // Use a custom gradient to maintain previous implementation.\n // There is no LogSigmoid kernel in TF so we can't use engine.runKernel\n // directly\n const customOp = customGrad((x) => {\n // TODO(yassogba) we can remove the chained softplus call here only\n // after backends have modualrized softplus at which point we can call\n // engine runKernel(..., Sotfplus, ...) 
directly.\n const value = neg(softplus(neg(x)));\n const gradFunc = (dy) => {\n const derX = mul(dy, sigmoid(neg(x)));\n return derX;\n };\n return { value, gradFunc };\n });\n return customOp($x);\n}\nexport const logSigmoid = op({ logSigmoid_ });\n//# sourceMappingURL=log_sigmoid.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Max } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as axis_util from './axis_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { transpose } from './transpose';\n/**\n * Computes the maximum of elements across dimensions of a `tf.Tensor`.\n *\n * Reduces the input along the dimensions given in `axes`. Unless `keepDims`\n * is true, the rank of the `tf.Tensor` is reduced by 1 for each entry in\n * `axes`. If `keepDims` is true, the reduced dimensions are retained with\n * length 1. If `axes` has no entries, all dimensions are reduced, and an\n * `tf.Tensor` with a single element is returned.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3]);\n *\n * x.max().print(); // or tf.max(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * const axis = 1;\n * x.max(axis).print(); // or tf.max(x, axis)\n * ```\n *\n * @param x The input tensor.\n * @param axis The dimension(s) to reduce. By default it reduces\n * all dimensions.\n * @param keepDims If true, retains reduced dimensions with size 1.\n *\n * @doc {heading: 'Operations', subheading: 'Reduction'}\n */\nfunction max_(x, axis = null, keepDims = false) {\n const $x = convertToTensor(x, 'x', 'max');\n const forward = (backend, save) => {\n const origAxes = util.parseAxisParam(axis, $x.shape);\n let axes = origAxes;\n const permutedAxes = axis_util.getAxesPermutation(axes, $x.rank);\n let maxInput = $x;\n if (permutedAxes != null) {\n maxInput = transpose($x, permutedAxes);\n axes = axis_util.getInnerMostAxes(axes.length, maxInput.rank);\n }\n const y = backend.max(maxInput, axes);\n if (permutedAxes != null) {\n maxInput.dispose();\n }\n let res = y;\n if (keepDims) {\n const expandedShape = axis_util.expandShapeToKeepDim(res.shape, util.parseAxisParam(axis, $x.shape));\n res = reshape(res, expandedShape);\n y.dispose();\n }\n save([$x, res]);\n return res;\n };\n const inputs = { x: $x };\n const attrs = { reductionIndices: axis, keepDims };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Max, attrs);\n}\nexport const max = op({ max_ });\n//# sourceMappingURL=max.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Sub } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Subtracts two `tf.Tensor`s element-wise, A - B. Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([10, 20, 30, 40]);\n * const b = tf.tensor1d([1, 2, 3, 4]);\n *\n * a.sub(b).print(); // or tf.sub(a, b)\n * ```\n *\n * ```js\n * // Broadcast subtract a with b.\n * const a = tf.tensor1d([10, 20, 30, 40]);\n * const b = tf.scalar(5);\n *\n * a.sub(b).print(); // or tf.sub(a, b)\n * ```\n * @param a The first `tf.Tensor` to subtract from.\n * @param b The second `tf.Tensor` to be subtracted. Must have the same dtype as\n * `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction sub_(a, b) {\n let $a = convertToTensor(a, 'a', 'sub');\n let $b = convertToTensor(b, 'b', 'sub');\n [$a, $b] = makeTypesMatch($a, $b);\n const forward = (backend, save) => {\n const res = backend.subtract($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Sub);\n}\nexport const sub = op({ sub_ });\n//# sourceMappingURL=sub.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Sum } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam } from '../util';\nimport { expandShapeToKeepDim, getAxesPermutation, getInnerMostAxes } from './axis_util';\nimport { cast } from './cast';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { transpose } from './transpose';\n/**\n * Computes the sum of elements across dimensions of a `tf.Tensor`.\n *\n * Reduces the input along the dimensions given in `axes`. Unless `keepDims`\n * is true, the rank of the `tf.Tensor` is reduced by 1 for each entry in\n * `axes`. If `keepDims` is true, the reduced dimensions are retained with\n * length 1. 
If axes has no entries, all dimensions are reduced, and a\n * `tf.Tensor` with a single element is returned.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3]);\n *\n * x.sum().print(); // or tf.sum(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * const axis = 1;\n * x.sum(axis).print(); // or tf.sum(x, axis)\n * ```\n *\n * @param x The input tensor to compute the sum over. If the dtype is `bool`\n * it will be converted to `int32` and the output dtype will be `int32`.\n * @param axis The dimension(s) to reduce. By default it reduces\n * all dimensions.\n * @param keepDims If true, retains reduced dimensions with size 1.\n *\n * @doc {heading: 'Operations', subheading: 'Reduction'}\n */\nfunction sum_(x, axis = null, keepDims = false) {\n let $x = convertToTensor(x, 'x', 'sum');\n if ($x.dtype === 'bool') {\n $x = cast($x, 'int32');\n }\n const forward = (backend, save) => {\n save([$x]);\n const axes = parseAxisParam(axis, $x.shape);\n const permutation = getAxesPermutation(axes, $x.rank);\n let reductionAxes = axes;\n let permutedX = $x;\n if (permutation != null) {\n permutedX = transpose($x, permutation);\n reductionAxes = getInnerMostAxes(reductionAxes.length, $x.rank);\n }\n let value = backend.sum(permutedX, reductionAxes);\n if (keepDims) {\n const newShape = expandShapeToKeepDim(value.shape, axes);\n value = reshape(value, newShape);\n }\n return value;\n };\n const inputs = { x: $x };\n const attrs = { axis, keepDims };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Sum, attrs);\n}\nexport const sum = op({ sum_ });\n//# sourceMappingURL=sum.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { LogSoftmax } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { cast } from './cast';\nimport { exp } from './exp';\nimport { log } from './log';\nimport { max } from './max';\nimport { op } from './operation';\nimport { sub } from './sub';\nimport { sum } from './sum';\n/**\n * Computes the log softmax.\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3]);\n *\n * a.logSoftmax().print(); // or tf.logSoftmax(a)\n * ```\n *\n * ```js\n * const a = tf.tensor2d([2, 4, 6, 1, 2, 3], [2, 3]);\n *\n * a.logSoftmax().print(); // or tf.logSoftmax(a)\n * ```\n *\n * @param logits The logits array.\n * @param axis The dimension softmax would be performed on. Defaults to `-1`\n * which indicates the last dimension.\n *\n * @doc {heading: 'Operations', subheading: 'Normalization'}\n */\nfunction logSoftmax_(logits, axis = -1) {\n const $logits = convertToTensor(logits, 'logits', 'logSoftmax');\n if (axis === -1) {\n axis = $logits.rank - 1;\n }\n if (axis !== $logits.rank - 1) {\n throw Error('Log Softmax along a non-last dimension is not yet supported. 
' +\n `Logits was rank ${$logits.rank} and axis was ${axis}`);\n }\n const forward = (backend, save) => {\n const keepDims = true;\n const xMax = max(logits, axis, true);\n const shifted = sub(logits, xMax);\n const value = sub(cast(shifted, 'float32'), log(sum(exp(shifted), axis, keepDims)));\n save([value]);\n return value;\n };\n const inputs = { logits: $logits };\n const attrs = { axis };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, LogSoftmax, attrs);\n}\nexport const logSoftmax = op({ logSoftmax_ });\n//# sourceMappingURL=log_softmax.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam } from '../util';\nimport { add } from './add';\nimport { expandShapeToKeepDim } from './axis_util';\nimport { exp } from './exp';\nimport { log } from './log';\nimport { max } from './max';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { sub } from './sub';\nimport { sum } from './sum';\n/**\n * Computes the log(sum(exp(elements across the reduction dimensions)).\n *\n * Reduces the input along the dimensions given in `axis`. Unless `keepDims`\n * is true, the rank of the array is reduced by 1 for each entry in `axis`.\n * If `keepDims` is true, the reduced dimensions are retained with length 1.\n * If `axis` has no entries, all dimensions are reduced, and an array with a\n * single element is returned.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3]);\n *\n * x.logSumExp().print(); // or tf.logSumExp(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * const axis = 1;\n * x.logSumExp(axis).print(); // or tf.logSumExp(a, axis)\n * ```\n * @param x The input tensor.\n * @param axis The dimension(s) to reduce. If null (the default),\n * reduces all dimensions.\n * @param keepDims If true, retains reduced dimensions with length\n * of 1. Defaults to false.\n *\n * @doc {heading: 'Operations', subheading: 'Reduction'}\n */\nfunction logSumExp_(x, axis = null, keepDims = false) {\n const $x = convertToTensor(x, 'x', 'logSumExp');\n const axes = parseAxisParam(axis, $x.shape);\n const xMax = max($x, axes, true /* keepDims */);\n const a = sub($x, xMax);\n const b = exp(a);\n const c = sum(b, axes);\n const d = log(c);\n const res = add(reshape(xMax, d.shape), d);\n if (keepDims) {\n const newShape = expandShapeToKeepDim(res.shape, axes);\n return reshape(res, newShape);\n }\n return res;\n}\nexport const logSumExp = op({ logSumExp_ });\n//# sourceMappingURL=log_sum_exp.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { LogicalAnd } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { op } from './operation';\n/**\n * Returns the truth value of `a AND b` element-wise. Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([false, false, true, true], 'bool');\n * const b = tf.tensor1d([false, true, false, true], 'bool');\n *\n * a.logicalAnd(b).print();\n * ```\n *\n * @param a The first input tensor. Must be of dtype bool.\n * @param b The second input tensor. Must be of dtype bool.\n *\n * @doc {heading: 'Operations', subheading: 'Logical'}\n */\nfunction logicalAnd_(a, b) {\n const $a = convertToTensor(a, 'a', 'logicalAnd', 'bool');\n const $b = convertToTensor(b, 'b', 'logicalAnd', 'bool');\n assertAndGetBroadcastShape($a.shape, $b.shape);\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(backend => backend.logicalAnd($a, $b), inputs, null /* grad */, LogicalAnd);\n}\nexport const logicalAnd = op({ logicalAnd_ });\n//# sourceMappingURL=logical_and.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { LogicalNot } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Returns the truth value of `NOT x` element-wise.\n *\n * ```js\n * const a = tf.tensor1d([false, true], 'bool');\n *\n * a.logicalNot().print();\n * ```\n *\n * @param x The input tensor. Must be of dtype 'bool'.\n *\n * @doc {heading: 'Operations', subheading: 'Logical'}\n */\nfunction logicalNot_(x) {\n const $x = convertToTensor(x, 'x', 'logicalNot', 'bool');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(backend => backend.logicalNot($x), inputs, null /* grad */, LogicalNot);\n}\nexport const logicalNot = op({ logicalNot_ });\n//# sourceMappingURL=logical_not.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { LogicalOr } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { op } from './operation';\n/**\n * Returns the truth value of `a OR b` element-wise. Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([false, false, true, true], 'bool');\n * const b = tf.tensor1d([false, true, false, true], 'bool');\n *\n * a.logicalOr(b).print();\n * ```\n * @param a The first input tensor. Must be of dtype bool.\n * @param b The second input tensor. Must be of dtype bool.\n *\n * @doc {heading: 'Operations', subheading: 'Logical'}\n */\nfunction logicalOr_(a, b) {\n const $a = convertToTensor(a, 'a', 'logicalOr', 'bool');\n const $b = convertToTensor(b, 'b', 'logicalOr', 'bool');\n assertAndGetBroadcastShape($a.shape, $b.shape);\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(backend => backend.logicalOr($a, $b), inputs, null /* grad */, LogicalOr);\n}\nexport const logicalOr = op({ logicalOr_ });\n//# sourceMappingURL=logical_or.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { logicalAnd } from './logical_and';\nimport { logicalNot } from './logical_not';\nimport { logicalOr } from './logical_or';\nimport { op } from './operation';\n/**\n * Returns the truth value of `a XOR b` element-wise. Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([false, false, true, true], 'bool');\n * const b = tf.tensor1d([false, true, false, true], 'bool');\n *\n * a.logicalXor(b).print();\n * ```\n *\n * @param a The first input tensor. Must be of dtype bool.\n * @param b The second input tensor. 
Must be of dtype bool.\n *\n * @doc {heading: 'Operations', subheading: 'Logical'}\n */\nfunction logicalXor_(a, b) {\n const $a = convertToTensor(a, 'a', 'logicalXor', 'bool');\n const $b = convertToTensor(b, 'b', 'logicalXor', 'bool');\n assertAndGetBroadcastShape($a.shape, $b.shape);\n // x ^ y = (x | y) & ~(x & y)\n return logicalAnd(logicalOr(a, b), logicalNot(logicalAnd(a, b)));\n}\nexport const logicalXor = op({ logicalXor_ });\n//# sourceMappingURL=logical_xor.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { MaxPool } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the 2D max pooling of an image.\n *\n * @param x The input tensor, of rank 4 or rank 3 of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is assumed.\n * @param filterSize The filter size: `[filterHeight, filterWidth]`. If\n * `filterSize` is a single number, then `filterHeight == filterWidth`.\n * @param strides The strides of the pooling: `[strideHeight, strideWidth]`. If\n * `strides` is a single number, then `strideHeight == strideWidth`.\n * @param dilations The dilation rates: `[dilationHeight, dilationWidth]`\n * in which we sample input values across the height and width dimensions\n * in dilated pooling. Defaults to `[1, 1]`. If `dilations` is a single\n * number, then `dilationHeight == dilationWidth`. If it is greater than\n * 1, then all values of `strides` must be 1.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. If none is provided, it will not round\n * and error if the output is of fractional size.\n */\nfunction maxPool_(x, filterSize, strides, pad, dimRoundingMode) {\n const $x = convertToTensor(x, 'x', 'maxPool');\n const dilations = 1;\n let x4D = $x;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n reshapedTo4D = true;\n x4D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2]]);\n }\n util.assert(x4D.rank === 4, () => `Error in maxPool: input must be rank 4 but got rank ${x4D.rank}.`);\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in maxPool: Either strides or dilations must be 1. 
' +\n `Got strides ${strides} and dilations '${dilations}'`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in maxPool: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const forward = (backend, save) => {\n const convInfo = conv_util.computePool2DInfo(x4D.shape, filterSize, strides, 1 /* dilations */, pad, dimRoundingMode);\n let y;\n if (convInfo.filterWidth === 1 && convInfo.filterHeight === 1 &&\n util.arraysEqual(convInfo.inShape, convInfo.outShape)) {\n y = x4D.clone();\n }\n else {\n y = backend.maxPool(x4D, convInfo);\n }\n save([x4D, y]);\n return y;\n };\n const inputs = { x: x4D };\n const attrs = { filterSize, strides, pad, dimRoundingMode };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, MaxPool, attrs);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const maxPool = op({ maxPool_ });\n//# sourceMappingURL=max_pool.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { deprecationWarn } from '../globals';\nimport { MaxPool3D } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the 3D max pooling.\n *\n * ```js\n * const x = tf.tensor5d([1, 2, 3, 4, 5, 6, 7, 8], [1, 2, 2, 2, 1]);\n * const result = tf.maxPool3d(x, 2, 1, 'valid');\n * result.print();\n * ```\n *\n * @param x The input tensor, of rank 5 or rank 4 of shape\n * `[batch, depth, height, width, inChannels]`.\n * @param filterSize The filter size:\n * `[filterDepth, filterHeight, filterWidth]`.\n * If `filterSize` is a single number,\n * then `filterDepth == filterHeight == filterWidth`.\n * @param strides The strides of the pooling:\n * `[strideDepth, strideHeight, strideWidth]`.\n * If `strides` is a single number,\n * then `strideDepth == strideHeight == strideWidth`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1*1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. If none is provided, it will not round\n * and error if the output is of fractional size.\n * @param dataFormat An optional string from: \"NDHWC\", \"NCDHW\". Defaults to\n * \"NDHWC\". Specify the data format of the input and output data. 
With the\n * default format \"NDHWC\", the data is stored in the order of: [batch,\n * depth, height, width, channels]. Only \"NDHWC\" is currently supported.\n * @param dilations Deprecated, this field will be gone in v3.0.0.\n * The dilation rates: `[dilationDepth, dilationHeight, dilationWidth]`\n * in which we sample input values across the depth, height and width\n * dimensions in dilated pooling.\n * Defaults to `[1, 1, 1]`. If `dilations` is a single number,\n * then `dilationDepth == dilationHeight == dilationWidth`.\n * If it is greater than 1, then all values of `strides` must be 1.\n *\n * @doc {heading: 'Operations', subheading: 'Convolution'}\n */\nfunction maxPool3d_(x, filterSize = [1, 1, 1], strides, pad, dimRoundingMode, dataFormat = 'NDHWC', dilations) {\n if (dilations == null) {\n dilations = [1, 1, 1];\n }\n else {\n deprecationWarn('dilations is deprecated, this field will be gone in ' +\n 'v3.0.0.');\n }\n const $x = convertToTensor(x, 'x', 'maxPool3d');\n let x5D = $x;\n let reshapedTo5D = false;\n if ($x.rank === 4) {\n reshapedTo5D = true;\n x5D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2], $x.shape[3]]);\n }\n util.assert(x5D.rank === 5, () => `Error in maxPool3d: x must be rank 5 but got rank ${x5D.rank}.`);\n util.assert(dataFormat === 'NDHWC', () => `Error in maxPool3d: Only NDHWC is currently supported, ` +\n `but got dataFormat of ${dataFormat}`);\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in maxPool3d: Either strides or dilations must be 1. ' +\n `Got strides ${strides} and dilations '${dilations}'`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in maxPool3d: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const forward = (backend, save) => {\n if (dilations == null) {\n dilations = [1, 1, 1];\n }\n const convInfo = conv_util.computePool3DInfo(x5D.shape, filterSize, strides, dilations, pad, dimRoundingMode, dataFormat);\n const y = backend.maxPool3d(x5D, convInfo);\n save([x5D, y]);\n return y;\n };\n const inputs = { x: x5D };\n const attrs = { filterSize, strides, pad, dimRoundingMode, dataFormat, dilations };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, MaxPool3D, attrs);\n if (reshapedTo5D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3], res.shape[4]]);\n }\n return res;\n}\nexport const maxPool3d = op({ maxPool3d_ });\n//# sourceMappingURL=max_pool_3d.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { MaxPoolWithArgmax } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes the 2D max pooling of an image with Argmax index.\n * The indices in argmax are flattened, so that a maximum value at position `[b,\n * y, x, c]` becomes flattened index: `(y * width + x) * channels + c` if\n * include_batch_in_index is False; `((b * height + y) * width + x) * channels\n * +c` if include_batch_in_index is True.\n *\n * The indices returned are always in `[0, height) x [0, width)` before\n * flattening.\n *\n * @param x The input tensor, of rank 4 or rank 3 of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is assumed.\n * @param filterSize The filter size: `[filterHeight, filterWidth]`. If\n * `filterSize` is a single number, then `filterHeight == filterWidth`.\n * @param strides The strides of the pooling: `[strideHeight, strideWidth]`. If\n * `strides` is a single number, then `strideHeight == strideWidth`.\n * @param dataFormat An optional string from: \"NDHWC\", \"NCDHW\". Defaults to\n * \"NDHWC\". Specify the data format of the input and output data. With the\n * default format \"NDHWC\", the data is stored in the order of: [batch,\n * depth, height, width, channels]. Only \"NDHWC\" is currently supported.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param includeBatchIndex Defaults to False. Whether to include batch\n * dimension in flattened index of argmax.\n *\n * @doc {heading: 'Operations', subheading: 'Convolution'}\n */\nfunction maxPoolWithArgmax_(x, filterSize, strides, pad, includeBatchInIndex = false) {\n const $x = convertToTensor(x, 'x', 'maxPoolWithArgmax');\n const inputs = { x: $x };\n const attrs = { filterSize, strides, pad, includeBatchInIndex };\n const result = ENGINE.runKernel(MaxPoolWithArgmax, inputs, attrs);\n return { result: result[0], indexes: result[1] };\n}\nexport const maxPoolWithArgmax = op({ maxPoolWithArgmax_ });\n//# sourceMappingURL=max_pool_with_argmax.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { makeZerosTypedArray, sizeFromShape } from '../util';\nimport { complex } from './complex';\n/**\n * Creates a `tf.Tensor` with all elements set to 0.\n *\n * ```js\n * tf.zeros([2, 2]).print();\n * ```\n *\n * @param shape An array of integers defining the output tensor shape.\n * @param dtype The type of an element in the resulting tensor. Can\n * be 'float32', 'int32' or 'bool'. Defaults to 'float'.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function zeros(shape, dtype = 'float32') {\n if (dtype === 'complex64') {\n const real = zeros(shape, 'float32');\n const imag = zeros(shape, 'float32');\n return complex(real, imag);\n }\n const values = makeZerosTypedArray(sizeFromShape(shape), dtype);\n return ENGINE.makeTensor(values, shape, dtype);\n}\n//# sourceMappingURL=zeros.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { makeOnesTypedArray, sizeFromShape } from '../util';\nimport { complex } from './complex';\nimport { zeros } from './zeros';\n/**\n * Creates a `tf.Tensor` with all elements set to 1.\n *\n * ```js\n * tf.ones([2, 2]).print();\n * ```\n *\n * @param shape An array of integers defining the output tensor shape.\n * @param dtype The type of an element in the resulting tensor. Defaults to\n * 'float'.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function ones(shape, dtype = 'float32') {\n if (dtype === 'complex64') {\n const real = ones(shape, 'float32');\n const imag = zeros(shape, 'float32');\n return complex(real, imag);\n }\n const values = makeOnesTypedArray(sizeFromShape(shape), dtype);\n return ENGINE.makeTensor(values, shape, dtype);\n}\n//# sourceMappingURL=ones.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { customGrad } from '../gradients';\nimport { Mean } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam, sizeFromShape } from '../util';\nimport { computeOutAndReduceShapes } from './axis_util';\nimport { cast } from './cast';\nimport { div } from './div';\nimport { mul } from './mul';\nimport { ones } from './ones';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { scalar } from './scalar';\nimport { sum } from './sum';\n/**\n * Computes the mean of elements across dimensions of a `tf.Tensor`.\n *\n * Reduces `x` along the dimensions given in `axis`. Unless `keepDims` is\n * true, the rank of the `tf.Tensor` is reduced by 1 for each entry in `axis`.\n * If `keepDims` is true, the reduced dimensions are retained with length 1.\n * If `axis` has no entries, all dimensions are reduced, and a `tf.Tensor` with\n * a single element is returned.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3]);\n *\n * x.mean().print(); // or tf.mean(a)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * const axis = 1;\n * x.mean(axis).print(); // or tf.mean(x, axis)\n * ```\n *\n * @param x The input tensor.\n * @param axis The dimension(s) to reduce. 
By default it reduces\n * all dimensions.\n * @param keepDims If true, retains reduced dimensions with size 1.\n *\n * @doc {heading: 'Operations', subheading: 'Reduction'}\n */\nfunction mean_(x, axis = null, keepDims = false) {\n const $x = convertToTensor(x, 'x', 'mean');\n const axes = parseAxisParam(axis, $x.shape);\n const shapes = computeOutAndReduceShapes($x.shape, axes);\n const reduceShape = shapes[1];\n const reduceSize = sizeFromShape(reduceShape);\n const inputs = { x: $x };\n const attrs = { axis, keepDims };\n const forward = () => {\n const reduceSizeScalar = scalar(reduceSize);\n // Cast if needed.\n const xReduce = reduceSizeScalar.dtype === $x.dtype ?\n $x :\n cast($x, reduceSizeScalar.dtype);\n const res = div(xReduce, reduceSizeScalar);\n return sum(res, axis, keepDims);\n };\n // Use a custom gradient to bypass 2 gradient backprops since mean is used\n // extremely often.\n const customOp = customGrad((x) => {\n const value = ENGINE.runKernelFunc(forward, inputs, null /* grad */, Mean, attrs);\n const gradFunc = (dy) => {\n const expandedDyShape = x.shape.slice();\n axes.forEach(axis => {\n expandedDyShape[axis] = 1;\n });\n const expandedDy = reshape(dy, expandedDyShape);\n const derX = div(mul(expandedDy, ones(x.shape, 'float32')), reduceSize);\n return derX;\n };\n return { value, gradFunc };\n });\n return customOp($x);\n}\nexport const mean = op({ mean_ });\n//# sourceMappingURL=mean.js.map", "import { ENGINE } from '../engine';\nimport { Min } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam } from '../util';\nimport * as axis_util from './axis_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { transpose } from './transpose';\n/**\n * Computes the minimum value from the input.\n *\n * Reduces the input along the dimensions given in `axes`. Unless `keepDims`\n * is true, the rank of the array is reduced by 1 for each entry in `axes`.\n * If `keepDims` is true, the reduced dimensions are retained with length 1.\n * If `axes` has no entries, all dimensions are reduced, and an array with a\n * single element is returned.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3]);\n *\n * x.min().print(); // or tf.min(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * const axis = 1;\n * x.min(axis).print(); // or tf.min(x, axis)\n * ```\n *\n * @param x The input Tensor.\n * @param axis The dimension(s) to reduce. 
By default it reduces\n * all dimensions.\n * @param keepDims If true, retains reduced dimensions with size 1.\n *\n * @doc {heading: 'Operations', subheading: 'Reduction'}\n */\nfunction min_(x, axis = null, keepDims = false) {\n const $x = convertToTensor(x, 'x', 'min');\n const forward = (backend, save) => {\n const origAxes = parseAxisParam(axis, $x.shape);\n let axes = origAxes;\n const permutedAxes = axis_util.getAxesPermutation(axes, $x.rank);\n let minInput = $x;\n if (permutedAxes != null) {\n minInput = transpose($x, permutedAxes);\n axes = axis_util.getInnerMostAxes(axes.length, $x.rank);\n }\n const y = backend.min(minInput, axes);\n if (permutedAxes != null) {\n minInput.dispose();\n }\n let res = y;\n if (keepDims) {\n const expandedShape = axis_util.expandShapeToKeepDim(res.shape, origAxes);\n res = reshape(y, expandedShape);\n y.dispose();\n }\n save([$x, res]);\n return res;\n };\n const inputs = { x: $x };\n const attrs = { axis, keepDims };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Min, attrs);\n}\nexport const min = op({ min_ });\n//# sourceMappingURL=min.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Minimum } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { cast } from './cast';\nimport { op } from './operation';\n/**\n * Returns the min of a and b (`a < b ? a : b`) element-wise.\n * Supports broadcasting.\n *\n * We also expose `minimumStrict` which has the same signature as this op and\n * asserts that `a` and `b` are the same shape (does not broadcast).\n *\n * ```js\n * const a = tf.tensor1d([1, 4, 3, 16]);\n * const b = tf.tensor1d([1, 2, 9, 4]);\n *\n * a.minimum(b).print(); // or tf.minimum(a, b)\n * ```\n *\n * ```js\n * // Broadcast minimum a with b.\n * const a = tf.tensor1d([2, 4, 6, 8]);\n * const b = tf.scalar(5);\n *\n * a.minimum(b).print(); // or tf.minimum(a, b)\n * ```\n *\n * @param a The first tensor.\n * @param b The second tensor. 
Must have the same type as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction minimum_(a, b) {\n let $a = convertToTensor(a, 'a', 'minimum');\n let $b = convertToTensor(b, 'b', 'minimum');\n [$a, $b] = makeTypesMatch($a, $b);\n if ($a.dtype === 'bool') {\n $a = cast($a, 'int32');\n $b = cast($b, 'int32');\n }\n assertAndGetBroadcastShape($a.shape, $b.shape);\n const forward = (backend, save) => {\n const res = backend.minimum($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Minimum);\n}\nexport const minimum = op({ minimum_ });\n//# sourceMappingURL=minimum.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { MirrorPad } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * Pads a `tf.Tensor` using mirror padding.\n *\n * This operation implements the `REFLECT` and `SYMMETRIC` modes of pad.\n *\n * ```js\n * const x = tf.range(0, 9).reshape([1, 1, 3, 3]);\n * x.mirrorPad([[0, 0], [0, 0], [2, 2], [2, 2]], 'reflect').print();\n * ```\n * @param x The tensor to pad.\n * @param paddings An array of length `R` (the rank of the tensor), where\n * each element is a length-2 tuple of ints `[padBefore, padAfter]`,\n * specifying how much to pad along each dimension of the tensor.\n * In \"reflect\" mode, the padded regions do not include the borders,\n * while in \"symmetric\" mode the padded regions do include the borders.\n * For example, if the input is `[1, 2, 3]` and paddings is `[0, 2]`,\n * then the output is `[1, 2, 3, 2, 1]` in \"reflect\" mode, and\n * `[1, 2, 3, 3, 2]` in \"symmetric\" mode.\n * If `mode` is \"reflect\" then both `paddings[D, 0]` and `paddings[D, 1]`\n * must be no greater than `x.shape[D] - 1`. If mode is \"symmetric\"\n * then both `paddings[D, 0]` and `paddings[D, 1]` must be no greater than\n * `x.shape[D]`\n * @param mode String to specify padding mode. Can be `'reflect' | 'symmetric'`\n */\n/** @doc {heading: 'Tensors', subheading: 'Transformations'} */\nfunction mirrorPad_(x, paddings, mode) {\n util.assert(mode === 'reflect' || mode === 'symmetric', () => `Invalid mode. Mode must be either reflect or symmetric. ` +\n `Got ${mode}.`);\n const $x = convertToTensor(x, 'x', 'mirrorPad');\n if ($x.rank === 0) {\n throw new Error('mirrorPad(scalar) is not defined. ' +\n 'Pass non-scalar to mirrorPad');\n }\n util.assert(paddings.length === $x.rank, () => `Padding doesn't match input. Must be ${$x.rank}. ` +\n `Got ${paddings.length}.`);\n const shapeOffset = mode === 'reflect' ? 1 : 0;\n for (let i = 0; i < $x.rank; i++) {\n util.assert(paddings[i].length === 2, () => `Invalid number of paddings. 
Must be length of 2 each.`);\n util.assert(paddings[i][0] >= 0 && paddings[i][0] <= $x.shape[i] - shapeOffset &&\n paddings[i][1] >= 0 && paddings[i][1] <= $x.shape[i] - shapeOffset, () => `Padding in dimension ${i} cannot be greater than or equal ` +\n `to ${$x.shape[i] - shapeOffset} or less than 0 for input of ` +\n `shape ${$x.shape}`);\n }\n const attrs = { paddings, mode };\n const inputs = { x: $x };\n return ENGINE.runKernel(MirrorPad, inputs, attrs);\n}\nexport const mirrorPad = op({ mirrorPad_ });\n//# sourceMappingURL=mirror_pad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Mod } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Returns the mod of a and b element-wise.\n * `floor(x / y) * y + mod(x, y) = x`\n * Supports broadcasting.\n *\n * We also expose `tf.modStrict` which has the same signature as this op and\n * asserts that `a` and `b` are the same shape (does not broadcast).\n *\n * ```js\n * const a = tf.tensor1d([1, 4, 3, 16]);\n * const b = tf.tensor1d([1, 2, 9, 4]);\n *\n * a.mod(b).print(); // or tf.mod(a, b)\n * ```\n *\n * ```js\n * // Broadcast a mod b.\n * const a = tf.tensor1d([2, 4, 6, 8]);\n * const b = tf.scalar(5);\n *\n * a.mod(b).print(); // or tf.mod(a, b)\n * ```\n *\n * @param a The first tensor.\n * @param b The second tensor. Must have the same type as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction mod_(a, b) {\n let $a = convertToTensor(a, 'a', 'mod');\n let $b = convertToTensor(b, 'b', 'mod');\n [$a, $b] = makeTypesMatch($a, $b);\n const forward = (backend, save) => {\n const res = backend.mod($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Mod);\n}\nexport const mod = op({ mod_ });\n//# sourceMappingURL=mod.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes square of `x` element-wise: `x ^ 2`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, Math.sqrt(2), -1]);\n *\n * x.square().print(); // or tf.square(x)\n * ```\n * @param x The input Tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction square_(x) {\n const $x = convertToTensor(x, 'x', 'square');\n const attrs = {};\n const inputsToSave = [$x];\n const outputsToSave = [];\n return ENGINE.runKernelFunc((backend, save) => {\n save([$x]);\n return backend.square($x);\n }, { x: $x }, null /* grad */, 'Square', attrs, inputsToSave, outputsToSave);\n}\nexport const square = op({ square_ });\n//# sourceMappingURL=square.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam } from '../util';\nimport { expandShapeToKeepDim } from './axis_util';\nimport { cast } from './cast';\nimport { mean } from './mean';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { square } from './square';\nimport { sub } from './sub';\n/**\n * Calculates the mean and variance of `x`. The mean and variance are\n * calculated by aggregating the contents of `x` across `axes`. If `x` is\n * 1-D and `axes = [0]` this is just the mean and variance of a vector.\n *\n * @param x The input tensor.\n * @param axis The dimension(s) along with to compute mean and\n * variance. 
By default it reduces all dimensions.\n * @param keepDims If true, the moments have the same dimensionality as the\n * input.\n * @return An object with two keys: `mean` and `variance`.\n *\n * @doc {heading: 'Operations', subheading: 'Normalization'}\n */\nfunction moments_(x, axis = null, keepDims = false) {\n x = convertToTensor(x, 'x', 'moments');\n const axes = parseAxisParam(axis, x.shape);\n const xMean = mean(x, axes, keepDims);\n let keepDimsShape = xMean.shape;\n if (!keepDims) {\n keepDimsShape = expandShapeToKeepDim(xMean.shape, axes);\n }\n const devSquared = square(sub(cast(x, 'float32'), reshape(xMean, keepDimsShape)));\n const variance = mean(devSquared, axes, keepDims);\n return { mean: xMean, variance };\n}\nexport const moments = op({ moments_ });\n//# sourceMappingURL=moments.js.map", "import { convertToTensor, convertToTensorArray } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes the next states and outputs of a stack of LSTMCells.\n *\n * Each cell output is used as input to the next cell.\n *\n * Returns `[cellState, cellOutput]`.\n *\n * Derived from tf.contrib.rn.MultiRNNCell.\n *\n * @param lstmCells Array of LSTMCell functions.\n * @param data The input to the cell.\n * @param c Array of previous cell states.\n * @param h Array of previous cell outputs.\n *\n * @doc {heading: 'Operations', subheading: 'RNN'}\n */\nfunction multiRNNCell_(lstmCells, data, c, h) {\n const $data = convertToTensor(data, 'data', 'multiRNNCell');\n const $c = convertToTensorArray(c, 'c', 'multiRNNCell');\n const $h = convertToTensorArray(h, 'h', 'multiRNNCell');\n let input = $data;\n const newStates = [];\n for (let i = 0; i < lstmCells.length; i++) {\n const output = lstmCells[i](input, $c[i], $h[i]);\n newStates.push(output[0]);\n newStates.push(output[1]);\n input = output[1];\n }\n const newC = [];\n const newH = [];\n for (let i = 0; i < newStates.length; i += 2) {\n newC.push(newStates[i]);\n newH.push(newStates[i + 1]);\n }\n return [newC, newH];\n}\nexport const multiRNNCell = op({ multiRNNCell_ });\n//# sourceMappingURL=multi_rnn_cell.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Creates a `tf.Tensor` with values drawn from a multinomial distribution.\n *\n * ```js\n * const probs = tf.tensor([.75, .25]);\n * tf.multinomial(probs, 3).print();\n * ```\n *\n * @param logits 1D array with unnormalized log-probabilities, or\n * 2D array of shape `[batchSize, numOutcomes]`. 
See the `normalized`\n * parameter.\n * @param numSamples Number of samples to draw for each row slice.\n * @param seed The seed number.\n * @param normalized Whether the provided `logits` are normalized true\n * probabilities (sum to 1). Defaults to false.\n * @return 1D array of shape `[numSamples]`, or 2D array of shape\n * `[batchSize, numSamples]`, depending on the rank of the input.\n *\n * @doc {heading: 'Tensors', subheading: 'Random'}\n */\nfunction multinomial_(logits, numSamples, seed, normalized = false) {\n const $logits = convertToTensor(logits, 'logits', 'multinomial');\n const numOutcomes = $logits.size;\n const origRank = $logits.rank;\n if (numOutcomes < 2) {\n throw new Error(`Error in multinomial: you need at least 2 outcomes, but got ` +\n `${numOutcomes}.`);\n }\n if (origRank > 2) {\n throw new Error(`Rank of probabilities must be 1 or 2, but is ${origRank}`);\n }\n seed = seed || Math.random();\n const logits2D = origRank === 1 ? reshape($logits, [1, -1]) : $logits;\n const res = ENGINE.runKernelFunc(backend => backend.multinomial(logits2D, normalized, numSamples, seed), { logits2D });\n // tslint:disable-next-line:no-unnecessary-type-assertion\n return origRank === 1 ? reshape(res, [res.size]) : res;\n}\nexport const multinomial = op({ multinomial_ });\n//# sourceMappingURL=multinomial.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { NotEqual } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { op } from './operation';\n/**\n * Returns the truth value of (a != b) element-wise. Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3]);\n * const b = tf.tensor1d([0, 2, 3]);\n *\n * a.notEqual(b).print();\n * ```\n * @param a The first input tensor.\n * @param b The second input tensor. Must have the same dtype as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Logical'}\n */\nfunction notEqual_(a, b) {\n let $a = convertToTensor(a, 'a', 'notEqual');\n let $b = convertToTensor(b, 'b', 'notEqual');\n [$a, $b] = makeTypesMatch($a, $b);\n assertAndGetBroadcastShape($a.shape, $b.shape);\n const forward = (backend) => backend.notEqual($a, $b);\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, NotEqual);\n}\nexport const notEqual = op({ notEqual_ });\n//# sourceMappingURL=not_equal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Real } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Returns the real part of a complex (or real) tensor.\n *\n * Given a tensor input, this operation returns a tensor of type float that is\n * the real part of each element in input considered as a complex number.\n *\n * If the input is real, it simply makes a clone.\n *\n * ```js\n * const x = tf.complex([-2.25, 3.25], [4.75, 5.75]);\n * tf.real(x).print();\n * ```\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nfunction real_(input) {\n const $input = convertToTensor(input, 'input', 'real');\n const forward = (backend) => {\n return backend.real($input);\n };\n const inputs = { input: $input };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Real);\n}\nexport const real = op({ real_ });\n//# sourceMappingURL=real.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { OnesLike } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { complex } from './complex';\nimport { imag } from './imag';\nimport { op } from './operation';\nimport { real } from './real';\nimport { zerosLike } from './zeros_like';\n/**\n * Creates a `tf.Tensor` with all elements set to 1 with the same shape as the\n * given tensor.\n *\n * ```js\n * const x = tf.tensor([1, 2]);\n * tf.onesLike(x).print();\n * ```\n * @param x A tensor.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nfunction onesLike_(x) {\n const $x = convertToTensor(x, 'x', 'onesLike');\n const forward = (backend, save) => {\n if ($x.dtype === 'complex64') {\n const r = onesLike(real($x));\n const i = zerosLike(imag($x));\n return complex(r, i);\n }\n return backend.onesLike($x);\n };\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, OnesLike);\n}\nexport const onesLike = op({ onesLike_ });\n//# sourceMappingURL=ones_like.js.map", "import { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { matMul } from './mat_mul';\nimport { op } from 
'./operation';\nimport { reshape } from './reshape';\n/**\n * Computes the outer product of two vectors, `v1` and `v2`.\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3]);\n * const b = tf.tensor1d([3, 4, 5]);\n *\n * tf.outerProduct(a, b).print();\n * ```\n * @param v1 The first vector in the outer product operation.\n * @param v2 The second vector in the outer product operation.\n *\n * @doc {heading: 'Operations', subheading: 'Matrices'}\n */\nfunction outerProduct_(v1, v2) {\n const $v1 = convertToTensor(v1, 'v1', 'outerProduct');\n const $v2 = convertToTensor(v2, 'v2', 'outerProduct');\n util.assert($v1.rank === 1 && $v2.rank === 1, () => `Error in outerProduct: inputs must be rank 1, but got ranks ` +\n `${$v1.rank} and ${$v2.rank}.`);\n const v12D = reshape($v1, [-1, 1]);\n const v22D = reshape($v2, [1, -1]);\n return matMul(v12D, v22D);\n}\nexport const outerProduct = op({ outerProduct_ });\n//# sourceMappingURL=outer_product.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { PadV2 } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Pads a `tf.Tensor` with a given value and paddings.\n *\n * This operation implements `CONSTANT` mode. For `REFLECT` and `SYMMETRIC`,\n * refer to `tf.mirrorPad`\n *\n * Also available are stricter rank-specific methods with the same signature\n * as this method that assert that `paddings` is of given length.\n * - `tf.pad1d`\n * - `tf.pad2d`\n * - `tf.pad3d`\n * - `tf.pad4d`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4]);\n * x.pad([[1, 2]]).print();\n * ```\n * @param x The tensor to pad.\n * @param paddings An array of length `R` (the rank of the tensor), where\n * each element is a length-2 tuple of ints `[padBefore, padAfter]`,\n * specifying how much to pad along each dimension of the tensor.\n * @param constantValue The pad value to use. Defaults to 0.\n *\n * @doc {heading: 'Tensors', subheading: 'Transformations'}\n */\nfunction pad_(x, paddings, constantValue = 0) {\n const $x = convertToTensor(x, 'x', 'pad');\n if ($x.rank === 0) {\n throw new Error('pad(scalar) is not defined. Pass non-scalar to pad');\n }\n const forward = (backend, save) => {\n save([$x]);\n return backend.pad($x, paddings, constantValue);\n };\n const attrs = { paddings, constantValue };\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, PadV2, attrs);\n}\nexport const pad = op({ pad_ });\n//# sourceMappingURL=pad.js.map", "import { assert } from '../util';\nimport { op } from './operation';\nimport { pad } from './pad';\n/**\n * Pads a `tf.Tensor1D` with a given value and paddings. See `pad` for details.\n */\nfunction pad1d_(x, paddings, constantValue = 0) {\n assert(paddings.length === 2, () => 'Invalid number of paddings. 
Must be length of 2.');\n return pad(x, [paddings], constantValue);\n}\nexport const pad1d = op({ pad1d_ });\n//# sourceMappingURL=pad1d.js.map", "import { assert } from '../util';\nimport { op } from './operation';\nimport { pad } from './pad';\n/**\n * Pads a `tf.Tensor2D` with a given value and paddings. See `pad` for details.\n */\nfunction pad2d_(x, paddings, constantValue = 0) {\n assert(paddings.length === 2 && paddings[0].length === 2 &&\n paddings[1].length === 2, () => 'Invalid number of paddings. Must be length of 2 each.');\n return pad(x, paddings, constantValue);\n}\nexport const pad2d = op({ pad2d_ });\n//# sourceMappingURL=pad2d.js.map", "import { assert } from '../util';\nimport { op } from './operation';\nimport { pad } from './pad';\n/**\n * Pads a `tf.Tensor3D` with a given value and paddings. See `pad` for details.\n */\nfunction pad3d_(x, paddings, constantValue = 0) {\n assert(paddings.length === 3 && paddings[0].length === 2 &&\n paddings[1].length === 2 && paddings[2].length === 2, () => 'Invalid number of paddings. Must be length of 2 each.');\n return pad(x, paddings, constantValue);\n}\nexport const pad3d = op({ pad3d_ });\n//# sourceMappingURL=pad3d.js.map", "import { assert } from '../util';\nimport { op } from './operation';\nimport { pad } from './pad';\n/**\n * Pads a `tf.Tensor4D` with a given value and paddings. See `pad` for details.\n */\nfunction pad4d_(x, paddings, constantValue = 0) {\n assert(paddings.length === 4 && paddings[0].length === 2 &&\n paddings[1].length === 2 && paddings[2].length === 2 &&\n paddings[3].length === 2, () => 'Invalid number of paddings. Must be length of 2 each.');\n return pad(x, paddings, constantValue);\n}\nexport const pad4d = op({ pad4d_ });\n//# sourceMappingURL=pad4d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { SpaceToBatchND } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * This operation divides \"spatial\" dimensions `[1, ..., M]` of the input into\n * a grid of blocks of shape `blockShape`, and interleaves these blocks with\n * the \"batch\" dimension (0) such that in the output, the spatial\n * dimensions `[1, ..., M]` correspond to the position within the grid,\n * and the batch dimension combines both the position within a spatial block\n * and the original batch position. Prior to division into blocks,\n * the spatial dimensions of the input are optionally zero padded\n * according to `paddings`. See below for a precise description.\n *\n * ```js\n * const x = tf.tensor4d([1, 2, 3, 4], [1, 2, 2, 1]);\n * const blockShape = [2, 2];\n * const paddings = [[0, 0], [0, 0]];\n *\n * x.spaceToBatchND(blockShape, paddings).print();\n * ```\n *\n * @param x A `tf.Tensor`. 
N-D with `x.shape` = `[batch] + spatialShape +\n * remainingShape`, where spatialShape has `M` dimensions.\n * @param blockShape A 1-D array. Must have shape `[M]`, all values must\n * be >= 1.\n * @param paddings A 2-D array. Must have shape `[M, 2]`, all values must be >=\n * 0. `paddings[i] = [padStart, padEnd]` specifies the amount to zero-pad\n * from input dimension `i + 1`, which corresponds to spatial dimension `i`. It\n * is required that\n * `(inputShape[i + 1] + padStart + padEnd) % blockShape[i] === 0`\n *\n * This operation is equivalent to the following steps:\n *\n * 1. Zero-pad the start and end of dimensions `[1, ..., M]` of the input\n * according to `paddings` to produce `padded` of shape paddedShape.\n *\n * 2. Reshape `padded` to `reshapedPadded` of shape:\n * `[batch] + [paddedShape[1] / blockShape[0], blockShape[0], ...,\n * paddedShape[M] / blockShape[M-1], blockShape[M-1]] + remainingShape`\n *\n * 3. Permute dimensions of `reshapedPadded` to produce `permutedReshapedPadded`\n * of shape: `blockShape + [batch] + [paddedShape[1] / blockShape[0], ...,\n * paddedShape[M] / blockShape[M-1]] + remainingShape`\n *\n * 4. Reshape `permutedReshapedPadded` to flatten `blockShape` into the\n * batch dimension, producing an output tensor of shape:\n * `[batch * prod(blockShape)] + [paddedShape[1] / blockShape[0], ...,\n * paddedShape[M] / blockShape[M-1]] + remainingShape`\n *\n * @doc {heading: 'Tensors', subheading: 'Transformations'}\n */\nfunction spaceToBatchND_(x, blockShape, paddings) {\n const $x = convertToTensor(x, 'x', 'spaceToBatchND');\n util.assert($x.rank >= 1 + blockShape.length, () => `input rank ${$x.rank} should be > than [blockShape] ${blockShape.length}`);\n util.assert(paddings.length === blockShape.length, () => `paddings.shape[0] ${paddings.length} must be equal to [blockShape] ${blockShape.length}`);\n util.assert($x.shape.reduce((a, b, i) => {\n if (i > 0 && i <= blockShape.length) {\n return a &&\n ((b + paddings[i - 1][0] + paddings[i - 1][1]) %\n blockShape[i - 1] ===\n 0);\n }\n return a;\n }, true), () => `input spatial dimensions ${$x.shape.slice(1)} with paddings ${paddings.toString()} must be divisible by blockShapes ${blockShape.toString()}`);\n const forward = backend => backend.spaceToBatchND($x, blockShape, paddings);\n const inputs = { x: $x };\n const attrs = { blockShape, paddings };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, SpaceToBatchND, attrs);\n}\nexport const spaceToBatchND = op({ spaceToBatchND_ });\n//# sourceMappingURL=space_to_batch_nd.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { avgPool } from './avg_pool';\nimport { batchToSpaceND } from './batch_to_space_nd';\nimport * as conv_util from './conv_util';\nimport { maxPool } from './max_pool';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { spaceToBatchND } from './space_to_batch_nd';\n/**\n * Performs an N-D pooling operation\n *\n * @param input The input tensor, of rank 4 or rank 3 of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is assumed.\n * @param windowShape The filter size: `[filterHeight, filterWidth]`. If\n * `filterSize` is a single number, then `filterHeight == filterWidth`.\n * @param poolingType The type of pooling, either 'max' or 'avg'.\n * @param pad The type of padding algorithm:\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dilations The dilation rates: `[dilationHeight, dilationWidth]`\n * in which we sample input values across the height and width dimensions\n * in dilated pooling. Defaults to `[1, 1]`. If `dilationRate` is a single\n * number, then `dilationHeight == dilationWidth`. If it is greater than\n * 1, then all values of `strides` must be 1.\n * @param strides The strides of the pooling: `[strideHeight, strideWidth]`. If\n * `strides` is a single number, then `strideHeight == strideWidth`.\n *\n * @doc {heading: 'Operations', subheading: 'Convolution'}\n */\nfunction pool_(input, windowShape, poolingType, pad, dilations, strides) {\n if (dilations == null) {\n dilations = [1, 1];\n }\n if (strides == null) {\n strides = 1;\n }\n if (pad === 0) {\n pad = 'valid';\n }\n const $x = convertToTensor(input, 'x', 'maxPool');\n let x4D = $x;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n reshapedTo4D = true;\n x4D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2]]);\n }\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in pool: Either strides or dilations must be 1. 
' +\n `Got strides ${strides} and dilations '${dilations}'`);\n const convInfo = conv_util.computePool2DInfo(x4D.shape, windowShape, strides, dilations, pad);\n const dilation = [convInfo.dilationHeight, convInfo.dilationWidth];\n // The following implementation does batchToSpace(pool(spaceToBatch(x)))\n // whenever dilation > 1 since the TF kernels do not support dilation > 1.\n // tslint:disable-next-line:max-line-length\n // https://github.com/tensorflow/tensorflow/blob/50f6bb67dc98c9b74630b6047aae7a4f8a40fd02/tensorflow/python/ops/nn_ops.py#L1037\n let basePadding;\n if (pad === 'same') {\n basePadding = withSpaceToBatchBasePaddings([convInfo.filterHeight, convInfo.filterWidth], dilation);\n }\n else {\n basePadding = [[0, 0], [0, 0]];\n }\n const isDilationOne = dilation[0] === 1 && dilation[1] === 1;\n const [adjustedPadding, adjustedCrops] = requiredSpaceToBatchPaddings([convInfo.inHeight, convInfo.inWidth], dilation, basePadding);\n const convertedPad = isDilationOne ? pad : 'valid';\n const convertedX = isDilationOne ? x4D : spaceToBatchND(x4D, dilation, adjustedPadding);\n const forwardOp = poolingType === 'avg' ?\n () => avgPool(convertedX, windowShape, strides, convertedPad) :\n () => maxPool(convertedX, windowShape, strides, convertedPad);\n const y = forwardOp();\n const res = isDilationOne ? y : batchToSpaceND(y, dilation, adjustedCrops);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\n// Helper function to compute crops and paddings for pool with dilation > 1.\n// tslint:disable-next-line:max-line-length\n// https://github.com/tensorflow/tensorflow/blob/50f6bb67dc98c9b74630b6047aae7a4f8a40fd02/tensorflow/python/ops/array_ops.py#L2184\nfunction requiredSpaceToBatchPaddings(inputShape, blockShape, basePadding) {\n const padStart = basePadding.map(b => b[0]);\n const origPadEnd = basePadding.map(b => b[1]);\n const fullInputShape = inputShape.concat(padStart, origPadEnd);\n const padEndExtra = blockShape.map((b, i) => (b - fullInputShape[i] % b) % b);\n const padEnd = origPadEnd.map((s, i) => s + padEndExtra[i]);\n const paddings = blockShape.map((_, i) => [padStart[i], padEnd[i]]);\n const crops = blockShape.map((_, i) => [0, padEndExtra[i]]);\n return [paddings, crops];\n}\n// Helper function to compute base paddings for pool with dilation > 1.\n// tslint:disable-next-line:max-line-length\n// https://github.com/tensorflow/tensorflow/blob/50f6bb67dc98c9b74630b6047aae7a4f8a40fd02/tensorflow/python/ops/nn_ops.py#L524\nfunction withSpaceToBatchBasePaddings(filterShape, dilation) {\n // Spatial dimensions of the filters and the upsampled filters in which we\n // introduce (rate - 1) zeros between consecutive filter values.\n const dilatedFilterShape = filterShape.map((s, i) => {\n return s + (s - 1) * (dilation[i] - 1);\n });\n const padExtraShape = dilatedFilterShape.map(s => s - 1);\n // When padding is odd, we pad more at end, following the same\n // convention as conv2d.\n const padExtraStart = padExtraShape.map(s => Math.floor(s / 2));\n const padExtraEnd = padExtraShape.map((s, i) => s - padExtraStart[i]);\n return padExtraShape.map((_, i) => {\n return [padExtraStart[i], padExtraEnd[i]];\n });\n}\nexport const pool = op({ pool_ });\n//# sourceMappingURL=pool.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Pow } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes the power of one `tf.Tensor` to another. Supports broadcasting.\n *\n * Given a `tf.Tensor` x and a `tf.Tensor` y, this operation computes x^y for\n * corresponding elements in x and y. The result's dtype will be the upcasted\n * type of the `base` and `exp` dtypes.\n *\n * ```js\n * const a = tf.tensor([[2, 3], [4, 5]])\n * const b = tf.tensor([[1, 2], [3, 0]]).toInt();\n *\n * a.pow(b).print(); // or tf.pow(a, b)\n * ```\n *\n * ```js\n * const a = tf.tensor([[1, 2], [3, 4]])\n * const b = tf.tensor(2).toInt();\n *\n * a.pow(b).print(); // or tf.pow(a, b)\n * ```\n * We also expose `powStrict` which has the same signature as this op and\n * asserts that `base` and `exp` are the same shape (does not broadcast).\n *\n * @param base The base `tf.Tensor` to pow element-wise.\n * @param exp The exponent `tf.Tensor` to pow element-wise.\n *\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction pow_(base, exp) {\n let $base = convertToTensor(base, 'base', 'pow');\n let $exp = convertToTensor(exp, 'exp', 'pow');\n [$base, $exp] = makeTypesMatch($base, $exp);\n const inputs = { a: $base, b: $exp };\n const forward = (backend, save) => {\n const y = backend.pow($base, $exp);\n save([$base, $exp, y]);\n return y;\n };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Pow);\n}\nexport const pow = op({ pow_ });\n//# sourceMappingURL=pow.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Prelu } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes leaky rectified linear element-wise with parametric alphas.\n *\n * `x < 0 ? 
alpha * x : f(x) = x`\n *\n * ```js\n * const x = tf.tensor1d([-1, 2, -3, 4]);\n * const alpha = tf.scalar(0.1);\n *\n * x.prelu(alpha).print(); // or tf.prelu(x, alpha)\n * ```\n * @param x The input tensor.\n * @param alpha Scaling factor for negative values.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction prelu_(x, alpha) {\n const $x = convertToTensor(x, 'x', 'prelu');\n const $alpha = convertToTensor(alpha, 'alpha', 'prelu');\n const forward = (backend, save) => {\n const res = backend.prelu($x, $alpha);\n save([$x, $alpha]);\n return res;\n };\n const inputs = { x: $x, alpha: $alpha };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Prelu);\n}\nexport const prelu = op({ prelu_ });\n//# sourceMappingURL=prelu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Prod } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam } from '../util';\nimport { expandShapeToKeepDim, getAxesPermutation, getInnerMostAxes } from './axis_util';\nimport { cast } from './cast';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { transpose } from './transpose';\n/**\n * Computes the product of elements across dimensions of a `tf.Tensor`.\n *\n * Reduces the input along the dimensions given in `axes`. Unless `keepDims`\n * is true, the rank of the `tf.Tensor` is reduced by 1 for each entry in\n * `axes`. If `keepDims` is true, the reduced dimensions are retained with\n * length 1. If `axes` has no entries, all dimensions are reduced, and a\n * `tf.Tensor` with a single element is returned.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3]);\n *\n * x.prod().print(); // or tf.prod(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * const axis = 1;\n * x.prod(axis).print(); // or tf.prod(x, axis)\n * ```\n *\n * @param x The input tensor to compute the product over. If the dtype is `bool`\n * it will be converted to `int32` and the output dtype will be `int32`.\n * @param axis The dimension(s) to reduce. 
By default it reduces\n * all dimensions.\n * @param keepDims If true, retains reduced dimensions with size 1.\n *\n * @doc {heading: 'Operations', subheading: 'Reduction'}\n */\nfunction prod_(x, axis = null, keepDims = false) {\n let $x = convertToTensor(x, 'x', 'prod');\n if ($x.dtype === 'bool') {\n // bool is not an allowed type for the underlying kernel.\n $x = cast($x, 'int32');\n }\n const forward = (backend) => {\n const axes = parseAxisParam(axis, $x.shape);\n const permutation = getAxesPermutation(axes, $x.rank);\n let reductionAxes = axes;\n let permutedX = $x;\n if (permutation != null) {\n permutedX = transpose($x, permutation);\n reductionAxes = getInnerMostAxes(reductionAxes.length, $x.rank);\n }\n let value = backend.prod(permutedX, reductionAxes);\n if (keepDims) {\n const newShape = expandShapeToKeepDim(value.shape, axes);\n value = reshape(value, newShape);\n }\n return value;\n };\n const inputs = { x: $x };\n const attrs = { axis, keepDims };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Prod, attrs);\n}\nexport const prod = op({ prod_ });\n//# sourceMappingURL=prod.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { sizeFromShape } from '../util';\nimport { op } from './operation';\n/**\n * Creates a `tf.Tensor` with values sampled from a random number generator\n * function defined by the user.\n *\n * @param shape An array of integers defining the output tensor shape.\n * @param randFunction A random number generator function which is called\n * for each element in the output tensor.\n * @param dtype The data type of the output tensor. 
Defaults to 'float32'.\n */\nfunction rand_(shape, randFunction, dtype) {\n const size = sizeFromShape(shape);\n let values = null;\n if (dtype == null || dtype === 'float32') {\n values = new Float32Array(size);\n }\n else if (dtype === 'int32') {\n values = new Int32Array(size);\n }\n else if (dtype === 'bool') {\n values = new Uint8Array(size);\n }\n else {\n throw new Error(`Unknown data type ${dtype}`);\n }\n for (let i = 0; i < size; i++) {\n values[i] = randFunction();\n }\n return ENGINE.makeTensor(values, shape, dtype);\n}\nexport const rand = op({ rand_ });\n//# sourceMappingURL=rand.js.map", "// A port of an algorithm by Johannes Baag\u00F8e , 2010\n// http://baagoe.com/en/RandomMusings/javascript/\n// https://github.com/nquinlan/better-random-numbers-for-javascript-mirror\n// Original work is under MIT license -\n\n// Copyright (C) 2010 by Johannes Baag\u00F8e \n//\n// Permission is hereby granted, free of charge, to any person obtaining a copy\n// of this software and associated documentation files (the \"Software\"), to deal\n// in the Software without restriction, including without limitation the rights\n// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n// copies of the Software, and to permit persons to whom the Software is\n// furnished to do so, subject to the following conditions:\n// \n// The above copyright notice and this permission notice shall be included in\n// all copies or substantial portions of the Software.\n// \n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n// THE SOFTWARE.\n\n\n\n(function(global, module, define) {\n\nfunction Alea(seed) {\n var me = this, mash = Mash();\n\n me.next = function() {\n var t = 2091639 * me.s0 + me.c * 2.3283064365386963e-10; // 2^-32\n me.s0 = me.s1;\n me.s1 = me.s2;\n return me.s2 = t - (me.c = t | 0);\n };\n\n // Apply the seeding algorithm from Baagoe.\n me.c = 1;\n me.s0 = mash(' ');\n me.s1 = mash(' ');\n me.s2 = mash(' ');\n me.s0 -= mash(seed);\n if (me.s0 < 0) { me.s0 += 1; }\n me.s1 -= mash(seed);\n if (me.s1 < 0) { me.s1 += 1; }\n me.s2 -= mash(seed);\n if (me.s2 < 0) { me.s2 += 1; }\n mash = null;\n}\n\nfunction copy(f, t) {\n t.c = f.c;\n t.s0 = f.s0;\n t.s1 = f.s1;\n t.s2 = f.s2;\n return t;\n}\n\nfunction impl(seed, opts) {\n var xg = new Alea(seed),\n state = opts && opts.state,\n prng = xg.next;\n prng.int32 = function() { return (xg.next() * 0x100000000) | 0; }\n prng.double = function() {\n return prng() + (prng() * 0x200000 | 0) * 1.1102230246251565e-16; // 2^-53\n };\n prng.quick = prng;\n if (state) {\n if (typeof(state) == 'object') copy(state, xg);\n prng.state = function() { return copy(xg, {}); }\n }\n return prng;\n}\n\nfunction Mash() {\n var n = 0xefc8249d;\n\n var mash = function(data) {\n data = data.toString();\n for (var i = 0; i < data.length; i++) {\n n += data.charCodeAt(i);\n var h = 0.02519603282416938 * n;\n n = h >>> 0;\n h -= n;\n h *= n;\n n = h >>> 0;\n h -= n;\n n += h * 0x100000000; // 2^32\n }\n return (n >>> 0) * 2.3283064365386963e-10; // 2^-32\n };\n\n return mash;\n}\n\n\nif (module && module.exports) {\n module.exports = impl;\n} 
else if (define && define.amd) {\n define(function() { return impl; });\n} else {\n this.alea = impl;\n}\n\n})(\n this,\n (typeof module) == 'object' && module, // present in node.js\n (typeof define) == 'function' && define // present with an AMD loader\n);\n\n\n", "// A Javascript implementaion of the \"xor128\" prng algorithm by\n// George Marsaglia. See http://www.jstatsoft.org/v08/i14/paper\n\n(function(global, module, define) {\n\nfunction XorGen(seed) {\n var me = this, strseed = '';\n\n me.x = 0;\n me.y = 0;\n me.z = 0;\n me.w = 0;\n\n // Set up generator function.\n me.next = function() {\n var t = me.x ^ (me.x << 11);\n me.x = me.y;\n me.y = me.z;\n me.z = me.w;\n return me.w ^= (me.w >>> 19) ^ t ^ (t >>> 8);\n };\n\n if (seed === (seed | 0)) {\n // Integer seed.\n me.x = seed;\n } else {\n // String seed.\n strseed += seed;\n }\n\n // Mix in string seed, then discard an initial batch of 64 values.\n for (var k = 0; k < strseed.length + 64; k++) {\n me.x ^= strseed.charCodeAt(k) | 0;\n me.next();\n }\n}\n\nfunction copy(f, t) {\n t.x = f.x;\n t.y = f.y;\n t.z = f.z;\n t.w = f.w;\n return t;\n}\n\nfunction impl(seed, opts) {\n var xg = new XorGen(seed),\n state = opts && opts.state,\n prng = function() { return (xg.next() >>> 0) / 0x100000000; };\n prng.double = function() {\n do {\n var top = xg.next() >>> 11,\n bot = (xg.next() >>> 0) / 0x100000000,\n result = (top + bot) / (1 << 21);\n } while (result === 0);\n return result;\n };\n prng.int32 = xg.next;\n prng.quick = prng;\n if (state) {\n if (typeof(state) == 'object') copy(state, xg);\n prng.state = function() { return copy(xg, {}); }\n }\n return prng;\n}\n\nif (module && module.exports) {\n module.exports = impl;\n} else if (define && define.amd) {\n define(function() { return impl; });\n} else {\n this.xor128 = impl;\n}\n\n})(\n this,\n (typeof module) == 'object' && module, // present in node.js\n (typeof define) == 'function' && define // present with an AMD loader\n);\n\n\n", "// A Javascript implementaion of the \"xorwow\" prng algorithm by\n// George Marsaglia. 
See http://www.jstatsoft.org/v08/i14/paper\n\n(function(global, module, define) {\n\nfunction XorGen(seed) {\n var me = this, strseed = '';\n\n // Set up generator function.\n me.next = function() {\n var t = (me.x ^ (me.x >>> 2));\n me.x = me.y; me.y = me.z; me.z = me.w; me.w = me.v;\n return (me.d = (me.d + 362437 | 0)) +\n (me.v = (me.v ^ (me.v << 4)) ^ (t ^ (t << 1))) | 0;\n };\n\n me.x = 0;\n me.y = 0;\n me.z = 0;\n me.w = 0;\n me.v = 0;\n\n if (seed === (seed | 0)) {\n // Integer seed.\n me.x = seed;\n } else {\n // String seed.\n strseed += seed;\n }\n\n // Mix in string seed, then discard an initial batch of 64 values.\n for (var k = 0; k < strseed.length + 64; k++) {\n me.x ^= strseed.charCodeAt(k) | 0;\n if (k == strseed.length) {\n me.d = me.x << 10 ^ me.x >>> 4;\n }\n me.next();\n }\n}\n\nfunction copy(f, t) {\n t.x = f.x;\n t.y = f.y;\n t.z = f.z;\n t.w = f.w;\n t.v = f.v;\n t.d = f.d;\n return t;\n}\n\nfunction impl(seed, opts) {\n var xg = new XorGen(seed),\n state = opts && opts.state,\n prng = function() { return (xg.next() >>> 0) / 0x100000000; };\n prng.double = function() {\n do {\n var top = xg.next() >>> 11,\n bot = (xg.next() >>> 0) / 0x100000000,\n result = (top + bot) / (1 << 21);\n } while (result === 0);\n return result;\n };\n prng.int32 = xg.next;\n prng.quick = prng;\n if (state) {\n if (typeof(state) == 'object') copy(state, xg);\n prng.state = function() { return copy(xg, {}); }\n }\n return prng;\n}\n\nif (module && module.exports) {\n module.exports = impl;\n} else if (define && define.amd) {\n define(function() { return impl; });\n} else {\n this.xorwow = impl;\n}\n\n})(\n this,\n (typeof module) == 'object' && module, // present in node.js\n (typeof define) == 'function' && define // present with an AMD loader\n);\n\n\n", "// A Javascript implementaion of the \"xorshift7\" algorithm by\n// Fran\u00E7ois Panneton and Pierre L'ecuyer:\n// \"On the Xorgshift Random Number Generators\"\n// http://saluc.engr.uconn.edu/refs/crypto/rng/panneton05onthexorshift.pdf\n\n(function(global, module, define) {\n\nfunction XorGen(seed) {\n var me = this;\n\n // Set up generator function.\n me.next = function() {\n // Update xor generator.\n var X = me.x, i = me.i, t, v, w;\n t = X[i]; t ^= (t >>> 7); v = t ^ (t << 24);\n t = X[(i + 1) & 7]; v ^= t ^ (t >>> 10);\n t = X[(i + 3) & 7]; v ^= t ^ (t >>> 3);\n t = X[(i + 4) & 7]; v ^= t ^ (t << 7);\n t = X[(i + 7) & 7]; t = t ^ (t << 13); v ^= t ^ (t << 9);\n X[i] = v;\n me.i = (i + 1) & 7;\n return v;\n };\n\n function init(me, seed) {\n var j, w, X = [];\n\n if (seed === (seed | 0)) {\n // Seed state array using a 32-bit integer.\n w = X[0] = seed;\n } else {\n // Seed state using a string.\n seed = '' + seed;\n for (j = 0; j < seed.length; ++j) {\n X[j & 7] = (X[j & 7] << 15) ^\n (seed.charCodeAt(j) + X[(j + 1) & 7] << 13);\n }\n }\n // Enforce an array length of 8, not all zeroes.\n while (X.length < 8) X.push(0);\n for (j = 0; j < 8 && X[j] === 0; ++j);\n if (j == 8) w = X[7] = -1; else w = X[j];\n\n me.x = X;\n me.i = 0;\n\n // Discard an initial 256 values.\n for (j = 256; j > 0; --j) {\n me.next();\n }\n }\n\n init(me, seed);\n}\n\nfunction copy(f, t) {\n t.x = f.x.slice();\n t.i = f.i;\n return t;\n}\n\nfunction impl(seed, opts) {\n if (seed == null) seed = +(new Date);\n var xg = new XorGen(seed),\n state = opts && opts.state,\n prng = function() { return (xg.next() >>> 0) / 0x100000000; };\n prng.double = function() {\n do {\n var top = xg.next() >>> 11,\n bot = (xg.next() >>> 0) / 0x100000000,\n result = (top + 
bot) / (1 << 21);\n } while (result === 0);\n return result;\n };\n prng.int32 = xg.next;\n prng.quick = prng;\n if (state) {\n if (state.x) copy(state, xg);\n prng.state = function() { return copy(xg, {}); }\n }\n return prng;\n}\n\nif (module && module.exports) {\n module.exports = impl;\n} else if (define && define.amd) {\n define(function() { return impl; });\n} else {\n this.xorshift7 = impl;\n}\n\n})(\n this,\n (typeof module) == 'object' && module, // present in node.js\n (typeof define) == 'function' && define // present with an AMD loader\n);\n\n", "// A Javascript implementaion of Richard Brent's Xorgens xor4096 algorithm.\n//\n// This fast non-cryptographic random number generator is designed for\n// use in Monte-Carlo algorithms. It combines a long-period xorshift\n// generator with a Weyl generator, and it passes all common batteries\n// of stasticial tests for randomness while consuming only a few nanoseconds\n// for each prng generated. For background on the generator, see Brent's\n// paper: \"Some long-period random number generators using shifts and xors.\"\n// http://arxiv.org/pdf/1004.3115v1.pdf\n//\n// Usage:\n//\n// var xor4096 = require('xor4096');\n// random = xor4096(1); // Seed with int32 or string.\n// assert.equal(random(), 0.1520436450538547); // (0, 1) range, 53 bits.\n// assert.equal(random.int32(), 1806534897); // signed int32, 32 bits.\n//\n// For nonzero numeric keys, this impelementation provides a sequence\n// identical to that by Brent's xorgens 3 implementaion in C. This\n// implementation also provides for initalizing the generator with\n// string seeds, or for saving and restoring the state of the generator.\n//\n// On Chrome, this prng benchmarks about 2.1 times slower than\n// Javascript's built-in Math.random().\n\n(function(global, module, define) {\n\nfunction XorGen(seed) {\n var me = this;\n\n // Set up generator function.\n me.next = function() {\n var w = me.w,\n X = me.X, i = me.i, t, v;\n // Update Weyl generator.\n me.w = w = (w + 0x61c88647) | 0;\n // Update xor generator.\n v = X[(i + 34) & 127];\n t = X[i = ((i + 1) & 127)];\n v ^= v << 13;\n t ^= t << 17;\n v ^= v >>> 15;\n t ^= t >>> 12;\n // Update Xor generator array state.\n v = X[i] = v ^ t;\n me.i = i;\n // Result is the combination.\n return (v + (w ^ (w >>> 16))) | 0;\n };\n\n function init(me, seed) {\n var t, v, i, j, w, X = [], limit = 128;\n if (seed === (seed | 0)) {\n // Numeric seeds initialize v, which is used to generates X.\n v = seed;\n seed = null;\n } else {\n // String seeds are mixed into v and X one character at a time.\n seed = seed + '\\0';\n v = 0;\n limit = Math.max(limit, seed.length);\n }\n // Initialize circular array and weyl value.\n for (i = 0, j = -32; j < limit; ++j) {\n // Put the unicode characters into the array, and shuffle them.\n if (seed) v ^= seed.charCodeAt((j + 32) % seed.length);\n // After 32 shuffles, take v as the starting w value.\n if (j === 0) w = v;\n v ^= v << 10;\n v ^= v >>> 15;\n v ^= v << 4;\n v ^= v >>> 13;\n if (j >= 0) {\n w = (w + 0x61c88647) | 0; // Weyl.\n t = (X[j & 127] ^= (v + w)); // Combine xor and weyl to init array.\n i = (0 == t) ? i + 1 : 0; // Count zeroes.\n }\n }\n // We have detected all zeroes; make the key nonzero.\n if (i >= 128) {\n X[(seed && seed.length || 0) & 127] = -1;\n }\n // Run the generator 512 times to further mix the state before using it.\n // Factoring this as a function slows the main generator, so it is just\n // unrolled here. 
The weyl generator is not advanced while warming up.\n i = 127;\n for (j = 4 * 128; j > 0; --j) {\n v = X[(i + 34) & 127];\n t = X[i = ((i + 1) & 127)];\n v ^= v << 13;\n t ^= t << 17;\n v ^= v >>> 15;\n t ^= t >>> 12;\n X[i] = v ^ t;\n }\n // Storing state as object members is faster than using closure variables.\n me.w = w;\n me.X = X;\n me.i = i;\n }\n\n init(me, seed);\n}\n\nfunction copy(f, t) {\n t.i = f.i;\n t.w = f.w;\n t.X = f.X.slice();\n return t;\n};\n\nfunction impl(seed, opts) {\n if (seed == null) seed = +(new Date);\n var xg = new XorGen(seed),\n state = opts && opts.state,\n prng = function() { return (xg.next() >>> 0) / 0x100000000; };\n prng.double = function() {\n do {\n var top = xg.next() >>> 11,\n bot = (xg.next() >>> 0) / 0x100000000,\n result = (top + bot) / (1 << 21);\n } while (result === 0);\n return result;\n };\n prng.int32 = xg.next;\n prng.quick = prng;\n if (state) {\n if (state.X) copy(state, xg);\n prng.state = function() { return copy(xg, {}); }\n }\n return prng;\n}\n\nif (module && module.exports) {\n module.exports = impl;\n} else if (define && define.amd) {\n define(function() { return impl; });\n} else {\n this.xor4096 = impl;\n}\n\n})(\n this, // window object or global\n (typeof module) == 'object' && module, // present in node.js\n (typeof define) == 'function' && define // present with an AMD loader\n);\n", "// A Javascript implementaion of the \"Tyche-i\" prng algorithm by\n// Samuel Neves and Filipe Araujo.\n// See https://eden.dei.uc.pt/~sneves/pubs/2011-snfa2.pdf\n\n(function(global, module, define) {\n\nfunction XorGen(seed) {\n var me = this, strseed = '';\n\n // Set up generator function.\n me.next = function() {\n var b = me.b, c = me.c, d = me.d, a = me.a;\n b = (b << 25) ^ (b >>> 7) ^ c;\n c = (c - d) | 0;\n d = (d << 24) ^ (d >>> 8) ^ a;\n a = (a - b) | 0;\n me.b = b = (b << 20) ^ (b >>> 12) ^ c;\n me.c = c = (c - d) | 0;\n me.d = (d << 16) ^ (c >>> 16) ^ a;\n return me.a = (a - b) | 0;\n };\n\n /* The following is non-inverted tyche, which has better internal\n * bit diffusion, but which is about 25% slower than tyche-i in JS.\n me.next = function() {\n var a = me.a, b = me.b, c = me.c, d = me.d;\n a = (me.a + me.b | 0) >>> 0;\n d = me.d ^ a; d = d << 16 ^ d >>> 16;\n c = me.c + d | 0;\n b = me.b ^ c; b = b << 12 ^ d >>> 20;\n me.a = a = a + b | 0;\n d = d ^ a; me.d = d = d << 8 ^ d >>> 24;\n me.c = c = c + d | 0;\n b = b ^ c;\n return me.b = (b << 7 ^ b >>> 25);\n }\n */\n\n me.a = 0;\n me.b = 0;\n me.c = 2654435769 | 0;\n me.d = 1367130551;\n\n if (seed === Math.floor(seed)) {\n // Integer seed.\n me.a = (seed / 0x100000000) | 0;\n me.b = seed | 0;\n } else {\n // String seed.\n strseed += seed;\n }\n\n // Mix in string seed, then discard an initial batch of 64 values.\n for (var k = 0; k < strseed.length + 20; k++) {\n me.b ^= strseed.charCodeAt(k) | 0;\n me.next();\n }\n}\n\nfunction copy(f, t) {\n t.a = f.a;\n t.b = f.b;\n t.c = f.c;\n t.d = f.d;\n return t;\n};\n\nfunction impl(seed, opts) {\n var xg = new XorGen(seed),\n state = opts && opts.state,\n prng = function() { return (xg.next() >>> 0) / 0x100000000; };\n prng.double = function() {\n do {\n var top = xg.next() >>> 11,\n bot = (xg.next() >>> 0) / 0x100000000,\n result = (top + bot) / (1 << 21);\n } while (result === 0);\n return result;\n };\n prng.int32 = xg.next;\n prng.quick = prng;\n if (state) {\n if (typeof(state) == 'object') copy(state, xg);\n prng.state = function() { return copy(xg, {}); }\n }\n return prng;\n}\n\nif (module && module.exports) {\n 
module.exports = impl;\n} else if (define && define.amd) {\n define(function() { return impl; });\n} else {\n this.tychei = impl;\n}\n\n})(\n this,\n (typeof module) == 'object' && module, // present in node.js\n (typeof define) == 'function' && define // present with an AMD loader\n);\n\n\n", "/*\nCopyright 2014 David Bau.\n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n\"Software\"), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\nSOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n*/\n\n(function (pool, math) {\n//\n// The following constants are related to IEEE 754 limits.\n//\nvar global = this,\n width = 256, // each RC4 output is 0 <= x < 256\n chunks = 6, // at least six RC4 outputs for each double\n digits = 52, // there are 52 significant digits in a double\n rngname = 'random', // rngname: name for Math.random and Math.seedrandom\n startdenom = math.pow(width, chunks),\n significance = math.pow(2, digits),\n overflow = significance * 2,\n mask = width - 1,\n nodecrypto; // node.js crypto module, initialized at the bottom.\n\n//\n// seedrandom()\n// This is the seedrandom function described above.\n//\nfunction seedrandom(seed, options, callback) {\n var key = [];\n options = (options == true) ? { entropy: true } : (options || {});\n\n // Flatten the seed string or build one from local entropy if needed.\n var shortseed = mixkey(flatten(\n options.entropy ? [seed, tostring(pool)] :\n (seed == null) ? 
autoseed() : seed, 3), key);\n\n // Use the seed to initialize an ARC4 generator.\n var arc4 = new ARC4(key);\n\n // This function returns a random double in [0, 1) that contains\n // randomness in every bit of the mantissa of the IEEE 754 value.\n var prng = function() {\n var n = arc4.g(chunks), // Start with a numerator n < 2 ^ 48\n d = startdenom, // and denominator d = 2 ^ 48.\n x = 0; // and no 'extra last byte'.\n while (n < significance) { // Fill up all significant digits by\n n = (n + x) * width; // shifting numerator and\n d *= width; // denominator and generating a\n x = arc4.g(1); // new least-significant-byte.\n }\n while (n >= overflow) { // To avoid rounding up, before adding\n n /= 2; // last byte, shift everything\n d /= 2; // right using integer math until\n x >>>= 1; // we have exactly the desired bits.\n }\n return (n + x) / d; // Form the number within [0, 1).\n };\n\n prng.int32 = function() { return arc4.g(4) | 0; }\n prng.quick = function() { return arc4.g(4) / 0x100000000; }\n prng.double = prng;\n\n // Mix the randomness into accumulated entropy.\n mixkey(tostring(arc4.S), pool);\n\n // Calling convention: what to return as a function of prng, seed, is_math.\n return (options.pass || callback ||\n function(prng, seed, is_math_call, state) {\n if (state) {\n // Load the arc4 state from the given state if it has an S array.\n if (state.S) { copy(state, arc4); }\n // Only provide the .state method if requested via options.state.\n prng.state = function() { return copy(arc4, {}); }\n }\n\n // If called as a method of Math (Math.seedrandom()), mutate\n // Math.random because that is how seedrandom.js has worked since v1.0.\n if (is_math_call) { math[rngname] = prng; return seed; }\n\n // Otherwise, it is a newer calling convention, so return the\n // prng directly.\n else return prng;\n })(\n prng,\n shortseed,\n 'global' in options ? options.global : (this == math),\n options.state);\n}\nmath['seed' + rngname] = seedrandom;\n\n//\n// ARC4\n//\n// An ARC4 implementation. The constructor takes a key in the form of\n// an array of at most (width) integers that should be 0 <= x < (width).\n//\n// The g(count) method returns a pseudorandom integer that concatenates\n// the next (count) outputs from ARC4. Its return value is a number x\n// that is in the range 0 <= x < (width ^ count).\n//\nfunction ARC4(key) {\n var t, keylen = key.length,\n me = this, i = 0, j = me.i = me.j = 0, s = me.S = [];\n\n // The empty key [] is treated as [0].\n if (!keylen) { key = [keylen++]; }\n\n // Set up S using the standard key scheduling algorithm.\n while (i < width) {\n s[i] = i++;\n }\n for (i = 0; i < width; i++) {\n s[i] = s[j = mask & (j + key[i % keylen] + (t = s[i]))];\n s[j] = t;\n }\n\n // The \"g\" method returns the next (count) outputs as one number.\n (me.g = function(count) {\n // Using instance members instead of closure state nearly doubles speed.\n var t, r = 0,\n i = me.i, j = me.j, s = me.S;\n while (count--) {\n t = s[i = mask & (i + 1)];\n r = r * width + s[mask & ((s[i] = s[j = mask & (j + t)]) + (s[j] = t))];\n }\n me.i = i; me.j = j;\n return r;\n // For robust unpredictability, the function call below automatically\n // discards an initial batch of values. 
This is called RC4-drop[256].\n // See http://google.com/search?q=rsa+fluhrer+response&btnI\n })(width);\n}\n\n//\n// copy()\n// Copies internal state of ARC4 to or from a plain object.\n//\nfunction copy(f, t) {\n t.i = f.i;\n t.j = f.j;\n t.S = f.S.slice();\n return t;\n};\n\n//\n// flatten()\n// Converts an object tree to nested arrays of strings.\n//\nfunction flatten(obj, depth) {\n var result = [], typ = (typeof obj), prop;\n if (depth && typ == 'object') {\n for (prop in obj) {\n try { result.push(flatten(obj[prop], depth - 1)); } catch (e) {}\n }\n }\n return (result.length ? result : typ == 'string' ? obj : obj + '\\0');\n}\n\n//\n// mixkey()\n// Mixes a string seed into a key that is an array of integers, and\n// returns a shortened string seed that is equivalent to the result key.\n//\nfunction mixkey(seed, key) {\n var stringseed = seed + '', smear, j = 0;\n while (j < stringseed.length) {\n key[mask & j] =\n mask & ((smear ^= key[mask & j] * 19) + stringseed.charCodeAt(j++));\n }\n return tostring(key);\n}\n\n//\n// autoseed()\n// Returns an object for autoseeding, using window.crypto and Node crypto\n// module if available.\n//\nfunction autoseed() {\n try {\n var out;\n if (nodecrypto && (out = nodecrypto.randomBytes)) {\n // The use of 'out' to remember randomBytes makes tight minified code.\n out = out(width);\n } else {\n out = new Uint8Array(width);\n (global.crypto || global.msCrypto).getRandomValues(out);\n }\n return tostring(out);\n } catch (e) {\n var browser = global.navigator,\n plugins = browser && browser.plugins;\n return [+new Date, global, plugins, global.screen, tostring(pool)];\n }\n}\n\n//\n// tostring()\n// Converts an array of charcodes to a string\n//\nfunction tostring(a) {\n return String.fromCharCode.apply(0, a);\n}\n\n//\n// When seedrandom.js is loaded, we immediately mix a few bits\n// from the built-in RNG into the entropy pool. Because we do\n// not want to interfere with deterministic PRNG state later,\n// seedrandom will not call math.random on its own again after\n// initialization.\n//\nmixkey(math.random(), pool);\n\n//\n// Nodejs and AMD support: export the implementation as a module using\n// either convention.\n//\nif ((typeof module) == 'object' && module.exports) {\n module.exports = seedrandom;\n // When in node.js, try using crypto package for autoseeding.\n try {\n nodecrypto = require('crypto');\n } catch (ex) {}\n} else if ((typeof define) == 'function' && define.amd) {\n define(function() { return seedrandom; });\n}\n\n// End anonymous scope, and pass initial values.\n})(\n [], // pool: entropy pool starts empty\n Math // math: package containing random, pow, and seedrandom\n);\n", "// A library of seedable RNGs implemented in Javascript.\n//\n// Usage:\n//\n// var seedrandom = require('seedrandom');\n// var random = seedrandom(1); // or any seed.\n// var x = random(); // 0 <= x < 1. Every bit is random.\n// var x = random.quick(); // 0 <= x < 1. 
32 bits of randomness.\n\n// alea, a 53-bit multiply-with-carry generator by Johannes Baag\u00F8e.\n// Period: ~2^116\n// Reported to pass all BigCrush tests.\nvar alea = require('./lib/alea');\n\n// xor128, a pure xor-shift generator by George Marsaglia.\n// Period: 2^128-1.\n// Reported to fail: MatrixRank and LinearComp.\nvar xor128 = require('./lib/xor128');\n\n// xorwow, George Marsaglia's 160-bit xor-shift combined plus weyl.\n// Period: 2^192-2^32\n// Reported to fail: CollisionOver, SimpPoker, and LinearComp.\nvar xorwow = require('./lib/xorwow');\n\n// xorshift7, by Fran\u00E7ois Panneton and Pierre L'ecuyer, takes\n// a different approach: it adds robustness by allowing more shifts\n// than Marsaglia's original three. It is a 7-shift generator\n// with 256 bits, that passes BigCrush with no systmatic failures.\n// Period 2^256-1.\n// No systematic BigCrush failures reported.\nvar xorshift7 = require('./lib/xorshift7');\n\n// xor4096, by Richard Brent, is a 4096-bit xor-shift with a\n// very long period that also adds a Weyl generator. It also passes\n// BigCrush with no systematic failures. Its long period may\n// be useful if you have many generators and need to avoid\n// collisions.\n// Period: 2^4128-2^32.\n// No systematic BigCrush failures reported.\nvar xor4096 = require('./lib/xor4096');\n\n// Tyche-i, by Samuel Neves and Filipe Araujo, is a bit-shifting random\n// number generator derived from ChaCha, a modern stream cipher.\n// https://eden.dei.uc.pt/~sneves/pubs/2011-snfa2.pdf\n// Period: ~2^127\n// No systematic BigCrush failures reported.\nvar tychei = require('./lib/tychei');\n\n// The original ARC4-based prng included in this library.\n// Period: ~2^1600\nvar sr = require('./seedrandom');\n\nsr.alea = alea;\nsr.xor128 = xor128;\nsr.xorwow = xorwow;\nsr.xorshift7 = xorshift7;\nsr.xor4096 = xor4096;\nsr.tychei = tychei;\n\nmodule.exports = sr;\n", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as seedrandom from 'seedrandom';\nimport { expectNumbersClose, testEpsilon } from '../test_util';\n// https://en.wikipedia.org/wiki/Marsaglia_polar_method\nexport class MPRandGauss {\n constructor(mean, stdDeviation, dtype, truncated, seed) {\n this.mean = mean;\n this.stdDev = stdDeviation;\n this.dtype = dtype;\n this.nextVal = NaN;\n this.truncated = truncated;\n if (this.truncated) {\n this.upper = this.mean + this.stdDev * 2;\n this.lower = this.mean - this.stdDev * 2;\n }\n const seedValue = seed ? seed : Math.random();\n this.random = seedrandom.alea(seedValue.toString());\n }\n /** Returns next sample from a Gaussian distribution. 
*/\n nextValue() {\n if (!isNaN(this.nextVal)) {\n const value = this.nextVal;\n this.nextVal = NaN;\n return value;\n }\n let resultX, resultY;\n let isValid = false;\n while (!isValid) {\n let v1, v2, s;\n do {\n v1 = 2 * this.random() - 1;\n v2 = 2 * this.random() - 1;\n s = v1 * v1 + v2 * v2;\n } while (s >= 1 || s === 0);\n const mul = Math.sqrt(-2.0 * Math.log(s) / s);\n resultX = this.mean + this.stdDev * v1 * mul;\n resultY = this.mean + this.stdDev * v2 * mul;\n if (!this.truncated || this.isValidTruncated(resultX)) {\n isValid = true;\n }\n }\n if (!this.truncated || this.isValidTruncated(resultY)) {\n this.nextVal = this.convertValue(resultY);\n }\n return this.convertValue(resultX);\n }\n /** Handles proper rounding for non-floating-point numbers. */\n convertValue(value) {\n if (this.dtype == null || this.dtype === 'float32') {\n return value;\n }\n return Math.round(value);\n }\n /** Returns true if less than 2-standard-deviations from the mean. */\n isValidTruncated(value) {\n return value <= this.upper && value >= this.lower;\n }\n}\n// Marsaglia, George, and Wai Wan Tsang. 2000. \"A Simple Method for Generating\n// Gamma Variables.\"\nexport class RandGamma {\n constructor(alpha, beta, dtype, seed) {\n this.alpha = alpha;\n this.beta = 1 / beta; // convert rate to scale parameter\n this.dtype = dtype;\n const seedValue = seed ? seed : Math.random();\n this.randu = seedrandom.alea(seedValue.toString());\n this.randn = new MPRandGauss(0, 1, dtype, false, this.randu());\n if (alpha < 1) {\n this.d = alpha + (2 / 3);\n }\n else {\n this.d = alpha - (1 / 3);\n }\n this.c = 1 / Math.sqrt(9 * this.d);\n }\n /** Returns next sample from a gamma distribution. */\n nextValue() {\n let x2, v0, v1, x, u, v;\n while (true) {\n do {\n x = this.randn.nextValue();\n v = 1 + (this.c * x);\n } while (v <= 0);\n v *= v * v;\n x2 = x * x;\n v0 = 1 - (0.331 * x2 * x2);\n v1 = (0.5 * x2) + (this.d * (1 - v + Math.log(v)));\n u = this.randu();\n if (u < v0 || Math.log(u) < v1) {\n break;\n }\n }\n v = (1 / this.beta) * this.d * v;\n if (this.alpha < 1) {\n v *= Math.pow(this.randu(), 1 / this.alpha);\n }\n return this.convertValue(v);\n }\n /** Handles proper rounding for non-floating-point numbers. */\n convertValue(value) {\n if (this.dtype === 'float32') {\n return value;\n }\n return Math.round(value);\n }\n}\nexport class UniformRandom {\n constructor(min = 0, max = 1, dtype, seed) {\n /** Handles proper rounding for non floating point numbers. 
*/\n this.canReturnFloat = () => (this.dtype == null || this.dtype === 'float32');\n this.min = min;\n this.range = max - min;\n this.dtype = dtype;\n if (seed == null) {\n seed = Math.random();\n }\n if (typeof seed === 'number') {\n seed = seed.toString();\n }\n if (!this.canReturnFloat() && this.range <= 1) {\n throw new Error(`The difference between ${min} - ${max} <= 1 and dtype is not float`);\n }\n this.random = seedrandom.alea(seed);\n }\n convertValue(value) {\n if (this.canReturnFloat()) {\n return value;\n }\n return Math.round(value);\n }\n nextValue() {\n return this.convertValue(this.min + this.range * this.random());\n }\n}\nexport function jarqueBeraNormalityTest(values) {\n // https://en.wikipedia.org/wiki/Jarque%E2%80%93Bera_test\n const n = values.length;\n const s = skewness(values);\n const k = kurtosis(values);\n const jb = n / 6 * (Math.pow(s, 2) + 0.25 * Math.pow(k - 3, 2));\n // JB test requires 2-degress of freedom from Chi-Square @ 0.95:\n // http://www.itl.nist.gov/div898/handbook/eda/section3/eda3674.htm\n const CHI_SQUARE_2DEG = 5.991;\n if (jb > CHI_SQUARE_2DEG) {\n throw new Error(`Invalid p-value for JB: ${jb}`);\n }\n}\nexport function expectArrayInMeanStdRange(actual, expectedMean, expectedStdDev, epsilon) {\n if (epsilon == null) {\n epsilon = testEpsilon();\n }\n const actualMean = mean(actual);\n expectNumbersClose(actualMean, expectedMean, epsilon);\n expectNumbersClose(standardDeviation(actual, actualMean), expectedStdDev, epsilon);\n}\nfunction mean(values) {\n let sum = 0;\n for (let i = 0; i < values.length; i++) {\n sum += values[i];\n }\n return sum / values.length;\n}\nfunction standardDeviation(values, mean) {\n let squareDiffSum = 0;\n for (let i = 0; i < values.length; i++) {\n const diff = values[i] - mean;\n squareDiffSum += diff * diff;\n }\n return Math.sqrt(squareDiffSum / values.length);\n}\nfunction kurtosis(values) {\n // https://en.wikipedia.org/wiki/Kurtosis\n const valuesMean = mean(values);\n const n = values.length;\n let sum2 = 0;\n let sum4 = 0;\n for (let i = 0; i < n; i++) {\n const v = values[i] - valuesMean;\n sum2 += Math.pow(v, 2);\n sum4 += Math.pow(v, 4);\n }\n return (1 / n) * sum4 / Math.pow((1 / n) * sum2, 2);\n}\nfunction skewness(values) {\n // https://en.wikipedia.org/wiki/Skewness\n const valuesMean = mean(values);\n const n = values.length;\n let sum2 = 0;\n let sum3 = 0;\n for (let i = 0; i < n; i++) {\n const v = values[i] - valuesMean;\n sum2 += Math.pow(v, 2);\n sum3 += Math.pow(v, 3);\n }\n return (1 / n) * sum3 / Math.pow((1 / (n - 1)) * sum2, 3 / 2);\n}\n//# sourceMappingURL=rand_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { buffer } from './buffer';\nimport { op } from './operation';\nimport { RandGamma } from './rand_util';\n/**\n * Creates a `tf.Tensor` with values sampled from a gamma distribution.\n *\n * ```js\n * tf.randomGamma([2, 2], 1).print();\n * ```\n *\n * @param shape An array of integers defining the output tensor shape.\n * @param alpha The shape parameter of the gamma distribution.\n * @param beta The inverse scale parameter of the gamma distribution. Defaults\n * to 1.\n * @param dtype The data type of the output. Defaults to float32.\n * @param seed The seed for the random number generator.\n *\n * @doc {heading: 'Tensors', subheading: 'Random'}\n */\nfunction randomGamma_(shape, alpha, beta = 1, dtype = 'float32', seed) {\n if (beta == null) {\n beta = 1;\n }\n if (dtype == null) {\n dtype = 'float32';\n }\n if (dtype !== 'float32' && dtype !== 'int32') {\n throw new Error(`Unsupported data type ${dtype}`);\n }\n const rgamma = new RandGamma(alpha, beta, dtype, seed);\n const res = buffer(shape, dtype);\n for (let i = 0; i < res.values.length; i++) {\n res.values[i] = rgamma.nextValue();\n }\n return res.toTensor();\n}\nexport const randomGamma = op({ randomGamma_ });\n//# sourceMappingURL=random_gamma.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { buffer } from './buffer';\nimport { op } from './operation';\nimport { MPRandGauss } from './rand_util';\n/**\n * Creates a `tf.Tensor` with values sampled from a normal distribution.\n *\n * ```js\n * tf.randomNormal([2, 2]).print();\n * ```\n *\n * @param shape An array of integers defining the output tensor shape.\n * @param mean The mean of the normal distribution.\n * @param stdDev The standard deviation of the normal distribution.\n * @param dtype The data type of the output.\n * @param seed The seed for the random number generator.\n *\n * @doc {heading: 'Tensors', subheading: 'Random'}\n */\nfunction randomNormal_(shape, mean = 0, stdDev = 1, dtype, seed) {\n if (dtype != null && dtype === 'bool') {\n throw new Error(`Unsupported data type ${dtype}`);\n }\n const randGauss = new MPRandGauss(mean, stdDev, dtype, false /* truncated */, seed);\n const res = buffer(shape, dtype);\n for (let i = 0; i < res.values.length; i++) {\n res.values[i] = randGauss.nextValue();\n }\n return res.toTensor();\n}\nexport const randomNormal = op({ randomNormal_ });\n//# sourceMappingURL=random_normal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { buffer } from './buffer';\nimport { op } from './operation';\nimport { UniformRandom } from './rand_util';\n/**\n * Creates a `tf.Tensor` with values sampled from a uniform distribution.\n *\n * The generated values follow a uniform distribution in the range [minval,\n * maxval). The lower bound minval is included in the range, while the upper\n * bound maxval is excluded.\n *\n * ```js\n * tf.randomUniform([2, 2]).print();\n * ```\n *\n * @param shape An array of integers defining the output tensor shape.\n * @param minval The lower bound on the range of random values to generate.\n * Defaults to 0.\n * @param maxval The upper bound on the range of random values to generate.\n * Defaults to 1.\n * @param dtype The data type of the output tensor. 
Defaults to 'float32'.\n *\n * @doc {heading: 'Tensors', subheading: 'Random'}\n */\nfunction randomUniform_(shape, minval = 0, maxval = 1, dtype = 'float32', seed) {\n const res = buffer(shape, dtype);\n const random = new UniformRandom(minval, maxval, null, seed);\n for (let i = 0; i < res.values.length; i++) {\n res.values[i] = random.nextValue();\n }\n return res.toTensor();\n}\nexport const randomUniform = op({ randomUniform_ });\n//# sourceMappingURL=random_uniform.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { inferShape } from '../tensor_util_env';\nimport { assertNonNull } from '../util';\nimport { makeTensor } from './tensor_ops_util';\n/**\n * Creates rank-1 `tf.Tensor` with the provided values, shape and dtype.\n *\n * The same functionality can be achieved with `tf.tensor`, but in general\n * we recommend using `tf.tensor1d` as it makes the code more readable.\n *\n * ```js\n * tf.tensor1d([1, 2, 3]).print();\n * ```\n *\n * @param values The values of the tensor. Can be array of numbers,\n * or a `TypedArray`.\n * @param dtype The data type.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function tensor1d(values, dtype) {\n assertNonNull(values);\n const inferredShape = inferShape(values, dtype);\n if (inferredShape.length !== 1) {\n throw new Error('tensor1d() requires values to be a flat/TypedArray');\n }\n const shape = null;\n return makeTensor(values, shape, inferredShape, dtype);\n}\n//# sourceMappingURL=tensor1d.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Range } from '../kernel_names';\nimport { makeZerosTypedArray } from '../util';\nimport { tensor1d } from './tensor1d';\nimport { zeros } from './zeros';\n/**\n * Creates a new `tf.Tensor1D` filled with the numbers in the range provided.\n *\n * The tensor is a half-open interval meaning it includes start, but\n * excludes stop. Decrementing ranges and negative step values are also\n * supported.\n *\n *\n * ```js\n * tf.range(0, 9, 2).print();\n * ```\n *\n * @param start An integer start value\n * @param stop An integer stop value\n * @param step An integer increment (will default to 1 or -1)\n * @param dtype The data type of the output tensor. Defaults to 'float32'.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function range(start, stop, step = 1, dtype = 'float32') {\n if (step === 0) {\n throw new Error('Cannot have a step of zero');\n }\n const forward = () => {\n const sameStartStop = start === stop;\n const increasingRangeNegativeStep = start < stop && step < 0;\n const decreasingRangePositiveStep = stop < start && step > 1;\n if (sameStartStop || increasingRangeNegativeStep ||\n decreasingRangePositiveStep) {\n return zeros([0], dtype);\n }\n const numElements = Math.abs(Math.ceil((stop - start) / step));\n const values = makeZerosTypedArray(numElements, dtype);\n if (stop < start && step === 1) {\n // Auto adjust the step's sign if it hasn't been set\n // (or was set to 1)\n step = -1;\n }\n values[0] = start;\n for (let i = 1; i < values.length; i++) {\n values[i] = values[i - 1] + step;\n }\n return tensor1d(values, dtype);\n };\n const attrs = { start, stop, step, dtype };\n return ENGINE.runKernelFunc(forward, {} /* inputs */, null /* grad */, Range, attrs);\n}\n//# sourceMappingURL=range.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Reciprocal } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes reciprocal of x element-wise: `1 / x`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, 2]);\n *\n * x.reciprocal().print(); // or tf.reciprocal(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction reciprocal_(x) {\n const $x = convertToTensor(x, 'x', 'reciprocal');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.reciprocal($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Reciprocal);\n}\nexport const reciprocal = op({ reciprocal_ });\n//# sourceMappingURL=reciprocal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Relu } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\nimport { cast } from './cast';\n/**\n * Computes rectified linear element-wise: `max(x, 0)`.\n *\n * ```js\n * const x = tf.tensor1d([-1, 2, -3, 4]);\n *\n * x.relu().print(); // or tf.relu(x)\n * ```\n * @param x The input tensor. If the dtype is `bool`, the output dtype will be\n * `int32'.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction relu_(x) {\n const $x = convertToTensor(x, 'x', 'relu');\n const forward = (backend, save) => {\n save([$x]);\n if ($x.dtype === 'bool') {\n return cast($x, 'int32');\n }\n return backend.relu($x);\n };\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Relu);\n}\nexport const relu = op({ relu_ });\n//# sourceMappingURL=relu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Relu6 } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { cast } from './cast';\nimport { op } from './operation';\n/**\n * Computes rectified linear 6 element-wise: `min(max(x, 0), 6)`.\n *\n * ```js\n * const x = tf.tensor1d([-1, 2, -3, 8]);\n *\n * x.relu6().print(); // or tf.relu6(x)\n * ```\n * @param x The input tensor. If the dtype is `bool`, the output dtype will be\n * `int32'.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction relu6_(x) {\n const $x = convertToTensor(x, 'x', 'relu6');\n const forward = (backend, save) => {\n save([$x]);\n if ($x.dtype === 'bool') {\n return cast($x, 'int32');\n }\n return backend.relu6($x);\n };\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Relu6);\n}\nexport const relu6 = op({ relu6_ });\n//# sourceMappingURL=relu6.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Reverse } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam } from '../util';\nimport { clone } from './clone';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Reverses a `tf.Tensor` along a specified axis.\n *\n * Also available are stricter rank-specific methods that assert that `x` is\n * of the given rank:\n * - `tf.reverse1d`\n * - `tf.reverse2d`\n * - `tf.reverse3d`\n * - `tf.reverse4d`\n *\n * Except `tf.reverse1d` (which does not have axis param), all methods have\n * same signature as this method.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4]);\n *\n * x.reverse().print();\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * const axis = 1;\n * x.reverse(axis).print();\n * ```\n * @param x The input tensor to be reversed.\n * @param axis The set of dimensions to reverse. Must be in the\n * range [-rank(x), rank(x)). Defaults to all axes.\n *\n * @doc {heading: 'Tensors', subheading: 'Slicing and Joining'}\n */\nfunction reverse_(x, axis) {\n const $x = convertToTensor(x, 'x', 'reverse');\n const forward = (backend) => {\n const axes = parseAxisParam(axis, $x.shape);\n if ($x.rank === 0) {\n return clone($x);\n }\n const res = backend.reverse($x, axes);\n return reshape(res, $x.shape);\n };\n const inputs = { x: $x };\n const attrs = { dims: axis };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Reverse, attrs);\n}\nexport const reverse = op({ reverse_ });\n//# sourceMappingURL=reverse.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\nimport { reverse } from './reverse';\n/**\n * Reverses a `tf.Tensor1D`.\n *\n * @param x The input tensor.\n */\nfunction reverse1d_(x) {\n const $x = convertToTensor(x, 'x', 'reverse');\n util.assert($x.rank === 1, () => `Error in reverse1D: x must be rank 1 but got rank ${$x.rank}.`);\n return reverse($x, 0);\n}\nexport const reverse1d = op({ reverse1d_ });\n//# sourceMappingURL=reverse_1d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\nimport { reverse } from './reverse';\n/**\n * Reverses a `tf.Tensor2D` along a specified axis.\n *\n * @param x The input tensor.\n * @param axis The set of dimensions to reverse. Must be in the\n * range [-rank(x), rank(x)). Defaults to all axes.\n */\nfunction reverse2d_(x, axis) {\n const $x = convertToTensor(x, 'x', 'reverse');\n util.assert($x.rank === 2, () => `Error in reverse2D: x must be rank 2 but got rank ${$x.rank}.`);\n return reverse($x, axis);\n}\nexport const reverse2d = op({ reverse2d_ });\n//# sourceMappingURL=reverse_2d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\nimport { reverse } from './reverse';\n/**\n * Reverses a `tf.Tensor3D` along a specified axis.\n *\n * @param x The input tensor.\n * @param axis The set of dimensions to reverse. Must be in the\n * range [-rank(x), rank(x)). Defaults to all axes.\n */\nfunction reverse3d_(x, axis) {\n const $x = convertToTensor(x, 'x', 'reverse');\n util.assert($x.rank === 3, () => `Error in reverse3D: x must be rank 3 but got rank ${$x.rank}.`);\n return reverse($x, axis);\n}\nexport const reverse3d = op({ reverse3d_ });\n//# sourceMappingURL=reverse_3d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\nimport { reverse } from './reverse';\n/**\n * Reverses a `tf.Tensor4D` along a specified axis.\n *\n * @param x The input tensor.\n * @param axis The set of dimensions to reverse. Must be in the\n * range [-rank(x), rank(x)). Defaults to all axes.\n */\nfunction reverse4d_(x, axis) {\n const $x = convertToTensor(x, 'x', 'reverse');\n util.assert($x.rank === 4, () => `Error in reverse4D: x must be rank 4 but got rank ${$x.rank}.`);\n return reverse($x, axis);\n}\nexport const reverse4d = op({ reverse4d_ });\n//# sourceMappingURL=reverse_4d.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Round } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes round of input `tf.Tensor` element-wise: `round(x)`.\n * It implements banker's rounding.\n *\n * ```js\n * const x = tf.tensor1d([.6, 1.1, -3.3]);\n *\n * x.round().print(); // or tf.round(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction round_(x) {\n const $x = convertToTensor(x, 'x', 'round');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend) => backend.round($x), inputs, null /* grad */, Round);\n}\nexport const round = op({ round_ });\n//# sourceMappingURL=round.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Rsqrt } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes reciprocal of square root of the input `tf.Tensor` element-wise:\n * `y = 1 / sqrt(x)`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 4, -1]);\n *\n * x.rsqrt().print(); // or tf.rsqrt(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction rsqrt_(x) {\n const $x = convertToTensor(x, 'x', 'rsqrt');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.rsqrt($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Rsqrt);\n}\nexport const rsqrt = op({ rsqrt_ });\n//# sourceMappingURL=rsqrt.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Selu } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes scaled exponential linear element-wise.\n *\n * `x < 0 ? scale * alpha * (exp(x) - 1) : x`\n *\n * ```js\n * const x = tf.tensor1d([-1, 2, -3, 4]);\n *\n * x.selu().print(); // or tf.selu(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction selu_(x) {\n const $x = convertToTensor(x, 'x', 'selu');\n const forward = (backend, save) => {\n const res = backend.selu($x);\n save([$x]);\n return res;\n };\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Selu);\n}\nexport const selu = op({ selu_ });\n//# sourceMappingURL=selu.js.map", "import { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { conv2d } from './conv2d';\nimport { depthwiseConv2d } from './depthwise_conv2d';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * 2-D convolution with separable filters.\n *\n * Performs a depthwise convolution that acts separately on channels followed\n * by a pointwise convolution that mixes channels. Note that this is\n * separability between dimensions [1, 2] and 3, not spatial separability\n * between dimensions 1 and 2.\n *\n * See\n * [https://www.tensorflow.org/api_docs/python/tf/nn/separable_conv2d](\n * https://www.tensorflow.org/api_docs/python/tf/nn/separable_conv2d)\n * for more details.\n *\n * @param x The input tensor, of rank 4 or rank 3, of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is\n * assumed.\n * @param depthwiseFilter The depthwise filter tensor, rank 4, of shape\n * `[filterHeight, filterWidth, inChannels, channelMultiplier]`. This is\n * the filter used in the first step.\n * @param pointwiseFilter The pointwise filter tensor, rank 4, of shape\n * `[1, 1, inChannels * channelMultiplier, outChannels]`. This is\n * the filter used in the second step.\n * @param strides The strides of the convolution: `[strideHeight,\n * strideWidth]`. If strides is a single number, then `strideHeight ==\n * strideWidth`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dilations The dilation rates: `[dilationHeight, dilationWidth]`\n * in which we sample input values across the height and width dimensions\n * in atrous convolution. Defaults to `[1, 1]`. 
If `rate` is a single\n * number, then `dilationHeight == dilationWidth`. If it is greater than\n * 1, then all values of `strides` must be 1.\n * @param dataFormat: An optional string from: \"NHWC\", \"NCHW\". Defaults to\n * \"NHWC\". Specify the data format of the input and output data. With the\n * default format \"NHWC\", the data is stored in the order of: [batch,\n * height, width, channels]. Only \"NHWC\" is currently supported.\n *\n * @doc {heading: 'Operations', subheading: 'Convolution'}\n */\nfunction separableConv2d_(x, depthwiseFilter, pointwiseFilter, strides, pad, dilation = [1, 1], dataFormat = 'NHWC') {\n const $x = convertToTensor(x, 'x', 'separableConv2d');\n const $depthwiseFilter = convertToTensor(depthwiseFilter, 'depthwiseFilter', 'separableConv2d');\n const $pointwiseFilter = convertToTensor(pointwiseFilter, 'pointwiseFilter', 'separableConv2d');\n let x4D = $x;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n reshapedTo4D = true;\n x4D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2]]);\n }\n if (dataFormat === 'NCHW') {\n throw new Error('separableConv2d currently does not support dataFormat NCHW; only ' +\n 'NHWC is supported');\n }\n util.assert(x4D.rank === 4, () => `Error in separableConv2d: input must be rank 4, but got ` +\n `rank ${x4D.rank}.`);\n util.assert($depthwiseFilter.rank === 4, () => `Error in separableConv2d: depthwise filter must be rank 4, but ` +\n `got rank ${$depthwiseFilter.rank}.`);\n util.assert($pointwiseFilter.rank === 4, () => `Error in separableConv2d: pointwise filter must be rank 4, but ` +\n `got rank ${$depthwiseFilter.rank}.`);\n util.assert($pointwiseFilter.shape[0] === 1, () => `Error in separableConv2d: the first dimension of pointwise filter ` +\n ` must be 1, but got ${$pointwiseFilter.shape[0]}.`);\n util.assert($pointwiseFilter.shape[1] === 1, () => `Error in separableConv2d: the second dimension of pointwise ` +\n `filter must be 1, but got ${$pointwiseFilter.shape[1]}.`);\n const inChannels = $depthwiseFilter.shape[2];\n const channelMultiplier = $depthwiseFilter.shape[3];\n util.assert($pointwiseFilter.shape[2] === inChannels * channelMultiplier, () => `Error in separableConv2d: the third dimension of pointwise filter ` +\n `must be ${inChannels * channelMultiplier}, ` +\n `but got ${$pointwiseFilter.shape[2]}.`);\n const depthwise = depthwiseConv2d(x4D, $depthwiseFilter, strides, pad, dataFormat, dilation);\n const pointwiseStride = 1;\n const res = conv2d(depthwise, $pointwiseFilter, pointwiseStride, 'valid', dataFormat);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const separableConv2d = op({ separableConv2d_ });\n//# sourceMappingURL=separable_conv2d.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { TensorBuffer } from '../tensor';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\n/**\n * Computes the difference between two lists of numbers.\n *\n * Given a Tensor `x` and a Tensor `y`, this operation returns a Tensor `out`\n * that represents all values that are in `x` but not in `y`. The returned\n * Tensor `out` is sorted in the same order that the numbers appear in `x`\n * (duplicates are preserved). This operation also returns a Tensor indices that\n * represents the position of each out element in `x`. In other words:\n *\n * `out[i] = x[idx[i]] for i in [0, 1, ..., out.length - 1]`\n *\n * ```js\n * const x = [1, 2, 3, 4, 5, 6];\n * const y = [1, 3, 5];\n *\n * const [out, indices] = await tf.setdiff1dAsync(x, y);\n * out.print(); // [2, 4, 6]\n * indices.print(); // [1, 3, 5]\n * ```\n *\n * @param x 1-D Tensor. Values to keep.\n * @param y 1-D Tensor. Must have the same type as x. Values to exclude in the\n * output.\n * @returns Promise of Tensor tuple [out, indices].\n * out: Tensor with the same type as x.\n * indices: A Tensor of type int32.\n *\n * @doc {heading: 'Tensors', subheading: 'Transformations'}\n */\nasync function setdiff1dAsync_(x, y) {\n const $x = convertToTensor(x, 'x', 'setdiff1d');\n const $y = convertToTensor(y, 'y', 'setdiff1d');\n util.assert($x.dtype === $y.dtype, () => `x and y should have the same dtype, but got x (${$x.dtype}) and y (${$y.dtype}).`);\n util.assert($x.rank === 1, () => `x should be 1D tensor, but got x (${$x.shape}).`);\n util.assert($y.rank === 1, () => `y should be 1D tensor, but got y (${$y.shape}).`);\n const xVals = await $x.data();\n const yVals = await $y.data();\n const ySet = new Set(yVals);\n let outputSize = 0;\n for (let i = 0; i < xVals.length; i++) {\n if (!ySet.has(xVals[i])) {\n outputSize++;\n }\n }\n const buffer = new TensorBuffer([outputSize], $x.dtype);\n const indices = new TensorBuffer([outputSize], 'int32');\n for (let i = 0, p = 0; i < xVals.length; i++) {\n if (!ySet.has(xVals[i])) {\n buffer.values[p] = xVals[i];\n indices.values[p] = i;\n p++;\n }\n }\n return [buffer.toTensor(), indices.toTensor()];\n}\nexport const setdiff1dAsync = setdiff1dAsync_;\n//# sourceMappingURL=setdiff1d_async.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Sign } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Returns an element-wise indication of the sign of a number.\n *\n * ```js\n * const x = tf.tensor1d([.6, 1.1, -3.3, NaN, 0]);\n *\n * x.sign().print(); // or tf.sign(x)\n * ```\n * @param x The input Tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction sign_(x) {\n const $x = convertToTensor(x, 'x', 'sign');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(backend => backend.sign($x), inputs, null /* grad */, Sign);\n}\nexport const sign = op({ sign_ });\n//# sourceMappingURL=sign.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Sin } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes sin of the input Tensor element-wise: `sin(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, Math.PI / 2, Math.PI * 3 / 4]);\n *\n * x.sin().print(); // or tf.sin(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction sin_(x) {\n const $x = convertToTensor(x, 'x', 'sin');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.sin($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Sin);\n}\nexport const sin = op({ sin_ });\n//# sourceMappingURL=sin.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Sinh } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes hyperbolic sin of the input `tf.Tensor` element-wise: `sinh(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, .7]);\n *\n * x.sinh().print(); // or tf.sinh(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction sinh_(x) {\n const $x = convertToTensor(x, 'x', 'sinh');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.sinh($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Sinh);\n}\nexport const sinh = op({ sinh_ });\n//# sourceMappingURL=sinh.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\nimport { slice } from './slice';\n/**\n * Extracts a 1D slice from 1D array starting at coordinates `begin` and is\n * of length `size`. See `slice` for details.\n */\nfunction slice1d_(x, begin, size) {\n const $x = convertToTensor(x, 'x', 'slice1d');\n util.assert($x.rank === 1, () => `slice1d expects a rank-1 tensor, but got a rank-${$x.rank} tensor`);\n return slice($x, [begin], [size]);\n}\nexport const slice1d = op({ slice1d_ });\n//# sourceMappingURL=slice1d.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\nimport { slice } from './slice';\n/**\n * Extracts a 2D slice from a 2D array starting at coordinates `begin` and\n * is of size `size`. See `slice` for details.\n */\nfunction slice2d_(x, begin, size) {\n const $x = convertToTensor(x, 'x', 'slice2d');\n util.assert($x.rank === 2, () => `slice2d expects a rank-2 tensor, but got a rank-${$x.rank} tensor`);\n return slice($x, begin, size);\n}\nexport const slice2d = op({ slice2d_ });\n//# sourceMappingURL=slice2d.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\nimport { slice } from './slice';\n/**\n * Extracts a 3D slice from a 3D array starting at coordinates `begin` and\n * is of size `size`. See `slice` for details.\n */\nfunction slice3d_(x, begin, size) {\n const $x = convertToTensor(x, 'x', 'slice3d');\n util.assert($x.rank === 3, () => `slice3d expects a rank-3 tensor, but got a rank-${$x.rank} tensor`);\n return slice($x, begin, size);\n}\nexport const slice3d = op({ slice3d_ });\n//# sourceMappingURL=slice3d.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\nimport { slice } from './slice';\n/**\n * Extracts a 4D slice from a 4D array starting at coordinates `begin` and\n * is of size `size`. See `slice` for details.\n */\nfunction slice4d_(x, begin, size) {\n const $x = convertToTensor(x, 'x', 'slice4d');\n util.assert($x.rank === 4, () => `slice4d expects a rank-4 tensor, but got a rank-${$x.rank} tensor`);\n return slice($x, begin, size);\n}\nexport const slice4d = op({ slice4d_ });\n//# sourceMappingURL=slice4d.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Softmax } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes the softmax normalized vector given the logits.\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3]);\n *\n * a.softmax().print(); // or tf.softmax(a)\n * ```\n *\n * ```js\n * const a = tf.tensor2d([2, 4, 6, 1, 2, 3], [2, 3]);\n *\n * a.softmax().print(); // or tf.softmax(a)\n * ```\n *\n * @param logits The logits array.\n * @param dim The dimension softmax would be performed on. Defaults to `-1`\n * which indicates the last dimension.\n *\n * @doc {heading: 'Operations', subheading: 'Normalization'}\n */\nfunction softmax_(logits, dim = -1) {\n const $logits = convertToTensor(logits, 'logits', 'softmax', 'float32');\n if (dim === -1) {\n dim = $logits.rank - 1;\n }\n if (dim !== $logits.rank - 1) {\n throw Error('Softmax along a non-last dimension is not yet supported. ' +\n `Logits was rank ${$logits.rank} and dim was ${dim}`);\n }\n const inputs = { logits: $logits };\n const attrs = { dim };\n return ENGINE.runKernelFunc((backend, save) => {\n const y = backend.softmax($logits, dim);\n save([y]);\n return y;\n }, inputs, null /* grad */, Softmax, attrs);\n}\nexport const softmax = op({ softmax_ });\n//# sourceMappingURL=softmax.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { FFT } from '../../kernel_names';\nimport { assert } from '../../util';\nimport { op } from '../operation';\n/**\n * Fast Fourier transform.\n *\n * Computes the 1-dimensional discrete Fourier transform over the inner-most\n * dimension of input.\n *\n * ```js\n * const real = tf.tensor1d([1, 2, 3]);\n * const imag = tf.tensor1d([1, 2, 3]);\n * const x = tf.complex(real, imag);\n *\n * x.fft().print(); // tf.spectral.fft(x).print();\n * ```\n * @param input The complex input to compute an fft over.\n *\n * @doc {heading: 'Operations', subheading: 'Spectral', namespace: 'spectral'}\n */\nfunction fft_(input) {\n assert(input.dtype === 'complex64', () => `The dtype for tf.spectral.fft() must be complex64 ` +\n `but got ${input.dtype}.`);\n const inputs = { input };\n return ENGINE.runKernelFunc(backend => {\n // Collapse all outer dimensions to a single batch dimension.\n const innerDimensionSize = input.shape[input.shape.length - 1];\n const batch = input.size / innerDimensionSize;\n const input2D = input.as2D(batch, innerDimensionSize);\n const result = backend.fft(input2D);\n return result.reshape(input.shape);\n }, inputs, null /* gradient */, FFT);\n}\nexport const fft = op({ fft_ });\n//# sourceMappingURL=fft.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { IFFT } from '../../kernel_names';\nimport { assert } from '../../util';\nimport { op } from '../operation';\nimport { reshape } from '../reshape';\n/**\n * Inverse fast Fourier transform.\n *\n * Computes the inverse 1-dimensional discrete Fourier transform over the\n * inner-most dimension of input.\n *\n * ```js\n * const real = tf.tensor1d([1, 2, 3]);\n * const imag = tf.tensor1d([1, 2, 3]);\n * const x = tf.complex(real, imag);\n *\n * x.ifft().print(); // tf.spectral.ifft(x).print();\n * ```\n * @param input The complex input to compute an ifft over.\n *\n * @doc {heading: 'Operations', subheading: 'Spectral', namespace: 'spectral'}\n */\nfunction ifft_(input) {\n assert(input.dtype === 'complex64', () => `The dtype for tf.spectral.ifft() must be complex64 ` +\n `but got ${input.dtype}.`);\n const inputs = { input };\n return ENGINE.runKernelFunc(backend => {\n // Collapse all outer dimensions to a single batch dimension.\n const innerDimensionSize = input.shape[input.shape.length - 1];\n const batch = input.size / innerDimensionSize;\n const input2D = reshape(input, [batch, innerDimensionSize]);\n const result = backend.ifft(input2D);\n return reshape(result, input.shape);\n }, inputs, null /* gradient */, IFFT);\n}\nexport const ifft = op({ ifft_ });\n//# sourceMappingURL=ifft.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { complex } from '../complex';\nimport { concat } from '../concat';\nimport { imag } from '../imag';\nimport { mul } from '../mul';\nimport { op } from '../operation';\nimport { real } from '../real';\nimport { reshape } from '../reshape';\nimport { reverse } from '../reverse';\nimport { scalar } from '../scalar';\nimport { slice } from '../slice';\nimport { ifft } from './ifft';\n/**\n * Inversed real value input fast Fourier transform.\n *\n * Computes the 1-dimensional inversed discrete Fourier transform over the\n * inner-most dimension of the real input.\n *\n * ```js\n * const real = tf.tensor1d([1, 2, 3]);\n * const imag = tf.tensor1d([0, 0, 0]);\n * const x = tf.complex(real, imag);\n *\n * x.irfft().print();\n * ```\n * @param input The real value input to compute an irfft over.\n *\n * @doc {heading: 'Operations', subheading: 'Spectral', namespace: 'spectral'}\n */\nfunction irfft_(input) {\n const innerDimensionSize = input.shape[input.shape.length - 1];\n const batch = input.size / innerDimensionSize;\n let ret;\n if (innerDimensionSize <= 2) {\n const complexInput = reshape(input, [batch, innerDimensionSize]);\n ret = ifft(complexInput);\n }\n else {\n // The length of unique components of the DFT of a real-valued signal\n // is 2 * (input_len - 1)\n const outputShape = [batch, 2 * (innerDimensionSize - 1)];\n const realInput = reshape(real(input), [batch, innerDimensionSize]);\n const imagInput = reshape(imag(input), [batch, innerDimensionSize]);\n const realConjugate = reverse(slice(realInput, [0, 1], [batch, innerDimensionSize - 2]), 1);\n const imagConjugate = mul(reverse(slice(imagInput, [0, 1], [batch, innerDimensionSize - 2]), 1), scalar(-1));\n const r = concat([realInput, realConjugate], 1);\n const i = concat([imagInput, imagConjugate], 1);\n const complexInput = reshape(complex(r, i), [outputShape[0], outputShape[1]]);\n ret = ifft(complexInput);\n }\n ret = real(ret);\n // reshape the result if the input is 3D tensor.\n if (input.rank === 3 && input.shape[0] !== 0) {\n const temp = ret;\n const batch = input.shape[0];\n ret = reshape(ret, [batch, ret.shape[0] / batch, ret.shape[1]]);\n temp.dispose();\n }\n return ret;\n}\nexport const irfft = op({ irfft_ });\n//# sourceMappingURL=irfft.js.map", "import { assert } from '../util';\n/**\n * Prepare the split size array. When the input is a number, the axis is evenly\n * divided among the split size. 
When the input contains the negative value, the\n * rest of the axis is allocated toward that.\n */\nexport function prepareSplitSize(x, numOrSizeSplits, axis = 0) {\n let splitSizes = [];\n if (typeof (numOrSizeSplits) === 'number') {\n assert(x.shape[axis] % numOrSizeSplits === 0, () => 'Number of splits must evenly divide the axis.');\n splitSizes =\n new Array(numOrSizeSplits).fill(x.shape[axis] / numOrSizeSplits);\n }\n else {\n const numOfNegs = numOrSizeSplits.reduce((count, value) => {\n if (value === -1) {\n count += 1;\n }\n return count;\n }, 0);\n assert(numOfNegs <= 1, () => 'There should be only one negative value in split array.');\n const negIndex = numOrSizeSplits.indexOf(-1);\n // Allow the number of split array to be -1, which indicates the rest\n // of dimension is allocated to that split.\n if (negIndex !== -1) {\n const total = numOrSizeSplits.reduce((a, b) => b > 0 ? a + b : a);\n numOrSizeSplits[negIndex] = x.shape[axis] - total;\n }\n assert(x.shape[axis] === numOrSizeSplits.reduce((a, b) => a + b), () => 'The sum of sizes must match the size of the axis dimension.');\n splitSizes = numOrSizeSplits;\n }\n return splitSizes;\n}\n//# sourceMappingURL=split_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { SplitV } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam } from '../util';\nimport { op } from './operation';\nimport { prepareSplitSize } from './split_util';\n/**\n * Splits a `tf.Tensor` into sub tensors.\n *\n * If `numOrSizeSplits` is a number, splits `x` along dimension `axis`\n * into `numOrSizeSplits` smaller tensors.\n * Requires that `numOrSizeSplits` evenly divides `x.shape[axis]`.\n *\n * If `numOrSizeSplits` is a number array, splits `x` into\n * `numOrSizeSplits.length` pieces. The shape of the `i`-th piece has the\n * same size as `x` except along dimension `axis` where the size is\n * `numOrSizeSplits[i]`.\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4, 5, 6, 7, 8], [2, 4]);\n * const [a, b] = tf.split(x, 2, 1);\n * a.print();\n * b.print();\n *\n * const [c, d, e] = tf.split(x, [1, 2, 1], 1);\n * c.print();\n * d.print();\n * e.print();\n * ```\n *\n * @param x The input tensor to split.\n * @param numOrSizeSplits Either an integer indicating the number of\n * splits along the axis or an array of integers containing the sizes of\n * each output tensor along the axis. If a number then it must evenly divide\n * `x.shape[axis]`; otherwise the sum of sizes must match `x.shape[axis]`.\n * Can contain one -1 indicating that dimension is to be inferred.\n * @param axis The dimension along which to split. 
Defaults to 0 (the first\n * dim).\n *\n * @doc {heading: 'Tensors', subheading: 'Slicing and Joining'}\n */\nfunction split_(x, numOrSizeSplits, axis = 0) {\n const $x = convertToTensor(x, 'x', 'split');\n const forward = (backend, _) => {\n const $axis = parseAxisParam(axis, $x.shape)[0];\n const splitSizes = prepareSplitSize($x, numOrSizeSplits, $axis);\n return backend.split($x, splitSizes, $axis);\n };\n const inputs = { x: $x };\n const attr = { numOrSizeSplits, axis };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, SplitV, attr);\n}\nexport const split = op({ split_ });\n//# sourceMappingURL=split.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { assert } from '../../util';\nimport { complex } from '../complex';\nimport { concat } from '../concat';\nimport { imag } from '../imag';\nimport { op } from '../operation';\nimport { real } from '../real';\nimport { reshape } from '../reshape';\nimport { slice } from '../slice';\nimport { split } from '../split';\nimport { zeros } from '../zeros';\nimport { zerosLike } from '../zeros_like';\nimport { fft } from './fft';\n/**\n * Real value input fast Fourier transform.\n *\n * Computes the 1-dimensional discrete Fourier transform over the\n * inner-most dimension of the real input.\n *\n * ```js\n * const real = tf.tensor1d([1, 2, 3]);\n *\n * real.rfft().print();\n * ```\n * @param input The real value input to compute an rfft over.\n *\n * @doc {heading: 'Operations', subheading: 'Spectral', namespace: 'spectral'}\n */\nfunction rfft_(input, fftLength) {\n assert(input.dtype === 'float32', () => `The dtype for rfft() must be real value but got ${input.dtype}`);\n let innerDimensionSize = input.shape[input.shape.length - 1];\n const batch = input.size / innerDimensionSize;\n let adjustedInput;\n if (fftLength != null && fftLength < innerDimensionSize) {\n // Need to crop\n const begin = input.shape.map(v => 0);\n const size = input.shape.map(v => v);\n size[input.shape.length - 1] = fftLength;\n adjustedInput = slice(input, begin, size);\n innerDimensionSize = fftLength;\n }\n else if (fftLength != null && fftLength > innerDimensionSize) {\n // Need to pad with zeros\n const zerosShape = input.shape.map(v => v);\n zerosShape[input.shape.length - 1] = fftLength - innerDimensionSize;\n adjustedInput = concat([input, zeros(zerosShape)], input.shape.length - 1);\n innerDimensionSize = fftLength;\n }\n else {\n adjustedInput = input;\n }\n // Complement the input with zero imaginary numbers.\n const zerosInput = zerosLike(adjustedInput);\n const complexInput = reshape(complex(adjustedInput, zerosInput), [batch, innerDimensionSize]);\n const ret = fft(complexInput);\n // Exclude complex conjugations. 
These conjugations are put symmetrically.\n const half = Math.floor(innerDimensionSize / 2) + 1;\n const realValues = real(ret);\n const imagValues = imag(ret);\n const realComplexConjugate = split(realValues, [half, innerDimensionSize - half], realValues.shape.length - 1);\n const imagComplexConjugate = split(imagValues, [half, innerDimensionSize - half], imagValues.shape.length - 1);\n const outputShape = adjustedInput.shape.slice();\n outputShape[adjustedInput.shape.length - 1] = half;\n return reshape(complex(realComplexConjugate[0], imagComplexConjugate[0]), outputShape);\n}\nexport const rfft = op({ rfft_ });\n//# sourceMappingURL=rfft.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Sqrt } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes square root of the input `tf.Tensor` element-wise: `y = sqrt(x)`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 4, -1]);\n *\n * x.sqrt().print(); // or tf.sqrt(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction sqrt_(x) {\n const $x = convertToTensor(x, 'x', 'sqrt');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.sqrt($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Sqrt);\n}\nexport const sqrt = op({ sqrt_ });\n//# sourceMappingURL=sqrt.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { SquaredDifference } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { op } from './operation';\n/**\n * Returns (a - b) * (a - b) element-wise.\n * Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([1, 4, 3, 16]);\n * const b = tf.tensor1d([1, 2, 9, 4]);\n *\n * a.squaredDifference(b).print(); // or tf.squaredDifference(a, b)\n * ```\n *\n * ```js\n * // Broadcast squared difference a with b.\n * const a = tf.tensor1d([2, 4, 6, 8]);\n * const b = tf.scalar(5);\n *\n * a.squaredDifference(b).print(); // or tf.squaredDifference(a, b)\n * ```\n *\n * @param a The first tensor.\n * @param b The second tensor. Must have the same type as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction squaredDifference_(a, b) {\n let $a = convertToTensor(a, 'a', 'squaredDifference');\n let $b = convertToTensor(b, 'b', 'squaredDifference');\n [$a, $b] = makeTypesMatch($a, $b);\n assertAndGetBroadcastShape($a.shape, $b.shape);\n const forward = (backend, save) => {\n const res = backend.squaredDifference($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n const attrs = {};\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, SquaredDifference, attrs);\n}\nexport const squaredDifference = op({ squaredDifference_ });\n//# sourceMappingURL=squared_difference.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport { squeezeShape } from '../util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Removes dimensions of size 1 from the shape of a `tf.Tensor`.\n *\n * ```js\n * const x = tf.tensor([1, 2, 3, 4], [1, 1, 4]);\n * x.squeeze().print();\n * ```\n *\n * @param x The input tensor to be squeezed.\n * @param axis An optional list of numbers. If specified, only\n * squeezes the dimensions listed. The dimension index starts at 0. 
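A minimal sketch (illustrative only) of the `squaredDifference` op above, including the broadcasting case:

```js
import * as tf from '@tensorflow/tfjs';

const a = tf.tensor1d([1, 4, 3, 16]);
const b = tf.tensor1d([1, 2, 9, 4]);
tf.squaredDifference(a, b).print(); // [0, 4, 36, 144]

// Broadcasting a rank-1 tensor against a scalar.
tf.squaredDifference(tf.tensor1d([2, 4, 6, 8]), tf.scalar(5)).print(); // [9, 1, 1, 9]
```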
It\n * is an error to squeeze a dimension that is not 1.\n *\n * @doc {heading: 'Tensors', subheading: 'Transformations'}\n */\nfunction squeeze_(x, axis) {\n const $x = convertToTensor(x, 'x', 'squeeze');\n return reshape($x, squeezeShape($x.shape, axis).newShape);\n}\nexport const squeeze = op({ squeeze_ });\n//# sourceMappingURL=squeeze.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensorArray } from '../tensor_util_env';\nimport * as util from '../util';\nimport { concat } from './concat';\nimport { expandDims } from './expand_dims';\nimport { op } from './operation';\n/**\n * Stacks a list of rank-`R` `tf.Tensor`s into one rank-`(R+1)` `tf.Tensor`.\n *\n * ```js\n * const a = tf.tensor1d([1, 2]);\n * const b = tf.tensor1d([3, 4]);\n * const c = tf.tensor1d([5, 6]);\n * tf.stack([a, b, c]).print();\n * ```\n *\n * @param tensors A list of tensor objects with the same shape and dtype.\n * @param axis The axis to stack along. Defaults to 0 (the first dim).\n *\n * @doc {heading: 'Tensors', subheading: 'Slicing and Joining'}\n */\nfunction stack_(tensors, axis = 0) {\n const $tensors = convertToTensorArray(tensors, 'tensors', 'stack');\n util.assert($tensors.length >= 1, () => 'Pass at least one tensor to tf.stack');\n if ($tensors.length === 1) {\n return expandDims($tensors[0], axis);\n }\n const rank = $tensors[0].rank;\n const shape = $tensors[0].shape;\n const dtype = $tensors[0].dtype;\n util.assert(axis <= rank, () => 'Axis must be <= rank of the tensor');\n $tensors.forEach(t => {\n util.assertShapesMatch(shape, t.shape, 'All tensors passed to stack must have matching shapes');\n util.assert(dtype === t.dtype, () => 'All tensors passed to stack must have matching dtypes');\n });\n const expandedTensors = $tensors.map(t => expandDims(t, axis));\n // Stack exists in the TensorFlow C++ API\n // (https://www.tensorflow.org/api_docs/cc/class/tensorflow/ops/stack) but not\n // in\n // https://raw.githubusercontent.com/tensorflow/tensorflow/master/tensorflow/core/ops/ops.pbtxt.\n // Therefore we are treating it like a high-level op rather than\n // creating a dedicated stack kernel.\n return concat(expandedTensors, axis);\n}\nexport const stack = op({ stack_ });\n//# sourceMappingURL=stack.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
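An added sketch combining the `squeeze` and `stack` ops documented above, assuming the public `@tensorflow/tfjs` API:

```js
import * as tf from '@tensorflow/tfjs';

// squeeze: drop size-1 dimensions, or only the listed axes.
const x = tf.tensor([1, 2, 3, 4], [1, 1, 4]);
x.squeeze().print();    // shape [4]
x.squeeze([0]).print(); // shape [1, 4]

// stack: combine rank-R tensors into one rank-(R+1) tensor.
const a = tf.tensor1d([1, 2]);
const b = tf.tensor1d([3, 4]);
tf.stack([a, b]).print();    // shape [2, 2], rows a then b
tf.stack([a, b], 1).print(); // shape [2, 2], pairing corresponding elements
```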
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Step } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes step of the input `tf.Tensor` element-wise: `x > 0 ? 1 : alpha * x`\n *\n * ```js\n * const x = tf.tensor1d([0, 2, -1, -3]);\n *\n * x.step(.5).print(); // or tf.step(x, .5)\n * ```\n * @param x The input tensor.\n * @param alpha The gradient when input is negative.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction step_(x, alpha = 0.0) {\n const $x = convertToTensor(x, 'x', 'step');\n const inputs = { x: $x };\n const attrs = { alpha };\n return ENGINE.runKernelFunc(backend => backend.step($x, alpha), inputs, null /* grad */, Step, attrs);\n}\nexport const step = op({ step_ });\n//# sourceMappingURL=step.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { StridedSlice } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { slice } from './slice';\nimport { computeOutShape, getNormalizedAxes, maskToAxes } from './slice_util';\n/**\n * Extracts a strided slice of a tensor.\n *\n * Roughly speaking, this op extracts a slice of size (end-begin)/stride from\n * the given input tensor (x). Starting at the location specified by begin the\n * slice continues by adding stride to the index until all dimensions are not\n * less than end. 
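A brief, hedged call sketch for the `step` op above; the exact treatment of non-positive inputs follows the doc comment in the source, so no expected output is asserted here:

```js
import * as tf from '@tensorflow/tfjs';

// Element-wise step function; `alpha` controls the result for
// non-positive inputs (see the doc comment above).
const x = tf.tensor1d([0, 2, -1, -3]);
tf.step(x, 0.5).print();
```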
Note that a stride can be negative, which causes a reverse\n * slice.\n *\n * ```js\n * const t = tf.tensor3d([1, 1, 1 ,2, 2, 2, 3, 3, 3, 4, 4, 4, 5, 5, 5, 6, 6, 6],\n * [3, 2, 3]);\n * t.stridedSlice([1, 0, 0], [2, 1, 3], [1, 1, 1]).print() // [[[3, 3, 3]]]\n * t.stridedSlice([1, 0, 0], [2, 2, 3], [1, 1, 1]).print() // [[[3, 3, 3],\n * // [4, 4, 4]]]\n * t.stridedSlice([1, -1, 0], [2, -3, 3], [1, -1, 1]).print() // [[[4, 4, 4],\n * // [3, 3, 3]]]\n * ```\n *\n * @param x The tensor to stride slice.\n * @param begin The coordinates to start the slice from.\n * @param end: The coordinates to end the slice at.\n * @param strides: The size of the slice.\n * @param beginMask: If the ith bit of beginMask is set, begin[i] is ignored\n * and the fullest possible range in that dimension is used instead.\n * @param endMask: If the ith bit of endMask is set, end[i] is ignored\n * and the fullest possible range in that dimension is used instead.\n * @param shrinkAxisMask: a bitmask where bit i implies that\n * the ith specification should shrink the dimensionality. begin and end must\n * imply a slice of size 1 in the dimension.\n *\n * @doc {heading: 'Operations', subheading: 'Slicing and Joining'}\n */\nfunction stridedSlice_(x, begin, end, strides, beginMask = 0, endMask = 0, ellipsisMask = 0, newAxisMask = 0, shrinkAxisMask = 0) {\n let $x = convertToTensor(x, 'x', 'stridedSlice');\n const forward = (backend) => {\n if (strides == null) {\n strides = new Array(begin.length);\n }\n const ellipsisAxes = maskToAxes(ellipsisMask);\n if (ellipsisAxes.length > 1) {\n throw new Error('Multiple ellipses in slice is not allowed.');\n }\n if (ellipsisMask !== 0 && newAxisMask !== 0) {\n throw new Error('Using both ellipsisMask and newAxisMask is not yet supported.');\n }\n if (ellipsisMask !== 0 && shrinkAxisMask !== 0) {\n throw new Error('Using both ellipsisMask and shrinkAxisMask is not yet supported.');\n }\n const numInterpolatedAxes = $x.rank - begin.length;\n // Expand the dims of x based on the newAxisMask.\n const expandAxes = maskToAxes(newAxisMask);\n const newShape = $x.shape.slice();\n expandAxes.forEach(axis => {\n begin[axis] = 0;\n end[axis] = 1;\n newShape.splice(axis, 0, 1);\n });\n $x = reshape($x, newShape);\n const { begin: normalizedBegin, end: normalizedEnd, strides: normalizedStrides } = getNormalizedAxes($x.shape, ellipsisAxes, numInterpolatedAxes, begin, end, strides, beginMask, endMask, ellipsisMask);\n begin = normalizedBegin;\n end = normalizedEnd;\n strides = normalizedStrides;\n const shrinkAxes = maskToAxes(shrinkAxisMask);\n // Adjust the ends based on the shrink mask.\n shrinkAxes.forEach(axis => {\n end[axis] = begin[axis] + 1;\n strides[axis] = 1;\n });\n // Figure out the output shape.\n const size = computeOutShape(begin, end, strides);\n // Remove the axes based on shrinkMask.\n const outShape = size.filter((_, axis) => shrinkAxes.indexOf(axis) === -1);\n const nonStrided = strides.every(v => v === 1);\n if (nonStrided) {\n return reshape(slice($x, begin, size), outShape);\n }\n const res = backend.stridedSlice($x, begin, end, strides);\n return reshape(res, outShape);\n };\n const inputs = { x: $x };\n const attrs = {\n begin,\n end,\n strides,\n beginMask,\n endMask,\n ellipsisMask,\n newAxisMask,\n shrinkAxisMask\n };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, StridedSlice, attrs);\n}\nexport const stridedSlice = op({ stridedSlice_ });\n//# sourceMappingURL=strided_slice.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
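An illustrative sketch of `stridedSlice`, reusing the tensor from the doc comment above:

```js
import * as tf from '@tensorflow/tfjs';

const t = tf.tensor3d(
    [1, 1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4, 5, 5, 5, 6, 6, 6], [3, 2, 3]);
t.stridedSlice([1, 0, 0], [2, 1, 3], [1, 1, 1]).print(); // [[[3, 3, 3]]]
// A negative stride reverses the slice along that dimension.
t.stridedSlice([1, -1, 0], [2, -3, 3], [1, -1, 1]).print(); // [[[4, 4, 4], [3, 3, 3]]]
```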
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Tan } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes tan of the input `tf.Tensor` element-wise, `tan(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, Math.PI / 2, Math.PI * 3 / 4]);\n *\n * x.tan().print(); // or tf.tan(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction tan_(x) {\n const $x = convertToTensor(x, 'x', 'tan');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.tan($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Tan);\n}\nexport const tan = op({ tan_ });\n//# sourceMappingURL=tan.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { inferShape } from '../tensor_util_env';\nimport { assertNonNull } from '../util';\nimport { makeTensor } from './tensor_ops_util';\n/**\n * Creates rank-2 `tf.Tensor` with the provided values, shape and dtype.\n *\n * The same functionality can be achieved with `tf.tensor`, but in general\n * we recommend using `tf.tensor2d` as it makes the code more readable.\n *\n * ```js\n * // Pass a nested array.\n * tf.tensor2d([[1, 2], [3, 4]]).print();\n * ```\n * ```js\n * // Pass a flat array and specify a shape.\n * tf.tensor2d([1, 2, 3, 4], [2, 2]).print();\n * ```\n *\n * @param values The values of the tensor. Can be nested array of numbers,\n * or a flat array, or a `TypedArray`.\n * @param shape The shape of the tensor. 
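A one-line usage sketch (added here) for the `tan` op documented above:

```js
import * as tf from '@tensorflow/tfjs';

// Element-wise tangent.
tf.tan(tf.tensor1d([0, Math.PI / 4])).print(); // approximately [0, 1]
```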
If not provided, it is inferred from\n * `values`.\n * @param dtype The data type.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function tensor2d(values, shape, dtype) {\n assertNonNull(values);\n if (shape != null && shape.length !== 2) {\n throw new Error('tensor2d() requires shape to have two numbers');\n }\n const inferredShape = inferShape(values, dtype);\n if (inferredShape.length !== 2 && inferredShape.length !== 1) {\n throw new Error('tensor2d() requires values to be number[][] or flat/TypedArray');\n }\n if (inferredShape.length === 1 && shape == null) {\n throw new Error('tensor2d() requires shape to be provided when `values` ' +\n 'are a flat/TypedArray');\n }\n return makeTensor(values, shape, inferredShape, dtype);\n}\n//# sourceMappingURL=tensor2d.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { inferShape } from '../tensor_util_env';\nimport { assertNonNull } from '../util';\nimport { makeTensor } from './tensor_ops_util';\n/**\n * Creates rank-4 `tf.Tensor` with the provided values, shape and dtype.\n *\n * The same functionality can be achieved with `tf.tensor`, but in general\n * we recommend using `tf.tensor4d` as it makes the code more readable.\n *\n * ```js\n * // Pass a nested array.\n * tf.tensor4d([[[[1], [2]], [[3], [4]]]]).print();\n * ```\n * ```js\n * // Pass a flat array and specify a shape.\n * tf.tensor4d([1, 2, 3, 4], [1, 2, 2, 1]).print();\n * ```\n *\n * @param values The values of the tensor. Can be nested array of numbers,\n * or a flat array, or a `TypedArray`.\n * @param shape The shape of the tensor. Optional. If not provided,\n * it is inferred from `values`.\n * @param dtype The data type.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function tensor4d(values, shape, dtype) {\n assertNonNull(values);\n if (shape != null && shape.length !== 4) {\n throw new Error('tensor4d() requires shape to have four numbers');\n }\n const inferredShape = inferShape(values, dtype);\n if (inferredShape.length !== 4 && inferredShape.length !== 1) {\n throw new Error('tensor4d() requires values to be number[][][][] or flat/TypedArray');\n }\n if (inferredShape.length === 1 && shape == null) {\n throw new Error('tensor4d() requires shape to be provided when `values` ' +\n 'are a flat array');\n }\n return makeTensor(values, shape, inferredShape, dtype);\n}\n//# sourceMappingURL=tensor4d.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { inferShape } from '../tensor_util_env';\nimport { assertNonNull } from '../util';\nimport { makeTensor } from './tensor_ops_util';\n/**\n * Creates rank-5 `tf.Tensor` with the provided values, shape and dtype.\n *\n * The same functionality can be achieved with `tf.tensor`, but in general\n * we recommend using `tf.tensor5d` as it makes the code more readable.\n *\n * ```js\n * // Pass a nested array.\n * tf.tensor5d([[[[[1], [2]], [[3], [4]]]]]).print();\n * ```\n * ```js\n * // Pass a flat array and specify a shape.\n * tf.tensor5d([1, 2, 3, 4, 5, 6, 7, 8], [1, 2, 2, 2, 1]).print();\n * ```\n *\n * @param values The values of the tensor. Can be nested array of numbers,\n * or a flat array, or a `TypedArray`.\n * @param shape The shape of the tensor. Optional. If not provided,\n * it is inferred from `values`.\n * @param dtype The data type.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function tensor5d(values, shape, dtype) {\n assertNonNull(values);\n if (shape != null && shape.length !== 5) {\n throw new Error('tensor5d() requires shape to have five numbers');\n }\n const inferredShape = inferShape(values, dtype);\n if (inferredShape.length !== 5 && inferredShape.length !== 1) {\n throw new Error('tensor5d() requires values to be ' +\n 'number[][][][][] or flat/TypedArray');\n }\n if (inferredShape.length === 1 && shape == null) {\n throw new Error('tensor5d() requires shape to be provided when `values` ' +\n 'are a flat array');\n }\n return makeTensor(values, shape, inferredShape, dtype);\n}\n//# sourceMappingURL=tensor5d.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { inferShape } from '../tensor_util_env';\nimport { assertNonNull } from '../util';\nimport { makeTensor } from './tensor_ops_util';\n/**\n * Creates rank-6 `tf.Tensor` with the provided values, shape and dtype.\n *\n * The same functionality can be achieved with `tf.tensor`, but in general\n * we recommend using `tf.tensor6d` as it makes the code more readable.\n *\n * ```js\n * // Pass a nested array.\n * tf.tensor6d([[[[[[1],[2]],[[3],[4]]],[[[5],[6]],[[7],[8]]]]]]).print();\n * ```\n * ```js\n * // Pass a flat array and specify a shape.\n * tf.tensor6d([1, 2, 3, 4, 5, 6, 7, 8], [1, 1, 2, 2, 2, 1]).print();\n * ```\n *\n * @param values The values of the tensor. Can be nested array of numbers,\n * or a flat array, or a `TypedArray`.\n * @param shape The shape of the tensor. Optional. If not provided,\n * it is inferred from `values`.\n * @param dtype The data type.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function tensor6d(values, shape, dtype) {\n assertNonNull(values);\n if (shape != null && shape.length !== 6) {\n throw new Error('tensor6d() requires shape to have six numbers');\n }\n const inferredShape = inferShape(values, dtype);\n if (inferredShape.length !== 6 && inferredShape.length !== 1) {\n throw new Error('tensor6d() requires values to be number[][][][][][] or ' +\n 'flat/TypedArray');\n }\n if (inferredShape.length === 1 && shape == null) {\n throw new Error('tensor6d() requires shape to be provided when `values` ' +\n 'are a flat array');\n }\n shape = shape ||\n inferredShape;\n return makeTensor(values, shape, inferredShape, dtype);\n}\n//# sourceMappingURL=tensor6d.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { TopK } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Finds the values and indices of the `k` largest entries along the last\n * dimension.\n *\n * If the input is a vector (rank=1), finds the k largest entries in the vector\n * and outputs their values and indices as vectors. 
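A sketch of the rank-specific tensor creators (`tensor2d` through `tensor6d`) documented in the preceding sources; it assumes the public `@tensorflow/tfjs` entry point:

```js
import * as tf from '@tensorflow/tfjs';

// Either a nested array, or a flat array plus an explicit shape.
tf.tensor2d([[1, 2], [3, 4]]).print();           // shape [2, 2]
tf.tensor2d([1, 2, 3, 4], [2, 2]).print();       // same tensor
tf.tensor4d([1, 2, 3, 4], [1, 2, 2, 1]).print(); // shape [1, 2, 2, 1]
tf.tensor6d([1, 2, 3, 4, 5, 6, 7, 8], [1, 1, 2, 2, 2, 1]).print();
// Passing a flat array without a shape throws, since the rank cannot be inferred.
```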
Thus values[j] is the j-th\n * largest entry in input, and its index is indices[j].\n * For higher rank inputs, computes the top k entries along the last dimension.\n *\n * If two elements are equal, the lower-index element appears first.\n *\n * ```js\n * const a = tf.tensor2d([[1, 5], [4, 3]]);\n * const {values, indices} = tf.topk(a);\n * values.print();\n * indices.print();\n * ```\n * @param x 1-D or higher `tf.Tensor` with last dimension being at least `k`.\n * @param k Number of top elements to look for along the last dimension.\n * @param sorted If true, the resulting `k` elements will be sorted by the\n * values in descending order.\n *\n * @doc {heading: 'Operations', subheading: 'Evaluation'}\n */\nfunction topk_(x, k = 1, sorted = true) {\n const $x = convertToTensor(x, 'x', 'topk');\n if ($x.rank === 0) {\n throw new Error('topk() expects the input to be of rank 1 or higher');\n }\n const lastDim = $x.shape[$x.shape.length - 1];\n if (k > lastDim) {\n throw new Error(`'k' passed to topk() must be <= the last dimension (${lastDim}) ` +\n `but got ${k}`);\n }\n const inputs = { x: $x };\n const attrs = { k, sorted };\n const [values, indices] = ENGINE.runKernelFunc(b => b.topk($x, k, sorted), inputs, null /* grad */, TopK, attrs);\n return { values, indices };\n}\nexport const topk = op({ topk_ });\n//# sourceMappingURL=topk.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { buffer } from './buffer';\nimport { op } from './operation';\nimport { MPRandGauss } from './rand_util';\n/**\n * Creates a `tf.Tensor` with values sampled from a truncated normal\n * distribution.\n *\n * ```js\n * tf.truncatedNormal([2, 2]).print();\n * ```\n *\n * The generated values follow a normal distribution with specified mean and\n * standard deviation, except that values whose magnitude is more than 2\n * standard deviations from the mean are dropped and re-picked.\n *\n * @param shape An array of integers defining the output tensor shape.\n * @param mean The mean of the normal distribution.\n * @param stdDev The standard deviation of the normal distribution.\n * @param dtype The data type of the output tensor.\n * @param seed The seed for the random number generator.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nfunction truncatedNormal_(shape, mean = 0, stdDev = 1, dtype, seed) {\n if (dtype != null && dtype === 'bool') {\n throw new Error(`Unsupported data type $ { dtype }`);\n }\n const randGauss = new MPRandGauss(mean, stdDev, dtype, true /* truncated */, seed);\n const res = buffer(shape, dtype);\n for (let i = 0; i < res.values.length; i++) {\n res.values[i] = randGauss.nextValue();\n }\n return res.toTensor();\n}\nexport const truncatedNormal = op({ truncatedNormal_ });\n//# sourceMappingURL=truncated_normal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
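An added sketch for the `topk` and `truncatedNormal` ops documented above:

```js
import * as tf from '@tensorflow/tfjs';

// topk along the last dimension.
const {values, indices} = tf.topk(tf.tensor1d([1, 20, 3, 40]), 2);
values.print();  // [40, 20]
indices.print(); // [3, 1]

// Truncated normal: samples beyond 2 standard deviations are re-drawn.
tf.truncatedNormal([2, 2], 0, 1, 'float32', 42).print();
```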
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Unique } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assert } from '../util';\nimport { op } from './operation';\n/**\n * Finds unique elements along an axis of a tensor.\n *\n * It returns a tensor `values` containing all of the unique elements along the\n * `axis` of the given tensor `x` in the same order that they occur along the\n * `axis` in `x`; `x` does not need to be sorted. It also returns a tensor\n * `indices` the same size as the number of the elements in `x` along the `axis`\n * dimension. It contains the index in the unique output `values`.\n *\n * ```js\n * // A 1-D tensor\n * const a = tf.tensor1d([1, 1, 2, 4, 4, 4, 7, 8, 8]);\n * const {values, indices} = tf.unique(a);\n * values.print(); // [1, 2, 4, 7, 8,]\n * indices.print(); // [0, 0, 1, 2, 2, 2, 3, 4, 4]\n * ```\n *\n * ```js\n * // A 2-D tensor with axis=0\n * //\n * // 'a' is: [[1, 0, 0],\n * // [1, 0, 0],\n * // [2, 0, 0]]\n * const a = tf.tensor2d([[1, 0, 0], [1, 0, 0], [2, 0, 0]]);\n * const {values, indices} = tf.unique(a, 0)\n * values.print(); // [[1, 0, 0],\n * // [2, 0, 0]]\n * indices.print(); // [0, 0, 1]\n * ```\n *\n * ```js\n * // A 2-D tensor with axis=1\n * //\n * // 'a' is: [[1, 0, 0],\n * // [1, 0, 0],\n * // [2, 0, 0]]\n * const a = tf.tensor2d([[1, 0, 0], [1, 0, 0], [2, 0, 0]]);\n * const {values, indices} = tf.unique(a, 1)\n * values.print(); // [[1, 0],\n * // [1, 0],\n * // [2, 0]]\n * indices.print(); // [0, 1, 1]\n * ```\n * @param x A tensor (int32, string, bool).\n * @param axis The axis of the tensor to find the unique elements.\n * @returns [uniqueElements, indices] (see above for details)\n *\n * @doc {heading: 'Operations', subheading: 'Evaluation'}\n */\nfunction unique_(x, axis = 0) {\n // x can be of any dtype, thus null as the last argument.\n const $x = convertToTensor(x, 'x', 'unique', null);\n assert($x.rank > 0, () => 'The input tensor must be at least 1D');\n const inputs = { x: $x };\n const attrs = { axis };\n const [values, indices] = ENGINE.runKernel(Unique, inputs, attrs);\n return { values, indices };\n}\nexport const unique = op({ unique_ });\n//# sourceMappingURL=unique.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
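A minimal sketch of the `unique` op above, mirroring its 1-D doc example:

```js
import * as tf from '@tensorflow/tfjs';

// Unique elements plus, for every input element, an index into `values`.
const a = tf.tensor1d([1, 1, 2, 4, 4, 4, 7, 8, 8]);
const {values, indices} = tf.unique(a);
values.print();  // [1, 2, 4, 7, 8]
indices.print(); // [0, 0, 1, 2, 2, 2, 3, 4, 4]
```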
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { UnsortedSegmentSum } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assert, isInt } from '../util';\nimport { op } from './operation';\n/**\n * Computes the sum along segments of a `tf.Tensor`.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4]);\n * const segmentIds = tf.tensor1d([1, 2, 0, 1], 'int32');\n * const numSegments = 3;\n *\n * x.unsortedSegmentSum(segmentIds, numSegments).print()\n * //or tf.unsortedSegmentSum(x, segmentIds, numSegments)\n * ```\n * @param x The `tf.Tensor` that will be summed along its segments.\n * @param segmentIds A `tf.Tensor1D` whose rank is equal to the rank of `x`'s\n * dimension along the `axis`. Maps each element of `x` to a segment.\n * @param numSegments The number of distinct `segmentIds`.\n *\n * @doc {heading: 'Operations', subheading: 'Segment'}\n */\nfunction unsortedSegmentSum_(x, segmentIds, numSegments) {\n const $x = convertToTensor(x, 'x', 'unsortedSegmentSum');\n const $segmentIds = convertToTensor(segmentIds, 'segmentIds', 'unsortedSegmentSum', 'int32');\n assert(isInt(numSegments), () => 'numSegments must be of dtype int');\n const inputs = { x: $x, segmentIds: $segmentIds };\n const attrs = { numSegments };\n const forward = (backend, save) => {\n const res = backend.unsortedSegmentSum($x, $segmentIds, numSegments);\n save([$segmentIds]);\n return res;\n };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, UnsortedSegmentSum, attrs);\n}\nexport const unsortedSegmentSum = op({ unsortedSegmentSum_ });\n//# sourceMappingURL=unsorted_segment_sum.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Unpack } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * Unstacks a `tf.Tensor` of rank-`R` into a list of rank-`(R-1)` `tf.Tensor`s.\n *\n * ```js\n * const a = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * tf.unstack(a).forEach(tensor => tensor.print());\n * ```\n *\n * @param x A tensor object.\n * @param axis The axis to unstack along. 
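An illustrative call to `unsortedSegmentSum` using the tensors from the doc comment above:

```js
import * as tf from '@tensorflow/tfjs';

// Sum each element of x into the segment named by segmentIds.
const x = tf.tensor1d([1, 2, 3, 4]);
const segmentIds = tf.tensor1d([1, 2, 0, 1], 'int32');
tf.unsortedSegmentSum(x, segmentIds, 3).print(); // [3, 5, 2]
```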
Defaults to 0 (the first dim).\n *\n * @doc {heading: 'Tensors', subheading: 'Slicing and Joining'}\n */\nfunction unstack_(x, axis = 0) {\n const $x = convertToTensor(x, 'x', 'unstack');\n util.assert(axis >= -$x.shape.length && axis < $x.shape.length, () => `Axis = ${axis} is not in [-${$x.shape.length}, ${$x.shape.length})`);\n if (axis < 0) {\n axis += $x.shape.length;\n }\n const inputs = { value: $x };\n const attrs = { axis };\n const forward = (backend) => backend.unstack($x, axis);\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Unpack, attrs);\n}\nexport const unstack = op({ unstack_ });\n//# sourceMappingURL=unstack.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\n/**\n * Creates a new variable with the provided initial value.\n * ```js\n * const x = tf.variable(tf.tensor([1, 2, 3]));\n * x.assign(tf.tensor([4, 5, 6]));\n *\n * x.print();\n * ```\n *\n * @param initialValue Initial value for the tensor.\n * @param trainable If true, optimizers are allowed to update it.\n * @param name Name of the variable. Defaults to a unique id.\n * @param dtype If set, initialValue will be converted to the given type.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function variable(initialValue, trainable = true, name, dtype) {\n return ENGINE.makeVariable(initialValue, trainable, name, dtype);\n}\n//# sourceMappingURL=variable.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/** An implementation of the Where kernel shared between cpu and webgl */\nimport { buffer } from '../ops/buffer';\nexport function whereImpl(condShape, condVals) {\n const indices = [];\n for (let i = 0; i < condVals.length; i++) {\n if (condVals[i]) {\n indices.push(i);\n }\n }\n const inBuffer = buffer(condShape, 'int32');\n const out = buffer([indices.length, condShape.length], 'int32');\n for (let i = 0; i < indices.length; i++) {\n const loc = inBuffer.indexToLoc(indices[i]);\n const offset = i * condShape.length;\n out.values.set(loc, offset);\n }\n return out.toTensor();\n}\n//# sourceMappingURL=where_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
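A short sketch (added here) covering the `unstack` and `variable` helpers documented above:

```js
import * as tf from '@tensorflow/tfjs';

// unstack: split a rank-2 tensor into its rank-1 rows.
const a = tf.tensor2d([1, 2, 3, 4], [2, 2]);
tf.unstack(a).forEach(t => t.print()); // [1, 2] then [3, 4]

// variable: a mutable tensor that optimizers may update.
const v = tf.variable(tf.tensor1d([1, 2, 3]));
v.assign(tf.tensor1d([4, 5, 6]));
v.print(); // [4, 5, 6]
```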
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { whereImpl } from '../backends/where_impl';\nimport { convertToTensor } from '../tensor_util_env';\n/**\n * Returns the coordinates of true elements of condition.\n *\n * The coordinates are returned in a 2-D tensor where the first dimension (rows)\n * represents the number of true elements, and the second dimension (columns)\n * represents the coordinates of the true elements. Keep in mind, the shape of\n * the output tensor can vary depending on how many true values there are in\n * input. Indices are output in row-major order. The resulting tensor has the\n * shape `[numTrueElems, condition.rank]`.\n *\n * This is analogous to calling the python `tf.where(cond)` without an x or y.\n *\n * ```js\n * const cond = tf.tensor1d([false, false, true], 'bool');\n * const result = await tf.whereAsync(cond);\n * result.print();\n * ```\n *\n * @doc {heading: 'Operations', subheading: 'Logical'}\n */\nasync function whereAsync_(condition) {\n const $condition = convertToTensor(condition, 'condition', 'whereAsync', 'bool');\n const vals = await $condition.data();\n const res = whereImpl($condition.shape, vals);\n if (condition !== $condition) {\n $condition.dispose();\n }\n return res;\n}\nexport const whereAsync = whereAsync_;\n//# sourceMappingURL=where_async.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
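A hedged sketch of `whereAsync`, following the async style of the doc example above (assumes a context where `await` is available):

```js
import * as tf from '@tensorflow/tfjs';

// Coordinates of the true elements, shape [numTrueElems, condition.rank].
const cond = tf.tensor1d([false, false, true], 'bool');
const coords = await tf.whereAsync(cond);
coords.print(); // [[2]]
```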
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { gather } from './gather';\nimport { reshape } from './reshape';\nimport { squeeze } from './squeeze';\nimport { whereAsync } from './where_async';\n/**\n * Apply boolean mask to tensor.\n *\n * ```js\n * const tensor = tf.tensor2d([1, 2, 3, 4, 5, 6], [3, 2]);\n * const mask = tf.tensor1d([1, 0, 1], 'bool');\n * const result = await tf.booleanMaskAsync(tensor, mask);\n * result.print();\n * ```\n *\n * @param tensor N-D tensor.\n * @param mask K-D boolean tensor, K <= N and K must be known statically.\n * @param axis A 0-D int Tensor representing the axis in tensor to mask from.\n * By default, axis is 0 which will mask from the first dimension.\n * Otherwise K + axis <= N.\n *\n * @doc {heading: 'Tensors', subheading: 'Slicing and Joining'}\n */\nasync function booleanMaskAsync_(tensor, mask, axis) {\n const $tensor = convertToTensor(tensor, 'tensor', 'boolMask');\n const $mask = convertToTensor(mask, 'mask', 'boolMask', 'bool');\n const axisFrom = axis == null ? 0 : axis;\n const maskDim = $mask.rank;\n const tensorShape = $tensor.shape;\n util.assert(maskDim > 0, () => 'mask cannot be scalar');\n util.assertShapesMatch(tensorShape.slice(axisFrom, axisFrom + maskDim), $mask.shape, `mask's shape must match the first K dimensions of tensor's shape,`);\n let leadingSize = 1;\n for (let i = axisFrom; i < axisFrom + maskDim; i++) {\n leadingSize *= tensorShape[i];\n }\n const targetTensorShape = tensorShape.slice(0, axisFrom)\n .concat([leadingSize], tensorShape.slice(axisFrom + maskDim));\n const reshapedTensor = reshape($tensor, targetTensorShape);\n const reshapedMask = reshape($mask, [-1]);\n const positivePositions = await whereAsync(reshapedMask);\n const indices = squeeze(positivePositions, [1]);\n const res = gather(reshapedTensor, indices, axisFrom);\n // Ensure no memory leak.\n if (tensor !== $tensor) {\n $tensor.dispose();\n }\n if (mask !== $mask) {\n $mask.dispose();\n }\n indices.dispose();\n reshapedTensor.dispose();\n reshapedMask.dispose();\n positivePositions.dispose();\n return res;\n}\nexport const booleanMaskAsync = booleanMaskAsync_;\n//# sourceMappingURL=boolean_mask.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
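An added sketch for `booleanMaskAsync`, reusing the tensors from its doc example:

```js
import * as tf from '@tensorflow/tfjs';

// Keep only the rows selected by the boolean mask.
const t = tf.tensor2d([1, 2, 3, 4, 5, 6], [3, 2]);
const mask = tf.tensor1d([1, 0, 1], 'bool');
const masked = await tf.booleanMaskAsync(t, mask);
masked.print(); // [[1, 2], [5, 6]]
```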
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { deprecationWarn } from '../globals';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertShapesMatch } from '../util';\nimport { equal } from './equal';\nimport { greater } from './greater';\nimport { greaterEqual } from './greater_equal';\nimport { less } from './less';\nimport { lessEqual } from './less_equal';\nimport { notEqual } from './not_equal';\nimport { op } from './operation';\n/**\n * @deprecated\n * Strict version of `tf.notEqual` that forces `a` and `b` to be of the same\n * shape.\n *\n * @param a The first input tensor.\n * @param b The second input tensor. Must have the same shape and dtype as\n * `a`.\n */\nfunction notEqualStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'notEqualStrict');\n const $b = convertToTensor(b, 'b', 'notEqualStrict');\n assertShapesMatch($a.shape, $b.shape, 'Error in notEqualStrict: ');\n return notEqual($a, $b);\n}\n/**\n * @deprecated\n * Strict version of `tf.less` that forces `a` and `b` to be of the same\n * shape.\n *\n * @param a The first input tensor.\n * @param b The second input tensor. 
Must have the same shape and dtype as\n * `a`.\n */\nfunction lessStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'lessStrict');\n const $b = convertToTensor(b, 'b', 'lessStrict');\n assertShapesMatch($a.shape, $b.shape, 'Error in lessStrict: ');\n return less($a, $b);\n}\nfunction equalStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'equalStrict');\n const $b = convertToTensor(b, 'b', 'equalStrict');\n assertShapesMatch($a.shape, $b.shape, 'Error in equalStrict: ');\n return equal($a, $b);\n}\nfunction lessEqualStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'lessEqualStrict');\n const $b = convertToTensor(b, 'b', 'lessEqualStrict');\n assertShapesMatch($a.shape, $b.shape, 'Error in lessEqualStrict: ');\n return lessEqual($a, $b);\n}\nfunction greaterStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'greaterStrict');\n const $b = convertToTensor(b, 'b', 'greaterStrict');\n assertShapesMatch($a.shape, $b.shape, 'Error in greaterStrict: ');\n return greater($a, $b);\n}\nfunction greaterEqualStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'greaterEqualStrict');\n const $b = convertToTensor(b, 'b', 'greaterEqualStrict');\n assertShapesMatch($a.shape, $b.shape, 'Error in greaterEqualStrict: ');\n return greaterEqual($a, $b);\n}\nexport const equalStrict = op({ equalStrict_ });\nexport const greaterEqualStrict = op({ greaterEqualStrict_ });\nexport const greaterStrict = op({ greaterStrict_ });\nexport const lessEqualStrict = op({ lessEqualStrict_ });\nexport const lessStrict = op({ lessStrict_ });\nexport const notEqualStrict = op({ notEqualStrict_ });\n//# sourceMappingURL=compare.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { deprecationWarn } from '../globals';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { add } from './add';\nimport { div } from './div';\nimport { maximum } from './maximum';\nimport { minimum } from './minimum';\nimport { mod } from './mod';\nimport { mul } from './mul';\nimport { op } from './operation';\nimport { pow } from './pow';\nimport { squaredDifference } from './squared_difference';\nimport { sub } from './sub';\n/**\n * @deprecated\n * Adds two `tf.Tensor`s element-wise, A + B.\n *\n * Inputs must be the same shape. 
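Since the strict comparison variants above are deprecated, a sketch of the equivalent same-shape calls through the broadcasting ops (illustrative, assuming the public `@tensorflow/tfjs` API):

```js
import * as tf from '@tensorflow/tfjs';

// The broadcasting comparison ops cover the same-shape case directly.
const a = tf.tensor1d([1, 2, 3]);
const b = tf.tensor1d([1, 0, 3]);
tf.notEqual(a, b).print(); // [false, true, false]
tf.less(a, b).print();     // [false, false, false]
```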
For broadcasting support, use add() instead.\n *\n * @param a The first Tensor to add element-wise.\n * @param b The second Tensor to add element-wise.\n */\nfunction addStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'addStrict');\n const $b = convertToTensor(b, 'b', 'addStrict');\n util.assertShapesMatch($a.shape, $b.shape, 'Error in addStrict: ');\n return add($a, $b);\n}\n/**\n * @deprecated\n * Subtracts two `tf.Tensor`s element-wise, A - B. Inputs must\n * be the same shape.\n *\n * For broadcasting support, use `tf.sub` instead.\n *\n * @param a The first Tensor to subtract element-wise.\n * @param b The second Tensor to subtract element-wise.\n */\nfunction subStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'subStrict');\n const $b = convertToTensor(b, 'b', 'subStrict');\n util.assertShapesMatch($a.shape, $b.shape, 'Error in subStrict: ');\n return sub($a, $b);\n}\n/**\n * @deprecated\n * Computes the power of one `tf.Tensor` to another. Inputs must\n * be the same shape.\n *\n * For broadcasting support, use `tf.pow` instead.\n *\n * @param base The base tensor to pow element-wise.\n * @param exp The exponent tensor to pow element-wise.\n */\nfunction powStrict_(base, exp) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n util.assertShapesMatch(base.shape, exp.shape, 'Error in powStrict: ');\n return pow(base, exp);\n}\n/**\n * @deprecated\n * Multiplies two `tf.Tensor`s element-wise, A * B.\n *\n * Inputs must be the same shape. For broadcasting support, use `tf.mul`.\n *\n * @param a The first tensor to multiply.\n * @param b The first tensor to multiply. Must have the same\n * dtype as `a`.\n */\nfunction mulStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'mul');\n const $b = convertToTensor(b, 'b', 'mul');\n util.assertShapesMatch($a.shape, $b.shape, 'Error in multiplyStrict: ');\n return mul($a, $b);\n}\n/**\n * @deprecated\n * Divides two `tf.Tensor`s element-wise, A / B. Inputs must\n * be the same shape.\n *\n * @param a The first tensor as the numerator for element-wise division.\n * @param b The second tensor as the denominator for element-wise division.\n */\nfunction divStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'div');\n const $b = convertToTensor(b, 'b', 'div');\n util.assertShapesMatch($a.shape, $b.shape, 'Error in divideStrict: ');\n return div($a, $b);\n}\n/**\n * @deprecated\n * Returns the mod of a and b (`a < b ? a : b`) element-wise. Inputs must\n * be the same shape. For broadcasting support, use mod().\n *\n * @param a The first tensor.\n * @param b The second tensor. Must have the same dtype as `a`.\n */\nfunction modStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'modStrict');\n const $b = convertToTensor(b, 'b', 'modStrict');\n util.assertShapesMatch($a.shape, $b.shape, 'Error in modStrict: ');\n return mod($a, $b);\n}\n/**\n * @deprecated\n * Returns the min of a and b (`a < b ? a : b`) element-wise. Inputs must\n * be the same shape. 
For broadcasting support, use minimum().\n *\n * @param a The first tensor.\n * @param b The second tensor. Must have the same dtype as `a`.\n */\nfunction minimumStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'minimumStrict');\n const $b = convertToTensor(b, 'b', 'minimumStrict');\n util.assertShapesMatch($a.shape, $b.shape, 'Error in minimumStrict: ');\n return minimum($a, $b);\n}\n/**\n * @deprecated\n * Returns the max of a and b (`a > b ? a : b`) element-wise. Inputs must\n * be the same shape. For broadcasting support, use maximum().\n *\n * @param a The first tensor.\n * @param b The second tensor. Must have the same dtype as `a`.\n */\nfunction maximumStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'maximumStrict');\n const $b = convertToTensor(b, 'b', 'maximumStrict');\n util.assertShapesMatch($a.shape, $b.shape, 'Error in maximumStrict: ');\n return maximum($a, $b);\n}\n/**\n * @deprecated\n * Returns (a - b) * (a - b) element-wise.\n *\n * Inputs must be the same shape. For broadcasting support, use\n * `tf.squaredDifference` instead.\n *\n * @param a The first tensor.\n * @param b The second tensor. Must have the same type as `a`.\n */\nfunction squaredDifferenceStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'squaredDifferenceStrict');\n const $b = convertToTensor(b, 'b', 'squaredDifferenceStrict');\n util.assertShapesMatch($a.shape, $b.shape, 'Error in squaredDifferenceStrict: ');\n return squaredDifference($a, $b);\n}\nexport const addStrict = op({ addStrict_ });\nexport const divStrict = op({ divStrict_ });\nexport const maximumStrict = op({ maximumStrict_ });\nexport const minimumStrict = op({ minimumStrict_ });\nexport const modStrict = op({ modStrict_ });\nexport const mulStrict = op({ mulStrict_ });\nexport const powStrict = op({ powStrict_ });\nexport const squaredDifferenceStrict = op({ squaredDifferenceStrict_ });\nexport const subStrict = op({ subStrict_ });\n//# sourceMappingURL=binary_ops.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
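Likewise, a sketch of the supported broadcasting arithmetic ops that replace the deprecated `*Strict` helpers defined above:

```js
import * as tf from '@tensorflow/tfjs';

// Broadcasting ops handle both same-shape and scalar operands.
const a = tf.tensor1d([1, 2, 3, 4]);
const b = tf.scalar(2);
tf.add(a, b).print();     // [3, 4, 5, 6]
tf.sub(a, b).print();     // [-1, 0, 1, 2]
tf.mul(a, b).print();     // [2, 4, 6, 8]
tf.maximum(a, b).print(); // [2, 2, 3, 4]
```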
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam } from '../util';\nimport { abs } from './abs';\nimport * as axis_util from './axis_util';\nimport { max } from './max';\nimport { min } from './min';\nimport { op } from './operation';\nimport { pow } from './pow';\nimport { reshape } from './reshape';\nimport { scalar } from './scalar';\nimport { sqrt } from './sqrt';\nimport { square } from './square';\nimport { sum } from './sum';\n/**\n * Computes the norm of scalar, vectors, and matrices.\n * This function can compute several different vector norms (the 1-norm, the\n * Euclidean or 2-norm, the inf-norm, and in general the p-norm for p > 0)\n * and matrix norms (Frobenius, 1-norm, and inf-norm).\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4]);\n *\n * x.norm().print(); // or tf.norm(x)\n * ```\n *\n * @param x The input array.\n * @param ord Optional. Order of the norm. Supported norm types are\n * following:\n *\n * | ord | norm for matrices | norm for vectors\n * |------------|---------------------------|---------------------\n * |'euclidean' |Frobenius norm |2-norm\n * |'fro' |Frobenius norm\t |\n * |Infinity |max(sum(abs(x), axis=1)) |max(abs(x))\n * |-Infinity |min(sum(abs(x), axis=1)) |min(abs(x))\n * |1 |max(sum(abs(x), axis=0)) |sum(abs(x))\n * |2 | |sum(abs(x)^2)^1/2*\n *\n * @param axis Optional. If axis is null (the default), the input is\n * considered a vector and a single vector norm is computed over the entire\n * set of values in the Tensor, i.e. norm(x, ord) is equivalent\n * to norm(x.reshape([-1]), ord). If axis is a integer, the input\n * is considered a batch of vectors, and axis determines the axis in x\n * over which to compute vector norms. If axis is a 2-tuple of integer it is\n * considered a batch of matrices and axis determines the axes in NDArray\n * over which to compute a matrix norm.\n * @param keepDims Optional. 
If true, the norm have the same dimensionality\n * as the input.\n *\n * @doc {heading: 'Operations', subheading: 'Matrices'}\n */\nfunction norm_(x, ord = 'euclidean', axis = null, keepDims = false) {\n x = convertToTensor(x, 'x', 'norm');\n const norm = normImpl(x, ord, axis);\n let keepDimsShape = norm.shape;\n if (keepDims) {\n const axes = parseAxisParam(axis, x.shape);\n keepDimsShape = axis_util.expandShapeToKeepDim(norm.shape, axes);\n }\n return reshape(norm, keepDimsShape);\n}\nfunction normImpl(x, p, axis = null) {\n if (x.rank === 0) {\n return abs(x);\n }\n // consider vector when no axis is specified\n if (x.rank !== 1 && axis === null) {\n return normImpl(reshape(x, [-1]), p, axis);\n }\n // vector\n if (x.rank === 1 || typeof axis === 'number' ||\n Array.isArray(axis) && axis.length === 1) {\n if (p === 1) {\n return sum(abs(x), axis);\n }\n if (p === Infinity) {\n return max(abs(x), axis);\n }\n if (p === -Infinity) {\n return min(abs(x), axis);\n }\n if (p === 'euclidean' || p === 2) {\n // norm(x, 2) = sum(abs(xi) ^ 2) ^ 1/2\n return sqrt(sum(pow(abs(x), scalar(2, 'int32')), axis));\n }\n throw new Error(`Error in norm: invalid ord value: ${p}`);\n }\n // matrix (assumption axis[0] < axis[1])\n if (Array.isArray(axis) && axis.length === 2) {\n if (p === 1) {\n return max(sum(abs(x), axis[0]), axis[1] - 1);\n }\n if (p === Infinity) {\n return max(sum(abs(x), axis[1]), axis[0]);\n }\n if (p === -Infinity) {\n return min(sum(abs(x), axis[1]), axis[0]);\n }\n if (p === 'fro' || p === 'euclidean') {\n // norm(x) = sqrt(sum(pow(x, 2)))\n return sqrt(sum(square(x), axis));\n }\n throw new Error(`Error in norm: invalid ord value: ${p}`);\n }\n throw new Error(`Error in norm: invalid axis: ${axis}`);\n}\nexport const norm = op({ norm_ });\n//# sourceMappingURL=norm.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { assertTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { add } from './add';\nimport { div } from './div';\nimport { mul } from './mul';\nimport { op } from './operation';\nimport { pow } from './pow';\nimport { scalar } from './scalar';\nimport { sub } from './sub';\n/**\n * Compute the moving average of a variable.\n *\n * Without zeroDebias, the moving average operation is defined by:\n * `v += delta`\n * where\n * `delta = (1 - decay) * (x - v)`\n *\n * With zeroDebias (default), the `delta` term is scaled to debias the\n * effect of the (assumed) zero-initialization of `v`.\n * `delta /= (1 - decay ^ step)`\n *\n * For more details on the zero-debiasing algorithm, see:\n * https://arxiv.org/abs/1412.6980\n *\n * Note that this function is completely stateless and does not keep track of\n * step count. 
The step count needs to be maintained by the caller and passed\n * in as `step`.\n *\n * @param v The current moving average value.\n * @param x New input value, must have the same shape and dtype as `v`.\n * @param decay The decay factor. Typical values are 0.95 and 0.99.\n * @param step Step count.\n * @param zeroDebias: Whether zeroDebias is to be performed (default: `true`).\n * @returns The new moving average value.\n *\n * @doc {heading: 'Operations', subheading: 'Moving Average'}\n */\nfunction movingAverage_(v, x, decay, step, zeroDebias = true) {\n const $v = convertToTensor(v, 'v', 'movingAverage');\n const $x = convertToTensor(x, 'x', 'movingAverage');\n const $decay = convertToTensor(decay, 'decay', 'movingAverage');\n assertTypesMatch($v, $x);\n util.assert(util.arraysEqual($v.shape, $x.shape), () => 'Shape mismatch in v and x');\n const one = scalar(1);\n const oneMinusDecay = sub(one, $decay);\n let update = mul(sub($x, $v), oneMinusDecay);\n if (zeroDebias) {\n util.assert(step != null, () => 'When using zeroDebias: true, step is required.');\n const $step = convertToTensor(step, 'step', 'movingAverage');\n update = div(update, sub(one, pow($decay, $step)));\n }\n return add($v, update);\n}\nexport const movingAverage = op({ movingAverage_ });\n//# sourceMappingURL=moving_average.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { ScatterNd } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\nimport * as scatter_nd_util from './scatter_nd_util';\n/**\n * Creates a new tensor by applying sparse updates to individual\n * values or slices within a zero tensor of the given shape tensor according to\n * indices. 
This operator is the inverse of the `tf.gatherND` operator which\n * extracts values or slices from a given tensor.\n *\n * ```js\n * const indices = tf.tensor2d([4, 3, 1, 7], [4, 1], 'int32');\n * const updates = tf.tensor1d([9, 10, 11, 12]);\n * const shape = [8];\n * tf.scatterND(indices, updates, shape).print() //[0, 11, 0, 10, 9, 0, 0, 12]\n * ```\n *\n * @param indices The tensor contains the indices into the output tensor.\n * @param updates The tensor contains the value for the indices.\n * @param shape: The shape of the output tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Slicing and Joining'}\n */\nfunction scatterND_(indices, updates, shape) {\n const $indices = convertToTensor(indices, 'indices', 'scatterND', 'int32');\n const $updates = convertToTensor(updates, 'updates', 'scatterND');\n scatter_nd_util.validateInput($updates, $indices, shape);\n const forward = (backend) => {\n return backend.scatterND($indices, $updates, shape);\n };\n const inputs = { indices: $indices, updates: $updates };\n const attrs = { shape };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, ScatterNd, attrs);\n}\nexport const scatterND = op({ scatterND_ });\n//# sourceMappingURL=scatter_nd.js.map", "/**\n * Validate sparseToDense inputs.\n *\n * @param sparseIndices A 0-D, 1-D, or 2-D Tensor of type int32.\n * sparseIndices[i] contains the complete index where sparseValues[i] will be\n * placed.\n * @param sparseValues A 0-D or 1-D Tensor. Values\n * corresponding to each row of sparseIndices, or a scalar value to be used for\n * all sparse indices.\n * @param outputShape number[]. Shape of the dense output tensor.\n * @param validateIndices boolean. indice validation is not supported, error\n * will be thrown if it is set.\n */\nexport function validateInput(sparseIndices, sparseValues, outputShape, defaultValues) {\n if (sparseIndices.dtype !== 'int32') {\n throw new Error('tf.sparseToDense() expects the indices to be int32 type,' +\n ` but the dtype was ${sparseIndices.dtype}.`);\n }\n if (sparseIndices.rank > 2) {\n throw new Error('sparseIndices should be a scalar, vector, or matrix,' +\n ` but got shape ${sparseIndices.shape}.`);\n }\n const numElems = sparseIndices.rank > 0 ? sparseIndices.shape[0] : 1;\n const numDims = sparseIndices.rank > 1 ? sparseIndices.shape[1] : 1;\n if (outputShape.length !== numDims) {\n throw new Error('outputShape has incorrect number of elements:,' +\n ` ${outputShape.length}, should be: ${numDims}.`);\n }\n const numValues = sparseValues.size;\n if (!(sparseValues.rank === 0 ||\n sparseValues.rank === 1 && numValues === numElems)) {\n throw new Error('sparseValues has incorrect shape ' +\n `${sparseValues.shape}, should be [] or [${numElems}]`);\n }\n if (sparseValues.dtype !== defaultValues.dtype) {\n throw new Error('sparseValues.dtype must match defaultValues.dtype');\n }\n}\n//# sourceMappingURL=sparse_to_dense_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { SparseToDense } from '../kernel_names';\nimport * as sparse_to_dense from '../ops/sparse_to_dense_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Converts a sparse representation into a dense tensor.\n *\n * Builds an array dense with shape outputShape such that:\n *\n * // If sparseIndices is scalar\n * dense[i] = (i == sparseIndices ? sparseValues : defaultValue)\n *\n * // If sparseIndices is a vector, then for each i\n * dense[sparseIndices[i]] = sparseValues[i]\n *\n * // If sparseIndices is an n by d matrix, then for each i in [0, n)\n * dense[sparseIndices[i][0], ..., sparseIndices[i][d-1]] = sparseValues[i]\n * All other values in dense are set to defaultValue. If sparseValues is a\n * scalar, all sparse indices are set to this single value.\n *\n * If indices are repeated the final value is summed over all values for those\n * indices.\n *\n * ```js\n * const indices = tf.tensor1d([4, 5, 6, 1, 2, 3], 'int32');\n * const values = tf.tensor1d([10, 11, 12, 13, 14, 15], 'float32');\n * const shape = [8];\n * tf.sparseToDense(indices, values, shape).print();\n * ```\n *\n * @param sparseIndices A 0-D, 1-D, or 2-D Tensor of type int32.\n * sparseIndices[i] contains the complete index where sparseValues[i] will be\n * placed.\n * @param sparseValues A 0-D or 1-D Tensor. Values\n * corresponding to each row of sparseIndices, or a scalar value to be used for\n * all sparse indices.\n * @param outputShape Shape of the dense output tensor. the type is inferred.\n * @param defaultValue Scalar. Value to set for indices not specified in\n * sparseIndices. Defaults to zero.\n *\n * @doc {heading: 'Operations', subheading: 'Normalization'}\n */\nfunction sparseToDense_(sparseIndices, sparseValues, outputShape, defaultValue = 0) {\n const $sparseIndices = convertToTensor(sparseIndices, 'sparseIndices', 'sparseToDense', 'int32');\n const $sparseValues = convertToTensor(sparseValues, 'sparseValues', 'sparseToDense');\n const $defaultValue = convertToTensor(defaultValue, 'defaultValue', 'sparseToDense', $sparseValues.dtype);\n sparse_to_dense.validateInput($sparseIndices, $sparseValues, outputShape, $defaultValue);\n const inputs = {\n sparseIndices: $sparseIndices,\n sparseValues: $sparseValues,\n defaultValue: $defaultValue\n };\n const attrs = { outputShape };\n return ENGINE.runKernelFunc(backend => backend.sparseToDense($sparseIndices, $sparseValues, outputShape, $defaultValue), inputs, null /* grad */, SparseToDense, attrs);\n}\nexport const sparseToDense = op({ sparseToDense_ });\n//# sourceMappingURL=sparse_to_dense.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { GatherNd } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Gather slices from input tensor into a Tensor with shape specified by\n * `indices`.\n *\n * `indices` is an K-dimensional integer tensor, best thought of as a\n * (K-1)-dimensional tensor of indices into input, where each element defines a\n * slice of input:\n * output[\\\\(i_0, ..., i_{K-2}\\\\)] = input[indices[\\\\(i_0, ..., i_{K-2}\\\\)]]\n *\n * Whereas in `tf.gather`, `indices` defines slices into the first dimension of\n * input, in `tf.gatherND`, `indices` defines slices into the first N dimensions\n * of input, where N = indices.shape[-1].\n *\n * The last dimension of indices can be at most the rank of input:\n * indices.shape[-1] <= input.rank\n *\n * The last dimension of `indices` corresponds to elements\n * (if indices.shape[-1] == input.rank) or slices\n * (if indices.shape[-1] < input.rank) along dimension indices.shape[-1] of\n * input.\n * The output tensor has shape\n * indices.shape[:-1] + input.shape[indices.shape[-1]:]\n *\n * Note that on CPU, if an out of bound index is found, an error is returned. On\n * GPU, if an out of bound index is found, a 0 is stored in the corresponding\n * output value.\n *\n * ```js\n * const indices = tf.tensor2d([0, 1, 1, 0], [2,2], 'int32');\n * const input = tf.tensor2d([9, 10, 11, 12], [2, 2]);\n * tf.gatherND(input, indices).print() // [10, 11]\n * ```\n *\n * @param x The tensor from which to gather values.\n * @param indices Index tensor, must be of type int32.\n *\n * @doc {heading: 'Operations', subheading: 'Slicing and Joining'}\n */\nfunction gatherND_(x, indices) {\n const $indices = convertToTensor(indices, 'indices', 'gatherND', 'int32');\n const $x = convertToTensor(x, 'x', 'gatherND');\n const forward = (backend) => {\n return backend.gatherND($x, $indices);\n };\n const inputs = { params: $x, indices: $indices };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, GatherNd);\n}\nexport const gatherND = op({ gatherND_ });\n//# sourceMappingURL=gather_nd.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as util from '../util';\n/**\n * Normalize noise shape based on provided tensor and noise shape.\n *\n * @param x Tensor.\n * @param noiseShape The shape for the randomly generated keep/drop flags, as\n * an array of numbers. Optional.\n * @returns Normalized noise shape.\n */\nexport function getNoiseShape(x, noiseShape) {\n if (noiseShape == null) {\n return x.shape.slice();\n }\n if (util.arraysEqual(x.shape, noiseShape)) {\n return noiseShape;\n }\n if (x.shape.length === noiseShape.length) {\n const newDimension = [];\n for (let i = 0; i < x.shape.length; i++) {\n if (noiseShape[i] == null && x.shape[i] != null) {\n newDimension.push(x.shape[i]);\n }\n else {\n newDimension.push(noiseShape[i]);\n }\n }\n return newDimension;\n }\n return noiseShape;\n}\n//# sourceMappingURL=dropout_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Tensor } from '../tensor';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { add } from './add';\nimport { div } from './div';\nimport { getNoiseShape } from './dropout_util';\nimport { floor } from './floor';\nimport { mul } from './mul';\nimport { op } from './operation';\nimport { randomUniform } from './random_uniform';\n/**\n * Computes dropout.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 2, 1]);\n * const rate = 0.75;\n * const output = tf.dropout(x, rate);\n * output.print();\n * ```\n *\n * @param x A floating point Tensor or TensorLike.\n * @param rate A float in the range [0, 1). The probability that each element\n * of x is discarded.\n * @param noiseShape An array of numbers of type int32, representing the\n * shape for randomly generated keep/drop flags. If the noiseShape has null\n * value, it will be automatically replaced with the x's relative dimension\n * size. Optional.\n * @param seed Used to create random seeds. 
Optional.\n * @returns A Tensor of the same shape of x.\n *\n * @doc {heading: 'Operations', subheading: 'Dropout'}\n */\nfunction dropout_(x, rate, noiseShape, seed) {\n const $x = convertToTensor(x, 'x', 'dropout');\n util.assert($x.dtype === 'float32', () => `x has to be a floating point tensor since it's going to be ` +\n `scaled, but got a ${$x.dtype} tensor instead.`);\n util.assert(rate >= 0 && rate < 1, () => `rate must be a float in the range [0, 1), but got ${rate}.`);\n if (rate === 0) {\n return x instanceof Tensor ? $x.clone() : $x;\n }\n const $noiseShape = getNoiseShape($x, noiseShape);\n const keepProb = 1 - rate;\n const multiplier = div(floor(add(randomUniform($noiseShape, 0, 1, 'float32', seed), keepProb)), keepProb);\n return mul($x, multiplier);\n}\nexport const dropout = op({ dropout_ });\n//# sourceMappingURL=dropout.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { tensor1d } from './tensor1d';\nexport function enclosingPowerOfTwo(value) {\n // Return 2**N for integer N such that 2**N >= value.\n return Math.floor(Math.pow(2, Math.ceil(Math.log(value) / Math.log(2.0))));\n}\nexport function cosineWindow(windowLength, a, b) {\n const even = 1 - windowLength % 2;\n const newValues = new Float32Array(windowLength);\n for (let i = 0; i < windowLength; ++i) {\n const cosArg = (2.0 * Math.PI * i) / (windowLength + even - 1);\n newValues[i] = a - b * Math.cos(cosArg);\n }\n return tensor1d(newValues, 'float32');\n}\n//# sourceMappingURL=signal_ops_util.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport { assert, assertShapesMatch, getTypedArrayFromDType } from '../util';\nimport { tensor } from './tensor';\n/**\n * Returns whether the targets are in the top K predictions.\n *\n * ```js\n * const predictions = tf.tensor2d([[20, 10, 40, 30], [30, 50, -20, 10]]);\n * const targets = tf.tensor1d([2, 0]);\n * const precision = await tf.inTopKAsync(predictions, targets);\n * precision.print();\n * ```\n * @param predictions 2-D or higher `tf.Tensor` with last dimension being\n * at least `k`.\n * @param targets 1-D or higher `tf.Tensor`.\n * @param k Optional Number of top elements to look at for computing precision,\n * default to 1.\n *\n * @doc {heading: 'Operations', subheading: 'Evaluation'}\n */\nasync function inTopKAsync_(predictions, targets, k = 1) {\n const $predictions = convertToTensor(predictions, 'predictions', 'inTopK');\n const $targets = convertToTensor(targets, 'targets', 'inTopK');\n assert($predictions.rank > 1, () => 'inTopK() expects the predictions to be of rank 2 or higher, ' +\n `but got ${$predictions.rank}`);\n assert($predictions.rank - 1 === $targets.rank, () => `predictions rank should be 1 larger than ` +\n `targets rank, but got predictions rank ` +\n `${$predictions.rank} and targets rank ${$targets.rank}`);\n assertShapesMatch($predictions.shape.slice(0, $predictions.shape.length - 1), $targets.shape, `predictions's shape should be align with the targets' shape, ` +\n 'except the last dimension.');\n const lastDim = $predictions.shape[$predictions.shape.length - 1];\n assert(k > 0 && k <= lastDim, () => `'k' passed to inTopK() must be > 0 && <= the predictions last ` +\n `dimension (${lastDim}), but got ${k}`);\n const predictionsVals = await $predictions.data();\n const targetsVals = await $targets.data();\n // Reshape predictionsVals into a 2d tensor [batch, lastDim]\n // and look up topK along lastDim.\n const [batch, size] = [predictionsVals.length / lastDim, lastDim];\n const precision = getTypedArrayFromDType('bool', batch);\n for (let b = 0; b < batch; b++) {\n const offset = b * size;\n const vals = predictionsVals.subarray(offset, offset + size);\n const valAndInd = [];\n for (let i = 0; i < vals.length; i++) {\n valAndInd.push({ value: vals[i], index: i });\n }\n valAndInd.sort((a, b) => b.value - a.value);\n precision[b] = 0;\n for (let i = 0; i < k; i++) {\n if (valAndInd[i].index === targetsVals[b]) {\n precision[b] = 1;\n break;\n }\n }\n }\n if (predictions !== $predictions) {\n $predictions.dispose();\n }\n if (targets !== $targets) {\n $targets.dispose();\n }\n // Output precision has the same shape as targets.\n return tensor(precision, $targets.shape, 'bool');\n}\nexport const inTopKAsync = inTopKAsync_;\n//# sourceMappingURL=in_top_k.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Conv2DBackpropFilter } from '../kernel_names';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the derivative of the filter of a 2D convolution.\n *\n * @param x The input tensor, of rank 4 or rank 3 of shape\n * [batch, height, width, inChannels]. If rank 3, batch of 1 is assumed.\n * @param dy The dy image, of rank 4 or rank 3, of shape\n * [batch, height, width, outDepth]. If rank 3, batch of 1 is assumed.\n * @param filterShape The shape of the filter, length 4,\n * [filterHeight, filterWidth, inDepth, outDepth].\n * @param strides The strides of the convolution: [strideHeight,\n * strideWidth].\n * @param pad A string from: 'same', 'valid'. The type of padding algorithm\n * used in the forward prop of the op.\n * @param dataFormat: An optional string from: \"NHWC\", \"NCHW\". Defaults to\n * \"NHWC\". Specify the data format of the input and output data. With the\n * default format \"NHWC\", the data is stored in the order of: [batch,\n * height, width, channels].\n * @param dimRoundingMode A string from: 'ceil', 'round', 'floor'. The\n * rounding mode used when computing output dimensions if pad is a\n * number. If none is provided, it will not round and error if the output\n * is of fractional size.\n */\nfunction conv2DBackpropFilter_(x, dy, filterShape, strides, pad, dataFormat = 'NHWC', dimRoundingMode) {\n let x4D = x;\n if (x.rank === 3) {\n x4D = reshape(x, [1, x.shape[0], x.shape[1], x.shape[2]]);\n }\n let dy4D = dy;\n if (dy4D.rank === 3) {\n dy4D = reshape(dy, [1, dy.shape[0], dy.shape[1], dy.shape[2]]);\n }\n util.assert(x4D.rank === 4, () => `Error in conv2dDerFilter: input must be rank 4, but got shape ` +\n `${x4D.shape}.`);\n util.assert(dy4D.rank === 4, () => `Error in conv2dDerFilter: dy must be rank 4, but got shape ` +\n `${dy4D.shape}.`);\n util.assert(filterShape.length === 4, () => `Error in conv2dDerFilter: filterShape must be length 4, but got ` +\n `${filterShape}.`);\n const inDepth = dataFormat === 'NHWC' ? x4D.shape[3] : x4D.shape[1];\n const outDepth = dataFormat === 'NHWC' ? 
dy4D.shape[3] : dy4D.shape[1];\n util.assert(inDepth === filterShape[2], () => `Error in conv2dDerFilter: depth of input ${inDepth}) must ` +\n `match input depth in filter (${filterShape[2]}.`);\n util.assert(outDepth === filterShape[3], () => `Error in conv2dDerFilter: depth of dy (${outDepth}) must ` +\n `match output depth for filter (${filterShape[3]}).`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in conv2dDerFilter: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const forward = backend => {\n const dilations = 1;\n const $dataFormat = conv_util.convertConv2DDataFormat(dataFormat);\n const convInfo = conv_util.computeConv2DInfo(x4D.shape, filterShape, strides, dilations, pad, dimRoundingMode, false, $dataFormat);\n return backend.conv2dDerFilter(x4D, dy4D, convInfo);\n };\n const inputs = { x: x4D, dy: dy4D };\n const attrs = { strides, pad, dataFormat, dimRoundingMode, filterShape };\n return ENGINE.runKernelFunc(forward, inputs, null, Conv2DBackpropFilter, attrs);\n}\nexport const conv2DBackpropFilter = op({ conv2DBackpropFilter_ });\n//# sourceMappingURL=conv2d_backprop_filter.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as broadcast_util from './broadcast_util';\nimport { elu } from './elu';\nimport { mul } from './mul';\nimport { prelu } from './prelu';\nimport { relu } from './relu';\nimport { relu6 } from './relu6';\nimport { reshape } from './reshape';\nimport { step } from './step';\nimport { sum } from './sum';\n// Returns gradient for fused activation.\nexport function getFusedDyActivation(dy, y, activation) {\n if (activation == null || activation === 'linear') {\n return dy;\n }\n if (activation === 'relu') {\n return mul(dy, step(y));\n }\n throw new Error(`Cannot compute gradient for fused activation ${activation}.`);\n}\n// Returns gradient for fused bias.\nexport function getFusedBiasGradient(bias, dyActivation) {\n let res = dyActivation;\n const reduceAxes = broadcast_util.getReductionAxes(bias.shape, dyActivation.shape);\n if (reduceAxes.length > 0) {\n res = sum(res, reduceAxes);\n }\n return reshape(res, bias.shape);\n}\nexport function applyActivation(x, activation, preluActivationWeights) {\n if (activation === 'linear') {\n return x;\n }\n else if (activation === 'relu') {\n return relu(x);\n }\n else if (activation === 'elu') {\n return elu(x);\n }\n else if (activation === 'relu6') {\n return relu6(x);\n }\n else if (activation === 'prelu') {\n return prelu(x, preluActivationWeights);\n }\n throw new Error(`Unknown fused activation ${activation}.`);\n}\n// Whether we should call fused ops.\nexport const shouldFuse = (gradientDepth, activation) => {\n const gradientMode = gradientDepth > 0;\n return !gradientMode || activation === 'linear';\n};\n//# 
sourceMappingURL=fused_util.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { customGrad } from '../../gradients';\nimport { FusedConv2D } from '../../kernel_names';\nimport { makeTypesMatch } from '../../tensor_util';\nimport { convertToTensor } from '../../tensor_util_env';\nimport * as util from '../../util';\nimport { add } from '../add';\nimport * as broadcast_util from '../broadcast_util';\nimport { conv2d as unfusedConv2d } from '../conv2d';\nimport { conv2DBackpropFilter } from '../conv2d_backprop_filter';\nimport { conv2DBackpropInput } from '../conv2d_backprop_input';\nimport * as conv_util from '../conv_util';\nimport { applyActivation, getFusedBiasGradient, getFusedDyActivation, shouldFuse } from '../fused_util';\nimport { op } from '../operation';\nimport { reshape } from '../reshape';\n/**\n * Computes a 2D convolution over the input x, optionally fused with adding a\n * bias and applying an activation.\n *\n * ```js\n * const inputDepth = 2;\n * const inShape = [2, 2, 2, inputDepth];\n * const outputDepth = 2;\n * const fSize = 1;\n * const pad = 0;\n * const strides = 1;\n *\n * const x = tf.tensor4d( [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,\n * 16], inShape);\n * const w = tf.tensor4d([-1, 1, -2, 0.5], [fSize, fSize, inputDepth,\n * outputDepth]);\n *\n * tf.fused.conv2d({ x, filter: w, strides, pad, dataFormat: 'NHWC',\n * dilations: [1, 1], bias: tf.scalar(5), activation: 'relu' }).print();\n * ```\n *\n * @param obj An object with the following properties:\n * @param x The input tensor, of rank 4 or rank 3, of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is\n * assumed.\n * @param filter The filter, rank 4, of shape\n * `[filterHeight, filterWidth, inDepth, outDepth]`.\n * @param strides The strides of the convolution: `[strideHeight,\n * strideWidth]`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid` output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dataFormat An optional string from: \"NHWC\", \"NCHW\". Defaults to\n * \"NHWC\". Specify the data format of the input and output data. With the\n * default format \"NHWC\", the data is stored in the order of: [batch,\n * height, width, channels]. Only \"NHWC\" is currently supported.\n * @param dilations The dilation rates: `[dilationHeight, dilationWidth]`\n * in which we sample input values across the height and width dimensions\n * in atrous convolution. Defaults to `[1, 1]`. If `dilations` is a single\n * number, then `dilationHeight == dilationWidth`. 
If it is greater than\n * 1, then all values of `strides` must be 1.\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. If none is provided, it will not round\n * and error if the output is of fractional size.\n * @param bias Tensor to be added to the result.\n * @param activation Name of activation kernel (defaults to `linear`) to be\n * applied\n * after biasAdd.\n * @param preluActivationWeights Tensor of prelu weights to be applied as part\n * of a `prelu` activation, typically the same shape as `x`.\n */\nfunction fusedConv2d_({ x, filter, strides, pad, dataFormat = 'NHWC', dilations = [1, 1], dimRoundingMode, bias, activation = 'linear', preluActivationWeights }) {\n activation = activation || 'linear';\n if (shouldFuse(ENGINE.state.gradientDepth, activation) === false) {\n let result = unfusedConv2d(x, filter, strides, pad, dataFormat, dilations, dimRoundingMode);\n if (bias != null) {\n result = add(result, bias);\n }\n return applyActivation(result, activation, preluActivationWeights);\n }\n const $x = convertToTensor(x, 'x', 'conv2d');\n const $filter = convertToTensor(filter, 'filter', 'conv2d');\n let x4D = $x;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n reshapedTo4D = true;\n x4D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2]]);\n }\n util.assert(x4D.rank === 4, () => `Error in fused conv2d: input must be rank 4, but got rank ` +\n `${x4D.rank}.`);\n util.assert($filter.rank === 4, () => `Error in fused conv2d: filter must be rank 4, but got rank ` +\n `${$filter.rank}.`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in fused conv2d: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n util.assert(x4D.shape[3] === $filter.shape[2], () => `Error in conv2d: depth of input (${x4D.shape[3]}) must match ` +\n `input depth for filter ${$filter.shape[2]}.`);\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in conv2D: Either strides or dilations must be 1. ' +\n `Got strides ${strides} and dilations '${dilations}'`);\n util.assert(dataFormat === 'NHWC', () => `Error in conv2d: got dataFormat of ${dataFormat} but only NHWC is currently supported.`);\n const convInfo = conv_util.computeConv2DInfo(x4D.shape, $filter.shape, strides, dilations, pad, dimRoundingMode);\n let $bias;\n if (bias != null) {\n $bias = convertToTensor(bias, 'bias', 'fused conv2d');\n [$bias] = makeTypesMatch($bias, $x);\n broadcast_util.assertAndGetBroadcastShape(convInfo.outShape, $bias.shape);\n }\n let $preluActivationWeights;\n if (preluActivationWeights != null) {\n $preluActivationWeights = convertToTensor(preluActivationWeights, 'prelu weights', 'fused conv2d');\n }\n const grad = (dy, saved) => {\n const [$filter, x4D, y, $bias] = saved;\n const dyActivation = getFusedDyActivation(dy, y, activation);\n util.assert(conv_util.tupleValuesAreOne(dilations), () => 'Error in gradient of fused conv2D: ' +\n `dilation rates greater than 1 ` +\n `are not yet supported in gradients. 
Got dilations '${dilations}'`);\n const xDer = conv2DBackpropInput(x4D.shape, dyActivation, $filter, strides, pad);\n const filterDer = conv2DBackpropFilter(x4D, dyActivation, $filter.shape, strides, pad);\n const der = [xDer, filterDer];\n if ($bias != null) {\n const biasDer = getFusedBiasGradient($bias, dyActivation);\n der.push(biasDer);\n }\n return der;\n };\n const forward = (backend) => {\n const res = backend.fusedConv2d({\n input: x4D,\n filter: $filter,\n convInfo,\n bias: $bias,\n activation,\n preluActivationWeights: $preluActivationWeights\n });\n return res;\n };\n const inputs = {\n x: x4D,\n filter: $filter,\n bias: $bias,\n preluActivationWeights: $preluActivationWeights\n };\n const attrs = { strides, pad, dataFormat, dilations, dimRoundingMode, activation };\n // Depending on the the params passed in we will have different number of\n // inputs and thus a a different number of elements in the gradient.\n if (bias == null) {\n const customOp = customGrad((x4D, filter, save) => {\n let res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, FusedConv2D, attrs);\n save([filter, x4D, res]);\n if (reshapedTo4D) {\n res = reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return { value: res, gradFunc: grad };\n });\n return customOp(x4D, $filter);\n }\n else {\n const customOpWithBias = customGrad((x4D, filter, bias, save) => {\n let res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, FusedConv2D, attrs);\n save([filter, x4D, res, bias]);\n if (reshapedTo4D) {\n res = reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return { value: res, gradFunc: grad };\n });\n return customOpWithBias(x4D, $filter, $bias);\n }\n}\nexport const conv2d = op({ fusedConv2d_ });\n//# sourceMappingURL=conv2d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { DepthwiseConv2dNativeBackpropFilter } from '../kernel_names';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nfunction depthwiseConv2dNativeBackpropFilter_(x, dy, filterShape, strides, pad, dilations = [1, 1], dimRoundingMode) {\n let x4D = x;\n if (x.rank === 3) {\n x4D = reshape(x, [1, x.shape[0], x.shape[1], x.shape[2]]);\n }\n let dy4D = dy;\n if (dy4D.rank === 3) {\n dy4D = reshape(dy, [1, dy.shape[0], dy.shape[1], dy.shape[2]]);\n }\n const forward = backend => {\n const convInfo = conv_util.computeConv2DInfo(x.shape, filterShape, strides, dilations, pad, dimRoundingMode, true /* depthwise */);\n return backend.depthwiseConv2DDerFilter(x4D, dy4D, convInfo);\n };\n const inputs = { x: x4D, dy: dy4D };\n const attrs = { strides, pad, dimRoundingMode, dilations, filterShape };\n return ENGINE.runKernelFunc(forward, inputs, null, DepthwiseConv2dNativeBackpropFilter, attrs);\n}\nexport const depthwiseConv2dNativeBackpropFilter = op({ depthwiseConv2dNativeBackpropFilter_ });\n//# sourceMappingURL=depthwise_conv2d_native_backprop_filter.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { DepthwiseConv2dNativeBackpropInput } from '../kernel_names';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nfunction depthwiseConv2dNativeBackpropInput_(xShape, dy, filter, strides, pad, dilations = [1, 1], dimRoundingMode) {\n let dy4D = dy;\n let reshapedTo4D = false;\n if (dy.rank === 3) {\n reshapedTo4D = true;\n dy4D = reshape(dy, [1, dy.shape[0], dy.shape[1], dy.shape[2]]);\n }\n const forward = backend => {\n const convInfo = conv_util.computeConv2DInfo(xShape, filter.shape, strides, dilations, pad, dimRoundingMode, true /* depthwise */);\n return backend.depthwiseConv2DDerInput(dy4D, filter, convInfo);\n };\n const inputs = { dy: dy4D, filter };\n const attrs = { strides, pad, dimRoundingMode, dilations, inputShape: xShape };\n const res = ENGINE.runKernelFunc(forward, inputs, null, DepthwiseConv2dNativeBackpropInput, attrs);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const depthwiseConv2dNativeBackpropInput = op({ depthwiseConv2dNativeBackpropInput_ });\n//# sourceMappingURL=depthwise_conv2d_native_backprop_input.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { customGrad } from '../../gradients';\nimport { FusedDepthwiseConv2D } from '../../kernel_names';\nimport { makeTypesMatch } from '../../tensor_util';\nimport { convertToTensor } from '../../tensor_util_env';\nimport * as util from '../../util';\nimport { add } from '../add';\nimport * as broadcast_util from '../broadcast_util';\nimport * as conv_util from '../conv_util';\nimport { depthwiseConv2d as unfusedDepthwiseConv2d } from '../depthwise_conv2d';\nimport { depthwiseConv2dNativeBackpropFilter } from '../depthwise_conv2d_native_backprop_filter';\nimport { depthwiseConv2dNativeBackpropInput } from '../depthwise_conv2d_native_backprop_input';\nimport { applyActivation, getFusedBiasGradient, getFusedDyActivation, shouldFuse } from '../fused_util';\nimport { op } from '../operation';\nimport { reshape } from '../reshape';\n/**\n * Computes depthwise 2D convolution, optionally fused with adding a\n * bias and applying an activation.\n *\n * Given a 4D `input` array and a `filter` array of shape\n * `[filterHeight, filterWidth, inChannels, channelMultiplier]` containing\n * `inChannels` convolutional filters of depth 1, this op applies a\n * different filter to each input channel (expanding from 1 channel to\n * `channelMultiplier` channels for each), then concatenates the results\n * together. The output has `inChannels * channelMultiplier` channels.\n *\n * See\n * [https://www.tensorflow.org/api_docs/python/tf/nn/depthwise_conv2d](\n * https://www.tensorflow.org/api_docs/python/tf/nn/depthwise_conv2d)\n * for more details.\n *\n * @param obj An object with the following properties:\n * @param x The input tensor, of rank 4 or rank 3, of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is\n * assumed.\n * @param filter The filter tensor, rank 4, of shape\n * `[filterHeight, filterWidth, inChannels, channelMultiplier]`.\n * @param strides The strides of the convolution: `[strideHeight,\n * strideWidth]`. If strides is a single number, then `strideHeight ==\n * strideWidth`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dilations The dilation rates: `[dilationHeight, dilationWidth]`\n * in which we sample input values across the height and width dimensions\n * in atrous convolution. Defaults to `[1, 1]`. If `rate` is a single\n * number, then `dilationHeight == dilationWidth`. If it is greater than\n * 1, then all values of `strides` must be 1.\n * @param dataFormat: An optional string from: \"NHWC\", \"NCHW\". 
Defaults to\n * \"NHWC\". Specify the data format of the input and output data. With the\n * default format \"NHWC\", the data is stored in the order of: [batch,\n * height, width, channels]. Only \"NHWC\" is currently supported.\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. If none is provided, it will not round\n * and error if the output is of fractional size.\n * @param bias Tensor to be added to the result.\n * @param activation Name of activation kernel (defaults to `linear`).\n * @param preluActivationWeights Tensor of prelu weights to be applied as part\n * of a `prelu` activation, typically the same shape as `x`.\n */\nfunction fusedDepthwiseConv2d_({ x, filter, strides, pad, dataFormat = 'NHWC', dilations = [1, 1], dimRoundingMode, bias, activation = 'linear', preluActivationWeights }) {\n if (shouldFuse(ENGINE.state.gradientDepth, activation) === false) {\n let result = unfusedDepthwiseConv2d(x, filter, strides, pad, dataFormat, dilations, dimRoundingMode);\n if (bias != null) {\n result = add(result, bias);\n }\n return applyActivation(result, activation, preluActivationWeights);\n }\n const $x = convertToTensor(x, 'x', 'depthwiseConv2d');\n const $filter = convertToTensor(filter, 'filter', 'depthwiseConv2d');\n let x4D = $x;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n reshapedTo4D = true;\n x4D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2]]);\n }\n util.assert(x4D.rank === 4, () => `Error in fused depthwiseConv2d: input must be rank 4, but got ` +\n `rank ${x4D.rank}.`);\n util.assert($filter.rank === 4, () => `Error in fused depthwiseConv2d: filter must be rank 4, ` +\n `but got rank ${$filter.rank}.`);\n util.assert(x4D.shape[3] === $filter.shape[2], () => `Error in fused depthwiseConv2d: number of input channels ` +\n `(${x4D.shape[3]}) must match the inChannels dimension in ` +\n `filter ${$filter.shape[2]}.`);\n if (dilations == null) {\n dilations = [1, 1];\n }\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in fused depthwiseConv2d: Either strides or dilations must ' +\n `be 1. Got strides ${strides} and dilations '${dilations}'`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in fused depthwiseConv2d: pad must be an integer when ` +\n `using dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const convInfo = conv_util.computeConv2DInfo(x4D.shape, $filter.shape, strides, dilations, pad, dimRoundingMode, true /* depthwise */);\n let $bias;\n if (bias != null) {\n $bias = convertToTensor(bias, 'bias', 'fused conv2d');\n [$bias] = makeTypesMatch($bias, $x);\n broadcast_util.assertAndGetBroadcastShape(convInfo.outShape, $bias.shape);\n }\n let $preluActivationWeights;\n if (preluActivationWeights != null) {\n $preluActivationWeights = convertToTensor(preluActivationWeights, 'prelu weights', 'fused depthwiseConv2d');\n }\n const grad = (dy, saved) => {\n util.assert(conv_util.tupleValuesAreOne(dilations), () => 'Error in gradient of fused depthwiseConv2d: dilation rates ' +\n `greater than 1 are not yet supported. 
Got dilations ` +\n `'${dilations}'`);\n const [$filter, x4D, y, bias] = saved;\n const dyActivation = getFusedDyActivation(dy, y, activation);\n const xDer = depthwiseConv2dNativeBackpropInput(x4D.shape, dyActivation, $filter, strides, pad, dilations, dimRoundingMode);\n const filterDer = depthwiseConv2dNativeBackpropFilter(x4D, dyActivation, $filter.shape, strides, pad, dilations, dimRoundingMode);\n if (bias != null) {\n const biasDer = getFusedBiasGradient($bias, dyActivation);\n return [xDer, filterDer, biasDer];\n }\n return [xDer, filterDer];\n };\n const forward = (backend) => {\n const res = backend.fusedDepthwiseConv2D({\n input: x4D,\n filter: $filter,\n convInfo,\n bias: $bias,\n activation,\n preluActivationWeights: $preluActivationWeights\n });\n return res;\n };\n const inputs = {\n x: x4D,\n filter: $filter,\n bias: $bias,\n preluActivationWeights: $preluActivationWeights\n };\n const attrs = { strides, pad, dataFormat, dilations, dimRoundingMode, activation };\n // Depending on the the params passed in we will have different number of\n // inputs and thus a a different number of elements in the gradient.\n if (bias == null) {\n const customOp = customGrad((x4D, filter, save) => {\n let res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, FusedDepthwiseConv2D, attrs);\n save([filter, x4D, res]);\n if (reshapedTo4D) {\n res = reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return { value: res, gradFunc: grad };\n });\n return customOp(x4D, $filter);\n }\n else {\n const customOpWithBias = customGrad((x4D, filter, bias, save) => {\n let res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, FusedDepthwiseConv2D, attrs);\n save([filter, x4D, res, bias]);\n if (reshapedTo4D) {\n res = reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return { value: res, gradFunc: grad };\n });\n return customOpWithBias(x4D, $filter, $bias);\n }\n}\nexport const depthwiseConv2d = op({ fusedDepthwiseConv2d_ });\n//# sourceMappingURL=depthwise_conv2d.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { customGrad } from '../../gradients';\nimport { _FusedMatMul } from '../../kernel_names';\nimport { makeTypesMatch } from '../../tensor_util';\nimport { convertToTensor } from '../../tensor_util_env';\nimport * as util from '../../util';\nimport { add } from '../add';\nimport * as broadcast_util from '../broadcast_util';\nimport { applyActivation, getFusedBiasGradient, getFusedDyActivation, shouldFuse } from '../fused_util';\nimport { matMul as unfusedMatMul } from '../mat_mul';\nimport { op } from '../operation';\nimport { reshape } from '../reshape';\n/**\n * Computes the dot product of two matrices with optional activation and bias.\n *\n * ```js\n * const a = tf.tensor2d([-1, -2], [1, 2]);\n * const b = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n * const bias = tf.tensor2d([1, 2], [1, 2]);\n *\n * tf.fused.matMul({a, b, bias, activation: 'relu'}).print();\n * ```\n *\n * @param obj An object with the following properties:\n * - `a` First matrix in dot product operation.\n * - `b` Second matrix in dot product operation.\n * - `transposeA` If true, `a` is transposed before multiplication.\n * - `transposeB` If true, `b` is transposed before multiplication.\n * - `bias` Matrix to be added to the result.\n * - `activation` Name of activation kernel (defaults to `linear`).\n * - `preluActivationWeights` Tensor of prelu weights.\n */\nfunction fusedMatMul_({ a, b, transposeA = false, transposeB = false, bias, activation = 'linear', preluActivationWeights }) {\n if (shouldFuse(ENGINE.state.gradientDepth, activation) === false) {\n let result = unfusedMatMul(a, b, transposeA, transposeB);\n if (bias != null) {\n result = add(result, bias);\n }\n return applyActivation(result, activation, preluActivationWeights);\n }\n let $a = convertToTensor(a, 'a', 'fused matMul');\n let $b = convertToTensor(b, 'b', 'fused matMul');\n [$a, $b] = makeTypesMatch($a, $b);\n const innerShapeA = transposeA ? $a.shape[$a.rank - 2] : $a.shape[$a.rank - 1];\n const innerShapeB = transposeB ? $b.shape[$b.rank - 1] : $b.shape[$b.rank - 2];\n const outerShapeA = transposeA ? $a.shape[$a.rank - 1] : $a.shape[$a.rank - 2];\n const outerShapeB = transposeB ? 
$b.shape[$b.rank - 2] : $b.shape[$b.rank - 1];\n const outerDimsA = $a.shape.slice(0, -2);\n const outerDimsB = $b.shape.slice(0, -2);\n const batchDimA = util.sizeFromShape(outerDimsA);\n const batchDimB = util.sizeFromShape(outerDimsB);\n util.assert($a.rank >= 2 && $b.rank >= 2 && $a.rank === $b.rank, () => `Error in fused matMul: inputs must have the same rank of at least ` +\n `2, got ranks ${$a.rank} and ${$b.rank}.`);\n util.assert(util.arraysEqual(outerDimsA, outerDimsB), () => `Error in fused matMul: outer dimensions (${outerDimsA}) and (` +\n `${outerDimsB}) of Tensors with shapes ${$a.shape} and ` +\n `${$b.shape} must match.`);\n util.assert(innerShapeA === innerShapeB, () => `Error in fused matMul: inner shapes (${innerShapeA}) and (` +\n `${innerShapeB}) of Tensors with shapes ${$a.shape} and ` +\n `${$b.shape} and transposeA=${transposeA}` +\n ` and transposeB=${transposeB} must match.`);\n const outShape = $a.shape.slice(0, -2).concat([outerShapeA, outerShapeB]);\n const a3D = transposeA ?\n reshape($a, [batchDimA, innerShapeA, outerShapeA]) :\n reshape($a, [batchDimA, outerShapeA, innerShapeA]);\n const b3D = transposeB ?\n reshape($b, [batchDimB, outerShapeB, innerShapeB]) :\n reshape($b, [batchDimB, innerShapeB, outerShapeB]);\n let $bias;\n if (bias != null) {\n $bias = convertToTensor(bias, 'bias', 'fused matMul');\n [$bias] = makeTypesMatch($bias, $a);\n broadcast_util.assertAndGetBroadcastShape(outShape, $bias.shape);\n }\n let $preluActivationWeights;\n if (preluActivationWeights != null) {\n $preluActivationWeights = convertToTensor(preluActivationWeights, 'prelu weights', 'fused matMul');\n }\n const grad = (dy, saved) => {\n const [a3D, b3D, y, $bias] = saved;\n // we reshape dy because the result of the forward is not\n // necessarily going to be a 3d tensor due to a reshape done at the end of\n // the customOp.\n const dyActivation = getFusedDyActivation(reshape(dy, y.shape), y, activation);\n let aDer;\n let bDer;\n if (!transposeA && !transposeB) {\n aDer = unfusedMatMul(dyActivation, b3D, false, true);\n bDer = unfusedMatMul(a3D, dyActivation, true, false);\n }\n else if (!transposeA && transposeB) {\n aDer = unfusedMatMul(dyActivation, b3D, false, false);\n bDer = unfusedMatMul(dyActivation, a3D, true, false);\n }\n else if (transposeA && !transposeB) {\n aDer = unfusedMatMul(b3D, dyActivation, false, true);\n bDer = unfusedMatMul(a3D, dyActivation, false, false);\n }\n else {\n aDer = unfusedMatMul(b3D, dyActivation, true, true);\n bDer = unfusedMatMul(dyActivation, a3D, true, true);\n }\n if (bias != null) {\n const biasDer = getFusedBiasGradient($bias, dyActivation);\n return [aDer, bDer, biasDer];\n }\n else {\n return [aDer, bDer];\n }\n };\n const forward = (backend) => {\n const y = backend.fusedBatchMatMul({\n a: a3D,\n b: b3D,\n transposeA,\n transposeB,\n bias: $bias,\n activation,\n preluActivationWeights: $preluActivationWeights\n });\n return y;\n };\n const inputs = {\n a: a3D,\n b: b3D,\n bias: $bias,\n preluActivationWeights: $preluActivationWeights\n };\n const attrs = { transposeA, transposeB, activation };\n // Depending on the the params passed in we will have different number of\n // inputs and thus a a different number of elements in the gradient.\n if (bias == null) {\n const customOp = customGrad((a3D, b3D, save) => {\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, _FusedMatMul, attrs);\n save([a3D, b3D, res]);\n return { value: reshape(res, outShape), gradFunc: grad };\n });\n return customOp(a3D, b3D);\n }\n 
else {\n const customOpWithBias = customGrad((a3D, b3D, $bias, save) => {\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, _FusedMatMul, attrs);\n save([a3D, b3D, res, $bias]);\n return { value: reshape(res, outShape), gradFunc: grad };\n });\n return customOpWithBias(a3D, b3D, $bias);\n }\n}\nexport const matMul = op({ fusedMatMul_ });\n//# sourceMappingURL=mat_mul.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { conv2d } from './fused/conv2d';\nimport { depthwiseConv2d } from './fused/depthwise_conv2d';\nimport { matMul } from './fused/mat_mul';\nexport { conv2d, depthwiseConv2d, matMul };\n//# sourceMappingURL=fused_ops.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { op } from '../operation';\nimport { cosineWindow } from '../signal_ops_util';\n/**\n * Generate a hamming window.\n *\n * See: https://en.wikipedia.org/wiki/Window_function#Hann_and_Hamming_windows\n *\n * ```js\n * tf.signal.hammingWindow(10).print();\n * ```\n * @param The length of window\n *\n * @doc {heading: 'Operations', subheading: 'Signal', namespace: 'signal'}\n */\nfunction hammingWindow_(windowLength) {\n return cosineWindow(windowLength, 0.54, 0.46);\n}\nexport const hammingWindow = op({ hammingWindow_ });\n//# sourceMappingURL=hamming_window.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
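// Illustrative usage sketch for the fused matMul op documented above (not part of
// the bundled sources); assumes the standard @tensorflow/tfjs package.
import * as tf from '@tensorflow/tfjs';
const a = tf.tensor2d([-1, -2], [1, 2]);      // 1x2 matrix
const b = tf.tensor2d([1, 2, 3, 4], [2, 2]);  // 2x2 matrix
const bias = tf.tensor2d([1, 2], [1, 2]);     // broadcastable bias
// matrix multiply, bias add and ReLU run as a single fused kernel
tf.fused.matMul({ a, b, bias, activation: 'relu' }).print();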
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { op } from '../operation';\nimport { cosineWindow } from '../signal_ops_util';\n/**\n * Generate a Hann window.\n *\n * See: https://en.wikipedia.org/wiki/Window_function#Hann_and_Hamming_windows\n *\n * ```js\n * tf.signal.hannWindow(10).print();\n * ```\n * @param The length of window\n *\n * @doc {heading: 'Operations', subheading: 'Signal', namespace: 'signal'}\n */\nfunction hannWindow_(windowLength) {\n return cosineWindow(windowLength, 0.5, 0.5);\n}\nexport const hannWindow = op({ hannWindow_ });\n//# sourceMappingURL=hann_window.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { concat } from '../concat';\nimport { fill } from '../fill';\nimport { op } from '../operation';\nimport { reshape } from '../reshape';\nimport { slice } from '../slice';\nimport { tensor2d } from '../tensor2d';\n/**\n * Expands input into frames of frameLength.\n * Slides a window size with frameStep.\n *\n * ```js\n * tf.signal.frame([1, 2, 3], 2, 1).print();\n * ```\n * @param signal The input tensor to be expanded\n * @param frameLength Length of each frame\n * @param frameStep The frame hop size in samples.\n * @param padEnd Whether to pad the end of signal with padValue.\n * @param padValue An number to use where the input signal does\n * not exist when padEnd is True.\n *\n * @doc {heading: 'Operations', subheading: 'Signal', namespace: 'signal'}\n */\nfunction frame_(signal, frameLength, frameStep, padEnd = false, padValue = 0) {\n let start = 0;\n const output = [];\n while (start + frameLength <= signal.size) {\n output.push(slice(signal, start, frameLength));\n start += frameStep;\n }\n if (padEnd) {\n while (start < signal.size) {\n const padLen = (start + frameLength) - signal.size;\n const pad = concat([\n slice(signal, start, frameLength - padLen), fill([padLen], padValue)\n ]);\n output.push(pad);\n start += frameStep;\n }\n }\n if (output.length === 0) {\n return tensor2d([], [0, frameLength]);\n }\n return reshape(concat(output), [output.length, frameLength]);\n}\nexport const frame = op({ frame_ });\n//# sourceMappingURL=frame.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { concat } from '../concat';\nimport { mul } from '../mul';\nimport { op } from '../operation';\nimport { enclosingPowerOfTwo } from '../signal_ops_util';\nimport { slice } from '../slice';\nimport { rfft } from '../spectral/rfft';\nimport { frame } from './frame';\nimport { hannWindow } from './hann_window';\n/**\n * Computes the Short-time Fourier Transform of signals\n * See: https://en.wikipedia.org/wiki/Short-time_Fourier_transform\n *\n * ```js\n * const input = tf.tensor1d([1, 1, 1, 1, 1])\n * tf.signal.stft(input, 3, 1).print();\n * ```\n * @param signal 1-dimensional real value tensor.\n * @param frameLength The window length of samples.\n * @param frameStep The number of samples to step.\n * @param fftLength The size of the FFT to apply.\n * @param windowFn A callable that takes a window length and returns 1-d tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Signal', namespace: 'signal'}\n */\nfunction stft_(signal, frameLength, frameStep, fftLength, windowFn = hannWindow) {\n if (fftLength == null) {\n fftLength = enclosingPowerOfTwo(frameLength);\n }\n const framedSignal = frame(signal, frameLength, frameStep);\n const windowedSignal = mul(framedSignal, windowFn(frameLength));\n const output = [];\n for (let i = 0; i < framedSignal.shape[0]; i++) {\n output.push(rfft(slice(windowedSignal, [i, 0], [1, frameLength]), fftLength));\n }\n return concat(output);\n}\nexport const stft = op({ stft_ });\n//# sourceMappingURL=stft.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
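// Illustrative sketch for the tf.signal ops documented above (hannWindow,
// hammingWindow, frame, stft); not part of the bundled sources, assumes @tensorflow/tfjs.
import * as tf from '@tensorflow/tfjs';
tf.signal.hannWindow(10).print();     // cosine window with a = 0.5, b = 0.5
tf.signal.hammingWindow(10).print();  // cosine window with a = 0.54, b = 0.46
const signal = tf.tensor1d([1, 1, 1, 1, 1]);
tf.signal.frame(signal, 3, 1).print(); // sliding frames of length 3, hop size 1
tf.signal.stft(signal, 3, 1).print();  // rfft of each Hann-windowed frame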
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { CropAndResize } from '../../kernel_names';\nimport { convertToTensor } from '../../tensor_util_env';\nimport * as util from '../../util';\nimport { op } from '../operation';\n/**\n * Extracts crops from the input image tensor and resizes them using bilinear\n * sampling or nearest neighbor sampling (possibly with aspect ratio change)\n * to a common output size specified by cropSize.\n *\n * @param image 4d tensor of shape `[batch,imageHeight,imageWidth, depth]`,\n * where imageHeight and imageWidth must be positive, specifying the\n * batch of images from which to take crops\n * @param boxes 2d float32 tensor of shape `[numBoxes, 4]`. Each entry is\n * `[y1, x1, y2, x2]`, where `(y1, x1)` and `(y2, x2)` are the normalized\n * coordinates of the box in the boxInd[i]'th image in the batch\n * @param boxInd 1d int32 tensor of shape `[numBoxes]` with values in range\n * `[0, batch)` that specifies the image that the `i`-th box refers to.\n * @param cropSize 1d int32 tensor of 2 elements `[cropHeigh, cropWidth]`\n * specifying the size to which all crops are resized to.\n * @param method Optional string from `'bilinear' | 'nearest'`,\n * defaults to bilinear, which specifies the sampling method for resizing\n * @param extrapolationValue A threshold for deciding when to remove boxes based\n * on score. 
Defaults to 0.\n * @return A 4D tensor of the shape `[numBoxes,cropHeight,cropWidth,depth]`\n *\n * @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'}\n */\nfunction cropAndResize_(image, boxes, boxInd, cropSize, method, extrapolationValue) {\n const $image = convertToTensor(image, 'image', 'cropAndResize');\n const $boxes = convertToTensor(boxes, 'boxes', 'cropAndResize', 'float32');\n const $boxInd = convertToTensor(boxInd, 'boxInd', 'cropAndResize', 'int32');\n method = method || 'bilinear';\n extrapolationValue = extrapolationValue || 0;\n const numBoxes = $boxes.shape[0];\n util.assert($image.rank === 4, () => 'Error in cropAndResize: image must be rank 4,' +\n `but got rank ${$image.rank}.`);\n util.assert($boxes.rank === 2 && $boxes.shape[1] === 4, () => `Error in cropAndResize: boxes must be have size [${numBoxes},4] ` +\n `but had shape ${$boxes.shape}.`);\n util.assert($boxInd.rank === 1 && $boxInd.shape[0] === numBoxes, () => `Error in cropAndResize: boxInd must be have size [${numBoxes}] ` +\n `but had shape ${$boxes.shape}.`);\n util.assert(cropSize.length === 2, () => `Error in cropAndResize: cropSize must be of length 2, but got ` +\n `length ${cropSize.length}.`);\n util.assert(cropSize[0] >= 1 && cropSize[1] >= 1, () => `cropSize must be atleast [1,1], but was ${cropSize}`);\n util.assert(method === 'bilinear' || method === 'nearest', () => `method must be bilinear or nearest, but was ${method}`);\n const forward = (backend) => backend.cropAndResize($image, $boxes, $boxInd, cropSize, method, extrapolationValue);\n const inputs = { image: $image, boxes: $boxes, boxInd: $boxInd };\n const attrs = { method, extrapolationValue, cropSize };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, CropAndResize, attrs);\n return res;\n}\nexport const cropAndResize = op({ cropAndResize_ });\n//# sourceMappingURL=crop_and_resize.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { FlipLeftRight } from '../../kernel_names';\nimport { convertToTensor } from '../../tensor_util_env';\nimport * as util from '../../util';\nimport { op } from '../operation';\n/**\n * Flips the image left to right. 
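// Illustrative sketch for tf.image.cropAndResize as documented above; shapes and
// values are made up for the example, assumes @tensorflow/tfjs.
import * as tf from '@tensorflow/tfjs';
const image = tf.zeros([1, 16, 16, 3]);                 // [batch, height, width, depth]
const boxes = tf.tensor2d([[0, 0, 0.5, 0.5]], [1, 4]);  // normalized [y1, x1, y2, x2]
const boxInd = tf.tensor1d([0], 'int32');               // image index for each box
const crops = tf.image.cropAndResize(image, boxes, boxInd, [8, 8], 'bilinear', 0);
console.log(crops.shape);                               // [1, 8, 8, 3]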
Currently available in the CPU, WebGL, and\n * WASM backends.\n *\n * @param image 4d tensor of shape `[batch, imageHeight, imageWidth, depth]`.\n */\n/** @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'} */\nfunction flipLeftRight_(image) {\n const $image = convertToTensor(image, 'image', 'flipLeftRight', 'float32');\n util.assert($image.rank === 4, () => 'Error in flipLeftRight: image must be rank 4,' +\n `but got rank ${$image.rank}.`);\n const inputs = { image: $image };\n const res = ENGINE.runKernel(FlipLeftRight, inputs, {});\n return res;\n}\nexport const flipLeftRight = op({ flipLeftRight_ });\n//# sourceMappingURL=flip_left_right.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { RotateWithOffset } from '../../kernel_names';\nimport { convertToTensor } from '../../tensor_util_env';\nimport * as util from '../../util';\nimport { op } from '../operation';\n/**\n * Rotates the input image tensor counter-clockwise with an optional offset\n * center of rotation. Currently available in the CPU, WebGL, and WASM backends.\n *\n * @param image 4d tensor of shape `[batch, imageHeight, imageWidth, depth]`.\n * @param radians The amount of rotation.\n * @param fillValue The value to fill in the empty space leftover\n * after rotation. Can be either a single grayscale value (0-255), or an\n * array of three numbers `[red, green, blue]` specifying the red, green,\n * and blue channels. Defaults to `0` (black).\n * @param center The center of rotation. Can be either a single value (0-1), or\n * an array of two numbers `[centerX, centerY]`. Defaults to `0.5` (rotates\n * the image around its center).\n *\n * @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'}\n */\nfunction rotateWithOffset_(image, radians, fillValue = 0, center = 0.5) {\n const $image = convertToTensor(image, 'image', 'rotateWithOffset', 'float32');\n util.assert($image.rank === 4, () => 'Error in rotateWithOffset: image must be rank 4,' +\n `but got rank ${$image.rank}.`);\n const inputs = { image: $image };\n const attrs = { radians, fillValue, center };\n const res = ENGINE.runKernel(RotateWithOffset, inputs, attrs);\n return res;\n}\nexport const rotateWithOffset = op({ rotateWithOffset_ });\n//# sourceMappingURL=rotate_with_offset.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
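// Illustrative sketch for tf.image.flipLeftRight and tf.image.rotateWithOffset as
// documented above; assumes @tensorflow/tfjs with a CPU, WebGL or WASM backend.
import * as tf from '@tensorflow/tfjs';
const batch = tf.zeros([1, 32, 32, 3]);         // rank-4 image batch
const flipped = tf.image.flipLeftRight(batch);  // mirror horizontally
const rotated = tf.image.rotateWithOffset(batch, Math.PI / 4, 0, 0.5); // 45 deg CCW around the center, black fill
console.log(flipped.shape, rotated.shape);      // both [1, 32, 32, 3]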
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as util from '../util';\nfunction nonMaxSuppSanityCheck(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma) {\n if (iouThreshold == null) {\n iouThreshold = 0.5;\n }\n if (scoreThreshold == null) {\n scoreThreshold = Number.NEGATIVE_INFINITY;\n }\n if (softNmsSigma == null) {\n softNmsSigma = 0.0;\n }\n const numBoxes = boxes.shape[0];\n maxOutputSize = Math.min(maxOutputSize, numBoxes);\n util.assert(0 <= iouThreshold && iouThreshold <= 1, () => `iouThreshold must be in [0, 1], but was '${iouThreshold}'`);\n util.assert(boxes.rank === 2, () => `boxes must be a 2D tensor, but was of rank '${boxes.rank}'`);\n util.assert(boxes.shape[1] === 4, () => `boxes must have 4 columns, but 2nd dimension was ${boxes.shape[1]}`);\n util.assert(scores.rank === 1, () => 'scores must be a 1D tensor');\n util.assert(scores.shape[0] === numBoxes, () => `scores has incompatible shape with boxes. Expected ${numBoxes}, ` +\n `but was ${scores.shape[0]}`);\n util.assert(0 <= softNmsSigma && softNmsSigma <= 1, () => `softNmsSigma must be in [0, 1], but was '${softNmsSigma}'`);\n return { maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma };\n}\nexport { nonMaxSuppSanityCheck };\n//# sourceMappingURL=nonmax_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { NonMaxSuppressionV3 } from '../../kernel_names';\nimport { convertToTensor } from '../../tensor_util_env';\nimport { nonMaxSuppSanityCheck } from '../nonmax_util';\nimport { op } from '../operation';\nfunction nonMaxSuppression_(boxes, scores, maxOutputSize, iouThreshold = 0.5, scoreThreshold = Number.NEGATIVE_INFINITY) {\n const $boxes = convertToTensor(boxes, 'boxes', 'nonMaxSuppression');\n const $scores = convertToTensor(scores, 'scores', 'nonMaxSuppression');\n const inputs = nonMaxSuppSanityCheck($boxes, $scores, maxOutputSize, iouThreshold, scoreThreshold);\n maxOutputSize = inputs.maxOutputSize;\n iouThreshold = inputs.iouThreshold;\n scoreThreshold = inputs.scoreThreshold;\n const attrs = { maxOutputSize, iouThreshold, scoreThreshold };\n return ENGINE.runKernelFunc(b => b.nonMaxSuppression($boxes, $scores, maxOutputSize, iouThreshold, scoreThreshold), { boxes: $boxes, scores: $scores }, null /* grad */, NonMaxSuppressionV3, attrs);\n}\nexport const nonMaxSuppression = op({ nonMaxSuppression_ });\n//# sourceMappingURL=non_max_suppression.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * Inserts a value into a sorted array. This method allows duplicate, meaning it\n * allows inserting duplicate value, in which case, the element will be inserted\n * at the lowest index of the value.\n * @param arr The array to modify.\n * @param element The element to insert.\n * @param comparator Optional. If no comparator is specified, elements are\n * compared using array_util.defaultComparator, which is suitable for Strings\n * and Numbers in ascending arrays. If the array contains multiple instances of\n * the target value, the left-most instance will be returned. To provide a\n * comparator, it should take 2 arguments to compare and return a negative,\n * zero, or a positive number.\n */\nexport function binaryInsert(arr, element, comparator) {\n const index = binarySearch(arr, element, comparator);\n const insertionPoint = index < 0 ? 
-(index + 1) : index;\n arr.splice(insertionPoint, 0, element);\n}\n/**\n * Searches the array for the target using binary search, returns the index\n * of the found element, or position to insert if element not found. If no\n * comparator is specified, elements are compared using array_\n * util.defaultComparator, which is suitable for Strings and Numbers in\n * ascending arrays. If the array contains multiple instances of the target\n * value, the left-most instance will be returned.\n * @param arr The array to be searched in.\n * @param target The target to be searched for.\n * @param comparator Should take 2 arguments to compare and return a negative,\n * zero, or a positive number.\n * @return Lowest index of the target value if found, otherwise the insertion\n * point where the target should be inserted, in the form of\n * (-insertionPoint - 1).\n */\nexport function binarySearch(arr, target, comparator) {\n return binarySearch_(arr, target, comparator || defaultComparator);\n}\n/**\n * Compares its two arguments for order.\n * @param a The first element to be compared.\n * @param b The second element to be compared.\n * @return A negative number, zero, or a positive number as the first\n * argument is less than, equal to, or greater than the second.\n */\nfunction defaultComparator(a, b) {\n return a > b ? 1 : a < b ? -1 : 0;\n}\nfunction binarySearch_(arr, target, comparator) {\n let left = 0;\n let right = arr.length;\n let middle = 0;\n let found = false;\n while (left < right) {\n middle = left + ((right - left) >>> 1);\n const compareResult = comparator(target, arr[middle]);\n if (compareResult > 0) {\n left = middle + 1;\n }\n else {\n right = middle;\n // If compareResult is 0, the value is found. We record it is found,\n // and then keep looking because there may be duplicate.\n found = !compareResult;\n }\n }\n return found ? left : -left - 1;\n}\n//# sourceMappingURL=array_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * Implementation of the NonMaxSuppression kernel shared between webgl and cpu.\n */\nimport { scalar } from '../ops/scalar';\nimport { tensor1d } from '../ops/tensor1d';\nimport { binaryInsert } from './array_util';\nexport function nonMaxSuppressionV3Impl(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold) {\n return nonMaxSuppressionImpl_(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold, 0 /* softNmsSigma */)\n .selectedIndices;\n}\nexport function nonMaxSuppressionV4Impl(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold, padToMaxOutputSize) {\n return nonMaxSuppressionImpl_(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold, 0 /* softNmsSigma */, false /* returnScoresTensor */, padToMaxOutputSize /* padToMaxOutputSize */, true\n /* returnValidOutputs */ );\n}\nexport function nonMaxSuppressionV5Impl(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma) {\n return nonMaxSuppressionImpl_(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma, true /* returnScoresTensor */);\n}\nfunction nonMaxSuppressionImpl_(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma, returnScoresTensor = false, padToMaxOutputSize = false, returnValidOutputs = false) {\n // The list is sorted in ascending order, so that we can always pop the\n // candidate with the largest score in O(1) time.\n const candidates = [];\n for (let i = 0; i < scores.length; i++) {\n if (scores[i] > scoreThreshold) {\n candidates.push({ score: scores[i], boxIndex: i, suppressBeginIndex: 0 });\n }\n }\n candidates.sort(ascendingComparator);\n // If softNmsSigma is 0, the outcome of this algorithm is exactly same as\n // before.\n const scale = softNmsSigma > 0 ? (-0.5 / softNmsSigma) : 0.0;\n const selectedIndices = [];\n const selectedScores = [];\n while (selectedIndices.length < maxOutputSize && candidates.length > 0) {\n const candidate = candidates.pop();\n const { score: originalScore, boxIndex, suppressBeginIndex } = candidate;\n if (originalScore < scoreThreshold) {\n break;\n }\n // Overlapping boxes are likely to have similar scores, therefore we\n // iterate through the previously selected boxes backwards in order to\n // see if candidate's score should be suppressed. We use\n // suppressBeginIndex to track and ensure a candidate can be suppressed\n // by a selected box no more than once. 
Also, if the overlap exceeds\n // iouThreshold, we simply ignore the candidate.\n let ignoreCandidate = false;\n for (let j = selectedIndices.length - 1; j >= suppressBeginIndex; --j) {\n const iou = intersectionOverUnion(boxes, boxIndex, selectedIndices[j]);\n if (iou >= iouThreshold) {\n ignoreCandidate = true;\n break;\n }\n candidate.score =\n candidate.score * suppressWeight(iouThreshold, scale, iou);\n if (candidate.score <= scoreThreshold) {\n break;\n }\n }\n // At this point, if `candidate.score` has not dropped below\n // `scoreThreshold`, then we know that we went through all of the\n // previous selections and can safely update `suppressBeginIndex` to the\n // end of the selected array. Then we can re-insert the candidate with\n // the updated score and suppressBeginIndex back in the candidate list.\n // If on the other hand, `candidate.score` has dropped below the score\n // threshold, we will not add it back to the candidates list.\n candidate.suppressBeginIndex = selectedIndices.length;\n if (!ignoreCandidate) {\n // Candidate has passed all the tests, and is not suppressed, so\n // select the candidate.\n if (candidate.score === originalScore) {\n selectedIndices.push(boxIndex);\n selectedScores.push(candidate.score);\n }\n else if (candidate.score > scoreThreshold) {\n // Candidate's score is suppressed but is still high enough to be\n // considered, so add back to the candidates list.\n binaryInsert(candidates, candidate, ascendingComparator);\n }\n }\n }\n // NonMaxSuppressionV4 feature: padding output to maxOutputSize.\n const validOutputs = selectedIndices.length;\n const elemsToPad = maxOutputSize - validOutputs;\n if (padToMaxOutputSize && elemsToPad > 0) {\n selectedIndices.push(...new Array(elemsToPad).fill(0));\n selectedScores.push(...new Array(elemsToPad).fill(0.0));\n }\n const result = { selectedIndices: tensor1d(selectedIndices, 'int32') };\n if (returnScoresTensor) {\n result['selectedScores'] = tensor1d(selectedScores, 'float32');\n }\n if (returnValidOutputs) {\n result['validOutputs'] = scalar(validOutputs, 'int32');\n }\n return result;\n}\nfunction intersectionOverUnion(boxes, i, j) {\n const iCoord = boxes.subarray(i * 4, i * 4 + 4);\n const jCoord = boxes.subarray(j * 4, j * 4 + 4);\n const yminI = Math.min(iCoord[0], iCoord[2]);\n const xminI = Math.min(iCoord[1], iCoord[3]);\n const ymaxI = Math.max(iCoord[0], iCoord[2]);\n const xmaxI = Math.max(iCoord[1], iCoord[3]);\n const yminJ = Math.min(jCoord[0], jCoord[2]);\n const xminJ = Math.min(jCoord[1], jCoord[3]);\n const ymaxJ = Math.max(jCoord[0], jCoord[2]);\n const xmaxJ = Math.max(jCoord[1], jCoord[3]);\n const areaI = (ymaxI - yminI) * (xmaxI - xminI);\n const areaJ = (ymaxJ - yminJ) * (xmaxJ - xminJ);\n if (areaI <= 0 || areaJ <= 0) {\n return 0.0;\n }\n const intersectionYmin = Math.max(yminI, yminJ);\n const intersectionXmin = Math.max(xminI, xminJ);\n const intersectionYmax = Math.min(ymaxI, ymaxJ);\n const intersectionXmax = Math.min(xmaxI, xmaxJ);\n const intersectionArea = Math.max(intersectionYmax - intersectionYmin, 0.0) *\n Math.max(intersectionXmax - intersectionXmin, 0.0);\n return intersectionArea / (areaI + areaJ - intersectionArea);\n}\n// A Gaussian penalty function, this method always returns values in [0, 1].\n// The weight is a function of similarity, the more overlap two boxes are, the\n// smaller the weight is, meaning highly overlapping boxe will be significantly\n// penalized. 
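// Small illustration of the Gaussian Soft-NMS penalty used in the implementation
// above: weight = exp((-0.5 / softNmsSigma) * iou^2) while the IoU stays at or
// below iouThreshold, and 0 once the overlap exceeds it.
function softNmsWeight(iouThreshold, softNmsSigma, iou) {
  const scale = softNmsSigma > 0 ? -0.5 / softNmsSigma : 0.0;
  return iou <= iouThreshold ? Math.exp(scale * iou * iou) : 0.0;
}
console.log(softNmsWeight(0.5, 0.5, 0.3)); // ~0.91, score mildly decayed
console.log(softNmsWeight(0.5, 0.5, 0.6)); // 0, candidate suppressed outright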
On the other hand, a non-overlapping box will not be penalized.\nfunction suppressWeight(iouThreshold, scale, iou) {\n const weight = Math.exp(scale * iou * iou);\n return iou <= iouThreshold ? weight : 0.0;\n}\nfunction ascendingComparator(c1, c2) {\n // For objects with same scores, we make the object with the larger index go\n // first. In an array that pops from the end, this means that the object with\n // the smaller index will be popped first. This ensures the same output as\n // the TensorFlow python version.\n return (c1.score - c2.score) ||\n ((c1.score === c2.score) && (c2.boxIndex - c1.boxIndex));\n}\n//# sourceMappingURL=non_max_suppression_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { nonMaxSuppressionV3Impl } from '../../backends/non_max_suppression_impl';\nimport { convertToTensor } from '../../tensor_util_env';\nimport { nonMaxSuppSanityCheck } from '../nonmax_util';\n/**\n * Performs non maximum suppression of bounding boxes based on\n * iou (intersection over union).\n *\n * This is the async version of `nonMaxSuppression`\n *\n * @param boxes a 2d tensor of shape `[numBoxes, 4]`. Each entry is\n * `[y1, x1, y2, x2]`, where `(y1, x1)` and `(y2, x2)` are the corners of\n * the bounding box.\n * @param scores a 1d tensor providing the box scores of shape `[numBoxes]`.\n * @param maxOutputSize The maximum number of boxes to be selected.\n * @param iouThreshold A float representing the threshold for deciding whether\n * boxes overlap too much with respect to IOU. Must be between [0, 1].\n * Defaults to 0.5 (50% box overlap).\n * @param scoreThreshold A threshold for deciding when to remove boxes based\n * on score. 
Defaults to -inf, which means any score is accepted.\n * @return A 1D tensor with the selected box indices.\n *\n * @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'}\n */\nasync function nonMaxSuppressionAsync_(boxes, scores, maxOutputSize, iouThreshold = 0.5, scoreThreshold = Number.NEGATIVE_INFINITY) {\n const $boxes = convertToTensor(boxes, 'boxes', 'nonMaxSuppressionAsync');\n const $scores = convertToTensor(scores, 'scores', 'nonMaxSuppressionAsync');\n const inputs = nonMaxSuppSanityCheck($boxes, $scores, maxOutputSize, iouThreshold, scoreThreshold);\n maxOutputSize = inputs.maxOutputSize;\n iouThreshold = inputs.iouThreshold;\n scoreThreshold = inputs.scoreThreshold;\n const boxesAndScores = await Promise.all([$boxes.data(), $scores.data()]);\n const boxesVals = boxesAndScores[0];\n const scoresVals = boxesAndScores[1];\n // We call a cpu based impl directly with the typedarray data here rather\n // than a kernel because all kernels are synchronous (and thus cannot await\n // .data()).\n const res = nonMaxSuppressionV3Impl(boxesVals, scoresVals, maxOutputSize, iouThreshold, scoreThreshold);\n if ($boxes !== boxes) {\n $boxes.dispose();\n }\n if ($scores !== scores) {\n $scores.dispose();\n }\n return res;\n}\nexport const nonMaxSuppressionAsync = nonMaxSuppressionAsync_;\n//# sourceMappingURL=non_max_suppression_async.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { NonMaxSuppressionV5 } from '../../kernel_names';\nimport { convertToTensor } from '../../tensor_util_env';\nimport { nonMaxSuppSanityCheck } from '../nonmax_util';\nimport { op } from '../operation';\n/**\n * Performs non maximum suppression of bounding boxes based on\n * iou (intersection over union).\n *\n * This op also supports a Soft-NMS mode (c.f.\n * Bodla et al, https://arxiv.org/abs/1704.04503) where boxes reduce the score\n * of other overlapping boxes, therefore favoring different regions of the image\n * with high scores. To enable this Soft-NMS mode, set the `softNmsSigma`\n * parameter to be larger than 0.\n *\n * @param boxes a 2d tensor of shape `[numBoxes, 4]`. Each entry is\n * `[y1, x1, y2, x2]`, where `(y1, x1)` and `(y2, x2)` are the corners of\n * the bounding box.\n * @param scores a 1d tensor providing the box scores of shape `[numBoxes]`.\n * @param maxOutputSize The maximum number of boxes to be selected.\n * @param iouThreshold A float representing the threshold for deciding whether\n * boxes overlap too much with respect to IOU. Must be between [0, 1].\n * Defaults to 0.5 (50% box overlap).\n * @param scoreThreshold A threshold for deciding when to remove boxes based\n * on score. 
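// Illustrative sketch for tf.image.nonMaxSuppressionAsync as documented above;
// box values are made up for the example, assumes @tensorflow/tfjs.
import * as tf from '@tensorflow/tfjs';
const boxes = tf.tensor2d([[0, 0, 1, 1], [0, 0.1, 1, 1.1]], [2, 4]); // [y1, x1, y2, x2]
const scores = tf.tensor1d([0.9, 0.75]);
// the async variant downloads the data and runs the CPU impl, so it does not block the UI thread
tf.image.nonMaxSuppressionAsync(boxes, scores, 1, 0.5)
  .then((indices) => indices.print()); // [0], the higher-scoring of the two overlapping boxes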
Defaults to -inf, which means any score is accepted.\n * @param softNmsSigma A float representing the sigma parameter for Soft NMS.\n * When sigma is 0, it falls back to nonMaxSuppression.\n * @return A map with the following properties:\n * - selectedIndices: A 1D tensor with the selected box indices.\n * - selectedScores: A 1D tensor with the corresponding scores for each\n * selected box.\n *\n * @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'}\n */\nfunction nonMaxSuppressionWithScore_(boxes, scores, maxOutputSize, iouThreshold = 0.5, scoreThreshold = Number.NEGATIVE_INFINITY, softNmsSigma = 0.0) {\n const $boxes = convertToTensor(boxes, 'boxes', 'nonMaxSuppression');\n const $scores = convertToTensor(scores, 'scores', 'nonMaxSuppression');\n const params = nonMaxSuppSanityCheck($boxes, $scores, maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma);\n maxOutputSize = params.maxOutputSize;\n iouThreshold = params.iouThreshold;\n scoreThreshold = params.scoreThreshold;\n softNmsSigma = params.softNmsSigma;\n const inputs = { boxes: $boxes, scores: $scores };\n const attrs = { maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma };\n const result = ENGINE.runKernel(NonMaxSuppressionV5, inputs, attrs);\n return { selectedIndices: result[0], selectedScores: result[1] };\n}\nexport const nonMaxSuppressionWithScore = op({ nonMaxSuppressionWithScore_ });\n//# sourceMappingURL=non_max_suppression_with_score.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { nonMaxSuppressionV5Impl } from '../../backends/non_max_suppression_impl';\nimport { convertToTensor } from '../../tensor_util_env';\nimport { nonMaxSuppSanityCheck } from '../nonmax_util';\n/**\n * Asynchronously performs non maximum suppression of bounding boxes based on\n * iou (intersection over union).\n *\n * This op also supports a Soft-NMS mode (c.f.\n * Bodla et al, https://arxiv.org/abs/1704.04503) where boxes reduce the score\n * of other overlapping boxes, therefore favoring different regions of the image\n * with high scores. To enable this Soft-NMS mode, set the `softNmsSigma`\n * parameter to be larger than 0.\n *\n * @param boxes a 2d tensor of shape `[numBoxes, 4]`. Each entry is\n * `[y1, x1, y2, x2]`, where `(y1, x1)` and `(y2, x2)` are the corners of\n * the bounding box.\n * @param scores a 1d tensor providing the box scores of shape `[numBoxes]`.\n * @param maxOutputSize The maximum number of boxes to be selected.\n * @param iouThreshold A float representing the threshold for deciding whether\n * boxes overlap too much with respect to IOU. Must be between [0, 1].\n * Defaults to 0.5 (50% box overlap).\n * @param scoreThreshold A threshold for deciding when to remove boxes based\n * on score. 
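// Illustrative sketch for tf.image.nonMaxSuppressionWithScore as documented above;
// with softNmsSigma > 0, partially overlapping boxes are kept but their scores are
// decayed by the Gaussian penalty. Values are made up, assumes @tensorflow/tfjs.
import * as tf from '@tensorflow/tfjs';
const boxes = tf.tensor2d([[0, 0, 1, 1], [0, 0.4, 1, 1.4]], [2, 4]);
const scores = tf.tensor1d([0.9, 0.8]);
const { selectedIndices, selectedScores } =
    tf.image.nonMaxSuppressionWithScore(boxes, scores, 2, 0.5, Number.NEGATIVE_INFINITY, 0.5);
selectedIndices.print(); // both boxes kept (their IoU is below 0.5)
selectedScores.print();  // second score reduced by the Gaussian penalty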
Defaults to -inf, which means any score is accepted.\n * @param softNmsSigma A float representing the sigma parameter for Soft NMS.\n * When sigma is 0, it falls back to nonMaxSuppression.\n * @return A map with the following properties:\n * - selectedIndices: A 1D tensor with the selected box indices.\n * - selectedScores: A 1D tensor with the corresponding scores for each\n * selected box.\n *\n * @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'}\n */\nasync function nonMaxSuppressionWithScoreAsync_(boxes, scores, maxOutputSize, iouThreshold = 0.5, scoreThreshold = Number.NEGATIVE_INFINITY, softNmsSigma = 0.0) {\n const $boxes = convertToTensor(boxes, 'boxes', 'nonMaxSuppressionAsync');\n const $scores = convertToTensor(scores, 'scores', 'nonMaxSuppressionAsync');\n const params = nonMaxSuppSanityCheck($boxes, $scores, maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma);\n maxOutputSize = params.maxOutputSize;\n iouThreshold = params.iouThreshold;\n scoreThreshold = params.scoreThreshold;\n softNmsSigma = params.softNmsSigma;\n const boxesAndScores = await Promise.all([$boxes.data(), $scores.data()]);\n const boxesVals = boxesAndScores[0];\n const scoresVals = boxesAndScores[1];\n // We call a cpu based impl directly with the typedarray data here rather\n // than a kernel because all kernels are synchronous (and thus cannot await\n // .data()).\n const res = nonMaxSuppressionV5Impl(boxesVals, scoresVals, maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma);\n if ($boxes !== boxes) {\n $boxes.dispose();\n }\n if ($scores !== scores) {\n $scores.dispose();\n }\n return res;\n}\nexport const nonMaxSuppressionWithScoreAsync = nonMaxSuppressionWithScoreAsync_;\n//# sourceMappingURL=non_max_suppression_with_score_async.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { NonMaxSuppressionV4 } from '../../kernel_names';\nimport { convertToTensor } from '../../tensor_util_env';\nimport { nonMaxSuppSanityCheck } from '../nonmax_util';\nimport { op } from '../operation';\n/**\n * Asynchronously performs non maximum suppression of bounding boxes based on\n * iou (intersection over union), with an option to pad results.\n *\n * @param boxes a 2d tensor of shape `[numBoxes, 4]`. Each entry is\n * `[y1, x1, y2, x2]`, where `(y1, x1)` and `(y2, x2)` are the corners of\n * the bounding box.\n * @param scores a 1d tensor providing the box scores of shape `[numBoxes]`.\n * @param maxOutputSize The maximum number of boxes to be selected.\n * @param iouThreshold A float representing the threshold for deciding whether\n * boxes overlap too much with respect to IOU. Must be between [0, 1].\n * Defaults to 0.5 (50% box overlap).\n * @param scoreThreshold A threshold for deciding when to remove boxes based\n * on score. 
Defaults to -inf, which means any score is accepted.\n * @param padToMaxOutputSize Defalts to false. If true, size of output\n * `selectedIndices` is padded to maxOutputSize.\n * @return A map with the following properties:\n * - selectedIndices: A 1D tensor with the selected box indices.\n * - validOutputs: A scalar denoting how many elements in `selectedIndices`\n * are valid. Valid elements occur first, then padding.\n *\n * @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'}\n */\nfunction nonMaxSuppressionPadded_(boxes, scores, maxOutputSize, iouThreshold = 0.5, scoreThreshold = Number.NEGATIVE_INFINITY, padToMaxOutputSize = false) {\n const $boxes = convertToTensor(boxes, 'boxes', 'nonMaxSuppression');\n const $scores = convertToTensor(scores, 'scores', 'nonMaxSuppression');\n const params = nonMaxSuppSanityCheck($boxes, $scores, maxOutputSize, iouThreshold, scoreThreshold, null /* softNmsSigma */);\n const $maxOutputSize = params.maxOutputSize;\n const $iouThreshold = params.iouThreshold;\n const $scoreThreshold = params.scoreThreshold;\n const inputs = { boxes: $boxes, scores: $scores };\n const attrs = {\n maxOutputSize: $maxOutputSize,\n iouThreshold: $iouThreshold,\n scoreThreshold: $scoreThreshold,\n padToMaxOutputSize\n };\n const result = ENGINE.runKernel(NonMaxSuppressionV4, inputs, attrs);\n return { selectedIndices: result[0], validOutputs: result[1] };\n}\nexport const nonMaxSuppressionPadded = op({ nonMaxSuppressionPadded_ });\n//# sourceMappingURL=non_max_suppression_padded.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { nonMaxSuppressionV4Impl } from '../../backends/non_max_suppression_impl';\nimport { convertToTensor } from '../../tensor_util_env';\nimport { nonMaxSuppSanityCheck } from '../nonmax_util';\n/**\n * Asynchronously performs non maximum suppression of bounding boxes based on\n * iou (intersection over union), with an option to pad results.\n *\n * @param boxes a 2d tensor of shape `[numBoxes, 4]`. Each entry is\n * `[y1, x1, y2, x2]`, where `(y1, x1)` and `(y2, x2)` are the corners of\n * the bounding box.\n * @param scores a 1d tensor providing the box scores of shape `[numBoxes]`.\n * @param maxOutputSize The maximum number of boxes to be selected.\n * @param iouThreshold A float representing the threshold for deciding whether\n * boxes overlap too much with respect to IOU. Must be between [0, 1].\n * Defaults to 0.5 (50% box overlap).\n * @param scoreThreshold A threshold for deciding when to remove boxes based\n * on score. Defaults to -inf, which means any score is accepted.\n * @param padToMaxOutputSize Defalts to false. 
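// Illustrative sketch for tf.image.nonMaxSuppressionPadded as documented above;
// values are made up, assumes @tensorflow/tfjs. Note that maxOutputSize is first
// clamped to the number of boxes before any padding is applied.
import * as tf from '@tensorflow/tfjs';
const boxes = tf.tensor2d([[0, 0, 1, 1], [0, 0.1, 1, 1.1], [0, 2, 1, 3]], [3, 4]);
const scores = tf.tensor1d([0.9, 0.8, 0.7]);
const { selectedIndices, validOutputs } =
    tf.image.nonMaxSuppressionPadded(boxes, scores, 3, 0.5, Number.NEGATIVE_INFINITY, true);
selectedIndices.print(); // [0, 2, 0]: the middle box is suppressed, output padded to length 3
validOutputs.print();    // 2: only the first two entries are real selections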
If true, size of output\n * `selectedIndices` is padded to maxOutputSize.\n * @return A map with the following properties:\n * - selectedIndices: A 1D tensor with the selected box indices.\n * - validOutputs: A scalar denoting how many elements in `selectedIndices`\n * are valid. Valid elements occur first, then padding.\n *\n * @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'}\n */\nasync function nonMaxSuppressionPaddedAsync_(boxes, scores, maxOutputSize, iouThreshold = 0.5, scoreThreshold = Number.NEGATIVE_INFINITY, padToMaxOutputSize = false) {\n const $boxes = convertToTensor(boxes, 'boxes', 'nonMaxSuppressionAsync');\n const $scores = convertToTensor(scores, 'scores', 'nonMaxSuppressionAsync');\n const params = nonMaxSuppSanityCheck($boxes, $scores, maxOutputSize, iouThreshold, scoreThreshold, null /* softNmsSigma */);\n const $maxOutputSize = params.maxOutputSize;\n const $iouThreshold = params.iouThreshold;\n const $scoreThreshold = params.scoreThreshold;\n const [boxesVals, scoresVals] = await Promise.all([$boxes.data(), $scores.data()]);\n // We call a cpu based impl directly with the typedarray data here rather\n // than a kernel because all kernels are synchronous (and thus cannot await\n // .data()).\n const res = nonMaxSuppressionV4Impl(boxesVals, scoresVals, $maxOutputSize, $iouThreshold, $scoreThreshold, padToMaxOutputSize);\n if ($boxes !== boxes) {\n $boxes.dispose();\n }\n if ($scores !== scores) {\n $scores.dispose();\n }\n return res;\n}\nexport const nonMaxSuppressionPaddedAsync = nonMaxSuppressionPaddedAsync_;\n//# sourceMappingURL=non_max_suppression_padded_async.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { ResizeBilinear } from '../../kernel_names';\nimport { convertToTensor } from '../../tensor_util_env';\nimport * as util from '../../util';\nimport { op } from '../operation';\nimport { reshape } from '../reshape';\n/**\n * Bilinear resize a single 3D image or a batch of 3D images to a new shape.\n *\n * @param images The images, of rank 4 or rank 3, of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is assumed.\n * @param size The new shape `[newHeight, newWidth]` to resize the\n * images to. Each channel is resized individually.\n * @param alignCorners Defaults to False. If true, rescale\n * input by `(new_height - 1) / (height - 1)`, which exactly aligns the 4\n * corners of images and resized images. If false, rescale by\n * `new_height / height`. 
Treat similarly the width dimension.\n *\n * @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'}\n */\nfunction resizeBilinear_(images, size, alignCorners = false) {\n const $images = convertToTensor(images, 'images', 'resizeBilinear');\n util.assert($images.rank === 3 || $images.rank === 4, () => `Error in resizeBilinear: x must be rank 3 or 4, but got ` +\n `rank ${$images.rank}.`);\n util.assert(size.length === 2, () => `Error in resizeBilinear: new shape must 2D, but got shape ` +\n `${size}.`);\n let batchImages = $images;\n let reshapedTo4D = false;\n if ($images.rank === 3) {\n reshapedTo4D = true;\n batchImages = reshape($images, [1, $images.shape[0], $images.shape[1], $images.shape[2]]);\n }\n const [newHeight, newWidth] = size;\n const forward = (backend, save) => {\n save([batchImages]);\n return backend.resizeBilinear(batchImages, newHeight, newWidth, alignCorners);\n };\n const inputs = { images: batchImages };\n const attrs = { alignCorners, size };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* gradient */, ResizeBilinear, attrs);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const resizeBilinear = op({ resizeBilinear_ });\n//# sourceMappingURL=resize_bilinear.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { ResizeNearestNeighbor } from '../../kernel_names';\nimport { convertToTensor } from '../../tensor_util_env';\nimport * as util from '../../util';\nimport { op } from '../operation';\nimport { reshape } from '../reshape';\n/**\n * NearestNeighbor resize a batch of 3D images to a new shape.\n *\n * @param images The images, of rank 4 or rank 3, of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is assumed.\n * @param size The new shape `[newHeight, newWidth]` to resize the\n * images to. Each channel is resized individually.\n * @param alignCorners Defaults to False. If true, rescale\n * input by `(new_height - 1) / (height - 1)`, which exactly aligns the 4\n * corners of images and resized images. If false, rescale by\n * `new_height / height`. 
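// Illustrative sketch for tf.image.resizeBilinear and tf.image.resizeNearestNeighbor
// as documented above; shapes are made up, assumes @tensorflow/tfjs.
import * as tf from '@tensorflow/tfjs';
const img = tf.zeros([64, 48, 3]);                         // rank-3 input, batch of 1 is assumed
const bilinear = tf.image.resizeBilinear(img, [128, 96]);  // each channel resized independently
const nearest = tf.image.resizeNearestNeighbor(img, [128, 96]);
console.log(bilinear.shape, nearest.shape);                // [128, 96, 3] for both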
Treat similarly the width dimension.\n *\n * @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'}\n */\nfunction resizeNearestNeighbor_(images, size, alignCorners = false) {\n const $images = convertToTensor(images, 'images', 'resizeNearestNeighbor');\n util.assert($images.rank === 3 || $images.rank === 4, () => `Error in resizeNearestNeighbor: x must be rank 3 or 4, but got ` +\n `rank ${$images.rank}.`);\n util.assert(size.length === 2, () => `Error in resizeNearestNeighbor: new shape must 2D, but got shape ` +\n `${size}.`);\n util.assert($images.dtype === 'float32' || $images.dtype === 'int32', () => '`images` must have `int32` or `float32` as dtype');\n let batchImages = $images;\n let reshapedTo4D = false;\n if ($images.rank === 3) {\n reshapedTo4D = true;\n batchImages = reshape($images, [1, $images.shape[0], $images.shape[1], $images.shape[2]]);\n }\n const [newHeight, newWidth] = size;\n const inputs = { images: batchImages };\n const attrs = { alignCorners, size };\n const forward = (backend, save) => {\n save([batchImages]);\n return backend.resizeNearestNeighbor(batchImages, newHeight, newWidth, alignCorners);\n };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* gradient */, ResizeNearestNeighbor, attrs);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const resizeNearestNeighbor = op({ resizeNearestNeighbor_ });\n//# sourceMappingURL=resize_nearest_neighbor.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../../tensor_util_env';\nimport { assert } from '../../util';\nimport { greaterEqual } from '../greater_equal';\nimport { lessEqual } from '../less_equal';\nimport { logicalAnd } from '../logical_and';\nimport { op } from '../operation';\nimport { range } from '../range';\nimport { reshape } from '../reshape';\nimport { scalar } from '../scalar';\nimport { stack } from '../stack';\nimport { sub } from '../sub';\nimport { unstack } from '../unstack';\nimport { where } from '../where';\nimport { zeros } from '../zeros';\n/**\n * Copy a tensor setting everything outside a central band in each innermost\n * matrix to zero.\n *\n * The band part is computed as follows: Assume input has `k` dimensions\n * `[I, J, K, ..., M, N]`, then the output is a tensor with the same shape where\n * `band[i, j, k, ..., m, n] = in_band(m, n) * input[i, j, k, ..., m, n]`.\n * The indicator function\n * `in_band(m, n) = (num_lower < 0 || (m-n) <= num_lower))`\n * `&& (num_upper < 0 || (n-m) <= num_upper)`\n *\n * ```js\n * const x = tf.tensor2d([[ 0, 1, 2, 3],\n * [-1, 0, 1, 2],\n * [-2, -1, 0, 1],\n * [-3, -2, -1, 0]]);\n * let y = tf.linalg.bandPart(x, 1, -1);\n * y.print(); // [[ 0, 1, 2, 3],\n * // [-1, 0, 1, 2],\n * // [ 0, -1, 0, 1],\n * // [ 0, 0 , -1, 0]]\n * let z = tf.linalg.bandPart(x, 
2, 1);\n * z.print(); // [[ 0, 1, 0, 0],\n * // [-1, 0, 1, 0],\n * // [-2, -1, 0, 1],\n * // [ 0, -2, -1, 0]]\n * ```\n *\n * @param x Rank `k` tensor\n * @param numLower Number of subdiagonals to keep.\n * If negative, keep entire lower triangle.\n * @param numUpper Number of subdiagonals to keep.\n * If negative, keep entire upper triangle.\n * @returns Rank `k` tensor of the same shape as input.\n * The extracted banded tensor.\n *\n * @doc {heading:'Operations', subheading:'Linear Algebra', namespace:'linalg'}\n */\nfunction bandPart_(a, numLower, numUpper) {\n assert(numLower % 1 === 0, () => `bandPart(): numLower must be an integer, got ${numLower}.`);\n assert(numUpper % 1 === 0, () => `bandPart(): numUpper must be an integer, got ${numUpper}.`);\n const $a = convertToTensor(a, 'a', 'bandPart');\n assert($a.rank >= 2, () => `bandPart(): Rank must be at least 2, got ${$a.rank}.`);\n const shape = $a.shape;\n const [M, N] = $a.shape.slice(-2);\n if (!(numLower <= M)) {\n throw new Error(`bandPart(): numLower (${numLower})` +\n ` must not be greater than the number of rows (${M}).`);\n }\n if (!(numUpper <= N)) {\n throw new Error(`bandPart(): numUpper (${numUpper})` +\n ` must not be greater than the number of columns (${N}).`);\n }\n if (numLower < 0) {\n numLower = M;\n }\n if (numUpper < 0) {\n numUpper = N;\n }\n const i = reshape(range(0, M, 1, 'int32'), [-1, 1]);\n const j = range(0, N, 1, 'int32');\n const ij = sub(i, j);\n const inBand = logicalAnd(lessEqual(ij, scalar(+numLower, 'int32')), greaterEqual(ij, scalar(-numUpper, 'int32')));\n const zero = zeros([M, N], $a.dtype);\n return reshape(stack(unstack(reshape($a, [-1, M, N]))\n .map(mat => where(inBand, mat, zero))), shape);\n}\nexport const bandPart = op({ bandPart_ });\n//# sourceMappingURL=band_part.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
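// Illustrative sketch for tf.linalg.bandPart as documented above: keep numLower
// subdiagonals and numUpper superdiagonals, with -1 meaning the whole triangle.
// Assumes @tensorflow/tfjs.
import * as tf from '@tensorflow/tfjs';
const x = tf.tensor2d([[ 0,  1,  2,  3],
                       [-1,  0,  1,  2],
                       [-2, -1,  0,  1],
                       [-3, -2, -1,  0]]);
tf.linalg.bandPart(x, 1, -1).print(); // 1 subdiagonal kept, full upper triangle kept
tf.linalg.bandPart(x, 2, 1).print();  // 2 subdiagonals and 1 superdiagonal kept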
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { assert } from '../../util';\nimport { div } from '../div';\nimport { mul } from '../mul';\nimport { norm } from '../norm';\nimport { op } from '../operation';\nimport { split } from '../split';\nimport { squeeze } from '../squeeze';\nimport { stack } from '../stack';\nimport { sub } from '../sub';\nimport { sum } from '../sum';\n/**\n * Gram-Schmidt orthogonalization.\n *\n * ```js\n * const x = tf.tensor2d([[1, 2], [3, 4]]);\n * let y = tf.linalg.gramSchmidt(x);\n * y.print();\n * console.log('Othogonalized:');\n * y.dot(y.transpose()).print(); // should be nearly the identity matrix.\n * console.log('First row direction maintained:');\n * const data = await y.array();\n * console.log(data[0][1] / data[0][0]); // should be nearly 2.\n * ```\n *\n * @param xs The vectors to be orthogonalized, in one of the two following\n * formats:\n * - An Array of `tf.Tensor1D`.\n * - A `tf.Tensor2D`, i.e., a matrix, in which case the vectors are the rows\n * of `xs`.\n * In each case, all the vectors must have the same length and the length\n * must be greater than or equal to the number of vectors.\n * @returns The orthogonalized and normalized vectors or matrix.\n * Orthogonalization means that the vectors or the rows of the matrix\n * are orthogonal (zero inner products). Normalization means that each\n * vector or each row of the matrix has an L2 norm that equals `1`.\n *\n * @doc {heading:'Operations', subheading:'Linear Algebra', namespace:'linalg'}\n */\nfunction gramSchmidt_(xs) {\n let inputIsTensor2D;\n if (Array.isArray(xs)) {\n inputIsTensor2D = false;\n assert(xs != null && xs.length > 0, () => 'Gram-Schmidt process: input must not be null, undefined, or ' +\n 'empty');\n const dim = xs[0].shape[0];\n for (let i = 1; i < xs.length; ++i) {\n assert(xs[i].shape[0] === dim, () => 'Gram-Schmidt: Non-unique lengths found in the input vectors: ' +\n `(${xs[i].shape[0]} vs. ${dim})`);\n }\n }\n else {\n inputIsTensor2D = true;\n xs = split(xs, xs.shape[0], 0).map(x => squeeze(x, [0]));\n }\n assert(xs.length <= xs[0].shape[0], () => `Gram-Schmidt: Number of vectors (${xs.length}) exceeds ` +\n `number of dimensions (${xs[0].shape[0]}).`);\n const ys = [];\n const xs1d = xs;\n for (let i = 0; i < xs.length; ++i) {\n ys.push(ENGINE.tidy(() => {\n let x = xs1d[i];\n if (i > 0) {\n for (let j = 0; j < i; ++j) {\n const proj = mul(sum(mul(ys[j], x)), ys[j]);\n x = sub(x, proj);\n }\n }\n return div(x, norm(x, 'euclidean'));\n }));\n }\n if (inputIsTensor2D) {\n return stack(ys, 0);\n }\n else {\n return ys;\n }\n}\nexport const gramSchmidt = op({ gramSchmidt_ });\n//# sourceMappingURL=gram_schmidt.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { dispose } from '../../globals';\nimport { assert } from '../../util';\nimport { clone } from '../clone';\nimport { concat } from '../concat';\nimport { div } from '../div';\nimport { eye } from '../eye';\nimport { greater } from '../greater';\nimport { matMul } from '../mat_mul';\nimport { mul } from '../mul';\nimport { neg } from '../neg';\nimport { norm } from '../norm';\nimport { op } from '../operation';\nimport { reshape } from '../reshape';\nimport { slice } from '../slice';\nimport { stack } from '../stack';\nimport { sub } from '../sub';\nimport { tensor2d } from '../tensor2d';\nimport { transpose } from '../transpose';\nimport { unstack } from '../unstack';\nimport { where } from '../where';\n/**\n * Compute QR decomposition of m-by-n matrix using Householder transformation.\n *\n * Implementation based on\n * [http://www.cs.cornell.edu/~bindel/class/cs6210-f09/lec18.pdf]\n * (http://www.cs.cornell.edu/~bindel/class/cs6210-f09/lec18.pdf)\n *\n * ```js\n * const a = tf.tensor2d([[1, 2], [3, 4]]);\n * let [q, r] = tf.linalg.qr(a);\n * console.log('Q');\n * q.print();\n * console.log('R');\n * r.print();\n * console.log('Orthogonalized');\n * q.dot(q.transpose()).print() // should be nearly the identity matrix.\n * console.log('Reconstructed');\n * q.dot(r).print(); // should be nearly [[1, 2], [3, 4]];\n * ```\n *\n * @param x The `tf.Tensor` to be QR-decomposed. Must have rank >= 2. Suppose\n * it has the shape `[..., M, N]`.\n * @param fullMatrices An optional boolean parameter. Defaults to `false`.\n * If `true`, compute full-sized `Q`. If `false` (the default),\n * compute only the leading N columns of `Q` and `R`.\n * @returns An `Array` of two `tf.Tensor`s: `[Q, R]`. `Q` is a unitary matrix,\n * i.e., its columns all have unit norm and are mutually orthogonal.\n * If `M >= N`,\n * If `fullMatrices` is `false` (default),\n * - `Q` has a shape of `[..., M, N]`,\n * - `R` has a shape of `[..., N, N]`.\n * If `fullMatrices` is `true` (default),\n * - `Q` has a shape of `[..., M, M]`,\n * - `R` has a shape of `[..., M, N]`.\n * If `M < N`,\n * - `Q` has a shape of `[..., M, M]`,\n * - `R` has a shape of `[..., M, N]`.\n * @throws If the rank of `x` is less than 2.\n *\n * @doc {heading:'Operations',\n * subheading:'Linear Algebra',\n * namespace:'linalg'}\n */\nfunction qr_(x, fullMatrices = false) {\n assert(x.rank >= 2, () => `qr() requires input tensor to have a rank >= 2, but got rank ${x.rank}`);\n if (x.rank === 2) {\n return qr2d(x, fullMatrices);\n }\n else {\n // Rank > 2.\n // TODO(cais): Below we split the input into individual 2D tensors,\n // perform QR decomposition on them and then stack the results back\n // together. 
We should explore whether this can be parallelized.\n const outerDimsProd = x.shape.slice(0, x.shape.length - 2)\n .reduce((value, prev) => value * prev);\n const x2ds = unstack(reshape(x, [\n outerDimsProd, x.shape[x.shape.length - 2],\n x.shape[x.shape.length - 1]\n ]), 0);\n const q2ds = [];\n const r2ds = [];\n x2ds.forEach(x2d => {\n const [q2d, r2d] = qr2d(x2d, fullMatrices);\n q2ds.push(q2d);\n r2ds.push(r2d);\n });\n const q = reshape(stack(q2ds, 0), x.shape);\n const r = reshape(stack(r2ds, 0), x.shape);\n return [q, r];\n }\n}\nfunction qr2d(x, fullMatrices = false) {\n return ENGINE.tidy(() => {\n assert(x.shape.length === 2, () => `qr2d() requires a 2D Tensor, but got a ${x.shape.length}D Tensor.`);\n const m = x.shape[0];\n const n = x.shape[1];\n let q = eye(m); // Orthogonal transform so far.\n let r = clone(x); // Transformed matrix so far.\n const one2D = tensor2d([[1]], [1, 1]);\n let w = clone(one2D);\n const iters = m >= n ? n : m;\n for (let j = 0; j < iters; ++j) {\n // This tidy within the for-loop ensures we clean up temporary\n // tensors as soon as they are no longer needed.\n const rTemp = r;\n const wTemp = w;\n const qTemp = q;\n [w, r, q] = ENGINE.tidy(() => {\n // Find H = I - tau * w * w', to put zeros below R(j, j).\n const rjEnd1 = slice(r, [j, j], [m - j, 1]);\n const normX = norm(rjEnd1);\n const rjj = slice(r, [j, j], [1, 1]);\n // The sign() function returns 0 on 0, which causes division by zero.\n const s = where(greater(rjj, 0), tensor2d([[-1]]), tensor2d([[1]]));\n const u1 = sub(rjj, mul(s, normX));\n const wPre = div(rjEnd1, u1);\n if (wPre.shape[0] === 1) {\n w = clone(one2D);\n }\n else {\n w = concat([\n one2D,\n slice(wPre, [1, 0], [wPre.shape[0] - 1, wPre.shape[1]])\n ], 0);\n }\n const tau = neg(div(matMul(s, u1), normX));\n // -- R := HR, Q := QH.\n const rjEndAll = slice(r, [j, 0], [m - j, n]);\n const tauTimesW = mul(tau, w);\n const wT = transpose(w);\n if (j === 0) {\n r = sub(rjEndAll, matMul(tauTimesW, matMul(wT, rjEndAll)));\n }\n else {\n const rTimesTau = sub(rjEndAll, matMul(tauTimesW, matMul(wT, rjEndAll)));\n r = concat([slice(r, [0, 0], [j, n]), rTimesTau], 0);\n }\n const tawTimesWT = transpose(tauTimesW);\n const qAllJEnd = slice(q, [0, j], [m, q.shape[1] - j]);\n if (j === 0) {\n q = sub(qAllJEnd, matMul(matMul(qAllJEnd, w), tawTimesWT));\n }\n else {\n const qTimesTau = sub(qAllJEnd, matMul(matMul(qAllJEnd, w), tawTimesWT));\n q = concat([slice(q, [0, 0], [m, j]), qTimesTau], 1);\n }\n return [w, r, q];\n });\n dispose([rTemp, wTemp, qTemp]);\n }\n if (!fullMatrices && m > n) {\n q = slice(q, [0, 0], [m, n]);\n r = slice(r, [0, 0], [n, n]);\n }\n return [q, r];\n });\n}\nexport const qr = op({ qr_ });\n//# sourceMappingURL=qr.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport var Reduction;\n(function (Reduction) {\n Reduction[Reduction[\"NONE\"] = 0] = \"NONE\";\n Reduction[Reduction[\"MEAN\"] = 1] = \"MEAN\";\n Reduction[Reduction[\"SUM\"] = 2] = \"SUM\";\n Reduction[Reduction[\"SUM_BY_NONZERO_WEIGHTS\"] = 3] = \"SUM_BY_NONZERO_WEIGHTS\";\n})(Reduction || (Reduction = {}));\n//# sourceMappingURL=loss_ops_utils.js.map", "import { convertToTensor } from '../../tensor_util_env';\nimport { cast } from '../cast';\nimport { div } from '../div';\nimport { Reduction } from '../loss_ops_utils';\nimport { mean } from '../mean';\nimport { mul } from '../mul';\nimport { notEqual } from '../not_equal';\nimport { ones } from '../ones';\nimport { op } from '../operation';\nimport { scalar } from '../scalar';\nimport { sum } from '../sum';\n/**\n * Computes the weighted loss between two tensors.\n *\n * @param losses Tensor of shape `[batch_size, d1, ... dN]`.\n * @param weights Tensor whose rank is either 0, or the same rank as\n * `losses`, and must be broadcastable to `losses` (i.e., all\n * dimensions must be either `1`, or the same as the corresponding\n * `losses` dimension).\n *\n * @doc {heading: 'Training', subheading: 'Losses', namespace: 'losses'}\n */\nfunction computeWeightedLoss_(losses, weights, reduction = Reduction.SUM_BY_NONZERO_WEIGHTS) {\n const $losses = convertToTensor(losses, 'losses', 'computeWeightedLoss');\n let $weights = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'computeWeightedLoss');\n }\n const weightedLoss = ($weights == null) ? $losses : mul($losses, $weights);\n if (reduction === Reduction.NONE) {\n return weightedLoss;\n }\n if (reduction === Reduction.SUM) {\n return sum(weightedLoss);\n }\n if (reduction === Reduction.MEAN) {\n if ($weights == null) {\n return mean(weightedLoss);\n }\n else {\n const broadcastFactor = $losses.size / $weights.size;\n const result = div(sum(weightedLoss), sum($weights));\n return broadcastFactor > 1 ? div(result, scalar(broadcastFactor)) :\n result;\n }\n }\n if (reduction === Reduction.SUM_BY_NONZERO_WEIGHTS) {\n if ($weights == null) {\n return div(sum(weightedLoss), scalar($losses.size));\n }\n else {\n const broadcastedWeights = mul($weights, ones($losses.shape));\n const numNonZeros = cast(sum(notEqual(broadcastedWeights, scalar(0))), 'float32');\n return div(sum(weightedLoss), numNonZeros);\n }\n }\n throw Error(`Unknown reduction: ${reduction}`);\n}\nexport const computeWeightedLoss = op({ computeWeightedLoss_ });\n//# sourceMappingURL=compute_weighted_loss.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../../tensor_util_env';\nimport { assertShapesMatch } from '../../util';\nimport { abs } from '../abs';\nimport { Reduction } from '../loss_ops_utils';\nimport { op } from '../operation';\nimport { sub } from '../sub';\nimport { computeWeightedLoss } from './compute_weighted_loss';\n/**\n * Computes the absolute difference loss between two tensors.\n *\n * @param labels The ground truth output tensor, same dimensions as\n * 'predictions'.\n * @param predictions The predicted outputs.\n * @param weights Tensor whose rank is either 0, or the same rank as\n * `labels`, and must be broadcastable to `labels` (i.e., all dimensions\n * must be either `1`, or the same as the corresponding `losses`\n * dimension).\n * @param reduction Type of reduction to apply to loss. Should be of type\n * `Reduction`\n *\n * @doc {heading: 'Training', subheading: 'Losses', namespace: 'losses'}\n */\nfunction absoluteDifference_(labels, predictions, weights, reduction = Reduction.SUM_BY_NONZERO_WEIGHTS) {\n const $labels = convertToTensor(labels, 'labels', 'absoluteDifference');\n const $predictions = convertToTensor(predictions, 'predictions', 'absoluteDifference');\n let $weights = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'absoluteDifference');\n }\n assertShapesMatch($labels.shape, $predictions.shape, 'Error in absoluteDifference: ');\n const losses = abs(sub($labels, $predictions));\n return computeWeightedLoss(losses, $weights, reduction);\n}\nexport const absoluteDifference = op({ absoluteDifference_ });\n//# sourceMappingURL=absolute_difference.js.map", "import { convertToTensor } from '../../tensor_util_env';\nimport { assertShapesMatch } from '../../util';\nimport { Reduction } from '../loss_ops_utils';\nimport { mul } from '../mul';\nimport { op } from '../operation';\nimport { scalar } from '../scalar';\nimport { sub } from '../sub';\nimport { sum } from '../sum';\nimport { computeWeightedLoss } from './compute_weighted_loss';\n/**\n * Computes the cosine distance loss between two tensors.\n *\n * @param labels The ground truth output tensor, same dimensions as\n * 'predictions'.\n * @param predictions The predicted outputs.\n * @param axis The dimension along which the cosine distance is computed.\n * @param weights Tensor whose rank is either 0, or the same rank as\n * `labels`, and must be broadcastable to `labels` (i.e., all dimensions\n * must be either `1`, or the same as the corresponding `losses`\n * dimension).\n * @param reduction Type of reduction to apply to loss. 
Should be of type\n * `Reduction`\n *\n * @doc {heading: 'Training', subheading: 'Losses', namespace: 'losses'}\n */\nfunction cosineDistance_(labels, predictions, axis, weights, reduction = Reduction.SUM_BY_NONZERO_WEIGHTS) {\n const $labels = convertToTensor(labels, 'labels', 'cosineDistance');\n const $predictions = convertToTensor(predictions, 'predictions', 'cosineDistance');\n let $weights = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'cosineDistance');\n }\n assertShapesMatch($labels.shape, $predictions.shape, 'Error in cosineDistance: ');\n const one = scalar(1);\n const losses = sub(one, sum(mul($labels, $predictions), axis, true));\n return computeWeightedLoss(losses, $weights, reduction);\n}\nexport const cosineDistance = op({ cosineDistance_ });\n//# sourceMappingURL=cosine_distance.js.map", "import { convertToTensor } from '../../tensor_util_env';\nimport { assertShapesMatch } from '../../util';\nimport { Reduction } from '../loss_ops_utils';\nimport { mul } from '../mul';\nimport { op } from '../operation';\nimport { relu } from '../relu';\nimport { scalar } from '../scalar';\nimport { sub } from '../sub';\nimport { computeWeightedLoss } from './compute_weighted_loss';\n/**\n * Computes the Hinge loss between two tensors.\n *\n * @param labels The ground truth output tensor, same dimensions as\n * 'predictions'.\n * @param predictions The predicted outputs.\n * @param weights Tensor whose rank is either 0, or the same rank as\n * `labels`, and must be broadcastable to `labels` (i.e., all dimensions\n * must be either `1`, or the same as the corresponding `losses`\n * dimension).\n * @param reduction Type of reduction to apply to loss. Should be of type\n * `Reduction`\n *\n * @doc {heading: 'Training', subheading: 'Losses', namespace: 'losses'}\n */\nfunction hingeLoss_(labels, predictions, weights, reduction = Reduction.SUM_BY_NONZERO_WEIGHTS) {\n let $labels = convertToTensor(labels, 'labels', 'hingeLoss');\n const $predictions = convertToTensor(predictions, 'predictions', 'hingeLoss');\n let $weights = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'hingeLoss');\n }\n assertShapesMatch($labels.shape, $predictions.shape, 'Error in hingeLoss: ');\n const one = scalar(1);\n // Convert binary labels to (-1, 1)\n $labels = sub(mul(scalar(2), $labels), one);\n const losses = relu(sub(one, mul($labels, $predictions)));\n return computeWeightedLoss(losses, $weights, reduction);\n}\nexport const hingeLoss = op({ hingeLoss_ });\n//# sourceMappingURL=hinge_loss.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../../tensor_util_env';\nimport { assertShapesMatch } from '../../util';\nimport { abs } from '../abs';\nimport { add } from '../add';\nimport { Reduction } from '../loss_ops_utils';\nimport { minimum } from '../minimum';\nimport { mul } from '../mul';\nimport { op } from '../operation';\nimport { scalar } from '../scalar';\nimport { square } from '../square';\nimport { sub } from '../sub';\nimport { computeWeightedLoss } from './compute_weighted_loss';\n/**\n * Computes the huber loss between two tensors.\n *\n * @param labels The ground truth output tensor, same dimensions as\n * 'predictions'.\n * @param predictions The predicted outputs.\n * @param weights Tensor whose rank is either 0, or the same rank as\n * `labels`, and must be broadcastable to `labels` (i.e., all dimensions\n * must be either `1`, or the same as the corresponding `losses`\n * dimension).\n * @param delta Point where huber loss changes from quadratic to linear.\n * @param reduction Type of reduction to apply to loss. Should be of type\n * `Reduction`.\n *\n * @doc {heading: 'Training', subheading: 'Losses', namespace: 'losses'}\n */\nfunction huberLoss_(labels, predictions, weights, delta = 1.0, reduction = Reduction.SUM_BY_NONZERO_WEIGHTS) {\n const $labels = convertToTensor(labels, 'labels', 'huberLoss');\n const $predictions = convertToTensor(predictions, 'predictions', 'huberLoss');\n let $weights = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'huberLoss');\n }\n assertShapesMatch($labels.shape, $predictions.shape, 'Error in huberLoss: ');\n const deltaScalar = scalar(delta);\n const error = abs(sub($predictions, $labels));\n const quadratic = minimum(error, deltaScalar);\n const linear = sub(error, quadratic);\n const losses = add(mul(scalar(0.5), square(quadratic)), mul(deltaScalar, linear));\n return computeWeightedLoss(losses, $weights, reduction);\n}\nexport const huberLoss = op({ huberLoss_ });\n//# sourceMappingURL=huber_loss.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../../tensor_util_env';\nimport { assertShapesMatch } from '../../util';\nimport { add } from '../add';\nimport { log } from '../log';\nimport { Reduction } from '../loss_ops_utils';\nimport { mul } from '../mul';\nimport { neg } from '../neg';\nimport { op } from '../operation';\nimport { scalar } from '../scalar';\nimport { sub } from '../sub';\nimport { computeWeightedLoss } from './compute_weighted_loss';\n/**\n * Computes the log loss between two tensors.\n *\n * @param labels The ground truth output tensor, same dimensions as\n * 'predictions'.\n * @param predictions The predicted outputs.\n * @param weights Tensor whose rank is either 0, or the same rank as\n * `labels`, and must be broadcastable to `labels` (i.e., all dimensions\n * must be either `1`, or the same as the corresponding `losses`\n * dimension).\n * @param epsilon A small increment to avoid taking log of zero\n * @param reduction Type of reduction to apply to loss. Should be of type\n * `Reduction`\n *\n * @doc {heading: 'Training', subheading: 'Losses', namespace: 'losses'}\n */\nfunction logLoss_(labels, predictions, weights, epsilon = 1e-7, reduction = Reduction.SUM_BY_NONZERO_WEIGHTS) {\n const $labels = convertToTensor(labels, 'labels', 'logLoss');\n const $predictions = convertToTensor(predictions, 'predictions', 'logLoss');\n let $weights = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'logLoss');\n }\n assertShapesMatch($labels.shape, $predictions.shape, 'Error in logLoss: ');\n const one = scalar(1);\n const epsilonScalar = scalar(epsilon);\n const l1 = neg(mul($labels, log(add($predictions, epsilonScalar))));\n const l2 = mul(sub(one, $labels), log(add(sub(one, $predictions), epsilonScalar)));\n const losses = sub(l1, l2);\n return computeWeightedLoss(losses, $weights, reduction);\n}\nexport const logLoss = op({ logLoss_ });\n//# sourceMappingURL=log_loss.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../../tensor_util_env';\nimport { assertShapesMatch } from '../../util';\nimport { Reduction } from '../loss_ops_utils';\nimport { op } from '../operation';\nimport { squaredDifference } from '../squared_difference';\nimport { computeWeightedLoss } from './compute_weighted_loss';\n/**\n * Computes the mean squared error between two tensors.\n *\n * @param labels The ground truth output tensor, same dimensions as\n * 'predictions'.\n * @param predictions The predicted outputs.\n * @param weights Tensor whose rank is either 0, or the same rank as\n * `labels`, and must be broadcastable to `labels` (i.e., all dimensions\n * must be either `1`, or the same as the corresponding `losses`\n * dimension).\n * @param reduction Type of reduction to apply to loss. Should be of type\n * `Reduction`\n *\n * @doc {heading: 'Training', subheading: 'Losses', namespace: 'losses'}\n */\nfunction meanSquaredError_(labels, predictions, weights, reduction = Reduction.SUM_BY_NONZERO_WEIGHTS) {\n const $labels = convertToTensor(labels, 'labels', 'meanSquaredError');\n const $predictions = convertToTensor(predictions, 'predictions', 'meanSquaredError');\n let $weights = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'meanSquaredError');\n }\n assertShapesMatch($labels.shape, $predictions.shape, 'Error in meanSquaredError: ');\n const losses = squaredDifference($labels, $predictions);\n return computeWeightedLoss(losses, $weights, reduction);\n}\nexport const meanSquaredError = op({ meanSquaredError_ });\n//# sourceMappingURL=mean_squared_error.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../../tensor_util_env';\nimport { assertShapesMatch } from '../../util';\nimport { abs } from '../abs';\nimport { add } from '../add';\nimport { exp } from '../exp';\nimport { log1p } from '../log1p';\nimport { Reduction } from '../loss_ops_utils';\nimport { mul } from '../mul';\nimport { neg } from '../neg';\nimport { op } from '../operation';\nimport { relu } from '../relu';\nimport { scalar } from '../scalar';\nimport { sub } from '../sub';\nimport { computeWeightedLoss } from './compute_weighted_loss';\nfunction sigmoidCrossEntropyWithLogits_(labels, logits) {\n const $labels = convertToTensor(labels, 'labels', 'sigmoidCrossEntropyWithLogits');\n const $logits = convertToTensor(logits, 'logits', 'sigmoidCrossEntropyWithLogits');\n assertShapesMatch($labels.shape, $logits.shape, 'Error in sigmoidCrossEntropyWithLogits: ');\n /**\n * Implementation Details:\n *\n * For brevity, let `x = logits`, `z = labels`. The logistic loss is\n * z * -log(sigmoid(x)) + (1 - z) * -log(1 - sigmoid(x))\n * = z * -log(1 / (1 + exp(-x))) + (1 - z) * -log(exp(-x) / (1 + exp(-x)))\n * = z * log(1 + exp(-x)) + (1 - z) * (-log(exp(-x)) + log(1 + exp(-x)))\n * = z * log(1 + exp(-x)) + (1 - z) * (x + log(1 + exp(-x))\n * = (1 - z) * x + log(1 + exp(-x))\n * = x - x * z + log(1 + exp(-x))\n *\n * For x < 0, to avoid overflow in exp(-x), we reformulate the above\n * x - x * z + log(1 + exp(-x))\n * = log(exp(x)) - x * z + log(1 + exp(-x))\n * = - x * z + log(1 + exp(x))\n *\n * Hence, to ensure stability and avoid overflow, the implementation uses\n * this equivalent formulation:\n * max(x, 0) - x * z + log(1 + exp(-abs(x)))\n */\n const maxOutput = relu($logits);\n const outputXTarget = mul($logits, $labels);\n const sigmoidOutput = log1p(exp(neg(abs($logits))));\n return add(sub(maxOutput, outputXTarget), sigmoidOutput);\n}\n/**\n * Computes the sigmoid cross entropy loss between two tensors.\n *\n * If labelSmoothing is nonzero, smooth the labels towards 1/2:\n *\n * newMulticlassLabels = multiclassLabels * (1 - labelSmoothing)\n * + 0.5 * labelSmoothing\n *\n * @param multiClassLabels The ground truth output tensor of shape\n * [batch_size, num_classes], same dimensions as 'predictions'.\n * @param logits The predicted outputs.\n * @param weights Tensor whose rank is either 0, or the same rank as\n * `labels`, and must be broadcastable to `labels` (i.e., all dimensions\n * must be either `1`, or the same as the corresponding `losses`\n * dimension).\n * @param labelSmoothing If greater than 0, then smooth the labels.\n * @param reduction Type of reduction to apply to loss. 
Should be of type\n * `Reduction`\n *\n * @doc { heading: 'Training', subheading: 'Losses', namespace: 'losses' }\n */\nfunction sigmoidCrossEntropy_(multiClassLabels, logits, weights, labelSmoothing = 0, reduction = Reduction.SUM_BY_NONZERO_WEIGHTS) {\n let $multiClassLabels = convertToTensor(multiClassLabels, 'multiClassLabels', 'sigmoidCrossEntropy');\n const $logits = convertToTensor(logits, 'logits', 'sigmoidCrossEntropy');\n let $weights = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'sigmoidCrossEntropy');\n }\n assertShapesMatch($multiClassLabels.shape, $logits.shape, 'Error in sigmoidCrossEntropy: ');\n if (labelSmoothing > 0) {\n const labelSmoothingScalar = scalar(labelSmoothing);\n const one = scalar(1);\n const half = scalar(0.5);\n $multiClassLabels =\n add(mul($multiClassLabels, sub(one, labelSmoothingScalar)), mul(half, labelSmoothingScalar));\n }\n const losses = sigmoidCrossEntropyWithLogits_($multiClassLabels, $logits);\n return computeWeightedLoss(losses, $weights, reduction);\n}\nexport const sigmoidCrossEntropy = op({ sigmoidCrossEntropy_ });\n//# sourceMappingURL=sigmoid_cross_entropy.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { customGrad } from '../../gradients';\nimport { convertToTensor } from '../../tensor_util_env';\nimport { assertShapesMatch } from '../../util';\nimport { add } from '../add';\nimport { expandShapeToKeepDim } from '../axis_util';\nimport { cast } from '../cast';\nimport { div } from '../div';\nimport { exp } from '../exp';\nimport { logSumExp } from '../log_sum_exp';\nimport { Reduction } from '../loss_ops_utils';\nimport { mul } from '../mul';\nimport { neg } from '../neg';\nimport { op } from '../operation';\nimport { reshape } from '../reshape';\nimport { scalar } from '../scalar';\nimport { sub } from '../sub';\nimport { sum } from '../sum';\nimport { computeWeightedLoss } from './compute_weighted_loss';\n/**\n * Computes softmax cross entropy between logits and labels.\n *\n * Measures the probability error in discrete classification tasks in which\n * the classes are mutually exclusive (each entry is in exactly one class).\n * For example, each CIFAR-10 image is labeled with one and only one label: an\n * image can be a dog or a truck, but not both.\n *\n * `NOTE`: While the classes are mutually exclusive, their probabilities need\n * not be. All that is required is that each row of labels is a valid\n * probability distribution. If they are not, the computation of the gradient\n * will be incorrect.\n *\n * `WARNING`: This op expects unscaled logits, since it performs a softmax on\n * logits internally for efficiency. Do not call this op with the output of\n * softmax, as it will produce incorrect results.\n *\n * logits and labels must have the same shape, e.g. 
[batch_size, num_classes]\n * and the same dtype.\n * @param labels The labels array.\n * @param logits The logits array.\n * @param dim The dimension softmax would be performed on. Defaults to `-1`\n * which indicates the last dimension.\n */\nfunction softmaxCrossEntropyWithLogits_(labels, logits, dim = -1) {\n if (dim === -1) {\n dim = logits.rank - 1;\n }\n if (dim !== logits.rank - 1) {\n throw Error(`Softmax cross entropy along a non-last dimension is not yet ` +\n `supported. Labels / logits was rank ${logits.rank} ` +\n `and dim was ${dim}`);\n }\n // Use a custom gradient for numerical stability.\n const customOp = customGrad((labels, logits, save) => {\n // Reference:\n // 1. http://cs231n.github.io/linear-classify/#softmax\n // 2. https://blog.feedly.com/tricks-of-the-trade-logsumexp/\n const keepDims = true;\n const lse = logSumExp(logits, [dim], keepDims);\n const logResult = sub(cast(logits, 'float32'), lse);\n save([labels, logResult]);\n const costVector = neg(mul(logResult, labels));\n const value = sum(costVector, [dim]);\n const gradFunc = (dy, saved) => {\n const [labels, logResult] = saved;\n const dyShape = expandShapeToKeepDim(dy.shape, [dim]);\n return [\n mul(reshape(dy, dyShape), sub(cast(labels, 'float32'), exp(logResult))),\n mul(reshape(dy, dyShape), sub(exp(logResult), cast(labels, 'float32'))),\n ];\n };\n return { value, gradFunc };\n });\n return customOp(labels, logits);\n}\n/**\n * Computes the softmax cross entropy loss between two tensors.\n *\n * If labelSmoothing is nonzero, smooth the labels towards 1/2:\n *\n * newOnehotLabels = onehotLabels * (1 - labelSmoothing)\n * + labelSmoothing / numClasses\n *\n * @param onehotLabels One hot encoded labels\n * [batch_size, num_classes], same dimensions as 'predictions'.\n * @param logits The predicted outputs.\n * @param weights Tensor whose rank is either 0, or 1, and must be\n * broadcastable to `loss` of shape [batch_size]\n * @param labelSmoothing If greater than 0, then smooth the labels.\n * @param reduction Type of reduction to apply to loss. Should be of type\n * `Reduction`\n *\n * @doc { heading: 'Training', subheading: 'Losses', namespace: 'losses' }\n */\nfunction softmaxCrossEntropy_(onehotLabels, logits, weights, labelSmoothing = 0, reduction = Reduction.SUM_BY_NONZERO_WEIGHTS) {\n let $onehotLabels = convertToTensor(onehotLabels, 'onehotLabels', 'softmaxCrossEntropy');\n const $logits = convertToTensor(logits, 'logits', 'softmaxCrossEntropy');\n let $weights = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'softmaxCrossEntropy');\n }\n assertShapesMatch($onehotLabels.shape, $logits.shape, 'Error in softmaxCrossEntropy: ');\n if (labelSmoothing > 0) {\n const labelSmoothingScalar = scalar(labelSmoothing);\n const one = scalar(1);\n const numClasses = scalar($onehotLabels.shape[1]);\n $onehotLabels =\n add(mul($onehotLabels, sub(one, labelSmoothingScalar)), div(labelSmoothingScalar, numClasses));\n }\n const losses = softmaxCrossEntropyWithLogits_($onehotLabels, $logits);\n return computeWeightedLoss(losses, $weights, reduction);\n}\nexport const softmaxCrossEntropy = op({ softmaxCrossEntropy_ });\n//# sourceMappingURL=softmax_cross_entropy.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// Modularized ops.\nexport { abs } from './abs';\nexport { acos } from './acos';\nexport { acosh } from './acosh';\nexport { add } from './add';\nexport { addN } from './add_n';\nexport { all } from './all';\nexport { any } from './any';\nexport { argMax } from './arg_max';\nexport { argMin } from './arg_min';\nexport { asin } from './asin';\nexport { asinh } from './asinh';\nexport { atan } from './atan';\nexport { atan2 } from './atan2';\nexport { atanh } from './atanh';\nexport { avgPool } from './avg_pool';\nexport { avgPool3d } from './avg_pool_3d';\nexport { basicLSTMCell } from './basic_lstm_cell';\nexport { batchToSpaceND } from './batch_to_space_nd';\nexport { batchNorm } from './batchnorm';\nexport { batchNorm2d } from './batchnorm2d';\nexport { batchNorm3d } from './batchnorm3d';\nexport { batchNorm4d } from './batchnorm4d';\nexport { broadcastTo } from './broadcast_to';\nexport { buffer } from './buffer';\nexport { cast } from './cast';\nexport { ceil } from './ceil';\nexport { clipByValue } from './clip_by_value';\nexport { clone } from './clone';\nexport { complex } from './complex';\nexport { concat } from './concat';\nexport { concat1d } from './concat_1d';\nexport { concat2d } from './concat_2d';\nexport { concat3d } from './concat_3d';\nexport { concat4d } from './concat_4d';\nexport { conv1d } from './conv1d';\nexport { conv2d } from './conv2d';\nexport { conv2dTranspose } from './conv2d_transpose';\nexport { conv3d } from './conv3d';\nexport { conv3dTranspose } from './conv3d_transpose';\nexport { cos } from './cos';\nexport { cosh } from './cosh';\nexport { cumsum } from './cumsum';\nexport { depthToSpace } from './depth_to_space';\nexport { depthwiseConv2d } from './depthwise_conv2d';\nexport { diag } from './diag';\nexport { dilation2d } from './dilation2d';\nexport { div } from './div';\nexport { divNoNan } from './div_no_nan';\nexport { dot } from './dot';\nexport { elu } from './elu';\nexport { equal } from './equal';\nexport { erf } from './erf';\nexport { exp } from './exp';\nexport { expandDims } from './expand_dims';\nexport { expm1 } from './expm1';\nexport { eye } from './eye';\nexport { fill } from './fill';\nexport { floor } from './floor';\nexport { floorDiv } from './floorDiv';\nexport { gather } from './gather';\nexport { greater } from './greater';\nexport { greaterEqual } from './greater_equal';\nexport { imag } from './imag';\nexport { isFinite } from './is_finite';\nexport { isInf } from './is_inf';\nexport { isNaN } from './is_nan';\nexport { leakyRelu } from './leaky_relu';\nexport { less } from './less';\nexport { lessEqual } from './less_equal';\nexport { linspace } from './linspace';\nexport { localResponseNormalization } from './local_response_normalization';\nexport { log } from './log';\nexport { log1p } from './log1p';\nexport { logSigmoid } from './log_sigmoid';\nexport { 
logSoftmax } from './log_softmax';\nexport { logSumExp } from './log_sum_exp';\nexport { logicalAnd } from './logical_and';\nexport { logicalNot } from './logical_not';\nexport { logicalOr } from './logical_or';\nexport { logicalXor } from './logical_xor';\nexport { matMul } from './mat_mul';\nexport { max } from './max';\nexport { maxPool } from './max_pool';\nexport { maxPool3d } from './max_pool_3d';\nexport { maxPoolWithArgmax } from './max_pool_with_argmax';\nexport { maximum } from './maximum';\nexport { mean } from './mean';\nexport { min } from './min';\nexport { minimum } from './minimum';\nexport { mirrorPad } from './mirror_pad';\nexport { mod } from './mod';\nexport { moments } from './moments';\nexport { mul } from './mul';\nexport { multiRNNCell } from './multi_rnn_cell';\nexport { multinomial } from './multinomial';\nexport { neg } from './neg';\nexport { notEqual } from './not_equal';\nexport { oneHot } from './one_hot';\nexport { ones } from './ones';\nexport { onesLike } from './ones_like';\nexport { outerProduct } from './outer_product';\nexport { pad } from './pad';\nexport { pad1d } from './pad1d';\nexport { pad2d } from './pad2d';\nexport { pad3d } from './pad3d';\nexport { pad4d } from './pad4d';\nexport { pool } from './pool';\nexport { pow } from './pow';\nexport { prelu } from './prelu';\nexport { print } from './print';\nexport { prod } from './prod';\nexport { rand } from './rand';\nexport { randomGamma } from './random_gamma';\nexport { randomNormal } from './random_normal';\nexport { randomUniform } from './random_uniform';\nexport { range } from './range';\nexport { real } from './real';\nexport { reciprocal } from './reciprocal';\nexport { relu } from './relu';\nexport { relu6 } from './relu6';\nexport { reshape } from './reshape';\nexport { reverse } from './reverse';\nexport { reverse1d } from './reverse_1d';\nexport { reverse2d } from './reverse_2d';\nexport { reverse3d } from './reverse_3d';\nexport { reverse4d } from './reverse_4d';\nexport { round } from './round';\nexport { rsqrt } from './rsqrt';\nexport { scalar } from './scalar';\nexport { selu } from './selu';\nexport { separableConv2d } from './separable_conv2d';\nexport { setdiff1dAsync } from './setdiff1d_async';\nexport { sigmoid } from './sigmoid';\nexport { sign } from './sign';\nexport { sin } from './sin';\nexport { sinh } from './sinh';\nexport { slice } from './slice';\nexport { slice1d } from './slice1d';\nexport { slice2d } from './slice2d';\nexport { slice3d } from './slice3d';\nexport { slice4d } from './slice4d';\nexport { softmax } from './softmax';\nexport { softplus } from './softplus';\nexport { spaceToBatchND } from './space_to_batch_nd';\nexport { fft } from './spectral/fft';\nexport { ifft } from './spectral/ifft';\nexport { irfft } from './spectral/irfft';\nexport { rfft } from './spectral/rfft';\nexport { split } from './split';\nexport { sqrt } from './sqrt';\nexport { square } from './square';\nexport { squaredDifference } from './squared_difference';\nexport { squeeze } from './squeeze';\nexport { stack } from './stack';\nexport { step } from './step';\nexport { stridedSlice } from './strided_slice';\nexport { sub } from './sub';\nexport { sum } from './sum';\nexport { tan } from './tan';\nexport { tanh } from './tanh';\nexport { tensor } from './tensor';\nexport { tensor1d } from './tensor1d';\nexport { tensor2d } from './tensor2d';\nexport { tensor3d } from './tensor3d';\nexport { tensor4d } from './tensor4d';\nexport { tensor5d } from './tensor5d';\nexport { tensor6d 
} from './tensor6d';\nexport { tile } from './tile';\nexport { topk } from './topk';\nexport { truncatedNormal } from './truncated_normal';\nexport { unique } from './unique';\nexport { unsortedSegmentSum } from './unsorted_segment_sum';\nexport { unstack } from './unstack';\nexport { variable } from './variable';\nexport { where } from './where';\nexport { whereAsync } from './where_async';\nexport { zeros } from './zeros';\nexport { zerosLike } from './zeros_like';\nexport * from './boolean_mask';\nexport * from './compare';\nexport * from './binary_ops';\nexport * from './transpose';\nexport * from './norm';\nexport * from './moving_average';\nexport * from './scatter_nd';\nexport * from './sparse_to_dense';\nexport * from './gather_nd';\nexport * from './dropout';\nexport * from './signal_ops_util';\nexport * from './in_top_k';\nexport { op, OP_SCOPE_SUFFIX } from './operation';\nimport { rfft } from './spectral/rfft';\nimport { fft } from './spectral/fft';\nimport { ifft } from './spectral/ifft';\nimport { irfft } from './spectral/irfft';\nconst spectral = {\n fft,\n ifft,\n rfft,\n irfft\n};\nimport * as fused from './fused_ops';\nimport { hammingWindow } from './signal/hamming_window';\nimport { hannWindow } from './signal/hann_window';\nimport { frame } from './signal/frame';\nimport { stft } from './signal/stft';\nconst signal = {\n hammingWindow,\n hannWindow,\n frame,\n stft,\n};\n// Image Ops namespace\nimport { cropAndResize } from './image/crop_and_resize';\nimport { flipLeftRight } from './image/flip_left_right';\nimport { rotateWithOffset } from './image/rotate_with_offset';\nimport { nonMaxSuppression } from './image/non_max_suppression';\nimport { nonMaxSuppressionAsync } from './image/non_max_suppression_async';\nimport { nonMaxSuppressionWithScore } from './image/non_max_suppression_with_score';\nimport { nonMaxSuppressionWithScoreAsync } from './image/non_max_suppression_with_score_async';\nimport { nonMaxSuppressionPadded } from './image/non_max_suppression_padded';\nimport { nonMaxSuppressionPaddedAsync } from './image/non_max_suppression_padded_async';\nimport { resizeBilinear } from './image/resize_bilinear';\nimport { resizeNearestNeighbor } from './image/resize_nearest_neighbor';\nconst image = {\n flipLeftRight,\n resizeNearestNeighbor,\n resizeBilinear,\n rotateWithOffset,\n cropAndResize,\n nonMaxSuppression,\n nonMaxSuppressionAsync,\n nonMaxSuppressionWithScore,\n nonMaxSuppressionWithScoreAsync,\n nonMaxSuppressionPadded,\n nonMaxSuppressionPaddedAsync\n};\n// linalg namespace\nimport { bandPart } from './linalg/band_part';\nimport { gramSchmidt } from './linalg/gram_schmidt';\nimport { qr } from './linalg/qr';\nconst linalg = {\n bandPart,\n gramSchmidt,\n qr\n};\n// losses namespace;\nimport { absoluteDifference } from './losses/absolute_difference';\nimport { computeWeightedLoss } from './losses/compute_weighted_loss';\nimport { cosineDistance } from './losses/cosine_distance';\nimport { hingeLoss } from './losses/hinge_loss';\nimport { huberLoss } from './losses/huber_loss';\nimport { logLoss } from './losses/log_loss';\nimport { meanSquaredError } from './losses/mean_squared_error';\nimport { sigmoidCrossEntropy } from './losses/sigmoid_cross_entropy';\nimport { softmaxCrossEntropy } from './losses/softmax_cross_entropy';\nconst losses = {\n absoluteDifference,\n computeWeightedLoss,\n cosineDistance,\n hingeLoss,\n huberLoss,\n logLoss,\n meanSquaredError,\n sigmoidCrossEntropy,\n softmaxCrossEntropy\n};\n// Second level exports.\nexport { image, 
linalg, losses, spectral, fused, signal };\n//# sourceMappingURL=ops.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { dispose } from '../globals';\nimport { variableGrads } from '../gradients';\nimport { scalar } from '../ops/ops';\nimport { Serializable } from '../serialization';\n/** @doc {heading: 'Training', subheading: 'Classes', namespace: 'train'} */\nexport class Optimizer extends Serializable {\n /**\n * Executes `f()` and minimizes the scalar output of `f()` by computing\n * gradients of y with respect to the list of trainable variables provided by\n * `varList`. If no list is provided, it defaults to all trainable variables.\n *\n * @param f The function to execute and whose output to minimize.\n * @param returnCost Whether to return the scalar cost value produced by\n * executing `f()`.\n * @param varList An optional list of variables to update. If specified, only\n * the trainable variables in varList will be updated by minimize. Defaults to\n * all trainable variables.\n *\n * @doc {heading: 'Training', subheading: 'Optimizers'}\n */\n minimize(f, returnCost = false, varList) {\n const { value, grads } = this.computeGradients(f, varList);\n if (varList != null) {\n const gradArray = varList.map(v => ({ name: v.name, tensor: grads[v.name] }));\n this.applyGradients(gradArray);\n }\n else {\n this.applyGradients(grads);\n }\n // Dispose gradients.\n dispose(grads);\n if (returnCost) {\n return value;\n }\n else {\n value.dispose();\n return null;\n }\n }\n /**\n * The number of iterations that this optimizer instance has been invoked for.\n */\n get iterations() {\n if (this.iterations_ == null) {\n this.iterations_ = 0;\n }\n return this.iterations_;\n }\n incrementIterations() {\n this.iterations_ = this.iterations + 1;\n }\n /**\n * Executes f() and computes the gradient of the scalar output of f() with\n * respect to the list of trainable variables provided by `varList`. If no\n * list is provided, it defaults to all trainable variables.\n *\n * @param f The function to execute and whose output to use for computing\n * gradients with respect to variables.\n * @param varList An optional list of variables to compute gradients with\n * respect to. If specified, only the trainable variables in varList will have\n * gradients computed with respect to. 
Defaults to all trainable variables.\n *\n * @doc {heading: 'Training', subheading: 'Optimizers'}\n */\n computeGradients(f, varList) {\n return variableGrads(f, varList);\n }\n /**\n * Dispose the variables (if any) owned by this optimizer instance.\n */\n dispose() {\n if (this.iterations_ != null) {\n dispose(this.iterations_);\n }\n }\n async saveIterations() {\n if (this.iterations_ == null) {\n this.iterations_ = 0;\n }\n return {\n name: 'iter',\n // TODO(cais): Use 'int64' type when available.\n tensor: scalar(this.iterations_, 'int32')\n };\n }\n async getWeights() {\n throw new Error('getWeights() is not implemented for this optimizer yet.');\n }\n async setWeights(weightValues) {\n throw new Error(`setWeights() is not implemented for this optimizer class ` +\n `${this.getClassName()}`);\n }\n /**\n * Extract the first element of the weight values and set it\n * as the iterations counter variable of this instance of optimizer.\n *\n * @param weightValues\n * @returns Weight values with the first element consumed and excluded.\n */\n async extractIterations(weightValues) {\n this.iterations_ = (await weightValues[0].tensor.data())[0];\n return weightValues.slice(1);\n }\n}\nObject.defineProperty(Optimizer, Symbol.hasInstance, {\n value: (instance) => {\n return instance.minimize != null && instance.computeGradients != null &&\n instance.applyGradients != null;\n }\n});\n//# sourceMappingURL=optimizer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { dispose, tidy } from '../globals';\nimport { add } from '../ops/add';\nimport { div } from '../ops/div';\nimport { mul } from '../ops/mul';\nimport { sqrt } from '../ops/ops';\nimport { square } from '../ops/square';\nimport { zerosLike } from '../ops/zeros_like';\nimport { registerClass } from '../serialization';\nimport { Optimizer } from './optimizer';\n/** @doclink Optimizer */\nexport class AdadeltaOptimizer extends Optimizer {\n constructor(learningRate, rho, epsilon = null) {\n super();\n this.learningRate = learningRate;\n this.rho = rho;\n this.epsilon = epsilon;\n this.accumulatedGrads = [];\n this.accumulatedUpdates = [];\n if (epsilon == null) {\n this.epsilon = ENGINE.backend.epsilon();\n }\n }\n applyGradients(variableGradients) {\n const variableNames = Array.isArray(variableGradients) ?\n variableGradients.map(item => item.name) :\n Object.keys(variableGradients);\n variableNames.forEach((name, i) => {\n const value = ENGINE.registeredVariables[name];\n const trainable = false;\n if (this.accumulatedGrads[i] == null) {\n this.accumulatedGrads[i] = {\n originalName: `${name}/accum_grad`,\n variable: tidy(() => zerosLike(value).variable(trainable))\n };\n }\n if (this.accumulatedUpdates[i] == null) {\n this.accumulatedUpdates[i] = {\n originalName: `${name}/accum_var`,\n variable: tidy(() => 
zerosLike(value).variable(trainable))\n };\n }\n const gradient = Array.isArray(variableGradients) ?\n variableGradients[i].tensor :\n variableGradients[name];\n if (gradient == null) {\n return;\n }\n const accumulatedGrad = this.accumulatedGrads[i].variable;\n const accumulatedUpdate = this.accumulatedUpdates[i].variable;\n tidy(() => {\n const newAccumulatedGrad = add(mul(accumulatedGrad, this.rho), mul(square(gradient), 1 - this.rho));\n const updates = mul(div(sqrt(add(accumulatedUpdate, this.epsilon)), sqrt(add(accumulatedGrad, this.epsilon))), gradient);\n const newAccumulatedUpdate = add(mul(accumulatedUpdate, this.rho), mul(square(updates), 1 - this.rho));\n accumulatedGrad.assign(newAccumulatedGrad);\n accumulatedUpdate.assign(newAccumulatedUpdate);\n const newValue = add(mul(updates, -this.learningRate), value);\n value.assign(newValue);\n });\n });\n this.incrementIterations();\n }\n dispose() {\n if (this.accumulatedUpdates != null) {\n dispose(this.accumulatedGrads.map(v => v.variable));\n dispose(this.accumulatedUpdates.map(v => v.variable));\n }\n }\n async getWeights() {\n // Order matters for Python compatibility.\n const variables = [...this.accumulatedGrads, ...this.accumulatedUpdates];\n return [await this.saveIterations()].concat(variables.map(v => ({ name: v.originalName, tensor: v.variable })));\n }\n async setWeights(weightValues) {\n weightValues = await this.extractIterations(weightValues);\n const variableCount = weightValues.length / 2;\n const trainable = false;\n this.accumulatedGrads =\n weightValues.slice(0, variableCount).map(v => ({\n originalName: v.name,\n variable: v.tensor.variable(trainable)\n }));\n this.accumulatedUpdates =\n weightValues.slice(variableCount, variableCount * 2)\n .map(v => ({\n originalName: v.name,\n variable: v.tensor.variable(trainable)\n }));\n }\n getConfig() {\n return {\n 'learningRate': this.learningRate,\n 'rho': this.rho,\n 'epsilon': this.epsilon\n };\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls(config['learningRate'], config['rho'], config['epsilon']);\n }\n}\n/** @nocollapse */\nAdadeltaOptimizer.className = 'Adadelta'; // Name matters for Python compatibility.\nregisterClass(AdadeltaOptimizer);\n//# sourceMappingURL=adadelta_optimizer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { dispose, tidy } from '../globals';\nimport { add } from '../ops/add';\nimport { div } from '../ops/div';\nimport { fill } from '../ops/fill';\nimport { mul } from '../ops/mul';\nimport { sqrt } from '../ops/sqrt';\nimport { square } from '../ops/square';\nimport { registerClass } from '../serialization';\nimport { Optimizer } from './optimizer';\n/** @doclink Optimizer */\nexport class AdagradOptimizer extends Optimizer {\n constructor(learningRate, initialAccumulatorValue = 0.1) {\n super();\n this.learningRate = learningRate;\n this.initialAccumulatorValue = initialAccumulatorValue;\n this.accumulatedGrads = [];\n }\n applyGradients(variableGradients) {\n const variableNames = Array.isArray(variableGradients) ?\n variableGradients.map(item => item.name) :\n Object.keys(variableGradients);\n variableNames.forEach((name, i) => {\n const value = ENGINE.registeredVariables[name];\n if (this.accumulatedGrads[i] == null) {\n const trainable = false;\n this.accumulatedGrads[i] = {\n originalName: `${name}/accumulator`,\n variable: tidy(() => fill(value.shape, this.initialAccumulatorValue)\n .variable(trainable))\n };\n }\n const gradient = Array.isArray(variableGradients) ?\n variableGradients[i].tensor :\n variableGradients[name];\n if (gradient == null) {\n return;\n }\n const accumulatedGrad = this.accumulatedGrads[i].variable;\n tidy(() => {\n const newAccumulatedGrad = add(accumulatedGrad, square(gradient));\n accumulatedGrad.assign(newAccumulatedGrad);\n const newValue = add(mul(div(gradient, sqrt(add(newAccumulatedGrad, ENGINE.backend.epsilon()))), -this.learningRate), value);\n value.assign(newValue);\n });\n });\n this.incrementIterations();\n }\n dispose() {\n if (this.accumulatedGrads != null) {\n dispose(this.accumulatedGrads.map(v => v.variable));\n }\n }\n async getWeights() {\n // Order matters for Python compatibility.\n return [await this.saveIterations()].concat(this.accumulatedGrads.map(v => ({ name: v.originalName, tensor: v.variable })));\n }\n async setWeights(weightValues) {\n weightValues = await this.extractIterations(weightValues);\n const trainable = false;\n this.accumulatedGrads = weightValues.map(v => ({ originalName: v.name, variable: v.tensor.variable(trainable) }));\n }\n getConfig() {\n return {\n 'learningRate': this.learningRate,\n 'initialAccumulatorValue': this.initialAccumulatorValue,\n };\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls(config['learningRate'], config['initialAccumulatorValue']);\n }\n}\n/** @nocollapse */\nAdagradOptimizer.className = 'Adagrad'; // Note: Name matters for Python compatibility.\nregisterClass(AdagradOptimizer);\n//# sourceMappingURL=adagrad_optimizer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { dispose, tidy } from '../globals';\nimport { add } from '../ops/add';\nimport { div } from '../ops/div';\nimport { mul } from '../ops/mul';\nimport { pow } from '../ops/pow';\nimport { scalar } from '../ops/scalar';\nimport { sqrt } from '../ops/sqrt';\nimport { square } from '../ops/square';\nimport { sub } from '../ops/sub';\nimport { zerosLike } from '../ops/zeros_like';\nimport { registerClass } from '../serialization';\nimport { Optimizer } from './optimizer';\nexport class AdamOptimizer extends Optimizer {\n constructor(learningRate, beta1, beta2, epsilon = null) {\n super();\n this.learningRate = learningRate;\n this.beta1 = beta1;\n this.beta2 = beta2;\n this.epsilon = epsilon;\n this.accumulatedFirstMoment = [];\n this.accumulatedSecondMoment = [];\n tidy(() => {\n // accB* will be updated by batch.\n this.accBeta1 = scalar(beta1).variable();\n this.accBeta2 = scalar(beta2).variable();\n });\n if (epsilon == null) {\n this.epsilon = ENGINE.backend.epsilon();\n }\n }\n applyGradients(variableGradients) {\n const varNames = Array.isArray(variableGradients) ?\n variableGradients.map(v => v.name) :\n Object.keys(variableGradients);\n tidy(() => {\n const oneMinusAccBeta1 = sub(1, this.accBeta1);\n const oneMinusAccBeta2 = sub(1, this.accBeta2);\n varNames.forEach((name, i) => {\n const value = ENGINE.registeredVariables[name];\n const trainable = false;\n if (this.accumulatedFirstMoment[i] == null) {\n this.accumulatedFirstMoment[i] = {\n originalName: `${name}/m`,\n variable: tidy(() => zerosLike(value).variable(trainable))\n };\n }\n if (this.accumulatedSecondMoment[i] == null) {\n this.accumulatedSecondMoment[i] = {\n originalName: `${name}/v`,\n variable: tidy(() => zerosLike(value).variable(trainable))\n };\n }\n const gradient = Array.isArray(variableGradients) ?\n variableGradients[i].tensor :\n variableGradients[name];\n if (gradient == null) {\n return;\n }\n const firstMoment = this.accumulatedFirstMoment[i].variable;\n const secondMoment = this.accumulatedSecondMoment[i].variable;\n const newFirstMoment = add(mul(firstMoment, this.beta1), mul(gradient, 1 - this.beta1));\n const newSecondMoment = add(mul(secondMoment, this.beta2), mul(square(gradient), 1 - this.beta2));\n const biasCorrectedFirstMoment = div(newFirstMoment, oneMinusAccBeta1);\n const biasCorrectedSecondMoment = div(newSecondMoment, oneMinusAccBeta2);\n firstMoment.assign(newFirstMoment);\n secondMoment.assign(newSecondMoment);\n const newValue = add(mul(div(biasCorrectedFirstMoment, add(sqrt(biasCorrectedSecondMoment), this.epsilon)), -this.learningRate), value);\n value.assign(newValue);\n });\n this.accBeta1.assign(mul(this.accBeta1, this.beta1));\n this.accBeta2.assign(mul(this.accBeta2, this.beta2));\n });\n this.incrementIterations();\n }\n dispose() {\n this.accBeta1.dispose();\n 
this.accBeta2.dispose();\n if (this.accumulatedFirstMoment != null) {\n dispose(this.accumulatedFirstMoment.map(v => v.variable));\n }\n if (this.accumulatedSecondMoment != null) {\n dispose(this.accumulatedSecondMoment.map(v => v.variable));\n }\n }\n async getWeights() {\n // Order matters for Python compatibility.\n const variables = [...this.accumulatedFirstMoment, ...this.accumulatedSecondMoment];\n return [await this.saveIterations()].concat(variables.map(v => ({ name: v.originalName, tensor: v.variable })));\n }\n async setWeights(weightValues) {\n weightValues = await this.extractIterations(weightValues);\n tidy(() => {\n this.accBeta1.assign(pow(this.beta1, this.iterations_ + 1));\n this.accBeta2.assign(pow(this.beta2, this.iterations_ + 1));\n });\n const variableCount = weightValues.length / 2;\n const trainable = false;\n this.accumulatedFirstMoment =\n weightValues.slice(0, variableCount).map(v => ({\n originalName: v.name,\n variable: v.tensor.variable(trainable)\n }));\n this.accumulatedSecondMoment =\n weightValues.slice(variableCount, variableCount * 2)\n .map(v => ({\n originalName: v.name,\n variable: v.tensor.variable(trainable)\n }));\n }\n getConfig() {\n return {\n 'learningRate': this.learningRate,\n 'beta1': this.beta1,\n 'beta2': this.beta2,\n 'epsilon': this.epsilon,\n };\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls(config['learningRate'], config['beta1'], config['beta2'], config['epsilon']);\n }\n}\n/** @nocollapse */\nAdamOptimizer.className = 'Adam'; // Note: Name matters for Python compatibility.\nregisterClass(AdamOptimizer);\n//# sourceMappingURL=adam_optimizer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { dispose, tidy } from '../globals';\nimport { abs } from '../ops/abs';\nimport { add } from '../ops/add';\nimport { div } from '../ops/div';\nimport { maximum } from '../ops/maximum';\nimport { mul } from '../ops/mul';\nimport { scalar } from '../ops/scalar';\nimport { sub } from '../ops/sub';\nimport { zerosLike } from '../ops/zeros_like';\nimport { registerClass } from '../serialization';\nimport { Optimizer } from './optimizer';\nexport class AdamaxOptimizer extends Optimizer {\n constructor(learningRate, beta1, beta2, epsilon = null, decay = 0.0) {\n super();\n this.learningRate = learningRate;\n this.beta1 = beta1;\n this.beta2 = beta2;\n this.epsilon = epsilon;\n this.decay = decay;\n this.accumulatedFirstMoment = [];\n this.accumulatedWeightedInfNorm = [];\n tidy(() => {\n this.iteration = scalar(0).variable();\n this.accBeta1 = scalar(beta1).variable();\n });\n if (epsilon == null) {\n this.epsilon = ENGINE.backend.epsilon();\n }\n }\n applyGradients(variableGradients) {\n const variableNames = Array.isArray(variableGradients) ?\n variableGradients.map(item => item.name) :\n 
Object.keys(variableGradients);\n tidy(() => {\n const oneMinusAccBeta1 = sub(1, this.accBeta1);\n const lr = div(-this.learningRate, add(mul(this.iteration, this.decay), 1));\n variableNames.forEach((name, i) => {\n const value = ENGINE.registeredVariables[name];\n const trainable = false;\n if (this.accumulatedFirstMoment[i] == null) {\n this.accumulatedFirstMoment[i] = {\n originalName: `${name}/m`,\n variable: zerosLike(value).variable(trainable)\n };\n }\n if (this.accumulatedWeightedInfNorm[i] == null) {\n this.accumulatedWeightedInfNorm[i] = {\n originalName: `${name}/v`,\n variable: zerosLike(value).variable(trainable)\n };\n }\n const gradient = Array.isArray(variableGradients) ?\n variableGradients[i].tensor :\n variableGradients[name];\n if (gradient == null) {\n return;\n }\n const firstMoment = this.accumulatedFirstMoment[i].variable;\n const weightedInfNorm = this.accumulatedWeightedInfNorm[i].variable;\n const newFirstMoment = add(mul(firstMoment, this.beta1), mul(gradient, 1 - this.beta1));\n const ut0 = mul(weightedInfNorm, this.beta2);\n const ut1 = abs(gradient);\n const newWeightedInfNorm = maximum(ut0, ut1);\n firstMoment.assign(newFirstMoment);\n weightedInfNorm.assign(newWeightedInfNorm);\n const newValue = add(mul(div(lr, oneMinusAccBeta1), div(newFirstMoment, add(newWeightedInfNorm, this.epsilon))), value);\n value.assign(newValue);\n });\n this.iteration.assign(add(this.iteration, 1));\n this.accBeta1.assign(mul(this.accBeta1, this.beta1));\n });\n this.incrementIterations();\n }\n dispose() {\n this.accBeta1.dispose();\n this.iteration.dispose();\n if (this.accumulatedFirstMoment != null) {\n dispose(this.accumulatedFirstMoment.map(v => v.variable));\n }\n if (this.accumulatedWeightedInfNorm != null) {\n dispose(this.accumulatedWeightedInfNorm.map(v => v.variable));\n }\n }\n async getWeights() {\n throw new Error('getWeights() is not implemented for Adamax yet.');\n }\n async setWeights(weightValues) {\n throw new Error('setWeights() is not implemented for Adamax yet.');\n }\n getConfig() {\n return {\n 'learningRate': this.learningRate,\n 'beta1': this.beta1,\n 'beta2': this.beta2,\n 'epsilon': this.epsilon,\n 'decay': this.decay\n };\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls(config['learningRate'], config['beta1'], config['beta2'], config['epsilon'], config['decay']);\n }\n}\n/** @nocollapse */\nAdamaxOptimizer.className = 'Adamax'; // Note: Name matters for Python compatbility.\nregisterClass(AdamaxOptimizer);\n//# sourceMappingURL=adamax_optimizer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { keep, tidy } from '../globals';\nimport { add } from '../ops/add';\nimport { mul } from '../ops/mul';\nimport { scalar } from '../ops/scalar';\nimport { registerClass } from '../serialization';\nimport { Optimizer } from './optimizer';\n/** @doclink Optimizer */\nexport class SGDOptimizer extends Optimizer {\n constructor(learningRate) {\n super();\n this.learningRate = learningRate;\n this.setLearningRate(learningRate);\n }\n applyGradients(variableGradients) {\n const varNames = Array.isArray(variableGradients) ?\n variableGradients.map(v => v.name) :\n Object.keys(variableGradients);\n varNames.forEach((name, i) => {\n const gradient = Array.isArray(variableGradients) ?\n variableGradients[i].tensor :\n variableGradients[name];\n if (gradient == null) {\n return;\n }\n const value = ENGINE.registeredVariables[name];\n tidy(() => {\n const newValue = add(mul(this.c, gradient), value);\n value.assign(newValue);\n });\n });\n this.incrementIterations();\n }\n /**\n * Sets the learning rate of the optimizer.\n */\n setLearningRate(learningRate) {\n this.learningRate = learningRate;\n if (this.c != null) {\n this.c.dispose();\n }\n this.c = keep(scalar(-learningRate));\n }\n dispose() {\n this.c.dispose();\n }\n async getWeights() {\n return [await this.saveIterations()];\n }\n async setWeights(weightValues) {\n weightValues = await this.extractIterations(weightValues);\n if (weightValues.length !== 0) {\n throw new Error('SGD optimizer does not have settable weights.');\n }\n }\n getConfig() {\n return { 'learningRate': this.learningRate };\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls(config['learningRate']);\n }\n}\n/** @nocollapse */\nSGDOptimizer.className = 'SGD'; // Note: Name matters for Python compatibility.\nregisterClass(SGDOptimizer);\n//# sourceMappingURL=sgd_optimizer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { dispose, tidy } from '../globals';\nimport { add } from '../ops/add';\nimport { mul } from '../ops/mul';\nimport { scalar } from '../ops/scalar';\nimport { zerosLike } from '../ops/zeros_like';\nimport { registerClass } from '../serialization';\nimport { SGDOptimizer } from './sgd_optimizer';\n/** @doclink Optimizer */\nexport class MomentumOptimizer extends SGDOptimizer {\n constructor(learningRate, momentum, useNesterov = false) {\n super(learningRate);\n this.learningRate = learningRate;\n this.momentum = momentum;\n this.useNesterov = useNesterov;\n this.accumulations = [];\n this.m = scalar(this.momentum);\n }\n applyGradients(variableGradients) {\n const variableNames = Array.isArray(variableGradients) ?\n variableGradients.map(item => item.name) :\n Object.keys(variableGradients);\n variableNames.forEach((name, i) => {\n const value = ENGINE.registeredVariables[name];\n if (this.accumulations[i] == null) {\n const trainable = false;\n this.accumulations[i] = {\n originalName: `${name}/momentum`,\n variable: tidy(() => zerosLike(value).variable(trainable))\n };\n }\n const accumulation = this.accumulations[i].variable;\n const gradient = Array.isArray(variableGradients) ?\n variableGradients[i].tensor :\n variableGradients[name];\n if (gradient == null) {\n return;\n }\n tidy(() => {\n let newValue;\n const newAccumulation = add(mul(this.m, accumulation), gradient);\n if (this.useNesterov) {\n newValue = add(mul(this.c, add(gradient, mul(newAccumulation, this.m))), value);\n }\n else {\n newValue = add(mul(this.c, newAccumulation), value);\n }\n accumulation.assign(newAccumulation);\n value.assign(newValue);\n });\n });\n this.incrementIterations();\n }\n dispose() {\n this.m.dispose();\n if (this.accumulations != null) {\n dispose(this.accumulations.map(v => v.variable));\n }\n }\n /**\n * Sets the momentum of the optimizer.\n *\n * @param momentum\n */\n setMomentum(momentum) {\n this.momentum = momentum;\n }\n async getWeights() {\n // Order matters for Python compatibility.\n return [await this.saveIterations()].concat(this.accumulations.map(v => ({ name: v.originalName, tensor: v.variable })));\n }\n async setWeights(weightValues) {\n weightValues = await this.extractIterations(weightValues);\n const trainable = false;\n this.accumulations = weightValues.map(v => ({ originalName: v.name, variable: v.tensor.variable(trainable) }));\n }\n getConfig() {\n return {\n 'learningRate': this.learningRate,\n 'momentum': this.momentum,\n 'useNesterov': this.useNesterov\n };\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls(config['learningRate'], config['momentum'], config['useNesterov']);\n }\n}\n/** @nocollapse */\nMomentumOptimizer.className = 'Momentum'; // Name matters for Python compatibility.\nregisterClass(MomentumOptimizer);\n//# 
sourceMappingURL=momentum_optimizer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { dispose, tidy } from '../globals';\nimport { add } from '../ops/add';\nimport { div } from '../ops/div';\nimport { mul } from '../ops/mul';\nimport { sqrt } from '../ops/sqrt';\nimport { square } from '../ops/square';\nimport { sub } from '../ops/sub';\nimport { zerosLike } from '../ops/zeros_like';\nimport { registerClass } from '../serialization';\nimport { Optimizer } from './optimizer';\n/** @doclink Optimizer */\nexport class RMSPropOptimizer extends Optimizer {\n constructor(learningRate, decay = 0.9, momentum = 0.0, epsilon = null, centered = false) {\n super();\n this.learningRate = learningRate;\n this.decay = decay;\n this.momentum = momentum;\n this.epsilon = epsilon;\n this.accumulatedMeanSquares = [];\n this.accumulatedMoments = [];\n this.accumulatedMeanGrads = [];\n this.centered = centered;\n if (epsilon == null) {\n this.epsilon = ENGINE.backend.epsilon();\n }\n if (learningRate == null) {\n throw new Error(`learningRate for RMSPropOptimizer must be defined.`);\n }\n }\n applyGradients(variableGradients) {\n const variableNames = Array.isArray(variableGradients) ?\n variableGradients.map(item => item.name) :\n Object.keys(variableGradients);\n variableNames.forEach((name, i) => {\n const value = ENGINE.registeredVariables[name];\n const trainable = false;\n if (this.accumulatedMeanSquares[i] == null) {\n this.accumulatedMeanSquares[i] = {\n originalName: `${name}/rms`,\n variable: tidy(() => zerosLike(value).variable(trainable))\n };\n }\n if (this.accumulatedMoments[i] == null) {\n this.accumulatedMoments[i] = {\n originalName: `${name}/momentum`,\n variable: tidy(() => zerosLike(value).variable(trainable))\n };\n }\n if (this.accumulatedMeanGrads[i] == null && this.centered) {\n this.accumulatedMeanGrads[i] = {\n originalName: `${name}/mg`,\n variable: tidy(() => zerosLike(value).variable(trainable))\n };\n }\n const gradient = Array.isArray(variableGradients) ?\n variableGradients[i].tensor :\n variableGradients[name];\n if (gradient == null) {\n return;\n }\n const accumulatedMeanSquare = this.accumulatedMeanSquares[i].variable;\n const accumulatedMoments = this.accumulatedMoments[i].variable;\n tidy(() => {\n const newAccumulatedMeanSquare = add(mul(accumulatedMeanSquare, this.decay), mul(square(gradient), 1 - this.decay));\n if (this.centered) {\n const accumulatedMeanGrad = this.accumulatedMeanGrads[i].variable;\n // Centered gradient\n const newAccumulatedMeanGrad = add(mul(accumulatedMeanGrad, this.decay), mul(gradient, 1 - this.decay));\n const gradContribution = div(mul(gradient, this.learningRate), sqrt(sub(newAccumulatedMeanSquare, add(square(newAccumulatedMeanGrad), this.epsilon))));\n const newAccumulatedMoments = add(mul(accumulatedMoments, this.momentum), 
gradContribution);\n accumulatedMeanSquare.assign(newAccumulatedMeanSquare);\n accumulatedMeanGrad.assign(newAccumulatedMeanGrad);\n accumulatedMoments.assign(newAccumulatedMoments);\n const newValue = sub(value, newAccumulatedMoments);\n value.assign(newValue);\n }\n else {\n // Plain gradient\n const newAccumulatedMeanSquare = add(mul(accumulatedMeanSquare, this.decay), mul(square(gradient), 1 - this.decay));\n const newAccumulatedMoments = add(mul(accumulatedMoments, this.momentum), div(mul(gradient, this.learningRate), sqrt(add(newAccumulatedMeanSquare, this.epsilon))));\n accumulatedMeanSquare.assign(newAccumulatedMeanSquare);\n accumulatedMoments.assign(newAccumulatedMoments);\n const newValue = sub(value, newAccumulatedMoments);\n value.assign(newValue);\n }\n });\n });\n this.incrementIterations();\n }\n dispose() {\n if (this.accumulatedMeanSquares != null) {\n dispose(this.accumulatedMeanSquares.map(v => v.variable));\n }\n if (this.accumulatedMeanGrads != null && this.centered) {\n dispose(this.accumulatedMeanGrads.map(v => v.variable));\n }\n if (this.accumulatedMoments != null) {\n dispose(this.accumulatedMoments.map(v => v.variable));\n }\n }\n async getWeights() {\n // Order matters for Python compatibility.\n const variables = [...this.accumulatedMeanSquares, ...this.accumulatedMoments];\n if (this.centered) {\n variables.push(...this.accumulatedMeanGrads);\n }\n return [await this.saveIterations()].concat(variables.map(v => ({ name: v.originalName, tensor: v.variable })));\n }\n async setWeights(weightValues) {\n weightValues = await this.extractIterations(weightValues);\n const variableCount = this.centered ? weightValues.length / 3 : weightValues.length / 2;\n const trainable = false;\n this.accumulatedMeanSquares =\n weightValues.slice(0, variableCount).map(v => ({\n originalName: v.name,\n variable: v.tensor.variable(trainable)\n }));\n this.accumulatedMoments =\n weightValues.slice(variableCount, variableCount * 2)\n .map(v => ({\n originalName: v.name,\n variable: v.tensor.variable(trainable)\n }));\n if (this.centered) {\n this.accumulatedMeanGrads =\n weightValues.slice(variableCount * 2, variableCount * 3)\n .map(v => ({\n originalName: v.name,\n variable: v.tensor.variable(trainable)\n }));\n }\n }\n getConfig() {\n return {\n 'learningRate': this.learningRate,\n 'decay': this.decay,\n 'momentum': this.momentum,\n 'epsilon': this.epsilon,\n 'centered': this.centered\n };\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls(config['learningRate'], config['decay'], config['momentum'], config['epsilon'], config['centered']);\n }\n}\n/** @nocollapse */\nRMSPropOptimizer.className = 'RMSProp'; // Note: Name matters for Python compatibility.\nregisterClass(RMSPropOptimizer);\n//# sourceMappingURL=rmsprop_optimizer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { AdadeltaOptimizer } from './adadelta_optimizer';\nimport { AdagradOptimizer } from './adagrad_optimizer';\nimport { AdamOptimizer } from './adam_optimizer';\nimport { AdamaxOptimizer } from './adamax_optimizer';\nimport { MomentumOptimizer } from './momentum_optimizer';\nimport { RMSPropOptimizer } from './rmsprop_optimizer';\nimport { SGDOptimizer } from './sgd_optimizer';\nexport class OptimizerConstructors {\n /**\n * Constructs a `tf.SGDOptimizer` that uses stochastic gradient descent.\n *\n * ```js\n * // Fit a quadratic function by learning the coefficients a, b, c.\n * const xs = tf.tensor1d([0, 1, 2, 3]);\n * const ys = tf.tensor1d([1.1, 5.9, 16.8, 33.9]);\n *\n * const a = tf.scalar(Math.random()).variable();\n * const b = tf.scalar(Math.random()).variable();\n * const c = tf.scalar(Math.random()).variable();\n *\n * // y = a * x^2 + b * x + c.\n * const f = x => a.mul(x.square()).add(b.mul(x)).add(c);\n * const loss = (pred, label) => pred.sub(label).square().mean();\n *\n * const learningRate = 0.01;\n * const optimizer = tf.train.sgd(learningRate);\n *\n * // Train the model.\n * for (let i = 0; i < 10; i++) {\n * optimizer.minimize(() => loss(f(xs), ys));\n * }\n *\n * // Make predictions.\n * console.log(\n * `a: ${a.dataSync()}, b: ${b.dataSync()}, c: ${c.dataSync()}`);\n * const preds = f(xs).dataSync();\n * preds.forEach((pred, i) => {\n * console.log(`x: ${i}, pred: ${pred}`);\n * });\n * ```\n *\n * @param learningRate The learning rate to use for the SGD algorithm.\n *\n * @doc {heading: 'Training', subheading: 'Optimizers', namespace: 'train'}\n */\n static sgd(learningRate) {\n return new SGDOptimizer(learningRate);\n }\n /**\n * Constructs a `tf.MomentumOptimizer` that uses momentum gradient\n * descent.\n *\n * See\n * [http://proceedings.mlr.press/v28/sutskever13.pdf](\n * http://proceedings.mlr.press/v28/sutskever13.pdf)\n *\n * @param learningRate The learning rate to use for the Momentum gradient\n * descent algorithm.\n * @param momentum The momentum to use for the momentum gradient descent\n * algorithm.\n *\n * @doc {heading: 'Training', subheading: 'Optimizers', namespace: 'train'}\n */\n static momentum(learningRate, momentum, useNesterov = false) {\n return new MomentumOptimizer(learningRate, momentum, useNesterov);\n }\n /**\n * Constructs a `tf.RMSPropOptimizer` that uses RMSProp gradient\n * descent. 
This implementation uses plain momentum and is not centered\n * version of RMSProp.\n *\n * See\n * [http://www.cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf](\n * http://www.cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf)\n *\n * @param learningRate The learning rate to use for the RMSProp gradient\n * descent algorithm.\n * @param decay The discounting factor for the history/coming gradient.\n * @param momentum The momentum to use for the RMSProp gradient descent\n * algorithm.\n * @param epsilon Small value to avoid zero denominator.\n * @param centered If true, gradients are normalized by the estimated\n * variance of the gradient.\n *\n * @doc {heading: 'Training', subheading: 'Optimizers', namespace: 'train'}\n */\n static rmsprop(learningRate, decay = .9, momentum = 0.0, epsilon = null, centered = false) {\n return new RMSPropOptimizer(learningRate, decay, momentum, epsilon, centered);\n }\n /**\n * Constructs a `tf.AdamOptimizer` that uses the Adam algorithm.\n * See [https://arxiv.org/abs/1412.6980](https://arxiv.org/abs/1412.6980)\n *\n * @param learningRate The learning rate to use for the Adam gradient\n * descent algorithm.\n * @param beta1 The exponential decay rate for the 1st moment estimates.\n * @param beta2 The exponential decay rate for the 2nd moment estimates.\n * @param epsilon A small constant for numerical stability.\n *\n * @doc {heading: 'Training', subheading: 'Optimizers', namespace: 'train'}\n */\n static adam(learningRate = 0.001, beta1 = 0.9, beta2 = 0.999, epsilon = null) {\n return new AdamOptimizer(learningRate, beta1, beta2, epsilon);\n }\n /**\n * Constructs a `tf.AdadeltaOptimizer` that uses the Adadelta algorithm.\n * See [https://arxiv.org/abs/1212.5701](https://arxiv.org/abs/1212.5701)\n *\n * @param learningRate The learning rate to use for the Adadelta gradient\n * descent algorithm.\n * @param rho The learning rate decay over each update.\n * @param epsilon A constant epsilon used to better condition the grad\n * update.\n *\n * @doc {heading: 'Training', subheading: 'Optimizers', namespace: 'train'}\n */\n static adadelta(learningRate = .001, rho = .95, epsilon = null) {\n return new AdadeltaOptimizer(learningRate, rho, epsilon);\n }\n /**\n * Constructs a `tf.AdamaxOptimizer` that uses the Adamax algorithm.\n * See [https://arxiv.org/abs/1412.6980](https://arxiv.org/abs/1412.6980)\n *\n * @param learningRate The learning rate to use for the Adamax gradient\n * descent algorithm.\n * @param beta1 The exponential decay rate for the 1st moment estimates.\n * @param beta2 The exponential decay rate for the 2nd moment estimates.\n * @param epsilon A small constant for numerical stability.\n * @param decay The learning rate decay over each update.\n *\n * @doc {heading: 'Training', subheading: 'Optimizers', namespace: 'train'}\n */\n static adamax(learningRate = 0.002, beta1 = 0.9, beta2 = 0.999, epsilon = null, decay = 0.0) {\n return new AdamaxOptimizer(learningRate, beta1, beta2, epsilon, decay);\n }\n /**\n * Constructs a `tf.AdagradOptimizer` that uses the Adagrad algorithm.\n * See\n * [http://www.jmlr.org/papers/volume12/duchi11a/duchi11a.pdf](\n * http://www.jmlr.org/papers/volume12/duchi11a/duchi11a.pdf)\n * or\n * [http://ruder.io/optimizing-gradient-descent/index.html#adagrad](\n * http://ruder.io/optimizing-gradient-descent/index.html#adagrad)\n *\n * @param learningRate The learning rate to use for the Adagrad gradient\n * descent algorithm.\n * @param initialAccumulatorValue Starting value for the 
accumulators, must be\n * positive.\n *\n * @doc {heading: 'Training', subheading: 'Optimizers', namespace: 'train'}\n */\n static adagrad(learningRate, initialAccumulatorValue = 0.1) {\n return new AdagradOptimizer(learningRate, initialAccumulatorValue);\n }\n}\n//# sourceMappingURL=optimizer_constructors.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// So typings can propagate.\nimport { AdadeltaOptimizer } from './optimizers/adadelta_optimizer';\nimport { AdagradOptimizer } from './optimizers/adagrad_optimizer';\nimport { AdamOptimizer } from './optimizers/adam_optimizer';\nimport { AdamaxOptimizer } from './optimizers/adamax_optimizer';\nimport { MomentumOptimizer } from './optimizers/momentum_optimizer';\nimport { OptimizerConstructors } from './optimizers/optimizer_constructors';\nimport { RMSPropOptimizer } from './optimizers/rmsprop_optimizer';\nimport { SGDOptimizer } from './optimizers/sgd_optimizer';\n// tslint:disable-next-line:no-unused-expression\n[MomentumOptimizer, SGDOptimizer, AdadeltaOptimizer, AdagradOptimizer,\n RMSPropOptimizer, AdamaxOptimizer, AdamOptimizer];\nexport const train = {\n sgd: OptimizerConstructors.sgd,\n momentum: OptimizerConstructors.momentum,\n adadelta: OptimizerConstructors.adadelta,\n adagrad: OptimizerConstructors.adagrad,\n rmsprop: OptimizerConstructors.rmsprop,\n adamax: OptimizerConstructors.adamax,\n adam: OptimizerConstructors.adam\n};\n//# sourceMappingURL=train.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nconst delayCallback = (() => {\n if (typeof requestAnimationFrame !== 'undefined') {\n return requestAnimationFrame;\n }\n else if (typeof setImmediate !== 'undefined') {\n return setImmediate;\n }\n return (f) => f(); // no delays\n})();\n/**\n * Returns a promise that resolve when a requestAnimationFrame has completed.\n *\n * On Node.js this uses setImmediate instead of requestAnimationFrame.\n *\n * This is simply a sugar method so that users can do the following:\n * `await tf.nextFrame();`\n *\n * @doc {heading: 'Performance', subheading: 'Timing'}\n */\nfunction nextFrame() {\n return new Promise(resolve => delayCallback(() => resolve()));\n}\nexport { nextFrame };\n//# sourceMappingURL=browser_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// Returns the image center in pixels.\nexport function getImageCenter(center, imageHeight, imageWidth) {\n const centerX = imageWidth * (typeof center === 'number' ? center : center[0]);\n const centerY = imageHeight * (typeof center === 'number' ? center : center[1]);\n return [centerX, centerY];\n}\n//# sourceMappingURL=rotate_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * Gets the new shape of the input Tensor after it's been reshaped\n * to:\n * [blockShape[0], ..., blockShape[M-1], batch / prod(blockShape),\n * inputShape[1], ..., inputShape[N-1]]\n *\n * See step 1: https://www.tensorflow.org/api_docs/python/tf/batch_to_space_nd\n */\nexport function getReshaped(inputShape, blockShape, prod, batchToSpace = true) {\n let reshaped = [];\n if (batchToSpace) {\n reshaped = reshaped.concat(blockShape.slice(0));\n reshaped.push(inputShape[0] / prod);\n reshaped = reshaped.concat(inputShape.slice(1));\n }\n else {\n reshaped = reshaped.concat(inputShape[0]);\n const spatialLength = blockShape.length;\n for (let i = 0; i < spatialLength; ++i) {\n reshaped =\n reshaped.concat([inputShape[i + 1] / blockShape[i], blockShape[i]]);\n }\n reshaped = reshaped.concat(inputShape.slice(spatialLength + 1));\n }\n return reshaped;\n}\n/**\n * Gets the permutation that will transpose the dimensions of the\n * reshaped tensor to shape:\n *\n * [batch / prod(block_shape),inputShape[1], blockShape[0], ...,\n * inputShape[M], blockShape[M-1],inputShape[M+1], ..., inputShape[N-1]]\n *\n * see step 2: https://www.tensorflow.org/api_docs/python/tf/batch_to_space_nd\n */\nexport function getPermuted(reshapedRank, blockShapeRank, batchToSpace = true) {\n const permuted = [];\n if (batchToSpace) {\n permuted.push(blockShapeRank);\n for (let i = blockShapeRank + 1; i < reshapedRank; ++i) {\n if (i <= 2 * blockShapeRank) {\n permuted.push(i);\n permuted.push(i - (blockShapeRank + 1));\n }\n else {\n permuted.push(i);\n }\n }\n }\n else {\n const permutedBeforeBatch = [];\n const permutedAfterBatch = [];\n for (let i = 1; i < reshapedRank; ++i) {\n if (i >= blockShapeRank * 2 + 1 || i % 2 === 1) {\n permutedAfterBatch.push(i);\n }\n else {\n permutedBeforeBatch.push(i);\n }\n }\n permuted.push(...permutedBeforeBatch);\n permuted.push(0);\n permuted.push(...permutedAfterBatch);\n }\n return permuted;\n}\n/**\n * Gets the shape of the reshaped and permuted input Tensor before any cropping\n * is applied. 
The new shape will be:\n *\n * [batch / prod(blockShape),inputShape[1] * blockShape[0], ...,\n * inputShape[M] * blockShape[M-1],inputShape[M+1], ..., inputShape[N-1]]\n *\n * See step 3: https://www.tensorflow.org/api_docs/python/tf/batch_to_space_nd\n */\nexport function getReshapedPermuted(inputShape, blockShape, prod, batchToSpace = true) {\n const reshapedPermuted = [];\n if (batchToSpace) {\n reshapedPermuted.push(inputShape[0] / prod);\n }\n else {\n reshapedPermuted.push(inputShape[0] * prod);\n }\n for (let i = 1; i < inputShape.length; ++i) {\n if (i <= blockShape.length) {\n if (batchToSpace) {\n reshapedPermuted.push(blockShape[i - 1] * inputShape[i]);\n }\n else {\n reshapedPermuted.push(inputShape[i] / blockShape[i - 1]);\n }\n }\n else {\n reshapedPermuted.push(inputShape[i]);\n }\n }\n return reshapedPermuted;\n}\n/**\n * Converts the crops argument into the beginning coordinates of a slice\n * operation.\n */\nexport function getSliceBeginCoords(crops, blockShape) {\n const sliceBeginCoords = [0];\n for (let i = 0; i < blockShape; ++i) {\n sliceBeginCoords.push(crops[i][0]);\n }\n return sliceBeginCoords;\n}\n/**\n * Converts the crops argument into the size of a slice operation. When\n * combined with getSliceBeginCoords this function allows the reshaped and\n * permuted Tensor to be cropped to its final output shape of:\n *\n * inputShape[1] * blockShape[0] - crops[0,0] - crops[0,1], ...,\n * inputShape[M] * blockShape[M-1] -crops[M-1,0] -\n * crops[M-1,1],inputShape[M+1], ..., inputShape[N-1]]\n *\n * See step 4: https://www.tensorflow.org/api_docs/python/tf/batch_to_space_nd\n */\nexport function getSliceSize(uncroppedShape, crops, blockShape) {\n const sliceSize = uncroppedShape.slice(0, 1);\n for (let i = 0; i < blockShape; ++i) {\n sliceSize.push(uncroppedShape[i + 1] - crops[i][0] - crops[i][1]);\n }\n return sliceSize;\n}\n//# sourceMappingURL=array_ops_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const SELU_SCALEALPHA = 1.7580993408473768599402175208123;\nexport const SELU_SCALE = 1.0507009873554804934193349852946;\n//# sourceMappingURL=selu_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const ERF_P = 0.3275911;\nexport const ERF_A1 = 0.254829592;\nexport const ERF_A2 = -0.284496736;\nexport const ERF_A3 = 1.421413741;\nexport const ERF_A4 = -1.453152027;\nexport const ERF_A5 = 1.061405429;\n//# sourceMappingURL=erf_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from './environment';\nexport function warn(...msg) {\n if (!env().getBool('IS_TEST')) {\n console.warn(...msg);\n }\n}\nexport function log(...msg) {\n if (!env().getBool('IS_TEST')) {\n console.log(...msg);\n }\n}\n//# sourceMappingURL=log.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * Merges real and imaginary Float32Arrays into a single complex Float32Array.\n *\n * The memory layout is interleaved as follows:\n * real: [r0, r1, r2]\n * imag: [i0, i1, i2]\n * complex: [r0, i0, r1, i1, r2, i2]\n *\n * This is the inverse of splitRealAndImagArrays.\n *\n * @param real The real values of the complex tensor values.\n * @param imag The imag values of the complex tensor values.\n * @returns A complex tensor as a Float32Array with merged values.\n */\nexport function mergeRealAndImagArrays(real, imag) {\n if (real.length !== imag.length) {\n throw new Error(`Cannot merge real and imag arrays of different lengths. 
real:` +\n `${real.length}, imag: ${imag.length}.`);\n }\n const result = new Float32Array(real.length * 2);\n for (let i = 0; i < result.length; i += 2) {\n result[i] = real[i / 2];\n result[i + 1] = imag[i / 2];\n }\n return result;\n}\n/**\n * Splits a complex Float32Array into real and imag parts.\n *\n * The memory layout is interleaved as follows:\n * complex: [r0, i0, r1, i1, r2, i2]\n * real: [r0, r1, r2]\n * imag: [i0, i1, i2]\n *\n * This is the inverse of mergeRealAndImagArrays.\n *\n * @param complex The complex tensor values.\n * @returns An object with real and imag Float32Array components of the complex\n * tensor.\n */\nexport function splitRealAndImagArrays(complex) {\n const real = new Float32Array(complex.length / 2);\n const imag = new Float32Array(complex.length / 2);\n for (let i = 0; i < complex.length; i += 2) {\n real[i / 2] = complex[i];\n imag[i / 2] = complex[i + 1];\n }\n return { real, imag };\n}\n/**\n * Extracts even indexed complex values in the given array.\n * @param complex The complex tensor values\n */\nexport function complexWithEvenIndex(complex) {\n const len = Math.ceil(complex.length / 4);\n const real = new Float32Array(len);\n const imag = new Float32Array(len);\n for (let i = 0; i < complex.length; i += 4) {\n real[Math.floor(i / 4)] = complex[i];\n imag[Math.floor(i / 4)] = complex[i + 1];\n }\n return { real, imag };\n}\n/**\n * Extracts odd indexed comple values in the given array.\n * @param complex The complex tensor values\n */\nexport function complexWithOddIndex(complex) {\n const len = Math.floor(complex.length / 4);\n const real = new Float32Array(len);\n const imag = new Float32Array(len);\n for (let i = 2; i < complex.length; i += 4) {\n real[Math.floor(i / 4)] = complex[i];\n imag[Math.floor(i / 4)] = complex[i + 1];\n }\n return { real, imag };\n}\n/**\n * Get the map representing a complex value in the given array.\n * @param complex The complex tensor values.\n * @param index An index of the target complex value.\n */\nexport function getComplexWithIndex(complex, index) {\n const real = complex[index * 2];\n const imag = complex[index * 2 + 1];\n return { real, imag };\n}\n/**\n * Insert a given complex value into the TypedArray.\n * @param data The array in which the complex value is inserted.\n * @param c The complex value to be inserted.\n * @param index An index of the target complex value.\n */\nexport function assignToTypedArray(data, real, imag, index) {\n data[index * 2] = real;\n data[index * 2 + 1] = imag;\n}\n/**\n * Make the list of exponent terms used by FFT.\n */\nexport function exponents(n, inverse) {\n const real = new Float32Array(n / 2);\n const imag = new Float32Array(n / 2);\n for (let i = 0; i < Math.ceil(n / 2); i++) {\n const x = (inverse ? 2 : -2) * Math.PI * (i / n);\n real[i] = Math.cos(x);\n imag[i] = Math.sin(x);\n }\n return { real, imag };\n}\n/**\n * Make the exponent term used by FFT.\n */\nexport function exponent(k, n, inverse) {\n const x = (inverse ? 2 : -2) * Math.PI * (k / n);\n const real = Math.cos(x);\n const imag = Math.sin(x);\n return { real, imag };\n}\n//# sourceMappingURL=complex_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { cast } from '../ops/cast';\nimport { scalar } from '../ops/scalar';\nimport { tensor1d } from '../ops/tensor1d';\nimport { zeros } from '../ops/zeros';\nimport { hasEncodingLoss, makeZerosTypedArray } from '../util';\n// Utilities needed by backend consumers of tf-core.\nexport * from '../ops/axis_util';\nexport * from '../ops/broadcast_util';\nexport * from '../ops/concat_util';\nexport * from '../ops/conv_util';\nexport * from '../ops/fused_util';\nexport * from '../ops/reduce_util';\nimport * as slice_util from '../ops/slice_util';\nexport { slice_util };\nexport { upcastType } from '../types';\nexport * from '../ops/rotate_util';\nexport * from '../ops/array_ops_util';\nexport * from '../ops/gather_nd_util';\nexport * from '../ops/scatter_nd_util';\nexport * from '../ops/selu_util';\nexport * from '../ops/fused_util';\nexport * from '../ops/erf_util';\nexport * from '../log';\nexport * from '../backends/complex_util';\nexport * from '../ops/split_util';\nimport * as segment_util from '../ops/segment_util';\nexport { segment_util };\nexport function castTensor(x, dtype, backend) {\n if (dtype === 'complex64') {\n if (x.dtype === 'complex64') {\n return x.clone();\n }\n const zerosTensor = zeros(x.shape);\n const floatX = cast(x, 'float32');\n const result = backend.complex(floatX, zerosTensor);\n zerosTensor.dispose();\n floatX.dispose();\n return result;\n }\n if (!hasEncodingLoss(x.dtype, dtype)) {\n // We don't change the underlying data, since we cast to higher\n // precision.\n return ENGINE.makeTensorFromDataId(x.dataId, x.shape, dtype);\n }\n if (x.dtype === 'complex64') {\n const real = backend.real(x);\n const result = cast(real, dtype);\n real.dispose();\n return result;\n }\n if (dtype === 'int32') {\n return backend.int(x);\n }\n else if (dtype === 'bool') {\n const zero = scalar(0, x.dtype);\n const result = backend.notEqual(x, zero);\n zero.dispose();\n return result;\n }\n else {\n throw new Error(`Error in Cast: failed to cast ${x.dtype} to ${dtype}`);\n }\n}\nexport function reshapeTensor(x, shape) {\n return ENGINE.makeTensorFromDataId(x.dataId, shape, x.dtype);\n}\nexport function linspaceImpl(start, stop, num) {\n const step = (stop - start) / (num - 1);\n const values = makeZerosTypedArray(num, 'float32');\n values[0] = start;\n for (let i = 1; i < values.length; i++) {\n values[i] = values[i - 1] + step;\n }\n return tensor1d(values, 'float32');\n}\n//# sourceMappingURL=backend_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { slice } from '../ops/slice';\n// TODO(annxingyuan): Use this helper in WASM Split kernel once intermediate\n// kernels have been modularized in WebGL and CPU\n// https://github.com/tensorflow/tfjs/issues/2822.\n/** Shared implementation of the split kernel across WebGL and CPU. */\nexport function split(x, sizeSplits, axis) {\n const begin = new Array(x.rank).fill(0);\n const size = x.shape.slice();\n return sizeSplits.map(s => {\n const sliceSize = [...size];\n sliceSize[axis] = s;\n const sliceT = slice(x, begin, sliceSize);\n begin[axis] += s;\n return sliceT;\n });\n}\n//# sourceMappingURL=split_shared.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * An implementation of the tile kernel shared between webgl and cpu for string\n * tensors only.\n */\nimport { buffer } from '../ops/buffer';\nexport function tile(xBuf, reps) {\n const newShape = new Array(xBuf.rank);\n for (let i = 0; i < newShape.length; i++) {\n newShape[i] = xBuf.shape[i] * reps[i];\n }\n const result = buffer(newShape, xBuf.dtype);\n for (let i = 0; i < result.values.length; ++i) {\n const newLoc = result.indexToLoc(i);\n const originalLoc = new Array(xBuf.rank);\n for (let j = 0; j < originalLoc.length; j++) {\n originalLoc[j] = newLoc[j] % xBuf.shape[j];\n }\n const originalIndex = xBuf.locToIndex(originalLoc);\n result.values[i] = xBuf.values[originalIndex];\n }\n return result.toTensor();\n}\n//# sourceMappingURL=tile_impl.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { GreaterEqual } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const greaterEqualGradConfig = {\n kernelName: GreaterEqual,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved) => {\n const [a, b] = saved;\n return { a: () => zerosLike(a), b: () => zerosLike(b) };\n }\n};\n//# sourceMappingURL=GreaterEqual_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Identity } from '../kernel_names';\nimport { cast } from '../ops/cast';\nexport const identityGradConfig = {\n kernelName: Identity,\n gradFunc: (dy) => {\n return { x: () => cast(dy, 'float32') };\n }\n};\n//# sourceMappingURL=Identity_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { IsFinite } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const isFiniteGradConfig = {\n kernelName: IsFinite,\n gradFunc: (dy) => {\n // TODO(nsthorat): Let gradients be null for cases where we want to stop\n // backpropgation.\n return { x: () => zerosLike(dy) };\n }\n};\n//# sourceMappingURL=IsFinite_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { IsInf } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const isInfGradConfig = {\n kernelName: IsInf,\n gradFunc: (dy) => {\n // TODO(nsthorat): Let gradients be null for cases where we want to stop\n // backpropgation.\n return { x: () => zerosLike(dy) };\n }\n};\n//# sourceMappingURL=IsInf_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { IsNan } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const isNanGradConfig = {\n kernelName: IsNan,\n gradFunc: (dy) => {\n // TODO(nsthorat): Let gradients be null for cases where we want to stop\n // backpropgation.\n return { x: () => zerosLike(dy) };\n }\n};\n//# sourceMappingURL=IsNan_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Log1p } from '../kernel_names';\nimport { add } from '../ops/add';\nimport { div } from '../ops/div';\nexport const log1pGradConfig = {\n kernelName: Log1p,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => div(dy, add(x, 1)) };\n }\n};\n//# sourceMappingURL=Log1p_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Log } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { div } from '../ops/div';\nexport const logGradConfig = {\n kernelName: Log,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => div(dy, cast(x, 'float32')) };\n }\n};\n//# sourceMappingURL=Log_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { LogSoftmax } from '../kernel_names';\nimport { exp } from '../ops/exp';\nimport { mul } from '../ops/mul';\nimport { sub } from '../ops/sub';\nimport { sum } from '../ops/sum';\nexport const logSoftmaxGradConfig = {\n kernelName: LogSoftmax,\n inputsToSave: [],\n outputsToSave: [true],\n gradFunc: (dy, saved, attrs) => {\n const [value] = saved;\n const { axis } = attrs;\n return {\n logits: () => {\n const keepDims = true;\n const softmax = exp(value);\n return sub(dy, mul(sum(dy, axis, keepDims), softmax));\n }\n };\n }\n};\n//# sourceMappingURL=LogSoftmax_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { LRNBackprop } from '../kernel_names';\nimport { op } from './operation';\nfunction localResponseNormalizationBackprop_(x, y, dy, depthRadius = 5, bias = 1, alpha = 1, beta = 0.5) {\n const forward = backend => backend.LRNGrad(dy, x, y, depthRadius, bias, alpha, beta);\n const inputs = { x, y, dy };\n const attrs = { depthRadius, bias, alpha, beta };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, LRNBackprop, attrs);\n}\nexport const localResponseNormalizationBackprop = op({ localResponseNormalizationBackprop_ });\n//# sourceMappingURL=local_response_normalization_backprop.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { LRN } from '../kernel_names';\nimport { localResponseNormalizationBackprop } from '../ops/local_response_normalization_backprop';\nexport const lrnGradConfig = {\n kernelName: LRN,\n inputsToSave: ['x'],\n outputsToSave: [true],\n gradFunc: (dy, saved, attrs) => {\n const [x, y] = saved;\n const { depthRadius, bias, alpha, beta } = attrs;\n return {\n x: () => localResponseNormalizationBackprop(x, y, dy, depthRadius, bias, alpha, beta)\n };\n }\n};\n//# sourceMappingURL=LRN_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as axis_util from '../ops/axis_util';\nimport { cast } from '../ops/cast';\nimport { equal } from '../ops/equal';\nimport { mul } from '../ops/mul';\nimport { reshape } from '../ops/reshape';\n/**\n * Gradient helper function for the min and max operations.\n */\nexport function gradForMinAndMax(dy, y, xOrig, origAxes) {\n if (y.rank < xOrig.rank) {\n y = reshape(y, axis_util.expandShapeToKeepDim(y.shape, origAxes));\n }\n if (dy.rank < xOrig.rank) {\n dy = reshape(dy, axis_util.expandShapeToKeepDim(dy.shape, origAxes));\n }\n return {\n x: () => {\n const dx = mul(dy, cast(equal(xOrig, y), dy.dtype));\n return dx;\n }\n };\n}\n//# sourceMappingURL=min_max_grad_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Max } from '../kernel_names';\nimport * as util from '../util';\nimport { gradForMinAndMax } from './min_max_grad_util';\nexport const maxGradConfig = {\n kernelName: Max,\n inputsToSave: ['x'],\n outputsToSave: [true],\n gradFunc: (dy, saved, attrs) => {\n const maxAttrs = attrs;\n const { reductionIndices } = maxAttrs;\n const x = saved[0];\n const y = saved[1];\n const origAxes = util.parseAxisParam(reductionIndices, x.shape);\n const maxGrad = gradForMinAndMax(dy, y, x, origAxes);\n return {\n x: () => {\n return maxGrad['x']();\n }\n };\n }\n};\n//# sourceMappingURL=Max_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Maximum } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { greaterEqual } from '../ops/greater_equal';\nimport { less } from '../ops/less';\nimport { mul } from '../ops/mul';\nexport const maximumGradConfig = {\n kernelName: Maximum,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved) => {\n const [a, b] = saved;\n const derA = () => mul(dy, cast(greaterEqual(a, b), 'float32'));\n const derB = () => mul(dy, cast(less(a, b), 'float32'));\n return { a: derA, b: derB };\n }\n};\n//# sourceMappingURL=Maximum_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { MaxPool3DBackprop } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the backprop of a 3d max pool.\n *\n * @param dy The dy error, of rank 5 of shape\n * [batchSize, depth, height, width, channels].\n * assumed.\n * @param input The original input image, of rank 5 or rank 4 of shape\n * [batchSize, depth, height, width, channels].\n * @param output The original output image, of rank 5 of shape\n * [batchSize, outDepth, outHeight, outWidth, channels].\n * @param filterSize The filter size:\n * `[filterDepth, filterHeight, filterWidth]`.\n * `filterSize` is a single number,\n * then `filterDepth == filterHeight == filterWidth`.\n * @param strides The strides of the pooling:\n * `[strideDepth, strideHeight, strideWidth]`. If\n * `strides` is a single number, then `strideHeight == strideWidth`.\n * @param dilations Deprecated, this field will be gone in v3.0.0.\n * The dilation rates: `[dilationDepth, dilationHeight, dilationWidth]`\n * in which we sample input values across the depth, height and width\n * dimensions in dilated pooling.\n * Defaults to `[1, 1, 1]`. If `dilations` is a single number,\n * then `dilationDepth == dilationHeight == dilationWidth`.\n * If it is greater than 1, then all values of `strides` must be 1.\n * @param pad A string from: 'same', 'valid'. 
The type of padding algorithm\n * used in the forward prop of the op.\n * @param dimRoundingMode A string from: 'ceil', 'round', 'floor'. The\n * rounding mode used when computing output dimensions if pad is a\n * number. If none is provided, it will not round and error if the output\n * is of fractional size.\n */\nfunction maxPool3dBackprop_(dy, input, output, filterSize, strides, dilations = [1, 1, 1], pad, dimRoundingMode) {\n const $dy = convertToTensor(dy, 'dy', 'maxPool3dBackprop');\n const $input = convertToTensor(input, 'input', 'maxPool3dBackprop');\n const $output = convertToTensor(output, 'output', 'maxPool3dBackprop');\n let dy5D = $dy;\n let input5D = $input;\n let output5D = $output;\n let reshapedTo5D = false;\n if ($input.rank === 4) {\n reshapedTo5D = true;\n dy5D = reshape($dy, [1, $dy.shape[0], $dy.shape[1], $dy.shape[2], $dy.shape[3]]);\n input5D = reshape($input, [\n 1, $input.shape[0], $input.shape[1], $input.shape[2], $input.shape[3]\n ]);\n output5D = reshape($output, [\n 1, $output.shape[0], $output.shape[1], $output.shape[2], $output.shape[3]\n ]);\n }\n util.assert(dy5D.rank === 5, () => `Error in maxPool3dBackprop: dy must be rank 5 but got rank ` +\n `${dy5D.rank}.`);\n util.assert(input5D.rank === 5, () => `Error in maxPool3dBackprop: input must be rank 5 but got rank ` +\n `${input5D.rank}.`);\n util.assert(output5D.rank === 5, () => `Error in maxPool3dBackprop: output must be rank 5 but got rank ` +\n `${output5D.rank}.`);\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in maxPool3dBackprop: Either strides or dilations ' +\n `must be 1. Got strides ${strides} and dilations '${dilations}'`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in maxPool3dBackprop: pad must be an integer when ` +\n `using, dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const forward = backend => {\n const convInfo = conv_util.computePool3DInfo(input5D.shape, filterSize, strides, dilations, pad, dimRoundingMode);\n return backend.maxPool3dBackprop(dy5D, input5D, output5D, convInfo);\n };\n const inputs = { dy: dy5D, input: input5D, output: output5D };\n const attrs = { filterSize, strides, dilations, pad, dimRoundingMode };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, MaxPool3DBackprop, attrs);\n if (reshapedTo5D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3], res.shape[4]]);\n }\n return res;\n}\nexport const maxPool3dBackprop = op({ maxPool3dBackprop_ });\n//# sourceMappingURL=max_pool_3d_backprop.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { MaxPool3D } from '../kernel_names';\nimport { maxPool3dBackprop } from '../ops/max_pool_3d_backprop';\nexport const maxPool3DGradConfig = {\n kernelName: MaxPool3D,\n inputsToSave: ['x'],\n outputsToSave: [true],\n gradFunc: (dy, saved, attrs) => {\n const [x, y] = saved;\n const { filterSize, strides, dilations, pad, dimRoundingMode } = attrs;\n const $dilations = dilations == null ? [1, 1, 1] : dilations;\n return {\n x: () => maxPool3dBackprop(dy, x, y, filterSize, strides, $dilations, pad, dimRoundingMode)\n };\n }\n};\n//# sourceMappingURL=MaxPool3D_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { MaxPoolBackprop } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\n/**\n * Computes the backprop of a 2D max pool.\n *\n * @param dy The dy error, of rank 4 or rank 3 of shape\n * [batchSize, height, width, channels]. If rank 3, batch of 1 is\n * assumed.\n * @param input The original input image, of rank 4, of shape\n * [batchSize, height, width, channels].\n * @param output The original output image, of rank 4, of shape\n * [batchSize, outHeight, outWidth, channels].\n * @param filterSize The filter size: `[filterHeight, filterWidth]`. If\n * `filterSize` is a single number, then `filterHeight == filterWidth`.\n * @param strides The strides of the pooling: `[strideHeight, strideWidth]`. If\n * `strides` is a single number, then `strideHeight == strideWidth`.\n * @param pad A string from: 'same', 'valid'. The type of padding algorithm\n * used in the forward prop of the op.\n * @param dimRoundingMode A string from: 'ceil', 'round', 'floor'. The\n * rounding mode used when computing output dimensions if pad is a\n * number. 
If none is provided, it will not round and error if the output\n * is of fractional size.\n */\nfunction maxPoolBackprop_(dy, input, output, filterSize, strides, pad, dimRoundingMode) {\n const $dy = convertToTensor(dy, 'dy', 'maxPoolBackprop');\n const $input = convertToTensor(input, 'input', 'maxPoolBackprop');\n const $output = convertToTensor(output, 'output', 'maxPoolBackprop');\n util.assert($input.rank === $dy.rank, () => `Rank of input (${$input.rank}) does not match rank of dy ` +\n `(${$dy.rank})`);\n util.assert($dy.rank === 4, () => `Error in maxPoolBackprop: dy must be rank 4 but got rank ` +\n `${$dy.rank}.`);\n util.assert($input.rank === 4, () => `Error in maxPoolBackprop: input must be rank 4 but got rank ` +\n `${$input.rank}.`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in maxPoolBackprop: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const forward = backend => {\n const convInfo = conv_util.computePool2DInfo($input.shape, filterSize, strides, 1 /* dilations */, pad, dimRoundingMode);\n return backend.maxPoolBackprop($dy, $input, $output, convInfo);\n };\n const inputs = { dy: $dy, input: $input, output: $output };\n const attrs = { filterSize, strides, pad, dimRoundingMode };\n return ENGINE.runKernelFunc(forward, inputs, null, MaxPoolBackprop, attrs);\n}\nexport const maxPoolBackprop = op({ maxPoolBackprop_ });\n//# sourceMappingURL=max_pool_backprop.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { MaxPool } from '../kernel_names';\nimport { maxPoolBackprop } from '../ops/max_pool_backprop';\nexport const maxPoolGradConfig = {\n kernelName: MaxPool,\n inputsToSave: ['x'],\n outputsToSave: [true],\n gradFunc: (dy, saved, attrs) => {\n const [x, y] = saved;\n const { filterSize, strides, pad } = attrs;\n return {\n x: () => maxPoolBackprop(dy, x, y, filterSize, strides, pad)\n };\n }\n};\n//# sourceMappingURL=MaxPool_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Min } from '../kernel_names';\nimport * as util from '../util';\nimport { gradForMinAndMax } from './min_max_grad_util';\nexport const minGradConfig = {\n kernelName: Min,\n inputsToSave: ['x'],\n outputsToSave: [true],\n gradFunc: (dy, saved, attrs) => {\n const minAttrs = attrs;\n const { axis } = minAttrs;\n const [x, y] = saved;\n const origAxes = util.parseAxisParam(axis, x.shape);\n const minGrad = gradForMinAndMax(dy, y, x, origAxes);\n return {\n x: () => {\n return minGrad['x']();\n }\n };\n }\n};\n//# sourceMappingURL=Min_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Minimum } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { greater } from '../ops/greater';\nimport { lessEqual } from '../ops/less_equal';\nimport { mul } from '../ops/mul';\nexport const minimumGradConfig = {\n kernelName: Minimum,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved) => {\n const [a, b] = saved;\n const derA = () => mul(dy, cast(lessEqual(a, b), 'float32'));\n const derB = () => mul(dy, cast(greater(a, b), 'float32'));\n return { a: derA, b: derB };\n }\n};\n//# sourceMappingURL=Minimum_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { MirrorPad } from '../kernel_names';\nimport { slice } from '../ops/slice';\nexport const mirrorPadGradConfig = {\n kernelName: MirrorPad,\n inputsToSave: ['x'],\n gradFunc: (dy, saved, attrs) => {\n // Pad introduces values around the original tensor, so the gradient\n // slices the original shape out of the gradient.\n const x = saved[0];\n const { paddings } = attrs;\n const begin = paddings.map(p => p[0]);\n return { x: () => slice(dy, begin, x.shape) };\n }\n};\n//# sourceMappingURL=MirrorPad_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Mod } from '../kernel_names';\nimport { assertAndGetBroadcastShape, getReductionAxes } from '../ops/broadcast_util';\nimport { div } from '../ops/div';\nimport { floor } from '../ops/floor';\nimport { mul } from '../ops/mul';\nimport { neg } from '../ops/neg';\nimport { reshape } from '../ops/reshape';\nimport { sum } from '../ops/sum';\nexport const modGradConfig = {\n kernelName: Mod,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved) => {\n const [a, b] = saved;\n const outShape = assertAndGetBroadcastShape(a.shape, b.shape);\n const derA = () => {\n const reduceAxes = getReductionAxes(a.shape, outShape);\n if (reduceAxes.length > 0) {\n return reshape(sum(dy, reduceAxes), a.shape);\n }\n return dy;\n };\n const derB = () => {\n const res = mul(dy, neg(floor(div(a, b))));\n const reduceAxes = getReductionAxes(b.shape, outShape);\n if (reduceAxes.length > 0) {\n return reshape(sum(res, reduceAxes), b.shape);\n }\n return res;\n };\n return { a: derA, b: derB };\n }\n};\n//# sourceMappingURL=Mod_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Multiply } from '../kernel_names';\nimport { assertAndGetBroadcastShape, getReductionAxes } from '../ops/broadcast_util';\nimport { cast } from '../ops/cast';\nimport { mul } from '../ops/mul';\nimport { reshape } from '../ops/reshape';\nimport { sum } from '../ops/sum';\nexport const multiplyGradConfig = {\n kernelName: Multiply,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved) => {\n const [a, b] = saved;\n const outShape = assertAndGetBroadcastShape(a.shape, b.shape);\n const derA = () => {\n const res = mul(dy, cast(b, 'float32'));\n const reduceAxes = getReductionAxes(a.shape, outShape);\n if (reduceAxes.length > 0) {\n return reshape(sum(res, reduceAxes), a.shape);\n }\n return res;\n };\n const derB = () => {\n const res = mul(dy, cast(a, 'float32'));\n const reduceAxes = getReductionAxes(b.shape, outShape);\n if (reduceAxes.length > 0) {\n return reshape(sum(res, reduceAxes), b.shape);\n }\n return res;\n };\n return { a: derA, b: derB };\n }\n};\n//# sourceMappingURL=Multiply_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Negate } from '../kernel_names';\nimport { neg } from '../ops/neg';\nexport const negateGradConfig = {\n kernelName: Negate,\n gradFunc: (dy) => {\n return { x: () => neg(dy) };\n }\n};\n//# sourceMappingURL=Negate_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { OneHot } from '../kernel_names';\nimport { zeros } from '../ops/zeros';\nexport const oneHotGradConfig = {\n kernelName: OneHot,\n inputsToSave: ['indices'],\n gradFunc: (dy, saved) => {\n const indices = saved[0];\n return { indices: () => zeros(indices.shape, 'float32') };\n }\n};\n//# sourceMappingURL=OneHot_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { OnesLike } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const onesLikeGradConfig = {\n kernelName: OnesLike,\n gradFunc: (dy) => {\n return { x: () => zerosLike(dy) };\n }\n};\n//# sourceMappingURL=OnesLike_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { PadV2 } from '../kernel_names';\nimport { slice } from '../ops/slice';\nexport const padV2GradConfig = {\n kernelName: PadV2,\n inputsToSave: ['x'],\n gradFunc: (dy, saved, attrs) => {\n // Pad introduces values around the original tensor, so the gradient\n // slices the original shape out of the gradient.\n const x = saved[0];\n const { paddings } = attrs;\n const begin = paddings.map(p => p[0]);\n return { x: () => slice(dy, begin, x.shape) };\n }\n};\n//# sourceMappingURL=PadV2_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Pow } from '../kernel_names';\nimport * as broadcast_util from '../ops/broadcast_util';\nimport { cast } from '../ops/cast';\nimport { greater } from '../ops/greater';\nimport { log } from '../ops/log';\nimport { mul } from '../ops/mul';\nimport { pow } from '../ops/pow';\nimport { reshape } from '../ops/reshape';\nimport { scalar } from '../ops/scalar';\nimport { sub } from '../ops/sub';\nimport { sum } from '../ops/sum';\nimport { where } from '../ops/where';\nimport { zerosLike } from '../ops/zeros_like';\nexport const powGradConfig = {\n kernelName: Pow,\n inputsToSave: ['a', 'b'],\n outputsToSave: [true],\n gradFunc: (dy, saved) => {\n const [a, b, y] = saved;\n const base = a;\n const exp = b;\n const outShape = broadcast_util.assertAndGetBroadcastShape(base.shape, exp.shape);\n const derBase = () => {\n const expFloat = cast(exp, 'float32');\n let res = mul(dy, mul(expFloat, pow(base, sub(expFloat, scalar(1)))));\n const reduceAxes = broadcast_util.getReductionAxes(base.shape, outShape);\n if (reduceAxes.length > 0) {\n res = sum(res, reduceAxes);\n }\n return reshape(res, base.shape);\n };\n const derExp = () => {\n const condition = greater(base, 0);\n const logBase = where(condition, log(base), zerosLike(base));\n let res = mul(dy, mul(y, logBase));\n const reduceAxes = broadcast_util.getReductionAxes(exp.shape, outShape);\n if (reduceAxes.length > 0) {\n res = sum(res, reduceAxes);\n }\n return reshape(res, exp.shape);\n };\n return { a: derBase, b: derExp };\n }\n};\n//# sourceMappingURL=Pow_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Prelu } from '../kernel_names';\nimport { getReductionAxes } from '../ops/broadcast_util';\nimport { greater } from '../ops/greater';\nimport { mul } from '../ops/mul';\nimport { reshape } from '../ops/reshape';\nimport { sum } from '../ops/sum';\nimport { where } from '../ops/where';\nimport { zerosLike } from '../ops/zeros_like';\nexport const preluGradConfig = {\n kernelName: Prelu,\n inputsToSave: ['x', 'alpha'],\n gradFunc: (dy, saved) => {\n const [x, alpha] = saved;\n const mask = greater(x, 0);\n return {\n x: () => where(mask, dy, mul(dy, alpha)),\n alpha: () => {\n let res = where(mask, zerosLike(dy), mul(dy, x));\n const reduceAxes = getReductionAxes(alpha.shape, dy.shape);\n if (reduceAxes.length > 0) {\n res = sum(res, reduceAxes);\n }\n return reshape(res, alpha.shape);\n }\n };\n }\n};\n//# sourceMappingURL=Prelu_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Reciprocal } from '../kernel_names';\nimport { div } from '../ops/div';\nimport { neg } from '../ops/neg';\nimport { square } from '../ops/square';\nexport const reciprocalGradConfig = {\n kernelName: Reciprocal,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => div(dy, neg(square(x))) };\n }\n};\n//# sourceMappingURL=Reciprocal_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Relu6 } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { lessEqual } from '../ops/less_equal';\nimport { mul } from '../ops/mul';\nimport { step } from '../ops/step';\nexport const relu6GradConfig = {\n kernelName: Relu6,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n const mask = mul(lessEqual(x, 6), step(x));\n return { x: () => mul(dy, cast(mask, 'float32')) };\n }\n};\n//# sourceMappingURL=Relu6_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Relu } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { mul } from '../ops/mul';\nimport { step } from '../ops/step';\nexport const reluGradConfig = {\n kernelName: Relu,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => mul(dy, cast(step(x), 'float32')) };\n }\n};\n//# sourceMappingURL=Relu_grad.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Reshape } from '../kernel_names';\nimport { reshape } from '../ops/reshape';\nexport const reshapeGradConfig = {\n kernelName: Reshape,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => reshape(dy, x.shape) };\n }\n};\n//# sourceMappingURL=Reshape_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { ResizeBilinear, ResizeBilinearGrad } from '../kernel_names';\nexport const resizeBilinearGradConfig = {\n kernelName: ResizeBilinear,\n inputsToSave: ['images'],\n gradFunc: (dy, saved, attrs) => {\n const [images] = saved;\n const backPropKernelFunc = (backend) => {\n const { alignCorners } = attrs;\n return backend.resizeBilinearBackprop(dy, images, alignCorners);\n };\n const inputs = { images };\n const imagesDer = () => ENGINE.runKernelFunc(backPropKernelFunc, inputs, null /* gradient */, ResizeBilinearGrad, attrs);\n return { images: imagesDer };\n }\n};\n//# sourceMappingURL=ResizeBilinear_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { ResizeNearestNeighbor, ResizeNearestNeighborGrad } from '../kernel_names';\nexport const resizeNearestNeighborGradConfig = {\n kernelName: ResizeNearestNeighbor,\n inputsToSave: ['images'],\n gradFunc: (dy, saved, attrs) => {\n const [images] = saved;\n const backPropKernelFunc = (backend) => {\n const { alignCorners } = attrs;\n return backend.resizeNearestNeighborBackprop(dy, images, alignCorners);\n };\n const inputs = { images };\n const imagesDer = () => ENGINE.runKernelFunc(backPropKernelFunc, inputs, null /* gradient */, ResizeNearestNeighborGrad, attrs);\n return { images: imagesDer };\n }\n};\n//# sourceMappingURL=ResizeNearestNeighbor_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Reverse } from '../kernel_names';\nimport { reverse } from '../ops/reverse';\nimport { parseAxisParam } from '../util';\nexport const reverseGradConfig = {\n kernelName: Reverse,\n gradFunc: (dy, saved, attrs) => {\n const { dims } = attrs;\n const axes = parseAxisParam(dims, dy.shape);\n return { x: () => reverse(dy, axes) };\n }\n};\n//# sourceMappingURL=Reverse_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Round } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const roundGradConfig = {\n kernelName: Round,\n gradFunc: (dy) => {\n // TODO(nsthorat): Let gradients be null for cases where we want to stop\n // backpropgation.\n return { x: () => zerosLike(dy) };\n }\n};\n//# sourceMappingURL=Round_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Rsqrt } from '../kernel_names';\nimport { div } from '../ops/div';\nimport { mul } from '../ops/mul';\nimport { neg } from '../ops/neg';\nimport { pow } from '../ops/pow';\nexport const rsqrtGradConfig = {\n kernelName: Rsqrt,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => neg(div(dy, mul(pow(x, 1.5), 2))) };\n }\n};\n//# sourceMappingURL=Rsqrt_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { SelectV2 } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { logicalNot } from '../ops/logical_not';\nimport { mul } from '../ops/mul';\nimport { zerosLike } from '../ops/zeros_like';\nexport const selectV2PoolGradConfig = {\n kernelName: SelectV2,\n inputsToSave: ['condition'],\n gradFunc: (dy, saved) => {\n const [condition] = saved;\n return {\n // TODO(julianoks): Return null for condition gradient\n // when backprop supports it.\n condition: () => cast(zerosLike(condition), 'float32'),\n t: () => mul(dy, cast(condition, dy.dtype)),\n e: () => mul(dy, cast(logicalNot(condition), dy.dtype))\n };\n }\n};\n//# sourceMappingURL=SelectV2_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Selu } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { exp } from '../ops/exp';\nimport { greater } from '../ops/greater';\nimport { mul } from '../ops/mul';\nimport { scalar } from '../ops/scalar';\nimport { SELU_SCALE, SELU_SCALEALPHA } from '../ops/selu_util';\nimport { where } from '../ops/where';\nexport const seluGradConfig = {\n kernelName: Selu,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return {\n x: () => {\n const mask = greater(x, scalar(0));\n const scaleAlpha = scalar(SELU_SCALEALPHA);\n const scale = scalar(SELU_SCALE);\n const greaterThanZeroDer = mul(dy, scale);\n const lessEqualZeroDer = mul(mul(dy, scaleAlpha), exp(cast(x, 'float32')));\n return where(mask, greaterThanZeroDer, lessEqualZeroDer);\n }\n };\n }\n};\n//# sourceMappingURL=Selu_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sigmoid } from '../kernel_names';\nimport { mul } from '../ops/mul';\nimport { scalar } from '../ops/scalar';\nimport { sub } from '../ops/sub';\nexport const sigmoidGradConfig = {\n kernelName: Sigmoid,\n outputsToSave: [true],\n gradFunc: (dy, saved) => {\n const [y] = saved;\n return { x: () => mul(dy, mul(y, sub(scalar(1), y))) };\n }\n};\n//# sourceMappingURL=Sigmoid_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sign } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const signGradConfig = {\n kernelName: Sign,\n gradFunc: (dy) => {\n return { x: () => zerosLike(dy) };\n }\n};\n//# sourceMappingURL=Sign_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sin } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { cos } from '../ops/cos';\nimport { mul } from '../ops/mul';\nexport const sinGradConfig = {\n kernelName: Sin,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => mul(cos(cast(x, 'float32')), dy) };\n }\n};\n//# sourceMappingURL=Sin_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sinh } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { cosh } from '../ops/cosh';\nimport { mul } from '../ops/mul';\nexport const sinhGradConfig = {\n kernelName: Sinh,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => mul(cosh(cast(x, 'float32')), dy) };\n }\n};\n//# sourceMappingURL=Sinh_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Slice } from '../kernel_names';\nimport { pad } from '../ops/pad';\nimport { parseSliceParams } from '../ops/slice_util';\nexport const sliceGradConfig = {\n kernelName: Slice,\n inputsToSave: ['x'],\n gradFunc: (dy, saved, attrs) => {\n const [x] = saved;\n const { begin, size } = attrs;\n const inputShape = x.shape;\n const [begin_, size_] = parseSliceParams(x, begin, size);\n // Create an Nx2 padding where the first column represents how many\n // zeros are prepended (at start) for each dimension, and the second\n // column indicates how many zeros are appended (at end).\n // The number of zeros to append is the shape of the input\n // elementwise-subtracted by both the begin vector and sizes vector.\n const paddings = [];\n for (let i = 0; i < dy.rank; i++) {\n paddings.push([begin_[i], inputShape[i] - begin_[i] - size_[i]]);\n }\n return { x: () => pad(dy, paddings) };\n }\n};\n//# sourceMappingURL=Slice_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Softmax } from '../kernel_names';\nimport { mul } from '../ops/mul';\nimport { sub } from '../ops/sub';\nimport { sum } from '../ops/sum';\nexport const softmaxGradConfig = {\n kernelName: Softmax,\n outputsToSave: [true],\n gradFunc: (dy, saved, attrs) => {\n const [y] = saved;\n const { dim } = attrs;\n const keepDims = true;\n const dyTimesY = mul(dy, y);\n return {\n logits: () => sub(dyTimesY, mul(sum(dyTimesY, [dim], keepDims), y))\n };\n }\n};\n//# sourceMappingURL=Softmax_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Softplus } from '../kernel_names';\nimport { mul } from '../ops/mul';\nimport { sigmoid } from '../ops/sigmoid';\nexport const softplusGradConfig = {\n kernelName: Softplus,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => mul(dy, sigmoid(x)) };\n }\n};\n//# sourceMappingURL=Softplus_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { SpaceToBatchND } from '../kernel_names';\nimport { batchToSpaceND } from '../ops/batch_to_space_nd';\nexport const spaceToBatchNDGradConfig = {\n kernelName: SpaceToBatchND,\n gradFunc: (dy, saved, attrs) => {\n const { blockShape, paddings } = attrs;\n return { x: () => batchToSpaceND(dy, blockShape, paddings) };\n }\n};\n//# sourceMappingURL=SpaceToBatchND_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { SplitV } from '../kernel_names';\nimport { concat } from '../ops/concat';\nexport const splitVGradConfig = {\n kernelName: SplitV,\n gradFunc: (dy, saved, attrs) => {\n const { axis } = attrs;\n return { x: () => concat(dy, axis) };\n }\n};\n//# sourceMappingURL=SplitV_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sqrt } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { div } from '../ops/div';\nimport { mul } from '../ops/mul';\nimport { sqrt } from '../ops/sqrt';\nexport const sqrtGradConfig = {\n kernelName: Sqrt,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => div(dy, mul(sqrt(cast(x, 'float32')), 2)) };\n }\n};\n//# sourceMappingURL=Sqrt_grad.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Square } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { mul } from '../ops/mul';\nexport const squareGradConfig = {\n kernelName: Square,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => mul(dy, mul(cast(x, 'float32'), 2)) };\n }\n};\n//# sourceMappingURL=Square_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { SquaredDifference } from '../kernel_names';\nimport { mul } from '../ops/mul';\nimport { scalar } from '../ops/scalar';\nimport { sub } from '../ops/sub';\nexport const squaredDifferenceGradConfig = {\n kernelName: SquaredDifference,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved) => {\n const [a, b] = saved;\n const two = scalar(2);\n const derA = () => mul(dy, mul(two, sub(a, b)));\n const derB = () => mul(dy, mul(two, sub(b, a)));\n return { a: derA, b: derB };\n }\n};\n//# sourceMappingURL=SquaredDifference_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Step } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const stepGradConfig = {\n kernelName: Step,\n gradFunc: (dy) => {\n // TODO(manrajgrover): Return null for gradients when backprop supports\n // it.\n return { x: () => zerosLike(dy) };\n }\n};\n//# sourceMappingURL=Step_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sub } from '../kernel_names';\nimport * as broadcast_util from '../ops/broadcast_util';\nimport { neg } from '../ops/neg';\nimport { reshape } from '../ops/reshape';\nimport { sum } from '../ops/sum';\nexport const subGradConfig = {\n kernelName: Sub,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved) => {\n const [a, b] = saved;\n const outShape = broadcast_util.assertAndGetBroadcastShape(a.shape, b.shape);\n const derA = () => {\n let res = dy;\n const reduceAxes = broadcast_util.getReductionAxes(a.shape, outShape);\n if (reduceAxes.length > 0) {\n res = sum(res, reduceAxes);\n }\n return reshape(res, a.shape);\n };\n const derB = () => {\n let res = dy;\n const reduceAxes = broadcast_util.getReductionAxes(b.shape, outShape);\n if (reduceAxes.length > 0) {\n res = sum(res, reduceAxes);\n }\n return reshape(neg(res), b.shape);\n };\n return { a: derA, b: derB };\n }\n};\n//# sourceMappingURL=Sub_grad.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sum } from '../kernel_names';\nimport { mul } from '../ops/mul';\nimport { ones } from '../ops/ones';\nimport { reshape } from '../ops/reshape';\nimport { parseAxisParam } from '../util';\nexport const sumGradConfig = {\n kernelName: Sum,\n inputsToSave: ['x'],\n gradFunc: (dy, saved, attrs) => {\n const [x] = saved;\n const expandedDyShape = x.shape.slice();\n const { axis } = attrs;\n const axes = parseAxisParam(axis, x.shape);\n axes.forEach(axis => {\n expandedDyShape[axis] = 1;\n });\n const expandedDy = reshape(dy, expandedDyShape);\n const derX = mul(expandedDy, ones(x.shape, 'float32'));\n return { x: () => derX };\n }\n};\n//# sourceMappingURL=Sum_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Tan } from '../kernel_names';\nimport { cos } from '../ops/cos';\nimport { div } from '../ops/div';\nimport { square } from '../ops/square';\nexport const tanGradConfig = {\n kernelName: Tan,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => div(dy, square(cos(x))) };\n }\n};\n//# sourceMappingURL=Tan_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Tanh } from '../kernel_names';\nimport { mul } from '../ops/mul';\nimport { scalar } from '../ops/scalar';\nimport { square } from '../ops/square';\nimport { sub } from '../ops/sub';\nexport const tanhGradConfig = {\n kernelName: Tanh,\n outputsToSave: [true],\n gradFunc: (dy, saved) => {\n const [y] = saved;\n return { x: () => mul(sub(scalar(1), square(y)), dy) };\n }\n};\n//# sourceMappingURL=Tanh_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Tile } from '../kernel_names';\nimport { add } from '../ops/add';\nimport { slice } from '../ops/slice';\nimport { zerosLike } from '../ops/zeros_like';\nexport const tileGradConfig = {\n kernelName: Tile,\n inputsToSave: ['x'],\n gradFunc: (dy, saved, attrs) => {\n const [x] = saved;\n const { reps } = attrs;\n const derX = () => {\n let xGrad = zerosLike(x);\n // TODO(cais): Maybe reduce memory footprint by avoiding repeated\n // slicing.\n if (x.rank === 1) {\n for (let i = 0; i < reps[0]; ++i) {\n xGrad = add(xGrad, slice(dy, [i * x.shape[0]], [x.shape[0]]));\n }\n }\n else if (x.rank === 2) {\n for (let i = 0; i < reps[0]; ++i) {\n for (let j = 0; j < reps[1]; ++j) {\n xGrad = add(xGrad, slice(dy, [i * x.shape[0], j * x.shape[1]], [\n x.shape[0], x.shape[1]\n ]));\n }\n }\n }\n else if (x.rank === 3) {\n for (let i = 0; i < reps[0]; ++i) {\n for (let j = 0; j < reps[1]; ++j) {\n for (let k = 0; k < reps[2]; ++k) {\n xGrad =\n add(xGrad, slice(dy, [i * x.shape[0], j * x.shape[1], k * x.shape[2]], [x.shape[0], x.shape[1], x.shape[2]]));\n }\n }\n }\n }\n else if (x.rank === 4) {\n for (let i = 0; i < reps[0]; ++i) {\n for (let j = 0; j < reps[1]; ++j) {\n for (let k = 0; k < reps[2]; ++k) {\n for (let l = 0; l < reps[3]; ++l) {\n xGrad =\n add(xGrad, slice(dy, [\n i * x.shape[0], j * x.shape[1], k * x.shape[2],\n l * x.shape[3]\n ], [x.shape[0], x.shape[1], x.shape[2], x.shape[3]]));\n }\n }\n }\n }\n }\n else {\n throw new Error(`Gradient for tile operation is not implemented for rank-` +\n `${x.rank} tensors yet.`);\n }\n return xGrad;\n };\n return { x: derX };\n },\n};\n//# sourceMappingURL=Tile_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Transpose } from '../kernel_names';\nimport * as axis_util from '../ops/axis_util';\nimport { transpose } from '../ops/transpose';\nexport const transposeGradConfig = {\n kernelName: Transpose,\n gradFunc: (dy, saved, attrs) => {\n const transposeAttrs = attrs;\n const { perm } = transposeAttrs;\n const undoPerm = axis_util.getUndoAxesPermutation(perm);\n return { x: () => transpose(dy, undoPerm) };\n }\n};\n//# sourceMappingURL=Transpose_grad.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Unpack } from '../kernel_names';\nimport { stack } from '../ops/stack';\nexport const unpackGradConfig = {\n kernelName: Unpack,\n gradFunc: (dy, saved, attrs) => {\n const unpackAttrs = attrs;\n const { axis } = unpackAttrs;\n return { value: () => stack(dy, axis) };\n }\n};\n//# sourceMappingURL=Unpack_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { UnsortedSegmentSum } from '../kernel_names';\nimport { expandDims } from '../ops/expand_dims';\nimport { gather } from '../ops/gather';\nimport { greaterEqual } from '../ops/greater_equal';\nimport { logicalAnd } from '../ops/logical_and';\nimport { maximum } from '../ops/maximum';\nimport { ones } from '../ops/ones';\nimport { scalar } from '../ops/scalar';\nimport { where } from '../ops/where';\nimport { zerosLike } from '../ops/zeros_like';\nexport const unsortedSegmentSumGradConfig = {\n kernelName: UnsortedSegmentSum,\n inputsToSave: ['segmentIds'],\n gradFunc: (dy, saved) => {\n const [segmentIds] = saved;\n const derX = () => {\n return gatherDropNegatives(dy, segmentIds);\n };\n return { x: derX };\n }\n};\nfunction gatherDropNegatives(x, indices) {\n // Helper function for unsorted segment ops. Gathers params for\n // positive segment ids and gathers 0 for inputs with negative segment id.\n // Mirrors _GatherDropNegatives from tensorflow/python/ops/math_grad.py\n const zeroClippedIndices = maximum(indices, zerosLike(indices));\n const gathered = gather(x, zeroClippedIndices);\n let isPositive = greaterEqual(indices, scalar(0, 'int32'));\n const numIters = gathered.rank - isPositive.rank;\n for (let i = 0; i < numIters; ++i) {\n isPositive = expandDims(isPositive, i + 1);\n }\n isPositive = logicalAnd(isPositive, ones(gathered.shape, 'bool'));\n const zeroSlice = zerosLike(gathered);\n return where(isPositive, gathered, zeroSlice);\n}\n//# sourceMappingURL=UnsortedSegmentSum_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ZerosLike } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const zerosLikeGradConfig = {\n kernelName: ZerosLike,\n gradFunc: (dy) => {\n return { x: () => zerosLike(dy) };\n }\n};\n//# sourceMappingURL=ZerosLike_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { absGradConfig } from './gradients/Abs_grad';\nimport { acosGradConfig } from './gradients/Acos_grad';\nimport { acoshGradConfig } from './gradients/Acosh_grad';\nimport { addGradConfig } from './gradients/Add_grad';\nimport { addNGradConfig } from './gradients/AddN_grad';\nimport { argMaxGradConfig } from './gradients/ArgMax_grad';\nimport { argMinGradConfig } from './gradients/ArgMin_grad';\nimport { asinGradConfig } from './gradients/Asin_grad';\nimport { asinhGradConfig } from './gradients/Asinh_grad';\nimport { atan2GradConfig } from './gradients/Atan2_grad';\nimport { atanGradConfig } from './gradients/Atan_grad';\nimport { atanhGradConfig } from './gradients/Atanh_grad';\nimport { avgPool3DGradConfig } from './gradients/AvgPool3D_grad';\nimport { avgPoolGradConfig } from './gradients/AvgPool_grad';\nimport { batchMatMulGradConfig } from './gradients/BatchMatMul_grad';\nimport { batchToSpaceNDGradConfig } from './gradients/BatchToSpaceND_grad';\nimport { broadcastToGradConfig } from './gradients/BroadcastTo_grad';\nimport { castGradConfig } from './gradients/Cast_grad';\nimport { ceilGradConfig } from './gradients/Ceil_grad';\nimport { clipByValueGradConfig } from './gradients/ClipByValue_grad';\nimport { concatGradConfig } from './gradients/Concat_grad';\nimport { conv2DGradConfig } from './gradients/Conv2D_grad';\nimport { conv2DBackpropInputGradConfig } from './gradients/Conv2DBackpropInput_grad';\nimport { conv3DGradConfig } from './gradients/Conv3D_grad';\nimport { cosGradConfig } from './gradients/Cos_grad';\nimport { coshGradConfig } from './gradients/Cosh_grad';\nimport { cumsumGradConfig } from './gradients/Cumsum_grad';\nimport { depthwiseConv2dNativeGradConfig } from './gradients/DepthwiseConv2dNative_grad';\nimport { dilation2dGradConfig } from './gradients/Dilation2D_grad';\nimport { divGradConfig } from './gradients/Div_grad';\nimport { eluGradConfig } from './gradients/Elu_grad';\nimport { erfGradConfig } from './gradients/Erf_grad';\nimport { expGradConfig } from './gradients/Exp_grad';\nimport { expm1GradConfig } from './gradients/Expm1_grad';\nimport { floorGradConfig } from './gradients/Floor_grad';\nimport { floorDivGradConfig } from './gradients/FloorDiv_grad';\nimport { fusedBatchNormGradConfig } from './gradients/FusedBatchNorm_grad';\nimport { gatherGradConfig } from './gradients/GatherV2_grad';\nimport { greaterEqualGradConfig } from './gradients/GreaterEqual_grad';\nimport { identityGradConfig } from './gradients/Identity_grad';\nimport { isFiniteGradConfig } from './gradients/IsFinite_grad';\nimport { isInfGradConfig } from './gradients/IsInf_grad';\nimport { isNanGradConfig } from './gradients/IsNan_grad';\nimport { log1pGradConfig } from './gradients/Log1p_grad';\nimport { logGradConfig } from './gradients/Log_grad';\nimport { logSoftmaxGradConfig } from 
'./gradients/LogSoftmax_grad';\nimport { lrnGradConfig } from './gradients/LRN_grad';\nimport { maxGradConfig } from './gradients/Max_grad';\nimport { maximumGradConfig } from './gradients/Maximum_grad';\nimport { maxPool3DGradConfig } from './gradients/MaxPool3D_grad';\nimport { maxPoolGradConfig } from './gradients/MaxPool_grad';\nimport { minGradConfig } from './gradients/Min_grad';\nimport { minimumGradConfig } from './gradients/Minimum_grad';\nimport { mirrorPadGradConfig } from './gradients/MirrorPad_grad';\nimport { modGradConfig } from './gradients/Mod_grad';\nimport { multiplyGradConfig } from './gradients/Multiply_grad';\nimport { negateGradConfig } from './gradients/Negate_grad';\nimport { oneHotGradConfig } from './gradients/OneHot_grad';\nimport { onesLikeGradConfig } from './gradients/OnesLike_grad';\nimport { padV2GradConfig } from './gradients/PadV2_grad';\nimport { powGradConfig } from './gradients/Pow_grad';\nimport { preluGradConfig } from './gradients/Prelu_grad';\nimport { reciprocalGradConfig } from './gradients/Reciprocal_grad';\nimport { relu6GradConfig } from './gradients/Relu6_grad';\nimport { reluGradConfig } from './gradients/Relu_grad';\nimport { reshapeGradConfig } from './gradients/Reshape_grad';\nimport { resizeBilinearGradConfig } from './gradients/ResizeBilinear_grad';\nimport { resizeNearestNeighborGradConfig } from './gradients/ResizeNearestNeighbor_grad';\nimport { reverseGradConfig } from './gradients/Reverse_grad';\nimport { roundGradConfig } from './gradients/Round_grad';\nimport { rsqrtGradConfig } from './gradients/Rsqrt_grad';\nimport { selectV2PoolGradConfig } from './gradients/SelectV2_grad';\nimport { seluGradConfig } from './gradients/Selu_grad';\nimport { sigmoidGradConfig } from './gradients/Sigmoid_grad';\nimport { signGradConfig } from './gradients/Sign_grad';\nimport { sinGradConfig } from './gradients/Sin_grad';\nimport { sinhGradConfig } from './gradients/Sinh_grad';\nimport { sliceGradConfig } from './gradients/Slice_grad';\nimport { softmaxGradConfig } from './gradients/Softmax_grad';\nimport { softplusGradConfig } from './gradients/Softplus_grad';\nimport { spaceToBatchNDGradConfig } from './gradients/SpaceToBatchND_grad';\nimport { splitVGradConfig } from './gradients/SplitV_grad';\nimport { sqrtGradConfig } from './gradients/Sqrt_grad';\nimport { squareGradConfig } from './gradients/Square_grad';\nimport { squaredDifferenceGradConfig } from './gradients/SquaredDifference_grad';\nimport { stepGradConfig } from './gradients/Step_grad';\nimport { subGradConfig } from './gradients/Sub_grad';\nimport { sumGradConfig } from './gradients/Sum_grad';\nimport { tanGradConfig } from './gradients/Tan_grad';\nimport { tanhGradConfig } from './gradients/Tanh_grad';\nimport { tileGradConfig } from './gradients/Tile_grad';\nimport { transposeGradConfig } from './gradients/Transpose_grad';\nimport { unpackGradConfig } from './gradients/Unpack_grad';\nimport { unsortedSegmentSumGradConfig } from './gradients/UnsortedSegmentSum_grad';\nimport { zerosLikeGradConfig } from './gradients/ZerosLike_grad';\nimport { registerGradient } from './kernel_registry';\n// Export all kernel configs here so that the package can auto register them\nconst gradConfigs = [\n absGradConfig,\n acosGradConfig,\n acoshGradConfig,\n addGradConfig,\n addNGradConfig,\n argMaxGradConfig,\n argMinGradConfig,\n asinGradConfig,\n asinhGradConfig,\n atan2GradConfig,\n atanGradConfig,\n atanhGradConfig,\n avgPool3DGradConfig,\n avgPoolGradConfig,\n batchMatMulGradConfig,\n 
batchToSpaceNDGradConfig,\n broadcastToGradConfig,\n castGradConfig,\n ceilGradConfig,\n clipByValueGradConfig,\n concatGradConfig,\n conv2DBackpropInputGradConfig,\n conv2DGradConfig,\n conv3DGradConfig,\n cosGradConfig,\n coshGradConfig,\n cumsumGradConfig,\n depthwiseConv2dNativeGradConfig,\n dilation2dGradConfig,\n divGradConfig,\n eluGradConfig,\n erfGradConfig,\n expGradConfig,\n expm1GradConfig,\n floorDivGradConfig,\n floorGradConfig,\n fusedBatchNormGradConfig,\n gatherGradConfig,\n greaterEqualGradConfig,\n identityGradConfig,\n isFiniteGradConfig,\n isInfGradConfig,\n isNanGradConfig,\n log1pGradConfig,\n logGradConfig,\n logSoftmaxGradConfig,\n lrnGradConfig,\n maxGradConfig,\n maxGradConfig,\n maximumGradConfig,\n maxPool3DGradConfig,\n maxPoolGradConfig,\n minGradConfig,\n minimumGradConfig,\n mirrorPadGradConfig,\n modGradConfig,\n multiplyGradConfig,\n negateGradConfig,\n oneHotGradConfig,\n onesLikeGradConfig,\n padV2GradConfig,\n padV2GradConfig,\n powGradConfig,\n preluGradConfig,\n reciprocalGradConfig,\n relu6GradConfig,\n reluGradConfig,\n reshapeGradConfig,\n resizeBilinearGradConfig,\n resizeNearestNeighborGradConfig,\n reverseGradConfig,\n roundGradConfig,\n rsqrtGradConfig,\n selectV2PoolGradConfig,\n seluGradConfig,\n sigmoidGradConfig,\n signGradConfig,\n sinGradConfig,\n sinhGradConfig,\n sliceGradConfig,\n softmaxGradConfig,\n softplusGradConfig,\n spaceToBatchNDGradConfig,\n spaceToBatchNDGradConfig,\n splitVGradConfig,\n splitVGradConfig,\n sqrtGradConfig,\n squaredDifferenceGradConfig,\n squareGradConfig,\n stepGradConfig,\n subGradConfig,\n sumGradConfig,\n tanGradConfig,\n tanhGradConfig,\n tileGradConfig,\n transposeGradConfig,\n unpackGradConfig,\n unsortedSegmentSumGradConfig,\n zerosLikeGradConfig\n];\nfor (const gradientConfig of gradConfigs) {\n registerGradient(gradientConfig);\n}\n//# sourceMappingURL=register_all_gradients.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { abs } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.abs = function () {\n this.throwIfDisposed();\n return abs(this);\n};\n//# sourceMappingURL=abs.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { acos } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.acos = function () {\n this.throwIfDisposed();\n return acos(this);\n};\n//# sourceMappingURL=acos.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { acosh } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.acosh = function () {\n this.throwIfDisposed();\n return acosh(this);\n};\n//# sourceMappingURL=acosh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { addStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated strict variants of ops have been deprecated\n */\nTensor.prototype.addStrict = function (x) {\n this.throwIfDisposed();\n return addStrict(this, x);\n};\n//# sourceMappingURL=add_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { add } from '../../ops/add';\nimport { Tensor } from '../../tensor';\nTensor.prototype.add = function (b) {\n this.throwIfDisposed();\n return add(this, b);\n};\n//# sourceMappingURL=add.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { all } from '../../ops/all';\nimport { Tensor } from '../../tensor';\nTensor.prototype.all = function (axis, keepDims) {\n this.throwIfDisposed();\n return all(this, axis, keepDims);\n};\n//# sourceMappingURL=all.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { any } from '../../ops/any';\nimport { Tensor } from '../../tensor';\nTensor.prototype.any = function (axis, keepDims) {\n this.throwIfDisposed();\n return any(this, axis, keepDims);\n};\n//# sourceMappingURL=any.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { argMax } from '../../ops/arg_max';\nimport { Tensor } from '../../tensor';\nTensor.prototype.argMax = function (axis) {\n this.throwIfDisposed();\n return argMax(this, axis);\n};\n//# sourceMappingURL=arg_max.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { argMin } from '../../ops/arg_min';\nimport { Tensor } from '../../tensor';\nTensor.prototype.argMin = function (axis) {\n this.throwIfDisposed();\n return argMin(this, axis);\n};\n//# sourceMappingURL=arg_min.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { reshape } from '../../ops/reshape';\nimport { Tensor } from '../../tensor';\nimport { assert } from '../../util';\n/** Converts a size-1 `tf.Tensor` to a `tf.Scalar`.\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.asScalar = function () {\n this.throwIfDisposed();\n assert(this.size === 1, () => 'The array must have only 1 element.');\n return reshape(this, []);\n};\n//# sourceMappingURL=as_scalar.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { cast } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * Casts a `tf.Tensor` to a specified dtype.\n *\n * @param dtype Data-type to cast the tensor to.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.asType = function (dtype) {\n this.throwIfDisposed();\n return cast(this, dtype);\n};\n//# sourceMappingURL=as_type.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { reshape } from '../../ops/reshape';\nimport { Tensor } from '../../tensor';\n/** Converts a `tf.Tensor` to a `tf.Tensor1D`.\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.as1D = function () {\n this.throwIfDisposed();\n return reshape(this, [this.size]);\n};\n//# sourceMappingURL=as1d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { reshape } from '../../ops/reshape';\nimport { Tensor } from '../../tensor';\n/**\n * Converts a `tf.Tensor` to a `tf.Tensor2D`.\n *\n * @param rows Number of rows in `tf.Tensor2D`.\n * @param columns Number of columns in `tf.Tensor2D`.\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.as2D = function (rows, columns) {\n this.throwIfDisposed();\n return reshape(this, [rows, columns]);\n};\n//# sourceMappingURL=as2d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { reshape } from '../../ops/reshape';\nimport { Tensor } from '../../tensor';\n/**\n * Converts a `tf.Tensor` to a `tf.Tensor3D`.\n *\n * @param rows Number of rows in `tf.Tensor3D`.\n * @param columns Number of columns in `tf.Tensor3D`.\n * @param depth Depth of `tf.Tensor3D`.\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.as3D = function (rows, columns, depth) {\n this.throwIfDisposed();\n return reshape(this, [rows, columns, depth]);\n};\n//# sourceMappingURL=as3d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { reshape } from '../../ops/reshape';\nimport { Tensor } from '../../tensor';\n/**\n * Converts a `tf.Tensor` to a `tf.Tensor4D`.\n *\n * @param rows Number of rows in `tf.Tensor4D`.\n * @param columns Number of columns in `tf.Tensor4D`.\n * @param depth Depth of `tf.Tensor4D`.\n * @param depth2 4th dimension of `tf.Tensor4D`.\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.as4D = function (rows, columns, depth, depth2) {\n this.throwIfDisposed();\n return reshape(this, [rows, columns, depth, depth2]);\n};\n//# sourceMappingURL=as4d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { reshape } from '../../ops/reshape';\nimport { Tensor } from '../../tensor';\n/**\n * Converts a `tf.Tensor` to a `tf.Tensor5D`.\n *\n * @param rows Number of rows in `tf.Tensor5D`.\n * @param columns Number of columns in `tf.Tensor5D`.\n * @param depth Depth of `tf.Tensor5D`.\n * @param depth2 4th dimension of `tf.Tensor5D`.\n * @param depth3 5th dimension of 'tf.Tensor5D'\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.as5D = function (rows, columns, depth, depth2, depth3) {\n this.throwIfDisposed();\n return reshape(this, [rows, columns, depth, depth2, depth3]);\n};\n//# sourceMappingURL=as5d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { asin } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.asin = function () {\n this.throwIfDisposed();\n return asin(this);\n};\n//# sourceMappingURL=asin.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { asinh } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.asinh = function () {\n this.throwIfDisposed();\n return asinh(this);\n};\n//# sourceMappingURL=asinh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { atan } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.atan = function () {\n this.throwIfDisposed();\n return atan(this);\n};\n//# sourceMappingURL=atan.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { atan2 } from '../../ops/atan2';\nimport { Tensor } from '../../tensor';\nTensor.prototype.atan2 = function (b) {\n this.throwIfDisposed();\n return atan2(this, b);\n};\n//# sourceMappingURL=atan2.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { atanh } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.atanh = function () {\n this.throwIfDisposed();\n return atanh(this);\n};\n//# sourceMappingURL=atanh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { avgPool } from '../../ops/avg_pool';\nimport { Tensor } from '../../tensor';\nTensor.prototype.avgPool = function (filterSize, strides, pad, dimRoundingMode) {\n this.throwIfDisposed();\n return avgPool(this, filterSize, strides, pad, dimRoundingMode);\n};\n//# sourceMappingURL=avg_pool.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { batchToSpaceND } from '../../ops/batch_to_space_nd';\nimport { Tensor } from '../../tensor';\nTensor.prototype.batchToSpaceND = function (blockShape, crops) {\n this.throwIfDisposed();\n return batchToSpaceND(this, blockShape, crops);\n};\n//# sourceMappingURL=batch_to_space_nd.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { batchNorm } from '../../ops/batchnorm';\nimport { Tensor } from '../../tensor';\nTensor.prototype.batchNorm = function (mean, variance, offset, scale, varianceEpsilon) {\n this.throwIfDisposed();\n return batchNorm(this, mean, variance, offset, scale, varianceEpsilon);\n};\n//# sourceMappingURL=batchnorm.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { broadcastTo } from '../../ops/broadcast_to';\nimport { Tensor } from '../../tensor';\nTensor.prototype.broadcastTo = function (shape) {\n this.throwIfDisposed();\n return broadcastTo(this, shape);\n};\n//# sourceMappingURL=broadcast_to.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { cast } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.cast = function (dtype) {\n this.throwIfDisposed();\n return cast(this, dtype);\n};\n//# sourceMappingURL=cast.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { ceil } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.ceil = function () {\n this.throwIfDisposed();\n return ceil(this);\n};\n//# sourceMappingURL=ceil.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { clipByValue } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.clipByValue = function (min, max) {\n this.throwIfDisposed();\n return clipByValue(this, min, max);\n};\n//# sourceMappingURL=clip_by_value.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { concat } from '../../ops/concat';\nimport { Tensor } from '../../tensor';\nTensor.prototype.concat = function (x, axis) {\n this.throwIfDisposed();\n if (x instanceof Tensor) {\n x = [x];\n }\n return concat([this, ...x], axis);\n};\n//# sourceMappingURL=concat.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { conv1d } from '../../ops/conv1d';\nimport { Tensor } from '../../tensor';\nTensor.prototype.conv1d = function (filter, stride, pad, dataFormat, dilation, dimRoundingMode) {\n this.throwIfDisposed();\n return conv1d(this, filter, stride, pad, dataFormat, dilation, dimRoundingMode);\n};\n//# sourceMappingURL=conv1d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { conv2dTranspose } from '../../ops/conv2d_transpose';\nimport { Tensor } from '../../tensor';\nTensor.prototype.conv2dTranspose = function (filter, outputShape, strides, pad, dimRoundingMode) {\n this.throwIfDisposed();\n return conv2dTranspose(this, filter, outputShape, strides, pad, dimRoundingMode);\n};\n//# sourceMappingURL=conv2d_transpose.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { conv2d } from '../../ops/conv2d';\nimport { Tensor } from '../../tensor';\nTensor.prototype.conv2d = function (filter, strides, pad, dataFormat, dilations, dimRoundingMode) {\n this.throwIfDisposed();\n return conv2d(this, filter, strides, pad, dataFormat, dilations, dimRoundingMode);\n};\n//# sourceMappingURL=conv2d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { cos } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.cos = function () {\n this.throwIfDisposed();\n return cos(this);\n};\n//# sourceMappingURL=cos.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { cosh } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.cosh = function () {\n this.throwIfDisposed();\n return cosh(this);\n};\n//# sourceMappingURL=cosh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { cumsum } from '../../ops/cumsum';\nimport { Tensor } from '../../tensor';\nTensor.prototype.cumsum = function (axis, exclusive, reverse) {\n this.throwIfDisposed();\n return cumsum(this, axis, exclusive, reverse);\n};\n//# sourceMappingURL=cumsum.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { depthToSpace } from '../../ops/depth_to_space';\nimport { Tensor } from '../../tensor';\nTensor.prototype.depthToSpace = function (blockSize, dataFormat) {\n this.throwIfDisposed();\n return depthToSpace(this, blockSize, dataFormat);\n};\n//# sourceMappingURL=depth_to_space.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { deprecationWarn } from '../../globals';\nimport { depthwiseConv2d } from '../../ops/depthwise_conv2d';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated Use `depthwiseConv2d` instead.\n */\nTensor.prototype.depthwiseConv2D = function (filter, strides, pad, dataFormat, dilations, dimRoundingMode) {\n deprecationWarn('depthwiseConv2D is deprecated, use depthwiseConv2d instead');\n this.throwIfDisposed();\n return depthwiseConv2d(this, filter, strides, pad, dataFormat, dilations, dimRoundingMode);\n};\n//# sourceMappingURL=depthwise_conv2D_deprecated.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { depthwiseConv2d } from '../../ops/depthwise_conv2d';\nimport { Tensor } from '../../tensor';\nTensor.prototype.depthwiseConv2d = function (filter, strides, pad, dataFormat, dilations, dimRoundingMode) {\n this.throwIfDisposed();\n return depthwiseConv2d(this, filter, strides, pad, dataFormat, dilations, dimRoundingMode);\n};\n//# sourceMappingURL=depthwise_conv2d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { dilation2d } from '../../ops/dilation2d';\nimport { Tensor } from '../../tensor';\nTensor.prototype.dilation2d = function (filter, strides, pad, dilations, dataFormat) {\n this.throwIfDisposed();\n return dilation2d(this, filter, strides, pad, dilations, dataFormat);\n};\n//# sourceMappingURL=dilation2d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { divNoNan } from '../../ops/div_no_nan';\nimport { Tensor } from '../../tensor';\nTensor.prototype.divNoNan = function (b) {\n this.throwIfDisposed();\n return divNoNan(this, b);\n};\n//# sourceMappingURL=div_no_nan.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { divStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.divStrict = function (x) {\n this.throwIfDisposed();\n return divStrict(this, x);\n};\n//# sourceMappingURL=div_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { div } from '../../ops/div';\nimport { Tensor } from '../../tensor';\nTensor.prototype.div = function (b) {\n this.throwIfDisposed();\n return div(this, b);\n};\n//# sourceMappingURL=div.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { dot } from '../../ops/dot';\nimport { Tensor } from '../../tensor';\nTensor.prototype.dot = function (b) {\n this.throwIfDisposed();\n return dot(this, b);\n};\n//# sourceMappingURL=dot.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { elu } from '../../ops/elu';\nimport { Tensor } from '../../tensor';\nTensor.prototype.elu = function () {\n this.throwIfDisposed();\n return elu(this);\n};\n//# sourceMappingURL=elu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { equalStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated strict variants of ops have been deprecated\n */\nTensor.prototype.equalStrict = function (x) {\n this.throwIfDisposed();\n return equalStrict(this, x);\n};\n//# sourceMappingURL=equal_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { equal } from '../../ops/equal';\nimport { Tensor } from '../../tensor';\nTensor.prototype.equal = function (b) {\n this.throwIfDisposed();\n return equal(this, b);\n};\n//# sourceMappingURL=equal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { erf } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.erf = function () {\n this.throwIfDisposed();\n return erf(this);\n};\n//# sourceMappingURL=erf.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { exp } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.exp = function () {\n this.throwIfDisposed();\n return exp(this);\n};\n//# sourceMappingURL=exp.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { expandDims } from '../../ops/expand_dims';\nimport { Tensor } from '../../tensor';\nTensor.prototype.expandDims = function (axis) {\n this.throwIfDisposed();\n return expandDims(this, axis);\n};\n//# sourceMappingURL=expand_dims.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { expm1 } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.expm1 = function () {\n this.throwIfDisposed();\n return expm1(this);\n};\n//# sourceMappingURL=expm1.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { fft } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.fft = function () {\n this.throwIfDisposed();\n return fft(this);\n};\n//# sourceMappingURL=fft.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { reshape } from '../../ops/reshape';\nimport { Tensor } from '../../tensor';\n/** Flatten a Tensor to a 1D array.\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.flatten = function () {\n this.throwIfDisposed();\n return reshape(this, [this.size]);\n};\n//# sourceMappingURL=flatten.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { floor } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.floor = function () {\n this.throwIfDisposed();\n return floor(this);\n};\n//# sourceMappingURL=floor.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { floorDiv } from '../../ops/floorDiv';\nimport { Tensor } from '../../tensor';\nTensor.prototype.floorDiv = function (b) {\n this.throwIfDisposed();\n return floorDiv(this, b);\n};\n//# sourceMappingURL=floorDiv.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { gather } from '../../ops/gather';\nimport { Tensor } from '../../tensor';\nTensor.prototype.gather = function (indices, axis) {\n this.throwIfDisposed();\n return gather(this, indices, axis);\n};\n//# sourceMappingURL=gather.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { greaterEqualStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated strict variants of ops have been deprecated\n */\nTensor.prototype.greaterEqualStrict = function (x) {\n this.throwIfDisposed();\n return greaterEqualStrict(this, x);\n};\n//# sourceMappingURL=greater_equal_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { greaterEqual } from '../../ops/greater_equal';\nimport { Tensor } from '../../tensor';\nTensor.prototype.greaterEqual = function (b) {\n this.throwIfDisposed();\n return greaterEqual(this, b);\n};\n//# sourceMappingURL=greater_equal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { greaterStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated strict variants of ops have been deprecated\n */\nTensor.prototype.greaterStrict = function (x) {\n this.throwIfDisposed();\n return greaterStrict(this, x);\n};\n//# sourceMappingURL=greater_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { greater } from '../../ops/greater';\nimport { Tensor } from '../../tensor';\nTensor.prototype.greater = function (b) {\n this.throwIfDisposed();\n return greater(this, b);\n};\n//# sourceMappingURL=greater.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { ifft } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.ifft = function () {\n this.throwIfDisposed();\n return ifft(this);\n};\n//# sourceMappingURL=ifft.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { irfft } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.irfft = function () {\n this.throwIfDisposed();\n return irfft(this);\n};\n//# sourceMappingURL=irfft.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { isFinite } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.isFinite = function () {\n this.throwIfDisposed();\n return isFinite(this);\n};\n//# sourceMappingURL=is_finite.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { isInf } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.isInf = function () {\n this.throwIfDisposed();\n return isInf(this);\n};\n//# sourceMappingURL=is_inf.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { isNaN } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.isNaN = function () {\n this.throwIfDisposed();\n return isNaN(this);\n};\n//# sourceMappingURL=is_nan.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { leakyRelu } from '../../ops/leaky_relu';\nimport { Tensor } from '../../tensor';\nTensor.prototype.leakyRelu = function (alpha) {\n this.throwIfDisposed();\n return leakyRelu(this, alpha);\n};\n//# sourceMappingURL=leaky_relu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { lessEqualStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated strict variants of ops have been deprecated\n */\nTensor.prototype.lessEqualStrict = function (x) {\n this.throwIfDisposed();\n return lessEqualStrict(this, x);\n};\n//# sourceMappingURL=less_equal_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { lessEqual } from '../../ops/less_equal';\nimport { Tensor } from '../../tensor';\nTensor.prototype.lessEqual = function (b) {\n this.throwIfDisposed();\n return lessEqual(this, b);\n};\n//# sourceMappingURL=less_equal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { lessStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.lessStrict = function (x) {\n this.throwIfDisposed();\n return lessStrict(this, x);\n};\n//# sourceMappingURL=less_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { less } from '../../ops/less';\nimport { Tensor } from '../../tensor';\nTensor.prototype.less = function (b) {\n this.throwIfDisposed();\n return less(this, b);\n};\n//# sourceMappingURL=less.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
[Bundled build output: source-map "sourcesContent" strings (one escaped-JSON line wrapped across many lines) for the TensorFlow.js chainable tensor-op augmentation modules packaged in the compiled bundle. Every module repeats the same Apache 2.0 license header and follows one pattern: import a single functional op plus the Tensor class, then attach a thin method to Tensor.prototype that calls this.throwIfDisposed() and delegates to the op. Ops covered in this span: localResponseNormalization, logSigmoid, logSoftmax, logSumExp, log, log1p, logicalAnd, logicalNot, logicalOr, logicalXor, matMul, maxPool, max, maximum, maximumStrict, mean, min, minimum, minimumStrict, mirrorPad, mod, modStrict, mul, mulStrict, neg, norm, notEqual, notEqualStrict, oneHot, onesLike, pad, pool, pow, powStrict, prelu, prod, reciprocal, relu, relu6, reshape, reshapeAs, resizeBilinear, resizeNearestNeighbor, reverse, rfft, round, rsqrt, selu, separableConv2d, sigmoid, sign, sin, sinh, slice, softmax, softplus, spaceToBatchND, split, sqrt, square, squaredDifference, squaredDifferenceStrict, squeeze, stack, step, stridedSlice, sub, subStrict, sum, tan, tanh, tile. The *Strict variants carry an @deprecated note ("strict variants of ops have been deprecated").]
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { cast } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/** Casts the array to type `bool`\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.toBool = function () {\n this.throwIfDisposed();\n return cast(this, 'bool');\n};\n//# sourceMappingURL=to_bool.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { cast } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/** Casts the array to type `float32`\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.toFloat = function () {\n this.throwIfDisposed();\n return cast(this, 'float32');\n};\n//# sourceMappingURL=to_float.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { cast } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/** Casts the array to type `int32`\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.toInt = function () {\n this.throwIfDisposed();\n return cast(this, 'int32');\n};\n//# sourceMappingURL=to_int.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { topk } from '../../ops/topk';\nimport { Tensor } from '../../tensor';\nTensor.prototype.topk = function (k, sorted) {\n this.throwIfDisposed();\n return topk(this, k, sorted);\n};\n//# sourceMappingURL=topk.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { transpose } from '../../ops/transpose';\nimport { Tensor } from '../../tensor';\nTensor.prototype.transpose = function (perm) {\n this.throwIfDisposed();\n return transpose(this, perm);\n};\n//# sourceMappingURL=transpose.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { unique } from '../../ops/unique';\nimport { Tensor } from '../../tensor';\nTensor.prototype.unique = function (axis) {\n this.throwIfDisposed();\n return unique(this, axis);\n};\n//# sourceMappingURL=unique.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { unsortedSegmentSum } from '../../ops/unsorted_segment_sum';\nimport { Tensor } from '../../tensor';\nTensor.prototype.unsortedSegmentSum = function (segmentIds, numSegments) {\n this.throwIfDisposed();\n return unsortedSegmentSum(this, segmentIds, numSegments);\n};\n//# sourceMappingURL=unsorted_segment_sum.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { unstack } from '../../ops/unstack';\nimport { Tensor } from '../../tensor';\nTensor.prototype.unstack = function (axis) {\n this.throwIfDisposed();\n return unstack(this, axis);\n};\n//# sourceMappingURL=unstack.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { where } from '../../ops/where';\nimport { Tensor } from '../../tensor';\nTensor.prototype.where = function (condition, x) {\n this.throwIfDisposed();\n return where(condition, this, x);\n};\n//# sourceMappingURL=where.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { zerosLike } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.zerosLike = function () {\n this.throwIfDisposed();\n return zerosLike(this);\n};\n//# sourceMappingURL=zeros_like.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport './abs';\nimport './acos';\nimport './acosh';\nimport './add_strict';\nimport './add';\nimport './all';\nimport './any';\nimport './arg_max';\nimport './arg_min';\nimport './as_scalar';\nimport './as_type';\nimport './as1d';\nimport './as2d';\nimport './as3d';\nimport './as4d';\nimport './as5d';\nimport './asin';\nimport './asinh';\nimport './atan';\nimport './atan2';\nimport './atanh';\nimport './avg_pool';\nimport './batch_to_space_nd';\nimport './batchnorm';\nimport './broadcast_to';\nimport './cast';\nimport './ceil';\nimport './clip_by_value';\nimport './concat';\nimport './conv1d';\nimport './conv2d_transpose';\nimport './conv2d';\nimport './cos';\nimport './cosh';\nimport './cumsum';\nimport './depth_to_space';\nimport './depthwise_conv2D_deprecated';\nimport './depthwise_conv2d';\nimport './dilation2d';\nimport './div_no_nan';\nimport './div_strict';\nimport './div';\nimport './dot';\nimport './elu';\nimport './equal_strict';\nimport './equal';\nimport './erf';\nimport './exp';\nimport './expand_dims';\nimport './expm1';\nimport './fft';\nimport './flatten';\nimport './floor';\nimport './floorDiv';\nimport './gather';\nimport './greater_equal_strict';\nimport './greater_equal';\nimport './greater_strict';\nimport './greater';\nimport './ifft';\nimport './irfft';\nimport './is_finite';\nimport './is_inf';\nimport './is_nan';\nimport './leaky_relu';\nimport './less_equal_strict';\nimport './less_equal';\nimport './less_strict';\nimport './less';\nimport './local_response_normalization';\nimport './log_sigmoid';\nimport './log_softmax';\nimport './log_sum_exp';\nimport './log';\nimport './log1p';\nimport './logical_and';\nimport './logical_not';\nimport './logical_or';\nimport './logical_xor';\nimport './mat_mul';\nimport './max_pool';\nimport './max';\nimport './maximum_strict';\nimport './maximum';\nimport './mean';\nimport 
'./min';\nimport './minimum_strict';\nimport './minimum';\nimport './mirror_pad';\nimport './mod_strict';\nimport './mod';\nimport './mul_strict';\nimport './mul';\nimport './neg';\nimport './norm';\nimport './not_equal_strict';\nimport './not_equal';\nimport './one_hot';\nimport './ones_like';\nimport './pad';\nimport './pool';\nimport './pow_strict';\nimport './pow';\nimport './prelu';\nimport './prod';\nimport './reciprocal';\nimport './relu';\nimport './relu6';\nimport './reshape_as';\nimport './reshape';\nimport './resize_bilinear';\nimport './resize_nearest_neighbor';\nimport './reverse';\nimport './rfft';\nimport './round';\nimport './rsqrt';\nimport './selu';\nimport './separable_conv2d';\nimport './sigmoid';\nimport './sign';\nimport './sin';\nimport './sinh';\nimport './slice';\nimport './softmax';\nimport './softplus';\nimport './space_to_batch_nd';\nimport './split';\nimport './sqrt';\nimport './square';\nimport './squared_difference';\nimport './squared_difference_strict';\nimport './squeeze';\nimport './stack';\nimport './step';\nimport './strided_slice';\nimport './sub_strict';\nimport './sub';\nimport './sum';\nimport './tan';\nimport './tanh';\nimport './tile';\nimport './to_bool';\nimport './to_float';\nimport './to_int';\nimport './topk';\nimport './transpose';\nimport './unique';\nimport './unsorted_segment_sum';\nimport './unstack';\nimport './where';\nimport './zeros_like';\n//# sourceMappingURL=register_all_chained_ops.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// Required side effectful code.\nimport './base_side_effects';\n// All exports from this package should be in base.\nexport * from './base';\n// Register all the gradients.\nimport './register_all_gradients';\n// Import all op chainers and add type info to Tensor.\nimport './public/chained_ops/register_all_chained_ops';\n//# sourceMappingURL=index.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport { backend } from '@tensorflow/tfjs-core';\nlet _epsilon;\n/**\n * Returns the value of the fuzz factor used in numeric expressions.\n */\nexport function epsilon() {\n if (_epsilon == null) {\n _epsilon = backend().epsilon();\n }\n return _epsilon;\n}\n/**\n * Sets the value of the fuzz factor used in numeric expressions.\n * @param e New value of epsilon.\n */\nexport function setEpsilon(e) {\n _epsilon = e;\n}\n/**\n * Returns the default image data format convention.\n */\nexport function imageDataFormat() {\n return 'channelsLast';\n}\n//# sourceMappingURL=common.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * 
license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Explicit error types.\n *\n * See the following link for more information about why the code includes\n * calls to setPrototypeOf:\n *\n * https://github.com/Microsoft/TypeScript-wiki/blob/master/Breaking-Changes.md#extending-built-ins-like-error-array-and-map-may-no-longer-work\n */\n// tslint:enable\n/**\n * Equivalent of Python's AttributeError.\n */\nexport class AttributeError extends Error {\n constructor(message) {\n super(message);\n // Set the prototype explicitly.\n Object.setPrototypeOf(this, AttributeError.prototype);\n }\n}\n/**\n * Equivalent of Python's RuntimeError.\n */\nexport class RuntimeError extends Error {\n constructor(message) {\n super(message);\n // Set the prototype explicitly.\n Object.setPrototypeOf(this, RuntimeError.prototype);\n }\n}\n/**\n * Equivalent of Python's ValueError.\n */\nexport class ValueError extends Error {\n constructor(message) {\n super(message);\n // Set the prototype explicitly.\n Object.setPrototypeOf(this, ValueError.prototype);\n }\n}\n/**\n * Equivalent of Python's NotImplementedError.\n */\nexport class NotImplementedError extends Error {\n constructor(message) {\n super(message);\n // Set the prototype explicitly.\n Object.setPrototypeOf(this, NotImplementedError.prototype);\n }\n}\n/**\n * Equivalent of Python's AssertionError.\n */\nexport class AssertionError extends Error {\n constructor(message) {\n super(message);\n // Set the prototype explicitly.\n Object.setPrototypeOf(this, AssertionError.prototype);\n }\n}\n/**\n * Equivalent of Python's IndexError.\n */\nexport class IndexError extends Error {\n constructor(message) {\n super(message);\n // Set the prototype explicitly.\n Object.setPrototypeOf(this, IndexError.prototype);\n }\n}\n//# sourceMappingURL=errors.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* Original source: utils/generic_utils.py */\nimport { util } from '@tensorflow/tfjs-core';\nimport { AssertionError, ValueError } from '../errors';\n// tslint:enable\n/**\n * If `value` is an Array, equivalent to Python's `value * numValues`.\n * If `value` is not an Array, equivalent to Python's `[value] * numValues`\n */\n// tslint:disable-next-line:no-any\nexport function pyListRepeat(value, numValues) {\n if (Array.isArray(value)) {\n // tslint:disable-next-line:no-any\n let newArray = [];\n for (let i = 0; i < numValues; i++) {\n newArray = newArray.concat(value);\n }\n return newArray;\n }\n else {\n const newArray = new Array(numValues);\n newArray.fill(value);\n return newArray;\n }\n}\nexport function assert(val, message) {\n if (!val) {\n throw new AssertionError(message);\n }\n}\n/**\n * Count the number of elements of the `array` that are equal to `reference`.\n */\nexport function count(array, refernce) {\n let counter = 0;\n for (const item of array) {\n if (item === refernce) {\n counter++;\n }\n }\n return counter;\n}\n/**\n * If an array is of length 1, just return the first element. 
Otherwise, return\n * the full array.\n * @param tensors\n */\nexport function singletonOrArray(xs) {\n if (xs.length === 1) {\n return xs[0];\n }\n return xs;\n}\n/**\n * Normalizes a list/tensor into a list.\n *\n * If a tensor is passed, we return\n * a list of size 1 containing the tensor.\n *\n * @param x target object to be normalized.\n */\n// tslint:disable-next-line:no-any\nexport function toList(x) {\n if (Array.isArray(x)) {\n return x;\n }\n return [x];\n}\n/**\n * Generate a UID for a list\n */\n// tslint:disable-next-line:no-any\nexport function objectListUid(objs) {\n const objectList = toList(objs);\n let retVal = '';\n for (const obj of objectList) {\n if (obj.id == null) {\n throw new ValueError(`Object ${obj} passed to objectListUid without an id`);\n }\n if (retVal !== '') {\n retVal = retVal + ', ';\n }\n retVal = `${retVal}${Math.abs(obj.id)}`;\n }\n return retVal;\n}\n/**\n * Converts string to snake-case.\n * @param name\n */\nexport function toSnakeCase(name) {\n const intermediate = name.replace(/(.)([A-Z][a-z0-9]+)/g, '$1_$2');\n const insecure = intermediate.replace(/([a-z])([A-Z])/g, '$1_$2').toLowerCase();\n /*\n If the class is private the name starts with \"_\" which is not secure\n for creating scopes. We prefix the name with \"private\" in this case.\n */\n if (insecure[0] !== '_') {\n return insecure;\n }\n return 'private' + insecure;\n}\nexport function toCamelCase(identifier) {\n // quick return for empty string or single character strings\n if (identifier.length <= 1) {\n return identifier;\n }\n // Check for the underscore indicating snake_case\n if (identifier.indexOf('_') === -1) {\n return identifier;\n }\n return identifier.replace(/[_]+(\\w|$)/g, (m, p1) => p1.toUpperCase());\n}\n// tslint:disable-next-line:no-any\nlet _GLOBAL_CUSTOM_OBJECTS = {};\nexport function serializeKerasObject(instance) {\n if (instance === null || instance === undefined) {\n return null;\n }\n const dict = {};\n dict['className'] = instance.getClassName();\n dict['config'] = instance.getConfig();\n return dict;\n}\n/**\n * Replace ndarray-style scalar objects in serialization objects with numbers.\n *\n * Background: In some versions of tf.keras, certain scalar values in the HDF5\n * model save file can be serialized as: `{'type': 'ndarray', 'value': num}`,\n * where in `num` is a plain number. 
This method converts such serialization\n * to a `number`.\n *\n * @param config The keras-format serialization object to be processed\n * (in place).\n */\nfunction convertNDArrayScalarsInConfig(config) {\n if (config == null || typeof config !== 'object') {\n return;\n }\n else if (Array.isArray(config)) {\n config.forEach(configItem => convertNDArrayScalarsInConfig(configItem));\n }\n else {\n const fields = Object.keys(config);\n for (const field of fields) {\n const value = config[field];\n if (value != null && typeof value === 'object') {\n if (!Array.isArray(value) && value['type'] === 'ndarray' &&\n typeof value['value'] === 'number') {\n config[field] = value['value'];\n }\n else {\n convertNDArrayScalarsInConfig(value);\n }\n }\n }\n }\n}\n/**\n * Deserialize a saved Keras Object\n * @param identifier either a string ID or a saved Keras dictionary\n * @param moduleObjects a list of Python class names to object constructors\n * @param customObjects a list of Python class names to object constructors\n * @param printableModuleName debug text for the object being reconstituted\n * @param fastWeightInit Optional flag to use fast weight initialization\n * during deserialization. This is applicable to cases in which\n * the initialization will be immediately overwritten by loaded weight\n * values. Default: `false`.\n * @returns a TensorFlow.js Layers object\n */\n// tslint:disable:no-any\nexport function deserializeKerasObject(identifier, moduleObjects = {}, customObjects = {}, printableModuleName = 'object', fastWeightInit = false) {\n // tslint:enable\n if (typeof identifier === 'string') {\n const functionName = identifier;\n let fn;\n if (functionName in customObjects) {\n fn = customObjects[functionName];\n }\n else if (functionName in _GLOBAL_CUSTOM_OBJECTS) {\n fn = _GLOBAL_CUSTOM_OBJECTS[functionName];\n }\n else {\n fn = moduleObjects[functionName];\n if (fn == null) {\n throw new ValueError(`Unknown ${printableModuleName}: ${identifier}. ` +\n `This may be due to one of the following reasons:\\n` +\n `1. The ${printableModuleName} is defined in Python, in which ` +\n `case it needs to be ported to TensorFlow.js or your JavaScript ` +\n `code.\\n` +\n `2. The custom ${printableModuleName} is defined in JavaScript, ` +\n `but is not registered properly with ` +\n `tf.serialization.registerClass().`);\n // TODO(cais): Add link to tutorial page on custom layers.\n }\n }\n return fn;\n }\n else {\n // In this case we are dealing with a Keras config dictionary.\n const config = identifier;\n if (config['className'] == null || config['config'] == null) {\n throw new ValueError(`${printableModuleName}: Improper config format: ` +\n `${JSON.stringify(config)}.\\n` +\n `'className' and 'config' must set.`);\n }\n const className = config['className'];\n let cls, fromConfig;\n if (className in customObjects) {\n [cls, fromConfig] = customObjects[className];\n }\n else if (className in _GLOBAL_CUSTOM_OBJECTS) {\n [cls, fromConfig] = _GLOBAL_CUSTOM_OBJECTS['className'];\n }\n else if (className in moduleObjects) {\n [cls, fromConfig] = moduleObjects[className];\n }\n if (cls == null) {\n throw new ValueError(`Unknown ${printableModuleName}: ${className}. ` +\n `This may be due to one of the following reasons:\\n` +\n `1. The ${printableModuleName} is defined in Python, in which ` +\n `case it needs to be ported to TensorFlow.js or your JavaScript ` +\n `code.\\n` +\n `2. 
The custom ${printableModuleName} is defined in JavaScript, ` +\n `but is not registered properly with ` +\n `tf.serialization.registerClass().`);\n // TODO(cais): Add link to tutorial page on custom layers.\n }\n if (fromConfig != null) {\n // Porting notes: Instead of checking to see whether fromConfig accepts\n // customObjects, we create a customObjects dictionary and tack it on to\n // config['config'] as config['config'].customObjects. Objects can use it,\n // if they want.\n // tslint:disable-next-line:no-any\n const customObjectsCombined = {};\n for (const key of Object.keys(_GLOBAL_CUSTOM_OBJECTS)) {\n customObjectsCombined[key] = _GLOBAL_CUSTOM_OBJECTS[key];\n }\n for (const key of Object.keys(customObjects)) {\n customObjectsCombined[key] = customObjects[key];\n }\n // Add the customObjects to config\n const nestedConfig = config['config'];\n nestedConfig['customObjects'] = customObjectsCombined;\n const backupCustomObjects = Object.assign({}, _GLOBAL_CUSTOM_OBJECTS);\n for (const key of Object.keys(customObjects)) {\n _GLOBAL_CUSTOM_OBJECTS[key] = customObjects[key];\n }\n convertNDArrayScalarsInConfig(config['config']);\n const returnObj = fromConfig(cls, config['config'], customObjects, fastWeightInit);\n _GLOBAL_CUSTOM_OBJECTS = Object.assign({}, backupCustomObjects);\n return returnObj;\n }\n else {\n // Then `cls` may be a function returning a class.\n // In this case by convention `config` holds\n // the kwargs of the function.\n const backupCustomObjects = Object.assign({}, _GLOBAL_CUSTOM_OBJECTS);\n for (const key of Object.keys(customObjects)) {\n _GLOBAL_CUSTOM_OBJECTS[key] = customObjects[key];\n }\n // In python this is **config['config'], for tfjs-layers we require\n // classes that use this fall-through construction method to take\n // a config interface that mimics the expansion of named parameters.\n const returnObj = new cls(config['config']);\n _GLOBAL_CUSTOM_OBJECTS = Object.assign({}, backupCustomObjects);\n return returnObj;\n }\n }\n}\n/**\n * Compares two numbers for sorting.\n * @param a\n * @param b\n */\nexport function numberCompare(a, b) {\n return (a < b) ? -1 : ((a > b) ? 
1 : 0);\n}\n/**\n * Comparison of two numbers for reverse sorting.\n * @param a\n * @param b\n */\nexport function reverseNumberCompare(a, b) {\n return -1 * numberCompare(a, b);\n}\n/**\n * Convert a string into the corresponding DType.\n * @param dtype\n * @returns An instance of DType.\n */\nexport function stringToDType(dtype) {\n switch (dtype) {\n case 'float32':\n return 'float32';\n default:\n throw new ValueError(`Invalid dtype: ${dtype}`);\n }\n}\n/**\n * Test the element-by-element equality of two Arrays of strings.\n * @param xs First array of strings.\n * @param ys Second array of strings.\n * @returns Wether the two arrays are all equal, element by element.\n */\nexport function stringsEqual(xs, ys) {\n if (xs == null || ys == null) {\n return xs === ys;\n }\n if (xs.length !== ys.length) {\n return false;\n }\n for (let i = 0; i < xs.length; ++i) {\n if (xs[i] !== ys[i]) {\n return false;\n }\n }\n return true;\n}\n/**\n * Get the unique elements of an array.\n * @param xs Array.\n * @returns An Array consisting of the unique elements in `xs`.\n */\nexport function unique(xs) {\n if (xs == null) {\n return xs;\n }\n const out = [];\n // TODO(cais): Maybe improve performance by sorting.\n for (const x of xs) {\n if (out.indexOf(x) === -1) {\n out.push(x);\n }\n }\n return out;\n}\n/**\n * Determine if an Object is empty (i.e., does not have own properties).\n * @param obj Object\n * @returns Whether the Object is empty.\n * @throws ValueError: If object is `null` or `undefined`.\n */\nexport function isObjectEmpty(obj) {\n if (obj == null) {\n throw new ValueError(`Invalid value in obj: ${JSON.stringify(obj)}`);\n }\n for (const key in obj) {\n if (obj.hasOwnProperty(key)) {\n return false;\n }\n }\n return true;\n}\n/**\n * Helper function used to build type union/enum run-time checkers.\n * @param values The list of allowed values.\n * @param label A string name for the type\n * @param value The value to test.\n * @throws ValueError: If the value is not in values nor `undefined`/`null`.\n */\nexport function checkStringTypeUnionValue(values, label, value) {\n if (value == null) {\n return;\n }\n if (values.indexOf(value) < 0) {\n throw new ValueError(`${value} is not a valid ${label}. Valid values are ${values} or null/undefined.`);\n }\n}\n/**\n * Helper function for verifying the types of inputs.\n *\n * Ensures that the elements of `x` are all of type `expectedType`.\n * Also verifies that the length of `x` is within bounds.\n *\n * @param x Object to test.\n * @param expectedType The string expected type of all of the elements in the\n * Array.\n * @param minLength Return false if x.length is less than this.\n * @param maxLength Return false if x.length is greater than this.\n * @returns true if and only if `x` is an `Array` with\n * length >= `minLength` and <= `maxLength`.\n */\n// tslint:disable:no-any\nexport function checkArrayTypeAndLength(x, expectedType, minLength = 0, maxLength = Infinity) {\n assert(minLength >= 0);\n assert(maxLength >= minLength);\n return (Array.isArray(x) && x.length >= minLength && x.length <= maxLength &&\n x.every(e => typeof e === expectedType));\n}\n// tslint:enable:no-any\n/**\n * Assert that a value or an array of value are positive integer.\n *\n * @param value The value being asserted on. 
May be a single number or an array\n * of numbers.\n * @param name Name of the value, used to make the error message.\n */\nexport function assertPositiveInteger(value, name) {\n if (Array.isArray(value)) {\n util.assert(value.length > 0, () => `${name} is unexpectedly an empty array.`);\n value.forEach((v, i) => assertPositiveInteger(v, `element ${i + 1} of ${name}`));\n }\n else {\n util.assert(Number.isInteger(value) && value > 0, () => `Expected ${name} to be a positive integer, but got ` +\n `${formatAsFriendlyString(value)}.`);\n }\n}\n/**\n * Format a value into a display-friendly, human-readable fashion.\n *\n * - `null` is formatted as `'null'`\n * - Strings are formated with flanking pair of quotes.\n * - Arrays are formatted with flanking pair of square brackets.\n *\n * @param value The value to display.\n * @return Formatted string.\n */\n// tslint:disable-next-line:no-any\nexport function formatAsFriendlyString(value) {\n if (value === null) {\n return 'null';\n }\n else if (Array.isArray(value)) {\n return '[' + value.map(v => formatAsFriendlyString(v)).join(',') + ']';\n }\n else if (typeof value === 'string') {\n return `\"${value}\"`;\n }\n else {\n return `${value}`;\n }\n}\n/**\n * Returns a function `f2` (decorator) which wraps the original function\n * `f`. `f2` guarantees that `f` can be called at most once\n * every `waitMs` ms. If `f2` is called more often, it will return\n * the last returned result of `f`.\n *\n * @param f The original function `f` to wrap.\n * @param waitMs The time between two consecutive calls to `f` in ms.\n */\nexport function debounce(f, waitMs) {\n let lastTime = util.now();\n let lastResult;\n const f2 = (...args) => {\n const now = util.now();\n if (now - lastTime < waitMs) {\n return lastResult;\n }\n lastTime = now;\n lastResult = f(...args);\n return lastResult;\n };\n return f2;\n}\n/**\n * Returns the fusable activation given a layers identifier.\n *\n * @param activationName The layers identifier string.\n * @return The name of the fusable activation.\n */\nexport function mapActivationToFusedKernel(activationName) {\n if (activationName === 'relu') {\n return 'relu';\n }\n if (activationName === 'linear') {\n return 'linear';\n }\n if (activationName === 'elu') {\n return 'elu';\n }\n return null;\n}\n/**\n * Returns the cartesian product of sets of values.\n * This works the same as itertools.product in Python.\n *\n * Example:\n *\n * filters = [128, 256, 512]\n * paddings = ['same', 'valid']\n *\n * product = [ [128, 'same'], [128, 'valid'], [256, 'same'], [256, 'valid'],\n * [512, 'same'], [512, 'valid']]\n *\n * @param arrayOfValues List/array of values.\n * @return The cartesian product.\n */\nexport function getCartesianProductOfValues(...arrayOfValues) {\n assert(arrayOfValues.length > 0, 'arrayOfValues is empty');\n for (const values of arrayOfValues) {\n assert(Array.isArray(values), 'one of the values is not an array');\n assert(values.length > 0, 'one of the values is empty');\n }\n return arrayOfValues.reduce((products, values) => {\n if (products.length === 0) {\n return values.map(value => [value]);\n }\n return values\n .map(value => {\n return products.map((prevValue) => [...prevValue, value]);\n })\n .reduce((flattenedProduct, unflattenedProduct) => {\n return flattenedProduct.concat(unflattenedProduct);\n }, []);\n }, []);\n}\n//# sourceMappingURL=generic_utils.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the 
LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* Original source: keras/contraints.py */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { serialization, tidy } from '@tensorflow/tfjs-core';\nimport { epsilon } from './backend/common';\nimport { deserializeKerasObject, serializeKerasObject } from './utils/generic_utils';\n/**\n * Helper function used by many of the Constraints to find the L2Norms.\n */\nfunction calcL2Norms(w, axis) {\n return tidy(() => tfc.sqrt(tfc.sum(tfc.mul(w, w), axis, true)));\n}\n/**\n * Base class for functions that impose constraints on weight values\n *\n * @doc {\n * heading: 'Constraints',\n * subheading: 'Classes',\n * namespace: 'constraints'\n * }\n */\nexport class Constraint extends serialization.Serializable {\n getConfig() {\n return {};\n }\n}\nexport class MaxNorm extends Constraint {\n constructor(args) {\n super();\n this.defaultMaxValue = 2;\n this.defaultAxis = 0;\n this.maxValue =\n args.maxValue != null ? args.maxValue : this.defaultMaxValue;\n this.axis = args.axis != null ? args.axis : this.defaultAxis;\n }\n apply(w) {\n return tidy(() => {\n const norms = calcL2Norms(w, this.axis);\n const desired = tfc.clipByValue(norms, 0, this.maxValue);\n return tfc.mul(w, tfc.div(desired, tfc.add(epsilon(), norms)));\n });\n }\n getConfig() {\n return { maxValue: this.maxValue, axis: this.axis };\n }\n}\n/** @nocollapse */\nMaxNorm.className = 'MaxNorm';\nserialization.registerClass(MaxNorm);\nexport class UnitNorm extends Constraint {\n constructor(args) {\n super();\n this.defaultAxis = 0;\n this.axis = args.axis != null ? args.axis : this.defaultAxis;\n }\n apply(w) {\n return tidy(() => tfc.div(w, tfc.add(epsilon(), calcL2Norms(w, this.axis))));\n }\n getConfig() {\n return { axis: this.axis };\n }\n}\n/** @nocollapse */\nUnitNorm.className = 'UnitNorm';\nserialization.registerClass(UnitNorm);\nexport class NonNeg extends Constraint {\n apply(w) {\n return tfc.relu(w);\n }\n}\n/** @nocollapse */\nNonNeg.className = 'NonNeg';\nserialization.registerClass(NonNeg);\nexport class MinMaxNorm extends Constraint {\n constructor(args) {\n super();\n this.defaultMinValue = 0.0;\n this.defaultMaxValue = 1.0;\n this.defaultRate = 1.0;\n this.defaultAxis = 0;\n this.minValue =\n args.minValue != null ? args.minValue : this.defaultMinValue;\n this.maxValue =\n args.maxValue != null ? args.maxValue : this.defaultMaxValue;\n this.rate = args.rate != null ? args.rate : this.defaultRate;\n this.axis = args.axis != null ? 
args.axis : this.defaultAxis;\n }\n apply(w) {\n return tidy(() => {\n const norms = calcL2Norms(w, this.axis);\n const desired = tfc.add(tfc.mul(this.rate, tfc.clipByValue(norms, this.minValue, this.maxValue)), tfc.mul(1.0 - this.rate, norms));\n return tfc.mul(w, tfc.div(desired, tfc.add(epsilon(), norms)));\n });\n }\n getConfig() {\n return {\n minValue: this.minValue,\n maxValue: this.maxValue,\n rate: this.rate,\n axis: this.axis\n };\n }\n}\n/** @nocollapse */\nMinMaxNorm.className = 'MinMaxNorm';\nserialization.registerClass(MinMaxNorm);\n// Maps the JavaScript-like identifier keys to the corresponding registry\n// symbols.\nexport const CONSTRAINT_IDENTIFIER_REGISTRY_SYMBOL_MAP = {\n 'maxNorm': 'MaxNorm',\n 'minMaxNorm': 'MinMaxNorm',\n 'nonNeg': 'NonNeg',\n 'unitNorm': 'UnitNorm'\n};\nexport function serializeConstraint(constraint) {\n return serializeKerasObject(constraint);\n}\nexport function deserializeConstraint(config, customObjects = {}) {\n return deserializeKerasObject(config, serialization.SerializationMap.getMap().classNameMap, customObjects, 'constraint');\n}\nexport function getConstraint(identifier) {\n if (identifier == null) {\n return null;\n }\n if (typeof identifier === 'string') {\n const className = identifier in CONSTRAINT_IDENTIFIER_REGISTRY_SYMBOL_MAP ?\n CONSTRAINT_IDENTIFIER_REGISTRY_SYMBOL_MAP[identifier] :\n identifier;\n const config = { className, config: {} };\n return deserializeConstraint(config);\n }\n else if (identifier instanceof Constraint) {\n return identifier;\n }\n else {\n return deserializeConstraint(identifier);\n }\n}\n//# sourceMappingURL=constraints.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n// tslint:disable-next-line:max-line-length\nimport { MaxNorm, MinMaxNorm, NonNeg, UnitNorm } from './constraints';\n/**\n * MaxNorm weight constraint.\n *\n * Constrains the weights incident to each hidden unit\n * to have a norm less than or equal to a desired value.\n *\n * References\n * - [Dropout: A Simple Way to Prevent Neural Networks from Overfitting\n * Srivastava, Hinton, et al.\n * 2014](http://www.cs.toronto.edu/~rsalakhu/papers/srivastava14a.pdf)\n *\n * @doc {heading: 'Constraints',namespace: 'constraints'}\n */\nexport function maxNorm(args) {\n return new MaxNorm(args);\n}\n/**\n * Constrains the weights incident to each hidden unit to have unit norm.\n *\n * @doc {heading: 'Constraints', namespace: 'constraints'}\n */\nexport function unitNorm(args) {\n return new UnitNorm(args);\n}\n/**\n * Constains the weight to be non-negative.\n *\n * @doc {heading: 'Constraints', namespace: 'constraints'}\n */\nexport function nonNeg() {\n return new NonNeg();\n}\n/** @doc {heading: 'Constraints', namespace: 'constraints'} */\nexport function minMaxNorm(config) {\n return new MinMaxNorm(config);\n}\n//# sourceMappingURL=exports_constraints.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nexport const VALID_DATA_FORMAT_VALUES = ['channelsFirst', 'channelsLast'];\nexport const VALID_PADDING_MODE_VALUES = ['valid', 'same', 
'causal'];\nexport const VALID_POOL_MODE_VALUES = ['max', 'avg'];\nexport const VALID_BIDIRECTIONAL_MERGE_MODES = ['sum', 'mul', 'concat', 'ave'];\nexport const VALID_SAMPLE_WEIGHT_MODES = ['temporal'];\n//# sourceMappingURL=common.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Common functions for TensorFlow.js Layers.\n */\nimport { VALID_DATA_FORMAT_VALUES, VALID_PADDING_MODE_VALUES, VALID_POOL_MODE_VALUES } from './keras_format/common';\nimport { checkStringTypeUnionValue } from './utils/generic_utils';\n// A map from the requested scoped name of a Tensor to the number of Tensors\n// wanting that name so far. This allows enforcing name uniqueness by appending\n// an incrementing index, e.g. scope/name, scope/name_1, scope/name_2, etc.\nconst nameMap = new Map();\nexport function checkDataFormat(value) {\n checkStringTypeUnionValue(VALID_DATA_FORMAT_VALUES, 'DataFormat', value);\n}\nexport function checkPaddingMode(value) {\n checkStringTypeUnionValue(VALID_PADDING_MODE_VALUES, 'PaddingMode', value);\n}\nexport function checkPoolMode(value) {\n checkStringTypeUnionValue(VALID_POOL_MODE_VALUES, 'PoolMode', value);\n}\nconst _nameScopeStack = [];\nconst _nameScopeDivider = '/';\n/**\n * Enter namescope, which can be nested.\n */\nexport function nameScope(name, fn) {\n _nameScopeStack.push(name);\n try {\n const val = fn();\n _nameScopeStack.pop();\n return val;\n }\n catch (e) {\n _nameScopeStack.pop();\n throw e;\n }\n}\n/**\n * Get the current namescope as a flat, concatenated string.\n */\nfunction currentNameScopePrefix() {\n if (_nameScopeStack.length === 0) {\n return '';\n }\n else {\n return _nameScopeStack.join(_nameScopeDivider) + _nameScopeDivider;\n }\n}\n/**\n * Get the name a Tensor (or Variable) would have if not uniqueified.\n * @param tensorName\n * @return Scoped name string.\n */\nexport function getScopedTensorName(tensorName) {\n if (!isValidTensorName(tensorName)) {\n throw new Error('Not a valid tensor name: \\'' + tensorName + '\\'');\n }\n return currentNameScopePrefix() + tensorName;\n}\n/**\n * Get unique names for Tensors and Variables.\n * @param scopedName The fully-qualified name of the Tensor, i.e. as produced by\n * `getScopedTensorName()`.\n * @return A unique version of the given fully scoped name.\n * If this is the first time that the scoped name is seen in this session,\n * then the given `scopedName` is returned unaltered. 
If the same name is\n * seen again (producing a collision), an incrementing suffix is added to the\n * end of the name, so it takes the form 'scope/name_1', 'scope/name_2', etc.\n */\nexport function getUniqueTensorName(scopedName) {\n if (!isValidTensorName(scopedName)) {\n throw new Error('Not a valid tensor name: \\'' + scopedName + '\\'');\n }\n if (!nameMap.has(scopedName)) {\n nameMap.set(scopedName, 0);\n }\n const index = nameMap.get(scopedName);\n nameMap.set(scopedName, nameMap.get(scopedName) + 1);\n if (index > 0) {\n const result = `${scopedName}_${index}`;\n // Mark the composed name as used in case someone wants\n // to call getUniqueTensorName(\"name_1\").\n nameMap.set(result, 1);\n return result;\n }\n else {\n return scopedName;\n }\n}\nconst tensorNameRegex = new RegExp(/^[A-Za-z0-9][-A-Za-z0-9\\._\\/]*$/);\n/**\n * Determine whether a string is a valid tensor name.\n * @param name\n * @returns A Boolean indicating whether `name` is a valid tensor name.\n */\nexport function isValidTensorName(name) {\n return !!name.match(tensorNameRegex);\n}\n//# sourceMappingURL=common.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Math utility functions.\n *\n * This file contains some frequently used math function that operates on\n * number[] or Float32Array and return a number. Many of these functions are\n * not-so-thick wrappers around TF.js Core functions. But they offer the\n * convenience of\n * 1) not having to convert the inputs into Tensors,\n * 2) not having to convert the returned Tensors to numbers.\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { scalar, tensor1d } from '@tensorflow/tfjs-core';\nimport { ValueError } from '../errors';\n/**\n * Determine if a number is an integer.\n */\nexport function isInteger(x) {\n return x === parseInt(x.toString(), 10);\n}\n/**\n * Calculate the product of an array of numbers.\n * @param array The array to calculate the product over.\n * @param begin Beginning index, inclusive.\n * @param end Ending index, exclusive.\n * @return The product.\n */\nexport function arrayProd(array, begin, end) {\n if (begin == null) {\n begin = 0;\n }\n if (end == null) {\n end = array.length;\n }\n let prod = 1;\n for (let i = begin; i < end; ++i) {\n prod *= array[i];\n }\n return prod;\n}\n/**\n * A helper function transforms the two input types to an instance of Tensor1D,\n * so the return value can be fed directly into various TF.js Core functions.\n * @param array\n */\nfunction toArray1D(array) {\n array = Array.isArray(array) ? 
new Float32Array(array) : array;\n return tensor1d(array);\n}\n/**\n * Compute minimum value.\n * @param array\n * @return minimum value.\n */\nexport function min(array) {\n return tfc.min(toArray1D(array)).dataSync()[0];\n}\n/**\n * Compute maximum value.\n * @param array\n * @return maximum value\n */\nexport function max(array) {\n return tfc.max(toArray1D(array)).dataSync()[0];\n}\n/**\n * Compute sum of array.\n * @param array\n * @return The sum.\n */\nexport function sum(array) {\n return tfc.sum(toArray1D(array)).dataSync()[0];\n}\n/**\n * Compute mean of array.\n * @param array\n * @return The mean.\n */\nexport function mean(array) {\n return sum(array) / array.length;\n}\n/**\n * Compute variance of array.\n * @param array\n * @return The variance.\n */\nexport function variance(array) {\n const demeaned = tfc.sub(toArray1D(array), scalar(mean(array)));\n const sumSquare = tfc.sum(tfc.mul(demeaned, demeaned)).dataSync()[0];\n return sumSquare / array.length;\n}\n/**\n * Compute median of array.\n * @param array\n * @return The median value.\n */\nexport function median(array) {\n const arraySorted = array.slice().sort((a, b) => a - b);\n const lowIdx = Math.floor((arraySorted.length - 1) / 2);\n const highIdx = Math.ceil((arraySorted.length - 1) / 2);\n if (lowIdx === highIdx) {\n return arraySorted[lowIdx];\n }\n return (arraySorted[lowIdx] + arraySorted[highIdx]) / 2;\n}\n/**\n * Generate an array of integers in [begin, end).\n * @param begin Beginning integer, inclusive.\n * @param end Ending integer, exclusive.\n * @returns Range array.\n * @throws ValueError, iff `end` < `begin`.\n */\nexport function range(begin, end) {\n if (end < begin) {\n throw new ValueError(`end (${end}) < begin (${begin}) is forbidden.`);\n }\n const out = [];\n for (let i = begin; i < end; ++i) {\n out.push(i);\n }\n return out;\n}\n//# sourceMappingURL=math_utils.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * deeplearn.js backend.\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { onesLike as coreOnesLike, scalar, tensor1d, tidy, where, zerosLike as coreZerosLike } from '@tensorflow/tfjs-core';\nimport { checkDataFormat } from '../common';\nimport { NotImplementedError, ValueError } from '../errors';\nimport * as math_utils from '../utils/math_utils';\nimport { imageDataFormat } from './common';\n// tslint:enable\n/* Setting and getting backend from deeplearn.js. */\n// Default deeplearn.js backend is WebGL (GPU).\nlet backend = 'webgl';\nexport function setBackend(requestedBackend) {\n tfc.setBackend(requestedBackend);\n backend = requestedBackend;\n}\nexport function getBackend() {\n return backend;\n}\n/**\n * Indicates whether the backend is operating symbolically.\n *\n * This function will be used to determine how to interpret user code. 
If\n * it returns true, calls to the backend construct a symbolic graph; if\n * it returns false, calls to the backend execute immediately.\n */\nexport function isBackendSymbolic() {\n return false;\n}\n/**\n * Get the number of elements in a Tensor.\n * @param x The Tensor.\n * @return Number of elements in `x`.\n */\nexport function countParams(x) {\n const shape = x.shape;\n if (shape.length > 0) {\n return shape.reduce((a, b) => a * b);\n }\n else {\n // Scalar.\n return 1;\n }\n}\n/**\n * Casts a tensor to a different dtype and returns it.\n * @param x Input tensor.\n * @param dtype String: 'float32'|'int32'|'bool'.\n * @returns Tensor of the specified `dtype`.\n */\nexport function cast(x, dtype) {\n return x.asType(dtype);\n}\n/**\n * Adds a 1-sized dimension at index \"axis\".\n * @param x Input tensor.\n * @param axis Position where to add the new axis.\n * @returns Result of the dimension expansion.\n */\nexport function expandDims(x, axis = -1) {\n const outShape = x.shape.slice();\n if (axis < 0) {\n axis = outShape.length + axis + 1;\n }\n outShape.splice(axis, 0, 1);\n return x.reshape(outShape);\n}\n/**\n * Repeats a 2D tensor.\n *\n * If `x` has shape `[samples, dim]` and `n` is 2, for example, the output\n * will have shape `[samples, 2, dim]`.\n *\n * @param x Input tensor.\n * @param n Integer, number of times to repeat.\n * @returns The result of the repeat operation.\n * @throws ValueError: If input tensor is not 2D.\n */\nexport function repeat(x, n) {\n return tidy(() => {\n if (x.shape.length !== 2) {\n throw new ValueError(`repeat() expects a rank-2 tensor, but received a ` +\n `rank-${x.shape.length} tensor.`);\n }\n const y = expandDims(x, 1);\n return tile(y, [1, n, 1]);\n });\n}\n/**\n * Flatten a Tensor into 1D.\n * @param x Input tensor.\n * @return The result of the flattening `x`.\n */\nexport function flatten(x) {\n const newShape = [math_utils.arrayProd(x.shape)];\n return x.reshape(newShape);\n}\n/**\n * Turn a nD tensor into a 2D tensor with same 0th dimension.\n * In other words, it flattens each data samples of a batch.\n *\n * @param x The tensor to flatten. The rank of this tensor is required to be 2\n * or higher.\n * @return The result of the flattening.\n */\nexport function batchFlatten(x) {\n if (x.rank <= 1) {\n throw new ValueError(`batchFlatten requires a minimum rank of 2. 
Got rank: ${x.rank}.`);\n }\n const newShape = [x.shape[0], math_utils.arrayProd(x.shape, 1)];\n return x.reshape(newShape);\n}\n/**\n * Do slicing along the first axis.\n * @param array input `tf.Tensor`.\n * @param start starting index, inclusive.\n * @param size size of the slice along the first axis.\n * @returns result of the slicing.\n * @throws ValueError: If `array` is of an unsupported subtype of `tf.Tensor`.\n */\nexport function sliceAlongFirstAxis(array, start, size) {\n return tidy(() => {\n switch (array.rank) {\n case 1:\n return tfc.slice1d(array, start, size);\n case 2:\n return tfc.slice2d(array, [start, 0], [size, array.shape[1]]);\n case 3:\n return tfc.slice3d(array, [start, 0, 0], [size, array.shape[1], array.shape[2]]);\n case 4:\n return tfc.slice4d(array, [start, 0, 0, 0], [size, array.shape[1], array.shape[2], array.shape[3]]);\n case 5:\n return tfc.slice(array, [start, 0, 0, 0, 0], [\n size, array.shape[1], array.shape[2], array.shape[3], array.shape[4]\n ]);\n case 6:\n return tfc.slice(array, [start, 0, 0, 0, 0, 0], [\n size, array.shape[1], array.shape[2], array.shape[3], array.shape[4],\n array.shape[5]\n ]);\n default:\n throw new ValueError(`sliceAlongFirstAxis() received an unsupported tensor rank: ` +\n `${array.rank}`);\n }\n });\n}\n/**\n * Do slicing along the last axis.\n * @param array input `tf.Tensor`.\n * @param start starting index, inclusive.\n * @param size size of the slice along the last axis.\n * @returns result of the slicing.\n * @throws ValueError: If `array` is of an unsupported subtype of `tf.Tensor`.\n */\nexport function sliceAlongLastAxis(array, start, size) {\n return tidy(() => {\n switch (array.rank) {\n case 1:\n return tfc.slice1d(array, start, size);\n case 2:\n return tfc.slice2d(array, [0, start], [array.shape[0], size]);\n case 3:\n return tfc.slice3d(array, [0, 0, start], [array.shape[0], array.shape[1], size]);\n case 4:\n return tfc.slice4d(array, [0, 0, 0, start], [array.shape[0], array.shape[1], array.shape[2], size]);\n default:\n throw new ValueError(`sliceAlongLastAxis() received an unsupported tensor rank: ` +\n `${array.rank}`);\n }\n });\n}\n/**\n * Do slicing along the sepcified axis.\n * @param array input `tf.Tensor`.\n * @param start starting index, inclusive.\n * @param size of the slice along the chosen axis.\n * @param choose an axis.\n * @returns result of the slicing.\n * @throws ValueError: If `array` is of an unsupported subtype of `tf.Tensor`.\n */\nexport function sliceAlongAxis(array, start, size, axis) {\n return tidy(() => {\n switch (array.rank) {\n case 1:\n return tfc.slice1d(array, start, size);\n case 2:\n switch (axis) {\n case 1:\n return sliceAlongFirstAxis(array, start, size);\n case 2:\n return sliceAlongLastAxis(array, start, size);\n default:\n throw new ValueError(`The axis is not within the rank of the tensor ` +\n `${axis}`);\n }\n case 3:\n switch (axis) {\n case 1:\n return sliceAlongFirstAxis(array, start, size);\n case 2:\n return tfc.slice3d(array, [0, start, 0], [array.shape[0], size, array.shape[2]]);\n case 3:\n return sliceAlongLastAxis(array, start, size);\n default:\n throw new ValueError(`The axis is not within the rank of the tensor ` +\n `${axis}`);\n }\n case 4:\n switch (axis) {\n case 1:\n return sliceAlongFirstAxis(array, start, size);\n case 2:\n return tfc.slice4d(array, [0, start, 0, 0], [array.shape[0], size, array.shape[2], array.shape[3]]);\n case 3:\n return tfc.slice4d(array, [0, 0, start, 0], [array.shape[0], array.shape[1], size, array.shape[3]]);\n case 
4:\n return sliceAlongLastAxis(array, start, size);\n default:\n throw new ValueError(`The axis is not within the rank of the tensor ` +\n `${axis}`);\n }\n default:\n throw new ValueError(`sliceAlongLastAxis() received an unsupported tensor rank: ` +\n `${array.rank}`);\n }\n });\n}\n/**\n * Concatenates a list of tensors alongside the specified axis.\n * @param tensors `Array` of tensors to concatenate.\n * @param axis Concatenation axis.\n * @returns The result of the concatenation.\n */\nexport function concatenate(tensors, axis = -1) {\n let rank;\n if (axis < 0) {\n rank = tensors[0].rank;\n if (rank !== 0) {\n axis = rank;\n }\n else {\n axis = 0;\n }\n }\n if (axis === tensors[0].rank) {\n // Porting Note: This is necessary because tfc.concat() requires axis to be\n // in the interval [-rank, rank).\n axis = -1;\n }\n // Porting Note: Sparse concat is not supported yet.\n return tfc.concat(tensors, axis);\n}\n/**\n * Concatenate two arrays along the first dimension.\n * @param a The 1st `tf.Tensor` to concatenate.\n * @param b The 2nd `tf.Tensor` to concatenate.\n * @returns Result of the concatenation.\n * @throws ValueError: If `a` is of an unsupported subtype of `tf.Tensor`.\n */\nexport function concatAlongFirstAxis(a, b) {\n switch (a.rank) {\n case 1:\n return tfc.concat1d([a, b]);\n case 2:\n return tfc.concat2d([a, b], 0);\n case 3:\n return tfc.concat3d([a, b], 0);\n case 4:\n return tfc.concat4d([a, b], 0);\n default:\n throw new ValueError(`concatAlongFirstAxis() received an unsupported ` +\n `tensor rank: ${a.rank}`);\n }\n}\n/**\n * Creates a tensor by tiling `x` by `n`.\n * @param x A tensor.\n * @param n An Array of integers or a single integer. If an Array, the length\n * must be the same as the number of dimensions in `x`. If a single integer,\n * it will be treated as an Array of length 1.\n */\nexport function tile(x, n) {\n if (!Array.isArray(n)) {\n n = [n];\n }\n if (x.rank !== n.length) {\n throw new ValueError(`The length of input n (${n.length}) does not match ` +\n `the number of dimensions in input x (${x.rank})`);\n }\n return tfc.tile(x, n);\n}\n/* Creation of random tensors. */\n/**\n * Get a tensor with normal distribution of values.\n *\n * @param shape Shape of the tensor.\n * @param mean mean value of the normal distribution.\n * @param stddev standard deviation of the normal distribution.\n * @param dtype\n * @param seed\n * @return The normal tensor.\n */\nexport function randomNormal(shape, mean = 0.0, stddev = 1.0, dtype, seed) {\n return tfc.randomNormal(shape, mean, stddev, dtype, seed);\n}\n/* Linear Algebra */\n/**\n * Multiply two tensors and returns the result as a tensor.\n *\n * For 2D tensors, this is equivalent to matrix multiplication (matMul).\n * For tensors of higher ranks, it follows the Theano behavior,\n * (e.g. `(2, 3) * (4, 3, 5) -> (2, 4, 5)`). 
From the Theano documentation:\n *\n * For N dimensions it is a sum product over the last axis of x and the\n * second-to-last of y:\n *\n * @param a A tensor of at least rank 2.\n * @param b A tensor of at least rank 2.\n * @param activation (optional) A string identifying the activation\n * function.\n * @return Result of the dot operation.\n */\nexport function dot(a, b, activation, bias) {\n if ((a.rank < 2) || (b.rank < 2)) {\n throw new NotImplementedError(`dot requires both inputs to be rank >= 2` +\n ` but got x shape = ${a.shape} and y shape = ${b.shape}`);\n }\n if (b.rank >= 3) {\n const xLastDim = a.shape.slice(-1)[0];\n const ySecondLastDim = b.shape.slice(-2)[0];\n if (xLastDim !== ySecondLastDim) {\n throw new NotImplementedError(`If rank y >= 3, then the second last dim` +\n ` of y must equal the last dim of x but got x shape = ${a.shape} and ` +\n ` y shape = ${b.shape}`);\n }\n }\n // Handle basic 2D x 2D case.\n if ((a.rank === 2) && (b.rank === 2)) {\n const transposeA = false;\n const transposeB = false;\n // tfc.fused.matMul only fuses certain activation functions. Unsupported\n // activation functions are treated as 'linear' activations, which is\n // equivalent to a no-op.\n return tfc.fused.matMul({\n a,\n b: b,\n transposeA,\n transposeB,\n bias: bias ? reshapeBias(a.rank, bias, imageDataFormat()) : null,\n activation\n });\n }\n else {\n // Reshape x into the analogous 2D Tensor.\n const aFirstDims = a.shape.slice(); // Holds all but the last dim of x.\n const aLastDim = aFirstDims.pop();\n a = a.reshape([-1, aLastDim]);\n // Reshape y into the analogous 2D Tensor, and keep track of the\n // required dimensions to reproduce the output shape.\n const bShape = b.shape.slice();\n const bLastDim = bShape.pop();\n const ySecondLastDim = bShape.pop();\n const yOtherDims = [...bShape, bLastDim];\n // permutation should be like [r-2, 0, 1, 2, ... r-4, r-3, r-1]\n // where r is the rank of y.\n const perm = Array.from({ length: b.rank }, (_, i) => {\n if (i === 0) {\n return b.rank - 2;\n }\n else if (i <= b.rank - 2) {\n return i - 1;\n }\n return i;\n });\n b = b.transpose(perm).reshape([ySecondLastDim, -1]);\n // Multiply x and y as 2D Tensors, and then reshape back to original.\n const outputShape = [...aFirstDims, ...yOtherDims];\n const transposeA = false;\n const transposeB = false;\n return tfc.fused\n .matMul({\n a,\n b,\n transposeA,\n transposeB,\n bias: bias ? reshapeBias(a.rank, bias, imageDataFormat()) : null,\n activation\n })\n .reshape(outputShape);\n }\n}\n/**\n * Compute the sign Tensor of an input Tensor.\n *\n * Elements of the input `tf.Tensor` that are === 0 are mapped to 0.\n * Elements of the input `tf.Tensor` that are > 0 are mapped to 1.\n * Elements of the input `tf.Tensor` that are < 0 are mapped to -1.\n *\n * @param x Input `tf.Tensor`.\n * @return The sign `tf.Tensor`.\n */\nexport function sign(x) {\n // TODO(cais): Move to the core.\n return tidy(() => {\n const zerosLikeX = coreZerosLike(x);\n const onesLikeX = coreOnesLike(x);\n return where(tfc.equal(x, zerosLikeX), zerosLikeX, where(tfc.greater(x, coreZerosLike(x)), onesLikeX, tfc.mul(-1, onesLikeX)));\n });\n}\n/**\n * Computes the one-hot representation of an integer tensor.\n * @param indices nD integer tensor of shape\n * `(batch_size, dim1, dim2, ... dim(n-1))`\n * @param numClasses Integer, number of classes to consider.\n * @returns (n + 1)D one hot representation of the input\n * with shape `(batch_size, dim1, dim2, ... 
dim(n-1), num_classes)`\n */\nexport function oneHot(indices, numClasses) {\n return tidy(() => {\n if (indices.rank !== 1) {\n throw new Error('Only 1D one-hot tensors are supported in the ' +\n 'deeplearn backend, at present.');\n }\n indices = indices.toInt();\n return tfc.oneHot(indices, numClasses).toFloat();\n });\n}\n/* Elementary math functions. */\n/**\n * Retrieves the elements of indices `indices` in the tensor `reference`.\n * @param reference A tensor.\n * @param indices An integer tensor of indices or an `Array` of integers.\n * @param axis Axis along which to perform the gather operation.\n * @returns The result of the gathering as a tensor.\n */\nexport function gather(reference, indices, axis) {\n return tidy(() => {\n if (Array.isArray(indices)) {\n indices = tensor1d(indices, 'int32');\n }\n else {\n indices = indices.toInt();\n }\n return tfc.gather(reference, indices, axis);\n });\n}\n/**\n * Element-wise square.\n * @param x Input tensor.\n * @return element-wise x^2\n */\nexport function square(x) {\n return tfc.mul(x, x);\n}\n/**\n * Element-wise exponentiation.\n *\n * Porting Note: In PyKeras, `a` (the exponent) is a Python integer, which\n * takes advatnage of the backend's (e.g., TensorFlow's) automatic\n * conversion to tensor. Here we allow `a` to be either a number or a tensor.\n *\n * @param x The base tensor.\n * @param a The exponent, tensor or number. If a number, it is rounded to the\n * nearest integer and converted to a tensor.\n * @returns A tensor of the same shape as `x`.\n */\nexport function pow(x, a) {\n return tidy(() => {\n if (typeof (a) === 'number') {\n a = scalar(Math.round(a), 'int32');\n }\n if (a.dtype !== 'int32') {\n throw new NotImplementedError(`Non-int32 dtype (${a.dtype}) is not supported by pow() yet`);\n }\n return tfc.pow(x, a);\n });\n}\n/**\n * Reshapes bias tensor according to rank of x.\n */\nfunction reshapeBias(xRank, bias, dataFormat) {\n const biasShape = bias.shape;\n if (bias.rank !== 1 && bias.rank !== xRank) {\n throw new ValueError(`Unexpected bias dimensions: ${bias.rank}` +\n `; expected it to be 1 or ${xRank}`);\n }\n if (xRank === 5) {\n if (dataFormat === 'channelsFirst') {\n if (biasShape.length === 1) {\n return bias.reshape([1, biasShape[0], 1, 1, 1]);\n }\n else {\n return bias.reshape([1, biasShape[3], biasShape[0], biasShape[1], biasShape[2]]);\n }\n }\n else if (dataFormat === 'channelsLast') {\n if (biasShape.length === 1) {\n return bias.reshape([1, 1, 1, 1, biasShape[0]]);\n }\n else {\n return bias.reshape([1].concat(biasShape));\n }\n }\n }\n else if (xRank === 4) {\n if (dataFormat === 'channelsFirst') {\n if (biasShape.length === 1) {\n return bias.reshape([1, biasShape[0], 1, 1]);\n }\n else {\n return bias.reshape([1, biasShape[2], biasShape[0], biasShape[1]]);\n }\n }\n else if (dataFormat === 'channelsLast') {\n if (biasShape.length === 1) {\n return bias.reshape([1, 1, 1, biasShape[0]]);\n }\n else {\n return bias.reshape([1].concat(biasShape));\n }\n }\n }\n else if (xRank === 3) {\n if (dataFormat === 'channelsFirst') {\n if (biasShape.length === 1) {\n return bias.reshape([1, biasShape[0], 1]);\n }\n else {\n return bias.reshape([1, biasShape[1], biasShape[0]]);\n }\n }\n else if (dataFormat === 'channelsLast') {\n if (biasShape.length === 1) {\n return bias.reshape([1, 1, biasShape[0]]);\n }\n else {\n return bias.reshape([1].concat(biasShape));\n }\n }\n }\n else if (xRank < 3) {\n return bias;\n }\n throw new ValueError(`Unsupported input rank by biasAdd: ${bias.rank}`);\n}\n/* 
Neural-network operations. */\n/**\n * Add a bias to a tensor.\n *\n * @param x The tensor to add the bias to.\n * @param bias The bias to add to `x`. Must be 1D or the same rank as `x`.\n * @return Result of the bias adding.\n * @throws ValueError: If the rank of `bias` is incorrect.\n */\nexport function biasAdd(x, bias, dataFormat) {\n return tidy(() => {\n if (dataFormat == null) {\n dataFormat = imageDataFormat();\n }\n checkDataFormat(dataFormat);\n return x.add(reshapeBias(x.rank, bias, dataFormat));\n });\n}\n/**\n * Exponential linear unit (ELU).\n * @param x A tensor or variable to compute the activation function for.\n * @param alpha: A scalar, a scaling factor for the negative section.\n * @return Output of the ELU operation.\n */\nexport function elu(x, alpha = 1) {\n // TODO(cais): Add support for alpha values other than 1.\n if (alpha !== 1) {\n throw new NotImplementedError(`Support for alpha values other than 1 (${alpha}) is not implemented ` +\n `yet.`);\n }\n return tfc.elu(x);\n}\n/**\n * Softsign of a tensor.\n *\n * Defined as x / (abs(x) + 1), element-wise.\n *\n * @param x: Input.\n * @returns Output.\n */\nexport function softsign(x) {\n return tidy(() => tfc.div(x, tfc.abs(x).add(1)));\n}\n/**\n * Sets entries in `x` to zero at random, while scaling the entire tensor.\n *\n * @param x input tensor.\n * @param level fraction of the entries in the tensor that will be set to 0.\n * @param noiseShape shape of randomly generated keep/drop flags, must be\n * broadcastable to the shape of `x`. Optional.\n * @param seed random seed to ensure determinism. Optional.\n * @returns Result of the dropout operation.\n */\nexport function dropout(x, level, noiseShape, seed) {\n return tidy(() => tfc.dropout(x, level, noiseShape, seed));\n}\n/**\n * Element-wise, segment-wise linear approximation of sigmoid.\n *\n * Returns `0.` if `x < -2.5`, `1.` if `x > 2.5`.\n * In `-2.5 <= x <= 2.5`, returns `0.2 * x + 0.5`.\n *\n * @param x Input tensor.\n * @returns Output tensor.\n */\nexport function hardSigmoid(x) {\n return tidy(() => {\n const y = tfc.add(.5, tfc.mul(.2, x));\n return tfc.clipByValue(y, 0, 1);\n });\n}\n/**\n * Invoke `x` in the training phase, and `alt` otherwise.\n *\n * Porting Note: We do not create placeholder tensors for the `training`\n * boolean flag here, because there is no such thing in the TF.js imperative\n * backend.\n *\n * @param x The function to invoke iff `training` is `true`.\n * @param alt The function to invoke iff `training` is `false`.\n * @param training Boolean flag for whether training phase is active.\n * @returns The return value of `x()` if `training` is `true`, or the return\n * value of `alt()` if `training` is `false`.\n */\nexport function inTrainPhase(x, alt, training = false) {\n return training ? 
x() : alt();\n}\n//# sourceMappingURL=tfjs_backend.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nexport const VALID_FAN_MODE_VALUES = ['fanIn', 'fanOut', 'fanAvg'];\nexport const VALID_DISTRIBUTION_VALUES = ['normal', 'uniform', 'truncatedNormal'];\n// We can't easily extract a string[] from the string union type, but we can\n// recapitulate the list, enforcing at compile time that the values are valid\n// and that we have the right number of them.\n/**\n * A string array of valid Initializer class names.\n *\n * This is guaranteed to match the `InitializerClassName` union type.\n */\nexport const initializerClassNames = [\n 'Zeros', 'Ones', 'Constant', 'RandomNormal', 'RandomUniform',\n 'TruncatedNormal', 'VarianceScaling', 'Orthogonal', 'Identity'\n];\n//# sourceMappingURL=initializer_config.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport { eye, linalg, mul, ones, randomUniform, scalar, serialization, tidy, truncatedNormal, zeros } from '@tensorflow/tfjs-core';\nimport * as K from './backend/tfjs_backend';\nimport { checkDataFormat } from './common';\nimport { NotImplementedError, ValueError } from './errors';\nimport { VALID_DISTRIBUTION_VALUES, VALID_FAN_MODE_VALUES } from './keras_format/initializer_config';\nimport { checkStringTypeUnionValue, deserializeKerasObject, serializeKerasObject } from './utils/generic_utils';\nimport { arrayProd } from './utils/math_utils';\nexport function checkFanMode(value) {\n checkStringTypeUnionValue(VALID_FAN_MODE_VALUES, 'FanMode', value);\n}\nexport function checkDistribution(value) {\n checkStringTypeUnionValue(VALID_DISTRIBUTION_VALUES, 'Distribution', value);\n}\n/**\n * Initializer base class.\n *\n * @doc {\n * heading: 'Initializers', subheading: 'Classes', namespace: 'initializers'}\n */\nexport class Initializer extends serialization.Serializable {\n fromConfigUsesCustomObjects() {\n return false;\n }\n getConfig() {\n return {};\n }\n}\nexport class Zeros extends Initializer {\n apply(shape, dtype) {\n return zeros(shape, dtype);\n }\n}\n/** @nocollapse */\nZeros.className = 'Zeros';\nserialization.registerClass(Zeros);\nexport class Ones extends Initializer {\n apply(shape, dtype) {\n return ones(shape, dtype);\n }\n}\n/** @nocollapse */\nOnes.className = 'Ones';\nserialization.registerClass(Ones);\nexport class Constant extends Initializer {\n constructor(args) {\n super();\n if (typeof args !== 'object') {\n throw new ValueError(`Expected argument of type ConstantConfig but got ${args}`);\n }\n if (args.value === undefined) {\n throw new ValueError(`config must have value set but got ${args}`);\n }\n this.value = args.value;\n }\n apply(shape, dtype) {\n return tidy(() => mul(scalar(this.value), ones(shape, dtype)));\n }\n getConfig() {\n return {\n value: this.value,\n };\n }\n}\n/** @nocollapse */\nConstant.className = 'Constant';\nserialization.registerClass(Constant);\nexport class RandomUniform extends Initializer {\n constructor(args) {\n super();\n this.DEFAULT_MINVAL = -0.05;\n this.DEFAULT_MAXVAL = 0.05;\n this.minval = 
args.minval || this.DEFAULT_MINVAL;\n this.maxval = args.maxval || this.DEFAULT_MAXVAL;\n this.seed = args.seed;\n }\n apply(shape, dtype) {\n return randomUniform(shape, this.minval, this.maxval, dtype);\n }\n getConfig() {\n return { minval: this.minval, maxval: this.maxval, seed: this.seed };\n }\n}\n/** @nocollapse */\nRandomUniform.className = 'RandomUniform';\nserialization.registerClass(RandomUniform);\nexport class RandomNormal extends Initializer {\n constructor(args) {\n super();\n this.DEFAULT_MEAN = 0.;\n this.DEFAULT_STDDEV = 0.05;\n this.mean = args.mean || this.DEFAULT_MEAN;\n this.stddev = args.stddev || this.DEFAULT_STDDEV;\n this.seed = args.seed;\n }\n apply(shape, dtype) {\n dtype = dtype || 'float32';\n if (dtype !== 'float32' && dtype !== 'int32') {\n throw new NotImplementedError(`randomNormal does not support dType ${dtype}.`);\n }\n return K.randomNormal(shape, this.mean, this.stddev, dtype, this.seed);\n }\n getConfig() {\n return { mean: this.mean, stddev: this.stddev, seed: this.seed };\n }\n}\n/** @nocollapse */\nRandomNormal.className = 'RandomNormal';\nserialization.registerClass(RandomNormal);\nexport class TruncatedNormal extends Initializer {\n constructor(args) {\n super();\n this.DEFAULT_MEAN = 0.;\n this.DEFAULT_STDDEV = 0.05;\n this.mean = args.mean || this.DEFAULT_MEAN;\n this.stddev = args.stddev || this.DEFAULT_STDDEV;\n this.seed = args.seed;\n }\n apply(shape, dtype) {\n dtype = dtype || 'float32';\n if (dtype !== 'float32' && dtype !== 'int32') {\n throw new NotImplementedError(`truncatedNormal does not support dType ${dtype}.`);\n }\n return truncatedNormal(shape, this.mean, this.stddev, dtype, this.seed);\n }\n getConfig() {\n return { mean: this.mean, stddev: this.stddev, seed: this.seed };\n }\n}\n/** @nocollapse */\nTruncatedNormal.className = 'TruncatedNormal';\nserialization.registerClass(TruncatedNormal);\nexport class Identity extends Initializer {\n constructor(args) {\n super();\n this.gain = args.gain != null ? 
args.gain : 1.0;\n }\n apply(shape, dtype) {\n return tidy(() => {\n if (shape.length !== 2 || shape[0] !== shape[1]) {\n throw new ValueError('Identity matrix initializer can only be used for' +\n ' 2D square matrices.');\n }\n else {\n return mul(this.gain, eye(shape[0]));\n }\n });\n }\n getConfig() {\n return { gain: this.gain };\n }\n}\n/** @nocollapse */\nIdentity.className = 'Identity';\nserialization.registerClass(Identity);\n/**\n * Computes the number of input and output units for a weight shape.\n * @param shape Shape of weight.\n * @param dataFormat data format to use for convolution kernels.\n * Note that all kernels in Keras are standardized on the\n * CHANNEL_LAST ordering (even when inputs are set to CHANNEL_FIRST).\n * @return An length-2 array: fanIn, fanOut.\n */\nfunction computeFans(shape, dataFormat = 'channelsLast') {\n let fanIn;\n let fanOut;\n checkDataFormat(dataFormat);\n if (shape.length === 2) {\n fanIn = shape[0];\n fanOut = shape[1];\n }\n else if ([3, 4, 5].indexOf(shape.length) !== -1) {\n if (dataFormat === 'channelsFirst') {\n const receptiveFieldSize = arrayProd(shape, 2);\n fanIn = shape[1] * receptiveFieldSize;\n fanOut = shape[0] * receptiveFieldSize;\n }\n else if (dataFormat === 'channelsLast') {\n const receptiveFieldSize = arrayProd(shape, 0, shape.length - 2);\n fanIn = shape[shape.length - 2] * receptiveFieldSize;\n fanOut = shape[shape.length - 1] * receptiveFieldSize;\n }\n }\n else {\n const shapeProd = arrayProd(shape);\n fanIn = Math.sqrt(shapeProd);\n fanOut = Math.sqrt(shapeProd);\n }\n return [fanIn, fanOut];\n}\nexport class VarianceScaling extends Initializer {\n /**\n * Constructor of VarianceScaling.\n * @throws ValueError for invalid value in scale.\n */\n constructor(args) {\n super();\n if (args.scale < 0.0) {\n throw new ValueError(`scale must be a positive float. Got: ${args.scale}`);\n }\n this.scale = args.scale == null ? 1.0 : args.scale;\n this.mode = args.mode == null ? 'fanIn' : args.mode;\n checkFanMode(this.mode);\n this.distribution =\n args.distribution == null ? 'normal' : args.distribution;\n checkDistribution(this.distribution);\n this.seed = args.seed;\n }\n apply(shape, dtype) {\n const fans = computeFans(shape);\n const fanIn = fans[0];\n const fanOut = fans[1];\n let scale = this.scale;\n if (this.mode === 'fanIn') {\n scale /= Math.max(1, fanIn);\n }\n else if (this.mode === 'fanOut') {\n scale /= Math.max(1, fanOut);\n }\n else {\n scale /= Math.max(1, (fanIn + fanOut) / 2);\n }\n if (this.distribution === 'normal') {\n const stddev = Math.sqrt(scale);\n dtype = dtype || 'float32';\n if (dtype !== 'float32' && dtype !== 'int32') {\n throw new NotImplementedError(`${this.getClassName()} does not support dType ${dtype}.`);\n }\n return truncatedNormal(shape, 0, stddev, dtype, this.seed);\n }\n else {\n const limit = Math.sqrt(3 * scale);\n return randomUniform(shape, -limit, limit, dtype);\n }\n }\n getConfig() {\n return {\n scale: this.scale,\n mode: this.mode,\n distribution: this.distribution,\n seed: this.seed\n };\n }\n}\n/** @nocollapse */\nVarianceScaling.className = 'VarianceScaling';\nserialization.registerClass(VarianceScaling);\nexport class GlorotUniform extends VarianceScaling {\n /**\n * Constructor of GlorotUniform\n * @param scale\n * @param mode\n * @param distribution\n * @param seed\n */\n constructor(args) {\n super({\n scale: 1.0,\n mode: 'fanAvg',\n distribution: 'uniform',\n seed: args == null ? 
null : args.seed\n });\n }\n getClassName() {\n // In Python Keras, GlorotUniform is not a class, but a helper method\n // that creates a VarianceScaling object. Use 'VarianceScaling' as\n // class name to be compatible with that.\n return VarianceScaling.className;\n }\n}\n/** @nocollapse */\nGlorotUniform.className = 'GlorotUniform';\nserialization.registerClass(GlorotUniform);\nexport class GlorotNormal extends VarianceScaling {\n /**\n * Constructor of GlorotNormal.\n * @param scale\n * @param mode\n * @param distribution\n * @param seed\n */\n constructor(args) {\n super({\n scale: 1.0,\n mode: 'fanAvg',\n distribution: 'normal',\n seed: args == null ? null : args.seed\n });\n }\n getClassName() {\n // In Python Keras, GlorotNormal is not a class, but a helper method\n // that creates a VarianceScaling object. Use 'VarianceScaling' as\n // class name to be compatible with that.\n return VarianceScaling.className;\n }\n}\n/** @nocollapse */\nGlorotNormal.className = 'GlorotNormal';\nserialization.registerClass(GlorotNormal);\nexport class HeNormal extends VarianceScaling {\n constructor(args) {\n super({\n scale: 2.0,\n mode: 'fanIn',\n distribution: 'normal',\n seed: args == null ? null : args.seed\n });\n }\n getClassName() {\n // In Python Keras, HeNormal is not a class, but a helper method\n // that creates a VarianceScaling object. Use 'VarianceScaling' as\n // class name to be compatible with that.\n return VarianceScaling.className;\n }\n}\n/** @nocollapse */\nHeNormal.className = 'HeNormal';\nserialization.registerClass(HeNormal);\nexport class HeUniform extends VarianceScaling {\n constructor(args) {\n super({\n scale: 2.0,\n mode: 'fanIn',\n distribution: 'uniform',\n seed: args == null ? null : args.seed\n });\n }\n getClassName() {\n // In Python Keras, HeUniform is not a class, but a helper method\n // that creates a VarianceScaling object. Use 'VarianceScaling' as\n // class name to be compatible with that.\n return VarianceScaling.className;\n }\n}\n/** @nocollapse */\nHeUniform.className = 'HeUniform';\nserialization.registerClass(HeUniform);\nexport class LeCunNormal extends VarianceScaling {\n constructor(args) {\n super({\n scale: 1.0,\n mode: 'fanIn',\n distribution: 'normal',\n seed: args == null ? null : args.seed\n });\n }\n getClassName() {\n // In Python Keras, LeCunNormal is not a class, but a helper method\n // that creates a VarianceScaling object. Use 'VarianceScaling' as\n // class name to be compatible with that.\n return VarianceScaling.className;\n }\n}\n/** @nocollapse */\nLeCunNormal.className = 'LeCunNormal';\nserialization.registerClass(LeCunNormal);\nexport class LeCunUniform extends VarianceScaling {\n constructor(args) {\n super({\n scale: 1.0,\n mode: 'fanIn',\n distribution: 'uniform',\n seed: args == null ? null : args.seed\n });\n }\n getClassName() {\n // In Python Keras, LeCunUniform is not a class, but a helper method\n // that creates a VarianceScaling object. Use 'VarianceScaling' as\n // class name to be compatible with that.\n return VarianceScaling.className;\n }\n}\n/** @nocollapse */\nLeCunUniform.className = 'LeCunNormal';\nserialization.registerClass(LeCunUniform);\nexport class Orthogonal extends Initializer {\n constructor(args) {\n super();\n this.DEFAULT_GAIN = 1;\n this.gain = args.gain == null ? 
this.DEFAULT_GAIN : args.gain;\n this.seed = args.seed;\n if (this.seed != null) {\n throw new NotImplementedError('Random seed is not implemented for Orthogonal Initializer yet.');\n }\n }\n apply(shape, dtype) {\n return tidy(() => {\n if (shape.length < 2) {\n throw new NotImplementedError('Shape must be at least 2D.');\n }\n if (shape[0] * shape[1] > 2000) {\n console.warn(`Orthogonal initializer is being called on a matrix with more ` +\n `than 2000 (${shape[0] * shape[1]}) elements: ` +\n `Slowness may result.`);\n }\n // TODO(cais): Add seed support.\n const normalizedShape = shape[0] > shape[1] ? [shape[1], shape[0]] : shape;\n const a = K.randomNormal(normalizedShape, 0, 1, 'float32');\n let q = linalg.gramSchmidt(a);\n if (shape[0] > shape[1]) {\n q = q.transpose();\n }\n return mul(this.gain, q);\n });\n }\n getConfig() {\n return {\n gain: this.gain,\n seed: this.seed,\n };\n }\n}\n/** @nocollapse */\nOrthogonal.className = 'Orthogonal';\nserialization.registerClass(Orthogonal);\n// Maps the JavaScript-like identifier keys to the corresponding registry\n// symbols.\nexport const INITIALIZER_IDENTIFIER_REGISTRY_SYMBOL_MAP = {\n 'constant': 'Constant',\n 'glorotNormal': 'GlorotNormal',\n 'glorotUniform': 'GlorotUniform',\n 'heNormal': 'HeNormal',\n 'heUniform': 'HeUniform',\n 'identity': 'Identity',\n 'leCunNormal': 'LeCunNormal',\n 'leCunUniform': 'LeCunUniform',\n 'ones': 'Ones',\n 'orthogonal': 'Orthogonal',\n 'randomNormal': 'RandomNormal',\n 'randomUniform': 'RandomUniform',\n 'truncatedNormal': 'TruncatedNormal',\n 'varianceScaling': 'VarianceScaling',\n 'zeros': 'Zeros'\n};\nfunction deserializeInitializer(config, customObjects = {}) {\n return deserializeKerasObject(config, serialization.SerializationMap.getMap().classNameMap, customObjects, 'initializer');\n}\nexport function serializeInitializer(initializer) {\n return serializeKerasObject(initializer);\n}\nexport function getInitializer(identifier) {\n if (typeof identifier === 'string') {\n const className = identifier in INITIALIZER_IDENTIFIER_REGISTRY_SYMBOL_MAP ?\n INITIALIZER_IDENTIFIER_REGISTRY_SYMBOL_MAP[identifier] :\n identifier;\n /* We have four 'helper' classes for common initializers that\n all get serialized as 'VarianceScaling' and shouldn't go through\n the deserializeInitializer pathway. 
*/\n if (className === 'GlorotNormal') {\n return new GlorotNormal();\n }\n else if (className === 'GlorotUniform') {\n return new GlorotUniform();\n }\n else if (className === 'HeNormal') {\n return new HeNormal();\n }\n else if (className === 'HeUniform') {\n return new HeUniform();\n }\n else if (className === 'LeCunNormal') {\n return new LeCunNormal();\n }\n else if (className === 'LeCunUniform') {\n return new LeCunUniform();\n }\n else {\n const config = {};\n config['className'] = className;\n config['config'] = {};\n return deserializeInitializer(config);\n }\n }\n else if (identifier instanceof Initializer) {\n return identifier;\n }\n else {\n return deserializeInitializer(identifier);\n }\n}\n//# sourceMappingURL=initializers.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n// tslint:disable-next-line:max-line-length\nimport { Constant, GlorotNormal, GlorotUniform, HeNormal, HeUniform, Identity, LeCunNormal, LeCunUniform, Ones, Orthogonal, RandomNormal, RandomUniform, TruncatedNormal, VarianceScaling, Zeros } from './initializers';\n/**\n * Initializer that generates tensors initialized to 0.\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function zeros() {\n return new Zeros();\n}\n/**\n * Initializer that generates tensors initialized to 1.\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function ones() {\n return new Ones();\n}\n/**\n * Initializer that generates values initialized to some constant.\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function constant(args) {\n return new Constant(args);\n}\n/**\n * Initializer that generates random values initialized to a uniform\n * distribution.\n *\n * Values will be distributed uniformly between the configured minval and\n * maxval.\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function randomUniform(args) {\n return new RandomUniform(args);\n}\n/**\n * Initializer that generates random values initialized to a normal\n * distribution.\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function randomNormal(args) {\n return new RandomNormal(args);\n}\n/**\n * Initializer that generates random values initialized to a truncated normal.\n * distribution.\n *\n * These values are similar to values from a `RandomNormal` except that values\n * more than two standard deviations from the mean are discarded and re-drawn.\n * This is the recommended initializer for neural network weights and filters.\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function truncatedNormal(args) {\n return new TruncatedNormal(args);\n}\n/**\n * Initializer that generates the identity matrix.\n * Only use for square 2D matrices.\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function identity(args) {\n return new Identity(args);\n}\n/**\n * Initializer capable of adapting its scale to the shape of weights.\n * With distribution=NORMAL, samples are drawn from a truncated normal\n * distribution centered on zero, with `stddev = sqrt(scale / n)` where n is:\n * - number of input units in the weight tensor, if mode = FAN_IN.\n * - number of output units, if mode = FAN_OUT.\n * - average of the 
numbers of input and output units, if mode = FAN_AVG.\n * With distribution=UNIFORM,\n * samples are drawn from a uniform distribution\n * within [-limit, limit], with `limit = sqrt(3 * scale / n)`.\n *\n * @doc {heading: 'Initializers',namespace: 'initializers'}\n */\nexport function varianceScaling(config) {\n return new VarianceScaling(config);\n}\n/**\n * Glorot uniform initializer, also called Xavier uniform initializer.\n * It draws samples from a uniform distribution within [-limit, limit]\n * where `limit` is `sqrt(6 / (fan_in + fan_out))`\n * where `fan_in` is the number of input units in the weight tensor\n * and `fan_out` is the number of output units in the weight tensor\n *\n * Reference:\n * Glorot & Bengio, AISTATS 2010\n * http://jmlr.org/proceedings/papers/v9/glorot10a/glorot10a.pdf.\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function glorotUniform(args) {\n return new GlorotUniform(args);\n}\n/**\n * Glorot normal initializer, also called Xavier normal initializer.\n * It draws samples from a truncated normal distribution centered on 0\n * with `stddev = sqrt(2 / (fan_in + fan_out))`\n * where `fan_in` is the number of input units in the weight tensor\n * and `fan_out` is the number of output units in the weight tensor.\n *\n * Reference:\n * Glorot & Bengio, AISTATS 2010\n * http://jmlr.org/proceedings/papers/v9/glorot10a/glorot10a.pdf\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function glorotNormal(args) {\n return new GlorotNormal(args);\n}\n/**\n * He normal initializer.\n *\n * It draws samples from a truncated normal distribution centered on 0\n * with `stddev = sqrt(2 / fanIn)`\n * where `fanIn` is the number of input units in the weight tensor.\n *\n * Reference:\n * He et al., http://arxiv.org/abs/1502.01852\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function heNormal(args) {\n return new HeNormal(args);\n}\n/**\n * He uniform initializer.\n *\n * It draws samples from a uniform distribution within [-limit, limit]\n * where `limit` is `sqrt(6 / fan_in)`\n * where `fanIn` is the number of input units in the weight tensor.\n *\n * Reference:\n * He et al., http://arxiv.org/abs/1502.01852\n *\n * @doc {heading: 'Initializers',namespace: 'initializers'}\n */\nexport function heUniform(args) {\n return new HeUniform(args);\n}\n/**\n * LeCun normal initializer.\n *\n * It draws samples from a truncated normal distribution centered on 0\n * with `stddev = sqrt(1 / fanIn)`\n * where `fanIn` is the number of input units in the weight tensor.\n *\n * References:\n * [Self-Normalizing Neural Networks](https://arxiv.org/abs/1706.02515)\n * [Efficient Backprop](http://yann.lecun.com/exdb/publis/pdf/lecun-98b.pdf)\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function leCunNormal(args) {\n return new LeCunNormal(args);\n}\n/**\n * LeCun uniform initializer.\n *\n * It draws samples from a uniform distribution in the interval\n * `[-limit, limit]` with `limit = sqrt(3 / fanIn)`,\n * where `fanIn` is the number of input units in the weight tensor.\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function leCunUniform(args) {\n return new LeCunUniform(args);\n}\n/**\n * Initializer that generates a random orthogonal matrix.\n *\n * Reference:\n * [Saxe et al., http://arxiv.org/abs/1312.6120](http://arxiv.org/abs/1312.6120)\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function 
orthogonal(args) {\n return new Orthogonal(args);\n}\n//# sourceMappingURL=exports_initializers.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Utilities related to persistent state in the backend.\n */\n/**\n * An ID to track `tf.SymbolicTensor`s and derived classes.\n * Required in different places in engine/topology.ts to identify unique\n * tensors.\n */\nlet _nextUniqueTensorId = 0;\nexport function getNextUniqueTensorId() {\n return _nextUniqueTensorId++;\n}\nconst _uidPrefixes = {};\n/**\n * Provides a unique UID given a string prefix.\n *\n * @param prefix\n */\nexport function getUid(prefix = '') {\n if (!(prefix in _uidPrefixes)) {\n _uidPrefixes[prefix] = 0;\n }\n _uidPrefixes[prefix] += 1;\n return prefix + _uidPrefixes[prefix].toString();\n}\n//# sourceMappingURL=state.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport { ValueError } from '../errors';\n// tslint:enable\n/**\n * Determine whether the input is an Array of Shapes.\n */\nexport function isArrayOfShapes(x) {\n return Array.isArray(x) && Array.isArray(x[0]);\n}\n/**\n * Special case of normalizing shapes to lists.\n *\n * @param x A shape or list of shapes to normalize into a list of Shapes.\n * @return A list of Shapes.\n */\nexport function normalizeShapeList(x) {\n if (x.length === 0) {\n return [];\n }\n if (!Array.isArray(x[0])) {\n return [x];\n }\n return x;\n}\n/**\n * Helper function to obtain exactly one Tensor.\n * @param xs: A single `tf.Tensor` or an `Array` of `tf.Tensor`s.\n * @return A single `tf.Tensor`. If `xs` is an `Array`, return the first one.\n * @throws ValueError: If `xs` is an `Array` and its length is not 1.\n */\nexport function getExactlyOneTensor(xs) {\n let x;\n if (Array.isArray(xs)) {\n if (xs.length !== 1) {\n throw new ValueError(`Expected Tensor length to be 1; got ${xs.length}`);\n }\n x = xs[0];\n }\n else {\n x = xs;\n }\n return x;\n}\n/**\n * Helper function to obtain exactly on instance of Shape.\n *\n * @param shapes Input single `Shape` or Array of `Shape`s.\n * @returns If input is a single `Shape`, return it unchanged. 
If the input is\n * an `Array` containing exactly one instance of `Shape`, return the instance.\n * Otherwise, throw a `ValueError`.\n * @throws ValueError: If input is an `Array` of `Shape`s, and its length is not\n * 1.\n */\nexport function getExactlyOneShape(shapes) {\n if (Array.isArray(shapes) && Array.isArray(shapes[0])) {\n if (shapes.length === 1) {\n shapes = shapes;\n return shapes[0];\n }\n else {\n throw new ValueError(`Expected exactly 1 Shape; got ${shapes.length}`);\n }\n }\n else {\n return shapes;\n }\n}\n//# sourceMappingURL=types_utils.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Count the elements in an Array of LayerVariables.\n *\n * @param weights: The LayerVariables of which the constituent numbers are to\n * be counted.\n * @returns A count of the elements in all the LayerVariables\n */\nexport function countParamsInWeights(weights) {\n let count = 0;\n for (const weight of weights) {\n if (weight.shape.length === 0) {\n count += 1;\n }\n else {\n count += weight.shape.reduce((a, b) => a * b);\n }\n }\n return count;\n}\n//# sourceMappingURL=variable_utils.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { variableGrads } from '@tensorflow/tfjs-core';\nimport { getNextUniqueTensorId } from './backend/state';\nimport { getScopedTensorName, getUniqueTensorName } from './common';\nimport { NotImplementedError } from './errors';\nconst DEFAULT_VARIABLE_NAME_PREFIX = 'Variable';\n/**\n * A `tf.layers.LayerVariable` is similar to a `tf.Tensor` in that it has a\n * dtype and shape, but its value is mutable. The value is itself represented\n * as a`tf.Tensor`, and can be read with the `read()` method and updated with\n * the `write()` method.\n */\nexport class LayerVariable {\n /**\n * Construct Variable from a `tf.Tensor`.\n *\n * If not explicitly named, the Variable will be given a name with the\n * prefix 'Variable'. Variable names are unique. In the case of name\n * collision, suffixies '_' will be added to the name.\n *\n * @param val Initial value of the Variable.\n * @param name Name of the variable. If `null` or `undefined` is provided, it\n * will default a name with the prefix 'Variable'.\n * @param constraint Optional, projection function to be applied to the\n * variable after optimize updates\n * @throws ValueError if `name` is `null` or `undefined`.\n */\n constructor(val, dtype = 'float32', name = DEFAULT_VARIABLE_NAME_PREFIX, trainable = true, constraint = null) {\n this.dtype = dtype == null ? 'float32' : dtype;\n this.shape = val.shape;\n this.id = getNextUniqueTensorId();\n name = name == null ? 
DEFAULT_VARIABLE_NAME_PREFIX : name;\n this.originalName = getScopedTensorName(name);\n this.name = getUniqueTensorName(this.originalName);\n this.trainable_ = trainable;\n this.constraint = constraint;\n this.val = tfc.variable(val, this.trainable_, this.name, this.dtype);\n }\n /**\n * Get a snapshot of the Variable's value.\n *\n * The returned value is a snapshot of the Variable's value at the time of\n * the invocation. Future mutations in the value of the tensor will only\n * be reflected by future calls to this method.\n */\n read() {\n this.assertNotDisposed();\n return this.val;\n }\n /**\n * Update the value of the Variable.\n *\n * @param newVal: The new value to update to. Must be consistent with the\n * dtype and shape of the Variable.\n * @return This Variable.\n */\n write(newVal) {\n // TODO(cais): Once TF.js Core supports Tensor.dtype, check dtype match.\n this.assertNotDisposed();\n checkShapesMatch(this.val, newVal);\n // Skip updating if this is the exact same tensor.\n if (this.val.id !== newVal.id) {\n this.val.assign(newVal);\n if (this.constraint != null) {\n this.val.assign(this.constraint.apply(this.val));\n }\n }\n return this;\n }\n /**\n * Dispose this LayersVariable instance from memory.\n */\n dispose() {\n this.assertNotDisposed();\n this.val.dispose();\n }\n assertNotDisposed() {\n if (this.val.isDisposed) {\n throw new Error(`LayersVariable ${this.name} is already disposed.`);\n }\n }\n get trainable() {\n return this.trainable_;\n }\n set trainable(trainable) {\n this.trainable_ = trainable;\n this.val.trainable = trainable;\n }\n}\nfunction checkShapesMatch(x, y) {\n if (x.shape.toString() !== y.shape.toString()) {\n throw new Error('Shape mismatch: ' + JSON.stringify(x.shape) + ' vs. ' +\n JSON.stringify(y.shape));\n }\n}\n/**\n * Create a Variable.\n * @param x The initial value of the `Variable`.\n * @param dtype optional, the type of the variable.\n * @param name optional, the name of the variable, default provided by\n * Variable.\n * @param constraint optional, a constraint to be applied after every update.\n * @return The newly instantiated `Variable`.\n */\nexport function variable(x, dtype, name, constraint) {\n return new LayerVariable(x, dtype, name, true, constraint);\n}\n/**\n * Instantiates an all-zeros Variable and returns it.\n *\n * @param shape Shape of the tensor.\n * @param dtype DType of the tensor.\n * @param name Name of the tensor.\n * @return An all-zero Variable.\n */\nexport function zerosVariable(shape, dtype, name) {\n // TODO(cais): Implement logic for dtype.\n return new LayerVariable(tfc.zeros(shape), dtype, name);\n}\n/**\n * Instantiates an all-zeros tensor of the same shape as another tensor.\n *\n * @param x The other tensor.\n * @param dtype DType of the tensor.\n * @param name Name of the tensor.\n * @return A newly instantiated Variable.\n */\nexport function zerosLike(x, dtype, name) {\n return new LayerVariable(tfc.zerosLike(x), dtype, name);\n}\n/**\n * Instantiates an all-ones tensor and returns it.\n *\n * @param shape Shape of the tensor.\n * @param dtype DType of the tensor.\n * @param name Name of the tensor.\n * @return An all-ones Variable.\n */\nexport function onesVariable(shape, dtype, name) {\n // TODO(cais): Implement logic for dtype.\n const allocated = tfc.ones(shape);\n return new LayerVariable(allocated, dtype, name);\n}\n/**\n * Instantiates an all-ones tensor of the same shape as another tensor.\n *\n * @param x The other tensor.\n * @param dtype DType of the tensor.\n * @param name Name of the 
tensor.\n * @return A newly instantiated Variable.\n */\nexport function onesLike(x, dtype, name) {\n const allocated = tfc.onesLike(x);\n return new LayerVariable(allocated, dtype, name);\n}\n/**\n * Instantiate an identity matrix and returns it, as a Variable\n *\n * @param size Number of rows/columns.\n * @param dtype Data type of returned Variable.\n * @param name Name of returned Variable.\n * @return A Variable, an identity matrix.\n */\nexport function eyeVariable(size, dtype, name) {\n return new LayerVariable(tfc.eye(size), dtype, name);\n}\n/**\n * Get a Variable with uniform distribution of values.\n * @param shape Shape of the tensor.\n * @param minval Lower bound of the uniform distribution.\n * @param maxval Upper bound of the uniform distribution.\n * @param dtype\n * @param seed\n * @param name Optional name.\n * @return The uniform-random Variable.\n */\nexport function randomUniformVariable(shape, minval, maxval, dtype, seed, name = 'randomUniform') {\n return new LayerVariable(tfc.randomUniform(shape, minval, maxval, dtype), dtype, name);\n}\n/**\n * Get a Variable with truncated-normal distribution of values.\n * @param shape Shape of the tensor.\n * @param mean mean value of the normal distribution.\n * @param stddev standard deviation of the normal distribution.\n * @param dtype\n * @param seed\n * @param name Optional name.\n * @return The truncated-normal-random Variable.\n */\nexport function truncatedNormalVariable(shape, mean = 0.0, stddev = 1.0, dtype, seed, name = 'truncatedNormal') {\n // TODO(cais): Implement logic for dtype and seed once they are supported\n // by deeplearn.js.\n dtype = dtype || 'float32';\n if (dtype !== 'float32' && dtype !== 'int32') {\n throw new NotImplementedError(`randomNormal does not support dType ${dtype}.`);\n }\n return new LayerVariable(tfc.truncatedNormal(shape, mean, stddev, dtype, seed), dtype, name);\n}\n/**\n * Get a Variable with normal distribution of values.\n * @param shape Shape of the tensor.\n * @param mean mean value of the normal distribution.\n * @param stddev standard deviation of the normal distribution.\n * @param dtype\n * @param seed\n * @param name Optional name.\n * @return The truncated-normal-random Variable.\n */\nexport function randomNormalVariable(shape, mean = 0.0, stddev = 1.0, dtype, seed, name = 'randomNormal') {\n dtype = dtype || 'float32';\n if (dtype !== 'float32' && dtype !== 'int32') {\n throw new NotImplementedError(`randomNormalVariable does not support dType ${dtype}.`);\n }\n return new LayerVariable(tfc.randomNormal(shape, mean, stddev, dtype, seed), dtype, name);\n}\n/**\n * Update the value of a Variable.\n * @param x The Variable to be updated.\n * @param xNew The new value to update to.\n * @return The Variable updated.\n */\nexport function update(x, xNew) {\n return x.write(xNew);\n}\n/**\n * Update the value of a Variable by adding an increment.\n * @param x The Variable to be updated.\n * @param increment The incrment to add to `x`.\n * @return The Variable updated.\n */\nexport function updateAdd(x, increment) {\n return x.write(tfc.add(x.read(), increment));\n}\n/**\n * Update the value of a Variable by subtracting a decrement.\n * @param x The Variable to be updated.\n * @param decrement The decrement to subtract from `x`.\n * @return The Variable updated.\n */\nexport function updateSub(x, decrement) {\n return x.write(tfc.sub(x.read(), decrement));\n}\n/**\n * Get the values of an array of Variables.\n *\n * @param tensors An `Array` of `Variable`s to get the values of.\n 
* @return The values of the inputs, as an `Array` of`tf.Tensor`s.\n */\nexport function batchGetValue(xs) {\n return xs.map(x => x.read());\n}\n/**\n * Update the value of multiple Variables at once.\n *\n * @param variablesAndValues An `Array`, each element is of type\n * [Variable, Tensor]. The first item is the\n * `Variable` of which the value is to be updated. The second item\n * carries the new value.\n */\nexport function batchSetValue(variablesAndValues) {\n variablesAndValues.forEach(variableAndValue => {\n const variable = variableAndValue[0];\n variable.write(variableAndValue[1]);\n });\n}\n/**\n * Returns the gradients of `variables` w.r.t. the return value of `lossFn`.\n * @param lossFn A function which returns a Scalar to be used as the function\n * value (i.e., numerator) for differentiation.\n * @param variables List of variables to be used as the independent variables\n * (i.e., denominator) for differentiation.\n * @returns An Array of gradients tensors.\n */\nexport function gradients(lossFn, variables) {\n // TODO(cais): The return type signature can be simplified if deeplearn makes\n // the corresponding type public.\n const variableList = variables.map(variable => variable.read());\n const valudAndGrads = variableGrads(lossFn, variableList);\n return variables.map(variable => valudAndGrads.grads[variable.name]);\n}\n//# sourceMappingURL=variables.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* Original source: keras/engine/topology.py */\nimport { serialization, tidy, util } from '@tensorflow/tfjs-core';\nimport { getNextUniqueTensorId, getUid } from '../backend/state';\nimport { getScopedTensorName, getUniqueTensorName, nameScope } from '../common';\nimport { AttributeError, NotImplementedError, RuntimeError, ValueError } from '../errors';\nimport { getInitializer } from '../initializers';\nimport * as generic_utils from '../utils/generic_utils';\nimport * as types_utils from '../utils/types_utils';\nimport * as variable_utils from '../utils/variable_utils';\nimport { batchGetValue, batchSetValue, LayerVariable } from '../variables';\n/**\n * Specifies the ndim, dtype and shape of every input to a layer.\n *\n * Every layer should expose (if appropriate) an `inputSpec` attribute:\n * a list of instances of InputSpec (one per input tensor).\n *\n * A null entry in a shape is compatible with any dimension,\n * a null shape is compatible with any shape.\n */\nexport class InputSpec {\n constructor(args) {\n this.dtype = args.dtype;\n this.shape = args.shape;\n /*\n TODO(michaelterry): Could throw error if ndim and shape are both defined\n (then backport).\n */\n if (args.shape != null) {\n this.ndim = args.shape.length;\n }\n else {\n this.ndim = args.ndim;\n }\n this.maxNDim = args.maxNDim;\n this.minNDim = args.minNDim;\n this.axes = args.axes || {};\n }\n}\n/**\n * `tf.SymbolicTensor` is a placeholder for a Tensor without any concrete value.\n *\n * They are most often encountered when building a graph of `Layer`s for a\n * a `tf.LayersModel` and the input data's shape, but not values are known.\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\nexport class SymbolicTensor {\n /**\n *\n * @param dtype\n * @param shape\n * @param sourceLayer The Layer that produced this symbolic tensor.\n * @param 
inputs The inputs passed to sourceLayer's __call__() method.\n * @param nodeIndex\n * @param tensorIndex\n * @param callArgs The keyword arguments passed to the __call__() method.\n * @param name\n * @param outputTensorIndex The index of this tensor in the list of outputs\n * returned by apply().\n */\n constructor(dtype, shape, sourceLayer, inputs, callArgs, name, outputTensorIndex) {\n this.dtype = dtype;\n this.shape = shape;\n this.sourceLayer = sourceLayer;\n this.inputs = inputs;\n this.callArgs = callArgs;\n this.outputTensorIndex = outputTensorIndex;\n this.id = getNextUniqueTensorId();\n if (name != null) {\n this.originalName = getScopedTensorName(name);\n this.name = getUniqueTensorName(this.originalName);\n }\n this.rank = shape.length;\n }\n}\nlet _nextNodeID = 0;\n/**\n * A `Node` describes the connectivity between two layers.\n *\n * Each time a layer is connected to some new input,\n * a node is added to `layer.inboundNodes`.\n *\n * Each time the output of a layer is used by another layer,\n * a node is added to `layer.outboundNodes`.\n *\n * `nodeIndices` and `tensorIndices` are basically fine-grained coordinates\n * describing the origin of the `inputTensors`, verifying the following:\n *\n * `inputTensors[i] ==\n * inboundLayers[i].inboundNodes[nodeIndices[i]].outputTensors[\n * tensorIndices[i]]`\n *\n * A node from layer A to layer B is added to:\n * A.outboundNodes\n * B.inboundNodes\n */\nexport class Node {\n constructor(args, \n // TODO(michaelterry): Define actual type for this.\n callArgs) {\n this.callArgs = callArgs;\n this.id = _nextNodeID++;\n /*\n Layer instance (NOT a list).\n this is the layer that takes a list of input tensors\n and turns them into a list of output tensors.\n the current node will be added to\n the inboundNodes of outboundLayer.\n */\n this.outboundLayer = args.outboundLayer;\n /*\n The following 3 properties describe where\n the input tensors come from: which layers,\n and for each layer, which node and which\n tensor output of each node.\n */\n // List of layer instances.\n this.inboundLayers = args.inboundLayers;\n // List of integers, 1:1 mapping with inboundLayers.\n this.nodeIndices = args.nodeIndices;\n // List of integers, 1:1 mapping with inboundLayers.\n this.tensorIndices = args.tensorIndices;\n /*\n Following 2 properties:\n tensor inputs and outputs of outboundLayer.\n */\n // List of tensors. 1:1 mapping with inboundLayers.\n this.inputTensors = args.inputTensors;\n // List of tensors, created by outboundLayer.call().\n this.outputTensors = args.outputTensors;\n /*\n Following 2 properties: input and output masks.\n List of tensors, 1:1 mapping with inputTensor.\n */\n this.inputMasks = args.inputMasks;\n // List of tensors, created by outboundLayer.computeMask().\n this.outputMasks = args.outputMasks;\n // Following 2 properties: input and output shapes.\n // List of shape tuples, shapes of inputTensors.\n this.inputShapes = args.inputShapes;\n // List of shape tuples, shapes of outputTensors.\n this.outputShapes = args.outputShapes;\n // Add nodes to all layers involved.\n for (const layer of args.inboundLayers) {\n if (layer != null) {\n layer.outboundNodes.push(this);\n }\n }\n args.outboundLayer.inboundNodes.push(this);\n }\n getConfig() {\n const inboundNames = [];\n for (const layer of this.inboundLayers) {\n if (layer != null) {\n inboundNames.push(layer.name);\n }\n else {\n inboundNames.push(null);\n }\n }\n return {\n outboundLayer: this.outboundLayer ? 
this.outboundLayer.name : null,\n inboundLayers: inboundNames,\n nodeIndices: this.nodeIndices,\n tensorIndices: this.tensorIndices\n };\n }\n}\nlet _nextLayerID = 0;\n/**\n * A layer is a grouping of operations and weights that can be composed to\n * create a `tf.LayersModel`.\n *\n * Layers are constructed by using the functions under the\n * [tf.layers](#Layers-Basic) namespace.\n *\n * @doc {heading: 'Layers', subheading: 'Classes', namespace: 'layers'}\n */\nexport class Layer extends serialization.Serializable {\n constructor(args = {}) {\n super();\n this._callHook = null;\n this._addedWeightNames = [];\n // Porting Notes: PyKeras does not have this property in this base Layer\n // class. Instead lets Layer subclass set it dynamically and checks the\n // value with `hasattr`. In tfjs-layers, we let this be a member of this\n // base class.\n this._stateful = false;\n this.id = _nextLayerID++;\n this.activityRegularizer = null;\n this.inputSpec = null;\n this.supportsMasking = false;\n // These properties will be set upon call of this.build()\n this._trainableWeights = [];\n this._nonTrainableWeights = [];\n this._losses = [];\n this._updates = [];\n this._built = false;\n /*\n These lists will be filled via successive calls\n to this.addInboundNode().\n */\n this.inboundNodes = [];\n this.outboundNodes = [];\n let name = args.name;\n if (!name) {\n const prefix = this.getClassName();\n name = generic_utils.toSnakeCase(prefix) + '_' + getUid(prefix);\n }\n this.name = name;\n this.trainable_ = args.trainable == null ? true : args.trainable;\n if (args.inputShape != null || args.batchInputShape != null) {\n /*\n In this case we will later create an input layer\n to insert before the current layer\n */\n let batchInputShape;\n if (args.batchInputShape != null) {\n batchInputShape = args.batchInputShape;\n }\n else if (args.inputShape != null) {\n let batchSize = null;\n if (args.batchSize != null) {\n batchSize = args.batchSize;\n }\n batchInputShape = [batchSize].concat(args.inputShape);\n }\n this.batchInputShape = batchInputShape;\n // Set dtype.\n let dtype = args.dtype;\n if (dtype == null) {\n dtype = args.inputDType;\n }\n if (dtype == null) {\n dtype = 'float32';\n }\n this.dtype = dtype;\n }\n if (args.weights != null) {\n this.initialWeights = args.weights;\n }\n else {\n this.initialWeights = null;\n }\n // The value of `_refCount` is initialized to null. When the layer is used\n // in a symbolic way for the first time, it will be set to 1.\n this._refCount = null;\n this.fastWeightInitDuringBuild = false;\n }\n /**\n * Converts a layer and its index to a unique (immutable type) name.\n * This function is used internally with `this.containerNodes`.\n * @param layer The layer.\n * @param nodeIndex The layer's position (e.g. 
via enumerate) in a list of\n * nodes.\n *\n * @returns The unique name.\n */\n static nodeKey(layer, nodeIndex) {\n return layer.name + '_ib-' + nodeIndex.toString();\n }\n /**\n * Returns this.inboundNode at index nodeIndex.\n *\n * Porting note: This is a replacement for _get_node_attribute_at_index()\n * @param nodeIndex\n * @param attrName The name of the attribute related to request for this node.\n */\n getNodeAtIndex(nodeIndex, attrName) {\n if (this.inboundNodes.length === 0) {\n throw new RuntimeError('The layer has never been called ' +\n `and thus has no defined ${attrName}.`);\n }\n if (this.inboundNodes.length <= nodeIndex) {\n throw new ValueError(`Asked to get ${attrName} at node ${nodeIndex}, ` +\n `but the layer has only ${this.inboundNodes.length} inbound nodes.`);\n }\n return this.inboundNodes[nodeIndex];\n }\n /**\n * Retrieves the input tensor(s) of a layer at a given node.\n *\n * @param nodeIndex Integer, index of the node from which to retrieve the\n * attribute. E.g. `nodeIndex=0` will correspond to the first time the layer\n * was called.\n *\n * @return A tensor (or list of tensors if the layer has multiple inputs).\n */\n getInputAt(nodeIndex) {\n return generic_utils.singletonOrArray(this.getNodeAtIndex(nodeIndex, 'input').inputTensors);\n }\n /**\n * Retrieves the output tensor(s) of a layer at a given node.\n *\n * @param nodeIndex Integer, index of the node from which to retrieve the\n * attribute. E.g. `nodeIndex=0` will correspond to the first time the layer\n * was called.\n *\n * @return A tensor (or list of tensors if the layer has multiple outputs).\n */\n getOutputAt(nodeIndex) {\n return generic_utils.singletonOrArray(this.getNodeAtIndex(nodeIndex, 'output').outputTensors);\n }\n // Properties\n /**\n * Retrieves the input tensor(s) of a layer.\n *\n * Only applicable if the layer has exactly one inbound node,\n * i.e. if it is connected to one incoming layer.\n *\n * @return Input tensor or list of input tensors.\n *\n * @exception AttributeError if the layer is connected to more than one\n * incoming layers.\n */\n get input() {\n if (this.inboundNodes.length > 1) {\n throw new AttributeError(`Layer ${this.name}` +\n ' has multiple inbound nodes, ' +\n 'hence the notion of \"layer input\" ' +\n 'is ill-defined. ' +\n 'Use `getInputAt(nodeIndex)` instead.');\n }\n else if (this.inboundNodes.length === 0) {\n throw new AttributeError(`Layer ${this.name}` +\n ' is not connected, no input to return.');\n }\n return generic_utils.singletonOrArray(this.getNodeAtIndex(0, 'input').inputTensors);\n }\n /**\n * Retrieves the output tensor(s) of a layer.\n *\n * Only applicable if the layer has exactly one inbound node,\n * i.e. if it is connected to one incoming layer.\n *\n * @return Output tensor or list of output tensors.\n *\n * @exception AttributeError if the layer is connected to more than one\n * incoming layers.\n */\n get output() {\n if (this.inboundNodes.length === 0) {\n throw new AttributeError(`Layer ${this.name}` +\n ' has no inbound nodes.');\n }\n if (this.inboundNodes.length > 1) {\n throw new AttributeError(`Layer ${this.name}` +\n ' has multiple inbound nodes, ' +\n 'hence the notion of \"layer output\" ' +\n 'is ill-defined. 
' +\n 'Use `getOutputAt(nodeIndex)` instead.');\n }\n return generic_utils.singletonOrArray(this.getNodeAtIndex(0, 'output').outputTensors);\n }\n get losses() {\n return this._losses;\n }\n /**\n * Retrieves the Layer's current loss values.\n *\n * Used for regularizers during training.\n */\n calculateLosses() {\n // Porting Node: This is an augmentation to Layer.loss in PyKeras.\n // In PyKeras, Layer.loss returns symbolic tensors. Here a concrete\n // Tensor (specifically Scalar) values are returned. This is due to the\n // imperative backend.\n return this.losses.map(lossFn => lossFn());\n }\n get updates() {\n return this._updates;\n }\n get built() {\n return this._built;\n }\n set built(built) {\n this._built = built;\n }\n get trainable() {\n return this.trainable_;\n }\n set trainable(trainable) {\n this._trainableWeights.forEach(w => w.trainable = trainable);\n this.trainable_ = trainable;\n }\n get trainableWeights() {\n if (this.trainable_) {\n return this._trainableWeights.filter(w => w.trainable);\n }\n else {\n return [];\n }\n }\n set trainableWeights(weights) {\n this._trainableWeights = weights;\n }\n get nonTrainableWeights() {\n if (this.trainable) {\n return this._trainableWeights.filter(w => !w.trainable)\n .concat(this._nonTrainableWeights);\n }\n else {\n return this._trainableWeights.concat(this._nonTrainableWeights);\n }\n }\n set nonTrainableWeights(weights) {\n this._nonTrainableWeights = weights;\n }\n /**\n * The concatenation of the lists trainableWeights and nonTrainableWeights\n * (in this order).\n */\n get weights() {\n return this.trainableWeights.concat(this.nonTrainableWeights);\n }\n get stateful() {\n return this._stateful;\n }\n /**\n * Reset the states of the layer.\n *\n * This method of the base Layer class is essentially a no-op.\n * Subclasses that are stateful (e.g., stateful RNNs) should override this\n * method.\n */\n resetStates() {\n if (!this.stateful) {\n throw new Error('Cannot call the resetStates() method of a non-stateful Layer ' +\n 'object.');\n }\n }\n /**\n * Checks compatibility between the layer and provided inputs.\n *\n * This checks that the tensor(s) `input`\n * verify the input assumptions of the layer\n * (if any). If not, exceptions are raised.\n *\n * @param inputs Input tensor or list of input tensors.\n *\n * @exception ValueError in case of mismatch between\n * the provided inputs and the expectations of the layer.\n */\n assertInputCompatibility(inputs) {\n inputs = generic_utils.toList(inputs);\n if (this.inputSpec == null || this.inputSpec.length === 0) {\n return;\n }\n const inputSpec = generic_utils.toList(this.inputSpec);\n if (inputs.length !== inputSpec.length) {\n throw new ValueError(`Layer ${this.name} expects ${inputSpec.length} inputs, ` +\n `but it received ${inputs.length} input tensors. 
` +\n `Input received: ${inputs}`);\n }\n for (let inputIndex = 0; inputIndex < inputs.length; inputIndex++) {\n const x = inputs[inputIndex];\n const spec = inputSpec[inputIndex];\n if (spec == null) {\n continue;\n }\n // Check ndim.\n const ndim = x.rank;\n if (spec.ndim != null) {\n if (ndim !== spec.ndim) {\n throw new ValueError(`Input ${inputIndex} is incompatible with layer ${this.name}: ` +\n `expected ndim=${spec.ndim}, found ndim=${ndim}`);\n }\n }\n if (spec.maxNDim != null) {\n if (ndim > spec.maxNDim) {\n throw new ValueError(`Input ${inputIndex} is incompatible with layer ${this.name}` +\n `: expected max_ndim=${spec.maxNDim}, found ndim=${ndim}`);\n }\n }\n if (spec.minNDim != null) {\n if (ndim < spec.minNDim) {\n throw new ValueError(`Input ${inputIndex} is incompatible with layer ${this.name}` +\n `: expected min_ndim=${spec.minNDim}, found ndim=${ndim}.`);\n }\n }\n // Check dtype.\n if (spec.dtype != null) {\n if (x.dtype !== spec.dtype) {\n throw new ValueError(`Input ${inputIndex} is incompatible with layer ${this.name} ` +\n `: expected dtype=${spec.dtype}, found dtype=${x.dtype}.`);\n }\n }\n // Check specific shape axes.\n if (spec.axes) {\n const xShape = x.shape;\n for (const key in spec.axes) {\n const axis = Number(key);\n const value = spec.axes[key];\n // Perform Python-style slicing in case axis < 0;\n // TODO(cais): Use https://github.com/alvivi/typescript-underscore to\n // ensure type safety through Underscore calls.\n const xShapeAtAxis = axis >= 0 ? xShape[axis] : xShape[xShape.length + axis];\n if (value != null && [value, null].indexOf(xShapeAtAxis) === -1) {\n throw new ValueError(`Input ${inputIndex} is incompatible with layer ` +\n `${this.name}: expected axis ${axis} of input shape to ` +\n `have value ${value} but got shape ${xShape}.`);\n }\n }\n }\n // Check shape.\n if (spec.shape != null) {\n for (let i = 0; i < spec.shape.length; ++i) {\n const specDim = spec.shape[i];\n const dim = x.shape[i];\n if (specDim != null && dim != null) {\n if (specDim !== dim) {\n throw new ValueError(`Input ${inputIndex} is incompatible with layer ` +\n `${this.name}: expected shape=${spec.shape}, ` +\n `found shape=${x.shape}.`);\n }\n }\n }\n }\n }\n }\n /**\n * This is where the layer's logic lives.\n *\n * @param inputs Input tensor, or list/tuple of input tensors.\n * @param kwargs Additional keyword arguments.\n *\n * @return A tensor or list/tuple of tensors.\n */\n call(inputs, kwargs) {\n return inputs;\n }\n invokeCallHook(inputs, kwargs) {\n if (this._callHook != null) {\n this._callHook(inputs, kwargs);\n }\n }\n /**\n * Set call hook.\n * This is currently used for testing only.\n * @param callHook\n */\n setCallHook(callHook) {\n this._callHook = callHook;\n }\n /**\n * Clear call hook.\n * This is currently used for testing only.\n */\n clearCallHook() {\n this._callHook = null;\n }\n /**\n * Builds or executes a `Layer's logic.\n *\n * When called with `tf.Tensor`(s), execute the `Layer`s computation and\n * return Tensor(s). 
For example:\n *\n * ```js\n * const denseLayer = tf.layers.dense({\n * units: 1,\n * kernelInitializer: 'zeros',\n * useBias: false\n * });\n *\n * // Invoke the layer's apply() method with a `tf.Tensor` (with concrete\n * // numeric values).\n * const input = tf.ones([2, 2]);\n * const output = denseLayer.apply(input);\n *\n * // The output's value is expected to be [[0], [0]], due to the fact that\n * // the dense layer has a kernel initialized to all-zeros and does not have\n * // a bias.\n * output.print();\n * ```\n *\n * When called with `tf.SymbolicTensor`(s), this will prepare the layer for\n * future execution. This entails internal book-keeping on shapes of\n * expected Tensors, wiring layers together, and initializing weights.\n *\n * Calling `apply` with `tf.SymbolicTensor`s are typically used during the\n * building of non-`tf.Sequential` models. For example:\n *\n * ```js\n * const flattenLayer = tf.layers.flatten();\n * const denseLayer = tf.layers.dense({units: 1});\n *\n * // Use tf.layers.input() to obtain a SymbolicTensor as input to apply().\n * const input = tf.input({shape: [2, 2]});\n * const output1 = flattenLayer.apply(input);\n *\n * // output1.shape is [null, 4]. The first dimension is the undetermined\n * // batch size. The second dimension comes from flattening the [2, 2]\n * // shape.\n * console.log(JSON.stringify(output1.shape));\n *\n * // The output SymbolicTensor of the flatten layer can be used to call\n * // the apply() of the dense layer:\n * const output2 = denseLayer.apply(output1);\n *\n * // output2.shape is [null, 1]. The first dimension is the undetermined\n * // batch size. The second dimension matches the number of units of the\n * // dense layer.\n * console.log(JSON.stringify(output2.shape));\n *\n * // The input and output and be used to construct a model that consists\n * // of the flatten and dense layers.\n * const model = tf.model({inputs: input, outputs: output2});\n * ```\n *\n * @param inputs a `tf.Tensor` or `tf.SymbolicTensor` or an Array of them.\n * @param kwargs Additional keyword arguments to be passed to `call()`.\n *\n * @return Output of the layer's `call` method.\n *\n * @exception ValueError error in case the layer is missing shape information\n * for its `build` call.\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n // Porting Note: This is a replacement for __call__() in Python.\n apply(inputs, kwargs) {\n kwargs = kwargs || {};\n this.assertNotDisposed();\n // Ensure inputs are all the same type.\n const inputsList = generic_utils.toList(inputs);\n let allAreSymbolic = true;\n for (const input of inputsList) {\n if (!(input instanceof SymbolicTensor)) {\n allAreSymbolic = false;\n break;\n }\n }\n let noneAreSymbolic = true;\n for (const input of inputsList) {\n if (input instanceof SymbolicTensor) {\n noneAreSymbolic = false;\n break;\n }\n }\n if (allAreSymbolic === noneAreSymbolic) {\n throw new ValueError('Arguments to apply() must be all ' +\n 'SymbolicTensors or all Tensors');\n }\n // TODO(michaelterry): nameScope() may not be necessary.\n return nameScope(this.name, () => {\n // Handle laying building (weight creating, input spec locking).\n if (!this.built) {\n /*\n Throw exceptions in case the input is not compatible\n with the inputSpec specified in the layer constructor.\n */\n this.assertInputCompatibility(inputs);\n // Collect input shapes to build layer.\n const inputShapes = [];\n for (const xElem of generic_utils.toList(inputs)) {\n inputShapes.push(xElem.shape);\n }\n 
this.build(generic_utils.singletonOrArray(inputShapes));\n this.built = true;\n // Load weights that were specified at layer instantiation.\n if (this.initialWeights) {\n this.setWeights(this.initialWeights);\n }\n if (this._refCount === null && noneAreSymbolic) {\n // The first use of this layer is a non-symbolic call, set ref count\n // to 1 so the Layer can be properly disposed if its dispose() method\n // is called.\n this._refCount = 1;\n }\n }\n /*\n Throw exceptions in case the input is not compatible\n with the inputSpec set at build time.\n */\n this.assertInputCompatibility(inputs);\n // Handle mask propagation.\n // TODO(michaelterry): Mask propagation not currently implemented.\n // Actually call the layer, collecting output(s), mask(s), and shape(s).\n if (noneAreSymbolic) {\n let output = this.call(inputs, kwargs);\n // TODO(michaelterry): Compute the outputMask\n // If the layer returns tensors from its inputs, unmodified,\n // we copy them to avoid loss of tensor metadata.\n const outputList = generic_utils.toList(output);\n const outputListCopy = [];\n // TODO(michaelterry): This copying may not be necessary given our eager\n // backend.\n for (let x of outputList) {\n if (inputsList.indexOf(x) !== -1) {\n x = x.clone();\n }\n outputListCopy.push(x);\n }\n output = generic_utils.singletonOrArray(outputListCopy);\n if (this.activityRegularizer != null) {\n throw new NotImplementedError('Layer invocation in the presence of activity ' +\n 'regularizer(s) is not supported yet.');\n }\n // TODO(michaelterry): Call addInboundNode()?\n return output;\n }\n else {\n const inputShape = collectInputShape(inputs);\n const outputShape = this.computeOutputShape(inputShape);\n let output;\n const outputDType = guessOutputDType(inputs);\n this.warnOnIncompatibleInputShape(Array.isArray(inputs) ? inputShape[0] :\n inputShape);\n if (outputShape != null && outputShape.length > 0 &&\n Array.isArray(outputShape[0])) {\n // We have multiple output shapes. 
Create multiple output tensors.\n output = outputShape\n .map((shape, index) => new SymbolicTensor(outputDType, shape, this, generic_utils.toList(inputs), kwargs, this.name, index));\n }\n else {\n output = new SymbolicTensor(outputDType, outputShape, this, generic_utils.toList(inputs), kwargs, this.name);\n }\n /*\n Add an inbound node to the layer, so that it keeps track\n of the call and of all new variables created during the call.\n This also updates the layer history of the output tensor(s).\n If the input tensor(s) had no previous history,\n this does nothing.\n */\n this.addInboundNode(inputs, output, null, null, inputShape, outputShape, kwargs);\n this._refCount++;\n if (this.activityRegularizer != null) {\n throw new NotImplementedError('Layer invocation in the presence of activity ' +\n 'regularizer(s) is not supported yet.');\n }\n return output;\n }\n });\n }\n /**\n * Check compatibility between input shape and this layer's batchInputShape.\n *\n * Print warning if any incompatibility is found.\n *\n * @param inputShape Input shape to be checked.\n */\n warnOnIncompatibleInputShape(inputShape) {\n if (this.batchInputShape == null) {\n return;\n }\n else if (inputShape.length !== this.batchInputShape.length) {\n console.warn(`The rank of the input tensor provided (shape: ` +\n `${JSON.stringify(inputShape)}) does not match that of the ` +\n `batchInputShape (${JSON.stringify(this.batchInputShape)}) ` +\n `of the layer ${this.name}`);\n }\n else {\n let dimMismatch = false;\n this.batchInputShape.forEach((dimension, i) => {\n if (dimension != null && inputShape[i] != null &&\n inputShape[i] !== dimension) {\n dimMismatch = true;\n }\n });\n if (dimMismatch) {\n console.warn(`The shape of the input tensor ` +\n `(${JSON.stringify(inputShape)}) does not ` +\n `match the expectation of layer ${this.name}: ` +\n `${JSON.stringify(this.batchInputShape)}`);\n }\n }\n }\n /**\n * Retrieves the output shape(s) of a layer.\n *\n * Only applicable if the layer has only one inbound node, or if all inbound\n * nodes have the same output shape.\n *\n * @returns Output shape or shapes.\n * @throws AttributeError: if the layer is connected to more than one incoming\n * nodes.\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n get outputShape() {\n if (this.inboundNodes == null || this.inboundNodes.length === 0) {\n throw new AttributeError(`The layer ${this.name} has never been called and thus has no ` +\n `defined output shape.`);\n }\n const allOutputShapes = [];\n for (const node of this.inboundNodes) {\n const shapeString = JSON.stringify(node.outputShapes);\n if (allOutputShapes.indexOf(shapeString) === -1) {\n allOutputShapes.push(shapeString);\n }\n }\n if (allOutputShapes.length === 1) {\n const outputShapes = this.inboundNodes[0].outputShapes;\n if (Array.isArray(outputShapes) && Array.isArray(outputShapes[0]) &&\n outputShapes.length === 1) {\n return outputShapes[0];\n }\n else {\n return outputShapes;\n }\n }\n else {\n throw new AttributeError(`The layer ${this.name} has multiple inbound nodes with different ` +\n `output shapes. 
Hence the notion of \"output shape\" is ill-defined ` +\n `for the layer.`);\n // TODO(cais): Implement getOutputShapeAt().\n }\n }\n /**\n * Counts the total number of numbers (e.g., float32, int32) in the\n * weights.\n *\n * @returns An integer count.\n * @throws RuntimeError: If the layer is not built yet (in which case its\n * weights are not defined yet.)\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n countParams() {\n if (!this.built) {\n throw new RuntimeError(`You tried to call countParams() on ${this.name}, ` +\n `but the layer is not built yet. Build it first by calling ` +\n `build(batchInputShape).`);\n }\n return variable_utils.countParamsInWeights(this.weights);\n }\n /**\n * Creates the layer weights.\n *\n * Must be implemented on all layers that have weights.\n *\n * Called when apply() is called to construct the weights.\n *\n * @param inputShape A `Shape` or array of `Shape` (unused).\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n build(inputShape) {\n this.built = true;\n }\n /**\n * Returns the current values of the weights of the layer.\n *\n * @param trainableOnly Whether to get the values of only trainable weights.\n * @returns Weight values as an `Array` of `tf.Tensor`s.\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n getWeights(trainableOnly = false) {\n return batchGetValue(trainableOnly ? this.trainableWeights : this.weights);\n }\n /**\n * Sets the weights of the layer, from Tensors.\n *\n * @param weights a list of Tensors. The number of arrays and their shape\n * must match number of the dimensions of the weights of the layer (i.e.\n * it should match the output of `getWeights`).\n *\n * @exception ValueError If the provided weights list does not match the\n * layer's specifications.\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n setWeights(weights) {\n tidy(() => {\n const params = this.weights;\n if (params.length !== weights.length) {\n // TODO(cais): Restore the following and use `providedWeights`, instead\n // of `weights` in the error message, once the deeplearn.js bug is\n // fixed: https://github.com/PAIR-code/deeplearnjs/issues/498 const\n // providedWeights = JSON.stringify(weights).substr(0, 50);\n throw new ValueError(`You called setWeights(weights) on layer \"${this.name}\" ` +\n `with a weight list of length ${weights.length}, ` +\n `but the layer was expecting ${params.length} weights. 
` +\n `Provided weights: ${weights}...`);\n }\n if (params.length === 0) {\n return;\n }\n const weightValueTuples = [];\n const paramValues = batchGetValue(params);\n for (let i = 0; i < paramValues.length; ++i) {\n const pv = paramValues[i];\n const p = params[i];\n const w = weights[i];\n if (!util.arraysEqual(pv.shape, w.shape)) {\n throw new ValueError(`Layer weight shape ${pv.shape} ` +\n `not compatible with provided weight shape ${w.shape}`);\n }\n weightValueTuples.push([p, w]);\n }\n batchSetValue(weightValueTuples);\n });\n }\n /**\n * Adds a weight variable to the layer.\n *\n * @param name Name of the new weight variable.\n * @param shape The shape of the weight.\n * @param dtype The dtype of the weight.\n * @param initializer An initializer instance.\n * @param regularizer A regularizer instance.\n * @param trainable Whether the weight should be trained via backprop or not\n * (assuming that the layer itself is also trainable).\n * @param constraint An optional trainable.\n * @return The created weight variable.\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n addWeight(name, shape, dtype, initializer, regularizer, trainable, constraint) {\n // Reject duplicate weight names.\n if (this._addedWeightNames.indexOf(name) !== -1) {\n throw new ValueError(`Duplicate weight name ${name} for layer ${this.name}`);\n }\n this._addedWeightNames.push(name);\n if (dtype == null) {\n dtype = 'float32';\n }\n if (this.fastWeightInitDuringBuild) {\n initializer = getInitializer('zeros');\n }\n const initValue = initializer.apply(shape, dtype);\n const weight = new LayerVariable(initValue, dtype, name, trainable, constraint);\n initValue.dispose();\n // Request backend not to dispose the weights of the model on scope() exit.\n if (regularizer != null) {\n this.addLoss(() => regularizer.apply(weight.read()));\n }\n if (trainable == null) {\n trainable = true;\n }\n if (trainable) {\n this._trainableWeights.push(weight);\n }\n else {\n this._nonTrainableWeights.push(weight);\n }\n return weight;\n }\n /**\n * Set the fast-weight-initialization flag.\n *\n * In cases where the initialized weight values will be immediately\n * overwritten by loaded weight values during model loading, setting\n * the flag to `true` saves unnecessary calls to potentially expensive\n * initializers and speeds up the loading process.\n *\n * @param value Target value of the flag.\n */\n setFastWeightInitDuringBuild(value) {\n this.fastWeightInitDuringBuild = value;\n }\n /**\n * Add losses to the layer.\n *\n * The loss may potentionally be conditional on some inputs tensors,\n * for instance activity losses are conditional on the layer's inputs.\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n addLoss(losses) {\n if (losses == null || Array.isArray(losses) && losses.length === 0) {\n return;\n }\n // Update this.losses\n losses = generic_utils.toList(losses);\n if (this._losses !== undefined && this._losses !== null) {\n this.losses.push(...losses);\n }\n }\n /**\n * Computes the output shape of the layer.\n *\n * Assumes that the layer will be built to match that input shape provided.\n *\n * @param inputShape A shape (tuple of integers) or a list of shape tuples\n * (one per output tensor of the layer). 
Shape tuples can include null for\n * free dimensions, instead of an integer.\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n computeOutputShape(inputShape) {\n return inputShape;\n }\n /**\n * Computes an output mask tensor.\n *\n * @param inputs Tensor or list of tensors.\n * @param mask Tensor or list of tensors.\n *\n * @return null or a tensor (or list of tensors, one per output tensor of the\n * layer).\n */\n computeMask(inputs, mask) {\n if (!this.supportsMasking) {\n if (mask != null) {\n if (Array.isArray(mask)) {\n mask.forEach(maskElement => {\n if (maskElement != null) {\n throw new TypeError(`Layer ${this.name} does not support masking, ` +\n 'but was passed an inputMask.');\n }\n });\n }\n else {\n throw new TypeError(`Layer ${this.name} does not support masking, ` +\n 'but was passed an inputMask.');\n }\n }\n // masking not explicitly supported: return null as mask\n return null;\n }\n // if masking is explictly supported, by default\n // carry over the input mask\n return mask;\n }\n /**\n * Internal method to create an inbound node for the layer.\n *\n * @param inputTensors List of input tensors.\n * @param outputTensors List of output tensors.\n * @param inputMasks List of input masks (a mask can be a tensor, or null).\n * @param outputMasks List of output masks (a mask can be a tensor, or null).\n * @param inputShapes List of input shape tuples.\n * @param outputShapes List of output shape tuples.\n * @param kwargs Dictionary of keyword arguments that were passed to the\n * `call` method of the layer at the call that created the node.\n */\n addInboundNode(inputTensors, outputTensors, inputMasks, outputMasks, inputShapes, outputShapes, kwargs = null) {\n const inputTensorList = generic_utils.toList(inputTensors);\n outputTensors = generic_utils.toList(outputTensors);\n inputMasks = generic_utils.toList(inputMasks);\n outputMasks = generic_utils.toList(outputMasks);\n inputShapes = types_utils.normalizeShapeList(inputShapes);\n outputShapes = types_utils.normalizeShapeList(outputShapes);\n // Collect input tensor(s) coordinates.\n const inboundLayers = [];\n const nodeIndices = [];\n const tensorIndices = [];\n for (const x of inputTensorList) {\n /*\n * TODO(michaelterry): Keras adds this value to tensors; it's not\n * clear whether we'll use this or not.\n */\n inboundLayers.push(x.sourceLayer);\n nodeIndices.push(x.nodeIndex);\n tensorIndices.push(x.tensorIndex);\n }\n // Create node, add it to inbound nodes.\n // (This call has side effects.)\n // tslint:disable-next-line:no-unused-expression\n new Node({\n outboundLayer: this,\n inboundLayers,\n nodeIndices,\n tensorIndices,\n inputTensors: inputTensorList,\n outputTensors,\n inputMasks,\n outputMasks,\n inputShapes,\n outputShapes\n }, kwargs);\n // Update tensor history\n for (let i = 0; i < outputTensors.length; i++) {\n // TODO(michaelterry: _uses_learning_phase not tracked.\n outputTensors[i].sourceLayer = this;\n outputTensors[i].nodeIndex = this.inboundNodes.length - 1;\n outputTensors[i].tensorIndex = i;\n }\n }\n /**\n * Returns the config of the layer.\n *\n * A layer config is a TS dictionary (serializable)\n * containing the configuration of a layer.\n * The same layer can be reinstantiated later\n * (without its trained weights) from this configuration.\n *\n * The config of a layer does not include connectivity\n * information, nor the layer class name. 
These are handled\n * by 'Container' (one layer of abstraction above).\n *\n * Porting Note: The TS dictionary follows TS naming standrds for\n * keys, and uses tfjs-layers type-safe Enums. Serialization methods\n * should use a helper function to convert to the pythonic storage\n * standard. (see serialization_utils.convertTsToPythonic)\n *\n * @returns TS dictionary of configuration.\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n getConfig() {\n const config = { name: this.name, trainable: this.trainable };\n if (this.batchInputShape != null) {\n config['batchInputShape'] = this.batchInputShape;\n }\n if (this.dtype != null) {\n config['dtype'] = this.dtype;\n }\n return config;\n }\n /**\n * Dispose the weight variables that this Layer instance holds.\n *\n * @returns {number} Number of disposed variables.\n */\n disposeWeights() {\n this.weights.forEach(weight => weight.dispose());\n return this.weights.length;\n }\n assertNotDisposed() {\n if (this._refCount === 0) {\n throw new Error(`Layer '${this.name}' is already disposed.`);\n }\n }\n /**\n * Attempt to dispose layer's weights.\n *\n * This method decrease the reference count of the Layer object by 1.\n *\n * A Layer is reference-counted. Its reference count is incremented by 1\n * the first item its `apply()` method is called and when it becomes a part\n * of a new `Node` (through calling the `apply()`) method on a\n * `tf.SymbolicTensor`).\n *\n * If the reference count of a Layer becomes 0, all the weights will be\n * disposed and the underlying memory (e.g., the textures allocated in WebGL)\n * will be freed.\n *\n * Note: If the reference count is greater than 0 after the decrement, the\n * weights of the Layer will *not* be disposed.\n *\n * After a Layer is disposed, it cannot be used in calls such as `apply()`,\n * `getWeights()` or `setWeights()` anymore.\n *\n * @returns A DisposeResult Object with the following fields:\n * - refCountAfterDispose: The reference count of the Container after this\n * `dispose()` call.\n * - numDisposedVariables: Number of `tf.Variable`s (i.e., weights) disposed\n * during this `dispose()` call.\n * @throws {Error} If the layer is not built yet, or if the layer has already\n * been disposed.\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n dispose() {\n if (!this.built) {\n throw new Error(`Cannot dispose Layer ${this.name} because it has not been ` +\n `built yet.`);\n }\n if (this._refCount === null) {\n throw new Error(`Cannot dispose Layer ${this.name} because it has not been used ` +\n `yet.`);\n }\n this.assertNotDisposed();\n let numDisposedVariables = 0;\n if (--this._refCount === 0) {\n numDisposedVariables = this.disposeWeights();\n }\n return { refCountAfterDispose: this._refCount, numDisposedVariables };\n }\n}\n/**\n * Collects the input shape(s) of a list of `tf.Tensor`s or\n * `tf.SymbolicTensor`s.\n *\n * TODO(michaelterry): Update PyKeras docs (backport).\n *\n * @param inputTensors List of input tensors (or single input tensor).\n *\n * @return List of shape tuples (or single tuple), one tuple per input.\n */\nfunction collectInputShape(inputTensors) {\n inputTensors =\n generic_utils.toList(inputTensors);\n const shapes = [];\n for (const x of inputTensors) {\n shapes.push(x.shape);\n }\n return generic_utils.singletonOrArray(shapes);\n}\n/**\n * Guesses output dtype based on inputs.\n *\n * At present, just returns 'float32' for any input.\n *\n * @param inputTensors List of input tensors (or single input tensor).\n *\n * @return The 
guessed DType. At present, always returns 'float32'.\n */\nfunction guessOutputDType(inputTensors) {\n return 'float32';\n}\n/**\n * Returns the list of input tensors necessary to compute `tensor`.\n *\n * Output will always be a list of tensors (potentially with 1 element).\n *\n * @param tensor The tensor to start from.\n * @param layer Origin layer of the tensor.\n * @param nodeIndex Origin node index of the tensor.\n *\n * @return Array of input tensors.\n */\nexport function getSourceInputs(tensor, layer, nodeIndex) {\n if (layer == null || (nodeIndex != null && nodeIndex > 0)) {\n layer = tensor.sourceLayer;\n nodeIndex = tensor.nodeIndex;\n }\n if (layer.inboundNodes.length === 0) {\n return [tensor];\n }\n else {\n const node = layer.inboundNodes[nodeIndex];\n if (node.inboundLayers.length === 0) {\n return node.inputTensors;\n }\n else {\n const sourceTensors = [];\n for (let i = 0; i < node.inboundLayers.length; i++) {\n const x = node.inputTensors[i];\n const layer = node.inboundLayers[i];\n const nodeIndex = node.nodeIndices[i];\n const previousSources = getSourceInputs(x, layer, nodeIndex);\n // Avoid input redundancy.\n for (const x of previousSources) {\n if (sourceTensors.indexOf(x) === -1) {\n sourceTensors.push(x);\n }\n }\n }\n return sourceTensors;\n }\n }\n}\n//# sourceMappingURL=topology.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport { serialization } from '@tensorflow/tfjs-core';\nimport { getUid } from '../backend/state';\nimport { ValueError } from '../errors';\nimport { Layer, Node, SymbolicTensor } from './topology';\nexport class InputLayer extends Layer {\n constructor(args) {\n super({\n dtype: args.dtype,\n name: args.name != null ? 
args.name : getUid('input').toString()\n });\n // Normalize config.batchSize and config.sparse\n if (args.batchSize == null) {\n args.batchSize = null;\n }\n if (args.sparse == null) {\n args.sparse = false;\n }\n this.trainable = false;\n this.built = true;\n this.sparse = args.sparse;\n if (args.inputShape != null && args.batchInputShape != null) {\n throw new ValueError('Only provide the inputShape OR ' +\n 'batchInputShape argument to inputLayer, not both at the same time.');\n }\n let batchInputShape = args.batchInputShape;\n if (batchInputShape == null) {\n if (args.inputShape == null) {\n throw new ValueError('An InputLayer should be passed either a ' +\n '`batchInputShape` or an `inputShape`.');\n }\n else {\n batchInputShape = [args.batchSize].concat(args.inputShape);\n }\n }\n else {\n // TODO(michaelterry): Backport to PyKeras\n if (args.batchSize != null) {\n throw new ValueError('Cannot specify batchSize if batchInputShape is ' +\n 'specified when creating an InputLayer.');\n }\n }\n const dtype = args.dtype || 'float32';\n this.batchInputShape = batchInputShape;\n this.dtype = dtype;\n // TODO(michaelterry): Backport this to PyKeras?\n this.inputSpec = [{ shape: batchInputShape }];\n const inputTensor = new SymbolicTensor(this.dtype, this.batchInputShape, this, [], {}, this.name);\n inputTensor.nodeIndex = 0;\n inputTensor.tensorIndex = 0;\n // Create an input node to add to this.outboundNode.\n // (This call has side effects.)\n // tslint:disable-next-line:no-unused-expression\n new Node({\n outboundLayer: this,\n inboundLayers: [],\n nodeIndices: [],\n tensorIndices: [],\n inputTensors: [inputTensor],\n outputTensors: [inputTensor],\n inputMasks: [null],\n outputMasks: [null],\n inputShapes: [batchInputShape],\n outputShapes: [batchInputShape]\n });\n }\n apply(inputs, kwargs) {\n throw new ValueError('Cannot pass any input to an ' +\n `InputLayer's apply() method. InputLayer name: ${this.name}`);\n }\n dispose() {\n // dispose() for InputLayer is overridden as no-op.\n return { refCountAfterDispose: this._refCount, numDisposedVariables: 0 };\n }\n getConfig() {\n return {\n batchInputShape: this.batchInputShape,\n dtype: this.dtype,\n sparse: this.sparse,\n name: this.name\n };\n }\n}\n/** @nocollapse */\nInputLayer.className = 'InputLayer';\nserialization.registerClass(InputLayer);\nexport function Input(config) {\n if (config.batchShape == null && config.shape == null) {\n throw new Error('Please provide to Input either a `shape`' +\n ' or a `batchShape` argument. 
Note that ' +\n '`shape` does not include the batch ' +\n 'dimension.');\n }\n if (config.batchShape != null && config.shape != null) {\n // TODO(michaelterry): Backport to PyKeras.\n throw new ValueError('Please provide either a `shape` or `batchShape` ' +\n 'argument to Input, but not both.');\n }\n let batchShape = config.batchShape;\n if (config.shape != null && batchShape == null) {\n batchShape = [null].concat(config.shape);\n }\n let dtype = config.dtype;\n if (dtype == null) {\n dtype = 'float32';\n }\n const inputLayer = new InputLayer({\n batchInputShape: batchShape,\n name: config.name,\n dtype,\n sparse: config.sparse\n });\n const outputs = inputLayer.inboundNodes[0].outputTensors;\n return outputs[0];\n}\n//# sourceMappingURL=input_layer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport { dispose } from '@tensorflow/tfjs-core';\n/**\n * Turn any Scalar values in a Logs object into actual number values.\n *\n * @param logs The `Logs` object to be resolved in place.\n */\nexport async function resolveScalarsInLogs(logs) {\n if (logs == null) {\n return;\n }\n const promises = [];\n const keys = [];\n const scalarsToDispose = [];\n for (const key in logs) {\n const value = logs[key];\n if (typeof value !== 'number') {\n const valueScalar = value;\n promises.push(valueScalar.data());\n keys.push(key);\n scalarsToDispose.push(valueScalar);\n }\n }\n if (promises.length > 0) {\n const values = await Promise.all(promises);\n for (let i = 0; i < values.length; ++i) {\n logs[keys[i]] = values[i][0];\n }\n // Dispose the original scalar tensors.\n dispose(scalarsToDispose);\n }\n}\n/**\n * Dispose all Tensors in an UnresolvedLogs object.\n *\n * @param logs An `UnresolvedLogs` object potentially containing `tf.Tensor`s in\n * places where the values can be `tf.Tensor` or `number`.\n */\nexport function disposeTensorsInLogs(logs) {\n if (logs == null) {\n return;\n }\n for (const key in logs) {\n const value = logs[key];\n if (typeof value !== 'number') {\n value.dispose();\n }\n }\n}\n//# sourceMappingURL=logs.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* Original source: keras/callbacks.py */\nimport { add, div, keep, mul, nextFrame, tidy, util } from '@tensorflow/tfjs-core';\nimport { ValueError } from './errors';\nimport { resolveScalarsInLogs } from './logs';\nimport * as generic_utils from './utils/generic_utils';\n/** Verbosity logging level when fitting a model. */\nexport var ModelLoggingVerbosity;\n(function (ModelLoggingVerbosity) {\n ModelLoggingVerbosity[ModelLoggingVerbosity[\"SILENT\"] = 0] = \"SILENT\";\n ModelLoggingVerbosity[ModelLoggingVerbosity[\"VERBOSE\"] = 1] = \"VERBOSE\";\n})(ModelLoggingVerbosity || (ModelLoggingVerbosity = {}));\n/** How often to yield to the main thread when training (in ms). 
*/\nexport const DEFAULT_YIELD_EVERY_MS = 125;\n/**\n * Abstract base class used to build new callbacks.\n *\n * The `logs` dictionary that callback methods take as argument will contain\n * keys for quantities relevant to the current batch or epoch.\n *\n * Currently, the `.fit()` method of the `Sequential` model class\n * will include the following quantities in the `logs` that\n * it passes to its callbacks:\n *\n * onEpochEnd: Logs include `acc` and `loss`, and optionally include `valLoss`\n * (if validation is enabled in `fit`), and `valAcc` (if validation and\n * accuracy monitoring are enabled).\n * onBatchBegin: Logs include `size`, the number of samples in the current\n * batch.\n * onBatchEnd: Logs include `loss`, and optionally `acc` (if accuracy monitoring\n * is enabled).\n */\nexport class BaseCallback {\n constructor() {\n // TODO(michaelterry): This type is a best guess.\n this.validationData = null;\n }\n setParams(params) {\n this.params = params;\n }\n async onEpochBegin(epoch, logs) { }\n async onEpochEnd(epoch, logs) { }\n async onBatchBegin(batch, logs) { }\n async onBatchEnd(batch, logs) { }\n async onTrainBegin(logs) { }\n async onTrainEnd(logs) { }\n // LayersModel needs to call Callback.setModel(), but cannot actually depend\n // on Callback because that creates a cyclic dependency. Providing this no-op\n // method on BaseCallback breaks the cycle: this way LayersModel can depend on\n // BaseCallback but not on Callback. The argument is typed as `Container`\n // (the superclass of LayersModel) to avoid recapitulating the cycle. Callback\n // overrides this method and enforces that the argument is really a\n // LayersModel.\n setModel(model) {\n // Do nothing. Use Callback instead of BaseCallback to track the model.\n }\n}\n/**\n * Container abstracting a list of callbacks.\n */\nexport class CallbackList {\n // TODO(cais): When the need arises, uncomment the following lines and\n // implement the queue for time values.\n // private deltaTBatch: number;\n // private deltaTsBatchBegin: Array;\n // private deltaTsBatchEnd: Array;\n /**\n * Constructor of CallbackList.\n * @param callbacks Array of `Callback` instances.\n * @param queueLength Queue length for keeping running statistics over\n * callback execution time.\n */\n constructor(callbacks, queueLength = 10) {\n // TODO(cais): Make use of queueLength when implementing the queue for time\n // values.\n if (callbacks == null) {\n callbacks = [];\n }\n this.callbacks = callbacks;\n this.queueLength = queueLength;\n }\n append(callback) {\n this.callbacks.push(callback);\n }\n setParams(params) {\n for (const callback of this.callbacks) {\n callback.setParams(params);\n }\n }\n setModel(model) {\n for (const callback of this.callbacks) {\n callback.setModel(model);\n }\n }\n /**\n * Called at the start of an epoch.\n * @param epoch Index of epoch.\n * @param logs Dictionary of logs.\n */\n async onEpochBegin(epoch, logs) {\n if (logs == null) {\n logs = {};\n }\n for (const callback of this.callbacks) {\n await callback.onEpochBegin(epoch, logs);\n }\n }\n /**\n * Called at the end of an epoch.\n * @param epoch Index of epoch.\n * @param logs Dictionary of logs.\n */\n async onEpochEnd(epoch, logs) {\n if (logs == null) {\n logs = {};\n }\n for (const callback of this.callbacks) {\n await callback.onEpochEnd(epoch, logs);\n }\n }\n /**\n * Called right before processing a batch.\n * @param batch Index of batch within the current epoch.\n * @param logs Dictionary of logs.\n */\n async onBatchBegin(batch, logs) {\n 
if (logs == null) {\n logs = {};\n }\n for (const callback of this.callbacks) {\n await callback.onBatchBegin(batch, logs);\n }\n }\n /**\n * Called at the end of a batch.\n * @param batch Index of batch within the current epoch.\n * @param logs Dictionary of logs.\n */\n async onBatchEnd(batch, logs) {\n if (logs == null) {\n logs = {};\n }\n for (const callback of this.callbacks) {\n await callback.onBatchEnd(batch, logs);\n }\n }\n /**\n * Called at the beginning of training.\n * @param logs Dictionary of logs.\n */\n async onTrainBegin(logs) {\n if (logs == null) {\n logs = {};\n }\n for (const callback of this.callbacks) {\n await callback.onTrainBegin(logs);\n }\n }\n /**\n * Called at the end of training.\n * @param logs Dictionary of logs.\n */\n async onTrainEnd(logs) {\n if (logs == null) {\n logs = {};\n }\n for (const callback of this.callbacks) {\n await callback.onTrainEnd(logs);\n }\n }\n}\n/**\n * Callback that accumulates epoch averages of metrics.\n *\n * This callback is automatically applied to every LayersModel.\n */\nexport class BaseLogger extends BaseCallback {\n constructor() {\n super();\n }\n async onEpochBegin(epoch) {\n this.seen = 0;\n this.totals = {};\n }\n async onBatchEnd(batch, logs) {\n if (logs == null) {\n logs = {};\n }\n const batchSize = logs['size'] == null ? 0 : logs['size'];\n this.seen += batchSize;\n for (const key in logs) {\n const value = logs[key];\n if (typeof value === 'number') {\n if (!this.totals.hasOwnProperty(key)) {\n this.totals[key] = 0;\n }\n this.totals[key] = this.totals[key] + value * batchSize;\n }\n else {\n let oldTotalsToDispose;\n if (key in this.totals) {\n oldTotalsToDispose = this.totals[key];\n }\n else {\n this.totals[key] = 0;\n }\n const total = tidy(() => add((this.totals[key]), mul(value, batchSize)));\n this.totals[key] = total;\n if (oldTotalsToDispose != null) {\n oldTotalsToDispose.dispose();\n }\n }\n }\n }\n async onEpochEnd(epoch, logs) {\n if (logs != null) {\n for (const key of this.params['metrics']) {\n if (this.totals[key] == null) {\n continue;\n }\n if (typeof this.totals[key] === 'number') {\n logs[key] = this.totals[key] / this.seen;\n }\n else {\n tidy(() => {\n const log = mul(div(1, this.seen), this.totals[key]);\n logs[key] = log;\n this.totals[key].dispose();\n keep(logs[key]);\n });\n }\n }\n }\n }\n}\n/**\n * Callback that records events into a `History` object. This callback is\n * automatically applied to every TF.js Layers model. 
The `History` object\n * gets returned by the `fit` method of models.\n */\nexport class History extends BaseCallback {\n async onTrainBegin(logs) {\n this.epoch = [];\n this.history = {};\n }\n async onEpochEnd(epoch, logs) {\n if (logs == null) {\n logs = {};\n }\n this.epoch.push(epoch);\n for (const key in logs) {\n if (this.history[key] == null) {\n this.history[key] = [];\n }\n this.history[key].push(logs[key]);\n }\n }\n /**\n * Await the values of all losses and metrics.\n */\n async syncData() {\n const promises = [];\n const keys = [];\n const indices = [];\n for (const key in this.history) {\n const valueArray = this.history[key];\n for (let i = 0; i < valueArray.length; ++i) {\n if (typeof valueArray[i] !== 'number') {\n const valueScalar = valueArray[i];\n promises.push(valueScalar.data());\n keys.push(key);\n indices.push(i);\n }\n }\n }\n const values = await Promise.all(promises);\n for (let n = 0; n < values.length; ++n) {\n const tensorToDispose = this.history[keys[n]][indices[n]];\n tensorToDispose.dispose();\n this.history[keys[n]][indices[n]] = values[n][0];\n }\n }\n}\n/**\n * Custom callback for training.\n */\nexport class CustomCallback extends BaseCallback {\n constructor(args, yieldEvery) {\n super();\n this.currentEpoch = 0;\n this.yieldEvery = yieldEvery || 'auto';\n if (this.yieldEvery === 'auto') {\n this.yieldEvery = DEFAULT_YIELD_EVERY_MS;\n }\n if (this.yieldEvery === 'never' && args.onYield != null) {\n throw new Error('yieldEvery is `never` but you provided an `onYield` callback. ' +\n 'Either change `yieldEvery` or remove the callback');\n }\n if (util.isNumber(this.yieldEvery)) {\n // Decorate `maybeWait` so it will be called at most once every\n // `yieldEvery` ms.\n this.maybeWait = generic_utils.debounce(this.maybeWait.bind(this), this.yieldEvery);\n }\n this.trainBegin = args.onTrainBegin;\n this.trainEnd = args.onTrainEnd;\n this.epochBegin = args.onEpochBegin;\n this.epochEnd = args.onEpochEnd;\n this.batchBegin = args.onBatchBegin;\n this.batchEnd = args.onBatchEnd;\n this.yield = args.onYield;\n }\n async maybeWait(epoch, batch, logs) {\n const ps = [];\n if (this.yield != null) {\n await resolveScalarsInLogs(logs);\n ps.push(this.yield(epoch, batch, logs));\n }\n ps.push(nextFrame());\n await Promise.all(ps);\n }\n async onEpochBegin(epoch, logs) {\n this.currentEpoch = epoch;\n if (this.epochBegin != null) {\n await resolveScalarsInLogs(logs);\n await this.epochBegin(epoch, logs);\n }\n }\n async onEpochEnd(epoch, logs) {\n const ps = [];\n if (this.epochEnd != null) {\n await resolveScalarsInLogs(logs);\n ps.push(this.epochEnd(epoch, logs));\n }\n if (this.yieldEvery === 'epoch') {\n ps.push(nextFrame());\n }\n await Promise.all(ps);\n }\n async onBatchBegin(batch, logs) {\n if (this.batchBegin != null) {\n await resolveScalarsInLogs(logs);\n await this.batchBegin(batch, logs);\n }\n }\n async onBatchEnd(batch, logs) {\n const ps = [];\n if (this.batchEnd != null) {\n await resolveScalarsInLogs(logs);\n ps.push(this.batchEnd(batch, logs));\n }\n if (this.yieldEvery === 'batch') {\n ps.push(nextFrame());\n }\n else if (util.isNumber(this.yieldEvery)) {\n ps.push(this.maybeWait(this.currentEpoch, batch, logs));\n }\n await Promise.all(ps);\n }\n async onTrainBegin(logs) {\n if (this.trainBegin != null) {\n await resolveScalarsInLogs(logs);\n await this.trainBegin(logs);\n }\n }\n async onTrainEnd(logs) {\n if (this.trainEnd != null) {\n await resolveScalarsInLogs(logs);\n await this.trainEnd(logs);\n }\n }\n}\n/**\n * Standardize callbacks or 
configurations of them to an Array of callbacks.\n */\nexport function standardizeCallbacks(callbacks, yieldEvery) {\n if (callbacks == null) {\n callbacks = {};\n }\n if (callbacks instanceof BaseCallback) {\n return [callbacks];\n }\n if (Array.isArray(callbacks) && callbacks[0] instanceof BaseCallback) {\n return callbacks;\n }\n // Convert custom callback configs to custom callback objects.\n const callbackConfigs = generic_utils.toList(callbacks);\n return callbackConfigs.map(callbackConfig => new CustomCallback(callbackConfig, yieldEvery));\n}\n/**\n * A global registry for callback constructors to be used during\n * LayersModel.fit().\n */\nexport class CallbackConstructorRegistry {\n /**\n * Blocks public access to constructor.\n */\n constructor() { }\n /**\n * Register a tf.LayersModel.fit() callback constructor.\n *\n * The registered callback constructor will be used to instantiate\n * callbacks for every tf.LayersModel.fit() call afterwards.\n *\n * @param verbosityLevel Level of verbosity at which the `callbackConstructor`\n * is to be reigstered.\n * @param callbackConstructor A no-arg constructor for `tf.Callback`.\n * @throws Error, if the same callbackConstructor has been registered before,\n * either at the same or a different `verbosityLevel`.\n */\n static registerCallbackConstructor(verbosityLevel, callbackConstructor) {\n util.assert(verbosityLevel >= 0 && Number.isInteger(verbosityLevel), () => `Verbosity level is expected to be an integer >= 0, ` +\n `but got ${verbosityLevel}`);\n CallbackConstructorRegistry.checkForDuplicate(callbackConstructor);\n if (CallbackConstructorRegistry.constructors[verbosityLevel] == null) {\n CallbackConstructorRegistry.constructors[verbosityLevel] = [];\n }\n CallbackConstructorRegistry.constructors[verbosityLevel].push(callbackConstructor);\n }\n static checkForDuplicate(callbackConstructor) {\n for (const levelName in CallbackConstructorRegistry.constructors) {\n const constructors = CallbackConstructorRegistry.constructors[+levelName];\n constructors.forEach(ctor => {\n if (ctor === callbackConstructor) {\n throw new ValueError('Duplicate callback constructor.');\n }\n });\n }\n }\n /**\n * Clear all registered callback constructors.\n */\n static clear() {\n CallbackConstructorRegistry.constructors = {};\n }\n /**\n * Create callbacks using the registered callback constructors.\n *\n * Given `verbosityLevel`, all constructors registered at that level or above\n * will be called and the instantiated callbacks will be used.\n *\n * @param verbosityLevel: Level of verbosity.\n */\n static createCallbacks(verbosityLevel) {\n const constructors = [];\n for (const levelName in CallbackConstructorRegistry.constructors) {\n const level = +levelName;\n if (verbosityLevel >= level) {\n constructors.push(...CallbackConstructorRegistry.constructors[level]);\n }\n }\n return constructors.map(ctor => new ctor());\n }\n}\nCallbackConstructorRegistry.constructors = {};\nexport function configureCallbacks(callbacks, verbose, epochs, initialEpoch, numTrainSamples, stepsPerEpoch, batchSize, doValidation, callbackMetrics) {\n const history = new History();\n const actualCallbacks = [\n new BaseLogger(), ...CallbackConstructorRegistry.createCallbacks(verbose)\n ];\n if (callbacks != null) {\n actualCallbacks.push(...callbacks);\n }\n actualCallbacks.push(history);\n const callbackList = new CallbackList(actualCallbacks);\n // TODO(cais): Figure out when this LayersModel instance can have a\n // dynamically\n // set property called 'callback_model' as 
in PyKeras.\n callbackList.setParams({\n epochs,\n initialEpoch,\n samples: numTrainSamples,\n steps: stepsPerEpoch,\n batchSize,\n verbose,\n doValidation,\n metrics: callbackMetrics,\n });\n return { callbackList, history };\n}\n//# sourceMappingURL=base_callbacks.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* Original Source layers/__init__.py */\nimport { serialization } from '@tensorflow/tfjs-core';\nimport { deserializeKerasObject } from '../utils/generic_utils';\n/**\n * Instantiate a layer from a config dictionary.\n * @param config dict of the form {class_name: str, config: dict}\n * @param customObjects dict mapping class names (or function names)\n * of custom (non-Keras) objects to class/functions\n * @param fastWeightInit Optional flag to use fast weight initialization\n * during deserialization. This is applicable to cases in which\n * the initialization will be immediately overwritten by loaded weight\n * values. Default: `false`.\n * @returns Layer instance (may be LayersModel, Sequential, Layer...)\n */\nexport function deserialize(config, customObjects = {}, fastWeightInit = false) {\n return deserializeKerasObject(config, serialization.SerializationMap.getMap().classNameMap, customObjects, 'layer', fastWeightInit);\n}\n//# sourceMappingURL=serialization.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* Original Source: losses.py */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { tidy, util } from '@tensorflow/tfjs-core';\nimport { epsilon } from './backend/common';\nimport * as K from './backend/tfjs_backend';\nimport { ValueError } from './errors';\n/**\n * Normalizes a tensor wrt the L2 norm alongside the specified axis.\n * @param x\n * @param axis Axis along which to perform normalization.\n */\nexport function l2Normalize(x, axis) {\n return tidy(() => {\n if (x.dtype !== 'float32') {\n x = x.asType('float32');\n }\n const squareSum = tfc.sum(K.square(x), axis, true);\n const epsilonTensor = tfc.fill(squareSum.shape, epsilon());\n const norm = tfc.sqrt(tfc.maximum(squareSum, epsilonTensor));\n return tfc.div(x, norm);\n });\n}\nexport function meanSquaredError(yTrue, yPred) {\n return tidy(() => tfc.mean(K.square(tfc.sub(yPred, yTrue)), -1));\n}\nexport function meanAbsoluteError(yTrue, yPred) {\n return tidy(() => tfc.mean(tfc.abs(tfc.sub(yPred, yTrue)), -1));\n}\nexport function meanAbsolutePercentageError(yTrue, yPred) {\n return tidy(() => {\n const diff = tfc.sub(yTrue, yPred);\n const clippedTrue = tfc.clipByValue(tfc.abs(yTrue), epsilon(), Number.MAX_VALUE);\n const absResult = tfc.abs(tfc.div(diff, clippedTrue));\n return tfc.mul(100, tfc.mean(absResult, -1));\n });\n}\nexport function meanSquaredLogarithmicError(yTrue, yPred) {\n return tidy(() => {\n const clippedPred = tfc.clipByValue(yPred, epsilon(), Number.MAX_VALUE);\n const firstLog = tfc.log(tfc.add(1, clippedPred));\n const clippedTrue = tfc.clipByValue(yTrue, epsilon(), Number.MAX_VALUE);\n const secondLog = tfc.log(tfc.add(1, clippedTrue));\n return tfc.mean(K.square(tfc.sub(firstLog, 
secondLog)), -1);\n });\n}\nexport function squaredHinge(yTrue, yPred) {\n return tidy(() => {\n const maxResult = tfc.maximum(0, tfc.sub(1, tfc.mul(yTrue, yPred)));\n return tfc.mean(K.square(maxResult), -1);\n });\n}\nexport function hinge(yTrue, yPred) {\n return tidy(() => {\n const maxResult = tfc.maximum(0, tfc.sub(1, tfc.mul(yTrue, yPred)));\n return tfc.mean(maxResult, -1);\n });\n}\nexport function categoricalHinge(yTrue, yPred) {\n return tidy(() => {\n const pos = tfc.sum(tfc.mul(yTrue, yPred), -1);\n const neg = tfc.max(tfc.mul(tfc.sub(1, yTrue), yPred), -1);\n return tfc.maximum(0, tfc.add(1, tfc.sub(neg, pos)));\n });\n}\n/**\n * Logarithm of the hyperbolic cosine of the prediction error.\n *\n * `log(cosh(x))` is approximately equal to `(x ** 2) / 2` for small `x` and\n * to `abs(x) - log(2)` for large `x`. This means that 'logcosh' works mostly\n * like the mean squared error, but will not be so strongly affected by the\n * occasional wildly incorrect prediction.\n */\nexport function logcosh(yTrue, yPred) {\n return tidy(() => {\n const log2 = Math.log(2);\n const predictionDiff = tfc.sub(yPred, yTrue);\n const logcoshResult = tfc.sub(tfc.add(predictionDiff, tfc.softplus(tfc.mul(-2, predictionDiff))), log2);\n return tfc.mean(logcoshResult, -1);\n });\n}\nexport function categoricalCrossentropy(target, output, fromLogits = false) {\n return tidy(() => {\n if (fromLogits) {\n output = tfc.softmax(output);\n }\n else {\n // scale preds so that the class probabilities of each sample sum to 1.\n const outputSum = tfc.sum(output, output.shape.length - 1, true);\n output = tfc.div(output, outputSum);\n }\n output = tfc.clipByValue(output, epsilon(), 1 - epsilon());\n return tfc.neg(tfc.sum(tfc.mul(target.toFloat(), tfc.log(output)), output.shape.length - 1));\n });\n}\n/**\n * Categorical crossentropy with integer targets.\n *\n * @param target An integer tensor.\n * @param output A tensor resulting from a softmax (unless `fromLogits` is\n * `true`, in which case `output` is expected to be the logits).\n * @param fromLogits Boolean, whether `output` is the result of a softmax, or is\n * a tensor of logits.\n */\nexport function sparseCategoricalCrossentropy(target, output, fromLogits = false) {\n return tidy(() => {\n const flatTarget = tfc.floor(K.flatten(target)).toInt();\n output = tfc.clipByValue(output, epsilon(), 1 - epsilon());\n const outputShape = output.shape;\n const oneHotTarget = tfc.oneHot(flatTarget, outputShape[outputShape.length - 1])\n .reshape(outputShape);\n return categoricalCrossentropy(oneHotTarget, output, fromLogits);\n });\n}\n/**\n * From TensorFlow's implementation in nn_impl.py:\n *\n * For brevity, let `x = logits`, `z = labels`. 
The logistic loss is\n * z * -log(sigmoid(x)) + (1 - z) * -log(1 - sigmoid(x))\n * = z * -log(1 / (1 + exp(-x))) + (1 - z) * -log(exp(-x) / (1 + exp(-x)))\n * = z * log(1 + exp(-x)) + (1 - z) * (-log(exp(-x)) + log(1 + exp(-x)))\n * = z * log(1 + exp(-x)) + (1 - z) * (x + log(1 + exp(-x))\n * = (1 - z) * x + log(1 + exp(-x))\n * = x - x * z + log(1 + exp(-x))\n * For x < 0, to avoid overflow in exp(-x), we reformulate the above\n * x - x * z + log(1 + exp(-x))\n * = log(exp(x)) - x * z + log(1 + exp(-x))\n * = - x * z + log(1 + exp(x))\n * Hence, to ensure stability and avoid overflow, the implementation uses this\n * equivalent formulation\n * max(x, 0) - x * z + log(1 + exp(-abs(x)))\n *\n * @param labels The labels.\n * @param logits The logits.\n */\nexport function sigmoidCrossEntropyWithLogits(labels, logits) {\n if (!util.arraysEqual(labels.shape, logits.shape)) {\n throw new ValueError(`logits and labels must have the same shape, but got shapes ` +\n `${JSON.stringify(labels.shape)} and ${JSON.stringify(logits.shape)}`);\n }\n return tidy(() => {\n // The logistic loss formula from above is\n // x - x * z + log(1 + exp(-x))\n // For x < 0, a more numerically stable formula is\n // -x * z + log(1 + exp(x))\n // Note that these two expressions can be combined into the following:\n // max(x, 0) - x * z + log(1 + exp(-abs(x)))\n const reluLogits = logits.relu();\n const negAbsLogits = logits.abs().neg();\n return reluLogits.sub(logits.mul(labels)).add(negAbsLogits.exp().log1p());\n });\n}\nexport function binaryCrossentropy(yTrue, yPred) {\n return tidy(() => {\n let y;\n y = tfc.clipByValue(yPred, epsilon(), 1 - epsilon());\n y = tfc.log(tfc.div(y, tfc.sub(1, y)));\n return tfc.mean(sigmoidCrossEntropyWithLogits(yTrue, y), -1);\n });\n}\nexport function kullbackLeiblerDivergence(yTrue, yPred) {\n return tidy(() => {\n const clippedTrue = tfc.clipByValue(yTrue, epsilon(), 1);\n const clippedPred = tfc.clipByValue(yPred, epsilon(), 1);\n return tfc.sum(tfc.mul(yTrue, tfc.log(tfc.div(clippedTrue, clippedPred))), -1);\n });\n}\nexport function poisson(yTrue, yPred) {\n return tidy(() => {\n const logPred = tfc.log(tfc.add(epsilon(), yPred));\n return tfc.mean(tfc.sub(yPred, tfc.mul(yTrue, logPred)), -1);\n });\n}\nexport function cosineProximity(yTrue, yPred) {\n return tidy(() => {\n const trueNormalized = l2Normalize(yTrue, -1);\n const predNormalized = l2Normalize(yPred, -1);\n const trueXPred = tfc.mul(trueNormalized, predNormalized);\n return tfc.neg(tfc.sum(trueXPred, -1));\n });\n}\nexport const mse = meanSquaredError;\nexport const MSE = meanSquaredError;\nexport const mae = meanAbsoluteError;\nexport const MAE = meanAbsoluteError;\nexport const mape = meanAbsolutePercentageError;\nexport const MAPE = meanAbsolutePercentageError;\nexport const msle = meanSquaredLogarithmicError;\nexport const MSLE = meanSquaredLogarithmicError;\nexport const kld = kullbackLeiblerDivergence;\nexport const KLD = kullbackLeiblerDivergence;\nexport const cosine = cosineProximity;\n// TODO(michaelterry): Add deserialize() function.\nexport const lossesMap = {\n meanSquaredError,\n meanAbsoluteError,\n meanAbsolutePercentageError,\n meanSquaredLogarithmicError,\n squaredHinge,\n hinge,\n categoricalHinge,\n logcosh,\n categoricalCrossentropy,\n sparseCategoricalCrossentropy,\n binaryCrossentropy,\n kullbackLeiblerDivergence,\n poisson,\n cosineProximity\n};\n// Porting note: This diverges from the PyKeras implementation and may need to\n// change based on (de)serialization requirements.\nexport function 
get(identifierOrFn) {\n if (typeof identifierOrFn === 'string') {\n if (identifierOrFn in lossesMap) {\n return lossesMap[identifierOrFn];\n }\n let errMsg = `Unknown loss ${identifierOrFn}`;\n if (identifierOrFn.toLowerCase().includes('softmaxcrossentropy')) {\n errMsg = `Unknown loss ${identifierOrFn}. ` +\n 'Use \"categoricalCrossentropy\" as the string name for ' +\n 'tf.losses.softmaxCrossEntropy';\n }\n throw new ValueError(errMsg);\n }\n else {\n return identifierOrFn;\n }\n}\n//# sourceMappingURL=losses.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Built-in metrics.\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { tidy } from '@tensorflow/tfjs-core';\nimport * as K from './backend/tfjs_backend';\nimport { NotImplementedError, ValueError } from './errors';\nimport { categoricalCrossentropy as categoricalCrossentropyLoss, cosineProximity, meanAbsoluteError, meanAbsolutePercentageError, meanSquaredError, sparseCategoricalCrossentropy as sparseCategoricalCrossentropyLoss } from './losses';\nimport { binaryCrossentropy as lossBinaryCrossentropy } from './losses';\nimport { lossesMap } from './losses';\nimport * as util from './utils/generic_utils';\nexport function binaryAccuracy(yTrue, yPred) {\n return tidy(() => {\n const threshold = tfc.mul(.5, tfc.onesLike(yPred));\n const yPredThresholded = K.cast(tfc.greater(yPred, threshold), yTrue.dtype);\n return tfc.mean(tfc.equal(yTrue, yPredThresholded), -1);\n });\n}\nexport function categoricalAccuracy(yTrue, yPred) {\n return tidy(() => K.cast(tfc.equal(tfc.argMax(yTrue, -1), tfc.argMax(yPred, -1)), 'float32'));\n}\nfunction truePositives(yTrue, yPred) {\n return tidy(() => {\n return tfc.logicalAnd(yTrue.equal(1), yPred.equal(1)).sum().cast('float32');\n });\n}\nfunction falseNegatives(yTrue, yPred) {\n return tidy(() => {\n return tfc.logicalAnd(yTrue.equal(1), yPred.equal(0)).sum().cast('float32');\n });\n}\nfunction falsePositives(yTrue, yPred) {\n return tidy(() => {\n return tfc.logicalAnd(yTrue.equal(0), yPred.equal(1)).sum().cast('float32');\n });\n}\nexport function precision(yTrue, yPred) {\n return tidy(() => {\n const tp = truePositives(yTrue, yPred);\n const fp = falsePositives(yTrue, yPred);\n const denominator = tp.add(fp);\n return tfc.where(tfc.greater(denominator, 0), tp.div(denominator), 0)\n .cast('float32');\n });\n}\nexport function recall(yTrue, yPred) {\n return tidy(() => {\n const tp = truePositives(yTrue, yPred);\n const fn = falseNegatives(yTrue, yPred);\n const denominator = tp.add(fn);\n return tfc.where(tfc.greater(denominator, 0), tp.div(denominator), 0)\n .cast('float32');\n });\n}\nexport function binaryCrossentropy(yTrue, yPred) {\n return lossBinaryCrossentropy(yTrue, yPred);\n}\nexport function sparseCategoricalAccuracy(yTrue, yPred) {\n if (yTrue.rank === yPred.rank) {\n yTrue = yTrue.squeeze([yTrue.rank - 1]);\n }\n yPred = yPred.argMax(-1);\n if (yPred.dtype !== yTrue.dtype) {\n yPred = yPred.asType(yTrue.dtype);\n }\n return tfc.equal(yTrue, yPred).asType('float32');\n}\nexport function topKCategoricalAccuracy(yTrue, yPred) {\n throw new NotImplementedError();\n}\nexport function sparseTopKCategoricalAccuracy(yTrue, yPred) {\n throw new NotImplementedError();\n}\n// Aliases.\nexport const mse = meanSquaredError;\nexport const 
MSE = meanSquaredError;\nexport const mae = meanAbsoluteError;\nexport const MAE = meanAbsoluteError;\nexport const mape = meanAbsolutePercentageError;\nexport const MAPE = meanAbsolutePercentageError;\nexport const categoricalCrossentropy = categoricalCrossentropyLoss;\nexport const cosine = cosineProximity;\nexport const sparseCategoricalCrossentropy = sparseCategoricalCrossentropyLoss;\n// TODO(cais, nielsene): Add serialize().\nexport const metricsMap = {\n binaryAccuracy,\n categoricalAccuracy,\n precision,\n categoricalCrossentropy,\n sparseCategoricalCrossentropy,\n mse,\n MSE,\n mae,\n MAE,\n mape,\n MAPE,\n cosine\n};\nexport function get(identifier) {\n if (typeof identifier === 'string' && identifier in metricsMap) {\n return metricsMap[identifier];\n }\n else if (typeof identifier !== 'string' && identifier != null) {\n return identifier;\n }\n else {\n throw new ValueError(`Unknown metric ${identifier}`);\n }\n}\n/**\n * Get the shortcut function name.\n *\n * If the fn name is a string,\n * directly return the string name.\n * If the function is included in metricsMap or lossesMap,\n * return key of the map.\n * - If the function relative to multiple keys,\n * return the first found key as the function name.\n * - If the function exists in both lossesMap and metricsMap,\n * search lossesMap first.\n * If the function is not included in metricsMap or lossesMap,\n * return the function name.\n *\n * @param fn loss function, metric function, or short cut name.\n * @returns Loss or Metric name in string.\n */\nexport function getLossOrMetricName(fn) {\n util.assert(fn !== null, `Unknown LossOrMetricFn ${fn}`);\n if (typeof fn === 'string') {\n return fn;\n }\n else {\n let fnName;\n for (const key of Object.keys(lossesMap)) {\n if (lossesMap[key] === fn) {\n fnName = key;\n break;\n }\n }\n if (fnName !== undefined) {\n return fnName;\n }\n for (const key of Object.keys(metricsMap)) {\n if (metricsMap[key] === fn) {\n fnName = key;\n break;\n }\n }\n if (fnName !== undefined) {\n return fnName;\n }\n return fn.name;\n }\n}\n//# sourceMappingURL=metrics.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Optimizers.\n */\nimport { train } from '@tensorflow/tfjs-core';\nimport { epsilon } from './backend/common';\nimport { ValueError } from './errors';\n// Add (de)serialize()\n// Porting note: This diverges from the PyKeras implementation and may need to\n// change based on (de)serialization requirements.\nexport function getOptimizer(identifier) {\n const optimizerMap = {\n 'Adagrad': () => train.adagrad(0.01),\n 'Adadelta': () => train.adadelta(1, 0.95, epsilon()),\n 'Adam': () => train.adam(0.001, 0.9, 0.999, epsilon()),\n 'Adamax': () => train.adamax(0.002, 0.9, 0.999, epsilon(), 0),\n 'RMSProp': () => train.rmsprop(0.001, 0.9, 0, epsilon()),\n 'SGD': () => train.sgd(0.01)\n };\n optimizerMap['adagrad'] = optimizerMap['Adagrad'];\n optimizerMap['adadelta'] = optimizerMap['Adadelta'];\n optimizerMap['adam'] = optimizerMap['Adam'];\n optimizerMap['adamax'] = optimizerMap['Adamax'];\n optimizerMap['rmsprop'] = optimizerMap['RMSProp'];\n optimizerMap['sgd'] = optimizerMap['SGD'];\n if (identifier in optimizerMap) {\n return optimizerMap[identifier]();\n }\n throw new ValueError(`Unknown Optimizer ${identifier}`);\n}\n//# 
sourceMappingURL=optimizers.js.map", "/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/** Utility functions related to user-defined metadata. */\n// Maximum recommended serialized size for user-defined metadata.\n// Beyond this limit, a warning message will be printed during model loading and\n// saving.\nexport const MAX_USER_DEFINED_METADATA_SERIALIZED_LENGTH = 1 * 1024 * 1024;\n/**\n * Check validity of user-defined metadata.\n *\n * @param userDefinedMetadata\n * @param modelName Name of the model that the user-defined metadata belongs to.\n * Used during construction of error messages.\n * @param checkSize Whether to check the size of the metadata is under\n * recommended limit. Default: `false`. If `true`, will try stringify the\n * JSON object and print a console warning if the serialzied size is above the\n * limit.\n * @throws Error if `userDefinedMetadata` is not a plain JSON object.\n */\nexport function checkUserDefinedMetadata(userDefinedMetadata, modelName, checkSize = false) {\n if (userDefinedMetadata == null ||\n typeof userDefinedMetadata !== 'object' ||\n Object.getPrototypeOf(userDefinedMetadata) !== Object.prototype ||\n !plainObjectCheck(userDefinedMetadata)) {\n throw new Error('User-defined metadata is expected to be a JSON object, but is not.');\n }\n if (checkSize) {\n const out = JSON.stringify(userDefinedMetadata);\n if (out.length > MAX_USER_DEFINED_METADATA_SERIALIZED_LENGTH) {\n console.warn(`User-defined metadata of model \"${modelName}\" is too large in ` +\n `size (length=${out.length} when serialized). It is not ` +\n `recommended to store such large objects in user-defined metadata. ` +\n `Please make sure its serialized length is <= ` +\n `${MAX_USER_DEFINED_METADATA_SERIALIZED_LENGTH}.`);\n }\n }\n}\n/**\n * Check if an input is plain JSON object or any valid subfield of it.\n *\n * @param x The input to be checked.\n * @param assertObject Whether to assert `x` is a JSON object, i.e., reject\n * cases of arrays and primitives.\n * @return Returns `true` if and only if `x` is a plain JSON object,\n * a JSON-valid primitive including string, number, boolean and null,\n * or an array of the said types.\n */\n// tslint:disable-next-line:no-any\nexport function plainObjectCheck(x) {\n if (x === null) {\n // Note: typeof `null` is 'object', and `null` is valid in JSON.\n return true;\n }\n else if (typeof x === 'object') {\n if (Object.getPrototypeOf(x) === Object.prototype) {\n // `x` is a JavaScript object and its prototype is Object.\n const keys = Object.keys(x);\n for (const key of keys) {\n if (typeof key !== 'string') {\n // JSON keys must be strings.\n return false;\n }\n if (!plainObjectCheck(x[key])) { // Recursive call.\n return false;\n }\n }\n return true;\n }\n else {\n // `x` is a JavaScript object but its prototype is not Object.\n if (Array.isArray(x)) {\n // `x` is a JavaScript array.\n for (const item of x) {\n if (!plainObjectCheck(item)) { // Recursive call.\n return false;\n }\n }\n return true;\n }\n else {\n // `x` is a JavaScript object and its prototype is not Object,\n // and it's not an Array. 
I.e., it's a complex object such as\n // `Error` and `Date`.\n return false;\n }\n }\n }\n else {\n // `x` is not a JavaScript object or `null`.\n const xType = typeof x;\n return xType === 'string' || xType === 'number' || xType === 'boolean';\n }\n}\n//# sourceMappingURL=user_defined_metadata.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport { countParamsInWeights } from './variable_utils';\n/**\n * Print the summary of a LayersModel object.\n *\n * @param model tf.LayersModel instance.\n * @param lineLength Total length of printed lines. Set this to adapt to the\n * display to different terminal or console sizes.\n * @param positions Relative or absolute positions of log elements in each\n * line. Each number corresponds to right-most (i.e., ending) position of a\n * column.\n * If not provided, defaults to `[0.45, 0.85, 1]` for sequential-like\n * models and `[0.33, 0.55, 0.67, 1]` for non-sequential like models.\n * @param printFn Print function to use.\n * It will be called on each line of the summary. You can provide a custom\n * function in order to capture the string summary. Defaults to `console.log`.\n */\nexport function printSummary(model, lineLength, positions, \n// tslint:disable-next-line:no-any\nprintFn = console.log) {\n const sequentialLike = isModelSequentialLike(model);\n // Header names for different log elements.\n const toDisplay = ['Layer (type)', 'Output shape', 'Param #'];\n if (sequentialLike) {\n lineLength = lineLength || 65;\n positions = positions || [0.45, 0.85, 1];\n }\n else {\n lineLength = lineLength || 98;\n positions = positions || [0.33, 0.55, 0.67, 1];\n // Header names for different log elements.\n }\n if (positions[positions.length - 1] <= 1) {\n // `positions` is relative. Convert it to absolute positioning.\n positions = positions.map(p => Math.floor(lineLength * p));\n }\n let relevantNodes;\n if (!sequentialLike) {\n toDisplay.push('Receives inputs');\n relevantNodes = [];\n for (const depth in model.nodesByDepth) {\n relevantNodes.push(...model.nodesByDepth[depth]);\n }\n }\n printFn('_'.repeat(lineLength));\n printRow(toDisplay, positions, printFn);\n printFn('='.repeat(lineLength));\n const layers = model.layers;\n for (let i = 0; i < layers.length; ++i) {\n if (sequentialLike) {\n printLayerSummary(layers[i], positions, printFn);\n }\n else {\n printLayerSummaryWithConnections(layers[i], positions, relevantNodes, printFn);\n }\n printFn((i === layers.length - 1 ? 
'=' : '_').repeat(lineLength));\n }\n // tslint:disable-next-line:no-any\n model.checkTrainableWeightsConsistency();\n const trainableCount = countTrainableParams(model);\n const nonTrainableCount = countParamsInWeights(model.nonTrainableWeights);\n printFn(`Total params: ${trainableCount + nonTrainableCount}`);\n printFn(`Trainable params: ${trainableCount}`);\n printFn(`Non-trainable params: ${nonTrainableCount}`);\n printFn('_'.repeat(lineLength));\n}\nfunction countTrainableParams(model) {\n let trainableCount;\n // tslint:disable:no-any\n if (model.collectedTrainableWeights != null) {\n trainableCount =\n countParamsInWeights(model.collectedTrainableWeights);\n }\n else {\n trainableCount = countParamsInWeights(model.trainableWeights);\n }\n // tslint:enable:no-any\n return trainableCount;\n}\nfunction isModelSequentialLike(model) {\n let sequentialLike = true;\n const nodesByDepth = [];\n const nodes = [];\n for (const depth in model.nodesByDepth) {\n nodesByDepth.push(model.nodesByDepth[depth]);\n }\n for (const depthNodes of nodesByDepth) {\n if (depthNodes.length > 1 ||\n depthNodes.length === 1 && depthNodes[0].inboundLayers.length > 1) {\n sequentialLike = false;\n break;\n }\n nodes.push(...depthNodes);\n }\n if (sequentialLike) {\n // Search for shared layers.\n for (const layer of model.layers) {\n let flag = false;\n for (const node of layer.inboundNodes) {\n if (nodes.indexOf(node) !== -1) {\n if (flag) {\n sequentialLike = false;\n break;\n }\n else {\n flag = true;\n }\n }\n }\n if (!sequentialLike) {\n break;\n }\n }\n }\n return sequentialLike;\n}\nfunction printRow(fields, positions, \n// tslint:disable-next-line:no-any\nprintFn = console.log) {\n let line = '';\n for (let i = 0; i < fields.length; ++i) {\n if (i > 0) {\n line = line.slice(0, line.length - 1) + ' ';\n }\n line += fields[i];\n line = line.slice(0, positions[i]);\n line += ' '.repeat(positions[i] - line.length);\n }\n printFn(line);\n}\n/**\n * Prints a summary for a single Layer, without connectivity information.\n *\n * @param layer: Layer instance to print.\n */\nfunction printLayerSummary(layer, positions, \n// tslint:disable-next-line:no-any\nprintFn) {\n let outputShape;\n try {\n outputShape = JSON.stringify(layer.outputShape);\n }\n catch (err) {\n outputShape = 'multiple';\n }\n const name = layer.name;\n const className = layer.getClassName();\n const fields = [`${name} (${className})`, outputShape, layer.countParams().toString()];\n printRow(fields, positions, printFn);\n}\n/**\n * Prints a summary for a single Layer, with connectivity information.\n */\nfunction printLayerSummaryWithConnections(layer, positions, relevantNodes, \n// tslint:disable-next-line:no-any\nprintFn) {\n let outputShape;\n try {\n outputShape = JSON.stringify(layer.outputShape);\n }\n catch (err) {\n outputShape = 'multiple';\n }\n const connections = [];\n for (const node of layer.inboundNodes) {\n if (relevantNodes != null && relevantNodes.length > 0 &&\n relevantNodes.indexOf(node) === -1) {\n continue;\n }\n for (let i = 0; i < node.inboundLayers.length; ++i) {\n const inboundLayer = node.inboundLayers[i].name;\n const inboundLayerIndex = node.nodeIndices[i];\n const inboundTensorIndex = node.tensorIndices[i];\n connections.push(`${inboundLayer}[${inboundLayerIndex}][${inboundTensorIndex}]`);\n }\n }\n const name = layer.name;\n const className = layer.getClassName();\n const firstConnection = connections.length === 0 ? 
'' : connections[0];\n const fields = [\n `${name} (${className})`, outputShape, layer.countParams().toString(),\n firstConnection\n ];\n printRow(fields, positions, printFn);\n for (let i = 1; i < connections.length; ++i) {\n printRow(['', '', '', connections[i]], positions, printFn);\n }\n}\n//# sourceMappingURL=layer_utils.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport * as generic_utils from '../utils/generic_utils';\n// tslint:enable\n/**\n * Test whether a value in an array is the name of a LayersModel or Layer.\n * @param key The key name that the value is found under. Note that the key\n * may not be at the level immediately above the value, if the value is in a\n * nested array.\n * @param index Index of the value in the Array that it is found in.\n * @param value The value object.\n * @returns A boolean indicating whether value is a name.\n */\nfunction isArrayItemInputOrOutputName(key, index, value) {\n return (key === 'inboundNodes' || key === 'outputLayers' ||\n key === 'inputLayers') &&\n index === 0 && typeof value === 'string';\n}\n/**\n * Convert a Pythonic config object to TypeScript config object.\n * @param pythonicConfig The config object to convert.\n * @param key Optional key name of the object being converted.\n * @returns Result of the conversion.\n */\nexport function convertPythonicToTs(pythonicConfig, key) {\n if (pythonicConfig === null) {\n return null;\n }\n else if (typeof pythonicConfig === 'string') {\n return generic_utils.toCamelCase(pythonicConfig);\n }\n else if ((typeof pythonicConfig === 'number') ||\n (typeof pythonicConfig === 'boolean')) {\n return pythonicConfig;\n }\n else if (pythonicConfig instanceof Array) {\n const tsArray = [];\n const arrayLength = pythonicConfig.length;\n for (let i = 0; i < arrayLength; ++i) {\n const item = pythonicConfig[i];\n if (isArrayItemInputOrOutputName(key, i, item)) {\n tsArray.push(item);\n }\n else {\n tsArray.push(convertPythonicToTs(item, key));\n }\n }\n return tsArray;\n }\n else {\n const tsDict = {};\n for (const pythonicKey of Object.keys(pythonicConfig)) {\n const pythonicValue = pythonicConfig[pythonicKey];\n if (pythonicKey === 'name' && typeof pythonicValue === 'string') {\n // Special case the 'name' key with a string value. 
Name values, such as\n // the names of LayersModel and Layer instances, should not undergo the\n // camel-case conversion.\n tsDict[pythonicKey] = pythonicValue;\n }\n else {\n const tsKey = generic_utils.toCamelCase(pythonicKey);\n tsDict[tsKey] = convertPythonicToTs(pythonicValue, tsKey);\n }\n }\n return tsDict;\n }\n}\n/**\n * Convert a TypeScript config object to Python config object.\n * @param tsConfig The config object to convert.\n * @param key Optional key name of the object being converted.\n * @returns Result of the conversion.\n */\nexport function convertTsToPythonic(tsConfig, key) {\n if (tsConfig === null || tsConfig === undefined) {\n return null;\n }\n else if (typeof tsConfig === 'string') {\n return generic_utils.toSnakeCase(tsConfig);\n }\n else if ((typeof tsConfig === 'number') || (typeof tsConfig === 'boolean')) {\n return tsConfig;\n }\n else if (tsConfig instanceof Array) {\n const pyArray = [];\n const arrayLength = tsConfig.length;\n for (let i = 0; i < arrayLength; ++i) {\n const item = tsConfig[i];\n if (isArrayItemInputOrOutputName(key, i, item)) {\n pyArray.push(item);\n }\n else {\n pyArray.push(convertTsToPythonic(item, key));\n }\n }\n return pyArray;\n }\n else {\n const pyDict = {};\n for (const tsKey of Object.keys(tsConfig)) {\n const tsValue = tsConfig[tsKey];\n const pyKey = generic_utils.toSnakeCase(tsKey);\n if ((tsKey === 'name' || tsKey === 'className') &&\n typeof tsValue === 'string') {\n // Special case the 'name' key with a string value. Name values, such as\n // the names of LayersModel and Layer instances, should not undergo the\n // snake-case conversion.\n pyDict[pyKey] = tsValue;\n }\n else {\n pyDict[pyKey] = convertTsToPythonic(tsValue, tsKey);\n }\n }\n return pyDict;\n }\n}\n//# sourceMappingURL=serialization_utils.js.map", "/** @license See the LICENSE file. */\n// This code is auto-generated, do not modify this file!\nconst version = '2.7.0';\nexport { version };\n//# sourceMappingURL=version.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Executor: Evaluates SymbolicTensor based on feeds.\n */\nimport { cast, dispose, memory, util } from '@tensorflow/tfjs-core';\nimport { ValueError } from '../errors';\nimport { toList } from '../utils/generic_utils';\nimport { InputLayer } from './input_layer';\nimport { SymbolicTensor } from './topology';\n/**\n * Helper function to check the dtype and shape compatibility of a feed value.\n */\nfunction assertFeedCompatibility(key, val) {\n // Check dtype compatibility.\n if (key.dtype == null || key.dtype === val.dtype) {\n // a. If types match, return val tensor as is.\n return val;\n }\n try {\n // b. Attempt to convert to expected type.\n return cast(val, key.dtype);\n }\n catch (err) {\n // c. 
If conversion fails, return helpful error.\n throw new ValueError(`The dtype of the feed (${val.dtype}) can not be cast to the dtype ` +\n `of the key '${key.name}' (${key.dtype}).`);\n }\n}\n/**\n * FeedDict: A mapping from unique SymbolicTensors to feed values for them.\n * A feed value is a concrete value represented as an `Tensor`.\n */\nexport class FeedDict {\n /**\n * Constructor, optionally does copy-construction.\n * @param feeds An Array of `Feed`s, or another `FeedDict`, in which case\n * copy-construction will be performed.\n */\n constructor(feeds) {\n this.id2Value = {};\n this.id2Mask = {};\n this.name2Id = {};\n if (feeds instanceof FeedDict) {\n for (const id in feeds.id2Value) {\n this.id2Value[id] = feeds.id2Value[id];\n if (id in feeds.id2Mask) {\n this.id2Mask[id] = feeds.id2Mask[id];\n }\n }\n }\n else {\n if (feeds == null) {\n return;\n }\n for (const feed of feeds) {\n this.add(feed.key, feed.value);\n }\n }\n }\n /**\n * Add a key-value pair to the FeedDict.\n *\n * @param key The key of the feed.\n * @param value The value of the tensor feed.\n * @param mask The value of the mask feed (optional).\n * @returns This `FeedDict`.\n * @throws ValueError: If the key `SymbolicTensor` already exists in the\n * `FeedDict`.\n */\n add(key, value, mask) {\n if (this.id2Value[key.id] == null) {\n this.id2Value[key.id] = assertFeedCompatibility(key, value);\n this.name2Id[key.name] = key.id;\n if (mask != null) {\n this.id2Mask[key.id] = mask;\n }\n }\n else {\n throw new ValueError(`Duplicate key: name=${key.name}, id=${key.id}`);\n }\n return this;\n }\n /**\n * Add a Feed to the FeedDict.\n * @param feed The new `Feed` to add.\n * @returns This `FeedDict`.\n */\n addFeed(feed) {\n this.add(feed.key, feed.value);\n }\n /**\n * Probe whether a key already exists in the FeedDict.\n * @param key\n */\n hasKey(key) {\n return this.id2Value[key.id] != null;\n }\n /**\n * Get all the SymbolicTensor available in this FeedDict.\n */\n names() {\n return Object.keys(this.name2Id);\n }\n /**\n * Get the feed value for given key.\n * @param key The SymbolicTensor, or its name (as a string), of which the\n * value is sought.\n * @returns If `key` exists, the corresponding feed value.\n * @throws ValueError: If `key` does not exist in this `FeedDict`.\n */\n getValue(key) {\n if (key instanceof SymbolicTensor) {\n if (this.id2Value[key.id] == null) {\n throw new ValueError(`Nonexistent key: ${key.name}`);\n }\n else {\n return this.id2Value[key.id];\n }\n }\n else {\n const id = this.name2Id[key];\n if (id == null) {\n throw new ValueError(`Feed dict has no SymbolicTensor name: ${key}`);\n }\n return this.id2Value[id];\n }\n }\n /**\n * Get the feed mask for given key.\n * @param key The SymbolicTensor, or its name (as a string), of which the\n * value is sought.\n * @returns If `key` exists, the corresponding feed mask.\n * @throws ValueError: If `key` does not exist in this `FeedDict`.\n */\n getMask(key) {\n if (key instanceof SymbolicTensor) {\n if (this.id2Value[key.id] == null) {\n throw new ValueError(`Nonexistent key: ${key.name}`);\n }\n else {\n return this.id2Mask[key.id];\n }\n }\n else {\n const id = this.name2Id[key];\n if (id == null) {\n throw new ValueError(`Feed dict has no SymbolicTensor name: ${key}`);\n }\n return this.id2Mask[id];\n }\n }\n /** Dispose all mask Tensors held by this object. 
*/\n disposeMasks() {\n if (this.id2Mask != null) {\n dispose(this.id2Mask);\n }\n }\n}\n// Cache for topologically sorted SymbolicTensors for given execution\n// targets (i.e., fetches).\nconst cachedSorted = {};\n// Cache for recipient count maps for given execution targets (i.e., fetches).\nconst cachedRecipientCounts = {};\n/**\n * Execute a SymbolicTensor by using concrete feed values.\n *\n * A `SymbolicTensor` object is a node in a computation graph of TF.js\n * Layers. The object is backed by a source layer and input\n * `SymbolicTensor`s to the source layer. This method evaluates\n * the `call()` method of the source layer, using concrete values of the\n * inputs obtained from either\n * * `feedDict`, if the input key exists in `feedDict`, or else,\n * * a recursive call to `execute()` itself.\n *\n * @param x: The `SymbolicTensor` to execute.\n * @param feedDict: The feed values, as base condition of the recursion.\n * execution.\n * @param kwargs: Optional keyword arguments.\n * @param probe: A probe object (of interface `ExecutionProbe`) used for\n * testing memory footprint of `execute` calls.\n * @returns Result of the execution.\n * @throws ValueError: If any `SymbolicTensor`s from `InputLayer`s\n * encountered during the execution lacks a feed value in `feedDict`.\n */\nexport function execute(fetches, feedDict, kwargs, probe) {\n const training = kwargs == null ? false : kwargs['training'];\n const arrayFetches = Array.isArray(fetches);\n const fetchArray = arrayFetches ? fetches : [fetches];\n const outputNames = fetchArray.map(t => t.name);\n const finalOutputs = [];\n const feedNames = feedDict.names();\n for (const outputName of outputNames) {\n if (feedNames.indexOf(outputName) !== -1) {\n finalOutputs.push(feedDict.getValue(outputName));\n }\n else {\n finalOutputs.push(null);\n }\n }\n if (probe != null) {\n // For optional probing of memory footprint during execution.\n probe.maxNumTensors = -Infinity;\n probe.minNumTensors = Infinity;\n }\n // Check cache.\n const fetchAndFeedKey = outputNames.join(',') + '|' + feedDict.names().join(',');\n let sorted;\n let recipientCounts;\n if (cachedSorted[fetchAndFeedKey] == null) {\n // Cache doesn't contain the desired combination of fetches. 
Compute\n // topological sort for the combination for the first time.\n const out = getTopologicalSortAndRecipientCounts(fetchArray, feedDict);\n sorted = out.sorted;\n recipientCounts = out.recipientCounts;\n // Store results in cache for future use.\n cachedSorted[fetchAndFeedKey] = sorted;\n cachedRecipientCounts[fetchAndFeedKey] = recipientCounts;\n }\n sorted = cachedSorted[fetchAndFeedKey];\n recipientCounts = {};\n if (!training) {\n Object.assign(recipientCounts, cachedRecipientCounts[fetchAndFeedKey]);\n }\n const internalFeedDict = new FeedDict(feedDict);\n // Start iterative execution on the topologically-sorted SymbolicTensors.\n for (let i = 0; i < sorted.length; ++i) {\n if (probe != null) {\n // For optional probing of memory usage during execution.\n const numTensors = memory().numTensors;\n if (numTensors > probe.maxNumTensors) {\n probe.maxNumTensors = numTensors;\n }\n if (numTensors < probe.minNumTensors) {\n probe.minNumTensors = numTensors;\n }\n }\n const symbolic = sorted[i];\n const srcLayer = symbolic.sourceLayer;\n if (srcLayer instanceof InputLayer) {\n continue;\n }\n const inputValues = [];\n const inputMasks = [];\n const tensorsToDispose = [];\n let maskExists = false;\n for (const input of symbolic.inputs) {\n const value = internalFeedDict.getValue(input);\n const mask = internalFeedDict.getMask(input);\n inputValues.push(value);\n inputMasks.push(mask);\n if (mask != null) {\n maskExists = true;\n }\n if (!training) {\n recipientCounts[input.name]--;\n if (recipientCounts[input.name] === 0 && !feedDict.hasKey(input) &&\n outputNames.indexOf(input.name) === -1 && !value.isDisposed &&\n input.sourceLayer.stateful !== true) {\n tensorsToDispose.push(value);\n }\n }\n }\n if (maskExists) {\n kwargs = kwargs || {};\n kwargs['mask'] = inputMasks[0];\n }\n const outputTensors = toList(srcLayer.apply(inputValues, kwargs));\n let outputMask = null;\n if (srcLayer.supportsMasking) {\n outputMask = srcLayer.computeMask(inputValues, inputMasks);\n }\n const layerOutputs = getNodeOutputs(symbolic);\n const outputSymbolicTensors = Array.isArray(layerOutputs) ? layerOutputs : [layerOutputs];\n for (let i = 0; i < outputSymbolicTensors.length; ++i) {\n if (!internalFeedDict.hasKey(outputSymbolicTensors[i])) {\n internalFeedDict.add(outputSymbolicTensors[i], outputTensors[i], Array.isArray(outputMask) ? outputMask[0] : outputMask);\n }\n const index = outputNames.indexOf(outputSymbolicTensors[i].name);\n if (index !== -1) {\n finalOutputs[index] = outputTensors[i];\n }\n }\n if (!training) {\n // Clean up Tensors that are no longer needed.\n dispose(tensorsToDispose);\n }\n }\n // NOTE(cais): Unlike intermediate tensors, we don't discard mask\n // tensors as we go, because these tensors are sometimes passed over a\n // series of mutliple layers, i.e., not obeying the immediate input\n // relations in the graph. If this becomes a memory-usage concern,\n // we can improve this in the future.\n internalFeedDict.disposeMasks();\n return arrayFetches ? finalOutputs : finalOutputs[0];\n}\n/**\n * Sort the `SymbolicTensor`s topologically, for an array of fetches.\n *\n * This function calls getTopologicalSortAndRecipientCountsForOneFetch and\n * merges their results.\n *\n * @param fetch The array of fetches requested. 
Must be a non-empty array.\n * @param feedDict The dictionary of fed values.\n * @returns sorted: Topologically-sorted array of SymbolicTensors.\n * recipientCounts: Recipient counts for all SymbolicTensors in `sorted`.\n */\nfunction getTopologicalSortAndRecipientCounts(fetches, feedDict) {\n util.assert(fetches != null && fetches.length > 0, () => `Expected at least one fetch, got none`);\n let finalSorted = [];\n let finalRecipientMap = {};\n if (fetches.length === 1) {\n // Special-casing 1 fetch for efficiency.\n const out = getTopologicalSortAndRecipientCountsForOneFetch(fetches[0], feedDict);\n finalSorted = out.sorted;\n finalRecipientMap = out.recipientMap;\n }\n else {\n const visited = new Set();\n for (const fetch of fetches) {\n const { sorted, recipientMap } = getTopologicalSortAndRecipientCountsForOneFetch(fetch, feedDict);\n // Merge sorted SymbolicTensor Arrays.\n for (const symbolicTensor of sorted) {\n if (!visited.has(symbolicTensor.name)) {\n finalSorted.push(symbolicTensor);\n visited.add(symbolicTensor.name);\n }\n }\n // Merge recipient maps.\n for (const name in recipientMap) {\n if (finalRecipientMap[name] == null) {\n finalRecipientMap[name] = new Set();\n }\n recipientMap[name].forEach(recipient => finalRecipientMap[name].add(recipient));\n }\n }\n }\n return {\n sorted: finalSorted,\n recipientCounts: recipientMap2Counts(finalRecipientMap)\n };\n}\nfunction recipientMap2Counts(recipientMap) {\n const recipientCounts = {};\n for (const name in recipientMap) {\n recipientCounts[name] = recipientMap[name].size;\n }\n return recipientCounts;\n}\n/**\n * Sort the `SymbolicTensor`s topologically, for a single fetch.\n *\n * This helper function processes the upstream SymbolicTensors of a single\n * fetch.\n *\n * @param fetch The single fetch requested.\n * @param feedDict The dictionary of fed values.\n * @returns sorted: Topologically-sorted array of SymbolicTensors.\n * recipientMap: Recipient names for all SymbolicTensors in `sorted`.\n */\nexport function getTopologicalSortAndRecipientCountsForOneFetch(fetch, feedDict) {\n const visited = new Set();\n const sorted = [];\n const recipientMap = {};\n // Put keys of the feedDict into visited first, so they don't have to be\n // walked. This is needed in case where there are feeds for intermediate\n // SymbolicTensors of the graph.\n for (const key of feedDict.names()) {\n visited.add(key);\n }\n const stack = [];\n const marks = [];\n // Initial population of stack and marks.\n stack.push(fetch);\n while (stack.length > 0) {\n const top = stack[stack.length - 1];\n if (visited.has(top.name)) {\n stack.pop();\n continue;\n }\n const topIsMarked = marks[marks.length - 1] === stack.length - 1;\n if (top.inputs.length === 0 || topIsMarked) {\n // Input SymbolicTensor or all children have been visited.\n stack.pop();\n sorted.push(top);\n visited.add(top.name);\n if (topIsMarked) {\n marks.pop();\n }\n }\n else {\n // A non-input SymbolicTensor whose upstream SymbolicTensors haven't\n // been visited yet. Push them onto the stack.\n marks.push(stack.length - 1);\n for (const input of top.inputs) {\n // Increment the recipient count. 
Note that this needs to happen\n // regardless of whether the SymbolicTensor has been visited before.\n if (recipientMap[input.name] == null) {\n recipientMap[input.name] = new Set();\n }\n recipientMap[input.name].add(top.name);\n if (visited.has(input.name)) {\n continue; // Avoid repeated visits to the same SymbolicTensor.\n }\n stack.push(input);\n }\n }\n }\n return { sorted, recipientMap };\n}\n/**\n * Get the symbolic output tensors of the node to which a given fetch belongs.\n * @param fetch The fetched symbolic tensor.\n * @returns The Array of symbolic tensors output by the node to which `fetch`\n * belongs.\n */\nfunction getNodeOutputs(fetch) {\n let layerOutputs;\n if (fetch.sourceLayer.inboundNodes.length === 1) {\n layerOutputs = fetch.sourceLayer.output;\n }\n else {\n let nodeIndex = null;\n for (let i = 0; i < fetch.sourceLayer.inboundNodes.length; ++i) {\n for (const outputTensor of fetch.sourceLayer.inboundNodes[i]\n .outputTensors) {\n if (outputTensor.id === fetch.id) {\n nodeIndex = i;\n break;\n }\n }\n }\n layerOutputs = fetch.sourceLayer.getOutputAt(nodeIndex);\n }\n return layerOutputs;\n}\n//# sourceMappingURL=executor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* Original source: keras/engine/topology.py */\nimport { tidy } from '@tensorflow/tfjs-core';\nimport { getUid } from '../backend/state';\nimport { NotImplementedError, RuntimeError, ValueError } from '../errors';\nimport { deserialize as deserializeLayer } from '../layers/serialization';\nimport * as generic_utils from '../utils/generic_utils';\nimport { convertTsToPythonic } from '../utils/serialization_utils';\nimport * as types_utils from '../utils/types_utils';\nimport { batchSetValue } from '../variables';\nimport { version as layersVersion } from '../version';\nimport { execute, FeedDict } from './executor';\nimport { InputLayer } from './input_layer';\nimport { Layer, Node } from './topology';\n/**\n * A Container is a directed acyclic graph of layers.\n *\n * It is the topological form of a \"model\". A LayersModel\n * is simply a Container with added training routines.\n *\n */\nexport class Container extends Layer {\n constructor(args) {\n // No args passed to super's constructor.\n super({});\n this.containerNodes = new Set();\n this.name = args.name;\n if (this.name == null) {\n const prefix = this.getClassName().toLowerCase();\n this.name = getUid(prefix);\n }\n this.supportsMasking = false;\n this.trainable_ = true;\n // TODO(michaelterry): Initialize perInputLosses/Updates here.\n // Container-specific properties.\n if (Array.isArray(args.inputs)) {\n this.inputs = args.inputs.slice();\n }\n else {\n this.inputs = [args.inputs];\n }\n if (Array.isArray(args.outputs)) {\n this.outputs = args.outputs.slice();\n }\n else {\n this.outputs = [args.outputs];\n }\n // Check for redundancy in inputs.\n if (generic_utils.unique(this.inputs).length !== this.inputs.length) {\n throw new ValueError('The list of inputs passed to the model is ' +\n 'redundant. All inputs should only appear once. Found: ' +\n `${this.inputs.map(x => x.name)}`);\n }\n // Check for redundancy in outputs.\n if (generic_utils.unique(this.outputs).length !== this.outputs.length) {\n console.warn('The list of outputs passed to the model is redundant. 
' +\n 'All outputs should only appear once. Found: ' +\n `${this.outputs.map(x => x.name)}`);\n }\n /*\n List of initial layers (1 to 1 mapping with this.inputs, hence the same\n layer might appear twice)\n */\n this.inputLayers = [];\n this.inputLayersNodeIndices = [];\n this.inputLayersTensorIndices = [];\n /*\n List of layers (1 to 1 mapping with this.outputs, hence the same layer\n might appear twice)\n */\n this.outputLayers = [];\n this.outputLayersNodeIndices = [];\n this.outputLayersTensorIndices = [];\n /*\n All layers in order of horizontal graph traversal. Entries are unique.\n Includes input and output layers.\n */\n this.layers = [];\n /*\n References to container layers that were constructed internally. We need\n these to properly dispose of tensors from nested containers.\n */\n this.internalContainerRefs = [];\n // TODO(michaelterry): Determine if caching still needed with eager\n // backend.\n /*\n This is for performance optimization when calling the Container on new\n inputs. Every time the Container is called on a set on input tensors,\n we compute the output tensors, output masks and output shapes in one pass,\n then cache them here. When one of these outputs is queried later,\n we retrieve it from there instead of recomputing it.\n */\n // this.outputTensorCache = {};\n // this.outputShapeCache = {};\n // Build this.outputLayers:\n for (const x of this.outputs) {\n const layer = x.sourceLayer;\n const nodeIndex = x.nodeIndex;\n const tensorIndex = x.tensorIndex;\n this.outputLayers.push(layer);\n this.outputLayersNodeIndices.push(nodeIndex);\n this.outputLayersTensorIndices.push(tensorIndex);\n }\n // TODO(michaelterry): Add output mask cache code.\n // Build this.inputLayers:\n for (const x of this.inputs) {\n const layer = x.sourceLayer;\n const nodeIndex = x.nodeIndex;\n const tensorIndex = x.tensorIndex;\n /*\n It's supposed to be an input layer, so only one node\n and one tensor output.\n */\n generic_utils.assert(nodeIndex === 0, 'input layer has >1 nodes');\n generic_utils.assert(tensorIndex === 0, 'input layer has >1 tensors');\n this.inputLayers.push(layer);\n this.inputLayersNodeIndices.push(nodeIndex);\n this.inputLayersTensorIndices.push(tensorIndex);\n }\n // Build this.inputNames and this.outputNames.\n this.inputNames = [];\n this.outputNames = [];\n this.feedInputShapes = [];\n this.feedInputNames = [];\n this.feedOutputNames = [];\n for (let i = 0; i < this.inputLayers.length; i++) {\n const layer = this.inputLayers[i];\n // Check that layer is an InputLayer.\n if (!(layer instanceof InputLayer)) {\n throw new TypeError('Input layers to a LayersModel must be InputLayer objects. ' +\n `Received inputs: ${args.inputs}. 
` +\n `Input ${i} (0-based) originates ` +\n `from layer type ${layer.getClassName()}.`);\n }\n this.inputNames.push(layer.name);\n this.feedInputShapes.push(layer.batchInputShape);\n this.feedInputNames.push(layer.name);\n }\n for (const layer of this.outputLayers) {\n this.outputNames.push(layer.name);\n }\n this.internalInputShapes = this.inputs.map(x => x.shape);\n this.internalOutputShapes = this.outputs.map(x => x.shape);\n /*\n Container_nodes: set of nodes included in the graph (not all nodes\n included in the layers are relevant to the current graph).\n */\n // ids of all nodes relevant to the Container:\n const nodesDepths = {};\n // To recover nodes from their ID.\n const nodeIDToNode = {};\n const layersDepths = {};\n // To layers from their ID.\n const layerIDToLayer = {};\n const layerIndices = {};\n const nodesInDecreasingDepth = [];\n /**\n * Builds a map of the graph of layers.\n *\n * This recursively updates the map `layerIndices`,\n * the list `nodesInDecreasingDepth` and the set `containerNodes`.\n *\n * @param tensor Some tensor in a graph.\n * @param finishedNodes Set of nodes whose subgraphs have been traversed\n * completely. Useful to prevent duplicated work.\n * @param nodesInProgress Set of nodes that are currently active on the\n * recursion stack. Useful to detect cycles.\n * @param layer Layer from which `tensor` comes from. If not provided,\n * will be obtained from tensor.sourceLayer.\n * @param nodeIndex Node index from which `tensor` comes from.\n * @param tensorIndex TensorIndex from which `tensor` comes from.\n *\n * @exception RuntimeError if a cycle is detected.\n */\n const buildMapOfGraph = (tensor, finishedNodes, nodesInProgress, layer, nodeIndex, tensorIndex) => {\n if (layer == null || nodeIndex == null || tensorIndex == null) {\n layer = tensor.sourceLayer;\n nodeIndex = tensor.nodeIndex;\n tensorIndex = tensor.tensorIndex;\n }\n const node = layer.inboundNodes[nodeIndex];\n // Prevent cycles.\n if (nodesInProgress.indexOf(node) !== -1) {\n throw new RuntimeError(`The tensor ${tensor.name} at layer \"${layer.name}\" ` +\n 'is part of a cycle.');\n }\n // Don't repeat work for shared subgraphs\n if (finishedNodes.indexOf(node) !== -1) {\n return;\n }\n // Update containerNodes.\n this.containerNodes.add(Container.nodeKey(layer, nodeIndex));\n // Store the traversal order for layer sorting.\n if (!(layer.id in layerIndices)) {\n layerIndices[layer.id] = Object.keys(layerIndices).length;\n }\n if (nodesInProgress.indexOf(node) === -1) {\n nodesInProgress.push(node);\n }\n // Propagate to all previous tensors connected to this node.\n const numInboundLayers = node.inboundLayers.length;\n for (let i = 0; i < numInboundLayers; i++) {\n const x = node.inputTensors[i];\n const layer = node.inboundLayers[i];\n const nodeIndex = node.nodeIndices[i];\n const tensorIndex = node.tensorIndices[i];\n buildMapOfGraph(x, finishedNodes, nodesInProgress, layer, nodeIndex, tensorIndex);\n }\n finishedNodes.push(node);\n while (nodesInProgress.indexOf(node) >= 0) {\n nodesInProgress.splice(nodesInProgress.indexOf(node), 1);\n }\n nodesInDecreasingDepth.push(node);\n };\n const finishedNodes = [];\n const nodesInProgress = [];\n for (const x of this.outputs) {\n buildMapOfGraph(x, finishedNodes, nodesInProgress);\n }\n const reversedNodesInDecreasingDepth = nodesInDecreasingDepth.slice().reverse();\n for (const node of reversedNodesInDecreasingDepth) {\n nodeIDToNode[node.id] = node;\n // If the depth is not set, the node has no outbound nodes (depth 0).\n if 
(!(node.id in nodesDepths)) {\n nodesDepths[node.id] = 0;\n }\n let depth = nodesDepths[node.id];\n // Update the depth of the corresponding layer\n const previousDepth = (layersDepths[node.outboundLayer.id] == null ?\n 0 :\n layersDepths[node.outboundLayer.id]);\n /*\n If we've seen this layer before at a higher depth, we should use that\n depth instead of the node depth. This is necessary for shared layers\n that have inputs at different depth levels in the graph.\n */\n depth = Math.max(depth, previousDepth);\n layersDepths[node.outboundLayer.id] = depth;\n layerIDToLayer[node.outboundLayer.id] = node.outboundLayer;\n nodesDepths[node.id] = depth;\n // Update the depth of inbound nodes.\n for (let i = 0; i < node.inboundLayers.length; i++) {\n const inboundLayer = node.inboundLayers[i];\n const nodeIndex = node.nodeIndices[i];\n const inboundNode = inboundLayer.inboundNodes[nodeIndex];\n const previousDepth = (nodesDepths[inboundNode.id] == null ? 0 :\n nodesDepths[inboundNode.id]);\n nodesDepths[inboundNode.id] = Math.max(depth + 1, previousDepth);\n nodeIDToNode[inboundNode.id] = inboundNode;\n }\n }\n // Build a dict {depth: list of nodes with this depth}\n const nodesByDepth = {};\n for (const nodeID in nodesDepths) {\n const depth = nodesDepths[nodeID];\n if (!(depth in nodesByDepth)) {\n nodesByDepth[depth] = [];\n }\n nodesByDepth[depth].push(nodeIDToNode[nodeID]);\n }\n // Build a dict {depth: list of layers with this depth}\n const layersByDepth = {};\n for (const layerID in layersDepths) {\n const depth = layersDepths[layerID];\n if (!(depth in layersByDepth)) {\n layersByDepth[depth] = [];\n }\n layersByDepth[depth].push(layerIDToLayer[layerID]);\n }\n // Get sorted list of layer depths.\n let depthKeys = Object.keys(layersByDepth)\n .map(x => parseInt(x, 10))\n .sort(generic_utils.reverseNumberCompare);\n // Set this.layers and this.layersByDepth.\n this.layers = [];\n for (const depth of depthKeys) {\n const layersForDepth = layersByDepth[depth];\n // Container.layers needs to have a deterministic order:\n // here we order them by traversal order.\n layersForDepth.sort((a, b) => {\n const aIndex = layerIndices[a.id];\n const bIndex = layerIndices[b.id];\n if (aIndex < bIndex) {\n return -1;\n }\n if (aIndex > bIndex) {\n return 1;\n }\n return 0;\n });\n for (const layer of layersForDepth) {\n if (layer instanceof Container) {\n this.internalContainerRefs.push(layer);\n }\n this.layers.push(layer);\n }\n }\n this.layersByDepth = layersByDepth;\n // Get sorted list of node depths;\n depthKeys = Object.keys(nodesByDepth)\n .map(x => parseInt(x, 10))\n .sort(generic_utils.reverseNumberCompare);\n // Check that all tensors required are computable.\n // computable_tensors: all tensors in the graph\n // that can be computed from the inputs provided.\n const computableTensors = this.inputs.slice();\n // To provide a better error msg.\n const layersWithCompleteInput = [];\n for (const depth of depthKeys) {\n for (const node of nodesByDepth[depth]) {\n const layer = node.outboundLayer;\n if (layer != null) {\n for (const x of node.inputTensors) {\n if (computableTensors.indexOf(x) === -1) {\n throw new RuntimeError(`Graph disconnected: cannot obtain value for tensor ${x}` +\n ` at layer \"${layer.name}\". 
` +\n 'The following previous layers were accessed without ' +\n `issue: ${layersWithCompleteInput}`);\n }\n }\n for (const x of node.outputTensors) {\n computableTensors.push(x);\n }\n layersWithCompleteInput.push(layer.name);\n }\n }\n }\n // Set this.containerNodes and this.nodesByDepth.\n this.nodesByDepth = nodesByDepth;\n // Ensure name unicity, which will be crucial for serialization\n // (since serialized nodes refer to layers by their name).\n const allNames = this.layers.map(x => x.name);\n for (const name of allNames) {\n const numOccurrences = allNames.filter(x => x === name).length;\n if (numOccurrences !== 1) {\n throw new RuntimeError(`The name \"${name}\" is used ${numOccurrences} times ` +\n 'in the model. All layer names should be unique. Layer names: ' +\n JSON.stringify(allNames));\n }\n }\n // Layer parameters.\n // The new container starts with a single inbound node\n // for its inputs, and no outbound nodes.\n // Will be appended to by future calls to apply().\n this.outboundNodes = [];\n // Will be appended to below, and by future calls to apply().\n this.inboundNodes = [];\n // Create the node linking internal inputs to internal outputs.\n // (This call has side effects.)\n // tslint:disable-next-line:no-unused-expression\n new Node({\n outboundLayer: this,\n inboundLayers: [],\n nodeIndices: [],\n tensorIndices: [],\n inputTensors: this.inputs,\n outputTensors: this.outputs,\n inputMasks: this.inputs.map(x => null),\n outputMasks: this.outputs.map(x => null),\n inputShapes: this.inputs.map(x => x.shape),\n outputShapes: this.outputs.map(x => x.shape)\n });\n this.built = true;\n this._refCount = 1; // The ref count of a container always start at 1.\n }\n assertNotDisposed() {\n if (this._refCount === 0) {\n throw new Error(`Container '${this.name}' is already disposed.`);\n }\n }\n /**\n * Attempt to dispose a LayersModel's weights.\n *\n * This method decrease the reference count of the LayersModel object by 1.\n *\n * A LayersModel is reference-counted. 
Its reference count is incremented by 1\n * when it is first constructed and when it is used as a Layer of another\n * LayersModel.\n *\n * If the reference count of a LayersModel becomes 0, the `dispose` method of\n * all its constituent `Layer`s will be called.\n *\n * Note: If the reference count is greater than 0 after the decrement, the\n * `dispose` method of its constituent `Layer`s will *not* be called.\n *\n * After a LayersModel is disposed, it cannot be used in calls such as\n * 'predict`, `evaluate` or `fit` anymore.\n *\n * @returns A DisposeResult Object with the following fields:\n * - refCountAfterDispose: The reference count of the LayersModel after this\n * `dispose()` call.\n * - numDisposedVariables: Number of `tf.Variable`s (i.e., weights) disposed\n * during this `dispose()` call.\n * @throws {Error} If the layer is not built yet, or if the LayersModel has\n * already been disposed.\n */\n dispose() {\n this.assertNotDisposed();\n const result = { refCountAfterDispose: null, numDisposedVariables: 0 };\n if (--this._refCount === 0) {\n for (const layer of this.layers) {\n result.numDisposedVariables += layer.dispose().numDisposedVariables;\n }\n // Call dispose on each internally created container layer again to ensure\n // their refCounts hit zero and their tensors are subsequently deleted.\n for (const container of this.internalContainerRefs) {\n result.numDisposedVariables += container.dispose().numDisposedVariables;\n }\n }\n result.refCountAfterDispose = this._refCount;\n return result;\n }\n get trainable() {\n return this.trainable_;\n }\n set trainable(trainable) {\n this.layers.forEach(layer => {\n // tslint:disable-next-line:no-any\n layer._trainableWeights\n .forEach(w => w.trainable = trainable);\n });\n this.trainable_ = trainable;\n }\n get trainableWeights() {\n // Porting Note: This check below is to prevent errors where the\n // _trainableWeights inherited from the parent class (Layer) gets\n // inadvertently used.\n if (this._trainableWeights.length > 0) {\n throw new ValueError('Container instance unexpectedly contains _trainableWeights.' +\n 'The trainable weights of a Container are a union of the ' +\n 'trainable weights of its consituent Layers. Its own ' +\n '_trainableWeights must remain an empty Array.');\n }\n if (!this.trainable) {\n return [];\n }\n let weights = [];\n for (const layer of this.layers) {\n weights = weights.concat(layer.trainableWeights);\n }\n return weights;\n }\n get nonTrainableWeights() {\n const weights = [];\n for (const layer of this.layers) {\n weights.push(...layer.nonTrainableWeights);\n }\n if (!this.trainable) {\n const trainableWeights = [];\n for (const layer of this.layers) {\n trainableWeights.push(...layer.trainableWeights);\n }\n return trainableWeights.concat(weights);\n }\n return weights;\n }\n get weights() {\n return this.trainableWeights.concat(this.nonTrainableWeights);\n }\n /**\n * Loads all layer weights from a JSON object.\n *\n * Porting Note: HDF5 weight files cannot be directly loaded in JavaScript /\n * TypeScript. 
The utility script at `scripts/pykeras.py` offers means\n * to convert them into JSON strings compatible with this method.\n * Porting Note: TensorFlow.js Layers supports only loading by name currently.\n *\n * @param weights A JSON mapping weight names to weight values as nested\n * arrays of numbers, or a `NamedTensorMap`, i.e., a JSON mapping weight\n * names to `tf.Tensor` objects.\n * @param strict Require that the provided weights exactly match those\n * required by the container. Default: `true`. Passing `false` means that\n * extra weights and missing weights will be silently ignored.\n */\n loadWeights(weights, strict = true) {\n const nameToWeight = {};\n let totalWeightsCount = 0;\n for (const layer of this.layers) {\n for (const weight of layer.weights) {\n if (nameToWeight[weight.originalName] != null) {\n throw new ValueError(`Duplicate weight name: ${weight.originalName}`);\n }\n nameToWeight[weight.originalName] = weight;\n totalWeightsCount++;\n }\n }\n const weightValueTuples = [];\n for (const name in weights) {\n // TF 2.2.0 added cell name to the weight name in the format of\n // layer_name/cell_name/weight_name, we need to remove\n // the inner cell name.\n let validatedName = name;\n if (nameToWeight[name] == null) {\n const tokens = name.split('/');\n const shortenNameArray = tokens.slice(0, -2).concat([tokens[tokens.length - 1]]);\n validatedName = shortenNameArray.join('/');\n }\n if (nameToWeight[validatedName] != null) {\n weightValueTuples.push([nameToWeight[validatedName], weights[name]]);\n }\n else if (strict) {\n throw new ValueError(`Provided weight data has no target variable: ${name}`);\n }\n delete nameToWeight[validatedName];\n }\n if (strict) {\n // Check that all weights are set.\n const unsetNames = [];\n for (const name in nameToWeight) {\n unsetNames.push(name);\n }\n if (unsetNames.length > 0) {\n throw new ValueError(`${unsetNames.length} of ${totalWeightsCount} weights are not set: ` +\n `${unsetNames}`);\n }\n }\n batchSetValue(weightValueTuples);\n }\n /**\n * Util shared between different serialization methods.\n * @returns LayersModel config with Keras version information added.\n */\n updatedConfig() {\n const theConfig = this.getConfig();\n const modelConfig = {};\n modelConfig['className'] = this.getClassName();\n modelConfig['config'] = theConfig;\n modelConfig['kerasVersion'] = `tfjs-layers ${layersVersion}`;\n // TODO(nielsene): Replace something like K.backend() once\n // possible.\n modelConfig['backend'] = 'TensorFlow.js';\n return modelConfig;\n }\n /**\n * Returns a JSON string containing the network configuration.\n *\n * To load a network from a JSON save file, use\n * models.modelFromJSON(jsonString);\n * @param extraJsonArgs Unused in tfjs-layers, maintained for PyKeras\n * @param returnString Whether the return value should be stringified\n * (default: `true`).\n * @returns a JSON string if `returnString` (default), or a JSON object if\n * `!returnString`.\n */\n // tslint:disable-next-line:no-any\n toJSON(unused, returnString = true) {\n const modelConfig = convertTsToPythonic(this.updatedConfig());\n return returnString ? JSON.stringify(modelConfig) : modelConfig;\n }\n /**\n * Call the model on new inputs.\n *\n * In this case `call` just reapplies all ops in the graph to the new inputs\n * (e.g. build a new computational graph from the provided inputs).\n *\n * @param inputs A tensor or list of tensors.\n * @param mask A mask or list of masks. 
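// Sketch (assumption: `source` and `target` are two models with identical
// architectures): building the name-keyed weight map that loadWeights() above
// consumes, using each LayerVariable's originalName as the key.
const namedWeights = {};
for (const w of source.weights) {
  namedWeights[w.originalName] = w.read();
}
// strict = true (the default) throws if any weight is missing or unmatched.
target.loadWeights(namedWeights, true);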
A mask can be either a tensor or null\n * (no mask).\n *\n * @return A tensor if there is a single output, or a list of tensors if there\n * are more than one outputs.\n */\n call(inputs, kwargs) {\n return tidy(() => {\n inputs = generic_utils.toList(inputs);\n const feedDict = new FeedDict();\n for (let i = 0; i < this.inputs.length; ++i) {\n feedDict.add(this.inputs[i], inputs[i]);\n }\n return execute(this.outputs, feedDict, kwargs);\n });\n }\n /**\n * Computes an output mask tensor.\n *\n * @param inputs Tensor or list of tensors.\n * @param mask Tensor or list of tensors.\n *\n * @return null or a tensor (or list of tensors, one per output tensor of the\n * layer).\n */\n computeMask(inputs, mask) {\n return tidy(() => {\n inputs = generic_utils.toList(inputs);\n let masks;\n if (mask == null) {\n masks = generic_utils.pyListRepeat(null, inputs.length);\n }\n else {\n masks = generic_utils.toList(mask);\n }\n // TODO(michaelterry): Add support for mask caching.\n return this.runInternalGraph(inputs, masks)[1];\n });\n }\n /**\n * Computes the output shape of the layer.\n *\n * Assumes that the layer will be built to match that input shape provided.\n *\n * @param inputShape A shape (tuple of integers) or a list of shape tuples\n * (one per output tensor of the layer). Shape tuples can include null for\n * free dimensions, instead of an integer.\n */\n computeOutputShape(inputShape) {\n const inputShapes = types_utils.normalizeShapeList(inputShape);\n if (inputShapes.length !== this.inputLayers.length) {\n throw new ValueError(`Invalid inputShape argument ${inputShape}: ` +\n `model has ${this.inputLayers.length} tensor inputs.`);\n }\n // TODO(michaelterry): Add caching\n const layersToOutputShapes = {};\n for (let i = 0; i < inputShapes.length; i++) {\n const layer = this.inputLayers[i];\n const inputShape = inputShapes[i];\n // It's an input layer: computeOutputShape is identity,\n // and there is only one node and one tensor output.\n const shapeKey = layer.name + '_0_0';\n layersToOutputShapes[shapeKey] = inputShape;\n }\n const depthKeys = Object.keys(this.nodesByDepth)\n .map(x => parseInt(x, 10))\n .sort(generic_utils.reverseNumberCompare);\n // Iterate over nodes, by depth level.\n if (depthKeys.length > 1) {\n for (const depth of depthKeys) {\n const nodes = this.nodesByDepth[depth];\n for (const node of nodes) {\n // This is always a single layer, never a list.\n const layer = node.outboundLayer;\n if (this.inputLayers.map(x => x.id).indexOf(layer.id) !== -1) {\n // We've already covered the input layers a few lines above.\n continue;\n }\n // Potentially redundant list, same size of node.inputTensors.\n const inputShapes = [];\n for (let j = 0; j < node.inboundLayers.length; j++) {\n const inboundLayer = node.inboundLayers[j];\n const nodeIndex = node.nodeIndices[j];\n const tensorIndex = node.tensorIndices[j];\n const shapeKey = `${inboundLayer.name}_${nodeIndex}_${tensorIndex}`;\n const inputShape = layersToOutputShapes[shapeKey];\n inputShapes.push(inputShape);\n }\n const outputShape = layer.computeOutputShape(generic_utils.singletonOrArray(inputShapes));\n const outputShapes = types_utils.normalizeShapeList(outputShape);\n const nodeIndex = layer.inboundNodes.indexOf(node);\n for (let j = 0; j < outputShapes.length; j++) {\n const shapeKey = `${layer.name}_${nodeIndex}_${j}`;\n layersToOutputShapes[shapeKey] = outputShapes[j];\n }\n }\n }\n }\n // Read final output shapes from layersToOutputShapes.\n const outputShapes = [];\n const outputShapeKeys = [];\n for (let i 
= 0; i < this.outputLayers.length; i++) {\n const layer = this.outputLayers[i];\n const nodeIndex = this.outputLayersNodeIndices[i];\n const tensorIndex = this.outputLayersTensorIndices[i];\n const shapeKey = `${layer.name}_${nodeIndex}_${tensorIndex}`;\n outputShapeKeys.push(shapeKey);\n }\n for (let i = 0; i < outputShapeKeys.length; i++) {\n const key = outputShapeKeys[i];\n generic_utils.assert(key in layersToOutputShapes);\n outputShapes.push(layersToOutputShapes[key]);\n }\n // TODO(michaelterry): Update cache\n return generic_utils.singletonOrArray(outputShapes);\n }\n /**\n * Computes output tensors for new inputs.\n *\n * Note:\n * - Expects `inputs` to be a list (potentially with 1 element).\n *\n * @param inputs List of tensors\n * @param masks List of masks (tensors or null).\n * @return Three lists: outputTensors, outputMasks, outputShapes\n */\n runInternalGraph(inputs, masks) {\n if (masks == null) {\n masks = generic_utils.pyListRepeat(null, inputs.length);\n }\n // Dictionary mapping reference tensors to tuples\n // (computed tensor, compute mask)\n // we assume a 1:1 mapping from tensor to mask\n // TODO: raise exception when a `.computeMask()` call\n // does not return a list the same size as `call`\n const tensorMap = {};\n for (let i = 0; i < this.inputs.length; ++i) {\n const x = this.inputs[i];\n const y = inputs[i];\n const mask = masks[i];\n tensorMap[x.id] = [y, mask];\n }\n const depthKeys = Object.keys(this.nodesByDepth)\n .map(x => parseInt(x, 10))\n .sort(generic_utils.reverseNumberCompare);\n for (const depth of depthKeys) {\n const nodes = this.nodesByDepth[depth];\n for (const node of nodes) {\n // This is always a single layer, never a list.\n const layer = node.outboundLayer;\n const referenceInputTensors = node.inputTensors;\n const referenceOutputTensors = node.outputTensors;\n // If all previous input tensors are available in tensorMap,\n // then call node.inboundLayer on them.\n // List of tuples [input, mask]:\n const computedData = new Array();\n for (const x of referenceInputTensors) {\n if (x.id in tensorMap) {\n computedData.push(tensorMap[x.id]);\n }\n }\n if (computedData.length === referenceInputTensors.length) {\n // TODO(michaelterry): Add K.name_scope here, if we need it.\n let kwargs = {};\n let computedTensors;\n let computedMasks;\n let outputTensors;\n let outputMasks;\n // call layer\n if (node.callArgs != null) {\n kwargs = node.callArgs;\n }\n if (computedData.length === 1) {\n const [computedTensor, computedMask] = computedData[0];\n if (kwargs['mask'] == null) {\n kwargs['mask'] = computedMask;\n }\n outputTensors =\n generic_utils.toList(layer.call(computedTensor, kwargs));\n outputMasks = generic_utils.toList(layer.computeMask(computedTensor, computedMask));\n computedTensors = [computedTensor];\n computedMasks = [computedMask];\n }\n else {\n computedTensors = computedData.map(x => x[0]);\n computedMasks = computedData.map(x => x[1]);\n if (kwargs['mask'] == null) {\n kwargs['mask'] = computedMasks;\n }\n outputTensors =\n generic_utils.toList(layer.call(computedTensors, kwargs));\n outputMasks = generic_utils.toList(layer.computeMask(computedTensors, computedMasks));\n }\n if (layer.activityRegularizer) {\n throw new NotImplementedError('LayersModel invocation with concrete Tensor value(s) in the ' +\n 'presence of activity regularizer(s) is not supported yet.');\n }\n // TODO(michaelterry): Add model updates and losses\n // Update tensor map.\n for (let i = 0; i < referenceOutputTensors.length; ++i) {\n const x = 
referenceOutputTensors[i];\n const y = outputTensors[i];\n const mask = outputMasks[i];\n tensorMap[x.id] = [y, mask];\n }\n }\n }\n }\n const outputTensors = [];\n const outputMasks = [];\n const outputShapes = [];\n for (const x of this.outputs) {\n generic_utils.assert(x.id in tensorMap, `Could not compute output ${x.name} : ${x.id}`);\n const [tensor, mask] = tensorMap[x.id];\n outputShapes.push(tensor.shape);\n outputTensors.push(tensor);\n outputMasks.push(mask);\n }\n // TODO(michaelterry): Add support for caches.\n return [outputTensors, outputMasks, outputShapes];\n }\n /**\n * Builds a map of internal node keys to node ordering.\n * Used in serializaion a node orderings may change as unused nodes are\n * dropped. Porting Note: This helper method was pulled out of getConfig to\n * improve readability.\n * @param layers An array of Layers in the model.\n * @returns Map of Node Keys to index order within the layer.\n */\n buildNodeConversionMap(layers) {\n const nodeConversionMap = {};\n let keptNodes;\n for (const layer of this.layers) {\n keptNodes = layer instanceof Container ? 1 : 0;\n for (let originalNodeIndex = 0; originalNodeIndex < layer.inboundNodes.length; originalNodeIndex++) {\n const nodeKey = Container.nodeKey(layer, originalNodeIndex);\n if (this.containerNodes.has(nodeKey)) {\n // i.e. we mark it to be saved\n nodeConversionMap[nodeKey] = keptNodes;\n keptNodes += 1;\n }\n }\n }\n return nodeConversionMap;\n }\n /**\n * Retrieves a layer based on either its name (unique) or index.\n *\n * Indices are based on order of horizontal graph traversal (bottom-up).\n *\n * If both `name` and `index` are specified, `index` takes precedence.\n *\n * @param name Name of layer.\n * @param index Index of layer.\n * @returns A Layer instance.\n * @throws ValueError: In case of invalid layer name or index.\n *\n * @doc {\n * heading: 'Layers',\n * subheading: 'Classes',\n * namespace: 'layers',\n * subclasses: ['LayersModel']\n * }\n */\n getLayer(name, index) {\n if (index != null) {\n if (this.layers.length <= index) {\n throw new ValueError(`Was asked to retrieve layer at index ${index}, but model only ` +\n `has ${this.layers.length} layer(s).`);\n }\n else {\n return this.layers[index];\n }\n }\n else {\n if (name == null) {\n throw new ValueError('Provide either a layer name or layer index');\n }\n }\n for (const layer of this.layers) {\n if (layer.name === name) {\n return layer;\n }\n }\n throw new ValueError(`No such layer: ${name}`);\n }\n /**\n * Retrieves the Container's current loss values.\n *\n * Used for regularizers during training.\n */\n calculateLosses() {\n // Porting Node: This is an augmentation to Container.loss in PyKeras.\n // In PyKeras, Container.loss returns symbolic tensors. Here a concrete\n // Tensor (specifically Scalar) values are returned. 
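// Sketch: the two lookup modes of getLayer() described above. The string name
// is hypothetical; auto-generated layer names depend on how the model was built.
const firstLayer = model.getLayer(null, 0);        // by index (bottom-up traversal order)
const namedLayer = model.getLayer('dense_Dense1'); // by unique layer name
// When both name and index are given, index takes precedence; an unknown name
// or an out-of-range index throws a ValueError.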
This is due to the\n // imperative backend.\n return tidy(() => {\n const losses = [];\n for (const layer of this.layers) {\n for (let nodeIndex = 0; nodeIndex < layer.inboundNodes.length; ++nodeIndex) {\n const nodeKey = Container.nodeKey(layer, nodeIndex);\n if (this.containerNodes.has(nodeKey)) {\n losses.push(...layer.calculateLosses());\n }\n }\n }\n // TODO(cais): Add any unconditional model-level losses?\n return losses;\n });\n }\n getConfig() {\n const config = { name: this.name };\n // Build a map from layer unique name (self._node_key)\n // to the index of the nodes that are saved in the config.\n // Only nodes in container_nodes are saved.\n const nodeConversionMap = this.buildNodeConversionMap(this.layers);\n // Serialize and save the layers in layerConfigs\n const layerConfigs = [];\n for (const layer of this.layers) {\n const layerClassName = layer.getClassName();\n const layerConfig = layer.getConfig();\n const filteredInboundNodes = [];\n for (let originalNodeIndex = 0; originalNodeIndex < layer.inboundNodes.length; originalNodeIndex++) {\n const node = layer.inboundNodes[originalNodeIndex];\n const nodeKey = Container.nodeKey(layer, originalNodeIndex);\n let kwargs = {};\n if (this.containerNodes.has(nodeKey)) {\n // The node is relevant to the model:\n // add to filteredInboundNodes.\n if (node.callArgs) {\n try {\n JSON.stringify(node.callArgs);\n kwargs = node.callArgs;\n }\n catch (err) {\n console.warn(`Layer ${layer.name} was passed ` +\n `non-serializable keyword arguments: ` +\n `${node.callArgs}. They will not be included ` +\n `in the serialized model (and thus will be ` +\n `missing at deserialization time).`);\n kwargs = {};\n }\n }\n if (node.inboundLayers.length > 0) {\n const nodeData = [];\n for (let i = 0; i < node.inboundLayers.length; i++) {\n const inboundLayer = node.inboundLayers[i];\n const nodeIndex = node.nodeIndices[i];\n const tensorIndex = node.tensorIndices[i];\n const nodeKey = Container.nodeKey(inboundLayer, nodeIndex);\n let newNodeIndex = nodeConversionMap[nodeKey];\n if (newNodeIndex == null) {\n newNodeIndex = 0;\n }\n nodeData.push([inboundLayer.name, newNodeIndex, tensorIndex, kwargs]);\n }\n filteredInboundNodes.push(nodeData);\n }\n }\n }\n const dict = {};\n dict['name'] = layer.name;\n dict['className'] = layerClassName;\n dict['config'] = layerConfig;\n dict['inboundNodes'] = filteredInboundNodes;\n layerConfigs.push(dict);\n }\n config['layers'] = layerConfigs;\n // Gather info about inputs and outputs\n const modelInputs = [];\n for (let i = 0; i < this.inputLayers.length; i++) {\n const layer = this.inputLayers[i];\n const nodeIndex = this.inputLayersNodeIndices[i];\n const nodeKey = Container.nodeKey(layer, nodeIndex);\n if (!this.containerNodes.has(nodeKey)) {\n continue;\n }\n let newNodeIndex = nodeConversionMap[nodeKey];\n if (newNodeIndex === null || newNodeIndex === undefined) {\n newNodeIndex = 0;\n }\n const tensorIndex = this.inputLayersTensorIndices[i];\n modelInputs.push([layer.name, newNodeIndex, tensorIndex]);\n }\n config['inputLayers'] = modelInputs;\n const modelOutputs = [];\n for (let i = 0; i < this.outputLayers.length; i++) {\n const layer = this.outputLayers[i];\n const nodeIndex = this.outputLayersNodeIndices[i];\n const nodeKey = Container.nodeKey(layer, nodeIndex);\n if (!this.containerNodes.has(nodeKey)) {\n continue;\n }\n let newNodeIndex = nodeConversionMap[nodeKey];\n if (newNodeIndex === null || newNodeIndex === undefined) {\n newNodeIndex = 0;\n }\n const tensorIndex = 
this.outputLayersTensorIndices[i];\n modelOutputs.push([layer.name, newNodeIndex, tensorIndex]);\n }\n config['outputLayers'] = modelOutputs;\n return config;\n }\n /**\n * Instantiates a LayersModel from its config (output of `get_config()`).\n * @param cls the class to create\n * @param config LayersModel config dictionary.\n * @param customObjects An optional dictionary of custom objects.\n * @param fastWeightInit Optional flag to use fast weight initialization\n * during deserialization. This is applicable to cases in which\n * the initialization will be immediately overwritten by loaded weight\n * values. Default: `false`.\n * @returns A LayersModel instance.\n * @throws ValueError: In case of improperly formatted config dict.\n */\n /** @nocollapse */\n static fromConfig(cls, config, customObjects = {}, fastWeightInit = false) {\n // Layer instances created during\n // the graph reconstruction process\n const createdLayers = {};\n // Dictionary mapping layer instances to\n // node data that specifies a layer call.\n // It acts as a queue that maintains any unprocessed\n // layer call until it becomes possible to process it\n // (i.e. until the input tensors to the call all exist).\n const unprocessedNodes = {};\n function addUnprocessedNode(layer, nodeData) {\n if (!(layer.name in unprocessedNodes)) {\n unprocessedNodes[layer.name] = [nodeData];\n }\n else {\n unprocessedNodes[layer.name].push(nodeData);\n }\n }\n function processNode(layer, nodeData) {\n const inputTensors = [];\n let kwargs;\n for (const inputData of nodeData) {\n const inboundLayerName = inputData[0];\n const inboundNodeIndex = inputData[1];\n const inboundTensorIndex = inputData[2];\n kwargs = inputData[3] == null ?\n {} :\n inputData[3];\n if (!(inboundLayerName in createdLayers)) {\n addUnprocessedNode(layer, nodeData);\n return;\n }\n const inboundLayer = createdLayers[inboundLayerName];\n if (inboundLayer.inboundNodes.length <= inboundNodeIndex) {\n addUnprocessedNode(layer, nodeData);\n return;\n }\n const inboundNode = inboundLayer.inboundNodes[inboundNodeIndex];\n inputTensors.push(inboundNode.outputTensors[inboundTensorIndex]);\n }\n // Call layer on its inputs, thus creating the node\n // and building the layer if needed.\n // Note: This has Eager vs Graph Implications.\n if (inputTensors.length > 0) {\n layer.apply(generic_utils.singletonOrArray(inputTensors), kwargs); // was ** kwargs\n }\n }\n /**\n * Deserialize a layer, then call it on appropriate inputs.\n * @param layerData: layer config dict.\n * @throws ValueError: In case of improperly formatted `layer_data`\n * dict.\n */\n function processLayer(layerData) {\n const layerName = layerData['name'];\n // Instantiate layer.\n const layer = deserializeLayer(layerData, config['customObjects'] != null ?\n config['customObjects'] :\n {});\n layer.setFastWeightInitDuringBuild(fastWeightInit);\n createdLayers[layerName] = layer;\n // Gather layer inputs.\n const inboundNodesData = layerData['inboundNodes'];\n inboundNodesData.forEach(nodeData => {\n if (!(nodeData instanceof Array)) {\n throw new ValueError(`Corrupted configuration, expected array for nodeData: ${nodeData}`);\n }\n // We don't process nodes (i.e. 
make layer calls)\n // on the fly because the inbound node may not yet exist,\n // in case of layer shared at different topological depths\n // (e.g.a model such as A(B(A(B(x)))))\n addUnprocessedNode(layer, nodeData);\n });\n }\n // First, we create all layers and enqueue nodes to be processed.\n const name = config['name'];\n const layersFromConfig = config['layers'];\n for (const layerData of layersFromConfig) {\n processLayer(layerData);\n }\n // Then we process nodes in order of layer depth.\n // Nodes that cannot yet be processed(if the inbound node\n // does not yet exist) are re - enqueued, and the process\n // is repeated until all nodes are processed.\n while (!generic_utils.isObjectEmpty(unprocessedNodes)) {\n for (const layerData of layersFromConfig) {\n const layer = createdLayers[layerData['name']];\n if (layer.name in unprocessedNodes) {\n const currentUnprocessedNodesForLayer = unprocessedNodes[layer.name];\n delete unprocessedNodes[layer.name];\n for (const nodeData of currentUnprocessedNodesForLayer) {\n processNode(layer, nodeData);\n }\n }\n }\n }\n const inputTensors = [];\n const outputTensors = [];\n const inputLayersFromConfig = config['inputLayers'];\n for (const layerData of inputLayersFromConfig) {\n const layerName = layerData[0];\n const nodeIndex = layerData[1];\n const tensorIndex = layerData[2];\n generic_utils.assert(layerName in createdLayers);\n const layer = createdLayers[layerName];\n const layerOutputTensors = layer.inboundNodes[nodeIndex].outputTensors;\n inputTensors.push(layerOutputTensors[tensorIndex]);\n }\n const outputLayersFromConfig = config['outputLayers'];\n for (const layerData of outputLayersFromConfig) {\n const layerName = layerData[0];\n const nodeIndex = layerData[1];\n const tensorIndex = layerData[2];\n generic_utils.assert(layerName in createdLayers);\n const layer = createdLayers[layerName];\n const layerOutputTensors = layer.inboundNodes[nodeIndex].outputTensors;\n outputTensors.push(layerOutputTensors[tensorIndex]);\n }\n return new cls({ inputs: inputTensors, outputs: outputTensors, name });\n }\n /**\n * Determine whether the container is stateful.\n *\n * Porting Note: this is the equivalent of the stateful @property of\n * the Container class in PyKeras.\n */\n get stateful() {\n // Porting Note: This check is to prevent inadvertent setting of the\n // _stateful property of the Container instance.\n if (this._stateful) {\n throw new ValueError('Container instance unexpectedly has _stateful = true. The ' +\n 'statefulness of a Container is determined by the Layers it ' +\n 'contains. 
Its _stateful property must remain the default false.');\n }\n for (const layer of this.layers) {\n if (layer.stateful) {\n return true;\n }\n }\n return false;\n }\n /**\n * Reset the state of all stateful constituent layers (if any).\n *\n * Examples of stateful layers include RNN layers whose `stateful` property\n * is set as `true`.\n */\n resetStates() {\n tidy(() => {\n this.layers.forEach(layer => {\n // tslint:disable:no-any\n if (layer.stateful) {\n layer.resetStates();\n }\n // tslint:enable:no-any\n });\n });\n }\n}\n//# sourceMappingURL=container.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport { dispose, mul, tensor1d, tidy } from '@tensorflow/tfjs-core';\nfunction standardizeSampleOrClassWeights(xWeight, outputNames, weightType) {\n const numOutputs = outputNames.length;\n if (xWeight == null || (Array.isArray(xWeight) && xWeight.length === 0)) {\n return outputNames.map(name => null);\n }\n if (numOutputs === 1) {\n if (Array.isArray(xWeight) && xWeight.length === 1) {\n return xWeight;\n }\n else if (typeof xWeight === 'object' && outputNames[0] in xWeight) {\n return [xWeight[outputNames[0]]];\n }\n else {\n return [xWeight];\n }\n }\n if (Array.isArray(xWeight)) {\n if (xWeight.length !== numOutputs) {\n throw new Error(`Provided ${weightType} is an array of ${xWeight.length} ` +\n `element(s), but the model has ${numOutputs} outputs. ` +\n `Make sure a set of weights is provided for each model output.`);\n }\n return xWeight;\n }\n else if (typeof xWeight === 'object' && Object.keys(xWeight).length > 0 &&\n typeof xWeight[Object.keys(xWeight)[0]] ===\n 'object') {\n const output = [];\n outputNames.forEach(outputName => {\n if (outputName in xWeight) {\n output.push(xWeight[outputName]);\n }\n else {\n output.push(null);\n }\n });\n return output;\n }\n else {\n throw new Error(`The model has multiple (${numOutputs}) outputs, ` +\n `so ${weightType} must be either an array with ` +\n `${numOutputs} elements or an object with ${outputNames} keys. ` +\n `Provided ${weightType} not understood: ${JSON.stringify(xWeight)}`);\n }\n}\n/**\n * Standardize class weighting objects.\n *\n * This function takes a single class-weighting object, an array of them,\n * or a map from output name to class-weighting object. It compares it to the\n * output name(s) of the model, base on which it outputs an array of\n * class-weighting objects of which the length matches the number of outputs.\n *\n * @param classWeight Input class-weighting object(s).\n * @param outputNames All output name(s) of the model.\n * @return An array of class-weighting objects. The length of the array matches\n * the model's number of outputs.\n */\nexport function standardizeClassWeights(classWeight, outputNames) {\n return standardizeSampleOrClassWeights(classWeight, outputNames, 'classWeight');\n}\nexport function standardizeSampleWeights(classWeight, outputNames) {\n return standardizeSampleOrClassWeights(classWeight, outputNames, 'sampleWeight');\n}\n/**\n * Standardize by-sample and/or by-class weights for training.\n *\n * Note that this function operates on one model output at a time. 
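// Sketch: resetStates() as documented above, for a container whose statefulness
// comes from a stateful RNN layer (the layer config here is illustrative).
const rnnModel = tf.sequential();
rnnModel.add(tf.layers.lstm({ units: 8, stateful: true, batchInputShape: [1, 4, 2] }));
// ...stream batches through rnnModel.predict() to accumulate hidden state...
rnnModel.resetStates(); // clears the carried state of every stateful layer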
For a model\n * with multiple outputs, you must call this function multiple times.\n *\n * @param y The target tensor that the by-sample and/or by-class weight is for.\n * The values of y are assumed to encode the classes, either directly\n * as an integer index, or as one-hot encoding.\n * @param sampleWeight By-sample weights.\n * @param classWeight By-class weights: an object mapping class indices\n * (integers) to a weight (float) to apply to the model's loss for the\n * samples from this class during training. This can be useful to tell the\n * model to \"pay more attention\" to samples from an under-represented class.\n * @param sampleWeightMode The mode for the sample weights.\n * @return A Promise of weight tensor, of which the size of the first dimension\n * matches that of `y`.\n */\nexport async function standardizeWeights(y, sampleWeight, classWeight, sampleWeightMode) {\n if (sampleWeight != null || sampleWeightMode != null) {\n // TODO(cais): Once 'temporal' mode is implemented, document it in the doc\n // string.\n throw new Error('Support sampleWeight is not implemented yet');\n }\n if (classWeight != null) {\n // Apply class weights per sample.\n const yClasses = tidy(() => {\n if (y.shape.length === 1) {\n // Assume class indices.\n return y.clone();\n }\n else if (y.shape.length === 2) {\n if (y.shape[1] > 1) {\n // Assume one-hot encoding of classes.\n const axis = 1;\n return y.argMax(axis);\n }\n else if (y.shape[1] === 1) {\n // Class index.\n return y.reshape([y.shape[0]]);\n }\n else {\n throw new Error(`Encountered unexpected last-dimension size (${y.shape[1]}) ` +\n `during handling of class weights. The size is expected to be ` +\n `>= 1.`);\n }\n }\n else {\n throw new Error(`Unexpected rank of target (y) tensor (${y.rank}) during ` +\n `handling of class weights. The rank is expected to be 1 or 2.`);\n }\n });\n const yClassIndices = Array.from(await yClasses.data());\n dispose(yClasses);\n const classSampleWeight = [];\n yClassIndices.forEach(classIndex => {\n if (classWeight[classIndex] == null) {\n throw new Error(`classWeight must contain all classes in the training data. 
` +\n `The class ${classIndex} exists in the data but not in ` +\n `classWeight`);\n }\n else {\n classSampleWeight.push(classWeight[classIndex]);\n }\n });\n return tensor1d(classSampleWeight, 'float32');\n }\n else {\n return null;\n }\n}\n/**\n * Apply per-sample weights on the loss values from a number of samples.\n *\n * @param losses Loss tensor of shape `[batchSize]`.\n * @param sampleWeights Per-sample weight tensor of shape `[batchSize]`.\n * @returns Tensor of the same shape as`losses`.\n */\nexport function computeWeightedLoss(losses, sampleWeights) {\n return mul(losses, sampleWeights);\n}\n//# sourceMappingURL=training_utils.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Interfaces and methods for training models using TensorFlow.js datasets.\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { scalar } from '@tensorflow/tfjs-core';\nimport { configureCallbacks, standardizeCallbacks } from '../base_callbacks';\nimport { NotImplementedError, ValueError } from '../errors';\nimport { disposeTensorsInLogs } from '../logs';\nimport { singletonOrArray, toList } from '../utils/generic_utils';\nimport { standardizeClassWeights, standardizeWeights } from './training_utils';\n// Default batch size used during tensor-based validation.\nconst DEFAULT_VALIDATION_BATCH_SIZE = 32;\n/**\n * Standardize the output of a dataset iterator for use by\n * LayersModel.fitDataset().\n *\n * @param model: A `tf.LayersModel` object.\n * @param iteratorOut The output of a dataset iterator. It is required to be\n * an object of the form `{xs: TensorOrArrayOrMap, ys:\n * TensorOrArrayOrMap}`, where `TensorOrArrayOrMap` is a single `tf.Tensor`,\n * a `tf.Tensor[]`, or a flat map from string names to `tf.Tensor`s.\n * @returns A flat array of `tf.Tensor` objects: the input `tf.Tensor`s\n * followed by the target `tf.Tensor`s. When `tf.Tensor`s are provided\n * as a map, the order in the resulting array is taken from the `inputNames`\n * and `outputNames` of the model.\n */\nfunction standardizeDataIteratorOutput(\n// Type `model` as `any` here to avoid circular dependency w/\n// training.ts.\n// tslint:disable-next-line:no-any\nmodel, iteratorOut) {\n let xs;\n let ys;\n const iteratorOutObj = iteratorOut;\n xs = iteratorOutObj['xs'];\n ys = iteratorOutObj['ys'];\n tfc.util.assert(xs != null && ys != null, () => 'A Dataset iterator for fitDataset() is expected to generate ' +\n 'objects of the form `{xs: xVal, ys: yVal}`, where the two ' +\n 'values may be `tf.Tensor`, an array of Tensors, or a map of ' +\n 'string to Tensor. The provided Dataset instead generates ' +\n `${iteratorOut}`);\n const flattenedXs = flattenTensorOrArrayOrMap('input', model.inputNames, xs);\n const flattenedYs = flattenTensorOrArrayOrMap('output', model.outputNames, ys);\n const batchSize = flattenedXs[0].shape[0];\n tfc.util.assert(flattenedXs.length === model.inputs.length, () => `LayersModel has ${model.inputs.length} inputs, but the dataset ` +\n `provides ${flattenedXs.length} inputs. (Expected input keys: ` +\n `${JSON.stringify(model.inputNames)})`);\n tfc.util.assert(flattenedYs.length === model.outputs.length, () => `LayersModel has ${model.outputs.length} outputs, but the dataset ` +\n `provides ${flattenedYs.length} outputs. 
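// Sketch: the user-facing form of the classWeight handling above. fit() routes
// classWeight through standardizeClassWeights()/standardizeWeights(), turning
// the per-class map into a per-sample weight tensor. Assumes `xs`/`ys` are
// prepared tensors; the weights are illustrative values.
await model.fit(xs, ys, {
  epochs: 5,
  // class 1 is under-represented, so its samples weigh 3x in the loss
  classWeight: { 0: 1, 1: 3 },
});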
(Expected output keys: ` +\n `${JSON.stringify(model.outputNames)})`);\n for (let xIndex = 0; xIndex < flattenedXs.length; xIndex++) {\n tfc.util.assert(flattenedXs[xIndex].shape[0] === batchSize, () => `Batch size mismatch: input ` +\n `${model.inputNames[xIndex]} has ${flattenedXs[xIndex].shape[0]}; ` +\n `expected ${batchSize} based on input ${model.inputNames[0]}.`);\n }\n for (let yIndex = 0; yIndex < flattenedYs.length; yIndex++) {\n tfc.util.assert(flattenedYs[yIndex].shape[0] === batchSize, () => `Batch size mismatch: output ` +\n `${model.outputNames[yIndex]} has ${flattenedYs[yIndex].shape[0]}; ` +\n `expected ${batchSize} based on input ${model.inputNames[0]}.`);\n }\n return { xs: flattenedXs, ys: flattenedYs };\n}\nfunction flattenTensorOrArrayOrMap(inputOrOutput, names, values) {\n if (values instanceof tfc.Tensor) {\n return [values];\n }\n else if (Array.isArray(values)) {\n tfc.util.assert(values.length === names.length, () => `Received an array of ${values.length} Tensors, but expected ${names.length} to match the ${inputOrOutput} keys ${names}.`);\n return values;\n }\n else {\n const result = [];\n // Check that all the required keys are available.\n for (const name of names) {\n if (values[name] == null) {\n throw new ValueError(`The feature data generated by the dataset lacks the required ` +\n `${inputOrOutput} key '${name}'.`);\n }\n result.push(values[name]);\n }\n return result;\n }\n}\nfunction standardizeTensorValidationData(data) {\n if (data.length === 3) {\n throw new NotImplementedError('Validation with sample weights is not implemented yet.');\n }\n return { xs: data[0], ys: data[1] };\n}\nexport async function fitDataset(\n// Type `model` as `any` here to avoid circular dependency w/\n// training.ts.\n// tslint:disable-next-line:no-any\nmodel, dataset, args) {\n const hasBatchesPerEpoch = args.batchesPerEpoch != null;\n tfc.util.assert(model.optimizer != null, () => 'You must compile a model before training/testing. Use ' +\n 'LayersModel.compile(modelCompileConfig).');\n tfc.util.assert(args != null, () => `For fitDataset(), the 2nd argument (config) is required, ` +\n `but it is not provided in this call.`);\n tfc.util.assert(args.epochs != null && args.epochs > 0 && Number.isInteger(args.epochs), () => `For fitDataset(), config.epochs is expected to be a positive ` +\n `integer, but got ${args.epochs}`);\n tfc.util.assert(!hasBatchesPerEpoch ||\n (args.batchesPerEpoch > 0 && Number.isInteger(args.batchesPerEpoch)), () => `For fitDataset(), config.batchesPerEpoch is expected to be a ` +\n `positive integer if specified, but got ${args.batchesPerEpoch}`);\n tfc.util.assert(\n // tslint:disable-next-line:no-any\n args['validationSplit'] == null, () => '`validationSplit` is not supported by `fitDataset()`. 
' +\n 'Use validationData instead.');\n if (model.isTraining) {\n throw new Error('Cannot start training because another fit() call is ongoing.');\n }\n model.isTraining = true;\n try {\n const doValidation = args.validationData != null;\n let valXs;\n let valYs;\n if (doValidation) {\n if (isDatasetObject(args.validationData)) {\n tfc.util.assert(args.validationBatches == null ||\n (args.validationBatches > 0 &&\n Number.isInteger(args.validationBatches)), () => `For fitDataset() with dataset-based validation, ` +\n `config.validationBatches is expected not to be provided, ` +\n `or to be a positive integer, ` +\n `but got ${args.validationBatches}`);\n }\n else {\n const validationData = standardizeTensorValidationData(args.validationData);\n valXs = validationData.xs;\n valYs = validationData.ys;\n }\n }\n const trainFunction = model.makeTrainFunction();\n const outLabels = model.getDedupedMetricsNames();\n let callbackMetrics;\n if (doValidation) {\n callbackMetrics =\n outLabels.slice().concat(outLabels.map(n => 'val_' + n));\n }\n else {\n callbackMetrics = outLabels.slice();\n }\n const callbacks = standardizeCallbacks(args.callbacks, args.yieldEvery);\n const verbose = args.verbose == null ? 1 : args.verbose;\n const { callbackList, history } = configureCallbacks(callbacks, verbose, args.epochs, null, null, getStepsPerEpoch(dataset, args), null, // Batch size determined by the dataset itself.\n doValidation, callbackMetrics);\n callbackList.setModel(model);\n model.history = history;\n await callbackList.onTrainBegin();\n model.stopTraining_ = false;\n let epoch = args.initialEpoch == null ? 0 : args.initialEpoch;\n let dataIterator = await dataset.iterator();\n while (epoch < args.epochs) {\n const epochLogs = {};\n await callbackList.onEpochBegin(epoch);\n let stepsDone = 0;\n let batchIndex = 0;\n if (!hasBatchesPerEpoch) {\n dataIterator = await dataset.iterator();\n }\n while (hasBatchesPerEpoch ? stepsDone < args.batchesPerEpoch : true) {\n const iteratorOut = await dataIterator.next();\n // If `batchesPerEpoch` is specified, the dataset should not be\n // exhausted until all epoches are done.\n if (hasBatchesPerEpoch && iteratorOut.done) {\n console.warn('You provided `batchesPerEpoch` as ' +\n `${args.batchesPerEpoch}, ` +\n 'but your dataset iterator ran out of data after ' +\n `${stepsDone} batches; ` +\n 'interrupting training. Make sure that your ' +\n 'dataset can generate at least `batchesPerEpoch * epochs` ' +\n 'batches (in this case, ' +\n `${args.batchesPerEpoch * args.epochs} batches). 
` +\n 'You may need to use the repeat() function when building ' +\n 'your dataset.');\n break;\n }\n if (iteratorOut.value != null) {\n const { xs, ys } = standardizeDataIteratorOutput(model, iteratorOut.value);\n const batchLogs = {};\n batchLogs['batch'] = batchIndex;\n batchLogs['size'] = xs[0].shape[0];\n await callbackList.onBatchBegin(batchIndex, batchLogs);\n const sampleWeights = [];\n if (args.classWeight != null) {\n const standardClassWeights = standardizeClassWeights(args.classWeight, model.outputNames);\n for (let i = 0; i < standardClassWeights.length; ++i) {\n sampleWeights.push(await standardizeWeights(ys[i], null, standardClassWeights[i]));\n }\n }\n // Train on batch.\n const ins = xs.concat(ys).concat(sampleWeights);\n const outs = trainFunction(ins);\n tfc.dispose(ins);\n for (let i = 0; i < outLabels.length; ++i) {\n const label = outLabels[i];\n const out = outs[i];\n batchLogs[label] = out;\n tfc.keep(out);\n }\n await callbackList.onBatchEnd(batchIndex, batchLogs);\n disposeTensorsInLogs(batchLogs);\n batchIndex++;\n stepsDone++;\n }\n if (hasBatchesPerEpoch ? stepsDone >= args.batchesPerEpoch :\n iteratorOut.done) {\n // Epoch finished. Perform validation.\n if (doValidation) {\n let valOuts;\n if (isDatasetObject(args.validationData)) {\n valOuts = toList(await model.evaluateDataset(args.validationData, { batches: args.validationBatches }));\n }\n else {\n valOuts = toList(model.evaluate(valXs, valYs, {\n batchSize: args.validationBatchSize == null ?\n DEFAULT_VALIDATION_BATCH_SIZE :\n args.validationBatchSize,\n verbose: 0\n }));\n }\n for (let i = 0; i < model.metricsNames.length; ++i) {\n epochLogs[`val_${model.metricsNames[i]}`] = valOuts[i];\n }\n }\n // Call `break` to exit one epoch lopp after validation is done. If\n // config.batchesPerEpoch is specified, an epoch while loop will\n // stop when `stepsDone >= config.batchesPerEpoch`. When\n // config.batchesPerEpoch is not provided, the following `break` is\n // required to exit the while lopp after dataset is exhausted.\n break;\n }\n if (model.stopTraining_) {\n break;\n }\n }\n await callbackList.onEpochEnd(epoch, epochLogs);\n epoch++;\n if (model.stopTraining_) {\n break;\n }\n }\n await callbackList.onTrainEnd();\n await model.history.syncData();\n return model.history;\n }\n finally {\n model.isTraining = false;\n }\n}\n/** Helper function that determines number of steps (batches) per epoch. 
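// Sketch: the `{xs, ys}` element contract that standardizeDataIteratorOutput()
// and fitDataset() above rely on. Assumes the full @tensorflow/tfjs bundle so
// tf.data is available, and that `model` is already compiled; shapes and
// values are illustrative.
const ds = tf.data
  .array([
    { xs: [0, 1], ys: [1] },
    { xs: [1, 0], ys: [0] },
  ])
  .map(e => ({ xs: tf.tensor2d([e.xs]), ys: tf.tensor2d([e.ys]) }))
  .repeat();
await model.fitDataset(ds, { epochs: 2, batchesPerEpoch: 2 });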
*/\nfunction getStepsPerEpoch(dataset, args) {\n // Attempt to determine # of batches in an epoch.\n let stepsPerEpoch = null;\n if (args.batchesPerEpoch != null) {\n stepsPerEpoch = args.batchesPerEpoch;\n }\n else if (Number.isFinite(dataset.size)) {\n stepsPerEpoch = dataset.size;\n }\n return stepsPerEpoch;\n}\n// Check if provided object is a Dataset object by checking its .iterator\n// element.\nfunction isDatasetObject(dataset) {\n return (typeof dataset.iterator === 'function');\n}\n// Check if provided object is a LazyIterator object by checking it's .next\n// element.\nfunction isLazyIteratorObject(iterator) {\n return (typeof iterator.next === 'function');\n}\nexport async function evaluateDataset(\n// Type `model` as `any` here to avoid circular dependency w/\n// training.ts.\n// tslint:disable-next-line:no-any\nmodel, dataset, args) {\n args = args || {};\n const hasBatches = args.batches != null;\n const f = model.testFunction;\n let outs = [];\n if (args.verbose > 0) {\n throw new NotImplementedError('Verbose mode is not implemented yet.');\n }\n tfc.util.assert(!hasBatches || (args.batches > 0 && Number.isInteger(args.batches)), () => 'Test loop expects `batches` to be a positive integer, but ' +\n `received ${JSON.stringify(args.batches)}`);\n const dataIterator = isLazyIteratorObject(dataset) ?\n dataset :\n await dataset.iterator();\n // Keeps track of number of examples used in this evaluation.\n let numExamples = 0;\n let batch = 0;\n while (hasBatches ? batch < args.batches : true) {\n const iteratorOut = await dataIterator.next();\n outs = tfc.tidy(() => {\n if (iteratorOut.value) {\n // TODO(cais): Once real dataset is available, use\n // `map(x => standardizeDataIteratorOutput(model, x).map(f)`.\n const { xs, ys } = standardizeDataIteratorOutput(model, iteratorOut.value);\n const xsAndYs = xs.concat(ys);\n const batchOuts = tfc.tidy(() => f(xsAndYs));\n tfc.dispose(xsAndYs);\n if (batch === 0) {\n for (let i = 0; i < batchOuts.length; ++i) {\n outs.push(scalar(0));\n }\n }\n const batchSize = xsAndYs[0].shape[0];\n for (let i = 0; i < batchOuts.length; ++i) {\n const batchOut = batchOuts[i];\n const oldScalar = outs[i];\n outs[i] =\n tfc.tidy(() => tfc.add(outs[i], tfc.mul(batchSize, batchOut)));\n if (batch > 0) {\n tfc.dispose(oldScalar);\n }\n }\n tfc.dispose(batchOuts);\n numExamples += batchSize;\n ++batch;\n }\n return outs;\n });\n if (iteratorOut.done) {\n if (hasBatches) {\n console.warn('Your dataset iterator ran out of data during evaluateDataset(). ' +\n 'Interrupting evalution. Make sure that your ' +\n 'dataset can generate at least `batches` ' +\n `batches (in this case, ${args.batches} batches). 
` +\n 'You may need to use the repeat() function when building ' +\n 'your dataset.');\n }\n break;\n }\n }\n for (let i = 0; i < outs.length; ++i) {\n const oldScalar = outs[i];\n outs[i] = tfc.div(outs[i], numExamples);\n tfc.dispose(oldScalar);\n }\n return singletonOrArray(outs);\n}\n//# sourceMappingURL=training_dataset.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Interfaces and methods for training models using tf.Tensor objects.\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { Tensor, tensor1d, util } from '@tensorflow/tfjs-core';\nimport { expandDims, gather, sliceAlongFirstAxis } from '../backend/tfjs_backend';\nimport { configureCallbacks, standardizeCallbacks } from '../base_callbacks';\nimport { NotImplementedError, ValueError } from '../errors';\nimport { disposeTensorsInLogs } from '../logs';\nimport { range } from '../utils/math_utils';\nexport function checkBatchSize(batchSize) {\n tfc.util.assert(batchSize > 0 && Number.isInteger(batchSize), () => `batchSize is required to be a positive integer, but got ${batchSize}`);\n}\n/**\n * Slice a Tensor or an Array of Tensors, by start and stop indices.\n *\n * Porting Note: The `_slice_arrays` function in PyKeras is covered by this\n * function and `sliceArraysByIndices()` together.\n *\n * @param arrays: the input.\n * @param start: the starting index (inclusive).\n * @param stop: the stopping index (exclusive).\n * @returns The result of the slicing. If `arrays` is an `Array` of\n * `tf.Tensor`s, the slicing will be applied to all elements of the `Array`\n * in the same way.\n */\nexport function sliceArrays(arrays, start, stop) {\n if (arrays == null) {\n return [null];\n }\n else if (Array.isArray(arrays)) {\n return arrays.map(array => sliceAlongFirstAxis(array, start, stop - start));\n }\n else { // Tensor.\n return sliceAlongFirstAxis(arrays, start, stop - start);\n }\n}\n/**\n * Slice a Tensor or an Array of Tensors, by random-order indices.\n *\n * Porting Note: The `_slice_arrays` function in PyKeras is covered by this\n * function and `sliceArrays()` together.\n *\n * @param arrays The input `tf.Tensor` or `Array` of `tf.Tensor`s to slice.\n * If an `Array` of `tf.Tensor`s, all `tf.Tensor`s will be sliced in the\n * same fashion.\n * @param indices The indices to use for slicing along the first (batch)\n * dimension.\n * @returns Result(s) of the slicing.\n */\nexport function sliceArraysByIndices(arrays, indices) {\n return tfc.tidy(() => {\n if (arrays == null) {\n return null;\n }\n else if (Array.isArray(arrays)) {\n return arrays.map(array => sliceArraysByIndices(array, indices));\n }\n else {\n // TODO(cais): indices should be a pre-constructed Tensor1D to avoid\n // tensor1d() calls.\n return gather(arrays, indices.dtype === 'int32' ? indices : indices.toInt());\n }\n });\n}\n/**\n * Returns a list of batch indices (tuples of indices).\n * @param size: Integer, total size of the data to slice into batches.\n * @param batchSize: Integer, batch size.\n * @returns An Array of [batchStart, batchEnd] tuples. batchStart is\n * inclusive; batchEnd is exclusive. 
I.e., each batch consists of indices x\n * that satisfy batchStart <= x < batchEnd.\n */\nexport function makeBatches(size, batchSize) {\n const output = [];\n let batchStart = 0;\n let batchEnd = null;\n while (batchStart < size) {\n batchEnd = batchStart + batchSize;\n if (batchEnd >= size) {\n batchEnd = size;\n }\n output.push([batchStart, batchEnd]);\n batchStart = batchEnd;\n }\n return output;\n}\n/**\n * Abstract fit function for `f(ins)`.\n * @param f A Function returning a list of tensors. For training, this\n * function is expected to perform the updates to the variables.\n * @param ins List of tensors to be fed to `f`.\n * @param outLabels List of strings, display names of the outputs of `f`.\n * @param batchSize Integer batch size or `== null` if unknown. Default : 32.\n * @param epochs Number of times to iterate over the data. Default : 1.\n * @param verbose Verbosity mode: 0, 1, or 2. Default: 1.\n * @param callbacks List of callbacks to be called during training.\n * @param valF Function to call for validation.\n * @param valIns List of tensors to be fed to `valF`.\n * @param shuffle Whether to shuffle the data at the beginning of every\n * epoch. Default : true.\n * @param callbackMetrics List of strings, the display names of the metrics\n * passed to the callbacks. They should be the concatenation of the\n * display names of the outputs of `f` and the list of display names\n * of the outputs of `valF`.\n * @param initialEpoch Epoch at which to start training (useful for\n * resuming a previous training run). Default : 0.\n * @param stepsPerEpoch Total number of steps (batches on samples) before\n * declaring one epoch finished and starting the next epoch. Ignored with\n * the default value of `undefined` or `null`.\n * @param validationSteps Number of steps to run validation for (only if\n * doing validation from data tensors). 
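// Sketch: what makeBatches() above yields for 10 samples and batchSize 4.
// Each [batchStart, batchEnd) pair slices the first (batch) axis.
makeBatches(10, 4); // -> [[0, 4], [4, 8], [8, 10]]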
Not applicable for tfjs-layers.\n * @returns A `History` object.\n */\nasync function fitLoop(\n// Type `model` as `any` here to avoid circular dependency w/ training.ts.\n// tslint:disable-next-line:no-any\nmodel, f, ins, outLabels, batchSize, epochs, verbose, callbacks, valF, valIns, shuffle, callbackMetrics, initialEpoch, stepsPerEpoch, validationSteps) {\n if (batchSize == null) {\n batchSize = 32;\n }\n if (epochs == null) {\n epochs = 1;\n }\n if (shuffle == null) {\n shuffle = true;\n }\n if (initialEpoch == null) {\n initialEpoch = 0;\n }\n // TODO(cais): Change const to let below when implementing validation.\n let doValidation = false;\n if (valF != null && valIns != null) {\n doValidation = true;\n // TODO(cais): verbose message.\n }\n if (validationSteps != null) {\n doValidation = true;\n if (stepsPerEpoch == null) {\n throw new ValueError('Can only use `validationSteps` when doing step-wise training, ' +\n 'i.e., `stepsPerEpoch` must be set.');\n }\n }\n const numTrainSamples = model.checkNumSamples(ins, batchSize, stepsPerEpoch, 'steps_per_epoch');\n let indexArray;\n if (numTrainSamples != null) {\n indexArray = range(0, numTrainSamples);\n }\n if (verbose == null) {\n verbose = 1;\n }\n const { callbackList, history } = configureCallbacks(callbacks, verbose, epochs, initialEpoch, numTrainSamples, stepsPerEpoch, batchSize, doValidation, callbackMetrics);\n callbackList.setModel(model);\n model.history = history;\n await callbackList.onTrainBegin();\n model.stopTraining_ = false;\n // TODO(cais): Take care of callbacks.validation_data as in PyKeras.\n // TODO(cais): Pre-convert feeds for performance as in PyKeras.\n for (let epoch = initialEpoch; epoch < epochs; ++epoch) {\n await callbackList.onEpochBegin(epoch);\n const epochLogs = {};\n if (stepsPerEpoch != null) {\n throw new NotImplementedError('stepsPerEpoch mode is not implemented yet.');\n }\n else {\n if (shuffle === 'batch') {\n throw new NotImplementedError('batch shuffling is not implemneted yet');\n }\n else if (shuffle) {\n util.shuffle(indexArray);\n }\n // Convert the potentially shuffled indices to Tensor1D, to avoid the\n // cost of repeated creation of Array1Ds later on.\n const epochIndexArray1D = tensor1d(indexArray);\n const batches = makeBatches(numTrainSamples, batchSize);\n for (let batchIndex = 0; batchIndex < batches.length; ++batchIndex) {\n const batchLogs = {};\n await callbackList.onBatchBegin(batchIndex, batchLogs);\n tfc.tidy(() => {\n const batchStart = batches[batchIndex][0];\n const batchEnd = batches[batchIndex][1];\n const batchIds = sliceAlongFirstAxis(epochIndexArray1D, batchStart, batchEnd - batchStart);\n batchLogs['batch'] = batchIndex;\n batchLogs['size'] = batchEnd - batchStart;\n // TODO(cais): In ins, train flag can be a number, instead of an\n // Tensor? 
Do we need to handle this in tfjs-layers?\n const insBatch = sliceArraysByIndices(ins, batchIds);\n const outs = f(insBatch);\n for (let i = 0; i < outLabels.length; ++i) {\n const label = outLabels[i];\n const out = outs[i];\n batchLogs[label] = out;\n tfc.keep(out);\n // TODO(cais): Use scope() to avoid ownership.\n }\n if (batchIndex === batches.length - 1) { // Last batch.\n if (doValidation) {\n const valOuts = model.testLoop(valF, valIns, batchSize);\n // Porting Notes: In tfjs-layers, valOuts is always an Array.\n for (let i = 0; i < outLabels.length; ++i) {\n const label = outLabels[i];\n const out = valOuts[i];\n tfc.keep(out);\n // TODO(cais): Use scope() to avoid ownership.\n epochLogs['val_' + label] = out;\n }\n }\n }\n });\n await callbackList.onBatchEnd(batchIndex, batchLogs);\n disposeTensorsInLogs(batchLogs);\n if (model.stopTraining_) {\n break;\n }\n // TODO(cais): return outs as list of Tensor.\n }\n epochIndexArray1D.dispose();\n }\n // TODO(cais): Run validation at the end of the epoch.\n await callbackList.onEpochEnd(epoch, epochLogs);\n if (model.stopTraining_) {\n break;\n }\n }\n await callbackList.onTrainEnd();\n await model.history.syncData();\n return model.history;\n}\nexport async function fitTensors(\n// Type `model` as `any` here to avoid circular dependency w/ training.ts.\n// tslint:disable-next-line:no-any\nmodel, x, y, args = {}) {\n if (model.isTraining) {\n throw new Error('Cannot start training because another fit() call is ongoing.');\n }\n model.isTraining = true;\n let inputs;\n let targets;\n let inputValX;\n let inputValY;\n let valX;\n let valY;\n let sampleWeights;\n try {\n const batchSize = args.batchSize == null ? 32 : args.batchSize;\n checkBatchSize(batchSize);\n // Validate user data.\n // TODO(cais): Support sampleWeight.\n const checkBatchAxis = false;\n const standardizedOuts = await model.standardizeUserData(x, y, args.sampleWeight, args.classWeight, checkBatchAxis, batchSize);\n inputs = standardizedOuts[0];\n targets = standardizedOuts[1];\n sampleWeights = standardizedOuts[2];\n // Prepare validation data.\n let doValidation = false;\n let valIns;\n if (args.validationData != null && args.validationData.length > 0) {\n doValidation = true;\n if (args.validationData.length === 2) {\n // config.validationData consists of valX and valY.\n inputValX = args.validationData[0];\n inputValY = args.validationData[1];\n }\n else if (args.validationData.length === 3) {\n throw new NotImplementedError('validationData including sample weights is not supported yet.');\n }\n else {\n throw new ValueError(`When passing validation data, it must contain 2 (valX, valY) ` +\n `or 3 (valX, valY, valSampleWeight) items; ` +\n `${args.validationData} is invalid.`);\n }\n const checkBatchAxis = true;\n const valStandardized = await model.standardizeUserData(inputValX, inputValY, null, /** Unused sample weights. */ null, /** Unused class weights. 
*/ checkBatchAxis, batchSize);\n valX = valStandardized[0];\n valY = valStandardized[1];\n valIns = valX.concat(valY);\n // TODO(cais): Add useLearningPhase data properly.\n }\n else if (args.validationSplit != null && args.validationSplit > 0 &&\n args.validationSplit < 1) {\n doValidation = true;\n // Porting Note: In tfjs-layers, inputs[0] is always a Tensor.\n const splitAt = Math.floor(inputs[0].shape[0] * (1 - args.validationSplit));\n const originalBatchSize = inputs[0].shape[0];\n valX = sliceArrays(inputs, splitAt, originalBatchSize);\n inputs = sliceArrays(inputs, 0, splitAt);\n valY = sliceArrays(targets, splitAt, originalBatchSize);\n targets = sliceArrays(targets, 0, splitAt);\n // TODO(cais): Once sampleWeights becomes available, slice it to get\n // valSampleWeights.\n valIns = valX.concat(valY);\n // TODO(cais): Add useLearningPhase data properly.\n }\n else if (args.validationSteps != null) {\n doValidation = true;\n // TODO(cais): Add useLearningPhase.\n }\n const ins = inputs.concat(targets).concat(sampleWeights);\n model.checkTrainableWeightsConsistency();\n // TODO(cais): Handle use_learning_phase and learning_phase?\n // Porting Note: Here we see a key deviation of tfjs-layers from\n // Keras.\n // Due to the imperative nature of tfjs-layers' backend (tfjs-core),\n // we do not construct symbolic computation graphs to embody the\n // training process. Instead, we define a function that performs the\n // training action. In PyKeras, the data (inputs and targets) are fed\n // through graph placeholders. In tfjs-layers, the data are fed as\n // function arguments. Since the function are defined below in the\n // scope, we don't have equivalents of PyKeras's\n // `_make_train_funciton`.\n const trainFunction = model.makeTrainFunction();\n const outLabels = model.getDedupedMetricsNames();\n let valFunction;\n let callbackMetrics;\n if (doValidation) {\n model.makeTestFunction();\n valFunction = model.testFunction;\n callbackMetrics =\n outLabels.slice().concat(outLabels.map(n => 'val_' + n));\n }\n else {\n valFunction = null;\n valIns = [];\n callbackMetrics = outLabels.slice();\n }\n const callbacks = standardizeCallbacks(args.callbacks, args.yieldEvery);\n const out = await fitLoop(model, trainFunction, ins, outLabels, batchSize, args.epochs, args.verbose, callbacks, valFunction, valIns, args.shuffle, callbackMetrics, args.initialEpoch, null, null);\n return out;\n }\n finally {\n model.isTraining = false;\n // Memory clean up.\n disposeNewTensors(inputs, x);\n disposeNewTensors(targets, y);\n disposeNewTensors(valX, inputValX);\n disposeNewTensors(valY, inputValY);\n if (sampleWeights != null) {\n tfc.dispose(sampleWeights);\n }\n }\n // TODO(cais): Add value to outLabels.\n}\n/**\n * Ensure tensors all have a rank of at least 2.\n *\n * If a tensor has a rank of 1, it is dimension-expanded to rank 2.\n * If any tensor has a rank of 0 (i.e., is a scalar), an error will be thrown.\n */\nexport function ensureTensorsRank2OrHigher(tensors) {\n const outs = [];\n if (tensors instanceof Tensor) {\n tensors = [tensors];\n }\n // Make Tensors at least 2D.\n for (let i = 0; i < tensors.length; ++i) {\n const tensor = tensors[i];\n if (tensor.rank === 1) {\n outs.push(expandDims(tensor, 1));\n }\n else if (tensor.rank === 0) {\n throw new Error('Expected tensor to be at least 1D, but received a 0D tensor ' +\n '(scalar).');\n }\n else {\n outs.push(tensor);\n }\n }\n return outs;\n}\n/**\n * Compare a set of tensors with a reference (old) set, discard the ones\n * in the new 
set that are not present in the reference set.\n *\n * This method is used for memory clenaup during calls such as\n * LayersModel.fit().\n *\n * @param tensors New set which may contain Tensors not present in\n * `refTensors`.\n * @param refTensors Reference Tensor set.\n */\n// TODO(cais, kangyizhang): Deduplicate with tfjs-data.\nexport function disposeNewTensors(tensors, refTensors) {\n if (tensors == null) {\n return;\n }\n const oldTensorIds = [];\n if (refTensors instanceof Tensor) {\n oldTensorIds.push(refTensors.id);\n }\n else if (Array.isArray(refTensors)) {\n refTensors.forEach(t => oldTensorIds.push(t.id));\n }\n else if (refTensors != null) {\n // `oldTensors` is a map from string name to Tensor.\n for (const name in refTensors) {\n const oldTensor = refTensors[name];\n oldTensorIds.push(oldTensor.id);\n }\n }\n const tensorsToDispose = [];\n if (tensors instanceof Tensor) {\n if (oldTensorIds.indexOf(tensors.id) === -1) {\n tensorsToDispose.push(tensors);\n }\n }\n else if (Array.isArray(tensors)) {\n tensors.forEach(t => {\n if (oldTensorIds.indexOf(t.id) === -1) {\n tensorsToDispose.push(t);\n }\n });\n }\n else if (tensors != null) {\n // `oldTensors` is a map from string name to Tensor.\n for (const name in tensors) {\n const tensor = tensors[name];\n if (oldTensorIds.indexOf(tensor.id) === -1) {\n tensorsToDispose.push(tensor);\n }\n }\n }\n tensorsToDispose.forEach(t => {\n if (!t.isDisposed) {\n t.dispose();\n }\n });\n}\n//# sourceMappingURL=training_tensors.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* Original Source: engine/training.py */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { io, Optimizer, scalar, serialization, Tensor, tensor1d, util } from '@tensorflow/tfjs-core';\nimport * as K from '../backend/tfjs_backend';\nimport { nameScope } from '../common';\nimport { NotImplementedError, RuntimeError, ValueError } from '../errors';\nimport { deserialize } from '../layers/serialization';\nimport * as losses from '../losses';\nimport * as Metrics from '../metrics';\nimport * as optimizers from '../optimizers';\nimport { checkUserDefinedMetadata } from '../user_defined_metadata';\nimport { count, pyListRepeat, singletonOrArray, toCamelCase, toSnakeCase, unique } from '../utils/generic_utils';\nimport { printSummary } from '../utils/layer_utils';\nimport { range } from '../utils/math_utils';\nimport { convertPythonicToTs } from '../utils/serialization_utils';\nimport { version } from '../version';\nimport { Container } from './container';\nimport { execute, FeedDict } from './executor';\nimport { evaluateDataset, fitDataset } from './training_dataset';\nimport { checkBatchSize, disposeNewTensors, ensureTensorsRank2OrHigher, fitTensors, makeBatches, sliceArrays, sliceArraysByIndices } from './training_tensors';\nimport { computeWeightedLoss, standardizeClassWeights, standardizeWeights } from './training_utils';\n/**\n * Helper function for polymorphic input data: 1. singleton Tensor.\n */\nexport function isDataTensor(x) {\n return x instanceof Tensor;\n}\n/**\n * Helper function for polymorphic input data: 2. Array of Tensor.\n */\nexport function isDataArray(x) {\n return Array.isArray(x);\n}\n/**\n * Helper function for polymorphic input data: 3. 
\"dict\" of Tensor.\n */\nexport function isDataDict(x) {\n return !isDataTensor(x) && !isDataArray(x);\n}\n/**\n * Normalizes inputs and targets provided by users.\n * @param data User-provided input data (polymorphic).\n * @param names An Array of expected Tensor names.\n * @param shapes Optional Array of expected Tensor shapes.\n * @param checkBatchAxis Whether to check that the batch axis of the arrays\n * match the expected value found in `shapes`.\n * @param exceptionPrefix String prefix used for exception formatting.\n * @returns List of standardized input Tensors (one Tensor per model input).\n * @throws ValueError: in case of improperly formatted user data.\n */\nexport function standardizeInputData(data, names, shapes, checkBatchAxis = true, exceptionPrefix = '') {\n if (names == null || names.length === 0) {\n // Check for the case where the model expected no data, but some data got\n // sent.\n if (data != null) {\n let gotUnexpectedData = false;\n if (isDataArray(data) && data.length > 0) {\n gotUnexpectedData = true;\n }\n else if (isDataDict(data)) {\n for (const key in data) {\n if (data.hasOwnProperty(key)) {\n gotUnexpectedData = true;\n break;\n }\n }\n }\n else {\n // `data` is a singleton Tensor in this case.\n gotUnexpectedData = true;\n }\n if (gotUnexpectedData) {\n throw new ValueError(`Error when checking model ${exceptionPrefix} expected no data, ` +\n `but got ${data}`);\n }\n }\n return [];\n }\n if (data == null) {\n return names.map(name => null);\n }\n let arrays;\n if (isDataDict(data)) {\n data = data;\n arrays = [];\n for (const name of names) {\n if (data[name] == null) {\n throw new ValueError(`No data provided for \"${name}\". Need data for each key in: ` +\n `${names}`);\n }\n arrays.push(data[name]);\n }\n }\n else if (isDataArray(data)) {\n data = data;\n if (data.length !== names.length) {\n throw new ValueError(`Error when checking model ${exceptionPrefix}: the Array of ` +\n `Tensors that you are passing to your model is not the size the ` +\n `model expected. Expected to see ${names.length} Tensor(s), but ` +\n `instead got the following list of Tensor(s): ${data}`);\n }\n arrays = data;\n }\n else {\n data = data;\n if (names.length > 1) {\n throw new ValueError(`The model ${exceptionPrefix} expects ${names.length} Tensor(s), ` +\n `but only received one Tensor. Found: Tensor with shape ${data.shape}`);\n }\n arrays = [data];\n }\n arrays = ensureTensorsRank2OrHigher(arrays);\n // Check shape compatibility.\n if (shapes != null) {\n for (let i = 0; i < names.length; ++i) {\n if (shapes[i] == null) {\n continue;\n }\n const array = arrays[i];\n if (array.shape.length !== shapes[i].length) {\n throw new ValueError(`Error when checking ${exceptionPrefix}: expected ${names[i]} ` +\n `to have ${shapes[i].length} dimension(s). 
but got array with ` +\n `shape ${array.shape}`);\n }\n for (let j = 0; j < shapes[i].length; ++j) {\n if (j === 0 && !checkBatchAxis) {\n // Skip the first (batch) axis.\n continue;\n }\n const dim = array.shape[j];\n const refDim = shapes[i][j];\n if (refDim != null && refDim >= 0 && dim !== refDim) {\n throw new ValueError(`Error when checking ${exceptionPrefix}: expected ${names[i]} ` +\n `to have shape [${shapes[i]}], but got array with shape ` +\n `[${array.shape}].`);\n }\n }\n }\n }\n return arrays;\n}\n/**\n * User input validation for Tensors.\n * @param inputs `Array` of `tf.Tensor`s for inputs.\n * @param targets `Array` of `tf.Tensor`s for targets.\n * @param weights Optional `Array` of `tf.Tensor`s for sample weights.\n * @throws ValueError: in case of incorrectly formatted data.\n */\nexport function checkArrayLengths(inputs, targets, weights) {\n const setX = unique(inputs.map(input => input.shape[0]));\n setX.sort();\n const setY = unique(targets.map(target => target.shape[0]));\n setY.sort();\n // TODO(cais): Check `weights` as well.\n if (setX.length > 1) {\n throw new ValueError(`All input Tensors (x) should have the same number of samples. ` +\n `Got array shapes: ` +\n `${JSON.stringify(inputs.map(input => input.shape))}`);\n }\n if (setY.length > 1) {\n throw new ValueError(`All target Tensors (y) should have the same number of samples. ` +\n `Got array shapes: ` +\n `${JSON.stringify(targets.map(target => target.shape))}`);\n }\n if (setX.length > 0 && setY.length > 0 && !util.arraysEqual(setX, setY)) {\n throw new ValueError(`Input Tensors should have the same number of samples as target ` +\n `Tensors. Found ${setX[0]} input sample(s) and ${setY[0]} target ` +\n `sample(s).`);\n }\n}\n/**\n * Validation on the compatibility of targes and loss functions.\n *\n * This helps prevent users from using loss functions incorrectly.\n *\n * @param targets `Array` of `tf.Tensor`s of targets.\n * @param lossFns `Array` of loss functions.\n * @param outputShapes `Array` of shapes of model outputs.\n */\nfunction checkLossAndTargetCompatibility(targets, lossFns, outputShapes) {\n // TODO(cais): Dedicated test coverage?\n const keyLosses = [\n losses.meanSquaredError, losses.binaryCrossentropy,\n losses.categoricalCrossentropy\n ];\n for (let i = 0; i < targets.length; ++i) {\n const y = targets[i];\n const loss = lossFns[i];\n const shape = outputShapes[i];\n if (loss == null) {\n continue;\n }\n if (loss === losses.categoricalCrossentropy) {\n if (y.shape[y.shape.length - 1] === 1) {\n throw new ValueError(`You are passing a target array of shape ${y.shape} while using ` +\n `a loss 'categorical_crossentropy'. 'categorical_crossentropy'` +\n `expects targets to be binary matrices (1s and 0s) of shape ` +\n `[samples, classes].`);\n // TODO(cais): Example code in error message.\n }\n }\n if (keyLosses.indexOf(loss) !== -1) {\n const slicedYShape = y.shape.slice(1);\n const slicedShape = shape.slice(1);\n for (let j = 0; j < slicedYShape.length; ++j) {\n const targetDim = slicedYShape[j];\n const outDim = slicedShape[j];\n if (outDim != null && targetDim !== outDim) {\n throw new ValueError(`A target Tensor with shape ${y.shape} was passed for an ` +\n `output of shape ${shape}, while using a loss function that ` +\n `expects targets to have the same shape as the output.`);\n }\n }\n }\n }\n}\n/**\n * Check inputs provided by the user.\n *\n * Porting Note: This corresponds to _standardize_input_data() in Python\n * Keras. 
Because of the strong typing in TF.js, we do not need to convert\n * the data. Specifically:\n * 1) in PyKeras, `data` can be `DataFrame` instances from pandas, for\n * example. We don't need to worry about that here because there is no\n * widely popular javascript/typescript equivalent of pandas (so far).\n * If one becomes available in the future, we can add support.\n * 2) in PyKeras, inputs can be a Python dict. But here we are stipulating\n * that the data is either a single `tf.Tensor` or an Array of `tf.Tensor`s. We\n * may add support for `Object` data inputs in the future when the need\n * arises.\n *\n * Instead, we perform basic checks for number of parameters and shapes.\n *\n * @param data: The input data.\n * @param names: Name for the inputs, from the model.\n * @param shapes: Expected shapes for the input data, from the model.\n * @param checkBatchAxis: Whether the size along the batch axis (i.e., the\n * first dimension) will be checked for matching.\n * @param exceptionPrefix: Exception prefix message, used in generating error\n * messages.\n * @throws ValueError: on incorrect number of inputs or mismatches in shapes.\n */\nfunction checkInputData(data, names, shapes, checkBatchAxis = true, exceptionPrefix = '') {\n let arrays;\n if (Array.isArray(data)) {\n if (data.length !== names.length) {\n throw new ValueError(`Error when checking model ${exceptionPrefix}: the Array of ` +\n `Tensors that you are passing to your model is not the size the ` +\n `model expected. Expected to see ${names.length} Tensor(s),` +\n ` but instead got ${data.length} Tensor(s).`);\n }\n arrays = data;\n }\n else {\n if (names.length > 1) {\n throw new ValueError(`The model expects ${names.length} ${exceptionPrefix} Tensors, ` +\n `but only received one Tensor. Found: array with shape ` +\n `${JSON.stringify(data.shape)}.`);\n }\n arrays = [data];\n }\n if (shapes != null) {\n for (let i = 0; i < names.length; ++i) {\n if (shapes[i] == null) {\n continue;\n }\n const array = arrays[i];\n if (array.shape.length !== shapes[i].length) {\n throw new ValueError(`Error when checking ${exceptionPrefix}: expected ${names[i]} ` +\n `to have ${shapes[i].length} dimension(s), but got array with ` +\n `shape ${JSON.stringify(array.shape)}`);\n }\n for (let j = 0; j < shapes[i].length; ++j) {\n if (j === 0 && !checkBatchAxis) {\n continue;\n }\n const dim = array.shape[j];\n const refDim = shapes[i][j];\n if (refDim != null) {\n if (refDim !== dim) {\n throw new ValueError(`Error when checking ${exceptionPrefix}: expected ` +\n `${names[i]} to have shape ${JSON.stringify(shapes[i])} but ` +\n `got array with shape ${JSON.stringify(array.shape)}.`);\n }\n }\n }\n }\n }\n}\n/**\n * Maps metric functions to model outputs.\n * @param metrics A shortcut string name, metric function, `Array` or dict\n * (`Object`) of metric functions.\n * @param outputNames An `Array` of the names of model outputs.\n * @returns An `Array` (one entry per model output) of `Array` of metric\n * functions. 
For instance, if the model has 2 outputs, and for the first\n * output we want to compute `binaryAccuracy` and `binaryCrossentropy`,\n * and just `binaryAccuracy` for the second output, the `Array` would look\n * like:\n * `[[binaryAccuracy, binaryCrossentropy], [binaryAccuracy]]`\n * @throws TypeError: incompatible metrics format.\n */\nexport function collectMetrics(metrics, outputNames) {\n if (metrics == null || Array.isArray(metrics) && metrics.length === 0) {\n return outputNames.map(name => []);\n }\n let wrappedMetrics;\n if (typeof metrics === 'string' || typeof metrics === 'function') {\n wrappedMetrics = [metrics];\n }\n else if (Array.isArray(metrics) || typeof metrics === 'object') {\n wrappedMetrics = metrics;\n }\n else {\n throw new TypeError('Type of metrics argument not understood. Expected an string,' +\n `function, Array, or Object, found: ${metrics}`);\n }\n if (Array.isArray(wrappedMetrics)) {\n // We then apply all metrics to all outputs.\n return outputNames.map(name => wrappedMetrics);\n }\n else {\n // In this case, metrics is a dict.\n const nestedMetrics = [];\n for (const name of outputNames) {\n let outputMetrics = wrappedMetrics.hasOwnProperty(name) ? wrappedMetrics[name] : [];\n if (!Array.isArray(outputMetrics)) {\n outputMetrics = [outputMetrics];\n }\n nestedMetrics.push(outputMetrics);\n }\n return nestedMetrics;\n }\n}\nconst LAYERS_MODEL_FORMAT_NAME = 'layers-model';\n/**\n * A `tf.LayersModel` is a directed, acyclic graph of `tf.Layer`s plus methods\n * for training, evaluation, prediction and saving.\n *\n * `tf.LayersModel` is the basic unit of training, inference and evaluation in\n * TensorFlow.js. To create a `tf.LayersModel`, use `tf.LayersModel`.\n *\n * See also:\n * `tf.Sequential`, `tf.loadLayersModel`.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\nexport class LayersModel extends Container {\n constructor(args) {\n super(args);\n this.isTraining = false;\n }\n /**\n * Print a text summary of the model's layers.\n *\n * The summary includes\n * - Name and type of all layers that comprise the model.\n * - Output shape(s) of the layers\n * - Number of weight parameters of each layer\n * - If the model has non-sequential-like topology, the inputs each layer\n * receives\n * - The total number of trainable and non-trainable parameters of the model.\n *\n * ```js\n * const input1 = tf.input({shape: [10]});\n * const input2 = tf.input({shape: [20]});\n * const dense1 = tf.layers.dense({units: 4}).apply(input1);\n * const dense2 = tf.layers.dense({units: 8}).apply(input2);\n * const concat = tf.layers.concatenate().apply([dense1, dense2]);\n * const output =\n * tf.layers.dense({units: 3, activation: 'softmax'}).apply(concat);\n *\n * const model = tf.model({inputs: [input1, input2], outputs: output});\n * model.summary();\n * ```\n *\n * @param lineLength Custom line length, in number of characters.\n * @param positions Custom widths of each of the columns, as either\n * fractions of `lineLength` (e.g., `[0.5, 0.75, 1]`) or absolute number\n * of characters (e.g., `[30, 50, 65]`). Each number corresponds to\n * right-most (i.e., ending) position of a column.\n * @param printFn Custom print function. Can be used to replace the default\n * `console.log`. 
For example, you can use `x => {}` to mute the printed\n * messages in the console.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n summary(lineLength, positions, printFn = console.log) {\n if (!this.built) {\n throw new ValueError(`This model has never been called, thus its weights have not been ` +\n `created yet. So no summary can be displayed. Build the model ` +\n `first (e.g., by calling it on some test data).`);\n }\n printSummary(this, lineLength, positions, printFn);\n }\n /**\n * Configures and prepares the model for training and evaluation. Compiling\n * outfits the model with an optimizer, loss, and/or metrics. Calling `fit`\n * or `evaluate` on an un-compiled model will throw an error.\n *\n * @param args a `ModelCompileArgs` specifying the loss, optimizer, and\n * metrics to be used for fitting and evaluating this model.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n compile(args) {\n if (args.loss == null) {\n args.loss = [];\n }\n this.loss = args.loss;\n if (typeof args.optimizer === 'string') {\n this.optimizer_ = optimizers.getOptimizer(args.optimizer);\n this.isOptimizerOwned = true;\n }\n else {\n if (!(args.optimizer instanceof Optimizer)) {\n throw new ValueError(`User-defined optimizer must be an instance of tf.Optimizer.`);\n }\n this.optimizer_ = args.optimizer;\n this.isOptimizerOwned = false;\n }\n // TODO(cais): Add lossWeights.\n // TODO(cais): Add sampleWeightMode.\n // Prepare loss functions.\n let lossFunctions = [];\n if (!Array.isArray(args.loss) && typeof args.loss !== 'string' &&\n typeof args.loss !== 'function') {\n args.loss = args.loss;\n for (const name in args.loss) {\n if (this.outputNames.indexOf(name) === -1) {\n throw new ValueError(`Unknown entry in loss dictionary: \"${name}\". ` +\n `Only expected the following keys: ${this.outputNames}`);\n }\n }\n for (const name of this.outputNames) {\n if (args.loss[name] == null) {\n console.warn(`Output \"${name}\" is missing from loss dictionary. We assume ` +\n `this was done on purpose, and we will not be expecting data ` +\n `to be passed to ${name} during training`);\n }\n lossFunctions.push(losses.get(args.loss[name]));\n }\n }\n else if (Array.isArray(args.loss)) {\n if (args.loss.length !== this.outputs.length) {\n throw new ValueError(`When passing an Array as loss, it should have one entry per ` +\n `model output. The model has ${this.outputs.length} output(s), ` +\n `but you passed loss=${args.loss}.`);\n }\n const theLosses = args.loss;\n lossFunctions = theLosses.map(l => losses.get(l));\n }\n else {\n const lossFunction = losses.get(args.loss);\n this.outputs.forEach(_ => {\n lossFunctions.push(lossFunction);\n });\n }\n this.lossFunctions = lossFunctions;\n this.feedOutputNames = [];\n this.feedOutputShapes = [];\n this.feedLossFns = [];\n for (let i = 0; i < this.outputs.length; ++i) {\n // TODO(cais): Logic for skipping target(s).\n const shape = this.internalOutputShapes[i];\n const name = this.outputNames[i];\n this.feedOutputNames.push(name);\n this.feedOutputShapes.push(shape);\n this.feedLossFns.push(this.lossFunctions[i]);\n }\n // TODO(cais): Add logic for output masks.\n // TODO(cais): Add logic for sample weights.\n const skipTargetIndices = [];\n // Prepare metrics.\n this.metrics = args.metrics;\n // TODO(cais): Add weightedMetrics.\n this.metricsNames = ['loss'];\n this.metricsTensors = [];\n // Compute total loss.\n // Porting Note: In PyKeras, metrics_tensors are symbolic tensor objects.\n // Here, metricsTensors are TypeScript functions. 
This difference is due\n // to the difference in symbolic/imperative property of the backends.\n nameScope('loss', () => {\n for (let i = 0; i < this.outputs.length; ++i) {\n if (skipTargetIndices.indexOf(i) !== -1) {\n continue;\n }\n // TODO(cais): Add weightedLoss, sampleWeight and mask.\n // The following line should be weightedLoss\n const weightedLoss = this.lossFunctions[i];\n if (this.outputs.length > 1) {\n this.metricsTensors.push([weightedLoss, i]);\n this.metricsNames.push(this.outputNames[i] + '_loss');\n }\n }\n // Porting Note: Due to the imperative nature of the backend, we calculate\n // the regularizer penalties in the totalLossFunction, instead of here.\n });\n const nestedMetrics = collectMetrics(args.metrics, this.outputNames);\n // TODO(cais): Add nestedWeightedMetrics.\n /**\n * Helper function used in loop below.\n */\n const appendMetric = (outputIndex, metricName, metricTensor) => {\n if (this.outputNames.length > 1) {\n metricName = this.outputNames[outputIndex] + '_' + metricName;\n }\n this.metricsNames.push(metricName);\n this.metricsTensors.push([metricTensor, outputIndex]);\n };\n nameScope('metric', () => {\n for (let i = 0; i < this.outputs.length; ++i) {\n if (skipTargetIndices.indexOf(i) !== -1) {\n continue;\n }\n const outputMetrics = nestedMetrics[i];\n // TODO(cais): Add weights and outputWeightedMetrics.\n // TODO(cais): Add optional arg `weights` to the following function.\n const handleMetrics = (metrics) => {\n const metricNamePrefix = '';\n let metricName;\n let accFn;\n let weightedMetricFn;\n // TODO(cais): Use 'weights_' for weighted metrics.\n for (const metric of metrics) {\n if (typeof metric === 'string' &&\n ['accuracy', 'acc', 'crossentropy', 'ce'].indexOf(metric) !==\n -1) {\n const outputShape = this.internalOutputShapes[i];\n if (outputShape[outputShape.length - 1] === 1 ||\n this.lossFunctions[i] === losses.binaryCrossentropy) {\n // case: binary accuracy/crossentropy.\n if (['accuracy', 'acc'].indexOf(metric) !== -1) {\n accFn = Metrics.binaryAccuracy;\n }\n else if (['crossentropy', 'ce'].indexOf(metric) !== -1) {\n accFn = Metrics.binaryCrossentropy;\n }\n }\n else if (this.lossFunctions[i] ===\n losses.sparseCategoricalCrossentropy) {\n // case: categorical accuracy / crossentropy with sparse\n // targets.\n if (['accuracy', 'acc'].indexOf(metric) !== -1) {\n accFn = Metrics.sparseCategoricalAccuracy;\n }\n else if (['crossentropy', 'ce'].indexOf(metric) !== -1) {\n accFn = Metrics.sparseCategoricalCrossentropy;\n }\n }\n else {\n // case: categorical accuracy / crossentropy.\n if (['accuracy', 'acc'].indexOf(metric) !== -1) {\n accFn = Metrics.categoricalAccuracy;\n }\n else if (['crossentropy', 'ce'].indexOf(metric) !== -1) {\n accFn = Metrics.categoricalCrossentropy;\n }\n }\n let suffix;\n if (['accuracy', 'acc'].indexOf(metric) !== -1) {\n suffix = 'acc';\n }\n else if (['crossentropy', 'ce'].indexOf(metric) !== -1) {\n suffix = 'ce';\n }\n // TODO(cais): Add weighting actually.\n weightedMetricFn = accFn;\n metricName = metricNamePrefix + suffix;\n }\n else {\n const metricFn = Metrics.get(metric);\n // TODO(cais): Add weighting actually.\n weightedMetricFn = metricFn;\n metricName =\n metricNamePrefix + Metrics.getLossOrMetricName(metric);\n }\n // TODO(cais): Add weighting and masking to metricResult.\n let metricResult;\n nameScope(metricName, () => {\n metricResult = weightedMetricFn;\n });\n appendMetric(i, metricName, metricResult);\n }\n };\n handleMetrics(outputMetrics);\n // TODO(cais): Call handleMetrics with 
weights.\n }\n });\n // Porting Notes: Given the imperative backend of tfjs-core,\n // there is no need for constructing the symbolic graph and placeholders.\n this.collectedTrainableWeights = this.trainableWeights;\n }\n /**\n * Check trainable weights count consistency.\n *\n * This will raise a warning if `this.trainableWeights` and\n * `this.collectedTrainableWeights` are inconsistent (i.e., have different\n * numbers of parameters).\n * Inconsistency will typically arise when one modifies `model.trainable`\n * without calling `model.compile()` again.\n */\n checkTrainableWeightsConsistency() {\n if (this.collectedTrainableWeights == null) {\n return;\n }\n if (this.trainableWeights.length !==\n this.collectedTrainableWeights.length) {\n console.warn('Discrepancy between trainableweights and collected trainable ' +\n 'weights. Did you set `model.trainable` without calling ' +\n '`model.compile()` afterwards?');\n }\n }\n /**\n * Returns the loss value & metrics values for the model in test mode.\n *\n * Loss and metrics are specified during `compile()`, which needs to happen\n * before calls to `evaluate()`.\n *\n * Computation is done in batches.\n *\n * ```js\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 1, inputShape: [10]})]\n * });\n * model.compile({optimizer: 'sgd', loss: 'meanSquaredError'});\n * const result = model.evaluate(\n * tf.ones([8, 10]), tf.ones([8, 1]), {batchSize: 4});\n * result.print();\n * ```\n *\n * @param x `tf.Tensor` of test data, or an `Array` of `tf.Tensor`s if the\n * model has multiple inputs.\n * @param y `tf.Tensor` of target data, or an `Array` of `tf.Tensor`s if the\n * model has multiple outputs.\n * @param args A `ModelEvaluateArgs`, containing optional fields.\n *\n * @return `Scalar` test loss (if the model has a single output and no\n * metrics) or `Array` of `Scalar`s (if the model has multiple outputs\n * and/or metrics). The attribute `model.metricsNames`\n * will give you the display labels for the scalar outputs.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n evaluate(x, y, args = {}) {\n const batchSize = args.batchSize == null ? 32 : args.batchSize;\n checkBatchSize(batchSize);\n // TODO(cais): Standardize `config.sampleWeights` as well.\n // Validate user data.\n const checkBatchAxis = true;\n const standardizedOuts = this.standardizeUserDataXY(x, y, checkBatchAxis, batchSize);\n try {\n // TODO(cais): If uses `useLearningPhase`, set the corresponding element\n // of the input to 0.\n const ins = standardizedOuts[0].concat(standardizedOuts[1]);\n this.makeTestFunction();\n const f = this.testFunction;\n const testOuts = this.testLoop(f, ins, batchSize, args.verbose, args.steps);\n return singletonOrArray(testOuts);\n }\n finally {\n disposeNewTensors(standardizedOuts[0], x);\n disposeNewTensors(standardizedOuts[1], y);\n }\n }\n // TODO(cais): Add code snippet below once real dataset objects are\n // available.\n /**\n * Evaluate model using a dataset object.\n *\n * Note: Unlike `evaluate()`, this method is asynchronous (`async`);\n *\n * @param dataset A dataset object. Its `iterator()` method is expected\n * to generate a dataset iterator object, the `next()` method of which\n * is expected to produce data batches for evaluation. The return value\n * of the `next()` call ought to contain a boolean `done` field and a\n * `value` field. The `value` field is expected to be an array of two\n * `tf.Tensor`s or an array of two nested `tf.Tensor` structures. 
The former\n * case is for models with exactly one input and one output (e.g..\n * a sequential model). The latter case is for models with multiple\n * inputs and/or multiple outputs. Of the two items in the array, the\n * first is the input feature(s) and the second is the output target(s).\n * @param args A configuration object for the dataset-based evaluation.\n * @returns Loss and metric values as an Array of `Scalar` objects.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n async evaluateDataset(dataset, args) {\n this.makeTestFunction();\n return evaluateDataset(this, dataset, args);\n }\n /**\n * Get number of samples provided for training, evaluation or prediction.\n *\n * @param ins Input `tf.Tensor`.\n * @param batchSize Integer batch size, optional.\n * @param steps Total number of steps (batches of samples) before\n * declaring loop finished. Optional.\n * @param stepsName The public API's parameter name for `steps`.\n * @returns Number of samples provided.\n */\n checkNumSamples(ins, batchSize, steps, stepsName = 'steps') {\n let numSamples;\n if (steps != null) {\n numSamples = null;\n if (batchSize != null) {\n throw new ValueError(`If ${stepsName} is set, batchSize must be null or undefined.` +\n `Got batchSize = ${batchSize}`);\n }\n }\n else if (ins != null) {\n if (Array.isArray(ins)) {\n numSamples = ins[0].shape[0];\n }\n else {\n numSamples = ins.shape[0];\n }\n }\n else {\n throw new ValueError(`Either the input data should have a defined shape, or ` +\n `${stepsName} shoud be specified.`);\n }\n return numSamples;\n }\n /**\n * Execute internal tensors of the model with input data feed.\n * @param inputs Input data feed. Must match the inputs of the model.\n * @param outputs Names of the output tensors to be fetched. Must match\n * names of the SymbolicTensors that belong to the graph.\n * @returns Fetched values for `outputs`.\n */\n execute(inputs, outputs) {\n if (Array.isArray(outputs) && outputs.length === 0) {\n throw new ValueError('`outputs` is an empty Array, which is not allowed.');\n }\n const outputsIsArray = Array.isArray(outputs);\n const outputNames = (outputsIsArray ? outputs : [outputs]);\n const outputSymbolicTensors = this.retrieveSymbolicTensors(outputNames);\n // Format the input into a FeedDict.\n const feedDict = new FeedDict();\n if (inputs instanceof Tensor) {\n inputs = [inputs];\n }\n if (Array.isArray(inputs)) {\n if (inputs.length !== this.inputs.length) {\n throw new ValueError(`The number of inputs provided (${inputs.length}) ` +\n `does not match the number of inputs of this model ` +\n `(${this.inputs.length}).`);\n }\n for (let i = 0; i < this.inputs.length; ++i) {\n feedDict.add(this.inputs[i], inputs[i]);\n }\n }\n else {\n for (const input of this.inputs) {\n const tensorValue = inputs[input.name];\n if (tensorValue == null) {\n throw new ValueError(`No value is provided for the model's input ${input.name}`);\n }\n feedDict.add(input, tensorValue);\n }\n }\n // Run execution.\n const executeOutputs = execute(outputSymbolicTensors, feedDict);\n return outputsIsArray ? executeOutputs : executeOutputs[0];\n }\n /**\n * Retrieve the model's internal symbolic tensors from symbolic-tensor names.\n */\n retrieveSymbolicTensors(symbolicTensorNames) {\n const outputSymbolicTensors = pyListRepeat(null, symbolicTensorNames.length);\n let outputsRemaining = symbolicTensorNames.length;\n for (const layer of this.layers) {\n const layerOutputs = Array.isArray(layer.output) ? 
layer.output : [layer.output];\n const layerOutputNames = layerOutputs.map(output => output.name);\n for (let i = 0; i < symbolicTensorNames.length; ++i) {\n const index = layerOutputNames.indexOf(symbolicTensorNames[i]);\n if (index !== -1) {\n outputSymbolicTensors[i] = layerOutputs[index];\n outputsRemaining--;\n }\n if (outputsRemaining === 0) {\n break;\n }\n }\n if (outputsRemaining === 0) {\n break;\n }\n }\n if (outputsRemaining > 0) {\n const remainingNames = [];\n outputSymbolicTensors.forEach((tensor, i) => {\n if (tensor == null) {\n remainingNames.push(symbolicTensorNames[i]);\n }\n });\n throw new ValueError(`Cannot find SymbolicTensors for output name(s): ` +\n `${JSON.stringify(remainingNames)}`);\n }\n return outputSymbolicTensors;\n }\n /**\n * Helper method to loop over some data in batches.\n *\n * Porting Note: Not using the functional approach in the Python equivalent\n * due to the imperative backend.\n * Porting Note: Does not support step mode currently.\n *\n * @param ins: input data\n * @param batchSize: integer batch size.\n * @param verbose: verbosity model\n * @returns: Predictions as `tf.Tensor` (if a single output) or an `Array` of\n * `tf.Tensor` (if multipe outputs).\n */\n predictLoop(ins, batchSize = 32, verbose = false) {\n return tfc.tidy(() => {\n const numSamples = this.checkNumSamples(ins);\n if (verbose) {\n throw new NotImplementedError('Verbose predictLoop() is not implemented yet.');\n }\n // Sample-based predictions.\n // Porting Note: Tensor currently does not support sliced assignments as\n // in numpy, e.g., x[1:3] = y. Therefore we use concatenation while\n // iterating over the batches.\n const batches = makeBatches(numSamples, batchSize);\n const outsBatches = this.outputs.map(output => []);\n // TODO(cais): Can the scope() be pushed down inside the for loop?\n for (let batchIndex = 0; batchIndex < batches.length; ++batchIndex) {\n const batchOuts = tfc.tidy(() => {\n const batchStart = batches[batchIndex][0];\n const batchEnd = batches[batchIndex][1];\n // TODO(cais): Take care of the case of the last element is a flag for\n // training/test.\n const insBatch = sliceArrays(ins, batchStart, batchEnd);\n // Construct the feeds for execute();\n const feeds = [];\n if (Array.isArray(insBatch)) {\n for (let i = 0; i < insBatch.length; ++i) {\n feeds.push({ key: this.inputs[i], value: insBatch[i] });\n }\n }\n else {\n feeds.push({ key: this.inputs[0], value: insBatch });\n }\n const feedDict = new FeedDict(feeds);\n return execute(this.outputs, feedDict);\n });\n batchOuts.forEach((batchOut, i) => outsBatches[i].push(batchOut));\n }\n return singletonOrArray(outsBatches.map(batches => tfc.concat(batches, 0)));\n });\n }\n /**\n * Generates output predictions for the input samples.\n *\n * Computation is done in batches.\n *\n * Note: the \"step\" mode of predict() is currently not supported.\n * This is because the TensorFlow.js core backend is imperative only.\n *\n * ```js\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 1, inputShape: [10]})]\n * });\n * model.predict(tf.ones([8, 10]), {batchSize: 4}).print();\n * ```\n *\n * @param x The input data, as a Tensor, or an `Array` of `tf.Tensor`s if\n * the model has multiple inputs.\n * @param args A `ModelPredictArgs` object containing optional fields.\n *\n * @return Prediction results as a `tf.Tensor`(s).\n *\n * @exception ValueError In case of mismatch between the provided input data\n * and the model's expectations, or in case a stateful model receives a\n * number 
of samples that is not a multiple of the batch size.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n predict(x, args = {}) {\n const xsRank2OrHigher = ensureTensorsRank2OrHigher(x);\n checkInputData(xsRank2OrHigher, this.inputNames, this.feedInputShapes, false);\n try {\n // TODO(cais): Take care of stateful models.\n // if (this.stateful) ...\n // TODO(cais): Take care of the learning_phase boolean flag.\n // if (this.useLearningPhase) ...\n const batchSize = args.batchSize == null ? 32 : args.batchSize;\n checkBatchSize(batchSize);\n return this.predictLoop(xsRank2OrHigher, batchSize);\n }\n finally {\n disposeNewTensors(xsRank2OrHigher, x);\n }\n }\n /**\n * Returns predictions for a single batch of samples.\n *\n * ```js\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 1, inputShape: [10]})]\n * });\n * model.predictOnBatch(tf.ones([8, 10])).print();\n * ```\n * @param x: Input samples, as a Tensor (for models with exactly one\n * input) or an array of Tensors (for models with more than one input).\n * @return Tensor(s) of predictions\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n predictOnBatch(x) {\n checkInputData(x, this.inputNames, this.feedInputShapes, true);\n // TODO(cais): Take care of the learning_phase boolean flag.\n // if (this.useLearningPhase) ...\n const batchSize = (Array.isArray(x) ? x[0] : x).shape[0];\n return this.predictLoop(x, batchSize);\n }\n standardizeUserDataXY(x, y, checkBatchAxis = true, batchSize) {\n // TODO(cais): Add sampleWeight, classWeight\n if (this.optimizer_ == null) {\n throw new RuntimeError('You must compile a model before training/testing. Use ' +\n 'LayersModel.compile(modelCompileArgs).');\n }\n const outputShapes = [];\n for (let i = 0; i < this.feedOutputShapes.length; ++i) {\n const outputShape = this.feedOutputShapes[i];\n const lossFn = this.feedLossFns[i];\n if (lossFn === losses.sparseCategoricalCrossentropy) {\n outputShapes.push(outputShape.slice(0, outputShape.length - 1).concat([1]));\n }\n else {\n // Porting Note: Because of strong typing `lossFn` must be a function.\n outputShapes.push(outputShape);\n }\n }\n x = standardizeInputData(x, this.feedInputNames, this.feedInputShapes, false, 'input');\n y = standardizeInputData(y, this.feedOutputNames, outputShapes, false, 'target');\n // TODO(cais): Standardize sampleWeights & classWeights.\n checkArrayLengths(x, y, null);\n // TODO(cais): Check sampleWeights as well.\n checkLossAndTargetCompatibility(y, this.feedLossFns, this.feedOutputShapes);\n if (this.stateful && batchSize != null && batchSize > 0) {\n if (x[0].shape[0] % batchSize !== 0) {\n throw new ValueError(`In a stateful network, you should only pass inputs with a ` +\n `number of samples that is divisible by the batch size ` +\n `${batchSize}. 
Found: ${x[0].shape[0]} sample(s).`);\n }\n }\n return [x, y];\n }\n async standardizeUserData(x, y, sampleWeight, classWeight, checkBatchAxis = true, batchSize) {\n const [standardXs, standardYs] = this.standardizeUserDataXY(x, y, checkBatchAxis, batchSize);\n // TODO(cais): Handle sampleWeights.\n if (sampleWeight != null) {\n throw new Error('sample weight is not supported yet.');\n }\n let standardSampleWeights = null;\n if (classWeight != null) {\n const classWeights = standardizeClassWeights(classWeight, this.outputNames);\n standardSampleWeights = [];\n for (let i = 0; i < classWeights.length; ++i) {\n standardSampleWeights.push(await standardizeWeights(standardYs[i], null, classWeights[i]));\n }\n }\n // TODO(cais): Deal with the case of model.stateful == true.\n return [standardXs, standardYs, standardSampleWeights];\n }\n /**\n * Loop over some test data in batches.\n * @param f A Function returning a list of tensors.\n * @param ins Array of tensors to be fed to `f`.\n * @param batchSize Integer batch size or `null` / `undefined`.\n * @param verbose verbosity mode.\n * @param steps Total number of steps (batches of samples) before\n * declaring test finished. Ignored with the default value of `null` /\n * `undefined`.\n * @returns Array of Scalars.\n */\n testLoop(f, ins, batchSize, verbose = 0, steps) {\n return tfc.tidy(() => {\n const numSamples = this.checkNumSamples(ins, batchSize, steps, 'steps');\n const outs = [];\n if (verbose > 0) {\n throw new NotImplementedError('Verbose mode is not implemented yet.');\n }\n // TODO(cais): Use `indicesForConversionToDense' to prevent slow down.\n if (steps != null) {\n throw new NotImplementedError('steps mode in testLoop() is not implemented yet');\n }\n else {\n const batches = makeBatches(numSamples, batchSize);\n const indexArray = tensor1d(range(0, numSamples));\n for (let batchIndex = 0; batchIndex < batches.length; ++batchIndex) {\n const batchStart = batches[batchIndex][0];\n const batchEnd = batches[batchIndex][1];\n const batchIds = K.sliceAlongFirstAxis(indexArray, batchStart, batchEnd - batchStart);\n // TODO(cais): In ins, train flag can be a number, instead of an\n // Tensor? Do we need to handle this in tfjs-layers?\n const insBatch = sliceArraysByIndices(ins, batchIds);\n const batchOuts = f(insBatch);\n if (batchIndex === 0) {\n for (let i = 0; i < batchOuts.length; ++i) {\n outs.push(scalar(0));\n }\n }\n for (let i = 0; i < batchOuts.length; ++i) {\n const batchOut = batchOuts[i];\n outs[i] =\n tfc.add(outs[i], tfc.mul(batchEnd - batchStart, batchOut));\n }\n }\n for (let i = 0; i < outs.length; ++i) {\n outs[i] = tfc.div(outs[i], numSamples);\n }\n }\n return outs;\n });\n }\n getDedupedMetricsNames() {\n const outLabels = this.metricsNames;\n // Rename duplicated metrics names (can happen with an output layer\n // shared among multiple dataflows).\n const dedupedOutLabels = [];\n for (let i = 0; i < outLabels.length; ++i) {\n const label = outLabels[i];\n let newLabel = label;\n if (count(outLabels, label) > 1) {\n const dupIndex = count(outLabels.slice(0, i), label);\n newLabel += `_${dupIndex}`;\n }\n dedupedOutLabels.push(newLabel);\n }\n return dedupedOutLabels;\n }\n /**\n * Creates a function that performs the following actions:\n *\n * 1. computes the losses\n * 2. sums them to get the total loss\n * 3. call the optimizer computes the gradients of the LayersModel's\n * trainable weights w.r.t. the total loss and update the variables\n * 4. calculates the metrics\n * 5. 
returns the values of the losses and metrics.\n */\n makeTrainFunction() {\n return (data) => {\n const lossValues = [];\n const inputs = data.slice(0, this.inputs.length);\n const targets = data.slice(this.inputs.length, this.inputs.length + this.outputs.length);\n const sampleWeights = data.slice(this.inputs.length + this.outputs.length, this.inputs.length + this.outputs.length * 2);\n const metricsValues = [];\n // Create a function that computes the total loss based on the\n // inputs. This function is used for obtaining gradients through\n // backprop.\n const totalLossFunction = () => {\n const feeds = [];\n for (let i = 0; i < this.inputs.length; ++i) {\n feeds.push({ key: this.inputs[i], value: inputs[i] });\n }\n const feedDict = new FeedDict(feeds);\n const outputs = execute(this.outputs, feedDict, { 'training': true });\n // TODO(cais): Take care of the case of multiple outputs from a\n // single layer?\n let totalLoss;\n for (let i = 0; i < this.lossFunctions.length; ++i) {\n const lossFunction = this.lossFunctions[i];\n let loss = lossFunction(targets[i], outputs[i]);\n if (sampleWeights[i] != null) {\n loss = computeWeightedLoss(loss, sampleWeights[i]);\n }\n // TODO(cais): push Scalar instead.\n const meanLoss = tfc.mean(loss);\n // TODO(cais): Use a scope() instead, to avoid ownership.\n lossValues.push(meanLoss);\n if (i === 0) {\n totalLoss = loss;\n }\n else {\n totalLoss = tfc.add(totalLoss, loss);\n }\n }\n // Compute the metrics.\n // TODO(cais): These should probably be calculated outside\n // totalLossFunction to benefit speed?\n for (let i = 0; i < this.metricsTensors.length; ++i) {\n let weightedMetric;\n if (this.outputs.length > 1 && i < this.outputs.length) {\n weightedMetric = lossValues[i];\n }\n else {\n const metric = this.metricsTensors[i][0];\n const outputIndex = this.metricsTensors[i][1];\n weightedMetric =\n tfc.mean(metric(targets[outputIndex], outputs[outputIndex]));\n }\n tfc.keep(weightedMetric);\n // TODO(cais): Use a scope() instead, to avoid ownership.\n metricsValues.push(weightedMetric);\n }\n totalLoss = tfc.mean(totalLoss);\n // Add regularizer penalties.\n this.calculateLosses().forEach(regularizerLoss => {\n totalLoss = tfc.add(totalLoss, regularizerLoss);\n });\n return totalLoss;\n };\n const variables = this.collectedTrainableWeights.map(param => param.read());\n const returnCost = true;\n const totalLossValue = this.optimizer_.minimize(totalLossFunction, returnCost, variables);\n return [totalLossValue].concat(metricsValues);\n };\n }\n /**\n * Create a function which, when invoked with an array of `tf.Tensor`s as a\n * batch of inputs, returns the prespecified loss and metrics of the model\n * under the batch of input data.\n */\n makeTestFunction() {\n this.testFunction = (data) => {\n return tfc.tidy(() => {\n const valOutputs = [];\n let totalLoss;\n const inputs = data.slice(0, this.inputs.length);\n const targets = data.slice(this.inputs.length, this.inputs.length + this.outputs.length);\n const feeds = [];\n for (let i = 0; i < this.inputs.length; ++i) {\n feeds.push({ key: this.inputs[i], value: inputs[i] });\n }\n const feedDict = new FeedDict(feeds);\n const outputs = execute(this.outputs, feedDict);\n // Compute total loss.\n for (let i = 0; i < this.lossFunctions.length; ++i) {\n const lossFunction = this.lossFunctions[i];\n // TODO(cais): Add sample weighting and replace the simple\n // averaging.\n const loss = tfc.mean(lossFunction(targets[i], outputs[i]));\n if (i === 0) {\n totalLoss = loss;\n }\n else {\n totalLoss = 
tfc.add(totalLoss, loss);\n }\n valOutputs.push(totalLoss);\n }\n // Compute the metrics.\n for (let i = 0; i < this.metricsTensors.length; ++i) {\n const metric = this.metricsTensors[i][0];\n const outputIndex = this.metricsTensors[i][1];\n // TODO(cais): Replace K.mean() with a proper weighting function.\n const meanMetric = tfc.mean(metric(targets[outputIndex], outputs[outputIndex]));\n valOutputs.push(meanMetric);\n }\n return valOutputs;\n });\n };\n }\n /**\n * Trains the model for a fixed number of epochs (iterations on a\n * dataset).\n *\n * ```js\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 1, inputShape: [10]})]\n * });\n * model.compile({optimizer: 'sgd', loss: 'meanSquaredError'});\n * for (let i = 1; i < 5 ; ++i) {\n * const h = await model.fit(tf.ones([8, 10]), tf.ones([8, 1]), {\n * batchSize: 4,\n * epochs: 3\n * });\n * console.log(\"Loss after Epoch \" + i + \" : \" + h.history.loss[0]);\n * }\n * ```\n *\n * @param x `tf.Tensor` of training data, or an array of `tf.Tensor`s if the\n * model has multiple inputs. If all inputs in the model are named, you\n * can also pass a dictionary mapping input names to `tf.Tensor`s.\n * @param y `tf.Tensor` of target (label) data, or an array of `tf.Tensor`s if\n * the model has multiple outputs. If all outputs in the model are named,\n * you can also pass a dictionary mapping output names to `tf.Tensor`s.\n * @param args A `ModelFitArgs`, containing optional fields.\n *\n * @return A `History` instance. Its `history` attribute contains all\n * information collected during training.\n *\n * @exception ValueError In case of mismatch between the provided input\n * data and what the model expects.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n async fit(x, y, args = {}) {\n return fitTensors(this, x, y, args);\n }\n // TODO(cais): Add code snippet below when it's possible to instantiate\n // actual dataset objects.\n /**\n * Trains the model using a dataset object.\n *\n * @param dataset A dataset object. Its `iterator()` method is expected\n * to generate a dataset iterator object, the `next()` method of which\n * is expected to produce data batches for training. The return value\n * of the `next()` call ought to contain a boolean `done` field and a\n * `value` field. The `value` field is expected to be an array of two\n * `tf.Tensor`s or an array of two nested `tf.Tensor` structures. The former\n * case is for models with exactly one input and one output (e.g..\n * a sequential model). The latter case is for models with multiple\n * inputs and/or multiple outputs.\n * Of the two items in the array, the first is the input feature(s) and\n * the second is the output target(s).\n * @param args A `ModelFitDatasetArgs`, containing optional fields.\n *\n * @return A `History` instance. Its `history` attribute contains all\n * information collected during training.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n async fitDataset(dataset, args) {\n return fitDataset(this, dataset, args);\n }\n /**\n * Runs a single gradient update on a single batch of data.\n *\n * This method differs from `fit()` and `fitDataset()` in the following\n * regards:\n * - It operates on exactly one batch of data.\n * - It returns only the loss and matric values, instead of\n * returning the batch-by-batch loss and metric values.\n * - It doesn't support fine-grained options such as verbosity and\n * callbacks.\n *\n * @param x Input data. 
It could be one of the following:\n * - A `tf.Tensor`, or an Array of `tf.Tensor`s (in case the model has\n * multiple inputs).\n * - An Object mapping input names to corresponding `tf.Tensor` (if the\n * model has named inputs).\n * @param y Target data. It could be either a `tf.Tensor` or multiple\n * `tf.Tensor`s. It should be consistent with `x`.\n * @returns Training loss or losses (in case the model has\n * multiple outputs), along with metrics (if any), as numbers.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n async trainOnBatch(x, y) {\n // TODO(cais): Support sampleWeight and classWeight.\n // TODO(cais): Support Dataset objects.\n const standardizeOut = await this.standardizeUserData(x, y);\n const inputs = standardizeOut[0];\n const targets = standardizeOut[1];\n const trainFunction = this.makeTrainFunction();\n const losses = trainFunction(inputs.concat(targets));\n const lossValues = [];\n for (const loss of losses) {\n const v = await loss.data();\n lossValues.push(v[0]);\n }\n tfc.dispose(losses);\n return singletonOrArray(lossValues);\n }\n /**\n * Extract weight values of the model.\n *\n * @param config: An instance of `io.SaveConfig`, which specifies\n * model-saving options such as whether only trainable weights are to be\n * saved.\n * @returns A `NamedTensorMap` mapping original weight names (i.e.,\n * non-uniqueified weight names) to their values.\n */\n getNamedWeights(config) {\n const namedWeights = [];\n const trainableOnly = config != null && config.trainableOnly;\n const weights = trainableOnly ? this.trainableWeights : this.weights;\n const weightValues = this.getWeights(trainableOnly);\n for (let i = 0; i < weights.length; ++i) {\n if (trainableOnly && !weights[i].trainable) {\n // Optionally skip non-trainable weights.\n continue;\n }\n namedWeights.push({ name: weights[i].originalName, tensor: weightValues[i] });\n }\n return namedWeights;\n }\n /**\n * Setter used for force stopping of LayersModel.fit() (i.e., training).\n *\n * Example:\n *\n * ```js\n * const input = tf.input({shape: [10]});\n * const output = tf.layers.dense({units: 1}).apply(input);\n * const model = tf.model({inputs: [input], outputs: [output]});\n * model.compile({loss: 'meanSquaredError', optimizer: 'sgd'});\n * const xs = tf.ones([8, 10]);\n * const ys = tf.zeros([8, 1]);\n *\n * const history = await model.fit(xs, ys, {\n * epochs: 10,\n * callbacks: {\n * onEpochEnd: async (epoch, logs) => {\n * if (epoch === 2) {\n * model.stopTraining = true;\n * }\n * }\n * }\n * });\n *\n * // There should be only 3 values in the loss array, instead of 10\n * // values,\n * // due to the stopping after 3 epochs.\n * console.log(history.history.loss);\n * ```\n */\n set stopTraining(stop) {\n this.stopTraining_ = stop;\n }\n get stopTraining() {\n return this.stopTraining_;\n }\n get optimizer() {\n return this.optimizer_;\n }\n set optimizer(optimizer) {\n if (this.optimizer_ !== optimizer) {\n this.optimizer_ = optimizer;\n this.isOptimizerOwned = false;\n }\n }\n dispose() {\n const result = super.dispose();\n if (result.refCountAfterDispose === 0 && this.optimizer != null &&\n this.isOptimizerOwned) {\n const numTensorsBeforeOptmizerDisposal = tfc.memory().numTensors;\n this.optimizer_.dispose();\n result.numDisposedVariables +=\n numTensorsBeforeOptmizerDisposal - tfc.memory().numTensors;\n }\n return result;\n }\n getLossIdentifiers() {\n let lossNames;\n if (typeof this.loss === 'string') {\n lossNames = toSnakeCase(this.loss);\n }\n else if (Array.isArray(this.loss)) {\n 
for (const loss of this.loss) {\n if (typeof loss !== 'string') {\n throw new Error('Serialization of non-string loss is not supported.');\n }\n }\n lossNames = this.loss.map(name => toSnakeCase(name));\n }\n else {\n const outputNames = Object.keys(this.loss);\n lossNames = {};\n const losses = this.loss;\n for (const outputName of outputNames) {\n if (typeof losses[outputName] === 'string') {\n lossNames[outputName] =\n toSnakeCase(losses[outputName]);\n }\n else {\n throw new Error('Serialization of non-string loss is not supported.');\n }\n }\n }\n return lossNames;\n }\n getMetricIdentifiers() {\n if (typeof this.metrics === 'string' ||\n typeof this.metrics === 'function') {\n return [toSnakeCase(Metrics.getLossOrMetricName(this.metrics))];\n }\n else if (Array.isArray(this.metrics)) {\n return this.metrics.map(metric => toSnakeCase(Metrics.getLossOrMetricName(metric)));\n }\n else {\n const metricsIdentifiers = {};\n for (const key in this.metrics) {\n metricsIdentifiers[key] =\n toSnakeCase(Metrics.getLossOrMetricName(this.metrics[key]));\n }\n return metricsIdentifiers;\n }\n }\n getTrainingConfig() {\n return {\n loss: this.getLossIdentifiers(),\n metrics: this.getMetricIdentifiers(),\n optimizer_config: {\n class_name: this.optimizer.getClassName(),\n config: this.optimizer.getConfig()\n }\n };\n // TODO(cais): Add weight_metrics when they are supported.\n // TODO(cais): Add sample_weight_mode when it's supported.\n // TODO(cais): Add loss_weights when it's supported.\n }\n loadTrainingConfig(trainingConfig) {\n if (trainingConfig.weighted_metrics != null) {\n throw new Error('Loading weight_metrics is not supported yet.');\n }\n if (trainingConfig.loss_weights != null) {\n throw new Error('Loading loss_weights is not supported yet.');\n }\n if (trainingConfig.sample_weight_mode != null) {\n throw new Error('Loading sample_weight_mode is not supported yet.');\n }\n const tsConfig = convertPythonicToTs(trainingConfig.optimizer_config);\n const optimizer = deserialize(tsConfig);\n let loss;\n if (typeof trainingConfig.loss === 'string') {\n loss = toCamelCase(trainingConfig.loss);\n }\n else if (Array.isArray(trainingConfig.loss)) {\n loss = trainingConfig.loss.map(lossEntry => toCamelCase(lossEntry));\n }\n else if (trainingConfig.loss != null) {\n loss = {};\n for (const key in trainingConfig.loss) {\n loss[key] = toCamelCase(trainingConfig.loss[key]);\n }\n }\n let metrics;\n if (Array.isArray(trainingConfig.metrics)) {\n metrics = trainingConfig.metrics.map(metric => toCamelCase(metric));\n }\n else if (trainingConfig.metrics != null) {\n metrics = {};\n for (const key in trainingConfig.metrics) {\n metrics[key] = toCamelCase(trainingConfig.metrics[key]);\n }\n }\n this.compile({ loss, metrics, optimizer });\n }\n /**\n * Save the configuration and/or weights of the LayersModel.\n *\n * An `IOHandler` is an object that has a `save` method of the proper\n * signature defined. The `save` method manages the storing or\n * transmission of serialized data (\"artifacts\") that represent the\n * model's topology and weights onto or via a specific medium, such as\n * file downloads, local storage, IndexedDB in the web browser and HTTP\n * requests to a server. TensorFlow.js provides `IOHandler`\n * implementations for a number of frequently used saving mediums, such as\n * `tf.io.browserDownloads` and `tf.io.browserLocalStorage`. 
See `tf.io`\n * for more details.\n *\n * This method also allows you to refer to certain types of `IOHandler`s\n * as URL-like string shortcuts, such as 'localstorage://' and\n * 'indexeddb://'.\n *\n * Example 1: Save `model`'s topology and weights to browser [local\n * storage](https://developer.mozilla.org/en-US/docs/Web/API/Window/localStorage);\n * then load it back.\n *\n * ```js\n * const model = tf.sequential(\n * {layers: [tf.layers.dense({units: 1, inputShape: [3]})]});\n * console.log('Prediction from original model:');\n * model.predict(tf.ones([1, 3])).print();\n *\n * const saveResults = await model.save('localstorage://my-model-1');\n *\n * const loadedModel = await tf.loadLayersModel('localstorage://my-model-1');\n * console.log('Prediction from loaded model:');\n * loadedModel.predict(tf.ones([1, 3])).print();\n * ```\n *\n * Example 2. Saving `model`'s topology and weights to browser\n * [IndexedDB](https://developer.mozilla.org/en-US/docs/Web/API/IndexedDB_API);\n * then load it back.\n *\n * ```js\n * const model = tf.sequential(\n * {layers: [tf.layers.dense({units: 1, inputShape: [3]})]});\n * console.log('Prediction from original model:');\n * model.predict(tf.ones([1, 3])).print();\n *\n * const saveResults = await model.save('indexeddb://my-model-1');\n *\n * const loadedModel = await tf.loadLayersModel('indexeddb://my-model-1');\n * console.log('Prediction from loaded model:');\n * loadedModel.predict(tf.ones([1, 3])).print();\n * ```\n *\n * Example 3. Saving `model`'s topology and weights as two files\n * (`my-model-1.json` and `my-model-1.weights.bin`) downloaded from\n * browser.\n *\n * ```js\n * const model = tf.sequential(\n * {layers: [tf.layers.dense({units: 1, inputShape: [3]})]});\n * const saveResults = await model.save('downloads://my-model-1');\n * ```\n *\n * Example 4. 
Send `model`'s topology and weights to an HTTP server.\n * See the documentation of `tf.io.http` for more details\n * including specifying request parameters and implementation of the\n * server.\n *\n * ```js\n * const model = tf.sequential(\n * {layers: [tf.layers.dense({units: 1, inputShape: [3]})]});\n * const saveResults = await model.save('http://my-server/model/upload');\n * ```\n *\n * @param handlerOrURL An instance of `IOHandler` or a URL-like,\n * scheme-based string shortcut for `IOHandler`.\n * @param config Options for saving the model.\n * @returns A `Promise` of `SaveResult`, which summarizes the result of\n * the saving, such as byte sizes of the saved artifacts for the model's\n * topology and weight values.\n *\n * @doc {heading: 'Models', subheading: 'Classes', ignoreCI: true}\n */\n async save(handlerOrURL, config) {\n if (typeof handlerOrURL === 'string') {\n const handlers = io.getSaveHandlers(handlerOrURL);\n if (handlers.length === 0) {\n throw new ValueError(`Cannot find any save handlers for URL '${handlerOrURL}'`);\n }\n else if (handlers.length > 1) {\n throw new ValueError(`Found more than one (${handlers.length}) save handlers for ` +\n `URL '${handlerOrURL}'`);\n }\n handlerOrURL = handlers[0];\n }\n if (handlerOrURL.save == null) {\n throw new ValueError('LayersModel.save() cannot proceed because the IOHandler ' +\n 'provided does not have the `save` attribute defined.');\n }\n const weightDataAndSpecs = await io.encodeWeights(this.getNamedWeights(config));\n const returnString = false;\n const unusedArg = null;\n const modelConfig = this.toJSON(unusedArg, returnString);\n const modelArtifacts = {\n modelTopology: modelConfig,\n format: LAYERS_MODEL_FORMAT_NAME,\n generatedBy: `TensorFlow.js tfjs-layers v${version}`,\n convertedBy: null,\n };\n const includeOptimizer = config == null ? false : config.includeOptimizer;\n if (includeOptimizer && this.optimizer != null) {\n modelArtifacts.trainingConfig = this.getTrainingConfig();\n const weightType = 'optimizer';\n const { data: optimizerWeightData, specs: optimizerWeightSpecs } = await io.encodeWeights(await this.optimizer.getWeights(), weightType);\n weightDataAndSpecs.specs.push(...optimizerWeightSpecs);\n weightDataAndSpecs.data = io.concatenateArrayBuffers([weightDataAndSpecs.data, optimizerWeightData]);\n }\n if (this.userDefinedMetadata != null) {\n // Check serialized size of user-defined metadata.\n const checkSize = true;\n checkUserDefinedMetadata(this.userDefinedMetadata, this.name, checkSize);\n modelArtifacts.userDefinedMetadata = this.userDefinedMetadata;\n }\n modelArtifacts.weightData = weightDataAndSpecs.data;\n modelArtifacts.weightSpecs = weightDataAndSpecs.specs;\n return handlerOrURL.save(modelArtifacts);\n }\n /**\n * Set user-defined metadata.\n *\n * The set metadata will be serialized together with the topology\n * and weights of the model during `save()` calls.\n *\n * @param setUserDefinedMetadata\n */\n setUserDefinedMetadata(userDefinedMetadata) {\n checkUserDefinedMetadata(userDefinedMetadata, this.name);\n this.userDefinedMetadata = userDefinedMetadata;\n }\n /**\n * Get user-defined metadata.\n *\n * The metadata is supplied via one of the two routes:\n * 1. By calling `setUserDefinedMetadata()`.\n * 2. 
Loaded during model loading (if the model is constructed\n * via `tf.loadLayersModel()`.)\n *\n * If no user-defined metadata is available from either of the\n * two routes, this function will return `undefined`.\n */\n getUserDefinedMetadata() {\n return this.userDefinedMetadata;\n }\n}\n// The class name is 'Model' rather than 'LayersModel' for backwards\n// compatibility since this class name shows up in the serialization format.\n/** @nocollapse */\nLayersModel.className = 'Model';\nserialization.registerClass(LayersModel);\n/**\n * A `tf.Functional` is an alias to `tf.LayersModel`.\n *\n * See also:\n * `tf.LayersModel`, `tf.Sequential`, `tf.loadLayersModel`.\n */\n/** @doc {heading: 'Models', subheading: 'Classes'} */\nexport class Functional extends LayersModel {\n}\nFunctional.className = 'Functional';\nserialization.registerClass(Functional);\n//# sourceMappingURL=training.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* Original source keras/models.py */\nimport { dispose, io, serialization, util } from '@tensorflow/tfjs-core';\nimport { getUid } from './backend/state';\nimport { Input } from './engine/input_layer';\nimport { getSourceInputs, Node } from './engine/topology';\nimport { LayersModel } from './engine/training';\nimport { NotImplementedError, RuntimeError, ValueError } from './errors';\nimport { deserialize } from './layers/serialization';\nimport * as generic_utils from './utils/generic_utils';\nimport { convertPythonicToTs } from './utils/serialization_utils';\nimport { getExactlyOneShape } from './utils/types_utils';\n/**\n * Parses a JSON model configuration file and returns a model instance.\n *\n * ```js\n * // This example shows how to serialize a model using `toJSON()` and\n * // deserialize it as another model using `tf.models.modelFromJSON()`.\n * // Note: this example serializes and deserializes only the topology\n * // of the model; the weights of the loaded model will be different\n * // from those of the the original model, due to random weight\n * // initialization.\n * // To load the topology and weights of a model, use `tf.loadLayersModel()`.\n * const model1 = tf.sequential();\n * model1.add(tf.layers.repeatVector({inputShape: [2], n: 4}));\n * // Serialize `model1` as a JSON object.\n * const model1JSON = model1.toJSON(null, false);\n * model1.summary();\n *\n * const model2 = await tf.models.modelFromJSON(model1JSON);\n * model2.summary();\n * ```\n *\n * @param modelAndWeightsConfig JSON object or string encoding a model and\n * weights configuration. 
It can also be only the topology JSON of the\n * model, in which case the weights will not be loaded.\n * @param custom_objects Optional dictionary mapping names\n * (strings) to custom classes or functions to be\n * considered during deserialization.\n * @returns A TensorFlow.js Layers `tf.LayersModel` instance (uncompiled).\n */\nexport async function modelFromJSON(modelAndWeightsConfig, customObjects) {\n if (!('modelTopology' in modelAndWeightsConfig)) {\n modelAndWeightsConfig = { modelTopology: modelAndWeightsConfig };\n }\n modelAndWeightsConfig = modelAndWeightsConfig;\n let modelTopology = modelAndWeightsConfig.modelTopology;\n if (modelTopology['model_config'] != null) {\n // If the model-topology JSON contains a 'model_config' field, then it is\n // a full model JSON (e.g., from `keras.Model.save()`), which contains\n // not only the model's architecture in its 'model_config' field, but\n // additional information such as the model's optimizer. We use only the\n // 'model_config' field currently.\n modelTopology = modelTopology['model_config'];\n }\n const tsConfig = convertPythonicToTs(modelTopology);\n const model = deserialize(tsConfig, customObjects);\n if (modelAndWeightsConfig.weightsManifest != null) {\n // Load the weight values keyed by the original tensor names in the model\n // file that was loaded. These should match the keys of the weight\n // manifest.\n const weightValues = await io.loadWeights(modelAndWeightsConfig.weightsManifest, modelAndWeightsConfig.pathPrefix, model.weights.map(weight => weight.originalName));\n // Map the weights to the unique tensor names generated during model loading\n const uniqueWeightValues = {};\n for (const weight of model.weights) {\n uniqueWeightValues[weight.originalName] =\n weightValues[weight.originalName];\n }\n model.loadWeights(uniqueWeightValues);\n // Dispose temporary weight values.\n dispose(weightValues);\n }\n return model;\n}\n/**\n * Load a model, including its topology and optionally weights. See the\n * Tutorial named \"How to import a Keras Model\" for usage examples.\n *\n * Example 1: Save `model`'s topology and weights to browser [local\n * storage](https://developer.mozilla.org/en-US/docs/Web/API/Window/localStorage);\n * then load it back.\n *\n * ```js\n * const model = tf.sequential(\n * {layers: [tf.layers.dense({units: 1, inputShape: [3]})]});\n * console.log('Prediction from original model:');\n * model.predict(tf.ones([1, 3])).print();\n *\n * const saveResults = await model.save('localstorage://my-model-1');\n *\n * const loadedModel = await tf.loadLayersModel('localstorage://my-model-1');\n * console.log('Prediction from loaded model:');\n * loadedModel.predict(tf.ones([1, 3])).print();\n * ```\n *\n * Example 2. Saving `model`'s topology and weights to browser\n * [IndexedDB](https://developer.mozilla.org/en-US/docs/Web/API/IndexedDB_API);\n * then load it back.\n *\n * ```js\n * const model = tf.sequential(\n * {layers: [tf.layers.dense({units: 1, inputShape: [3]})]});\n * console.log('Prediction from original model:');\n * model.predict(tf.ones([1, 3])).print();\n *\n * const saveResults = await model.save('indexeddb://my-model-1');\n *\n * const loadedModel = await tf.loadLayersModel('indexeddb://my-model-1');\n * console.log('Prediction from loaded model:');\n * loadedModel.predict(tf.ones([1, 3])).print();\n * ```\n *\n * Example 3. 
Load a model from user-selected files from HTML\n * [file input\n * elements](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/input/file).\n *\n * ```js\n * // Note: this code snippet will not work without the HTML elements in the\n * // page\n * const jsonUpload = document.getElementById('json-upload');\n * const weightsUpload = document.getElementById('weights-upload');\n *\n * const model = await tf.loadLayersModel(\n * tf.io.browserFiles([jsonUpload.files[0], weightsUpload.files[0]]));\n * ```\n *\n * Example 4. Load a model from an HTTP server.\n *\n * ```js\n * const model = await\n * tf.loadLayersModel('https://storage.googleapis.com/tfjs-models/tfjs/iris_v1/model.json');\n * model.summary();\n * ```\n *\n * @param pathOrIOHandler Can be either of the two formats\n * 1. A string path to the `ModelAndWeightsConfig` JSON describing\n * the model in the canonical TensorFlow.js format. This path will be\n * interpreted as a relative HTTP path, to which `fetch` will be used to\n * request the model topology and weight manifest JSON.\n * The content of the JSON file is assumed to be a JSON object with the\n * following fields and values:\n * - 'modelTopology': A JSON object that can be either of:\n * 1. a model architecture JSON consistent with the format of the return\n * value of `keras.Model.to_json()`\n * 2. a full model JSON in the format of `keras.models.save_model()`.\n * - 'weightsManifest': A TensorFlow.js weights manifest.\n * See the Python converter function `save_model()` for more details.\n * It is also assumed that model weights can be accessed from relative\n * paths described by the `paths` fields in weights manifest.\n * 2. An `tf.io.IOHandler` object that loads model artifacts with its `load`\n * method.\n * @param options Optional configuration arguments for the model loading,\n * including:\n * - `strict`: Require that the provided weights exactly match those required\n * by the layers. Default true. Passing false means that both extra\n * weights and missing weights will be silently ignored.\n * - `onProgress`: A progress callback of the form:\n * `(fraction: number) => void`. 
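As a usage sketch for the `strict` and `onProgress` options just described, the snippet below loads the same hosted iris model as Example 4 and logs download progress as a fraction between 0 and 1. It assumes network access to that URL and `tf` from the union `@tensorflow/tfjs` package.

```js
// Sketch only; assumes @tensorflow/tfjs is loaded as `tf`.
const url =
    'https://storage.googleapis.com/tfjs-models/tfjs/iris_v1/model.json';
const model = await tf.loadLayersModel(url, {
  // Called repeatedly while the topology and weight shards are fetched.
  onProgress: (fraction) =>
      console.log(`loading: ${(fraction * 100).toFixed(0)}%`),
  // Default is true; set to false to silently ignore extra or missing
  // weights instead of throwing.
  strict: true,
});
model.summary();
```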
This callback can be used to monitor the\n * model-loading process.\n * @returns A `Promise` of `tf.LayersModel`, with the topology and weights\n * loaded.\n */\nexport async function loadLayersModelInternal(pathOrIOHandler, options) {\n if (options == null) {\n options = {};\n }\n if (typeof pathOrIOHandler === 'string') {\n const handlers = io.getLoadHandlers(pathOrIOHandler, options);\n if (handlers.length === 0) {\n // For backward compatibility: if no load handler can be found,\n // assume it is a relative http path.\n // TODO(cais): Reformat the args into a single `LoadOptions` once the core\n // is refactored.\n handlers.push(io.browserHTTPRequest(pathOrIOHandler, options));\n }\n else if (handlers.length > 1) {\n throw new ValueError(`Found more than one (${handlers.length}) load handlers for ` +\n `URL '${pathOrIOHandler}'`);\n }\n pathOrIOHandler = handlers[0];\n }\n return loadLayersModelFromIOHandler(pathOrIOHandler, undefined, options);\n}\n/**\n * Load a model and optionally its weights, using an IOHandler object.\n *\n * @param handler The instance of `IOHandler` to be used during the model\n * loading.\n * @param customObjects Any optional custom objects to be used during model\n * loading.\n * @param strict Whether the weight loading will be done in strict mode.\n * Default: `true`.\n */\nexport async function loadLayersModelFromIOHandler(handler, customObjects, options) {\n if (options == null) {\n options = {};\n }\n if (handler.load == null) {\n throw new ValueError('Cannot proceed with model loading because the IOHandler provided ' +\n 'does not have the `load` method implemented.');\n }\n const artifacts = await handler.load();\n let modelTopology = artifacts.modelTopology;\n if (modelTopology['model_config'] != null) {\n modelTopology = modelTopology['model_config'];\n }\n const strict = options.strict == null ? true : options.strict;\n // If weights are provided and the weight-loading mode is strict, use\n // fast weight initialization. This skips costly initializers such as\n // 'orthogonal' and saves unnecessary computation in cases where\n // the initialized weight values will immediately be overwritten by\n // loaded weight values.\n const fastWeightInit = artifacts.weightData != null && artifacts.weightSpecs != null && strict;\n const model = deserialize(convertPythonicToTs(modelTopology), customObjects, fastWeightInit);\n const trainingConfig = artifacts.trainingConfig;\n if (trainingConfig != null) {\n model.loadTrainingConfig(trainingConfig);\n }\n if (artifacts.userDefinedMetadata != null) {\n model.setUserDefinedMetadata(artifacts.userDefinedMetadata);\n }\n // If weightData is present, load the weights into the model.\n if (artifacts.weightData != null) {\n // Loading weights requires weightSpecs.\n if (artifacts.weightSpecs == null) {\n throw new ValueError('LayersModel artifacts contains weight data, but not weight specs. 
' +\n 'Therefore loading of weights cannot proceed.');\n }\n const { modelWeights, optimizerWeights } = decodeModelAndOptimizerWeights(artifacts.weightData, artifacts.weightSpecs);\n model.loadWeights(modelWeights, strict);\n if (model.optimizer != null && optimizerWeights.length > 0) {\n await model.optimizer.setWeights(optimizerWeights);\n }\n // Dispose temporary weight values.\n dispose(modelWeights);\n dispose(optimizerWeights.map(w => w.tensor));\n }\n return model;\n}\nfunction decodeModelAndOptimizerWeights(buffer, specs) {\n const name2Tensor = io.decodeWeights(buffer, specs);\n const modelWeights = {};\n const optimizerWeights = [];\n specs.forEach(spec => {\n if (spec.group === 'optimizer') {\n optimizerWeights.push({ name: spec.name, tensor: name2Tensor[spec.name] });\n }\n else {\n modelWeights[spec.name] = name2Tensor[spec.name];\n }\n });\n return { modelWeights, optimizerWeights };\n}\n/**\n * A model with a stack of layers, feeding linearly from one to the next.\n *\n * `tf.sequential` is a factory function that creates an instance of\n * `tf.Sequential`.\n *\n * ```js\n * // Define a model for linear regression.\n * const model = tf.sequential();\n * model.add(tf.layers.dense({units: 1, inputShape: [1]}));\n *\n * // Prepare the model for training: Specify the loss and the optimizer.\n * model.compile({loss: 'meanSquaredError', optimizer: 'sgd'});\n *\n * // Generate some synthetic data for training.\n * const xs = tf.tensor2d([1, 2, 3, 4], [4, 1]);\n * const ys = tf.tensor2d([1, 3, 5, 7], [4, 1]);\n *\n * // Train the model using the data then do inference on a data point the\n * // model hasn't seen:\n * await model.fit(xs, ys);\n * model.predict(tf.tensor2d([5], [1, 1])).print();\n * ```\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\nexport class Sequential extends LayersModel {\n constructor(args) {\n super({ inputs: [], outputs: [] });\n args = args || {};\n this.trainable = true;\n this.built = false;\n // Set model name.\n this.name = (args.name != null) ? 
args.name : getUid('sequential_');\n // Add to the model any layers passed to the constructor.\n if (args.layers != null) {\n for (const layer of args.layers) {\n this.add(layer);\n }\n }\n }\n // Helper function to Sequential.add Throws if the new output shape will be\n // invalid.\n checkShape(layer) {\n const shape = layer.inboundNodes[0].outputTensors[0].shape;\n if (shape.some(x => x < 0)) {\n throw new ValueError('Negative dimension size caused by adding layer ' +\n `${layer.name} with input shape [` +\n `${layer.inboundNodes[0].inputTensors[0].shape}]`);\n }\n }\n /**\n * Adds a layer instance on top of the layer stack.\n *\n * ```js\n * const model = tf.sequential();\n * model.add(tf.layers.dense({units: 8, inputShape: [1]}));\n * model.add(tf.layers.dense({units: 4, activation: 'relu6'}));\n * model.add(tf.layers.dense({units: 1, activation: 'relu6'}));\n * // Note that the untrained model is random at this point.\n * model.predict(tf.randomNormal([10, 1])).print();\n * ```\n * @param layer Layer instance.\n *\n * @exception ValueError In case the `layer` argument does not know its\n * input shape.\n * @exception ValueError In case the `layer` argument has multiple output\n * tensors, or is already connected somewhere else (forbidden in\n * `Sequential` models).\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n add(layer) {\n const isLayerModelInstance = layer instanceof Sequential || layer instanceof LayersModel;\n let modelLayer;\n if (isLayerModelInstance) {\n modelLayer = layer;\n if (modelLayer.outputs.length !== 1) {\n throw new ValueError('All layers in a Sequential model ' +\n 'should have a single output tensor. ' +\n 'For multi-output layers, ' +\n 'use the functional API.');\n }\n if (modelLayer.inputs.length !== 1) {\n throw new ValueError('All layers in a Sequential model ' +\n 'should have a single input tensor. ' +\n 'For multi-input layers, ' +\n 'use the functional API.');\n }\n }\n if (this.outputs.length === 0) {\n // first layer in model: check that it is an input layer\n if (layer.inboundNodes.length === 0) {\n // create an input layer\n if (layer.batchInputShape == null) {\n throw new ValueError('The first layer in a Sequential model must ' +\n 'get an `inputShape` or `batchInputShape` argument.');\n }\n // Instantiate the input layer.\n const x = Input({\n batchShape: layer.batchInputShape,\n dtype: layer.dtype,\n name: layer.name + '_input'\n });\n // This will build the current layer and create the node connecting\n // the current layer to the input layer we just created.\n layer.apply(x);\n }\n if (isLayerModelInstance) {\n this.outputs = modelLayer.outputs;\n this.inputs = modelLayer.inputs;\n }\n else {\n if (layer.inboundNodes.length !== 1) {\n throw new ValueError('A layer added to a Sequential model must not already be ' +\n `connected somewhere else. LayersModel received layer ${layer.name} ` +\n `which has ${layer.inboundNodes.length} pre-existing inbound ` +\n 'connections.');\n }\n if (layer.inboundNodes[0].outputTensors.length !== 1) {\n throw new ValueError('All layers in a Sequential model ' +\n 'should have a single output tensor. 
' +\n 'For multi-output layers, ' +\n 'use the functional API.');\n }\n this.checkShape(layer);\n this.outputs = [layer.inboundNodes[0].outputTensors[0]];\n this.inputs = getSourceInputs(this.outputs[0]);\n }\n this.inboundNodes = [];\n // We create an input node, which we will keep updated\n // as we add more layers.\n // (This call has side effects.)\n // tslint:disable-next-line:no-unused-expression\n new Node({\n outboundLayer: this,\n inboundLayers: [],\n nodeIndices: [],\n tensorIndices: [],\n inputTensors: this.inputs,\n outputTensors: this.outputs,\n // no model-level masking for now\n inputMasks: generic_utils.pyListRepeat(null, this.inputs.length),\n outputMasks: [null],\n inputShapes: this.inputs.map(x => x.shape),\n outputShapes: this.outputs[0].shape\n });\n }\n else {\n const outputTensor = layer.apply(this.outputs[0]);\n if (Array.isArray(outputTensor)) {\n throw new TypeError('All layers in a Sequential model ' +\n 'should have a single output tensor. ' +\n 'For multi-output layers, ' +\n 'use the functional API.');\n }\n this.checkShape(layer);\n this.outputs = [outputTensor];\n // update self.inbound_nodes\n this.inboundNodes[0].outputTensors = this.outputs;\n this.inboundNodes[0].outputShapes = [this.outputs[0].shape];\n }\n this.layers.push(layer);\n this.built = false;\n }\n /**\n * Removes the last layer in the model.\n *\n * @exception TypeError if there are no layers in the model.\n */\n pop() {\n if (this.layers.length === 0) {\n throw new TypeError('There are no layers in the model.');\n }\n this.layers.pop();\n if (this.layers.length === 0) {\n this.outputs = [];\n this.inboundNodes = [];\n this.outboundNodes = [];\n }\n else {\n const lastLayerIndex = this.layers.length - 1;\n this.layers[lastLayerIndex].outboundNodes = [];\n this.outputs = [this.layers[lastLayerIndex].output];\n // update self.inbound_nodes\n this.inboundNodes[0].outputTensors = this.outputs;\n this.inboundNodes[0].outputShapes = [this.outputs[0].shape];\n }\n }\n call(inputs, kwargs) {\n if (this.model == null) {\n this.build();\n }\n return this.model.call(inputs, kwargs);\n }\n build(inputShape) {\n // Call `getExactlyOneShape` without using its return value,\n // to verify that exactly one input shape is provided.\n getExactlyOneShape(inputShape);\n if (this.inputs.length === 0 || this.outputs.length === 0) {\n throw new TypeError('Sequential model cannot be built: model is empty.' 
+\n ' Add some layers first.');\n }\n // actually create the model\n this.model = new LayersModel({\n inputs: this.inputs,\n outputs: this.outputs[0],\n name: this.name + '_model'\n });\n this.model.trainable = this.trainable;\n // mirror model attributes\n this.supportsMasking = this.model.supportsMasking;\n // TODO(michaelterry): Add caches\n this.inputLayers = this.model.inputLayers;\n this.inputLayersNodeIndices = this.model.inputLayersNodeIndices;\n this.inputLayersTensorIndices = this.model.inputLayersTensorIndices;\n this.outputLayers = this.model.outputLayers;\n this.outputLayersNodeIndices = this.model.outputLayersNodeIndices;\n this.outputLayersTensorIndices = this.model.outputLayersTensorIndices;\n this.nodesByDepth = this.model.nodesByDepth;\n this.containerNodes = this.model.containerNodes;\n this.outputNames = this.model.outputNames;\n this.inputNames = this.model.inputNames;\n // TODO(michaelterry): Add feedInputNames, feedInputs, if needed.\n // TODO(michaelterry): Add callbackModel if needed.\n this.built = true;\n }\n countParams() {\n if (!this.built) {\n this.build();\n }\n return super.countParams();\n }\n /**\n * Print a text summary of the Sequential model's layers.\n *\n * The summary includes\n * - Name and type of all layers that comprise the model.\n * - Output shape(s) of the layers\n * - Number of weight parameters of each layer\n * - The total number of trainable and non-trainable parameters of the\n * model.\n *\n * ```js\n * const model = tf.sequential();\n * model.add(\n * tf.layers.dense({units: 100, inputShape: [10], activation: 'relu'}));\n * model.add(tf.layers.dense({units: 1, activation: 'sigmoid'}));\n *\n * model.summary();\n * ```\n *\n * @param lineLength Custom line length, in number of characters.\n * @param positions Custom widths of each of the columns, as either\n * fractions of `lineLength` (e.g., `[0.5, 0.75, 1]`) or absolute number\n * of characters (e.g., `[30, 50, 65]`). Each number corresponds to\n * right-most (i.e., ending) position of a column.\n * @param printFn Custom print function. Can be used to replace the default\n * `console.log`. 
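A small sketch of the `printFn` override described here, together with `countParams()` defined above: the summary lines are collected into an array instead of being printed. It assumes `tf` from the union `@tensorflow/tfjs` package.

```js
// Sketch only; assumes @tensorflow/tfjs is loaded as `tf`.
const model = tf.sequential({
  layers: [
    tf.layers.dense({units: 16, inputShape: [8], activation: 'relu'}),
    tf.layers.dense({units: 1}),
  ],
});

// Keep the default line length and column positions; only the print
// function is replaced, so each summary line lands in `lines`.
const lines = [];
model.summary(undefined, undefined, (line) => lines.push(line));
console.log(lines.join('\n'));

// Total weight parameters: (8*16 + 16) + (16*1 + 1) = 161.
console.log(model.countParams());
```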
For example, you can use `x => {}` to mute the printed\n * messages in the console.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n summary(lineLength, positions, printFn = console.log) {\n if (!this.built) {\n this.build();\n }\n super.summary(lineLength, positions, printFn);\n }\n /**\n * Sets the weights of the model.\n *\n * @param weights Should be a list of Tensors with shapes and types matching\n * the output of `model.getWeights()`.\n */\n setWeights(weights) {\n if (this.model == null) {\n this.build();\n }\n this.model.setWeights(weights);\n }\n /**\n * Returns the loss value & metrics values for the model in test mode.\n *\n * Loss and metrics are specified during `compile()`, which needs to happen\n * before calls to `evaluate()`.\n *\n * Computation is done in batches.\n *\n * ```js\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 1, inputShape: [10]})]\n * });\n * model.compile({optimizer: 'sgd', loss: 'meanSquaredError'});\n * const result = model.evaluate(tf.ones([8, 10]), tf.ones([8, 1]), {\n * batchSize: 4,\n * });\n * result.print();\n * ```\n *\n * @param x `tf.Tensor` of test data, or an `Array` of `tf.Tensor`s if the\n * model has multiple inputs.\n * @param y `tf.Tensor` of target data, or an `Array` of `tf.Tensor`s if the\n * model has multiple outputs.\n * @param args A `ModelEvaluateConfig`, containing optional fields.\n *\n * @return `Scalar` test loss (if the model has a single output and no\n * metrics) or `Array` of `Scalar`s (if the model has multiple outputs\n * and/or metrics). The attribute `model.metricsNames`\n * will give you the display labels for the scalar outputs.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n evaluate(x, y, args = {}) {\n if (!this.built) {\n throw new RuntimeError('The model needs to be compiled before being used.');\n }\n return this.model.evaluate(x, y, args);\n }\n // TODO(cais): Add code snippet below once real dataset objects are\n // available.\n /**\n * Evaluate model using a dataset object.\n *\n * Note: Unlike `evaluate()`, this method is asynchronous (`async`);\n *\n * @param dataset A dataset object. Its `iterator()` method is expected\n * to generate a dataset iterator object, the `next()` method of which\n * is expected to produce data batches for evaluation. The return value\n * of the `next()` call ought to contain a boolean `done` field and a\n * `value` field. The `value` field is expected to be an array of two\n * `tf.Tensor`s or an array of two nested `tf.Tensor` structures. The former\n * case is for models with exactly one input and one output (e.g..\n * a sequential model). The latter case is for models with multiple\n * inputs and/or multiple outputs. 
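As a usage sketch for `setWeights()` documented above: weights read from one model with `getWeights()` can be copied into a second model with the same architecture. Assumes `tf` from the union `@tensorflow/tfjs` package.

```js
// Sketch only; assumes @tensorflow/tfjs is loaded as `tf`.
const makeModel = () => tf.sequential({
  layers: [
    tf.layers.dense({units: 4, inputShape: [2], activation: 'relu'}),
    tf.layers.dense({units: 1}),
  ],
});
const source = makeModel();
const target = makeModel();

// getWeights() returns tensors in the order, shapes and dtypes that
// setWeights() expects for an identically structured model.
target.setWeights(source.getWeights());

// Both models now produce the same prediction for the same input.
const x = tf.ones([1, 2]);
source.predict(x).print();
target.predict(x).print();
```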
Of the two items in the array, the\n * first is the input feature(s) and the second is the output target(s).\n * @param args A configuration object for the dataset-based evaluation.\n * @returns Loss and metric values as an Array of `Scalar` objects.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n async evaluateDataset(dataset, args) {\n if (!this.built) {\n throw new RuntimeError('The model needs to be compiled before being used.');\n }\n return this.model.evaluateDataset(dataset, args);\n }\n /**\n * Generates output predictions for the input samples.\n *\n * Computation is done in batches.\n *\n * Note: the \"step\" mode of predict() is currently not supported.\n * This is because the TensorFow.js core backend is imperative only.\n *\n * ```js\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 1, inputShape: [10]})]\n * });\n * model.predict(tf.ones([2, 10])).print();\n * ```\n *\n * @param x The input data, as a Tensor, or an `Array` of `tf.Tensor`s if\n * the model has multiple inputs.\n * @param conifg A `ModelPredictConfig` object containing optional fields.\n *\n * @return `tf.Tensor`(s) of predictions.\n *\n * @exception ValueError In case of mismatch between the provided input data\n * and the model's expectations, or in case a stateful model receives a\n * number of samples that is not a multiple of the batch size.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n predict(x, args = {}) {\n if (this.model == null) {\n this.build();\n }\n return this.model.predict(x, args);\n }\n /**\n * Returns predictions for a single batch of samples.\n *\n * @param x: Input samples, as a Tensor, or list of Tensors (if the model\n * has multiple inputs).\n * @return Tensor(s) of predictions\n */\n predictOnBatch(x) {\n if (this.model == null) {\n this.build();\n }\n return this.model.predictOnBatch(x);\n }\n /**\n * See `LayersModel.compile`.\n *\n * @param args\n */\n compile(args) {\n this.build();\n this.model.compile(args);\n this.optimizer_ = this.model.optimizer;\n // tslint:disable-next-line:no-any\n this.isOptimizerOwned = this.model.isOptimizerOwned;\n this.loss = this.model.loss;\n this.metrics = this.model.metrics;\n // TODO(cais): Add this.lossWeights, this.sampleWeightMode,\n // this.weightedMetrics, this.targets.\n this.metricsTensors = this.model.metricsTensors;\n this.metricsNames = this.model.metricsNames;\n // TODO(cais): Add sampleWeights.\n }\n get optimizer() {\n return this.model == null ? undefined : this.model.optimizer;\n }\n set optimizer(optimizer) {\n this.model.optimizer = optimizer;\n }\n /**\n * Trains the model for a fixed number of epochs (iterations on a dataset).\n *\n * ```js\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 1, inputShape: [10]})]\n * });\n * model.compile({optimizer: 'sgd', loss: 'meanSquaredError'});\n * const history = await model.fit(tf.ones([8, 10]), tf.ones([8, 1]), {\n * batchSize: 4,\n * epochs: 3\n * });\n * console.log(history.history.loss[0]);\n * ```\n *\n * @param x `tf.Tensor` of training data, or an array of `tf.Tensor`s if the\n * model has multiple inputs. If all inputs in the model are named, you can\n * also pass a dictionary mapping input names to `tf.Tensor`s.\n * @param y `tf.Tensor` of target (label) data, or an array of `tf.Tensor`s if\n * the model has multiple outputs. 
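A short sketch contrasting `predict()`, which can split the input into mini-batches via `batchSize`, with `predictOnBatch()`, which runs the whole input as a single batch, both documented above. Assumes `tf` from the union `@tensorflow/tfjs` package.

```js
// Sketch only; assumes @tensorflow/tfjs is loaded as `tf`.
const model = tf.sequential({
  layers: [tf.layers.dense({units: 1, inputShape: [10]})],
});
const xs = tf.ones([32, 10]);

// predict() accepts a batchSize and iterates over mini-batches internally.
model.predict(xs, {batchSize: 8}).print();

// predictOnBatch() treats the entire input as one batch.
model.predictOnBatch(xs).print();
```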
If all outputs in the model are named, you\n * can also pass a dictionary mapping output names to `tf.Tensor`s.\n * @param args A `ModelFitConfig`, containing optional fields.\n *\n * @return A `History` instance. Its `history` attribute contains all\n * information collected during training.\n *\n * @exception ValueError In case of mismatch between the provided input data\n * and what the model expects.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n async fit(x, y, args = {}) {\n if (!this.built) {\n throw new RuntimeError('The model needs to be compiled before ' +\n 'being used.');\n }\n return this.model.fit(x, y, args);\n }\n /**\n * Trains the model using a dataset object.\n *\n * ```js\n * const xArray = [\n * [1, 1, 1, 1, 1, 1, 1, 1, 1],\n * [1, 1, 1, 1, 1, 1, 1, 1, 1],\n * [1, 1, 1, 1, 1, 1, 1, 1, 1],\n * [1, 1, 1, 1, 1, 1, 1, 1, 1],\n * ];\n * const yArray = [1, 1, 1, 1];\n * // Create a dataset from the JavaScript array.\n * const xDataset = tf.data.array(xArray);\n * const yDataset = tf.data.array(yArray);\n * // Zip combines the `x` and `y` Datasets into a single Dataset, the\n * // iterator of which will return an object containing of two tensors,\n * // corresponding to `x` and `y`. The call to `batch(4)` will bundle\n * // four such samples into a single object, with the same keys now pointing\n * // to tensors that hold 4 examples, organized along the batch dimension.\n * // The call to `shuffle(4)` causes each iteration through the dataset to\n * // happen in a different order. The size of the shuffle window is 4.\n * const xyDataset = tf.data.zip({xs: xDataset, ys: yDataset})\n * .batch(4)\n * .shuffle(4);\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 1, inputShape: [9]})]\n * });\n * model.compile({optimizer: 'sgd', loss: 'meanSquaredError'});\n * const history = await model.fitDataset(xyDataset, {\n * epochs: 4,\n * callbacks: {onEpochEnd: (epoch, logs) => console.log(logs.loss)}\n * });\n * ```\n *\n * @param dataset A dataset object. Its `iterator()` method is expected to\n * generate a dataset iterator object, the `next()` method of which is\n * expected to produce data batches for evaluation. The return value of the\n * `next()` call ought to contain a boolean `done` field and a `value`\n * field.\n *\n * The `value` field is expected to be an object of with fields\n * `xs` and `ys`, which point to the feature tensor and the target tensor,\n * respectively. This case is for models with exactly one input and one\n * output (e.g.. a sequential model). For example:\n * ```js\n * {value: {xs: xsTensor, ys: ysTensor}, done: false}\n * ```\n *\n * If the model has multiple inputs, the `xs` field of `value` should\n * be an object mapping input names to their respective feature tensors.\n * For example:\n * ```js\n * {\n * value: {\n * xs: {\n * input_1: xsTensor1,\n * input_2: xsTensor2\n * },\n * ys: ysTensor\n * },\n * done: false\n * }\n * ```\n * If the model has multiple outputs, the `ys` field of `value` should\n * be an object mapping output names to their respective target tensors.\n * For example:\n * ```js\n * {\n * value: {\n * xs: xsTensor,\n * ys: {\n * output_1: ysTensor1,\n * output_2: ysTensor2\n * },\n * },\n * done: false\n * }\n * ```\n * @param args A `ModelFitDatasetArgs`, containing optional fields.\n *\n * @return A `History` instance. 
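The `History` object returned by `fit()` (and by `fitDataset()`) can be inspected after training; the sketch below adds a validation split so that both training and validation losses are recorded. Assumes `tf` from the union `@tensorflow/tfjs` package.

```js
// Sketch only; assumes @tensorflow/tfjs is loaded as `tf`.
const model = tf.sequential({
  layers: [tf.layers.dense({units: 1, inputShape: [10]})],
});
model.compile({optimizer: 'sgd', loss: 'meanSquaredError'});

const history = await model.fit(tf.ones([16, 10]), tf.ones([16, 1]), {
  epochs: 3,
  // Hold out 25% of the samples; a 'val_loss' series then appears in
  // history.history alongside 'loss'.
  validationSplit: 0.25,
});

console.log('training loss per epoch:', history.history.loss);
console.log('validation loss per epoch:', history.history.val_loss);
```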
Its `history` attribute contains all\n * information collected during training.\n *\n * @doc {heading: 'Models', subheading: 'Classes', ignoreCI: true}\n */\n async fitDataset(dataset, args) {\n if (!this.built) {\n throw new RuntimeError('The model needs to be compiled before ' +\n 'being used.');\n }\n return this.model.fitDataset(dataset, args);\n }\n /**\n * Runs a single gradient update on a single batch of data.\n *\n * This method differs from `fit()` and `fitDataset()` in the following\n * regards:\n * - It operates on exactly one batch of data.\n * - It returns only the loss and matric values, instead of\n * returning the batch-by-batch loss and metric values.\n * - It doesn't support fine-grained options such as verbosity and\n * callbacks.\n *\n * @param x Input data. It could be one of the following:\n * - A `tf.Tensor`, or an Array of `tf.Tensor`s (in case the model has\n * multiple inputs).\n * - An Object mapping input names to corresponding `tf.Tensor` (if the\n * model has named inputs).\n * @param y Target darta. It could be either a `tf.Tensor` a multiple\n * `tf.Tensor`s. It should be consistent with `x`.\n * @returns Training loss or losses (in case the model has\n * multiple outputs), along with metrics (if any), as numbers.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n async trainOnBatch(x, y) {\n return this.model.trainOnBatch(x, y);\n }\n /* See parent class for JsDoc */\n /** @nocollapse */\n static fromConfig(cls, config, customObjects = {}, fastWeightInit = false) {\n let configArray;\n let extraModelConfig = {};\n if (config instanceof Array) {\n if (!(config[0].className != null) ||\n config[0]['className'] === 'Merge') {\n throw new ValueError('Legacy serialization format not supported yet.');\n }\n configArray = config;\n }\n else {\n util.assert(config['layers'] != null, () => `When the config data for a Sequential model is not an Array, ` +\n `it must be an Object that contains the 'layers' field.`);\n configArray = config['layers'];\n delete config['layers'];\n extraModelConfig = config;\n }\n const model = new cls(extraModelConfig);\n if (!(model instanceof Sequential)) {\n throw new NotImplementedError(`Sequential.fromConfig called on non-Sequential input: ${model}`);\n }\n for (const conf of configArray) {\n const customObjects = undefined;\n const layer = deserialize(conf, customObjects, fastWeightInit);\n if (fastWeightInit) {\n layer.setFastWeightInitDuringBuild(true);\n }\n model.add(layer);\n }\n return model;\n }\n /**\n * Setter used for force stopping of LayersModel.fit() (i.e., training).\n *\n * Example:\n *\n * ```js\n * const model = tf.sequential();\n * model.add(tf.layers.dense({units: 1, inputShape: [10]}));\n * model.compile({loss: 'meanSquaredError', optimizer: 'sgd'});\n * const xs = tf.ones([8, 10]);\n * const ys = tf.zeros([8, 1]);\n *\n * const history = await model.fit(xs, ys, {\n * epochs: 10,\n * callbacks: {\n * onEpochEnd: async (epoch, logs) => {\n * if (epoch === 2) {\n * model.stopTraining = true;\n * }\n * }\n * }\n * });\n *\n * // There should be only 3 values in the loss array, instead of 10 values,\n * // due to the stopping after 3 epochs.\n * console.log(history.history.loss);\n * ```\n */\n set stopTraining(stop) {\n // TODO(cais): When refactoring to remove the composition pattern happens,\n // remove this method overriding.\n if (this.model == null) {\n throw new ValueError('Cannot set the stopTraining property of a sequential model before ' +\n 'it is compiled.');\n }\n this.model.stopTraining = 
stop;\n }\n get stopTraining() {\n if (this.model == null) {\n throw new ValueError('Cannot get the stopTraining property of a sequential model before ' +\n 'it is compiled.');\n }\n return this.model.stopTraining;\n }\n // TODO(cais): Override get trainableWeights() here\n // tslint:disable-next-line:no-any\n getConfig() {\n // NOTE(cais): We override the return type of getConfig() to `any` here,\n // because the `Sequential` class is a special case among `Container`\n // subtypes in that its getConfig() method returns an Array (not a\n // dict).\n const layers = [];\n for (const layer of this.layers) {\n const dict = {};\n dict['className'] = layer.getClassName();\n dict['config'] = layer.getConfig();\n layers.push(dict);\n }\n return { name: this.name, layers };\n }\n}\n/** @nocollapse */\nSequential.className = 'Sequential';\nserialization.registerClass(Sequential);\n//# sourceMappingURL=models.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport { CallbackConstructorRegistry } from './base_callbacks';\nimport { Input, } from './engine/input_layer';\nimport { LayersModel } from './engine/training';\nimport { loadLayersModelInternal, Sequential } from './models';\n// TODO(cais): Add doc string to all the public static functions in this\n// class; include exectuable JavaScript code snippets where applicable\n// (b/74074458).\n// LayersModel and related factory methods.\n/**\n * A model is a data structure that consists of `Layers` and defines inputs\n * and outputs.\n *\n * The key difference between `tf.model` and `tf.sequential` is that\n * `tf.model` is more generic, supporting an arbitrary graph (without\n * cycles) of layers. `tf.sequential` is less generic and supports only a linear\n * stack of layers.\n *\n * When creating a `tf.LayersModel`, specify its input(s) and output(s). Layers\n * are used to wire input(s) to output(s).\n *\n * For example, the following code snippet defines a model consisting of\n * two `dense` layers, with 10 and 4 units, respectively.\n *\n * ```js\n * // Define input, which has a size of 5 (not including batch dimension).\n * const input = tf.input({shape: [5]});\n *\n * // First dense layer uses relu activation.\n * const denseLayer1 = tf.layers.dense({units: 10, activation: 'relu'});\n * // Second dense layer uses softmax activation.\n * const denseLayer2 = tf.layers.dense({units: 4, activation: 'softmax'});\n *\n * // Obtain the output symbolic tensor by applying the layers on the input.\n * const output = denseLayer2.apply(denseLayer1.apply(input));\n *\n * // Create the model based on the inputs.\n * const model = tf.model({inputs: input, outputs: output});\n *\n * // The model can be used for training, evaluation and prediction.\n * // For example, the following line runs prediction with the model on\n * // some fake data.\n * model.predict(tf.ones([2, 5])).print();\n * ```\n * See also:\n * `tf.sequential`, `tf.loadLayersModel`.\n *\n * @doc {heading: 'Models', subheading: 'Creation'}\n */\nexport function model(args) {\n return new LayersModel(args);\n}\n/**\n * Creates a `tf.Sequential` model. A sequential model is any model where the\n * outputs of one layer are the inputs to the next layer, i.e. 
the model\n * topology is a simple 'stack' of layers, with no branching or skipping.\n *\n * This means that the first layer passed to a `tf.Sequential` model should have\n * a defined input shape. What that means is that it should have received an\n * `inputShape` or `batchInputShape` argument, or for some type of layers\n * (recurrent, Dense...) an `inputDim` argument.\n *\n * The key difference between `tf.model` and `tf.sequential` is that\n * `tf.sequential` is less generic, supporting only a linear stack of layers.\n * `tf.model` is more generic and supports an arbitrary graph (without\n * cycles) of layers.\n *\n * Examples:\n *\n * ```js\n * const model = tf.sequential();\n *\n * // First layer must have an input shape defined.\n * model.add(tf.layers.dense({units: 32, inputShape: [50]}));\n * // Afterwards, TF.js does automatic shape inference.\n * model.add(tf.layers.dense({units: 4}));\n *\n * // Inspect the inferred shape of the model's output, which equals\n * // `[null, 4]`. The 1st dimension is the undetermined batch dimension; the\n * // 2nd is the output size of the model's last layer.\n * console.log(JSON.stringify(model.outputs[0].shape));\n * ```\n *\n * It is also possible to specify a batch size (with potentially undetermined\n * batch dimension, denoted by \"null\") for the first layer using the\n * `batchInputShape` key. The following example is equivalent to the above:\n *\n * ```js\n * const model = tf.sequential();\n *\n * // First layer must have a defined input shape\n * model.add(tf.layers.dense({units: 32, batchInputShape: [null, 50]}));\n * // Afterwards, TF.js does automatic shape inference.\n * model.add(tf.layers.dense({units: 4}));\n *\n * // Inspect the inferred shape of the model's output.\n * console.log(JSON.stringify(model.outputs[0].shape));\n * ```\n *\n * You can also use an `Array` of already-constructed `Layer`s to create\n * a `tf.Sequential` model:\n *\n * ```js\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 32, inputShape: [50]}),\n * tf.layers.dense({units: 4})]\n * });\n * console.log(JSON.stringify(model.outputs[0].shape));\n * ```\n *\n * @doc {heading: 'Models', subheading: 'Creation'}\n */\nexport function sequential(config) {\n return new Sequential(config);\n}\n/**\n * Load a model composed of Layer objects, including its topology and optionally\n * weights. See the Tutorial named \"How to import a Keras Model\" for usage\n * examples.\n *\n * This method is applicable to:\n *\n * 1. Models created with the `tf.layers.*`, `tf.sequential`, and\n * `tf.model` APIs of TensorFlow.js and later saved with the\n * `tf.LayersModel.save` method.\n * 2. Models converted from Keras or TensorFlow tf.keras using the\n * [tensorflowjs_converter](https://github.com/tensorflow/tfjs/tree/master/tfjs-converter).\n *\n * This mode is *not* applicable to TensorFlow `SavedModel`s or their converted\n * forms. For those models, use `tf.loadGraphModel`.\n *\n * Example 1. 
Load a model from an HTTP server.\n *\n * ```js\n * const model = await tf.loadLayersModel(\n * 'https://storage.googleapis.com/tfjs-models/tfjs/iris_v1/model.json');\n * model.summary();\n * ```\n *\n * Example 2: Save `model`'s topology and weights to browser [local\n * storage](https://developer.mozilla.org/en-US/docs/Web/API/Window/localStorage);\n * then load it back.\n *\n * ```js\n * const model = tf.sequential(\n * {layers: [tf.layers.dense({units: 1, inputShape: [3]})]});\n * console.log('Prediction from original model:');\n * model.predict(tf.ones([1, 3])).print();\n *\n * const saveResults = await model.save('localstorage://my-model-1');\n *\n * const loadedModel = await tf.loadLayersModel('localstorage://my-model-1');\n * console.log('Prediction from loaded model:');\n * loadedModel.predict(tf.ones([1, 3])).print();\n * ```\n *\n * Example 3. Saving `model`'s topology and weights to browser\n * [IndexedDB](https://developer.mozilla.org/en-US/docs/Web/API/IndexedDB_API);\n * then load it back.\n *\n * ```js\n * const model = tf.sequential(\n * {layers: [tf.layers.dense({units: 1, inputShape: [3]})]});\n * console.log('Prediction from original model:');\n * model.predict(tf.ones([1, 3])).print();\n *\n * const saveResults = await model.save('indexeddb://my-model-1');\n *\n * const loadedModel = await tf.loadLayersModel('indexeddb://my-model-1');\n * console.log('Prediction from loaded model:');\n * loadedModel.predict(tf.ones([1, 3])).print();\n * ```\n *\n * Example 4. Load a model from user-selected files from HTML\n * [file input\n * elements](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/input/file).\n *\n * ```js\n * // Note: this code snippet will not work without the HTML elements in the\n * // page\n * const jsonUpload = document.getElementById('json-upload');\n * const weightsUpload = document.getElementById('weights-upload');\n *\n * const model = await tf.loadLayersModel(\n * tf.io.browserFiles([jsonUpload.files[0], weightsUpload.files[0]]));\n * ```\n *\n * @param pathOrIOHandler Can be either of the two formats\n * 1. A string path to the `ModelAndWeightsConfig` JSON describing\n * the model in the canonical TensorFlow.js format. For file://\n * (tfjs-node-only), http:// and https:// schemas, the path can be\n * either absolute or relative.\n * 2. An `tf.io.IOHandler` object that loads model artifacts with its `load`\n * method.\n * @param options Optional configuration arguments for the model loading,\n * including:\n * - `strict`: Require that the provided weights exactly match those required\n * by the layers. Default true. 
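The `localstorage://` and `indexeddb://` examples above leave copies of the model in browser storage; the `tf.io` model-management helpers can list and remove them. A brief sketch, assuming models were previously saved under the names used in those examples:

```js
// Sketch only; assumes @tensorflow/tfjs is loaded as `tf` and that
// 'localstorage://my-model-1' was saved earlier, as in the examples above.
const models = await tf.io.listModels();
// Keys are storage URLs such as 'localstorage://my-model-1' or
// 'indexeddb://my-model-1'; values describe the saved artifacts.
console.log(Object.keys(models));

// Remove a saved model that is no longer needed.
await tf.io.removeModel('localstorage://my-model-1');
```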
Passing false means that both extra\n * weights and missing weights will be silently ignored.\n * - `onProgress`: A function of the signature `(fraction: number) => void',\n * that can be used as the progress callback for the model loading.\n * @returns A `Promise` of `tf.LayersModel`, with the topology and weights\n * loaded.\n *\n * @doc {heading: 'Models', subheading: 'Loading'}\n */\nexport function loadLayersModel(pathOrIOHandler, options) {\n if (options == null) {\n options = {};\n }\n return loadLayersModelInternal(pathOrIOHandler, options);\n}\n/**\n * Used to instantiate an input to a model as a `tf.SymbolicTensor`.\n *\n * Users should call the `input` factory function for\n * consistency with other generator functions.\n *\n * Example:\n *\n * ```js\n * // Defines a simple logistic regression model with 32 dimensional input\n * // and 3 dimensional output.\n * const x = tf.input({shape: [32]});\n * const y = tf.layers.dense({units: 3, activation: 'softmax'}).apply(x);\n * const model = tf.model({inputs: x, outputs: y});\n * model.predict(tf.ones([2, 32])).print();\n * ```\n *\n * Note: `input` is only necessary when using `model`. When using\n * `sequential`, specify `inputShape` for the first layer or use `inputLayer`\n * as the first layer.\n *\n * @doc {heading: 'Models', subheading: 'Inputs'}\n */\nexport function input(config) {\n return Input(config);\n}\nexport function registerCallbackConstructor(verbosityLevel, callbackConstructor) {\n CallbackConstructorRegistry.registerCallbackConstructor(verbosityLevel, callbackConstructor);\n}\n//# sourceMappingURL=exports.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n// Layer activation functions\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { serialization, tidy } from '@tensorflow/tfjs-core';\nimport * as K from './backend/tfjs_backend';\nimport { deserializeKerasObject } from './utils/generic_utils';\n/**\n * Base class for Activations.\n *\n * Special note: due to cross-language compatibility reasons, the\n * static readonly className field in this family of classes must be set to\n * the initialLowerCamelCase name of the activation.\n */\nexport class Activation extends serialization.Serializable {\n getConfig() {\n return {};\n }\n}\n/**\n * Exponential linear unit (ELU).\n * Reference: https://arxiv.org/abs/1511.07289\n */\nexport class Elu extends Activation {\n /**\n * Calculate the activation function.\n *\n * @param x: Input.\n * @param alpha: Scaling factor the negative section.\n * @return Output of the ELU activation.\n */\n apply(x, alpha = 1) {\n return K.elu(x, alpha);\n }\n}\n/** @nocollapse */\nElu.className = 'elu';\nserialization.registerClass(Elu);\n/**\n * Scaled Exponential Linear Unit. 
(Klambauer et al., 2017).\n * Reference: Self-Normalizing Neural Networks, https://arxiv.org/abs/1706.02515\n * Notes:\n * - To be used together with the initialization \"lecunNormal\".\n * - To be used together with the dropout variant \"AlphaDropout\".\n */\nexport class Selu extends Activation {\n apply(x) {\n return tfc.selu(x);\n }\n}\n/** @nocollapse */\nSelu.className = 'selu';\nserialization.registerClass(Selu);\n/**\n * Rectified linear unit\n */\nexport class Relu extends Activation {\n apply(x) {\n return tfc.relu(x);\n }\n}\n/** @nocollapse */\nRelu.className = 'relu';\nserialization.registerClass(Relu);\n/**\n * Rectified linear unit activation maxing out at 6.0.\n */\nexport class Relu6 extends Activation {\n apply(x) {\n return tidy(() => tfc.minimum(6.0, tfc.relu(x)));\n }\n}\n/** @nocollapse */\nRelu6.className = 'relu6';\nserialization.registerClass(Relu6);\n//* Linear activation (no-op) */\nexport class Linear extends Activation {\n apply(x) {\n return x;\n }\n}\n/** @nocollapse */\nLinear.className = 'linear';\nserialization.registerClass(Linear);\n/**\n * Sigmoid activation function.\n */\nexport class Sigmoid extends Activation {\n apply(x) {\n return tfc.sigmoid(x);\n }\n}\n/** @nocollapse */\nSigmoid.className = 'sigmoid';\nserialization.registerClass(Sigmoid);\n/**\n * Segment-wise linear approximation of sigmoid.\n */\nexport class HardSigmoid extends Activation {\n apply(x) {\n return K.hardSigmoid(x);\n }\n}\n/** @nocollapse */\nHardSigmoid.className = 'hardSigmoid';\nserialization.registerClass(HardSigmoid);\n/**\n * Softplus activation function.\n */\nexport class Softplus extends Activation {\n apply(x) {\n return tfc.softplus(x);\n }\n}\n/** @nocollapse */\nSoftplus.className = 'softplus';\nserialization.registerClass(Softplus);\n/**\n * Softsign activation function.\n */\nexport class Softsign extends Activation {\n apply(x) {\n return K.softsign(x);\n }\n}\n/** @nocollapse */\nSoftsign.className = 'softsign';\nserialization.registerClass(Softsign);\n/**\n * Hyperbolic tangent function.\n */\nexport class Tanh extends Activation {\n apply(x) {\n return tfc.tanh(x);\n }\n}\n/** @nocollapse */\nTanh.className = 'tanh';\nserialization.registerClass(Tanh);\n/**\n * Softmax activation function\n */\nexport class Softmax extends Activation {\n /**\n * Calculate the activation function.\n *\n * @param x Tensor.\n * @param axis Integer, axis along which the softmax normalization is applied.\n * Invalid if < 2, as softmax across 1 (the batch dimension) is assumed to be\n * an error.\n *\n * @returns a Tensor of the same shape as x\n *\n * @throws ValueError: In case `dim(x) < 2`.\n */\n apply(x, axis = (-1)) {\n return tfc.softmax(x, axis);\n }\n}\n/** @nocollapse */\nSoftmax.className = 'softmax';\nserialization.registerClass(Softmax);\n/**\n * Log softmax activation function\n */\nexport class LogSoftmax extends Activation {\n /**\n * Calculate the activation function of log softmax:\n * log( exp(x_i) / sum(exp(x)) )\n *\n * @param x Tensor.\n * @param axis Integer, axis along which the softmax normalization is applied.\n * Invalid if < 2, as softmax across 1 (the batch dimension) is assumed to be\n * an error.\n *\n * @returns a Tensor of the same shape as x\n *\n * @throws ValueError: In case `dim(x) < 2`.\n */\n apply(x, axis = (-1)) {\n return tfc.logSoftmax(x, axis);\n }\n}\n/** @nocollapse */\nLogSoftmax.className = 'logSoftmax';\nserialization.registerClass(LogSoftmax);\n/**\n * Swish activation function\n */\nexport class Swish extends Activation {\n /**\n 
* Calculate the activation function.\n *\n * @param x Tensor.\n * @param alpha Scaling factor for the sigmoid function.\n * @returns a Tensor of the same shape as x\n */\n apply(x, alpha = 1) {\n return tidy(() => tfc.sigmoid(x.mul(alpha)).mul(x));\n }\n}\n/** @nocollapse */\nSwish.className = 'swish';\nserialization.registerClass(Swish);\nexport function serializeActivation(activation) {\n return activation.getClassName();\n}\nexport function deserializeActivation(config, customObjects = {}) {\n return deserializeKerasObject(config, serialization.SerializationMap.getMap().classNameMap, customObjects, 'activation');\n}\nexport function getActivation(identifier) {\n if (identifier == null) {\n const config = {};\n config['className'] = 'linear';\n config['config'] = {};\n return deserializeActivation(config);\n }\n if (typeof identifier === 'string') {\n const config = {};\n config['className'] = identifier;\n config['config'] = {};\n return deserializeActivation(config);\n }\n else if (identifier instanceof Activation) {\n return identifier;\n }\n else {\n return deserializeActivation(identifier);\n }\n}\n//# sourceMappingURL=activations.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* original source: keras/regularizers.py */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { abs, add, serialization, sum, tidy, zeros } from '@tensorflow/tfjs-core';\nimport * as K from './backend/tfjs_backend';\nimport { deserializeKerasObject, serializeKerasObject } from './utils/generic_utils';\nfunction assertObjectArgs(args) {\n if (args != null && typeof args !== 'object') {\n throw new Error(`Argument to L1L2 regularizer's constructor is expected to be an ` +\n `object, but received: ${args}`);\n }\n}\n/**\n * Regularizer base class.\n */\nexport class Regularizer extends serialization.Serializable {\n}\nexport class L1L2 extends Regularizer {\n constructor(args) {\n super();\n assertObjectArgs(args);\n this.l1 = args == null || args.l1 == null ? 0.01 : args.l1;\n this.l2 = args == null || args.l2 == null ? 0.01 : args.l2;\n this.hasL1 = this.l1 !== 0;\n this.hasL2 = this.l2 !== 0;\n }\n /**\n * Porting note: Renamed from __call__.\n * @param x Variable of which to calculate the regularization score.\n */\n apply(x) {\n return tidy(() => {\n let regularization = zeros([1]);\n if (this.hasL1) {\n regularization = add(regularization, sum(tfc.mul(this.l1, abs(x))));\n }\n if (this.hasL2) {\n regularization =\n add(regularization, sum(tfc.mul(this.l2, K.square(x))));\n }\n return regularization.asScalar();\n });\n }\n getConfig() {\n return { 'l1': this.l1, 'l2': this.l2 };\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls({ l1: config['l1'], l2: config['l2'] });\n }\n}\n/** @nocollapse */\nL1L2.className = 'L1L2';\nserialization.registerClass(L1L2);\nexport function l1(args) {\n assertObjectArgs(args);\n return new L1L2({ l1: args != null ? args.l1 : null, l2: 0 });\n}\nexport function l2(args) {\n assertObjectArgs(args);\n return new L1L2({ l2: args != null ? 
args.l2 : null, l1: 0 });\n}\n// Maps the JavaScript-like identifier keys to the corresponding keras symbols.\nexport const REGULARIZER_IDENTIFIER_REGISTRY_SYMBOL_MAP = {\n 'l1l2': 'L1L2'\n};\nexport function serializeRegularizer(constraint) {\n return serializeKerasObject(constraint);\n}\nexport function deserializeRegularizer(config, customObjects = {}) {\n return deserializeKerasObject(config, serialization.SerializationMap.getMap().classNameMap, customObjects, 'regularizer');\n}\nexport function getRegularizer(identifier) {\n if (identifier == null) {\n return null;\n }\n if (typeof identifier === 'string') {\n const className = identifier in REGULARIZER_IDENTIFIER_REGISTRY_SYMBOL_MAP ?\n REGULARIZER_IDENTIFIER_REGISTRY_SYMBOL_MAP[identifier] :\n identifier;\n const config = { className, config: {} };\n return deserializeRegularizer(config);\n }\n else if (identifier instanceof Regularizer) {\n return identifier;\n }\n else {\n return deserializeRegularizer(identifier);\n }\n}\n//# sourceMappingURL=regularizers.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Advanced activation layers.\n */\nimport { clipByValue, elu, leakyRelu, prelu, relu, serialization } from '@tensorflow/tfjs-core';\nimport { Softmax as softmaxActivation } from '../activations';\nimport { cast } from '../backend/tfjs_backend';\nimport { getConstraint, serializeConstraint } from '../constraints';\nimport { InputSpec, Layer } from '../engine/topology';\nimport { NotImplementedError, ValueError } from '../errors';\nimport { getInitializer, serializeInitializer } from '../initializers';\nimport { getRegularizer, serializeRegularizer } from '../regularizers';\nimport { getExactlyOneShape, getExactlyOneTensor } from '../utils/types_utils';\nexport class ReLU extends Layer {\n constructor(args) {\n super(args == null ? {} : args);\n this.supportsMasking = true;\n if (args != null) {\n this.maxValue = args.maxValue;\n }\n }\n call(inputs, kwargs) {\n inputs = getExactlyOneTensor(inputs);\n let output = relu(inputs);\n if (this.maxValue != null) {\n output = clipByValue(output, 0, this.maxValue);\n }\n return output;\n }\n computeOutputShape(inputShape) {\n return inputShape;\n }\n getConfig() {\n const config = { maxValue: this.maxValue };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nReLU.className = 'ReLU';\nserialization.registerClass(ReLU);\nexport class LeakyReLU extends Layer {\n constructor(args) {\n super(args == null ? {} : args);\n this.DEFAULT_ALPHA = 0.3;\n if (args == null) {\n args = {};\n }\n this.alpha = args.alpha == null ? this.DEFAULT_ALPHA : args.alpha;\n }\n call(inputs, kwargs) {\n const x = getExactlyOneTensor(inputs);\n return leakyRelu(x, this.alpha);\n }\n computeOutputShape(inputShape) {\n return inputShape;\n }\n getConfig() {\n const config = { alpha: this.alpha };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nLeakyReLU.className = 'LeakyReLU';\nserialization.registerClass(LeakyReLU);\nexport class PReLU extends Layer {\n constructor(args) {\n super(args == null ? 
{} : args);\n this.DEFAULT_ALPHA_INITIALIZER = 'zeros';\n if (args == null) {\n args = {};\n }\n this.supportsMasking = true;\n this.alphaInitializer =\n getInitializer(args.alphaInitializer || this.DEFAULT_ALPHA_INITIALIZER);\n this.alphaRegularizer = getRegularizer(args.alphaRegularizer);\n this.alphaConstraint = getConstraint(args.alphaConstraint);\n if (args.sharedAxes == null) {\n this.sharedAxes = null;\n }\n else if (Array.isArray(args.sharedAxes)) {\n this.sharedAxes = args.sharedAxes;\n }\n else if (typeof args.sharedAxes === 'number') {\n this.sharedAxes = [args.sharedAxes];\n }\n else {\n throw new ValueError(`Expected sharedAxes to be a number or an array of numbers, ` +\n `but got ${args.sharedAxes}`);\n }\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const paramShape = inputShape.slice(1);\n if (this.sharedAxes != null) {\n for (const i of this.sharedAxes) {\n paramShape[i - 1] = 1;\n }\n }\n this.alpha = this.addWeight('alpha', paramShape, 'float32', this.alphaInitializer, this.alphaRegularizer, true, this.alphaConstraint);\n // Set input spec.\n const axes = {};\n if (this.sharedAxes != null) {\n for (let i = 1; i < inputShape.length; ++i) {\n axes[i] = inputShape[i];\n }\n }\n this.inputSpec = [new InputSpec({\n ndim: inputShape.length,\n axes,\n })];\n this.built = true;\n }\n call(inputs, kwargs) {\n inputs = getExactlyOneTensor(inputs);\n return prelu(inputs, this.alpha.read());\n }\n getConfig() {\n const config = {\n alphaInitializer: serializeInitializer(this.alphaInitializer),\n alphaRegularizer: serializeRegularizer(this.alphaRegularizer),\n alphaConstraint: serializeConstraint(this.alphaConstraint),\n sharedAxes: this.sharedAxes\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nPReLU.className = 'PReLU';\nserialization.registerClass(PReLU);\nexport class ELU extends Layer {\n constructor(args) {\n super(args == null ? {} : args);\n this.DEFAULT_ALPHA = 1.0;\n if (args == null) {\n args = {};\n }\n if (args.alpha != null && args.alpha !== this.DEFAULT_ALPHA) {\n throw new NotImplementedError(`Non-default alpha value (${args.alpha}) is not supported by the ` +\n `ELU layer yet.`);\n }\n this.alpha = args.alpha == null ? this.DEFAULT_ALPHA : args.alpha;\n }\n call(inputs, kwargs) {\n const x = getExactlyOneTensor(inputs);\n return elu(x);\n }\n computeOutputShape(inputShape) {\n return inputShape;\n }\n getConfig() {\n const config = { alpha: this.alpha };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nELU.className = 'ELU';\nserialization.registerClass(ELU);\nexport class ThresholdedReLU extends Layer {\n constructor(args) {\n super(args == null ? {} : args);\n this.DEFAULT_THETA = 1.0;\n if (args == null) {\n args = {};\n }\n this.theta = args.theta == null ? this.DEFAULT_THETA : args.theta;\n }\n call(inputs, kwargs) {\n const x = getExactlyOneTensor(inputs);\n return x.mul(cast(x.greater(this.theta), 'float32'));\n }\n computeOutputShape(inputShape) {\n return inputShape;\n }\n getConfig() {\n const config = { theta: this.theta };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nThresholdedReLU.className = 'ThresholdedReLU';\nserialization.registerClass(ThresholdedReLU);\nexport class Softmax extends Layer {\n constructor(args) {\n super(args == null ? 
{} : args);\n this.DEFAULT_AXIS = 1.0;\n if (args == null) {\n args = {};\n }\n this.softmax = new softmaxActivation().apply;\n this.axis = args.axis == null ? this.DEFAULT_AXIS : args.axis;\n }\n call(inputs, kwargs) {\n const x = getExactlyOneTensor(inputs);\n return this.softmax(x, this.axis);\n }\n computeOutputShape(inputShape) {\n return inputShape;\n }\n getConfig() {\n const config = { axis: this.axis };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nSoftmax.className = 'Softmax';\nserialization.registerClass(Softmax);\n//# sourceMappingURL=advanced_activations.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport { ValueError } from '../errors';\nimport { pyListRepeat } from './generic_utils';\nimport { isInteger, max } from './math_utils';\n/**\n * Transforms a single number of array of numbers into an array of numbers.\n * @param value\n * @param n: The size of the tuple to be returned.\n * @param name: Name of the parameter, used for generating error messages.\n * @returns An array of numbers.\n */\nexport function normalizeArray(value, n, name) {\n if (typeof value === 'number') {\n return pyListRepeat(value, n);\n }\n else {\n if (value.length !== n) {\n throw new ValueError(`The ${name} argument must be an integer or tuple of ${n} integers.` +\n ` Received: ${value.length} elements.`);\n }\n for (let i = 0; i < n; ++i) {\n const singleValue = value[i];\n if (!isInteger(singleValue)) {\n throw new ValueError(`The ${name} argument must be an integer or tuple of ${n}` +\n ` integers. 
Received: ${JSON.stringify(value)} including a` +\n ` non-integer number ${singleValue}`);\n }\n }\n return value;\n }\n}\n/**\n * Determines output length of a convolution given input length.\n * @param inputLength\n * @param filterSize\n * @param padding\n * @param stride\n * @param dilation: dilation rate.\n */\nexport function convOutputLength(inputLength, filterSize, padding, stride, dilation = 1) {\n if (inputLength == null) {\n return inputLength;\n }\n const dilatedFilterSize = filterSize + (filterSize - 1) * (dilation - 1);\n let outputLength;\n if (padding === 'same') {\n outputLength = inputLength;\n }\n else { // VALID\n outputLength = inputLength - dilatedFilterSize + 1;\n }\n return Math.floor((outputLength + stride - 1) / stride);\n}\nexport function deconvLength(dimSize, strideSize, kernelSize, padding) {\n if (dimSize == null) {\n return null;\n }\n if (padding === 'valid') {\n dimSize = dimSize * strideSize + max([kernelSize - strideSize, 0]);\n }\n else if (padding === 'same') {\n dimSize = dimSize * strideSize;\n }\n else {\n throw new ValueError(`Unsupport padding mode: ${padding}.`);\n }\n return dimSize;\n}\n//# sourceMappingURL=conv_utils.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * TensorFlow.js Layers: Convolutional Layers\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { serialization, tidy } from '@tensorflow/tfjs-core';\nimport { getActivation, serializeActivation } from '../activations';\nimport { imageDataFormat } from '../backend/common';\nimport * as K from '../backend/tfjs_backend';\nimport { checkDataFormat, checkPaddingMode } from '../common';\nimport { getConstraint, serializeConstraint } from '../constraints';\nimport { InputSpec, Layer } from '../engine/topology';\nimport { NotImplementedError, ValueError } from '../errors';\nimport { getInitializer, serializeInitializer } from '../initializers';\nimport { getRegularizer, serializeRegularizer } from '../regularizers';\nimport { convOutputLength, deconvLength, normalizeArray } from '../utils/conv_utils';\nimport * as generic_utils from '../utils/generic_utils';\nimport { getExactlyOneShape, getExactlyOneTensor } from '../utils/types_utils';\n/**\n * Transpose and cast the input before the conv2d.\n * @param x Input image tensor.\n * @param dataFormat\n */\nexport function preprocessConv2DInput(x, dataFormat) {\n // TODO(cais): Cast type to float32 if not.\n return tidy(() => {\n checkDataFormat(dataFormat);\n if (dataFormat === 'channelsFirst') {\n return tfc.transpose(x, [0, 2, 3, 1]); // NCHW -> NHWC.\n }\n else {\n return x;\n }\n });\n}\n/**\n * Transpose and cast the input before the conv3d.\n * @param x Input image tensor.\n * @param dataFormat\n */\nexport function preprocessConv3DInput(x, dataFormat) {\n return tidy(() => {\n checkDataFormat(dataFormat);\n if (dataFormat === 'channelsFirst') {\n return tfc.transpose(x, [0, 2, 3, 4, 1]); // NCDHW -> NDHWC.\n }\n else {\n return x;\n }\n });\n}\n/**\n * 1D-convolution with bias added.\n *\n * Porting Note: This function does not exist in the Python Keras backend.\n * It is exactly the same as `conv2d`, except the added `bias`.\n *\n * @param x Input tensor, rank-3, of shape `[batchSize, width, inChannels]`.\n * @param kernel Kernel, rank-3, of shape `[filterWidth, inDepth, 
outDepth]`.\n * @param bias Bias, rank-3, of shape `[outDepth]`.\n * @param strides\n * @param padding Padding mode.\n * @param dataFormat Data format.\n * @param dilationRate\n * @returns The result of the 1D convolution.\n * @throws ValueError, if `x`, `kernel` or `bias` is not of the correct rank.\n */\nexport function conv1dWithBias(x, kernel, bias, strides = 1, padding = 'valid', dataFormat, dilationRate = 1) {\n return tidy(() => {\n if (dataFormat == null) {\n dataFormat = imageDataFormat();\n }\n checkDataFormat(dataFormat);\n // Check the ranks of x, kernel and bias.\n if (x.shape.length !== 3) {\n throw new ValueError(`The input of a conv1dWithBias operation should be 3, but is ` +\n `${x.shape.length} instead.`);\n }\n if (kernel.shape.length !== 3) {\n throw new ValueError(`The kernel for a conv1dWithBias operation should be 3, but is ` +\n `${kernel.shape.length} instead`);\n }\n if (bias != null && bias.shape.length !== 1) {\n throw new ValueError(`The bias for a conv1dWithBias operation should be 1, but is ` +\n `${kernel.shape.length} instead`);\n }\n // TODO(cais): Support CAUSAL padding mode.\n if (dataFormat === 'channelsFirst') {\n x = tfc.transpose(x, [0, 2, 1]); // NCW -> NWC.\n }\n if (padding === 'causal') {\n throw new NotImplementedError('The support for CAUSAL padding mode in conv1dWithBias is not ' +\n 'implemented yet.');\n }\n let y = tfc.conv1d(x, kernel, strides, padding === 'same' ? 'same' : 'valid', 'NWC', dilationRate);\n if (bias != null) {\n y = K.biasAdd(y, bias);\n }\n return y;\n });\n}\n/**\n * 1D-convolution.\n *\n * @param x Input tensor, rank-3, of shape `[batchSize, width, inChannels]`.\n * @param kernel Kernel, rank-3, of shape `[filterWidth, inDepth, outDepth]`.s\n * @param strides\n * @param padding Padding mode.\n * @param dataFormat Data format.\n * @param dilationRate\n * @returns The result of the 1D convolution.\n * @throws ValueError, if `x`, `kernel` or `bias` is not of the correct rank.\n */\nexport function conv1d(x, kernel, strides = 1, padding = 'valid', dataFormat, dilationRate = 1) {\n return tidy(() => {\n checkDataFormat(dataFormat);\n return conv1dWithBias(x, kernel, null, strides, padding, dataFormat, dilationRate);\n });\n}\n/**\n * 2D Convolution\n * @param x\n * @param kernel kernel of the convolution.\n * @param strides strides array.\n * @param padding padding mode. Default to 'valid'.\n * @param dataFormat data format. Defaults to 'channelsLast'.\n * @param dilationRate dilation rate array.\n * @returns Result of the 2D pooling.\n */\nexport function conv2d(x, kernel, strides = [1, 1], padding = 'valid', dataFormat, dilationRate) {\n return tidy(() => {\n checkDataFormat(dataFormat);\n return conv2dWithBiasActivation(x, kernel, null, strides, padding, dataFormat, dilationRate);\n });\n}\n/**\n * 2D Convolution with an added bias and optional activation.\n * Note: This function does not exist in the Python Keras Backend. 
This function\n * is exactly the same as `conv2d`, except the added `bias`.\n */\nexport function conv2dWithBiasActivation(x, kernel, bias, strides = [1, 1], padding = 'valid', dataFormat, dilationRate, activation = null) {\n return tidy(() => {\n if (dataFormat == null) {\n dataFormat = imageDataFormat();\n }\n checkDataFormat(dataFormat);\n if (x.rank !== 3 && x.rank !== 4) {\n throw new ValueError(`conv2dWithBiasActivation expects input to be of rank 3 or 4, ` +\n `but received ${x.rank}.`);\n }\n if (kernel.rank !== 3 && kernel.rank !== 4) {\n throw new ValueError(`conv2dWithBiasActivation expects kernel to be of rank 3 or 4, ` +\n `but received ${x.rank}.`);\n }\n let y = preprocessConv2DInput(x, dataFormat);\n if (padding === 'causal') {\n throw new NotImplementedError('The support for CAUSAL padding mode in conv1dWithBias is not ' +\n 'implemented yet.');\n }\n y = tfc.fused.conv2d({\n x: y,\n filter: kernel,\n strides: strides,\n pad: padding === 'same' ? 'same' : 'valid',\n dilations: dilationRate,\n dataFormat: 'NHWC',\n bias,\n activation\n });\n if (dataFormat === 'channelsFirst') {\n y = tfc.transpose(y, [0, 3, 1, 2]);\n }\n return y;\n });\n}\n/**\n * 3D Convolution.\n * @param x\n * @param kernel kernel of the convolution.\n * @param strides strides array.\n * @param padding padding mode. Default to 'valid'.\n * @param dataFormat data format. Defaults to 'channelsLast'.\n * @param dilationRate dilation rate array.\n * @returns Result of the 3D convolution.\n */\nexport function conv3d(x, kernel, strides = [1, 1, 1], padding = 'valid', dataFormat, dilationRate) {\n return tidy(() => {\n checkDataFormat(dataFormat);\n return conv3dWithBias(x, kernel, null, strides, padding, dataFormat, dilationRate);\n });\n}\n/**\n * 3D Convolution with an added bias.\n * Note: This function does not exist in the Python Keras Backend. This function\n * is exactly the same as `conv3d`, except the added `bias`.\n */\nexport function conv3dWithBias(x, kernel, bias, strides = [1, 1, 1], padding = 'valid', dataFormat, dilationRate) {\n return tidy(() => {\n if (dataFormat == null) {\n dataFormat = imageDataFormat();\n }\n checkDataFormat(dataFormat);\n if (x.rank !== 4 && x.rank !== 5) {\n throw new ValueError(`conv3dWithBias expects input to be of rank 4 or 5, but received ` +\n `${x.rank}.`);\n }\n if (kernel.rank !== 4 && kernel.rank !== 5) {\n throw new ValueError(`conv3dWithBias expects kernel to be of rank 4 or 5, but received ` +\n `${x.rank}.`);\n }\n let y = preprocessConv3DInput(x, dataFormat);\n if (padding === 'causal') {\n throw new NotImplementedError('The support for CAUSAL padding mode in conv3dWithBias is not ' +\n 'implemented yet.');\n }\n y = tfc.conv3d(y, kernel, strides, padding === 'same' ? 
'same' : 'valid', 'NDHWC', dilationRate);\n if (bias != null) {\n y = K.biasAdd(y, bias);\n }\n if (dataFormat === 'channelsFirst') {\n y = tfc.transpose(y, [0, 4, 1, 2, 3]);\n }\n return y;\n });\n}\n/**\n * Abstract convolution layer.\n */\nexport class BaseConv extends Layer {\n constructor(rank, args) {\n super(args);\n this.bias = null;\n this.DEFAULT_KERNEL_INITIALIZER = 'glorotNormal';\n this.DEFAULT_BIAS_INITIALIZER = 'zeros';\n BaseConv.verifyArgs(args);\n this.rank = rank;\n generic_utils.assertPositiveInteger(this.rank, 'rank');\n if (this.rank !== 1 && this.rank !== 2 && this.rank !== 3) {\n throw new NotImplementedError(`Convolution layer for rank other than 1, 2, or 3 (${this.rank}) is ` +\n `not implemented yet.`);\n }\n this.kernelSize = normalizeArray(args.kernelSize, rank, 'kernelSize');\n this.strides = normalizeArray(args.strides == null ? 1 : args.strides, rank, 'strides');\n this.padding = args.padding == null ? 'valid' : args.padding;\n checkPaddingMode(this.padding);\n this.dataFormat =\n args.dataFormat == null ? 'channelsLast' : args.dataFormat;\n checkDataFormat(this.dataFormat);\n this.activation = getActivation(args.activation);\n this.useBias = args.useBias == null ? true : args.useBias;\n this.biasInitializer =\n getInitializer(args.biasInitializer || this.DEFAULT_BIAS_INITIALIZER);\n this.biasConstraint = getConstraint(args.biasConstraint);\n this.biasRegularizer = getRegularizer(args.biasRegularizer);\n this.activityRegularizer = getRegularizer(args.activityRegularizer);\n this.dilationRate = normalizeArray(args.dilationRate == null ? 1 : args.dilationRate, rank, 'dilationRate');\n if (this.rank === 1 &&\n (Array.isArray(this.dilationRate) && this.dilationRate.length !== 1)) {\n throw new ValueError(`dilationRate must be a number or an array of a single number ` +\n `for 1D convolution, but received ` +\n `${JSON.stringify(this.dilationRate)}`);\n }\n else if (this.rank === 2) {\n if (typeof this.dilationRate === 'number') {\n this.dilationRate = [this.dilationRate, this.dilationRate];\n }\n else if (this.dilationRate.length !== 2) {\n throw new ValueError(`dilationRate must be a number or array of two numbers for 2D ` +\n `convolution, but received ${JSON.stringify(this.dilationRate)}`);\n }\n }\n else if (this.rank === 3) {\n if (typeof this.dilationRate === 'number') {\n this.dilationRate =\n [this.dilationRate, this.dilationRate, this.dilationRate];\n }\n else if (this.dilationRate.length !== 3) {\n throw new ValueError(`dilationRate must be a number or array of three numbers for 3D ` +\n `convolution, but received ${JSON.stringify(this.dilationRate)}`);\n }\n }\n }\n static verifyArgs(args) {\n // Check config.kernelSize type and shape.\n generic_utils.assert('kernelSize' in args, `required key 'kernelSize' not in config`);\n if (typeof args.kernelSize !== 'number' &&\n !generic_utils.checkArrayTypeAndLength(args.kernelSize, 'number', 1, 3)) {\n throw new ValueError(`BaseConv expects config.kernelSize to be number or number[] with ` +\n `length 1, 2, or 3, but received ${JSON.stringify(args.kernelSize)}.`);\n }\n }\n getConfig() {\n const config = {\n kernelSize: this.kernelSize,\n strides: this.strides,\n padding: this.padding,\n dataFormat: this.dataFormat,\n dilationRate: this.dilationRate,\n activation: serializeActivation(this.activation),\n useBias: this.useBias,\n biasInitializer: serializeInitializer(this.biasInitializer),\n biasRegularizer: serializeRegularizer(this.biasRegularizer),\n activityRegularizer: 
serializeRegularizer(this.activityRegularizer),\n biasConstraint: serializeConstraint(this.biasConstraint)\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/**\n * Abstract nD convolution layer. Ancestor of convolution layers which reduce\n * across channels, i.e., Conv1D and Conv2D, but not DepthwiseConv2D.\n */\nexport class Conv extends BaseConv {\n constructor(rank, args) {\n super(rank, args);\n this.kernel = null;\n Conv.verifyArgs(args);\n this.filters = args.filters;\n generic_utils.assertPositiveInteger(this.filters, 'filters');\n this.kernelInitializer = getInitializer(args.kernelInitializer || this.DEFAULT_KERNEL_INITIALIZER);\n this.kernelConstraint = getConstraint(args.kernelConstraint);\n this.kernelRegularizer = getRegularizer(args.kernelRegularizer);\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const channelAxis = this.dataFormat === 'channelsFirst' ? 1 : inputShape.length - 1;\n if (inputShape[channelAxis] == null) {\n throw new ValueError(`The channel dimension of the input should be defined. ` +\n `Found ${inputShape[channelAxis]}`);\n }\n const inputDim = inputShape[channelAxis];\n const kernelShape = this.kernelSize.concat([inputDim, this.filters]);\n this.kernel = this.addWeight('kernel', kernelShape, null, this.kernelInitializer, this.kernelRegularizer, true, this.kernelConstraint);\n if (this.useBias) {\n this.bias = this.addWeight('bias', [this.filters], null, this.biasInitializer, this.biasRegularizer, true, this.biasConstraint);\n }\n this.inputSpec = [{ ndim: this.rank + 2, axes: { [channelAxis]: inputDim } }];\n this.built = true;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n inputs = getExactlyOneTensor(inputs);\n let outputs;\n const biasValue = this.bias == null ? null : this.bias.read();\n const fusedActivationName = generic_utils.mapActivationToFusedKernel(this.activation.getClassName());\n if (fusedActivationName != null && this.rank === 2) {\n outputs = conv2dWithBiasActivation(inputs, this.kernel.read(), biasValue, this.strides, this.padding, this.dataFormat, this.dilationRate, fusedActivationName);\n }\n else {\n if (this.rank === 1) {\n outputs = conv1dWithBias(inputs, this.kernel.read(), biasValue, this.strides[0], this.padding, this.dataFormat, this.dilationRate[0]);\n }\n else if (this.rank === 2) {\n // TODO(cais): Move up to constructor.\n outputs = conv2dWithBiasActivation(inputs, this.kernel.read(), biasValue, this.strides, this.padding, this.dataFormat, this.dilationRate);\n }\n else if (this.rank === 3) {\n outputs = conv3dWithBias(inputs, this.kernel.read(), biasValue, this.strides, this.padding, this.dataFormat, this.dilationRate);\n }\n else {\n throw new NotImplementedError('convolutions greater than 3D are not implemented yet.');\n }\n if (this.activation != null) {\n outputs = this.activation.apply(outputs);\n }\n }\n return outputs;\n });\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const newSpace = [];\n const space = (this.dataFormat === 'channelsLast') ?\n inputShape.slice(1, inputShape.length - 1) :\n inputShape.slice(2);\n for (let i = 0; i < space.length; ++i) {\n const newDim = convOutputLength(space[i], this.kernelSize[i], this.padding, this.strides[i], typeof this.dilationRate === 'number' ? 
this.dilationRate :\n this.dilationRate[i]);\n newSpace.push(newDim);\n }\n let outputShape = [inputShape[0]];\n if (this.dataFormat === 'channelsLast') {\n outputShape = outputShape.concat(newSpace);\n outputShape.push(this.filters);\n }\n else {\n outputShape.push(this.filters);\n outputShape = outputShape.concat(newSpace);\n }\n return outputShape;\n }\n getConfig() {\n const config = {\n filters: this.filters,\n kernelInitializer: serializeInitializer(this.kernelInitializer),\n kernelRegularizer: serializeRegularizer(this.kernelRegularizer),\n kernelConstraint: serializeConstraint(this.kernelConstraint)\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n static verifyArgs(args) {\n // Check config.filters type, shape, and value.\n if (!('filters' in args) || typeof args.filters !== 'number' ||\n args.filters < 1) {\n throw new ValueError(`Convolution layer expected config.filters to be a 'number' > 0 ` +\n `but got ${JSON.stringify(args.filters)}`);\n }\n }\n}\nexport class Conv2D extends Conv {\n constructor(args) {\n super(2, args);\n Conv2D.verifyArgs(args);\n }\n getConfig() {\n const config = super.getConfig();\n delete config['rank'];\n return config;\n }\n static verifyArgs(args) {\n // config.kernelSize must be a number or array of numbers.\n if ((typeof args.kernelSize !== 'number') &&\n !generic_utils.checkArrayTypeAndLength(args.kernelSize, 'number', 1, 2)) {\n throw new ValueError(`Conv2D expects config.kernelSize to be number or number[] with ` +\n `length 1 or 2, but received ${JSON.stringify(args.kernelSize)}.`);\n }\n }\n}\n/** @nocollapse */\nConv2D.className = 'Conv2D';\nserialization.registerClass(Conv2D);\nexport class Conv3D extends Conv {\n constructor(args) {\n super(3, args);\n Conv3D.verifyArgs(args);\n }\n getConfig() {\n const config = super.getConfig();\n delete config['rank'];\n return config;\n }\n static verifyArgs(args) {\n // config.kernelSize must be a number or array of numbers.\n if (typeof args.kernelSize !== 'number') {\n if (!(Array.isArray(args.kernelSize) &&\n (args.kernelSize.length === 1 || args.kernelSize.length === 3))) {\n throw new ValueError(`Conv3D expects config.kernelSize to be number or` +\n ` [number, number, number], but received ${JSON.stringify(args.kernelSize)}.`);\n }\n }\n }\n}\n/** @nocollapse */\nConv3D.className = 'Conv3D';\nserialization.registerClass(Conv3D);\nexport class Conv2DTranspose extends Conv2D {\n constructor(args) {\n super(args);\n this.inputSpec = [new InputSpec({ ndim: 4 })];\n if (this.padding !== 'same' && this.padding !== 'valid') {\n throw new ValueError(`Conv2DTranspose currently supports only padding modes 'same' ` +\n `and 'valid', but received padding mode ${this.padding}`);\n }\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n if (inputShape.length !== 4) {\n throw new ValueError('Input should have rank 4; Received input shape: ' +\n JSON.stringify(inputShape));\n }\n const channelAxis = this.dataFormat === 'channelsFirst' ? 1 : inputShape.length - 1;\n if (inputShape[channelAxis] == null) {\n throw new ValueError('The channel dimension of the inputs should be defined. 
' +\n 'Found `None`.');\n }\n const inputDim = inputShape[channelAxis];\n const kernelShape = this.kernelSize.concat([this.filters, inputDim]);\n this.kernel = this.addWeight('kernel', kernelShape, 'float32', this.kernelInitializer, this.kernelRegularizer, true, this.kernelConstraint);\n if (this.useBias) {\n this.bias = this.addWeight('bias', [this.filters], 'float32', this.biasInitializer, this.biasRegularizer, true, this.biasConstraint);\n }\n // Set input spec.\n this.inputSpec =\n [new InputSpec({ ndim: 4, axes: { [channelAxis]: inputDim } })];\n this.built = true;\n }\n call(inputs, kwargs) {\n return tfc.tidy(() => {\n let input = getExactlyOneTensor(inputs);\n if (input.shape.length !== 4) {\n throw new ValueError(`Conv2DTranspose.call() expects input tensor to be rank-4, but ` +\n `received a tensor of rank-${input.shape.length}`);\n }\n const inputShape = input.shape;\n const batchSize = inputShape[0];\n let hAxis;\n let wAxis;\n if (this.dataFormat === 'channelsFirst') {\n hAxis = 2;\n wAxis = 3;\n }\n else {\n hAxis = 1;\n wAxis = 2;\n }\n const height = inputShape[hAxis];\n const width = inputShape[wAxis];\n const kernelH = this.kernelSize[0];\n const kernelW = this.kernelSize[1];\n const strideH = this.strides[0];\n const strideW = this.strides[1];\n // Infer the dynamic output shape.\n const outHeight = deconvLength(height, strideH, kernelH, this.padding);\n const outWidth = deconvLength(width, strideW, kernelW, this.padding);\n // Porting Note: We don't branch based on `this.dataFormat` here,\n // because\n // the tjfs-core function `conv2dTranspose` called below always\n // assumes channelsLast.\n const outputShape = [batchSize, outHeight, outWidth, this.filters];\n if (this.dataFormat !== 'channelsLast') {\n input = tfc.transpose(input, [0, 2, 3, 1]);\n }\n let outputs = tfc.conv2dTranspose(input, this.kernel.read(), outputShape, this.strides, this.padding);\n if (this.dataFormat !== 'channelsLast') {\n outputs = tfc.transpose(outputs, [0, 3, 1, 2]);\n }\n if (this.bias != null) {\n outputs =\n K.biasAdd(outputs, this.bias.read(), this.dataFormat);\n }\n if (this.activation != null) {\n outputs = this.activation.apply(outputs);\n }\n return outputs;\n });\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const outputShape = inputShape.slice();\n let channelAxis;\n let heightAxis;\n let widthAxis;\n if (this.dataFormat === 'channelsFirst') {\n channelAxis = 1;\n heightAxis = 2;\n widthAxis = 3;\n }\n else {\n channelAxis = 3;\n heightAxis = 1;\n widthAxis = 2;\n }\n const kernelH = this.kernelSize[0];\n const kernelW = this.kernelSize[1];\n const strideH = this.strides[0];\n const strideW = this.strides[1];\n outputShape[channelAxis] = this.filters;\n outputShape[heightAxis] =\n deconvLength(outputShape[heightAxis], strideH, kernelH, this.padding);\n outputShape[widthAxis] =\n deconvLength(outputShape[widthAxis], strideW, kernelW, this.padding);\n return outputShape;\n }\n getConfig() {\n const config = super.getConfig();\n delete config['dilationRate'];\n return config;\n }\n}\n/** @nocollapse */\nConv2DTranspose.className = 'Conv2DTranspose';\nserialization.registerClass(Conv2DTranspose);\nexport class SeparableConv extends Conv {\n constructor(rank, config) {\n super(rank, config);\n this.DEFAULT_DEPTHWISE_INITIALIZER = 'glorotUniform';\n this.DEFAULT_POINTWISE_INITIALIZER = 'glorotUniform';\n this.depthwiseKernel = null;\n this.pointwiseKernel = null;\n if (config.filters == null) {\n throw new ValueError('The `filters` 
configuration field is required by SeparableConv, ' +\n 'but is unspecified.');\n }\n if (config.kernelInitializer != null || config.kernelRegularizer != null ||\n config.kernelConstraint != null) {\n throw new ValueError('Fields kernelInitializer, kernelRegularizer and kernelConstraint ' +\n 'are invalid for SeparableConv2D. Use depthwiseInitializer, ' +\n 'depthwiseRegularizer, depthwiseConstraint, pointwiseInitializer, ' +\n 'pointwiseRegularizer and pointwiseConstraint instead.');\n }\n if (config.padding != null && config.padding !== 'same' &&\n config.padding !== 'valid') {\n throw new ValueError(`SeparableConv${this.rank}D supports only padding modes: ` +\n `'same' and 'valid', but received ${JSON.stringify(config.padding)}`);\n }\n this.depthMultiplier =\n config.depthMultiplier == null ? 1 : config.depthMultiplier;\n this.depthwiseInitializer = getInitializer(config.depthwiseInitializer || this.DEFAULT_DEPTHWISE_INITIALIZER);\n this.depthwiseRegularizer = getRegularizer(config.depthwiseRegularizer);\n this.depthwiseConstraint = getConstraint(config.depthwiseConstraint);\n this.pointwiseInitializer = getInitializer(config.depthwiseInitializer || this.DEFAULT_POINTWISE_INITIALIZER);\n this.pointwiseRegularizer = getRegularizer(config.pointwiseRegularizer);\n this.pointwiseConstraint = getConstraint(config.pointwiseConstraint);\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n if (inputShape.length < this.rank + 2) {\n throw new ValueError(`Inputs to SeparableConv${this.rank}D should have rank ` +\n `${this.rank + 2}, but received input shape: ` +\n `${JSON.stringify(inputShape)}`);\n }\n const channelAxis = this.dataFormat === 'channelsFirst' ? 1 : inputShape.length - 1;\n if (inputShape[channelAxis] == null || inputShape[channelAxis] < 0) {\n throw new ValueError(`The channel dimension of the inputs should be defined, ` +\n `but found ${JSON.stringify(inputShape[channelAxis])}`);\n }\n const inputDim = inputShape[channelAxis];\n const depthwiseKernelShape = this.kernelSize.concat([inputDim, this.depthMultiplier]);\n const pointwiseKernelShape = [];\n for (let i = 0; i < this.rank; ++i) {\n pointwiseKernelShape.push(1);\n }\n pointwiseKernelShape.push(inputDim * this.depthMultiplier, this.filters);\n const trainable = true;\n this.depthwiseKernel = this.addWeight('depthwise_kernel', depthwiseKernelShape, 'float32', this.depthwiseInitializer, this.depthwiseRegularizer, trainable, this.depthwiseConstraint);\n this.pointwiseKernel = this.addWeight('pointwise_kernel', pointwiseKernelShape, 'float32', this.pointwiseInitializer, this.pointwiseRegularizer, trainable, this.pointwiseConstraint);\n if (this.useBias) {\n this.bias = this.addWeight('bias', [this.filters], 'float32', this.biasInitializer, this.biasRegularizer, trainable, this.biasConstraint);\n }\n else {\n this.bias = null;\n }\n this.inputSpec =\n [new InputSpec({ ndim: this.rank + 2, axes: { [channelAxis]: inputDim } })];\n this.built = true;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n inputs = getExactlyOneTensor(inputs);\n let output;\n if (this.rank === 1) {\n throw new NotImplementedError('1D separable convolution is not implemented yet.');\n }\n else if (this.rank === 2) {\n if (this.dataFormat === 'channelsFirst') {\n inputs = tfc.transpose(inputs, [0, 2, 3, 1]); // NCHW -> NHWC.\n }\n output = tfc.separableConv2d(inputs, this.depthwiseKernel.read(), this.pointwiseKernel.read(), this.strides, this.padding, this.dilationRate, 'NHWC');\n }\n if (this.useBias) {\n output = 
K.biasAdd(output, this.bias.read(), this.dataFormat);\n }\n if (this.activation != null) {\n output = this.activation.apply(output);\n }\n if (this.dataFormat === 'channelsFirst') {\n output = tfc.transpose(output, [0, 3, 1, 2]); // NHWC -> NCHW.\n }\n return output;\n });\n }\n getConfig() {\n const config = super.getConfig();\n delete config['rank'];\n delete config['kernelInitializer'];\n delete config['kernelRegularizer'];\n delete config['kernelConstraint'];\n config['depthwiseInitializer'] =\n serializeInitializer(this.depthwiseInitializer);\n config['pointwiseInitializer'] =\n serializeInitializer(this.pointwiseInitializer);\n config['depthwiseRegularizer'] =\n serializeRegularizer(this.depthwiseRegularizer);\n config['pointwiseRegularizer'] =\n serializeRegularizer(this.pointwiseRegularizer);\n config['depthwiseConstraint'] =\n serializeConstraint(this.depthwiseConstraint);\n config['pointwiseConstraint'] =\n serializeConstraint(this.pointwiseConstraint);\n return config;\n }\n}\n/** @nocollapse */\nSeparableConv.className = 'SeparableConv';\nexport class SeparableConv2D extends SeparableConv {\n constructor(args) {\n super(2, args);\n }\n}\n/** @nocollapse */\nSeparableConv2D.className = 'SeparableConv2D';\nserialization.registerClass(SeparableConv2D);\nexport class Conv1D extends Conv {\n constructor(args) {\n super(1, args);\n Conv1D.verifyArgs(args);\n this.inputSpec = [{ ndim: 3 }];\n }\n getConfig() {\n const config = super.getConfig();\n delete config['rank'];\n delete config['dataFormat'];\n return config;\n }\n static verifyArgs(args) {\n // config.kernelSize must be a number or array of numbers.\n if (typeof args.kernelSize !== 'number' &&\n !generic_utils.checkArrayTypeAndLength(args.kernelSize, 'number', 1, 1)) {\n throw new ValueError(`Conv1D expects config.kernelSize to be number or number[] with ` +\n `length 1, but received ${JSON.stringify(args.kernelSize)}.`);\n }\n }\n}\n/** @nocollapse */\nConv1D.className = 'Conv1D';\nserialization.registerClass(Conv1D);\nexport class Cropping2D extends Layer {\n constructor(args) {\n super(args);\n if (typeof args.cropping === 'number') {\n this.cropping =\n [[args.cropping, args.cropping], [args.cropping, args.cropping]];\n }\n else if (typeof args.cropping[0] === 'number') {\n this.cropping = [\n [args.cropping[0], args.cropping[0]],\n [args.cropping[1], args.cropping[1]]\n ];\n }\n else {\n this.cropping = args.cropping;\n }\n this.dataFormat =\n args.dataFormat === undefined ? 
'channelsLast' : args.dataFormat;\n this.inputSpec = [{ ndim: 4 }];\n }\n computeOutputShape(inputShape) {\n if (this.dataFormat === 'channelsFirst') {\n return [\n inputShape[0], inputShape[1],\n inputShape[2] - this.cropping[0][0] - this.cropping[0][1],\n inputShape[3] - this.cropping[1][0] - this.cropping[1][1]\n ];\n }\n else {\n return [\n inputShape[0],\n inputShape[1] - this.cropping[0][0] - this.cropping[0][1],\n inputShape[2] - this.cropping[1][0] - this.cropping[1][1], inputShape[3]\n ];\n }\n }\n call(inputs, kwargs) {\n return tidy(() => {\n inputs = getExactlyOneTensor(inputs);\n if (this.dataFormat === 'channelsLast') {\n const hSliced = K.sliceAlongAxis(inputs, this.cropping[0][0], inputs.shape[1] - this.cropping[0][0] - this.cropping[0][1], 2);\n return K.sliceAlongAxis(hSliced, this.cropping[1][0], inputs.shape[2] - this.cropping[1][1] - this.cropping[1][0], 3);\n }\n else {\n const hSliced = K.sliceAlongAxis(inputs, this.cropping[0][0], inputs.shape[2] - this.cropping[0][0] - this.cropping[0][1], 3);\n return K.sliceAlongAxis(hSliced, this.cropping[1][0], inputs.shape[3] - this.cropping[1][1] - this.cropping[1][0], 4);\n }\n });\n }\n getConfig() {\n const config = { cropping: this.cropping, dataFormat: this.dataFormat };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nCropping2D.className = 'Cropping2D';\nserialization.registerClass(Cropping2D);\nexport class UpSampling2D extends Layer {\n constructor(args) {\n super(args);\n this.DEFAULT_SIZE = [2, 2];\n this.inputSpec = [{ ndim: 4 }];\n this.size = args.size == null ? this.DEFAULT_SIZE : args.size;\n this.dataFormat =\n args.dataFormat == null ? 'channelsLast' : args.dataFormat;\n }\n computeOutputShape(inputShape) {\n if (this.dataFormat === 'channelsFirst') {\n const height = inputShape[2] == null ? null : this.size[0] * inputShape[2];\n const width = inputShape[3] == null ? null : this.size[1] * inputShape[3];\n return [inputShape[0], inputShape[1], height, width];\n }\n else {\n const height = inputShape[1] == null ? null : this.size[0] * inputShape[1];\n const width = inputShape[2] == null ? 
null : this.size[1] * inputShape[2];\n return [inputShape[0], height, width, inputShape[3]];\n }\n }\n call(inputs, kwargs) {\n return tfc.tidy(() => {\n let input = getExactlyOneTensor(inputs);\n const inputShape = input.shape;\n if (this.dataFormat === 'channelsFirst') {\n input = tfc.transpose(input, [0, 2, 3, 1]);\n const height = this.size[0] * inputShape[2];\n const width = this.size[1] * inputShape[3];\n const resized = input.resizeNearestNeighbor([height, width]);\n return tfc.transpose(resized, [0, 3, 1, 2]);\n }\n else {\n const height = this.size[0] * inputShape[1];\n const width = this.size[1] * inputShape[2];\n return input.resizeNearestNeighbor([height, width]);\n }\n });\n }\n getConfig() {\n const config = { size: this.size, dataFormat: this.dataFormat };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nUpSampling2D.className = 'UpSampling2D';\nserialization.registerClass(UpSampling2D);\n//# sourceMappingURL=convolutional.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * TensorFlow.js Layers: Depthwise Convolutional Layers\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { serialization, tidy } from '@tensorflow/tfjs-core';\nimport { imageDataFormat } from '../backend/common';\nimport * as K from '../backend/tfjs_backend';\nimport { checkDataFormat } from '../common';\nimport { getConstraint, serializeConstraint } from '../constraints';\nimport { ValueError } from '../errors';\nimport { getInitializer, serializeInitializer } from '../initializers';\nimport { getRegularizer, serializeRegularizer } from '../regularizers';\nimport { convOutputLength } from '../utils/conv_utils';\nimport { getExactlyOneShape, getExactlyOneTensor } from '../utils/types_utils';\nimport { BaseConv, preprocessConv2DInput } from './convolutional';\n/**\n * 2D convolution with separable filters.\n * @param x Input tensor.\n * @param depthwiseKernel Convolution kernel for depthwise convolution.\n * @param strides Strides (Array of two integers).\n * @param padding Padding model.\n * @param dataFormat Data format.\n * @param dilationRate Array of two integers, dilation rates for the separable\n * convolution.\n * @returns Output tensor.\n * @throws ValueError If depthwiseKernel is not a 4D array.\n */\nexport function depthwiseConv2d(x, depthwiseKernel, strides = [1, 1], padding = 'valid', dataFormat, dilationRate) {\n return tidy(() => {\n if (dataFormat == null) {\n dataFormat = imageDataFormat();\n }\n checkDataFormat(dataFormat);\n let y = preprocessConv2DInput(x, dataFormat);\n if (x.rank !== 4) {\n throw new ValueError(`Input for depthwiseConv2d is required to be 4-D, but is instead ` +\n `${x.rank}-D`);\n }\n if (depthwiseKernel.rank !== 4) {\n throw new ValueError(`depthwiseKernel is required to be 4-D, but is instead ` +\n `${depthwiseKernel.rank}-D`);\n }\n y = tfc.depthwiseConv2d(y, depthwiseKernel, strides, padding === 'same' ? 'same' : 'valid', 'NHWC', dilationRate);\n if (dataFormat === 'channelsFirst') {\n y = tfc.transpose(y, [0, 3, 1, 2]);\n }\n return y;\n });\n}\nexport class DepthwiseConv2D extends BaseConv {\n constructor(args) {\n super(2, args);\n this.depthwiseKernel = null;\n this.depthMultiplier =\n args.depthMultiplier == null ? 
1 : args.depthMultiplier;\n this.depthwiseInitializer = getInitializer(args.depthwiseInitializer || this.DEFAULT_KERNEL_INITIALIZER);\n this.depthwiseConstraint = getConstraint(args.depthwiseConstraint);\n this.depthwiseRegularizer = getRegularizer(args.depthwiseRegularizer);\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n if (inputShape.length < 4) {\n throw new ValueError(`Inputs to DepthwiseConv2D should have rank 4. ` +\n `Received input shape: ${JSON.stringify(inputShape)}.`);\n }\n const channelAxis = this.dataFormat === 'channelsFirst' ? 1 : 3;\n if (inputShape[channelAxis] == null || inputShape[channelAxis] < 0) {\n throw new ValueError('The channel dimension of the inputs to DepthwiseConv2D should ' +\n `be defined, but is not (${inputShape[channelAxis]}).`);\n }\n const inputDim = inputShape[channelAxis];\n const depthwiseKernelShape = [\n this.kernelSize[0], this.kernelSize[1], inputDim, this.depthMultiplier\n ];\n this.depthwiseKernel = this.addWeight('depthwise_kernel', depthwiseKernelShape, null, this.depthwiseInitializer, this.depthwiseRegularizer, true, this.depthwiseConstraint);\n if (this.useBias) {\n this.bias = this.addWeight('bias', [inputDim * this.depthMultiplier], null, this.biasInitializer, this.biasRegularizer, true, this.biasConstraint);\n }\n else {\n this.bias = null;\n }\n this.built = true;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n inputs = getExactlyOneTensor(inputs);\n let outputs = depthwiseConv2d(inputs, this.depthwiseKernel.read(), this.strides, this.padding, this.dataFormat, null);\n // TODO(cais): Add support for dilation.\n if (this.useBias) {\n outputs = K.biasAdd(outputs, this.bias.read(), this.dataFormat);\n }\n if (this.activation != null) {\n outputs = this.activation.apply(outputs);\n }\n return outputs;\n });\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const rows = this.dataFormat === 'channelsFirst' ? inputShape[2] : inputShape[1];\n const cols = this.dataFormat === 'channelsFirst' ? 
inputShape[3] : inputShape[2];\n const outFilters = this.dataFormat === 'channelsFirst' ?\n inputShape[1] * this.depthMultiplier :\n inputShape[3] * this.depthMultiplier;\n const outRows = convOutputLength(rows, this.kernelSize[0], this.padding, this.strides[0]);\n const outCols = convOutputLength(cols, this.kernelSize[1], this.padding, this.strides[1]);\n if (this.dataFormat === 'channelsFirst') {\n return [inputShape[0], outFilters, outRows, outCols];\n }\n else {\n // In this case, assume 'channelsLast'.\n return [inputShape[0], outRows, outCols, outFilters];\n }\n }\n getConfig() {\n const config = super.getConfig();\n config['depthMultiplier'] = this.depthMultiplier;\n config['depthwiseInitializer'] =\n serializeInitializer(this.depthwiseInitializer);\n config['depthwiseRegularizer'] =\n serializeRegularizer(this.depthwiseRegularizer);\n config['depthwiseConstraint'] =\n serializeConstraint(this.depthwiseRegularizer);\n return config;\n }\n}\n/** @nocollapse */\nDepthwiseConv2D.className = 'DepthwiseConv2D';\nserialization.registerClass(DepthwiseConv2D);\n//# sourceMappingURL=convolutional_depthwise.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * TensorFlow.js Layers: Recurrent Neural Network Layers.\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { serialization, tidy, util } from '@tensorflow/tfjs-core';\nimport { getActivation, serializeActivation } from '../activations';\nimport * as K from '../backend/tfjs_backend';\nimport { nameScope } from '../common';\nimport { getConstraint, serializeConstraint } from '../constraints';\nimport { InputSpec, SymbolicTensor } from '../engine/topology';\nimport { Layer } from '../engine/topology';\nimport { AttributeError, NotImplementedError, ValueError } from '../errors';\nimport { getInitializer, Initializer, Ones, serializeInitializer } from '../initializers';\nimport { getRegularizer, serializeRegularizer } from '../regularizers';\nimport { assertPositiveInteger } from '../utils/generic_utils';\nimport * as math_utils from '../utils/math_utils';\nimport { getExactlyOneShape, getExactlyOneTensor, isArrayOfShapes } from '../utils/types_utils';\nimport { batchGetValue, batchSetValue } from '../variables';\nimport { deserialize } from './serialization';\n/**\n * Standardize `apply()` args to a single list of tensor inputs.\n *\n * When running a model loaded from file, the input tensors `initialState` and\n * `constants` are passed to `RNN.apply()` as part of `inputs` instead of the\n * dedicated kwargs fields. 
`inputs` consists of\n * `[inputs, initialState0, initialState1, ..., constant0, constant1]` in this\n * case.\n * This method makes sure that arguments are\n * separated and that `initialState` and `constants` are `Array`s of tensors\n * (or None).\n *\n * @param inputs Tensor or `Array` of tensors.\n * @param initialState Tensor or `Array` of tensors or `null`/`undefined`.\n * @param constants Tensor or `Array` of tensors or `null`/`undefined`.\n * @returns An object consisting of\n * inputs: A tensor.\n * initialState: `Array` of tensors or `null`.\n * constants: `Array` of tensors or `null`.\n * @throws ValueError, if `inputs` is an `Array` but either `initialState` or\n * `constants` is provided.\n */\nexport function standardizeArgs(inputs, initialState, constants, numConstants) {\n if (Array.isArray(inputs)) {\n if (initialState != null || constants != null) {\n throw new ValueError('When inputs is an array, neither initialState or constants ' +\n 'should be provided');\n }\n if (numConstants != null) {\n constants = inputs.slice(inputs.length - numConstants, inputs.length);\n inputs = inputs.slice(0, inputs.length - numConstants);\n }\n if (inputs.length > 1) {\n initialState = inputs.slice(1, inputs.length);\n }\n inputs = inputs[0];\n }\n function toListOrNull(x) {\n if (x == null || Array.isArray(x)) {\n return x;\n }\n else {\n return [x];\n }\n }\n initialState = toListOrNull(initialState);\n constants = toListOrNull(constants);\n return { inputs, initialState, constants };\n}\n/**\n * Iterates over the time dimension of a tensor.\n *\n * @param stepFunction RNN step function.\n * Parameters:\n * inputs: tensor with shape `[samples, ...]` (no time dimension),\n * representing input for the batch of samples at a certain time step.\n * states: an Array of tensors.\n * Returns:\n * outputs: tensor with shape `[samples, outputDim]` (no time dimension).\n * newStates: list of tensors, same length and shapes as `states`. The first\n * state in the list must be the output tensor at the previous timestep.\n * @param inputs Tensor of temporal data of shape `[samples, time, ...]` (at\n * least 3D).\n * @param initialStates Tensor with shape `[samples, outputDim]` (no time\n * dimension), containing the initial values of the states used in the step\n * function.\n * @param goBackwards If `true`, do the iteration over the time dimension in\n * reverse order and return the reversed sequence.\n * @param mask Binary tensor with shape `[sample, time, 1]`, with a zero for\n * every element that is masked.\n * @param constants An Array of constant values passed at each step.\n * @param unroll Whether to unroll the RNN or to use a symbolic loop. *Not*\n * applicable to this imperative deeplearn.js backend. Its value is ignored.\n * @param needPerStepOutputs Whether the per-step outputs are to be\n * concatenated into a single tensor and returned (as the second return\n * value). Default: `false`. This arg is included so that the relatively\n * expensive concatenation of the stepwise outputs can be omitted unless\n * the stepwise outputs need to be kept (e.g., for an LSTM layer of which\n * `returnSequence` is `true`.)\n * @returns An Array: `[lastOutput, outputs, newStates]`.\n * lastOutput: the lastest output of the RNN, of shape `[samples, ...]`.\n * outputs: tensor with shape `[samples, time, ...]` where each entry\n * `output[s, t]` is the output of the step function at time `t` for sample\n * `s`. This return value is provided if and only if the\n * `needPerStepOutputs` is set as `true`. 
If it is set as `false`, this\n * return value will be `undefined`.\n * newStates: Array of tensors, latest states returned by the step function,\n * of shape `(samples, ...)`.\n * @throws ValueError If input dimension is less than 3.\n *\n * TODO(nielsene): This needs to be tidy-ed.\n */\nexport function rnn(stepFunction, inputs, initialStates, goBackwards = false, mask, constants, unroll = false, needPerStepOutputs = false) {\n return tfc.tidy(() => {\n const ndim = inputs.shape.length;\n if (ndim < 3) {\n throw new ValueError(`Input should be at least 3D, but is ${ndim}D.`);\n }\n // Transpose to time-major, i.e., from [batch, time, ...] to [time, batch,\n // ...].\n const axes = [1, 0].concat(math_utils.range(2, ndim));\n inputs = tfc.transpose(inputs, axes);\n if (constants != null) {\n throw new NotImplementedError('The rnn() functoin of the deeplearn.js backend does not support ' +\n 'constants yet.');\n }\n // Porting Note: the unroll option is ignored by the imperative backend.\n if (unroll) {\n console.warn('Backend rnn(): the unroll = true option is not applicable to the ' +\n 'imperative deeplearn.js backend.');\n }\n if (mask != null) {\n mask = mask.asType('bool').asType('float32');\n if (mask.rank === ndim - 1) {\n mask = tfc.expandDims(mask, -1);\n }\n mask = tfc.transpose(mask, axes);\n }\n if (goBackwards) {\n inputs = tfc.reverse(inputs, 0);\n if (mask != null) {\n mask = tfc.reverse(mask, 0);\n }\n }\n // Porting Note: PyKeras with TensorFlow backend uses a symbolic loop\n // (tf.while_loop). But for the imperative deeplearn.js backend, we just\n // use the usual TypeScript control flow to iterate over the time steps in\n // the inputs.\n // Porting Note: PyKeras patches a \"_use_learning_phase\" attribute to\n // outputs.\n // This is not idiomatic in TypeScript. 
The info regarding whether we are\n // in a learning (i.e., training) phase for RNN is passed in a different\n // way.\n const perStepOutputs = [];\n let lastOutput;\n let states = initialStates;\n const timeSteps = inputs.shape[0];\n const perStepInputs = tfc.unstack(inputs);\n let perStepMasks;\n if (mask != null) {\n perStepMasks = tfc.unstack(mask);\n }\n for (let t = 0; t < timeSteps; ++t) {\n const currentInput = perStepInputs[t];\n const stepOutputs = tfc.tidy(() => stepFunction(currentInput, states));\n if (mask == null) {\n lastOutput = stepOutputs[0];\n states = stepOutputs[1];\n }\n else {\n const maskedOutputs = tfc.tidy(() => {\n const stepMask = perStepMasks[t];\n const negStepMask = tfc.onesLike(stepMask).sub(stepMask);\n // TODO(cais): Would tfc.where() be better for performance?\n const output = stepOutputs[0].mul(stepMask).add(states[0].mul(negStepMask));\n const newStates = states.map((state, i) => {\n return stepOutputs[1][i].mul(stepMask).add(state.mul(negStepMask));\n });\n return { output, newStates };\n });\n lastOutput = maskedOutputs.output;\n states = maskedOutputs.newStates;\n }\n if (needPerStepOutputs) {\n perStepOutputs.push(lastOutput);\n }\n }\n let outputs;\n if (needPerStepOutputs) {\n const axis = 1;\n outputs = tfc.stack(perStepOutputs, axis);\n }\n return [lastOutput, outputs, states];\n });\n}\nexport class RNN extends Layer {\n constructor(args) {\n super(args);\n let cell;\n if (args.cell == null) {\n throw new ValueError('cell property is missing for the constructor of RNN.');\n }\n else if (Array.isArray(args.cell)) {\n cell = new StackedRNNCells({ cells: args.cell });\n }\n else {\n cell = args.cell;\n }\n if (cell.stateSize == null) {\n throw new ValueError('The RNN cell should have an attribute `stateSize` (tuple of ' +\n 'integers, one integer per RNN state).');\n }\n this.cell = cell;\n this.returnSequences =\n args.returnSequences == null ? false : args.returnSequences;\n this.returnState = args.returnState == null ? false : args.returnState;\n this.goBackwards = args.goBackwards == null ? false : args.goBackwards;\n this._stateful = args.stateful == null ? false : args.stateful;\n this.unroll = args.unroll == null ? false : args.unroll;\n this.supportsMasking = true;\n this.inputSpec = [new InputSpec({ ndim: 3 })];\n this.stateSpec = null;\n this.states_ = null;\n // TODO(cais): Add constantsSpec and numConstants.\n this.numConstants = null;\n // TODO(cais): Look into the use of initial_state in the kwargs of the\n // constructor.\n this.keptStates = [];\n }\n // Porting Note: This is the equivalent of `RNN.states` property getter in\n // PyKeras.\n getStates() {\n if (this.states_ == null) {\n const numStates = Array.isArray(this.cell.stateSize) ? 
this.cell.stateSize.length : 1;\n return math_utils.range(0, numStates).map(x => null);\n }\n else {\n return this.states_;\n }\n }\n // Porting Note: This is the equivalent of the `RNN.states` property setter in\n // PyKeras.\n setStates(states) {\n this.states_ = states;\n }\n computeOutputShape(inputShape) {\n if (isArrayOfShapes(inputShape)) {\n inputShape = inputShape[0];\n }\n inputShape = inputShape;\n // TODO(cais): Remove the casting once stacked RNN cells become supported.\n let stateSize = this.cell.stateSize;\n if (!Array.isArray(stateSize)) {\n stateSize = [stateSize];\n }\n const outputDim = stateSize[0];\n let outputShape;\n if (this.returnSequences) {\n outputShape = [inputShape[0], inputShape[1], outputDim];\n }\n else {\n outputShape = [inputShape[0], outputDim];\n }\n if (this.returnState) {\n const stateShape = [];\n for (const dim of stateSize) {\n stateShape.push([inputShape[0], dim]);\n }\n return [outputShape].concat(stateShape);\n }\n else {\n return outputShape;\n }\n }\n computeMask(inputs, mask) {\n return tfc.tidy(() => {\n if (Array.isArray(mask)) {\n mask = mask[0];\n }\n const outputMask = this.returnSequences ? mask : null;\n if (this.returnState) {\n const stateMask = this.states.map(s => null);\n return [outputMask].concat(stateMask);\n }\n else {\n return outputMask;\n }\n });\n }\n /**\n * Get the current state tensors of the RNN.\n *\n * If the state hasn't been set, return an array of `null`s of the correct\n * length.\n */\n get states() {\n if (this.states_ == null) {\n const numStates = Array.isArray(this.cell.stateSize) ? this.cell.stateSize.length : 1;\n const output = [];\n for (let i = 0; i < numStates; ++i) {\n output.push(null);\n }\n return output;\n }\n else {\n return this.states_;\n }\n }\n set states(s) {\n this.states_ = s;\n }\n build(inputShape) {\n // Note inputShape will be an Array of Shapes of initial states and\n // constants if these are passed in apply().\n const constantShape = null;\n if (this.numConstants != null) {\n throw new NotImplementedError('Constants support is not implemented in RNN yet.');\n }\n if (isArrayOfShapes(inputShape)) {\n inputShape = inputShape[0];\n }\n inputShape = inputShape;\n const batchSize = this.stateful ? inputShape[0] : null;\n const inputDim = inputShape.slice(2);\n this.inputSpec[0] = new InputSpec({ shape: [batchSize, null, ...inputDim] });\n // Allow cell (if RNNCell Layer) to build before we set or validate\n // stateSpec.\n const stepInputShape = [inputShape[0]].concat(inputShape.slice(2));\n if (constantShape != null) {\n throw new NotImplementedError('Constants support is not implemented in RNN yet.');\n }\n else {\n this.cell.build(stepInputShape);\n }\n // Set or validate stateSpec.\n let stateSize;\n if (Array.isArray(this.cell.stateSize)) {\n stateSize = this.cell.stateSize;\n }\n else {\n stateSize = [this.cell.stateSize];\n }\n if (this.stateSpec != null) {\n if (!util.arraysEqual(this.stateSpec.map(spec => spec.shape[spec.shape.length - 1]), stateSize)) {\n throw new ValueError(`An initialState was passed that is not compatible with ` +\n `cell.stateSize. 
Received stateSpec=${this.stateSpec}; ` +\n `However cell.stateSize is ${this.cell.stateSize}`);\n }\n }\n else {\n this.stateSpec =\n stateSize.map(dim => new InputSpec({ shape: [null, dim] }));\n }\n if (this.stateful) {\n this.resetStates();\n }\n }\n /**\n * Reset the state tensors of the RNN.\n *\n * If the `states` argument is `undefined` or `null`, will set the\n * state tensor(s) of the RNN to all-zero tensors of the appropriate\n * shape(s).\n *\n * If `states` is provided, will set the state tensors of the RNN to its\n * value.\n *\n * @param states Optional externally-provided initial states.\n * @param training Whether this call is done during training. For stateful\n * RNNs, this affects whether the old states are kept or discarded. In\n * particular, if `training` is `true`, the old states will be kept so\n * that subsequent backpropgataion through time (BPTT) may work properly.\n * Else, the old states will be discarded.\n */\n resetStates(states, training = false) {\n tidy(() => {\n if (!this.stateful) {\n throw new AttributeError('Cannot call resetStates() on an RNN Layer that is not stateful.');\n }\n const batchSize = this.inputSpec[0].shape[0];\n if (batchSize == null) {\n throw new ValueError('If an RNN is stateful, it needs to know its batch size. Specify ' +\n 'the batch size of your input tensors: \\n' +\n '- If using a Sequential model, specify the batch size by ' +\n 'passing a `batchInputShape` option to your first layer.\\n' +\n '- If using the functional API, specify the batch size by ' +\n 'passing a `batchShape` option to your Input layer.');\n }\n // Initialize state if null.\n if (this.states_ == null) {\n if (Array.isArray(this.cell.stateSize)) {\n this.states_ =\n this.cell.stateSize.map(dim => tfc.zeros([batchSize, dim]));\n }\n else {\n this.states_ = [tfc.zeros([batchSize, this.cell.stateSize])];\n }\n }\n else if (states == null) {\n // Dispose old state tensors.\n tfc.dispose(this.states_);\n // For stateful RNNs, fully dispose kept old states.\n if (this.keptStates != null) {\n tfc.dispose(this.keptStates);\n this.keptStates = [];\n }\n if (Array.isArray(this.cell.stateSize)) {\n this.states_ =\n this.cell.stateSize.map(dim => tfc.zeros([batchSize, dim]));\n }\n else {\n this.states_[0] = tfc.zeros([batchSize, this.cell.stateSize]);\n }\n }\n else {\n if (!Array.isArray(states)) {\n states = [states];\n }\n if (states.length !== this.states_.length) {\n throw new ValueError(`Layer ${this.name} expects ${this.states_.length} state(s), ` +\n `but it received ${states.length} state value(s). Input ` +\n `received: ${states}`);\n }\n if (training === true) {\n // Store old state tensors for complete disposal later, i.e., during\n // the next no-arg call to this method. 
We do not dispose the old\n // states immediately because that BPTT (among other things) require\n // them.\n this.keptStates.push(this.states_.slice());\n }\n else {\n tfc.dispose(this.states_);\n }\n for (let index = 0; index < this.states_.length; ++index) {\n const value = states[index];\n const dim = Array.isArray(this.cell.stateSize) ?\n this.cell.stateSize[index] :\n this.cell.stateSize;\n const expectedShape = [batchSize, dim];\n if (!util.arraysEqual(value.shape, expectedShape)) {\n throw new ValueError(`State ${index} is incompatible with layer ${this.name}: ` +\n `expected shape=${expectedShape}, received shape=${value.shape}`);\n }\n this.states_[index] = value;\n }\n }\n this.states_ = this.states_.map(state => tfc.keep(state.clone()));\n });\n }\n apply(inputs, kwargs) {\n // TODO(cais): Figure out whether initialState is in kwargs or inputs.\n let initialState = kwargs == null ? null : kwargs['initialState'];\n let constants = kwargs == null ? null : kwargs['constants'];\n if (kwargs == null) {\n kwargs = {};\n }\n const standardized = standardizeArgs(inputs, initialState, constants, this.numConstants);\n inputs = standardized.inputs;\n initialState = standardized.initialState;\n constants = standardized.constants;\n // If any of `initial_state` or `constants` are specified and are\n // `tf.SymbolicTensor`s, then add them to the inputs and temporarily modify\n // the input_spec to include them.\n let additionalInputs = [];\n let additionalSpecs = [];\n if (initialState != null) {\n kwargs['initialState'] = initialState;\n additionalInputs = additionalInputs.concat(initialState);\n this.stateSpec = [];\n for (const state of initialState) {\n this.stateSpec.push(new InputSpec({ shape: state.shape }));\n }\n // TODO(cais): Use the following instead.\n // this.stateSpec = initialState.map(state => new InputSpec({shape:\n // state.shape}));\n additionalSpecs = additionalSpecs.concat(this.stateSpec);\n }\n if (constants != null) {\n kwargs['constants'] = constants;\n additionalInputs = additionalInputs.concat(constants);\n // TODO(cais): Add this.constantsSpec.\n this.numConstants = constants.length;\n }\n const isTensor = additionalInputs[0] instanceof SymbolicTensor;\n if (isTensor) {\n // Compute full input spec, including state and constants.\n const fullInput = [inputs].concat(additionalInputs);\n const fullInputSpec = this.inputSpec.concat(additionalSpecs);\n // Perform the call with temporarily replaced inputSpec.\n const originalInputSpec = this.inputSpec;\n this.inputSpec = fullInputSpec;\n const output = super.apply(fullInput, kwargs);\n this.inputSpec = originalInputSpec;\n return output;\n }\n else {\n return super.apply(inputs, kwargs);\n }\n }\n // tslint:disable-next-line:no-any\n call(inputs, kwargs) {\n // Input shape: `[samples, time (padded with zeros), input_dim]`.\n // Note that the .build() method of subclasses **must** define\n // this.inputSpec and this.stateSpec owith complete input shapes.\n return tidy(() => {\n const mask = kwargs == null ? null : kwargs['mask'];\n const training = kwargs == null ? null : kwargs['training'];\n let initialState = kwargs == null ? null : kwargs['initialState'];\n inputs = getExactlyOneTensor(inputs);\n if (initialState == null) {\n if (this.stateful) {\n initialState = this.states_;\n }\n else {\n initialState = this.getInitialState(inputs);\n }\n }\n const numStates = Array.isArray(this.cell.stateSize) ? 
this.cell.stateSize.length : 1;\n if (initialState.length !== numStates) {\n throw new ValueError(`RNN Layer has ${numStates} state(s) but was passed ` +\n `${initialState.length} initial state(s).`);\n }\n if (this.unroll) {\n console.warn('Ignoring unroll = true for RNN layer, due to imperative backend.');\n }\n const cellCallKwargs = { training };\n // TODO(cais): Add support for constants.\n const step = (inputs, states) => {\n // `inputs` and `states` are concatenated to form a single `Array` of\n // `tf.Tensor`s as the input to `cell.call()`.\n const outputs = this.cell.call([inputs].concat(states), cellCallKwargs);\n // Marshall the return value into output and new states.\n return [outputs[0], outputs.slice(1)];\n };\n // TODO(cais): Add support for constants.\n const rnnOutputs = rnn(step, inputs, initialState, this.goBackwards, mask, null, this.unroll, this.returnSequences);\n const lastOutput = rnnOutputs[0];\n const outputs = rnnOutputs[1];\n const states = rnnOutputs[2];\n if (this.stateful) {\n this.resetStates(states, training);\n }\n const output = this.returnSequences ? outputs : lastOutput;\n // TODO(cais): Porperty set learning phase flag.\n if (this.returnState) {\n return [output].concat(states);\n }\n else {\n return output;\n }\n });\n }\n getInitialState(inputs) {\n return tidy(() => {\n // Build an all-zero tensor of shape [samples, outputDim].\n // [Samples, timeSteps, inputDim].\n let initialState = tfc.zeros(inputs.shape);\n // [Samples].\n initialState = tfc.sum(initialState, [1, 2]);\n initialState = K.expandDims(initialState); // [Samples, 1].\n if (Array.isArray(this.cell.stateSize)) {\n return this.cell.stateSize.map(dim => dim > 1 ? K.tile(initialState, [1, dim]) : initialState);\n }\n else {\n return this.cell.stateSize > 1 ?\n [K.tile(initialState, [1, this.cell.stateSize])] :\n [initialState];\n }\n });\n }\n get trainableWeights() {\n if (!this.trainable) {\n return [];\n }\n // Porting Note: In TypeScript, `this` is always an instance of `Layer`.\n return this.cell.trainableWeights;\n }\n get nonTrainableWeights() {\n // Porting Note: In TypeScript, `this` is always an instance of `Layer`.\n if (!this.trainable) {\n return this.cell.weights;\n }\n return this.cell.nonTrainableWeights;\n }\n setFastWeightInitDuringBuild(value) {\n super.setFastWeightInitDuringBuild(value);\n if (this.cell != null) {\n this.cell.setFastWeightInitDuringBuild(value);\n }\n }\n getConfig() {\n const baseConfig = super.getConfig();\n const config = {\n returnSequences: this.returnSequences,\n returnState: this.returnState,\n goBackwards: this.goBackwards,\n stateful: this.stateful,\n unroll: this.unroll,\n };\n if (this.numConstants != null) {\n config['numConstants'] = this.numConstants;\n }\n const cellConfig = this.cell.getConfig();\n if (this.getClassName() === RNN.className) {\n config['cell'] = {\n 'className': this.cell.getClassName(),\n 'config': cellConfig,\n };\n }\n // this order is necessary, to prevent cell name from replacing layer name\n return Object.assign({}, cellConfig, baseConfig, config);\n }\n /** @nocollapse */\n static fromConfig(cls, config, customObjects = {}) {\n const cellConfig = config['cell'];\n const cell = deserialize(cellConfig, customObjects);\n return new cls(Object.assign(config, { cell }));\n }\n}\n/** @nocollapse */\nRNN.className = 'RNN';\nserialization.registerClass(RNN);\n// Porting Note: This is a common parent class for RNN cells. There is no\n// equivalent of this in PyKeras. 
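// Illustrative sketch (not part of the bundled source): resetStates() above is the hook
// used by stateful layers. With the public API, a stateful RNN keeps its state tensors
// across predict() calls until the layer is explicitly reset:
const tf = require('@tensorflow/tfjs');
const model = tf.sequential();
model.add(tf.layers.rnn({
  cell: tf.layers.simpleRNNCell({ units: 8 }),
  stateful: true,                      // carry state across batches
  batchInputShape: [2, 5, 3],          // stateful layers require a fixed batch size
}));
model.predict(tf.randomNormal([2, 5, 3]));   // starts from all-zero states
model.predict(tf.randomNormal([2, 5, 3]));   // continues from the previous states
model.layers[0].resetStates();               // back to all-zero states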
Having a common parent class forgoes the\n// need for `has_attr(cell, ...)` checks or its TypeScript equivalent.\n/**\n * An RNNCell layer.\n *\n * @doc {heading: 'Layers', subheading: 'Classes'}\n */\nexport class RNNCell extends Layer {\n}\nexport class SimpleRNNCell extends RNNCell {\n constructor(args) {\n super(args);\n this.DEFAULT_ACTIVATION = 'tanh';\n this.DEFAULT_KERNEL_INITIALIZER = 'glorotNormal';\n this.DEFAULT_RECURRENT_INITIALIZER = 'orthogonal';\n this.DEFAULT_BIAS_INITIALIZER = 'zeros';\n this.units = args.units;\n assertPositiveInteger(this.units, `units`);\n this.activation = getActivation(args.activation == null ? this.DEFAULT_ACTIVATION : args.activation);\n this.useBias = args.useBias == null ? true : args.useBias;\n this.kernelInitializer = getInitializer(args.kernelInitializer || this.DEFAULT_KERNEL_INITIALIZER);\n this.recurrentInitializer = getInitializer(args.recurrentInitializer || this.DEFAULT_RECURRENT_INITIALIZER);\n this.biasInitializer =\n getInitializer(args.biasInitializer || this.DEFAULT_BIAS_INITIALIZER);\n this.kernelRegularizer = getRegularizer(args.kernelRegularizer);\n this.recurrentRegularizer = getRegularizer(args.recurrentRegularizer);\n this.biasRegularizer = getRegularizer(args.biasRegularizer);\n this.kernelConstraint = getConstraint(args.kernelConstraint);\n this.recurrentConstraint = getConstraint(args.recurrentConstraint);\n this.biasConstraint = getConstraint(args.biasConstraint);\n this.dropout = math_utils.min([1, math_utils.max([0, args.dropout == null ? 0 : args.dropout])]);\n this.recurrentDropout = math_utils.min([\n 1,\n math_utils.max([0, args.recurrentDropout == null ? 0 : args.recurrentDropout])\n ]);\n this.stateSize = this.units;\n this.dropoutMask = null;\n this.recurrentDropoutMask = null;\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n // TODO(cais): Use regularizer.\n this.kernel = this.addWeight('kernel', [inputShape[inputShape.length - 1], this.units], null, this.kernelInitializer, this.kernelRegularizer, true, this.kernelConstraint);\n this.recurrentKernel = this.addWeight('recurrent_kernel', [this.units, this.units], null, this.recurrentInitializer, this.recurrentRegularizer, true, this.recurrentConstraint);\n if (this.useBias) {\n this.bias = this.addWeight('bias', [this.units], null, this.biasInitializer, this.biasRegularizer, true, this.biasConstraint);\n }\n else {\n this.bias = null;\n }\n this.built = true;\n }\n // Porting Note: PyKeras' equivalent of this method takes two tensor inputs:\n // `inputs` and `states`. Here, the two tensors are combined into an\n // `Tensor[]` Array as the first input argument.\n // Similarly, PyKeras' equivalent of this method returns two values:\n // `output` and `[output]`. Here the two are combined into one length-2\n // `Tensor[]`, consisting of `output` repeated.\n call(inputs, kwargs) {\n return tidy(() => {\n inputs = inputs;\n if (inputs.length !== 2) {\n throw new ValueError(`SimpleRNNCell expects 2 input Tensors, got ${inputs.length}.`);\n }\n let prevOutput = inputs[1];\n inputs = inputs[0];\n const training = kwargs['training'] == null ? 
false : kwargs['training'];\n if (0 < this.dropout && this.dropout < 1 && this.dropoutMask == null) {\n this.dropoutMask = generateDropoutMask({\n ones: () => tfc.onesLike(inputs),\n rate: this.dropout,\n training\n });\n }\n if (0 < this.recurrentDropout && this.recurrentDropout < 1 &&\n this.recurrentDropoutMask == null) {\n this.recurrentDropoutMask = generateDropoutMask({\n ones: () => tfc.onesLike(prevOutput),\n rate: this.recurrentDropout,\n training\n });\n }\n let h;\n const dpMask = this.dropoutMask;\n const recDpMask = this.recurrentDropoutMask;\n if (dpMask != null) {\n h = K.dot(tfc.mul(inputs, dpMask), this.kernel.read());\n }\n else {\n h = K.dot(inputs, this.kernel.read());\n }\n if (this.bias != null) {\n h = K.biasAdd(h, this.bias.read());\n }\n if (recDpMask != null) {\n prevOutput = tfc.mul(prevOutput, recDpMask);\n }\n let output = tfc.add(h, K.dot(prevOutput, this.recurrentKernel.read()));\n if (this.activation != null) {\n output = this.activation.apply(output);\n }\n // TODO(cais): Properly set learning phase on output tensor?\n return [output, output];\n });\n }\n getConfig() {\n const baseConfig = super.getConfig();\n const config = {\n units: this.units,\n activation: serializeActivation(this.activation),\n useBias: this.useBias,\n kernelInitializer: serializeInitializer(this.kernelInitializer),\n recurrentInitializer: serializeInitializer(this.recurrentInitializer),\n biasInitializer: serializeInitializer(this.biasInitializer),\n kernelRegularizer: serializeRegularizer(this.kernelRegularizer),\n recurrentRegularizer: serializeRegularizer(this.recurrentRegularizer),\n biasRegularizer: serializeRegularizer(this.biasRegularizer),\n activityRegularizer: serializeRegularizer(this.activityRegularizer),\n kernelConstraint: serializeConstraint(this.kernelConstraint),\n recurrentConstraint: serializeConstraint(this.recurrentConstraint),\n biasConstraint: serializeConstraint(this.biasConstraint),\n dropout: this.dropout,\n recurrentDropout: this.recurrentDropout,\n };\n return Object.assign({}, baseConfig, config);\n }\n}\n/** @nocollapse */\nSimpleRNNCell.className = 'SimpleRNNCell';\nserialization.registerClass(SimpleRNNCell);\nexport class SimpleRNN extends RNN {\n constructor(args) {\n args.cell = new SimpleRNNCell(args);\n super(args);\n // TODO(cais): Add activityRegularizer.\n }\n call(inputs, kwargs) {\n return tidy(() => {\n if (this.cell.dropoutMask != null) {\n tfc.dispose(this.cell.dropoutMask);\n this.cell.dropoutMask = null;\n }\n if (this.cell.recurrentDropoutMask != null) {\n tfc.dispose(this.cell.recurrentDropoutMask);\n this.cell.recurrentDropoutMask = null;\n }\n const mask = kwargs == null ? null : kwargs['mask'];\n const training = kwargs == null ? null : kwargs['training'];\n const initialState = kwargs == null ? 
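// Illustrative sketch of the single step computed by SimpleRNNCell.call() above:
// output = activation(x * W + b + hPrev * U). W, U and b below are placeholder weights,
// not the bundled variables, and tanh stands in for the configured activation.
const tf = require('@tensorflow/tfjs');
const simpleRnnStep = (x, hPrev, W, U, b) =>
  tf.tidy(() => tf.tanh(tf.add(tf.add(tf.matMul(x, W), b), tf.matMul(hPrev, U))));
// e.g. 3 input features, 4 units:
const W = tf.randomNormal([3, 4]);
const U = tf.randomNormal([4, 4]);
const b = tf.zeros([4]);
const h1 = simpleRnnStep(tf.ones([1, 3]), tf.zeros([1, 4]), W, U, b);
console.log(h1.shape);   // [1, 4]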
null : kwargs['initialState'];\n return super.call(inputs, { mask, training, initialState });\n });\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls(config);\n }\n}\n/** @nocollapse */\nSimpleRNN.className = 'SimpleRNN';\nserialization.registerClass(SimpleRNN);\nexport class GRUCell extends RNNCell {\n constructor(args) {\n super(args);\n this.DEFAULT_ACTIVATION = 'tanh';\n this.DEFAULT_RECURRENT_ACTIVATION = 'hardSigmoid';\n this.DEFAULT_KERNEL_INITIALIZER = 'glorotNormal';\n this.DEFAULT_RECURRENT_INITIALIZER = 'orthogonal';\n this.DEFAULT_BIAS_INITIALIZER = 'zeros';\n if (args.resetAfter) {\n throw new ValueError(`GRUCell does not support reset_after parameter set to true.`);\n }\n this.units = args.units;\n assertPositiveInteger(this.units, 'units');\n this.activation = getActivation(args.activation === undefined ? this.DEFAULT_ACTIVATION :\n args.activation);\n this.recurrentActivation = getActivation(args.recurrentActivation === undefined ?\n this.DEFAULT_RECURRENT_ACTIVATION :\n args.recurrentActivation);\n this.useBias = args.useBias == null ? true : args.useBias;\n this.kernelInitializer = getInitializer(args.kernelInitializer || this.DEFAULT_KERNEL_INITIALIZER);\n this.recurrentInitializer = getInitializer(args.recurrentInitializer || this.DEFAULT_RECURRENT_INITIALIZER);\n this.biasInitializer =\n getInitializer(args.biasInitializer || this.DEFAULT_BIAS_INITIALIZER);\n this.kernelRegularizer = getRegularizer(args.kernelRegularizer);\n this.recurrentRegularizer = getRegularizer(args.recurrentRegularizer);\n this.biasRegularizer = getRegularizer(args.biasRegularizer);\n this.kernelConstraint = getConstraint(args.kernelConstraint);\n this.recurrentConstraint = getConstraint(args.recurrentConstraint);\n this.biasConstraint = getConstraint(args.biasConstraint);\n this.dropout = math_utils.min([1, math_utils.max([0, args.dropout == null ? 0 : args.dropout])]);\n this.recurrentDropout = math_utils.min([\n 1,\n math_utils.max([0, args.recurrentDropout == null ? 0 : args.recurrentDropout])\n ]);\n this.implementation = args.implementation;\n this.stateSize = this.units;\n this.dropoutMask = null;\n this.recurrentDropoutMask = null;\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const inputDim = inputShape[inputShape.length - 1];\n this.kernel = this.addWeight('kernel', [inputDim, this.units * 3], null, this.kernelInitializer, this.kernelRegularizer, true, this.kernelConstraint);\n this.recurrentKernel = this.addWeight('recurrent_kernel', [this.units, this.units * 3], null, this.recurrentInitializer, this.recurrentRegularizer, true, this.recurrentConstraint);\n if (this.useBias) {\n this.bias = this.addWeight('bias', [this.units * 3], null, this.biasInitializer, this.biasRegularizer, true, this.biasConstraint);\n }\n else {\n this.bias = null;\n }\n // Porting Notes: Unlike the PyKeras implementation, we perform slicing\n // of the weights and bias in the call() method, at execution time.\n this.built = true;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n inputs = inputs;\n if (inputs.length !== 2) {\n throw new ValueError(`GRUCell expects 2 input Tensors (inputs, h, c), got ` +\n `${inputs.length}.`);\n }\n const training = kwargs['training'] == null ? 
false : kwargs['training'];\n let hTMinus1 = inputs[1]; // Previous memory state.\n inputs = inputs[0];\n // Note: For superior performance, TensorFlow.js always uses\n // implementation 2, regardless of the actual value of\n // config.implementation.\n if (0 < this.dropout && this.dropout < 1 && this.dropoutMask == null) {\n this.dropoutMask = generateDropoutMask({\n ones: () => tfc.onesLike(inputs),\n rate: this.dropout,\n training,\n count: 3\n });\n }\n if (0 < this.recurrentDropout && this.recurrentDropout < 1 &&\n this.recurrentDropoutMask == null) {\n this.recurrentDropoutMask = generateDropoutMask({\n ones: () => tfc.onesLike(hTMinus1),\n rate: this.recurrentDropout,\n training,\n count: 3\n });\n }\n const dpMask = this.dropoutMask;\n const recDpMask = this.recurrentDropoutMask;\n let z;\n let r;\n let hh;\n if (0 < this.dropout && this.dropout < 1) {\n inputs = tfc.mul(inputs, dpMask[0]);\n }\n let matrixX = K.dot(inputs, this.kernel.read());\n if (this.useBias) {\n matrixX = K.biasAdd(matrixX, this.bias.read());\n }\n if (0 < this.recurrentDropout && this.recurrentDropout < 1) {\n hTMinus1 = tfc.mul(hTMinus1, recDpMask[0]);\n }\n const recurrentKernelValue = this.recurrentKernel.read();\n const [rk1, rk2] = tfc.split(recurrentKernelValue, [2 * this.units, this.units], recurrentKernelValue.rank - 1);\n const matrixInner = K.dot(hTMinus1, rk1);\n const [xZ, xR, xH] = tfc.split(matrixX, 3, matrixX.rank - 1);\n const [recurrentZ, recurrentR] = tfc.split(matrixInner, 2, matrixInner.rank - 1);\n z = this.recurrentActivation.apply(tfc.add(xZ, recurrentZ));\n r = this.recurrentActivation.apply(tfc.add(xR, recurrentR));\n const recurrentH = K.dot(tfc.mul(r, hTMinus1), rk2);\n hh = this.activation.apply(tfc.add(xH, recurrentH));\n const h = tfc.add(tfc.mul(z, hTMinus1), tfc.mul(tfc.add(1, tfc.neg(z)), hh));\n // TODO(cais): Add use_learning_phase flag properly.\n return [h, h];\n });\n }\n getConfig() {\n const baseConfig = super.getConfig();\n const config = {\n units: this.units,\n activation: serializeActivation(this.activation),\n recurrentActivation: serializeActivation(this.recurrentActivation),\n useBias: this.useBias,\n kernelInitializer: serializeInitializer(this.kernelInitializer),\n recurrentInitializer: serializeInitializer(this.recurrentInitializer),\n biasInitializer: serializeInitializer(this.biasInitializer),\n kernelRegularizer: serializeRegularizer(this.kernelRegularizer),\n recurrentRegularizer: serializeRegularizer(this.recurrentRegularizer),\n biasRegularizer: serializeRegularizer(this.biasRegularizer),\n activityRegularizer: serializeRegularizer(this.activityRegularizer),\n kernelConstraint: serializeConstraint(this.kernelConstraint),\n recurrentConstraint: serializeConstraint(this.recurrentConstraint),\n biasConstraint: serializeConstraint(this.biasConstraint),\n dropout: this.dropout,\n recurrentDropout: this.recurrentDropout,\n implementation: this.implementation,\n resetAfter: false\n };\n return Object.assign({}, baseConfig, config);\n }\n}\n/** @nocollapse */\nGRUCell.className = 'GRUCell';\nserialization.registerClass(GRUCell);\nexport class GRU extends RNN {\n constructor(args) {\n if (args.implementation === 0) {\n console.warn('`implementation=0` has been deprecated, and now defaults to ' +\n '`implementation=1`. 
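// Illustrative sketch (not part of the bundled source): GRUCell.call() above computes
// z = sigmoid(xZ + Uz*h), r = sigmoid(xR + Ur*h), hh = tanh(xH + Uh*(r*h)) and then
// h = z*hPrev + (1-z)*hh. End-to-end, the wrapper is exposed as tf.layers.gru():
const tf = require('@tensorflow/tfjs');
const input = tf.input({ shape: [10, 16] });            // [timeSteps, features]
const gru = tf.layers.gru({ units: 32, returnSequences: true, returnState: true });
const [seq, lastState] = gru.apply(input);
const model = tf.model({ inputs: input, outputs: [seq, lastState] });
model.predict(tf.randomNormal([4, 10, 16]))
  .forEach(t => console.log(t.shape));                  // [4, 10, 32] and [4, 32]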
Please update your layer call.');\n }\n args.cell = new GRUCell(args);\n super(args);\n // TODO(cais): Add activityRegularizer.\n }\n call(inputs, kwargs) {\n return tidy(() => {\n if (this.cell.dropoutMask != null) {\n tfc.dispose(this.cell.dropoutMask);\n this.cell.dropoutMask = null;\n }\n if (this.cell.recurrentDropoutMask != null) {\n tfc.dispose(this.cell.recurrentDropoutMask);\n this.cell.recurrentDropoutMask = null;\n }\n const mask = kwargs == null ? null : kwargs['mask'];\n const training = kwargs == null ? null : kwargs['training'];\n const initialState = kwargs == null ? null : kwargs['initialState'];\n return super.call(inputs, { mask, training, initialState });\n });\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n if (config['implmentation'] === 0) {\n config['implementation'] = 1;\n }\n return new cls(config);\n }\n}\n/** @nocollapse */\nGRU.className = 'GRU';\nserialization.registerClass(GRU);\nexport class LSTMCell extends RNNCell {\n constructor(args) {\n super(args);\n this.DEFAULT_ACTIVATION = 'tanh';\n this.DEFAULT_RECURRENT_ACTIVATION = 'hardSigmoid';\n this.DEFAULT_KERNEL_INITIALIZER = 'glorotNormal';\n this.DEFAULT_RECURRENT_INITIALIZER = 'orthogonal';\n this.DEFAULT_BIAS_INITIALIZER = 'zeros';\n this.units = args.units;\n assertPositiveInteger(this.units, 'units');\n this.activation = getActivation(args.activation === undefined ? this.DEFAULT_ACTIVATION :\n args.activation);\n this.recurrentActivation = getActivation(args.recurrentActivation === undefined ?\n this.DEFAULT_RECURRENT_ACTIVATION :\n args.recurrentActivation);\n this.useBias = args.useBias == null ? true : args.useBias;\n this.kernelInitializer = getInitializer(args.kernelInitializer || this.DEFAULT_KERNEL_INITIALIZER);\n this.recurrentInitializer = getInitializer(args.recurrentInitializer || this.DEFAULT_RECURRENT_INITIALIZER);\n this.biasInitializer =\n getInitializer(args.biasInitializer || this.DEFAULT_BIAS_INITIALIZER);\n this.unitForgetBias = args.unitForgetBias;\n this.kernelRegularizer = getRegularizer(args.kernelRegularizer);\n this.recurrentRegularizer = getRegularizer(args.recurrentRegularizer);\n this.biasRegularizer = getRegularizer(args.biasRegularizer);\n this.kernelConstraint = getConstraint(args.kernelConstraint);\n this.recurrentConstraint = getConstraint(args.recurrentConstraint);\n this.biasConstraint = getConstraint(args.biasConstraint);\n this.dropout = math_utils.min([1, math_utils.max([0, args.dropout == null ? 0 : args.dropout])]);\n this.recurrentDropout = math_utils.min([\n 1,\n math_utils.max([0, args.recurrentDropout == null ? 
0 : args.recurrentDropout])\n ]);\n this.implementation = args.implementation;\n this.stateSize = [this.units, this.units];\n this.dropoutMask = null;\n this.recurrentDropoutMask = null;\n }\n build(inputShape) {\n var _a;\n inputShape = getExactlyOneShape(inputShape);\n const inputDim = inputShape[inputShape.length - 1];\n this.kernel = this.addWeight('kernel', [inputDim, this.units * 4], null, this.kernelInitializer, this.kernelRegularizer, true, this.kernelConstraint);\n this.recurrentKernel = this.addWeight('recurrent_kernel', [this.units, this.units * 4], null, this.recurrentInitializer, this.recurrentRegularizer, true, this.recurrentConstraint);\n let biasInitializer;\n if (this.useBias) {\n if (this.unitForgetBias) {\n const capturedBiasInit = this.biasInitializer;\n const capturedUnits = this.units;\n biasInitializer = new (_a = class CustomInit extends Initializer {\n apply(shape, dtype) {\n // TODO(cais): More informative variable names?\n const bI = capturedBiasInit.apply([capturedUnits]);\n const bF = (new Ones()).apply([capturedUnits]);\n const bCAndH = capturedBiasInit.apply([capturedUnits * 2]);\n return K.concatAlongFirstAxis(K.concatAlongFirstAxis(bI, bF), bCAndH);\n }\n },\n /** @nocollapse */\n _a.className = 'CustomInit',\n _a)();\n }\n else {\n biasInitializer = this.biasInitializer;\n }\n this.bias = this.addWeight('bias', [this.units * 4], null, biasInitializer, this.biasRegularizer, true, this.biasConstraint);\n }\n else {\n this.bias = null;\n }\n // Porting Notes: Unlike the PyKeras implementation, we perform slicing\n // of the weights and bias in the call() method, at execution time.\n this.built = true;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n const training = kwargs['training'] == null ? false : kwargs['training'];\n inputs = inputs;\n if (inputs.length !== 3) {\n throw new ValueError(`LSTMCell expects 3 input Tensors (inputs, h, c), got ` +\n `${inputs.length}.`);\n }\n let hTMinus1 = inputs[1]; // Previous memory state.\n const cTMinus1 = inputs[2]; // Previous carry state.\n inputs = inputs[0];\n if (0 < this.dropout && this.dropout < 1 && this.dropoutMask == null) {\n this.dropoutMask = generateDropoutMask({\n ones: () => tfc.onesLike(inputs),\n rate: this.dropout,\n training,\n count: 4\n });\n }\n if (0 < this.recurrentDropout && this.recurrentDropout < 1 &&\n this.recurrentDropoutMask == null) {\n this.recurrentDropoutMask = generateDropoutMask({\n ones: () => tfc.onesLike(hTMinus1),\n rate: this.recurrentDropout,\n training,\n count: 4\n });\n }\n const dpMask = this.dropoutMask;\n const recDpMask = this.recurrentDropoutMask;\n // Note: For superior performance, TensorFlow.js always uses\n // implementation 2 regardless of the actual value of\n // config.implementation.\n let i;\n let f;\n let c;\n let o;\n if (0 < this.dropout && this.dropout < 1) {\n inputs = tfc.mul(inputs, dpMask[0]);\n }\n let z = K.dot(inputs, this.kernel.read());\n if (0 < this.recurrentDropout && this.recurrentDropout < 1) {\n hTMinus1 = tfc.mul(hTMinus1, recDpMask[0]);\n }\n z = tfc.add(z, K.dot(hTMinus1, this.recurrentKernel.read()));\n if (this.useBias) {\n z = K.biasAdd(z, this.bias.read());\n }\n const [z0, z1, z2, z3] = tfc.split(z, 4, z.rank - 1);\n i = this.recurrentActivation.apply(z0);\n f = this.recurrentActivation.apply(z1);\n c = tfc.add(tfc.mul(f, cTMinus1), tfc.mul(i, this.activation.apply(z2)));\n o = this.recurrentActivation.apply(z3);\n const h = tfc.mul(o, this.activation.apply(c));\n // TODO(cais): Add use_learning_phase flag properly.\n return 
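// Illustrative sketch of the gate math in LSTMCell.call() above, with placeholder
// weights W, U, b (not the bundled variables) and plain sigmoid standing in for the
// default hardSigmoid recurrent activation:
const tf = require('@tensorflow/tfjs');
function lstmStep(x, hPrev, cPrev, W, U, b) {
  return tf.tidy(() => {
    const z = tf.add(tf.add(tf.matMul(x, W), tf.matMul(hPrev, U)), b);
    const [z0, z1, z2, z3] = tf.split(z, 4, 1);      // one slice per gate
    const i = tf.sigmoid(z0);                        // input gate
    const f = tf.sigmoid(z1);                        // forget gate
    const c = tf.add(tf.mul(f, cPrev), tf.mul(i, tf.tanh(z2)));
    const o = tf.sigmoid(z3);                        // output gate
    return [tf.mul(o, tf.tanh(c)), c];               // [h, c]
  });
}
// e.g. 3 input features, 4 units (so W is [3, 16], U is [4, 16], b is [16]):
const [h, c] = lstmStep(tf.ones([1, 3]), tf.zeros([1, 4]), tf.zeros([1, 4]),
                        tf.randomNormal([3, 16]), tf.randomNormal([4, 16]), tf.zeros([16]));
console.log(h.shape, c.shape);   // [1, 4] [1, 4]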
[h, h, c];\n });\n }\n getConfig() {\n const baseConfig = super.getConfig();\n const config = {\n units: this.units,\n activation: serializeActivation(this.activation),\n recurrentActivation: serializeActivation(this.recurrentActivation),\n useBias: this.useBias,\n kernelInitializer: serializeInitializer(this.kernelInitializer),\n recurrentInitializer: serializeInitializer(this.recurrentInitializer),\n biasInitializer: serializeInitializer(this.biasInitializer),\n unitForgetBias: this.unitForgetBias,\n kernelRegularizer: serializeRegularizer(this.kernelRegularizer),\n recurrentRegularizer: serializeRegularizer(this.recurrentRegularizer),\n biasRegularizer: serializeRegularizer(this.biasRegularizer),\n activityRegularizer: serializeRegularizer(this.activityRegularizer),\n kernelConstraint: serializeConstraint(this.kernelConstraint),\n recurrentConstraint: serializeConstraint(this.recurrentConstraint),\n biasConstraint: serializeConstraint(this.biasConstraint),\n dropout: this.dropout,\n recurrentDropout: this.recurrentDropout,\n implementation: this.implementation,\n };\n return Object.assign({}, baseConfig, config);\n }\n}\n/** @nocollapse */\nLSTMCell.className = 'LSTMCell';\nserialization.registerClass(LSTMCell);\nexport class LSTM extends RNN {\n constructor(args) {\n if (args.implementation === 0) {\n console.warn('`implementation=0` has been deprecated, and now defaults to ' +\n '`implementation=1`. Please update your layer call.');\n }\n args.cell = new LSTMCell(args);\n super(args);\n // TODO(cais): Add activityRegularizer.\n }\n call(inputs, kwargs) {\n return tidy(() => {\n if (this.cell.dropoutMask != null) {\n tfc.dispose(this.cell.dropoutMask);\n this.cell.dropoutMask = null;\n }\n if (this.cell.recurrentDropoutMask != null) {\n tfc.dispose(this.cell.recurrentDropoutMask);\n this.cell.recurrentDropoutMask = null;\n }\n const mask = kwargs == null ? null : kwargs['mask'];\n const training = kwargs == null ? null : kwargs['training'];\n const initialState = kwargs == null ? null : kwargs['initialState'];\n return super.call(inputs, { mask, training, initialState });\n });\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n if (config['implmentation'] === 0) {\n config['implementation'] = 1;\n }\n return new cls(config);\n }\n}\n/** @nocollapse */\nLSTM.className = 'LSTM';\nserialization.registerClass(LSTM);\nexport class StackedRNNCells extends RNNCell {\n constructor(args) {\n super(args);\n this.cells = args.cells;\n }\n get stateSize() {\n // States are a flat list in reverse order of the cell stack.\n // This allows perserving the requirement `stack.statesize[0] ===\n // outputDim`. 
E.g., states of a 2-layer LSTM would be `[h2, c2, h1, c1]`,\n // assuming one LSTM has states `[h, c]`.\n const stateSize = [];\n for (const cell of this.cells.slice().reverse()) {\n if (Array.isArray(cell.stateSize)) {\n stateSize.push(...cell.stateSize);\n }\n else {\n stateSize.push(cell.stateSize);\n }\n }\n return stateSize;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n inputs = inputs;\n let states = inputs.slice(1);\n // Recover per-cell states.\n const nestedStates = [];\n for (const cell of this.cells.slice().reverse()) {\n if (Array.isArray(cell.stateSize)) {\n nestedStates.push(states.splice(0, cell.stateSize.length));\n }\n else {\n nestedStates.push(states.splice(0, 1));\n }\n }\n nestedStates.reverse();\n // Call the cells in order and store the returned states.\n const newNestedStates = [];\n let callInputs;\n for (let i = 0; i < this.cells.length; ++i) {\n const cell = this.cells[i];\n states = nestedStates[i];\n // TODO(cais): Take care of constants.\n if (i === 0) {\n callInputs = [inputs[0]].concat(states);\n }\n else {\n callInputs = [callInputs[0]].concat(states);\n }\n callInputs = cell.call(callInputs, kwargs);\n newNestedStates.push(callInputs.slice(1));\n }\n // Format the new states as a flat list in reverse cell order.\n states = [];\n for (const cellStates of newNestedStates.slice().reverse()) {\n states.push(...cellStates);\n }\n return [callInputs[0]].concat(states);\n });\n }\n build(inputShape) {\n if (isArrayOfShapes(inputShape)) {\n // TODO(cais): Take care of input constants.\n // const constantShape = inputShape.slice(1);\n inputShape = inputShape[0];\n }\n inputShape = inputShape;\n let outputDim;\n this.cells.forEach((cell, i) => {\n nameScope(`RNNCell_${i}`, () => {\n // TODO(cais): Take care of input constants.\n cell.build(inputShape);\n if (Array.isArray(cell.stateSize)) {\n outputDim = cell.stateSize[0];\n }\n else {\n outputDim = cell.stateSize;\n }\n inputShape = [inputShape[0], outputDim];\n });\n });\n this.built = true;\n }\n getConfig() {\n const baseConfig = super.getConfig();\n const getCellConfig = (cell) => {\n return {\n 'className': cell.getClassName(),\n 'config': cell.getConfig(),\n };\n };\n const cellConfigs = this.cells.map(getCellConfig);\n const config = { 'cells': cellConfigs };\n return Object.assign({}, baseConfig, config);\n }\n /** @nocollapse */\n static fromConfig(cls, config, customObjects = {}) {\n const cells = [];\n for (const cellConfig of config['cells']) {\n cells.push(deserialize(cellConfig, customObjects));\n }\n return new cls({ cells });\n }\n get trainableWeights() {\n if (!this.trainable) {\n return [];\n }\n const weights = [];\n for (const cell of this.cells) {\n weights.push(...cell.trainableWeights);\n }\n return weights;\n }\n get nonTrainableWeights() {\n const weights = [];\n for (const cell of this.cells) {\n weights.push(...cell.nonTrainableWeights);\n }\n if (!this.trainable) {\n const trainableWeights = [];\n for (const cell of this.cells) {\n trainableWeights.push(...cell.trainableWeights);\n }\n return trainableWeights.concat(weights);\n }\n return weights;\n }\n /**\n * Retrieve the weights of a the model.\n *\n * @returns A flat `Array` of `tf.Tensor`s.\n */\n getWeights() {\n const weights = [];\n for (const cell of this.cells) {\n weights.push(...cell.weights);\n }\n return batchGetValue(weights);\n }\n /**\n * Set the weights of the model.\n *\n * @param weights An `Array` of `tf.Tensor`s with shapes and types matching\n * the output of `getWeights()`.\n */\n setWeights(weights) {\n 
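// Illustrative sketch (not part of the bundled source): StackedRNNCells above is what
// tf.layers.rnn() builds when it is given a stack of cells; the output size is the
// last cell's units:
const tf = require('@tensorflow/tfjs');
const model = tf.sequential();
model.add(tf.layers.rnn({
  cell: tf.layers.stackedRNNCells({
    cells: [tf.layers.lstmCell({ units: 8 }), tf.layers.lstmCell({ units: 4 })],
  }),
  inputShape: [6, 3],
}));
console.log(model.predict(tf.ones([2, 6, 3])).shape);   // [2, 4]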
const tuples = [];\n for (const cell of this.cells) {\n const numParams = cell.weights.length;\n const inputWeights = weights.splice(numParams);\n for (let i = 0; i < cell.weights.length; ++i) {\n tuples.push([cell.weights[i], inputWeights[i]]);\n }\n }\n batchSetValue(tuples);\n }\n}\n/** @nocollapse */\nStackedRNNCells.className = 'StackedRNNCells';\nserialization.registerClass(StackedRNNCells);\nexport function generateDropoutMask(args) {\n const { ones, rate, training = false, count = 1 } = args;\n const droppedInputs = () => K.dropout(ones(), rate);\n const createMask = () => K.inTrainPhase(droppedInputs, ones, training);\n // just in case count is provided with null or undefined\n if (!count || count <= 1) {\n return tfc.keep(createMask().clone());\n }\n const masks = Array(count).fill(undefined).map(createMask);\n return masks.map(m => tfc.keep(m.clone()));\n}\n//# sourceMappingURL=recurrent.js.map", "/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nvar __rest = (this && this.__rest) || function (s, e) {\n var t = {};\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\n t[p] = s[p];\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\n t[p[i]] = s[p[i]];\n }\n return t;\n};\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { util } from '@tensorflow/tfjs-core';\nimport * as K from '../backend/tfjs_backend';\nimport { checkDataFormat, checkPaddingMode } from '../common';\nimport { InputSpec } from '../engine/topology';\nimport { AttributeError, NotImplementedError, ValueError } from '../errors';\nimport { Initializer } from '../initializers';\nimport { convOutputLength, normalizeArray } from '../utils/conv_utils';\nimport { assertPositiveInteger } from '../utils/generic_utils';\nimport { getExactlyOneShape } from '../utils/types_utils';\nimport { generateDropoutMask, LSTMCell, RNN, RNNCell } from './recurrent';\nclass ConvRNN2DCell extends RNNCell {\n}\n/**\n * Base class for convolutional-recurrent layers.\n */\nclass ConvRNN2D extends RNN {\n constructor(args) {\n if (args.unroll) {\n throw new NotImplementedError('Unrolling is not possible with convolutional RNNs.');\n }\n if (Array.isArray(args.cell)) {\n throw new NotImplementedError('It is not possible at the moment to stack convolutional cells.');\n }\n super(args);\n this.inputSpec = [new InputSpec({ ndim: 5 })];\n }\n call(inputs, kwargs) {\n return tfc.tidy(() => {\n if (this.cell.dropoutMask != null) {\n tfc.dispose(this.cell.dropoutMask);\n this.cell.dropoutMask = null;\n }\n if (this.cell.recurrentDropoutMask != null) {\n tfc.dispose(this.cell.recurrentDropoutMask);\n this.cell.recurrentDropoutMask = null;\n }\n if (kwargs && kwargs['constants']) {\n throw new ValueError('ConvRNN2D cell does not support constants');\n }\n const mask = kwargs == null ? null : kwargs['mask'];\n const training = kwargs == null ? null : kwargs['training'];\n const initialState = kwargs == null ? 
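// Illustrative sketch (not part of the bundled source): generateDropoutMask() above
// backs the `dropout` and `recurrentDropout` options of the RNN layers; the same masks
// are reused for every timestep of a sequence and only apply while training:
const tf = require('@tensorflow/tfjs');
const lstm = tf.layers.lstm({
  units: 16,
  dropout: 0.25,            // mask applied to the gate inputs
  recurrentDropout: 0.25,   // mask applied to the previous hidden state
  inputShape: [10, 8],
});
const model = tf.sequential({ layers: [lstm, tf.layers.dense({ units: 1 })] });
console.log(model.predict(tf.ones([2, 10, 8])).shape);   // [2, 1] (masks inactive at inference)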
null : kwargs['initialState'];\n return super.call(inputs, { mask, training, initialState });\n });\n }\n computeOutputShape(inputShape) {\n let outShape = this.computeSingleOutputShape(inputShape);\n if (!this.returnSequences) {\n outShape = [outShape[0], ...outShape.slice(2)];\n }\n if (this.returnState) {\n outShape =\n [outShape, ...Array(2).fill([inputShape[0], ...outShape.slice(-3)])];\n }\n return outShape;\n }\n getInitialState(inputs) {\n return tfc.tidy(() => {\n const { stateSize } = this.cell;\n const inputShape = inputs.shape;\n const outputShape = this.computeSingleOutputShape(inputShape);\n const stateShape = [outputShape[0], ...outputShape.slice(2)];\n const initialState = tfc.zeros(stateShape);\n if (Array.isArray(stateSize)) {\n return Array(stateSize.length).fill(initialState);\n }\n return [initialState];\n });\n }\n resetStates(states, training = false) {\n tfc.tidy(() => {\n if (!this.stateful) {\n throw new AttributeError('Cannot call resetStates() on an RNN Layer that is not stateful.');\n }\n const inputShape = this.inputSpec[0].shape;\n const outputShape = this.computeSingleOutputShape(inputShape);\n const stateShape = [outputShape[0], ...outputShape.slice(2)];\n const batchSize = inputShape[0];\n if (batchSize == null) {\n throw new ValueError('If an RNN is stateful, it needs to know its batch size. Specify ' +\n 'the batch size of your input tensors: \\n' +\n '- If using a Sequential model, specify the batch size by ' +\n 'passing a `batchInputShape` option to your first layer.\\n' +\n '- If using the functional API, specify the batch size by ' +\n 'passing a `batchShape` option to your Input layer.');\n }\n // Initialize state if null.\n if (this.getStates() == null) {\n if (Array.isArray(this.cell.stateSize)) {\n this.states_ = this.cell.stateSize.map(() => tfc.zeros(stateShape));\n }\n else {\n this.states_ = [tfc.zeros(stateShape)];\n }\n }\n else if (states == null) {\n // Dispose old state tensors.\n tfc.dispose(this.states_);\n // For stateful RNNs, fully dispose kept old states.\n if (this.keptStates != null) {\n tfc.dispose(this.keptStates);\n this.keptStates = [];\n }\n if (Array.isArray(this.cell.stateSize)) {\n this.states_ = this.cell.stateSize.map(() => tfc.zeros(stateShape));\n }\n else {\n this.states_[0] = tfc.zeros(stateShape);\n }\n }\n else {\n if (!Array.isArray(states)) {\n states = [states];\n }\n if (states.length !== this.states_.length) {\n throw new ValueError(`Layer ${this.name} expects ${this.states_.length} state(s), ` +\n `but it received ${states.length} state value(s). Input ` +\n `received: ${states}`);\n }\n if (training) {\n // Store old state tensors for complete disposal later, i.e., during\n // the next no-arg call to this method. 
We do not dispose the old\n // states immediately because that BPTT (among other things) require\n // them.\n this.keptStates.push(this.states_.slice());\n }\n else {\n tfc.dispose(this.states_);\n }\n for (let index = 0; index < this.states_.length; ++index) {\n const value = states[index];\n const expectedShape = stateShape;\n if (!util.arraysEqual(value.shape, expectedShape)) {\n throw new ValueError(`State ${index} is incompatible with layer ${this.name}: ` +\n `expected shape=${expectedShape}, received shape=${value.shape}`);\n }\n this.states_[index] = value;\n }\n }\n this.states_ = this.states_.map(state => tfc.keep(state.clone()));\n });\n }\n computeSingleOutputShape(inputShape) {\n const { dataFormat, filters, kernelSize, padding, strides, dilationRate } = this.cell;\n const isChannelsFirst = dataFormat === 'channelsFirst';\n const h = inputShape[isChannelsFirst ? 3 : 2];\n const w = inputShape[isChannelsFirst ? 4 : 3];\n const hOut = convOutputLength(h, kernelSize[0], padding, strides[0], dilationRate[0]);\n const wOut = convOutputLength(w, kernelSize[1], padding, strides[1], dilationRate[1]);\n const outShape = [\n ...inputShape.slice(0, 2),\n ...(isChannelsFirst ? [filters, hOut, wOut] : [hOut, wOut, filters])\n ];\n return outShape;\n }\n}\n/** @nocollapse */\nConvRNN2D.className = 'ConvRNN2D';\nexport class ConvLSTM2DCell extends LSTMCell {\n constructor(args) {\n const { filters, kernelSize, strides, padding, dataFormat, dilationRate, } = args;\n super(Object.assign({}, args, { units: filters }));\n this.filters = filters;\n assertPositiveInteger(this.filters, 'filters');\n this.kernelSize = normalizeArray(kernelSize, 2, 'kernelSize');\n this.kernelSize.forEach(size => assertPositiveInteger(size, 'kernelSize'));\n this.strides = normalizeArray(strides || 1, 2, 'strides');\n this.strides.forEach(stride => assertPositiveInteger(stride, 'strides'));\n this.padding = padding || 'valid';\n checkPaddingMode(this.padding);\n this.dataFormat = dataFormat || 'channelsLast';\n checkDataFormat(this.dataFormat);\n this.dilationRate = normalizeArray(dilationRate || 1, 2, 'dilationRate');\n this.dilationRate.forEach(rate => assertPositiveInteger(rate, 'dilationRate'));\n }\n build(inputShape) {\n var _a;\n inputShape = getExactlyOneShape(inputShape);\n const channelAxis = this.dataFormat === 'channelsFirst' ? 1 : inputShape.length - 1;\n if (inputShape[channelAxis] == null) {\n throw new ValueError(`The channel dimension of the input should be defined. 
` +\n `Found ${inputShape[channelAxis]}`);\n }\n const inputDim = inputShape[channelAxis];\n const numOfKernels = 4;\n const kernelShape = this.kernelSize.concat([inputDim, this.filters * numOfKernels]);\n this.kernel = this.addWeight('kernel', kernelShape, null, this.kernelInitializer, this.kernelRegularizer, true, this.kernelConstraint);\n const recurrentKernelShape = this.kernelSize.concat([this.filters, this.filters * numOfKernels]);\n this.recurrentKernel = this.addWeight('recurrent_kernel', recurrentKernelShape, null, this.recurrentInitializer, this.recurrentRegularizer, true, this.recurrentConstraint);\n if (this.useBias) {\n let biasInitializer;\n if (this.unitForgetBias) {\n const init = this.biasInitializer;\n const filters = this.filters;\n biasInitializer = new (_a = class CustomInit extends Initializer {\n apply(shape, dtype) {\n const biasI = init.apply([filters]);\n const biasF = tfc.ones([filters]);\n const biasCAndO = init.apply([filters * 2]);\n return K.concatenate([biasI, biasF, biasCAndO]);\n }\n },\n /** @nocollapse */\n _a.className = 'CustomInit',\n _a)();\n }\n else {\n biasInitializer = this.biasInitializer;\n }\n this.bias = this.addWeight('bias', [this.filters * numOfKernels], null, biasInitializer, this.biasRegularizer, true, this.biasConstraint);\n }\n this.built = true;\n }\n call(inputs, kwargs) {\n return tfc.tidy(() => {\n if (inputs.length !== 3) {\n throw new ValueError(`ConvLSTM2DCell expects 3 input Tensors (inputs, h, c), got ` +\n `${inputs.length}.`);\n }\n const training = kwargs['training'] || false;\n const x = inputs[0]; // Current input\n const hTMinus1 = inputs[1]; // Previous memory state.\n const cTMinus1 = inputs[2]; // Previous carry state.\n const numOfKernels = 4;\n if (0 < this.dropout && this.dropout < 1 && this.dropoutMask == null) {\n this.dropoutMask = generateDropoutMask({\n ones: () => tfc.onesLike(x),\n rate: this.dropout,\n training,\n count: numOfKernels\n });\n }\n const dropoutMask = this.dropoutMask;\n const applyDropout = (x, mask, index) => {\n if (!mask || !mask[index]) {\n return x;\n }\n return tfc.mul(mask[index], x);\n };\n let xI = applyDropout(x, dropoutMask, 0);\n let xF = applyDropout(x, dropoutMask, 1);\n let xC = applyDropout(x, dropoutMask, 2);\n let xO = applyDropout(x, dropoutMask, 3);\n if (0 < this.recurrentDropout && this.recurrentDropout < 1 &&\n this.recurrentDropoutMask == null) {\n this.recurrentDropoutMask = generateDropoutMask({\n ones: () => tfc.onesLike(hTMinus1),\n rate: this.recurrentDropout,\n training,\n count: numOfKernels\n });\n }\n const recDropoutMask = this.recurrentDropoutMask;\n let hI = applyDropout(hTMinus1, recDropoutMask, 0);\n let hF = applyDropout(hTMinus1, recDropoutMask, 1);\n let hC = applyDropout(hTMinus1, recDropoutMask, 2);\n let hO = applyDropout(hTMinus1, recDropoutMask, 3);\n const kernelChannelAxis = 3;\n const [kernelI, kernelF, kernelC, kernelO] = tfc.split(this.kernel.read(), numOfKernels, kernelChannelAxis);\n const [biasI, biasF, biasC, biasO] = this.useBias ?\n tfc.split(this.bias.read(), numOfKernels) :\n [null, null, null, null];\n xI = this.inputConv(xI, kernelI, biasI, this.padding);\n xF = this.inputConv(xF, kernelF, biasF, this.padding);\n xC = this.inputConv(xC, kernelC, biasC, this.padding);\n xO = this.inputConv(xO, kernelO, biasO, this.padding);\n const [recKernelI, recKernelF, recKernelC, recKernelO] = tfc.split(this.recurrentKernel.read(), numOfKernels, kernelChannelAxis);\n hI = this.recurrentConv(hI, recKernelI);\n hF = this.recurrentConv(hF, 
recKernelF);\n hC = this.recurrentConv(hC, recKernelC);\n hO = this.recurrentConv(hO, recKernelO);\n const i = this.recurrentActivation.apply(tfc.add(xI, hI));\n const f = this.recurrentActivation.apply(tfc.add(xF, hF));\n const c = tfc.add(tfc.mul(f, cTMinus1), tfc.mul(i, this.activation.apply(tfc.add(xC, hC))));\n const h = tfc.mul(this.recurrentActivation.apply(tfc.add(xO, hO)), this.activation.apply(c));\n return [h, h, c];\n });\n }\n getConfig() {\n const _a = super.getConfig(), { 'units': _ } = _a, baseConfig = __rest(_a, ['units']);\n const config = {\n filters: this.filters,\n kernelSize: this.kernelSize,\n padding: this.padding,\n dataFormat: this.dataFormat,\n dilationRate: this.dilationRate,\n strides: this.strides,\n };\n return Object.assign({}, baseConfig, config);\n }\n inputConv(x, w, b, padding) {\n const out = tfc.conv2d(x, w, this.strides, (padding || 'valid'), this.dataFormat === 'channelsFirst' ? 'NCHW' : 'NHWC', this.dilationRate);\n if (b) {\n return K.biasAdd(out, b, this.dataFormat);\n }\n return out;\n }\n recurrentConv(x, w) {\n const strides = 1;\n return tfc.conv2d(x, w, strides, 'same', this.dataFormat === 'channelsFirst' ? 'NCHW' : 'NHWC');\n }\n}\n/** @nocollapse */\nConvLSTM2DCell.className = 'ConvLSTM2DCell';\ntfc.serialization.registerClass(ConvLSTM2DCell);\nexport class ConvLSTM2D extends ConvRNN2D {\n constructor(args) {\n const cell = new ConvLSTM2DCell(args);\n super(Object.assign({}, args, { cell }));\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls(config);\n }\n}\n/** @nocollapse */\nConvLSTM2D.className = 'ConvLSTM2D';\ntfc.serialization.registerClass(ConvLSTM2D);\n//# sourceMappingURL=convolutional_recurrent.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * TensorFlow.js Layers: Basic Layers.\n */\nimport { any, notEqual, serialization, tidy, transpose, util } from '@tensorflow/tfjs-core';\nimport { getActivation, serializeActivation } from '../activations';\nimport * as K from '../backend/tfjs_backend';\nimport { getConstraint, serializeConstraint } from '../constraints';\nimport { InputSpec, Layer } from '../engine/topology';\nimport { ValueError } from '../errors';\nimport { getInitializer, serializeInitializer } from '../initializers';\nimport { getRegularizer, serializeRegularizer } from '../regularizers';\nimport { assertPositiveInteger, mapActivationToFusedKernel } from '../utils/generic_utils';\nimport { arrayProd, range } from '../utils/math_utils';\nimport { getExactlyOneShape, getExactlyOneTensor } from '../utils/types_utils';\nexport class Dropout extends Layer {\n constructor(args) {\n super(args);\n this.rate = Math.max(Math.min(args.rate, 1), 0);\n // So that the scalar doesn't get tidied up between executions.\n this.noiseShape = args.noiseShape;\n this.seed = args.seed;\n this.supportsMasking = true;\n }\n getNoiseShape(input) {\n if (this.noiseShape == null) {\n return this.noiseShape;\n }\n const inputShape = input.shape;\n const noiseShape = [];\n for (let i = 0; i < this.noiseShape.length; ++i) {\n noiseShape.push(this.noiseShape[i] == null ? 
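// Illustrative sketch (not part of the bundled source), assuming a tfjs build that
// exports tf.layers.convLstm2d(): the ConvLSTM2D wrapper above consumes 5D input
// [batch, timeSteps, rows, cols, channels] and convolves the gates instead of using
// dense kernels:
const tf = require('@tensorflow/tfjs');
const model = tf.sequential();
model.add(tf.layers.convLstm2d({
  filters: 8,
  kernelSize: 3,
  padding: 'same',
  returnSequences: false,
  inputShape: [5, 16, 16, 1],   // [timeSteps, rows, cols, channels]
}));
console.log(model.predict(tf.zeros([2, 5, 16, 16, 1])).shape);   // [2, 16, 16, 8]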
inputShape[i] : this.noiseShape[i]);\n }\n return noiseShape;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n const input = getExactlyOneTensor(inputs);\n if (0 < this.rate && this.rate < 1) {\n const training = kwargs['training'] == null ? false : kwargs['training'];\n const noiseShape = this.getNoiseShape(input);\n const output = K.inTrainPhase(() => K.dropout(input, this.rate, noiseShape, this.seed), () => input, training);\n return output;\n }\n return inputs;\n });\n }\n getConfig() {\n const config = {\n rate: this.rate,\n noiseShape: this.noiseShape,\n seed: this.seed,\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n dispose() {\n return super.dispose();\n }\n}\n/** @nocollapse */\nDropout.className = 'Dropout';\nserialization.registerClass(Dropout);\nexport class SpatialDropout1D extends Dropout {\n constructor(args) {\n super(args);\n this.inputSpec = [{ ndim: 3 }];\n }\n getNoiseShape(input) {\n const inputShape = input.shape;\n return [inputShape[0], 1, inputShape[2]];\n }\n}\n/** @nocollapse */\nSpatialDropout1D.className = 'SpatialDropout1D';\nserialization.registerClass(SpatialDropout1D);\nexport class Dense extends Layer {\n constructor(args) {\n super(args);\n // Default activation: Linear (none).\n this.activation = null;\n this.useBias = true;\n this.kernel = null;\n this.bias = null;\n this.DEFAULT_KERNEL_INITIALIZER = 'glorotNormal';\n this.DEFAULT_BIAS_INITIALIZER = 'zeros';\n if (args.batchInputShape == null && args.inputShape == null &&\n args.inputDim != null) {\n // This logic is copied from Layer's constructor, since we can't\n // do exactly what the Python constructor does for Dense().\n let batchSize = null;\n if (args.batchSize != null) {\n batchSize = args.batchSize;\n }\n this.batchInputShape = [batchSize, args.inputDim];\n }\n this.units = args.units;\n assertPositiveInteger(this.units, 'units');\n this.activation = getActivation(args.activation);\n if (args.useBias != null) {\n this.useBias = args.useBias;\n }\n this.kernelInitializer = getInitializer(args.kernelInitializer || this.DEFAULT_KERNEL_INITIALIZER);\n this.biasInitializer =\n getInitializer(args.biasInitializer || this.DEFAULT_BIAS_INITIALIZER);\n this.kernelConstraint = getConstraint(args.kernelConstraint);\n this.biasConstraint = getConstraint(args.biasConstraint);\n this.kernelRegularizer = getRegularizer(args.kernelRegularizer);\n this.biasRegularizer = getRegularizer(args.biasRegularizer);\n this.activityRegularizer = getRegularizer(args.activityRegularizer);\n this.supportsMasking = true;\n this.inputSpec = [{ minNDim: 2 }];\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const inputLastDim = inputShape[inputShape.length - 1];\n if (this.kernel == null) {\n this.kernel = this.addWeight('kernel', [inputLastDim, this.units], null, this.kernelInitializer, this.kernelRegularizer, true, this.kernelConstraint);\n if (this.useBias) {\n this.bias = this.addWeight('bias', [this.units], null, this.biasInitializer, this.biasRegularizer, true, this.biasConstraint);\n }\n }\n this.inputSpec = [{ minNDim: 2, axes: { [-1]: inputLastDim } }];\n this.built = true;\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const outputShape = inputShape.slice();\n outputShape[outputShape.length - 1] = this.units;\n return outputShape;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n // Dense layer accepts only a 
single input.\n const input = getExactlyOneTensor(inputs);\n const fusedActivationName = mapActivationToFusedKernel(this.activation.getClassName());\n let output;\n if (fusedActivationName != null) {\n output = K.dot(input, this.kernel.read(), fusedActivationName, this.bias ? this.bias.read() : null);\n }\n else {\n output = K.dot(input, this.kernel.read());\n if (this.bias != null) {\n output = K.biasAdd(output, this.bias.read());\n }\n if (this.activation != null) {\n output = this.activation.apply(output);\n }\n }\n return output;\n });\n }\n getConfig() {\n const config = {\n units: this.units,\n activation: serializeActivation(this.activation),\n useBias: this.useBias,\n kernelInitializer: serializeInitializer(this.kernelInitializer),\n biasInitializer: serializeInitializer(this.biasInitializer),\n kernelRegularizer: serializeRegularizer(this.kernelRegularizer),\n biasRegularizer: serializeRegularizer(this.biasRegularizer),\n activityRegularizer: serializeRegularizer(this.activityRegularizer),\n kernelConstraint: serializeConstraint(this.kernelConstraint),\n biasConstraint: serializeConstraint(this.biasConstraint)\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nDense.className = 'Dense';\nserialization.registerClass(Dense);\nexport class Flatten extends Layer {\n constructor(args) {\n args = args || {};\n super(args);\n this.inputSpec = [{ minNDim: 3 }];\n this.dataFormat = args.dataFormat;\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n for (const dim of inputShape.slice(1)) {\n if (dim == null) {\n throw new ValueError(`The shape of the input to \"Flatten\" is not fully defined ` +\n `(got ${inputShape.slice(1)}). Make sure to pass a complete ` +\n `\"input_shape\" or \"batch_input_shape\" argument to the first ` +\n `layer in your model.`);\n }\n }\n return [inputShape[0], arrayProd(inputShape, 1)];\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n let input = getExactlyOneTensor(inputs);\n if (this.dataFormat === 'channelsFirst' && input.rank > 1) {\n const permutation = [0];\n for (let i = 2; i < input.rank; ++i) {\n permutation.push(i);\n }\n permutation.push(1);\n input = input.transpose(permutation);\n }\n return K.batchFlatten(input);\n });\n }\n getConfig() {\n const config = {};\n if (this.dataFormat != null) {\n config['dataFormat'] = this.dataFormat;\n }\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nFlatten.className = 'Flatten';\nserialization.registerClass(Flatten);\nexport class Activation extends Layer {\n constructor(args) {\n super(args);\n this.supportsMasking = true;\n this.activation = getActivation(args.activation);\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n const input = getExactlyOneTensor(inputs);\n return this.activation.apply(input);\n });\n }\n getConfig() {\n const config = { activation: serializeActivation(this.activation) };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nActivation.className = 'Activation';\nserialization.registerClass(Activation);\nexport class RepeatVector extends Layer {\n constructor(args) {\n super(args);\n this.n = args.n;\n this.inputSpec = [{ ndim: 2 }];\n }\n computeOutputShape(inputShape) {\n return [inputShape[0], this.n, inputShape[1]];\n }\n call(inputs, kwargs) {\n 
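// Illustrative sketch (not part of the bundled source): Dense, Dropout and Flatten
// above are the building blocks behind a typical classifier head:
const tf = require('@tensorflow/tfjs');
const model = tf.sequential();
model.add(tf.layers.flatten({ inputShape: [4, 4] }));            // -> [batch, 16]
model.add(tf.layers.dense({ units: 10, activation: 'relu' }));
model.add(tf.layers.dropout({ rate: 0.5 }));                     // active only when training
model.add(tf.layers.dense({ units: 2, activation: 'softmax' }));
console.log(model.predict(tf.ones([3, 4, 4])).shape);            // [3, 2]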
return tidy(() => {\n inputs = getExactlyOneTensor(inputs);\n return K.repeat(inputs, this.n);\n });\n }\n getConfig() {\n const config = {\n n: this.n,\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nRepeatVector.className = 'RepeatVector';\nserialization.registerClass(RepeatVector);\nexport class Reshape extends Layer {\n constructor(args) {\n super(args);\n this.targetShape = args.targetShape;\n // Make sure that all unknown dimensions are represented as `null`.\n for (let i = 0; i < this.targetShape.length; ++i) {\n if (this.isUnknown(this.targetShape[i])) {\n this.targetShape[i] = null;\n }\n }\n }\n isUnknown(dim) {\n return dim < 0 || dim == null;\n }\n /**\n * Finds and replaces a missing dimension in output shape.\n *\n * This is a near direct port of the internal Numpy function\n * `_fix_unknown_dimension` in `numpy/core/src/multiarray/shape.c`.\n *\n * @param inputShape: Original shape of array begin reshape.\n * @param outputShape: Target shape of the array, with at most a single\n * `null` or negative number, which indicates an underdetermined dimension\n * that should be derived from `inputShape` and the known dimensions of\n * `outputShape`.\n * @returns: The output shape with `null` replaced with its computed value.\n * @throws: ValueError: If `inputShape` and `outputShape` do not match.\n */\n fixUnknownDimension(inputShape, outputShape) {\n const errorMsg = 'Total size of new array must be unchanged.';\n const finalShape = outputShape.slice();\n let known = 1;\n let unknown = null;\n for (let i = 0; i < finalShape.length; ++i) {\n const dim = finalShape[i];\n if (this.isUnknown(dim)) {\n if (unknown === null) {\n unknown = i;\n }\n else {\n throw new ValueError('Can only specifiy one unknown dimension.');\n }\n }\n else {\n known *= dim;\n }\n }\n const originalSize = arrayProd(inputShape);\n if (unknown !== null) {\n if (known === 0 || originalSize % known !== 0) {\n throw new ValueError(errorMsg);\n }\n finalShape[unknown] = originalSize / known;\n }\n else if (originalSize !== known) {\n throw new ValueError(errorMsg);\n }\n return finalShape;\n }\n computeOutputShape(inputShape) {\n let anyUnknownDims = false;\n for (let i = 0; i < inputShape.length; ++i) {\n if (this.isUnknown(inputShape[i])) {\n anyUnknownDims = true;\n break;\n }\n }\n if (anyUnknownDims) {\n return inputShape.slice(0, 1).concat(this.targetShape);\n }\n else {\n return inputShape.slice(0, 1).concat(this.fixUnknownDimension(inputShape.slice(1), this.targetShape));\n }\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n const input = getExactlyOneTensor(inputs);\n const inputShape = input.shape;\n const outputShape = inputShape.slice(0, 1).concat(this.fixUnknownDimension(inputShape.slice(1), this.targetShape));\n return input.reshape(outputShape);\n });\n }\n getConfig() {\n const config = {\n targetShape: this.targetShape,\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nReshape.className = 'Reshape';\nserialization.registerClass(Reshape);\nexport class Permute extends Layer {\n constructor(args) {\n super(args);\n if (args.dims == null) {\n throw new Error('Required configuration field `dims` is missing during Permute ' +\n 'constructor call.');\n }\n if (!Array.isArray(args.dims)) {\n throw new Error('Permute constructor requires `dims` to be an Array, but received ' +\n `${args.dims} instead.`);\n }\n 
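// Illustrative sketch (not part of the bundled source): Reshape above infers a single
// unknown (null or negative) target dimension via fixUnknownDimension(), and Permute
// reorders the non-batch axes:
const tf = require('@tensorflow/tfjs');
const x = tf.ones([2, 3, 4]);
const reshaped = tf.layers.reshape({ targetShape: [4, null] }).apply(x);
console.log(reshaped.shape);   // [2, 4, 3] (the null is resolved to 12 / 4 = 3)
const permuted = tf.layers.permute({ dims: [2, 1] }).apply(x);
console.log(permuted.shape);   // [2, 4, 3]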
// Check the validity of the permutation indices.\n const expectedSortedIndices = range(1, args.dims.length + 1);\n if (!util.arraysEqual(args.dims.slice().sort(), expectedSortedIndices)) {\n throw new Error('Invalid permutation `dims`: ' + JSON.stringify(args.dims) +\n ' `dims` must contain consecutive integers starting from 1.');\n }\n this.dims = args.dims;\n this.dimsIncludingBatch = [0].concat(this.dims);\n this.inputSpec = [new InputSpec({ ndim: this.dims.length + 1 })];\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const outputShape = inputShape.slice();\n this.dims.forEach((dim, i) => {\n outputShape[i + 1] = inputShape[dim];\n });\n return outputShape;\n }\n call(inputs, kwargs) {\n return transpose(getExactlyOneTensor(inputs), this.dimsIncludingBatch);\n }\n getConfig() {\n const config = {\n dims: this.dims,\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nPermute.className = 'Permute';\nserialization.registerClass(Permute);\nexport class Masking extends Layer {\n constructor(args) {\n super(args == null ? {} : args);\n this.supportsMasking = true;\n if (args != null) {\n this.maskValue = args.maskValue == null ? 0 : args.maskValue;\n }\n else {\n this.maskValue = 0;\n }\n }\n computeOutputShape(inputShape) {\n return inputShape;\n }\n getConfig() {\n const baseConfig = super.getConfig();\n const config = { maskValue: this.maskValue };\n Object.assign(config, baseConfig);\n return config;\n }\n computeMask(inputs, mask) {\n const input = getExactlyOneTensor(inputs);\n const axis = -1;\n return any(notEqual(input, this.maskValue), axis);\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n const input = getExactlyOneTensor(inputs);\n const axis = -1;\n const keepDims = true;\n const booleanMask = any(notEqual(input, this.maskValue), axis, keepDims);\n const output = input.mul(booleanMask.asType(input.dtype));\n return output;\n });\n }\n}\n/** @nocollapse */\nMasking.className = 'Masking';\nserialization.registerClass(Masking);\n//# sourceMappingURL=core.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * TensorFlow.js Layers: Embedding Layer.\n *\n * Original source: keras/constraints.py\n */\nimport { notEqual, serialization, tidy, zerosLike } from '@tensorflow/tfjs-core';\nimport * as K from '../backend/tfjs_backend';\nimport { getConstraint, serializeConstraint } from '../constraints';\nimport { Layer } from '../engine/topology';\nimport { ValueError } from '../errors';\nimport { getInitializer, serializeInitializer } from '../initializers';\nimport { getRegularizer, serializeRegularizer } from '../regularizers';\nimport * as generic_utils from '../utils/generic_utils';\nimport { getExactlyOneShape, getExactlyOneTensor } from '../utils/types_utils';\nexport class Embedding extends Layer {\n constructor(args) {\n super(args);\n this.embeddings = null;\n this.DEFAULT_EMBEDDINGS_INITIALIZER = 'randomUniform';\n if (args.batchInputShape == null && args.inputShape == null) {\n // Porting Note: This logic is copied from Layer's constructor, since we\n // can't do exactly what the Python constructor does for Embedding().\n // Specifically, the super constructor can not be 
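// Illustrative sketch (not part of the bundled source): Masking above emits a mask for
// timesteps whose features all equal maskValue, so downstream RNN layers can skip the
// padded steps:
const tf = require('@tensorflow/tfjs');
const model = tf.sequential();
model.add(tf.layers.masking({ maskValue: 0, inputShape: [4, 2] }));
model.add(tf.layers.lstm({ units: 3 }));
const padded = tf.tensor3d([[[1, 2], [3, 4], [0, 0], [0, 0]]]);   // last two steps are padding
console.log(model.predict(padded).shape);                          // [1, 3]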
called after the\n // mutation of the `config` argument.\n let batchSize = null;\n if (args.batchSize != null) {\n batchSize = args.batchSize;\n }\n if (args.inputLength == null) {\n // Fix super-constructor to what it would have done if\n // 'config.inputShape' were (None, )\n this.batchInputShape = [batchSize, null];\n }\n else {\n // Fix super-constructor to what it would have done if\n // 'config.inputShape' were (config.inputLength, )\n this.batchInputShape =\n [batchSize].concat(generic_utils.toList(args.inputLength));\n }\n }\n this.inputDim = args.inputDim;\n generic_utils.assertPositiveInteger(this.inputDim, 'inputDim');\n this.outputDim = args.outputDim;\n generic_utils.assertPositiveInteger(this.outputDim, 'outputDim');\n this.embeddingsInitializer = getInitializer(args.embeddingsInitializer || this.DEFAULT_EMBEDDINGS_INITIALIZER);\n this.embeddingsRegularizer = getRegularizer(args.embeddingsRegularizer);\n this.activityRegularizer = getRegularizer(args.activityRegularizer);\n this.embeddingsConstraint = getConstraint(args.embeddingsConstraint);\n this.maskZero = args.maskZero;\n this.supportsMasking = args.maskZero;\n this.inputLength = args.inputLength;\n }\n build(inputShape) {\n this.embeddings = this.addWeight('embeddings', [this.inputDim, this.outputDim], this.dtype, this.embeddingsInitializer, this.embeddingsRegularizer, true, this.embeddingsConstraint);\n this.built = true;\n }\n // Override warnOnIncompatibleInputShape because an embedding layer allows\n // the input to have varying ranks.\n warnOnIncompatibleInputShape(inputShape) { }\n computeMask(inputs, mask) {\n return tidy(() => {\n if (!this.maskZero) {\n return null;\n }\n else {\n inputs = getExactlyOneTensor(inputs);\n return notEqual(inputs, zerosLike(inputs));\n }\n });\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n if (this.inputLength == null) {\n return [...inputShape, this.outputDim];\n }\n // inputLength can be an array if input is 3D or higher.\n const inLens = generic_utils.toList(this.inputLength);\n if (inLens.length !== inputShape.length - 1) {\n throw new ValueError(`\"inputLength\" is ${this.inputLength}, but received ` +\n `input shape has shape ${inputShape}`);\n }\n else {\n let i = 0;\n for (let k = 0; k < inLens.length; ++k) {\n const s1 = inLens[k];\n const s2 = inputShape[k + 1];\n if ((s1 != null) && (s2 != null) && (s1 !== s2)) {\n throw new ValueError(`\"inputLength\" is ${this.inputLength}, but received ` +\n `input shape has shape ${inputShape}`);\n }\n else if (s1 == null) {\n inLens[i] = s2;\n }\n i++;\n }\n }\n return [inputShape[0], ...inLens, this.outputDim];\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n // Embedding layer accepts only a single input.\n let input = getExactlyOneTensor(inputs);\n if (input.dtype !== 'int32') {\n input = K.cast(input, 'int32');\n }\n const output = K.gather(this.embeddings.read(), input.as1D());\n return output.reshape(getExactlyOneShape(this.computeOutputShape(input.shape)));\n });\n }\n getConfig() {\n const config = {\n inputDim: this.inputDim,\n outputDim: this.outputDim,\n embeddingsInitializer: serializeInitializer(this.embeddingsInitializer),\n embeddingsRegularizer: serializeRegularizer(this.embeddingsRegularizer),\n activityRegularizer: serializeRegularizer(this.activityRegularizer),\n embeddingsConstraint: serializeConstraint(this.embeddingsConstraint),\n maskZero: this.maskZero,\n inputLength: this.inputLength\n };\n const baseConfig = 
super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nEmbedding.className = 'Embedding';\nserialization.registerClass(Embedding);\n//# sourceMappingURL=embeddings.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * TensorFlow.js Layers: Merge Layers.\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { serialization, tidy, util } from '@tensorflow/tfjs-core';\nimport * as K from '../backend/tfjs_backend';\nimport { Layer } from '../engine/topology';\nimport { NotImplementedError, ValueError } from '../errors';\nimport { l2Normalize } from '../losses';\nimport * as generic_utils from '../utils/generic_utils';\nimport * as mathUtils from '../utils/math_utils';\nimport { getExactlyOneShape } from '../utils/types_utils';\n/**\n * Generic Merge layer for element-wise merge functions.\n *\n * Used to implement `Sum`, `Average`, `Concatenate`, etc.\n */\nexport class Merge extends Layer {\n constructor(args) {\n super(args || {});\n this.supportsMasking = true;\n }\n /**\n * Logic for merging multiple tensors, to be overridden by subclasses.\n * @param inputs\n */\n mergeFunction(inputs) {\n throw new NotImplementedError();\n }\n /**\n * Computes the shape of the result of an elementwise operation.\n *\n * @param shape1: Shape of the first tensor.\n * @param shape2: Shape of the second tensor.\n * @returns Expected output shape when an elementwise operation is carried\n * out on 2 tensors with shapes `shape1` and `shape2`.\n * @throws ValueError: If `shape1` and `shape2` are not compatible for\n * element-wise operations.\n */\n computeElementwiseOpOutputShape(shape1, shape2) {\n if (shape1 == null || shape2 == null) {\n return null;\n }\n else if (shape1.length < shape2.length) {\n return this.computeElementwiseOpOutputShape(shape2, shape1);\n }\n else if (shape2.length === 0) {\n return shape1;\n }\n const outputShape = shape1.slice(0, shape1.length - shape2.length);\n for (let k = 0; k < shape2.length; ++k) {\n const i = shape1[shape1.length - shape2.length + k];\n const j = shape2[k];\n if (i == null || j == null || i < 0 || j < 0) {\n outputShape.push(null);\n }\n else if (i === 1) {\n outputShape.push(j);\n }\n else if (j === 1) {\n outputShape.push(i);\n }\n else {\n if (i !== j) {\n throw new ValueError('Operands could not be broadcast together with shapes ' +\n JSON.stringify(shape1) + ' ' + JSON.stringify(shape2));\n }\n outputShape.push(i);\n }\n }\n return outputShape;\n }\n build(inputShape) {\n // Used purely for shape validation.\n if (Array.isArray(inputShape) && !Array.isArray(inputShape[0])) {\n // Make sure that inputShape is an Array of shape.\n inputShape = [getExactlyOneShape(inputShape)];\n }\n inputShape = inputShape;\n if (inputShape.length < 2) {\n throw new ValueError('A merge layer should be called on an Array of at least 2 inputs.' +\n ` Got ${inputShape.length} input(s).`);\n }\n // Make sure that there is at most one unique batch size among the input\n // shapes.\n let batchSizes = [];\n for (const shape of inputShape) {\n if (shape != null && shape[0] !== null) {\n batchSizes.push(shape[0]);\n }\n }\n batchSizes = generic_utils.unique(batchSizes);\n if (batchSizes.length > 1) {\n throw new ValueError(`Can not merge tensors with different batch sizes. 
` +\n `Got tensors with shapes: ${JSON.stringify(inputShape)}.`);\n }\n let outputShape = inputShape[0] == null ? null : inputShape[0].slice(1);\n for (let i = 1; i < inputShape.length; ++i) {\n const shape = inputShape[i] == null ? null : inputShape[i].slice(1);\n outputShape = this.computeElementwiseOpOutputShape(outputShape, shape);\n }\n // If the inputs have different ranks, we have to reshape them to make them\n // broadcastable.\n const allRanks = inputShape.map(shape => shape.length);\n if (inputShape.indexOf(null) === -1 &&\n generic_utils.unique(allRanks).length === 1) {\n this.reshapeRequired = false;\n }\n else {\n this.reshapeRequired = true;\n }\n }\n call(inputs, kwargs) {\n return tidy(() => {\n inputs = inputs;\n if (this.reshapeRequired) {\n const reshapedInputs = [];\n const inputDims = inputs.map(input => input.rank);\n if (inputDims.indexOf(null) === -1) {\n // If ranks of all inputs are available, we simply expand each of them\n // at axis=1 until all of them have the same rank.\n const maxNDim = mathUtils.max(inputDims);\n for (let x of inputs) {\n const xNDim = x.rank;\n for (let k = 0; k < maxNDim - xNDim; ++k) {\n x = K.expandDims(x, 1);\n }\n reshapedInputs.push(x);\n }\n return this.mergeFunction(reshapedInputs);\n }\n else {\n // Transpose all inputs so that batch size is the last dimension.\n // [batchSize, dim1, dim2, ...] -> [dim1, dim2, ..., batchSize]\n let transposed = false;\n for (const x of inputs) {\n const xNDim = x.rank;\n if (xNDim == null) {\n const xShape = x.shape;\n const batchSize = xShape[0];\n const newShape = xShape.slice(1).concat([batchSize]);\n let xTransposed = x.reshape([batchSize].concat(mathUtils.arrayProd(xShape.slice(1))));\n xTransposed = tfc.transpose(xTransposed, [1, 0]);\n xTransposed = xTransposed.reshape(newShape);\n reshapedInputs.push(xTransposed);\n transposed = true;\n }\n else if (xNDim > 1) {\n const dims = mathUtils.range(1, xNDim).concat([0]);\n reshapedInputs.push(tfc.transpose(x, dims));\n transposed = true;\n }\n else {\n // We don't transpose inputs if they are 1D vectors or scalars.\n reshapedInputs.push(x);\n }\n }\n let y = this.mergeFunction(reshapedInputs);\n const yNDim = y.rank;\n if (transposed) {\n // If inputs have been transposed, we have to transpose the output\n // too.\n if (yNDim == null) {\n const yShape = y.shape;\n const yNDim = yShape.length;\n const batchSize = yShape[yNDim - 1];\n const newShape = [batchSize].concat(yShape.slice(0, yShape.length - 1));\n y = tfc.transpose(y.reshape([-1, batchSize]), [1, 0])\n .reshape(newShape);\n }\n else if (yNDim > 1) {\n const dims = [yNDim - 1].concat(mathUtils.range(0, yNDim - 1));\n y = tfc.transpose(y, dims);\n }\n }\n return y;\n }\n }\n else {\n return this.mergeFunction(inputs);\n }\n });\n }\n computeOutputShape(inputShape) {\n inputShape = inputShape;\n let outputShape;\n if (inputShape[0] == null) {\n outputShape = null;\n }\n else {\n outputShape = inputShape[0].slice(1);\n }\n for (let i = 1; i < inputShape.length; ++i) {\n const shape = inputShape[i] == null ? 
null : inputShape[i].slice(1);\n outputShape = this.computeElementwiseOpOutputShape(outputShape, shape);\n }\n let batchSizes = [];\n for (const shape of inputShape) {\n if (shape != null && shape[0] !== null) {\n batchSizes.push(shape[0]);\n }\n }\n batchSizes = generic_utils.unique(batchSizes);\n if (batchSizes.length === 1) {\n outputShape = batchSizes.concat(outputShape);\n }\n else {\n outputShape = [null].concat(outputShape);\n }\n return outputShape;\n }\n computeMask(inputs, mask) {\n return tfc.tidy(() => {\n if (mask == null) {\n return null;\n }\n if (!Array.isArray(mask)) {\n throw new ValueError('`mask` should be an Array');\n }\n if (!Array.isArray(inputs)) {\n throw new ValueError('`inputs` should be an Array');\n }\n if (mask.length !== inputs.length) {\n throw new ValueError(`The Array 'inputs' and 'mask' are expected to have the same ` +\n `length, but have different lengths ` +\n `(${inputs.length} vs ${mask.length})`);\n }\n if (mask.every(m => m == null)) {\n return null;\n }\n mask = mask.map(m => m == null ? m : tfc.expandDims(m, 0));\n let output = mask[0];\n for (let i = 1; i < mask.length - 1; ++i) {\n output = tfc.logicalAnd(output, mask[i]);\n }\n return output;\n });\n }\n}\nexport class Add extends Merge {\n constructor(args) {\n super(args);\n }\n mergeFunction(inputs) {\n return tidy(() => {\n let output = inputs[0].clone();\n for (let i = 1; i < inputs.length; ++i) {\n output = tfc.add(output, inputs[i]);\n }\n return output;\n });\n }\n}\n/** @nocollapse */\nAdd.className = 'Add';\nserialization.registerClass(Add);\n/**\n * Calculate the element-wise sum of inputs, which all have the same shape.\n *\n * This function can be invoked in three ways.\n *\n * 1. Construct an instance of `Add` layer, by using no input argument\n * or a single configuration argument. The resultant `Add` layer can then\n * be used on `tf.SymbolicTensor`s or `tf.Tensor`s. For example:\n *\n * ```js\n * const addLayer = tf.layers.add();\n *\n * // The layer can be applied to inputs.\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = addLayer.apply([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 2. Invoke directly on an `Array` of `tf.SymbolicTensor`s. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.SymbolicTensor`. For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = tf.layers.add([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 3. Invoke directly on `tf.Tensor`s, i.e., concrete values. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.Tensor` as the result of the computation. 
For\n * example:\n *\n * ```js\n * const input1 = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n * const input2 = tf.tensor2d([10, 20, 30, 40], [2, 2]);\n * tf.layers.add([input1, input2]).print();\n * // Gives [[11, 22], [33, 44]].\n *\n */\nexport function add(config) {\n if (Array.isArray(config)) {\n const layer = new Add({});\n return layer.apply(config);\n }\n else {\n return new Add(config);\n }\n}\nexport class Multiply extends Merge {\n constructor(args) {\n super(args);\n }\n mergeFunction(inputs) {\n return tidy(() => {\n let output = inputs[0].clone();\n for (let i = 1; i < inputs.length; ++i) {\n output = tfc.mul(output, inputs[i]);\n }\n return output;\n });\n }\n}\n/** @nocollapse */\nMultiply.className = 'Multiply';\nserialization.registerClass(Multiply);\n/**\n * Calculate the element-wise product of inputs, which all have the same shape.\n *\n * This function can be invoked in three ways.\n *\n * 1. Construct an instance of `Multiply` layer, by using no input argument\n * or a single configuration argument. The resultant `Multiply` layer can\n * then be used on `tf.SymbolicTensor`s or `tf.Tensor`s. For example:\n *\n * ```js\n * const multiplyLayer = tf.layers.multiply();\n *\n * // The layer can be applied to inputs.\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = multiplyLayer.apply([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 2. Invoke directly on an `Array` of `tf.SymbolicTensor`s. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.SymbolicTensor`. For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = tf.layers.multiply([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 3. Invoke directly on `tf.Tensor`s, i.e., concrete values. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.Tensor` as the result of the computation. For\n * example:\n *\n * ```js\n * const input1 = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n * const input2 = tf.tensor2d([10, 20, 30, 40], [2, 2]);\n * tf.layers.multiply([input1, input2]).print();\n * // Gives [[10, 40], [90, 160]].\n *\n */\nexport function multiply(config) {\n if (Array.isArray(config)) {\n const layer = new Multiply({});\n return layer.apply(config);\n }\n else {\n return new Multiply(config);\n }\n}\nexport class Average extends Merge {\n constructor(args) {\n super(args);\n }\n mergeFunction(inputs) {\n return tidy(() => {\n let output = inputs[0].clone();\n for (let i = 1; i < inputs.length; ++i) {\n output = tfc.add(output, inputs[i]);\n }\n return tfc.mul(1 / inputs.length, output);\n });\n }\n}\n/** @nocollapse */\nAverage.className = 'Average';\nserialization.registerClass(Average);\n/**\n * Calculate the element-wise arithmetic mean of inputs, which all have the same\n * shape.\n *\n * This function can be invoked in three ways.\n *\n * 1. Construct an instance of `Average` layer, by using no input argument\n * or a single configuration argument. The resultant `Average` layer can then\n * be used on `tf.SymbolicTensor`s or `tf.Tensor`s. 
For example:\n *\n * ```js\n * const averageLayer = tf.layers.average();\n *\n * // The layer can be applied to inputs.\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = averageLayer.apply([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 2. Invoke directly on an `Array` of `tf.SymbolicTensor`s. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.SymbolicTensor`. For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = tf.layers.average([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 3. Invoke directly on `tf.Tensor`s, i.e., concrete values. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.Tensor` as the result of the computation. For\n * example:\n *\n * ```js\n * const input1 = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n * const input2 = tf.tensor2d([10, 20, 30, 40], [2, 2]);\n * tf.layers.average([input1, input2]).print();\n * // Gives [[5.5, 11], [16.5, 22]].\n *\n */\nexport function average(config) {\n if (Array.isArray(config)) {\n const layer = new Average({});\n return layer.apply(config);\n }\n else {\n return new Average(config);\n }\n}\nexport class Maximum extends Merge {\n constructor(args) {\n super(args);\n }\n mergeFunction(inputs) {\n return tidy(() => {\n let output = inputs[0];\n for (let i = 1; i < inputs.length; ++i) {\n output = tfc.maximum(output, inputs[i]);\n }\n return output;\n });\n }\n}\n/** @nocollapse */\nMaximum.className = 'Maximum';\nserialization.registerClass(Maximum);\n/**\n * Calculate the element-wise maximum of inputs, which all have the same shape.\n *\n * This function can be invoked in three ways.\n *\n * 1. Construct an instance of `Maximum` layer, by using no input argument\n * or a single configuration argument. The resultant `Maximum` layer can then\n * be used on `tf.SymbolicTensor`s or `tf.Tensor`s. For example:\n *\n * ```js\n * const maximumLayer = tf.layers.maximum();\n *\n * // The layer can be applied to inputs.\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = maximumLayer.apply([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 2. Invoke directly on an `Array` of `tf.SymbolicTensor`s. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.SymbolicTensor`. For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = tf.layers.maximum([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 3. Invoke directly on `tf.Tensor`s, i.e., concrete values. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.Tensor` as the result of the computation. 
For\n * example:\n *\n * ```js\n * const input1 = tf.tensor2d([1, 20, 3, 40], [2, 2]);\n * const input2 = tf.tensor2d([10, 2, 30, 4], [2, 2]);\n * tf.layers.maximum([input1, input2]).print();\n * // Gives [[10, 20], [30, 40]].\n *\n */\nexport function maximum(config) {\n if (Array.isArray(config)) {\n const layer = new Maximum({});\n return layer.apply(config);\n }\n else {\n return new Maximum(config);\n }\n}\nexport class Minimum extends Merge {\n constructor(args) {\n super(args);\n }\n mergeFunction(inputs) {\n return tidy(() => {\n let output = inputs[0];\n for (let i = 1; i < inputs.length; ++i) {\n output = tfc.minimum(output, inputs[i]);\n }\n return output;\n });\n }\n}\n/** @nocollapse */\nMinimum.className = 'Minimum';\nserialization.registerClass(Minimum);\n/**\n * Calculate the element-wise minimum of inputs, which all have the same shape.\n *\n * This function can be invoked in three ways.\n *\n * 1. Construct an instance of `Minimum` layer, by using no input argument\n * or a single configuration argument. The resultant `Minimum` layer can then\n * be used on `tf.SymbolicTensor`s or `tf.Tensor`s. For example:\n *\n * ```js\n * const minimumLayer = tf.layers.minimum();\n *\n * // The layer can be applied to inputs.\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = minimumLayer.apply([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 2. Invoke directly on an `Array` of `tf.SymbolicTensor`s. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.SymbolicTensor`. For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = tf.layers.minimum([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 3. Invoke directly on `tf.Tensor`s, i.e., concrete values. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.Tensor` as the result of the computation. For\n * example:\n *\n * ```js\n * const input1 = tf.tensor2d([1, 20, 3, 40], [2, 2]);\n * const input2 = tf.tensor2d([10, 2, 30, 4], [2, 2]);\n * tf.layers.minimum([input1, input2]).print();\n * // Gives [[1, 2], [3, 4]].\n *\n */\nexport function minimum(config) {\n if (Array.isArray(config)) {\n const layer = new Minimum({});\n return layer.apply(config);\n }\n else {\n return new Minimum(config);\n }\n}\nexport class Concatenate extends Merge {\n constructor(args) {\n super(args);\n this.DEFAULT_AXIS = -1;\n if (args == null) {\n args = {};\n }\n this.axis = args.axis == null ? 
this.DEFAULT_AXIS : args.axis;\n this.supportsMasking = true;\n this.reshapeRequired = false;\n }\n build(inputShape) {\n // Used purely for shape validation.]\n if (!(Array.isArray(inputShape) && Array.isArray(inputShape[0])) ||\n inputShape.length === 1) {\n throw new ValueError('A `Concatenate` layer should be called on a list of at least 2 ' +\n 'inputs');\n }\n inputShape = inputShape;\n let allNoneShape = true;\n for (const shape of inputShape) {\n if (shape != null) {\n allNoneShape = false;\n break;\n }\n }\n if (allNoneShape) {\n return;\n }\n const shapeSet = [];\n for (let i = 0; i < inputShape.length; ++i) {\n const shapeWithoutConcatAxis = inputShape[i].slice();\n shapeWithoutConcatAxis.splice(this.axis, 1);\n let exists = false;\n for (const shape of shapeSet) {\n if (util.arraysEqual(shape, shapeWithoutConcatAxis)) {\n exists = true;\n break;\n }\n }\n if (!exists) {\n shapeSet.push(shapeWithoutConcatAxis);\n }\n }\n if (shapeSet.length > 1) {\n throw new ValueError('A `Concatenate` layer requires inputs with matching shapes ' +\n 'except for the concat axis. Got input shapes: ' +\n JSON.stringify(inputShape));\n }\n }\n mergeFunction(inputs) {\n return tidy(() => {\n return K.concatenate(inputs, this.axis);\n });\n }\n computeOutputShape(inputShape) {\n if (!(Array.isArray(inputShape) && Array.isArray(inputShape[0]))) {\n throw new ValueError('A `Concatenate` layer should be called on a list of inputs.');\n }\n const inputShapes = inputShape;\n const outputShape = inputShapes[0].slice();\n const axis = this.axis < 0 ? outputShape.length + this.axis : this.axis;\n // Porting Note: the line above is because TypeScript doesn't support\n // negative indices.\n for (const shape of inputShapes.slice(1)) {\n if (outputShape[axis] == null || shape[axis] == null) {\n outputShape[axis] = null;\n break;\n }\n outputShape[axis] += shape[axis];\n }\n return outputShape;\n }\n computeMask(inputs, mask) {\n if (mask == null) {\n return null;\n }\n if (!Array.isArray(mask)) {\n throw new ValueError('`mask` should be an array for Concatenate');\n }\n if (!Array.isArray(inputs)) {\n throw new ValueError('`inputs` should be an array for Concatenate');\n }\n if (mask.length !== inputs.length) {\n throw new ValueError(`Mismatch in the length of mask (${mask.length}) ` +\n `and the legnth of inputs (${inputs.length})`);\n }\n return tfc.tidy(() => {\n let allNullMasks = true;\n mask.forEach(m => {\n if (m != null) {\n allNullMasks = false;\n return;\n }\n });\n if (allNullMasks) {\n return null;\n }\n const outputMasks = [];\n for (let i = 0; i < inputs.length; ++i) {\n if (mask[i] == null) {\n // Input is unmasked. Append all 1's to masks.\n outputMasks.push(tfc.onesLike(inputs[i]).asType('bool'));\n }\n else if (mask[i].rank < inputs[i].rank) {\n // Mask is smaller than the input, expand it.\n outputMasks.push(tfc.expandDims(mask[i], -1));\n }\n else {\n outputMasks.push(mask[i]);\n }\n }\n const concatenatedMasks = tfc.concat(outputMasks, this.axis);\n return tfc.all(concatenatedMasks, -1, false);\n });\n }\n getConfig() {\n const config = {\n 'axis': this.axis,\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nConcatenate.className = 'Concatenate';\nserialization.registerClass(Concatenate);\n/**\n * Concatenate an `Array` of inputs.\n *\n * This function can be invoked in three ways.\n *\n * 1. Construct an instance of `Concatenate` layer, by using no input argument\n * or a single configuration argument. 
The resultant `Concatenate` layer can\n * then be used on `tf.SymbolicTensor`s or `tf.Tensor`s. For example:\n *\n * ```js\n * const concatLayer = tf.layers.concatenate();\n *\n * // The layer can be applied to inputs.\n * const input1 = tf.input({shape: [2, 3]});\n * const input2 = tf.input({shape: [2, 4]});\n * const output = concatLayer.apply([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 7], with the first dimension as the undetermined batch\n * // dimension and the last dimension as the result of concatenating the\n * // last dimensions of the two inputs.\n * ```\n *\n * 2. Invoke directly on an `Array` of `tf.SymbolicTensor`s. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.SymbolicTensor`. For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 3]});\n * const input2 = tf.input({shape: [2, 4]});\n * const output = tf.layers.concatenate([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension and the last dimension as the result of concatenating the\n * // last dimensions of the two inputs.\n * ```\n *\n * 3. Invoke directly on `tf.Tensor`s, i.e., concrete values. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.Tensor` as the result of the computation. For\n * example:\n *\n * ```js\n * const input1 = tf.tensor2d([[1, 2], [3, 4]], [2, 2]);\n * const input2 = tf.tensor2d([[10, 20], [30, 40]], [2, 2]);\n * tf.layers.concatenate([input1, input2]).print();\n * // Gives [[1, 2, 10, 20], [3, 4, 30, 40]].\n *\n */\nexport function concatenate(config) {\n if (Array.isArray(config)) {\n const layer = new Concatenate({});\n return layer.apply(config);\n }\n else {\n return new Concatenate(config);\n }\n}\n/**\n * Interpretable potentially negative axis index.\n *\n * For example, given axis = -1, and dim = 3, this function will return 2.\n *\n * @param axis The axis index, may be a positive, zero or negative integer.\n * @param dim Total number of dimensions, a positive integer.\n * @returns A non-negative axis index equivalent to the input `axis`.\n */\nfunction interpretAxis(axis, dim) {\n while (axis < 0) {\n axis += dim;\n }\n return axis;\n}\nfunction batchDot(x, y, axes) {\n if (x.shape.length > 3 || y.shape.length > 3) {\n throw new NotImplementedError('batchDot is not implemented for tensors of 4D or higher rank yet');\n }\n tfc.util.assert(x.shape.length >= 2, () => `batchDot requires the rank of x to be >= 2, ` +\n `but got ${x.shape.length}`);\n tfc.util.assert(x.shape.length >= 2, () => `batchDot requires the rank of y to be >= 2, ` +\n `but got ${y.shape.length}`);\n if (typeof axes === 'number') {\n axes = [axes, axes];\n }\n if (x.dtype === 'complex64' || y.dtype === 'complex64') {\n throw new NotImplementedError('batchDot is not implemented for complex64-type Tensors yet.');\n }\n const xNDim = x.shape.length;\n const yNDim = y.shape.length;\n if (axes == null) {\n // Behave like batchMatmul by default.\n axes = [xNDim - 1, yNDim - 2];\n }\n const axesArray = axes;\n return tfc.tidy(() => {\n let diff;\n if (xNDim > yNDim) {\n diff = xNDim - yNDim;\n const diffShape = [];\n for (let i = 0; i < diff; ++i) {\n diffShape.push(1);\n }\n y = y.reshape(y.shape.concat(diffShape));\n }\n else if (yNDim > xNDim) {\n diff = yNDim - xNDim;\n const diffShape = [];\n for (let i = 0; i < diff; ++i) {\n diffShape.push(1);\n }\n 
x = x.reshape(x.shape.concat(diffShape));\n }\n else {\n diff = 0;\n }\n let out;\n if (x.shape.length === 2 && y.shape.length === 2) {\n if (axesArray[0] === axesArray[1]) {\n out = x.mul(y).sum(axesArray[0]);\n }\n else {\n out = x.transpose([1, 0]).mul(y).sum(axesArray[1]);\n }\n }\n else {\n const adjX = axesArray[0] !== x.shape.length - 1;\n const adjY = axesArray[1] === y.shape.length - 1;\n out = x.matMul(y, adjX, adjY);\n }\n if (diff > 0) {\n let idx;\n if (xNDim > yNDim) {\n idx = xNDim + yNDim - 3;\n }\n else {\n idx = xNDim - 1;\n }\n const squeezeAxes = [];\n for (let i = idx; i < idx + diff; ++i) {\n squeezeAxes.push(i);\n }\n out = out.squeeze(squeezeAxes);\n }\n if (out.shape.length === 1) {\n out = out.expandDims(1);\n }\n return out;\n });\n}\nexport class Dot extends Merge {\n constructor(args) {\n super(args);\n this.axes = args.axes;\n this.normalize = args.normalize == null ? false : args.normalize;\n this.supportsMasking = true;\n this.reshapeRequired = false;\n }\n build(inputShape) {\n tfc.util.assert(Array.isArray(inputShape) && inputShape.length === 2 &&\n Array.isArray(inputShape[0]) && Array.isArray(inputShape[1]), () => 'A `Dot` layer should be called on a list of exactly 2 inputs.');\n const shape1 = inputShape[0];\n const shape2 = inputShape[1];\n if (shape1.length > 3 || shape2.length > 3) {\n throw new NotImplementedError('Dot layer does not support tensors of 4D or higher rank yet.');\n }\n const axes = this.interpretAxes(shape1, shape2);\n if (shape1[axes[0]] !== shape2[axes[1]]) {\n throw new ValueError(`Dimension incompatibility: ` +\n `${shape1[axes[0]]} !== ${shape2[axes[1]]}`);\n }\n }\n mergeFunction(inputs) {\n if (inputs.length !== 2) {\n throw new ValueError('A `Dot` layer must be called on exactly 2 inputs, ' +\n `but received ${inputs.length} input(s).`);\n }\n let x1 = inputs[0];\n let x2 = inputs[1];\n let axes;\n if (!Array.isArray(this.axes)) {\n axes = [\n interpretAxis(this.axes, x1.shape.length),\n interpretAxis(this.axes, x2.shape.length)\n ];\n }\n else {\n axes = this.axes.map((axis, i) => interpretAxis(axis, inputs[i].shape.length));\n }\n if (this.normalize) {\n x1 = l2Normalize(x1, axes[0]);\n x2 = l2Normalize(x2, axes[1]);\n }\n return batchDot(x1, x2, axes);\n }\n interpretAxes(shape1, shape2) {\n let axes;\n if (!Array.isArray(this.axes)) {\n // `this.axes` is a single integer.\n axes = [\n interpretAxis(this.axes, shape1.length),\n interpretAxis(this.axes, shape2.length)\n ];\n }\n else {\n // `this.axes` is an Array of integers.\n axes = this.axes;\n }\n return axes;\n }\n computeOutputShape(inputShape) {\n tfc.util.assert(Array.isArray(inputShape) && inputShape.length === 2 &&\n Array.isArray(inputShape[0]) && Array.isArray(inputShape[1]), () => 'A `Dot` layer should be called on a list of exactly 2 inputs.');\n const shape1 = inputShape[0].slice();\n const shape2 = inputShape[1].slice();\n if (shape1.length > 3 || shape2.length > 3) {\n throw new NotImplementedError('Dot layer does not support tensors of 4D or higher rank yet.');\n }\n const axes = this.interpretAxes(shape1, shape2);\n shape1.splice(axes[0], 1);\n shape2.splice(axes[1], 1);\n shape2.splice(0, 1);\n const outputShape = shape1.concat(shape2);\n if (outputShape.length === 1) {\n outputShape.push(1);\n }\n return outputShape;\n }\n computeMask(inputs, mask) {\n return null;\n }\n getConfig() {\n const config = {\n 'axes': this.axes,\n 'normalize': this.normalize\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n 
}\n}\n/** @nocollapse */\nDot.className = 'Dot';\nserialization.registerClass(Dot);\n// TODO(cais): Add functional interfaces for the merge layers.\n//# sourceMappingURL=merge.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * TensorFlow.js Layers: Noise Layers.\n */\nimport { greaterEqual, randomUniform, serialization, tidy } from '@tensorflow/tfjs-core';\nimport * as K from '../backend/tfjs_backend';\nimport { Layer } from '../engine/topology';\nimport { getExactlyOneTensor } from '../utils/types_utils';\nexport class GaussianNoise extends Layer {\n constructor(args) {\n super(args);\n this.supportsMasking = true;\n this.stddev = args.stddev;\n }\n computeOutputShape(inputShape) {\n return inputShape;\n }\n getConfig() {\n const baseConfig = super.getConfig();\n const config = { stddev: this.stddev };\n Object.assign(config, baseConfig);\n return config;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n const input = getExactlyOneTensor(inputs);\n const noised = () => K.randomNormal(input.shape, 0, this.stddev).add(input);\n const output = K.inTrainPhase(noised, () => input, kwargs['training'] || false);\n return output;\n });\n }\n}\n/** @nocollapse */\nGaussianNoise.className = 'GaussianNoise';\nserialization.registerClass(GaussianNoise);\nexport class GaussianDropout extends Layer {\n constructor(args) {\n super(args);\n this.supportsMasking = true;\n this.rate = args.rate;\n }\n computeOutputShape(inputShape) {\n return inputShape;\n }\n getConfig() {\n const baseConfig = super.getConfig();\n const config = { rate: this.rate };\n Object.assign(config, baseConfig);\n return config;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n const input = getExactlyOneTensor(inputs);\n if (this.rate > 0 && this.rate < 1) {\n const noised = () => {\n const stddev = Math.sqrt(this.rate / (1 - this.rate));\n return input.mul(K.randomNormal(input.shape, 1, stddev));\n };\n return K.inTrainPhase(noised, () => input, kwargs['training'] || false);\n }\n return input;\n });\n }\n}\n/** @nocollapse */\nGaussianDropout.className = 'GaussianDropout';\nserialization.registerClass(GaussianDropout);\n/**\n * Applies Alpha Dropout to the input.\n *\n * As it is a regularization layer, it is only active at training time.\n *\n * Alpha Dropout is a `Dropout` that keeps mean and variance of inputs\n * to their original values, in order to ensure the self-normalizing property\n * even after this dropout.\n * Alpha Dropout fits well to Scaled Exponential Linear Units\n * by randomly setting activations to the negative saturation value.\n *\n * Arguments:\n * - `rate`: float, drop probability (as with `Dropout`).\n * The multiplicative noise will have\n * standard deviation `sqrt(rate / (1 - rate))`.\n * - `noise_shape`: A 1-D `Tensor` of type `int32`, representing the\n * shape for randomly generated keep/drop flags.\n *\n * Input shape:\n * Arbitrary. 
Use the keyword argument `inputShape`\n * (tuple of integers, does not include the samples axis)\n * when using this layer as the first layer in a model.\n *\n * Output shape:\n * Same shape as input.\n *\n * References:\n * - [Self-Normalizing Neural Networks](https://arxiv.org/abs/1706.02515)\n */\nexport class AlphaDropout extends Layer {\n constructor(args) {\n super(args);\n this.supportsMasking = true;\n this.rate = args.rate;\n this.noiseShape = args.noiseShape;\n }\n _getNoiseShape(inputs) {\n return this.noiseShape || getExactlyOneTensor(inputs).shape;\n }\n computeOutputShape(inputShape) {\n return inputShape;\n }\n getConfig() {\n const baseConfig = super.getConfig();\n const config = { rate: this.rate };\n Object.assign(config, baseConfig);\n return config;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n if (this.rate < 1 && this.rate > 0) {\n const noiseShape = this._getNoiseShape(inputs);\n const droppedInputs = () => {\n const input = getExactlyOneTensor(inputs);\n const alpha = 1.6732632423543772848170429916717;\n const scale = 1.0507009873554804934193349852946;\n const alphaP = -alpha * scale;\n let keptIdx = greaterEqual(randomUniform(noiseShape), this.rate);\n keptIdx = K.cast(keptIdx, 'float32'); // get default dtype.\n // Get affine transformation params.\n const a = ((1 - this.rate) * (1 + this.rate * alphaP ** 2)) ** -0.5;\n const b = -a * alphaP * this.rate;\n // Apply mask.\n const x = input.mul(keptIdx).add(keptIdx.add(-1).mul(alphaP));\n return x.mul(a).add(b);\n };\n return K.inTrainPhase(droppedInputs, () => getExactlyOneTensor(inputs), kwargs['training'] || false);\n }\n return inputs;\n });\n }\n}\n/** @nocollapse */\nAlphaDropout.className = 'AlphaDropout';\nserialization.registerClass(AlphaDropout);\n//# sourceMappingURL=noise.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Normalization layers.\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { moments, serialization, tidy, util } from '@tensorflow/tfjs-core';\nimport { getConstraint, serializeConstraint } from '../constraints';\nimport { InputSpec, Layer } from '../engine/topology';\nimport { NotImplementedError, ValueError } from '../errors';\nimport { getInitializer, serializeInitializer } from '../initializers';\nimport { getRegularizer, serializeRegularizer } from '../regularizers';\nimport * as generic_utils from '../utils/generic_utils';\nimport * as math_utils from '../utils/math_utils';\nimport { getExactlyOneShape, getExactlyOneTensor } from '../utils/types_utils';\n/**\n * Applies batch normalization on x given mean, var, beta and gamma.\n *\n * I.e. 
returns:\n * `output = (x - mean) / (sqrt(var) + epsilon) * gamma + beta`\n *\n * @param x Input tensor.\n * @param mean Mean of batch.\n * @param variance Variance of batch.\n * @param beta Tensor with which to center the input.\n * @param gamma Tensor by which to scale the input.\n * @param epsilon Fuzz factor.\n * @returns The result of the batch normalization.\n */\nexport function batchNormalization(x, mean, variance, beta, gamma, epsilon = 1e-3) {\n let out;\n if (x.rank === 2) {\n out = tfc.batchNorm2d(x, mean, variance, beta, gamma, epsilon);\n }\n else if (x.rank === 3) {\n // TODO(cais): Check rank; give proper error message.\n out = tfc.batchNorm3d(x, mean, variance, beta, gamma, epsilon);\n }\n else if (x.rank === 4) {\n out = tfc.batchNorm4d(x, mean, variance, beta, gamma, epsilon);\n }\n else {\n throw new NotImplementedError(`batchNormalization is not implemented for array of rank ${x.rank} ` +\n `yet`);\n }\n return out;\n}\n/**\n * Non-broadcasting batch normalization for use in training (not inference).\n *\n * The input is normalized to zero mean and unit variance along the\n * `reductionAxes`, followed by scaling with `gamma` and shifted by `beta`.\n * The result of that is returned as the first element\n * of the returned `Array`. The other two elements are the mean and variance,\n * respectively.\n *\n * @param x Input tensor to be normalized.\n * @param gamma Tensor by which to scale the input.\n * @param beta Tensor by which to center the input.\n * @param reductionAxes Axes over which to normalize.\n * @param epsilon Fuzz factor.\n * @returns An `Array` of three `Tensors`:\n * [normalized tensor, mean of input, variance of input].\n */\nfunction regularNormalizeBatchInTraining(x, gamma, beta, reductionAxes, epsilon = 1e-3) {\n return tidy(() => {\n const meanAndVariance = tfc.moments(x, reductionAxes);\n const mean = meanAndVariance.mean;\n const variance = meanAndVariance.variance;\n const normed = batchNormalization(x, mean, variance, beta, gamma, epsilon);\n return [normed, mean, variance];\n });\n}\n/**\n * Broadcasting batch normalization for use in training (not inference).\n *\n * The input is normalized to zero mean and unit variance along the\n * `reductionAxes`, followed by scaling with `gamma` and shifted by `beta`.\n * The result of that is returned as the first element\n * of the returned `Array`. The other two elements are the mean and variance,\n * respectively.\n *\n * @param x Input tensor to be normalized.\n * @param gamma Tensor by which to scale the input.\n * @param beta Tensor by which to center the input.\n * @param reductionAxes Axes over which to normalize.\n * @param epsilon Fuzz factor.\n * @returns An `Array` of three `Tensors`:\n * [normalized tensor, mean of input, variance of input].\n */\nfunction broadcastNormalizeBatchInTraining(x, gamma, beta, reductionAxes, epsilon = 1e-3) {\n return tidy(() => {\n const meanAndVariance = tfc.moments(x, reductionAxes);\n const mean = meanAndVariance.mean;\n const variance = meanAndVariance.variance;\n const targetShape = [];\n for (const axis of math_utils.range(0, x.rank)) {\n if (reductionAxes.indexOf(axis) !== -1) {\n targetShape.push(1);\n }\n else {\n targetShape.push(x.shape[axis]);\n }\n }\n const broadcastMean = mean.reshape(targetShape);\n const broadcastVariance = variance.reshape(targetShape);\n const broadcastGamma = gamma == null ? null : gamma.reshape(targetShape);\n const broadcastBeta = beta == null ? 
null : beta.reshape(targetShape);\n const normed = batchNormalization(x, broadcastMean, broadcastVariance, broadcastBeta, broadcastGamma, epsilon);\n return [normed, mean, variance];\n });\n}\n/**\n * Batch normalization for use in training (not inference).\n *\n * @param x Input tensor to be normalized.\n * @param gamma Tensor by which to scale the input.\n * @param beta Tensor by which to center the input.\n * @param reductionAxes Axes over which to normalize.\n * @param epsilon Fuzz factor.\n * @returns An `Array` of three `Tensors`:\n * [normalized tensor, mean of input, variance of input].\n */\nexport function normalizeBatchInTraining(x, gamma, beta, reductionAxes, epsilon = 1e-3) {\n if (util.arraysEqual(reductionAxes.slice().sort(), math_utils.range(0, x.rank - 1))) {\n return regularNormalizeBatchInTraining(x, gamma, beta, reductionAxes, epsilon);\n }\n else {\n return broadcastNormalizeBatchInTraining(x, gamma, beta, reductionAxes, epsilon);\n }\n}\nexport class BatchNormalization extends Layer {\n constructor(args) {\n if (args == null) {\n args = {};\n }\n super(args);\n this.supportsMasking = true;\n this.axis = args.axis == null ? -1 : args.axis;\n this.momentum = args.momentum == null ? 0.99 : args.momentum;\n this.epsilon = args.epsilon == null ? 1e-3 : args.epsilon;\n this.center = args.center == null ? true : args.center;\n this.scale = args.scale == null ? true : args.scale;\n this.betaInitializer = getInitializer(args.betaInitializer || 'zeros');\n this.gammaInitializer = getInitializer(args.gammaInitializer || 'ones');\n this.movingMeanInitializer =\n getInitializer(args.movingMeanInitializer || 'zeros');\n this.movingVarianceInitializer =\n getInitializer(args.movingVarianceInitializer || 'ones');\n this.betaConstraint = getConstraint(args.betaConstraint);\n this.gammaConstraint = getConstraint(args.gammaConstraint);\n this.betaRegularizer = getRegularizer(args.betaRegularizer);\n this.gammaRegularizer = getRegularizer(args.gammaRegularizer);\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const axis = this.axis >= 0 ? this.axis : (this.axis + inputShape.length);\n const dim = inputShape[axis];\n if (dim == null) {\n throw new ValueError(`Axis ${axis} of input tensor should have a defined dimension but ` +\n `the layer received an input with shape ` +\n `${JSON.stringify(inputShape)}.`);\n }\n this.inputSpec =\n [new InputSpec({ ndim: inputShape.length, axes: { [axis]: dim } })];\n const shape = [dim];\n if (this.scale) {\n this.gamma = this.addWeight('gamma', shape, null, this.gammaInitializer, this.gammaRegularizer, true, this.gammaConstraint);\n }\n if (this.center) {\n this.beta = this.addWeight('beta', shape, null, this.betaInitializer, this.betaRegularizer, true, this.betaConstraint);\n }\n this.movingMean = this.addWeight('moving_mean', shape, null, this.movingMeanInitializer, null, false);\n this.movingVariance = this.addWeight('moving_variance', shape, null, this.movingVarianceInitializer, null, false);\n this.built = true;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n const training = kwargs['training'] == null ? false : kwargs['training'];\n const input = getExactlyOneTensor(inputs);\n const inputShape = input.shape;\n const ndim = inputShape.length;\n const reductionAxes = math_utils.range(0, ndim);\n const axis = this.axis >= 0 ? 
this.axis : (this.axis + ndim);\n reductionAxes.splice(axis, 1);\n const broadcastShape = generic_utils.pyListRepeat(1, ndim);\n broadcastShape[axis] = inputShape[axis];\n const sortedReductionAxes = reductionAxes.slice();\n sortedReductionAxes.sort();\n const needsBroadcasting = !util.arraysEqual(sortedReductionAxes, math_utils.range(0, ndim).slice(0, ndim - 1));\n const normalizeInference = () => {\n if (needsBroadcasting) {\n const broadcastMovingMean = this.movingMean.read().reshape(broadcastShape);\n const broadcastMovingVariance = this.movingVariance.read().reshape(broadcastShape);\n const broadcastBeta = this.center ? this.beta.read().reshape(broadcastShape) : null;\n const broadcastGamma = this.scale ? this.gamma.read().reshape(broadcastShape) : null;\n return batchNormalization(input, broadcastMovingMean, broadcastMovingVariance, broadcastBeta, broadcastGamma, this.epsilon);\n }\n else {\n return batchNormalization(input, this.movingMean.read(), this.movingVariance.read(), this.beta == null ? null : this.beta.read(), this.gamma == null ? null : this.gamma.read(), this.epsilon);\n }\n };\n if (!training) {\n return normalizeInference();\n }\n const [normedTraining, mean, variance] = normalizeBatchInTraining(input, this.gamma.read(), this.beta.read(), reductionAxes, this.epsilon);\n const doMovingAverage = (variable, value, momentum) => {\n tfc.tidy(() => {\n const decay = 1 - momentum;\n const origValue = variable.read();\n const updateDelta = origValue.sub(value).mul(decay);\n variable.write(origValue.sub(updateDelta));\n });\n };\n // Perform updates to moving mean and moving variance for training.\n // Porting Note: In PyKeras, these updates to `movingMean` and\n // `movingAverage` are done as a deferred Graph, added to the `Layer`'s\n // `update`s using the `add_update()` method. Here we do it imperatively\n // and encapsulate the updates in a function that is invoked\n // immediately.\n const updateMovingMeanAndVariance = () => {\n doMovingAverage(this.movingMean, mean, this.momentum);\n doMovingAverage(this.movingVariance, variance, this.momentum);\n };\n updateMovingMeanAndVariance();\n return normedTraining;\n });\n }\n getConfig() {\n const config = {\n axis: this.axis,\n momentum: this.momentum,\n epsilon: this.epsilon,\n center: this.center,\n scale: this.scale,\n betaInitializer: serializeInitializer(this.betaInitializer),\n gammaInitializer: serializeInitializer(this.gammaInitializer),\n movingMeanInitializer: serializeInitializer(this.movingMeanInitializer),\n movingVarianceInitializer: serializeInitializer(this.movingVarianceInitializer),\n betaRegularizer: serializeRegularizer(this.betaRegularizer),\n gammaRegularizer: serializeRegularizer(this.gammaRegularizer),\n betaConstraint: serializeConstraint(this.betaConstraint),\n gammaConstraint: serializeConstraint(this.gammaConstraint)\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nBatchNormalization.className = 'BatchNormalization';\nserialization.registerClass(BatchNormalization);\nexport class LayerNormalization extends Layer {\n constructor(args) {\n if (args == null) {\n args = {};\n }\n super(args);\n this.axis = args.axis == null ? 
-1 : args.axis;\n if (typeof this.axis === 'number') {\n if (!Number.isInteger(this.axis)) {\n throw new Error(`Expected axis to be an integer, but received ${this.axis}`);\n }\n }\n else if (Array.isArray(this.axis)) {\n for (const axis of this.axis) {\n if (!Number.isInteger(axis)) {\n throw new Error(`Expected axis to be an array of integers, ` +\n `but received ${JSON.stringify(this.axis)}`);\n }\n }\n }\n else {\n throw new Error(`Expected axis to be an integer or an array of integers, ` +\n `but received ${JSON.stringify(this.axis)}`);\n }\n this.epsilon = args.epsilon == null ? 1e-3 : args.epsilon;\n this.center = args.center == null ? true : args.center;\n this.scale = args.scale == null ? true : args.scale;\n this.betaInitializer = getInitializer(args.betaInitializer || 'zeros');\n this.gammaInitializer = getInitializer(args.gammaInitializer || 'ones');\n this.betaRegularizer = getRegularizer(args.betaRegularizer);\n this.gammaRegularizer = getRegularizer(args.gammaRegularizer);\n this.supportsMasking = true;\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const nDims = inputShape.length;\n // Convert axis to array and resolve negatives.\n if (typeof this.axis === 'number') {\n this.axis = [this.axis];\n }\n for (let i = 0; i < this.axis.length; ++i) {\n if (this.axis[i] < 0) {\n this.axis[i] += nDims;\n }\n }\n // Further validate axes.\n for (const axis of this.axis) {\n if (axis < 0 || axis >= nDims) {\n throw new Error(`Invalid axis: ${axis}`);\n }\n }\n if (this.axis.length !== generic_utils.unique(this.axis).length) {\n throw new Error(`Found duplicate axes in: ${this.axis}`);\n }\n const paramShape = this.axis.map(axis => inputShape[axis]);\n const trainable = true;\n if (this.scale) {\n this.gamma = this.addWeight('gamma', paramShape, 'float32', this.gammaInitializer, this.gammaRegularizer, trainable);\n }\n else {\n this.gamma = null;\n }\n if (this.center) {\n this.beta = this.addWeight('beta', paramShape, 'float32', this.betaInitializer, this.betaRegularizer, trainable);\n }\n else {\n this.beta = null;\n }\n this.built = true;\n }\n call(inputs, kwargs) {\n const input = getExactlyOneTensor(inputs);\n const inputShape = input.shape;\n const nDims = inputShape.length;\n return tidy(() => {\n const keepDims = true;\n let { mean, variance } = moments(input, this.axis, keepDims);\n const broadcastShape = generic_utils.pyListRepeat(1, nDims);\n for (const dim of this.axis) {\n broadcastShape[dim] = inputShape[dim];\n }\n const broadcast = (v) => {\n if (v != null && v.shape.length !== nDims &&\n this.axis !== [nDims - 1]) {\n return v.reshape(broadcastShape);\n }\n else {\n return v;\n }\n };\n let scale = broadcast(this.gamma.read());\n let offset = broadcast(this.beta.read());\n // TODO(https://github.com/tensorflow/tfjs/issues/2120): The tiling below\n // is a workaround for the limitation of core's batchNormalization?d don't\n // support broadcasting in their gradients. In addition, the tiling is\n // necessary to ensure correctness on the browser CPU backend regardless\n // of forward or backward computation. Remove this workaround once the\n // limitation is addressed. 
See .\n const momentsTiling = [];\n const scaleOffsetTiling = [];\n for (let i = 0; i < nDims; ++i) {\n if (this.axis.indexOf(i) !== -1) {\n momentsTiling.push(inputShape[i]);\n scaleOffsetTiling.push(1);\n }\n else {\n momentsTiling.push(1);\n scaleOffsetTiling.push(inputShape[i]);\n }\n }\n mean = mean.tile(momentsTiling);\n variance = variance.tile(momentsTiling);\n scale = scale.tile(scaleOffsetTiling);\n offset = offset.tile(scaleOffsetTiling);\n return batchNormalization(input, mean, variance, offset, scale, this.epsilon);\n });\n }\n getConfig() {\n const config = {\n axis: this.axis,\n epsilon: this.epsilon,\n center: this.center,\n scale: this.scale,\n betaInitializer: serializeInitializer(this.betaInitializer),\n gammaInitializer: serializeInitializer(this.gammaInitializer),\n betaRegularizer: serializeRegularizer(this.betaRegularizer),\n gammaRegularizer: serializeRegularizer(this.gammaRegularizer)\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nLayerNormalization.className = 'LayerNormalization';\nserialization.registerClass(LayerNormalization);\n//# sourceMappingURL=normalization.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Padding Layers.\n */\n// Porting Note: In Python Keras, the padding layers are in convolutional.py,\n// but we decided to put them in a separate file (padding.ts) for clarity.\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { serialization, tidy } from '@tensorflow/tfjs-core';\nimport { imageDataFormat } from '../backend/common';\nimport { InputSpec, Layer } from '../engine/topology';\nimport { ValueError } from '../errors';\nimport { getExactlyOneShape, getExactlyOneTensor } from '../utils/types_utils';\n/**\n * Pads the middle dimension of a 3D tensor.\n *\n * @param x Input `tf.Tensor` to be padded.\n * @param padding `Array` of 2 integers, how many zeros to add at the start and\n * end of the middle dimension (i.e., dimension 1).\n * @return A padded 3D `tf.Tensor`.\n */\nexport function temporalPadding(x, padding) {\n return tidy(() => {\n if (x.rank !== 3) {\n throw new ValueError(`temporalPadding expects input tensor to be 3-D, but received a ` +\n `${x.rank}-D tensor.`);\n }\n if (padding == null) {\n padding = [1, 1];\n }\n if (padding.length !== 2) {\n throw new ValueError(`temporalPadding expects input padding pattern to be a length-2 ` +\n `array, but received a length-${padding.length} array.`);\n }\n const pattern = [[0, 0], padding, [0, 0]];\n return tfc.pad(x, pattern);\n });\n}\n/**\n * Pads the 2nd and 3rd dimensions of a 4D tensor.\n *\n * @param x Input `tf.Tensor` to be padded.\n * @param padding `Array` of two `Array`s, each of which is an `Array` of two\n * integers. 
The amount of padding at the beginning and end of the 2nd and 3rd\n * dimensions, respectively.\n * @param dataFormat 'channelsLast' (default) or 'channelsFirst'.\n * @return Padded 4D `tf.Tensor`.\n */\nexport function spatial2dPadding(x, padding, dataFormat) {\n return tidy(() => {\n if (x.rank !== 4) {\n throw new ValueError(`temporalPadding expects input tensor to be 4-D, but received a ` +\n `${x.rank}-D tensor.`);\n }\n if (padding == null) {\n padding = [[1, 1], [1, 1]];\n }\n if (padding.length !== 2 || padding[0].length !== 2 ||\n padding[1].length !== 2) {\n throw new ValueError('spatial2dPadding expects `padding` to be an Array of two Arrays, ' +\n 'each of which is an Array of two integers.');\n }\n if (dataFormat == null) {\n dataFormat = imageDataFormat();\n }\n if (dataFormat !== 'channelsLast' && dataFormat !== 'channelsFirst') {\n throw new ValueError(`Unknown data format: ${dataFormat}. ` +\n `Supported data formats are 'channelsLast' and 'channelsFirst.`);\n }\n let pattern;\n if (dataFormat === 'channelsFirst') {\n pattern = [[0, 0], [0, 0], padding[0], padding[1]];\n }\n else {\n pattern = [[0, 0], padding[0], padding[1], [0, 0]];\n }\n return tfc.pad(x, pattern);\n });\n}\nexport class ZeroPadding2D extends Layer {\n constructor(args) {\n if (args == null) {\n args = {};\n }\n super(args);\n this.dataFormat =\n args.dataFormat == null ? imageDataFormat() : args.dataFormat;\n // TODO(cais): Maybe refactor the following logic surrounding `padding`\n // into a helper method.\n if (args.padding == null) {\n this.padding = [[1, 1], [1, 1]];\n }\n else if (typeof args.padding === 'number') {\n this.padding =\n [[args.padding, args.padding], [args.padding, args.padding]];\n }\n else {\n args.padding = args.padding;\n if (args.padding.length !== 2) {\n throw new ValueError(`ZeroPadding2D expects padding to be a length-2 array, but ` +\n `received a length-${args.padding.length} array.`);\n }\n let heightPadding;\n let widthPadding;\n if (typeof args.padding[0] === 'number') {\n heightPadding = [args.padding[0], args.padding[0]];\n widthPadding = [args.padding[1], args.padding[1]];\n }\n else {\n args.padding = args.padding;\n if (args.padding[0].length !== 2) {\n throw new ValueError(`ZeroPadding2D expects height padding to be a length-2 array, ` +\n `but received a length-${args.padding[0].length} array.`);\n }\n heightPadding = args.padding[0];\n if (args.padding[1].length !== 2) {\n throw new ValueError(`ZeroPadding2D expects width padding to be a length-2 array, ` +\n `but received a length-${args.padding[1].length} array.`);\n }\n widthPadding = args.padding[1];\n }\n this.padding = [heightPadding, widthPadding];\n }\n this.inputSpec = [new InputSpec({ ndim: 4 })];\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n let rows;\n let cols;\n if (this.dataFormat === 'channelsFirst') {\n if (inputShape[2] != null && inputShape[2] >= 0) {\n rows = inputShape[2] + this.padding[0][0] + this.padding[0][1];\n }\n else {\n rows = null;\n }\n if (inputShape[3] != null && inputShape[3] >= 0) {\n cols = inputShape[3] + this.padding[1][0] + this.padding[1][1];\n }\n else {\n cols = null;\n }\n return [inputShape[0], inputShape[1], rows, cols];\n }\n else {\n if (inputShape[1] != null && inputShape[1] >= 0) {\n rows = inputShape[1] + this.padding[0][0] + this.padding[0][1];\n }\n else {\n rows = null;\n }\n if (inputShape[2] != null && inputShape[2] >= 0) {\n cols = inputShape[2] + this.padding[1][0] + this.padding[1][1];\n }\n else {\n cols = 
null;\n }\n return [inputShape[0], rows, cols, inputShape[3]];\n }\n }\n call(inputs, kwargs) {\n return tidy(() => spatial2dPadding(getExactlyOneTensor(inputs), this.padding, this.dataFormat));\n }\n getConfig() {\n const config = {\n padding: this.padding,\n dataFormat: this.dataFormat,\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nZeroPadding2D.className = 'ZeroPadding2D';\nserialization.registerClass(ZeroPadding2D);\n//# sourceMappingURL=padding.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * TensorFlow.js Layers: Pooling Layers.\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { serialization, tidy } from '@tensorflow/tfjs-core';\nimport { imageDataFormat } from '../backend/common';\nimport * as K from '../backend/tfjs_backend';\nimport { checkDataFormat, checkPaddingMode, checkPoolMode } from '../common';\nimport { InputSpec } from '../engine/topology';\nimport { Layer } from '../engine/topology';\nimport { NotImplementedError, ValueError } from '../errors';\nimport { convOutputLength } from '../utils/conv_utils';\nimport { assertPositiveInteger } from '../utils/generic_utils';\nimport { getExactlyOneShape, getExactlyOneTensor } from '../utils/types_utils';\nimport { preprocessConv2DInput, preprocessConv3DInput } from './convolutional';\n/**\n * 2D pooling.\n * @param x\n * @param poolSize\n * @param stridesdes strides. Defaults to [1, 1].\n * @param padding padding. Defaults to 'valid'.\n * @param dataFormat data format. Defaults to 'channelsLast'.\n * @param poolMode Mode of pooling. Defaults to 'max'.\n * @returns Result of the 2D pooling.\n */\nexport function pool2d(x, poolSize, strides, padding, dataFormat, poolMode) {\n return tidy(() => {\n checkDataFormat(dataFormat);\n checkPoolMode(poolMode);\n checkPaddingMode(padding);\n if (strides == null) {\n strides = [1, 1];\n }\n if (padding == null) {\n padding = 'valid';\n }\n if (dataFormat == null) {\n dataFormat = imageDataFormat();\n }\n if (poolMode == null) {\n poolMode = 'max';\n }\n // TODO(cais): Remove the preprocessing step once deeplearn.js supports\n // dataFormat as an input argument.\n x = preprocessConv2DInput(x, dataFormat); // x is NHWC after preprocessing.\n let y;\n const paddingString = (padding === 'same') ? 'same' : 'valid';\n if (poolMode === 'max') {\n // TODO(cais): Rank check?\n y = tfc.maxPool(x, poolSize, strides, paddingString);\n }\n else { // 'avg'\n // TODO(cais): Check the dtype and rank of x and give clear error message\n // if those are incorrect.\n y = tfc.avgPool(\n // TODO(cais): Rank check?\n x, poolSize, strides, paddingString);\n }\n if (dataFormat === 'channelsFirst') {\n y = tfc.transpose(y, [0, 3, 1, 2]); // NHWC -> NCHW.\n }\n return y;\n });\n}\n/**\n * 3D pooling.\n * @param x\n * @param poolSize. Default to [1, 1, 1].\n * @param strides strides. Defaults to [1, 1, 1].\n * @param padding padding. Defaults to 'valid'.\n * @param dataFormat data format. Defaults to 'channelsLast'.\n * @param poolMode Mode of pooling. 
Defaults to 'max'.\n * @returns Result of the 3D pooling.\n */\nexport function pool3d(x, poolSize, strides, padding, dataFormat, poolMode) {\n return tidy(() => {\n checkDataFormat(dataFormat);\n checkPoolMode(poolMode);\n checkPaddingMode(padding);\n if (strides == null) {\n strides = [1, 1, 1];\n }\n if (padding == null) {\n padding = 'valid';\n }\n if (dataFormat == null) {\n dataFormat = imageDataFormat();\n }\n if (poolMode == null) {\n poolMode = 'max';\n }\n // x is NDHWC after preprocessing.\n x = preprocessConv3DInput(x, dataFormat);\n let y;\n const paddingString = (padding === 'same') ? 'same' : 'valid';\n if (poolMode === 'max') {\n y = tfc.maxPool3d(x, poolSize, strides, paddingString);\n }\n else { // 'avg'\n y = tfc.avgPool3d(x, poolSize, strides, paddingString);\n }\n if (dataFormat === 'channelsFirst') {\n y = tfc.transpose(y, [0, 4, 1, 2, 3]); // NDHWC -> NCDHW.\n }\n return y;\n });\n}\n/**\n * Abstract class for different pooling 1D layers.\n */\nexport class Pooling1D extends Layer {\n /**\n *\n * @param args Parameters for the Pooling layer.\n *\n * config.poolSize defaults to 2.\n */\n constructor(args) {\n if (args.poolSize == null) {\n args.poolSize = 2;\n }\n super(args);\n if (typeof args.poolSize === 'number') {\n this.poolSize = [args.poolSize];\n }\n else if (Array.isArray(args.poolSize) &&\n args.poolSize.length === 1 &&\n typeof args.poolSize[0] === 'number') {\n this.poolSize = args.poolSize;\n }\n else {\n throw new ValueError(`poolSize for 1D convolutional layer must be a number or an ` +\n `Array of a single number, but received ` +\n `${JSON.stringify(args.poolSize)}`);\n }\n assertPositiveInteger(this.poolSize, 'poolSize');\n if (args.strides == null) {\n this.strides = this.poolSize;\n }\n else {\n if (typeof args.strides === 'number') {\n this.strides = [args.strides];\n }\n else if (Array.isArray(args.strides) &&\n args.strides.length === 1 &&\n typeof args.strides[0] === 'number') {\n this.strides = args.strides;\n }\n else {\n throw new ValueError(`strides for 1D convolutional layer must be a number or an ` +\n `Array of a single number, but received ` +\n `${JSON.stringify(args.strides)}`);\n }\n }\n assertPositiveInteger(this.strides, 'strides');\n this.padding = args.padding == null ? 
'valid' : args.padding;\n checkPaddingMode(this.padding);\n this.inputSpec = [new InputSpec({ ndim: 3 })];\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const length = convOutputLength(inputShape[1], this.poolSize[0], this.padding, this.strides[0]);\n return [inputShape[0], length, inputShape[2]];\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n // Add dummy last dimension.\n inputs = K.expandDims(getExactlyOneTensor(inputs), 2);\n const output = this.poolingFunction(getExactlyOneTensor(inputs), [this.poolSize[0], 1], [this.strides[0], 1], this.padding, 'channelsLast');\n // Remove dummy last dimension.\n return tfc.squeeze(output, [2]);\n });\n }\n getConfig() {\n const config = {\n poolSize: this.poolSize,\n padding: this.padding,\n strides: this.strides,\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\nexport class MaxPooling1D extends Pooling1D {\n constructor(args) {\n super(args);\n }\n poolingFunction(inputs, poolSize, strides, padding, dataFormat) {\n checkDataFormat(dataFormat);\n checkPaddingMode(padding);\n return pool2d(inputs, poolSize, strides, padding, dataFormat, 'max');\n }\n}\n/** @nocollapse */\nMaxPooling1D.className = 'MaxPooling1D';\nserialization.registerClass(MaxPooling1D);\nexport class AveragePooling1D extends Pooling1D {\n constructor(args) {\n super(args);\n }\n poolingFunction(inputs, poolSize, strides, padding, dataFormat) {\n checkDataFormat(dataFormat);\n checkPaddingMode(padding);\n return pool2d(inputs, poolSize, strides, padding, dataFormat, 'avg');\n }\n}\n/** @nocollapse */\nAveragePooling1D.className = 'AveragePooling1D';\nserialization.registerClass(AveragePooling1D);\n/**\n * Abstract class for different pooling 2D layers.\n */\nexport class Pooling2D extends Layer {\n constructor(args) {\n if (args.poolSize == null) {\n args.poolSize = [2, 2];\n }\n super(args);\n this.poolSize = Array.isArray(args.poolSize) ?\n args.poolSize :\n [args.poolSize, args.poolSize];\n if (args.strides == null) {\n this.strides = this.poolSize;\n }\n else if (Array.isArray(args.strides)) {\n if (args.strides.length !== 2) {\n throw new ValueError(`If the strides property of a 2D pooling layer is an Array, ` +\n `it is expected to have a length of 2, but received length ` +\n `${args.strides.length}.`);\n }\n this.strides = args.strides;\n }\n else {\n // `config.strides` is a number.\n this.strides = [args.strides, args.strides];\n }\n assertPositiveInteger(this.poolSize, 'poolSize');\n assertPositiveInteger(this.strides, 'strides');\n this.padding = args.padding == null ? 'valid' : args.padding;\n this.dataFormat =\n args.dataFormat == null ? 'channelsLast' : args.dataFormat;\n checkDataFormat(this.dataFormat);\n checkPaddingMode(this.padding);\n this.inputSpec = [new InputSpec({ ndim: 4 })];\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n let rows = this.dataFormat === 'channelsFirst' ? inputShape[2] : inputShape[1];\n let cols = this.dataFormat === 'channelsFirst' ? 
inputShape[3] : inputShape[2];\n rows =\n convOutputLength(rows, this.poolSize[0], this.padding, this.strides[0]);\n cols =\n convOutputLength(cols, this.poolSize[1], this.padding, this.strides[1]);\n if (this.dataFormat === 'channelsFirst') {\n return [inputShape[0], inputShape[1], rows, cols];\n }\n else {\n return [inputShape[0], rows, cols, inputShape[3]];\n }\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n return this.poolingFunction(getExactlyOneTensor(inputs), this.poolSize, this.strides, this.padding, this.dataFormat);\n });\n }\n getConfig() {\n const config = {\n poolSize: this.poolSize,\n padding: this.padding,\n strides: this.strides,\n dataFormat: this.dataFormat\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\nexport class MaxPooling2D extends Pooling2D {\n constructor(args) {\n super(args);\n }\n poolingFunction(inputs, poolSize, strides, padding, dataFormat) {\n checkDataFormat(dataFormat);\n checkPaddingMode(padding);\n return pool2d(inputs, poolSize, strides, padding, dataFormat, 'max');\n }\n}\n/** @nocollapse */\nMaxPooling2D.className = 'MaxPooling2D';\nserialization.registerClass(MaxPooling2D);\nexport class AveragePooling2D extends Pooling2D {\n constructor(args) {\n super(args);\n }\n poolingFunction(inputs, poolSize, strides, padding, dataFormat) {\n checkDataFormat(dataFormat);\n checkPaddingMode(padding);\n return pool2d(inputs, poolSize, strides, padding, dataFormat, 'avg');\n }\n}\n/** @nocollapse */\nAveragePooling2D.className = 'AveragePooling2D';\nserialization.registerClass(AveragePooling2D);\n/**\n * Abstract class for different pooling 3D layers.\n */\nexport class Pooling3D extends Layer {\n constructor(args) {\n if (args.poolSize == null) {\n args.poolSize = [2, 2, 2];\n }\n super(args);\n this.poolSize = Array.isArray(args.poolSize) ?\n args.poolSize :\n [args.poolSize, args.poolSize, args.poolSize];\n if (args.strides == null) {\n this.strides = this.poolSize;\n }\n else if (Array.isArray(args.strides)) {\n if (args.strides.length !== 3) {\n throw new ValueError(`If the strides property of a 3D pooling layer is an Array, ` +\n `it is expected to have a length of 3, but received length ` +\n `${args.strides.length}.`);\n }\n this.strides = args.strides;\n }\n else {\n // `config.strides` is a number.\n this.strides = [args.strides, args.strides, args.strides];\n }\n assertPositiveInteger(this.poolSize, 'poolSize');\n assertPositiveInteger(this.strides, 'strides');\n this.padding = args.padding == null ? 'valid' : args.padding;\n this.dataFormat =\n args.dataFormat == null ? 'channelsLast' : args.dataFormat;\n checkDataFormat(this.dataFormat);\n checkPaddingMode(this.padding);\n this.inputSpec = [new InputSpec({ ndim: 5 })];\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n let depths = this.dataFormat === 'channelsFirst' ? inputShape[2] : inputShape[1];\n let rows = this.dataFormat === 'channelsFirst' ? inputShape[3] : inputShape[2];\n let cols = this.dataFormat === 'channelsFirst' ? 
inputShape[4] : inputShape[3];\n depths = convOutputLength(depths, this.poolSize[0], this.padding, this.strides[0]);\n rows =\n convOutputLength(rows, this.poolSize[1], this.padding, this.strides[1]);\n cols =\n convOutputLength(cols, this.poolSize[2], this.padding, this.strides[2]);\n if (this.dataFormat === 'channelsFirst') {\n return [inputShape[0], inputShape[1], depths, rows, cols];\n }\n else {\n return [inputShape[0], depths, rows, cols, inputShape[4]];\n }\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n return this.poolingFunction(getExactlyOneTensor(inputs), this.poolSize, this.strides, this.padding, this.dataFormat);\n });\n }\n getConfig() {\n const config = {\n poolSize: this.poolSize,\n padding: this.padding,\n strides: this.strides,\n dataFormat: this.dataFormat\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\nexport class MaxPooling3D extends Pooling3D {\n constructor(args) {\n super(args);\n }\n poolingFunction(inputs, poolSize, strides, padding, dataFormat) {\n checkDataFormat(dataFormat);\n checkPaddingMode(padding);\n return pool3d(inputs, poolSize, strides, padding, dataFormat, 'max');\n }\n}\n/** @nocollapse */\nMaxPooling3D.className = 'MaxPooling3D';\nserialization.registerClass(MaxPooling3D);\nexport class AveragePooling3D extends Pooling3D {\n constructor(args) {\n super(args);\n }\n poolingFunction(inputs, poolSize, strides, padding, dataFormat) {\n checkDataFormat(dataFormat);\n checkPaddingMode(padding);\n return pool3d(inputs, poolSize, strides, padding, dataFormat, 'avg');\n }\n}\n/** @nocollapse */\nAveragePooling3D.className = 'AveragePooling3D';\nserialization.registerClass(AveragePooling3D);\n/**\n * Abstract class for different global pooling 1D layers.\n */\nexport class GlobalPooling1D extends Layer {\n constructor(args) {\n super(args);\n this.inputSpec = [new InputSpec({ ndim: 3 })];\n }\n computeOutputShape(inputShape) {\n return [inputShape[0], inputShape[2]];\n }\n call(inputs, kwargs) {\n throw new NotImplementedError();\n }\n}\nexport class GlobalAveragePooling1D extends GlobalPooling1D {\n constructor(args) {\n super(args || {});\n }\n call(inputs, kwargs) {\n return tidy(() => {\n const input = getExactlyOneTensor(inputs);\n return tfc.mean(input, 1);\n });\n }\n}\n/** @nocollapse */\nGlobalAveragePooling1D.className = 'GlobalAveragePooling1D';\nserialization.registerClass(GlobalAveragePooling1D);\nexport class GlobalMaxPooling1D extends GlobalPooling1D {\n constructor(args) {\n super(args || {});\n }\n call(inputs, kwargs) {\n return tidy(() => {\n const input = getExactlyOneTensor(inputs);\n return tfc.max(input, 1);\n });\n }\n}\n/** @nocollapse */\nGlobalMaxPooling1D.className = 'GlobalMaxPooling1D';\nserialization.registerClass(GlobalMaxPooling1D);\n/**\n * Abstract class for different global pooling 2D layers.\n */\nexport class GlobalPooling2D extends Layer {\n constructor(args) {\n super(args);\n this.dataFormat =\n args.dataFormat == null ? 
'channelsLast' : args.dataFormat;\n checkDataFormat(this.dataFormat);\n this.inputSpec = [new InputSpec({ ndim: 4 })];\n }\n computeOutputShape(inputShape) {\n inputShape = inputShape;\n if (this.dataFormat === 'channelsLast') {\n return [inputShape[0], inputShape[3]];\n }\n else {\n return [inputShape[0], inputShape[1]];\n }\n }\n call(inputs, kwargs) {\n throw new NotImplementedError();\n }\n getConfig() {\n const config = { dataFormat: this.dataFormat };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\nexport class GlobalAveragePooling2D extends GlobalPooling2D {\n call(inputs, kwargs) {\n return tidy(() => {\n const input = getExactlyOneTensor(inputs);\n if (this.dataFormat === 'channelsLast') {\n return tfc.mean(input, [1, 2]);\n }\n else {\n return tfc.mean(input, [2, 3]);\n }\n });\n }\n}\n/** @nocollapse */\nGlobalAveragePooling2D.className = 'GlobalAveragePooling2D';\nserialization.registerClass(GlobalAveragePooling2D);\nexport class GlobalMaxPooling2D extends GlobalPooling2D {\n call(inputs, kwargs) {\n return tidy(() => {\n const input = getExactlyOneTensor(inputs);\n if (this.dataFormat === 'channelsLast') {\n return tfc.max(input, [1, 2]);\n }\n else {\n return tfc.max(input, [2, 3]);\n }\n });\n }\n}\n/** @nocollapse */\nGlobalMaxPooling2D.className = 'GlobalMaxPooling2D';\nserialization.registerClass(GlobalMaxPooling2D);\n//# sourceMappingURL=pooling.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Layers that augment the functionality of a base layer.\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { serialization, tidy } from '@tensorflow/tfjs-core';\nimport * as K from '../backend/tfjs_backend';\nimport { nameScope } from '../common';\nimport { InputSpec, Layer, SymbolicTensor } from '../engine/topology';\nimport { NotImplementedError, ValueError } from '../errors';\nimport { VALID_BIDIRECTIONAL_MERGE_MODES } from '../keras_format/common';\nimport * as generic_utils from '../utils/generic_utils';\nimport { getExactlyOneShape, getExactlyOneTensor } from '../utils/types_utils';\nimport { rnn, standardizeArgs } from './recurrent';\nimport { deserialize } from './serialization';\n/**\n * Abstract wrapper base class.\n *\n * Wrappers take another layer and augment it in various ways.\n * Do not use this class as a layer, it is only an abstract base class.\n * Two usable wrappers are the `TimeDistributed` and `Bidirectional` wrappers.\n */\nexport class Wrapper extends Layer {\n constructor(args) {\n // Porting Note: In PyKeras, `self.layer` is set prior to the calling\n // `super()`. But we can't do that here due to TypeScript's restriction.\n // See: https://github.com/Microsoft/TypeScript/issues/8277\n // As a result, we have to add checks in `get trainable()` and\n // `set trainable()` below in order to prevent using `this.layer` when\n // its value is `undefined`. 
The super constructor does use the getter\n // and the setter of `this.layer`.\n super(args);\n this.layer = args.layer;\n }\n build(inputShape) {\n this.built = true;\n }\n // TODO(cais): Implement activityRegularizer getter.\n get trainable() {\n // Porting Note: the check of `this.layer` here is necessary due to the\n // way the `constructor` of this class is written (see Porting Note\n // above).\n if (this.layer != null) {\n return this.layer.trainable;\n }\n else {\n return false;\n }\n }\n set trainable(value) {\n // Porting Note: the check of `this.layer` here is necessary due to the\n // way the `constructor` of this class is written (see Porting Note\n // above).\n if (this.layer != null) {\n this.layer.trainable = value;\n }\n }\n get trainableWeights() {\n return this.layer.trainableWeights;\n }\n // TODO(cais): Implement setter for trainableWeights.\n get nonTrainableWeights() {\n return this.layer.nonTrainableWeights;\n }\n // TODO(cais): Implement setter for nonTrainableWeights.\n get updates() {\n // tslint:disable-next-line:no-any\n return this.layer._updates;\n }\n // TODO(cais): Implement getUpdatesFor().\n get losses() {\n return this.layer.losses;\n }\n // TODO(cais): Implement getLossesFor().\n getWeights() {\n return this.layer.getWeights();\n }\n setWeights(weights) {\n this.layer.setWeights(weights);\n }\n getConfig() {\n const config = {\n 'layer': {\n 'className': this.layer.getClassName(),\n 'config': this.layer.getConfig(),\n }\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n setFastWeightInitDuringBuild(value) {\n super.setFastWeightInitDuringBuild(value);\n if (this.layer != null) {\n this.layer.setFastWeightInitDuringBuild(value);\n }\n }\n /** @nocollapse */\n static fromConfig(cls, config, customObjects = {}) {\n const layerConfig = config['layer'];\n const layer = deserialize(layerConfig, customObjects);\n delete config['layer'];\n const newConfig = { layer };\n Object.assign(newConfig, config);\n return new cls(newConfig);\n }\n}\nexport class TimeDistributed extends Wrapper {\n constructor(args) {\n super(args);\n this.supportsMasking = true;\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n if (inputShape.length < 3) {\n throw new ValueError(`TimeDistributed layer expects an input shape >= 3D, but received ` +\n `input shape ${JSON.stringify(inputShape)}`);\n }\n this.inputSpec = [{ shape: inputShape }];\n const childInputShape = [inputShape[0]].concat(inputShape.slice(2));\n if (!this.layer.built) {\n this.layer.build(childInputShape);\n this.layer.built = true;\n }\n super.build(inputShape);\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const childInputShape = [inputShape[0]].concat(inputShape.slice(2));\n const childOutputShape = this.layer.computeOutputShape(childInputShape);\n const timesteps = inputShape[1];\n return [childOutputShape[0], timesteps].concat(childOutputShape.slice(1));\n }\n call(inputs, kwargs) {\n return tidy(() => {\n // TODO(cais): Add 'training' and 'useLearningPhase' to kwargs.\n inputs = getExactlyOneTensor(inputs);\n // Porting Note: In tfjs-layers, `inputs` are always concrete tensor\n // values. 
Hence the inputs can't have an undetermined first (batch)\n // dimension, which is why we always use the K.rnn approach here.\n const step = (inputs, states) => {\n // TODO(cais): Add useLearningPhase.\n // NOTE(cais): `layer.call` may return a length-1 array of Tensor in\n // some cases (e.g., `layer` is a `Sequential` instance), which is\n // why `getExactlyOneTensor` is used below.\n const output = getExactlyOneTensor(this.layer.call(inputs, kwargs));\n return [output, []];\n };\n const rnnOutputs = rnn(step, inputs, [], false /* goBackwards */, null /* mask */, null /* constants */, false /* unroll */, true /* needPerStepOutputs */);\n const y = rnnOutputs[1];\n // TODO(cais): Add activity regularization.\n // TODO(cais): Add useLearningPhase.\n return y;\n });\n }\n}\n/** @nocollapse */\nTimeDistributed.className = 'TimeDistributed';\nserialization.registerClass(TimeDistributed);\nexport function checkBidirectionalMergeMode(value) {\n generic_utils.checkStringTypeUnionValue(VALID_BIDIRECTIONAL_MERGE_MODES, 'BidirectionalMergeMode', value);\n}\nconst DEFAULT_BIDIRECTIONAL_MERGE_MODE = 'concat';\nexport class Bidirectional extends Wrapper {\n constructor(args) {\n super(args);\n // Note: When creating `this.forwardLayer`, the original Layer object\n // (`config.layer`) ought to be cloned. This is why we call\n // `getConfig()` followed by `deserialize()`. Without this cloning,\n // the layer names saved during serialization will incorrectly contain\n // the 'forward_' prefix. In Python Keras, this is done using\n // `copy.copy` (shallow copy), which does not have a simple equivalent\n // in JavaScript. JavaScript's `Object.assign()` does not copy\n // methods.\n const layerConfig = args.layer.getConfig();\n const forwDict = {};\n forwDict['className'] = args.layer.getClassName();\n forwDict['config'] = layerConfig;\n this.forwardLayer = deserialize(forwDict);\n layerConfig['goBackwards'] =\n layerConfig['goBackwards'] === true ? 
false : true;\n const backDict = {};\n backDict['className'] = args.layer.getClassName();\n backDict['config'] = layerConfig;\n this.backwardLayer = deserialize(backDict);\n this.forwardLayer.name = 'forward_' + this.forwardLayer.name;\n this.backwardLayer.name = 'backward_' + this.backwardLayer.name;\n this.mergeMode = args.mergeMode === undefined ?\n DEFAULT_BIDIRECTIONAL_MERGE_MODE :\n args.mergeMode;\n checkBidirectionalMergeMode(this.mergeMode);\n if (args.weights) {\n throw new NotImplementedError('weights support is not implemented for Bidirectional layer yet.');\n }\n this._stateful = args.layer.stateful;\n this.returnSequences = args.layer.returnSequences;\n this.returnState = args.layer.returnState;\n this.supportsMasking = true;\n this._trainable = true;\n this.inputSpec = args.layer.inputSpec;\n this.numConstants = null;\n }\n get trainable() {\n return this._trainable;\n }\n set trainable(value) {\n // Porting Note: the check of `this.layer` here is necessary due to the\n // way the `constructor` of this class is written (see Porting Note\n // above).\n this._trainable = value;\n if (this.forwardLayer != null) {\n this.forwardLayer.trainable = value;\n }\n if (this.backwardLayer != null) {\n this.backwardLayer.trainable = value;\n }\n }\n getWeights() {\n return this.forwardLayer.getWeights().concat(this.backwardLayer.getWeights());\n }\n setWeights(weights) {\n const numWeights = weights.length;\n const numeightsOver2 = Math.floor(numWeights / 2);\n this.forwardLayer.setWeights(weights.slice(0, numeightsOver2));\n this.backwardLayer.setWeights(weights.slice(numeightsOver2));\n }\n computeOutputShape(inputShape) {\n let layerShapes = this.forwardLayer.computeOutputShape(inputShape);\n if (!(Array.isArray(layerShapes) && Array.isArray(layerShapes[0]))) {\n layerShapes = [layerShapes];\n }\n layerShapes = layerShapes;\n let outputShape;\n let outputShapes;\n let stateShape;\n if (this.returnState) {\n stateShape = layerShapes.slice(1);\n outputShape = layerShapes[0];\n }\n else {\n outputShape = layerShapes[0];\n }\n outputShape = outputShape;\n if (this.mergeMode === 'concat') {\n outputShape[outputShape.length - 1] *= 2;\n outputShapes = [outputShape];\n }\n else if (this.mergeMode == null) {\n outputShapes = [outputShape, outputShape.slice()];\n }\n else {\n outputShapes = [outputShape];\n }\n if (this.returnState) {\n if (this.mergeMode == null) {\n return outputShapes.concat(stateShape).concat(stateShape.slice());\n }\n return [outputShape].concat(stateShape).concat(stateShape.slice());\n }\n return generic_utils.singletonOrArray(outputShapes);\n }\n apply(inputs, kwargs) {\n let initialState = kwargs == null ? null : kwargs['initialState'];\n let constants = kwargs == null ? 
null : kwargs['constants'];\n if (kwargs == null) {\n kwargs = {};\n }\n const standardized = standardizeArgs(inputs, initialState, constants, this.numConstants);\n inputs = standardized.inputs;\n initialState = standardized.initialState;\n constants = standardized.constants;\n if (Array.isArray(inputs)) {\n initialState = inputs.slice(1);\n inputs = inputs[0];\n }\n if ((initialState == null || initialState.length === 0) &&\n constants == null) {\n return super.apply(inputs, kwargs);\n }\n const additionalInputs = [];\n const additionalSpecs = [];\n if (initialState != null) {\n const numStates = initialState.length;\n if (numStates % 2 > 0) {\n throw new ValueError('When passing `initialState` to a Bidrectional RNN, ' +\n 'the state should be an Array containing the states of ' +\n 'the underlying RNNs.');\n }\n kwargs['initialState'] = initialState;\n additionalInputs.push(...initialState);\n const stateSpecs = initialState\n .map(state => new InputSpec({ shape: state.shape }));\n this.forwardLayer.stateSpec = stateSpecs.slice(0, numStates / 2);\n this.backwardLayer.stateSpec = stateSpecs.slice(numStates / 2);\n additionalSpecs.push(...stateSpecs);\n }\n if (constants != null) {\n throw new NotImplementedError('Support for constants in Bidirectional layers is not ' +\n 'implemented yet.');\n }\n const isSymbolicTensor = additionalInputs[0] instanceof SymbolicTensor;\n for (const tensor of additionalInputs) {\n if (tensor instanceof SymbolicTensor !== isSymbolicTensor) {\n throw new ValueError('The initial state of a Bidirectional layer cannot be ' +\n 'specified as a mix of symbolic and non-symbolic tensors');\n }\n }\n if (isSymbolicTensor) {\n // Compute the full input and specs, including the states.\n const fullInput = [inputs].concat(additionalInputs);\n const fullInputSpec = this.inputSpec.concat(additionalSpecs);\n // Perform the call temporarily and replace inputSpec.\n // Note: with initial states symbolic calls and non-symbolic calls to\n // this method differ in how the initial states are passed. For\n // symbolic calls, the initial states are passed in the first arg, as\n // an Array of SymbolicTensors; for non-symbolic calls, they are\n // passed in the second arg as a part of the kwargs. 
Hence the need to\n // temporarily modify inputSpec here.\n // TODO(cais): Make refactoring so that this hacky code below is no\n // longer needed.\n const originalInputSpec = this.inputSpec;\n this.inputSpec = fullInputSpec;\n const output = super.apply(fullInput, kwargs);\n this.inputSpec = originalInputSpec;\n return output;\n }\n else {\n return super.apply(inputs, kwargs);\n }\n }\n call(inputs, kwargs) {\n return tidy(() => {\n const initialState = kwargs['initialState'];\n let y;\n let yRev;\n if (initialState == null) {\n y = this.forwardLayer.call(inputs, kwargs);\n yRev = this.backwardLayer.call(inputs, kwargs);\n }\n else {\n const forwardState = initialState.slice(0, initialState.length / 2);\n const backwardState = initialState.slice(initialState.length / 2);\n y = this.forwardLayer.call(inputs, Object.assign(kwargs, { initialState: forwardState }));\n yRev = this.backwardLayer.call(inputs, Object.assign(kwargs, { initialState: backwardState }));\n }\n let states;\n if (this.returnState) {\n if (Array.isArray(y)) {\n states = y.slice(1).concat(yRev.slice(1));\n }\n else {\n }\n y = y[0];\n yRev = yRev[0];\n }\n if (this.returnSequences) {\n yRev = tfc.reverse(yRev, 1);\n }\n let output;\n if (this.mergeMode === 'concat') {\n output = K.concatenate([y, yRev]);\n }\n else if (this.mergeMode === 'sum') {\n output = tfc.add(y, yRev);\n }\n else if (this.mergeMode === 'ave') {\n output = tfc.mul(.5, tfc.add(y, yRev));\n }\n else if (this.mergeMode === 'mul') {\n output = tfc.mul(y, yRev);\n }\n else if (this.mergeMode == null) {\n output = [y, yRev];\n }\n // TODO(cais): Properly set learning phase.\n if (this.returnState) {\n if (this.mergeMode == null) {\n return output.concat(states);\n }\n return [output].concat(states);\n }\n return output;\n });\n }\n resetStates(states) {\n this.forwardLayer.resetStates();\n this.backwardLayer.resetStates();\n }\n build(inputShape) {\n nameScope(this.forwardLayer.name, () => {\n this.forwardLayer.build(inputShape);\n });\n nameScope(this.backwardLayer.name, () => {\n this.backwardLayer.build(inputShape);\n });\n this.built = true;\n }\n computeMask(inputs, mask) {\n if (Array.isArray(mask)) {\n mask = mask[0];\n }\n let outputMask;\n if (this.returnSequences) {\n if (this.mergeMode == null) {\n outputMask = [mask, mask];\n }\n else {\n outputMask = mask;\n }\n }\n else {\n if (this.mergeMode == null) {\n outputMask = [null, null];\n }\n else {\n outputMask = null;\n }\n }\n if (this.returnState) {\n const states = this.forwardLayer.states;\n const stateMask = states.map(state => null);\n if (Array.isArray(outputMask)) {\n return outputMask.concat(stateMask).concat(stateMask);\n }\n else {\n return [outputMask].concat(stateMask).concat(stateMask);\n }\n }\n else {\n return outputMask;\n }\n }\n get trainableWeights() {\n return this.forwardLayer.trainableWeights.concat(this.backwardLayer.trainableWeights);\n }\n get nonTrainableWeights() {\n return this.forwardLayer.nonTrainableWeights.concat(this.backwardLayer.nonTrainableWeights);\n }\n // TODO(cais): Implement constraints().\n setFastWeightInitDuringBuild(value) {\n super.setFastWeightInitDuringBuild(value);\n if (this.forwardLayer != null) {\n this.forwardLayer.setFastWeightInitDuringBuild(value);\n }\n if (this.backwardLayer != null) {\n this.backwardLayer.setFastWeightInitDuringBuild(value);\n }\n }\n getConfig() {\n const config = {\n 'mergeMode': this.mergeMode,\n };\n // TODO(cais): Add logic for `numConstants` once the property is added.\n const baseConfig = super.getConfig();\n 
Object.assign(config, baseConfig);\n return config;\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n const rnnLayer = deserialize(config['layer']);\n delete config['layer'];\n // TODO(cais): Add logic for `numConstants` once the property is added.\n if (config['numConstants'] != null) {\n throw new NotImplementedError(`Deserialization of a Bidirectional layer with numConstants ` +\n `present is not supported yet.`);\n }\n // tslint:disable-next-line:no-any\n const newConfig = config;\n newConfig['layer'] = rnnLayer;\n return new cls(newConfig);\n }\n}\n/** @nocollapse */\nBidirectional.className = 'Bidirectional';\nserialization.registerClass(Bidirectional);\n//# sourceMappingURL=wrappers.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport { InputLayer } from './engine/input_layer';\nimport { Layer } from './engine/topology';\nimport { input } from './exports';\nimport { ELU, LeakyReLU, PReLU, ReLU, Softmax, ThresholdedReLU } from './layers/advanced_activations';\nimport { Conv1D, Conv2D, Conv2DTranspose, Conv3D, Cropping2D, SeparableConv2D, UpSampling2D } from './layers/convolutional';\nimport { DepthwiseConv2D } from './layers/convolutional_depthwise';\nimport { ConvLSTM2D, ConvLSTM2DCell } from './layers/convolutional_recurrent';\nimport { Activation, Dense, Dropout, Flatten, Masking, Permute, RepeatVector, Reshape, SpatialDropout1D } from './layers/core';\nimport { Embedding } from './layers/embeddings';\nimport { Add, Average, Concatenate, Dot, Maximum, Minimum, Multiply } from './layers/merge';\nimport { AlphaDropout, GaussianDropout, GaussianNoise } from './layers/noise';\nimport { BatchNormalization, LayerNormalization } from './layers/normalization';\nimport { ZeroPadding2D } from './layers/padding';\nimport { AveragePooling1D, AveragePooling2D, AveragePooling3D, GlobalAveragePooling1D, GlobalAveragePooling2D, GlobalMaxPooling1D, GlobalMaxPooling2D, MaxPooling1D, MaxPooling2D, MaxPooling3D } from './layers/pooling';\nimport { GRU, GRUCell, LSTM, LSTMCell, RNN, RNNCell, SimpleRNN, SimpleRNNCell, StackedRNNCells } from './layers/recurrent';\nimport { Bidirectional, TimeDistributed } from './layers/wrappers';\n// TODO(cais): Add doc string to all the public static functions in this\n// class; include exectuable JavaScript code snippets where applicable\n// (b/74074458).\n// Input Layer.\n/**\n * An input layer is an entry point into a `tf.LayersModel`.\n *\n * `InputLayer` is generated automatically for `tf.Sequential`` models by\n * specifying the `inputshape` or `batchInputShape` for the first layer. It\n * should not be specified explicitly. However, it can be useful sometimes,\n * e.g., when constructing a sequential model from a subset of another\n * sequential model's layers. Like the code snippet below shows.\n *\n * ```js\n * // Define a model which simply adds two inputs.\n * const model1 = tf.sequential();\n * model1.add(tf.layers.dense({inputShape: [4], units: 3, activation: 'relu'}));\n * model1.add(tf.layers.dense({units: 1, activation: 'sigmoid'}));\n * model1.summary();\n * model1.predict(tf.zeros([1, 4])).print();\n *\n * // Construct another model, reusing the second layer of `model1` while\n * // not using the first layer of `model1`. 
Note that you cannot add the second\n * // layer of `model` directly as the first layer of the new sequential model,\n * // because doing so will lead to an error related to the fact that the layer\n * // is not an input layer. Instead, you need to create an `inputLayer` and add\n * // it to the new sequential model before adding the reused layer.\n * const model2 = tf.sequential();\n * // Use an inputShape that matches the input shape of `model1`'s second\n * // layer.\n * model2.add(tf.layers.inputLayer({inputShape: [3]}));\n * model2.add(model1.layers[1]);\n * model2.summary();\n * model2.predict(tf.zeros([1, 3])).print();\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Inputs', namespace: 'layers'}\n */\nexport function inputLayer(args) {\n return new InputLayer(args);\n}\n// Advanced Activation Layers.\n/**\n * Exponetial Linear Unit (ELU).\n *\n * It follows:\n * `f(x) = alpha * (exp(x) - 1.) for x < 0`,\n * `f(x) = x for x >= 0`.\n *\n * Input shape:\n * Arbitrary. Use the configuration `inputShape` when using this layer as the\n * first layer in a model.\n *\n * Output shape:\n * Same shape as the input.\n *\n * References:\n * - [Fast and Accurate Deep Network Learning by Exponential Linear Units\n * (ELUs)](https://arxiv.org/abs/1511.07289v1)\n *\n * @doc {\n * heading: 'Layers',\n * subheading: 'Advanced Activation',\n * namespace: 'layers'\n * }\n */\nexport function elu(args) {\n return new ELU(args);\n}\n/**\n * Rectified Linear Unit activation function.\n *\n * Input shape:\n * Arbitrary. Use the config field `inputShape` (Array of integers, does\n * not include the sample axis) when using this layer as the first layer\n * in a model.\n *\n * Output shape:\n * Same shape as the input.\n *\n * @doc {\n * heading: 'Layers',\n * subheading: 'Advanced Activation',\n * namespace: 'layers'\n * }\n */\nexport function reLU(args) {\n return new ReLU(args);\n}\n/**\n * Leaky version of a rectified linear unit.\n *\n * It allows a small gradient when the unit is not active:\n * `f(x) = alpha * x for x < 0.`\n * `f(x) = x for x >= 0.`\n *\n * Input shape:\n * Arbitrary. Use the configuration `inputShape` when using this layer as the\n * first layer in a model.\n *\n * Output shape:\n * Same shape as the input.\n *\n * @doc {\n * heading: 'Layers',\n * subheading: 'Advanced Activation',\n * namespace: 'layers'\n * }\n */\nexport function leakyReLU(args) {\n return new LeakyReLU(args);\n}\n/**\n * Parameterized version of a leaky rectified linear unit.\n *\n * It follows\n * `f(x) = alpha * x for x < 0.`\n * `f(x) = x for x >= 0.`\n * wherein `alpha` is a trainable weight.\n *\n * Input shape:\n * Arbitrary. Use the configuration `inputShape` when using this layer as the\n * first layer in a model.\n *\n * Output shape:\n * Same shape as the input.\n *\n * @doc {\n * heading: 'Layers',\n * subheading: 'Advanced Activation',\n * namespace: 'layers'\n * }\n */\nexport function prelu(args) {\n return new PReLU(args);\n}\n/**\n * Softmax activation layer.\n *\n * Input shape:\n * Arbitrary. Use the configuration `inputShape` when using this layer as the\n * first layer in a model.\n *\n * Output shape:\n * Same shape as the input.\n *\n * @doc {\n * heading: 'Layers',\n * subheading: 'Advanced Activation',\n * namespace: 'layers'\n * }\n */\nexport function softmax(args) {\n return new Softmax(args);\n}\n/**\n * Thresholded Rectified Linear Unit.\n *\n * It follows:\n * `f(x) = x for x > theta`,\n * `f(x) = 0 otherwise`.\n *\n * Input shape:\n * Arbitrary. 
Use the configuration `inputShape` when using this layer as the\n * first layer in a model.\n *\n * Output shape:\n * Same shape as the input.\n *\n * References:\n * - [Zero-Bias Autoencoders and the Benefits of Co-Adapting\n * Features](http://arxiv.org/abs/1402.3337)\n *\n * @doc {\n * heading: 'Layers',\n * subheading: 'Advanced Activation',\n * namespace: 'layers'\n * }\n */\nexport function thresholdedReLU(args) {\n return new ThresholdedReLU(args);\n}\n// Convolutional Layers.\n/**\n * 1D convolution layer (e.g., temporal convolution).\n *\n * This layer creates a convolution kernel that is convolved\n * with the layer input over a single spatial (or temporal) dimension\n * to produce a tensor of outputs.\n *\n * If `use_bias` is True, a bias vector is created and added to the outputs.\n *\n * If `activation` is not `null`, it is applied to the outputs as well.\n *\n * When using this layer as the first layer in a model, provide an\n * `inputShape` argument `Array` or `null`.\n *\n * For example, `inputShape` would be:\n * - `[10, 128]` for sequences of 10 vectors of 128-dimensional vectors\n * - `[null, 128]` for variable-length sequences of 128-dimensional vectors.\n *\n * @doc {heading: 'Layers', subheading: 'Convolutional', namespace: 'layers'}\n */\nexport function conv1d(args) {\n return new Conv1D(args);\n}\n/**\n * 2D convolution layer (e.g. spatial convolution over images).\n *\n * This layer creates a convolution kernel that is convolved\n * with the layer input to produce a tensor of outputs.\n *\n * If `useBias` is True, a bias vector is created and added to the outputs.\n *\n * If `activation` is not `null`, it is applied to the outputs as well.\n *\n * When using this layer as the first layer in a model,\n * provide the keyword argument `inputShape`\n * (Array of integers, does not include the sample axis),\n * e.g. `inputShape=[128, 128, 3]` for 128x128 RGB pictures\n * in `dataFormat='channelsLast'`.\n *\n * @doc {heading: 'Layers', subheading: 'Convolutional', namespace: 'layers'}\n */\nexport function conv2d(args) {\n return new Conv2D(args);\n}\n/**\n * Transposed convolutional layer (sometimes called Deconvolution).\n *\n * The need for transposed convolutions generally arises\n * from the desire to use a transformation going in the opposite direction of\n * a normal convolution, i.e., from something that has the shape of the output\n * of some convolution to something that has the shape of its input while\n * maintaining a connectivity pattern that is compatible with said\n * convolution.\n *\n * When using this layer as the first layer in a model, provide the\n * configuration `inputShape` (`Array` of integers, does not include the\n * sample axis), e.g., `inputShape: [128, 128, 3]` for 128x128 RGB pictures in\n * `dataFormat: 'channelsLast'`.\n *\n * Input shape:\n * 4D tensor with shape:\n * `[batch, channels, rows, cols]` if `dataFormat` is `'channelsFirst'`.\n * or 4D tensor with shape\n * `[batch, rows, cols, channels]` if `dataFormat` is `'channelsLast`.\n *\n * Output shape:\n * 4D tensor with shape:\n * `[batch, filters, newRows, newCols]` if `dataFormat` is\n * `'channelsFirst'`. 
or 4D tensor with shape:\n * `[batch, newRows, newCols, filters]` if `dataFormat` is `'channelsLast'`.\n *\n * References:\n * - [A guide to convolution arithmetic for deep\n * learning](https://arxiv.org/abs/1603.07285v1)\n * - [Deconvolutional\n * Networks](http://www.matthewzeiler.com/pubs/cvpr2010/cvpr2010.pdf)\n *\n * @doc {heading: 'Layers', subheading: 'Convolutional', namespace: 'layers'}\n */\nexport function conv2dTranspose(args) {\n return new Conv2DTranspose(args);\n}\n/**\n * 3D convolution layer (e.g. spatial convolution over volumes).\n *\n * This layer creates a convolution kernel that is convolved\n * with the layer input to produce a tensor of outputs.\n *\n * If `useBias` is True, a bias vector is created and added to the outputs.\n *\n * If `activation` is not `null`, it is applied to the outputs as well.\n *\n * When using this layer as the first layer in a model,\n * provide the keyword argument `inputShape`\n * (Array of integers, does not include the sample axis),\n * e.g. `inputShape=[128, 128, 128, 1]` for 128x128x128 grayscale volumes\n * in `dataFormat='channelsLast'`.\n *\n * @doc {heading: 'Layers', subheading: 'Convolutional', namespace: 'layers'}\n */\nexport function conv3d(args) {\n return new Conv3D(args);\n}\n/**\n * Depthwise separable 2D convolution.\n *\n * Separable convolution consists of first performing\n * a depthwise spatial convolution\n * (which acts on each input channel separately)\n * followed by a pointwise convolution which mixes together the resulting\n * output channels. The `depthMultiplier` argument controls how many\n * output channels are generated per input channel in the depthwise step.\n *\n * Intuitively, separable convolutions can be understood as\n * a way to factorize a convolution kernel into two smaller kernels,\n * or as an extreme version of an Inception block.\n *\n * Input shape:\n * 4D tensor with shape:\n * `[batch, channels, rows, cols]` if data_format='channelsFirst'\n * or 4D tensor with shape:\n * `[batch, rows, cols, channels]` if data_format='channelsLast'.\n *\n * Output shape:\n * 4D tensor with shape:\n * `[batch, filters, newRows, newCols]` if data_format='channelsFirst'\n * or 4D tensor with shape:\n * `[batch, newRows, newCols, filters]` if data_format='channelsLast'.\n * `rows` and `cols` values might have changed due to padding.\n *\n * @doc {heading: 'Layers', subheading: 'Convolutional', namespace: 'layers'}\n */\nexport function separableConv2d(args) {\n return new SeparableConv2D(args);\n}\n/**\n * Cropping layer for 2D input (e.g., image).\n *\n * This layer can crop an input\n * at the top, bottom, left and right side of an image tensor.\n *\n * Input shape:\n * 4D tensor with shape:\n * - If `dataFormat` is `\"channelsLast\"`:\n * `[batch, rows, cols, channels]`\n * - If `data_format` is `\"channels_first\"`:\n * `[batch, channels, rows, cols]`.\n *\n * Output shape:\n * 4D with shape:\n * - If `dataFormat` is `\"channelsLast\"`:\n * `[batch, croppedRows, croppedCols, channels]`\n * - If `dataFormat` is `\"channelsFirst\"`:\n * `[batch, channels, croppedRows, croppedCols]`.\n *\n * Examples\n * ```js\n *\n * const model = tf.sequential();\n * model.add(tf.layers.cropping2D({cropping:[[2, 2], [2, 2]],\n * inputShape: [128, 128, 3]}));\n * //now output shape is [batch, 124, 124, 3]\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Convolutional', namespace: 'layers'}\n */\nexport function cropping2D(args) {\n return new Cropping2D(args);\n}\n/**\n * Upsampling layer for 2D inputs.\n *\n * 
Repeats the rows and columns of the data\n * by size[0] and size[1] respectively.\n *\n *\n * Input shape:\n * 4D tensor with shape:\n * - If `dataFormat` is `\"channelsLast\"`:\n * `[batch, rows, cols, channels]`\n * - If `dataFormat` is `\"channelsFirst\"`:\n * `[batch, channels, rows, cols]`\n *\n * Output shape:\n * 4D tensor with shape:\n * - If `dataFormat` is `\"channelsLast\"`:\n * `[batch, upsampledRows, upsampledCols, channels]`\n * - If `dataFormat` is `\"channelsFirst\"`:\n * `[batch, channels, upsampledRows, upsampledCols]`\n *\n *\n * @doc {heading: 'Layers', subheading: 'Convolutional', namespace: 'layers'}\n */\nexport function upSampling2d(args) {\n return new UpSampling2D(args);\n}\n// Convolutional(depthwise) Layers.\n/**\n * Depthwise separable 2D convolution.\n *\n * Depthwise Separable convolutions consists in performing just the first step\n * in a depthwise spatial convolution (which acts on each input channel\n * separately). The `depthMultplier` argument controls how many output channels\n * are generated per input channel in the depthwise step.\n *\n * @doc {heading: 'Layers', subheading: 'Convolutional', namespace: 'layers'}\n */\nexport function depthwiseConv2d(args) {\n return new DepthwiseConv2D(args);\n}\n// Basic Layers.\n/**\n * Applies an activation function to an output.\n *\n * This layer applies element-wise activation function. Other layers, notably\n * `dense` can also apply activation functions. Use this isolated activation\n * function to extract the values before and after the\n * activation. For instance:\n *\n * ```js\n * const input = tf.input({shape: [5]});\n * const denseLayer = tf.layers.dense({units: 1});\n * const activationLayer = tf.layers.activation({activation: 'relu6'});\n *\n * // Obtain the output symbolic tensors by applying the layers in order.\n * const denseOutput = denseLayer.apply(input);\n * const activationOutput = activationLayer.apply(denseOutput);\n *\n * // Create the model based on the inputs.\n * const model = tf.model({\n * inputs: input,\n * outputs: [denseOutput, activationOutput]\n * });\n *\n * // Collect both outputs and print separately.\n * const [denseOut, activationOut] = model.predict(tf.randomNormal([6, 5]));\n * denseOut.print();\n * activationOut.print();\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Basic', namespace: 'layers'}\n */\nexport function activation(args) {\n return new Activation(args);\n}\n/**\n * Creates a dense (fully connected) layer.\n *\n * This layer implements the operation:\n * `output = activation(dot(input, kernel) + bias)`\n *\n * `activation` is the element-wise activation function\n * passed as the `activation` argument.\n *\n * `kernel` is a weights matrix created by the layer.\n *\n * `bias` is a bias vector created by the layer (only applicable if `useBias`\n * is `true`).\n *\n * **Input shape:**\n *\n * nD `tf.Tensor` with shape: `(batchSize, ..., inputDim)`.\n *\n * The most common situation would be\n * a 2D input with shape `(batchSize, inputDim)`.\n *\n * **Output shape:**\n *\n * nD tensor with shape: `(batchSize, ..., units)`.\n *\n * For instance, for a 2D input with shape `(batchSize, inputDim)`,\n * the output would have shape `(batchSize, units)`.\n *\n * Note: if the input to the layer has a rank greater than 2, then it is\n * flattened prior to the initial dot product with the kernel.\n *\n * @doc {heading: 'Layers', subheading: 'Basic', namespace: 'layers'}\n */\nexport function dense(args) {\n return new Dense(args);\n}\n/**\n * Applies\n * 
[dropout](http://www.cs.toronto.edu/~rsalakhu/papers/srivastava14a.pdf) to\n * the input.\n *\n * Dropout consists in randomly setting a fraction `rate` of input units to 0 at\n * each update during training time, which helps prevent overfitting.\n *\n * @doc {heading: 'Layers', subheading: 'Basic', namespace: 'layers'}\n */\nexport function dropout(args) {\n return new Dropout(args);\n}\n/**\n * Spatial 1D version of Dropout.\n *\n * This Layer type performs the same function as the Dropout layer, but it drops\n * entire 1D feature maps instead of individual elements. For example, if an\n * input example consists of 3 timesteps and the feature map for each timestep\n * has a size of 4, a `spatialDropout1d` layer may zero out the feature maps\n * of the 1st timesteps and 2nd timesteps completely while sparing all feature\n * elements of the 3rd timestep.\n *\n * If adjacent frames (timesteps) are strongly correlated (as is normally the\n * case in early convolution layers), regular dropout will not regularize the\n * activation and will otherwise just result in merely an effective learning\n * rate decrease. In this case, `spatialDropout1d` will help promote\n * independence among feature maps and should be used instead.\n *\n * **Arguments:**\n * rate: A floating-point number >=0 and <=1. Fraction of the input elements\n * to drop.\n *\n * **Input shape:**\n * 3D tensor with shape `(samples, timesteps, channels)`.\n *\n * **Output shape:**\n * Same as the input shape.\n *\n * References:\n * - [Efficient Object Localization Using Convolutional\n * Networks](https://arxiv.org/abs/1411.4280)\n *\n * @doc {heading: 'Layers', subheading: 'Basic', namespace: 'layers'}\n */\nexport function spatialDropout1d(args) {\n return new SpatialDropout1D(args);\n}\n/**\n * Flattens the input. Does not affect the batch size.\n *\n * A `Flatten` layer flattens each batch in its inputs to 1D (making the output\n * 2D).\n *\n * For example:\n *\n * ```js\n * const input = tf.input({shape: [4, 3]});\n * const flattenLayer = tf.layers.flatten();\n * // Inspect the inferred output shape of the flatten layer, which\n * // equals `[null, 12]`. The 2nd dimension is 4 * 3, i.e., the result of the\n * // flattening. (The 1st dimension is the undermined batch size.)\n * console.log(JSON.stringify(flattenLayer.apply(input).shape));\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Basic', namespace: 'layers'}\n */\nexport function flatten(args) {\n return new Flatten(args);\n}\n/**\n * Repeats the input n times in a new dimension.\n *\n * ```js\n * const model = tf.sequential();\n * model.add(tf.layers.repeatVector({n: 4, inputShape: [2]}));\n * const x = tf.tensor2d([[10, 20]]);\n * // Use the model to do inference on a data point the model hasn't see\n * model.predict(x).print();\n * // output shape is now [batch, 2, 4]\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Basic', namespace: 'layers'}\n */\nexport function repeatVector(args) {\n return new RepeatVector(args);\n}\n/**\n * Reshapes an input to a certain shape.\n *\n * ```js\n * const input = tf.input({shape: [4, 3]});\n * const reshapeLayer = tf.layers.reshape({targetShape: [2, 6]});\n * // Inspect the inferred output shape of the Reshape layer, which\n * // equals `[null, 2, 6]`. 
(The 1st dimension is the undermined batch size.)\n * console.log(JSON.stringify(reshapeLayer.apply(input).shape));\n * ```\n *\n * Input shape:\n * Arbitrary, although all dimensions in the input shape must be fixed.\n * Use the configuration `inputShape` when using this layer as the\n * first layer in a model.\n *\n *\n * Output shape:\n * [batchSize, targetShape[0], targetShape[1], ...,\n * targetShape[targetShape.length - 1]].\n *\n * @doc {heading: 'Layers', subheading: 'Basic', namespace: 'layers'}\n */\nexport function reshape(args) {\n return new Reshape(args);\n}\n/**\n * Permutes the dimensions of the input according to a given pattern.\n *\n * Useful for, e.g., connecting RNNs and convnets together.\n *\n * Example:\n *\n * ```js\n * const model = tf.sequential();\n * model.add(tf.layers.permute({\n * dims: [2, 1],\n * inputShape: [10, 64]\n * }));\n * console.log(model.outputShape);\n * // Now model's output shape is [null, 64, 10], where null is the\n * // unpermuted sample (batch) dimension.\n * ```\n *\n * Input shape:\n * Arbitrary. Use the configuration field `inputShape` when using this\n * layer as the first layer in a model.\n *\n * Output shape:\n * Same rank as the input shape, but with the dimensions re-ordered (i.e.,\n * permuted) according to the `dims` configuration of this layer.\n *\n * @doc {heading: 'Layers', subheading: 'Basic', namespace: 'layers'}\n */\nexport function permute(args) {\n return new Permute(args);\n}\n/**\n * Maps positive integers (indices) into dense vectors of fixed size.\n * eg. [[4], [20]] -> [[0.25, 0.1], [0.6, -0.2]]\n *\n * **Input shape:** 2D tensor with shape: `[batchSize, sequenceLength]`.\n *\n * **Output shape:** 3D tensor with shape: `[batchSize, sequenceLength,\n * outputDim]`.\n *\n * @doc {heading: 'Layers', subheading: 'Basic', namespace: 'layers'}\n */\nexport function embedding(args) {\n return new Embedding(args);\n}\n// Merge Layers.\n/**\n * Layer that performs element-wise addition on an `Array` of inputs.\n *\n * It takes as input a list of tensors, all of the same shape, and returns a\n * single tensor (also of the same shape). The inputs are specified as an\n * `Array` when the `apply` method of the `Add` layer instance is called. For\n * example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const addLayer = tf.layers.add();\n * const sum = addLayer.apply([input1, input2]);\n * console.log(JSON.stringify(sum.shape));\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Merge', namespace: 'layers'}\n */\nexport function add(args) {\n return new Add(args);\n}\n/**\n * Layer that performs element-wise averaging on an `Array` of inputs.\n *\n * It takes as input a list of tensors, all of the same shape, and returns a\n * single tensor (also of the same shape). 
For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const averageLayer = tf.layers.average();\n * const average = averageLayer.apply([input1, input2]);\n * console.log(JSON.stringify(average.shape));\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Merge', namespace: 'layers'}\n */\nexport function average(args) {\n return new Average(args);\n}\n/**\n * Layer that concatenates an `Array` of inputs.\n *\n * It takes a list of tensors, all of the same shape except for the\n * concatenation axis, and returns a single tensor, the concatenation\n * of all inputs. For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 3]});\n * const concatLayer = tf.layers.concatenate();\n * const output = concatLayer.apply([input1, input2]);\n * console.log(JSON.stringify(output.shape));\n * // You get [null, 2, 5], with the first dimension as the undetermined batch\n * // dimension. The last dimension (5) is the result of concatenating the\n * // last dimensions of the inputs (2 and 3).\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Merge', namespace: 'layers'}\n */\nexport function concatenate(args) {\n return new Concatenate(args);\n}\n/**\n * Layer that computes the element-wise maximum an `Array` of inputs.\n *\n * It takes as input a list of tensors, all of the same shape and returns a\n * single tensor (also of the same shape). For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const maxLayer = tf.layers.maximum();\n * const max = maxLayer.apply([input1, input2]);\n * console.log(JSON.stringify(max.shape));\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Merge', namespace: 'layers'}\n */\nexport function maximum(args) {\n return new Maximum(args);\n}\n/**\n * Layer that computes the element-wise minimum of an `Array` of inputs.\n *\n * It takes as input a list of tensors, all of the same shape and returns a\n * single tensor (also of the same shape). 
For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const minLayer = tf.layers.minimum();\n * const min = minLayer.apply([input1, input2]);\n * console.log(JSON.stringify(min.shape));\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Merge', namespace: 'layers'}\n */\nexport function minimum(args) {\n return new Minimum(args);\n}\n/**\n * Layer that multiplies (element-wise) an `Array` of inputs.\n *\n * It takes as input an Array of tensors, all of the same\n * shape, and returns a single tensor (also of the same shape).\n * For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const input3 = tf.input({shape: [2, 2]});\n * const multiplyLayer = tf.layers.multiply();\n * const product = multiplyLayer.apply([input1, input2, input3]);\n * console.log(product.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n *\n * @doc {heading: 'Layers', subheading: 'Merge', namespace: 'layers'}\n */\nexport function multiply(args) {\n return new Multiply(args);\n}\n/**\n * Layer that computes a dot product between samples in two tensors.\n *\n * E.g., if applied to a list of two tensors `a` and `b` both of shape\n * `[batchSize, n]`, the output will be a tensor of shape `[batchSize, 1]`,\n * where each entry at index `[i, 0]` will be the dot product between\n * `a[i, :]` and `b[i, :]`.\n *\n * Example:\n *\n * ```js\n * const dotLayer = tf.layers.dot({axes: -1});\n * const x1 = tf.tensor2d([[10, 20], [30, 40]]);\n * const x2 = tf.tensor2d([[-1, -2], [-3, -4]]);\n *\n * // Invoke the layer's apply() method in eager (imperative) mode.\n * const y = dotLayer.apply([x1, x2]);\n * y.print();\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Merge', namespace: 'layers'}\n */\nexport function dot(args) {\n return new Dot(args);\n}\n// Normalization Layers.\n/**\n * Batch normalization layer (Ioffe and Szegedy, 2014).\n *\n * Normalize the activations of the previous layer at each batch,\n * i.e. applies a transformation that maintains the mean activation\n * close to 0 and the activation standard deviation close to 1.\n *\n * Input shape:\n * Arbitrary. Use the keyword argument `inputShape` (Array of integers, does\n * not include the sample axis) when calling the constructor of this class,\n * if this layer is used as a first layer in a model.\n *\n * Output shape:\n * Same shape as input.\n *\n * References:\n * - [Batch Normalization: Accelerating Deep Network Training by Reducing\n * Internal Covariate Shift](https://arxiv.org/abs/1502.03167)\n *\n * @doc {heading: 'Layers', subheading: 'Normalization', namespace: 'layers'}\n */\nexport function batchNormalization(args) {\n return new BatchNormalization(args);\n}\n/**\n * Layer-normalization layer (Ba et al., 2016).\n *\n * Normalizes the activations of the previous layer for each given example in a\n * batch independently, instead of across a batch like in `batchNormalization`.\n * In other words, this layer applies a transformation that maintanis the mean\n * activation within each example close to0 and activation variance close to 1.\n *\n * Input shape:\n * Arbitrary. 
Use the argument `inputShape` when using this layer as the first\n * layer in a model.\n *\n * Output shape:\n * Same as input.\n *\n * References:\n * - [Layer Normalization](https://arxiv.org/abs/1607.06450)\n *\n * @doc {heading: 'Layers', subheading: 'Normalization', namespace: 'layers'}\n */\nexport function layerNormalization(args) {\n return new LayerNormalization(args);\n}\n// Padding Layers.\n/**\n * Zero-padding layer for 2D input (e.g., image).\n *\n * This layer can add rows and columns of zeros\n * at the top, bottom, left and right side of an image tensor.\n *\n * Input shape:\n * 4D tensor with shape:\n * - If `dataFormat` is `\"channelsLast\"`:\n * `[batch, rows, cols, channels]`\n * - If `data_format` is `\"channels_first\"`:\n * `[batch, channels, rows, cols]`.\n *\n * Output shape:\n * 4D with shape:\n * - If `dataFormat` is `\"channelsLast\"`:\n * `[batch, paddedRows, paddedCols, channels]`\n * - If `dataFormat` is `\"channelsFirst\"`:\n * `[batch, channels, paddedRows, paddedCols]`.\n *\n * @doc {heading: 'Layers', subheading: 'Padding', namespace: 'layers'}\n */\nexport function zeroPadding2d(args) {\n return new ZeroPadding2D(args);\n}\n// Pooling Layers.\n/**\n * Average pooling operation for spatial data.\n *\n * Input shape: `[batchSize, inLength, channels]`\n *\n * Output shape: `[batchSize, pooledLength, channels]`\n *\n * `tf.avgPool1d` is an alias.\n *\n * @doc {heading: 'Layers', subheading: 'Pooling', namespace: 'layers'}\n */\nexport function averagePooling1d(args) {\n return new AveragePooling1D(args);\n}\nexport function avgPool1d(args) {\n return averagePooling1d(args);\n}\n// For backwards compatibility.\n// See https://github.com/tensorflow/tfjs/issues/152\nexport function avgPooling1d(args) {\n return averagePooling1d(args);\n}\n/**\n * Average pooling operation for spatial data.\n *\n * Input shape:\n * - If `dataFormat === CHANNEL_LAST`:\n * 4D tensor with shape:\n * `[batchSize, rows, cols, channels]`\n * - If `dataFormat === CHANNEL_FIRST`:\n * 4D tensor with shape:\n * `[batchSize, channels, rows, cols]`\n *\n * Output shape\n * - If `dataFormat === CHANNEL_LAST`:\n * 4D tensor with shape:\n * `[batchSize, pooleRows, pooledCols, channels]`\n * - If `dataFormat === CHANNEL_FIRST`:\n * 4D tensor with shape:\n * `[batchSize, channels, pooleRows, pooledCols]`\n *\n * `tf.avgPool2d` is an alias.\n *\n * @doc {heading: 'Layers', subheading: 'Pooling', namespace: 'layers'}\n */\nexport function averagePooling2d(args) {\n return new AveragePooling2D(args);\n}\nexport function avgPool2d(args) {\n return averagePooling2d(args);\n}\n// For backwards compatibility.\n// See https://github.com/tensorflow/tfjs/issues/152\nexport function avgPooling2d(args) {\n return averagePooling2d(args);\n}\n/**\n * Average pooling operation for 3D data.\n *\n * Input shape\n * - If `dataFormat === channelsLast`:\n * 5D tensor with shape:\n * `[batchSize, depths, rows, cols, channels]`\n * - If `dataFormat === channelsFirst`:\n * 4D tensor with shape:\n * `[batchSize, channels, depths, rows, cols]`\n *\n * Output shape\n * - If `dataFormat=channelsLast`:\n * 5D tensor with shape:\n * `[batchSize, pooledDepths, pooledRows, pooledCols, channels]`\n * - If `dataFormat=channelsFirst`:\n * 5D tensor with shape:\n * `[batchSize, channels, pooledDepths, pooledRows, pooledCols]`\n *\n * @doc {heading: 'Layers', subheading: 'Pooling', namespace: 'layers'}\n */\nexport function averagePooling3d(args) {\n return new AveragePooling3D(args);\n}\nexport function avgPool3d(args) {\n return 
averagePooling3d(args);\n}\n// For backwards compatibility.\n// See https://github.com/tensorflow/tfjs/issues/152\nexport function avgPooling3d(args) {\n return averagePooling3d(args);\n}\n/**\n * Global average pooling operation for temporal data.\n *\n * Input Shape: 3D tensor with shape: `[batchSize, steps, features]`.\n *\n * Output Shape:2D tensor with shape: `[batchSize, features]`.\n *\n * @doc {heading: 'Layers', subheading: 'Pooling', namespace: 'layers'}\n */\nexport function globalAveragePooling1d(args) {\n return new GlobalAveragePooling1D(args);\n}\n/**\n * Global average pooling operation for spatial data.\n *\n * Input shape:\n * - If `dataFormat` is `CHANNEL_LAST`:\n * 4D tensor with shape: `[batchSize, rows, cols, channels]`.\n * - If `dataFormat` is `CHANNEL_FIRST`:\n * 4D tensor with shape: `[batchSize, channels, rows, cols]`.\n *\n * Output shape:\n * 2D tensor with shape: `[batchSize, channels]`.\n *\n * @doc {heading: 'Layers', subheading: 'Pooling', namespace: 'layers'}\n */\nexport function globalAveragePooling2d(args) {\n return new GlobalAveragePooling2D(args);\n}\n/**\n * Global max pooling operation for temporal data.\n *\n * Input Shape: 3D tensor with shape: `[batchSize, steps, features]`.\n *\n * Output Shape:2D tensor with shape: `[batchSize, features]`.\n *\n * @doc {heading: 'Layers', subheading: 'Pooling', namespace: 'layers'}\n */\nexport function globalMaxPooling1d(args) {\n return new GlobalMaxPooling1D(args);\n}\n/**\n * Global max pooling operation for spatial data.\n *\n * Input shape:\n * - If `dataFormat` is `CHANNEL_LAST`:\n * 4D tensor with shape: `[batchSize, rows, cols, channels]`.\n * - If `dataFormat` is `CHANNEL_FIRST`:\n * 4D tensor with shape: `[batchSize, channels, rows, cols]`.\n *\n * Output shape:\n * 2D tensor with shape: `[batchSize, channels]`.\n *\n * @doc {heading: 'Layers', subheading: 'Pooling', namespace: 'layers'}\n */\nexport function globalMaxPooling2d(args) {\n return new GlobalMaxPooling2D(args);\n}\n/**\n * Max pooling operation for temporal data.\n *\n * Input shape: `[batchSize, inLength, channels]`\n *\n * Output shape: `[batchSize, pooledLength, channels]`\n *\n * @doc {heading: 'Layers', subheading: 'Pooling', namespace: 'layers'}\n */\nexport function maxPooling1d(args) {\n return new MaxPooling1D(args);\n}\n/**\n * Max pooling operation for spatial data.\n *\n * Input shape\n * - If `dataFormat === CHANNEL_LAST`:\n * 4D tensor with shape:\n * `[batchSize, rows, cols, channels]`\n * - If `dataFormat === CHANNEL_FIRST`:\n * 4D tensor with shape:\n * `[batchSize, channels, rows, cols]`\n *\n * Output shape\n * - If `dataFormat=CHANNEL_LAST`:\n * 4D tensor with shape:\n * `[batchSize, pooleRows, pooledCols, channels]`\n * - If `dataFormat=CHANNEL_FIRST`:\n * 4D tensor with shape:\n * `[batchSize, channels, pooleRows, pooledCols]`\n *\n * @doc {heading: 'Layers', subheading: 'Pooling', namespace: 'layers'}\n */\nexport function maxPooling2d(args) {\n return new MaxPooling2D(args);\n}\n/**\n * Max pooling operation for 3D data.\n *\n * Input shape\n * - If `dataFormat === channelsLast`:\n * 5D tensor with shape:\n * `[batchSize, depths, rows, cols, channels]`\n * - If `dataFormat === channelsFirst`:\n * 5D tensor with shape:\n * `[batchSize, channels, depths, rows, cols]`\n *\n * Output shape\n * - If `dataFormat=channelsLast`:\n * 5D tensor with shape:\n * `[batchSize, pooledDepths, pooledRows, pooledCols, channels]`\n * - If `dataFormat=channelsFirst`:\n * 5D tensor with shape:\n * `[batchSize, channels, pooledDepths, 
pooledRows, pooledCols]`\n *\n * @doc {heading: 'Layers', subheading: 'Pooling', namespace: 'layers'}\n */\nexport function maxPooling3d(args) {\n return new MaxPooling3D(args);\n}\n// Recurrent Layers.\n/**\n * Gated Recurrent Unit - Cho et al. 2014.\n *\n * This is an `RNN` layer consisting of one `GRUCell`. However, unlike\n * the underlying `GRUCell`, the `apply` method of `SimpleRNN` operates\n * on a sequence of inputs. The shape of the input (not including the first,\n * batch dimension) needs to be at least 2-D, with the first dimension being\n * time steps. For example:\n *\n * ```js\n * const rnn = tf.layers.gru({units: 8, returnSequences: true});\n *\n * // Create an input with 10 time steps.\n * const input = tf.input({shape: [10, 20]});\n * const output = rnn.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10, 8]: 1st dimension is unknown batch size; 2nd dimension is the\n * // same as the sequence length of `input`, due to `returnSequences`: `true`;\n * // 3rd dimension is the `GRUCell`'s number of units.\n *\n * @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'}\n */\nexport function gru(args) {\n return new GRU(args);\n}\n/**\n * Cell class for `GRU`.\n *\n * `GRUCell` is distinct from the `RNN` subclass `GRU` in that its\n * `apply` method takes the input data of only a single time step and returns\n * the cell's output at the time step, while `GRU` takes the input data\n * over a number of time steps. For example:\n *\n * ```js\n * const cell = tf.layers.gruCell({units: 2});\n * const input = tf.input({shape: [10]});\n * const output = cell.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10]: This is the cell's output at a single time step. The 1st\n * // dimension is the unknown batch size.\n * ```\n *\n * Instance(s) of `GRUCell` can be used to construct `RNN` layers. The\n * most typical use of this workflow is to combine a number of cells into a\n * stacked RNN cell (i.e., `StackedRNNCell` internally) and use it to create an\n * RNN. For example:\n *\n * ```js\n * const cells = [\n * tf.layers.gruCell({units: 4}),\n * tf.layers.gruCell({units: 8}),\n * ];\n * const rnn = tf.layers.rnn({cell: cells, returnSequences: true});\n *\n * // Create an input with 10 time steps and a length-20 vector at each step.\n * const input = tf.input({shape: [10, 20]});\n * const output = rnn.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10, 8]: 1st dimension is unknown batch size; 2nd dimension is the\n * // same as the sequence length of `input`, due to `returnSequences`: `true`;\n * // 3rd dimension is the last `gruCell`'s number of units.\n * ```\n *\n * To create an `RNN` consisting of only *one* `GRUCell`, use the\n * `tf.layers.gru`.\n *\n * @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'}\n */\nexport function gruCell(args) {\n return new GRUCell(args);\n}\n/**\n * Long-Short Term Memory layer - Hochreiter 1997.\n *\n * This is an `RNN` layer consisting of one `LSTMCell`. However, unlike\n * the underlying `LSTMCell`, the `apply` method of `LSTM` operates\n * on a sequence of inputs. The shape of the input (not including the first,\n * batch dimension) needs to be at least 2-D, with the first dimension being\n * time steps. 
For example:\n *\n * ```js\n * const lstm = tf.layers.lstm({units: 8, returnSequences: true});\n *\n * // Create an input with 10 time steps.\n * const input = tf.input({shape: [10, 20]});\n * const output = lstm.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10, 8]: 1st dimension is unknown batch size; 2nd dimension is the\n * // same as the sequence length of `input`, due to `returnSequences`: `true`;\n * // 3rd dimension is the `LSTMCell`'s number of units.\n *\n * @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'}\n */\nexport function lstm(args) {\n return new LSTM(args);\n}\n/**\n * Cell class for `LSTM`.\n *\n * `LSTMCell` is distinct from the `RNN` subclass `LSTM` in that its\n * `apply` method takes the input data of only a single time step and returns\n * the cell's output at the time step, while `LSTM` takes the input data\n * over a number of time steps. For example:\n *\n * ```js\n * const cell = tf.layers.lstmCell({units: 2});\n * const input = tf.input({shape: [10]});\n * const output = cell.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10]: This is the cell's output at a single time step. The 1st\n * // dimension is the unknown batch size.\n * ```\n *\n * Instance(s) of `LSTMCell` can be used to construct `RNN` layers. The\n * most typical use of this workflow is to combine a number of cells into a\n * stacked RNN cell (i.e., `StackedRNNCell` internally) and use it to create an\n * RNN. For example:\n *\n * ```js\n * const cells = [\n * tf.layers.lstmCell({units: 4}),\n * tf.layers.lstmCell({units: 8}),\n * ];\n * const rnn = tf.layers.rnn({cell: cells, returnSequences: true});\n *\n * // Create an input with 10 time steps and a length-20 vector at each step.\n * const input = tf.input({shape: [10, 20]});\n * const output = rnn.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10, 8]: 1st dimension is unknown batch size; 2nd dimension is the\n * // same as the sequence length of `input`, due to `returnSequences`: `true`;\n * // 3rd dimension is the last `lstmCell`'s number of units.\n * ```\n *\n * To create an `RNN` consisting of only *one* `LSTMCell`, use the\n * `tf.layers.lstm`.\n *\n * @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'}\n */\nexport function lstmCell(args) {\n return new LSTMCell(args);\n}\n/**\n * Fully-connected RNN where the output is to be fed back to input.\n *\n * This is an `RNN` layer consisting of one `SimpleRNNCell`. However, unlike\n * the underlying `SimpleRNNCell`, the `apply` method of `SimpleRNN` operates\n * on a sequence of inputs. The shape of the input (not including the first,\n * batch dimension) needs to be at least 2-D, with the first dimension being\n * time steps. 
For example:\n *\n * ```js\n * const rnn = tf.layers.simpleRNN({units: 8, returnSequences: true});\n *\n * // Create an input with 10 time steps.\n * const input = tf.input({shape: [10, 20]});\n * const output = rnn.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10, 8]: 1st dimension is unknown batch size; 2nd dimension is the\n * // same as the sequence length of `input`, due to `returnSequences`: `true`;\n * // 3rd dimension is the `SimpleRNNCell`'s number of units.\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'}\n */\nexport function simpleRNN(args) {\n return new SimpleRNN(args);\n}\n/**\n * Cell class for `SimpleRNN`.\n *\n * `SimpleRNNCell` is distinct from the `RNN` subclass `SimpleRNN` in that its\n * `apply` method takes the input data of only a single time step and returns\n * the cell's output at the time step, while `SimpleRNN` takes the input data\n * over a number of time steps. For example:\n *\n * ```js\n * const cell = tf.layers.simpleRNNCell({units: 2});\n * const input = tf.input({shape: [10]});\n * const output = cell.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10]: This is the cell's output at a single time step. The 1st\n * // dimension is the unknown batch size.\n * ```\n *\n * Instance(s) of `SimpleRNNCell` can be used to construct `RNN` layers. The\n * most typical use of this workflow is to combine a number of cells into a\n * stacked RNN cell (i.e., `StackedRNNCell` internally) and use it to create an\n * RNN. For example:\n *\n * ```js\n * const cells = [\n * tf.layers.simpleRNNCell({units: 4}),\n * tf.layers.simpleRNNCell({units: 8}),\n * ];\n * const rnn = tf.layers.rnn({cell: cells, returnSequences: true});\n *\n * // Create an input with 10 time steps and a length-20 vector at each step.\n * const input = tf.input({shape: [10, 20]});\n * const output = rnn.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10, 8]: 1st dimension is unknown batch size; 2nd dimension is the\n * // same as the sequence length of `input`, due to `returnSequences`: `true`;\n * // 3rd dimension is the last `SimpleRNNCell`'s number of units.\n * ```\n *\n * To create an `RNN` consisting of only *one* `SimpleRNNCell`, use the\n * `tf.layers.simpleRNN`.\n *\n * @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'}\n */\nexport function simpleRNNCell(args) {\n return new SimpleRNNCell(args);\n}\n/**\n * Convolutional LSTM layer - Xingjian Shi 2015.\n *\n * This is an `ConvRNN2D` layer consisting of one `ConvLSTM2DCell`. However,\n * unlike the underlying `ConvLSTM2DCell`, the `apply` method of `ConvLSTM2D`\n * operates on a sequence of inputs. The shape of the input (not including the\n * first, batch dimension) needs to be 4-D, with the first dimension being time\n * steps. 
For example:\n *\n * ```js\n * const filters = 3;\n * const kernelSize = 3;\n *\n * const batchSize = 4;\n * const sequenceLength = 2;\n * const size = 5;\n * const channels = 3;\n *\n * const inputShape = [batchSize, sequenceLength, size, size, channels];\n * const input = tf.ones(inputShape);\n *\n * const layer = tf.layers.convLstm2d({filters, kernelSize});\n *\n * const output = layer.apply(input);\n * ```\n */\n/** @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'} */\nexport function convLstm2d(args) {\n return new ConvLSTM2D(args);\n}\n/**\n * Cell class for `ConvLSTM2D`.\n *\n * `ConvLSTM2DCell` is distinct from the `ConvRNN2D` subclass `ConvLSTM2D` in\n * that its `call` method takes the input data of only a single time step and\n * returns the cell's output at the time step, while `ConvLSTM2D` takes the\n * input data over a number of time steps. For example:\n *\n * ```js\n * const filters = 3;\n * const kernelSize = 3;\n *\n * const sequenceLength = 1;\n * const size = 5;\n * const channels = 3;\n *\n * const inputShape = [sequenceLength, size, size, channels];\n * const input = tf.ones(inputShape);\n *\n * const cell = tf.layers.convLstm2dCell({filters, kernelSize});\n *\n * cell.build(input.shape);\n *\n * const outputSize = size - kernelSize + 1;\n * const outShape = [sequenceLength, outputSize, outputSize, filters];\n *\n * const initialH = tf.zeros(outShape);\n * const initialC = tf.zeros(outShape);\n *\n * const [o, h, c] = cell.call([input, initialH, initialC], {});\n * ```\n */\n/** @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'} */\nexport function convLstm2dCell(args) {\n return new ConvLSTM2DCell(args);\n}\n/**\n * Base class for recurrent layers.\n *\n * Input shape:\n * 3D tensor with shape `[batchSize, timeSteps, inputDim]`.\n *\n * Output shape:\n * - if `returnState`, an Array of tensors (i.e., `tf.Tensor`s). The first\n * tensor is the output. The remaining tensors are the states at the\n * last time step, each with shape `[batchSize, units]`.\n * - if `returnSequences`, the output will have shape\n * `[batchSize, timeSteps, units]`.\n * - else, the output will have shape `[batchSize, units]`.\n *\n * Masking:\n * This layer supports masking for input data with a variable number\n * of timesteps. To introduce masks to your data,\n * use an embedding layer with the `mask_zero` parameter\n * set to `True`.\n *\n * Notes on using statefulness in RNNs:\n * You can set RNN layers to be 'stateful', which means that the states\n * computed for the samples in one batch will be reused as initial states\n * for the samples in the next batch. This assumes a one-to-one mapping\n * between samples in different successive batches.\n *\n * To enable statefulness:\n * - specify `stateful: true` in the layer constructor.\n * - specify a fixed batch size for your model, by passing\n * if sequential model:\n * `batchInputShape=[...]` to the first layer in your model.\n * else for functional model with 1 or more Input layers:\n * `batchShape=[...]` to all the first layers in your model.\n * This is the expected shape of your inputs *including the batch size*.\n * It should be a tuple of integers, e.g. 
`(32, 10, 100)`.\n * - specify `shuffle=False` when calling fit().\n *\n * To reset the states of your model, call `.resetStates()` on either\n * a specific layer, or on your entire model.\n *\n * Note on specifying the initial state of RNNs\n * You can specify the initial state of RNN layers symbolically by\n * calling them with the option `initialState`. The value of\n * `initialState` should be a tensor or list of tensors representing\n * the initial state of the RNN layer.\n *\n * You can specify the initial state of RNN layers numerically by\n * calling `resetStates` with the keyword argument `states`. The value of\n * `states` should be a numpy array or list of numpy arrays representing\n * the initial state of the RNN layer.\n *\n * Note on passing external constants to RNNs\n * You can pass \"external\" constants to the cell using the `constants`\n * keyword argument of `RNN.call` method. This requires that the `cell.call`\n * method accepts the same keyword argument `constants`. Such constants\n * can be used to conditon the cell transformation on additional static inputs\n * (not changing over time), a.k.a an attention mechanism.\n *\n * @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'}\n */\nexport function rnn(args) {\n return new RNN(args);\n}\n/**\n * Wrapper allowing a stack of RNN cells to behave as a single cell.\n *\n * Used to implement efficient stacked RNNs.\n *\n * @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'}\n */\nexport function stackedRNNCells(args) {\n return new StackedRNNCells(args);\n}\n// Wrapper Layers.\n/** @doc {heading: 'Layers', subheading: 'Wrapper', namespace: 'layers'} */\nexport function bidirectional(args) {\n return new Bidirectional(args);\n}\n/**\n * This wrapper applies a layer to every temporal slice of an input.\n *\n * The input should be at least 3D, and the dimension of the index `1` will be\n * considered to be the temporal dimension.\n *\n * Consider a batch of 32 samples, where each sample is a sequence of 10 vectors\n * of 16 dimensions. 
The batch input shape of the layer is then `[32, 10,\n * 16]`, and the `inputShape`, not including the sample dimension, is\n * `[10, 16]`.\n *\n * You can then use `TimeDistributed` to apply a `Dense` layer to each of the 10\n * timesteps, independently:\n *\n * ```js\n * const model = tf.sequential();\n * model.add(tf.layers.timeDistributed({\n * layer: tf.layers.dense({units: 8}),\n * inputShape: [10, 16],\n * }));\n *\n * // Now model.outputShape = [null, 10, 8].\n * // The output will then have shape `[32, 10, 8]`.\n *\n * // In subsequent layers, there is no need for `inputShape`:\n * model.add(tf.layers.timeDistributed({layer: tf.layers.dense({units: 32})}));\n * console.log(JSON.stringify(model.outputs[0].shape));\n * // Now model.outputShape = [null, 10, 32].\n * ```\n *\n * The output will then have shape `[32, 10, 32]`.\n *\n * `TimeDistributed` can be used with arbitrary layers, not just `Dense`, for\n * instance a `Conv2D` layer.\n *\n * ```js\n * const model = tf.sequential();\n * model.add(tf.layers.timeDistributed({\n * layer: tf.layers.conv2d({filters: 64, kernelSize: [3, 3]}),\n * inputShape: [10, 299, 299, 3],\n * }));\n * console.log(JSON.stringify(model.outputs[0].shape));\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Wrapper', namespace: 'layers'}\n */\nexport function timeDistributed(args) {\n return new TimeDistributed(args);\n}\n// Aliases for pooling.\nexport const globalMaxPool1d = globalMaxPooling1d;\nexport const globalMaxPool2d = globalMaxPooling2d;\nexport const maxPool1d = maxPooling1d;\nexport const maxPool2d = maxPooling2d;\nexport { Layer, RNN, RNNCell, input /* alias for tf.input */ };\n/**\n * Apply additive zero-centered Gaussian noise.\n *\n * As it is a regularization layer, it is only active at training time.\n *\n * This is useful to mitigate overfitting\n * (you could see it as a form of random data augmentation).\n * Gaussian Noise (GS) is a natural choice as corruption process\n * for real valued inputs.\n *\n * # Arguments\n * stddev: float, standard deviation of the noise distribution.\n *\n * # Input shape\n * Arbitrary. Use the keyword argument `input_shape`\n * (tuple of integers, does not include the samples axis)\n * when using this layer as the first layer in a model.\n *\n * # Output shape\n * Same shape as input.\n *\n * @doc {heading: 'Layers', subheading: 'Noise', namespace: 'layers'}\n */\nexport function gaussianNoise(args) {\n return new GaussianNoise(args);\n}\n/**\n * Apply multiplicative 1-centered Gaussian noise.\n *\n * As it is a regularization layer, it is only active at training time.\n *\n * Arguments:\n * - `rate`: float, drop probability (as with `Dropout`).\n * The multiplicative noise will have\n * standard deviation `sqrt(rate / (1 - rate))`.\n *\n * Input shape:\n * Arbitrary. 
Use the keyword argument `inputShape`\n * (tuple of integers, does not include the samples axis)\n * when using this layer as the first layer in a model.\n *\n * Output shape:\n * Same shape as input.\n *\n * References:\n * - [Dropout: A Simple Way to Prevent Neural Networks from Overfitting](\n * http://www.cs.toronto.edu/~rsalakhu/papers/srivastava14a.pdf)\n *\n * @doc {heading: 'Layers', subheading: 'Noise', namespace: 'layers'}\n */\nexport function gaussianDropout(args) {\n return new GaussianDropout(args);\n}\n/**\n * Applies Alpha Dropout to the input.\n *\n * As it is a regularization layer, it is only active at training time.\n *\n * Alpha Dropout is a `Dropout` that keeps mean and variance of inputs\n * to their original values, in order to ensure the self-normalizing property\n * even after this dropout.\n * Alpha Dropout fits well to Scaled Exponential Linear Units\n * by randomly setting activations to the negative saturation value.\n *\n * Arguments:\n * - `rate`: float, drop probability (as with `Dropout`).\n * The multiplicative noise will have\n * standard deviation `sqrt(rate / (1 - rate))`.\n * - `noise_shape`: A 1-D `Tensor` of type `int32`, representing the\n * shape for randomly generated keep/drop flags.\n *\n * Input shape:\n * Arbitrary. Use the keyword argument `inputShape`\n * (tuple of integers, does not include the samples axis)\n * when using this layer as the first layer in a model.\n *\n * Output shape:\n * Same shape as input.\n *\n * References:\n * - [Self-Normalizing Neural Networks](https://arxiv.org/abs/1706.02515)\n *\n * @doc {heading: 'Layers', subheading: 'Noise', namespace: 'layers'}\n */\nexport function alphaDropout(args) {\n return new AlphaDropout(args);\n}\n/**\n * Masks a sequence by using a mask value to skip timesteps.\n *\n * If all features for a given sample timestep are equal to `mask_value`,\n * then the sample timestep will be masked (skipped) in all downstream layers\n * (as long as they support masking).\n *\n * If any downstream layer does not support masking yet receives such\n * an input mask, an exception will be raised.\n *\n * Arguments:\n * - `maskValue`: Either None or mask value to skip.\n *\n * Input shape:\n * Arbitrary. Use the keyword argument `inputShape`\n * (tuple of integers, does not include the samples axis)\n * when using this layer as the first layer in a model.\n *\n * Output shape:\n * Same shape as input.\n *\n * @doc {heading: 'Layers', subheading: 'Mask', namespace: 'layers'}\n */\nexport function masking(args) {\n return new Masking(args);\n}\n//# sourceMappingURL=exports_layers.js.map", "import * as losses from './losses';\nimport * as metrics from './metrics';\n/**\n * Binary accuracy metric function.\n *\n * `yTrue` and `yPred` can have 0-1 values. 
Example:\n * ```js\n * const x = tf.tensor2d([[1, 1, 1, 1], [0, 0, 0, 0]], [2, 4]);\n * const y = tf.tensor2d([[1, 0, 1, 0], [0, 0, 0, 1]], [2, 4]);\n * const accuracy = tf.metrics.binaryAccuracy(x, y);\n * accuracy.print();\n * ```\n *\n * `yTrue` and `yPred` can also have floating-number values between 0 and 1, in\n * which case the values will be thresholded at 0.5 to yield 0-1 values (i.e.,\n * a value >= 0.5 and <= 1.0 is interpreted as 1.\n * )\n * Example:\n * ```js\n * const x = tf.tensor1d([1, 1, 1, 1, 0, 0, 0, 0]);\n * const y = tf.tensor1d([0.2, 0.4, 0.6, 0.8, 0.2, 0.3, 0.4, 0.7]);\n * const accuracy = tf.metrics.binaryAccuracy(x, y);\n * accuracy.print();\n * ```\n *\n * @param yTrue Binary Tensor of truth.\n * @param yPred Binary Tensor of prediction.\n * @return Accuracy Tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function binaryAccuracy(yTrue, yPred) {\n return metrics.binaryAccuracy(yTrue, yPred);\n}\n/**\n * Binary crossentropy metric function.\n *\n * Example:\n * ```js\n * const x = tf.tensor2d([[0], [1], [1], [1]]);\n * const y = tf.tensor2d([[0], [0], [0.5], [1]]);\n * const crossentropy = tf.metrics.binaryCrossentropy(x, y);\n * crossentropy.print();\n * ```\n *\n * @param yTrue Binary Tensor of truth.\n * @param yPred Binary Tensor of prediction, probabilities for the `1` case.\n * @return Accuracy Tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function binaryCrossentropy(yTrue, yPred) {\n return metrics.binaryCrossentropy(yTrue, yPred);\n}\n/**\n * Sparse categorical accuracy metric function.\n *\n * Example:\n * ```js\n *\n * const yTrue = tf.tensor1d([1, 1, 2, 2, 0]);\n * const yPred = tf.tensor2d(\n * [[0, 1, 0], [1, 0, 0], [0, 0.4, 0.6], [0, 0.6, 0.4], [0.7, 0.3, 0]]);\n * const crossentropy = tf.metrics.sparseCategoricalAccuracy(yTrue, yPred);\n * crossentropy.print();\n * ```\n *\n * @param yTrue True labels: indices.\n * @param yPred Predicted probabilities or logits.\n * @returns Accuracy tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function sparseCategoricalAccuracy(yTrue, yPred) {\n return metrics.sparseCategoricalAccuracy(yTrue, yPred);\n}\n/**\n * Categorical accuracy metric function.\n *\n * Example:\n * ```js\n * const x = tf.tensor2d([[0, 0, 0, 1], [0, 0, 0, 1]]);\n * const y = tf.tensor2d([[0.1, 0.8, 0.05, 0.05], [0.1, 0.05, 0.05, 0.8]]);\n * const accuracy = tf.metrics.categoricalAccuracy(x, y);\n * accuracy.print();\n * ```\n *\n * @param yTrue Binary Tensor of truth: one-hot encoding of categories.\n * @param yPred Binary Tensor of prediction: probabilities or logits for the\n * same categories as in `yTrue`.\n * @return Accuracy Tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function categoricalAccuracy(yTrue, yPred) {\n return metrics.categoricalAccuracy(yTrue, yPred);\n}\n/**\n * Categorical crossentropy between an output tensor and a target tensor.\n *\n * @param target A tensor of the same shape as `output`.\n * @param output A tensor resulting from a softmax (unless `fromLogits` is\n * `true`, in which case `output` is expected to be the logits).\n * @param fromLogits Boolean, whether `output` is the result of a softmax, or is\n * a tensor of logits.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function categoricalCrossentropy(yTrue, yPred) {\n return metrics.categoricalCrossentropy(yTrue, yPred);\n}\n/**\n * Computes the precision of the predictions with respect to the labels.\n *\n * 
Example:\n * ```js\n * const x = tf.tensor2d(\n * [\n * [0, 0, 0, 1],\n * [0, 1, 0, 0],\n * [0, 0, 0, 1],\n * [1, 0, 0, 0],\n * [0, 0, 1, 0]\n * ]\n * );\n *\n * const y = tf.tensor2d(\n * [\n * [0, 0, 1, 0],\n * [0, 1, 0, 0],\n * [0, 0, 0, 1],\n * [0, 1, 0, 0],\n * [0, 1, 0, 0]\n * ]\n * );\n *\n * const precision = tf.metrics.precision(x, y);\n * precision.print();\n * ```\n *\n * @param yTrue The ground truth values. Expected to be contain only 0-1 values.\n * @param yPred The predicted values. Expected to be contain only 0-1 values.\n * @return Precision Tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function precision(yTrue, yPred) {\n return metrics.precision(yTrue, yPred);\n}\n/**\n * Computes the recall of the predictions with respect to the labels.\n *\n * Example:\n * ```js\n * const x = tf.tensor2d(\n * [\n * [0, 0, 0, 1],\n * [0, 1, 0, 0],\n * [0, 0, 0, 1],\n * [1, 0, 0, 0],\n * [0, 0, 1, 0]\n * ]\n * );\n *\n * const y = tf.tensor2d(\n * [\n * [0, 0, 1, 0],\n * [0, 1, 0, 0],\n * [0, 0, 0, 1],\n * [0, 1, 0, 0],\n * [0, 1, 0, 0]\n * ]\n * );\n *\n * const recall = tf.metrics.recall(x, y);\n * recall.print();\n * ```\n *\n * @param yTrue The ground truth values. Expected to be contain only 0-1 values.\n * @param yPred The predicted values. Expected to be contain only 0-1 values.\n * @return Recall Tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function recall(yTrue, yPred) {\n return metrics.recall(yTrue, yPred);\n}\n/**\n * Loss or metric function: Cosine proximity.\n *\n * Mathematically, cosine proximity is defined as:\n * `-sum(l2Normalize(yTrue) * l2Normalize(yPred))`,\n * wherein `l2Normalize()` normalizes the L2 norm of the input to 1 and `*`\n * represents element-wise multiplication.\n *\n * ```js\n * const yTrue = tf.tensor2d([[1, 0], [1, 0]]);\n * const yPred = tf.tensor2d([[1 / Math.sqrt(2), 1 / Math.sqrt(2)], [0, 1]]);\n * const proximity = tf.metrics.cosineProximity(yTrue, yPred);\n * proximity.print();\n * ```\n *\n * @param yTrue Truth Tensor.\n * @param yPred Prediction Tensor.\n * @return Cosine proximity Tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function cosineProximity(yTrue, yPred) {\n return losses.cosineProximity(yTrue, yPred);\n}\n/**\n * Loss or metric function: Mean absolute error.\n *\n * Mathematically, mean absolute error is defined as:\n * `mean(abs(yPred - yTrue))`,\n * wherein the `mean` is applied over feature dimensions.\n *\n * ```js\n * const yTrue = tf.tensor2d([[0, 1], [0, 0], [2, 3]]);\n * const yPred = tf.tensor2d([[0, 1], [0, 1], [-2, -3]]);\n * const mse = tf.metrics.meanAbsoluteError(yTrue, yPred);\n * mse.print();\n * ```\n *\n * @param yTrue Truth Tensor.\n * @param yPred Prediction Tensor.\n * @return Mean absolute error Tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function meanAbsoluteError(yTrue, yPred) {\n return losses.meanAbsoluteError(yTrue, yPred);\n}\n/**\n * Loss or metric function: Mean absolute percentage error.\n *\n * ```js\n * const yTrue = tf.tensor2d([[0, 1], [10, 20]]);\n * const yPred = tf.tensor2d([[0, 1], [11, 24]]);\n * const mse = tf.metrics.meanAbsolutePercentageError(yTrue, yPred);\n * mse.print();\n * ```\n *\n * Aliases: `tf.metrics.MAPE`, `tf.metrics.mape`.\n *\n * @param yTrue Truth Tensor.\n * @param yPred Prediction Tensor.\n * @return Mean absolute percentage error Tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function 
meanAbsolutePercentageError(yTrue, yPred) {\n return losses.meanAbsolutePercentageError(yTrue, yPred);\n}\nexport function MAPE(yTrue, yPred) {\n return losses.meanAbsolutePercentageError(yTrue, yPred);\n}\nexport function mape(yTrue, yPred) {\n return losses.meanAbsolutePercentageError(yTrue, yPred);\n}\n/**\n * Loss or metric function: Mean squared error.\n *\n * ```js\n * const yTrue = tf.tensor2d([[0, 1], [3, 4]]);\n * const yPred = tf.tensor2d([[0, 1], [-3, -4]]);\n * const mse = tf.metrics.meanSquaredError(yTrue, yPred);\n * mse.print();\n * ```\n *\n * Aliases: `tf.metrics.MSE`, `tf.metrics.mse`.\n *\n * @param yTrue Truth Tensor.\n * @param yPred Prediction Tensor.\n * @return Mean squared error Tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function meanSquaredError(yTrue, yPred) {\n return losses.meanSquaredError(yTrue, yPred);\n}\nexport function MSE(yTrue, yPred) {\n return losses.meanSquaredError(yTrue, yPred);\n}\nexport function mse(yTrue, yPred) {\n return losses.meanSquaredError(yTrue, yPred);\n}\n//# sourceMappingURL=exports_metrics.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nexport { modelFromJSON } from './models';\n//# sourceMappingURL=exports_models.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport * as regularizers from './regularizers';\n// tslint:disable-next-line:max-line-length\nimport { L1L2 } from './regularizers';\n/**\n * Regularizer for L1 and L2 regularization.\n *\n * Adds a term to the loss to penalize large weights:\n * loss += sum(l1 * abs(x)) + sum(l2 * x^2)\n *\n * @doc {heading: 'Regularizers', namespace: 'regularizers'}\n */\nexport function l1l2(config) {\n return new L1L2(config);\n}\n/**\n * Regularizer for L1 regularization.\n *\n * Adds a term to the loss to penalize large weights:\n * loss += sum(l1 * abs(x))\n * @param args l1 config.\n *\n * @doc {heading: 'Regularizers', namespace: 'regularizers'}\n */\nexport function l1(config) {\n return regularizers.l1(config);\n}\n/**\n * Regularizer for L2 regularization.\n *\n * Adds a term to the loss to penalize large weights:\n * loss += sum(l2 * x^2)\n * @param args l2 config.\n *\n * @doc {heading: 'Regularizers', namespace: 'regularizers'}\n */\nexport function l2(config) {\n return regularizers.l2(config);\n}\n//# sourceMappingURL=exports_regularizers.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* Original source: keras/callbacks.py */\nimport { BaseCallback } from './base_callbacks';\nimport { LayersModel } from './engine/training';\nimport { NotImplementedError } from './errors';\nimport { resolveScalarsInLogs } from './logs';\nexport class Callback extends BaseCallback {\n constructor() {\n super(...arguments);\n /** Instance of `keras.models.Model`. Reference of the model being trained. 
*/\n this.model = null;\n }\n setModel(model) {\n if (!(model instanceof LayersModel)) {\n throw new Error('model must be a LayersModel, not some other Container');\n }\n this.model = model;\n }\n}\nfunction less(currVal, prevVal) {\n return currVal < prevVal;\n}\nfunction greater(currVal, prevVal) {\n return currVal > prevVal;\n}\n/**\n * A Callback that stops training when a monitored quantity has stopped\n * improving.\n */\nexport class EarlyStopping extends Callback {\n constructor(args) {\n super();\n if (args == null) {\n args = {};\n }\n if (args.restoreBestWeights) {\n throw new NotImplementedError('restoreBestWeights = True is not implemented in EarlyStopping yet.');\n }\n this.monitor = args.monitor || 'val_loss';\n this.minDelta = Math.abs(args.minDelta || 0);\n this.patience = args.patience || 0;\n this.verbose = args.verbose || 0;\n this.mode = args.mode || 'auto';\n this.baseline = args.baseline;\n if (['auto', 'min', 'max'].indexOf(this.mode) === -1) {\n console.warn(`EarlyStopping mode '${this.mode}' is invalid. ` +\n `Falling back to mode 'auto'.`);\n this.mode = 'auto';\n }\n if (this.mode === 'min') {\n this.monitorFunc = less;\n }\n else if (this.mode === 'max') {\n this.monitorFunc = greater;\n }\n else {\n // For mode === 'auto'.\n if (this.monitor.indexOf('acc') !== -1) {\n this.monitorFunc = greater;\n }\n else {\n this.monitorFunc = less;\n }\n }\n if (this.monitorFunc === less) {\n this.minDelta *= -1;\n }\n }\n async onTrainBegin(logs) {\n this.wait = 0;\n this.stoppedEpoch = 0;\n if (this.baseline != null) {\n this.best = this.baseline;\n }\n else {\n this.best = this.monitorFunc === less ? Infinity : -Infinity;\n }\n }\n async onEpochEnd(epoch, logs) {\n await resolveScalarsInLogs(logs);\n const current = this.getMonitorValue(logs);\n if (current == null) {\n return;\n }\n if (this.monitorFunc(current - this.minDelta, this.best)) {\n this.best = current;\n this.wait = 0;\n // TODO(cais): Logic for restoreBestWeights.\n }\n else {\n this.wait++;\n if (this.wait >= this.patience) {\n this.stoppedEpoch = epoch;\n this.model.stopTraining = true;\n }\n // TODO(cais): Logic for restoreBestWeights.\n }\n }\n async onTrainEnd(logs) {\n if (this.stoppedEpoch > 0 && this.verbose) {\n console.log(`Epoch ${this.stoppedEpoch}: early stopping.`);\n }\n }\n getMonitorValue(logs) {\n if (logs == null) {\n logs = {};\n }\n const monitorValue = logs[this.monitor];\n if (monitorValue == null) {\n console.warn(`Metric for EarlyStopping ${this.monitor} is not available. 
` +\n `Available metrics are: ${Object.keys(logs)}`);\n }\n return monitorValue;\n }\n}\n/**\n * Factory function for a Callback that stops training when a monitored\n * quantity has stopped improving.\n *\n * Early stopping is a type of regularization, and protects model against\n * overfitting.\n *\n * The following example based on fake data illustrates how this callback\n * can be used during `tf.LayersModel.fit()`:\n *\n * ```js\n * const model = tf.sequential();\n * model.add(tf.layers.dense({\n * units: 3,\n * activation: 'softmax',\n * kernelInitializer: 'ones',\n * inputShape: [2]\n * }));\n * const xs = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n * const ys = tf.tensor2d([[1, 0, 0], [0, 1, 0]], [2, 3]);\n * const xsVal = tf.tensor2d([4, 3, 2, 1], [2, 2]);\n * const ysVal = tf.tensor2d([[0, 0, 1], [0, 1, 0]], [2, 3]);\n * model.compile(\n * {loss: 'categoricalCrossentropy', optimizer: 'sgd', metrics: ['acc']});\n *\n * // Without the EarlyStopping callback, the val_acc value would be:\n * // 0.5, 0.5, 0.5, 0.5, ...\n * // With val_acc being monitored, training should stop after the 2nd epoch.\n * const history = await model.fit(xs, ys, {\n * epochs: 10,\n * validationData: [xsVal, ysVal],\n * callbacks: tf.callbacks.earlyStopping({monitor: 'val_acc'})\n * });\n *\n * // Expect to see a length-2 array.\n * console.log(history.history.val_acc);\n * ```\n *\n * @doc {\n * heading: 'Callbacks',\n * namespace: 'callbacks'\n * }\n */\nexport function earlyStopping(args) {\n return new EarlyStopping(args);\n}\nexport const callbacks = { earlyStopping };\n//# sourceMappingURL=callbacks.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n// This file lists all exports of TensorFlow.js Layers\nimport * as constraints from './exports_constraints';\nimport * as initializers from './exports_initializers';\nimport * as layers from './exports_layers';\nimport * as metrics from './exports_metrics';\nimport * as models from './exports_models';\nimport * as regularizers from './exports_regularizers';\nexport { CallbackList, CustomCallback, History } from './base_callbacks';\nexport { Callback, callbacks, EarlyStopping } from './callbacks';\nexport { InputSpec, SymbolicTensor } from './engine/topology';\nexport { LayersModel } from './engine/training';\nexport { input, loadLayersModel, model, registerCallbackConstructor, sequential } from './exports';\nexport { RNN } from './layers/recurrent';\nexport { Sequential } from './models';\nexport { LayerVariable } from './variables';\nexport { version as version_layers } from './version';\nexport { constraints, initializers, layers, metrics, models, regularizers };\n//# sourceMappingURL=index.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\n/** DataType enum. */\nexport var DataType;\n(function (DataType) {\n DataType[DataType[\"DT_INVALID\"] = 0] = \"DT_INVALID\";\n DataType[DataType[\"DT_FLOAT\"] = 1] = \"DT_FLOAT\";\n DataType[DataType[\"DT_DOUBLE\"] = 2] = \"DT_DOUBLE\";\n DataType[DataType[\"DT_INT32\"] = 3] = \"DT_INT32\";\n DataType[DataType[\"DT_UINT8\"] = 4] = \"DT_UINT8\";\n DataType[DataType[\"DT_INT16\"] = 5] = \"DT_INT16\";\n DataType[DataType[\"DT_INT8\"] = 6] = \"DT_INT8\";\n DataType[DataType[\"DT_STRING\"] = 7] = \"DT_STRING\";\n DataType[DataType[\"DT_COMPLEX64\"] = 8] = \"DT_COMPLEX64\";\n DataType[DataType[\"DT_INT64\"] = 9] = \"DT_INT64\";\n DataType[DataType[\"DT_BOOL\"] = 10] = \"DT_BOOL\";\n DataType[DataType[\"DT_QINT8\"] = 11] = \"DT_QINT8\";\n DataType[DataType[\"DT_QUINT8\"] = 12] = \"DT_QUINT8\";\n DataType[DataType[\"DT_QINT32\"] = 13] = \"DT_QINT32\";\n DataType[DataType[\"DT_BFLOAT16\"] = 14] = \"DT_BFLOAT16\";\n DataType[DataType[\"DT_FLOAT_REF\"] = 101] = \"DT_FLOAT_REF\";\n DataType[DataType[\"DT_DOUBLE_REF\"] = 102] = \"DT_DOUBLE_REF\";\n DataType[DataType[\"DT_INT32_REF\"] = 103] = \"DT_INT32_REF\";\n DataType[DataType[\"DT_UINT8_REF\"] = 104] = \"DT_UINT8_REF\";\n DataType[DataType[\"DT_INT16_REF\"] = 105] = \"DT_INT16_REF\";\n DataType[DataType[\"DT_INT8_REF\"] = 106] = \"DT_INT8_REF\";\n DataType[DataType[\"DT_STRING_REF\"] = 107] = \"DT_STRING_REF\";\n DataType[DataType[\"DT_COMPLEX64_REF\"] = 108] = \"DT_COMPLEX64_REF\";\n DataType[DataType[\"DT_INT64_REF\"] = 109] = \"DT_INT64_REF\";\n DataType[DataType[\"DT_BOOL_REF\"] = 110] = \"DT_BOOL_REF\";\n DataType[DataType[\"DT_QINT8_REF\"] = 111] = \"DT_QINT8_REF\";\n DataType[DataType[\"DT_QUINT8_REF\"] = 112] = \"DT_QUINT8_REF\";\n DataType[DataType[\"DT_QINT32_REF\"] = 113] = \"DT_QINT32_REF\";\n DataType[DataType[\"DT_BFLOAT16_REF\"] = 114] = \"DT_BFLOAT16_REF\";\n})(DataType || (DataType = {}));\nexport var SaverDef;\n(function (SaverDef) {\n /** CheckpointFormatVersion enum. */\n let CheckpointFormatVersion;\n (function (CheckpointFormatVersion) {\n CheckpointFormatVersion[CheckpointFormatVersion[\"LEGACY\"] = 0] = \"LEGACY\";\n CheckpointFormatVersion[CheckpointFormatVersion[\"V1\"] = 1] = \"V1\";\n CheckpointFormatVersion[CheckpointFormatVersion[\"V2\"] = 2] = \"V2\";\n })(CheckpointFormatVersion = SaverDef.CheckpointFormatVersion || (SaverDef.CheckpointFormatVersion = {}));\n})(SaverDef || (SaverDef = {}));\n//# sourceMappingURL=compiled_api.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nconst CUSTOM_OPS = {};\n/**\n * Register an Op for graph model executor. This allow you to register\n * TensorFlow custom op or override existing op.\n *\n * Here is an example of registering a new MatMul Op.\n * ```js\n * const customMatmul = (node) =>\n * tf.matMul(\n * node.inputs[0], node.inputs[1],\n * node.attrs['transpose_a'], node.attrs['transpose_b']);\n *\n * tf.registerOp('MatMul', customMatmul);\n * ```\n * The inputs and attrs of the node object is based on the TensorFlow op\n * registry.\n *\n * @param name The Tensorflow Op name.\n * @param opFunc An op function which is called with the current graph node\n * during execution and needs to return a tensor or a list of tensors. The node\n * has the following attributes:\n * - attr: A map from attribute name to its value\n * - inputs: A list of input tensors\n *\n * @doc {heading: 'Models', subheading: 'Op Registry'}\n */\nexport function registerOp(name, opFunc) {\n const opMapper = {\n tfOpName: name,\n category: 'custom',\n inputs: [],\n attrs: [],\n customExecutor: opFunc\n };\n CUSTOM_OPS[name] = opMapper;\n}\n/**\n * Retrieve the OpMapper object for the registered op.\n *\n * @param name The Tensorflow Op name.\n *\n * @doc {heading: 'Models', subheading: 'Op Registry'}\n */\nexport function getRegisteredOp(name) {\n return CUSTOM_OPS[name];\n}\n/**\n * Deregister the Op for graph model executor.\n *\n * @param name The Tensorflow Op name.\n *\n * @doc {heading: 'Models', subheading: 'Op Registry'}\n */\nexport function deregisterOp(name) {\n delete CUSTOM_OPS[name];\n}\n//# sourceMappingURL=register.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { clone, util } from '@tensorflow/tfjs-core';\nexport function getParamValue(paramName, node, tensorMap, context, resourceManager) {\n const inputParam = node.inputParams[paramName];\n if (inputParam && inputParam.inputIndexStart !== undefined) {\n const start = inputParam.inputIndexStart;\n const end = inputParam.inputIndexEnd === 0 ?\n undefined :\n (inputParam.inputIndexEnd === undefined ? 
start + 1 :\n inputParam.inputIndexEnd);\n if (inputParam.type === 'tensor') {\n return getTensor(node.inputNames[inputParam.inputIndexStart], tensorMap, context, resourceManager);\n }\n if (inputParam.type === 'tensors') {\n const inputs = node.inputNames.slice(start, end);\n return inputs.map(name => getTensor(name, tensorMap, context, resourceManager));\n }\n const tensor = getTensor(node.inputNames.slice(start)[0], tensorMap, context, resourceManager);\n const data = tensor.dataSync();\n return inputParam.type === 'number' ?\n data[0] :\n util.toNestedArray(tensor.shape, data);\n }\n const attrParam = node.attrParams[paramName];\n return attrParam && attrParam.value;\n}\n/**\n * Retrieve the tensor from tensorsMap based on input name.\n * @param name Node input name\n * @param tensorsMap Tensors map keyed by the node\n * @param context contains tensors and information for running the current node.\n * @param resourceManager Optional. Contains global resources of the model.\n */\nexport function getTensor(name, tensorsMap, context, resourceManager) {\n const [nodeName, index] = parseNodeName(name);\n if (resourceManager != null) {\n const tensor = resourceManager.getHashTableHandleByName(nodeName);\n if (tensor != null) {\n return tensor;\n }\n }\n const contextId = context.currentContextIds.find(contextId => {\n return !!tensorsMap[getNodeNameWithContextId(nodeName, contextId)];\n });\n return contextId !== undefined ?\n tensorsMap[getNodeNameWithContextId(nodeName, contextId)][index] :\n undefined;\n}\n/**\n * Retrieve the tensors based on input name for current context.\n * @param name Node input name\n * @param tensorsMap Tensors map keyed by the node\n */\nexport function getTensorsForCurrentContenxt(name, tensorsMap, context) {\n return tensorsMap[getNodeNameWithContextId(name, context.currentContextId)];\n}\n/**\n * Returns the node name and index from the Node input name.\n * @param inputName The input name of the node, in format of\n * node_name:output_index, i.e. MatMul:0, if the output_index is not set, it is\n * default to 0.\n */\nexport function getNodeNameAndIndex(inputName, context) {\n const [nodeName, index] = parseNodeName(inputName);\n return [\n getNodeNameWithContextId(nodeName, context && context.currentContextId),\n index\n ];\n}\nfunction getNodeNameWithContextId(name, contextId) {\n return !!contextId ? `${name}-${contextId}` : name;\n}\nexport function parseNodeName(name) {\n const parts = name.split(':');\n if (parts.length === 1) {\n return [name, 0];\n }\n const nodeName = parts[0];\n return [nodeName, Number(parts[parts.length - 1])];\n}\nexport function split(arr, size) {\n const res = [];\n for (let i = 0; i < arr.length; i += size) {\n res.push(arr.slice(i, i + size));\n }\n return res;\n}\nexport function getPadding(node, tensorMap, context) {\n let pad = getParamValue('pad', node, tensorMap, context);\n if (pad === 'explicit') {\n // This is 1d array, we need to convert it to 2d array\n pad = getParamValue('explicitPaddings', node, tensorMap, context);\n const explicitPadding = [[0, 0], [0, 0], [0, 0], [0, 0]];\n for (let i = 0; i < 4; i++) {\n explicitPadding[i][0] = pad[i * 2];\n explicitPadding[i][1] = pad[i * 2 + 1];\n }\n return explicitPadding;\n }\n return pad;\n}\n/**\n * Reuse the tensor if it is marked as keep, otherwise clone the tensor to\n * avoid disposal. 
This is important for TensorArray and TensorList ops, since\n * internally they use a tensor as the id for TensorArray and TensorList, and\n * to simplify lookup, they also use Tensor.id as the key to the internal map.\n * These id tensors have been marked as kept in the backend, we need avoid clone\n * them in order to create new Tensor.id.\n * @param tensor\n */\nexport function cloneTensor(tensor) {\n return tensor.kept ? tensor : clone(tensor);\n}\n//# sourceMappingURL=utils.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'Add',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'AddV2',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'AddN',\n 'category': 'arithmetic',\n 'inputs': [{ 'start': 0, 'end': 0, 'name': 'tensors', 'type': 'tensors' }]\n },\n {\n 'tfOpName': 'BiasAdd',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Sub',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'RealDiv',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Div',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'DivNoNan',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'FloorDiv',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 
'tfOpName': 'Mul',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Maximum',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' }\n ]\n },\n {\n 'tfOpName': 'Minimum',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' }\n ]\n },\n {\n 'tfOpName': 'Pow',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'SquaredDifference',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Mod',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'FloorMod',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n }\n];\n//# sourceMappingURL=arithmetic.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'Abs',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Acos',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Asin',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Atan',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Atan2',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'y', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Ceil',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'ClipByValue',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'clip_value_min', 'name': 'clipValueMin', 'type': 'number' },\n { 'tfName': 'clip_value_max', 'name': 'clipValueMax', 'type': 'number' }\n ]\n },\n {\n 'tfOpName': 'Complex',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'real', 'type': 'tensor' },\n { 'start': 1, 'name': 'imag', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'ComplexAbs',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Cos',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Cosh',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Elu',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 
'Exp',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Floor',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Log',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Imag',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }, {\n 'tfName': 'Tout',\n 'name': 'outputType',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Neg',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Real',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }, {\n 'tfName': 'Tout',\n 'name': 'outputType',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Prelu',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'alpha', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Relu',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Relu6',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }, {\n 'tfName': 'clipValueMin',\n 'name': 'clipValueMin',\n 'type': 'number',\n 'defaultValue': 0\n },\n {\n 'tfName': 'clipValueMax',\n 'name': 'clipValueMax',\n 'type': 'number',\n 'defaultValue': 6\n }\n ]\n },\n {\n 'tfOpName': 'Selu',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Sigmoid',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Sin',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Sinh',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Sqrt',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Rsqrt',\n 'category': 
'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Square',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Tan',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Tanh',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Sign',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Round',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Expm1',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Log1p',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Reciprocal',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Softplus',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Asinh',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Acosh',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Atanh',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Erf',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Prod',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axes', 'type': 'number[]' },\n ],\n 'attrs': [\n {\n 'tfName': 'keep_dims',\n 'name': 'keepDims',\n 'type': 'bool',\n 'notSupported': true\n },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'LeakyRelu',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n 
],\n 'attrs': [\n {\n 'tfName': 'alpha',\n 'name': 'alpha',\n 'type': 'number',\n 'defaultValue': 0.2\n },\n {\n 'tfName': 'T',\n 'name': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n }\n];\n//# sourceMappingURL=basic_math.js.map", "export const json = [\n {\n 'tfOpName': 'LoopCond',\n 'category': 'control',\n 'inputs': [{ 'start': 0, 'name': 'pred', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'Switch',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'data', 'type': 'tensor' },\n { 'start': 1, 'name': 'pred', 'type': 'tensor' }\n ]\n },\n {\n 'tfOpName': 'Merge',\n 'category': 'control',\n 'inputs': [{ 'start': 0, 'end': 0, 'name': 'tensors', 'type': 'tensors' }]\n },\n {\n 'tfOpName': 'Enter',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensor', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true },\n { 'tfName': 'frame_name', 'name': 'frameName', 'type': 'string' },\n { 'tfName': 'is_constant', 'name': 'isConstant', 'type': 'bool' }\n ]\n },\n {\n 'tfOpName': 'Exit',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensor', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'NextIteration',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensor', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'TensorArrayV3',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'size', 'type': 'number' },\n ],\n 'attrs': [\n { 'tfName': 'dtype', 'name': 'dtype', 'type': 'dtype' },\n { 'tfName': 'element_shape', 'name': 'elementShape', 'type': 'shape' },\n { 'tfName': 'dynamic_size', 'name': 'dynamicSize', 'type': 'bool' },\n { 'tfName': 'clear_after_read', 'name': 'clearAfterRead', 'type': 'bool' },\n {\n 'tfName': 'identical_element_shapes',\n 'name': 'identicalElementShapes',\n 'type': 'bool'\n },\n { 'tfName': 'tensor_array_name', 'name': 'name', 'type': 'string' }\n ]\n },\n {\n 'tfOpName': 'TensorArrayWriteV3',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorArrayId', 'type': 'tensor' },\n { 'start': 1, 'name': 'index', 'type': 'number' },\n { 'start': 2, 'name': 'tensor', 'type': 'tensor' },\n { 'start': 3, 'name': 'flowIn', 'type': 'number' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'TensorArrayReadV3',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorArrayId', 'type': 'tensor' },\n { 'start': 1, 'name': 'index', 'type': 'number' },\n { 'start': 2, 'name': 'flowIn', 'type': 'number' },\n ],\n 'attrs': [{\n 'tfName': 'dtype',\n 'name': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }]\n },\n {\n 'tfOpName': 'TensorArrayGatherV3',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorArrayId', 'type': 'tensor' },\n { 'start': 1, 'name': 'indices', 'type': 'number[]' },\n { 'start': 2, 'name': 'flowIn', 'type': 'number' },\n ],\n 'attrs': [\n { 'tfName': 'dtype', 'name': 'dtype', 'type': 'dtype' },\n { 'tfName': 'element_shape', 'name': 'elementShape', 'type': 'shape' }\n ]\n },\n {\n 'tfOpName': 'TensorArrayScatterV3',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorArrayId', 'type': 'tensor' },\n { 'start': 1, 'name': 'indices', 'type': 'number[]' },\n { 'start': 2, 'name': 'tensor', 'type': 'tensor' },\n 
{ 'start': 3, 'name': 'flowIn', 'type': 'number' },\n ],\n 'attrs': [{ 'tfName': 'T', 'name': 'dtype', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorArrayConcatV3',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorArrayId', 'type': 'tensor' },\n { 'start': 1, 'name': 'flowIn', 'type': 'number' },\n ],\n 'attrs': [\n { 'tfName': 'dtype', 'name': 'dtype', 'type': 'dtype' }, {\n 'tfName': 'element_shape_except0',\n 'name': 'elementShapeExcept0',\n 'type': 'shape',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'TensorArraySplitV3',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorArrayId', 'type': 'tensor' },\n { 'start': 1, 'name': 'tensor', 'type': 'tensor' },\n { 'start': 2, 'name': 'lengths', 'type': 'number[]' },\n { 'start': 3, 'name': 'flowIn', 'type': 'number' },\n ],\n 'attrs': [{ 'tfName': 'T', 'name': 'dtype', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorArraySizeV3',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorArrayId', 'type': 'tensor' },\n { 'start': 1, 'name': 'flowIn', 'type': 'number' }\n ]\n },\n {\n 'tfOpName': 'TensorArrayCloseV3',\n 'category': 'control',\n 'inputs': [{ 'start': 0, 'name': 'tensorArrayId', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'StatelessIf',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'cond', 'type': 'tensor' },\n { 'start': 1, 'end': 0, 'name': 'args', 'type': 'tensors' }\n ],\n 'attrs': [\n { 'tfName': 'then_branch', 'name': 'thenBranch', 'type': 'func' },\n { 'tfName': 'else_branch', 'name': 'elseBranch', 'type': 'func' }\n ]\n },\n {\n 'tfOpName': 'If',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'cond', 'type': 'tensor' },\n { 'start': 1, 'end': 0, 'name': 'args', 'type': 'tensors' }\n ],\n 'attrs': [\n { 'tfName': 'then_branch', 'name': 'thenBranch', 'type': 'func' },\n { 'tfName': 'else_branch', 'name': 'elseBranch', 'type': 'func' }\n ]\n },\n {\n 'tfOpName': 'StatelessWhile',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'end': 0, 'name': 'args', 'type': 'tensors' },\n ],\n 'attrs': [\n { 'tfName': 'cond', 'name': 'cond', 'type': 'func' },\n { 'tfName': 'body', 'name': 'body', 'type': 'func' }\n ]\n },\n {\n 'tfOpName': 'While',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'end': 0, 'name': 'args', 'type': 'tensors' },\n ],\n 'attrs': [\n { 'tfName': 'cond', 'name': 'cond', 'type': 'func' },\n { 'tfName': 'body', 'name': 'body', 'type': 'func' }\n ]\n },\n {\n 'tfOpName': 'TensorListScatter',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensor', 'type': 'tensor' },\n { 'start': 1, 'name': 'indices', 'type': 'number[]' },\n { 'start': 2, 'name': 'elementShape', 'type': 'shape' }\n ],\n 'attrs': [{ 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorListScatterV2',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensor', 'type': 'tensor' },\n { 'start': 1, 'name': 'indices', 'type': 'number[]' },\n { 'start': 2, 'name': 'elementShape', 'type': 'shape' },\n { 'start': 3, 'name': 'numElements', 'type': 'number' },\n ],\n 'attrs': [{ 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorListGather',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorListId', 'type': 'tensor' },\n { 'start': 1, 'name': 'indices', 'type': 'number[]' },\n { 'start': 2, 'name': 'elementShape', 'type': 'shape' },\n ],\n 'attrs': [{ 'tfName': 'element_dtype', 'name': 'elementDType', 
'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorListGetItem',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorListId', 'type': 'tensor' },\n { 'start': 1, 'name': 'index', 'type': 'number' },\n { 'start': 2, 'name': 'elementShape', 'type': 'shape' },\n ],\n 'attrs': [{ 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorListSetItem',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorListId', 'type': 'tensor' },\n { 'start': 1, 'name': 'index', 'type': 'number' },\n { 'start': 2, 'name': 'tensor', 'type': 'tensor' },\n ],\n 'attrs': [{ 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorListReserve',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'elementShape', 'type': 'shape' },\n { 'start': 1, 'name': 'numElements', 'type': 'number' },\n ],\n 'attrs': [{ 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorListFromTensor',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensor', 'type': 'tensor' },\n { 'start': 1, 'name': 'elementShape', 'type': 'shape' }\n ],\n 'attrs': [{ 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorListStack',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorListId', 'type': 'tensor' },\n { 'start': 1, 'name': 'elementShape', 'type': 'shape' },\n ],\n 'attrs': [\n { 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' },\n { 'tfName': 'num_elements', 'name': 'numElements', 'type': 'dtype' }\n ]\n },\n {\n 'tfOpName': 'TensorListSplit',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensor', 'type': 'tensor' },\n { 'start': 1, 'name': 'elementShape', 'type': 'shape' },\n { 'start': 2, 'name': 'lengths', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorListConcat',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorListId', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'element_shape', 'name': 'elementShape', 'type': 'shape' },\n { 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' }\n ]\n },\n {\n 'tfOpName': 'TensorListPopBack',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorListId', 'type': 'tensor' },\n { 'start': 1, 'name': 'elementShape', 'type': 'shape' },\n ],\n 'attrs': [{ 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorListPushBack',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorListId', 'type': 'tensor' },\n { 'start': 1, 'name': 'tensor', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' }\n ]\n }\n];\n//# sourceMappingURL=control.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'AvgPool',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' }, {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'notSupported': true\n },\n { 'tfName': 'ksize', 'name': 'kernelSize', 'type': 'number[]' },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'MaxPool',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' }, {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'notSupported': true\n },\n { 'tfName': 'ksize', 'name': 'kernelSize', 'type': 'number[]' },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'MaxPoolWithArgmax',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' },\n { 'tfName': 'ksize', 'name': 'kernelSize', 'type': 'number[]' }, {\n 'tfName': 'include_batch_in_index',\n 'name': 'includeBatchInIndex',\n 'type': 'bool'\n },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'AvgPool3D',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' }, {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'notSupported': true\n },\n { 'tfName': 'ksize', 'name': 'kernelSize', 'type': 'number[]' },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'MaxPool3D',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' }, {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'notSupported': true\n },\n { 'tfName': 'ksize', 'name': 'kernelSize', 'type': 'number[]' },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Conv1D',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'filter', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'stride', 'name': 'stride', 'type': 'number' },\n { 
'tfName': 'padding', 'name': 'pad', 'type': 'string' }, {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'defaultValue': 'NWC'\n },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }, {\n 'tfName': 'dilation',\n 'name': 'dilation',\n 'type': 'number',\n 'defaultValue': 1\n }\n ]\n },\n {\n 'tfOpName': 'Conv2D',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'filter', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true },\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' },\n { 'tfName': 'useCudnnOnGpu', 'name': 'useCudnnOnGpu', 'type': 'bool' }, {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'defaultValue': 'NHWC'\n },\n {\n 'tfName': 'explicit_paddings',\n 'name': 'explicitPaddings',\n 'type': 'number[]',\n 'defaultValue': []\n },\n { 'tfName': 'dilations', 'name': 'dilations', 'type': 'number[]' }\n ]\n },\n {\n 'tfOpName': '_FusedConv2D',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'filter', 'type': 'tensor' },\n { 'start': 2, end: 0, 'name': 'args', 'type': 'tensors' },\n ],\n 'attrs': [\n { 'tfName': 'num_args', 'name': 'numArgs', 'type': 'number' },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true },\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' },\n {\n 'tfName': 'explicit_paddings',\n 'name': 'explicitPaddings',\n 'type': 'number[]',\n 'defaultValue': []\n },\n {\n 'tfName': 'use_cudnn_on_gpu',\n 'name': 'useCudnnOnGpu',\n 'type': 'bool',\n 'defaultValue': true\n },\n {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'defaultValue': 'NHWC'\n },\n {\n 'tfName': 'dilations',\n 'name': 'dilations',\n 'type': 'number[]',\n 'defaultValue': [1, 1, 1, 1]\n },\n {\n 'tfName': 'fused_ops',\n 'name': 'fusedOps',\n 'type': 'string[]',\n 'defaultValue': []\n },\n {\n 'tfName': 'epsilon',\n 'name': 'epsilon',\n 'type': 'number',\n 'defaultValue': 0.0001\n },\n ]\n },\n {\n 'tfOpName': 'Conv2DBackpropInput',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 2, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'filter', 'type': 'tensor' },\n { 'start': 0, 'name': 'outputShape', 'type': 'number[]' },\n ],\n 'attrs': [\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' },\n {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'notSupported': true\n },\n {\n 'tfName': 'explicit_paddings',\n 'name': 'explicitPaddings',\n 'type': 'number[]',\n 'defaultValue': []\n },\n ]\n },\n {\n 'tfOpName': 'DepthwiseConv2d',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'input', 'type': 'tensor' },\n { 'start': 1, 'name': 'filter', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' }, {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'defaultValue': 'NHWC'\n },\n {\n 'tfName': 'explicit_paddings',\n 'name': 'explicitPaddings',\n 'type': 'number[]',\n 'defaultValue': []\n },\n { 'tfName': 'dilations', 'name': 'dilations', 'type': 'number[]' }\n ]\n },\n {\n 'tfOpName': 
'DepthwiseConv2dNative',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'input', 'type': 'tensor' },\n { 'start': 1, 'name': 'filter', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' }, {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'defaultValue': 'NHWC'\n },\n {\n 'tfName': 'explicit_paddings',\n 'name': 'explicitPaddings',\n 'type': 'number[]',\n 'defaultValue': []\n },\n { 'tfName': 'dilations', 'name': 'dilations', 'type': 'number[]' }\n ]\n },\n {\n 'tfOpName': 'FusedDepthwiseConv2dNative',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'filter', 'type': 'tensor' },\n { 'start': 2, end: 0, 'name': 'args', 'type': 'tensors' },\n ],\n 'attrs': [\n { 'tfName': 'num_args', 'name': 'numArgs', 'type': 'number' },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true },\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' }, {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'defaultValue': 'NHWC'\n },\n {\n 'tfName': 'dilations',\n 'name': 'dilations',\n 'type': 'number[]',\n 'defaultValue': [1, 1, 1, 1]\n },\n {\n 'tfName': 'fused_ops',\n 'name': 'fusedOps',\n 'type': 'string[]',\n 'defaultValue': []\n }\n ]\n },\n {\n 'tfOpName': 'Conv3D',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'filter', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' }, {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'defaultValue': 'NHWC'\n },\n { 'tfName': 'dilations', 'name': 'dilations', 'type': 'number[]' }\n ],\n },\n {\n 'tfOpName': 'Dilation2D',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'filter', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'rates', 'name': 'dilations', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' }\n ]\n }\n];\n//# sourceMappingURL=convolution.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'Fill',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'shape', 'type': 'number[]' },\n { 'start': 1, 'name': 'value', 'type': 'number' },\n ],\n 'attrs': [{ 'tfName': 'T', 'name': 'dtype', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'LinSpace',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'start', 'type': 'number' },\n { 'start': 1, 'name': 'stop', 'type': 'number' },\n { 'start': 2, 'name': 'num', 'type': 'number' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'OneHot',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'indices', 'type': 'tensor' },\n { 'start': 1, 'name': 'depth', 'type': 'number' },\n { 'start': 2, 'name': 'onValue', 'type': 'number', 'defaultValue': 1 },\n { 'start': 3, 'name': 'offValue', 'type': 'number', 'defaultValue': 0 },\n ],\n 'attrs': [\n {\n 'tfName': 'axis',\n 'name': 'axis',\n 'type': 'number',\n 'notSupported': true\n },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Ones',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'shape', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'T', 'name': 'dtype', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'OnesLike',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [{ 'tfName': 'dtype', 'name': 'dtype', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'RandomUniform',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'shape', 'type': 'number[]' },\n ],\n 'attrs': [\n {\n 'tfName': 'minval',\n 'name': 'minval',\n 'type': 'number',\n 'defaultValue': 0\n },\n {\n 'tfName': 'maxval',\n 'name': 'maxval',\n 'type': 'number',\n 'defaultValue': 1\n },\n { 'tfName': 'dtype', 'name': 'dtype', 'type': 'dtype' },\n { 'tfName': 'seed', 'name': 'seed', 'type': 'number', 'defaultValue': 0 }, {\n 'tfName': 'seed2',\n 'name': 'seed2',\n 'type': 'number',\n 'defaultValue': 0,\n 'notSupported': true\n },\n { 'tfName': 'T', 'name': 'T', 'type': 'number', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Range',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'start', 'type': 'number' },\n { 'start': 1, 'name': 'stop', 'type': 'number' },\n { 'start': 2, 'name': 'step', 'type': 'number', 'defaultValue': 0 },\n ],\n 'attrs': [{ 'tfName': 'Tidx', 'name': 'dtype', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TruncatedNormal',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'shape', 'type': 'number[]' },\n ],\n 'attrs': [\n {\n 'tfName': 'means',\n 'name': 'mean',\n 'type': 'number',\n 'defaultValue': 0.0\n },\n {\n 'tfName': 'stddev',\n 'name': 'stdDev',\n 'type': 'number',\n 'defaultValue': 1.0\n },\n { 'tfName': 'seed', 'name': 'seed', 'type': 'number' 
}, {\n 'tfName': 'seed2',\n 'name': 'seed2',\n 'type': 'number',\n 'defaultValue': 0,\n 'notSupported': true\n },\n { 'tfName': 'dtype', 'name': 'dtype', 'type': 'dtype' },\n { 'tfName': 'T', 'name': 'T', 'type': 'number', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Zeros',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'shape', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'T', 'name': 'dtype', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'ZerosLike',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [{ 'tfName': 'T', 'name': 'dtype', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'Multinomial',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'logits', 'type': 'tensor' },\n { 'start': 1, 'name': 'numSamples', 'type': 'number' },\n ],\n 'attrs': [\n { 'tfName': 'seed', 'name': 'seed', 'type': 'number' },\n { 'tfName': 'seed2', 'name': 'seed2', 'type': 'number' },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype' },\n { 'tfName': 'output_dtype', 'name': 'output_dtype', 'type': 'dtype' }\n ]\n }\n];\n//# sourceMappingURL=creation.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'NonMaxSuppressionV2',\n 'category': 'dynamic',\n 'inputs': [\n { 'start': 0, 'name': 'boxes', 'type': 'tensor' },\n { 'start': 1, 'name': 'scores', 'type': 'tensor' },\n { 'start': 2, 'name': 'maxOutputSize', 'type': 'number' },\n { 'start': 3, 'name': 'iouThreshold', 'type': 'number' }\n ]\n },\n {\n 'tfOpName': 'NonMaxSuppressionV3',\n 'category': 'dynamic',\n 'inputs': [\n { 'start': 0, 'name': 'boxes', 'type': 'tensor' },\n { 'start': 1, 'name': 'scores', 'type': 'tensor' },\n { 'start': 2, 'name': 'maxOutputSize', 'type': 'number' },\n { 'start': 3, 'name': 'iouThreshold', 'type': 'number' },\n { 'start': 4, 'name': 'scoreThreshold', 'type': 'number' }\n ]\n },\n {\n 'tfOpName': 'NonMaxSuppressionV4',\n 'category': 'dynamic',\n 'inputs': [\n { 'start': 0, 'name': 'boxes', 'type': 'tensor' },\n { 'start': 1, 'name': 'scores', 'type': 'tensor' },\n { 'start': 2, 'name': 'maxOutputSize', 'type': 'number' },\n { 'start': 3, 'name': 'iouThreshold', 'type': 'number' },\n { 'start': 4, 'name': 'scoreThreshold', 'type': 'number' }\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }, {\n 'tfName': 'T_threshold',\n 'name': 'threshold',\n 'type': 'dtype',\n 'notSupported': true\n },\n {\n 'tfName': 'pad_to_max_output_size',\n 'name': 'padToMaxOutputSize',\n 'type': 'bool'\n }\n ]\n },\n {\n 'tfOpName': 'NonMaxSuppressionV5',\n 'category': 'dynamic',\n 'inputs': [\n { 'start': 0, 'name': 'boxes', 'type': 'tensor' },\n { 'start': 1, 'name': 'scores', 'type': 'tensor' },\n { 'start': 2, 'name': 'maxOutputSize', 'type': 'number' },\n { 'start': 3, 'name': 'iouThreshold', 'type': 
'number' },\n { 'start': 4, 'name': 'scoreThreshold', 'type': 'number' },\n { 'start': 5, 'name': 'softNmsSigma', 'type': 'number' }\n ]\n },\n {\n 'tfOpName': 'Where',\n 'category': 'dynamic',\n 'inputs': [\n { 'start': 0, 'name': 'condition', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'ListDiff',\n 'category': 'dynamic',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'y', 'type': 'tensor' },\n ],\n 'attrs': [{\n 'tfName': 'T',\n 'name': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }]\n }\n];\n//# sourceMappingURL=dynamic.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'TopKV2',\n 'category': 'evaluation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'k', 'type': 'number' },\n ],\n 'attrs': [{ 'tfName': 'sorted', 'name': 'sorted', 'type': 'bool' }]\n },\n {\n 'tfOpName': 'Unique',\n 'category': 'evaluation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n },\n {\n 'tfOpName': 'UniqueV2',\n 'category': 'evaluation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number' },\n ],\n },\n];\n//# sourceMappingURL=evaluation.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'PlaceholderWithDefault',\n 'category': 'graph',\n 'inputs': [\n { 'start': 0, 'name': 'default', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'shape', 'name': 'shape', 'type': 'shape' },\n { 'tfName': 'dtype', 'name': 'dtype', 'type': 'dtype' }\n ]\n },\n {\n 'tfOpName': 'Placeholder',\n 'category': 'graph',\n 'attrs': [\n { 'tfName': 'shape', 'name': 'shape', 'type': 'shape' },\n { 'tfName': 'dtype', 'name': 'dtype', 'type': 'dtype' }\n ]\n },\n { 'tfOpName': 'Const', 'category': 'graph' }, {\n 'tfOpName': 'Identity',\n 'category': 'graph',\n 'inputs': [{ 'start': 0, 'name': 'x', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'IdentityN',\n 'category': 'graph',\n 'inputs': [{ 'start': 0, 'end': 0, 'name': 'x', 'type': 'tensors' }]\n },\n {\n 'tfOpName': 'Snapshot',\n 'category': 'graph',\n 'inputs': [{ 'start': 0, 'name': 'x', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'Rank',\n 'category': 'graph',\n 'inputs': [{ 'start': 0, 'name': 'x', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'Size',\n 'category': 'graph',\n 'inputs': [{ 'start': 0, 'name': 'x', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'Shape',\n 'category': 'graph',\n 'inputs': [{ 'start': 0, 'name': 'x', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'ShapeN',\n 'category': 'graph',\n 'inputs': [{ 'start': 0, 'end': 0, 'name': 'x', 'type': 'tensors' }]\n },\n {\n 'tfOpName': 'Print',\n 'category': 'graph',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'data', 'type': 'tensors' },\n ],\n 'attrs': [\n { 'tfName': 'message', 'name': 'message', 'type': 'string' }, {\n 'tfName': 'first_n',\n 'name': 'firstN',\n 'type': 'number',\n 'notSupported': true\n },\n {\n 'tfName': 'summarize',\n 'name': 'summarize',\n 'type': 'number',\n 'defaultValue': 3\n }\n ]\n },\n { 'tfOpName': 'NoOp', 'category': 'graph', 'inputs': [] }, {\n 'tfOpName': 'StopGradient',\n 'category': 'graph',\n 'inputs': [{ 'start': 0, 'name': 'x', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'FakeQuantWithMinMaxVars',\n 'category': 'graph',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'min', 'name': 'min', 'type': 'number' },\n { 'tfName': 'max', 'name': 'max', 'type': 'number' }\n ]\n }\n];\n//# sourceMappingURL=graph.js.map", "export const json = [\n {\n 'tfOpName': 'HashTable',\n 'category': 'hash_table',\n 'inputs': [],\n 'attrs': [\n { 'tfName': 'shared_name', 'name': 'sharedName', 'type': 'string' },\n {\n 'tfName': 'use_node_name_sharing',\n 'name': 'useNodeNameSharing',\n 'type': 'bool'\n },\n { 'tfName': 'key_dtype', 'name': 'keyDType', 'type': 'dtype' },\n { 'tfName': 'value_dtype', 'name': 'valueDType', 'type': 'dtype' },\n ]\n },\n {\n 'tfOpName': 'HashTableV2',\n 'category': 'hash_table',\n 'inputs': [],\n 'attrs': [\n { 'tfName': 'shared_name', 'name': 'sharedName', 
'type': 'string' },\n {\n 'tfName': 'use_node_name_sharing',\n 'name': 'useNodeNameSharing',\n 'type': 'bool'\n },\n { 'tfName': 'key_dtype', 'name': 'keyDType', 'type': 'dtype' },\n { 'tfName': 'value_dtype', 'name': 'valueDType', 'type': 'dtype' },\n ]\n },\n {\n 'tfOpName': 'LookupTableImport',\n 'category': 'hash_table',\n 'inputs': [\n { 'start': 0, 'name': 'tableHandle', 'type': 'tensor' },\n { 'start': 1, 'name': 'keys', 'type': 'tensor' },\n { 'start': 2, 'name': 'values', 'type': 'tensor' }\n ],\n 'attrs': [\n { 'tfName': 'Tin', 'name': 'tIn', 'type': 'dtype', 'notSupported': true }, {\n 'tfName': 'Tout',\n 'name': 'tOut',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'LookupTableImportV2',\n 'category': 'hash_table',\n 'inputs': [\n { 'start': 0, 'name': 'tableHandle', 'type': 'tensor' },\n { 'start': 1, 'name': 'keys', 'type': 'tensor' },\n { 'start': 2, 'name': 'values', 'type': 'tensor' }\n ],\n 'attrs': [\n { 'tfName': 'Tin', 'name': 'tIn', 'type': 'dtype', 'notSupported': true }, {\n 'tfName': 'Tout',\n 'name': 'tOut',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'LookupTableFind',\n 'category': 'hash_table',\n 'inputs': [\n { 'start': 0, 'name': 'tableHandle', 'type': 'tensor' },\n { 'start': 1, 'name': 'keys', 'type': 'tensor' },\n { 'start': 2, 'name': 'defaultValue', 'type': 'tensor' }\n ],\n 'attrs': [\n { 'tfName': 'Tin', 'name': 'tIn', 'type': 'dtype', 'notSupported': true }, {\n 'tfName': 'Tout',\n 'name': 'tOut',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'LookupTableFindV2',\n 'category': 'hash_table',\n 'inputs': [\n { 'start': 0, 'name': 'tableHandle', 'type': 'tensor' },\n { 'start': 1, 'name': 'keys', 'type': 'tensor' },\n { 'start': 2, 'name': 'defaultValue', 'type': 'tensor' }\n ],\n 'attrs': [\n { 'tfName': 'Tin', 'name': 'tIn', 'type': 'dtype', 'notSupported': true }, {\n 'tfName': 'Tout',\n 'name': 'tOut',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n }\n];\n//# sourceMappingURL=hash_table.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'ResizeBilinear',\n 'category': 'image',\n 'inputs': [\n { 'start': 0, 'name': 'images', 'type': 'tensor' },\n { 'start': 1, 'name': 'size', 'type': 'number[]' },\n ],\n 'attrs': [\n { 'tfName': 'align_corners', 'name': 'alignCorners', 'type': 'bool' },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'ResizeNearestNeighbor',\n 'category': 'image',\n 'inputs': [\n { 'start': 0, 'name': 'images', 'type': 'tensor' },\n { 'start': 1, 'name': 'size', 'type': 'number[]' },\n ],\n 'attrs': [\n { 'tfName': 'align_corners', 'name': 'alignCorners', 'type': 'bool' },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'CropAndResize',\n 'category': 'image',\n 'inputs': [\n { 'start': 0, 'name': 'image', 'type': 'tensor' },\n { 'start': 1, 'name': 'boxes', 'type': 'tensor' },\n { 'start': 2, 'name': 'boxInd', 'type': 'tensor' },\n { 'start': 3, 'name': 'cropSize', 'type': 'number[]' },\n ],\n 'attrs': [\n { 'tfName': 'method', 'name': 'method', 'type': 'string' }, {\n 'tfName': 'extrapolation_value',\n 'name': 'extrapolationValue',\n 'type': 'number'\n }\n ]\n }\n];\n//# sourceMappingURL=image.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'Equal',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'NotEqual',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Greater',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'GreaterEqual',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Less',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'LessEqual',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'LogicalAnd',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'LogicalNot',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'LogicalOr',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Select',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'condition', 'type': 'tensor' },\n { 'start': 1, 'name': 'a', 'type': 'tensor' },\n { 'start': 2, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'SelectV2',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'condition', 'type': 'tensor' },\n { 'start': 1, 'name': 
'a', 'type': 'tensor' },\n { 'start': 2, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [{\n 'tfName': 'T',\n 'name': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }]\n }\n];\n//# sourceMappingURL=logical.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': '_FusedMatMul',\n 'category': 'matrices',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n { 'start': 2, end: 0, 'name': 'args', 'type': 'tensors' },\n ],\n 'attrs': [\n { 'tfName': 'num_args', 'name': 'numArgs', 'type': 'number' }, {\n 'tfName': 'fused_ops',\n 'name': 'fusedOps',\n 'type': 'string[]',\n 'defaultValue': []\n },\n {\n 'tfName': 'epsilon',\n 'name': 'epsilon',\n 'type': 'number',\n 'defaultValue': 0.0001\n },\n {\n 'tfName': 'transpose_a',\n 'name': 'transposeA',\n 'type': 'bool',\n 'defaultValue': false\n },\n {\n 'tfName': 'transpose_b',\n 'name': 'transposeB',\n 'type': 'bool',\n 'defaultValue': false\n },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'MatMul',\n 'category': 'matrices',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n {\n 'tfName': 'transpose_a',\n 'name': 'transposeA',\n 'type': 'bool',\n 'defaultValue': false\n },\n {\n 'tfName': 'transpose_b',\n 'name': 'transposeB',\n 'type': 'bool',\n 'defaultValue': false\n },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'BatchMatMul',\n 'category': 'matrices',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n {\n 'tfName': 'adj_x',\n 'name': 'transposeA',\n 'type': 'bool',\n 'defaultValue': false\n },\n {\n 'tfName': 'adj_y',\n 'name': 'transposeB',\n 'type': 'bool',\n 'defaultValue': false\n },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'BatchMatMulV2',\n 'category': 'matrices',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n {\n 'tfName': 'adj_x',\n 'name': 'transposeA',\n 'type': 'bool',\n 'defaultValue': false\n },\n {\n 'tfName': 'adj_y',\n 'name': 'transposeB',\n 'type': 'bool',\n 'defaultValue': false\n },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Transpose',\n 'category': 'matrices',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'perm', 'type': 'number[]' },\n ],\n 'attrs': [{\n 'tfName': 'T',\n 'name': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }]\n }\n];\n//# sourceMappingURL=matrices.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'FusedBatchNorm',\n 'category': 'normalization',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'scale', 'type': 'tensor' },\n { 'start': 2, 'name': 'offset', 'type': 'tensor' },\n { 'start': 3, 'name': 'mean', 'type': 'tensor' },\n { 'start': 4, 'name': 'variance', 'type': 'tensor' },\n ],\n 'attrs': [\n {\n 'tfName': 'epsilon',\n 'name': 'epsilon',\n 'type': 'number',\n 'defaultValue': 0.001\n },\n {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'FusedBatchNormV2',\n 'category': 'normalization',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'scale', 'type': 'tensor' },\n { 'start': 2, 'name': 'offset', 'type': 'tensor' },\n { 'start': 3, 'name': 'mean', 'type': 'tensor' },\n { 'start': 4, 'name': 'variance', 'type': 'tensor' },\n ],\n 'attrs': [\n {\n 'tfName': 'epsilon',\n 'name': 'epsilon',\n 'type': 'number',\n 'defaultValue': 0.001\n },\n {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'FusedBatchNormV3',\n 'category': 'normalization',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'scale', 'type': 'tensor' },\n { 'start': 2, 'name': 'offset', 'type': 'tensor' },\n { 'start': 3, 'name': 'mean', 'type': 'tensor' },\n { 'start': 4, 'name': 'variance', 'type': 'tensor' },\n ],\n 'attrs': [\n {\n 'tfName': 'epsilon',\n 'name': 'epsilon',\n 'type': 'number',\n 'defaultValue': 0.001\n },\n {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'LRN',\n 'category': 'normalization',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n {\n 'tfName': 'depth_radius',\n 'name': 'radius',\n 'type': 'number',\n 'defaultValue': 5\n },\n { 'tfName': 'bias', 'name': 'bias', 'type': 'number', 'defaultValue': 1.0 },\n {\n 'tfName': 'alpha',\n 'name': 'alpha',\n 'type': 'number',\n 'defaultValue': 1.0\n },\n {\n 'tfName': 'beta',\n 'name': 'beta',\n 'type': 'number',\n 'defaultValue': 0.5\n }\n ]\n },\n {\n 'tfOpName': 'Softmax',\n 'category': 'normalization',\n 'inputs': [{ 'start': 0, 'name': 'x', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'LogSoftmax',\n 'category': 'normalization',\n 'inputs': [{ 'start': 0, 'name': 'x', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'SparseToDense',\n 'category': 'normalization',\n 'inputs': [\n { 'start': 0, 'name': 'sparseIndices', 'type': 'tensor' },\n { 'start': 1, 'name': 'outputShape', 'type': 'number[]' },\n { 'start': 2, 'name': 'sparseValues', 'type': 'tensor' },\n { 'start': 3, 'name': 'defaultValue', 'type': 'tensor' },\n ],\n 'attrs': [{\n 'tfName': 'validate_indices',\n 'name': 
'validateIndices',\n 'type': 'bool',\n 'defaultValue': true,\n 'notSupported': true\n }]\n }\n];\n//# sourceMappingURL=normalization.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'Max',\n 'category': 'reduction',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'keep_dims', 'name': 'keepDims', 'type': 'bool' }]\n },\n {\n 'tfOpName': 'Mean',\n 'category': 'reduction',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'keep_dims', 'name': 'keepDims', 'type': 'bool' }]\n },\n {\n 'tfOpName': 'Min',\n 'category': 'reduction',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'keep_dims', 'name': 'keepDims', 'type': 'bool' }]\n },\n {\n 'tfOpName': 'Sum',\n 'category': 'reduction',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'keep_dims', 'name': 'keepDims', 'type': 'bool' }]\n },\n {\n 'tfOpName': 'All',\n 'category': 'reduction',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'keep_dims', 'name': 'keepDims', 'type': 'bool' }]\n },\n {\n 'tfOpName': 'Any',\n 'category': 'reduction',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'keep_dims', 'name': 'keepDims', 'type': 'bool' }]\n },\n {\n 'tfOpName': 'ArgMax',\n 'category': 'reduction',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number' }\n ]\n },\n {\n 'tfOpName': 'ArgMin',\n 'category': 'reduction',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number' }\n ]\n },\n {\n 'tfOpName': 'Prod',\n 'category': 'reduction',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'keep_dims', 'name': 'keepDims', 'type': 'bool' }]\n },\n {\n 'tfOpName': 'Cumsum',\n 'category': 'reduction',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number' },\n ],\n 'attrs': [\n { 'tfName': 'exclusive', 'name': 'exclusive', 'type': 'bool' },\n { 'tfName': 'reverse', 'name': 'reverse', 'type': 'bool' }\n ]\n }\n];\n//# sourceMappingURL=reduction.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'ConcatV2',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'end': -1, 'name': 'tensors', 'type': 'tensors' },\n { 'start': -1, 'name': 'axis', 'type': 'number' }\n ],\n 'attrs': [{ 'tfName': 'N', 'name': 'n', 'type': 'number', 'defaultValue': 2 }]\n },\n {\n 'tfOpName': 'Concat',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 1, 'end': 0, 'name': 'tensors', 'type': 'tensors' },\n { 'start': 0, 'name': 'axis', 'type': 'number' }\n ],\n 'attrs': [{ 'tfName': 'N', 'name': 'n', 'type': 'number', 'defaultValue': 2 }]\n },\n {\n 'tfOpName': 'GatherV2',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'indices', 'type': 'tensor' },\n { 'start': 2, 'name': 'axis', 'type': 'number', 'defaultValue': 0 }\n ]\n },\n {\n 'tfOpName': 'Gather',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'indices', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'axis', 'name': 'axis', 'type': 'number', 'defaultValue': 0 }, {\n 'tfName': 'validate_indices',\n 'name': 'validateIndices',\n 'type': 'bool',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Reverse',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'dims', 'type': 'bool', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'ReverseV2',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number[]' }\n ]\n },\n {\n 'tfOpName': 'Slice',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'begin', 'type': 'number[]' },\n { 'start': 2, 'name': 'size', 'type': 'number[]' }\n ]\n },\n {\n 'tfOpName': 'StridedSlice',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'begin', 'type': 'number[]' },\n { 'start': 2, 'name': 'end', 'type': 'number[]' },\n { 'start': 3, 'name': 'strides', 'type': 'number[]' },\n ],\n 'attrs': [\n {\n 'tfName': 'begin_mask',\n 'name': 'beginMask',\n 'type': 'number',\n 'defaultValue': 0\n },\n {\n 'tfName': 'end_mask',\n 'name': 'endMask',\n 'type': 'number',\n 'defaultValue': 0\n },\n {\n 'tfName': 'new_axis_mask',\n 'name': 'newAxisMask',\n 'type': 'number',\n 'defaultValue': 0\n },\n {\n 'tfName': 'ellipsis_mask',\n 'name': 'ellipsisMask',\n 'type': 'number',\n 'defaultValue': 0\n },\n {\n 'tfName': 'shrink_axis_mask',\n 'name': 'shrinkAxisMask',\n 'type': 'number',\n 'defaultValue': 0\n }\n ]\n },\n {\n 'tfOpName': 'Pack',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'end': 0, 'name': 'tensors', 'type': 'tensors' },\n ],\n 'attrs': [\n { 'tfName': 'axis', 'name': 'axis', 'type': 'number', 
'defaultValue': 0 }\n ]\n },\n {\n 'tfOpName': 'Unpack',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'tensor', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'axis', 'name': 'axis', 'type': 'number', 'defaultValue': 0 }, {\n 'tfName': 'num',\n 'name': 'num',\n 'type': 'number',\n 'defaultValue': 0,\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Tile',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'reps', 'type': 'number[]' }\n ]\n },\n {\n 'tfOpName': 'Split',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'axis', 'type': 'number', 'defaultValue': 0 },\n { 'start': 1, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [{\n 'tfName': 'num_split',\n 'name': 'numOrSizeSplits',\n 'type': 'number',\n 'defaultValue': 1\n }]\n },\n {\n 'tfOpName': 'SplitV',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'numOrSizeSplits', 'type': 'number[]' },\n { 'start': 2, 'name': 'axis', 'type': 'number', 'defaultValue': 0 }\n ]\n },\n {\n 'tfOpName': 'ScatterNd',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'indices', 'type': 'tensor' },\n { 'start': 1, 'name': 'values', 'type': 'tensor' },\n { 'start': 2, 'name': 'shape', 'type': 'number[]' }\n ]\n },\n {\n 'tfOpName': 'GatherNd',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'indices', 'type': 'tensor' }\n ]\n },\n {\n 'tfOpName': 'SparseToDense',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'sparseIndices', 'type': 'tensor' },\n { 'start': 1, 'name': 'outputShape', 'type': 'number[]' },\n { 'start': 2, 'name': 'sparseValues', 'type': 'tensor' },\n { 'start': 3, 'name': 'defaultValue', 'type': 'tensor' },\n ],\n 'attrs': [{\n 'tfName': 'validate_indices',\n 'name': 'validateIndices',\n 'type': 'bool',\n 'defaultValue': false,\n 'notSupported': true\n }]\n }\n];\n//# sourceMappingURL=slice_join.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'FFT',\n 'category': 'spectral',\n 'inputs': [{ 'start': 0, 'name': 'x', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'IFFT',\n 'category': 'spectral',\n 'inputs': [{ 'start': 0, 'name': 'x', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'RFFT',\n 'category': 'spectral',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' }, {\n 'start': 1,\n 'name': 'fft_length',\n 'type': 'number',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'IRFFT',\n 'category': 'spectral',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' }, {\n 'start': 1,\n 'name': 'fft_length',\n 'type': 'number',\n 'notSupported': true\n }\n ]\n }\n];\n//# sourceMappingURL=spectral.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'Cast',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n {\n 'tfName': 'SrcT',\n 'name': 'sdtype',\n 'type': 'dtype',\n 'notSupported': true\n },\n { 'tfName': 'DstT', 'name': 'dtype', 'type': 'dtype' }\n ]\n },\n {\n 'tfOpName': 'ExpandDims',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number' }\n ]\n },\n {\n 'tfOpName': 'MirrorPad',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'padding', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'mode', 'name': 'mode', 'type': 'string' }]\n },\n {\n 'tfOpName': 'Pad',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'padding', 'type': 'number[]' },\n ],\n 'attrs': [{\n 'tfName': 'constant_value',\n 'name': 'constantValue',\n 'type': 'number',\n 'defaultValue': 0\n }]\n },\n {\n 'tfOpName': 'PadV2',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'padding', 'type': 'number[]' }, {\n 'start': 2,\n 'name': 'constantValue',\n 'type': 'number',\n 'defaultValue': 0\n }\n ]\n },\n {\n 'tfOpName': 'Reshape',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 
'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'shape', 'type': 'number[]' }\n ]\n },\n {\n 'tfOpName': 'Squeeze',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [{\n 'tfName': 'axis',\n 'tfDeprecatedName': 'squeeze_dims',\n 'name': 'axis',\n 'type': 'number[]'\n }]\n },\n {\n 'tfOpName': 'SpaceToBatchND',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'blockShape', 'type': 'number[]' },\n { 'start': 2, 'name': 'paddings', 'type': 'number[]' }\n ]\n },\n {\n 'tfOpName': 'BatchToSpaceND',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'blockShape', 'type': 'number[]' },\n { 'start': 2, 'name': 'crops', 'type': 'number[]' }\n ]\n },\n {\n 'tfOpName': 'DepthToSpace',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'block_size', 'name': 'blockSize', 'type': 'number' },\n { 'tfName': 'data_format', 'name': 'dataFormat', 'type': 'string' }\n ]\n },\n {\n 'tfOpName': 'BroadcastTo',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'shape', 'type': 'number[]' },\n ],\n 'attrs': []\n }\n];\n//# sourceMappingURL=transformation.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from '@tensorflow/tfjs-core';\nimport * as tensorflow from '../data/compiled_api';\nimport { getRegisteredOp } from './custom_op/register';\nimport { getNodeNameAndIndex } from './executors/utils';\nimport * as arithmetic from './op_list/arithmetic';\nimport * as basicMath from './op_list/basic_math';\nimport * as control from './op_list/control';\nimport * as convolution from './op_list/convolution';\nimport * as creation from './op_list/creation';\nimport * as dynamic from './op_list/dynamic';\nimport * as evaluation from './op_list/evaluation';\nimport * as graph from './op_list/graph';\nimport * as hashTable from './op_list/hash_table';\nimport * as image from './op_list/image';\nimport * as logical from './op_list/logical';\nimport * as matrices from './op_list/matrices';\nimport * as normalization from './op_list/normalization';\nimport * as reduction from './op_list/reduction';\nimport * as sliceJoin from './op_list/slice_join';\nimport * as spectral from './op_list/spectral';\nimport * as transformation from './op_list/transformation';\nexport class OperationMapper {\n // Singleton instance for the mapper\n static get Instance() {\n return this._instance || (this._instance = new this());\n }\n // Loads the op mapping from the JSON file.\n constructor() {\n const ops = [\n arithmetic, basicMath, control, convolution, creation, dynamic,\n evaluation, logical, image, graph, 
matrices, normalization, reduction,\n sliceJoin, spectral, transformation, hashTable\n ];\n const mappersJson = [].concat(...ops.map(op => op.json));\n this.opMappers = mappersJson.reduce((map, mapper) => {\n map[mapper.tfOpName] = mapper;\n return map;\n }, {});\n }\n // Converts the model inference graph from Tensorflow GraphDef to local\n // representation for TensorFlow.js API\n transformGraph(graph, signature = {}) {\n const tfNodes = graph.node;\n const placeholders = [];\n const weights = [];\n const initNodes = [];\n const nodes = tfNodes.reduce((map, node) => {\n map[node.name] = this.mapNode(node);\n if (node.op.startsWith('Placeholder')) {\n placeholders.push(map[node.name]);\n }\n else if (node.op === 'Const') {\n weights.push(map[node.name]);\n }\n else if (node.input == null || node.input.length === 0) {\n initNodes.push(map[node.name]);\n }\n return map;\n }, {});\n let inputs = [];\n const outputs = [];\n let inputNodeNameToKey = {};\n let outputNodeNameToKey = {};\n if (signature != null) {\n inputNodeNameToKey = this.mapSignatureEntries(signature.inputs);\n outputNodeNameToKey = this.mapSignatureEntries(signature.outputs);\n }\n const allNodes = Object.keys(nodes);\n allNodes.forEach(key => {\n const node = nodes[key];\n node.inputNames.forEach(name => {\n const [nodeName,] = getNodeNameAndIndex(name);\n node.inputs.push(nodes[nodeName]);\n nodes[nodeName].children.push(node);\n });\n });\n // if signature has not outputs set, add any node that does not have\n // outputs.\n if (Object.keys(outputNodeNameToKey).length === 0) {\n allNodes.forEach(key => {\n const node = nodes[key];\n if (node.children.length === 0) {\n outputs.push(node);\n }\n });\n }\n else {\n Object.keys(outputNodeNameToKey).forEach(name => {\n const [nodeName,] = getNodeNameAndIndex(name);\n const node = nodes[nodeName];\n if (node != null) {\n node.signatureKey = outputNodeNameToKey[name];\n outputs.push(node);\n }\n });\n }\n if (Object.keys(inputNodeNameToKey).length > 0) {\n Object.keys(inputNodeNameToKey).forEach(name => {\n const [nodeName,] = getNodeNameAndIndex(name);\n const node = nodes[nodeName];\n if (node) {\n node.signatureKey = inputNodeNameToKey[name];\n inputs.push(node);\n }\n });\n }\n else {\n inputs = placeholders;\n }\n let functions = {};\n if (graph.library != null && graph.library.function != null) {\n functions = graph.library.function.reduce((functions, func) => {\n functions[func.signature.name] = this.mapFunction(func);\n return functions;\n }, {});\n }\n const result = { nodes, inputs, outputs, weights, placeholders, signature, functions };\n if (initNodes.length > 0) {\n result.initNodes = initNodes;\n }\n return result;\n }\n mapSignatureEntries(entries) {\n return Object.keys(entries || {})\n .reduce((prev, curr) => {\n prev[entries[curr].name] = curr;\n return prev;\n }, {});\n }\n mapNode(node) {\n // Unsupported ops will cause an error at run-time (not parse time), since\n // they may not be used by the actual execution subgraph.\n const mapper = getRegisteredOp(node.op) || this.opMappers[node.op] || {};\n if (node.attr == null) {\n node.attr = {};\n }\n const newNode = {\n name: node.name,\n op: node.op,\n category: mapper.category,\n inputNames: (node.input ||\n []).map(input => input.startsWith('^') ? 
input.substr(1) : input),\n inputs: [],\n children: [],\n inputParams: {},\n attrParams: {},\n rawAttrs: node.attr\n };\n if (mapper.inputs != null) {\n newNode.inputParams =\n mapper.inputs.reduce((map, param) => {\n map[param.name] = {\n type: param.type,\n inputIndexStart: param.start,\n inputIndexEnd: param.end\n };\n return map;\n }, {});\n }\n if (mapper.attrs != null) {\n newNode.attrParams =\n mapper.attrs.reduce((map, param) => {\n const type = param.type;\n let value = undefined;\n switch (param.type) {\n case 'string':\n value = getStringParam(node.attr, param.tfName, param.defaultValue);\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getStringParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'string[]':\n value = getStringArrayParam(node.attr, param.tfName, param.defaultValue);\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getStringArrayParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'number':\n value = getNumberParam(node.attr, param.tfName, (param.defaultValue || 0));\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getNumberParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'number[]':\n value = getNumericArrayParam(node.attr, param.tfName, param.defaultValue);\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getNumericArrayParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'bool':\n value = getBoolParam(node.attr, param.tfName, param.defaultValue);\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getBoolParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'bool[]':\n value = getBoolArrayParam(node.attr, param.tfName, param.defaultValue);\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getBoolArrayParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'shape':\n value = getTensorShapeParam(node.attr, param.tfName, param.defaultValue);\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getTensorShapeParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'shape[]':\n value = getTensorShapeArrayParam(node.attr, param.tfName, param.defaultValue);\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getTensorShapeArrayParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'dtype':\n value = getDtypeParam(node.attr, param.tfName, param.defaultValue);\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getDtypeParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'dtype[]':\n value = getDtypeArrayParam(node.attr, param.tfName, param.defaultValue);\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getDtypeArrayParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'func':\n value = getFuncParam(node.attr, param.tfName, param.defaultValue);\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getFuncParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'tensor':\n case 'tensors':\n break;\n default:\n throw new Error(`Unsupported param type: ${param.type} for op: ${node.op}`);\n }\n map[param.name] = { value, type };\n return map;\n }, {});\n }\n return newNode;\n }\n // map the TFunctionDef to TFJS graph object\n mapFunction(functionDef) {\n const tfNodes = functionDef.nodeDef;\n 
const placeholders = [];\n const weights = [];\n let nodes = {};\n if (tfNodes != null) {\n nodes = tfNodes.reduce((map, node) => {\n map[node.name] = this.mapNode(node);\n if (node.op === 'Const') {\n weights.push(map[node.name]);\n }\n return map;\n }, {});\n }\n const inputs = [];\n const outputs = [];\n functionDef.signature.inputArg.forEach(arg => {\n const [nodeName,] = getNodeNameAndIndex(arg.name);\n const node = {\n name: nodeName,\n op: 'Placeholder',\n inputs: [],\n inputNames: [],\n category: 'graph',\n inputParams: {},\n attrParams: { dtype: { value: parseDtypeParam(arg.type), type: 'dtype' } },\n children: []\n };\n node.signatureKey = arg.name;\n inputs.push(node);\n nodes[nodeName] = node;\n });\n const allNodes = Object.keys(nodes);\n allNodes.forEach(key => {\n const node = nodes[key];\n node.inputNames.forEach(name => {\n const [nodeName,] = getNodeNameAndIndex(name);\n node.inputs.push(nodes[nodeName]);\n nodes[nodeName].children.push(node);\n });\n });\n const returnNodeMap = functionDef.ret;\n functionDef.signature.outputArg.forEach(output => {\n const [nodeName, index] = getNodeNameAndIndex(returnNodeMap[output.name]);\n const node = nodes[nodeName];\n if (node != null) {\n node.defaultOutput = index;\n outputs.push(node);\n }\n });\n const signature = this.mapArgsToSignature(functionDef);\n return { nodes, inputs, outputs, weights, placeholders, signature };\n }\n mapArgsToSignature(functionDef) {\n return {\n methodName: functionDef.signature.name,\n inputs: functionDef.signature.inputArg.reduce((map, arg) => {\n map[arg.name] = this.mapArgToTensorInfo(arg);\n return map;\n }, {}),\n outputs: functionDef.signature.outputArg.reduce((map, arg) => {\n map[arg.name] = this.mapArgToTensorInfo(arg, functionDef.ret);\n return map;\n }, {}),\n };\n }\n mapArgToTensorInfo(arg, nameMap) {\n let name = arg.name;\n if (nameMap != null) {\n name = nameMap[name];\n }\n return { name, dtype: arg.type };\n }\n}\nexport function decodeBase64(text) {\n const global = env().global;\n if (typeof global.atob !== 'undefined') {\n return global.atob(text);\n }\n else if (typeof Buffer !== 'undefined') {\n return new Buffer(text, 'base64').toString();\n }\n else {\n throw new Error('Unable to decode base64 in this environment. ' +\n 'Missing built-in atob() or Buffer()');\n }\n}\nexport function parseStringParam(s, keepCase) {\n const value = Array.isArray(s) ? String.fromCharCode.apply(null, s) : decodeBase64(s);\n return keepCase ? value : value.toLowerCase();\n}\nexport function getStringParam(attrs, name, def, keepCase = false) {\n const param = attrs[name];\n if (param != null) {\n return parseStringParam(param.s, keepCase);\n }\n return def;\n}\nexport function getBoolParam(attrs, name, def) {\n const param = attrs[name];\n return param ? param.b : def;\n}\nexport function getNumberParam(attrs, name, def) {\n const param = attrs[name] || {};\n const value = param['i'] != null ? param['i'] : (param['f'] != null ? param['f'] : def);\n return (typeof value === 'number') ? 
value : parseInt(value, 10);\n}\nexport function parseDtypeParam(value) {\n if (typeof (value) === 'string') {\n // tslint:disable-next-line:no-any\n value = tensorflow.DataType[value];\n }\n switch (value) {\n case tensorflow.DataType.DT_FLOAT:\n return 'float32';\n case tensorflow.DataType.DT_INT32:\n case tensorflow.DataType.DT_INT64:\n case tensorflow.DataType.DT_INT8:\n case tensorflow.DataType.DT_UINT8:\n return 'int32';\n case tensorflow.DataType.DT_BOOL:\n return 'bool';\n case tensorflow.DataType.DT_DOUBLE:\n return 'float32';\n case tensorflow.DataType.DT_STRING:\n return 'string';\n default:\n // Unknown dtype error will happen at runtime (instead of parse time),\n // since these nodes might not be used by the actual subgraph execution.\n return null;\n }\n}\nexport function getFuncParam(attrs, name, def) {\n const param = attrs[name];\n if (param && param.func) {\n return param.func.name;\n }\n return def;\n}\nexport function getDtypeParam(attrs, name, def) {\n const param = attrs[name];\n if (param && param.type) {\n return parseDtypeParam(param.type);\n }\n return def;\n}\nexport function getDtypeArrayParam(attrs, name, def) {\n const param = attrs[name];\n if (param && param.list && param.list.type) {\n return param.list.type.map(v => parseDtypeParam(v));\n }\n return def;\n}\nexport function parseTensorShapeParam(shape) {\n if (shape.unknownRank) {\n return undefined;\n }\n if (shape.dim != null) {\n return shape.dim.map(dim => (typeof dim.size === 'number') ? dim.size : parseInt(dim.size, 10));\n }\n return [];\n}\nexport function getTensorShapeParam(attrs, name, def) {\n const param = attrs[name];\n if (param && param.shape) {\n return parseTensorShapeParam(param.shape);\n }\n return def;\n}\nexport function getNumericArrayParam(attrs, name, def) {\n const param = attrs[name];\n if (param) {\n return ((param.list.f && param.list.f.length ? param.list.f :\n param.list.i) ||\n [])\n .map(v => (typeof v === 'number') ? v : parseInt(v, 10));\n }\n return def;\n}\nexport function getStringArrayParam(attrs, name, def, keepCase = false) {\n const param = attrs[name];\n if (param && param.list && param.list.s) {\n return param.list.s.map((v) => {\n return parseStringParam(v, keepCase);\n });\n }\n return def;\n}\nexport function getTensorShapeArrayParam(attrs, name, def) {\n const param = attrs[name];\n if (param && param.list && param.list.shape) {\n return param.list.shape.map((v) => {\n return parseTensorShapeParam(v);\n });\n }\n return def;\n}\nexport function getBoolArrayParam(attrs, name, def) {\n const param = attrs[name];\n if (param && param.list && param.list.b) {\n return param.list.b;\n }\n return def;\n}\n//# sourceMappingURL=operation_mapper.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getTensor } from '../executors/utils';\nimport { getBoolArrayParam, getBoolParam, getDtypeArrayParam, getDtypeParam, getNumberParam, getNumericArrayParam, getStringArrayParam, getStringParam, getTensorShapeArrayParam, getTensorShapeParam } from '../operation_mapper';\n/**\n * Helper class for lookup inputs and params for nodes in the model graph.\n */\nexport class NodeValueImpl {\n constructor(node, tensorMap, context) {\n this.node = node;\n this.tensorMap = tensorMap;\n this.context = context;\n this.inputs = [];\n this.attrs = {};\n this.inputs = node.inputNames.map(name => this.getInput(name));\n if (node.rawAttrs != null) {\n this.attrs = Object.keys(node.rawAttrs)\n .reduce((attrs, key) => {\n attrs[key] = this.getAttr(key);\n return attrs;\n }, {});\n }\n }\n /**\n * Return the value of the attribute or input param.\n * @param name String: name of attribute or input param.\n */\n getInput(name) {\n return getTensor(name, this.tensorMap, this.context);\n }\n /**\n * Return the value of the attribute or input param.\n * @param name String: name of attribute or input param.\n */\n getAttr(name, defaultValue) {\n const value = this.node.rawAttrs[name];\n if (value.tensor != null) {\n return getTensor(name, this.tensorMap, this.context);\n }\n if (value.i != null || value.f != null) {\n return getNumberParam(this.node.rawAttrs, name, defaultValue);\n }\n if (value.s != null) {\n return getStringParam(this.node.rawAttrs, name, defaultValue);\n }\n if (value.b != null) {\n return getBoolParam(this.node.rawAttrs, name, defaultValue);\n }\n if (value.shape != null) {\n return getTensorShapeParam(this.node.rawAttrs, name, defaultValue);\n }\n if (value.type != null) {\n return getDtypeParam(this.node.rawAttrs, name, defaultValue);\n }\n if (value.list != null) {\n if (value.list.i != null || value.list.f != null) {\n return getNumericArrayParam(this.node.rawAttrs, name, defaultValue);\n }\n if (value.list.s != null) {\n return getStringArrayParam(this.node.rawAttrs, name, defaultValue);\n }\n if (value.list.shape != null) {\n return getTensorShapeArrayParam(this.node.rawAttrs, name, defaultValue);\n }\n if (value.list.b != null) {\n return getBoolArrayParam(this.node.rawAttrs, name, defaultValue);\n }\n if (value.list.type != null) {\n return getDtypeArrayParam(this.node.rawAttrs, name, defaultValue);\n }\n }\n return defaultValue;\n }\n}\n//# sourceMappingURL=node_value_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * This file exports ops used by the converters executors. By default it\n * re-exports all ops. In a custom build this is aliased to a file that will\n * only exports ops for a given model.json.\n */\nexport * from './ops';\n//# sourceMappingURL=ops_for_converter.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// tslint:disable-next-line: no-imports-from-dist\nimport * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter';\nimport { getParamValue } from './utils';\nexport const executeOp = (node, tensorMap, context) => {\n switch (node.op) {\n case 'BiasAdd':\n case 'AddV2':\n case 'Add': {\n return [tfOps.add(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'AddN': {\n return [tfOps.addN(getParamValue('tensors', node, tensorMap, context))];\n }\n case 'FloorMod':\n case 'Mod':\n return [tfOps.mod(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n case 'Mul':\n return [tfOps.mul(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n case 'RealDiv':\n case 'Div': {\n return [tfOps.div(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'DivNoNan': {\n return [tfOps.divNoNan(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'FloorDiv': {\n return [tfOps.floorDiv(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'Sub': {\n return [tfOps.sub(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'Minimum': {\n return [tfOps.minimum(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'Maximum': {\n return [tfOps.maximum(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'Pow': {\n return [tfOps.pow(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'SquaredDifference': {\n return [tfOps.squaredDifference(getParamValue('a', node, 
tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'arithmetic';\n//# sourceMappingURL=arithmetic_executor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// tslint:disable-next-line: no-imports-from-dist\nimport * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter';\nimport { getParamValue, getTensor } from './utils';\nexport const executeOp = (node, tensorMap, context) => {\n switch (node.op) {\n case 'Abs':\n case 'ComplexAbs':\n return [tfOps.abs(getParamValue('x', node, tensorMap, context))];\n case 'Acos':\n return [tfOps.acos(getParamValue('x', node, tensorMap, context))];\n case 'Acosh':\n return [tfOps.acosh(getParamValue('x', node, tensorMap, context))];\n case 'Asin':\n return [tfOps.asin(getParamValue('x', node, tensorMap, context))];\n case 'Asinh':\n return [tfOps.asinh(getParamValue('x', node, tensorMap, context))];\n case 'Atan':\n return [tfOps.atan(getParamValue('x', node, tensorMap, context))];\n case 'Atan2':\n return [tfOps.atan2(getParamValue('x', node, tensorMap, context), getParamValue('y', node, tensorMap, context))];\n case 'Atanh':\n return [tfOps.atanh(getParamValue('x', node, tensorMap, context))];\n case 'Ceil':\n return [tfOps.ceil(getParamValue('x', node, tensorMap, context))];\n case 'Complex':\n return [tfOps.complex(getParamValue('real', node, tensorMap, context), getParamValue('imag', node, tensorMap, context))];\n case 'Cos':\n return [tfOps.cos(getParamValue('x', node, tensorMap, context))];\n case 'Cosh':\n return [tfOps.cosh(getParamValue('x', node, tensorMap, context))];\n case 'Elu':\n return [tfOps.elu(getParamValue('x', node, tensorMap, context))];\n case 'Erf':\n return [tfOps.erf(getParamValue('x', node, tensorMap, context))];\n case 'Exp':\n return [tfOps.exp(getParamValue('x', node, tensorMap, context))];\n case 'Expm1': {\n return [tfOps.expm1(getParamValue('x', node, tensorMap, context))];\n }\n case 'Floor':\n return [tfOps.floor(getParamValue('x', node, tensorMap, context))];\n case 'Log':\n return [tfOps.log(getParamValue('x', node, tensorMap, context))];\n case 'Log1p': {\n return [tfOps.log1p(getParamValue('x', node, tensorMap, context))];\n }\n case 'Imag':\n return [tfOps.imag(getParamValue('x', node, tensorMap, context))];\n case 'Neg':\n return [tfOps.neg(getParamValue('x', node, tensorMap, context))];\n case 'Reciprocal': {\n return [tfOps.reciprocal(getParamValue('x', node, tensorMap, context))];\n }\n case 'Real':\n return [tfOps.real(getParamValue('x', node, tensorMap, context))];\n case 'Relu':\n return [tfOps.relu(getParamValue('x', node, tensorMap, context))];\n case 'Round': {\n return [tfOps.round(getParamValue('x', node, tensorMap, context))];\n }\n case 'Selu':\n return [tfOps.selu(getParamValue('x', node, 
tensorMap, context))];\n case 'Sigmoid':\n return [tfOps.sigmoid(getParamValue('x', node, tensorMap, context))];\n case 'Sin':\n return [tfOps.sin(getParamValue('x', node, tensorMap, context))];\n case 'Sign': {\n return [tfOps.sign(getParamValue('x', node, tensorMap, context))];\n }\n case 'Sinh': {\n return [tfOps.sinh(getParamValue('x', node, tensorMap, context))];\n }\n case 'Softplus': {\n return [tfOps.softplus(getParamValue('x', node, tensorMap, context))];\n }\n case 'Sqrt': {\n return [tfOps.sqrt(getParamValue('x', node, tensorMap, context))];\n }\n case 'Square': {\n return [tfOps.square(getParamValue('x', node, tensorMap, context))];\n }\n case 'Tanh': {\n return [tfOps.tanh(getParamValue('x', node, tensorMap, context))];\n }\n case 'Tan':\n return [tfOps.tan(getParamValue('x', node, tensorMap, context))];\n case 'Relu6':\n case 'ClipByValue':\n return [tfOps.clipByValue(getParamValue('x', node, tensorMap, context), getParamValue('clipValueMin', node, tensorMap, context), getParamValue('clipValueMax', node, tensorMap, context))];\n case 'Rsqrt':\n return [tfOps.rsqrt(getTensor(node.inputNames[0], tensorMap, context))];\n case 'Prod':\n return [tfOps.prod(getParamValue('x', node, tensorMap, context), getParamValue('axes', node, tensorMap, context))];\n case 'LeakyRelu':\n return [tfOps.leakyRelu(getParamValue('x', node, tensorMap, context), getParamValue('alpha', node, tensorMap, context))];\n case 'Prelu':\n return [tfOps.prelu(getParamValue('x', node, tensorMap, context), getParamValue('alpha', node, tensorMap, context))];\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'basic_math';\n//# sourceMappingURL=basic_math_executor.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * This differs from util.assertShapesMatch in that it allows values of\n * negative one, an undefined size of a dimensinon, in a shape to match\n * anything.\n */\nimport { util } from '@tensorflow/tfjs-core';\nexport function assertShapesMatchAllowUndefinedSize(shapeA, shapeB, errorMessagePrefix = '') {\n util.assert(shapesEqualAllowUndefinedSize(shapeA, shapeB), () => errorMessagePrefix + ` Shapes ${shapeA} and ${shapeB} must match`);\n}\nexport function shapesEqualAllowUndefinedSize(n1, n2) {\n if (n1.length !== n2.length) {\n return false;\n }\n for (let i = 0; i < n1.length; i++) {\n if (n1[i] !== -1 && n2[i] !== -1 && n1[i] !== n2[i]) {\n return false;\n }\n }\n return true;\n}\n//# sourceMappingURL=tensor_utils.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { concat, keep, reshape, scalar, slice, stack, tensor, tidy, unstack } from '@tensorflow/tfjs-core';\nimport { assertShapesMatchAllowUndefinedSize } from './tensor_utils';\n/**\n * The TensorArray object keeps an array of Tensors. It\n * allows reading from the array and writing to the array.\n */\nexport class TensorArray {\n constructor(name, dtype, maxSize, elementShape, identicalElementShapes, dynamicSize, clearAfterRead) {\n this.name = name;\n this.dtype = dtype;\n this.maxSize = maxSize;\n this.elementShape = elementShape;\n this.identicalElementShapes = identicalElementShapes;\n this.dynamicSize = dynamicSize;\n this.clearAfterRead = clearAfterRead;\n this.tensors = [];\n this.closed_ = false;\n this.idTensor = scalar(0);\n keep(this.idTensor);\n }\n get id() {\n return this.idTensor.id;\n }\n get closed() {\n return this.closed_;\n }\n /**\n * Dispose the tensors and idTensor and mark the TensoryArray as closed.\n */\n clearAndClose(keepIds) {\n this.tensors.forEach(tensor => {\n if (keepIds == null || !keepIds.has(tensor.tensor.id)) {\n tensor.tensor.dispose();\n }\n });\n this.tensors = [];\n this.closed_ = true;\n this.idTensor.dispose();\n }\n size() {\n return this.tensors.length;\n }\n /**\n * Read the value at location index in the TensorArray.\n * @param index Number the index to read from.\n */\n read(index) {\n if (this.closed_) {\n throw new Error(`TensorArray ${this.name} has already been closed.`);\n }\n if (index < 0 || index >= this.size()) {\n throw new Error(`Tried to read from index ${index}, but array size is: ${this.size()}`);\n }\n const tensorWithState = this.tensors[index];\n if (tensorWithState.cleared) {\n throw new Error(`TensorArray ${this.name}: Could not read index ${index} twice because it was cleared after a previous read ` +\n `(perhaps try setting clear_after_read = false?).`);\n }\n if (this.clearAfterRead) {\n tensorWithState.cleared = true;\n }\n tensorWithState.read = true;\n return tensorWithState.tensor;\n }\n /**\n * Helper method to read multiple tensors from the specified indices.\n */\n readMany(indices) {\n return indices.map(index => this.read(index));\n }\n /**\n * Write value into the index of the TensorArray.\n * @param index number the index to write to.\n * @param tensor\n */\n write(index, tensor) {\n if (this.closed_) {\n throw new Error(`TensorArray ${this.name} has already been closed.`);\n }\n if (index < 0 || !this.dynamicSize && index >= this.maxSize) {\n throw new Error(`Tried to write to index ${index}, but array is not resizeable and size is: ${this.maxSize}`);\n }\n const t = this.tensors[index] || {};\n if (tensor.dtype !== this.dtype) {\n throw new Error(`TensorArray ${this.name}: Could not write to TensorArray index ${index},\n because the value dtype is ${tensor.dtype}, but TensorArray dtype is ${this.dtype}.`);\n }\n // Set the shape for the first 
time write to unknow shape tensor array\n if (this.size() === 0 &&\n (this.elementShape == null || this.elementShape.length === 0)) {\n this.elementShape = tensor.shape;\n }\n assertShapesMatchAllowUndefinedSize(this.elementShape, tensor.shape, `TensorArray ${this.name}: Could not write to TensorArray index ${index}.`);\n if (t.read) {\n throw new Error(`TensorArray ${this.name}: Could not write to TensorArray index ${index}, because it has already been read.`);\n }\n if (t.written) {\n throw new Error(`TensorArray ${this.name}: Could not write to TensorArray index ${index}, because it has already been written.`);\n }\n t.tensor = tensor;\n keep(tensor);\n t.written = true;\n this.tensors[index] = t;\n }\n /**\n * Helper method to write multiple tensors to the specified indices.\n */\n writeMany(indices, tensors) {\n if (indices.length !== tensors.length) {\n throw new Error(`TensorArray ${this.name}: could not write multiple tensors,` +\n `because the index size: ${indices.length} is not the same as tensors size: ${tensors.length}.`);\n }\n indices.forEach((i, index) => this.write(i, tensors[index]));\n }\n /**\n * Return selected values in the TensorArray as a packed Tensor. All of\n * selected values must have been written and their shapes must all match.\n * @param [indices] number[] Optional. Taking values in [0, max_value). If the\n * TensorArray is not dynamic, max_value=size(). If not specified returns\n * all tensors in the original order.\n * @param [dtype]\n */\n gather(indices, dtype) {\n if (!!dtype && dtype !== this.dtype) {\n throw new Error(`TensorArray dtype is ${this.dtype} but gather requested dtype ${dtype}`);\n }\n if (!indices) {\n indices = [];\n for (let i = 0; i < this.size(); i++) {\n indices.push(i);\n }\n }\n else {\n indices = indices.slice(0, this.size());\n }\n if (indices.length === 0) {\n return tensor([], [0].concat(this.elementShape));\n }\n // Read all the PersistentTensors into a vector to keep track of\n // their memory.\n const tensors = this.readMany(indices);\n assertShapesMatchAllowUndefinedSize(this.elementShape, tensors[0].shape, 'TensorArray shape mismatch: ');\n return stack(tensors, 0);\n }\n /**\n * Return the values in the TensorArray as a concatenated Tensor.\n */\n concat(dtype) {\n if (!!dtype && dtype !== this.dtype) {\n throw new Error(`TensorArray dtype is ${this.dtype} but concat requested dtype ${dtype}`);\n }\n if (this.size() === 0) {\n return tensor([], [0].concat(this.elementShape));\n }\n const indices = [];\n for (let i = 0; i < this.size(); i++) {\n indices.push(i);\n }\n // Collect all the tensors from the tensors array.\n const tensors = this.readMany(indices);\n assertShapesMatchAllowUndefinedSize(this.elementShape, tensors[0].shape, `TensorArray shape mismatch: tensor array shape (${this.elementShape}) vs first tensor shape (${tensors[0].shape})`);\n return concat(tensors, 0);\n }\n /**\n * Scatter the values of a Tensor in specific indices of a TensorArray.\n * @param indices nummber[] values in [0, max_value). If the\n * TensorArray is not dynamic, max_value=size().\n * @param tensor Tensor input tensor.\n */\n scatter(indices, tensor) {\n if (tensor.dtype !== this.dtype) {\n throw new Error(`TensorArray dtype is ${this.dtype} but tensor has dtype ${tensor.dtype}`);\n }\n if (indices.length !== tensor.shape[0]) {\n throw new Error(`Expected len(indices) == tensor.shape[0], but saw: ${indices.length} vs. 
${tensor.shape[0]}`);\n }\n const maxIndex = Math.max(...indices);\n if (!this.dynamicSize && maxIndex >= this.maxSize) {\n throw new Error(`Max index must be < array size (${maxIndex} vs. ${this.maxSize})`);\n }\n this.writeMany(indices, unstack(tensor, 0));\n }\n /**\n * Split the values of a Tensor into the TensorArray.\n * @param length number[] with the lengths to use when splitting value along\n * its first dimension.\n * @param tensor Tensor, the tensor to split.\n */\n split(length, tensor) {\n if (tensor.dtype !== this.dtype) {\n throw new Error(`TensorArray dtype is ${this.dtype} but tensor has dtype ${tensor.dtype}`);\n }\n let totalLength = 0;\n const cumulativeLengths = length.map(len => {\n totalLength += len;\n return totalLength;\n });\n if (totalLength !== tensor.shape[0]) {\n throw new Error(`Expected sum of lengths to be equal to\n tensor.shape[0], but sum of lengths is\n ${totalLength}, and tensor's shape is: ${tensor.shape}`);\n }\n if (!this.dynamicSize && length.length !== this.maxSize) {\n throw new Error(`TensorArray's size is not equal to the size of lengths (${this.maxSize} vs. ${length.length}), ` +\n 'and the TensorArray is not marked as dynamically resizeable');\n }\n const elementPerRow = totalLength === 0 ? 0 : tensor.size / totalLength;\n const tensors = [];\n tidy(() => {\n tensor = reshape(tensor, [1, totalLength, elementPerRow]);\n for (let i = 0; i < length.length; ++i) {\n const previousLength = (i === 0) ? 0 : cumulativeLengths[i - 1];\n const indices = [0, previousLength, 0];\n const sizes = [1, length[i], elementPerRow];\n tensors[i] = reshape(slice(tensor, indices, sizes), this.elementShape);\n }\n return tensors;\n });\n const indices = [];\n for (let i = 0; i < length.length; i++) {\n indices[i] = i;\n }\n this.writeMany(indices, tensors);\n }\n}\n//# sourceMappingURL=tensor_array.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { concat, keep, reshape, scalar, slice, stack, tensor, tidy, unstack } from '@tensorflow/tfjs-core';\nimport { assertShapesMatchAllowUndefinedSize } from './tensor_utils';\n/**\n * TensorList stores a container of `tf.Tensor` objects, which are accessible\n * via tensors field.\n *\n * In order to get a copy of the underlying list, use the copy method:\n * ```\n * TensorList b = a.copy();\n * b.tensors().pushBack(t); // This does not modify a.tensors().\n * ```\n *\n * Note that this is not a deep copy: the memory locations of the underlying\n * tensors will still point to the same locations of the corresponding tensors\n * in the original.\n */\nexport class TensorList {\n /**\n *\n * @param tensors list of tensors\n * @param elementShape shape of each tensor\n * @param elementDtype data type of each tensor\n * @param maxNumElements The maximum allowed size of `tensors`. 
[Machine-generated bundle content omitted: JSON-escaped source strings (sourcemap `sourcesContent`) embedding verbatim @tensorflow/tfjs-converter files (Apache-2.0, Google LLC) — executor/tensor_list.js and the operation executors control_executor.js, convolution_executor.js, creation_executor.js, dynamic_executor.js, evaluation_executor.js, graph_executor.js, hash_table.js, hash_table_executor.js, image_executor.js, logical_executor.js, matrices_executor.js, normalization_executor.js, reduction_executor.js, slice_join_executor.js, spectral_executor.js.]
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// tslint:disable-next-line: no-imports-from-dist\nimport * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter';\nimport { getParamValue } from './utils';\nexport const executeOp = (node, tensorMap, context) => {\n switch (node.op) {\n case 'Cast': {\n return [tfOps.cast(getParamValue('x', node, tensorMap, context), getParamValue('dtype', node, tensorMap, context))];\n }\n case 'ExpandDims': {\n const axis = getParamValue('axis', node, tensorMap, context);\n return [tfOps.expandDims(getParamValue('x', node, tensorMap, context), axis)];\n }\n case 'Squeeze': {\n const axis = getParamValue('axis', node, tensorMap, context);\n return [tfOps.squeeze(getParamValue('x', node, tensorMap, context), axis)];\n }\n case 'Reshape': {\n return [tfOps.reshape(getParamValue('x', node, tensorMap, context), getParamValue('shape', node, tensorMap, context))];\n }\n case 'MirrorPad': {\n return [tfOps.mirrorPad(getParamValue('x', node, tensorMap, context), getParamValue('padding', node, tensorMap, context), getParamValue('mode', node, tensorMap, context))];\n }\n case 'PadV2':\n case 'Pad': {\n return [tfOps.pad(getParamValue('x', node, tensorMap, context), getParamValue('padding', node, tensorMap, context), getParamValue('constantValue', node, tensorMap, context))];\n }\n case 'SpaceToBatchND': {\n const blockShape = getParamValue('blockShape', node, tensorMap, context);\n const paddings = getParamValue('paddings', node, tensorMap, context);\n return [tfOps.spaceToBatchND(getParamValue('x', node, tensorMap, context), blockShape, paddings)];\n }\n case 'BatchToSpaceND': {\n const blockShape = getParamValue('blockShape', node, tensorMap, context);\n const crops = getParamValue('crops', node, tensorMap, context);\n return [tfOps.batchToSpaceND(getParamValue('x', node, tensorMap, context), blockShape, crops)];\n }\n case 'DepthToSpace': {\n const blockSize = getParamValue('blockSize', node, tensorMap, context);\n const dataFormat = getParamValue('dataFormat', node, tensorMap, context).toUpperCase();\n return [tfOps.depthToSpace(getParamValue('x', node, tensorMap, context), blockSize, dataFormat)];\n }\n case 'BroadcastTo': {\n return [tfOps.broadcastTo(getParamValue('x', node, tensorMap, context), getParamValue('shape', node, tensorMap, context))];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'transformation';\n//# sourceMappingURL=transformation_executor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { NodeValueImpl } from './custom_op/node_value_impl';\nimport { getRegisteredOp } from './custom_op/register';\nimport * as arithmetic from './executors/arithmetic_executor';\nimport * as basicMath from './executors/basic_math_executor';\nimport * as control from './executors/control_executor';\nimport * as convolution from './executors/convolution_executor';\nimport * as creation from './executors/creation_executor';\nimport * as dynamic from './executors/dynamic_executor';\nimport * as evaluation from './executors/evaluation_executor';\nimport * as graph from './executors/graph_executor';\nimport * as hashTable from './executors/hash_table_executor';\nimport * as image from './executors/image_executor';\nimport * as logical from './executors/logical_executor';\nimport * as matrices from './executors/matrices_executor';\nimport * as normalization from './executors/normalization_executor';\nimport * as reduction from './executors/reduction_executor';\nimport * as sliceJoin from './executors/slice_join_executor';\nimport * as spectral from './executors/spectral_executor';\nimport * as transformation from './executors/transformation_executor';\n/**\n * Executes the op defined by the node object.\n * @param node\n * @param tensorMap contains tensors for executed nodes and weights\n * @param context contains tensors and information for running the current node.\n * @param resourceManager Optional. 
Contains global resources of the model.\n */\nexport function executeOp(node, tensorMap, context, resourceManager) {\n const value = ((node, tensorMap, context) => {\n switch (node.category) {\n case 'arithmetic':\n return tfc.tidy(() => arithmetic.executeOp(node, tensorMap, context));\n case 'basic_math':\n return tfc.tidy(() => basicMath.executeOp(node, tensorMap, context));\n case 'control':\n return control.executeOp(node, tensorMap, context);\n case 'convolution':\n return tfc.tidy(() => convolution.executeOp(node, tensorMap, context));\n case 'creation':\n return tfc.tidy(() => creation.executeOp(node, tensorMap, context));\n case 'dynamic':\n return dynamic.executeOp(node, tensorMap, context);\n case 'evaluation':\n return tfc.tidy(() => evaluation.executeOp(node, tensorMap, context));\n case 'image':\n return tfc.tidy(() => image.executeOp(node, tensorMap, context));\n case 'graph':\n return tfc.tidy(() => graph.executeOp(node, tensorMap, context));\n case 'logical':\n return tfc.tidy(() => logical.executeOp(node, tensorMap, context));\n case 'matrices':\n return tfc.tidy(() => matrices.executeOp(node, tensorMap, context));\n case 'normalization':\n return tfc.tidy(() => normalization.executeOp(node, tensorMap, context));\n case 'reduction':\n return tfc.tidy(() => reduction.executeOp(node, tensorMap, context));\n case 'slice_join':\n return tfc.tidy(() => sliceJoin.executeOp(node, tensorMap, context));\n case 'spectral':\n return tfc.tidy(() => spectral.executeOp(node, tensorMap, context));\n case 'transformation':\n return tfc.tidy(() => transformation.executeOp(node, tensorMap, context));\n case 'hash_table':\n return hashTable.executeOp(node, tensorMap, context, resourceManager);\n case 'custom':\n const opMapper = getRegisteredOp(node.op);\n if (opMapper && opMapper.customExecutor) {\n return opMapper.customExecutor(new NodeValueImpl(node, tensorMap, context));\n }\n else {\n throw TypeError(`Custom op ${node.op} is not registered.`);\n }\n default:\n throw TypeError(`Unknown op '${node.op}'. File an issue at ` +\n `https://github.com/tensorflow/tfjs/issues so we can add it` +\n `, or register a custom execution with tf.registerOp()`);\n }\n })(node, tensorMap, context);\n if (tfc.util.isPromise(value)) {\n return value.then((data) => [].concat(data));\n }\n return [].concat(value);\n}\n//# sourceMappingURL=operation_executor.js.map", "/**\n * ExecutionContext captures the runtime environment of the node. 
It keeps\n * track of the current frame and iteration for the control flow ops.\n *\n * For example, typical Dynamic RNN model may contain loops, for which\n * TensorFlow will generate graphs with Enter/Exit nodes to control the\n * current execution frame, and NextIteration Nodes for iteration id increment.\n * For model with branch logic, TensorFLow will generate Switch/Merge ops.\n */\nexport class ExecutionContext {\n constructor(weightMap = {}, tensorArrayMap = {}, tensorListMap = {}, functionMap = {}) {\n this.weightMap = weightMap;\n this.tensorArrayMap = tensorArrayMap;\n this.tensorListMap = tensorListMap;\n this.functionMap = functionMap;\n this.rootContext = { id: 0, frameName: '', iterationId: 0 };\n this.contexts = [this.rootContext];\n this.lastId = 0;\n this.generateCurrentContextIds();\n }\n newFrame(id, frameName) {\n return { id, frameName, iterationId: 0 };\n }\n /**\n * Set the current context\n * @param contexts: ExecutionContextInfo[] the current path of execution\n * frames\n */\n set currentContext(contexts) {\n if (this.contexts !== contexts) {\n this.contexts = contexts;\n this.generateCurrentContextIds();\n }\n }\n get currentContext() {\n return this.contexts;\n }\n /**\n * Returns the current context in string format.\n */\n get currentContextId() {\n return this._currentContextIds[0];\n }\n /**\n * Returns the current context and all parent contexts in string format.\n * This allow access to the nodes in the current and parent frames.\n */\n get currentContextIds() {\n return this._currentContextIds;\n }\n generateCurrentContextIds() {\n const names = [];\n for (let i = 0; i < this.contexts.length - 1; i++) {\n const contexts = this.contexts.slice(0, this.contexts.length - i);\n names.push(this.contextIdforContexts(contexts));\n }\n names.push('');\n this._currentContextIds = names;\n }\n contextIdforContexts(contexts) {\n return contexts ?\n contexts\n .map(context => (context.id === 0 && context.iterationId === 0) ?\n '' :\n `${context.frameName}-${context.iterationId}`)\n .join('/') :\n '';\n }\n /**\n * Enter a new frame, a new context is pushed on the current context list.\n * @param frameId new frame id\n */\n enterFrame(frameId) {\n if (this.contexts) {\n this.lastId++;\n this.contexts = this.contexts.slice();\n this.contexts.push(this.newFrame(this.lastId, frameId));\n this._currentContextIds.unshift(this.contextIdforContexts(this.contexts));\n }\n }\n /**\n * Exit the current frame, the last context is removed from the current\n * context list.\n */\n exitFrame() {\n if (this.contexts && this.contexts.length > 1) {\n this.contexts = this.contexts.slice();\n this.contexts.splice(-1);\n this.currentContextIds.shift();\n }\n else {\n throw new Error('Cannot exit frame, the context is empty');\n }\n }\n /**\n * Enter the next iteration of a loop, the iteration id of last context is\n * increased.\n */\n nextIteration() {\n if (this.contexts && this.contexts.length > 0) {\n this.contexts = this.contexts.slice();\n this.lastId++;\n const context = Object.assign({}, this.contexts[this.contexts.length - 1]);\n context.iterationId += 1;\n context.id = this.lastId;\n this.contexts.splice(-1, 1, context);\n this._currentContextIds.splice(0, 1, this.contextIdforContexts(this.contexts));\n }\n else {\n throw new Error('Cannot increase frame iteration, the context is empty');\n }\n }\n getWeight(name) {\n return this.weightMap[name];\n }\n addTensorArray(tensorArray) {\n this.tensorArrayMap[tensorArray.id] = tensorArray;\n }\n getTensorArray(id) {\n return 
this.tensorArrayMap[id];\n }\n addTensorList(tensorList) {\n this.tensorListMap[tensorList.id] = tensorList;\n }\n getTensorList(id) {\n return this.tensorListMap[id];\n }\n dispose(keepIds) {\n for (const key in this.tensorArrayMap) {\n this.tensorArrayMap[key].clearAndClose(keepIds);\n }\n for (const key in this.tensorListMap) {\n this.tensorListMap[key].clearAndClose(keepIds);\n }\n }\n}\n//# sourceMappingURL=execution_context.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { parseNodeName } from '../operations/executors/utils';\n/**\n * Given graph inputs and desired outputs, find the minimal set of nodes\n * to execute in order to compute the outputs. In addition return other useful\n * info such:\n * - Missing inputs needed to compute the output.\n * - Whether the subgraph contains dynamic ops (control flow, dynamic shape).\n * - Alternative inputs in order to avoid async (dynamic op) execution.\n */\nexport function getExecutionSubgraph(inputs, outputs, weightMap, initNodes) {\n const usedNodes = new Set();\n const missingInputs = [];\n let dynamicNode = null;\n let syncInputs = null;\n // Start with the outputs, going backwards and find all the nodes that are\n // needed to compute those outputs.\n const seen = new Set();\n const inputNodeNames = Object.keys(inputs).map(name => parseNodeName(name)[0]);\n let initNodeNames = [];\n if (initNodes != null) {\n initNodeNames = initNodes.map(node => parseNodeName(node.name)[0]);\n }\n const frontier = [...outputs];\n while (frontier.length > 0) {\n const node = frontier.pop();\n if (isControlFlow(node) || isDynamicShape(node) || isHashTable(node)) {\n if (dynamicNode == null) {\n dynamicNode = node;\n syncInputs = dynamicNode.children.map(child => child.name)\n .filter(name => usedNodes.has(name));\n }\n }\n usedNodes.add(node.name);\n // Weights are dead end since we already have their values.\n if (weightMap[node.name] != null) {\n continue;\n }\n // This node is a dead end since it's one of the user-provided inputs.\n if (inputNodeNames.indexOf(node.name) !== -1) {\n continue;\n }\n // This node is a dead end since it doesn't have any inputs.\n if (initNodeNames.indexOf(node.name) !== -1) {\n continue;\n }\n if (node.inputs.length === 0) {\n missingInputs.push(node.name);\n continue;\n }\n node.inputs.forEach(input => {\n // Don't add to the frontier if it is already there.\n if (seen.has(input.name)) {\n return;\n }\n seen.add(input.name);\n frontier.push(input);\n });\n }\n return { inputs, outputs, usedNodes, missingInputs, dynamicNode, syncInputs };\n}\n/**\n * Given the execution info, return a list of nodes in topological order that\n * need to be executed to compute the output.\n */\nexport function getNodesInTopologicalOrder(graph, weightMap, executionInfo) {\n const { usedNodes, inputs } = executionInfo;\n const frontier = [];\n const inputNodes = 
Object.keys(inputs)\n .map(name => parseNodeName(name)[0])\n .map(name => graph.nodes[name]);\n const initNodes = graph.initNodes;\n inputNodes.forEach(input => {\n if (usedNodes.has(input.name)) {\n frontier.push(input);\n }\n });\n graph.weights.forEach(weight => {\n if (usedNodes.has(weight.name)) {\n frontier.push(weight);\n }\n });\n if (initNodes != null) {\n initNodes.forEach(node => {\n if (usedNodes.has(node.name)) {\n frontier.push(node);\n }\n });\n }\n const seen = new Set();\n const orderedNodes = [];\n while (frontier.length > 0) {\n const node = frontier.pop();\n seen.add(node.name);\n if (!weightMap[node.name]) {\n orderedNodes.push(node);\n }\n node.children.forEach(child => {\n if (!seen.has(child.name) && usedNodes.has(child.name) &&\n child.inputs.every(input => seen.has(input.name))) {\n frontier.push(child);\n }\n });\n }\n return orderedNodes;\n}\nconst CONTROL_FLOW_OPS = [\n 'Switch', 'Merge', 'Enter', 'Exit', 'NextIteration', 'StatelessIf',\n 'StatelessWhile', 'if', 'While'\n];\nconst DYNAMIC_SHAPE_OPS = [\n 'NonMaxSuppressionV2', 'NonMaxSuppressionV3', 'NonMaxSuppressionV5', 'Where'\n];\nconst HASH_TABLE_OPS = [\n 'HashTable', 'HashTableV2', 'LookupTableImport', 'LookupTableImportV2',\n 'LookupTableFind', 'LookupTableFindV2'\n];\nexport function isControlFlow(node) {\n return CONTROL_FLOW_OPS.indexOf(node.op) >= 0;\n}\nexport function isDynamicShape(node) {\n return DYNAMIC_SHAPE_OPS.indexOf(node.op) >= 0;\n}\nexport function isHashTable(node) {\n return HASH_TABLE_OPS.indexOf(node.op) >= 0;\n}\n//# sourceMappingURL=model_analysis.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { tidy, util } from '@tensorflow/tfjs-core';\nimport { getNodeNameAndIndex, getParamValue, getTensor, getTensorsForCurrentContenxt, parseNodeName } from '../operations/executors/utils';\nimport { executeOp } from '../operations/operation_executor';\nimport { ExecutionContext } from './execution_context';\nimport { getExecutionSubgraph, getNodesInTopologicalOrder, isControlFlow } from './model_analysis';\nexport class GraphExecutor {\n /**\n *\n * @param graph Graph the model or function graph to be executed.\n * @param parent When building function exector you need to set the parent\n * executor. 
Since the weights and function executor maps are set at parant\n * level, that function executor can access the function maps and weight maps\n * through the parent.\n */\n constructor(graph, parent) {\n this.graph = graph;\n this.parent = parent;\n this.compiledMap = new Map();\n this._weightMap = {};\n this.SEPERATOR = ',';\n this._functions = {};\n this._functionExecutorMap = {};\n this._outputs = graph.outputs;\n this._inputs = graph.inputs;\n this._initNodes = graph.initNodes;\n this._signature = graph.signature;\n this._functions = graph.functions;\n // create sub-graph executors\n if (graph.functions != null) {\n Object.keys(graph.functions).forEach(name => {\n this._functionExecutorMap[name] =\n new GraphExecutor(graph.functions[name], this);\n });\n }\n }\n get weightIds() {\n return this.parent ? this.parent.weightIds : this._weightIds;\n }\n get functionExecutorMap() {\n return this.parent ? this.parent.functionExecutorMap :\n this._functionExecutorMap;\n }\n get weightMap() {\n return this.parent ? this.parent.weightMap : this._weightMap;\n }\n set weightMap(weightMap) {\n const weightIds = Object.keys(weightMap).map(key => weightMap[key].map(tensor => tensor.id));\n this._weightIds = [].concat(...weightIds);\n this._weightMap = weightMap;\n }\n /**\n * Set `ResourceManager` shared by executors of a model.\n * @param resourceManager: `ResourceManager` of the `GraphModel`.\n */\n set resourceManager(resourceManager) {\n this._resourceManager = resourceManager;\n }\n get inputs() {\n return this._inputs.map(node => {\n return {\n name: node.name,\n shape: node.attrParams['shape'] ?\n node.attrParams['shape'].value :\n undefined,\n dtype: node.attrParams['dtype'] ?\n node.attrParams['dtype'].value :\n undefined\n };\n });\n }\n get outputs() {\n return this._outputs.map(node => {\n return {\n name: node.name,\n shape: node.attrParams['shape'] ?\n node.attrParams['shape'].value :\n undefined,\n dtype: node.attrParams['dtype'] ?\n node.attrParams['dtype'].value :\n undefined\n };\n });\n }\n get inputNodes() {\n return this._inputs.map(node => node.signatureKey || node.name);\n }\n get outputNodes() {\n return this._outputs.map((node) => {\n const name = node.signatureKey || node.name;\n return node.defaultOutput ? (`${name}:${node.defaultOutput}`) : name;\n });\n }\n get functions() {\n return Object.keys(this._functions).reduce((map, key) => {\n map[key] = this._functions[key].signature;\n return map;\n }, {});\n }\n getCompilationKey(inputs, outputs) {\n const sortedInputs = inputs.map(node => node.name).sort();\n const sortedOutputs = outputs.map(node => node.name).sort();\n return sortedInputs.join(this.SEPERATOR) + '--' +\n sortedOutputs.join(this.SEPERATOR);\n }\n /**\n * Compiles the inference graph and returns the minimal set of nodes that are\n * required for execution, in the correct execution order.\n */\n compile(inputs, outputs) {\n const executionInfo = getExecutionSubgraph(inputs, outputs, this.weightMap, this._initNodes);\n const { missingInputs, dynamicNode, syncInputs } = executionInfo;\n if (dynamicNode != null) {\n throw new Error(`This execution contains the node '${dynamicNode.name}', which has ` +\n `the dynamic op '${dynamicNode.op}'. Please use ` +\n `model.executeAsync() instead. 
Alternatively, to avoid the ` +\n `dynamic ops, specify the inputs [${syncInputs}]`);\n }\n if (missingInputs.length > 0) {\n const outNames = outputs.map(n => n.name);\n const inNames = Object.keys(inputs);\n throw new Error(`Cannot compute the outputs [${outNames}] from the provided inputs ` +\n `[${inNames}]. Missing the following inputs: [${missingInputs}]`);\n }\n return getNodesInTopologicalOrder(this.graph, this.weightMap, executionInfo);\n }\n /**\n * Executes the inference for given input tensors.\n * @param inputs Tensor map for the model inputs, keyed by the input node\n * names.\n * @param outputs Optional. output node name from the Tensorflow model, if\n * no outputs are specified, the default outputs of the model would be used.\n * You can inspect intermediate nodes of the model by adding them to the\n * outputs array.\n */\n execute(inputs, outputs) {\n inputs = this.mapInputs(inputs);\n const names = Object.keys(inputs).sort();\n this.checkInputs(inputs);\n this.checkInputShapeAndType(inputs);\n outputs = this.mapOutputs(outputs);\n this.checkOutputs(outputs);\n const inputNodes = names.map(name => this.graph.nodes[parseNodeName(name)[0]]);\n const outputNodeNames = outputs.map(name => parseNodeName(name)[0]);\n let outputNodes = outputNodeNames.map(name => this.graph.nodes[name]);\n // If no outputs are specified, then use the default outputs of the model.\n if (outputNodes.length === 0) {\n outputNodes = this._outputs;\n }\n const compilationKey = this.getCompilationKey(inputNodes, outputNodes);\n // Do nothing if the compiled graph cache contains the input.\n let orderedNodes = this.compiledMap.get(compilationKey);\n if (orderedNodes == null) {\n orderedNodes = this.compile(inputs, outputNodes);\n this.compiledMap.set(compilationKey, orderedNodes);\n }\n const tensorArrayMap = {};\n const tensorListMap = {};\n return tidy(() => {\n const context = new ExecutionContext(this.weightMap, tensorArrayMap, tensorListMap, this.functionExecutorMap);\n const tensorsMap = Object.assign({}, this.weightMap);\n Object.keys(inputs).forEach(name => {\n const [nodeName, index] = parseNodeName(name);\n const tensors = [];\n tensors[index] = inputs[name];\n tensorsMap[nodeName] = tensors;\n });\n const tensorsToKeep = this.getFrozenTensorIds(tensorsMap);\n const intermediateTensorConsumerCount = {};\n for (let i = 0; i < orderedNodes.length; i++) {\n const node = orderedNodes[i];\n if (!tensorsMap[node.name]) {\n const tensors = executeOp(node, tensorsMap, context, this._resourceManager);\n if (util.isPromise(tensors)) {\n throw new Error(`The execution of the op '${node.op}' returned a promise. 
` +\n `Please use model.executeAsync() instead.`);\n }\n tensorsMap[node.name] = tensors;\n this.checkTensorForDisposal(node.name, node, tensorsMap, context, tensorsToKeep, outputNodeNames, intermediateTensorConsumerCount);\n }\n }\n // dispose the context for the root executor\n if (this.parent == null) {\n context.dispose(tensorsToKeep);\n }\n return outputs.map(name => getTensor(name, tensorsMap, context));\n });\n }\n getFrozenTensorIds(tensorMap) {\n const ids = [].concat.apply([], Object.keys(tensorMap)\n .map(key => tensorMap[key])\n .map(tensors => tensors.map(tensor => tensor.id)));\n return new Set(ids);\n }\n checkTensorForDisposal(nodeName, node, tensorMap, context, tensorsToKeep, outputNames, intermediateTensorConsumerCount) {\n // Skip output nodes and any control flow nodes, since its dependency is\n // tricky to track correctly.\n if (node.category === 'control' || outputNames.indexOf(nodeName) !== -1) {\n return;\n }\n tensorMap[nodeName].forEach(tensor => {\n if (tensor != null) {\n intermediateTensorConsumerCount[tensor.id] =\n (intermediateTensorConsumerCount[tensor.id] || 0) +\n node.children.length;\n }\n });\n node.inputs.forEach(input => {\n // Skip any control flow nodes, since its dependency is tricky to track\n // correctly.\n if (input.category !== 'control') {\n const tensors = getTensorsForCurrentContenxt(input.name, tensorMap, context);\n if (tensors != null) {\n tensors.forEach(tensor => {\n if (tensor && !tensorsToKeep.has(tensor.id)) {\n const count = intermediateTensorConsumerCount[tensor.id];\n if (count === 1) {\n tensor.dispose();\n delete intermediateTensorConsumerCount[tensor.id];\n }\n else if (count != null) {\n // only intermediate nodes has count set, inputs and weights are\n // not.\n intermediateTensorConsumerCount[tensor.id]--;\n }\n }\n });\n }\n }\n });\n }\n /**\n * Executes the inference for given input tensors in Async fashion.\n * @param inputs Tensor map for the model inputs, keyed by the input node\n * names.\n * @param outputs output node name from the Tensorflow model, if no outputs\n * are specified, the default outputs of the model would be used. You can\n * inspect intermediate nodes of the model by adding them to the outputs\n * array.\n */\n async executeAsync(inputs, outputs) {\n return this._executeAsync(inputs, outputs);\n }\n /**\n * Executes the inference for given input tensors in Async fashion.\n * @param inputs Tensor map for the model inputs, keyed by the input node\n * names.\n * @param outputs Optional. output node name from the Tensorflow model,\n * if no outputs are specified, the default outputs of the model would be\n * used. You can inspect intermediate nodes of the model by adding them to the\n * outputs array.\n * @param isFunctionExecution Optional. Flag for executing a function.\n * @param tensorArrayMap Optional, global TensorArray map by id. Used for\n * function execution.\n * @param tensorArrayMap Optinal global TensorList map by id. 
Used for\n * function execution.\n */\n async _executeAsync(inputs, outputs, isFunctionExecution = false, tensorArrayMap = {}, tensorListMap = {}) {\n if (!isFunctionExecution) {\n inputs = this.mapInputs(inputs);\n this.checkInputs(inputs);\n this.checkInputShapeAndType(inputs);\n outputs = this.mapOutputs(outputs);\n this.checkOutputs(outputs);\n }\n const context = new ExecutionContext(this.weightMap, tensorArrayMap, tensorListMap, this.functionExecutorMap);\n // Graph with control flow op requires runtime evaluation of the execution\n // order, while without control flow the execution order is pre-determined\n // in the compile method.\n const tensorMap = await this.executeWithControlFlow(inputs, context, outputs, isFunctionExecution);\n const results = outputs.map(name => getTensor(name, tensorMap, context));\n // dispose all the intermediate tensors\n const outputIds = results.map(t => t.id);\n const inputIds = Object.keys(inputs).map(name => inputs[name].id);\n const keepIds = new Set([...outputIds, ...inputIds, ...this.weightIds]);\n Object.keys(tensorMap).forEach(key => {\n const tensorArray = tensorMap[key];\n tensorArray.forEach(tensor => {\n if (tensor && !tensor.isDisposed && !keepIds.has(tensor.id)) {\n tensor.dispose();\n }\n });\n });\n // dispose the context for the root executor\n if (this.parent == null) {\n context.dispose(keepIds);\n }\n return results;\n }\n async executeFunctionAsync(inputs, tensorArrayMap, tensorListMap) {\n const mappedInputs = inputs.reduce((map, tensor, index) => {\n map[this.inputs[index].name] = tensor;\n return map;\n }, {});\n return this._executeAsync(mappedInputs, this.outputNodes, true, tensorArrayMap, tensorListMap);\n }\n /**\n * When there are control flow nodes in the graph, the graph execution use\n * ExecutionContext to keep track of the frames and loop iterators.\n * @param inputs placeholder tensors for the graph.\n * @param context the execution context object for current execution.\n * @param outputNames Optional. output node name from the Tensorflow model,\n * if no outputs are specified, the default outputs of the model would be\n * used. 
You can inspect intermediate nodes of the model by adding them to the\n * outputs array.\n * @param isFunctionExecution Flag for executing a function.\n */\n async executeWithControlFlow(inputs, context, outputNames, isFunctionExecution) {\n const names = Object.keys(inputs);\n const inputNodes = names.map(name => this.graph.nodes[parseNodeName(name)[0]]);\n const outputNodeNames = outputNames.map(name => parseNodeName(name)[0]);\n let outputNodes = outputNodeNames.map(name => this.graph.nodes[name]);\n // If no outputs are specified, then use the default outputs of the model.\n if (outputNodes.length === 0) {\n outputNodes = this._outputs;\n }\n const { usedNodes, missingInputs, dynamicNode, syncInputs } = getExecutionSubgraph(inputs, outputNodes, this.weightMap, this._initNodes);\n // First nodes to execute include inputNodes, weights, and initNodes.\n const stack = [\n ...inputNodes, ...this.graph.weights, ...(this._initNodes || [])\n ].map(node => {\n return { node, contexts: context.currentContext };\n });\n const tensorsMap = Object.assign({}, this.weightMap);\n Object.keys(inputs).forEach(name => {\n const [nodeName, index] = parseNodeName(name);\n const tensors = [];\n tensors[index] = inputs[name];\n tensorsMap[nodeName] = tensors;\n });\n const intermediateTensorConsumerCount = {};\n const tensorsToKeep = this.getFrozenTensorIds(tensorsMap);\n const added = {};\n while (stack.length > 0) {\n const promises = this.processStack(inputNodes, stack, context, tensorsMap, added, tensorsToKeep, outputNodeNames, intermediateTensorConsumerCount, usedNodes);\n await Promise.all(promises);\n }\n if (dynamicNode == null && !isFunctionExecution) {\n console.warn(`This model execution did not contain any nodes with control flow ` +\n `or dynamic output shapes. You can use model.execute() instead.`);\n }\n const missingOutputs = outputNodes\n .filter(node => !isControlFlow(node) &&\n !getTensor(node.name, tensorsMap, context))\n .map(node => node.name);\n if (missingOutputs.length > 0) {\n let alternativeMsg = '';\n if (dynamicNode != null) {\n alternativeMsg =\n `Alternatively, to avoid the dynamic ops, use model.execute() ` +\n `and specify the inputs [${syncInputs}]`;\n }\n throw new Error(`Cannot compute the outputs [${missingOutputs}] from the provided ` +\n `inputs [${names}]. Consider providing the following inputs: ` +\n `[${missingInputs}]. 
${alternativeMsg}`);\n }\n return tensorsMap;\n }\n processStack(inputNodes, stack, context, tensorMap, added, tensorsToKeep, outputNames, intermediateTensorConsumerCount, usedNodes) {\n const promises = [];\n while (stack.length > 0) {\n const item = stack.pop();\n context.currentContext = item.contexts;\n let nodeName = '';\n // The tensor of the Enter op with isConstant set should be set\n // in the parent scope, so it will be available as constant for the\n // whole loop.\n if (item.node.op === 'Enter' &&\n getParamValue('isConstant', item.node, tensorMap, context)) {\n [nodeName] = getNodeNameAndIndex(item.node.name, context);\n }\n // only process nodes that are not in the tensorMap yet, this include\n // inputNodes and internal initNodes.\n if (tensorMap[item.node.name] == null) {\n const tensors = executeOp(item.node, tensorMap, context, this._resourceManager);\n if (!nodeName) {\n [nodeName] = getNodeNameAndIndex(item.node.name, context);\n }\n const currentContext = context.currentContext;\n if (util.isPromise(tensors)) {\n promises.push(tensors.then(t => {\n tensorMap[nodeName] = t;\n context.currentContext = currentContext;\n this.checkTensorForDisposal(nodeName, item.node, tensorMap, context, tensorsToKeep, outputNames, intermediateTensorConsumerCount);\n this.processChildNodes(item.node, stack, context, tensorMap, added, usedNodes);\n return t;\n }));\n }\n else {\n tensorMap[nodeName] = tensors;\n this.checkTensorForDisposal(nodeName, item.node, tensorMap, context, tensorsToKeep, outputNames, intermediateTensorConsumerCount);\n this.processChildNodes(item.node, stack, context, tensorMap, added, usedNodes);\n }\n }\n else {\n this.processChildNodes(item.node, stack, context, tensorMap, added, usedNodes);\n }\n }\n return promises;\n }\n processChildNodes(node, stack, context, tensorMap, added, usedNodes) {\n node.children.forEach((childNode) => {\n const [nodeName,] = getNodeNameAndIndex(childNode.name, context);\n if (added[nodeName] || !usedNodes.has(childNode.name)) {\n return;\n }\n // Merge op can be pushed if any of its inputs has value.\n if (childNode.op === 'Merge') {\n if (childNode.inputNames.some(name => {\n return !!getTensor(name, tensorMap, context);\n })) {\n added[nodeName] = true;\n stack.push({ contexts: context.currentContext, node: childNode });\n }\n }\n else // Otherwise all inputs must to have value.\n if (childNode.inputNames.every(name => {\n return !!getTensor(name, tensorMap, context);\n })) {\n added[nodeName] = true;\n stack.push({ contexts: context.currentContext, node: childNode });\n }\n });\n }\n /**\n * Releases the memory used by the weight tensors.\n */\n dispose() {\n Object.keys(this.weightMap)\n .forEach(key => this.weightMap[key].forEach(tensor => tensor.dispose()));\n }\n checkInputShapeAndType(inputs) {\n Object.keys(inputs).forEach(name => {\n const input = inputs[name];\n const [nodeName,] = parseNodeName(name);\n const node = this.graph.nodes[nodeName];\n if (node.attrParams['shape'] && node.attrParams['shape'].value) {\n const shape = node.attrParams['shape'].value;\n const match = shape.length === input.shape.length &&\n input.shape.every((dim, index) => shape[index] === -1 || shape[index] === dim);\n util.assert(match, () => `The shape of dict['${node.name}'] provided in ` +\n `model.execute(dict) must be [${shape}], but was ` +\n `[${input.shape}]`);\n }\n if (node.attrParams['dtype'] && node.attrParams['dtype'].value) {\n util.assert(input.dtype === node.attrParams['dtype'].value, () => `The dtype of dict['${node.name}'] 
provided in ` +\n `model.execute(dict) must be ` +\n `${node.attrParams['dtype'].value}, but was ${input.dtype}`);\n }\n });\n }\n mapInputs(inputs) {\n const result = {};\n for (const inputName in inputs) {\n if (this._signature != null && this._signature.inputs != null &&\n this._signature.inputs[inputName] != null) {\n const tensor = this._signature.inputs[inputName];\n result[tensor.name] = inputs[inputName];\n }\n else {\n result[inputName] = inputs[inputName];\n }\n }\n return result;\n }\n checkInputs(inputs) {\n const notInGraph = Object.keys(inputs).filter(name => {\n const [nodeName] = parseNodeName(name);\n return this.graph.nodes[nodeName] == null;\n });\n if (notInGraph.length > 0) {\n throw new Error(`The dict provided in model.execute(dict) has ` +\n `keys: [${notInGraph}] that are not part of graph`);\n }\n }\n mapOutputs(outputs) {\n return outputs.map(name => {\n if (this._signature != null && this._signature.outputs != null &&\n this._signature.outputs[name] != null) {\n const tensor = this._signature.outputs[name];\n return tensor.name;\n }\n return name;\n }, {});\n }\n checkOutputs(outputs) {\n outputs.forEach(name => {\n const [normalizedName] = parseNodeName(name);\n if (!this.graph.nodes[normalizedName]) {\n throw new Error(`The output '${name}' is not found in the graph`);\n }\n });\n }\n}\n//# sourceMappingURL=graph_executor.js.map", "/**\n * Contains global resources of a model.\n */\nexport class ResourceManager {\n constructor(hashTableNameToHandle = {}, hashTableMap = {}) {\n this.hashTableNameToHandle = hashTableNameToHandle;\n this.hashTableMap = hashTableMap;\n }\n /**\n * Register a `HashTable` in the resource manager.\n *\n * The `HashTable` can be retrieved by `resourceManager.getHashTableById`,\n * where id is the table handle tensor's id.\n *\n * @param name Op node name that creates the `HashTable`.\n * @param hashTable The `HashTable` to be added to resource manager.\n */\n addHashTable(name, hashTable) {\n this.hashTableNameToHandle[name] = hashTable.handle;\n this.hashTableMap[hashTable.id] = hashTable;\n }\n /**\n * Get the table handle by node name.\n * @param name Op node name that creates the `HashTable`. This name is also\n * used in the inputs list of lookup and import `HashTable` ops.\n */\n getHashTableHandleByName(name) {\n return this.hashTableNameToHandle[name];\n }\n /**\n * Get the actual `HashTable` by its handle tensor's id.\n * @param id The id of the handle tensor.\n */\n getHashTableById(id) {\n return this.hashTableMap[id];\n }\n /**\n * Dispose `ResourceManager`, including its hashTables and tensors in them.\n */\n dispose() {\n for (const key in this.hashTableMap) {\n this.hashTableMap[key].clearAndClose();\n delete this.hashTableMap[key];\n }\n for (const name in this.hashTableNameToHandle) {\n this.hashTableNameToHandle[name].dispose();\n delete this.hashTableNameToHandle[name];\n }\n }\n}\n//# sourceMappingURL=resource_manager.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { io, Tensor } from '@tensorflow/tfjs-core';\nimport { OperationMapper } from '../operations/operation_mapper';\nimport { GraphExecutor } from './graph_executor';\nimport { ResourceManager } from './resource_manager';\nexport const TFHUB_SEARCH_PARAM = '?tfjs-format=file';\nexport const DEFAULT_MODEL_NAME = 'model.json';\n/**\n * A `tf.GraphModel` is a directed, acyclic graph built from a\n * SavedModel GraphDef and allows inference execution.\n *\n * A `tf.GraphModel` can only be created by loading from a model converted from\n * a [TensorFlow SavedModel](https://www.tensorflow.org/guide/saved_model) using\n * the command line converter tool and loaded via `tf.loadGraphModel`.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\nexport class GraphModel {\n /**\n * @param modelUrl url for the model, or an `io.IOHandler`.\n * @param weightManifestUrl url for the weight file generated by\n * scripts/convert.py script.\n * @param requestOption options for Request, which allows to send credentials\n * and custom headers.\n * @param onProgress Optional, progress callback function, fired periodically\n * before the load is completed.\n */\n constructor(modelUrl, loadOptions = {}) {\n this.modelUrl = modelUrl;\n this.loadOptions = loadOptions;\n this.version = 'n/a';\n if (loadOptions == null) {\n this.loadOptions = {};\n }\n this.resourceManager = new ResourceManager();\n }\n // Returns the version information for the tensorflow model GraphDef.\n get modelVersion() {\n return this.version;\n }\n get inputNodes() {\n return this.executor.inputNodes;\n }\n get outputNodes() {\n return this.executor.outputNodes;\n }\n get inputs() {\n return this.executor.inputs;\n }\n get outputs() {\n return this.executor.outputs;\n }\n get weights() {\n return this.executor.weightMap;\n }\n findIOHandler() {\n const path = this.modelUrl;\n if (path.load != null) {\n // Path is an IO Handler.\n this.handler = path;\n }\n else if (this.loadOptions.requestInit != null) {\n this.handler = io.browserHTTPRequest(path, this.loadOptions);\n }\n else {\n const handlers = io.getLoadHandlers(path, this.loadOptions);\n if (handlers.length === 0) {\n // For backward compatibility: if no load handler can be found,\n // assume it is a relative http path.\n handlers.push(io.browserHTTPRequest(path, this.loadOptions));\n }\n else if (handlers.length > 1) {\n throw new Error(`Found more than one (${handlers.length}) load handlers for ` +\n `URL '${[path]}'`);\n }\n this.handler = handlers[0];\n }\n }\n /**\n * Loads the model and weight files, construct the in memory weight map and\n * compile the inference graph.\n */\n async load() {\n this.findIOHandler();\n if (this.handler.load == null) {\n throw new Error('Cannot proceed with model loading because the IOHandler provided ' +\n 'does not have the `load` method implemented.');\n }\n const artifacts 
= await this.handler.load();\n return this.loadSync(artifacts);\n }\n /**\n * Synchronously construct the in memory weight map and\n * compile the inference graph. Also initialize hashtable if any.\n *\n * @doc {heading: 'Models', subheading: 'Classes', ignoreCI: true}\n */\n loadSync(artifacts) {\n this.artifacts = artifacts;\n const graph = this.artifacts.modelTopology;\n let signature = {};\n if (this.artifacts.userDefinedMetadata != null) {\n signature = // tslint:disable-next-line:no-any\n this.artifacts.userDefinedMetadata.signature;\n }\n this.version = `${graph.versions.producer}.${graph.versions.minConsumer}`;\n const weightMap = io.decodeWeights(this.artifacts.weightData, this.artifacts.weightSpecs);\n this.executor = new GraphExecutor(OperationMapper.Instance.transformGraph(graph, signature));\n this.executor.weightMap = this.convertTensorMapToTensorsMap(weightMap);\n // Attach a model-level resourceManager to each executor to share resources,\n // such as `HashTable`.\n this.executor.resourceManager = this.resourceManager;\n if (artifacts.modelInitializer != null) {\n const initializer = OperationMapper.Instance.transformGraph(artifacts.modelInitializer);\n this.initializer = new GraphExecutor(initializer);\n this.initializer.weightMap = this.executor.weightMap;\n // Attach a model-level resourceManager to the initializer, the\n // hashTables created from when executing the initializer will be stored\n // in the resourceManager.\n this.initializer.resourceManager = this.resourceManager;\n this.initializer.executeAsync({}, []);\n }\n return true;\n }\n /**\n * Save the configuration and/or weights of the GraphModel.\n *\n * An `IOHandler` is an object that has a `save` method of the proper\n * signature defined. The `save` method manages the storing or\n * transmission of serialized data (\"artifacts\") that represent the\n * model's topology and weights onto or via a specific medium, such as\n * file downloads, local storage, IndexedDB in the web browser and HTTP\n * requests to a server. TensorFlow.js provides `IOHandler`\n * implementations for a number of frequently used saving mediums, such as\n * `tf.io.browserDownloads` and `tf.io.browserLocalStorage`. 
See `tf.io`\n * for more details.\n *\n * This method also allows you to refer to certain types of `IOHandler`s\n * as URL-like string shortcuts, such as 'localstorage://' and\n * 'indexeddb://'.\n *\n * Example 1: Save `model`'s topology and weights to browser [local\n * storage](https://developer.mozilla.org/en-US/docs/Web/API/Window/localStorage);\n * then load it back.\n *\n * ```js\n * const modelUrl =\n * 'https://storage.googleapis.com/tfjs-models/savedmodel/mobilenet_v2_1.0_224/model.json';\n * const model = await tf.loadGraphModel(modelUrl);\n * const zeros = tf.zeros([1, 224, 224, 3]);\n * model.predict(zeros).print();\n *\n * const saveResults = await model.save('localstorage://my-model-1');\n *\n * const loadedModel = await tf.loadGraphModel('localstorage://my-model-1');\n * console.log('Prediction from loaded model:');\n * model.predict(zeros).print();\n * ```\n *\n * @param handlerOrURL An instance of `IOHandler` or a URL-like,\n * scheme-based string shortcut for `IOHandler`.\n * @param config Options for saving the model.\n * @returns A `Promise` of `SaveResult`, which summarizes the result of\n * the saving, such as byte sizes of the saved artifacts for the model's\n * topology and weight values.\n *\n * @doc {heading: 'Models', subheading: 'Classes', ignoreCI: true}\n */\n async save(handlerOrURL, config) {\n if (typeof handlerOrURL === 'string') {\n const handlers = io.getSaveHandlers(handlerOrURL);\n if (handlers.length === 0) {\n throw new Error(`Cannot find any save handlers for URL '${handlerOrURL}'`);\n }\n else if (handlers.length > 1) {\n throw new Error(`Found more than one (${handlers.length}) save handlers for ` +\n `URL '${handlerOrURL}'`);\n }\n handlerOrURL = handlers[0];\n }\n if (handlerOrURL.save == null) {\n throw new Error('GraphModel.save() cannot proceed because the IOHandler ' +\n 'provided does not have the `save` attribute defined.');\n }\n return handlerOrURL.save(this.artifacts);\n }\n /**\n * Execute the inference for the input tensors.\n *\n * @param input The input tensors, when there is single input for the model,\n * inputs param should be a `tf.Tensor`. For models with mutliple inputs,\n * inputs params should be in either `tf.Tensor`[] if the input order is\n * fixed, or otherwise NamedTensorMap format.\n *\n * For model with multiple inputs, we recommend you use NamedTensorMap as the\n * input type, if you use `tf.Tensor`[], the order of the array needs to\n * follow the\n * order of inputNodes array. @see {@link GraphModel.inputNodes}\n *\n * You can also feed any intermediate nodes using the NamedTensorMap as the\n * input type. For example, given the graph\n * InputNode => Intermediate => OutputNode,\n * you can execute the subgraph Intermediate => OutputNode by calling\n * model.execute('IntermediateNode' : tf.tensor(...));\n *\n * This is useful for models that uses tf.dynamic_rnn, where the intermediate\n * state needs to be fed manually.\n *\n * For batch inference execution, the tensors for each input need to be\n * concatenated together. For example with mobilenet, the required input shape\n * is [1, 244, 244, 3], which represents the [batch, height, width, channel].\n * If we are provide a batched data of 100 images, the input tensor should be\n * in the shape of [100, 244, 244, 3].\n *\n * @param config Prediction configuration for specifying the batch size and\n * output node names. Currently the batch size option is ignored for graph\n * model.\n *\n * @returns Inference result tensors. 
The output would be single `tf.Tensor`\n * if model has single output node, otherwise Tensor[] or NamedTensorMap[]\n * will be returned for model with multiple outputs.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n predict(inputs, config) {\n return this.execute(inputs, this.outputNodes);\n }\n normalizeInputs(inputs) {\n if (!(inputs instanceof Tensor) && !Array.isArray(inputs)) {\n // The input is already a NamedTensorMap.\n return inputs;\n }\n inputs = Array.isArray(inputs) ? inputs : [inputs];\n if (inputs.length !== this.inputNodes.length) {\n throw new Error('Input tensor count mismatch,' +\n `the graph model has ${this.inputNodes.length} placeholders, ` +\n `while there are ${inputs.length} input tensors.`);\n }\n return this.inputNodes.reduce((map, inputName, i) => {\n map[inputName] = inputs[i];\n return map;\n }, {});\n }\n normalizeOutputs(outputs) {\n outputs = outputs || this.outputNodes;\n return !Array.isArray(outputs) ? [outputs] : outputs;\n }\n /**\n * Executes inference for the model for given input tensors.\n * @param inputs tensor, tensor array or tensor map of the inputs for the\n * model, keyed by the input node names.\n * @param outputs output node name from the Tensorflow model, if no\n * outputs are specified, the default outputs of the model would be used.\n * You can inspect intermediate nodes of the model by adding them to the\n * outputs array.\n *\n * @returns A single tensor if provided with a single output or no outputs\n * are provided and there is only one default output, otherwise return a\n * tensor array. The order of the tensor array is the same as the outputs\n * if provided, otherwise the order of outputNodes attribute of the model.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n execute(inputs, outputs) {\n inputs = this.normalizeInputs(inputs);\n outputs = this.normalizeOutputs(outputs);\n const result = this.executor.execute(inputs, outputs);\n return result.length > 1 ? result : result[0];\n }\n /**\n * Executes inference for the model for given input tensors in async\n * fashion, use this method when your model contains control flow ops.\n * @param inputs tensor, tensor array or tensor map of the inputs for the\n * model, keyed by the input node names.\n * @param outputs output node name from the Tensorflow model, if no outputs\n * are specified, the default outputs of the model would be used. You can\n * inspect intermediate nodes of the model by adding them to the outputs\n * array.\n *\n * @returns A Promise of single tensor if provided with a single output or\n * no outputs are provided and there is only one default output, otherwise\n * return a tensor map.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n async executeAsync(inputs, outputs) {\n inputs = this.normalizeInputs(inputs);\n outputs = this.normalizeOutputs(outputs);\n const result = await this.executor.executeAsync(inputs, outputs);\n return result.length > 1 ? 
result : result[0];\n }\n convertTensorMapToTensorsMap(map) {\n return Object.keys(map).reduce((newMap, key) => {\n newMap[key] = [map[key]];\n return newMap;\n }, {});\n }\n /**\n * Releases the memory used by the weight tensors and resourceManager.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n dispose() {\n this.executor.dispose();\n if (this.initializer) {\n this.initializer.dispose();\n }\n this.resourceManager.dispose();\n }\n}\n/**\n * Load a graph model given a URL to the model definition.\n *\n * Example of loading MobileNetV2 from a URL and making a prediction with a\n * zeros input:\n *\n * ```js\n * const modelUrl =\n * 'https://storage.googleapis.com/tfjs-models/savedmodel/mobilenet_v2_1.0_224/model.json';\n * const model = await tf.loadGraphModel(modelUrl);\n * const zeros = tf.zeros([1, 224, 224, 3]);\n * model.predict(zeros).print();\n * ```\n *\n * Example of loading MobileNetV2 from a TF Hub URL and making a prediction with\n * a zeros input:\n *\n * ```js\n * const modelUrl =\n * 'https://tfhub.dev/google/imagenet/mobilenet_v2_140_224/classification/2';\n * const model = await tf.loadGraphModel(modelUrl, {fromTFHub: true});\n * const zeros = tf.zeros([1, 224, 224, 3]);\n * model.predict(zeros).print();\n * ```\n * @param modelUrl The url or an `io.IOHandler` that loads the model.\n * @param options Options for the HTTP request, which allows to send credentials\n * and custom headers.\n *\n * @doc {heading: 'Models', subheading: 'Loading'}\n */\nexport async function loadGraphModel(modelUrl, options = {}) {\n if (modelUrl == null) {\n throw new Error('modelUrl in loadGraphModel() cannot be null. Please provide a url ' +\n 'or an IOHandler that loads the model');\n }\n if (options == null) {\n options = {};\n }\n if (options.fromTFHub) {\n if (modelUrl.load == null) {\n if (!modelUrl.endsWith('/')) {\n modelUrl = modelUrl + '/';\n }\n modelUrl = `${modelUrl}${DEFAULT_MODEL_NAME}${TFHUB_SEARCH_PARAM}`;\n }\n }\n const model = new GraphModel(modelUrl, options);\n await model.load();\n return model;\n}\n//# sourceMappingURL=graph_model.js.map", "/** @license See the LICENSE file. */\n// This code is auto-generated, do not modify this file!\nconst version = '2.7.0';\nexport { version };\n//# sourceMappingURL=version.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport { GraphModel, loadGraphModel } from './executor/graph_model';\nexport { deregisterOp, registerOp } from './operations/custom_op/register';\nexport { version as version_converter } from './version';\n//# sourceMappingURL=index.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport * as tf from '@tensorflow/tfjs-core';\n/**\n * Apply a mapping function to a nested structure in a recursive manner.\n *\n * The result of the mapping is an object with the same nested structure (i.e.,\n * of arrays and dicts) as the input, except that some subtrees are replaced,\n * according to the results of the mapping function.\n *\n * Mappings are memoized. Thus, if the nested structure contains the same\n * object in multiple positions, the output will contain the same mapped object\n * in those positions. Cycles are not supported, however.\n *\n * @param input: The object to which to apply the mapping function.\n * @param mapFn: A function that expects a single node of the object tree, and\n * returns a `DeepMapResult`. The `DeepMapResult` either provides a\n * replacement value for that node (i.e., replacing the subtree), or indicates\n * that the node should be processed recursively.\n */\nexport function deepMap(input, mapFn) {\n return deepMapInternal(input, mapFn);\n}\n/**\n * @param seen: A Map of known object mappings (i.e., memoized results of\n * `mapFn()`)\n * @param containedIn: An set containing objects on the reference path currently\n * being processed (used to detect cycles).\n */\nfunction deepMapInternal(input, mapFn, seen = new Map(), containedIn = new Set()) {\n if (input == null) {\n return null;\n }\n if (containedIn.has(input)) {\n throw new Error('Circular references are not supported.');\n }\n if (seen.has(input)) {\n return seen.get(input);\n }\n const result = mapFn(input);\n if (result.recurse && result.value !== null) {\n throw new Error('A deep map function may not return both a value and recurse=true.');\n }\n if (!result.recurse) {\n seen.set(input, result.value);\n return result.value;\n }\n else if (isIterable(input)) {\n // tslint:disable-next-line:no-any\n const mappedIterable = Array.isArray(input) ? [] : {};\n containedIn.add(input);\n for (const k in input) {\n const child = input[k];\n const childResult = deepMapInternal(child, mapFn, seen, containedIn);\n mappedIterable[k] = childResult;\n }\n containedIn.delete(input);\n return mappedIterable;\n }\n else {\n throw new Error(`Can't recurse into non-iterable type: ${input}`);\n }\n}\n// TODO(soergel, kangyizhang) Reconsider naming of deepZip() to avoid confusion\n// with zip()\n/**\n * Zip nested structures together in a recursive manner.\n *\n * This has the effect of transposing or pivoting data, e.g. converting it from\n * a row-major representation to a column-major representation.\n *\n * For example, `deepZip([{a: 1, b: 2}, {a: 3, b: 4}])` returns\n * `{a: [1, 3], b: [2, 4]}`.\n *\n * The inputs should all have the same nested structure (i.e., of arrays and\n * dicts). 
The result is a single object with the same nested structure, where\n * the leaves are arrays collecting the values of the inputs at that location\n * (or, optionally, the result of a custom function applied to those arrays).\n *\n * @param inputs: An array of the objects to zip together.\n * @param zipFn: (optional) A function that expects an array of elements at a\n * single node of the object tree, and returns a `DeepMapResult`. The\n * `DeepMapResult` either provides a result value for that node (i.e.,\n * representing the subtree), or indicates that the node should be processed\n * recursively. The default zipFn recurses as far as possible and places\n * arrays at the leaves.\n */\nexport function deepZip(inputs, zipFn = zipToList) {\n return deepZipInternal(inputs, zipFn);\n}\n/**\n * @param containedIn: An set containing objects on the reference path currently\n * being processed (used to detect cycles).\n */\nfunction deepZipInternal(inputs, zipFn, containedIn = new Set()) {\n // The recursion follows the structure of input 0; it's assumed that all the\n // other inputs have the same structure.\n const input = inputs[0];\n if (containedIn.has(input)) {\n throw new Error('Circular references are not supported.');\n }\n const result = zipFn(inputs);\n if (result.recurse && result.value !== null) {\n throw new Error('A deep zip function may not return both a value and recurse=true.');\n }\n if (!result.recurse) {\n return result.value;\n }\n else if (isIterable(input)) {\n // tslint:disable-next-line:no-any\n const mappedIterable = Array.isArray(input) ? [] : {};\n containedIn.add(input);\n for (const k in input) {\n const children = inputs.map(x => x[k]);\n const childResult = deepZipInternal(children, zipFn, containedIn);\n mappedIterable[k] = childResult;\n }\n containedIn.delete(input);\n return mappedIterable;\n }\n else {\n throw new Error(`Can't recurse into non-iterable type: ${input}`);\n }\n}\n// tslint:disable-next-line:no-any\nexport function zipToList(x) {\n if (x === null) {\n return null;\n }\n // TODO(soergel): validate array type?\n if (isIterable(x[0])) {\n return { value: null, recurse: true };\n }\n else {\n return { value: x, recurse: false };\n }\n}\n/**\n * Apply an async mapping function to a nested structure in a recursive manner.\n *\n * This first creates a nested structure of Promises, and then awaits all of\n * those, resulting in a single Promise for a resolved nested structure.\n *\n * The result of the mapping is an object with the same nested structure (i.e.,\n * of arrays and dicts) as the input, except that some subtrees are replaced,\n * according to the results of the mapping function.\n *\n * Mappings are memoized. Thus, if the nested structure contains the same\n * object in multiple positions, the output will contain the same mapped object\n * in those positions. Cycles are not supported, however.\n *\n * @param input: The object to which to apply the mapping function.\n * @param mapFn: A function that expects a single node of the object tree, and\n * returns a `DeepMapAsyncResult`. The `DeepMapAsyncResult` either provides\n * a `Promise` for a replacement value for that node (i.e., replacing the\n * subtree), or indicates that the node should be processed recursively. 
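Editor's note: a small sketch of the `DeepMapResult` contract shared by `deepMap` and `deepZip` (added for illustration; the relative import path is assumed from the layout of these sources). A `mapFn` either supplies a replacement `value` for a node or asks for recursion, never both.

```js
import { deepMap, deepZip } from './deep_map';

// Double every numeric leaf; recurse into arrays and plain objects.
function doubleNumbers(node) {
  if (typeof node === 'number') {
    return { value: node * 2, recurse: false };
  }
  return { value: null, recurse: true };
}

console.log(deepMap({ a: 1, b: [2, 3] }, doubleNumbers));
// -> { a: 2, b: [ 4, 6 ] }

// deepZip pivots row-major data to column-major form, as the doc above notes.
console.log(deepZip([{ a: 1, b: 2 }, { a: 3, b: 4 }]));
// -> { a: [ 1, 3 ], b: [ 2, 4 ] }
```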
Note\n * that the decision whether or not to recurse must be made immediately; only\n * the mapped value may be promised.\n */\nexport async function deepMapAndAwaitAll(input, mapFn) {\n const seen = new Map();\n // First do a normal deepMap, collecting Promises in 'seen' as a side effect.\n deepMapInternal(input, mapFn, seen);\n // Replace the Promises in 'seen' in place.\n // Note TypeScript provides no async map iteration, and regular map iteration\n // is broken too, so sadly we have to do Array.from() to make it work.\n // (There's no advantage to Promise.all(), and that would be tricky anyway.)\n for (const key of Array.from(seen.keys())) {\n const value = seen.get(key);\n if (tf.util.isPromise(value)) {\n const mappedValue = await value;\n seen.set(key, mappedValue);\n }\n }\n // Normal deepMap again, this time filling in the resolved values.\n // It's unfortunate that we have to do two passes.\n // TODO(soergel): test performance and think harder about a fast solution.\n const result = deepMapInternal(input, mapFn, seen);\n return result;\n}\n/**\n * Determine whether the argument is iterable.\n *\n * @returns true if the argument is an array or any non-Tensor object.\n */\n// tslint:disable-next-line:no-any\nexport function isIterable(obj) {\n return obj != null && (!ArrayBuffer.isView(obj)) &&\n (Array.isArray(obj) ||\n (typeof obj === 'object' && !(obj instanceof tf.Tensor)));\n}\n/**\n * Determine whether the argument can be converted to Tensor.\n *\n * Tensors, primitives, arrays, and TypedArrays all qualify; anything else does\n * not.\n *\n * @returns true if the argument can be converted to Tensor.\n */\n// tslint:disable-next-line:no-any\nexport function canTensorify(obj) {\n return obj == null || isPrimitive(obj) || Array.isArray(obj) ||\n (typeof obj === 'object' && (obj instanceof tf.Tensor)) ||\n tf.util.isTypedArray(obj);\n}\n/**\n * Returns true if the given `value` is a primitive type. Otherwise returns\n * false. This is equivalant to node util.isPrimitive\n */\nfunction isPrimitive(value) {\n return (value === null ||\n (typeof value !== 'object' && typeof value !== 'function'));\n}\n//# sourceMappingURL=deep_map.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport * as tf from '@tensorflow/tfjs-core';\nimport { deepMap, isIterable } from './deep_map';\nexport function deepClone(container) {\n return deepMap(container, cloneIfTensor);\n}\n// tslint:disable-next-line: no-any\nfunction cloneIfTensor(item) {\n if (item instanceof tf.Tensor) {\n return ({ value: item.clone(), recurse: false });\n }\n else if (isIterable(item)) {\n return { value: null, recurse: true };\n }\n else {\n return { value: item, recurse: false };\n }\n}\n//# sourceMappingURL=deep_clone.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\n/**\n * A ring buffer, providing O(1) FIFO, LIFO, and related operations.\n */\nexport class RingBuffer {\n /**\n * Constructs a `RingBuffer`.\n * @param capacity The number of items that the buffer can accomodate.\n */\n constructor(capacity) {\n this.capacity = capacity;\n // Note we store the indices in the range 0 <= index < 2*capacity.\n // This allows us to distinguish the full from the empty case.\n // See https://www.snellman.net/blog/archive/2016-12-13-ring-buffers/\n this.begin = 0; // inclusive\n this.end = 0; // exclusive\n if (capacity == null) {\n throw new RangeError('Can\\'t create a ring buffer of unknown capacity.');\n }\n if (capacity < 1) {\n throw new RangeError('Can\\'t create ring buffer of capacity < 1.');\n }\n this.data = new Array(capacity);\n this.doubledCapacity = 2 * capacity;\n }\n /**\n * Map any index into the range 0 <= index < 2*capacity.\n */\n wrap(index) {\n // don't trust % on negative numbers\n while (index < 0) {\n index += this.doubledCapacity;\n }\n return index % this.doubledCapacity;\n }\n get(index) {\n if (index < 0) {\n throw new RangeError('Can\\'t get item at a negative index.');\n }\n return this.data[index % this.capacity];\n }\n set(index, value) {\n if (index < 0) {\n throw new RangeError('Can\\'t set item at a negative index.');\n }\n this.data[index % this.capacity] = value;\n }\n /**\n * Returns the current number of items in the buffer.\n */\n length() {\n let length = this.end - this.begin;\n if (length < 0) {\n length = this.doubledCapacity + length;\n }\n return length;\n }\n /**\n * Reports whether the buffer is full.\n * @returns true if the number of items in the buffer equals its capacity, and\n * false otherwise.\n */\n isFull() {\n return this.length() === this.capacity;\n }\n /**\n * Reports whether the buffer is empty.\n * @returns true if the number of items in the buffer equals zero, and\n * false otherwise.\n */\n isEmpty() {\n return this.length() === 0;\n }\n /**\n * Adds an item to the end of the buffer.\n */\n push(value) {\n if (this.isFull()) {\n throw new RangeError('Ring buffer is full.');\n }\n this.set(this.end, value);\n this.end = this.wrap(this.end + 1);\n }\n /**\n * Adds many items to the end of the buffer, in order.\n */\n pushAll(values) {\n for (const value of values) {\n this.push(value);\n }\n }\n /**\n * Removes and returns the last item in the buffer.\n */\n pop() {\n if (this.isEmpty()) {\n throw new RangeError('Ring buffer is empty.');\n }\n this.end = this.wrap(this.end - 1);\n const result = this.get(this.end);\n this.set(this.end, undefined);\n return result;\n }\n /**\n * Adds an item to the beginning of the buffer.\n */\n unshift(value) {\n if (this.isFull()) {\n throw new RangeError('Ring buffer is full.');\n }\n this.begin = this.wrap(this.begin - 1);\n this.set(this.begin, value);\n }\n /**\n * Removes and returns the 
first item in the buffer.\n */\n shift() {\n if (this.isEmpty()) {\n throw new RangeError('Ring buffer is empty.');\n }\n const result = this.get(this.begin);\n this.set(this.begin, undefined);\n this.begin = this.wrap(this.begin + 1);\n return result;\n }\n /**\n * Removes and returns a specific item in the buffer, and moves the last item\n * to the vacated slot. This is useful for implementing a shuffling stream.\n * Note that this operation necessarily scrambles the original order.\n *\n * @param relativeIndex: the index of the item to remove, relative to the\n * first item in the buffer (e.g., hiding the ring nature of the underlying\n * storage).\n */\n shuffleExcise(relativeIndex) {\n if (this.isEmpty()) {\n throw new RangeError('Ring buffer is empty.');\n }\n const index = this.wrap(this.begin + relativeIndex);\n const result = this.get(index);\n this.set(index, this.pop());\n return result;\n }\n}\n//# sourceMappingURL=ring_buffer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { RingBuffer } from './ring_buffer';\nexport class GrowingRingBuffer extends RingBuffer {\n /**\n * Constructs a `GrowingRingBuffer`.\n */\n constructor() {\n super(GrowingRingBuffer.INITIAL_CAPACITY);\n }\n isFull() {\n return false;\n }\n push(value) {\n if (super.isFull()) {\n this.expand();\n }\n super.push(value);\n }\n unshift(value) {\n if (super.isFull()) {\n this.expand();\n }\n super.unshift(value);\n }\n /**\n * Doubles the capacity of the buffer.\n */\n expand() {\n const newCapacity = this.capacity * 2;\n const newData = new Array(newCapacity);\n const len = this.length();\n // Rotate the buffer to start at index 0 again, since we can't just\n // allocate more space at the end.\n for (let i = 0; i < len; i++) {\n newData[i] = this.get(this.wrap(this.begin + i));\n }\n this.data = newData;\n this.capacity = newCapacity;\n this.doubledCapacity = 2 * this.capacity;\n this.begin = 0;\n this.end = len;\n }\n}\nGrowingRingBuffer.INITIAL_CAPACITY = 32;\n//# sourceMappingURL=growing_ring_buffer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport * as tf from '@tensorflow/tfjs-core';\nimport * as seedrandom from 'seedrandom';\nimport { deepClone } from '../util/deep_clone';\nimport { deepMapAndAwaitAll, deepZip, zipToList } from '../util/deep_map';\nimport { GrowingRingBuffer } from '../util/growing_ring_buffer';\nimport { RingBuffer } from '../util/ring_buffer';\n// Here we implement a simple asynchronous iterator.\n// This lets us avoid using either third-party stream libraries or\n// recent TypeScript language support requiring polyfills.\n/**\n * Create a `LazyIterator` from an array of items.\n */\nexport function iteratorFromItems(items) {\n return new ArrayIterator(items);\n}\n/**\n * Create a `LazyIterator` of incrementing integers.\n */\nexport function iteratorFromIncrementing(start) {\n let i = start;\n return iteratorFromFunction(() => ({ value: i++, done: false }));\n}\n/**\n * Create a `LazyIterator` from a function.\n *\n * ```js\n * let i = -1;\n * const func = () =>\n * ++i < 5 ? {value: i, done: false} : {value: null, done: true};\n * const iter = tf.data.iteratorFromFunction(func);\n * await iter.forEachAsync(e => console.log(e));\n * ```\n *\n * @param func A function that produces data on each call.\n */\nexport function iteratorFromFunction(func) {\n return new FunctionCallIterator(func);\n}\n/**\n * Create a `LazyIterator` by concatenating underlying streams, which are\n * themselves provided as a stream.\n *\n * This can also be thought of as a \"stream flatten\" operation.\n *\n * @param baseIterators A stream of streams to be concatenated.\n * @param baseErrorHandler An optional function that can intercept `Error`s\n * raised during a `next()` call on the base stream. This function can decide\n * whether the error should be propagated, whether the error should be\n * ignored, or whether the base stream should be terminated.\n */\nexport function iteratorFromConcatenated(baseIterators, baseErrorHandler) {\n return new ChainedIterator(baseIterators, baseErrorHandler);\n}\n/**\n * Create a `LazyIterator` by concatenating streams produced by calling a\n * stream-generating function a given number of times.\n *\n * Since a `LazyIterator` is read-once, it cannot be repeated, but this\n * function can be used to achieve a similar effect:\n *\n * LazyIterator.ofConcatenatedFunction(() => new MyIterator(), 6);\n *\n * @param iteratorFunc: A function that produces a new stream on each call.\n * @param count: The number of times to call the function.\n * @param baseErrorHandler An optional function that can intercept `Error`s\n * raised during a `next()` call on the base stream. 
This function can decide\n * whether the error should be propagated, whether the error should be\n * ignored, or whether the base stream should be terminated.\n */\nexport function iteratorFromConcatenatedFunction(iteratorFunc, count, baseErrorHandler) {\n return iteratorFromConcatenated(iteratorFromFunction(iteratorFunc).take(count), baseErrorHandler);\n}\n/**\n * Create a `LazyIterator` by zipping together an array, dict, or nested\n * structure of `LazyIterator`s (and perhaps additional constants).\n *\n * The underlying streams must provide elements in a consistent order such\n * that they correspond.\n *\n * Typically, the underlying streams should have the same number of\n * elements. If they do not, the behavior is determined by the\n * `mismatchMode` argument.\n *\n * The nested structure of the `iterators` argument determines the\n * structure of elements in the resulting iterator.\n *\n * @param iterators: An array or object containing LazyIterators at the\n * leaves.\n * @param mismatchMode: Determines what to do when one underlying iterator\n * is exhausted before the others. `ZipMismatchMode.FAIL` (the default)\n * causes an error to be thrown in this case. `ZipMismatchMode.SHORTEST`\n * causes the zipped iterator to terminate with the furst underlying\n * streams, so elements remaining on the longer streams are ignored.\n * `ZipMismatchMode.LONGEST` causes the zipped stream to continue, filling\n * in nulls for the exhausted streams, until all streams are exhausted.\n */\nexport function iteratorFromZipped(iterators, mismatchMode = ZipMismatchMode.FAIL) {\n return new ZipIterator(iterators, mismatchMode);\n}\n/**\n * An asynchronous iterator, providing lazy access to a potentially\n * unbounded stream of elements.\n *\n * Iterator can be obtained from a dataset:\n * `const iter = await dataset.iterator();`\n */\nexport class LazyIterator {\n /**\n * Collect all remaining elements of a bounded stream into an array.\n * Obviously this will succeed only for small streams that fit in memory.\n * Useful for testing.\n *\n * @returns A Promise for an array of stream elements, which will resolve\n * when the stream is exhausted.\n */\n async toArray() {\n const result = [];\n let x = await this.next();\n while (!x.done) {\n result.push(x.value);\n x = await this.next();\n }\n return result;\n }\n /**\n * Collect all elements of this dataset into an array with prefetching 100\n * elements. This is useful for testing, because the prefetch changes the\n * order in which the Promises are resolved along the processing pipeline.\n * This may help expose bugs where results are dependent on the order of\n * Promise resolution rather than on the logical order of the stream (i.e.,\n * due to hidden mutable state).\n *\n * @returns A Promise for an array of stream elements, which will resolve\n * when the stream is exhausted.\n */\n async toArrayForTest() {\n const stream = this.prefetch(100);\n const result = [];\n let x = await stream.next();\n while (!x.done) {\n result.push(x.value);\n x = await stream.next();\n }\n return result;\n }\n /**\n * Draw items from the stream until it is exhausted.\n *\n * This can be useful when the stream has side effects but no output. 
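Editor's note: a hedged sketch of zipping two iterators with `iteratorFromZipped` (imports assume this module's own layout; these helpers are internal to tfjs-data rather than part of the public `tf.data` surface).

```js
import { iteratorFromItems, iteratorFromZipped, ZipMismatchMode } from './lazy_iterator';

async function run() {
  const letters = iteratorFromItems(['a', 'b', 'c']);
  const numbers = iteratorFromItems([1, 2, 3, 4]);

  // SHORTEST terminates when the first underlying stream is exhausted,
  // so the trailing 4 is never emitted.
  const zipped = iteratorFromZipped({ letters, numbers }, ZipMismatchMode.SHORTEST);
  console.log(await zipped.toArray());
  // -> [ { letters: 'a', numbers: 1 },
  //      { letters: 'b', numbers: 2 },
  //      { letters: 'c', numbers: 3 } ]
}
run();
```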
In\n * that case, calling this function guarantees that the stream will be\n * fully processed.\n */\n async resolveFully() {\n let x = await this.next();\n while (!x.done) {\n x = await this.next();\n }\n }\n /**\n * Draw items from the stream until it is exhausted, or a predicate fails.\n *\n * This can be useful when the stream has side effects but no output. In\n * that case, calling this function guarantees that the stream will be\n * fully processed.\n */\n async resolveWhile(predicate) {\n let x = await this.next();\n let shouldContinue = predicate(x.value);\n while ((!x.done) && shouldContinue) {\n x = await this.next();\n shouldContinue = predicate(x.value);\n }\n }\n /**\n * Handles errors thrown on this stream using a provided handler function.\n *\n * @param handler A function that handles any `Error` thrown during a `next()`\n * call and returns true if the stream should continue (dropping the failed\n * call) or false if the stream should quietly terminate. If the handler\n * itself throws (or rethrows) an `Error`, that will be propagated.\n *\n * @returns A `LazyIterator` of elements passed through from upstream,\n * possibly filtering or terminating on upstream `next()` calls that\n * throw an `Error`.\n */\n handleErrors(handler) {\n return new ErrorHandlingLazyIterator(this, handler);\n }\n // TODO(soergel): Implement reduce() etc.\n /**\n * Filters this stream according to `predicate`.\n *\n * @param predicate A function mapping a stream element to a boolean or a\n * `Promise` for one.\n *\n * @returns A `LazyIterator` of elements for which the predicate was true.\n */\n filter(predicate) {\n return new FilterIterator(this, predicate);\n }\n /**\n * Maps this stream through a 1-to-1 transform.\n *\n * @param transform A function mapping a stream element to a transformed\n * element.\n *\n * @returns A `LazyIterator` of transformed elements.\n */\n map(transform) {\n return new MapIterator(this, transform);\n }\n /**\n * Maps this stream through an async 1-to-1 transform.\n *\n * @param transform A function mapping a stream element to a `Promise` for a\n * transformed stream element.\n *\n * @returns A `LazyIterator` of transformed elements.\n */\n mapAsync(transform) {\n return new AsyncMapIterator(this, transform);\n }\n /**\n * Maps this stream through a 1-to-1 transform, forcing serial execution.\n *\n * @param transform A function mapping a stream element to a transformed\n * element.\n *\n * @returns A `LazyIterator` of transformed elements.\n */\n serialMapAsync(transform) {\n return new AsyncMapIterator(this, transform).serial();\n }\n /**\n * Maps this stream through a 1-to-many transform.\n *\n * @param transform A function mapping a stream element to an array of\n * transformed elements.\n *\n * @returns A `DataStream` of transformed elements.\n */\n flatmap(transform) {\n return new FlatmapIterator(this, transform);\n }\n /**\n * Apply a function to every element of the stream.\n *\n * @param f A function to apply to each stream element.\n */\n async forEachAsync(f) {\n return this.map(f).resolveFully();\n }\n /**\n * Apply a function to every element of the stream, forcing serial execution.\n *\n * @param f A function to apply to each stream element. 
Should return 'true'\n * to indicate that the stream should continue, or 'false' to cause it to\n * terminate.\n */\n async serialForEach(f) {\n return this.serialMapAsync(f).resolveWhile(x => (x === true));\n }\n /**\n * Groups elements into batches, represented as arrays of elements.\n *\n * We can think of the elements of this iterator as 'rows' (even if they are\n * nested structures). By the same token, consecutive values for a given\n * key within the elements form a 'column'. This matches the usual sense of\n * 'row' and 'column' when processing tabular data (e.g., parsing a CSV).\n *\n * Thus, \"Row-major\" means that the resulting batch is simply a collection of\n * rows: `[row1, row2, row3, ...]`. This is contrast to the column-major\n * form, which is needed for vectorized computation.\n *\n * @param batchSize The number of elements desired per batch.\n * @param smallLastBatch Whether to emit the final batch when it has fewer\n * than batchSize elements. Default true.\n * @returns A `LazyIterator` of batches of elements, represented as arrays\n * of the original element type.\n */\n rowMajorBatch(batchSize, smallLastBatch = true) {\n return new RowMajorBatchIterator(this, batchSize, smallLastBatch);\n }\n /**\n * Groups elements into batches, represented in column-major form.\n *\n * We can think of the elements of this iterator as 'rows' (even if they are\n * nested structures). By the same token, consecutive values for a given\n * key within the elements form a 'column'. This matches the usual sense of\n * 'row' and 'column' when processing tabular data (e.g., parsing a CSV).\n *\n * Thus, \"column-major\" means that the resulting batch is a (potentially\n * nested) structure representing the columns. Each column entry, then,\n * contains a collection of the values found in that column for a range of\n * input elements. This representation allows for vectorized computation, in\n * contrast to the row-major form.\n *\n * The inputs should all have the same nested structure (i.e., of arrays and\n * dicts). The result is a single object with the same nested structure,\n * where the leaves are arrays collecting the values of the inputs at that\n * location (or, optionally, the result of a custom function applied to those\n * arrays).\n *\n * @param batchSize The number of elements desired per batch.\n * @param smallLastBatch Whether to emit the final batch when it has fewer\n * than batchSize elements. Default true.\n * @param zipFn: (optional) A function that expects an array of elements at a\n * single node of the object tree, and returns a `DeepMapResult`. The\n * `DeepMapResult` either provides a result value for that node (i.e.,\n * representing the subtree), or indicates that the node should be processed\n * recursively. 
The default zipFn recurses as far as possible and places\n * arrays at the leaves.\n * @returns A `LazyIterator` of batches of elements, represented as an object\n * with collections at the leaves.\n */\n columnMajorBatch(batchSize, smallLastBatch = true, \n // tslint:disable-next-line:no-any\n zipFn = zipToList) {\n // First collect the desired number of input elements as a row-major batch.\n const rowBatches = this.rowMajorBatch(batchSize, smallLastBatch);\n // Now 'rotate' or 'pivot' the data, collecting all values from each column\n // in the batch (i.e., for each key within the elements) into an array.\n return rowBatches.map(x => deepZip(x, zipFn));\n }\n /**\n * Concatenate this `LazyIterator` with another.\n *\n * @param iterator A `LazyIterator` to be concatenated onto this one.\n * @param baseErrorHandler An optional function that can intercept `Error`s\n * raised during a `next()` call on the base stream. This function can\n * decide whether the error should be propagated, whether the error should\n * be ignored, or whether the base stream should be terminated.\n * @returns A `LazyIterator`.\n */\n concatenate(iterator, baseErrorHandler) {\n return new ChainedIterator(iteratorFromItems([this, iterator]), baseErrorHandler);\n }\n /**\n * Limits this stream to return at most `count` items.\n *\n * @param count The maximum number of items to provide from the stream. If\n * a negative or undefined value is given, the entire stream is returned\n * unaltered.\n */\n take(count) {\n if (count < 0 || count == null) {\n return this;\n }\n return new TakeIterator(this, count);\n }\n /**\n * Skips the first `count` items in this stream.\n *\n * @param count The number of items to skip. If a negative or undefined\n * value is given, the entire stream is returned unaltered.\n */\n skip(count) {\n if (count < 0 || count == null) {\n return this;\n }\n return new SkipIterator(this, count);\n }\n /**\n * Prefetch the first `bufferSize` items in this stream.\n *\n * Note this prefetches Promises, but makes no guarantees about when those\n * Promises resolve.\n *\n * @param bufferSize: An integer specifying the number of elements to be\n * prefetched.\n */\n prefetch(bufferSize) {\n return new PrefetchIterator(this, bufferSize);\n }\n // TODO(soergel): deep sharded shuffle, where supported\n /**\n * Randomly shuffles the elements of this stream.\n *\n * @param bufferSize: An integer specifying the number of elements from\n * this stream from which the new stream will sample.\n * @param seed: (Optional.) An integer specifying the random seed that\n * will be used to create the distribution.\n */\n shuffle(windowSize, seed) {\n return new ShuffleIterator(this, windowSize, seed);\n }\n /**\n * Force an iterator to execute serially: each next() call will await the\n * prior one, so that they cannot execute concurrently.\n */\n serial() {\n return new SerialIterator(this);\n }\n}\n// ============================================================================\n// The following private classes serve to implement the chainable methods\n// on LazyIterator. 
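Editor's note: a minimal sketch of chaining the methods documented above on a single `LazyIterator` (import path assumed from this module's layout).

```js
import { iteratorFromIncrementing } from './lazy_iterator';

async function run() {
  // 0, 1, 2, ... -> keep evens -> square -> first five elements.
  const it = iteratorFromIncrementing(0)
      .filter(x => x % 2 === 0)
      .map(x => x * x)
      .take(5);
  console.log(await it.toArray());  // -> [ 0, 4, 16, 36, 64 ]
}
run();
```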
Unfortunately they can't be placed in separate files,\n// due to resulting trouble with circular imports.\n// ============================================================================\n// Iterators that just extend LazyIterator directly\n// ============================================================================\nclass ArrayIterator extends LazyIterator {\n constructor(items) {\n super();\n this.items = items;\n this.trav = 0;\n }\n summary() {\n return `Array of ${this.items.length} items`;\n }\n async next() {\n if (this.trav >= this.items.length) {\n return { value: null, done: true };\n }\n const item = this.items[this.trav];\n this.trav++;\n return { value: deepClone(item), done: false };\n }\n}\nclass FunctionCallIterator extends LazyIterator {\n constructor(nextFn) {\n super();\n this.nextFn = nextFn;\n }\n summary() {\n return `Function call`;\n }\n async next() {\n try {\n return this.nextFn();\n }\n catch (e) {\n // Modify the error message but leave the stack trace intact\n e.message =\n `Error thrown while iterating through a dataset: ${e.message}`;\n throw e;\n }\n }\n}\nclass SerialIterator extends LazyIterator {\n constructor(upstream) {\n super();\n this.upstream = upstream;\n this.lastRead = Promise.resolve({ value: null, done: false });\n }\n summary() {\n return `${this.upstream.summary()} -> Serial`;\n }\n async next() {\n // This sets this.lastRead to a new Promise right away, as opposed to\n // saying `await this.lastRead; this.lastRead = this.serialNext();` which\n // would not work because this.nextRead would be updated only after the\n // promise resolves.\n this.lastRead = this.lastRead.then(() => this.serialNext());\n return this.lastRead;\n }\n async serialNext() {\n return this.upstream.next();\n }\n}\nclass SkipIterator extends LazyIterator {\n constructor(upstream, maxCount) {\n super();\n this.upstream = upstream;\n this.maxCount = maxCount;\n // Local state that should not be clobbered by out-of-order execution.\n this.count = 0;\n this.lastRead = Promise.resolve({ value: null, done: false });\n }\n summary() {\n return `${this.upstream.summary()} -> Skip`;\n }\n async next() {\n // This sets this.lastRead to a new Promise right away, as opposed to\n // saying `await this.lastRead; this.lastRead = this.serialNext();` which\n // would not work because this.nextRead would be updated only after the\n // promise resolves.\n this.lastRead = this.lastRead.then(() => this.serialNext());\n return this.lastRead;\n }\n async serialNext() {\n // TODO(soergel): consider tradeoffs of reading in parallel, eg.\n // collecting next() promises in an Array and then waiting for\n // Promise.all() of those. Benefit: pseudo-parallel execution. 
Drawback:\n // maybe delayed GC.\n while (this.count++ < this.maxCount) {\n const skipped = await this.upstream.next();\n // short-circuit if upstream is already empty\n if (skipped.done) {\n return skipped;\n }\n tf.dispose(skipped.value);\n }\n return this.upstream.next();\n }\n}\nclass TakeIterator extends LazyIterator {\n constructor(upstream, maxCount) {\n super();\n this.upstream = upstream;\n this.maxCount = maxCount;\n this.count = 0;\n }\n summary() {\n return `${this.upstream.summary()} -> Take`;\n }\n async next() {\n if (this.count++ >= this.maxCount) {\n return { value: null, done: true };\n }\n return this.upstream.next();\n }\n}\n// Note this batch just groups items into row-wise element arrays.\n// Rotating these to a column-wise representation happens only at the dataset\n// level.\nclass RowMajorBatchIterator extends LazyIterator {\n constructor(upstream, batchSize, enableSmallLastBatch = true) {\n super();\n this.upstream = upstream;\n this.batchSize = batchSize;\n this.enableSmallLastBatch = enableSmallLastBatch;\n this.lastRead = Promise.resolve({ value: null, done: false });\n }\n summary() {\n return `${this.upstream.summary()} -> RowMajorBatch`;\n }\n async next() {\n // This sets this.lastRead to a new Promise right away, as opposed to\n // saying `await this.lastRead; this.lastRead = this.serialNext();` which\n // would not work because this.nextRead would be updated only after the\n // promise resolves.\n this.lastRead = this.lastRead.then(() => this.serialNext());\n return this.lastRead;\n }\n async serialNext() {\n const batch = [];\n while (batch.length < this.batchSize) {\n const item = await this.upstream.next();\n if (item.done) {\n if (this.enableSmallLastBatch && batch.length > 0) {\n return { value: batch, done: false };\n }\n return { value: null, done: true };\n }\n batch.push(item.value);\n }\n return { value: batch, done: false };\n }\n}\nclass FilterIterator extends LazyIterator {\n constructor(upstream, predicate) {\n super();\n this.upstream = upstream;\n this.predicate = predicate;\n this.lastRead = Promise.resolve({ value: null, done: false });\n }\n summary() {\n return `${this.upstream.summary()} -> Filter`;\n }\n async next() {\n // This sets this.lastRead to a new Promise right away, as opposed to\n // saying `await this.lastRead; this.lastRead = this.serialNext();` which\n // would not work because this.nextRead would be updated only after the\n // promise resolves.\n this.lastRead = this.lastRead.then(() => this.serialNext());\n return this.lastRead;\n }\n async serialNext() {\n while (true) {\n const item = await this.upstream.next();\n if (item.done || this.predicate(item.value)) {\n return item;\n }\n tf.dispose(item.value);\n }\n }\n}\nclass MapIterator extends LazyIterator {\n constructor(upstream, transform) {\n super();\n this.upstream = upstream;\n this.transform = transform;\n }\n summary() {\n return `${this.upstream.summary()} -> Map`;\n }\n async next() {\n const item = await this.upstream.next();\n if (item.done) {\n return { value: null, done: true };\n }\n const inputTensors = tf.tensor_util.getTensorsInContainer(item.value);\n // Careful: the transform may mutate the item in place.\n // That's why we have to remember the input Tensors above, and then\n // below dispose only those that were not passed through to the output.\n // Note too that the transform function is responsible for tidying\n // any intermediate Tensors. 
Here we are concerned only about the\n // inputs.\n const mapped = this.transform(item.value);\n const outputTensors = tf.tensor_util.getTensorsInContainer(mapped);\n // TODO(soergel) faster intersection\n // TODO(soergel) move to tf.disposeExcept(in, out)?\n for (const t of inputTensors) {\n if (!tf.tensor_util.isTensorInList(t, outputTensors)) {\n t.dispose();\n }\n }\n return { value: mapped, done: false };\n }\n}\nclass ErrorHandlingLazyIterator extends LazyIterator {\n constructor(upstream, handler) {\n super();\n this.upstream = upstream;\n this.handler = handler;\n this.count = 0;\n this.lastRead = Promise.resolve({ value: null, done: false });\n }\n summary() {\n return `${this.upstream.summary()} -> handleErrors`;\n }\n async next() {\n // This sets this.lastRead to a new Promise right away, as opposed to\n // saying `await this.lastRead; this.lastRead = this.serialNext();` which\n // would not work because this.nextRead would be updated only after the\n // promise resolves.\n this.lastRead = this.lastRead.then(() => this.serialNext());\n return this.lastRead;\n }\n async serialNext() {\n while (true) {\n try {\n return await this.upstream.next();\n }\n catch (e) {\n if (!this.handler(e)) {\n return { value: null, done: true };\n }\n // If the handler returns true, loop and fetch the next upstream item.\n // If the upstream iterator throws an endless stream of errors, and if\n // the handler says to ignore them, then we loop forever here. That is\n // the correct behavior-- it's up to the handler to decide when to stop.\n }\n }\n }\n}\nclass AsyncMapIterator extends LazyIterator {\n constructor(upstream, transform) {\n super();\n this.upstream = upstream;\n this.transform = transform;\n }\n summary() {\n return `${this.upstream.summary()} -> AsyncMap`;\n }\n async next() {\n const item = await this.upstream.next();\n if (item.done) {\n return { value: null, done: true };\n }\n const inputTensors = tf.tensor_util.getTensorsInContainer(item.value);\n // Careful: the transform may mutate the item in place.\n // That's why we have to remember the input Tensors above, and then\n // below dispose only those that were not passed through to the output.\n // Note too that the transform function is responsible for tidying\n // any intermediate Tensors. Here we are concerned only about the\n // inputs.\n const mapped = await this.transform(item.value);\n const outputTensors = tf.tensor_util.getTensorsInContainer(mapped);\n // TODO(soergel) faster intersection\n // TODO(soergel) move to tf.disposeExcept(in, out)?\n for (const t of inputTensors) {\n if (!tf.tensor_util.isTensorInList(t, outputTensors)) {\n t.dispose();\n }\n }\n return { value: mapped, done: false };\n }\n}\n// Iterators that maintain a queue of pending items\n// ============================================================================\n/**\n * A base class for transforming streams that operate by maintaining an\n * output queue of elements that are ready to return via next(). 
This is\n * commonly required when the transformation is 1-to-many: A call to next()\n * may trigger a call to the underlying stream, which will produce many\n * mapped elements of this stream-- of which we need to return only one, so\n * we have to queue the rest.\n */\nexport class OneToManyIterator extends LazyIterator {\n constructor() {\n super();\n this.outputQueue = new GrowingRingBuffer();\n this.lastRead = Promise.resolve({ value: null, done: false });\n }\n async next() {\n // This sets this.lastRead to a new Promise right away, as opposed to\n // saying `await this.lastRead; this.lastRead = this.serialNext();` which\n // would not work because this.nextRead would be updated only after the\n // promise resolves.\n this.lastRead = this.lastRead.then(() => this.serialNext());\n return this.lastRead;\n }\n async serialNext() {\n // Fetch so that the queue contains at least one item if possible.\n // If the upstream source is exhausted, AND there are no items left in\n // the output queue, then this stream is also exhausted.\n while (this.outputQueue.length() === 0) {\n // TODO(soergel): consider parallel reads.\n if (!await this.pump()) {\n return { value: null, done: true };\n }\n }\n return { value: this.outputQueue.shift(), done: false };\n }\n}\nclass FlatmapIterator extends OneToManyIterator {\n constructor(upstream, transform) {\n super();\n this.upstream = upstream;\n this.transform = transform;\n }\n summary() {\n return `${this.upstream.summary()} -> Flatmap`;\n }\n async pump() {\n const item = await this.upstream.next();\n if (item.done) {\n return false;\n }\n const inputTensors = tf.tensor_util.getTensorsInContainer(item.value);\n // Careful: the transform may mutate the item in place.\n // that's why we have to remember the input Tensors above, and then\n // below dispose only those that were not passed through to the output.\n // Note too that the transform function is responsible for tidying any\n // intermediate Tensors. Here we are concerned only about the inputs.\n const mappedArray = this.transform(item.value);\n const outputTensors = tf.tensor_util.getTensorsInContainer(mappedArray);\n this.outputQueue.pushAll(mappedArray);\n // TODO(soergel) faster intersection, and deduplicate outputTensors\n // TODO(soergel) move to tf.disposeExcept(in, out)?\n for (const t of inputTensors) {\n if (!tf.tensor_util.isTensorInList(t, outputTensors)) {\n t.dispose();\n }\n }\n return true;\n }\n}\n/**\n * Provides a `LazyIterator` that concatenates a stream of underlying\n * streams.\n *\n * Doing this in a concurrency-safe way requires some trickery. 
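Editor's note: a sketch of a custom 1-to-many transform built on `OneToManyIterator`, following the `pump()` contract shown above (return `false` when the upstream is exhausted, otherwise push the mapped items onto `outputQueue` and return `true`). The `CharIterator` name and import path are illustrative assumptions.

```js
import { iteratorFromItems, OneToManyIterator } from './lazy_iterator';

// Splits each upstream string into its individual characters.
class CharIterator extends OneToManyIterator {
  constructor(upstream) {
    super();
    this.upstream = upstream;
  }
  summary() {
    return `${this.upstream.summary()} -> Chars`;
  }
  async pump() {
    const item = await this.upstream.next();
    if (item.done) {
      return false;  // upstream exhausted; nothing more will be queued
    }
    this.outputQueue.pushAll(item.value.split(''));
    return true;
  }
}

async function run() {
  const chars = new CharIterator(iteratorFromItems(['ab', 'cde']));
  console.log(await chars.toArray());  // -> [ 'a', 'b', 'c', 'd', 'e' ]
}
run();
```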
In\n * particular, we want this stream to return the elements from the\n * underlying streams in the correct order according to when next() was\n * called, even if the resulting Promises resolve in a different order.\n */\nexport class ChainedIterator extends LazyIterator {\n constructor(iterators, baseErrorHandler) {\n super();\n this.baseErrorHandler = baseErrorHandler;\n // Strict Promise execution order:\n // a next() call may not even begin until the previous one completes.\n this.lastRead = null;\n // Local state that should not be clobbered by out-of-order execution.\n this.iterator = null;\n this.moreIterators = iterators;\n }\n summary() {\n const upstreamSummaries = 'TODO: fill in upstream of chained summaries';\n return `${upstreamSummaries} -> Chained`;\n }\n async next() {\n this.lastRead = this.readFromChain(this.lastRead);\n return this.lastRead;\n }\n async readFromChain(lastRead) {\n // Must await on the previous read since the previous read may have advanced\n // the stream of streams, from which we need to read.\n // This is unfortunate since we can't parallelize reads. Which means\n // prefetching of chained streams is a no-op.\n // One solution is to prefetch immediately upstream of this.\n await lastRead;\n if (this.iterator == null) {\n const iteratorResult = await this.moreIterators.next();\n if (iteratorResult.done) {\n // No more streams to stream from.\n return { value: null, done: true };\n }\n this.iterator = iteratorResult.value;\n if (this.baseErrorHandler != null) {\n this.iterator = this.iterator.handleErrors(this.baseErrorHandler);\n }\n }\n const itemResult = await this.iterator.next();\n if (itemResult.done) {\n this.iterator = null;\n return this.readFromChain(lastRead);\n }\n return itemResult;\n }\n}\nexport var ZipMismatchMode;\n(function (ZipMismatchMode) {\n ZipMismatchMode[ZipMismatchMode[\"FAIL\"] = 0] = \"FAIL\";\n ZipMismatchMode[ZipMismatchMode[\"SHORTEST\"] = 1] = \"SHORTEST\";\n ZipMismatchMode[ZipMismatchMode[\"LONGEST\"] = 2] = \"LONGEST\"; // use nulls for exhausted streams; use up the longest stream.\n})(ZipMismatchMode || (ZipMismatchMode = {}));\n/**\n * Provides a `LazyIterator` that zips together an array, dict, or nested\n * structure of `LazyIterator`s (and perhaps additional constants).\n *\n * The underlying streams must provide elements in a consistent order such\n * that they correspond.\n *\n * Typically, the underlying streams should have the same number of\n * elements. If they do not, the behavior is determined by the\n * `mismatchMode` argument.\n *\n * The nested structure of the `iterators` argument determines the\n * structure of elements in the resulting iterator.\n *\n * Doing this in a concurrency-safe way requires some trickery. In\n * particular, we want this stream to return the elements from the\n * underlying streams in the correct order according to when next() was\n * called, even if the resulting Promises resolve in a different order.\n *\n * @param iterators: An array or object containing LazyIterators at the\n * leaves.\n * @param mismatchMode: Determines what to do when one underlying iterator\n * is exhausted before the others. `ZipMismatchMode.FAIL` (the default)\n * causes an error to be thrown in this case. 
`ZipMismatchMode.SHORTEST`\n * causes the zipped iterator to terminate with the furst underlying\n * streams, so elements remaining on the longer streams are ignored.\n * `ZipMismatchMode.LONGEST` causes the zipped stream to continue, filling\n * in nulls for the exhausted streams, until all streams are exhausted.\n */\nclass ZipIterator extends LazyIterator {\n constructor(iterators, mismatchMode = ZipMismatchMode.FAIL) {\n super();\n this.iterators = iterators;\n this.mismatchMode = mismatchMode;\n this.count = 0;\n this.currentPromise = null;\n }\n summary() {\n const upstreamSummaries = 'TODO: fill in upstream of zip summaries';\n return `{${upstreamSummaries}} -> Zip`;\n }\n async nextState(afterState) {\n // This chaining ensures that the underlying next() are not even called\n // before the previous ones have resolved.\n await afterState;\n // Collect underlying iterator \"done\" signals as a side effect in\n // getNext()\n let numIterators = 0;\n let iteratorsDone = 0;\n function getNext(container) {\n if (container instanceof LazyIterator) {\n const result = container.next();\n return {\n value: result.then(x => {\n numIterators++;\n if (x.done) {\n iteratorsDone++;\n }\n return x.value;\n }),\n recurse: false\n };\n }\n else {\n return { value: null, recurse: true };\n }\n }\n const mapped = await deepMapAndAwaitAll(this.iterators, getNext);\n if (numIterators === iteratorsDone) {\n // The streams have all ended.\n return { value: null, done: true };\n }\n if (iteratorsDone > 0) {\n switch (this.mismatchMode) {\n case ZipMismatchMode.FAIL:\n throw new Error('Zipped streams should have the same length. ' +\n `Mismatched at element ${this.count}.`);\n case ZipMismatchMode.SHORTEST:\n return { value: null, done: true };\n case ZipMismatchMode.LONGEST:\n default:\n // Continue. The exhausted streams already produced value: null.\n }\n }\n this.count++;\n return { value: mapped, done: false };\n }\n async next() {\n this.currentPromise = this.nextState(this.currentPromise);\n return this.currentPromise;\n }\n}\n// Iterators that maintain a ring buffer of pending promises\n// ============================================================================\n/**\n * A stream that prefetches a given number of items from an upstream source,\n * returning them in FIFO order.\n *\n * Note this prefetches Promises, but makes no guarantees about when those\n * Promises resolve.\n */\nexport class PrefetchIterator extends LazyIterator {\n constructor(upstream, bufferSize) {\n super();\n this.upstream = upstream;\n this.bufferSize = bufferSize;\n this.buffer = new RingBuffer(bufferSize);\n }\n summary() {\n return `${this.upstream.summary()} -> Prefetch`;\n }\n /**\n * Refill the prefetch buffer. Returns only after the buffer is full, or\n * the upstream source is exhausted.\n */\n refill() {\n while (!this.buffer.isFull()) {\n const v = this.upstream.next();\n this.buffer.push(v);\n }\n }\n next() {\n this.refill();\n // This shift will never throw an error because the buffer is always\n // full after a refill. If the stream is exhausted, the buffer will be\n // full of Promises that will resolve to the end-of-stream signal.\n return this.buffer.shift();\n }\n}\n/**\n * A stream that performs a sliding-window random shuffle on an upstream\n * source. This is like a `PrefetchIterator` except that the items are\n * returned in randomized order. 
Mixing naturally improves as the buffer\n * size increases.\n */\nexport class ShuffleIterator extends PrefetchIterator {\n constructor(upstream, windowSize, seed) {\n super(upstream, windowSize);\n this.upstream = upstream;\n this.windowSize = windowSize;\n // Local state that should not be clobbered by out-of-order execution.\n this.upstreamExhausted = false;\n this.random = seedrandom.alea(seed || tf.util.now().toString());\n this.lastRead = Promise.resolve({ value: null, done: false });\n }\n async next() {\n // This sets this.lastRead to a new Promise right away, as opposed to\n // saying `await this.lastRead; this.lastRead = this.serialNext();` which\n // would not work because this.nextRead would be updated only after the\n // promise resolves.\n this.lastRead = this.lastRead.then(() => this.serialNext());\n return this.lastRead;\n }\n randomInt(max) {\n return Math.floor(this.random() * max);\n }\n chooseIndex() {\n return this.randomInt(this.buffer.length());\n }\n async serialNext() {\n // TODO(soergel): consider performance\n if (!this.upstreamExhausted) {\n this.refill();\n }\n while (!this.buffer.isEmpty()) {\n const chosenIndex = this.chooseIndex();\n const result = await this.buffer.shuffleExcise(chosenIndex);\n if (result.done) {\n this.upstreamExhausted = true;\n }\n else {\n this.refill();\n return result;\n }\n }\n return { value: null, done: true };\n }\n}\n//# sourceMappingURL=lazy_iterator.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport * as tf from '@tensorflow/tfjs-core';\nimport * as seedrandom from 'seedrandom';\nimport { iteratorFromConcatenated, iteratorFromFunction, iteratorFromItems, iteratorFromZipped, ZipMismatchMode } from './iterators/lazy_iterator';\nimport { canTensorify, deepMapAndAwaitAll, isIterable } from './util/deep_map';\n// TODO(soergel): consider vectorized operations within the pipeline.\n/**\n * Represents a potentially large list of independent data elements (typically\n * 'samples' or 'examples').\n *\n * A 'data example' may be a primitive, an array, a map from string keys to\n * values, or any nested structure of these.\n *\n * A `Dataset` represents an ordered collection of elements, together with a\n * chain of transformations to be performed on those elements. Each\n * transformation is a method of `Dataset` that returns another `Dataset`, so\n * these may be chained, e.g.\n * `const processedDataset = rawDataset.filter(...).map(...).batch(...)`.\n *\n * Data loading and transformation is done in a lazy, streaming fashion. The\n * dataset may be iterated over multiple times; each iteration starts the data\n * loading anew and recapitulates the transformations.\n *\n * A `Dataset` is typically processed as a stream of unbatched examples --i.e.,\n * its transformations are applied one example at a time. 
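Editor's note: an added sketch of the lazy `filter -> map -> batch` chaining described above, using the public `tf.data.array` helper that the surrounding examples also rely on.

```js
import * as tf from '@tensorflow/tfjs';

async function run() {
  const ds = tf.data.array([1, 2, 3, 4, 5, 6, 7, 8])
      .filter(x => x % 2 === 0)  // 2, 4, 6, 8
      .map(x => x * 10)          // 20, 40, 60, 80
      .batch(2);                 // two batches of two elements each
  // Each batch of primitives arrives as a 1-D tensor, per the batch() docs below.
  await ds.forEachAsync(batch => batch.print());
}
run();
```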
Batching produces a\n * new `Dataset` where each element is a batch. Batching should usually come\n * last in a pipeline, because data transformations are easier to express on a\n * per-example basis than on a per-batch basis.\n *\n * The following code examples are calling `await dataset.forEachAsync(...)` to\n * iterate once over the entire dataset in order to print out the data.\n *\n * @doc {heading: 'Data', subheading: 'Classes', namespace: 'data'}\n */\nexport class Dataset {\n constructor() {\n this.size = null;\n }\n // TODO(soergel): Make Datasets report whether repeated iterator() calls\n // produce the same result (e.g., reading from a file) or different results\n // (e.g., from the webcam). Currently we don't make this distinction but it\n // could be important for the user to know.\n // abstract isDeterministic(): boolean;\n /**\n * Groups elements into batches.\n *\n * It is assumed that each of the incoming dataset elements has the same\n * structure-- i.e. the same set of keys at each location in an object\n * hierarchy. For each key, the resulting `Dataset` provides a batched\n * element collecting all of the incoming values for that key.\n *\n * * Incoming primitives are grouped into a 1-D Tensor.\n * * Incoming Tensors are grouped into a new Tensor where the 0'th axis is\n * the batch dimension.\n * * Incoming arrays are converted to Tensor and then batched.\n * * A nested array is interpreted as an n-D Tensor, so the batched result\n * has n+1 dimensions.\n * * An array that cannot be converted to Tensor produces an error.\n *\n * If an array should not be batched as a unit, it should first be converted\n * to an object with integer keys.\n *\n * Here are a few examples:\n *\n * Batch a dataset of numbers:\n * ```js\n * const a = tf.data.array([1, 2, 3, 4, 5, 6, 7, 8]).batch(4);\n * await a.forEachAsync(e => e.print());\n * ```\n *\n * Batch a dataset of arrays:\n * ```js\n * const b = tf.data.array([[1], [2], [3], [4], [5], [6], [7], [8]]).batch(4);\n * await b.forEachAsync(e => e.print());\n * ```\n *\n * Batch a dataset of objects:\n * ```js\n * const c = tf.data.array([{a: 1, b: 11}, {a: 2, b: 12}, {a: 3, b: 13},\n * {a: 4, b: 14}, {a: 5, b: 15}, {a: 6, b: 16}, {a: 7, b: 17},\n * {a: 8, b: 18}]).batch(4);\n * await c.forEachAsync(e => {\n * console.log('{');\n * for(var key in e) {\n * console.log(key+':');\n * e[key].print();\n * }\n * console.log('}');\n * })\n * ```\n *\n * @param batchSize The number of elements desired per batch.\n * @param smallLastBatch Whether to emit the final batch when it has fewer\n * than batchSize elements. 
Default true.\n * @returns A `Dataset`, from which a stream of batches can be obtained.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n batch(batchSize, smallLastBatch = true) {\n const base = this;\n tf.util.assert(batchSize > 0, () => `batchSize needs to be positive, but it is\n ${batchSize}`);\n let size;\n if (this.size === Infinity || this.size == null) {\n // If the size of this dataset is infinity or null, the new size keeps the\n // same.\n size = this.size;\n }\n else if (smallLastBatch) {\n // If the size of this dataset is known and include small last batch, the\n // new size is full batch count plus last batch.\n size = Math.ceil(this.size / batchSize);\n }\n else {\n // If the size of this dataset is known and not include small last batch,\n // the new size is full batch count.\n size = Math.floor(this.size / batchSize);\n }\n return datasetFromIteratorFn(async () => {\n return (await base.iterator())\n .columnMajorBatch(batchSize, smallLastBatch, deepBatchConcat);\n }, size);\n }\n /**\n * Concatenates this `Dataset` with another.\n *\n * ```js\n * const a = tf.data.array([1, 2, 3]);\n * const b = tf.data.array([4, 5, 6]);\n * const c = a.concatenate(b);\n * await c.forEachAsync(e => console.log(e));\n * ```\n *\n * @param dataset A `Dataset` to be concatenated onto this one.\n * @returns A `Dataset`.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n concatenate(dataset) {\n const base = this;\n let size;\n if (this.size === Infinity || dataset.size === Infinity) {\n // If the size of any of these two dataset is infinity, new size is\n // infinity.\n size = Infinity;\n }\n else if (this.size != null && dataset.size != null) {\n // If the size of both datasets are known and not infinity, new size is\n // sum the size of these two datasets.\n size = this.size + dataset.size;\n }\n else {\n // If neither of these two datasets has infinite size and any of these two\n // datasets' size is null, the new size is null.\n size = null;\n }\n return datasetFromIteratorFn(async () => (await base.iterator()).concatenate(await dataset.iterator()), size);\n }\n /**\n * Filters this dataset according to `predicate`.\n *\n * ```js\n * const a = tf.data.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10])\n * .filter(x => x%2 === 0);\n * await a.forEachAsync(e => console.log(e));\n * ```\n *\n * @param predicate A function mapping a dataset element to a boolean or a\n * `Promise` for one.\n *\n * @returns A `Dataset` of elements for which the predicate was true.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n filter(predicate) {\n const base = this;\n let size;\n if (this.size === Infinity) {\n // If the size of this dataset is infinity, new size is infinity\n size = Infinity;\n }\n else {\n // If this dataset has limited elements, new size is null because it might\n // exhausted randomly.\n size = null;\n }\n return datasetFromIteratorFn(async () => {\n return (await base.iterator()).filter(x => tf.tidy(() => predicate(x)));\n }, size);\n }\n /**\n * Apply a function to every element of the dataset.\n *\n * After the function is applied to a dataset element, any Tensors contained\n * within that element are disposed.\n *\n * ```js\n * const a = tf.data.array([1, 2, 3]);\n * await a.forEachAsync(e => console.log(e));\n * ```\n *\n * @param f A function to apply to each dataset element.\n * @returns A `Promise` that resolves after all elements have been processed.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n async forEachAsync(f) {\n return (await 
this.iterator()).forEachAsync(f);\n }\n /**\n * Maps this dataset through a 1-to-1 transform.\n *\n * ```js\n * const a = tf.data.array([1, 2, 3]).map(x => x*x);\n * await a.forEachAsync(e => console.log(e));\n * ```\n *\n * @param transform A function mapping a dataset element to a transformed\n * dataset element.\n *\n * @returns A `Dataset` of transformed elements.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n map(transform) {\n const base = this;\n return datasetFromIteratorFn(async () => {\n return (await base.iterator()).map(x => tf.tidy(() => transform(x)));\n }, this.size);\n }\n /**\n * Maps this dataset through an async 1-to-1 transform.\n *\n * ```js\n * const a =\n * tf.data.array([1, 2, 3]).mapAsync(x => new Promise(function(resolve){\n * setTimeout(() => {\n * resolve(x * x);\n * }, Math.random()*1000 + 500);\n * }));\n * console.log(await a.toArray());\n * ```\n *\n * @param transform A function mapping a dataset element to a `Promise` for a\n * transformed dataset element. This transform is responsible for disposing\n * any intermediate `Tensor`s, i.e. by wrapping its computation in\n * `tf.tidy()`; that cannot be automated here (as it is in the synchronous\n * `map()` case).\n *\n * @returns A `Dataset` of transformed elements.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n mapAsync(transform) {\n const base = this;\n return datasetFromIteratorFn(async () => {\n return (await base.iterator()).mapAsync(transform);\n }, this.size);\n }\n /**\n * Creates a `Dataset` that prefetches elements from this dataset.\n *\n * @param bufferSize: An integer specifying the number of elements to be\n * prefetched.\n * @returns A `Dataset`.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n prefetch(bufferSize) {\n if (bufferSize == null) {\n throw new RangeError('`Dataset.prefetch()` requires bufferSize to be specified.');\n }\n const base = this;\n return datasetFromIteratorFn(async () => (await base.iterator()).prefetch(bufferSize), this.size);\n }\n /**\n * Repeats this dataset `count` times.\n *\n * NOTE: If this dataset is a function of global state (e.g. a random number\n * generator), then different repetitions may produce different elements.\n *\n * ```js\n * const a = tf.data.array([1, 2, 3]).repeat(3);\n * await a.forEachAsync(e => console.log(e));\n * ```\n *\n * @param count: (Optional) An integer, representing the number of times\n * the dataset should be repeated. The default behavior (if `count` is\n * `undefined` or negative) is for the dataset be repeated indefinitely.\n * @returns A `Dataset`.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n repeat(count) {\n const base = this;\n let size;\n if (this.size != null && count > 0) {\n // If this dataset has size and count is positive, new size is current\n // size multiply count. 
This also covers the case that current size is\n // infinity.\n size = this.size * count;\n }\n else if (count === 0) {\n // If count is 0, new size is 0.\n size = 0;\n }\n else if (this.size != null && (count === undefined || count < 0)) {\n // If this dataset has size and count is undefined or negative, the\n // dataset will be repeated indefinitely and new size is infinity.\n size = Infinity;\n }\n else {\n // If the size of this dataset is null, the new dataset's size is null.\n size = null;\n }\n return datasetFromIteratorFn(async () => {\n const iteratorIterator = iteratorFromFunction(async () => ({ value: await base.iterator(), done: false }));\n return iteratorFromConcatenated(iteratorIterator.take(count));\n }, size);\n }\n /**\n * Creates a `Dataset` that skips `count` initial elements from this dataset.\n *\n * ```js\n * const a = tf.data.array([1, 2, 3, 4, 5, 6]).skip(3);\n * await a.forEachAsync(e => console.log(e));\n * ```\n *\n * @param count: The number of elements of this dataset that should be skipped\n * to form the new dataset. If `count` is greater than the size of this\n * dataset, the new dataset will contain no elements. If `count`\n * is `undefined` or negative, skips the entire dataset.\n *\n * @returns A `Dataset`.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n skip(count) {\n const base = this;\n let size;\n if (this.size != null && count >= 0 && this.size >= count) {\n // If the size of this dataset is greater than count, the new dataset's\n // size is current size minus skipped size.This also covers the case that\n // current size is infinity.\n size = this.size - count;\n }\n else if (this.size != null &&\n (this.size < count || count === undefined || count < 0)) {\n // If the size of this dataset is smaller than count, or count is\n // undefined or negative, skips the entire dataset and the new size is 0.\n size = 0;\n }\n else {\n // If the size of this dataset is null, the new dataset's size is null.\n size = null;\n }\n return datasetFromIteratorFn(async () => (await base.iterator()).skip(count), size);\n }\n /**\n * Pseudorandomly shuffles the elements of this dataset. This is done in a\n * streaming manner, by sampling from a given number of prefetched elements.\n *\n * ```js\n * const a = tf.data.array([1, 2, 3, 4, 5, 6]).shuffle(3);\n * await a.forEachAsync(e => console.log(e));\n * ```\n *\n * @param bufferSize: An integer specifying the number of elements from this\n * dataset from which the new dataset will sample.\n * @param seed: (Optional) An integer specifying the random seed that will\n * be used to create the distribution.\n * @param reshuffleEachIteration: (Optional) A boolean, which if true\n * indicates that the dataset should be pseudorandomly reshuffled each time\n * it is iterated over. If false, elements will be returned in the same\n * shuffled order on each iteration. (Defaults to `true`.)\n * @returns A `Dataset`.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n shuffle(bufferSize, seed, reshuffleEachIteration = true) {\n if (bufferSize == null || bufferSize < 0) {\n if (this.size == null) {\n throw new RangeError('`Dataset.shuffle()` requires bufferSize to be specified.');\n }\n else {\n throw new RangeError('`Dataset.shuffle()` requires bufferSize to be specified. 
' +\n 'If your data fits in main memory (for regular JS objects), ' +\n 'and/or GPU memory (for `tf.Tensor`s), consider setting ' +\n `bufferSize to the dataset size (${this.size} elements)`);\n }\n }\n const base = this;\n const random = seedrandom.alea(seed || tf.util.now().toString());\n return datasetFromIteratorFn(async () => {\n let seed2 = random.int32();\n if (reshuffleEachIteration) {\n seed2 += random.int32();\n }\n return (await base.iterator()).shuffle(bufferSize, seed2.toString());\n }, this.size);\n }\n /**\n * Creates a `Dataset` with at most `count` initial elements from this\n * dataset.\n *\n * ```js\n * const a = tf.data.array([1, 2, 3, 4, 5, 6]).take(3);\n * await a.forEachAsync(e => console.log(e));\n * ```\n *\n * @param count: The number of elements of this dataset that should be taken\n * to form the new dataset. If `count` is `undefined` or negative, or if\n * `count` is greater than the size of this dataset, the new dataset will\n * contain all elements of this dataset.\n * @returns A `Dataset`.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n take(count) {\n const base = this;\n let size;\n if (this.size != null && this.size > count) {\n // If the size of this dataset is greater than count, the new dataset's\n // size is count.\n size = count;\n }\n else if (this.size != null && this.size <= count) {\n // If the size of this dataset is equal or smaller than count, the new\n // dataset's size is the size of this dataset.\n size = this.size;\n }\n else {\n // If the size of this dataset is null, the new dataset's size is null.\n size = null;\n }\n return datasetFromIteratorFn(async () => (await base.iterator()).take(count), size);\n }\n /**\n * Collect all elements of this dataset into an array.\n *\n * Obviously this will succeed only for small datasets that fit in memory.\n * Useful for testing and generally should be avoided if possible.\n *\n * ```js\n * const a = tf.data.array([1, 2, 3, 4, 5, 6]);\n * console.log(await a.toArray());\n * ```\n *\n * @returns A Promise for an array of elements, which will resolve\n * when a new stream has been obtained and fully consumed.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n async toArray() {\n if (this.size === Infinity) {\n throw new Error('Can not convert infinite data stream to array.');\n }\n return (await this.iterator()).toArray();\n }\n /**\n * Collect all elements of this dataset into an array with prefetching 100\n * elements. This is useful for testing, because the prefetch changes the\n * order in which the Promises are resolved along the processing pipeline.\n * This may help expose bugs where results are dependent on the order of\n * Promise resolution rather than on the logical order of the stream (i.e.,\n * due to hidden mutable state).\n *\n * @returns A Promise for an array of elements, which will resolve\n * when a new stream has been obtained and fully consumed.\n */\n async toArrayForTest() {\n if (this.size === Infinity) {\n throw new Error('Can not convert infinite data stream to array.');\n }\n return (await this.iterator()).toArrayForTest();\n }\n}\n// TODO(soergel): deep sharded shuffle, where supported\nDataset.MAX_BUFFER_SIZE = 10000;\n/**\n * Create a `Dataset` defined by a provided iterator() function.\n *\n * ```js\n * let i = -1;\n * const func = () =>\n * ++i < 5 ? 
{value: i, done: false} : {value: null, done: true};\n * const iter = tf.data.iteratorFromFunction(func);\n * const ds = tf.data.datasetFromIteratorFn(iter);\n * await ds.forEachAsync(e => console.log(e));\n * ```\n */\nexport function datasetFromIteratorFn(iteratorFn, size = null) {\n return new class extends Dataset {\n constructor() {\n super(...arguments);\n this.size = size;\n }\n /*\n * Provide a new stream of elements. Note this will also start new streams\n * from any underlying `Dataset`s.\n */\n async iterator() {\n return iteratorFn();\n }\n }();\n}\n/**\n * Create a `Dataset` from an array of elements.\n *\n * Create a Dataset from an array of objects:\n * ```js\n * const a = tf.data.array([{'item': 1}, {'item': 2}, {'item': 3}]);\n * await a.forEachAsync(e => console.log(e));\n * ```\n *\n * Create a Dataset from an array of numbers:\n * ```js\n * const a = tf.data.array([4, 5, 6]);\n * await a.forEachAsync(e => console.log(e));\n * ```\n * @param items An array of elements that will be parsed as items in a dataset.\n *\n * @doc {heading: 'Data', subheading: 'Creation', namespace: 'data'}\n */\nexport function array(items) {\n return datasetFromIteratorFn(async () => iteratorFromItems(items), items.length);\n}\n/**\n * Create a `Dataset` by zipping together an array, dict, or nested\n * structure of `Dataset`s (and perhaps additional constants).\n * The underlying datasets must provide elements in a consistent order such that\n * they correspond.\n *\n * The number of elements in the resulting dataset is the same as the size of\n * the smallest dataset in datasets.\n *\n * The nested structure of the `datasets` argument determines the\n * structure of elements in the resulting iterator.\n *\n * Note this means that, given an array of two datasets that produce dict\n * elements, the result is a dataset that produces elements that are arrays\n * of two dicts:\n *\n * Zip an array of datasets:\n * ```js\n * console.log('Zip two datasets of objects:');\n * const ds1 = tf.data.array([{a: 1}, {a: 2}, {a: 3}]);\n * const ds2 = tf.data.array([{b: 4}, {b: 5}, {b: 6}]);\n * const ds3 = tf.data.zip([ds1, ds2]);\n * await ds3.forEachAsync(e => console.log(JSON.stringify(e)));\n *\n * // If the goal is to merge the dicts in order to produce elements like\n * // {a: ..., b: ...}, this requires a second step such as:\n * console.log('Merge the objects:');\n * const ds4 = ds3.map(x => {return {a: x[0].a, b: x[1].b}});\n * await ds4.forEachAsync(e => console.log(e));\n * ```\n *\n * Zip a dict of datasets:\n * ```js\n * const a = tf.data.array([{a: 1}, {a: 2}, {a: 3}]);\n * const b = tf.data.array([{b: 4}, {b: 5}, {b: 6}]);\n * const c = tf.data.zip({c: a, d: b});\n * await c.forEachAsync(e => console.log(JSON.stringify(e)));\n * ```\n *\n * @doc {heading: 'Data', subheading: 'Operations', namespace: 'data'}\n */\nexport function zip(datasets) {\n // manually type-check the argument for JS users\n if (!isIterable(datasets)) {\n throw new Error('The argument to zip() must be an object or array.');\n }\n let size;\n if (Array.isArray(datasets)) {\n for (let i = 0; i < datasets.length; i++) {\n size = size == null ? datasets[i].size :\n Math.min(size, datasets[i].size);\n }\n }\n else if (datasets instanceof Object) {\n for (const ds in datasets) {\n size = size == null ? 
datasets[ds].size :\n Math.min(size, datasets[ds].size);\n }\n }\n return datasetFromIteratorFn(async () => {\n const streams = await deepMapAndAwaitAll(datasets, d => {\n if (d instanceof Dataset) {\n return { value: d.iterator(), recurse: false };\n }\n else if (isIterable(d)) {\n return { value: null, recurse: true };\n }\n else {\n throw new Error('Leaves of the structure passed to zip() must be Datasets, ' +\n 'not primitives.');\n }\n });\n return iteratorFromZipped(streams, ZipMismatchMode.SHORTEST);\n }, size);\n}\n/**\n * A zip function for use with deepZip, passed via the columnMajorBatch call.\n *\n * Accepts an array of identically-structured nested elements and either batches\n * them (if they are primitives, numeric arrays, or Tensors) or requests\n * recursion (if not).\n */\n// tslint:disable-next-line:no-any\nfunction deepBatchConcat(rows) {\n if (rows === null) {\n return null;\n }\n // use the first item to decide whether to recurse or batch here.\n const exampleRow = rows[0];\n if (canTensorify(exampleRow)) {\n // rows is an array of primitives, Tensors, or arrays. Batch them.\n const value = batchConcat(rows);\n return { value, recurse: false };\n }\n // the example row is an object, so recurse into it.\n return { value: null, recurse: true };\n}\n/**\n * Assembles a list of same-shaped numbers, number arrays, or Tensors\n * into a single new Tensor where axis 0 is the batch dimension.\n */\nfunction batchConcat(arrays) {\n if (arrays.length === 0) {\n // We can't return an empty Tensor because we don't know the element shape.\n throw new Error('Can\\'t make a batch of zero elements.');\n }\n if (arrays[0] instanceof tf.Tensor) {\n // Input is an array of Tensors\n return tf.stack(arrays);\n }\n else {\n // Input is a possibly-nested array of numbers.\n return tf.tensor(arrays);\n }\n}\n//# sourceMappingURL=dataset.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { Dataset } from '../dataset';\n/**\n * Represents a potentially large collection of text lines.\n *\n * The results are not batched.\n */\nexport class TextLineDataset extends Dataset {\n /**\n * Create a `TextLineDataset`.\n *\n * @param input A `DataSource` providing a chunked, UTF8-encoded byte stream.\n */\n constructor(input) {\n super();\n this.input = input;\n }\n async iterator() {\n const inputIterator = await this.input.iterator();\n const utf8Iterator = inputIterator.decodeUTF8();\n const lineIterator = utf8Iterator.split('\\n').map(line => {\n // Windows/DOS format text file has extra line breaker at the end of line.\n if (line.endsWith('\\r')) {\n line = line.slice(0, -1);\n }\n return line;\n });\n return lineIterator;\n }\n}\n//# sourceMappingURL=text_line_dataset.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nimport { Dataset } from '../dataset';\nimport { TextLineDataset } from './text_line_dataset';\nconst CODE_QUOTE = '\"';\nconst STATE_OUT = Symbol('out');\nconst STATE_FIELD = Symbol('field');\nconst STATE_QUOTE = Symbol('quote');\nconst STATE_QUOTE_AFTER_QUOTE = Symbol('quoteafterquote');\nconst STATE_WITHIN_QUOTE_IN_QUOTE = Symbol('quoteinquote');\n/**\n * Represents a potentially large collection of delimited text records.\n *\n * The produced `TensorContainer`s each contain one key-value pair for\n * every column of the table. When a field is empty in the incoming data, the\n * resulting value is `undefined`, or throw error if it is required. Values\n * that can be parsed as numbers are emitted as type `number`, other values\n * are parsed as `string`.\n *\n * The results are not batched.\n *\n * @doc {heading: 'Data', subheading: 'Classes', namespace: 'data'}\n */\nexport class CSVDataset extends Dataset {\n /**\n * Create a `CSVDataset`.\n *\n * @param input A `DataSource` providing a chunked, UTF8-encoded byte stream.\n * @param csvConfig (Optional) A CSVConfig object that contains configurations\n * of reading and decoding from CSV file(s).\n *\n * hasHeader: (Optional) A boolean value that indicates whether the first\n * row of provided CSV file is a header line with column names, and should\n * not be included in the data. Defaults to `true`.\n *\n * columnNames: (Optional) A list of strings that corresponds to\n * the CSV column names, in order. If provided, it ignores the column\n * names inferred from the header row. If not provided, infers the column\n * names from the first row of the records. If hasHeader is false and\n * columnNames is not provided, this method throws an error.\n *\n * columnConfigs: (Optional) A dictionary whose key is column names, value\n * is an object stating if this column is required, column's data type,\n * default value, and if this column is label. If provided, keys must\n * correspond to names provided in columnNames or inferred from the file\n * header lines. If isLabel is true any column, returns an array of two\n * items: the first item is a dict of features key/value pairs, the second\n * item is a dict of labels key/value pairs. If no feature is marked as\n * label, returns a dict of features only.\n *\n * configuredColumnsOnly (Optional) If true, only columns provided in\n * columnConfigs will be parsed and provided during iteration.\n *\n * delimiter (Optional) The string used to parse each line of the input\n * file. 
Defaults to `,`.\n */\n constructor(input, csvConfig) {\n super();\n this.input = input;\n this.hasHeader = true;\n this.fullColumnNames = null;\n this.columnNamesValidated = false;\n this.columnConfigs = null;\n this.configuredColumnsOnly = false;\n this.delimiter = ',';\n this.delimWhitespace = false;\n this.base = new TextLineDataset(input);\n if (!csvConfig) {\n csvConfig = {};\n }\n this.hasHeader = csvConfig.hasHeader === false ? false : true;\n this.fullColumnNames = csvConfig.columnNames;\n this.columnConfigs = csvConfig.columnConfigs;\n this.configuredColumnsOnly = csvConfig.configuredColumnsOnly;\n if (csvConfig.delimWhitespace) {\n util.assert(csvConfig.delimiter == null, () => 'Delimiter should not be provided when delimWhitespace is true.');\n this.delimWhitespace = true;\n this.delimiter = ' ';\n }\n else {\n this.delimiter = csvConfig.delimiter ? csvConfig.delimiter : ',';\n }\n }\n /**\n * Returns column names of the csv dataset. If `configuredColumnsOnly` is\n * true, return column names in `columnConfigs`. If `configuredColumnsOnly` is\n * false and `columnNames` is provided, `columnNames`. If\n * `configuredColumnsOnly` is false and `columnNames` is not provided, return\n * all column names parsed from the csv file. For example usage please go to\n * `tf.data.csv`.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n async columnNames() {\n if (!this.columnNamesValidated) {\n await this.setColumnNames();\n }\n return this.configuredColumnsOnly ? Object.keys(this.columnConfigs) :\n this.fullColumnNames;\n }\n /* 1) If `columnNames` is provided as string[], use this string[] as output\n * keys in corresponding order. The length must match the number of inferred\n * columns if `hasHeader` is true .\n * 2) If `columnNames` is not provided, parse header line as `columnNames` if\n * hasHeader is true. 
If `hasHeader` is false, throw an error.\n * 3) If `columnConfigs` is provided, all the keys in `columnConfigs` must\n * exist in parsed `columnNames`.\n */\n async setColumnNames() {\n const columnNamesFromFile = await this.maybeReadHeaderLine();\n if (!this.fullColumnNames && !columnNamesFromFile) {\n // Throw an error if columnNames is not provided and no header line.\n throw new Error('Column names must be provided if there is no header line.');\n }\n else if (this.fullColumnNames && columnNamesFromFile) {\n // Check provided columnNames match header line.\n util.assert(columnNamesFromFile.length === this.fullColumnNames.length, () => 'The length of provided columnNames (' +\n this.fullColumnNames.length.toString() +\n ') does not match the length of the header line read from ' +\n 'file (' + columnNamesFromFile.length.toString() + ').');\n }\n if (!this.fullColumnNames) {\n this.fullColumnNames = columnNamesFromFile;\n }\n // Check if there are duplicate column names.\n const counts = this.fullColumnNames.reduce((countAcc, name) => {\n countAcc[name] = (countAcc[name] + 1) || 1;\n return countAcc;\n }, {});\n const duplicateNames = Object.keys(counts).filter((name) => (counts[name] > 1));\n util.assert(duplicateNames.length === 0, () => 'Duplicate column names found: ' + duplicateNames.toString());\n // Check if keys in columnConfigs match columnNames.\n if (this.columnConfigs) {\n for (const key of Object.keys(this.columnConfigs)) {\n const index = this.fullColumnNames.indexOf(key);\n if (index === -1) {\n throw new Error('The key \"' + key +\n '\" provided in columnConfigs does not match any of the column ' +\n 'names (' + this.fullColumnNames.toString() + ').');\n }\n }\n }\n this.columnNamesValidated = true;\n }\n async maybeReadHeaderLine() {\n if (this.hasHeader) {\n const iter = await this.base.iterator();\n const firstElement = await iter.next();\n if (firstElement.done) {\n throw new Error('No data was found for CSV parsing.');\n }\n const firstLine = firstElement.value;\n const headers = this.parseRow(firstLine, false);\n return headers;\n }\n else {\n return null;\n }\n }\n async iterator() {\n if (!this.columnNamesValidated) {\n await this.setColumnNames();\n }\n let lines = await this.base.iterator();\n if (this.hasHeader) {\n // We previously read the first line to get the columnNames.\n // Now that we're providing data, skip it.\n lines = lines.skip(1);\n }\n return lines.map(x => this.makeDataElement(x));\n }\n makeDataElement(line) {\n const values = this.parseRow(line);\n const features = {};\n const labels = {};\n for (let i = 0; i < this.fullColumnNames.length; i++) {\n const key = this.fullColumnNames[i];\n const config = this.columnConfigs ? this.columnConfigs[key] : null;\n if (this.configuredColumnsOnly && !config) {\n // This column is not selected.\n continue;\n }\n else {\n const value = values[i];\n let parsedValue = null;\n if (value === '') {\n // If default value is provided, use it. 
If default value is not\n // provided, set as undefined.\n if (config && config.default !== undefined) {\n parsedValue = config.default;\n }\n else if (config && (config.required || config.isLabel)) {\n throw new Error(`Required column ${key} is empty in this line: ${line}`);\n }\n else {\n parsedValue = undefined;\n }\n }\n else {\n // A value is present, so parse it based on type\n const valueAsNum = Number(value);\n if (isNaN(valueAsNum)) {\n // The value is a string and this column is declared as boolean\n // in config, parse it as boolean.\n if (config && config.dtype === 'bool') {\n parsedValue = this.getBoolean(value);\n }\n else {\n // Set value as string\n parsedValue = value;\n }\n }\n else if (!config || !config.dtype) {\n // If this value is a number and no type config is provided, return\n // it as number.\n parsedValue = valueAsNum;\n }\n else {\n // If this value is a number and data type is provided, parse it\n // according to provided data type.\n switch (config.dtype) {\n case 'float32':\n parsedValue = valueAsNum;\n break;\n case 'int32':\n parsedValue = Math.floor(valueAsNum);\n break;\n case 'bool':\n parsedValue = this.getBoolean(value);\n break;\n default:\n parsedValue = valueAsNum;\n }\n }\n }\n // Check if this column is label.\n (config && config.isLabel) ? labels[key] = parsedValue :\n features[key] = parsedValue;\n }\n }\n // If label exists, return an object of features and labels as {xs:features,\n // ys:labels}, otherwise return features only.\n if (Object.keys(labels).length === 0) {\n return features;\n }\n else {\n return { xs: features, ys: labels };\n }\n }\n getBoolean(value) {\n if (value === '1' || value.toLowerCase() === 'true') {\n return 1;\n }\n else {\n return 0;\n }\n }\n // adapted from https://beta.observablehq.com/@mbostock/streaming-csv\n parseRow(line, validateElementCount = true) {\n const result = [];\n let readOffset = 0;\n const readLength = line.length;\n let currentState = STATE_OUT;\n // Goes through the line to parse quote.\n for (let i = 0; i < readLength; i++) {\n switch (currentState) {\n // Before enter a new field\n case STATE_OUT:\n switch (line.charAt(i)) {\n // Enter a quoted field\n case CODE_QUOTE:\n readOffset = i + 1;\n currentState = STATE_QUOTE;\n break;\n // Read an empty field\n case this.delimiter:\n readOffset = i + 1;\n // If delimiter is white space and configured to collapse\n // multiple white spaces, ignore this white space.\n if (this.delimiter === ' ' && this.delimWhitespace) {\n break;\n }\n result.push('');\n currentState = STATE_OUT;\n break;\n // Enter an unquoted field\n default:\n currentState = STATE_FIELD;\n readOffset = i;\n break;\n }\n break;\n // In an unquoted field\n case STATE_FIELD:\n switch (line.charAt(i)) {\n // Exit an unquoted field, add it to result\n case this.delimiter:\n result.push(line.substring(readOffset, i));\n currentState = STATE_OUT;\n readOffset = i + 1;\n break;\n default:\n }\n break;\n // In a quoted field\n case STATE_QUOTE:\n switch (line.charAt(i)) {\n // Read a quote after a quote\n case CODE_QUOTE:\n currentState = STATE_QUOTE_AFTER_QUOTE;\n break;\n default:\n }\n break;\n // This state means it's right after a second quote in a field\n case STATE_QUOTE_AFTER_QUOTE:\n switch (line.charAt(i)) {\n // Finished a quoted field\n case this.delimiter:\n result.push(line.substring(readOffset, i - 1));\n currentState = STATE_OUT;\n readOffset = i + 1;\n break;\n // Finished a quoted part in a quoted field\n case CODE_QUOTE:\n currentState = STATE_QUOTE;\n break;\n // In a 
quoted part in a quoted field\n default:\n currentState = STATE_WITHIN_QUOTE_IN_QUOTE;\n break;\n }\n break;\n case STATE_WITHIN_QUOTE_IN_QUOTE:\n switch (line.charAt(i)) {\n // Exit a quoted part in a quoted field\n case CODE_QUOTE:\n currentState = STATE_QUOTE;\n break;\n default:\n }\n break;\n default:\n }\n }\n // Adds last item based on if it is quoted.\n if (currentState === STATE_QUOTE_AFTER_QUOTE) {\n result.push(line.substring(readOffset, readLength - 1));\n }\n else {\n result.push(line.substring(readOffset));\n }\n // Check if each row has the same number of elements as column names.\n if (validateElementCount && result.length !== this.fullColumnNames.length) {\n throw new Error(`Invalid row in csv file. Should have ${this.fullColumnNames.length} elements in a row, but got ${result}`);\n }\n return result;\n }\n}\n// TODO(soergel): add more basic datasets for parity with tf.data\n// tf.data.FixedLengthRecordDataset()\n// tf.data.TFRecordDataset()\n//# sourceMappingURL=csv_dataset.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { env, tensor, util } from '@tensorflow/tfjs-core';\nimport { LazyIterator } from './lazy_iterator';\n/**\n * Provide a stream of tensors from microphone audio stream. The tensors are\n * representing audio data as frequency-domain spectrogram generated with\n * browser's native FFT. Tensors representing time-domain waveform is available\n * based on configuration. Only works in browser environment.\n */\nexport class MicrophoneIterator extends LazyIterator {\n constructor(microphoneConfig) {\n super();\n this.microphoneConfig = microphoneConfig;\n this.isClosed = false;\n this.fftSize = microphoneConfig.fftSize || 1024;\n const fftSizeLog2 = Math.log2(this.fftSize);\n if (this.fftSize < 0 || fftSizeLog2 < 4 || fftSizeLog2 > 14 ||\n !Number.isInteger(fftSizeLog2)) {\n throw new Error(`Invalid fftSize: it must be a power of 2 between ` +\n `2 to 4 and 2 to 14, but got ${this.fftSize}`);\n }\n this.numFrames = microphoneConfig.numFramesPerSpectrogram || 43;\n this.sampleRateHz = microphoneConfig.sampleRateHz;\n this.columnTruncateLength =\n microphoneConfig.columnTruncateLength || this.fftSize;\n this.audioTrackConstraints = microphoneConfig.audioTrackConstraints;\n this.smoothingTimeConstant = microphoneConfig.smoothingTimeConstant || 0;\n this.includeSpectrogram =\n microphoneConfig.includeSpectrogram === false ? false : true;\n this.includeWaveform =\n microphoneConfig.includeWaveform === true ? true : false;\n if (!this.includeSpectrogram && !this.includeWaveform) {\n throw new Error('Both includeSpectrogram and includeWaveform are false. 
' +\n 'At least one type of data should be returned.');\n }\n }\n summary() {\n return `microphone`;\n }\n // Construct a MicrophoneIterator and start the audio stream.\n static async create(microphoneConfig = {}) {\n if (env().get('IS_NODE')) {\n throw new Error('microphone API is only supported in browser environment.');\n }\n const microphoneIterator = new MicrophoneIterator(microphoneConfig);\n // Call async function start() to initialize the audio stream.\n await microphoneIterator.start();\n return microphoneIterator;\n }\n // Start the audio stream and FFT.\n async start() {\n try {\n this.stream = await navigator.mediaDevices.getUserMedia({\n audio: this.audioTrackConstraints == null ? true :\n this.audioTrackConstraints,\n video: false\n });\n }\n catch (e) {\n throw new Error(`Error thrown while initializing video stream: ${e.message}`);\n }\n if (!this.stream) {\n throw new Error('Could not obtain audio from microphone.');\n }\n const ctxConstructor = \n // tslint:disable-next-line:no-any\n window.AudioContext || window.webkitAudioContext;\n this.audioContext = new ctxConstructor();\n if (!this.sampleRateHz) {\n // If sample rate is not provided, use the available sample rate on\n // device.\n this.sampleRateHz = this.audioContext.sampleRate;\n }\n else if (this.audioContext.sampleRate !== this.sampleRateHz) {\n throw new Error(`Mismatch in sampling rate: ` +\n `Expected: ${this.sampleRateHz}; ` +\n `Actual: ${this.audioContext.sampleRate}`);\n }\n const streamSource = this.audioContext.createMediaStreamSource(this.stream);\n this.analyser = this.audioContext.createAnalyser();\n this.analyser.fftSize = this.fftSize * 2;\n this.analyser.smoothingTimeConstant = this.smoothingTimeConstant;\n streamSource.connect(this.analyser);\n this.freqData = new Float32Array(this.fftSize);\n this.timeData = new Float32Array(this.fftSize);\n return;\n }\n async next() {\n if (this.isClosed) {\n return { value: null, done: true };\n }\n let spectrogramTensor;\n let waveformTensor;\n const audioDataQueue = await this.getAudioData();\n if (this.includeSpectrogram) {\n const freqData = this.flattenQueue(audioDataQueue.freqDataQueue);\n spectrogramTensor = this.getTensorFromAudioDataArray(freqData, [this.numFrames, this.columnTruncateLength, 1]);\n }\n if (this.includeWaveform) {\n const timeData = this.flattenQueue(audioDataQueue.timeDataQueue);\n waveformTensor = this.getTensorFromAudioDataArray(timeData, [this.numFrames * this.fftSize, 1]);\n }\n return {\n value: { 'spectrogram': spectrogramTensor, 'waveform': waveformTensor },\n done: false\n };\n }\n // Capture one result from the audio stream, and extract the value from\n // iterator.next() result.\n async capture() {\n return (await this.next()).value;\n }\n async getAudioData() {\n const freqDataQueue = [];\n const timeDataQueue = [];\n let currentFrames = 0;\n return new Promise(resolve => {\n const intervalID = setInterval(() => {\n if (this.includeSpectrogram) {\n this.analyser.getFloatFrequencyData(this.freqData);\n // If the audio stream is initializing, return empty queue.\n if (this.freqData[0] === -Infinity) {\n resolve({ freqDataQueue, timeDataQueue });\n }\n freqDataQueue.push(this.freqData.slice(0, this.columnTruncateLength));\n }\n if (this.includeWaveform) {\n this.analyser.getFloatTimeDomainData(this.timeData);\n timeDataQueue.push(this.timeData.slice());\n }\n // Clean interval and return when all frames have been collected\n if (++currentFrames === this.numFrames) {\n clearInterval(intervalID);\n resolve({ freqDataQueue, 
timeDataQueue });\n }\n }, this.fftSize / this.sampleRateHz * 1e3);\n });\n }\n // Stop the audio stream and pause the iterator.\n stop() {\n if (!this.isClosed) {\n this.isClosed = true;\n this.analyser.disconnect();\n this.audioContext.close();\n if (this.stream != null && this.stream.getTracks().length > 0) {\n this.stream.getTracks()[0].stop();\n }\n }\n }\n // Override toArray() function to prevent collecting.\n toArray() {\n throw new Error('Can not convert infinite audio stream to array.');\n }\n // Return audio sampling rate in Hz\n getSampleRate() {\n return this.sampleRateHz;\n }\n flattenQueue(queue) {\n const frameSize = queue[0].length;\n const freqData = new Float32Array(queue.length * frameSize);\n queue.forEach((data, i) => freqData.set(data, i * frameSize));\n return freqData;\n }\n getTensorFromAudioDataArray(freqData, shape) {\n const vals = new Float32Array(util.sizeFromShape(shape));\n // If the data is less than the output shape, the rest is padded with zeros.\n vals.set(freqData, vals.length - freqData.length);\n return tensor(vals, shape);\n }\n}\n//# sourceMappingURL=microphone_iterator.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { browser, env, image, tensor1d, tensor2d, tidy, util } from '@tensorflow/tfjs-core';\nimport { LazyIterator } from './lazy_iterator';\n/**\n * Provide a stream of image tensors from webcam video stream. 
Only works in\n * browser environment.\n */\nexport class WebcamIterator extends LazyIterator {\n constructor(webcamVideoElement, webcamConfig) {\n super();\n this.webcamVideoElement = webcamVideoElement;\n this.webcamConfig = webcamConfig;\n this.isClosed = true;\n this.resize = false;\n if (this.needToResize()) {\n this.resize = true;\n this.cropSize =\n [this.webcamConfig.resizeHeight, this.webcamConfig.resizeWidth];\n this.cropBoxInd = tensor1d([0], 'int32');\n if (this.webcamConfig.centerCrop) {\n // Calculate the box based on resizing shape.\n const widthCroppingRatio = this.webcamConfig.resizeWidth * 1.0 / this.webcamVideoElement.width;\n const heightCroppingRatio = this.webcamConfig.resizeHeight * 1.0 /\n this.webcamVideoElement.height;\n const widthCropStart = (1 - widthCroppingRatio) / 2;\n const heightCropStart = (1 - heightCroppingRatio) / 2;\n const widthCropEnd = widthCropStart + widthCroppingRatio;\n const heightCropEnd = heightCroppingRatio + heightCropStart;\n this.cropBox = tensor2d([heightCropStart, widthCropStart, heightCropEnd, widthCropEnd], [1, 4]);\n }\n else {\n this.cropBox = tensor2d([0, 0, 1, 1], [1, 4]);\n }\n }\n }\n summary() {\n return `webcam`;\n }\n // Construct a WebcamIterator and start it's video stream.\n static async create(webcamVideoElement, webcamConfig = {}) {\n if (env().get('IS_NODE')) {\n throw new Error('tf.data.webcam is only supported in browser environment.');\n }\n if (!webcamVideoElement) {\n // If webcam video element is not provided, create a hidden video element\n // with provided width and height.\n webcamVideoElement = document.createElement('video');\n if (!webcamConfig.resizeWidth || !webcamConfig.resizeHeight) {\n throw new Error('Please provide webcam video element, or resizeWidth and ' +\n 'resizeHeight to create a hidden video element.');\n }\n webcamVideoElement.width = webcamConfig.resizeWidth;\n webcamVideoElement.height = webcamConfig.resizeHeight;\n }\n const webcamIterator = new WebcamIterator(webcamVideoElement, webcamConfig);\n // Call async function to initialize the video stream.\n await webcamIterator.start();\n return webcamIterator;\n }\n // Async function to start video stream.\n async start() {\n if (this.webcamConfig.facingMode) {\n util.assert((this.webcamConfig.facingMode === 'user') ||\n (this.webcamConfig.facingMode === 'environment'), () => `Invalid webcam facing mode: ${this.webcamConfig.facingMode}. 
` +\n `Please provide 'user' or 'environment'`);\n }\n try {\n this.stream = await navigator.mediaDevices.getUserMedia({\n video: {\n deviceId: this.webcamConfig.deviceId,\n facingMode: this.webcamConfig.facingMode ?\n this.webcamConfig.facingMode :\n 'user',\n width: this.webcamVideoElement.width,\n height: this.webcamVideoElement.height\n }\n });\n }\n catch (e) {\n // Modify the error message but leave the stack trace intact\n e.message = `Error thrown while initializing video stream: ${e.message}`;\n throw e;\n }\n if (!this.stream) {\n throw new Error('Could not obtain video from webcam.');\n }\n // Older browsers may not have srcObject\n try {\n this.webcamVideoElement.srcObject = this.stream;\n }\n catch (error) {\n console.log(error);\n this.webcamVideoElement.src = window.URL.createObjectURL(this.stream);\n }\n // Start the webcam video stream\n this.webcamVideoElement.play();\n this.isClosed = false;\n return new Promise(resolve => {\n // Add event listener to make sure the webcam has been fully initialized.\n this.webcamVideoElement.onloadedmetadata = () => {\n resolve();\n };\n });\n }\n async next() {\n if (this.isClosed) {\n return { value: null, done: true };\n }\n let img;\n try {\n img = browser.fromPixels(this.webcamVideoElement);\n }\n catch (e) {\n throw new Error(`Error thrown converting video to pixels: ${JSON.stringify(e)}`);\n }\n if (this.resize) {\n try {\n return { value: this.cropAndResizeFrame(img), done: false };\n }\n catch (e) {\n throw new Error(`Error thrown cropping the video: ${e.message}`);\n }\n finally {\n img.dispose();\n }\n }\n else {\n return { value: img, done: false };\n }\n }\n needToResize() {\n // If resizeWidth and resizeHeight are provided, and different from the\n // width and height of original HTMLVideoElement, then resizing and cropping\n // is required.\n if (this.webcamConfig.resizeWidth && this.webcamConfig.resizeHeight &&\n (this.webcamVideoElement.width !== this.webcamConfig.resizeWidth ||\n this.webcamVideoElement.height !== this.webcamConfig.resizeHeight)) {\n return true;\n }\n return false;\n }\n // Cropping and resizing each frame based on config\n cropAndResizeFrame(img) {\n return tidy(() => {\n const expandedImage = img.toFloat().expandDims(0);\n let resizedImage;\n resizedImage = image.cropAndResize(expandedImage, this.cropBox, this.cropBoxInd, this.cropSize, 'bilinear');\n // Extract image from batch cropping.\n const shape = resizedImage.shape;\n return resizedImage.reshape(shape.slice(1));\n });\n }\n // Capture one frame from the video stream, and extract the value from\n // iterator.next() result.\n async capture() {\n return (await this.next()).value;\n }\n // Stop the video stream and pause webcam iterator.\n stop() {\n const tracks = this.stream.getTracks();\n tracks.forEach(track => track.stop());\n try {\n this.webcamVideoElement.srcObject = null;\n }\n catch (error) {\n console.log(error);\n this.webcamVideoElement.src = null;\n }\n this.isClosed = true;\n }\n // Override toArray() function to prevent collecting.\n toArray() {\n throw new Error('Can not convert infinite video stream to array.');\n }\n}\n//# sourceMappingURL=webcam_iterator.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\n/**\n * Represents a data source readable as a stream of binary data chunks.\n *\n * Because `Dataset`s can be read repeatedly (via `Dataset.iterator()`), this\n * provides a means to repeatedly create streams from the underlying data\n * sources.\n */\nexport class DataSource {\n}\n// TODO(soergel): consider convenience factory functions here\n// in combination with chainable source->dataset above, e.g.:\n// tf.data.url(...).asCsvDataset().shuffle().batch()\n//# sourceMappingURL=datasource.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { LazyIterator, OneToManyIterator } from './lazy_iterator';\nexport class StringIterator extends LazyIterator {\n /**\n * Splits a string stream on a given separator.\n *\n * It is assumed that the incoming chunk boundaries have no semantic meaning,\n * so conceptually the incoming stream is treated simply as the concatenation\n * of its elements.\n *\n * The outgoing stream provides chunks corresponding to the results of the\n * standard string split() operation (even if such a chunk spanned incoming\n * chunks). The separators are not included.\n *\n * A typical usage is to split a text file (represented as a stream with\n * arbitrary chunk boundaries) into lines.\n *\n * @param upstream A readable stream of strings that can be treated as\n * concatenated.\n * @param separator A character to split on.\n */\n split(separator) {\n return new SplitIterator(this, separator);\n }\n}\n// ============================================================================\n// The following private classes serve to implement the chainable methods\n// on StringIterator. 
Unfortunately they can't be placed in separate files, due\n// to resulting trouble with circular imports.\n// ============================================================================\n// We wanted multiple inheritance, e.g.\n// class SplitIterator extends QueueIterator, StringIterator\n// but the TypeScript mixin approach is a bit hacky, so we take this adapter\n// approach instead.\nclass SplitIterator extends StringIterator {\n constructor(upstream, separator) {\n super();\n this.upstream = upstream;\n this.impl = new SplitIteratorImpl(upstream, separator);\n }\n summary() {\n return this.impl.summary();\n }\n async next() {\n return this.impl.next();\n }\n}\nclass SplitIteratorImpl extends OneToManyIterator {\n constructor(upstream, separator) {\n super();\n this.upstream = upstream;\n this.separator = separator;\n // A partial string at the end of an upstream chunk\n this.carryover = '';\n }\n summary() {\n return `${this.upstream.summary()} -> Split('${this.separator}')`;\n }\n async pump() {\n const chunkResult = await this.upstream.next();\n if (chunkResult.done) {\n if (this.carryover === '') {\n return false;\n }\n // Pretend that the pump succeeded in order to emit the small last batch.\n // The next pump() call will actually fail.\n this.outputQueue.push(this.carryover);\n this.carryover = '';\n return true;\n }\n const lines = chunkResult.value.split(this.separator);\n // Note the behavior: \" ab \".split(' ') === ['', 'ab', '']\n // Thus the carryover may be '' if the separator falls on a chunk\n // boundary; this produces the correct result.\n lines[0] = this.carryover + lines[0];\n for (const line of lines.slice(0, -1)) {\n this.outputQueue.push(line);\n }\n this.carryover = lines[lines.length - 1];\n return true;\n }\n}\n//# sourceMappingURL=string_iterator.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { env } from '@tensorflow/tfjs-core';\nimport { LazyIterator, OneToManyIterator } from './lazy_iterator';\nimport { StringIterator } from './string_iterator';\nexport class ByteChunkIterator extends LazyIterator {\n /**\n * Decode a stream of UTF8-encoded byte arrays to a stream of strings.\n *\n * The byte arrays producetd from the ByteChunkIterator on which this is\n * called will be interpreted as concatenated. No assumptions are made about\n * the boundaries of the incoming chunks, so a multi-byte UTF8 encoding of a\n * character may span the boundary between chunks. This naturally happens,\n * for instance, when reading fixed-size byte arrays from a file.\n */\n decodeUTF8() {\n return new Utf8Iterator(this);\n }\n}\n// ============================================================================\n// The following private classes serve to implement the chainable methods\n// on ByteChunkIterator. 
Unfortunately they can't be placed in separate files,\n// due to resulting trouble with circular imports.\n// ============================================================================\n// We wanted multiple inheritance, e.g.\n// class Utf8Iterator extends QueueIterator, StringIterator\n// but the TypeScript mixin approach is a bit hacky, so we take this adapter\n// approach instead.\nclass Utf8Iterator extends StringIterator {\n constructor(upstream) {\n super();\n this.upstream = upstream;\n this.impl = new Utf8IteratorImpl(upstream);\n }\n summary() {\n return this.impl.summary();\n }\n async next() {\n return this.impl.next();\n }\n}\n/**\n * Decode a stream of UTF8-encoded byte arrays to a stream of strings.\n *\n * This is tricky because the incoming byte array boundaries may disrupt a\n * multi-byte UTF8 character. Thus any incomplete character data at the end of\n * a chunk must be carried over and prepended to the next chunk before\n * decoding. Luckily with native decoder, TextDecoder in browser and\n * string_decoder in node, byte array boundaries are handled automatically.\n *\n * In the context of an input pipeline for machine learning, UTF8 decoding is\n * needed to parse text files containing training examples or prediction\n * requests (e.g., formatted as CSV or JSON). We cannot use the built-in\n * decoding provided by FileReader.readAsText() because here we are in a\n * streaming context, which FileReader does not support.\n *\n * @param upstream A `LazyIterator` of `Uint8Arrays` containing UTF8-encoded\n * text, which should be interpreted as concatenated. No assumptions are\n * made about the boundaries of the incoming chunks, so a multi-byte UTF8\n * encoding of a character may span the boundary between chunks. This\n * naturally happens, for instance, when reading fixed-size byte arrays from a\n * file.\n */\nclass Utf8IteratorImpl extends OneToManyIterator {\n constructor(upstream) {\n super();\n this.upstream = upstream;\n if (env().get('IS_BROWSER')) {\n this.decoder = new TextDecoder('utf-8');\n }\n else {\n // tslint:disable-next-line:no-require-imports\n const { StringDecoder } = require('string_decoder');\n this.decoder = new StringDecoder('utf8');\n }\n }\n summary() {\n return `${this.upstream.summary()} -> Utf8`;\n }\n async pump() {\n const chunkResult = await this.upstream.next();\n let chunk;\n if (chunkResult.done) {\n return false;\n }\n else {\n chunk = chunkResult.value;\n }\n let text;\n if (env().get('IS_BROWSER')) {\n text = this.decoder.decode(chunk, { stream: true });\n }\n else {\n text = this.decoder.write(Buffer.from(chunk.buffer));\n }\n this.outputQueue.push(text);\n return true;\n }\n}\n//# sourceMappingURL=byte_chunk_iterator.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\n// inspired by https://github.com/maxogden/filereader-stream\nimport { env, util } from '@tensorflow/tfjs-core';\nimport { ByteChunkIterator } from './byte_chunk_iterator';\n/**\n * Provide a stream of chunks from a File, Blob, or Uint8Array.\n * @param file The source File, Blob or Uint8Array.\n * @param options Optional settings controlling file reading.\n * @returns a lazy Iterator of Uint8Arrays containing sequential chunks of the\n * input File, Blob or Uint8Array.\n */\nexport class FileChunkIterator extends ByteChunkIterator {\n constructor(file, options = {}) {\n super();\n this.file = file;\n this.options = options;\n util.assert((file instanceof Uint8Array) ||\n (env().get('IS_BROWSER') ?\n (file instanceof File || file instanceof Blob) :\n false), () => 'FileChunkIterator only supports File, Blob and Uint8Array ' +\n 'right now.');\n this.offset = options.offset || 0;\n // default 1MB chunk has tolerable perf on large files\n this.chunkSize = options.chunkSize || 1024 * 1024;\n }\n summary() {\n return `FileChunks ${this.file}`;\n }\n async next() {\n if (this.offset >= ((this.file instanceof Uint8Array) ?\n this.file.byteLength :\n this.file.size)) {\n return { value: null, done: true };\n }\n const chunk = new Promise((resolve, reject) => {\n const end = this.offset + this.chunkSize;\n if (this.file instanceof Uint8Array) {\n // Note if end > this.uint8Array.byteLength, we just get a small last\n // chunk.\n resolve(new Uint8Array(this.file.slice(this.offset, end)));\n }\n else {\n // This branch assumes that this.file type is File or Blob, which\n // means it is in the browser environment.\n // TODO(soergel): is this a performance issue?\n const fileReader = new FileReader();\n fileReader.onload = (event) => {\n let data = fileReader.result;\n // Not sure we can trust the return type of\n // FileReader.readAsArrayBuffer See e.g.\n // https://github.com/node-file-api/FileReader/issues/2\n if (data instanceof ArrayBuffer) {\n data = new Uint8Array(data);\n }\n if (!(data instanceof Uint8Array)) {\n return reject(new TypeError('FileReader returned unknown type.'));\n }\n resolve(data);\n };\n fileReader.onabort = (event) => {\n return reject(new Error('Aborted'));\n };\n fileReader.onerror = (event) => {\n return reject(new Error(event.type));\n };\n // TODO(soergel): better handle onabort, onerror\n // Note if end > this.file.size, we just get a small last chunk.\n const slice = this.file.slice(this.offset, end);\n // We can't use readAsText here (even if we know the file is text)\n // because the slice boundary may fall within a multi-byte character.\n fileReader.readAsArrayBuffer(slice);\n }\n this.offset = end;\n });\n return { value: (await chunk), done: false };\n }\n}\n//# sourceMappingURL=file_chunk_iterator.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nimport { FileChunkIterator } from './file_chunk_iterator';\n/**\n * Provide a stream of chunks from a URL.\n *\n * Note this class first downloads the entire file into memory before providing\n * the first element from the stream. This is because the Fetch API does not\n * yet reliably provide a reader stream for the response body.\n */\nexport async function urlChunkIterator(url, options = {}) {\n let urlString;\n let requestInit;\n if ((typeof url) === 'string') {\n urlString = url;\n }\n else {\n urlString = url.url;\n requestInit = getRequestInitFromRequest(url);\n }\n const response = await util.fetch(urlString, requestInit);\n if (response.ok) {\n const uint8Array = new Uint8Array(await response.arrayBuffer());\n return new FileChunkIterator(uint8Array, options);\n }\n else {\n throw new Error(response.statusText);\n }\n}\n// Generate RequestInit from Request to match tf.util.fetch signature.\nconst getRequestInitFromRequest = (request) => {\n const init = {\n method: request.method,\n headers: request.headers,\n body: request.body,\n mode: request.mode,\n credentials: request.credentials,\n cache: request.cache,\n redirect: request.redirect,\n referrer: request.referrer,\n integrity: request.integrity,\n };\n return init;\n};\n//# sourceMappingURL=url_chunk_iterator.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\n// Skip tslint any type check cause this method is aiming to check type of\n// input.\n// tslint:disable-next-line:no-any\nexport function isLocalPath(source) {\n return (typeof source === 'string') && source.substr(0, 7) === 'file://';\n}\n//# sourceMappingURL=source_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { env } from '@tensorflow/tfjs-core';\nimport { DataSource } from '../datasource';\nimport { FileChunkIterator } from '../iterators/file_chunk_iterator';\nimport { isLocalPath } from '../util/source_util';\n/**\n * Represents a file, blob, or Uint8Array readable as a stream of binary data\n * chunks.\n */\nexport class FileDataSource extends DataSource {\n /**\n * Create a `FileDataSource`.\n *\n * @param input Local file path, or `File`/`Blob`/`Uint8Array` object to\n * read. Local file only works in node environment.\n * @param options Options passed to the underlying `FileChunkIterator`s,\n * such as {chunksize: 1024}.\n */\n constructor(input, options = {}) {\n super();\n this.input = input;\n this.options = options;\n }\n async iterator() {\n if (isLocalPath(this.input) && env().get('IS_NODE')) {\n // tslint:disable-next-line:no-require-imports\n const fs = require('fs');\n this.input = fs.readFileSync(this.input.substr(7));\n }\n // TODO(kangyizhang): Add LocalFileChunkIterator to split local streaming\n // with file in browser.\n return new FileChunkIterator(this.input, this.options);\n }\n}\n//# sourceMappingURL=file_data_source.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { DataSource } from '../datasource';\nimport { urlChunkIterator } from '../iterators/url_chunk_iterator';\nimport { isLocalPath } from '../util/source_util';\nimport { FileDataSource } from './file_data_source';\n/*\n * Represents a URL readable as a stream of binary data chunks.\n */\nexport class URLDataSource extends DataSource {\n /**\n * Create a `URLDataSource`.\n *\n * @param url A source URL string, or a `Request` object.\n * @param options Options passed to the underlying `FileChunkIterator`s,\n * such as {chunksize: 1024}.\n */\n constructor(url, fileOptions = {}) {\n super();\n this.url = url;\n this.fileOptions = fileOptions;\n }\n // TODO(soergel): provide appropriate caching options. Currently this\n // will download the URL anew for each call to iterator(). Since we have\n // to treat the downloaded file as a blob/buffer anyway, we may as well retain\n // it-- but that raises GC issues. 
Also we may want a persistent disk cache.\n async iterator() {\n if (isLocalPath(this.url)) {\n return (new FileDataSource(this.url, this.fileOptions))\n .iterator();\n }\n else {\n return urlChunkIterator(this.url, this.fileOptions);\n }\n }\n}\n//# sourceMappingURL=url_data_source.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { datasetFromIteratorFn } from './dataset';\nimport { CSVDataset } from './datasets/csv_dataset';\nimport { iteratorFromFunction } from './iterators/lazy_iterator';\nimport { MicrophoneIterator } from './iterators/microphone_iterator';\nimport { WebcamIterator } from './iterators/webcam_iterator';\nimport { URLDataSource } from './sources/url_data_source';\n/**\n * Create a `CSVDataset` by reading and decoding CSV file(s) from provided URL\n * or local path if it's in Node environment.\n *\n * Note: If isLabel in columnConfigs is `true` for at least one column, the\n * element in returned `CSVDataset` will be an object of\n * `{xs:features, ys:labels}`: xs is a dict of features key/value pairs, ys\n * is a dict of labels key/value pairs. If no column is marked as label,\n * returns a dict of features only.\n *\n * ```js\n * const csvUrl =\n * 'https://storage.googleapis.com/tfjs-examples/multivariate-linear-regression/data/boston-housing-train.csv';\n *\n * async function run() {\n * // We want to predict the column \"medv\", which represents a median value of\n * // a home (in $1000s), so we mark it as a label.\n * const csvDataset = tf.data.csv(\n * csvUrl, {\n * columnConfigs: {\n * medv: {\n * isLabel: true\n * }\n * }\n * });\n *\n * // Number of features is the number of column names minus one for the label\n * // column.\n * const numOfFeatures = (await csvDataset.columnNames()).length - 1;\n *\n * // Prepare the Dataset for training.\n * const flattenedDataset =\n * csvDataset\n * .map(({xs, ys}) =>\n * {\n * // Convert xs(features) and ys(labels) from object form (keyed by\n * // column name) to array form.\n * return {xs:Object.values(xs), ys:Object.values(ys)};\n * })\n * .batch(10);\n *\n * // Define the model.\n * const model = tf.sequential();\n * model.add(tf.layers.dense({\n * inputShape: [numOfFeatures],\n * units: 1\n * }));\n * model.compile({\n * optimizer: tf.train.sgd(0.000001),\n * loss: 'meanSquaredError'\n * });\n *\n * // Fit the model using the prepared Dataset\n * return model.fitDataset(flattenedDataset, {\n * epochs: 10,\n * callbacks: {\n * onEpochEnd: async (epoch, logs) => {\n * console.log(epoch + ':' + logs.loss);\n * }\n * }\n * });\n * }\n *\n * await run();\n * ```\n *\n * @param source URL or local path to get CSV file. 
If it's a local path, it\n * must have prefix `file://` and it only works in node environment.\n * @param csvConfig (Optional) A CSVConfig object that contains configurations\n * of reading and decoding from CSV file(s).\n *\n * @doc {\n * heading: 'Data',\n * subheading: 'Creation',\n * namespace: 'data',\n * configParamIndices: [1]\n * }\n */\nexport function csv(source, csvConfig = {}) {\n return new CSVDataset(new URLDataSource(source), csvConfig);\n}\n/**\n * Create a `Dataset` that produces each element by calling a provided function.\n *\n * Note that repeated iterations over this `Dataset` may produce different\n * results, because the function will be called anew for each element of each\n * iteration.\n *\n * Also, beware that the sequence of calls to this function may be out of order\n * in time with respect to the logical order of the Dataset. This is due to the\n * asynchronous lazy nature of stream processing, and depends on downstream\n * transformations (e.g. .shuffle()). If the provided function is pure, this is\n * no problem, but if it is a closure over a mutable state (e.g., a traversal\n * pointer), then the order of the produced elements may be scrambled.\n *\n * ```js\n * let i = -1;\n * const func = () =>\n * ++i < 5 ? {value: i, done: false} : {value: null, done: true};\n * const ds = tf.data.func(func);\n * await ds.forEachAsync(e => console.log(e));\n * ```\n *\n * @param f A function that produces one data element on each call.\n */\nexport function func(f) {\n const iter = iteratorFromFunction(f);\n return datasetFromIteratorFn(async () => iter);\n}\n/**\n * Create a `Dataset` that produces each element from provided JavaScript\n * generator, which is a function*\n * (https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Iterators_and_Generators#Generator_functions),\n * or a function that returns an\n * iterator\n * (https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Iterators_and_Generators#Generator_functions).\n *\n * The returned iterator should have `.next()` function that returns element in\n * format of `{value: TensorContainer, done:boolean}`.\n *\n * Example of creating a dataset from an iterator factory:\n * ```js\n * function makeIterator() {\n * const numElements = 10;\n * let index = 0;\n *\n * const iterator = {\n * next: () => {\n * let result;\n * if (index < numElements) {\n * result = {value: index, done: false};\n * index++;\n * return result;\n * }\n * return {value: index, done: true};\n * }\n * };\n * return iterator;\n * }\n * const ds = tf.data.generator(makeIterator);\n * await ds.forEachAsync(e => console.log(e));\n * ```\n *\n * Example of creating a dataset from a generator:\n * ```js\n * function* dataGenerator() {\n * const numElements = 10;\n * let index = 0;\n * while (index < numElements) {\n * const x = index;\n * index++;\n * yield x;\n * }\n * }\n *\n * const ds = tf.data.generator(dataGenerator);\n * await ds.forEachAsync(e => console.log(e));\n * ```\n *\n * @param generator A Javascript generator function that returns a JavaScript\n * iterator.\n *\n * @doc {\n * heading: 'Data',\n * subheading: 'Creation',\n * namespace: 'data',\n * configParamIndices: [1]\n * }\n */\nexport function generator(generator) {\n return datasetFromIteratorFn(async () => {\n const gen = await generator();\n return iteratorFromFunction(() => gen.next());\n });\n}\n/**\n * Create an iterator that generate `Tensor`s from webcam video stream. 
This API\n * only works in Browser environment when the device has webcam.\n *\n * Note: this code snippet only works when the device has a webcam. It will\n * request permission to open the webcam when running.\n * ```js\n * const videoElement = document.createElement('video');\n * videoElement.width = 100;\n * videoElement.height = 100;\n * const cam = await tf.data.webcam(videoElement);\n * const img = await cam.capture();\n * img.print();\n * cam.stop();\n * ```\n *\n * @param webcamVideoElement A `HTMLVideoElement` used to play video from\n * webcam. If this element is not provided, a hidden `HTMLVideoElement` will\n * be created. In that case, `resizeWidth` and `resizeHeight` must be\n * provided to set the generated tensor shape.\n * @param webcamConfig A `WebcamConfig` object that contains configurations of\n * reading and manipulating data from webcam video stream.\n *\n * @doc {\n * heading: 'Data',\n * subheading: 'Creation',\n * namespace: 'data',\n * ignoreCI: true\n * }\n */\nexport async function webcam(webcamVideoElement, webcamConfig) {\n return WebcamIterator.create(webcamVideoElement, webcamConfig);\n}\n/**\n * Create an iterator that generate frequency-domain spectrogram `Tensor`s from\n * microphone audio stream with browser's native FFT. This API only works in\n * browser environment when the device has microphone.\n *\n * Note: this code snippet only works when the device has a microphone. It will\n * request permission to open the microphone when running.\n * ```js\n * const mic = await tf.data.microphone({\n * fftSize: 1024,\n * columnTruncateLength: 232,\n * numFramesPerSpectrogram: 43,\n * sampleRateHz:44100,\n * includeSpectrogram: true,\n * includeWaveform: true\n * });\n * const audioData = await mic.capture();\n * const spectrogramTensor = audioData.spectrogram;\n * spectrogramTensor.print();\n * const waveformTensor = audioData.waveform;\n * waveformTensor.print();\n * mic.stop();\n * ```\n *\n * @param microphoneConfig A `MicrophoneConfig` object that contains\n * configurations of reading audio data from microphone.\n *\n * @doc {\n * heading: 'Data',\n * subheading: 'Creation',\n * namespace: 'data',\n * ignoreCI: true\n * }\n */\nexport async function microphone(microphoneConfig) {\n return MicrophoneIterator.create(microphoneConfig);\n}\n//# sourceMappingURL=readers.js.map", "/** @license See the LICENSE file. */\n// This code is auto-generated, do not modify this file!\nconst version = '2.7.0';\nexport { version };\n//# sourceMappingURL=version.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport { array, Dataset, zip } from './dataset';\nexport { CSVDataset } from './datasets/csv_dataset';\nexport { TextLineDataset } from './datasets/text_line_dataset';\nexport { csv, func, generator, microphone, webcam } from './readers';\nexport { FileDataSource } from './sources/file_data_source';\nexport { URLDataSource } from './sources/url_data_source';\nexport { version as version_data } from './version';\n//# sourceMappingURL=index.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nexport function assertNotComplex(tensor, opName) {\n if (!Array.isArray(tensor)) {\n tensor = [tensor];\n }\n tensor.forEach(t => {\n if (t != null) {\n util.assert(t.dtype !== 'complex64', () => `${opName} does not support complex64 tensors in the CPU backend.`);\n }\n });\n}\n//# sourceMappingURL=cpu_util.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as tf from '@tensorflow/tfjs-core';\nimport { backend_util, DataStorage, engine, env, kernel_impls, KernelBackend, max, slice_util, TensorBuffer, upcastType, util } from '@tensorflow/tfjs-core';\nconst nonMaxSuppressionV3Impl = kernel_impls.nonMaxSuppressionV3Impl;\nconst split = kernel_impls.split;\nconst tile = kernel_impls.tile;\nconst topkImpl = kernel_impls.topkImpl;\nconst whereImpl = kernel_impls.whereImpl;\nimport * as seedrandom from 'seedrandom';\nimport { assertNotComplex } from './cpu_util';\nexport class MathBackendCPU extends KernelBackend {\n constructor() {\n super();\n this.blockSize = 48;\n this.firstUse = true;\n this.data = new DataStorage(this, engine());\n }\n write(values, shape, dtype) {\n if (this.firstUse) {\n this.firstUse = false;\n if (env().get('IS_NODE')) {\n backend_util.warn('\\n============================\\n' +\n 'Hi there \uD83D\uDC4B. Looks like you are running TensorFlow.js in ' +\n 'Node.js. To speed things up dramatically, install our node ' +\n 'backend, which binds to TensorFlow C++, by running ' +\n 'npm i @tensorflow/tfjs-node, ' +\n 'or npm i @tensorflow/tfjs-node-gpu if you have CUDA. ' +\n 'Then call require(\\'@tensorflow/tfjs-node\\'); (-gpu ' +\n 'suffix for CUDA) at the start of your program. ' +\n 'Visit https://github.com/tensorflow/tfjs-node for more details.' +\n '\\n============================');\n }\n }\n const dataId = {};\n this.data.set(dataId, { values, dtype, refCount: 1 });\n return dataId;\n }\n /**\n * Create a data bucket in cpu backend.\n * @param shape Shape of the `TensorInfo`.\n * @param dtype DType of the `TensorInfo`.\n * @param values The value of the `TensorInfo` stored as a flattened array.\n */\n makeTensorInfo(shape, dtype, values) {\n let outId;\n if (dtype === 'string' && values != null && values.length > 0 &&\n util.isString(values[0])) {\n const encodedValues = values.map(d => util.encodeString(d));\n outId = this.write(encodedValues, shape, dtype);\n }\n else {\n outId = this.write(values, shape, dtype);\n }\n return { dataId: outId, shape, dtype };\n }\n /** Increase refCount of a `TensorData`. */\n incRef(dataId) {\n const tensorData = this.data.get(dataId);\n tensorData.refCount++;\n }\n /** Decrease refCount of a `TensorData`. 
*/\n decRef(dataId) {\n if (this.data.has(dataId)) {\n const tensorData = this.data.get(dataId);\n tensorData.refCount--;\n }\n }\n move(dataId, values, shape, dtype) {\n this.data.set(dataId, { values, dtype, refCount: 1 });\n }\n numDataIds() {\n return this.data.numDataIds();\n }\n async read(dataId) {\n return this.readSync(dataId);\n }\n readSync(dataId) {\n const { dtype, complexTensorInfos } = this.data.get(dataId);\n if (dtype === 'complex64') {\n const realValues = this.readSync(complexTensorInfos.real.dataId);\n const imagValues = this.readSync(complexTensorInfos.imag.dataId);\n return backend_util.mergeRealAndImagArrays(realValues, imagValues);\n }\n return this.data.get(dataId).values;\n }\n bufferSync(t) {\n const data = this.readSync(t.dataId);\n let decodedData = data;\n if (t.dtype === 'string') {\n try {\n // Decode the bytes into string.\n decodedData = data.map(d => util.decodeString(d));\n }\n catch (_a) {\n throw new Error('Failed to decode encoded string bytes into utf-8');\n }\n }\n return tf.buffer(t.shape, t.dtype, decodedData);\n }\n makeOutput(values, shape, dtype) {\n const dataId = this.write(values, shape, dtype);\n return engine().makeTensorFromDataId(dataId, shape, dtype, this);\n }\n disposeData(dataId) {\n if (this.data.has(dataId)) {\n const { complexTensorInfos } = this.data.get(dataId);\n if (complexTensorInfos != null) {\n this.disposeData(complexTensorInfos.real.dataId);\n this.disposeData(complexTensorInfos.imag.dataId);\n }\n this.data.delete(dataId);\n }\n }\n disposeIntermediateTensorInfo(tensorInfo) {\n const dataId = tensorInfo.dataId;\n if (this.data.has(dataId)) {\n const tensorData = this.data.get(dataId);\n tensorData.refCount--;\n if (tensorData.refCount < 1) {\n this.disposeData(dataId);\n }\n }\n }\n async time(f) {\n const start = util.now();\n f();\n const kernelMs = util.now() - start;\n return { kernelMs };\n }\n memory() {\n return {\n // Unreliable due to automatic gc. The numbers above are cumulative.\n unreliable: true,\n reasons: ['The reported memory is an upper bound. 
Due to automatic garbage ' +\n 'collection, the true allocated memory may be less.']\n };\n }\n stridedSlice(x, begin, end, strides) {\n assertNotComplex(x, 'stridedSlice');\n const outShape = slice_util.computeOutShape(begin, end, strides);\n if (outShape.some(axis => axis === 0)) {\n return tf.tensor([], outShape);\n }\n const buffer = tf.buffer(outShape, x.dtype);\n const xBuf = this.bufferSync(x);\n for (let i = 0; i < buffer.size; i++) {\n const loc = buffer.indexToLoc(i);\n const newLoc = new Array(loc.length);\n for (let j = 0; j < newLoc.length; j++) {\n newLoc[j] = loc[j] * strides[j] + begin[j];\n }\n buffer.set(xBuf.get(...newLoc), ...loc);\n }\n return buffer.toTensor();\n }\n diag(x) {\n const xVals = this.readSync(x.dataId);\n const buffer = tf.buffer([x.size, x.size], x.dtype);\n const vals = buffer.values;\n for (let i = 0; i < xVals.length; i++) {\n vals[i * x.size + i] = xVals[i];\n }\n return buffer.toTensor();\n }\n unstack(x, axis) {\n const num = x.shape[axis];\n const outShape = new Array(x.rank - 1);\n let outIndex = 0;\n for (let i = 0; i < x.rank; i++) {\n if (i !== axis) {\n outShape[outIndex++] = x.shape[i];\n }\n }\n const begin = new Array(x.rank).fill(0);\n const size = x.shape.slice();\n size[axis] = 1;\n const res = new Array(num);\n for (let i = 0; i < res.length; i++) {\n begin[axis] = i;\n res[i] = tf.slice(x, begin, size).reshape(outShape);\n }\n return res;\n }\n reverse(x, axis) {\n assertNotComplex(x, 'reverse');\n const buffer = tf.buffer(x.shape, x.dtype);\n const xBuf = this.bufferSync(x);\n for (let i = 0; i < buffer.size; i++) {\n const outLoc = buffer.indexToLoc(i);\n const inLoc = outLoc.slice();\n axis.forEach(ax => inLoc[ax] = x.shape[ax] - 1 - inLoc[ax]);\n buffer.set(xBuf.get(...inLoc), ...outLoc);\n }\n return buffer.toTensor();\n }\n neg(x) {\n assertNotComplex(x, 'neg');\n // TODO(lina128): Use mul directly once neg is modularized.\n return tf.mul(tf.scalar(-1), x);\n }\n addN(tensors) {\n assertNotComplex(tensors, 'addN');\n const vals = tensors.map(t => this.readSync(t.dataId));\n const result = tf.buffer(tensors[0].shape, tensors[0].dtype);\n const resultVals = result.values;\n for (let i = 0; i < tensors.length; i++) {\n const currVals = vals[i];\n for (let j = 0; j < resultVals.length; j++) {\n resultVals[j] += currVals[j];\n }\n }\n return result.toTensor();\n }\n softmax(logits, dim) {\n const axes = util.parseAxisParam([dim], logits.shape);\n // TODO(annxingyuan): Call maxImpl rather than op as part of softmax kernel\n // modularization.\n const maxLogit = max(logits, axes);\n const expandedShape = backend_util.expandShapeToKeepDim(maxLogit.shape, axes);\n // TODO(lina128): Use sub directly once softmax is modularized.\n const a = tf.sub(logits, maxLogit.reshape(expandedShape));\n const b = tf.exp(a);\n const sumExp = this.sum(b, axes).reshape(expandedShape);\n // TODO(annxingyuan): Call divImpl rather than op as part of softmax\n // kernel modularization.\n return tf.div(b, sumExp);\n }\n pow(a, b) {\n assertNotComplex([a, b], 'pow');\n return this.broadcastedBinaryOp(a, b, a.dtype, (aValue, bValue) => Math.pow(aValue, bValue));\n }\n floorDiv(a, b) {\n assertNotComplex([a, b], 'floorDiv');\n const op = (a, b) => Math.floor(a / b);\n const outputDtype = 'int32';\n return this.broadcastedBinaryOp(a, b, outputDtype, op);\n }\n sum(x, axes) {\n assertNotComplex(x, 'sum');\n backend_util.assertAxesAreInnerMostDims('sum', axes, x.rank);\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const 
resultDtype = upcastType(x.dtype, 'int32');\n const result = tf.zeros(outShape, resultDtype);\n const reduceSize = util.sizeFromShape(reduceShape);\n const vals = this.readSync(result.dataId);\n const aVals = this.readSync(x.dataId);\n for (let i = 0; i < vals.length; ++i) {\n const offset = i * reduceSize;\n let sum = 0;\n for (let j = 0; j < reduceSize; ++j) {\n sum += aVals[offset + j];\n }\n vals[i] = sum;\n }\n return result;\n }\n prod(x, axes) {\n assertNotComplex(x, 'sum');\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const resultDtype = upcastType(x.dtype, 'int32');\n const result = tf.zeros(outShape, resultDtype);\n const reduceSize = util.sizeFromShape(reduceShape);\n const vals = this.readSync(result.dataId);\n const aVals = this.readSync(x.dataId);\n for (let i = 0; i < vals.length; ++i) {\n const offset = i * reduceSize;\n let prod = 1;\n for (let j = 0; j < reduceSize; ++j) {\n prod *= aVals[offset + j];\n }\n vals[i] = prod;\n }\n return result;\n }\n unsortedSegmentSum(x, segmentIds, numSegments) {\n assertNotComplex(x, 'unsortedSegmentSum');\n const res = [];\n // Reshape the segment id's so that they can be broadcast with\n // x. The new shape should be [segmentIds.shape, 1, ..., 1]\n const numIters = x.rank - segmentIds.rank;\n for (let i = 0; i < numIters; ++i) {\n segmentIds = segmentIds.expandDims(i + 1);\n }\n for (let i = 0; i < numSegments; ++i) {\n const segmentId = tf.scalar(i, 'int32');\n const mask = tf.equal(segmentId, segmentIds).asType('float32');\n const sum = mask.mul(x).sum(0);\n res.push(sum);\n }\n return tf.stack(res);\n }\n argMin(x, axis) {\n assertNotComplex(x, 'argMin');\n const axes = [axis];\n backend_util.assertAxesAreInnerMostDims('argMin', axes, x.rank);\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const result = tf.zeros(outShape, 'int32');\n const reduceSize = util.sizeFromShape(reduceShape);\n const vals = this.readSync(result.dataId);\n const aVals = this.readSync(x.dataId);\n for (let i = 0; i < vals.length; ++i) {\n const offset = i * reduceSize;\n let min = aVals[offset];\n let minIndex = 0;\n for (let j = 0; j < reduceSize; ++j) {\n const value = aVals[offset + j];\n if (value < min) {\n min = value;\n minIndex = j;\n }\n }\n vals[i] = minIndex;\n }\n return result;\n }\n argMax(x, axis) {\n assertNotComplex(x, 'argMax');\n const axes = [axis];\n backend_util.assertAxesAreInnerMostDims('argMax', axes, x.rank);\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const result = tf.zeros(outShape, 'int32');\n const reduceSize = util.sizeFromShape(reduceShape);\n const vals = this.readSync(result.dataId);\n const aVals = this.readSync(x.dataId);\n for (let i = 0; i < vals.length; ++i) {\n const offset = i * reduceSize;\n let max = aVals[offset];\n let maxIndex = 0;\n for (let j = 0; j < reduceSize; ++j) {\n const value = aVals[offset + j];\n if (value > max) {\n max = value;\n maxIndex = j;\n }\n }\n vals[i] = maxIndex;\n }\n return result;\n }\n cumsum(x, axis, exclusive, reverse) {\n assertNotComplex(x, 'cumsum');\n if (axis !== x.rank - 1) {\n throw new Error(`backend.cumsum in CPU expects an inner-most axis=${x.rank - 1} ` +\n `but got axis=${axis}`);\n }\n const resultDtype = upcastType(x.dtype, 'int32');\n const result = tf.zeros(x.shape, resultDtype);\n const vals = this.readSync(result.dataId);\n const aVals = this.readSync(x.dataId);\n const finalDim = x.shape[x.rank - 1];\n const indexAdjuster = 
reverse ?\n (i, j) => i + finalDim - j - 1 :\n (i, j) => i + j;\n for (let i = 0; i < aVals.length; i += finalDim) {\n for (let j = 0; j < finalDim; j++) {\n const idx = indexAdjuster(i, j);\n if (j === 0) {\n vals[idx] = exclusive ? 0 : aVals[idx];\n }\n else {\n const prevIdx = indexAdjuster(i, j - 1);\n vals[idx] = exclusive ? aVals[prevIdx] + vals[prevIdx] :\n aVals[idx] + vals[prevIdx];\n }\n }\n }\n return result;\n }\n equal(a, b) {\n assertNotComplex([a, b], 'equal');\n return this.broadcastedBinaryOp(a, b, 'bool', (aVal, bVal) => {\n return (aVal === bVal) ? 1 : 0;\n });\n }\n notEqual(a, b) {\n assertNotComplex([a, b], 'notEqual');\n return this.broadcastedBinaryOp(a, b, 'bool', (aVal, bVal) => {\n return (aVal !== bVal) ? 1 : 0;\n });\n }\n less(a, b) {\n assertNotComplex([a, b], 'less');\n return this.broadcastedBinaryOp(a, b, 'bool', (aVal, bVal) => {\n return (aVal < bVal) ? 1 : 0;\n });\n }\n lessEqual(a, b) {\n assertNotComplex([a, b], 'lessEqual');\n return this.broadcastedBinaryOp(a, b, 'bool', (aVal, bVal) => {\n return (aVal <= bVal) ? 1 : 0;\n });\n }\n greater(a, b) {\n assertNotComplex([a, b], 'greater');\n return this.broadcastedBinaryOp(a, b, 'bool', (aVal, bVal) => {\n return (aVal > bVal) ? 1 : 0;\n });\n }\n greaterEqual(a, b) {\n assertNotComplex([a, b], 'greaterEqual');\n return this.broadcastedBinaryOp(a, b, 'bool', (aVal, bVal) => {\n return (aVal >= bVal) ? 1 : 0;\n });\n }\n logicalAnd(a, b) {\n assertNotComplex([a, b], 'logicalAnd');\n return this.broadcastedBinaryOp(a, b, 'bool', (aVal, bVal) => {\n return aVal && bVal;\n });\n }\n logicalOr(a, b) {\n assertNotComplex([a, b], 'logicalOr');\n return this.broadcastedBinaryOp(a, b, 'bool', (aVal, bVal) => {\n return aVal || bVal;\n });\n }\n select(condition, a, b) {\n assertNotComplex([condition, a, b], 'select');\n const values = this.readSync(condition.dataId);\n const aValues = this.readSync(a.dataId);\n const bValues = this.readSync(b.dataId);\n const result = tf.zeros(a.shape, upcastType(a.dtype, b.dtype));\n const newValues = this.readSync(result.dataId);\n let index = 0;\n const offset = condition.rank === 0 || condition.rank > 1 || a.rank === 1 ?\n 1 :\n util.sizeFromShape(a.shape.slice(1));\n for (let i = 0; i < values.length; i++) {\n for (let j = 0; j < offset; j++) {\n if (values[i] === 1) {\n newValues[index++] = aValues[i];\n }\n else {\n newValues[index++] = bValues[i];\n }\n }\n }\n return result;\n }\n where(condition) {\n assertNotComplex([condition], 'where');\n const condVals = this.readSync(condition.dataId);\n return whereImpl(condition.shape, condVals);\n }\n topk(x, k, sorted) {\n assertNotComplex(x, 'topk');\n const xVals = this.readSync(x.dataId);\n return topkImpl(xVals, x.shape, x.dtype, k, sorted);\n }\n min(x, axes) {\n assertNotComplex(x, 'min');\n backend_util.assertAxesAreInnerMostDims('min', axes, x.rank);\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const result = tf.zeros(outShape, x.dtype);\n const reduceSize = util.sizeFromShape(reduceShape);\n const vals = this.readSync(result.dataId);\n const aVals = this.readSync(x.dataId);\n for (let i = 0; i < vals.length; ++i) {\n const offset = i * reduceSize;\n let min = aVals[offset];\n for (let j = 0; j < reduceSize; ++j) {\n const value = aVals[offset + j];\n if (value < min) {\n min = value;\n }\n }\n vals[i] = min;\n }\n return result;\n }\n minimum(a, b) {\n assertNotComplex([a, b], 'minimum');\n return this.broadcastedBinaryOp(a, b, a.dtype, (aVal, bVal) => Math.min(aVal, 
bVal));\n }\n mod(a, b) {\n assertNotComplex([a, b], 'mod');\n return this.broadcastedBinaryOp(a, b, a.dtype, (aVal, bVal) => {\n const rem = aVal % bVal;\n if ((aVal < 0 && bVal < 0) || (aVal >= 0 && bVal >= 0)) {\n return rem;\n }\n else {\n return (rem + bVal) % bVal;\n }\n });\n }\n maximum(a, b) {\n assertNotComplex([a, b], 'maximum');\n return this.broadcastedBinaryOp(a, b, a.dtype, (aVal, bVal) => Math.max(aVal, bVal));\n }\n all(x, axes) {\n assertNotComplex(x, 'all');\n backend_util.assertAxesAreInnerMostDims('all', axes, x.rank);\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const result = tf.zeros(outShape, x.dtype);\n const reduceSize = util.sizeFromShape(reduceShape);\n const vals = this.readSync(result.dataId);\n const aVals = this.readSync(x.dataId);\n for (let i = 0; i < vals.length; ++i) {\n const offset = i * reduceSize;\n let all = aVals[offset];\n for (let j = 0; j < reduceSize; ++j) {\n const value = aVals[offset + j];\n all = all && value;\n }\n vals[i] = all;\n }\n return result;\n }\n any(x, axes) {\n assertNotComplex(x, 'any');\n backend_util.assertAxesAreInnerMostDims('any', axes, x.rank);\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const result = tf.zeros(outShape, x.dtype);\n const reduceSize = util.sizeFromShape(reduceShape);\n const vals = this.readSync(result.dataId);\n const aVals = this.readSync(x.dataId);\n for (let i = 0; i < vals.length; ++i) {\n const offset = i * reduceSize;\n let anyVal = aVals[offset];\n for (let j = 0; j < reduceSize; ++j) {\n const value = aVals[offset + j];\n anyVal = anyVal || value;\n }\n vals[i] = anyVal;\n }\n return result;\n }\n squaredDifference(a, b) {\n assertNotComplex([a, b], 'squaredDifference');\n return this.broadcastedBinaryOp(a, b, a.dtype, (aVal, bVal) => {\n const diff = aVal - bVal;\n return diff * diff;\n });\n }\n eluDer(dy, y) {\n assertNotComplex([dy, y], 'eluDer');\n const resultValues = new Float32Array(y.size);\n const values = this.readSync(y.dataId);\n const dyValues = this.readSync(dy.dataId);\n for (let i = 0; i < values.length; ++i) {\n const v = values[i];\n if (v >= 1) {\n resultValues[i] = dyValues[i];\n }\n else {\n resultValues[i] = dyValues[i] * (v + 1);\n }\n }\n return this.makeOutput(resultValues, y.shape, 'float32');\n }\n atan2(a, b) {\n assertNotComplex([a, b], 'atan2');\n return this.broadcastedBinaryOp(a, b, a.dtype, (aValue, bValue) => Math.atan2(aValue, bValue));\n }\n tile(x, reps) {\n assertNotComplex(x, 'tile');\n return tile(this.bufferSync(x), reps);\n }\n gather(x, indices, axis) {\n assertNotComplex([x, indices], 'gather');\n const newShape = x.shape.slice();\n const indicesValues = this.readSync(indices.dataId);\n newShape[axis] = indicesValues.length;\n const result = tf.buffer(newShape, x.dtype);\n const xBuf = this.bufferSync(x);\n for (let i = 0; i < result.size; ++i) {\n const newLoc = result.indexToLoc(i);\n const originalLoc = newLoc.slice();\n originalLoc[axis] = indicesValues[newLoc[axis]];\n const originalIndex = xBuf.locToIndex(originalLoc);\n result.values[i] = xBuf.values[originalIndex];\n }\n return result.toTensor();\n }\n batchToSpaceND(x, blockShape, crops) {\n assertNotComplex([x], 'batchToSpaceND');\n const prod = blockShape.reduce((a, b) => a * b);\n const reshaped = backend_util.getReshaped(x.shape, blockShape, prod);\n const permuted = backend_util.getPermuted(reshaped.length, blockShape.length);\n const reshapedPermuted = backend_util.getReshapedPermuted(x.shape, 
blockShape, prod);\n const sliceBeginCoords = backend_util.getSliceBeginCoords(crops, blockShape.length);\n const sliceSize = backend_util.getSliceSize(reshapedPermuted, crops, blockShape.length);\n return tf.transpose(x.reshape(reshaped), permuted)\n .reshape(reshapedPermuted)\n .slice(sliceBeginCoords, sliceSize);\n }\n pool3d(x, convInfo, poolType) {\n assertNotComplex(x, 'pool3d');\n const strideDepth = convInfo.strideDepth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationDepth = convInfo.dilationDepth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterDepth = convInfo.effectiveFilterDepth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padFront = convInfo.padInfo.front;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n const initialValue = (poolType === 'max' ? Number.NEGATIVE_INFINITY :\n Number.POSITIVE_INFINITY);\n const xValues = this.readSync(x.dataId);\n const output = tf.buffer(convInfo.outShape, x.dtype);\n const outputVals = output.values;\n const outputBatchStrides = convInfo.outShape[1] * convInfo.outShape[2] *\n convInfo.outShape[3] * convInfo.outShape[4];\n const outputDepthStrides = convInfo.outShape[2] * convInfo.outShape[3] * convInfo.outShape[4];\n const outputRowStrides = convInfo.outShape[3] * convInfo.outShape[4];\n const outputColStrides = convInfo.outShape[4];\n for (let batch = 0; batch < convInfo.batchSize; ++batch) {\n const outputBatchOffset = batch * outputBatchStrides;\n const inputBatchOffset = batch * x.strides[0];\n for (let channel = 0; channel < convInfo.inChannels; ++channel) {\n for (let yDepth = 0; yDepth < convInfo.outDepth; ++yDepth) {\n const xDepthCorner = yDepth * strideDepth - padFront;\n let xDepthMin = xDepthCorner;\n while (xDepthMin < 0) {\n xDepthMin += dilationDepth;\n }\n const xDepthMax = Math.min(convInfo.inDepth, effectiveFilterDepth + xDepthCorner);\n const outputDepthOffset = outputBatchOffset + yDepth * outputDepthStrides;\n for (let yRow = 0; yRow < convInfo.outHeight; ++yRow) {\n const xRowCorner = yRow * strideHeight - padTop;\n let xRowMin = xRowCorner;\n while (xRowMin < 0) {\n xRowMin += dilationHeight;\n }\n const xRowMax = Math.min(convInfo.inHeight, effectiveFilterHeight + xRowCorner);\n const outputRowOffset = outputDepthOffset + yRow * outputRowStrides;\n for (let yCol = 0; yCol < convInfo.outWidth; ++yCol) {\n const xColCorner = yCol * strideWidth - padLeft;\n let xColMin = xColCorner;\n while (xColMin < 0) {\n xColMin += dilationWidth;\n }\n const xColMax = Math.min(convInfo.inWidth, effectiveFilterWidth + xColCorner);\n // Shader code begins\n const outputColOffset = outputRowOffset + yCol * outputColStrides;\n let minMaxValue = initialValue;\n let avgValue = 0;\n let count = 0;\n for (let xDepth = xDepthMin; xDepth < xDepthMax; xDepth += dilationDepth) {\n const xDepthOffset = inputBatchOffset + xDepth * x.strides[1];\n for (let xRow = xRowMin; xRow < xRowMax; xRow += dilationHeight) {\n const xRowOffset = xDepthOffset + xRow * x.strides[2];\n for (let xCol = xColMin; xCol < xColMax; xCol += dilationWidth) {\n const xColOffset = xRowOffset + xCol * x.strides[3];\n const pixel = xValues[xColOffset + channel];\n if ((poolType === 'max' && pixel > minMaxValue)) {\n minMaxValue = pixel;\n }\n else if (poolType === 'avg') {\n avgValue += pixel;\n count++;\n }\n if 
(isNaN(minMaxValue)) {\n break;\n }\n }\n if (isNaN(minMaxValue)) {\n break;\n }\n }\n if (isNaN(minMaxValue)) {\n break;\n }\n }\n const outputOffset = outputColOffset + channel;\n outputVals[outputOffset] =\n poolType === 'avg' ? avgValue / count : minMaxValue;\n }\n }\n }\n }\n }\n return output.toTensor();\n }\n avgPool3d(x, convInfo) {\n assertNotComplex(x, 'avgPool3d');\n return this.pool3d(x, convInfo, 'avg').toFloat();\n }\n avgPool3dBackprop(dy, x, convInfo) {\n assertNotComplex([dy, x], 'avgPool3dBackprop');\n const strideDepth = convInfo.strideDepth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const filterDepth = convInfo.filterDepth;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const dilationDepth = convInfo.dilationDepth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterDepth = convInfo.effectiveFilterDepth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padFront = effectiveFilterDepth - 1 - convInfo.padInfo.front;\n const padLeft = effectiveFilterWidth - 1 - convInfo.padInfo.left;\n const padTop = effectiveFilterHeight - 1 - convInfo.padInfo.top;\n const dx = tf.buffer(x.shape, 'float32');\n const avgMultiplier = 1 / (filterDepth * filterHeight * filterWidth);\n const dyBuf = this.bufferSync(dy);\n for (let batch = 0; batch < convInfo.batchSize; ++batch) {\n for (let channel = 0; channel < convInfo.inChannels; ++channel) {\n for (let dxDepth = 0; dxDepth < convInfo.inDepth; ++dxDepth) {\n for (let dxRow = 0; dxRow < convInfo.inHeight; ++dxRow) {\n for (let dxCol = 0; dxCol < convInfo.inWidth; ++dxCol) {\n // Shader code begins.\n const dyDepthCorner = dxDepth - padFront;\n const dyRowCorner = dxRow - padTop;\n const dyColCorner = dxCol - padLeft;\n let dotProd = 0;\n for (let wDepth = 0; wDepth < effectiveFilterDepth; wDepth += dilationDepth) {\n const dyDepth = (dyDepthCorner + wDepth) / strideDepth;\n if (dyDepth < 0 || dyDepth >= convInfo.outDepth ||\n Math.floor(dyDepth) !== dyDepth) {\n continue;\n }\n for (let wRow = 0; wRow < effectiveFilterHeight; wRow += dilationHeight) {\n const dyRow = (dyRowCorner + wRow) / strideHeight;\n if (dyRow < 0 || dyRow >= convInfo.outHeight ||\n Math.floor(dyRow) !== dyRow) {\n continue;\n }\n for (let wCol = 0; wCol < effectiveFilterWidth; wCol += dilationWidth) {\n const dyCol = (dyColCorner + wCol) / strideWidth;\n if (dyCol < 0 || dyCol >= convInfo.outWidth ||\n Math.floor(dyCol) !== dyCol) {\n continue;\n }\n const pixel = dyBuf.get(batch, dyDepth, dyRow, dyCol, channel);\n dotProd += pixel;\n }\n }\n }\n dx.set(dotProd * avgMultiplier, batch, dxDepth, dxRow, dxCol, channel);\n }\n }\n }\n }\n }\n return dx.toTensor();\n }\n maxPool3d(x, convInfo) {\n assertNotComplex(x, 'maxPool3d');\n return this.pool3d(x, convInfo, 'max').toFloat();\n }\n maxPool3dPositions(x, convInfo) {\n const maxPositions = tf.buffer(convInfo.outShape, 'int32');\n const strideDepth = convInfo.strideDepth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationDepth = convInfo.dilationDepth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterDepth = convInfo.effectiveFilterDepth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = 
convInfo.effectiveFilterWidth;\n const padFront = convInfo.padInfo.front;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n const xBuf = this.bufferSync(x);\n for (let batch = 0; batch < convInfo.batchSize; ++batch) {\n for (let channel = 0; channel < convInfo.inChannels; ++channel) {\n for (let yDepth = 0; yDepth < convInfo.outDepth; ++yDepth) {\n const xDepthCorner = yDepth * strideDepth - padFront;\n let xDepthMin = xDepthCorner;\n while (xDepthMin < 0) {\n xDepthMin += dilationDepth;\n }\n const xDepthMax = Math.min(convInfo.inDepth, effectiveFilterDepth + xDepthCorner);\n for (let yRow = 0; yRow < convInfo.outHeight; ++yRow) {\n const xRowCorner = yRow * strideHeight - padTop;\n let xRowMin = xRowCorner;\n while (xRowMin < 0) {\n xRowMin += dilationHeight;\n }\n const xRowMax = Math.min(convInfo.inHeight, effectiveFilterHeight + xRowCorner);\n for (let yCol = 0; yCol < convInfo.outWidth; ++yCol) {\n const xColCorner = yCol * strideWidth - padLeft;\n let xColMin = xColCorner;\n while (xColMin < 0) {\n xColMin += dilationWidth;\n }\n const xColMax = Math.min(convInfo.inWidth, effectiveFilterWidth + xColCorner);\n // Shader code begins\n let maxValue = Number.NEGATIVE_INFINITY;\n let maxPosition = -1;\n for (let xDepth = xDepthMin; xDepth < xDepthMax; xDepth += dilationDepth) {\n const wDepth = xDepth - xDepthCorner;\n for (let xRow = xRowMin; xRow < xRowMax; xRow += dilationHeight) {\n const wRow = xRow - xRowCorner;\n for (let xCol = xColMin; xCol < xColMax; xCol += dilationWidth) {\n const wCol = xCol - xColCorner;\n const pixel = xBuf.get(batch, xDepth, xRow, xCol, channel);\n if (pixel >= maxValue) {\n maxValue = pixel;\n maxPosition = wDepth * effectiveFilterHeight *\n effectiveFilterWidth +\n wRow * effectiveFilterHeight + wCol;\n }\n }\n }\n }\n maxPositions.set(maxPosition, batch, yDepth, yRow, yCol, channel);\n }\n }\n }\n }\n }\n return maxPositions.toTensor();\n }\n maxPool3dBackprop(dy, x, y, convInfo) {\n assertNotComplex([x, y], 'maxPool3dBackprop');\n const maxPositions = this.maxPool3dPositions(x, convInfo);\n const strideDepth = convInfo.strideDepth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationDepth = convInfo.dilationDepth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterDepth = convInfo.effectiveFilterDepth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padFront = effectiveFilterDepth - 1 - convInfo.padInfo.front;\n const padLeft = effectiveFilterWidth - 1 - convInfo.padInfo.left;\n const padTop = effectiveFilterHeight - 1 - convInfo.padInfo.top;\n const dx = tf.buffer(x.shape, 'float32');\n const maxPosBuf = this.bufferSync(maxPositions);\n const dyBuf = this.bufferSync(dy);\n for (let batch = 0; batch < convInfo.batchSize; ++batch) {\n for (let channel = 0; channel < convInfo.inChannels; ++channel) {\n for (let dxDepth = 0; dxDepth < convInfo.inDepth; ++dxDepth) {\n for (let dxRow = 0; dxRow < convInfo.inHeight; ++dxRow) {\n for (let dxCol = 0; dxCol < convInfo.inWidth; ++dxCol) {\n // Shader code begins\n const dyDepthCorner = dxDepth - padFront;\n const dyRowCorner = dxRow - padTop;\n const dyColCorner = dxCol - padLeft;\n let dotProd = 0;\n for (let wDepth = 0; wDepth < effectiveFilterDepth; wDepth += dilationDepth) {\n const dyDepth = (dyDepthCorner + wDepth) / strideDepth;\n if (dyDepth < 0 || dyDepth >= 
convInfo.outDepth ||\n Math.floor(dyDepth) !== dyDepth) {\n continue;\n }\n for (let wRow = 0; wRow < effectiveFilterHeight; wRow += dilationHeight) {\n const dyRow = (dyRowCorner + wRow) / strideHeight;\n if (dyRow < 0 || dyRow >= convInfo.outHeight ||\n Math.floor(dyRow) !== dyRow) {\n continue;\n }\n for (let wCol = 0; wCol < effectiveFilterWidth; wCol += dilationWidth) {\n const dyCol = (dyColCorner + wCol) / strideWidth;\n if (dyCol < 0 || dyCol >= convInfo.outWidth ||\n Math.floor(dyCol) !== dyCol) {\n continue;\n }\n const maxPos = effectiveFilterDepth *\n effectiveFilterHeight * effectiveFilterWidth -\n 1 -\n maxPosBuf.get(batch, dyDepth, dyRow, dyCol, channel);\n const curPos = wDepth * effectiveFilterHeight * effectiveFilterWidth +\n wRow * effectiveFilterWidth + wCol;\n const mask = maxPos === curPos ? 1 : 0;\n if (mask === 0) {\n continue;\n }\n const pixel = dyBuf.get(batch, dyDepth, dyRow, dyCol, channel);\n dotProd += pixel * mask;\n }\n }\n }\n dx.set(dotProd, batch, dxDepth, dxRow, dxCol, channel);\n }\n }\n }\n }\n }\n return dx.toTensor();\n }\n resizeBilinear(x, newHeight, newWidth, alignCorners) {\n assertNotComplex(x, 'resizeBilinear');\n const [batch, oldHeight, oldWidth, numChannels] = x.shape;\n const xValues = this.readSync(x.dataId);\n const result = new Float32Array(util.sizeFromShape([batch, newHeight, newWidth, numChannels]));\n const effectiveInputSize = [\n (alignCorners && newHeight > 1) ? oldHeight - 1 : oldHeight,\n (alignCorners && newWidth > 1) ? oldWidth - 1 : oldWidth\n ];\n const effectiveOutputSize = [\n (alignCorners && newHeight > 1) ? newHeight - 1 : newHeight,\n (alignCorners && newWidth > 1) ? newWidth - 1 : newWidth\n ];\n let outputIdx = 0;\n const effectiveRowSizeRatio = effectiveInputSize[0] / effectiveOutputSize[0];\n const effectiveColSizeRatio = effectiveInputSize[1] / effectiveOutputSize[1];\n for (let b = 0; b < batch; b++) {\n for (let r = 0; r < newHeight; r++) {\n const sourceFracRow = effectiveRowSizeRatio * r;\n const sourceRowFloor = Math.floor(sourceFracRow);\n const rowFrac = sourceFracRow - sourceRowFloor;\n const sourceRowCeil = Math.min(oldHeight - 1, Math.ceil(sourceFracRow));\n const topRowOffset = b * x.strides[0] + sourceRowFloor * x.strides[1];\n const botRowOffset = b * x.strides[0] + sourceRowCeil * x.strides[1];\n for (let c = 0; c < newWidth; c++) {\n const sourceFracCol = effectiveColSizeRatio * c;\n const sourceColFloor = Math.floor(sourceFracCol);\n const colFrac = sourceFracCol - sourceColFloor;\n const sourceColCeil = Math.min(oldWidth - 1, Math.ceil(sourceFracCol));\n const topLeftOffest = topRowOffset + sourceColFloor * x.strides[2];\n const botLeftOffset = botRowOffset + sourceColFloor * x.strides[2];\n const topRightOffset = topRowOffset + sourceColCeil * x.strides[2];\n const botRightOffest = botRowOffset + sourceColCeil * x.strides[2];\n for (let d = 0; d < numChannels; d++) {\n // Begin shader.\n // Compute the fractional index of the source.\n const topLeft = xValues[topLeftOffest + d];\n const bottomLeft = xValues[botLeftOffset + d];\n const topRight = xValues[topRightOffset + d];\n const bottomRight = xValues[botRightOffest + d];\n const top = topLeft + (topRight - topLeft) * colFrac;\n const bottom = bottomLeft + (bottomRight - bottomLeft) * colFrac;\n const newValue = top + (bottom - top) * rowFrac;\n result[outputIdx++] = newValue;\n }\n }\n }\n }\n return tf.tensor(result, [batch, newHeight, newWidth, numChannels]);\n }\n resizeBilinearBackprop(dy, x, alignCorners) {\n assertNotComplex([dy, x], 
'resizeBilinearBackprop');\n const [batch, xHeight, xWidth, depth] = x.shape;\n const [, yHeight, yWidth] = dy.shape;\n const output = new Float32Array(batch * xHeight * xWidth * depth);\n // In the backwards pass, we want to find the pixels that were generated\n // for each pixel in the input image the forward pass and add the\n // corresponding coefficient from dy to the gradient (with some\n // interpolation).\n const effectiveXSize = [\n (alignCorners && yHeight > 1) ? xHeight - 1 : xHeight,\n (alignCorners && yWidth > 1) ? xWidth - 1 : xWidth\n ];\n const effectiveYSize = [\n (alignCorners && yHeight > 1) ? yHeight - 1 : yHeight,\n (alignCorners && yWidth > 1) ? yWidth - 1 : yWidth\n ];\n const heightScale = effectiveXSize[0] / effectiveYSize[0];\n const widthScale = effectiveXSize[1] / effectiveYSize[1];\n // Reference implementation\n // tslint:disable-next-line:max-line-length\n // https://github.com/tensorflow/tensorflow/blob/3039375c86a5bbc9610c7725dcaa95d635f87ba2/tensorflow/core/kernels/resize_bilinear_op.cc#L275\n const dyValues = this.readSync(dy.dataId);\n let offset = 0;\n for (let b = 0; b < batch; b++) {\n const bOffset = b * x.strides[0];\n for (let r = 0; r < yHeight; r++) {\n const dxR = r * heightScale;\n const topDxRIndex = Math.floor(dxR);\n const bottomDxRIndex = Math.min(Math.ceil(dxR), xHeight - 1);\n const topDxROffset = bOffset + topDxRIndex * x.strides[1];\n const bottomDxROffset = bOffset + bottomDxRIndex * x.strides[1];\n const dxRLerp = dxR - topDxRIndex;\n const inverseDxRLerp = 1.0 - dxRLerp;\n for (let c = 0; c < yWidth; c++) {\n const dxC = c * widthScale;\n const leftDxCIndex = Math.floor(dxC);\n const rightDxCIndex = Math.min(Math.ceil(dxC), xWidth - 1);\n const dxCLerp = dxC - leftDxCIndex;\n const inverseDxCLerp = 1.0 - dxCLerp;\n const topLeftRCOffset = topDxROffset + leftDxCIndex * x.strides[2];\n const topRightRCOffset = topDxROffset + rightDxCIndex * x.strides[2];\n const bottomLeftRCOffset = bottomDxROffset + leftDxCIndex * x.strides[2];\n const bottomRightRCOffset = bottomDxROffset + rightDxCIndex * x.strides[2];\n const inverseDxRLerpTimesInverseDxCLerp = inverseDxRLerp * inverseDxCLerp;\n const inverseDxRLerpTimesDxCLerp = inverseDxRLerp * dxCLerp;\n const dxRLerpTimesInverseDxCLerp = dxRLerp * inverseDxCLerp;\n const dxRLerpTimesDxCLerp = dxRLerp * dxCLerp;\n for (let d = 0; d < depth; d++) {\n const dyVal = dyValues[offset++];\n output[topLeftRCOffset + d] +=\n dyVal * inverseDxRLerpTimesInverseDxCLerp;\n output[topRightRCOffset + d] += dyVal * inverseDxRLerpTimesDxCLerp;\n output[bottomLeftRCOffset + d] +=\n dyVal * dxRLerpTimesInverseDxCLerp;\n output[bottomRightRCOffset + d] += dyVal * dxRLerpTimesDxCLerp;\n }\n }\n }\n }\n return tf.tensor4d(output, [batch, xWidth, xHeight, depth], x.dtype);\n }\n resizeNearestNeighbor(x, newHeight, newWidth, alignCorners) {\n assertNotComplex(x, 'resizeNearestNeighbor');\n const [batch, oldHeight, oldWidth, numChannels] = x.shape;\n const xValues = this.readSync(x.dataId);\n const output = new Float32Array(batch * newHeight * newWidth * numChannels);\n const effectiveInputSize = [\n (alignCorners && newHeight > 1) ? oldHeight - 1 : oldHeight,\n (alignCorners && newWidth > 1) ? oldWidth - 1 : oldWidth\n ];\n const effectiveOutputSize = [\n (alignCorners && newHeight > 1) ? newHeight - 1 : newHeight,\n (alignCorners && newWidth > 1) ? 
newWidth - 1 : newWidth\n ];\n const effectiveRowSizeRatio = effectiveInputSize[0] / effectiveOutputSize[0];\n const effectiveColSizeRatio = effectiveInputSize[1] / effectiveOutputSize[1];\n let outputOffset = 0;\n for (let b = 0; b < batch; b++) {\n const batchOffset = b * x.strides[0];\n for (let r = 0; r < newHeight; r++) {\n const sourceFracRow = effectiveRowSizeRatio * r;\n const sourceNearestRow = Math.min(oldHeight - 1, alignCorners ? Math.round(sourceFracRow) :\n Math.floor(sourceFracRow));\n const rowOffset = batchOffset + sourceNearestRow * x.strides[1];\n for (let c = 0; c < newWidth; c++) {\n const sourceFracCol = effectiveColSizeRatio * c;\n const sourceNearestCol = Math.min(oldWidth - 1, alignCorners ? Math.round(sourceFracCol) :\n Math.floor(sourceFracCol));\n const colOffset = rowOffset + sourceNearestCol * x.strides[2];\n for (let d = 0; d < numChannels; d++) {\n // Begin shader.\n // Compute the fractional index of the source.\n const newVal = xValues[colOffset + d];\n output[outputOffset++] = newVal;\n }\n }\n }\n }\n return tf.tensor(output, [batch, newHeight, newWidth, numChannels], x.dtype);\n }\n resizeNearestNeighborBackprop(dy, x, alignCorners) {\n assertNotComplex([dy, x], 'resizeNearestNeighborBackprop');\n const [batch, xHeight, xWidth, depth] = x.shape;\n const [, yHeight, yWidth] = dy.shape;\n const output = new Float32Array(batch * xHeight * xWidth * depth);\n const dyValues = this.readSync(dy.dataId);\n // In the backwards pass, we want to find the pixels that were generated\n // for each pixel in the input image the forward pass\n const effectiveXSize = [\n (alignCorners && yHeight > 1) ? xHeight - 1 : xHeight,\n (alignCorners && yWidth > 1) ? xWidth - 1 : xWidth\n ];\n const effectiveYSize = [\n (alignCorners && yHeight > 1) ? yHeight - 1 : yHeight,\n (alignCorners && yWidth > 1) ? yWidth - 1 : yWidth\n ];\n const heightScale = effectiveXSize[0] / effectiveYSize[0];\n const widthScale = effectiveXSize[1] / effectiveYSize[1];\n const invHeightScale = 1 / heightScale;\n const invWidthScale = 1 / widthScale;\n // This defines the size of the window of values around a particular\n // index in dy that we want to search for contributions to dx.\n const winHeight = (Math.ceil(invHeightScale) * 2) + 2;\n const winWidth = (Math.ceil(invWidthScale) * 2) + 2;\n // Loop over the output space.\n for (let b = 0; b < batch; b++) {\n const batchOffset = b * x.strides[0];\n for (let r = 0; r < xHeight; r++) {\n const rowOffset = batchOffset + r * x.strides[1];\n // Compute bounds for where in dy we will look\n const startRLerp = Math.floor(r * invHeightScale);\n const startDyR = Math.floor(startRLerp - (winHeight / 2));\n for (let c = 0; c < xWidth; c++) {\n const colOffset = rowOffset + c * x.strides[2];\n // Compute bounds for where in dy we will look\n const startCLerp = Math.floor(c * invWidthScale);\n const startDyC = Math.floor(startCLerp - (winWidth / 2));\n for (let d = 0; d < depth; d++) {\n let accum = 0;\n // loop over dy\n for (let dyRIndex = 0; dyRIndex < winHeight; dyRIndex++) {\n const dyR = dyRIndex + startDyR;\n // Guard against the window exceeding the bounds of dy\n if (dyR < 0 || dyR >= yHeight) {\n continue;\n }\n const dyROffset = batchOffset + dyR * dy.strides[1];\n const sourceFracRow = dyR * heightScale;\n const sourceNearestRow = Math.min(xHeight - 1, alignCorners ? 
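// --- Editor's note: illustrative sketch, not part of the bundled source ------
// The resizeNearestNeighbor kernel above picks, for each output pixel, a single
// source pixel: it rounds the fractional source index when alignCorners is set
// and floors it otherwise. A minimal sketch of that index math on a plain 2D
// array; names are assumptions for this example only.
function resizeNearestSketch(src, newHeight, newWidth, alignCorners = false) {
  const oldHeight = src.length;
  const oldWidth = src[0].length;
  // Effective sizes mirror the kernel's alignCorners handling.
  const effIn = [alignCorners && newHeight > 1 ? oldHeight - 1 : oldHeight,
                 alignCorners && newWidth > 1 ? oldWidth - 1 : oldWidth];
  const effOut = [alignCorners && newHeight > 1 ? newHeight - 1 : newHeight,
                  alignCorners && newWidth > 1 ? newWidth - 1 : newWidth];
  const rowRatio = effIn[0] / effOut[0];
  const colRatio = effIn[1] / effOut[1];
  const pick = (frac, limit) =>
      Math.min(limit - 1, alignCorners ? Math.round(frac) : Math.floor(frac));
  const out = [];
  for (let r = 0; r < newHeight; r++) {
    const srcR = pick(rowRatio * r, oldHeight);
    const row = [];
    for (let c = 0; c < newWidth; c++) {
      row.push(src[srcR][pick(colRatio * c, oldWidth)]);
    }
    out.push(row);
  }
  return out;
}
// Usage: resizeNearestSketch([[1, 2], [3, 4]], 4, 4) repeats each pixel as a 2x2 block.
// --- End editor's note --------------------------------------------------------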
Math.round(sourceFracRow) :\n Math.floor(sourceFracRow));\n if (r !== sourceNearestRow) {\n continue;\n }\n for (let dyCIndex = 0; dyCIndex < winWidth; dyCIndex++) {\n const dyC = dyCIndex + startDyC;\n // Guard against the window exceeding the bounds of dy\n if (dyC < 0 || dyC >= yWidth) {\n continue;\n }\n const dyCOffset = dyROffset + dyC * dy.strides[2];\n const sourceFracCol = dyC * widthScale;\n const sourceNearestCol = Math.min(xWidth - 1, alignCorners ? Math.round(sourceFracCol) :\n Math.floor(sourceFracCol));\n if (c === sourceNearestCol) {\n accum += dyValues[dyCOffset + d];\n }\n }\n }\n output[colOffset + d] = accum;\n }\n }\n }\n }\n return tf.tensor4d(output, x.shape, x.dtype);\n }\n localResponseNormalization4D(x, depthRadius, bias, alpha, beta) {\n assertNotComplex(x, 'localResponseNormalization4D');\n const channels = x.shape[3];\n const maxD = channels - 1;\n const xValues = this.readSync(x.dataId);\n const size = x.size;\n const result = new Float32Array(size);\n function sumAcrossChannels(offset) {\n const currentChannel = offset % channels;\n let beginSumOffset = offset - currentChannel + Math.max(0, currentChannel - depthRadius);\n const endSumOffset = offset - currentChannel +\n Math.min(currentChannel + depthRadius, maxD);\n let sum = 0.0;\n for (; beginSumOffset <= endSumOffset; beginSumOffset++) {\n const z = xValues[beginSumOffset];\n sum += z * z;\n }\n return sum;\n }\n for (let offset = 0; offset < size; offset++) {\n const sum = sumAcrossChannels(offset);\n const val = xValues[offset] * Math.pow(bias + alpha * sum, -beta);\n result[offset] = val;\n }\n return tf.tensor4d(result, x.shape);\n }\n LRNGrad(dy, inputImage, outputImage, depthRadius, bias, alpha, beta) {\n assertNotComplex(dy, 'LRNGrad');\n const channels = dy.shape[3];\n const dyValues = this.readSync(dy.dataId);\n const inputImageValues = this.readSync(inputImage.dataId);\n const outputImageValues = this.readSync(outputImage.dataId);\n const result = new Float32Array(dy.size);\n const size = dy.size;\n for (let offset = 0; offset < size; offset++) {\n const currentChannel = offset % channels;\n const depthBegin = (offset - currentChannel) + Math.max(0, currentChannel - depthRadius);\n const depthEnd = (offset - currentChannel) +\n Math.min(channels, currentChannel + depthRadius + 1);\n let norm = 0;\n for (let k = depthBegin; k < depthEnd; k++) {\n norm += Math.pow(inputImageValues[k], 2);\n }\n norm = alpha * norm + bias;\n for (let k = depthBegin; k < depthEnd; k++) {\n let dyi = -2 * alpha * beta * inputImageValues[k] *\n outputImageValues[offset] / norm;\n if (offset === k) {\n dyi += Math.pow(norm, -beta);\n }\n dyi *= dyValues[offset];\n result[k] += dyi;\n }\n }\n return tf.tensor4d(result, dy.shape);\n }\n multinomial(logits, normalized, numSamples, seed) {\n assertNotComplex(logits, 'multinomial');\n const probabilities = normalized ? logits : tf.softmax(logits);\n const batchSize = probabilities.shape[0];\n const numEvents = probabilities.shape[1];\n const res = tf.zeros([batchSize, numSamples], 'int32');\n const resVals = this.readSync(res.dataId);\n const probVals = this.readSync(probabilities.dataId);\n for (let b = 0; b < batchSize; ++b) {\n const offset = b * numEvents;\n // The cdf won't include the last event. 
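// --- Editor's note: illustrative sketch, not part of the bundled source ------
// The localResponseNormalization4D kernel above computes, per element,
//   out = x * (bias + alpha * sum(x_k^2 over the depth window)) ^ (-beta),
// where the window spans depthRadius channels on each side. A minimal sketch
// over a single pixel's channel vector; the kernel applies the same formula at
// every spatial position of an NHWC tensor. Names are assumptions for this
// example only.
function lrnSketch(channels, depthRadius, bias, alpha, beta) {
  return channels.map((x, c) => {
    const begin = Math.max(0, c - depthRadius);
    const end = Math.min(channels.length - 1, c + depthRadius);
    let sum = 0;
    for (let k = begin; k <= end; k++) sum += channels[k] * channels[k];
    return x * Math.pow(bias + alpha * sum, -beta);
  });
}
// Usage: lrnSketch([1, 2, 3, 4], 1, 1, 1e-4, 0.75);
// --- End editor's note --------------------------------------------------------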
It will be implicit if no other\n // event happened.\n const cdf = new Float32Array(numEvents - 1);\n cdf[0] = probVals[offset];\n for (let event = 1; event < cdf.length; ++event) {\n cdf[event] = cdf[event - 1] + probVals[offset + event];\n }\n const random = seedrandom.alea(seed.toString());\n const outOffset = b * numSamples;\n for (let sampleId = 0; sampleId < numSamples; ++sampleId) {\n const r = random();\n // Assume last event happened by default.\n resVals[outOffset + sampleId] = cdf.length;\n for (let event = 0; event < cdf.length; event++) {\n if (r < cdf[event]) {\n resVals[outOffset + sampleId] = event;\n break;\n }\n }\n }\n }\n return res;\n }\n oneHot(indices, depth, onValue, offValue) {\n assertNotComplex(indices, 'oneHot');\n const res = new Float32Array(indices.size * depth);\n res.fill(offValue);\n const indicesVal = this.readSync(indices.dataId);\n for (let event = 0; event < indices.size; ++event) {\n if (indicesVal[event] >= 0 && indicesVal[event] < depth) {\n res[event * depth + indicesVal[event]] = onValue;\n }\n }\n return tf.tensor2d(res, [indices.size, depth], 'int32');\n }\n nonMaxSuppression(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold) {\n assertNotComplex(boxes, 'nonMaxSuppression');\n const boxesVals = this.readSync(boxes.dataId);\n const scoresVals = this.readSync(scores.dataId);\n return nonMaxSuppressionV3Impl(boxesVals, scoresVals, maxOutputSize, iouThreshold, scoreThreshold);\n }\n depthToSpace(x, blockSize, dataFormat) {\n util.assert(dataFormat === 'NHWC', () => `Only NHWC dataFormat supported on CPU for depthToSpace. Got ${dataFormat}`);\n util.assert(blockSize > 1, () => `blockSize should be > 1 for depthToSpace, but was: ${blockSize}`);\n const batchSize = x.shape[0];\n const inputHeight = x.shape[1];\n const inputWidth = x.shape[2];\n const inputDepth = x.shape[3];\n const outputHeight = inputHeight * blockSize;\n const outputWidth = inputWidth * blockSize;\n const outputDepth = inputDepth / (blockSize * blockSize);\n const xValues = this.readSync(x.dataId);\n const result = new Float32Array(batchSize * outputHeight * outputWidth * outputDepth);\n let outputIdx = 0;\n for (let b = 0; b < batchSize; ++b) {\n for (let h = 0; h < outputHeight; ++h) {\n const inH = Math.floor(h / blockSize);\n const offsetH = (h % blockSize);\n for (let w = 0; w < outputWidth; ++w) {\n const inW = Math.floor(w / blockSize);\n const offsetW = (w % blockSize);\n const offsetD = (offsetH * blockSize + offsetW) * outputDepth;\n for (let d = 0; d < outputDepth; ++d) {\n const inD = d + offsetD;\n const inputIdx = inD + inputDepth * (inW + inputWidth * (inH + inputHeight * b));\n result[outputIdx++] = xValues[inputIdx];\n }\n }\n }\n }\n return tf.tensor4d(result, [batchSize, outputHeight, outputWidth, outputDepth]);\n }\n broadcastedBinaryOp(a, b, dtype, op) {\n const newShape = backend_util.assertAndGetBroadcastShape(a.shape, b.shape);\n const result = tf.buffer(newShape, dtype);\n const aVals = this.readSync(a.dataId);\n const bVals = this.readSync(b.dataId);\n const aBroadcastDims = backend_util.getBroadcastDims(a.shape, newShape);\n const bBroadcastDims = backend_util.getBroadcastDims(b.shape, newShape);\n const resVals = result.values;\n if (aBroadcastDims.length + bBroadcastDims.length === 0) {\n for (let i = 0; i < resVals.length; ++i) {\n resVals[i] = op(aVals[i % aVals.length], bVals[i % bVals.length]);\n }\n }\n else {\n const aBuf = this.bufferSync(a);\n const bBuf = this.bufferSync(b);\n for (let i = 0; i < resVals.length; ++i) {\n const loc = 
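// --- Editor's note: illustrative sketch, not part of the bundled source ------
// The multinomial kernel above samples by building a cumulative distribution
// (CDF) over the first numEvents - 1 probabilities and scanning it for each
// draw; if the random value exceeds every CDF entry, the last event is chosen
// implicitly. A minimal sketch with Math.random standing in for the seeded
// alea generator used by the kernel; names are assumptions for this example.
function sampleMultinomialSketch(probs, numSamples, rng = Math.random) {
  const cdf = new Float32Array(probs.length - 1);
  cdf[0] = probs[0];
  for (let e = 1; e < cdf.length; e++) cdf[e] = cdf[e - 1] + probs[e];
  const samples = new Int32Array(numSamples);
  for (let s = 0; s < numSamples; s++) {
    const r = rng();
    samples[s] = cdf.length; // assume the last event unless the scan says otherwise
    for (let e = 0; e < cdf.length; e++) {
      if (r < cdf[e]) { samples[s] = e; break; }
    }
  }
  return samples;
}
// Usage: sampleMultinomialSketch([0.1, 0.2, 0.7], 5) draws 5 event indices.
// --- End editor's note --------------------------------------------------------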
result.indexToLoc(i);\n const aLoc = loc.slice(-a.rank);\n aBroadcastDims.forEach(d => aLoc[d] = 0);\n const aIndex = aBuf.locToIndex(aLoc);\n const bLoc = loc.slice(-b.rank);\n bBroadcastDims.forEach(d => bLoc[d] = 0);\n const bIndex = bBuf.locToIndex(bLoc);\n resVals[i] = op(aVals[aIndex], bVals[bIndex]);\n }\n }\n return result.toTensor();\n }\n split(x, sizeSplits, axis) {\n return split(x, sizeSplits, axis);\n }\n dispose() { }\n floatPrecision() {\n return 32;\n }\n /** Returns the smallest representable number. */\n epsilon() {\n return super.epsilon();\n }\n cropAndResize(images, boxes, boxIndex, cropSize, method, extrapolationValue) {\n const [batch, imageHeight, imageWidth, numChannels] = images.shape;\n const numBoxes = boxes.shape[0];\n const [cropHeight, cropWidth] = cropSize;\n const output = tf.buffer([numBoxes, cropHeight, cropWidth, numChannels], 'float32');\n const boxVals = this.readSync(boxes.dataId);\n const boxIndVals = this.readSync(boxIndex.dataId);\n const imageVals = this.readSync(images.dataId);\n const inStride = images.strides; // to calculate flat indexes into image\n const outStride = output.strides; // to calculate flat indexes into output\n // Reference implementation\n // tslint:disable-next-line:max-line-length\n // https://github.com/tensorflow/tensorflow/blob/master/tensorflow/core/kernels/crop_and_resize_op.cc\n for (let b = 0; b < numBoxes; b++) {\n const startInd = b * 4;\n const y1 = boxVals[startInd];\n const x1 = boxVals[startInd + 1];\n const y2 = boxVals[startInd + 2];\n const x2 = boxVals[startInd + 3];\n const bInd = boxIndVals[b];\n if (bInd >= batch) {\n continue;\n }\n const heightScale = (cropHeight > 1) ?\n (y2 - y1) * (imageHeight - 1) / (cropHeight - 1) :\n 0;\n const widthScale = (cropWidth > 1) ? 
(x2 - x1) * (imageWidth - 1) / (cropWidth - 1) : 0;\n for (let y = 0; y < cropHeight; y++) {\n const yInd = (cropHeight > 1) ?\n y1 * (imageHeight - 1) + y * (heightScale) :\n 0.5 * (y1 + y2) * (imageHeight - 1);\n if (yInd < 0 || yInd > imageHeight - 1) {\n for (let x = 0; x < cropWidth; x++) {\n for (let c = 0; c < numChannels; c++) {\n const ind = c + x * outStride[2] + y * outStride[1] + b * outStride[0];\n output.values[ind] = extrapolationValue;\n }\n }\n continue;\n }\n if (method === 'bilinear') {\n const topInd = Math.floor(yInd);\n const bottomInd = Math.ceil(yInd);\n const yLerp = yInd - topInd;\n for (let x = 0; x < cropWidth; x++) {\n const xInd = (cropWidth > 1) ?\n x1 * (imageWidth - 1) + x * widthScale :\n 0.5 * (x1 + x2) * (imageWidth - 1);\n if (xInd < 0 || xInd > imageWidth - 1) {\n for (let c = 0; c < numChannels; c++) {\n const ind = c + x * outStride[2] + y * outStride[1] + b * outStride[0];\n output.values[ind] = extrapolationValue;\n }\n continue;\n }\n const leftInd = Math.floor(xInd);\n const rightInd = Math.ceil(xInd);\n const xLerp = xInd - leftInd;\n for (let c = 0; c < numChannels; c++) {\n let ind = c + leftInd * inStride[2] + topInd * inStride[1] +\n bInd * inStride[0];\n const topLeft = imageVals[ind];\n ind = c + rightInd * inStride[2] + topInd * inStride[1] +\n bInd * inStride[0];\n const topRight = imageVals[ind];\n ind = c + leftInd * inStride[2] + bottomInd * inStride[1] +\n bInd * inStride[0];\n const bottomLeft = imageVals[ind];\n ind = c + rightInd * inStride[2] + bottomInd * inStride[1] +\n bInd * inStride[0];\n const bottomRight = imageVals[ind];\n const top = topLeft + (topRight - topLeft) * xLerp;\n const bottom = bottomLeft + (bottomRight - bottomLeft) * xLerp;\n ind = c + x * outStride[2] + y * outStride[1] + b * outStride[0];\n output.values[ind] = top + ((bottom - top) * yLerp);\n }\n }\n }\n else { // method == \"nearest\"\n for (let x = 0; x < cropWidth; ++x) {\n const xInd = (cropWidth > 1) ?\n x1 * (imageWidth - 1) + x * widthScale :\n 0.5 * (x1 + x2) * (imageWidth - 1);\n if (xInd < 0 || xInd > imageWidth - 1) {\n for (let c = 0; c < numChannels; c++) {\n const ind = c + x * outStride[2] + y * outStride[1] + b * outStride[0];\n output.values[ind] = extrapolationValue;\n }\n continue;\n }\n const closestX = Math.round(xInd);\n const closestY = Math.round(yInd);\n for (let c = 0; c < numChannels; c++) {\n const inInd = c + closestX * inStride[2] +\n closestY * inStride[1] + bInd * inStride[0];\n const outInd = c + x * outStride[2] + y * outStride[1] + b * outStride[0];\n output.values[outInd] = imageVals[inInd];\n }\n }\n }\n }\n }\n return output.toTensor();\n }\n sparseToDense(sparseIndices, sparseValues, outputShape, defaultValue) {\n const { sliceRank, numUpdates, sliceSize, strides, outputSize } = backend_util.calculateShapes(sparseValues, sparseIndices, outputShape);\n const sumDupeIndices = false;\n return this.scatter(sparseIndices, sparseValues, outputShape, outputSize, sliceSize, numUpdates, sliceRank, strides, defaultValue, sumDupeIndices);\n }\n gatherND(x, indices) {\n const indicesShape = indices.shape;\n const sliceRank = indicesShape[indicesShape.length - 1];\n const [resultShape, numSlices, sliceSize, strides] = backend_util.prepareAndValidate(x, indices);\n if (numSlices === 0) {\n return tf.tensor([], resultShape, x.dtype);\n }\n const buffer = new TensorBuffer([numSlices, sliceSize], x.dtype);\n const indicesData = this.readSync(indices.dataId);\n const xData = this.readSync(x.dataId);\n for (let i = 0; i < numSlices; 
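// --- Editor's note: illustrative sketch, not part of the bundled source ------
// The cropAndResize kernel above maps each crop pixel to a source coordinate
// inside the normalized box [y1, x1, y2, x2], bilinearly samples it, and fills
// out-of-range samples with extrapolationValue. A minimal single-box,
// single-channel sketch of the same coordinate math; names are assumptions for
// this example only.
function cropAndResizeSketch(image, box, cropHeight, cropWidth, extrapolationValue = 0) {
  const [y1, x1, y2, x2] = box; // normalized corners, as in the kernel
  const imageHeight = image.length;
  const imageWidth = image[0].length;
  const heightScale = cropHeight > 1 ? (y2 - y1) * (imageHeight - 1) / (cropHeight - 1) : 0;
  const widthScale = cropWidth > 1 ? (x2 - x1) * (imageWidth - 1) / (cropWidth - 1) : 0;
  const out = [];
  for (let y = 0; y < cropHeight; y++) {
    const yInd = cropHeight > 1 ? y1 * (imageHeight - 1) + y * heightScale
                                : 0.5 * (y1 + y2) * (imageHeight - 1);
    const row = [];
    for (let x = 0; x < cropWidth; x++) {
      const xInd = cropWidth > 1 ? x1 * (imageWidth - 1) + x * widthScale
                                 : 0.5 * (x1 + x2) * (imageWidth - 1);
      if (yInd < 0 || yInd > imageHeight - 1 || xInd < 0 || xInd > imageWidth - 1) {
        row.push(extrapolationValue); // sample falls outside the image
        continue;
      }
      const top = Math.floor(yInd), bottom = Math.ceil(yInd), yLerp = yInd - top;
      const left = Math.floor(xInd), right = Math.ceil(xInd), xLerp = xInd - left;
      const topVal = image[top][left] + (image[top][right] - image[top][left]) * xLerp;
      const botVal = image[bottom][left] + (image[bottom][right] - image[bottom][left]) * xLerp;
      row.push(topVal + (botVal - topVal) * yLerp);
    }
    out.push(row);
  }
  return out;
}
// Usage: cropAndResizeSketch([[1, 2], [3, 4]], [0, 0, 1, 1], 3, 3) resamples the full image to 3x3.
// --- End editor's note --------------------------------------------------------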
i++) {\n const index = [];\n let flattenIndex = 0;\n for (let j = 0; j < sliceRank; j++) {\n const dim = indicesData[i * sliceRank + j];\n flattenIndex += dim * strides[j];\n index.push(dim);\n }\n if (flattenIndex < 0 || flattenIndex >= x.size / sliceSize) {\n throw new Error(`Invalid indices: ${index} does not index into ${x.shape}`);\n }\n for (let k = 0; k < sliceSize; k++) {\n buffer.values[i * sliceSize + k] = xData[flattenIndex * sliceSize + k];\n }\n }\n return buffer.toTensor().reshape(resultShape);\n }\n scatterND(indices, updates, shape) {\n const { sliceRank, numUpdates, sliceSize, strides, outputSize } = backend_util.calculateShapes(updates, indices, shape);\n const defaultValue = tf.scalar(0);\n const sumDupeIndices = true;\n return this.scatter(indices, updates, shape, outputSize, sliceSize, numUpdates, sliceRank, strides, defaultValue, sumDupeIndices);\n }\n onesLike(x) {\n if (x.dtype === 'string') {\n throw new Error('onesLike is not supported for string tensors');\n }\n else {\n // TODO(lina128): Use fill kernel directly once this kernel is\n // modularized.\n return tf.fill(x.shape, 1, x.dtype);\n }\n }\n zerosLike(x) {\n const values = util.getArrayFromDType(x.dtype, util.sizeFromShape(x.shape));\n return this.makeOutput(values, x.shape, x.dtype);\n }\n linspace(start, stop, num) {\n return backend_util.linspaceImpl(start, stop, num);\n }\n scatter(indices, updates, shape, outputSize, sliceSize, numUpdates, sliceRank, strides, defaultValue, sumDupeIndices) {\n const flattenShape = [outputSize / sliceSize, sliceSize];\n const indicesData = this.readSync(indices.dataId);\n const updatesData = this.readSync(updates.dataId);\n if (outputSize === 0) {\n return tf.tensor([], shape, updates.dtype);\n }\n const buffer = new TensorBuffer(flattenShape, updates.dtype);\n buffer.values.fill(this.readSync(defaultValue.dataId)[0]);\n for (let i = 0; i < numUpdates; i++) {\n const index = [];\n let flattenIndex = 0;\n for (let j = 0; j < sliceRank; j++) {\n const dim = indicesData[i * sliceRank + j];\n index.push(dim);\n flattenIndex += dim * strides[j];\n }\n if (flattenIndex < 0 || flattenIndex >= outputSize / sliceSize) {\n throw new Error(`Invalid indices: ${index} does not index into ${shape}`);\n }\n for (let k = 0; k < sliceSize; k++) {\n if (sumDupeIndices) {\n buffer.values[flattenIndex * sliceSize + k] +=\n updatesData[i * sliceSize + k];\n }\n else {\n buffer.values[flattenIndex * sliceSize + k] = updates.rank === 0 ?\n updatesData[0] :\n updatesData[i * sliceSize + k];\n }\n }\n }\n return buffer.toTensor().reshape(shape);\n }\n}\n//# sourceMappingURL=backend_cpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
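// --- Editor's note: illustrative sketch, not part of the bundled source ------
// The scatter helper above (shared by scatterND and sparseToDense) flattens the
// output into [numSlices, sliceSize], fills it with the default value, converts
// each index tuple to a flat slice position via strides, and then either sums
// or overwrites the corresponding slice. A minimal sketch of that flat-index
// arithmetic on plain arrays; names are assumptions for this example only.
function scatterSketch(indices, updates, shape, defaultValue = 0, sumDupes = true) {
  const sliceRank = indices[0].length;
  const sliceSize = shape.slice(sliceRank).reduce((a, b) => a * b, 1);
  const numSlots = shape.slice(0, sliceRank).reduce((a, b) => a * b, 1);
  // Strides of the leading (indexed) dimensions, measured in whole slices.
  const strides = new Array(sliceRank).fill(1);
  for (let j = sliceRank - 2; j >= 0; j--) strides[j] = strides[j + 1] * shape[j + 1];
  const out = new Float32Array(numSlots * sliceSize).fill(defaultValue);
  indices.forEach((index, i) => {
    const flat = index.reduce((acc, dim, j) => acc + dim * strides[j], 0);
    if (flat < 0 || flat >= numSlots) {
      throw new Error(`Invalid indices: ${index} does not index into ${shape}`);
    }
    for (let k = 0; k < sliceSize; k++) {
      if (sumDupes) out[flat * sliceSize + k] += updates[i][k];
      else out[flat * sliceSize + k] = updates[i][k];
    }
  });
  return out; // flat values; reshape to `shape` to obtain the scattered tensor
}
// Usage: scatterSketch([[0], [2]], [[5, 5], [7, 7]], [4, 2])
//   -> Float32Array [5, 5, 0, 0, 7, 7, 0, 0]
// --- End editor's note --------------------------------------------------------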
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Abs, util } from '@tensorflow/tfjs-core';\nexport function simpleAbsImpl(vals) {\n const resultValues = new Float32Array(vals.length);\n for (let i = 0; i < vals.length; ++i) {\n resultValues[i] = Math.abs(vals[i]);\n }\n return resultValues;\n}\nexport const abs = (args) => {\n const { x } = args.inputs;\n const cpuBackend = args.backend;\n let resultValues = new Float32Array(util.sizeFromShape(x.shape));\n if (x.dtype !== 'complex64') {\n const values = cpuBackend.data.get(x.dataId).values;\n resultValues = simpleAbsImpl(values);\n }\n else {\n const complexVals = cpuBackend.data.get(x.dataId);\n const real = complexVals.complexTensorInfos.real;\n const imag = complexVals.complexTensorInfos.imag;\n const realVals = cpuBackend.data.get(real.dataId).values;\n const imagVals = cpuBackend.data.get(imag.dataId).values;\n for (let i = 0; i < realVals.length; i++) {\n const real = realVals[i];\n const imag = imagVals[i];\n resultValues[i] = Math.hypot(real, imag);\n }\n }\n return cpuBackend.makeOutput(resultValues, x.shape, 'float32');\n};\nexport const absConfig = {\n kernelName: Abs,\n backendName: 'cpu',\n kernelFunc: abs,\n};\n//# sourceMappingURL=Abs.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, util } from '@tensorflow/tfjs-core';\n/**\n * Template that creates implementation for binary ops. 
Supports broadcast.\n */\nexport function createSimpleBinaryKernelImpl(op) {\n return (aShape, bShape, aVals, bVals, dtype) => {\n const newShape = backend_util.assertAndGetBroadcastShape(aShape, bShape);\n const resultRank = newShape.length;\n const resultStrides = util.computeStrides(newShape);\n const resultSize = util.sizeFromShape(newShape);\n const result = util.getTypedArrayFromDType(dtype, resultSize);\n const aRank = aShape.length;\n const bRank = bShape.length;\n const aStrides = util.computeStrides(aShape);\n const bStrides = util.computeStrides(bShape);\n const aBroadcastDims = backend_util.getBroadcastDims(aShape, newShape);\n const bBroadcastDims = backend_util.getBroadcastDims(bShape, newShape);\n if (aBroadcastDims.length + bBroadcastDims.length === 0) {\n for (let i = 0; i < result.length; ++i) {\n result[i] = op(aVals[i % aVals.length], bVals[i % bVals.length]);\n }\n }\n else {\n for (let i = 0; i < result.length; ++i) {\n const loc = util.indexToLoc(i, resultRank, resultStrides);\n const aLoc = loc.slice(-aRank);\n aBroadcastDims.forEach(d => aLoc[d] = 0);\n const aIndex = util.locToIndex(aLoc, aRank, aStrides);\n const bLoc = loc.slice(-bRank);\n bBroadcastDims.forEach(d => bLoc[d] = 0);\n const bIndex = util.locToIndex(bLoc, bRank, bStrides);\n result[i] = op(aVals[aIndex], bVals[bIndex]);\n }\n }\n return [result, newShape];\n };\n}\n//# sourceMappingURL=binary_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Complex } from '@tensorflow/tfjs-core';\nexport function complex(args) {\n const { inputs, backend } = args;\n const { real, imag } = inputs;\n const realVals = backend.data.get(real.dataId).values;\n const imagVals = backend.data.get(imag.dataId).values;\n const complexInfo = backend.makeTensorInfo(real.shape, 'complex64');\n const complex = backend.data.get(complexInfo.dataId);\n // The complex tensor owns the underlying real and imag tensorInfos, only the\n // complex tensor tracks refCount, when complexData is disposed the\n // underlying tensorData will be disposed.\n complex.complexTensorInfos = {\n real: backend.makeTensorInfo(real.shape, 'float32', realVals),\n imag: backend.makeTensorInfo(imag.shape, 'float32', imagVals)\n };\n return complexInfo;\n}\nexport const complexConfig = {\n kernelName: Complex,\n backendName: 'cpu',\n kernelFunc: complex\n};\n//# sourceMappingURL=Complex.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
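// --- Editor's note: illustrative sketch, not part of the bundled source ------
// createSimpleBinaryKernelImpl above broadcasts two inputs by converting each
// flat output index into a coordinate, zeroing the coordinates that belong to
// broadcast (size-1) dimensions, and mapping back to flat indices into a and b.
// A minimal self-contained sketch of that index mapping; unlike the library
// code it does not validate that the shapes are broadcast-compatible, and the
// names are assumptions for this example only.
function broadcastBinaryOpSketch(aVals, aShape, bVals, bShape, op) {
  const rank = Math.max(aShape.length, bShape.length);
  // Right-align shapes, padding with 1s; the result shape is the elementwise max.
  const pad = (shape) => Array(rank - shape.length).fill(1).concat(shape);
  const a = pad(aShape), b = pad(bShape);
  const outShape = a.map((d, i) => Math.max(d, b[i]));
  const strides = (shape) => {
    const s = new Array(shape.length).fill(1);
    for (let i = shape.length - 2; i >= 0; i--) s[i] = s[i + 1] * shape[i + 1];
    return s;
  };
  const outStrides = strides(outShape), aStrides = strides(a), bStrides = strides(b);
  const size = outShape.reduce((x, y) => x * y, 1);
  const out = new Float32Array(size);
  for (let i = 0; i < size; i++) {
    // Flat index -> coordinate; clamp broadcast dims to 0; coordinate -> flat index.
    let rest = i, aIdx = 0, bIdx = 0;
    for (let d = 0; d < rank; d++) {
      const coord = Math.floor(rest / outStrides[d]);
      rest -= coord * outStrides[d];
      aIdx += (a[d] === 1 ? 0 : coord) * aStrides[d];
      bIdx += (b[d] === 1 ? 0 : coord) * bStrides[d];
    }
    out[i] = op(aVals[aIdx], bVals[bIdx]);
  }
  return { values: out, shape: outShape };
}
// Usage: add a [2,1] column to a [1,3] row, giving a [2,3] result [11,12,13,21,22,23]:
// broadcastBinaryOpSketch([10, 20], [2, 1], [1, 2, 3], [1, 3], (x, y) => x + y);
// --- End editor's note --------------------------------------------------------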
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Identity } from '@tensorflow/tfjs-core';\nexport function identity(args) {\n const { inputs, backend } = args;\n const { x } = inputs;\n backend.incRef(x.dataId);\n return { dataId: x.dataId, shape: x.shape, dtype: x.dtype };\n}\nexport const identityConfig = {\n kernelName: Identity,\n backendName: 'cpu',\n kernelFunc: identity\n};\n//# sourceMappingURL=Identity.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Real } from '@tensorflow/tfjs-core';\nexport function real(args) {\n const { inputs, backend } = args;\n const { input } = inputs;\n const real = backend.data.get(input.dataId).complexTensorInfos.real;\n const realVal = backend.data.get(real.dataId).values;\n // When complex tensor is disposed, its underlying parts will be disposed too.\n // Make new tensor out of the real value of the complex. This makes sure the\n // value is still accessible even if complex tensor is disposed.\n return backend.makeTensorInfo(real.shape, real.dtype, realVal);\n}\nexport const realConfig = {\n kernelName: Real,\n backendName: 'cpu',\n kernelFunc: real\n};\n//# sourceMappingURL=Real.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as tf from '@tensorflow/tfjs-core';\nimport { Cast, util } from '@tensorflow/tfjs-core';\nimport { createSimpleBinaryKernelImpl } from '../utils/binary_impl';\nimport { complex } from './Complex';\nimport { identity } from './Identity';\nimport { real } from './Real';\nexport function cast(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n const { dtype } = attrs;\n // Casting to complex64.\n if (dtype === 'complex64') {\n if (x.dtype === 'complex64') {\n return identity({ inputs: { x }, backend });\n }\n // TODO(lina128): Import kernel function once zeros is modularized.\n const zerosTensor = tf.zeros(x.shape);\n const floatX = cast({ inputs: { x }, backend, attrs: { dtype: 'float32' } });\n const result = complex({ inputs: { real: floatX, imag: zerosTensor }, backend });\n zerosTensor.dispose();\n backend.disposeIntermediateTensorInfo(floatX);\n return result;\n }\n // Casting from complex64\n if (x.dtype === 'complex64') {\n const realPart = real({ inputs: { input: x }, backend });\n const result = cast({ inputs: { x: realPart }, backend, attrs: { dtype } });\n backend.disposeIntermediateTensorInfo(realPart);\n return result;\n }\n if (!util.hasEncodingLoss(x.dtype, dtype)) {\n // We don't change the underlying data, since we cast to higher\n // precision.\n const result = identity({ inputs: { x }, backend });\n return { dataId: result.dataId, shape: result.shape, dtype };\n }\n if (dtype === 'int32') {\n const values = backend.data.get(x.dataId).values;\n const resultValues = Int32Array.from(values);\n return backend.makeTensorInfo(x.shape, 'int32', resultValues);\n }\n if (dtype === 'bool') {\n // This is essentially the result of notEqual(x, 0). We avoid using\n // kernel notEqual to avoid circular dependency, i.e. binary_utils ->\n // cast -> notEqual -> binary_utils.\n const xVals = backend.data.get(x.dataId).values;\n const zero = util.toTypedArray([0], x.dtype);\n const [resultData, resultShape] = createSimpleBinaryKernelImpl((a, b) => (a !== b) ? 1 : 0)(x.shape, [], xVals, zero, 'bool');\n return backend.makeTensorInfo(resultShape, 'bool', resultData);\n }\n throw new Error(`Error in Cast: failed to cast ${x.dtype} to ${dtype}`);\n}\nexport const castConfig = {\n kernelName: Cast,\n backendName: 'cpu',\n kernelFunc: cast\n};\n//# sourceMappingURL=Cast.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { cast } from '../kernels/Cast';\nimport { complex } from '../kernels/Complex';\n/**\n * Template that creates a `KernelFunc` for binary ops.\n * @param name Kernel name.\n * @param binaryKernelImpl A `SimpleBinaryKernelImpl` for the kernel.\n * @param binaryKernelComplexImpl Optional. If exists, represents a\n * `ComplexBinaryKernelImpl` for the kernel, will be used when input dtype\n * is `complex64`.\n * @param dtype Optional. If set, the result has this dtype. Otherwise, the\n * result has the same dtype as the first input. This is mainly used in\n * comparison kernels, such as Equal, Less, Greater, etc.\n */\nexport function binaryKernelFunc(name, simpleImpl, complexImpl, dtype) {\n if (complexImpl == null) {\n return ({ inputs, backend }) => {\n const { a, b } = inputs;\n const cpuBackend = backend;\n assertNotComplex([a, b], name);\n const aVals = cpuBackend.data.get(a.dataId).values;\n const bVals = cpuBackend.data.get(b.dataId).values;\n const $dtype = dtype || a.dtype;\n const [resultData, resultShape] = simpleImpl(a.shape, b.shape, aVals, bVals, $dtype);\n return cpuBackend.makeTensorInfo(resultShape, $dtype, resultData);\n };\n }\n return ({ inputs, backend }) => {\n const { a, b } = inputs;\n const cpuBackend = backend;\n if (a.dtype === 'complex64' || b.dtype === 'complex64') {\n const $aComplex = cast({ inputs: { x: a }, backend: cpuBackend, attrs: { dtype: 'complex64' } });\n const $aComplexVals = cpuBackend.data.get($aComplex.dataId);\n const aReal = $aComplexVals.complexTensorInfos.real;\n const aImag = $aComplexVals.complexTensorInfos.imag;\n const aRealVals = cpuBackend.data.get(aReal.dataId).values;\n const aImagVals = cpuBackend.data.get(aImag.dataId).values;\n const $bComplex = cast({ inputs: { x: b }, backend: cpuBackend, attrs: { dtype: 'complex64' } });\n const $bComplexVals = cpuBackend.data.get($bComplex.dataId);\n const bReal = $bComplexVals.complexTensorInfos.real;\n const bImag = $bComplexVals.complexTensorInfos.imag;\n const bRealVals = cpuBackend.data.get(bReal.dataId).values;\n const bImagVals = cpuBackend.data.get(bImag.dataId).values;\n const [resultRealData, resultImagData, resultShape] = complexImpl(a.shape, b.shape, aRealVals, aImagVals, bRealVals, bImagVals);\n const resultReal = cpuBackend.makeTensorInfo(resultShape, 'float32', resultRealData);\n const resultImag = cpuBackend.makeTensorInfo(resultShape, 'float32', resultImagData);\n const result = complex({ inputs: { real: resultReal, imag: resultImag }, backend: cpuBackend });\n cpuBackend.disposeIntermediateTensorInfo($aComplex);\n cpuBackend.disposeIntermediateTensorInfo($bComplex);\n cpuBackend.disposeIntermediateTensorInfo(resultReal);\n cpuBackend.disposeIntermediateTensorInfo(resultImag);\n return result;\n }\n else 
{\n const aVals = cpuBackend.data.get(a.dataId).values;\n const bVals = cpuBackend.data.get(b.dataId).values;\n const $dtype = dtype || a.dtype;\n const [resultData, resultShape] = simpleImpl(a.shape, b.shape, aVals, bVals, $dtype);\n return cpuBackend.makeTensorInfo(resultShape, $dtype, resultData);\n }\n };\n}\n/**\n * Template that creates the complex type implementation for binary ops.\n * Supports broadcast.\n */\nexport function createComplexBinaryKernelImpl(op) {\n return (aShape, bShape, aRealVals, aImagVals, bRealVals, bImagVals) => {\n const resultShape = backend_util.assertAndGetBroadcastShape(aShape, bShape);\n const resultSize = util.sizeFromShape(resultShape);\n const resultRank = resultShape.length;\n const resultStrides = util.computeStrides(resultShape);\n const resultRealVals = util.getTypedArrayFromDType('float32', resultSize);\n const resultImagVals = util.getTypedArrayFromDType('float32', resultSize);\n const aBroadcastDims = backend_util.getBroadcastDims(aShape, resultShape);\n const bBroadcastDims = backend_util.getBroadcastDims(bShape, resultShape);\n const aVals = backend_util.mergeRealAndImagArrays(aRealVals, aImagVals);\n const bVals = backend_util.mergeRealAndImagArrays(bRealVals, bImagVals);\n const aRank = aShape.length;\n const aStrides = util.computeStrides(aShape);\n const bRank = bShape.length;\n const bStrides = util.computeStrides(bShape);\n if (aBroadcastDims.length + bBroadcastDims.length === 0) {\n for (let i = 0; i < resultRealVals.length; i++) {\n const aIdx = i % aVals.length;\n const bIdx = i % bVals.length;\n const result = op(aVals[aIdx * 2], aVals[aIdx * 2 + 1], bVals[bIdx * 2], bVals[bIdx * 2 + 1]);\n resultRealVals[i] = result.real;\n resultImagVals[i] = result.imag;\n }\n }\n else {\n for (let i = 0; i < resultRealVals.length; i++) {\n const loc = util.indexToLoc(i, resultRank, resultStrides);\n const aLoc = loc.slice(-aRank);\n aBroadcastDims.forEach(d => aLoc[d] = 0);\n const aIndex = util.locToIndex(aLoc, aRank, aStrides);\n const bLoc = loc.slice(-bRank);\n bBroadcastDims.forEach(d => bLoc[d] = 0);\n const bIndex = util.locToIndex(bLoc, bRank, bStrides);\n const opResult = op(aVals[aIndex * 2], aVals[aIndex * 2 + 1], bVals[bIndex * 2], bVals[bIndex * 2 + 1]);\n resultRealVals[i] = opResult.real;\n resultImagVals[i] = opResult.imag;\n }\n }\n return [resultRealVals, resultImagVals, resultShape];\n };\n}\n//# sourceMappingURL=kernel_utils.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Add } from '@tensorflow/tfjs-core';\nimport { createSimpleBinaryKernelImpl } from '../utils/binary_impl';\nimport { binaryKernelFunc, createComplexBinaryKernelImpl } from '../utils/kernel_utils';\nexport const addImpl = createSimpleBinaryKernelImpl(((a, b) => a + b));\nexport const addComplexImpl = createComplexBinaryKernelImpl(((aReal, aImag, bReal, bImag) => {\n return { real: aReal + bReal, imag: aImag + bImag };\n}));\nexport const add = binaryKernelFunc(Add, addImpl, addComplexImpl);\nexport const addConfig = {\n kernelName: Add,\n backendName: 'cpu',\n kernelFunc: add\n};\n//# sourceMappingURL=Add.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\n/**\n * Template that creates implementation for unary op.\n */\nexport function createSimpleUnaryImpl(op) {\n return (values, dtype, attrs) => {\n const newValues = util.getTypedArrayFromDType(dtype, values.length);\n for (let i = 0; i < values.length; ++i) {\n newValues[i] = op(values[i], attrs);\n }\n return newValues;\n };\n}\n//# sourceMappingURL=unary_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\n/**\n * Template that creates a `KernelFunc` for unary ops.\n * @param name Kernel name.\n * @param op A `SimpleUnaryOperation` for the kernel.\n * @param dtype Optional. If set, the result has this dtype. Otherwise, the\n * result has the same dtype as the input. 
This is mainly used in certain\n * kernels that return bool type, such as isFinite, isInf, etc.\n */\nexport function unaryKernelFunc(name, op, dtype) {\n return ({ inputs, attrs, backend }) => {\n const { x } = inputs;\n assertNotComplex(x, name);\n if (x.dtype === 'string' || dtype === 'string') {\n throw new Error('unaryKernelFunc does not support string input/output');\n }\n const cpuBackend = backend;\n const values = cpuBackend.data.get(x.dataId).values;\n const xSize = util.sizeFromShape(x.shape);\n const $dtype = dtype || x.dtype;\n const newValues = util.getArrayFromDType($dtype, xSize);\n for (let i = 0; i < xSize; ++i) {\n newValues[i] = op(values[i], attrs);\n }\n return cpuBackend.makeTensorInfo(x.shape, $dtype, newValues);\n };\n}\n/**\n * Template that creates a `KernelFunc` for unary ops from the given\n * `SimpleUnaryImpl`..\n * @param name Kernel name.\n * @param unaryImpl A `SimpleUnaryImpl` that implements the op.\n * @param dtype Optional. If set, the result has this dtype. Otherwise, the\n * result has the same dtype as the input. This is mainly used in certain\n * kernels that return bool type, such as isFinite, isInf, etc.\n */\nexport function unaryKernelFuncFromImpl(name, unaryImpl, dtype) {\n return ({ inputs, attrs, backend }) => {\n const { x } = inputs;\n assertNotComplex(x, name);\n if (x.dtype === 'string' || dtype === 'string') {\n throw new Error('unaryKernelFunc does not support string input/output');\n }\n const cpuBackend = backend;\n const values = cpuBackend.data.get(x.dataId).values;\n const $dtype = dtype || x.dtype;\n const newValues = unaryImpl(values, $dtype, attrs);\n return cpuBackend.makeTensorInfo(x.shape, $dtype, newValues);\n };\n}\n//# sourceMappingURL=unary_utils.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Ceil } from '@tensorflow/tfjs-core';\nimport { createSimpleUnaryImpl } from '../utils/unary_impl';\nimport { unaryKernelFuncFromImpl } from '../utils/unary_utils';\nexport const ceilImpl = createSimpleUnaryImpl((xi) => Math.ceil(xi));\nexport const ceil = unaryKernelFuncFromImpl(Ceil, ceilImpl);\nexport const ceilConfig = {\n kernelName: Ceil,\n backendName: 'cpu',\n kernelFunc: ceil,\n};\n//# sourceMappingURL=Ceil.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Exp } from '@tensorflow/tfjs-core';\nimport { createSimpleUnaryImpl } from '../utils/unary_impl';\nimport { unaryKernelFuncFromImpl } from '../utils/unary_utils';\nexport const expImpl = createSimpleUnaryImpl((xi) => Math.exp(xi));\nexport const exp = unaryKernelFuncFromImpl(Exp, expImpl);\nexport const expConfig = {\n kernelName: Exp,\n backendName: 'cpu',\n kernelFunc: exp,\n};\n//# sourceMappingURL=Exp.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Expm1 } from '@tensorflow/tfjs-core';\nimport { createSimpleUnaryImpl } from '../utils/unary_impl';\nimport { unaryKernelFuncFromImpl } from '../utils/unary_utils';\nexport const expm1Impl = createSimpleUnaryImpl((xi) => Math.expm1(xi));\nexport const expm1 = unaryKernelFuncFromImpl(Expm1, expm1Impl);\nexport const expm1Config = {\n kernelName: Expm1,\n backendName: 'cpu',\n kernelFunc: expm1,\n};\n//# sourceMappingURL=Expm1.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Floor } from '@tensorflow/tfjs-core';\nimport { createSimpleUnaryImpl } from '../utils/unary_impl';\nimport { unaryKernelFuncFromImpl } from '../utils/unary_utils';\nexport const floorImpl = createSimpleUnaryImpl((xi) => Math.floor(xi));\nexport const floor = unaryKernelFuncFromImpl(Floor, floorImpl);\nexport const floorConfig = {\n kernelName: Floor,\n backendName: 'cpu',\n kernelFunc: floor,\n};\n//# sourceMappingURL=Floor.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Log } from '@tensorflow/tfjs-core';\nimport { createSimpleUnaryImpl } from '../utils/unary_impl';\nimport { unaryKernelFuncFromImpl } from '../utils/unary_utils';\nexport const logImpl = createSimpleUnaryImpl((xi) => Math.log(xi));\nexport const log = unaryKernelFuncFromImpl(Log, logImpl);\nexport const logConfig = {\n kernelName: Log,\n backendName: 'cpu',\n kernelFunc: log,\n};\n//# sourceMappingURL=Log.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nexport function maxImpl(aVals, reduceSize, outShape, dtype) {\n const vals = util.getTypedArrayFromDType(dtype, util.sizeFromShape(outShape));\n for (let i = 0; i < vals.length; ++i) {\n const offset = i * reduceSize;\n let max = aVals[offset];\n for (let j = 0; j < reduceSize; ++j) {\n const value = aVals[offset + j];\n if (value > max) {\n max = value;\n }\n }\n vals[i] = max;\n }\n return vals;\n}\n//# sourceMappingURL=Max_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Multiply } from '@tensorflow/tfjs-core';\nimport { createSimpleBinaryKernelImpl } from '../utils/binary_impl';\nimport { binaryKernelFunc, createComplexBinaryKernelImpl } from '../utils/kernel_utils';\nexport const multiplyImpl = createSimpleBinaryKernelImpl(((aValue, bValue) => aValue * bValue));\nexport const multiplyComplexImpl = createComplexBinaryKernelImpl(((aReal, aImag, bReal, bImag) => {\n return {\n real: aReal * bReal - aImag * bImag,\n imag: aReal * bImag + aImag * bReal\n };\n}));\nexport const multiply = binaryKernelFunc(Multiply, multiplyImpl, multiplyComplexImpl);\nexport const multiplyConfig = {\n kernelName: Multiply,\n backendName: 'cpu',\n kernelFunc: multiply\n};\n//# sourceMappingURL=Multiply.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { NotEqual } from '@tensorflow/tfjs-core';\nimport { createSimpleBinaryKernelImpl } from '../utils/binary_impl';\nimport { binaryKernelFunc } from '../utils/kernel_utils';\nexport const notEqualImpl = createSimpleBinaryKernelImpl(((a, b) => (a !== b) ? 1 : 0));\nexport const notEqual = binaryKernelFunc(NotEqual, notEqualImpl, null /* complexOp */, 'bool');\nexport const notEqualConfig = {\n kernelName: NotEqual,\n backendName: 'cpu',\n kernelFunc: notEqual\n};\n//# sourceMappingURL=NotEqual.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
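// --- Editor's note: illustrative sketch, not part of the bundled source ------
// multiplyComplexImpl above applies the standard complex product
//   (aR + aI*i)(bR + bI*i) = (aR*bR - aI*bI) + (aR*bI + aI*bR)*i
// elementwise through the complex binary template. A minimal sketch on parallel
// real/imag arrays with no broadcasting; the name is an assumption for this
// example only.
function complexMultiplySketch(aReal, aImag, bReal, bImag) {
  const real = new Float32Array(aReal.length);
  const imag = new Float32Array(aReal.length);
  for (let i = 0; i < aReal.length; i++) {
    real[i] = aReal[i] * bReal[i] - aImag[i] * bImag[i];
    imag[i] = aReal[i] * bImag[i] + aImag[i] * bReal[i];
  }
  return { real, imag };
}
// Usage: (1 + 2i) * (3 + 4i) = -5 + 10i  ->  complexMultiplySketch([1], [2], [3], [4]);
// --- End editor's note --------------------------------------------------------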
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Rsqrt } from '@tensorflow/tfjs-core';\nimport { createSimpleUnaryImpl } from '../utils/unary_impl';\nimport { unaryKernelFuncFromImpl } from '../utils/unary_utils';\nexport const rsqrtImpl = createSimpleUnaryImpl((xi) => 1 / Math.sqrt(xi));\nexport const rsqrt = unaryKernelFuncFromImpl(Rsqrt, rsqrtImpl);\nexport const rsqrtConfig = {\n kernelName: Rsqrt,\n backendName: 'cpu',\n kernelFunc: rsqrt,\n};\n//# sourceMappingURL=Rsqrt.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Slice, slice_util, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function sliceImpl(vals, begin, size, shape, dtype) {\n const isContinous = slice_util.isSliceContinous(shape, begin, size);\n const length = util.sizeFromShape(size);\n const xStrides = util.computeStrides(shape);\n if (isContinous) {\n const flatOffset = slice_util.computeFlatOffset(begin, xStrides);\n return vals.subarray(flatOffset, flatOffset + length);\n }\n const outVals = util.getTypedArrayFromDType(dtype, length);\n for (let i = 0; i < length; ++i) {\n const rank = size.length;\n const strides = util.computeStrides(size);\n const loc = util.indexToLoc(i, rank, strides);\n const xLoc = loc.map((idx, j) => idx + begin[j]);\n const xIndex = util.locToIndex(xLoc, shape.length, xStrides);\n outVals[i] = vals[xIndex];\n }\n return outVals;\n}\nexport function slice(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n const { begin, size } = attrs;\n assertNotComplex(x, 'slice');\n const [$begin, $size] = slice_util.parseSliceParams(x, begin, size);\n slice_util.assertParamsValid(x, $begin, $size);\n const vals = backend.data.get(x.dataId).values;\n const outVals = sliceImpl(vals, $begin, $size, x.shape, x.dtype);\n return backend.makeTensorInfo($size, x.dtype, outVals);\n}\nexport const sliceConfig = {\n kernelName: Slice,\n backendName: 'cpu',\n kernelFunc: slice\n};\n//# sourceMappingURL=Slice.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { SquaredDifference } from '@tensorflow/tfjs-core';\nimport { createSimpleBinaryKernelImpl } from '../utils/binary_impl';\nimport { binaryKernelFunc } from '../utils/kernel_utils';\nexport const squaredDifferenceImpl = createSimpleBinaryKernelImpl(((a, b) => {\n const diff = a - b;\n return diff * diff;\n}));\nexport const squaredDifference = binaryKernelFunc(SquaredDifference, squaredDifferenceImpl);\nexport const squaredDifferenceConfig = {\n kernelName: SquaredDifference,\n backendName: 'cpu',\n kernelFunc: squaredDifference\n};\n//# sourceMappingURL=SquaredDifference.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sub } from '@tensorflow/tfjs-core';\nimport { createSimpleBinaryKernelImpl } from '../utils/binary_impl';\nimport { binaryKernelFunc, createComplexBinaryKernelImpl } from '../utils/kernel_utils';\nexport const subImpl = createSimpleBinaryKernelImpl(((aValue, bValue) => aValue - bValue));\nexport const subComplexImpl = createComplexBinaryKernelImpl(((aReal, aImag, bReal, bImag) => {\n return { real: aReal - bReal, imag: aImag - bImag };\n}));\nexport const sub = binaryKernelFunc(Sub, subImpl, subComplexImpl);\nexport const subConfig = {\n kernelName: Sub,\n backendName: 'cpu',\n kernelFunc: sub\n};\n//# sourceMappingURL=Sub.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nexport function transposeImpl(xVals, xShape, dtype, perm, newShape) {\n const xRank = xShape.length;\n const xSize = util.sizeFromShape(xShape);\n const xStrides = util.computeStrides(xShape);\n const newStrides = util.computeStrides(newShape);\n const result = util.getTypedArrayFromDType(dtype, util.sizeFromShape(newShape));\n for (let i = 0; i < xSize; ++i) {\n const loc = util.indexToLoc(i, xRank, xStrides);\n // Permute location.\n const newLoc = new Array(loc.length);\n for (let i = 0; i < newLoc.length; i++) {\n newLoc[i] = loc[perm[i]];\n }\n const newIndex = util.locToIndex(newLoc, xRank, newStrides);\n result[newIndex] = xVals[i];\n }\n return result;\n}\n//# sourceMappingURL=Transpose_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { TensorBuffer, util } from '@tensorflow/tfjs-core';\nexport function uniqueImpl(values, axis, shape, dtype) {\n // Normalize and validate axis.\n const $axis = util.parseAxisParam(axis, shape)[0];\n // Calculate the new shape that is suitable for extracting data along the\n // given axis.\n //\n // The rank is 3.\n // The size of the 1st dimension is the size of all the axes < the given axis.\n // The size of the 2nd dimension is the same as the size of the given axis.\n // The size of the 3rd dimension is the size of all the axes > the given axis.\n //\n // For example, for a 4D tensor with shape=[2, 3, 5, 4] and axis=2, the\n // newShape would be: [2*3, 5, 4].\n //\n // Note that this is not the final output shape. This will be the shape for an\n // intermediate TensorBuffer (see inputBuffer below) to allow us to extract\n // values along the given axis. To demonstrate how it works, consider the\n // following example:\n //\n // Input: a 3D tensor, with shape [1, 2, 3]\n // [\n // [\n // [1,2,3],\n // [4,5,6]\n // ]\n // ]\n // Axis: 2 (the last axis).\n // Along axis 2, we expect to extract 3 tensors: [1,4], [2,5], [3,6].\n //\n // For this example, newShape would be: [2, 3, 1], where 2 is calculated from\n // 1*2. 
The re-shaped data would look like:\n //\n // [\n // [\n // [1], [2], [3]\n // ],\n // [\n // [4], [5], [6]\n // ]\n // ]\n //\n // Then, we can construct a 3-level nested loop by the following dimension\n // order to extract the values along the axis (dimension1):\n // i: dimension1 // 0,1,2 (newShape[1])\n // m: dimension0 // 0,1 (newShape[0])\n // n: dimension2 // 0 (newShape[2])\n //\n // m, i, n\n // ---------\n // Iteration 0: data at [0, 0, 0] => \"1\"\n // Iteration 1: data at [1, 0, 0] => \"4\"\n // We got [1,4].\n // Iteration 2: data at [0, 1, 0] => \"2\"\n // Iteration 3: data at [1, 1, 0] => \"5\"\n // We got [2,5].\n // Iteration 4: data at [0, 2, 0] => \"3\"\n // Iteration 5: data at [1, 2, 0] => \"6\"\n // We got [3,6].\n const newShape = [1, shape[0], 1];\n for (let i = 0; i < $axis; i++) {\n newShape[0] *= shape[i];\n }\n newShape[1] = shape[$axis];\n for (let i = $axis + 1; i < shape.length; i++) {\n newShape[2] *= shape[i];\n }\n // A map from unique elements (their string representations) to their values\n // in \"indices\" (below).\n const uniqueElements = {};\n // The indices of each unique element in the original tensor along the given\n // axis. It is 1D and has the same size as the given axis.\n const indices = new Int32Array(shape[$axis]);\n // Create a buffer so we can easily extract value at a given location.\n const inputBuffer = new TensorBuffer(newShape, dtype, values);\n // The indices along the given axis that have unique elements. This is a\n // de-duped version of \"indices\" above.\n const uniqueIndices = [];\n const is1DTensor = newShape[0] === 1 && newShape[2] === 1;\n for (let i = 0; i < shape[$axis]; i++) {\n // Extract values along the axis.\n let element;\n if (is1DTensor) {\n // Fast path for 1D tensor input.\n element = values[i].toString();\n }\n else {\n const axisValues = [];\n for (let m = 0; m < newShape[0]; m++) {\n for (let n = 0; n < newShape[2]; n++) {\n axisValues.push(inputBuffer.get(m, i, n));\n }\n }\n element = axisValues.join(',');\n }\n // Dedup and update various indices.\n if (uniqueElements[element] !== undefined) {\n indices[i] = uniqueElements[element];\n }\n else {\n const uniqueIndex = Object.keys(uniqueElements).length;\n uniqueElements[element] = uniqueIndex;\n indices[i] = uniqueIndex;\n uniqueIndices.push(i);\n }\n }\n // Now we know where each of the unique elements are located along the axis\n // (uniqueIndices). Extract them from input buffer and store them in the\n // output buffer.\n const outputTmpShape = newShape.slice();\n outputTmpShape[1] = Object.keys(uniqueElements).length;\n const outputBuffer = new TensorBuffer(outputTmpShape, dtype);\n uniqueIndices.forEach((uniqueElementIndex, i) => {\n for (let m = 0; m < newShape[0]; m++) {\n for (let n = 0; n < newShape[2]; n++) {\n outputBuffer.set(inputBuffer.get(m, uniqueElementIndex, n), m, i, n);\n }\n }\n });\n // The output shape can be calculated from the input shape with the size of\n // the given axis replaced by the number of unique elements along that axis.\n const outputShape = shape.slice();\n outputShape[$axis] = outputTmpShape[1];\n return {\n outputValues: outputBuffer.values,\n outputShape,\n indices,\n };\n}\n//# sourceMappingURL=Unique_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
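// Minimal standalone sketch of the "reshape to [pre, axis, post] and compare column
// signatures" idea that uniqueImpl's comments above walk through (not the bundled
// implementation; hypothetical helper on plain nested arrays, last-axis only):
function uniqueAlongLastAxis(rows) {
  const seen = new Map();   // column signature -> unique index
  const indices = [];       // per-column index into uniqueCols
  const uniqueCols = [];    // de-duplicated columns
  for (let c = 0; c < rows[0].length; c++) {
    const col = rows.map((r) => r[c]);
    const key = col.join(',');
    if (!seen.has(key)) {
      seen.set(key, uniqueCols.length);
      uniqueCols.push(col);
    }
    indices.push(seen.get(key));
  }
  return { uniqueCols, indices };
}
// Matches the worked example in the comments above: columns of [[1,2,3],[4,5,6]]
// are [1,4], [2,5], [3,6], all unique.
console.log(uniqueAlongLastAxis([[1, 2, 3], [4, 5, 6]]));
// { uniqueCols: [ [1,4], [2,5], [3,6] ], indices: [ 0, 1, 2 ] }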
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// Shared functionality among backends.\nexport { simpleAbsImpl } from './kernels/Abs';\nexport { addImpl } from './kernels/Add';\nexport { ceilImpl } from './kernels/Ceil';\nexport { expImpl } from './kernels/Exp';\nexport { expm1Impl } from './kernels/Expm1';\nexport { floorImpl } from './kernels/Floor';\nexport { logImpl } from './kernels/Log';\nexport { maxImpl } from './kernels/Max_impl';\nexport { multiplyImpl } from './kernels/Multiply';\nexport { notEqualImpl } from './kernels/NotEqual';\nexport { rsqrtImpl } from './kernels/Rsqrt';\nexport { sliceImpl } from './kernels/Slice';\nexport { squaredDifferenceImpl } from './kernels/SquaredDifference';\nexport { subImpl } from './kernels/Sub';\nexport { transposeImpl } from './kernels/Transpose_impl';\nexport { uniqueImpl } from './kernels/Unique_impl';\n//# sourceMappingURL=shared.js.map", "/** @license See the LICENSE file. */\n// This code is auto-generated, do not modify this file!\nconst version = '2.7.0';\nexport { version };\n//# sourceMappingURL=version.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/*\n * base.ts contains all the exports from tfjs-backend-cpu\n * without auto-kernel registration\n */\nimport { registerBackend } from '@tensorflow/tfjs-core';\nimport { MathBackendCPU } from './backend_cpu';\nimport * as shared from './shared';\nexport { MathBackendCPU } from './backend_cpu';\nexport { version as version_cpu } from './version';\nexport { shared };\n// Side effects for default initialization of MathBackendCPU\nregisterBackend('cpu', () => new MathBackendCPU(), 1 /* priority */);\n//# sourceMappingURL=base.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
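// Sketch of the priority-based backend registration that base.js above relies on when
// it calls registerBackend('cpu', ..., 1). This is a hypothetical, standalone registry,
// not the tfjs-core implementation:
const backendRegistry = new Map(); // name -> { factory, priority }
function registerBackendSketch(name, factory, priority = 1) {
  backendRegistry.set(name, { factory, priority });
}
function pickBackend() {
  // The registered backend with the highest priority wins.
  let best = null;
  for (const [name, entry] of backendRegistry) {
    if (!best || entry.priority > best.priority) best = { name, ...entry };
  }
  return best ? { name: best.name, backend: best.factory() } : null;
}
registerBackendSketch('cpu', () => ({ kind: 'cpu' }), 1);
registerBackendSketch('webgl', () => ({ kind: 'webgl' }), 2);
console.log(pickBackend().name); // 'webgl' -- higher priority is selected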
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Elu } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const elu = unaryKernelFunc(Elu, (xi) => xi >= 0 ? xi : (Math.exp(xi) - 1));\nexport const eluConfig = {\n kernelName: Elu,\n backendName: 'cpu',\n kernelFunc: elu,\n};\n//# sourceMappingURL=Elu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Prelu } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { createSimpleBinaryKernelImpl } from '../utils/binary_impl';\nconst preluImpl = createSimpleBinaryKernelImpl((xValue, aValue) => xValue < 0 ? aValue * xValue : xValue);\nexport function prelu(args) {\n const { inputs, backend } = args;\n const { x, alpha } = inputs;\n assertNotComplex([x, alpha], 'prelu');\n const aVals = backend.data.get(x.dataId).values;\n const bVals = backend.data.get(alpha.dataId).values;\n const [resultData, resultShape] = preluImpl(x.shape, alpha.shape, aVals, bVals, x.dtype);\n return backend.makeTensorInfo(resultShape, x.dtype, resultData);\n}\nexport const preluConfig = {\n kernelName: Prelu,\n backendName: 'cpu',\n kernelFunc: prelu,\n};\n//# sourceMappingURL=Prelu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Relu } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const relu = unaryKernelFunc(Relu, (xi) => Math.max(0, xi));\nexport const reluConfig = {\n kernelName: Relu,\n backendName: 'cpu',\n kernelFunc: relu,\n};\n//# sourceMappingURL=Relu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Relu6 } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const relu6 = unaryKernelFunc(Relu6, (xi) => Math.min(Math.max(0, xi), 6));\nexport const relu6Config = {\n kernelName: Relu6,\n backendName: 'cpu',\n kernelFunc: relu6,\n};\n//# sourceMappingURL=Relu6.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
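// The Elu, Prelu, Relu and Relu6 kernels above are thin wrappers around these scalar
// formulas. Standalone sketch for reference (not part of the bundled sources):
const eluFn   = (x) => (x >= 0 ? x : Math.exp(x) - 1);
const preluFn = (x, alpha) => (x < 0 ? alpha * x : x);
const reluFn  = (x) => Math.max(0, x);
const relu6Fn = (x) => Math.min(Math.max(0, x), 6);
console.log([-2, -1, 0, 3, 8].map(relu6Fn)); // [0, 0, 0, 3, 6]
console.log(preluFn(-2, 0.1));               // -0.2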
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { elu } from '../kernels/Elu';\nimport { identity } from '../kernels/Identity';\nimport { prelu } from '../kernels/Prelu';\nimport { relu } from '../kernels/Relu';\nimport { relu6 } from '../kernels/Relu6';\nexport function applyActivation(backend, x, activation, preluActivationWeights) {\n if (activation === 'linear') {\n return identity({ inputs: { x }, backend });\n }\n else if (activation === 'relu') {\n return relu({ inputs: { x }, backend });\n }\n else if (activation === 'elu') {\n return elu({ inputs: { x }, backend });\n }\n else if (activation === 'relu6') {\n return relu6({ inputs: { x }, backend });\n }\n else if (activation === 'prelu') {\n return prelu({ inputs: { x, alpha: preluActivationWeights }, backend });\n }\n throw new Error(`Activation ${activation} has not been implemented for the CPU backend.`);\n}\n//# sourceMappingURL=fused_utils.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Reshape, util } from '@tensorflow/tfjs-core';\nexport function reshape(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n const { shape } = attrs;\n const xSize = util.sizeFromShape(x.shape);\n const $shape = util.inferFromImplicitShape(shape, xSize);\n const $xSize = util.sizeFromShape($shape);\n util.assert(xSize === $xSize, () => `The new shape (${$shape}) has ${$xSize} elements and the old ` +\n `shape (${x.shape}) has ${xSize} elements. The new shape and old ` +\n `shape must have the same number of elements.`);\n backend.incRef(x.dataId);\n const xData = backend.data.get(x.dataId);\n if (xData.complexTensorInfos != null) {\n const real = xData.complexTensorInfos.real;\n const imag = xData.complexTensorInfos.imag;\n real.shape = $shape;\n imag.shape = $shape;\n }\n return { dataId: x.dataId, shape: $shape, dtype: x.dtype };\n}\nexport const reshapeConfig = {\n kernelName: Reshape,\n backendName: 'cpu',\n kernelFunc: reshape\n};\n//# sourceMappingURL=Reshape.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
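// The Reshape kernel above uses util.inferFromImplicitShape to resolve a single -1 in
// the requested shape. A standalone sketch of that inference (hypothetical helper, not
// the tfjs-core util):
function inferShape(shape, size) {
  const inferredIndex = shape.indexOf(-1);
  if (inferredIndex === -1) return shape; // nothing to infer
  const known = shape.reduce((p, d) => (d === -1 ? p : p * d), 1);
  if (size % known !== 0) throw new Error(`cannot reshape ${size} elements into ${shape}`);
  const resolved = shape.slice();
  resolved[inferredIndex] = size / known;
  return resolved;
}
console.log(inferShape([-1, 4], 12));    // [3, 4]
console.log(inferShape([2, -1, 3], 24)); // [2, 4, 3]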
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { BatchMatMul, buffer, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { reshape } from './Reshape';\nexport function batchMatMul(args) {\n const { inputs, backend, attrs } = args;\n const { a, b } = inputs;\n const { transposeA, transposeB } = attrs;\n assertNotComplex([a, b], 'matMul');\n const aRank = a.shape.length;\n const bRank = b.shape.length;\n const innerShapeA = transposeA ? a.shape[aRank - 2] : a.shape[aRank - 1];\n const innerShapeB = transposeB ? b.shape[bRank - 1] : b.shape[bRank - 2];\n const outerShapeA = transposeA ? a.shape[aRank - 1] : a.shape[aRank - 2];\n const outerShapeB = transposeB ? b.shape[bRank - 2] : b.shape[bRank - 1];\n const outerDimsA = a.shape.slice(0, -2);\n const outerDimsB = b.shape.slice(0, -2);\n const batchDimA = util.sizeFromShape(outerDimsA);\n const batchDimB = util.sizeFromShape(outerDimsB);\n const batchDimsCompatible = batchDimA === batchDimB || batchDimA === 1 || batchDimB === 1;\n util.assert(aRank >= 2 && bRank >= 2 && batchDimsCompatible, () => `Error in matMul: the input batch dimensions must either be the ` +\n `same or at least one input batch dimension must be 1. Got input ` +\n `batch dimensions of (${outerDimsA}) and (${outerDimsB}).`);\n const outShapeOuterDims = batchDimA > batchDimB ? a.shape.slice(0, -2) : b.shape.slice(0, -2);\n const outShape = outShapeOuterDims.concat([outerShapeA, outerShapeB]);\n util.assert(innerShapeA === innerShapeB, () => `Error in matMul: inner shapes (${innerShapeA}) and (` +\n `${innerShapeB}) of Tensors with shapes ${a.shape} and ` +\n `${b.shape} and transposeA=${transposeA}` +\n ` and transposeB=${transposeB} must match.`);\n const a3dShape = transposeA ? [batchDimA, innerShapeA, outerShapeA] :\n [batchDimA, outerShapeA, innerShapeA];\n const b3dShape = transposeB ? [batchDimB, outerShapeB, innerShapeB] :\n [batchDimB, innerShapeB, outerShapeB];\n // The rest of the implementation is designed to operate on rank-3 tensors\n const a3d = reshape({ inputs: { x: a }, backend, attrs: { shape: a3dShape } });\n const b3d = reshape({ inputs: { x: b }, backend, attrs: { shape: b3dShape } });\n const sharedDim = transposeA ? a3d.shape[1] : a3d.shape[2];\n const leftDim = transposeA ? a3d.shape[2] : a3d.shape[1];\n const rightDim = transposeB ? 
b3d.shape[1] : b3d.shape[2];\n const batchDim = Math.max(batchDimA, batchDimB);\n const a3dValues = backend.data.get(a3d.dataId).values;\n const b3dValues = backend.data.get(b3d.dataId).values;\n const a3dStrides = util.computeStrides(a3d.shape);\n const b3dStrides = util.computeStrides(b3d.shape);\n const [aBatch, aOuterStep, aInnerStep] = transposeA ?\n [a3dStrides[0], 1, a3dStrides[1]] :\n [a3dStrides[0], a3dStrides[1], 1];\n const [bInnerStep, bOuterStep, bBatch] = transposeB ?\n [1, b3dStrides[1], b3dStrides[0]] :\n [b3dStrides[1], 1, b3dStrides[0]];\n const size = leftDim * rightDim;\n const result = buffer([batchDim, leftDim, rightDim], a3d.dtype);\n const resVals = result.values;\n const blockSize = backend.blockSize;\n for (let bi = 0; bi < batchDim; bi++) {\n for (let i0 = 0; i0 < leftDim; i0 += blockSize) {\n for (let j0 = 0; j0 < rightDim; j0 += blockSize) {\n for (let k0 = 0; k0 < sharedDim; k0 += blockSize) {\n // for when blockSize doesn't evenly divide the input\n const iBlock = Math.min(i0 + blockSize, leftDim);\n const jBlock = Math.min(j0 + blockSize, rightDim);\n const kBlock = Math.min(k0 + blockSize, sharedDim);\n for (let i = i0; i < iBlock; i++) {\n for (let j = j0; j < jBlock; j++) {\n let sum = 0.0;\n for (let k = k0; k < kBlock; k++) {\n const batchOffsetA = Math.min(bi, batchDimA - 1) * aBatch;\n const batchOffsetB = Math.min(bi, batchDimB - 1) * bBatch;\n const aVal = a3dValues[batchOffsetA + i * aOuterStep + k * aInnerStep];\n const bVal = b3dValues[k * bInnerStep + j * bOuterStep + batchOffsetB];\n sum += aVal * bVal;\n }\n resVals[bi * size + (i * rightDim + j)] += sum;\n }\n }\n }\n }\n }\n }\n backend.disposeIntermediateTensorInfo(a3d);\n backend.disposeIntermediateTensorInfo(b3d);\n // set correct shape on output.\n return backend.makeTensorInfo(outShape, result.dtype, result.values);\n}\nexport const batchMatMulConfig = {\n kernelName: BatchMatMul,\n backendName: 'cpu',\n kernelFunc: batchMatMul,\n};\n//# sourceMappingURL=BatchMatMul.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
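// Standalone sketch of the core computation in batchMatMul above, without the
// cache-blocking (blockSize tiling) and batch-dimension broadcasting of the bundled
// kernel. Hypothetical helper over flat row-major arrays:
function batchMatMulFlat(aVals, bVals, batch, m, k, n) {
  const out = new Float32Array(batch * m * n);
  for (let b = 0; b < batch; b++) {
    for (let i = 0; i < m; i++) {
      for (let j = 0; j < n; j++) {
        let sum = 0;
        for (let p = 0; p < k; p++) {
          sum += aVals[b * m * k + i * k + p] * bVals[b * k * n + p * n + j];
        }
        out[b * m * n + i * n + j] = sum;
      }
    }
  }
  return out;
}
// One batch: 2x2 identity times [[1,2],[3,4]] leaves it unchanged.
console.log(batchMatMulFlat([1, 0, 0, 1], [1, 2, 3, 4], 1, 2, 2, 2)); // [1, 2, 3, 4]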
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { _FusedMatMul } from '@tensorflow/tfjs-core';\nimport { applyActivation } from '../utils/fused_utils';\nimport { add } from './Add';\nimport { batchMatMul } from './BatchMatMul';\nexport function _fusedMatMul(args) {\n const { inputs, backend, attrs } = args;\n const { a, b, bias, preluActivationWeights } = inputs;\n const { transposeA, transposeB, activation } = attrs;\n let current;\n let addRes;\n let activationRes;\n const intermediates = [];\n const matMulRes = batchMatMul({ inputs: { a, b }, attrs: { transposeA, transposeB }, backend });\n current = matMulRes;\n if (bias) {\n addRes = add({ inputs: { a: current, b: bias }, backend });\n intermediates.push(current);\n current = addRes;\n }\n if (activation) {\n activationRes =\n applyActivation(backend, current, activation, preluActivationWeights);\n intermediates.push(current);\n current = activationRes;\n }\n for (const i of intermediates) {\n backend.disposeIntermediateTensorInfo(i);\n }\n return current;\n}\nexport const _fusedMatMulConfig = {\n kernelName: _FusedMatMul,\n backendName: 'cpu',\n kernelFunc: _fusedMatMul,\n};\n//# sourceMappingURL=_FusedMatMul.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Acos } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const acos = unaryKernelFunc(Acos, (xi) => Math.acos(xi));\nexport const acosConfig = {\n kernelName: Acos,\n backendName: 'cpu',\n kernelFunc: acos,\n};\n//# sourceMappingURL=Acos.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Acosh } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const acosh = unaryKernelFunc(Acosh, (xi) => Math.acosh(xi));\nexport const acoshConfig = {\n kernelName: Acosh,\n backendName: 'cpu',\n kernelFunc: acosh,\n};\n//# sourceMappingURL=Acosh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Asin } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const asin = unaryKernelFunc(Asin, (xi) => Math.asin(xi));\nexport const asinConfig = {\n kernelName: Asin,\n backendName: 'cpu',\n kernelFunc: asin,\n};\n//# sourceMappingURL=Asin.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Asinh } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const asinh = unaryKernelFunc(Asinh, (xi) => Math.asinh(xi));\nexport const asinhConfig = {\n kernelName: Asinh,\n backendName: 'cpu',\n kernelFunc: asinh,\n};\n//# sourceMappingURL=Asinh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Atan } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const atan = unaryKernelFunc(Atan, (xi) => Math.atan(xi));\nexport const atanConfig = {\n kernelName: Atan,\n backendName: 'cpu',\n kernelFunc: atan,\n};\n//# sourceMappingURL=Atan.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Atanh } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const atanh = unaryKernelFunc(Atanh, (xi) => Math.atanh(xi));\nexport const atanhConfig = {\n kernelName: Atanh,\n backendName: 'cpu',\n kernelFunc: atanh,\n};\n//# sourceMappingURL=Atanh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { buffer } from '@tensorflow/tfjs-core';\nexport function pool(xValues, xShape, dtype, strides, convInfo, poolType) {\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n const initialValue = (poolType === 'max' ? 
Number.NEGATIVE_INFINITY :\n Number.POSITIVE_INFINITY);\n const output = buffer(convInfo.outShape, dtype);\n const outputVals = output.values;\n const outputBatchStrides = convInfo.outShape[1] * convInfo.outShape[2] * convInfo.outShape[3];\n const outputRowStrides = convInfo.outShape[2] * convInfo.outShape[3];\n const outputColStrides = convInfo.outShape[3];\n for (let b = 0; b < convInfo.batchSize; ++b) {\n const outputBatchOffset = b * outputBatchStrides;\n const inputBatchOffset = b * strides[0];\n for (let d = 0; d < convInfo.inChannels; ++d) {\n for (let yR = 0; yR < convInfo.outHeight; ++yR) {\n const xRCorner = yR * strideHeight - padTop;\n const xRMin = Math.max(0, xRCorner);\n const xRMax = Math.min(convInfo.inHeight, effectiveFilterHeight + xRCorner);\n const outputRowOffset = outputBatchOffset + yR * outputRowStrides;\n for (let yC = 0; yC < convInfo.outWidth; ++yC) {\n const xCCorner = yC * strideWidth - padLeft;\n const xCMin = Math.max(0, xCCorner);\n const xCMax = Math.min(convInfo.inWidth, effectiveFilterWidth + xCCorner);\n let minMaxValue = initialValue;\n let avgValue = 0;\n let count = 0;\n for (let xR = xRMin; xR < xRMax; xR += dilationHeight) {\n const xROffset = inputBatchOffset + xR * strides[1];\n for (let xC = xCMin; xC < xCMax; xC += dilationWidth) {\n const xCOffset = xROffset + xC * strides[2];\n const pixel = xValues[xCOffset + d];\n if ((poolType === 'max' && pixel > minMaxValue)) {\n minMaxValue = pixel;\n }\n else if (poolType === 'avg') {\n avgValue += pixel;\n count++;\n }\n }\n if (isNaN(minMaxValue)) {\n break;\n }\n }\n const outputOffset = outputRowOffset + yC * outputColStrides + d;\n outputVals[outputOffset] =\n poolType === 'avg' ? avgValue / count : minMaxValue;\n }\n }\n }\n }\n return output;\n}\nexport function maxPoolPositions(xValues, xShape, dtype, convInfo, flattenPositions = false, includeBatchInIndex = false) {\n const maxPositions = buffer(convInfo.outShape, 'int32');\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n const xBuf = buffer(xShape, dtype, xValues);\n for (let b = 0; b < convInfo.batchSize; ++b) {\n for (let d = 0; d < convInfo.inChannels; ++d) {\n for (let yR = 0; yR < convInfo.outHeight; ++yR) {\n const xRCorner = yR * strideHeight - padTop;\n let xRMin = xRCorner;\n while (xRMin < 0) {\n xRMin += dilationHeight;\n }\n // const xRMin = Math.max(0, xRCorner);\n const xRMax = Math.min(convInfo.inHeight, effectiveFilterHeight + xRCorner);\n for (let yC = 0; yC < convInfo.outWidth; ++yC) {\n const xCCorner = yC * strideWidth - padLeft;\n let xCMin = xCCorner;\n while (xCMin < 0) {\n xCMin += dilationWidth;\n }\n const xCMax = Math.min(convInfo.inWidth, effectiveFilterWidth + xCCorner);\n let maxValue = Number.NEGATIVE_INFINITY;\n let maxPosition = -1;\n for (let xR = xRMin; xR < xRMax; xR += dilationHeight) {\n const wR = xR - xRCorner;\n for (let xC = xCMin; xC < xCMax; xC += dilationWidth) {\n const wC = xC - xCCorner;\n const pixel = xBuf.get(b, xR, xC, d);\n if (pixel > maxValue) {\n maxValue = pixel;\n if (flattenPositions) {\n maxPosition = includeBatchInIndex ?\n ((b * convInfo.inHeight + xR) * convInfo.inWidth + xC) *\n convInfo.inChannels +\n d :\n (xR * convInfo.inWidth + xC) * 
convInfo.inChannels + d;\n }\n else {\n maxPosition = wR * effectiveFilterWidth + wC;\n }\n }\n }\n }\n maxPositions.set(maxPosition, b, yR, yC, d);\n }\n }\n }\n }\n return maxPositions;\n}\n//# sourceMappingURL=pool_utils.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { AvgPool, backend_util, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { pool } from '../utils/pool_utils';\nimport { identity } from './Identity';\nexport function avgPool(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n assertNotComplex(x, 'avgPool');\n const { filterSize, strides, pad, dimRoundingMode } = attrs;\n const dilations = 1;\n util.assert(backend_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in avgPool: Either strides or dilations must be 1. ' +\n `Got strides ${strides} and dilations '${dilations}'`);\n const convInfo = backend_util.computePool2DInfo(x.shape, filterSize, strides, dilations, pad, dimRoundingMode);\n let res;\n if (convInfo.filterWidth === 1 && convInfo.filterHeight === 1 &&\n util.arraysEqual(convInfo.inShape, convInfo.outShape)) {\n res = identity({ inputs: { x }, backend });\n }\n else {\n const xValues = backend.data.get(x.dataId).values;\n const strides = util.computeStrides(x.shape);\n const buffer = pool(xValues, x.shape, x.dtype, strides, convInfo, 'avg');\n res = backend.makeTensorInfo(convInfo.outShape, x.dtype, buffer.values);\n }\n return res;\n}\nexport const avgPoolConfig = {\n kernelName: AvgPool,\n backendName: 'cpu',\n kernelFunc: avgPool\n};\n//# sourceMappingURL=AvgPool.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
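// The pool() helper above generalizes this to NHWC tensors with strides, dilation and
// padding. Minimal standalone sketch of 2x2 max/avg pooling on a plain matrix
// (hypothetical helper, stride 2, no padding):
function pool2x2(m, type) {
  const out = [];
  for (let r = 0; r < m.length; r += 2) {
    const row = [];
    for (let c = 0; c < m[0].length; c += 2) {
      const w = [m[r][c], m[r][c + 1], m[r + 1][c], m[r + 1][c + 1]];
      row.push(type === 'max' ? Math.max(...w) : w.reduce((a, b) => a + b, 0) / w.length);
    }
    out.push(row);
  }
  return out;
}
const img = [[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12], [13, 14, 15, 16]];
console.log(pool2x2(img, 'max')); // [[6, 8], [14, 16]]
console.log(pool2x2(img, 'avg')); // [[3.5, 5.5], [11.5, 13.5]]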
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { AvgPoolBackprop, backend_util, buffer } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function avgPoolBackprop(args) {\n const { inputs, backend, attrs } = args;\n const { dy, input } = inputs;\n const x = input;\n assertNotComplex([dy, input], 'avgPoolBackprop');\n const { filterSize, strides, pad } = attrs;\n const convInfo = backend_util.computePool2DInfo(x.shape, filterSize, strides, 1 /* dilations */, pad);\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padLeft = effectiveFilterWidth - 1 - convInfo.padInfo.left;\n const padTop = effectiveFilterHeight - 1 - convInfo.padInfo.top;\n const dx = buffer(x.shape, 'float32');\n const avgMultiplier = 1 / (filterHeight * filterWidth);\n const dyData = backend.data.get(dy.dataId).values;\n const dyBuf = buffer(dy.shape, 'float32', dyData);\n for (let b = 0; b < convInfo.batchSize; ++b) {\n for (let d = 0; d < convInfo.inChannels; ++d) {\n for (let dxR = 0; dxR < convInfo.inHeight; ++dxR) {\n for (let dxC = 0; dxC < convInfo.inWidth; ++dxC) {\n // Shader code begins.\n const dyRCorner = dxR - padTop;\n const dyCCorner = dxC - padLeft;\n let dotProd = 0;\n for (let wR = 0; wR < effectiveFilterHeight; wR += dilationHeight) {\n const dyR = (dyRCorner + wR) / strideHeight;\n if (dyR < 0 || dyR >= convInfo.outHeight ||\n Math.floor(dyR) !== dyR) {\n continue;\n }\n for (let wC = 0; wC < effectiveFilterWidth; wC += dilationWidth) {\n const dyC = (dyCCorner + wC) / strideWidth;\n if (dyC < 0 || dyC >= convInfo.outWidth ||\n Math.floor(dyC) !== dyC) {\n continue;\n }\n const pixel = dyBuf.get(b, dyR, dyC, d);\n dotProd += pixel;\n }\n }\n dx.set(dotProd * avgMultiplier, b, dxR, dxC, d);\n }\n }\n }\n }\n return backend.makeTensorInfo(dx.shape, dx.dtype, dx.values);\n}\nexport const avgPoolBackpropConfig = {\n kernelName: AvgPoolBackprop,\n backendName: 'cpu',\n kernelFunc: avgPoolBackprop\n};\n//# sourceMappingURL=AvgPoolBackprop.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { FusedBatchNorm, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function batchNorm(args) {\n const { inputs, backend, attrs } = args;\n const { x, scale, offset, mean, variance } = inputs;\n util.assert(mean.shape.length === variance.shape.length, () => 'Batch normalization gradient requires mean and variance to have ' +\n 'equal ranks.');\n util.assert(offset == null || mean.shape.length === offset.shape.length, () => 'Batch normalization gradient requires mean and offset to have ' +\n 'equal ranks.');\n util.assert(scale == null || mean.shape.length === scale.shape.length, () => 'Batch normalization gradient requires mean and scale to have ' +\n 'equal ranks.');\n assertNotComplex([x, mean, variance, scale, offset], 'batchNorm');\n let { varianceEpsilon } = attrs;\n if (varianceEpsilon == null) {\n varianceEpsilon = 0.001;\n }\n const xVals = backend.data.get(x.dataId).values;\n const mVals = backend.data.get(mean.dataId).values;\n const varVals = backend.data.get(variance.dataId).values;\n const sVals = scale ? backend.data.get(scale.dataId).values :\n new Float32Array([1]);\n const offVals = offset ?\n backend.data.get(offset.dataId).values :\n new Float32Array([0]);\n const outVals = new Float32Array(xVals.length);\n const offValsLength = offVals.length;\n const sValsLength = sVals.length;\n const varValsLength = varVals.length;\n const mValsLength = mVals.length;\n let offi = 0;\n let mi = 0;\n let si = 0;\n let vi = 0;\n for (let i = 0; i < xVals.length; ++i) {\n outVals[i] = offVals[offi++] +\n (xVals[i] - mVals[mi++]) * sVals[si++] /\n Math.sqrt(varVals[vi++] + varianceEpsilon);\n if (offi >= offValsLength) {\n offi = 0;\n }\n if (mi >= mValsLength) {\n mi = 0;\n }\n if (si >= sValsLength) {\n si = 0;\n }\n if (vi >= varValsLength) {\n vi = 0;\n }\n }\n return backend.makeTensorInfo(x.shape, x.dtype, outVals);\n}\nexport const batchNormConfig = {\n kernelName: FusedBatchNorm,\n backendName: 'cpu',\n kernelFunc: batchNorm,\n};\n//# sourceMappingURL=BatchNorm.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
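// The batchNorm kernel above cycles the offset/mean/scale/variance indices so that
// broadcasting works on flat arrays; the per-element formula it applies is
//   out = offset + (x - mean) * scale / sqrt(variance + epsilon)
// Standalone single-channel sketch (hypothetical helper):
const batchNorm1d = (xs, mean, variance, scale = 1, offset = 0, eps = 0.001) =>
  xs.map((x) => offset + ((x - mean) * scale) / Math.sqrt(variance + eps));
console.log(batchNorm1d([2, 4, 6], 4, 4)); // approximately [-1, 0, 1]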
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ClipByValue } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const clip = unaryKernelFunc(ClipByValue, (xi, attrs) => {\n const clipAttrs = attrs;\n if (xi > clipAttrs.clipValueMax) {\n return clipAttrs.clipValueMax;\n }\n return xi < clipAttrs.clipValueMin ? clipAttrs.clipValueMin : xi;\n});\nexport const clipConfig = {\n kernelName: ClipByValue,\n backendName: 'cpu',\n kernelFunc: clip,\n};\n//# sourceMappingURL=Clip.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Imag } from '@tensorflow/tfjs-core';\nexport function imag(args) {\n const { inputs, backend } = args;\n const { input } = inputs;\n const imag = backend.data.get(input.dataId).complexTensorInfos.imag;\n const imagVal = backend.data.get(imag.dataId).values;\n // When complex tensor is disposed, its underlying parts will be disposed too.\n // Make new tensor out of the imag value of the complex. This makes sure the\n // value is still accessible even if complex tensor is disposed.\n return backend.makeTensorInfo(imag.shape, imag.dtype, imagVal);\n}\nexport const imagConfig = {\n kernelName: Imag,\n backendName: 'cpu',\n kernelFunc: imag\n};\n//# sourceMappingURL=Imag.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Concat, util } from '@tensorflow/tfjs-core';\nimport { complex } from './Complex';\nimport { imag } from './Imag';\nimport { real } from './Real';\nimport { reshape } from './Reshape';\nexport function concat(args) {\n const { inputs, backend, attrs } = args;\n const { axis } = attrs;\n const $axis = util.parseAxisParam(axis, inputs[0].shape)[0];\n let outShape = backend_util.computeOutShape(inputs.map(t => t.shape), $axis);\n if (util.sizeFromShape(outShape) === 0) {\n return backend.makeTensorInfo(outShape, inputs[0].dtype, []);\n }\n // Keep only non-empty tensors (ignore tensors with 0 in their shape).\n const $inputs = inputs.filter(t => util.sizeFromShape(t.shape) > 0);\n if ($inputs.length === 1) {\n return $inputs[0];\n }\n const shapes = $inputs.map(t => t.shape);\n backend_util.assertParamsConsistent(shapes, $axis);\n if ($inputs[0].dtype === 'complex64') {\n const reals = $inputs.map((t) => real({ inputs: { input: t }, backend }));\n const imags = $inputs.map((t) => imag({ inputs: { input: t }, backend }));\n const realConcated = concat({ inputs: reals, backend, attrs: { axis: $axis } });\n const imagConcated = concat({ inputs: imags, backend, attrs: { axis: $axis } });\n const result = complex({ inputs: { real: realConcated, imag: imagConcated }, backend });\n reals.forEach(r => backend.disposeIntermediateTensorInfo(r));\n imags.forEach(i => backend.disposeIntermediateTensorInfo(i));\n backend.disposeIntermediateTensorInfo(realConcated);\n backend.disposeIntermediateTensorInfo(imagConcated);\n return result;\n }\n // Any concat of n-dimensional tensors across any axis can be reduced to\n // a concatenation of two-dimensional tensors across the axis 1 by first\n // partitioning the axes of the original tensors into those less than the\n // axis to be concatenated and the rest. 
Then reshape the tensors\n // into a two-dimensional tensor by collapsing these two sets of axes and\n // concatenate the resulting matrices across the axis 1, finally reshaping\n // the result to have the proper shape.\n const inputs2D = $inputs.map(t => {\n const innerSize = util.sizeFromShape(t.shape.slice($axis));\n const shape = [-1, innerSize];\n return reshape({ inputs: { x: t }, backend, attrs: { shape } });\n });\n // Concats 2d tensors along axis=1.\n outShape =\n backend_util.computeOutShape(inputs2D.map(t => t.shape), 1 /* axis */);\n const outVals = util.getTypedArrayFromDType($inputs[0].dtype, util.sizeFromShape(outShape));\n if (inputs2D[0].shape[0] === 1) {\n // Use built-in TypedArray.set() method for speed.\n let offset = 0;\n inputs2D.forEach(t => {\n const val = backend.data.get(t.dataId).values;\n const size = util.sizeFromShape(t.shape);\n outVals.set(val, offset);\n offset += size;\n });\n }\n else {\n let colOffset = 0;\n inputs2D.forEach(t => {\n const tVals = backend.data.get(t.dataId).values;\n let tIdx = 0;\n for (let row = 0; row < t.shape[0]; ++row) {\n const resIdx = row * outShape[1] + colOffset;\n for (let col = 0; col < t.shape[1]; ++col) {\n outVals[resIdx + col] = tVals[tIdx++];\n }\n }\n colOffset += t.shape[1];\n });\n }\n const finalOutShape = backend_util.computeOutShape($inputs.map(t => t.shape), $axis);\n const outInfo = backend.makeTensorInfo(finalOutShape, inputs[0].dtype, outVals);\n inputs2D.forEach(t => backend.disposeIntermediateTensorInfo(t));\n return outInfo;\n}\nexport const concatConfig = {\n kernelName: Concat,\n backendName: 'cpu',\n kernelFunc: concat\n};\n//# sourceMappingURL=Concat.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Conv2D, TensorBuffer, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function conv2D(args) {\n const { inputs, backend, attrs } = args;\n const { x, filter } = inputs;\n const { strides, pad, dataFormat, dilations, dimRoundingMode } = attrs;\n assertNotComplex([x, filter], 'conv2d');\n const $dataFormat = backend_util.convertConv2DDataFormat(dataFormat);\n const convInfo = backend_util.computeConv2DInfo(x.shape, filter.shape, strides, dilations, pad, dimRoundingMode, false /* depthwise */, $dataFormat);\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const padLeft = convInfo.padInfo.left;\n const padTop = convInfo.padInfo.top;\n const isChannelsLast = convInfo.dataFormat === 'channelsLast';\n const y = new TensorBuffer(convInfo.outShape, x.dtype);\n const xStrides = util.computeStrides(x.shape);\n const filterStrides = util.computeStrides(filter.shape);\n const xBatchStride = xStrides[0];\n const xRowStride = isChannelsLast ? 
xStrides[1] : xStrides[2];\n const xColStride = isChannelsLast ? xStrides[2] : 1;\n const xChannelStride = isChannelsLast ? 1 : xStrides[1];\n const yBatchStride = y.strides[0];\n const yRowStride = isChannelsLast ? y.strides[1] : y.strides[2];\n const yColStride = isChannelsLast ? y.strides[2] : 1;\n const yChannelStride = isChannelsLast ? 1 : y.strides[1];\n const xVals = backend.data.get(x.dataId).values;\n const wVals = backend.data.get(filter.dataId).values;\n const yVals = y.values;\n for (let b = 0; b < convInfo.batchSize; ++b) {\n const xOffset1 = b * xBatchStride;\n const yOffset1 = b * yBatchStride;\n for (let yR = 0; yR < convInfo.outHeight; ++yR) {\n const yOffset2 = yOffset1 + yR * yRowStride;\n const xRCorner = yR * convInfo.strideHeight - padTop;\n for (let wR = 0; wR < filterHeight; ++wR) {\n const xR = xRCorner + wR * dilationHeight;\n if (xR < 0 || xR >= convInfo.inHeight) {\n continue;\n }\n const wOffset1 = wR * filterStrides[0];\n const xOffset2 = xOffset1 + xR * xRowStride;\n for (let yC = 0; yC < convInfo.outWidth; ++yC) {\n const yOffset3 = yOffset2 + yC * yColStride;\n const xCCorner = yC * convInfo.strideWidth - padLeft;\n for (let wC = 0; wC < filterWidth; ++wC) {\n const xC = xCCorner + wC * dilationWidth;\n if (xC < 0 || xC >= convInfo.inWidth) {\n continue;\n }\n const wOffset2 = wOffset1 + wC * filterStrides[1];\n const xOffset3 = xOffset2 + xC * xColStride;\n let wOffset3 = wOffset2;\n for (let d1 = 0; d1 < convInfo.inChannels; ++d1) {\n const xVal = xVals[xOffset3 + d1 * xChannelStride];\n for (let d2 = 0; d2 < convInfo.outChannels; ++d2) {\n yVals[yOffset3 + d2 * yChannelStride] +=\n xVal * wVals[wOffset3 + d2];\n }\n wOffset3 += convInfo.outChannels;\n }\n }\n }\n }\n }\n }\n return backend.makeTensorInfo(y.shape, y.dtype, yVals);\n}\nexport const conv2DConfig = {\n kernelName: Conv2D,\n backendName: 'cpu',\n kernelFunc: conv2D\n};\n//# sourceMappingURL=Conv2D.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
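// conv2D above walks the same loops over NHWC/NCHW strides with padding and dilation.
// Minimal standalone sketch: single-channel, stride 1, 'valid' 2D convolution
// (cross-correlation, as in the kernel) on plain matrices (hypothetical helper):
function conv2dValid(input, filter) {
  const outH = input.length - filter.length + 1;
  const outW = input[0].length - filter[0].length + 1;
  const out = [];
  for (let r = 0; r < outH; r++) {
    const row = [];
    for (let c = 0; c < outW; c++) {
      let sum = 0;
      for (let fr = 0; fr < filter.length; fr++) {
        for (let fc = 0; fc < filter[0].length; fc++) {
          sum += input[r + fr][c + fc] * filter[fr][fc];
        }
      }
      row.push(sum);
    }
    out.push(row);
  }
  return out;
}
console.log(conv2dValid([[1, 2, 3], [4, 5, 6], [7, 8, 9]], [[1, 0], [0, 1]]));
// [[6, 8], [12, 14]]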
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Conv2DBackpropFilter, TensorBuffer } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function conv2DBackpropFilter(args) {\n const { inputs, backend, attrs } = args;\n const { x, dy } = inputs;\n const { strides, pad, dataFormat, dimRoundingMode, filterShape } = attrs;\n assertNotComplex([x, dy], 'conv2dBackpropFilter');\n const $dataFormat = backend_util.convertConv2DDataFormat(dataFormat);\n const convInfo = backend_util.computeConv2DInfo(x.shape, filterShape, strides, 1 /* dilations */, pad, dimRoundingMode, false /* depthwise */, $dataFormat);\n const { strideHeight, strideWidth, filterHeight, filterWidth } = convInfo;\n const isChannelsLast = convInfo.dataFormat === 'channelsLast';\n const dW = new TensorBuffer(convInfo.filterShape, 'float32');\n const leftPad = convInfo.padInfo.left;\n const topPad = convInfo.padInfo.top;\n const xVals = backend.data.get(x.dataId).values;\n const dyVals = backend.data.get(dy.dataId).values;\n const xBuf = new TensorBuffer(x.shape, x.dtype, xVals);\n const dyBuf = new TensorBuffer(dy.shape, dy.dtype, dyVals);\n for (let wR = 0; wR < filterHeight; ++wR) {\n const yRMin = Math.max(0, Math.ceil((topPad - wR) / strideHeight));\n const yRMax = Math.min(convInfo.outHeight, (convInfo.inHeight + topPad - wR) / strideHeight);\n for (let wC = 0; wC < filterWidth; ++wC) {\n const yCMin = Math.max(0, Math.ceil((leftPad - wC) / strideWidth));\n const yCMax = Math.min(convInfo.outWidth, (convInfo.inWidth + leftPad - wC) / strideWidth);\n for (let d1 = 0; d1 < convInfo.inChannels; ++d1) {\n for (let d2 = 0; d2 < convInfo.outChannels; ++d2) {\n let dotProd = 0;\n for (let b = 0; b < convInfo.batchSize; ++b) {\n for (let yR = yRMin; yR < yRMax; ++yR) {\n const xR = wR + yR * strideHeight - topPad;\n for (let yC = yCMin; yC < yCMax; ++yC) {\n const xC = wC + yC * strideWidth - leftPad;\n if (isChannelsLast) {\n dotProd += xBuf.get(b, xR, xC, d1) *\n dyBuf.get(b, yR, yC, d2);\n }\n else {\n dotProd += xBuf.get(b, d1, xR, xC) *\n dyBuf.get(b, d2, yR, yC);\n }\n }\n }\n }\n dW.set(dotProd, wR, wC, d1, d2);\n }\n }\n }\n }\n return backend.makeTensorInfo(dW.shape, dW.dtype, dW.values);\n}\nexport const conv2DBackpropFilterConfig = {\n kernelName: Conv2DBackpropFilter,\n backendName: 'cpu',\n kernelFunc: conv2DBackpropFilter\n};\n//# sourceMappingURL=Conv2DBackpropFilter.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Conv2DBackpropInput, TensorBuffer, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function conv2DBackpropInput(args) {\n const { inputs, backend, attrs } = args;\n const { dy, filter } = inputs;\n const { inputShape, strides, pad, dataFormat, dimRoundingMode } = attrs;\n assertNotComplex([dy, filter], 'conv2dBackpropInput');\n const filterStrides = util.computeStrides(filter.shape);\n const dyStrides = util.computeStrides(dy.shape);\n let $dataFormat = backend_util.convertConv2DDataFormat(dataFormat);\n const convInfo = backend_util.computeConv2DInfo(inputShape, filter.shape, strides, 1 /* dilations */, pad, dimRoundingMode, false, $dataFormat);\n const dx = new TensorBuffer(convInfo.inShape, 'float32');\n const dxValues = dx.values;\n const dyValues = backend.data.get(dy.dataId).values;\n const fltValues = backend.data.get(filter.dataId).values;\n const [fltS0, fltS1, fltS2] = filterStrides;\n const { batchSize, filterHeight, filterWidth, inChannels, inHeight, inWidth, outChannels, outHeight, outWidth, strideHeight, strideWidth } = convInfo;\n $dataFormat = convInfo.dataFormat;\n const topPad = filterHeight - 1 - convInfo.padInfo.top;\n const leftPad = filterWidth - 1 - convInfo.padInfo.left;\n const isChannelsLast = $dataFormat === 'channelsLast';\n const xBatchStride = dx.strides[0];\n const xRowStride = isChannelsLast ? dx.strides[1] : dx.strides[2];\n const xColStride = isChannelsLast ? dx.strides[2] : 1;\n const xChannelStride = isChannelsLast ? 1 : dx.strides[1];\n const yBatchStride = dyStrides[0];\n const yRowStride = isChannelsLast ? dyStrides[1] : dyStrides[2];\n const yColStride = isChannelsLast ? dyStrides[2] : 1;\n const yChannelStride = isChannelsLast ? 
1 : dyStrides[1];\n for (let b = 0; b < batchSize; ++b) {\n for (let d1 = 0; d1 < inChannels; ++d1) {\n for (let xR = 0; xR < inHeight; ++xR) {\n const xRCorner = xR - topPad;\n const xRMin = Math.max(0, Math.ceil(xRCorner / strideHeight));\n const yRMax = Math.min(outHeight, (filterHeight + xRCorner) / strideHeight);\n for (let xC = 0; xC < inWidth; ++xC) {\n const xCCorner = xC - leftPad;\n const xCMin = Math.max(0, Math.ceil(xCCorner / strideWidth));\n const yCMax = Math.min(outWidth, (filterWidth + xCCorner) / strideWidth);\n let dotProd = 0;\n for (let yR = xRMin; yR < yRMax; ++yR) {\n const wR = yR * strideHeight - xRCorner;\n for (let yC = xCMin; yC < yCMax; ++yC) {\n const wC = yC * strideWidth - xCCorner;\n const dyOffset = yBatchStride * b + yRowStride * yR + yColStride * yC;\n const fltOffset = fltS0 * (filterHeight - 1 - wR) +\n fltS1 * (filterWidth - 1 - wC) + fltS2 * d1;\n for (let d2 = 0; d2 < outChannels; ++d2) {\n const pixel = dyValues[dyOffset + yChannelStride * d2];\n const weight = fltValues[fltOffset + d2];\n dotProd += pixel * weight;\n }\n }\n }\n const dxOffset = xBatchStride * b + xRowStride * xR +\n xColStride * xC + xChannelStride * d1;\n dxValues[dxOffset] = dotProd;\n }\n }\n }\n }\n return backend.makeTensorInfo(dx.shape, dx.dtype, dx.values);\n}\nexport const conv2DBackpropInputConfig = {\n kernelName: Conv2DBackpropInput,\n backendName: 'cpu',\n kernelFunc: conv2DBackpropInput\n};\n//# sourceMappingURL=Conv2DBackpropInput.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Conv3D, TensorBuffer, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function conv3D(args) {\n const { inputs, backend, attrs } = args;\n const { x, filter } = inputs;\n const { strides, pad, dilations } = attrs;\n assertNotComplex([x, filter], 'conv3d');\n const convInfo = backend_util.computeConv3DInfo(x.shape, filter.shape, strides, dilations, pad);\n const { filterDepth, filterHeight, filterWidth, dilationDepth, dilationHeight, dilationWidth, padInfo } = convInfo;\n const padFront = padInfo.front;\n const padLeft = padInfo.left;\n const padTop = padInfo.top;\n const y = new TensorBuffer(convInfo.outShape, x.dtype);\n const xVals = backend.data.get(x.dataId).values;\n const wVals = backend.data.get(filter.dataId).values;\n const yVals = y.values;\n const xStrides = util.computeStrides(x.shape);\n const filterStrides = util.computeStrides(filter.shape);\n for (let b = 0; b < convInfo.batchSize; ++b) {\n const xOffset1 = b * xStrides[0];\n const yOffset1 = b * y.strides[0];\n for (let yF = 0; yF < convInfo.outDepth; ++yF) {\n const yOffset2 = yOffset1 + yF * y.strides[1];\n const xFCorner = yF * convInfo.strideDepth - padFront;\n for (let wF = 0; wF < filterDepth; ++wF) {\n const xF = xFCorner + wF * dilationDepth;\n if (xF < 0 || xF >= 
convInfo.inDepth) {\n continue;\n }\n const wOffset1 = wF * filterStrides[0];\n const xOffset2 = xOffset1 + xF * xStrides[1];\n for (let yR = 0; yR < convInfo.outHeight; ++yR) {\n const yOffset3 = yOffset2 + yR * y.strides[2];\n const xRCorner = yR * convInfo.strideHeight - padTop;\n for (let wR = 0; wR < filterHeight; ++wR) {\n const xR = xRCorner + wR * dilationHeight;\n if (xR < 0 || xR >= convInfo.inHeight) {\n continue;\n }\n const wOffset2 = wOffset1 + wR * filterStrides[1];\n const xOffset3 = xOffset2 + xR * xStrides[2];\n for (let yC = 0; yC < convInfo.outWidth; ++yC) {\n const yOffset4 = yOffset3 + yC * convInfo.outChannels;\n const xCCorner = yC * convInfo.strideWidth - padLeft;\n for (let wC = 0; wC < filterWidth; ++wC) {\n const xC = xCCorner + wC * dilationWidth;\n if (xC < 0 || xC >= convInfo.inWidth) {\n continue;\n }\n const wOffset3 = wOffset2 + wC * filterStrides[2];\n const xOffset4 = xOffset3 + xC * convInfo.inChannels;\n let wOffset4 = wOffset3;\n for (let d1 = 0; d1 < convInfo.inChannels; ++d1) {\n const xVal = xVals[xOffset4 + d1];\n for (let d2 = 0; d2 < convInfo.outChannels; ++d2) {\n yVals[yOffset4 + d2] += xVal * wVals[wOffset4 + d2];\n }\n wOffset4 += convInfo.outChannels;\n }\n }\n }\n }\n }\n }\n }\n }\n return backend.makeTensorInfo(y.shape, y.dtype, y.values);\n}\nexport const conv3DConfig = {\n kernelName: Conv3D,\n backendName: 'cpu',\n kernelFunc: conv3D\n};\n//# sourceMappingURL=Conv3D.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Conv3DBackpropFilterV2, TensorBuffer, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function conv3DBackpropFilterV2(args) {\n const { inputs, backend, attrs } = args;\n const { x, dy } = inputs;\n const { strides, pad, filterShape } = attrs;\n assertNotComplex([x, dy], 'conv3dBackpropFilterV2');\n const xStrides = util.computeStrides(x.shape);\n const dyStrides = util.computeStrides(dy.shape);\n const convInfo = backend_util.computeConv3DInfo(x.shape, filterShape, strides, 1 /* dilations */, pad);\n const strideDepth = convInfo.strideDepth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const filterDepth = convInfo.filterDepth;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const dw = new TensorBuffer(convInfo.filterShape, 'float32');\n const dwValues = dw.values;\n const [dwS0, dwS1, dwS2, dwS3] = dw.strides;\n const dyValues = backend.data.get(dy.dataId).values;\n const [dyS0, dyS1, dyS2, dyS3] = dyStrides;\n const xValues = backend.data.get(x.dataId).values;\n const [xS0, xS1, xS2, xS3] = xStrides;\n const frontPad = convInfo.padInfo.front;\n const leftPad = convInfo.padInfo.left;\n const topPad = convInfo.padInfo.top;\n for (let wF = 0; wF < filterDepth; ++wF) {\n const yFMin = Math.max(0, 
Math.ceil((frontPad - wF) / strideDepth));\n const yFMax = Math.min(convInfo.outDepth, (convInfo.inDepth + frontPad - wF) / strideDepth);\n const wOffset1 = wF * dwS0;\n for (let wR = 0; wR < filterHeight; ++wR) {\n const yRMin = Math.max(0, Math.ceil((topPad - wR) / strideHeight));\n const yRMax = Math.min(convInfo.outHeight, (convInfo.inHeight + topPad - wR) / strideHeight);\n const wOffset2 = wR * dwS1 + wOffset1;\n for (let wC = 0; wC < filterWidth; ++wC) {\n const yCMin = Math.max(0, Math.ceil((leftPad - wC) / strideWidth));\n const yCMax = Math.min(convInfo.outWidth, (convInfo.inWidth + leftPad - wC) / strideWidth);\n const wOffset3 = wC * dwS2 + wOffset2;\n for (let d1 = 0; d1 < convInfo.inChannels; ++d1) {\n const wOffset4 = d1 * dwS3 + wOffset3;\n for (let d2 = 0; d2 < convInfo.outChannels; ++d2) {\n let dotProd = 0;\n for (let b = 0; b < convInfo.batchSize; ++b) {\n const xOffset1 = b * xS0;\n const yOffset1 = b * dyS0;\n for (let yF = yFMin; yF < yFMax; ++yF) {\n const xF = wF + yF * strideDepth - frontPad;\n const xOffset2 = xF * xS1 + xOffset1;\n const yOffset2 = yF * dyS1 + yOffset1;\n for (let yR = yRMin; yR < yRMax; ++yR) {\n const xR = wR + yR * strideHeight - topPad;\n const xOffset3 = xR * xS2 + xOffset2;\n const yOffset3 = yR * dyS2 + yOffset2;\n for (let yC = yCMin; yC < yCMax; ++yC) {\n const xC = wC + yC * strideWidth - leftPad;\n const xOffset4 = xC * xS3 + xOffset3;\n const yOffset4 = yC * dyS3 + yOffset3;\n dotProd += xValues[xOffset4 + d1] * dyValues[yOffset4 + d2];\n }\n }\n }\n }\n dwValues[wOffset4 + d2] = dotProd;\n }\n }\n }\n }\n }\n return backend.makeTensorInfo(dw.shape, dw.dtype, dw.values);\n}\nexport const conv3DBackpropFilterV2Config = {\n kernelName: Conv3DBackpropFilterV2,\n backendName: 'cpu',\n kernelFunc: conv3DBackpropFilterV2\n};\n//# sourceMappingURL=Conv3DBackpropFilterV2.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Conv3DBackpropInputV2, TensorBuffer, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function conv3DBackpropInputV2(args) {\n const { inputs, backend, attrs } = args;\n const { dy, filter } = inputs;\n const { pad, strides, inputShape } = attrs;\n assertNotComplex([dy], 'conv3dBackpropInputV2');\n const dyStrides = util.computeStrides(dy.shape);\n const filterStrides = util.computeStrides(filter.shape);\n const convInfo = backend_util.computeConv3DInfo(inputShape, filter.shape, strides, 1 /* dilations */, pad);\n const dx = new TensorBuffer(convInfo.inShape, 'float32');\n const dxValues = dx.values;\n const [dxS0, dxS1, dxS2, dxS3] = dx.strides;\n const dyValues = backend.data.get(dy.dataId).values;\n const [dyS0, dyS1, dyS2, dyS3] = dyStrides;\n const fltValues = backend.data.get(filter.dataId).values;\n const [fltS0, fltS1, fltS2, fltS3] = filterStrides;\n const { batchSize, filterDepth, filterHeight, filterWidth, inChannels, inDepth, inHeight, inWidth, outChannels, outDepth, outHeight, outWidth, strideDepth, strideHeight, strideWidth } = convInfo;\n const frontPad = filterDepth - 1 - convInfo.padInfo.front;\n const topPad = filterHeight - 1 - convInfo.padInfo.top;\n const leftPad = filterWidth - 1 - convInfo.padInfo.left;\n for (let b = 0; b < batchSize; ++b) {\n for (let d1 = 0; d1 < inChannels; ++d1) {\n // Frames of depth\n for (let xF = 0; xF < inDepth; ++xF) {\n const xFCorner = xF - frontPad;\n const xFMin = Math.max(0, Math.ceil(xFCorner / strideDepth));\n const yFMax = Math.min(outDepth, (filterDepth + xFCorner) / strideDepth);\n // Rows as per standard 2d matrix notation\n for (let xR = 0; xR < inHeight; ++xR) {\n const xRCorner = xR - topPad;\n const xRMin = Math.max(0, Math.ceil(xRCorner / strideHeight));\n const yRMax = Math.min(outHeight, (filterHeight + xRCorner) / strideHeight);\n // Columns as per standard 2d matrix notation\n for (let xC = 0; xC < inWidth; ++xC) {\n const xCCorner = xC - leftPad;\n const xCMin = Math.max(0, Math.ceil(xCCorner / strideWidth));\n const yCMax = Math.min(outWidth, (filterWidth + xCCorner) / strideWidth);\n let dotProd = 0;\n for (let yF = xFMin; yF < yFMax; ++yF) {\n const wF = yF * strideDepth - xFCorner;\n for (let yR = xRMin; yR < yRMax; ++yR) {\n const wR = yR * strideHeight - xRCorner;\n for (let yC = xCMin; yC < yCMax; ++yC) {\n const wC = yC * strideWidth - xCCorner;\n const dyOffset = dyS0 * b + dyS1 * yF + dyS2 * yR + dyS3 * yC;\n const fltOffset = fltS0 * (filterDepth - 1 - wF) +\n fltS1 * (filterHeight - 1 - wR) +\n fltS2 * (filterWidth - 1 - wC) + fltS3 * d1;\n for (let d2 = 0; d2 < outChannels; ++d2) {\n const pixel = dyValues[dyOffset + d2];\n const weight = fltValues[fltOffset + d2];\n dotProd += pixel * weight;\n }\n }\n }\n }\n dxValues[dxS0 * b + dxS1 * xF + dxS2 * xR + dxS3 * xC + d1] =\n 
dotProd;\n }\n }\n }\n }\n }\n return backend.makeTensorInfo(dx.shape, dx.dtype, dx.values);\n}\nexport const conv3DBackpropInputV2Config = {\n kernelName: Conv3DBackpropInputV2,\n backendName: 'cpu',\n kernelFunc: conv3DBackpropInputV2\n};\n//# sourceMappingURL=Conv3DBackpropInputV2.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Cos } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const cos = unaryKernelFunc(Cos, (xi) => Math.cos(xi));\nexport const cosConfig = {\n kernelName: Cos,\n backendName: 'cpu',\n kernelFunc: cos,\n};\n//# sourceMappingURL=Cos.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Cosh } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const cosh = unaryKernelFunc(Cosh, (xi) => Math.cosh(xi));\nexport const coshConfig = {\n kernelName: Cosh,\n backendName: 'cpu',\n kernelFunc: cosh,\n};\n//# sourceMappingURL=Cosh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, DepthwiseConv2dNative, TensorBuffer, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function depthwiseConv2dNative(args) {\n const { inputs, backend, attrs } = args;\n const { x, filter } = inputs;\n const { strides, pad, dilations, dimRoundingMode } = attrs;\n assertNotComplex([x, filter], 'depthwiseConv2DNative');\n const xStrides = util.computeStrides(x.shape);\n const filterStrides = util.computeStrides(filter.shape);\n let $dilations = dilations;\n if ($dilations == null) {\n $dilations = [1, 1];\n }\n util.assert(backend_util.eitherStridesOrDilationsAreOne(strides, $dilations), () => 'Error in depthwiseConv2d: Either strides or dilations must be ' +\n `1. Got strides ${strides} and dilations '${$dilations}'`);\n const convInfo = backend_util.computeConv2DInfo(x.shape, filter.shape, strides, $dilations, pad, dimRoundingMode, true /* depthwise */);\n const { filterHeight, filterWidth, dilationHeight, dilationWidth, padInfo } = convInfo;\n const padLeft = padInfo.left;\n const padTop = padInfo.top;\n const chMul = convInfo.outChannels / convInfo.inChannels;\n const y = new TensorBuffer(convInfo.outShape, x.dtype);\n const xVals = backend.data.get(x.dataId).values;\n const wVals = backend.data.get(filter.dataId).values;\n const yVals = y.values;\n for (let b = 0; b < convInfo.batchSize; ++b) {\n const xOffset1 = b * xStrides[0];\n const yOffset1 = b * y.strides[0];\n for (let yR = 0; yR < convInfo.outHeight; ++yR) {\n const yOffset2 = yOffset1 + yR * y.strides[1];\n const xRCorner = yR * convInfo.strideHeight - padLeft;\n for (let wR = 0; wR < filterHeight; ++wR) {\n const xR = xRCorner + wR * dilationHeight;\n if (xR < 0 || xR >= convInfo.inHeight) {\n continue;\n }\n const wOffset1 = wR * filterStrides[0];\n const xOffset2 = xOffset1 + xR * xStrides[1];\n for (let yC = 0; yC < convInfo.outWidth; ++yC) {\n const yOffset3 = yOffset2 + yC * y.strides[2];\n const xCCorner = yC * convInfo.strideWidth - padTop;\n for (let wC = 0; wC < filterWidth; ++wC) {\n const xC = xCCorner + wC * dilationWidth;\n if (xC < 0 || xC >= convInfo.inWidth) {\n continue;\n }\n const wOffset2 = wOffset1 + wC * filterStrides[1];\n const xOffset3 = xOffset2 + xC * convInfo.inChannels;\n let yOffset4 = yOffset3;\n let wOffset3 = wOffset2;\n for (let d1 = 0; d1 < convInfo.inChannels; ++d1) {\n const xVal = xVals[xOffset3 + d1];\n for (let q = 0; q < chMul; ++q) {\n yVals[yOffset4 + q] += xVal * wVals[wOffset3 + q];\n }\n yOffset4 += chMul;\n wOffset3 += chMul;\n }\n }\n }\n }\n }\n }\n return backend.makeTensorInfo(y.shape, y.dtype, y.values);\n}\nexport const depthwiseConv2dNativeConfig = {\n kernelName: DepthwiseConv2dNative,\n backendName: 'cpu',\n kernelFunc: depthwiseConv2dNative\n};\n//# sourceMappingURL=DepthwiseConv2dNative.js.map", "/**\n * @license\n * Copyright 2020 Google 
LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, DepthwiseConv2dNativeBackpropFilter, TensorBuffer } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function depthwiseConv2dNativeBackpropFilter(args) {\n const { inputs, backend, attrs } = args;\n const { x, dy } = inputs;\n const { strides, dilations, pad, dimRoundingMode, filterShape } = attrs;\n assertNotComplex([x, dy], 'depthwiseConv2dNativeBackpropFilter');\n const convInfo = backend_util.computeConv2DInfo(x.shape, filterShape, strides, dilations, pad, dimRoundingMode, true /* depthwise */);\n const { strideHeight, strideWidth, filterHeight, filterWidth } = convInfo;\n const dW = new TensorBuffer(convInfo.filterShape, 'float32');\n const leftPad = convInfo.padInfo.left;\n const topPad = convInfo.padInfo.top;\n const chMul = convInfo.outChannels / convInfo.inChannels;\n const xVals = backend.data.get(x.dataId).values;\n const xBuf = new TensorBuffer(x.shape, x.dtype, xVals);\n const dyVals = backend.data.get(dy.dataId).values;\n const dyBuf = new TensorBuffer(dy.shape, dy.dtype, dyVals);\n for (let wR = 0; wR < filterHeight; ++wR) {\n const yRMin = Math.max(0, Math.ceil((topPad - wR) / strideHeight));\n const yRMax = Math.min(convInfo.outHeight, (convInfo.inHeight + topPad - wR) / strideHeight);\n for (let wC = 0; wC < filterWidth; ++wC) {\n const yCMin = Math.max(0, Math.ceil((leftPad - wC) / strideWidth));\n const yCMax = Math.min(convInfo.outWidth, (convInfo.inWidth + leftPad - wC) / strideWidth);\n for (let d2 = 0; d2 < convInfo.outChannels; ++d2) {\n const d1 = Math.trunc(d2 / chMul);\n const dm = d2 % chMul;\n let dotProd = 0;\n for (let b = 0; b < convInfo.batchSize; ++b) {\n for (let yR = yRMin; yR < yRMax; ++yR) {\n const xR = wR + yR * strideHeight - topPad;\n for (let yC = yCMin; yC < yCMax; ++yC) {\n const xC = wC + yC * strideWidth - leftPad;\n dotProd += xBuf.get(b, xR, xC, d1) *\n dyBuf.get(b, yR, yC, d2);\n }\n }\n }\n dW.set(dotProd, wR, wC, d1, dm);\n }\n }\n }\n return backend.makeTensorInfo(dW.shape, dW.dtype, dW.values);\n}\nexport const depthwiseConv2dNativeBackpropFilterConfig = {\n kernelName: DepthwiseConv2dNativeBackpropFilter,\n backendName: 'cpu',\n kernelFunc: depthwiseConv2dNativeBackpropFilter\n};\n//# sourceMappingURL=DepthwiseConv2dNativeBackpropFilter.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, DepthwiseConv2dNativeBackpropInput, TensorBuffer, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function depthwiseConv2dNativeBackpropInput(args) {\n const { inputs, backend, attrs } = args;\n const { dy, filter } = inputs;\n const { strides, dilations, pad, dimRoundingMode, inputShape } = attrs;\n assertNotComplex([dy, filter], 'depthwiseConv2DNativeBackpropInput');\n const dyStrides = util.computeStrides(dy.shape);\n const filterStrides = util.computeStrides(filter.shape);\n const convInfo = backend_util.computeConv2DInfo(inputShape, filter.shape, strides, dilations, pad, dimRoundingMode, true /* depthwise */);\n const dx = new TensorBuffer(convInfo.inShape, 'float32');\n const dxValues = dx.values;\n const [dxS0, dxS1, dxS2] = dx.strides;\n const dyValues = backend.data.get(dy.dataId).values;\n const [dyS0, dyS1, dyS2] = dyStrides;\n const fltValues = backend.data.get(filter.dataId).values;\n const [fltS0, fltS1, fltS2] = filterStrides;\n const { batchSize, filterHeight, filterWidth, inChannels, inHeight, inWidth, outChannels, outHeight, outWidth, strideHeight, strideWidth } = convInfo;\n const topPad = filterHeight - 1 - convInfo.padInfo.top;\n const leftPad = filterWidth - 1 - convInfo.padInfo.left;\n const chMul = outChannels / inChannels;\n for (let b = 0; b < batchSize; ++b) {\n for (let d1 = 0; d1 < inChannels; ++d1) {\n for (let xR = 0; xR < inHeight; ++xR) {\n const xRCorner = xR - topPad;\n const xRMin = Math.max(0, Math.ceil(xRCorner / strideHeight));\n const yRMax = Math.min(outHeight, (filterHeight + xRCorner) / strideHeight);\n for (let xC = 0; xC < inWidth; ++xC) {\n const xCCorner = xC - leftPad;\n const xCMin = Math.max(0, Math.ceil(xCCorner / strideWidth));\n const yCMax = Math.min(outWidth, (filterWidth + xCCorner) / strideWidth);\n let dotProd = 0;\n for (let yR = xRMin; yR < yRMax; ++yR) {\n const wR = yR * strideHeight - xRCorner;\n for (let yC = xCMin; yC < yCMax; ++yC) {\n const wC = yC * strideWidth - xCCorner;\n const dyOffset = dyS0 * b + dyS1 * yR + dyS2 * yC;\n const fltOffset = fltS0 * (filterHeight - 1 - wR) +\n fltS1 * (filterWidth - 1 - wC) + fltS2 * d1;\n for (let dm = 0; dm < chMul; ++dm) {\n const d2 = d1 * chMul + dm;\n const pixel = dyValues[dyOffset + d2];\n const weight = fltValues[fltOffset + dm];\n dotProd += pixel * weight;\n }\n }\n }\n dxValues[dxS0 * b + dxS1 * xR + dxS2 * xC + d1] = dotProd;\n }\n }\n }\n }\n return backend.makeTensorInfo(dx.shape, dx.dtype, dx.values);\n}\nexport const depthwiseConv2dNativeBackpropInputConfig = {\n kernelName: DepthwiseConv2dNativeBackpropInput,\n backendName: 'cpu',\n kernelFunc: depthwiseConv2dNativeBackpropInput\n};\n//# sourceMappingURL=DepthwiseConv2dNativeBackpropInput.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Dilation2D, util } from '@tensorflow/tfjs-core';\nexport const dilation2dConfig = {\n kernelName: Dilation2D,\n backendName: 'cpu',\n kernelFunc: ({ inputs, backend, attrs }) => {\n const { x, filter } = inputs;\n const { strides, pad, dilations } = attrs;\n const cpuBackend = backend;\n const xVals = cpuBackend.data.get(x.dataId).values;\n const xRank = x.shape.length;\n const filterVals = cpuBackend.data.get(filter.dataId).values;\n const filterRank = filter.shape.length;\n const { batchSize, inHeight, inWidth, inChannels, outHeight, outWidth, padInfo, strideHeight, strideWidth, filterHeight, filterWidth, dilationHeight, dilationWidth, outShape } = backend_util.computeDilation2DInfo(x.shape, filter.shape, strides, pad, 'NHWC' /* dataFormat */, dilations);\n const outSize = util.sizeFromShape(outShape);\n const outRank = outShape.length;\n const outputVals = util.getArrayFromDType(x.dtype, outSize);\n // Upsampling the input by fill in `dilation size - 1` values between each\n // input value.\n // This implementation follows the TF c++ implementation:\n // https://github.com/tensorflow/tensorflow/blob/d9a3a849edc198e90172bc58eb293de457f9d986/tensorflow/core/kernels/dilation_ops.cc\n for (let b = 0; b < batchSize; ++b) {\n for (let hOut = 0; hOut < outHeight; ++hOut) {\n const hBeg = hOut * strideHeight - padInfo.top;\n for (let wOut = 0; wOut < outWidth; ++wOut) {\n const wBeg = wOut * strideWidth - padInfo.left;\n for (let d = 0; d < inChannels; ++d) {\n let curVal = Number.MIN_SAFE_INTEGER;\n for (let h = 0; h < filterHeight; ++h) {\n const hIn = hBeg + h * dilationHeight;\n if (hIn >= 0 && hIn < inHeight) {\n for (let w = 0; w < filterWidth; ++w) {\n const wIn = wBeg + w * dilationWidth;\n if (wIn >= 0 && wIn < inWidth) {\n const xIndex = util.locToIndex([b, hIn, wIn, d], xRank, util.computeStrides(x.shape));\n const filterIndex = util.locToIndex([h, w, d], filterRank, util.computeStrides(filter.shape));\n const val = xVals[xIndex] + filterVals[filterIndex];\n if (val > curVal) {\n curVal = val;\n }\n }\n }\n }\n }\n const outputIndex = util.locToIndex([b, hOut, wOut, d], outRank, util.computeStrides(outShape));\n outputVals[outputIndex] = curVal;\n }\n }\n }\n }\n const dataId = cpuBackend.write(util.toTypedArray(outputVals, x.dtype), outShape, x.dtype);\n return { dataId, shape: outShape, dtype: x.dtype };\n }\n};\n//# sourceMappingURL=Dilation2D.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Dilation2DBackpropFilter, util } from '@tensorflow/tfjs-core';\nexport const dilation2dBackpropFilterConfig = {\n kernelName: Dilation2DBackpropFilter,\n backendName: 'cpu',\n kernelFunc: ({ inputs, backend, attrs }) => {\n const { x, filter, dy } = inputs;\n const { strides, pad, dilations } = attrs;\n const cpuBackend = backend;\n const $x = util.toNestedArray(x.shape, cpuBackend.data.get(x.dataId).values);\n const $filter = util.toNestedArray(filter.shape, cpuBackend.data.get(filter.dataId).values);\n const { batchSize, inHeight, inWidth, inChannels, outHeight, outWidth, padInfo, strideHeight, strideWidth, filterHeight, filterWidth, dilationHeight, dilationWidth, outShape } = backend_util.computeDilation2DInfo(x.shape, filter.shape, strides, pad, 'NHWC' /* dataFormat */, dilations);\n util.assert(dy.rank === outShape.length, () => `Error in ${Dilation2DBackpropFilter}, dy ` +\n `must have the same rank as output ${outShape.length}, but got ` +\n `${dy.rank}`);\n const $dy = util.toNestedArray(outShape, cpuBackend.data.get(dy.dataId).values);\n // The computed filter gradients has the same dimensions as the filter:\n // [filterHeight, filterWidth, depth]\n const gradients = util.makeZerosNestedTypedArray(filter.shape, filter.dtype);\n // In the case of multiple argmax branches, we only back-propagate along the\n // last branch, i.e., the one with largest value of `h * filter_cols + w`,\n // similarly to the max-pooling backward routines.\n // This implementation follows the TF c++ implementation:\n // https://github.com/tensorflow/tensorflow/blob/d9a3a849edc198e90172bc58eb293de457f9d986/tensorflow/core/kernels/dilation_ops.cc\n for (let b = 0; b < batchSize; ++b) {\n for (let hOut = 0; hOut < outHeight; ++hOut) {\n const hBeg = hOut * strideHeight - padInfo.top;\n for (let wOut = 0; wOut < outWidth; ++wOut) {\n const wBeg = wOut * strideWidth - padInfo.left;\n for (let d = 0; d < inChannels; ++d) {\n let curVal = Number.MIN_SAFE_INTEGER;\n let hMax = 0;\n let wMax = 0;\n for (let h = 0; h < filterHeight; ++h) {\n const hIn = hBeg + h * dilationHeight;\n if (hIn >= 0 && hIn < inHeight) {\n for (let w = 0; w < filterWidth; ++w) {\n const wIn = wBeg + w * dilationWidth;\n if (wIn >= 0 && wIn < inWidth) {\n const val = $x[b][hIn][wIn][d] + $filter[h][w][d];\n if (val > curVal) {\n curVal = val;\n hMax = h;\n wMax = w;\n }\n }\n }\n }\n }\n gradients[hMax][wMax][d] += $dy[b][hOut][wOut][d];\n }\n }\n }\n }\n const dataId = cpuBackend.write(util.toTypedArray(gradients, x.dtype), filter.shape, filter.dtype);\n return { dataId, shape: filter.shape, dtype: filter.dtype };\n }\n};\n//# sourceMappingURL=Dilation2DBackpropFilter.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Dilation2DBackpropInput, util } from '@tensorflow/tfjs-core';\nexport const dilation2dBackpropInputConfig = {\n kernelName: Dilation2DBackpropInput,\n backendName: 'cpu',\n kernelFunc: ({ inputs, backend, attrs }) => {\n const { x, filter, dy } = inputs;\n const { strides, pad, dilations } = attrs;\n const cpuBackend = backend;\n const $x = util.toNestedArray(x.shape, cpuBackend.data.get(x.dataId).values);\n const $filter = util.toNestedArray(filter.shape, cpuBackend.data.get(filter.dataId).values);\n const { batchSize, inHeight, inWidth, inChannels, outHeight, outWidth, padInfo, strideHeight, strideWidth, filterHeight, filterWidth, dilationHeight, dilationWidth, outShape } = backend_util.computeDilation2DInfo(x.shape, filter.shape, strides, pad, 'NHWC' /* dataFormat */, dilations);\n util.assert(dy.rank === outShape.length, () => `Error in ${Dilation2DBackpropInput}, dy ` +\n `must have the same rank as output ${outShape.length}, but got ` +\n `${dy.rank}`);\n const $dy = util.toNestedArray(outShape, cpuBackend.data.get(dy.dataId).values);\n // The computed gradients has the same dimensions as the input:\n // [batch, inputHeight, inputCols, inChannel]\n const gradients = util.makeZerosNestedTypedArray(x.shape, x.dtype);\n // In the case of multiple argmax branches, we only back-propagate along the\n // last branch, i.e., the one with largest value of `h * filter_cols + w`,\n // similarly to the max-pooling backward routines.\n // This implementation follows the TF c++ implementation:\n // https://github.com/tensorflow/tensorflow/blob/d9a3a849edc198e90172bc58eb293de457f9d986/tensorflow/core/kernels/dilation_ops.cc\n for (let b = 0; b < batchSize; ++b) {\n for (let hOut = 0; hOut < outHeight; ++hOut) {\n const hBeg = hOut * strideHeight - padInfo.top;\n for (let wOut = 0; wOut < outWidth; ++wOut) {\n const wBeg = wOut * strideWidth - padInfo.left;\n for (let d = 0; d < inChannels; ++d) {\n let curVal = Number.MIN_SAFE_INTEGER;\n let hInMax = (hBeg < 0) ? 0 : hBeg;\n let wInMax = (wBeg < 0) ? 0 : wBeg;\n for (let h = 0; h < filterHeight; ++h) {\n const hIn = hBeg + h * dilationHeight;\n if (hIn >= 0 && hIn < inHeight) {\n for (let w = 0; w < filterWidth; ++w) {\n const wIn = wBeg + w * dilationWidth;\n if (wIn >= 0 && wIn < inWidth) {\n const val = $x[b][hIn][wIn][d] + $filter[h][w][d];\n if (val > curVal) {\n curVal = val;\n hInMax = hIn;\n wInMax = wIn;\n }\n }\n }\n }\n }\n gradients[b][hInMax][wInMax][d] += $dy[b][hOut][wOut][d];\n }\n }\n }\n }\n const dataId = cpuBackend.write(util.toTypedArray(gradients, x.dtype), x.shape, x.dtype);\n return { dataId, shape: x.shape, dtype: x.dtype };\n }\n};\n//# sourceMappingURL=Dilation2DBackpropInput.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Div } from '@tensorflow/tfjs-core';\nimport { createSimpleBinaryKernelImpl } from '../utils/binary_impl';\nimport { binaryKernelFunc } from '../utils/kernel_utils';\nexport const divImpl = createSimpleBinaryKernelImpl((a, b) => a / b);\nexport const div = binaryKernelFunc(Div, divImpl);\nexport const divConfig = {\n kernelName: Div,\n backendName: 'cpu',\n kernelFunc: div\n};\n//# sourceMappingURL=Div.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Erf } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nconst p = backend_util.ERF_P;\nconst a1 = backend_util.ERF_A1;\nconst a2 = backend_util.ERF_A2;\nconst a3 = backend_util.ERF_A3;\nconst a4 = backend_util.ERF_A4;\nconst a5 = backend_util.ERF_A5;\nexport const erf = unaryKernelFunc(Erf, (xi) => {\n const sign = Math.sign(xi);\n const v = Math.abs(xi);\n const t = 1.0 / (1.0 + p * v);\n return sign *\n (1.0 -\n (((((a5 * t + a4) * t) + a3) * t + a2) * t + a1) * t *\n Math.exp(-v * v));\n});\nexport const erfConfig = {\n kernelName: Erf,\n backendName: 'cpu',\n kernelFunc: erf,\n};\n//# sourceMappingURL=Erf.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, util } from '@tensorflow/tfjs-core';\nimport { add } from '../kernels/Add';\nimport { complex } from '../kernels/Complex';\nimport { concat } from '../kernels/Concat';\nimport { divConfig } from '../kernels/Div';\nimport { identity } from '../kernels/Identity';\nimport { imag } from '../kernels/Imag';\nimport { multiply } from '../kernels/Multiply';\nimport { real } from '../kernels/Real';\nimport { slice } from '../kernels/Slice';\nimport { sub } from '../kernels/Sub';\n/**\n * Calculate FFT of inner most elements of batch tensor.\n */\nexport function fftBatch(input, inverse, cpuBackend) {\n const inputShape = input.shape;\n const batch = inputShape[0];\n const innerDim = inputShape[1];\n const inputVals = cpuBackend.data.get(input.dataId);\n const real2D = inputVals.complexTensorInfos.real;\n const imag2D = inputVals.complexTensorInfos.imag;\n // Collects real and imaginary values separately.\n const resultShape = [batch, innerDim];\n const resultSize = util.sizeFromShape(resultShape);\n const resultReal = util.getTypedArrayFromDType('float32', resultSize);\n const resultImag = util.getTypedArrayFromDType('float32', resultSize);\n for (let b = 0; b < batch; b++) {\n // TODO: Support slice ops for complex type.\n const r = slice({\n inputs: { x: real2D },\n backend: cpuBackend,\n attrs: { begin: [b, 0], size: [1, innerDim] }\n });\n const i = slice({\n inputs: { x: imag2D },\n backend: cpuBackend,\n attrs: { begin: [b, 0], size: [1, innerDim] }\n });\n const input = complex({ inputs: { real: r, imag: i }, backend: cpuBackend });\n // Run FFT by batch element.\n const { real, imag } = fftImpl(input, inverse, cpuBackend);\n const res = backend_util.mergeRealAndImagArrays(real, imag);\n for (let d = 0; d < innerDim; d++) {\n const c = backend_util.getComplexWithIndex(res, d);\n resultReal[b * innerDim + d] = c.real;\n resultImag[b * innerDim + d] = c.imag;\n }\n cpuBackend.disposeIntermediateTensorInfo(r);\n cpuBackend.disposeIntermediateTensorInfo(i);\n cpuBackend.disposeIntermediateTensorInfo(input);\n }\n const $realInfo = cpuBackend.makeTensorInfo(resultShape, 'float32', resultReal);\n const $imagInfo = cpuBackend.makeTensorInfo(resultShape, 'float32', resultImag);\n const result = complex({ inputs: { real: $realInfo, imag: $imagInfo }, backend: cpuBackend });\n cpuBackend.disposeIntermediateTensorInfo($realInfo);\n cpuBackend.disposeIntermediateTensorInfo($imagInfo);\n return result;\n}\nexport function fftImpl(input, inverse, cpuBackend) {\n const inputSize = util.sizeFromShape(input.shape);\n const inputVals = cpuBackend.data.get(input.dataId);\n const realVals = cpuBackend.data.get(inputVals.complexTensorInfos.real.dataId).values;\n const imagVals = cpuBackend.data.get(inputVals.complexTensorInfos.imag.dataId).values;\n if (isExponentOf2(inputSize)) {\n const result = fftRadix2(realVals, imagVals, 
inputSize, inverse, cpuBackend);\n const resultShape = [input.shape[0], input.shape[1]];\n if (inverse) {\n const realInfo = cpuBackend.makeTensorInfo(resultShape, 'float32', result.real);\n const imagInfo = cpuBackend.makeTensorInfo(resultShape, 'float32', result.imag);\n const sizeInfo = cpuBackend.makeTensorInfo([], 'float32', util.createScalarValue(inputSize, 'float32'));\n const sizeInfoCopy = identity({ inputs: { x: sizeInfo }, backend: cpuBackend });\n const divRealInfo = divConfig.kernelFunc({ inputs: { a: realInfo, b: sizeInfo }, backend: cpuBackend });\n const divImagInfo = divConfig.kernelFunc({ inputs: { a: imagInfo, b: sizeInfoCopy }, backend: cpuBackend });\n const divRealVals = cpuBackend.data.get(divRealInfo.dataId).values;\n const divImagVals = cpuBackend.data.get(divImagInfo.dataId).values;\n cpuBackend.disposeIntermediateTensorInfo(realInfo);\n cpuBackend.disposeIntermediateTensorInfo(imagInfo);\n cpuBackend.disposeIntermediateTensorInfo(sizeInfo);\n cpuBackend.disposeIntermediateTensorInfo(sizeInfoCopy);\n cpuBackend.disposeIntermediateTensorInfo(divRealInfo);\n cpuBackend.disposeIntermediateTensorInfo(divImagInfo);\n return { real: divRealVals, imag: divImagVals };\n }\n return result;\n }\n else {\n const data = backend_util.mergeRealAndImagArrays(realVals, imagVals);\n const rawOutput = fourierTransformByMatmul(data, inputSize, inverse);\n return backend_util.splitRealAndImagArrays(rawOutput);\n }\n}\nfunction isExponentOf2(size) {\n return (size & size - 1) === 0;\n}\n// FFT using Cooley-Tukey algorithm on radix 2 dimensional input.\nfunction fftRadix2(realVals, imagVals, size, inverse, cpuBackend) {\n if (size === 1) {\n return { real: realVals, imag: imagVals };\n }\n const data = backend_util.mergeRealAndImagArrays(realVals, imagVals);\n const half = size / 2;\n const evenComplex = backend_util.complexWithEvenIndex(data);\n const evenRealVals = evenComplex.real;\n const evenImagVals = evenComplex.imag;\n const evenShape = [evenRealVals.length];\n const evenRealInfo = cpuBackend.makeTensorInfo(evenShape, 'float32', evenRealVals);\n const evenImagInfo = cpuBackend.makeTensorInfo(evenShape, 'float32', evenImagVals);\n const evenTensorInfo = complex({ inputs: { real: evenRealInfo, imag: evenImagInfo }, backend: cpuBackend });\n const oddComplex = backend_util.complexWithOddIndex(data);\n const oddRealVals = oddComplex.real;\n const oddImagVals = oddComplex.imag;\n const oddShape = [oddRealVals.length];\n const oddRealInfo = cpuBackend.makeTensorInfo(oddShape, 'float32', oddRealVals);\n const oddImagInfo = cpuBackend.makeTensorInfo(oddShape, 'float32', oddImagVals);\n const oddTensorInfo = complex({ inputs: { real: oddRealInfo, imag: oddImagInfo }, backend: cpuBackend });\n // Recursive call for half part of original input.\n const $evenComplex = fftRadix2(evenRealVals, evenImagVals, half, inverse, cpuBackend);\n const $evenRealVals = $evenComplex.real;\n const $evenImagVals = $evenComplex.imag;\n const $evenShape = [$evenRealVals.length];\n const $evenRealInfo = cpuBackend.makeTensorInfo($evenShape, 'float32', $evenRealVals);\n const $evenImagInfo = cpuBackend.makeTensorInfo($evenShape, 'float32', $evenImagVals);\n const $evenTensorInfo = complex({\n inputs: { real: $evenRealInfo, imag: $evenImagInfo },\n backend: cpuBackend\n });\n const $oddComplex = fftRadix2(oddRealVals, oddImagVals, half, inverse, cpuBackend);\n const $oddRealVals = $oddComplex.real;\n const $oddImagVals = $oddComplex.imag;\n const $oddShape = [$oddRealVals.length];\n const $oddRealInfo = 
cpuBackend.makeTensorInfo($oddShape, 'float32', $oddRealVals);\n const $oddImagInfo = cpuBackend.makeTensorInfo($oddShape, 'float32', $oddImagVals);\n const $oddTensorInfo = complex({ inputs: { real: $oddRealInfo, imag: $oddImagInfo }, backend: cpuBackend });\n const e = backend_util.exponents(size, inverse);\n const eShape = [e.real.length];\n const eRealInfo = cpuBackend.makeTensorInfo(eShape, 'float32', e.real);\n const eImagInfo = cpuBackend.makeTensorInfo(eShape, 'float32', e.imag);\n const complexInfo = complex({ inputs: { real: eRealInfo, imag: eImagInfo }, backend: cpuBackend });\n const exponentInfo = multiply({ inputs: { a: complexInfo, b: $oddTensorInfo }, backend: cpuBackend });\n const addPart = add({\n inputs: { a: $evenTensorInfo, b: exponentInfo },\n backend: cpuBackend\n });\n const subPart = sub({\n inputs: { a: $evenTensorInfo, b: exponentInfo },\n backend: cpuBackend\n });\n const addPartReal = real({ inputs: { input: addPart }, backend: cpuBackend });\n const subPartReal = real({ inputs: { input: subPart }, backend: cpuBackend });\n const addPartImag = imag({ inputs: { input: addPart }, backend: cpuBackend });\n const subPartImag = imag({ inputs: { input: subPart }, backend: cpuBackend });\n const $real = concat({\n inputs: [addPartReal, subPartReal],\n backend: cpuBackend,\n attrs: { axis: 0 }\n });\n const $imag = concat({\n inputs: [addPartImag, subPartImag],\n backend: cpuBackend,\n attrs: { axis: 0 }\n });\n const $realVals = cpuBackend.data.get($real.dataId).values;\n const $imagVals = cpuBackend.data.get($imag.dataId).values;\n cpuBackend.disposeIntermediateTensorInfo(evenRealInfo);\n cpuBackend.disposeIntermediateTensorInfo(evenImagInfo);\n cpuBackend.disposeIntermediateTensorInfo(evenTensorInfo);\n cpuBackend.disposeIntermediateTensorInfo(oddRealInfo);\n cpuBackend.disposeIntermediateTensorInfo(oddImagInfo);\n cpuBackend.disposeIntermediateTensorInfo(oddTensorInfo);\n cpuBackend.disposeIntermediateTensorInfo($evenRealInfo);\n cpuBackend.disposeIntermediateTensorInfo($evenImagInfo);\n cpuBackend.disposeIntermediateTensorInfo($evenTensorInfo);\n cpuBackend.disposeIntermediateTensorInfo($oddRealInfo);\n cpuBackend.disposeIntermediateTensorInfo($oddImagInfo);\n cpuBackend.disposeIntermediateTensorInfo($oddTensorInfo);\n cpuBackend.disposeIntermediateTensorInfo(eRealInfo);\n cpuBackend.disposeIntermediateTensorInfo(eImagInfo);\n cpuBackend.disposeIntermediateTensorInfo(complexInfo);\n cpuBackend.disposeIntermediateTensorInfo(exponentInfo);\n cpuBackend.disposeIntermediateTensorInfo(addPart);\n cpuBackend.disposeIntermediateTensorInfo(subPart);\n cpuBackend.disposeIntermediateTensorInfo(addPartReal);\n cpuBackend.disposeIntermediateTensorInfo(addPartImag);\n cpuBackend.disposeIntermediateTensorInfo(subPartReal);\n cpuBackend.disposeIntermediateTensorInfo(subPartImag);\n cpuBackend.disposeIntermediateTensorInfo($real);\n cpuBackend.disposeIntermediateTensorInfo($imag);\n return { real: $realVals, imag: $imagVals };\n}\n// Calculate fourier transform by multplying sinusoid matrix.\nfunction fourierTransformByMatmul(data, size, inverse) {\n const ret = new Float32Array(size * 2);\n // TODO: Use matmul instead once it supports complex64 type.\n for (let r = 0; r < size; r++) {\n let real = 0.0;\n let imag = 0.0;\n for (let c = 0; c < size; c++) {\n const e = backend_util.exponent(r * c, size, inverse);\n const term = backend_util.getComplexWithIndex(data, c);\n real += term.real * e.real - term.imag * e.imag;\n imag += term.real * e.imag + term.imag * e.real;\n }\n if 
(inverse) {\n real /= size;\n imag /= size;\n }\n backend_util.assignToTypedArray(ret, real, imag, r);\n }\n return ret;\n}\n//# sourceMappingURL=fft_utils.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { FFT, util } from '@tensorflow/tfjs-core';\nimport { fftBatch } from '../utils/fft_utils';\nimport { reshape } from './Reshape';\nexport function fft(args) {\n const { inputs, backend } = args;\n const { input } = inputs;\n const inputSize = util.sizeFromShape(input.shape);\n // Collapse all outer dimensions to a single batch dimension.\n const innerDimensionSize = input.shape[input.shape.length - 1];\n const batch = inputSize / innerDimensionSize;\n const input2D = reshape({\n inputs: { x: input },\n backend,\n attrs: { shape: [batch, innerDimensionSize] }\n });\n const result = fftBatch(input2D, false, backend);\n const resultReshaped = reshape({ inputs: { x: result }, backend, attrs: { shape: input.shape } });\n backend.disposeIntermediateTensorInfo(input2D);\n backend.disposeIntermediateTensorInfo(result);\n return resultReshaped;\n}\nexport const fftConfig = {\n kernelName: FFT,\n backendName: 'cpu',\n kernelFunc: fft\n};\n//# sourceMappingURL=FFT.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Fill, util } from '@tensorflow/tfjs-core';\nexport function fill(args) {\n const { backend, attrs } = args;\n const { shape, value, dtype } = attrs;\n const $dtype = dtype || util.inferDtype(value);\n const values = util.getArrayFromDType($dtype, util.sizeFromShape(shape));\n fillValues(values, value, $dtype);\n return backend.makeTensorInfo(shape, $dtype, values);\n}\nexport const fillConfig = {\n kernelName: Fill,\n backendName: 'cpu',\n kernelFunc: fill\n};\nfunction fillValues(values, value, dtype) {\n if (dtype === 'string') {\n values.fill(value);\n }\n else {\n values.fill(value);\n }\n}\n//# sourceMappingURL=Fill.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { FlipLeftRight, util } from '@tensorflow/tfjs-core';\nexport const flipLeftRightConfig = {\n kernelName: FlipLeftRight,\n backendName: 'cpu',\n kernelFunc: ({ inputs, attrs, backend }) => {\n const { image } = inputs;\n const cpuBackend = backend;\n const output = util.getTypedArrayFromDType(image.dtype, util.sizeFromShape(image.shape));\n const [batch, imageHeight, imageWidth, numChannels] = image.shape;\n const imageVals = cpuBackend.data.get(image.dataId).values;\n for (let batchIdx = 0; batchIdx < batch; batchIdx++) {\n const batchOffset = batchIdx * imageWidth * imageHeight * numChannels;\n for (let row = 0; row < imageHeight; row++) {\n const rowOffset = row * (imageWidth * numChannels);\n for (let col = 0; col < imageWidth; col++) {\n const colOffset = col * numChannels;\n for (let channel = 0; channel < numChannels; channel++) {\n const coords = [batch, row, col, channel];\n const x = coords[2];\n const coordX = Math.round(imageWidth - x);\n const outIdx = batchOffset + rowOffset + colOffset + channel;\n let outputValue = imageVals[outIdx];\n // If the coordinate position falls within the image boundaries...\n if (coordX >= 0 && coordX < imageWidth) {\n // set the output to the image value at the coordinate position.\n const rotatedColOffset = coordX * numChannels;\n const imageIdx = batchOffset + rowOffset + rotatedColOffset + channel;\n outputValue = imageVals[imageIdx];\n }\n output[outIdx] = outputValue;\n }\n }\n }\n }\n const dataId = cpuBackend.write(output, image.shape, image.dtype);\n return { dataId, shape: image.shape, dtype: image.dtype };\n }\n};\n//# sourceMappingURL=FlipLeftRight.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { FusedConv2D } from '@tensorflow/tfjs-core';\nimport { applyActivation } from '../utils/fused_utils';\nimport { add } from './Add';\nimport { conv2D } from './Conv2D';\nexport function fusedConv2D(args) {\n const { inputs, backend, attrs } = args;\n const { x, filter, bias, preluActivationWeights } = inputs;\n const { strides, pad, dataFormat, dilations, dimRoundingMode, activation } = attrs;\n let result = conv2D({\n inputs: { x, filter },\n backend,\n attrs: { strides, pad, dataFormat, dilations, dimRoundingMode }\n });\n if (bias) {\n const resultOld = result;\n result = add({ inputs: { a: result, b: bias }, backend });\n backend.disposeIntermediateTensorInfo(resultOld);\n }\n if (activation) {\n const resultOld = result;\n result =\n applyActivation(backend, result, activation, preluActivationWeights);\n backend.disposeIntermediateTensorInfo(resultOld);\n }\n return result;\n}\nexport const fusedConv2DConfig = {\n kernelName: FusedConv2D,\n backendName: 'cpu',\n kernelFunc: fusedConv2D\n};\n//# sourceMappingURL=FusedConv2D.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { FusedDepthwiseConv2D } from '@tensorflow/tfjs-core';\nimport { applyActivation } from '../utils/fused_utils';\nimport { add } from './Add';\nimport { depthwiseConv2dNative } from './DepthwiseConv2dNative';\nexport function fusedDepthwiseConv2D(args) {\n const { inputs, backend, attrs } = args;\n const { x, filter, bias, preluActivationWeights } = inputs;\n const { strides, pad, dataFormat, dilations, dimRoundingMode, activation } = attrs;\n let result = depthwiseConv2dNative({\n inputs: { x, filter },\n backend,\n attrs: { strides, pad, dataFormat, dilations, dimRoundingMode }\n });\n if (bias) {\n const oldResult = result;\n result = add({ inputs: { a: result, b: bias }, backend });\n backend.disposeIntermediateTensorInfo(oldResult);\n }\n if (activation) {\n const oldResult = result;\n result =\n applyActivation(backend, result, activation, preluActivationWeights);\n backend.disposeIntermediateTensorInfo(oldResult);\n }\n return result;\n}\nexport const fusedDepthwiseConv2DConfig = {\n kernelName: FusedDepthwiseConv2D,\n backendName: 'cpu',\n kernelFunc: fusedDepthwiseConv2D\n};\n//# sourceMappingURL=FusedDepthwiseConv2D.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { IFFT, util } from '@tensorflow/tfjs-core';\nimport { fftBatch } from '../utils/fft_utils';\nimport { reshape } from './Reshape';\nexport function ifft(args) {\n const { inputs, backend } = args;\n const { input } = inputs;\n const inputSize = util.sizeFromShape(input.shape);\n // Collapse all outer dimensions to a single batch dimension.\n const innerDimensionSize = input.shape[input.shape.length - 1];\n const batch = inputSize / innerDimensionSize;\n const input2D = reshape({\n inputs: { x: input },\n backend,\n attrs: { shape: [batch, innerDimensionSize] }\n });\n const result = fftBatch(input2D, true, backend);\n const resultReshaped = reshape({ inputs: { x: result }, backend, attrs: { shape: input.shape } });\n backend.disposeIntermediateTensorInfo(input2D);\n backend.disposeIntermediateTensorInfo(result);\n return resultReshaped;\n}\nexport const ifftConfig = {\n kernelName: IFFT,\n backendName: 'cpu',\n kernelFunc: ifft\n};\n//# sourceMappingURL=IFFT.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { IsFinite } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const isFinite = unaryKernelFunc(IsFinite, (xi) => Number.isFinite(xi) ? 1 : 0, 'bool');\nexport const isFiniteConfig = {\n kernelName: IsFinite,\n backendName: 'cpu',\n kernelFunc: isFinite,\n};\n//# sourceMappingURL=IsFinite.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { IsInf } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const isInf = unaryKernelFunc(IsInf, (xi) => Math.abs(xi) === Infinity ? 1 : 0, 'bool');\nexport const isInfConfig = {\n kernelName: IsInf,\n backendName: 'cpu',\n kernelFunc: isInf,\n};\n//# sourceMappingURL=IsInf.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { IsNan } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const isNaN = unaryKernelFunc(IsNan, (xi) => Number.isNaN(xi) ? 1 : 0, 'bool');\nexport const isNaNConfig = {\n kernelName: IsNan,\n backendName: 'cpu',\n kernelFunc: isNaN,\n};\n//# sourceMappingURL=IsNaN.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Log1p } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const log1p = unaryKernelFunc(Log1p, (xi) => Math.log1p(xi));\nexport const log1pConfig = {\n kernelName: Log1p,\n backendName: 'cpu',\n kernelFunc: log1p,\n};\n//# sourceMappingURL=Log1p.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { LogicalNot } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const logicalNot = unaryKernelFunc(LogicalNot, (xi) => xi ? 0 : 1, 'bool');\nexport const logicalNotConfig = {\n kernelName: LogicalNot,\n backendName: 'cpu',\n kernelFunc: logicalNot,\n};\n//# sourceMappingURL=LogicalNot.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Max } from '@tensorflow/tfjs-core';\nimport { backend_util } from '@tensorflow/tfjs-core';\nimport { util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { maxImpl } from './Max_impl';\nimport { transposeImpl } from './Transpose_impl';\nexport const maxConfig = {\n kernelName: Max,\n backendName: 'cpu',\n kernelFunc: ({ inputs, attrs, backend }) => {\n const { x } = inputs;\n const { reductionIndices, keepDims } = attrs;\n const cpuBackend = backend;\n let xShape = x.shape;\n const xRank = xShape.length;\n const origAxes = util.parseAxisParam(reductionIndices, xShape);\n let axes = origAxes;\n const permutedAxes = backend_util.getAxesPermutation(axes, xRank);\n let xVals = cpuBackend.data.get(x.dataId).values;\n if (permutedAxes != null) {\n const newShape = new Array(xRank);\n for (let i = 0; i < newShape.length; i++) {\n newShape[i] = xShape[permutedAxes[i]];\n }\n xVals = transposeImpl(xVals, xShape, x.dtype, permutedAxes, newShape);\n axes = backend_util.getInnerMostAxes(axes.length, xRank);\n xShape = newShape;\n }\n assertNotComplex(x, 'max');\n backend_util.assertAxesAreInnerMostDims('max', axes, xRank);\n const [maxOutShape, reduceShape] = backend_util.computeOutAndReduceShapes(xShape, axes);\n const reduceSize = util.sizeFromShape(reduceShape);\n const result = maxImpl(xVals, reduceSize, maxOutShape, x.dtype);\n const dataId = cpuBackend.write(result, maxOutShape, x.dtype);\n let outShape = maxOutShape;\n if (keepDims) {\n // reshape\n const newShape = backend_util.expandShapeToKeepDim(maxOutShape, origAxes);\n outShape = newShape;\n }\n return { dataId, shape: outShape, dtype: x.dtype };\n }\n};\n//# sourceMappingURL=Max.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
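// A standalone sketch of the reduction step the Max kernel above delegates to
// maxImpl: once the reduced axes have been permuted to be inner-most, the input
// is treated as [outSize, reduceSize] and each row collapses to its maximum.
// 'reduceMaxInnermost' is an illustrative name, not part of tfjs.
function reduceMaxInnermost(vals, reduceSize) {
  const outSize = vals.length / reduceSize;
  const out = new Float32Array(outSize);
  for (let i = 0; i < outSize; i++) {
    let max = -Infinity;
    for (let j = 0; j < reduceSize; j++) {
      const v = vals[i * reduceSize + j];
      if (v > max) max = v;
    }
    out[i] = max;
  }
  return out;
}
// e.g. a [2, 3] tensor reduced over axis 1:
// reduceMaxInnermost(new Float32Array([1, 5, 2, 7, 0, 3]), 3) -> Float32Array [5, 7]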
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, MaxPool, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { pool } from '../utils/pool_utils';\nimport { identity } from './Identity';\nexport function maxPool(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n assertNotComplex(x, 'maxPool');\n const { filterSize, strides, pad, dimRoundingMode } = attrs;\n const dilations = 1;\n util.assert(backend_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in maxPool: Either strides or dilations must be 1. ' +\n `Got strides ${strides} and dilations '${dilations}'`);\n const convInfo = backend_util.computePool2DInfo(x.shape, filterSize, strides, dilations, pad, dimRoundingMode);\n let res;\n if (convInfo.filterWidth === 1 && convInfo.filterHeight === 1 &&\n util.arraysEqual(convInfo.inShape, convInfo.outShape)) {\n res = identity({ inputs: { x }, backend });\n }\n else {\n const xValues = backend.data.get(x.dataId).values;\n const strides = util.computeStrides(x.shape);\n const buffer = pool(xValues, x.shape, x.dtype, strides, convInfo, 'max');\n res = backend.makeTensorInfo(convInfo.outShape, x.dtype, buffer.values);\n }\n return res;\n}\nexport const maxPoolConfig = {\n kernelName: MaxPool,\n backendName: 'cpu',\n kernelFunc: maxPool\n};\n//# sourceMappingURL=MaxPool.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, buffer, MaxPoolBackprop } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { maxPoolPositions } from '../utils/pool_utils';\nexport function maxPoolBackprop(args) {\n const { inputs, backend, attrs } = args;\n const { dy, input, output } = inputs;\n const x = input;\n assertNotComplex([input, output], 'maxPoolBackprop');\n const { filterSize, strides, pad, dimRoundingMode } = attrs;\n const convInfo = backend_util.computePool2DInfo(x.shape, filterSize, strides, 1 /* dilations */, pad, dimRoundingMode);\n const xValues = backend.data.get(x.dataId).values;\n const maxPosBuf = buffer(convInfo.outShape, x.dtype, maxPoolPositions(xValues, x.shape, x.dtype, convInfo).values);\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padLeft = effectiveFilterWidth - 1 - convInfo.padInfo.left;\n const padTop = effectiveFilterHeight - 1 - convInfo.padInfo.top;\n const dx = buffer(x.shape, 'float32');\n const dyData = backend.data.get(dy.dataId).values;\n const dyBuf = buffer(dy.shape, 'float32', dyData);\n for (let b = 0; b < convInfo.batchSize; ++b) {\n for (let d = 0; d < convInfo.inChannels; ++d) {\n for (let dxR = 0; dxR < convInfo.inHeight; ++dxR) {\n for (let dxC = 0; dxC < convInfo.inWidth; ++dxC) {\n // Shader code begins.\n const dyRCorner = dxR - padTop;\n const dyCCorner = dxC - padLeft;\n let dotProd = 0;\n for (let wR = 0; wR < effectiveFilterHeight; wR += dilationHeight) {\n const dyR = (dyRCorner + wR) / strideHeight;\n if (dyR < 0 || dyR >= convInfo.outHeight ||\n Math.floor(dyR) !== dyR) {\n continue;\n }\n for (let wC = 0; wC < effectiveFilterWidth; wC += dilationWidth) {\n const dyC = (dyCCorner + wC) / strideWidth;\n if (dyC < 0 || dyC >= convInfo.outWidth ||\n Math.floor(dyC) !== dyC) {\n continue;\n }\n const maxPos = effectiveFilterHeight * effectiveFilterWidth - 1 -\n maxPosBuf.get(b, dyR, dyC, d);\n const curPos = wR * effectiveFilterWidth + wC;\n const mask = maxPos === curPos ? 1 : 0;\n if (mask === 0) {\n continue;\n }\n const pixel = dyBuf.get(b, dyR, dyC, d);\n dotProd += pixel * mask;\n }\n }\n dx.set(dotProd, b, dxR, dxC, d);\n }\n }\n }\n }\n return backend.makeTensorInfo(dx.shape, dx.dtype, dx.values);\n}\nexport const maxPoolBackpropConfig = {\n kernelName: MaxPoolBackprop,\n backendName: 'cpu',\n kernelFunc: maxPoolBackprop\n};\n//# sourceMappingURL=MaxPoolBackprop.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
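// A 1-D sketch of the gradient-routing idea in maxPoolBackprop above: each
// output gradient flows only to the input position that produced the maximum
// inside its pooling window (stride == filter size here for brevity).
// Illustrative helper, not part of tfjs.
function maxPoolBackprop1d(x, dy, filterSize) {
  const dx = new Float32Array(x.length);
  for (let out = 0; out < dy.length; out++) {
    const start = out * filterSize;
    let argmax = start;
    for (let i = start + 1; i < Math.min(start + filterSize, x.length); i++) {
      if (x[i] > x[argmax]) argmax = i;
    }
    dx[argmax] += dy[out];
  }
  return dx;
}
// maxPoolBackprop1d([1, 3, 2, 5], [10, 20], 2) -> Float32Array [0, 10, 0, 20]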
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nimport { maxPoolPositions, pool } from '../utils/pool_utils';\nexport function maxPoolWithArgmaxImpl(xValues, xShape, dtype, includeBatchInIndex, convInfo) {\n const strides = util.computeStrides(xShape);\n const maxPools = pool(xValues, xShape, dtype, strides, convInfo, 'max');\n const maxPositions = maxPoolPositions(xValues, xShape, dtype, convInfo, true, includeBatchInIndex);\n return [maxPools.values, maxPositions.values];\n}\n//# sourceMappingURL=MaxPoolWithArgmax_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { MaxPoolWithArgmax } from '@tensorflow/tfjs-core';\nimport { backend_util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { maxPoolWithArgmaxImpl } from './MaxPoolWithArgmax_impl';\nexport const maxPoolWithArgmaxConfig = {\n kernelName: MaxPoolWithArgmax,\n backendName: 'cpu',\n kernelFunc: ({ inputs, attrs, backend }) => {\n const { x } = inputs;\n const { filterSize, strides, pad, includeBatchInIndex } = attrs;\n const cpuBackend = backend;\n assertNotComplex(x, 'MaxPoolWithArgmax');\n const values = cpuBackend.data.get(x.dataId).values;\n const convInfo = backend_util.computePool2DInfo(x.shape, filterSize, strides, [1, 1], pad);\n const [pooled, indexes] = maxPoolWithArgmaxImpl(values, x.shape, x.dtype, includeBatchInIndex, convInfo);\n const pooledDataId = cpuBackend.write(pooled, convInfo.outShape, x.dtype);\n const indexesDataId = cpuBackend.write(indexes, convInfo.outShape, x.dtype);\n return [\n { dataId: pooledDataId, shape: convInfo.outShape, dtype: x.dtype },\n { dataId: indexesDataId, shape: convInfo.outShape, dtype: 'int32' }\n ];\n }\n};\n//# sourceMappingURL=MaxPoolWithArgmax.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { MirrorPad, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function mirrorPad(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n const { paddings, mode } = attrs;\n assertNotComplex(x, 'mirrorPad');\n const outShape = paddings.map((p, i) => p[0] /* beforePad */ + x.shape[i] + p[1] /* afterPad */);\n const start = paddings.map(p => p[0]);\n const end = paddings.map((p, i) => p[0] + x.shape[i]);\n const offset = mode === 'reflect' ? 0 : 1;\n const xVals = backend.data.get(x.dataId).values;\n const xRank = x.shape.length;\n const xStrides = util.computeStrides(x.shape);\n const resultSize = util.sizeFromShape(outShape);\n const resultRank = outShape.length;\n const resultStrides = util.computeStrides(outShape);\n const resVals = util.getTypedArrayFromDType(x.dtype, resultSize);\n for (let i = 0; i < resultSize; i++) {\n let coords = util.indexToLoc(i, resultRank, resultStrides);\n for (let i = 0; i < resultRank; i++) {\n if (coords[i] < start[i]) {\n coords[i] = start[i] * 2 - coords[i] - offset;\n }\n else if (coords[i] >= end[i]) {\n coords[i] = (end[i] - 1) * 2 - coords[i] + offset;\n }\n }\n coords = coords.map((c, i) => c - start[i]);\n const inIndex = util.locToIndex(coords, xRank, xStrides);\n resVals[i] = xVals[inIndex];\n }\n const outId = backend.write(resVals, outShape, x.dtype);\n return { dataId: outId, shape: outShape, dtype: x.dtype };\n}\nexport const mirrorPadConfig = {\n kernelName: MirrorPad,\n backendName: 'cpu',\n kernelFunc: mirrorPad\n};\n//# sourceMappingURL=MirrorPad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
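// A minimal 1-D sketch of the coordinate reflection used by mirrorPad above:
// out-of-range output coordinates are folded back into the input range, with
// offset = 0 for 'reflect' and 1 for 'symmetric'. Illustrative only.
function mirrorPad1d(x, before, after, mode) {
  const offset = mode === 'reflect' ? 0 : 1;
  const start = before;
  const end = before + x.length;
  const out = new Array(before + x.length + after);
  for (let i = 0; i < out.length; i++) {
    let c = i;
    if (c < start) c = start * 2 - c - offset;
    else if (c >= end) c = (end - 1) * 2 - c + offset;
    out[i] = x[c - start];
  }
  return out;
}
// mirrorPad1d([1, 2, 3], 2, 2, 'reflect')   -> [3, 2, 1, 2, 3, 2, 1]
// mirrorPad1d([1, 2, 3], 2, 2, 'symmetric') -> [2, 1, 1, 2, 3, 3, 2]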
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { NonMaxSuppressionV4 } from '@tensorflow/tfjs-core';\nimport { kernel_impls } from '@tensorflow/tfjs-core';\nconst nonMaxSuppressionV4Impl = kernel_impls.nonMaxSuppressionV4Impl;\nimport { assertNotComplex } from '../cpu_util';\nexport const nonMaxSuppressionV4Config = {\n kernelName: NonMaxSuppressionV4,\n backendName: 'cpu',\n kernelFunc: ({ inputs, backend, attrs }) => {\n const { boxes, scores } = inputs;\n const { maxOutputSize, iouThreshold, scoreThreshold, padToMaxOutputSize } = attrs;\n const cpuBackend = backend;\n assertNotComplex(boxes, 'NonMaxSuppressionPadded');\n const boxesVals = cpuBackend.data.get(boxes.dataId).values;\n const scoresVals = cpuBackend.data.get(scores.dataId).values;\n const { selectedIndices, validOutputs } = nonMaxSuppressionV4Impl(boxesVals, scoresVals, maxOutputSize, iouThreshold, scoreThreshold, padToMaxOutputSize);\n return [selectedIndices, validOutputs];\n }\n};\n//# sourceMappingURL=NonMaxSuppressionV4.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { NonMaxSuppressionV5 } from '@tensorflow/tfjs-core';\nimport { kernel_impls } from '@tensorflow/tfjs-core';\nconst nonMaxSuppressionV5Impl = kernel_impls.nonMaxSuppressionV5Impl;\nimport { assertNotComplex } from '../cpu_util';\nexport const nonMaxSuppressionV5Config = {\n kernelName: NonMaxSuppressionV5,\n backendName: 'cpu',\n kernelFunc: ({ inputs, backend, attrs }) => {\n const { boxes, scores } = inputs;\n const { maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma } = attrs;\n const cpuBackend = backend;\n assertNotComplex(boxes, 'NonMaxSuppressionWithScore');\n const boxesVals = cpuBackend.data.get(boxes.dataId).values;\n const scoresVals = cpuBackend.data.get(scores.dataId).values;\n const maxOutputSizeVal = maxOutputSize;\n const iouThresholdVal = iouThreshold;\n const scoreThresholdVal = scoreThreshold;\n const softNmsSigmaVal = softNmsSigma;\n const { selectedIndices, selectedScores } = nonMaxSuppressionV5Impl(boxesVals, scoresVals, maxOutputSizeVal, iouThresholdVal, scoreThresholdVal, softNmsSigmaVal);\n return [selectedIndices, selectedScores];\n }\n};\n//# sourceMappingURL=NonMaxSuppressionV5.js.map", "/**\n * @license\n * Copyright 
2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { PadV2, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function padV2(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n const { paddings, constantValue } = attrs;\n assertNotComplex(x, 'pad');\n const outShape = paddings.map((p, i) => p[0] /* beforePad */ + x.shape[i] + p[1] /* afterPad */);\n const start = paddings.map(p => p[0]);\n const xVals = backend.data.get(x.dataId).values;\n const xSize = util.sizeFromShape(x.shape);\n const xRank = x.shape.length;\n const xStrides = util.computeStrides(x.shape);\n const resultSize = util.sizeFromShape(outShape);\n const resultRank = outShape.length;\n const resultStrides = util.computeStrides(outShape);\n const resVals = util.getTypedArrayFromDType(x.dtype, resultSize);\n if (constantValue !== 0) {\n resVals.fill(constantValue);\n }\n for (let i = 0; i < xSize; i++) {\n const coords = util.indexToLoc(i, xRank, xStrides);\n const outCoords = coords.map((c, i) => c + start[i]);\n const outIndex = util.locToIndex(outCoords, resultRank, resultStrides);\n resVals[outIndex] = xVals[i];\n }\n const outId = backend.write(resVals, outShape, x.dtype);\n return { dataId: outId, shape: outShape, dtype: x.dtype };\n}\nexport const padV2Config = {\n kernelName: PadV2,\n backendName: 'cpu',\n kernelFunc: padV2\n};\n//# sourceMappingURL=PadV2.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Reciprocal } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const reciprocal = unaryKernelFunc(Reciprocal, (xi) => 1 / xi);\nexport const reciprocalConfig = {\n kernelName: Reciprocal,\n backendName: 'cpu',\n kernelFunc: reciprocal,\n};\n//# sourceMappingURL=Reciprocal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
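// A 1-D sketch of the padV2 strategy above: pre-fill the output with the
// constant value, then copy every input element to its shifted location.
// Illustrative helper, not part of tfjs.
function pad1d(x, before, after, constantValue) {
  const out = new Array(before + x.length + after).fill(constantValue);
  for (let i = 0; i < x.length; i++) out[before + i] = x[i];
  return out;
}
// pad1d([1, 2, 3], 2, 1, 0) -> [0, 0, 1, 2, 3, 0]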
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, RotateWithOffset, util } from '@tensorflow/tfjs-core';\nexport const rotateWithOffsetConfig = {\n kernelName: RotateWithOffset,\n backendName: 'cpu',\n kernelFunc: ({ inputs, attrs, backend }) => {\n const { image } = inputs;\n const { radians, fillValue, center } = attrs;\n const cpuBackend = backend;\n const output = util.getTypedArrayFromDType(image.dtype, util.sizeFromShape(image.shape));\n const [batch, imageHeight, imageWidth, numChannels] = image.shape;\n const [centerX, centerY] = backend_util.getImageCenter(center, imageHeight, imageWidth);\n const fullOpacityValue = 255;\n const sinFactor = Math.sin(radians);\n const cosFactor = Math.cos(radians);\n const imageVals = cpuBackend.data.get(image.dataId).values;\n for (let batchIdx = 0; batchIdx < batch; batchIdx++) {\n const batchOffset = batchIdx * imageWidth * imageHeight * numChannels;\n for (let row = 0; row < imageHeight; row++) {\n const rowOffset = row * (imageWidth * numChannels);\n for (let col = 0; col < imageWidth; col++) {\n const colOffset = col * numChannels;\n for (let channel = 0; channel < numChannels; channel++) {\n const coords = [batch, row, col, channel];\n const x = coords[2];\n const y = coords[1];\n // coordX/coordY are the result of rotating and translating x/y.\n let coordX = (x - centerX) * cosFactor - (y - centerY) * sinFactor;\n let coordY = (x - centerX) * sinFactor + (y - centerY) * cosFactor;\n coordX = Math.round(coordX + centerX);\n coordY = Math.round(coordY + centerY);\n let outputValue = fillValue;\n if (typeof fillValue !== 'number') {\n if (channel === 3) {\n outputValue = fullOpacityValue;\n }\n else {\n outputValue = fillValue[channel];\n }\n }\n // If the coordinate position falls within the image boundaries...\n if (coordX >= 0 && coordX < imageWidth && coordY >= 0 &&\n coordY < imageHeight) {\n // set the output to the image value at the coordinate position.\n const rotatedRowOffset = coordY * (imageWidth * numChannels);\n const rotatedColOffset = coordX * numChannels;\n const imageIdx = batchOffset + rotatedRowOffset + rotatedColOffset + channel;\n outputValue = imageVals[imageIdx];\n }\n const outIdx = batchOffset + rowOffset + colOffset + channel;\n output[outIdx] = outputValue;\n }\n }\n }\n }\n const dataId = cpuBackend.write(output, image.shape, image.dtype);\n return { dataId, shape: image.shape, dtype: image.dtype };\n }\n};\n//# sourceMappingURL=RotateWithOffset.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
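// A small sketch of the inverse-mapping step in rotateWithOffset above: for an
// output pixel (x, y), the kernel rotates the point around (centerX, centerY)
// and samples the input there, falling back to fillValue outside the image.
// 'sourceCoord' is an illustrative helper, not part of tfjs.
function sourceCoord(x, y, centerX, centerY, radians) {
  const sin = Math.sin(radians);
  const cos = Math.cos(radians);
  const coordX = Math.round((x - centerX) * cos - (y - centerY) * sin + centerX);
  const coordY = Math.round((x - centerX) * sin + (y - centerY) * cos + centerY);
  return [coordX, coordY];
}
// Rotating by 90 degrees around the center of a 3x3 image:
// sourceCoord(2, 1, 1, 1, Math.PI / 2) -> [1, 2]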
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Round } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const round = unaryKernelFunc(Round, (xi) => {\n // The algorithm is based on banker's rounding.\n const base = Math.floor(xi);\n if (xi - base < 0.5) {\n return Math.floor(xi);\n }\n else if (xi - base > 0.5) {\n return Math.ceil(xi);\n }\n else {\n if (base % 2.0 === 0.0) {\n return base;\n }\n else {\n return base + 1.0;\n }\n }\n});\nexport const roundConfig = {\n kernelName: Round,\n backendName: 'cpu',\n kernelFunc: round,\n};\n//# sourceMappingURL=Round.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Selu } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nconst scaleAlpha = backend_util.SELU_SCALEALPHA;\nconst scale = backend_util.SELU_SCALE;\nexport const selu = unaryKernelFunc(Selu, (xi) => {\n if (xi >= 0) {\n return scale * xi;\n }\n else {\n return scaleAlpha * (Math.exp(xi) - 1);\n }\n});\nexport const seluConfig = {\n kernelName: Selu,\n backendName: 'cpu',\n kernelFunc: selu,\n};\n//# sourceMappingURL=Selu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
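// A standalone sketch of the banker's-rounding rule implemented by the Round
// kernel above: ties (fraction exactly 0.5) go to the nearest even integer.
// Illustrative helper, not part of tfjs.
function roundHalfToEven(x) {
  const base = Math.floor(x);
  const frac = x - base;
  if (frac < 0.5) return base;
  if (frac > 0.5) return Math.ceil(x);
  return base % 2 === 0 ? base : base + 1;
}
// roundHalfToEven(2.5) -> 2, roundHalfToEven(3.5) -> 4, roundHalfToEven(2.4) -> 2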
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sigmoid } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const sigmoid = unaryKernelFunc(Sigmoid, (xi) => 1 / (1 + Math.exp(-xi)));\nexport const sigmoidConfig = {\n kernelName: Sigmoid,\n backendName: 'cpu',\n kernelFunc: sigmoid,\n};\n//# sourceMappingURL=Sigmoid.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sign } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const sign = unaryKernelFunc(Sign, (xi) => {\n if (xi < 0) {\n return -1;\n }\n else if (xi > 0) {\n return 1;\n }\n else {\n return 0;\n }\n});\nexport const signConfig = {\n kernelName: Sign,\n backendName: 'cpu',\n kernelFunc: sign,\n};\n//# sourceMappingURL=Sign.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sin } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const sin = unaryKernelFunc(Sin, (xi) => Math.sin(xi));\nexport const sinConfig = {\n kernelName: Sin,\n backendName: 'cpu',\n kernelFunc: sin,\n};\n//# sourceMappingURL=Sin.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sinh } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const sinh = unaryKernelFunc(Sinh, (xi) => Math.sinh(xi));\nexport const sinhConfig = {\n kernelName: Sinh,\n backendName: 'cpu',\n kernelFunc: sinh,\n};\n//# sourceMappingURL=Sinh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Softplus } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\n// mirrors the implementation of tf.nn.softplus: https://goo.gl/vkcvwX\n// epsilon is the difference between 1.0 and the next representable float.\n// For a single precision 32 bit float this should be 2^-23, see:\n// https://math.byu.edu/~schow/work/IEEEFloatingPoint.htm\nconst epsilon = 1.1920928955078125e-7;\nconst threshold = Math.log(epsilon) + 2.0;\nexport const softplus = unaryKernelFunc(Softplus, (xi) => {\n // Value above which exp(x) may overflow, but softplus(x) == x\n // is within machine epsilon.\n const tooLarge = xi > -threshold;\n // Value below which exp(x) may underflow, but softplus(x) == exp(x)\n // is within machine epsilon.\n const tooSmall = xi < threshold;\n const expX = Math.exp(xi);\n let result;\n if (tooSmall) {\n result = expX;\n }\n else if (tooLarge) {\n result = xi;\n }\n else {\n result = Math.log(1.0 + expX);\n }\n return result;\n});\nexport const softplusConfig = {\n kernelName: Softplus,\n backendName: 'cpu',\n kernelFunc: softplus,\n};\n//# sourceMappingURL=Softplus.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
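// A minimal sketch of the numerically stable softplus evaluated by the kernel
// above: large x returns x (exp would overflow), very negative x returns exp(x)
// (log1p would lose precision), otherwise log(1 + exp(x)). Illustrative only.
const EPSILON_F32 = 1.1920928955078125e-7; // 2^-23, float32 machine epsilon
const THRESHOLD = Math.log(EPSILON_F32) + 2.0;
function softplusStable(x) {
  if (x > -THRESHOLD) return x;          // softplus(x) ~= x within epsilon
  if (x < THRESHOLD) return Math.exp(x); // softplus(x) ~= exp(x) within epsilon
  return Math.log(1.0 + Math.exp(x));
}
// softplusStable(50) -> 50, softplusStable(-50) ~= 1.9e-22, softplusStable(0) ~= 0.693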
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Transpose } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { transposeImpl } from './Transpose_impl';\nexport function transpose(args) {\n const { inputs, attrs, backend } = args;\n const { x } = inputs;\n const { perm } = attrs;\n assertNotComplex(x, 'transpose');\n const xRank = x.shape.length;\n const newShape = new Array(xRank);\n for (let i = 0; i < newShape.length; i++) {\n newShape[i] = x.shape[perm[i]];\n }\n const values = backend.data.get(x.dataId).values;\n const result = transposeImpl(values, x.shape, x.dtype, perm, newShape);\n const dataId = backend.write(result, newShape, x.dtype);\n return { dataId, shape: newShape, dtype: x.dtype };\n}\nexport const transposeConfig = {\n kernelName: Transpose,\n backendName: 'cpu',\n kernelFunc: transpose\n};\n//# sourceMappingURL=Transpose.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, SpaceToBatchND, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { padV2Config } from './PadV2';\nimport { reshape } from './Reshape';\nimport { transpose } from './Transpose';\nexport function spaceToBatchND(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n const { blockShape, paddings } = attrs;\n assertNotComplex([x], 'spaceToBatchND');\n const prod = util.sizeFromShape(blockShape);\n const completePaddings = [[0, 0]];\n completePaddings.push(...paddings);\n for (let i = 1 + blockShape.length; i < x.shape.length; ++i) {\n completePaddings.push([0, 0]);\n }\n const paddedX = padV2Config.kernelFunc({\n inputs: { x },\n backend,\n attrs: { paddings: completePaddings, constantValue: 0 }\n });\n const reshapedPaddedShape = backend_util.getReshaped(paddedX.shape, blockShape, prod, false);\n const permutedReshapedPaddedPermutation = backend_util.getPermuted(reshapedPaddedShape.length, blockShape.length, false);\n const flattenShape = backend_util.getReshapedPermuted(paddedX.shape, blockShape, prod, false);\n const reshapeInputs = { x: paddedX };\n const reshapeAttrs = { shape: reshapedPaddedShape };\n const paddedXReshaped = reshape({ inputs: reshapeInputs, backend, attrs: reshapeAttrs 
});\n const transposeInputs = { x: paddedXReshaped };\n const transposeAttrs = { perm: permutedReshapedPaddedPermutation };\n const paddedXT = transpose({ inputs: transposeInputs, backend, attrs: transposeAttrs });\n const resultReshapeInputs = { x: paddedXT };\n const resultReshapeAttrs = { shape: flattenShape };\n const result = reshape({ inputs: resultReshapeInputs, backend, attrs: resultReshapeAttrs });\n backend.disposeIntermediateTensorInfo(paddedX);\n backend.disposeIntermediateTensorInfo(paddedXReshaped);\n backend.disposeIntermediateTensorInfo(paddedXT);\n return result;\n}\nexport const spaceToBatchNDConfig = {\n kernelName: SpaceToBatchND,\n backendName: 'cpu',\n kernelFunc: spaceToBatchND\n};\n//# sourceMappingURL=SpaceToBatchND.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sqrt } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const sqrt = unaryKernelFunc(Sqrt, (xi) => Math.sqrt(xi));\nexport const sqrtConfig = {\n kernelName: Sqrt,\n backendName: 'cpu',\n kernelFunc: sqrt,\n};\n//# sourceMappingURL=Sqrt.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Square } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport const squareConfig = {\n kernelName: Square,\n backendName: 'cpu',\n kernelFunc: ({ inputs, backend }) => {\n const { x } = inputs;\n const cpuBackend = backend;\n assertNotComplex(x, 'square');\n const values = cpuBackend.data.get(x.dataId).values;\n const newValues = new Float32Array(values.length);\n for (let i = 0; i < values.length; ++i) {\n const value = values[i];\n newValues[i] = value * value;\n }\n const dataId = cpuBackend.write(newValues, x.shape, x.dtype);\n return { dataId, shape: x.shape, dtype: x.dtype };\n }\n};\n//# sourceMappingURL=Square.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
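// A shape-level sketch of spaceToBatchND above, assuming standard
// space-to-batch semantics: after padding, each spatial dimension is divided by
// its block size and the batch dimension is multiplied by the product of the
// block shape. Illustrative helper, not part of tfjs.
function spaceToBatchOutShape(xShape, blockShape, paddings) {
  const prod = blockShape.reduce((a, b) => a * b, 1);
  const out = xShape.slice();
  out[0] = xShape[0] * prod;
  for (let i = 0; i < blockShape.length; i++) {
    const padded = xShape[i + 1] + paddings[i][0] + paddings[i][1];
    out[i + 1] = padded / blockShape[i];
  }
  return out;
}
// spaceToBatchOutShape([1, 4, 4, 1], [2, 2], [[0, 0], [0, 0]]) -> [4, 2, 2, 1]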
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Step } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const step = unaryKernelFunc(Step, (xi, attrs) => {\n const stepAttrs = attrs;\n if (isNaN(xi)) {\n return NaN;\n }\n else {\n return xi > 0 ? 1 : stepAttrs.alpha;\n }\n});\nexport const stepConfig = {\n kernelName: Step,\n backendName: 'cpu',\n kernelFunc: step,\n};\n//# sourceMappingURL=Step.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Tan } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const tan = unaryKernelFunc(Tan, (xi) => Math.tan(xi));\nexport const tanConfig = {\n kernelName: Tan,\n backendName: 'cpu',\n kernelFunc: tan,\n};\n//# sourceMappingURL=Tan.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Tanh } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const tanh = unaryKernelFunc(Tanh, (xi) => Math.tanh(xi));\nexport const tanhConfig = {\n kernelName: Tanh,\n backendName: 'cpu',\n kernelFunc: tanh,\n};\n//# sourceMappingURL=Tanh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Unique } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { uniqueImpl } from './Unique_impl';\nexport function unique(args) {\n const { inputs, attrs, backend } = args;\n const { axis } = attrs;\n const { x } = inputs;\n assertNotComplex(x, 'unique');\n const values = backend.data.get(x.dataId).values;\n const { outputValues, outputShape, indices } = uniqueImpl(values, axis, x.shape, x.dtype);\n return [\n backend.makeTensorInfo(outputShape, x.dtype, outputValues),\n backend.makeTensorInfo([indices.length], 'int32', indices),\n ];\n}\nexport const uniqueConfig = {\n kernelName: Unique,\n backendName: 'cpu',\n kernelFunc: unique,\n};\n//# sourceMappingURL=Unique.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// We explicitly import the modular kernels so they get registered in the\n// global registry when we compile the library. 
A modular build would replace\n// the contents of this file and import only the kernels that are needed.\nimport { registerKernel } from '@tensorflow/tfjs-core';\nimport { _fusedMatMulConfig } from './kernels/_FusedMatMul';\nimport { absConfig } from './kernels/Abs';\nimport { acosConfig } from './kernels/Acos';\nimport { acoshConfig } from './kernels/Acosh';\nimport { addConfig } from './kernels/Add';\nimport { asinConfig } from './kernels/Asin';\nimport { asinhConfig } from './kernels/Asinh';\nimport { atanConfig } from './kernels/Atan';\nimport { atanhConfig } from './kernels/Atanh';\nimport { avgPoolConfig } from './kernels/AvgPool';\nimport { avgPoolBackpropConfig } from './kernels/AvgPoolBackprop';\nimport { batchMatMulConfig } from './kernels/BatchMatMul';\nimport { batchNormConfig } from './kernels/BatchNorm';\nimport { castConfig } from './kernels/Cast';\nimport { ceilConfig } from './kernels/Ceil';\nimport { clipConfig } from './kernels/Clip';\nimport { complexConfig } from './kernels/Complex';\nimport { concatConfig } from './kernels/Concat';\nimport { conv2DConfig } from './kernels/Conv2D';\nimport { conv2DBackpropFilterConfig } from './kernels/Conv2DBackpropFilter';\nimport { conv2DBackpropInputConfig } from './kernels/Conv2DBackpropInput';\nimport { conv3DConfig } from './kernels/Conv3D';\nimport { conv3DBackpropFilterV2Config } from './kernels/Conv3DBackpropFilterV2';\nimport { conv3DBackpropInputV2Config } from './kernels/Conv3DBackpropInputV2';\nimport { cosConfig } from './kernels/Cos';\nimport { coshConfig } from './kernels/Cosh';\nimport { depthwiseConv2dNativeConfig } from './kernels/DepthwiseConv2dNative';\nimport { depthwiseConv2dNativeBackpropFilterConfig } from './kernels/DepthwiseConv2dNativeBackpropFilter';\nimport { depthwiseConv2dNativeBackpropInputConfig } from './kernels/DepthwiseConv2dNativeBackpropInput';\nimport { dilation2dConfig } from './kernels/Dilation2D';\nimport { dilation2dBackpropFilterConfig } from './kernels/Dilation2DBackpropFilter';\nimport { dilation2dBackpropInputConfig } from './kernels/Dilation2DBackpropInput';\nimport { divConfig } from './kernels/Div';\nimport { eluConfig } from './kernels/Elu';\nimport { erfConfig } from './kernels/Erf';\nimport { expConfig } from './kernels/Exp';\nimport { expm1Config } from './kernels/Expm1';\nimport { fftConfig } from './kernels/FFT';\nimport { fillConfig } from './kernels/Fill';\nimport { flipLeftRightConfig } from './kernels/FlipLeftRight';\nimport { floorConfig } from './kernels/Floor';\nimport { fusedConv2DConfig } from './kernels/FusedConv2D';\nimport { fusedDepthwiseConv2DConfig } from './kernels/FusedDepthwiseConv2D';\nimport { identityConfig } from './kernels/Identity';\nimport { ifftConfig } from './kernels/IFFT';\nimport { imagConfig } from './kernels/Imag';\nimport { isFiniteConfig } from './kernels/IsFinite';\nimport { isInfConfig } from './kernels/IsInf';\nimport { isNaNConfig } from './kernels/IsNaN';\nimport { logConfig } from './kernels/Log';\nimport { log1pConfig } from './kernels/Log1p';\nimport { logicalNotConfig } from './kernels/LogicalNot';\nimport { maxConfig } from './kernels/Max';\nimport { maxPoolConfig } from './kernels/MaxPool';\nimport { maxPoolBackpropConfig } from './kernels/MaxPoolBackprop';\nimport { maxPoolWithArgmaxConfig } from './kernels/MaxPoolWithArgmax';\nimport { mirrorPadConfig } from './kernels/MirrorPad';\nimport { multiplyConfig } from './kernels/Multiply';\nimport { nonMaxSuppressionV4Config } from './kernels/NonMaxSuppressionV4';\nimport { 
nonMaxSuppressionV5Config } from './kernels/NonMaxSuppressionV5';\nimport { notEqualConfig } from './kernels/NotEqual';\nimport { padV2Config } from './kernels/PadV2';\nimport { preluConfig } from './kernels/Prelu';\nimport { realConfig } from './kernels/Real';\nimport { reciprocalConfig } from './kernels/Reciprocal';\nimport { reluConfig } from './kernels/Relu';\nimport { relu6Config } from './kernels/Relu6';\nimport { reshapeConfig } from './kernels/Reshape';\nimport { rotateWithOffsetConfig } from './kernels/RotateWithOffset';\nimport { roundConfig } from './kernels/Round';\nimport { rsqrtConfig } from './kernels/Rsqrt';\nimport { seluConfig } from './kernels/Selu';\nimport { sigmoidConfig } from './kernels/Sigmoid';\nimport { signConfig } from './kernels/Sign';\nimport { sinConfig } from './kernels/Sin';\nimport { sinhConfig } from './kernels/Sinh';\nimport { sliceConfig } from './kernels/Slice';\nimport { softplusConfig } from './kernels/Softplus';\nimport { spaceToBatchNDConfig } from './kernels/SpaceToBatchND';\nimport { sqrtConfig } from './kernels/Sqrt';\nimport { squareConfig } from './kernels/Square';\nimport { squaredDifferenceConfig } from './kernels/SquaredDifference';\nimport { stepConfig } from './kernels/Step';\nimport { subConfig } from './kernels/Sub';\nimport { tanConfig } from './kernels/Tan';\nimport { tanhConfig } from './kernels/Tanh';\nimport { transposeConfig } from './kernels/Transpose';\nimport { uniqueConfig } from './kernels/Unique';\n// List all kernel configs here\nconst kernelConfigs = [\n _fusedMatMulConfig,\n absConfig,\n acosConfig,\n acoshConfig,\n addConfig,\n asinConfig,\n asinhConfig,\n atanConfig,\n atanhConfig,\n avgPoolConfig,\n avgPoolBackpropConfig,\n batchMatMulConfig,\n batchNormConfig,\n castConfig,\n ceilConfig,\n clipConfig,\n complexConfig,\n concatConfig,\n conv2DBackpropFilterConfig,\n conv2DBackpropInputConfig,\n conv2DConfig,\n conv3DBackpropFilterV2Config,\n conv3DBackpropInputV2Config,\n conv3DConfig,\n cosConfig,\n coshConfig,\n depthwiseConv2dNativeConfig,\n depthwiseConv2dNativeBackpropFilterConfig,\n depthwiseConv2dNativeBackpropInputConfig,\n dilation2dConfig,\n dilation2dBackpropInputConfig,\n dilation2dBackpropFilterConfig,\n divConfig,\n eluConfig,\n erfConfig,\n expConfig,\n expm1Config,\n fftConfig,\n fillConfig,\n flipLeftRightConfig,\n floorConfig,\n fusedConv2DConfig,\n fusedDepthwiseConv2DConfig,\n identityConfig,\n ifftConfig,\n imagConfig,\n isFiniteConfig,\n isInfConfig,\n isNaNConfig,\n logConfig,\n log1pConfig,\n logicalNotConfig,\n maxPoolConfig,\n maxPoolBackpropConfig,\n maxPoolWithArgmaxConfig,\n maxConfig,\n mirrorPadConfig,\n multiplyConfig,\n nonMaxSuppressionV4Config,\n nonMaxSuppressionV5Config,\n notEqualConfig,\n padV2Config,\n preluConfig,\n realConfig,\n reciprocalConfig,\n reluConfig,\n relu6Config,\n reshapeConfig,\n rotateWithOffsetConfig,\n roundConfig,\n rsqrtConfig,\n seluConfig,\n sigmoidConfig,\n signConfig,\n sinConfig,\n sinhConfig,\n sliceConfig,\n softplusConfig,\n spaceToBatchNDConfig,\n sqrtConfig,\n squareConfig,\n squaredDifferenceConfig,\n stepConfig,\n subConfig,\n tanConfig,\n tanhConfig,\n transposeConfig,\n uniqueConfig,\n];\nfor (const kernelConfig of kernelConfigs) {\n registerKernel(kernelConfig);\n}\n//# sourceMappingURL=register_all_kernels.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
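// A hedged sketch of the registration pattern used above: each kernel is a plain
// { kernelName, backendName, kernelFunc } config handed to registerKernel from
// @tensorflow/tfjs-core. 'DemoNegate' is a made-up kernel name for illustration,
// not one of the configs imported in this file; the data-access pattern mirrors
// the Square kernel above.
import { registerKernel } from '@tensorflow/tfjs-core';

const demoNegateConfig = {
  kernelName: 'DemoNegate',
  backendName: 'cpu',
  kernelFunc: ({ inputs, backend }) => {
    const { x } = inputs;
    const values = backend.data.get(x.dataId).values;
    const newValues = new Float32Array(values.length);
    for (let i = 0; i < values.length; ++i) newValues[i] = -values[i];
    const dataId = backend.write(newValues, x.shape, x.dtype);
    return { dataId, shape: x.shape, dtype: x.dtype };
  },
};
registerKernel(demoNegateConfig);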
[generated build output: embedded source-map `sourcesContent` for `@tensorflow/tfjs-backend-webgl` modules — package index, canvas_util, tex_util, webgl_util, flags_webgl, shared, addn_gpu, addn_packed_gpu, argminmax_gpu, packing_util, glsl_version, shader_compiler_util, … — each with its Apache-2.0 license header; omitted here]
${getSamplerFromInInfo(newInputInfo)}\n float ${funcName}(int row, int col, int depth) {\n return ${funcName}(${getSqueezedParams(params, keptDims)});\n }\n `;\n }\n if (inputInfo.shapeInfo.isUniform) {\n // Uniform arrays will be less than 65505 (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth) {\n int index = round(dot(vec3(row, col, depth),\n vec3(${stride0}, ${stride1}, 1)));\n ${getUniformSampler(inputInfo)}\n }\n `;\n }\n const texShape = inputInfo.shapeInfo.texShape;\n const texNumR = texShape[0];\n const texNumC = texShape[1];\n const flatOffset = inputInfo.shapeInfo.flatOffset;\n if (texNumC === stride0 && flatOffset == null) {\n // texC is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth) {\n float texR = float(row);\n float texC = dot(vec2(col, depth), vec2(${stride1}, 1));\n vec2 uv = (vec2(texC, texR) + halfCR) /\n vec2(${texNumC}.0, ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n if (texNumC === stride1 && flatOffset == null) {\n // texR is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth) {\n float texR = dot(vec2(row, col), vec2(${shape[1]}, 1));\n float texC = float(depth);\n vec2 uv = (vec2(texC, texR) + halfCR) / vec2(${texNumC}.0, ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n const offset = getFlatOffsetUniformName(texName);\n return `\n float ${funcName}(int row, int col, int depth) {\n // Explicitly use integer operations as dot() only works on floats.\n int index = row * ${stride0} + col * ${stride1} + depth + ${offset};\n vec2 uv = uvFromFlat(${texNumR}, ${texNumC}, index);\n return sampleTexture(${texName}, uv);\n }\n `;\n}\nfunction getPackedSamplerND(inputInfo) {\n const shape = inputInfo.shapeInfo.logicalShape;\n const rank = shape.length;\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n const texShape = inputInfo.shapeInfo.texShape;\n const packedTexShape = [Math.ceil(texShape[0] / 2), Math.ceil(texShape[1] / 2)];\n const texNumR = packedTexShape[0];\n const texNumC = packedTexShape[1];\n const valuesPerRow = Math.ceil(shape[rank - 1] / 2);\n let texelsInBatch = valuesPerRow * Math.ceil(shape[rank - 2] / 2);\n let params = `int b, int row, int col`;\n let index = `b * ${texelsInBatch} + (row / 2) * ${valuesPerRow} + (col / 2)`;\n for (let b = 2; b < rank - 1; b++) {\n params = `int b${b}, ` + params;\n texelsInBatch *= shape[rank - b - 1];\n index = `b${b} * ${texelsInBatch} + ` + index;\n }\n const glsl = getGlslDifferences();\n return `\n vec4 ${funcName}(${params}) {\n int index = ${index};\n int texR = index / ${texNumC};\n int texC = index - texR * ${texNumC};\n vec2 uv = (vec2(texC, texR) + halfCR) / vec2(${texNumC}, ${texNumR});\n return ${glsl.texture2D}(${texName}, uv);\n }\n `;\n}\nfunction getSampler4D(inputInfo) {\n const shape = inputInfo.shapeInfo.logicalShape;\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n const stride2 = shape[3];\n const stride1 = shape[2] * stride2;\n const stride0 = shape[1] * stride1;\n const { newShape, keptDims } = util.squeezeShape(shape);\n if (newShape.length < shape.length) {\n const newInputInfo = squeezeInputInfo(inputInfo, newShape);\n const params = ['row', 'col', 'depth', 'depth2'];\n return `\n ${getSamplerFromInInfo(newInputInfo)}\n float ${funcName}(int row, int 
col, int depth, int depth2) {\n return ${funcName}(${getSqueezedParams(params, keptDims)});\n }\n `;\n }\n if (inputInfo.shapeInfo.isUniform) {\n // Uniform arrays will be less than 65505 (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth, int depth2) {\n int index = round(dot(vec4(row, col, depth, depth2),\n vec4(${stride0}, ${stride1}, ${stride2}, 1)));\n ${getUniformSampler(inputInfo)}\n }\n `;\n }\n const flatOffset = inputInfo.shapeInfo.flatOffset;\n const texShape = inputInfo.shapeInfo.texShape;\n const texNumR = texShape[0];\n const texNumC = texShape[1];\n if (texNumC === stride0 && flatOffset == null) {\n // texC is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth, int depth2) {\n float texR = float(row);\n float texC =\n dot(vec3(col, depth, depth2),\n vec3(${stride1}, ${stride2}, 1));\n vec2 uv = (vec2(texC, texR) + halfCR) /\n vec2(${texNumC}.0, ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n if (texNumC === stride2 && flatOffset == null) {\n // texR is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth, int depth2) {\n float texR = dot(vec3(row, col, depth),\n vec3(${shape[1] * shape[2]}, ${shape[2]}, 1));\n float texC = float(depth2);\n vec2 uv = (vec2(texC, texR) + halfCR) /\n vec2(${texNumC}.0, ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n const offset = getFlatOffsetUniformName(texName);\n return `\n float ${funcName}(int row, int col, int depth, int depth2) {\n // Explicitly use integer operations as dot() only works on floats.\n int index = row * ${stride0} + col * ${stride1} +\n depth * ${stride2} + depth2;\n vec2 uv = uvFromFlat(${texNumR}, ${texNumC}, index + ${offset});\n return sampleTexture(${texName}, uv);\n }\n `;\n}\nfunction getSampler5D(inputInfo) {\n const shape = inputInfo.shapeInfo.logicalShape;\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n const stride3 = shape[4];\n const stride2 = shape[3] * stride3;\n const stride1 = shape[2] * stride2;\n const stride0 = shape[1] * stride1;\n const { newShape, keptDims } = util.squeezeShape(shape);\n if (newShape.length < shape.length) {\n const newInputInfo = squeezeInputInfo(inputInfo, newShape);\n const params = ['row', 'col', 'depth', 'depth2', 'depth3'];\n return `\n ${getSamplerFromInInfo(newInputInfo)}\n float ${funcName}(int row, int col, int depth, int depth2, int depth3) {\n return ${funcName}(${getSqueezedParams(params, keptDims)});\n }\n `;\n }\n if (inputInfo.shapeInfo.isUniform) {\n // Uniform arrays will be less than 65505 (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth, int depth2, int depth3) {\n float index = dot(\n vec4(row, col, depth, depth2),\n vec4(${stride0}, ${stride1}, ${stride2}, ${stride3})) +\n depth3;\n ${getUniformSampler(inputInfo)}\n }\n `;\n }\n const flatOffset = inputInfo.shapeInfo.flatOffset;\n const texShape = inputInfo.shapeInfo.texShape;\n const texNumR = texShape[0];\n const texNumC = texShape[1];\n if (texNumC === stride0 && flatOffset == null) {\n // texC is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth, int depth2, int depth3) {\n int texR = row;\n float texC = dot(vec4(col, depth, depth2, depth3),\n vec4(${stride1}, ${stride2}, ${stride3}, 1));\n vec2 uv = (vec2(texC, texR) + halfCR) 
/\n vec2(${texNumC}.0, ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n if (texNumC === stride3 && flatOffset == null) {\n // texR is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth, int depth2, int depth3) {\n float texR = dot(\n vec4(row, col, depth, depth2),\n vec4(${shape[1] * shape[2] * shape[3]},\n ${shape[2] * shape[3]}, ${shape[3]}, 1));\n int texC = depth3;\n vec2 uv = (vec2(texC, texR) + halfCR) /\n vec2(${texNumC}.0, ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n const offset = getFlatOffsetUniformName(texName);\n return `\n float ${funcName}(int row, int col, int depth, int depth2, int depth3) {\n // Explicitly use integer operations as dot() only works on floats.\n int index = row * ${stride0} + col * ${stride1} + depth * ${stride2} +\n depth2 * ${stride3} + depth3 + ${offset};\n vec2 uv = uvFromFlat(${texNumR}, ${texNumC}, index);\n return sampleTexture(${texName}, uv);\n }\n `;\n}\nfunction getSampler6D(inputInfo) {\n const shape = inputInfo.shapeInfo.logicalShape;\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n const { newShape, keptDims } = util.squeezeShape(shape);\n if (newShape.length < shape.length) {\n const newInputInfo = squeezeInputInfo(inputInfo, newShape);\n const params = ['row', 'col', 'depth', 'depth2', 'depth3', 'depth4'];\n return `\n ${getSamplerFromInInfo(newInputInfo)}\n float ${funcName}(int row, int col, int depth,\n int depth2, int depth3, int depth4) {\n return ${funcName}(${getSqueezedParams(params, keptDims)});\n }\n `;\n }\n const stride4 = shape[5];\n const stride3 = shape[4] * stride4;\n const stride2 = shape[3] * stride3;\n const stride1 = shape[2] * stride2;\n const stride0 = shape[1] * stride1;\n if (inputInfo.shapeInfo.isUniform) {\n // Uniform arrays will be less than 65505 (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth,\n int depth2, int depth3, int depth4) {\n int index = round(dot(\n vec4(row, col, depth, depth2),\n vec4(${stride0}, ${stride1}, ${stride2}, ${stride3})) +\n dot(\n vec2(depth3, depth4),\n vec2(${stride4}, 1)));\n ${getUniformSampler(inputInfo)}\n }\n `;\n }\n const flatOffset = inputInfo.shapeInfo.flatOffset;\n const texShape = inputInfo.shapeInfo.texShape;\n const texNumR = texShape[0];\n const texNumC = texShape[1];\n if (texNumC === stride0 && flatOffset == null) {\n // texC is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth,\n int depth2, int depth3, int depth4) {\n int texR = row;\n float texC = dot(vec4(col, depth, depth2, depth3),\n vec4(${stride1}, ${stride2}, ${stride3}, ${stride4})) +\n float(depth4);\n vec2 uv = (vec2(texC, texR) + halfCR) /\n vec2(${texNumC}.0, ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n if (texNumC === stride4 && flatOffset == null) {\n // texR is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth,\n int depth2, int depth3, int depth4) {\n float texR = dot(vec4(row, col, depth, depth2),\n vec4(${shape[1] * shape[2] * shape[3] * shape[4]},\n ${shape[2] * shape[3] * shape[4]},\n ${shape[3] * shape[4]},\n ${shape[4]})) + float(depth3);\n int texC = depth4;\n vec2 uv = (vec2(texC, texR) + halfCR) /\n vec2(${texNumC}.0, ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n const offset = 
getFlatOffsetUniformName(texName);\n return `\n float ${funcName}(int row, int col, int depth,\n int depth2, int depth3, int depth4) {\n // Explicitly use integer operations as dot() only works on floats.\n int index = row * ${stride0} + col * ${stride1} + depth * ${stride2} +\n depth2 * ${stride3} + depth3 * ${stride4} + depth4 + ${offset};\n vec2 uv = uvFromFlat(${texNumR}, ${texNumC}, index);\n return sampleTexture(${texName}, uv);\n }\n `;\n}\nfunction getUniformSampler(inputInfo) {\n const texName = inputInfo.name;\n const inSize = util.sizeFromShape(inputInfo.shapeInfo.logicalShape);\n if (inSize < 2) {\n return `return ${texName};`;\n }\n return `\n for (int i = 0; i < ${inSize}; i++) {\n if (i == index) {\n return ${texName}[i];\n }\n }\n `;\n}\nfunction getPackedSamplerAtOutputCoords(inputInfo, outShapeInfo) {\n const texName = inputInfo.name;\n const texFuncSnippet = texName.charAt(0).toUpperCase() + texName.slice(1);\n const funcName = 'get' + texFuncSnippet + 'AtOutCoords';\n const inRank = inputInfo.shapeInfo.logicalShape.length;\n const outRank = outShapeInfo.logicalShape.length;\n const broadcastDims = getBroadcastDims(inputInfo.shapeInfo.logicalShape, outShapeInfo.logicalShape);\n const type = getCoordsDataType(outRank);\n const rankDiff = outRank - inRank;\n let coordsSnippet;\n const fields = ['x', 'y', 'z', 'w', 'u', 'v'];\n if (inRank === 0) {\n coordsSnippet = '';\n }\n else if (outRank < 2 && broadcastDims.length >= 1) {\n coordsSnippet = 'coords = 0;';\n }\n else {\n coordsSnippet =\n broadcastDims.map(d => `coords.${fields[d + rankDiff]} = 0;`)\n .join('\\n');\n }\n let unpackedCoordsSnippet = '';\n if (outRank < 2 && inRank > 0) {\n unpackedCoordsSnippet = 'coords';\n }\n else {\n unpackedCoordsSnippet = inputInfo.shapeInfo.logicalShape\n .map((s, i) => `coords.${fields[i + rankDiff]}`)\n .join(', ');\n }\n let output = `return outputValue;`;\n const inSize = util.sizeFromShape(inputInfo.shapeInfo.logicalShape);\n const isInputScalar = inSize === 1;\n const outSize = util.sizeFromShape(outShapeInfo.logicalShape);\n const isOutputScalar = outSize === 1;\n if (inRank === 1 && !isInputScalar && !isOutputScalar) {\n output = `\n return vec4(outputValue.xy, outputValue.xy);\n `;\n }\n else if (isInputScalar && !isOutputScalar) {\n if (outRank === 1) {\n output = `\n return vec4(outputValue.x, outputValue.x, 0., 0.);\n `;\n }\n else {\n output = `\n return vec4(outputValue.x);\n `;\n }\n }\n else if (broadcastDims.length) {\n const rows = inRank - 2;\n const cols = inRank - 1;\n if (broadcastDims.indexOf(rows) > -1 && broadcastDims.indexOf(cols) > -1) {\n output = `return vec4(outputValue.x);`;\n }\n else if (broadcastDims.indexOf(rows) > -1) {\n output = `return vec4(outputValue.x, outputValue.y, ` +\n `outputValue.x, outputValue.y);`;\n }\n else if (broadcastDims.indexOf(cols) > -1) {\n output = `return vec4(outputValue.xx, outputValue.zz);`;\n }\n }\n return `\n vec4 ${funcName}() {\n ${type} coords = getOutputCoords();\n ${coordsSnippet}\n vec4 outputValue = get${texFuncSnippet}(${unpackedCoordsSnippet});\n ${output}\n }\n `;\n}\nfunction getSamplerAtOutputCoords(inputInfo, outShapeInfo) {\n const texName = inputInfo.name;\n const texFuncSnippet = texName.charAt(0).toUpperCase() + texName.slice(1);\n const funcName = 'get' + texFuncSnippet + 'AtOutCoords';\n const outTexShape = outShapeInfo.texShape;\n const inTexShape = inputInfo.shapeInfo.texShape;\n const inRank = inputInfo.shapeInfo.logicalShape.length;\n const outRank = outShapeInfo.logicalShape.length;\n if 
(!inputInfo.shapeInfo.isUniform && inRank === outRank &&\n inputInfo.shapeInfo.flatOffset == null &&\n util.arraysEqual(inTexShape, outTexShape)) {\n return `\n float ${funcName}() {\n return sampleTexture(${texName}, resultUV);\n }\n `;\n }\n const type = getCoordsDataType(outRank);\n const broadcastDims = getBroadcastDims(inputInfo.shapeInfo.logicalShape, outShapeInfo.logicalShape);\n const rankDiff = outRank - inRank;\n let coordsSnippet;\n const fields = ['x', 'y', 'z', 'w', 'u', 'v'];\n if (inRank === 0) {\n coordsSnippet = '';\n }\n else if (outRank < 2 && broadcastDims.length >= 1) {\n coordsSnippet = 'coords = 0;';\n }\n else {\n coordsSnippet =\n broadcastDims.map(d => `coords.${fields[d + rankDiff]} = 0;`)\n .join('\\n');\n }\n let unpackedCoordsSnippet = '';\n if (outRank < 2 && inRank > 0) {\n unpackedCoordsSnippet = 'coords';\n }\n else {\n unpackedCoordsSnippet = inputInfo.shapeInfo.logicalShape\n .map((s, i) => `coords.${fields[i + rankDiff]}`)\n .join(', ');\n }\n return `\n float ${funcName}() {\n ${type} coords = getOutputCoords();\n ${coordsSnippet}\n return get${texFuncSnippet}(${unpackedCoordsSnippet});\n }\n `;\n}\nexport function getCoordsDataType(rank) {\n if (rank <= 1) {\n return 'int';\n }\n else if (rank === 2) {\n return 'ivec2';\n }\n else if (rank === 3) {\n return 'ivec3';\n }\n else if (rank === 4) {\n return 'ivec4';\n }\n else if (rank === 5) {\n return 'ivec5';\n }\n else if (rank === 6) {\n return 'ivec6';\n }\n else {\n throw Error(`GPU for rank ${rank} is not yet supported`);\n }\n}\n/** Returns a new input info (a copy) that has a squeezed logical shape. */\nfunction squeezeInputInfo(inInfo, squeezedShape) {\n // Deep copy.\n const newInputInfo = JSON.parse(JSON.stringify(inInfo));\n newInputInfo.shapeInfo.logicalShape = squeezedShape;\n return newInputInfo;\n}\nfunction getSqueezedParams(params, keptDims) {\n return keptDims.map(d => params[d]).join(', ');\n}\n//# sourceMappingURL=shader_compiler.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nimport { getChannels } from './packing_util';\nimport { getCoordsDataType } from './shader_compiler';\nexport class ArgMinMaxPackedProgram {\n constructor(shape, windowSize, op, firstPass) {\n this.variableNames = ['A'];\n this.packedInputs = true;\n this.packedOutput = true;\n util.assert(shape.length > 2, () => `Packed arg${op.charAt(0).toUpperCase() +\n op.slice(1)} supports only inputs with rank above 2.`);\n const inSize = shape[shape.length - 1];\n const outSize = Math.ceil(inSize / windowSize);\n this.outputShape = shape.slice(0, -1);\n if (outSize > 1) {\n this.outputShape.push(outSize);\n }\n if (!firstPass) {\n this.variableNames.push('bestIndicesA');\n }\n const outShape = this.outputShape;\n const rank = outShape.length;\n const dtype = getCoordsDataType(rank);\n const coords = getChannels('coords', rank);\n let sourceLocSetup;\n let sourceRank;\n if (outSize === 1) {\n sourceRank = rank + 1;\n const sourceLocDType = getCoordsDataType(sourceRank);\n sourceLocSetup = `\n ${sourceLocDType} sourceLocR = ${sourceLocDType}(${coords.join()}, 0);\n ++${coords[rank - 1]};\n ${sourceLocDType} sourceLocG = ${sourceLocDType}(${coords.join()}, 0);\n ++${coords[rank - 2]};\n ${sourceLocDType} sourceLocA = ${sourceLocDType}(${coords.join()}, 0);\n --${coords[rank - 1]};\n ${sourceLocDType} sourceLocB = ${sourceLocDType}(${coords.join()}, 0);\n --${coords[rank - 2]};`;\n }\n else {\n sourceRank = rank;\n sourceLocSetup = `\n ${dtype} sourceLocR = coords;\n ++${coords[rank - 1]};\n ${dtype} sourceLocG = coords;\n ++${coords[rank - 2]};\n ${dtype} sourceLocA = coords;\n --${coords[rank - 1]};\n ${dtype} sourceLocB = coords;\n --${coords[rank - 2]};`;\n }\n const channels = ['x', 'y', 'z', 'w', 'u', 'v'].slice(0, sourceRank);\n const inChannel = '.' + channels[sourceRank - 1]; // e.g. \".b\" for rank 3.\n const intChannels = channels.map(x => 'int ' + x);\n const srcRCoords = getChannels('sourceLocR', sourceRank - 1).concat('inIdx.r');\n const srcGCoords = getChannels('sourceLocG', sourceRank - 1).concat('inIdx.g');\n const srcBCoords = getChannels('sourceLocB', sourceRank - 1).concat('inIdx.b');\n const srcACoords = getChannels('sourceLocA', sourceRank - 1).concat('inIdx.a');\n const compOp = (op === 'max') ? 'greaterThan' : 'lessThan';\n const fetchCandidateIdx = firstPass ? '' : `\n inIdx = round(vec4(getBestIndicesAChannel(${srcRCoords.join()}),\n getBestIndicesAChannel(${srcGCoords.join()}),\n getBestIndicesAChannel(${srcBCoords.join()}),\n getBestIndicesAChannel(${srcACoords.join()})));`;\n const fetchValue = `vec4(\n getAChannel(${srcRCoords.join()}),\n hasNextCol ? getAChannel(${srcGCoords.join()}) : 0.,\n hasNextRow ? getAChannel(${srcBCoords.join()}) : 0.,\n hasNextRow && hasNextCol ? getAChannel(${srcACoords.join()}) : 0.)`;\n const getBestIndicesAChannelSnippet = firstPass ? 
'' : `\n float getBestIndicesAChannel(${intChannels.join()}) {\n return getChannel(getBestIndicesA(${channels.join()}),\n vec2(${channels.slice(-2).join()}));\n }`;\n this.userCode = `\n float getAChannel(${intChannels.join()}) {\n return getChannel(getA(${channels.join()}),\n vec2(${channels.slice(-2).join()}));\n }\n ${getBestIndicesAChannelSnippet}\n void main() {\n ${dtype} coords = getOutputCoords();\n bool hasNextCol = ${coords[rank - 1]} < ${outShape[rank - 1] - 1};\n bool hasNextRow = ${coords[rank - 2]} < ${outShape[rank - 2] - 1};\n ${sourceLocSetup}\n ivec4 srcIdx = ivec4(sourceLocR${inChannel}, sourceLocG${inChannel},\n sourceLocB${inChannel}, sourceLocA${inChannel}) * ${windowSize};\n ivec4 inIdx = srcIdx;\n vec4 bestIndex = vec4(inIdx);\n vec4 bestValue = ${fetchValue};\n\n for (int i = 0; i < ${windowSize}; i++) {\n inIdx = srcIdx;\n ${fetchCandidateIdx}\n vec4 candidate = ${fetchValue};\n bvec4 nan = isnan(candidate);\n bvec4 replace = bvec4(\n vec4(${compOp}(candidate, bestValue)) * (vec4(1.0) - vec4(nan)));\n\n bestValue = vec4(replace.x ? candidate.x : bestValue.x,\n replace.y ? candidate.y : bestValue.y,\n replace.z ? candidate.z : bestValue.z,\n replace.w ? candidate.w : bestValue.w);\n bestIndex = mix(bestIndex, vec4(inIdx), vec4(replace));\n srcIdx++;\n }\n setOutput(bestIndex);\n }\n `;\n }\n}\n//# sourceMappingURL=argminmax_packed_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class AvgPool2DBackpropProgram {\n constructor(convInfo) {\n this.variableNames = ['dy'];\n this.outputShape = convInfo.inShape;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padTop = effectiveFilterHeight - 1 - convInfo.padInfo.top;\n const padLeft = effectiveFilterWidth - 1 - convInfo.padInfo.left;\n const avgMultiplier = 1 / (filterHeight * filterWidth);\n this.userCode = `\n const ivec2 pads = ivec2(${padTop}, ${padLeft});\n const float avgMultiplier = float(${avgMultiplier});\n\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int d = coords[3];\n\n ivec2 dyRCCorner = coords.yz - pads;\n int dyRCorner = dyRCCorner.x;\n int dyCCorner = dyRCCorner.y;\n\n // Convolve dy(?, ?, d) with pos mask(:, :, d) to get dx(xR, xC, d).\n // ? = to be determined. 
: = across all values in that axis.\n float dotProd = 0.0;\n for (int wR = 0; wR < ${effectiveFilterHeight};\n wR += ${dilationHeight}) {\n float dyR = float(dyRCorner + wR) / ${strideHeight}.0;\n\n if (dyR < 0.0 || dyR >= ${convInfo.outHeight}.0 || fract(dyR) > 0.0) {\n continue;\n }\n int idyR = int(dyR);\n\n for (int wC = 0; wC < ${effectiveFilterWidth};\n wC+= ${dilationWidth}) {\n float dyC = float(dyCCorner + wC) / ${strideWidth}.0;\n\n if (dyC < 0.0 || dyC >= ${convInfo.outWidth}.0 ||\n fract(dyC) > 0.0) {\n continue;\n }\n int idyC = int(dyC);\n\n float dyValue = getDy(b, idyR, idyC, d);\n\n dotProd += dyValue * avgMultiplier;\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\nexport class AvgPool3DBackpropProgram {\n constructor(convInfo) {\n this.variableNames = ['dy'];\n this.outputShape = convInfo.inShape;\n const filterDepth = convInfo.filterDepth;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const strideDepth = convInfo.strideDepth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationDepth = convInfo.dilationDepth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterDepth = convInfo.effectiveFilterDepth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padFront = effectiveFilterDepth - 1 - convInfo.padInfo.front;\n const padTop = effectiveFilterHeight - 1 - convInfo.padInfo.top;\n const padLeft = effectiveFilterWidth - 1 - convInfo.padInfo.left;\n const avgMultiplier = 1 / (filterDepth * filterHeight * filterWidth);\n this.userCode = `\n const ivec3 pads = ivec3(${padFront}, ${padTop}, ${padLeft});\n const float avgMultiplier = float(${avgMultiplier});\n\n void main() {\n ivec5 coords = getOutputCoords();\n int batch = coords.x;\n int ch = coords.u;\n\n ivec3 dyCorner = ivec3(coords.y, coords.z, coords.w) - pads;\n int dyDCorner = dyCorner.x;\n int dyRCorner = dyCorner.y;\n int dyCCorner = dyCorner.z;\n\n // Convolve dy(?, ?, ?, d) with pos mask(:, :, :, ch) to get\n // dx(xD, xR, xC, ch).\n // ? = to be determined. : = across all values in that axis.\n float dotProd = 0.0;\n\n for (int wD = 0; wD < ${effectiveFilterDepth};\n wD += ${dilationDepth}) {\n float dyD = float(dyDCorner + wD) / ${strideDepth}.0;\n\n if (dyD < 0.0 || dyD >= ${convInfo.outDepth}.0 || fract(dyD) > 0.0) {\n continue;\n }\n int idyD = int(dyD);\n\n for (int wR = 0; wR < ${effectiveFilterHeight};\n wR += ${dilationHeight}) {\n float dyR = float(dyRCorner + wR) / ${strideHeight}.0;\n\n if (dyR < 0.0 || dyR >= ${convInfo.outHeight}.0 ||\n fract(dyR) > 0.0) {\n continue;\n }\n int idyR = int(dyR);\n\n for (int wC = 0; wC < ${effectiveFilterWidth};\n wC += ${dilationWidth}) {\n float dyC = float(dyCCorner + wC) / ${strideWidth}.0;\n\n if (dyC < 0.0 || dyC >= ${convInfo.outWidth}.0 ||\n fract(dyC) > 0.0) {\n continue;\n }\n int idyC = int(dyC);\n\n float dyValue = getDy(batch, idyD, idyR, idyC, ch);\n\n dotProd += dyValue * avgMultiplier;\n }\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\n//# sourceMappingURL=avg_pool_backprop_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util } from '@tensorflow/tfjs-core';\nconst CHECK_NAN_SNIPPET = `\n if (isnan(a)) return a;\n if (isnan(b)) return b;\n`;\n// We use native integer division to deal with floating point imprecision. Since\n// we implement floor division and glsl implements truncated division, we\n// correct for this by subtracting 1 from result when the result is negative and\n// there is a remainder.\nexport const INT_DIV = `\n float s = sign(a) * sign(b);\n int ia = round(a);\n int ib = round(b);\n if (ib != 0) {\n // Windows (D3D) wants guaranteed non-zero int division at compile-time.\n return float(idiv(ia, ib, s));\n } else {\n return NAN;\n }\n`;\nexport const POW = `\nif(a < 0.0 && floor(b) < b){\n return NAN;\n}\nif (b == 0.0) {\n return 1.0;\n}\nreturn (round(mod(b, 2.0)) != 1) ?\n pow(abs(a), b) : sign(a) * pow(abs(a), b);\n`;\nexport const SQUARED_DIFFERENCE = 'return (a - b) * (a - b);';\nexport const EQUAL = `return float(a == b);`;\nexport const LESS = `return float(a < b);`;\nexport const LESS_EQUAL = `return float(a <= b);`;\nexport const GREATER = `return float(a > b);`;\nexport const GREATER_EQUAL = `return float(a >= b);`;\nexport const LOGICAL_AND = `return float(a >= 1.0 && b >= 1.0);`;\nexport const LOGICAL_OR = `return float(a >= 1.0 || b >= 1.0);`;\nexport const MAX = CHECK_NAN_SNIPPET + `\n return max(a, b);\n`;\nexport const MIN = CHECK_NAN_SNIPPET + `\n return min(a, b);\n`;\nexport const MOD = `if (b == 0.0) return NAN;\n return mod(a, b);`;\nexport const ELU_DER = `return (b >= 1.0) ? a : a * (b + 1.0);`;\nexport const PRELU = `return (a < 0.) ? b * a : a;`;\nexport class BinaryOpProgram {\n constructor(op, aShape, bShape) {\n this.variableNames = ['A', 'B'];\n this.outputShape = backend_util.assertAndGetBroadcastShape(aShape, bShape);\n this.userCode = `\n float binaryOperation(float a, float b) {\n ${op}\n }\n\n void main() {\n float a = getAAtOutCoords();\n float b = getBAtOutCoords();\n setOutput(binaryOperation(a, b));\n }\n `;\n }\n}\n//# sourceMappingURL=binaryop_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, util } from '@tensorflow/tfjs-core';\nimport { getChannels } from './packing_util';\nimport { getCoordsDataType } from './shader_compiler';\nconst CHECK_NAN_SNIPPET = `\n result.r = isNaN.r > 0. ? NAN : result.r;\n result.g = isNaN.g > 0. ? NAN : result.g;\n result.b = isNaN.b > 0. ? NAN : result.b;\n result.a = isNaN.a > 0. ? NAN : result.a;\n`;\nexport const INT_DIV = `\n ivec4 ia = round(a);\n ivec4 ib = round(b);\n bvec4 cond = notEqual(ib, ivec4(0));\n ivec4 result = ivec4(0);\n vec4 s = sign(a) * sign(b);\n\n // Windows (D3D) wants guaranteed non-zero int division at compile-time.\n if (cond[0]) {\n result[0] = idiv(ia[0], ib[0], s[0]);\n }\n if (cond[1]) {\n result[1] = idiv(ia[1], ib[1], s[1]);\n }\n if (cond[2]) {\n result[2] = idiv(ia[2], ib[2], s[2]);\n }\n if (cond[3]) {\n result[3] = idiv(ia[3], ib[3], s[3]);\n }\n return vec4(result);\n`;\nexport const POW = `\n // isModRound1 has 1 for components with round(mod(b, 2.0)) == 1, 0 otherwise.\n vec4 isModRound1 = vec4(equal(round(mod(b, 2.0)), ivec4(1)));\n vec4 multiplier = sign(a) * isModRound1 + (vec4(1.0) - isModRound1);\n vec4 result = multiplier * pow(abs(a), b);\n\n // Ensure that a^0 = 1, including 0^0 = 1 as this correspond to TF and JS\n bvec4 isExpZero = equal(b, vec4(0.0));\n result.r = isExpZero.r ? 1.0 : result.r;\n result.g = isExpZero.g ? 1.0 : result.g;\n result.b = isExpZero.b ? 1.0 : result.b;\n result.a = isExpZero.a ? 
1.0 : result.a;\n\n vec4 isNaN = vec4(lessThan(a, vec4(0.0))) * vec4(lessThan(floor(b), b));\n ` +\n CHECK_NAN_SNIPPET + `\n return result;\n`;\nexport const PRELU = `\n vec4 aLessThanZero = vec4(lessThan(a, vec4(0.)));\n return (aLessThanZero * (b * a)) + ((vec4(1.0) - aLessThanZero) * a);\n`;\nexport const ELU_DER = `\n vec4 bGTEZero = vec4(greaterThanEqual(b, vec4(0.)));\n return (bGTEZero * a) + ((vec4(1.0) - bGTEZero) * (a * (b + vec4(1.0))));\n`;\nexport const EQUAL = `\n return vec4(equal(a, b));\n`;\nexport const NOT_EQUAL = `\n return vec4(notEqual(a, b));\n`;\nexport const LESS = `\n return vec4(lessThan(a, b));\n`;\nexport const LESS_EQUAL = `\n return vec4(lessThanEqual(a, b));\n`;\nexport const GREATER = `\n return vec4(greaterThan(a, b));\n`;\nexport const GREATER_EQUAL = `\n return vec4(greaterThanEqual(a, b));\n`;\nexport const LOGICAL_AND = `\n return vec4(\n vec4(greaterThanEqual(a, vec4(1.0))) *\n vec4(greaterThanEqual(b, vec4(1.0))));\n`;\nexport const LOGICAL_OR = `\n return min(\n vec4(greaterThanEqual(a, vec4(1.0))) +\n vec4(greaterThanEqual(b, vec4(1.0))),\n vec4(1.0));\n`;\nexport const MAX = `\n vec4 result = vec4(max(a, b));\n vec4 isNaN = min(vec4(isnan(a)) + vec4(isnan(b)), vec4(1.0));\n ` +\n CHECK_NAN_SNIPPET + `\n return result;\n`;\nexport const MIN = `\n vec4 result = vec4(min(a, b));\n vec4 isNaN = min(vec4(isnan(a)) + vec4(isnan(b)), vec4(1.0));\n ` +\n CHECK_NAN_SNIPPET + `\n return result;\n`;\nexport const MOD = `\n vec4 result = mod(a, b);\n vec4 isNaN = vec4(equal(b, vec4(0.0)));\n ` +\n CHECK_NAN_SNIPPET + `\n return result;\n`;\nexport class BinaryOpPackedProgram {\n constructor(op, aShape, bShape, checkOutOfBounds = false) {\n this.variableNames = ['A', 'B'];\n this.supportsBroadcasting = true;\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = backend_util.assertAndGetBroadcastShape(aShape, bShape);\n const rank = this.outputShape.length;\n let checkOutOfBoundsString = '';\n if (checkOutOfBounds) {\n if (rank === 0 || util.sizeFromShape(this.outputShape) === 1) {\n checkOutOfBoundsString = `\n result.y = 0.;\n result.z = 0.;\n result.w = 0.;\n `;\n }\n else {\n const dtype = getCoordsDataType(rank);\n checkOutOfBoundsString = `\n ${dtype} coords = getOutputCoords();\n `;\n if (rank === 1) {\n checkOutOfBoundsString += `\n result.y = (coords + 1) >= ${this.outputShape[0]} ? 0. : result.y;\n result.z = 0.;\n result.w = 0.;\n `;\n }\n else {\n const channels = getChannels('coords', rank);\n checkOutOfBoundsString += `\n bool nextRowOutOfBounds =\n (${channels[rank - 2]} + 1) >= ${this.outputShape[rank - 2]};\n bool nextColOutOfBounds =\n (${channels[rank - 1]} + 1) >= ${this.outputShape[rank - 1]};\n result.y = nextColOutOfBounds ? 0. : result.y;\n result.z = nextRowOutOfBounds ? 0. : result.z;\n result.w = nextColOutOfBounds || nextRowOutOfBounds ? 0. : result.w;\n `;\n }\n }\n }\n this.userCode = `\n vec4 binaryOperation(vec4 a, vec4 b) {\n ${op}\n }\n\n void main() {\n vec4 a = getAAtOutCoords();\n vec4 b = getBAtOutCoords();\n\n vec4 result = binaryOperation(a, b);\n ${checkOutOfBoundsString}\n\n setOutput(result);\n }\n `;\n }\n}\n//# sourceMappingURL=binaryop_packed_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class ClipProgram {\n constructor(aShape) {\n this.variableNames = ['A'];\n this.outputShape = aShape;\n this.userCode = `\n uniform float minVal;\n uniform float maxVal;\n\n void main() {\n float value = getAAtOutCoords();\n if (isnan(value)) {\n setOutput(value);\n return;\n }\n\n setOutput(clamp(value, minVal, maxVal));\n }\n `;\n }\n getCustomSetupFunc(min, max) {\n return (gpgpu, webGLProgram) => {\n if (this.minLoc == null) {\n this.minLoc = gpgpu.getUniformLocationNoThrow(webGLProgram, 'minVal');\n this.maxLoc = gpgpu.getUniformLocationNoThrow(webGLProgram, 'maxVal');\n }\n gpgpu.gl.uniform1f(this.minLoc, min);\n gpgpu.gl.uniform1f(this.maxLoc, max);\n };\n }\n}\n//# sourceMappingURL=clip_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class ClipPackedProgram {\n constructor(aShape) {\n this.variableNames = ['A'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = aShape;\n this.userCode = `\n uniform float minVal;\n uniform float maxVal;\n\n void main() {\n vec4 value = getAAtOutCoords();\n\n if (any(isnan(value))) {\n setOutput(value);\n return;\n }\n\n setOutput(clamp(value, vec4(minVal), vec4(maxVal)));\n }\n `;\n }\n getCustomSetupFunc(min, max) {\n return (gpgpu, webGLProgram) => {\n if (this.minLoc == null) {\n this.minLoc = gpgpu.getUniformLocationNoThrow(webGLProgram, 'minVal');\n this.maxLoc = gpgpu.getUniformLocationNoThrow(webGLProgram, 'maxVal');\n }\n gpgpu.gl.uniform1f(this.minLoc, min);\n gpgpu.gl.uniform1f(this.maxLoc, max);\n };\n }\n}\n//# sourceMappingURL=clip_packed_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class ComplexAbsProgram {\n constructor(shape) {\n this.variableNames = ['real', 'imag'];\n this.outputShape = shape;\n this.userCode = `\n void main() {\n float re = abs(getRealAtOutCoords());\n float im = abs(getImagAtOutCoords());\n float mx = max(re, im);\n\n // sadly the length function in glsl is not underflow-safe\n // (at least not on Intel GPUs). So the safe solution is\n // to ensure underflow-safety in all cases.\n setOutput(\n mx == 0.0 ? 0.0 : mx * length(vec2(1, min(re, im)/mx))\n );\n }\n `;\n }\n}\n//# sourceMappingURL=complex_abs_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class Conv2DDerFilterProgram {\n constructor(convInfo) {\n this.variableNames = ['x', 'dy'];\n this.outputShape = convInfo.filterShape;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n const isChannelsLast = convInfo.dataFormat === 'channelsLast';\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int wR = coords.x;\n int wC = coords.y;\n int d1 = coords.z;\n int d2 = coords.w;\n\n // Convolve x(?, ?, d1) with dy(:, :, d2) to get dw(wR, wC, d1, d2).\n // ? = to be determined. 
: = across all values in that axis.\n float dotProd = 0.0;\n\n for (int b = 0; b < ${convInfo.batchSize}; b++) {\n for (int yR = 0; yR < ${convInfo.outHeight}; yR++) {\n int xR = wR + yR * ${strideHeight} - ${padTop};\n\n if (xR < 0 || xR >= ${convInfo.inHeight}) {\n continue;\n }\n\n for (int yC = 0; yC < ${convInfo.outWidth}; yC++) {\n int xC = wC + yC * ${strideWidth} - ${padLeft};\n\n if (xC < 0 || xC >= ${convInfo.inWidth}) {\n continue;\n }\n\n if (${isChannelsLast}) {\n float dyValue = getDy(b, yR, yC, d2);\n float xValue = getX(b, xR, xC, d1);\n dotProd += (xValue * dyValue);\n } else {\n float dyValue = getDy(b, d2, yR, yC);\n float xValue = getX(b, d1, xR, xC);\n dotProd += (xValue * dyValue);\n }\n\n }\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\nexport class Conv2DDerInputProgram {\n constructor(convInfo) {\n this.variableNames = ['dy', 'W'];\n this.outputShape = convInfo.inShape;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const isChannelsLast = convInfo.dataFormat === 'channelsLast';\n const padTop = filterHeight - 1 - convInfo.padInfo.top;\n const padLeft = filterWidth - 1 - convInfo.padInfo.left;\n const rowDim = isChannelsLast ? 1 : 2;\n const colDim = isChannelsLast ? 2 : 3;\n const channelDim = isChannelsLast ? 3 : 1;\n this.userCode = `\n const ivec2 pads = ivec2(${padTop}, ${padLeft});\n\n void main() {\n ivec4 coords = getOutputCoords();\n int batch = coords[0];\n int d1 = coords[${channelDim}];\n\n ivec2 dyCorner = ivec2(coords[${rowDim}], coords[${colDim}]) - pads;\n int dyRCorner = dyCorner.x;\n int dyCCorner = dyCorner.y;\n\n // Convolve dy(?, ?, d2) with w(:, :, d1, d2) to compute dx(xR, xC, d1).\n // ? = to be determined. 
: = across all values in that axis.\n float dotProd = 0.0;\n for (int wR = 0; wR < ${filterHeight}; wR++) {\n float dyR = float(dyRCorner + wR) / ${strideHeight}.0;\n\n if (dyR < 0.0 || dyR >= ${convInfo.outHeight}.0 || fract(dyR) > 0.0) {\n continue;\n }\n int idyR = int(dyR);\n\n int wRPerm = ${filterHeight} - 1 - wR;\n\n for (int wC = 0; wC < ${filterWidth}; wC++) {\n float dyC = float(dyCCorner + wC) / ${strideWidth}.0;\n\n if (dyC < 0.0 || dyC >= ${convInfo.outWidth}.0 ||\n fract(dyC) > 0.0) {\n continue;\n }\n int idyC = int(dyC);\n\n int wCPerm = ${filterWidth} - 1 - wC;\n\n for (int d2 = 0; d2 < ${convInfo.outChannels}; d2++) {\n\n if (${isChannelsLast}) {\n float xValue = getDy(batch, idyR, idyC, d2);\n float wValue = getW(wRPerm, wCPerm, d1, d2);\n dotProd += xValue * wValue;\n } else {\n float xValue = getDy(batch, d2, idyR, idyC);\n float wValue = getW(wRPerm, wCPerm, d1, d2);\n dotProd += xValue * wValue;\n }\n\n }\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\nexport class Conv3DDerFilterProgram {\n constructor(convInfo) {\n this.variableNames = ['x', 'dy'];\n this.outputShape = convInfo.filterShape;\n const strideDepth = convInfo.strideDepth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const padFront = convInfo.padInfo.front;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n this.userCode = `\n void main() {\n ivec5 coords = getOutputCoords();\n int wF = coords.x;\n int wR = coords.y;\n int wC = coords.z;\n int d1 = coords.w;\n int d2 = coords.u;\n\n float dotProd = 0.0;\n\n for (int b = 0; b < ${convInfo.batchSize}; b++) {\n for (int yF = 0; yF < ${convInfo.outDepth}; yF++) {\n int xF = wF + yF * ${strideDepth} - ${padFront};\n\n if (xF < 0 || xF >= ${convInfo.inDepth}) {\n continue;\n }\n\n for (int yR = 0; yR < ${convInfo.outHeight}; yR++) {\n int xR = wR + yR * ${strideHeight} - ${padTop};\n\n if (xR < 0 || xR >= ${convInfo.inHeight}) {\n continue;\n }\n\n for (int yC = 0; yC < ${convInfo.outWidth}; yC++) {\n int xC = wC + yC * ${strideWidth} - ${padLeft};\n\n if (xC < 0 || xC >= ${convInfo.inWidth}) {\n continue;\n }\n\n float dyValue = getDy(b, yF, yR, yC, d2);\n float xValue = getX(b, xF, xR, xC, d1);\n dotProd += (xValue * dyValue);\n }\n }\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\nexport class Conv3DDerInputProgram {\n constructor(convInfo) {\n this.variableNames = ['dy', 'W'];\n this.outputShape = convInfo.inShape;\n const filterDepth = convInfo.filterDepth;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const strideDepth = convInfo.strideDepth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const padFront = filterDepth - 1 - convInfo.padInfo.front;\n const padTop = filterHeight - 1 - convInfo.padInfo.top;\n const padLeft = filterWidth - 1 - convInfo.padInfo.left;\n this.userCode = `\n const ivec3 pads = ivec3(${padFront}, ${padTop}, ${padLeft});\n\n void main() {\n ivec5 coords = getOutputCoords();\n int batch = coords.x;\n int d1 = coords.u;\n\n\n ivec3 dyCorner = ivec3(coords.y, coords.z, coords.w) - pads;\n int dyFCorner = dyCorner.x;\n int dyRCorner = dyCorner.y;\n int dyCCorner = dyCorner.z;\n\n float dotProd = 0.0;\n for (int wF = 0; wF < ${filterDepth}; wF++) {\n float dyF = float(dyFCorner + wF) / ${strideDepth}.0;\n\n if (dyF < 0.0 || dyF >= ${convInfo.outDepth}.0 || fract(dyF) > 0.0) {\n continue;\n }\n int idyF = int(dyF);\n\n int wFPerm = ${filterDepth} - 1 - wF;\n\n for (int wR = 
0; wR < ${filterHeight}; wR++) {\n float dyR = float(dyRCorner + wR) / ${strideHeight}.0;\n\n if (dyR < 0.0 || dyR >= ${convInfo.outHeight}.0 ||\n fract(dyR) > 0.0) {\n continue;\n }\n int idyR = int(dyR);\n\n int wRPerm = ${filterHeight} - 1 - wR;\n\n for (int wC = 0; wC < ${filterWidth}; wC++) {\n float dyC = float(dyCCorner + wC) / ${strideWidth}.0;\n\n if (dyC < 0.0 || dyC >= ${convInfo.outWidth}.0 ||\n fract(dyC) > 0.0) {\n continue;\n }\n int idyC = int(dyC);\n\n int wCPerm = ${filterWidth} - 1 - wC;\n\n for (int d2 = 0; d2 < ${convInfo.outChannels}; d2++) {\n float xValue = getDy(batch, idyF, idyR, idyC, d2);\n float wValue = getW(wFPerm, wRPerm, wCPerm, d1, d2);\n dotProd += xValue * wValue;\n }\n }\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\n//# sourceMappingURL=conv_backprop_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class DepthwiseConv2DDerFilterProgram {\n constructor(convInfo) {\n this.variableNames = ['x', 'dy'];\n this.outputShape = convInfo.filterShape;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n const channelMul = convInfo.outChannels / convInfo.inChannels;\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int wR = coords.x;\n int wC = coords.y;\n int d1 = coords.z;\n int dm = coords.w;\n int d2 = d1 * ${channelMul} + dm;\n\n float dotProd = 0.0;\n\n // TO DO: Vec4 over the batch size\n for (int b = 0; b < ${convInfo.batchSize}; b++) {\n for (int yR = 0; yR < ${convInfo.outHeight}; yR++) {\n int xR = wR + yR * ${strideHeight} - ${padTop};\n\n if (xR < 0 || xR >= ${convInfo.inHeight}) {\n continue;\n }\n\n for (int yC = 0; yC < ${convInfo.outWidth}; yC++) {\n int xC = wC + yC * ${strideWidth} - ${padLeft};\n\n if (xC < 0 || xC >= ${convInfo.inWidth}) {\n continue;\n }\n\n float dyValue = getDy(b, yR, yC, d2);\n float xValue = getX(b, xR, xC, d1);\n dotProd += (xValue * dyValue);\n }\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\nexport class DepthwiseConv2DDerInputProgram {\n constructor(convInfo) {\n this.variableNames = ['dy', 'W'];\n this.outputShape = convInfo.inShape;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const padTop = filterHeight - 1 - convInfo.padInfo.top;\n const padLeft = filterWidth - 1 - convInfo.padInfo.left;\n const channelMul = convInfo.outChannels / convInfo.inChannels;\n this.userCode = `\n const ivec2 pads = ivec2(${padTop}, ${padLeft});\n\n void main() {\n ivec4 coords = getOutputCoords();\n int batch = coords[0];\n int d1 = coords[3];\n ivec2 dyCorner = coords.yz - pads;\n int dyRCorner = dyCorner.x;\n int dyCCorner = dyCorner.y;\n\n float dotProd = 0.0;\n\n for (int wR = 
0; wR < ${filterHeight}; wR++) {\n float dyR = float(dyRCorner + wR) / ${strideHeight}.0;\n\n if (dyR < 0.0 || dyR >= ${convInfo.outHeight}.0 || fract(dyR) > 0.0) {\n continue;\n }\n int idyR = int(dyR);\n\n int wRPerm = ${filterHeight} - 1 - wR;\n\n for (int wC = 0; wC < ${filterWidth}; wC++) {\n float dyC = float(dyCCorner + wC) / ${strideWidth}.0;\n\n if (dyC < 0.0 || dyC >= ${convInfo.outWidth}.0 ||\n fract(dyC) > 0.0) {\n continue;\n }\n int idyC = int(dyC);\n\n int wCPerm = ${filterWidth} - 1 - wC;\n\n // TO DO: Vec4 over the channelMul\n for (int dm = 0; dm < ${channelMul}; dm++) {\n int d2 = d1 * ${channelMul} + dm;\n float xValue = getDy(batch, idyR, idyC, d2);\n float wValue = getW(wRPerm, wCPerm, d1, dm);\n dotProd += xValue * wValue;\n }\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\n//# sourceMappingURL=conv_backprop_gpu_depthwise.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class Conv2DProgram {\n constructor(convInfo, addBias = false, activation = null, hasPreluActivationWeights = false) {\n this.variableNames = ['x', 'W'];\n this.outputShape = convInfo.outShape;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const inputDepthNearestVec4 = Math.floor(convInfo.inChannels / 4) * 4;\n const inputDepthVec4Remainder = convInfo.inChannels % 4;\n const isChannelsLast = convInfo.dataFormat === 'channelsLast';\n const rowDim = isChannelsLast ? 1 : 2;\n const colDim = isChannelsLast ? 2 : 3;\n const channelDim = isChannelsLast ? 3 : 1;\n let activationSnippet = '', applyActivationSnippet = '';\n if (activation) {\n if (hasPreluActivationWeights) {\n activationSnippet = `float activation(float a) {\n float b = getPreluActivationWeightsAtOutCoords();\n ${activation}\n }`;\n }\n else {\n activationSnippet = `\n float activation(float x) {\n ${activation}\n }\n `;\n }\n applyActivationSnippet = `result = activation(result);`;\n }\n const addBiasSnippet = addBias ? 
'result += getBiasAtOutCoords();' : '';\n if (addBias) {\n this.variableNames.push('bias');\n }\n if (hasPreluActivationWeights) {\n this.variableNames.push('preluActivationWeights');\n }\n this.userCode = `\n ${activationSnippet}\n\n const ivec2 strides = ivec2(${strideHeight}, ${strideWidth});\n const ivec2 pads = ivec2(${padTop}, ${padLeft});\n\n void main() {\n ivec4 coords = getOutputCoords();\n int batch = coords[0];\n int d2 = coords[${channelDim}];\n\n ivec2 xRCCorner =\n ivec2(coords[${rowDim}], coords[${colDim}]) * strides - pads;\n int xRCorner = xRCCorner.x;\n int xCCorner = xRCCorner.y;\n\n // Convolve x(?, ?, d1) with w(:, :, d1, d2) to get y(yR, yC, d2).\n // ? = to be determined. : = across all values in that axis.\n float dotProd = 0.0;\n for (int wR = 0; wR < ${filterHeight}; wR++) {\n int xR = xRCorner + wR * ${dilationHeight};\n\n if (xR < 0 || xR >= ${convInfo.inHeight}) {\n continue;\n }\n\n for (int wC = 0; wC < ${filterWidth}; wC++) {\n int xC = xCCorner + wC * ${dilationWidth};\n\n if (xC < 0 || xC >= ${convInfo.inWidth}) {\n continue;\n }\n\n for (int d1 = 0; d1 < ${inputDepthNearestVec4}; d1 += 4) {\n vec4 wValues = vec4(\n getW(wR, wC, d1, d2),\n getW(wR, wC, d1 + 1, d2),\n getW(wR, wC, d1 + 2, d2),\n getW(wR, wC, d1 + 3, d2)\n );\n\n if (${isChannelsLast}) {\n vec4 xValues = vec4(\n getX(batch, xR, xC, d1),\n getX(batch, xR, xC, d1 + 1),\n getX(batch, xR, xC, d1 + 2),\n getX(batch, xR, xC, d1 + 3)\n );\n dotProd += dot(xValues, wValues);\n } else {\n vec4 xValues = vec4(\n getX(batch, d1, xR, xC),\n getX(batch, d1 + 1, xR, xC),\n getX(batch, d1 + 2, xR, xC),\n getX(batch, d1 + 3, xR, xC)\n );\n dotProd += dot(xValues, wValues);\n }\n }\n\n if (${inputDepthVec4Remainder === 1}) {\n\n if (${isChannelsLast}) {\n dotProd +=\n getX(batch, xR, xC, ${inputDepthNearestVec4}) *\n getW(wR, wC, ${inputDepthNearestVec4}, d2);\n } else {\n dotProd +=\n getX(batch, ${inputDepthNearestVec4}, xR, xC) *\n getW(wR, wC, ${inputDepthNearestVec4}, d2);\n }\n\n } else if (${inputDepthVec4Remainder === 2}) {\n vec2 wValues = vec2(\n getW(wR, wC, ${inputDepthNearestVec4}, d2),\n getW(wR, wC, ${inputDepthNearestVec4} + 1, d2)\n );\n\n if (${isChannelsLast}) {\n vec2 xValues = vec2(\n getX(batch, xR, xC, ${inputDepthNearestVec4}),\n getX(batch, xR, xC, ${inputDepthNearestVec4} + 1)\n );\n dotProd += dot(xValues, wValues);\n } else {\n vec2 xValues = vec2(\n getX(batch, ${inputDepthNearestVec4}, xR, xC),\n getX(batch, ${inputDepthNearestVec4} + 1, xR, xC)\n );\n dotProd += dot(xValues, wValues);\n }\n\n } else if (${inputDepthVec4Remainder === 3}) {\n vec3 wValues = vec3(\n getW(wR, wC, ${inputDepthNearestVec4}, d2),\n getW(wR, wC, ${inputDepthNearestVec4} + 1, d2),\n getW(wR, wC, ${inputDepthNearestVec4} + 2, d2)\n );\n\n if (${isChannelsLast}) {\n vec3 xValues = vec3(\n getX(batch, xR, xC, ${inputDepthNearestVec4}),\n getX(batch, xR, xC, ${inputDepthNearestVec4} + 1),\n getX(batch, xR, xC, ${inputDepthNearestVec4} + 2)\n );\n dotProd += dot(xValues, wValues);\n } else {\n vec3 xValues = vec3(\n getX(batch, ${inputDepthNearestVec4}, xR, xC),\n getX(batch, ${inputDepthNearestVec4} + 1, xR, xC),\n getX(batch, ${inputDepthNearestVec4} + 2, xR, xC)\n );\n dotProd += dot(xValues, wValues);\n }\n\n }\n }\n }\n\n float result = dotProd;\n ${addBiasSnippet}\n ${applyActivationSnippet}\n setOutput(result);\n }\n `;\n }\n}\nexport class Conv3DProgram {\n constructor(convInfo) {\n this.variableNames = ['x', 'W'];\n this.outputShape = convInfo.outShape;\n const padFront = convInfo.padInfo.front;\n 
const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n const strideDepth = convInfo.strideDepth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationDepth = convInfo.dilationDepth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const filterDepth = convInfo.filterDepth;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const inputDepthNearestVec4 = Math.floor(convInfo.inChannels / 4) * 4;\n const inputDepthVec4Remainder = convInfo.inChannels % 4;\n this.userCode = `\n const ivec3 strides = ivec3(${strideDepth}, ${strideHeight}, ${strideWidth});\n const ivec3 pads = ivec3(${padFront}, ${padTop}, ${padLeft});\n\n void main() {\n ivec5 coords = getOutputCoords();\n int batch = coords.x;\n int d2 = coords.u;\n\n ivec3 xFRCCorner = ivec3(coords.y, coords.z, coords.w) * strides - pads;\n int xFCorner = xFRCCorner.x;\n int xRCorner = xFRCCorner.y;\n int xCCorner = xFRCCorner.z;\n\n // Convolve x(?, ?, ?, d1) with w(:, :, :, d1, d2) to get\n // y(yF, yR, yC, d2). ? = to be determined. : = across all\n // values in that axis.\n float dotProd = 0.0;\n for (int wF = 0; wF < ${filterDepth}; wF++) {\n int xF = xFCorner + wF * ${dilationDepth};\n\n if (xF < 0 || xF >= ${convInfo.inDepth}) {\n continue;\n }\n\n for (int wR = 0; wR < ${filterHeight}; wR++) {\n int xR = xRCorner + wR * ${dilationHeight};\n\n if (xR < 0 || xR >= ${convInfo.inHeight}) {\n continue;\n }\n\n for (int wC = 0; wC < ${filterWidth}; wC++) {\n int xC = xCCorner + wC * ${dilationWidth};\n\n if (xC < 0 || xC >= ${convInfo.inWidth}) {\n continue;\n }\n\n for (int d1 = 0; d1 < ${inputDepthNearestVec4}; d1 += 4) {\n vec4 xValues = vec4(\n getX(batch, xF, xR, xC, d1),\n getX(batch, xF, xR, xC, d1 + 1),\n getX(batch, xF, xR, xC, d1 + 2),\n getX(batch, xF, xR, xC, d1 + 3)\n );\n vec4 wValues = vec4(\n getW(wF, wR, wC, d1, d2),\n getW(wF, wR, wC, d1 + 1, d2),\n getW(wF, wR, wC, d1 + 2, d2),\n getW(wF, wR, wC, d1 + 3, d2)\n );\n\n dotProd += dot(xValues, wValues);\n }\n\n if (${inputDepthVec4Remainder === 1}) {\n dotProd +=\n getX(batch, xF, xR, xC, ${inputDepthNearestVec4}) *\n getW(wF, wR, wC, ${inputDepthNearestVec4}, d2);\n } else if (${inputDepthVec4Remainder === 2}) {\n vec2 xValues = vec2(\n getX(batch, xF, xR, xC, ${inputDepthNearestVec4}),\n getX(batch, xF, xR, xC, ${inputDepthNearestVec4} + 1)\n );\n vec2 wValues = vec2(\n getW(wF, wR, wC, ${inputDepthNearestVec4}, d2),\n getW(wF, wR, wC, ${inputDepthNearestVec4} + 1, d2)\n );\n dotProd += dot(xValues, wValues);\n } else if (${inputDepthVec4Remainder === 3}) {\n vec3 xValues = vec3(\n getX(batch, xF, xR, xC, ${inputDepthNearestVec4}),\n getX(batch, xF, xR, xC, ${inputDepthNearestVec4} + 1),\n getX(batch, xF, xR, xC, ${inputDepthNearestVec4} + 2)\n );\n vec3 wValues = vec3(\n getW(wF, wR, wC, ${inputDepthNearestVec4}, d2),\n getW(wF, wR, wC, ${inputDepthNearestVec4} + 1, d2),\n getW(wF, wR, wC, ${inputDepthNearestVec4} + 2, d2)\n );\n dotProd += dot(xValues, wValues);\n }\n }\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\n//# sourceMappingURL=conv_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class DepthwiseConv2DProgram {\n constructor(convInfo, addBias = false, activation = null, hasPreluActivation = false) {\n this.variableNames = ['x', 'W'];\n this.outputShape = convInfo.outShape;\n const xNumRows = convInfo.inHeight;\n const xNumCols = convInfo.inWidth;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const channelMul = convInfo.outChannels / convInfo.inChannels;\n let activationSnippet = '', applyActivationSnippet = '';\n if (activation) {\n if (hasPreluActivation) {\n activationSnippet = `float activation(float a) {\n float b = getPreluActivationWeightsAtOutCoords();\n ${activation}\n }`;\n }\n else {\n activationSnippet = `\n float activation(float x) {\n ${activation}\n }\n `;\n }\n applyActivationSnippet = `result = activation(result);`;\n }\n const addBiasSnippet = addBias ? 'result += getBiasAtOutCoords();' : '';\n if (addBias) {\n this.variableNames.push('bias');\n }\n if (hasPreluActivation) {\n this.variableNames.push('preluActivationWeights');\n }\n this.userCode = `\n ${activationSnippet}\n\n const ivec2 strides = ivec2(${strideHeight}, ${strideWidth});\n const ivec2 pads = ivec2(${padTop}, ${padLeft});\n\n void main() {\n ivec4 coords = getOutputCoords();\n int batch = coords.x;\n ivec2 xRCCorner = coords.yz * strides - pads;\n int d2 = coords.w;\n int d1 = d2 / ${channelMul};\n int q = d2 - d1 * ${channelMul};\n\n int xRCorner = xRCCorner.x;\n int xCCorner = xRCCorner.y;\n\n // Convolve x(?, ?, d1) with w(:, :, d1, q) to get y(yR, yC, d2).\n // ? = to be determined. : = across all values in that axis.\n float dotProd = 0.0;\n // TO DO(dsmilkov): Flatten the two for loops and vec4 the operations.\n for (int wR = 0; wR < ${filterHeight}; wR++) {\n int xR = xRCorner + wR * ${dilationHeight};\n\n if (xR < 0 || xR >= ${xNumRows}) {\n continue;\n }\n\n for (int wC = 0; wC < ${filterWidth}; wC++) {\n int xC = xCCorner + wC * ${dilationWidth};\n\n if (xC < 0 || xC >= ${xNumCols}) {\n continue;\n }\n\n float xVal = getX(batch, xR, xC, d1);\n float wVal = getW(wR, wC, d1, q);\n dotProd += xVal * wVal;\n }\n }\n\n float result = dotProd;\n ${addBiasSnippet}\n ${applyActivationSnippet}\n setOutput(result);\n }\n `;\n }\n}\n//# sourceMappingURL=conv_gpu_depthwise.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nexport class DepthwiseConvPacked2DProgram {\n constructor(convInfo, addBias = false, activation = null, hasPreluActivation = false) {\n this.variableNames = ['x', 'W'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = convInfo.outShape;\n const xNumRows = convInfo.inHeight;\n const xNumCols = convInfo.inWidth;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const texelsAcross = filterWidth;\n let mainLoop = `int xR; int xC; int xCOffset;`;\n for (let r = 0; r < filterHeight; r++) {\n for (let c = 0; c < filterWidth; c++) {\n mainLoop += `\n vec4 xTexelR${r}C${c * 2} = vec4(0.);\n vec4 wR${r}C${c} = vec4(0.);\n vec4 xR${r}C${c} = vec4(0.);`;\n }\n }\n /**\n * This vectorized implementation works by gathering the values needed for\n * each output channel's dot product into vec4's and then multiplying them\n * all together (this happens in the final double for-loop below). 
Most of\n * the main loop consists of constructing these vec4's with the minimum\n * number of texture2D calls, which means making use of all four returned\n * values from a texture2D call at once.\n */\n for (let r = 0; r < filterHeight; r++) {\n for (let texelC = 0; texelC < texelsAcross; texelC++) {\n const c = texelC * 2;\n mainLoop += `\n xR = xRCorner + ${r * dilationHeight};\n xC = xCCorner + ${c * dilationWidth};\n `;\n if (strideWidth === 1) {\n if (c < filterWidth) {\n // If padding is odd, the outer texels have to be composed.\n if (padLeft % 2 === 1) {\n // TODO: Ensure vec4 previous does not result in redundant sample,\n // and avoid setting xTexelRC's that exceed the boundary in the\n // first place rather than resetting them to vec4(0)).\n // To compute xCOffset:\n // - If padding is odd, we must add 1 to ensure we ask for an\n // even-numbered row.\n // - We subtract 2 to access the previous texel.\n mainLoop += `\n xCOffset = xC + 1;\n if(xR >= 0 && xR < ${xNumRows} && xCOffset >= 0 && xCOffset < ${xNumCols}) {\n xTexelR${r}C${c} = getX(batch, xR, xCOffset, d1);\n\n // Need to manually clear unused channels in case\n // we're reading from recycled texture.\n if(xCOffset + 1 >= ${xNumCols}) {\n xTexelR${r}C${c}.zw = vec2(0.);\n }\n } else {\n xTexelR${r}C${c} = vec4(0.);\n }\n\n xCOffset = xC + 1 - 2;\n if(xR >= 0 && xR < ${xNumRows} && xCOffset >= 0 && xCOffset < ${xNumCols}) {\n vec4 previous = getX(batch, xR, xCOffset, d1);\n\n // Need to manually clear unused channels in case\n // we're reading from recycled texture.\n if(xCOffset + 1 >= ${xNumCols}) {\n previous.zw = vec2(0.);\n }\n\n xR${r}C${c} = vec4(previous.zw, xTexelR${r}C${c}.xy);\n } else {\n xR${r}C${c} = vec4(0, 0, xTexelR${r}C${c}.xy);\n }\n `;\n }\n else {\n // Padding is even, so xRC corresponds to a single texel.\n mainLoop += `\n if(xR >= 0 && xR < ${xNumRows} && xC >= 0 && xC < ${xNumCols}) {\n xTexelR${r}C${c} = getX(batch, xR, xC, d1);\n } else {\n xTexelR${r}C${c} = vec4(0.);\n }\n\n xR${r}C${c} = xTexelR${r}C${c};\n `;\n }\n if (c + 1 < filterWidth) {\n // If dilation is even, the second entry should match the first\n // (either both are composed or both are single samples). 
But if\n // dilation is odd, then the second entry should be the opposite\n // of the first (if the first is composed, the second is a single\n // sample, and vice versa.)\n const nextTexelOffset = padLeft % 2 === 0 ?\n util.nearestLargerEven(dilationWidth) :\n dilationWidth;\n if ((dilationWidth % 2 === 0 && padLeft % 2 === 1) ||\n (dilationWidth % 2 !== 0 && padLeft % 2 !== 1)) {\n mainLoop += `\n xCOffset = xC + ${padLeft % 2} + ${nextTexelOffset};\n\n if(xR >= 0 && xR < ${xNumRows} &&\n xCOffset >= 0 && xCOffset < ${xNumCols}) {\n xTexelR${r}C${c + 2} = getX(batch, xR, xCOffset, d1);\n }\n `;\n // If dilation > 1 then the xRC's will not be able to share any\n // values, so each xRC will require two unique calls to getX.\n if (dilationWidth > 1) {\n mainLoop += `\n xCOffset -= 2;\n if(xR >= 0 && xR < ${xNumRows} &&\n xCOffset >= 0 && xCOffset < ${xNumCols}) {\n xTexelR${r}C${c} = getX(batch, xR, xCOffset, d1);\n } else {\n xTexelR${r}C${c} = vec4(0.);\n }\n `;\n }\n mainLoop += `\n xR${r}C${c + 1} = vec4(\n xTexelR${r}C${c}.zw, xTexelR${r}C${c + 2}.xy);\n `;\n }\n else {\n mainLoop += `\n xCOffset = xC + ${nextTexelOffset};\n\n if(xR >= 0 && xR < ${xNumRows} &&\n xCOffset >= 0 && xCOffset < ${xNumCols}) {\n xTexelR${r}C${c + 2} = getX(batch, xR, xCOffset, d1);\n }\n\n xR${r}C${c + 1} = xTexelR${r}C${c + 2};\n `;\n }\n }\n }\n }\n else { // stride > 1\n if (c < filterWidth) {\n mainLoop += `\n if(xR >= 0 && xR < ${xNumRows}) {\n `;\n // Depending on whether padLeft is even or odd, we want either the\n // xy or zw channels from X texels for xR${r}C${c}. If padLeft is\n // even, xR${r}C${c + 1} is simply the zw channels of texels we've\n // already sampled. But if padLeft is odd, xR${r}C{$c + 1}.zw will\n // need to come from the xy channels of a new texel, hence the `vec4\n // final` initialized below.\n if (padLeft % 2 === 1) {\n mainLoop += `\n xCOffset = xC + 1 - ${strideWidth};\n if(xCOffset >= 0 && xCOffset < ${xNumCols}) {\n xTexelR${r}C${c} = getX(batch, xR, xCOffset, d1);\n } else {\n xTexelR${r}C${c} = vec4(0.);\n }\n\n if(xC + 1 >= 0 && xC + 1 < ${xNumCols}) {\n xTexelR${r}C${c + 2} = getX(batch, xR, xC + 1, d1);\n } else {\n xTexelR${r}C${c + 2} = vec4(0.);\n }\n\n xR${r}C${c} = vec4(\n xTexelR${r}C${c}.zw, xTexelR${r}C${c + 2}.zw);\n `;\n if (c + 1 < filterWidth) {\n mainLoop += `\n vec4 final = vec4(0.);\n xCOffset = xC + 1 + ${strideWidth};\n if(xCOffset >= 0 && xCOffset < ${xNumCols}) {\n final = getX(batch, xR, xCOffset, d1);\n }\n xR${r}C${c + 1} = vec4(xTexelR${r}C${c + 2}.xy, final.xy);\n `;\n }\n }\n else {\n mainLoop += `\n if(xC >= 0 && xC < ${xNumCols}) {\n xTexelR${r}C${c} = getX(batch, xR, xC, d1);\n } else {\n xTexelR${r}C${c} = vec4(0.);\n }\n\n xCOffset = xC + ${strideWidth};\n if(xCOffset >= 0 && xCOffset < ${xNumCols}) {\n xTexelR${r}C${c + 2} = getX(batch, xR, xCOffset, d1);\n } else {\n xTexelR${r}C${c + 2} = vec4(0.);\n }\n\n xR${r}C${c} = vec4(\n xTexelR${r}C${c}.xy, xTexelR${r}C${c + 2}.xy);\n `;\n if (c + 1 < filterWidth) {\n mainLoop += `\n xR${r}C${c + 1} = vec4(\n xTexelR${r}C${c}.zw, xTexelR${r}C${c + 2}.zw);\n `;\n }\n }\n mainLoop += `}`;\n }\n }\n if (c < filterWidth) {\n mainLoop += `\n vec4 wTexelR${r}C${c} = getW(${r}, ${c}, d1, q);\n wR${r}C${c} = vec4(wTexelR${r}C${c}.xz, wTexelR${r}C${c}.xz);\n `;\n if (c + 1 < filterWidth) {\n mainLoop += `\n vec4 wTexelR${r}C${c + 1} = getW(${r}, ${c + 1}, d1, q);\n wR${r}C${c + 1} =\n vec4(wTexelR${r}C${c + 1}.xz, wTexelR${r}C${c + 1}.xz);`;\n }\n }\n }\n }\n for (let r = 0; r < filterHeight; r++) {\n 
for (let c = 0; c < filterWidth; c++) {\n mainLoop += `dotProd += xR${r}C${c} * wR${r}C${c};`;\n }\n }\n let activationSnippet = '', applyActivationSnippet = '';\n if (activation) {\n if (hasPreluActivation) {\n activationSnippet = `vec4 activation(vec4 a) {\n vec4 b = getPreluActivationWeightsAtOutCoords();\n ${activation}\n }`;\n }\n else {\n activationSnippet = `vec4 activation(vec4 x) {\n ${activation}\n }`;\n }\n applyActivationSnippet = `result = activation(result);`;\n }\n const addBiasSnippet = addBias ? 'result += getBiasAtOutCoords();' : '';\n if (addBias) {\n this.variableNames.push('bias');\n }\n if (hasPreluActivation) {\n this.variableNames.push('preluActivationWeights');\n }\n this.userCode = `\n ${activationSnippet}\n\n const ivec2 strides = ivec2(${strideHeight}, ${strideWidth});\n const ivec2 pads = ivec2(${padTop}, ${padLeft});\n\n void main() {\n\n ivec4 coords = getOutputCoords();\n int batch = coords.x;\n ivec2 xRCCorner = coords.yz * strides - pads;\n int d2 = coords.w;\n int d1 = d2;\n int q = 0;\n int xRCorner = xRCCorner.x;\n int xCCorner = xRCCorner.y;\n\n vec4 dotProd = vec4(0.);\n\n ${mainLoop}\n\n vec4 result = dotProd;\n ${addBiasSnippet}\n ${applyActivationSnippet}\n setOutput(result);\n }\n `;\n }\n}\n//# sourceMappingURL=conv_packed_gpu_depthwise.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class CropAndResizeProgram {\n constructor(imageShape, boxShape, cropSize, method, extrapolationValue) {\n this.variableNames = ['Image', 'Boxes', 'BoxInd'];\n this.outputShape = [];\n const [batch, imageHeight, imageWidth, depth] = imageShape;\n const [numBoxes,] = boxShape;\n const [cropHeight, cropWidth] = cropSize;\n this.outputShape = [numBoxes, cropHeight, cropWidth, depth];\n const methodId = method === 'bilinear' ? 
1 : 0;\n const [inputHeightFloat, inputWidthFloat] = [`${imageHeight - 1}.0`, `${imageWidth - 1}.0`];\n const [heightRatio, heightScale, inY] = cropHeight > 1 ?\n [\n `${(imageHeight - 1) / (cropHeight - 1)}`,\n '(y2-y1) * height_ratio',\n `y1*${inputHeightFloat} + float(y)*(height_scale)`,\n ] :\n [\n '0.0',\n '0.0',\n `0.5 * (y1+y2) * ${inputHeightFloat}`,\n ];\n const [widthRatio, widthScale, inX] = cropWidth > 1 ?\n [\n `${(imageWidth - 1) / (cropWidth - 1)}`,\n '(x2-x1) * width_ratio',\n `x1*${inputWidthFloat} + float(x)*(width_scale)`,\n ] :\n [\n '0.0',\n '0.0',\n `0.5 * (x1+x2) * ${inputWidthFloat}`,\n ];\n // Reference implementation\n // tslint:disable-next-line:max-line-length\n // https://github.com/tensorflow/tensorflow/blob/master/tensorflow/core/kernels/crop_and_resize_op_gpu.cu.cc\n this.userCode = `\n const float height_ratio = float(${heightRatio});\n const float width_ratio = float(${widthRatio});\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int y = coords[1];\n int x = coords[2];\n int d = coords[3];\n\n // get box vals\n float y1 = getBoxes(b,0);\n float x1 = getBoxes(b,1);\n float y2 = getBoxes(b,2);\n float x2 = getBoxes(b,3);\n\n // get image in batch index\n int bInd = round(getBoxInd(b));\n if(bInd < 0 || bInd >= ${batch}) {\n return;\n }\n\n float height_scale = ${heightScale};\n float width_scale = ${widthScale};\n\n float in_y = ${inY};\n if( in_y < 0.0 || in_y > ${inputHeightFloat} ) {\n setOutput(float(${extrapolationValue}));\n return;\n }\n float in_x = ${inX};\n if( in_x < 0.0 || in_x > ${inputWidthFloat} ) {\n setOutput(float(${extrapolationValue}));\n return;\n }\n\n vec2 sourceFracIndexCR = vec2(in_x,in_y);\n if(${methodId} == 1) {\n // Compute the four integer indices.\n ivec2 sourceFloorCR = ivec2(sourceFracIndexCR);\n ivec2 sourceCeilCR = ivec2(ceil(sourceFracIndexCR));\n\n float topLeft = getImage(b, sourceFloorCR.y, sourceFloorCR.x, d);\n float bottomLeft = getImage(b, sourceCeilCR.y, sourceFloorCR.x, d);\n float topRight = getImage(b, sourceFloorCR.y, sourceCeilCR.x, d);\n float bottomRight = getImage(b, sourceCeilCR.y, sourceCeilCR.x, d);\n\n vec2 fracCR = sourceFracIndexCR - vec2(sourceFloorCR);\n\n float top = topLeft + (topRight - topLeft) * fracCR.x;\n float bottom = bottomLeft + (bottomRight - bottomLeft) * fracCR.x;\n float newValue = top + (bottom - top) * fracCR.y;\n setOutput(newValue);\n } else {\n // Compute the coordinators of nearest neighbor point.\n ivec2 sourceNearestCR = ivec2(floor(\n sourceFracIndexCR + vec2(0.5,0.5)));\n float newValue = getImage(b, sourceNearestCR.y, sourceNearestCR.x, d);\n setOutput(newValue);\n }\n }\n `;\n }\n}\n//# sourceMappingURL=crop_and_resize_gpu.js.map", "import { getCoordsDataType } from './shader_compiler';\nexport class CumSumProgram {\n constructor(shape, exclusive, reverse) {\n this.variableNames = ['x'];\n this.outputShape = shape;\n const rank = shape.length;\n const val = exclusive ? '0.0' : `getX(${getCoords(rank, 'coords')})`;\n const length = shape[shape.length - 1];\n let condition = '';\n let idxString = '';\n // When exclusive is set, the cumsum op becomes roll op that copies the\n // value from the previous index based on the direction specified by the\n // reverse flag.\n if (exclusive) {\n condition = reverse ? `end != ${length - 1}` : 'end != 0';\n idxString = reverse ? 'end + 1' : 'end - 1';\n }\n else {\n condition = reverse ? `end + pow2 < ${length}` : 'end >= pow2';\n idxString = (reverse ? 
'end + pow2' : 'end - pow2');\n }\n this.userCode = `\n uniform float index;\n void main() {\n ${getCoordsDataType(rank)} coords = getOutputCoords();\n int end = ${getFinalCoord(rank, 'coords')};\n float val = ${val};\n int pow2 = int(pow(2.0, index));\n if (${condition}) {\n int idx = ${idxString};\n ${getFinalCoord(rank, 'coords')} = idx;\n val += getX(${getCoords(rank, 'coords')});\n }\n setOutput(val);\n }\n `;\n }\n getCustomSetupFunc(index) {\n return (gpgpu, webGLProgram) => {\n if (this.index == null) {\n this.index = gpgpu.getUniformLocation(webGLProgram, 'index');\n }\n gpgpu.gl.uniform1f(this.index, index);\n };\n }\n}\nfunction getCoords(rank, name) {\n if (rank === 1) {\n return `${name}`;\n }\n else if (rank === 2) {\n return `${name}.x, ${name}.y`;\n }\n else if (rank === 3) {\n return `${name}.x, ${name}.y, ${name}.z`;\n }\n else if (rank === 4) {\n return `${name}.x, ${name}.y, ${name}.z, ${name}.w`;\n }\n else {\n throw Error(`Cumulative sum for rank ${rank} is not yet supported`);\n }\n}\nfunction getFinalCoord(rank, name) {\n if (rank === 1) {\n return `${name}`;\n }\n else if (rank === 2) {\n return `${name}.y`;\n }\n else if (rank === 3) {\n return `${name}.z`;\n }\n else if (rank === 4) {\n return `${name}.w`;\n }\n else {\n throw Error(`Cumulative sum for rank ${rank} is not yet supported`);\n }\n}\n//# sourceMappingURL=cumsum_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getGlslDifferences } from './glsl_version';\nimport * as shader_util from './shader_compiler_util';\nimport { getDenseTexShape, PackingScheme } from './tex_util';\nexport class DecodeMatrixProgram {\n constructor(outputShape) {\n this.variableNames = ['A'];\n this.packedInputs = false;\n this.packedOutput = true;\n this.outPackingScheme = PackingScheme.DENSE;\n const texShape = getDenseTexShape(outputShape);\n const glsl = getGlslDifferences();\n this.outputShape = outputShape;\n this.userCode = `\n ivec3 outCoordsFromFlatIndex(int index) {\n ${shader_util.getLogicalCoordinatesFromFlatIndex(['r', 'c', 'd'], outputShape)}\n return ivec3(r, c, d);\n }\n\n void main() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = 4 * (resTexRC.x * ${texShape[1]} + resTexRC.y);\n\n vec4 result = vec4(0.);\n\n for (int i=0; i<4; i++) {\n int flatIndex = index + i;\n ivec3 rc = outCoordsFromFlatIndex(flatIndex);\n result[i] = getA(rc.x, rc.y, rc.z);\n }\n\n ${glsl.output} = result;\n }\n `;\n }\n}\n//# sourceMappingURL=decode_matrix_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getGlslDifferences } from './glsl_version';\nimport * as shader_util from './shader_compiler_util';\nimport { getDenseTexShape, PackingScheme } from './tex_util';\nexport class DecodeMatrixPackedProgram {\n constructor(outputShape) {\n this.variableNames = ['A'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outPackingScheme = PackingScheme.DENSE;\n const texShape = getDenseTexShape(outputShape);\n const glsl = getGlslDifferences();\n this.outputShape = outputShape;\n this.userCode = `\n ivec3 outCoordsFromFlatIndex(int index) {\n ${shader_util.getLogicalCoordinatesFromFlatIndex(['r', 'c', 'd'], outputShape)}\n return ivec3(r, c, d);\n }\n\n void main() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = 4 * (resTexRC.x * ${texShape[1]} + resTexRC.y);\n\n vec4 result = vec4(0.);\n\n for (int i=0; i<4; i++) {\n int flatIndex = index + i;\n ivec3 rc = outCoordsFromFlatIndex(flatIndex);\n result[i] = getChannel(getA(rc.x, rc.y, rc.z), vec2(rc.y, rc.z));\n }\n\n ${glsl.output} = result;\n }\n `;\n }\n}\n//# sourceMappingURL=decode_matrix_packed_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class DepthToSpaceProgram {\n constructor(outputShape, blockSize, dataFormat) {\n this.variableNames = ['x'];\n this.outputShape = [];\n this.outputShape = outputShape;\n this.blockSize = blockSize;\n this.dataFormat = dataFormat;\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int h = ${this.getHeightCoordString()};\n int w = ${this.getWidthCoordString()};\n int d = ${this.getDepthCoordString()};\n\n int in_h = h / ${blockSize};\n int offset_h = imod(h, ${blockSize});\n int in_w = w / ${blockSize};\n int offset_w = imod(w, ${blockSize});\n int offset_d = (offset_h * ${blockSize} + offset_w) *\n ${this.getOutputDepthSize()};\n int in_d = d + offset_d;\n\n float result = ${this.getInputSamplingString()};\n setOutput(result);\n }\n `;\n }\n getHeightCoordString() {\n if (this.dataFormat === 'NHWC') {\n return `coords[1]`;\n }\n else {\n return `coords[2]`;\n }\n }\n getWidthCoordString() {\n if (this.dataFormat === 'NHWC') {\n return `coords[2]`;\n }\n else {\n return `coords[3]`;\n }\n }\n getDepthCoordString() {\n if (this.dataFormat === 'NHWC') {\n return `coords[3]`;\n }\n else {\n return `coords[1]`;\n }\n }\n getOutputDepthSize() {\n if (this.dataFormat === 'NHWC') {\n return this.outputShape[3];\n }\n else {\n return this.outputShape[1];\n }\n }\n getInputSamplingString() {\n if (this.dataFormat === 'NHWC') {\n return `getX(b, in_h, in_w, in_d)`;\n }\n else {\n return `getX(b, in_d, in_h, in_w)`;\n }\n }\n}\n//# sourceMappingURL=depth_to_space_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class DiagProgram {\n constructor(size) {\n this.variableNames = ['X'];\n this.outputShape = [size, size];\n this.userCode = `\n void main() {\n ivec2 coords = getOutputCoords();\n float val = coords[0] == coords[1] ? getX(coords[0]) : 0.0;\n setOutput(val);\n }\n `;\n }\n}\n//# sourceMappingURL=diag_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getGlslDifferences } from './glsl_version';\nimport { ENCODE_FLOAT_SNIPPET } from './shader_compiler_util';\nimport { TextureUsage } from './tex_util';\nexport class EncodeFloatProgram {\n constructor(outputShape) {\n this.variableNames = ['A'];\n this.outTexUsage = TextureUsage.DOWNLOAD;\n const glsl = getGlslDifferences();\n this.outputShape = outputShape;\n this.userCode = `\n ${ENCODE_FLOAT_SNIPPET}\n\n void main() {\n float x = getAAtOutCoords();\n ${glsl.output} = encode_float(x);\n }\n `;\n }\n}\n//# sourceMappingURL=encode_float_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getGlslDifferences } from './glsl_version';\nimport { ENCODE_FLOAT_SNIPPET } from './shader_compiler_util';\nimport { TextureUsage } from './tex_util';\nexport class EncodeFloatPackedProgram {\n constructor(outputShape) {\n this.variableNames = ['A'];\n this.packedInputs = true;\n this.packedOutput = false;\n this.outTexUsage = TextureUsage.DOWNLOAD;\n const glsl = getGlslDifferences();\n this.outputShape = outputShape;\n this.userCode = `\n ${ENCODE_FLOAT_SNIPPET}\n\n void main() {\n ivec3 coords = getOutputCoords();\n float x = getChannel(getAAtOutCoords(), vec2(coords.y, coords.z));\n ${glsl.output} = encode_float(x);\n }\n `;\n }\n}\n//# sourceMappingURL=encode_float_packed_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getGlslDifferences } from './glsl_version';\nimport * as shader_util from './shader_compiler_util';\nexport class EncodeMatrixProgram {\n constructor(outputShape, texShape, inputIsUnsignedByte = false) {\n this.variableNames = ['A'];\n const glsl = getGlslDifferences();\n const [height, width] = texShape;\n this.outputShape = outputShape;\n let output = `result`;\n if (inputIsUnsignedByte) {\n output = `floor(result * 255. + 0.5)`;\n }\n this.userCode = `\n ${shader_util.getFlatIndexFrom3D(outputShape)}\n\n void main() {\n ivec3 coords = getOutputCoords();\n\n int flatIndex = getFlatIndex(coords);\n int offset = imod(flatIndex, 4);\n\n flatIndex = idiv(flatIndex, 4, 1.);\n\n int r = flatIndex / ${width};\n int c = imod(flatIndex, ${width});\n vec2 uv = (vec2(c, r) + halfCR) / vec2(${width}.0, ${height}.0);\n vec4 values = ${glsl.texture2D}(A, uv);\n\n float result;\n\n if(offset == 0) {\n result = values[0];\n } else if(offset == 1) {\n result = values[1];\n } else if(offset == 2) {\n result = values[2];\n } else {\n result = values[3];\n }\n\n ${glsl.output} = vec4(${output}, 0., 0., 0.);\n }\n `;\n }\n}\n//# sourceMappingURL=encode_matrix_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getGlslDifferences } from './glsl_version';\nimport * as shader_util from './shader_compiler_util';\n/*\nThis is how the shader encodes a tensor with shape = [2, 3, 5]\n(indices are [batch, row, col]).\n\n000|001 002|003 004|xxx 020|021 022|023 024|xxx\n------- ------- ------- ------- ------- -------\n010|011 012|013 014|xxx xxx|xxx xxx|xxx xxx|xxx\n\n100|101 102|103 104|xxx 120|121 122|123 124|xxx\n------- ------- ------- ------- ------- -------\n110|111 112|113 114|xxx xxx|xxx xxx|xxx xxx|xxx\n\nSingle texels contain only values from the same batch, and from adjacent rows\nand columns.\n */\nexport class EncodeMatrixPackedProgram {\n constructor(outputShape, texShape, inputIsUnsignedByte = false) {\n this.variableNames = ['A'];\n this.packedInputs = false;\n this.packedOutput = true;\n const glsl = getGlslDifferences();\n const [height, width] = texShape;\n this.outputShape = outputShape;\n let mainLoop = '';\n let output = 'result';\n if (inputIsUnsignedByte) {\n output = 'floor(result * 255. + 0.5)';\n }\n for (let row = 0; row <= 1; row++) {\n for (let col = 0; col <= 1; col++) {\n const channel = row * 2 + col;\n mainLoop += `\n localCoords = coords;\n if(localCoords[2] + ${col} < ${outputShape[2]}) {\n localCoords[2] += ${col};\n if(localCoords[1] + ${row} < ${outputShape[1]}) {\n localCoords[1] += ${row};\n\n flatIndex = getFlatIndex(localCoords);\n offset = imod(flatIndex, 4);\n\n flatIndex = idiv(flatIndex, 4, 1.);\n\n r = flatIndex / ${width};\n c = imod(flatIndex, ${width});\n uv = (vec2(c, r) + halfCR) / vec2(${width}.0, ${height}.0);\n values = ${glsl.texture2D}(A, uv);\n\n if(offset == 0) {\n result[${channel}] = values[0];\n } else if(offset == 1) {\n result[${channel}] = values[1];\n } else if(offset == 2) {\n result[${channel}] = values[2];\n } else {\n result[${channel}] = values[3];\n }\n }\n }\n `;\n }\n }\n this.userCode = `\n ${shader_util.getFlatIndexFrom3D(outputShape)}\n\n void main() {\n ivec3 coords = getOutputCoords();\n\n vec4 result = vec4(0.);\n int flatIndex, r, c, offset;\n ivec3 localCoords;\n vec2 uv;\n vec4 values;\n\n ${mainLoop}\n\n ${glsl.output} = ${output};\n }\n `;\n }\n}\n//# sourceMappingURL=encode_matrix_packed_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class FillProgram {\n constructor(shape, value) {\n this.outputShape = [];\n this.variableNames = ['x'];\n this.outputShape = shape;\n this.userCode = `\n uniform float value;\n void main() {\n // Input can be obtained from uniform value.\n setOutput(value);\n }\n `;\n }\n getCustomSetupFunc(value) {\n return (gpgpu, webGLProgram) => {\n if (this.valueLoc == null) {\n this.valueLoc = gpgpu.getUniformLocationNoThrow(webGLProgram, 'value');\n }\n gpgpu.gl.uniform1f(this.valueLoc, value);\n };\n }\n}\n//# sourceMappingURL=fill_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getCoordsDataType } from './shader_compiler';\nexport class GatherProgram {\n constructor(aShape, indicesLength, axis) {\n this.variableNames = ['A', 'indices'];\n const outputShape = aShape.slice();\n outputShape[axis] = indicesLength;\n this.outputShape = outputShape;\n this.rank = outputShape.length;\n const dtype = getCoordsDataType(this.rank);\n const sourceCoords = getSourceCoords(aShape, axis);\n this.userCode = `\n void main() {\n ${dtype} resRC = getOutputCoords();\n setOutput(getA(${sourceCoords}));\n }\n `;\n }\n}\nfunction getSourceCoords(aShape, axis) {\n const rank = aShape.length;\n if (rank > 4) {\n throw Error(`Gather for rank ${rank} is not yet supported`);\n }\n if (rank === 1) {\n return `int(getIndices(resRC))`;\n }\n const currentCoords = ['resRC.x', 'resRC.y', 'resRC.z', 'resRC.w'];\n const sourceCoords = [];\n for (let i = 0; i < aShape.length; i++) {\n if (i === axis) {\n sourceCoords.push(`int(getIndices(${currentCoords[i]}))`);\n }\n else {\n sourceCoords.push(`${currentCoords[i]}`);\n }\n }\n return sourceCoords.join();\n}\n//# sourceMappingURL=gather_gpu.js.map", "import { getCoordsDataType } from './shader_compiler';\nexport class GatherNDProgram {\n constructor(sliceDim, strides, shape) {\n this.sliceDim = sliceDim;\n this.strides = strides;\n this.variableNames = ['x', 'indices'];\n this.outputShape = shape;\n const stridesType = getCoordsDataType(strides.length);\n const dtype = getCoordsDataType(shape.length);\n const strideString = this.sliceDim > 1 ? 
'strides[j]' : 'strides';\n this.userCode = `\n ${stridesType} strides = ${stridesType}(${this.strides});\n void main() {\n ${dtype} coords = getOutputCoords();\n int flattenIndex = 0;\n for (int j = 0; j < ${this.sliceDim}; j++) {\n int index = round(getIndices(coords[0], j));\n flattenIndex += index * ${strideString};\n }\n setOutput(getX(flattenIndex, coords[1]));\n }\n `;\n }\n}\n//# sourceMappingURL=gather_nd_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getGlslDifferences } from './glsl_version';\nimport * as tex_util from './tex_util';\nimport * as webgl_util from './webgl_util';\nexport function createVertexShader(gl) {\n const glsl = getGlslDifferences();\n const vertexShaderSource = `${glsl.version}\n precision highp float;\n ${glsl.attribute} vec3 clipSpacePos;\n ${glsl.attribute} vec2 uv;\n ${glsl.varyingVs} vec2 resultUV;\n\n void main() {\n gl_Position = vec4(clipSpacePos, 1);\n resultUV = uv;\n }`;\n return webgl_util.createVertexShader(gl, vertexShaderSource);\n}\nexport function createVertexBuffer(gl) {\n // [x y z u v] * [upper-left, lower-left, upper-right, lower-right]\n const vertexArray = new Float32Array([-1, 1, 0, 0, 1, -1, -1, 0, 0, 0, 1, 1, 0, 1, 1, 1, -1, 0, 1, 0]);\n return webgl_util.createStaticVertexBuffer(gl, vertexArray);\n}\nexport function createIndexBuffer(gl) {\n // OpenGL (and WebGL) have \"CCW == front\" winding\n const triangleVertexIndices = new Uint16Array([0, 1, 2, 2, 1, 3]);\n return webgl_util.createStaticIndexBuffer(gl, triangleVertexIndices);\n}\nfunction createAndConfigureTexture(gl, width, height, internalFormat, textureFormat, textureType) {\n webgl_util.validateTextureSize(width, height);\n const texture = webgl_util.createTexture(gl);\n const tex2d = gl.TEXTURE_2D;\n webgl_util.callAndCheck(gl, () => gl.bindTexture(tex2d, texture));\n webgl_util.callAndCheck(gl, () => gl.texParameteri(tex2d, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE));\n webgl_util.callAndCheck(gl, () => gl.texParameteri(tex2d, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE));\n webgl_util.callAndCheck(gl, () => gl.texParameteri(tex2d, gl.TEXTURE_MIN_FILTER, gl.NEAREST));\n webgl_util.callAndCheck(gl, () => gl.texParameteri(tex2d, gl.TEXTURE_MAG_FILTER, gl.NEAREST));\n webgl_util.callAndCheck(gl, () => gl.texImage2D(tex2d, 0, internalFormat, width, height, 0, textureFormat, textureType, null));\n webgl_util.callAndCheck(gl, () => gl.bindTexture(gl.TEXTURE_2D, null));\n return texture;\n}\nexport function getInternalFormatForFloat32MatrixTexture(textureConfig) {\n return textureConfig.internalFormatFloat;\n}\nexport function createFloat32MatrixTexture(gl, rows, columns, textureConfig) {\n const [width, height] = tex_util.getUnpackedMatrixTextureShapeWidthHeight(rows, columns);\n return createAndConfigureTexture(gl, width, height, getInternalFormatForFloat32MatrixTexture(textureConfig), 
textureConfig.textureFormatFloat, gl.FLOAT);\n}\nexport function getInternalFormatForFloat16MatrixTexture(textureConfig) {\n return textureConfig.internalFormatHalfFloat;\n}\nexport function createFloat16MatrixTexture(gl, rows, columns, textureConfig) {\n const [width, height] = tex_util.getUnpackedMatrixTextureShapeWidthHeight(rows, columns);\n return createAndConfigureTexture(gl, width, height, getInternalFormatForFloat16MatrixTexture(textureConfig), textureConfig.textureFormatFloat, textureConfig.textureTypeHalfFloat);\n}\nexport function getInternalFormatForUnsignedBytesMatrixTexture(textureConfig) {\n return textureConfig.downloadTextureFormat;\n}\nexport function createUnsignedBytesMatrixTexture(gl, rows, columns, textureConfig) {\n const [width, height] = tex_util.getUnpackedMatrixTextureShapeWidthHeight(rows, columns);\n return createAndConfigureTexture(gl, width, height, getInternalFormatForUnsignedBytesMatrixTexture(textureConfig), gl.RGBA, gl.UNSIGNED_BYTE);\n}\nexport function getInternalFormatForPackedMatrixTexture(textureConfig) {\n return textureConfig.internalFormatPackedFloat;\n}\nexport function createPackedMatrixTexture(gl, rows, columns, textureConfig) {\n const [width, height] = tex_util.getPackedMatrixTextureShapeWidthHeight(rows, columns);\n return createAndConfigureTexture(gl, width, height, getInternalFormatForPackedMatrixTexture(textureConfig), gl.RGBA, gl.FLOAT);\n}\nexport function getInternalFormatForFloat16PackedMatrixTexture(textureConfig) {\n return textureConfig.internalFormatPackedHalfFloat;\n}\nexport function createFloat16PackedMatrixTexture(gl, rows, columns, textureConfig) {\n const [width, height] = tex_util.getPackedMatrixTextureShapeWidthHeight(rows, columns);\n return createAndConfigureTexture(gl, width, height, getInternalFormatForFloat16PackedMatrixTexture(textureConfig), gl.RGBA, textureConfig.textureTypeHalfFloat);\n}\nexport function bindVertexProgramAttributeStreams(gl, program, vertexBuffer) {\n const posOffset = 0; // x is the first buffer element\n const uvOffset = 3 * 4; // uv comes after [x y z]\n const stride = (3 * 4) + (2 * 4); // xyz + uv, each entry is 4-byte float.\n webgl_util.callAndCheck(gl, () => gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer));\n const success = webgl_util.bindVertexBufferToProgramAttribute(gl, program, 'clipSpacePos', vertexBuffer, 3, stride, posOffset);\n return success &&\n webgl_util.bindVertexBufferToProgramAttribute(gl, program, 'uv', vertexBuffer, 2, stride, uvOffset);\n}\nexport function uploadDenseMatrixToTexture(gl, texture, width, height, data, textureConfig) {\n webgl_util.callAndCheck(gl, () => gl.bindTexture(gl.TEXTURE_2D, texture));\n let dataForUpload, texelDataType, internalFormat;\n if (data instanceof Uint8Array) {\n dataForUpload = new Uint8Array(width * height * 4);\n texelDataType = gl.UNSIGNED_BYTE;\n internalFormat = gl.RGBA;\n }\n else {\n dataForUpload = new Float32Array(width * height * 4);\n texelDataType = gl.FLOAT;\n internalFormat = textureConfig.internalFormatPackedFloat;\n }\n dataForUpload.set(data);\n webgl_util.callAndCheck(gl, () => gl.texImage2D(gl.TEXTURE_2D, 0, internalFormat, width, height, 0, gl.RGBA, texelDataType, dataForUpload));\n webgl_util.callAndCheck(gl, () => gl.bindTexture(gl.TEXTURE_2D, null));\n}\nexport function uploadPixelDataToTexture(gl, texture, pixels) {\n webgl_util.callAndCheck(gl, () => gl.bindTexture(gl.TEXTURE_2D, texture));\n if (pixels.data instanceof Uint8Array) {\n webgl_util.callAndCheck(gl, () => gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 
pixels.width, pixels.height, 0, gl.RGBA, gl.UNSIGNED_BYTE, pixels.data));\n }\n else {\n webgl_util.callAndCheck(gl, () => gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, pixels));\n }\n webgl_util.callAndCheck(gl, () => gl.bindTexture(gl.TEXTURE_2D, null));\n}\nexport function createBufferFromOutputTexture(gl2, rows, columns, textureConfig) {\n // Create and bind the buffer.\n const buffer = gl2.createBuffer();\n webgl_util.callAndCheck(gl2, () => gl2.bindBuffer(gl2.PIXEL_PACK_BUFFER, buffer));\n // Initialize the buffer to the size of the texture in bytes.\n const bytesPerFloat = 4;\n const valuesPerTexel = 4;\n const bufferSizeBytes = bytesPerFloat * valuesPerTexel * rows * columns;\n webgl_util.callAndCheck(gl2, () => gl2.bufferData(gl2.PIXEL_PACK_BUFFER, bufferSizeBytes, gl2.STREAM_READ));\n // Enqueue a command on the GPU command queue to copy of texture into the\n // buffer.\n webgl_util.callAndCheck(gl2, () => gl2.readPixels(0, 0, columns, rows, gl2.RGBA, gl2.FLOAT, 0));\n webgl_util.callAndCheck(gl2, () => gl2.bindBuffer(gl2.PIXEL_PACK_BUFFER, null));\n return buffer;\n}\nexport function downloadFloat32MatrixFromBuffer(gl, buffer, size) {\n const gl2 = gl;\n const downloadTarget = new Float32Array(size);\n gl2.bindBuffer(gl2.PIXEL_PACK_BUFFER, buffer);\n gl2.getBufferSubData(gl2.PIXEL_PACK_BUFFER, 0, downloadTarget);\n gl2.bindBuffer(gl2.PIXEL_PACK_BUFFER, null);\n return downloadTarget;\n}\nexport function downloadByteEncodedFloatMatrixFromOutputTexture(gl, rows, columns, textureConfig) {\n const [w, h] = tex_util.getUnpackedMatrixTextureShapeWidthHeight(rows, columns);\n const numChannels = 4;\n const downloadTarget = new Uint8Array(tex_util.getUnpackedArraySizeFromMatrixSize(rows * columns, numChannels));\n webgl_util.callAndCheck(gl, () => gl.readPixels(0, 0, w, h, textureConfig.downloadTextureFormat, gl.UNSIGNED_BYTE, downloadTarget));\n // By wrapping the buffer in a Float32Array, we use native browser IEEE 754\n // decoding of the 4 bytes that back each 32 bit float.\n return new Float32Array(downloadTarget.buffer);\n}\nexport function downloadPackedMatrixFromBuffer(gl, buffer, batch, rows, cols, physicalRows, physicalCols, textureConfig) {\n const gl2 = gl;\n const downloadTarget = new Float32Array(tex_util.getPackedRGBAArraySizeFromMatrixShape(physicalRows, physicalCols));\n gl2.bindBuffer(gl2.PIXEL_PACK_BUFFER, buffer);\n gl2.getBufferSubData(gl2.PIXEL_PACK_BUFFER, 0, downloadTarget);\n gl2.bindBuffer(gl2.PIXEL_PACK_BUFFER, null);\n return downloadTarget;\n}\nexport function downloadMatrixFromPackedOutputTexture(gl, physicalRows, physicalCols) {\n const packedRGBA = new Float32Array(physicalRows * physicalCols * 4);\n webgl_util.callAndCheck(gl, () => gl.readPixels(0, 0, physicalCols, physicalRows, gl.RGBA, gl.FLOAT, packedRGBA));\n return packedRGBA;\n}\n//# sourceMappingURL=gpgpu_util.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env, util } from '@tensorflow/tfjs-core';\nimport { getWebGLContext, setWebGLContext } from './canvas_util';\nimport * as gpgpu_util from './gpgpu_util';\nimport * as tex_util from './tex_util';\nimport * as webgl_util from './webgl_util';\nexport class GPGPUContext {\n constructor(gl) {\n this.outputTexture = null;\n this.program = null;\n this.disposed = false;\n this.vertexAttrsAreBound = false;\n this.itemsToPoll = [];\n const glVersion = env().getNumber('WEBGL_VERSION');\n if (gl != null) {\n this.gl = gl;\n setWebGLContext(glVersion, gl);\n }\n else {\n this.gl = getWebGLContext(glVersion);\n }\n // WebGL 2.0 enables texture floats without an extension.\n let COLOR_BUFFER_FLOAT = 'WEBGL_color_buffer_float';\n const COLOR_BUFFER_HALF_FLOAT = 'EXT_color_buffer_half_float';\n if (env().getNumber('WEBGL_VERSION') === 1) {\n const TEXTURE_FLOAT = 'OES_texture_float';\n const TEXTURE_HALF_FLOAT = 'OES_texture_half_float';\n this.textureFloatExtension =\n webgl_util.getExtensionOrThrow(this.gl, TEXTURE_FLOAT);\n if (webgl_util.hasExtension(this.gl, TEXTURE_HALF_FLOAT)) {\n this.textureHalfFloatExtension =\n webgl_util.getExtensionOrThrow(this.gl, TEXTURE_HALF_FLOAT);\n }\n else if (env().get('WEBGL_FORCE_F16_TEXTURES')) {\n throw new Error('GL context does not support half float textures, yet the ' +\n 'environment flag WEBGL_FORCE_F16_TEXTURES is set to true.');\n }\n this.colorBufferFloatExtension = this.gl.getExtension(COLOR_BUFFER_FLOAT);\n if (webgl_util.hasExtension(this.gl, COLOR_BUFFER_HALF_FLOAT)) {\n this.colorBufferHalfFloatExtension =\n webgl_util.getExtensionOrThrow(this.gl, COLOR_BUFFER_HALF_FLOAT);\n }\n else if (env().get('WEBGL_FORCE_F16_TEXTURES')) {\n throw new Error('GL context does not support color renderable half floats, yet ' +\n 'the environment flag WEBGL_FORCE_F16_TEXTURES is set to true.');\n }\n }\n else {\n COLOR_BUFFER_FLOAT = 'EXT_color_buffer_float';\n if (webgl_util.hasExtension(this.gl, COLOR_BUFFER_FLOAT)) {\n this.colorBufferFloatExtension =\n this.gl.getExtension(COLOR_BUFFER_FLOAT);\n }\n else if (webgl_util.hasExtension(this.gl, COLOR_BUFFER_HALF_FLOAT)) {\n this.colorBufferHalfFloatExtension =\n this.gl.getExtension(COLOR_BUFFER_HALF_FLOAT);\n }\n else {\n throw new Error('GL context does not support color renderable floats');\n }\n }\n this.vertexBuffer = gpgpu_util.createVertexBuffer(this.gl);\n this.indexBuffer = gpgpu_util.createIndexBuffer(this.gl);\n this.framebuffer = webgl_util.createFramebuffer(this.gl);\n this.textureConfig =\n tex_util.getTextureConfig(this.gl, this.textureHalfFloatExtension);\n }\n get debug() {\n return env().getBool('DEBUG');\n }\n dispose() {\n if (this.disposed) {\n return;\n }\n if (this.program != null) {\n console.warn('Disposing a GPGPUContext that still has a bound WebGLProgram.' 
+\n ' This is probably a resource leak, delete the program with ' +\n 'GPGPUContext.deleteProgram before disposing.');\n }\n if (this.outputTexture != null) {\n console.warn('Disposing a GPGPUContext that still has a bound output matrix ' +\n 'texture. This is probably a resource leak, delete the output ' +\n 'matrix texture with GPGPUContext.deleteMatrixTexture before ' +\n 'disposing.');\n }\n const gl = this.gl;\n webgl_util.callAndCheck(gl, () => gl.finish());\n webgl_util.callAndCheck(gl, () => gl.bindFramebuffer(gl.FRAMEBUFFER, null));\n webgl_util.callAndCheck(gl, () => gl.deleteFramebuffer(this.framebuffer));\n webgl_util.callAndCheck(gl, () => gl.bindBuffer(gl.ARRAY_BUFFER, null));\n webgl_util.callAndCheck(gl, () => gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null));\n webgl_util.callAndCheck(gl, () => gl.deleteBuffer(this.indexBuffer));\n this.disposed = true;\n }\n createFloat32MatrixTexture(rows, columns) {\n this.throwIfDisposed();\n return gpgpu_util.createFloat32MatrixTexture(this.gl, rows, columns, this.textureConfig);\n }\n createFloat16MatrixTexture(rows, columns) {\n this.throwIfDisposed();\n return gpgpu_util.createFloat16MatrixTexture(this.gl, rows, columns, this.textureConfig);\n }\n createUnsignedBytesMatrixTexture(rows, columns) {\n this.throwIfDisposed();\n return gpgpu_util.createUnsignedBytesMatrixTexture(this.gl, rows, columns, this.textureConfig);\n }\n uploadPixelDataToTexture(texture, pixels) {\n this.throwIfDisposed();\n gpgpu_util.uploadPixelDataToTexture(this.gl, texture, pixels);\n }\n uploadDenseMatrixToTexture(texture, width, height, data) {\n this.throwIfDisposed();\n gpgpu_util.uploadDenseMatrixToTexture(this.gl, texture, width, height, data, this.textureConfig);\n }\n createFloat16PackedMatrixTexture(rows, columns) {\n this.throwIfDisposed();\n return gpgpu_util.createFloat16PackedMatrixTexture(this.gl, rows, columns, this.textureConfig);\n }\n createPackedMatrixTexture(rows, columns) {\n this.throwIfDisposed();\n return gpgpu_util.createPackedMatrixTexture(this.gl, rows, columns, this.textureConfig);\n }\n deleteMatrixTexture(texture) {\n this.throwIfDisposed();\n if (this.outputTexture === texture) {\n webgl_util.unbindColorTextureFromFramebuffer(this.gl, this.framebuffer);\n this.outputTexture = null;\n }\n webgl_util.callAndCheck(this.gl, () => this.gl.deleteTexture(texture));\n }\n downloadByteEncodedFloatMatrixFromOutputTexture(texture, rows, columns) {\n return this.downloadMatrixDriver(texture, () => gpgpu_util.downloadByteEncodedFloatMatrixFromOutputTexture(this.gl, rows, columns, this.textureConfig));\n }\n downloadPackedMatrixFromBuffer(buffer, batch, rows, columns, physicalRows, physicalCols) {\n return gpgpu_util.downloadPackedMatrixFromBuffer(this.gl, buffer, batch, rows, columns, physicalRows, physicalCols, this.textureConfig);\n }\n downloadFloat32MatrixFromBuffer(buffer, size) {\n return gpgpu_util.downloadFloat32MatrixFromBuffer(this.gl, buffer, size);\n }\n createBufferFromTexture(texture, rows, columns) {\n this.bindTextureToFrameBuffer(texture);\n const result = gpgpu_util.createBufferFromOutputTexture(this.gl, rows, columns, this.textureConfig);\n this.unbindTextureToFrameBuffer();\n return result;\n }\n createAndWaitForFence() {\n const fenceContext = this.createFence(this.gl);\n return this.pollFence(fenceContext);\n }\n createFence(gl) {\n let query;\n let isFencePassed;\n if (env().getBool('WEBGL_FENCE_API_ENABLED')) {\n const gl2 = gl;\n const sync = gl2.fenceSync(gl2.SYNC_GPU_COMMANDS_COMPLETE, 0);\n gl.flush();\n 
isFencePassed = () => {\n const status = gl2.clientWaitSync(sync, 0, 0);\n return status === gl2.ALREADY_SIGNALED ||\n status === gl2.CONDITION_SATISFIED;\n };\n query = sync;\n }\n else if (env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION') > 0) {\n query = this.beginQuery();\n this.endQuery();\n isFencePassed = () => this.isQueryAvailable(query, env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION'));\n }\n else {\n // If we have no way to fence, return true immediately. This will fire in\n // WebGL 1.0 when there is no disjoint query timer. In this case, because\n // the fence passes immediately, we'll immediately ask for a download of\n // the texture, which will cause the UI thread to hang.\n isFencePassed = () => true;\n }\n return { query, isFencePassed };\n }\n downloadMatrixFromPackedTexture(texture, physicalRows, physicalCols) {\n return this.downloadMatrixDriver(texture, () => gpgpu_util.downloadMatrixFromPackedOutputTexture(this.gl, physicalRows, physicalCols));\n }\n createProgram(fragmentShaderSource) {\n this.throwIfDisposed();\n const gl = this.gl;\n const fragmentShader = webgl_util.createFragmentShader(gl, fragmentShaderSource);\n const vertexShader = gpgpu_util.createVertexShader(gl);\n const program = webgl_util.createProgram(gl);\n webgl_util.callAndCheck(gl, () => gl.attachShader(program, vertexShader));\n webgl_util.callAndCheck(gl, () => gl.attachShader(program, fragmentShader));\n webgl_util.linkProgram(gl, program);\n if (this.debug) {\n webgl_util.validateProgram(gl, program);\n }\n if (!this.vertexAttrsAreBound) {\n this.setProgram(program);\n this.vertexAttrsAreBound = gpgpu_util.bindVertexProgramAttributeStreams(gl, this.program, this.vertexBuffer);\n }\n return program;\n }\n deleteProgram(program) {\n this.throwIfDisposed();\n if (program === this.program) {\n this.program = null;\n }\n if (program != null) {\n webgl_util.callAndCheck(this.gl, () => this.gl.deleteProgram(program));\n }\n }\n setProgram(program) {\n this.throwIfDisposed();\n this.program = program;\n if ((this.program != null) && this.debug) {\n webgl_util.validateProgram(this.gl, this.program);\n }\n webgl_util.callAndCheck(this.gl, () => this.gl.useProgram(program));\n }\n getUniformLocation(program, uniformName, shouldThrow = true) {\n this.throwIfDisposed();\n if (shouldThrow) {\n return webgl_util.getProgramUniformLocationOrThrow(this.gl, program, uniformName);\n }\n else {\n return webgl_util.getProgramUniformLocation(this.gl, program, uniformName);\n }\n }\n getAttributeLocation(program, attribute) {\n this.throwIfDisposed();\n return webgl_util.callAndCheck(this.gl, () => this.gl.getAttribLocation(program, attribute));\n }\n getUniformLocationNoThrow(program, uniformName) {\n this.throwIfDisposed();\n return this.gl.getUniformLocation(program, uniformName);\n }\n setInputMatrixTexture(inputMatrixTexture, uniformLocation, textureUnit) {\n this.throwIfDisposed();\n this.throwIfNoProgram();\n webgl_util.bindTextureToProgramUniformSampler(this.gl, inputMatrixTexture, uniformLocation, textureUnit);\n }\n setOutputMatrixTexture(outputMatrixTexture, rows, columns) {\n this.setOutputMatrixTextureDriver(outputMatrixTexture, columns, rows);\n }\n setOutputPackedMatrixTexture(outputPackedMatrixTexture, rows, columns) {\n this.throwIfDisposed();\n const [width, height] = tex_util.getPackedMatrixTextureShapeWidthHeight(rows, columns);\n this.setOutputMatrixTextureDriver(outputPackedMatrixTexture, width, height);\n }\n setOutputMatrixWriteRegion(startRow, numRows, startColumn, 
numColumns) {\n this.setOutputMatrixWriteRegionDriver(startColumn, startRow, numColumns, numRows);\n }\n setOutputPackedMatrixWriteRegion(startRow, numRows, startColumn, numColumns) {\n throw new Error('setOutputPackedMatrixWriteRegion not implemented.');\n }\n debugValidate() {\n if (this.program != null) {\n webgl_util.validateProgram(this.gl, this.program);\n }\n webgl_util.validateFramebuffer(this.gl);\n }\n executeProgram() {\n this.throwIfDisposed();\n this.throwIfNoProgram();\n const gl = this.gl;\n if (this.debug) {\n this.debugValidate();\n }\n webgl_util.callAndCheck(gl, () => gl.drawElements(gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0));\n }\n blockUntilAllProgramsCompleted() {\n this.throwIfDisposed();\n webgl_util.callAndCheck(this.gl, () => this.gl.finish());\n }\n getQueryTimerExtension() {\n if (this.disjointQueryTimerExtension == null) {\n this.disjointQueryTimerExtension =\n webgl_util.getExtensionOrThrow(this.gl, env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION') === 2 ?\n 'EXT_disjoint_timer_query_webgl2' :\n 'EXT_disjoint_timer_query');\n }\n return this.disjointQueryTimerExtension;\n }\n getQueryTimerExtensionWebGL2() {\n return this.getQueryTimerExtension();\n }\n getQueryTimerExtensionWebGL1() {\n return this.getQueryTimerExtension();\n }\n beginQuery() {\n if (env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION') === 2) {\n const gl2 = this.gl;\n const ext = this.getQueryTimerExtensionWebGL2();\n const query = gl2.createQuery();\n gl2.beginQuery(ext.TIME_ELAPSED_EXT, query);\n return query;\n }\n const ext = this.getQueryTimerExtensionWebGL1();\n const query = ext.createQueryEXT();\n ext.beginQueryEXT(ext.TIME_ELAPSED_EXT, query);\n return query;\n }\n endQuery() {\n if (env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION') === 2) {\n const gl2 = this.gl;\n const ext = this.getQueryTimerExtensionWebGL2();\n gl2.endQuery(ext.TIME_ELAPSED_EXT);\n return;\n }\n const ext = this.getQueryTimerExtensionWebGL1();\n ext.endQueryEXT(ext.TIME_ELAPSED_EXT);\n }\n async waitForQueryAndGetTime(query) {\n await util.repeatedTry(() => this.disposed || // while testing contexts are created / disposed\n // in rapid succession, so without this check we\n // may poll for the query timer indefinitely\n this.isQueryAvailable(query, env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION')));\n return this.getQueryTime(query, env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION'));\n }\n getQueryTime(query, queryTimerVersion) {\n if (queryTimerVersion === 0) {\n return null;\n }\n if (queryTimerVersion === 2) {\n const gl2 = this.gl;\n const timeElapsedNanos = gl2.getQueryParameter(query, gl2.QUERY_RESULT);\n // Return milliseconds.\n return timeElapsedNanos / 1000000;\n }\n else {\n const ext = this.getQueryTimerExtensionWebGL1();\n const timeElapsedNanos = ext.getQueryObjectEXT(query, ext.QUERY_RESULT_EXT);\n // Return milliseconds.\n return timeElapsedNanos / 1000000;\n }\n }\n isQueryAvailable(query, queryTimerVersion) {\n if (queryTimerVersion === 0) {\n return true;\n }\n if (queryTimerVersion === 2) {\n const gl2 = this.gl;\n const ext = this.getQueryTimerExtensionWebGL2();\n const available = gl2.getQueryParameter(query, gl2.QUERY_RESULT_AVAILABLE);\n if (this.disjoint == null) {\n this.disjoint = this.gl.getParameter(ext.GPU_DISJOINT_EXT);\n }\n return available && !this.disjoint;\n }\n else {\n const ext = this.getQueryTimerExtensionWebGL1();\n const available = ext.getQueryObjectEXT(query, ext.QUERY_RESULT_AVAILABLE_EXT);\n if 
(this.disjoint == null) {\n this.disjoint = this.gl.getParameter(ext.GPU_DISJOINT_EXT);\n }\n return available && !this.disjoint;\n }\n }\n pollFence(fenceContext) {\n return new Promise(resolve => {\n this.addItemToPoll(() => fenceContext.isFencePassed(), () => resolve());\n });\n }\n pollItems() {\n // Find the last query that has finished.\n const index = linearSearchLastTrue(this.itemsToPoll.map(x => x.isDoneFn));\n for (let i = 0; i <= index; ++i) {\n const { resolveFn } = this.itemsToPoll[i];\n resolveFn();\n }\n this.itemsToPoll = this.itemsToPoll.slice(index + 1);\n }\n addItemToPoll(isDoneFn, resolveFn) {\n this.itemsToPoll.push({ isDoneFn, resolveFn });\n if (this.itemsToPoll.length > 1) {\n // We already have a running loop that polls.\n return;\n }\n // Start a new loop that polls.\n util.repeatedTry(() => {\n this.pollItems();\n // End the loop if no more items to poll.\n return this.itemsToPoll.length === 0;\n });\n }\n bindTextureToFrameBuffer(texture) {\n this.throwIfDisposed();\n webgl_util.bindColorTextureToFramebuffer(this.gl, texture, this.framebuffer);\n if (this.debug) {\n webgl_util.validateFramebuffer(this.gl);\n }\n }\n unbindTextureToFrameBuffer() {\n if (this.outputTexture != null) {\n webgl_util.bindColorTextureToFramebuffer(this.gl, this.outputTexture, this.framebuffer);\n if (this.debug) {\n webgl_util.validateFramebuffer(this.gl);\n }\n }\n else {\n webgl_util.unbindColorTextureFromFramebuffer(this.gl, this.framebuffer);\n }\n }\n downloadMatrixDriver(texture, downloadAndDecode) {\n this.bindTextureToFrameBuffer(texture);\n const result = downloadAndDecode();\n this.unbindTextureToFrameBuffer();\n return result;\n }\n setOutputMatrixTextureDriver(outputMatrixTextureMaybePacked, width, height) {\n this.throwIfDisposed();\n const gl = this.gl;\n webgl_util.bindColorTextureToFramebuffer(gl, outputMatrixTextureMaybePacked, this.framebuffer);\n if (this.debug) {\n webgl_util.validateFramebuffer(gl);\n }\n this.outputTexture = outputMatrixTextureMaybePacked;\n webgl_util.callAndCheck(gl, () => gl.viewport(0, 0, width, height));\n webgl_util.callAndCheck(gl, () => gl.scissor(0, 0, width, height));\n }\n setOutputMatrixWriteRegionDriver(x, y, width, height) {\n this.throwIfDisposed();\n webgl_util.callAndCheck(this.gl, () => this.gl.scissor(x, y, width, height));\n }\n throwIfDisposed() {\n if (this.disposed) {\n throw new Error('Attempted to use disposed GPGPUContext.');\n }\n }\n throwIfNoProgram() {\n if (this.program == null) {\n throw new Error('No GPU program is currently set.');\n }\n }\n}\n/**\n * Finds the index of the last true element using linear search.\n * Note: We can't do binary search because Chrome expects us to explicitly\n * test all fences before download:\n * https://github.com/tensorflow/tfjs/issues/1145\n */\nexport function linearSearchLastTrue(arr) {\n let i = 0;\n for (; i < arr.length; ++i) {\n const isDone = arr[i]();\n if (!isDone) {\n break;\n }\n }\n return i - 1;\n}\n//# sourceMappingURL=gpgpu_context.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env, util } from '@tensorflow/tfjs-core';\nimport * as shader_compiler from './shader_compiler';\nexport function compileProgram(gpgpu, program, inputs, output) {\n const userCode = program.userCode;\n const inputInfos = inputs.map((input, i) => {\n const shapeInfo = {\n logicalShape: input.shape,\n texShape: input.isUniform ? null : input.texData.texShape,\n isUniform: input.isUniform,\n isPacked: input.isUniform ? false : input.texData.isPacked,\n flatOffset: null\n };\n if (input.texData != null && input.texData.slice != null &&\n input.texData.slice.flatOffset > 0) {\n shapeInfo.flatOffset = input.texData.slice.flatOffset;\n }\n return { name: program.variableNames[i], shapeInfo };\n });\n const inShapeInfos = inputInfos.map(x => x.shapeInfo);\n const outShapeInfo = {\n logicalShape: output.shape,\n texShape: output.texData.texShape,\n isUniform: false,\n isPacked: output.texData.isPacked,\n flatOffset: null\n };\n const source = shader_compiler.makeShader(inputInfos, outShapeInfo, userCode, program.packedInputs);\n const webGLProgram = gpgpu.createProgram(source);\n // Add special uniforms (NAN, INFINITY)\n let infLoc = null;\n const nanLoc = gpgpu.getUniformLocation(webGLProgram, 'NAN', false);\n if (env().getNumber('WEBGL_VERSION') === 1) {\n infLoc = gpgpu.getUniformLocation(webGLProgram, 'INFINITY', false);\n }\n // Add user-defined uniforms\n const uniformLocations = {};\n for (let i = 0; i < program.variableNames.length; i++) {\n const varName = program.variableNames[i];\n const shouldThrow = false;\n uniformLocations[varName] =\n gpgpu.getUniformLocation(webGLProgram, varName, shouldThrow);\n uniformLocations[`offset${varName}`] =\n gpgpu.getUniformLocation(webGLProgram, `offset${varName}`, shouldThrow);\n }\n return {\n program,\n source,\n webGLProgram,\n uniformLocations,\n inShapeInfos,\n outShapeInfo,\n infLoc,\n nanLoc,\n };\n}\nfunction validateBinaryAndProgram(shapeInfos, inputs) {\n if (shapeInfos.length !== inputs.length) {\n throw Error(`Binary was compiled with ${shapeInfos.length} inputs, but ` +\n `was executed with ${inputs.length} inputs`);\n }\n shapeInfos.forEach((s, i) => {\n const shapeA = s.logicalShape;\n const input = inputs[i];\n const shapeB = input.shape;\n if (!util.arraysEqual(shapeA, shapeB)) {\n throw Error(`Binary was compiled with different shapes than ` +\n `the current args. Shapes ${shapeA} and ${shapeB} must match`);\n }\n // The input is uploaded as uniform.\n if (s.isUniform && input.isUniform) {\n return;\n }\n const texShapeA = s.texShape;\n const texShapeB = input.isUniform ? null : input.texData.texShape;\n if (!util.arraysEqual(texShapeA, texShapeB)) {\n throw Error(`Binary was compiled with different texture shapes than the` +\n ` current args. 
Shape ${texShapeA} and ${texShapeB} must match`);\n }\n });\n}\nexport function runProgram(gpgpu, binary, inputs, output, customSetup) {\n validateBinaryAndProgram(binary.inShapeInfos, inputs);\n validateBinaryAndProgram([binary.outShapeInfo], [output]);\n const outTex = output.texData.texture;\n const outTexShape = output.texData.texShape;\n if (output.texData.isPacked) {\n gpgpu.setOutputPackedMatrixTexture(outTex, outTexShape[0], outTexShape[1]);\n }\n else {\n gpgpu.setOutputMatrixTexture(outTex, outTexShape[0], outTexShape[1]);\n }\n gpgpu.setProgram(binary.webGLProgram);\n // Set special uniforms (NAN, INFINITY)\n if (env().getNumber('WEBGL_VERSION') === 1) {\n if (binary.infLoc !== null) {\n gpgpu.gl.uniform1f(binary.infLoc, Infinity);\n }\n }\n if (binary.nanLoc !== null) {\n gpgpu.gl.uniform1f(binary.nanLoc, NaN);\n }\n // Set user-defined inputs\n inputs.forEach((input, i) => {\n const varName = binary.program.variableNames[i];\n const varLoc = binary.uniformLocations[varName];\n const varOffsetLoc = binary.uniformLocations[`offset${varName}`];\n if (varLoc == null) {\n // The compiler inferred that this variable is not used in this shader.\n return;\n }\n if (input.isUniform) {\n // Upload the values of the tensor as uniform.\n if (util.sizeFromShape(input.shape) < 2) {\n gpgpu.gl.uniform1f(varLoc, input.uniformValues[0]);\n }\n else {\n let vals = input.uniformValues;\n if (!(vals instanceof Float32Array)) {\n vals = new Float32Array(vals);\n }\n gpgpu.gl.uniform1fv(varLoc, vals);\n }\n return;\n }\n // If the input was sliced, upload the flat offset index.\n if (input.texData.slice != null && varOffsetLoc != null) {\n gpgpu.gl.uniform1i(varOffsetLoc, input.texData.slice.flatOffset);\n }\n gpgpu.setInputMatrixTexture(input.texData.texture, varLoc, i);\n });\n if (customSetup != null) {\n customSetup(gpgpu, binary.webGLProgram);\n }\n gpgpu.executeProgram();\n}\nexport function makeShaderKey(program, inputs, output) {\n let keyInputs = '';\n inputs.concat(output).forEach(x => {\n const hasOffset = x.texData != null && x.texData.slice != null &&\n x.texData.slice.flatOffset > 0;\n const texShape = x.isUniform ? 'uniform' : x.texData.texShape;\n keyInputs += `${x.shape}_${texShape}_${hasOffset}`;\n });\n const keyUserCode = program.userCode;\n let key = program.constructor.name;\n // Fast string concat. See https://jsperf.com/string-concatenation/14.\n key += '_' + keyInputs + '_' + keyUserCode;\n return key;\n}\n//# sourceMappingURL=gpgpu_math.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getGlslDifferences } from './glsl_version';\nexport class Im2ColPackedProgram {\n constructor(outputShape, inputShape, convInfo) {\n this.variableNames = ['A'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = outputShape;\n const { filterWidth, inChannels, strideWidth, strideHeight, padInfo, outWidth, dilationWidth, dilationHeight, dataFormat } = convInfo;\n const { left, top } = padInfo;\n const itemsPerBlockRow = inChannels * filterWidth;\n const glsl = getGlslDifferences();\n const isChannelsLast = dataFormat === 'channelsLast';\n const rowDim = isChannelsLast ? 0 : 1;\n const colDim = isChannelsLast ? 1 : 2;\n let unrolled = ``;\n for (let row = 0; row <= 1; row++) {\n for (let col = 0; col <= 1; col++) {\n unrolled += `\n blockIndex = rc.y + ${col};\n pos = rc.x + ${row};\n\n if(blockIndex < ${outputShape[1]} && pos < ${outputShape[0]}) {\n offsetY = int(blockIndex / (${outWidth})) * ${strideHeight} - ${top};\n d0 = offsetY + ${dilationHeight} * (pos / ${itemsPerBlockRow});\n\n if(d0 < ${inputShape[rowDim]} && d0 >= 0) {\n\n offsetX = int(mod(float(blockIndex), ${outWidth}.) * ${strideWidth}. - ${left}.);\n d1 = offsetX + ${dilationWidth} * (int(mod(float(pos), ${itemsPerBlockRow}.) / ${inChannels}.));\n\n if(d1 < ${inputShape[colDim]} && d1 >= 0) {\n\n ch = int(mod(float(pos), ${inChannels}.));\n\n if (${isChannelsLast}) {\n innerDims = vec2(d1, ch);\n result[${row * 2 + col}] = getChannel(\n getA(d0, int(innerDims.x),\n int(innerDims.y)), innerDims);\n } else {\n innerDims = vec2(d0, d1);\n result[${row * 2 + col}] = getChannel(\n getA(ch, int(innerDims.x),\n int(innerDims.y)), innerDims);\n }\n }\n }\n }\n `;\n }\n }\n this.userCode = `\n void main() {\n ivec2 rc = getOutputCoords();\n\n vec4 result = vec4(0);\n\n int blockIndex, pos, offsetY, d0, offsetX, d1, ch;\n vec2 innerDims;\n\n ${unrolled}\n\n ${glsl.output} = result;\n }\n `;\n }\n}\n//# sourceMappingURL=im2col_packed_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class LRNProgram {\n constructor(xShape, radius, bias, alpha, beta) {\n this.variableNames = ['x'];\n this.outputShape = [];\n const rad = radius;\n const maxD = xShape[3] - 1;\n this.outputShape = xShape;\n // optimize pow(bias + alpha * sum, -beta)\n // src: https://github.com/tensorflow/tensorflow/..\n // blob/26033a1644a9c4a5fbe3170ab2e864b6a4ccd4ca/..\n // tensorflow/core/kernels/mkl_lrn_op.cc#L320\n let powOperator;\n const basis = `float(${bias}) + float(${alpha}) * sum`;\n if (beta === 0.5) {\n powOperator = `inversesqrt(${basis})`;\n }\n else if (beta === 1.0) {\n powOperator = `1.0/(${basis})`;\n }\n else {\n powOperator = `exp(log(${basis}) * float(-${beta}));`;\n }\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int r = coords[1];\n int c = coords[2];\n int d = coords[3];\n float x = getX(b, r, c, d);\n float sum = 0.0;\n for (int j = -${rad}; j <= ${rad}; j++) {\n int idx = d + j;\n if (idx >= 0 && idx <= ${maxD}) {\n float z = getX(b, r, c, idx);\n sum += z * z;\n }\n }\n float val = x * ${powOperator};\n setOutput(val);\n }\n `;\n }\n}\n//# sourceMappingURL=lrn_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class LRNGradProgram {\n constructor(inputShape, depthRadius, bias, alpha, beta) {\n this.variableNames = ['inputImage', 'outputImage', 'dy'];\n this.outputShape = [];\n this.outputShape = inputShape;\n this.depth = inputShape[3];\n this.depthRadius = depthRadius;\n this.bias = bias;\n this.alpha = alpha;\n this.beta = beta;\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int r = coords[1];\n int c = coords[2];\n\n float result = 0.0;\n for (int d = 0; d < ${this.depth}; ++d) {\n int depthBegin = int(max(0.0, float(d - ${depthRadius})));\n int depthEnd = int(min(float(${this.depth}),\n float(d + ${depthRadius} + 1)));\n\n const int MIN_DEPTH_BEGIN = 0;\n const int MAX_DEPTH_END = ${this.depth};\n\n float norm = 0.0;\n for (int k = MIN_DEPTH_BEGIN; k < MAX_DEPTH_END; ++k) {\n if (k < depthBegin){\n continue;\n }\n else if (k >= depthBegin && k < depthEnd) {\n norm += getInputImage(b, r, c, k) * getInputImage(b, r, c, k);\n }\n else {\n break;\n }\n }\n\n norm = float(${alpha}) * norm + float(${bias});\n\n for(int k = MIN_DEPTH_BEGIN; k < MAX_DEPTH_END; ++k){\n if (k < depthBegin){\n continue;\n }\n else if (k >= depthBegin && k < depthEnd){\n float dyi = -2.0 * float(${alpha})\n * float(${beta})\n * getInputImage(b ,r ,c, k) * getOutputImage(b, r, c, d)\n / norm;\n if (k == d) {\n dyi += pow(norm, -1.0 * ${beta});\n }\n if (k == coords[3]) {\n dyi *= getDy(b, r, c, d);\n result += dyi;\n }\n }\n else {\n break;\n }\n }\n }\n setOutput(result);\n }\n `;\n }\n}\n//# sourceMappingURL=lrn_grad_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class LRNPackedProgram {\n constructor(xShape, radius, bias, alpha, beta) {\n this.variableNames = ['x'];\n this.outputShape = [];\n this.packedInputs = true;\n this.packedOutput = true;\n const rad = radius;\n const maxD = xShape[3] - 1;\n this.outputShape = xShape;\n // optimize pow(bias + alpha * sum, -beta)\n // src: https://github.com/tensorflow/tensorflow/..\n // blob/26033a1644a9c4a5fbe3170ab2e864b6a4ccd4ca/..\n // tensorflow/core/kernels/mkl_lrn_op.cc#L320\n let powOperator;\n const basis = `float(${bias}) + float(${alpha}) * sum`;\n if (beta === 0.5) {\n powOperator = `inversesqrt(${basis})`;\n }\n else if (beta === 1.0) {\n powOperator = `1.0/(${basis})`;\n }\n else {\n powOperator = `exp(log(${basis}) * float(-${beta}));`;\n }\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords.x;\n int r = coords.y;\n int c = coords.z;\n int d = coords.w;\n\n bool hasNextCol = d < ${this.outputShape[3]};\n bool hasNextRow = c < ${this.outputShape[2]};\n\n vec4 sum = vec4(0.);\n vec4 xFragAtOutputCoords = getX(b, r, c, d);\n\n vec4 xAtOutputCoords = vec4(\n getChannel(xFragAtOutputCoords, vec2(c, d)),\n hasNextCol ?\n getChannel(xFragAtOutputCoords, vec2(c, d + 1)) : 0.0,\n hasNextRow ?\n getChannel(xFragAtOutputCoords , vec2(c + 1, d)) : 0.0,\n (hasNextRow && hasNextCol) ?\n getChannel(xFragAtOutputCoords, vec2(c + 1, d + 1)) : 0.0\n );\n\n int firstChannel = d - ${rad};\n vec2 cache = vec2(0.);\n if(firstChannel >= 0){\n vec4 firstChannelFrag = getX(b, r, c, firstChannel);\n cache.x = getChannel(firstChannelFrag, vec2(c, firstChannel));\n if(hasNextRow){\n cache.y = getChannel(firstChannelFrag, vec2(c + 1, firstChannel));\n }\n }\n\n ivec2 depth = ivec2(d, d + 1);\n for (int j = - ${rad}; j <= ${rad}; j++) {\n ivec2 idx = depth + j;\n bvec2 aboveLowerBound = greaterThanEqual(idx, ivec2(0));\n bvec2 belowUpperBound = lessThanEqual(idx, ivec2(${maxD}));\n\n bool depthInRange = aboveLowerBound.x && belowUpperBound.x;\n bool depthPlusOneInRange = aboveLowerBound.y && belowUpperBound.y;\n\n if(depthInRange || depthPlusOneInRange){\n vec4 z = vec4(0.);\n vec4 xFragAtCurrentDepth;\n z.xz = cache.xy;\n if(depthPlusOneInRange && hasNextCol){\n xFragAtCurrentDepth = idx.y != d ?\n getX(b, r, c, idx.y) : xFragAtOutputCoords;\n z.y = getChannel(xFragAtCurrentDepth, vec2(c, idx.y));\n if(hasNextRow){\n z.w = getChannel(xFragAtCurrentDepth, vec2(c + 1, idx.y));\n }\n }\n cache.xy = z.yw;\n sum += z * z;\n }\n }\n vec4 result = xAtOutputCoords * ${powOperator};\n setOutput(result);\n }\n `;\n }\n}\n//# sourceMappingURL=lrn_packed_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class MaxPool2DBackpropProgram {\n constructor(convInfo) {\n this.variableNames = ['dy', 'maxPos'];\n this.outputShape = convInfo.inShape;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationHeight = convInfo.dilationHeight;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padTop = effectiveFilterHeight - 1 - convInfo.padInfo.top;\n const padLeft = effectiveFilterWidth - 1 - convInfo.padInfo.left;\n const lastIndex = effectiveFilterHeight * effectiveFilterWidth - 1;\n this.userCode = `\n const ivec2 pads = ivec2(${padTop}, ${padLeft});\n\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int d = coords[3];\n\n ivec2 dyRCCorner = coords.yz - pads;\n int dyRCorner = dyRCCorner.x;\n int dyCCorner = dyRCCorner.y;\n\n // Convolve dy(?, ?, d) with pos mask(:, :, d) to get dx(xR, xC, d).\n // ? = to be determined. : = across all values in that axis.\n float dotProd = 0.0;\n for (int wR = 0; wR < ${effectiveFilterHeight};\n wR += ${dilationHeight}) {\n float dyR = float(dyRCorner + wR) / ${strideHeight}.0;\n\n if (dyR < 0.0 || dyR >= ${convInfo.outHeight}.0 || fract(dyR) > 0.0) {\n continue;\n }\n int idyR = int(dyR);\n\n for (int wC = 0; wC < ${effectiveFilterWidth}; wC++) {\n float dyC = float(dyCCorner + wC) / ${strideWidth}.0;\n\n if (dyC < 0.0 || dyC >= ${convInfo.outWidth}.0 ||\n fract(dyC) > 0.0) {\n continue;\n }\n int idyC = int(dyC);\n\n float dyValue = getDy(b, idyR, idyC, d);\n int maxPosValue = ${lastIndex} - int(getMaxPos(b, idyR, idyC, d));\n\n // Get the current value, check it against the value from the\n // position matrix.\n int curPosValue = wR * ${effectiveFilterWidth} + wC;\n float mask = float(maxPosValue == curPosValue ? 
1.0 : 0.0);\n\n dotProd += dyValue * mask;\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\nexport class MaxPool3DBackpropProgram {\n constructor(convInfo) {\n this.variableNames = ['dy', 'maxPos'];\n this.outputShape = convInfo.inShape;\n const strideDepth = convInfo.strideDepth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationDepth = convInfo.dilationDepth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterDepth = convInfo.effectiveFilterDepth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padFront = effectiveFilterDepth - 1 - convInfo.padInfo.front;\n const padTop = effectiveFilterHeight - 1 - convInfo.padInfo.top;\n const padLeft = effectiveFilterWidth - 1 - convInfo.padInfo.left;\n const lastIndex = effectiveFilterDepth * effectiveFilterHeight * effectiveFilterWidth - 1;\n this.userCode = `\n const ivec3 pads = ivec3(${padFront}, ${padTop}, ${padLeft});\n\n void main() {\n ivec5 coords = getOutputCoords();\n int batch = coords.x;\n int ch = coords.u;\n\n ivec3 dyCorner = ivec3(coords.y, coords.z, coords.w) - pads;\n int dyDCorner = dyCorner.x;\n int dyRCorner = dyCorner.y;\n int dyCCorner = dyCorner.z;\n\n // Convolve dy(?, ?, ?, ch) with pos mask(:, :, :, d) to get\n // dx(xD, xR, xC, ch).\n // ? = to be determined. : = across all values in that axis.\n float dotProd = 0.0;\n\n for (int wD = 0; wD < ${effectiveFilterDepth};\n wD += ${dilationDepth}) {\n float dyD = float(dyDCorner + wD) / ${strideDepth}.0;\n\n if (dyD < 0.0 || dyD >= ${convInfo.outDepth}.0 || fract(dyD) > 0.0) {\n continue;\n }\n int idyD = int(dyD);\n\n for (int wR = 0; wR < ${effectiveFilterHeight};\n wR += ${dilationHeight}) {\n float dyR = float(dyRCorner + wR) / ${strideHeight}.0;\n\n if (dyR < 0.0 || dyR >= ${convInfo.outHeight}.0 ||\n fract(dyR) > 0.0) {\n continue;\n }\n int idyR = int(dyR);\n\n for (int wC = 0; wC < ${effectiveFilterWidth};\n wC += ${dilationWidth}) {\n float dyC = float(dyCCorner + wC) / ${strideWidth}.0;\n\n if (dyC < 0.0 || dyC >= ${convInfo.outWidth}.0 ||\n fract(dyC) > 0.0) {\n continue;\n }\n int idyC = int(dyC);\n\n float dyValue = getDy(batch, idyD, idyR, idyC, ch);\n int maxPosValue = ${lastIndex} -\n int(getMaxPos(batch, idyD, idyR, idyC, ch));\n\n // Get the current value, check it against the value from the\n // position matrix.\n int curPosValue =\n wD * ${effectiveFilterHeight} * ${effectiveFilterWidth} +\n wR * ${effectiveFilterWidth} + wC;\n float mask = float(maxPosValue == curPosValue ? 1.0 : 0.0);\n\n dotProd += dyValue * mask;\n }\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\n//# sourceMappingURL=max_pool_backprop_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class MatMulPackedProgram {\n constructor(aShape, bShape, outputShape, transposeA = false, transposeB = false, addBias = false, activation = null, hasPreluActivation = false) {\n this.variableNames = ['matrixA', 'matrixB'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = outputShape;\n const sharedDim = transposeA ? aShape[1] : aShape[2];\n const sharedDimensionPacked = Math.ceil(sharedDim / 2);\n const aSample = transposeA ? 'i * 2, rc.y' : 'rc.y, i * 2';\n const bSample = transposeB ? 'rc.z, i * 2' : 'i * 2, rc.z';\n const aSwizzle = transposeA ? ['a.xxyy', 'a.zzww'] : ['a.xxzz', 'a.yyww'];\n const bSwizzle = transposeB ? ['b.xzxz', 'b.ywyw'] : ['b.xyxy', 'b.zwzw'];\n let activationSnippet = '', applyActivationSnippet = '';\n if (activation) {\n if (hasPreluActivation) {\n activationSnippet = `vec4 activation(vec4 a) {\n vec4 b = getPreluActivationWeightsAtOutCoords();\n ${activation}\n }`;\n }\n else {\n activationSnippet = `vec4 activation(vec4 x) {\n ${activation}\n }`;\n }\n applyActivationSnippet = `result = activation(result);`;\n }\n const addBiasSnippet = addBias ? 'result += getBiasAtOutCoords();' : '';\n if (addBias) {\n this.variableNames.push('bias');\n }\n if (hasPreluActivation) {\n this.variableNames.push('preluActivationWeights');\n }\n let batchASnippet = 'rc.x';\n let batchBSnippet = 'rc.x';\n if (aShape[0] < bShape[0]) {\n batchASnippet = `int(min(float(rc.x), ${aShape[0] - 1}.))`;\n }\n else if (bShape[0] < aShape[0]) {\n batchBSnippet = `int(min(float(rc.x), ${bShape[0] - 1}.))`;\n }\n this.userCode = `\n ${activationSnippet}\n\n const float sharedDimension = ${sharedDimensionPacked}.0;\n\n vec4 dot2x2ARowBCol(ivec3 rc) {\n vec4 result = vec4(0);\n for (int i = 0; i < ${sharedDimensionPacked}; i++) {\n int batchA = ${batchASnippet};\n int batchB = ${batchBSnippet};\n vec4 a = getMatrixA(batchA, ${aSample});\n vec4 b = getMatrixB(batchB, ${bSample});\n\n // These swizzled products need to be separately added.\n // See: https://github.com/tensorflow/tfjs/issues/1735\n result += (${aSwizzle[0]} * ${bSwizzle[0]});\n result += (${aSwizzle[1]} * ${bSwizzle[1]});\n }\n return result;\n }\n\n void main() {\n ivec3 rc = getOutputCoords();\n vec4 result = dot2x2ARowBCol(rc);\n\n ${addBiasSnippet}\n\n ${applyActivationSnippet}\n\n setOutput(result);\n }\n `;\n }\n}\n//# sourceMappingURL=mulmat_packed_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class MultinomialProgram {\n constructor(batchSize, numOutcomes, numSamples) {\n this.variableNames = ['probs'];\n this.outputShape = [batchSize, numSamples];\n this.userCode = `\n uniform float seed;\n\n void main() {\n ivec2 coords = getOutputCoords();\n int batch = coords[0];\n\n float r = random(seed);\n float cdf = 0.0;\n\n for (int i = 0; i < ${numOutcomes - 1}; i++) {\n cdf += getProbs(batch, i);\n\n if (r < cdf) {\n setOutput(float(i));\n return;\n }\n }\n\n // If no other event happened, last event happened.\n setOutput(float(${numOutcomes - 1}));\n }\n `;\n }\n getCustomSetupFunc(seed) {\n return (gpgpu, webGLProgram) => {\n if (this.seedLoc == null) {\n this.seedLoc = gpgpu.getUniformLocation(webGLProgram, 'seed');\n }\n gpgpu.gl.uniform1f(this.seedLoc, seed);\n };\n }\n}\n//# sourceMappingURL=multinomial_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class OneHotProgram {\n constructor(numIndices, depth, onValue, offValue) {\n this.variableNames = ['indices'];\n this.outputShape = [numIndices, depth];\n this.userCode = `\n void main() {\n ivec2 coords = getOutputCoords();\n int index = round(getIndices(coords.x));\n setOutput(mix(float(${offValue}), float(${onValue}),\n float(index == coords.y)));\n }\n `;\n }\n}\n//# sourceMappingURL=onehot_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getChannels } from './packing_util';\nimport { getCoordsDataType } from './shader_compiler';\nexport class PackProgram {\n constructor(outputShape) {\n this.variableNames = ['A'];\n this.packedInputs = false;\n this.packedOutput = true;\n // Only input / output 3D tensors.\n this.outputShape = outputShape;\n const rank = outputShape.length;\n if (rank === 0) {\n this.userCode = `\n void main() {\n setOutput(vec4(getA(), 0., 0., 0.));\n }\n `;\n }\n else {\n const channels = getChannels('rc', rank);\n const dtype = getCoordsDataType(rank);\n const outOfBoundsCondition = getOutOfBoundsCondition(rank, outputShape, channels);\n const setup = getSetup(rank, outputShape[outputShape.length - 1], outputShape[outputShape.length - 2], channels);\n const output = getOutput(outputShape, channels);\n this.userCode = `\n void main() {\n ${dtype} rc = getOutputCoords();\n\n if(${outOfBoundsCondition}) {\n setOutput(vec4(0));\n } else {\n ${setup}\n\n setOutput(vec4(${output}));\n }\n }\n `;\n }\n }\n}\nfunction getSourceCoordsArr(rank, dims) {\n const coords = [];\n for (let row = 0; row <= 1; row++) {\n for (let col = 0; col <= 1; col++) {\n let coord = `${row === 0 ? 'r' : 'rp1'}, ${col === 0 ? 'c' : 'cp1'}`;\n for (let d = 2; d < rank; d++) {\n coord = `${dims[dims.length - 1 - d]},` + coord;\n }\n coords.push(coord);\n }\n }\n return coords;\n}\nfunction getOutOfBoundsCondition(rank, shape, dims) {\n if (rank === 1) {\n return `rc > ${shape[0]}`;\n }\n let cond = '';\n for (let i = rank - 2; i < rank; i++) {\n cond += `${dims[i]} >= ${shape[i]}`;\n if (i < rank - 1) {\n cond += '||';\n }\n }\n return cond;\n}\nfunction getSetup(rank, cols, rows, dims) {\n if (rank === 1) {\n return '';\n }\n const innerDims = dims.slice(-2);\n return `\n int r = ${innerDims[0]};\n int c = ${innerDims[1]};\n int rp1 = r + 1;\n int cp1 = c + 1;\n\n bool cEdge = cp1 >= ${cols};\n bool rEdge = rp1 >= ${rows};\n `;\n}\nfunction getOutput(shape, dims) {\n const rank = shape.length;\n const sourceCoords = getSourceCoordsArr(rank, dims);\n if (rank === 1) {\n return `getA(rc),\n rc + 1 >= ${shape[0]} ? 0. : getA(rc + 1),\n 0, 0`;\n }\n return `getA(${sourceCoords[0]}),\n cEdge ? 0. : getA(${sourceCoords[1]}),\n rEdge ? 0. : getA(${sourceCoords[2]}),\n rEdge || cEdge ? 0. : getA(${sourceCoords[3]})`;\n}\n//# sourceMappingURL=pack_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getCoordsDataType } from './shader_compiler';\nexport class PadProgram {\n constructor(xShape, paddings, constantValue) {\n this.variableNames = ['x'];\n this.outputShape = paddings.map((p, i) => p[0] /* beforePad */ + xShape[i] + p[1] /* afterPad */);\n const rank = xShape.length;\n const type = getCoordsDataType(rank);\n const start = paddings.map(p => p[0]).join(',');\n const end = paddings.map((p, i) => p[0] + xShape[i]).join(',');\n const unpackedCoords = ['coords[0]', 'coords[1]', 'coords[2]', 'coords[3]'].slice(0, rank);\n if (rank === 1) {\n this.userCode = `\n int start = ${start};\n int end = ${end};\n\n void main() {\n int outC = getOutputCoords();\n if (outC < start || outC >= end) {\n setOutput(float(${constantValue}));\n } else {\n setOutput(getX(outC - start));\n }\n }\n `;\n return;\n }\n this.userCode = `\n ${type} start = ${type}(${start});\n ${type} end = ${type}(${end});\n\n void main() {\n ${type} outC = getOutputCoords();\n if (any(lessThan(outC, start)) || any(greaterThanEqual(outC, end))) {\n setOutput(float(${constantValue}));\n } else {\n ${type} coords = outC - start;\n setOutput(getX(${unpackedCoords}));\n }\n }\n `;\n }\n}\n//# sourceMappingURL=pad_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getChannels } from './packing_util';\nimport { getCoordsDataType } from './shader_compiler';\nexport class PadPackedProgram {\n constructor(xShape, paddings, constantValue) {\n this.variableNames = ['x'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = paddings.map((p, i) => p[0] /* beforePad */ + xShape[i] + p[1] /* afterPad */);\n const rank = xShape.length;\n const dtype = getCoordsDataType(rank);\n const start = paddings.map(p => p[0]).join(',');\n const end = paddings.map((p, i) => p[0] + xShape[i]).join(',');\n const coords = getChannels('rc', rank);\n const source = getChannels('source', rank);\n const cLimit = `${coords[rank - 1]} < ${this.outputShape[rank - 1]}`;\n const innerDims = rank === 1 ? 'source' : `vec2(${source.slice(-2).join()})`;\n const componentSetup = [\n `${dtype} rc = outputLoc;`, `${coords[rank - 1]} += 1;\n if(${cLimit}) {\n `,\n rank === 1 ? 
'' : `}\n rc = outputLoc;\n ${coords[rank - 2]} += 1;\n if(${coords[rank - 2]} < ${this.outputShape[rank - 2]}) {`,\n rank === 1 ? '' : ` ${coords[rank - 1]} += 1;\n if(${cLimit}) {`\n ];\n const paddingArea = rank === 1 ?\n 'rc < start || rc >= end' :\n 'any(lessThan(rc, start)) || any(greaterThanEqual(rc, end))';\n let mainLoop = '';\n for (let i = 0, j = rank === 1 ? 2 : 4; i < j; i++) {\n mainLoop += `\n ${componentSetup[i]}\n if (${paddingArea}) {\n result[${i}] = float(${constantValue});\n } else {\n ${dtype} source = rc - start;\n result[${i}] = getChannel(getX(${source.join()}), ${innerDims});\n }\n `;\n }\n mainLoop += (rank === 1 ? `} ` : `}}`);\n this.userCode = `\n const ${dtype} start = ${dtype}(${start});\n const ${dtype} end = ${dtype}(${end});\n\n void main() {\n ${dtype} outputLoc = getOutputCoords();\n vec4 result = vec4(0.);\n ${mainLoop}\n setOutput(result);\n }\n `;\n }\n}\n//# sourceMappingURL=pad_packed_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class Pool2DProgram {\n constructor(convInfo, poolType, computePositions, flattenPositions = false, includeBatchInIndex = false) {\n this.variableNames = ['x'];\n if (poolType === 'avg' && computePositions) {\n throw new Error('Cannot compute positions for average pool.');\n }\n const filterWidth = convInfo.filterWidth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n this.outputShape = convInfo.outShape;\n const isAvgPool = poolType === 'avg';\n const batchFlattenPositionStr = `((batch * ${convInfo.inHeight} + xR) * ${convInfo.inWidth} + xC) * ${convInfo.inChannels} + d`;\n const flattenPositionStr = `(xR * ${convInfo.inWidth} + xC) * ${convInfo.inChannels} + d`;\n let initializationValue = '0.0';\n if (!isAvgPool) {\n // WebGL on Firefox Linux can't compile 1/0 so we do 1/eps.\n initializationValue = '-1.0 / 1e-20';\n }\n if (computePositions) {\n const compareOp = '>=';\n this.userCode = `\n const ivec2 strides = ivec2(${strideHeight}, ${strideWidth});\n const ivec2 pads = ivec2(${padTop}, ${padLeft});\n\n void main() {\n ivec4 coords = getOutputCoords();\n int batch = coords[0];\n int d = coords[3];\n\n ivec2 xRCCorner = coords.yz * strides - pads;\n int xRCorner = xRCCorner.x;\n int xCCorner = xRCCorner.y;\n\n // max/min x(?, ?, d) to get y(yR, yC, d).\n // ? 
= to be determined\n float minMaxValue = 0.0;\n float minMaxValueFound = 0.0;\n int minMaxPosition = 0;\n float avgValue = 0.0;\n\n for (int wR = 0; wR < ${effectiveFilterHeight};\n wR += ${dilationHeight}) {\n int xR = xRCorner + wR;\n\n if (xR < 0 || xR >= ${convInfo.inHeight}) {\n continue;\n }\n\n for (int wC = 0; wC < ${effectiveFilterWidth};\n wC += ${dilationWidth}) {\n int xC = xCCorner + wC;\n\n if (xC < 0 || xC >= ${convInfo.inWidth}) {\n continue;\n }\n\n float value = getX(batch, xR, xC, d);\n\n // If a min / max value has already been found, use it. If not,\n // use the current value.\n float currMinMaxValue = mix(\n value, minMaxValue, minMaxValueFound);\n if (value ${compareOp} currMinMaxValue) {\n minMaxValue = value;\n minMaxValueFound = 1.0;\n minMaxPosition = ${flattenPositions ? (includeBatchInIndex ? batchFlattenPositionStr :\n flattenPositionStr) :\n `wR * ${effectiveFilterWidth} + wC`};\n }\n }\n }\n setOutput(float(minMaxPosition));\n }\n `;\n return;\n }\n const compareOp = 'max';\n let returnValue = `${poolType}(${poolType}(${poolType}(` +\n 'minMaxValue[0], minMaxValue[1]), minMaxValue[2]), minMaxValue[3])';\n if (poolType === 'avg') {\n returnValue = `avgValue / count`;\n }\n const filterWidthNearestVec4 = Math.floor(filterWidth / 4) * 4;\n const filterWidthVec4Remainder = filterWidth % 4;\n const updateSnippet = `\n if (${isAvgPool}) {\n avgValue += dot(values, ones);\n } else {\n minMaxValue = ${compareOp}(values, minMaxValue);\n }\n `;\n this.userCode = `\n const ivec2 strides = ivec2(${strideHeight}, ${strideWidth});\n const ivec2 pads = ivec2(${padTop}, ${padLeft});\n const float initializationValue = ${initializationValue};\n const vec4 ones = vec4(1.0, 1.0, 1.0, 1.0);\n\n float count = 0.0;\n\n float getValue(int batch, int xR, int xC, int d) {\n if (xC < 0 || xC >= ${convInfo.inWidth}) {\n return initializationValue;\n }\n count += 1.0;\n return getX(batch, xR, xC, d);\n }\n\n void main() {\n ivec4 coords = getOutputCoords();\n int batch = coords[0];\n int d = coords[3];\n\n ivec2 xRCCorner = coords.yz * strides - pads;\n int xRCorner = xRCCorner.x;\n int xCCorner = xRCCorner.y;\n\n // max/min x(?, ?, d) to get y(yR, yC, d).\n // ? 
= to be determined\n vec4 minMaxValue = vec4(${initializationValue});\n float avgValue = 0.0;\n count = 0.0;\n\n for (int wR = 0; wR < ${effectiveFilterHeight};\n wR += ${dilationHeight}) {\n int xR = xRCorner + wR;\n\n if (xR < 0 || xR >= ${convInfo.inHeight}) {\n continue;\n }\n\n for (int wC = 0; wC < ${filterWidthNearestVec4}; wC += 4) {\n int xC = xCCorner + wC * ${dilationWidth};\n\n vec4 values = vec4(\n getValue(batch, xR, xC, d),\n getValue(batch, xR, xC + ${dilationWidth}, d),\n getValue(batch, xR, xC + 2 * ${dilationWidth}, d),\n getValue(batch, xR, xC + 3 * ${dilationWidth}, d)\n );\n\n ${updateSnippet}\n }\n\n int xC = xCCorner + ${filterWidthNearestVec4};\n if (${filterWidthVec4Remainder === 1}) {\n vec4 values = vec4(\n getValue(batch, xR, xC, d),\n initializationValue,\n initializationValue,\n initializationValue\n );\n\n ${updateSnippet}\n } else if (${filterWidthVec4Remainder === 2}) {\n vec4 values = vec4(\n getValue(batch, xR, xC, d),\n getValue(batch, xR, xC + ${dilationWidth}, d),\n initializationValue,\n initializationValue\n );\n\n ${updateSnippet}\n } else if (${filterWidthVec4Remainder === 3}) {\n vec4 values = vec4(\n getValue(batch, xR, xC, d),\n getValue(batch, xR, xC + ${dilationWidth}, d),\n getValue(batch, xR, xC + 2 * ${dilationWidth}, d),\n initializationValue\n );\n\n ${updateSnippet}\n }\n }\n setOutput(${returnValue});\n }\n `;\n }\n}\nexport class Pool3DProgram {\n constructor(convInfo, poolType, computePositions, flattenPositions = false, includeBatchInIndex = false) {\n this.variableNames = ['x'];\n if (poolType === 'avg' && computePositions) {\n throw new Error('Cannot compute positions for average pool.');\n }\n const filterWidth = convInfo.filterWidth;\n const strideDepth = convInfo.strideDepth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationDepth = convInfo.dilationDepth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterDepth = convInfo.effectiveFilterDepth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padFront = convInfo.padInfo.front;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n this.outputShape = convInfo.outShape;\n const isAvgPool = poolType === 'avg';\n let initializationValue = '0.0';\n if (!isAvgPool) {\n // WebGL on Firefox Linux can't compile 1/0 so we do 1/eps.\n initializationValue = '-1.0 / 1e-20';\n }\n if (computePositions) {\n const compareOp = '>=';\n this.userCode = `\n const ivec3 strides =\n ivec3(${strideDepth}, ${strideHeight}, ${strideWidth});\n const ivec3 pads = ivec3(${padFront}, ${padTop}, ${padLeft});\n\n void main() {\n ivec5 coords = getOutputCoords();\n int batch = coords.x;\n int ch = coords.u;\n\n ivec3 xCorner = ivec3(coords.y, coords.z, coords.w) * strides - pads;\n int xDCorner = xCorner.x;\n int xRCorner = xCorner.y;\n int xCCorner = xCorner.z;\n\n // max/min x(?, ?, ?, ch) to get y(yD, yR, yC, ch).\n // ? 
= to be determined\n float minMaxValue = 0.0;\n float minMaxValueFound = 0.0;\n int minMaxPosition = 0;\n\n for (int wD = 0; wD < ${effectiveFilterDepth};\n wD += ${dilationDepth}) {\n int xD = xDCorner + wD;\n\n if (xD < 0 || xD >= ${convInfo.inDepth}) {\n continue;\n }\n\n for (int wR = 0; wR < ${effectiveFilterHeight};\n wR += ${dilationHeight}) {\n int xR = xRCorner + wR;\n\n if (xR < 0 || xR >= ${convInfo.inHeight}) {\n continue;\n }\n\n for (int wC = 0; wC < ${effectiveFilterWidth};\n wC += ${dilationWidth}) {\n int xC = xCCorner + wC;\n\n if (xC < 0 || xC >= ${convInfo.inWidth}) {\n continue;\n }\n\n float value = getX(batch, xD, xR, xC, ch);\n\n // If a min / max value has already been found, use it. If not,\n // use the current value.\n float currMinMaxValue = mix(\n value, minMaxValue, minMaxValueFound);\n if (value ${compareOp} currMinMaxValue) {\n minMaxValue = value;\n minMaxValueFound = 1.0;\n minMaxPosition = ${flattenPositions ?\n (includeBatchInIndex ?\n `(((batch * ${convInfo.inDepth} + xD) * ${convInfo.inHeight} + xR) * ${convInfo.inWidth} + xC) * ${convInfo.inChannels} + ch` :\n `((xD * ${convInfo.inHeight} + xR) * ${convInfo.inWidth} + xC) * ${convInfo.inChannels} + ch`) :\n `wD * ${effectiveFilterHeight} * ${effectiveFilterWidth} +\n wR * ${effectiveFilterWidth} + wC`};\n }\n }\n }\n }\n setOutput(float(minMaxPosition));\n }\n `;\n return;\n }\n const compareOp = 'max';\n let returnValue = `${poolType}(${poolType}(${poolType}(` +\n 'minMaxValue[0], minMaxValue[1]), minMaxValue[2]), minMaxValue[3])';\n if (poolType === 'avg') {\n returnValue = `avgValue / count`;\n }\n const filterWidthNearestVec4 = Math.floor(filterWidth / 4) * 4;\n const filterWidthVec4Remainder = filterWidth % 4;\n const updateSnippet = `\n if (${isAvgPool}) {\n avgValue += dot(values, ones);\n } else {\n minMaxValue = ${compareOp}(values, minMaxValue);\n }\n `;\n this.userCode = `\n const ivec3 strides =\n ivec3(${strideDepth}, ${strideHeight}, ${strideWidth});\n const ivec3 pads = ivec3(${padFront}, ${padTop}, ${padLeft});\n const float initializationValue = ${initializationValue};\n const vec4 ones = vec4(1.0, 1.0, 1.0, 1.0);\n\n float count = 0.0;\n\n float getValue(int batch, int xD, int xR, int xC, int ch) {\n if (xC < 0 || xC >= ${convInfo.inWidth}) {\n return initializationValue;\n }\n count += 1.0;\n return getX(batch, xD, xR, xC, ch);\n }\n\n void main() {\n ivec5 coords = getOutputCoords();\n int batch = coords.x;\n int ch = coords.u;\n\n ivec3 xCorner = ivec3(coords.y, coords.z, coords.w) * strides - pads;\n int xDCorner = xCorner.x;\n int xRCorner = xCorner.y;\n int xCCorner = xCorner.z;\n\n // max/min x(?, ?, ?, d) to get y(yD, yR, yC, ch).\n // ? 
= to be determined\n vec4 minMaxValue = vec4(${initializationValue});\n float avgValue = 0.0;\n count = 0.0;\n\n for (int wD = 0; wD < ${effectiveFilterDepth};\n wD += ${dilationDepth}) {\n int xD = xDCorner + wD;\n\n if (xD < 0 || xD >= ${convInfo.inDepth}) {\n continue;\n }\n\n for (int wR = 0; wR < ${effectiveFilterHeight};\n wR += ${dilationHeight}) {\n int xR = xRCorner + wR;\n\n if (xR < 0 || xR >= ${convInfo.inHeight}) {\n continue;\n }\n\n for (int wC = 0; wC < ${filterWidthNearestVec4}; wC += 4) {\n int xC = xCCorner + wC * ${dilationWidth};\n\n vec4 values = vec4(\n getValue(batch, xD, xR, xC, ch),\n getValue(batch, xD, xR, xC + ${dilationWidth}, ch),\n getValue(batch, xD, xR, xC + 2 * ${dilationWidth}, ch),\n getValue(batch, xD, xR, xC + 3 * ${dilationWidth}, ch)\n );\n\n ${updateSnippet}\n }\n\n int xC = xCCorner + ${filterWidthNearestVec4};\n if (${filterWidthVec4Remainder === 1}) {\n vec4 values = vec4(\n getValue(batch, xD, xR, xC, ch),\n initializationValue,\n initializationValue,\n initializationValue\n );\n\n ${updateSnippet}\n } else if (${filterWidthVec4Remainder === 2}) {\n vec4 values = vec4(\n getValue(batch, xD, xR, xC, ch),\n getValue(batch, xD, xR, xC + ${dilationWidth}, ch),\n initializationValue,\n initializationValue\n );\n\n ${updateSnippet}\n } else if (${filterWidthVec4Remainder === 3}) {\n vec4 values = vec4(\n getValue(batch, xD, xR, xC, ch),\n getValue(batch, xD, xR, xC + ${dilationWidth}, ch),\n getValue(batch, xD, xR, xC + 2 * ${dilationWidth}, ch),\n initializationValue\n );\n\n ${updateSnippet}\n }\n }\n setOutput(${returnValue});\n }\n }\n `;\n }\n}\n//# sourceMappingURL=pool_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class ReduceProgram {\n constructor(reduceInfo, reduceType) {\n this.variableNames = ['x'];\n const { windowSize, batchSize, inSize, outSize } = reduceInfo;\n this.outputShape = [batchSize, outSize];\n let initializationValue = '0.0';\n let compareOp = ``;\n if (reduceType === 'prod') {\n initializationValue = '1.0';\n }\n else if (reduceType === 'min') {\n // WebGL on Firefox Linux can't compile 1/0 so we do 1/eps.\n initializationValue = '1.0 / 1e-20';\n compareOp = `min`;\n }\n else if (reduceType === 'max') {\n // WebGL on Firefox Linux can't compile 1/0 so we do 1/eps.\n initializationValue = '-1.0 / 1e-20';\n compareOp = `max`;\n }\n let returnValue = `${reduceType}(${reduceType}(${reduceType}(` +\n 'minMaxValue[0], minMaxValue[1]), minMaxValue[2]), minMaxValue[3])';\n if (reduceType === 'sum') {\n returnValue = `sumValue`;\n }\n else if (reduceType === 'prod') {\n returnValue = `prodValue`;\n }\n else if (reduceType === 'all') {\n returnValue = `allValue`;\n }\n else if (reduceType === 'any') {\n returnValue = `anyValue`;\n }\n const windowSizeNearestVec4 = Math.floor(windowSize / 4) * 4;\n const windowSizeVec4Remainder = windowSize % 4;\n 
let updateSnippet = `\n if (${reduceType === 'sum'}) {\n sumValue += dot(values, ones);\n } else if (${reduceType === 'prod'}) {\n vec2 tmp = vec2(values[0], values[1]) * vec2(values[2], values[3]);\n prodValue *= tmp[0] * tmp[1];\n } else {\n minMaxValue = ${compareOp}(values, minMaxValue);\n }\n `;\n let vecType = `vec4`;\n if (reduceType === 'all') {\n initializationValue = '1.0';\n updateSnippet = `\n bool reducedAllValue = all(values);\n float floatedReducedAllValue = float(reducedAllValue);\n allValue = float(allValue >= 1.0 && floatedReducedAllValue >= 1.0);\n `;\n vecType = `bvec4`;\n }\n else if (reduceType === 'any') {\n initializationValue = '0.0';\n updateSnippet = `\n bool reducedAnyValue = any(values);\n float floatedReducedAnyValue = float(reducedAnyValue);\n anyValue = float(anyValue >= 1.0 || floatedReducedAnyValue >= 1.0);\n `;\n vecType = `bvec4`;\n }\n let checkOutOfBounds = '';\n if (inSize % windowSize > 0) {\n checkOutOfBounds = `\n if (inIdx < 0 || inIdx >= ${inSize}) {\n return initializationValue;\n }\n `;\n }\n this.userCode = `\n const float initializationValue = ${initializationValue};\n const vec4 ones = vec4(1.0, 1.0, 1.0, 1.0);\n\n float getValue(int batch, int inIdx) {\n ${checkOutOfBounds}\n return getX(batch, inIdx);\n }\n\n void main() {\n ivec2 coords = getOutputCoords();\n int batch = coords[0];\n int outIdx = coords[1];\n int inOffset = outIdx * ${windowSize};\n\n vec4 minMaxValue = vec4(${initializationValue});\n float prodValue = 1.0;\n float sumValue = 0.0;\n float allValue = 1.0;\n float anyValue = 0.0;\n\n for (int i = 0; i < ${windowSizeNearestVec4}; i += 4) {\n int inIdx = inOffset + i;\n ${vecType} values = ${vecType}(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1),\n getValue(batch, inIdx + 2),\n getValue(batch, inIdx + 3)\n );\n\n ${updateSnippet}\n }\n\n int inIdx = inOffset + ${windowSizeNearestVec4};\n if (${windowSizeVec4Remainder === 1}) {\n ${vecType} values = ${vecType}(\n getValue(batch, inIdx),\n initializationValue,\n initializationValue,\n initializationValue\n );\n\n ${updateSnippet}\n } else if (${windowSizeVec4Remainder === 2}) {\n ${vecType} values = ${vecType}(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1),\n initializationValue,\n initializationValue\n );\n\n ${updateSnippet}\n } else if (${windowSizeVec4Remainder === 3}) {\n ${vecType} values = ${vecType}(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1),\n getValue(batch, inIdx + 2),\n initializationValue\n );\n\n ${updateSnippet}\n }\n setOutput(${returnValue});\n }\n `;\n }\n}\n//# sourceMappingURL=reduce_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as shader_util from './shader_compiler_util';\nexport class ReshapePackedProgram {\n constructor(outputShape, inputShape) {\n this.variableNames = ['A'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = outputShape;\n let mainLoop = ``;\n for (let i = 0; i < 4; i++) {\n let thisRC = `thisRC = rc;`;\n if (i % 2 === 1) {\n thisRC += `thisRC.z += 1;`;\n }\n if (i > 1) {\n thisRC += `thisRC.y += 1;`;\n }\n mainLoop += `\n ${thisRC}\n ${i > 0 ? `if(thisRC.y < rows && thisRC.z < cols){` : ''}\n int flatIndex = getFlatIndex(thisRC);\n\n ivec3 inputRC = inputCoordsFromReshapedOutCoords(flatIndex);\n vec2 inputRCInnerDims = vec2(float(inputRC.y),float(inputRC.z));\n\n result[${i}] =\n getChannel(getA(inputRC.x, inputRC.y, inputRC.z), inputRCInnerDims);\n ${i > 0 ? '}' : ''}\n `;\n }\n this.userCode = `\n ${getReshapedInputCoords(inputShape)}\n ${shader_util.getFlatIndexFrom3D(outputShape)}\n\n void main() {\n ivec3 rc = getOutputCoords();\n\n vec4 result = vec4(0.);\n\n ivec3 thisRC;\n int rows = ${outputShape[1]};\n int cols = ${outputShape[2]};\n\n ${mainLoop}\n\n setOutput(result);\n }\n `;\n }\n}\nfunction getReshapedInputCoords(shape) {\n const coordsFromIndexSnippet = shader_util.getLogicalCoordinatesFromFlatIndex(['r', 'c', 'd'], shape);\n return `\n ivec3 inputCoordsFromReshapedOutCoords(int index) {\n ${coordsFromIndexSnippet}\n return ivec3(r, c, d);\n }\n `;\n}\n//# sourceMappingURL=reshape_packed_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class ResizeBilinearBackpropProgram {\n constructor(dy, x, alignCorners) {\n this.variableNames = ['dy'];\n this.outputShape = [];\n this.outputShape = x.shape;\n const [, xHeight, xWidth,] = x.shape;\n const [, yHeight, yWidth] = dy.shape;\n // In the backwards pass, we want to find the pixels that were generated for\n // each pixel in the input image the forward pass and add the corresponding\n // coefficient from dy to the gradient (with some interpolation).\n const effectiveXSize = [\n (alignCorners && yHeight > 1) ? xHeight - 1 : xHeight,\n (alignCorners && yWidth > 1) ? xWidth - 1 : xWidth\n ];\n const effectiveYSize = [\n (alignCorners && yHeight > 1) ? 
yHeight - 1 : yHeight,\n (alignCorners && yWidth > 1) ? yWidth - 1 : yWidth\n ];\n const heightScale = effectiveXSize[0] / effectiveYSize[0];\n const widthScale = effectiveXSize[1] / effectiveYSize[1];\n const invHeightScale = 1 / heightScale;\n const invWidthScale = 1 / widthScale;\n // This defines the size of the window of values around a particular\n // index in dy that we want to search for contributions to dx.\n const winHeight = (Math.ceil(invHeightScale) * 2) + 2;\n const winWidth = (Math.ceil(invWidthScale) * 2) + 2;\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int d = coords[3];\n int r = coords[1];\n int c = coords[2];\n\n float accumulator = 0.0;\n\n const float heightScale = float(${heightScale});\n const float widthScale = float(${widthScale});\n\n const float invHeightScale = float(${invHeightScale});\n const float invWidthScale = float(${invWidthScale});\n\n const int winHeight = int(${winHeight});\n const int winWidth = int(${winWidth});\n\n // Compute bounds for where in dy we will look\n float startRLerp = floor(float(r) * invHeightScale);\n int startDyR = int(startRLerp - float(winHeight / 2));\n\n float startCLerp = floor(float(c) * invWidthScale);\n int startDyC = int(startCLerp - float(winWidth / 2));\n\n // Loop over dy\n for (int dyROffset = 0; dyROffset < winHeight; dyROffset++) {\n int dyR = dyROffset + startDyR;\n\n // Guard against the window exceeding the bounds of dy\n if (dyR < 0 || dyR >= ${yHeight}) {\n continue;\n }\n\n for (int dyCOffset = 0; dyCOffset < winWidth; dyCOffset++) {\n int dyC = dyCOffset + startDyC;\n\n // Guard against the window exceeding the bounds of dy\n if (dyC < 0 || dyC >= ${yWidth}) {\n continue;\n }\n\n float dxR = float(dyR) * heightScale;\n int topDxRIndex = int(floor(dxR));\n int bottomDxRIndex = int(min(ceil(dxR), ${xHeight - 1}.0));\n float dxRLerp = dxR - float(topDxRIndex);\n float inverseDxRLerp = 1.0 - dxRLerp;\n\n float dxC = float(dyC) * widthScale;\n int leftDxCIndex = int(floor(dxC));\n int rightDxCIndex = int(min(ceil(dxC), ${xWidth - 1}.0));\n float dxCLerp = dxC - float(leftDxCIndex);\n float inverseDxCLerp = 1.0 - dxCLerp;\n\n if (r == topDxRIndex && c == leftDxCIndex) {\n // topLeft\n accumulator +=\n getDy(b, dyR, dyC, d) * inverseDxRLerp * inverseDxCLerp;\n }\n\n if (r == topDxRIndex && c == rightDxCIndex) {\n // topRight\n accumulator += getDy(b, dyR, dyC, d) * inverseDxRLerp * dxCLerp;\n }\n\n if (r == bottomDxRIndex && c == leftDxCIndex) {\n // bottomLeft\n accumulator += getDy(b, dyR, dyC, d) * dxRLerp * inverseDxCLerp;\n }\n\n if (r == bottomDxRIndex && c == rightDxCIndex) {\n // bottomRight\n accumulator += getDy(b, dyR, dyC, d) * dxRLerp * dxCLerp;\n }\n }\n }\n // End loop over dy\n\n setOutput(accumulator);\n }\n `;\n }\n}\n//# sourceMappingURL=resize_bilinear_backprop_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class ResizeBilinearProgram {\n constructor(inputShape, newHeight, newWidth, alignCorners) {\n this.variableNames = ['A'];\n this.outputShape = [];\n const [batch, oldHeight, oldWidth, depth] = inputShape;\n this.outputShape = [batch, newHeight, newWidth, depth];\n const effectiveInSize = [\n (alignCorners && newHeight > 1) ? oldHeight - 1 : oldHeight,\n (alignCorners && newWidth > 1) ? oldWidth - 1 : oldWidth\n ];\n const effectiveOutSize = [\n (alignCorners && newHeight > 1) ? newHeight - 1 : newHeight,\n (alignCorners && newWidth > 1) ? newWidth - 1 : newWidth\n ];\n this.userCode = `\n const vec2 effectiveInputOverOutputRatioRC = vec2(\n ${effectiveInSize[0] / effectiveOutSize[0]},\n ${effectiveInSize[1] / effectiveOutSize[1]});\n const vec2 inputShapeRC = vec2(${oldHeight}.0, ${oldWidth}.0);\n\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int d = coords[3];\n ivec2 yRC = coords.yz;\n\n // Fractional source index.\n vec2 sourceFracIndexRC = vec2(yRC) * effectiveInputOverOutputRatioRC;\n\n // Compute the four integer indices.\n ivec2 sourceFloorRC = ivec2(sourceFracIndexRC);\n ivec2 sourceCeilRC = ivec2(\n min(inputShapeRC - 1.0, ceil(sourceFracIndexRC)));\n\n float topLeft = getA(b, sourceFloorRC.x, sourceFloorRC.y, d);\n float bottomLeft = getA(b, sourceCeilRC.x, sourceFloorRC.y, d);\n float topRight = getA(b, sourceFloorRC.x, sourceCeilRC.y, d);\n float bottomRight = getA(b, sourceCeilRC.x, sourceCeilRC.y, d);\n\n vec2 fracRC = sourceFracIndexRC - vec2(sourceFloorRC);\n\n float top = topLeft + (topRight - topLeft) * fracRC.y;\n float bottom = bottomLeft + (bottomRight - bottomLeft) * fracRC.y;\n float newValue = top + (bottom - top) * fracRC.x;\n\n setOutput(newValue);\n }\n `;\n }\n}\n//# sourceMappingURL=resize_bilinear_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class ResizeBilinearPackedProgram {\n constructor(inputShape, newHeight, newWidth, alignCorners) {\n this.variableNames = ['A'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = [];\n const [batch, oldHeight, oldWidth, depth] = inputShape;\n this.outputShape = [batch, newHeight, newWidth, depth];\n const effectiveInSize = [\n (alignCorners && newHeight > 1) ? oldHeight - 1 : oldHeight,\n (alignCorners && newWidth > 1) ? oldWidth - 1 : oldWidth\n ];\n const effectiveOutSize = [\n (alignCorners && newHeight > 1) ? newHeight - 1 : newHeight,\n (alignCorners && newWidth > 1) ? newWidth - 1 : newWidth\n ];\n this.userCode = `\n const vec3 effectiveInputOverOutputRatioRC = vec3(\n ${effectiveInSize[0] / effectiveOutSize[0]},\n ${effectiveInSize[1] / effectiveOutSize[1]},\n ${effectiveInSize[1] / effectiveOutSize[1]});\n const vec3 inputShapeRC = vec3(${oldHeight}.0, ${oldWidth}.0,\n ${oldWidth}.0);\n\n float getAValue(int b, int r, int c, int d) {\n return getChannel(getA(b, r, c, d), vec2(c, d));\n }\n\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int d = coords[3];\n // Calculate values for next column in yRC.z.\n ivec3 yRC = coords.yzz + ivec3(0, 0, 1);\n\n // Fractional source index.\n vec3 sourceFracIndexRC = vec3(yRC) * effectiveInputOverOutputRatioRC;\n\n // Compute the four integer indices.\n ivec3 sourceFloorRC = ivec3(sourceFracIndexRC);\n ivec3 sourceCeilRC = ivec3(\n min(inputShapeRC - 1.0, ceil(sourceFracIndexRC)));\n\n // Should we calculate next column and row elements in 2x2 packed cell.\n bool hasNextCol = d < ${depth - 1};\n bool hasNextRow = coords.z < ${newWidth - 1};\n\n // In parallel, construct four corners for all four components in\n // packed 2x2 cell.\n vec4 topLeft = vec4(\n getAValue(b, sourceFloorRC.x, sourceFloorRC.y, d),\n hasNextCol ? getAValue(b, sourceFloorRC.x, sourceFloorRC.y, d + 1)\n : 0.0,\n hasNextRow ? getAValue(b, sourceFloorRC.x, sourceFloorRC.z, d)\n : 0.0,\n (hasNextRow && hasNextCol) ?\n getAValue(b, sourceFloorRC.x, sourceFloorRC.z, d + 1) : 0.0);\n\n vec4 bottomLeft = vec4(\n getAValue(b, sourceCeilRC.x, sourceFloorRC.y, d),\n hasNextCol ? getAValue(b, sourceCeilRC.x, sourceFloorRC.y, d + 1)\n : 0.0,\n hasNextRow ? getAValue(b, sourceCeilRC.x, sourceFloorRC.z, d)\n : 0.0,\n (hasNextRow && hasNextCol) ?\n getAValue(b, sourceCeilRC.x, sourceFloorRC.z, d + 1) : 0.0);\n\n vec4 topRight = vec4(\n getAValue(b, sourceFloorRC.x, sourceCeilRC.y, d),\n hasNextCol ? getAValue(b, sourceFloorRC.x, sourceCeilRC.y, d + 1)\n : 0.0,\n hasNextRow ? getAValue(b, sourceFloorRC.x, sourceCeilRC.z, d)\n : 0.0,\n (hasNextRow && hasNextCol) ?\n getAValue(b, sourceFloorRC.x, sourceCeilRC.z, d + 1) : 0.0);\n\n vec4 bottomRight = vec4(\n getAValue(b, sourceCeilRC.x, sourceCeilRC.y, d),\n hasNextCol ? 
getAValue(b, sourceCeilRC.x, sourceCeilRC.y, d + 1)\n : 0.0,\n hasNextRow ? getAValue(b, sourceCeilRC.x, sourceCeilRC.z, d)\n : 0.0,\n (hasNextRow && hasNextCol) ?\n getAValue(b, sourceCeilRC.x, sourceCeilRC.z, d + 1) : 0.0);\n\n vec3 fracRC = sourceFracIndexRC - vec3(sourceFloorRC);\n\n vec4 top = mix(topLeft, topRight, fracRC.yyzz);\n vec4 bottom = mix(bottomLeft, bottomRight, fracRC.yyzz);\n vec4 newValue = mix(top, bottom, fracRC.x);\n\n setOutput(newValue);\n }\n `;\n }\n}\n//# sourceMappingURL=resize_bilinear_packed_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class ResizeNearestNeigborBackpropProgram {\n constructor(dy, x, alignCorners) {\n this.variableNames = ['dy'];\n this.outputShape = [];\n this.outputShape = x.shape;\n const [, xHeight, xWidth,] = x.shape;\n const [, yHeight, yWidth] = dy.shape;\n // In the backwards pass, we want to find the pixels that were generated for\n // each pixel in the input image the forward pass and add the corresponding\n // coefficient from dy to the gradient (with some interpolation).\n const effectiveXSize = [\n (alignCorners && yHeight > 1) ? xHeight - 1 : xHeight,\n (alignCorners && yWidth > 1) ? xWidth - 1 : xWidth\n ];\n const effectiveYSize = [\n (alignCorners && yHeight > 1) ? yHeight - 1 : yHeight,\n (alignCorners && yWidth > 1) ? 
yWidth - 1 : yWidth\n ];\n const heightScale = effectiveXSize[0] / effectiveYSize[0];\n const widthScale = effectiveXSize[1] / effectiveYSize[1];\n const invHeightScale = 1 / heightScale;\n const invWidthScale = 1 / widthScale;\n // This defines the size of the window of values around a particular\n // index in dy that we want to search for contributions to dx.\n const winHeight = (Math.ceil(invHeightScale) * 2) + 2;\n const winWidth = (Math.ceil(invWidthScale) * 2) + 2;\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int d = coords[3];\n int r = coords[1];\n int c = coords[2];\n\n float accumulator = 0.0;\n\n const float heightScale = float(${heightScale});\n const float widthScale = float(${widthScale});\n\n const float invHeightScale = float(${invHeightScale});\n const float invWidthScale = float(${invWidthScale});\n\n const int winHeight = int(${winHeight});\n const int winWidth = int(${winWidth});\n\n // Compute bounds for where in dy we will look\n float startRLerp = floor(float(r) * invHeightScale);\n int startDyR = int(floor(startRLerp - float(winHeight / 2)));\n\n float startCLerp = floor(float(c) * invWidthScale);\n int startDyC = int(floor(startCLerp - float(winWidth / 2)));\n\n // Loop over dy\n for (int dyROffset = 0; dyROffset < winHeight; dyROffset++) {\n int dyR = dyROffset + startDyR;\n\n // Guard against the window exceeding the bounds of dy\n if (dyR < 0 || dyR >= ${yHeight}) {\n continue;\n }\n\n for (int dyCOffset = 0; dyCOffset < winWidth; dyCOffset++) {\n int dyC = dyCOffset + startDyC;\n\n // Guard against the window exceeding the bounds of dy\n if (dyC < 0 || dyC >= ${yWidth}) {\n continue;\n }\n\n float sourceFracRow =\n float(${effectiveXSize[0]}) *\n (float(dyR) / float(${effectiveYSize[0]}));\n\n float sourceFracCol =\n float(${effectiveXSize[1]}) *\n (float(dyC) / float(${effectiveYSize[1]}));\n\n int sourceNearestRow = int(min(\n float(int(${xHeight}) - 1),\n ${alignCorners} ? float(round(sourceFracRow)) :\n float(floor(sourceFracRow))));\n\n int sourceNearestCol = int(min(\n float(int(${xWidth}) - 1),\n ${alignCorners} ? float(round(sourceFracCol)) :\n float(floor(sourceFracCol))));\n\n if (r == sourceNearestRow && c == sourceNearestCol) {\n accumulator += getDy(b, dyR, dyC, d);\n }\n }\n }\n // End loop over dy\n\n setOutput(accumulator);\n }\n `;\n }\n}\n//# sourceMappingURL=resize_nearest_neighbor_backprop_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class ResizeNearestNeighborProgram {\n constructor(inputShape, newHeight, newWidth, alignCorners) {\n this.variableNames = ['A'];\n this.outputShape = [];\n const [batch, oldHeight, oldWidth, depth] = inputShape;\n this.outputShape = [batch, newHeight, newWidth, depth];\n const effectiveInSize = [\n (alignCorners && newHeight > 1) ? 
oldHeight - 1 : oldHeight,\n (alignCorners && newWidth > 1) ? oldWidth - 1 : oldWidth\n ];\n const effectiveOutSize = [\n (alignCorners && newHeight > 1) ? newHeight - 1 : newHeight,\n (alignCorners && newWidth > 1) ? newWidth - 1 : newWidth\n ];\n // When align corners is false, we rounds the value with floor.\n const roundBase = alignCorners ? '0.5' : '0.0';\n this.userCode = `\n const vec2 effectiveInputOverOutputRatioRC = vec2(\n ${effectiveInSize[0] / effectiveOutSize[0]},\n ${effectiveInSize[1] / effectiveOutSize[1]});\n const vec2 inputShapeRC = vec2(${oldHeight}.0, ${oldWidth}.0);\n\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int d = coords[3];\n ivec2 yRC = coords.yz;\n\n // Fractional source index.\n vec2 sourceFracIndexRC = vec2(yRC) * effectiveInputOverOutputRatioRC;\n\n // Compute the coordinators of nearest neighbor point.\n ivec2 sourceNearestRC = ivec2(\n min(inputShapeRC - 1.0, floor(sourceFracIndexRC + ${roundBase})));\n\n float newValue = getA(b, sourceNearestRC.x, sourceNearestRC.y, d);\n\n setOutput(newValue);\n }\n `;\n }\n}\n//# sourceMappingURL=resize_nearest_neighbor_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getCoordsDataType } from './shader_compiler';\nexport class ReverseProgram {\n constructor(xShape, axis) {\n this.variableNames = ['x'];\n const rank = xShape.length;\n if (rank > 4) {\n throw new Error(`WebGL backend: Reverse of rank-${rank} tensor is not yet supported`);\n }\n this.outputShape = xShape;\n if (rank === 1) {\n this.userCode = `\n void main() {\n int coord = getOutputCoords();\n setOutput(getX(${xShape[0]} - coord - 1));\n }\n `;\n return;\n }\n const getInCoord = (i) => {\n if (axis.indexOf(i) !== -1 && xShape[i] !== 1) {\n return `${xShape[i]} - coords[${i}] - 1`;\n }\n return `coords[${i}]`;\n };\n const inCoords = xShape.map((_, i) => getInCoord(i)).join(',');\n const type = getCoordsDataType(rank);\n this.userCode = `\n void main() {\n ${type} coords = getOutputCoords();\n setOutput(getX(${inCoords}));\n }\n `;\n }\n}\n//# sourceMappingURL=reverse_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getChannels } from './packing_util';\nimport { getCoordsDataType } from './shader_compiler';\nexport class ReversePackedProgram {\n constructor(xShape, axis) {\n this.variableNames = ['x'];\n this.packedInputs = true;\n this.packedOutput = true;\n const rank = xShape.length;\n if (rank > 4) {\n throw new Error(`WebGL backend: Reverse of rank-${rank} tensor is not yet supported`);\n }\n this.outputShape = xShape;\n const channels = getChannels('rc', rank);\n const nextColumn = `${channels[rank - 1]} + 1 < ${this.outputShape[rank - 1]}`;\n const nextRow = `${channels[rank - 2]} + 1 < ${this.outputShape[rank - 2]}`;\n const type = getCoordsDataType(rank);\n if (rank === 1) {\n this.userCode = `\n void main(){\n int rc = getOutputCoords();\n vec4 result = vec4(0.);\n result.r = getChannel(getX(${xShape[0]} - rc - 1),\n ${xShape[0]} - rc - 1);\n if(${nextColumn}){\n result.g = getChannel(getX(${xShape[0]} - (rc + 1) - 1),\n ${xShape[0]} - (rc + 1) - 1);\n }\n setOutput(result);\n }\n `;\n }\n else {\n this.userCode = `\n void main() {\n ${type} rc = getOutputCoords();\n vec4 result = vec4(0.);\n result.r = ${getR(channels.slice())};\n if(${nextColumn}){\n result.g = ${getG(channels.slice())};\n }\n if(${nextRow}) {\n result.b = ${getB(channels.slice())};\n if(${nextColumn}) {\n result.a = ${getA(channels.slice())};\n }\n }\n setOutput(result);\n }\n `;\n }\n function getR(channels) {\n return getChannel(channels);\n }\n function getG(channels) {\n channels[rank - 1] = '(' + channels[rank - 1] + ` + 1)`;\n return getChannel(channels);\n }\n function getB(channels) {\n channels[rank - 2] = '(' + channels[rank - 2] + ` + 1)`;\n return getChannel(channels);\n }\n function getA(channels) {\n channels[rank - 1] = '(' + channels[rank - 1] + ` + 1)`;\n channels[rank - 2] = '(' + channels[rank - 2] + ` + 1)`;\n return getChannel(channels);\n }\n function getChannel(channels) {\n const inCoordsArray = xShape.map((_, i) => getInCoord(i, channels));\n const inCoords = inCoordsArray.join(',');\n const innerDims = inCoordsArray.slice(-2).join(',');\n return `getChannel(getX(${inCoords}), vec2(${innerDims}))`;\n }\n function getInCoord(i, channels1) {\n if (axis.indexOf(i) !== -1 && xShape[i] !== 1) {\n return `${xShape[i]} - ${channels1[i]} - 1`;\n }\n else {\n return `${channels1[i]}`;\n }\n }\n }\n}\n//# sourceMappingURL=reverse_packed_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getCoordsDataType } from './shader_compiler';\nexport class ScatterProgram {\n constructor(updateSize, sliceDim, indicesRank, updatesRank, strides, shape, summingDupeIndex = true) {\n this.variableNames = ['updates', 'indices', 'defaultValue'];\n this.outputShape = shape;\n const stridesType = getCoordsDataType(strides.length);\n const dtype = getCoordsDataType(shape.length);\n let indicesString = '';\n if (indicesRank === 1) {\n indicesString = 'i';\n }\n else if (indicesRank === 2) {\n indicesString = 'i, j';\n }\n const indicesSnippet = `getIndices(${indicesString})`;\n let updatesString = '';\n if (updatesRank === 1) {\n updatesString = 'i';\n }\n else if (updatesRank === 2) {\n updatesString = 'i, coords[1]';\n }\n const updatesSnippet = `getUpdates(${updatesString})`;\n const strideString = sliceDim > 1 ? 'strides[j]' : 'strides';\n this.userCode = `\n ${stridesType} strides = ${stridesType}(${strides});\n\n void main() {\n ${dtype} coords = getOutputCoords();\n float sum = 0.0;\n bool found = false;\n for (int i = 0; i < ${updateSize}; i++) {\n int flattenedIndex = 0;\n for (int j = 0; j < ${sliceDim}; j++) {\n int index = round(${indicesSnippet});\n flattenedIndex += index * ${strideString};\n }\n if (flattenedIndex == coords[0]) {\n sum += ${updatesSnippet};\n found = true;\n }\n }\n setOutput(mix(getDefaultValue(), sum, float(found)));\n }\n `;\n }\n}\n//# sourceMappingURL=scatter_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class SegmentOpProgram {\n constructor(segOpInfo, segOpType) {\n this.variableNames = ['x', 'segmentIds'];\n const windowSize = segOpInfo.windowSize;\n const batchSize = segOpInfo.batchSize;\n const inSize = segOpInfo.inSize;\n const numSegments = segOpInfo.numSegments;\n const outSize = numSegments * Math.ceil(inSize / windowSize);\n this.outputShape = [batchSize, outSize];\n const initializationValue = '0.0';\n const returnValue = `sumValue`;\n const windowSizeNearestVec4 = Math.floor(windowSize / 4) * 4;\n const windowSizeVec4Remainder = windowSize % 4;\n const updateSnippet = `\n sumValue += dot(values, segFilter);\n `;\n let checkValueOutOfBounds = '';\n if (inSize % windowSize > 0) {\n checkValueOutOfBounds = `\n if (inIdx < 0 || inIdx >= ${inSize}) {\n return initializationValue;\n }\n `;\n }\n let checkSegmentIdOutOfBounds = '';\n if (inSize % windowSize > 0) {\n checkSegmentIdOutOfBounds = `\n if (inIdx < 0 || inIdx >= ${inSize}) {\n return -1.0;\n }\n `;\n }\n this.userCode = `\n const float initializationValue = ${initializationValue};\n\n float getValue(int batch, int inIdx) {\n ${checkValueOutOfBounds}\n return getX(batch, inIdx);\n }\n\n float getSegmentIdAtIndex(int inIdx) {\n ${checkSegmentIdOutOfBounds}\n return getSegmentIds(inIdx);\n }\n\n void main() {\n ivec2 coords = getOutputCoords();\n int batch = coords[0];\n int outIdx = coords[1];\n int inOffset = int(floor(float(outIdx) / float(\n ${numSegments})) * float(${windowSize}));\n int currentSeg = int(mod(float(outIdx), float(${numSegments})));\n\n float sumValue = 0.0;\n\n for (int i = 0; i < ${windowSizeNearestVec4}; i += 4) {\n int inIdx = inOffset + i;\n vec4 values = vec4(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1),\n getValue(batch, inIdx + 2),\n getValue(batch, inIdx + 3)\n );\n\n vec4 segFilter = vec4(\n int(getSegmentIdAtIndex(inIdx)) == currentSeg ? 1 : 0,\n int(getSegmentIdAtIndex(inIdx + 1)) == currentSeg ? 1 : 0,\n int(getSegmentIdAtIndex(inIdx + 2)) == currentSeg ? 1 : 0,\n int(getSegmentIdAtIndex(inIdx + 3)) == currentSeg ? 1 : 0\n );\n\n ${updateSnippet}\n }\n\n int inIdx = inOffset + ${windowSizeNearestVec4};\n if (${windowSizeVec4Remainder === 1}) {\n vec4 values = vec4(\n getValue(batch, inIdx),\n initializationValue,\n initializationValue,\n initializationValue\n );\n\n int inIdxSeg = int(getSegmentIdAtIndex(inIdx));\n\n vec4 segFilter = vec4(\n int(getSegmentIdAtIndex(inIdx)) == currentSeg ? 1 : 0,\n 0,\n 0,\n 0\n );\n\n ${updateSnippet}\n } else if (${windowSizeVec4Remainder === 2}) {\n vec4 values = vec4(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1),\n initializationValue,\n initializationValue\n );\n\n vec4 segFilter = vec4(\n int(getSegmentIdAtIndex(inIdx)) == currentSeg ? 1 : 0,\n int(getSegmentIdAtIndex(inIdx + 1)) == currentSeg ? 
1 : 0,\n 0,\n 0\n );\n\n ${updateSnippet}\n } else if (${windowSizeVec4Remainder === 3}) {\n vec4 values = vec4(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1),\n getValue(batch, inIdx + 2),\n initializationValue\n );\n\n vec4 segFilter = vec4(\n int(getSegmentIdAtIndex(inIdx)) == currentSeg ? 1 : 0,\n int(getSegmentIdAtIndex(inIdx + 1)) == currentSeg ? 1 : 0,\n int(getSegmentIdAtIndex(inIdx + 2)) == currentSeg ? 1 : 0,\n 0\n );\n\n ${updateSnippet}\n }\n setOutput(${returnValue});\n }\n `;\n }\n}\n//# sourceMappingURL=segment_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getCoordsDataType } from './shader_compiler';\nexport class SelectProgram {\n constructor(cRank, shape, rank) {\n this.variableNames = ['c', 'a', 'b'];\n this.outputShape = shape;\n let cCoords;\n let abCoords;\n if (rank > 4) {\n throw Error(`Where for rank ${rank} is not yet supported`);\n }\n if (rank === 1) {\n abCoords = `resRC`;\n cCoords = `resRC`;\n }\n else {\n const currentCoords = ['resRC.x', 'resRC.y', 'resRC.z', 'resRC.w'];\n const cCoordVars = [];\n const abCoordVars = [];\n for (let i = 0; i < shape.length; i++) {\n abCoordVars.push(`${currentCoords[i]}`);\n if (i < cRank) {\n cCoordVars.push(`${currentCoords[i]}`);\n }\n }\n cCoords = cCoordVars.join();\n abCoords = abCoordVars.join();\n }\n const dtype = getCoordsDataType(rank);\n this.userCode = `\n void main() {\n ${dtype} resRC = getOutputCoords();\n float cVal = getC(${cCoords});\n if (cVal >= 1.0) {\n setOutput(getA(${abCoords}));\n } else {\n setOutput(getB(${abCoords}));\n }\n }\n `;\n }\n}\n//# sourceMappingURL=select_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getCoordsDataType } from './shader_compiler';\nexport class SliceProgram {\n constructor(destSize) {\n this.variableNames = ['source'];\n this.outputShape = destSize;\n this.rank = destSize.length;\n const dtype = getCoordsDataType(this.rank);\n const uniformPart = `uniform int start[${this.rank}];`;\n const sourceCoords = getCoords(this.rank);\n let body;\n const coordSum = destSize.map((_, i) => {\n return `sourceLoc.${coords[i]} = start[${i}] + coords.${coords[i]};`;\n });\n body = `\n ${dtype} sourceLoc;\n ${dtype} coords = getOutputCoords();\n ${coordSum.join('\\n')}\n `;\n this.userCode = `\n ${uniformPart}\n void main() {\n ${body}\n setOutput(getSource(${sourceCoords}));\n }\n `;\n }\n getCustomSetupFunc(start) {\n if (start.length !== this.rank) {\n throw Error(`The rank (${this.rank}) of the program must match the ` +\n `length of start (${start.length})`);\n }\n return (gpgpu, webGLProgram) => {\n if (this.startLoc == null) {\n this.startLoc = gpgpu.getUniformLocationNoThrow(webGLProgram, 'start');\n if (this.startLoc == null) {\n // This means the compiler has optimized and realized it doesn't need\n // the uniform.\n return;\n }\n }\n gpgpu.gl.uniform1iv(this.startLoc, start);\n };\n }\n}\nconst coords = ['x', 'y', 'z', 'w', 'u', 'v'];\nfunction getCoords(rank) {\n if (rank === 1) {\n return 'sourceLoc';\n }\n else if (rank <= 6) {\n return coords.slice(0, rank).map(x => 'sourceLoc.' + x).join(',');\n }\n else {\n throw Error(`Slicing for rank ${rank} is not yet supported`);\n }\n}\n//# sourceMappingURL=slice_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getChannels } from './packing_util';\nimport { getCoordsDataType } from './shader_compiler';\nexport class SlicePackedProgram {\n constructor(destSize) {\n this.variableNames = ['source'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = destSize;\n this.rank = destSize.length;\n const dtype = getCoordsDataType(this.rank);\n const coords = getChannels('coords', this.rank);\n const sourceLoc = getChannels('sourceLoc', this.rank);\n const innerDims = this.rank === 1 ? 
'sourceLoc' : `vec2(${sourceLoc.slice(-2).join()})`;\n const getChannel = `getChannel(getSource(${sourceLoc.join()}), ${innerDims})`;\n const upperRow = `\n result.x = ${getChannel};\n if (++${coords[this.rank - 1]} < ${destSize[this.rank - 1]}) {\n ++${sourceLoc[this.rank - 1]};\n result.y = ${getChannel};\n --${sourceLoc[this.rank - 1]};\n }\n `;\n const lowerRow = this.rank === 1 ? '' : `\n --${coords[this.rank - 1]};\n if (++${coords[this.rank - 2]} < ${destSize[this.rank - 2]}) {\n ++${sourceLoc[this.rank - 2]};\n result.z = ${getChannel};\n if (++${coords[this.rank - 1]} < ${destSize[this.rank - 1]}) {\n ++${sourceLoc[this.rank - 1]};\n result.w = ${getChannel};\n }\n }\n `;\n const sourceLocSetup = this.rank <= 4 ?\n `sourceLoc = coords +\n ${dtype}(${destSize.map((_, i) => `start[${i}]`).join()});` :\n destSize.map((_, i) => `${sourceLoc[i]} = ${coords[i]} + start[${i}];`)\n .join('\\n');\n this.userCode = `\n uniform int start[${this.rank}];\n void main() {\n ${dtype} coords = getOutputCoords();\n ${dtype} sourceLoc;\n ${sourceLocSetup}\n vec4 result = vec4(0.);\n ${upperRow}\n ${lowerRow}\n setOutput(result);\n }\n `;\n }\n getCustomSetupFunc(start) {\n if (start.length !== this.rank) {\n throw Error(`The rank (${this.rank}) of the program must match the ` +\n `length of start (${start.length})`);\n }\n return (gpgpu, webGLProgram) => {\n if (this.startLoc == null) {\n this.startLoc = gpgpu.getUniformLocationNoThrow(webGLProgram, 'start');\n if (this.startLoc == null) {\n // This means the compiler has optimized and realized it doesn't need\n // the uniform.\n return;\n }\n }\n gpgpu.gl.uniform1iv(this.startLoc, start);\n };\n }\n}\n//# sourceMappingURL=slice_packed_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getCoordsDataType } from './shader_compiler';\nexport class StridedSliceProgram {\n constructor(begin, strides, size) {\n this.variableNames = ['x'];\n this.outputShape = size;\n const rank = size.length;\n const inputDtype = getCoordsDataType(size.length);\n const dtype = getCoordsDataType(size.length);\n let newCoords = '';\n if (rank === 1) {\n newCoords = 'coords * strides + begin';\n }\n else {\n let outputAxis = 0;\n newCoords =\n size.map((_, i) => {\n outputAxis++;\n return size.length === 1 ?\n `coords * strides[${i}] + begin[${i}]` :\n `coords[${outputAxis - 1}] * strides[${i}] + begin[${i}]`;\n })\n .join(',');\n }\n this.userCode = `\n ${inputDtype} begin = ${inputDtype}(${begin});\n ${inputDtype} strides = ${inputDtype}(${strides});\n\n void main() {\n ${dtype} coords = getOutputCoords();\n setOutput(getX(${newCoords}));\n }\n `;\n }\n}\n//# sourceMappingURL=strided_slice_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from '@tensorflow/tfjs-core';\nimport { getInternalFormatForFloat16MatrixTexture, getInternalFormatForFloat16PackedMatrixTexture, getInternalFormatForFloat32MatrixTexture, getInternalFormatForPackedMatrixTexture, getInternalFormatForUnsignedBytesMatrixTexture } from './gpgpu_util';\nimport { getPackedMatrixTextureShapeWidthHeight, getUnpackedMatrixTextureShapeWidthHeight, PhysicalTextureType, TextureUsage } from './tex_util';\nexport class TextureManager {\n constructor(gpgpu) {\n this.gpgpu = gpgpu;\n this.numUsedTextures = 0;\n this.numFreeTextures = 0;\n this._numBytesAllocated = 0;\n this._numBytesFree = 0; // How many bytes that have been allocated\n // are available for reuse.\n this.freeTextures = {};\n this.logEnabled = false;\n this.usedTextures = {};\n }\n acquireTexture(shapeRC, usage, isPacked) {\n const physicalTexType = getPhysicalFromLogicalTextureType(usage, isPacked);\n const shapeKey = getKeyFromTextureShape(shapeRC, physicalTexType, isPacked);\n if (!(shapeKey in this.freeTextures)) {\n this.freeTextures[shapeKey] = [];\n }\n if (!(shapeKey in this.usedTextures)) {\n this.usedTextures[shapeKey] = [];\n }\n const texBytes = computeBytes(shapeRC, physicalTexType, this.gpgpu.gl, this.gpgpu.textureConfig, isPacked);\n if (this.freeTextures[shapeKey].length > 0) {\n this.numFreeTextures--;\n this.numUsedTextures++;\n this._numBytesFree -= texBytes;\n this.log();\n const newTexture = this.freeTextures[shapeKey].shift();\n this.usedTextures[shapeKey].push(newTexture);\n return newTexture;\n }\n let newTexture;\n if (physicalTexType === PhysicalTextureType.PACKED_2X2_FLOAT32) {\n newTexture = this.gpgpu.createPackedMatrixTexture(shapeRC[0], shapeRC[1]);\n }\n else if (physicalTexType === PhysicalTextureType.PACKED_2X2_FLOAT16) {\n newTexture =\n this.gpgpu.createFloat16PackedMatrixTexture(shapeRC[0], shapeRC[1]);\n }\n else if (physicalTexType === PhysicalTextureType.UNPACKED_FLOAT32) {\n newTexture =\n this.gpgpu.createFloat32MatrixTexture(shapeRC[0], shapeRC[1]);\n }\n else if (physicalTexType === PhysicalTextureType.UNPACKED_FLOAT16) {\n newTexture =\n this.gpgpu.createFloat16MatrixTexture(shapeRC[0], shapeRC[1]);\n }\n else if (physicalTexType === PhysicalTextureType.PACKED_4X1_UNSIGNED_BYTE) {\n newTexture =\n this.gpgpu.createUnsignedBytesMatrixTexture(shapeRC[0], shapeRC[1]);\n }\n this.usedTextures[shapeKey].push(newTexture);\n this.numUsedTextures++;\n this._numBytesAllocated += texBytes;\n this.log();\n return newTexture;\n }\n releaseTexture(texture, shape, logicalTexType, isPacked) {\n if (this.freeTextures == null) {\n // Already disposed.\n return;\n }\n const physicalTexType = getPhysicalFromLogicalTextureType(logicalTexType, isPacked);\n const shapeKey = getKeyFromTextureShape(shape, physicalTexType, isPacked);\n if (!(shapeKey in this.freeTextures)) {\n this.freeTextures[shapeKey] = 
[];\n }\n const texBytes = computeBytes(shape, physicalTexType, this.gpgpu.gl, this.gpgpu.textureConfig, isPacked);\n const deleteTexThreshold = env().get('WEBGL_DELETE_TEXTURE_THRESHOLD');\n if (deleteTexThreshold !== -1 &&\n this._numBytesAllocated > deleteTexThreshold) {\n this.gpgpu.deleteMatrixTexture(texture);\n this._numBytesAllocated -= texBytes;\n }\n else {\n this.freeTextures[shapeKey].push(texture);\n this.numFreeTextures++;\n this._numBytesFree += texBytes;\n }\n this.numUsedTextures--;\n const texList = this.usedTextures[shapeKey];\n const texIndex = texList.indexOf(texture);\n if (texIndex < 0) {\n throw new Error('Cannot release a texture that was never provided by this ' +\n 'texture manager');\n }\n texList.splice(texIndex, 1);\n this.log();\n }\n log() {\n if (!this.logEnabled) {\n return;\n }\n const total = this.numFreeTextures + this.numUsedTextures;\n console.log('Free/Used', `${this.numFreeTextures} / ${this.numUsedTextures}`, `(${total})`);\n const freeRatio = this._numBytesFree / this._numBytesAllocated;\n console.log(`Bytes allocated: ${this._numBytesAllocated}`);\n console.log(`Bytes unused: ${this._numBytesFree} (${Math.round(100 * freeRatio)}%)`);\n }\n get numBytesAllocated() {\n return this._numBytesAllocated;\n }\n get numBytesFree() {\n return this._numBytesFree;\n }\n getNumUsedTextures() {\n return this.numUsedTextures;\n }\n getNumFreeTextures() {\n return this.numFreeTextures;\n }\n dispose() {\n if (this.freeTextures == null) {\n // Already disposed.\n return;\n }\n for (const texShape in this.freeTextures) {\n this.freeTextures[texShape].forEach(tex => {\n this.gpgpu.deleteMatrixTexture(tex);\n });\n }\n for (const texShape in this.usedTextures) {\n this.usedTextures[texShape].forEach(tex => {\n this.gpgpu.deleteMatrixTexture(tex);\n });\n }\n this.freeTextures = null;\n this.usedTextures = null;\n this.numUsedTextures = 0;\n this.numFreeTextures = 0;\n this._numBytesAllocated = 0;\n this._numBytesFree = 0;\n }\n}\nfunction numBytesForInternalFormat(gl, internalFormat) {\n // tslint:disable-next-line:no-any\n const glany = gl;\n if (internalFormat === glany.R32F) {\n return 4;\n }\n else if (internalFormat === glany.R16F) {\n return 2;\n }\n else if (internalFormat === glany.RGBA32F) {\n return 16;\n }\n else if (internalFormat === gl.RGBA) {\n return 16;\n }\n else if (internalFormat === glany.RGBA16F) {\n return 8;\n }\n throw new Error(`Unknown internal format ${internalFormat}`);\n}\nexport function computeBytes(shape, physicalTexType, gl, textureConfig, isPacked) {\n // It is not possible to infer packed status from the texture type because\n // depending on the textureConfig, different texture types may resolve to the\n // same internal format (e.g. in WebGL1, the internal format for\n // UNPACKED_FLOAT16 textures is gl.RGBA). 
Therefore we pass in `isPacked`\n // explicitly.\n const internalFormat = internalFormatForPhysicalTexType(physicalTexType, textureConfig);\n let numElements;\n if (isPacked) {\n const [packedWidth, packedHeight] = getPackedMatrixTextureShapeWidthHeight(shape[0], shape[1]);\n numElements = packedWidth * packedHeight;\n }\n else {\n const [width, height] = getUnpackedMatrixTextureShapeWidthHeight(shape[0], shape[1]);\n numElements = width * height;\n }\n const bytesPerElement = numBytesForInternalFormat(gl, internalFormat);\n return numElements * bytesPerElement;\n}\nfunction internalFormatForPhysicalTexType(physicalTexType, textureConfig) {\n switch (physicalTexType) {\n case PhysicalTextureType.PACKED_2X2_FLOAT32:\n return getInternalFormatForPackedMatrixTexture(textureConfig);\n case PhysicalTextureType.PACKED_2X2_FLOAT16:\n return getInternalFormatForFloat16PackedMatrixTexture(textureConfig);\n case PhysicalTextureType.UNPACKED_FLOAT32:\n return getInternalFormatForFloat32MatrixTexture(textureConfig);\n case PhysicalTextureType.UNPACKED_FLOAT16:\n return getInternalFormatForFloat16MatrixTexture(textureConfig);\n case PhysicalTextureType.PACKED_4X1_UNSIGNED_BYTE:\n return getInternalFormatForUnsignedBytesMatrixTexture(textureConfig);\n default:\n throw new Error(`Unknown physical texture type ${physicalTexType}`);\n }\n}\nfunction getPhysicalTextureForRendering(isPacked) {\n if (env().getBool('WEBGL_RENDER_FLOAT32_ENABLED')) {\n if (isPacked) {\n return PhysicalTextureType.PACKED_2X2_FLOAT32;\n }\n return PhysicalTextureType.UNPACKED_FLOAT32;\n }\n if (isPacked) {\n return PhysicalTextureType.PACKED_2X2_FLOAT16;\n }\n return PhysicalTextureType.UNPACKED_FLOAT16;\n}\nfunction getPhysicalFromLogicalTextureType(logicalTexType, isPacked) {\n if (logicalTexType === TextureUsage.UPLOAD) {\n return PhysicalTextureType.PACKED_2X2_FLOAT32;\n }\n else if (logicalTexType === TextureUsage.RENDER || logicalTexType == null) {\n return getPhysicalTextureForRendering(isPacked);\n }\n else if (logicalTexType === TextureUsage.DOWNLOAD ||\n logicalTexType === TextureUsage.PIXELS) {\n return PhysicalTextureType.PACKED_4X1_UNSIGNED_BYTE;\n }\n throw new Error(`Unknown logical texture type ${logicalTexType}`);\n}\nfunction getKeyFromTextureShape(shapeRowsCol, physicalTexType, isPacked) {\n return `${shapeRowsCol[0]}_${shapeRowsCol[1]}_${physicalTexType}_${isPacked}`;\n}\n//# sourceMappingURL=texture_manager.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getCoordsDataType } from './shader_compiler';\nexport class TileProgram {\n constructor(aShape, reps) {\n this.variableNames = ['A'];\n const outputShape = new Array(aShape.length);\n for (let i = 0; i < outputShape.length; i++) {\n outputShape[i] = aShape[i] * reps[i];\n }\n this.outputShape = outputShape;\n this.rank = outputShape.length;\n const dtype = getCoordsDataType(this.rank);\n const sourceCoords = getSourceCoords(aShape);\n this.userCode = `\n void main() {\n ${dtype} resRC = getOutputCoords();\n setOutput(getA(${sourceCoords}));\n }\n `;\n }\n}\nfunction getSourceCoords(aShape) {\n const rank = aShape.length;\n if (rank > 5) {\n throw Error(`Tile for rank ${rank} is not yet supported`);\n }\n if (rank === 1) {\n return `imod(resRC, ${aShape[0]})`;\n }\n const currentCoords = ['resRC.x', 'resRC.y', 'resRC.z', 'resRC.w', 'resRC.u'];\n const sourceCoords = [];\n for (let i = 0; i < aShape.length; i++) {\n sourceCoords.push(`imod(${currentCoords[i]}, ${aShape[i]})`);\n }\n return sourceCoords.join();\n}\n//# sourceMappingURL=tile_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util } from '@tensorflow/tfjs-core';\nexport class UnaryOpProgram {\n constructor(aShape, opSnippet) {\n this.variableNames = ['A'];\n this.outputShape = aShape;\n this.userCode = `\n float unaryOperation(float x) {\n ${opSnippet}\n }\n\n void main() {\n float x = getAAtOutCoords();\n float y = unaryOperation(x);\n\n setOutput(y);\n }\n `;\n }\n}\nconst CHECK_NAN_SNIPPET = `if (isnan(x)) return x;`;\nexport const LINEAR = `return x;`;\nexport const ABS = `return abs(x);`;\nexport const RELU = CHECK_NAN_SNIPPET + `\n return (x < 0.0) ? 0.0 : x;\n`;\nexport const RELU6 = CHECK_NAN_SNIPPET + `\n return (x < 0.0) ? 0.0 : min(6.0, x);\n`;\nexport const ELU = `return (x >= 0.0) ? x : (exp(x) - 1.0);`;\nexport const SELU = `\n // Stable and Attracting Fixed Point (0, 1) for Normalized Weights.\n // see: https://arxiv.org/abs/1706.02515\n float scaleAlpha = ${backend_util.SELU_SCALEALPHA};\n float scale = ${backend_util.SELU_SCALE};\n return (x >= 0.0) ? 
scale * x : scaleAlpha * (exp(x) - 1.0);\n`;\nexport function STEP(alpha = 0.0) {\n return CHECK_NAN_SNIPPET + `\n return x > 0.0 ? 1.0 : float(${alpha});\n `;\n}\nexport const NEG = `return -x;`;\nexport const CEIL = `return ceil(x);`;\nexport const FLOOR = `return floor(x);`;\nexport const SIGN = `\n if (isnan(x)) { return 0.0; }\n return sign(x);\n`;\nexport const IS_NAN = `return float(isnan(x));`;\nexport const IS_INF = `return float(isinf(x));`;\nexport const IS_FINITE = `return float(!isnan(x) && !isinf(x));`;\nexport const ROUND = `\n // OpenGL ES does not support round function.\n // The algorithm is based on banker's rounding.\n float base = floor(x);\n if ((x - base) < 0.5) {\n return floor(x);\n } else if ((x - base) > 0.5) {\n return ceil(x);\n } else {\n if (mod(base, 2.0) == 0.0) {\n return base;\n } else {\n return base + 1.0;\n }\n }\n`;\nexport const EXP = `return exp(x);`;\nexport const EXPM1 = `return exp(x) - 1.0;`;\nexport const LOG = `if (x < 0.0) return NAN;\n return log(x);`;\nexport const LOG1P = `return log(1.0 + x);`;\nexport const SQRT = `return sqrt(x);`;\nexport const RSQRT = `return inversesqrt(x);`;\nexport const SIGMOID = `return 1.0 / (1.0 + exp(-1.0 * x));`;\n/**\n * mirrors the implementation of tf.nn.softplus: https://goo.gl/vkcvwX\n *\n * epsilon is the difference between 1.0 and the next representable\n * float. For a single precision 32 bit float this should be 2^-23, see:\n * https://math.byu.edu/~schow/work/IEEEFloatingPoint.htm\n *\n * too_large = (x > -threshold) is value above which exp(x) may overflow\n * but softplus(x) == x is within machine epsilon\n *\n * too_small = (x < threshold) is value below which exp(x) may underflow,\n * but softplus(x) == exp(x) is within machine epsilon.\n */\nexport const SOFTPLUS = `\n float epsilon = 1.1920928955078125e-7;\n float threshold = log(epsilon) + 2.0;\n\n bool too_large = x > -threshold;\n bool too_small = x < threshold;\n\n float result;\n float exp_x = exp(x);\n\n if (too_large){\n result = x;\n }\n else if (too_small){\n result = exp_x;\n }\n else{\n result = log(exp_x + 1.0);\n }\n return result;\n`;\nexport const ASIN = CHECK_NAN_SNIPPET + `\n if (abs(x) > 1.) {\n return NAN;\n }\n return asin(x);\n`;\nexport const ACOS = CHECK_NAN_SNIPPET + `\n if (abs(x) > 1.) 
{\n return NAN;\n }\n return acos(x);\n`;\nexport const ATAN = CHECK_NAN_SNIPPET + `\n return atan(x);\n`;\nexport const SINH = `\n float e2x = exp(x);\n return (e2x - 1.0 / e2x) / 2.0;\n`;\nexport const COSH = `\n float e2x = exp(-x);\n return (e2x + 1.0 / e2x) / 2.0;\n`;\nexport const TANH = `\n float e2x = exp(-2.0 * abs(x));\n return sign(x) * (1.0 - e2x) / (1.0 + e2x);\n`;\nexport const ASINH = CHECK_NAN_SNIPPET + `return log(x + sqrt(x * x + 1.0));`;\nexport const ACOSH = CHECK_NAN_SNIPPET + `\n if (x < 1.0) return NAN;\n return log(x + sqrt(x * x - 1.0));`;\nexport const ATANH = CHECK_NAN_SNIPPET + `\n if ((x < -1.0) || (x > 1.0)) return NAN;\n return (log(1.0 + x) - log(1.0 - x)) / 2.0;`;\nexport const ERF = `\n // Error function is calculated approximately with elementary function.\n // See \"Handbook of Mathematical Functions with Formulas,\n // Graphs, and Mathematical Tables\", Abramowitz and Stegun.\n float p = ${backend_util.ERF_P};\n float a1 = ${backend_util.ERF_A1};\n float a2 = ${backend_util.ERF_A2};\n float a3 = ${backend_util.ERF_A3};\n float a4 = ${backend_util.ERF_A4};\n float a5 = ${backend_util.ERF_A5};\n\n float sign = sign(x);\n x = abs(x);\n float t = 1.0 / (1.0 + p * x);\n return sign * (1.0 - (((((a5*t + a4)*t) + a3)*t + a2)*t + a1)*t*exp(-x*x));\n`;\nexport const RECIPROCAL = `return 1.0 / x;`;\nexport const LOGICAL_NOT = `return float(!(x >= 1.0));`;\nexport const CLONE = 'return x;';\n//# sourceMappingURL=unaryop_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const LINEAR = `return x;`;\nexport const LOG = `\n vec4 result = log(x);\n vec4 isNaN = vec4(lessThan(x, vec4(0.0)));\n result.r = isNaN.r == 1.0 ? NAN : result.r;\n result.g = isNaN.g == 1.0 ? NAN : result.g;\n result.b = isNaN.b == 1.0 ? NAN : result.b;\n result.a = isNaN.a == 1.0 ? NAN : result.a;\n\n return result;\n`;\nexport const RELU = `\n vec4 result = x * vec4(greaterThanEqual(x, vec4(0.0)));\n bvec4 isNaN = isnan(x);\n\n result.r = isNaN.r ? x.r : result.r;\n result.g = isNaN.g ? x.g : result.g;\n result.b = isNaN.b ? x.b : result.b;\n result.a = isNaN.a ? x.a : result.a;\n\n return result;\n`;\nexport const RELU6 = `\n vec4 result = min(x, vec4(6.)) * vec4(greaterThanEqual(x, vec4(0.0)));\n bvec4 isNaN = isnan(x);\n\n result.r = isNaN.r ? x.r : result.r;\n result.g = isNaN.g ? x.g : result.g;\n result.b = isNaN.b ? x.b : result.b;\n result.a = isNaN.a ? x.a : result.a;\n\n return result;\n`;\nexport const ELU = `\n vec4 result;\n\n result.r = (x.r >= 0.0) ? x.r : (exp(x.r) - 1.0);\n result.g = (x.g >= 0.0) ? x.g : (exp(x.g) - 1.0);\n result.b = (x.b >= 0.0) ? x.b : (exp(x.b) - 1.0);\n result.a = (x.a >= 0.0) ? 
x.a : (exp(x.a) - 1.0);\n\n return result;\n`;\nexport class UnaryOpPackedProgram {\n constructor(aShape, opSnippet) {\n this.variableNames = ['A'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = aShape;\n this.userCode = `\n vec4 unaryOperation(vec4 x) {\n ${opSnippet}\n }\n\n void main() {\n vec4 x = getAAtOutCoords();\n vec4 y = unaryOperation(x);\n\n setOutput(y);\n }\n `;\n }\n}\n//# sourceMappingURL=unaryop_packed_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getChannels, getSourceCoords } from './packing_util';\nimport { getCoordsDataType } from './shader_compiler';\nexport class UnpackProgram {\n constructor(outputShape) {\n this.variableNames = ['A'];\n this.packedInputs = true;\n this.packedOutput = false;\n this.outputShape = outputShape;\n const rank = outputShape.length;\n const channels = getChannels('rc', rank);\n const dtype = getCoordsDataType(rank);\n const sourceCoords = getSourceCoords(rank, channels);\n const innerDims = channels.slice(-2);\n const coords = rank <= 1 ? 'rc' : `vec2(${innerDims.join(',')})`;\n this.userCode = `\n void main() {\n ${dtype} rc = getOutputCoords();\n vec4 packedInput = getA(${sourceCoords});\n\n setOutput(getChannel(packedInput, ${coords}));\n }\n `;\n }\n}\n//# sourceMappingURL=unpack_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
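The packed programs above operate on vec4 texels that each carry a 2x2 block of the logical tensor; UnpackProgram's getChannel picks the lane that corresponds to a given output coordinate. A minimal plain-JavaScript sketch of that addressing, assuming the usual 2x2 block layout with channels ordered r, g, b, a as top-left, top-right, bottom-left, bottom-right (packedLocation is a hypothetical helper, not part of the backend):

// Each packed texel covers a 2x2 block of the logical matrix.
function packedLocation(row, col) {
  const texelRow = Math.floor(row / 2);
  const texelCol = Math.floor(col / 2);
  // Assumed channel order within a texel: r,g,b,a = top-left, top-right,
  // bottom-left, bottom-right of the 2x2 block.
  const channel = ['r', 'g', 'b', 'a'][(row % 2) * 2 + (col % 2)];
  return { texelRow, texelCol, channel };
}

// Element (3, 4) of the logical matrix lands in texel (1, 2), channel 'b'
// under this assumed layout.
console.log(packedLocation(3, 4)); // { texelRow: 1, texelCol: 2, channel: 'b' }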
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// Import webgl flags.\nimport './flags_webgl';\nimport * as tf from '@tensorflow/tfjs-core';\nimport { div, engine, env, max, range, reshape, scalar, softmax, tensor, tidy, transpose } from '@tensorflow/tfjs-core';\nimport { backend_util, buffer, kernel_impls, slice_util, util } from '@tensorflow/tfjs-core';\nimport { DataStorage, KernelBackend, upcastType } from '@tensorflow/tfjs-core';\nimport { ceilImplCPU, expImplCPU, expm1ImplCPU, floorImplCPU, logImplCPU, rsqrtImplCPU, simpleAbsImplCPU, sliceImplCPU } from './kernel_utils/shared';\nconst { segment_util } = backend_util;\nconst split = kernel_impls.split;\nconst tile = kernel_impls.tile;\nconst topkImpl = kernel_impls.topkImpl;\nconst whereImpl = kernel_impls.whereImpl;\nimport { AddNProgram } from './addn_gpu';\nimport { AddNPackedProgram } from './addn_packed_gpu';\nimport { ArgMinMaxProgram } from './argminmax_gpu';\nimport { ArgMinMaxPackedProgram } from './argminmax_packed_gpu';\nimport { AvgPool3DBackpropProgram } from './avg_pool_backprop_gpu';\nimport * as binaryop_gpu from './binaryop_gpu';\nimport { BinaryOpProgram } from './binaryop_gpu';\nimport * as binaryop_packed_gpu from './binaryop_packed_gpu';\nimport { BinaryOpPackedProgram } from './binaryop_packed_gpu';\nimport { getWebGLContext } from './canvas_util';\nimport { ClipProgram } from './clip_gpu';\nimport { ClipPackedProgram } from './clip_packed_gpu';\nimport { ComplexAbsProgram } from './complex_abs_gpu';\nimport { Conv2DDerFilterProgram, Conv2DDerInputProgram, Conv3DDerFilterProgram, Conv3DDerInputProgram } from './conv_backprop_gpu';\nimport { DepthwiseConv2DDerFilterProgram, DepthwiseConv2DDerInputProgram } from './conv_backprop_gpu_depthwise';\nimport { Conv2DProgram, Conv3DProgram } from './conv_gpu';\nimport { DepthwiseConv2DProgram } from './conv_gpu_depthwise';\nimport { DepthwiseConvPacked2DProgram } from './conv_packed_gpu_depthwise';\nimport { CropAndResizeProgram } from './crop_and_resize_gpu';\nimport { CumSumProgram } from './cumsum_gpu';\nimport { DecodeMatrixProgram } from './decode_matrix_gpu';\nimport { DecodeMatrixPackedProgram } from './decode_matrix_packed_gpu';\nimport { DepthToSpaceProgram } from './depth_to_space_gpu';\nimport { DiagProgram } from './diag_gpu';\nimport { EncodeFloatProgram } from './encode_float_gpu';\nimport { EncodeFloatPackedProgram } from './encode_float_packed_gpu';\nimport { EncodeMatrixProgram } from './encode_matrix_gpu';\nimport { EncodeMatrixPackedProgram } from './encode_matrix_packed_gpu';\nimport { FillProgram } from './fill_gpu';\nimport { GatherProgram } from './gather_gpu';\nimport { GatherNDProgram } from './gather_nd_gpu';\nimport { GPGPUContext } from './gpgpu_context';\nimport * as gpgpu_math from './gpgpu_math';\nimport { Im2ColPackedProgram } from './im2col_packed_gpu';\nimport { LRNProgram } from './lrn_gpu';\nimport { LRNGradProgram } 
from './lrn_grad_gpu';\nimport { LRNPackedProgram } from './lrn_packed_gpu';\nimport { MaxPool3DBackpropProgram } from './max_pool_backprop_gpu';\nimport { MatMulPackedProgram } from './mulmat_packed_gpu';\nimport { MultinomialProgram } from './multinomial_gpu';\nimport { OneHotProgram } from './onehot_gpu';\nimport { PackProgram } from './pack_gpu';\nimport { PadProgram } from './pad_gpu';\nimport { PadPackedProgram } from './pad_packed_gpu';\nimport { Pool3DProgram } from './pool_gpu';\nimport { ReduceProgram } from './reduce_gpu';\nimport { ReshapePackedProgram } from './reshape_packed_gpu';\nimport { ResizeBilinearBackpropProgram } from './resize_bilinear_backprop_gpu';\nimport { ResizeBilinearProgram } from './resize_bilinear_gpu';\nimport { ResizeBilinearPackedProgram } from './resize_bilinear_packed_gpu';\nimport { ResizeNearestNeigborBackpropProgram } from './resize_nearest_neighbor_backprop_gpu';\nimport { ResizeNearestNeighborProgram } from './resize_nearest_neighbor_gpu';\nimport { ReverseProgram } from './reverse_gpu';\nimport { ReversePackedProgram } from './reverse_packed_gpu';\nimport { ScatterProgram } from './scatter_gpu';\nimport { SegmentOpProgram } from './segment_gpu';\nimport { SelectProgram } from './select_gpu';\nimport { SliceProgram } from './slice_gpu';\nimport { SlicePackedProgram } from './slice_packed_gpu';\nimport { StridedSliceProgram } from './strided_slice_gpu';\nimport * as tex_util from './tex_util';\nimport { TextureUsage } from './tex_util';\nimport { TextureManager } from './texture_manager';\nimport { TileProgram } from './tile_gpu';\nimport * as unary_op from './unaryop_gpu';\nimport { UnaryOpProgram } from './unaryop_gpu';\nimport * as unary_packed_op from './unaryop_packed_gpu';\nimport { UnaryOpPackedProgram } from './unaryop_packed_gpu';\nimport { UnpackProgram } from './unpack_gpu';\nimport * as webgl_util from './webgl_util';\nexport const EPSILON_FLOAT32 = 1e-7;\nexport const EPSILON_FLOAT16 = 1e-4;\nconst binaryCaches = {};\nexport function getBinaryCache(webGLVersion) {\n if (webGLVersion in binaryCaches) {\n return binaryCaches[webGLVersion];\n }\n binaryCaches[webGLVersion] = {};\n return binaryCaches[webGLVersion];\n}\nfunction mapActivationToShaderProgram(activation, packed = false) {\n if (activation === 'linear') {\n if (packed) {\n return unary_packed_op.LINEAR;\n }\n return unary_op.LINEAR;\n }\n else if (activation === 'relu') {\n if (packed) {\n return unary_packed_op.RELU;\n }\n return unary_op.RELU;\n }\n else if (activation === 'elu') {\n if (packed) {\n return unary_packed_op.ELU;\n }\n return unary_op.ELU;\n }\n else if (activation === 'relu6') {\n if (packed) {\n return unary_packed_op.RELU6;\n }\n return unary_op.RELU6;\n }\n else if (activation === 'prelu') {\n if (packed) {\n return binaryop_packed_gpu.PRELU;\n }\n return binaryop_gpu.PRELU;\n }\n throw new Error(`Activation ${activation} has not been implemented for the WebGL backend.`);\n}\n// Empirically determined constant used to determine size threshold for handing\n// off execution to the CPU.\nconst CPU_HANDOFF_SIZE_THRESHOLD = 128;\n// Empirically determined constant used to decide the number of MB on GPU\n// before we warn about high memory use. 
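mapActivationToShaderProgram only picks a GLSL body snippet; it becomes a complete shader when a program such as UnaryOpPackedProgram splices it into its main() template. A rough sketch of that templating step with a simplified, unpacked wrapper (the snippet and helper names here are illustrative, not the exact strings the backend emits):

// A simplified relu body (not the exact snippet the backend exports).
const RELU_BODY = 'return max(x, 0.0);';

// Hypothetical wrapper mirroring how a unary-op program splices an op
// snippet into a complete fragment-shader main().
function buildUnaryUserCode(opSnippet) {
  return `
    float unaryOperation(float x) {
      ${opSnippet}
    }

    void main() {
      float x = getAAtOutCoords();
      setOutput(unaryOperation(x));
    }
  `;
}

console.log(buildUnaryUserCode(RELU_BODY));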
The MB are this constant * screen area\n// * dpi / 1024 / 1024.\nconst BEFORE_PAGING_CONSTANT = 600;\nfunction numMBBeforeWarning() {\n if (env().global.screen == null) {\n return 1024; // 1 GB.\n }\n return (env().global.screen.height * env().global.screen.width *\n window.devicePixelRatio) *\n BEFORE_PAGING_CONSTANT / 1024 / 1024;\n}\n// Empirically determined minimal shared dimension in matmul before we forward\n// to a.mul(b).sum() in order to take advantage of GPU parallelism. See\n// https://github.com/tensorflow/tfjs-core/pull/1379 for benchmarks.\nexport const MATMUL_SHARED_DIM_THRESHOLD = 1000;\nexport class MathBackendWebGL extends KernelBackend {\n constructor(gpgpu) {\n super();\n // Maps data ids that have a pending read operation, to list of subscribers.\n this.pendingRead = new WeakMap();\n // List of data ids that are scheduled for disposal, but are waiting on a\n // pending read operation.\n this.pendingDisposal = new WeakSet();\n // Used to count the number of 'shallow' sliced tensors that point to the\n // same data id.\n this.dataRefCount = new WeakMap();\n this.numBytesInGPU = 0;\n // Accumulated time spent (including blocking) in uploading data to webgl.\n this.uploadWaitMs = 0;\n // Accumulated time spent (including blocking in downloading data from webgl.\n this.downloadWaitMs = 0;\n this.warnedAboutMemory = false;\n this.warnedAboutCPUBackend = false;\n this.pendingDeletes = 0;\n this.disposed = false;\n if (!env().getBool('HAS_WEBGL')) {\n throw new Error('WebGL is not supported on this device');\n }\n if (gpgpu == null) {\n const gl = getWebGLContext(env().getNumber('WEBGL_VERSION'));\n this.binaryCache = getBinaryCache(env().getNumber('WEBGL_VERSION'));\n this.gpgpu = new GPGPUContext(gl);\n this.canvas = gl.canvas;\n this.gpgpuCreatedLocally = true;\n }\n else {\n this.gpgpu = gpgpu;\n this.binaryCache = {};\n this.gpgpuCreatedLocally = false;\n this.canvas = gpgpu.gl.canvas;\n }\n this.textureManager = new TextureManager(this.gpgpu);\n this.numMBBeforeWarning = numMBBeforeWarning();\n this.texData = new DataStorage(this, engine());\n }\n numDataIds() {\n return this.texData.numDataIds() +\n (this.cpuBackend ? this.cpuBackend.numDataIds() : 0) -\n this.pendingDeletes;\n }\n write(values, shape, dtype) {\n if (env().getBool('WEBGL_CHECK_NUMERICAL_PROBLEMS') ||\n env().getBool('DEBUG')) {\n this.checkNumericalProblems(values);\n }\n if (dtype === 'complex64' && values != null) {\n throw new Error(`Cannot write to a complex64 dtype. ` +\n `Please use tf.complex(real, imag).`);\n }\n const dataId = {};\n this.texData.set(dataId, {\n shape,\n dtype,\n values,\n usage: TextureUsage.UPLOAD,\n refCount: 1,\n complexParentRefCount: 0\n });\n return dataId;\n }\n /** Increase refCount of a `TextureData`. */\n incRef(dataId) {\n const texData = this.texData.get(dataId);\n texData.refCount++;\n }\n /** Decrease refCount of a `TextureData`. */\n decRef(dataId) {\n if (this.texData.has(dataId)) {\n const texData = this.texData.get(dataId);\n texData.refCount--;\n }\n }\n move(dataId, values, shape, dtype) {\n if (env().getBool('DEBUG')) {\n this.checkNumericalProblems(values);\n }\n if (dtype === 'complex64') {\n throw new Error(`Cannot write to a complex64 dtype. 
` +\n `Please use tf.complex(real, imag).`);\n }\n this.texData.set(dataId, {\n shape,\n dtype,\n values,\n usage: TextureUsage.UPLOAD,\n refCount: 1,\n complexParentRefCount: 0\n });\n }\n disposeIntermediateTensorInfo(tensorInfo) {\n const dataId = tensorInfo.dataId;\n if (this.texData.has(dataId)) {\n const textureData = this.texData.get(dataId);\n textureData.refCount--;\n if (textureData.refCount < 1) {\n this.disposeData(dataId);\n }\n }\n }\n readSync(dataId) {\n const texData = this.texData.get(dataId);\n const { values, dtype, complexTensorInfos, slice, shape, isPacked } = texData;\n // The presence of `slice` indicates this tensor is a shallow slice of a\n // different tensor, and is using that original tensor's texture. Run\n // `clone` in order to copy that texture and read from it.\n if (slice != null) {\n let program;\n if (isPacked) {\n program = new UnaryOpPackedProgram(shape, unary_op.CLONE);\n }\n else {\n program = new UnaryOpProgram(shape, unary_op.CLONE);\n }\n const res = this.runWebGLProgram(program, [{ dataId, shape, dtype }], dtype);\n const data = this.readSync(res.dataId);\n this.disposeIntermediateTensorInfo(res);\n return data;\n }\n if (values != null) {\n return this.convertAndCacheOnCPU(dataId);\n }\n if (dtype === 'string') {\n return values;\n }\n const shouldTimeProgram = this.activeTimers != null;\n let start;\n if (shouldTimeProgram) {\n start = util.now();\n }\n let result;\n if (dtype === 'complex64') {\n const realValues = this.readSync(complexTensorInfos.real.dataId);\n const imagValues = this.readSync(complexTensorInfos.imag.dataId);\n result = backend_util.mergeRealAndImagArrays(realValues, imagValues);\n }\n else {\n result = this.getValuesFromTexture(dataId);\n }\n if (shouldTimeProgram) {\n this.downloadWaitMs += util.now() - start;\n }\n return this.convertAndCacheOnCPU(dataId, result);\n }\n async read(dataId) {\n if (this.pendingRead.has(dataId)) {\n const subscribers = this.pendingRead.get(dataId);\n return new Promise(resolve => subscribers.push(resolve));\n }\n const texData = this.texData.get(dataId);\n const { values, shape, slice, dtype, complexTensorInfos, isPacked } = texData;\n // The presence of `slice` indicates this tensor is a shallow slice of a\n // different tensor, and is using that original tensor's texture. 
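The read() path here coalesces concurrent downloads of the same data id: the first caller performs the fence-and-download, and later callers are parked as resolver callbacks in pendingRead so they all receive the same values. A minimal sketch of that subscriber pattern in isolation (readOnce and fetchValues are stand-ins, not the backend's API):

const pending = new Map(); // key -> resolvers waiting on an in-flight download

async function readOnce(key, fetchValues) {
  // Later callers subscribe to the download that is already running.
  if (pending.has(key)) {
    return new Promise((resolve) => pending.get(key).push(resolve));
  }
  pending.set(key, []);
  const values = await fetchValues(key);
  // Hand the same values to everyone who subscribed in the meantime.
  const subscribers = pending.get(key);
  pending.delete(key);
  subscribers.forEach((resolve) => resolve(values));
  return values;
}

// Three concurrent reads of the same key trigger a single fetch.
let fetches = 0;
const fakeFetch = async () => { fetches += 1; return [1, 2, 3]; };
Promise.all([readOnce('a', fakeFetch), readOnce('a', fakeFetch), readOnce('a', fakeFetch)])
  .then(() => console.log(fetches)); // 1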
Run\n // `clone` in order to copy that texture and read from it.\n if (slice != null) {\n let program;\n if (isPacked) {\n program = new UnaryOpPackedProgram(shape, unary_op.CLONE);\n }\n else {\n program = new UnaryOpProgram(shape, unary_op.CLONE);\n }\n const res = this.runWebGLProgram(program, [{ dataId, shape, dtype }], dtype);\n const data = this.read(res.dataId);\n this.disposeIntermediateTensorInfo(res);\n return data;\n }\n if (values != null) {\n return this.convertAndCacheOnCPU(dataId);\n }\n if (!env().getBool('WEBGL_DOWNLOAD_FLOAT_ENABLED') &&\n env().getNumber('WEBGL_VERSION') === 2) {\n throw new Error(`tensor.data() with WEBGL_DOWNLOAD_FLOAT_ENABLED=false and ` +\n `WEBGL_VERSION=2 not yet supported.`);\n }\n let buffer = null;\n let tmpDownloadTarget;\n if (dtype !== 'complex64' && env().get('WEBGL_BUFFER_SUPPORTED')) {\n // Possibly copy the texture into a buffer before inserting a fence.\n tmpDownloadTarget = this.decode(dataId);\n const tmpData = this.texData.get(tmpDownloadTarget.dataId);\n buffer = this.gpgpu.createBufferFromTexture(tmpData.texture, ...tex_util.getDenseTexShape(shape));\n }\n this.pendingRead.set(dataId, []);\n if (dtype !== 'complex64') {\n // Create a fence and wait for it to resolve.\n await this.gpgpu.createAndWaitForFence();\n }\n // Download the values from the GPU.\n let vals;\n if (dtype === 'complex64') {\n const ps = await Promise.all([\n this.read(complexTensorInfos.real.dataId),\n this.read(complexTensorInfos.imag.dataId)\n ]);\n const realValues = ps[0];\n const imagValues = ps[1];\n vals = backend_util.mergeRealAndImagArrays(realValues, imagValues);\n }\n else if (buffer == null) {\n vals = this.getValuesFromTexture(dataId);\n }\n else {\n const size = util.sizeFromShape(shape);\n vals = this.gpgpu.downloadFloat32MatrixFromBuffer(buffer, size);\n }\n if (tmpDownloadTarget != null) {\n this.disposeIntermediateTensorInfo(tmpDownloadTarget);\n }\n const dTypeVals = this.convertAndCacheOnCPU(dataId, vals);\n const subscribers = this.pendingRead.get(dataId);\n this.pendingRead.delete(dataId);\n // Notify all pending reads.\n subscribers.forEach(resolve => resolve(dTypeVals));\n if (this.pendingDisposal.has(dataId)) {\n this.pendingDisposal.delete(dataId);\n this.disposeData(dataId);\n this.pendingDeletes--;\n }\n return dTypeVals;\n }\n checkNumericalProblems(values) {\n if (values == null) {\n return;\n }\n for (let i = 0; i < values.length; i++) {\n const num = values[i];\n if (!webgl_util.canBeRepresented(num)) {\n if (env().getBool('WEBGL_RENDER_FLOAT32_CAPABLE')) {\n throw Error(`The value ${num} cannot be represented with your ` +\n `current settings. Consider enabling float32 rendering: ` +\n `'tf.env().set('WEBGL_RENDER_FLOAT32_ENABLED', true);'`);\n }\n throw Error(`The value ${num} cannot be represented on this device.`);\n }\n }\n }\n getValuesFromTexture(dataId) {\n const { shape, dtype, isPacked } = this.texData.get(dataId);\n const size = util.sizeFromShape(shape);\n if (env().getBool('WEBGL_DOWNLOAD_FLOAT_ENABLED')) {\n const tmpTarget = this.decode(dataId);\n const tmpData = this.texData.get(tmpTarget.dataId);\n const vals = this.gpgpu\n .downloadMatrixFromPackedTexture(tmpData.texture, ...tex_util.getDenseTexShape(shape))\n .subarray(0, size);\n this.disposeIntermediateTensorInfo(tmpTarget);\n return vals;\n }\n const shouldUsePackedProgram = env().getBool('WEBGL_PACK') && isPacked === true;\n const outputShape = shouldUsePackedProgram ? 
webgl_util.getShapeAs3D(shape) : shape;\n const program = shouldUsePackedProgram ?\n new EncodeFloatPackedProgram(outputShape) :\n new EncodeFloatProgram(outputShape);\n const output = this.runWebGLProgram(program, [{ shape: outputShape, dtype, dataId }], 'float32');\n const tmpData = this.texData.get(output.dataId);\n const vals = this.gpgpu\n .downloadByteEncodedFloatMatrixFromOutputTexture(tmpData.texture, tmpData.texShape[0], tmpData.texShape[1])\n .subarray(0, size);\n this.disposeIntermediateTensorInfo(output);\n return vals;\n }\n async time(f) {\n const oldActiveTimers = this.activeTimers;\n const newActiveTimers = [];\n let outerMostTime = false;\n if (this.programTimersStack == null) {\n this.programTimersStack = newActiveTimers;\n outerMostTime = true;\n }\n else {\n this.activeTimers.push(newActiveTimers);\n }\n this.activeTimers = newActiveTimers;\n f();\n // needing to split these up because util.flatten only accepts certain types\n const flattenedActiveTimerQueries = util.flatten(this.activeTimers.map((d) => d.query))\n .filter(d => d != null);\n const flattenedActiveTimerNames = util.flatten(this.activeTimers.map((d) => d.name))\n .filter(d => d != null);\n this.activeTimers = oldActiveTimers;\n if (outerMostTime) {\n this.programTimersStack = null;\n }\n const res = {\n uploadWaitMs: this.uploadWaitMs,\n downloadWaitMs: this.downloadWaitMs,\n kernelMs: null,\n wallMs: null // will be filled by the engine\n };\n if (env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_RELIABLE') > 0) {\n const kernelMs = await Promise.all(flattenedActiveTimerQueries);\n res['kernelMs'] = util.sum(kernelMs);\n res['getExtraProfileInfo'] = () => kernelMs.map((d, i) => ({ name: flattenedActiveTimerNames[i], ms: d }))\n .map(d => `${d.name}: ${d.ms}`)\n .join(', ');\n }\n else {\n res['kernelMs'] = {\n error: 'WebGL query timers are not supported in this environment.'\n };\n }\n this.uploadWaitMs = 0;\n this.downloadWaitMs = 0;\n return res;\n }\n memory() {\n return {\n unreliable: false,\n numBytesInGPU: this.numBytesInGPU,\n numBytesInGPUAllocated: this.textureManager.numBytesAllocated,\n numBytesInGPUFree: this.textureManager.numBytesFree\n };\n }\n startTimer() {\n if (env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_RELIABLE') > 0) {\n return this.gpgpu.beginQuery();\n }\n return { startMs: util.now(), endMs: null };\n }\n endTimer(query) {\n if (env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_RELIABLE') > 0) {\n this.gpgpu.endQuery();\n return query;\n }\n query.endMs = util.now();\n return query;\n }\n async getQueryTime(query) {\n if (env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_RELIABLE') > 0) {\n return this.gpgpu.waitForQueryAndGetTime(query);\n }\n const timerQuery = query;\n return timerQuery.endMs - timerQuery.startMs;\n }\n disposeData(dataId) {\n if (this.pendingDisposal.has(dataId)) {\n return;\n }\n if (this.pendingRead.has(dataId)) {\n this.pendingDisposal.add(dataId);\n this.pendingDeletes++;\n return;\n }\n // No-op if already disposed.\n if (!this.texData.has(dataId)) {\n return;\n }\n // Trying to dispose a textureData that has a 'kept' refCount, e.g. trying\n // to dispose a tensor whose data bucket is shared with a complex tensor. 
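Shallow slices share the original tensor's texture, so the dispose path that follows (disposeData / releaseGPUData) keeps a per-origin reference count and only frees GPU memory when the last reference is released. A stripped-down sketch of that bookkeeping (retain/release and the releaseTexture callback are placeholders, not the TextureManager API):

const refCounts = new Map(); // origin data id -> number of live references

function retain(key) {
  refCounts.set(key, (refCounts.get(key) || 0) + 1);
}

function release(key, releaseTexture) {
  const count = refCounts.get(key) || 0;
  if (count > 1) {
    refCounts.set(key, count - 1); // other slices still use the texture
  } else {
    refCounts.delete(key);
    releaseTexture(key); // last reference: actually free the GPU memory
  }
}

// Two shallow slices of the same buffer: only the second release frees it.
retain('buf0');
retain('buf0');
release('buf0', (k) => console.log('freed', k)); // (nothing printed)
release('buf0', (k) => console.log('freed', k)); // "freed buf0"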
In\n // this case we are removing a reference to the textureData, but we\n // shouldn't actually dispose the texture.\n if (this.texData.get(dataId).complexParentRefCount > 0) {\n this.texData.get(dataId).refCount--;\n return;\n }\n this.releaseGPUData(dataId);\n const { complexTensorInfos } = this.texData.get(dataId);\n if (complexTensorInfos != null) {\n this.texData.get(complexTensorInfos.real.dataId).complexParentRefCount--;\n this.disposeIntermediateTensorInfo(complexTensorInfos.real);\n this.texData.get(complexTensorInfos.imag.dataId).complexParentRefCount--;\n this.disposeIntermediateTensorInfo(complexTensorInfos.imag);\n }\n this.texData.delete(dataId);\n }\n releaseGPUData(dataId) {\n const { texture, dtype, texShape, usage, isPacked, slice } = this.texData.get(dataId);\n const key = slice && slice.origDataId || dataId;\n const refCount = this.dataRefCount.get(key);\n if (refCount > 1) {\n this.dataRefCount.set(key, refCount - 1);\n }\n else {\n this.dataRefCount.delete(key);\n if (texture != null) {\n this.numBytesInGPU -= this.computeBytes(texShape, dtype);\n this.textureManager.releaseTexture(texture, texShape, usage, isPacked);\n }\n }\n const texData = this.texData.get(dataId);\n texData.texture = null;\n texData.texShape = null;\n texData.isPacked = false;\n texData.slice = null;\n }\n getTexture(dataId) {\n this.uploadToGPU(dataId);\n return this.texData.get(dataId).texture;\n }\n /**\n * Returns internal information for the specific data bucket. Used in unit\n * tests.\n */\n getDataInfo(dataId) {\n return this.texData.get(dataId);\n }\n getCPUBackend() {\n if (!env().getBool('WEBGL_CPU_FORWARD')) {\n return null;\n }\n if (this.cpuBackend == null) {\n this.cpuBackend = engine().findBackend('cpu');\n }\n return this.cpuBackend;\n }\n /*\n Tests whether all the inputs to an op are small and on the CPU. This heuristic\n determines when it would be faster to execute a kernel on the CPU. WebGL\n kernels opt into running this check and forwarding when appropriate.\n TODO(https://github.com/tensorflow/tfjs/issues/872): Develop a more\n sustainable strategy for optimizing backend execution of ops.\n */\n shouldExecuteOnCPU(inputs, sizeThreshold = CPU_HANDOFF_SIZE_THRESHOLD) {\n const cpuBackend = this.getCPUBackend();\n if (!this.warnedAboutCPUBackend && cpuBackend == null) {\n console.warn('Your application contains ops that are small enough to be ' +\n 'executed on the CPU backend, however the CPU backend cannot ' +\n 'be found. 
Consider importing the CPU backend ' +\n '(@tensorflow/tfjs-backend-cpu) for better performance.');\n this.warnedAboutCPUBackend = true;\n }\n return cpuBackend != null &&\n inputs.every(input => this.texData.get(input.dataId).texture == null &&\n util.sizeFromShape(input.shape) < sizeThreshold);\n }\n getGPGPUContext() {\n return this.gpgpu;\n }\n slice(x, begin, size) {\n if (this.shouldExecuteOnCPU([x])) {\n const outValues = sliceImplCPU(this.texData.get(x.dataId).values, begin, size, x.shape, x.dtype);\n return this.makeOutput(size, x.dtype, outValues);\n }\n // Short-circuit computation if the slice is zero-sized.\n if (util.sizeFromShape(size) === 0) {\n return tensor([], size, x.dtype);\n }\n const { isPacked } = this.texData.get(x.dataId);\n const isContinous = slice_util.isSliceContinous(x.shape, begin, size);\n if (isPacked || !isContinous) {\n const program = env().getBool('WEBGL_PACK_ARRAY_OPERATIONS') ?\n new SlicePackedProgram(size) :\n new SliceProgram(size);\n const customSetup = program.getCustomSetupFunc(begin);\n return this.compileAndRun(program, [x], null, customSetup);\n }\n this.uploadToGPU(x.dataId);\n return this.shallowSlice(x, begin, size);\n }\n shallowSlice(x, begin, size) {\n const xTexData = this.texData.get(x.dataId);\n const t = this.makeOutput(size, x.dtype);\n const newTexData = this.texData.get(t.dataId);\n // Copy texture data from the original tensor.\n Object.assign(newTexData, xTexData);\n newTexData.shape = size;\n newTexData.dtype = x.dtype;\n let flatOffset = slice_util.computeFlatOffset(begin, x.strides);\n if (xTexData.slice) {\n // We are slicing an already sliced tensor, so we have to accumulate\n // the offset.\n flatOffset += xTexData.slice.flatOffset;\n }\n newTexData.slice = {\n flatOffset,\n // Point to the original dataId, which is used to do ref counting.\n origDataId: xTexData.slice && xTexData.slice.origDataId || x.dataId\n };\n // Increase the ref count for that data bucket.\n const refCount = this.dataRefCount.get(newTexData.slice.origDataId) || 1;\n this.dataRefCount.set(newTexData.slice.origDataId, refCount + 1);\n return t;\n }\n stridedSlice(x, begin, end, strides) {\n const cpuRes = this.tryRunOnCpuOrThrow([x], () => this.cpuBackend.stridedSlice(x, begin, end, strides));\n if (cpuRes) {\n return cpuRes;\n }\n const outShape = slice_util.computeOutShape(begin, end, strides);\n if (outShape.some(axis => axis === 0)) {\n return tensor([], outShape);\n }\n const program = new StridedSliceProgram(begin, strides, outShape);\n return this.compileAndRun(program, [x]);\n }\n reverse(x, axis) {\n const program = env().getBool('WEBGL_PACK_ARRAY_OPERATIONS') ?\n new ReversePackedProgram(x.shape, axis) :\n new ReverseProgram(x.shape, axis);\n return this.compileAndRun(program, [x]);\n }\n neg(x) {\n const cpuRes = this.tryRunOnCpuOrThrow([x], () => this.cpuBackend.neg(x));\n if (cpuRes) {\n return cpuRes;\n }\n if (env().getBool('WEBGL_PACK_UNARY_OPERATIONS')) {\n return this.packedUnaryOp(x, unary_op.NEG, x.dtype);\n }\n const program = new UnaryOpProgram(x.shape, unary_op.NEG);\n return this.compileAndRun(program, [x]);\n }\n batchMatMul(a, b, transposeA, transposeB) {\n const outerShapeA = transposeA ? a.shape[2] : a.shape[1];\n const outerShapeB = transposeB ? b.shape[1] : b.shape[2];\n const sharedDim = transposeA ? 
a.shape[1] : a.shape[2];\n const batch = Math.max(a.shape[0], b.shape[0]);\n // Since the matrices are vectors, it is faster to call mul().sum()\n // because sum() is O(sqrt(N)) due to divide-and-conquer.\n if ((outerShapeA === 1 || outerShapeB === 1) &&\n sharedDim > MATMUL_SHARED_DIM_THRESHOLD) {\n if (transposeA) {\n a = transpose(a, [0, 2, 1]);\n }\n if (transposeB) {\n b = transpose(b, [0, 2, 1]);\n }\n const a3D = outerShapeB === 1 ? a : a.as3D(batch, sharedDim, 1);\n const axis = outerShapeB === 1 ? 2 : 1;\n const b3D = outerShapeB === 1 ? b.as3D(batch, 1, sharedDim) : b;\n // TODO(annxingyuan): Call multiply directly as part of batchMatMul\n // modularization.\n const product = tf.mul(a3D, b3D);\n return product.sum(axis, true /* keepDims */);\n }\n const dtype = upcastType(a.dtype, b.dtype);\n const program = new MatMulPackedProgram(a.shape, b.shape, [batch, outerShapeA, outerShapeB], transposeA, transposeB);\n return this.compileAndRun(program, [a, b], dtype);\n }\n fusedBatchMatMul({ a, b, transposeA, transposeB, bias, activation, preluActivationWeights }) {\n const outerShapeA = transposeA ? a.shape[2] : a.shape[1];\n const outerShapeB = transposeB ? b.shape[1] : b.shape[2];\n const batch = Math.max(a.shape[0], b.shape[0]);\n const dtype = upcastType(a.dtype, b.dtype);\n const hasBias = bias != null;\n const hasPreluActivationWeights = preluActivationWeights != null;\n const fusedActivation = activation ? mapActivationToShaderProgram(activation, true) : null;\n const program = new MatMulPackedProgram(a.shape, b.shape, [batch, outerShapeA, outerShapeB], transposeA, transposeB, hasBias, fusedActivation, hasPreluActivationWeights);\n const inputs = [a, b];\n if (bias) {\n inputs.push(bias);\n }\n if (preluActivationWeights) {\n inputs.push(preluActivationWeights);\n }\n return this.compileAndRun(program, inputs, dtype);\n }\n localResponseNormalization4D(x, radius, bias, alpha, beta) {\n const program = env().getBool('WEBGL_PACK_NORMALIZATION') ?\n new LRNPackedProgram(x.shape, radius, bias, alpha, beta) :\n new LRNProgram(x.shape, radius, bias, alpha, beta);\n return this.compileAndRun(program, [x]);\n }\n LRNGrad(dy, inputImage, outputImage, depthRadius, bias, alpha, beta) {\n const program = new LRNGradProgram(inputImage.shape, depthRadius, bias, alpha, beta);\n return this.compileAndRun(program, [inputImage, outputImage, dy]);\n }\n tile(x, reps) {\n if (x.dtype === 'string') {\n const data = this.readSync(x.dataId);\n const decodedData = data.map(d => util.decodeString(d));\n const buf = buffer(x.shape, x.dtype, decodedData);\n return tile(buf, reps);\n }\n const program = new TileProgram(x.shape, reps);\n return this.compileAndRun(program, [x]);\n }\n pad(x, paddings, constantValue) {\n const program = env().getBool('WEBGL_PACK_ARRAY_OPERATIONS') ?\n new PadPackedProgram(x.shape, paddings, constantValue) :\n new PadProgram(x.shape, paddings, constantValue);\n return this.compileAndRun(program, [x]);\n }\n gather(x, indices, axis) {\n const cpuRes = this.tryRunOnCpuOrThrow([x, indices], () => this.cpuBackend.gather(x, indices, axis));\n if (cpuRes) {\n return cpuRes;\n }\n const program = new GatherProgram(x.shape, indices.size, axis);\n return this.compileAndRun(program, [x, indices]);\n }\n batchToSpaceND(x, blockShape, crops) {\n util.assert(x.rank <= 4, () => 'batchToSpaceND for rank > 4 with a WebGL backend not ' +\n 'implemented yet');\n const prod = blockShape.reduce((a, b) => a * b);\n const reshaped = backend_util.getReshaped(x.shape, blockShape, prod);\n const 
permuted = backend_util.getPermuted(reshaped.length, blockShape.length);\n const reshapedPermuted = backend_util.getReshapedPermuted(x.shape, blockShape, prod);\n const sliceBeginCoords = backend_util.getSliceBeginCoords(crops, blockShape.length);\n const sliceSize = backend_util.getSliceSize(reshapedPermuted, crops, blockShape.length);\n return transpose(x.reshape(reshaped), permuted)\n .reshape(reshapedPermuted)\n .slice(sliceBeginCoords, sliceSize);\n }\n spaceToBatchND(x, blockShape, paddings) {\n util.assert(x.rank <= 4, () => 'spaceToBatchND for rank > 4 with a WebGL backend not ' +\n 'implemented yet');\n const prod = blockShape.reduce((a, b) => a * b);\n const completePaddings = [[0, 0]];\n completePaddings.push(...paddings);\n for (let i = 1 + blockShape.length; i < x.shape.length; ++i) {\n completePaddings.push([0, 0]);\n }\n const paddedX = x.pad(completePaddings);\n const reshapedPaddedShape = backend_util.getReshaped(paddedX.shape, blockShape, prod, false);\n const permutedReshapedPaddedPermutation = backend_util.getPermuted(reshapedPaddedShape.length, blockShape.length, false);\n const flattenShape = backend_util.getReshapedPermuted(paddedX.shape, blockShape, prod, false);\n const paddedXT = transpose(paddedX.reshape(reshapedPaddedShape), permutedReshapedPaddedPermutation);\n return reshape(paddedXT, flattenShape);\n }\n reduce(x, reduceType, dtype) {\n const batchSize = x.shape[0];\n const inSize = x.shape[1];\n const windowSize = backend_util.computeOptimalWindowSize(inSize);\n const outSize = Math.ceil(inSize / windowSize);\n const reduceInfo = { windowSize, inSize, batchSize, outSize };\n const program = new ReduceProgram(reduceInfo, reduceType);\n const output = this.compileAndRun(program, [x], dtype);\n // No need to run another GPGPU program.\n if (output.shape[1] === 1) {\n return output;\n }\n return this.reduce(output, reduceType, dtype);\n }\n argReduce(x, reduceType, bestIndicesA = null) {\n let batchSize = x.shape[0];\n let inSize = x.shape[1];\n if (bestIndicesA != null) {\n batchSize = bestIndicesA.shape[0];\n inSize = bestIndicesA.shape[1];\n }\n const windowSize = backend_util.computeOptimalWindowSize(inSize);\n const reduceInfo = {\n windowSize,\n inSize,\n batchSize,\n outSize: Math.ceil(inSize / windowSize)\n };\n const program = new ArgMinMaxProgram(reduceInfo, reduceType, bestIndicesA == null);\n const inputs = [x];\n if (bestIndicesA != null) {\n inputs.push(bestIndicesA);\n }\n const output = this.compileAndRun(program, inputs, 'int32');\n // No need to run another GPGPU program.\n if (output.shape[1] === 1) {\n return output;\n }\n return this.argReduce(x, reduceType, output);\n }\n argReducePacked(x, reduceType, bestIndicesA = null) {\n const inShape = bestIndicesA != null ? bestIndicesA.shape : x.shape;\n const inSize = inShape[inShape.length - 1];\n const windowSize = backend_util.computeOptimalWindowSize(inSize);\n const program = new ArgMinMaxPackedProgram(inShape, windowSize, reduceType, bestIndicesA == null);\n const inputs = bestIndicesA == null ? 
[x] : [x, bestIndicesA];\n const output = this.compileAndRun(program, inputs, 'int32');\n if (output.rank === x.rank) {\n return this.argReducePacked(x, reduceType, output);\n }\n return output;\n }\n sum(x, axes) {\n backend_util.assertAxesAreInnerMostDims('sum', axes, x.rank);\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const inSize = util.sizeFromShape(reduceShape);\n const a2D = x.as2D(-1, inSize);\n const outputDType = tf.sumOutType(x.dtype);\n return this.reduce(a2D, 'sum', outputDType).reshape(outShape);\n }\n prod(x, axes) {\n const cpuRes = this.tryRunOnCpuOrThrow([x], () => this.cpuBackend.prod(x, axes));\n if (cpuRes) {\n return cpuRes;\n }\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const inSize = util.sizeFromShape(reduceShape);\n const a2D = x.as2D(-1, inSize);\n const outputDType = tf.sumOutType(x.dtype);\n return this.reduce(a2D, 'prod', outputDType).reshape(outShape);\n }\n unsortedSegmentSum(x, segmentIds, numSegments) {\n let axis = 0;\n const permutation = backend_util.getAxesPermutation([axis], x.rank);\n let permutedX = x;\n if (permutation != null) {\n permutedX = transpose(x, permutation);\n axis = backend_util.getInnerMostAxes(1, x.rank)[0];\n }\n const outShape = segment_util.computeOutShape(permutedX.shape, axis, numSegments);\n const inSize = util.sizeFromShape([permutedX.shape[axis]]);\n const a2D = permutedX.as2D(-1, inSize);\n const outputDType = tf.sumOutType(x.dtype);\n let result = this.segOpCompute(a2D, 'unsortedSegmentSum', segmentIds, outputDType, numSegments)\n .reshape(outShape);\n if (permutation != null) {\n result =\n transpose(result, backend_util.getUndoAxesPermutation(permutation));\n }\n return result;\n }\n segOpCompute(x, segOpType, segmentIds, dtype, numSegments) {\n const batchSize = x.shape[0];\n const inSize = x.shape[1];\n const windowSize = segment_util.segOpComputeOptimalWindowSize(inSize, numSegments);\n const segOpInfo = { windowSize, inSize, batchSize, numSegments };\n const program = new SegmentOpProgram(segOpInfo, segOpType);\n const output = this.compileAndRun(program, [x, segmentIds], dtype);\n // No need to run another GPGPU program.\n if (output.shape[1] === numSegments) {\n return output;\n }\n segmentIds = range(0, numSegments).tile([inSize / windowSize]);\n return this.segOpCompute(output, segOpType, segmentIds, dtype, numSegments);\n }\n argMinMaxReduce(x, axis, reduceType) {\n const axes = [axis];\n backend_util.assertAxesAreInnerMostDims('arg' + reduceType.charAt(0).toUpperCase() + reduceType.slice(1), axes, x.rank);\n if (!env().getBool('WEBGL_PACK_REDUCE') || x.rank <= 2) {\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const inSize = util.sizeFromShape(reduceShape);\n const a2D = x.as2D(-1, inSize);\n return this.argReduce(a2D, reduceType).reshape(outShape);\n }\n return this.argReducePacked(x, reduceType);\n }\n argMin(x, axis) {\n return this.argMinMaxReduce(x, axis, 'min');\n }\n argMax(x, axis) {\n return this.argMinMaxReduce(x, axis, 'max');\n }\n cumsum(x, axis, exclusive, reverse) {\n if (axis !== x.rank - 1) {\n throw new Error(`WebGL cumsum shader expects an inner-most axis=${x.rank - 1} ` +\n `but got axis=${axis}`);\n }\n const size = x.shape[axis];\n let result = x;\n // Use cumsum parallel algorithm, ref:\n // https://developer.nvidia.com/gpugems/gpugems3/part-vi-gpu-computing/chapter-39-parallel-prefix-sum-scan-cuda\n for (let i = 0; i <= Math.ceil(Math.log2(size)) - 
1; i++) {\n const program = new CumSumProgram(x.shape, false, reverse);\n const customSetup = program.getCustomSetupFunc(i);\n const prevResult = result;\n result = this.compileAndRun(program, [result], result.dtype, customSetup);\n prevResult.dispose();\n }\n // For exclusive cumsum, shift the end result in the direction of sum and\n // add 0 to the front index.\n if (exclusive) {\n const program = new CumSumProgram(x.shape, exclusive, reverse);\n const prevResult = result;\n result = this.compileAndRun(program, [result]);\n prevResult.dispose();\n }\n return result;\n }\n equal(a, b) {\n if (env().getBool('WEBGL_PACK_BINARY_OPERATIONS')) {\n return this.packedBinaryOp(a, b, binaryop_packed_gpu.EQUAL, 'bool');\n }\n const program = new BinaryOpProgram(binaryop_gpu.EQUAL, a.shape, b.shape);\n return this.compileAndRun(program, [a, b], 'bool');\n }\n less(a, b) {\n const cpuRes = this.tryRunOnCpuOrThrow([a, b], () => this.cpuBackend.less(a, b));\n if (cpuRes) {\n return cpuRes;\n }\n if (env().getBool('WEBGL_PACK_BINARY_OPERATIONS')) {\n return this.packedBinaryOp(a, b, binaryop_packed_gpu.LESS, 'bool');\n }\n const program = new BinaryOpProgram(binaryop_gpu.LESS, a.shape, b.shape);\n return this.compileAndRun(program, [a, b], 'bool');\n }\n lessEqual(a, b) {\n if (env().getBool('WEBGL_PACK_BINARY_OPERATIONS')) {\n return this.packedBinaryOp(a, b, binaryop_packed_gpu.LESS_EQUAL, 'bool');\n }\n const program = new BinaryOpProgram(binaryop_gpu.LESS_EQUAL, a.shape, b.shape);\n return this.compileAndRun(program, [a, b], 'bool');\n }\n greater(a, b) {\n const cpuRes = this.tryRunOnCpuOrThrow([a, b], () => this.cpuBackend.greater(a, b));\n if (cpuRes) {\n return cpuRes;\n }\n if (env().getBool('WEBGL_PACK_BINARY_OPERATIONS')) {\n return this.packedBinaryOp(a, b, binaryop_packed_gpu.GREATER, 'bool');\n }\n const program = new BinaryOpProgram(binaryop_gpu.GREATER, a.shape, b.shape);\n return this.compileAndRun(program, [a, b], 'bool');\n }\n greaterEqual(a, b) {\n if (env().getBool('WEBGL_PACK_BINARY_OPERATIONS')) {\n return this.packedBinaryOp(a, b, binaryop_packed_gpu.GREATER_EQUAL, 'bool');\n }\n const program = new BinaryOpProgram(binaryop_gpu.GREATER_EQUAL, a.shape, b.shape);\n return this.compileAndRun(program, [a, b], 'bool');\n }\n logicalNot(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.LOGICAL_NOT);\n return this.compileAndRun(program, [x]);\n }\n logicalAnd(a, b) {\n if (env().getBool('WEBGL_PACK_BINARY_OPERATIONS')) {\n return this.packedBinaryOp(a, b, binaryop_packed_gpu.LOGICAL_AND, 'bool');\n }\n const program = new BinaryOpProgram(binaryop_gpu.LOGICAL_AND, a.shape, b.shape);\n return this.compileAndRun(program, [a, b], 'bool');\n }\n logicalOr(a, b) {\n if (env().getBool('WEBGL_PACK_BINARY_OPERATIONS')) {\n return this.packedBinaryOp(a, b, binaryop_packed_gpu.LOGICAL_OR, 'bool');\n }\n const program = new BinaryOpProgram(binaryop_gpu.LOGICAL_OR, a.shape, b.shape);\n return this.compileAndRun(program, [a, b], 'bool');\n }\n select(condition, a, b) {\n const program = new SelectProgram(condition.rank, a.shape, a.rank);\n return this.compileAndRun(program, [condition, a, b], upcastType(a.dtype, b.dtype));\n }\n where(condition) {\n backend_util.warn('tf.where() in webgl locks the UI thread. 
' +\n 'Call tf.whereAsync() instead');\n const condVals = condition.dataSync();\n return whereImpl(condition.shape, condVals);\n }\n topk(x, k, sorted) {\n const xVals = x.dataSync();\n return topkImpl(xVals, x.shape, x.dtype, k, sorted);\n }\n min(x, axes) {\n backend_util.assertAxesAreInnerMostDims('min', axes, x.rank);\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const inSize = util.sizeFromShape(reduceShape);\n const a2D = x.as2D(-1, inSize);\n return this.reduce(a2D, 'min', a2D.dtype).reshape(outShape);\n }\n minimum(a, b) {\n const cpuRes = this.tryRunOnCpuOrThrow([a, b], () => this.cpuBackend.minimum(a, b));\n if (cpuRes) {\n return cpuRes;\n }\n const program = env().getBool('WEBGL_PACK_BINARY_OPERATIONS') ?\n new BinaryOpPackedProgram(binaryop_packed_gpu.MIN, a.shape, b.shape) :\n new BinaryOpProgram(binaryop_gpu.MIN, a.shape, b.shape);\n return this.compileAndRun(program, [a, b]);\n }\n mod(a, b) {\n const program = env().getBool('WEBGL_PACK_BINARY_OPERATIONS') ?\n new BinaryOpPackedProgram(binaryop_packed_gpu.MOD, a.shape, b.shape) :\n new BinaryOpProgram(binaryop_gpu.MOD, a.shape, b.shape);\n return this.compileAndRun(program, [a, b]);\n }\n maximum(a, b) {\n const cpuRes = this.tryRunOnCpuOrThrow([a, b], () => this.cpuBackend.maximum(a, b));\n if (cpuRes) {\n return cpuRes;\n }\n const program = env().getBool('WEBGL_PACK_BINARY_OPERATIONS') ?\n new BinaryOpPackedProgram(binaryop_packed_gpu.MAX, a.shape, b.shape) :\n new BinaryOpProgram(binaryop_gpu.MAX, a.shape, b.shape);\n return this.compileAndRun(program, [a, b]);\n }\n all(x, axes) {\n backend_util.assertAxesAreInnerMostDims('all', axes, x.rank);\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const inSize = util.sizeFromShape(reduceShape);\n const a2D = x.as2D(-1, inSize);\n return this.reduce(a2D, 'all', a2D.dtype).reshape(outShape);\n }\n any(x, axes) {\n backend_util.assertAxesAreInnerMostDims('any', axes, x.rank);\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const inSize = util.sizeFromShape(reduceShape);\n const a2D = x.as2D(-1, inSize);\n return this.reduce(a2D, 'any', a2D.dtype).reshape(outShape);\n }\n floorDiv(a, b) {\n const op = binaryop_gpu.INT_DIV;\n const outputDtype = 'int32';\n if (env().getBool('WEBGL_PACK_BINARY_OPERATIONS')) {\n return this.packedBinaryOp(a, b, binaryop_packed_gpu.INT_DIV, outputDtype);\n }\n const program = new BinaryOpProgram(op, a.shape, b.shape);\n return this.compileAndRun(program, [a, b], outputDtype);\n }\n packedUnaryOp(x, op, dtype) {\n const program = new UnaryOpPackedProgram(x.shape, op);\n return this.compileAndRun(program, [x], dtype);\n }\n packedBinaryOp(a, b, op, dtype, checkOutOfBounds = false) {\n const program = new BinaryOpPackedProgram(op, a.shape, b.shape, checkOutOfBounds);\n return this.compileAndRun(program, [a, b], dtype);\n }\n // Returns a TensorInfo with the complex shape and the dataId of the\n // underlying part. 
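min, all, any and the other reductions above all funnel through reduce(), which shrinks the inner dimension by one window factor per pass and recurses until a single column is left. A toy single-array version of the same multi-pass idea, using a fixed window size in place of computeOptimalWindowSize (windowedReduce is a hypothetical helper):

// Reduce an array window by window, pass after pass, until one value is left.
function windowedReduce(values, combine, windowSize = 4) {
  if (values.length === 1) return values[0];
  const next = [];
  for (let i = 0; i < values.length; i += windowSize) {
    next.push(values.slice(i, i + windowSize).reduce(combine));
  }
  return windowedReduce(next, combine, windowSize);
}

console.log(windowedReduce([3, 1, 4, 1, 5, 9, 2, 6], (a, b) => Math.min(a, b))); // 1
console.log(windowedReduce([3, 1, 4, 1, 5, 9, 2, 6], (a, b) => a + b)); // 31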
We need to do this because a reshaped complex tensor is\n // not reflected in its parts.\n makeComplexComponentTensorInfo(complexTensor, complexPart) {\n return {\n dataId: complexPart.dataId,\n dtype: complexPart.dtype,\n shape: complexTensor.shape\n };\n }\n addN(tensors) {\n if (tensors.length === 1) {\n return tensors[0];\n }\n // Limit the number of uploaded textures for optimization.\n if (tensors.length > env().get('WEBGL_MAX_TEXTURES_IN_SHADER')) {\n const midIndex = Math.floor(tensors.length / 2);\n const leftSide = this.addN(tensors.slice(0, midIndex));\n const rightSide = this.addN(tensors.slice(midIndex));\n return this.addN([leftSide, rightSide]);\n }\n const dtype = tensors.map(t => t.dtype).reduce((d1, d2) => upcastType(d1, d2));\n const shapes = tensors.map(t => t.shape);\n // We can make sure shapes are identical in op level.\n const usePackedOp = env().getBool('WEBGL_PACK');\n const program = usePackedOp ?\n new AddNPackedProgram(tensors[0].shape, shapes) :\n new AddNProgram(tensors[0].shape, shapes);\n return this.compileAndRun(program, tensors, dtype);\n }\n pow(a, b) {\n const usePackedOp = env().getBool('WEBGL_PACK_BINARY_OPERATIONS');\n const program = usePackedOp ?\n new BinaryOpPackedProgram(binaryop_packed_gpu.POW, a.shape, b.shape) :\n new BinaryOpProgram(binaryop_gpu.POW, a.shape, b.shape);\n const dtype = upcastType(a.dtype, b.dtype);\n return this.compileAndRun(program, [a, b], dtype);\n }\n ceil(x) {\n if (this.shouldExecuteOnCPU([x])) {\n const outValues = ceilImplCPU(this.texData.get(x.dataId).values, x.dtype);\n return this.makeOutput(x.shape, x.dtype, outValues);\n }\n if (env().getBool('WEBGL_PACK_UNARY_OPERATIONS')) {\n return this.packedUnaryOp(x, unary_op.CEIL, x.dtype);\n }\n const program = new UnaryOpProgram(x.shape, unary_op.CEIL);\n return this.compileAndRun(program, [x]);\n }\n floor(x) {\n if (this.shouldExecuteOnCPU([x])) {\n const outValues = floorImplCPU(this.texData.get(x.dataId).values, x.dtype);\n return this.makeOutput(x.shape, x.dtype, outValues);\n }\n if (env().getBool('WEBGL_PACK_UNARY_OPERATIONS')) {\n return this.packedUnaryOp(x, unary_op.FLOOR, x.dtype);\n }\n const program = new UnaryOpProgram(x.shape, unary_op.FLOOR);\n return this.compileAndRun(program, [x]);\n }\n sign(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.SIGN);\n return this.compileAndRun(program, [x]);\n }\n isNaN(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.IS_NAN);\n return this.compileAndRun(program, [x], 'bool');\n }\n isInf(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.IS_INF);\n return this.compileAndRun(program, [x], 'bool');\n }\n isFinite(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.IS_FINITE);\n return this.compileAndRun(program, [x], 'bool');\n }\n round(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.ROUND);\n return this.compileAndRun(program, [x]);\n }\n exp(x) {\n if (this.shouldExecuteOnCPU([x])) {\n const outValues = expImplCPU(this.texData.get(x.dataId).values, x.dtype);\n return this.makeOutput(x.shape, x.dtype, outValues);\n }\n if (env().getBool('WEBGL_PACK_UNARY_OPERATIONS')) {\n return this.packedUnaryOp(x, unary_op.EXP, x.dtype);\n }\n const program = new UnaryOpProgram(x.shape, unary_op.EXP);\n return this.compileAndRun(program, [x]);\n }\n expm1(x) {\n if (this.shouldExecuteOnCPU([x])) {\n const outValues = expm1ImplCPU(this.texData.get(x.dataId).values, x.dtype);\n return this.makeOutput(x.shape, x.dtype, outValues);\n }\n if 
(env().getBool('WEBGL_PACK_UNARY_OPERATIONS')) {\n return this.packedUnaryOp(x, unary_op.EXPM1, x.dtype);\n }\n const program = new UnaryOpProgram(x.shape, unary_op.EXPM1);\n return this.compileAndRun(program, [x]);\n }\n softmax(logits, dim) {\n const axes = util.parseAxisParam([dim], logits.shape);\n // TODO(annxingyuan): Call maxImpl rather than op as part of softmax kernel\n // modularization.\n const maxLogit = max(logits, axes);\n const expandedShape = backend_util.expandShapeToKeepDim(maxLogit.shape, axes);\n // TODO(annxingyuan): Call sub directly as part of softmax kernel\n // modularization.\n const a = tf.sub(logits, maxLogit.reshape(expandedShape));\n const b = this.exp(a);\n const sumExp = this.sum(b, axes).reshape(expandedShape);\n // TODO(annxingyuan): Call divImpl rather than op as part of softmax kernel\n // modularization.\n return div(b, sumExp);\n }\n log(x) {\n if (this.shouldExecuteOnCPU([x])) {\n const outValues = logImplCPU(this.texData.get(x.dataId).values, x.dtype);\n return this.makeOutput(x.shape, x.dtype, outValues);\n }\n if (env().getBool('WEBGL_PACK_UNARY_OPERATIONS')) {\n return this.packedUnaryOp(x, unary_packed_op.LOG, x.dtype);\n }\n const program = new UnaryOpProgram(x.shape, unary_op.LOG);\n return this.compileAndRun(program, [x]);\n }\n log1p(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.LOG1P);\n return this.compileAndRun(program, [x]);\n }\n sqrt(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.SQRT);\n return this.compileAndRun(program, [x]);\n }\n rsqrt(x) {\n if (this.shouldExecuteOnCPU([x])) {\n const outValues = rsqrtImplCPU(this.texData.get(x.dataId).values, x.dtype);\n return this.makeOutput(x.shape, x.dtype, outValues);\n }\n const program = new UnaryOpProgram(x.shape, unary_op.RSQRT);\n return this.compileAndRun(program, [x]);\n }\n reciprocal(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.RECIPROCAL);\n return this.compileAndRun(program, [x]);\n }\n relu(x) {\n let program;\n if (env().getBool('WEBGL_PACK')) {\n program = new UnaryOpPackedProgram(x.shape, unary_packed_op.RELU);\n }\n else {\n program = new UnaryOpProgram(x.shape, unary_op.RELU);\n }\n return this.compileAndRun(program, [x]);\n }\n relu6(x) {\n let program;\n if (env().getBool('WEBGL_PACK')) {\n program = new UnaryOpPackedProgram(x.shape, unary_packed_op.RELU6);\n }\n else {\n program = new UnaryOpProgram(x.shape, unary_op.RELU6);\n }\n return this.compileAndRun(program, [x]);\n }\n prelu(x, alpha) {\n const program = env().getBool('WEBGL_PACK_BINARY_OPERATIONS') ?\n new BinaryOpPackedProgram(binaryop_packed_gpu.PRELU, x.shape, alpha.shape) :\n new BinaryOpProgram(binaryop_gpu.PRELU, x.shape, alpha.shape);\n return this.compileAndRun(program, [x, alpha]);\n }\n elu(x) {\n if (env().getBool('WEBGL_PACK_UNARY_OPERATIONS')) {\n return this.packedUnaryOp(x, unary_packed_op.ELU, x.dtype);\n }\n const program = new UnaryOpProgram(x.shape, unary_op.ELU);\n return this.compileAndRun(program, [x]);\n }\n eluDer(dy, y) {\n const program = env().getBool('WEBGL_PACK_BINARY_OPERATIONS') ?\n new BinaryOpPackedProgram(binaryop_packed_gpu.ELU_DER, dy.shape, y.shape) :\n new BinaryOpProgram(binaryop_gpu.ELU_DER, dy.shape, y.shape);\n return this.compileAndRun(program, [dy, y]);\n }\n selu(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.SELU);\n return this.compileAndRun(program, [x]);\n }\n clip(x, min, max) {\n let program;\n if (env().getBool('WEBGL_PACK_CLIP')) {\n program = new ClipPackedProgram(x.shape);\n }\n else {\n program = new 
ClipProgram(x.shape);\n }\n const customSetup = program.getCustomSetupFunc(min, max);\n return this.compileAndRun(program, [x], null, customSetup);\n }\n abs(x) {\n // TODO: handle cases when x is complex.\n if (this.shouldExecuteOnCPU([x]) && x.dtype !== 'complex64') {\n const outValues = simpleAbsImplCPU(this.texData.get(x.dataId).values);\n return this.makeOutput(x.shape, x.dtype, outValues);\n }\n if (env().getBool('WEBGL_PACK_UNARY_OPERATIONS')) {\n return this.packedUnaryOp(x, unary_op.ABS, x.dtype);\n }\n const program = new UnaryOpProgram(x.shape, unary_op.ABS);\n return this.compileAndRun(program, [x]);\n }\n complexAbs(x) {\n const xData = this.texData.get(x.dataId);\n const program = new ComplexAbsProgram(x.shape);\n const inputs = [\n this.makeComplexComponentTensorInfo(x, xData.complexTensorInfos.real),\n this.makeComplexComponentTensorInfo(x, xData.complexTensorInfos.imag),\n ];\n return this.compileAndRun(program, inputs);\n }\n sigmoid(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.SIGMOID);\n return this.compileAndRun(program, [x]);\n }\n softplus(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.SOFTPLUS);\n return this.compileAndRun(program, [x]);\n }\n asin(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.ASIN);\n return this.compileAndRun(program, [x]);\n }\n acos(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.ACOS);\n return this.compileAndRun(program, [x]);\n }\n atan(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.ATAN);\n return this.compileAndRun(program, [x]);\n }\n sinh(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.SINH);\n return this.compileAndRun(program, [x]);\n }\n cosh(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.COSH);\n return this.compileAndRun(program, [x]);\n }\n tanh(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.TANH);\n return this.compileAndRun(program, [x]);\n }\n asinh(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.ASINH);\n return this.compileAndRun(program, [x]);\n }\n acosh(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.ACOSH);\n return this.compileAndRun(program, [x]);\n }\n atanh(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.ATANH);\n return this.compileAndRun(program, [x]);\n }\n erf(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.ERF);\n return this.compileAndRun(program, [x]);\n }\n step(x, alpha) {\n const program = new UnaryOpProgram(x.shape, unary_op.STEP(alpha));\n return this.compileAndRun(program, [x]);\n }\n conv2dByMatMul(x, filter, convInfo, bias, activation, preluActivationWeights) {\n // Reshapes conv2D input to 2D tensors, uses matMul and then reshape the\n // result from 2D to 4D.\n const xShape = x.shape;\n const xTexData = this.texData.get(x.dataId);\n const sharedMatMulDim = convInfo.inChannels;\n const outerShapeX = xShape[0] * xShape[1] * xShape[2];\n const outerShapeFilter = convInfo.outChannels;\n const isChannelsLast = convInfo.dataFormat === 'channelsLast';\n const transposeA = false;\n const transposeB = false;\n // TODO: Once reduction ops are packed, batchMatMul will always be packed\n // and we can remove this condition.\n const batchMatMulWillBeUnpacked = (outerShapeX === 1 || outerShapeFilter === 1) &&\n sharedMatMulDim > MATMUL_SHARED_DIM_THRESHOLD;\n const reshapeWillBeExpensive = xShape[2] % 2 !== 0 && !!xTexData.isPacked;\n if (batchMatMulWillBeUnpacked || !env().getBool('WEBGL_LAZILY_UNPACK') ||\n !env().getBool('WEBGL_PACK_BINARY_OPERATIONS') ||\n 
!reshapeWillBeExpensive) {\n const targetShape = isChannelsLast ? xShape[0] * xShape[1] * xShape[2] :\n xShape[0] * xShape[2] * xShape[3];\n const xReshaped = reshape(x, [1, targetShape, convInfo.inChannels]);\n const filterReshaped = reshape(filter, [1, convInfo.inChannels, convInfo.outChannels]);\n const result = this.fusedBatchMatMul({\n a: xReshaped,\n b: filterReshaped,\n transposeA,\n transposeB,\n bias,\n activation,\n preluActivationWeights\n });\n return reshape(result, convInfo.outShape);\n }\n // Following optimization is specific to packed |x| with odd row count\n // (For example, in channelLast mode, 'row count' refers to x.shape[2]):\n // we avoid expensive packed 2x2 reshape by padding row count to next,\n // even number. When x.shape[2] is odd, the result of packed batchMatMul is\n // the same (has the same texture layout and and values in the texture) as\n // it is for even x.shape[2] + 1. We make the odd-rows tensor to look like\n // even-rows tensor before the operation and, after the batchMatMul,\n // fix the even-rows result to have odd number of rows.\n const targetShape = isChannelsLast ?\n xShape[0] * xShape[1] * (xShape[2] + 1) :\n xShape[0] * xShape[2] * (xShape[3] + 1);\n const xReshaped = {\n dataId: x.dataId,\n shape: [1, targetShape, convInfo.inChannels],\n dtype: x.dtype\n };\n // xTexData.shape gets referenced from GPGPUBinary.inShapeInfos.\n // Decrementing row count, after batchMatMul->...->compileProgram leads to\n // invalid row count within the reference in GPGPUBinary.inShapeInfos.\n // Alternative fix would be to provide a copy to GPGPUBinary.inShapeInfos\n // in compileProgram method, but that would affect compilation of all\n // programs - instead, provide a copy here, with even row count, before\n // calling batchMatMul->...->compileProgram and after that, the original\n // xTexData.shape is restored.\n const originalXTexDataShape = xTexData.shape;\n xTexData.shape = xTexData.shape.slice();\n xTexData.shape[xTexData.shape.length - 2]++;\n util.assert(webgl_util.isReshapeFree(xTexData.shape, xReshaped.shape), () => `packed reshape ${xTexData.shape} to ${xReshaped.shape} isn't free`);\n const filterReshaped = reshape(filter, [1, convInfo.inChannels, convInfo.outChannels]);\n const pointwiseConv = this.fusedBatchMatMul({\n a: xReshaped,\n b: filterReshaped,\n transposeA,\n transposeB,\n bias,\n activation,\n preluActivationWeights\n });\n const pointwiseConvTexData = this.texData.get(pointwiseConv.dataId);\n util.assert(pointwiseConvTexData.isPacked, () => 'batchMatMul result is expected to be packed');\n // Restore the input shape to original.\n xTexData.shape = originalXTexDataShape;\n // Set the output shape - there is no need for expensive reshape as data\n // layout is already correct.\n pointwiseConvTexData.shape = convInfo.outShape;\n return engine().makeTensorFromDataId(pointwiseConv.dataId, convInfo.outShape, pointwiseConv.dtype);\n }\n conv2dWithIm2Row(x, filter, convInfo, bias, activation, preluActivationWeights) {\n // Rearranges conv2d input so each block to be convolved over forms the\n // column of a new matrix with shape [filterWidth * filterHeight *\n // inChannels, outHeight * outWidth]. The filter is also rearranged so each\n // output channel forms a row of a new matrix with shape [outChannels,\n // filterWidth * filterHeight * inChannels]. 
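conv2dByMatMul above relies on the fact that a 1x1 convolution with unit strides is just a matrix multiply: flatten the spatial positions into rows and multiply by the [inChannels, outChannels] filter. A small plain-JavaScript check of that equivalence on a channels-last input (toy shapes, no backend involved; pointwiseConv is illustrative only):

// x: [height*width][inChannels] (spatial positions flattened, channels-last),
// filter: [inChannels][outChannels]. A 1x1, stride-1 convolution is exactly
// this matrix product: every output pixel mixes only its own input channels.
function pointwiseConv(x, filter) {
  return x.map((pixel) =>
    filter[0].map((_, outC) =>
      pixel.reduce((sum, v, inC) => sum + v * filter[inC][outC], 0)));
}

const x = [[1, 2], [3, 4], [5, 6]];       // 3 pixels, 2 input channels
const filter = [[1, 0, -1], [2, 1, 0]];   // 2 in-channels -> 3 out-channels
console.log(pointwiseConv(x, filter));
// [ [5, 2, -1], [11, 4, -3], [17, 6, -5] ]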
The convolution is then\n // computed by multiplying these matrices and reshaping the result.\n const { filterWidth, filterHeight, inChannels, outWidth, outHeight, dataFormat } = convInfo;\n const isChannelsLast = dataFormat === 'channelsLast';\n const sharedDim = filterWidth * filterHeight * inChannels;\n const numCols = outHeight * outWidth;\n const x2ColShape = [sharedDim, numCols];\n const transposeA = true;\n const transposeB = false;\n const xSqueezed = x.squeeze([0]);\n const w2Row = filter.reshape([1, sharedDim, -1]);\n const im2ColProgram = new Im2ColPackedProgram(x2ColShape, xSqueezed.shape, convInfo);\n const im2Col = this.compileAndRun(im2ColProgram, [xSqueezed]).reshape([\n 1, x2ColShape[0], x2ColShape[1]\n ]);\n const hasBias = bias != null;\n const hasPreluActivationWeights = preluActivationWeights != null;\n const fusedActivation = activation ? mapActivationToShaderProgram(activation, true) : null;\n const matmulProgram = new MatMulPackedProgram(im2Col.shape, w2Row.shape, [1, numCols, convInfo.outChannels], transposeA, transposeB, hasBias, fusedActivation, hasPreluActivationWeights);\n const inputs = [im2Col, w2Row];\n if (bias) {\n inputs.push(bias);\n }\n if (hasPreluActivationWeights) {\n inputs.push(preluActivationWeights);\n }\n const product = this.compileAndRun(matmulProgram, inputs);\n if (isChannelsLast) {\n return product.reshape([1, outHeight, outWidth, convInfo.outChannels]);\n }\n else {\n return product.reshape([1, convInfo.outChannels, outHeight, outWidth]);\n }\n }\n fusedConv2d({ input, filter, convInfo, bias, activation, preluActivationWeights }) {\n if (convInfo.filterHeight === 1 && convInfo.filterWidth === 1 &&\n convInfo.dilationHeight === 1 && convInfo.dilationWidth === 1 &&\n convInfo.strideHeight === 1 && convInfo.strideWidth === 1 &&\n (convInfo.padInfo.type === 'SAME' ||\n convInfo.padInfo.type === 'VALID')) {\n return this.conv2dByMatMul(input, filter, convInfo, bias, activation, preluActivationWeights);\n }\n if (env().getBool('WEBGL_CONV_IM2COL') && input.shape[0] === 1) {\n return this.conv2dWithIm2Row(input, filter, convInfo, bias, activation, preluActivationWeights);\n }\n const hasBias = bias != null;\n const hasPreluActivationWeights = preluActivationWeights != null;\n const fusedActivation = activation ? 
mapActivationToShaderProgram(activation, false) : null;\n const program = new Conv2DProgram(convInfo, hasBias, fusedActivation, hasPreluActivationWeights);\n const inputs = [input, filter];\n if (bias) {\n inputs.push(bias);\n }\n if (preluActivationWeights) {\n inputs.push(preluActivationWeights);\n }\n return this.compileAndRun(program, inputs);\n }\n conv2d(x, filter, convInfo) {\n if (convInfo.filterHeight === 1 && convInfo.filterWidth === 1 &&\n convInfo.dilationHeight === 1 && convInfo.dilationWidth === 1 &&\n convInfo.strideHeight === 1 && convInfo.strideWidth === 1 &&\n (convInfo.padInfo.type === 'SAME' ||\n convInfo.padInfo.type === 'VALID')) {\n return this.conv2dByMatMul(x, filter, convInfo);\n }\n if (env().getBool('WEBGL_CONV_IM2COL') && x.shape[0] === 1) {\n return this.conv2dWithIm2Row(x, filter, convInfo);\n }\n const program = new Conv2DProgram(convInfo);\n return this.compileAndRun(program, [x, filter]);\n }\n conv2dDerInput(dy, filter, convInfo) {\n const program = new Conv2DDerInputProgram(convInfo);\n return this.compileAndRun(program, [dy, filter]);\n }\n conv2dDerFilter(x, dy, convInfo) {\n const program = new Conv2DDerFilterProgram(convInfo);\n return this.compileAndRun(program, [x, dy]);\n }\n fusedDepthwiseConv2D({ input, filter, convInfo, bias, activation, preluActivationWeights }) {\n const shouldPackDepthwiseConv = env().getBool('WEBGL_PACK_DEPTHWISECONV') &&\n convInfo.strideWidth <= 2 &&\n convInfo.outChannels / convInfo.inChannels === 1;\n const fusedActivation = activation ?\n mapActivationToShaderProgram(activation, shouldPackDepthwiseConv) :\n null;\n const inputs = [input, filter];\n const hasBias = bias != null;\n const hasPreluActivationWeights = preluActivationWeights != null;\n if (hasBias) {\n inputs.push(bias);\n }\n if (hasPreluActivationWeights) {\n inputs.push(preluActivationWeights);\n }\n let program;\n if (shouldPackDepthwiseConv) {\n program = new DepthwiseConvPacked2DProgram(convInfo, hasBias, fusedActivation, hasPreluActivationWeights);\n return this.compileAndRun(program, inputs);\n }\n program = new DepthwiseConv2DProgram(convInfo, hasBias, fusedActivation, hasPreluActivationWeights);\n return this.compileAndRun(program, inputs);\n }\n depthwiseConv2D(x, filter, convInfo) {\n let program;\n if (env().getBool('WEBGL_PACK_DEPTHWISECONV') &&\n convInfo.strideWidth <= 2 &&\n convInfo.outChannels / convInfo.inChannels === 1) {\n program = new DepthwiseConvPacked2DProgram(convInfo);\n return this.compileAndRun(program, [x, filter]);\n }\n program = new DepthwiseConv2DProgram(convInfo);\n return this.compileAndRun(program, [x, filter]);\n }\n depthwiseConv2DDerInput(dy, filter, convInfo) {\n const program = new DepthwiseConv2DDerInputProgram(convInfo);\n return this.compileAndRun(program, [dy, filter]);\n }\n depthwiseConv2DDerFilter(x, dy, convInfo) {\n const program = new DepthwiseConv2DDerFilterProgram(convInfo);\n return this.compileAndRun(program, [x, dy]);\n }\n conv3d(x, filter, convInfo) {\n const program = new Conv3DProgram(convInfo);\n return this.compileAndRun(program, [x, filter]);\n }\n conv3dDerInput(dy, filter, convInfo) {\n const program = new Conv3DDerInputProgram(convInfo);\n return this.compileAndRun(program, [dy, filter]);\n }\n conv3dDerFilter(x, dy, convInfo) {\n const program = new Conv3DDerFilterProgram(convInfo);\n return this.compileAndRun(program, [x, dy]);\n }\n unstack(x, axis) {\n const num = x.shape[axis];\n const outShape = new Array(x.rank - 1);\n let outIndex = 0;\n for (let i = 0; i < x.rank; i++) {\n if (i 
!== axis) {\n outShape[outIndex++] = x.shape[i];\n }\n }\n const begin = new Array(x.rank).fill(0);\n const size = x.shape.slice();\n size[axis] = 1;\n const res = new Array(num);\n for (let i = 0; i < res.length; i++) {\n begin[axis] = i;\n res[i] = this.slice(x, begin, size).reshape(outShape);\n }\n return res;\n }\n avgPool3d(x, convInfo) {\n const program = new Pool3DProgram(convInfo, 'avg', false);\n return this.compileAndRun(program, [x], 'float32');\n }\n avgPool3dBackprop(dy, x, convInfo) {\n const avgPool3dBackpropProgram = new AvgPool3DBackpropProgram(convInfo);\n return this.compileAndRun(avgPool3dBackpropProgram, [dy], x.dtype);\n }\n maxPool3d(x, convInfo) {\n const program = new Pool3DProgram(convInfo, 'max', false);\n return this.compileAndRun(program, [x], 'float32');\n }\n maxPool3dBackprop(dy, x, y, convInfo) {\n const getPositions = true;\n const maxPool3dPositionsProgram = new Pool3DProgram(convInfo, 'max', getPositions);\n const maxPool3dPositions = this.compileAndRun(maxPool3dPositionsProgram, [x]);\n const maxPool3dBackPropProgram = new MaxPool3DBackpropProgram(convInfo);\n const result = this.compileAndRun(maxPool3dBackPropProgram, [dy, maxPool3dPositions], x.dtype);\n maxPool3dPositions.dispose();\n return result;\n }\n resizeBilinear(x, newHeight, newWidth, alignCorners) {\n const program = env().getBool('WEBGL_PACK_IMAGE_OPERATIONS') ?\n new ResizeBilinearPackedProgram(x.shape, newHeight, newWidth, alignCorners) :\n new ResizeBilinearProgram(x.shape, newHeight, newWidth, alignCorners);\n return this.compileAndRun(program, [x], 'float32');\n }\n resizeBilinearBackprop(dy, x, alignCorners) {\n const program = new ResizeBilinearBackpropProgram(dy, x, alignCorners);\n return this.compileAndRun(program, [dy]);\n }\n resizeNearestNeighbor(x, newHeight, newWidth, alignCorners) {\n const program = new ResizeNearestNeighborProgram(x.shape, newHeight, newWidth, alignCorners);\n return this.compileAndRun(program, [x]);\n }\n resizeNearestNeighborBackprop(dy, x, alignCorners) {\n const program = new ResizeNearestNeigborBackpropProgram(dy, x, alignCorners);\n return this.compileAndRun(program, [dy]);\n }\n multinomial(logits, normalized, numSamples, seed) {\n const probs = normalized ? logits : softmax(logits);\n const batchSize = probs.shape[0];\n const numOutcomes = probs.shape[1];\n const program = new MultinomialProgram(batchSize, numOutcomes, numSamples);\n const customSetup = program.getCustomSetupFunc(seed);\n return this.compileAndRun(program, [probs], 'int32', customSetup);\n }\n oneHot(indices, depth, onValue, offValue) {\n const program = new OneHotProgram(indices.size, depth, onValue, offValue);\n return this.compileAndRun(program, [indices]);\n }\n diag(x) {\n const program = new DiagProgram(x.size);\n return this.compileAndRun(program, [x]);\n }\n cropAndResize(image, boxes, boxIndex, cropSize, method, extrapolationValue) {\n const program = new CropAndResizeProgram(image.shape, boxes.shape, cropSize, method, extrapolationValue);\n return this.compileAndRun(program, [image, boxes, boxIndex], 'float32');\n }\n depthToSpace(x, blockSize, dataFormat) {\n util.assert(blockSize > 1, () => `blockSize should be > 1 for depthToSpace, but was: ${blockSize}`);\n const batchSize = x.shape[0];\n const inputHeight = (dataFormat === 'NHWC') ? x.shape[1] : x.shape[2];\n const inputWidth = (dataFormat === 'NHWC') ? x.shape[2] : x.shape[3];\n const inputDepth = (dataFormat === 'NHWC') ? 
x.shape[3] : x.shape[1];\n const outputHeight = inputHeight * blockSize;\n const outputWidth = inputWidth * blockSize;\n const outputDepth = inputDepth / (blockSize * blockSize);\n const outputShape = (dataFormat === 'NHWC') ?\n [batchSize, outputHeight, outputWidth, outputDepth] :\n [batchSize, outputDepth, outputHeight, outputWidth];\n const program = new DepthToSpaceProgram(outputShape, blockSize, dataFormat);\n return this.compileAndRun(program, [x]);\n }\n split(x, sizeSplits, axis) {\n return split(x, sizeSplits, axis);\n }\n scatterND(indices, updates, shape) {\n const { sliceRank, numUpdates, sliceSize, strides, outputSize } = backend_util.calculateShapes(updates, indices, shape);\n const flattenShape = [outputSize / sliceSize, sliceSize];\n const flattenIndices = indices.reshape([numUpdates, sliceRank]);\n const flattenX = updates.reshape([numUpdates, sliceSize]);\n if (outputSize === 0) {\n return backend_util.reshapeTensor(tensor([]), shape);\n }\n const defaultValue = scalar(0);\n const program = new ScatterProgram(numUpdates, sliceRank, flattenIndices.rank, flattenX.rank, strides, flattenShape);\n const res = this.compileAndRun(program, [flattenX, flattenIndices, defaultValue]);\n return res.reshape(shape);\n }\n sparseToDense(sparseIndices, sparseValues, outputShape, defaultValue) {\n const { sliceRank, numUpdates, strides, outputSize } = backend_util.calculateShapes(sparseValues, sparseIndices, outputShape);\n const sumDupeIndices = false;\n const program = new ScatterProgram(numUpdates, sliceRank, sparseIndices.rank, sparseValues.rank, strides, [outputSize, 1], sumDupeIndices);\n const res = this.compileAndRun(program, [sparseValues, sparseIndices, defaultValue]);\n return res.reshape(outputShape);\n }\n gatherND(x, indices) {\n const indicesShape = indices.shape;\n const sliceRank = indicesShape[indicesShape.length - 1];\n const [resultShape, numSlices, sliceSize, strides] = backend_util.prepareAndValidate(x, indices);\n const flattenIndices = indices.reshape([numSlices, sliceRank]);\n const flattenX = x.reshape([x.size / sliceSize, sliceSize]);\n const program = new GatherNDProgram(sliceRank, strides, [numSlices, sliceSize]);\n const res = this.compileAndRun(program, [flattenX, flattenIndices]);\n return res.reshape(resultShape);\n }\n fill(shape, value, dtype) {\n dtype = dtype || util.inferDtype(value);\n if (dtype === 'string') {\n // String type should be handled in CPU memory.\n const values = util.getArrayFromDType(dtype, util.sizeFromShape(shape));\n values.fill(value);\n return engine().makeTensor(values, shape, dtype, this);\n }\n else {\n const program = new FillProgram(shape, value);\n const customSetup = program.getCustomSetupFunc(value);\n return this.compileAndRun(program, [], dtype, customSetup);\n }\n }\n onesLike(x) {\n if (x.dtype === 'string') {\n throw new Error('onesLike is not supported under string dtype');\n }\n else {\n // TODO(cais, smilkov): Add WebGL shader for onesLike:\n // https://github.com/tensorflow/tfjs/issues/1293\n return this.fill(x.shape, 1, x.dtype);\n }\n }\n zerosLike(x) {\n return this.fill(x.shape, x.dtype === 'string' ? 
'' : 0, x.dtype);\n }\n linspace(start, stop, num) {\n // TODO: Use CPU implementation due to the precision problem in Safari.\n return backend_util.linspaceImpl(start, stop, num);\n }\n makeTensorInfo(shape, dtype, values) {\n const dataId = this.write(values, shape, dtype);\n this.texData.get(dataId).usage = null;\n return { dataId, shape, dtype };\n }\n makeOutput(shape, dtype, values) {\n const { dataId } = this.makeTensorInfo(shape, dtype, values);\n return engine().makeTensorFromDataId(dataId, shape, dtype, this);\n }\n unpackTensor(input) {\n const program = new UnpackProgram(input.shape);\n return this.runWebGLProgram(program, [input], input.dtype);\n }\n packTensor(input) {\n const program = new PackProgram(input.shape);\n const preventEagerUnpackingOutput = true;\n return this.runWebGLProgram(program, [input], input.dtype, null /* customSetup */, preventEagerUnpackingOutput);\n }\n packedReshape(input, afterShape) {\n const input3DShape = [\n webgl_util.getBatchDim(input.shape),\n ...webgl_util.getRowsCols(input.shape)\n ];\n const input3D = {\n dtype: input.dtype,\n shape: input3DShape,\n dataId: input.dataId\n };\n const afterShapeAs3D = [\n webgl_util.getBatchDim(afterShape), ...webgl_util.getRowsCols(afterShape)\n ];\n const program = new ReshapePackedProgram(afterShapeAs3D, input3DShape);\n const preventEagerUnpackingOfOutput = true;\n const output = this.runWebGLProgram(program, [input3D], input.dtype, null /* customSetup */, preventEagerUnpackingOfOutput);\n return { dataId: output.dataId, shape: afterShape, dtype: output.dtype };\n }\n decode(dataId) {\n const texData = this.texData.get(dataId);\n const { isPacked, shape, dtype } = texData;\n const shapeAs3D = webgl_util.getShapeAs3D(shape);\n let program;\n if (isPacked) {\n program = new DecodeMatrixPackedProgram(shapeAs3D);\n }\n else {\n program = new DecodeMatrixProgram(shapeAs3D);\n }\n const preventEagerUnpackingOfOutput = true;\n const out = this.runWebGLProgram(program, [{ shape: shapeAs3D, dtype, dataId }], dtype, null /* customSetup */, preventEagerUnpackingOfOutput);\n return { dtype, shape, dataId: out.dataId };\n }\n runWebGLProgram(program, inputs, outputDtype, customSetup, preventEagerUnpackingOfOutput = false) {\n const output = this.makeTensorInfo(program.outputShape, outputDtype);\n const outData = this.texData.get(output.dataId);\n if (program.packedOutput) {\n outData.isPacked = true;\n }\n if (program.outPackingScheme === tex_util.PackingScheme.DENSE) {\n const texelShape = tex_util.getDenseTexShape(program.outputShape);\n // For a densely packed output, we explicitly set texShape\n // so it doesn't get assigned later according to our typical packing\n // scheme wherein a single texel can only contain values from adjacent\n // rows/cols.\n outData.texShape = texelShape.map(d => d * 2);\n }\n if (program.outTexUsage != null) {\n outData.usage = program.outTexUsage;\n }\n if (util.sizeFromShape(output.shape) === 0) {\n // Short-circuit the computation since the result is empty (has 0 in its\n // shape).\n outData.values =\n util.getTypedArrayFromDType(output.dtype, 0);\n return output;\n }\n const dataToDispose = [];\n const inputsData = inputs.map(input => {\n if (input.dtype === 'complex64') {\n throw new Error(`GPGPUProgram does not support complex64 input. 
For complex64 ` +\n `dtypes, please separate the program into real and imaginary ` +\n `parts.`);\n }\n let texData = this.texData.get(input.dataId);\n if (texData.texture == null) {\n if (!program.packedInputs &&\n util.sizeFromShape(input.shape) <=\n env().getNumber('WEBGL_SIZE_UPLOAD_UNIFORM')) {\n // Upload small tensors that live on the CPU as uniforms, not as\n // textures. Do this only when the environment supports 32bit floats\n // due to problems when comparing 16bit floats with 32bit floats.\n // TODO(https://github.com/tensorflow/tfjs/issues/821): Make it\n // possible for packed shaders to sample from uniforms.\n return {\n shape: input.shape,\n texData: null,\n isUniform: true,\n uniformValues: texData.values\n };\n }\n // This ensures that if a packed program's inputs have not yet been\n // uploaded to the GPU, they get uploaded as packed right off the bat.\n if (program.packedInputs) {\n texData.isPacked = true;\n texData.shape = input.shape;\n }\n }\n else if (!!texData.isPacked !== !!program.packedInputs) {\n input = texData.isPacked ? this.unpackTensor(input) :\n this.packTensor(input);\n dataToDispose.push(input);\n texData = this.texData.get(input.dataId);\n }\n else if (texData.isPacked &&\n !webgl_util.isReshapeFree(texData.shape, input.shape)) {\n // This is a special case where a texture exists for a tensor\n // but the shapes are incompatible (due to packing constraints) because\n // the tensor did not have a chance to go through the packed reshape\n // shader. This only happens when we reshape the *same* tensor to form\n // *distinct* inputs to an op, e.g. dotting a vector with itself. This\n // case will disappear once packed uploading is the default.\n const savedInput = input;\n const targetShape = input.shape;\n input.shape = texData.shape;\n input = this.packedReshape(input, targetShape);\n dataToDispose.push(input);\n texData = this.texData.get(input.dataId);\n savedInput.shape = targetShape;\n }\n this.uploadToGPU(input.dataId);\n return { shape: input.shape, texData, isUniform: false };\n });\n this.uploadToGPU(output.dataId);\n const outputData = { shape: output.shape, texData: outData, isUniform: false };\n const key = gpgpu_math.makeShaderKey(program, inputsData, outputData);\n const binary = this.getAndSaveBinary(key, () => {\n return gpgpu_math.compileProgram(this.gpgpu, program, inputsData, outputData);\n });\n const shouldTimeProgram = this.activeTimers != null;\n let query;\n if (shouldTimeProgram) {\n query = this.startTimer();\n }\n gpgpu_math.runProgram(this.gpgpu, binary, inputsData, outputData, customSetup);\n dataToDispose.forEach(info => this.disposeIntermediateTensorInfo(info));\n if (shouldTimeProgram) {\n query = this.endTimer(query);\n this.activeTimers.push({ name: program.constructor.name, query: this.getQueryTime(query) });\n }\n if (!env().getBool('WEBGL_LAZILY_UNPACK') && outData.isPacked &&\n preventEagerUnpackingOfOutput === false) {\n const unpacked = this.unpackTensor(output);\n this.disposeIntermediateTensorInfo(output);\n return unpacked;\n }\n return output;\n }\n compileAndRun(program, inputs, outputDtype, customSetup, preventEagerUnpackingOfOutput = false) {\n outputDtype = outputDtype || inputs[0].dtype;\n const outInfo = this.runWebGLProgram(program, inputs, outputDtype, customSetup, preventEagerUnpackingOfOutput);\n return engine().makeTensorFromDataId(outInfo.dataId, outInfo.shape, outInfo.dtype);\n }\n getAndSaveBinary(key, getBinary) {\n if (!(key in this.binaryCache)) {\n this.binaryCache[key] = getBinary();\n }\n 
return this.binaryCache[key];\n }\n getTextureManager() {\n return this.textureManager;\n }\n dispose() {\n if (this.disposed) {\n return;\n }\n // Avoid disposing the compiled webgl programs during unit testing because\n // it slows down test execution.\n if (!env().getBool('IS_TEST')) {\n const allKeys = Object.keys(this.binaryCache);\n allKeys.forEach(key => {\n this.gpgpu.deleteProgram(this.binaryCache[key].webGLProgram);\n delete this.binaryCache[key];\n });\n }\n this.textureManager.dispose();\n if (this.canvas != null &&\n (typeof (HTMLCanvasElement) !== 'undefined' &&\n this.canvas instanceof HTMLCanvasElement)) {\n this.canvas.remove();\n }\n else {\n this.canvas = null;\n }\n if (this.gpgpuCreatedLocally) {\n this.gpgpu.program = null;\n this.gpgpu.dispose();\n }\n this.disposed = true;\n }\n floatPrecision() {\n if (this.floatPrecisionValue == null) {\n this.floatPrecisionValue = tidy(() => {\n if (!env().get('WEBGL_RENDER_FLOAT32_ENABLED')) {\n // Momentarily switching DEBUG flag to false so we don't throw an\n // error trying to upload a small value.\n const debugFlag = env().getBool('DEBUG');\n env().set('DEBUG', false);\n const underflowCheckValue = this.abs(scalar(1e-8)).dataSync()[0];\n env().set('DEBUG', debugFlag);\n if (underflowCheckValue > 0) {\n return 32;\n }\n }\n return 16;\n });\n }\n return this.floatPrecisionValue;\n }\n /** Returns the smallest representable number. */\n epsilon() {\n return this.floatPrecision() === 32 ? EPSILON_FLOAT32 : EPSILON_FLOAT16;\n }\n uploadToGPU(dataId) {\n const texData = this.texData.get(dataId);\n const { shape, dtype, values, texture, usage, isPacked } = texData;\n if (texture != null) {\n // Array is already on GPU. No-op.\n return;\n }\n const shouldTimeProgram = this.activeTimers != null;\n let start;\n if (shouldTimeProgram) {\n start = util.now();\n }\n let texShape = texData.texShape;\n if (texShape == null) {\n texShape = webgl_util.getTextureShapeFromLogicalShape(shape, isPacked);\n texData.texShape = texShape;\n }\n if (values != null) {\n const shapeAs3D = webgl_util.getShapeAs3D(shape);\n let program;\n let width = texShape[1], height = texShape[0];\n const isByteArray = values instanceof Uint8Array;\n if (isPacked) {\n [width, height] = tex_util.getPackedMatrixTextureShapeWidthHeight(texShape[0], texShape[1]);\n program = new EncodeMatrixPackedProgram(shapeAs3D, [height, width], isByteArray);\n }\n else {\n program =\n new EncodeMatrixProgram(shapeAs3D, [height, width], isByteArray);\n }\n const tempDenseInputHandle = this.makeTensorInfo([height, width], dtype);\n if (isByteArray) {\n this.texData.get(tempDenseInputHandle.dataId).usage =\n TextureUsage.PIXELS;\n }\n else {\n this.texData.get(tempDenseInputHandle.dataId).usage =\n TextureUsage.UPLOAD;\n }\n this.gpgpu.uploadDenseMatrixToTexture(this.getTexture(tempDenseInputHandle.dataId), width, height, values);\n // We want the output to remain packed regardless of the value of\n // WEBGL_PACK.\n const preventEagerUnpacking = true;\n const encodedOutputTarget = this.runWebGLProgram(program, [tempDenseInputHandle], dtype, null, preventEagerUnpacking);\n // Have the original texture assume the identity of the encoded output.\n const outputTexData = this.texData.get(encodedOutputTarget.dataId);\n texData.texture = outputTexData.texture;\n texData.texShape = outputTexData.texShape;\n texData.isPacked = outputTexData.isPacked;\n texData.usage = outputTexData.usage;\n this.disposeIntermediateTensorInfo(tempDenseInputHandle);\n 
this.texData.delete(encodedOutputTarget.dataId);\n // Once uploaded, don't store the values on cpu.\n texData.values = null;\n if (shouldTimeProgram) {\n this.uploadWaitMs += util.now() - start;\n }\n }\n else {\n const newTexture = this.acquireTexture(texShape, usage, dtype, isPacked);\n texData.texture = newTexture;\n }\n }\n convertAndCacheOnCPU(dataId, float32Values) {\n const texData = this.texData.get(dataId);\n const { dtype } = texData;\n this.releaseGPUData(dataId);\n if (float32Values != null) {\n texData.values = float32ToTypedArray(float32Values, dtype);\n }\n return texData.values;\n }\n acquireTexture(texShape, texType, dtype, isPacked) {\n this.numBytesInGPU += this.computeBytes(texShape, dtype);\n if (!this.warnedAboutMemory &&\n this.numBytesInGPU > this.numMBBeforeWarning * 1024 * 1024) {\n const mb = (this.numBytesInGPU / 1024 / 1024).toFixed(2);\n this.warnedAboutMemory = true;\n console.warn(`High memory usage in GPU: ${mb} MB, ` +\n `most likely due to a memory leak`);\n }\n return this.textureManager.acquireTexture(texShape, texType, isPacked);\n }\n computeBytes(shape, dtype) {\n return shape[0] * shape[1] * util.bytesPerElement(dtype);\n }\n tryRunOnCpuOrThrow(inputs, fn) {\n if (this.shouldExecuteOnCPU(inputs)) {\n try {\n return fn();\n }\n catch (e) {\n if (env().getBool('IS_TEST')) {\n throw new Error('CPU forwarding failed');\n }\n }\n }\n return null;\n }\n}\nfunction float32ToTypedArray(a, dtype) {\n if (dtype === 'float32' || dtype === 'complex64') {\n return a;\n }\n else if (dtype === 'int32' || dtype === 'bool') {\n const result = (dtype === 'int32') ? new Int32Array(a.length) :\n new Uint8Array(a.length);\n for (let i = 0; i < result.length; ++i) {\n result[i] = Math.round(a[i]);\n }\n return result;\n }\n else {\n throw new Error(`Unknown dtype ${dtype}`);\n }\n}\n//# sourceMappingURL=backend_webgl.js.map", "/** @license See the LICENSE file. */\n// This code is auto-generated, do not modify this file!\nconst version = '2.7.0';\nexport { version };\n//# sourceMappingURL=version.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from '@tensorflow/tfjs-core';\nimport * as gpgpu_util from './gpgpu_util';\nimport * as webgl_util from './webgl_util';\nexport { MathBackendWebGL } from './backend_webgl';\nexport { setWebGLContext } from './canvas_util';\nexport { GPGPUContext } from './gpgpu_context';\n// WebGL specific utils.\nexport { gpgpu_util, webgl_util };\n/**\n * Enforce use of half precision textures if available on the platform.\n *\n * @doc {heading: 'Environment', namespace: 'webgl'}\n */\nexport function forceHalfFloat() {\n env().set('WEBGL_FORCE_F16_TEXTURES', true);\n}\n//# sourceMappingURL=webgl.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// base.ts is the webgl backend without auto kernel registration.\nimport { device_util, registerBackend } from '@tensorflow/tfjs-core';\nimport { MathBackendWebGL } from './backend_webgl';\nexport { version as version_webgl } from './version';\nif (device_util.isBrowser()) {\n registerBackend('webgl', () => new MathBackendWebGL(), 2 /* priority */);\n}\n// Export webgl utilities\nexport * from './webgl';\n// Export forceHalfFlost under webgl namespace for the union bundle.\nimport { forceHalfFloat } from './webgl';\nexport const webgl = { forceHalfFloat };\n//# sourceMappingURL=base.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Identity } from '@tensorflow/tfjs-core';\nexport function identity(args) {\n const { inputs, backend } = args;\n const { x } = inputs;\n backend.incRef(x.dataId);\n return { dataId: x.dataId, shape: x.shape, dtype: x.dtype };\n}\nexport const identityConfig = {\n kernelName: Identity,\n backendName: 'webgl',\n kernelFunc: identity\n};\n//# sourceMappingURL=Identity.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Complex } from '@tensorflow/tfjs-core';\nimport { identity } from './Identity';\n/**\n * In WebGL data is stored in GPU textures which can't be efficiently copied, so\n * complex tensors share data with their real and imaginary components. 
Complex\n * tensors increment the `complexParentRefCount` properties of the underlying\n * data buckets to prevent them from being disposed, as the engine's disposal\n * logic does not account for data sharing by complex tensors.\n *\n * When a complex tensor is disposed, it will explicitly decrease the\n * `complexParentRefCount` properties of its underlying components.\n */\nexport function complex(args) {\n const { inputs, backend } = args;\n const { real, imag } = inputs;\n const complexInfo = backend.makeTensorInfo(real.shape, 'complex64');\n const complex = backend.texData.get(complexInfo.dataId);\n const realTensorInfo = identity({ inputs: { x: real }, backend });\n const realData = backend.texData.get(realTensorInfo.dataId);\n realData.complexParentRefCount++;\n const imagTensorInfo = identity({ inputs: { x: imag }, backend });\n const imagData = backend.texData.get(imagTensorInfo.dataId);\n imagData.complexParentRefCount++;\n complex.complexTensorInfos = { real: realTensorInfo, imag: imagTensorInfo };\n return complexInfo;\n}\nexport const complexConfig = {\n kernelName: Complex,\n backendName: 'webgl',\n kernelFunc: complex\n};\n//# sourceMappingURL=Complex.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env, upcastType } from '@tensorflow/tfjs-core';\nimport { BinaryOpProgram } from '../binaryop_gpu';\nimport { BinaryOpPackedProgram } from '../binaryop_packed_gpu';\nimport { complex } from '../kernels/Complex';\nimport { UnaryOpProgram } from '../unaryop_gpu';\nexport const CHECK_NAN_SNIPPET_UNARY = `if (isnan(x)) return x;`;\nexport const CHECK_NAN_SNIPPET_BINARY = `\n if (isnan(a)) return a;\n if (isnan(b)) return b;\n`;\nexport const CHECK_NAN_SNIPPET_BINARY_PACKED = `\n result.r = isNaN.r > 0. ? NAN : result.r;\n result.g = isNaN.g > 0. ? NAN : result.g;\n result.b = isNaN.b > 0. ? NAN : result.b;\n result.a = isNaN.a > 0. ? NAN : result.a;\n`;\n/**\n * Template that creates a `KernelFunc` for unary ops.\n * @param opSnippets Op snippet to create `UnaryOpProgram`.\n */\nexport function unaryKernelFunc(opSnippet) {\n return ({ inputs, backend }) => {\n const { x } = inputs;\n const webglBackend = backend;\n const program = new UnaryOpProgram(x.shape, opSnippet);\n return webglBackend.runWebGLProgram(program, [x], x.dtype);\n };\n}\n/**\n * Template that creates a `KernelFunc` for binary ops.\n * @param opSnippet Op snippet to create `BinaryOpProgram`.\n * @param packedOpSnippet Op snippet to create `BinaryOpPackedProgram`.\n * @param checkOutOfBoundsForPackedProgram Whether to set checkOutOfBounds=true\n * when creating BinaryOpPackedProgram.\n * @param dtype Optional. If set, the result has this dtype. Otherwise, the\n * result has the same dtype as the first input. 
This is mainly used in\n * comparison kernels, such as Equal, Less, Greater, etc.\n */\nexport function binaryKernelFunc({ opSnippet, packedOpSnippet, checkOutOfBounds = false, supportsComplex = false, cpuKernelImpl, dtype }) {\n return ({ inputs, backend }) => {\n const { a, b } = inputs;\n const webglBackend = backend;\n if (supportsComplex && a.dtype === 'complex64') {\n const aData = webglBackend.texData.get(a.dataId);\n const bData = webglBackend.texData.get(b.dataId);\n const [real, imag] = [\n [aData.complexTensorInfos.real, bData.complexTensorInfos.real],\n [aData.complexTensorInfos.imag, bData.complexTensorInfos.imag]\n ].map(complexParts => {\n const [aPart, bPart] = complexParts;\n const aHandle = {\n dataId: aPart.dataId,\n dtype: aPart.dtype,\n shape: a.shape\n };\n const bHandle = {\n dataId: bPart.dataId,\n dtype: bPart.dtype,\n shape: b.shape\n };\n const program = new BinaryOpProgram(opSnippet, a.shape, b.shape);\n return webglBackend.runWebGLProgram(program, [aHandle, bHandle], upcastType(aPart.dtype, bPart.dtype));\n });\n const complexOutput = complex({ inputs: { real, imag }, backend: webglBackend });\n webglBackend.disposeIntermediateTensorInfo(real);\n webglBackend.disposeIntermediateTensorInfo(imag);\n // TODO(annxingyuan): Implement CPU forwarding for complex inputs.\n return complexOutput;\n }\n const $dtype = dtype || upcastType(a.dtype, b.dtype);\n if (webglBackend.shouldExecuteOnCPU([a, b]) && cpuKernelImpl != null) {\n const aData = webglBackend.texData.get(a.dataId);\n const bData = webglBackend.texData.get(b.dataId);\n const [outValues, outShape] = cpuKernelImpl(a.shape, b.shape, aData.values, bData.values, $dtype);\n const out = webglBackend.makeTensorInfo(outShape, $dtype);\n const outData = webglBackend.texData.get(out.dataId);\n outData.values = outValues;\n return out;\n }\n const shouldUsePackedProgram = env().getBool('WEBGL_PACK_BINARY_OPERATIONS') &&\n packedOpSnippet != null;\n let program;\n if (shouldUsePackedProgram) {\n program = new BinaryOpPackedProgram(packedOpSnippet, a.shape, b.shape, checkOutOfBounds);\n }\n else {\n program = new BinaryOpProgram(opSnippet, a.shape, b.shape);\n }\n return webglBackend.runWebGLProgram(program, [a, b], $dtype);\n };\n}\n//# sourceMappingURL=kernel_funcs_utils.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Add } from '@tensorflow/tfjs-core';\nimport { binaryKernelFunc } from '../kernel_utils/kernel_funcs_utils';\nimport { addImplCPU as cpuAdd } from '../kernel_utils/shared';\nconst ADD = 'return a + b;';\nexport const addKernelFunc = binaryKernelFunc({\n opSnippet: ADD,\n packedOpSnippet: ADD,\n supportsComplex: true,\n cpuKernelImpl: cpuAdd\n});\nexport const addConfig = {\n kernelName: Add,\n backendName: 'webgl',\n kernelFunc: addKernelFunc\n};\n//# sourceMappingURL=Add.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Atan2 } from '@tensorflow/tfjs-core';\nimport { binaryKernelFunc, CHECK_NAN_SNIPPET_BINARY, CHECK_NAN_SNIPPET_BINARY_PACKED } from '../kernel_utils/kernel_funcs_utils';\nconst ATAN2 = CHECK_NAN_SNIPPET_BINARY + `\n return atan(a, b);\n`;\nconst ATAN2_PACKED = `\n vec4 result = atan(a, b);\n vec4 isNaN = min(vec4(isnan(a)) + vec4(isnan(b)), vec4(1.0));\n ` +\n CHECK_NAN_SNIPPET_BINARY_PACKED + `\n return result;\n`;\nexport const atan2 = binaryKernelFunc({ opSnippet: ATAN2, packedOpSnippet: ATAN2_PACKED });\nexport const atan2Config = {\n kernelName: Atan2,\n backendName: 'webgl',\n kernelFunc: atan2,\n};\n//# sourceMappingURL=Atan2.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { AvgPool, backend_util, util } from '@tensorflow/tfjs-core';\nimport { Pool2DProgram } from '../pool_gpu';\nimport { assertNotComplex } from '../webgl_util';\nimport { identity } from './Identity';\nexport function avgPool(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n assertNotComplex(x, 'avgPool');\n const { filterSize, strides, pad, dimRoundingMode } = attrs;\n const dilations = 1;\n util.assert(backend_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in avgPool: Either strides or dilations must be 1. ' +\n `Got strides ${strides} and dilations '${dilations}'`);\n const convInfo = backend_util.computePool2DInfo(x.shape, filterSize, strides, dilations, pad, dimRoundingMode);\n if (convInfo.filterWidth === 1 && convInfo.filterHeight === 1 &&\n util.arraysEqual(convInfo.inShape, convInfo.outShape)) {\n return identity({ inputs: { x }, backend });\n }\n const avgPoolProgram = new Pool2DProgram(convInfo, 'avg', false);\n return backend.runWebGLProgram(avgPoolProgram, [x], 'float32');\n}\nexport const avgPoolConfig = {\n kernelName: AvgPool,\n backendName: 'webgl',\n kernelFunc: avgPool\n};\n//# sourceMappingURL=AvgPool.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { AvgPoolBackprop, backend_util } from '@tensorflow/tfjs-core';\nimport { AvgPool2DBackpropProgram } from '../avg_pool_backprop_gpu';\nimport { assertNotComplex } from '../webgl_util';\nexport function avgPoolBackprop(args) {\n const { inputs, backend, attrs } = args;\n const { dy, input } = inputs;\n const x = input;\n assertNotComplex([dy, input], 'avgPoolBackprop');\n const { filterSize, strides, pad } = attrs;\n const convInfo = backend_util.computePool2DInfo(x.shape, filterSize, strides, 1 /* dilations */, pad);\n const avgPoolBackpropProgram = new AvgPool2DBackpropProgram(convInfo);\n return backend.runWebGLProgram(avgPoolBackpropProgram, [dy], x.dtype);\n}\nexport const avgPoolBackpropConfig = {\n kernelName: AvgPoolBackprop,\n backendName: 'webgl',\n kernelFunc: avgPoolBackprop\n};\n//# sourceMappingURL=AvgPoolBackprop.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util } from '@tensorflow/tfjs-core';\nexport class BatchNormProgram {\n constructor(xShape, meanShape, varianceShape, offsetShape, scaleShape, varianceEpsilon) {\n this.outputShape = [];\n this.variableNames = ['x', 'mean', 'variance'];\n backend_util.assertAndGetBroadcastShape(xShape, meanShape);\n backend_util.assertAndGetBroadcastShape(xShape, varianceShape);\n let offsetSnippet = '0.0';\n if (offsetShape != null) {\n backend_util.assertAndGetBroadcastShape(xShape, offsetShape);\n this.variableNames.push('offset');\n offsetSnippet = 'getOffsetAtOutCoords()';\n }\n let scaleSnippet = '1.0';\n if (scaleShape != null) {\n backend_util.assertAndGetBroadcastShape(xShape, scaleShape);\n this.variableNames.push('scale');\n scaleSnippet = 'getScaleAtOutCoords()';\n }\n this.outputShape = xShape;\n this.userCode = `\n void main() {\n float x = getXAtOutCoords();\n float mean = getMeanAtOutCoords();\n float variance = getVarianceAtOutCoords();\n float offset = ${offsetSnippet};\n float scale = ${scaleSnippet};\n float inv = scale * inversesqrt(variance + float(${varianceEpsilon}));\n setOutput(dot(vec3(x, -mean, offset), vec3(inv, inv, 1)));\n }\n `;\n }\n}\n//# sourceMappingURL=batchnorm_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util } from '@tensorflow/tfjs-core';\nexport class BatchNormPackedProgram {\n constructor(xShape, meanShape, varianceShape, offsetShape, scaleShape, varianceEpsilon) {\n this.packedInputs = true;\n this.packedOutput = true;\n this.variableNames = ['x', 'mean', 'variance'];\n backend_util.assertAndGetBroadcastShape(xShape, meanShape);\n backend_util.assertAndGetBroadcastShape(xShape, varianceShape);\n let offsetSnippet = 'vec4(0.0)';\n if (offsetShape != null) {\n backend_util.assertAndGetBroadcastShape(xShape, offsetShape);\n this.variableNames.push('offset');\n offsetSnippet = 'getOffsetAtOutCoords()';\n }\n let scaleSnippet = 'vec4(1.0)';\n if (scaleShape != null) {\n backend_util.assertAndGetBroadcastShape(xShape, scaleShape);\n this.variableNames.push('scale');\n scaleSnippet = 'getScaleAtOutCoords()';\n }\n this.outputShape = xShape;\n this.userCode = `\n void main() {\n vec4 offset = ${offsetSnippet};\n vec4 scale = ${scaleSnippet};\n\n vec4 x = getXAtOutCoords();\n vec4 mean = getMeanAtOutCoords();\n vec4 variance = getVarianceAtOutCoords();\n\n vec4 inv = scale * inversesqrt(variance + vec4(${varianceEpsilon}));\n\n setOutput((x - mean) * inv + offset);\n }\n `;\n }\n}\n//# sourceMappingURL=batchnorm_packed_gpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env, FusedBatchNorm, util } from '@tensorflow/tfjs-core';\nimport { BatchNormProgram } from '../batchnorm_gpu';\nimport { BatchNormPackedProgram } from '../batchnorm_packed_gpu';\nexport const batchNorm = ({ inputs, backend, attrs }) => {\n const { x, mean, variance, offset, scale } = inputs;\n util.assert(mean.shape.length === variance.shape.length, () => 'Batch normalization gradient requires mean and variance to have ' +\n 'equal ranks.');\n util.assert(offset == null || mean.shape.length === offset.shape.length, () => 'Batch normalization gradient requires mean and offset to have ' +\n 'equal ranks.');\n util.assert(scale == null || mean.shape.length === scale.shape.length, () => 'Batch normalization gradient requires mean and scale to have ' +\n 'equal ranks.');\n let { varianceEpsilon } = attrs;\n if (varianceEpsilon == null) {\n varianceEpsilon = 0.001;\n }\n const finalInputs = [x, mean, variance];\n let offsetShape = null;\n if (offset != null) {\n offsetShape = offset.shape;\n finalInputs.push(offset);\n }\n let scaleShape = null;\n if (scale != null) {\n scaleShape = scale.shape;\n finalInputs.push(scale);\n }\n const program = env().getBool('WEBGL_PACK_NORMALIZATION') ?\n new BatchNormPackedProgram(x.shape, mean.shape, variance.shape, offsetShape, scaleShape, varianceEpsilon) :\n new BatchNormProgram(x.shape, mean.shape, variance.shape, offsetShape, scaleShape, varianceEpsilon);\n const output = backend.runWebGLProgram(program, finalInputs, finalInputs[0].dtype);\n return output;\n};\nexport const batchNormConfig = {\n kernelName: FusedBatchNorm,\n backendName: 'webgl',\n kernelFunc: batchNorm,\n};\n//# sourceMappingURL=BatchNorm.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { NotEqual } from '@tensorflow/tfjs-core';\nimport { binaryKernelFunc } from '../kernel_utils/kernel_funcs_utils';\nconst NOT_EQUAL = `return float(a != b);`;\nexport const notEqual = binaryKernelFunc({ opSnippet: NOT_EQUAL, dtype: 'bool' });\nexport const notEqualConfig = {\n kernelName: NotEqual,\n backendName: 'webgl',\n kernelFunc: notEqual,\n};\n//# sourceMappingURL=NotEqual.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Real } from '@tensorflow/tfjs-core';\nimport { identity } from './Identity';\nexport function real(args) {\n const { inputs, backend } = args;\n const { input } = inputs;\n const inputData = backend.texData.get(input.dataId);\n return identity({ inputs: { x: inputData.complexTensorInfos.real }, backend });\n}\nexport const realConfig = {\n kernelName: Real,\n backendName: 'webgl',\n kernelFunc: real\n};\n//# sourceMappingURL=Real.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { UnaryOpProgram } from '../unaryop_gpu';\nconst TO_INT = `return float(int(x));`;\nexport function int(input, backend) {\n const program = new UnaryOpProgram(input.shape, TO_INT);\n const output = backend.runWebGLProgram(program, [input], 'int32');\n return { dataId: output.dataId, shape: output.shape, dtype: output.dtype };\n}\n//# sourceMappingURL=int.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as tf from '@tensorflow/tfjs-core';\nimport { Cast, util } from '@tensorflow/tfjs-core';\nimport { complex } from './Complex';\nimport { identity } from './Identity';\nimport { notEqual } from './NotEqual';\nimport { real } from './Real';\nimport { int } from '../kernel_utils/int';\nexport function cast(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n const { dtype } = attrs;\n // Casting to complex64.\n if (dtype === 'complex64') {\n if (x.dtype === 'complex64') {\n return identity({ inputs: { x }, backend });\n }\n // TODO(annxingyuan): Import kernel function once zeros is modularized.\n const zerosTensor = tf.zeros(x.shape);\n const floatX = cast({ inputs: { x }, backend, attrs: { dtype: 'float32' } });\n const result = complex({ inputs: { real: floatX, imag: zerosTensor }, backend });\n zerosTensor.dispose();\n backend.disposeIntermediateTensorInfo(floatX);\n return result;\n }\n // Casting from complex64\n if (x.dtype === 'complex64') {\n const realPart = real({ inputs: { input: x }, backend });\n const result = cast({ inputs: { x: realPart }, backend, attrs: { dtype } });\n backend.disposeIntermediateTensorInfo(realPart);\n return result;\n }\n if (!util.hasEncodingLoss(x.dtype, dtype)) {\n // We don't change the underlying data, since we cast to higher\n // precision.\n const result = identity({ inputs: { x }, backend });\n return { dataId: result.dataId, shape: result.shape, dtype };\n }\n if (dtype === 'int32') {\n return int(x, backend);\n }\n if (dtype === 'bool') {\n const zerosTensorInfo = backend.makeTensorInfo([], 'bool', util.getTypedArrayFromDType('bool', 1));\n const binaryInputs = { a: x, b: zerosTensorInfo };\n const result = notEqual({ inputs: binaryInputs, backend });\n backend.disposeIntermediateTensorInfo(zerosTensorInfo);\n return result;\n }\n throw new Error(`Error in Cast: failed to cast ${x.dtype} to ${dtype}`);\n}\nexport const castConfig = {\n kernelName: Cast,\n backendName: 'webgl',\n kernelFunc: cast\n};\n//# sourceMappingURL=Cast.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util } from '@tensorflow/tfjs-core';\nexport class ConcatProgram {\n // Concats 2d tensors along axis=1. See comments in MathBackendWebGL.concat().\n constructor(shapes) {\n this.outputShape = [];\n this.outputShape = backend_util.computeOutShape(shapes, 1 /* axis */);\n this.variableNames = shapes.map((_, i) => `T${i}`);\n const offsets = new Array(shapes.length - 1);\n offsets[0] = shapes[0][1];\n for (let i = 1; i < offsets.length; i++) {\n offsets[i] = offsets[i - 1] + shapes[i][1];\n }\n const snippets = [`if (yC < ${offsets[0]}) setOutput(getT0(yR, yC));`];\n for (let i = 1; i < offsets.length; i++) {\n const shift = offsets[i - 1];\n snippets.push(`else if (yC < ${offsets[i]}) ` +\n `setOutput(getT${i}(yR, yC-${shift}));`);\n }\n const lastIndex = offsets.length;\n const lastShift = offsets[offsets.length - 1];\n snippets.push(`else setOutput(getT${lastIndex}(yR, yC-${lastShift}));`);\n this.userCode = `\n void main() {\n ivec2 coords = getOutputCoords();\n int yR = coords.x;\n int yC = coords.y;\n\n ${snippets.join('\\n ')}\n }\n `;\n }\n}\n//# sourceMappingURL=concat_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util } from '@tensorflow/tfjs-core';\nimport { getChannels } from './packing_util';\nimport { getCoordsDataType } from './shader_compiler';\nexport class ConcatPackedProgram {\n constructor(shapes, axis) {\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = [];\n this.outputShape = backend_util.computeOutShape(shapes, axis);\n const shape = this.outputShape;\n const rank = shape.length;\n const dtype = getCoordsDataType(rank);\n const coords = getChannels('coords', rank);\n const channels = ['x', 'y', 'z', 'w', 'u', 'v'].slice(0, rank);\n this.variableNames = shapes.map((_, i) => `T${i}`);\n const offsets = new Array(shapes.length - 1);\n offsets[0] = shapes[0][axis];\n for (let i = 1; i < offsets.length; i++) {\n offsets[i] = offsets[i - 1] + shapes[i][axis];\n }\n const channel = channels[axis];\n const lastChannels = channels.slice(-2);\n const allChannels = channels.join();\n let getValueSnippet = `if (${channel} < ${offsets[0]}) {\n return getChannel(\n getT0(${allChannels}), vec2(${lastChannels.join()}));\n }`;\n for (let i = 1; i < offsets.length; i++) {\n const shift = offsets[i - 1];\n // Note: the >= comparison below may seem unnecessary given the check\n // above but is needed to workaround branch execution issues on some\n // devices. 
It makes all the conditions exclusive without relying on\n // execution order.\n getValueSnippet += `\n if (${channel} < ${offsets[i]} && ${channel} >= ${offsets[i - 1]}) {\n return getChannel(\n getT${i}(${shiftedChannels(channels, channel, shift)}),\n vec2(${shiftedChannels(lastChannels, channel, shift)}));\n }`;\n }\n const lastIndex = offsets.length;\n const shift = offsets[offsets.length - 1];\n getValueSnippet += `\n return getChannel(\n getT${lastIndex}(${shiftedChannels(channels, channel, shift)}),\n vec2(${shiftedChannels(lastChannels, channel, shift)}));`;\n this.userCode = `\n float getValue(${channels.map(x => 'int ' + x)}) {\n ${getValueSnippet}\n }\n\n void main() {\n ${dtype} coords = getOutputCoords();\n vec4 result = vec4(getValue(${coords}), 0., 0., 0.);\n\n ${coords[rank - 1]} = ${coords[rank - 1]} + 1;\n if (${coords[rank - 1]} < ${shape[rank - 1]}) {\n result.g = getValue(${coords});\n }\n\n ${coords[rank - 2]} = ${coords[rank - 2]} + 1;\n if (${coords[rank - 2]} < ${shape[rank - 2]}) {\n result.a = getValue(${coords});\n }\n\n ${coords[rank - 1]} = ${coords[rank - 1]} - 1;\n if (${coords[rank - 2]} < ${shape[rank - 2]} &&\n ${coords[rank - 1]} < ${shape[rank - 1]}) {\n result.b = getValue(${coords});\n }\n setOutput(result);\n }\n `;\n }\n}\n/**\n * Return an expression for coordinates into a vector where a given channel\n * will be offset by [shift].\n *\n * @param channels the channels to consider\n * @param channel the channel we want shifted\n * @param shift the amount to subtract from the channel.\n *\n * @returns a string of the form 'x, y-[shift], z' where any one channel can\n * have the shift applied.\n */\nfunction shiftedChannels(channels, channel, shift) {\n const channelIdx = channels.indexOf(channel);\n const res = channels.map((c, idx) => {\n if (idx === channelIdx) {\n return `${c} - ${shift}`;\n }\n else {\n return c;\n }\n });\n return res.join();\n}\n//# sourceMappingURL=concat_packed_gpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Imag } from '@tensorflow/tfjs-core';\nimport { identity } from './Identity';\nexport function imag(args) {\n const { inputs, backend } = args;\n const { input } = inputs;\n const inputData = backend.texData.get(input.dataId);\n return identity({ inputs: { x: inputData.complexTensorInfos.imag }, backend });\n}\nexport const imagConfig = {\n kernelName: Imag,\n backendName: 'webgl',\n kernelFunc: imag\n};\n//# sourceMappingURL=Imag.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ReshapePackedProgram } from '../reshape_packed_gpu';\nimport { getBatchDim, getRowsCols } from '../webgl_util';\nexport function packedReshape(input, afterShape, backend) {\n const input3DShape = [getBatchDim(input.shape),\n ...getRowsCols(input.shape)];\n const input3D = {\n dtype: input.dtype,\n shape: input3DShape,\n dataId: input.dataId\n };\n const afterShapeAs3D = [getBatchDim(afterShape),\n ...getRowsCols(afterShape)];\n const program = new ReshapePackedProgram(afterShapeAs3D, input3DShape);\n const preventEagerUnpackingOfOutput = true;\n const output = backend.runWebGLProgram(program, [input3D], input.dtype, null /* customSetup */, preventEagerUnpackingOfOutput);\n return { dataId: output.dataId, shape: afterShape, dtype: output.dtype };\n}\n//# sourceMappingURL=reshape.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Reshape, util } from '@tensorflow/tfjs-core';\nimport { packedReshape } from '../kernel_utils/reshape';\nimport { isReshapeFree } from '../webgl_util';\nexport function reshape(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n const { shape } = attrs;\n const webglBackend = backend;\n const xSize = util.sizeFromShape(x.shape);\n const $shape = util.inferFromImplicitShape(shape, xSize);\n const $xSize = util.sizeFromShape($shape);\n util.assert(xSize === $xSize, () => `The new shape (${$shape}) has ${$xSize} elements and the old ` +\n `shape (${x.shape}) has ${xSize} elements. The new shape and old ` +\n `shape must have the same number of elements.`);\n const xTexData = webglBackend.texData.get(x.dataId);\n if (xTexData.isPacked && !isReshapeFree(x.shape, $shape) &&\n !(xTexData.texture !== null && isReshapeFree(xTexData.shape, $shape))) {\n return packedReshape(x, $shape, webglBackend);\n }\n webglBackend.incRef(x.dataId);\n return { dataId: x.dataId, shape: $shape, dtype: x.dtype };\n}\nexport const reshapeConfig = {\n kernelName: Reshape,\n backendName: 'webgl',\n kernelFunc: reshape\n};\n//# sourceMappingURL=Reshape.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, env, util } from '@tensorflow/tfjs-core';\nimport { ConcatProgram } from '../concat_gpu';\nimport { ConcatPackedProgram } from '../concat_packed_gpu';\nimport { complex } from './Complex';\nimport { imag } from './Imag';\nimport { real } from './Real';\nimport { reshape } from './Reshape';\nexport function concatImpl(inputs, axis, backend) {\n const dtype = inputs[0].dtype;\n if (dtype === 'complex64') {\n const reals = inputs.map((t) => real({ inputs: { input: t }, backend }));\n const imags = inputs.map((t) => imag({ inputs: { input: t }, backend }));\n const realConcated = concatImpl(reals, axis, backend);\n const imagConcated = concatImpl(imags, axis, backend);\n const result = complex({ inputs: { real: realConcated, imag: imagConcated }, backend });\n reals.forEach(r => backend.disposeIntermediateTensorInfo(r));\n imags.forEach(i => backend.disposeIntermediateTensorInfo(i));\n backend.disposeIntermediateTensorInfo(realConcated);\n backend.disposeIntermediateTensorInfo(imagConcated);\n return result;\n }\n if (inputs.length > env().getNumber('WEBGL_MAX_TEXTURES_IN_SHADER')) {\n const midIndex = Math.floor(inputs.length / 2);\n const leftSide = concatImpl(inputs.slice(0, midIndex), axis, backend);\n const rightSide = concatImpl(inputs.slice(midIndex), axis, backend);\n const result = concatImpl([leftSide, rightSide], axis, backend);\n backend.disposeIntermediateTensorInfo(leftSide);\n backend.disposeIntermediateTensorInfo(rightSide);\n return result;\n }\n if (env().getBool('WEBGL_PACK_ARRAY_OPERATIONS') &&\n inputs[0].shape.length > 1) {\n const program = new ConcatPackedProgram(inputs.map(t => t.shape), axis);\n return backend.runWebGLProgram(program, inputs, dtype);\n }\n // Any concat of n-dimensional tensors across any axis can be reduced to\n // a concatenation of two-dimensional tensors across the axis 1 by first\n // partitioning the axes of the original tensors into those less than the\n // axis to be concatenated and the rest. 
Then reshape the tensors\n // into a two-dimensional tensor by collapsing these two sets of axes and\n // concatenate the resulting matrices across the axis 1, finally reshaping\n // the result to have the proper shape.\n const outShape = backend_util.computeOutShape(inputs.map(t => t.shape), axis);\n const tensors2D = inputs.map(x => reshape({\n inputs: { x },\n attrs: { shape: [-1, util.sizeFromShape(x.shape.slice(axis))] },\n backend\n }));\n const program = new ConcatProgram(tensors2D.map(t => t.shape));\n const result = backend.runWebGLProgram(program, tensors2D, dtype);\n tensors2D.forEach(r => backend.disposeIntermediateTensorInfo(r));\n const reshapedResult = reshape({ inputs: { x: result }, attrs: { shape: outShape }, backend });\n backend.disposeIntermediateTensorInfo(result);\n return reshapedResult;\n}\n//# sourceMappingURL=Concat_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Concat, util } from '@tensorflow/tfjs-core';\nimport { concatImpl } from './Concat_impl';\nexport function concat(args) {\n const { inputs, backend, attrs } = args;\n const { axis } = attrs;\n const $axis = util.parseAxisParam(axis, inputs[0].shape)[0];\n const outShape = backend_util.computeOutShape(inputs.map(t => t.shape), $axis);\n if (util.sizeFromShape(outShape) === 0) {\n return backend.makeTensorInfo(outShape, inputs[0].dtype, []);\n }\n // Keep only non-empty tensors (ignore tensors with 0 in their shape).\n const $inputs = inputs.filter(t => util.sizeFromShape(t.shape) > 0);\n if ($inputs.length === 1) {\n return $inputs[0];\n }\n const shapes = $inputs.map(t => t.shape);\n backend_util.assertParamsConsistent(shapes, $axis);\n return concatImpl($inputs, $axis, backend);\n}\nexport const concatConfig = {\n kernelName: Concat,\n backendName: 'webgl',\n kernelFunc: concat\n};\n//# sourceMappingURL=Concat.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Cos } from '@tensorflow/tfjs-core';\nimport { CHECK_NAN_SNIPPET_UNARY, unaryKernelFunc } from '../kernel_utils/kernel_funcs_utils';\nconst COS = CHECK_NAN_SNIPPET_UNARY + `\n return cos(x);\n`;\nexport const cos = unaryKernelFunc(COS);\nexport const cosConfig = {\n kernelName: Cos,\n backendName: 'webgl',\n kernelFunc: cos,\n};\n//# sourceMappingURL=Cos.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Div } from '@tensorflow/tfjs-core';\nimport { binaryKernelFunc } from '../kernel_utils/kernel_funcs_utils';\n// Without the equality check div produces 0.9999 for a = b, which when\n// floored can cause errors.\nconst DIV = `\nif (a == b) {\n return 1.0;\n};\nreturn a / b;`;\n// We do the same as in ./binaryop_gpu, with vec4 and ivec4.\n// On Linux, the vectorized implementation produces NaNs when a and b are 0.\nconst DIV_PACKED = `\n // vec4 one = vec4(equal(a, b));\n // return one + (vec4(1.0) - one) * a / b;\n vec4 result = a / b;\n if(a.x == b.x) {\n result.x = 1.;\n }\n if(a.y == b.y) {\n result.y = 1.;\n }\n if(a.z == b.z) {\n result.z = 1.;\n }\n if(a.w == b.w) {\n result.w = 1.;\n }\n\n return result;\n`;\nexport const div = binaryKernelFunc({ opSnippet: DIV, packedOpSnippet: DIV_PACKED, checkOutOfBounds: true });\nexport const divConfig = {\n kernelName: Div,\n backendName: 'webgl',\n kernelFunc: div,\n};\n//# sourceMappingURL=Div.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class FFTProgram {\n constructor(component, inputShape, inverse) {\n this.variableNames = ['real', 'imag'];\n const innerDim = inputShape[1];\n this.outputShape = inputShape;\n const exponentMultiplierSnippet = inverse ? `2.0 * ${Math.PI}` : `-2.0 * ${Math.PI}`;\n const resultDenominator = inverse ? `${innerDim}.0` : '1.0';\n let opString;\n if (component === 'real') {\n opString = 'return real * expR - imag * expI;';\n }\n else if (component === 'imag') {\n opString = 'return real * expI + imag * expR;';\n }\n else {\n throw new Error(`FFT component must be either \"real\" or \"imag\", got ${component}.`);\n }\n this.userCode = `\n const float exponentMultiplier = ${exponentMultiplierSnippet};\n\n float unaryOpComplex(float real, float expR, float imag, float expI) {\n ${opString}\n }\n\n float mulMatDFT(int batch, int index) {\n float indexRatio = float(index) / float(${innerDim});\n float exponentMultiplierTimesIndexRatio =\n exponentMultiplier * indexRatio;\n\n float result = 0.0;\n\n for (int i = 0; i < ${innerDim}; i++) {\n // x = (-2|2 * PI / N) * index * i;\n float x = exponentMultiplierTimesIndexRatio * float(i);\n float expR = cos(x);\n float expI = sin(x);\n float real = getReal(batch, i);\n float imag = getImag(batch, i);\n\n result +=\n unaryOpComplex(real, expR, imag, expI) / ${resultDenominator};\n }\n\n return result;\n }\n\n void main() {\n ivec2 coords = getOutputCoords();\n setOutput(mulMatDFT(coords[0], coords[1]));\n }\n `;\n }\n}\n//# sourceMappingURL=fft_gpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nimport { FFTProgram } from '../fft_gpu';\nimport { complex } from './Complex';\nimport { reshape } from './Reshape';\nexport function fftImpl(x, inverse, backend) {\n const xData = backend.texData.get(x.dataId);\n const inputSize = util.sizeFromShape(x.shape);\n // Collapse all outer dimensions to a single batch dimension.\n const innerDimensionSize = x.shape[x.shape.length - 1];\n const batch = inputSize / innerDimensionSize;\n const input2D = reshape({ inputs: { x }, backend, attrs: { shape: [batch, innerDimensionSize] } });\n const xShape = input2D.shape;\n const realProgram = new FFTProgram('real', xShape, inverse);\n const imagProgram = new FFTProgram('imag', xShape, inverse);\n const inputs = [\n {\n dataId: xData.complexTensorInfos.real.dataId,\n dtype: xData.complexTensorInfos.real.dtype,\n shape: xShape\n },\n {\n dataId: xData.complexTensorInfos.imag.dataId,\n dtype: xData.complexTensorInfos.imag.dtype,\n shape: xShape\n }\n ];\n const realPart = backend.runWebGLProgram(realProgram, inputs, 'float32');\n const imagPart = backend.runWebGLProgram(imagProgram, inputs, 'float32');\n const complexOutput = complex({ inputs: { real: realPart, imag: imagPart }, backend });\n backend.disposeIntermediateTensorInfo(realPart);\n backend.disposeIntermediateTensorInfo(imagPart);\n const complexOutputReshaped = reshape({ inputs: { x: complexOutput }, backend, attrs: { shape: x.shape } });\n backend.disposeIntermediateTensorInfo(complexOutputReshaped);\n return complexOutputReshaped;\n}\n//# sourceMappingURL=FFT_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { FFT } from '@tensorflow/tfjs-core';\nimport { fftImpl } from './FFT_impl';\nexport function fft(args) {\n const { inputs, backend } = args;\n const { input } = inputs;\n return fftImpl(input, false /* inverse */, backend);\n}\nexport const fftConfig = {\n kernelName: FFT,\n backendName: 'webgl',\n kernelFunc: fft\n};\n//# sourceMappingURL=FFT.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class FlipLeftRightProgram {\n constructor(imageShape) {\n this.variableNames = ['Image'];\n this.outputShape = [];\n const imageWidth = imageShape[2];\n this.outputShape = imageShape;\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int x = coords[2];\n\n int coordX = ${imageWidth} - x;\n float outputValue;\n if(coordX >= 0 && coordX < ${imageWidth}) {\n outputValue = getImage(coords[0], coords[1], coordX, coords[3]);\n } else {\n outputValue = getImage(coords[0], coords[1], coords[2], coords[3]);\n }\n setOutput(outputValue);\n }\n `;\n }\n}\n//# sourceMappingURL=flip_left_right_gpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { FlipLeftRight } from '@tensorflow/tfjs-core';\nimport { FlipLeftRightProgram } from '../flip_left_right_gpu';\nexport const flipLeftRightConfig = {\n kernelName: FlipLeftRight,\n backendName: 'webgl',\n kernelFunc: ({ inputs, backend }) => {\n const { image } = inputs;\n const webglBackend = backend;\n const program = new FlipLeftRightProgram(image.shape);\n const output = webglBackend.runWebGLProgram(program, [image], image.dtype);\n return output;\n }\n};\n//# sourceMappingURL=FlipLeftRight.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getGlslDifferences } from '../../glsl_version';\nexport class FromPixelsProgram {\n constructor(outputShape) {\n this.variableNames = ['A'];\n const glsl = getGlslDifferences();\n const [height, width,] = outputShape;\n this.outputShape = outputShape;\n this.userCode = `\n void main() {\n ivec3 coords = getOutputCoords();\n int texR = coords[0];\n int texC = coords[1];\n int depth = coords[2];\n vec2 uv = (vec2(texC, texR) + halfCR) / vec2(${width}.0, ${height}.0);\n\n vec4 values = ${glsl.texture2D}(A, uv);\n float value;\n if (depth == 0) {\n value = values.r;\n } else if (depth == 1) {\n value = values.g;\n } else if (depth == 2) {\n value = values.b;\n } else if (depth == 3) {\n value = values.a;\n }\n\n setOutput(floor(value * 255.0 + 0.5));\n }\n `;\n }\n}\n//# sourceMappingURL=from_pixels_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getGlslDifferences } from '../../glsl_version';\nexport class FromPixelsPackedProgram {\n constructor(outputShape) {\n this.variableNames = ['A'];\n this.packedInputs = false;\n this.packedOutput = true;\n const glsl = getGlslDifferences();\n const [height, width,] = outputShape;\n this.outputShape = outputShape;\n this.userCode = `\n void main() {\n ivec3 coords = getOutputCoords();\n int texR = coords[0];\n int texC = coords[1];\n int depth = coords[2];\n\n vec4 result = vec4(0.);\n\n for(int row=0; row<=1; row++) {\n for(int col=0; col<=1; col++) {\n texC = coords[1] + row;\n depth = coords[2] + col;\n\n vec2 uv = (vec2(texC, texR) + halfCR) /\n vec2(${width}.0, ${height}.0);\n vec4 values = ${glsl.texture2D}(A, uv);\n float value;\n if (depth == 0) {\n value = values.r;\n } else if (depth == 1) {\n value = values.g;\n } else if (depth == 2) {\n value = values.b;\n } else if (depth == 3) {\n value = values.a;\n }\n\n result[row * 2 + col] = floor(value * 255.0 + 0.5);\n }\n }\n\n ${glsl.output} = result;\n }\n `;\n }\n}\n//# sourceMappingURL=from_pixels_packed_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from '@tensorflow/tfjs-core';\nimport { FromPixels } from '@tensorflow/tfjs-core';\nimport { TextureUsage } from '../tex_util';\nimport { FromPixelsProgram } from './FromPixels_utils/from_pixels_gpu';\nimport { FromPixelsPackedProgram } from './FromPixels_utils/from_pixels_packed_gpu';\nexport const fromPixelsConfig = {\n kernelName: FromPixels,\n backendName: 'webgl',\n kernelFunc: fromPixels,\n};\nlet fromPixels2DContext;\nfunction fromPixels(args) {\n const { inputs, backend, attrs } = args;\n let { pixels } = inputs;\n const { numChannels } = attrs;\n const isVideo = typeof (HTMLVideoElement) !== 'undefined' &&\n pixels instanceof HTMLVideoElement;\n const isImage = typeof (HTMLImageElement) !== 'undefined' &&\n pixels instanceof HTMLImageElement;\n const [width, height] = isVideo ?\n [\n pixels.videoWidth,\n pixels.videoHeight\n ] :\n [pixels.width, pixels.height];\n const texShape = [height, width];\n const outShape = [height, width, numChannels];\n if (isImage || isVideo) {\n if (fromPixels2DContext == null) {\n fromPixels2DContext = document.createElement('canvas').getContext('2d');\n }\n fromPixels2DContext.canvas.width = width;\n fromPixels2DContext.canvas.height = height;\n fromPixels2DContext.drawImage(pixels, 0, 0, width, height);\n pixels = fromPixels2DContext.canvas;\n }\n const tempPixelHandle = backend.makeTensorInfo(texShape, 'int32');\n // This is a byte texture with pixels.\n backend.texData.get(tempPixelHandle.dataId).usage = TextureUsage.PIXELS;\n backend.gpgpu.uploadPixelDataToTexture(backend.getTexture(tempPixelHandle.dataId), pixels);\n const program = env().getBool('WEBGL_PACK') ?\n new FromPixelsPackedProgram(outShape) :\n new FromPixelsProgram(outShape);\n const res = backend.runWebGLProgram(program, [tempPixelHandle], 'int32');\n backend.disposeData(tempPixelHandle.dataId);\n return res;\n}\n//# sourceMappingURL=FromPixels.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { IFFT } from '@tensorflow/tfjs-core';\nimport { fftImpl } from './FFT_impl';\nexport function ifft(args) {\n const { inputs, backend } = args;\n const { input } = inputs;\n return fftImpl(input, true /* inverse */, backend);\n}\nexport const ifftConfig = {\n kernelName: IFFT,\n backendName: 'webgl',\n kernelFunc: ifft\n};\n//# sourceMappingURL=IFFT.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nexport class MeanProgram {\n constructor(reduceInfo, divisor) {\n this.variableNames = ['x'];\n const { windowSize, batchSize, inSize, outSize } = reduceInfo;\n this.outputShape = [batchSize, outSize];\n const windowSizeNearestVec4 = Math.floor(windowSize / 4) * 4;\n const windowSizeVec4Remainder = windowSize % 4;\n let updateSnippet = `sumValue += dot(values, ones);`;\n if (divisor != null) {\n const denominator = 1 / divisor;\n updateSnippet = `sumValue += dot(values * ${util.isInt(denominator) ? 
denominator.toPrecision(2) :\n denominator}, ones);`;\n }\n let checkOutOfBounds = '';\n if (inSize % windowSize > 0) {\n checkOutOfBounds = `\n if (inIdx < 0 || inIdx >= ${inSize}) {\n return 0.0;\n }\n `;\n }\n this.userCode = `\n const vec4 ones = vec4(1.0, 1.0, 1.0, 1.0);\n\n float getValue(int batch, int inIdx) {\n ${checkOutOfBounds}\n return getX(batch, inIdx);\n }\n\n void main() {\n ivec2 coords = getOutputCoords();\n int batch = coords[0];\n int outIdx = coords[1];\n int inOffset = outIdx * ${windowSize};\n\n float sumValue = 0.0;\n\n for (int i = 0; i < ${windowSizeNearestVec4}; i += 4) {\n int inIdx = inOffset + i;\n vec4 values = vec4(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1),\n getValue(batch, inIdx + 2),\n getValue(batch, inIdx + 3)\n );\n\n ${updateSnippet}\n }\n\n int inIdx = inOffset + ${windowSizeNearestVec4};\n if (${windowSizeVec4Remainder === 1}) {\n vec4 values = vec4(getValue(batch, inIdx), 0.0, 0.0, 0.0);\n\n ${updateSnippet}\n } else if (${windowSizeVec4Remainder === 2}) {\n vec4 values = vec4(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1), 0.0, 0.0);\n\n ${updateSnippet}\n } else if (${windowSizeVec4Remainder === 3}) {\n vec4 values = vec4(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1),\n getValue(batch, inIdx + 2), 0.0);\n\n ${updateSnippet}\n }\n setOutput(sumValue);\n }\n `;\n }\n}\n//# sourceMappingURL=mean_gpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util } from '@tensorflow/tfjs-core';\nimport { MeanProgram } from '../mean_gpu';\nimport { ReduceProgram } from '../reduce_gpu';\n// Returns an array of configuration objects that describe each stage of the\n// reduction.\nfunction getReductionStages(inShape) {\n const stages = [];\n while (stages.length === 0 || stages[stages.length - 1].outSize !== 1) {\n const outSize = stages.length ? 
stages[stages.length - 1].outSize : inShape[1];\n const windowSize = backend_util.computeOptimalWindowSize(outSize);\n stages.push({\n inSize: outSize,\n windowSize,\n outSize: Math.ceil(outSize / windowSize)\n });\n }\n return stages;\n}\nexport function reduce(x, dtype, reductionType, backend) {\n const reductionStages = getReductionStages(x.shape);\n let result = x;\n for (let i = 0; i < reductionStages.length; i++) {\n const { inSize, windowSize, outSize } = reductionStages[i];\n let program;\n let previousResult;\n if (reductionType === 'mean') {\n program = i === 0 ?\n new MeanProgram({ windowSize, inSize, batchSize: x.shape[0], outSize }, inSize) :\n new MeanProgram({ windowSize, inSize, batchSize: x.shape[0], outSize });\n }\n else {\n program = new ReduceProgram({ windowSize, inSize, batchSize: x.shape[0], outSize }, reductionType);\n }\n previousResult = result;\n result = backend.runWebGLProgram(program, [result], dtype);\n if (previousResult.dataId !== x.dataId) {\n backend.disposeIntermediateTensorInfo(previousResult);\n }\n }\n return result;\n}\n//# sourceMappingURL=reduce.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nimport { reduce } from '../kernel_utils/reduce';\nimport { reshape } from '../kernels/Reshape';\nexport function maxImpl(x, reduceShape, outShape, backend) {\n const inSize = util.sizeFromShape(reduceShape);\n const xSize = util.sizeFromShape(x.shape);\n const batchSize = xSize / inSize;\n const reshapedInput = reshape({ inputs: { x }, attrs: { shape: [batchSize, inSize] }, backend });\n const reduced = reduce(reshapedInput, x.dtype, 'max', backend);\n const reshapedOutput = reshape({ inputs: { x: reduced }, attrs: { shape: outShape }, backend });\n backend.disposeIntermediateTensorInfo(reshapedInput);\n backend.disposeIntermediateTensorInfo(reduced);\n return reshapedOutput;\n}\n//# sourceMappingURL=Max_impl.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getCoordsDataType } from './shader_compiler';\nexport class TransposeProgram {\n constructor(aShape, newDim) {\n this.variableNames = ['A'];\n const outputShape = new Array(aShape.length);\n for (let i = 0; i < outputShape.length; i++) {\n outputShape[i] = aShape[newDim[i]];\n }\n this.outputShape = outputShape;\n this.rank = outputShape.length;\n const dtype = getCoordsDataType(this.rank);\n const switched = getSwitchedCoords(newDim);\n this.userCode = `\n void main() {\n ${dtype} resRC = getOutputCoords();\n setOutput(getA(${switched}));\n }\n `;\n }\n}\nfunction getSwitchedCoords(newDim) {\n const rank = newDim.length;\n if (rank > 6) {\n throw Error(`Transpose for rank ${rank} is not yet supported`);\n }\n const originalOrder = ['resRC.x', 'resRC.y', 'resRC.z', 'resRC.w', 'resRC.u', 'resRC.v'];\n const switchedCoords = new Array(rank);\n for (let i = 0; i < newDim.length; i++) {\n switchedCoords[newDim[i]] = originalOrder[i];\n }\n return switchedCoords.join();\n}\n//# sourceMappingURL=transpose_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getVecChannels } from './packing_util';\nimport { getCoordsDataType } from './shader_compiler';\nexport class TransposePackedProgram {\n constructor(aShape, newDim) {\n this.variableNames = ['A'];\n this.packedInputs = true;\n this.packedOutput = true;\n const outputShape = new Array(aShape.length);\n for (let i = 0; i < outputShape.length; i++) {\n outputShape[i] = aShape[newDim[i]];\n }\n this.outputShape = outputShape;\n this.rank = outputShape.length;\n if (this.rank > 6) {\n throw Error(`Packed transpose for rank ${this.rank} is not yet supported.`);\n }\n const dtype = getCoordsDataType(this.rank);\n const outputOrder = getVecChannels('rc', this.rank);\n const switchedOrder = new Array(this.rank);\n for (let i = 0; i < newDim.length; i++) {\n switchedOrder[newDim[i]] = outputOrder[i];\n }\n const innerDims = `vec2(${switchedOrder.slice(-2).join()})`;\n const nextColumn = `++${outputOrder[this.rank - 1]} < ${outputShape[this.rank - 1]}`;\n const getc = `getChannel(getA(${switchedOrder.join()}), ${innerDims})`;\n this.userCode = `\n void main() {\n ${dtype} rc = getOutputCoords();\n vec4 
result = vec4(0.);\n result[0] = ${getc};\n if(${nextColumn}) {\n result[1] = ${getc};\n }\n --${outputOrder[this.rank - 1]};\n if(++${outputOrder[this.rank - 2]} < ${outputShape[this.rank - 2]}) {\n result[2] = ${getc};\n if(${nextColumn}) {\n result[3] = ${getc};\n }\n }\n setOutput(result);\n }\n `;\n }\n}\n//# sourceMappingURL=transpose_packed_gpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from '@tensorflow/tfjs-core';\nimport { transposeImplCPU } from '../kernel_utils/shared';\nimport { TransposeProgram } from '../transpose_gpu';\nimport { TransposePackedProgram } from '../transpose_packed_gpu';\nexport function transposeImpl(x, perm, backend) {\n const program = env().getBool('WEBGL_PACK_ARRAY_OPERATIONS') ?\n new TransposePackedProgram(x.shape, perm) :\n new TransposeProgram(x.shape, perm);\n return backend.runWebGLProgram(program, [x], x.dtype);\n}\nexport { transposeImplCPU };\n//# sourceMappingURL=Transpose_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Max } from '@tensorflow/tfjs-core';\nimport { backend_util, util } from '@tensorflow/tfjs-core';\nimport { maxImplCPU } from '../kernel_utils/shared';\nimport { maxImpl } from './Max_impl';\nimport { transposeImpl, transposeImplCPU } from './Transpose_impl';\nexport const maxConfig = {\n kernelName: Max,\n backendName: 'webgl',\n kernelFunc: ({ inputs, attrs, backend }) => {\n const { x } = inputs;\n const { reductionIndices, keepDims } = attrs;\n const webglBackend = backend;\n const xRank = x.shape.length;\n const origAxes = util.parseAxisParam(reductionIndices, x.shape);\n let axes = origAxes;\n const permutedAxes = backend_util.getAxesPermutation(axes, xRank);\n const maxInputIsTransposed = permutedAxes != null;\n const shouldExecuteOnCPU = webglBackend.shouldExecuteOnCPU([x]);\n let maxInput = x;\n if (maxInputIsTransposed) {\n if (shouldExecuteOnCPU) {\n const xTexData = webglBackend.texData.get(maxInput.dataId);\n const values = xTexData.values;\n const newShape = new Array(xRank);\n for (let i = 0; i < newShape.length; i++) {\n newShape[i] = x.shape[permutedAxes[i]];\n }\n const maxInputValues = transposeImplCPU(values, x.shape, x.dtype, 
permutedAxes, newShape);\n maxInput = webglBackend.makeTensorInfo(newShape, x.dtype);\n const maxInputData = webglBackend.texData.get(maxInput.dataId);\n maxInputData.values = maxInputValues;\n }\n else {\n maxInput = transposeImpl(x, permutedAxes, webglBackend);\n }\n axes = backend_util.getInnerMostAxes(axes.length, xRank);\n }\n backend_util.assertAxesAreInnerMostDims('max', axes, xRank);\n const [maxOutShape, reduceShape] = backend_util.computeOutAndReduceShapes(maxInput.shape, axes);\n let outShape = maxOutShape;\n if (keepDims) {\n // rather than reshape at the end, set the target shape here.\n outShape = backend_util.expandShapeToKeepDim(maxOutShape, origAxes);\n }\n let out;\n if (shouldExecuteOnCPU) {\n const xTexData = webglBackend.texData.get(maxInput.dataId);\n const values = xTexData.values;\n const outValues = maxImplCPU(values, util.sizeFromShape(reduceShape), outShape, x.dtype);\n out = webglBackend.makeTensorInfo(outShape, x.dtype);\n const outData = webglBackend.texData.get(out.dataId);\n outData.values = outValues;\n }\n else {\n out = maxImpl(maxInput, reduceShape, outShape, webglBackend);\n }\n if (maxInputIsTransposed) {\n webglBackend.disposeIntermediateTensorInfo(maxInput);\n }\n return out;\n }\n};\n//# sourceMappingURL=Max.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, MaxPool, util } from '@tensorflow/tfjs-core';\nimport { Pool2DProgram } from '../pool_gpu';\nimport { assertNotComplex } from '../webgl_util';\nimport { identity } from './Identity';\nexport function maxPool(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n assertNotComplex(x, 'maxPool');\n const { filterSize, strides, pad, dimRoundingMode } = attrs;\n const dilations = 1;\n util.assert(backend_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in maxPool: Either strides or dilations must be 1. ' +\n `Got strides ${strides} and dilations '${dilations}'`);\n const convInfo = backend_util.computePool2DInfo(x.shape, filterSize, strides, dilations, pad, dimRoundingMode);\n if (convInfo.filterWidth === 1 && convInfo.filterHeight === 1 &&\n util.arraysEqual(convInfo.inShape, convInfo.outShape)) {\n return identity({ inputs: { x }, backend });\n }\n const maxPoolProgram = new Pool2DProgram(convInfo, 'max', false);\n return backend.runWebGLProgram(maxPoolProgram, [x], x.dtype);\n}\nexport const maxPoolConfig = {\n kernelName: MaxPool,\n backendName: 'webgl',\n kernelFunc: maxPool\n};\n//# sourceMappingURL=MaxPool.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, MaxPoolBackprop } from '@tensorflow/tfjs-core';\nimport { MaxPool2DBackpropProgram } from '../max_pool_backprop_gpu';\nimport { Pool2DProgram } from '../pool_gpu';\nimport { assertNotComplex } from '../webgl_util';\nexport function maxPoolBackprop(args) {\n const { inputs, backend, attrs } = args;\n const { dy, input, output } = inputs;\n const x = input;\n assertNotComplex([input, output], 'maxPoolBackprop');\n const { filterSize, strides, pad, dimRoundingMode } = attrs;\n const convInfo = backend_util.computePool2DInfo(x.shape, filterSize, strides, 1 /* dilations */, pad, dimRoundingMode);\n const getPositions = true;\n const maxPoolPositionsProgram = new Pool2DProgram(convInfo, 'max', getPositions);\n const maxPoolPositions = backend.runWebGLProgram(maxPoolPositionsProgram, [x], x.dtype);\n const maxPoolBackPropProgram = new MaxPool2DBackpropProgram(convInfo);\n const result = backend.runWebGLProgram(maxPoolBackPropProgram, [dy, maxPoolPositions], x.dtype);\n backend.disposeIntermediateTensorInfo(maxPoolPositions);\n return result;\n}\nexport const maxPoolBackpropConfig = {\n kernelName: MaxPoolBackprop,\n backendName: 'webgl',\n kernelFunc: maxPoolBackprop\n};\n//# sourceMappingURL=MaxPoolBackprop.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Pool2DProgram } from '../pool_gpu';\nexport function maxPoolWithArgmaxImpl(x, includeBatchInIndex, convInfo, backend) {\n let program = new Pool2DProgram(convInfo, 'max', false);\n const poolOutput = backend.runWebGLProgram(program, [x], 'float32');\n program = new Pool2DProgram(convInfo, 'max', true, true, includeBatchInIndex);\n const indexOutput = backend.runWebGLProgram(program, [x], 'float32');\n return [poolOutput, indexOutput];\n}\n//# sourceMappingURL=MaxPoolWithArgmax_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { MaxPoolWithArgmax } from '@tensorflow/tfjs-core';\nimport { backend_util, util } from '@tensorflow/tfjs-core';\nimport { maxPoolWithArgmaxImpl } from './MaxPoolWithArgmax_impl';\nexport const maxPoolWithArgmaxConfig = {\n kernelName: MaxPoolWithArgmax,\n backendName: 'webgl',\n kernelFunc: ({ inputs, attrs, backend }) => {\n const { x } = inputs;\n const { filterSize, strides, pad, includeBatchInIndex } = attrs;\n const webglBackend = backend;\n util.assert(x.shape.length === 4, () => `Error in maxPool: input must be rank 4 but got rank ${x.shape.length}.`);\n const dilations = [1, 1];\n util.assert(backend_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in maxPool: Either strides or dilations must be 1. ' +\n `Got strides ${strides} and dilations '${dilations}'`);\n const convInfo = backend_util.computePool2DInfo(x.shape, filterSize, strides, dilations, pad);\n const [result, indexes] = maxPoolWithArgmaxImpl(x, includeBatchInIndex, convInfo, webglBackend);\n return [result, indexes];\n }\n};\n//# sourceMappingURL=MaxPoolWithArgmax.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nimport { reduce } from '../kernel_utils/reduce';\nimport { reshape } from '../kernels/Reshape';\nexport function meanImpl(x, reduceShape, outShape, backend) {\n const inSize = util.sizeFromShape(reduceShape);\n const xSize = util.sizeFromShape(x.shape);\n const batchSize = xSize / inSize;\n const reshapedInput = reshape({ inputs: { x }, attrs: { shape: [batchSize, inSize] }, backend });\n const reduced = reduce(reshapedInput, 'float32', 'mean', backend);\n const reshapedOutput = reshape({ inputs: { x: reduced }, attrs: { shape: outShape }, backend });\n backend.disposeIntermediateTensorInfo(reshapedInput);\n backend.disposeIntermediateTensorInfo(reduced);\n return reshapedOutput;\n}\n//# sourceMappingURL=Mean_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Mean, util } from '@tensorflow/tfjs-core';\nimport { meanImpl } from './Mean_impl';\nimport { transposeImpl, transposeImplCPU } from './Transpose_impl';\nexport const meanConfig = {\n kernelName: Mean,\n backendName: 'webgl',\n kernelFunc: ({ inputs, attrs, backend }) => {\n const { x } = inputs;\n const { keepDims, axis } = attrs;\n const webglBackend = backend;\n const xRank = x.shape.length;\n const origAxes = util.parseAxisParam(axis, x.shape);\n let axes = origAxes;\n const permutedAxes = backend_util.getAxesPermutation(axes, xRank);\n const meanInputIsTransposed = permutedAxes != null;\n const shouldExecuteOnCPU = webglBackend.shouldExecuteOnCPU([x]);\n const intermediates = [];\n let meanInput = x;\n if (meanInputIsTransposed) {\n if (shouldExecuteOnCPU) {\n const xTexData = webglBackend.texData.get(meanInput.dataId);\n const values = xTexData.values;\n const newShape = new Array(xRank);\n for (let i = 0; i < newShape.length; i++) {\n newShape[i] = x.shape[permutedAxes[i]];\n }\n const meanInputValues = transposeImplCPU(values, x.shape, x.dtype, permutedAxes, newShape);\n meanInput = webglBackend.makeTensorInfo(newShape, x.dtype);\n const meanInputData = webglBackend.texData.get(meanInput.dataId);\n meanInputData.values = meanInputValues;\n }\n else {\n meanInput = transposeImpl(x, permutedAxes, webglBackend);\n }\n intermediates.push(meanInput);\n axes = backend_util.getInnerMostAxes(axes.length, xRank);\n }\n backend_util.assertAxesAreInnerMostDims('sum', axes, xRank);\n const [meanOutShape, reduceShape] = backend_util.computeOutAndReduceShapes(meanInput.shape, axes);\n let outShape = meanOutShape;\n if (keepDims) {\n // rather than reshape at the end, set the target shape here.\n outShape = backend_util.expandShapeToKeepDim(meanOutShape, origAxes);\n }\n const out = meanImpl(meanInput, reduceShape, outShape, webglBackend);\n for (const i of intermediates) {\n webglBackend.disposeIntermediateTensorInfo(i);\n }\n return out;\n }\n};\n//# sourceMappingURL=Mean.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getCoordsDataType } from './shader_compiler';\nexport class MirrorPadProgram {\n constructor(xShape, paddings, mode) {\n this.variableNames = ['x'];\n this.outputShape = paddings.map((p, i) => p[0] /* beforePad */ + xShape[i] + p[1] /* afterPad */);\n const rank = xShape.length;\n const dtype = getCoordsDataType(rank);\n const start = paddings.map(p => p[0]).join(',');\n const end = paddings.map((p, i) => p[0] + xShape[i]).join(',');\n const unpackedCoords = ['coords[0]', 'coords[1]', 'coords[2]', 'coords[3]'].slice(0, rank);\n const offset = mode === 'reflect' ? 0 : 1;\n if (rank === 1) {\n this.userCode = `\n int start = ${start};\n int end = ${end};\n\n void main() {\n int outC = getOutputCoords();\n if (outC < start) {\n outC = start * 2 - outC - ${offset};\n } else if(outC >= end) {\n outC = (end - 1) * 2 - outC + ${offset};\n }\n setOutput(getX(outC - start));\n }\n `;\n return;\n }\n this.userCode = `\n ${dtype} start = ${dtype}(${start});\n ${dtype} end = ${dtype}(${end});\n\n void main() {\n ${dtype} outC = getOutputCoords();\n for (int i = 0; i < ${rank}; i++) {\n if (outC[i] < start[i]) {\n outC[i] = start[i] * 2 - outC[i] - ${offset};\n } else if(outC[i] >= end[i]) {\n outC[i] = (end[i] - 1) * 2 - outC[i] + ${offset};\n }\n }\n ${dtype} coords = outC - start;\n setOutput(getX(${unpackedCoords}));\n }\n `;\n }\n}\n//# sourceMappingURL=mirror_pad_gpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getChannels } from './packing_util';\nimport { getCoordsDataType } from './shader_compiler';\n/**\n * Example shader code for\n * `mirrorPad(tf.tensor1d([1, 2, 3], 'int32'), [[2, 2]], 'reflect')`\n * ```\n * const int start = int(2);\n * const int end = int(5);\n *\n * void main() {\n * int outputLoc = getOutputCoords();\n * vec4 result = vec4(0.);\n *\n * int rc = outputLoc;\n *\n * int source = rc;\n * if (source < start) {\n * source = start * 2 - source - 0;\n * } else if (source >= end) {\n * source = (end - 1) * 2 - source + 0;\n * }\n * source -= start;\n *\n * result[0] = getChannel(getX(source), source);\n * rc += 1;\n * if(rc < 6) {\n * int source = rc;\n * if (source < start) {\n * source = start * 2 - source - 0;\n * } else if (source >= end) {\n * source = (end - 1) * 2 - source + 0;\n * }\n * source -= start;\n *\n * result[1] = getChannel(getX(source), source);\n * }\n *\n * setOutput(result);\n * }\n * ```\n */\nexport class MirrorPadPackedProgram {\n constructor(xShape, paddings, mode) {\n this.variableNames = ['x'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = paddings.map((p, i) => p[0] /* beforePad */ + xShape[i] + p[1] /* afterPad */);\n const rank = xShape.length;\n const dtype = getCoordsDataType(rank);\n const start = paddings.map(p => p[0]).join(',');\n const end = paddings.map((p, i) => p[0] + xShape[i]).join(',');\n const coords = getChannels('rc', rank);\n const source = getChannels('source', rank);\n const cLimit = `${coords[rank - 1]} < ${this.outputShape[rank - 1]}`;\n const innerDims = rank === 1 ? 'source' : `vec2(${source.slice(-2).join()})`;\n const offset = mode === 'reflect' ? 
0 : 1;\n let mainLoop = '';\n if (rank === 1) {\n const padSetup = `\n ${dtype} source = rc;\n if (source < start) {\n source = start * 2 - source - ${offset};\n } else if (source >= end) {\n source = (end - 1) * 2 - source + ${offset};\n }\n source -= start;\n `;\n mainLoop = `\n ${dtype} rc = outputLoc;\n ${padSetup}\n result[0] = getChannel(getX(${source.join()}), ${innerDims});\n ${coords[rank - 1]} += 1;\n if(${cLimit}) {\n ${padSetup}\n result[1] = getChannel(getX(${source.join()}), ${innerDims});\n }\n `;\n }\n else {\n const padSetup = `\n ${dtype} source = rc;\n ${dtype} lt = ${dtype}(lessThan(source, start));\n ${dtype} gte = ${dtype}(greaterThanEqual(source, end));\n ${dtype} orig = 1 - (lt + gte);\n source = orig * source +\n lt * (start * 2 - source - ${offset}) +\n gte * ((end - 1) * 2 - source + ${offset});\n source -= start;\n `;\n mainLoop = `\n ${dtype} rc = outputLoc;\n ${padSetup}\n result[0] = getChannel(getX(${source.join()}), ${innerDims});\n ${coords[rank - 1]} += 1;\n if(${cLimit}) {\n ${padSetup}\n result[1] = getChannel(getX(${source.join()}), ${innerDims});\n }\n rc = outputLoc;\n ${coords[rank - 2]} += 1;\n if(${coords[rank - 2]} < ${this.outputShape[rank - 2]}) {\n ${padSetup}\n result[2] = getChannel(getX(${source.join()}), ${innerDims});\n ${coords[rank - 1]} += 1;\n if(${cLimit}) {\n ${padSetup}\n result[3] = getChannel(getX(${source.join()}), ${innerDims});\n }\n }\n `;\n }\n this.userCode = `\n const ${dtype} start = ${dtype}(${start});\n const ${dtype} end = ${dtype}(${end});\n\n void main() {\n ${dtype} outputLoc = getOutputCoords();\n vec4 result = vec4(0.);\n ${mainLoop}\n setOutput(result);\n }\n `;\n }\n}\n//# sourceMappingURL=mirror_pad_packed_gpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env, MirrorPad } from '@tensorflow/tfjs-core';\nimport { MirrorPadProgram } from '../mirror_pad_gpu';\nimport { MirrorPadPackedProgram } from '../mirror_pad_packed_gpu';\nexport const mirrorPadKernelFunc = ({ inputs, backend, attrs }) => {\n const { x } = inputs;\n const { paddings, mode } = attrs;\n const program = env().getBool('WEBGL_PACK_ARRAY_OPERATIONS') ?\n new MirrorPadPackedProgram(x.shape, paddings, mode) :\n new MirrorPadProgram(x.shape, paddings, mode);\n const output = backend.runWebGLProgram(program, [x], x.dtype);\n return output;\n};\nexport const mirrorPadConfig = {\n kernelName: MirrorPad,\n backendName: 'webgl',\n kernelFunc: mirrorPadKernelFunc,\n};\n//# sourceMappingURL=MirrorPad.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util } from '@tensorflow/tfjs-core';\n// (Ar + Ai)(Br + Bi) =\n// ArBr + ArBi + AiBr + AiBi = ArBr - AB + ArBi + AiBr\n// Yr = ArBr - AB\n// Yi = ArBi + AiBr\nexport const COMPLEX_MULTIPLY = {\n REAL: 'return areal * breal - aimag * bimag;',\n IMAG: 'return areal * bimag + aimag * breal;'\n};\nexport class BinaryOpComplexProgram {\n constructor(op, aShape, bShape) {\n this.variableNames = ['AReal', 'AImag', 'BReal', 'BImag'];\n this.outputShape = backend_util.assertAndGetBroadcastShape(aShape, bShape);\n this.userCode = `\n float binaryOpComplex(\n float areal, float aimag, float breal, float bimag) {\n ${op}\n }\n\n void main() {\n float areal = getARealAtOutCoords();\n float aimag = getAImagAtOutCoords();\n float breal = getBRealAtOutCoords();\n float bimag = getBImagAtOutCoords();\n setOutput(binaryOpComplex(areal, aimag, breal, bimag));\n }\n `;\n }\n}\n//# sourceMappingURL=binaryop_complex_gpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, env, Multiply } from '@tensorflow/tfjs-core';\nimport * as binaryop_complex_gpu from '../binaryop_complex_gpu';\nimport { BinaryOpComplexProgram } from '../binaryop_complex_gpu';\nimport { BinaryOpProgram } from '../binaryop_gpu';\nimport { BinaryOpPackedProgram } from '../binaryop_packed_gpu';\nimport { multiplyImplCPU as cpuMultiply } from '../kernel_utils/shared';\nimport { complex } from './Complex';\nconst MUL = 'return a * b;';\nexport function multiply(args) {\n const { inputs, backend } = args;\n const { a, b } = inputs;\n const dtype = backend_util.upcastType(a.dtype, b.dtype);\n if (a.dtype === 'complex64') {\n const aData = backend.texData.get(a.dataId);\n const bData = backend.texData.get(b.dataId);\n const realProgram = new BinaryOpComplexProgram(binaryop_complex_gpu.COMPLEX_MULTIPLY.REAL, a.shape, b.shape);\n const imagProgram = new BinaryOpComplexProgram(binaryop_complex_gpu.COMPLEX_MULTIPLY.IMAG, a.shape, b.shape);\n const inputs = [\n {\n dataId: aData.complexTensorInfos.real.dataId,\n dtype: aData.complexTensorInfos.real.dtype,\n shape: a.shape\n },\n {\n dataId: aData.complexTensorInfos.imag.dataId,\n dtype: aData.complexTensorInfos.imag.dtype,\n shape: 
a.shape\n },\n {\n dataId: bData.complexTensorInfos.real.dataId,\n dtype: bData.complexTensorInfos.real.dtype,\n shape: b.shape\n },\n {\n dataId: bData.complexTensorInfos.imag.dataId,\n dtype: bData.complexTensorInfos.imag.dtype,\n shape: b.shape\n }\n ];\n const realPart = backend.runWebGLProgram(realProgram, inputs, 'float32');\n const imagPart = backend.runWebGLProgram(imagProgram, inputs, 'float32');\n const complexOutput = complex({ inputs: { real: realPart, imag: imagPart }, backend });\n backend.disposeIntermediateTensorInfo(realPart);\n backend.disposeIntermediateTensorInfo(imagPart);\n // TODO(annxingyuan): CPU forwarding for complex inputs.\n return complexOutput;\n }\n if (backend.shouldExecuteOnCPU([a, b])) {\n const aData = backend.texData.get(a.dataId);\n const bData = backend.texData.get(b.dataId);\n const [outValues, outShape] = cpuMultiply(a.shape, b.shape, aData.values, bData.values, dtype);\n const out = backend.makeTensorInfo(outShape, dtype);\n const outData = backend.texData.get(out.dataId);\n outData.values = outValues;\n return out;\n }\n let program;\n if (env().getBool('WEBGL_PACK_BINARY_OPERATIONS')) {\n program = new BinaryOpPackedProgram(MUL, a.shape, b.shape);\n }\n else {\n program = new BinaryOpProgram(MUL, a.shape, b.shape);\n }\n return backend.runWebGLProgram(program, [a, b], dtype);\n}\nexport const multiplyConfig = {\n kernelName: Multiply,\n backendName: 'webgl',\n kernelFunc: multiply\n};\n//# sourceMappingURL=Multiply.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, kernel_impls, NonMaxSuppressionV3 } from '@tensorflow/tfjs-core';\nexport const nonMaxSuppressionV3Config = {\n kernelName: NonMaxSuppressionV3,\n backendName: 'webgl',\n kernelFunc: ({ inputs, backend, attrs }) => {\n backend_util.warn('tf.nonMaxSuppression() in webgl locks the UI thread. ' +\n 'Call tf.nonMaxSuppressionAsync() instead');\n const { boxes, scores } = inputs;\n const { maxOutputSize, iouThreshold, scoreThreshold } = attrs;\n const gpuBackend = backend;\n const boxesVals = gpuBackend.readSync(boxes.dataId);\n const scoresVals = gpuBackend.readSync(scores.dataId);\n const maxOutputSizeVal = maxOutputSize;\n const iouThresholdVal = iouThreshold;\n const scoreThresholdVal = scoreThreshold;\n return kernel_impls.nonMaxSuppressionV3Impl(boxesVals, scoresVals, maxOutputSizeVal, iouThresholdVal, scoreThresholdVal);\n }\n};\n//# sourceMappingURL=NonMaxSuppressionV3.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, kernel_impls, NonMaxSuppressionV4 } from '@tensorflow/tfjs-core';\nconst nonMaxSuppressionV4Impl = kernel_impls.nonMaxSuppressionV4Impl;\nexport const nonMaxSuppressionV4Config = {\n kernelName: NonMaxSuppressionV4,\n backendName: 'webgl',\n kernelFunc: ({ inputs, backend, attrs }) => {\n backend_util.warn('tf.nonMaxSuppression() in webgl locks the UI thread. ' +\n 'Call tf.nonMaxSuppressionAsync() instead');\n const { boxes, scores } = inputs;\n const { maxOutputSize, iouThreshold, scoreThreshold, padToMaxOutputSize } = attrs;\n const gpuBackend = backend;\n const boxesVals = gpuBackend.readSync(boxes.dataId);\n const scoresVals = gpuBackend.readSync(scores.dataId);\n const { selectedIndices, validOutputs } = nonMaxSuppressionV4Impl(boxesVals, scoresVals, maxOutputSize, iouThreshold, scoreThreshold, padToMaxOutputSize);\n return [selectedIndices, validOutputs];\n }\n};\n//# sourceMappingURL=NonMaxSuppressionV4.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, kernel_impls } from '@tensorflow/tfjs-core';\nimport { NonMaxSuppressionV5 } from '@tensorflow/tfjs-core';\nconst nonMaxSuppressionV5Impl = kernel_impls.nonMaxSuppressionV5Impl;\nexport const nonMaxSuppressionV5Config = {\n kernelName: NonMaxSuppressionV5,\n backendName: 'webgl',\n kernelFunc: ({ inputs, backend, attrs }) => {\n backend_util.warn('tf.nonMaxSuppression() in webgl locks the UI thread. 
' +\n 'Call tf.nonMaxSuppressionAsync() instead');\n const { boxes, scores } = inputs;\n const { maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma } = attrs;\n const gpuBackend = backend;\n const boxesVals = gpuBackend.readSync(boxes.dataId);\n const scoresVals = gpuBackend.readSync(scores.dataId);\n const maxOutputSizeVal = maxOutputSize;\n const iouThresholdVal = iouThreshold;\n const scoreThresholdVal = scoreThreshold;\n const softNmsSigmaVal = softNmsSigma;\n const { selectedIndices, selectedScores } = nonMaxSuppressionV5Impl(boxesVals, scoresVals, maxOutputSizeVal, iouThresholdVal, scoreThresholdVal, softNmsSigmaVal);\n return [selectedIndices, selectedScores];\n }\n};\n//# sourceMappingURL=NonMaxSuppressionV5.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util } from '@tensorflow/tfjs-core';\nexport class RotateProgram {\n constructor(imageShape, radians, fillValue, center) {\n this.variableNames = ['Image'];\n this.outputShape = [];\n const imageHeight = imageShape[1];\n const imageWidth = imageShape[2];\n const sinFactor = Math.sin(radians).toFixed(3);\n const cosFactor = Math.cos(radians).toFixed(3);\n this.outputShape = imageShape;\n const [centerX, centerY] = backend_util.getImageCenter(center, imageHeight, imageWidth);\n const centerXString = centerX.toFixed(3);\n const centerYString = centerY.toFixed(3);\n let fillSnippet = '';\n if (typeof fillValue === 'number') {\n fillSnippet = `float outputValue = ${fillValue.toFixed(2)};`;\n }\n else {\n fillSnippet = `\n vec3 fill = vec3(${fillValue.join(',')});\n float outputValue = fill[coords[3]];`;\n }\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int x = coords[2];\n int y = coords[1];\n float coordXFloat = (float(x) - ${centerXString}) * ${cosFactor} - (float(y) - ${centerYString}) * ${sinFactor};\n float coordYFloat = (float(x) - ${centerXString}) * ${sinFactor} + (float(y) - ${centerYString}) * ${cosFactor};\n int coordX = int(round(coordXFloat + ${centerXString}));\n int coordY = int(round(coordYFloat + ${centerYString}));\n ${fillSnippet}\n if(coordX >= 0 && coordX < ${imageWidth} && coordY >= 0 && coordY < ${imageHeight}) {\n outputValue = getImage(coords[0], coordY, coordX, coords[3]);\n }\n setOutput(outputValue);\n }\n `;\n }\n}\n//# sourceMappingURL=rotate_gpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { RotateWithOffset } from '@tensorflow/tfjs-core';\nimport { RotateProgram } from '../rotate_gpu';\nexport const rotateWithOffsetConfig = {\n kernelName: RotateWithOffset,\n backendName: 'webgl',\n kernelFunc: ({ inputs, attrs, backend }) => {\n const { image } = inputs;\n const { radians, fillValue, center } = attrs;\n const webglBackend = backend;\n const program = new RotateProgram(image.shape, radians, fillValue, center);\n const output = webglBackend.runWebGLProgram(program, [image], image.dtype);\n return output;\n }\n};\n//# sourceMappingURL=RotateWithOffset.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sin } from '@tensorflow/tfjs-core';\nimport { CHECK_NAN_SNIPPET_UNARY, unaryKernelFunc } from '../kernel_utils/kernel_funcs_utils';\nconst SIN = CHECK_NAN_SNIPPET_UNARY + `\n return sin(x);\n`;\nexport const sin = unaryKernelFunc(SIN);\nexport const sinConfig = {\n kernelName: Sin,\n backendName: 'webgl',\n kernelFunc: sin,\n};\n//# sourceMappingURL=Sin.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Square } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../kernel_utils/kernel_funcs_utils';\nconst SQUARE = `return x * x;`;\nexport const square = unaryKernelFunc(SQUARE);\nexport const squareConfig = {\n kernelName: Square,\n backendName: 'webgl',\n kernelFunc: square,\n};\n//# sourceMappingURL=Square.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { SquaredDifference } from '@tensorflow/tfjs-core';\nimport { binaryKernelFunc } from '../kernel_utils/kernel_funcs_utils';\nconst SQUARED_DIFFERENCE = 'return (a - b) * (a - b);';\nexport const squaredDifference = binaryKernelFunc({ opSnippet: SQUARED_DIFFERENCE, packedOpSnippet: SQUARED_DIFFERENCE });\nexport const squaredDifferenceConfig = {\n kernelName: SquaredDifference,\n backendName: 'webgl',\n kernelFunc: squaredDifference,\n};\n//# sourceMappingURL=SquaredDifference.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sub } from '@tensorflow/tfjs-core';\nimport { binaryKernelFunc } from '../kernel_utils/kernel_funcs_utils';\nimport { subImplCPU as cpuSub } from '../kernel_utils/shared';\nconst SUB = 'return a - b;';\nexport const subKernelFunc = binaryKernelFunc({\n opSnippet: SUB,\n packedOpSnippet: SUB,\n supportsComplex: true,\n cpuKernelImpl: cpuSub\n});\nexport const subConfig = {\n kernelName: Sub,\n backendName: 'webgl',\n kernelFunc: subKernelFunc\n};\n//# sourceMappingURL=Sub.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Tan } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../kernel_utils/kernel_funcs_utils';\nconst TAN = `return tan(x);`;\nexport const tan = unaryKernelFunc(TAN);\nexport const tanConfig = {\n kernelName: Tan,\n backendName: 'webgl',\n kernelFunc: tan,\n};\n//# sourceMappingURL=Tan.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Transpose } from '@tensorflow/tfjs-core';\nimport { transposeImpl } from './Transpose_impl';\nimport { transposeImplCPU as cpuTranspose } from './Transpose_impl';\nexport const transposeConfig = {\n kernelName: Transpose,\n backendName: 'webgl',\n kernelFunc: ({ inputs, attrs, backend }) => {\n const { x } = inputs;\n const { perm } = attrs;\n const webglBackend = backend;\n const xRank = x.shape.length;\n const newShape = new Array(xRank);\n for (let i = 0; i < newShape.length; i++) {\n newShape[i] = x.shape[perm[i]];\n }\n let out;\n if (webglBackend.shouldExecuteOnCPU([x])) {\n const xTexData = webglBackend.texData.get(x.dataId);\n const values = xTexData.values;\n const outValues = cpuTranspose(values, x.shape, x.dtype, perm, newShape);\n out = webglBackend.makeTensorInfo(newShape, x.dtype);\n const outData = webglBackend.texData.get(out.dataId);\n outData.values = outValues;\n }\n else {\n out = transposeImpl(x, perm, webglBackend);\n }\n return out;\n }\n};\n//# sourceMappingURL=Transpose.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Unique } from '@tensorflow/tfjs-core';\nimport { uniqueImplCPU } from '../kernel_utils/shared';\nimport { assertNotComplex } from '../webgl_util';\nexport function unique(args) {\n const { inputs, attrs, backend } = args;\n const { axis } = attrs;\n const { x } = inputs;\n assertNotComplex(x, 'unique');\n // For now, always forward calculation to the CPU backend.\n console.warn('WARNING: ', 'UI might be locked temporarily as data is being downloaded');\n const values = backend.readSync(x.dataId);\n const { outputValues, outputShape, indices } = uniqueImplCPU(values, axis, x.shape, x.dtype);\n return [\n backend.makeTensorInfo(outputShape, x.dtype, outputValues),\n backend.makeTensorInfo([indices.length], 'int32', indices),\n ];\n}\nexport const uniqueConfig = {\n kernelName: Unique,\n backendName: 'webgl',\n kernelFunc: unique,\n};\n//# sourceMappingURL=Unique.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { registerKernel } from '@tensorflow/tfjs-core';\nimport { addConfig } from './kernels/Add';\nimport { atan2Config } from './kernels/Atan2';\nimport { avgPoolConfig } from './kernels/AvgPool';\nimport { avgPoolBackpropConfig } from './kernels/AvgPoolBackprop';\nimport { batchNormConfig } from './kernels/BatchNorm';\nimport { castConfig } from './kernels/Cast';\nimport { complexConfig } from './kernels/Complex';\nimport { concatConfig } from './kernels/Concat';\nimport { cosConfig } from './kernels/Cos';\nimport { divConfig } from './kernels/Div';\nimport { fftConfig } from './kernels/FFT';\nimport { flipLeftRightConfig } from './kernels/FlipLeftRight';\nimport { fromPixelsConfig } from './kernels/FromPixels';\nimport { identityConfig } from './kernels/Identity';\nimport { ifftConfig } from './kernels/IFFT';\nimport { imagConfig } from './kernels/Imag';\nimport { maxConfig } from './kernels/Max';\nimport { maxPoolConfig } from './kernels/MaxPool';\nimport { maxPoolBackpropConfig } from './kernels/MaxPoolBackprop';\nimport { maxPoolWithArgmaxConfig } from './kernels/MaxPoolWithArgmax';\nimport { meanConfig } from './kernels/Mean';\nimport { mirrorPadConfig } from './kernels/MirrorPad';\nimport { multiplyConfig } from './kernels/Multiply';\nimport { nonMaxSuppressionV3Config } from './kernels/NonMaxSuppressionV3';\nimport { nonMaxSuppressionV4Config } from './kernels/NonMaxSuppressionV4';\nimport { nonMaxSuppressionV5Config } from './kernels/NonMaxSuppressionV5';\nimport { notEqualConfig } from './kernels/NotEqual';\nimport { realConfig } from './kernels/Real';\nimport { reshapeConfig } from './kernels/Reshape';\nimport { rotateWithOffsetConfig } from './kernels/RotateWithOffset';\nimport { sinConfig } from './kernels/Sin';\nimport { squareConfig } from './kernels/Square';\nimport { squaredDifferenceConfig } from './kernels/SquaredDifference';\nimport { subConfig } from './kernels/Sub';\nimport { tanConfig } from './kernels/Tan';\nimport { transposeConfig } from './kernels/Transpose';\nimport { uniqueConfig } from './kernels/Unique';\n// List all kernel configs here\nconst kernelConfigs = [\n addConfig,\n atan2Config,\n avgPoolConfig,\n avgPoolBackpropConfig,\n batchNormConfig,\n castConfig,\n complexConfig,\n concatConfig,\n cosConfig,\n divConfig,\n fftConfig,\n flipLeftRightConfig,\n fromPixelsConfig,\n identityConfig,\n ifftConfig,\n imagConfig,\n maxConfig,\n maxPoolConfig,\n maxPoolBackpropConfig,\n maxPoolWithArgmaxConfig,\n meanConfig,\n mirrorPadConfig,\n multiplyConfig,\n nonMaxSuppressionV3Config,\n nonMaxSuppressionV4Config,\n nonMaxSuppressionV5Config,\n notEqualConfig,\n realConfig,\n reshapeConfig,\n rotateWithOffsetConfig,\n sinConfig,\n squareConfig,\n subConfig,\n squaredDifferenceConfig,\n tanConfig,\n transposeConfig,\n uniqueConfig,\n];\nfor (const kernelConfig of kernelConfigs) {\n 
registerKernel(kernelConfig);\n}\n//# sourceMappingURL=register_all_kernels.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// All exports from this package should be in base.\nexport * from './base';\nimport './register_all_kernels';\n//# sourceMappingURL=index.js.map", "/** @license See the LICENSE file. */\n\n// This code is auto-generated, do not modify this file!\nconst version = '2.7.0';\nexport {version};\n", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nexport * from '@tensorflow/tfjs-core';\nexport * from '@tensorflow/tfjs-layers';\nexport * from '@tensorflow/tfjs-converter';\n\n// Export data api as tf.data\nimport * as data from '@tensorflow/tfjs-data';\nexport {data};\n\n// Import and register backends.\nimport '@tensorflow/tfjs-backend-cpu';\nimport '@tensorflow/tfjs-backend-webgl';\n\n// Import versions of all sub-packages.\nimport {version_core} from '@tensorflow/tfjs-core';\nimport {version_cpu} from '@tensorflow/tfjs-backend-cpu';\nimport {version_webgl} from '@tensorflow/tfjs-backend-webgl';\nimport {version_data} from '@tensorflow/tfjs-data';\nimport {version_layers} from '@tensorflow/tfjs-layers';\nimport {version_converter} from '@tensorflow/tfjs-converter';\nimport {version as version_union} from './version';\n\nexport const version = {\n 'tfjs-core': version_core,\n 'tfjs-backend-cpu': version_cpu,\n 'tfjs-backend-webgl': version_webgl,\n 'tfjs-data': version_data,\n 'tfjs-layers': version_layers,\n 'tfjs-converter': version_converter,\n 'tfjs': version_union\n};\n", "", "", "", "\nvar WasmBackendModuleThreadedSimd = (function() {\n var _scriptDir = typeof document !== 'undefined' && document.currentScript ? 
document.currentScript.src : undefined;\n if (typeof __filename !== 'undefined') _scriptDir = _scriptDir || __filename;\n return (\nfunction(WasmBackendModuleThreadedSimd) {\n WasmBackendModuleThreadedSimd = WasmBackendModuleThreadedSimd || {};\n\nfunction GROWABLE_HEAP_I8(){if(wasmMemory.buffer!=buffer){updateGlobalBufferAndViews(wasmMemory.buffer)}return HEAP8}function GROWABLE_HEAP_U8(){if(wasmMemory.buffer!=buffer){updateGlobalBufferAndViews(wasmMemory.buffer)}return HEAPU8}function GROWABLE_HEAP_I32(){if(wasmMemory.buffer!=buffer){updateGlobalBufferAndViews(wasmMemory.buffer)}return HEAP32}function GROWABLE_HEAP_U32(){if(wasmMemory.buffer!=buffer){updateGlobalBufferAndViews(wasmMemory.buffer)}return HEAPU32}function GROWABLE_HEAP_F64(){if(wasmMemory.buffer!=buffer){updateGlobalBufferAndViews(wasmMemory.buffer)}return HEAPF64}var Module=typeof WasmBackendModuleThreadedSimd!==\"undefined\"?WasmBackendModuleThreadedSimd:{};var moduleOverrides={};var key;for(key in Module){if(Module.hasOwnProperty(key)){moduleOverrides[key]=Module[key]}}var arguments_=[];var thisProgram=\"./this.program\";var quit_=function(status,toThrow){throw toThrow};var ENVIRONMENT_IS_WEB=false;var ENVIRONMENT_IS_WORKER=false;var ENVIRONMENT_IS_NODE=false;var ENVIRONMENT_IS_SHELL=false;ENVIRONMENT_IS_WEB=typeof window===\"object\";ENVIRONMENT_IS_WORKER=typeof importScripts===\"function\";ENVIRONMENT_IS_NODE=typeof process===\"object\"&&typeof process.versions===\"object\"&&typeof process.versions.node===\"string\";ENVIRONMENT_IS_SHELL=!ENVIRONMENT_IS_WEB&&!ENVIRONMENT_IS_NODE&&!ENVIRONMENT_IS_WORKER;var ENVIRONMENT_IS_PTHREAD=Module[\"ENVIRONMENT_IS_PTHREAD\"]||false;if(ENVIRONMENT_IS_PTHREAD){buffer=Module[\"buffer\"];DYNAMIC_BASE=Module[\"DYNAMIC_BASE\"];DYNAMICTOP_PTR=Module[\"DYNAMICTOP_PTR\"]}var scriptDirectory=\"\";function locateFile(path){if(Module[\"locateFile\"]){return Module[\"locateFile\"](path,scriptDirectory)}return scriptDirectory+path}var read_,readAsync,readBinary,setWindowTitle;var nodeFS;var nodePath;if(ENVIRONMENT_IS_NODE){if(ENVIRONMENT_IS_WORKER){scriptDirectory=require(\"path\").dirname(scriptDirectory)+\"/\"}else{scriptDirectory=__dirname+\"/\"}read_=function shell_read(filename,binary){if(!nodeFS)nodeFS=require(\"fs\");if(!nodePath)nodePath=require(\"path\");filename=nodePath[\"normalize\"](filename);return nodeFS[\"readFileSync\"](filename,binary?null:\"utf8\")};readBinary=function readBinary(filename){var ret=read_(filename,true);if(!ret.buffer){ret=new Uint8Array(ret)}assert(ret.buffer);return ret};if(process[\"argv\"].length>1){thisProgram=process[\"argv\"][1].replace(/\\\\/g,\"/\")}arguments_=process[\"argv\"].slice(2);process[\"on\"](\"uncaughtException\",function(ex){if(!(ex instanceof ExitStatus)){throw ex}});process[\"on\"](\"unhandledRejection\",abort);quit_=function(status){process[\"exit\"](status)};Module[\"inspect\"]=function(){return\"[Emscripten Module object]\"};var nodeWorkerThreads;try{nodeWorkerThreads=require(\"worker_threads\")}catch(e){console.error('The \"worker_threads\" module is not supported in this node.js build - perhaps a newer version is needed?');throw e}Worker=nodeWorkerThreads.Worker}else if(ENVIRONMENT_IS_SHELL){if(typeof read!=\"undefined\"){read_=function shell_read(f){return read(f)}}readBinary=function readBinary(f){var data;if(typeof readbuffer===\"function\"){return new Uint8Array(readbuffer(f))}data=read(f,\"binary\");assert(typeof data===\"object\");return data};if(typeof scriptArgs!=\"undefined\"){arguments_=scriptArgs}else if(typeof 
arguments!=\"undefined\"){arguments_=arguments}if(typeof quit===\"function\"){quit_=function(status){quit(status)}}if(typeof print!==\"undefined\"){if(typeof console===\"undefined\")console={};console.log=print;console.warn=console.error=typeof printErr!==\"undefined\"?printErr:print}}else if(ENVIRONMENT_IS_WEB||ENVIRONMENT_IS_WORKER){if(ENVIRONMENT_IS_WORKER){scriptDirectory=self.location.href}else if(document.currentScript){scriptDirectory=document.currentScript.src}if(_scriptDir){scriptDirectory=_scriptDir}if(scriptDirectory.indexOf(\"blob:\")!==0){scriptDirectory=scriptDirectory.substr(0,scriptDirectory.lastIndexOf(\"/\")+1)}else{scriptDirectory=\"\"}if(ENVIRONMENT_IS_NODE){read_=function shell_read(filename,binary){if(!nodeFS)nodeFS=require(\"fs\");if(!nodePath)nodePath=require(\"path\");filename=nodePath[\"normalize\"](filename);return nodeFS[\"readFileSync\"](filename,binary?null:\"utf8\")};readBinary=function readBinary(filename){var ret=read_(filename,true);if(!ret.buffer){ret=new Uint8Array(ret)}assert(ret.buffer);return ret}}else{read_=function shell_read(url){var xhr=new XMLHttpRequest;xhr.open(\"GET\",url,false);xhr.send(null);return xhr.responseText};if(ENVIRONMENT_IS_WORKER){readBinary=function readBinary(url){var xhr=new XMLHttpRequest;xhr.open(\"GET\",url,false);xhr.responseType=\"arraybuffer\";xhr.send(null);return new Uint8Array(xhr.response)}}readAsync=function readAsync(url,onload,onerror){var xhr=new XMLHttpRequest;xhr.open(\"GET\",url,true);xhr.responseType=\"arraybuffer\";xhr.onload=function xhr_onload(){if(xhr.status==200||xhr.status==0&&xhr.response){onload(xhr.response);return}onerror()};xhr.onerror=onerror;xhr.send(null)}}setWindowTitle=function(title){document.title=title}}else{}if(ENVIRONMENT_IS_NODE){if(typeof performance===\"undefined\"){performance=require(\"perf_hooks\").performance}}var out=Module[\"print\"]||console.log.bind(console);var err=Module[\"printErr\"]||console.warn.bind(console);for(key in moduleOverrides){if(moduleOverrides.hasOwnProperty(key)){Module[key]=moduleOverrides[key]}}moduleOverrides=null;if(Module[\"arguments\"])arguments_=Module[\"arguments\"];if(Module[\"thisProgram\"])thisProgram=Module[\"thisProgram\"];if(Module[\"quit\"])quit_=Module[\"quit\"];var Atomics_load=Atomics.load;var Atomics_store=Atomics.store;var Atomics_compareExchange=Atomics.compareExchange;var wasmBinary;if(Module[\"wasmBinary\"])wasmBinary=Module[\"wasmBinary\"];var noExitRuntime;if(Module[\"noExitRuntime\"])noExitRuntime=Module[\"noExitRuntime\"];if(typeof WebAssembly!==\"object\"){err(\"no native wasm support detected\")}var wasmMemory;var wasmTable=new WebAssembly.Table({\"initial\":165,\"maximum\":165+0,\"element\":\"anyfunc\"});var wasmModule;var threadInfoStruct=0;var selfThreadId=0;var ABORT=false;var EXITSTATUS=0;function assert(condition,text){if(!condition){abort(\"Assertion failed: \"+text)}}function getCFunc(ident){var func=Module[\"_\"+ident];assert(func,\"Cannot call unknown function \"+ident+\", make sure it is exported\");return func}function ccall(ident,returnType,argTypes,args,opts){var toC={\"string\":function(str){var ret=0;if(str!==null&&str!==undefined&&str!==0){var len=(str.length<<2)+1;ret=stackAlloc(len);stringToUTF8(str,ret,len)}return ret},\"array\":function(arr){var ret=stackAlloc(arr.length);writeArrayToMemory(arr,ret);return ret}};function convertReturnValue(ret){if(returnType===\"string\")return UTF8ToString(ret);if(returnType===\"boolean\")return Boolean(ret);return ret}var func=getCFunc(ident);var cArgs=[];var 
stack=0;if(args){for(var i=0;i=endIdx)){var u0=heap[idx++];if(!u0)return str;if(!(u0&128)){str+=String.fromCharCode(u0);continue}var u1=heap[idx++]&63;if((u0&224)==192){str+=String.fromCharCode((u0&31)<<6|u1);continue}var u2=heap[idx++]&63;if((u0&240)==224){u0=(u0&15)<<12|u1<<6|u2}else{u0=(u0&7)<<18|u1<<12|u2<<6|heap[idx++]&63}if(u0<65536){str+=String.fromCharCode(u0)}else{var ch=u0-65536;str+=String.fromCharCode(55296|ch>>10,56320|ch&1023)}}return str}function UTF8ToString(ptr,maxBytesToRead){return ptr?UTF8ArrayToString(GROWABLE_HEAP_U8(),ptr,maxBytesToRead):\"\"}function stringToUTF8Array(str,heap,outIdx,maxBytesToWrite){if(!(maxBytesToWrite>0))return 0;var startIdx=outIdx;var endIdx=outIdx+maxBytesToWrite-1;for(var i=0;i=55296&&u<=57343){var u1=str.charCodeAt(++i);u=65536+((u&1023)<<10)|u1&1023}if(u<=127){if(outIdx>=endIdx)break;heap[outIdx++]=u}else if(u<=2047){if(outIdx+1>=endIdx)break;heap[outIdx++]=192|u>>6;heap[outIdx++]=128|u&63}else if(u<=65535){if(outIdx+2>=endIdx)break;heap[outIdx++]=224|u>>12;heap[outIdx++]=128|u>>6&63;heap[outIdx++]=128|u&63}else{if(outIdx+3>=endIdx)break;heap[outIdx++]=240|u>>18;heap[outIdx++]=128|u>>12&63;heap[outIdx++]=128|u>>6&63;heap[outIdx++]=128|u&63}}heap[outIdx]=0;return outIdx-startIdx}function stringToUTF8(str,outPtr,maxBytesToWrite){return stringToUTF8Array(str,GROWABLE_HEAP_U8(),outPtr,maxBytesToWrite)}function lengthBytesUTF8(str){var len=0;for(var i=0;i=55296&&u<=57343)u=65536+((u&1023)<<10)|str.charCodeAt(++i)&1023;if(u<=127)++len;else if(u<=2047)len+=2;else if(u<=65535)len+=3;else len+=4}return len}function writeArrayToMemory(array,buffer){GROWABLE_HEAP_I8().set(array,buffer)}var WASM_PAGE_SIZE=65536;function alignUp(x,multiple){if(x%multiple>0){x+=multiple-x%multiple}return x}var buffer,HEAP8,HEAPU8,HEAP16,HEAPU16,HEAP32,HEAPU32,HEAPF32,HEAPF64;function updateGlobalBufferAndViews(buf){buffer=buf;Module[\"HEAP8\"]=HEAP8=new Int8Array(buf);Module[\"HEAP16\"]=HEAP16=new Int16Array(buf);Module[\"HEAP32\"]=HEAP32=new Int32Array(buf);Module[\"HEAPU8\"]=HEAPU8=new Uint8Array(buf);Module[\"HEAPU16\"]=HEAPU16=new Uint16Array(buf);Module[\"HEAPU32\"]=HEAPU32=new Uint32Array(buf);Module[\"HEAPF32\"]=HEAPF32=new Float32Array(buf);Module[\"HEAPF64\"]=HEAPF64=new Float64Array(buf)}var STACK_BASE=5256384,STACKTOP=STACK_BASE,STACK_MAX=13504,DYNAMIC_BASE=5256384,DYNAMICTOP_PTR=12576;if(ENVIRONMENT_IS_PTHREAD){}var INITIAL_INITIAL_MEMORY=Module[\"INITIAL_MEMORY\"]||16777216;if(ENVIRONMENT_IS_PTHREAD){wasmMemory=Module[\"wasmMemory\"];buffer=Module[\"buffer\"]}else{if(Module[\"wasmMemory\"]){wasmMemory=Module[\"wasmMemory\"]}else{wasmMemory=new WebAssembly.Memory({\"initial\":INITIAL_INITIAL_MEMORY/WASM_PAGE_SIZE,\"maximum\":2147483648/WASM_PAGE_SIZE,\"shared\":true});if(!(wasmMemory.buffer instanceof SharedArrayBuffer)){err(\"requested a shared WebAssembly.Memory but the returned buffer is not a SharedArrayBuffer, indicating that while the browser has SharedArrayBuffer it does not have WebAssembly threads support - you may need to set a flag\");if(ENVIRONMENT_IS_NODE){console.log(\"(on node you may need: --experimental-wasm-threads --experimental-wasm-bulk-memory and also use a recent version)\")}throw Error(\"bad memory\")}}}if(wasmMemory){buffer=wasmMemory.buffer}INITIAL_INITIAL_MEMORY=buffer.byteLength;updateGlobalBufferAndViews(buffer);if(!ENVIRONMENT_IS_PTHREAD){GROWABLE_HEAP_I32()[DYNAMICTOP_PTR>>2]=DYNAMIC_BASE}function callRuntimeCallbacks(callbacks){while(callbacks.length>0){var callback=callbacks.shift();if(typeof 
callback==\"function\"){callback(Module);continue}var func=callback.func;if(typeof func===\"number\"){if(callback.arg===undefined){Module[\"dynCall_v\"](func)}else{Module[\"dynCall_vi\"](func,callback.arg)}}else{func(callback.arg===undefined?null:callback.arg)}}}var __ATPRERUN__=[];var __ATINIT__=[];var __ATMAIN__=[];var __ATEXIT__=[];var __ATPOSTRUN__=[];var runtimeInitialized=false;if(ENVIRONMENT_IS_PTHREAD)runtimeInitialized=true;function preRun(){if(ENVIRONMENT_IS_PTHREAD)return;if(Module[\"preRun\"]){if(typeof Module[\"preRun\"]==\"function\")Module[\"preRun\"]=[Module[\"preRun\"]];while(Module[\"preRun\"].length){addOnPreRun(Module[\"preRun\"].shift())}}callRuntimeCallbacks(__ATPRERUN__)}function initRuntime(){runtimeInitialized=true;callRuntimeCallbacks(__ATINIT__)}function preMain(){if(ENVIRONMENT_IS_PTHREAD)return;callRuntimeCallbacks(__ATMAIN__)}function postRun(){if(ENVIRONMENT_IS_PTHREAD)return;if(Module[\"postRun\"]){if(typeof Module[\"postRun\"]==\"function\")Module[\"postRun\"]=[Module[\"postRun\"]];while(Module[\"postRun\"].length){addOnPostRun(Module[\"postRun\"].shift())}}callRuntimeCallbacks(__ATPOSTRUN__)}function addOnPreRun(cb){__ATPRERUN__.unshift(cb)}function addOnPostRun(cb){__ATPOSTRUN__.unshift(cb)}var Math_ceil=Math.ceil;var Math_floor=Math.floor;var runDependencies=0;var runDependencyWatcher=null;var dependenciesFulfilled=null;function addRunDependency(id){assert(!ENVIRONMENT_IS_PTHREAD,\"addRunDependency cannot be used in a pthread worker\");runDependencies++;if(Module[\"monitorRunDependencies\"]){Module[\"monitorRunDependencies\"](runDependencies)}}function removeRunDependency(id){runDependencies--;if(Module[\"monitorRunDependencies\"]){Module[\"monitorRunDependencies\"](runDependencies)}if(runDependencies==0){if(runDependencyWatcher!==null){clearInterval(runDependencyWatcher);runDependencyWatcher=null}if(dependenciesFulfilled){var callback=dependenciesFulfilled;dependenciesFulfilled=null;callback()}}}Module[\"preloadedImages\"]={};Module[\"preloadedAudios\"]={};function abort(what){if(Module[\"onAbort\"]){Module[\"onAbort\"](what)}if(ENVIRONMENT_IS_PTHREAD)console.error(\"Pthread aborting at \"+(new Error).stack);what+=\"\";out(what);err(what);ABORT=true;EXITSTATUS=1;what=\"abort(\"+what+\"). 
Build with -s ASSERTIONS=1 for more info.\";throw new WebAssembly.RuntimeError(what)}function hasPrefix(str,prefix){return String.prototype.startsWith?str.startsWith(prefix):str.indexOf(prefix)===0}var dataURIPrefix=\"data:application/octet-stream;base64,\";function isDataURI(filename){return hasPrefix(filename,dataURIPrefix)}var fileURIPrefix=\"file://\";function isFileURI(filename){return hasPrefix(filename,fileURIPrefix)}var wasmBinaryFile=\"tfjs-backend-wasm-threaded-simd.wasm\";if(!isDataURI(wasmBinaryFile)){wasmBinaryFile=locateFile(wasmBinaryFile)}function getBinary(){try{if(wasmBinary){return new Uint8Array(wasmBinary)}if(readBinary){return readBinary(wasmBinaryFile)}else{throw\"both async and sync fetching of the wasm failed\"}}catch(err){abort(err)}}function getBinaryPromise(){if(!wasmBinary&&(ENVIRONMENT_IS_WEB||ENVIRONMENT_IS_WORKER)&&typeof fetch===\"function\"&&!isFileURI(wasmBinaryFile)){return fetch(wasmBinaryFile,{credentials:\"same-origin\"}).then(function(response){if(!response[\"ok\"]){throw\"failed to load wasm binary file at '\"+wasmBinaryFile+\"'\"}return response[\"arrayBuffer\"]()}).catch(function(){return getBinary()})}return new Promise(function(resolve,reject){resolve(getBinary())})}function createWasm(){var info={\"a\":asmLibraryArg};function receiveInstance(instance,module){var exports=instance.exports;Module[\"asm\"]=exports;wasmModule=module;if(!ENVIRONMENT_IS_PTHREAD){var numWorkersToLoad=PThread.unusedWorkers.length;PThread.unusedWorkers.forEach(function(w){PThread.loadWasmModuleToWorker(w,function(){if(!--numWorkersToLoad)removeRunDependency(\"wasm-instantiate\")})})}}if(!ENVIRONMENT_IS_PTHREAD){addRunDependency(\"wasm-instantiate\")}function receiveInstantiatedSource(output){receiveInstance(output[\"instance\"],output[\"module\"])}function instantiateArrayBuffer(receiver){return getBinaryPromise().then(function(binary){return WebAssembly.instantiate(binary,info)}).then(receiver,function(reason){err(\"failed to asynchronously prepare wasm: \"+reason);abort(reason)})}function instantiateAsync(){if(!wasmBinary&&typeof WebAssembly.instantiateStreaming===\"function\"&&!isDataURI(wasmBinaryFile)&&!isFileURI(wasmBinaryFile)&&typeof fetch===\"function\"){fetch(wasmBinaryFile,{credentials:\"same-origin\"}).then(function(response){var result=WebAssembly.instantiateStreaming(response,info);return result.then(receiveInstantiatedSource,function(reason){err(\"wasm streaming compile failed: \"+reason);err(\"falling back to ArrayBuffer instantiation\");instantiateArrayBuffer(receiveInstantiatedSource)})})}else{return instantiateArrayBuffer(receiveInstantiatedSource)}}if(Module[\"instantiateWasm\"]){try{var exports=Module[\"instantiateWasm\"](info,receiveInstance);return exports}catch(e){err(\"Module.instantiateWasm callback failed with error: \"+e);return false}}instantiateAsync();return{}}var ASM_CONSTS={};function initPthreadsJS(){PThread.initRuntime()}if(!ENVIRONMENT_IS_PTHREAD)__ATINIT__.push({func:function(){___wasm_call_ctors()}});var __pthread_ptr=0;var __pthread_is_main_runtime_thread=0;var __pthread_is_main_browser_thread=0;function __register_pthread_ptr(pthreadPtr,isMainBrowserThread,isMainRuntimeThread){pthreadPtr=pthreadPtr|0;isMainBrowserThread=isMainBrowserThread|0;isMainRuntimeThread=isMainRuntimeThread|0;__pthread_ptr=pthreadPtr;__pthread_is_main_browser_thread=isMainBrowserThread;__pthread_is_main_runtime_thread=isMainRuntimeThread}Module[\"__register_pthread_ptr\"]=__register_pthread_ptr;var 
ERRNO_CODES={EPERM:63,ENOENT:44,ESRCH:71,EINTR:27,EIO:29,ENXIO:60,E2BIG:1,ENOEXEC:45,EBADF:8,ECHILD:12,EAGAIN:6,EWOULDBLOCK:6,ENOMEM:48,EACCES:2,EFAULT:21,ENOTBLK:105,EBUSY:10,EEXIST:20,EXDEV:75,ENODEV:43,ENOTDIR:54,EISDIR:31,EINVAL:28,ENFILE:41,EMFILE:33,ENOTTY:59,ETXTBSY:74,EFBIG:22,ENOSPC:51,ESPIPE:70,EROFS:69,EMLINK:34,EPIPE:64,EDOM:18,ERANGE:68,ENOMSG:49,EIDRM:24,ECHRNG:106,EL2NSYNC:156,EL3HLT:107,EL3RST:108,ELNRNG:109,EUNATCH:110,ENOCSI:111,EL2HLT:112,EDEADLK:16,ENOLCK:46,EBADE:113,EBADR:114,EXFULL:115,ENOANO:104,EBADRQC:103,EBADSLT:102,EDEADLOCK:16,EBFONT:101,ENOSTR:100,ENODATA:116,ETIME:117,ENOSR:118,ENONET:119,ENOPKG:120,EREMOTE:121,ENOLINK:47,EADV:122,ESRMNT:123,ECOMM:124,EPROTO:65,EMULTIHOP:36,EDOTDOT:125,EBADMSG:9,ENOTUNIQ:126,EBADFD:127,EREMCHG:128,ELIBACC:129,ELIBBAD:130,ELIBSCN:131,ELIBMAX:132,ELIBEXEC:133,ENOSYS:52,ENOTEMPTY:55,ENAMETOOLONG:37,ELOOP:32,EOPNOTSUPP:138,EPFNOSUPPORT:139,ECONNRESET:15,ENOBUFS:42,EAFNOSUPPORT:5,EPROTOTYPE:67,ENOTSOCK:57,ENOPROTOOPT:50,ESHUTDOWN:140,ECONNREFUSED:14,EADDRINUSE:3,ECONNABORTED:13,ENETUNREACH:40,ENETDOWN:38,ETIMEDOUT:73,EHOSTDOWN:142,EHOSTUNREACH:23,EINPROGRESS:26,EALREADY:7,EDESTADDRREQ:17,EMSGSIZE:35,EPROTONOSUPPORT:66,ESOCKTNOSUPPORT:137,EADDRNOTAVAIL:4,ENETRESET:39,EISCONN:30,ENOTCONN:53,ETOOMANYREFS:141,EUSERS:136,EDQUOT:19,ESTALE:72,ENOTSUP:138,ENOMEDIUM:148,EILSEQ:25,EOVERFLOW:61,ECANCELED:11,ENOTRECOVERABLE:56,EOWNERDEAD:62,ESTRPIPE:135};var __main_thread_futex_wait_address=13488;function _emscripten_futex_wake(addr,count){if(addr<=0||addr>GROWABLE_HEAP_I8().length||addr&3!=0||count<0)return-28;if(count==0)return 0;if(count>=2147483647)count=Infinity;var mainThreadWaitAddress=Atomics.load(GROWABLE_HEAP_I32(),__main_thread_futex_wait_address>>2);var mainThreadWoken=0;if(mainThreadWaitAddress==addr){var loadedAddr=Atomics.compareExchange(GROWABLE_HEAP_I32(),__main_thread_futex_wait_address>>2,mainThreadWaitAddress,0);if(loadedAddr==mainThreadWaitAddress){--count;mainThreadWoken=1;if(count<=0)return 1}}var ret=Atomics.notify(GROWABLE_HEAP_I32(),addr>>2,count);if(ret>=0)return ret+mainThreadWoken;throw\"Atomics.notify returned an unexpected value \"+ret}Module[\"_emscripten_futex_wake\"]=_emscripten_futex_wake;function __kill_thread(pthread_ptr){if(ENVIRONMENT_IS_PTHREAD)throw\"Internal Error! _kill_thread() can only ever be called from main application thread!\";if(!pthread_ptr)throw\"Internal Error! Null pthread_ptr in _kill_thread!\";GROWABLE_HEAP_I32()[pthread_ptr+12>>2]=0;var pthread=PThread.pthreads[pthread_ptr];pthread.worker.terminate();PThread.freeThreadData(pthread);PThread.runningWorkers.splice(PThread.runningWorkers.indexOf(pthread.worker),1);pthread.worker.pthread=undefined}function __cancel_thread(pthread_ptr){if(ENVIRONMENT_IS_PTHREAD)throw\"Internal Error! _cancel_thread() can only ever be called from main application thread!\";if(!pthread_ptr)throw\"Internal Error! Null pthread_ptr in _cancel_thread!\";var pthread=PThread.pthreads[pthread_ptr];pthread.worker.postMessage({\"cmd\":\"cancel\"})}function __cleanup_thread(pthread_ptr){if(ENVIRONMENT_IS_PTHREAD)throw\"Internal Error! _cleanup_thread() can only ever be called from main application thread!\";if(!pthread_ptr)throw\"Internal Error! 
Null pthread_ptr in _cleanup_thread!\";GROWABLE_HEAP_I32()[pthread_ptr+12>>2]=0;var pthread=PThread.pthreads[pthread_ptr];if(pthread){var worker=pthread.worker;PThread.returnWorkerToPool(worker)}}var PThread={MAIN_THREAD_ID:1,mainThreadInfo:{schedPolicy:0,schedPrio:0},unusedWorkers:[],runningWorkers:[],initRuntime:function(){__register_pthread_ptr(PThread.mainThreadBlock,!ENVIRONMENT_IS_WORKER,1);_emscripten_register_main_browser_thread_id(PThread.mainThreadBlock)},initMainThreadBlock:function(){var pthreadPoolSize=8;for(var i=0;i>2]=PThread.mainThreadBlock;var headPtr=PThread.mainThreadBlock+156;GROWABLE_HEAP_I32()[headPtr>>2]=headPtr;var tlsMemory=12976;for(var i=0;i<128;++i)GROWABLE_HEAP_U32()[tlsMemory/4+i]=0;Atomics.store(GROWABLE_HEAP_U32(),PThread.mainThreadBlock+104>>2,tlsMemory);Atomics.store(GROWABLE_HEAP_U32(),PThread.mainThreadBlock+40>>2,PThread.mainThreadBlock);Atomics.store(GROWABLE_HEAP_U32(),PThread.mainThreadBlock+44>>2,42)},initWorker:function(){},pthreads:{},exitHandlers:null,setThreadStatus:function(){},runExitHandlers:function(){if(PThread.exitHandlers!==null){while(PThread.exitHandlers.length>0){PThread.exitHandlers.pop()()}PThread.exitHandlers=null}if(ENVIRONMENT_IS_PTHREAD&&threadInfoStruct)___pthread_tsd_run_dtors()},threadExit:function(exitCode){var tb=_pthread_self();if(tb){Atomics.store(GROWABLE_HEAP_U32(),tb+4>>2,exitCode);Atomics.store(GROWABLE_HEAP_U32(),tb+0>>2,1);Atomics.store(GROWABLE_HEAP_U32(),tb+60>>2,1);Atomics.store(GROWABLE_HEAP_U32(),tb+64>>2,0);PThread.runExitHandlers();_emscripten_futex_wake(tb+0,2147483647);__register_pthread_ptr(0,0,0);threadInfoStruct=0;if(ENVIRONMENT_IS_PTHREAD){postMessage({\"cmd\":\"exit\"})}}},threadCancel:function(){PThread.runExitHandlers();Atomics.store(GROWABLE_HEAP_U32(),threadInfoStruct+4>>2,-1);Atomics.store(GROWABLE_HEAP_U32(),threadInfoStruct+0>>2,1);_emscripten_futex_wake(threadInfoStruct+0,2147483647);threadInfoStruct=selfThreadId=0;__register_pthread_ptr(0,0,0);postMessage({\"cmd\":\"cancelDone\"})},terminateAllThreads:function(){for(var t in PThread.pthreads){var pthread=PThread.pthreads[t];if(pthread&&pthread.worker){PThread.returnWorkerToPool(pthread.worker)}}PThread.pthreads={};for(var i=0;i>2];GROWABLE_HEAP_I32()[pthread.threadInfoStruct+104>>2]=0;_free(tlsMemory);_free(pthread.threadInfoStruct)}pthread.threadInfoStruct=0;if(pthread.allocatedOwnStack&&pthread.stackBase)_free(pthread.stackBase);pthread.stackBase=0;if(pthread.worker)pthread.worker.pthread=null},returnWorkerToPool:function(worker){delete PThread.pthreads[worker.pthread.thread];PThread.unusedWorkers.push(worker);PThread.runningWorkers.splice(PThread.runningWorkers.indexOf(worker),1);PThread.freeThreadData(worker.pthread);worker.pthread=undefined},receiveObjectTransfer:function(data){},loadWasmModuleToWorker:function(worker,onFinishedLoading){worker.onmessage=function(e){var d=e[\"data\"];var cmd=d[\"cmd\"];if(worker.pthread)PThread.currentProxiedOperationCallerThread=worker.pthread.threadInfoStruct;if(d[\"targetThread\"]&&d[\"targetThread\"]!=_pthread_self()){var thread=PThread.pthreads[d.targetThread];if(thread){thread.worker.postMessage(e.data,d[\"transferList\"])}else{console.error('Internal error! 
Worker sent a message \"'+cmd+'\" to target pthread '+d[\"targetThread\"]+\", but that thread no longer exists!\")}PThread.currentProxiedOperationCallerThread=undefined;return}if(cmd===\"processQueuedMainThreadWork\"){_emscripten_main_thread_process_queued_calls()}else if(cmd===\"spawnThread\"){__spawn_thread(e.data)}else if(cmd===\"cleanupThread\"){__cleanup_thread(d[\"thread\"])}else if(cmd===\"killThread\"){__kill_thread(d[\"thread\"])}else if(cmd===\"cancelThread\"){__cancel_thread(d[\"thread\"])}else if(cmd===\"loaded\"){worker.loaded=true;if(onFinishedLoading)onFinishedLoading(worker);if(worker.runPthread){worker.runPthread();delete worker.runPthread}}else if(cmd===\"print\"){out(\"Thread \"+d[\"threadId\"]+\": \"+d[\"text\"])}else if(cmd===\"printErr\"){err(\"Thread \"+d[\"threadId\"]+\": \"+d[\"text\"])}else if(cmd===\"alert\"){alert(\"Thread \"+d[\"threadId\"]+\": \"+d[\"text\"])}else if(cmd===\"exit\"){var detached=worker.pthread&&Atomics.load(GROWABLE_HEAP_U32(),worker.pthread.thread+68>>2);if(detached){PThread.returnWorkerToPool(worker)}}else if(cmd===\"cancelDone\"){PThread.returnWorkerToPool(worker)}else if(cmd===\"objectTransfer\"){PThread.receiveObjectTransfer(e.data)}else if(e.data.target===\"setimmediate\"){worker.postMessage(e.data)}else{err(\"worker sent an unknown command \"+cmd)}PThread.currentProxiedOperationCallerThread=undefined};worker.onerror=function(e){err(\"pthread sent an error! \"+e.filename+\":\"+e.lineno+\": \"+e.message)};if(ENVIRONMENT_IS_NODE){worker.on(\"message\",function(data){worker.onmessage({data:data})});worker.on(\"error\",function(data){worker.onerror(data)});worker.on(\"exit\",function(data){console.log(\"worker exited - TODO: update the worker queue?\")})}worker.postMessage({\"cmd\":\"load\",\"urlOrBlob\":Module[\"mainScriptUrlOrBlob\"]||_scriptDir,\"wasmMemory\":wasmMemory,\"wasmModule\":wasmModule,\"DYNAMIC_BASE\":DYNAMIC_BASE,\"DYNAMICTOP_PTR\":DYNAMICTOP_PTR})},allocateUnusedWorker:function(){var pthreadMainJs=locateFile(\"tfjs-backend-wasm-threaded-simd.worker.js\");PThread.unusedWorkers.push(new Worker(pthreadMainJs))},getNewWorker:function(){if(PThread.unusedWorkers.length==0){PThread.allocateUnusedWorker();PThread.loadWasmModuleToWorker(PThread.unusedWorkers[0])}if(PThread.unusedWorkers.length>0)return PThread.unusedWorkers.pop();else return null},busySpinWait:function(msecs){var t=performance.now()+msecs;while(performance.now()>2]=value;return value}function _atexit(func,arg){if(ENVIRONMENT_IS_PTHREAD)return _emscripten_proxy_to_main_thread_js(1,1,func,arg);__ATEXIT__.unshift({func:func,arg:arg})}function __emscripten_notify_thread_queue(targetThreadId,mainThreadId){if(targetThreadId==mainThreadId){postMessage({\"cmd\":\"processQueuedMainThreadWork\"})}else if(ENVIRONMENT_IS_PTHREAD){postMessage({\"targetThread\":targetThreadId,\"cmd\":\"processThreadQueue\"})}else{var pthread=PThread.pthreads[targetThreadId];var worker=pthread&&pthread.worker;if(!worker){return}worker.postMessage({\"cmd\":\"processThreadQueue\"})}return 1}function _abort(){abort()}function _emscripten_conditional_set_current_thread_status(expectedStatus,newStatus){expectedStatus=expectedStatus|0;newStatus=newStatus|0}function _emscripten_futex_wait(addr,val,timeout){if(addr<=0||addr>GROWABLE_HEAP_I8().length||addr&3!=0)return-28;if(ENVIRONMENT_IS_WORKER){var ret=Atomics.wait(GROWABLE_HEAP_I32(),addr>>2,val,timeout);if(ret===\"timed-out\")return-73;if(ret===\"not-equal\")return-6;if(ret===\"ok\")return 0;throw\"Atomics.wait returned an unexpected value 
\"+ret}else{var loadedVal=Atomics.load(GROWABLE_HEAP_I32(),addr>>2);if(val!=loadedVal)return-6;var tNow=performance.now();var tEnd=tNow+timeout;Atomics.store(GROWABLE_HEAP_I32(),__main_thread_futex_wait_address>>2,addr);var ourWaitAddress=addr;while(addr==ourWaitAddress){tNow=performance.now();if(tNow>tEnd){return-73}_emscripten_main_thread_process_queued_calls();addr=Atomics.load(GROWABLE_HEAP_I32(),__main_thread_futex_wait_address>>2)}return 0}}function _emscripten_is_main_browser_thread(){return __pthread_is_main_browser_thread|0}function _emscripten_is_main_runtime_thread(){return __pthread_is_main_runtime_thread|0}function _emscripten_memcpy_big(dest,src,num){GROWABLE_HEAP_U8().copyWithin(dest,src,src+num)}function _emscripten_num_logical_cores(){return navigator[\"hardwareConcurrency\"]}function _emscripten_proxy_to_main_thread_js(index,sync){var numCallArgs=arguments.length-2;var stack=stackSave();var args=stackAlloc(numCallArgs*8);var b=args>>3;for(var i=0;i>3]);buf+=8}else{buf=buf+3&~3;args.push(GROWABLE_HEAP_I32()[buf>>2]);buf+=4}}return args}function _emscripten_receive_on_main_thread_js(index,numCallArgs,args){_emscripten_receive_on_main_thread_js_callArgs.length=numCallArgs;var b=args>>3;for(var i=0;i>>16);updateGlobalBufferAndViews(wasmMemory.buffer);return 1}catch(e){}}function _emscripten_resize_heap(requestedSize){requestedSize=requestedSize>>>0;var oldSize=_emscripten_get_heap_size();if(requestedSize<=oldSize){return false}var PAGE_MULTIPLE=65536;var maxHeapSize=2147483648;if(requestedSize>maxHeapSize){return false}var minHeapSize=16777216;for(var cutDown=1;cutDown<=4;cutDown*=2){var overGrownHeapSize=oldSize*(1+.2/cutDown);overGrownHeapSize=Math.min(overGrownHeapSize,requestedSize+100663296);var newSize=Math.min(maxHeapSize,alignUp(Math.max(minHeapSize,requestedSize,overGrownHeapSize),PAGE_MULTIPLE));var replacement=emscripten_realloc_buffer(newSize);if(replacement){return true}}return false}var JSEvents={keyEvent:0,mouseEvent:0,wheelEvent:0,uiEvent:0,focusEvent:0,deviceOrientationEvent:0,deviceMotionEvent:0,fullscreenChangeEvent:0,pointerlockChangeEvent:0,visibilityChangeEvent:0,touchEvent:0,previousFullscreenElement:null,previousScreenX:null,previousScreenY:null,removeEventListenersRegistered:false,removeAllEventListeners:function(){for(var i=JSEvents.eventHandlers.length-1;i>=0;--i){JSEvents._removeHandler(i)}JSEvents.eventHandlers=[];JSEvents.deferredCalls=[]},registerRemoveEventListeners:function(){if(!JSEvents.removeEventListenersRegistered){__ATEXIT__.push(JSEvents.removeAllEventListeners);JSEvents.removeEventListenersRegistered=true}},deferredCalls:[],deferCall:function(targetFunction,precedence,argsList){function arraysHaveEqualContent(arrA,arrB){if(arrA.length!=arrB.length)return false;for(var i in arrA){if(arrA[i]!=arrB[i])return false}return true}for(var i in JSEvents.deferredCalls){var call=JSEvents.deferredCalls[i];if(call.targetFunction==targetFunction&&arraysHaveEqualContent(call.argsList,argsList)){return}}JSEvents.deferredCalls.push({targetFunction:targetFunction,precedence:precedence,argsList:argsList});JSEvents.deferredCalls.sort(function(x,y){return x.precedence>2]=eventTypeId;GROWABLE_HEAP_I32()[varargs+4>>2]=eventData;GROWABLE_HEAP_I32()[varargs+8>>2]=userData;_emscripten_async_queue_on_thread_(targetThread,637534208,eventHandlerFunc,eventData,varargs);stackRestore(stackTop)},getTargetThreadForEventCallback:function(targetThread){switch(targetThread){case 1:return 0;case 2:return PThread.currentProxiedOperationCallerThread;default:return 
targetThread}},getNodeNameForTarget:function(target){if(!target)return\"\";if(target==window)return\"#window\";if(target==screen)return\"#screen\";return target&&target.nodeName?target.nodeName:\"\"},fullscreenEnabled:function(){return document.fullscreenEnabled||document.webkitFullscreenEnabled}};function stringToNewUTF8(jsString){var length=lengthBytesUTF8(jsString)+1;var cString=_malloc(length);stringToUTF8(jsString,cString,length);return cString}function _emscripten_set_offscreencanvas_size_on_target_thread_js(targetThread,targetCanvas,width,height){var stackTop=stackSave();var varargs=stackAlloc(12);var targetCanvasPtr=0;if(targetCanvas){targetCanvasPtr=stringToNewUTF8(targetCanvas)}GROWABLE_HEAP_I32()[varargs>>2]=targetCanvasPtr;GROWABLE_HEAP_I32()[varargs+4>>2]=width;GROWABLE_HEAP_I32()[varargs+8>>2]=height;_emscripten_async_queue_on_thread_(targetThread,657457152,0,targetCanvasPtr,varargs);stackRestore(stackTop)}function _emscripten_set_offscreencanvas_size_on_target_thread(targetThread,targetCanvas,width,height){targetCanvas=targetCanvas?UTF8ToString(targetCanvas):\"\";_emscripten_set_offscreencanvas_size_on_target_thread_js(targetThread,targetCanvas,width,height)}function __maybeCStringToJsString(cString){return cString>2?UTF8ToString(cString):cString}var specialHTMLTargets=[0,typeof document!==\"undefined\"?document:0,typeof window!==\"undefined\"?window:0];function __findEventTarget(target){target=__maybeCStringToJsString(target);var domElement=specialHTMLTargets[target]||(typeof document!==\"undefined\"?document.querySelector(target):undefined);return domElement}function __findCanvasEventTarget(target){return __findEventTarget(target)}function _emscripten_set_canvas_element_size_calling_thread(target,width,height){var canvas=__findCanvasEventTarget(target);if(!canvas)return-4;if(canvas.canvasSharedPtr){GROWABLE_HEAP_I32()[canvas.canvasSharedPtr>>2]=width;GROWABLE_HEAP_I32()[canvas.canvasSharedPtr+4>>2]=height}if(canvas.offscreenCanvas||!canvas.controlTransferredOffscreen){if(canvas.offscreenCanvas)canvas=canvas.offscreenCanvas;var autoResizeViewport=false;if(canvas.GLctxObject&&canvas.GLctxObject.GLctx){var prevViewport=canvas.GLctxObject.GLctx.getParameter(2978);autoResizeViewport=prevViewport[0]===0&&prevViewport[1]===0&&prevViewport[2]===canvas.width&&prevViewport[3]===canvas.height}canvas.width=width;canvas.height=height;if(autoResizeViewport){canvas.GLctxObject.GLctx.viewport(0,0,width,height)}}else if(canvas.canvasSharedPtr){var targetThread=GROWABLE_HEAP_I32()[canvas.canvasSharedPtr+8>>2];_emscripten_set_offscreencanvas_size_on_target_thread(targetThread,target,width,height);return 1}else{return-4}return 0}function _emscripten_set_canvas_element_size_main_thread(target,width,height){if(ENVIRONMENT_IS_PTHREAD)return _emscripten_proxy_to_main_thread_js(2,1,target,width,height);return _emscripten_set_canvas_element_size_calling_thread(target,width,height)}function _emscripten_set_canvas_element_size(target,width,height){var canvas=__findCanvasEventTarget(target);if(canvas){return _emscripten_set_canvas_element_size_calling_thread(target,width,height)}else{return _emscripten_set_canvas_element_size_main_thread(target,width,height)}}function _emscripten_set_current_thread_status(newStatus){newStatus=newStatus|0}function _emscripten_set_thread_name(threadId,name){threadId=threadId|0;name=name|0}function __webgl_enable_ANGLE_instanced_arrays(ctx){var 
ext=ctx.getExtension(\"ANGLE_instanced_arrays\");if(ext){ctx[\"vertexAttribDivisor\"]=function(index,divisor){ext[\"vertexAttribDivisorANGLE\"](index,divisor)};ctx[\"drawArraysInstanced\"]=function(mode,first,count,primcount){ext[\"drawArraysInstancedANGLE\"](mode,first,count,primcount)};ctx[\"drawElementsInstanced\"]=function(mode,count,type,indices,primcount){ext[\"drawElementsInstancedANGLE\"](mode,count,type,indices,primcount)};return 1}}function __webgl_enable_OES_vertex_array_object(ctx){var ext=ctx.getExtension(\"OES_vertex_array_object\");if(ext){ctx[\"createVertexArray\"]=function(){return ext[\"createVertexArrayOES\"]()};ctx[\"deleteVertexArray\"]=function(vao){ext[\"deleteVertexArrayOES\"](vao)};ctx[\"bindVertexArray\"]=function(vao){ext[\"bindVertexArrayOES\"](vao)};ctx[\"isVertexArray\"]=function(vao){return ext[\"isVertexArrayOES\"](vao)};return 1}}function __webgl_enable_WEBGL_draw_buffers(ctx){var ext=ctx.getExtension(\"WEBGL_draw_buffers\");if(ext){ctx[\"drawBuffers\"]=function(n,bufs){ext[\"drawBuffersWEBGL\"](n,bufs)};return 1}}var GL={counter:1,lastError:0,buffers:[],mappedBuffers:{},programs:[],framebuffers:[],renderbuffers:[],textures:[],uniforms:[],shaders:[],vaos:[],contexts:{},currentContext:null,offscreenCanvases:{},timerQueriesEXT:[],programInfos:{},stringCache:{},unpackAlignment:4,init:function(){var miniTempFloatBuffer=new Float32Array(GL.MINI_TEMP_BUFFER_SIZE);for(var i=0;i>2]:-1;source+=UTF8ToString(GROWABLE_HEAP_I32()[string+i*4>>2],len<0?undefined:len)}return source},createContext:function(canvas,webGLContextAttributes){var ctx=canvas.getContext(\"webgl\",webGLContextAttributes);if(!ctx)return 0;var handle=GL.registerContext(ctx,webGLContextAttributes);return handle},registerContext:function(ctx,webGLContextAttributes){var handle=_malloc(8);GROWABLE_HEAP_I32()[handle+4>>2]=_pthread_self();var context={handle:handle,attributes:webGLContextAttributes,version:webGLContextAttributes.majorVersion,GLctx:ctx};if(ctx.canvas)ctx.canvas.GLctxObject=context;GL.contexts[handle]=context;if(typeof webGLContextAttributes.enableExtensionsByDefault===\"undefined\"||webGLContextAttributes.enableExtensionsByDefault){GL.initExtensions(context)}return handle},makeContextCurrent:function(contextHandle){GL.currentContext=GL.contexts[contextHandle];Module.ctx=GLctx=GL.currentContext&&GL.currentContext.GLctx;return!(contextHandle&&!GLctx)},getContext:function(contextHandle){return GL.contexts[contextHandle]},deleteContext:function(contextHandle){if(GL.currentContext===GL.contexts[contextHandle])GL.currentContext=null;if(typeof JSEvents===\"object\")JSEvents.removeAllHandlersOnTarget(GL.contexts[contextHandle].GLctx.canvas);if(GL.contexts[contextHandle]&&GL.contexts[contextHandle].GLctx.canvas)GL.contexts[contextHandle].GLctx.canvas.GLctxObject=undefined;_free(GL.contexts[contextHandle].handle);GL.contexts[contextHandle]=null},initExtensions:function(context){if(!context)context=GL.currentContext;if(context.initExtensionsDone)return;context.initExtensionsDone=true;var GLctx=context.GLctx;__webgl_enable_ANGLE_instanced_arrays(GLctx);__webgl_enable_OES_vertex_array_object(GLctx);__webgl_enable_WEBGL_draw_buffers(GLctx);GLctx.disjointTimerQueryExt=GLctx.getExtension(\"EXT_disjoint_timer_query\");var 
automaticallyEnabledExtensions=[\"OES_texture_float\",\"OES_texture_half_float\",\"OES_standard_derivatives\",\"OES_vertex_array_object\",\"WEBGL_compressed_texture_s3tc\",\"WEBGL_depth_texture\",\"OES_element_index_uint\",\"EXT_texture_filter_anisotropic\",\"EXT_frag_depth\",\"WEBGL_draw_buffers\",\"ANGLE_instanced_arrays\",\"OES_texture_float_linear\",\"OES_texture_half_float_linear\",\"EXT_blend_minmax\",\"EXT_shader_texture_lod\",\"EXT_texture_norm16\",\"WEBGL_compressed_texture_pvrtc\",\"EXT_color_buffer_half_float\",\"WEBGL_color_buffer_float\",\"EXT_sRGB\",\"WEBGL_compressed_texture_etc1\",\"EXT_disjoint_timer_query\",\"WEBGL_compressed_texture_etc\",\"WEBGL_compressed_texture_astc\",\"EXT_color_buffer_float\",\"WEBGL_compressed_texture_s3tc_srgb\",\"EXT_disjoint_timer_query_webgl2\",\"WEBKIT_WEBGL_compressed_texture_pvrtc\"];var exts=GLctx.getSupportedExtensions()||[];exts.forEach(function(ext){if(automaticallyEnabledExtensions.indexOf(ext)!=-1){GLctx.getExtension(ext)}})},populateUniformTable:function(program){var p=GL.programs[program];var ptable=GL.programInfos[program]={uniforms:{},maxUniformLength:0,maxAttributeLength:-1,maxUniformBlockNameLength:-1};var utable=ptable.uniforms;var numUniforms=GLctx.getProgramParameter(p,35718);for(var i=0;i>2;contextAttributes[\"alpha\"]=!!GROWABLE_HEAP_I32()[a+(0>>2)];contextAttributes[\"depth\"]=!!GROWABLE_HEAP_I32()[a+(4>>2)];contextAttributes[\"stencil\"]=!!GROWABLE_HEAP_I32()[a+(8>>2)];contextAttributes[\"antialias\"]=!!GROWABLE_HEAP_I32()[a+(12>>2)];contextAttributes[\"premultipliedAlpha\"]=!!GROWABLE_HEAP_I32()[a+(16>>2)];contextAttributes[\"preserveDrawingBuffer\"]=!!GROWABLE_HEAP_I32()[a+(20>>2)];var powerPreference=GROWABLE_HEAP_I32()[a+(24>>2)];contextAttributes[\"powerPreference\"]=__emscripten_webgl_power_preferences[powerPreference];contextAttributes[\"failIfMajorPerformanceCaveat\"]=!!GROWABLE_HEAP_I32()[a+(28>>2)];contextAttributes.majorVersion=GROWABLE_HEAP_I32()[a+(32>>2)];contextAttributes.minorVersion=GROWABLE_HEAP_I32()[a+(36>>2)];contextAttributes.enableExtensionsByDefault=GROWABLE_HEAP_I32()[a+(40>>2)];contextAttributes.explicitSwapControl=GROWABLE_HEAP_I32()[a+(44>>2)];contextAttributes.proxyContextToMainThread=GROWABLE_HEAP_I32()[a+(48>>2)];contextAttributes.renderViaOffscreenBackBuffer=GROWABLE_HEAP_I32()[a+(52>>2)];var canvas=__findCanvasEventTarget(target);if(!canvas){return-4}if(contextAttributes.explicitSwapControl){return-1}var contextHandle=GL.createContext(canvas,contextAttributes);return contextHandle}function _emscripten_webgl_create_context(a0,a1){return _emscripten_webgl_do_create_context(a0,a1)}var PATH={splitPath:function(filename){var splitPathRe=/^(\\/?|)([\\s\\S]*?)((?:\\.{1,2}|[^\\/]+?|)(\\.[^.\\/]*|))(?:[\\/]*)$/;return splitPathRe.exec(filename).slice(1)},normalizeArray:function(parts,allowAboveRoot){var up=0;for(var i=parts.length-1;i>=0;i--){var last=parts[i];if(last===\".\"){parts.splice(i,1)}else if(last===\"..\"){parts.splice(i,1);up++}else if(up){parts.splice(i,1);up--}}if(allowAboveRoot){for(;up;up--){parts.unshift(\"..\")}}return parts},normalize:function(path){var isAbsolute=path.charAt(0)===\"/\",trailingSlash=path.substr(-1)===\"/\";path=PATH.normalizeArray(path.split(\"/\").filter(function(p){return!!p}),!isAbsolute).join(\"/\");if(!path&&!isAbsolute){path=\".\"}if(path&&trailingSlash){path+=\"/\"}return(isAbsolute?\"/\":\"\")+path},dirname:function(path){var result=PATH.splitPath(path),root=result[0],dir=result[1];if(!root&&!dir){return\".\"}if(dir){dir=dir.substr(0,dir.length-1)}return 
root+dir},basename:function(path){if(path===\"/\")return\"/\";var lastSlash=path.lastIndexOf(\"/\");if(lastSlash===-1)return path;return path.substr(lastSlash+1)},extname:function(path){return PATH.splitPath(path)[3]},join:function(){var paths=Array.prototype.slice.call(arguments,0);return PATH.normalize(paths.join(\"/\"))},join2:function(l,r){return PATH.normalize(l+\"/\"+r)}};var SYSCALLS={mappings:{},buffers:[null,[],[]],printChar:function(stream,curr){var buffer=SYSCALLS.buffers[stream];if(curr===0||curr===10){(stream===1?out:err)(UTF8ArrayToString(buffer,0));buffer.length=0}else{buffer.push(curr)}},varargs:undefined,get:function(){SYSCALLS.varargs+=4;var ret=GROWABLE_HEAP_I32()[SYSCALLS.varargs-4>>2];return ret},getStr:function(ptr){var ret=UTF8ToString(ptr);return ret},get64:function(low,high){return low}};function _fd_close(fd){if(ENVIRONMENT_IS_PTHREAD)return _emscripten_proxy_to_main_thread_js(3,1,fd);return 0}function _fd_seek(fd,offset_low,offset_high,whence,newOffset){if(ENVIRONMENT_IS_PTHREAD)return _emscripten_proxy_to_main_thread_js(4,1,fd,offset_low,offset_high,whence,newOffset)}function _fd_write(fd,iov,iovcnt,pnum){if(ENVIRONMENT_IS_PTHREAD)return _emscripten_proxy_to_main_thread_js(5,1,fd,iov,iovcnt,pnum);var num=0;for(var i=0;i>2];var len=GROWABLE_HEAP_I32()[iov+(i*8+4)>>2];for(var j=0;j>2]=num;return 0}function _pthread_cleanup_pop(execute){var routine=PThread.exitHandlers.pop();if(execute)routine()}function _pthread_cleanup_push(routine,arg){if(PThread.exitHandlers===null){PThread.exitHandlers=[]}PThread.exitHandlers.push(function(){dynCall_vi(routine,arg)})}function __spawn_thread(threadParams){if(ENVIRONMENT_IS_PTHREAD)throw\"Internal Error! _spawn_thread() can only ever be called from main application thread!\";var worker=PThread.getNewWorker();if(worker.pthread!==undefined)throw\"Internal error!\";if(!threadParams.pthread_ptr)throw\"Internal error, no pthread ptr!\";PThread.runningWorkers.push(worker);var tlsMemory=_malloc(128*4);for(var i=0;i<128;++i){GROWABLE_HEAP_I32()[tlsMemory+i*4>>2]=0}var stackHigh=threadParams.stackBase+threadParams.stackSize;var pthread=PThread.pthreads[threadParams.pthread_ptr]={worker:worker,stackBase:threadParams.stackBase,stackSize:threadParams.stackSize,allocatedOwnStack:threadParams.allocatedOwnStack,thread:threadParams.pthread_ptr,threadInfoStruct:threadParams.pthread_ptr};var tis=pthread.threadInfoStruct>>2;Atomics.store(GROWABLE_HEAP_U32(),tis+(0>>2),0);Atomics.store(GROWABLE_HEAP_U32(),tis+(4>>2),0);Atomics.store(GROWABLE_HEAP_U32(),tis+(8>>2),0);Atomics.store(GROWABLE_HEAP_U32(),tis+(68>>2),threadParams.detached);Atomics.store(GROWABLE_HEAP_U32(),tis+(104>>2),tlsMemory);Atomics.store(GROWABLE_HEAP_U32(),tis+(48>>2),0);Atomics.store(GROWABLE_HEAP_U32(),tis+(40>>2),pthread.threadInfoStruct);Atomics.store(GROWABLE_HEAP_U32(),tis+(44>>2),42);Atomics.store(GROWABLE_HEAP_U32(),tis+(108>>2),threadParams.stackSize);Atomics.store(GROWABLE_HEAP_U32(),tis+(84>>2),threadParams.stackSize);Atomics.store(GROWABLE_HEAP_U32(),tis+(80>>2),stackHigh);Atomics.store(GROWABLE_HEAP_U32(),tis+(108+8>>2),stackHigh);Atomics.store(GROWABLE_HEAP_U32(),tis+(108+12>>2),threadParams.detached);Atomics.store(GROWABLE_HEAP_U32(),tis+(108+20>>2),threadParams.schedPolicy);Atomics.store(GROWABLE_HEAP_U32(),tis+(108+24>>2),threadParams.schedPrio);var global_libc=_emscripten_get_global_libc();var global_locale=global_libc+40;Atomics.store(GROWABLE_HEAP_U32(),tis+(176>>2),global_locale);worker.pthread=pthread;var 
msg={\"cmd\":\"run\",\"start_routine\":threadParams.startRoutine,\"arg\":threadParams.arg,\"threadInfoStruct\":threadParams.pthread_ptr,\"selfThreadId\":threadParams.pthread_ptr,\"parentThreadId\":threadParams.parent_pthread_ptr,\"stackBase\":threadParams.stackBase,\"stackSize\":threadParams.stackSize};worker.runPthread=function(){msg.time=performance.now();worker.postMessage(msg,threadParams.transferList)};if(worker.loaded){worker.runPthread();delete worker.runPthread}}function _pthread_getschedparam(thread,policy,schedparam){if(!policy&&!schedparam)return ERRNO_CODES.EINVAL;if(!thread){err(\"pthread_getschedparam called with a null thread pointer!\");return ERRNO_CODES.ESRCH}var self=GROWABLE_HEAP_I32()[thread+12>>2];if(self!==thread){err(\"pthread_getschedparam attempted on thread \"+thread+\", which does not point to a valid thread, or does not exist anymore!\");return ERRNO_CODES.ESRCH}var schedPolicy=Atomics.load(GROWABLE_HEAP_U32(),thread+108+20>>2);var schedPrio=Atomics.load(GROWABLE_HEAP_U32(),thread+108+24>>2);if(policy)GROWABLE_HEAP_I32()[policy>>2]=schedPolicy;if(schedparam)GROWABLE_HEAP_I32()[schedparam>>2]=schedPrio;return 0}function _pthread_self(){return __pthread_ptr|0}Module[\"_pthread_self\"]=_pthread_self;function _pthread_create(pthread_ptr,attr,start_routine,arg){if(typeof SharedArrayBuffer===\"undefined\"){err(\"Current environment does not support SharedArrayBuffer, pthreads are not available!\");return 6}if(!pthread_ptr){err(\"pthread_create called with a null thread pointer!\");return 28}var transferList=[];var error=0;if(ENVIRONMENT_IS_PTHREAD&&(transferList.length===0||error)){return _emscripten_sync_run_in_main_thread_4(687865856,pthread_ptr,attr,start_routine,arg)}if(error)return error;var stackSize=0;var stackBase=0;var detached=0;var schedPolicy=0;var schedPrio=0;if(attr){stackSize=GROWABLE_HEAP_I32()[attr>>2];stackSize+=81920;stackBase=GROWABLE_HEAP_I32()[attr+8>>2];detached=GROWABLE_HEAP_I32()[attr+12>>2]!==0;var inheritSched=GROWABLE_HEAP_I32()[attr+16>>2]===0;if(inheritSched){var prevSchedPolicy=GROWABLE_HEAP_I32()[attr+20>>2];var prevSchedPrio=GROWABLE_HEAP_I32()[attr+24>>2];var parentThreadPtr=PThread.currentProxiedOperationCallerThread?PThread.currentProxiedOperationCallerThread:_pthread_self();_pthread_getschedparam(parentThreadPtr,attr+20,attr+24);schedPolicy=GROWABLE_HEAP_I32()[attr+20>>2];schedPrio=GROWABLE_HEAP_I32()[attr+24>>2];GROWABLE_HEAP_I32()[attr+20>>2]=prevSchedPolicy;GROWABLE_HEAP_I32()[attr+24>>2]=prevSchedPrio}else{schedPolicy=GROWABLE_HEAP_I32()[attr+20>>2];schedPrio=GROWABLE_HEAP_I32()[attr+24>>2]}}else{stackSize=2097152}var allocatedOwnStack=stackBase==0;if(allocatedOwnStack){stackBase=_memalign(16,stackSize)}else{stackBase-=stackSize;assert(stackBase>0)}var threadInfoStruct=_malloc(232);for(var i=0;i<232>>2;++i)GROWABLE_HEAP_U32()[(threadInfoStruct>>2)+i]=0;GROWABLE_HEAP_I32()[pthread_ptr>>2]=threadInfoStruct;GROWABLE_HEAP_I32()[threadInfoStruct+12>>2]=threadInfoStruct;var headPtr=threadInfoStruct+156;GROWABLE_HEAP_I32()[headPtr>>2]=headPtr;var threadParams={stackBase:stackBase,stackSize:stackSize,allocatedOwnStack:allocatedOwnStack,schedPolicy:schedPolicy,schedPrio:schedPrio,detached:detached,startRoutine:start_routine,pthread_ptr:threadInfoStruct,parent_pthread_ptr:_pthread_self(),arg:arg,transferList:transferList};if(ENVIRONMENT_IS_PTHREAD){threadParams.cmd=\"spawnThread\";postMessage(threadParams,transferList)}else{__spawn_thread(threadParams)}return 0}function _roundf(d){d=+d;return d>=+0?+Math_floor(d+ 
+.5):+Math_ceil(d-+.5)}function _sysconf(name){if(ENVIRONMENT_IS_PTHREAD)return _emscripten_proxy_to_main_thread_js(6,1,name);switch(name){case 30:return 16384;case 85:var maxHeapSize=2147483648;return maxHeapSize/16384;case 132:case 133:case 12:case 137:case 138:case 15:case 235:case 16:case 17:case 18:case 19:case 20:case 149:case 13:case 10:case 236:case 153:case 9:case 21:case 22:case 159:case 154:case 14:case 77:case 78:case 139:case 80:case 81:case 82:case 68:case 67:case 164:case 11:case 29:case 47:case 48:case 95:case 52:case 51:case 46:case 79:return 200809;case 27:case 246:case 127:case 128:case 23:case 24:case 160:case 161:case 181:case 182:case 242:case 183:case 184:case 243:case 244:case 245:case 165:case 178:case 179:case 49:case 50:case 168:case 169:case 175:case 170:case 171:case 172:case 97:case 76:case 32:case 173:case 35:return-1;case 176:case 177:case 7:case 155:case 8:case 157:case 125:case 126:case 92:case 93:case 129:case 130:case 131:case 94:case 91:return 1;case 74:case 60:case 69:case 70:case 4:return 1024;case 31:case 42:case 72:return 32;case 87:case 26:case 33:return 2147483647;case 34:case 1:return 47839;case 38:case 36:return 99;case 43:case 37:return 2048;case 0:return 2097152;case 3:return 65536;case 28:return 32768;case 44:return 32767;case 75:return 16384;case 39:return 1e3;case 89:return 700;case 71:return 256;case 40:return 255;case 2:return 100;case 180:return 64;case 25:return 20;case 5:return 16;case 6:return 6;case 73:return 4;case 84:{if(typeof navigator===\"object\")return navigator[\"hardwareConcurrency\"]||1;return 1}}setErrNo(28);return-1}if(!ENVIRONMENT_IS_PTHREAD)PThread.initMainThreadBlock();else PThread.initWorker();var GLctx;GL.init();var proxiedFunctionTable=[null,_atexit,_emscripten_set_canvas_element_size_main_thread,_fd_close,_fd_seek,_fd_write,_sysconf];var asmLibraryArg={\"e\":___assert_fail,\"r\":___call_main,\"w\":__emscripten_notify_thread_queue,\"a\":_abort,\"l\":_emscripten_conditional_set_current_thread_status,\"d\":_emscripten_futex_wait,\"c\":_emscripten_futex_wake,\"h\":_emscripten_get_now,\"g\":_emscripten_is_main_browser_thread,\"x\":_emscripten_is_main_runtime_thread,\"q\":_emscripten_memcpy_big,\"B\":_emscripten_num_logical_cores,\"t\":_emscripten_receive_on_main_thread_js,\"A\":_emscripten_resize_heap,\"u\":_emscripten_set_canvas_element_size,\"k\":_emscripten_set_current_thread_status,\"s\":_emscripten_set_thread_name,\"v\":_emscripten_webgl_create_context,\"m\":_fd_close,\"o\":_fd_seek,\"i\":_fd_write,\"p\":initPthreadsJS,\"memory\":wasmMemory||Module[\"wasmMemory\"],\"y\":_pthread_cleanup_pop,\"z\":_pthread_cleanup_push,\"j\":_pthread_create,\"b\":_pthread_self,\"f\":_roundf,\"n\":_sysconf,\"table\":wasmTable};var asm=createWasm();Module[\"asm\"]=asm;var ___wasm_call_ctors=Module[\"___wasm_call_ctors\"]=function(){return(___wasm_call_ctors=Module[\"___wasm_call_ctors\"]=Module[\"asm\"][\"C\"]).apply(null,arguments)};var _init=Module[\"_init\"]=function(){return(_init=Module[\"_init\"]=Module[\"asm\"][\"D\"]).apply(null,arguments)};var _register_tensor=Module[\"_register_tensor\"]=function(){return(_register_tensor=Module[\"_register_tensor\"]=Module[\"asm\"][\"E\"]).apply(null,arguments)};var _dispose_data=Module[\"_dispose_data\"]=function(){return(_dispose_data=Module[\"_dispose_data\"]=Module[\"asm\"][\"F\"]).apply(null,arguments)};var _dispose=Module[\"_dispose\"]=function(){return(_dispose=Module[\"_dispose\"]=Module[\"asm\"][\"G\"]).apply(null,arguments)};var 
_Abs=Module[\"_Abs\"]=function(){return(_Abs=Module[\"_Abs\"]=Module[\"asm\"][\"H\"]).apply(null,arguments)};var _Add=Module[\"_Add\"]=function(){return(_Add=Module[\"_Add\"]=Module[\"asm\"][\"I\"]).apply(null,arguments)};var _AddN=Module[\"_AddN\"]=function(){return(_AddN=Module[\"_AddN\"]=Module[\"asm\"][\"J\"]).apply(null,arguments)};var _ArgMax=Module[\"_ArgMax\"]=function(){return(_ArgMax=Module[\"_ArgMax\"]=Module[\"asm\"][\"K\"]).apply(null,arguments)};var _AvgPool=Module[\"_AvgPool\"]=function(){return(_AvgPool=Module[\"_AvgPool\"]=Module[\"asm\"][\"L\"]).apply(null,arguments)};var _BatchMatMul=Module[\"_BatchMatMul\"]=function(){return(_BatchMatMul=Module[\"_BatchMatMul\"]=Module[\"asm\"][\"M\"]).apply(null,arguments)};var _ClipByValue=Module[\"_ClipByValue\"]=function(){return(_ClipByValue=Module[\"_ClipByValue\"]=Module[\"asm\"][\"N\"]).apply(null,arguments)};var _Conv2D=Module[\"_Conv2D\"]=function(){return(_Conv2D=Module[\"_Conv2D\"]=Module[\"asm\"][\"O\"]).apply(null,arguments)};var _Conv2DBackpropInput=Module[\"_Conv2DBackpropInput\"]=function(){return(_Conv2DBackpropInput=Module[\"_Conv2DBackpropInput\"]=Module[\"asm\"][\"P\"]).apply(null,arguments)};var _Cos=Module[\"_Cos\"]=function(){return(_Cos=Module[\"_Cos\"]=Module[\"asm\"][\"Q\"]).apply(null,arguments)};var _CropAndResize=Module[\"_CropAndResize\"]=function(){return(_CropAndResize=Module[\"_CropAndResize\"]=Module[\"asm\"][\"R\"]).apply(null,arguments)};var _Cumsum=Module[\"_Cumsum\"]=function(){return(_Cumsum=Module[\"_Cumsum\"]=Module[\"asm\"][\"S\"]).apply(null,arguments)};var _DepthToSpace=Module[\"_DepthToSpace\"]=function(){return(_DepthToSpace=Module[\"_DepthToSpace\"]=Module[\"asm\"][\"T\"]).apply(null,arguments)};var _DepthwiseConv2dNative=Module[\"_DepthwiseConv2dNative\"]=function(){return(_DepthwiseConv2dNative=Module[\"_DepthwiseConv2dNative\"]=Module[\"asm\"][\"U\"]).apply(null,arguments)};var _Div=Module[\"_Div\"]=function(){return(_Div=Module[\"_Div\"]=Module[\"asm\"][\"V\"]).apply(null,arguments)};var _Equal=Module[\"_Equal\"]=function(){return(_Equal=Module[\"_Equal\"]=Module[\"asm\"][\"W\"]).apply(null,arguments)};var _Exp=Module[\"_Exp\"]=function(){return(_Exp=Module[\"_Exp\"]=Module[\"asm\"][\"X\"]).apply(null,arguments)};var _FlipLeftRight=Module[\"_FlipLeftRight\"]=function(){return(_FlipLeftRight=Module[\"_FlipLeftRight\"]=Module[\"asm\"][\"Y\"]).apply(null,arguments)};var _FloorDiv=Module[\"_FloorDiv\"]=function(){return(_FloorDiv=Module[\"_FloorDiv\"]=Module[\"asm\"][\"Z\"]).apply(null,arguments)};var _FusedBatchNorm=Module[\"_FusedBatchNorm\"]=function(){return(_FusedBatchNorm=Module[\"_FusedBatchNorm\"]=Module[\"asm\"][\"_\"]).apply(null,arguments)};var _FusedConv2D=Module[\"_FusedConv2D\"]=function(){return(_FusedConv2D=Module[\"_FusedConv2D\"]=Module[\"asm\"][\"$\"]).apply(null,arguments)};var _FusedDepthwiseConv2D=Module[\"_FusedDepthwiseConv2D\"]=function(){return(_FusedDepthwiseConv2D=Module[\"_FusedDepthwiseConv2D\"]=Module[\"asm\"][\"aa\"]).apply(null,arguments)};var _Gather=Module[\"_Gather\"]=function(){return(_Gather=Module[\"_Gather\"]=Module[\"asm\"][\"ba\"]).apply(null,arguments)};var _GatherNd=Module[\"_GatherNd\"]=function(){return(_GatherNd=Module[\"_GatherNd\"]=Module[\"asm\"][\"ca\"]).apply(null,arguments)};var _Greater=Module[\"_Greater\"]=function(){return(_Greater=Module[\"_Greater\"]=Module[\"asm\"][\"da\"]).apply(null,arguments)};var 
_GreaterEqual=Module[\"_GreaterEqual\"]=function(){return(_GreaterEqual=Module[\"_GreaterEqual\"]=Module[\"asm\"][\"ea\"]).apply(null,arguments)};var _Less=Module[\"_Less\"]=function(){return(_Less=Module[\"_Less\"]=Module[\"asm\"][\"fa\"]).apply(null,arguments)};var _LessEqual=Module[\"_LessEqual\"]=function(){return(_LessEqual=Module[\"_LessEqual\"]=Module[\"asm\"][\"ga\"]).apply(null,arguments)};var _Log=Module[\"_Log\"]=function(){return(_Log=Module[\"_Log\"]=Module[\"asm\"][\"ha\"]).apply(null,arguments)};var _LogicalAnd=Module[\"_LogicalAnd\"]=function(){return(_LogicalAnd=Module[\"_LogicalAnd\"]=Module[\"asm\"][\"ia\"]).apply(null,arguments)};var _Max=Module[\"_Max\"]=function(){return(_Max=Module[\"_Max\"]=Module[\"asm\"][\"ja\"]).apply(null,arguments)};var _MaxPool=Module[\"_MaxPool\"]=function(){return(_MaxPool=Module[\"_MaxPool\"]=Module[\"asm\"][\"ka\"]).apply(null,arguments)};var _Maximum=Module[\"_Maximum\"]=function(){return(_Maximum=Module[\"_Maximum\"]=Module[\"asm\"][\"la\"]).apply(null,arguments)};var _Min=Module[\"_Min\"]=function(){return(_Min=Module[\"_Min\"]=Module[\"asm\"][\"ma\"]).apply(null,arguments)};var _Minimum=Module[\"_Minimum\"]=function(){return(_Minimum=Module[\"_Minimum\"]=Module[\"asm\"][\"na\"]).apply(null,arguments)};var _Multiply=Module[\"_Multiply\"]=function(){return(_Multiply=Module[\"_Multiply\"]=Module[\"asm\"][\"oa\"]).apply(null,arguments)};var _Negate=Module[\"_Negate\"]=function(){return(_Negate=Module[\"_Negate\"]=Module[\"asm\"][\"pa\"]).apply(null,arguments)};var _NonMaxSuppressionV3=Module[\"_NonMaxSuppressionV3\"]=function(){return(_NonMaxSuppressionV3=Module[\"_NonMaxSuppressionV3\"]=Module[\"asm\"][\"qa\"]).apply(null,arguments)};var _NonMaxSuppressionV4=Module[\"_NonMaxSuppressionV4\"]=function(){return(_NonMaxSuppressionV4=Module[\"_NonMaxSuppressionV4\"]=Module[\"asm\"][\"ra\"]).apply(null,arguments)};var _NonMaxSuppressionV5=Module[\"_NonMaxSuppressionV5\"]=function(){return(_NonMaxSuppressionV5=Module[\"_NonMaxSuppressionV5\"]=Module[\"asm\"][\"sa\"]).apply(null,arguments)};var _NotEqual=Module[\"_NotEqual\"]=function(){return(_NotEqual=Module[\"_NotEqual\"]=Module[\"asm\"][\"ta\"]).apply(null,arguments)};var _OneHot=Module[\"_OneHot\"]=function(){return(_OneHot=Module[\"_OneHot\"]=Module[\"asm\"][\"ua\"]).apply(null,arguments)};var _PadV2=Module[\"_PadV2\"]=function(){return(_PadV2=Module[\"_PadV2\"]=Module[\"asm\"][\"va\"]).apply(null,arguments)};var _Pow=Module[\"_Pow\"]=function(){return(_Pow=Module[\"_Pow\"]=Module[\"asm\"][\"wa\"]).apply(null,arguments)};var _Prelu=Module[\"_Prelu\"]=function(){return(_Prelu=Module[\"_Prelu\"]=Module[\"asm\"][\"xa\"]).apply(null,arguments)};var _Relu=Module[\"_Relu\"]=function(){return(_Relu=Module[\"_Relu\"]=Module[\"asm\"][\"ya\"]).apply(null,arguments)};var _Relu6=Module[\"_Relu6\"]=function(){return(_Relu6=Module[\"_Relu6\"]=Module[\"asm\"][\"za\"]).apply(null,arguments)};var _ResizeBilinear=Module[\"_ResizeBilinear\"]=function(){return(_ResizeBilinear=Module[\"_ResizeBilinear\"]=Module[\"asm\"][\"Aa\"]).apply(null,arguments)};var _Reverse=Module[\"_Reverse\"]=function(){return(_Reverse=Module[\"_Reverse\"]=Module[\"asm\"][\"Ba\"]).apply(null,arguments)};var _RotateWithOffset=Module[\"_RotateWithOffset\"]=function(){return(_RotateWithOffset=Module[\"_RotateWithOffset\"]=Module[\"asm\"][\"Ca\"]).apply(null,arguments)};var _Rsqrt=Module[\"_Rsqrt\"]=function(){return(_Rsqrt=Module[\"_Rsqrt\"]=Module[\"asm\"][\"Da\"]).apply(null,arguments)};var 
_ScatterNd=Module[\"_ScatterNd\"]=function(){return(_ScatterNd=Module[\"_ScatterNd\"]=Module[\"asm\"][\"Ea\"]).apply(null,arguments)};var _SelectV2=Module[\"_SelectV2\"]=function(){return(_SelectV2=Module[\"_SelectV2\"]=Module[\"asm\"][\"Fa\"]).apply(null,arguments)};var _Sigmoid=Module[\"_Sigmoid\"]=function(){return(_Sigmoid=Module[\"_Sigmoid\"]=Module[\"asm\"][\"Ga\"]).apply(null,arguments)};var _Sin=Module[\"_Sin\"]=function(){return(_Sin=Module[\"_Sin\"]=Module[\"asm\"][\"Ha\"]).apply(null,arguments)};var _Softmax=Module[\"_Softmax\"]=function(){return(_Softmax=Module[\"_Softmax\"]=Module[\"asm\"][\"Ia\"]).apply(null,arguments)};var _Sqrt=Module[\"_Sqrt\"]=function(){return(_Sqrt=Module[\"_Sqrt\"]=Module[\"asm\"][\"Ja\"]).apply(null,arguments)};var _Square=Module[\"_Square\"]=function(){return(_Square=Module[\"_Square\"]=Module[\"asm\"][\"Ka\"]).apply(null,arguments)};var _SquaredDifference=Module[\"_SquaredDifference\"]=function(){return(_SquaredDifference=Module[\"_SquaredDifference\"]=Module[\"asm\"][\"La\"]).apply(null,arguments)};var _StridedSlice=Module[\"_StridedSlice\"]=function(){return(_StridedSlice=Module[\"_StridedSlice\"]=Module[\"asm\"][\"Ma\"]).apply(null,arguments)};var _Sub=Module[\"_Sub\"]=function(){return(_Sub=Module[\"_Sub\"]=Module[\"asm\"][\"Na\"]).apply(null,arguments)};var _Sum=Module[\"_Sum\"]=function(){return(_Sum=Module[\"_Sum\"]=Module[\"asm\"][\"Oa\"]).apply(null,arguments)};var _Tanh=Module[\"_Tanh\"]=function(){return(_Tanh=Module[\"_Tanh\"]=Module[\"asm\"][\"Pa\"]).apply(null,arguments)};var _Tile=Module[\"_Tile\"]=function(){return(_Tile=Module[\"_Tile\"]=Module[\"asm\"][\"Qa\"]).apply(null,arguments)};var _Transpose=Module[\"_Transpose\"]=function(){return(_Transpose=Module[\"_Transpose\"]=Module[\"asm\"][\"Ra\"]).apply(null,arguments)};var __FusedMatMul=Module[\"__FusedMatMul\"]=function(){return(__FusedMatMul=Module[\"__FusedMatMul\"]=Module[\"asm\"][\"Sa\"]).apply(null,arguments)};var _malloc=Module[\"_malloc\"]=function(){return(_malloc=Module[\"_malloc\"]=Module[\"asm\"][\"Ta\"]).apply(null,arguments)};var _free=Module[\"_free\"]=function(){return(_free=Module[\"_free\"]=Module[\"asm\"][\"Ua\"]).apply(null,arguments)};var _emscripten_get_global_libc=Module[\"_emscripten_get_global_libc\"]=function(){return(_emscripten_get_global_libc=Module[\"_emscripten_get_global_libc\"]=Module[\"asm\"][\"Va\"]).apply(null,arguments)};var ___errno_location=Module[\"___errno_location\"]=function(){return(___errno_location=Module[\"___errno_location\"]=Module[\"asm\"][\"Wa\"]).apply(null,arguments)};var ___em_js__initPthreadsJS=Module[\"___em_js__initPthreadsJS\"]=function(){return(___em_js__initPthreadsJS=Module[\"___em_js__initPthreadsJS\"]=Module[\"asm\"][\"Xa\"]).apply(null,arguments)};var _memalign=Module[\"_memalign\"]=function(){return(_memalign=Module[\"_memalign\"]=Module[\"asm\"][\"Ya\"]).apply(null,arguments)};var ___pthread_tsd_run_dtors=Module[\"___pthread_tsd_run_dtors\"]=function(){return(___pthread_tsd_run_dtors=Module[\"___pthread_tsd_run_dtors\"]=Module[\"asm\"][\"Za\"]).apply(null,arguments)};var _emscripten_main_thread_process_queued_calls=Module[\"_emscripten_main_thread_process_queued_calls\"]=function(){return(_emscripten_main_thread_process_queued_calls=Module[\"_emscripten_main_thread_process_queued_calls\"]=Module[\"asm\"][\"_a\"]).apply(null,arguments)};var 
_emscripten_current_thread_process_queued_calls=Module[\"_emscripten_current_thread_process_queued_calls\"]=function(){return(_emscripten_current_thread_process_queued_calls=Module[\"_emscripten_current_thread_process_queued_calls\"]=Module[\"asm\"][\"$a\"]).apply(null,arguments)};var _emscripten_register_main_browser_thread_id=Module[\"_emscripten_register_main_browser_thread_id\"]=function(){return(_emscripten_register_main_browser_thread_id=Module[\"_emscripten_register_main_browser_thread_id\"]=Module[\"asm\"][\"ab\"]).apply(null,arguments)};var _emscripten_main_browser_thread_id=Module[\"_emscripten_main_browser_thread_id\"]=function(){return(_emscripten_main_browser_thread_id=Module[\"_emscripten_main_browser_thread_id\"]=Module[\"asm\"][\"bb\"]).apply(null,arguments)};var _emscripten_async_run_in_main_thread=Module[\"_emscripten_async_run_in_main_thread\"]=function(){return(_emscripten_async_run_in_main_thread=Module[\"_emscripten_async_run_in_main_thread\"]=Module[\"asm\"][\"cb\"]).apply(null,arguments)};var _emscripten_sync_run_in_main_thread=Module[\"_emscripten_sync_run_in_main_thread\"]=function(){return(_emscripten_sync_run_in_main_thread=Module[\"_emscripten_sync_run_in_main_thread\"]=Module[\"asm\"][\"db\"]).apply(null,arguments)};var _emscripten_sync_run_in_main_thread_0=Module[\"_emscripten_sync_run_in_main_thread_0\"]=function(){return(_emscripten_sync_run_in_main_thread_0=Module[\"_emscripten_sync_run_in_main_thread_0\"]=Module[\"asm\"][\"eb\"]).apply(null,arguments)};var _emscripten_sync_run_in_main_thread_1=Module[\"_emscripten_sync_run_in_main_thread_1\"]=function(){return(_emscripten_sync_run_in_main_thread_1=Module[\"_emscripten_sync_run_in_main_thread_1\"]=Module[\"asm\"][\"fb\"]).apply(null,arguments)};var _emscripten_sync_run_in_main_thread_2=Module[\"_emscripten_sync_run_in_main_thread_2\"]=function(){return(_emscripten_sync_run_in_main_thread_2=Module[\"_emscripten_sync_run_in_main_thread_2\"]=Module[\"asm\"][\"gb\"]).apply(null,arguments)};var _emscripten_sync_run_in_main_thread_xprintf_varargs=Module[\"_emscripten_sync_run_in_main_thread_xprintf_varargs\"]=function(){return(_emscripten_sync_run_in_main_thread_xprintf_varargs=Module[\"_emscripten_sync_run_in_main_thread_xprintf_varargs\"]=Module[\"asm\"][\"hb\"]).apply(null,arguments)};var _emscripten_sync_run_in_main_thread_3=Module[\"_emscripten_sync_run_in_main_thread_3\"]=function(){return(_emscripten_sync_run_in_main_thread_3=Module[\"_emscripten_sync_run_in_main_thread_3\"]=Module[\"asm\"][\"ib\"]).apply(null,arguments)};var _emscripten_sync_run_in_main_thread_4=Module[\"_emscripten_sync_run_in_main_thread_4\"]=function(){return(_emscripten_sync_run_in_main_thread_4=Module[\"_emscripten_sync_run_in_main_thread_4\"]=Module[\"asm\"][\"jb\"]).apply(null,arguments)};var _emscripten_sync_run_in_main_thread_5=Module[\"_emscripten_sync_run_in_main_thread_5\"]=function(){return(_emscripten_sync_run_in_main_thread_5=Module[\"_emscripten_sync_run_in_main_thread_5\"]=Module[\"asm\"][\"kb\"]).apply(null,arguments)};var _emscripten_sync_run_in_main_thread_6=Module[\"_emscripten_sync_run_in_main_thread_6\"]=function(){return(_emscripten_sync_run_in_main_thread_6=Module[\"_emscripten_sync_run_in_main_thread_6\"]=Module[\"asm\"][\"lb\"]).apply(null,arguments)};var _emscripten_sync_run_in_main_thread_7=Module[\"_emscripten_sync_run_in_main_thread_7\"]=function(){return(_emscripten_sync_run_in_main_thread_7=Module[\"_emscripten_sync_run_in_main_thread_7\"]=Module[\"asm\"][\"mb\"]).apply(null,arguments)};var 
_emscripten_run_in_main_runtime_thread_js=Module[\"_emscripten_run_in_main_runtime_thread_js\"]=function(){return(_emscripten_run_in_main_runtime_thread_js=Module[\"_emscripten_run_in_main_runtime_thread_js\"]=Module[\"asm\"][\"nb\"]).apply(null,arguments)};var _emscripten_async_queue_on_thread_=Module[\"_emscripten_async_queue_on_thread_\"]=function(){return(_emscripten_async_queue_on_thread_=Module[\"_emscripten_async_queue_on_thread_\"]=Module[\"asm\"][\"ob\"]).apply(null,arguments)};var _emscripten_tls_init=Module[\"_emscripten_tls_init\"]=function(){return(_emscripten_tls_init=Module[\"_emscripten_tls_init\"]=Module[\"asm\"][\"pb\"]).apply(null,arguments)};var stackSave=Module[\"stackSave\"]=function(){return(stackSave=Module[\"stackSave\"]=Module[\"asm\"][\"qb\"]).apply(null,arguments)};var stackAlloc=Module[\"stackAlloc\"]=function(){return(stackAlloc=Module[\"stackAlloc\"]=Module[\"asm\"][\"rb\"]).apply(null,arguments)};var stackRestore=Module[\"stackRestore\"]=function(){return(stackRestore=Module[\"stackRestore\"]=Module[\"asm\"][\"sb\"]).apply(null,arguments)};var dynCall_vi=Module[\"dynCall_vi\"]=function(){return(dynCall_vi=Module[\"dynCall_vi\"]=Module[\"asm\"][\"tb\"]).apply(null,arguments)};var dynCall_v=Module[\"dynCall_v\"]=function(){return(dynCall_v=Module[\"dynCall_v\"]=Module[\"asm\"][\"ub\"]).apply(null,arguments)};var dynCall_ii=Module[\"dynCall_ii\"]=function(){return(dynCall_ii=Module[\"dynCall_ii\"]=Module[\"asm\"][\"vb\"]).apply(null,arguments)};Module[\"asm\"]=asm;Module[\"cwrap\"]=cwrap;Module[\"PThread\"]=PThread;Module[\"PThread\"]=PThread;Module[\"_pthread_self\"]=_pthread_self;Module[\"wasmMemory\"]=wasmMemory;Module[\"ExitStatus\"]=ExitStatus;var calledRun;Module[\"then\"]=function(func){if(calledRun){func(Module)}else{var old=Module[\"onRuntimeInitialized\"];Module[\"onRuntimeInitialized\"]=function(){if(old)old();func(Module)}}return Module};function ExitStatus(status){this.name=\"ExitStatus\";this.message=\"Program terminated with exit(\"+status+\")\";this.status=status}dependenciesFulfilled=function runCaller(){if(!calledRun)run();if(!calledRun)dependenciesFulfilled=runCaller};function run(args){args=args||arguments_;if(runDependencies>0){return}preRun();if(runDependencies>0)return;function doRun(){if(calledRun)return;calledRun=true;Module[\"calledRun\"]=true;if(ABORT)return;initRuntime();preMain();if(Module[\"onRuntimeInitialized\"])Module[\"onRuntimeInitialized\"]();postRun()}if(Module[\"setStatus\"]){Module[\"setStatus\"](\"Running...\");setTimeout(function(){setTimeout(function(){Module[\"setStatus\"](\"\")},1);doRun()},1)}else{doRun()}}Module[\"run\"]=run;if(Module[\"preInit\"]){if(typeof Module[\"preInit\"]==\"function\")Module[\"preInit\"]=[Module[\"preInit\"]];while(Module[\"preInit\"].length>0){Module[\"preInit\"].pop()()}}if(!ENVIRONMENT_IS_PTHREAD)noExitRuntime=true;if(!ENVIRONMENT_IS_PTHREAD)run();\n\n\n return WasmBackendModuleThreadedSimd\n}\n);\n})();\nif (typeof exports === 'object' && typeof module === 'object')\n module.exports = WasmBackendModuleThreadedSimd;\n else if (typeof define === 'function' && define['amd'])\n define([], function() { return WasmBackendModuleThreadedSimd; });\n else if (typeof exports === 'object')\n exports[\"WasmBackendModuleThreadedSimd\"] = WasmBackendModuleThreadedSimd;\n ", "\nvar WasmBackendModule = (function() {\n var _scriptDir = typeof document !== 'undefined' && document.currentScript ? 
document.currentScript.src : undefined;\n if (typeof __filename !== 'undefined') _scriptDir = _scriptDir || __filename;\n return (\nfunction(WasmBackendModule) {\n WasmBackendModule = WasmBackendModule || {};\n\nvar Module=typeof WasmBackendModule!==\"undefined\"?WasmBackendModule:{};var moduleOverrides={};var key;for(key in Module){if(Module.hasOwnProperty(key)){moduleOverrides[key]=Module[key]}}var arguments_=[];var thisProgram=\"./this.program\";var quit_=function(status,toThrow){throw toThrow};var ENVIRONMENT_IS_WEB=false;var ENVIRONMENT_IS_WORKER=false;var ENVIRONMENT_IS_NODE=false;var ENVIRONMENT_IS_SHELL=false;ENVIRONMENT_IS_WEB=typeof window===\"object\";ENVIRONMENT_IS_WORKER=typeof importScripts===\"function\";ENVIRONMENT_IS_NODE=typeof process===\"object\"&&typeof process.versions===\"object\"&&typeof process.versions.node===\"string\";ENVIRONMENT_IS_SHELL=!ENVIRONMENT_IS_WEB&&!ENVIRONMENT_IS_NODE&&!ENVIRONMENT_IS_WORKER;var scriptDirectory=\"\";function locateFile(path){if(Module[\"locateFile\"]){return Module[\"locateFile\"](path,scriptDirectory)}return scriptDirectory+path}var read_,readAsync,readBinary,setWindowTitle;var nodeFS;var nodePath;if(ENVIRONMENT_IS_NODE){if(ENVIRONMENT_IS_WORKER){scriptDirectory=require(\"path\").dirname(scriptDirectory)+\"/\"}else{scriptDirectory=__dirname+\"/\"}read_=function shell_read(filename,binary){if(!nodeFS)nodeFS=require(\"fs\");if(!nodePath)nodePath=require(\"path\");filename=nodePath[\"normalize\"](filename);return nodeFS[\"readFileSync\"](filename,binary?null:\"utf8\")};readBinary=function readBinary(filename){var ret=read_(filename,true);if(!ret.buffer){ret=new Uint8Array(ret)}assert(ret.buffer);return ret};if(process[\"argv\"].length>1){thisProgram=process[\"argv\"][1].replace(/\\\\/g,\"/\")}arguments_=process[\"argv\"].slice(2);process[\"on\"](\"uncaughtException\",function(ex){if(!(ex instanceof ExitStatus)){throw ex}});process[\"on\"](\"unhandledRejection\",abort);quit_=function(status){process[\"exit\"](status)};Module[\"inspect\"]=function(){return\"[Emscripten Module object]\"}}else if(ENVIRONMENT_IS_SHELL){if(typeof read!=\"undefined\"){read_=function shell_read(f){return read(f)}}readBinary=function readBinary(f){var data;if(typeof readbuffer===\"function\"){return new Uint8Array(readbuffer(f))}data=read(f,\"binary\");assert(typeof data===\"object\");return data};if(typeof scriptArgs!=\"undefined\"){arguments_=scriptArgs}else if(typeof arguments!=\"undefined\"){arguments_=arguments}if(typeof quit===\"function\"){quit_=function(status){quit(status)}}if(typeof print!==\"undefined\"){if(typeof console===\"undefined\")console={};console.log=print;console.warn=console.error=typeof printErr!==\"undefined\"?printErr:print}}else if(ENVIRONMENT_IS_WEB||ENVIRONMENT_IS_WORKER){if(ENVIRONMENT_IS_WORKER){scriptDirectory=self.location.href}else if(document.currentScript){scriptDirectory=document.currentScript.src}if(_scriptDir){scriptDirectory=_scriptDir}if(scriptDirectory.indexOf(\"blob:\")!==0){scriptDirectory=scriptDirectory.substr(0,scriptDirectory.lastIndexOf(\"/\")+1)}else{scriptDirectory=\"\"}{read_=function shell_read(url){var xhr=new XMLHttpRequest;xhr.open(\"GET\",url,false);xhr.send(null);return xhr.responseText};if(ENVIRONMENT_IS_WORKER){readBinary=function readBinary(url){var xhr=new XMLHttpRequest;xhr.open(\"GET\",url,false);xhr.responseType=\"arraybuffer\";xhr.send(null);return new Uint8Array(xhr.response)}}readAsync=function readAsync(url,onload,onerror){var xhr=new 
XMLHttpRequest;xhr.open(\"GET\",url,true);xhr.responseType=\"arraybuffer\";xhr.onload=function xhr_onload(){if(xhr.status==200||xhr.status==0&&xhr.response){onload(xhr.response);return}onerror()};xhr.onerror=onerror;xhr.send(null)}}setWindowTitle=function(title){document.title=title}}else{}var out=Module[\"print\"]||console.log.bind(console);var err=Module[\"printErr\"]||console.warn.bind(console);for(key in moduleOverrides){if(moduleOverrides.hasOwnProperty(key)){Module[key]=moduleOverrides[key]}}moduleOverrides=null;if(Module[\"arguments\"])arguments_=Module[\"arguments\"];if(Module[\"thisProgram\"])thisProgram=Module[\"thisProgram\"];if(Module[\"quit\"])quit_=Module[\"quit\"];var wasmBinary;if(Module[\"wasmBinary\"])wasmBinary=Module[\"wasmBinary\"];var noExitRuntime;if(Module[\"noExitRuntime\"])noExitRuntime=Module[\"noExitRuntime\"];if(typeof WebAssembly!==\"object\"){err(\"no native wasm support detected\")}var wasmMemory;var wasmTable=new WebAssembly.Table({\"initial\":147,\"maximum\":147+0,\"element\":\"anyfunc\"});var ABORT=false;var EXITSTATUS=0;function assert(condition,text){if(!condition){abort(\"Assertion failed: \"+text)}}function getCFunc(ident){var func=Module[\"_\"+ident];assert(func,\"Cannot call unknown function \"+ident+\", make sure it is exported\");return func}function ccall(ident,returnType,argTypes,args,opts){var toC={\"string\":function(str){var ret=0;if(str!==null&&str!==undefined&&str!==0){var len=(str.length<<2)+1;ret=stackAlloc(len);stringToUTF8(str,ret,len)}return ret},\"array\":function(arr){var ret=stackAlloc(arr.length);writeArrayToMemory(arr,ret);return ret}};function convertReturnValue(ret){if(returnType===\"string\")return UTF8ToString(ret);if(returnType===\"boolean\")return Boolean(ret);return ret}var func=getCFunc(ident);var cArgs=[];var stack=0;if(args){for(var i=0;i=endIdx))++endPtr;if(endPtr-idx>16&&heap.subarray&&UTF8Decoder){return UTF8Decoder.decode(heap.subarray(idx,endPtr))}else{var str=\"\";while(idx>10,56320|ch&1023)}}}return str}function UTF8ToString(ptr,maxBytesToRead){return ptr?UTF8ArrayToString(HEAPU8,ptr,maxBytesToRead):\"\"}function stringToUTF8Array(str,heap,outIdx,maxBytesToWrite){if(!(maxBytesToWrite>0))return 0;var startIdx=outIdx;var endIdx=outIdx+maxBytesToWrite-1;for(var i=0;i=55296&&u<=57343){var u1=str.charCodeAt(++i);u=65536+((u&1023)<<10)|u1&1023}if(u<=127){if(outIdx>=endIdx)break;heap[outIdx++]=u}else if(u<=2047){if(outIdx+1>=endIdx)break;heap[outIdx++]=192|u>>6;heap[outIdx++]=128|u&63}else if(u<=65535){if(outIdx+2>=endIdx)break;heap[outIdx++]=224|u>>12;heap[outIdx++]=128|u>>6&63;heap[outIdx++]=128|u&63}else{if(outIdx+3>=endIdx)break;heap[outIdx++]=240|u>>18;heap[outIdx++]=128|u>>12&63;heap[outIdx++]=128|u>>6&63;heap[outIdx++]=128|u&63}}heap[outIdx]=0;return outIdx-startIdx}function stringToUTF8(str,outPtr,maxBytesToWrite){return stringToUTF8Array(str,HEAPU8,outPtr,maxBytesToWrite)}function writeArrayToMemory(array,buffer){HEAP8.set(array,buffer)}var buffer,HEAP8,HEAPU8,HEAP16,HEAPU16,HEAP32,HEAPU32,HEAPF32,HEAPF64;function updateGlobalBufferAndViews(buf){buffer=buf;Module[\"HEAP8\"]=HEAP8=new Int8Array(buf);Module[\"HEAP16\"]=HEAP16=new Int16Array(buf);Module[\"HEAP32\"]=HEAP32=new Int32Array(buf);Module[\"HEAPU8\"]=HEAPU8=new Uint8Array(buf);Module[\"HEAPU16\"]=HEAPU16=new Uint16Array(buf);Module[\"HEAPU32\"]=HEAPU32=new Uint32Array(buf);Module[\"HEAPF32\"]=HEAPF32=new Float32Array(buf);Module[\"HEAPF64\"]=HEAPF64=new Float64Array(buf)}var INITIAL_INITIAL_MEMORY=Module[\"INITIAL_MEMORY\"]||16777216;function 
callRuntimeCallbacks(callbacks){while(callbacks.length>0){var callback=callbacks.shift();if(typeof callback==\"function\"){callback(Module);continue}var func=callback.func;if(typeof func===\"number\"){if(callback.arg===undefined){Module[\"dynCall_v\"](func)}else{Module[\"dynCall_vi\"](func,callback.arg)}}else{func(callback.arg===undefined?null:callback.arg)}}}var __ATPRERUN__=[];var __ATINIT__=[];var __ATMAIN__=[];var __ATPOSTRUN__=[];var runtimeInitialized=false;var runtimeExited=false;function preRun(){if(Module[\"preRun\"]){if(typeof Module[\"preRun\"]==\"function\")Module[\"preRun\"]=[Module[\"preRun\"]];while(Module[\"preRun\"].length){addOnPreRun(Module[\"preRun\"].shift())}}callRuntimeCallbacks(__ATPRERUN__)}function initRuntime(){runtimeInitialized=true;callRuntimeCallbacks(__ATINIT__)}function preMain(){callRuntimeCallbacks(__ATMAIN__)}function exitRuntime(){runtimeExited=true}function postRun(){if(Module[\"postRun\"]){if(typeof Module[\"postRun\"]==\"function\")Module[\"postRun\"]=[Module[\"postRun\"]];while(Module[\"postRun\"].length){addOnPostRun(Module[\"postRun\"].shift())}}callRuntimeCallbacks(__ATPOSTRUN__)}function addOnPreRun(cb){__ATPRERUN__.unshift(cb)}function addOnPostRun(cb){__ATPOSTRUN__.unshift(cb)}var Math_ceil=Math.ceil;var Math_floor=Math.floor;var runDependencies=0;var runDependencyWatcher=null;var dependenciesFulfilled=null;function addRunDependency(id){runDependencies++;if(Module[\"monitorRunDependencies\"]){Module[\"monitorRunDependencies\"](runDependencies)}}function removeRunDependency(id){runDependencies--;if(Module[\"monitorRunDependencies\"]){Module[\"monitorRunDependencies\"](runDependencies)}if(runDependencies==0){if(runDependencyWatcher!==null){clearInterval(runDependencyWatcher);runDependencyWatcher=null}if(dependenciesFulfilled){var callback=dependenciesFulfilled;dependenciesFulfilled=null;callback()}}}Module[\"preloadedImages\"]={};Module[\"preloadedAudios\"]={};function abort(what){if(Module[\"onAbort\"]){Module[\"onAbort\"](what)}what+=\"\";out(what);err(what);ABORT=true;EXITSTATUS=1;what=\"abort(\"+what+\"). 
Build with -s ASSERTIONS=1 for more info.\";throw new WebAssembly.RuntimeError(what)}function hasPrefix(str,prefix){return String.prototype.startsWith?str.startsWith(prefix):str.indexOf(prefix)===0}var dataURIPrefix=\"data:application/octet-stream;base64,\";function isDataURI(filename){return hasPrefix(filename,dataURIPrefix)}var fileURIPrefix=\"file://\";function isFileURI(filename){return hasPrefix(filename,fileURIPrefix)}var wasmBinaryFile=\"tfjs-backend-wasm.wasm\";if(!isDataURI(wasmBinaryFile)){wasmBinaryFile=locateFile(wasmBinaryFile)}function getBinary(){try{if(wasmBinary){return new Uint8Array(wasmBinary)}if(readBinary){return readBinary(wasmBinaryFile)}else{throw\"both async and sync fetching of the wasm failed\"}}catch(err){abort(err)}}function getBinaryPromise(){if(!wasmBinary&&(ENVIRONMENT_IS_WEB||ENVIRONMENT_IS_WORKER)&&typeof fetch===\"function\"&&!isFileURI(wasmBinaryFile)){return fetch(wasmBinaryFile,{credentials:\"same-origin\"}).then(function(response){if(!response[\"ok\"]){throw\"failed to load wasm binary file at '\"+wasmBinaryFile+\"'\"}return response[\"arrayBuffer\"]()}).catch(function(){return getBinary()})}return new Promise(function(resolve,reject){resolve(getBinary())})}function createWasm(){var info={\"env\":asmLibraryArg,\"wasi_snapshot_preview1\":asmLibraryArg};function receiveInstance(instance,module){var exports=instance.exports;Module[\"asm\"]=exports;wasmMemory=exports[\"memory\"];updateGlobalBufferAndViews(wasmMemory.buffer);removeRunDependency(\"wasm-instantiate\")}addRunDependency(\"wasm-instantiate\");function receiveInstantiatedSource(output){receiveInstance(output[\"instance\"])}function instantiateArrayBuffer(receiver){return getBinaryPromise().then(function(binary){return WebAssembly.instantiate(binary,info)}).then(receiver,function(reason){err(\"failed to asynchronously prepare wasm: \"+reason);abort(reason)})}function instantiateAsync(){if(!wasmBinary&&typeof WebAssembly.instantiateStreaming===\"function\"&&!isDataURI(wasmBinaryFile)&&!isFileURI(wasmBinaryFile)&&typeof fetch===\"function\"){fetch(wasmBinaryFile,{credentials:\"same-origin\"}).then(function(response){var result=WebAssembly.instantiateStreaming(response,info);return result.then(receiveInstantiatedSource,function(reason){err(\"wasm streaming compile failed: \"+reason);err(\"falling back to ArrayBuffer instantiation\");instantiateArrayBuffer(receiveInstantiatedSource)})})}else{return instantiateArrayBuffer(receiveInstantiatedSource)}}if(Module[\"instantiateWasm\"]){try{var exports=Module[\"instantiateWasm\"](info,receiveInstance);return exports}catch(e){err(\"Module.instantiateWasm callback failed with error: \"+e);return false}}instantiateAsync();return{}}__ATINIT__.push();function _emscripten_notify_memory_growth(memoryIndex){updateGlobalBufferAndViews(wasmMemory.buffer)}var PATH={splitPath:function(filename){var splitPathRe=/^(\\/?|)([\\s\\S]*?)((?:\\.{1,2}|[^\\/]+?|)(\\.[^.\\/]*|))(?:[\\/]*)$/;return splitPathRe.exec(filename).slice(1)},normalizeArray:function(parts,allowAboveRoot){var up=0;for(var i=parts.length-1;i>=0;i--){var last=parts[i];if(last===\".\"){parts.splice(i,1)}else if(last===\"..\"){parts.splice(i,1);up++}else if(up){parts.splice(i,1);up--}}if(allowAboveRoot){for(;up;up--){parts.unshift(\"..\")}}return parts},normalize:function(path){var 
isAbsolute=path.charAt(0)===\"/\",trailingSlash=path.substr(-1)===\"/\";path=PATH.normalizeArray(path.split(\"/\").filter(function(p){return!!p}),!isAbsolute).join(\"/\");if(!path&&!isAbsolute){path=\".\"}if(path&&trailingSlash){path+=\"/\"}return(isAbsolute?\"/\":\"\")+path},dirname:function(path){var result=PATH.splitPath(path),root=result[0],dir=result[1];if(!root&&!dir){return\".\"}if(dir){dir=dir.substr(0,dir.length-1)}return root+dir},basename:function(path){if(path===\"/\")return\"/\";var lastSlash=path.lastIndexOf(\"/\");if(lastSlash===-1)return path;return path.substr(lastSlash+1)},extname:function(path){return PATH.splitPath(path)[3]},join:function(){var paths=Array.prototype.slice.call(arguments,0);return PATH.normalize(paths.join(\"/\"))},join2:function(l,r){return PATH.normalize(l+\"/\"+r)}};var SYSCALLS={mappings:{},buffers:[null,[],[]],printChar:function(stream,curr){var buffer=SYSCALLS.buffers[stream];if(curr===0||curr===10){(stream===1?out:err)(UTF8ArrayToString(buffer,0));buffer.length=0}else{buffer.push(curr)}},varargs:undefined,get:function(){SYSCALLS.varargs+=4;var ret=HEAP32[SYSCALLS.varargs-4>>2];return ret},getStr:function(ptr){var ret=UTF8ToString(ptr);return ret},get64:function(low,high){return low}};function _fd_close(fd){return 0}function _fd_seek(fd,offset_low,offset_high,whence,newOffset){}function _fd_write(fd,iov,iovcnt,pnum){var num=0;for(var i=0;i<iovcnt;i++){var ptr=HEAP32[iov+i*8>>2];var len=HEAP32[iov+(i*8+4)>>2];for(var j=0;j<len;j++){SYSCALLS.printChar(fd,HEAPU8[ptr+j])}num+=len}HEAP32[pnum>>2]=num;return 0}function _exit(status){exit(status)}function _proc_exit(code){_exit(code)}function _roundf(d){d=+d;return d>=+0?+Math_floor(d+ +.5):+Math_ceil(d-+.5)}var asmLibraryArg={\"emscripten_notify_memory_growth\":_emscripten_notify_memory_growth,\"fd_close\":_fd_close,\"fd_seek\":_fd_seek,\"fd_write\":_fd_write,\"proc_exit\":_proc_exit,\"roundf\":_roundf};var asm=createWasm();Module[\"asm\"]=asm;var _init=Module[\"_init\"]=function(){return(_init=Module[\"_init\"]=Module[\"asm\"][\"init\"]).apply(null,arguments)};var _register_tensor=Module[\"_register_tensor\"]=function(){return(_register_tensor=Module[\"_register_tensor\"]=Module[\"asm\"][\"register_tensor\"]).apply(null,arguments)};var _dispose_data=Module[\"_dispose_data\"]=function(){return(_dispose_data=Module[\"_dispose_data\"]=Module[\"asm\"][\"dispose_data\"]).apply(null,arguments)};var _dispose=Module[\"_dispose\"]=function(){return(_dispose=Module[\"_dispose\"]=Module[\"asm\"][\"dispose\"]).apply(null,arguments)};var _Abs=Module[\"_Abs\"]=function(){return(_Abs=Module[\"_Abs\"]=Module[\"asm\"][\"Abs\"]).apply(null,arguments)};var _Add=Module[\"_Add\"]=function(){return(_Add=Module[\"_Add\"]=Module[\"asm\"][\"Add\"]).apply(null,arguments)};var _AddN=Module[\"_AddN\"]=function(){return(_AddN=Module[\"_AddN\"]=Module[\"asm\"][\"AddN\"]).apply(null,arguments)};var _ArgMax=Module[\"_ArgMax\"]=function(){return(_ArgMax=Module[\"_ArgMax\"]=Module[\"asm\"][\"ArgMax\"]).apply(null,arguments)};var _AvgPool=Module[\"_AvgPool\"]=function(){return(_AvgPool=Module[\"_AvgPool\"]=Module[\"asm\"][\"AvgPool\"]).apply(null,arguments)};var _BatchMatMul=Module[\"_BatchMatMul\"]=function(){return(_BatchMatMul=Module[\"_BatchMatMul\"]=Module[\"asm\"][\"BatchMatMul\"]).apply(null,arguments)};var _ClipByValue=Module[\"_ClipByValue\"]=function(){return(_ClipByValue=Module[\"_ClipByValue\"]=Module[\"asm\"][\"ClipByValue\"]).apply(null,arguments)};var _Conv2D=Module[\"_Conv2D\"]=function(){return(_Conv2D=Module[\"_Conv2D\"]=Module[\"asm\"][\"Conv2D\"]).apply(null,arguments)};var 
_Conv2DBackpropInput=Module[\"_Conv2DBackpropInput\"]=function(){return(_Conv2DBackpropInput=Module[\"_Conv2DBackpropInput\"]=Module[\"asm\"][\"Conv2DBackpropInput\"]).apply(null,arguments)};var _Cos=Module[\"_Cos\"]=function(){return(_Cos=Module[\"_Cos\"]=Module[\"asm\"][\"Cos\"]).apply(null,arguments)};var _CropAndResize=Module[\"_CropAndResize\"]=function(){return(_CropAndResize=Module[\"_CropAndResize\"]=Module[\"asm\"][\"CropAndResize\"]).apply(null,arguments)};var _Cumsum=Module[\"_Cumsum\"]=function(){return(_Cumsum=Module[\"_Cumsum\"]=Module[\"asm\"][\"Cumsum\"]).apply(null,arguments)};var _DepthToSpace=Module[\"_DepthToSpace\"]=function(){return(_DepthToSpace=Module[\"_DepthToSpace\"]=Module[\"asm\"][\"DepthToSpace\"]).apply(null,arguments)};var _DepthwiseConv2dNative=Module[\"_DepthwiseConv2dNative\"]=function(){return(_DepthwiseConv2dNative=Module[\"_DepthwiseConv2dNative\"]=Module[\"asm\"][\"DepthwiseConv2dNative\"]).apply(null,arguments)};var _Div=Module[\"_Div\"]=function(){return(_Div=Module[\"_Div\"]=Module[\"asm\"][\"Div\"]).apply(null,arguments)};var _Equal=Module[\"_Equal\"]=function(){return(_Equal=Module[\"_Equal\"]=Module[\"asm\"][\"Equal\"]).apply(null,arguments)};var _Exp=Module[\"_Exp\"]=function(){return(_Exp=Module[\"_Exp\"]=Module[\"asm\"][\"Exp\"]).apply(null,arguments)};var _FlipLeftRight=Module[\"_FlipLeftRight\"]=function(){return(_FlipLeftRight=Module[\"_FlipLeftRight\"]=Module[\"asm\"][\"FlipLeftRight\"]).apply(null,arguments)};var _FloorDiv=Module[\"_FloorDiv\"]=function(){return(_FloorDiv=Module[\"_FloorDiv\"]=Module[\"asm\"][\"FloorDiv\"]).apply(null,arguments)};var _FusedBatchNorm=Module[\"_FusedBatchNorm\"]=function(){return(_FusedBatchNorm=Module[\"_FusedBatchNorm\"]=Module[\"asm\"][\"FusedBatchNorm\"]).apply(null,arguments)};var _FusedConv2D=Module[\"_FusedConv2D\"]=function(){return(_FusedConv2D=Module[\"_FusedConv2D\"]=Module[\"asm\"][\"FusedConv2D\"]).apply(null,arguments)};var _FusedDepthwiseConv2D=Module[\"_FusedDepthwiseConv2D\"]=function(){return(_FusedDepthwiseConv2D=Module[\"_FusedDepthwiseConv2D\"]=Module[\"asm\"][\"FusedDepthwiseConv2D\"]).apply(null,arguments)};var _Gather=Module[\"_Gather\"]=function(){return(_Gather=Module[\"_Gather\"]=Module[\"asm\"][\"Gather\"]).apply(null,arguments)};var _GatherNd=Module[\"_GatherNd\"]=function(){return(_GatherNd=Module[\"_GatherNd\"]=Module[\"asm\"][\"GatherNd\"]).apply(null,arguments)};var _Greater=Module[\"_Greater\"]=function(){return(_Greater=Module[\"_Greater\"]=Module[\"asm\"][\"Greater\"]).apply(null,arguments)};var _GreaterEqual=Module[\"_GreaterEqual\"]=function(){return(_GreaterEqual=Module[\"_GreaterEqual\"]=Module[\"asm\"][\"GreaterEqual\"]).apply(null,arguments)};var _Less=Module[\"_Less\"]=function(){return(_Less=Module[\"_Less\"]=Module[\"asm\"][\"Less\"]).apply(null,arguments)};var _LessEqual=Module[\"_LessEqual\"]=function(){return(_LessEqual=Module[\"_LessEqual\"]=Module[\"asm\"][\"LessEqual\"]).apply(null,arguments)};var _Log=Module[\"_Log\"]=function(){return(_Log=Module[\"_Log\"]=Module[\"asm\"][\"Log\"]).apply(null,arguments)};var _LogicalAnd=Module[\"_LogicalAnd\"]=function(){return(_LogicalAnd=Module[\"_LogicalAnd\"]=Module[\"asm\"][\"LogicalAnd\"]).apply(null,arguments)};var _Max=Module[\"_Max\"]=function(){return(_Max=Module[\"_Max\"]=Module[\"asm\"][\"Max\"]).apply(null,arguments)};var _MaxPool=Module[\"_MaxPool\"]=function(){return(_MaxPool=Module[\"_MaxPool\"]=Module[\"asm\"][\"MaxPool\"]).apply(null,arguments)};var 
_Maximum=Module[\"_Maximum\"]=function(){return(_Maximum=Module[\"_Maximum\"]=Module[\"asm\"][\"Maximum\"]).apply(null,arguments)};var _Min=Module[\"_Min\"]=function(){return(_Min=Module[\"_Min\"]=Module[\"asm\"][\"Min\"]).apply(null,arguments)};var _Minimum=Module[\"_Minimum\"]=function(){return(_Minimum=Module[\"_Minimum\"]=Module[\"asm\"][\"Minimum\"]).apply(null,arguments)};var _Multiply=Module[\"_Multiply\"]=function(){return(_Multiply=Module[\"_Multiply\"]=Module[\"asm\"][\"Multiply\"]).apply(null,arguments)};var _Negate=Module[\"_Negate\"]=function(){return(_Negate=Module[\"_Negate\"]=Module[\"asm\"][\"Negate\"]).apply(null,arguments)};var _NonMaxSuppressionV3=Module[\"_NonMaxSuppressionV3\"]=function(){return(_NonMaxSuppressionV3=Module[\"_NonMaxSuppressionV3\"]=Module[\"asm\"][\"NonMaxSuppressionV3\"]).apply(null,arguments)};var _NonMaxSuppressionV4=Module[\"_NonMaxSuppressionV4\"]=function(){return(_NonMaxSuppressionV4=Module[\"_NonMaxSuppressionV4\"]=Module[\"asm\"][\"NonMaxSuppressionV4\"]).apply(null,arguments)};var _NonMaxSuppressionV5=Module[\"_NonMaxSuppressionV5\"]=function(){return(_NonMaxSuppressionV5=Module[\"_NonMaxSuppressionV5\"]=Module[\"asm\"][\"NonMaxSuppressionV5\"]).apply(null,arguments)};var _NotEqual=Module[\"_NotEqual\"]=function(){return(_NotEqual=Module[\"_NotEqual\"]=Module[\"asm\"][\"NotEqual\"]).apply(null,arguments)};var _OneHot=Module[\"_OneHot\"]=function(){return(_OneHot=Module[\"_OneHot\"]=Module[\"asm\"][\"OneHot\"]).apply(null,arguments)};var _PadV2=Module[\"_PadV2\"]=function(){return(_PadV2=Module[\"_PadV2\"]=Module[\"asm\"][\"PadV2\"]).apply(null,arguments)};var _Pow=Module[\"_Pow\"]=function(){return(_Pow=Module[\"_Pow\"]=Module[\"asm\"][\"Pow\"]).apply(null,arguments)};var _Prelu=Module[\"_Prelu\"]=function(){return(_Prelu=Module[\"_Prelu\"]=Module[\"asm\"][\"Prelu\"]).apply(null,arguments)};var _Relu=Module[\"_Relu\"]=function(){return(_Relu=Module[\"_Relu\"]=Module[\"asm\"][\"Relu\"]).apply(null,arguments)};var _Relu6=Module[\"_Relu6\"]=function(){return(_Relu6=Module[\"_Relu6\"]=Module[\"asm\"][\"Relu6\"]).apply(null,arguments)};var _ResizeBilinear=Module[\"_ResizeBilinear\"]=function(){return(_ResizeBilinear=Module[\"_ResizeBilinear\"]=Module[\"asm\"][\"ResizeBilinear\"]).apply(null,arguments)};var _Reverse=Module[\"_Reverse\"]=function(){return(_Reverse=Module[\"_Reverse\"]=Module[\"asm\"][\"Reverse\"]).apply(null,arguments)};var _RotateWithOffset=Module[\"_RotateWithOffset\"]=function(){return(_RotateWithOffset=Module[\"_RotateWithOffset\"]=Module[\"asm\"][\"RotateWithOffset\"]).apply(null,arguments)};var _Rsqrt=Module[\"_Rsqrt\"]=function(){return(_Rsqrt=Module[\"_Rsqrt\"]=Module[\"asm\"][\"Rsqrt\"]).apply(null,arguments)};var _ScatterNd=Module[\"_ScatterNd\"]=function(){return(_ScatterNd=Module[\"_ScatterNd\"]=Module[\"asm\"][\"ScatterNd\"]).apply(null,arguments)};var _SelectV2=Module[\"_SelectV2\"]=function(){return(_SelectV2=Module[\"_SelectV2\"]=Module[\"asm\"][\"SelectV2\"]).apply(null,arguments)};var _Sigmoid=Module[\"_Sigmoid\"]=function(){return(_Sigmoid=Module[\"_Sigmoid\"]=Module[\"asm\"][\"Sigmoid\"]).apply(null,arguments)};var _Sin=Module[\"_Sin\"]=function(){return(_Sin=Module[\"_Sin\"]=Module[\"asm\"][\"Sin\"]).apply(null,arguments)};var _Softmax=Module[\"_Softmax\"]=function(){return(_Softmax=Module[\"_Softmax\"]=Module[\"asm\"][\"Softmax\"]).apply(null,arguments)};var _Sqrt=Module[\"_Sqrt\"]=function(){return(_Sqrt=Module[\"_Sqrt\"]=Module[\"asm\"][\"Sqrt\"]).apply(null,arguments)};var 
_Square=Module[\"_Square\"]=function(){return(_Square=Module[\"_Square\"]=Module[\"asm\"][\"Square\"]).apply(null,arguments)};var _SquaredDifference=Module[\"_SquaredDifference\"]=function(){return(_SquaredDifference=Module[\"_SquaredDifference\"]=Module[\"asm\"][\"SquaredDifference\"]).apply(null,arguments)};var _StridedSlice=Module[\"_StridedSlice\"]=function(){return(_StridedSlice=Module[\"_StridedSlice\"]=Module[\"asm\"][\"StridedSlice\"]).apply(null,arguments)};var _Sub=Module[\"_Sub\"]=function(){return(_Sub=Module[\"_Sub\"]=Module[\"asm\"][\"Sub\"]).apply(null,arguments)};var _Sum=Module[\"_Sum\"]=function(){return(_Sum=Module[\"_Sum\"]=Module[\"asm\"][\"Sum\"]).apply(null,arguments)};var _Tanh=Module[\"_Tanh\"]=function(){return(_Tanh=Module[\"_Tanh\"]=Module[\"asm\"][\"Tanh\"]).apply(null,arguments)};var _Tile=Module[\"_Tile\"]=function(){return(_Tile=Module[\"_Tile\"]=Module[\"asm\"][\"Tile\"]).apply(null,arguments)};var _Transpose=Module[\"_Transpose\"]=function(){return(_Transpose=Module[\"_Transpose\"]=Module[\"asm\"][\"Transpose\"]).apply(null,arguments)};var __FusedMatMul=Module[\"__FusedMatMul\"]=function(){return(__FusedMatMul=Module[\"__FusedMatMul\"]=Module[\"asm\"][\"_FusedMatMul\"]).apply(null,arguments)};var _malloc=Module[\"_malloc\"]=function(){return(_malloc=Module[\"_malloc\"]=Module[\"asm\"][\"malloc\"]).apply(null,arguments)};var _free=Module[\"_free\"]=function(){return(_free=Module[\"_free\"]=Module[\"asm\"][\"free\"]).apply(null,arguments)};var __start=Module[\"__start\"]=function(){return(__start=Module[\"__start\"]=Module[\"asm\"][\"_start\"]).apply(null,arguments)};var stackSave=Module[\"stackSave\"]=function(){return(stackSave=Module[\"stackSave\"]=Module[\"asm\"][\"stackSave\"]).apply(null,arguments)};var stackAlloc=Module[\"stackAlloc\"]=function(){return(stackAlloc=Module[\"stackAlloc\"]=Module[\"asm\"][\"stackAlloc\"]).apply(null,arguments)};var stackRestore=Module[\"stackRestore\"]=function(){return(stackRestore=Module[\"stackRestore\"]=Module[\"asm\"][\"stackRestore\"]).apply(null,arguments)};Module[\"asm\"]=asm;Module[\"cwrap\"]=cwrap;var calledRun;Module[\"then\"]=function(func){if(calledRun){func(Module)}else{var old=Module[\"onRuntimeInitialized\"];Module[\"onRuntimeInitialized\"]=function(){if(old)old();func(Module)}}return Module};function ExitStatus(status){this.name=\"ExitStatus\";this.message=\"Program terminated with exit(\"+status+\")\";this.status=status}var calledMain=false;dependenciesFulfilled=function runCaller(){if(!calledRun)run();if(!calledRun)dependenciesFulfilled=runCaller};function callMain(args){var entryFunction=Module[\"__start\"];try{entryFunction();var ret=0;exit(ret,true)}catch(e){if(e instanceof ExitStatus){return}else if(e==\"unwind\"){noExitRuntime=true;return}else{var toLog=e;if(e&&typeof e===\"object\"&&e.stack){toLog=[e,e.stack]}err(\"exception thrown: \"+toLog);quit_(1,e)}}finally{calledMain=true}}function run(args){args=args||arguments_;if(runDependencies>0){return}preRun();if(runDependencies>0)return;function doRun(){if(calledRun)return;calledRun=true;Module[\"calledRun\"]=true;if(ABORT)return;initRuntime();preMain();if(Module[\"onRuntimeInitialized\"])Module[\"onRuntimeInitialized\"]();if(shouldRunNow)callMain(args);postRun()}if(Module[\"setStatus\"]){Module[\"setStatus\"](\"Running...\");setTimeout(function(){setTimeout(function(){Module[\"setStatus\"](\"\")},1);doRun()},1)}else{doRun()}}Module[\"run\"]=run;function 
exit(status,implicit){if(implicit&&noExitRuntime&&status===0){return}if(noExitRuntime){}else{ABORT=true;EXITSTATUS=status;exitRuntime();if(Module[\"onExit\"])Module[\"onExit\"](status)}quit_(status,new ExitStatus(status))}if(Module[\"preInit\"]){if(typeof Module[\"preInit\"]==\"function\")Module[\"preInit\"]=[Module[\"preInit\"]];while(Module[\"preInit\"].length>0){Module[\"preInit\"].pop()()}}var shouldRunNow=true;if(Module[\"noInitialRun\"])shouldRunNow=false;noExitRuntime=true;run();\n\n\n return WasmBackendModule\n}\n);\n})();\nif (typeof exports === 'object' && typeof module === 'object')\n module.exports = WasmBackendModule;\n else if (typeof define === 'function' && define['amd'])\n define([], function() { return WasmBackendModule; });\n else if (typeof exports === 'object')\n exports[\"WasmBackendModule\"] = WasmBackendModule;\n ", "import { tf, loadGraphModel } from '../tf.js';\n\nconst NUM_LANDMARKS = 6;\n\nfunction generateAnchors(inputSize) {\n const spec = { strides: [inputSize / 16, inputSize / 8], anchors: [2, 6] };\n const anchors = [];\n for (let i = 0; i < spec.strides.length; i++) {\n const stride = spec.strides[i];\n const gridRows = Math.floor((inputSize + stride - 1) / stride);\n const gridCols = Math.floor((inputSize + stride - 1) / stride);\n const anchorsNum = spec.anchors[i];\n for (let gridY = 0; gridY < gridRows; gridY++) {\n const anchorY = stride * (gridY + 0.5);\n for (let gridX = 0; gridX < gridCols; gridX++) {\n const anchorX = stride * (gridX + 0.5);\n for (let n = 0; n < anchorsNum; n++) {\n anchors.push([anchorX, anchorY]);\n }\n }\n }\n }\n return anchors;\n}\n\nconst disposeBox = (box) => {\n box.startEndTensor.dispose();\n box.startPoint.dispose();\n box.endPoint.dispose();\n};\n\nconst createBox = (startEndTensor) => ({\n startEndTensor,\n startPoint: tf.slice(startEndTensor, [0, 0], [-1, 2]),\n endPoint: tf.slice(startEndTensor, [0, 2], [-1, 2]),\n});\n\nconst scaleBox = (box, factors) => {\n const starts = tf.mul(box.startPoint, factors);\n const ends = tf.mul(box.endPoint, factors);\n const newCoordinates = tf.concat2d([starts, ends], 1);\n return createBox(newCoordinates);\n};\n\nfunction decodeBounds(boxOutputs, anchors, inputSize) {\n const boxStarts = tf.slice(boxOutputs, [0, 1], [-1, 2]);\n const centers = tf.add(boxStarts, anchors);\n const boxSizes = tf.slice(boxOutputs, [0, 3], [-1, 2]);\n const boxSizesNormalized = tf.div(boxSizes, inputSize);\n const centersNormalized = tf.div(centers, inputSize);\n const halfBoxSize = tf.div(boxSizesNormalized, 2);\n const starts = tf.sub(centersNormalized, halfBoxSize);\n const ends = tf.add(centersNormalized, halfBoxSize);\n const startNormalized = tf.mul(starts, inputSize);\n const endNormalized = tf.mul(ends, inputSize);\n const concatAxis = 1;\n return tf.concat2d([startNormalized, endNormalized], concatAxis);\n}\n\nfunction scaleBoxFromPrediction(face, scaleFactor) {\n return tf.tidy(() => {\n const box = face['box'] ? 
face['box'] : face;\n return scaleBox(box, scaleFactor).startEndTensor.squeeze();\n });\n}\n\nclass BlazeFaceModel {\n constructor(model, config) {\n this.blazeFaceModel = model;\n this.width = config.detector.inputSize;\n this.height = config.detector.inputSize;\n this.anchorsData = generateAnchors(config.detector.inputSize);\n this.anchors = tf.tensor2d(this.anchorsData);\n this.inputSize = tf.tensor1d([this.width, this.height]);\n this.config = config;\n this.scaleFaces = 0.8;\n }\n\n async getBoundingBoxes(inputImage) {\n // sanity check on input\n if ((!inputImage) || (inputImage.isDisposedInternal) || (inputImage.shape.length !== 4) || (inputImage.shape[1] < 1) || (inputImage.shape[2] < 1)) return null;\n const [detectedOutputs, boxes, scores] = tf.tidy(() => {\n const resizedImage = inputImage.resizeBilinear([this.width, this.height]);\n // const normalizedImage = tf.mul(tf.sub(resizedImage.div(255), 0.5), 2);\n const normalizedImage = tf.sub(resizedImage.div(127.5), 1);\n const batchedPrediction = this.blazeFaceModel.predict(normalizedImage);\n let prediction;\n // are we using tfhub or pinto converted model?\n if (Array.isArray(batchedPrediction)) {\n const sorted = batchedPrediction.sort((a, b) => a.size - b.size);\n const concat384 = tf.concat([sorted[0], sorted[2]], 2); // dim: 384, 1 + 16\n const concat512 = tf.concat([sorted[1], sorted[3]], 2); // dim: 512, 1 + 16\n const concat = tf.concat([concat512, concat384], 1);\n prediction = concat.squeeze(0);\n } else {\n prediction = batchedPrediction.squeeze(); // when using tfhub model\n }\n const decodedBounds = decodeBounds(prediction, this.anchors, this.inputSize);\n const logits = tf.slice(prediction, [0, 0], [-1, 1]);\n const scoresOut = tf.sigmoid(logits).squeeze();\n return [prediction, decodedBounds, scoresOut];\n });\n const boxIndicesTensor = await tf.image.nonMaxSuppressionAsync(boxes, scores, this.config.detector.maxFaces, this.config.detector.iouThreshold, this.config.detector.scoreThreshold);\n const boxIndices = boxIndicesTensor.arraySync();\n boxIndicesTensor.dispose();\n const boundingBoxesMap = boxIndices.map((boxIndex) => tf.slice(boxes, [boxIndex, 0], [1, -1]));\n const boundingBoxes = boundingBoxesMap.map((boundingBox) => {\n const vals = boundingBox.arraySync();\n boundingBox.dispose();\n return vals;\n });\n\n const scoresVal = scores.dataSync();\n const annotatedBoxes = [];\n for (const i in boundingBoxes) {\n const boxIndex = boxIndices[i];\n const confidence = scoresVal[boxIndex];\n if (confidence > this.config.detector.minConfidence) {\n const box = createBox(boundingBoxes[i]);\n const anchor = this.anchorsData[boxIndex];\n const landmarks = tf.tidy(() => tf.slice(detectedOutputs, [boxIndex, NUM_LANDMARKS - 1], [1, -1]).squeeze().reshape([NUM_LANDMARKS, -1]));\n annotatedBoxes.push({ box, landmarks, anchor, confidence });\n }\n }\n detectedOutputs.dispose();\n boxes.dispose();\n scores.dispose();\n detectedOutputs.dispose();\n return {\n boxes: annotatedBoxes,\n scaleFactor: [inputImage.shape[2] / this.width, inputImage.shape[1] / this.height],\n };\n }\n\n async estimateFaces(input) {\n const { boxes, scaleFactor } = await this.getBoundingBoxes(input);\n const faces = [];\n for (const face of boxes) {\n const landmarkData = face.landmarks.arraySync();\n const scaledBox = scaleBoxFromPrediction(face, scaleFactor);\n const boxData = scaleBox.arraySync();\n const probabilityData = face.probability.arraySync();\n const anchor = face.anchor;\n const [scaleFactorX, scaleFactorY] = scaleFactor;\n const 
scaledLandmarks = landmarkData\n .map((landmark) => ([\n (landmark[0] + anchor[0]) * scaleFactorX,\n (landmark[1] + anchor[1]) * scaleFactorY,\n ]));\n const normalizedFace = {\n topLeft: boxData.slice(0, 2),\n bottomRight: boxData.slice(2),\n landmarks: scaledLandmarks,\n probability: probabilityData,\n };\n disposeBox(face.box);\n face.landmarks.dispose();\n face.probability.dispose();\n scaledBox.dispose();\n faces.push(normalizedFace);\n }\n return faces;\n }\n}\n\nasync function load(config) {\n const blazeface = await loadGraphModel(config.detector.modelPath, { fromTFHub: config.detector.modelPath.includes('tfhub.dev') });\n const model = new BlazeFaceModel(blazeface, config);\n // eslint-disable-next-line no-console\n console.log(`Human: load model: ${config.detector.modelPath.match(/\\/(.*)\\./)[1]}`);\n return model;\n}\n\nexports.load = load;\nexports.BlazeFaceModel = BlazeFaceModel;\nexports.disposeBox = disposeBox;\n", "exports.MESH_ANNOTATIONS = {\n silhouette: [\n 10, 338, 297, 332, 284, 251, 389, 356, 454, 323, 361, 288,\n 397, 365, 379, 378, 400, 377, 152, 148, 176, 149, 150, 136,\n 172, 58, 132, 93, 234, 127, 162, 21, 54, 103, 67, 109,\n ],\n lipsUpperOuter: [61, 185, 40, 39, 37, 0, 267, 269, 270, 409, 291],\n lipsLowerOuter: [146, 91, 181, 84, 17, 314, 405, 321, 375, 291],\n lipsUpperInner: [78, 191, 80, 81, 82, 13, 312, 311, 310, 415, 308],\n lipsLowerInner: [78, 95, 88, 178, 87, 14, 317, 402, 318, 324, 308],\n rightEyeUpper0: [246, 161, 160, 159, 158, 157, 173],\n rightEyeLower0: [33, 7, 163, 144, 145, 153, 154, 155, 133],\n rightEyeUpper1: [247, 30, 29, 27, 28, 56, 190],\n rightEyeLower1: [130, 25, 110, 24, 23, 22, 26, 112, 243],\n rightEyeUpper2: [113, 225, 224, 223, 222, 221, 189],\n rightEyeLower2: [226, 31, 228, 229, 230, 231, 232, 233, 244],\n rightEyeLower3: [143, 111, 117, 118, 119, 120, 121, 128, 245],\n rightEyebrowUpper: [156, 70, 63, 105, 66, 107, 55, 193],\n rightEyebrowLower: [35, 124, 46, 53, 52, 65],\n rightEyeIris: [473, 474, 475, 476, 477],\n leftEyeUpper0: [466, 388, 387, 386, 385, 384, 398],\n leftEyeLower0: [263, 249, 390, 373, 374, 380, 381, 382, 362],\n leftEyeUpper1: [467, 260, 259, 257, 258, 286, 414],\n leftEyeLower1: [359, 255, 339, 254, 253, 252, 256, 341, 463],\n leftEyeUpper2: [342, 445, 444, 443, 442, 441, 413],\n leftEyeLower2: [446, 261, 448, 449, 450, 451, 452, 453, 464],\n leftEyeLower3: [372, 340, 346, 347, 348, 349, 350, 357, 465],\n leftEyebrowUpper: [383, 300, 293, 334, 296, 336, 285, 417],\n leftEyebrowLower: [265, 353, 276, 283, 282, 295],\n leftEyeIris: [468, 469, 470, 471, 472],\n midwayBetweenEyes: [168],\n noseTip: [1],\n noseBottom: [2],\n noseRightCorner: [98],\n noseLeftCorner: [327],\n rightCheek: [205],\n leftCheek: [425],\n};\nexports.MESH_TO_IRIS_INDICES_MAP = [ // A mapping from facemesh model keypoints to iris model keypoints.\n { key: 'EyeUpper0', indices: [9, 10, 11, 12, 13, 14, 15] },\n { key: 'EyeUpper1', indices: [25, 26, 27, 28, 29, 30, 31] },\n { key: 'EyeUpper2', indices: [41, 42, 43, 44, 45, 46, 47] },\n { key: 'EyeLower0', indices: [0, 1, 2, 3, 4, 5, 6, 7, 8] },\n { key: 'EyeLower1', indices: [16, 17, 18, 19, 20, 21, 22, 23, 24] },\n { key: 'EyeLower2', indices: [32, 33, 34, 35, 36, 37, 38, 39, 40] },\n { key: 'EyeLower3', indices: [54, 55, 56, 57, 58, 59, 60, 61, 62] },\n { key: 'EyebrowUpper', indices: [63, 64, 65, 66, 67, 68, 69, 70] },\n { key: 'EyebrowLower', indices: [48, 49, 50, 51, 52, 53] },\n];\n", "import { tf } from '../tf.js';\n\nfunction scaleBoxCoordinates(box, factor) {\n const startPoint = 
[box.startPoint[0] * factor[0], box.startPoint[1] * factor[1]];\n const endPoint = [box.endPoint[0] * factor[0], box.endPoint[1] * factor[1]];\n return { startPoint, endPoint };\n}\nexports.scaleBoxCoordinates = scaleBoxCoordinates;\n\nfunction getBoxSize(box) {\n return [\n Math.abs(box.endPoint[0] - box.startPoint[0]),\n Math.abs(box.endPoint[1] - box.startPoint[1]),\n ];\n}\nexports.getBoxSize = getBoxSize;\n\nfunction getBoxCenter(box) {\n return [\n box.startPoint[0] + (box.endPoint[0] - box.startPoint[0]) / 2,\n box.startPoint[1] + (box.endPoint[1] - box.startPoint[1]) / 2,\n ];\n}\nexports.getBoxCenter = getBoxCenter;\n\nfunction cutBoxFromImageAndResize(box, image, cropSize) {\n const h = image.shape[1];\n const w = image.shape[2];\n const boxes = [[\n box.startPoint[1] / h, box.startPoint[0] / w, box.endPoint[1] / h,\n box.endPoint[0] / w,\n ]];\n return tf.image.cropAndResize(image, boxes, [0], cropSize);\n}\nexports.cutBoxFromImageAndResize = cutBoxFromImageAndResize;\n\nfunction enlargeBox(box, factor = 1.5) {\n const center = getBoxCenter(box);\n const size = getBoxSize(box);\n const newHalfSize = [factor * size[0] / 2, factor * size[1] / 2];\n const startPoint = [center[0] - newHalfSize[0], center[1] - newHalfSize[1]];\n const endPoint = [center[0] + newHalfSize[0], center[1] + newHalfSize[1]];\n return { startPoint, endPoint, landmarks: box.landmarks };\n}\nexports.enlargeBox = enlargeBox;\n\nfunction squarifyBox(box) {\n const centers = getBoxCenter(box);\n const size = getBoxSize(box);\n const maxEdge = Math.max(...size);\n const halfSize = maxEdge / 2;\n const startPoint = [centers[0] - halfSize, centers[1] - halfSize];\n const endPoint = [centers[0] + halfSize, centers[1] + halfSize];\n return { startPoint, endPoint, landmarks: box.landmarks };\n}\nexports.squarifyBox = squarifyBox;\n", "exports.IDENTITY_MATRIX = [[1, 0, 0], [0, 1, 0], [0, 0, 1]];\n/**\n * Normalizes the provided angle to the range -pi to pi.\n * @param angle The angle in radians to be normalized.\n */\nfunction normalizeRadians(angle) {\n return angle - 2 * Math.PI * Math.floor((angle + Math.PI) / (2 * Math.PI));\n}\nexports.normalizeRadians = normalizeRadians;\n/**\n * Computes the angle of rotation between two anchor points.\n * @param point1 First anchor point\n * @param point2 Second anchor point\n */\nfunction computeRotation(point1, point2) {\n const radians = Math.PI / 2 - Math.atan2(-(point2[1] - point1[1]), point2[0] - point1[0]);\n return normalizeRadians(radians);\n}\nexports.computeRotation = computeRotation;\nfunction radToDegrees(rad) {\n return rad * 180 / Math.PI;\n}\nexports.radToDegrees = radToDegrees;\nfunction buildTranslationMatrix(x, y) {\n return [[1, 0, x], [0, 1, y], [0, 0, 1]];\n}\nfunction dot(v1, v2) {\n let product = 0;\n for (let i = 0; i < v1.length; i++) {\n product += v1[i] * v2[i];\n }\n return product;\n}\nexports.dot = dot;\nfunction getColumnFrom2DArr(arr, columnIndex) {\n const column = [];\n for (let i = 0; i < arr.length; i++) {\n column.push(arr[i][columnIndex]);\n }\n return column;\n}\nexports.getColumnFrom2DArr = getColumnFrom2DArr;\nfunction multiplyTransformMatrices(mat1, mat2) {\n const product = [];\n const size = mat1.length;\n for (let row = 0; row < size; row++) {\n product.push([]);\n for (let col = 0; col < size; col++) {\n product[row].push(dot(mat1[row], getColumnFrom2DArr(mat2, col)));\n }\n }\n return product;\n}\nfunction buildRotationMatrix(rotation, center) {\n const cosA = Math.cos(rotation);\n const sinA = Math.sin(rotation);\n const 
rotationMatrix = [[cosA, -sinA, 0], [sinA, cosA, 0], [0, 0, 1]];\n const translationMatrix = buildTranslationMatrix(center[0], center[1]);\n const translationTimesRotation = multiplyTransformMatrices(translationMatrix, rotationMatrix);\n const negativeTranslationMatrix = buildTranslationMatrix(-center[0], -center[1]);\n return multiplyTransformMatrices(translationTimesRotation, negativeTranslationMatrix);\n}\nexports.buildRotationMatrix = buildRotationMatrix;\nfunction invertTransformMatrix(matrix) {\n const rotationComponent = [[matrix[0][0], matrix[1][0]], [matrix[0][1], matrix[1][1]]];\n const translationComponent = [matrix[0][2], matrix[1][2]];\n const invertedTranslation = [\n -dot(rotationComponent[0], translationComponent),\n -dot(rotationComponent[1], translationComponent),\n ];\n return [\n rotationComponent[0].concat(invertedTranslation[0]),\n rotationComponent[1].concat(invertedTranslation[1]),\n [0, 0, 1],\n ];\n}\nexports.invertTransformMatrix = invertTransformMatrix;\nfunction rotatePoint(homogeneousCoordinate, rotationMatrix) {\n return [\n dot(homogeneousCoordinate, rotationMatrix[0]),\n dot(homogeneousCoordinate, rotationMatrix[1]),\n ];\n}\nexports.rotatePoint = rotatePoint;\nfunction xyDistanceBetweenPoints(a, b) {\n return Math.sqrt(((a[0] - b[0]) ** 2) + ((a[1] - b[1]) ** 2));\n}\nexports.xyDistanceBetweenPoints = xyDistanceBetweenPoints;\n", "/* eslint-disable class-methods-use-this */\nimport { tf } from '../tf.js';\nimport * as bounding from './box';\nimport * as keypoints from './keypoints';\nimport * as util from './util';\n\nconst LANDMARKS_COUNT = 468;\nconst MESH_MOUTH_INDEX = 13;\nconst MESH_KEYPOINTS_LINE_OF_SYMMETRY_INDICES = [MESH_MOUTH_INDEX, keypoints.MESH_ANNOTATIONS['midwayBetweenEyes'][0]];\nconst BLAZEFACE_MOUTH_INDEX = 3;\nconst BLAZEFACE_NOSE_INDEX = 2;\nconst BLAZEFACE_KEYPOINTS_LINE_OF_SYMMETRY_INDICES = [BLAZEFACE_MOUTH_INDEX, BLAZEFACE_NOSE_INDEX];\nconst LEFT_EYE_OUTLINE = keypoints.MESH_ANNOTATIONS['leftEyeLower0'];\nconst LEFT_EYE_BOUNDS = [LEFT_EYE_OUTLINE[0], LEFT_EYE_OUTLINE[LEFT_EYE_OUTLINE.length - 1]];\nconst RIGHT_EYE_OUTLINE = keypoints.MESH_ANNOTATIONS['rightEyeLower0'];\nconst RIGHT_EYE_BOUNDS = [RIGHT_EYE_OUTLINE[0], RIGHT_EYE_OUTLINE[RIGHT_EYE_OUTLINE.length - 1]];\nconst IRIS_UPPER_CENTER_INDEX = 3;\nconst IRIS_LOWER_CENTER_INDEX = 4;\nconst IRIS_IRIS_INDEX = 71;\nconst IRIS_NUM_COORDINATES = 76;\n\n// Replace the raw coordinates returned by facemesh with refined iris model coordinates. Update the z coordinate to be an average of the original and the new. 
This produces the best visual effect.\nfunction replaceRawCoordinates(rawCoords, newCoords, prefix, keys) {\n for (let i = 0; i < keypoints.MESH_TO_IRIS_INDICES_MAP.length; i++) {\n const { key, indices } = keypoints.MESH_TO_IRIS_INDICES_MAP[i];\n const originalIndices = keypoints.MESH_ANNOTATIONS[`${prefix}${key}`];\n const shouldReplaceAllKeys = keys == null;\n if (shouldReplaceAllKeys || keys.includes(key)) {\n for (let j = 0; j < indices.length; j++) {\n const index = indices[j];\n rawCoords[originalIndices[j]] = [\n newCoords[index][0], newCoords[index][1],\n (newCoords[index][2] + rawCoords[originalIndices[j]][2]) / 2,\n ];\n }\n }\n }\n}\n// The Pipeline coordinates between the bounding box and skeleton models.\nclass Pipeline {\n constructor(boundingBoxDetector, meshDetector, irisModel, config) {\n // An array of facial bounding boxes.\n this.storedBoxes = [];\n this.runsWithoutFaceDetector = 0;\n this.boundingBoxDetector = boundingBoxDetector;\n this.meshDetector = meshDetector;\n this.irisModel = irisModel;\n this.meshWidth = config.mesh.inputSize;\n this.meshHeight = config.mesh.inputSize;\n this.irisSize = config.iris.inputSize;\n this.irisEnlarge = 2.3;\n this.skipped = 1000;\n this.detectedFaces = 0;\n }\n\n transformRawCoords(rawCoords, box, angle, rotationMatrix) {\n const boxSize = bounding.getBoxSize({ startPoint: box.startPoint, endPoint: box.endPoint });\n const scaleFactor = [boxSize[0] / this.meshWidth, boxSize[1] / this.meshHeight];\n const coordsScaled = rawCoords.map((coord) => ([\n scaleFactor[0] * (coord[0] - this.meshWidth / 2),\n scaleFactor[1] * (coord[1] - this.meshHeight / 2), coord[2],\n ]));\n const coordsRotationMatrix = util.buildRotationMatrix(angle, [0, 0]);\n const coordsRotated = coordsScaled.map((coord) => ([...util.rotatePoint(coord, coordsRotationMatrix), coord[2]]));\n const inverseRotationMatrix = util.invertTransformMatrix(rotationMatrix);\n const boxCenter = [...bounding.getBoxCenter({ startPoint: box.startPoint, endPoint: box.endPoint }), 1];\n const originalBoxCenter = [\n util.dot(boxCenter, inverseRotationMatrix[0]),\n util.dot(boxCenter, inverseRotationMatrix[1]),\n ];\n return coordsRotated.map((coord) => ([\n coord[0] + originalBoxCenter[0],\n coord[1] + originalBoxCenter[1], coord[2],\n ]));\n }\n\n getLeftToRightEyeDepthDifference(rawCoords) {\n const leftEyeZ = rawCoords[LEFT_EYE_BOUNDS[0]][2];\n const rightEyeZ = rawCoords[RIGHT_EYE_BOUNDS[0]][2];\n return leftEyeZ - rightEyeZ;\n }\n\n // Returns a box describing a cropped region around the eye fit for passing to the iris model.\n getEyeBox(rawCoords, face, eyeInnerCornerIndex, eyeOuterCornerIndex, flip = false) {\n const box = bounding.squarifyBox(bounding.enlargeBox(this.calculateLandmarksBoundingBox([rawCoords[eyeInnerCornerIndex], rawCoords[eyeOuterCornerIndex]]), this.irisEnlarge));\n const boxSize = bounding.getBoxSize(box);\n let crop = tf.image.cropAndResize(face, [[\n box.startPoint[1] / this.meshHeight,\n box.startPoint[0] / this.meshWidth, box.endPoint[1] / this.meshHeight,\n box.endPoint[0] / this.meshWidth,\n ]], [0], [this.irisSize, this.irisSize]);\n if (flip) {\n crop = tf.image.flipLeftRight(crop);\n }\n return { box, boxSize, crop };\n }\n\n // Given a cropped image of an eye, returns the coordinates of the contours surrounding the eye and the iris.\n getEyeCoords(eyeData, eyeBox, eyeBoxSize, flip = false) {\n const eyeRawCoords = [];\n for (let i = 0; i < IRIS_NUM_COORDINATES; i++) {\n const x = eyeData[i * 3];\n const y = eyeData[i * 3 + 1];\n const z = eyeData[i 
* 3 + 2];\n eyeRawCoords.push([\n (flip\n ? (1 - (x / this.irisSize))\n : (x / this.irisSize)) * eyeBoxSize[0] + eyeBox.startPoint[0],\n (y / this.irisSize) * eyeBoxSize[1] + eyeBox.startPoint[1], z,\n ]);\n }\n return { rawCoords: eyeRawCoords, iris: eyeRawCoords.slice(IRIS_IRIS_INDEX) };\n }\n\n // The z-coordinates returned for the iris are unreliable, so we take the z values from the surrounding keypoints.\n getAdjustedIrisCoords(rawCoords, irisCoords, direction) {\n const upperCenterZ = rawCoords[keypoints.MESH_ANNOTATIONS[`${direction}EyeUpper0`][IRIS_UPPER_CENTER_INDEX]][2];\n const lowerCenterZ = rawCoords[keypoints.MESH_ANNOTATIONS[`${direction}EyeLower0`][IRIS_LOWER_CENTER_INDEX]][2];\n const averageZ = (upperCenterZ + lowerCenterZ) / 2;\n // Iris indices: 0: center | 1: right | 2: above | 3: left | 4: below\n return irisCoords.map((coord, i) => {\n let z = averageZ;\n if (i === 2) {\n z = upperCenterZ;\n } else if (i === 4) {\n z = lowerCenterZ;\n }\n return [coord[0], coord[1], z];\n });\n }\n\n async predict(input, config) {\n this.skipped++;\n let useFreshBox = false;\n // run new detector every skipFrames unless we only want box to start with\n let detector;\n if ((this.skipped > config.detector.skipFrames) || !config.mesh.enabled) {\n detector = await this.boundingBoxDetector.getBoundingBoxes(input);\n // don't reset on test image\n if ((input.shape[1] !== 255) && (input.shape[2] !== 255)) this.skipped = 0;\n }\n\n // if detector result count doesn't match current working set, use it to reset current working set\n if (detector && detector.boxes && (detector.boxes.length > 0) && (!config.mesh.enabled || (detector.boxes.length !== this.detectedFaces) && (this.detectedFaces !== config.detector.maxFaces))) {\n this.storedBoxes = [];\n this.detectedFaces = 0;\n for (const possible of detector.boxes) {\n this.storedBoxes.push({ startPoint: possible.box.startPoint.dataSync(), endPoint: possible.box.endPoint.dataSync(), landmarks: possible.landmarks, confidence: possible.confidence });\n }\n if (this.storedBoxes.length > 0) useFreshBox = true;\n }\n\n if (useFreshBox) {\n if (!detector || !detector.boxes || (detector.boxes.length === 0)) {\n this.storedBoxes = [];\n this.detectedFaces = 0;\n return null;\n }\n for (const i in this.storedBoxes) {\n const scaledBox = bounding.scaleBoxCoordinates({ startPoint: this.storedBoxes[i].startPoint, endPoint: this.storedBoxes[i].endPoint }, detector.scaleFactor);\n const enlargedBox = bounding.enlargeBox(scaledBox);\n const landmarks = this.storedBoxes[i].landmarks.arraySync();\n const confidence = this.storedBoxes[i].confidence;\n this.storedBoxes[i] = { ...enlargedBox, confidence, landmarks };\n }\n this.runsWithoutFaceDetector = 0;\n }\n if (detector && detector.boxes) {\n detector.boxes.forEach((prediction) => {\n prediction.box.startPoint.dispose();\n prediction.box.endPoint.dispose();\n prediction.landmarks.dispose();\n });\n }\n\n // console.log(this.skipped, config.detector.skipFrames, this.detectedFaces, config.detector.maxFaces, detector?.boxes.length, this.storedBoxes.length);\n\n let results = tf.tidy(() => this.storedBoxes.map((box, i) => {\n let angle = 0;\n // The facial bounding box landmarks could come either from blazeface (if we are using a fresh box), or from the mesh model (if we are reusing an old box).\n const boxLandmarksFromMeshModel = box.landmarks.length >= LANDMARKS_COUNT;\n let [indexOfMouth, indexOfForehead] = MESH_KEYPOINTS_LINE_OF_SYMMETRY_INDICES;\n if (boxLandmarksFromMeshModel === false) {\n [indexOfMouth, 
indexOfForehead] = BLAZEFACE_KEYPOINTS_LINE_OF_SYMMETRY_INDICES;\n }\n angle = util.computeRotation(box.landmarks[indexOfMouth], box.landmarks[indexOfForehead]);\n const faceCenter = bounding.getBoxCenter({ startPoint: box.startPoint, endPoint: box.endPoint });\n const faceCenterNormalized = [faceCenter[0] / input.shape[2], faceCenter[1] / input.shape[1]];\n let rotatedImage = input;\n let rotationMatrix = util.IDENTITY_MATRIX;\n if (angle !== 0) {\n rotatedImage = tf.image.rotateWithOffset(input, angle, 0, faceCenterNormalized);\n rotationMatrix = util.buildRotationMatrix(-angle, faceCenter);\n }\n const boxCPU = { startPoint: box.startPoint, endPoint: box.endPoint };\n const face = bounding.cutBoxFromImageAndResize(boxCPU, rotatedImage, [this.meshHeight, this.meshWidth]).div(255);\n\n // if we're not going to produce mesh, don't spend time with further processing\n if (!config.mesh.enabled) {\n const prediction = {\n coords: null,\n box,\n faceConfidence: null,\n confidence: box.confidence,\n image: face,\n };\n return prediction;\n }\n\n // The first returned tensor represents facial contours, which are included in the coordinates.\n const [, confidence, coords] = this.meshDetector.predict(face);\n const confidenceVal = confidence.dataSync()[0];\n confidence.dispose();\n if (confidenceVal < config.detector.minConfidence) {\n coords.dispose();\n return null;\n }\n const coordsReshaped = tf.reshape(coords, [-1, 3]);\n let rawCoords = coordsReshaped.arraySync();\n if (config.iris.enabled) {\n const { box: leftEyeBox, boxSize: leftEyeBoxSize, crop: leftEyeCrop } = this.getEyeBox(rawCoords, face, LEFT_EYE_BOUNDS[0], LEFT_EYE_BOUNDS[1], true);\n const { box: rightEyeBox, boxSize: rightEyeBoxSize, crop: rightEyeCrop } = this.getEyeBox(rawCoords, face, RIGHT_EYE_BOUNDS[0], RIGHT_EYE_BOUNDS[1]);\n const eyePredictions = (this.irisModel.predict(tf.concat([leftEyeCrop, rightEyeCrop])));\n const eyePredictionsData = eyePredictions.dataSync();\n eyePredictions.dispose();\n const leftEyeData = eyePredictionsData.slice(0, IRIS_NUM_COORDINATES * 3);\n const { rawCoords: leftEyeRawCoords, iris: leftIrisRawCoords } = this.getEyeCoords(leftEyeData, leftEyeBox, leftEyeBoxSize, true);\n const rightEyeData = eyePredictionsData.slice(IRIS_NUM_COORDINATES * 3);\n const { rawCoords: rightEyeRawCoords, iris: rightIrisRawCoords } = this.getEyeCoords(rightEyeData, rightEyeBox, rightEyeBoxSize);\n const leftToRightEyeDepthDifference = this.getLeftToRightEyeDepthDifference(rawCoords);\n if (Math.abs(leftToRightEyeDepthDifference) < 30) { // User is looking straight ahead.\n replaceRawCoordinates(rawCoords, leftEyeRawCoords, 'left');\n replaceRawCoordinates(rawCoords, rightEyeRawCoords, 'right');\n // If the user is looking to the left or to the right, the iris coordinates tend to diverge too much from the mesh coordinates for them to be merged. 
So we only update a single contour line above and below the eye.\n } else if (leftToRightEyeDepthDifference < 1) { // User is looking towards the right.\n replaceRawCoordinates(rawCoords, leftEyeRawCoords, 'left', ['EyeUpper0', 'EyeLower0']);\n } else { // User is looking towards the left.\n replaceRawCoordinates(rawCoords, rightEyeRawCoords, 'right', ['EyeUpper0', 'EyeLower0']);\n }\n const adjustedLeftIrisCoords = this.getAdjustedIrisCoords(rawCoords, leftIrisRawCoords, 'left');\n const adjustedRightIrisCoords = this.getAdjustedIrisCoords(rawCoords, rightIrisRawCoords, 'right');\n rawCoords = rawCoords.concat(adjustedLeftIrisCoords).concat(adjustedRightIrisCoords);\n }\n const transformedCoordsData = this.transformRawCoords(rawCoords, box, angle, rotationMatrix);\n tf.dispose(rawCoords);\n const landmarksBox = bounding.enlargeBox(this.calculateLandmarksBoundingBox(transformedCoordsData));\n const transformedCoords = tf.tensor2d(transformedCoordsData);\n const prediction = {\n coords: transformedCoords,\n box: landmarksBox,\n faceConfidence: confidenceVal,\n confidence: box.confidence,\n image: face,\n };\n this.storedBoxes[i] = { ...landmarksBox, landmarks: transformedCoords.arraySync(), confidence: box.confidence, faceConfidence: confidenceVal };\n return prediction;\n }));\n results = results.filter((a) => a !== null);\n this.detectedFaces = results.length;\n return results;\n }\n\n calculateLandmarksBoundingBox(landmarks) {\n const xs = landmarks.map((d) => d[0]);\n const ys = landmarks.map((d) => d[1]);\n const startPoint = [Math.min(...xs), Math.min(...ys)];\n const endPoint = [Math.max(...xs), Math.max(...ys)];\n return { startPoint, endPoint, landmarks };\n }\n}\nexports.Pipeline = Pipeline;\n", "exports.UV_COORDS = [\n [0.499976992607117, 0.652534008026123],\n [0.500025987625122, 0.547487020492554],\n [0.499974012374878, 0.602371990680695],\n [0.482113003730774, 0.471979022026062],\n [0.500150978565216, 0.527155995368958],\n [0.499909996986389, 0.498252987861633],\n [0.499523013830185, 0.40106201171875],\n [0.289712011814117, 0.380764007568359],\n [0.499954998493195, 0.312398016452789],\n [0.499987006187439, 0.269918978214264],\n [0.500023007392883, 0.107050001621246],\n [0.500023007392883, 0.666234016418457],\n [0.5000159740448, 0.679224014282227],\n [0.500023007392883, 0.692348003387451],\n [0.499976992607117, 0.695277988910675],\n [0.499976992607117, 0.70593398809433],\n [0.499976992607117, 0.719385027885437],\n [0.499976992607117, 0.737019002437592],\n [0.499967992305756, 0.781370997428894],\n [0.499816000461578, 0.562981009483337],\n [0.473773002624512, 0.573909997940063],\n [0.104906998574734, 0.254140973091125],\n [0.365929991006851, 0.409575998783112],\n [0.338757991790771, 0.41302502155304],\n [0.311120003461838, 0.409460008144379],\n [0.274657994508743, 0.389131009578705],\n [0.393361985683441, 0.403706014156342],\n [0.345234006643295, 0.344011008739471],\n [0.370094001293182, 0.346076011657715],\n [0.319321990013123, 0.347265005111694],\n [0.297903001308441, 0.353591024875641],\n [0.24779200553894, 0.410809993743896],\n [0.396889001131058, 0.842755019664764],\n [0.280097991228104, 0.375599980354309],\n [0.106310002505779, 0.399955987930298],\n [0.2099249958992, 0.391353011131287],\n [0.355807989835739, 0.534406006336212],\n [0.471751004457474, 0.65040397644043],\n [0.474155008792877, 0.680191993713379],\n [0.439785003662109, 0.657229006290436],\n [0.414617002010345, 0.66654098033905],\n [0.450374007225037, 0.680860996246338],\n [0.428770989179611, 0.682690978050232],\n 
[0.374971002340317, 0.727805018424988],\n [0.486716985702515, 0.547628998756409],\n [0.485300987958908, 0.527395009994507],\n [0.257764995098114, 0.314490020275116],\n [0.401223003864288, 0.455172002315521],\n [0.429818987846375, 0.548614978790283],\n [0.421351999044418, 0.533740997314453],\n [0.276895999908447, 0.532056987285614],\n [0.483370006084442, 0.499586999416351],\n [0.33721199631691, 0.282882988452911],\n [0.296391993761063, 0.293242990970612],\n [0.169294998049736, 0.193813979625702],\n [0.447580009698868, 0.302609980106354],\n [0.392390012741089, 0.353887975215912],\n [0.354490011930466, 0.696784019470215],\n [0.067304998636246, 0.730105042457581],\n [0.442739009857178, 0.572826027870178],\n [0.457098007202148, 0.584792017936707],\n [0.381974011659622, 0.694710969924927],\n [0.392388999462128, 0.694203019142151],\n [0.277076005935669, 0.271932005882263],\n [0.422551989555359, 0.563233017921448],\n [0.385919004678726, 0.281364023685455],\n [0.383103013038635, 0.255840003490448],\n [0.331431001424789, 0.119714021682739],\n [0.229923993349075, 0.232002973556519],\n [0.364500999450684, 0.189113974571228],\n [0.229622006416321, 0.299540996551514],\n [0.173287004232407, 0.278747975826263],\n [0.472878992557526, 0.666198015213013],\n [0.446828007698059, 0.668527007102966],\n [0.422762006521225, 0.673889994621277],\n [0.445307999849319, 0.580065965652466],\n [0.388103008270264, 0.693961024284363],\n [0.403039008378983, 0.706539988517761],\n [0.403629004955292, 0.693953037261963],\n [0.460041999816895, 0.557139039039612],\n [0.431158006191254, 0.692366003990173],\n [0.452181994915009, 0.692366003990173],\n [0.475387006998062, 0.692366003990173],\n [0.465828001499176, 0.779190003871918],\n [0.472328990697861, 0.736225962638855],\n [0.473087012767792, 0.717857003211975],\n [0.473122000694275, 0.704625964164734],\n [0.473033010959625, 0.695277988910675],\n [0.427942007780075, 0.695277988910675],\n [0.426479011774063, 0.703539967536926],\n [0.423162013292313, 0.711845993995667],\n [0.4183090031147, 0.720062971115112],\n [0.390094995498657, 0.639572978019714],\n [0.013953999616206, 0.560034036636353],\n [0.499913990497589, 0.58014702796936],\n [0.413199990987778, 0.69539999961853],\n [0.409626007080078, 0.701822996139526],\n [0.468080013990402, 0.601534962654114],\n [0.422728985548019, 0.585985004901886],\n [0.463079988956451, 0.593783974647522],\n [0.37211999297142, 0.47341400384903],\n [0.334562003612518, 0.496073007583618],\n [0.411671012639999, 0.546965003013611],\n [0.242175996303558, 0.14767599105835],\n [0.290776997804642, 0.201445996761322],\n [0.327338010072708, 0.256527006626129],\n [0.399509996175766, 0.748921036720276],\n [0.441727995872498, 0.261676013469696],\n [0.429764986038208, 0.187834024429321],\n [0.412198007106781, 0.108901023864746],\n [0.288955003023148, 0.398952007293701],\n [0.218936994671822, 0.435410976409912],\n [0.41278201341629, 0.398970007896423],\n [0.257135003805161, 0.355440020561218],\n [0.427684992551804, 0.437960982322693],\n [0.448339998722076, 0.536936044692993],\n [0.178560003638268, 0.45755398273468],\n [0.247308000922203, 0.457193970680237],\n [0.286267012357712, 0.467674970626831],\n [0.332827985286713, 0.460712015628815],\n [0.368755996227264, 0.447206974029541],\n [0.398963987827301, 0.432654976844788],\n [0.476410001516342, 0.405806005001068],\n [0.189241006970406, 0.523923993110657],\n [0.228962004184723, 0.348950982093811],\n [0.490725994110107, 0.562400996685028],\n [0.404670000076294, 0.485132992267609],\n [0.019469000399113, 
0.401564002037048],\n [0.426243007183075, 0.420431017875671],\n [0.396993011236191, 0.548797011375427],\n [0.266469985246658, 0.376977026462555],\n [0.439121007919312, 0.51895797252655],\n [0.032313998788595, 0.644356966018677],\n [0.419054001569748, 0.387154996395111],\n [0.462783008813858, 0.505746960639954],\n [0.238978996872902, 0.779744982719421],\n [0.198220998048782, 0.831938028335571],\n [0.107550002634525, 0.540755033493042],\n [0.183610007166862, 0.740257024765015],\n [0.134409993886948, 0.333683013916016],\n [0.385764002799988, 0.883153975009918],\n [0.490967005491257, 0.579378008842468],\n [0.382384985685349, 0.508572995662689],\n [0.174399003386497, 0.397670984268188],\n [0.318785011768341, 0.39623498916626],\n [0.343364000320435, 0.400596976280212],\n [0.396100014448166, 0.710216999053955],\n [0.187885001301765, 0.588537991046906],\n [0.430987000465393, 0.944064974784851],\n [0.318993002176285, 0.898285031318665],\n [0.266247987747192, 0.869701027870178],\n [0.500023007392883, 0.190576016902924],\n [0.499976992607117, 0.954452991485596],\n [0.366169989109039, 0.398822009563446],\n [0.393207013607025, 0.39553701877594],\n [0.410373002290726, 0.391080021858215],\n [0.194993004202843, 0.342101991176605],\n [0.388664990663528, 0.362284004688263],\n [0.365961998701096, 0.355970978736877],\n [0.343364000320435, 0.355356991291046],\n [0.318785011768341, 0.35834002494812],\n [0.301414996385574, 0.363156020641327],\n [0.058132998645306, 0.319076001644135],\n [0.301414996385574, 0.387449026107788],\n [0.499987989664078, 0.618434011936188],\n [0.415838003158569, 0.624195992946625],\n [0.445681989192963, 0.566076993942261],\n [0.465844005346298, 0.620640993118286],\n [0.49992299079895, 0.351523995399475],\n [0.288718998432159, 0.819945991039276],\n [0.335278987884521, 0.852819979190826],\n [0.440512001514435, 0.902418971061707],\n [0.128294005990028, 0.791940987110138],\n [0.408771991729736, 0.373893976211548],\n [0.455606997013092, 0.451801002025604],\n [0.499877005815506, 0.908990025520325],\n [0.375436991453171, 0.924192011356354],\n [0.11421000212431, 0.615022003650665],\n [0.448662012815475, 0.695277988910675],\n [0.4480200111866, 0.704632043838501],\n [0.447111994028091, 0.715808033943176],\n [0.444831997156143, 0.730794012546539],\n [0.430011987686157, 0.766808986663818],\n [0.406787008047104, 0.685672998428345],\n [0.400738000869751, 0.681069016456604],\n [0.392399996519089, 0.677703022956848],\n [0.367855995893478, 0.663918972015381],\n [0.247923001646996, 0.601333022117615],\n [0.452769994735718, 0.420849978923798],\n [0.43639200925827, 0.359887003898621],\n [0.416164010763168, 0.368713974952698],\n [0.413385987281799, 0.692366003990173],\n [0.228018000721931, 0.683571994304657],\n [0.468268007040024, 0.352671027183533],\n [0.411361992359161, 0.804327011108398],\n [0.499989002943039, 0.469825029373169],\n [0.479153990745544, 0.442654013633728],\n [0.499974012374878, 0.439637005329132],\n [0.432112008333206, 0.493588984012604],\n [0.499886006116867, 0.866917014122009],\n [0.49991300702095, 0.821729004383087],\n [0.456548988819122, 0.819200992584229],\n [0.344549000263214, 0.745438992977142],\n [0.37890899181366, 0.574010014533997],\n [0.374292999505997, 0.780184984207153],\n [0.319687992334366, 0.570737957954407],\n [0.357154995203018, 0.604269981384277],\n [0.295284003019333, 0.621580958366394],\n [0.447750002145767, 0.862477004528046],\n [0.410986006259918, 0.508723020553589],\n [0.31395098567009, 0.775308012962341],\n [0.354128003120422, 0.812552988529205],\n 
[0.324548006057739, 0.703992962837219],\n [0.189096003770828, 0.646299958229065],\n [0.279776990413666, 0.71465802192688],\n [0.1338230073452, 0.682700991630554],\n [0.336768001317978, 0.644733011722565],\n [0.429883986711502, 0.466521978378296],\n [0.455527991056442, 0.548622965812683],\n [0.437114000320435, 0.558896005153656],\n [0.467287987470627, 0.529924988746643],\n [0.414712011814117, 0.335219979286194],\n [0.37704598903656, 0.322777986526489],\n [0.344107985496521, 0.320150971412659],\n [0.312875986099243, 0.32233202457428],\n [0.283526003360748, 0.333190023899078],\n [0.241245999932289, 0.382785975933075],\n [0.102986000478268, 0.468762993812561],\n [0.267612010240555, 0.424560010433197],\n [0.297879010438919, 0.433175981044769],\n [0.333433985710144, 0.433878004550934],\n [0.366427004337311, 0.426115989685059],\n [0.396012008190155, 0.416696012020111],\n [0.420121014118195, 0.41022801399231],\n [0.007561000064015, 0.480777025222778],\n [0.432949006557465, 0.569517970085144],\n [0.458638995885849, 0.479089021682739],\n [0.473466008901596, 0.545744001865387],\n [0.476087987422943, 0.563830018043518],\n [0.468472003936768, 0.555056989192963],\n [0.433990985155106, 0.582361996173859],\n [0.483518004417419, 0.562983989715576],\n [0.482482999563217, 0.57784903049469],\n [0.42645001411438, 0.389798998832703],\n [0.438998997211456, 0.39649498462677],\n [0.450067013502121, 0.400434017181396],\n [0.289712011814117, 0.368252992630005],\n [0.276670008897781, 0.363372981548309],\n [0.517862021923065, 0.471948027610779],\n [0.710287988185883, 0.380764007568359],\n [0.526226997375488, 0.573909997940063],\n [0.895093023777008, 0.254140973091125],\n [0.634069979190826, 0.409575998783112],\n [0.661242008209229, 0.41302502155304],\n [0.688880026340485, 0.409460008144379],\n [0.725341975688934, 0.389131009578705],\n [0.606630027294159, 0.40370500087738],\n [0.654766023159027, 0.344011008739471],\n [0.629905998706818, 0.346076011657715],\n [0.680678009986877, 0.347265005111694],\n [0.702096998691559, 0.353591024875641],\n [0.75221198797226, 0.410804986953735],\n [0.602918028831482, 0.842862963676453],\n [0.719901978969574, 0.375599980354309],\n [0.893692970275879, 0.399959981441498],\n [0.790081977844238, 0.391354024410248],\n [0.643998026847839, 0.534487962722778],\n [0.528249025344849, 0.65040397644043],\n [0.525849997997284, 0.680191040039062],\n [0.560214996337891, 0.657229006290436],\n [0.585384011268616, 0.66654098033905],\n [0.549625992774963, 0.680860996246338],\n [0.57122802734375, 0.682691991329193],\n [0.624852001667023, 0.72809898853302],\n [0.513050019741058, 0.547281980514526],\n [0.51509702205658, 0.527251958847046],\n [0.742246985435486, 0.314507007598877],\n [0.598631024360657, 0.454979002475739],\n [0.570338010787964, 0.548575043678284],\n [0.578631997108459, 0.533622980117798],\n [0.723087012767792, 0.532054007053375],\n [0.516445994377136, 0.499638974666595],\n [0.662801027297974, 0.282917976379395],\n [0.70362401008606, 0.293271005153656],\n [0.830704987049103, 0.193813979625702],\n [0.552385985851288, 0.302568018436432],\n [0.607609987258911, 0.353887975215912],\n [0.645429015159607, 0.696707010269165],\n [0.932694971561432, 0.730105042457581],\n [0.557260990142822, 0.572826027870178],\n [0.542901992797852, 0.584792017936707],\n [0.6180260181427, 0.694710969924927],\n [0.607590973377228, 0.694203019142151],\n [0.722943007946014, 0.271963000297546],\n [0.577413976192474, 0.563166975975037],\n [0.614082992076874, 0.281386971473694],\n [0.616907000541687, 0.255886018276215],\n 
[0.668509006500244, 0.119913995265961],\n [0.770092010498047, 0.232020974159241],\n [0.635536015033722, 0.189248979091644],\n [0.77039098739624, 0.299556016921997],\n [0.826722025871277, 0.278755009174347],\n [0.527121007442474, 0.666198015213013],\n [0.553171992301941, 0.668527007102966],\n [0.577238023281097, 0.673889994621277],\n [0.554691970348358, 0.580065965652466],\n [0.611896991729736, 0.693961024284363],\n [0.59696102142334, 0.706539988517761],\n [0.596370995044708, 0.693953037261963],\n [0.539958000183105, 0.557139039039612],\n [0.568841993808746, 0.692366003990173],\n [0.547818005084991, 0.692366003990173],\n [0.52461302280426, 0.692366003990173],\n [0.534089982509613, 0.779141008853912],\n [0.527670979499817, 0.736225962638855],\n [0.526912987232208, 0.717857003211975],\n [0.526877999305725, 0.704625964164734],\n [0.526966989040375, 0.695277988910675],\n [0.572058022022247, 0.695277988910675],\n [0.573521018028259, 0.703539967536926],\n [0.57683801651001, 0.711845993995667],\n [0.581691026687622, 0.720062971115112],\n [0.609944999217987, 0.639909982681274],\n [0.986046016216278, 0.560034036636353],\n [0.5867999792099, 0.69539999961853],\n [0.590372025966644, 0.701822996139526],\n [0.531915009021759, 0.601536989212036],\n [0.577268004417419, 0.585934996604919],\n [0.536915004253387, 0.593786001205444],\n [0.627542972564697, 0.473352015018463],\n [0.665585994720459, 0.495950996875763],\n [0.588353991508484, 0.546862006187439],\n [0.757824003696442, 0.14767599105835],\n [0.709249973297119, 0.201507985591888],\n [0.672684013843536, 0.256581008434296],\n [0.600408971309662, 0.74900496006012],\n [0.55826598405838, 0.261672019958496],\n [0.570303976535797, 0.187870979309082],\n [0.588165998458862, 0.109044015407562],\n [0.711045026779175, 0.398952007293701],\n [0.781069993972778, 0.435405015945435],\n [0.587247014045715, 0.398931980133057],\n [0.742869973182678, 0.355445981025696],\n [0.572156012058258, 0.437651991844177],\n [0.55186802148819, 0.536570012569427],\n [0.821442008018494, 0.457556009292603],\n [0.752701997756958, 0.457181990146637],\n [0.71375697851181, 0.467626988887787],\n [0.66711300611496, 0.460672974586487],\n [0.631101012229919, 0.447153985500336],\n [0.6008620262146, 0.432473003864288],\n [0.523481011390686, 0.405627012252808],\n [0.810747981071472, 0.523926019668579],\n [0.771045982837677, 0.348959028720856],\n [0.509127020835876, 0.562718033790588],\n [0.595292985439301, 0.485023975372314],\n [0.980530977249146, 0.401564002037048],\n [0.573499977588654, 0.420000016689301],\n [0.602994978427887, 0.548687994480133],\n [0.733529984951019, 0.376977026462555],\n [0.560611009597778, 0.519016981124878],\n [0.967685997486115, 0.644356966018677],\n [0.580985009670258, 0.387160003185272],\n [0.537728011608124, 0.505385041236877],\n [0.760966002941132, 0.779752969741821],\n [0.801778972148895, 0.831938028335571],\n [0.892440974712372, 0.54076099395752],\n [0.816350996494293, 0.740260004997253],\n [0.865594983100891, 0.333687007427216],\n [0.614073991775513, 0.883246004581451],\n [0.508952975273132, 0.579437971115112],\n [0.617941975593567, 0.508316040039062],\n [0.825608015060425, 0.397674977779388],\n [0.681214988231659, 0.39623498916626],\n [0.656635999679565, 0.400596976280212],\n [0.603900015354156, 0.710216999053955],\n [0.81208598613739, 0.588539004325867],\n [0.56801301240921, 0.944564998149872],\n [0.681007981300354, 0.898285031318665],\n [0.733752012252808, 0.869701027870178],\n [0.633830010890961, 0.398822009563446],\n [0.606792986392975, 0.39553701877594],\n 
[0.589659988880157, 0.391062021255493],\n [0.805015981197357, 0.342108011245728],\n [0.611334979534149, 0.362284004688263],\n [0.634037971496582, 0.355970978736877],\n [0.656635999679565, 0.355356991291046],\n [0.681214988231659, 0.35834002494812],\n [0.698584973812103, 0.363156020641327],\n [0.941866993904114, 0.319076001644135],\n [0.698584973812103, 0.387449026107788],\n [0.584177017211914, 0.624107003211975],\n [0.554318010807037, 0.566076993942261],\n [0.534153997898102, 0.62064003944397],\n [0.711217999458313, 0.819975018501282],\n [0.664629995822906, 0.852871000766754],\n [0.559099972248077, 0.902631998062134],\n [0.871706008911133, 0.791940987110138],\n [0.591234028339386, 0.373893976211548],\n [0.544341027736664, 0.451583981513977],\n [0.624562978744507, 0.924192011356354],\n [0.88577002286911, 0.615028977394104],\n [0.551338016986847, 0.695277988910675],\n [0.551980018615723, 0.704632043838501],\n [0.552887976169586, 0.715808033943176],\n [0.555167973041534, 0.730794012546539],\n [0.569944024085999, 0.767035007476807],\n [0.593203008174896, 0.685675978660583],\n [0.599261999130249, 0.681069016456604],\n [0.607599973678589, 0.677703022956848],\n [0.631937980651855, 0.663500010967255],\n [0.752032995223999, 0.601315021514893],\n [0.547226011753082, 0.420395016670227],\n [0.563543975353241, 0.359827995300293],\n [0.583841025829315, 0.368713974952698],\n [0.586614012718201, 0.692366003990173],\n [0.771915018558502, 0.683578014373779],\n [0.531597018241882, 0.352482974529266],\n [0.588370978832245, 0.804440975189209],\n [0.52079701423645, 0.442565023899078],\n [0.567984998226166, 0.493479013442993],\n [0.543282985687256, 0.819254994392395],\n [0.655317008495331, 0.745514988899231],\n [0.621008992195129, 0.574018001556396],\n [0.625559985637665, 0.78031200170517],\n [0.680198013782501, 0.570719003677368],\n [0.64276397228241, 0.604337990283966],\n [0.704662978649139, 0.621529996395111],\n [0.552012026309967, 0.862591981887817],\n [0.589071989059448, 0.508637011051178],\n [0.685944974422455, 0.775357007980347],\n [0.645735025405884, 0.812640011310577],\n [0.675342977046967, 0.703978002071381],\n [0.810858011245728, 0.646304965019226],\n [0.72012197971344, 0.714666962623596],\n [0.866151988506317, 0.682704985141754],\n [0.663187026977539, 0.644596993923187],\n [0.570082008838654, 0.466325998306274],\n [0.544561982154846, 0.548375964164734],\n [0.562758982181549, 0.558784961700439],\n [0.531987011432648, 0.530140042304993],\n [0.585271000862122, 0.335177004337311],\n [0.622952997684479, 0.32277899980545],\n [0.655896008014679, 0.320163011550903],\n [0.687132000923157, 0.322345972061157],\n [0.716481983661652, 0.333200991153717],\n [0.758756995201111, 0.382786989212036],\n [0.897013008594513, 0.468769013881683],\n [0.732392013072968, 0.424547016620636],\n [0.70211398601532, 0.433162987232208],\n [0.66652500629425, 0.433866024017334],\n [0.633504986763, 0.426087975502014],\n [0.603875994682312, 0.416586995124817],\n [0.579657971858978, 0.409945011138916],\n [0.992439985275269, 0.480777025222778],\n [0.567192018032074, 0.569419980049133],\n [0.54136598110199, 0.478899002075195],\n [0.526564002037048, 0.546118021011353],\n [0.523913025856018, 0.563830018043518],\n [0.531529009342194, 0.555056989192963],\n [0.566035985946655, 0.582329034805298],\n [0.51631098985672, 0.563053965568542],\n [0.5174720287323, 0.577877044677734],\n [0.573594987392426, 0.389806985855103],\n [0.560697972774506, 0.395331978797913],\n [0.549755990505219, 0.399751007556915],\n [0.710287988185883, 0.368252992630005],\n 
[0.723330020904541, 0.363372981548309],\n];\n", "import { tf, loadGraphModel } from '../tf.js';\nimport * as blazeface from './blazeface';\nimport * as keypoints from './keypoints';\nimport * as pipe from './facepipeline';\nimport * as uv_coords from './uvcoords';\nimport * as triangulation from './triangulation';\n\nclass MediaPipeFaceMesh {\n constructor(blazeFace, blazeMeshModel, irisModel, config) {\n this.pipeline = new pipe.Pipeline(blazeFace, blazeMeshModel, irisModel, config);\n if (config) this.config = config;\n }\n\n async estimateFaces(input, config) {\n if (config) this.config = config;\n const predictions = await this.pipeline.predict(input, config);\n const results = [];\n for (const prediction of (predictions || [])) {\n // guard against disposed tensors on long running operations such as pause in middle of processing\n if (prediction.isDisposedInternal) continue;\n const mesh = prediction.coords ? prediction.coords.arraySync() : null;\n const annotations = {};\n if (mesh && mesh.length > 0) {\n for (const key in keypoints.MESH_ANNOTATIONS) {\n if (this.config.iris.enabled || key.includes('Iris') === false) {\n annotations[key] = keypoints.MESH_ANNOTATIONS[key].map((index) => mesh[index]);\n }\n }\n }\n results.push({\n confidence: prediction.confidence || 0,\n box: prediction.box ? [prediction.box.startPoint[0], prediction.box.startPoint[1], prediction.box.endPoint[0] - prediction.box.startPoint[0], prediction.box.endPoint[1] - prediction.box.startPoint[1]] : 0,\n mesh,\n annotations,\n image: prediction.image ? tf.clone(prediction.image) : null,\n });\n if (prediction.coords) prediction.coords.dispose();\n if (prediction.image) prediction.image.dispose();\n }\n return results;\n }\n}\n\nasync function load(config) {\n const models = await Promise.all([\n blazeface.load(config),\n loadGraphModel(config.mesh.modelPath, { fromTFHub: config.mesh.modelPath.includes('tfhub.dev') }),\n loadGraphModel(config.iris.modelPath, { fromTFHub: config.iris.modelPath.includes('tfhub.dev') }),\n ]);\n const faceMesh = new MediaPipeFaceMesh(models[0], models[1], models[2], config);\n // eslint-disable-next-line no-console\n console.log(`Human: load model: ${config.mesh.modelPath.match(/\\/(.*)\\./)[1]}`);\n // eslint-disable-next-line no-console\n console.log(`Human: load model: ${config.iris.modelPath.match(/\\/(.*)\\./)[1]}`);\n return faceMesh;\n}\n\nexports.load = load;\nexports.MediaPipeFaceMesh = MediaPipeFaceMesh;\nexports.uv_coords = uv_coords;\nexports.triangulation = triangulation.default;\n", "const profileData = {};\n\nfunction profile(name, data) {\n if (!data || !data.kernels) return;\n const maxResults = 5;\n const time = data.kernels\n .filter((a) => a.kernelTimeMs > 0)\n .reduce((a, b) => a += b.kernelTimeMs, 0);\n const slowest = data.kernels\n .map((a, i) => { a.id = i; return a; })\n .filter((a) => a.kernelTimeMs > 0)\n .sort((a, b) => b.kernelTimeMs - a.kernelTimeMs);\n const largest = data.kernels\n .map((a, i) => { a.id = i; return a; })\n .filter((a) => a.totalBytesSnapshot > 0)\n .sort((a, b) => b.totalBytesSnapshot - a.totalBytesSnapshot);\n if (slowest.length > maxResults) slowest.length = maxResults;\n if (largest.length > maxResults) largest.length = maxResults;\n const res = { newBytes: data.newBytes, newTensors: data.newTensors, peakBytes: data.peakBytes, numKernelOps: data.kernels.length, timeKernelOps: time, slowestKernelOps: slowest, largestKernelOps: largest };\n profileData[name] = res;\n // eslint-disable-next-line no-console\n console.log('Human 
profiler', name, res);\n}\n\nexports.run = profile;\n", "import { tf, loadGraphModel } from '../tf.js';\nimport * as profile from '../profile.js';\n\nconst models = {};\nlet last = { age: 0 };\nlet frame = Number.MAX_SAFE_INTEGER;\n\n// tuning values\nconst zoom = [0, 0]; // 0..1 meaning 0%..100%\n\nasync function load(config) {\n if (!models.age) {\n models.age = await loadGraphModel(config.face.age.modelPath);\n // eslint-disable-next-line no-console\n console.log(`Human: load model: ${config.face.age.modelPath.match(/\\/(.*)\\./)[1]}`);\n }\n return models.age;\n}\n\nasync function predict(image, config) {\n if ((frame < config.face.age.skipFrames) && last.age && (last.age > 0)) {\n frame += 1;\n return last;\n }\n frame = 0;\n return new Promise(async (resolve) => {\n const box = [[\n (image.shape[1] * zoom[0]) / image.shape[1],\n (image.shape[2] * zoom[1]) / image.shape[2],\n (image.shape[1] - (image.shape[1] * zoom[0])) / image.shape[1],\n (image.shape[2] - (image.shape[2] * zoom[1])) / image.shape[2],\n ]];\n const resize = tf.image.cropAndResize(image, box, [0], [config.face.age.inputSize, config.face.age.inputSize]);\n // const resize = tf.image.resizeBilinear(image, [config.face.age.inputSize, config.face.age.inputSize], false);\n const enhance = tf.mul(resize, [255.0]);\n tf.dispose(resize);\n\n let ageT;\n const obj = {};\n\n if (!config.profile) {\n if (config.face.age.enabled) ageT = await models.age.predict(enhance);\n } else {\n const profileAge = config.face.age.enabled ? await tf.profile(() => models.age.predict(enhance)) : {};\n ageT = profileAge.result.clone();\n profileAge.result.dispose();\n profile.run('age', profileAge);\n }\n enhance.dispose();\n\n if (ageT) {\n const data = ageT.dataSync();\n obj.age = Math.trunc(10 * data[0]) / 10;\n }\n ageT.dispose();\n\n last = obj;\n resolve(obj);\n });\n}\n\nexports.predict = predict;\nexports.load = load;\n", "import { tf, loadGraphModel } from '../tf.js';\nimport * as profile from '../profile.js';\n\nconst models = {};\nlet last = { gender: '' };\nlet frame = Number.MAX_SAFE_INTEGER;\nlet alternative = false;\n\n// tuning values\nconst zoom = [0, 0]; // 0..1 meaning 0%..100%\nconst rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when converting to grayscale\n\nasync function load(config) {\n if (!models.gender) {\n models.gender = await loadGraphModel(config.face.gender.modelPath);\n alternative = models.gender.inputs[0].shape[3] === 1;\n // eslint-disable-next-line no-console\n console.log(`Human: load model: ${config.face.gender.modelPath.match(/\\/(.*)\\./)[1]}`);\n }\n return models.gender;\n}\n\nasync function predict(image, config) {\n if ((frame < config.face.gender.skipFrames) && last.gender !== '') {\n frame += 1;\n return last;\n }\n frame = 0;\n return new Promise(async (resolve) => {\n const box = [[\n (image.shape[1] * zoom[0]) / image.shape[1],\n (image.shape[2] * zoom[1]) / image.shape[2],\n (image.shape[1] - (image.shape[1] * zoom[0])) / image.shape[1],\n (image.shape[2] - (image.shape[2] * zoom[1])) / image.shape[2],\n ]];\n const resize = tf.image.cropAndResize(image, box, [0], [config.face.gender.inputSize, config.face.gender.inputSize]);\n let enhance;\n if (alternative) {\n enhance = tf.tidy(() => {\n const [red, green, blue] = tf.split(resize, 3, 3);\n const redNorm = tf.mul(red, rgb[0]);\n const greenNorm = tf.mul(green, rgb[1]);\n const blueNorm = tf.mul(blue, rgb[2]);\n const grayscale = tf.addN([redNorm, greenNorm, blueNorm]);\n return grayscale.sub(0.5).mul(2);\n });\n } else 
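// Illustrative sketch only, not part of the bundled sources above: what the age predictor's
// crop-box preprocessing reduces to with its default zoom of [0, 0]. The tensor shape, the
// 64px input size and the use of plain @tensorflow/tfjs (instead of the bundle's ../tf.js
// wrapper) are assumptions for this example.
const tf = require('@tensorflow/tfjs');
const face = tf.zeros([1, 128, 128, 3]);          // stand-in for a detected face crop with values in 0..1
const zoomFactors = [0, 0];
const cropBox = [[zoomFactors[0], zoomFactors[1], 1 - zoomFactors[0], 1 - zoomFactors[1]]]; // full frame when zoom is 0
const resized = tf.image.cropAndResize(face, cropBox, [0], [64, 64]); // crop (no-op here) and resize to model input
const enhanced = tf.mul(resized, [255.0]);        // rescale 0..1 to 0..255, matching the code above
tf.dispose([face, resized, enhanced]);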
{\n enhance = tf.mul(resize, [255.0]);\n }\n // const resize = tf.image.resizeBilinear(image, [config.face.age.inputSize, config.face.age.inputSize], false);\n tf.dispose(resize);\n\n let genderT;\n const obj = {};\n\n if (!config.profile) {\n if (config.face.gender.enabled) genderT = await models.gender.predict(enhance);\n } else {\n const profileGender = config.face.gender.enabled ? await tf.profile(() => models.gender.predict(enhance)) : {};\n genderT = profileGender.result.clone();\n profileGender.result.dispose();\n profile.run('gender', profileGender);\n }\n enhance.dispose();\n\n if (genderT) {\n const data = genderT.dataSync();\n if (alternative) {\n // returns two values 0..1, bigger one is prediction\n const confidence = Math.trunc(100 * Math.abs(data[0] - data[1])) / 100;\n if (confidence > config.face.gender.minConfidence) {\n obj.gender = data[0] > data[1] ? 'female' : 'male';\n obj.confidence = confidence;\n }\n } else {\n // returns one value 0..1, .5 is prediction threshold\n const confidence = Math.trunc(200 * Math.abs((data[0] - 0.5))) / 100;\n if (confidence > config.face.gender.minConfidence) {\n obj.gender = data[0] <= 0.5 ? 'female' : 'male';\n obj.confidence = Math.min(0.99, confidence);\n }\n }\n }\n genderT.dispose();\n\n last = obj;\n resolve(obj);\n });\n}\n\nexports.predict = predict;\nexports.load = load;\n", "import { tf, loadGraphModel } from '../tf.js';\nimport * as profile from '../profile.js';\n\nconst annotations = ['angry', 'disgust', 'fear', 'happy', 'sad', 'surpise', 'neutral'];\nconst models = {};\nlet last = [];\nlet frame = Number.MAX_SAFE_INTEGER;\n\n// tuning values\nconst zoom = [0, 0]; // 0..1 meaning 0%..100%\nconst rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when converting to grayscale\nconst scale = 1; // score multiplication factor\n\nasync function load(config) {\n if (!models.emotion) {\n models.emotion = await loadGraphModel(config.face.emotion.modelPath);\n // eslint-disable-next-line no-console\n console.log(`Human: load model: ${config.face.emotion.modelPath.match(/\\/(.*)\\./)[1]}`);\n }\n return models.emotion;\n}\n\nasync function predict(image, config) {\n if ((frame < config.face.emotion.skipFrames) && (last.length > 0)) {\n frame += 1;\n return last;\n }\n frame = 0;\n return new Promise(async (resolve) => {\n const box = [[\n (image.shape[1] * zoom[0]) / image.shape[1],\n (image.shape[2] * zoom[1]) / image.shape[2],\n (image.shape[1] - (image.shape[1] * zoom[0])) / image.shape[1],\n (image.shape[2] - (image.shape[2] * zoom[1])) / image.shape[2],\n ]];\n const resize = tf.image.cropAndResize(image, box, [0], [config.face.emotion.inputSize, config.face.emotion.inputSize]);\n // const resize = tf.image.resizeBilinear(image, [config.face.emotion.inputSize, config.face.emotion.inputSize], false);\n const [red, green, blue] = tf.split(resize, 3, 3);\n resize.dispose();\n // weighted rgb to grayscale: https://www.mathworks.com/help/matlab/ref/rgb2gray.html\n const redNorm = tf.mul(red, rgb[0]);\n const greenNorm = tf.mul(green, rgb[1]);\n const blueNorm = tf.mul(blue, rgb[2]);\n red.dispose();\n green.dispose();\n blue.dispose();\n const grayscale = tf.addN([redNorm, greenNorm, blueNorm]);\n redNorm.dispose();\n greenNorm.dispose();\n blueNorm.dispose();\n const normalize = tf.tidy(() => grayscale.sub(0.5).mul(2));\n grayscale.dispose();\n const obj = [];\n if (config.face.emotion.enabled) {\n let data;\n if (!config.profile) {\n const emotionT = await models.emotion.predict(normalize);\n data = 
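// Illustrative sketch only, not part of the bundle: the two decision rules used by the gender
// predictor above, re-expressed as standalone functions and applied to made-up model outputs.
function decideGenderTwoOutput(data, minConfidence) {
  // model returns two values in 0..1; the larger one wins, confidence is their truncated difference
  const confidence = Math.trunc(100 * Math.abs(data[0] - data[1])) / 100;
  if (confidence <= minConfidence) return {};
  return { gender: data[0] > data[1] ? 'female' : 'male', confidence };
}
function decideGenderSingleOutput(data, minConfidence) {
  // model returns one value in 0..1 with 0.5 as the decision boundary
  const confidence = Math.trunc(200 * Math.abs(data[0] - 0.5)) / 100;
  if (confidence <= minConfidence) return {};
  return { gender: data[0] <= 0.5 ? 'female' : 'male', confidence: Math.min(0.99, confidence) };
}
// hypothetical outputs: [0.86, 0.12] -> { gender: 'female', confidence: 0.74 }
//                       [0.9]        -> { gender: 'male',   confidence: 0.8 }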
emotionT.dataSync();\n tf.dispose(emotionT);\n } else {\n const profileData = await tf.profile(() => models.emotion.predict(normalize));\n data = profileData.result.dataSync();\n profileData.result.dispose();\n profile.run('emotion', profileData);\n }\n for (let i = 0; i < data.length; i++) {\n if (scale * data[i] > config.face.emotion.minConfidence) obj.push({ score: Math.min(0.99, Math.trunc(100 * scale * data[i]) / 100), emotion: annotations[i] });\n }\n obj.sort((a, b) => b.score - a.score);\n }\n normalize.dispose();\n last = obj;\n resolve(obj);\n });\n}\n\nexports.predict = predict;\nexports.load = load;\n", "import { tf } from '../tf.js';\n\nclass BaseModel {\n constructor(model, outputStride) {\n this.model = model;\n this.outputStride = outputStride;\n }\n\n predict(input) {\n return tf.tidy(() => {\n const asFloat = this.preprocessInput(input.toFloat());\n const asBatch = asFloat.expandDims(0);\n const results = this.model.predict(asBatch);\n const results3d = results.map((y) => y.squeeze([0]));\n const namedResults = this.nameOutputResults(results3d);\n return {\n heatmapScores: namedResults.heatmap.sigmoid(),\n offsets: namedResults.offsets,\n displacementFwd: namedResults.displacementFwd,\n displacementBwd: namedResults.displacementBwd,\n };\n });\n }\n\n /**\n * Releases the CPU and GPU memory allocated by the model.\n */\n dispose() {\n this.model.dispose();\n }\n}\nexports.BaseModel = BaseModel;\n", "import { tf } from '../tf.js';\nimport * as modelBase from './modelBase';\n\nclass MobileNet extends modelBase.BaseModel {\n // eslint-disable-next-line class-methods-use-this\n preprocessInput(input) {\n // Normalize the pixels [0, 255] to be between [-1, 1].\n return tf.tidy(() => tf.div(input, 127.5).sub(1.0));\n }\n\n // eslint-disable-next-line class-methods-use-this\n nameOutputResults(results) {\n const [offsets, heatmap, displacementFwd, displacementBwd] = results;\n return { offsets, heatmap, displacementFwd, displacementBwd };\n }\n}\nexports.MobileNet = MobileNet;\n", "// algorithm based on Coursera Lecture from Algorithms, Part 1: https://www.coursera.org/learn/algorithms-part1/lecture/ZjoSM/heapsort\nfunction half(k) {\n return Math.floor(k / 2);\n}\nclass MaxHeap {\n constructor(maxSize, getElementValue) {\n this.priorityQueue = new Array(maxSize);\n this.numberOfElements = -1;\n this.getElementValue = getElementValue;\n }\n\n enqueue(x) {\n this.priorityQueue[++this.numberOfElements] = x;\n this.swim(this.numberOfElements);\n }\n\n dequeue() {\n const max = this.priorityQueue[0];\n this.exchange(0, this.numberOfElements--);\n this.sink(0);\n this.priorityQueue[this.numberOfElements + 1] = null;\n return max;\n }\n\n empty() {\n return this.numberOfElements === -1;\n }\n\n size() {\n return this.numberOfElements + 1;\n }\n\n all() {\n return this.priorityQueue.slice(0, this.numberOfElements + 1);\n }\n\n max() {\n return this.priorityQueue[0];\n }\n\n swim(k) {\n while (k > 0 && this.less(half(k), k)) {\n this.exchange(k, half(k));\n k = half(k);\n }\n }\n\n sink(k) {\n while (2 * k <= this.numberOfElements) {\n let j = 2 * k;\n if (j < this.numberOfElements && this.less(j, j + 1)) j++;\n if (!this.less(k, j)) break;\n this.exchange(k, j);\n k = j;\n }\n }\n\n getValueAt(i) {\n return this.getElementValue(this.priorityQueue[i]);\n }\n\n less(i, j) {\n return this.getValueAt(i) < this.getValueAt(j);\n }\n\n exchange(i, j) {\n const t = this.priorityQueue[i];\n this.priorityQueue[i] = this.priorityQueue[j];\n this.priorityQueue[j] = t;\n }\n}\nexports.MaxHeap = 
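// Illustrative sketch only: how the MaxHeap defined above orders its elements. It assumes the
// MaxHeap class from the heapSort module is in scope; the sample scores and part names are made up.
const heap = new MaxHeap(10, (item) => item.score); // capacity 10, ordered by item.score
heap.enqueue({ score: 0.3, part: 'leftEye' });
heap.enqueue({ score: 0.9, part: 'nose' });
heap.enqueue({ score: 0.6, part: 'rightEye' });
while (!heap.empty()) {
  // dequeue() always returns the remaining element with the highest score:
  // nose (0.9), then rightEye (0.6), then leftEye (0.3)
  const candidate = heap.dequeue();
  console.log(candidate.part, candidate.score);
}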
MaxHeap;\n", "import * as heapSort from './heapSort';\n\nfunction scoreIsMaximumInLocalWindow(keypointId, score, heatmapY, heatmapX, localMaximumRadius, scores) {\n const [height, width] = scores.shape;\n let localMaximum = true;\n const yStart = Math.max(heatmapY - localMaximumRadius, 0);\n const yEnd = Math.min(heatmapY + localMaximumRadius + 1, height);\n for (let yCurrent = yStart; yCurrent < yEnd; ++yCurrent) {\n const xStart = Math.max(heatmapX - localMaximumRadius, 0);\n const xEnd = Math.min(heatmapX + localMaximumRadius + 1, width);\n for (let xCurrent = xStart; xCurrent < xEnd; ++xCurrent) {\n if (scores.get(yCurrent, xCurrent, keypointId) > score) {\n localMaximum = false;\n break;\n }\n }\n if (!localMaximum) {\n break;\n }\n }\n return localMaximum;\n}\n/**\n * Builds a priority queue with part candidate positions for a specific image in\n * the batch. For this we find all local maxima in the score maps with score\n * values above a threshold. We create a single priority queue across all parts.\n */\nfunction buildPartWithScoreQueue(scoreThreshold, localMaximumRadius, scores) {\n const [height, width, numKeypoints] = scores.shape;\n const queue = new heapSort.MaxHeap(height * width * numKeypoints, ({ score }) => score);\n for (let heatmapY = 0; heatmapY < height; ++heatmapY) {\n for (let heatmapX = 0; heatmapX < width; ++heatmapX) {\n for (let keypointId = 0; keypointId < numKeypoints; ++keypointId) {\n const score = scores.get(heatmapY, heatmapX, keypointId);\n // Only consider parts with score greater or equal to threshold as root candidates.\n if (score < scoreThreshold) continue;\n // Only consider keypoints whose score is maximum in a local window.\n if (scoreIsMaximumInLocalWindow(keypointId, score, heatmapY, heatmapX, localMaximumRadius, scores)) {\n queue.enqueue({ score, part: { heatmapY, heatmapX, id: keypointId } });\n }\n }\n }\n }\n return queue;\n}\nexports.buildPartWithScoreQueue = buildPartWithScoreQueue;\n", "exports.partNames = [\n 'nose', 'leftEye', 'rightEye', 'leftEar', 'rightEar', 'leftShoulder',\n 'rightShoulder', 'leftElbow', 'rightElbow', 'leftWrist', 'rightWrist',\n 'leftHip', 'rightHip', 'leftKnee', 'rightKnee', 'leftAnkle', 'rightAnkle',\n];\nexports.NUM_KEYPOINTS = exports.partNames.length;\nexports.partIds = exports.partNames.reduce((result, jointName, i) => {\n result[jointName] = i;\n return result;\n}, {});\nconst connectedPartNames = [\n ['leftHip', 'leftShoulder'], ['leftElbow', 'leftShoulder'],\n ['leftElbow', 'leftWrist'], ['leftHip', 'leftKnee'],\n ['leftKnee', 'leftAnkle'], ['rightHip', 'rightShoulder'],\n ['rightElbow', 'rightShoulder'], ['rightElbow', 'rightWrist'],\n ['rightHip', 'rightKnee'], ['rightKnee', 'rightAnkle'],\n ['leftShoulder', 'rightShoulder'], ['leftHip', 'rightHip'],\n];\n/*\n * Define the skeleton. This defines the parent->child relationships of our\n * tree. 
Arbitrarily this defines the nose as the root of the tree, however\n * since we will infer the displacement for both parent->child and\n * child->parent, we can define the tree root as any node.\n */\nexports.poseChain = [\n ['nose', 'leftEye'], ['leftEye', 'leftEar'], ['nose', 'rightEye'],\n ['rightEye', 'rightEar'], ['nose', 'leftShoulder'],\n ['leftShoulder', 'leftElbow'], ['leftElbow', 'leftWrist'],\n ['leftShoulder', 'leftHip'], ['leftHip', 'leftKnee'],\n ['leftKnee', 'leftAnkle'], ['nose', 'rightShoulder'],\n ['rightShoulder', 'rightElbow'], ['rightElbow', 'rightWrist'],\n ['rightShoulder', 'rightHip'], ['rightHip', 'rightKnee'],\n ['rightKnee', 'rightAnkle'],\n];\nexports.connectedPartIndices = connectedPartNames.map(([jointNameA, jointNameB]) => ([exports.partIds[jointNameA], exports.partIds[jointNameB]]));\nexports.partChannels = [\n 'left_face',\n 'right_face',\n 'right_upper_leg_front',\n 'right_lower_leg_back',\n 'right_upper_leg_back',\n 'left_lower_leg_front',\n 'left_upper_leg_front',\n 'left_upper_leg_back',\n 'left_lower_leg_back',\n 'right_feet',\n 'right_lower_leg_front',\n 'left_feet',\n 'torso_front',\n 'torso_back',\n 'right_upper_arm_front',\n 'right_upper_arm_back',\n 'right_lower_arm_back',\n 'left_lower_arm_front',\n 'left_upper_arm_front',\n 'left_upper_arm_back',\n 'left_lower_arm_back',\n 'right_hand',\n 'right_lower_arm_front',\n 'left_hand',\n];\n", "import * as kpt from './keypoints';\n\nfunction getOffsetPoint(y, x, keypoint, offsets) {\n return {\n y: offsets.get(y, x, keypoint),\n x: offsets.get(y, x, keypoint + kpt.NUM_KEYPOINTS),\n };\n}\nexports.getOffsetPoint = getOffsetPoint;\n\nfunction getImageCoords(part, outputStride, offsets) {\n const { heatmapY, heatmapX, id: keypoint } = part;\n const { y, x } = getOffsetPoint(heatmapY, heatmapX, keypoint, offsets);\n return {\n x: part.heatmapX * outputStride + x,\n y: part.heatmapY * outputStride + y,\n };\n}\nexports.getImageCoords = getImageCoords;\n\nfunction fillArray(element, size) {\n const result = new Array(size);\n for (let i = 0; i < size; i++) {\n result[i] = element;\n }\n return result;\n}\nexports.fillArray = fillArray;\n\nfunction clamp(a, min, max) {\n if (a < min) return min;\n if (a > max) return max;\n return a;\n}\nexports.clamp = clamp;\n\nfunction squaredDistance(y1, x1, y2, x2) {\n const dy = y2 - y1;\n const dx = x2 - x1;\n return dy * dy + dx * dx;\n}\nexports.squaredDistance = squaredDistance;\n\nfunction addVectors(a, b) {\n return { x: a.x + b.x, y: a.y + b.y };\n}\nexports.addVectors = addVectors;\n\nfunction clampVector(a, min, max) {\n return { y: clamp(a.y, min, max), x: clamp(a.x, min, max) };\n}\nexports.clampVector = clampVector;\n", "import * as keypoints from './keypoints';\nimport * as vectors from './vectors';\n\nconst parentChildrenTuples = keypoints.poseChain.map(([parentJoinName, childJoinName]) => ([keypoints.partIds[parentJoinName], keypoints.partIds[childJoinName]]));\nconst parentToChildEdges = parentChildrenTuples.map(([, childJointId]) => childJointId);\nconst childToParentEdges = parentChildrenTuples.map(([parentJointId]) => parentJointId);\nfunction getDisplacement(edgeId, point, displacements) {\n const numEdges = displacements.shape[2] / 2;\n return {\n y: displacements.get(point.y, point.x, edgeId),\n x: displacements.get(point.y, point.x, numEdges + edgeId),\n };\n}\nfunction getStridedIndexNearPoint(point, outputStride, height, width) {\n return {\n y: vectors.clamp(Math.round(point.y / outputStride), 0, height - 1),\n x: 
vectors.clamp(Math.round(point.x / outputStride), 0, width - 1),\n };\n}\n/**\n * We get a new keypoint along the `edgeId` for the pose instance, assuming\n * that the position of the `idSource` part is already known. For this, we\n * follow the displacement vector from the source to target part (stored in\n * the `i`-t channel of the displacement tensor). The displaced keypoint\n * vector is refined using the offset vector by `offsetRefineStep` times.\n */\nfunction traverseToTargetKeypoint(edgeId, sourceKeypoint, targetKeypointId, scoresBuffer, offsets, outputStride, displacements, offsetRefineStep = 2) {\n const [height, width] = scoresBuffer.shape;\n // Nearest neighbor interpolation for the source->target displacements.\n const sourceKeypointIndices = getStridedIndexNearPoint(sourceKeypoint.position, outputStride, height, width);\n const displacement = getDisplacement(edgeId, sourceKeypointIndices, displacements);\n const displacedPoint = vectors.addVectors(sourceKeypoint.position, displacement);\n let targetKeypoint = displacedPoint;\n for (let i = 0; i < offsetRefineStep; i++) {\n const targetKeypointIndices = getStridedIndexNearPoint(targetKeypoint, outputStride, height, width);\n const offsetPoint = vectors.getOffsetPoint(targetKeypointIndices.y, targetKeypointIndices.x, targetKeypointId, offsets);\n targetKeypoint = vectors.addVectors({\n x: targetKeypointIndices.x * outputStride,\n y: targetKeypointIndices.y * outputStride,\n }, { x: offsetPoint.x, y: offsetPoint.y });\n }\n const targetKeyPointIndices = getStridedIndexNearPoint(targetKeypoint, outputStride, height, width);\n const score = scoresBuffer.get(targetKeyPointIndices.y, targetKeyPointIndices.x, targetKeypointId);\n return { position: targetKeypoint, part: keypoints.partNames[targetKeypointId], score };\n}\n/**\n * Follows the displacement fields to decode the full pose of the object\n * instance given the position of a part that acts as root.\n *\n * @return An array of decoded keypoints and their scores for a single pose\n */\nfunction decodePose(root, scores, offsets, outputStride, displacementsFwd, displacementsBwd) {\n const numParts = scores.shape[2];\n const numEdges = parentToChildEdges.length;\n const instanceKeypoints = new Array(numParts);\n // Start a new detection instance at the position of the root.\n const { part: rootPart, score: rootScore } = root;\n const rootPoint = vectors.getImageCoords(rootPart, outputStride, offsets);\n instanceKeypoints[rootPart.id] = {\n score: rootScore,\n part: keypoints.partNames[rootPart.id],\n position: rootPoint,\n };\n // Decode the part positions upwards in the tree, following the backward\n // displacements.\n for (let edge = numEdges - 1; edge >= 0; --edge) {\n const sourceKeypointId = parentToChildEdges[edge];\n const targetKeypointId = childToParentEdges[edge];\n if (instanceKeypoints[sourceKeypointId] && !instanceKeypoints[targetKeypointId]) {\n instanceKeypoints[targetKeypointId] = traverseToTargetKeypoint(edge, instanceKeypoints[sourceKeypointId], targetKeypointId, scores, offsets, outputStride, displacementsBwd);\n }\n }\n // Decode the part positions downwards in the tree, following the forward\n // displacements.\n for (let edge = 0; edge < numEdges; ++edge) {\n const sourceKeypointId = childToParentEdges[edge];\n const targetKeypointId = parentToChildEdges[edge];\n if (instanceKeypoints[sourceKeypointId] && !instanceKeypoints[targetKeypointId]) {\n instanceKeypoints[targetKeypointId] = traverseToTargetKeypoint(edge, instanceKeypoints[sourceKeypointId], 
targetKeypointId, scores, offsets, outputStride, displacementsFwd);\n }\n }\n return instanceKeypoints;\n}\nexports.decodePose = decodePose;\n", "import * as buildParts from './buildParts';\nimport * as decodePose from './decodePose';\nimport * as vectors from './vectors';\n\nfunction withinNmsRadiusOfCorrespondingPoint(poses, squaredNmsRadius, { x, y }, keypointId) {\n return poses.some(({ keypoints }) => {\n const correspondingKeypoint = keypoints[keypointId].position;\n return vectors.squaredDistance(y, x, correspondingKeypoint.y, correspondingKeypoint.x) <= squaredNmsRadius;\n });\n}\n/* Score the newly proposed object instance without taking into account\n * the scores of the parts that overlap with any previously detected\n * instance.\n */\nfunction getInstanceScore(existingPoses, squaredNmsRadius, instanceKeypoints) {\n const notOverlappedKeypointScores = instanceKeypoints.reduce((result, { position, score }, keypointId) => {\n if (!withinNmsRadiusOfCorrespondingPoint(existingPoses, squaredNmsRadius, position, keypointId)) {\n result += score;\n }\n return result;\n }, 0.0);\n return notOverlappedKeypointScores / instanceKeypoints.length;\n}\n// A point (y, x) is considered as root part candidate if its score is a\n// maximum in a window |y - y'| <= kLocalMaximumRadius, |x - x'| <=\n// kLocalMaximumRadius.\nconst kLocalMaximumRadius = 1;\n/**\n * Detects multiple poses and finds their parts from part scores and\n * displacement vectors. It returns up to `maxDetections` object instance\n * detections in decreasing root score order. It works as follows: We first\n * create a priority queue with local part score maxima above\n * `scoreThreshold`, considering all parts at the same time. Then we\n * iteratively pull the top element of the queue (in decreasing score order)\n * and treat it as a root candidate for a new object instance. To avoid\n * duplicate detections, we reject the root candidate if it is within a disk\n * of `nmsRadius` pixels from the corresponding part of a previously detected\n * instance, which is a form of part-based non-maximum suppression (NMS). If\n * the root candidate passes the NMS check, we start a new object instance\n * detection, treating the corresponding part as root and finding the\n * positions of the remaining parts by following the displacement vectors\n * along the tree-structured part graph. We assign to the newly detected\n * instance a score equal to the sum of scores of its parts which have not\n * been claimed by a previous instance (i.e., those at least `nmsRadius`\n * pixels away from the corresponding part of all previously detected\n * instances), divided by the total number of parts `numParts`.\n *\n * @param heatmapScores 3-D tensor with shape `[height, width, numParts]`.\n * The value of heatmapScores[y, x, k]` is the score of placing the `k`-th\n * object part at position `(y, x)`.\n *\n * @param offsets 3-D tensor with shape `[height, width, numParts * 2]`.\n * The value of [offsets[y, x, k], offsets[y, x, k + numParts]]` is the\n * short range offset vector of the `k`-th object part at heatmap\n * position `(y, x)`.\n *\n * @param displacementsFwd 3-D tensor of shape\n * `[height, width, 2 * num_edges]`, where `num_edges = num_parts - 1` is the\n * number of edges (parent-child pairs) in the tree. 
It contains the forward\n * displacements between consecutive part from the root towards the leaves.\n *\n * @param displacementsBwd 3-D tensor of shape\n * `[height, width, 2 * num_edges]`, where `num_edges = num_parts - 1` is the\n * number of edges (parent-child pairs) in the tree. It contains the backward\n * displacements between consecutive part from the root towards the leaves.\n *\n * @param outputStride The output stride that was used when feed-forwarding\n * through the PoseNet model. Must be 32, 16, or 8.\n *\n * @param maxPoseDetections Maximum number of returned instance detections per\n * image.\n *\n * @param scoreThreshold Only return instance detections that have root part\n * score greater or equal to this value. Defaults to 0.5.\n *\n * @param nmsRadius Non-maximum suppression part distance. It needs to be\n * strictly positive. Two parts suppress each other if they are less than\n * `nmsRadius` pixels away. Defaults to 20.\n *\n * @return An array of poses and their scores, each containing keypoints and\n * the corresponding keypoint scores.\n */\nfunction decodeMultiplePoses(scoresBuffer, offsetsBuffer, displacementsFwdBuffer, displacementsBwdBuffer, outputStride, maxPoseDetections, scoreThreshold = 0.5, nmsRadius = 20) {\n const poses = [];\n const queue = buildParts.buildPartWithScoreQueue(scoreThreshold, kLocalMaximumRadius, scoresBuffer);\n const squaredNmsRadius = nmsRadius * nmsRadius;\n // Generate at most maxDetections object instances per image in\n // decreasing root part score order.\n while (poses.length < maxPoseDetections && !queue.empty()) {\n // The top element in the queue is the next root candidate.\n const root = queue.dequeue();\n // Part-based non-maximum suppression: We reject a root candidate if it\n // is within a disk of `nmsRadius` pixels from the corresponding part of\n // a previously detected instance.\n const rootImageCoords = vectors.getImageCoords(root.part, outputStride, offsetsBuffer);\n if (withinNmsRadiusOfCorrespondingPoint(poses, squaredNmsRadius, rootImageCoords, root.part.id)) continue;\n // Start a new detection instance at the position of the root.\n const keypoints = decodePose.decodePose(root, scoresBuffer, offsetsBuffer, outputStride, displacementsFwdBuffer, displacementsBwdBuffer);\n const score = getInstanceScore(poses, squaredNmsRadius, keypoints);\n poses.push({ keypoints, score });\n }\n return poses;\n}\nexports.decodeMultiplePoses = decodeMultiplePoses;\n", "import * as kpt from './keypoints';\n\nfunction eitherPointDoesntMeetConfidence(a, b, minConfidence) {\n return (a < minConfidence || b < minConfidence);\n}\n\nfunction getAdjacentKeyPoints(keypoints, minConfidence) {\n return kpt.connectedPartIndices.reduce((result, [leftJoint, rightJoint]) => {\n if (eitherPointDoesntMeetConfidence(keypoints[leftJoint].score, keypoints[rightJoint].score, minConfidence)) {\n return result;\n }\n result.push([keypoints[leftJoint], keypoints[rightJoint]]);\n return result;\n }, []);\n}\nexports.getAdjacentKeyPoints = getAdjacentKeyPoints;\n\nconst { NEGATIVE_INFINITY, POSITIVE_INFINITY } = Number;\nfunction getBoundingBox(keypoints) {\n return keypoints.reduce(({ maxX, maxY, minX, minY }, { position: { x, y } }) => ({\n maxX: Math.max(maxX, x),\n maxY: Math.max(maxY, y),\n minX: Math.min(minX, x),\n minY: Math.min(minY, y),\n }), {\n maxX: NEGATIVE_INFINITY,\n maxY: NEGATIVE_INFINITY,\n minX: POSITIVE_INFINITY,\n minY: POSITIVE_INFINITY,\n });\n}\nexports.getBoundingBox = getBoundingBox;\n\nfunction getBoundingBoxPoints(keypoints) 
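// Illustrative sketch only: the part-based NMS test that decodeMultiplePoses above applies to
// each root candidate, with made-up coordinates. squaredDistance is the helper from the vectors
// module above and is assumed to be in scope.
const nmsRadius = 20;                            // default from the function signature above
const squaredNmsRadius = nmsRadius * nmsRadius;  // 400
const existingNose = { y: 100, x: 200 };         // hypothetical keypoint of an already-decoded pose
const candidateNose = { y: 105, x: 210 };        // hypothetical root candidate for a new pose
const d2 = squaredDistance(existingNose.y, existingNose.x, candidateNose.y, candidateNose.x); // 25 + 100 = 125
const suppressed = d2 <= squaredNmsRadius;       // true -> the candidate is rejected as a duplicate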
{\n const { minX, minY, maxX, maxY } = getBoundingBox(keypoints);\n return [{ x: minX, y: minY }, { x: maxX, y: minY }, { x: maxX, y: maxY }, { x: minX, y: maxY }];\n}\nexports.getBoundingBoxPoints = getBoundingBoxPoints;\n\nasync function toTensorBuffers3D(tensors) {\n return Promise.all(tensors.map((tensor) => tensor.buffer()));\n}\nexports.toTensorBuffers3D = toTensorBuffers3D;\n\nfunction scalePose(pose, scaleY, scaleX) {\n return {\n score: pose.score,\n keypoints: pose.keypoints.map(({ score, part, position }) => ({\n score,\n part,\n position: { x: position.x * scaleX, y: position.y * scaleY },\n })),\n };\n}\nexports.scalePose = scalePose;\n\nfunction resizeTo(image, [targetH, targetW]) {\n const input = image.squeeze(0);\n const resized = input.resizeBilinear([targetH, targetW]);\n input.dispose();\n return resized;\n}\nexports.resizeTo = resizeTo;\n\nfunction scaleAndFlipPoses(poses, [height, width], [inputResolutionHeight, inputResolutionWidth]) {\n const scaledPoses = poses.map((pose) => scalePose(pose, height / inputResolutionHeight, width / inputResolutionWidth));\n return scaledPoses;\n}\nexports.scaleAndFlipPoses = scaleAndFlipPoses;\n", "import { loadGraphModel } from '../tf.js';\nimport * as modelMobileNet from './modelMobileNet';\nimport * as decodeMultiple from './decodeMultiple';\nimport * as util from './util';\n\nclass PoseNet {\n constructor(net) {\n this.baseModel = net;\n this.outputStride = 16;\n }\n\n async estimatePoses(input, config) {\n return new Promise(async (resolve) => {\n const height = input.shape[1];\n const width = input.shape[2];\n const resized = util.resizeTo(input, [config.body.inputSize, config.body.inputSize]);\n const res = this.baseModel.predict(resized);\n const allTensorBuffers = await util.toTensorBuffers3D([res.heatmapScores, res.offsets, res.displacementFwd, res.displacementBwd]);\n const scoresBuffer = allTensorBuffers[0];\n const offsetsBuffer = allTensorBuffers[1];\n const displacementsFwdBuffer = allTensorBuffers[2];\n const displacementsBwdBuffer = allTensorBuffers[3];\n const poses = await decodeMultiple.decodeMultiplePoses(scoresBuffer, offsetsBuffer, displacementsFwdBuffer, displacementsBwdBuffer, this.outputStride, config.body.maxDetections, config.body.scoreThreshold, config.body.nmsRadius);\n const resultPoses = util.scaleAndFlipPoses(poses, [height, width], [config.body.inputSize, config.body.inputSize]);\n res.heatmapScores.dispose();\n res.offsets.dispose();\n res.displacementFwd.dispose();\n res.displacementBwd.dispose();\n resized.dispose();\n resolve(resultPoses);\n });\n }\n\n dispose() {\n this.baseModel.dispose();\n }\n}\nexports.PoseNet = PoseNet;\n\nasync function load(config) {\n const graphModel = await loadGraphModel(config.body.modelPath);\n const mobilenet = new modelMobileNet.MobileNet(graphModel, this.outputStride);\n // eslint-disable-next-line no-console\n console.log(`Human: load model: ${config.body.modelPath.match(/\\/(.*)\\./)[1]}`);\n return new PoseNet(mobilenet);\n}\nexports.load = load;\n", "import * as modelMobileNet from './modelMobileNet';\nimport * as modelPoseNet from './modelPoseNet';\nimport * as decodeMultiple from './decodeMultiple';\nimport * as keypoints from './keypoints';\nimport * as util from './util';\n\nexports.load = modelPoseNet.load;\nexports.PoseNet = modelPoseNet.PoseNet;\n\nexports.MobileNet = modelMobileNet.MobileNet;\nexports.decodeMultiplePoses = decodeMultiple.decodeMultiplePoses;\nexports.partChannels = keypoints.partChannels;\nexports.partIds = 
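// Illustrative sketch only: how scaleAndFlipPoses above maps keypoints from the model's input
// resolution back to the original frame. The resolutions and coordinates are made up, and
// scalePose from the util module is assumed to be in scope.
const inputResolution = 256;                      // hypothetical config.body.inputSize
const [frameHeight, frameWidth] = [720, 1280];    // hypothetical source frame
const scaleY = frameHeight / inputResolution;     // 2.8125
const scaleX = frameWidth / inputResolution;      // 5
const modelSpacePose = { score: 0.9, keypoints: [{ score: 0.9, part: 'nose', position: { x: 128, y: 64 } }] };
const frameSpacePose = scalePose(modelSpacePose, scaleY, scaleX);
// frameSpacePose.keypoints[0].position is { x: 640, y: 180 } in original frame pixels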
keypoints.partIds;\nexports.partNames = keypoints.partNames;\nexports.poseChain = keypoints.poseChain;\nexports.getAdjacentKeyPoints = util.getAdjacentKeyPoints;\nexports.getBoundingBox = util.getBoundingBox;\nexports.getBoundingBoxPoints = util.getBoundingBoxPoints;\nexports.scaleAndFlipPoses = util.scaleAndFlipPoses;\nexports.scalePose = util.scalePose;\n", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport { tf } from '../tf.js';\nimport * as box from './box';\n\nclass HandDetector {\n constructor(model, inputSize, anchorsAnnotated) {\n this.model = model;\n this.anchors = anchorsAnnotated.map((anchor) => [anchor.x_center, anchor.y_center]);\n this.anchorsTensor = tf.tensor2d(this.anchors);\n this.inputSizeTensor = tf.tensor1d([inputSize, inputSize]);\n this.doubleInputSizeTensor = tf.tensor1d([inputSize * 2, inputSize * 2]);\n }\n\n normalizeBoxes(boxes) {\n return tf.tidy(() => {\n const boxOffsets = tf.slice(boxes, [0, 0], [-1, 2]);\n const boxSizes = tf.slice(boxes, [0, 2], [-1, 2]);\n const boxCenterPoints = tf.add(tf.div(boxOffsets, this.inputSizeTensor), this.anchorsTensor);\n const halfBoxSizes = tf.div(boxSizes, this.doubleInputSizeTensor);\n const startPoints = tf.mul(tf.sub(boxCenterPoints, halfBoxSizes), this.inputSizeTensor);\n const endPoints = tf.mul(tf.add(boxCenterPoints, halfBoxSizes), this.inputSizeTensor);\n return tf.concat2d([startPoints, endPoints], 1);\n });\n }\n\n normalizeLandmarks(rawPalmLandmarks, index) {\n return tf.tidy(() => {\n const landmarks = tf.add(tf.div(rawPalmLandmarks.reshape([-1, 7, 2]), this.inputSizeTensor), this.anchors[index]);\n return tf.mul(landmarks, this.inputSizeTensor);\n });\n }\n\n async getBoxes(input, config) {\n const batched = this.model.predict(input);\n const predictions = batched.squeeze();\n batched.dispose();\n const scores = tf.tidy(() => tf.sigmoid(tf.slice(predictions, [0, 0], [-1, 1])).squeeze());\n const scoresVal = scores.dataSync();\n const rawBoxes = tf.slice(predictions, [0, 1], [-1, 4]);\n const boxes = this.normalizeBoxes(rawBoxes);\n rawBoxes.dispose();\n const filteredT = await tf.image.nonMaxSuppressionAsync(boxes, scores, config.maxHands, config.iouThreshold, config.scoreThreshold);\n const filtered = filteredT.arraySync();\n\n scores.dispose();\n filteredT.dispose();\n const hands = [];\n for (const boxIndex of filtered) {\n if (scoresVal[boxIndex] >= config.minConfidence) {\n const matchingBox = tf.slice(boxes, [boxIndex, 0], [1, -1]);\n const rawPalmLandmarks = tf.slice(predictions, [boxIndex, 5], [1, 14]);\n const palmLandmarks = tf.tidy(() => this.normalizeLandmarks(rawPalmLandmarks, boxIndex).reshape([-1, 2]));\n rawPalmLandmarks.dispose();\n hands.push({ box: matchingBox, palmLandmarks, confidence: scoresVal[boxIndex] });\n }\n }\n predictions.dispose();\n boxes.dispose();\n return hands;\n }\n\n async estimateHandBounds(input, 
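// Illustrative sketch only: the scalar form of HandDetector.normalizeBoxes above for a single
// anchor and a single raw prediction. The raw values and the 256px input size are hypothetical;
// the anchor center is one of the values exported further down.
const inputSize = 256;
const anchor = [0.515625, 0.046875];              // [x_center, y_center] of one anchor
const rawOffset = [4, 20];                        // hypothetical raw box offset from the model
const rawSize = [48, 48];                         // hypothetical raw box size from the model
const center = [rawOffset[0] / inputSize + anchor[0], rawOffset[1] / inputSize + anchor[1]];
const half = [rawSize[0] / (2 * inputSize), rawSize[1] / (2 * inputSize)];
const startPoint = [(center[0] - half[0]) * inputSize, (center[1] - half[1]) * inputSize]; // [112, 8]
const endPoint = [(center[0] + half[0]) * inputSize, (center[1] + half[1]) * inputSize];   // [160, 56]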
config) {\n const inputHeight = input.shape[1];\n const inputWidth = input.shape[2];\n const image = tf.tidy(() => input.resizeBilinear([config.inputSize, config.inputSize]).div(127.5).sub(1));\n const predictions = await this.getBoxes(image, config);\n image.dispose();\n if (!predictions || predictions.length === 0) return null;\n const hands = [];\n for (const prediction of predictions) {\n const boxes = prediction.box.dataSync();\n const startPoint = boxes.slice(0, 2);\n const endPoint = boxes.slice(2, 4);\n const palmLandmarks = prediction.palmLandmarks.arraySync();\n prediction.box.dispose();\n prediction.palmLandmarks.dispose();\n hands.push(box.scaleBoxCoordinates({ startPoint, endPoint, palmLandmarks, confidence: prediction.confidence }, [inputWidth / config.inputSize, inputHeight / config.inputSize]));\n }\n return hands;\n }\n}\nexports.HandDetector = HandDetector;\n", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport { tf } from '../tf.js';\nimport * as box from './box';\nimport * as util from './util';\n\nconst PALM_BOX_SHIFT_VECTOR = [0, -0.4];\nconst PALM_BOX_ENLARGE_FACTOR = 3;\nconst HAND_BOX_SHIFT_VECTOR = [0, -0.1]; // move detected hand box by x,y to ease landmark detection\nconst HAND_BOX_ENLARGE_FACTOR = 1.65; // increased from model default 1.65;\nconst PALM_LANDMARK_IDS = [0, 5, 9, 13, 17, 1, 2];\nconst PALM_LANDMARKS_INDEX_OF_PALM_BASE = 0;\nconst PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE = 2;\n\nclass HandPipeline {\n constructor(boundingBoxDetector, meshDetector, inputSize) {\n this.boxDetector = boundingBoxDetector;\n this.meshDetector = meshDetector;\n this.inputSize = inputSize;\n this.storedBoxes = [];\n this.skipped = 1000;\n this.detectedHands = 0;\n }\n\n getBoxForPalmLandmarks(palmLandmarks, rotationMatrix) {\n const rotatedPalmLandmarks = palmLandmarks.map((coord) => {\n const homogeneousCoordinate = [...coord, 1];\n return util.rotatePoint(homogeneousCoordinate, rotationMatrix);\n });\n const boxAroundPalm = this.calculateLandmarksBoundingBox(rotatedPalmLandmarks);\n return box.enlargeBox(box.squarifyBox(box.shiftBox(boxAroundPalm, PALM_BOX_SHIFT_VECTOR)), PALM_BOX_ENLARGE_FACTOR);\n }\n\n getBoxForHandLandmarks(landmarks) {\n const boundingBox = this.calculateLandmarksBoundingBox(landmarks);\n const boxAroundHand = box.enlargeBox(box.squarifyBox(box.shiftBox(boundingBox, HAND_BOX_SHIFT_VECTOR)), HAND_BOX_ENLARGE_FACTOR);\n const palmLandmarks = [];\n for (let i = 0; i < PALM_LANDMARK_IDS.length; i++) {\n palmLandmarks.push(landmarks[PALM_LANDMARK_IDS[i]].slice(0, 2));\n }\n boxAroundHand.palmLandmarks = palmLandmarks;\n return boxAroundHand;\n }\n\n transformRawCoords(rawCoords, box2, angle, rotationMatrix) {\n const boxSize = box.getBoxSize(box2);\n const scaleFactor = [boxSize[0] / this.inputSize, boxSize[1] / this.inputSize];\n const coordsScaled = rawCoords.map((coord) => [\n 
scaleFactor[0] * (coord[0] - this.inputSize / 2),\n scaleFactor[1] * (coord[1] - this.inputSize / 2),\n coord[2],\n ]);\n const coordsRotationMatrix = util.buildRotationMatrix(angle, [0, 0]);\n const coordsRotated = coordsScaled.map((coord) => {\n const rotated = util.rotatePoint(coord, coordsRotationMatrix);\n return [...rotated, coord[2]];\n });\n const inverseRotationMatrix = util.invertTransformMatrix(rotationMatrix);\n const boxCenter = [...box.getBoxCenter(box2), 1];\n const originalBoxCenter = [\n util.dot(boxCenter, inverseRotationMatrix[0]),\n util.dot(boxCenter, inverseRotationMatrix[1]),\n ];\n return coordsRotated.map((coord) => [\n coord[0] + originalBoxCenter[0],\n coord[1] + originalBoxCenter[1],\n coord[2],\n ]);\n }\n\n async estimateHands(image, config) {\n this.skipped++;\n let useFreshBox = false;\n\n // run new detector every skipFrames unless we only want box to start with\n let boxes;\n if ((this.skipped > config.skipFrames) || !config.landmarks) {\n boxes = await this.boxDetector.estimateHandBounds(image, config);\n // don't reset on test image\n if ((image.shape[1] !== 255) && (image.shape[2] !== 255)) this.skipped = 0;\n }\n\n // if detector result count doesn't match current working set, use it to reset current working set\n if (boxes && (boxes.length > 0) && ((boxes.length !== this.detectedHands) && (this.detectedHands !== config.maxHands) || !config.landmarks)) {\n this.storedBoxes = [];\n this.detectedHands = 0;\n for (const possible of boxes) this.storedBoxes.push(possible);\n if (this.storedBoxes.length > 0) useFreshBox = true;\n }\n const hands = [];\n // console.log(`skipped: ${this.skipped} max: ${config.maxHands} detected: ${this.detectedHands} stored: ${this.storedBoxes.length} new: ${boxes?.length}`);\n\n // go through working set of boxes\n for (const i in this.storedBoxes) {\n const currentBox = this.storedBoxes[i];\n if (!currentBox) continue;\n if (config.landmarks) {\n const angle = util.computeRotation(currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_PALM_BASE], currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE]);\n const palmCenter = box.getBoxCenter(currentBox);\n const palmCenterNormalized = [palmCenter[0] / image.shape[2], palmCenter[1] / image.shape[1]];\n const rotatedImage = tf.image.rotateWithOffset(image, angle, 0, palmCenterNormalized);\n const rotationMatrix = util.buildRotationMatrix(-angle, palmCenter);\n const newBox = useFreshBox ? 
this.getBoxForPalmLandmarks(currentBox.palmLandmarks, rotationMatrix) : currentBox;\n const croppedInput = box.cutBoxFromImageAndResize(newBox, rotatedImage, [this.inputSize, this.inputSize]);\n const handImage = croppedInput.div(255);\n croppedInput.dispose();\n rotatedImage.dispose();\n const [confidence, keypoints] = await this.meshDetector.predict(handImage);\n handImage.dispose();\n const confidenceValue = confidence.dataSync()[0];\n confidence.dispose();\n if (confidenceValue >= config.minConfidence) {\n const keypointsReshaped = tf.reshape(keypoints, [-1, 3]);\n const rawCoords = keypointsReshaped.arraySync();\n keypoints.dispose();\n keypointsReshaped.dispose();\n const coords = this.transformRawCoords(rawCoords, newBox, angle, rotationMatrix);\n const nextBoundingBox = this.getBoxForHandLandmarks(coords);\n this.storedBoxes[i] = nextBoundingBox;\n const result = {\n landmarks: coords,\n confidence: confidenceValue,\n box: {\n topLeft: nextBoundingBox.startPoint,\n bottomRight: nextBoundingBox.endPoint,\n },\n };\n hands.push(result);\n } else {\n this.storedBoxes[i] = null;\n }\n keypoints.dispose();\n } else {\n const enlarged = box.enlargeBox(box.squarifyBox(box.shiftBox(currentBox, HAND_BOX_SHIFT_VECTOR)), HAND_BOX_ENLARGE_FACTOR);\n const result = {\n confidence: currentBox.confidence,\n box: {\n topLeft: enlarged.startPoint,\n bottomRight: enlarged.endPoint,\n },\n };\n hands.push(result);\n }\n }\n this.storedBoxes = this.storedBoxes.filter((a) => a !== null);\n this.detectedHands = hands.length;\n return hands;\n }\n\n // eslint-disable-next-line class-methods-use-this\n calculateLandmarksBoundingBox(landmarks) {\n const xs = landmarks.map((d) => d[0]);\n const ys = landmarks.map((d) => d[1]);\n const startPoint = [Math.min(...xs), Math.min(...ys)];\n const endPoint = [Math.max(...xs), Math.max(...ys)];\n return { startPoint, endPoint };\n }\n}\n\nexports.HandPipeline = HandPipeline;\n", "exports.anchors = [\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.015625,\n },\n {\n w: 
[Fragment of a machine-generated anchor table, truncated at both ends and emitted here with literal "\n" escapes instead of line breaks. Every entry has the form { w: 1, h: 1, x_center, y_center }; the centers lie on a regular 32-column grid at (2 * i + 1) / 64 (0.015625, 0.046875, ..., 0.984375), and each grid position is listed twice in a row. The visible portion covers rows from y_center: 0.015625 through the beginning of the y_center: 0.546875 row; the remainder of the list is not shown here.]
y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.578125,\n },\n {\n w: 
1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n 
y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.609375,\n },\n {\n w: 
1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n 
y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.671875,\n },\n {\n w: 
1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n 
y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.703125,\n },\n {\n w: 
1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n 
y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.765625,\n },\n {\n w: 
1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n 
y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.828125,\n },\n {\n w: 
1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n 
y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.859375,\n },\n {\n w: 
// ...anchor table continues: every remaining entry has the form { w: 1, h: 1, x_center, y_center },
// with the centers laid out on regular grids: 1/32 steps (two identical anchors per cell),
// then 1/16 steps (two anchors per cell), then 1/8 steps (six anchors per cell)...
x_center: 0.8125,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.9375,\n 
},\n];\n", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// https://storage.googleapis.com/tfjs-models/demos/handpose/index.html\n\nimport { loadGraphModel } from '../tf.js';\nimport * as handdetector from './handdetector';\nimport * as pipeline from './handpipeline';\nimport * as anchors from './anchors';\n\nconst MESH_ANNOTATIONS = {\n thumb: [1, 2, 3, 4],\n indexFinger: [5, 6, 7, 8],\n middleFinger: [9, 10, 11, 12],\n ringFinger: [13, 14, 15, 16],\n pinky: [17, 18, 19, 20],\n palmBase: [0],\n};\n\nclass HandPose {\n constructor(pipe) {\n this.pipeline = pipe;\n }\n\n static getAnnotations() {\n return MESH_ANNOTATIONS;\n }\n\n async estimateHands(input, config) {\n const predictions = await this.pipeline.estimateHands(input, config);\n if (!predictions) return [];\n const hands = [];\n for (const prediction of predictions) {\n const annotations = {};\n if (prediction.landmarks) {\n for (const key of Object.keys(MESH_ANNOTATIONS)) {\n annotations[key] = MESH_ANNOTATIONS[key].map((index) => prediction.landmarks[index]);\n }\n }\n hands.push({\n confidence: prediction.confidence,\n box: prediction.box ? 
[\n prediction.box.topLeft[0],\n prediction.box.topLeft[1],\n prediction.box.bottomRight[0] - prediction.box.topLeft[0],\n prediction.box.bottomRight[1] - prediction.box.topLeft[1],\n ] : 0,\n landmarks: prediction.landmarks,\n annotations,\n });\n }\n return hands;\n }\n}\nexports.HandPose = HandPose;\n\nasync function load(config) {\n const [handDetectorModel, handPoseModel] = await Promise.all([\n loadGraphModel(config.detector.modelPath, { fromTFHub: config.detector.modelPath.includes('tfhub.dev') }),\n loadGraphModel(config.skeleton.modelPath, { fromTFHub: config.skeleton.modelPath.includes('tfhub.dev') }),\n ]);\n const detector = new handdetector.HandDetector(handDetectorModel, config.inputSize, anchors.anchors);\n const pipe = new pipeline.HandPipeline(detector, handPoseModel, config.inputSize);\n const handpose = new HandPose(pipe);\n // eslint-disable-next-line no-console\n console.log(`Human: load model: ${config.detector.modelPath.match(/\\/(.*)\\./)[1]}`);\n // eslint-disable-next-line no-console\n console.log(`Human: load model: ${config.skeleton.modelPath.match(/\\/(.*)\\./)[1]}`);\n return handpose;\n}\nexports.load = load;\n", "exports.body = (res) => {\n if (!res) return [];\n const gestures = [];\n for (const pose of res) {\n // raising hands\n const leftWrist = pose.keypoints.find((a) => (a.part === 'leftWrist'));\n const rightWrist = pose.keypoints.find((a) => (a.part === 'rightWrist'));\n const nose = pose.keypoints.find((a) => (a.part === 'nose'));\n if (nose && leftWrist && rightWrist && (leftWrist.position.y < nose.position.y) && (rightWrist.position.y < nose.position.y)) gestures.push('i give up');\n else if (nose && leftWrist && (leftWrist.position.y < nose.position.y)) gestures.push('raise left hand');\n else if (nose && rightWrist && (rightWrist.position.y < nose.position.y)) gestures.push('raise right hand');\n\n // leaning\n const leftShoulder = pose.keypoints.find((a) => (a.part === 'leftShoulder'));\n const rightShoulder = pose.keypoints.find((a) => (a.part === 'rightShoulder'));\n if (leftShoulder && rightShoulder) gestures.push(`leaning ${(leftShoulder.position.y > rightShoulder.position.y) ? 'left' : 'right'}`);\n }\n return gestures;\n};\n\nexports.face = (res) => {\n if (!res) return [];\n const gestures = [];\n for (const face of res) {\n // if (face.annotations['rightCheek'] && face.annotations['leftCheek'] && (face.annotations['rightCheek'].length > 0) && (face.annotations['leftCheek'].length > 0)) {\n // gestures.push(`facing ${((face.annotations['rightCheek'][0][2] > 0) || (face.annotations['leftCheek'][0][2] < 0)) ? 'right' : 'left'}`);\n // }\n if (face.mesh && face.mesh.length > 0) {\n const eyeFacing = face.mesh[35][2] - face.mesh[263][2];\n if (Math.abs(eyeFacing) < 10) gestures.push('facing camera');\n else gestures.push(`facing ${eyeFacing < 0 ? 
'right' : 'left'}`);\n const openLeft = Math.abs(face.mesh[374][1] - face.mesh[386][1]) / Math.abs(face.mesh[443][1] - face.mesh[450][1]); // center of eye inner lid y coord div center of wider eye border y coord\n if (openLeft < 0.2) gestures.push('blink left eye');\n const openRight = Math.abs(face.mesh[145][1] - face.mesh[159][1]) / Math.abs(face.mesh[223][1] - face.mesh[230][1]); // center of eye inner lid y coord div center of wider eye border y coord\n if (openRight < 0.2) gestures.push('blink right eye');\n const mouthOpen = Math.min(100, 500 * Math.abs(face.mesh[13][1] - face.mesh[14][1]) / Math.abs(face.mesh[10][1] - face.mesh[152][1]));\n if (mouthOpen > 10) gestures.push(`mouth ${Math.trunc(mouthOpen)}% open`);\n const chinDepth = face.mesh[152][2];\n if (Math.abs(chinDepth) > 10) gestures.push(`head ${chinDepth < 0 ? 'up' : 'down'}`);\n }\n }\n return gestures;\n};\n\nexports.hand = (res) => {\n if (!res) return [];\n const gestures = [];\n for (const hand of res) {\n const fingers = [];\n for (const [finger, pos] of Object.entries(hand['annotations'])) {\n if (finger !== 'palmBase') fingers.push({ name: finger.toLowerCase(), position: pos[0] }); // get tip of each finger\n }\n if (fingers && fingers.length > 0) {\n const closest = fingers.reduce((best, a) => (best.position[2] < a.position[2] ? best : a));\n const highest = fingers.reduce((best, a) => (best.position[1] < a.position[1] ? best : a));\n gestures.push(`${closest.name} forward ${highest.name} up`);\n }\n }\n return gestures;\n};\n", "/* eslint-disable no-use-before-define */\n/*\nWebGLImageFilter - MIT Licensed\n2013, Dominic Szablewski - phoboslab.org\n\n*/\n\nconst WebGLProgram = function (gl, vertexSource, fragmentSource) {\n const _collect = function (source, prefix, collection) {\n const r = new RegExp('\\\\b' + prefix + ' \\\\w+ (\\\\w+)', 'ig');\n source.replace(r, (match, name) => {\n collection[name] = 0;\n return match;\n });\n };\n\n const _compile = function (source, type) {\n const shader = gl.createShader(type);\n gl.shaderSource(shader, source);\n gl.compileShader(shader);\n\n if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {\n throw new Error('Filter: GL compile failed', gl.getShaderInfoLog(shader));\n }\n return shader;\n };\n\n this.uniform = {};\n this.attribute = {};\n\n const _vsh = _compile(vertexSource, gl.VERTEX_SHADER);\n const _fsh = _compile(fragmentSource, gl.FRAGMENT_SHADER);\n\n this.id = gl.createProgram();\n gl.attachShader(this.id, _vsh);\n gl.attachShader(this.id, _fsh);\n gl.linkProgram(this.id);\n\n if (!gl.getProgramParameter(this.id, gl.LINK_STATUS)) {\n throw new Error('Filter: GL link failed', gl.getProgramInfoLog(this.id));\n }\n\n gl.useProgram(this.id);\n\n // Collect attributes\n _collect(vertexSource, 'attribute', this.attribute);\n for (const a in this.attribute) {\n this.attribute[a] = gl.getAttribLocation(this.id, a);\n }\n\n // Collect uniforms\n _collect(vertexSource, 'uniform', this.uniform);\n _collect(fragmentSource, 'uniform', this.uniform);\n for (const u in this.uniform) {\n this.uniform[u] = gl.getUniformLocation(this.id, u);\n }\n};\n\nconst WebGLImageFilter = function (params) {\n if (!params) params = { };\n let _drawCount = 0;\n let _sourceTexture = null;\n let _lastInChain = false;\n let _currentFramebufferIndex = -1;\n let _tempFramebuffers = [null, null];\n let _filterChain = [];\n let _width = -1;\n let _height = -1;\n let _vertexBuffer = null;\n let _currentProgram = null;\n const _canvas = params.canvas || document.createElement('canvas');\n\n 
// key is the shader program source, value is the compiled program\n const _shaderProgramCache = { };\n\n const gl = _canvas.getContext('webgl');\n if (!gl) throw new Error('Filter: getContext() failed');\n\n this.addFilter = function (name) {\n // eslint-disable-next-line prefer-rest-params\n const args = Array.prototype.slice.call(arguments, 1);\n const filter = _filter[name];\n\n _filterChain.push({ func: filter, args });\n };\n\n this.reset = function () {\n _filterChain = [];\n };\n\n this.apply = function (image) {\n _resize(image.width, image.height);\n _drawCount = 0;\n\n // Create the texture for the input image if we haven't yet\n if (!_sourceTexture) _sourceTexture = gl.createTexture();\n gl.bindTexture(gl.TEXTURE_2D, _sourceTexture);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);\n gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);\n\n // No filters? Just draw\n if (_filterChain.length === 0) {\n // const program = _compileShader(SHADER.FRAGMENT_IDENTITY);\n _draw();\n return _canvas;\n }\n\n for (let i = 0; i < _filterChain.length; i++) {\n _lastInChain = (i === _filterChain.length - 1);\n const f = _filterChain[i];\n f.func.apply(this, f.args || []);\n }\n\n return _canvas;\n };\n\n const _resize = function (width, height) {\n // Same width/height? Nothing to do here\n if (width === _width && height === _height) { return; }\n\n _canvas.width = width;\n _width = width;\n _canvas.height = height;\n _height = height;\n\n // Create the context if we don't have it yet\n if (!_vertexBuffer) {\n // Create the vertex buffer for the two triangles [x, y, u, v] * 6\n const vertices = new Float32Array([\n -1, -1, 0, 1, 1, -1, 1, 1, -1, 1, 0, 0,\n -1, 1, 0, 0, 1, -1, 1, 1, 1, 1, 1, 0,\n ]);\n // eslint-disable-next-line no-unused-expressions\n (_vertexBuffer = gl.createBuffer(), gl.bindBuffer(gl.ARRAY_BUFFER, _vertexBuffer));\n gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW);\n\n // Note sure if this is a good idea; at least it makes texture loading\n // in Ejecta instant.\n gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, true);\n }\n\n gl.viewport(0, 0, _width, _height);\n\n // Delete old temp framebuffers\n _tempFramebuffers = [null, null];\n };\n\n const _getTempFramebuffer = function (index) {\n _tempFramebuffers[index] = _tempFramebuffers[index]\n || _createFramebufferTexture(_width, _height);\n\n return _tempFramebuffers[index];\n };\n\n const _createFramebufferTexture = function (width, height) {\n const fbo = gl.createFramebuffer();\n gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);\n\n const renderbuffer = gl.createRenderbuffer();\n gl.bindRenderbuffer(gl.RENDERBUFFER, renderbuffer);\n\n const texture = gl.createTexture();\n gl.bindTexture(gl.TEXTURE_2D, texture);\n gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);\n\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);\n\n gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);\n\n gl.bindTexture(gl.TEXTURE_2D, null);\n gl.bindFramebuffer(gl.FRAMEBUFFER, null);\n\n 
return { fbo, texture };\n };\n\n const _draw = function (flags) {\n let source = null;\n let target = null;\n let flipY = false;\n\n // Set up the source\n if (_drawCount === 0) {\n // First draw call - use the source texture\n source = _sourceTexture;\n } else {\n // All following draw calls use the temp buffer last drawn to\n source = _getTempFramebuffer(_currentFramebufferIndex).texture;\n }\n _drawCount++;\n\n // Set up the target\n if (_lastInChain && !(flags & DRAW.INTERMEDIATE)) {\n // Last filter in our chain - draw directly to the WebGL Canvas. We may\n // also have to flip the image vertically now\n target = null;\n flipY = _drawCount % 2 === 0;\n } else {\n // Intermediate draw call - get a temp buffer to draw to\n _currentFramebufferIndex = (_currentFramebufferIndex + 1) % 2;\n target = _getTempFramebuffer(_currentFramebufferIndex).fbo;\n }\n\n // Bind the source and target and draw the two triangles\n gl.bindTexture(gl.TEXTURE_2D, source);\n gl.bindFramebuffer(gl.FRAMEBUFFER, target);\n\n gl.uniform1f(_currentProgram.uniform.flipY, (flipY ? -1 : 1));\n gl.drawArrays(gl.TRIANGLES, 0, 6);\n };\n\n const _compileShader = function (fragmentSource) {\n if (_shaderProgramCache[fragmentSource]) {\n _currentProgram = _shaderProgramCache[fragmentSource];\n gl.useProgram(_currentProgram.id);\n return _currentProgram;\n }\n\n // Compile shaders\n _currentProgram = new WebGLProgram(gl, SHADER.VERTEX_IDENTITY, fragmentSource);\n\n const floatSize = Float32Array.BYTES_PER_ELEMENT;\n const vertSize = 4 * floatSize;\n gl.enableVertexAttribArray(_currentProgram.attribute.pos);\n gl.vertexAttribPointer(_currentProgram.attribute.pos, 2, gl.FLOAT, false, vertSize, 0 * floatSize);\n gl.enableVertexAttribArray(_currentProgram.attribute.uv);\n gl.vertexAttribPointer(_currentProgram.attribute.uv, 2, gl.FLOAT, false, vertSize, 2 * floatSize);\n\n _shaderProgramCache[fragmentSource] = _currentProgram;\n return _currentProgram;\n };\n\n let DRAW = { INTERMEDIATE: 1 };\n\n let SHADER = {};\n SHADER.VERTEX_IDENTITY = [\n 'precision highp float;',\n 'attribute vec2 pos;',\n 'attribute vec2 uv;',\n 'varying vec2 vUv;',\n 'uniform float flipY;',\n\n 'void main(void) {',\n 'vUv = uv;',\n 'gl_Position = vec4(pos.x, pos.y*flipY, 0.0, 1.);',\n '}',\n ].join('\\n');\n\n SHADER.FRAGMENT_IDENTITY = [\n 'precision highp float;',\n 'varying vec2 vUv;',\n 'uniform sampler2D texture;',\n\n 'void main(void) {',\n 'gl_FragColor = texture2D(texture, vUv);',\n '}',\n ].join('\\n');\n\n let _filter = {};\n\n // -------------------------------------------------------------------------\n // Color Matrix Filter\n\n _filter.colorMatrix = function (matrix) {\n // Create a Float32 Array and normalize the offset component to 0-1\n const m = new Float32Array(matrix);\n m[4] /= 255;\n m[9] /= 255;\n m[14] /= 255;\n m[19] /= 255;\n\n // Can we ignore the alpha value? Makes things a bit faster.\n const shader = (m[18] === 1 && m[3] === 0 && m[8] === 0 && m[13] === 0 && m[15] === 0 && m[16] === 0 && m[17] === 0 && m[19] === 0)\n ? 
_filter.colorMatrix.SHADER.WITHOUT_ALPHA\n : _filter.colorMatrix.SHADER.WITH_ALPHA;\n\n const program = _compileShader(shader);\n gl.uniform1fv(program.uniform.m, m);\n _draw();\n };\n\n _filter.colorMatrix.SHADER = {};\n _filter.colorMatrix.SHADER.WITH_ALPHA = [\n 'precision highp float;',\n 'varying vec2 vUv;',\n 'uniform sampler2D texture;',\n 'uniform float m[20];',\n\n 'void main(void) {',\n 'vec4 c = texture2D(texture, vUv);',\n 'gl_FragColor.r = m[0] * c.r + m[1] * c.g + m[2] * c.b + m[3] * c.a + m[4];',\n 'gl_FragColor.g = m[5] * c.r + m[6] * c.g + m[7] * c.b + m[8] * c.a + m[9];',\n 'gl_FragColor.b = m[10] * c.r + m[11] * c.g + m[12] * c.b + m[13] * c.a + m[14];',\n 'gl_FragColor.a = m[15] * c.r + m[16] * c.g + m[17] * c.b + m[18] * c.a + m[19];',\n '}',\n ].join('\\n');\n _filter.colorMatrix.SHADER.WITHOUT_ALPHA = [\n 'precision highp float;',\n 'varying vec2 vUv;',\n 'uniform sampler2D texture;',\n 'uniform float m[20];',\n\n 'void main(void) {',\n 'vec4 c = texture2D(texture, vUv);',\n 'gl_FragColor.r = m[0] * c.r + m[1] * c.g + m[2] * c.b + m[4];',\n 'gl_FragColor.g = m[5] * c.r + m[6] * c.g + m[7] * c.b + m[9];',\n 'gl_FragColor.b = m[10] * c.r + m[11] * c.g + m[12] * c.b + m[14];',\n 'gl_FragColor.a = c.a;',\n '}',\n ].join('\\n');\n\n _filter.brightness = function (brightness) {\n const b = (brightness || 0) + 1;\n _filter.colorMatrix([\n b, 0, 0, 0, 0,\n 0, b, 0, 0, 0,\n 0, 0, b, 0, 0,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.saturation = function (amount) {\n const x = (amount || 0) * 2 / 3 + 1;\n const y = ((x - 1) * -0.5);\n _filter.colorMatrix([\n x, y, y, 0, 0,\n y, x, y, 0, 0,\n y, y, x, 0, 0,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.desaturate = function () {\n _filter.saturation(-1);\n };\n\n _filter.contrast = function (amount) {\n const v = (amount || 0) + 1;\n const o = -128 * (v - 1);\n\n _filter.colorMatrix([\n v, 0, 0, 0, o,\n 0, v, 0, 0, o,\n 0, 0, v, 0, o,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.negative = function () {\n _filter.contrast(-2);\n };\n\n _filter.hue = function (rotation) {\n rotation = (rotation || 0) / 180 * Math.PI;\n const cos = Math.cos(rotation);\n const sin = Math.sin(rotation);\n const lumR = 0.213;\n const lumG = 0.715;\n const lumB = 0.072;\n\n _filter.colorMatrix([\n lumR + cos * (1 - lumR) + sin * (-lumR), lumG + cos * (-lumG) + sin * (-lumG), lumB + cos * (-lumB) + sin * (1 - lumB), 0, 0,\n lumR + cos * (-lumR) + sin * (0.143), lumG + cos * (1 - lumG) + sin * (0.140), lumB + cos * (-lumB) + sin * (-0.283), 0, 0,\n lumR + cos * (-lumR) + sin * (-(1 - lumR)), lumG + cos * (-lumG) + sin * (lumG), lumB + cos * (1 - lumB) + sin * (lumB), 0, 0,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.desaturateLuminance = function () {\n _filter.colorMatrix([\n 0.2764723, 0.9297080, 0.0938197, 0, -37.1,\n 0.2764723, 0.9297080, 0.0938197, 0, -37.1,\n 0.2764723, 0.9297080, 0.0938197, 0, -37.1,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.sepia = function () {\n _filter.colorMatrix([\n 0.393, 0.7689999, 0.18899999, 0, 0,\n 0.349, 0.6859999, 0.16799999, 0, 0,\n 0.272, 0.5339999, 0.13099999, 0, 0,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.brownie = function () {\n _filter.colorMatrix([\n 0.5997023498159715, 0.34553243048391263, -0.2708298674538042, 0, 47.43192855600873,\n -0.037703249837783157, 0.8609577587992641, 0.15059552388459913, 0, -36.96841498319127,\n 0.24113635128153335, -0.07441037908422492, 0.44972182064877153, 0, -7.562075277591283,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.vintagePinhole = function () {\n _filter.colorMatrix([\n 0.6279345635605994, 
0.3202183420819367, -0.03965408211312453, 0, 9.651285835294123,\n 0.02578397704808868, 0.6441188644374771, 0.03259127616149294, 0, 7.462829176470591,\n 0.0466055556782719, -0.0851232987247891, 0.5241648018700465, 0, 5.159190588235296,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.kodachrome = function () {\n _filter.colorMatrix([\n 1.1285582396593525, -0.3967382283601348, -0.03992559172921793, 0, 63.72958762196502,\n -0.16404339962244616, 1.0835251566291304, -0.05498805115633132, 0, 24.732407896706203,\n -0.16786010706155763, -0.5603416277695248, 1.6014850761964943, 0, 35.62982807460946,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.technicolor = function () {\n _filter.colorMatrix([\n 1.9125277891456083, -0.8545344976951645, -0.09155508482755585, 0, 11.793603434377337,\n -0.3087833385928097, 1.7658908555458428, -0.10601743074722245, 0, -70.35205161461398,\n -0.231103377548616, -0.7501899197440212, 1.847597816108189, 0, 30.950940869491138,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.polaroid = function () {\n _filter.colorMatrix([\n 1.438, -0.062, -0.062, 0, 0,\n -0.122, 1.378, -0.122, 0, 0,\n -0.016, -0.016, 1.483, 0, 0,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.shiftToBGR = function () {\n _filter.colorMatrix([\n 0, 0, 1, 0, 0,\n 0, 1, 0, 0, 0,\n 1, 0, 0, 0, 0,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n // -------------------------------------------------------------------------\n // Convolution Filter\n\n _filter.convolution = function (matrix) {\n const m = new Float32Array(matrix);\n const pixelSizeX = 1 / _width;\n const pixelSizeY = 1 / _height;\n\n const program = _compileShader(_filter.convolution.SHADER);\n gl.uniform1fv(program.uniform.m, m);\n gl.uniform2f(program.uniform.px, pixelSizeX, pixelSizeY);\n _draw();\n };\n\n _filter.convolution.SHADER = [\n 'precision highp float;',\n 'varying vec2 vUv;',\n 'uniform sampler2D texture;',\n 'uniform vec2 px;',\n 'uniform float m[9];',\n\n 'void main(void) {',\n 'vec4 c11 = texture2D(texture, vUv - px);', // top left\n 'vec4 c12 = texture2D(texture, vec2(vUv.x, vUv.y - px.y));', // top center\n 'vec4 c13 = texture2D(texture, vec2(vUv.x + px.x, vUv.y - px.y));', // top right\n\n 'vec4 c21 = texture2D(texture, vec2(vUv.x - px.x, vUv.y) );', // mid left\n 'vec4 c22 = texture2D(texture, vUv);', // mid center\n 'vec4 c23 = texture2D(texture, vec2(vUv.x + px.x, vUv.y) );', // mid right\n\n 'vec4 c31 = texture2D(texture, vec2(vUv.x - px.x, vUv.y + px.y) );', // bottom left\n 'vec4 c32 = texture2D(texture, vec2(vUv.x, vUv.y + px.y) );', // bottom center\n 'vec4 c33 = texture2D(texture, vUv + px );', // bottom right\n\n 'gl_FragColor = ',\n 'c11 * m[0] + c12 * m[1] + c22 * m[2] +',\n 'c21 * m[3] + c22 * m[4] + c23 * m[5] +',\n 'c31 * m[6] + c32 * m[7] + c33 * m[8];',\n 'gl_FragColor.a = c22.a;',\n '}',\n ].join('\\n');\n\n _filter.detectEdges = function () {\n _filter.convolution.call(this, [\n 0, 1, 0,\n 1, -4, 1,\n 0, 1, 0,\n ]);\n };\n\n _filter.sobelX = function () {\n _filter.convolution.call(this, [\n -1, 0, 1,\n -2, 0, 2,\n -1, 0, 1,\n ]);\n };\n\n _filter.sobelY = function () {\n _filter.convolution.call(this, [\n -1, -2, -1,\n 0, 0, 0,\n 1, 2, 1,\n ]);\n };\n\n _filter.sharpen = function (amount) {\n const a = amount || 1;\n _filter.convolution.call(this, [\n 0, -1 * a, 0,\n -1 * a, 1 + 4 * a, -1 * a,\n 0, -1 * a, 0,\n ]);\n };\n\n _filter.emboss = function (size) {\n const s = size || 1;\n _filter.convolution.call(this, [\n -2 * s, -1 * s, 0,\n -1 * s, 1, 1 * s,\n 0, 1 * s, 2 * s,\n ]);\n };\n\n // 
-------------------------------------------------------------------------\n // Blur Filter\n\n _filter.blur = function (size) {\n const blurSizeX = (size / 7) / _width;\n const blurSizeY = (size / 7) / _height;\n\n const program = _compileShader(_filter.blur.SHADER);\n\n // Vertical\n gl.uniform2f(program.uniform.px, 0, blurSizeY);\n _draw(DRAW.INTERMEDIATE);\n\n // Horizontal\n gl.uniform2f(program.uniform.px, blurSizeX, 0);\n _draw();\n };\n\n _filter.blur.SHADER = [\n 'precision highp float;',\n 'varying vec2 vUv;',\n 'uniform sampler2D texture;',\n 'uniform vec2 px;',\n\n 'void main(void) {',\n 'gl_FragColor = vec4(0.0);',\n 'gl_FragColor += texture2D(texture, vUv + vec2(-7.0*px.x, -7.0*px.y))*0.0044299121055113265;',\n 'gl_FragColor += texture2D(texture, vUv + vec2(-6.0*px.x, -6.0*px.y))*0.00895781211794;',\n 'gl_FragColor += texture2D(texture, vUv + vec2(-5.0*px.x, -5.0*px.y))*0.0215963866053;',\n 'gl_FragColor += texture2D(texture, vUv + vec2(-4.0*px.x, -4.0*px.y))*0.0443683338718;',\n 'gl_FragColor += texture2D(texture, vUv + vec2(-3.0*px.x, -3.0*px.y))*0.0776744219933;',\n 'gl_FragColor += texture2D(texture, vUv + vec2(-2.0*px.x, -2.0*px.y))*0.115876621105;',\n 'gl_FragColor += texture2D(texture, vUv + vec2(-1.0*px.x, -1.0*px.y))*0.147308056121;',\n 'gl_FragColor += texture2D(texture, vUv )*0.159576912161;',\n 'gl_FragColor += texture2D(texture, vUv + vec2( 1.0*px.x, 1.0*px.y))*0.147308056121;',\n 'gl_FragColor += texture2D(texture, vUv + vec2( 2.0*px.x, 2.0*px.y))*0.115876621105;',\n 'gl_FragColor += texture2D(texture, vUv + vec2( 3.0*px.x, 3.0*px.y))*0.0776744219933;',\n 'gl_FragColor += texture2D(texture, vUv + vec2( 4.0*px.x, 4.0*px.y))*0.0443683338718;',\n 'gl_FragColor += texture2D(texture, vUv + vec2( 5.0*px.x, 5.0*px.y))*0.0215963866053;',\n 'gl_FragColor += texture2D(texture, vUv + vec2( 6.0*px.x, 6.0*px.y))*0.00895781211794;',\n 'gl_FragColor += texture2D(texture, vUv + vec2( 7.0*px.x, 7.0*px.y))*0.0044299121055113265;',\n '}',\n ].join('\\n');\n\n // -------------------------------------------------------------------------\n // Pixelate Filter\n\n _filter.pixelate = function (size) {\n const blurSizeX = (size) / _width;\n const blurSizeY = (size) / _height;\n\n const program = _compileShader(_filter.pixelate.SHADER);\n\n // Horizontal\n gl.uniform2f(program.uniform.size, blurSizeX, blurSizeY);\n _draw();\n };\n\n _filter.pixelate.SHADER = [\n 'precision highp float;',\n 'varying vec2 vUv;',\n 'uniform vec2 size;',\n 'uniform sampler2D texture;',\n\n 'vec2 pixelate(vec2 coord, vec2 size) {',\n 'return floor( coord / size ) * size;',\n '}',\n\n 'void main(void) {',\n 'gl_FragColor = vec4(0.0);',\n 'vec2 coord = pixelate(vUv, size);',\n 'gl_FragColor += texture2D(texture, coord);',\n '}',\n ].join('\\n');\n};\n\nexports.Canvas = WebGLImageFilter;\n", "import { tf } from './tf.js';\nimport * as fxImage from './imagefx.js';\n\n// internal temp canvases\nlet inCanvas = null;\nlet outCanvas = null;\n\n// process input image and return tensor\n// input can be tensor, imagedata, htmlimageelement, htmlvideoelement\n// input is resized and run through imagefx filter\nfunction process(input, config) {\n let tensor;\n if (input instanceof tf.Tensor) {\n tensor = tf.clone(input);\n } else {\n const originalWidth = input.naturalWidth || input.videoWidth || input.width || (input.shape && (input.shape[1] > 0));\n const originalHeight = input.naturalHeight || input.videoHeight || input.height || (input.shape && (input.shape[2] > 0));\n let targetWidth = originalWidth;\n let targetHeight 
= originalHeight;\n if (config.filter.width > 0) targetWidth = config.filter.width;\n else if (config.filter.height > 0) targetWidth = originalWidth * (config.filter.height / originalHeight);\n if (config.filter.height > 0) targetHeight = config.filter.height;\n else if (config.filter.width > 0) targetHeight = originalHeight * (config.filter.width / originalWidth);\n if (!inCanvas || (inCanvas.width !== targetWidth) || (inCanvas.height !== targetHeight)) {\n inCanvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement('canvas');\n if (inCanvas.width !== targetWidth) inCanvas.width = targetWidth;\n if (inCanvas.height !== targetHeight) inCanvas.height = targetHeight;\n }\n const ctx = inCanvas.getContext('2d');\n if (input instanceof ImageData) ctx.putImageData(input, 0, 0);\n else ctx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas.width, inCanvas.height);\n if (config.filter.enabled) {\n if (!this.fx || !outCanvas || (inCanvas.width !== outCanvas.width) || (inCanvas.height !== outCanvas.height)) {\n outCanvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(inCanvas.width, inCanvas.height) : document.createElement('canvas');\n if (outCanvas.width !== inCanvas.width) outCanvas.width = inCanvas.width;\n if (outCanvas.height !== inCanvas.height) outCanvas.height = inCanvas.height;\n this.fx = tf.ENV.flags.IS_BROWSER ? new fxImage.Canvas({ canvas: outCanvas }) : null; // && (typeof document !== 'undefined')\n }\n this.fx.reset();\n this.fx.addFilter('brightness', config.filter.brightness); // must have at least one filter enabled\n if (config.filter.contrast !== 0) this.fx.addFilter('contrast', config.filter.contrast);\n if (config.filter.sharpness !== 0) this.fx.addFilter('sharpen', config.filter.sharpness);\n if (config.filter.blur !== 0) this.fx.addFilter('blur', config.filter.blur);\n if (config.filter.saturation !== 0) this.fx.addFilter('saturation', config.filter.saturation);\n if (config.filter.hue !== 0) this.fx.addFilter('hue', config.filter.hue);\n if (config.filter.negative) this.fx.addFilter('negative');\n if (config.filter.sepia) this.fx.addFilter('sepia');\n if (config.filter.vintage) this.fx.addFilter('brownie');\n if (config.filter.sepia) this.fx.addFilter('sepia');\n if (config.filter.kodachrome) this.fx.addFilter('kodachrome');\n if (config.filter.technicolor) this.fx.addFilter('technicolor');\n if (config.filter.polaroid) this.fx.addFilter('polaroid');\n if (config.filter.pixelate !== 0) this.fx.addFilter('pixelate', config.filter.pixelate);\n this.fx.apply(inCanvas);\n // read pixel data\n // const gl = outCanvas.getContext('webgl');\n const gl = false;\n if (gl) {\n const glBuffer = new Uint8Array(outCanvas.width * outCanvas.height * 4);\n const pixBuffer = new Uint8Array(outCanvas.width * outCanvas.height * 3);\n gl.readPixels(0, 0, outCanvas.width, outCanvas.height, gl.RGBA, gl.UNSIGNED_BYTE, glBuffer);\n // gl returns rbga while we only need rgb, so discarding alpha channel\n // gl returns starting point as lower left, so need to invert vertical\n let i = 0;\n for (let y = outCanvas.height - 1; y >= 0; y--) {\n for (let x = 0; x < outCanvas.width; x++) {\n const index = (x + y * outCanvas.width) * 4;\n pixBuffer[i++] = glBuffer[index + 0];\n pixBuffer[i++] = glBuffer[index + 1];\n pixBuffer[i++] = glBuffer[index + 2];\n }\n }\n outCanvas.data = pixBuffer;\n }\n } else {\n outCanvas = inCanvas;\n }\n let pixels;\n if (outCanvas.data) {\n const shape = [outCanvas.height, 
outCanvas.width, 3];\n pixels = tf.tensor3d(outCanvas.data, shape, 'int32');\n } else if ((config.backend === 'webgl') || (outCanvas instanceof ImageData)) {\n // tf kernel-optimized method to get imagedata, also if input is imagedata, just use it\n pixels = tf.browser.fromPixels(outCanvas);\n } else {\n // cpu and wasm kernel does not implement efficient fromPixels method nor we can use canvas as-is, so we do a silly one more canvas\n const tempCanvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement('canvas');\n tempCanvas.width = targetWidth;\n tempCanvas.height = targetHeight;\n const tempCtx = tempCanvas.getContext('2d');\n tempCtx.drawImage(outCanvas, 0, 0);\n const data = tempCtx.getImageData(0, 0, targetWidth, targetHeight);\n pixels = tf.browser.fromPixels(data);\n }\n const casted = pixels.toFloat();\n tensor = casted.expandDims(0);\n pixels.dispose();\n casted.dispose();\n }\n return { tensor, canvas: config.filter.return ? outCanvas : null };\n}\n\nexports.process = process;\n", "// custom: bundle 3.4M\n/*\nimport * as tf from '../../../dev-clone/tfjs/tfjs/dist/tf.esnext.js';\nimport { setWasmPaths } from '@tensorflow/tfjs-backend-wasm/dist/index.js';\n\nconst loadGraphModel = tf.loadGraphModel;\nexport { tf, setWasmPaths, loadGraphModel };\n*/\n\n// monolithic: bundle 3.4M\nimport * as tf from '@tensorflow/tfjs/dist/tf.es2017.js';\nimport { setWasmPaths } from '@tensorflow/tfjs-backend-wasm/dist/index.js';\n\nconst loadGraphModel = tf.loadGraphModel;\nexport { tf, setWasmPaths, loadGraphModel };\n\n// modular: bundle 4.2M\n/*\nimport * as tf from '@tensorflow/tfjs-core/dist/tf-core.es2017.js';\nimport { loadGraphModel } from '@tensorflow/tfjs-converter/dist/tf-converter.es2017.js';\nimport * as tfCPU from '@tensorflow/tfjs-backend-cpu/dist/tf-backend-cpu.es2017.js';\nimport * as tfWebGL from '@tensorflow/tfjs-backend-webgl/dist/tf-backend-webgl.es2017.js';\nimport { setWasmPaths, version_wasm } from '@tensorflow/tfjs-backend-wasm/dist/index.js';\n\nconst version = { core: tf.version, cpu: tfCPU.version_cpu, webgl: tfWebGL.version_webgl, wasm: version_wasm };\n\nexport { tf, setWasmPaths, loadGraphModel, version };\n*/\n", null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, "export const wasmWorkerContents = 'var threadInfoStruct=0;var selfThreadId=0;var parentThreadId=0;var Module={};function threadPrintErr(){var text=Array.prototype.slice.call(arguments).join(\" 
\");console.error(text)}function threadAlert(){var text=Array.prototype.slice.call(arguments).join(\" \");postMessage({cmd:\"alert\",text:text,threadId:selfThreadId})}var err=threadPrintErr;this.alert=threadAlert;Module[\"instantiateWasm\"]=function(info,receiveInstance){var instance=new WebAssembly.Instance(Module[\"wasmModule\"],info);Module[\"wasmModule\"]=null;receiveInstance(instance);return instance.exports};this.onmessage=function(e){try{if(e.data.cmd===\"load\"){Module[\"DYNAMIC_BASE\"]=e.data.DYNAMIC_BASE;Module[\"DYNAMICTOP_PTR\"]=e.data.DYNAMICTOP_PTR;Module[\"wasmModule\"]=e.data.wasmModule;Module[\"wasmMemory\"]=e.data.wasmMemory;Module[\"buffer\"]=Module[\"wasmMemory\"].buffer;Module[\"ENVIRONMENT_IS_PTHREAD\"]=true;if(typeof e.data.urlOrBlob===\"string\"){importScripts(e.data.urlOrBlob)}else{var objectUrl=URL.createObjectURL(e.data.urlOrBlob);importScripts(objectUrl);URL.revokeObjectURL(objectUrl)}Module=WasmBackendModuleThreadedSimd(Module);postMessage({\"cmd\":\"loaded\"})}else if(e.data.cmd===\"objectTransfer\"){Module[\"PThread\"].receiveObjectTransfer(e.data)}else if(e.data.cmd===\"run\"){Module[\"__performance_now_clock_drift\"]=performance.now()-e.data.time;threadInfoStruct=e.data.threadInfoStruct;Module[\"__register_pthread_ptr\"](threadInfoStruct,0,0);selfThreadId=e.data.selfThreadId;parentThreadId=e.data.parentThreadId;var max=e.data.stackBase;var top=e.data.stackBase+e.data.stackSize;Module[\"establishStackSpace\"](top,max);Module[\"_emscripten_tls_init\"]();Module[\"PThread\"].receiveObjectTransfer(e.data);Module[\"PThread\"].setThreadStatus(Module[\"_pthread_self\"](),1);try{var result=Module[\"dynCall_ii\"](e.data.start_routine,e.data.arg);if(!Module[\"getNoExitRuntime\"]())Module[\"PThread\"].threadExit(result)}catch(ex){if(ex===\"Canceled!\"){Module[\"PThread\"].threadCancel()}else if(ex!=\"unwind\"){Atomics.store(Module[\"HEAPU32\"],threadInfoStruct+4>>2,ex instanceof Module[\"ExitStatus\"]?ex.status:-2);Atomics.store(Module[\"HEAPU32\"],threadInfoStruct+0>>2,1);Module[\"_emscripten_futex_wake\"](threadInfoStruct+0,2147483647);if(!(ex instanceof Module[\"ExitStatus\"]))throw ex}}}else if(e.data.cmd===\"cancel\"){if(threadInfoStruct){Module[\"PThread\"].threadCancel()}}else if(e.data.target===\"setimmediate\"){}else if(e.data.cmd===\"processThreadQueue\"){if(threadInfoStruct){Module[\"_emscripten_current_thread_process_queued_calls\"]()}}else{err(\"worker.js received unknown command \"+e.data.cmd);err(e.data)}}catch(ex){err(\"worker.js onmessage() captured an uncaught exception: \"+ex);if(ex.stack)err(ex.stack);throw ex}};if(typeof process===\"object\"&&typeof process.versions===\"object\"&&typeof process.versions.node===\"string\"){self={location:{href:__filename}};var onmessage=this.onmessage;var nodeWorkerThreads=require(\"worker_threads\");Worker=nodeWorkerThreads.Worker;var parentPort=nodeWorkerThreads.parentPort;parentPort.on(\"message\",function(data){onmessage({data:data})});var nodeFS=require(\"fs\");var nodeRead=function(filename){return nodeFS.readFileSync(filename,\"utf8\")};function globalEval(x){global.require=require;global.Module=Module;eval.call(null,x)}importScripts=function(f){globalEval(nodeRead(f))};postMessage=function(msg){parentPort.postMessage(msg)};if(typeof performance===\"undefined\"){performance={now:function(){return Date.now()}}}}';", null, null, null, null, "// custom: bundle 3.4M\n/*\nimport * as tf from '../../../dev-clone/tfjs/tfjs/dist/tf.esnext.js';\nimport { setWasmPaths } from 
'@tensorflow/tfjs-backend-wasm/dist/index.js';\n\nconst loadGraphModel = tf.loadGraphModel;\nexport { tf, setWasmPaths, loadGraphModel };\n*/\n\n// monolithic: bundle 3.4M\nimport * as tf from '@tensorflow/tfjs/dist/tf.es2017.js';\nimport { setWasmPaths } from '@tensorflow/tfjs-backend-wasm/dist/index.js';\n\nconst loadGraphModel = tf.loadGraphModel;\nexport { tf, setWasmPaths, loadGraphModel };\n\n// modular: bundle 4.2M\n/*\nimport * as tf from '@tensorflow/tfjs-core/dist/tf-core.es2017.js';\nimport { loadGraphModel } from '@tensorflow/tfjs-converter/dist/tf-converter.es2017.js';\nimport * as tfCPU from '@tensorflow/tfjs-backend-cpu/dist/tf-backend-cpu.es2017.js';\nimport * as tfWebGL from '@tensorflow/tfjs-backend-webgl/dist/tf-backend-webgl.es2017.js';\nimport { setWasmPaths, version_wasm } from '@tensorflow/tfjs-backend-wasm/dist/index.js';\n\nconst version = { core: tf.version, cpu: tfCPU.version_cpu, webgl: tfWebGL.version_webgl, wasm: version_wasm };\n\nexport { tf, setWasmPaths, loadGraphModel, version };\n*/\n", "export default [\n 127, 34, 139, 11, 0, 37, 232, 231, 120, 72, 37, 39, 128, 121, 47, 232, 121,\n 128, 104, 69, 67, 175, 171, 148, 157, 154, 155, 118, 50, 101, 73, 39, 40, 9,\n 151, 108, 48, 115, 131, 194, 204, 211, 74, 40, 185, 80, 42, 183, 40, 92,\n 186, 230, 229, 118, 202, 212, 214, 83, 18, 17, 76, 61, 146, 160, 29, 30, 56,\n 157, 173, 106, 204, 194, 135, 214, 192, 203, 165, 98, 21, 71, 68, 51, 45, 4,\n 144, 24, 23, 77, 146, 91, 205, 50, 187, 201, 200, 18, 91, 106, 182, 90, 91,\n 181, 85, 84, 17, 206, 203, 36, 148, 171, 140, 92, 40, 39, 193, 189, 244,\n 159, 158, 28, 247, 246, 161, 236, 3, 196, 54, 68, 104, 193, 168, 8, 117,\n 228, 31, 189, 193, 55, 98, 97, 99, 126, 47, 100, 166, 79, 218, 155, 154, 26,\n 209, 49, 131, 135, 136, 150, 47, 126, 217, 223, 52, 53, 45, 51, 134, 211,\n 170, 140, 67, 69, 108, 43, 106, 91, 230, 119, 120, 226, 130, 247, 63, 53,\n 52, 238, 20, 242, 46, 70, 156, 78, 62, 96, 46, 53, 63, 143, 34, 227, 173,\n 155, 133, 123, 117, 111, 44, 125, 19, 236, 134, 51, 216, 206, 205, 154, 153,\n 22, 39, 37, 167, 200, 201, 208, 36, 142, 100, 57, 212, 202, 20, 60, 99, 28,\n 158, 157, 35, 226, 113, 160, 159, 27, 204, 202, 210, 113, 225, 46, 43, 202,\n 204, 62, 76, 77, 137, 123, 116, 41, 38, 72, 203, 129, 142, 64, 98, 240, 49,\n 102, 64, 41, 73, 74, 212, 216, 207, 42, 74, 184, 169, 170, 211, 170, 149,\n 176, 105, 66, 69, 122, 6, 168, 123, 147, 187, 96, 77, 90, 65, 55, 107, 89,\n 90, 180, 101, 100, 120, 63, 105, 104, 93, 137, 227, 15, 86, 85, 129, 102,\n 49, 14, 87, 86, 55, 8, 9, 100, 47, 121, 145, 23, 22, 88, 89, 179, 6, 122,\n 196, 88, 95, 96, 138, 172, 136, 215, 58, 172, 115, 48, 219, 42, 80, 81, 195,\n 3, 51, 43, 146, 61, 171, 175, 199, 81, 82, 38, 53, 46, 225, 144, 163, 110,\n 246, 33, 7, 52, 65, 66, 229, 228, 117, 34, 127, 234, 107, 108, 69, 109, 108,\n 151, 48, 64, 235, 62, 78, 191, 129, 209, 126, 111, 35, 143, 163, 161, 246,\n 117, 123, 50, 222, 65, 52, 19, 125, 141, 221, 55, 65, 3, 195, 197, 25, 7,\n 33, 220, 237, 44, 70, 71, 139, 122, 193, 245, 247, 130, 33, 71, 21, 162,\n 153, 158, 159, 170, 169, 150, 188, 174, 196, 216, 186, 92, 144, 160, 161, 2,\n 97, 167, 141, 125, 241, 164, 167, 37, 72, 38, 12, 145, 159, 160, 38, 82, 13,\n 63, 68, 71, 226, 35, 111, 158, 153, 154, 101, 50, 205, 206, 92, 165, 209,\n 198, 217, 165, 167, 97, 220, 115, 218, 133, 112, 243, 239, 238, 241, 214,\n 135, 169, 190, 173, 133, 171, 208, 32, 125, 44, 237, 86, 87, 178, 85, 86,\n 179, 84, 85, 180, 83, 84, 181, 201, 83, 182, 137, 93, 132, 76, 62, 183, 61,\n 76, 184, 57, 61, 185, 212, 
57, 186, 214, 207, 187, 34, 143, 156, 79, 239,\n 237, 123, 137, 177, 44, 1, 4, 201, 194, 32, 64, 102, 129, 213, 215, 138, 59,\n 166, 219, 242, 99, 97, 2, 94, 141, 75, 59, 235, 24, 110, 228, 25, 130, 226,\n 23, 24, 229, 22, 23, 230, 26, 22, 231, 112, 26, 232, 189, 190, 243, 221, 56,\n 190, 28, 56, 221, 27, 28, 222, 29, 27, 223, 30, 29, 224, 247, 30, 225, 238,\n 79, 20, 166, 59, 75, 60, 75, 240, 147, 177, 215, 20, 79, 166, 187, 147, 213,\n 112, 233, 244, 233, 128, 245, 128, 114, 188, 114, 217, 174, 131, 115, 220,\n 217, 198, 236, 198, 131, 134, 177, 132, 58, 143, 35, 124, 110, 163, 7, 228,\n 110, 25, 356, 389, 368, 11, 302, 267, 452, 350, 349, 302, 303, 269, 357,\n 343, 277, 452, 453, 357, 333, 332, 297, 175, 152, 377, 384, 398, 382, 347,\n 348, 330, 303, 304, 270, 9, 336, 337, 278, 279, 360, 418, 262, 431, 304,\n 408, 409, 310, 415, 407, 270, 409, 410, 450, 348, 347, 422, 430, 434, 313,\n 314, 17, 306, 307, 375, 387, 388, 260, 286, 414, 398, 335, 406, 418, 364,\n 367, 416, 423, 358, 327, 251, 284, 298, 281, 5, 4, 373, 374, 253, 307, 320,\n 321, 425, 427, 411, 421, 313, 18, 321, 405, 406, 320, 404, 405, 315, 16, 17,\n 426, 425, 266, 377, 400, 369, 322, 391, 269, 417, 465, 464, 386, 257, 258,\n 466, 260, 388, 456, 399, 419, 284, 332, 333, 417, 285, 8, 346, 340, 261,\n 413, 441, 285, 327, 460, 328, 355, 371, 329, 392, 439, 438, 382, 341, 256,\n 429, 420, 360, 364, 394, 379, 277, 343, 437, 443, 444, 283, 275, 440, 363,\n 431, 262, 369, 297, 338, 337, 273, 375, 321, 450, 451, 349, 446, 342, 467,\n 293, 334, 282, 458, 461, 462, 276, 353, 383, 308, 324, 325, 276, 300, 293,\n 372, 345, 447, 382, 398, 362, 352, 345, 340, 274, 1, 19, 456, 248, 281, 436,\n 427, 425, 381, 256, 252, 269, 391, 393, 200, 199, 428, 266, 330, 329, 287,\n 273, 422, 250, 462, 328, 258, 286, 384, 265, 353, 342, 387, 259, 257, 424,\n 431, 430, 342, 353, 276, 273, 335, 424, 292, 325, 307, 366, 447, 345, 271,\n 303, 302, 423, 266, 371, 294, 455, 460, 279, 278, 294, 271, 272, 304, 432,\n 434, 427, 272, 407, 408, 394, 430, 431, 395, 369, 400, 334, 333, 299, 351,\n 417, 168, 352, 280, 411, 325, 319, 320, 295, 296, 336, 319, 403, 404, 330,\n 348, 349, 293, 298, 333, 323, 454, 447, 15, 16, 315, 358, 429, 279, 14, 15,\n 316, 285, 336, 9, 329, 349, 350, 374, 380, 252, 318, 402, 403, 6, 197, 419,\n 318, 319, 325, 367, 364, 365, 435, 367, 397, 344, 438, 439, 272, 271, 311,\n 195, 5, 281, 273, 287, 291, 396, 428, 199, 311, 271, 268, 283, 444, 445,\n 373, 254, 339, 263, 466, 249, 282, 334, 296, 449, 347, 346, 264, 447, 454,\n 336, 296, 299, 338, 10, 151, 278, 439, 455, 292, 407, 415, 358, 371, 355,\n 340, 345, 372, 390, 249, 466, 346, 347, 280, 442, 443, 282, 19, 94, 370,\n 441, 442, 295, 248, 419, 197, 263, 255, 359, 440, 275, 274, 300, 383, 368,\n 351, 412, 465, 263, 467, 466, 301, 368, 389, 380, 374, 386, 395, 378, 379,\n 412, 351, 419, 436, 426, 322, 373, 390, 388, 2, 164, 393, 370, 462, 461,\n 164, 0, 267, 302, 11, 12, 374, 373, 387, 268, 12, 13, 293, 300, 301, 446,\n 261, 340, 385, 384, 381, 330, 266, 425, 426, 423, 391, 429, 355, 437, 391,\n 327, 326, 440, 457, 438, 341, 382, 362, 459, 457, 461, 434, 430, 394, 414,\n 463, 362, 396, 369, 262, 354, 461, 457, 316, 403, 402, 315, 404, 403, 314,\n 405, 404, 313, 406, 405, 421, 418, 406, 366, 401, 361, 306, 408, 407, 291,\n 409, 408, 287, 410, 409, 432, 436, 410, 434, 416, 411, 264, 368, 383, 309,\n 438, 457, 352, 376, 401, 274, 275, 4, 421, 428, 262, 294, 327, 358, 433,\n 416, 367, 289, 455, 439, 462, 370, 326, 2, 326, 370, 305, 460, 455, 254,\n 449, 448, 255, 261, 446, 253, 450, 449, 
252, 451, 450, 256, 452, 451, 341,\n 453, 452, 413, 464, 463, 441, 413, 414, 258, 442, 441, 257, 443, 442, 259,\n 444, 443, 260, 445, 444, 467, 342, 445, 459, 458, 250, 289, 392, 290, 290,\n 328, 460, 376, 433, 435, 250, 290, 392, 411, 416, 433, 341, 463, 464, 453,\n 464, 465, 357, 465, 412, 343, 412, 399, 360, 363, 440, 437, 399, 456, 420,\n 456, 363, 401, 435, 288, 372, 383, 353, 339, 255, 249, 448, 261, 255, 133,\n 243, 190, 133, 155, 112, 33, 246, 247, 33, 130, 25, 398, 384, 286, 362, 398,\n 414, 362, 463, 341, 263, 359, 467, 263, 249, 255, 466, 467, 260, 75, 60,\n 166, 238, 239, 79, 162, 127, 139, 72, 11, 37, 121, 232, 120, 73, 72, 39,\n 114, 128, 47, 233, 232, 128, 103, 104, 67, 152, 175, 148, 173, 157, 155,\n 119, 118, 101, 74, 73, 40, 107, 9, 108, 49, 48, 131, 32, 194, 211, 184, 74,\n 185, 191, 80, 183, 185, 40, 186, 119, 230, 118, 210, 202, 214, 84, 83, 17,\n 77, 76, 146, 161, 160, 30, 190, 56, 173, 182, 106, 194, 138, 135, 192, 129,\n 203, 98, 54, 21, 68, 5, 51, 4, 145, 144, 23, 90, 77, 91, 207, 205, 187, 83,\n 201, 18, 181, 91, 182, 180, 90, 181, 16, 85, 17, 205, 206, 36, 176, 148,\n 140, 165, 92, 39, 245, 193, 244, 27, 159, 28, 30, 247, 161, 174, 236, 196,\n 103, 54, 104, 55, 193, 8, 111, 117, 31, 221, 189, 55, 240, 98, 99, 142, 126,\n 100, 219, 166, 218, 112, 155, 26, 198, 209, 131, 169, 135, 150, 114, 47,\n 217, 224, 223, 53, 220, 45, 134, 32, 211, 140, 109, 67, 108, 146, 43, 91,\n 231, 230, 120, 113, 226, 247, 105, 63, 52, 241, 238, 242, 124, 46, 156, 95,\n 78, 96, 70, 46, 63, 116, 143, 227, 116, 123, 111, 1, 44, 19, 3, 236, 51,\n 207, 216, 205, 26, 154, 22, 165, 39, 167, 199, 200, 208, 101, 36, 100, 43,\n 57, 202, 242, 20, 99, 56, 28, 157, 124, 35, 113, 29, 160, 27, 211, 204, 210,\n 124, 113, 46, 106, 43, 204, 96, 62, 77, 227, 137, 116, 73, 41, 72, 36, 203,\n 142, 235, 64, 240, 48, 49, 64, 42, 41, 74, 214, 212, 207, 183, 42, 184, 210,\n 169, 211, 140, 170, 176, 104, 105, 69, 193, 122, 168, 50, 123, 187, 89, 96,\n 90, 66, 65, 107, 179, 89, 180, 119, 101, 120, 68, 63, 104, 234, 93, 227, 16,\n 15, 85, 209, 129, 49, 15, 14, 86, 107, 55, 9, 120, 100, 121, 153, 145, 22,\n 178, 88, 179, 197, 6, 196, 89, 88, 96, 135, 138, 136, 138, 215, 172, 218,\n 115, 219, 41, 42, 81, 5, 195, 51, 57, 43, 61, 208, 171, 199, 41, 81, 38,\n 224, 53, 225, 24, 144, 110, 105, 52, 66, 118, 229, 117, 227, 34, 234, 66,\n 107, 69, 10, 109, 151, 219, 48, 235, 183, 62, 191, 142, 129, 126, 116, 111,\n 143, 7, 163, 246, 118, 117, 50, 223, 222, 52, 94, 19, 141, 222, 221, 65,\n 196, 3, 197, 45, 220, 44, 156, 70, 139, 188, 122, 245, 139, 71, 162, 145,\n 153, 159, 149, 170, 150, 122, 188, 196, 206, 216, 92, 163, 144, 161, 164, 2,\n 167, 242, 141, 241, 0, 164, 37, 11, 72, 12, 144, 145, 160, 12, 38, 13, 70,\n 63, 71, 31, 226, 111, 157, 158, 154, 36, 101, 205, 203, 206, 165, 126, 209,\n 217, 98, 165, 97, 237, 220, 218, 237, 239, 241, 210, 214, 169, 140, 171, 32,\n 241, 125, 237, 179, 86, 178, 180, 85, 179, 181, 84, 180, 182, 83, 181, 194,\n 201, 182, 177, 137, 132, 184, 76, 183, 185, 61, 184, 186, 57, 185, 216, 212,\n 186, 192, 214, 187, 139, 34, 156, 218, 79, 237, 147, 123, 177, 45, 44, 4,\n 208, 201, 32, 98, 64, 129, 192, 213, 138, 235, 59, 219, 141, 242, 97, 97, 2,\n 141, 240, 75, 235, 229, 24, 228, 31, 25, 226, 230, 23, 229, 231, 22, 230,\n 232, 26, 231, 233, 112, 232, 244, 189, 243, 189, 221, 190, 222, 28, 221,\n 223, 27, 222, 224, 29, 223, 225, 30, 224, 113, 247, 225, 99, 60, 240, 213,\n 147, 215, 60, 20, 166, 192, 187, 213, 243, 112, 244, 244, 233, 245, 245,\n 128, 188, 188, 114, 174, 134, 131, 220, 174, 217, 
236, 236, 198, 134, 215,\n 177, 58, 156, 143, 124, 25, 110, 7, 31, 228, 25, 264, 356, 368, 0, 11, 267,\n 451, 452, 349, 267, 302, 269, 350, 357, 277, 350, 452, 357, 299, 333, 297,\n 396, 175, 377, 381, 384, 382, 280, 347, 330, 269, 303, 270, 151, 9, 337,\n 344, 278, 360, 424, 418, 431, 270, 304, 409, 272, 310, 407, 322, 270, 410,\n 449, 450, 347, 432, 422, 434, 18, 313, 17, 291, 306, 375, 259, 387, 260,\n 424, 335, 418, 434, 364, 416, 391, 423, 327, 301, 251, 298, 275, 281, 4,\n 254, 373, 253, 375, 307, 321, 280, 425, 411, 200, 421, 18, 335, 321, 406,\n 321, 320, 405, 314, 315, 17, 423, 426, 266, 396, 377, 369, 270, 322, 269,\n 413, 417, 464, 385, 386, 258, 248, 456, 419, 298, 284, 333, 168, 417, 8,\n 448, 346, 261, 417, 413, 285, 326, 327, 328, 277, 355, 329, 309, 392, 438,\n 381, 382, 256, 279, 429, 360, 365, 364, 379, 355, 277, 437, 282, 443, 283,\n 281, 275, 363, 395, 431, 369, 299, 297, 337, 335, 273, 321, 348, 450, 349,\n 359, 446, 467, 283, 293, 282, 250, 458, 462, 300, 276, 383, 292, 308, 325,\n 283, 276, 293, 264, 372, 447, 346, 352, 340, 354, 274, 19, 363, 456, 281,\n 426, 436, 425, 380, 381, 252, 267, 269, 393, 421, 200, 428, 371, 266, 329,\n 432, 287, 422, 290, 250, 328, 385, 258, 384, 446, 265, 342, 386, 387, 257,\n 422, 424, 430, 445, 342, 276, 422, 273, 424, 306, 292, 307, 352, 366, 345,\n 268, 271, 302, 358, 423, 371, 327, 294, 460, 331, 279, 294, 303, 271, 304,\n 436, 432, 427, 304, 272, 408, 395, 394, 431, 378, 395, 400, 296, 334, 299,\n 6, 351, 168, 376, 352, 411, 307, 325, 320, 285, 295, 336, 320, 319, 404,\n 329, 330, 349, 334, 293, 333, 366, 323, 447, 316, 15, 315, 331, 358, 279,\n 317, 14, 316, 8, 285, 9, 277, 329, 350, 253, 374, 252, 319, 318, 403, 351,\n 6, 419, 324, 318, 325, 397, 367, 365, 288, 435, 397, 278, 344, 439, 310,\n 272, 311, 248, 195, 281, 375, 273, 291, 175, 396, 199, 312, 311, 268, 276,\n 283, 445, 390, 373, 339, 295, 282, 296, 448, 449, 346, 356, 264, 454, 337,\n 336, 299, 337, 338, 151, 294, 278, 455, 308, 292, 415, 429, 358, 355, 265,\n 340, 372, 388, 390, 466, 352, 346, 280, 295, 442, 282, 354, 19, 370, 285,\n 441, 295, 195, 248, 197, 457, 440, 274, 301, 300, 368, 417, 351, 465, 251,\n 301, 389, 385, 380, 386, 394, 395, 379, 399, 412, 419, 410, 436, 322, 387,\n 373, 388, 326, 2, 393, 354, 370, 461, 393, 164, 267, 268, 302, 12, 386, 374,\n 387, 312, 268, 13, 298, 293, 301, 265, 446, 340, 380, 385, 381, 280, 330,\n 425, 322, 426, 391, 420, 429, 437, 393, 391, 326, 344, 440, 438, 458, 459,\n 461, 364, 434, 394, 428, 396, 262, 274, 354, 457, 317, 316, 402, 316, 315,\n 403, 315, 314, 404, 314, 313, 405, 313, 421, 406, 323, 366, 361, 292, 306,\n 407, 306, 291, 408, 291, 287, 409, 287, 432, 410, 427, 434, 411, 372, 264,\n 383, 459, 309, 457, 366, 352, 401, 1, 274, 4, 418, 421, 262, 331, 294, 358,\n 435, 433, 367, 392, 289, 439, 328, 462, 326, 94, 2, 370, 289, 305, 455, 339,\n 254, 448, 359, 255, 446, 254, 253, 449, 253, 252, 450, 252, 256, 451, 256,\n 341, 452, 414, 413, 463, 286, 441, 414, 286, 258, 441, 258, 257, 442, 257,\n 259, 443, 259, 260, 444, 260, 467, 445, 309, 459, 250, 305, 289, 290, 305,\n 290, 460, 401, 376, 435, 309, 250, 392, 376, 411, 433, 453, 341, 464, 357,\n 453, 465, 343, 357, 412, 437, 343, 399, 344, 360, 440, 420, 437, 456, 360,\n 420, 363, 361, 401, 288, 265, 372, 353, 390, 339, 249, 339, 448, 255];\n", "import { tf, setWasmPaths } from './tf.js';\nimport * as facemesh from './face/facemesh.js';\nimport * as age from './age/age.js';\nimport * as gender from './gender/gender.js';\nimport * as emotion from 
'./emotion/emotion.js';\nimport * as posenet from './body/posenet.js';\nimport * as handpose from './hand/handpose.js';\nimport * as gesture from './gesture.js';\nimport * as image from './image.js';\nimport * as profile from './profile.js';\nimport * as config from '../config.js';\nimport * as app from '../package.json';\n\n// static config override for non-video detection\nconst disableSkipFrames = {\n face: { detector: { skipFrames: 0 }, age: { skipFrames: 0 }, gender: { skipFrames: 0 }, emotion: { skipFrames: 0 } }, hand: { skipFrames: 0 },\n};\n\n// helper function: gets elapsed time on both browser and nodejs\nconst now = () => {\n if (typeof performance !== 'undefined') return performance.now();\n return parseInt(Number(process.hrtime.bigint()) / 1000 / 1000);\n};\n\n// helper function: perform deep merge of multiple objects so it allows full inheriance with overrides\nfunction mergeDeep(...objects) {\n const isObject = (obj) => obj && typeof obj === 'object';\n return objects.reduce((prev, obj) => {\n Object.keys(obj || {}).forEach((key) => {\n const pVal = prev[key];\n const oVal = obj[key];\n if (Array.isArray(pVal) && Array.isArray(oVal)) {\n prev[key] = pVal.concat(...oVal);\n } else if (isObject(pVal) && isObject(oVal)) {\n prev[key] = mergeDeep(pVal, oVal);\n } else {\n prev[key] = oVal;\n }\n });\n return prev;\n }, {});\n}\n\nclass Human {\n constructor(userConfig = {}) {\n this.tf = tf;\n this.version = app.version;\n this.config = mergeDeep(config.default, userConfig);\n this.fx = null;\n this.state = 'idle';\n this.numTensors = 0;\n this.analyzeMemoryLeaks = false;\n this.checkSanity = false;\n this.firstRun = true;\n this.perf = {};\n // object that contains all initialized models\n this.models = {\n facemesh: null,\n posenet: null,\n handpose: null,\n iris: null,\n age: null,\n gender: null,\n emotion: null,\n };\n // export raw access to underlying models\n this.facemesh = facemesh;\n this.age = age;\n this.gender = gender;\n this.emotion = emotion;\n this.body = posenet;\n this.hand = handpose;\n }\n\n // helper function: wrapper around console output\n log(...msg) {\n // eslint-disable-next-line no-console\n if (msg && this.config.console) console.log('Human:', ...msg);\n }\n\n profile() {\n if (this.config.profile) return profile.data;\n return {};\n }\n\n // helper function: measure tensor leak\n analyze(...msg) {\n if (!this.analyzeMemoryLeaks) return;\n const current = tf.engine().state.numTensors;\n const previous = this.numTensors;\n this.numTensors = current;\n const leaked = current - previous;\n if (leaked !== 0) this.log(...msg, leaked);\n }\n\n // quick sanity check on inputs\n sanity(input) {\n if (!this.checkSanity) return null;\n if (!input) return 'input is not defined';\n if (tf.ENV.flags.IS_NODE && !(input instanceof tf.Tensor)) {\n return 'input must be a tensor';\n }\n try {\n tf.getBackend();\n } catch {\n return 'backend not loaded';\n }\n return null;\n }\n\n // preload models, not explicitly required as it's done automatically on first use\n async load(userConfig) {\n this.state = 'load';\n const timeStamp = now();\n if (userConfig) this.config = mergeDeep(this.config, userConfig);\n\n if (this.firstRun) {\n this.checkBackend(true);\n this.log(`version: ${this.version} TensorFlow/JS version: ${tf.version_core}`);\n this.log('configuration:', this.config);\n this.log('flags:', tf.ENV.flags);\n this.firstRun = false;\n }\n if (this.config.async) {\n [\n this.models.facemesh,\n this.models.age,\n this.models.gender,\n this.models.emotion,\n 
this.models.posenet,\n this.models.handpose,\n ] = await Promise.all([\n this.models.facemesh || (this.config.face.enabled ? facemesh.load(this.config.face) : null),\n this.models.age || ((this.config.face.enabled && this.config.face.age.enabled) ? age.load(this.config) : null),\n this.models.gender || ((this.config.face.enabled && this.config.face.gender.enabled) ? gender.load(this.config) : null),\n this.models.emotion || ((this.config.face.enabled && this.config.face.emotion.enabled) ? emotion.load(this.config) : null),\n this.models.posenet || (this.config.body.enabled ? posenet.load(this.config) : null),\n this.models.handpose || (this.config.hand.enabled ? handpose.load(this.config.hand) : null),\n ]);\n } else {\n if (this.config.face.enabled && !this.models.facemesh) this.models.facemesh = await facemesh.load(this.config.face);\n if (this.config.face.enabled && this.config.face.age.enabled && !this.models.age) this.models.age = await age.load(this.config);\n if (this.config.face.enabled && this.config.face.gender.enabled && !this.models.gender) this.models.gender = await gender.load(this.config);\n if (this.config.face.enabled && this.config.face.emotion.enabled && !this.models.emotion) this.models.emotion = await emotion.load(this.config);\n if (this.config.body.enabled && !this.models.posenet) this.models.posenet = await posenet.load(this.config);\n if (this.config.hand.enabled && !this.models.handpose) this.models.handpose = await handpose.load(this.config.hand);\n }\n const current = Math.trunc(now() - timeStamp);\n if (current > (this.perf.load || 0)) this.perf.load = current;\n }\n\n // check if backend needs initialization if it changed\n async checkBackend(force) {\n const timeStamp = now();\n if (this.config.backend && (this.config.backend !== '') && force || (tf.getBackend() !== this.config.backend)) {\n this.state = 'backend';\n /* force backend reload\n if (this.config.backend in tf.engine().registry) {\n const backendFactory = tf.findBackendFactory(this.config.backend);\n tf.removeBackend(this.config.backend);\n tf.registerBackend(this.config.backend, backendFactory);\n } else {\n this.log('Backend not registred:', this.config.backend);\n }\n */\n\n this.log('setting backend:', this.config.backend);\n\n if (this.config.backend === 'wasm') {\n this.log('settings wasm path:', this.config.wasmPath);\n setWasmPaths(this.config.wasmPath);\n const simd = await tf.env().getAsync('WASM_HAS_SIMD_SUPPORT');\n if (!simd) this.log('warning: wasm simd support is not enabled');\n }\n\n await tf.setBackend(this.config.backend);\n tf.enableProdMode();\n /* debug mode is really too mcuh\n tf.enableDebugMode();\n */\n if (this.config.backend === 'webgl') {\n if (this.config.deallocate) {\n this.log('changing webgl: WEBGL_DELETE_TEXTURE_THRESHOLD:', this.config.deallocate);\n tf.ENV.set('WEBGL_DELETE_TEXTURE_THRESHOLD', this.config.deallocate ? 
0 : -1);\n }\n // tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true);\n tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', true);\n }\n await tf.ready();\n }\n const current = Math.trunc(now() - timeStamp);\n if (current > (this.perf.backend || 0)) this.perf.backend = current;\n }\n\n async detectFace(input) {\n // run facemesh, includes blazeface and iris\n // eslint-disable-next-line no-async-promise-executor\n let timeStamp;\n let ageRes;\n let genderRes;\n let emotionRes;\n const faceRes = [];\n this.state = 'run:face';\n timeStamp = now();\n const faces = await this.models.facemesh.estimateFaces(input, this.config.face);\n this.perf.face = Math.trunc(now() - timeStamp);\n for (const face of faces) {\n this.analyze('Get Face');\n // is something went wrong, skip the face\n if (!face.image || face.image.isDisposedInternal) {\n this.log('Face object is disposed:', face.image);\n continue;\n }\n // run age, inherits face from blazeface\n this.analyze('Start Age:');\n if (this.config.async) {\n ageRes = this.config.face.age.enabled ? age.predict(face.image, this.config) : {};\n } else {\n this.state = 'run:age';\n timeStamp = now();\n ageRes = this.config.face.age.enabled ? await age.predict(face.image, this.config) : {};\n this.perf.age = Math.trunc(now() - timeStamp);\n }\n\n // run gender, inherits face from blazeface\n this.analyze('Start Gender:');\n if (this.config.async) {\n genderRes = this.config.face.gender.enabled ? gender.predict(face.image, this.config) : {};\n } else {\n this.state = 'run:gender';\n timeStamp = now();\n genderRes = this.config.face.gender.enabled ? await gender.predict(face.image, this.config) : {};\n this.perf.gender = Math.trunc(now() - timeStamp);\n }\n // run emotion, inherits face from blazeface\n this.analyze('Start Emotion:');\n if (this.config.async) {\n emotionRes = this.config.face.emotion.enabled ? emotion.predict(face.image, this.config) : {};\n } else {\n this.state = 'run:emotion';\n timeStamp = now();\n emotionRes = this.config.face.emotion.enabled ? await emotion.predict(face.image, this.config) : {};\n this.perf.emotion = Math.trunc(now() - timeStamp);\n }\n this.analyze('End Emotion:');\n\n // if async wait for results\n if (this.config.async) {\n [ageRes, genderRes, emotionRes] = await Promise.all([ageRes, genderRes, emotionRes]);\n }\n\n this.analyze('Finish Face:');\n // dont need face anymore\n face.image.dispose();\n\n // calculate iris distance\n // iris: array[ center, left, top, right, bottom]\n const irisSize = (face.annotations.leftEyeIris && face.annotations.rightEyeIris)\n /* average human iris size is 11.7mm */\n ? 11.7 * Math.max(Math.abs(face.annotations.leftEyeIris[3][0] - face.annotations.leftEyeIris[1][0]), Math.abs(face.annotations.rightEyeIris[4][1] - face.annotations.rightEyeIris[2][1]))\n : 0;\n\n // combine results\n faceRes.push({\n confidence: face.confidence,\n box: face.box,\n mesh: face.mesh,\n annotations: face.annotations,\n age: ageRes.age,\n gender: genderRes.gender,\n genderConfidence: genderRes.confidence,\n emotion: emotionRes,\n iris: (irisSize !== 0) ? 
Math.trunc(irisSize) / 100 : 0,\n });\n this.analyze('End Face');\n }\n this.analyze('End FaceMesh:');\n if (this.config.async) {\n if (this.perf.face) delete this.perf.face;\n if (this.perf.age) delete this.perf.age;\n if (this.perf.gender) delete this.perf.gender;\n if (this.perf.emotion) delete this.perf.emotion;\n }\n return faceRes;\n }\n\n // main detect function\n async detect(input, userConfig = {}) {\n this.state = 'config';\n let timeStamp;\n\n // update configuration\n this.config = mergeDeep(this.config, userConfig);\n if (!this.config.videoOptimized) this.config = mergeDeep(this.config, disableSkipFrames);\n\n // sanity checks\n this.state = 'check';\n const error = this.sanity(input);\n if (error) {\n this.log(error, input);\n return { error };\n }\n\n // detection happens inside a promise\n return new Promise(async (resolve) => {\n let poseRes;\n let handRes;\n let faceRes;\n\n const timeStart = now();\n\n // configure backend\n await this.checkBackend();\n\n // load models if enabled\n await this.load();\n\n if (this.config.scoped) tf.engine().startScope();\n this.analyze('Start Scope:');\n\n timeStamp = now();\n const process = image.process(input, this.config);\n this.perf.image = Math.trunc(now() - timeStamp);\n this.analyze('Get Image:');\n\n // run face detection followed by all models that rely on face bounding box: face mesh, age, gender, emotion\n if (this.config.async) {\n faceRes = this.config.face.enabled ? this.detectFace(process.tensor) : [];\n if (this.perf.face) delete this.perf.face;\n } else {\n this.state = 'run:face';\n timeStamp = now();\n faceRes = this.config.face.enabled ? await this.detectFace(process.tensor) : [];\n this.perf.face = Math.trunc(now() - timeStamp);\n }\n\n // run posenet\n this.analyze('Start Body:');\n if (this.config.async) {\n poseRes = this.config.body.enabled ? this.models.posenet.estimatePoses(process.tensor, this.config) : [];\n if (this.perf.body) delete this.perf.body;\n } else {\n this.state = 'run:body';\n timeStamp = now();\n poseRes = this.config.body.enabled ? await this.models.posenet.estimatePoses(process.tensor, this.config) : [];\n this.perf.body = Math.trunc(now() - timeStamp);\n }\n this.analyze('End Body:');\n\n // run handpose\n this.analyze('Start Hand:');\n if (this.config.async) {\n handRes = this.config.hand.enabled ? this.models.handpose.estimateHands(process.tensor, this.config.hand) : [];\n if (this.perf.hand) delete this.perf.hand;\n } else {\n this.state = 'run:hand';\n timeStamp = now();\n handRes = this.config.hand.enabled ? 
await this.models.handpose.estimateHands(process.tensor, this.config.hand) : [];\n this.perf.hand = Math.trunc(now() - timeStamp);\n }\n // this.analyze('End Hand:');\n\n // if async wait for results\n if (this.config.async) {\n [faceRes, poseRes, handRes] = await Promise.all([faceRes, poseRes, handRes]);\n }\n process.tensor.dispose();\n\n if (this.config.scoped) tf.engine().endScope();\n this.analyze('End Scope:');\n\n let gestureRes = [];\n if (this.config.gesture.enabled) {\n timeStamp = now();\n gestureRes = { face: gesture.face(faceRes), body: gesture.body(poseRes), hand: gesture.hand(handRes) };\n if (!this.config.async) this.perf.gesture = Math.trunc(now() - timeStamp);\n else if (this.perf.gesture) delete this.perf.gesture;\n }\n\n this.perf.total = Math.trunc(now() - timeStart);\n this.state = 'idle';\n resolve({ face: faceRes, body: poseRes, hand: handRes, gesture: gestureRes, performance: this.perf, canvas: process.canvas });\n });\n }\n\n async warmup(userConfig) {\n const warmup = new ImageData(255, 255);\n await this.detect(warmup, userConfig);\n this.log('warmed up');\n }\n}\n\nexport { Human as default };\n", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { tf } from '../tf.js';\n\nfunction getBoxSize(box) {\n return [\n Math.abs(box.endPoint[0] - box.startPoint[0]),\n Math.abs(box.endPoint[1] - box.startPoint[1]),\n ];\n}\nfunction getBoxCenter(box) {\n return [\n box.startPoint[0] + (box.endPoint[0] - box.startPoint[0]) / 2,\n box.startPoint[1] + (box.endPoint[1] - box.startPoint[1]) / 2,\n ];\n}\nfunction cutBoxFromImageAndResize(box, image, cropSize) {\n const h = image.shape[1];\n const w = image.shape[2];\n const boxes = [[\n box.startPoint[1] / h,\n box.startPoint[0] / w,\n box.endPoint[1] / h,\n box.endPoint[0] / w,\n ]];\n return tf.image.cropAndResize(image, boxes, [0], cropSize);\n}\nfunction scaleBoxCoordinates(box, factor) {\n const startPoint = [box.startPoint[0] * factor[0], box.startPoint[1] * factor[1]];\n const endPoint = [box.endPoint[0] * factor[0], box.endPoint[1] * factor[1]];\n const palmLandmarks = box.palmLandmarks.map((coord) => {\n const scaledCoord = [coord[0] * factor[0], coord[1] * factor[1]];\n return scaledCoord;\n });\n return { startPoint, endPoint, palmLandmarks, confidence: box.confidence };\n}\nfunction enlargeBox(box, factor = 1.5) {\n const center = getBoxCenter(box);\n const size = getBoxSize(box);\n const newHalfSize = [factor * size[0] / 2, factor * size[1] / 2];\n const startPoint = [center[0] - newHalfSize[0], center[1] - newHalfSize[1]];\n const endPoint = [center[0] + newHalfSize[0], center[1] + newHalfSize[1]];\n return { startPoint, endPoint, palmLandmarks: box.palmLandmarks };\n}\nfunction squarifyBox(box) {\n const centers = getBoxCenter(box);\n const size = getBoxSize(box);\n const maxEdge = Math.max(...size);\n const halfSize = maxEdge / 2;\n const 
startPoint = [centers[0] - halfSize, centers[1] - halfSize];\n const endPoint = [centers[0] + halfSize, centers[1] + halfSize];\n return { startPoint, endPoint, palmLandmarks: box.palmLandmarks };\n}\nfunction shiftBox(box, shiftFactor) {\n const boxSize = [\n box.endPoint[0] - box.startPoint[0],\n box.endPoint[1] - box.startPoint[1],\n ];\n const shiftVector = [boxSize[0] * shiftFactor[0], boxSize[1] * shiftFactor[1]];\n const startPoint = [box.startPoint[0] + shiftVector[0], box.startPoint[1] + shiftVector[1]];\n const endPoint = [box.endPoint[0] + shiftVector[0], box.endPoint[1] + shiftVector[1]];\n return { startPoint, endPoint, palmLandmarks: box.palmLandmarks };\n}\nexport {\n cutBoxFromImageAndResize,\n enlargeBox,\n getBoxCenter,\n getBoxSize,\n scaleBoxCoordinates,\n shiftBox,\n squarifyBox,\n};\n", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nfunction normalizeRadians(angle) {\n return angle - 2 * Math.PI * Math.floor((angle + Math.PI) / (2 * Math.PI));\n}\nfunction computeRotation(point1, point2) {\n const radians = Math.PI / 2 - Math.atan2(-(point2[1] - point1[1]), point2[0] - point1[0]);\n return normalizeRadians(radians);\n}\nconst buildTranslationMatrix = (x, y) => [[1, 0, x], [0, 1, y], [0, 0, 1]];\nfunction dot(v1, v2) {\n let product = 0;\n for (let i = 0; i < v1.length; i++) {\n product += v1[i] * v2[i];\n }\n return product;\n}\nfunction getColumnFrom2DArr(arr, columnIndex) {\n const column = [];\n for (let i = 0; i < arr.length; i++) {\n column.push(arr[i][columnIndex]);\n }\n return column;\n}\nfunction multiplyTransformMatrices(mat1, mat2) {\n const product = [];\n const size = mat1.length;\n for (let row = 0; row < size; row++) {\n product.push([]);\n for (let col = 0; col < size; col++) {\n product[row].push(dot(mat1[row], getColumnFrom2DArr(mat2, col)));\n }\n }\n return product;\n}\nfunction buildRotationMatrix(rotation, center) {\n const cosA = Math.cos(rotation);\n const sinA = Math.sin(rotation);\n const rotationMatrix = [[cosA, -sinA, 0], [sinA, cosA, 0], [0, 0, 1]];\n const translationMatrix = buildTranslationMatrix(center[0], center[1]);\n const translationTimesRotation = multiplyTransformMatrices(translationMatrix, rotationMatrix);\n const negativeTranslationMatrix = buildTranslationMatrix(-center[0], -center[1]);\n return multiplyTransformMatrices(translationTimesRotation, negativeTranslationMatrix);\n}\nfunction invertTransformMatrix(matrix) {\n const rotationComponent = [[matrix[0][0], matrix[1][0]], [matrix[0][1], matrix[1][1]]];\n const translationComponent = [matrix[0][2], matrix[1][2]];\n const invertedTranslation = [\n -dot(rotationComponent[0], translationComponent),\n -dot(rotationComponent[1], translationComponent),\n ];\n return [\n rotationComponent[0].concat(invertedTranslation[0]),\n rotationComponent[1].concat(invertedTranslation[1]),\n [0, 0, 1],\n ];\n}\nfunction 
rotatePoint(homogeneousCoordinate, rotationMatrix) {\n return [\n dot(homogeneousCoordinate, rotationMatrix[0]),\n dot(homogeneousCoordinate, rotationMatrix[1]),\n ];\n}\nexport {\n buildRotationMatrix,\n computeRotation,\n dot,\n getColumnFrom2DArr,\n invertTransformMatrix,\n normalizeRadians,\n rotatePoint,\n};\n", "import { tf, setWasmPaths } from './tf.js';\nimport * as facemesh from './face/facemesh.js';\nimport * as age from './age/age.js';\nimport * as gender from './gender/gender.js';\nimport * as emotion from './emotion/emotion.js';\nimport * as posenet from './body/posenet.js';\nimport * as handpose from './hand/handpose.js';\nimport * as gesture from './gesture.js';\nimport * as image from './image.js';\nimport * as profile from './profile.js';\nimport * as config from '../config.js';\nimport * as app from '../package.json';\n\n// static config override for non-video detection\nconst disableSkipFrames = {\n face: { detector: { skipFrames: 0 }, age: { skipFrames: 0 }, gender: { skipFrames: 0 }, emotion: { skipFrames: 0 } }, hand: { skipFrames: 0 },\n};\n\n// helper function: gets elapsed time on both browser and nodejs\nconst now = () => {\n if (typeof performance !== 'undefined') return performance.now();\n return parseInt(Number(process.hrtime.bigint()) / 1000 / 1000);\n};\n\n// helper function: perform deep merge of multiple objects so it allows full inheriance with overrides\nfunction mergeDeep(...objects) {\n const isObject = (obj) => obj && typeof obj === 'object';\n return objects.reduce((prev, obj) => {\n Object.keys(obj || {}).forEach((key) => {\n const pVal = prev[key];\n const oVal = obj[key];\n if (Array.isArray(pVal) && Array.isArray(oVal)) {\n prev[key] = pVal.concat(...oVal);\n } else if (isObject(pVal) && isObject(oVal)) {\n prev[key] = mergeDeep(pVal, oVal);\n } else {\n prev[key] = oVal;\n }\n });\n return prev;\n }, {});\n}\n\nclass Human {\n constructor(userConfig = {}) {\n this.tf = tf;\n this.version = app.version;\n this.config = mergeDeep(config.default, userConfig);\n this.fx = null;\n this.state = 'idle';\n this.numTensors = 0;\n this.analyzeMemoryLeaks = false;\n this.checkSanity = false;\n this.firstRun = true;\n this.perf = {};\n // object that contains all initialized models\n this.models = {\n facemesh: null,\n posenet: null,\n handpose: null,\n iris: null,\n age: null,\n gender: null,\n emotion: null,\n };\n // export raw access to underlying models\n this.facemesh = facemesh;\n this.age = age;\n this.gender = gender;\n this.emotion = emotion;\n this.body = posenet;\n this.hand = handpose;\n }\n\n // helper function: wrapper around console output\n log(...msg) {\n // eslint-disable-next-line no-console\n if (msg && this.config.console) console.log('Human:', ...msg);\n }\n\n profile() {\n if (this.config.profile) return profile.data;\n return {};\n }\n\n // helper function: measure tensor leak\n analyze(...msg) {\n if (!this.analyzeMemoryLeaks) return;\n const current = tf.engine().state.numTensors;\n const previous = this.numTensors;\n this.numTensors = current;\n const leaked = current - previous;\n if (leaked !== 0) this.log(...msg, leaked);\n }\n\n // quick sanity check on inputs\n sanity(input) {\n if (!this.checkSanity) return null;\n if (!input) return 'input is not defined';\n if (tf.ENV.flags.IS_NODE && !(input instanceof tf.Tensor)) {\n return 'input must be a tensor';\n }\n try {\n tf.getBackend();\n } catch {\n return 'backend not loaded';\n }\n return null;\n }\n\n // preload models, not explicitly required as it's done automatically on 
first use\n async load(userConfig) {\n this.state = 'load';\n const timeStamp = now();\n if (userConfig) this.config = mergeDeep(this.config, userConfig);\n\n if (this.firstRun) {\n this.checkBackend(true);\n this.log(`version: ${this.version} TensorFlow/JS version: ${tf.version_core}`);\n this.log('configuration:', this.config);\n this.log('flags:', tf.ENV.flags);\n this.firstRun = false;\n }\n if (this.config.async) {\n [\n this.models.facemesh,\n this.models.age,\n this.models.gender,\n this.models.emotion,\n this.models.posenet,\n this.models.handpose,\n ] = await Promise.all([\n this.models.facemesh || (this.config.face.enabled ? facemesh.load(this.config.face) : null),\n this.models.age || ((this.config.face.enabled && this.config.face.age.enabled) ? age.load(this.config) : null),\n this.models.gender || ((this.config.face.enabled && this.config.face.gender.enabled) ? gender.load(this.config) : null),\n this.models.emotion || ((this.config.face.enabled && this.config.face.emotion.enabled) ? emotion.load(this.config) : null),\n this.models.posenet || (this.config.body.enabled ? posenet.load(this.config) : null),\n this.models.handpose || (this.config.hand.enabled ? handpose.load(this.config.hand) : null),\n ]);\n } else {\n if (this.config.face.enabled && !this.models.facemesh) this.models.facemesh = await facemesh.load(this.config.face);\n if (this.config.face.enabled && this.config.face.age.enabled && !this.models.age) this.models.age = await age.load(this.config);\n if (this.config.face.enabled && this.config.face.gender.enabled && !this.models.gender) this.models.gender = await gender.load(this.config);\n if (this.config.face.enabled && this.config.face.emotion.enabled && !this.models.emotion) this.models.emotion = await emotion.load(this.config);\n if (this.config.body.enabled && !this.models.posenet) this.models.posenet = await posenet.load(this.config);\n if (this.config.hand.enabled && !this.models.handpose) this.models.handpose = await handpose.load(this.config.hand);\n }\n const current = Math.trunc(now() - timeStamp);\n if (current > (this.perf.load || 0)) this.perf.load = current;\n }\n\n // check if backend needs initialization if it changed\n async checkBackend(force) {\n const timeStamp = now();\n if (this.config.backend && (this.config.backend !== '') && force || (tf.getBackend() !== this.config.backend)) {\n this.state = 'backend';\n /* force backend reload\n if (this.config.backend in tf.engine().registry) {\n const backendFactory = tf.findBackendFactory(this.config.backend);\n tf.removeBackend(this.config.backend);\n tf.registerBackend(this.config.backend, backendFactory);\n } else {\n this.log('Backend not registred:', this.config.backend);\n }\n */\n\n this.log('setting backend:', this.config.backend);\n\n if (this.config.backend === 'wasm') {\n this.log('settings wasm path:', this.config.wasmPath);\n setWasmPaths(this.config.wasmPath);\n const simd = await tf.env().getAsync('WASM_HAS_SIMD_SUPPORT');\n if (!simd) this.log('warning: wasm simd support is not enabled');\n }\n\n await tf.setBackend(this.config.backend);\n tf.enableProdMode();\n /* debug mode is really too mcuh\n tf.enableDebugMode();\n */\n if (this.config.backend === 'webgl') {\n if (this.config.deallocate) {\n this.log('changing webgl: WEBGL_DELETE_TEXTURE_THRESHOLD:', this.config.deallocate);\n tf.ENV.set('WEBGL_DELETE_TEXTURE_THRESHOLD', this.config.deallocate ? 
0 : -1);\n }\n // tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true);\n tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', true);\n }\n await tf.ready();\n }\n const current = Math.trunc(now() - timeStamp);\n if (current > (this.perf.backend || 0)) this.perf.backend = current;\n }\n\n async detectFace(input) {\n // run facemesh, includes blazeface and iris\n // eslint-disable-next-line no-async-promise-executor\n let timeStamp;\n let ageRes;\n let genderRes;\n let emotionRes;\n const faceRes = [];\n this.state = 'run:face';\n timeStamp = now();\n const faces = await this.models.facemesh.estimateFaces(input, this.config.face);\n this.perf.face = Math.trunc(now() - timeStamp);\n for (const face of faces) {\n this.analyze('Get Face');\n // is something went wrong, skip the face\n if (!face.image || face.image.isDisposedInternal) {\n this.log('Face object is disposed:', face.image);\n continue;\n }\n // run age, inherits face from blazeface\n this.analyze('Start Age:');\n if (this.config.async) {\n ageRes = this.config.face.age.enabled ? age.predict(face.image, this.config) : {};\n } else {\n this.state = 'run:age';\n timeStamp = now();\n ageRes = this.config.face.age.enabled ? await age.predict(face.image, this.config) : {};\n this.perf.age = Math.trunc(now() - timeStamp);\n }\n\n // run gender, inherits face from blazeface\n this.analyze('Start Gender:');\n if (this.config.async) {\n genderRes = this.config.face.gender.enabled ? gender.predict(face.image, this.config) : {};\n } else {\n this.state = 'run:gender';\n timeStamp = now();\n genderRes = this.config.face.gender.enabled ? await gender.predict(face.image, this.config) : {};\n this.perf.gender = Math.trunc(now() - timeStamp);\n }\n // run emotion, inherits face from blazeface\n this.analyze('Start Emotion:');\n if (this.config.async) {\n emotionRes = this.config.face.emotion.enabled ? emotion.predict(face.image, this.config) : {};\n } else {\n this.state = 'run:emotion';\n timeStamp = now();\n emotionRes = this.config.face.emotion.enabled ? await emotion.predict(face.image, this.config) : {};\n this.perf.emotion = Math.trunc(now() - timeStamp);\n }\n this.analyze('End Emotion:');\n\n // if async wait for results\n if (this.config.async) {\n [ageRes, genderRes, emotionRes] = await Promise.all([ageRes, genderRes, emotionRes]);\n }\n\n this.analyze('Finish Face:');\n // dont need face anymore\n face.image.dispose();\n\n // calculate iris distance\n // iris: array[ center, left, top, right, bottom]\n const irisSize = (face.annotations.leftEyeIris && face.annotations.rightEyeIris)\n /* average human iris size is 11.7mm */\n ? 11.7 * Math.max(Math.abs(face.annotations.leftEyeIris[3][0] - face.annotations.leftEyeIris[1][0]), Math.abs(face.annotations.rightEyeIris[4][1] - face.annotations.rightEyeIris[2][1]))\n : 0;\n\n // combine results\n faceRes.push({\n confidence: face.confidence,\n box: face.box,\n mesh: face.mesh,\n annotations: face.annotations,\n age: ageRes.age,\n gender: genderRes.gender,\n genderConfidence: genderRes.confidence,\n emotion: emotionRes,\n iris: (irisSize !== 0) ? 
Math.trunc(irisSize) / 100 : 0,\n });\n this.analyze('End Face');\n }\n this.analyze('End FaceMesh:');\n if (this.config.async) {\n if (this.perf.face) delete this.perf.face;\n if (this.perf.age) delete this.perf.age;\n if (this.perf.gender) delete this.perf.gender;\n if (this.perf.emotion) delete this.perf.emotion;\n }\n return faceRes;\n }\n\n // main detect function\n async detect(input, userConfig = {}) {\n this.state = 'config';\n let timeStamp;\n\n // update configuration\n this.config = mergeDeep(this.config, userConfig);\n if (!this.config.videoOptimized) this.config = mergeDeep(this.config, disableSkipFrames);\n\n // sanity checks\n this.state = 'check';\n const error = this.sanity(input);\n if (error) {\n this.log(error, input);\n return { error };\n }\n\n // detection happens inside a promise\n return new Promise(async (resolve) => {\n let poseRes;\n let handRes;\n let faceRes;\n\n const timeStart = now();\n\n // configure backend\n await this.checkBackend();\n\n // load models if enabled\n await this.load();\n\n if (this.config.scoped) tf.engine().startScope();\n this.analyze('Start Scope:');\n\n timeStamp = now();\n const process = image.process(input, this.config);\n this.perf.image = Math.trunc(now() - timeStamp);\n this.analyze('Get Image:');\n\n // run face detection followed by all models that rely on face bounding box: face mesh, age, gender, emotion\n if (this.config.async) {\n faceRes = this.config.face.enabled ? this.detectFace(process.tensor) : [];\n if (this.perf.face) delete this.perf.face;\n } else {\n this.state = 'run:face';\n timeStamp = now();\n faceRes = this.config.face.enabled ? await this.detectFace(process.tensor) : [];\n this.perf.face = Math.trunc(now() - timeStamp);\n }\n\n // run posenet\n this.analyze('Start Body:');\n if (this.config.async) {\n poseRes = this.config.body.enabled ? this.models.posenet.estimatePoses(process.tensor, this.config) : [];\n if (this.perf.body) delete this.perf.body;\n } else {\n this.state = 'run:body';\n timeStamp = now();\n poseRes = this.config.body.enabled ? await this.models.posenet.estimatePoses(process.tensor, this.config) : [];\n this.perf.body = Math.trunc(now() - timeStamp);\n }\n this.analyze('End Body:');\n\n // run handpose\n this.analyze('Start Hand:');\n if (this.config.async) {\n handRes = this.config.hand.enabled ? this.models.handpose.estimateHands(process.tensor, this.config.hand) : [];\n if (this.perf.hand) delete this.perf.hand;\n } else {\n this.state = 'run:hand';\n timeStamp = now();\n handRes = this.config.hand.enabled ? 
await this.models.handpose.estimateHands(process.tensor, this.config.hand) : [];\n this.perf.hand = Math.trunc(now() - timeStamp);\n }\n // this.analyze('End Hand:');\n\n // if async wait for results\n if (this.config.async) {\n [faceRes, poseRes, handRes] = await Promise.all([faceRes, poseRes, handRes]);\n }\n process.tensor.dispose();\n\n if (this.config.scoped) tf.engine().endScope();\n this.analyze('End Scope:');\n\n let gestureRes = [];\n if (this.config.gesture.enabled) {\n timeStamp = now();\n gestureRes = { face: gesture.face(faceRes), body: gesture.body(poseRes), hand: gesture.hand(handRes) };\n if (!this.config.async) this.perf.gesture = Math.trunc(now() - timeStamp);\n else if (this.perf.gesture) delete this.perf.gesture;\n }\n\n this.perf.total = Math.trunc(now() - timeStart);\n this.state = 'idle';\n resolve({ face: faceRes, body: poseRes, hand: handRes, gesture: gestureRes, performance: this.perf, canvas: process.canvas });\n });\n }\n\n async warmup(userConfig) {\n const warmup = new ImageData(255, 255);\n await this.detect(warmup, userConfig);\n this.log('warmed up');\n }\n}\n\nexport { Human as default };\n", "/* eslint-disable indent */\n/* eslint-disable no-multi-spaces */\n\nexport default {\n backend: 'webgl', // select tfjs backend to use\n wasmPath: '../assets/', // path for wasm binaries\n // only used for backend: wasm\n console: true, // enable debugging output to console\n async: true, // execute enabled models in parallel\n // this disables per-model performance data but\n // slightly increases performance\n // cannot be used if profiling is enabled\n profile: false, // enable tfjs profiling\n // this has significant performance impact\n // only enable for debugging purposes\n // currently only implemented for age,gender,emotion models\n deallocate: false, // aggresively deallocate gpu memory after each usage\n // only valid for webgl backend and only during first call\n // cannot be changed unless library is reloaded\n // this has significant performance impact\n // only enable on low-memory devices\n scoped: false, // enable scoped runs\n // some models *may* have memory leaks,\n // this wrapps everything in a local scope at a cost of performance\n // typically not needed\n videoOptimized: true, // perform additional optimizations when input is video,\n // must be disabled for images\n // basically this skips object box boundary detection for every n frames\n // while maintaining in-box detection since objects cannot move that fast\n\n filter: {\n enabled: true, // enable image pre-processing filters\n width: 0, // resize input width\n height: 0, // resize input height\n // if both width and height are set to 0, there is no resizing\n // if just one is set, second one is scaled automatically\n // if both are set, values are used as-is\n return: true, // return processed canvas imagedata in result\n brightness: 0, // range: -1 (darken) to 1 (lighten)\n contrast: 0, // range: -1 (reduce contrast) to 1 (increase contrast)\n sharpness: 0, // range: 0 (no sharpening) to 1 (maximum sharpening)\n blur: 0, // range: 0 (no blur) to N (blur radius in pixels)\n saturation: 0, // range: -1 (reduce saturation) to 1 (increase saturation)\n hue: 0, // range: 0 (no change) to 360 (hue rotation in degrees)\n negative: false, // image negative\n sepia: false, // image sepia colors\n vintage: false, // image vintage colors\n kodachrome: false, // image kodachrome colors\n technicolor: false, // image technicolor colors\n polaroid: false, // image polaroid camera effect\n 
pixelate: 0, // range: 0 (no pixelate) to N (number of pixels to pixelate)\n },\n\n gesture: {\n enabled: true, // enable simple gesture recognition\n },\n\n face: {\n enabled: true, // controls if specified modul is enabled\n // face.enabled is required for all face models:\n // detector, mesh, iris, age, gender, emotion\n // (note: module is not loaded until it is required)\n detector: {\n modelPath: '../models/blazeface-back.json', // can be 'front' or 'back'.\n // 'front' is optimized for large faces\n // such as front-facing camera and\n // 'back' is optimized for distanct faces.\n inputSize: 256, // fixed value: 128 for front and 256 for 'back'\n maxFaces: 10, // maximum number of faces detected in the input\n // should be set to the minimum number for performance\n skipFrames: 15, // how many frames to go without re-running the face bounding box detector\n // only used for video inputs\n // e.g., if model is running st 25 FPS, we can re-use existing bounding\n // box for updated face analysis as the head probably hasn't moved much\n // in short time (10 * 1/25 = 0.25 sec)\n minConfidence: 0.5, // threshold for discarding a prediction\n iouThreshold: 0.2, // threshold for deciding whether boxes overlap too much in\n // non-maximum suppression (0.1 means drop if overlap 10%)\n scoreThreshold: 0.5, // threshold for deciding when to remove boxes based on score\n // in non-maximum suppression,\n // this is applied on detection objects only and before minConfidence\n },\n\n mesh: {\n enabled: true,\n modelPath: '../models/facemesh.json',\n inputSize: 192, // fixed value\n },\n\n iris: {\n enabled: true,\n modelPath: '../models/iris.json',\n inputSize: 64, // fixed value\n },\n\n age: {\n enabled: true,\n modelPath: '../models/age-ssrnet-imdb.json', // can be 'age-ssrnet-imdb' or 'age-ssrnet-wiki'\n // which determines training set for model\n inputSize: 64, // fixed value\n skipFrames: 15, // how many frames to go without re-running the detector\n // only used for video inputs\n },\n\n gender: {\n enabled: true,\n minConfidence: 0.1, // threshold for discarding a prediction\n modelPath: '../models/gender-ssrnet-imdb.json', // can be 'gender', 'gender-ssrnet-imdb' or 'gender-ssrnet-wiki'\n inputSize: 64, // fixed value\n skipFrames: 15, // how many frames to go without re-running the detector\n // only used for video inputs\n },\n\n emotion: {\n enabled: true,\n inputSize: 64, // fixed value\n minConfidence: 0.2, // threshold for discarding a prediction\n skipFrames: 15, // how many frames to go without re-running the detector\n modelPath: '../models/emotion-large.json', // can be 'mini', 'large'\n },\n },\n\n body: {\n enabled: true,\n modelPath: '../models/posenet.json',\n inputSize: 257, // fixed value\n maxDetections: 10, // maximum number of people detected in the input\n // should be set to the minimum number for performance\n scoreThreshold: 0.8, // threshold for deciding when to remove boxes based on score\n // in non-maximum suppression\n nmsRadius: 20, // radius for deciding points are too close in non-maximum suppression\n },\n\n hand: {\n enabled: true,\n inputSize: 256, // fixed value\n skipFrames: 15, // how many frames to go without re-running the hand bounding box detector\n // only used for video inputs\n // e.g., if model is running st 25 FPS, we can re-use existing bounding\n // box for updated hand skeleton analysis as the hand probably\n // hasn't moved much in short time (10 * 1/25 = 0.25 sec)\n minConfidence: 0.5, // threshold for discarding a prediction\n 
iouThreshold: 0.1, // threshold for deciding whether boxes overlap too much\n // in non-maximum suppression\n scoreThreshold: 0.8, // threshold for deciding when to remove boxes based on\n // score in non-maximum suppression\n maxHands: 1, // maximum number of hands detected in the input\n // should be set to the minimum number for performance\n landmarks: true, // detect hand landmarks or just hand boundary box\n detector: {\n modelPath: '../models/handdetect.json',\n },\n skeleton: {\n modelPath: '../models/handskeleton.json',\n },\n },\n};\n", "import { tf, setWasmPaths } from './tf.js';\nimport * as facemesh from './face/facemesh.js';\nimport * as age from './age/age.js';\nimport * as gender from './gender/gender.js';\nimport * as emotion from './emotion/emotion.js';\nimport * as posenet from './body/posenet.js';\nimport * as handpose from './hand/handpose.js';\nimport * as gesture from './gesture.js';\nimport * as image from './image.js';\nimport * as profile from './profile.js';\nimport * as config from '../config.js';\nimport * as app from '../package.json';\n\n// static config override for non-video detection\nconst disableSkipFrames = {\n face: { detector: { skipFrames: 0 }, age: { skipFrames: 0 }, gender: { skipFrames: 0 }, emotion: { skipFrames: 0 } }, hand: { skipFrames: 0 },\n};\n\n// helper function: gets elapsed time on both browser and nodejs\nconst now = () => {\n if (typeof performance !== 'undefined') return performance.now();\n return parseInt(Number(process.hrtime.bigint()) / 1000 / 1000);\n};\n\n// helper function: perform deep merge of multiple objects so it allows full inheriance with overrides\nfunction mergeDeep(...objects) {\n const isObject = (obj) => obj && typeof obj === 'object';\n return objects.reduce((prev, obj) => {\n Object.keys(obj || {}).forEach((key) => {\n const pVal = prev[key];\n const oVal = obj[key];\n if (Array.isArray(pVal) && Array.isArray(oVal)) {\n prev[key] = pVal.concat(...oVal);\n } else if (isObject(pVal) && isObject(oVal)) {\n prev[key] = mergeDeep(pVal, oVal);\n } else {\n prev[key] = oVal;\n }\n });\n return prev;\n }, {});\n}\n\nclass Human {\n constructor(userConfig = {}) {\n this.tf = tf;\n this.version = app.version;\n this.config = mergeDeep(config.default, userConfig);\n this.fx = null;\n this.state = 'idle';\n this.numTensors = 0;\n this.analyzeMemoryLeaks = false;\n this.checkSanity = false;\n this.firstRun = true;\n this.perf = {};\n // object that contains all initialized models\n this.models = {\n facemesh: null,\n posenet: null,\n handpose: null,\n iris: null,\n age: null,\n gender: null,\n emotion: null,\n };\n // export raw access to underlying models\n this.facemesh = facemesh;\n this.age = age;\n this.gender = gender;\n this.emotion = emotion;\n this.body = posenet;\n this.hand = handpose;\n }\n\n // helper function: wrapper around console output\n log(...msg) {\n // eslint-disable-next-line no-console\n if (msg && this.config.console) console.log('Human:', ...msg);\n }\n\n profile() {\n if (this.config.profile) return profile.data;\n return {};\n }\n\n // helper function: measure tensor leak\n analyze(...msg) {\n if (!this.analyzeMemoryLeaks) return;\n const current = tf.engine().state.numTensors;\n const previous = this.numTensors;\n this.numTensors = current;\n const leaked = current - previous;\n if (leaked !== 0) this.log(...msg, leaked);\n }\n\n // quick sanity check on inputs\n sanity(input) {\n if (!this.checkSanity) return null;\n if (!input) return 'input is not defined';\n if (tf.ENV.flags.IS_NODE && !(input 
instanceof tf.Tensor)) {\n return 'input must be a tensor';\n }\n try {\n tf.getBackend();\n } catch {\n return 'backend not loaded';\n }\n return null;\n }\n\n // preload models, not explicitly required as it's done automatically on first use\n async load(userConfig) {\n this.state = 'load';\n const timeStamp = now();\n if (userConfig) this.config = mergeDeep(this.config, userConfig);\n\n if (this.firstRun) {\n this.checkBackend(true);\n this.log(`version: ${this.version} TensorFlow/JS version: ${tf.version_core}`);\n this.log('configuration:', this.config);\n this.log('flags:', tf.ENV.flags);\n this.firstRun = false;\n }\n if (this.config.async) {\n [\n this.models.facemesh,\n this.models.age,\n this.models.gender,\n this.models.emotion,\n this.models.posenet,\n this.models.handpose,\n ] = await Promise.all([\n this.models.facemesh || (this.config.face.enabled ? facemesh.load(this.config.face) : null),\n this.models.age || ((this.config.face.enabled && this.config.face.age.enabled) ? age.load(this.config) : null),\n this.models.gender || ((this.config.face.enabled && this.config.face.gender.enabled) ? gender.load(this.config) : null),\n this.models.emotion || ((this.config.face.enabled && this.config.face.emotion.enabled) ? emotion.load(this.config) : null),\n this.models.posenet || (this.config.body.enabled ? posenet.load(this.config) : null),\n this.models.handpose || (this.config.hand.enabled ? handpose.load(this.config.hand) : null),\n ]);\n } else {\n if (this.config.face.enabled && !this.models.facemesh) this.models.facemesh = await facemesh.load(this.config.face);\n if (this.config.face.enabled && this.config.face.age.enabled && !this.models.age) this.models.age = await age.load(this.config);\n if (this.config.face.enabled && this.config.face.gender.enabled && !this.models.gender) this.models.gender = await gender.load(this.config);\n if (this.config.face.enabled && this.config.face.emotion.enabled && !this.models.emotion) this.models.emotion = await emotion.load(this.config);\n if (this.config.body.enabled && !this.models.posenet) this.models.posenet = await posenet.load(this.config);\n if (this.config.hand.enabled && !this.models.handpose) this.models.handpose = await handpose.load(this.config.hand);\n }\n const current = Math.trunc(now() - timeStamp);\n if (current > (this.perf.load || 0)) this.perf.load = current;\n }\n\n // check if backend needs initialization if it changed\n async checkBackend(force) {\n const timeStamp = now();\n if (this.config.backend && (this.config.backend !== '') && force || (tf.getBackend() !== this.config.backend)) {\n this.state = 'backend';\n /* force backend reload\n if (this.config.backend in tf.engine().registry) {\n const backendFactory = tf.findBackendFactory(this.config.backend);\n tf.removeBackend(this.config.backend);\n tf.registerBackend(this.config.backend, backendFactory);\n } else {\n this.log('Backend not registred:', this.config.backend);\n }\n */\n\n this.log('setting backend:', this.config.backend);\n\n if (this.config.backend === 'wasm') {\n this.log('settings wasm path:', this.config.wasmPath);\n setWasmPaths(this.config.wasmPath);\n const simd = await tf.env().getAsync('WASM_HAS_SIMD_SUPPORT');\n if (!simd) this.log('warning: wasm simd support is not enabled');\n }\n\n await tf.setBackend(this.config.backend);\n tf.enableProdMode();\n /* debug mode is really too mcuh\n tf.enableDebugMode();\n */\n if (this.config.backend === 'webgl') {\n if (this.config.deallocate) {\n this.log('changing webgl: WEBGL_DELETE_TEXTURE_THRESHOLD:', 
this.config.deallocate);\n tf.ENV.set('WEBGL_DELETE_TEXTURE_THRESHOLD', this.config.deallocate ? 0 : -1);\n }\n // tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true);\n tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', true);\n }\n await tf.ready();\n }\n const current = Math.trunc(now() - timeStamp);\n if (current > (this.perf.backend || 0)) this.perf.backend = current;\n }\n\n async detectFace(input) {\n // run facemesh, includes blazeface and iris\n // eslint-disable-next-line no-async-promise-executor\n let timeStamp;\n let ageRes;\n let genderRes;\n let emotionRes;\n const faceRes = [];\n this.state = 'run:face';\n timeStamp = now();\n const faces = await this.models.facemesh.estimateFaces(input, this.config.face);\n this.perf.face = Math.trunc(now() - timeStamp);\n for (const face of faces) {\n this.analyze('Get Face');\n // is something went wrong, skip the face\n if (!face.image || face.image.isDisposedInternal) {\n this.log('Face object is disposed:', face.image);\n continue;\n }\n // run age, inherits face from blazeface\n this.analyze('Start Age:');\n if (this.config.async) {\n ageRes = this.config.face.age.enabled ? age.predict(face.image, this.config) : {};\n } else {\n this.state = 'run:age';\n timeStamp = now();\n ageRes = this.config.face.age.enabled ? await age.predict(face.image, this.config) : {};\n this.perf.age = Math.trunc(now() - timeStamp);\n }\n\n // run gender, inherits face from blazeface\n this.analyze('Start Gender:');\n if (this.config.async) {\n genderRes = this.config.face.gender.enabled ? gender.predict(face.image, this.config) : {};\n } else {\n this.state = 'run:gender';\n timeStamp = now();\n genderRes = this.config.face.gender.enabled ? await gender.predict(face.image, this.config) : {};\n this.perf.gender = Math.trunc(now() - timeStamp);\n }\n // run emotion, inherits face from blazeface\n this.analyze('Start Emotion:');\n if (this.config.async) {\n emotionRes = this.config.face.emotion.enabled ? emotion.predict(face.image, this.config) : {};\n } else {\n this.state = 'run:emotion';\n timeStamp = now();\n emotionRes = this.config.face.emotion.enabled ? await emotion.predict(face.image, this.config) : {};\n this.perf.emotion = Math.trunc(now() - timeStamp);\n }\n this.analyze('End Emotion:');\n\n // if async wait for results\n if (this.config.async) {\n [ageRes, genderRes, emotionRes] = await Promise.all([ageRes, genderRes, emotionRes]);\n }\n\n this.analyze('Finish Face:');\n // dont need face anymore\n face.image.dispose();\n\n // calculate iris distance\n // iris: array[ center, left, top, right, bottom]\n const irisSize = (face.annotations.leftEyeIris && face.annotations.rightEyeIris)\n /* average human iris size is 11.7mm */\n ? 11.7 * Math.max(Math.abs(face.annotations.leftEyeIris[3][0] - face.annotations.leftEyeIris[1][0]), Math.abs(face.annotations.rightEyeIris[4][1] - face.annotations.rightEyeIris[2][1]))\n : 0;\n\n // combine results\n faceRes.push({\n confidence: face.confidence,\n box: face.box,\n mesh: face.mesh,\n annotations: face.annotations,\n age: ageRes.age,\n gender: genderRes.gender,\n genderConfidence: genderRes.confidence,\n emotion: emotionRes,\n iris: (irisSize !== 0) ? 
Math.trunc(irisSize) / 100 : 0,\n });\n this.analyze('End Face');\n }\n this.analyze('End FaceMesh:');\n if (this.config.async) {\n if (this.perf.face) delete this.perf.face;\n if (this.perf.age) delete this.perf.age;\n if (this.perf.gender) delete this.perf.gender;\n if (this.perf.emotion) delete this.perf.emotion;\n }\n return faceRes;\n }\n\n // main detect function\n async detect(input, userConfig = {}) {\n this.state = 'config';\n let timeStamp;\n\n // update configuration\n this.config = mergeDeep(this.config, userConfig);\n if (!this.config.videoOptimized) this.config = mergeDeep(this.config, disableSkipFrames);\n\n // sanity checks\n this.state = 'check';\n const error = this.sanity(input);\n if (error) {\n this.log(error, input);\n return { error };\n }\n\n // detection happens inside a promise\n return new Promise(async (resolve) => {\n let poseRes;\n let handRes;\n let faceRes;\n\n const timeStart = now();\n\n // configure backend\n await this.checkBackend();\n\n // load models if enabled\n await this.load();\n\n if (this.config.scoped) tf.engine().startScope();\n this.analyze('Start Scope:');\n\n timeStamp = now();\n const process = image.process(input, this.config);\n this.perf.image = Math.trunc(now() - timeStamp);\n this.analyze('Get Image:');\n\n // run face detection followed by all models that rely on face bounding box: face mesh, age, gender, emotion\n if (this.config.async) {\n faceRes = this.config.face.enabled ? this.detectFace(process.tensor) : [];\n if (this.perf.face) delete this.perf.face;\n } else {\n this.state = 'run:face';\n timeStamp = now();\n faceRes = this.config.face.enabled ? await this.detectFace(process.tensor) : [];\n this.perf.face = Math.trunc(now() - timeStamp);\n }\n\n // run posenet\n this.analyze('Start Body:');\n if (this.config.async) {\n poseRes = this.config.body.enabled ? this.models.posenet.estimatePoses(process.tensor, this.config) : [];\n if (this.perf.body) delete this.perf.body;\n } else {\n this.state = 'run:body';\n timeStamp = now();\n poseRes = this.config.body.enabled ? await this.models.posenet.estimatePoses(process.tensor, this.config) : [];\n this.perf.body = Math.trunc(now() - timeStamp);\n }\n this.analyze('End Body:');\n\n // run handpose\n this.analyze('Start Hand:');\n if (this.config.async) {\n handRes = this.config.hand.enabled ? this.models.handpose.estimateHands(process.tensor, this.config.hand) : [];\n if (this.perf.hand) delete this.perf.hand;\n } else {\n this.state = 'run:hand';\n timeStamp = now();\n handRes = this.config.hand.enabled ? 
await this.models.handpose.estimateHands(process.tensor, this.config.hand) : [];\n this.perf.hand = Math.trunc(now() - timeStamp);\n }\n // this.analyze('End Hand:');\n\n // if async wait for results\n if (this.config.async) {\n [faceRes, poseRes, handRes] = await Promise.all([faceRes, poseRes, handRes]);\n }\n process.tensor.dispose();\n\n if (this.config.scoped) tf.engine().endScope();\n this.analyze('End Scope:');\n\n let gestureRes = [];\n if (this.config.gesture.enabled) {\n timeStamp = now();\n gestureRes = { face: gesture.face(faceRes), body: gesture.body(poseRes), hand: gesture.hand(handRes) };\n if (!this.config.async) this.perf.gesture = Math.trunc(now() - timeStamp);\n else if (this.perf.gesture) delete this.perf.gesture;\n }\n\n this.perf.total = Math.trunc(now() - timeStart);\n this.state = 'idle';\n resolve({ face: faceRes, body: poseRes, hand: handRes, gesture: gestureRes, performance: this.perf, canvas: process.canvas });\n });\n }\n\n async warmup(userConfig) {\n const warmup = new ImageData(255, 255);\n await this.detect(warmup, userConfig);\n this.log('warmed up');\n }\n}\n\nexport { Human as default };\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,IAAA;AAAA;AAGA,mBAAgB;AAIf,QAAI,OAAO,SAAS;AAAe,aAAO;AAAA;AAC1C,QAAI,OAAO,WAAW;AAAe,aAAO;AAAA;AAC5C,QAAI,OAAO,YAAW;AAAe,aAAO;AAAA;AAC5C,UAAM,IAAI,MAAM;AAAA;AAGjB,gBAAa;AAEb,SAAO,UAAU,UAAU,QAAO;AAGlC,MAAI,QAAO;AACV,YAAQ,UAAU,QAAO,MAAM,KAAK;AAAA;AAGrC,UAAQ,UAAU,QAAO;AACzB,UAAQ,UAAU,QAAO;AACzB,UAAQ,WAAW,QAAO;AAAA;;;ACxB1B,IAAA;AACA,gBAAa;AACb,gBAAa,QAAO;AAGpB;AACE,oBAAgB;AACd,UAAI,OAAO,IAAI;AAAA;AAAA;AAGnB,MAAI,QAAO,QAAQ,QAAO,SAAS,QAAO,eAAe,QAAO;AAC9D,WAAO,UAAU;AAAA;AAGjB,cAAU,SAAQ;AAClB,YAAQ,SAAS;AAAA;AAGnB;AACE,WAAO,QAAO,KAAK,kBAAkB;AAAA;AAIvC,YAAU,SAAQ;AAElB,aAAW,OAAO;AAChB,QAAI,OAAO,QAAQ;AACjB,YAAM,IAAI,UAAU;AAAA;AAEtB,WAAO,QAAO,KAAK,kBAAkB;AAAA;AAGvC,aAAW,QAAQ;AACjB,QAAI,OAAO,SAAS;AAClB,YAAM,IAAI,UAAU;AAAA;AAEtB,cAAU,QAAO;AACjB,QAAI,UAAS;AACX,UAAI,OAAO,aAAa;AACtB,YAAI,KAAK,OAAM;AAAA;AAEf,YAAI,KAAK;AAAA;AAAA;AAGX,UAAI,KAAK;AAAA;AAEX,WAAO;AAAA;AAGT,aAAW,cAAc;AACvB,QAAI,OAAO,SAAS;AAClB,YAAM,IAAI,UAAU;AAAA;AAEtB,WAAO,QAAO;AAAA;AAGhB,aAAW,kBAAkB;AAC3B,QAAI,OAAO,SAAS;AAClB,YAAM,IAAI,UAAU;AAAA;AAEtB,WAAO,QAAO,WAAW;AAAA;AAAA;;;AC5D3B,IAAA;AAqBA;AAIA,gBAAa,sBAAuB;AAGpC,mBAAiB,QAAO,cAAc;AACpC,eAAW,KAAK;AAChB,YAAQ,YAAY,SAAS;AAAA,WACtB;AAAA,WAAW;AAAA,WAAY;AAAA,WAAa;AAAA,WAAa;AAAA,WAAc;AAAA,WAAc;AAAA,WAAY;AAAA,WAAa;AAAA,WAAe;AAAA,WAAgB;AACxI,eAAO;AAAA;AAEP,eAAO;AAAA;AAAA;AAIb;AACE,QAAI,CAAC;AAAK,aAAO;AACjB;AACA,WAAO;AACL,cAAQ;AAAA,aACD;AAAA,aACA;AACH,iBAAO;AAAA,aACJ;AAAA,aACA;AAAA,aACA;AAAA,aACA;AACH,iBAAO;AAAA,aACJ;AAAA,aACA;AACH,iBAAO;AAAA,aACJ;AAAA,aACA;AAAA,aACA;AACH,iBAAO;AAAA;AAEP,cAAI;AAAS;AACb,gBAAO,MAAK,KAAK;AACjB,oBAAU;AAAA;AAAA;AAAA;AAOlB;AACE,eAAW,mBAAmB;AAC9B,QAAI,OAAO,SAAS,YAAa,SAAO,eAAe,cAAc,CAAC,WAAW;AAAO,YAAM,IAAI,MAAM,uBAAuB;AAC/H,WAAO,QAAQ;AAAA;AAMjB,UAAQ,gBAAgB;AACxB;AACE,SAAK,WAAW,kBAAkB;AAClC;AACA,YAAQ,KAAK;AAAA,WACN;AACH,aAAK,OAAO;AACZ,aAAK,MAAM;AACX,aAAK;AACL;AAAA,WACG;AACH,aAAK,WAAW;AAChB,aAAK;AACL;AAAA,WACG;AACH,aAAK,OAAO;AACZ,aAAK,MAAM;AACX,aAAK;AACL;AAAA;AAEA,aAAK,QAAQ;AACb,aAAK,MAAM;AACX;AAAA;AAEJ,SAAK,WAAW;AAChB,SAAK,YAAY;AACjB,SAAK,WAAW,QAAO,YAAY;AAAA;AAGrC,gBAAc,UAAU,QAAQ;AAC9B,QAAI,IAAI,WAAW;AAAG,aAAO;AAC7B;AACA;AACA,QAAI,KAAK;AACP,UAAI,KAAK,SAAS;AAClB,UAAI,MAAM;AAAW,eAAO;AAC5B,UAAI,KAAK;AACT,WAAK,WAAW;AAAA;AAEhB,UAAI;AAAA;AAEN,QAAI,IAAI,IAAI;AAAQ,aAAO,IAAI,IAAI,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK;AACtE,WAAO,KAAK;AAAA;AAGd,gBAAc,UAAU,MAAM;AAG9B,gBAAc,UAAU,OAAO;AAG/B,gBAAc,UAAU,WAAW;AACjC,QAAI,KAAK,YAAY,IAAI;AACvB,UAAI,KAAK,KAAK,UAAU,KAAK,YAAY,KAAK,UAAU,GAAG,KAAK;AAChE,aAAO,KAAK,SAAS,SAAS,KAAK,UAAU,GAAG,KAAK;AAAA;AAEvD,QAAI,KAAK,KAAK,UAAU,KAAK,YAAY,KAAK,UAAU,GAAG,IAAI;AAC/D,SAAK,YAAY,IAAI;AAAA;AAKvB;AACE,QAAI,QAAQ;AAAM,aAAO;AAAA,aAAW,QAAQ,MAAM;AAAM,aAAO;AAAA,aAAW,QAAQ,MAAM;AAAM,aAAO;AAAA,aAAW,QAAQ,MAAM;AAAM,aAAO;AAC3I,WAAO,QAAQ,MAAM,IAAO,KAAK;AAAA;AAMnC;AACE,YAAQ,IAAI,SAAS;AACrB,QAAI,IAAI;AAAG,aAAO;AAClB,aAAS,cAAc,IAAI;AAC3B,QAAI,MAAM;AACR,UAAI,KAAK;AAAG,cAAK,WAAW,KAAK;AACjC,aAAO;AAAA;AAET,QAAI,EAAE,IAAI,KAAK,OAAO;AAAI,aAAO;AACjC,SAAK,cAAc,IAAI;AACvB,QAAI,MAAM;AACR,UAAI,KAAK;AAAG,cAAK,WAAW,KAAK;AACjC,aAAO;AAAA;AAET,QAAI,EAAE,IAAI,KAAK,OAAO;AAAI,aAAO;AACjC,SAAK,cAAc,IAAI;AACvB,QAAI,MAAM;AACR,UAAI,KAAK;AACP,YAAI,OAAO;AAAG,eAAK;AAAA;AAAO,gBAAK,WAAW,KAAK;AAAA;AAEjD,aAAO;AAAA;AAET,WAAO;AAAA;AAWT;AACE,QAAK,KAAI,KAAK,SAAU;AACtB,YAAK,WAAW;AAChB,aAAO;AAAA;AAET,QAAI,MAAK,WAAW,KAAK,IAAI,SAAS;AACpC,UAAK,KAAI,KAAK,SAAU;AACtB,cAAK,WAAW;AAChB,eAAO;AAAA;AAET,UAAI,MAAK,WAAW,KAAK,IAAI,SAAS;AACpC,YAAK,KAAI,KAAK,SAAU;AACtB,gBAAK,WAAW;AAChB,iBAAO;AAAA;AAAA;AAAA;AAAA;AAOf;AACE,YAAQ,KAAK,YAAY,KAAK;AAC9B,YAAQ,oBAAoB,MAAM,KAAK;AACvC,QAAI,MAAM
;AAAW,aAAO;AAC5B,QAAI,KAAK,YAAY,IAAI;AACvB,UAAI,KAAK,KAAK,UAAU,GAAG,GAAG,KAAK;AACnC,aAAO,KAAK,SAAS,SAAS,KAAK,UAAU,GAAG,KAAK;AAAA;AAEvD,QAAI,KAAK,KAAK,UAAU,GAAG,GAAG,IAAI;AAClC,SAAK,YAAY,IAAI;AAAA;AAMvB;AACE,gBAAY,oBAAoB,MAAM,KAAK;AAC3C,QAAI,CAAC,KAAK;AAAU,aAAO,IAAI,SAAS,QAAQ;AAChD,SAAK,YAAY;AACjB,cAAU,IAAI,SAAU,SAAQ,KAAK;AACrC,QAAI,KAAK,KAAK,UAAU,GAAG;AAC3B,WAAO,IAAI,SAAS,QAAQ,GAAG;AAAA;AAKjC;AACE,YAAQ,OAAO,IAAI,SAAS,KAAK,MAAM,OAAO;AAC9C,QAAI,KAAK;AAAU,aAAO,IAAI;AAC9B,WAAO;AAAA;AAOT;AACE,QAAK,KAAI,SAAS,KAAK,MAAM;AAC3B,cAAQ,IAAI,SAAS,WAAW;AAChC,UAAI;AACF,gBAAQ,EAAE,WAAW,EAAE,SAAS;AAChC,YAAI,KAAK,SAAU,KAAK;AACtB,eAAK,WAAW;AAChB,eAAK,YAAY;AACjB,eAAK,SAAS,KAAK,IAAI,IAAI,SAAS;AACpC,eAAK,SAAS,KAAK,IAAI,IAAI,SAAS;AACpC,iBAAO,EAAE,MAAM,GAAG;AAAA;AAAA;AAGtB,aAAO;AAAA;AAET,SAAK,WAAW;AAChB,SAAK,YAAY;AACjB,SAAK,SAAS,KAAK,IAAI,IAAI,SAAS;AACpC,WAAO,IAAI,SAAS,WAAW,GAAG,IAAI,SAAS;AAAA;AAKjD;AACE,YAAQ,OAAO,IAAI,SAAS,KAAK,MAAM,OAAO;AAC9C,QAAI,KAAK;AACP,gBAAU,KAAK,YAAY,KAAK;AAChC,aAAO,IAAI,KAAK,SAAS,SAAS,WAAW,GAAG;AAAA;AAElD,WAAO;AAAA;AAGT;AACE,YAAS,KAAI,SAAS,KAAK;AAC3B,QAAI,MAAM;AAAG,aAAO,IAAI,SAAS,UAAU;AAC3C,SAAK,WAAW,IAAI;AACpB,SAAK,YAAY;AACjB,QAAI,MAAM;AACR,WAAK,SAAS,KAAK,IAAI,IAAI,SAAS;AAAA;AAEpC,WAAK,SAAS,KAAK,IAAI,IAAI,SAAS;AACpC,WAAK,SAAS,KAAK,IAAI,IAAI,SAAS;AAAA;AAEtC,WAAO,IAAI,SAAS,UAAU,GAAG,IAAI,SAAS;AAAA;AAGhD;AACE,YAAQ,OAAO,IAAI,SAAS,KAAK,MAAM,OAAO;AAC9C,QAAI,KAAK;AAAU,aAAO,IAAI,KAAK,SAAS,SAAS,UAAU,GAAG,IAAI,KAAK;AAC3E,WAAO;AAAA;AAIT;AACE,WAAO,IAAI,SAAS,KAAK;AAAA;AAG3B;AACE,WAAO,OAAO,IAAI,SAAS,KAAK,MAAM,OAAO;AAAA;AAAA;;;;;;;;;;;;;;;;;;;;;;;;ACtS/C;;;;;;;;;;;;;;;;AAgBO,6BAAwB;AACxB,6BAAwB;;MAG3B;AACI,aAAK,UAAU;AACf,aAAK,YAAY;AACjB,aAAK,OAAO,IAAI;AAChB,aAAK,eAAe;;MAExB;AACI,YAAI,CAAC,KAAK,KAAK,IAAI;AACf,eAAK,UAAU,SAAS,KAAK,SAAS;;AAE1C,eAAO,KAAK,KAAK,IAAI;;MAEzB;AACI,aAAK;AACL,aAAK,KAAK,IAAI,QAAQ;;MAE1B;AACI,eAAO,KAAK,KAAK,IAAI;;MAEzB;AACI,aAAK;AACL,eAAO,KAAK,KAAK,OAAO;;MAE5B;AACI,eAAO,KAAK;;;;MAUhB;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAG7B;AACI,eAAO,mBAAkB;;MAG7B;AACI,eAAO,KAAK,qBAAqB,KAAK,mBAAkB;;MAE5D;AACI,eAAO,mBAAkB;;MAE7B,kBAAmB,GAAG,GAAG,YAAY,YAAY,MAAM,yBAAY;AAC/D,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAk
B;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B,aAAc,eAAO,QAAQ,UAAU,MAAM,yBAAY;AACrD,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B,sBAAuB,eAAO,QAAQ,UAAU,MAAM,yBAAY;AAC9D,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAG7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;;AAGjC;AACI,YAAM,IAAI,MAAM,IAAI;;ACvfxB;;;;;;;;;;;;;;;;AA8BO;AACH,oBAAc,OAAM;AACpB,iBAAW;AACX,mBAAY;AAEZ,aAAO,UAAU;AAEb,iBAAS,KAAK,WAAW,UAAW;AAEpC;AAEA,eAAO,OAAM;AACb,eAAM,WAAW,OAAM;AACvB,eAAM,UAAS;;;AAIhB;AACH,aAAO,KAAK,IAAI,MAAK,KAAK,IAAI,GAAG;;AAE9B;AACH,aAAO,MAAM,MAAM,IAAI,MAAM,MAAM;;AAEhC;AACH,iBAAU;AACV,mBAAa,GAAG,IAAI,IAAI,QAAQ;AAC5B,gBAAO,IAAI;;AAEf,aAAO;;AASJ;AACH,gBAAU,KAAK;AACf,aAAQ,IAAI,IAAM,KAAI,KAAK;;AAGxB;AACH,mBAAa;AACb,mBAAa,GAAG,IAAI,EAAE,QAAQ;AAC1B,qBAAa,OAAO,EAAE,MAAM,OAAO,EAAE;AACrC,kBAAU,OAAO;;AAErB,aAAO;;AAiBJ;AACH,UAAI,CAAC;AACD,cAAM,IAAI,MAAM,OAAO,QAAQ,WAAW,MAAM;;;AAGjD,qEAAgE;AACnE,cAAO,aAAY,QAAQ,SAAS,MAAM,qBAAqB,WAAW,cAAc;;AAErF;AACH,cAAO,KAAK,MAAM,MAAM;;AAqBrB,oCAA+B,qBAAqB;AACvD,UAAI,UAAU;AACV,iBAAS;;AAEb,UAAI,MAAM,QAAQ,QAAQ,cAAa,QAAQ,CAAC;AAC5C,qBAAa,GAAG,IAAI,IAAI,QAAQ,EAAE;AAC9B,mBAAQ,IAAI,IAAI,QAAQ;;;AAI5B,eAAO,KAAK;;AAEhB,aAAO;;AAaJ;AACH,UAAI,MAAM,WAAW;AAEjB,eAAO;;AAEX,iBAAW,MAAM;AACjB,mBAAa,GAAG,IAAI,MAAM,QAAQ;AAC9B,gBAAQ,MAAM;;AAElB,aAAO;;AAEJ;AACH,aAAO,MAAM,WAAW;;AAErB;AACH,UAAI,OAAO;AACP,eAAO;;AAEX,UAAI,MAAM,QAAQ,MAAM;AACpB,eAAO;;AAEX,UAAI,GAAG,WAAW,GAAG;AACjB,eAAO;;AAEX,mBAAa,GAAG,IAAI,GAAG,QAAQ;AAC3B,YAAI,GAAG,OAAO,GAAG;AACb,iBAAO;;;AAGf,aAAO;;AAEJ;AACH,aAAO,IAAI,MAAM;;AAEd;AAEH,UAAI,KAAK,QAAQ;AAEb,eAAO,KAAK,KAAK;;AAErB,UAAI,MAAM;AACN,eAAO;iBAEF,MAAM;AACX,eAAO;;AAGP,oBAAY,KAAK,IAAI,IAAI;AACzB,eAAQ,OAAM,KAAM,OAAM;;;AAG3B;AACH,oBAAc,KAAK,KAAK,KAAK,KAAK;AAClC,aAAO,CAAC,OAAO,KAAK,KAAK,OAAO;;AAc7B;AACH,8BAAwB,IAAI,YAAY;AACxC,mBAAa,GAAG,IAAI,GAAG,EAAE;AACrB,wB
AAgB,KAAK;;AAEzB,eAAQ;AACR,aAAO;;AAEJ;AACH,UAAI,QAAQ,EAAE;AACV,eAAO;;AAEX,aAAO,IAAI,IAAI,OAAO,OAAO,EAAE;;AAE5B,6CAAwC,aAAa;AACxD,aAAO,IAAI,QAAQ;AACf,uBAAe;AACf,sBAAc;AACV,cAAI;AACA;AACA;;AAEJ;AACA,8BAAoB,QAAQ;AAC5B,cAAI,cAAc,QAAQ,YAAY;AAClC;AACA;;AAEJ,qBAAW,OAAO;;AAEtB;;;AAYD;AACH,sBAAgB;AAChB,wBAAkB;AAClB,mBAAa,GAAG,IAAI,MAAM,QAAQ,EAAE;AAChC,YAAI,MAAM,MAAM;AACZ,uBAAa,MAAM;mBAEd,MAAM,OAAO;AAClB,cAAI,gBAAgB;AAChB,kBAAM,MAAM,yDACW,uBAAuB;;AAElD,wBAAc;mBAET,MAAM,KAAK;AAChB,gBAAM,MAAM,gCAAgC,MAAM,aAAa;;;AAGvE,UAAI,gBAAgB;AAChB,YAAI,OAAO,KAAK,SAAS;AACrB,gBAAM,MAAM,QAAQ,yCAAyC;;AAEjE,eAAO;;AAEX,UAAI,cAAc;AACd,cAAM,MAAM,qCAAqC;;AAGrD,UAAI,OAAO,cAAc;AACrB,cAAM,MAAM,wDACD,UAAU;;AAEzB,uBAAiB,MAAM;AACvB,eAAS,eAAe,OAAO;AAC/B,aAAO;;AAEJ;AACH,mBAAa,MAAM;AAEnB,aAAO,QAAQ,OAAO,MAAM,IAAI,UAAU,KAAK,GAAG,OAAO;AAEzD,cAAO,KAAK,MAAM,QAAM,MAAM,CAAC,QAAQ,KAAK,OAAO,MAAM,+CAA+C,SAAS,sBACjG;AAEhB,cAAO,KAAK,MAAM,QAAM,OAAM,MAAM,MAAM,0DAC1B;AAEhB,aAAO,KAAK,IAAI,OAAK,IAAI,IAAI,OAAO,IAAI;;AAGrC;AACH,uBAAiB;AACjB,uBAAiB;AACjB,2BAAqB,QAAQ,QAAQ,MAAM,QAAQ,SAAS,KAAK,WAAW;AAC5E,mBAAc,QAAQ,QAAQ,eAC1B,OACA,gBAAe,MAAM,OAAO;AAChC,cAAQ;AACR,mBAAa,GAAG,IAAI,MAAM,QAAQ,EAAE;AAChC,YAAI,QAAQ;AACR,cAAI,KAAK,OAAO,KAAK,MAAM,OAAO;AAC9B,kBAAM,IAAI,MAAM,sBAAsB,oBAAoB,MAAM;;AAEpE,cAAK,MAAK,MAAM,QAAQ,KAAK,KAAK,MAAM,MAAM,OAAO;AACjD,qBAAS,KAAK,MAAM;AACpB,qBAAS,KAAK;;AAElB,cAAI,KAAK,MAAM;AACX;;;AAGR,YAAI,MAAM,OAAO;AACb,mBAAS,KAAK,MAAM;AACpB,mBAAS,KAAK;;;AAGtB,aAAO,CAAE,UAAU;;AAEhB;AACH,mBAAa;AACb,UAAI,SAAS,QAAQ,UAAU;AAC3B,iBAAS,IAAI,aAAa;iBAErB,UAAU;AACf,iBAAS,IAAI,WAAW;iBAEnB,UAAU;AACf,iBAAS,IAAI,WAAW;;AAGxB,cAAM,IAAI,MAAM,qBAAqB;;AAEzC,aAAO;;AAEJ;AACH,mBAAa;AACb,UAAI,SAAS,QAAQ,UAAU;AAC3B,iBAAS,IAAI,aAAa;iBAErB,UAAU;AACf,iBAAS,IAAI,WAAW;iBAEnB,UAAU;AACf,iBAAS,IAAI,WAAW;iBAEnB,UAAU;AACf,iBAAS,IAAI,MAAM;;AAGnB,cAAM,IAAI,MAAM,qBAAqB;;AAEzC,aAAO;;AAEJ;AACH,mBAAa,GAAG,IAAI,KAAK,QAAQ;AAC7B,oBAAY,KAAK;AACjB,YAAI,MAAM,QAAQ,CAAC,SAAS;AACxB,gBAAM,MAAM,oBAAoB,iCAAiC;;;;AAKtE;AACH,aAAO,UAAU,UAAU,UAAU,eAAe,UAAU,aAC1D,UAAU,WAAW,UAAU;;AAMhC;AACH,UAAI,YAAY;AACZ,eAAO;;AAEX,UAAI,YAAY,aAAa,YAAY;AACrC,eAAO;;AAEX,UAAI,YAAY,WAAW,YAAY,aAAa,YAAY;AAC5D,eAAO;;AAEX,UAAI,YAAY,UAAU,YAAY;AAClC,eAAO;;AAEX,aAAO;;AAEJ;AACH,aAAO,aAAa,gBAAgB,aAAa,cAC7C,aAAa;;AAEd;AACH,UAAI,UAAU,aAAa,UAAU;AACjC,eAAO;iBAEF,UAAU;AACf,eAAO;iBAEF,UAAU;AACf,eAAO;;AAGP,cAAM,IAAI,MAAM,iBAAiB;;;AASlC;AACH,UAAI,OAAO;AACP,eAAO;;AAEX,kBAAY;AACZ,UAAI,QAAQ,OAAK,SAAS,EAAE;AAC5B,aAAO;;AAGJ;AACH,aAAO,OAAO,UAAU,YAAY,iBAAiB;;AAElD;AACH,aAAO,OAAO,UAAU;;AAErB;AACH,aAAO,OAAO,UAAU;;AAErB;AACH,UAAI,MAAM,QAAQ;AACd,eAAO,YAAW,OAAO;;AAE7B,UAAI,kBAAkB;AAClB,eAAO;iBAEF,kBAAkB,cAAc,kBAAkB;AACvD,eAAO;iBAEF,UAAS;AACd,eAAO;iBAEF,UAAS;AACd,eAAO;iBAEF,WAAU;AACf,eAAO;;AAEX,aAAO;;AAEJ;AACH,aAAO,CAAC,CAAE,MAAK,EAAE,eAAe,EAAE,QAAQ,EAAE;;AAEzC;AACH,mBAAa,OAAO,IAAI,MAAM,EAAE;AAC5B,YAAI,OAAO,MAAM;AACb,iBAAO;;;AAGf,aAAO;;AAEJ;AACH,mBAAa,MAAM;AACnB,UAAI,OAAO;AACP,eAAO;;AAIX,sBAAgB,IAAI,MAAM,OAAO;AACjC,cAAQ,OAAO,KAAK,MAAM,OAAO;AACjC,mBAAa,OAAO,GAAG,KAAK,GAAG,EAAE;AAC7B,gBAAQ,KAAK,QAAQ,IAAI,KAAK,MAAM,IAAI;;AAE5C,aAAO;;AAEX;AACI,kBAAY,IAAI;AAChB,UAAI,MAAM,WAAW;AACjB,kBAAU,MAAM;AAChB,qBAAa,GAAG,IAAI,GAAG;AACnB,cAAI,KAAK,EAAE,SAAS;;;AAIxB,kBAAU,MAAM;AAChB,qBAAa,MAAM,MAAM;AACzB,oBAAY,KAAK,OAAO,YAAY,MAAM;AAC1C,qBAAa,GAAG,IAAI,GAAG;AACnB,cAAI,KAAK,mBAAkB,SAAS,IAAI,KAAK,MAAM;;;AAG3D,aAAO;;AAGJ;AACH,UAAI,MAAM,WAAW;AAEjB,eAAO,EAAE;;AAEb,mBAAa,MAAM,OAAO,YAAY,MAAM;AAC5C,UAAI,SAAS;AAET,eAAO;;AAEX,UAAI,SAAS,EAAE;AACX,cAAM,IAAI,MAAM,IAAI,wCAAwC,EAAE;;AAElE,aAAO,mBAAkB,GAAG,OAAO;;AAEhC;AACH,qBAAc,qBAAoB,MAAM;AACxC,mBAAa,GAAG,IAAI,OAAM,QAAQ;AAC9B,eAAM,KAAK;;AAE
f,aAAO;;AAEJ;AACH,UAAI,SAAS,QAAQ,UAAU,aAAa,UAAU;AAClD,eAAO,IAAI,aAAa;iBAEnB,UAAU;AACf,eAAO,IAAI,WAAW;iBAEjB,UAAU;AACf,eAAO,IAAI,WAAW;;AAGtB,cAAM,IAAI,MAAM,qBAAqB;;;AAQtC;AACH,mBAAa,MAAM,OAAO,gBAAgB,OAAO,MAAM;AACvD,UAAI,SAAS,QAAQ,UAAU;AAC3B,eAAO,eAAc,OAAO,IAAI,aAAa;iBAExC,UAAU;AACf,eAAO,eAAc,OAAO,IAAI,WAAW;iBAEtC,UAAU;AACf,eAAO,eAAc,OAAO,IAAI,WAAW;;AAG3C,cAAM,IAAI,MAAM,qBAAqB;;;AAGtC;AACH,YAAM,QAAQ;AACV,gBAAO,OAAO,UAAU,YAAY,WAAW,GAAG,MAAM,0EAC1C;;;AAWf;AACH,UAAI,SAAS;AACT,eAAO;iBAEF,SAAS;AACd,eAAO,KAAK;;AAEhB,mBAAY,KAAK,KAAK,SAAS;AAC/B,mBAAa,GAAG,IAAI,KAAK,SAAS,GAAG,EAAE;AACnC,kBAAS,QAAQ,KAAK,KAAK;;AAE/B,aAAO;;AAUJ;AACH,UAAI,SAAS;AACT,eAAO;iBAEF,SAAS;AACd,eAAO,CAAC;;AAEZ,mBAAa,IAAI,MAAM;AACvB,mBAAa,GAAG,IAAI,KAAK,SAAS,GAAG,EAAE;AACnC,aAAK,KAAK,KAAK,MAAM,SAAQ,QAAQ;AACrC,kBAAS,KAAK,KAAK,QAAQ;;AAE/B,WAAK,KAAK,SAAS,KAAK;AACxB,aAAO;;AAOJ;AAOH,aAAO,UAAU,OAAO,QAAQ,OAAO,OAAO,SAAS;;ACrnB3D;;;;;;;;;;;;;;;;AAkBA,uCAAkC;;MAU9B;AACI,aAAK,SAAS;AACd,aAAK,QAAQ;AACb,aAAK,eAAe;AACpB,aAAK,WAAW;AAChB,aAAK;;MAET;AACI,YAAI,KAAK,YAAY;AACjB,kBAAQ,KAAK,YAAY,KAAK,oEACO;;AAEzC,aAAK,eAAe;AACpB,aAAK,WAAW;;MAEpB;AACI,aAAK,aAAa,YAAY,CAAE,cAAc;AAG9C,YAAI,KAAK,SAAS,aAAa;AAC3B,4BAAkB,KAAK,SAAS;AAChC,kBAAQ,KAAK,qCAAqC,aAAa;AAC/D,eAAK,IAAI,UAAU;;;YAGrB;AACF,YAAI,YAAY,KAAK;AACjB,iBAAO,KAAK,MAAM;;AAEtB,aAAK,MAAM,YAAY,MAAM,KAAK,aAAa;AAC/C,eAAO,KAAK,MAAM;;MAEtB;AACI,YAAI,YAAY,KAAK;AACjB,iBAAO,KAAK,MAAM;;AAEtB,0BAAkB,KAAK,aAAa;AACpC,YAAI,WAAU;AACV,gBAAM,IAAI,MAAM,QAAQ;;AAG5B,aAAK,MAAM,YAAY;AACvB,eAAO,KAAK,MAAM;;MAEtB;AACI,eAAO,KAAK,IAAI;;MAEpB;AACI,eAAO,KAAK,IAAI;;MAEpB;AACI,eAAO,KAAK;;UAGZ;AACA,eAAO,KAAK;;MAEhB;AACI,YAAI,KAAK,aAAa,aAAa;AAC/B,gBAAM,IAAI,MAAM,mBAAmB;;AAEvC,aAAK,MAAM,YAAY;AACvB,YAAI,KAAK,aAAa,UAAU,WAAW;AACvC,eAAK,aAAa,UAAU,QAAQ;;;MAG5C;AACI,YAAI,KAAK,aAAa,aAAa;AAC/B,gBAAM,IAAI,MAAM,yBAAyB;;AAE7C,eAAO,KAAK,aAAa,UAAU;;MAEvC;AACI,aAAK,QAAQ,OAAO,OAAO,IAAI;;MAEnC;AACI,aAAK,QAAQ;AACb,aAAK,WAAW;AAChB,aAAK;;MAET;AACI,YAAI,OAAO,KAAK,WAAW,eACvB,OAAO,KAAK,OAAO,aAAa,eAChC,OAAO,KAAK,OAAO,SAAS,WAAW;AACvC;;AAEJ,0BAAkB,gBAAe,KAAK,OAAO,SAAS;AACtD,YAAI,8BAA6B;AAC7B,4BAAkB,UAAU,4BAA2B,MAAM;AAC7D,oBAAU,QAAQ;AACd,iCAAqB,SAAS,MAAM;AACpC,iBAAK,SAAS,OAAO,YAAW,KAAK;;;;;AAK9C;AACH,qBAAe;AACf,kBAAY,QAAQ,+BAA+B;AAC/C,qBAAY,QAAQ,EAAE,IAAI,EAAE;AAC5B,eAAO,EAAE,KAAK;;AAElB,aAAO;;AAEX;AACI,aAAO,mBAAmB,SAAS,mBAAmB,SAAS;;AAEnE;AACI,cAAQ,MAAM;AACd,UAAI,UAAU,UAAU,UAAU;AAC9B,eAAO,UAAU;iBAEZ,GAAG,CAAC,YAAY;AACrB,eAAO,CAAC;;AAEZ,YAAM,IAAI,MAAM,oCAAoC,kBAAkB;;AAUnE;AACH,aAAO,SAAA;;AAEA,aAAA,MAAM;AACV;AACH,eAAA,MAAM;;AC9JV;;;;;;;;;;;;;;;;AAoBA;AAEO;AACH,UAAI,oBAAmB;AAEnB;AACA,YAAI,OAAQ,WAAY;AACpB,eAAK;mBAEA,OAAQ,WAAY;AACzB,eAAK;mBAEA,OAAQ,YAAa;AAC1B,eAAK;mBAEA,OAAQ,SAAU;AACvB,eAAK;;AAGL,gBAAM,IAAI,MAAM;;AAEpB,2BAAkB;;AAEtB,aAAO;;AAGX;AACI,iBAAW;AACX,UAAI,GAAG,cAAc;AACjB,WAAG,aAAa,IAAI;;AAExB,aAAO,GAAG;;AASP;AACH,wBAAkB;AAClB,UAAI,UAAU,IAAI;AACd,eAAO,UAAU,IAAI;;AAGrB,0BAAkB;AAClB,kBAAU,IAAI,KAAK;AACnB,eAAO,UAAU,IAAI;;;ACpEjB,iBAAO;AACP,iBAAQ;AACR,kBAAS;AACT,iBAAO;AACP,kBAAQ;AACR,gBAAO;AACP,gBAAO;AACP,oBAAU;AACV,mBAAU;AACV,iBAAQ;AACR,kBAAS;AACT,iBAAQ;AACR,kBAAS;AACT,kBAAS;AACT,qBAAW;AACX,4BAAmB;AACnB,sBAAa;AACb,8BAAqB;AACrB,yBAAe;AACf,2BAAkB;AAClB,wBAAe;AACf,kBAAQ;AACR,iBAAQ;AACR,yBAAe;AACf,qBAAW;AACX,oBAAU;AACV,oBAAU;AACV,iCAAwB;AACxB,iCAAuB;AACvB,mBAAU;AACV,mCAA0B;AAC1B,kCAAyB;AACzB,iBAAO;AACP,iBAAQ;AACR,oBAAU;AACV,2BAAiB;AACjB,0BAAgB;AAChB,mCAAyB;AACzB,gDAAuC;AACvC,+CAAsC;AACtC,iBAAQ;AACR,uBAAc;AACd,oCAA2B;AAC3B,qCAA4B;AAC5B,iBAAO;AACP,iBAAO;AACP,oBAAW;AACX,gBAAO;AACP,mBAAS;AACT,iBAAO;AACP,kBAAS;AACT,gBAAO;AACP,kBAAQ;AACR,2BAAiB;AACjB,kBAAS;AACT,sBAAY;AACZ
,4BAAkB;AAClB,sBAAY;AACZ,sBAAY;AACZ,qBAAW;AACX,0BAAgB;AAChB,sBAAY;AACZ,iBAAQ;AACR,iBAAQ;AACR,qBAAY;AACZ,kBAAS;AACT,kBAAS;AACT,kBAAQ;AACR,uBAAa;AACb,qBAAY;AACZ,iBAAO;AACP,kBAAS;AACT,wBAAc;AACd,uBAAc;AACd,sBAAa;AACb,uBAAc;AACd,gBAAO;AACP,wBAAe;AACf,iBAAO;AACP,qBAAW;AACX,qBAAW;AACX,4BAAmB;AACnB,sBAAa;AACb,8BAAqB;AACrB,8BAAqB;AACrB,iBAAQ;AACR,iBAAO;AACP,qBAAW;AACX,sBAAa;AACb,gBAAO;AACP,sBAAY;AACZ,oBAAU;AACV,sBAAY;AACZ,iCAAuB;AACvB,iCAAuB;AACvB,iCAAuB;AACvB,sBAAY;AACZ,oBAAU;AACV,mBAAS;AACT,iBAAQ;AACR,iBAAO;AACP,mBAAS;AACT,iBAAQ;AACR,kBAAS;AACT,iBAAQ;AACR,uBAAc;AACd,kBAAQ;AACR,qBAAW;AACX,kCAAyB;AACzB,sCAA6B;AAC7B,4BAAkB;AAClB,+BAAsB;AACtB,mBAAS;AACT,qBAAW;AACX,kBAAS;AACT,mBAAS;AACT,uBAAa;AACb,sBAAY;AACZ,iBAAQ;AACR,mBAAS;AACT,iBAAO;AACP,iBAAQ;AACR,iBAAQ;AACR,qBAAW;AACX,qBAAY;AACZ,kBAAQ;AACR,iBAAO;AACP,2BAAkB;AAClB,oBAAU;AACV,qBAAW;AACX,+BAAqB;AACrB,oBAAU;AACV,iBAAO;AACP,0BAAiB;AACjB,0BAAgB;AAChB,gBAAO;AACP,kBAAQ;AACR,kBAAQ;AACR,iBAAQ;AACR,uBAAa;AACb,mBAAU;AACV,oBAAU;AACV,+BAAsB;AACtB,uBAAa;AAIb,kBAAQ;AACR,uBAAc;AACd,8BAAoB;AACpB,0BAAgB;AAChB,yBAAe;AACf,kCAAwB;ACxJpC;;;;;;;;;;;;;;;;AAkBA,4BAAuB,WAAU,kBAAkB,MAAM,IAAI;AAC7D,0BAAqB,WAAU,gBAAgB,MAAM,IAAI;AAOlD;AACH,kBAAY,SAAQ,YAAY;AAChC,aAAO,gBAAe,IAAI;;AAMvB;AACH,aAAO,cAAa,IAAI;;AAErB;AACH,iBAAW,gBAAe;AAC1B,qBAAe;AACf,aAAO;AACH,eAAQ,MAAM,SAAU,GAAG;AAC3B,YAAI;AACA;;AAEJ,+BAAsB;AACtB,2BAAmB,IAAI,MAAM;AAC7B,YAAI,aAAY;AACZ,iBAAO,KAAK;;;AAGpB,aAAO;;AAaJ;AACH,aAAQ,YAAY,eAAgB;AACpC,kBAAY,SAAQ,YAAY;AAChC,UAAI,gBAAe,IAAI;AACnB,gBAAQ,KAAK,eAAe,4BACpB;;AAEZ,sBAAe,IAAI,KAAK;;AAUrB;AACH,aAAQ,cAAe;AACvB,UAAI,cAAa,IAAI;AAGjB,YAAI,OAAM,QAAQ;AACd,kBAAQ,KAAK,gCAAgC;;;AAGrD,oBAAa,IAAI,YAAY;;AAS1B;AACH,kBAAY,SAAQ,YAAY;AAChC,UAAI,CAAC,gBAAe,IAAI;AACpB,cAAM,IAAI,MAAM,eAAe,4BACvB;;AAEZ,sBAAe,OAAO;;AAGnB;AACH,UAAI,CAAC,cAAa,IAAI;AAClB,cAAM,IAAI,MAAM,iBAAiB;;AAErC,oBAAa,OAAO;;AAQjB;AACH,sBAAgB,sBAAqB;AACrC,cAAQ,QAAQ;AACZ,gCAAwB,OAAO,OAAO,IAAI,cAAc,CAAE,aAAa;AACvE,wBAAe;;;AAGvB;AACI,aAAO,GAAG,eAAe;;AChI7B;;;;;;;;;;;;;;;;AAsBO;AACH,UAAI,UAAU;AACV,eAAO,cAAa;;AAExB,aAAO,cAAa,CAAC,QAAQ;;AAEjC;AACI,aAAQ,aAAa,gBAAgB,UAAU,aAC1C,aAAa,cAAc,UAAU,WACrC,aAAa,cAAc,UAAU;;AAEvC;AACH,UAAI,UAAU;AACV,cAAM,IAAI,MAAM;;AAEpB,UAAI,MAAM,QAAQ;AACd,YAAI,SAAa;;AAErB,UAAI,OAAM,QAAQ;AACd,kCAA8B,GAAG;;AAErC,UAAI,oBAAmB,GAAG;AACtB,eAAO;;AAEX,UAAI,SAAS,QAAQ,UAAU,aAAa,UAAU;AAClD,eAAO,IAAI,aAAa;iBAEnB,UAAU;AACf,eAAO,IAAI,WAAW;iBAEjB,UAAU;AACf,qBAAa,IAAI,WAAW,EAAE;AAC9B,qBAAa,GAAG,IAAI,KAAK,QAAQ,EAAE;AAC/B,cAAI,KAAK,MAAM,EAAE,QAAQ;AACrB,iBAAK,KAAK;;;AAGlB,eAAO;;AAGP,cAAM,IAAI,MAAM,qBAAqB;;;AActC;AACH,aAAO,OAAM,SAAS;;AAkBnB;AACH,aAAO,OAAM,SAAS,MAAM,MAAM;;AAU/B,yCAAoC;AACvC,iBAAW,YAAY;AACvB,aAAO,OAAM,SAAS,OAAO,GAAG;;AAU7B,6CAAwC;AAC3C,iBAAW,YAAY;AACvB,aAAO,OAAM,SAAS,OAAO,OAAO;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACxHxC;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,eAAe;AACpB,aAAK,SAAS;AACd,YAAI,UAAU;AACV,eAAK,SAAS,IAAI;;;MAG1B;AACI;AACA,oCAA4B;AACxB,oBAAU;;AAEd,sBAAc,KAAK,aAAa,KAAK;AACrC,qBAAa,GAAG,IAAI,QAAQ,QAAQ;AAChC,yBAAe,QAAQ;AAGvB,iBAAO,OAAO,KAAK;AACf,uCAA0B,YAAY,OAAO,OAAO;;;AAG5D,8BAAsB;UAClB;UACA;UACA;UACA,QAAQ,MAAM,KAAK,YAAU,OAAO;UACpC,WAAW,MAAM,KAAK,YAAU,OAAO,uBAAuB,OAC1D,OAAO,wBACP;;AAER,eAAO;;MAEX;AACI,eAAQ,YAAY,SAAS,QAAQ,QAAQ,aAAc;AAC3D,gBAAQ,QAAQ;AACZ,kBAAQ,IAAI,CAAC,OAAO,QAAQ,QAAQ,YAAY,KAAK;AACjD,iBAAK,OAAO,iBAAiB,YAAY,QAAQ,eAAe,IAAI,eAAe,IAAI,QAAQ,eAAe;;;;;AAKvH;AACH,UAAI,UAAU;AAEV,eAAO;;AAEX,mBAAa,GAAG,IAAI,KAAK,QAAQ;AAC7B,oBAAY,KAAK;AACjB,YAAI,MAAM,QAAQ,CAAC,SAAS;AAExB,kBAAQ,KAAK,SAAS,yBAAyB;AAC/C,iBAAO;;;AAGf,aAAO;;;MAGP;AACI,sBAAa,OAAO,WAAW,WAAW,UAAc,GAAG,YAAY,KACnE,OAAO;AACX,2BAAmB,UAAc,M
AAM;AACvC,qBAAa,OAAO;AACpB,qBAAa,OAAO;AACpB,sBAAc,UAAc,OAAO,MAAM,YAAY;AACrD,qCAA6B;AAC7B,4BAAmB;AACf,yBAAc,OAAO;AACrB,cAAI,UAAS;AAGT,+BAAmB,OAAM,SAAS,OAAO;AACzC,8BAAkB,WAAW;AAC7B,sCACI,GAAG,UAAS,cAAc,YAAY,IAAI,aAAa;;;AAGnE,gBAAQ,IAAI,KAAK,gBAAiB,WAAW,SAAS,WAAY,UAAW,4BAA6B,aAAa,oBAAoB,aAAa,cAAc,iBAAiB,gBAAgB;;;AC9F/M;;;;;;;;;;;;;;;;AAyBO;AAGH,2BAAqB;AACrB,yBAAmB;AACnB,mBAAa,GAAG,IAAI,GAAG,QAAQ;AAC3B,qBAAa,GAAG,GAAG,MAAM;;AAE7B,mBAAa,GAAG,IAAI,MAAK,QAAQ;AAC7B,qBAAa,MAAK;AAClB,2BAAmB,KAAK;AACxB,gCAAwB;AACpB,yBAAc,WAAW;AACzB,8BAAoB;AACpB,uBAAa,GAAG,IAAI,GAAG,QAAQ;AAC3B,gBAAI,aAAa,OAAM;AACnB,mBAAK,QAAQ,QAAQ,YAAU,aAAa,OAAO,MAAM;AACzD,8BAAgB;AAChB,yBAAW,KAAK,MAAM;AACtB;;;AAGR,cAAI;AACA;;;;AAKZ,6BAAuB;AACvB,qBAAe,EAAE,MAAM;AACvB,uBAAiB;AACjB,mBAAa,MAAK,SAAS,GAAG,KAAK,GAAG;AAClC,qBAAa,MAAK;AAClB,2BAAmB,KAAK;AAExB,qBAAa,GAAG,IAAI,KAAK,QAAQ,QAAQ;AACrC,cAAI,eAAe,KAAK,QAAQ,GAAG;AAC/B,oCAAwB;AACpB,6BAAe,WAAW,WAAW,MAAM;AAC3C,uBAAS,KAAK,MAAM;;AAExB;;;;AAKZ,2BAAqB;AACrB,mBAAa,GAAG,IAAI,MAAK,QAAQ;AAC7B,qBAAa,MAAK;AAClB,YAAI,WAAW,KAAK,OAAO,SAAS,KAAK;AAErC,+BAAqB;AACrB,kCAAwB,KAAK;AACzB,8BAAkB,KAAK,OAAO;AAC9B,gBAAI,aAAa,UAAU;AACvB,2BAAa,aAAa;;;AAIlC,6BAAmB,OAAO,OAAO,IAAI;AACrC,qBAAW,SAAS;AACpB,qBAAW,UAAU,KAAK;AAC1B,uBAAa,KAAK;;;AAG1B,aAAO;;AASJ;AAEH,mBAAa,aAAa,SAAS,GAAG,KAAK,GAAG;AAC1C,qBAAa,aAAa;AAC1B,oBAAY;AACZ,aAAK,QAAQ,QAAQ;AACjB,6BAAmB,6BAA6B,EAAE;AAClD,cAAI,cAAc;AACd,gBAAI,KAAK;;AAKT,gBAAI,KAAK;;;AAGjB,YAAI,KAAK,YAAY;AACjB,gBAAM,IAAI,MAAM,4DACL,KAAK;;AAGpB,+BAAuB,KAAK,SAAS;AACrC,gCAAwB,KAAK;AACzB,cAAI,CAAE,cAAa;AACf,kBAAM,IAAI,MAAM,iCAAiC,yCACf,OAAO,KAAK;;AAGlD,qBAAW,MAAK,MAAM,eAAe;AACrC,cAAI,GAAG,UAAU;AACb,kBAAM,IAAI,MAAM,4BAA4B,KAAK,qCAC1C,iDAAiD,GAAG;;AAE/D,oBAAU,KAAK,OAAO;AACtB,cAAI,CAAC,aAAiB,GAAG,OAAO,EAAE;AAC9B,kBAAM,IAAI,MAAM,4BAA4B,KAAK,sCACzC,yBAAyB,GAAG,wDACL,EAAE;;AAErC,cAAI,6BAA6B,EAAE,OAAO;AACtC,yCAA6B,EAAE,MAAM;;AAGrC,gCAAoB,6BAA6B,EAAE;AACnD,yCAA6B,EAAE,MAAM,KAAI,aAAa;AACtD,wBAAY;;;;;AChJ5B;;;;;;;;;;;;;;;;AAkBA,mCAA8B;AAE9B,wCAAmC;AAEnC,mCAA8B;AACvB;AACH,sBAAgB,gBAAe;AAC/B,wBAAkB,yBAAwB,MAAM,OAAO,OAAO;AAC9D,mBAAa,MAAM;AACnB,wBAAkB,mBAAkB,MAAM,OAAO,OAAO,SAAS;AACjE,oBAAc,CAAC;AACf,UAAI;AACA,cAAM,KAAK,YAAY;AACvB,cAAM,KAAK,WAAW;AACtB,cAAM,KAAK,aAAa;AACxB,cAAM,KAAK;;AAEf,YAAM,KAAK,UAAU,IAAI,OAAK,SAAS,GAAG,KAAK;AAC/C,aAAO,MAAM,KAAK;;AAEtB;AACI,gBAAU,eAAc;AACxB,sBAAgB,QAAQ,QAAQ,SAAS;AACzC,wBAAkB,IAAI,MAAM,SAAS,KAAK;AAC1C,mBAAa,MAAM;AACnB,6BAAuB,UAAU,cAAc,qBAAoB,QAAQ;AAC3E,UAAI,OAAO;AACP,uBAAe,GAAG,MAAM,IAAI,SAAS;AACjC,yBAAe,MAAM;AACrB,uBAAa,GAAG,IAAI,SAAS;AACzB,sBAAU,KAAK,KAAK,IAAI,UAAU,IAAI,aAAY,eAAe,SAAS,IAAI,GAAG,OAAO;;;;AAIpG,aAAO;;AAEX;AACI;AACA,UAAI,MAAM,QAAQ;AACd,iBAAS,GAAG,WAAW,IAAI,GAAG,QAAQ,8BAC/B,WAAW,IAAI,GAAG,QAAQ;iBAE5B,UAAS;AACd,iBAAS,IAAI;iBAER,UAAU;AACf,iBAAS,iBAAgB;;AAGzB,iBAAS,WAAW,IAAI,QAAQ,yBAAwB;;AAE5D,aAAO,UAAS,QAAQ;;AAE5B;AACI,aAAO,MAAM,IAAI,UAAU;;AAE/B,iFAA4E;AACxE,gCAA0B,UAAU,cAAc,IAAI;AACtD,mBAAa,MAAM;AACnB,mBAAa,MAAM;AACnB,UAAI,SAAS;AACT,YAAI,UAAU;AACV,+BAAqB,qBAAoB;AACzC,iBAAO,CAAC,aAAY,aAAa,IAAI,GAAG;;AAE5C,YAAI,UAAU;AACV,iBAAO,CAAC,iBAAgB,KAAK;;AAEjC,eAAO,CAAC,KAAK,GAAG;;AAEpB,UAAI,SAAS;AACT,YAAI,OAAO;AACP,gCAAsB,8BAA6B;AACnD,0BAAgB,MAAM,KAAK,KAAK,MAAM,GAAG;AACzC,yBAAe,MAAM,KAAK,KAAK,MAAO,QAAO,+BAA8B,mBAAmB,OAAO;AACrG,cAAI,UAAU;AACV,wBAAY,qBAAoB;AAChC,uBAAW,qBAAoB;;AAEnC,iBAAO;YACH,MACI,UAAU,IAAI,UAAU,aAAY,GAAG,UAAU,IAAI,QAChD,KAAK,QACV,YACA,SACK,IAAI,UAAU,aAAY,GAAG,UAAU,OAAO,8BAA6B,IAAI,QAC/E,KAAK,QACV;;;AAGZ,4BAAoB,UAAU,cAAc,qBAAoB,QAC5D,MAAM,KAAK;AACf,eAAO;UACH,MACI,YAAY,IAAI,UAAU,aAAY,GAAG,UAAU,IAAI,QAClD,KAAK,QACV;;;AAIZ,uBAAiB,MAAM,MAAM;AAC7B,yBAAmB,QAAQ,MAAM
;AACjC,qBAAe,QAAQ,KAAK;AAC5B,oBAAc;AACd,UAAI,OAAO;AACP,qBAAa,GAAG,IAAI,6BAA4B;AAC5C,wBAAc,IAAI;AAClB,sBAAY,QAAQ;AACpB,gBAAM,KAAK,GAAG,mBAAkB,KAAK,MAAM,OAAO,MAAM,UAAU,OAAO,YAAY,WAAW;;AAEpG,cAAM,KAAK;AACX,qBAAa,OAAO,6BAA4B,IAAI,MAAM;AACtD,wBAAc,IAAI;AAClB,sBAAY,QAAQ;AACpB,gBAAM,KAAK,GAAG,mBAAkB,KAAK,MAAM,OAAO,MAAM,UAAU,OAAO,YAAY,WAAW,MAAM,OAAO;;;AAIjH,qBAAa,GAAG,IAAI,MAAM;AACtB,wBAAc,IAAI;AAClB,sBAAY,QAAQ;AACpB,gBAAM,KAAK,GAAG,mBAAkB,KAAK,MAAM,OAAO,MAAM,UAAU,OAAO,YAAY,WAAW,MAAM,OAAO;;;AAGrH,kBAAY,SAAS,IAAI,MAAM;AAC/B,YAAM,KAAK,MAAM,MAAM,KAAK;AAC5B,mBAAa,GAAG,IAAI,MAAM,SAAS,GAAG;AAClC,cAAM,KAAK,MAAM,MAAM,KAAK;;AAEhC,uBAAiB;AACjB,mBAAa,GAAG,IAAI,MAAM;AACtB,sBAAc;;AAElB,YAAM,MAAM,SAAS,KACjB,MAAM,MAAM,MAAM,SAAS,KAAK,MAAO,UAAS,KAAK;AACzD,aAAO;;AAEX;AACI,4BAAsB;AACtB,mBAAa,GAAG,IAAI,KAAK,QAAQ,KAAK;AAClC,sBAAc,KAAK,CAAC,KAAK,IAAI,KAAK,IAAI;;AAE1C,aAAO;;AChKX;;;;;;;;;;;;;;;;;MA4BI;AACI,aAAK,QAAQ;AACb,aAAK,QAAQ,MAAM;AACnB,aAAK,OAAO,eAAmB;AAC/B,YAAI,UAAU;AACV,oBAAU,OAAO;AACjB,kBAAY,MAAM,KAAK,MAAM,MAAM,qBAAqB,qDAC1B,KAAK;;AAEvC,YAAI,UAAU;AACV,gBAAM,IAAI,MAAM;;AAIpB,aAAK,SAAS,UAAU,mBAAuB,OAAO,KAAK;AAC3D,aAAK,UAAU,gBAAe;;MAUlC;AACI,YAAI,KAAK,WAAW;AAChB,iBAAO,CAAC;;AAEZ,gBAAY,KAAK,WAAW,KAAK,MAAM,MAAM,uCAAuC,KAAK,gCAClE,KAAK;AAC5B,uBAAc,KAAK,WAAW;AAC9B,aAAK,OAAO,UAAS;;MASzB;AACI,YAAI,KAAK,WAAW;AAChB,iBAAO,CAAC;;AAEZ,gBAAQ;AACR,0BAAkB;AACd,cAAI,MAAM,KAAK,OAAO,KAAK,MAAM;AAC7B,wBAAY,qCAAqC,wBAC3B,KAAK;AAC3B,kBAAM,IAAI,MAAM;;AAEpB;;AAEJ,qBAAY,KAAK,KAAK,SAAS;AAC/B,sBAAa,GAAG,KAAI,KAAK,SAAS,GAAG,EAAE;AACnC,oBAAS,KAAK,QAAQ,MAAK,KAAK;;AAEpC,eAAO,KAAK,OAAO;;MAEvB;AACI,YAAI,KAAK,SAAS;AACd,iBAAO;mBAEF,KAAK,SAAS;AACnB,iBAAO,KAAK;;AAEhB,qBAAY,KAAK,KAAK,SAAS;AAC/B,qBAAa,GAAG,IAAI,KAAK,SAAS,GAAG,EAAE;AACnC,oBAAS,KAAK,QAAQ,KAAK,KAAK;;AAEpC,eAAO;;MAEX;AACI,YAAI,KAAK,SAAS;AACd,iBAAO;mBAEF,KAAK,SAAS;AACnB,iBAAO,CAAC;;AAEZ,qBAAa,IAAI,MAAM,KAAK,MAAM;AAClC,qBAAa,GAAG,IAAI,KAAK,SAAS,GAAG,EAAE;AACnC,eAAK,KAAK,KAAK,MAAM,SAAQ,KAAK,QAAQ;AAC1C,oBAAS,KAAK,KAAK,KAAK,QAAQ;;AAEpC,aAAK,KAAK,SAAS,KAAK;AACxB,eAAO;;UAEP;AACA,eAAO,KAAK,MAAM;;MAOtB;AACI,eAAO,aAAY,WAAW,KAAK,QAAQ,KAAK,OAAO,KAAK;;;AAIpE,qBAAgB;AAEhB,qBAAgB;AAEhB,gCAA2B;AAI3B,KAAC;AAMM;AACH,mBAAY;;AAOT;AACH,mBAAY;;AAMT;AACH,8BAAuB;;;MAWvB;AAEI,aAAK,OAAO;AACZ,aAAK,qBAAqB;AAC1B,aAAK,QAAQ,MAAM;AACnB,aAAK,QAAQ,SAAS;AACtB,aAAK,OAAO,eAAmB;AAC/B,aAAK,UAAU,gBAAe;AAC9B,aAAK,SAAS;AACd,aAAK,KAAK;AACV,aAAK,WAAY,KAAK,OAAO,IAAI,KAAK,KAAK,aAAa;;UAExD;AACA,eAAO,KAAK,MAAM;;YAOhB;AACF,qBAAa,MAAM,KAAK;AACxB,eAAO,WAAU,OAAO,KAAK,OAAO,KAAK,OAAO;;MAMpD;AACI,eAAO,WAAU,OAAO,KAAK,OAAO,KAAK,OAAO,KAAK;;YAQnD;AACF,qBAAa,MAAM,KAAK;AACxB,eAAO,eAAc,KAAK,OAAO;;MAQrC;AACI,eAAO,eAAc,KAAK,OAAO,KAAK;;YAQpC;AACF,aAAK;AACL,sBAAa,aAAY,KAAK,KAAK;AACnC,YAAI,KAAK,UAAU;AACf,wBAAc,MAAM;AACpB;AACI,mBAAO,MAAM,IAAI,OAAK,cAAkB;;AAGxC,kBAAM,IAAI,MAAM;;;AAIxB,eAAO;;MAQX;AACI,aAAK;AACL,sBAAa,aAAY,SAAS,KAAK;AACvC,YAAI,KAAK,UAAU;AACf;AACI,mBAAO,MAAK,IAAI,OAAK,cAAkB;;AAGvC,kBAAM,IAAI,MAAM;;;AAIxB,eAAO;;YAGL;AACF,aAAK;AACL,sBAAa,MAAM,aAAY,KAAK,KAAK;AACzC,YAAI,KAAK,UAAU;AACf,iBAAO;;AAGP,iBAAO,IAAI,WAAW,MAAK;;;MAQnC;AACI,YAAI,KAAK;AACL;;AAEJ,qBAAY,cAAc;AAC1B,aAAK,qBAAqB;;UAE1B;AACA,eAAO,KAAK;;MAEhB;AACI,YAAI,KAAK;AACL,gBAAM,IAAI,MAAM;;;MAWxB,gBAAgB;AACZ,eAAO,WAAU,MAAM,MAAM;;MAMjC;AACI,aAAK;AACL,eAAO,WAAU,MAAM;;MAO3B,mBAAmB;AACf,qBAAa,KAAK;AAClB,eAAO,gBAAe,MAAM,KAAK,OAAO,KAAK,OAAO;;MAExD;AACI,aAAK;AACL,eAAO,WAAU,KAAK,MAAM;;MAEhC,qBAAqB;AACjB,aAAK;AACL,eAAO,aAAY,aAAa,MAAM,WAAW,MAAM;;;AAG/D,WAAO,eAAe,SAAQ,OAAO,aAAa;MAC9C,OAAO;AAMH,eAAO,CAAC,CAAC,YAAY,SAAS,QAAQ,QAAQ,SAAS,YAAY,QAC/D,SAAS,mBAAmB;;;4BAQV;MAC1B;AACI,cAAM,aAAa,OAAO,aAAa,OAAO,aAAa,
QAAQ;AACnE,aAAK,YAAY;AACjB,aAAK,OAAO;;MAUhB;AACI,YAAI,SAAS,UAAU,KAAK;AACxB,gBAAM,IAAI,MAAM,2BAA2B,SAAS,8BAC7B,KAAK;;AAEhC,YAAI,CAAC,aAAiB,SAAS,OAAO,KAAK;AACvC,gBAAM,IAAI,MAAM,2BAA2B,SAAS,8BAC7B,KAAK;;AAEhC,qBAAY,cAAc;AAC1B,aAAK,SAAS,SAAS;AACvB,qBAAY,OAAO,MAAM;;MAE7B;AACI,qBAAY,gBAAgB;AAC5B,aAAK,qBAAqB;;;AAGlC,WAAO,eAAe,WAAU,OAAO,aAAa;MAChD,OAAO;AACH,eAAO,oBAAoB,WAAU,SAAS,UAAU,QACpD,SAAS,kBAAkB;;;AC5XvC;;;;;;;;;;;;;;;;AAiBA,IAAC;AACG,YAAK,QAAQ;AACb,YAAK,QAAQ;AACb,YAAK,QAAQ;AACb,YAAK,QAAQ;AACb,YAAK,QAAQ;AACb,YAAK,QAAQ;AACb,YAAK,QAAQ;OACd,SAAA,QAAS,UAAA,OAAO;AAGnB;AACA,IAAC;AACG,yBAAkB,aAAa;AAC/B,yBAAkB,WAAW;AAC7B,yBAAkB,UAAU;AAC5B,yBAAkB,eAAe;OAClC,sBAAsB,sBAAoB;AAC7C;AACA,IAAC;AACG,wBAAiB,aAAa;AAC9B,wBAAiB,WAAW;AAC5B,wBAAiB,UAAU;AAC3B,wBAAiB,eAAe;OACjC,qBAAqB,qBAAmB;AAC3C;AACA,IAAC;AACG,2BAAoB,aAAa;AACjC,2BAAoB,WAAW;AAC/B,2BAAoB,UAAU;AAC9B,2BAAoB,eAAe;OACpC,wBAAwB,wBAAsB;AACjD;AACA,IAAC;AACG,6BAAsB,aAAa;AACnC,6BAAsB,WAAW;AACjC,6BAAsB,UAAU;AAChC,6BAAsB,eAAe;OACtC,0BAA0B,0BAAwB;AACrD,2BAAsB;MAClB,SAAW;MACX,OAAS;MACT,MAAQ;MACR,WAAa;;AAEV;AACH,UAAI,UAAU,YAAY,UAAU;AAChC,YAAI,UAAU,YAAY,UAAU;AAChC,iBAAO;;AAEX,cAAM,IAAI,MAAM,kBAAkB,cAAc;;AAEpD,aAAO,eAAc,OAAO;;AAGzB;AACH,aAAO,YAAW,MAAM;;ACzE5B;;;;;;;;;;;;;;;;AAmBO;AACH,UAAI,EAAE,UAAU,EAAE;AACd,eAAO,CAAC,GAAG;;AAEf,oBAAc,YAAW,EAAE,OAAO,EAAE;AACpC,aAAO,CAAC,EAAE,KAAK,QAAQ,EAAE,KAAK;;AAE3B;AACH,cAAO,EAAE,UAAU,EAAE,OAAO,MAAM,2BAA2B,EAAE,qBAChD,EAAE;;AAEd;AACH,aAAO,WAAW,KAAK,OAAK,EAAE,OAAO,QAAO;;AAczC;AACH,mBAAa;AACb,mBAAa,IAAI;AACjB,2BAAoB,QAAQ,MAAM;AAClC,aAAO;;AAEX;AACI,UAAI,aAAa;AACb;;AAEJ,UAAI,qBAAqB;AACrB,aAAK,KAAK;AACV;;AAEJ,UAAI,CAAC,YAAW;AACZ;;AAGJ,uBAAiB;AACjB,sBAAgB;AACZ,oBAAY,SAAS;AACrB,YAAI,CAAC,KAAK,IAAI;AACV,eAAK,IAAI;AACT,+BAAoB,KAAK,MAAM;;;;AAK3C;AACI,aAAO,MAAM,QAAQ,QAAQ,OAAO,QAAQ;;;;;;;;;AC1EhD;;;;;;;;;;;;;;;;;MA4BI;AAEI,aAAK,sBAAsB;AAC3B,aAAK,iBAAiB;AACtB,aAAK,WAAW;AAChB,aAAK,aAAa;AAClB,aAAK,mBAAmB;AACxB,aAAK,iBAAiB;AAItB,aAAK,gBAAgB;AAGrB,aAAK,cAAc;AACnB,aAAK,aAAa;AAKlB,aAAK,oBAAoB;AACzB,aAAK,cAAc;AACnB,aAAK,aAAa,IAAI;AACtB,aAAK,YAAY;AACjB,aAAK,gBAAgB,CAAE,UAAU,GAAG,YAAY,GAAG,WAAW,GAAG,SAAS,IAAI,QAAQ;;MAE1F;AACI,mCAA2B,KAAK;AAC5B,eAAK,oBAAoB,cAAc;;;;;MAK/C;AACI,aAAK,MAAM;AACX,aAAK,WAAW;AAChB,aAAK,kBAAkB;AACvB,aAAK,uBAAuB;AAC5B,aAAK,QAAQ,IAAI;;YAEf;AACF,YAAI,KAAK,sBAAsB;AAC3B,iBAAO,KAAK,mBAAmB,KAAK;;;AAExC,YAAI,KAAK,mBAAmB;AACxB;;AAEJ,+BAAuB,KAAK;AAC5B,qBAAa,GAAG,IAAI,eAAe,QAAQ;AACvC,8BAAoB,eAAe;AACnC,0BAAgB,MAAM,KAAK,kBAAkB,aAAa;AAC1D,cAAI;AACA,kBAAM,KAAK,WAAW;AACtB;;;AAGR,cAAM,IAAI,MAAM;;UAGhB;AACA,YAAI,KAAK,sBAAsB;AAC3B,gBAAM,IAAI,MAAM,YAAY,KAAK;;AAIrC,YAAI,KAAK,mBAAmB;AACxB,iBAAQ,MAAM,aAAc,KAAK;AACjC,cAAI;AACA,kBAAM,IAAI,MAAM,iCAAiC;;AAIrD,eAAK,WAAW;;AAEpB,eAAO,KAAK;;MAEhB;AACI,eAAO,OAAO,KAAK,KAAK;;MAE5B;AACI,YAAI,CAAE,gBAAe,KAAK;AAGtB,cAAI,eAAe,KAAK;AACpB,mBAAQ,aAAc,KAAK,kBAAkB;AAC7C,gBAAI;AAEA,qBAAO;;;AAIX,mBAAO;;;AAGf,eAAO,KAAK,SAAS;;MAEzB;AACI,YAAI,CAAE,gBAAe,KAAK;AACtB,iBAAO;;AAEX,eAAO,KAAK,gBAAgB,aAAa;;MAE7C,iDAAiD;AAC7C,YAAI,eAAe,KAAK;AACpB,kBAAQ,KAAK,GAAG;AAEhB,iBAAO;;AAEX,aAAK,gBAAgB,eAAe,CAAE,SAAS;AAC/C,eAAO;;YAEL;AACF,YAAI,KAAK,gBAAgB,gBAAgB;AACrC,gBAAM,IAAI,MAAM,iBAAiB;;AAErC,aAAK,cAAc;AACnB,YAAI,KAAK,SAAS,gBAAgB;AAC9B,eAAK,kBAAkB;AACvB,iBAAQ,SAAS,aAAc,KAAK,kBAAkB;AACtD,yBAAe,YAAY,MAAM,UAAU;AAC3C,cAAI,CAAC;AACD,mBAAO;;;AAGf,aAAK,kBAAkB,KAAK,SAAS;AACrC,aAAK;AAEL,aAAK,WAAW,IAAI,UAAS,KAAK;AAClC,eAAO;;MAEX;AACI,wBAAgB,sBAAqB,KAAK;AAC1C,gBAAQ,QAAQ;AACZ,cAAI,OAAO,aAAa;AACpB,mBAAO,UAAU,KAAK;;;;MAIlC;AACI,wBAAgB,sBAAqB;AACrC,gBAAQ,QAAQ;AACZ,cAAI,OAAO,eAAe;AACtB,mBAAO,YAAY,KAAK,SAAS;;;;MAU7C;AACI,qCAA6B,KAAK,gBAAgB;AAClD,YAAI,wBAA
wB;AACxB,gBAAM,IAAI,MAAM,6BAA6B;;AAEjD;AACI,2BAAgB,qBAAqB;AAMrC,cAAI,YAAW,CAAE,qBAAmB,mBAC7B,OAAO,SAAQ,SAAS;AAC3B,8BAAkB,EAAE,KAAK;AACzB,4BAAgB,SACX,KAAK;AAEN,kBAAI,YAAY,KAAK;AACjB,uBAAO;;AAEX,mBAAK,SAAS,eAAe;AAC7B,mBAAK,qBAAqB;AAC1B,qBAAO;eAEN,MAAM;AAEP,kBAAI,YAAY,KAAK;AACjB,uBAAO;;AAEX,mBAAK,qBAAqB;AAC1B,sBAAQ,KAAK,6BAA6B;AAC1C,sBAAQ,KAAK,IAAI,SAAS,IAAI;AAC9B,qBAAO;;AAEX,iBAAK,qBAAqB;AAC1B,mBAAO,CAAE,SAAS,WAAW;;AAG7B,iBAAK,SAAS,eAAe;AAC7B,mBAAO,CAAE,SAAS,MAAM,WAAW;;;AAIvC,kBAAQ,KAAK,6BAA6B;AAC1C,kBAAQ,KAAK,IAAI,SAAS,IAAI;AAC9B,iBAAO,CAAE,SAAS,OAAO,WAAW;;;MAG5C;AACI,YAAI,CAAE,gBAAe,KAAK;AACtB,gBAAM,IAAI,MAAM,GAAG;;AAEvB,YAAI,KAAK,gBAAgB,eAAe,KAAK,sBAAsB;AAG/D,eAAK;;AAET,YAAI,eAAe,KAAK;AACpB,eAAK,yBAAyB;AAC9B,eAAK,SAAS,aAAa;AAC3B,iBAAO,KAAK,SAAS;;AAEzB,eAAO,KAAK,gBAAgB;AAE5B,YAAI,KAAK,gBAAgB;AACrB,eAAK,qBAAqB;AAC1B,eAAK,cAAc;AACnB,eAAK,kBAAkB;;;MAG/B;AACI,YAAI,OAAO,KAAK,KAAK,iBAAiB,WAAW;AAC7C,gBAAM,IAAI,MAAM;;AAEpB,eAAO,OAAO,KAAK,KAAK,iBAAiB,KAAK;AAE1C,iBAAO,KAAK,gBAAgB,GAAG,WAC3B,KAAK,gBAAgB,GAAG;;;MAGpC;AACI,+BAAuB,KAAK;AAC5B,qBAAa,GAAG,IAAI,eAAe,QAAQ;AACvC,8BAAoB,eAAe;AACnC,iBAAQ,SAAS,aAAc,KAAK,kBAAkB;AACtD,cAAI,aAAa;AACb,mBAAO,CAAE,MAAM,aAAa;;;AAGpC,cAAM,IAAI,MAAM;;MAGpB;AACI,qBAAa,KAAK,MAAM,WAAW,IAAI;AACvC,2BAAmB,KAAK;AACxB,uBAAe,KAAK,SAAS;AAG7B,mBAAW,YAAY;AACvB,aAAK,UAAU;AACf,iBAAQ,KAAK,QAAQ,QAAQ,KAAK,OAAO,KAAK;AAC9C,YAAI,KAAK;AAGL,eAAK,MAAM,kBAAkB,KAAK,MAAM,kBAAkB,SAAS;;;MAG3E;AACI,mBAAW;AACX,YAAI,MAAM;AAEN,cAAI,OAAO,aAAa;AACpB,kBAAM,IAAI,MAAM;;AAEpB,eAAK;;AAIL,cAAI,OAAO,aAAa,YAAY,CAAE,qBAAoB;AACtD,kBAAM,IAAI,MAAM;;AAGpB,cAAI,OAAO,OAAO;AACd,kBAAM,IAAI,MAAM;;AAGpB,iBAAO;;AAIX;AACA,eAAO,KAAK,UAAU,MAAM,KAAK,WAAW,OAAO,MAAM,KAAK,SAAS,SAAS;AAC5E,mBAAS;AACT,cAAI,kBAAkB;AAClB,oBAAQ,MAAM;;AAElB,iBAAO;;;MAGf;AACI;AACA;AACI,sBAAY;AACZ;AACA,iBAAO;;AAGP;AACA,gBAAM;;;MAGd;AACI,eAAO,QAAO;;MAElB;AACI,eAAO,QAAO;;MAWlB;AACI,kBAAU,KAAK,qBAAqB,EAAE,QAAQ,EAAE,OAAO,EAAE;AACzD,uBAAe,CAAE;AACjB,sBAAa,QAAS;UAClB,GAAG;AACC,0BAAc;AACd,+BAAmB,CAAE,GAAG;AACxB,0BAAc,CAAE;AAChB,mBAAO,QAAO,cAAc,cAAW,SAAQ,KAAK,IAAI,QAAQ,YAAY,MAAiB,OAAM;;;AAG3G,sBAAc;AACd,aAAK,YAAY,KAAK,MAAM,YAAY,MAAM,QAAQ,CAAC,IAAI,OAAM,OAAO;AACxE,eAAO;;MAeX;AACI,4BAAoB;AACpB,8BAAsB;AAItB,eAAO,KAAK,cAAc,aAAa,QAAQ,eAAe,YAAY,OAAO,cAAc;;MAEnG;AACI,eAAO,KAAK,IAAI,QAAQ;;MAE5B;AACI,gCAAwB,KAAK,QAAQ;AAErC,+BAAuB;AACvB,iBAAS,QAAQ;AAGb,8BAAqB,KAAK,UAAU,cAAc,IAAI;;AAO1D,yBAAiB,KAAK,MAAM,kBAAkB,KAAK,MAAM,kBAAkB,SAAS;AACpF,8BAAsB,kBAAkB,mBAAmB,mBAAmB;AAC9E,YAAI,gBAAgB;AAChB,gBAAM,IAAI,MAAM,YAAY,KAAK,6CACzB,0CAA0C;;;MAO1D;AACI;AACA,oBAAY;AACZ,yBAAiB,KAAK;AACtB,YAAI,cAAc;AACd,uBACI,KAAK,MAAM,eAAe,OAAO,KAAK,MAAM,YAAY,OAAO;;AAEvE,kCAA0B,KAAK,MAAM;AACrC,mCAA2B,KAAK,MAAM;AACtC,YAAI,KAAK;AACL,eAAK,MAAM,kBAAkB,KAAK;;AAEtC;AACA,uBAAe,WAAU,YAAY,KAAK;AAC1C;AACA,YAAI,UAAU;AACV,wBAAa;AACT,qCAAyB,KAAK,QAAQ;AACtC,kBAAM,OAAO,WAAW,CAAE,QAAQ,OAAO,SAAS,KAAK;AACvD,6BAAiB,MAAM,QAAQ,OAAO,MAAM,CAAC;AAC7C,gBAAI,KAAK;AACL,mBAAK,sBAAsB,YAAY,kBAAkB;;AAE7D,+BAAmB,SAAS,IAAI,EAAG,QAAQ,OAAO,WAAY,KAAK,qBAAqB,QAAQ,OAAO;AAKvG,gBAAI;AACA,kCAAoB,KAAK,sBAAsB,YAAY,QAAQ;AACnE,kBAAI,iBAAiB;AAKjB,oBAAI,iBAAiB;AACjB,kCAAgB;;AAEpB,mCAAmB,WAAW,OAAO,UAAU,cAAc;AAC7D,gCAAiB,iBAAgB,IAAI,QAAQ,OAAO;;AAExD,sBAAQ,KAAK,2BAA2B;;AAE5C,mBAAO;;;AAIX,2BAAiB;AAIb,gBAAI,CAAC;AACD;;AAEJ,oBAAQ,QAAQ,IAAI,aAAU,KAAK,KAAK,KAAK,MAAM;;AAEvD,wBAAa;AACT,qCAAyB,KAAK,QAAQ;AACtC,kBAAM,KAAK,KAAK,MAAM,YAAY,KAAK,SAAS;AAChD,yBAAc,MAAM,QAAQ,OAAO,MAAM,CAAC;AAC1C,gBAAI,KAAK;AACL,mBAAK,sBAAsB,YAAY,kBAAkB;;AAE7D,mBAAO;;;AAIf;AACA,aAAK,UAAU,MAAM,KAAK,MAAM,eAAe,MAAM,KAAK,MAAM,eAAe;AAC3E,cAAI,CAAC,KAAK,IAAI,QAAQ,YAAY,CAAC,KAAK,MAAM;AAC1C,sBAAU;
;AAGV,4BAAgB,KAAK,SAAS,cAAc,YAAY,QAAQ,MAAM;AACtE,gBAAI,KAAK,IAAI,QAAQ;AACjB,mBAAK,SAAS,iBAAiB;;AAEnC,sBAAU,cAAc;;;AAGhC,YAAI;AACA,eAAK,YAAY,YAAY,QAAQ,SAAS,eAAe,OAAO;;AAExE,YAAI,KAAK,MAAM;AACX,eAAK,MAAM,cAAc,QAAQ,KAAK;YAClC,MAAM;YACN,YAAY,KAAK,MAAM,WAAW;YAClC,oBAAoB,KAAK,MAAM;YAC/B,cAAc,KAAK,MAAM,aAAa;YACtC,sBAAsB,KAAK,MAAM;YACjC,aAAa,OAAO,KAAK,QAAQ,IAAI,SAAO,OAAO,QAAQ,OAAO,OAAO,KAAK,QAAQ;YACtF,cAAc,QAAQ,IAAI,UAAQ,KAAK;YACvC,cAAc,cAAc;YAC5B,WAAW,cAAc;;;AAGjC,eAAQ,MAAM,QAAQ,OAAO,UAAU,QAAQ;;MAOnD;AACI,sBAAc,QAAQ,IAAI,aAAU,KAAK,KAAK,KAAK,MAAM;AACzD,eAAO;;MAYX;AACI,2BAAmB,aAAY;AAC/B,YAAI,cAAc;AACd,+BAAqB,WAAW,gBAAgB;AAChD,gCAAsB,WAAW,iBAAiB;AAGlD;AACA,cAAI,WAAW;AACX,oBAAY,MAAM,QAAQ,SAAS,MAAM;AACzC,iCAAqB,OAAO,KAAK,QAAQ,IAAI,SAAS,OAAO;;AAG7D,iCAAqB,aAAa,IAAI,eAAe,OAAO;;AAEhE,sCAA4B,QAAQ,OAAO,UAAU,cAAc;AACnE,iBAAO,mBAAmB,OAAO;;AAIrC,eAAO;;MAOX;AACI,YAAI,UAAU;AACV,gBAAM,IAAI,MAAM;;AAEpB,gBAAQ,SAAS;AACjB,mBAAU,YAAW,KAAK;AAC1B,0BAAkB;AAClB,YAAI,UAAU,YAAY,UAAc,OAAO;AAC3C,wBAAc,OAAO,IAAI,OAAK,cAAkB;;AAEpD,uBAAe,SAAQ,MAAM,aAAa,OAAO;AACjD,kBAAU,IAAI,QAAO,OAAO,OAAO,QAAQ,KAAK;AAChD,aAAK,OAAO,GAAG;AAEf,YAAI,UAAU;AACV,uBAAa,KAAK,MAAM,WAAW,IAAI;AACvC,2BAAiB,sBAAqB;AACtC,eAAK,MAAM,YAAY,WAAW,KAAK;AACvC,eAAK,QAAQ;;AAEjB,eAAO;;MAOX;AACI,gBAAQ,SAAS;AACjB,kBAAU,IAAI,QAAO,OAAO,OAAO,QAAQ,KAAK;AAChD,aAAK,OAAO,GAAG;AACf,eAAO;;MAEX,uCAAuC;AACnC,eAAO,QAAQ,KAAK,iBAAiB;AACrC,YAAI,SAAS,QAAQ,UAAU,aAAa;AACxC,yBAAe,aAAa,KAAK;;AAErC,kBAAU,IAAI,UAAS,cAAc,WAAW,MAAM,KAAK;AAC3D,YAAI,KAAK,MAAM,oBAAoB,EAAE,SAAS;AAC1C,gBAAM,IAAI,MAAM,sBAAsB,EAAE;;AAE5C,aAAK,MAAM,oBAAoB,EAAE,QAAQ;AACzC,aAAK,OAAO,GAAG,KAAK;AACpB,eAAO;;MAEX;AACI,yBAAiB,KAAK,MAAM,WAAW,IAAI,EAAE,UACzC,KAAK,MAAM,WAAW,IAAI,EAAE,QAAQ,WACpC;AACJ,aAAK,MAAM;AACX,YAAI,EAAE,UAAU;AACZ,eAAK,MAAM;;AAEf,YAAI,aAAa;AACb,eAAK,MAAM;AAGX,sBAAY;AACZ,cAAI,EAAE,UAAU,eAAe,EAAE,UAAU;AACvC,oBAAQ,EAAE,OAAO,iBAAqB,EAAE;;AAE5C,eAAK,MAAM,WAAW,IAAI,EAAE,QAAQ;YAChC,SAAS,YAAW,KAAK;YACzB,OAAO,EAAE;YACT,OAAO,EAAE;YACT;YACA,UAAU;;AAEd,eAAK,MAAM,YAAY;;AAE3B,aAAK,MAAM,WAAW,IAAI,EAAE,QAAQ;AACpC,YAAI,CAAE,cAAa;AACf,eAAK,MAAM;;;MAGnB;AACI,YAAI,CAAC,KAAK,MAAM,WAAW,IAAI,EAAE;AAC7B;;AAEJ,aAAK,MAAM;AACX,YAAI,EAAE,UAAU;AACZ,eAAK,MAAM;;AAEf,qBAAa,KAAK,MAAM,WAAW,IAAI,EAAE;AACzC,yBAAiB,KAAK;AACtB,YAAI,YAAY;AAGZ,cAAI,EAAE,UAAU;AACZ,iBAAK,MAAM,YAAY,KAAK;;AAEhC,eAAK,MAAM;AACX,eAAK,QAAQ,YAAY,EAAE;AAC3B,eAAK,MAAM,WAAW,OAAO,EAAE;;AAG/B,eAAK,MAAM,WAAW,IAAI,EAAE,QAAQ;;;MAM5C;AACI,8BAAsB,KAAK,MAAM;AAC7B,oBAAU,KAAK,MAAM,oBAAoB;AACzC,eAAK,gBAAgB;;;MAG7B;AACI,aAAK,cAAc;AACnB,YAAI,KAAK,MAAM,oBAAoB,EAAE,SAAS;AAC1C,iBAAO,KAAK,MAAM,oBAAoB,EAAE;;;MAGhD;AACI,qBAAa,KAAK,QAAQ;AAC1B,aAAK,aAAa,KAAK,MAAM;AAC7B,aAAK,iBAAiB,KAAK,MAAM;AACjC,aAAK,WAAW,KAAK,MAAM;AAC3B,YAAI,KAAK,MAAM,mBAAmB;AAC9B,eAAK,aAAa;AAClB,cAAI,KAAK,WAAW;AAChB,iBAAK,UAAU;;AAEnB,eAAK,QAAQ,KAAK;;AAGtB,eAAO;;YAEL;AACF,aAAK,MAAM,YAAY;AACvB,2BAAmB,KAAK,MAAM;AAC9B,gCAAwB,KAAK,MAAM;AACnC,aAAK,MAAM,cAAc,UAAU;AACnC,aAAK,MAAM,cAAc,SAAS,MAAM;AACxC,aAAK,MAAM,YAAY;AACvB,aAAK,MAAM,cAAc,YAAY,KAAK,IAAI,GAAG,KAAK,MAAM,cAAc,QAAQ,IAAI,OAAK,EAAE;AAC7F,aAAK,MAAM,cAAc,WAAW,KAAK,MAAM,WAAW;AAC1D,aAAK,MAAM,cAAc,aACrB,KAAK,MAAM,aAAa;AAC5B,6BAAqB,KAAK,MAAM,cAAc;AAC1C,iBAAO,eAAe,MAAM,OAAO;AACnC,iBAAO,YAAY,MAAM,OAAO;;AAEpC,eAAO,KAAK,MAAM;;MAEtB;AACI,eAAO,KAAK,MAAM,gBAAgB,KAAK,KAAK,MAAM,gBAAgB;;MAEtE;AACI,yBAAiB,CAAE,IAAI,KAAK,MAAM,kBAAkB,YAAY,QAAQ,SAAS;AACjF,2BAAmB,aAAY;AAC/B,YAAI,cAAc;AACd,0BAAgB,WAAW;;AAE/B,YAAI,iBAAiB;AACjB,mBAAS,WAAW;AAGhB,kBAAM,IAAI,IAAI;AACV,kBAAI,MAAM;AACN,+BAAe,QAAQ;AACvB,6BAAa,qBAAyB,OAAO,MAAM,OAAO;AAC1D,uBAAO,KAAK,WAAW,MAAM,OAAO,OAAO,OAAO;;AAEtD,qBAAO;;AAIX,mBAAO,cAAc,IAAI,SA
AS,IAAI,MAAM,IAAI,IAAI,OAAO;;;AAGnE,aAAK,MAAM,WAAW,KAAK;;MAE/B;AACI,eAAO,OAAO;AACd,eAAO;;MAEX;AACI,YAAI,KAAK,MAAM,kBAAkB;AAC7B,eAAK,MAAM,aAAa;;AAE5B,aAAK,MAAM;;MAEf;AACI,aAAK,MAAM;;MAMf;AACI,0BAAkB;UACd,OAAO;UACP,MAAM;UACN,IAAI,KAAK,MAAM;;AAEnB,YAAI;AACA,oBAAU,OAAO;;AAErB,aAAK,MAAM,WAAW,KAAK;AAC3B,aAAK,MAAM,cAAc;;MAM7B;AACI,uCAA+B,uBAAsB;AACrD,0CAAkC,IAAI,IAAI,uBAAuB,IAAI,OAAK,EAAE;AAE5E,qBAAa,GAAG,IAAI,KAAK,MAAM,YAAY,MAAM,QAAQ;AACrD,0BAAe,KAAK,MAAM,YAAY,MAAM;AAC5C,cAAI,CAAC,QAAO,QAAQ,CAAC,0BAA0B,IAAI,QAAO;AACtD,oBAAO;;;AAGf,yBAAiB,KAAK,MAAM,WAAW;AACvC,aAAK,MAAM,cAAc,KAAK,MAAM,WAAW,WAAW,IACtD,OACA,KAAK,MAAM,WAAW,KAAK,MAAM,WAAW,SAAS;AAEzD,+BAAuB,QAAQ;AAG3B,cAAI,CAAC,QAAO,QAAQ,QAAO,YAAY,SAAS;AAC5C,iBAAK,MAAM;;;;MAUvB,wCAAwC;AACpC,gBAAY,GAAG,SAAS,GAAG,MAAM;AACjC,YAAI,MAAM,QAAQ,GAAG,UAAU;AAC3B,gBAAM,IAAI,MAAM,0CAA0C,GAAG;;AAEjE,kBAAU,KAAK,UAAU,MAAM,KAAK,aAAa,MAAM,KAAK,WAAW,MAAM,KAAK,KAAK,WAAW;AAClG,gBAAY,aAAa,SAAQ,MAAM;AAEvC,6BAAqB,sBAAqB,KAAK,MAAM,YAAY,IAAI;AACrE,YAAI,CAAC,oBAAoB,aAAa,WAAW,KAAK,GAAG,SAAS;AAC9D,gBAAM,IAAI,MAAM;;AAIpB,eAAO,KAAK,KAAK,YAAY;AACzB,yCAA+B;AAC/B,iCAAuB,EAAE,MAAO,MAAM,OAAQ,MAAK,EAAE,SAAS;AAE9D,kCAAuB,wBAAwB,cAE/C,QAAK,KAAK,KAAK,KAEf;AACA,yBAAc,GAAG,IAAI,OAAK,uBAAuB,EAAE;AACnD,cAAI,KAAK,MAAM,kBAAkB;AAG7B,iBAAK,MAAM,WAAW,QAAQ;AAC1B,oCAAqB,KAAK;AACtB,wBAAO;;;AAGf,iBAAK,MAAM,aAAa;;AAE5B,iBAAO,CAAE,OAAO,GAAG;;;MAG3B;AACI,gBAAY,YAAgB,IAAI,MAAM;AACtC,eAAO;AACH,kBAAY,OAAO,MAAM,OAAK,aAAa,UAAS,MAAM;AAE1D;AACA,2BAAiB;AACjB,iBAAO,QAAQ;AACX,qBAAS,KAAK;;AAElB,iBAAO,KAAK,cAAc;AACtB,kBAAM,EAAE,GAAG,CAAC,GAAG,QAAQ;AACvB,oBAAY,IAAI,iBAAiB,SAAQ,MAAM;AAE/C,oBAAY,YAAgB,IAAI,WAAW,MAAM;AAEjD,mBAAO,IAAI;aACZ,UAAU;AACT,4BAAgB,IAAI,SAAS,IAAI;AACjC,2BAAc,MAAM,QAAQ,WAAW,UAAU,CAAC;AAClD,oBAAY,OAAM,WAAW,OAAO,QAAQ,MAAM;AAGlD,oBAAY,OAAM,MAAM,OAAK,aAAa,UAAS,MAAM;AAGzD,4BAAgB;AAChB,mBAAM,QAAQ;AACV,sBAAQ,KAAK,MAAM;;AAEvB,mBAAO;;;;MAInB;AAEI,qBAAa,KAAK,MAAM,WAAW,IAAI;AACvC,eAAO,KAAK,QAAQ,SAAS;;MAEjC;AAEI,qBAAa,KAAK,MAAM,WAAW,IAAI;AACvC,eAAO,KAAK,QAAQ,KAAK;;YAEvB;AACF,sBAAc;AACd,2BAAmB,MAAM,KAAK,QAAQ,KAAK;AAC3C,mBAAW,SAAS,SAAQ;AAC5B,eAAO;;MAQX;AACI,YAAI,KAAK,MAAM,eAAe;AAC1B,iBAAO,UAAU,KAAK,MAAM,YAAY;AACxC,eAAK,MAAM,YAAY,MAAM,KAAK;;AAEtC,eAAO;;UAEP;AACA,eAAO,KAAK,MAAM;;MAMtB;AAEI,aAAK;AACL,aAAK,MAAM;AACX,aAAK,IAAI;AACT,aAAK,QAAQ,IAAI;AACjB,kCAA0B,KAAK;AAC3B,eAAK,yBAAyB;AAC9B,eAAK,SAAS,aAAa;AAC3B,iBAAO,KAAK,SAAS;;AAEzB,aAAK,cAAc;AACnB,aAAK,kBAAkB;AACvB,aAAK,qBAAqB;;;AAGlC,YAAO,eAAe;AACtB,YAAO,iBAAiB;AACxB;AACI,qBAAe,oBAAmB,eAAc,QAAQ;AACxD,aAAO,QAAO,WAAW,QAAQ,OAAO;;AAErC;AACH,iBAAW;AACX,UAAI,GAAG,aAAa;AAChB,6BAAoB,IAAI,aAAY;AACpC,WAAG,YAAY,IAAI,QAAO;;AAE9B,4BAAqB,GAAG,UAAU;AAGlC,wBAAiB,MAAM,GAAG;AAC1B,aAAO,GAAG;;AAEP,oBAAe;AAOf;AAEH,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,oBAAY,SAAQ,IAAI,GAAG;AAC3B,aAAK,CAAC,GAAG;AACT,eAAO;SACR,QAAQ,MAAqB;;ACp7BpC;;;;;;;;;;;;;;;;AAiBA;AACI,aAAO,OAAO,cAAc,eAAe,aAAa;;AAErD;AACH,UAAI;AAEA,kBAAU,UAAU,aAAa,UAAU,UAAU,OAAO;AAE5D,eAAO,2TACF,KAAK,MAEN,0kDACK,KAAK,EAAE,OAAO,GAAG;;AAE9B,aAAO;;AAEJ;AACH,aAAQ,OAAO,WAAW,eAAe,OAAO,YAAY,QAEvD,OAAO,sBAAsB;;;;;;;ACpCtC;;;;;;;;;;;;;;;;AAmBA,iBAAY;AAKZ,SAAI,aAAa,SAAS,MAAM,OAAO;AACnC,UAAI;AACA,gBAAQ,KAAK;;;AAMrB,SAAI,aAAa,cAAc,MAAM;AAErC,SAAI,aAAa,WAAW,MAAO,OAAO,YAAY,eACjD,OAAO,QAAQ,aAAa,eAC5B,OAAO,QAAQ,SAAS,SAAS;AAEtC,SAAI,aAAa,aAAa,MAAM,OAAO,cAAc,eAAe,aAAa,QACjF,UAAU,aAAa,QAAQ,SAAS,KAAK,UAAU,cACvD,aAAa,KAAK,UAAU;AAKhC,SAAI,aAAa,QAAQ,MAAM;AAK/B,SAAI,aAAa,sCAAsC,MAAM,KAAI,QAAQ;AAEzE,SAAI,aAAa,gCAAgC,MAAM;AAEvD,SAAI,aAAa,WAAW,MAAM;ACtDlC;;;;;;;;;;;;;;;;AAoBO;AACH,sBAAgB;AAChB,UAAI,cAAa;AACb,eAAO,UAAU,WAAW,KAAK,CAAC,IAAI;;AAE1C,UAAI,CAAC,MAAM,Q
AAQ;AACf,eAAO;;AAEX,oBAAc;AACd,aAAO,MAAM,QAAQ,cACjB,cAAa,cAAc,UAAU;AACrC,cAAM,KAAK,UAAU;AACrB,oBAAY,UAAU;;AAE1B,UAAI,MAAM,QAAQ,QACd,OAAM,QAAQ;AACd,oCAA2B,KAAK,OAAO;;AAE3C,aAAO;;AAEX;AACI,gBAAU,WAAW;AACrB,UAAI,CAAE,MAAM,QAAQ,QAAS,CAAC,cAAa;AACvC,gBAAO,MAAM,WAAW,GAAG,MAAM,eAAe,QAAQ,KAAK,+DACjB,MAAM;AAClD;;AAEJ,cAAO,MAAM,SAAS,GAAG,MAAM,eAAe,QAAQ,KAAK,oDACjC,IAAI;AAC9B,cAAO,IAAI,WAAW,MAAM,IAAI,MAAM,eAAe,QAAQ,KAAK,sBAAsB,MAAM,wBACrE,IAAI;AAC7B,uBAAiB,MAAM,MAAM;AAC7B,mBAAa,GAAG,IAAI,IAAI,QAAQ,EAAE;AAC9B,oCAA2B,IAAI,IAAI,UAAU,QAAQ,OAAO;;;AAGpE;AACI,UAAI,iBAAiB;AACjB;;AAEJ,UAAI,kBAAkB,aAAa,kBAAkB,eACjD,kBAAkB,aAAa,gBAAgB;AAC/C,cAAM,IAAI,MAAM,aAAa,uBAAuB,yBAC1C,iCAAiC;;;AAG5C,uEAAkE;AACrE,UAAI,aAAa;AACb,qBAAY,cAAc,EAAE,OAAO,SAAS;AAC5C,eAAO;;AAEX,0BAAoB,YAAW;AAG/B,UAAI,kBAAkB,YAClB,CAAC,QAAQ,SAAS,WAAW,QAAQ,iBAAiB;AACtD,wBAAgB;;AAEpB,mBAAY,cAAc,eAAe,SAAS;AAClD,UAAK,KAAK,QACL,CAAC,cAAa,MAAM,CAAC,MAAM,QAAQ,MAAM,OAAO,MAAM,YACnD,OAAO,MAAM,aAAa,OAAO,MAAM;AAC3C,qBAAa,KAAK,OAAO,SAAS,EAAE,YAAY;AAChD,cAAM,IAAI,MAAM,aAAa,uBAAuB,0DACd;;AAE1C,4BAAsB,YAAW,GAAG;AACpC,UAAI,CAAC,cAAa,MAAM,CAAC,MAAM,QAAQ;AACnC,YAAI,CAAC;;AAET,6BAAuB;AACvB,qBAAe,kBAAkB,WAC7B,cAAa,GAAG,iBAChB,SAAQ,GAAG,IAAI;AACnB,aAAO,QAAO,WAAW,QAAQ,eAAe;;AAE7C,6EAAyE;AAC5E,UAAI,CAAC,MAAM,QAAQ;AACf,cAAM,IAAI,MAAM,YAAY,qBAAqB;;AAGrD,sBAAgB;AAChB,aAAO,QAAQ,IAAI,UAAU,iBAAgB,GAAG,GAAG,WAAW,MAAM,eAAe;;ACtGvF;;;;;;;;;;;;;;;;AAkBY,6BAAmB;AAMxB;AACH,mBAAa,OAAO,KAAK;AACzB,UAAI,KAAK,WAAW;AAChB,cAAM,IAAI,MAAM,yGAET,KAAK;;AAEhB,mBAAa,KAAK;AAClB,iBAAW,EAAE;AAEb,UAAI,OAAO,SAAS;AAChB,iBAAS,OAAO,UAAU,GAAG,OAAO,SAAS;;AAGjD,eAAS,SAAS;AAElB,iBAAW;AACP,gBAAO,WAAW;AAClB;AACI,yBAAe,GAAG,GAAG;AACrB,cAAI,WAAU;AACV,oBAAQ,MAAM;;AAElB,kBAAO,SAAS;AAChB,iBAAO;;AAGP,kBAAO,SAAS;AAChB,gBAAM;;;AAGd,aAAO,eAAe,IAAI,QAAQ,CAAE,OAAO,QAAQ,cAAc;AAEjE,aAAO;;ACzDX;;;;;;;;;;;;;;;;AAyCA;AACI,oBAAc,iBAAgB,OAAM,QAAQ;AAC5C,oBAAc,iBAAgB,OAAM,QAAQ;AAC5C,yBAAuB,MAAM,OAAO,MAAM,OAAO,yBAAyB,MAAM,aAAa,MAAM;AAEnG,sBAAgB;AACZ,eAAO,SAAQ,QAAQ,OAAO;;AAElC,qBAAe,CAAE,MAAM,OAAO,MAAM;AACpC,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAqB;;AAE1D,qBAAW,IAAG,CAAE;ACpD5B;;;;;;;;;;;;;;;;AAmBO;AACH,UAAI,SAAS;AACT,gBAAQ,YAAW;;AAEvB,UAAI,UAAU;AACV,cAAM,IAAI,MAAM;;AAGpB,UAAI,CAAC,cAAa,WAAW,CAAC,MAAM,QAAQ,WACxC,OAAO,WAAW,YAAY,OAAO,WAAW,aAChD,OAAO,WAAW;AAClB,cAAM,IAAI,MAAM;;AAGpB,UAAI,SAAS;AACT,4CAAmC;AACnC,6BAAqB,eAAc;AACnC,6BAAqB,eAAc;AACnC,gBAAO,iBAAiB,cAAc,MAAM,iCAAiC,kCACtE,+BAA+B;AACtC,qBAAa,GAAG,IAAI,cAAc,QAAQ,EAAE;AACxC,2BAAiB,cAAc;AAC/B,oCAA0B,MAAM,cAAc,SAAS,IACnD,aAAa,eAAc,MAAM,MAAM,MACvC;AACJ,kBAAO,cAAc,OAAO,MAAM,MAAM,CAAC,mBAAmB,MAAM,gDAC1D,qDACM;;;AAGtB,UAAI,CAAC,cAAa,WAAW,CAAC,MAAM,QAAQ;AACxC,iBAAS,CAAC;;AAEd,cAAQ,SAAS;AACjB,eAAS,UAAU,WACf,cAAa,QAAQ,SACrB,SAAQ,QAAQ,IAAI;AACxB,aAAO,QAAO,WAAW,QAAQ,OAAO;;ACxD5C;;;;;;;;;;;;;;;;AA8CO;AACH,4BAAsB,YAAW,QAAQ;AACzC,aAAO,YAAW,QAAQ,OAAO,eAAe;;AChDpD;;;;;;;;;;;;;;;;AAoBO,iCAA6B;MAChC,SAAW;MACX,SAAW;MACX,OAAS;MACT,QAAU;MACV,OAAS;MACT,MAAQ;MACR,WAAa;;AC3BjB;;;;;;;;;;;;;;;;AAqBA,oCAAgC;AAkBzB;AAEH,oBAAc;AACd,2BAAqB;AACrB,oBAAc,MAAM,QAAQ,WACxB,QAAQ,IAAI,aAAU,QAAO,QAC7B,OAAO,KAAK;AAChB,mBAAa,GAAG,IAAI,MAAM,QAAQ,EAAE;AAChC,qBAAa,MAAM;AACnB,kBAAU,MAAM,QAAQ,WAAW,QAAQ,GAAG,SAAS,QAAQ;AAC/D,YAAI,EAAE,UAAU,aAAa,EAAE,UAAU,WAAW,EAAE,UAAU,UAC5D,EAAE,UAAU,YAAY,EAAE,UAAU;AACpC,gBAAM,IAAI,MAAM,gCAAgC,UAAU,EAAE;;AAEhE,qBAAa,CAAE,MAAM,OAAO,EAAE,OAAO,OAAO,EAAE;AAC9C,YAAI,EAAE,UAAU;AACZ,4BAAkB,IAAI,QAAQ;AAC1B,yBAAa,MAAM,EAAE;AACrB,kCAAsB,KAAK,OAAO,WAAU,KAAI,EAAE,QAAQ,KACtD,0BAA0B,KAAK;AACnC,0BAAc,IAAI,WAAW;AAC7B,yBAAa;AACb,0BAAa,GAAG,KAAI,KAAK,QAAQ;AAC7B,0BAAY,KAAK;AACjB,oCAAsB,IAAI,WAAW,IAAI,YAAY,CAAC,IAAI,SAAS
;AACnE,oBAAM,IAAI,eAAe;AACzB,wBAAU;AACV,oBAAM,IAAI,KAAK;AACf,wBAAU,IAAI;;AAElB,oBAAQ;;AAEZ,uBAAa,KAAK;;AAGlB,uBAAa,KAAK,EAAE;;AAExB,YAAI,SAAS;AACT,eAAK,QAAQ;;AAEjB,cAAM,KAAK;;AAEf,2BAAqB,MAAM,QAAQ,IAAI;AACvC,aAAO,CAAE,MAAM,uBAAuB,eAAe;;AAiBlD;AAEH,kBAAY;AACZ;AACA,mBAAa;AACb,yBAAmB;AACf,qBAAa,KAAK;AAClB,sBAAc,KAAK;AACnB,sBAAc,KAAK;AACnB,qBAAa,eAAc;AAC3B;AACA,YAAI,kBAAkB;AAClB,+BAAqB,KAAK;AAC1B,cAAI,aAAa,UAAU,WAAW,aAAa,UAAU;AACzD,gBAAI,CAAE,UAAS,gBAAgB,WAAW;AACtC,oBAAM,IAAI,MAAM,UAAU,KAAK,0BAA0B,aAAa;;qBAIrE,aAAa,UAAU;AAC5B,gBAAI,UAAU;AACV,oBAAM,IAAI,MAAM,UAAU,KAAK,0BAA0B,aAAa,yDACf;;;AAI3D,kBAAM,IAAI,MAAM,UAAU,KAAK,uCACL,aAAa;;AAI3C,yCAA+B,qBAAqB,aAAa;AACjE,6BAAmB,QAAO,MAAM,QAAQ,SAAS,OAAO;AACxD,iCAAwB,aAAa,UAAU,UAC3C,IAAI,WAAW,cACf,IAAI,YAAY;AACpB,cAAI,UAAU;AACV,gBAAI,aAAa,UAAU,WAAW,aAAa,UAAU;AACzD,uBAAS,IAAI,aAAa,eAAe;AACzC,2BAAa,GAAG,IAAI,eAAe,QAAQ;AACvC,0BAAU,eAAe;AACzB,uBAAO,KAAK,IAAI,aAAa,QAAQ,aAAa;;uBAGjD,aAAa,UAAU;AAC5B,kBAAI,kBAAkB;AAClB,gCAAgB;;AAEpB,uBAAS,cAAc;;AAGvB,oBAAM,IAAI,MAAM,iCAAiC,aAAa;;qBAI7D,UAAU;AACf,gBAAI,aAAa,UAAU,WAAW,aAAa,UAAU;AACzD,oBAAM,IAAI,MAAM,iCAAiC,aAAa;;AAGlE,qBAAS,IAAI,WAAW,eAAe;AACvC,yBAAa,GAAG,IAAI,eAAe,QAAQ;AACvC,wBAAU,eAAe;AACzB,qBAAO,KAAK,KAAK,MAAM,IAAI,aAAa,QAAQ,aAAa;;;AAIjE,kBAAM,IAAI,MAAM,gCAAgC,UAAU;;AAE9D,oBAAU,OAAO;mBAEZ,UAAU;AACf,wBAAa,eAAc,KAAK;AAChC,mBAAS;AACT,uBAAa,GAAG,IAAI,OAAM;AACtB,+BAAmB,IAAI,YAAY,QAAO,MAAM,QAAQ,SAAS,0BAA0B;AAC3F,sBAAU;AACV,0BAAc,IAAI,WAAW,QAAO,MAAM,QAAQ,SAAS;AAC3D,mBAAO,KAAK;AACZ,sBAAU;;;AAId,8BAAoB,qBAAqB;AACzC,6BAAmB,QAAO,MAAM,QAAQ,SAAS,OAAO;AACxD,cAAI,UAAU;AACV,qBAAS,IAAI,aAAa;qBAErB,UAAU;AACf,qBAAS,IAAI,WAAW;qBAEnB,UAAU;AACf,qBAAS,IAAI,WAAW;qBAEnB,UAAU;AACf,qBAAS,IAAI,aAAa;AAC1B,0BAAa,IAAI,aAAa,OAAO,SAAS;AAC9C,2BAAc,IAAI,aAAa,OAAO,SAAS;AAC/C,yBAAa,GAAG,IAAI,MAAK,QAAQ;AAC7B,oBAAK,KAAK,OAAO,IAAI;AACrB,qBAAM,KAAK,OAAO,IAAI,IAAI;;AAE9B,+BAAmB,QAAO,OAAM,OAAO;AACvC,gCAAoB,QAAO,QAAO,OAAO;AACzC,gBAAI,QAAQ,SAAQ,YAAY;AAChC,uBAAW;AACX,wBAAY;;AAGZ,kBAAM,IAAI,MAAM,gCAAgC,UAAU;;AAE9D,oBAAU,OAAO;;AAErB,YAAI,UAAU;AACV,cAAI,QAAQ,QAAO,QAAQ,OAAO;;;AAG1C,aAAO;;AAKJ;AAEH,UAAI,OAAO;AACP,cAAM,IAAI,MAAM,wBAAwB,KAAK,UAAU;;AAE3D,4BAAsB;AAQtB,2BAAqB;AACrB,SAAG,QAAQ;AACP,2BAAmB,EAAE;AAErB,qBAAa,KAAK,EAAE,eAAe,EAAE,OAAO,aAAa,IACrD,IAAI,EAAE,YAAY;AACtB,YAAI,CAAE,cAAa,gBAAgB,aAAa,cAC5C,aAAa;AACb,gBAAM,IAAI,MAAM,mCAAmC,EAAE,YAAY;;;AAIzE,gBAAU,IAAI,WAAW;AACzB,mBAAa;AACb,mBAAa,QAAQ;AACjB,UAAE,IAAI,IAAI,WAAW,EAAE,SAAS;AAChC,kBAAU,EAAE;;AAEhB,aAAO,EAAE;;AAGb,0BAAsB,OAAO,WAAW,eACnC,QAAO,SAAS,eAAe,OAAO,SAAS,eAC5C,OAAO,SAAS;AAUjB;AACH,UAAI;AACA,eAAO,OAAO,WAAW;;AAE7B,aAAO,IAAI,KAAK,CAAC,MAAM;;AAQpB;AACH,UAAI;AACA,eAAO,OAAO,KAAK,SAAQ,SAAS;;AAExC,kBAAY,IAAI,WAAW;AAC3B,cAAQ;AACR,mBAAa,OAAO,IAAI,QAAQ,IAAI,GAAG;AACnC,aAAK,OAAO,aAAa,IAAI;;AAEjC,aAAO,KAAK;;AAQT;AACH,UAAI;AACA,oBAAY,OAAO,KAAK,KAAK;AAC7B,eAAO,IAAI,OAAO,MAAM,IAAI,YAAY,IAAI,aAAa,IAAI;;AAEjE,gBAAU,KAAK;AACf,sBAAe,IAAI,WAAW,EAAE;AAChC,mBAAa,GAAG,IAAI,EAAE,QAAQ,EAAE;AAC5B,gBAAO,IAAI,CAAC,EAAE,WAAW,KAAK;;AAElC,aAAO,QAAO;;AAQX;AACH,UAAI,QAAQ,WAAW;AACnB,eAAO,QAAQ;;AAEnB,4BAAsB;AACtB,cAAQ,QAAQ;AACZ,2BAAmB,QAAO;;AAE9B,mBAAa,IAAI,WAAW;AAC5B,mBAAa;AACb,cAAQ,QAAQ;AACZ,aAAK,IAAI,IAAI,WAAW,UAAS;AACjC,kBAAU,QAAO;;AAErB,aAAO,KAAK;;AAST;AACH,wBAAkB;AAClB,aAAO,KAAK;AACZ,aAAO,KAAK,SAAS;AACjB,eAAO,KAAK,MAAM,GAAG,KAAK,SAAS;;AAEvC,oBAAc,KAAK,MAAM;AACzB,aAAO,MAAM,MAAM,SAAS;;AAOzB;AACH,UAAI,eAAe,yBAAyB;AACxC,cAAM,IAAI,MAAM;;AAEpB,aAAO;QACH,WAAW,IAAI;QACf,mBAAmB;QACnB,oBAAoB,eAAe,iBAAiB,OAChD,IACA,iBAAiB,KAAK,UAAU,eAAe;QACnD,kBAAkB,eAAe,eAAe,OAC5C,IACA,iBAAiB,KAAK,UAAU,eAAe;QACnD,iBAAiB,eAAe,cAAc,OAC1C,IACA,eAAe,WAAW;;;AAS
tC;AACI,8BAAwB;AACpB,gBAAQ,KAAK;AACb,gBAAQ;AACR,eAAQ,KAAI,aAAgB;AACxB,eAAK;AACL,gBAAM;;AAEV,aAAK,CAAC;AACN,aAAK;AACL,eAAO,IAAI;;AAEf,2BAAqB,IAAI,YAAY;AACrC,mBAAa,KAAK;AAClB,mBAAa,GAAG,IAAI,MAAM;AACtB,qBAAa,KAAK,gBAAgB;;AAEtC,mBAAa,MAAM,IAAI,MAAM;AACzB,qBAAa,KAAK,YAAe,KAAI,QAAS;;AAElD,aAAO;;AAQX;AACI,4BAAsB,IAAI,YAAY;AACtC,oBAAc,KAAK;AACnB,oBAAc,MAAM;AACpB,oBAAc,MAAM;AACpB,oBAAc,MAAM;AACpB,mBAAa,GAAG,IAAI,IAAI;AACpB,sBAAc,KAAK,KAAK;;AAE5B,mBAAa,IAAI,IAAI,IAAI;AACrB,sBAAc,KAAK,aAAe,KAAI,MAAO;;AAEjD,aAAO;;AAQX;AACI,0BAAoB,IAAI,YAAY;AACpC,mBAAa,GAAG,IAAI,IAAI;AACpB,oBAAY,KAAK;;AAErB,kBAAY,KAAK,YAAY,MAAM;AACnC,aAAO;;AASJ;AAIH,2BAAqB;AACrB,4BAAsB;AACtB,0BAAoB;AACpB,aAAO;AACH,wBAAe,IAAI,YAAY,IAAI,eAAe;AAClD,iCAAyB,IAAI,YAAY;AACzC,0BAAiB,GAAG,SAAQ,eAAe,QAAQ;AAC/C,8BAAoB,eAAe;AACnC,8BAAoB,aAAa,YAAY,eAAe,MAAO,eAAc,SAC7E,cAAc,eAAe;AACjC,2BAAiB,UAAS;;AAE9B,eAAO,IAAI,aAAa;;;ACtchC;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,cAAc;AACnB,aAAK,cAAc;;aAEhB;AACH,YAAI,iBAAiB,YAAY;AAC7B,2BAAiB,WAAW,IAAI;;AAEpC,eAAO,iBAAiB;;aAQrB;AACH,yBAAiB,cAAc,YAAY,KAAK;;aAQ7C;AACH,yBAAiB,cAAc,YAAY,KAAK;;aAU7C;AACH,eAAO,iBAAiB,YAAY,KAAK;;aAUtC;AACH,eAAO,iBAAiB,YAAY,KAAK,QAAQ;;aAE9C;AACH,8BAAsB;AACtB,wBAAgB,gBAAgB,SAC5B,iBAAiB,cAAc,cAC/B,iBAAiB,cAAc;AACnC,gBAAQ,QAAQ;AACZ,0BAAgB,OAAO,KAAK;AAC5B,cAAI,YAAY;AACZ,0BAAc,KAAK;;;AAG3B,eAAO;;;AAGR,+BAA2B,gBAAgB,iBAAiB,mBAAmB;AAC/E,+BAA2B,gBAAgB,iBAAiB,mBAAmB;AAC/E,4BAAwB,SAAS,iBAAiB,gBAAgB;AAClE,4BAAwB,sBAAsB,iBAAiB,gBAAgB,KAAK;ACpF3F;;;;;;;;;;;;;;;;AAoBA,0BAAsB;AACtB,6BAAyB;AAIzB,6BAAyB;AAIzB,4BAAwB;AAIjB;AACH,yBAAmB;AACnB,aAAO,IAAI,QAAQ;AACf,8BAAsB,WAAW,eAAe;AAChD,sBAAc,YAAY,MAAM;AAChC,sBAAc,UAAU,WAAS,OAAO;;;AAGhD;AACI,UAAI,CAAC,OAAM,QAAQ;AAIf,cAAM,IAAI,MAAM;;AAIpB,wBAAkB,OAAO,WAAW,cAAc,OAAO;AACzD,sBAAgB,UAAU,aAAa,UAAU,gBAC7C,UAAU,mBAAmB,UAAU,eACvC,UAAU;AACd,UAAI,WAAW;AACX,cAAM,IAAI,MAAM;;AAEpB,aAAO;;AAEX;AACI,iBAAW,YAAY;AACvB,SAAG,kBAAkB,kBAAkB,CAAE,SAAS;AAClD,SAAG,kBAAkB,iBAAiB,CAAE,SAAS;;;MAQjD;AACI,aAAK,YAAY;AACjB,YAAI,aAAa,QAAQ,CAAC;AACtB,gBAAM,IAAI,MAAM;;AAEpB,aAAK,YAAY;;YAEf;AAEF,YAAI,eAAe,yBAAyB;AACxC,gBAAM,IAAI,MAAM;;AAGpB,eAAO,KAAK,eAAe,KAAK,WAAW;;YAEzC;AACF,eAAO,KAAK,eAAe,KAAK;;MAgBpC;AACI,eAAO,IAAI,QAAQ;AACf,8BAAoB,KAAK,UAAU,KAAK,eAAe;AACvD,sBAAY,kBAAkB,MAAM,cAAc;AAClD,sBAAY,YAAY;AACpB,uBAAW,YAAY;AACvB,gBAAI,kBAAkB;AAElB,8BAAgB,GAAG,YAAY,kBAAkB;AACjD,iCAAmB,QAAQ,YAAY;AACvC,iCAAmB,WAAW,IAAI,KAAK;AACvC,yBAAW,YAAY;AACnB,oBAAI,WAAW,UAAU;AACrB,qBAAG;AACH,yBAAO,OAAO,IAAI,MAAM,gCAAgC,KAAK;;AAI7D,0BAAQ,WAAW,OAAO;;;AAGlC,yBAAW,UAAU;AACjB,mBAAG;AACH,uBAAO,OAAO,WAAW;;AAE7B,sBAAQ,aAAa,MAAM,GAAG;;AAI9B,yCAA2B,6BAA6B;AAExD,6BAAe,GAAG,YAAY,iBAAiB;AAC/C,8BAAgB,OAAO,YAAY;AACnC,qCAAuB,UAAU,IAAI,CAAE,WAAW,KAAK,WAAW;AAClE;AACA,6BAAe,YAAY;AAEvB,0BAAU,GAAG,YAAY,kBAAkB;AAC3C,mCAAmB,QAAQ,YAAY;AACvC,wCAAwB,WAAW,IAAI;kBACnC,WAAW,KAAK;kBAChB;kBACA;;AAEJ,gCAAgB,YAAY,MAAM,QAAQ,CAAE;AAC5C,gCAAgB,UAAU;AAGtB,8BAAY,OAAO,YAAY;AAC/B,4CAA0B,UAAU,OAAO,KAAK;AAChD,oCAAkB,YAAY;AAC1B,uBAAG;AACH,2BAAO,OAAO,gBAAgB;;AAElC,oCAAkB,UAAU;AACxB,uBAAG;AACH,2BAAO,OAAO,gBAAgB;;;;AAI1C,6BAAe,UAAU;AACrB,mBAAG;AACH,uBAAO,OAAO,eAAe;;AAEjC,qBAAO,aAAa;AAChB,oBAAI,WAAW;AACX,qBAAG;;AAGH,0BAAQ,aAAa,MAAM,GAAG;;;;;AAK9C,sBAAY,UAAU,WAAS,OAAO,YAAY;;;;AAI9D,qBAAiB,aAAa;AACvB,4BAAwB;AAC3B,UAAI,CAAC,OAAM,QAAQ;AACf,eAAO;;AAGP,YAAI,CAAC,MAAM,QAAQ,QAAQ,IAAI,WAAW,iBAAiB;AACvD,iBAAO,iBAAiB,IAAI,MAAM,iBAAiB,WAAW;;AAG9D,iBAAO;;;;AAInB,qBAAiB,mBAAmB;AACpC,qBAAiB,mBAAmB;AAkB7B;AACH,aAAO,IAAI,iBAAiB;;AAEhC;AACI,aAAO,IAAI,WAAW,iBAAiB,cACnC,IAAI,MAAM,iBAAiB,WAAW,UACtC;;;MAGJ;AACI,aAAK,YAAY;;YAEf;AACF,eAAO,IAAI,QAAQ;AACf,8BAAoB,KAAK,UAAU,KAAK,eAAe;AACvD,sBAAY,kBA
AkB,MAAM,cAAc;AAClD,sBAAY,YAAY;AACpB,uBAAW,YAAY;AACvB,uBAAW,GAAG,YAAY,iBAAiB;AAC3C,0BAAc,GAAG,YAAY;AAS7B,sCAA0B,MAAM;AAChC,8BAAkB,YAAY;AAC1B,0BAAY;AACZ,iCAAmB,kBAAkB;AACjC,oBAAI,KAAK,aAAa,KAAK;;AAE/B,sBAAQ;;AAEZ,8BAAkB,UAAU;AACxB,iBAAG;AACH,qBAAO,OAAO,kBAAkB;;AAEpC,eAAG,aAAa,MAAM,GAAG;;AAE7B,sBAAY,UAAU,WAAS,OAAO,YAAY;;;YAGpD;AACF,eAAO,iBAAiB;AACxB,eAAO,IAAI,QAAQ;AACf,8BAAoB,KAAK,UAAU,KAAK,eAAe;AACvD,sBAAY,kBAAkB,MAAM,cAAc;AAClD,sBAAY,YAAY;AACpB,uBAAW,YAAY;AACvB,2BAAe,GAAG,YAAY,iBAAiB;AAC/C,8BAAkB,OAAO,YAAY;AACrC,mCAAuB,UAAU,IAAI;AACrC;AACA,2BAAe,YAAY;AACvB,kBAAI,eAAe,UAAU;AACzB,mBAAG;AACH,uBAAO,OAAO,IAAI,MAAM,gCAAgC;;AAKxD,0CAA0B,UAAU,OAAO;AAC3C,wCAAwB;AAEpB,4BAAU,GAAG,YAAY,kBAAkB;AAC3C,qCAAmB,QAAQ,YAAY;AACvC,6CAA2B,WAAW,OAAO;AAC7C,qCAAmB,YAAY,MAAM,QAAQ,eAAe,OAAO;AACnE,qCAAmB,UAAU,WAAS,OAAO,eAAe;;AAIhE,kCAAkB,YAAY;AAC9B,kCAAkB,UAAU;AACxB;AACA,qBAAG;AACH,yBAAO,OAAO,eAAe;;;;AAIzC,2BAAe,UAAU;AACrB,iBAAG;AACH,qBAAO,OAAO,eAAe;;AAEjC,mBAAO,aAAa;AAChB,kBAAI,WAAW;AACX,mBAAG;;AAGH,wBAAQ,aAAa,MAAM,GAAG;;;;AAI1C,sBAAY,UAAU,WAAS,OAAO,YAAY;;;;ACrT9D;;;;;;;;;;;;;;;;AAqBA,2BAAuB;AACvB,wBAAoB;AACpB,wBAAoB;AACpB,kCAA8B;AAC9B,gCAA4B;AAC5B,+BAA2B;AAC3B,kCAA8B;AAMvB;AACH,UAAI,CAAC,OAAM,QAAQ,iBAAiB,OAAO,WAAW,eAClD,OAAO,OAAO,iBAAiB;AAC/B,cAAM,IAAI,MAAM;;AAGpB,iBAAW,OAAO;AAClB,+BAAyB;AACzB,mBAAa,GAAG,IAAI,GAAG,QAAQ,EAAE;AAC7B,oBAAY,GAAG,IAAI;AACnB,uBAAe,cAAc;AAC7B,YAAI,IAAI,WAAW,WAAW,IAAI,SAAS,OAAO;AAC9C,aAAG,WAAW;AACd,4BAAkB,oBAAoB;AACtC,cAAI,iBAAiB,QAAQ,eAAe;AACxC,6BAAiB,KAAK;;;;AAIlC,aAAO;;AAEX;AACI,aAAO;QACH,MAAM,CAAC,aAAa,MAAM,aAAa,KAAK;QAC5C,UAAU,CAAC,aAAa,MAAM,uBAAuB,KAAK;QAC1D,aAAa,CAAC,aAAa,MAAM,qBAAqB,KAAK;QAC3D,YAAY,CAAC,aAAa,MAAM,oBAAoB,KAAK;QACzD,eAAe,CAAC,aAAa,MAAM,uBAAuB,KAAK;;;AAUvE;AACI,oBAAc,IAAI,MAAM;AACxB,UAAI,MAAM,SAAS;AACf,cAAM,IAAI,MAAM,uBAAuB;;AAE3C,aAAO,MAAM,MAAM,GAAG,MAAM,SAAS,GAAG,KAAK;;AAEjD;AACI,aAAO,IAAI,WAAW,oBAAoB,cACtC,IAAI,MAAM,oBAAoB,WAAW,UACzC;;;MAQJ;AACI,YAAI,CAAC,OAAM,QAAQ,iBAAiB,OAAO,WAAW,eAClD,OAAO,OAAO,iBAAiB;AAK/B,gBAAM,IAAI,MAAM;;AAEpB,aAAK,KAAK,OAAO;AACjB,YAAI,aAAa,QAAQ,CAAC;AACtB,gBAAM,IAAI,MAAM;;AAEpB,aAAK,YAAY;AACjB,aAAK,OAAO,aAAa,KAAK;;YAW5B;AACF,YAAI,eAAe,yBAAyB;AACxC,gBAAM,IAAI,MAAM;;AAIhB,2BAAiB,KAAK,UAAU,eAAe;AAC/C,8BAAoB,KAAK,UAAU,eAAe;AAClD,qCAA2B,6BAA6B;AACxD;AACI,iBAAK,GAAG,QAAQ,KAAK,KAAK,MAAM,KAAK,UAAU;AAC/C,iBAAK,GAAG,QAAQ,KAAK,KAAK,UAAU;AACpC,iBAAK,GAAG,QAAQ,KAAK,KAAK,aAAa;AACvC,iBAAK,GAAG,QAAQ,KAAK,KAAK,YAAY,0BAA0B,eAAe;AAC/E,iBAAK,GAAG,QAAQ,KAAK,KAAK,eAAe,KAAK,UAAU;cACpD,QAAQ,eAAe;cACvB,aAAa,eAAe;cAC5B,aAAa,eAAe;cAC5B,qBAAqB,eAAe;;AAExC,mBAAO,CAAE;;AAIT,iBAAK,GAAG,WAAW,KAAK,KAAK;AAC7B,iBAAK,GAAG,WAAW,KAAK,KAAK;AAC7B,iBAAK,GAAG,WAAW,KAAK,KAAK;AAC7B,iBAAK,GAAG,WAAW,KAAK,KAAK;AAC7B,iBAAK,GAAG,WAAW,KAAK,KAAK;AAC7B,kBAAM,IAAI,MAAM,yBAAyB,KAAK,kHAEpB,mBAAmB,wCACrB,mBAAmB,qCACpB,mBAAmB;;;;YAYhD;AACF,qBAAa,KAAK,MAAM,KAAK,GAAG,QAAQ,KAAK,KAAK;AAClD,YAAI,QAAQ;AACR,gBAAM,IAAI,MAAM,kDAAkD,KAAK;;AAE3E,YAAI,KAAK,sBAAsB;AAC3B,gBAAM,IAAI,MAAM;;AAGpB,oBAAY;AAEZ,yBAAiB,KAAK,MAAM,KAAK,GAAG,QAAQ,KAAK,KAAK;AACtD,YAAI,YAAY;AACZ,gBAAM,IAAI,MAAM,4CAA4C,KAAK;;AAGrE,YAAI,gBAAgB;AAEpB,4BAAoB,KAAK,MAAM,KAAK,GAAG,QAAQ,KAAK,KAAK;AACzD,YAAI,eAAe;AACf,gBAAM,IAAI,MAAM,gDAAgD,KAAK;;AAGzE,YAAI,cAAc;AAElB,+BAAuB,KAAK,GAAG,QAAQ,KAAK,KAAK;AACjD,YAAI,kBAAkB;AAClB,2BAAiB,KAAK,MAAM;AAC5B,cAAI,SAAS,SAAS;AACtB,cAAI,cAAc,SAAS;AAC3B,cAAI,cAAc,SAAS;AAC3B,cAAI,sBAAsB,SAAS;;AAGvC,iCAAyB,KAAK,GAAG,QAAQ,KAAK,KAAK;AACnD,YAAI,oBAAoB;AACpB,gBAAM,IAAI,MAAM,wDACR,KAAK;;AAEjB,YAAI,aAAa,0BAA0B;AAC3C,eAAO;;;AAGf,wBAAoB,aAAa;AAC1B,+BAA2B;AAC9B,UAAI,CAAC,OAAM,QAAQ;AACf,eAAO;;AAGP,YAAI,CAAC,MAAM,QAAQ,QAAQ,IAAI,WAA
W,oBAAoB;AAC1D,iBAAO,oBAAoB,IAAI,MAAM,oBAAoB,WAAW;;AAGpE,iBAAO;;;;AAInB,qBAAiB,mBAAmB;AACpC,qBAAiB,mBAAmB;AAyB7B;AACH,aAAO,IAAI,oBAAoB;;;MAG/B;AACI,gBAAO,OAAM,QAAQ,eAAe,MAAM;AAC1C,gBAAO,OAAO,WAAW,eACrB,OAAO,OAAO,iBAAiB,aAAa,MAAM;AACtD,aAAK,KAAK,OAAO;;YAEf;AACF,oBAAY;AACZ,uBAAe,cAAc;AAC7B,uBAAe,iBAAiB;AAChC,qBAAa,GAAG,IAAI,KAAK,GAAG,QAAQ,EAAE;AAClC,sBAAY,KAAK,GAAG,IAAI;AACxB,cAAI,IAAI,WAAW,WAAW,IAAI,SAAS;AACvC,8BAAkB,oBAAoB;AACtC,gBAAI,aAAa,KAAK,MAAM,KAAK,GAAG,QAAQ;;;AAGpD,eAAO;;YAEL;AACF,eAAO,mBAAiB;AACxB,qBAAa,aAAa;AAC1B,YAAI,KAAK,GAAG,QAAQ,KAAK,SAAS;AAC9B,gBAAM,IAAI,MAAM,8BAA8B;;AAElD,qBAAa,KAAK,MAAM,KAAK,GAAG,QAAQ,KAAK;AAC7C,aAAK,GAAG,WAAW,KAAK;AACxB,aAAK,GAAG,WAAW,KAAK;AACxB,aAAK,GAAG,WAAW,KAAK;AACxB,aAAK,GAAG,WAAW,KAAK;AACxB,eAAO;;;ACnRf;;;;;;;;;;;;;;;;AA4BA,8BAA0B;;MAEtB;AACI,aAAK,WAAW;;aAEb;AACH,YAAI,0BAA0B,YAAY;AACtC,oCAA0B,WAAW,IAAI;;AAE7C,eAAO,0BAA0B;;aAQ9B;AACH,gBAAO,UAAU,MAAM,MAAM;AAC7B,YAAI,OAAO,SAAS;AAChB,mBAAS,OAAO,MAAM,GAAG,OAAO,QAAQ;;AAE5C,gBAAO,OAAO,SAAS,GAAG,MAAM;AAChC,yBAAiB,0BAA0B;AAC3C,gBAAO,SAAS,SAAS,WAAW,MAAM,MAAM,2DAA2D;AAC3G,iBAAS,SAAS,UAAU;;aAEzB;AACH,wBAAgB,KAAK,cAAc,SAAS;AAC5C,YAAI,WAAW;AACX,gBAAM,IAAI,MAAM,yCAAyC;;AAE7D,eAAO;;aAEJ;AACH,eAAO,OAAO,KAAK,KAAK,cAAc;;;AAW9C;AACI,UAAI,IAAI,QAAQ,uBAAuB;AACnC,cAAM,IAAI,MAAM,6EAET,0BAA0B,aAAa,KAAK;;AAEvD,aAAO;QACH,QAAQ,IAAI,MAAM,mBAAmB;QACrC,MAAM,IAAI,MAAM,mBAAmB;;;AAG3C,yEAAqE;AACjE,cAAO,cAAc,SAAS,MAAM,wCAAwC;AAC5E,2BAAqB,iBAAiB,gBAAgB;AACtD,cAAO,aAAa,SAAS,GAAG,MAAM,kEAAkE;AACxG,cAAO,aAAa,SAAS,GAAG,MAAM,yCAAyC,aAAa,wCACxD;AACpC,0BAAoB,aAAa;AACjC,2BAAqB,iBAAiB,gBAAgB;AACtD,cAAO,aAAa,SAAS,GAAG,MAAM,uEAC3B;AACX,cAAO,aAAa,SAAS,GAAG,MAAM,yCAAyC,aAAa,6CACnD;AACzC,0BAAoB,aAAa;AACjC,2BAAqB,SAAS,WAAW;AACzC,yBAAmB,SAAS,WAAW;AACvC,yBAAmB,iBAAiB,SAAS,WAAW;AACxD,6BAAuB,MAAM,YAAY;AAIzC,UAAI,gBAAgB;AAChB,cAAM,0BAA0B,WAAW,cACtC,YAAY;;AAErB,yBAAmB,MAAM,YAAY,KAAK;AAI1C,UAAI,gBAAgB,CAAC;AACjB,cAAM,0BAA0B,WAAW,cACtC,YAAY;;AAErB,aAAO,WAAW;;AAqCtB;AACI,sBAAgB,0BAA0B;AAC1C,kBAAY;AACZ,2BAAqB;AACjB,0BAAkB,MAAM,0BAA0B,WAAW,QAAQ;AACrE,2BAAmB;AACf,sBAAY,SAAS,oBAAoB;AACzC,cAAI,OAAO,UAAU;;;AAG7B,aAAO;;AAmCX;AACI,4BAAsB,SAAS;AAC/B,sBAAgB,0BAA0B,WAAW,cAAc;AACnE,aAAO,QAAQ,YAAY,cAAc;;AAiD7C;AACI,2BAAqB;AACrB,aAAO,mBAAmB,WAAW,SAAS;;AAgDlD;AACI,2BAAqB;AACrB,aAAO,mBAAmB,WAAW,SAAS;;AC/SlD;;;;;;;;;;;;;;;;;MAsBI;AACI,eAAO,MAAM,MAAM;;MAEvB;AACI,eAAO,YAAY;;MAEvB;AACI,YAAI,aAAa,WAAW,aAAa;AACrC,gBAAM,IAAI,MAAM,kDAAkD;;AAEtE,YAAI,KAAK,eAAe;AACpB,eAAK,cAAc,IAAI;;AAE3B,eAAO,KAAK,YAAY,OAAO;;MAEnC;AACI,eAAO,IAAI,YAAY,UAAU,OAAO;;;AAGhD,QAAI,OAAM,IAAI;AACV,aAAM,YAAY,WAAW,IAAI;AAEjC;AACI,kCAA0B,gBAAgB,oBAAoB,YAAY,IAAI;;;AAKlF;AACI,kCAA0B,gBAAgB,iBAAiB,YAAY,IAAI;;;;ACnDnF;;;;;;;;;;;;;;;;AAkBO,yBAAqB;MAExB,aAAa,MAAM;;AAEvB;AAGO;AACH,oBAAc;;AAEX;AACH,oBAAc;;AAEX;AACH,aAAO;;;MAGP;AAEI,aAAK,OAAO;AAGZ,aAAK,cAAc,IAAI,KAAK,KAAK;;MAErC;AACI,YAAI,OAAM,OAAO,SAAS;AACtB,iBAAO,OAAM,OAAO,MAAM,MAAM;;AAEpC,YAAI,eAAe;AACf,wBAAc,aAAa;;AAE/B,eAAO,YAAY,MAAM;;MAE7B;AACI,sBAAa,QAAQ;AACrB,eAAO,MAAK,KAAK,MAAO,MAAK,KAAK;;MAEtC;AACI,YAAI,aAAa,WAAW,aAAa;AACrC,gBAAM,IAAI,MAAM,sDAAsD;;AAE1E,eAAO,KAAK,YAAY,OAAO;;MAEnC;AACI,YAAI,MAAM,WAAW;AACjB,iBAAO;;AAEX,eAAO,IAAI,KAAK,KAAK,YAAY,UAAU,OAAO;;;AAG1D,QAAI,OAAM,IAAI;AACV,aAAM,YAAY,QAAQ,IAAI;;ACrElC;;;;;;;;;;;;;;;;AA4CO,oCAA+B;AAClC,cAAQ,SAAS;AACjB,0CAAwC;AACxC,aAAO,IAAI,cAAa,OAAO,OAAO;;AC/C1C;;;;;;;;;;;;;;;;AAiCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AAEnC,UAAI,CAAC,cAAkB;AACnB,cAAM,IAAI,MAAM,mCAAmC;;AAEvD,UAAI,UAAU,YAAY,GAAG,UAAU,YACnC,UAAU,YAAY,GAAG,UAAU;AACnC,cAAM,IAAI,MAAM;;AAEpB,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE;AAChB,aAAO,QAAO,cAAc,cAAW,SAAQ,KAAK,IAAI,QAAQ,QAA
Q,MAAiB,OAAM;;AAEvF,kBAAQ,IAAG,CAAE;AC/CzB;;;;;;;;;;;;;;;;AAkCA;AACI,iBAAW,iBAAgB,GAAG,KAAK,SAAS;AAC5C,sBAAgB,MAAM,QAAO,qBAAqB,GAAG,QAAQ,GAAG,OAAO,GAAG;AAC1E,qBAAe,CAAE,GAAG;AAGpB,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB;;AAEtD,kBAAS,IAAG,CAAE;AC1C1B;;;;;;;;;;;;;;;;AA6BO,iCAA4B;AAC/B,cAAQ,IAAI,EAAE,SAAS;;AC9B3B;;;;;;;;;;;;;;;;AAmBA;AAYA,wBAAkB;MACd;MACA;MACA;MACA;;AAEJ,iBAAa;ACrCb;;;;;;;;;;;;;;;;AAwBA,qCAAiC;AACjC,wCAAoC;AACpC,+CAA2C;AAC3C;AACI,aAAO,IAAI,QAAQ,aAAW,WAAW,UAAU,KAAK;;;MAGxD;AACI,YAAI,CAAC,OAAM,QAAQ;AAGf,gBAAM,IAAI,MAAM;;AAGpB,YAAI,eAAe,WAAW,iBAAiB;AAC3C,2BAAiB,eAAe,MAAM,iBAAiB,WAAW;;AAEtE,YAAI,kBAAkB,QAAQ,eAAe,WAAW;AACpD,2BAAiB;;AAErB,aAAK,wBAAwB,iBAAiB;AAC9C,aAAK,qBACD,iBAAiB;;YAEnB;AACF,YAAI,OAAQ,aAAc;AACtB,gBAAM,IAAI,MAAM;;AAGpB,2BAAmB,OAAO,IAAI,gBAAgB,IAAI,KAAK,CAAC,eAAe,aAAa,CAAE,MAAM;AAC5F,YAAI,eAAe,yBAAyB;AACxC,gBAAM,IAAI,MAAM;;AAIhB,kCAAwB,CAAC;YACjB,OAAO,CAAC,OAAO,KAAK;YACpB,SAAS,eAAe;;AAEhC,iDAAuC;YACnC,eAAe,eAAe;YAC9B,QAAQ,eAAe;YACvB,aAAa,eAAe;YAC5B,aAAa,eAAe;YAC5B;;AAEJ,oDAA0C,OAAO,IAAI,gBAAgB,IAAI,KAAK,CAAC,KAAK,UAAU,kCAAkC,CAAE,MAAM;AAGxI,6BAAmB,KAAK,cAAc,OAAO,SAAS,cAAc,OAChE,KAAK;AACT,qBAAW,WAAW,KAAK;AAC3B,qBAAW,OAAO;AAIlB,gBAAM,MAAM,MAAM,WAAW,cAAc,IAAI,WAAW;AAC1D,cAAI,eAAe,cAAc;AAC7B,qCAAyB,KAAK,oBAAoB,OAC9C,SAAS,cAAc,OACvB,KAAK;AACT,6BAAiB,WAAW,KAAK;AACjC,6BAAiB,OAAO;AACxB,kBAAM,MAAM,MAAM,iBAAiB,cAAc,IAAI,WAAW;;AAEpE,iBAAO,CAAE,oBAAoB,6BAA6B;;;;AAItE,qBAAiB,aAAa;;MAE1B;AACI,YAAI,SAAS,QAAQ,MAAM,SAAS;AAChC,gBAAM,IAAI,MAAM,wEACI;;AAExB,aAAK,QAAQ;;YAEX;AACF,yBAAiB,KAAK,MAAM;AAC5B,4BAAoB,KAAK,MAAM,MAAM;AACrC,eAAO,IAAI,QAAQ;AACf,6BAAmB,IAAI;AACvB,qBAAW,SAAS;AAEhB,8BAAkB,KAAK,MAAM,MAAM,OAAO;AAC1C,kCAAsB,UAAU;AAChC,gBAAI,iBAAiB;AACjB,qBAAO,IAAI,MAAM,4CAA4C,SAAS;AACtE;;AAEJ,gBAAI,YAAY,WAAW;AACvB,sBAAQ,CAAE;;AAEd,oCAAwB,UAAU;AAClC,gBAAI,mBAAmB;AACnB,qBAAO,IAAI,MAAM,6CAA6C,SAAS;AACvE;;AAEJ;AACA;AACI,2BACI,KAAK,4BAA4B,iBAAiB;;AAGtD,qBAAO;AACP;;AAEJ,gCAAoB;AACpB,0BAAc;AACd,mCAAuB;AACvB,4BAAgB,QAAQ;AACpB,2BAAa,MAAM,QAAQ;AACvB,sBAAM,KAAK;AACX,+BAAe,KAAK;;AAExB,0BAAY,KAAK,GAAG,aAAa;;AAErC,4BAAgB,QAAQ;AACpB,2BAAa,MAAM,QAAQ;AACvB,yCAAyB,IAAI;AAC7B,iCAAiB,SAAS;AAEtB,qCAAmB,OAAM,OAAO;AAChC,iCAAc,MAAM,QAAQ;AAC5B,iCAAe,UAAS;AACxB,sBAAI,eAAe,QAAQ,UAAU;AACjC,4BAAQ;sBACJ;sBACA;sBACA,YAAY,wBAAwB;sBACpC,QAAQ,UAAU;sBAClB,aAAa,UAAU;sBACvB,aAAa,UAAU;sBACvB,qBAAqB,UAAU;;;;AAI3C,iCAAiB,UAAU,WAAS,OAAO,6CAA6C;AACxF,iCAAiB,kBAAkB,WAAW;;;;AAI1D,qBAAW,UAAU,WAAS,OAAO,sEACnB,SAAS;AAE3B,qBAAW,WAAW;;;MAM9B;AACI,0BAAkB;AAClB,0BAAkB,MAAM,IAAI,UAAQ,SAAS,KAAK;AAClD,2BAAmB;AACnB,4BAAoB;AAChB,gBAAM,MAAM,QAAQ;AAChB,iCAAqB,SAAS;AAC9B,gBAAI,UAAU,QAAQ,kBAAkB;AACpC,oBAAM,IAAI,MAAM,uDACR;;AAEZ,sBAAU,KAAK;AACf,gBAAI,UAAU,QAAQ,kBAAkB;AACpC,oBAAM,IAAI,MAAM,8BAA8B;;AAG9C,yBAAW,QAAQ,MAAM,UAAU,QAAQ;;;;AAIvD,YAAI,UAAU,WAAW,MAAM;AAC3B,gBAAM,IAAI,MAAM,wDACR,UAAU,oDACV,MAAM;;AAElB,eAAO;;;AAGR,mCAA+B;AAClC,UAAI,CAAC,OAAM,QAAQ;AACf,eAAO;;AAGP,YAAI,CAAC,MAAM,QAAQ,QAAQ,IAAI,WAAW,iBAAiB;AACvD,iBAAO,iBAAiB,IAAI,MAAM,iBAAiB,WAAW;;AAG9D,iBAAO;;;;AAInB,qBAAiB,mBAAmB;AAwC7B,+CAA2C;AAC9C,aAAO,IAAI,iBAAiB;;AA0CzB;AACH,aAAO,IAAI,aAAa;;AC7S5B;;;;;;;;;;;;;;;;AAyBO;AACH,oBAAc;AACd,sBAAgB,iBAAiB,OAAO,IAAI;AAC5C,oBAAc,eAAe,OAAO,IAAI;AACxC,oBAAc,eAAe;AAC7B,4BAAsB;AACtB,8BAAwB;AACpB,gBAAQ,KAAK;AACT,2BAAiB,gBACb,EAAE,kBAAkB,SAAS,SAAU,eAAc;AAEzD,qBAAW;AACX,iBAAO;;AAEX,eAAO;;AAEX;AACI,gBAAO,aAAY,QAAQ,MAAM,QAAQ,cAAa,UAAS,SAAS,GAAG,MAAM;;AAErF;AACI,gBAAO,kBAAiB,KAAK,kBAAiB,GAAG,MAAM,oEAC9B;AACzB,gBAAO,gBAAe,KAAK,gBAAe,GAAG,MAAM,kEAC5B;AACvB,gBAAO,gBAAe,gBAAe,MAAM,yEAClB,kCAClB;;AAEX,aAAO,QAAQ,IAAI,SAAS,IAAI;;ACrDpC;;;;;;;;;;;;;;;;AAgCO;AACH,UAAI,eAAe;AACf,s
BAAc;;AAElB,wBAAkB,YAAY,aAAa,OAAO,OAAM,SAAS,QAC7D,YAAY;AAEhB,uBAAiB,UAAU,IAAI,cAAY,UAAU,UAAU,YAAY,aAAa,CAAE,UAAU;AACpG,iCAA2B;AAC3B,+BAAyB;AACzB,wBAAkB,YAAY,cAAc,OACxC,MAAM,QAAQ,IAAI,YAClB,MAAM,wBAAwB,UAAU,YAAY,YAAY,oBAAoB;AACxF,6BAAuB,UAAU,IAAI,cAAY,SAAS;AAC1D,kCAA4B;AAC5B,gCAA0B;AAC1B,sBAAgB,YAAY,cAAc,OACtC,MAAM,QAAQ,IAAI,kBAClB,MAAM,wBAAwB,gBAAgB,YAAY,YAAY,qBAAqB;AAC/F,aAAO;;AAWJ,0DAAsD;AAMzD,2BAAqB,eAAe,yBAAyB,WAAW,CAAE;AAC1E,2BAAoB,qBAAqB;AACzC,aAAO,aAAY,UAAU,gBAAgB;;AA0B1C;AACH,aAAO,kCAAkC;AAGrC,uCAA+B,SAAS,IAAI,MAAM;AAClD,oCAA4B;AAC5B,6BAAqB,eAAe,OAAO,YAAY,IAAI,MAAM,SAAS;AAC1E,uCAA+B;AAC/B,iBAAS,QAAQ;AACb,4BAAkB;AAClB,8BAAoB,QAAQ,QAAQ;AAChC,6BAAkB,kBAAkB,eAChC,aAAa,aAAa,QAC1B,aAAa;AACjB,iCAAqB,qBAAqB,YACtC,eAAmB,aAAa;AACpC,gDAAoC;AAChC,qCAAuB,cAAc;AACrC,kBAAI,oBAAoB,eAAe;AACnC,oCAAoB,cAAc;;AAEtC,kCAAoB,YAAY,KAAK;gBACjC,eAAe;gBACf;gBACA,WAAW;;;AAGnB,gBAAI,eAAe;AACf,0BAAY,QAAQ;AAChB,oBAAI,eAAe,aAAa;AAC5B;AACA,+BAAa,eAAe;;;;AAKpC;;AAEJ,mCAAuB,KAAK,aAAa;AACzC,2BAAe;;;AAGvB,YAAI,CAAC,aAAa,MAAM,WAAS;AAC7B,kCAAwB,YAAY,OAAO,UAAU,CAAC,aAAa;AACnE,gBAAM,IAAI,MAAM,kDACT,gBAAgB,KAAK;wCAErB,uBAAuB,KAAK;;AAIvC,oCAA4B,uBAAuB,OAAO;AACtD,cAAI;AACA,wBAAY,KAAK;;AAErB,iBAAO;WACR;AACH,0BAAkB;AAClB,4BAAoB,QAAQ;AACxB,mBAAS,GAAG,MAAM,QAAQ;AACtB,6BAAiB,iBACZ,EAAC,eAAe,SAAS,OAAO,MAAM,MAAM;AACjD,sBAAU,KAAK;;;AAGvB,wBAAgB,MAAM,qBAAqB;AAC3C,iCAAyB;AACzB,gCAAwB;AACxB,4BAAoB,QAAQ;AACxB,6BAAmB,SAAS,GAAG,MAAM;AACrC,2BAAiB;AACjB,wBAAa,GAAG,KAAI,YAAY;AAC5B,0BAAc,QAAQ,oBAAoB,IAAG;;AAGjD,8BAAoB,IAAI,YAAY;AACpC,kCAAwB,IAAI,WAAW;AACvC,kCAAwB;AACxB,wBAAa,GAAG,KAAI,YAAY;AAC5B,4BAAe,IAAI,WAAW,QAAQ,oBAAoB;AAC1D,4BAAgB,IAAI,SAAQ;AAC5B,iCAAqB,QAAO;;AAEhC,iCAAuB,oBAAoB;AAC3C,yBAAe,QAAQ;AACnB,+BAAmB,YAAY,MAAM,aAAa,aAAa,aAAa,cAAc,aAAa;AACvG,oCAAwB,cAAc,YAAY,CAAC,aAAa;AAChE,+BAAmB;AACf,+BAAiB,QAAQ,gBAAgB;;;AAGjD,+BAAqB;;AAEzB,eAAO;;;AC7Lf;;;;;;;;;;;;;;;;AA0BA,mCAA+B;AAC/B,sBAAkB;;MAEd;AACI,aAAK,iBAAiB;AACtB,YAAI,eAAe;AACf,wBAAc;;AAElB,aAAK,mBAAmB,YAAY;AACpC,aAAK,aAAa,YAAY;AAC9B,aAAK,qBAAqB,YAAY;AACtC,YAAI,YAAY,aAAa;AACzB,kBAAO,OAAO,YAAY,cAAc,YAAY,MAAM;AAG1D,eAAK,QAAQ,YAAY;;AAGzB,eAAK,QAAQ,OAAM,SAAS;;AAEhC,gBAAO,QAAQ,QAAQ,KAAK,SAAS,GAAG,MAAM;AAE9C,YAAI,MAAM,QAAQ;AACd,kBAAO,KAAK,WAAW,GAAG,MAAM,iEACP,KAAK;;AAElC,aAAK,OAAO;AACZ,YAAI,YAAY,eAAe,QAC3B,YAAY,YAAY,QAAQ;AAChC,gBAAM,IAAI,MAAM;;AAEpB,aAAK,cAAc,YAAY,eAAe;;YAE5C;AACF,YAAI,eAAe,yBAAyB;AACxC,gBAAM,IAAI,MAAM;;AAGpB,sBAAa,OAAO,OAAO,CAAE,QAAQ,KAAK,iBAAkB,KAAK;AACjE,cAAK,OAAO,IAAI;AAChB,gCAAwB,CAAC;UACjB,OAAO,CAAC;UACR,SAAS,eAAe;;AAEhC,+CAAuC;UACnC,eAAe,eAAe;UAC9B,QAAQ,eAAe;UACvB,aAAa,eAAe;UAC5B,aAAa,eAAe;UAC5B,qBAAqB,eAAe;UACpC;;AAEJ,cAAK,KAAK,OAAO,cAAc,IAAI,KAAK,CAAC,KAAK,UAAU,kCAAkC,CAAE,MAAM,aAAc;AAChH,YAAI,eAAe,cAAc;AAC7B,gBAAK,KAAK,OAAO,qBAAqB,IAAI,KAAK,CAAC,eAAe,aAAa,CAAE,MAAM,0BAA2B;;AAEnH,yBAAiB,MAAM,KAAK,MAAM,KAAK,MAAM;AAC7C,YAAI,SAAS;AACT,iBAAO;YACH,oBAAoB,6BAA6B;YACjD,WAAW,CAAC;;;AAIhB,gBAAM,IAAI,MAAM,gEACT,SAAS;;;YAWlB;AACF,mCAA2B,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK;AAC5D,YAAI,CAAC,mBAAmB;AACpB,gBAAM,IAAI,MAAM,cAAc,KAAK,gCAC5B,mBAAmB;;AAG9B;AACA;AACI,wBAAc,MAAM,mBAAmB;;AAGvC,wBAAc,+CAA+C,KAAK;AAGlE,cAAI,KAAK,KAAK,SAAS;AACnB,uBAAW;;AAQX,uBAAW;;AAGf,gBAAM,IAAI,MAAM;;AAEpB,8BAAsB,YAAY;AAClC,gCAAwB,YAAY;AACpC,4BAAoB,YAAY;AAChC,4BAAoB,YAAY;AAChC,uBAAe,YAAY;AAC3B,oCAA4B,YAAY;AAExC,YAAI,iBAAiB,QAAQ,mBAAmB;AAC5C,gBAAM,IAAI,MAAM,2BAA2B,KAAK;;AAGpD;AACA;AACA,YAAI,mBAAmB;AACnB,0BAAgB,MAAM,KAAK,YAAY;AACvC,WAAC,aAAa,cAAc;;AAEhC,0BAAkB;UACd;UACA;UACA;UACA;UACA;UACA;UACA;;AAEJ,4BAAoB,YAAY;AAChC,YAAI;AACA,oBAAU,mBAAmB;;AAEjC,eAAO;;YAEL;AACF,2BAAmB,MAAM,QAAQ,KAAK,QAAQ,KAAK,KAAK,KAAK,KAAK;AAClE,iC
AAyB,SAAS;AAClC,2BAAmB,KAAK,oBAAoB;AAC5C,4BAAoB;AACpB,4BAAoB;AAChB,sBAAY,KAAK,GAAG,MAAM;;AAE9B,0BAAkB;AAClB,4BAAoB;AACpB,mCAA2B;AACvB,6BAAmB,aAAa;AAC5B,gBAAI,KAAK,sBAAsB;AAC3B,0BAAY,KAAK,KAAK,mBAAmB;;AAGzC,wBAAU,KAAK,aAAa,OAAO;;;;AAI/C,YAAI,KAAK;AACL,oBAAU,KAAK,GAAG,MAAM,QAAQ,IAAI;;AAExC,wBAAgB,MAAM,yBAAyB,WAAW;UACtD,aAAa,KAAK;UAClB,WAAW,KAAK;UAChB,YAAY,KAAK;;AAErB,eAAO,CAAC,aAAa,wBAAwB;;;AAGrD,gBAAY,mBAAmB;AAYxB;AACH,wBAAkB,IAAI,YAAY;AAClC,8BAAwB,IAAI,YAAY;AACxC,qBAAe,IAAI,UAAU,GAAG;AAChC,qBAAe,kBAAkB,YAAY,IAAI,UAAU,mBAAmB;AAC9E,aAAO,CAAC,SAAS,KAAK;;AAEnB;AACH,aAAO,IAAI,MAAM,YAAY,qBAAqB;;AAE/C,uBAAmB;AACtB,UAAI,OAAO,UAAU,eAChB,gBAAe,QAAQ,YAAY,aAAa;AAIjD,eAAO;;AAGP,qBAAa;AACb,YAAI,MAAM,QAAQ;AACd,mBAAS,IAAI,MAAM,aAAW,aAAa;;AAG3C,mBAAS,aAAa;;AAE1B,YAAI;AACA,iBAAO,KAAK,KAAK;;;AAGzB,aAAO;;AAEX,qBAAiB,mBAAmB;AACpC,qBAAiB,mBAAmB;AAuE7B;AACH,aAAO,IAAI,YAAY,MAAM;;AAO1B;AACH,aAAO,KAAK,MAAM;;AC/TtB;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,iBAAiB;;YAEpB;AACF,eAAO,KAAK;;;;MAIhB;AACI,aAAK,cAAc;;YAEjB;AACF,eAAO,KAAK,YAAY;;;AAwBzB;AACH,UAAI,UAAU,WAAW;AACrB,iCAAyB,eAAe,iBAAiB,QACrD,eAAe,eAAe;AAClC,YAAI;AACA,iBAAO,IAAI,kBAAkB;;AAK7B,kBAAQ,KAAK;AAIb,iBAAO,IAAI,kBAAkB,CAAE,eAAe;;;AAMlD,gBAAQ,KAAK;AAIb,eAAO,IAAI,kBAAkB;UACzB,eAAe;UACf;UACA;UACA;;;;AAmBL;AACH,aAAO,IAAI,iBAAiB;;ACrGhC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACAA;;;;;;;;;;;;;;;;AA+CA;AACI,iBAAW,iBAAgB,GAAG,KAAK,WAAW;AAC9C,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE;AAChB,sBAAgB;AACZ,gBAAQ,wBAA4B,OAAO,GAAG;AAC9C,gBAAY,GAAG,SAAS,eAAmB,QAAQ,MAAM;AACzD,aAAK,CAAC;AACN,eAAO,SAAQ,QAAQ,IAAI;;AAE/B,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,UAAS;;AAE/D,qBAAW,IAAG,CAAE;AC3D5B;;;;;;;;;;;;;;;;AAuCA,wCAAoC,oBAAoB;AACpD,eAAS,iBAAgB,GAAG,KAAK;AACjC,eAAS,iBAAgB,GAAG,KAAK;AACjC,OAAC,IAAI,MAAM,gBAAe,IAAI;AAC9B,sBAAgB;AACZ,aAAK,CAAC,IAAI;AACV,4BAAoB,aAAa,GAAG,MAAM,GAAG,OAAO,KAAK,GAAG,MAAM,GAAG,OAAO;AAC5E,4BAAoB,aAAa,GAAG,MAAM,GAAG,OAAO,KAAK,GAAG,MAAM,GAAG,OAAO;AAC5E,4BAAoB,aAAa,GAAG,MAAM,GAAG,OAAO,KAAK,GAAG,MAAM,GAAG,OAAO;AAC5E,4BAAoB,aAAa,GAAG,MAAM,GAAG,OAAO,KAAK,GAAG,MAAM,GAAG,OAAO;AAC5E,2BAAmB,GAAG,MAAM,MAAM,GAAG;AACrC,2BAAmB,GAAG,MAAM,MAAM,GAAG;AACrC,0BAAkB,eAAmB;AACrC,0BAAkB,eAAmB;AACrC,oCAA4B,cAAc,aAAa,cAAc,KAAK,cAAc;AACxF,gBAAY,GAAG,QAAQ,KAAK,GAAG,QAAQ,KAAK,qBAAqB,MAAM,uJAE3C,oBAAoB;AAChD,gBAAY,gBAAgB,aAAa,MAAM,kCAAkC,qBAC1E,uCAAuC,GAAG,aAC1C,GAAG,wBAAwB,6BACX;AACvB,kCAA0B,YAAY,YAAY,aAAa;AAC/D,yBAAiB,kBAAkB,OAAO,CAAC,aAAa;AACxD,oBAAY,aACR,SAAQ,IAAI,CAAC,WAAW,aAAa,gBACrC,SAAQ,IAAI,CAAC,WAAW,aAAa;AACzC,oBAAY,aACR,SAAQ,IAAI,CAAC,WAAW,aAAa,gBACrC,SAAQ,IAAI,CAAC,WAAW,aAAa;AACzC,sBAAc,SAAQ,YAAY,KAAK,KAAK,YAAY;AACxD,eAAO,SAAQ,OAAO;;AAE1B,qBAAe,CAAE,GAAG,IAAI,GAAG;AAC3B,oBAAc,CAAE,YAAY;AAC5B,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,cAAa;;AAEnE,mBAAU,IAAG,CAAE;AC5E3B;;;;;;;;;;;;;;;;AAwCA,+CAA2C,cAAc;AACrD,UAAI,QAAQ;AACR,cAAM,IAAI,MAAM,iDAAiD;;AAErE,uBAAiB,iBAAgB,SAAS,WAAW,UAAU;AAC/D,uBAAiB,CAAC,GAAG,SAAS,OAAO;AACrC,sBAAgB;AACZ,aAAK,CAAC;AACN,eAAO,SAAQ,SAAQ,OAAO,SAAQ,UAAU,CAAC,SAAS,QAAQ,OAAO,SAAS,WAAW;;AAEjG,qBAAe,CAAE,SAAS;AAC1B,oBAAc,CAAE,OAAO,SAAS;AAChC,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,SAAQ;;AAE9D,oBAAU,IAAG,CAAE;ACtD3B;;;;;;;;;;;;;;;;AAwCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,UAAI,QAAQ;AACR,eAAO,GAAG,MAAM,IAAI,UAAU,GAAG;;AAErC,cAAY,GAAG,SAAS,KAAK,QAAQ,MAAM,qCAAqC,GAAG,kCAClD;AACjC,WAAK,QAAQ;AACT,gBAAY,QAAQ,KAAK,OAAO,GAAG,MAAM,MAAM,+CAA+C,GAAG,OAAO,aACxF;;AAEpB,UAAI,GAAG,QAAQ;AACX,eAAO,GAAG;;AAEd,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE;AAChB,aAAO,QAAO,cAAc,cAAW,SAAQ,UAAU,IAAI,OAAO,QAAQ,MAAqB,YAAW;;AAEpG,uBAAa,IAAG,CAAE;AC1D9B;;;;;;;;;;;;;;;;AAoDO;AACH,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,2BAAqB,iBAAgB,aAAa,eAAe;AACjE,cAAY,cAAc
,QAAQ,aAAa,KAAK,OAAO,UAAU,aAAa,MAAM,+DACzE;AACf,cAAY,QAAQ,SAAS,GAAG,MAAM,gDAAgD,QAAQ;AAC9F,cAAY,aAAa,SAAS,GAAG,MAAM,qDAC5B,aAAa;AAC5B,cAAY,QAAQ,MAAM,OAAO,aAAa,MAAM,IAAI,MAAM,uCACvD,QAAQ,MAAM,UAAU,aAAa,MAAM;AAElD,cAAY,aAAa,KAAK,OAAO,UAAU,aAAa,MAAM,4DAC3D;AAGP,2BAAqB,QAAO,MAAK,SAAS,UAAU;AACpD,gCAA0B,QAAO,MAAK,cAAc,UAAU;AAC9D,4BAAsB,WAAU;AAChC,sBAAgB,OAAO,eAAe;AACtC,aAAO,MAAK,SAAS;;AAElB,4BAAwB,IAAG,CAAE;ACzEpC;;;;;;;;;;;;;;;;;;;;ACAA;;;;;;;;;;;;;;;;AA0CO;AACH,qBAAc;AACd,UAAI,SAAS,QAAQ,MAAM,WAAW;AAClC,cAAM,IAAI,MAAM;;AAEpB,4BAAsB,YAAW,QAAQ;AACzC,UAAI,cAAc,WAAW,KAAK,cAAc,WAAW;AACvD,cAAM,IAAI,MAAM;;AAEpB,UAAI,cAAc,WAAW,KAAK,SAAS;AACvC,cAAM,IAAI,MAAM;;AAGpB,aAAO,YAAW,QAAQ,OAAO,eAAe;;ACvDpD;;;;;;;;;;;;;;;;AAwBA;AAwBA,+CAA2C;AAEvC,UAAI,cAAc;AACd,cAAM,IAAI,MAAM;;AAEpB,UAAI,UAAU;AACV,cAAM,IAAI,MAAM;;AAEpB,wBAAkB;AAClB,wBAAkB;AAClB,oBAAc;AACd,oBAAc;AACd,yBAAmB;AACnB,UAAI,OAAO,gBAAgB;AACvB,sBAAc;iBAET,OAAQ,cAAe,eAAe,kBAAkB;AAC7D,sBAAc;iBAET,OAAQ,qBAAsB,eACnC,kBAAkB;AAClB,kBAAU;iBAEL,OAAQ,qBAAsB,eACnC,kBAAkB;AAClB,kBAAU;iBAGL,OAAO,cAAc;AAC1B,uBAAe;;AAGf,cAAM,IAAI,MAAM,qPAID,OAAO,YAAY;;AAEtC,UAAI;AACA,8CAAsC;AACtC,YAAI,WACA,OAAO,aACH;AACJ,gBAAM,IAAI,MAAM;;;AAMxB,qBAAe,WAAU,YAAY,QAAO;AAC5C,UAAI,UAAU;AACV,uBAAe,CAAE;AACjB,sBAAc,CAAE;AAChB,eAAO,QAAO,UAAU,YAAY,QAAQ;;AAEhD,8BAAwB,UACpB;QACI,OAAO;QACP,OAAO;UAEX,CAAC,OAAO,OAAO,OAAO;AAC1B;AACA,UAAI;AACA,eAEI,OAAO,WAAW,MAAM,aAAa,GAAG,GAAG,OAAO,QAAQ;iBAEzD,eAAe;AACpB,eAAO,OAAO;iBAET,WAAW;AAChB,YAAI,uBAAuB;AACvB,gCAAsB,SAAS,cAAc,UAAU,WAAW;;AAEtE,4BAAoB,OAAO,QAAQ;AACnC,4BAAoB,OAAO,SAAS;AACpC,4BAAoB,UAAU,QAAQ,GAAG,GAAG,OAAO;AACnD,eAAO,oBAAoB,aAAa,GAAG,GAAG,OAAO,QAAQ;;AAEjE;AACA,UAAI,gBAAgB;AAChB,iBAAS,IAAI,WAAW;;AAGxB,0BAAkB,QAAQ;AAC1B,iBAAS,IAAI,WAAW,YAAY;AACpC,qBAAa,GAAG,IAAI,WAAW;AAC3B,6BAAmB,GAAG,UAAU,aAAa,EAAE;AAC3C,mBAAO,IAAI,cAAc,WAAW,KAAK,IAAI,IAAI;;;;AAI7D,uBAAiB,CAAC,QAAQ,OAAO;AACjC,aAAO,SAAS,QAAQ,UAAU;;AAqB/B;AACH,iBAAW,iBAAgB,KAAK,OAAO;AACvC,UAAI,CAAE,gBAAe;AAEjB,kCAA0B;AAC1B,eAAO,MAAK,mBAAmB;AAC/B,0BAAkB;;AAEtB,UAAI,KAAK,SAAS,KAAK,KAAK,SAAS;AACjC,cAAM,IAAI,MAAM,wDAAwD,KAAK;;AAEjF,8BAAwB,KAAK,MAAM,MAAM,GAAG;AAC5C,oBAAc,KAAK,SAAS,IAAI,IAAI,KAAK,MAAM;AAC/C,UAAI,QAAQ,KAAK,UAAU;AACvB,cAAM,IAAI,MAAM,0DACS;;AAE7B,UAAI,KAAK,UAAU,aAAa,KAAK,UAAU;AAC3C,cAAM,IAAI,MAAM,kCAAkC,KAAK;;AAG3D,oBAAa,MAAM,KAAK;AACxB,yBAAmB,KAAK,UAAU,YAAY,MAAM;AACpD,oBAAc,IAAI,kBAAkB,QAAQ,SAAS;AACrD,mBAAa,GAAG,IAAI,SAAS,OAAO,EAAE;AAClC,qBAAa,CAAC,GAAG,GAAG,GAAG;AACvB,qBAAa,GAAG,IAAI,OAAO;AACvB,wBAAc,MAAK,IAAI,QAAQ;AAC/B,cAAI,KAAK,UAAU;AACf,gBAAI,QAAQ,KAAK,QAAQ;AACrB,oBAAM,IAAI,MAAM,mFACqB;;qBAGpC,KAAK,UAAU;AACpB,gBAAI,QAAQ,KAAK,QAAQ;AACrB,oBAAM,IAAI,MAAM,mFACuB;;;AAG/C,cAAI,UAAU;AACV,iBAAK,KAAK,QAAQ;AAClB,iBAAK,KAAK,QAAQ;AAClB,iBAAK,KAAK,QAAQ;;AAGlB,iBAAK,KAAK,QAAQ;;;AAG1B,kBAAU,IAAI;AACd,cAAM,IAAI,KAAK,KAAK,MAAM,KAAK;AAC/B,cAAM,IAAI,KAAK,KAAK,MAAM,KAAK;AAC/B,cAAM,IAAI,KAAK,KAAK,MAAM,KAAK;AAC/B,cAAM,IAAI,KAAK,KAAK,MAAM,KAAK;;AAEnC,UAAI,UAAU;AACV,eAAO,QAAQ;AACf,eAAO,SAAS;AAChB,oBAAY,OAAO,WAAW;AAC9B,0BAAkB,IAAI,UAAU,OAAO,OAAO;AAC9C,YAAI,aAAa,WAAW,GAAG;;AAEnC,UAAI,SAAS;AACT,aAAK;;AAET,aAAO;;AAEJ,uBAAmB,IAAG,CAAE;;;;;;AC5NxB;AACH,UAAI,QAAO,OAAO;AACd,cAAM,IAAI,MAAM,4EACS,QAAO;;AAEpC,UAAI,QAAQ,OAAO;AACf,cAAM,IAAI,MAAM,8EACS,QAAQ;;AAErC,UAAI,QAAQ,UAAU;AAClB,cAAM,IAAI,MAAM,yEACU,QAAQ;;AAEtC,UAAI,QAAQ,MAAM,QAAQ,OAAO,KAAK,QAAO;AACzC,cAAM,IAAI,MAAM,iEACT,QAAQ,MAAM,QAAQ,OAAO,UAAU,QAAO;;AAEzD,UAAI,QAAO,SAAS;AAChB,cAAM,IAAI,MAAM,mEACK,QAAO;;AAEhC,2BAAqB,QAAQ;AAC7B,wBAAkB,aAAa,aAAa,SAAS;AAGrD,oBAAc;AACd,mBAAa,GAAG,IAAI,aAAa,SAAS,GAAG,EAAE;AAC3C,mBAAW,aAAa;;AAE5B,yBAAmB,QAAO;AAC1B,0BAAoB,aAAa;AACjC,kBAAY;AA
CZ,sBAAgB;AAChB,mBAAa,WAAW,IAAI,QAAO,MAAM,EAAE;AACvC,qBAAa,WAAW;AACxB,oBAAY,KAAK,WAAW;;AAEhC,sBAAgB;QAAC,GAAG,gBAAe,QAAO,OAAO,IAAI,YAAU,SAAS;QACpE;QAAG,MAAM,GAAG;AAChB,aAAO,CAAC,aAAa,SAAS,WAAW;;;;;;ACzCtC;AACH,uBAAkB,QAAQ,OAAO,IAAK,QAAQ,MAAM,QAAQ,OAAO,KAAK;AACxE,uBAAkB,QAAQ,OAAO,IAAK,QAAQ,OAAO,IAAI;AACzD,yBAAmB,6FACyB,QAAQ,yBAC5B,QAAQ,iBAAiB,oBAC9B,2BAA2B;AAC9C,UAAI,QAAQ,OAAO;AACf,cAAM,IAAI,MAAM,aAAa,kBAAkB;;AAEnD,UAAI,MAAM,SAAS,WAAY,SAAQ,OAAO;AAC1C,cAAM,IAAI,MAAM,aACZ,0BAA0B,WAAY,SAAQ,OAAO;;AAE7D,UAAI,QAAQ,SAAS,WAAW,MAAM,SAAS;AAC3C,cAAM,IAAI,MAAM,aAAa,mBAAmB,WAAW,MAAM,SAAS;;AAE9E,mBAAa,GAAG,IAAI,UAAU,EAAE;AAC5B,YAAI,QAAQ,MAAM,OAAO,QAAQ,MAAM;AACnC,gBAAM,IAAI,MAAM,aACZ,kBAAkB,OAAO,QAAQ,MAAM,wBAAwB,OAAO,QAAQ,MAAM;;;AAGhG,mBAAa,GAAG,IAAI,QAAQ,OAAO,UAAU,EAAE;AAC3C,YAAI,QAAQ,MAAM,IAAI,cAAc,MAAM,IAAI;AAC1C,gBAAM,IAAI,MAAM,aACZ,kBAAkB,IAAI,cAAc,QAAQ,MAAM,IAAI,uBAAuB,IAAI,cAAc,MAAM,IAAI;;;;AAWlH;AACH,UAAI,QAAQ,OAAO;AACf,cAAM,IAAI,MAAM,+EACS,QAAQ;;AAErC,UAAI,QAAQ,OAAO;AACf,cAAM,IAAI,MAAM,+EACS,QAAQ;;AAErC,UAAI,QAAQ,UAAU;AAClB,cAAM,IAAI,MAAM,0DAA0D,QAAQ;;AAEtF,UAAI,MAAM,SAAS;AACf,cAAM,IAAI,MAAM,6DAA6D;;AAEjF,UAAI,MAAM,WAAW;AACjB,YAAI,QAAQ,SAAS;AACjB,gBAAM,IAAI,MAAM,sDAAsD,QAAQ;;AAElF,YAAI,QAAQ,SAAS;AACjB,gBAAM,IAAI,MAAM,sDAAsD,QAAQ;;;AAGtF,2BAAoB,OAAO,SAAS;;AAWjC;AAEH,0BAAoB,QAAQ,MAAM;AAClC,wBAAmB,cAAc,IAAK,QAAQ,MAAM,cAAc,KAAK;AAIvE,sBAAgB,MAAM;AACtB,sBAAgB;AAChB,mBAAa,WAAW,IAAI,SAAS,EAAE;AACnC,qBAAa,MAAM;;AAEvB,2BAAsB,YAAY,IAAK,IAAI;AAC3C,yBAAmB,eAAc,QAAQ,SAAS;AAClD,sBAAgB,CAAC,GAAG,gBAAe,MAAM,MAAM,GAAG,aAAa;AAC/D,yBAAmB,eAAc;AACjC,aAAO,CAAE,WAAW,YAAY,WAAW,SAAS;;;;;;;;AC9FxD;;;;;;;;;;;;;;;;AAiBO;AACH,wBAAkB,OAAM,MAAM;AAC9B,cAAY,cAAc,MAAM,QAAQ,MAAM,iBAAiB,+BAA+B,2CAC1D;AACpC,cAAY,cAAc,KAAK,QAAQ,MAAM,iBAAiB,8BAA8B,0CACxD;AACpC,mBAAa,GAAG,IAAI,WAAW,EAAE;AAC7B,gBAAY,MAAM,KAAK,KAAK,MAAM,OAAM,MAAM,IAAI,MAAM,iBAAiB,qBAAqB,aAAa,OACnG,MAAM,KAAK,KAAK,kCAAkC,OAAO,OAAM,MAAM;;;AAI9E;AACH,mBAAa;AACb,iBAAW;AACX,aAAO,OAAO;AACV,YAAI,OAAO;AACP,eAAK,KAAK;;AAEd,gBAAQ;AACR;;AAEJ,aAAO;;AAGJ;AACH,mBAAa;AACb,sBAAgB,GAAG,OAAO,MAAM,QAAQ;AACpC,aAAK,QAAQ,KAAK,KAAM,KAAI,QAAQ,MAAM,SAAS,QAAQ;;AAE/D,aAAO;;AAIJ;AACH,yBAAmB,CAAC,GAAG;AACvB,mBAAa,WAAW,QAAQ,IAAI,WAAW,QAAQ;AACnD,mBAAW,KAAK;;AAEpB,mBAAa,GAAG,IAAI,eAAe;AAC/B,YAAI,MAAM;AACN,qBAAW,0BAA0B;;AAGrC,qBAAW,OAAO,wBAAwB,GAAgC;AAC1E,qBAAW;;;AAGnB,aAAO;;AAEX;AACI,UAAI,kBAAkB;AAClB,eAAO;;AAEX,aAAO,iBAAkB,iBAAgB;;AAE7C;AACI,yBAAmB;AACnB,mBAAa,GAAG,IAAI,eAAe;AAC/B,mBAAW,KAAK,yBAAyB;;AAE7C,aAAO;;AAGJ;AACH,wBAAkB,WAAW;AAC7B,4BAAsB,IAAI,MAAM,4BAA4B,IAAI,MAAM,gCAAgC,IAAI,MAAM;AAChH,UAAI,aAAa,UAAU,sBAAsB;AAC7C,0BAAkB,aAAa;AAG/B,8BAAsB,sBAAsB;AAC5C,0BAAkB,4BAA2B,WAAW,WAAW,eAAe,OAAO;AACzF,wBAAgB,2BAA0B,SAAS,WAAW,eAAe,KAAK;AAClF,4BACI,uBAAsB,SAAS,WAAW,eAAe;;AAG7D,wBAAgB,GAAG,OAAO,WAAW;AACjC,0BAAgB,QAAQ,cAAa,WAAW,OAAO,SAAS,YAAY,MAAM;AAClF,wBAAc,QACV,aAAY,SAAS,KAAK,SAAS,YAAY,MAAM;AACzD,4BAAkB,QAAQ,gBAAe,SAAS,MAAM;;;AAGhE,aAAO;QACH,OAAO;QACP,KAAK;QACL,SAAS;;;AAKV;AACH,yBAAmB,CAAC,GAAG;AACvB,yBAAmB,eAAc,eAAe;AAChD,sBAAgB,GAAG,OAAO,WAAW,QAAQ;AACzC,YAAI,WAAW,QAAQ,QAAQ;AAC3B,qBAAW,QAAQ;;AAGnB,+BAAqB,iBAAgB,wBAAwB,eAAe;AAC5E,8BAAoB,cAAc;AAClC,cAAI,YAAY,KAAK;AACjB,4BAAgB;;AAEpB,qBAAW,QAAQ;;;AAG3B,aAAO;;AAIJ;AACH,yBAAmB,CAAC,GAAG;AACvB,yBAAmB,eAAc,eAAe;AAChD,sBAAgB,GAAG,OAAO,WAAW,QAAQ;AACzC,YAAI,WAAW,QAAQ,QAAQ;AAC3B,qBAAW,QAAQ,OAAO;;AAG1B,+BAAqB,iBAAgB,wBAAwB,eAAe;AAC5E,8BAAoB,YAAY;AAChC,cAAI,UAAU,KAAK;AACf,4BAAgB,OAAO;;AAE3B,qBAAW,QAAQ;;;AAG3B,mBAAa,GAAG,IAAI,WAAW,QAAQ;AAEnC,yBAAiB,WAAW;AAC5B,YAAI,WAAW,KAAK;AAChB,qBAAW,MAAM;;AAErB,mBAAW,KAAK,OAAW,GAAG,WAAW,IAAI,WAAW;;AAE5D,aAAO;;AAEJ;AACH,mBAAa,QAAQ;AA
CrB,UAAI,eAAgB,KAAK,QAAS,UAAU;AACxC,iBAAS;;AAEb,aAAO;;AAEJ;AAEH,kBAAY,aAAa;AACzB,qBAAe,QAAQ,SAAS;AAGhC,UAAI,YAAY,KAAK,QAAQ,eAAe,KAAK,QAAQ,SAAS;AAC9D,YAAI,SAAS;AAIT,kBAAQ,OAAO;;AAIf,kBAAQ,OAAO;;;AAIvB,uBAAiB,WAAW;AAC5B,UAAI,QAAQ;AACR,iBAAS;;AAGb,cAAQ,OAAW,GAAG,OAAO,WAAW;AACxC,aAAO;;AAEJ;AAEH,iBAAW,YAAY;AACvB,qBAAe,QAAQ,SAAS;AAGhC,UAAI,UAAW,KAAK,QAAS,eAAgB,KAAK,QAAS,QAAQ;AAC/D,YAAI,SAAS;AAGT,iBAAO,OAAO;;AAId,iBAAO,OAAO;;;AAItB,uBAAiB,WAAW;AAC5B,UAAI,OAAO;AACP,gBAAQ;;AAKZ,UAAI,SAAS;AAET,eAAO,OAAW,GAAG,MAAM;;AAI3B,eAAO,OAAW,IAAI,MAAM,WAAW;;AAE3C,aAAO;;AAMJ;AAEH,4BAAsB,KAAK;AAC3B,mBAAa,GAAG,IAAI,KAAK,QAAQ;AAC7B,YAAI,KAAK,KAAK;AACV,4BAAkB;AAClB;;;AAGR,mBAAa,kBAAkB,GAAG,IAAI,KAAK,QAAQ;AAC/C,YAAI,MAAM,KAAK,KAAK,KAAK,OAAO,MAAM;AAClC,iBAAO;;;AAGf,aAAO;;AAEJ;AACH,uBAAiB,MAAM,SAAS,IAAI,MAAM,MAAM,SAAS,KAAK;AAC9D,mBAAa,GAAG,IAAI,MAAM,SAAS,GAAG;AAClC,sBAAc,MAAM,KAAK,QAAQ;;AAErC,aAAO;;AAEJ;AAEH;AACA,oBAAc,EAAE,MAAM;AACtB,UAAI,OAAO,UAAU;AACjB,iBAAS,CAAC,OAAO,GAAG,IAAI,MAAM,QAAQ,GAAG,KAAK;iBAEzC,MAAM,SAAS;AACpB,iBAAS,MAAM,OAAO,IAAI,MAAM,QAAQ,MAAM,QAAQ,KAAK;;AAG3D,iBAAS,MAAM;;AAEnB,aAAO,QAAQ;AACX,gBAAY,MAAM,IAAI,MAAM;;AAEhC;AACA,UAAI,QAAQ;AACR,gBAAQ,IAAI,MAAM,OAAO,KAAK;iBAEzB,OAAO,SAAS;AACrB,gBAAQ,CAAC,MAAM,GAAG,IAAI,MAAM,QAAQ,GAAG,KAAK;iBAEvC,KAAK,SAAS;AACnB,gBAAQ,KAAK,OAAO,IAAI,MAAM,QAAQ,KAAK,QAAQ,KAAK;;AAGxD,gBAAQ;;AAEZ,cAAQ,MAAM,IAAI;AACd,YAAI,KAAK;AACL,iBAAO;;AAGP,kBAAY,MAAM,IAAI,MAAM,qDACrB,mCAAmC;AAC1C,iBAAO,EAAE,MAAM,KAAK,OAAO;;;AAGnC,aAAO,CAAC,QAAQ;;;;;;;;;;;;;;;;;;ACnSpB;;;;;;;;;;;;;;;;;MAmCI;AACI,eAAO,KAAK,YACP;;aAWF;AACH,eAAO,IAAI,IAAI;;;;MAWnB;AACI,aAAK,eAAe;;aAKjB;AACH,YAAI,iBAAiB,YAAY;AAC7B,2BAAiB,WAAW,IAAI;;AAEpC,eAAO,iBAAiB;;aAKrB;AACH,yBAAiB,SAAS,aAAa,IAAI,aACvC,CAAC,KAAK,IAAI;;;AA2Bf;AACH,cAAO,IAAI,aAAa,MAAM,MAAM;AAEpC,cAAO,OAAO,IAAI,cAAc,UAAU,MAAM,wDAC5C,OAAO,IAAI;AACf,cAAO,IAAI,UAAU,SAAS,GAAG,MAAM;AAEvC,uBAAiB,SAAS;;;;;;;;AC/G9B;;;;;;;;;;;;;;;;AAmBA,iCAA6B;AACtB,iCAA6B;AAC7B;AACH,UAAI,YAAW;AACX,mBAAU;;AAEd,aAAO,sBAAsB,QAAQ,UAAU,UAAU,SAAS,GAAG,GAAG;;AAErE;AACH,aAAO,QAAO,QAAQ,qBAAqB,KAAK,uBAC5C;;AAER;AACI,2BAAqB;AACrB,UAAI,cAAa,WAAW,cAAa;AACrC,yBAAiB;;AAErB,UAAI,cAAa,WAAW,cAAa;AACrC,yBAAiB;;AAErB,UAAI;AACA,sBAAc,OAAO,YAAY;AACjC,sBAAc,SAAS,YAAY;AACnC,YAAI,UAAU;AACV,gBAAM,IAAI,MAAM,yCAAyC,oBACxC;;;AAGzB,UAAI,MAAM,QAAQ,WAAW,MAAM,QAAQ;AACvC,4BAAoB,YAAW;AAC/B,8BAAsB,YAAW;AACjC,YAAI,CAAC,aAAY,aAAa;AAC1B,gBAAM,IAAI,MAAM,0CACA,4BAA4B;;;AAGpD,yBAAmB,cAAa,UAAU,SAAS,SAAQ;AAC3D,2BAAqB,cAAa,YAC9B,WACA,SAAQ;AACZ,UAAI,WAAW,WAAW,aAAa;AACnC,cAAM,IAAI,MAAM,yCAAyC,WAAW,uBACnD,aAAa;YACb;YACA;;AAErB,mBAAa,GAAG,IAAI,aAAa,QAAQ,EAAE;AACvC,kBAAU,WAAW;AACrB,kBAAU,aAAa;AACvB,YAAI,CAAC,UAAU,GAAG;AACd,gBAAM,IAAI,MAAM,yBAAyB,QAAQ,eAAe,QAAQ;YACvD;YACA;;;;AAItB;AACH,WAAK,KAAK,MAAM,KAAK,QAAQ,MAAM;;AAEhC;AACH,mBAAY,OAAO,aAAa,YAAY,OAAO,aAAa,YAC5D,OAAO,aAAa,YACpB,CAAC,YACD;AACJ,UAAI,UAAS,WAAW,UAAS,OAAO,OACpC,UAAS,aAAa,UAAS,SAAS;AAExC,eAAO,sBAAsB,QAAQ,MAAK,UAAU,KAAK;;AAE7D,aAAO,sBAAsB,QAAQ,UAAU,UAAU,SAAS,GAAG,GAAG;;AAErE;AACH,UAAI,YAAW;AACX,mBAAU;;AAEd,UAAI,CAAC,SAAS,GAAG,GAAG;AAChB,cAAM,IAAI,MAAM,8BAA8B,mBAAmB;;;AAGzE;AACI,UAAI,CAAC,SAAS,MAAM,CAAC,SAAS;AAC1B,eAAO;;AAEX,UAAI,MAAM,MAAM,MAAM,MAAM,KAAK,IAAI,IAAI,KAAK;AAC1C,eAAO;;AAEX,aAAO;;AAEJ;AACH,mBAAa,GAAG,IAAI,OAAO,QAAQ;AAC/B,YAAI,OAAO,KAAK,OAAO,OAAO,KAAK;AAC/B,gBAAM,IAAI,MAAM,sBAAsB,OAAO,WAAW,cAAc;;;;AAI3E;AAGH,aAAO,IAAI,aAAa,SAAS,QAAQ,IAAI,aAAa;;;;;;;;;;;;;ACrH9D;AAEK,qBAAW;ACFhB;;;;;;;;;;;;;;;;AA0BO;AACH,aAAM,IAAI,QAAQ;;AAgBf;AACH,aAAM,IAAI,SAAS;;AAGhB;AACH,aAAM,IAAI,gCAAgC;AAC1C,cAAQ,KAAK;;AAGV;AACH,UAAI,OAAM,QAAQ;AACd,gBAAQ,KAAK,MAAM;;;AAI3B,6BAAwB;AAMjB;AACH,cAAO;;AAOJ
;AACH,aAAO;;AAuBJ;AACH,aAAO,QAAO;;AA+BX;AACH,aAAO,QAAO,QAAQ;;AA0CnB;AACH,aAAO,QAAO,KAAK,UAAU;;AAa1B;AACH,sBAAgB,uBAAsB;AACtC,cAAQ,QAAQ,aAAU,QAAO;;AAkC9B;AACH,aAAO,QAAO,KAAK;;AA2BhB;AACH,aAAO,QAAO,KAAK;;AAiBhB;AACH,aAAO,QAAO,WAAW;;AAStB;AACH,aAAO,QAAO;;AAQX;AACH,aAAO,QAAO;;AAOX;AACH,cAAO,cAAc;;AAMlB;AACH,aAAO,QAAO,YAAY;;AAOvB;AACH,aAAO,QAAO,mBAAmB;;AAiB9B,wDAAmD;AACtD,aAAO,QAAO,gBAAgB,MAAM,SAAS;;AAU1C;AACH,aAAO,QAAO;;AAQX;AACH,aAAM,YAAY,cAAc;;AC5VpC;;;;;;;;;;;;;;;;AA4CA;AACI,eAAS,iBAAgB,GAAG,KAAK;AACjC,eAAS,iBAAgB,GAAG,KAAK;AACjC,OAAC,IAAI,MAAM,gBAAe,IAAI;AAC9B,sBAAgB;AACZ,oBAAY,SAAQ,IAAI,IAAI;AAC5B,aAAK,CAAC,IAAI;AACV,eAAO;;AAEX,qBAAe,CAAE,GAAG,IAAI,GAAG;AAC3B,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAqB;;AAE1D,kBAAO,IAAG,CAAE;ACxDxB;;;;;;;;;;;;;;;;AA+CA;AACI,eAAS,iBAAgB,GAAG,KAAK;AACjC,eAAS,iBAAgB,GAAG,KAAK;AACjC,OAAC,IAAI,MAAM,gBAAe,IAAI;AAC9B,sBAAgB;AACZ,oBAAY,SAAQ,SAAS,IAAI;AACjC,aAAK,CAAC,IAAI;AACV,eAAO;;AAEX,qBAAe,CAAE,GAAG,IAAI,GAAG;AAC3B,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAqB;;AAE1D,qBAAY,IAAG,CAAE;AC3D7B;;;;;;;;;;;;;;;;AA8CA;AACI,eAAS,iBAAgB,GAAG,KAAK;AACjC,eAAS,iBAAgB,GAAG,KAAK;AACjC,OAAC,IAAI,MAAM,gBAAe,IAAI;AAC9B,UAAI,GAAG,UAAU,WAAW,GAAG,UAAU;AACrC,eAAO,SAAS,IAAI;;AAExB,sBAAgB;AACZ,oBAAY,SAAQ,WAAW,IAAI;AACnC,aAAK,CAAC,IAAI;AACV,eAAO;;AAEX,qBAAe,CAAE,GAAG,IAAI,GAAG;AAC3B,oBAAc;AACd,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAqB,MAAK;;AAE/D,gBAAO,IAAG,CAAE;AC9DxB;;;;;;;;;;;;;;;;AA8CA;AACI,eAAS,iBAAgB,GAAG,KAAK;AACjC,eAAS,iBAAgB,GAAG,KAAK;AACjC,OAAC,IAAI,MAAM,gBAAe,IAAI;AAC9B,sBAAgB;AACZ,oBAAY,SAAQ,SAAS,IAAI;AACjC,aAAK,CAAC,IAAI;AACV,eAAO;;AAEX,qBAAe,CAAE,GAAG,IAAI,GAAG;AAC3B,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAqB;;AAE1D,iBAAO,IAAG,CAAE;AC1DxB;;;;;;;;;;;;;;;;AAgCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,aAAK,CAAC;AACN,YAAI,GAAG,UAAU;AACb,iBAAO,SAAQ,WAAW;;AAE9B,eAAO,SAAQ,IAAI;SACpB,QAAQ,MAAiB;;AAEpB,gBAAO,IAAG,CAAE;AC3CxB;;;;;;;;;;;;;;;;AA+BA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,oBAAY,SAAQ,KAAK;AACzB,aAAK,CAAC;AACN,eAAO;SACR,QAAQ,MAAiB;;AAEpB,iBAAQ,IAAG,CAAE;ACxCzB;;;;;;;;;;;;;;;;AAiCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,oBAAY,SAAQ,MAAM;AAC1B,aAAK,CAAC;AACN,eAAO;SACR,QAAQ,MAAiB;;AAEpB,kBAAS,IAAG,CAAE;AC1C1B;;;;;;;;;;;;;;;;AAkCA;AACI,cAAY,MAAM,QAAQ,UAAU,MAAM;AAC1C,cAAY,QAAQ,UAAU,GAAG,MAAM,uDAChC,QAAQ;AACf,uBAAiB,QAAQ,IAAI,UAAU,iBAAgB,GAAG,UAAU,KAAK;AACzE,0BAAoB,SAAS;AAC7B,eAAS,QAAQ;AACb,YAAI,EAAE,UAAU,YAAY;AACxB,gBAAM,IAAI,MAAM;;;AAGxB,eAAS,QAAQ;AACb,YAAI,CAAC,aAAiB,EAAE,OAAO,YAAY;AACvC,gBAAM,IAAI,MAAM;;;AAGxB,sBAAgB;AACZ,oBAAY,SAAQ,KAAK;AACzB,aAAK;AACL,eAAO;;AAEX,qBAAe;AACf,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB;;AAEtD,iBAAQ,IAAG,CAAE;AC1DzB;;;;;;;;;;;;;;;;AAqBO;AACH,mBAAa,GAAG,IAAI,KAAK,QAAQ,EAAE;AAC/B,YAAI,KAAK,KAAK,SAAS,IAAI,OAAO,OAAO,IAAI;AACzC,iBAAO;;;AAGf,aAAO;;AAEJ;AACH,mBAAa,UAAU,SAAS,UAAU;AAC1C,kBAAY;AACZ,mBAAa;AACb,sBAAgB;AAChB,qBAAe,GAAG,MAAM,MAAM;AAC1B,YAAI,KAAK,QAAQ,SAAS;AACtB,cAAI,KAAK,UAAU;;AAGnB,cAAI,KAAK,UAAU;;;AAG3B,aAAO;;AAEJ;AACH,uBAAiB;AACjB,mBAAa,OAAO;AACpB,qBAAe,GAAG,MAAM,MAAM;AAC1B,YAAI,KAAK,QAAQ,SAAS;AACtB,mBAAS,KAAK,OAAO;;;AAG7B,0BAAoB,KAAK,IAAI,SAAO,OAAO;AAC3C,aAAO,CAAC,UAAU;;AAEf;AACH,6BAAuB,KAAK,IAAI,OAAK;AACrC,aAAO,kBAAiB,OAAO,gBAAgB;;AAE5C;AACH,cAAY,sBAAqB,MAAM,OAAO,MAAM,GAAG,uDACvC,iBAAiB;;AAO9B;AACH,UAAI,sBAAqB,MAAM;AAC3B,eAAO;;AAEX,qBAAe;AACf,mBAAa,GAAG,IAAI,MAAM,EAAE;AACxB,YAAI,KAAK,QAAQ,OAAO;AACpB,iBAAO,KAAK;;;AAGpB,WAAK,QAAQ,UAAQ,OAAO,KAAK;AACjC,aAAO;;AAGJ;AACH,aAAO,KAAK,IAAI,aAAa,CAAC,GAAG,OAC5B,KAAK,UAAU,EAAE,KAAK,EAAE,IACxB,IAAI,OAAK,EAAE;;AAEb;AACH,kBAAY;AACZ,mBAAa,OAAO,SAAS,IAAI
,MAAM,EAAE;AACrC,YAAI,KAAK;;AAEb,aAAO;;AC5FX;;;;;;;;;;;;;;;;AAqDA,4BAAwB,iBAAiB;AACrC,eAAS,iBAAgB,GAAG,KAAK,OAAO;AACxC,sBAAgB;AACZ,yBAAiB,gBAAe,MAAM,GAAG;AACzC,mBAAW;AACX,6BAAqB,oBAAmB,MAAM,GAAG;AACjD,YAAI,gBAAgB;AAChB,eAAK,WAAU,IAAI;AACnB,iBAAO,kBAAiB,KAAK,QAAQ,GAAG;;AAE5C,oBAAY,SAAQ,IAAI,IAAI;AAC5B,YAAI;AACA,2BAAiB,sBAAqB,IAAI,OAAO;AACjD,iBAAO,SAAQ,KAAK;;AAExB,eAAO;;AAEX,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,MAAM;AACtB,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,KAAK;;AAE3D,gBAAO,IAAG,CAAE;AC1ExB;;;;;;;;;;;;;;;;AAqDA,4BAAwB,iBAAiB;AACrC,eAAS,iBAAgB,GAAG,KAAK,OAAO;AACxC,sBAAgB;AACZ,yBAAiB,gBAAe,MAAM,GAAG;AACzC,mBAAW;AACX,6BAAqB,oBAAmB,MAAM,GAAG;AACjD,YAAI,gBAAgB;AAChB,eAAK,WAAU,IAAI;AACnB,iBAAO,kBAAiB,KAAK,QAAQ,GAAG;;AAE5C,oBAAY,SAAQ,IAAI,IAAI;AAC5B,YAAI;AACA,2BAAiB,sBAAqB,IAAI,OAAO;AACjD,iBAAO,SAAQ,KAAK;;AAExB,eAAO;;AAEX,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,MAAM;AACtB,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,KAAK;;AAG3D,gBAAO,IAAG,CAAE;AC3ExB;;;;;;;;;;;;;;;;AA+CA,+BAA2B;AACvB,eAAS,iBAAgB,GAAG,KAAK;AACjC,sBAAgB;AACZ,aAAK,CAAC;AACN,mBAAW,gBAAoB,MAAM,GAAG;AACxC,6BAAqB,oBAA6B,MAAM,GAAG;AAC3D,YAAI,gBAAgB;AAChB,eAAK,WAAU,IAAI;AACnB,iBAAO,kBAA2B,KAAK,QAAQ,GAAG;;AAEtD,eAAO,SAAQ,OAAO,IAAI,KAAK;;AAEnC,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE;AAChB,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,SAAQ;;AAE9D,mBAAU,IAAG,CAAE;AC/D3B;;;;;;;;;;;;;;;;AA+CA,+BAA2B;AACvB,eAAS,iBAAgB,GAAG,KAAK;AACjC,sBAAgB;AACZ,aAAK,CAAC;AACN,YAAI,QAAQ;AACR,iBAAO;;AAEX,mBAAW,gBAAoB,MAAM,GAAG;AACxC,6BAAqB,oBAA6B,MAAM,GAAG;AAC3D,YAAI,gBAAgB;AAChB,eAAK,WAAU,IAAI;AACnB,iBAAO,kBAA2B,KAAK,QAAQ,GAAG;;AAEtD,eAAO,SAAQ,OAAO,IAAI,KAAK;;AAEnC,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE;AAChB,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,QAAQ;;AAE9D,mBAAU,IAAG,CAAE;AClE3B;;;;;;;;;;;;;;;;AA+BA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,oBAAY,SAAQ,KAAK;AACzB,aAAK,CAAC;AACN,eAAO;SACR,QAAQ,MAAiB;;AAEpB,iBAAQ,IAAG,CAAE;ACxCzB;;;;;;;;;;;;;;;;AAiCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,oBAAY,SAAQ,MAAM;AAC1B,aAAK,CAAC;AACN,eAAO;SACR,QAAQ,MAAiB;;AAEpB,kBAAS,IAAG,CAAE;AC1C1B;;;;;;;;;;;;;;;;AAgCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,oBAAY,SAAQ,KAAK;AACzB,aAAK,CAAC;AACN,eAAO;SACR,QAAQ,MAAiB;;AAEpB,iBAAQ,IAAG,CAAE;ACzCzB;;;;;;;;;;;;;;;;AAqCA;AACI,eAAS,iBAAgB,GAAG,KAAK;AACjC,eAAS,iBAAgB,GAAG,KAAK;AACjC,OAAC,IAAI,MAAM,gBAAe,IAAI;AAC9B,sBAAgB;AACZ,oBAAY,SAAQ,MAAM,IAAI;AAC9B,aAAK,CAAC,IAAI;AACV,eAAO;;AAEX,qBAAe,CAAE,GAAG,IAAI,GAAG;AAC3B,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAqB;;AAE1D,kBAAS,IAAG,CAAE;ACjD1B;;;;;;;;;;;;;;;;AAiCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,oBAAY,SAAQ,MAAM;AAC1B,aAAK,CAAC;AACN,eAAO;SACR,QAAQ,MAAiB;;AAEpB,kBAAS,IAAG,CAAE;AC1C1B;;;;;;;;;;;;;;;;AAyCO,yFAAmF;AAKtF,4BAAsB,WAAW;AACjC,2BAAqB,CAAC,GAAG,aAAa;AACtC,0BAAoB,yBAAwB;AAC5C,aAAO,mBAAkB,YAAY,cAAc,SAAS,WAAW,MAAK,MAAyB,MAAsB;;AAExH,0GAAoG;AACvG,0CAAoC,iBAAgB;AACpD;AACA,UAAI,eAAe;AACf,sBAAc,CAAC,cAAc,aAAa,QAAQ,IAAI,QAAQ;iBAEzD,eAAe;AACpB,sBAAc,CAAC,cAAc,aAAa,QAAQ,IAAI,QAAQ;;AAG9D,cAAM,IAAI,MAAM,sBAAsB;;AAE1C,aAAO,mBAAkB,SAAS,aAAa,SAAS,WAAW,MAAK,cAAc,OAAO;;AAK1F,0GAAoG;AACvG,uDAAiD,kBAAiB;AAClE;AACA;AACA,UAAI,eAAe;AACf,sBAAc;AACd,sBACI,CAAC,aAAa,cAAc,aAAa,QAAQ,IAAI,QAAQ;iBAE5D,eAAe;AACpB,sBAAc;AACd,sBACI,CAAC,aAAa,cAAc,aAAa,QAAQ,IAAI,QAAQ;;AAGjE,cAAM,IAAI,MAAM,sBAAsB;;AAE1C,aAAO,mBAAkB,SAAS,aAAa,SAAS,WAAW,MAAK,OAAO,aAAa;;AAMzF,0GAAoG,oBAAoB;AAC3H,uDAAiD,CAAC,IAAI,IAAI,IAAI;AAC9D,UAAI,eAAe;AACf,SAAC,WAAW,UAAU,SAAS,cAAc;iBAExC,eAAe;AACpB,SAAC,WAAW,YAAY,UAAU,WAAW;;AAG7C,cAAM,IAAI,MAAM,sBAAsB;;AAE1C,4DAAsD;AACtD,0CAAoC,iBAAgB;AACp
D,8CAAwC,iBAAgB;AACxD,oCAA8B,wBAAuB,cAAc;AACnE,mCAA6B,wBAAuB,aAAa;AACjE,aAAQ,SAAS,WAAW,YAAa,kBAAiB,MAAK,UAAU,SAAS,cAAc,aAAa,uBAAuB,sBAAsB,cAAc;AACxK,0BAAoB,YAAY,iBAAiB,aAAa;AAC9D;AACA,UAAI,eAAe;AACf,mBAAW,CAAC,WAAW,aAAa,WAAW;iBAE1C,eAAe;AACpB,mBAAW,CAAC,WAAW,WAAW,UAAU;;AAEhD,aAAO;QACH;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;;;AAOD,4FAAsF,oBAAoB;AAC7G,gEAA0D,CAAC,IAAI,IAAI,IAAI,IAAI;AAC3E,UAAI,eAAe;AACf,SAAC,WAAW,SAAS,UAAU,SAAS,cAAc;iBAEjD,eAAe;AACpB,SAAC,WAAW,YAAY,SAAS,UAAU,WAAW;;AAGtD,cAAM,IAAI,MAAM,sBAAsB;;AAE1C,yEAAmE;AACnE,uDAAiD,kBAAiB;AAClE,6DAAuD,kBAAiB;AACxE,mCAA6B,wBAAuB,aAAa;AACjE,oCAA8B,wBAAuB,cAAc;AACnE,mCAA6B,wBAAuB,aAAa;AACjE,aAAQ,SAAS,UAAU,WAAW,YAAa,oBAAmB,MAAK,SAAS,UAAU,SAAS,aAAa,cAAc,aAAa,sBAAsB,uBAAuB,sBAAsB;AAClN,0BAAoB,YAAY,iBAAiB,aAAa;AAC9D;AACA,UAAI,eAAe;AACf,mBAAW,CAAC,WAAW,aAAa,UAAU,WAAW;iBAEpD,eAAe;AACpB,mBAAW,CAAC,WAAW,UAAU,WAAW,UAAU;;AAE1D,aAAO;QACH;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;;;AAGR;AACI,UAAI,WAAW;AACX,kBAAU,mBAAkB,SAAS,WAAW;;AAEpD,wBAAkB,QAAQ;AAC1B,wBAAkB,QAAQ;AAC1B,yBAAmB,kBAAkB,aAAY,YAAY,IAAI,WAAW,SAAS,GAAG;AACxF,cAAY,OAAW,aAAa,MAAM,yBAAyB;AAEnE,yBAAmB,kBAAkB,aAAY,YAAY,IAAI,WAAW,SAAS,GAAG;AACxF,cAAY,OAAW,aAAa,MAAM,4BAA4B;AAEtE,aAAO,CAAC,YAAY;;AAExB;AACI,UAAI,WAAW;AACX,kBAAU,mBAAkB,SAAS,WAAW;;AAEpD,yBAAmB,QAAQ;AAC3B,wBAAkB,QAAQ;AAC1B,wBAAkB,QAAQ;AAC1B,2BAAqB,kBAAkB,cAAa,YAAY,IAAI,WAAW,SAAS,GAAG;AAC3F,cAAY,OAAW,eAAe,MAAM,2BAA2B;AAEvE,yBAAmB,kBAAkB,aAAY,YAAY,IAAI,WAAW,SAAS,GAAG;AACxF,cAAY,OAAW,aAAa,MAAM,yBAAyB;AAEnE,yBAAmB,kBAAkB,aAAY,YAAY,IAAI,WAAW,SAAS,GAAG;AACxF,cAAY,OAAW,aAAa,MAAM,4BAA4B;AAEtE,aAAO,CAAC,cAAc,YAAY,YAAY;;AAE3C,0EAAqE;AACxE,iCAA2B,wBAAuB,WAAW;AAC7D,aAAO,KAAK,MAAO,YAAW,KAAM,UAAS,KAAK,SAAS,sBAAsB;;AAErF;AACI,UAAI,OAAO,UAAU;AACjB,eAAO,CAAC,OAAO,OAAO;;AAE1B,UAAI,MAAM,WAAW;AACjB,eAAO,CAAC,MAAM,IAAI,MAAM,IAAI;;AAEhC,aAAO;;AAEX;AACI,aAAO,OAAO,UAAU,WAAW,CAAC,OAAO,OAAO,SAAS;;AAa/D;AACI,UAAI,YAAY;AACZ,eAAO;;AAEX,aAAO,aAAc,cAAa,KAAM,YAAW;;AAEvD;AACI;AACA;AACA;AACA,UAAI,OAAO,SAAQ;AACf,wBAAiB,SAAQ,IAAK,UAAU;AACxC,kBAAU,CAAE,KAAK,MAAK,QAAQ,MAAK,MAAM,MAAK,OAAO,MAAK,MAAM;AAChE,yBAAiB,sBAAqB,CAAC,UAAU,UAAU,cAAc,cAAc,MAAK;AAC5F,oBAAY,SAAS;AACrB,mBAAW,SAAS;iBAEf,SAAQ;AACb,oBAAY,KAAK,KAAK,WAAW;AACjC,mBAAW,KAAK,KAAK,UAAU;AAC/B,+BAAuB,KAAK,IAAI,GAAI,aAAY,KAAK,eAAe,eAAe;AACnF,8BAAsB,KAAK,IAAI,GAAI,YAAW,KAAK,cAAc,cAAc;AAC/E,oBAAY,KAAK,MAAM,iBAAiB;AACxC,uBAAe,iBAAiB;AAChC,qBAAa,KAAK,MAAM,gBAAgB;AACxC,sBAAc,gBAAgB;AAC9B,kBAAU,CAAE,KAAK,QAAQ,MAAM,OAAO,MAAM;iBAEvC,SAAQ;AACb,kBAAU,CAAE,KAAK,GAAG,QAAQ,GAAG,MAAM,GAAG,OAAO,GAAG,MAAM;AACxD,oBAAY,KAAK,KAAM,YAAW,eAAe,KAAK;AACtD,mBAAW,KAAK,KAAM,WAAU,cAAc,KAAK;iBAE9C,OAAO,SAAQ;AACpB,oBAAY,eAAe,iBAAiB,KAAI,GAAG,KAAK,KAAI,GAAG;AAC/D,uBAAe,eAAe,iBAAiB,KAAI,GAAG,KAAK,KAAI,GAAG;AAClE,qBAAa,eAAe,iBAAiB,KAAI,GAAG,KAAK,KAAI,GAAG;AAChE,sBAAc,eAAe,iBAAiB,KAAI,GAAG,KAAK,KAAI,GAAG;AACjE,wBAAiB,QAAQ,KAAK,WAAW,KAAK,SAAS,KAAK,UAAU,IAClE,UACA;AACJ,kBAAU,CAAE,KAAK,QAAQ,MAAM,OAAO,MAAM;AAC5C,oBAAY,kBAAkB,YAAW,eAAe,MAAM,UAAU,eAAe,GAAG;AAC1F,mBAAW,kBAAkB,WAAU,cAAc,OAAO,SAAS,cAAc,GAAG;;AAGtF,cAAM,MAAM,8BAA8B;;AAE9C,aAAO,CAAE,SAAS,WAAW;;AAEjC;AACI;AACA;AACA;AACA;AACA,UAAI,OAAO,SAAQ;AACf,wBAAiB,SAAQ,IAAK,UAAU;AACxC,kBAAU;UACN,KAAK;UACL,QAAQ;UACR,MAAM;UACN,OAAO;UACP,OAAO;UACP,MAAM;UACN,MAAM;;AAEV,yBAAiB,sBAAqB,CAAC,SAAS,UAAU,SAAS,IAAI,aAAa,GAAG,aAAa,MAAK;AACzG,mBAAW,SAAS;AACpB,oBAAY,SAAS;AACrB,mBAAW,SAAS;iBAEf,SAAQ;AACb,mBAAW,KAAK,KAAK,UAAU;AAC/B,oBAAY,KAAK,KAAK,WAAW;AACjC,mBAAW,KAAK,KAAK,UAAU;AAC/B,8BAAuB,
YAAW,KAAK,cAAc,cAAc;AACnE,+BAAwB,aAAY,KAAK,eAAe,eAAe;AACvE,8BAAuB,YAAW,KAAK,cAAc,cAAc;AACnE,sBAAc,KAAK,MAAM,gBAAgB;AACzC,qBAAa,gBAAgB;AAC7B,oBAAY,KAAK,MAAM,iBAAiB;AACxC,uBAAe,iBAAiB;AAChC,qBAAa,KAAK,MAAM,gBAAgB;AACxC,sBAAc,gBAAgB;AAC9B,kBAAU,CAAE,KAAK,QAAQ,MAAM,OAAO,OAAO,MAAM,MAAM;iBAEpD,SAAQ;AACb,kBAAU;UACN,KAAK;UACL,QAAQ;UACR,MAAM;UACN,OAAO;UACP,OAAO;UACP,MAAM;UACN,MAAM;;AAEV,mBAAW,KAAK,KAAM,WAAU,cAAc,KAAK;AACnD,oBAAY,KAAK,KAAM,YAAW,eAAe,KAAK;AACtD,mBAAW,KAAK,KAAM,WAAU,cAAc,KAAK;;AAGnD,cAAM,MAAM,8BAA8B;;AAE9C,aAAO,CAAE,SAAS,UAAU,WAAW;;AAO3C;AACI,UAAI,CAAC;AACD,eAAO;;AAEX,cAAQ;aACC;AAED,iBAAO,KAAK,MAAM;aACjB;AAED,iBAAO,KAAK,KAAK;aAChB;AACD,iBAAO,KAAK,MAAM;;AAElB,gBAAM,IAAI,MAAM,wBAAwB;;;AAG7C;AACH,iCAA2B,iBAAgB;AAC3C,aAAO,SAAS,KAAK,SAAS,KAAK,SAAS;;AAEzC;AACH,aAAO,mBAAkB,YAAY,mBAAkB;;AASpD;AACH,UAAI,eAAe;AACf,eAAO;iBAEF,eAAe;AACpB,eAAO;;AAGP,cAAM,IAAI,MAAM,sBAAsB;;;ACtZ9C;;;;;;;;;;;;;;;;AA6CA;AACI,iBAAW,iBAAgB,GAAG,KAAK,WAAW;AAC9C,wBAAkB;AAClB,cAAY,gCAAyC,SAAS,YAAY,MAAM,wEAC7D,0BAA0B;AAC7C,gBAAU;AACV,yBAAmB;AACnB,UAAI,GAAG,SAAS;AACZ,uBAAe;AACf,cAAM,SAAQ,IAAI,CAAC,GAAG,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM;;AAE7D,cAAY,IAAI,SAAS,GAAG,MAAM,mDAAmD,IAAI;AACzF,UAAI,mBAAmB;AACnB,gBAAY,OAAW,OAAM,MAAM,wEACZ,+BAA+B;;AAE1D,sBAAgB;AACZ,yBAAiB,mBAA4B,IAAI,OAAO,YAAY,SAAS,GAAmB,MAAK;AACrG,aAAK,CAAC;AACN,YAAI,SAAS,gBAAgB,KAAK,SAAS,iBAAiB,KACxD,aAAiB,SAAS,SAAS,SAAS;AAC5C,iBAAO,IAAI;;AAEf,eAAO,SAAQ,QAAQ,KAAK;;AAEhC,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,YAAY,SAAS,WAAK;AAC1C,gBAAU,QAAO,cAAc,SAAS,QAAQ,MAAiB,UAAS;AAC1E,YAAM,MAAK,KAAK,GAAG;AACnB,UAAI;AACA,eAAO,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAE/D,aAAO;;AAEC,qBAAW,IAAG,CAAE;AC/E5B;;;;;;;;;;;;;;;;AAsEA,oFAA+E;AAC3E,UAAI,aAAa;AACb,oBAAY,CAAC,GAAG,GAAG;;AAGnB,yBAAgB;;AAGpB,iBAAW,iBAAgB,GAAG,KAAK,aAAa;AAChD,gBAAU;AACV,yBAAmB;AACnB,UAAI,GAAG,SAAS;AACZ,uBAAe;AACf,cAAM,SAAQ,IAAI,CAAC,GAAG,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM;;AAE1E,cAAY,IAAI,SAAS,GAAG,MAAM,qDAAqD,IAAI;AAC3F,cAAY,eAAe,SAAS,MAAM,gFACb;AAC7B,cAAY,gCAAyC,SAAS,YAAY,MAAM,0EAC7D,0BAA0B;AAC7C,UAAI,mBAAmB;AACnB,gBAAY,OAAW,OAAM,MAAM,0EACZ,+BAA+B;;AAE1D,sBAAgB;AACZ,YAAI,aAAa;AACb,sBAAY,CAAC,GAAG,GAAG;;AAEvB,yBAAiB,mBAA4B,IAAI,OAAO,YAAY,SAAS,WAAW,MAAK,iBAAiB;AAC9G,aAAK,CAAC;AACN,eAAO,SAAQ,UAAU,KAAK;;AAElC,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,YAAY,SAAS,WAAK,iBAAiB,YAAY;AACvE,gBAAU,QAAO,cAAc,SAAS,QAAQ,MAAiB,WAAW;AAC5E,YAAM,MAAK,KAAK,IAAI;AACpB,UAAI;AACA,eAAO,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAE7E,aAAO;;AAEC,sBAAa,IAAG,CAAE;AC/G9B;;;;;;;;;;;;;;;;AAiBO;AACH,mBAAa,OAAO,GAAG;AACvB,aAAO,QAAQ;AACX,gBAAY,MAAM,WAAW,MAAM,MAAM,kBAAkB,0BAA0B,gDACrD;;AAEpC,cAAY,QAAQ,KAAK,OAAO,MAAM,MAAM,kBAAkB,qCAAqC,OAAO;AAC1G,yBAAmB,OAAO;AAC1B,aAAO,QAAQ;AACX,qBAAa,GAAG,IAAI,MAAM;AACtB,kBAAa,MAAM,QAAU,MAAM,OAAO,WAAW,IAAK,MAAM,kBAAkB,2BAA2B,OAAO,gDACvE,+CACN;;;;AAI5C;AACH,0BAAoB,OAAO,GAAG;AAC9B,mBAAa,GAAG,IAAI,OAAO,QAAQ;AAC/B,oBAAY,SAAS,OAAO,GAAG;;AAEnC,aAAO;;ACtCX;;;;;;;;;;;;;;;;AA+DA,qCAAiC;AAC7B,cAAO,QAAQ,UAAU,GAAG,MAAM;AAClC,qBAAe,qBAAqB,SAAS,WAAW;AACxD,UAAI,SAAS,GAAG,UAAU;AACtB,iBAAS,QAAQ;AACb,cAAI,QAAO,UAAU;AACjB,kBAAM,IAAI,MAAM;uBACT,QAAO;;;;AAI1B,sBAAgB;AACZ,sBAAc,gBAAe,MAAM,SAAS,GAAG,OAAO;AACtD,yBAAiB,kBAAgB,SAAS,IAAI,OAAK,EAAE,QAAQ;AAC7D,YAAI,eAAc,cAAc;AAC5B,iBAAO,QAAO,IAAI;;AAGtB,mBAAW,SAAS,OAAO,OAAK,EAAE,OAAO;AACzC,YAAI,SAAS,WAAW;AACpB,iBAAO,SAAS;;AAEpB,uBAAe,SAAS,IAAI,OAAK,EAAE;AACnC,gCAAuB,QAAQ;AAC/B,oBAAY,SAAQ,OAAO,UAAU;AACrC,aAAK;AACL,eAAO;;AAEX,qBAAe;AACf,mBAAa,CAAE;AACf,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,SAAQ;;AAE9D,oBAAU,IAAG,CAAE;AC/F3B;;;;;;;;;;;;;;;;AAgCA;A
ACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,oBAAY,SAAQ,QAAQ;AAC5B,aAAK,CAAC;AACN,eAAO;SACR,QAAQ,MAAiB;;AAEpB,qBAAW,IAAG,CAAE;ACzC5B;;;;;;;;;;;;;;;;AAuDA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,UAAI,GAAG,SAAS;AACZ,cAAM,IAAI,MAAM;;AAEpB,sBAAgB;AACZ,gCAAwB,kBAA4B,IAAI,OAAO;AAC/D,2BAA6B,IAAI,QAAQ;AACzC,aAAK,CAAC;AACN,eAAO,SAAQ,MAAM,IAAI,QAAQ;;AAErC,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,OAAO;AACvB,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,QAAO;;AAE7D,mBAAS,IAAG,CAAE;ACtE1B;;;;;;;;;;;;;;;;AAgCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,kBAAU,SAAQ,KAAK;AACvB,aAAK,CAAC;AACN,eAAO;SACR,QAAQ,MAAiB;;AAEpB,mBAAQ,IAAG,CAAE;ACzCzB;;;;;;;;;;;;;;;;AAyCA;AACI,0BAAoB,iBAAgB,YAAY,cAAc;AAC9D,0BAAoB,iBAAgB,YAAY,cAAc;AAC9D,wBAAkB,iBAAgB,UAAU,YAAY;AACxD,oBAAc,iBAAgB,OAAM,QAAQ;AAC5C,iBAAW,iBAAgB,GAAG,KAAK;AACnC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,uBAAiB,QAAO,CAAC,OAAO,KAAK;AACrC,uBAAiB,OAAO,UAAU;AAClC,kBAAY,MAAI,UAAU;AAE1B,wBAAkB,IAAI,MAAM;AAC5B,wBAAkB,IAAI,MAAM,KAAK;AACjC,wBAAkB,CAAC,WAAW;AAC9B,gBAAU,OAAM,KAAK,CAAC,GAAG,IAAI;AAC7B,gBAAU,OAAM,KAAK,CAAC,GAAG,YAAY;AACrC,gBAAU,OAAM,KAAK,CAAC,GAAG,YAAY,IAAI;AACzC,gBAAU,OAAM,KAAK,CAAC,GAAG,YAAY,IAAI;AACzC,mBAAa,MAAI,KAAI,SAAQ,IAAI,OAAK,KAAK,KAAI,IAAI,SAAQ,MAAI,aAAa;AAC5E,mBAAa,KAAI,OAAK,OAAO,SAAQ;AACrC,aAAO,CAAC,MAAM;;AAEN,0BAAiB,IAAG,CAAE;AC/DlC;;;;;;;;;;;;;;;;AAqEA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,oBAAa,WAAW,OAAO,UAAU,IAAI;AAC7C,cAAY,GAAG,QAAQ,IAAI,WAAW,QAAQ,MAAM,iBAAiB,GAAG,+CAA+C,WAAW;AAClI,cAAY,MAAM,WAAW,WAAW,QAAQ,MAAM,mBAAmB,MAAM,oDAAoD,WAAW;AAC9I,cAAY,GAAG,MAAM,KAAK,UAAS,GAAG,MAAM,yBAAyB,GAAG,MAAM,wEAC5C,WAAW,KAAK,cAAc;AAChE,sBAAgB;AACZ,eAAO,SAAQ,eAAe,IAAI,YAAY;;AAElD,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,YAAY;AAC5B,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAqB,gBAAgB;;AAE1E,2BAAkB,IAAG,CAAE;AClF5B;AACH;AACA,UAAI,EAAE,SAAS,KAAK,EAAE,SAAS;AAC3B,cAAM,SAAQ,GAAG,CAAC,GAAG,GAAG,GAAG,EAAE;iBAExB,EAAE,SAAS;AAChB,cAAM,SAAQ,GAAG,CAAC,GAAG,GAAG,EAAE,MAAM,IAAI,EAAE,MAAM;iBAEvC,EAAE,SAAS;AAChB,cAAM,SAAQ,GAAG,CAAC,GAAG,EAAE,MAAM,IAAI,EAAE,MAAM,IAAI,EAAE,MAAM;;AAGrD,cAAM;;AAEV,aAAO;;ACfX;;;;;;;;;;;;;;;;AAiDA;AACI,UAAI,mBAAmB;AACnB,0BAAkB;;AAEtB,iBAAW,iBAAgB,GAAG,KAAK;AACnC,oBAAc,iBAAgB,OAAM,QAAQ;AAC5C,wBAAkB,iBAAgB,WAAU,YAAY;AACxD;AACA,UAAI,UAAS;AACT,iBAAS,iBAAgB,QAAO,SAAS;;AAE7C;AACA,UAAI,UAAU;AACV,kBAAU,iBAAgB,QAAQ,UAAU;;AAEhD,cAAY,MAAM,SAAS,UAAU,MAAM,MAAM;AAEjD,cAAY,WAAW,QAAQ,MAAM,SAAS,QAAQ,MAAM,MAAM;AAElE,cAAY,UAAU,QAAQ,MAAM,SAAS,OAAO,MAAM,MAAM;AAEhE,kBAAY,MAAM;AAClB,sBAAgB;AACZ,aAAK,CAAC,KAAK,OAAO,WAAW;AAC7B,eAAO,SAAQ,UAAU,KAAK,SAAS,QAAQ,SAAS,YAAY,SAAS,UAAU,SAAS,SAAS;;AAE7G,qBAAe;QACX,GAAG;QACH,OAAO;QACP,QAAQ;QACR,MAAM;QACN,UAAU;;AAEd,oBAAc,CAAE;AAChB,kBAAY,QAAO,cAAc,SAAS,QAAQ,MAAqB,iBAAgB;AACvF,aAAO,SAAQ,KAAK,GAAG;;AAE3B;AACI,UAAI,KAAK;AACL,eAAO;;AAEX,UAAI,EAAE,SAAS;AAEX,eAAO,SAAQ,GAAG,CAAC,EAAE;iBAEhB,EAAE,SAAS;AAChB,eAAO;iBAEF,EAAE,SAAS;AAEhB,eAAO,SAAQ,GAAG,CAAC,GAAG,GAAG,EAAE,MAAM,IAAI,EAAE,MAAM;iBAExC,EAAE,SAAS;AAEhB,eAAO,SAAQ,GAAG,CAAC,GAAG,EAAE,MAAM,IAAI,EAAE,MAAM,IAAI,EAAE,MAAM;;AAE1D,aAAO;;AAEC,sBAAa,IAAG,CAAE;AC5F9B;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,oBAAc,iBAAgB,OAAM,QAAQ;AAC5C,wBAAkB,iBAAgB,WAAU,YAAY;AACxD;AACA,UAAI,UAAS;AACT,iBAAS,iBAAgB,QAAO,SAAS;;AAE7C;AACA,UAAI,UAAU;AACV,kBAAU,iBAAgB,QAAQ,UAAU;;AAEhD,cAAY,GAAG,SAAS,GAAG,MAAM,uDAC1B,GAAG;AACV,cAAY,MAAM,SAAS,KAAK,MAAM,SAAS,GAAG,MAAM,oEACxC,MAAM;AACtB,cAAY,UAAU,SAAS,KAAK,UAAU,SAAS,GAAG,MAAM,wEAC5C,UAAU;AAC9B,UAAI,UAAU;AACV,gBAAY,OAAO,SAAS,KAAK,OAAO,SAAS,GAAG,MAAM,qEACtC,OAAO;;AAE/B,UAAI,WAAW;AACX,gBAAY,QAAQ,SAAS,KAAK,QAAQ,SAAS,GAAG,MAAM,sEACxC,QAAQ;;AAEhC,aAAO,UAAU,IAAI,OAAO,WAAW,SAAS,QAAQ;;
AAEhD,wBAAe,IAAG,CAAE;AC5BhC;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,oBAAc,iBAAgB,OAAM,QAAQ;AAC5C,wBAAkB,iBAAgB,WAAU,YAAY;AACxD;AACA,UAAI,UAAS;AACT,iBAAS,iBAAgB,QAAO,SAAS;;AAE7C;AACA,UAAI,UAAU;AACV,kBAAU,iBAAgB,QAAQ,UAAU;;AAEhD,cAAY,GAAG,SAAS,GAAG,MAAM,uDAC1B,GAAG;AACV,cAAY,MAAM,SAAS,KAAK,MAAM,SAAS,GAAG,MAAM,oEACxC,MAAM;AACtB,cAAY,UAAU,SAAS,KAAK,UAAU,SAAS,GAAG,MAAM,wEAC5C,UAAU;AAC9B,UAAI,UAAU;AACV,gBAAY,OAAO,SAAS,KAAK,OAAO,SAAS,GAAG,MAAM,qEACtC,OAAO;;AAE/B,UAAI,WAAW;AACX,gBAAY,QAAQ,SAAS,KAAK,QAAQ,SAAS,GAAG,MAAM,sEACxC,QAAQ;;AAEhC,aAAO,UAAU,IAAI,OAAO,WAAW,SAAS,QAAQ;;AAEhD,wBAAe,IAAG,CAAE;AC5BhC;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,oBAAc,iBAAgB,OAAM,QAAQ;AAC5C,wBAAkB,iBAAgB,WAAU,YAAY;AACxD;AACA,UAAI,UAAS;AACT,iBAAS,iBAAgB,QAAO,SAAS;;AAE7C;AACA,UAAI,UAAU;AACV,kBAAU,iBAAgB,QAAQ,UAAU;;AAEhD,cAAY,GAAG,SAAS,GAAG,MAAM,uDAC1B,GAAG;AACV,cAAY,MAAM,SAAS,KAAK,MAAM,SAAS,GAAG,MAAM,oEACxC,MAAM;AACtB,cAAY,UAAU,SAAS,KAAK,UAAU,SAAS,GAAG,MAAM,wEAC5C,UAAU;AAC9B,UAAI,UAAU;AACV,gBAAY,OAAO,SAAS,KAAK,OAAO,SAAS,GAAG,MAAM,qEACtC,OAAO;;AAE/B,UAAI,WAAW;AACX,gBAAY,QAAQ,SAAS,KAAK,QAAQ,SAAS,GAAG,MAAM,sEACxC,QAAQ;;AAEhC,aAAO,UAAU,IAAI,OAAO,WAAW,SAAS,QAAQ;;AAEhD,wBAAe,IAAG,CAAE;AC3ChC;;;;;;;;;;;;;;;;AAoCA;AACI,mBAAY,iBAAgB,GAAG,eAAe;AAC9C,qBAAe,OAAM;AACrB,UAAI,MAAM,KAAK,OAAK,CAAE,KAAI,MAAM,IAAI,MAAM;AACtC,cAAM,IAAI,MAAM,2CAA2C;;AAE/D,UAAI,MAAM,SAAS,OAAM;AACrB,cAAM,IAAI,MAAM,+BAA+B,MAAM,uBAAuB,OAAM;;AAEtF,UAAI,MAAM,SAAS,OAAM;AACrB,yBAAiB,OAAM,MAAM;AAC7B,eAAO,SAAS,SAAS,MAAM;AAC3B,mBAAS,QAAQ;;AAErB,iBAAQ,SAAQ,QAAO;;AAE3B,yBAAmB,OAAM;AACzB,mBAAa,MAAM,KAAK;AACxB,mBAAa,MAAM,SAAS,GAAG,KAAK,GAAG;AACnC,YAAI,WAAW,OAAO,MAAM;AACxB,eAAK,KAAK;mBAEL,OAAM,MAAM,OAAO;AACxB,gBAAM,IAAI,MAAM,mBAAmB,mCAAmC;;;AAG9E,mBAAa,KAAK,IAAI,UAAU,IAAI,IAAI,IAAI,IAAI,OAAO,OAAK,KAAK;AACjE,UAAI,KAAK,WAAW;AAChB,eAAO,MAAM;;AAEjB,sBAAgB,cAAa,SAAQ,KAAK,QAAO;AACjD,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,OAAO;AACvB,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,aAAa;;AAEnE,wBAAe,IAAG,CAAE;ACvEhC;;;;;;;;;;;;;;;;AAgCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc,cAAW,SAAQ,KAAK,KAAK,QAAQ,MAAiB;;AAE1E,iBAAQ,IAAG,CAAE;ACrCzB;;;;;;;;;;;;;;;;AAmCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,cAAa,gBAAgB,cAAe,MAAM,uBAAuB,oDACvC;AAClC,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,cAAc;AAC9B,aAAO,QAAO,cAAc;AACxB,oBAAY,SAAQ,KAAK,IAAI,cAAc;AAC3C,aAAK,CAAC;AACN,eAAO;SACR,QAAQ,MAAiB,cAAa;;AAEjC,wBAAe,IAAG,CAAE;AClChC;AACI,aAAO,QAAO,SAAS;;AAEf,qBAAY,IAAG,CAAE;ACa7B;AACI,aAAO,QAAO,SAAS;;AAEf,qBAAY,IAAG,CAAE;ACC7B;AACI,aAAO,QAAO,SAAS;;AAEf,qBAAY,IAAG,CAAE;AC1B7B;AACI,aAAO,QAAO,SAAS;;AAEf,qBAAY,IAAG,CAAE;ACb7B;;;;;;;;;;;;;;;;AAwDA,4DAAuD,oBAAoB,CAAC,GAAG;AAC3E,iBAAW,iBAAgB,GAAG,KAAK;AACnC,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,gBAAU;AACV,yBAAmB;AACnB,UAAI,GAAG,SAAS;AACZ,uBAAe;AACf,cAAM,SAAQ,IAAI,CAAC,GAAG,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM;;AAE7D,cAAY,IAAI,SAAS,GAAG,MAAM,uDAAuD,IAAI;AAC7F,cAAY,QAAQ,SAAS,GAAG,MAAM,wDAC/B,QAAQ;AACf,UAAI,mBAAmB;AACnB,gBAAY,OAAW,OAAM,MAAM,uEACZ,+BAA+B;;AAE1D,sBAAgB,eAAe,SAAS,IAAI,MAAM,KAAK,IAAI,MAAM;AACjE,cAAY,YAAY,QAAQ,MAAM,IAAI,MAAM,oCAAoC,8CACtD,QAAQ,MAAM;AAC5C,cAAY,gCAAyC,SAAS,YAAY,MAAM,uEAC7D,0BAA0B;AAC7C,sBAAgB;AACZ,4BAAoB,yBAAkC;AACtD,yBAAiB,mBAA4B,IAAI,OAAO,QAAQ,OAAO,SAAS,WAAW,MAAK,iBAAiB,OAAO;AACxH,qBAAY,SAAQ,OAAO,KAAK,SAAS;AACzC,aAAK,CAAC,KAAK;AACX,eAAO;;AAEX,qBAAe,CAAE,GAAG,KAAK,QAAQ;AACjC,oBAAc,CAAE,SAAS,WAAK,YAAY,WAAW;AACrD,kBAAY,QAAO,cAAc,SAAS,QAAQ,MAAiB,SAAQ;AAC3E,UAAI;AACA,eAAO,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAE/D,aAAO;;AAEC,oBAAU,IAAG,CAAE;ACzD3B,2DAAsD,kBAAkB;AACpE,iBAAW,iBAAgB,GAAG,KAAK;AACnC,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,gBAAU;AACV,yBAAmB;AACn
B,UAAI,GAAG,SAAS;AACZ,uBAAe;AACf,cAAM,SAAQ,IAAI,CAAC,GAAG,GAAG,MAAM,IAAI,GAAG,MAAM;;AAEhD,cAAY,IAAI,SAAS,GAAG,MAAM,uDAAuD,IAAI;AAC7F,cAAY,QAAQ,SAAS,GAAG,MAAM,wDAC/B,QAAQ;AACf,UAAI,mBAAmB;AACnB,gBAAY,OAAW,OAAM,MAAM,uEACZ,+BAA+B;;AAE1D,cAAY,IAAI,MAAM,OAAO,QAAQ,MAAM,IAAI,MAAM,oCAAoC,IAAI,MAAM,yCACrE,QAAQ,MAAM;AAC5C,cAAY,gCAAyC,QAAQ,WAAW,MAAM,oEAC5D,wBAAwB;AAC1C,cAAY,eAAe,OAAO,MAAM,sCAAsC;AAC9E,uBAAiB,SAAQ,SAAS,CAAC,GAAG,QAAQ,MAAM,IAAI,QAAQ,MAAM,IAAI,QAAQ,MAAM;AACxF,sBAAgB,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,GAAG,IAAI,MAAM,IAAI,IAAI,MAAM;AACvE,sBAAgB,CAAC,GAAG;AACpB,wBAAkB,CAAC,GAAG;AACtB,+BAAyB;AACzB,kBAAY,QAAO,SAAS,UAAU,SAAS,MAAK,kBAAkB,WAAW;AACjF,UAAI;AACA,eAAO,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM;;AAEjD,aAAO,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAEnD,mBAAU,IAAG,CAAE;ACnE3B;;;;;;;;;;;;;;;;AA+CA,kFAA6E;AACzE,cAAY,OAAO,WAAW,GAAG,MAAM,MAAM,sBACrC,OAAO,2BAA2B,GAAG;AAC7C,qBAAe;AACf,iBAAW;AACX,yBAAmB;AACnB,UAAI,GAAG,SAAS;AACZ,uBAAe;AACf,eAAO,SAAQ,IAAI,CAAC,GAAG,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM;AAC1D,mBAAW,CAAC,GAAG,OAAO,IAAI,OAAO,IAAI,OAAO;;AAEhD,cAAY,SAAS,WAAW,GAAG,MAAM,qEAClC,SAAS;AAChB,cAAY,KAAK,SAAS,GAAG,MAAM,4DACvB,KAAK;AACjB,cAAY,OAAO,SAAS,GAAG,MAAM,gEACzB,OAAO;AACnB,sBAAgB,eAAe,SAAS,SAAS,KAAK,SAAS;AAC/D,uBAAiB,eAAe,SAAS,KAAK,MAAM,KAAK,KAAK,MAAM;AACpE,cAAY,YAAY,OAAO,MAAM,IAAI,MAAM,4CAA4C,8CACvD,OAAO,MAAM;AACjD,cAAY,aAAa,OAAO,MAAM,IAAI,MAAM,6CAA6C,gDACxD,OAAO,MAAM;AAClD,UAAI,mBAAmB;AACnB,gBAAY,OAAW,OAAM,MAAM,+EACZ,+BAA+B;;AAE1D,sBAAgB;AACZ,0BAAkB;AAClB,4BAAoB,yBAAkC;AACtD,yBAAiB,mBAA4B,UAAU,OAAO,OAAO,SAAS,WAAW,MAAK,iBAAiB,OAAO;AACtH,qBAAY,SAAQ,eAAe,MAAM,QAAQ;AACjD,aAAK,CAAC,MAAM;AACZ,eAAO;;AAEX,qBAAe,CAAE,IAAI,MAAM;AAC3B,oBAAc,CAAE,SAAS,WAAK,YAAY,iBAAiB,YAAY;AACvE,kBAAY,QAAO,cAAc,SAAS,QAAQ,MAAiB,sBAAqB;AACxF,UAAI;AACA,eAAO,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAE/D,aAAO;;AAEJ,iCAA4B,IAAG,CAAE;AClExC;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,aAAO,qBAAoB,aAAa,IAAI,SAAS,SAAS,MAAK,QAAQ;;AAEnE,4BAAmB,IAAG,CAAE;AC7BpC;;;;;;;;;;;;;;;;AAwDA,4DAAuD,qBAAqB,CAAC,GAAG,GAAG;AAC/E,iBAAW,iBAAgB,GAAG,KAAK;AACnC,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,gBAAU;AACV,yBAAmB;AACnB,UAAI,GAAG,SAAS;AACZ,uBAAe;AACf,cAAM,SAAQ,IAAI,CAAC,GAAG,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM;;AAE1E,cAAY,IAAI,SAAS,GAAG,MAAM,uDAAuD,IAAI;AAC7F,cAAY,QAAQ,SAAS,GAAG,MAAM,wDAC/B,QAAQ;AACf,cAAY,IAAI,MAAM,OAAO,QAAQ,MAAM,IAAI,MAAM,oCAAoC,IAAI,MAAM,yCACrE,QAAQ,MAAM;AAC5C,cAAY,gCAA+B,SAAS,YAAY,MAAM,uEACnD,0BAA0B;AAC7C,cAAY,eAAe,SAAS,MAAM,sCAAsC;AAChF,sBAAgB;AACZ,yBAAiB,mBAA4B,IAAI,OAAO,QAAQ,OAAO,SAAS,WAAW;AAC3F,qBAAY,SAAQ,OAAO,KAAK,SAAS;AACzC,aAAK,CAAC,KAAK;AACX,eAAO;;AAEX,qBAAe,CAAE,GAAG,KAAK,QAAQ;AACjC,oBAAc,CAAE,SAAS,WAAK,YAAY;AAC1C,kBAAY,QAAO,cAAc,SAAS,QAAQ,MAAiB,QAAQ;AAC3E,UAAI;AACA,eAAO,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAE7E,aAAO;;AAEC,mBAAU,IAAG,CAAE;ACvF3B;;;;;;;;;;;;;;;;AAwCA;AACI,cAAY,OAAO,WAAW,GAAG,MAAM,MAAM,sBACrC,OAAO,2BAA2B,GAAG;AAC7C,qBAAe;AACf,iBAAW;AACX,yBAAmB;AACnB,UAAI,GAAG,SAAS;AACZ,uBAAe;AACf,eAAO,SAAQ,IAAI,CAAC,GAAG,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM;AACvE,mBAAW,CAAC,GAAG,OAAO,IAAI,OAAO,IAAI,OAAO,IAAI,OAAO;;AAE3D,sBAAgB,SAAS;AACzB,uBAAiB,KAAK,MAAM;AAC5B,cAAY,SAAS,WAAW,GAAG,MAAM,qEAClC,SAAS;AAChB,cAAY,KAAK,SAAS,GAAG,MAAM,4DACvB,KAAK;AACjB,cAAY,OAAO,SAAS,GAAG,MAAM,gEACzB,OAAO;AACnB,cAAY,YAAY,OAAO,MAAM,IAAI,MAAM,4CAA4C,8CACvD,OAAO,MAAM;AACjD,cAAY,aAAa,OAAO,MAAM,IAAI,MAAM,6CAA6C,gDACxD,OAAO,MAAM;AAClD,sBAAgB;AACZ,0BAAkB;AAClB,yBAAiB,mBAA4B,UAAU,OAAO,OAAO,SAAS,WAAW;AACzF,eAAO,SAAQ,eAAe,MAAM,QAAQ;;AAEhD
,qBAAe,CAAE,IAAI,MAAM;AAC3B,oBAAc,CAAE,WAAK,SAAS,YAAY;AAC1C,kBAAY,QAAO,cAAc,SAAS,QAAQ,MAAM,uBAAuB;AAC/E,UAAI;AACA,eAAO,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAE7E,aAAO;;AAEJ,gCAA4B,IAAG,CAAE;ACtDxC;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,aAAO,oBAAoB,aAAa,IAAI,SAAS,SAAS;;AAEtD,4BAAmB,IAAG,CAAE;AC3BpC;;;;;;;;;;;;;;;;AAgCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,oBAAY,SAAQ,IAAI;AACxB,aAAK,CAAC;AACN,eAAO;SACR,QAAQ,MAAiB;;AAEpB,gBAAO,IAAG,CAAE;ACzCxB;;;;;;;;;;;;;;;;AAgCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,oBAAY,SAAQ,KAAK;AACzB,aAAK,CAAC;AACN,eAAO;SACR,QAAQ,MAAiB;;AAEpB,iBAAQ,IAAG,CAAE;ACzCzB;;;;;;;;;;;;;;;;AA6CA,+BAA2B,eAAe,kBAAiB;AACvD,iBAAW,iBAAgB,GAAG,KAAK;AACnC,sBAAgB;AACZ,4BAAoB,oBAAmB,CAAC,OAAO,GAAG;AAClD,wBAAgB;AAChB,YAAI,eAAe;AACf,sBAAY,WAAU,IAAI;;AAE9B,6BAAqB,kBAAiB,GAAG,GAAG,MAAM;AAClD,oBAAY,SAAQ,OAAO,WAAW,cAAc,WAAW;AAC/D,aAAK,CAAC;AACN,YAAI,eAAe;AACf,qCAA2B,wBAAuB;AAClD,kBAAQ,WAAU,OAAO;;AAE7B,eAAO;;AAEX,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,MAAM,WAAW;AACjC,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,SAAQ;;AAE9D,oBAAU,IAAG,CAAE;AClE3B;;;;;;;;;;;;;;;;AA0DA,sDAAkD;AAC9C,iBAAW,iBAAgB,GAAG,KAAK;AACnC,0BAAqB,eAAe,SAAU,GAAG,MAAM,KAAK,GAAG,MAAM;AACrE,yBAAoB,eAAe,SAAU,GAAG,MAAM,KAAK,GAAG,MAAM;AACpE,yBAAoB,eAAe,SAAU,GAAG,MAAM,KAAK,GAAG,MAAM;AACpE,cAAY,cAAc,aAAa,GAAG,MAAM;MAC9C,mBAAmB;MACnB,GAAG;AACL,cAAY,aAAa,aAAa,GAAG,MAAM;MAC7C,kBAAkB;UACd,GAAG;AACT,cAAa,aAAc,aAAY,eAAe,GAAI,MAAM,8CAA8C,YAAY,oBAAoB,gDAAgD,GAAG;AACjM,sBAAgB,cAAW,SAAQ,aAAa,IAAI,WAAW;AAC/D,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,WAAW;AAC3B,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAqB,eAAc;;AAExE,0BAAgB,IAAG,CAAE;AC3EjC;;;;;;;;;;;;;;;;AAqEA,qEAAgE,oBAAoB,CAAC,GAAG;AACpF,iBAAW,iBAAgB,GAAG,KAAK;AACnC,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,gBAAU;AACV,yBAAmB;AACnB,UAAI,GAAG,SAAS;AACZ,uBAAe;AACf,cAAM,SAAQ,IAAI,CAAC,GAAG,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM;;AAE7D,cAAY,IAAI,SAAS,GAAG,MAAM,gEACtB,IAAI;AAChB,cAAY,QAAQ,SAAS,GAAG,MAAM,iEAC/B,QAAQ;AACf,cAAY,IAAI,MAAM,OAAO,QAAQ,MAAM,IAAI,MAAM,uDAC7C,IAAI,MAAM,qDACJ,QAAQ,MAAM;AAC5B,UAAI,mBAAmB;AACnB,gBAAY,OAAW,OAAM,MAAM,gFACZ,+BAA+B;;AAE1D,sBAAgB;AACZ,YAAI,aAAa;AACb,sBAAY,CAAC,GAAG;;AAEpB,gBAAY,gCAAyC,SAAS,YAAY,MAAM,gFAC1D,0BAA0B;AAChD,yBAAiB,mBAA4B,IAAI,OAAO,QAAQ,OAAO,SAAS,WAAW,MAAK,iBAAiB;AACjH,qBAAY,SAAQ,gBAAgB,KAAK,SAAS;AAClD,aAAK,CAAC,KAAK;AACX,eAAO;;AAEX,qBAAe,CAAE,GAAG,KAAK,QAAQ;AACjC,oBAAc,CAAE,SAAS,WAAK,YAAY,WAAW;AACrD,kBAAY,QAAO,cAAc,SAAS,QAAQ,MAAiB,wBAAuB;AAC1F,UAAI;AACA,eAAO,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAE/D,aAAO;;AAEC,6BAAmB,IAAG,CAAE;AC5GpC;;;;;;;;;;;;;;;;AA0CA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,sBAAgB;AACZ,qBAAa,SAAQ,IAAI,CAAC,GAAG;AAC7B,uBAAe,SAAQ,KAAK;AAC5B,yBAAiB,CAAC,GAAG,EAAE,OAAO,GAAG,EAAE;AACnC,eAAO,SAAQ,QAAQ;;AAE3B,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB;;AAEtD,iBAAQ,IAAG,CAAE;ACrDzB;;;;;;;;;;;;;;;;AAqDA,+DAA0D,CAAC,GAAG,iBAAiB;AAC3E,iBAAW,iBAAgB,GAAG,KAAK;AACnC,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,cAAY,GAAG,SAAS,KAAK,GAAG,SAAS,GAAG,MAAM,gEAC3C,GAAG;AACV,cAAY,QAAQ,SAAS,GAAG,MAAM,4DAC/B,QAAQ;AACf,cAAY,eAAe,QAAQ,MAAM,gFACZ;AAC7B,gBAAU;AACV,yBAAmB;AACnB,UAAI,GAAG,SAAS;AACZ,cAAM,SAAQ,IAAI,CAAC,GAAG,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM;AACzD,uBAAe;;AAEnB,qBAAe,CAAE,GAAG,KAAK,QAAQ;AACjC,oBAAc,CAAE,SAAS,WAAK;AAC9B,kBAAY,QAAO,UAAU,YAAY,QAAQ;AACjD,UAAI;AACA,eAAO,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAE/D,aAAO;;AAEC,uBAAc,IAAG,CAAE;AC5E/B;;;;;;;;;;;;;;;;AAyBO;AACH,qBAAe,QAAQ;AACvB,mBAAa;AACb,mBAAa,GAAG,IAAI,QAAQ;AACxB,oBAAY,SAAS,IAAI;AAC
zB,kBAAU,QAAQ,QAAQ;AAC1B,kBAAU,SAAS,SAAS,SAAS,IAAI,MAAM;AAC/C,YAAI,IAAI,KAAK,MAAM;AACf,eAAK,QAAQ;;;AAGrB,aAAO;;AAMJ;AACH,qBAAe;AACf,mBAAa,GAAG,IAAI,SAAS,QAAQ;AACjC,sBAAc,QAAQ,QAAQ,SAAS,IAAI;AAC3C,wBAAgB,SAAS,SAAS,IAAI;AACtC,uBAAe,SAAS;AACxB,YAAI,SAAS,QAAS,UAAU,KAAK,SAAS;AAC1C,iBAAO,QAAQ;;;AAGvB,aAAO;;AAEJ;AACH,qBAAe;AACf,gBAAU,KAAK,IAAI,OAAO,QAAQ,OAAO;AACzC,mBAAa,GAAG,IAAI,GAAG;AACnB,gBAAQ,OAAO,OAAO,SAAS,IAAI;AACnC,YAAI,KAAK;AACL,cAAI;;AAER,gBAAQ,OAAO,OAAO,SAAS,IAAI;AACnC,YAAI,KAAK;AACL,cAAI;;AAER,YAAI,MAAM;AACN,iBAAO,QAAQ;mBAEV,MAAM;AACX,iBAAO,QAAQ;mBAEV,MAAM;AACX,yBAAe,wDACR,cAAc;AACrB,gBAAM,MAAM;;AAGZ,iBAAO,QAAQ;;;AAGvB,aAAO;;ACjFX;;;;;;;;;;;;;;;;AAqCA;AACI,eAAS,iBAAgB,GAAG,KAAK;AACjC,eAAS,iBAAgB,GAAG,KAAK;AACjC,OAAC,IAAI,MAAM,gBAAe,IAAI;AAC9B,kCAA2B,GAAG,OAAO,GAAG;AACxC,sBAAgB,cAAW,SAAQ,MAAM,IAAI;AAC7C,qBAAe,CAAE,GAAG,IAAI,GAAG;AAC3B,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAM;;AAE3C,kBAAS,IAAG,CAAE;AC9C1B;;;;;;;;;;;;;;;;AA8CA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,yBAAmB,iBAAgB,WAAW,aAAa,SAAS;AAIpE,6BAAuB,4BAA2B,GAAG,OAAO,GAAG;AAC/D,4BAAsB,YAAY,IAAI;AACtC,4BAAsB,YAAY,IAAI;AACtC,UAAI,WAAW,SAAS;AAGpB,gBAAO,WAAW,MAAM,OAAO,GAAG,MAAM,IAAI,MAAM;;AAEtD,UAAI,WAAW,SAAS;AAEpB,2BAAkB,WAAW,OAAO,cAAc,OAAO;;AAE7D,sBAAgB;AACZ,oBAAY,SAAQ,OAAO,YAAY,eAAe;AACtD,aAAK,CAAC;AACN,eAAO;;AAEX,qBAAe;QACX,WAAW;QACX,GAAG;QACH,GAAG;;AAEP,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAqB;;AAE1D,kBAAS,IAAG,CAAE;AC7E1B;;;;;;;;;;;;;;;;AAiCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc,cAAW,SAAQ,UAAU,KAAK,QAAQ,MAAiB;;AAE/E,uBAAa,IAAG,CAAE;ACtC9B;;;;;;;;;;;;;;;;AAqDA;AAEI,eAAS,iBAAgB,GAAG,KAAK;AACjC,eAAS,iBAAgB,GAAG,KAAK;AACjC,OAAC,IAAI,MAAM,gBAAe,IAAI;AAC9B,wBAAkB,IAAI,IAAI;AAC1B,qBAAc,WAAU;AACxB,0BAAoB,MAAM,IAAI;AAC9B,aAAO,MAAM,aAAa,QAAO;;AAEzB,qBAAY,IAAG,CAAE;AC/D7B;;;;;;;;;;;;;;;;AAsCA;AACI,kBAAY,iBAAgB,IAAI,MAAM;AACtC,kBAAY,iBAAgB,IAAI,MAAM;AACtC,cAAa,KAAI,SAAS,KAAK,IAAI,SAAS,MAAO,KAAI,SAAS,KAAK,IAAI,SAAS,IAAI,MAAM,+DACrF,IAAI,YAAY,IAAI;AAC3B,sBAAiB,IAAI,SAAS,IAAI,IAAI,OAAO,IAAI,MAAM;AACvD,sBAAiB,IAAI,SAAS,IAAI,IAAI,OAAO,IAAI,MAAM;AACvD,cAAY,YAAY,SAAS,MAAM,gEAChC,eAAe;AACtB,UAAI,IAAI,SAAS,KAAK,IAAI,SAAS;AAC/B,qBAAa,SAAQ,KAAK,CAAC,GAAG;AAC9B,qBAAa,SAAQ,KAAK,CAAC,IAAI;AAC/B,qBAAa,OAAO,MAAM;AAC1B,eAAO,SAAQ,MAAM;iBAEhB,IAAI,SAAS,KAAK,IAAI,SAAS;AACpC,qBAAa,SAAQ,KAAK,CAAC,GAAG;AAC9B,qBAAa,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM;AACnD,qBAAa,OAAO,MAAM;AAC1B,eAAO,SAAQ,MAAM,CAAC,KAAK;iBAEtB,IAAI,SAAS,KAAK,IAAI,SAAS;AACpC,qBAAa,SAAQ,KAAK,CAAC,IAAI;AAC/B,qBAAa,OAAO,KAAK;AACzB,eAAO,SAAQ,MAAM,CAAC,KAAK;;AAG3B,qBAAa,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM;AACnD,qBAAa,OAAO,KAAK;AACzB,eAAO;;;AAGH,iBAAO,IAAG,CAAE;ACtExB;;;;;;;;;;;;;;;;AAgCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,sBAAgB;AACZ,kBAAU,SAAQ,IAAI;AACtB,aAAK,CAAC;AACN,eAAO;;AAEX,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB;;AAEtD,iBAAO,IAAG,CAAE;AC1CxB;;;;;;;;;;;;;;;;AAmCA;AACI,eAAS,iBAAgB,GAAG,KAAK;AACjC,cAAY,GAAG,UAAU,WAAW,GAAG,UAAU,WAAW,MAAM;AAClE,UAAI,GAAG,UAAU;AACb,aAAK,MAAK,IAAI;;AAElB,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,oBAAY,SAAQ,IAAI;AACxB,aAAK,CAAC;AACN,eAAO;SACR,QAAQ,MAAiB;;AAEpB,gBAAO,IAAG,CAAE;AChDxB;;;;;;;;;;;;;;;;AAgCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,oBAAY,SAAQ,IAAI;AACxB,aAAK,CAAC;AACN,eAAO;SACR,QAAQ,MAAiB;;AAEpB,gBAAO,IAAG,CAAE;ACzCxB;;;;;;;;;;;;;;;;AAoCA,mCAA+B;AAC3B,sBAAgB;AAChB,iBAAW,iBAAgB,GAAG,KAAK,cAAc;AACjD,cAAY,QAAQ,GAAG,MAAM,MAAM;AACnC,uBAAiB,GAAG,MAAM;AAC1B,UAAI,OAAO;AAEP,gBAAY,CAAE,IAAG,OAAO,MAAM,MAAM,MAAM,iCAAiC,CAAE,IAAG,OAAO,OAAO,GAAG;AACjG,eAAO,GAAG,OAAO,OAAO;;AAE5B,eAAS,
GAAG,aAC1C,GAAG,wBAAwB,6BACX;AACvB,uBAAiB,GAAG,MAAM,MAAM,GAAG,IAAI,OAAO,CAAC,aAAa;AAC5D,kBAAY,aACR,SAAQ,IAAI,CAAC,WAAW,aAAa,gBACrC,SAAQ,IAAI,CAAC,WAAW,aAAa;AACzC,kBAAY,aACR,SAAQ,IAAI,CAAC,WAAW,aAAa,gBACrC,SAAQ,IAAI,CAAC,WAAW,aAAa;AACzC;AACA,UAAI,QAAQ;AACR,gBAAQ,iBAAgB,MAAM,QAAQ;AACtC,SAAC,SAAS,gBAAe,OAAO;AAChC,oCAA0C,UAAU,MAAM;;AAE9D;AACA,UAAI,0BAA0B;AAC1B,kCAA0B,iBAAgB,wBAAwB,iBAAiB;;AAEvF,oBAAa;AACT,wCAA6B;AAI7B,6BAAqB,sBAAqB,SAAQ,IAAI,EAAE,QAAQ,GAAG;AACnE;AACA;AACA,YAAI,CAAC,cAAc,CAAC;AAChB,iBAAO,OAAc,cAAc,MAAK,OAAO;AAC/C,iBAAO,OAAc,MAAK,cAAc,MAAM;mBAEzC,CAAC,cAAc;AACpB,iBAAO,OAAc,cAAc,MAAK,OAAO;AAC/C,iBAAO,OAAc,cAAc,MAAK,MAAM;mBAEzC,cAAc,CAAC;AACpB,iBAAO,OAAc,MAAK,cAAc,OAAO;AAC/C,iBAAO,OAAc,MAAK,cAAc,OAAO;;AAG/C,iBAAO,OAAc,MAAK,cAAc,MAAM;AAC9C,iBAAO,OAAc,cAAc,MAAK,MAAM;;AAElD,YAAI,QAAQ;AACR,0BAAgB,sBAAqB,QAAO;AAC5C,iBAAO,CAAC,MAAM,MAAM;;AAGpB,iBAAO,CAAC,MAAM;;;AAGtB,sBAAgB;AACZ,kBAAU,SAAQ,iBAAiB;UAC/B,GAAG;UACH,GAAG;UACH;UACA;UACA,MAAM;UACN;UACA,wBAAwB;;AAE5B,eAAO;;AAEX,qBAAe;QACX,GAAG;QACH,GAAG;QACH,MAAM;QACN,wBAAwB;;AAE5B,oBAAc,CAAE,YAAY,YAAY;AAGxC,UAAI,QAAQ;AACR,yBAAiB,WAAW;AACxB,sBAAY,QAAO,cAAc,SAAS,QAAQ,MAAiB,eAAc;AACjF,eAAK,CAAC,MAAK,MAAK;AAChB,iBAAO,CAAE,OAAO,SAAQ,KAAK,WAAW,UAAU;;AAEtD,eAAO,SAAS,KAAK;;AAGrB,iCAAyB,WAAW;AAChC,sBAAY,QAAO,cAAc,SAAS,QAAQ,MAAiB,eAAc;AACjF,eAAK,CAAC,MAAK,MAAK,KAAK;AACrB,iBAAO,CAAE,OAAO,SAAQ,KAAK,WAAW,UAAU;;AAEtD,eAAO,iBAAiB,KAAK,KAAK;;;AAGnC,qBAAe,IAAG,CAAE;ACnK3B;;;;;;;;;;;;;;;;;;;;;;ACAA;;;;;;;;;;;;;;;;AA8BA;AACI,aAAO,aAAa,cAAc,MAAM;;AAErC,0BAAsB,IAAG,CAAE;ACjClC;;;;;;;;;;;;;;;;AA8BA;AACI,aAAO,aAAa,cAAc,KAAK;;AAEpC,uBAAmB,IAAG,CAAE;ACjC/B;;;;;;;;;;;;;;;;AAsCA,8DAAyD,kBAAkB;AACvE,kBAAY;AACZ,qBAAe;AACf,aAAO,QAAQ,eAAe,QAAO;AACjC,eAAO,KAAK,OAAM,SAAQ,OAAO;AACjC,iBAAS;;AAEb,UAAI;AACA,eAAO,QAAQ,QAAO;AAClB,yBAAgB,QAAQ,cAAe,QAAO;AAC9C,uBAAY,QAAO;YACf,OAAM,SAAQ,OAAO,cAAc;YAAS,MAAK,CAAC,SAAS;;AAE/D,iBAAO,KAAK;AACZ,mBAAS;;;AAGjB,UAAI,OAAO,WAAW;AAClB,eAAO,SAAS,IAAI,CAAC,GAAG;;AAE5B,aAAO,SAAQ,QAAO,SAAS,CAAC,OAAO,QAAQ;;AAE5C,kBAAc,IAAG,CAAE;AC5D1B;;;;;;;;;;;;;;;;AAwCA,0EAAqE;AACjE,UAAI,aAAa;AACb,oBAAY,oBAAoB;;AAEpC,2BAAqB,MAAM,SAAQ,aAAa;AAChD,6BAAuB,KAAI,cAAc,SAAS;AAClD,qBAAe;AACf,mBAAa,GAAG,IAAI,aAAa,MAAM,IAAI;AACvC,eAAO,KAAK,KAAK,OAAM,gBAAgB,CAAC,GAAG,IAAI,CAAC,GAAG,eAAe;;AAEtE,aAAO,QAAO;;AAEX,iBAAa,IAAG,CAAE;ACpDzB;;;;;;;;;;;;;;;;AA4CA;AACI,qBAAe,iBAAgB,QAAO,SAAS;AAC/C,qBAAe,iBAAgB,OAAO,SAAS,iBAAiB;AAChE,sBAAgB,iBAAgB,QAAQ,UAAU,iBAAiB;AACnE,eAAS,UAAU;AACnB,2BAAqB,sBAAsB;AAC3C,uBAAiB,OAAO,MAAM;AAC9B,cAAY,OAAO,SAAS,GAAG,MAAM,6DACjB,OAAO;AAC3B,cAAY,OAAO,SAAS,KAAK,OAAO,MAAM,OAAO,GAAG,MAAM,oDAAoD,6BAC7F,OAAO;AAC5B,cAAY,QAAQ,SAAS,KAAK,QAAQ,MAAM,OAAO,UAAU,MAAM,qDAAqD,2BACvG,OAAO;AAC5B,cAAY,SAAS,WAAW,GAAG,MAAM,wEAC3B,SAAS;AACvB,cAAY,SAAS,MAAM,KAAK,SAAS,MAAM,GAAG,MAAM,2CAA2C;AACnG,cAAY,WAAW,cAAc,WAAW,WAAW,MAAM,+CAA+C;AAChH,sBAAgB,cAAa,SAAQ,cAAc,QAAQ,QAAQ,SAAS,UAAU,QAAQ;AAC9F,qBAAe,CAAE,OAAO,QAAQ,OAAO,QAAQ,QAAQ;AACvD,oBAAc,CAAE,QAAQ,oBAAoB;AAC5C,kBAAY,QAAO,cAAc,SAAS,QAAQ,MAAiB,gBAAe;AAClF,aAAO;;AAEJ,2BAAsB,IAAG,CAAE;ACnElC;;;;;;;;;;;;;;;;AA4BA;AACI,qBAAe,iBAAgB,QAAO,SAAS,iBAAiB;AAChE,cAAY,OAAO,SAAS,GAAG,MAAM,6DACjB,OAAO;AAC3B,qBAAe,CAAE,OAAO;AACxB,kBAAY,QAAO,UAAU,gBAAe,QAAQ;AACpD,aAAO;;AAEJ,2BAAsB,IAAG,CAAE;ACpClC;;;;;;;;;;;;;;;;AAqCA,4DAAuD,YAAY;AAC/D,qBAAe,iBAAgB,QAAO,SAAS,oBAAoB;AACnE,cAAY,OAAO,SAAS,GAAG,MAAM,gEACjB,OAAO;AAC3B,qBAAe,CAAE,OAAO;AACxB,oBAAc,CAAE,SAAS,WAAW;AACpC,kBAAY,QAAO,UAAU,mBAAkB,QAAQ;AACvD,aAAO;;AAEJ,8BAAyB,IAAG,CAAE;AC9CrC;;;;;;;;;;;;;;;;AAiBA;AACI,UAAI,gBAAgB;AAChB,uBAAe;;AAEnB,UAAI,kBAAkB;AAClB,yBAAiB,OAAO;;AAE5B,UAAI,gBAAgB;AAChB,uBAAe;;AAEnB,uBA
AiB,MAAM,MAAM;AAC7B,sBAAgB,KAAK,IAAI,eAAe;AACxC,cAAY,KAAK,gBAAgB,gBAAgB,GAAG,MAAM,4CAA4C;AACtG,cAAY,MAAM,SAAS,GAAG,MAAM,+CAA+C,MAAM;AACzF,cAAY,MAAM,MAAM,OAAO,GAAG,MAAM,oDAAoD,MAAM,MAAM;AACxG,cAAY,OAAO,SAAS,GAAG,MAAM;AACrC,cAAY,OAAO,MAAM,OAAO,UAAU,MAAM,sDAAsD,qBACvF,OAAO,MAAM;AAC5B,cAAY,KAAK,gBAAgB,gBAAgB,GAAG,MAAM,4CAA4C;AACtG,aAAO,CAAE,eAAe,cAAc,gBAAgB;;ACpC1D;;;;;;;;;;;;;;;;AAqBA,6EAAyE,sBAAsB,OAAO;AAClG,qBAAe,iBAAgB,OAAO,SAAS;AAC/C,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,qBAAe,sBAAsB,QAAQ,SAAS,eAAe,cAAc;AACnF,sBAAgB,OAAO;AACvB,qBAAe,OAAO;AACtB,uBAAiB,OAAO;AACxB,oBAAc,CAAE,eAAe,cAAc;AAC7C,aAAO,QAAO,cAAc,OAAK,EAAE,kBAAkB,QAAQ,SAAS,eAAe,cAAc,iBAAiB,CAAE,OAAO,QAAQ,QAAQ,UAAW,MAAiB,sBAAqB;;AAE3L,8BAA0B,IAAG,CAAE;AC/BtC;;;;;;;;;;;;;;;;AA6BO;AACH,qBAAc,aAAa,KAAK,SAAS;AACzC,6BAAuB,SAAQ,IAAI,CAAE,UAAQ,KAAK;AAClD,UAAI,OAAO,gBAAgB,GAAG;;AAiB3B;AACH,aAAO,cAAc,KAAK,QAAQ,cAAc;;AASpD;AACI,aAAO,IAAI,IAAI,IAAI,IAAI,IAAI,KAAK;;AAEpC;AACI,iBAAW;AACX,kBAAY,IAAI;AAChB,mBAAa;AACb,kBAAY;AACZ,aAAO,OAAO;AACV,iBAAS,OAAS,SAAQ,SAAU;AACpC,8BAAsB,WAAW,QAAQ,IAAI;AAC7C,YAAI,gBAAgB;AAChB,iBAAO,SAAS;;AAGhB,kBAAQ;AAGR,kBAAQ,CAAC;;;AAGjB,aAAO,QAAQ,OAAO,CAAC,OAAO;;AChFlC;;;;;;;;;;;;;;;;AAsBO;AACH,aAAO,uBAAuB,OAAO,QAAQ,eAAe,cAAc,gBAAgB,GACrF;;AAEF;AACH,aAAO,uBAAuB,OAAO,QAAQ,eAAe,cAAc,gBAAgB,GAAsB,OAAgC,oBAA6C;;AAG1L;AACH,aAAO,uBAAuB,OAAO,QAAQ,eAAe,cAAc,gBAAgB,cAAc;;AAE5G,mIAA+H,4BAA4B,4BAA4B;AAGnL,yBAAmB;AACnB,mBAAa,GAAG,IAAI,OAAO,QAAQ;AAC/B,YAAI,OAAO,KAAK;AACZ,qBAAW,KAAK,CAAE,OAAO,OAAO,IAAI,UAAU,GAAG,oBAAoB;;;AAG7E,iBAAW,KAAK;AAGhB,qBAAc,eAAe,IAAK,OAAO,eAAgB;AACzD,8BAAwB;AACxB,6BAAuB;AACvB,aAAO,gBAAgB,SAAS,iBAAiB,WAAW,SAAS;AACjE,0BAAkB,WAAW;AAC7B,eAAQ,sBAAsB,UAAU,sBAAuB;AAC/D,YAAI,gBAAgB;AAChB;;AAQJ,8BAAsB;AACtB,qBAAa,gBAAgB,SAAS,GAAG,KAAK,oBAAoB,EAAE;AAChE,sBAAY,sBAAsB,OAAO,UAAU,gBAAgB;AACnE,cAAI,OAAO;AACP,8BAAkB;AAClB;;AAEJ,oBAAU,QACN,UAAU,QAAQ,eAAe,cAAc,QAAO;AAC1D,cAAI,UAAU,SAAS;AACnB;;;AAUR,kBAAU,qBAAqB,gBAAgB;AAC/C,YAAI,CAAC;AAGD,cAAI,UAAU,UAAU;AACpB,4BAAgB,KAAK;AACrB,2BAAe,KAAK,UAAU;qBAEzB,UAAU,QAAQ;AAGvB,yBAAa,YAAY,WAAW;;;;AAKhD,2BAAqB,gBAAgB;AACrC,yBAAmB,gBAAgB;AACnC,UAAI,sBAAsB,aAAa;AACnC,wBAAgB,KAAK,GAAG,IAAI,MAAM,YAAY,KAAK;AACnD,uBAAe,KAAK,GAAG,IAAI,MAAM,YAAY,KAAK;;AAEtD,qBAAe,CAAE,iBAAiB,UAAS,iBAAiB;AAC5D,UAAI;AACA,eAAO,oBAAoB,UAAS,gBAAgB;;AAExD,UAAI;AACA,eAAO,kBAAkB,QAAO,cAAc;;AAElD,aAAO;;AAEX;AACI,qBAAe,MAAM,SAAS,IAAI,GAAG,IAAI,IAAI;AAC7C,qBAAe,MAAM,SAAS,IAAI,GAAG,IAAI,IAAI;AAC7C,oBAAc,KAAK,IAAI,OAAO,IAAI,OAAO;AACzC,oBAAc,KAAK,IAAI,OAAO,IAAI,OAAO;AACzC,oBAAc,KAAK,IAAI,OAAO,IAAI,OAAO;AACzC,oBAAc,KAAK,IAAI,OAAO,IAAI,OAAO;AACzC,oBAAc,KAAK,IAAI,OAAO,IAAI,OAAO;AACzC,oBAAc,KAAK,IAAI,OAAO,IAAI,OAAO;AACzC,oBAAc,KAAK,IAAI,OAAO,IAAI,OAAO;AACzC,oBAAc,KAAK,IAAI,OAAO,IAAI,OAAO;AACzC,oBAAe,SAAQ,SAAU,SAAQ;AACzC,oBAAe,SAAQ,SAAU,SAAQ;AACzC,UAAI,SAAS,KAAK,SAAS;AACvB,eAAO;;AAEX,+BAAyB,KAAK,IAAI,OAAO;AACzC,+BAAyB,KAAK,IAAI,OAAO;AACzC,+BAAyB,KAAK,IAAI,OAAO;AACzC,+BAAyB,KAAK,IAAI,OAAO;AACzC,+BAAyB,KAAK,IAAI,mBAAmB,kBAAkB,KACnE,KAAK,IAAI,mBAAmB,kBAAkB;AAClD,aAAO,mBAAoB,SAAQ,QAAQ;;AAM/C;AACI,qBAAe,KAAK,IAAI,SAAQ,MAAM;AACtC,aAAO,OAAO,eAAe,SAAS;;AAE1C;AAKI,aAAQ,GAAG,QAAQ,GAAG,SAChB,GAAG,UAAU,GAAG,SAAW,GAAG,WAAW,GAAG;;ACrJtD;;;;;;;;;;;;;;;;AAuCA,wFAAoF,sBAAsB,OAAO;AAC7G,qBAAe,iBAAgB,OAAO,SAAS;AAC/C,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,qBAAe,sBAAsB,QAAQ,SAAS,eAAe,cAAc;AACnF,sBAAgB,OAAO;AACvB,qBAAe,OAAO;AACtB,uBAAiB,OAAO;AACxB,6BAAuB,MAAM,QAAQ,IAAI,CAAC,OAAO,QAAQ,QAAQ;AACjE,wBAAkB,eAAe;AACjC,yBAAmB,eAAe;AAIlC,kBAAY,wBAAwB,WAAW,YAAY,eAAe,cAAc;AACxF,UAAI,WAAW;AACX,eAAO;;AAEX,UAAI,YAAY;AACZ,gBAAQ;;AAEZ,aAAO;;AAEJ,mCAA+B;AC7DtC;;;;;;;;;;;;;;;;AAkDA,sFAAkF,sBAA
sB,OAAO,kCAAkC;AAC7I,qBAAe,iBAAgB,OAAO,SAAS;AAC/C,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,qBAAe,sBAAsB,QAAQ,SAAS,eAAe,cAAc,gBAAgB;AACnG,sBAAgB,OAAO;AACvB,qBAAe,OAAO;AACtB,uBAAiB,OAAO;AACxB,qBAAe,OAAO;AACtB,qBAAe,CAAE,OAAO,QAAQ,QAAQ;AACxC,oBAAc,CAAE,eAAe,cAAc,gBAAgB;AAC7D,qBAAe,QAAO,UAAU,sBAAqB,QAAQ;AAC7D,aAAO,CAAE,iBAAiB,OAAO,IAAI,gBAAgB,OAAO;;AAEzD,uCAAmC,IAAG,CAAE;AC/D/C;;;;;;;;;;;;;;;;AAgDA,iGAA6F,sBAAsB,OAAO,kCAAkC;AACxJ,qBAAe,iBAAgB,OAAO,SAAS;AAC/C,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,qBAAe,sBAAsB,QAAQ,SAAS,eAAe,cAAc,gBAAgB;AACnG,sBAAgB,OAAO;AACvB,qBAAe,OAAO;AACtB,uBAAiB,OAAO;AACxB,qBAAe,OAAO;AACtB,6BAAuB,MAAM,QAAQ,IAAI,CAAC,OAAO,QAAQ,QAAQ;AACjE,wBAAkB,eAAe;AACjC,yBAAmB,eAAe;AAIlC,kBAAY,wBAAwB,WAAW,YAAY,eAAe,cAAc,gBAAgB;AACxG,UAAI,WAAW;AACX,eAAO;;AAEX,UAAI,YAAY;AACZ,gBAAQ;;AAEZ,aAAO;;AAEJ,4CAAwC;ACvE/C;;;;;;;;;;;;;;;;AA4CA,mFAA+E,sBAAsB,OAAO,wCAAwC;AAChJ,qBAAe,iBAAgB,OAAO,SAAS;AAC/C,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,qBAAe,sBAAsB,QAAQ,SAAS,eAAe,cAAc,gBAAgB;AACnG,6BAAuB,OAAO;AAC9B,4BAAsB,OAAO;AAC7B,8BAAwB,OAAO;AAC/B,qBAAe,CAAE,OAAO,QAAQ,QAAQ;AACxC,oBAAc;QACV,eAAe;QACf,cAAc;QACd,gBAAgB;QAChB;;AAEJ,qBAAe,QAAO,UAAU,sBAAqB,QAAQ;AAC7D,aAAO,CAAE,iBAAiB,OAAO,IAAI,cAAc,OAAO;;AAEvD,oCAAgC,IAAG,CAAE;AC7D5C;;;;;;;;;;;;;;;;AA0CA,8FAA0F,sBAAsB,OAAO,wCAAwC;AAC3J,qBAAe,iBAAgB,OAAO,SAAS;AAC/C,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,qBAAe,sBAAsB,QAAQ,SAAS,eAAe,cAAc,gBAAgB;AACnG,6BAAuB,OAAO;AAC9B,4BAAsB,OAAO;AAC7B,8BAAwB,OAAO;AAC/B,sCAAgC,MAAM,QAAQ,IAAI,CAAC,OAAO,QAAQ,QAAQ;AAI1E,kBAAY,wBAAwB,WAAW,YAAY,gBAAgB,eAAe,iBAAiB;AAC3G,UAAI,WAAW;AACX,eAAO;;AAEX,UAAI,YAAY;AACZ,gBAAQ;;AAEZ,aAAO;;AAEJ,yCAAqC;AC9D5C;;;;;;;;;;;;;;;;AAoCA,0DAAsD;AAClD,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,cAAY,QAAQ,SAAS,KAAK,QAAQ,SAAS,GAAG,MAAM,gEAChD,QAAQ;AACpB,cAAY,KAAK,WAAW,GAAG,MAAM,6DAC9B;AACP,wBAAkB;AAClB,yBAAmB;AACnB,UAAI,QAAQ,SAAS;AACjB,uBAAe;AACf,sBAAc,SAAQ,SAAS,CAAC,GAAG,QAAQ,MAAM,IAAI,QAAQ,MAAM,IAAI,QAAQ,MAAM;;AAEzF,oCAA8B;AAC9B,sBAAgB;AACZ,aAAK,CAAC;AACN,eAAO,SAAQ,eAAe,aAAa,WAAW,UAAU;;AAEpE,qBAAe,CAAE,QAAQ;AACzB,oBAAc,CAAE,cAAc;AAC9B,kBAAY,QAAO,cAAc,SAAS,QAAQ,MAAqB,iBAAgB;AACvF,UAAI;AACA,eAAO,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAE/D,aAAO;;AAEJ,4BAAuB,IAAG,CAAE;AC7DnC;;;;;;;;;;;;;;;;AAoCA,iEAA6D;AACzD,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,cAAY,QAAQ,SAAS,KAAK,QAAQ,SAAS,GAAG,MAAM,uEAChD,QAAQ;AACpB,cAAY,KAAK,WAAW,GAAG,MAAM,oEAC9B;AACP,cAAY,QAAQ,UAAU,aAAa,QAAQ,UAAU,SAAS,MAAM;AAC5E,wBAAkB;AAClB,yBAAmB;AACnB,UAAI,QAAQ,SAAS;AACjB,uBAAe;AACf,sBAAc,SAAQ,SAAS,CAAC,GAAG,QAAQ,MAAM,IAAI,QAAQ,MAAM,IAAI,QAAQ,MAAM;;AAEzF,oCAA8B;AAC9B,qBAAe,CAAE,QAAQ;AACzB,oBAAc,CAAE,cAAc;AAC9B,sBAAgB;AACZ,aAAK,CAAC;AACN,eAAO,SAAQ,sBAAsB,aAAa,WAAW,UAAU;;AAE3E,kBAAY,QAAO,cAAc,SAAS,QAAQ,MAAqB,uBAAuB;AAC9F,UAAI;AACA,eAAO,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAE/D,aAAO;;AAEJ,kCAA8B,IAAG,CAAE;AC9D1C;;;;;;;;;;;;;;;;AAoEA;AACI,cAAO,WAAW,MAAM,GAAG,MAAM,gDAAgD;AACjF,cAAO,WAAW,MAAM,GAAG,MAAM,gDAAgD;AACjF,iBAAW,iBAAgB,GAAG,KAAK;AACnC,cAAO,GAAG,QAAQ,GAAG,MAAM,4CAA4C,GAAG;AAC1E,oBAAc,GAAG;AACjB,qBAAe,GAAG,MAAM,MAAM;AAC9B,UAAI,CAAE,aAAY;AACd,cAAM,IAAI,MAAM,yBAAyB,0DACY;;AAEzD,UAAI,CAAE,aAAY;AACd,cAAM,IAAI,MAAM,yBAAyB,6DACe;;AAE5D,UAAI,WAAW;AACX,mBAAW;;AAEf,UAAI,WAAW;AACX,mBAAW;;AAEf,gBAAU,SAAQ,MAAM,GAAG,GAAG,GAAG,UAAU,CAAC,IAAI;AAChD,gBAAU,MAAM,GAAG,GAAG,GAAG;AACzB,iBAAW,IAAI,GAAG;AAClB,qBAAe,WAAW,UAAU,IAAI,QAAO,CAAC,UAAU,WAAW,aAAa,IAAI,QAAO,CAAC,UAAU;AACxG,mBAAa,OAAM,CAAC,GAAG,IAAI,GAAG;AAC9B,aAAO,SAAQ,MAAM,QAAQ,SAAQ,IAAI,CAAC,IAAI,GAAG,KAC5C,IAAI,SAAO,MAAM,QAAQ,KAAK,SAAS;;AAEzC,qBAAiB,IAAG,CAAE;ACjG7B;;;;;;;;;;;;;;;;AAuDA;AACI;AACA,UAAI,MAAM,QAAQ;AACd,0BAAkB;AAClB,gBAAO,MAAM,QAA
Q,GAAG,SAAS,GAAG,MAAM;AAE1C,oBAAY,GAAG,GAAG,MAAM;AACxB,qBAAa,GAAG,IAAI,GAAG,QAAQ,EAAE;AAC7B,kBAAO,GAAG,GAAG,MAAM,OAAO,KAAK,MAAM,iEAC7B,GAAG,GAAG,MAAM,UAAU;;;AAIlC,0BAAkB;AAClB,aAAK,OAAM,IAAI,GAAG,MAAM,IAAI,GAAG,IAAI,OAAK,QAAQ,GAAG,CAAC;;AAExD,cAAO,GAAG,UAAU,GAAG,GAAG,MAAM,IAAI,MAAM,oCAAoC,GAAG,yCACpD,GAAG,GAAG,MAAM;AACzC,iBAAW;AACX,mBAAa;AACb,mBAAa,GAAG,IAAI,GAAG,QAAQ,EAAE;AAC7B,WAAG,KAAK,QAAO,KAAK;AAChB,kBAAQ,KAAK;AACb,cAAI,IAAI;AACJ,yBAAa,GAAG,IAAI,GAAG,EAAE;AACrB,2BAAa,KAAI,MAAI,KAAI,GAAG,IAAI,KAAK,GAAG;AACxC,kBAAI,IAAI,GAAG;;;AAGnB,iBAAO,IAAI,GAAG,KAAK,GAAG;;;AAG9B,UAAI;AACA,eAAO,MAAM,IAAI;;AAGjB,eAAO;;;AAGR,wBAAoB,IAAG,CAAE;AC9FhC;;;;;;;;;;;;;;;;AAgFA,mCAA+B;AAC3B,cAAO,EAAE,QAAQ,GAAG,MAAM,gEAAgE,EAAE;AAC5F,UAAI,EAAE,SAAS;AACX,eAAO,KAAK,GAAG;;AAOf,8BAAsB,EAAE,MAAM,MAAM,GAAG,EAAE,MAAM,SAAS,GACnD,OAAO,iBAAiB,QAAQ;AACrC,qBAAa,QAAQ,SAAQ,GAAG;UAC5B;UAAe,EAAE,MAAM,EAAE,MAAM,SAAS;UACxC,EAAE,MAAM,EAAE,MAAM,SAAS;YACzB;AACJ,qBAAa;AACb,qBAAa;AACb,aAAK,QAAQ;AACT,6BAAmB,KAAK,KAAK;AAC7B,eAAK,KAAK;AACV,eAAK,KAAK;;AAEd,kBAAU,SAAQ,MAAM,MAAM,IAAI,EAAE;AACpC,kBAAU,SAAQ,MAAM,MAAM,IAAI,EAAE;AACpC,eAAO,CAAC,GAAG;;;AAGnB,oCAAgC;AAC5B,aAAO,QAAO,KAAK;AACf,gBAAO,EAAE,MAAM,WAAW,GAAG,MAAM,0CAA0C,EAAE,MAAM;AACrF,kBAAU,EAAE,MAAM;AAClB,kBAAU,EAAE,MAAM;AAClB,gBAAQ,IAAI;AACZ,gBAAQ,MAAM;AACd,sBAAc,SAAS,CAAC,CAAC,KAAK,CAAC,GAAG;AAClC,gBAAQ,MAAM;AACd,sBAAc,KAAK,IAAI,IAAI;AAC3B,qBAAa,GAAG,IAAI,OAAO,EAAE;AAGzB,wBAAc;AACd,wBAAc;AACd,wBAAc;AACd,WAAC,GAAG,GAAG,KAAK,QAAO,KAAK;AAEpB,2BAAe,OAAM,GAAG,CAAC,GAAG,IAAI,CAAC,IAAI,GAAG;AACxC,0BAAc,KAAK;AACnB,wBAAY,OAAM,GAAG,CAAC,GAAG,IAAI,CAAC,GAAG;AAEjC,sBAAU,MAAM,QAAQ,KAAK,IAAI,SAAS,CAAC,CAAC,OAAO,SAAS,CAAC,CAAC;AAC9D,uBAAW,IAAI,KAAK,KAAI,GAAG;AAC3B,yBAAa,IAAI,QAAQ;AACzB,gBAAI,KAAK,MAAM,OAAO;AAClB,kBAAI,MAAM;;AAGV,kBAAI,QAAO;gBACP;gBACA,OAAM,MAAM,CAAC,GAAG,IAAI,CAAC,KAAK,MAAM,KAAK,GAAG,KAAK,MAAM;iBACpD;;AAEP,wBAAY,IAAI,IAAI,OAAO,GAAG,KAAK;AAEnC,6BAAiB,OAAM,GAAG,CAAC,GAAG,IAAI,CAAC,IAAI,GAAG;AAC1C,8BAAkB,KAAI,KAAK;AAC3B,uBAAW,WAAU;AACrB,gBAAI,MAAM;AACN,kBAAI,IAAI,UAAU,OAAO,WAAW,OAAO,IAAI;;AAG/C,gCAAkB,IAAI,UAAU,OAAO,WAAW,OAAO,IAAI;AAC7D,kBAAI,QAAO,CAAC,OAAM,GAAG,CAAC,GAAG,IAAI,CAAC,GAAG,KAAK,YAAY;;AAEtD,+BAAmB,WAAU;AAC7B,6BAAiB,OAAM,GAAG,CAAC,GAAG,IAAI,CAAC,GAAG,EAAE,MAAM,KAAK;AACnD,gBAAI,MAAM;AACN,kBAAI,IAAI,UAAU,OAAO,OAAO,UAAU,IAAI;;AAG9C,gCAAkB,IAAI,UAAU,OAAO,OAAO,UAAU,IAAI;AAC5D,kBAAI,QAAO,CAAC,OAAM,GAAG,CAAC,GAAG,IAAI,CAAC,GAAG,KAAK,YAAY;;AAEtD,mBAAO,CAAC,GAAG,GAAG;;AAElB,kBAAQ,CAAC,OAAO,OAAO;;AAE3B,YAAI,CAAC,gBAAgB,IAAI;AACrB,cAAI,OAAM,GAAG,CAAC,GAAG,IAAI,CAAC,GAAG;AACzB,cAAI,OAAM,GAAG,CAAC,GAAG,IAAI,CAAC,GAAG;;AAE7B,eAAO,CAAC,GAAG;;;AAGZ,eAAW,IAAG,CAAE;AC9KvB;;;;;;;;;;;;;;;;AAiBA,IAAC;AACG,gBAAU,UAAU,UAAU,KAAK;AACnC,gBAAU,UAAU,UAAU,KAAK;AACnC,gBAAU,UAAU,SAAS,KAAK;AAClC,gBAAU,UAAU,4BAA4B,KAAK;OACtD,SAAA,aAAc,UAAA,YAAY;ACA7B,iEAA2D,SAAA,UAAU;AACjE,sBAAgB,iBAAgB,SAAQ,UAAU;AAClD,qBAAe;AACf,UAAI,WAAW;AACX,mBAAW,iBAAgB,SAAS,WAAW;;AAEnD,2BAAsB,YAAY,OAAQ,UAAU,KAAI,SAAS;AACjE,UAAI,eAAc,SAAA,UAAU;AACxB,eAAO;;AAEX,UAAI,eAAc,SAAA,UAAU;AACxB,eAAO,MAAI;;AAEf,UAAI,eAAc,SAAA,UAAU;AACxB,YAAI,YAAY;AACZ,iBAAO,KAAK;;AAGZ,kCAAwB,QAAQ,OAAO,SAAS;AAChD,yBAAe,IAAI,MAAI,eAAe,MAAI;AAC1C,iBAAO,kBAAkB,IAAI,IAAI,QAAQ,QAAO,oBAC5C;;;AAGZ,UAAI,eAAc,SAAA,UAAU;AACxB,YAAI,YAAY;AACZ,iBAAO,IAAI,MAAI,eAAe,QAAO,QAAQ;;AAG7C,qCAA2B,KAAI,UAAU,OAAK,QAAQ;AACtD,8BAAoB,MAAK,MAAI,SAAS,oBAAoB,QAAO,MAAM;AACvE,iBAAO,IAAI,MAAI,eAAe;;;AAGtC,YAAM,MAAM,sBAAsB;;AAE/B,gCAA4B,IAAG,CAAE;AC1DxC;;;;;;;;;;;;;;;;AAsCA,4EAAuE,SAAA,UAAU;AAC7E,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,2BAAqB,iBAAgB,aAAa,eAAe;AACjE,qBAAe;AACf,UAAI,WAAW;AACX,mBAAW,iBAAgB,SA
AS,WAAW;;AAEnD,yBAAkB,QAAQ,OAAO,aAAa,OAAO;AACrD,sBAAe,IAAI,IAAI,SAAS;AAChC,aAAO,oBAAoB,SAAQ,UAAU;;AAE1C,+BAA2B,IAAG,CAAE;ACxBvC,8EAAyE,SAAA,UAAU;AAC/E,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,2BAAqB,iBAAgB,aAAa,eAAe;AACjE,qBAAe;AACf,UAAI,WAAW;AACX,mBAAW,iBAAgB,SAAS,WAAW;;AAEnD,yBAAkB,QAAQ,OAAO,aAAa,OAAO;AACrD,kBAAY,QAAO;AACnB,sBAAe,IAAI,KAAK,MAAI,KAAI,SAAS,eAAe,MAAM;AAC9D,aAAO,oBAAoB,SAAQ,UAAU;;AAE1C,2BAAuB,IAAG,CAAE;ACbnC,mEAA8D,SAAA,UAAU;AACpE,oBAAc,iBAAgB,QAAQ,UAAU;AAChD,2BAAqB,iBAAgB,aAAa,eAAe;AACjE,qBAAe;AACf,UAAI,WAAW;AACX,mBAAW,iBAAgB,SAAS,WAAW;;AAEnD,yBAAkB,QAAQ,OAAO,aAAa,OAAO;AACrD,kBAAY,QAAO;AAEnB,gBAAU,IAAI,KAAI,QAAO,IAAI,UAAU;AACvC,sBAAe,MAAK,IAAI,KAAK,KAAI,SAAS;AAC1C,aAAO,oBAAoB,SAAQ,UAAU;;AAE1C,sBAAkB,IAAG,CAAE;ACtC9B;;;;;;;;;;;;;;;;AA4CA,8DAA0D,gBAAiB,SAAA,UAAU;AACjF,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,2BAAqB,iBAAgB,aAAa,eAAe;AACjE,qBAAe;AACf,UAAI,WAAW;AACX,mBAAW,iBAAgB,SAAS,WAAW;;AAEnD,yBAAkB,QAAQ,OAAO,aAAa,OAAO;AACrD,0BAAoB,QAAO;AAC3B,oBAAc,IAAI,IAAI,cAAc;AACpC,wBAAkB,QAAQ,OAAO;AACjC,qBAAe,IAAI,OAAO;AAC1B,sBAAe,MAAI,KAAI,QAAO,MAAM,OAAO,aAAa,KAAI,aAAa;AACzE,aAAO,oBAAoB,SAAQ,UAAU;;AAE1C,sBAAkB,IAAG,CAAE;AC3D9B;;;;;;;;;;;;;;;;AA2CA,+DAA0D,mBAAkB,SAAA,UAAU;AAClF,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,2BAAqB,iBAAgB,aAAa,eAAe;AACjE,qBAAe;AACf,UAAI,WAAW;AACX,mBAAW,iBAAgB,SAAS,WAAW;;AAEnD,yBAAkB,QAAQ,OAAO,aAAa,OAAO;AACrD,kBAAY,QAAO;AACnB,4BAAsB,QAAO;AAC7B,kBAAW,IAAI,KAAI,SAAS,KAAI,MAAI,cAAc;AAClD,kBAAW,KAAI,IAAI,KAAK,UAAU,KAAI,MAAI,IAAI,KAAK,eAAe;AAClE,sBAAe,IAAI,KAAI;AACvB,aAAO,oBAAoB,SAAQ,UAAU;;AAE1C,oBAAgB,IAAG,CAAE;AC1D5B;;;;;;;;;;;;;;;;AAqCA,0EAAqE,SAAA,UAAU;AAC3E,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,2BAAqB,iBAAgB,aAAa,eAAe;AACjE,qBAAe;AACf,UAAI,WAAW;AACX,mBAAW,iBAAgB,SAAS,WAAW;;AAEnD,yBAAkB,QAAQ,OAAO,aAAa,OAAO;AACrD,sBAAe,kBAAkB,SAAS;AAC1C,aAAO,oBAAoB,SAAQ,UAAU;;AAE1C,6BAAyB,IAAG,CAAE;AChDrC;;;;;;;;;;;;;;;;AA8BA;AACI,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,yBAAkB,QAAQ,OAAO,QAAQ,OAAO;AAqBhD,wBAAkB,MAAK;AACvB,4BAAsB,KAAI,SAAS;AACnC,4BAAsB,MAAM,IAAI,IAAI,IAAI;AACxC,aAAO,MAAI,IAAI,WAAW,gBAAgB;;AAuB9C,sFAAkF,gBAAe,SAAA,UAAU;AACvG,8BAAwB,iBAAgB,kBAAkB,oBAAoB;AAC9E,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,qBAAe;AACf,UAAI,WAAW;AACX,mBAAW,iBAAgB,SAAS,WAAW;;AAEnD,yBAAkB,kBAAkB,OAAO,QAAQ,OAAO;AAC1D,UAAI,iBAAiB;AACjB,qCAA6B,QAAO;AACpC,oBAAY,QAAO;AACnB,qBAAa,QAAO;AACpB,4BACI,MAAI,KAAI,mBAAmB,IAAI,KAAK,wBAAwB,KAAI,MAAM;;AAE9E,sBAAe,+BAA+B,mBAAmB;AACjE,aAAO,oBAAoB,SAAQ,UAAU;;AAE1C,gCAA4B,IAAG,CAAE;AClGxC;;;;;;;;;;;;;;;;AA0DA,kEAA8D;AAC1D,UAAI,QAAQ;AACR,cAAM,OAAO,OAAO;;AAExB,UAAI,QAAQ,OAAO,OAAO;AACtB,cAAM,MAAM,mGAC+B,OAAO,oBAC/B;;AAGvB,uBAAiB,WAAW;AAIxB,yBAAiB;AACjB,oBAAY,UAAU,SAAQ,CAAC,MAAM;AACrC,0BAAkB,IAAI,MAAK,SAAQ,YAAY;AAC/C,aAAK,CAAC,SAAQ;AACd,2BAAmB,IAAI,KAAI,WAAW;AACtC,sBAAc,MAAI,YAAY,CAAC;AAC/B,yBAAiB;AACb,wCAA4B;AAC5B,0BAAgB,sBAAqB,GAAG,OAAO,CAAC;AAChD,iBAAO;YACH,KAAI,SAAQ,IAAI,UAAU,IAAI,MAAK,SAAQ,YAAY,IAAI;YAC3D,KAAI,SAAQ,IAAI,UAAU,IAAI,IAAI,aAAY,MAAK,SAAQ;;;AAGnE,eAAO,CAAE,OAAO;;AAEpB,aAAO,SAAS,QAAQ;;AAqB5B,kFAA8E,gBAAe,SAAA,UAAU;AACnG,0BAAoB,iBAAgB,cAAc,gBAAgB;AAClE,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,qBAAe;AACf,UAAI,WAAW;AACX,mBAAW,iBAAgB,SAAS,WAAW;;AAEnD,yBAAkB,cAAc,OAAO,QAAQ,OAAO;AACtD,UAAI,iBAAiB;AACjB,qCAA6B,QAAO;AACpC,oBAAY,QAAO;AACnB,2BAAmB,QAAO,cAAc,MAAM;AAC9C,wBACI,MAAI,KAAI,eAAe,IAAI,KAAK,wBAAwB,IAAI,sBAAsB;;AAE1F,sBAAe,+BAA+B,eAAe;AAC7D,aAAO,oBAAoB,SAAQ,UAAU;;AAE1C,gCAA4B,IAAG,CAAE;AC/HxC;;;;;;;;;;;;;;;;AAoNK,qBAAY;MACb;MACA;MACA;MACA;;AAOC,mBAAU;MACX;MACA;MACA;MACA;;AAcC,mBAAS;MACV;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;;AAMC,mBAAU;MACX;MACA;MACA;;AAYC,mBAAU;MACX;MACA;MACA;MACA;MACA;MA
CA;MACA;MACA;MACA;;AC1RJ;;;;;;;;;;;;;;;;4BAqB+B;MAe3B,yBAAyB;AACrB,eAAQ,OAAO,iBAAU,KAAK,iBAAiB,GAAG;AAClD,YAAI,WAAW;AACX,4BAAkB,QAAQ,IAAI,OAAM,EAAE,MAAM,EAAE,MAAM,QAAQ,OAAM,EAAE;AACpE,eAAK,eAAe;;AAGpB,eAAK,eAAe;;AAGxB,gBAAQ;AACR,YAAI;AACA,iBAAO;;AAGP,gBAAM;AACN,iBAAO;;;UAMX;AACA,YAAI,KAAK,eAAe;AACpB,eAAK,cAAc;;AAEvB,eAAO,KAAK;;MAEhB;AACI,aAAK,cAAc,KAAK,aAAa;;MAezC;AACI,eAAO,cAAc,GAAG;;MAK5B;AACI,YAAI,KAAK,eAAe;AACpB,kBAAQ,KAAK;;;YAGf;AACF,YAAI,KAAK,eAAe;AACpB,eAAK,cAAc;;AAEvB,eAAO;UACH,MAAM;UAEN,QAAQ,QAAO,KAAK,aAAa;;;YAGnC;AACF,cAAM,IAAI,MAAM;;YAEd;AACF,cAAM,IAAI,MAAM,4DACT,KAAK;;YASV;AACF,aAAK,cAAe,OAAM,aAAa,GAAG,OAAO,QAAQ;AACzD,eAAO,aAAa,MAAM;;;AAGlC,WAAO,eAAe,WAAW,OAAO,aAAa;MACjD,OAAO;AACH,eAAO,SAAS,YAAY,QAAQ,SAAS,oBAAoB,QAC7D,SAAS,kBAAkB;;;AC3HvC;;;;;;;;;;;;;;;;oCA2BuC;MACnC,0CAAyC;AACrC;AACA,aAAK,eAAe;AACpB,aAAK,MAAM;AACX,aAAK,UAAU;AACf,aAAK,mBAAmB;AACxB,aAAK,qBAAqB;AAC1B,YAAI,YAAW;AACX,eAAK,UAAU,QAAO,QAAQ;;;MAGtC;AACI,8BAAsB,MAAM,QAAQ,qBAChC,kBAAkB,IAAI,UAAQ,KAAK,QACnC,OAAO,KAAK;AAChB,sBAAc,QAAQ;AAClB,wBAAc,QAAO,oBAAoB;AACzC,4BAAkB;AAClB,cAAI,KAAK,iBAAiB,MAAM;AAC5B,iBAAK,iBAAiB,KAAK;cACvB,cAAc,GAAG;cACjB,UAAU,KAAK,MAAM,WAAU,OAAO,SAAS;;;AAGvD,cAAI,KAAK,mBAAmB,MAAM;AAC9B,iBAAK,mBAAmB,KAAK;cACzB,cAAc,GAAG;cACjB,UAAU,KAAK,MAAM,WAAU,OAAO,SAAS;;;AAGvD,2BAAiB,MAAM,QAAQ,qBAC3B,kBAAkB,GAAG,SACrB,kBAAkB;AACtB,cAAI,YAAY;AACZ;;AAEJ,kCAAwB,KAAK,iBAAiB,GAAG;AACjD,oCAA0B,KAAK,mBAAmB,GAAG;AACrD,eAAK;AACD,uCAA2B,MAAI,KAAI,iBAAiB,KAAK,MAAM,KAAI,OAAO,WAAW,IAAI,KAAK;AAC9F,4BAAgB,KAAI,IAAI,KAAK,MAAI,mBAAmB,KAAK,WAAW,KAAK,MAAI,iBAAiB,KAAK,YAAY;AAC/G,yCAA6B,MAAI,KAAI,mBAAmB,KAAK,MAAM,KAAI,OAAO,UAAU,IAAI,KAAK;AACjG,4BAAgB,OAAO;AACvB,8BAAkB,OAAO;AACzB,6BAAiB,MAAI,KAAI,SAAS,CAAC,KAAK,eAAe;AACvD,kBAAM,OAAO;;;AAGrB,aAAK;;MAET;AACI,YAAI,KAAK,sBAAsB;AAC3B,kBAAQ,KAAK,iBAAiB,IAAI,OAAK,EAAE;AACzC,kBAAQ,KAAK,mBAAmB,IAAI,OAAK,EAAE;;;YAG7C;AAEF,0BAAkB,CAAC,GAAG,KAAK,kBAAkB,GAAG,KAAK;AACrD,eAAO,CAAC,MAAM,KAAK,kBAAkB,OAAO,UAAU,IAAI,OAAM,EAAE,MAAM,EAAE,cAAc,QAAQ,EAAE;;YAEhG;AACF,uBAAe,MAAM,KAAK,kBAAkB;AAC5C,8BAAsB,aAAa,SAAS;AAC5C,0BAAkB;AAClB,aAAK,mBACD,aAAa,MAAM,GAAG,eAAe,IAAI,OAAM;UAC3C,cAAc,EAAE;UAChB,UAAU,EAAE,OAAO,SAAS;;AAEpC,aAAK,qBACD,aAAa,MAAM,eAAe,gBAAgB,GAC7C,IAAI,OAAM;UACX,cAAc,EAAE;UAChB,UAAU,EAAE,OAAO,SAAS;;;MAGxC;AACI,eAAO;UACH,cAAgB,KAAK;UACrB,KAAO,KAAK;UACZ,SAAW,KAAK;;;aAIjB;AACH,eAAO,IAAI,IAAI,QAAO,iBAAiB,QAAO,QAAQ,QAAO;;;AAIrE,sBAAkB,YAAY;AAC9B,kBAAc;ACvHd;;;;;;;;;;;;;;;;mCA2BsC;MAClC,oDAAoD;AAChD;AACA,aAAK,eAAe;AACpB,aAAK,0BAA0B;AAC/B,aAAK,mBAAmB;;MAE5B;AACI,8BAAsB,MAAM,QAAQ,qBAChC,kBAAkB,IAAI,UAAQ,KAAK,QACnC,OAAO,KAAK;AAChB,sBAAc,QAAQ;AAClB,wBAAc,QAAO,oBAAoB;AACzC,cAAI,KAAK,iBAAiB,MAAM;AAC5B,8BAAkB;AAClB,iBAAK,iBAAiB,KAAK;cACvB,cAAc,GAAG;cACjB,UAAU,KAAK,MAAM,MAAK,MAAM,OAAO,KAAK,yBACvC,SAAS;;;AAGtB,2BAAiB,MAAM,QAAQ,qBAC3B,kBAAkB,GAAG,SACrB,kBAAkB;AACtB,cAAI,YAAY;AACZ;;AAEJ,kCAAwB,KAAK,iBAAiB,GAAG;AACjD,eAAK;AACD,uCAA2B,MAAI,iBAAiB,OAAO;AACvD,4BAAgB,OAAO;AACvB,6BAAiB,MAAI,KAAI,IAAI,UAAU,KAAK,MAAI,oBAAoB,QAAO,QAAQ,cAAc,CAAC,KAAK,eAAe;AACtH,kBAAM,OAAO;;;AAGrB,aAAK;;MAET;AACI,YAAI,KAAK,oBAAoB;AACzB,kBAAQ,KAAK,iBAAiB,IAAI,OAAK,EAAE;;;YAG3C;AAEF,eAAO,CAAC,MAAM,KAAK,kBAAkB,OAAO,KAAK,iBAAiB,IAAI,OAAM,EAAE,MAAM,EAAE,cAAc,QAAQ,EAAE;;YAE5G;AACF,uBAAe,MAAM,KAAK,kBAAkB;AAC5C,0BAAkB;AAClB,aAAK,mBAAmB,aAAa,IAAI,OAAM,EAAE,cAAc,EAAE,MAAM,UAAU,EAAE,OAAO,SAAS;;MAEvG;AACI,eAAO;UACH,cAAgB,KAAK;UACrB,yBAA2B,KAAK;;;aAIjC;AACH,eAAO,IAAI,IAAI,QAAO,iBAAiB,QAAO;;;AAItD,qBAAiB,YAAY;AAC7B,kBAAc;AC3Fd;;;;;;;;;;;;;;;;gCA6BmC;MAC/B,mDAAkD;AAC9C;AACA,aAAK,eAAe;AACpB,aAAK,QAAQ;AACb,aAAK,QAAQ;AACb,aAAK,UAAU;AACf,aAAK,yBAAyB;AAC9B,aAAK,0BA
A0B;AAC/B,aAAK;AAED,eAAK,WAAW,QAAO,OAAO;AAC9B,eAAK,WAAW,QAAO,OAAO;;AAElC,YAAI,YAAW;AACX,eAAK,UAAU,QAAO,QAAQ;;;MAGtC;AACI,yBAAiB,MAAM,QAAQ,qBAC3B,kBAAkB,IAAI,OAAK,EAAE,QAC7B,OAAO,KAAK;AAChB,aAAK;AACD,mCAAyB,IAAI,GAAG,KAAK;AACrC,mCAAyB,IAAI,GAAG,KAAK;AACrC,mBAAS,QAAQ;AACb,0BAAc,QAAO,oBAAoB;AACzC,8BAAkB;AAClB,gBAAI,KAAK,uBAAuB,MAAM;AAClC,mBAAK,uBAAuB,KAAK;gBAC7B,cAAc,GAAG;gBACjB,UAAU,KAAK,MAAM,WAAU,OAAO,SAAS;;;AAGvD,gBAAI,KAAK,wBAAwB,MAAM;AACnC,mBAAK,wBAAwB,KAAK;gBAC9B,cAAc,GAAG;gBACjB,UAAU,KAAK,MAAM,WAAU,OAAO,SAAS;;;AAGvD,6BAAiB,MAAM,QAAQ,qBAC3B,kBAAkB,GAAG,SACrB,kBAAkB;AACtB,gBAAI,YAAY;AACZ;;AAEJ,gCAAoB,KAAK,uBAAuB,GAAG;AACnD,iCAAqB,KAAK,wBAAwB,GAAG;AACrD,mCAAuB,MAAI,KAAI,aAAa,KAAK,QAAQ,KAAI,UAAU,IAAI,KAAK;AAChF,oCAAwB,MAAI,KAAI,cAAc,KAAK,QAAQ,KAAI,OAAO,WAAW,IAAI,KAAK;AAC1F,6CAAiC,IAAI,gBAAgB;AACrD,8CAAkC,IAAI,iBAAiB;AACvD,wBAAY,OAAO;AACnB,yBAAa,OAAO;AACpB,6BAAiB,MAAI,KAAI,IAAI,0BAA0B,MAAI,KAAK,4BAA4B,KAAK,WAAW,CAAC,KAAK,eAAe;AACjI,kBAAM,OAAO;;AAEjB,eAAK,SAAS,OAAO,KAAI,KAAK,UAAU,KAAK;AAC7C,eAAK,SAAS,OAAO,KAAI,KAAK,UAAU,KAAK;;AAEjD,aAAK;;MAET;AACI,aAAK,SAAS;AACd,aAAK,SAAS;AACd,YAAI,KAAK,0BAA0B;AAC/B,kBAAQ,KAAK,uBAAuB,IAAI,OAAK,EAAE;;AAEnD,YAAI,KAAK,2BAA2B;AAChC,kBAAQ,KAAK,wBAAwB,IAAI,OAAK,EAAE;;;YAGlD;AAEF,0BAAkB,CAAC,GAAG,KAAK,wBAAwB,GAAG,KAAK;AAC3D,eAAO,CAAC,MAAM,KAAK,kBAAkB,OAAO,UAAU,IAAI,OAAM,EAAE,MAAM,EAAE,cAAc,QAAQ,EAAE;;YAEhG;AACF,uBAAe,MAAM,KAAK,kBAAkB;AAC5C,aAAK;AACD,eAAK,SAAS,OAAO,IAAI,KAAK,OAAO,KAAK,cAAc;AACxD,eAAK,SAAS,OAAO,IAAI,KAAK,OAAO,KAAK,cAAc;;AAE5D,8BAAsB,aAAa,SAAS;AAC5C,0BAAkB;AAClB,aAAK,yBACD,aAAa,MAAM,GAAG,eAAe,IAAI,OAAM;UAC3C,cAAc,EAAE;UAChB,UAAU,EAAE,OAAO,SAAS;;AAEpC,aAAK,0BACD,aAAa,MAAM,eAAe,gBAAgB,GAC7C,IAAI,OAAM;UACX,cAAc,EAAE;UAChB,UAAU,EAAE,OAAO,SAAS;;;MAGxC;AACI,eAAO;UACH,cAAgB,KAAK;UACrB,OAAS,KAAK;UACd,OAAS,KAAK;UACd,SAAW,KAAK;;;aAIjB;AACH,eAAO,IAAI,IAAI,QAAO,iBAAiB,QAAO,UAAU,QAAO,UAAU,QAAO;;;AAIxF,kBAAc,YAAY;AAC1B,kBAAc;AC7Id;;;;;;;;;;;;;;;;kCA4BqC;MACjC,mDAAkD,cAAc;AAC5D;AACA,aAAK,eAAe;AACpB,aAAK,QAAQ;AACb,aAAK,QAAQ;AACb,aAAK,UAAU;AACf,aAAK,QAAQ;AACb,aAAK,yBAAyB;AAC9B,aAAK,6BAA6B;AAClC,aAAK;AACD,eAAK,YAAY,QAAO,GAAG;AAC3B,eAAK,WAAW,QAAO,OAAO;;AAElC,YAAI,YAAW;AACX,eAAK,UAAU,QAAO,QAAQ;;;MAGtC;AACI,8BAAsB,MAAM,QAAQ,qBAChC,kBAAkB,IAAI,UAAQ,KAAK,QACnC,OAAO,KAAK;AAChB,aAAK;AACD,mCAAyB,IAAI,GAAG,KAAK;AACrC,qBAAW,IAAI,CAAC,KAAK,cAAc,MAAI,KAAI,KAAK,WAAW,KAAK,QAAQ;AACxE,wBAAc,QAAQ;AAClB,0BAAc,QAAO,oBAAoB;AACzC,8BAAkB;AAClB,gBAAI,KAAK,uBAAuB,MAAM;AAClC,mBAAK,uBAAuB,KAAK;gBAC7B,cAAc,GAAG;gBACjB,UAAU,WAAU,OAAO,SAAS;;;AAG5C,gBAAI,KAAK,2BAA2B,MAAM;AACtC,mBAAK,2BAA2B,KAAK;gBACjC,cAAc,GAAG;gBACjB,UAAU,WAAU,OAAO,SAAS;;;AAG5C,6BAAiB,MAAM,QAAQ,qBAC3B,kBAAkB,GAAG,SACrB,kBAAkB;AACtB,gBAAI,YAAY;AACZ;;AAEJ,gCAAoB,KAAK,uBAAuB,GAAG;AACnD,oCAAwB,KAAK,2BAA2B,GAAG;AAC3D,mCAAuB,MAAI,KAAI,aAAa,KAAK,QAAQ,KAAI,UAAU,IAAI,KAAK;AAChF,wBAAY,KAAI,iBAAiB,KAAK;AACtC,wBAAY,IAAI;AAChB,uCAA2B,QAAQ,KAAK;AACxC,wBAAY,OAAO;AACnB,4BAAgB,OAAO;AACvB,6BAAiB,MAAI,KAAI,IAAI,IAAI,mBAAmB,IAAI,gBAAgB,MAAI,oBAAoB,KAAK,YAAY;AACjH,kBAAM,OAAO;;AAEjB,eAAK,UAAU,OAAO,MAAI,KAAK,WAAW;AAC1C,eAAK,SAAS,OAAO,KAAI,KAAK,UAAU,KAAK;;AAEjD,aAAK;;MAET;AACI,aAAK,SAAS;AACd,aAAK,UAAU;AACf,YAAI,KAAK,0BAA0B;AAC/B,kBAAQ,KAAK,uBAAuB,IAAI,OAAK,EAAE;;AAEnD,YAAI,KAAK,8BAA8B;AACnC,kBAAQ,KAAK,2BAA2B,IAAI,OAAK,EAAE;;;YAGrD;AACF,cAAM,IAAI,MAAM;;YAEd;AACF,cAAM,IAAI,MAAM;;MAEpB;AACI,eAAO;UACH,cAAgB,KAAK;UACrB,OAAS,KAAK;UACd,OAAS,KAAK;UACd,SAAW,KAAK;UAChB,OAAS,KAAK;;;aAIf;AACH,eAAO,IAAI,IAAI,QAAO,iBAAiB,QAAO,UAAU,QAAO,UAAU,QAAO,YAAY,QAAO;;;AAI3G,oBAAgB,YAAY;AAC5B,kBAAc;AC1Hd;;;;;;;;;;;;;;;;+BAwBkC;MAC9B;AACI;AACA,aAAK,eAAe;AACpB,aAAK,gBAAgB;;MAEzB;AAC
I,yBAAiB,MAAM,QAAQ,qBAC3B,kBAAkB,IAAI,OAAK,EAAE,QAC7B,OAAO,KAAK;AAChB,iBAAS,QAAQ;AACb,2BAAiB,MAAM,QAAQ,qBAC3B,kBAAkB,GAAG,SACrB,kBAAkB;AACtB,cAAI,YAAY;AACZ;;AAEJ,wBAAc,QAAO,oBAAoB;AACzC,eAAK;AACD,6BAAiB,MAAI,KAAI,KAAK,GAAG,WAAW;AAC5C,kBAAM,OAAO;;;AAGrB,aAAK;;MAKT;AACI,aAAK,eAAe;AACpB,YAAI,KAAK,KAAK;AACV,eAAK,EAAE;;AAEX,aAAK,IAAI,KAAK,QAAO,CAAC;;MAE1B;AACI,aAAK,EAAE;;YAEL;AACF,eAAO,CAAC,MAAM,KAAK;;YAEjB;AACF,uBAAe,MAAM,KAAK,kBAAkB;AAC5C,YAAI,aAAa,WAAW;AACxB,gBAAM,IAAI,MAAM;;;MAGxB;AACI,eAAO,CAAE,cAAgB,KAAK;;aAG3B;AACH,eAAO,IAAI,IAAI,QAAO;;;AAI9B,iBAAa,YAAY;AACzB,kBAAc;ACjFd;;;;;;;;;;;;;;;;oCAyBuC;MACnC,kDAAkD;AAC9C,cAAM;AACN,aAAK,eAAe;AACpB,aAAK,WAAW;AAChB,aAAK,cAAc;AACnB,aAAK,gBAAgB;AACrB,aAAK,IAAI,QAAO,KAAK;;MAEzB;AACI,8BAAsB,MAAM,QAAQ,qBAChC,kBAAkB,IAAI,UAAQ,KAAK,QACnC,OAAO,KAAK;AAChB,sBAAc,QAAQ;AAClB,wBAAc,QAAO,oBAAoB;AACzC,cAAI,KAAK,cAAc,MAAM;AACzB,8BAAkB;AAClB,iBAAK,cAAc,KAAK;cACpB,cAAc,GAAG;cACjB,UAAU,KAAK,MAAM,WAAU,OAAO,SAAS;;;AAGvD,+BAAqB,KAAK,cAAc,GAAG;AAC3C,2BAAiB,MAAM,QAAQ,qBAC3B,kBAAkB,GAAG,SACrB,kBAAkB;AACtB,cAAI,YAAY;AACZ;;AAEJ,eAAK;AACD;AACA,oCAAwB,MAAI,KAAI,KAAK,GAAG,eAAe;AACvD,gBAAI,KAAK;AACL,yBAAW,MAAI,KAAI,KAAK,GAAG,MAAI,UAAU,KAAI,iBAAiB,KAAK,MAAM;;AAGzE,yBAAW,MAAI,KAAI,KAAK,GAAG,kBAAkB;;AAEjD,yBAAa,OAAO;AACpB,kBAAM,OAAO;;;AAGrB,aAAK;;MAET;AACI,aAAK,EAAE;AACP,YAAI,KAAK,iBAAiB;AACtB,kBAAQ,KAAK,cAAc,IAAI,OAAK,EAAE;;;MAQ9C;AACI,aAAK,WAAW;;YAEd;AAEF,eAAO,CAAC,MAAM,KAAK,kBAAkB,OAAO,KAAK,cAAc,IAAI,OAAM,EAAE,MAAM,EAAE,cAAc,QAAQ,EAAE;;YAEzG;AACF,uBAAe,MAAM,KAAK,kBAAkB;AAC5C,0BAAkB;AAClB,aAAK,gBAAgB,aAAa,IAAI,OAAM,EAAE,cAAc,EAAE,MAAM,UAAU,EAAE,OAAO,SAAS;;MAEpG;AACI,eAAO;UACH,cAAgB,KAAK;UACrB,UAAY,KAAK;UACjB,aAAe,KAAK;;;aAIrB;AACH,eAAO,IAAI,IAAI,QAAO,iBAAiB,QAAO,aAAa,QAAO;;;AAI1E,sBAAkB,YAAY;AAC9B,kBAAc;AC1Gd;;;;;;;;;;;;;;;;mCA4BsC;MAClC,kCAAkC,gBAAgB,cAAe,iBAAiB;AAC9E;AACA,aAAK,eAAe;AACpB,aAAK,QAAQ;AACb,aAAK,WAAW;AAChB,aAAK,UAAU;AACf,aAAK,yBAAyB;AAC9B,aAAK,qBAAqB;AAC1B,aAAK,uBAAuB;AAC5B,aAAK,WAAW;AAChB,YAAI,YAAW;AACX,eAAK,UAAU,QAAO,QAAQ;;AAElC,YAAI,gBAAgB;AAChB,gBAAM,IAAI,MAAM;;;MAGxB;AACI,8BAAsB,MAAM,QAAQ,qBAChC,kBAAkB,IAAI,UAAQ,KAAK,QACnC,OAAO,KAAK;AAChB,sBAAc,QAAQ;AAClB,wBAAc,QAAO,oBAAoB;AACzC,4BAAkB;AAClB,cAAI,KAAK,uBAAuB,MAAM;AAClC,iBAAK,uBAAuB,KAAK;cAC7B,cAAc,GAAG;cACjB,UAAU,KAAK,MAAM,WAAU,OAAO,SAAS;;;AAGvD,cAAI,KAAK,mBAAmB,MAAM;AAC9B,iBAAK,mBAAmB,KAAK;cACzB,cAAc,GAAG;cACjB,UAAU,KAAK,MAAM,WAAU,OAAO,SAAS;;;AAGvD,cAAI,KAAK,qBAAqB,MAAM,QAAQ,KAAK;AAC7C,iBAAK,qBAAqB,KAAK;cAC3B,cAAc,GAAG;cACjB,UAAU,KAAK,MAAM,WAAU,OAAO,SAAS;;;AAGvD,2BAAiB,MAAM,QAAQ,qBAC3B,kBAAkB,GAAG,SACrB,kBAAkB;AACtB,cAAI,YAAY;AACZ;;AAEJ,wCAA8B,KAAK,uBAAuB,GAAG;AAC7D,qCAA2B,KAAK,mBAAmB,GAAG;AACtD,eAAK;AACD,6CAAiC,MAAI,KAAI,uBAAuB,KAAK,QAAQ,KAAI,OAAO,WAAW,IAAI,KAAK;AAC5G,gBAAI,KAAK;AACL,0CAA4B,KAAK,qBAAqB,GAAG;AAEzD,6CAA+B,MAAI,KAAI,qBAAqB,KAAK,QAAQ,KAAI,UAAU,IAAI,KAAK;AAChG,uCAAyB,IAAI,KAAI,UAAU,KAAK,eAAe,KAAK,IAAI,0BAA0B,MAAI,OAAO,yBAAyB,KAAK;AAC3I,4CAA8B,MAAI,KAAI,oBAAoB,KAAK,WAAW;AAC1E,oCAAsB,OAAO;AAC7B,kCAAoB,OAAO;AAC3B,iCAAmB,OAAO;AAC1B,+BAAiB,IAAI,OAAO;AAC5B,oBAAM,OAAO;;AAIb,gDAAiC,MAAI,KAAI,uBAAuB,KAAK,QAAQ,KAAI,OAAO,WAAW,IAAI,KAAK;AAC5G,4CAA8B,MAAI,KAAI,oBAAoB,KAAK,WAAW,IAAI,KAAI,UAAU,KAAK,eAAe,KAAK,MAAI,2BAA0B,KAAK;AACxJ,oCAAsB,OAAO;AAC7B,iCAAmB,OAAO;AAC1B,+BAAiB,IAAI,OAAO;AAC5B,oBAAM,OAAO;;;;AAIzB,aAAK;;MAET;AACI,YAAI,KAAK,0BAA0B;AAC/B,kBAAQ,KAAK,uBAAuB,IAAI,OAAK,EAAE;;AAEnD,YAAI,KAAK,wBAAwB,QAAQ,KAAK;AAC1C,kBAAQ,KAAK,qBAAqB,IAAI,OAAK,EAAE;;AAEjD,YAAI,KAAK,sBAAsB;AAC3B,kBAAQ,KAAK,mBAAmB,IAAI,OAAK,EAAE;;;YAG7C;AAEF,0BAAkB,CAAC,GAAG,KAAK,wBAAwB,GAAG,KAAK;AAC3D,YAAI,KAAK;AACL,oBAAU,KAAK,GAAG,KAAK;;AAE3B,eA
AO,CAAC,MAAM,KAAK,kBAAkB,OAAO,UAAU,IAAI,OAAM,EAAE,MAAM,EAAE,cAAc,QAAQ,EAAE;;YAEhG;AACF,uBAAe,MAAM,KAAK,kBAAkB;AAC5C,8BAAsB,KAAK,WAAW,aAAa,SAAS,IAAI,aAAa,SAAS;AACtF,0BAAkB;AAClB,aAAK,yBACD,aAAa,MAAM,GAAG,eAAe,IAAI,OAAM;UAC3C,cAAc,EAAE;UAChB,UAAU,EAAE,OAAO,SAAS;;AAEpC,aAAK,qBACD,aAAa,MAAM,eAAe,gBAAgB,GAC7C,IAAI,OAAM;UACX,cAAc,EAAE;UAChB,UAAU,EAAE,OAAO,SAAS;;AAEpC,YAAI,KAAK;AACL,eAAK,uBACD,aAAa,MAAM,gBAAgB,GAAG,gBAAgB,GACjD,IAAI,OAAM;YACX,cAAc,EAAE;YAChB,UAAU,EAAE,OAAO,SAAS;;;;MAI5C;AACI,eAAO;UACH,cAAgB,KAAK;UACrB,OAAS,KAAK;UACd,UAAY,KAAK;UACjB,SAAW,KAAK;UAChB,UAAY,KAAK;;;aAIlB;AACH,eAAO,IAAI,IAAI,QAAO,iBAAiB,QAAO,UAAU,QAAO,aAAa,QAAO,YAAY,QAAO;;;AAI9G,qBAAiB,YAAY;AAC7B,kBAAc;ACrKd;;;;;;;;;;;;;;;;;aA6DW;AACH,eAAO,IAAI,aAAa;;aAiBrB,+CAA+C;AAClD,eAAO,IAAI,kBAAkB,cAAc,UAAU;;aAsBlD,8BAA8B,gBAAe,cAAe,iBAAiB;AAChF,eAAO,IAAI,iBAAiB,cAAc,OAAO,UAAU,UAAS;;aAcjE,oBAAoB,cAAe,aAAa,kBAAiB;AACpE,eAAO,IAAI,cAAc,cAAc,OAAO,OAAO;;aAclD,wBAAwB,YAAY,iBAAe;AACtD,eAAO,IAAI,kBAAkB,cAAc,KAAK;;aAe7C,sBAAsB,cAAe,aAAa,kBAAiB,cAAc;AACpF,eAAO,IAAI,gBAAgB,cAAc,OAAO,OAAO,UAAS;;aAkB7D,gDAAgD;AACnD,eAAO,IAAI,iBAAiB,cAAc;;;ACxKlD;;;;;;;;;;;;;;;;AA0BA;MAAC;MAAmB;MAAc;MAAmB;MACjD;MAAkB;MAAiB;;AAC3B,kBAAS;MACjB,KAAK,sBAAsB;MAC3B,UAAU,sBAAsB;MAChC,UAAU,sBAAsB;MAChC,SAAS,sBAAsB;MAC/B,SAAS,sBAAsB;MAC/B,QAAQ,sBAAsB;MAC9B,MAAM,sBAAsB;;ACnChC;;;;;;;;;;;;;;;;AAgBA,0BAAuB;AACnB,UAAI,OAAO,0BAA0B;AACjC,eAAO;iBAEF,OAAO,iBAAiB;AAC7B,eAAO;;AAEX,aAAO,OAAO;;AAYlB;AACI,aAAO,IAAI,QAAQ,aAAW,cAAc,MAAM;;ACpCtD;;;;;;;;;;;;;;;;AAiBO;AACH,sBAAgB,aAAc,QAAO,WAAW,WAAW,SAAS,OAAO;AAC3E,sBAAgB,cAAe,QAAO,WAAW,WAAW,SAAS,OAAO;AAC5E,aAAO,CAAC,SAAS;;ACpBrB;;;;;;;;;;;;;;;;AAwBO,wEAAkE;AACrE,qBAAe;AACf,UAAI;AACA,mBAAW,SAAS,OAAO,WAAW,MAAM;AAC5C,iBAAS,KAAK,WAAW,KAAK;AAC9B,mBAAW,SAAS,OAAO,WAAW,MAAM;;AAG5C,mBAAW,SAAS,OAAO,WAAW;AACtC,8BAAsB,WAAW;AACjC,qBAAa,GAAG,IAAI,eAAe,EAAE;AACjC,qBACI,SAAS,OAAO,CAAC,WAAW,IAAI,KAAK,WAAW,IAAI,WAAW;;AAEvE,mBAAW,SAAS,OAAO,WAAW,MAAM,gBAAgB;;AAEhE,aAAO;;AAWJ,uEAAkE;AACrE,uBAAiB;AACjB,UAAI;AACA,iBAAS,KAAK;AACd,qBAAa,iBAAiB,GAAG,IAAI,cAAc,EAAE;AACjD,cAAI,KAAK,IAAI;AACT,qBAAS,KAAK;AACd,qBAAS,KAAK,IAAK,kBAAiB;;AAGpC,qBAAS,KAAK;;;;AAKtB,oCAA4B;AAC5B,mCAA2B;AAC3B,qBAAa,GAAG,IAAI,cAAc,EAAE;AAChC,cAAI,KAAK,iBAAiB,IAAI,KAAK,IAAI,MAAM;AACzC,+BAAmB,KAAK;;AAGxB,gCAAoB,KAAK;;;AAGjC,iBAAS,KAAK,GAAG;AACjB,iBAAS,KAAK;AACd,iBAAS,KAAK,GAAG;;AAErB,aAAO;;AAWJ,gFAA0E;AAC7E,+BAAyB;AACzB,UAAI;AACA,yBAAiB,KAAK,WAAW,KAAK;;AAGtC,yBAAiB,KAAK,WAAW,KAAK;;AAE1C,mBAAa,GAAG,IAAI,WAAW,QAAQ,EAAE;AACrC,YAAI,KAAK,WAAW;AAChB,cAAI;AACA,6BAAiB,KAAK,WAAW,IAAI,KAAK,WAAW;;AAGrD,6BAAiB,KAAK,WAAW,KAAK,WAAW,IAAI;;;AAIzD,2BAAiB,KAAK,WAAW;;;AAGzC,aAAO;;AAMJ;AACH,+BAAyB,CAAC;AAC1B,mBAAa,GAAG,IAAI,YAAY,EAAE;AAC9B,yBAAiB,KAAK,MAAM,GAAG;;AAEnC,aAAO;;AAaJ;AACH,wBAAkB,eAAe,MAAM,GAAG;AAC1C,mBAAa,GAAG,IAAI,YAAY,EAAE;AAC9B,kBAAU,KAAK,eAAe,IAAI,KAAK,MAAM,GAAG,KAAK,MAAM,GAAG;;AAElE,aAAO;;AC7IX;;;;;;;;;;;;;;;;AAgBO,6BAAwB;AACxB,wBAAmB;ACjB1B;;;;;;;;;;;;;;;;AAgBO,mBAAc;AACd,oBAAe;AACf,oBAAe;AACf,oBAAe;AACf,oBAAe;AACf,oBAAe;ACrBtB;;;;;;;;;;;;;;;;AAiBO;AACH,UAAI,CAAC,OAAM,QAAQ;AACf,gBAAQ,KAAK,GAAG;;;AAGjB;AACH,UAAI,CAAC,OAAM,QAAQ;AACf,gBAAQ,IAAI,GAAG;;;ACxBvB;;;;;;;;;;;;;;;;AA8BO;AACH,UAAI,MAAK,WAAW,MAAK;AACrB,cAAM,IAAI,MAAM,gEACT,MAAK,iBAAiB,MAAK;;AAEtC,qBAAe,IAAI,aAAa,MAAK,SAAS;AAC9C,mBAAa,GAAG,IAAI,OAAO,QAAQ,KAAK;AACpC,eAAO,KAAK,MAAK,IAAI;AACrB,eAAO,IAAI,KAAK,MAAK,IAAI;;AAE7B,aAAO;;AAgBJ;AACH,oBAAa,IAAI,aAAa,SAAQ,SAAS;AAC/C,oBAAa,IAAI,aAAa,SAAQ,SAAS;AAC/C,mBAAa,GAAG,IAAI,SAAQ,QAAQ,KAAK;AACrC,cAAK,IAAI,KAAK,SAAQ;AACtB,cAAK,IAAI,KAAK,SAAQ,IAAI;;AAE9B,aAAO,CAAE,aAAM;;AAMZ;AACH,kBAAY,KAAK,KAAK,SAAQ,SAA
S;AACvC,oBAAa,IAAI,aAAa;AAC9B,oBAAa,IAAI,aAAa;AAC9B,mBAAa,GAAG,IAAI,SAAQ,QAAQ,KAAK;AACrC,cAAK,KAAK,MAAM,IAAI,MAAM,SAAQ;AAClC,cAAK,KAAK,MAAM,IAAI,MAAM,SAAQ,IAAI;;AAE1C,aAAO,CAAE,aAAM;;AAMZ;AACH,kBAAY,KAAK,MAAM,SAAQ,SAAS;AACxC,oBAAa,IAAI,aAAa;AAC9B,oBAAa,IAAI,aAAa;AAC9B,mBAAa,GAAG,IAAI,SAAQ,QAAQ,KAAK;AACrC,cAAK,KAAK,MAAM,IAAI,MAAM,SAAQ;AAClC,cAAK,KAAK,MAAM,IAAI,MAAM,SAAQ,IAAI;;AAE1C,aAAO,CAAE,aAAM;;AAOZ;AACH,oBAAa,SAAQ,SAAQ;AAC7B,oBAAa,SAAQ,SAAQ,IAAI;AACjC,aAAO,CAAE,aAAM;;AAQZ;AACH,YAAK,SAAQ,KAAK;AAClB,YAAK,SAAQ,IAAI,KAAK;;AAKnB;AACH,oBAAa,IAAI,aAAa,IAAI;AAClC,oBAAa,IAAI,aAAa,IAAI;AAClC,mBAAa,GAAG,IAAI,KAAK,KAAK,IAAI,IAAI;AAClC,kBAAW,WAAU,IAAI,MAAM,KAAK,KAAM,KAAI;AAC9C,cAAK,KAAK,KAAK,IAAI;AACnB,cAAK,KAAK,KAAK,IAAI;;AAEvB,aAAO,CAAE,aAAM;;AAKZ;AACH,gBAAW,WAAU,IAAI,MAAM,KAAK,KAAM,KAAI;AAC9C,oBAAa,KAAK,IAAI;AACtB,oBAAa,KAAK,IAAI;AACtB,aAAO,CAAE,aAAM;;ACrInB;;;;;;;;;;;;;;;;AA4CO;AACH,UAAI,UAAU;AACV,YAAI,EAAE,UAAU;AACZ,iBAAO,EAAE;;AAEb,4BAAoB,OAAM,EAAE;AAC5B,uBAAe,MAAK,GAAG;AACvB,uBAAe,SAAQ,QAAQ,QAAQ;AACvC,oBAAY;AACZ,eAAO;AACP,eAAO;;AAEX,UAAI,CAAC,iBAAgB,EAAE,OAAO;AAG1B,eAAO,QAAO,qBAAqB,EAAE,QAAQ,EAAE,OAAO;;AAE1D,UAAI,EAAE,UAAU;AACZ,sBAAa,SAAQ,KAAK;AAC1B,uBAAe,MAAK,OAAM;AAC1B,cAAK;AACL,eAAO;;AAEX,UAAI,UAAU;AACV,eAAO,SAAQ,IAAI;iBAEd,UAAU;AACf,qBAAa,QAAO,GAAG,EAAE;AACzB,uBAAe,SAAQ,SAAS,GAAG;AACnC,aAAK;AACL,eAAO;;AAGP,cAAM,IAAI,MAAM,iCAAiC,EAAE,YAAY;;;AAGhE;AACH,aAAO,QAAO,qBAAqB,EAAE,QAAQ,OAAO,EAAE;;AAEnD;AACH,oBAAc,QAAO,SAAU,OAAM;AACrC,qBAAe,qBAAoB,KAAK;AACxC,aAAO,KAAK;AACZ,mBAAa,GAAG,IAAI,OAAO,QAAQ;AAC/B,eAAO,KAAK,OAAO,IAAI,KAAK;;AAEhC,aAAO,UAAS,QAAQ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AC1F5B;;;;;;;;;;;;;;;;AAqBO;AACH,oBAAc,IAAI,MAAM,EAAE,MAAM,KAAK;AACrC,mBAAa,EAAE,MAAM;AACrB,aAAO,WAAW,IAAI;AAClB,0BAAkB,CAAC,GAAG;AACtB,kBAAU,QAAQ;AAClB,uBAAe,OAAM,GAAG,OAAO;AAC/B,cAAM,SAAS;AACf,eAAO;;;AC7Bf;;;;;;;;;;;;;;;;AAqBO;AACH,uBAAiB,IAAI,MAAM,KAAK;AAChC,mBAAa,GAAG,IAAI,SAAS,QAAQ;AACjC,iBAAS,KAAK,KAAK,MAAM,KAAK,KAAK;;AAEvC,qBAAe,QAAO,UAAU,KAAK;AACrC,mBAAa,GAAG,IAAI,OAAO,OAAO,QAAQ,EAAE;AACxC,uBAAe,OAAO,WAAW;AACjC,4BAAoB,IAAI,MAAM,KAAK;AACnC,qBAAa,GAAG,IAAI,YAAY,QAAQ;AACpC,sBAAY,KAAK,OAAO,KAAK,KAAK,MAAM;;AAE5C,8BAAsB,KAAK,WAAW;AACtC,eAAO,OAAO,KAAK,KAAK,OAAO;;AAEnC,aAAO,OAAO;;ACpClB;;;;;;;;;;;;;;;;AAmBO;AAEH,sBAAgB,OAAO,OAAO,SAAS;AACvC,4BAAsB,CAAC,EAAE,SAAS,SAAS;AAC3C,0BAAoB,wBAAuB,QAAQ,QAAQ;AAC3D,6BAAuB,wBAAuB,SAAS,QAAQ;AAC/D,mBAAa,GAAG,IAAI,OAAO;AACvB,uBAAe,IAAI;AACnB,qBAAa,EAAE,SAAS,QAAQ,SAAS;AACzC,0BAAkB;AAClB,qBAAa,GAAG,IAAI,KAAK,QAAQ;AAC7B,oBAAU,KAAK,CAAE,OAAO,KAAK,IAAI,OAAO;;AAE5C,kBAAU,KAAK,WAAU,GAAE,QAAQ,EAAE;AACrC,0BAAkB,IAAI;AACtB,yBAAiB,YAAY,SAAS,WAAW,YAAY;AAC7D,4BAAoB,eAAe,SAAS,WAAW,YAAY;AACnE,qBAAa,GAAG,IAAI,GAAG;AACnB,mBAAS,KAAK,UAAU,GAAG;AAC3B,sBAAY,KAAK,UAAU,GAAG;;;AAKtC,0BAAoB,OAAO;AAC3B,kBAAY,YAAY,SAAS,KAAK;AACtC,aAAO;QACH,QAAO,aAAa,aAAa;QACjC,QAAO,gBAAgB,aAAa;;;AC/C5C;;;;;;;;;;;;;;;;;;;;;;;;;;ACAA;;;;;;;;;;;;;;;;ACAA;;;;;;;;;;;;;;;;AAoBO,0BAAsB;MACzB,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,KAAI,IAAI,MAAK,MAAK,GAAG,YAAY;;;ACzB3D;;;;;;;;;;;;;;;;AAwBO,2BAAuB;MAC1B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO;UACH,GAAG;AACC,sBAAU,OAAO,MAAK,GAAG;AACzB,sBAAU,KAAK,IAAI,QAAO,IAAI;AAC9B,mBAAO,IAAI,IAAI,IAAI;;;;;ACjCnC;;;;;;;;;;;;;;;;AAsBO,4BAAwB;MAC3B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO;UACH,GAAG;AACC,sBAAU,KAAK,IAAI,OAAO,MAAK,GAAG,aAAa;AAC/C,mBAAO,IAAI,IAAI;;;;;AC9B/B;;;;;;;;;;;;;;;;AAoBO,0BAAsB;MACzB,YAAY;MACZ,cAAc,CAAC,KAAK;MACpB,UAAU;AACN,uBAAe;AACf,yBAAiB,4BAA0C,EAAE,OAAO,EAAE;AACtE,qBAAa;AACT,oBAAU;AACV,6BAAmB,kBAAgC,EAA
E,OAAO;AAC5D,cAAI,WAAW,SAAS;AACpB,kBAAM,MAAI,KAAK;;AAEnB,iBAAO,SAAQ,KAAK,EAAE;;AAE1B,qBAAa;AACT,oBAAU;AACV,6BAAmB,kBAAgC,EAAE,OAAO;AAC5D,cAAI,WAAW,SAAS;AACpB,kBAAM,MAAI,KAAK;;AAEnB,iBAAO,SAAQ,KAAK,EAAE;;AAE1B,eAAO,CAAE,GAAG,MAAM,GAAG;;;AC1C7B;;;;;;;;;;;;;;;;AAiBO,2BAAuB;MAC1B,YAAY;MACZ,eAAe;MACf,UAAU;AACN,qBAAa;AACb,cAAM,QAAQ;AACV,eAAK,KAAK,MAAM,GAAG;;AAEvB,eAAO;;;ACzBf;;;;;;;;;;;;;;;;AAkBO,6BAAyB;MAC5B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,WAAU;;;ACvBpC;;;;;;;;;;;;;;;;AAkBO,6BAAyB;MAC5B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,WAAU;;;ACvBpC;;;;;;;;;;;;;;;;AAuBO,2BAAuB;MAC1B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,IAAI,IAAI,KAAK,IAAI,QAAO,IAAI,OAAO,MAAK,GAAG;;;AC5BrE;;;;;;;;;;;;;;;;AAuBO,4BAAwB;MAC3B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO;UACH,GAAG;AACC,sBAAU,KAAK,MAAI,QAAO,IAAI,OAAO,MAAK,GAAG;AAC7C,mBAAO,IAAI,IAAI;;;;;AC/B/B;;;;;;;;;;;;;;;;AAyBO,4BAAwB;MAC3B,YAAY;MACZ,cAAc,CAAC,KAAK;MACpB,UAAU;AACN,uBAAe;AACf,yBAAiB,4BAA2B,EAAE,OAAO,EAAE;AACvD,qBAAa;AACT,oBAAU,MAAI,OAAO,IAAI,OAAO;AAChC,oBAAU,KAAI,IAAI,IAAI,GAAG;AACzB,6BAAmB,kBAAiB,EAAE,OAAO;AAC7C,cAAI,WAAW,SAAS;AACpB,kBAAM,MAAI,KAAK;;AAEnB,iBAAO,SAAQ,KAAK,EAAE;;AAE1B,qBAAa;AACT,oBAAU,MAAI,OAAO,IAAI,OAAO;AAChC,oBAAU,IAAI,KAAI,IAAI,IAAI,GAAG;AAC7B,6BAAmB,kBAAiB,EAAE,OAAO;AAC7C,cAAI,WAAW,SAAS;AACpB,kBAAM,MAAI,KAAK;;AAEnB,iBAAO,SAAQ,KAAK,EAAE;;AAE1B,eAAO,CAAE,GAAG,MAAM,GAAG;;;ACjD7B;;;;;;;;;;;;;;;;AAqBO,2BAAuB;MAC1B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,IAAI,IAAI,MAAI,OAAO,MAAK,GAAG,aAAa;;;AC1BlE;;;;;;;;;;;;;;;;AAsBO,4BAAwB;MAC3B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,IAAI,IAAI,IAAI,QAAO,IAAI,OAAO,MAAK,GAAG;;;AC3BhE;;;;;;;;;;;;;;;;AAoDA,6EAAwE,CAAC,GAAG,GAAG;AAC3E,kBAAY,iBAAgB,IAAI,MAAM;AACtC,qBAAe,iBAAgB,QAAO,SAAS;AAC/C,iBAAW;AACX,oBAAc;AACd,yBAAmB;AACnB,UAAI,OAAO,SAAS;AAChB,uBAAe;AACf,eAAO,SAAQ,KAAK,CAAC,GAAG,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;AAC5E,kBAAU,SAAQ,QAAQ;UACtB;UAAG,OAAO,MAAM;UAAI,OAAO,MAAM;UAAI,OAAO,MAAM;UAAI,OAAO,MAAM;;;AAG3E,cAAY,KAAK,SAAS,GAAG,MAAM,8DAC5B,KAAK;AACZ,cAAY,QAAQ,SAAS,GAAG,MAAM,iEAC/B,QAAQ;AACf,cAAY,gCAAyC,SAAS,YAAY,MAAM,kFAClD,0BAA0B;AACxD,UAAI,mBAAmB;AACnB,gBAAY,OAAW,OAAM,MAAM,kFACL,+BAA+B;;AAEjE,sBAAgB;AACZ,yBAAiB,mBAA4B,QAAQ,OAAO,YAAY,SAAS,WAAW,MAAK;AACjG,eAAO,SAAQ,kBAAkB,MAAM,SAAS;;AAEpD,qBAAe,CAAE,IAAI,MAAM,OAAO;AAClC,oBAAc,CAAE,YAAY,SAAS,WAAW,WAAK;AACrD,kBAAY,QAAO,cAAc,SAAS,QAAQ,MAAiB,mBAAmB;AACtF,UAAI;AACA,eAAO,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAE7E,aAAO;;AAEJ,8BAA0B,IAAG,CAAE;ACvFtC;;;;;;;;;;;;;;;;AAkBO,gCAA4B;MAC/B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAQ,YAAY,SAAS,WAAW,WAAK,mBAAoB;AACjE,2BAAmB,aAAa,OAAO,CAAC,GAAG,GAAG,KAAK;AACnD,eAAO;UACH,GAAG,MAAM,kBAAkB,IAAI,GAAG,YAAY,SAAS,YAAY,MAAK;;;;AC1BpF;;;;;;;;;;;;;;;;AAuCA;AACI,kBAAY,iBAAgB,IAAI,MAAM;AACtC,qBAAe,iBAAgB,QAAO,SAAS;AAC/C,cAAY,OAAO,SAAS,IAAI,MAAM,MAAM,kBAAkB,OAAO,oCAAoC,IAAI;AAC7G,oBAAc;AACd,iBAAW;AACX,yBAAmB;AACnB,UAAI,OAAO,SAAS;AAChB,uBAAe;AACf,kBACI,SAAQ,QAAQ,CAAC,GAAG,OAAO,MAAM,IAAI,OAAO,MAAM,IAAI,OAAO,MAAM;AACvE,eAAO,SAAQ,KAAK,CAAC,GAAG,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAElE,cAAY,KAAK,SAAS,GAAG,MAAM,4DAC5B,KAAK;AACZ,cAAY,QAAQ,SAAS,GAAG,MAAM,+DAC/B,QAAQ;AACf,sBAAgB;AACZ,yBAAiB,mBAA4B,QAAQ,OAAO,YAAY,SAAS,GAAmB;AACpG,eAAO,SAAQ,gBAAgB,MAAM,SAAS;;AAElD,qBAAe,CAAE,IAAI,MAAM,OAAO;AAClC,oBAAc,CAAE,YAAY,SAAS;AACrC,kBAAY,QAAO,cAAc,SAAS,QAAQ,MAAM,iBAAiB;AACzE,UAAI;AACA,eAAO,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAE/D,aAA
O;;AAEJ,4BAAwB,IAAG,CAAE;ACpEpC;;;;;;;;;;;;;;;;AAkBO,8BAA0B;MAC7B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAQ,YAAY,SAAS,aAAQ;AACrC,eAAO;UACH,GAAG,MAAM,gBAAgB,IAAI,GAAG,YAAY,SAAS;;;;ACzBjE;;;;;;;;;;;;;;;;AAkBO,kCAA8B;MACjC,YAAY;MACZ,cAAc,CAAC,KAAK;MACpB,UAAU;AACN,uBAAe;AACf,eAAQ,YAAY,cAAe;AACnC,YAAI,CAAC,cAAc,CAAC;AAChB,iBAAO;YACH,GAAG,MAAM,OAAO,IAAI,GAAG,OAAO;YAC9B,GAAG,MAAM,OAAO,GAAG,IAAI,MAAM;;mBAG5B,CAAC,cAAc;AACpB,iBAAO;YACH,GAAG,MAAM,OAAO,IAAI,GAAG,OAAO;YAC9B,GAAG,MAAM,OAAO,IAAI,GAAG,MAAM;;mBAG5B,cAAc,CAAC;AACpB,iBAAO;YACH,GAAG,MAAM,OAAO,GAAG,IAAI,OAAO;YAC9B,GAAG,MAAM,OAAO,GAAG,IAAI,OAAO;;;AAIlC,iBAAO;YACH,GAAG,MAAM,OAAO,GAAG,IAAI,MAAM;YAC7B,GAAG,MAAM,OAAO,IAAI,GAAG,MAAM;;;;;AC7C7C;;;;;;;;;;;;;;;;AAkBO,qCAAiC;MACpC,YAAY;MACZ,UAAU;AACN,eAAQ,YAAY,SAAU;AAC9B,eAAO,CAAE,GAAG,MAAM,eAAe,IAAI,YAAY;;;ACtBzD;;;;;;;;;;;;;;;;AAkBO,kCAA8B;MACjC,YAAY;MACZ,UAAU;AACN,iCAAyB;AACzB,2BAAmB,iBAAiB;AACpC,4BAAoB,iBAAiB;AACrC,qBAAa,MAAM,KAAK;AACxB,qBAAa,WAAW,SAAS,GAAG,KAAK,GAAG;AACxC,cAAI,WAAW,OAAO,YAAY;AAC9B,iBAAK,KAAK;qBAEL,WAAW,OAAO;AACvB,kBAAM,IAAI,MAAM,mBAAmB,uCAAuC;;;AAGlF,qBAAa;AACb,qBAAa,GAAG,IAAI,KAAK,QAAQ;AAC7B,cAAI,KAAK,KAAK;AACV,iBAAK,KAAK;;;AAGlB,eAAO,CAAE,GAAG,MAAM,MAAI,IAAI,MAAM;;;ACvCxC;;;;;;;;;;;;;;;;AAiBO,2BAAuB;MAC1B,YAAY;MACZ,UAAU;AACN,eAAO,CAAE,GAAG,MAAM,GAAG;;;ACpB7B;;;;;;;;;;;;;;;;AAkBO,2BAAuB;MAC1B,YAAY;MACZ,UAAU;AAEN,eAAO,CAAE,GAAG,MAAM,WAAU;;;ACtBpC;;;;;;;;;;;;;;;;AAsBO,kCAA8B;MACjC,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAQ,cAAc,gBAAiB;AACvC,eAAO;UACH,GAAG,MAAM,MAAM,WAAW,aAAa,GAAG,eAAe,UAAU,GAAG,gBAAgB,IAAI,WAAU;;;;AC7BhH;;;;;;;;;;;;;;;;AAmBO,6BAAyB;MAC5B,YAAY;MACZ,eAAe;MACf,UAAU;AACN,uBAAe,MAAM,IAAI,OAAK,EAAE;AAChC,eAAQ,QAAS;AACjB,sBAAc,gBAAe,MAAM,MAAM,GAAG,OAAO;AACnD,2BAAmB,OAAO,IAAI,OAAK,EAAE;AACrC,2BAAmB,OAAM,IAAI,YAAY;AACzC,eAAO,WAAW,IAAI,OAAK,MAAM;;;AC5BzC;;;;;;;;;;;;;;;;AAqBO,6BAAyB;MAC5B,YAAY;MACZ,cAAc,CAAC,KAAK;MACpB,UAAU;AACN,+BAAuB;AACvB,eAAQ,WAAW,SAAS,WAAK,cAAe;AAChD,gBAAY,mBAA4B,YAAY,MAAM,iHACA;AAC1D,eAAO;UACH,GAAG,MAAM,qBAAoB,IAAI,OAAO,IAAI,SAAS,SAAS,MAAK;UACnE,QAAQ,MAAM,qBAAqB,KAAK,IAAI,QAAQ,OAAO,SAAS,MAAK;;;;AC/BrF;;;;;;;;;;;;;;;;AAmBO,0CAAsC;MACzC,YAAY;MACZ,cAAc,CAAC,MAAM;MACrB,UAAU;AACN,6BAAqB;AACrB,eAAQ,SAAS,WAAK,YAAY,mBAAoB;AACtD,eAAO;UACH,IAAI,MAAM,QAAO,KAAK,QAAQ,SAAS,MAAK,YAAY,GAAmB;UAC3E,QAAQ,MAAM,qBAAqB,KAAK,IAAI,OAAO,OAAO,SAAS,MAAK,YAAY;;;;AC3BhG;;;;;;;;;;;;;;;;AAsCA;AACI,gBAAU;AACV,UAAI,EAAE,SAAS;AACX,cAAM,SAAQ,GAAG,CAAC,GAAG,EAAE,MAAM,IAAI,EAAE,MAAM,IAAI,EAAE,MAAM,IAAI,EAAE,MAAM;;AAErE,iBAAW;AACX,UAAI,KAAK,SAAS;AACd,eAAO,SAAQ,IAAI,CAAC,GAAG,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM;;AAE3E,cAAY,IAAI,SAAS,GAAG,MAAM,iEAC3B,IAAI;AACX,cAAY,KAAK,SAAS,GAAG,MAAM,8DAC5B,KAAK;AACZ,cAAY,YAAY,WAAW,GAAG,MAAM,mEACrC;AACP,cAAY,IAAI,MAAM,OAAO,YAAY,IAAI,MAAM,4CAA4C,IAAI,MAAM,yCACrE,YAAY;AAChD,cAAY,KAAK,MAAM,OAAO,YAAY,IAAI,MAAM,0CAA0C,KAAK,MAAM,2CACnE,YAAY;AAClD,sBAAgB;AACZ,0BAAkB;AAClB,yBAAiB,mBAA4B,IAAI,OAAO,aAAa,SAAS,WAAW;AACzF,eAAO,SAAQ,gBAAgB,KAAK,MAAM;;AAE9C,qBAAe,CAAE,GAAG,KAAK,IAAI;AAC7B,oBAAc,CAAE,SAAS,WAAK;AAC9B,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAM,wBAAwB;;AAExE,iCAA6B,IAAG,CAAE;AClEzC;;;;;;;;;;;;;;;;AAqBO,6BAAyB;MAC5B,YAAY;MACZ,cAAc,CAAC,KAAK;MACpB,UAAU;AACN,eAAQ,WAAW,SAAS,aAAQ;AACpC,gBAAY,mBAAkB,YAAY,MAAM,iHACM;AACtD,+BAAuB;AACvB,eAAO;UACH,GAAG,MAAM,oBAAoB,IAAI,OAAO,IAAI,SAAS,SAAS;UAC9D,QAAQ,MAAM,qBAAqB,KAAK,IAAI,QAAQ,OAAO,SAAS;;;;AC/BhF;;;;;;;;;;;;;;;;AAqBO,0BAAsB;MACzB,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,KAAI,IAAI,IAAI,MAAK,GAAG,cAAc;;;AC1B5D;;;;;;;;;;;;;;;;AAoBO,2BAAuB;MAC1B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AAC
N,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,KAAI,KAAK,MAAK,GAAG,aAAa;;;ACzBxD;;;;;;;;;;;;;;;;AAoBO,6BAAyB;MAC5B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAQ,MAAM,WAAW,qBAAY;AACrC,eAAO;UACH,GAAG;AACC,gCAAoB,oBAAmB,CAAC,OAAO,EAAE;AACjD,sBAAU,QAAO,IAAI,MAAM,WAAW,CAAC;AACvC,gBAAI,eAAe;AACf,oBAAM,WAAU,KAAK;;AAEzB,mBAAO;;;;;ACjCvB;;;;;;;;;;;;;;;;AAqBO,4CAAwC;MAC3C,YAAY;MACZ,cAAc,CAAC,KAAK;MACpB,UAAU;AACN,eAAQ,WAAW,SAAS,WAAK,mBAAoB;AACrD,2BAAmB,aAAa,OAAO,CAAC,GAAG,KAAK;AAChD,gBAAY,mBAA4B,aAAa,MAAM,mHAEnD;AACR,4BAAoB;AACpB,gBAAY,EAAE,SAAS,GAAG,MAAM,kFACJ,EAAE;AAC9B,gBAAY,OAAO,SAAS,GAAG,MAAM,mFACT,OAAO;AACnC,gBAAY,EAAE,MAAM,OAAO,OAAO,MAAM,IAAI,MAAM,mEACjC,EAAE,MAAM,qDACR,OAAO,MAAM;AAC9B,gBAAY,gCAAyC,SAAS,aAAa,MAAM,6FACxC,0BACjC;AACR,YAAI,mBAAmB;AACnB,kBAAY,OAAW,OAAM,MAAM,gFACZ,+BAA+B;;AAE1D,eAAO;UACH,GAAG,MAAM,mCAAmC,EAAE,OAAO,IAAI,QAAQ,SAAS,MAAK,WAAW;UAC1F,QAAQ,MAAM,oCAAoC,GAAG,IAAI,OAAO,OAAO,SAAS,MAAK,WAAW;;;;AC/C5G;;;;;;;;;;;;;;;;AAkBO,iCAA6B;MAChC,YAAY;MACZ,cAAc,CAAC,KAAK;MACpB,UAAU;AACN,4BAAoB;AACpB,4BAAoB,CAAE,GAAG,QAAQ;AACjC,6BAAqB,CAAE,GAAG,QAAQ;AAClC,eAAO;UACH,GAAG,MAAM,QAAO,UAAU,yBAAyB,aAAa;UAChE,QAAQ,MAAM,QAAO,UAAU,0BAA0B,cAAc;;;;AC3BnF;;;;;;;;;;;;;;;;AAyBO,0BAAsB;MACzB,YAAY;MACZ,cAAc,CAAC,KAAK;MACpB,UAAU;AACN,uBAAe;AACf,yBAAiB,4BAA0C,EAAE,OAAO,EAAE;AACtE,qBAAa;AACT,sBAAY,IAAI,IAAI,MAAK,GAAG;AAC5B,6BAAmB,kBAAgC,EAAE,OAAO;AAC5D,cAAI,WAAW,SAAS;AACpB,mBAAO,SAAQ,MAAI,KAAK,aAAa,EAAE;;AAE3C,iBAAO;;AAEX,qBAAa;AACT,oBAAU,KAAI,IAAI,MAAK,GAAG;AAC1B,6BAAmB,kBAAgC,EAAE,OAAO;AAC5D,cAAI,WAAW,SAAS;AACpB,kBAAM,SAAQ,MAAI,KAAK,aAAa,EAAE;;AAE1C,sBAAY,OAAO;AACnB,iBAAO,IAAI,IAAI,KAAK,MAAK,KAAK;;AAElC,eAAO,CAAE,GAAG,MAAM,GAAG;;;AChD7B;;;;;;;;;;;;;;;;AAkBO,0BAAsB;MACzB,YAAY;MACZ,eAAe,CAAC;MAChB,UAAU;AACN,oBAAY;AACZ,mCAA2B;AACvB,iBAAO,SAAQ,OAAO,IAAI;;AAE9B,uBAAe,CAAE,IAAI;AACrB,eAAO;UACH,GAAG,MAAM,QAAO,cAAc,oBAAoB,QAAQ,MAAiB;;;;AC5BvF;;;;;;;;;;;;;;;;AAqBO,0BAAsB;MACzB,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,kBAAU,KAAI,IAAI,IAAI,OAAO,MAAM,IAAI,KAAK,KAAK,KAAK;AACtD,eAAO,CAAE,GAAG,MAAM,KAAI,IAAI;;;AC3BlC;;;;;;;;;;;;;;;;AAkBO,0BAAsB;MACzB,YAAY;MACZ,eAAe,CAAC;MAChB,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,KAAI,IAAI;;;ACvBlC;;;;;;;;;;;;;;;;AAmBO,4BAAwB;MAC3B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,KAAI,IAAI,IAAI;;;ACxBtC;;;;;;;;;;;;;;;;AAkBO,4BAAwB;MAC3B,YAAY;MACZ,UAAU;AACN,eAAO,CAAE,GAAG,MAAM,WAAU;;;ACrBpC;;;;;;;;;;;;;;;;AAyBO,+BAA2B;MAC9B,YAAY;MACZ,cAAc,CAAC,KAAK;MACpB,UAAU;AACN,uBAAe;AACf,yBAAiB,4BAA2B,EAAE,OAAO,EAAE;AACvD,qBAAa;AACT,sBAAY,IAAI,IAAI,MAAK,GAAG;AAC5B,6BAAmB,kBAAiB,EAAE,OAAO;AAC7C,cAAI,WAAW,SAAS;AACpB,mBAAO,SAAQ,MAAI,KAAK,aAAa,EAAE;;AAE3C,iBAAO;;AAEX,qBAAa;AACT,oBAAU,KAAI,IAAI,MAAK,GAAG;AAC1B,6BAAmB,kBAAiB,EAAE,OAAO;AAC7C,cAAI,WAAW,SAAS;AACpB,kBAAM,SAAQ,MAAI,KAAK,aAAa,EAAE;;AAE1C,sBAAY,OAAO;AACnB,iBAAO,IAAI,IAAI,KAAK,MAAK,KAAK;;AAElC,eAAO,CAAE,GAAG,MAAM,GAAG;;;AChD7B;;;;;;;;;;;;;;;;AA0BO,qCAAiC;MACpC,YAAY;MACZ,cAAc,CAAC,KAAK,QAAQ,YAAY;MACxC,UAAU;AACN,eAAQ,mBAAoB;AAC5B,8CAAmC;AACnC,2BAAmB,UAAS,OAAO,QAAO,KAAK;AAC/C,8BAAsB,kBAAiB,MAAK,OAAO,EAAE;AACrD,0BAAkB;AAClB,YAAI,MAAK,SAAS;AACd,uBAAa,GAAG,IAAI,EAAE,MAAM,SAAS,GAAG,EAAE;AACtC,sBAAU,KAAK,EAAE,MAAM;;AAE3B,oBAAU,KAAK;;AAEnB,2BAAmB,IAAI,GAAG;AAC1B,kCAA0B,KAAI,IAAI;AAClC,oCAA4B,MAAM,MAAI,WAAU,QAAO;AACvD,+BAAuB,KAAI,KAAI,KAAI,qBAAqB,sBAAsB,sBAAsB,QAAO;AAC3G,qBAAa;AACT,cAAI,MAAK,SAAS;AACd,mBAAO,SAAQ,KAAI,KAAI,IAAI,MAAK,SAAQ,qBAAqB,CAAC,GAAG,GAAG,GAAG,MAAK,MAAM,MAAM,aAAa,aAAa,EAAE;;AAGpH,mBAAO,SAAQ,KAAI,KAAI,IAAI,sBAAsB,aAAa,EAAE;;;AAGxE,wBAAgB;AACZ,wBAAc,KAAI,KAAI,qBAAqB,QAAO,MAAM;AACxD,cAAI,MAAK,SAAS;AACd,sBAAU,MAAI,SAAS;;AAE3B,iB
AAO,SAAQ,SAAS,MAAK;;AAEjC,4BAAoB;AAChB,4BAAkB,KAAI,KAAI,gBAAgB,aAAa;AACvD,cAAI,MAAK,SAAS;AACd,0BAAc,MAAI,aAAa;;AAEnC,iBAAO,SAAQ,aAAa,MAAK;;AAErC,yBAAiB;AACb,wCAA8B,KAAI,YAAY;AAC9C,yBAAe,KAAI,IAAI;AACvB,cAAI,MAAK,SAAS;AACd,uBAAW,MAAI,UAAU;;AAE7B,iBAAO,SAAQ,UAAU,MAAK;;AAElC,0BAAkB;AACd,0BAAgB;AAChB,cAAI,MAAK,SAAS;AACd,wBAAY,MAAI,WAAW;;AAE/B,iBAAO,SAAQ,WAAW,MAAK;;AAEnC,eAAO;UACH,GAAG;UACH,MAAM;UACN,UAAU;UACV,OAAO;UACP,QAAQ;;;;ACvFpB;;;;;;;;;;;;;;;;AAsBO,6BAAyB;MAC5B,YAAY;MACZ,cAAc,CAAC,KAAK;MACpB,UAAU;AACN,6BAAqB;AACrB,eAAQ,QAAS;AACjB,2BAAmB,gBAAe,MAAM,EAAE,OAAO;AACjD,qBAAa;AACT,8BAAoB,EAAE;AACtB,8BAAoB,QAAQ;AAC5B,6BAAmB,YAAY,MAAM,GAAG;AACxC,4BAAkB,WAAW;AAC7B,6BAAmB,YAAY,MAAM,MAAM,YAAY,QAAQ,MAAM;AACrE,4BAAkB,WAAW;AAC7B,mCAAyB,WAAW,GAAG;AACvC,mCAAyB,WAAW,YAAY,GAAG,YAAY,IAAI;AACnE,8BAAoB,YAAY,CAAC,YAAY,CAAC,cAAc;AAC5D,yBAAe,SAAQ,IAAI;AAC3B,kCAAwB,SAAQ,SAAS,CAAC;AAC1C,gCAAsB,YAAY,CAAC,CAAC,YAAY,kBAAkB;AAClE,kCAAwB,WAAU,QAAQ;AAC1C,2BAAiB,mBAAmB,iBAAiB,iBAAiB,EAAE,MAAM;AAC9E,sCAA4B,wBAAuB;AACnD,uBAAa,WAAU,YAAY;AACnC,iBAAO;;AAEX,eAAO,CAAE,GAAG,MAAM,SAAS,MAAM;;;AAGzC;AACI,qBAAe;AACf,mBAAa,OAAO,IAAI,MAAM,EAAE;AAC5B,eAAO,KAAK;;AAEhB,aAAO;;AAEX;AACI,qBAAe;AACf,mBAAa,GAAG,IAAI,OAAO,QAAQ,EAAE;AACjC,qBAAa,GAAG,IAAI,OAAO,GAAG,QAAQ,EAAE;AACpC,iBAAO,KAAK,OAAO,GAAG;;;AAG9B,aAAO;;ACjEX;;;;;;;;;;;;;;;;AAkBO,mCAA+B;MAClC,YAAY;MACZ,cAAc,CAAC,KAAK;MACpB,UAAU;AACN,uBAAe;AACf,eAAO,CAAE,GAAG,MAAM,WAAU,IAAI,GAAG,MAAM,WAAU;;;ACvB3D;;;;;;;;;;;;;;;;AAkBO,+BAA2B;MAC9B,YAAY;MACZ,UAAU;AACN,eAAO,CAAE,GAAG,MAAM,MAAK,IAAI;;;ACrBnC;;;;;;;;;;;;;;;;AAkBO,+BAA2B;MAC9B,YAAY;MACZ,UAAU;AAGN,eAAO,CAAE,GAAG,MAAM,WAAU;;;ACvBpC;;;;;;;;;;;;;;;;AAkBO,4BAAwB;MAC3B,YAAY;MACZ,UAAU;AAGN,eAAO,CAAE,GAAG,MAAM,WAAU;;;ACvBpC;;;;;;;;;;;;;;;;AAkBO,4BAAwB;MAC3B,YAAY;MACZ,UAAU;AAGN,eAAO,CAAE,GAAG,MAAM,WAAU;;;ACvBpC;;;;;;;;;;;;;;;;AAmBO,4BAAwB;MAC3B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,IAAI,IAAI,MAAI,GAAG;;;ACxBzC;;;;;;;;;;;;;;;;AAmBO,0BAAsB;MACzB,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,IAAI,IAAI,MAAK,GAAG;;;ACxB1C;;;;;;;;;;;;;;;;AAqBO,iCAA6B;MAChC,YAAY;MACZ,cAAc;MACd,eAAe,CAAC;MAChB,UAAU;AACN,wBAAgB;AAChB,eAAQ,QAAS;AACjB,eAAO;UACH,QAAQ;AACJ,6BAAiB;AACjB,6BAAgB,IAAI;AACpB,mBAAO,IAAI,IAAI,KAAI,MAAI,IAAI,MAAM,WAAW;;;;;AChC5D;;;;;;;;;;;;;;;;AAmBA,yEAAqE,UAAU,WAAW,UAAU;AAChG,sBAAgB,cAAW,SAAQ,QAAQ,IAAI,GAAG,GAAG,aAAa,MAAM,OAAO;AAC/E,qBAAe,CAAE,GAAG,GAAG;AACvB,oBAAc,CAAE,aAAa,MAAM,OAAO;AAC1C,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,aAAa;;AAExE,+CAA2C,IAAG,CAAE;ACzBvD;;;;;;;;;;;;;;;;AAkBO,0BAAsB;MACzB,YAAY;MACZ,cAAc,CAAC;MACf,eAAe,CAAC;MAChB,UAAU;AACN,uBAAe;AACf,eAAQ,aAAa,MAAM,OAAO,QAAS;AAC3C,eAAO;UACH,GAAG,MAAM,mCAAmC,GAAG,GAAG,IAAI,aAAa,MAAM,OAAO;;;;AC1B5F;;;;;;;;;;;;;;;;AAwBO;AACH,UAAI,EAAE,OAAO,MAAM;AACf,YAAI,SAAQ,GAAG,sBAA+B,EAAE,OAAO;;AAE3D,UAAI,GAAG,OAAO,MAAM;AAChB,aAAK,SAAQ,IAAI,sBAA+B,GAAG,OAAO;;AAE9D,aAAO;QACH,GAAG;AACC,qBAAW,KAAI,IAAI,MAAK,MAAM,OAAO,IAAI,GAAG;AAC5C,iBAAO;;;;AClCnB;;;;;;;;;;;;;;;;AAmBO,0BAAsB;MACzB,YAAY;MACZ,cAAc,CAAC;MACf,eAAe,CAAC;MAChB,UAAU;AACN,yBAAiB;AACjB,eAAQ,oBAAqB;AAC7B,kBAAU,MAAM;AAChB,kBAAU,MAAM;AAChB,yBAAiB,gBAAoB,kBAAkB,EAAE;AACzD,wBAAgB,iBAAiB,IAAI,GAAG,GAAG;AAC3C,eAAO;UACH,GAAG;AACC,mBAAO,QAAQ;;;;;AChC/B;;;;;;;;;;;;;;;;AAqBO,8BAA0B;MAC7B,YAAY;MACZ,cAAc,CAAC,KAAK;MACpB,UAAU;AACN,uBAAe;AACf,qBAAa,MAAM,KAAI,IAAI,MAAK,aAAa,GAAG,IAAI;AACpD,qBAAa,MAAM,KAAI,IAAI,MAAK,KAAK,GAAG,IAAI;AAC5C,eAAO,CAAE,GAAG,MAAM,GAAG;;;AC5B7B;;;;;;;;;;;;;;;;AAsDA,qFAAgF,CAAC,GAAG,GAAG;AACnF,kBAAY,iBAAgB,IAAI,MAAM;AACtC,qBAAe,iBAAgB,QAAO,SAAS;AAC/C,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,iBAAW;AACX,oBAAc;AACd,qBAAe
[machine-generated source-map "mappings" data for the bundled dist output; not human-editable, retained in the original commit]
ACpB,qBAAS;AACT;;;AAGR,YAAI,WAAW;AACX,iBAAO;;AAEX,eAAO,GAAG;;;AChKlB;;;;;;;;;AAkBO;AACH,2BAAqB;QACjB,SAAW,MAAM,MAAM,QAAQ;QAC/B,UAAY,MAAM,MAAM,SAAS,GAAG,MAAM;QAC1C,MAAQ,MAAM,MAAM,KAAK,MAAO,KAAK,OAAO;QAC5C,QAAU,MAAM,MAAM,OAAO,MAAO,KAAK,OAAO,WAAW;QAC3D,SAAW,MAAM,MAAM,QAAQ,MAAO,KAAK,GAAG;QAC9C,KAAO,MAAM,MAAM,IAAI;;AAE3B,mBAAa,aAAa,aAAa;AACvC,mBAAa,cAAc,aAAa;AACxC,mBAAa,UAAU,aAAa;AACpC,mBAAa,YAAY,aAAa;AACtC,mBAAa,aAAa,aAAa;AACvC,mBAAa,SAAS,aAAa;AACnC,UAAI,cAAc;AACd,eAAO,aAAa;;AAExB,YAAM,IAAI,WAAW,qBAAqB;;ACpC9C;;;;;;;;;AAaO,wDAAoD,IAAI,OAAO;AAa/D,kFAA8E;AACjF,UAAI,uBAAuB,QACvB,OAAO,wBAAwB,YAC/B,OAAO,eAAe,yBAAyB,OAAO,aACtD,CAAC,iBAAiB;AAClB,cAAM,IAAI,MAAM;;AAEpB,UAAI;AACA,oBAAY,KAAK,UAAU;AAC3B,YAAI,IAAI,SAAS;AACb,kBAAQ,KAAK,mCAAmC,2CAC5B,IAAI,qJAGjB;;;;AAeZ;AACH,UAAI,MAAM;AAEN,eAAO;iBAEF,OAAO,MAAM;AAClB,YAAI,OAAO,eAAe,OAAO,OAAO;AAEpC,uBAAa,OAAO,KAAK;AACzB,4BAAkB;AACd,gBAAI,OAAO,QAAQ;AAEf,qBAAO;;AAEX,gBAAI,CAAC,iBAAiB,EAAE;AACpB,qBAAO;;;AAGf,iBAAO;;AAIP,cAAI,MAAM,QAAQ;AAEd,+BAAmB;AACf,kBAAI,CAAC,iBAAiB;AAClB,uBAAO;;;AAGf,mBAAO;;AAMP,mBAAO;;;;AAMf,sBAAc,OAAO;AACrB,eAAO,UAAU,YAAY,UAAU,YAAY,UAAU;;;ACjGrE;;;;;;;;;AAyBO,mEAEG,QAAQ;AACd,6BAAuB,sBAAsB;AAE7C,wBAAkB,CAAC,gBAAgB,gBAAgB;AACnD,UAAI;AACA,qBAAa,cAAc;AAC3B,oBAAY,aAAa,CAAC,MAAM,MAAM;;AAGtC,qBAAa,cAAc;AAC3B,oBAAY,aAAa,CAAC,MAAM,MAAM,MAAM;;AAGhD,UAAI,UAAU,UAAU,SAAS,MAAM;AAEnC,oBAAY,UAAU,IAAI,QAAK,KAAK,MAAM,aAAa;;AAE3D;AACA,UAAI,CAAC;AACD,kBAAU,KAAK;AACf,wBAAgB;AAChB,4BAAoB,OAAM;AACtB,wBAAc,KAAK,GAAG,OAAM,aAAa;;;AAGjD,cAAQ,IAAI,OAAO;AACnB,eAAS,WAAW,WAAW;AAC/B,cAAQ,IAAI,OAAO;AACnB,qBAAe,OAAM;AACrB,mBAAa,GAAG,IAAI,OAAO,QAAQ,EAAE;AACjC,YAAI;AACA,4BAAkB,OAAO,IAAI,WAAW;;AAGxC,2CAAiC,OAAO,IAAI,WAAW,eAAe;;AAE1E,gBAAS,OAAM,OAAO,SAAS,IAAI,MAAM,KAAK,OAAO;;AAGzD,aAAM;AACN,6BAAuB,qBAAqB;AAC5C,gCAA0B,qBAAqB,OAAM;AACrD,cAAQ,iBAAiB,iBAAiB;AAC1C,cAAQ,qBAAqB;AAC7B,cAAQ,yBAAyB;AACjC,cAAQ,IAAI,OAAO;;AAEvB;AACI;AAEA,UAAI,OAAM,6BAA6B;AACnC,yBACI,qBAAqB,OAAM;;AAG/B,yBAAiB,qBAAqB,OAAM;;AAGhD,aAAO;;AAEX;AACI,2BAAqB;AACrB,2BAAqB;AACrB,oBAAc;AACd,0BAAoB,OAAM;AACtB,qBAAa,KAAK,OAAM,aAAa;;AAEzC,+BAAyB;AACrB,YAAI,WAAW,SAAS,KACpB,WAAW,WAAW,KAAK,WAAW,GAAG,cAAc,SAAS;AAChE,2BAAiB;AACjB;;AAEJ,cAAM,KAAK,GAAG;;AAElB,UAAI;AAEA,4BAAoB,OAAM;AACtB,qBAAW;AACX,6BAAmB,MAAM;AACrB,gBAAI,MAAM,QAAQ,UAAU;AACxB,kBAAI;AACA,iCAAiB;AACjB;;AAGA,uBAAO;;;;AAInB,cAAI,CAAC;AACD;;;;AAIZ,aAAO;;AAEX,mDAEU,QAAQ;AACd,iBAAW;AACX,mBAAa,GAAG,IAAI,OAAO,QAAQ,EAAE;AACjC,YAAI,IAAI;AACJ,iBAAO,KAAK,MAAM,GAAG,KAAK,SAAS,KAAK;;AAE5C,gBAAQ,OAAO;AACf,eAAO,KAAK,MAAM,GAAG,UAAU;AAC/B,gBAAQ,IAAI,OAAO,UAAU,KAAK,KAAK;;AAE3C,cAAQ;;AAOZ;AAGI;AACA;AACI,sBAAc,KAAK,UAAU,MAAM;;AAGnC,sBAAc;;AAElB,mBAAa,MAAM;AACnB,wBAAkB,MAAM;AACxB,qBAAe,CAAC,GAAG,SAAS,cAAc,aAAa,MAAM,cAAc;AAC3E,eAAS,QAAQ,WAAW;;AAKhC;AAGI;AACA;AACI,sBAAc,KAAK,UAAU,MAAM;;AAGnC,sBAAc;;AAElB,0BAAoB;AACpB,yBAAmB,MAAM;AACrB,YAAI,iBAAiB,QAAQ,cAAc,SAAS,KAChD,cAAc,QAAQ,UAAU;AAChC;;AAEJ,qBAAa,GAAG,IAAI,KAAK,cAAc,QAAQ,EAAE;AAC7C,+BAAqB,KAAK,cAAc,GAAG;AAC3C,oCAA0B,KAAK,YAAY;AAC3C,qCAA2B,KAAK,cAAc;AAC9C,sBAAY,KAAK,GAAG,gBAAgB,sBAAsB;;;AAGlE,mBAAa,MAAM;AACnB,wBAAkB,MAAM;AACxB,8BAAwB,YAAY,WAAW,IAAI,KAAK,YAAY;AACpE,qBAAe;QACX,GAAG,SAAS;QAAc;QAAa,MAAM,cAAc;QAC3D;;AAEJ,eAAS,QAAQ,WAAW;AAC5B,mBAAa,GAAG,IAAI,YAAY,QAAQ,EAAE;AACtC,iBAAS,CAAC,IAAI,IAAI,IAAI,YAAY,KAAK,WAAW;;;ACjM1D;;;;;;;;;AAoBA;AACI,aAAQ,SAAQ,kBAAkB,QAAQ,kBACtC,QAAQ,kBACR,WAAU,KAAK,OAAO,UAAU;;AAQjC;AACH,UAAI,mBAAmB;AACnB,eAAO;iBAEF,OAAO,mBAAmB;AAC/B,eAAO,YAA0B;iBAE3B,OAAO,mBAAmB,YAC/B,OAAO,mBAAmB;AAC3B,eAAO;iBAEF,0BAA0B;AAC/B,wBAAgB;AAChB,4BAAoB,eAAe;AACnC,qBAAa,GAAG,IAAI,aAAa,EAAE;AAC/B,uBAAa,eAAe;AAC5B,cAAI,6BAA6B,KAAK,GAAG;AA
CrC,oBAAQ,KAAK;;AAGb,oBAAQ,KAAK,oBAAoB,MAAM;;;AAG/C,eAAO;;AAGP,uBAAe;AACf,kCAA0B,OAAO,KAAK;AAClC,gCAAsB,eAAe;AACrC,cAAI,gBAAgB,UAAU,OAAO,kBAAkB;AAInD,mBAAO,eAAe;;AAGtB,0BAAc,YAA0B;AACxC,mBAAO,SAAS,oBAAoB,eAAe;;;AAG3D,eAAO;;;AASR;AACH,UAAI,aAAa,QAAQ,aAAa;AAClC,eAAO;iBAEF,OAAO,aAAa;AACzB,eAAO,YAA0B;iBAE3B,OAAO,aAAa,YAAc,OAAO,aAAa;AAC5D,eAAO;iBAEF,oBAAoB;AACzB,wBAAgB;AAChB,4BAAoB,SAAS;AAC7B,qBAAa,GAAG,IAAI,aAAa,EAAE;AAC/B,uBAAa,SAAS;AACtB,cAAI,6BAA6B,KAAK,GAAG;AACrC,oBAAQ,KAAK;;AAGb,oBAAQ,KAAK,oBAAoB,MAAM;;;AAG/C,eAAO;;AAGP,uBAAe;AACf,4BAAoB,OAAO,KAAK;AAC5B,0BAAgB,SAAS;AACzB,wBAAc,YAA0B;AACxC,cAAK,WAAU,UAAU,UAAU,gBAC/B,OAAO,YAAY;AAInB,mBAAO,SAAS;;AAGhB,mBAAO,SAAS,oBAAoB,SAAS;;;AAGrD,eAAO;;;ACxHf;AAEK,sBAAW;ACFhB;;;;;;;;;AAoBA;AAEI,UAAI,IAAI,SAAS,QAAQ,IAAI,UAAU,IAAI;AAEvC,eAAO;;AAEX;AAEI,eAAO,MAAK,KAAK,IAAI;;AAIrB,cAAM,IAAI,WAAW,0BAA0B,IAAI,mDAChC,IAAI,UAAU,IAAI;;;;MAazC;AACI,aAAK,WAAW;AAChB,aAAK,UAAU;AACf,aAAK,UAAU;AACf,YAAI,iBAAiB;AACjB,2BAAiB,MAAM;AACnB,iBAAK,SAAS,MAAM,MAAM,SAAS;AACnC,gBAAI,MAAM,MAAM;AACZ,mBAAK,QAAQ,MAAM,MAAM,QAAQ;;;;AAKzC,cAAI,SAAS;AACT;;AAEJ,6BAAmB;AACf,iBAAK,IAAI,KAAK,KAAK,KAAK;;;;MAcpC;AACI,YAAI,KAAK,SAAS,IAAI,OAAO;AACzB,eAAK,SAAS,IAAI,MAAM,wBAAwB,KAAK;AACrD,eAAK,QAAQ,IAAI,QAAQ,IAAI;AAC7B,cAAI,QAAQ;AACR,iBAAK,QAAQ,IAAI,MAAM;;;AAI3B,gBAAM,IAAI,WAAW,uBAAuB,IAAI,YAAY,IAAI;;AAEpE,eAAO;;MAOX;AACI,aAAK,IAAI,KAAK,KAAK,KAAK;;MAM5B;AACI,eAAO,KAAK,SAAS,IAAI,OAAO;;MAKpC;AACI,eAAO,OAAO,KAAK,KAAK;;MAS5B;AACI,YAAI,eAAe;AACf,cAAI,KAAK,SAAS,IAAI,OAAO;AACzB,kBAAM,IAAI,WAAW,oBAAoB,IAAI;;AAG7C,mBAAO,KAAK,SAAS,IAAI;;;AAI7B,qBAAW,KAAK,QAAQ;AACxB,cAAI,MAAM;AACN,kBAAM,IAAI,WAAW,yCAAyC;;AAElE,iBAAO,KAAK,SAAS;;;MAU7B;AACI,YAAI,eAAe;AACf,cAAI,KAAK,SAAS,IAAI,OAAO;AACzB,kBAAM,IAAI,WAAW,oBAAoB,IAAI;;AAG7C,mBAAO,KAAK,QAAQ,IAAI;;;AAI5B,qBAAW,KAAK,QAAQ;AACxB,cAAI,MAAM;AACN,kBAAM,IAAI,WAAW,yCAAyC;;AAElE,iBAAO,KAAK,QAAQ;;;MAI5B;AACI,YAAI,KAAK,WAAW;AAChB,kBAAQ,KAAK;;;;AAMzB,yBAAqB;AAErB,kCAA8B;AAsBvB;AACH,uBAAiB,UAAU,OAAO,QAAQ,OAAO;AACjD,2BAAqB,MAAM,QAAQ;AACnC,yBAAmB,eAAe,UAAU,CAAC;AAC7C,0BAAoB,WAAW,IAAI,OAAK,EAAE;AAC1C,2BAAqB;AACrB,wBAAkB,SAAS;AAC3B,+BAAyB;AACrB,YAAI,UAAU,QAAQ,gBAAgB;AAClC,uBAAa,KAAK,SAAS,SAAS;;AAGpC,uBAAa,KAAK;;;AAG1B,UAAI,SAAS;AAET,cAAM,gBAAgB;AACtB,cAAM,gBAAgB;;AAG1B,8BAAwB,YAAY,KAAK,OAAO,MAAM,SAAS,QAAQ,KAAK;AAC5E;AACA;AACA,UAAI,aAAa,oBAAoB;AAGjC,oBAAY,qCAAqC,YAAY;AAC7D,iBAAS,IAAI;AACb,0BAAkB,IAAI;AAEtB,qBAAa,mBAAmB;AAChC,8BAAsB,mBAAmB;;AAE7C,eAAS,aAAa;AACtB,wBAAkB;AAClB,UAAI,CAAC;AACD,eAAO,OAAO,iBAAiB,sBAAsB;;AAEzD,+BAAyB,IAAI,SAAS;AAEtC,mBAAa,GAAG,IAAI,OAAO,QAAQ,EAAE;AACjC,YAAI,SAAS;AAET,6BAAmB,SAAS;AAC5B,cAAI,aAAa,MAAM;AACnB,kBAAM,gBAAgB;;AAE1B,cAAI,aAAa,MAAM;AACnB,kBAAM,gBAAgB;;;AAG9B,yBAAiB,OAAO;AACxB,yBAAiB,SAAS;AAC1B,YAAI,oBAAoB;AACpB;;AAEJ,4BAAoB;AACpB,2BAAmB;AACnB,iCAAyB;AACzB,yBAAiB;AACjB,6BAAoB,SAAS;AACzB,wBAAc,iBAAiB,SAAS;AACxC,uBAAa,iBAAiB,QAAQ;AACtC,sBAAY,KAAK;AACjB,qBAAW,KAAK;AAChB,cAAI,QAAQ;AACR,yBAAa;;AAEjB,cAAI,CAAC;AACD,4BAAgB,OAAM;AACtB,gBAAI,gBAAgB,OAAM,UAAU,KAAK,CAAC,SAAS,OAAO,WACtD,YAAY,QAAQ,OAAM,UAAU,MAAM,CAAC,MAAM,cACjD,OAAM,YAAY,aAAa;AAC/B,+BAAiB,KAAK;;;;AAIlC,YAAI;AACA,mBAAS,UAAU;AACnB,iBAAO,UAAU,WAAW;;AAEhC,8BAAsB,OAAO,SAAS,MAAM,aAAa;AACzD,yBAAiB;AACjB,YAAI,SAAS;AACT,uBAAa,SAAS,YAAY,aAAa;;AAEnD,6BAAqB,eAAe;AACpC,sCAA8B,MAAM,QAAQ,gBAAgB,eAAe,CAAC;AAC5E,sBAAa,GAAG,KAAI,sBAAsB,QAAQ,EAAE;AAChD,cAAI,CAAC,iBAAiB,OAAO,sBAAsB;AAC/C,6BAAiB,IAAI,sBAAsB,KAAI,cAAc,KAAI,MAAM,QAAQ,cAAc,WAAW,KAAK;;AAEjH,yBAAc,YAAY,QAAQ,sBAAsB,IAAG;AAC3D,cAAI,WAAU;AACV,yBAAa,UAAS,cAAc;;;AAG5C,YAAI,CAAC;AAED,kBAAQ;;;AAQhB,uBAAiB;AACjB,aAAO,eAAe,eAAe,aAAa;;AAatD;AACI,cAAY,WAAW,QAAQ,QAAQ,SAAS
,GAAG,MAAM;AACzD,wBAAkB;AAClB,8BAAwB;AACxB,UAAI,QAAQ,WAAW;AAEnB,oBAAY,gDAAgD,QAAQ,IAAI;AACxE,sBAAc,IAAI;AAClB,4BAAoB,IAAI;;AAGxB,wBAAgB,IAAI;AACpB,6BAAoB;AAChB,iBAAQ,QAAQ,gBAAiB,gDAAgD,QAAO;AAExF,uCAA6B;AACzB,gBAAI,CAAC,QAAQ,IAAI,eAAe;AAC5B,0BAAY,KAAK;AACjB,sBAAQ,IAAI,eAAe;;;AAInC,6BAAmB;AACf,gBAAI,kBAAkB,SAAS;AAC3B,gCAAkB,QAAQ,IAAI;;AAElC,yBAAa,MAAM,QAAQ,eAAa,kBAAkB,MAAM,IAAI;;;;AAIhF,aAAO;QACH,QAAQ;QACR,iBAAiB,oBAAoB;;;AAG7C;AACI,8BAAwB;AACxB,yBAAmB;AACf,wBAAgB,QAAQ,aAAa,MAAM;;AAE/C,aAAO;;AAaJ;AACH,sBAAgB,IAAI;AACpB,qBAAe;AACf,2BAAqB;AAIrB,wBAAkB,SAAS;AACvB,gBAAQ,IAAI;;AAEhB,qBAAc;AACd,oBAAc;AAEd,aAAM,KAAK;AACX,aAAO,OAAM,SAAS;AAClB,oBAAY,OAAM,OAAM,SAAS;AACjC,YAAI,QAAQ,IAAI,IAAI;AAChB,iBAAM;AACN;;AAEJ,4BAAoB,MAAM,MAAM,SAAS,OAAO,OAAM,SAAS;AAC/D,YAAI,IAAI,OAAO,WAAW,KAAK;AAE3B,iBAAM;AACN,iBAAO,KAAK;AACZ,kBAAQ,IAAI,IAAI;AAChB,cAAI;AACA,kBAAM;;;AAMV,gBAAM,KAAK,OAAM,SAAS;AAC1B,+BAAoB,IAAI;AAGpB,gBAAI,aAAa,OAAM,SAAS;AAC5B,2BAAa,OAAM,QAAQ,IAAI;;AAEnC,yBAAa,OAAM,MAAM,IAAI,IAAI;AACjC,gBAAI,QAAQ,IAAI,OAAM;AAClB;;AAEJ,mBAAM,KAAK;;;;AAIvB,aAAO,CAAE,QAAQ;;AAQrB;AACI;AACA,UAAI,OAAM,YAAY,aAAa,WAAW;AAC1C,uBAAe,OAAM,YAAY;;AAGjC,wBAAgB;AAChB,qBAAa,GAAG,IAAI,OAAM,YAAY,aAAa,QAAQ,EAAE;AACzD,qCAA2B,OAAM,YAAY,aAAa,GACrD;AACD,gBAAI,aAAa,OAAO,OAAM;AAC1B,0BAAY;AACZ;;;;AAIZ,uBAAe,OAAM,YAAY,YAAY;;AAEjD,aAAO;;ACzbX;;;;;;;;;4BA6B+B;MAC3B;AAEI,cAAM;AACN,aAAK,iBAAiB,IAAI;AAC1B,aAAK,OAAO,KAAK;AACjB,YAAI,KAAK,QAAQ;AACb,yBAAe,KAAK,eAAe;AACnC,eAAK,OAAO,OAAO;;AAEvB,aAAK,kBAAkB;AACvB,aAAK,aAAa;AAGlB,YAAI,MAAM,QAAQ,KAAK;AACnB,eAAK,SAAS,KAAK,OAAO;;AAG1B,eAAK,SAAS,CAAC,KAAK;;AAExB,YAAI,MAAM,QAAQ,KAAK;AACnB,eAAK,UAAU,KAAK,QAAQ;;AAG5B,eAAK,UAAU,CAAC,KAAK;;AAGzB,YAAI,SAAqB,KAAK,QAAQ,WAAW,KAAK,OAAO;AACzD,gBAAM,IAAI,WAAW,mGAEd,KAAK,OAAO,IAAI,OAAK,EAAE;;AAGlC,YAAI,SAAqB,KAAK,SAAS,WAAW,KAAK,QAAQ;AAC3D,kBAAQ,KAAK,qGAEN,KAAK,QAAQ,IAAI,OAAK,EAAE;;AAMnC,aAAK,cAAc;AACnB,aAAK,yBAAyB;AAC9B,aAAK,2BAA2B;AAKhC,aAAK,eAAe;AACpB,aAAK,0BAA0B;AAC/B,aAAK,4BAA4B;AAKjC,aAAK,SAAS;AAKd,aAAK,wBAAwB;AAa7B,wBAAgB,KAAK;AACjB,wBAAc,EAAE;AAChB,4BAAkB,EAAE;AACpB,8BAAoB,EAAE;AACtB,eAAK,aAAa,KAAK;AACvB,eAAK,wBAAwB,KAAK;AAClC,eAAK,0BAA0B,KAAK;;AAIxC,wBAAgB,KAAK;AACjB,wBAAc,EAAE;AAChB,4BAAkB,EAAE;AACpB,8BAAoB,EAAE;AAKtB,mBAAqB,cAAc,GAAG;AACtC,mBAAqB,gBAAgB,GAAG;AACxC,eAAK,YAAY,KAAK;AACtB,eAAK,uBAAuB,KAAK;AACjC,eAAK,yBAAyB,KAAK;;AAGvC,aAAK,aAAa;AAClB,aAAK,cAAc;AACnB,aAAK,kBAAkB;AACvB,aAAK,iBAAiB;AACtB,aAAK,kBAAkB;AACvB,qBAAa,GAAG,IAAI,KAAK,YAAY,QAAQ;AACzC,wBAAc,KAAK,YAAY;AAE/B,cAAI,CAAE,kBAAiB;AACnB,kBAAM,IAAI,UAAU,8EACI,KAAK,iBAChB,0CACU,MAAM;;AAEjC,eAAK,WAAW,KAAK,MAAM;AAC3B,eAAK,gBAAgB,KAAK,MAAM;AAChC,eAAK,eAAe,KAAK,MAAM;;AAEnC,4BAAoB,KAAK;AACrB,eAAK,YAAY,KAAK,MAAM;;AAEhC,aAAK,sBAAsB,KAAK,OAAO,IAAI,OAAK,EAAE;AAClD,aAAK,uBAAuB,KAAK,QAAQ,IAAI,OAAK,EAAE;AAMpD,4BAAoB;AAEpB,6BAAqB;AACrB,6BAAqB;AAErB,+BAAuB;AACvB,6BAAqB;AACrB,uCAA+B;AAmB/B,gCAAwB;AACpB,cAAI,SAAS,QAAQ,aAAa,QAAQ,eAAe;AACrD,oBAAQ,QAAO;AACf,wBAAY,QAAO;AACnB,0BAAc,QAAO;;AAEzB,uBAAa,MAAM,aAAa;AAEhC,cAAI,iBAAgB,QAAQ,UAAU;AAClC,kBAAM,IAAI,aAAa,cAAc,QAAO,kBAAkB,MAAM;;AAIxE,cAAI,eAAc,QAAQ,UAAU;AAChC;;AAGJ,eAAK,eAAe,IAAI,UAAU,QAAQ,OAAO;AAEjD,cAAI,CAAE,OAAM,MAAM;AACd,yBAAa,MAAM,MAAM,OAAO,KAAK,cAAc;;AAEvD,cAAI,iBAAgB,QAAQ,UAAU;AAClC,6BAAgB,KAAK;;AAGzB,mCAAyB,KAAK,cAAc;AAC5C,uBAAa,GAAG,IAAI,kBAAkB;AAClC,sBAAU,KAAK,aAAa;AAC5B,2BAAc,KAAK,cAAc;AACjC,+BAAkB,KAAK,YAAY;AACnC,iCAAoB,KAAK,cAAc;AACvC,4BAAgB,GAAG,gBAAe,kBAAiB,QAAO,YAAW;;AAEzE,yBAAc,KAAK;AACnB,iBAAO,iBAAgB,QAAQ,SAAS;AACpC,6BAAgB,OAAO,iBAAgB,QAAQ,OAAO;;AAE1D,iCAAuB,KAAK;;AAEhC,8BAAsB;AACtB,gCAAwB;AACxB,wBAAgB,KAAK;AACjB,0BAAgB,GAAG,eAAe;;AAEtC,+CAAuC,uBAAuB,QAAQ;AA
CtE,2BAAmB;AACf,uBAAa,KAAK,MAAM;AAExB,cAAI,CAAE,MAAK,MAAM;AACb,wBAAY,KAAK,MAAM;;AAE3B,sBAAY,YAAY,KAAK;AAE7B,gCAAuB,aAAa,KAAK,cAAc,OAAO,OAC1D,IACA,aAAa,KAAK,cAAc;AAMpC,kBAAQ,KAAK,IAAI,OAAO;AACxB,uBAAa,KAAK,cAAc,MAAM;AACtC,yBAAe,KAAK,cAAc,MAAM,KAAK;AAC7C,sBAAY,KAAK,MAAM;AAEvB,uBAAa,GAAG,IAAI,KAAK,cAAc,QAAQ;AAC3C,iCAAqB,KAAK,cAAc;AACxC,8BAAkB,KAAK,YAAY;AACnC,gCAAoB,aAAa,aAAa;AAC9C,mCAAuB,YAAY,YAAY,OAAO,OAAO,IACzD,YAAY,YAAY;AAC5B,wBAAY,YAAY,MAAM,KAAK,IAAI,QAAQ,GAAG;AAClD,yBAAa,YAAY,MAAM;;;AAIvC,6BAAqB;AACrB,6BAAqB;AACjB,wBAAc,YAAY;AAC1B,cAAI,CAAE,UAAS;AACX,yBAAa,SAAS;;AAE1B,uBAAa,OAAO,KAAK,aAAa;;AAG1C,8BAAsB;AACtB,8BAAsB;AAClB,wBAAc,aAAa;AAC3B,cAAI,CAAE,UAAS;AACX,0BAAc,SAAS;;AAE3B,wBAAc,OAAO,KAAK,eAAe;;AAG7C,wBAAgB,OAAO,KAAK,eACvB,IAAI,OAAK,SAAS,GAAG,KACrB,KAAK;AAEV,aAAK,SAAS;AACd,4BAAoB;AAChB,iCAAuB,cAAc;AAGrC,yBAAe,KAAK;AAChB,2BAAe,aAAa,EAAE;AAC9B,2BAAe,aAAa,EAAE;AAC9B,gBAAI,SAAS;AACT,qBAAO;;AAEX,gBAAI,SAAS;AACT,qBAAO;;AAEX,mBAAO;;AAEX,8BAAoB;AAChB,gBAAI,iBAAiB;AACjB,mBAAK,sBAAsB,KAAK;;AAEpC,iBAAK,OAAO,KAAK;;;AAGzB,aAAK,gBAAgB;AAErB,oBAAY,OAAO,KAAK,cACnB,IAAI,OAAK,SAAS,GAAG,KACrB,KAAK;AAIV,kCAA0B,KAAK,OAAO;AAEtC,wCAAgC;AAChC,4BAAoB;AAChB,6BAAmB,aAAa;AAC5B,0BAAc,KAAK;AACnB,gBAAI,SAAS;AACT,8BAAgB,KAAK;AACjB,oBAAI,kBAAkB,QAAQ,OAAO;AACjC,wBAAM,IAAI,aAAa,sDAAsD,eAC3D,MAAM,qEAEV;;;AAGtB,8BAAgB,KAAK;AACjB,kCAAkB,KAAK;;AAE3B,sCAAwB,KAAK,MAAM;;;;AAK/C,aAAK,eAAe;AAGpB,yBAAiB,KAAK,OAAO,IAAI,OAAK,EAAE;AACxC,2BAAmB;AACf,iCAAuB,SAAS,OAAO,OAAK,MAAM,MAAM;AACxD,cAAI,mBAAmB;AACnB,kBAAM,IAAI,aAAa,aAAa,iBAAiB,uFAEjD,KAAK,UAAU;;;AAO3B,aAAK,gBAAgB;AAErB,aAAK,eAAe;AAIpB,YAAI,KAAK;UACL,eAAe;UACf,eAAe;UACf,aAAa;UACb,eAAe;UACf,cAAc,KAAK;UACnB,eAAe,KAAK;UACpB,YAAY,KAAK,OAAO,IAAI,OAAK;UACjC,aAAa,KAAK,QAAQ,IAAI,OAAK;UACnC,aAAa,KAAK,OAAO,IAAI,OAAK,EAAE;UACpC,cAAc,KAAK,QAAQ,IAAI,OAAK,EAAE;;AAE1C,aAAK,QAAQ;AACb,aAAK,YAAY;;MAErB;AACI,YAAI,KAAK,cAAc;AACnB,gBAAM,IAAI,MAAM,cAAc,KAAK;;;MA6B3C;AACI,aAAK;AACL,uBAAe,CAAE,sBAAsB,MAAM,sBAAsB;AACnE,YAAI,EAAE,KAAK,cAAc;AACrB,8BAAoB,KAAK;AACrB,mBAAO,wBAAwB,MAAM,UAAU;;AAInD,kCAAwB,KAAK;AACzB,mBAAO,wBAAwB,UAAU,UAAU;;;AAG3D,eAAO,uBAAuB,KAAK;AACnC,eAAO;;UAEP;AACA,eAAO,KAAK;;UAEZ;AACA,aAAK,OAAO,QAAQ;AAEhB,gBAAM,kBACD,QAAQ,OAAK,EAAE,YAAY;;AAEpC,aAAK,aAAa;;UAElB;AAIA,YAAI,KAAK,kBAAkB,SAAS;AAChC,gBAAM,IAAI,WAAW;;AAKzB,YAAI,CAAC,KAAK;AACN,iBAAO;;AAEX,sBAAc;AACd,4BAAoB,KAAK;AACrB,oBAAU,QAAQ,OAAO,MAAM;;AAEnC,eAAO;;UAEP;AACA,wBAAgB;AAChB,4BAAoB,KAAK;AACrB,kBAAQ,KAAK,GAAG,MAAM;;AAE1B,YAAI,CAAC,KAAK;AACN,mCAAyB;AACzB,8BAAoB,KAAK;AACrB,6BAAiB,KAAK,GAAG,MAAM;;AAEnC,iBAAO,iBAAiB,OAAO;;AAEnC,eAAO;;UAEP;AACA,eAAO,KAAK,iBAAiB,OAAO,KAAK;;MAiB7C,8BAA8B;AAC1B,6BAAqB;AACrB,gCAAwB;AACxB,4BAAoB,KAAK;AACrB,+BAAqB,MAAM;AACvB,gBAAI,aAAa,OAAO,iBAAiB;AACrC,oBAAM,IAAI,WAAW,0BAA0B,OAAO;;AAE1D,yBAAa,OAAO,gBAAgB;AACpC;;;AAGR,kCAA0B;AAC1B,2BAAmB;AAIf,8BAAoB;AACpB,cAAI,aAAa,SAAS;AACtB,2BAAe,KAAK,MAAM;AAC1B,qCAAyB,OAAO,MAAM,GAAG,IAAI,OAAO,CAAC,OAAO,OAAO,SAAS;AAC5E,4BAAgB,iBAAiB,KAAK;;AAE1C,cAAI,aAAa,kBAAkB;AAC/B,8BAAkB,KAAK,CAAC,aAAa,gBAAgB,QAAQ;qBAExD;AACL,kBAAM,IAAI,WAAW,gDAAgD;;AAEzE,iBAAO,aAAa;;AAExB,YAAI;AAEA,6BAAmB;AACnB,6BAAmB;AACf,uBAAW,KAAK;;AAEpB,cAAI,WAAW,SAAS;AACpB,kBAAM,IAAI,WAAW,GAAG,WAAW,aAAa,0CACzC;;;AAGf,sBAAc;;MAMlB;AACI,0BAAkB,KAAK;AACvB,4BAAoB;AACpB,oBAAY,eAAe,KAAK;AAChC,oBAAY,YAAY;AACxB,oBAAY,kBAAkB,eAAe;AAG7C,oBAAY,aAAa;AACzB,eAAO;;MAcX,8BAA8B;AAC1B,4BAAoB,oBAAoB,KAAK;AAC7C,eAAO,eAAe,KAAK,UAAU,eAAe;;MAexD;AACI,eAAO,KAAK;AACR,mBAAS,OAAqB;AAC9B,2BAAiB,IAAI;AACrB,uBAAa,GAAG,IAAI,KAAK,OAAO,QAAQ,EAAE;AACtC,qBAAS,IAAI,KAAK,OAAO,IAAI,OAAO;;AAExC,iBAAO,QAAQ,KAAK,SAAS,UAAU;;;MAY/C;AACI,eAAO,KAAK;AACR,mBAAS,OAAq
B;AAC9B;AACA,cAAI,QAAQ;AACR,oBAAQ,aAA2B,MAAM,OAAO;;AAGhD,oBAAQ,OAAqB;;AAGjC,iBAAO,KAAK,iBAAiB,QAAQ,OAAO;;;MAYpD;AACI,4BAAoB,mBAA+B;AACnD,YAAI,YAAY,WAAW,KAAK,YAAY;AACxC,gBAAM,IAAI,WAAW,+BAA+B,yBACnC,KAAK,YAAY;;AAGtC,qCAA6B;AAC7B,qBAAa,GAAG,IAAI,YAAY,QAAQ;AACpC,wBAAc,KAAK,YAAY;AAC/B,8BAAmB,YAAY;AAG/B,2BAAiB,MAAM,OAAO;AAC9B,+BAAqB,YAAY;;AAErC,0BAAkB,OAAO,KAAK,KAAK,cAC9B,IAAI,OAAK,SAAS,GAAG,KACrB,KAAK;AAEV,YAAI,UAAU,SAAS;AACnB,8BAAoB;AAChB,0BAAc,KAAK,aAAa;AAChC,+BAAmB;AAEf,4BAAc,KAAK;AACnB,kBAAI,KAAK,YAAY,IAAI,OAAK,EAAE,IAAI,QAAQ,MAAM,QAAQ;AAEtD;;AAGJ,mCAAoB;AACpB,2BAAa,GAAG,IAAI,KAAK,cAAc,QAAQ;AAC3C,qCAAqB,KAAK,cAAc;AACxC,mCAAkB,KAAK,YAAY;AACnC,oCAAoB,KAAK,cAAc;AACvC,iCAAiB,GAAG,aAAa,QAAQ,cAAa;AACtD,oCAAmB,qBAAqB;AACxC,6BAAY,KAAK;;AAErB,kCAAoB,MAAM,mBAAmB,iBAA+B;AAC5E,oCAAqB,mBAA+B;AACpD,gCAAkB,MAAM,aAAa,QAAQ;AAC7C,2BAAa,GAAG,IAAI,cAAa,QAAQ;AACrC,iCAAiB,GAAG,MAAM,QAAQ,aAAa;AAC/C,qCAAqB,YAAY,cAAa;;;;;AAM9D,6BAAqB;AACrB,gCAAwB;AACxB,qBAAa,GAAG,IAAI,KAAK,aAAa,QAAQ;AAC1C,wBAAc,KAAK,aAAa;AAChC,4BAAkB,KAAK,wBAAwB;AAC/C,8BAAoB,KAAK,0BAA0B;AACnD,2BAAiB,GAAG,MAAM,QAAQ,aAAa;AAC/C,0BAAgB,KAAK;;AAEzB,qBAAa,GAAG,IAAI,gBAAgB,QAAQ;AACxC,sBAAY,gBAAgB;AAC5B,mBAAqB,OAAO;AAC5B,uBAAa,KAAK,qBAAqB;;AAG3C,eAAO,iBAA+B;;MAY1C;AACI,YAAI,SAAS;AACT,kBAAQ,aAA2B,MAAM,OAAO;;AAOpD,0BAAkB;AAClB,qBAAa,GAAG,IAAI,KAAK,OAAO,QAAQ,EAAE;AACtC,oBAAU,KAAK,OAAO;AACtB,oBAAU,OAAO;AACjB,uBAAa,MAAM;AACnB,oBAAU,EAAE,MAAM,CAAC,GAAG;;AAE1B,0BAAkB,OAAO,KAAK,KAAK,cAC9B,IAAI,OAAK,SAAS,GAAG,KACrB,KAAK;AACV,4BAAoB;AAChB,wBAAc,KAAK,aAAa;AAChC,6BAAmB;AAEf,0BAAc,KAAK;AACnB,0CAA8B,KAAK;AACnC,2CAA+B,KAAK;AAIpC,iCAAqB,IAAI;AACzB,4BAAgB;AACZ,kBAAI,EAAE,MAAM;AACR,6BAAa,KAAK,UAAU,EAAE;;;AAGtC,gBAAI,aAAa,WAAW,sBAAsB;AAE9C,2BAAa;AACb;AACA;AACA;AACA;AAEA,kBAAI,KAAK,YAAY;AACjB,yBAAS,KAAK;;AAElB,kBAAI,aAAa,WAAW;AACxB,uDAAuC,aAAa;AACpD,oBAAI,OAAO,WAAW;AAClB,yBAAO,UAAU;;AAErB,iCACI,OAAqB,MAAM,KAAK,gBAAgB;AACpD,+BAAc,OAAqB,MAAM,YAAY,gBAAgB;AACrE,kCAAkB,CAAC;AACnB,gCAAgB,CAAC;;AAGjB,kCAAkB,aAAa,IAAI,OAAK,EAAE;AAC1C,gCAAgB,aAAa,IAAI,OAAK,EAAE;AACxC,oBAAI,OAAO,WAAW;AAClB,yBAAO,UAAU;;AAErB,iCACI,OAAqB,MAAM,KAAK,iBAAiB;AACrD,+BAAc,OAAqB,MAAM,YAAY,iBAAiB;;AAE1E,kBAAI,MAAM;AACN,sBAAM,IAAI,oBAAoB;;AAKlC,2BAAa,GAAG,IAAI,uBAAuB,QAAQ,EAAE;AACjD,0BAAU,uBAAuB;AACjC,0BAAU,eAAc;AACxB,6BAAa,aAAY;AACzB,0BAAU,EAAE,MAAM,CAAC,GAAG;;;;;AAKtC,8BAAsB;AACtB,4BAAoB;AACpB,6BAAqB;AACrB,wBAAgB,KAAK;AACjB,mBAAqB,EAAE,MAAM,WAAW,4BAA4B,EAAE,UAAU,EAAE;AAClF,kCAAuB,UAAU,EAAE;AACnC,uBAAa,KAAK,QAAO;AACzB,wBAAc,KAAK;AACnB,sBAAY,KAAK;;AAGrB,eAAO,CAAC,eAAe,aAAa;;MAUxC;AACI,kCAA0B;AAC1B;AACA,4BAAoB,KAAK;AACrB,sBAAY,iBAAiB,YAAY,IAAI;AAC7C,uCAA6B,GAAG,oBAAoB,MAAM,aAAa,QAAQ;AAC3E,4BAAgB,UAAU,QAAQ,OAAO;AACzC,gBAAI,KAAK,eAAe,IAAI;AAExB,gCAAkB,WAAW;AAC7B,2BAAa;;;;AAIzB,eAAO;;MAqBX;AACI,YAAI,UAAS;AACT,cAAI,KAAK,OAAO,UAAU;AACtB,kBAAM,IAAI,WAAW,wCAAwC,8BAClD,KAAK,OAAO;;AAGvB,mBAAO,KAAK,OAAO;;;AAIvB,cAAI,QAAQ;AACR,kBAAM,IAAI,WAAW;;;AAG7B,4BAAoB,KAAK;AACrB,cAAI,MAAM,SAAS;AACf,mBAAO;;;AAGf,cAAM,IAAI,WAAW,kBAAkB;;MAO3C;AAKI,eAAO,KAAK;AACR,0BAAe;AACf,8BAAoB,KAAK;AACrB,iCAAqB,GAAG,YAAY,MAAM,aAAa,QAAQ,EAAE;AAC7D,8BAAgB,UAAU,QAAQ,OAAO;AACzC,kBAAI,KAAK,eAAe,IAAI;AACxB,wBAAO,KAAK,GAAG,MAAM;;;;AAKjC,iBAAO;;;MAGf;AACI,wBAAe,CAAE,MAAM,KAAK;AAI5B,kCAA0B,KAAK,uBAAuB,KAAK;AAE3D,6BAAqB;AACrB,4BAAoB,KAAK;AACrB,iCAAuB,MAAM;AAC7B,8BAAoB,MAAM;AAC1B,uCAA6B;AAC7B,uCAA6B,GAAG,oBAAoB,MAAM,aAAa,QAAQ;AAC3E,yBAAa,MAAM,aAAa;AAChC,4BAAgB,UAAU,QAAQ,OAAO;AACzC,yBAAa;AACb,gBAAI,KAAK,eAAe,IAAI;AAGxB,kBAAI,KAAK;AACL;AACI,uBAAK,UAAU,KAAK;AACpB,2BAAS,KAAK;;AAGd,0BAAQ,KAAK,SAAS,MAAM,uDAErB,KAAK;AAGZ,2BAAS;;;AAGjB,kBAAI,KAAK,cAAc,SAAS;AAC5B,iCAAiB;A
ACjB,6BAAa,GAAG,IAAI,KAAK,cAAc,QAAQ;AAC3C,uCAAqB,KAAK,cAAc;AACxC,oCAAkB,KAAK,YAAY;AACnC,sCAAoB,KAAK,cAAc;AACvC,mCAAgB,UAAU,QAAQ,cAAc;AAChD,qCAAmB,kBAAkB;AACrC,sBAAI,gBAAgB;AAChB,mCAAe;;AAEnB,2BAAS,KAAK,CAAC,aAAa,MAAM,cAAc,aAAa;;AAEjE,qCAAqB,KAAK;;;;AAItC,uBAAa;AACb,eAAK,UAAU,MAAM;AACrB,eAAK,eAAe;AACpB,eAAK,YAAY;AACjB,eAAK,kBAAkB;AACvB,uBAAa,KAAK;;AAEtB,gBAAO,YAAY;AAEnB,4BAAoB;AACpB,qBAAa,GAAG,IAAI,KAAK,YAAY,QAAQ;AACzC,wBAAc,KAAK,YAAY;AAC/B,4BAAkB,KAAK,uBAAuB;AAC9C,0BAAgB,UAAU,QAAQ,OAAO;AACzC,cAAI,CAAC,KAAK,eAAe,IAAI;AACzB;;AAEJ,6BAAmB,kBAAkB;AACrC,cAAI,iBAAiB,QAAQ,iBAAiB;AAC1C,2BAAe;;AAEnB,8BAAoB,KAAK,yBAAyB;AAClD,sBAAY,KAAK,CAAC,MAAM,MAAM,cAAc;;AAEhD,gBAAO,iBAAiB;AACxB,6BAAqB;AACrB,qBAAa,GAAG,IAAI,KAAK,aAAa,QAAQ;AAC1C,wBAAc,KAAK,aAAa;AAChC,4BAAkB,KAAK,wBAAwB;AAC/C,0BAAgB,UAAU,QAAQ,OAAO;AACzC,cAAI,CAAC,KAAK,eAAe,IAAI;AACzB;;AAEJ,6BAAmB,kBAAkB;AACrC,cAAI,iBAAiB,QAAQ,iBAAiB;AAC1C,2BAAe;;AAEnB,8BAAoB,KAAK,0BAA0B;AACnD,uBAAa,KAAK,CAAC,MAAM,MAAM,cAAc;;AAEjD,gBAAO,kBAAkB;AACzB,eAAO;;aAeJ,yCAAwC,qBAAqB;AAGhE,8BAAsB;AAMtB,iCAAyB;AACzB;AACI,cAAI,CAAE,OAAM,QAAQ;AAChB,6BAAiB,MAAM,QAAQ,CAAC;;AAGhC,6BAAiB,MAAM,MAAM,KAAK;;;AAG1C;AACI,gCAAqB;AACrB;AACA,kCAAwB;AACpB,qCAAyB,UAAU;AACnC,qCAAyB,UAAU;AACnC,uCAA2B,UAAU;AACrC,qBAAS,UAAU,MAAM,OACrB,KACA,UAAU;AACd,gBAAI,CAAE,qBAAoB;AACtB,iCAAmB,OAAO;AAC1B;;AAEJ,iCAAqB,cAAc;AACnC,gBAAI,aAAa,aAAa,UAAU;AACpC,iCAAmB,OAAO;AAC1B;;AAEJ,gCAAoB,aAAa,aAAa;AAC9C,0BAAa,KAAK,YAAY,cAAc;;AAKhD,cAAI,cAAa,SAAS;AACtB,kBAAM,MAAM,iBAA+B,gBAAe;;;AASlE;AACI,4BAAkB,UAAU;AAE5B,wBAAc,YAAiB,WAAW,QAAO,oBAAoB,OACjE,QAAO,mBACP;AACJ,gBAAM,6BAA6B;AACnC,wBAAc,aAAa;AAE3B,mCAAyB,UAAU;AACnC,2BAAiB,QAAQ;AACrB,gBAAI,CAAE,qBAAoB;AACtB,oBAAM,IAAI,WAAW,yDAAyD;;AAMlF,+BAAmB,OAAO;;;AAIlC,qBAAa,QAAO;AACpB,iCAAyB,QAAO;AAChC,gCAAwB;AACpB,uBAAa;;AAMjB,eAAO,CAAC,cAA4B;AAChC,kCAAwB;AACpB,0BAAc,cAAc,UAAU;AACtC,gBAAI,MAAM,QAAQ;AACd,sDAAwC,iBAAiB,MAAM;AAC/D,qBAAO,iBAAiB,MAAM;AAC9B,qCAAuB;AACnB,4BAAY,OAAO;;;;;AAKnC,6BAAqB;AACrB,8BAAsB;AACtB,sCAA8B,QAAO;AACrC,gCAAwB;AACpB,4BAAkB,UAAU;AAC5B,4BAAkB,UAAU;AAC5B,8BAAoB,UAAU;AAC9B,mBAAqB,aAAa;AAClC,wBAAc,cAAc;AAC5B,qCAA2B,MAAM,aAAa,WAAW;AACzD,uBAAa,KAAK,mBAAmB;;AAEzC,uCAA+B,QAAO;AACtC,gCAAwB;AACpB,4BAAkB,UAAU;AAC5B,4BAAkB,UAAU;AAC5B,8BAAoB,UAAU;AAC9B,mBAAqB,aAAa;AAClC,wBAAc,cAAc;AAC5B,qCAA2B,MAAM,aAAa,WAAW;AACzD,wBAAc,KAAK,mBAAmB;;AAE1C,eAAO,IAAI,IAAI,CAAE,QAAQ,cAAc,SAAS,eAAe;;UAQ/D;AAGA,YAAI,KAAK;AACL,gBAAM,IAAI,WAAW;;AAIzB,4BAAoB,KAAK;AACrB,cAAI,MAAM;AACN,mBAAO;;;AAGf,eAAO;;MAQX;AACI,aAAK;AACD,eAAK,OAAO,QAAQ;AAEhB,gBAAI,MAAM;AACN,oBAAM;;;;;;AC5mC1B;;;;;;;;;AAUA;AACI,yBAAmB,YAAY;AAC/B,UAAI,WAAW,QAAS,MAAM,QAAQ,YAAY,QAAQ,WAAW;AACjE,eAAO,YAAY,IAAI,UAAQ;;AAEnC,UAAI,eAAe;AACf,YAAI,MAAM,QAAQ,YAAY,QAAQ,WAAW;AAC7C,iBAAO;mBAEF,OAAO,YAAY,YAAY,YAAY,MAAM;AACtD,iBAAO,CAAC,QAAQ,YAAY;;AAG5B,iBAAO,CAAC;;;AAGhB,UAAI,MAAM,QAAQ;AACd,YAAI,QAAQ,WAAW;AACnB,gBAAM,IAAI,MAAM,YAAY,6BAA6B,QAAQ,wCAC5B;;AAGzC,eAAO;iBAEF,OAAO,YAAY,YAAY,OAAO,KAAK,SAAS,SAAS,KAClE,OAAO,QAAQ,OAAO,KAAK,SAAS,QAChC;AACJ,uBAAe;AACf,oBAAY,QAAQ;AAChB,cAAI,cAAc;AACd,mBAAO,KAAK,QAAQ;;AAGpB,mBAAO,KAAK;;;AAGpB,eAAO;;AAGP,cAAM,IAAI,MAAM,2BAA2B,2BACjC,2CACH,yCAAyC,8BAChC,8BAA8B,KAAK,UAAU;;;AAgB9D;AACH,aAAO,gCAAgC,aAAa,aAAa;;AAE9D;AACH,aAAO,gCAAgC,aAAa,aAAa;;AAoB9D;AACH,UAAI,gBAAgB,QAAQ,oBAAoB;AAG5C,cAAM,IAAI,MAAM;;AAEpB,UAAI,eAAe;AAEf,yBAAiB,KAAK;AAClB,cAAI,EAAE,MAAM,WAAW;AAEnB,mBAAO,EAAE;qBAEJ,EAAE,MAAM,WAAW;AACxB,gBAAI,EAAE,MAAM,KAAK;AAEb,2BAAa;AACb,qBAAO,EAAE,OAAO;uBAEX,EAAE,MAAM,OAAO;AAEpB,qBAAO,EAAE,QAAQ,CAAC,EAAE,MAAM;;AAG1B,oBAAM,IAAI,MAAM,+CAA+C,EAAE,MAAM;;;AAM3E,kBAAM,IAAI,MAAM,yCAAyC,EAAE;;;AAInE,8BAAsB,MAAM,KAAK,MAAM,SAAS;
AAChD,gBAAQ;AACR,kCAA0B;AAC1B,sBAAc,QAAQ;AAClB,cAAI,YAAY,eAAe;AAC3B,kBAAM,IAAI,MAAM,wEACC;;AAIjB,8BAAkB,KAAK,YAAY;;;AAG3C,eAAO,UAAS,mBAAmB;;AAGnC,eAAO;;;AAUR;AACH,aAAO,KAAI,SAAQ;;ACzJvB;;;;;;;;;AAoBA,0CAAsC;AAetC;AAKI;AACA;AACA,6BAAuB;AACvB,WAAK,eAAe;AACpB,WAAK,eAAe;AACpB,cAAgB,MAAM,QAAQ,MAAM,MAAM,MAAM,mPAIzC;AACP,0BAAoB,0BAA0B,SAAS,OAAM,YAAY;AACzE,0BAAoB,0BAA0B,UAAU,OAAM,aAAa;AAC3E,wBAAkB,YAAY,GAAG,MAAM;AACvC,cAAgB,YAAY,WAAW,OAAM,OAAO,QAAQ,MAAM,mBAAmB,OAAM,OAAO,2CAClF,YAAY,yCACrB,KAAK,UAAU,OAAM;AAC5B,cAAgB,YAAY,WAAW,OAAM,QAAQ,QAAQ,MAAM,mBAAmB,OAAM,QAAQ,4CACpF,YAAY,2CACrB,KAAK,UAAU,OAAM;AAC5B,wBAAkB,GAAG,SAAS,YAAY,QAAQ;AAC9C,gBAAgB,YAAY,QAAQ,MAAM,OAAO,WAAW,MAAM,8BAC3D,OAAM,WAAW,eAAe,YAAY,QAAQ,MAAM,iBAChD,4BAA4B,OAAM,WAAW;;AAElE,wBAAkB,GAAG,SAAS,YAAY,QAAQ;AAC9C,gBAAgB,YAAY,QAAQ,MAAM,OAAO,WAAW,MAAM,+BAC3D,OAAM,YAAY,eAAe,YAAY,QAAQ,MAAM,iBACjD,4BAA4B,OAAM,WAAW;;AAElE,aAAO,CAAE,IAAI,aAAa,IAAI;;AAElC;AACI,UAAI,kBAAkB;AAClB,eAAO,CAAC;iBAEH,MAAM,QAAQ;AACnB,gBAAgB,OAAO,WAAW,MAAM,QAAQ,MAAM,wBAAwB,OAAO,gCAAgC,MAAM,uBAAuB,sBAAsB;AACxK,eAAO;;AAGP,uBAAe;AAEf,2BAAmB;AACf,cAAI,OAAO,SAAS;AAChB,kBAAM,IAAI,WAAW,gEACd,sBAAsB;;AAEjC,iBAAO,KAAK,OAAO;;AAEvB,eAAO;;;AAGf;AACI,UAAI,MAAK,WAAW;AAChB,cAAM,IAAI,oBAAoB;;AAElC,aAAO,CAAE,IAAI,MAAK,IAAI,IAAI,MAAK;;AAE5B;AAKH,iCAA2B,KAAK,mBAAmB;AACnD,cAAgB,OAAM,aAAa,MAAM,MAAM;AAE/C,cAAgB,QAAQ,MAAM,MAAM;AAEpC,cAAgB,KAAK,UAAU,QAAQ,KAAK,SAAS,KAAK,OAAO,UAAU,KAAK,SAAS,MAAM,iFACvE,KAAK;AAC7B,cAAgB,CAAC,sBACZ,KAAK,kBAAkB,KAAK,OAAO,UAAU,KAAK,kBAAmB,MAAM,uGAClC,KAAK;AACnD,cAEA,KAAK,sBAAsB,MAAM,MAAM;AAEvC,UAAI,OAAM;AACN,cAAM,IAAI,MAAM;;AAEpB,aAAM,aAAa;AACnB;AACI,6BAAqB,KAAK,kBAAkB;AAC5C;AACA;AACA,YAAI;AACA,cAAI,gBAAgB,KAAK;AACrB,oBAAgB,KAAK,qBAAqB,QACrC,KAAK,oBAAoB,KACtB,OAAO,UAAU,KAAK,oBAAqB,MAAM,iJAG1C,KAAK;;AAGpB,mCAAuB,gCAAgC,KAAK;AAC5D,oBAAQ,eAAe;AACvB,oBAAQ,eAAe;;;AAG/B,8BAAsB,OAAM;AAC5B,0BAAkB,OAAM;AACxB;AACA,YAAI;AACA,4BACI,UAAU,QAAQ,OAAO,UAAU,IAAI,OAAK,SAAS;;AAGzD,4BAAkB,UAAU;;AAEhC,2BAAkB,qBAAqB,KAAK,WAAW,KAAK;AAC5D,wBAAgB,KAAK,WAAW,OAAO,IAAI,KAAK;AAChD,eAAQ,cAAc,WAAY,mBAAmB,YAAW,SAAS,KAAK,QAAQ,MAAM,MAAM,iBAAiB,SAAS,OAAO,MACnI,cAAc;AACd,qBAAa,SAAS;AACtB,eAAM,UAAU;AAChB,cAAM,aAAa;AACnB,eAAM,gBAAgB;AACtB,oBAAY,KAAK,gBAAgB,OAAO,IAAI,KAAK;AACjD,2BAAmB,MAAM,QAAQ;AACjC,eAAO,QAAQ,KAAK;AAChB,4BAAkB;AAClB,gBAAM,aAAa,aAAa;AAChC,0BAAgB;AAChB,2BAAiB;AACjB,cAAI,CAAC;AACD,2BAAe,MAAM,QAAQ;;AAEjC,iBAAO,qBAAqB,YAAY,KAAK,kBAAkB;AAC3D,gCAAoB,MAAM,aAAa;AAGvC,gBAAI,sBAAsB,YAAY;AAClC,sBAAQ,KAAK,uCACN,KAAK,oEAEL,mJAIA,KAAK,kBAAkB,KAAK;AAGnC;;AAEJ,gBAAI,YAAY,SAAS;AACrB,qBAAQ,IAAI,MAAO,8BAA8B,QAAO,YAAY;AACpE,gCAAkB;AAClB,wBAAU,WAAW;AACrB,wBAAU,UAAU,GAAG,GAAG,MAAM;AAChC,oBAAM,aAAa,aAAa,YAAY;AAC5C,oCAAsB;AACtB,kBAAI,KAAK,eAAe;AACpB,6CAA6B,wBAAwB,KAAK,aAAa,OAAM;AAC7E,6BAAa,GAAG,IAAI,qBAAqB,QAAQ,EAAE;AAC/C,gCAAc,KAAK,MAAM,mBAAmB,GAAG,IAAI,MAAM,qBAAqB;;;AAItF,0BAAY,GAAG,OAAO,IAAI,OAAO;AACjC,2BAAa,cAAc;AAC3B,sBAAY;AACZ,2BAAa,GAAG,IAAI,UAAU,QAAQ,EAAE;AACpC,8BAAc,UAAU;AACxB,4BAAY,KAAK;AACjB,0BAAU,SAAS;AACnB,qBAAS;;AAEb,oBAAM,aAAa,WAAW,YAAY;AAC1C,mCAAqB;AACrB;AACA;;AAEJ,gBAAI,qBAAqB,aAAa,KAAK,kBACvC,YAAY;AAEZ,kBAAI;AACA;AACA,oBAAI,gBAAgB,KAAK;AACrB,4BAAU,OAAO,MAAM,OAAM,gBAAgB,KAAK,gBAAgB,CAAE,SAAS,KAAK;;AAGlF,4BAAU,OAAO,OAAM,SAAS,OAAO,OAAO;oBAC1C,WAAW,KAAK,uBAAuB,OACnC,gCACA,KAAK;oBACT,SAAS;;;AAGjB,6BAAa,GAAG,IAAI,OAAM,aAAa,QAAQ,EAAE;AAC7C,4BAAU,OAAO,OAAM,aAAa,QAAQ,QAAQ;;;AAQ5D;;AAEJ,gBAAI,OAAM;AACN;;;AAGR,gBAAM,aAAa,WAAW,OAAO;AACrC;AACA,cAAI,OAAM;AACN;;;AAGR,cAAM,aAAa;AACnB,cAAM,OAAM,QAAQ;AACpB,eAAO,OAAM;;AAGb,eAAM,aAAa;;;AAI3B;AAEI,0BAAoB;AACpB,UAAI,KAAK,mBAAmB;AACxB,wBAAgB,KAAK;iBAEhB,OAAO,SAAS,QAAQ;AAC7B
,wBAAgB,QAAQ;;AAE5B,aAAO;;AAIX;AACI,aAAQ,OAAO,QAAQ,aAAa;;AAIxC;AACI,aAAQ,OAAO,SAAS,SAAS;;AAE9B;AAKH,aAAO,QAAQ;AACf,yBAAmB,KAAK,WAAW;AACnC,gBAAU,OAAM;AAChB,iBAAW;AACX,UAAI,KAAK,UAAU;AACf,cAAM,IAAI,oBAAoB;;AAElC,cAAgB,CAAC,cAAe,KAAK,UAAU,KAAK,OAAO,UAAU,KAAK,UAAW,MAAM,wEAC3E,KAAK,UAAU,KAAK;AACpC,2BAAqB,qBAAqB,WACtC,UACA,MAAM,QAAQ;AAElB,wBAAkB;AAClB,kBAAY;AACZ,aAAO,aAAa,QAAQ,KAAK,UAAU;AACvC,4BAAoB,MAAM,aAAa;AACvC,eAAO,KAAS;AACZ,cAAI,YAAY;AAGZ,mBAAQ,IAAI,MAAO,8BAA8B,QAAO,YAAY;AACpE,4BAAgB,GAAG,OAAO;AAC1B,8BAAkB,KAAS,MAAM,EAAE;AACnC,oBAAY;AACZ,gBAAI,UAAU;AACV,2BAAa,GAAG,IAAI,UAAU,QAAQ,EAAE;AACpC,qBAAK,KAAK,QAAO;;;AAGzB,8BAAkB,QAAQ,GAAG,MAAM;AACnC,yBAAa,GAAG,IAAI,UAAU,QAAQ,EAAE;AACpC,+BAAiB,UAAU;AAC3B,gCAAkB,KAAK;AACvB,mBAAK,KACD,KAAS,MAAM,MAAQ,KAAK,IAAI,KAAQ,WAAW;AACvD,kBAAI,QAAQ;AACR,wBAAY;;;AAGpB,oBAAY;AACZ,2BAAe;AACf,cAAE;;AAEN,iBAAO;;AAEX,YAAI,YAAY;AACZ,cAAI;AACA,oBAAQ,KAAK,gLAGiB,KAAK;;AAIvC;;;AAGR,mBAAa,GAAG,IAAI,KAAK,QAAQ,EAAE;AAC/B,0BAAkB,KAAK;AACvB,aAAK,KAAK,IAAQ,KAAK,IAAI;AAC3B,gBAAY;;AAEhB,aAAO,iBAAiB;;AC5V5B;;;;;;;;;AAmBO;AACH,cAAgB,YAAY,KAAK,OAAO,UAAU,YAAY,MAAM,2DAA2D;;AAe5H;AACH,UAAI,UAAU;AACV,eAAO,CAAC;iBAEH,MAAM,QAAQ;AACnB,eAAO,OAAO,IAAI,YAAS,oBAAoB,QAAO,OAAO,OAAO;;AAGpE,eAAO,oBAAoB,QAAQ,OAAO,OAAO;;;AAgBlD;AACH,aAAO,KAAS;AACZ,YAAI,UAAU;AACV,iBAAO;mBAEF,MAAM,QAAQ;AACnB,iBAAO,OAAO,IAAI,YAAS,qBAAqB,QAAO;;AAKvD,iBAAO,SAAO,QAAQ,QAAQ,UAAU,UAAU,UAAU,QAAQ;;;;AAYzE;AACH,qBAAe;AACf,uBAAiB;AACjB,qBAAe;AACf,aAAO,aAAa;AAChB,mBAAW,aAAa;AACxB,YAAI,YAAY;AACZ,qBAAW;;AAEf,eAAO,KAAK,CAAC,YAAY;AACzB,qBAAa;;AAEjB,aAAO;;AA6BX;AAII,UAAI,aAAa;AACb,oBAAY;;AAEhB,UAAI,UAAU;AACV,iBAAS;;AAEb,UAAI,aAAW;AACX,oBAAU;;AAEd,UAAI,gBAAgB;AAChB,uBAAe;;AAGnB,yBAAmB;AACnB,UAAI,QAAQ,QAAQ,UAAU;AAC1B,uBAAe;;AAGnB,UAAI,mBAAmB;AACnB,uBAAe;AACf,YAAI,iBAAiB;AACjB,gBAAM,IAAI,WAAW;;;AAI7B,8BAAwB,OAAM,gBAAgB,KAAK,WAAW,eAAe;AAC7E;AACA,UAAI,mBAAmB;AACnB,qBAAa,QAAM,GAAG;;AAE1B,UAAI,WAAW;AACX,kBAAU;;AAEd,aAAQ,cAAc,WAAY,mBAAmB,YAAW,SAAS,QAAQ,cAAc,iBAAiB,eAAe,WAAW,cAAc;AACxJ,mBAAa,SAAS;AACtB,aAAM,UAAU;AAChB,YAAM,aAAa;AACnB,aAAM,gBAAgB;AAGtB,uBAAiB,cAAc,QAAQ,QAAQ,EAAE;AAC7C,cAAM,aAAa,aAAa;AAChC,0BAAkB;AAClB,YAAI,iBAAiB;AACjB,gBAAM,IAAI,oBAAoB;;AAG9B,cAAI,cAAY;AACZ,kBAAM,IAAI,oBAAoB;qBAEzB;AACL,qBAAa;;AAIjB,oCAA0B,UAAS;AACnC,0BAAgB,YAAY,iBAAiB;AAC7C,gCAAsB,GAAG,aAAa,QAAQ,QAAQ,EAAE;AACpD,8BAAkB;AAClB,kBAAM,aAAa,aAAa,YAAY;AAC5C,iBAAS;AACL,iCAAmB,QAAQ,YAAY;AACvC,+BAAiB,QAAQ,YAAY;AACrC,+BAAiB,oBAAoB,mBAAmB,YAAY,WAAW;AAC/E,wBAAU,WAAW;AACrB,wBAAU,UAAU,WAAW;AAG/B,+BAAiB,qBAAqB,KAAK;AAC3C,2BAAa,EAAE;AACf,2BAAa,GAAG,IAAI,UAAU,QAAQ,EAAE;AACpC,8BAAc,UAAU;AACxB,4BAAY,KAAK;AACjB,0BAAU,SAAS;AACnB,qBAAS;;AAGb,kBAAI,eAAe,QAAQ,SAAS;AAChC,oBAAI;AACA,kCAAgB,OAAM,SAAS,MAAM,QAAQ;AAE7C,+BAAa,GAAG,IAAI,UAAU,QAAQ,EAAE;AACpC,kCAAc,UAAU;AACxB,gCAAY,QAAQ;AACpB,yBAAS;AAET,8BAAU,SAAS,SAAS;;;;;AAK5C,kBAAM,aAAa,WAAW,YAAY;AAC1C,iCAAqB;AACrB,gBAAI,OAAM;AACN;;;AAIR,4BAAkB;;AAGtB,cAAM,aAAa,WAAW,OAAO;AACrC,YAAI,OAAM;AACN;;;AAGR,YAAM,aAAa;AACnB,YAAM,OAAM,QAAQ;AACpB,aAAO,OAAM;;AAEV,mDAGa;AAChB,UAAI,OAAM;AACN,cAAM,IAAI,MAAM;;AAEpB,aAAM,aAAa;AACnB;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACI,0BAAkB,KAAK,aAAa,OAAO,KAAK,KAAK;AACrD,uBAAe;AAGf,+BAAuB;AACvB,iCAAyB,MAAM,OAAM,oBAAoB,GAAG,GAAG,KAAK,cAAc,KAAK,aAAa,gBAAgB;AACpH,iBAAS,iBAAiB;AAC1B,kBAAU,iBAAiB;AAC3B,wBAAgB,iBAAiB;AAEjC,2BAAmB;AACnB;AACA,YAAI,KAAK,kBAAkB,QAAQ,KAAK,eAAe,SAAS;AAC5D,yBAAe;AACf,cAAI,KAAK,eAAe,WAAW;AAE/B,wBAAY,KAAK,eAAe;AAChC,wBAAY,KAAK,eAAe;qBAE3B,KAAK,eAAe,WAAW;AACpC,kBAAM,IAAI,oBAAoB;;AAG9B,kBAAM,IAAI,WAAW,0GAEd,KAAK;;AAEhB,kCAAuB;AACvB,kCAAwB,MAAM,OAAM,oBAAoB,WAAW,WAAW,MAAoC,MAAmC,iBAAgB;AACrK,iBAAO,gBAAgB;AACv
B,iBAAO,gBAAgB;AACvB,mBAAS,KAAK,OAAO;mBAGhB,KAAK,mBAAmB,QAAQ,KAAK,kBAAkB,KAC5D,KAAK,kBAAkB;AACvB,yBAAe;AAEf,0BAAgB,KAAK,MAAM,OAAO,GAAG,MAAM,KAAM,KAAI,KAAK;AAC1D,oCAA0B,OAAO,GAAG,MAAM;AAC1C,iBAAO,YAAY,QAAQ,SAAS;AACpC,mBAAS,YAAY,QAAQ,GAAG;AAChC,iBAAO,YAAY,SAAS,SAAS;AACrC,oBAAU,YAAY,SAAS,GAAG;AAGlC,mBAAS,KAAK,OAAO;mBAGhB,KAAK,mBAAmB;AAC7B,yBAAe;;AAGnB,oBAAY,OAAO,OAAO,SAAS,OAAO;AAC1C,eAAM;AAYN,8BAAsB,OAAM;AAC5B,0BAAkB,OAAM;AACxB;AACA;AACA,YAAI;AACA,iBAAM;AACN,wBAAc,OAAM;AACpB,4BACI,UAAU,QAAQ,OAAO,UAAU,IAAI,OAAK,SAAS;;AAGzD,wBAAc;AACd,mBAAS;AACT,4BAAkB,UAAU;;AAEhC,2BAAkB,qBAAqB,KAAK,WAAW,KAAK;AAC5D,oBAAY,MAAM,QAAQ,QAAO,eAAe,KAAK,WAAW,WAAW,KAAK,QAAQ,KAAK,SAAS,YAAW,aAAa,QAAQ,KAAK,SAAS,iBAAiB,KAAK,cAAc,MAAM;AAC9L,eAAO;;AAGP,eAAM,aAAa;AAEnB,0BAAkB,QAAQ;AAC1B,0BAAkB,SAAS;AAC3B,0BAAkB,MAAM;AACxB,0BAAkB,MAAM;AACxB,YAAI,iBAAiB;AACjB,kBAAY;;;;AAWjB;AACH,mBAAa;AACb,UAAI,mBAAmB;AACnB,kBAAU,CAAC;;AAGf,mBAAa,GAAG,IAAI,QAAQ,QAAQ,EAAE;AAClC,wBAAe,QAAQ;AACvB,YAAI,QAAO,SAAS;AAChB,eAAK,KAAK,aAAW,SAAQ;mBAExB,QAAO,SAAS;AACrB,gBAAM,IAAI,MAAM;;AAIhB,eAAK,KAAK;;;AAGlB,aAAO;;AAcJ;AACH,UAAI,WAAW;AACX;;AAEJ,2BAAqB;AACrB,UAAI,sBAAsB;AACtB,qBAAa,KAAK,WAAW;iBAExB,MAAM,QAAQ;AACnB,mBAAW,QAAQ,OAAK,aAAa,KAAK,EAAE;iBAEvC,cAAc;AAEnB,2BAAmB;AACf,4BAAkB,WAAW;AAC7B,uBAAa,KAAK,UAAU;;;AAGpC,+BAAyB;AACzB,UAAI,mBAAmB;AACnB,YAAI,aAAa,QAAQ,QAAQ,QAAQ;AACrC,2BAAiB,KAAK;;iBAGrB,MAAM,QAAQ;AACnB,gBAAQ,QAAQ;AACZ,cAAI,aAAa,QAAQ,EAAE,QAAQ;AAC/B,6BAAiB,KAAK;;;iBAIzB,WAAW;AAEhB,2BAAmB;AACf,0BAAe,QAAQ;AACvB,cAAI,aAAa,QAAQ,QAAO,QAAQ;AACpC,6BAAiB,KAAK;;;;AAIlC,uBAAiB,QAAQ;AACrB,YAAI,CAAC,EAAE;AACH,YAAE;;;;AChbd;;;;;;;;;AAiCO;AACH,aAAO,aAAa;;AAKjB;AACH,aAAO,MAAM,QAAQ;;AAKlB;AACH,aAAO,CAAC,aAAa,MAAM,CAAC,YAAY;;AAarC,yEAAoE,wBAAwB;AAC/F,UAAI,SAAS,QAAQ,MAAM,WAAW;AAGlC,YAAI,SAAQ;AACR,kCAAwB;AACxB,cAAI,YAAY,UAAS,MAAK,SAAS;AACnC,gCAAoB;qBAEf,WAAW;AAChB,8BAAkB;AACd,kBAAI,MAAK,eAAe;AACpB,oCAAoB;AACpB;;;;AAMR,gCAAoB;;AAExB,cAAI;AACA,kBAAM,IAAI,WAAW,6BAA6B,6CACnC;;;AAGvB,eAAO;;AAEX,UAAI,SAAQ;AACR,eAAO,MAAM,IAAI,UAAQ;;AAE7B;AACA,UAAI,WAAW;AACX,gBAAO;AACP,iBAAS;AACT,2BAAmB;AACf,cAAI,MAAK,SAAS;AACd,kBAAM,IAAI,WAAW,yBAAyB,qCACvC;;AAEX,iBAAO,KAAK,MAAK;;iBAGhB,YAAY;AACjB,gBAAO;AACP,YAAI,MAAK,WAAW,MAAM;AACtB,gBAAM,IAAI,WAAW,6BAA6B,gIAEX,MAAM,sEACO;;AAExD,iBAAS;;AAGT,gBAAO;AACP,YAAI,MAAM,SAAS;AACf,gBAAM,IAAI,WAAW,aAAa,2BAA2B,MAAM,4EACL,MAAK;;AAEvE,iBAAS,CAAC;;AAEd,eAAS,2BAA2B;AAEpC,UAAI,UAAU;AACV,qBAAa,GAAG,IAAI,MAAM,QAAQ,EAAE;AAChC,cAAI,OAAO,MAAM;AACb;;AAEJ,yBAAc,OAAO;AACrB,cAAI,OAAM,MAAM,WAAW,OAAO,GAAG;AACjC,kBAAM,IAAI,WAAW,uBAAuB,6BAA6B,MAAM,cAChE,OAAO,GAAG,iDACZ,OAAM;;AAEvB,uBAAa,GAAG,IAAI,OAAO,GAAG,QAAQ,EAAE;AACpC,gBAAI,MAAM,KAAK,CAAC;AAEZ;;AAEJ,wBAAY,OAAM,MAAM;AACxB,2BAAe,OAAO,GAAG;AACzB,gBAAI,UAAU,QAAQ,UAAU,KAAK,QAAQ;AACzC,oBAAM,IAAI,WAAW,uBAAuB,6BAA6B,MAAM,qBACzD,OAAO,kCACrB,OAAM;;;;;AAK9B,aAAO;;AASJ;AACH,mBAAa,SAAO,OAAO,IAAI,YAAS,OAAM,MAAM;AACpD,WAAK;AACL,mBAAa,SAAO,QAAQ,IAAI,YAAU,OAAO,MAAM;AACvD,WAAK;AAEL,UAAI,KAAK,SAAS;AACd,cAAM,IAAI,WAAW,mFAEd,KAAK,UAAU,OAAO,IAAI,YAAS,OAAM;;AAEpD,UAAI,KAAK,SAAS;AACd,cAAM,IAAI,WAAW,oFAEd,KAAK,UAAU,QAAQ,IAAI,YAAU,OAAO;;AAEvD,UAAI,KAAK,SAAS,KAAK,KAAK,SAAS,KAAK,CAAC,aAAiB,MAAM;AAC9D,cAAM,IAAI,WAAW,iFACC,KAAK,0BAA0B,KAAK;;;AAalE;AAEI,wBAAkB;QACd;QAAyB;QACzB;;AAEJ,mBAAa,GAAG,IAAI,QAAQ,QAAQ,EAAE;AAClC,kBAAU,QAAQ;AAClB,qBAAa,QAAQ;AACrB,sBAAc,aAAa;AAC3B,YAAI,QAAQ;AACR;;AAEJ,YAAI,SAAS;AACT,cAAI,EAAE,MAAM,EAAE,MAAM,SAAS,OAAO;AAChC,kBAAM,IAAI,WAAW,2CAA2C,EAAE;;;AAO1E,YAAI,UAAU,QAAQ,UAAU;AAC5B,+BAAqB,EAAE,MAAM,MAAM;AACnC,8BAAoB,MAAM,MAAM;AAChC,uBAAa,GAAG,IAAI,aAAa,QAAQ,EAAE;AACvC,8BAAkB,aAAa;AAC/B,2BAAe,YAAY;AAC3B,gBAAI,UAAU,QAAQ,cAAc;AA
ChC,oBAAM,IAAI,WAAW,8BAA8B,EAAE,2CAC9B;;;;;;AAiC3C,mEAA8D,wBAAwB;AAClF;AACA,UAAI,MAAM,QAAQ;AACd,YAAI,MAAK,WAAW,MAAM;AACtB,gBAAM,IAAI,WAAW,6BAA6B,oIAEP,MAAM,qCACzB,MAAK;;AAEjC,iBAAS;;AAGT,YAAI,MAAM,SAAS;AACf,gBAAM,IAAI,WAAW,qBAAqB,MAAM,UAAU,kFAEnD,KAAK,UAAU,MAAK;;AAE/B,iBAAS,CAAC;;AAEd,UAAI,UAAU;AACV,qBAAa,GAAG,IAAI,MAAM,QAAQ,EAAE;AAChC,cAAI,OAAO,MAAM;AACb;;AAEJ,yBAAc,OAAO;AACrB,cAAI,OAAM,MAAM,WAAW,OAAO,GAAG;AACjC,kBAAM,IAAI,WAAW,uBAAuB,6BAA6B,MAAM,cAChE,OAAO,GAAG,iDACZ,KAAK,UAAU,OAAM;;AAEtC,uBAAa,GAAG,IAAI,OAAO,GAAG,QAAQ,EAAE;AACpC,gBAAI,MAAM,KAAK,CAAC;AACZ;;AAEJ,wBAAY,OAAM,MAAM;AACxB,2BAAe,OAAO,GAAG;AACzB,gBAAI,UAAU;AACV,kBAAI,WAAW;AACX,sBAAM,IAAI,WAAW,uBAAuB,6BACrC,MAAM,oBAAoB,KAAK,UAAU,OAAO,gCAC3B,KAAK,UAAU,OAAM;;;;;;;AAoBlE;AACH,UAAI,WAAW,QAAQ,MAAM,QAAQ,YAAY,QAAQ,WAAW;AAChE,eAAO,YAAY,IAAI,UAAQ;;AAEnC;AACA,UAAI,OAAO,YAAY,YAAY,OAAO,YAAY;AAClD,yBAAiB,CAAC;iBAEb,MAAM,QAAQ,YAAY,OAAO,YAAY;AAClD,yBAAiB;;AAGjB,cAAM,IAAI,UAAU,kGACsB;;AAE9C,UAAI,MAAM,QAAQ;AAEd,eAAO,YAAY,IAAI,UAAQ;;AAI/B,8BAAsB;AACtB,2BAAmB;AACf,8BAAoB,eAAe,eAAe,QAAQ,eAAe,QAAQ;AACjF,cAAI,CAAC,MAAM,QAAQ;AACf,4BAAgB,CAAC;;AAErB,wBAAc,KAAK;;AAEvB,eAAO;;;AAGf,qCAAiC;8BAaA;MAC7B;AACI,cAAM;AACN,aAAK,aAAa;;MAqCtB,yCAAyC,QAAQ;AAC7C,YAAI,CAAC,KAAK;AACN,gBAAM,IAAI,WAAW;;AAIzB,qBAAa,MAAM,YAAY,WAAW;;MAY9C;AACI,YAAI,KAAK,QAAQ;AACb,eAAK,OAAO;;AAEhB,aAAK,OAAO,KAAK;AACjB,YAAI,OAAO,KAAK,cAAc;AAC1B,eAAK,aAAa,aAAwB,KAAK;AAC/C,eAAK,mBAAmB;;AAGxB,cAAI,CAAE,MAAK,qBAAqB;AAC5B,kBAAM,IAAI,WAAW;;AAEzB,eAAK,aAAa,KAAK;AACvB,eAAK,mBAAmB;;AAK5B,4BAAoB;AACpB,YAAI,CAAC,MAAM,QAAQ,KAAK,SAAS,OAAO,KAAK,SAAS,YAClD,OAAO,KAAK,SAAS;AACrB,eAAK,OAAO,KAAK;AACjB,6BAAmB,KAAK;AACpB,gBAAI,KAAK,YAAY,QAAQ,UAAU;AACnC,oBAAM,IAAI,WAAW,sCAAsC,4CAClB,KAAK;;;AAGtD,6BAAmB,KAAK;AACpB,gBAAI,KAAK,KAAK,SAAS;AACnB,sBAAQ,KAAK,WAAW,gIAED;;AAE3B,0BAAc,KAAK,IAAW,KAAK,KAAK;;mBAGvC,MAAM,QAAQ,KAAK;AACxB,cAAI,KAAK,KAAK,WAAW,KAAK,QAAQ;AAClC,kBAAM,IAAI,WAAW,2FACc,KAAK,QAAQ,yCACrB,KAAK;;AAEpC,4BAAkB,KAAK;AACvB,0BAAgB,UAAU,IAAI,OAAK,IAAW;;AAG9C,+BAAqB,IAAW,KAAK;AACrC,eAAK,QAAQ,QAAQ;AACjB,0BAAc,KAAK;;;AAG3B,aAAK,gBAAgB;AACrB,aAAK,kBAAkB;AACvB,aAAK,mBAAmB;AACxB,aAAK,cAAc;AACnB,qBAAa,GAAG,IAAI,KAAK,QAAQ,QAAQ,EAAE;AAEvC,wBAAc,KAAK,qBAAqB;AACxC,uBAAa,KAAK,YAAY;AAC9B,eAAK,gBAAgB,KAAK;AAC1B,eAAK,iBAAiB,KAAK;AAC3B,eAAK,YAAY,KAAK,KAAK,cAAc;;AAI7C,kCAA0B;AAE1B,aAAK,UAAU,KAAK;AAEpB,aAAK,eAAe,CAAC;AACrB,aAAK,iBAAiB;AAKtB,kBAAU,QAAQ;AACd,uBAAa,GAAG,IAAI,KAAK,QAAQ,QAAQ,EAAE;AACvC,gBAAI,kBAAkB,QAAQ,OAAO;AACjC;;AAIJ,iCAAqB,KAAK,cAAc;AACxC,gBAAI,KAAK,QAAQ,SAAS;AACtB,mBAAK,eAAe,KAAK,CAAC,cAAc;AACxC,mBAAK,aAAa,KAAK,KAAK,YAAY,KAAK;;;;AAMzD,8BAAsB,eAAe,KAAK,SAAS,KAAK;AAKxD,6BAAqB;AACjB,cAAI,KAAK,YAAY,SAAS;AAC1B,yBAAa,KAAK,YAAY,eAAe,MAAM;;AAEvD,eAAK,aAAa,KAAK;AACvB,eAAK,eAAe,KAAK,CAAC,cAAc;;AAE5C,kBAAU,UAAU;AAChB,uBAAa,GAAG,IAAI,KAAK,QAAQ,QAAQ,EAAE;AACvC,gBAAI,kBAAkB,QAAQ,OAAO;AACjC;;AAEJ,kCAAsB,cAAc;AAGpC,kCAAsB;AAClB,uCAAyB;AACzB;AACA;AACA;AAEA,mCAAqB;AACjB,oBAAI,OAAO,WAAW,YAClB,CAAC,YAAY,OAAO,gBAAgB,MAAM,QAAQ,YAC9C;AACJ,sCAAoB,KAAK,qBAAqB;AAC9C,sBAAI,YAAY,YAAY,SAAS,OAAO,KACxC,KAAK,cAAc,OAAO;AAE1B,wBAAI,CAAC,YAAY,OAAO,QAAQ,YAAY;AACxC,8BAAQ;+BAEH,CAAC,gBAAgB,MAAM,QAAQ,YAAY;AAChD,8BAAQ;;6BAGP,KAAK,cAAc,OACxB;AAGA,wBAAI,CAAC,YAAY,OAAO,QAAQ,YAAY;AACxC,8BAAQ;+BAEH,CAAC,gBAAgB,MAAM,QAAQ,YAAY;AAChD,8BAAQ;;;AAKZ,wBAAI,CAAC,YAAY,OAAO,QAAQ,YAAY;AACxC,8BAAQ;+BAEH,CAAC,gBAAgB,MAAM,QAAQ,YAAY;AAChD,8BAAQ;;;AAGhB;AACA,sBAAI,CAAC,YAAY,OAAO,QAAQ,YAAY;AACxC,6BAAS;6BAEJ,CAAC,gBAAgB,MAAM,QAAQ,YAAY;AAChD,6BAAS;;AAGb,qCAAmB;AACnB,+BAAa,mBAAmB;;AAGhC,mCAAiB,MAAY;AAE7B,qCAAmB;AACnB,+BACI,mBAAmB,oBAA4B;;AAGvD;AACA,0BAAU,YAAY;AAClB,iCAAe;;AAEnB,6B
AAa,GAAG,YAAY;;;AAGpC,0BAAc;;;AAMtB,aAAK,4BAA4B,KAAK;;MAW1C;AACI,YAAI,KAAK,6BAA6B;AAClC;;AAEJ,YAAI,KAAK,iBAAiB,WACtB,KAAK,0BAA0B;AAC/B,kBAAQ,KAAK;;;MAoCrB,sBAAsB;AAClB,0BAAkB,KAAK,aAAa,OAAO,KAAK,KAAK;AACrD,uBAAe;AAGf,+BAAuB;AACvB,iCAAyB,KAAK,sBAAsB,GAAG,GAAG,gBAAgB;AAC1E;AAGI,sBAAY,iBAAiB,GAAG,OAAO,iBAAiB;AACxD,eAAK;AACL,oBAAU,KAAK;AACf,2BAAiB,KAAK,SAAS,GAAG,KAAK,WAAW,KAAK,SAAS,KAAK;AACrE,iBAAO,iBAAiB;;AAGxB,4BAAkB,iBAAiB,IAAI;AACvC,4BAAkB,iBAAiB,IAAI;;;YAyBzC;AACF,aAAK;AACL,eAAO,gBAAgB,MAAM,SAAS;;MAY1C,mDAAmD;AAC/C;AACA,YAAI,SAAS;AACT,uBAAa;AACb,cAAI,aAAa;AACb,kBAAM,IAAI,WAAW,MAAM,yEACJ;;mBAGtB,OAAO;AACZ,cAAI,MAAM,QAAQ;AACd,yBAAa,IAAI,GAAG,MAAM;;AAG1B,yBAAa,IAAI,MAAM;;;AAI3B,gBAAM,IAAI,WAAW,yDACd;;AAEX,eAAO;;MASX;AACI,YAAI,MAAM,QAAQ,YAAY,QAAQ,WAAW;AAC7C,gBAAM,IAAI,WAAW;;AAEzB,+BAAuB,MAAM,QAAQ;AACrC,4BAAqB,iBAAiB,UAAU,CAAC;AACjD,sCAA8B,KAAK,wBAAwB;AAE3D,yBAAiB,IAAI;AACrB,YAAI,kBAAkB;AAClB,mBAAS,CAAC;;AAEd,YAAI,MAAM,QAAQ;AACd,cAAI,OAAO,WAAW,KAAK,OAAO;AAC9B,kBAAM,IAAI,WAAW,kCAAkC,OAAO,8DAEtD,KAAK,OAAO;;AAExB,uBAAa,GAAG,IAAI,KAAK,OAAO,QAAQ,EAAE;AACtC,qBAAS,IAAI,KAAK,OAAO,IAAI,OAAO;;;AAIxC,+BAAoB,KAAK;AACrB,gCAAoB,OAAO,OAAM;AACjC,gBAAI,eAAe;AACf,oBAAM,IAAI,WAAW,8CAA8C,OAAM;;AAE7E,qBAAS,IAAI,QAAO;;;AAI5B,+BAAuB,QAAQ,uBAAuB;AACtD,eAAO,iBAAiB,iBAAiB,eAAe;;MAK5D;AACI,sCAA8B,aAAa,MAAM,oBAAoB;AACrE,+BAAuB,oBAAoB;AAC3C,4BAAoB,KAAK;AACrB,+BAAqB,MAAM,QAAQ,MAAM,UAAU,MAAM,SAAS,CAAC,MAAM;AACzE,mCAAyB,aAAa,IAAI,YAAU,OAAO;AAC3D,uBAAa,GAAG,IAAI,oBAAoB,QAAQ,EAAE;AAC9C,2BAAc,iBAAiB,QAAQ,oBAAoB;AAC3D,gBAAI,WAAU;AACV,oCAAsB,KAAK,aAAa;AACxC;;AAEJ,gBAAI,qBAAqB;AACrB;;;AAGR,cAAI,qBAAqB;AACrB;;;AAGR,YAAI,mBAAmB;AACnB,iCAAuB;AACvB,gCAAsB,QAAQ;AAC1B,gBAAI,WAAU;AACV,6BAAe,KAAK,oBAAoB;;;AAGhD,gBAAM,IAAI,WAAW,mDACd,KAAK,UAAU;;AAE1B,eAAO;;MAeX,6BAA6B,cAAc;AACvC,eAAO,KAAS;AACZ,6BAAmB,KAAK,gBAAgB;AACxC,cAAI;AACA,kBAAM,IAAI,oBAAoB;;AAMlC,0BAAgB,YAAY,YAAY;AACxC,8BAAoB,KAAK,QAAQ,IAAI,YAAU;AAE/C,gCAAsB,GAAG,aAAa,QAAQ,QAAQ,EAAE;AACpD,8BAAkB,KAAS;AACvB,iCAAmB,QAAQ,YAAY;AACvC,+BAAiB,QAAQ,YAAY;AAGrC,+BAAiB,YAAY,KAAK,YAAY;AAE9C,4BAAc;AACd,kBAAI,MAAM,QAAQ;AACd,6BAAa,GAAG,IAAI,SAAS,QAAQ,EAAE;AACnC,wBAAM,KAAK,CAAE,KAAK,KAAK,OAAO,IAAI,OAAO,SAAS;;;AAItD,sBAAM,KAAK,CAAE,KAAK,KAAK,OAAO,IAAI,OAAO;;AAE7C,+BAAiB,IAAI,SAAS;AAC9B,qBAAO,QAAQ,KAAK,SAAS;;AAEjC,sBAAU,QAAQ,iBAAiB,YAAY,GAAG,KAAK;;AAE3D,iBAAO,iBAAiB,YAAY,IAAI,cAAW,QAAW,UAAS;;;MA8B/E,kBAAkB;AACd,gCAAwB,2BAA2B;AACnD,uBAAe,iBAAiB,KAAK,YAAY,KAAK,iBAAiB;AACvE;AAKI,4BAAkB,KAAK,aAAa,OAAO,KAAK,KAAK;AACrD,yBAAe;AACf,iBAAO,KAAK,YAAY,iBAAiB;;AAGzC,4BAAkB,iBAAiB;;;MAkB3C;AACI,uBAAe,GAAG,KAAK,YAAY,KAAK,iBAAiB;AAGzD,0BAAmB,OAAM,QAAQ,KAAK,EAAE,KAAK,GAAG,MAAM;AACtD,eAAO,KAAK,YAAY,GAAG;;MAE/B,6CAA6C;AAEzC,YAAI,KAAK,cAAc;AACnB,gBAAM,IAAI,aAAa;;AAG3B,6BAAqB;AACrB,qBAAa,GAAG,IAAI,KAAK,iBAAiB,QAAQ,EAAE;AAChD,8BAAoB,KAAK,iBAAiB;AAC1C,yBAAe,KAAK,YAAY;AAChC,cAAI,WAAW;AACX,yBAAa,KAAK,YAAY,MAAM,GAAG,YAAY,SAAS,GAAG,OAAO,CAAC;;AAIvE,yBAAa,KAAK;;;AAG1B,YAAI,qBAAqB,GAAG,KAAK,gBAAgB,KAAK,iBAAiB,OAAO;AAC9E,YAAI,qBAAqB,GAAG,KAAK,iBAAiB,cAAc,OAAO;AAEvE,0BAAkB,GAAG,GAAG;AAExB,wCAAgC,GAAG,KAAK,aAAa,KAAK;AAC1D,YAAI,KAAK,YAAY,aAAa,QAAQ,YAAY;AAClD,cAAI,EAAE,GAAG,MAAM,KAAK,cAAc;AAC9B,kBAAM,IAAI,WAAW,mHAEd,qBAAqB,EAAE,GAAG,MAAM;;;AAG/C,eAAO,CAAC,GAAG;;YAET,sEAAsE;AACxE,yCAAiC,KAAK,sBAAsB,GAAG,GAAG,gBAAgB;AAElF,YAAI,gBAAgB;AAChB,gBAAM,IAAI,MAAM;;AAEpB,oCAA4B;AAC5B,YAAI,eAAe;AACf,+BAAqB,wBAAwB,aAAa,KAAK;AAC/D,kCAAwB;AACxB,uBAAa,GAAG,IAAI,aAAa,QAAQ,EAAE;AACvC,kCAAsB,KAAK,MAAM,mBAAmB,WAAW,IAAI,MAAM,aAAa;;;AAI9F,eAAO,CAAC,YAAY,YAAY;;MAapC,sCAAsC;AAClC,eAAO,KAAS;AACZ,6BAAmB,KAAK,gBAAgB,KAAK,WAAW,OAAO;AAC/D,uBAAa;AACb,cAAI,UAAU;AACV
,kBAAM,IAAI,oBAAoB;;AAGlC,cAAI,SAAS;AACT,kBAAM,IAAI,oBAAoB;;AAG9B,4BAAgB,YAAY,YAAY;AACxC,+BAAmB,UAAS,QAAM,GAAG;AACrC,kCAAsB,GAAG,aAAa,QAAQ,QAAQ,EAAE;AACpD,iCAAmB,QAAQ,YAAY;AACvC,+BAAiB,QAAQ,YAAY;AACrC,+BAAiB,oBAAsB,YAAY,YAAY,WAAW;AAG1E,+BAAiB,qBAAqB,KAAK;AAC3C,gCAAkB,EAAE;AACpB,kBAAI,eAAe;AACf,6BAAa,GAAG,IAAI,UAAU,QAAQ,EAAE;AACpC,uBAAK,KAAK,QAAO;;;AAGzB,2BAAa,GAAG,IAAI,UAAU,QAAQ,EAAE;AACpC,iCAAiB,UAAU;AAC3B,qBAAK,KACD,MAAQ,KAAK,IAAI,KAAQ,WAAW,YAAY;;;AAG5D,yBAAa,GAAG,IAAI,KAAK,QAAQ,EAAE;AAC/B,mBAAK,KAAK,IAAQ,KAAK,IAAI;;;AAGnC,iBAAO;;;MAGf;AACI,0BAAkB,KAAK;AAGvB,iCAAyB;AACzB,qBAAa,GAAG,IAAI,UAAU,QAAQ,EAAE;AACpC,wBAAc,UAAU;AACxB,yBAAe;AACf,cAAI,MAAM,WAAW,SAAS;AAC1B,6BAAiB,MAAM,UAAU,MAAM,GAAG,IAAI;AAC9C,wBAAY,IAAI;;AAEpB,2BAAiB,KAAK;;AAE1B,eAAO;;MAYX;AACI,eAAO;AACH,6BAAmB;AACnB,yBAAe,MAAK,MAAM,GAAG,KAAK,OAAO;AACzC,0BAAgB,MAAK,MAAM,KAAK,OAAO,QAAQ,KAAK,OAAO,SAAS,KAAK,QAAQ;AACjF,gCAAsB,MAAK,MAAM,KAAK,OAAO,SAAS,KAAK,QAAQ,QAAQ,KAAK,OAAO,SAAS,KAAK,QAAQ,SAAS;AACtH,gCAAsB;AAItB,oCAA0B;AACtB,0BAAc;AACd,yBAAa,GAAG,IAAI,KAAK,OAAO,QAAQ,EAAE;AACtC,oBAAM,KAAK,CAAE,KAAK,KAAK,OAAO,IAAI,OAAO,OAAO;;AAEpD,6BAAiB,IAAI,SAAS;AAC9B,4BAAgB,QAAQ,KAAK,SAAS,UAAU,CAAE,UAAY;AAG9D;AACA,yBAAa,GAAG,IAAI,KAAK,cAAc,QAAQ,EAAE;AAC7C,mCAAqB,KAAK,cAAc;AACxC,yBAAW,aAAa,QAAQ,IAAI,QAAQ;AAC5C,kBAAI,cAAc,MAAM;AACpB,uBAAO,sBAAoB,MAAM,cAAc;;AAGnD,+BAAiB,KAAS;AAE1B,yBAAW,KAAK;AAChB,kBAAI,MAAM;AACN,4BAAY;;AAGZ,4BAAY,MAAQ,WAAW;;;AAMvC,yBAAa,GAAG,IAAI,KAAK,eAAe,QAAQ,EAAE;AAC9C;AACA,kBAAI,KAAK,QAAQ,SAAS,KAAK,IAAI,KAAK,QAAQ;AAC5C,iCAAiB,WAAW;;AAG5B,+BAAe,KAAK,eAAe,GAAG;AACtC,oCAAoB,KAAK,eAAe,GAAG;AAC3C,iCACI,KAAS,OAAO,QAAQ,cAAc,QAAQ;;AAEtD,mBAAS;AAET,4BAAc,KAAK;;AAEvB,wBAAY,KAAS;AAErB,iBAAK,kBAAkB,QAAQ;AAC3B,0BAAY,MAAQ,WAAW;;AAEnC,mBAAO;;AAEX,4BAAkB,KAAK,0BAA0B,IAAI,WAAS,MAAM;AACpE,6BAAmB;AACnB,iCAAuB,KAAK,WAAW,SAAS,mBAAmB,YAAY;AAC/E,iBAAO,CAAC,gBAAgB,OAAO;;;MAQvC;AACI,aAAK,eAAe;AAChB,iBAAO,KAAS;AACZ,+BAAmB;AACnB;AACA,2BAAe,MAAK,MAAM,GAAG,KAAK,OAAO;AACzC,4BAAgB,MAAK,MAAM,KAAK,OAAO,QAAQ,KAAK,OAAO,SAAS,KAAK,QAAQ;AACjF,0BAAc;AACd,yBAAa,GAAG,IAAI,KAAK,OAAO,QAAQ,EAAE;AACtC,oBAAM,KAAK,CAAE,KAAK,KAAK,OAAO,IAAI,OAAO,OAAO;;AAEpD,6BAAiB,IAAI,SAAS;AAC9B,4BAAgB,QAAQ,KAAK,SAAS;AAEtC,yBAAa,GAAG,IAAI,KAAK,cAAc,QAAQ,EAAE;AAC7C,mCAAqB,KAAK,cAAc;AAGxC,2BAAa,KAAS,aAAa,QAAQ,IAAI,QAAQ;AACvD,kBAAI,MAAM;AACN,4BAAY;;AAGZ,4BAAY,MAAQ,WAAW;;AAEnC,yBAAW,KAAK;;AAGpB,yBAAa,GAAG,IAAI,KAAK,eAAe,QAAQ,EAAE;AAC9C,6BAAe,KAAK,eAAe,GAAG;AACtC,kCAAoB,KAAK,eAAe,GAAG;AAE3C,iCAAmB,KAAS,OAAO,QAAQ,cAAc,QAAQ;AACjE,yBAAW,KAAK;;AAEpB,mBAAO;;;;YAsCb,iBAAiB;AACnB,eAAO,WAAW,MAAM,GAAG,GAAG;;YAyB5B;AACF,eAAO,WAAW,MAAM,SAAS;;YAyB/B;AAGF,+BAAuB,MAAM,KAAK,oBAAoB,GAAG;AACzD,uBAAe,eAAe;AAC9B,wBAAgB,eAAe;AAC/B,8BAAsB,KAAK;AAC3B,wBAAe,cAAc,OAAO,OAAO;AAC3C,2BAAmB;AACnB,2BAAmB;AACf,oBAAU,MAAM,KAAK;AACrB,qBAAW,KAAK,EAAE;;AAEtB,gBAAY;AACZ,eAAO,iBAAiB;;MAW5B;AACI,6BAAqB;AACrB,8BAAsB,WAAU,QAAQ,QAAO;AAC/C,wBAAgB,gBAAgB,KAAK,mBAAmB,KAAK;AAC7D,6BAAqB,KAAK,WAAW;AACrC,qBAAa,GAAG,IAAI,QAAQ,QAAQ,EAAE;AAClC,cAAI,iBAAiB,CAAC,QAAQ,GAAG;AAE7B;;AAEJ,uBAAa,KAAK,CAAE,MAAM,QAAQ,GAAG,cAAc,QAAQ,aAAa;;AAE5E,eAAO;;UAgCP;AACA,aAAK,gBAAgB;;UAErB;AACA,eAAO,KAAK;;UAEZ;AACA,eAAO,KAAK;;UAEZ;AACA,YAAI,KAAK,eAAe;AACpB,eAAK,aAAa;AAClB,eAAK,mBAAmB;;;MAGhC;AACI,uBAAe,MAAM;AACrB,YAAI,OAAO,yBAAyB,KAAK,KAAK,aAAa,QACvD,KAAK;AACL,mDAAyC,SAAa;AACtD,eAAK,WAAW;AAChB,iBAAO,wBACH,mCAAmC,SAAa;;AAExD,eAAO;;MAEX;AACI;AACA,YAAI,OAAO,KAAK,SAAS;AACrB,sBAAY,YAAY,KAAK;mBAExB,MAAM,QAAQ,KAAK;AACxB,6BAAmB,KAAK;AACpB,gBAAI,OAAO,SAAS;AAChB,oBAAM,IAAI,MAAM;;;AAGxB,sBAAY,KAAK,KAAK,IAAI,UAAQ,YAAY;;AAG9C,8BAAoB,OAAO,KAAK,KAAK;AACrC,sBAAY;AACZ,0BAAe,KAAK;AACpB,mCAA
yB;AACrB,gBAAI,OAAO,QAAO,gBAAgB;AAC9B,wBAAU,cACN,YAAY,QAAO;;AAGvB,oBAAM,IAAI,MAAM;;;;AAI5B,eAAO;;MAEX;AACI,YAAI,OAAO,KAAK,YAAY,YACxB,OAAO,KAAK,YAAY;AACxB,iBAAO,CAAC,YAAY,oBAA4B,KAAK;mBAEhD,MAAM,QAAQ,KAAK;AACxB,iBAAO,KAAK,QAAQ,IAAI,YAAU,YAAY,oBAA4B;;AAG1E,qCAA2B;AAC3B,4BAAkB,KAAK;AACnB,+BAAmB,OACf,YAAY,oBAA4B,KAAK,QAAQ;;AAE7D,iBAAO;;;MAGf;AACI,eAAO;UACH,MAAM,KAAK;UACX,SAAS,KAAK;UACd,kBAAkB;YACd,YAAY,KAAK,UAAU;YAC3B,QAAQ,KAAK,UAAU;;;;MAOnC;AACI,YAAI,eAAe,oBAAoB;AACnC,gBAAM,IAAI,MAAM;;AAEpB,YAAI,eAAe,gBAAgB;AAC/B,gBAAM,IAAI,MAAM;;AAEpB,YAAI,eAAe,sBAAsB;AACrC,gBAAM,IAAI,MAAM;;AAEpB,yBAAiB,oBAAoB,eAAe;AACpD,0BAAkB,YAAY;AAC9B;AACA,YAAI,OAAO,eAAe,SAAS;AAC/B,iBAAO,YAAY,eAAe;mBAE7B,MAAM,QAAQ,eAAe;AAClC,iBAAO,eAAe,KAAK,IAAI,eAAa,YAAY;mBAEnD,eAAe,QAAQ;AAC5B,iBAAO;AACP,4BAAkB,eAAe;AAC7B,iBAAK,OAAO,YAAY,eAAe,KAAK;;;AAGpD;AACA,YAAI,MAAM,QAAQ,eAAe;AAC7B,oBAAU,eAAe,QAAQ,IAAI,YAAU,YAAY;mBAEtD,eAAe,WAAW;AAC/B,oBAAU;AACV,4BAAkB,eAAe;AAC7B,oBAAQ,OAAO,YAAY,eAAe,QAAQ;;;AAG1D,aAAK,QAAQ,CAAE,MAAM,SAAS;;YAmF5B;AACF,YAAI,OAAO,iBAAiB;AACxB,2BAAiB,gBAAmB;AACpC,cAAI,SAAS,WAAW;AACpB,kBAAM,IAAI,WAAW,0CAA0C;qBAE1D,SAAS,SAAS;AACvB,kBAAM,IAAI,WAAW,wBAAwB,SAAS,kCAC1C;;AAEhB,yBAAe,SAAS;;AAE5B,YAAI,aAAa,QAAQ;AACrB,gBAAM,IAAI,WAAW;;AAGzB,mCAA2B,MAAM,cAAiB,KAAK,gBAAgB;AACvE,6BAAqB;AACrB,0BAAkB;AAClB,4BAAoB,KAAK,OAAO,WAAW;AAC3C,+BAAuB;UACnB,eAAe;UACf,QAAQ;UACR,aAAa,8BAA8B;UAC3C,aAAa;;AAEjB,iCAAyB,WAAU,OAAO,QAAQ,QAAO;AACzD,YAAI,oBAAoB,KAAK,aAAa;AACtC,yBAAe,iBAAiB,KAAK;AACrC,6BAAmB;AACnB,iBAAQ,2BAA2B,+BAAgC,MAAM,cAAiB,MAAM,KAAK,UAAU,cAAc;AAC7H,6BAAmB,MAAM,KAAK,GAAG;AACjC,6BAAmB,OAAO,wBAA2B,CAAC,mBAAmB,MAAM;;AAEnF,YAAI,KAAK,uBAAuB;AAE5B,4BAAkB;AAClB,mCAAyB,KAAK,qBAAqB,KAAK,MAAM;AAC9D,yBAAe,sBAAsB,KAAK;;AAE9C,uBAAe,aAAa,mBAAmB;AAC/C,uBAAe,cAAc,mBAAmB;AAChD,eAAO,aAAa,KAAK;;MAU7B;AACI,iCAAyB,qBAAqB,KAAK;AACnD,aAAK,sBAAsB;;MAa/B;AACI,eAAO,KAAK;;;AAMpB,gBAAY,YAAY;AACxB,kBAA4B;6BAQI;;AAEhC,eAAW,YAAY;AACvB,kBAA4B;ACzkD5B;;;;;;;;;AAiDO;AACH,UAAI,CAAE,oBAAmB;AACrB,gCAAwB,CAAE,eAAe;;AAE7C,8BAAwB;AACxB,0BAAoB,sBAAsB;AAC1C,UAAI,cAAc,mBAAmB;AAMjC,wBAAgB,cAAc;;AAElC,uBAAiB,oBAAoB;AACrC,qBAAc,YAAY,UAAU;AACpC,UAAI,sBAAsB,mBAAmB;AAIzC,6BAAqB,MAAM,YAAe,sBAAsB,iBAAiB,sBAAsB,YAAY,OAAM,QAAQ,IAAI,YAAU,OAAO;AAEtJ,mCAA2B;AAC3B,6BAAqB,OAAM;AACvB,6BAAmB,OAAO,gBACtB,aAAa,OAAO;;AAE5B,eAAM,YAAY;AAElB,gBAAQ;;AAEZ,aAAO;;AA0FJ;AACH,UAAI,WAAW;AACX,kBAAU;;AAEd,UAAI,OAAO,oBAAoB;AAC3B,yBAAiB,gBAAmB,iBAAiB;AACrD,YAAI,SAAS,WAAW;AAKpB,mBAAS,KAAK,mBAAsB,iBAAiB;mBAEhD,SAAS,SAAS;AACvB,gBAAM,IAAI,WAAW,wBAAwB,SAAS,kCAC1C;;AAEhB,0BAAkB,SAAS;;AAE/B,aAAO,6BAA6B,iBAAiB,QAAW;;AAY7D;AACH,UAAI,WAAW;AACX,kBAAU;;AAEd,UAAI,QAAQ,QAAQ;AAChB,cAAM,IAAI,WAAW;;AAGzB,wBAAkB,MAAM,QAAQ;AAChC,0BAAoB,UAAU;AAC9B,UAAI,cAAc,mBAAmB;AACjC,wBAAgB,cAAc;;AAElC,qBAAe,QAAQ,UAAU,OAAO,OAAO,QAAQ;AAMvD,6BAAuB,UAAU,cAAc,QAAQ,UAAU,eAAe,QAAQ;AACxF,qBAAc,YAAY,oBAAoB,gBAAgB,eAAe;AAC7E,6BAAuB,UAAU;AACjC,UAAI,kBAAkB;AAClB,eAAM,mBAAmB;;AAE7B,UAAI,UAAU,uBAAuB;AACjC,eAAM,uBAAuB,UAAU;;AAG3C,UAAI,UAAU,cAAc;AAExB,YAAI,UAAU,eAAe;AACzB,gBAAM,IAAI,WAAW;;AAGzB,eAAQ,cAAc,oBAAqB,+BAA+B,UAAU,YAAY,UAAU;AAC1G,eAAM,YAAY,cAAc;AAChC,YAAI,OAAM,aAAa,QAAQ,iBAAiB,SAAS;AACrD,gBAAM,OAAM,UAAU,WAAW;;AAGrC,gBAAQ;AACR,gBAAQ,iBAAiB,IAAI,OAAK,EAAE;;AAExC,aAAO;;AAEX;AACI,0BAAoB,cAAiB,SAAQ;AAC7C,2BAAqB;AACrB,+BAAyB;AACzB,YAAM,QAAQ;AACV,YAAI,KAAK,UAAU;AACf,2BAAiB,KAAK,CAAE,MAAM,KAAK,MAAM,QAAQ,YAAY,KAAK;;AAGlE,uBAAa,KAAK,QAAQ,YAAY,KAAK;;;AAGnD,aAAO,CAAE,cAAc;;6BA4BK;MAC5B;AACI,cAAM,CAAE,QAAQ,IAAI,SAAS;AAC7B,eAAO,QAAQ;AACf,aAAK,YAAY;AACjB,aAAK,QAAQ;AAEb,aAAK,OAAQ,KAAK,QAAQ,OAAQ,KAAK,OAAO,OAAO;AAErD,YAAI,KAAK,UAAU;AACf,8BAAoB,KAAK;AACrB,iBAAK,IAAI;;;;M
AMrB;AACI,sBAAc,MAAM,aAAa,GAAG,cAAc,GAAG;AACrD,YAAI,MAAM,KAAK,OAAK,IAAI;AACpB,gBAAM,IAAI,WAAW,kDACd,MAAM,0BACN,MAAM,aAAa,GAAG,aAAa,GAAG;;;MAwBrD;AACI,qCAA6B,iBAAiB,cAAc,iBAAiB;AAC7E;AACA,YAAI;AACA,uBAAa;AACb,cAAI,WAAW,QAAQ,WAAW;AAC9B,kBAAM,IAAI,WAAW;;AAKzB,cAAI,WAAW,OAAO,WAAW;AAC7B,kBAAM,IAAI,WAAW;;;AAM7B,YAAI,KAAK,QAAQ,WAAW;AAExB,cAAI,MAAM,aAAa,WAAW;AAE9B,gBAAI,MAAM,mBAAmB;AACzB,oBAAM,IAAI,WAAW;;AAIzB,sBAAU,MAAM;cACZ,YAAY,MAAM;cAClB,OAAO,MAAM;cACb,MAAM,MAAM,OAAO;;AAIvB,kBAAM,MAAM;;AAEhB,cAAI;AACA,iBAAK,UAAU,WAAW;AAC1B,iBAAK,SAAS,WAAW;;AAGzB,gBAAI,MAAM,aAAa,WAAW;AAC9B,oBAAM,IAAI,WAAW,gHACuC,MAAM,kBACjD,MAAM,aAAa;;AAGxC,gBAAI,MAAM,aAAa,GAAG,cAAc,WAAW;AAC/C,oBAAM,IAAI,WAAW;;AAKzB,iBAAK,WAAW;AAChB,iBAAK,UAAU,CAAC,MAAM,aAAa,GAAG,cAAc;AACpD,iBAAK,SAAS,gBAAgB,KAAK,QAAQ;;AAE/C,eAAK,eAAe;AAKpB,cAAI,KAAK;YACL,eAAe;YACf,eAAe;YACf,aAAa;YACb,eAAe;YACf,cAAc,KAAK;YACnB,eAAe,KAAK;YAEpB,YAAY,aAA2B,MAAM,KAAK,OAAO;YACzD,aAAa,CAAC;YACd,aAAa,KAAK,OAAO,IAAI,OAAK,EAAE;YACpC,cAAc,KAAK,QAAQ,GAAG;;;AAIlC,+BAAqB,MAAM,MAAM,KAAK,QAAQ;AAC9C,cAAI,MAAM,QAAQ;AACd,kBAAM,IAAI,UAAU;;AAKxB,eAAK,WAAW;AAChB,eAAK,UAAU,CAAC;AAEhB,eAAK,aAAa,GAAG,gBAAgB,KAAK;AAC1C,eAAK,aAAa,GAAG,eAAe,CAAC,KAAK,QAAQ,GAAG;;AAEzD,aAAK,OAAO,KAAK;AACjB,aAAK,QAAQ;;MAOjB;AACI,YAAI,KAAK,OAAO,WAAW;AACvB,gBAAM,IAAI,UAAU;;AAExB,aAAK,OAAO;AACZ,YAAI,KAAK,OAAO,WAAW;AACvB,eAAK,UAAU;AACf,eAAK,eAAe;AACpB,eAAK,gBAAgB;;AAGrB,iCAAuB,KAAK,OAAO,SAAS;AAC5C,eAAK,OAAO,gBAAgB,gBAAgB;AAC5C,eAAK,UAAU,CAAC,KAAK,OAAO,gBAAgB;AAE5C,eAAK,aAAa,GAAG,gBAAgB,KAAK;AAC1C,eAAK,aAAa,GAAG,eAAe,CAAC,KAAK,QAAQ,GAAG;;;MAG7D;AACI,YAAI,KAAK,SAAS;AACd,eAAK;;AAET,eAAO,KAAK,MAAM,KAAK,QAAQ;;MAEnC;AAGI,2BAAmB;AACnB,YAAI,KAAK,OAAO,WAAW,KAAK,KAAK,QAAQ,WAAW;AACpD,gBAAM,IAAI,UAAU;;AAIxB,aAAK,QAAQ,IAAI,YAAY;UACzB,QAAQ,KAAK;UACb,SAAS,KAAK,QAAQ;UACtB,MAAM,KAAK,OAAO;;AAEtB,aAAK,MAAM,YAAY,KAAK;AAE5B,aAAK,kBAAkB,KAAK,MAAM;AAElC,aAAK,cAAc,KAAK,MAAM;AAC9B,aAAK,yBAAyB,KAAK,MAAM;AACzC,aAAK,2BAA2B,KAAK,MAAM;AAC3C,aAAK,eAAe,KAAK,MAAM;AAC/B,aAAK,0BAA0B,KAAK,MAAM;AAC1C,aAAK,4BAA4B,KAAK,MAAM;AAC5C,aAAK,eAAe,KAAK,MAAM;AAC/B,aAAK,iBAAiB,KAAK,MAAM;AACjC,aAAK,cAAc,KAAK,MAAM;AAC9B,aAAK,aAAa,KAAK,MAAM;AAG7B,aAAK,QAAQ;;MAEjB;AACI,YAAI,CAAC,KAAK;AACN,eAAK;;AAET,eAAO,MAAM;;MAgCjB,yCAAyC,QAAQ;AAC7C,YAAI,CAAC,KAAK;AACN,eAAK;;AAET,cAAM,QAAQ,YAAY,WAAW;;MAQzC;AACI,YAAI,KAAK,SAAS;AACd,eAAK;;AAET,aAAK,MAAM,WAAW;;MAkC1B,sBAAsB;AAClB,YAAI,CAAC,KAAK;AACN,gBAAM,IAAI,aAAa;;AAE3B,eAAO,KAAK,MAAM,SAAS,GAAG,GAAG;;YAwB/B;AACF,YAAI,CAAC,KAAK;AACN,gBAAM,IAAI,aAAa;;AAE3B,eAAO,KAAK,MAAM,gBAAgB,SAAS;;MA6B/C,kBAAkB;AACd,YAAI,KAAK,SAAS;AACd,eAAK;;AAET,eAAO,KAAK,MAAM,QAAQ,GAAG;;MASjC;AACI,YAAI,KAAK,SAAS;AACd,eAAK;;AAET,eAAO,KAAK,MAAM,eAAe;;MAOrC;AACI,aAAK;AACL,aAAK,MAAM,QAAQ;AACnB,aAAK,aAAa,KAAK,MAAM;AAE7B,aAAK,mBAAmB,KAAK,MAAM;AACnC,aAAK,OAAO,KAAK,MAAM;AACvB,aAAK,UAAU,KAAK,MAAM;AAG1B,aAAK,iBAAiB,KAAK,MAAM;AACjC,aAAK,eAAe,KAAK,MAAM;;UAG/B;AACA,eAAO,KAAK,SAAS,OAAO,SAAY,KAAK,MAAM;;UAEnD;AACA,aAAK,MAAM,YAAY;;YAiCrB,iBAAiB;AACnB,YAAI,CAAC,KAAK;AACN,gBAAM,IAAI,aAAa;;AAG3B,eAAO,KAAK,MAAM,IAAI,GAAG,GAAG;;YAuF1B;AACF,YAAI,CAAC,KAAK;AACN,gBAAM,IAAI,aAAa;;AAG3B,eAAO,KAAK,MAAM,WAAW,SAAS;;YAyBpC;AACF,eAAO,KAAK,MAAM,aAAa,GAAG;;aAI/B,yCAAwC,qBAAqB;AAChE;AACA,+BAAuB;AACvB,YAAI,mBAAkB;AAClB,cAAI,CAAE,SAAO,GAAG,aAAa,SACzB,QAAO,GAAG,iBAAiB;AAC3B,kBAAM,IAAI,WAAW;;AAEzB,wBAAc;;AAGd,kBAAY,QAAO,aAAa,MAAM,MAAM;AAE5C,wBAAc,QAAO;AACrB,iBAAO,QAAO;AACd,6BAAmB;;AAEvB,uBAAc,IAAI,IAAI;AACtB,YAAI,CAAE,mBAAiB;AACnB,gBAAM,IAAI,oBAAoB,yDAAyD;;AAE3F,2BAAmB;AACf,iCAAsB;AACtB,wBAAc,YAAY,MAAM,gBAAe;AAC/C,cAAI;AACA,kBAAM,6BAA6B;;AAEvC,iBAAM,IAAI;;AAEd,eAAO;;UA8BP;AAGA,YAAI,KAAK,SAAS;AACd,g
K,aAAa,KAAK,UAAU,eAAe,OAAO,MAAM,KAAK,uBAAuB,MAAM;AAC/F,aAAK,iBAAiB,KAAK,UAAU,mBAAmB,OAAO,MAAM,KAAK,2BAA2B,MAAM;AAC3G,aAAK,QAAQ;;MAEjB;AACI,eAAO,KAAK;AACR,2BAAiB,OAAO,eAAe,OAAO,QAAQ,OAAO;AAC7D,yBAAc,oBAAoB;AAClC,6BAAmB,OAAM;AACzB,uBAAa,WAAW;AACxB,gCAAsB,QAAiB,GAAG;AAC1C,uBAAa,KAAK,QAAQ,IAAI,KAAK,OAAQ,KAAK,OAAO;AACvD,wBAAc,OAAO,MAAM;AAC3B,iCAAuB,aAA2B,GAAG;AACrD,yBAAe,QAAQ,WAAW;AAClC,sCAA4B,cAAc;AAC1C,8BAAoB;AACpB,oCAA0B,CAAC,aAAiB,qBAAqB,QAAiB,GAAG,MAAM,MAAM,GAAG,OAAO;AAC3G,qCAA2B;AACvB,gBAAI;AACA,0CAA4B,KAAK,WAAW,OAAO,QAAQ;AAC3D,8CAAgC,KAAK,eAAe,OAAO,QAAQ;AACnE,oCAAsB,KAAK,SAAS,KAAK,KAAK,OAAO,QAAQ,kBAAkB;AAC/E,qCAAuB,KAAK,QAAQ,KAAK,MAAM,OAAO,QAAQ,kBAAkB;AAChF,qBAAO,mBAAmB,QAAO,qBAAqB,yBAAyB,eAAe,gBAAgB,KAAK;;AAGnH,qBAAO,mBAAmB,QAAO,KAAK,WAAW,QAAQ,KAAK,eAAe,QAAQ,KAAK,QAAQ,OAAO,OAAO,KAAK,KAAK,QAAQ,KAAK,SAAS,OAAO,OAAO,KAAK,MAAM,QAAQ,KAAK;;;AAG9L,cAAI,CAAC;AACD,mBAAO;;AAEX,qDAAyC,yBAAyB,QAAO,KAAK,MAAM,QAAQ,KAAK,KAAK,QAAQ,eAAe,KAAK;AAClI,kCAAwB;AACpB,iBAAS;AACL,4BAAc,IAAI;AAClB,gCAAkB,UAAS;AAC3B,kCAAoB,UAAU,IAAI,OAAO,IAAI;AAC7C,wBAAS,MAAM,UAAU,IAAI;;;AASrC,8CAAoC;AAChC,4BAAgB,KAAK,YAAY,OAAM,KAAK;AAC5C,4BAAgB,KAAK,gBAAgB,WAAU,KAAK;;AAExD;AACA,iBAAO;;;MAGf;AACI,wBAAe;UACX,MAAM,KAAK;UACX,UAAU,KAAK;UACf,SAAS,KAAK;UACd,QAAQ,KAAK;UACb,OAAO,KAAK;UACZ,iBAAiB,qBAAqB,KAAK;UAC3C,kBAAkB,qBAAqB,KAAK;UAC5C,uBAAuB,qBAAqB,KAAK;UACjD,2BAA2B,qBAAqB,KAAK;UACrD,iBAAiB,qBAAqB,KAAK;UAC3C,kBAAkB,qBAAqB,KAAK;UAC5C,gBAAgB,oBAAoB,KAAK;UACzC,iBAAiB,oBAAoB,KAAK;;AAE9C,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;AAIf,uBAAmB,YAAY;AAC/B,kBAA4B;qCACY;MACpC;AACI,YAAI,QAAQ;AACR,iBAAO;;AAEX,cAAM;AACN,aAAK,OAAO,KAAK,QAAQ,OAAO,KAAK,KAAK;AAC1C,YAAI,OAAO,KAAK,SAAS;AACrB,cAAI,CAAC,OAAO,UAAU,KAAK;AACvB,kBAAM,IAAI,MAAM,gDAAgD,KAAK;;mBAGpE,MAAM,QAAQ,KAAK;AACxB,6BAAmB,KAAK;AACpB,gBAAI,CAAC,OAAO,UAAU;AAClB,oBAAM,IAAI,MAAM,0DACI,KAAK,UAAU,KAAK;;;;AAKhD,gBAAM,IAAI,MAAM,wEACI,KAAK,UAAU,KAAK;;AAE5C,aAAK,UAAU,KAAK,WAAW,OAAO,OAAO,KAAK;AAClD,aAAK,SAAS,KAAK,UAAU,OAAO,OAAO,KAAK;AAChD,aAAK,QAAQ,KAAK,SAAS,OAAO,OAAO,KAAK;AAC9C,aAAK,kBAAkB,eAAe,KAAK,mBAAmB;AAC9D,aAAK,mBAAmB,eAAe,KAAK,oBAAoB;AAChE,aAAK,kBAAkB,eAAe,KAAK;AAC3C,aAAK,mBAAmB,eAAe,KAAK;AAC5C,aAAK,kBAAkB;;MAE3B;AACI,qBAAa,mBAAmB;AAChC,sBAAc,WAAW;AAEzB,YAAI,OAAO,KAAK,SAAS;AACrB,eAAK,OAAO,CAAC,KAAK;;AAEtB,qBAAa,GAAG,IAAI,KAAK,KAAK,QAAQ,EAAE;AACpC,cAAI,KAAK,KAAK,KAAK;AACf,iBAAK,KAAK,MAAM;;;AAIxB,2BAAmB,KAAK;AACpB,cAAI,OAAO,KAAK,QAAQ;AACpB,kBAAM,IAAI,MAAM,iBAAiB;;;AAGzC,YAAI,KAAK,KAAK,WAAW,SAAqB,KAAK,MAAM;AACrD,gBAAM,IAAI,MAAM,4BAA4B,KAAK;;AAErD,2BAAmB,KAAK,KAAK,IAAI,UAAQ,WAAW;AACpD,0BAAkB;AAClB,YAAI,KAAK;AACL,eAAK,QAAQ,KAAK,UAAU,SAAS,YAAY,WAAW,KAAK,kBAAkB,KAAK,kBAAkB;;AAG1G,eAAK,QAAQ;;AAEjB,YAAI,KAAK;AACL,eAAK,OAAO,KAAK,UAAU,QAAQ,YAAY,WAAW,KAAK,iBAAiB,KAAK,iBAAiB;;AAGtG,eAAK,OAAO;;AAEhB,aAAK,QAAQ;;MAEjB;AACI,uBAAc,oBAAoB;AAClC,2BAAmB,OAAM;AACzB,sBAAc,WAAW;AACzB,eAAO,KAAK;AACR,2BAAiB;AACjB,eAAM,aAAM,uBAAa,QAAQ,QAAO,KAAK,MAAM;AACnD,iCAAuB,aAA2B,GAAG;AACrD,4BAAkB,KAAK;AACnB,2BAAe,OAAO,WAAW;;AAErC,4BAAkB;AACd,gBAAI,KAAK,QAAQ,EAAE,MAAM,WAAW,SAChC,KAAK,SAAS,CAAC,QAAQ;AACvB,qBAAO,EAAE,QAAQ;;AAGjB,qBAAO;;;AAGf,uBAAY,UAAU,KAAK,MAAM;AACjC,uBAAa,UAAU,KAAK,KAAK;AAOjC,gCAAsB;AACtB,oCAA0B;AAC1B,uBAAa,GAAG,IAAI,OAAO,EAAE;AACzB,gBAAI,KAAK,KAAK,QAAQ,OAAO;AACzB,4BAAc,KAAK,WAAW;AAC9B,gCAAkB,KAAK;;AAGvB,4BAAc,KAAK;AACnB,gCAAkB,KAAK,WAAW;;;AAG1C,kBAAO,MAAK,KAAK;AACjB,sBAAW,UAAS,KAAK;AACzB,mBAAQ,OAAM,KAAK;AACnB,mBAAS,OAAO,KAAK;AACrB,iBAAO,mBAAmB,QAAO,OAAM,WAAU,QAAQ,QAAO,KAAK;;;MAG7E;AACI,wBAAe;UACX,MAAM,KAAK;UACX,SAAS,KAAK;UACd,QAAQ,KAAK;UACb,OAAO,KAAK;UACZ,iBAAiB,qBAAqB,KAAK;UAC3C,kBAAkB,qBAAqB,KAAK;UAC5C,iBAAiB,qB
AAqB,KAAK;UAC3C,kBAAkB,qBAAqB,KAAK;;AAEhD,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;AAIf,uBAAmB,YAAY;AAC/B,kBAA4B;AC1Y5B;;;;;;;;;AA4BO;AACH,aAAO,KAAK;AACR,YAAI,EAAE,SAAS;AACX,gBAAM,IAAI,WAAW,kEACd,EAAE;;AAEb,YAAI,WAAW;AACX,oBAAU,CAAC,GAAG;;AAElB,YAAI,QAAQ,WAAW;AACnB,gBAAM,IAAI,WAAW,+FACe,QAAQ;;AAEhD,wBAAgB,CAAC,CAAC,GAAG,IAAI,SAAS,CAAC,GAAG;AACtC,eAAO,KAAQ,GAAG;;;AAanB;AACH,aAAO,KAAK;AACR,YAAI,EAAE,SAAS;AACX,gBAAM,IAAI,WAAW,kEACd,EAAE;;AAEb,YAAI,WAAW;AACX,oBAAU,CAAC,CAAC,GAAG,IAAI,CAAC,GAAG;;AAE3B,YAAI,QAAQ,WAAW,KAAK,QAAQ,GAAG,WAAW,KAC9C,QAAQ,GAAG,WAAW;AACtB,gBAAM,IAAI,WAAW;;AAGzB,YAAI,cAAc;AACd,uBAAa;;AAEjB,YAAI,eAAe,kBAAkB,eAAe;AAChD,gBAAM,IAAI,WAAW,wBAAwB;;AAGjD;AACA,YAAI,eAAe;AACf,oBAAU,CAAC,CAAC,GAAG,IAAI,CAAC,GAAG,IAAI,QAAQ,IAAI,QAAQ;;AAG/C,oBAAU,CAAC,CAAC,GAAG,IAAI,QAAQ,IAAI,QAAQ,IAAI,CAAC,GAAG;;AAEnD,eAAO,KAAQ,GAAG;;;gCAGS;MAC/B;AACI,YAAI,QAAQ;AACR,iBAAO;;AAEX,cAAM;AACN,aAAK,aACD,KAAK,cAAc,OAAO,oBAAoB,KAAK;AAGvD,YAAI,KAAK,WAAW;AAChB,eAAK,UAAU,CAAC,CAAC,GAAG,IAAI,CAAC,GAAG;mBAEvB,OAAO,KAAK,YAAY;AAC7B,eAAK,UACD,CAAC,CAAC,KAAK,SAAS,KAAK,UAAU,CAAC,KAAK,SAAS,KAAK;;AAGvD,eAAK,UAAU,KAAK;AACpB,cAAI,KAAK,QAAQ,WAAW;AACxB,kBAAM,IAAI,WAAW,+EACI,KAAK,QAAQ;;AAE1C;AACA;AACA,cAAI,OAAO,KAAK,QAAQ,OAAO;AAC3B,4BAAgB,CAAC,KAAK,QAAQ,IAAI,KAAK,QAAQ;AAC/C,2BAAe,CAAC,KAAK,QAAQ,IAAI,KAAK,QAAQ;;AAG9C,iBAAK,UAAU,KAAK;AACpB,gBAAI,KAAK,QAAQ,GAAG,WAAW;AAC3B,oBAAM,IAAI,WAAW,sFACQ,KAAK,QAAQ,GAAG;;AAEjD,4BAAgB,KAAK,QAAQ;AAC7B,gBAAI,KAAK,QAAQ,GAAG,WAAW;AAC3B,oBAAM,IAAI,WAAW,qFACQ,KAAK,QAAQ,GAAG;;AAEjD,2BAAe,KAAK,QAAQ;;AAEhC,eAAK,UAAU,CAAC,eAAe;;AAEnC,aAAK,YAAY,CAAC,IAAI,UAAU,CAAE,MAAM;;MAE5C;AACI,qBAAa,mBAAmB;AAChC;AACA;AACA,YAAI,KAAK,eAAe;AACpB,cAAI,WAAW,MAAM,QAAQ,WAAW,MAAM;AAC1C,mBAAO,WAAW,KAAK,KAAK,QAAQ,GAAG,KAAK,KAAK,QAAQ,GAAG;;AAG5D,mBAAO;;AAEX,cAAI,WAAW,MAAM,QAAQ,WAAW,MAAM;AAC1C,mBAAO,WAAW,KAAK,KAAK,QAAQ,GAAG,KAAK,KAAK,QAAQ,GAAG;;AAG5D,mBAAO;;AAEX,iBAAO,CAAC,WAAW,IAAI,WAAW,IAAI,MAAM;;AAG5C,cAAI,WAAW,MAAM,QAAQ,WAAW,MAAM;AAC1C,mBAAO,WAAW,KAAK,KAAK,QAAQ,GAAG,KAAK,KAAK,QAAQ,GAAG;;AAG5D,mBAAO;;AAEX,cAAI,WAAW,MAAM,QAAQ,WAAW,MAAM;AAC1C,mBAAO,WAAW,KAAK,KAAK,QAAQ,GAAG,KAAK,KAAK,QAAQ,GAAG;;AAG5D,mBAAO;;AAEX,iBAAO,CAAC,WAAW,IAAI,MAAM,MAAM,WAAW;;;MAGtD;AACI,eAAO,KAAK,MAAM,iBAAiB,oBAAoB,SAAS,KAAK,SAAS,KAAK;;MAEvF;AACI,wBAAe;UACX,SAAS,KAAK;UACd,YAAY,KAAK;;AAErB,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;AAIf,kBAAc,YAAY;AAC1B,kBAA4B;ACtL5B;;;;;;;;;AAkCO;AACH,aAAO,KAAK;AACR,wBAAgB;AAChB,sBAAc;AACd,yBAAiB;AACjB,YAAI,WAAW;AACX,oBAAU,CAAC,GAAG;;AAElB,YAAI,WAAW;AACX,oBAAU;;AAEd,YAAI,cAAc;AACd,uBAAa;;AAEjB,YAAI,YAAY;AACZ,qBAAW;;AAIf,YAAI,sBAAsB,GAAG;AAC7B;AACA,8BAAuB,YAAY,SAAU,SAAS;AACtD,YAAI,aAAa;AAEb,cAAI,SAAY,GAAG,UAAU,SAAS;;AAKtC,cAAI,SAEJ,GAAG,UAAU,SAAS;;AAE1B,YAAI,eAAe;AACf,cAAI,WAAc,GAAG,CAAC,GAAG,GAAG,GAAG;;AAEnC,eAAO;;;AAaR;AACH,aAAO,KAAK;AACR,wBAAgB;AAChB,sBAAc;AACd,yBAAiB;AACjB,YAAI,WAAW;AACX,oBAAU,CAAC,GAAG,GAAG;;AAErB,YAAI,WAAW;AACX,oBAAU;;AAEd,YAAI,cAAc;AACd,uBAAa;;AAEjB,YAAI,YAAY;AACZ,qBAAW;;AAGf,YAAI,sBAAsB,GAAG;AAC7B;AACA,8BAAuB,YAAY,SAAU,SAAS;AACtD,YAAI,aAAa;AACb,cAAI,UAAc,GAAG,UAAU,SAAS;;AAGxC,cAAI,UAAc,GAAG,UAAU,SAAS;;AAE5C,YAAI,eAAe;AACf,cAAI,WAAc,GAAG,CAAC,GAAG,GAAG,GAAG,GAAG;;AAEtC,eAAO;;;4BAMgB;MAO3B;AACI,YAAI,KAAK,YAAY;AACjB,eAAK,WAAW;;AAEpB,cAAM;AACN,YAAI,OAAO,KAAK,aAAa;AACzB,eAAK,WAAW,CAAC,KAAK;mBAEjB,MAAM,QAAQ,KAAK,aACxB,KAAK,SAAS,WAAW,KACzB,OAAO,KAAK,SAAS,OAAO;AAC5B,eAAK,WAAW,KAAK;;AAGrB,gBAAM,IAAI,WAAW,qGAEd,KAAK,UAAU,KAAK;;AAE/B,8BAAsB,KAAK,UAAU;AACrC,YAAI,KAAK,WAAW;AAChB,eAAK,UAAU,KAAK;;AAGpB,cAAI,OAAO,KAAK,YAAY;AACxB,iBAAK,UAAU,CAAC,KAAK;qBAEhB,MAAM,QAAQ,KAAK,YACxB,KAAK,QAAQ,WAAW,KACxB,OAAO,K
AAK,QAAQ,OAAO;AAC3B,iBAAK,UAAU,KAAK;;AAGpB,kBAAM,IAAI,WAAW,oGAEd,KAAK,UAAU,KAAK;;;AAGnC,8BAAsB,KAAK,SAAS;AACpC,aAAK,UAAU,KAAK,WAAW,OAAO,UAAU,KAAK;AACrD,yBAAiB,KAAK;AACtB,aAAK,YAAY,CAAC,IAAI,UAAU,CAAE,MAAM;;MAE5C;AACI,qBAAa,mBAAmB;AAChC,uBAAe,iBAAiB,WAAW,IAAI,KAAK,SAAS,IAAI,KAAK,SAAS,KAAK,QAAQ;AAC5F,eAAO,CAAC,WAAW,IAAI,QAAQ,WAAW;;MAE9C;AACI,eAAO,KAAK;AACR,eAAK,eAAe,QAAQ;AAE5B,mBAAS,aAAa,oBAAoB,SAAS;AACnD,yBAAe,KAAK,gBAAgB,oBAAoB,SAAS,CAAC,KAAK,SAAS,IAAI,IAAI,CAAC,KAAK,QAAQ,IAAI,IAAI,KAAK,SAAS;AAE5H,iBAAO,QAAY,QAAQ,CAAC;;;MAGpC;AACI,wBAAe;UACX,UAAU,KAAK;UACf,SAAS,KAAK;UACd,SAAS,KAAK;;AAElB,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;+BAGmB;MAC9B;AACI,cAAM;;MAEV;AACI,wBAAgB;AAChB,yBAAiB;AACjB,eAAO,OAAO,QAAQ,UAAU,SAAS,SAAS,YAAY;;;AAItE,iBAAa,YAAY;AACzB,kBAA4B;mCACU;MAClC;AACI,cAAM;;MAEV;AACI,wBAAgB;AAChB,yBAAiB;AACjB,eAAO,OAAO,QAAQ,UAAU,SAAS,SAAS,YAAY;;;AAItE,qBAAiB,YAAY;AAC7B,kBAA4B;4BAIG;MAC3B;AACI,YAAI,KAAK,YAAY;AACjB,eAAK,WAAW,CAAC,GAAG;;AAExB,cAAM;AACN,aAAK,WAAW,MAAM,QAAQ,KAAK,YAC/B,KAAK,WACL,CAAC,KAAK,UAAU,KAAK;AACzB,YAAI,KAAK,WAAW;AAChB,eAAK,UAAU,KAAK;mBAEf,MAAM,QAAQ,KAAK;AACxB,cAAI,KAAK,QAAQ,WAAW;AACxB,kBAAM,IAAI,WAAW,wHAEd,KAAK,QAAQ;;AAExB,eAAK,UAAU,KAAK;;AAIpB,eAAK,UAAU,CAAC,KAAK,SAAS,KAAK;;AAEvC,8BAAsB,KAAK,UAAU;AACrC,8BAAsB,KAAK,SAAS;AACpC,aAAK,UAAU,KAAK,WAAW,OAAO,UAAU,KAAK;AACrD,aAAK,aACD,KAAK,cAAc,OAAO,iBAAiB,KAAK;AACpD,wBAAgB,KAAK;AACrB,yBAAiB,KAAK;AACtB,aAAK,YAAY,CAAC,IAAI,UAAU,CAAE,MAAM;;MAE5C;AACI,qBAAa,mBAAmB;AAChC,mBAAW,KAAK,eAAe,kBAAkB,WAAW,KAAK,WAAW;AAC5E,mBAAW,KAAK,eAAe,kBAAkB,WAAW,KAAK,WAAW;AAC5E,eACI,iBAAiB,MAAM,KAAK,SAAS,IAAI,KAAK,SAAS,KAAK,QAAQ;AACxE,eACI,iBAAiB,MAAM,KAAK,SAAS,IAAI,KAAK,SAAS,KAAK,QAAQ;AACxE,YAAI,KAAK,eAAe;AACpB,iBAAO,CAAC,WAAW,IAAI,WAAW,IAAI,MAAM;;AAG5C,iBAAO,CAAC,WAAW,IAAI,MAAM,MAAM,WAAW;;;MAGtD;AACI,eAAO,KAAK;AACR,eAAK,eAAe,QAAQ;AAC5B,iBAAO,KAAK,gBAAgB,oBAAoB,SAAS,KAAK,UAAU,KAAK,SAAS,KAAK,SAAS,KAAK;;;MAGjH;AACI,wBAAe;UACX,UAAU,KAAK;UACf,SAAS,KAAK;UACd,SAAS,KAAK;UACd,YAAY,KAAK;;AAErB,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;+BAGmB;MAC9B;AACI,cAAM;;MAEV;AACI,wBAAgB;AAChB,yBAAiB;AACjB,eAAO,OAAO,QAAQ,UAAU,SAAS,SAAS,YAAY;;;AAItE,iBAAa,YAAY;AACzB,kBAA4B;mCACU;MAClC;AACI,cAAM;;MAEV;AACI,wBAAgB;AAChB,yBAAiB;AACjB,eAAO,OAAO,QAAQ,UAAU,SAAS,SAAS,YAAY;;;AAItE,qBAAiB,YAAY;AAC7B,kBAA4B;4BAIG;MAC3B;AACI,YAAI,KAAK,YAAY;AACjB,eAAK,WAAW,CAAC,GAAG,GAAG;;AAE3B,cAAM;AACN,aAAK,WAAW,MAAM,QAAQ,KAAK,YAC/B,KAAK,WACL,CAAC,KAAK,UAAU,KAAK,UAAU,KAAK;AACxC,YAAI,KAAK,WAAW;AAChB,eAAK,UAAU,KAAK;mBAEf,MAAM,QAAQ,KAAK;AACxB,cAAI,KAAK,QAAQ,WAAW;AACxB,kBAAM,IAAI,WAAW,wHAEd,KAAK,QAAQ;;AAExB,eAAK,UAAU,KAAK;;AAIpB,eAAK,UAAU,CAAC,KAAK,SAAS,KAAK,SAAS,KAAK;;AAErD,8BAAsB,KAAK,UAAU;AACrC,8BAAsB,KAAK,SAAS;AACpC,aAAK,UAAU,KAAK,WAAW,OAAO,UAAU,KAAK;AACrD,aAAK,aACD,KAAK,cAAc,OAAO,iBAAiB,KAAK;AACpD,wBAAgB,KAAK;AACrB,yBAAiB,KAAK;AACtB,aAAK,YAAY,CAAC,IAAI,UAAU,CAAE,MAAM;;MAE5C;AACI,qBAAa,mBAAmB;AAChC,qBAAa,KAAK,eAAe,kBAAkB,WAAW,KAAK,WAAW;AAC9E,mBAAW,KAAK,eAAe,kBAAkB,WAAW,KAAK,WAAW;AAC5E,mBAAW,KAAK,eAAe,kBAAkB,WAAW,KAAK,WAAW;AAC5E,iBAAS,iBAAiB,QAAQ,KAAK,SAAS,IAAI,KAAK,SAAS,KAAK,QAAQ;AAC/E,eACI,iBAAiB,MAAM,KAAK,SAAS,IAAI,KAAK,SAAS,KAAK,QAAQ;AACxE,eACI,iBAAiB,MAAM,KAAK,SAAS,IAAI,KAAK,SAAS,KAAK,QAAQ;AACxE,YAAI,KAAK,eAAe;AACpB,iBAAO,CAAC,WAAW,IAAI,WAAW,IAAI,QAAQ,MAAM;;AAGpD,iBAAO,CAAC,WAAW,IAAI,QAAQ,MAAM,MAAM,WAAW;;;MAG9D;AACI,eAAO,KAAK;AACR,eAAK,eAAe,QAAQ;AAC5B,iBAAO,KAAK,gBAAgB,oBAAoB,SAAS,KAAK,UAAU,KAAK,SAAS,KAAK,SAAS,KAAK;;;MAGjH;AACI,wBAAe;UACX,UAAU,KAAK;UACf,SAAS,KAAK;UACd,SAAS,KAAK;UACd,YAAY,KAAK;;AAErB,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;+BAGmB;MAC9B;AACI,cAAM;;MAEV;AACI,wBAAgB;AAChB
,yBAAiB;AACjB,eAAO,OAAO,QAAQ,UAAU,SAAS,SAAS,YAAY;;;AAItE,iBAAa,YAAY;AACzB,kBAA4B;mCACU;MAClC;AACI,cAAM;;MAEV;AACI,wBAAgB;AAChB,yBAAiB;AACjB,eAAO,OAAO,QAAQ,UAAU,SAAS,SAAS,YAAY;;;AAItE,qBAAiB,YAAY;AAC7B,kBAA4B;kCAIS;MACjC;AACI,cAAM;AACN,aAAK,YAAY,CAAC,IAAI,UAAU,CAAE,MAAM;;MAE5C;AACI,eAAO,CAAC,WAAW,IAAI,WAAW;;MAEtC;AACI,cAAM,IAAI;;;yCAG0B;MACxC;AACI,cAAM,QAAQ;;MAElB;AACI,eAAO,KAAK;AACR,yBAAc,oBAAoB;AAClC,iBAAO,KAAS,QAAO;;;;AAKnC,2BAAuB,YAAY;AACnC,kBAA4B;qCACY;MACpC;AACI,cAAM,QAAQ;;MAElB;AACI,eAAO,KAAK;AACR,yBAAc,oBAAoB;AAClC,iBAAO,KAAQ,QAAO;;;;AAKlC,uBAAmB,YAAY;AAC/B,kBAA4B;kCAIS;MACjC;AACI,cAAM;AACN,aAAK,aACD,KAAK,cAAc,OAAO,iBAAiB,KAAK;AACpD,wBAAgB,KAAK;AACrB,aAAK,YAAY,CAAC,IAAI,UAAU,CAAE,MAAM;;MAE5C;AACI,qBAAa;AACb,YAAI,KAAK,eAAe;AACpB,iBAAO,CAAC,WAAW,IAAI,WAAW;;AAGlC,iBAAO,CAAC,WAAW,IAAI,WAAW;;;MAG1C;AACI,cAAM,IAAI;;MAEd;AACI,wBAAe,CAAE,YAAY,KAAK;AAClC,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;yCAG6B;MACxC;AACI,eAAO,KAAK;AACR,yBAAc,oBAAoB;AAClC,cAAI,KAAK,eAAe;AACpB,mBAAO,KAAS,QAAO,CAAC,GAAG;;AAG3B,mBAAO,KAAS,QAAO,CAAC,GAAG;;;;;AAM3C,2BAAuB,YAAY;AACnC,kBAA4B;qCACY;MACpC;AACI,eAAO,KAAK;AACR,yBAAc,oBAAoB;AAClC,cAAI,KAAK,eAAe;AACpB,mBAAO,KAAQ,QAAO,CAAC,GAAG;;AAG1B,mBAAO,KAAQ,QAAO,CAAC,GAAG;;;;;AAM1C,uBAAmB,YAAY;AAC/B,kBAA4B;ACpgB5B;;;;;;;;;0BA8B6B;MACzB;AAQI,cAAM;AACN,aAAK,QAAQ,KAAK;;MAEtB;AACI,aAAK,QAAQ;;UAGb;AAIA,YAAI,KAAK,SAAS;AACd,iBAAO,KAAK,MAAM;;AAGlB,iBAAO;;;UAGX;AAIA,YAAI,KAAK,SAAS;AACd,eAAK,MAAM,YAAY;;;UAG3B;AACA,eAAO,KAAK,MAAM;;UAGlB;AACA,eAAO,KAAK,MAAM;;UAGlB;AAEA,eAAO,KAAK,MAAM;;UAGlB;AACA,eAAO,KAAK,MAAM;;MAGtB;AACI,eAAO,KAAK,MAAM;;MAEtB;AACI,aAAK,MAAM,WAAW;;MAE1B;AACI,wBAAe;UACX,OAAS;YACL,WAAa,KAAK,MAAM;YACxB,QAAU,KAAK,MAAM;;;AAG7B,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;MAEX;AACI,cAAM,6BAA6B;AACnC,YAAI,KAAK,SAAS;AACd,eAAK,MAAM,6BAA6B;;;aAIzC,yCAAwC;AAC3C,4BAAoB,QAAO;AAC3B,sBAAc,YAAY,aAAa;AACvC,eAAO,QAAO;AACd,0BAAkB,CAAE;AACpB,eAAO,OAAO,WAAW;AACzB,eAAO,IAAI,IAAI;;;kCAGc;MACjC;AACI,cAAM;AACN,aAAK,kBAAkB;;MAE3B;AACI,qBAAa,mBAAmB;AAChC,YAAI,WAAW,SAAS;AACpB,gBAAM,IAAI,WAAW,gFACF,KAAK,UAAU;;AAEtC,aAAK,YAAY,CAAC,CAAE,OAAO;AAC3B,gCAAwB,CAAC,WAAW,IAAI,OAAO,WAAW,MAAM;AAChE,YAAI,CAAC,KAAK,MAAM;AACZ,eAAK,MAAM,MAAM;AACjB,eAAK,MAAM,QAAQ;;AAEvB,cAAM,MAAM;;MAEhB;AACI,qBAAa,mBAAmB;AAChC,gCAAwB,CAAC,WAAW,IAAI,OAAO,WAAW,MAAM;AAChE,iCAAyB,KAAK,MAAM,mBAAmB;AACvD,0BAAkB,WAAW;AAC7B,eAAO,CAAC,iBAAiB,IAAI,WAAW,OAAO,iBAAiB,MAAM;;MAE1E;AACI,eAAO,KAAK;AAER,mBAAS,oBAAoB;AAI7B,wBAAa;AAKT,2BAAe,oBAAoB,KAAK,MAAM,KAAK,SAAQ;AAC3D,mBAAO,CAAC,QAAQ;;AAEpB,6BAAmB,IAAI,OAAM,QAAQ,IAAI,OAAyB,MAAiB,MAAsB,OAAoB;AAC7H,oBAAU,WAAW;AAGrB,iBAAO;;;;AAKnB,oBAAgB,YAAY;AAC5B,kBAA4B;AACrB;AACH,gCAAwC,iCAAiC,0BAA0B;;AAEvG,6CAAyC;gCACN;MAC/B;AACI,cAAM;AASN,4BAAoB,KAAK,MAAM;AAC/B,yBAAiB;AACjB,iBAAS,eAAe,KAAK,MAAM;AACnC,iBAAS,YAAY;AACrB,aAAK,eAAe,YAAY;AAChC,oBAAY,iBACR,YAAY,mBAAmB,OAAO,QAAQ;AAClD,yBAAiB;AACjB,iBAAS,eAAe,KAAK,MAAM;AACnC,iBAAS,YAAY;AACrB,aAAK,gBAAgB,YAAY;AACjC,aAAK,aAAa,OAAO,aAAa,KAAK,aAAa;AACxD,aAAK,cAAc,OAAO,cAAc,KAAK,cAAc;AAC3D,aAAK,YAAY,KAAK,cAAc,SAChC,mCACA,KAAK;AACT,oCAA4B,KAAK;AACjC,YAAI,KAAK;AACL,gBAAM,IAAI,oBAAoB;;AAElC,aAAK,YAAY,KAAK,MAAM;AAC5B,aAAK,kBAAkB,KAAK,MAAM;AAClC,aAAK,cAAc,KAAK,MAAM;AAC9B,aAAK,kBAAkB;AACvB,aAAK,aAAa;AAClB,aAAK,YAAY,KAAK,MAAM;AAC5B,aAAK,eAAe;;UAEpB;AACA,eAAO,KAAK;;UAEZ;AAIA,aAAK,aAAa;AAClB,YAAI,KAAK,gBAAgB;AACrB,eAAK,aAAa,YAAY;;AAElC,YAAI,KAAK,iBAAiB;AACtB,eAAK,cAAc,YAAY;;;MAGvC;AACI,eAAO,KAAK,aAAa,aAAa,OAAO,KAAK,cAAc;;MAEpE;AACI,2BAAmB,QAAQ;AAC3B,+BAAuB,KAAK,MAAM,aAAa;AAC/C,aAAK,aAAa,WAAW,QAAQ,MAAM,GAAG;AAC9C,aAAK,cAAc,WAAW,QAAQ,MAAM;;MAEhD;AACI,0BAAkB,KAAK,aAAa,mBAAmB;AACvD,YA
AI,CAAE,OAAM,QAAQ,gBAAgB,MAAM,QAAQ,YAAY;AAC1D,wBAAc,CAAC;;AAEnB,sBAAc;AACd;AACA;AACA;AACA,YAAI,KAAK;AACL,uBAAa,YAAY,MAAM;AAC/B,wBAAc,YAAY;;AAG1B,wBAAc,YAAY;;AAE9B,sBAAc;AACd,YAAI,KAAK,cAAc;AACnB,sBAAY,YAAY,SAAS,MAAM;AACvC,yBAAe,CAAC;mBAEX,KAAK,aAAa;AACvB,yBAAe,CAAC,aAAa,YAAY;;AAGzC,yBAAe,CAAC;;AAEpB,YAAI,KAAK;AACL,cAAI,KAAK,aAAa;AAClB,mBAAO,aAAa,OAAO,YAAY,OAAO,WAAW;;AAE7D,iBAAO,CAAC,aAAa,OAAO,YAAY,OAAO,WAAW;;AAE9D,eAAO,iBAA+B;;MAE1C;AACI,2BAAmB,UAAU,OAAO,OAAO,OAAO;AAClD,wBAAgB,UAAU,OAAO,OAAO,OAAO;AAC/C,YAAI,UAAU;AACV,mBAAS;;AAEb,6BAAqB,gBAAgB,QAAQ,cAAc,WAAW,KAAK;AAC3E,iBAAS,aAAa;AACtB,uBAAe,aAAa;AAC5B,oBAAY,aAAa;AACzB,YAAI,MAAM,QAAQ;AACd,yBAAe,OAAO,MAAM;AAC5B,mBAAS,OAAO;;AAEpB,YAAK,iBAAgB,QAAQ,aAAa,WAAW,MACjD,aAAa;AACb,iBAAO,MAAM,MAAM,QAAQ;;AAE/B,iCAAyB;AACzB,gCAAwB;AACxB,YAAI,gBAAgB;AAChB,4BAAkB,aAAa;AAC/B,cAAI,YAAY,IAAI;AAChB,kBAAM,IAAI,WAAW;;AAIzB,iBAAO,kBAAkB;AACzB,2BAAiB,KAAK,GAAG;AACzB,6BAAmB,aACd,IAAI,WAAS,IAAI,UAAU,CAAE,OAAO,MAAM;AAC/C,eAAK,aAAa,YAAY,WAAW,MAAM,GAAG,YAAY;AAC9D,eAAK,cAAc,YAAY,WAAW,MAAM,YAAY;AAC5D,0BAAgB,KAAK,GAAG;;AAE5B,YAAI,aAAa;AACb,gBAAM,IAAI,oBAAoB;;AAGlC,iCAAyB,iBAAiB,cAAc;AACxD,8BAAqB;AACjB,cAAI,mBAAkB,mBAAmB;AACrC,kBAAM,IAAI,WAAW;;;AAI7B,YAAI;AAEA,4BAAkB,CAAC,QAAQ,OAAO;AAClC,gCAAsB,KAAK,UAAU,OAAO;AAU5C,oCAA0B,KAAK;AAC/B,eAAK,YAAY;AACjB,yBAAe,MAAM,MAAM,WAAW;AACtC,eAAK,YAAY;AACjB,iBAAO;;AAGP,iBAAO,MAAM,MAAM,QAAQ;;;MAGnC;AACI,eAAO,KAAK;AACR,+BAAqB,OAAO;AAC5B;AACA;AACA,cAAI,gBAAgB;AAChB,gBAAI,KAAK,aAAa,KAAK,QAAQ;AACnC,mBAAO,KAAK,cAAc,KAAK,QAAQ;;AAGvC,iCAAqB,aAAa,MAAM,GAAG,aAAa,SAAS;AACjE,kCAAsB,aAAa,MAAM,aAAa,SAAS;AAC/D,gBAAI,KAAK,aAAa,KAAK,QAAQ,OAAO,OAAO,QAAQ,CAAE,cAAc;AACzE,mBAAO,KAAK,cAAc,KAAK,QAAQ,OAAO,OAAO,QAAQ,CAAE,cAAc;;AAEjF;AACA,cAAI,KAAK;AACL,gBAAI,MAAM,QAAQ;AACd,uBAAS,EAAE,MAAM,GAAG,OAAO,KAAK,MAAM;;;AAI1C,gBAAI,EAAE;AACN,mBAAO,KAAK;;AAEhB,cAAI,KAAK;AACL,mBAAO,SAAY,MAAM;;AAE7B;AACA,cAAI,KAAK,cAAc;AACnB,qBAAS,YAAc,CAAC,GAAG;qBAEtB,KAAK,cAAc;AACxB,qBAAS,MAAQ,GAAG;qBAEf,KAAK,cAAc;AACxB,qBAAS,KAAQ,KAAI,MAAQ,GAAG;qBAE3B,KAAK,cAAc;AACxB,qBAAS,KAAQ,GAAG;qBAEf,KAAK,aAAa;AACvB,qBAAS,CAAC,GAAG;;AAGjB,cAAI,KAAK;AACL,gBAAI,KAAK,aAAa;AAClB,qBAAO,OAAO,OAAO;;AAEzB,mBAAO,CAAC,QAAQ,OAAO;;AAE3B,iBAAO;;;MAGf;AACI,aAAK,aAAa;AAClB,aAAK,cAAc;;MAEvB;AACI,kBAAU,KAAK,aAAa,MAAM;AAC9B,eAAK,aAAa,MAAM;;AAE5B,kBAAU,KAAK,cAAc,MAAM;AAC/B,eAAK,cAAc,MAAM;;AAE7B,aAAK,QAAQ;;MAEjB;AACI,YAAI,MAAM,QAAQ;AACd,iBAAO,KAAK;;AAEhB;AACA,YAAI,KAAK;AACL,cAAI,KAAK,aAAa;AAClB,yBAAa,CAAC,MAAM;;AAGpB,yBAAa;;;AAIjB,cAAI,KAAK,aAAa;AAClB,yBAAa,CAAC,MAAM;;AAGpB,yBAAa;;;AAGrB,YAAI,KAAK;AACL,yBAAe,KAAK,aAAa;AACjC,4BAAkB,OAAO,IAAI,WAAS;AACtC,cAAI,MAAM,QAAQ;AACd,mBAAO,WAAW,OAAO,WAAW,OAAO;;AAG3C,mBAAO,CAAC,YAAY,OAAO,WAAW,OAAO;;;AAIjD,iBAAO;;;UAGX;AACA,eAAO,KAAK,aAAa,iBAAiB,OAAO,KAAK,cAAc;;UAEpE;AACA,eAAO,KAAK,aAAa,oBAAoB,OAAO,KAAK,cAAc;;MAG3E;AACI,cAAM,6BAA6B;AACnC,YAAI,KAAK,gBAAgB;AACrB,eAAK,aAAa,6BAA6B;;AAEnD,YAAI,KAAK,iBAAiB;AACtB,eAAK,cAAc,6BAA6B;;;MAGxD;AACI,wBAAe;UACX,WAAa,KAAK;;AAGtB,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;aAGJ;AACH,yBAAiB,YAAY,QAAO;AACpC,eAAO,QAAO;AAEd,YAAI,QAAO,mBAAmB;AAC1B,gBAAM,IAAI,oBAAoB;;AAIlC,0BAAkB;AAClB,kBAAU,WAAW;AACrB,eAAO,IAAI,IAAI;;;AAIvB,kBAAc,YAAY;AAC1B,kBAA4B;ACle5B;;;;;;;;;AA+DO;AACH,aAAO,IAAI,WAAW;;AA2BnB;AACH,aAAO,IAAI,IAAI;;AAmBZ;AACH,aAAO,IAAI,KAAK;;AAsBb;AACH,aAAO,IAAI,UAAU;;AAuBlB;AACH,aAAO,IAAI,MAAM;;AAkBd;AACH,aAAO,IAAI,UAAQ;;AA0BhB;AACH,aAAO,IAAI,gBAAgB;;AAuBxB;AACH,aAAO,IAAI,OAAO;;AAoBf;AACH,aAAO,IAAI,SAAO;;AAqCf;AACH,aAAO,IAAI,gBAAgB;;AAoBxB;AACH,aAAO,IAAI,SAAO;;AA+Bf;AACH,aAAO,IAAI,gBAAgB;;AAiCxB;AACH,aAAO,IAAI,WAAW;;AA0BnB;AACH,aAAO,IAAI,aAAa;;AAarB;AACH,a
AAO,IAAI,gBAAgB;;AAkCxB;AACH,aAAO,IAAI,aAAW;;AAmCnB;AACH,aAAO,IAAI,MAAM;;AAYd;AACH,aAAO,IAAI,QAAQ;;AAkChB;AACH,aAAO,IAAI,iBAAiB;;AAqBzB;AACH,aAAO,IAAI,QAAQ;;AAgBhB;AACH,aAAO,IAAI,aAAa;;AAyBrB;AACH,aAAO,IAAI,UAAQ;;AA8BhB;AACH,aAAO,IAAI,QAAQ;;AAahB;AACH,aAAO,IAAI,UAAU;;AAuBlB;AACH,aAAO,IAAI,MAAI;;AAoBZ;AACH,aAAO,IAAI,QAAQ;;AAsBhB;AACH,aAAO,IAAI,YAAY;;AAoBpB;AACH,aAAO,IAAI,UAAQ;;AAoBhB;AACH,aAAO,IAAI,UAAQ;;AAqBhB;AACH,aAAO,IAAI,WAAS;;AAwBjB;AACH,aAAO,IAAI,IAAI;;AAwBZ;AACH,aAAO,IAAI,mBAAmB;;AAsB3B;AACH,aAAO,IAAI,mBAAmB;;AAyB3B;AACH,aAAO,IAAI,cAAc;;AActB;AACH,aAAO,IAAI,iBAAiB;;AAEzB;AACH,aAAO,iBAAiB;;AAIrB;AACH,aAAO,iBAAiB;;AAyBrB;AACH,aAAO,IAAI,iBAAiB;;AAEzB;AACH,aAAO,iBAAiB;;AAIrB;AACH,aAAO,iBAAiB;;AAuBrB;AACH,aAAO,IAAI,iBAAiB;;AAEzB;AACH,aAAO,iBAAiB;;AAIrB;AACH,aAAO,iBAAiB;;AAWrB;AACH,aAAO,IAAI,uBAAuB;;AAgB/B;AACH,aAAO,IAAI,uBAAuB;;AAW/B;AACH,aAAO,IAAI,mBAAmB;;AAgB3B;AACH,aAAO,IAAI,mBAAmB;;AAW3B;AACH,aAAO,IAAI,aAAa;;AAuBrB;AACH,aAAO,IAAI,aAAa;;AAuBrB;AACH,aAAO,IAAI,aAAa;;AA0BrB;AACH,aAAO,IAAI,IAAI;;AA+CZ;AACH,aAAO,IAAI,QAAQ;;AAyBhB;AACH,aAAO,IAAI,KAAK;;AA+Cb;AACH,aAAO,IAAI,SAAS;;AA0BjB;AACH,aAAO,IAAI,UAAU;;AA+ClB;AACH,aAAO,IAAI,cAAc;;AA6BtB;AACH,aAAO,IAAI,WAAW;;AAmCnB;AACH,aAAO,IAAI,eAAe;;AA8DvB;AACH,aAAO,IAAI,IAAI;;AASZ;AACH,aAAO,IAAI,gBAAgB;;AAIxB;AACH,aAAO,IAAI,cAAc;;AAgDtB;AACH,aAAO,IAAI,gBAAgB;;AAGxB,4BAAwB;AACxB,4BAAwB;AACxB,sBAAkB;AAClB,sBAAkB;AAyBlB;AACH,aAAO,IAAI,cAAc;;AA0BtB;AACH,aAAO,IAAI,gBAAgB;;AAiCxB;AACH,aAAO,IAAI,aAAa;;AAyBrB;AACH,aAAO,IAAI,QAAQ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACpjDhB;AACH,aAAO,eAAuB,OAAO;;AAmBlC;AACH,aAAO,qBAA2B,OAAO;;AAqBtC;AACH,aAAO,0BAAkC,OAAO;;AAoB7C;AACH,aAAO,oBAA4B,OAAO;;AAavC;AACH,aAAO,0BAAgC,OAAO;;AAqC3C;AACH,aAAO,UAAkB,OAAO;;AAqC7B;AACH,aAAO,OAAe,OAAO;;AAuB1B;AACH,aAAO,gBAAuB,OAAO;;AAsBlC;AACH,aAAO,kBAAyB,OAAO;;AAoBpC;AACH,aAAO,4BAAmC,OAAO;;AAE9C;AACH,aAAO,4BAAmC,OAAO;;AAE9C;AACH,aAAO,4BAAmC,OAAO;;AAoB9C;AACH,aAAO,mBAAwB,OAAO;;AAEnC;AACH,aAAO,mBAAwB,OAAO;;AAEnC;AACH,aAAO,mBAAwB,OAAO;;;;;;;;;;;;;;;;;;;;AC9R1C;;;;;;;;;;;;;ACAA;;;;;;;;;AAoBO;AACH,aAAO,IAAI,KAAK;;AAWb;AACH,aAAO,GAAgB;;AAWpB;AACH,aAAO,GAAgB;;;;;;;;AC7C3B;;;;;;;;;2BAc8B;MAC1B;AACI,cAAM,GAAG;AAET,aAAK,QAAQ;;MAEjB;AACI,YAAI,CAAE,mBAAiB;AACnB,gBAAM,IAAI,MAAM;;AAEpB,aAAK,QAAQ;;;AAGrB;AACI,aAAO,UAAU;;AAErB;AACI,aAAO,UAAU;;gCAMc;MAC/B;AACI;AACA,YAAI,QAAQ;AACR,iBAAO;;AAEX,YAAI,KAAK;AACL,gBAAM,IAAI,oBAAoB;;AAElC,aAAK,UAAU,KAAK,WAAW;AAC/B,aAAK,WAAW,KAAK,IAAI,KAAK,YAAY;AAC1C,aAAK,WAAW,KAAK,YAAY;AACjC,aAAK,UAAU,KAAK,WAAW;AAC/B,aAAK,OAAO,KAAK,QAAQ;AACzB,aAAK,WAAW,KAAK;AACrB,YAAI,CAAC,QAAQ,OAAO,OAAO,QAAQ,KAAK,UAAU;AAC9C,kBAAQ,KAAK,uBAAuB,KAAK;AAEzC,eAAK,OAAO;;AAEhB,YAAI,KAAK,SAAS;AACd,eAAK,cAAc;mBAEd,KAAK,SAAS;AACnB,eAAK,cAAc;;AAInB,cAAI,KAAK,QAAQ,QAAQ,WAAW;AAChC,iBAAK,cAAc;;AAGnB,iBAAK,cAAc;;;AAG3B,YAAI,KAAK,gBAAgB;AACrB,eAAK,YAAY;;;YAGnB;AACF,aAAK,OAAO;AACZ,aAAK,eAAe;AACpB,YAAI,KAAK,YAAY;AACjB,eAAK,OAAO,KAAK;;AAGjB,eAAK,OAAO,KAAK,gBAAgB,SAAO,WAAW;;;YAGrD;AACF,cAAM,qBAAqB;AAC3B,wBAAgB,KAAK,gBAAgB;AACrC,YAAI,WAAW;AACX;;AAEJ,YAAI,KAAK,YAAY,UAAU,KAAK,UAAU,KAAK;AAC/C,eAAK,OAAO;AACZ,eAAK,OAAO;;AAIZ,eAAK;AACL,cAAI,KAAK,QAAQ,KAAK;AAClB,iBAAK,eAAe;AACpB,iBAAK,MAAM,eAAe;;;;YAKhC;AACF,YAAI,KAAK,eAAe,KAAK,KAAK;AAC9B,kBAAQ,IAAI,SAAS,KAAK;;;MAGlC;AACI,YAAI,QAAQ;AACR,iBAAO;;AAEX,6BAAqB,KAAK,KAAK;AAC/B,YAAI,gBAAgB;AAChB,kBAAQ,KAAK,4BAA4B,KAAK,oDAChB,OAAO,KAAK;;AAE9C,eAAO;;;AA8CR;AACH,aAAO,IAAI,cAAc;;AAEjB,sBAAa,CAAE;ACzK3B;;;;;;;;;ACAA;;;;;;;;;;;;;;;;;AAkBO;AACP,IAAC;AACG,gBAAS,UAAS,gBAAgB,KAAK;AACvC,gBAAS,UAAS,cAAc,KAAK;AACrC,gBAAS,UAAS,eAAe,KAAK;AACtC,gBAAS,UAAS,cAAc,K
AAK;AACrC,gBAAS,UAAS,cAAc,KAAK;AACrC,gBAAS,UAAS,cAAc,KAAK;AACrC,gBAAS,UAAS,aAAa,KAAK;AACpC,gBAAS,UAAS,eAAe,KAAK;AACtC,gBAAS,UAAS,kBAAkB,KAAK;AACzC,gBAAS,UAAS,cAAc,KAAK;AACrC,gBAAS,UAAS,aAAa,MAAM;AACrC,gBAAS,UAAS,cAAc,MAAM;AACtC,gBAAS,UAAS,eAAe,MAAM;AACvC,gBAAS,UAAS,eAAe,MAAM;AACvC,gBAAS,UAAS,iBAAiB,MAAM;AACzC,gBAAS,UAAS,kBAAkB,OAAO;AAC3C,gBAAS,UAAS,mBAAmB,OAAO;AAC5C,gBAAS,UAAS,kBAAkB,OAAO;AAC3C,gBAAS,UAAS,kBAAkB,OAAO;AAC3C,gBAAS,UAAS,kBAAkB,OAAO;AAC3C,gBAAS,UAAS,iBAAiB,OAAO;AAC1C,gBAAS,UAAS,mBAAmB,OAAO;AAC5C,gBAAS,UAAS,sBAAsB,OAAO;AAC/C,gBAAS,UAAS,kBAAkB,OAAO;AAC3C,gBAAS,UAAS,iBAAiB,OAAO;AAC1C,gBAAS,UAAS,kBAAkB,OAAO;AAC3C,gBAAS,UAAS,mBAAmB,OAAO;AAC5C,gBAAS,UAAS,mBAAmB,OAAO;AAC5C,gBAAS,UAAS,qBAAqB,OAAO;OAC/C,YAAa,YAAW;AACpB;AACP,IAAC;AAEG;AACA,MAAC;AACG,iCAAwB,yBAAwB,YAAY,KAAK;AACjE,iCAAwB,yBAAwB,QAAQ,KAAK;AAC7D,iCAAwB,yBAAwB,QAAQ,KAAK;SAC9D,0BAA0B,UAAS,2BAA4B,WAAS,0BAA0B;OACtG,YAAa,YAAW;AC3D3B;;;;;;;;;;;;;;;;AAgBA,uBAAmB;AA0BZ;AACH,uBAAiB;QACb,UAAU;QACV,UAAU;QACV,QAAQ;QACR,OAAO;QACP,gBAAgB;;AAEpB,iBAAW,QAAQ;;AAShB;AACH,aAAO,WAAW;;AASf;AACH,aAAO,WAAW;;ACtEtB;;;;;;;;;;;;;;;;AAiBO;AACH,yBAAmB,KAAK,YAAY;AACpC,UAAI,cAAc,WAAW,oBAAoB;AAC7C,sBAAc,WAAW;AACzB,oBAAY,WAAW,kBAAkB,IACrC,SACC,WAAW,kBAAkB,SAAY,QAAQ,IAC9C,WAAW;AACnB,YAAI,WAAW,SAAS;AACpB,iBAAO,UAAU,KAAK,WAAW,WAAW,kBAAkB,WAAW,SAAS;;AAEtF,YAAI,WAAW,SAAS;AACpB,yBAAe,KAAK,WAAW,MAAM,OAAO;AAC5C,iBAAO,OAAO,IAAI,UAAQ,UAAU,MAAM,WAAW,SAAS;;AAElE,wBAAe,UAAU,KAAK,WAAW,MAAM,OAAO,IAAI,WAAW,SAAS;AAC9E,sBAAa,QAAO;AACpB,eAAO,WAAW,SAAS,WACvB,MAAK,KACL,eAAmB,QAAO,OAAO;;AAEzC,wBAAkB,KAAK,WAAW;AAClC,aAAO,aAAa,UAAU;;AAS3B;AACH,iCAA0B,cAAc;AACxC,UAAI,mBAAmB;AACnB,wBAAe,gBAAgB,yBAAyB;AACxD,YAAI,WAAU;AACV,iBAAO;;;AAGf,wBAAkB,QAAQ,kBAAkB,KAAK;AAC7C,eAAO,CAAC,CAAC,WAAW,yBAAyB,UAAU;;AAE3D,aAAO,cAAc,SACjB,WAAW,yBAAyB,UAAU,YAAY,UAC1D;;AAOD;AACH,aAAO,WAAW,yBAAyB,MAAM,QAAQ;;AAQtD;AACH,iCAA0B,cAAc;AACxC,aAAO;QACH,yBAAyB,UAAU,WAAW,QAAQ;QACtD;;;AAGR;AACI,aAAO,CAAC,CAAC,YAAY,GAAG,QAAQ,cAAc;;AAE3C;AACH,oBAAc,KAAK,MAAM;AACzB,UAAI,MAAM,WAAW;AACjB,eAAO,CAAC,MAAM;;AAElB,uBAAiB,MAAM;AACvB,aAAO,CAAC,UAAU,OAAO,MAAM,MAAM,SAAS;;AAE3C;AACH,kBAAY;AACZ,mBAAa,GAAG,IAAI,IAAI,QAAQ,KAAK;AACjC,YAAI,KAAK,IAAI,MAAM,GAAG,IAAI;;AAE9B,aAAO;;AAEJ;AACH,iBAAU,cAAc,OAAO,MAAM,WAAW;AAChD,UAAI,SAAQ;AAER,eAAM,cAAc,oBAAoB,MAAM,WAAW;AACzD,gCAAwB,CAAC,CAAC,GAAG,IAAI,CAAC,GAAG,IAAI,CAAC,GAAG,IAAI,CAAC,GAAG;AACrD,qBAAa,GAAG,IAAI,GAAG;AACnB,0BAAgB,GAAG,KAAK,KAAI,IAAI;AAChC,0BAAgB,GAAG,KAAK,KAAI,IAAI,IAAI;;AAExC,eAAO;;AAEX,aAAO;;AAWJ;AACH,aAAO,QAAO,OAAO,UAAS,MAAM;;AC9HxC;;;;;;;;;;;;;;;;AAgBO,iBAAa;MAChB;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU,CAAC,CAAE,OAAS,GAAG,KAAO,GAAG,MAAQ,WAAW,MAAQ;;MAElE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;
UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;;MAG3C;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;;MAG3C;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;;;;;;AClL/E;;;;;;;;;;;;;;;;AAgBO,mBAAa;MAChB;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,kBAAkB,MAAQ,gBAAgB,MAAQ;UAC9D,CAAE,QAAU,kBAAkB,MAAQ,gBAAgB,MAAQ;;;MAGtE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;UACtC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;;QAE1C,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,
UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;UAAQ;YACvE,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;MAI5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;UAAQ;YACvE,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;MAI5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;;QAE3C,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;UAAQ;YACvE,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;MAI5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KA
AK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;;QAE1C,OAAS;UACL;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;;;;;;AC5dzB,mBAAa;MAChB;QACI,UAAY;QACZ,UAAY;QACZ,QAAU,CAAC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;;MAErD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;UACtC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;;;MAG9C;QACI,UAAY;QACZ,UAAY;QACZ,QAAU,CAAC,CAAE,OAAS,GAAG,KAAO,GAAG,MAAQ,WAAW,MAAQ;;MAElE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;UACnE,CAAE,QAAU,cAAc,MAAQ,aAAa,MAAQ;UACvD,CAAE,QAAU,eAAe,MAAQ,cAAc,MAAQ;;;MAGjE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;;QAE1C,OAAS;UACL,CAAE,QAAU,SAAS,MAAQ,SAAS,MAAQ;UAC9C,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;UAC7D,CAAE,QAAU,gBAAgB,MAAQ,eAAe,MAAQ;UAC3D,CAAE,QAAU,oBAAoB,MAAQ,kBAAkB,MAAQ;UAClE;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;;UAEZ,CAAE,QAAU,qBAAqB,MAAQ,QAAQ,MAAQ;;;MAGjE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,iBAAiB,MAAQ;UAC/C,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,iBAAiB,MAAQ;UAC/C,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS,CAAC;UACF,QAAU;UACV,MAAQ;UACR,MAAQ;UACR,cAAgB;;;MAG5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,iBAAiB,MAAQ;UAC/C,CAAE,OAAS,GAAG,MAAQ,WAAW,MAAQ;UACzC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,SAAS,MAAQ,SAAS,MAAQ;UAC9C,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;;;MAGrE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,iBAAiB,MAAQ;UAC/C,CAAE,OAAS,GAAG,MAAQ,WAAW,MAAQ;UACzC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS,CAAC,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ;;MAExD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,iBAAiB,MAAQ;UAC/C,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,SAAS,MAAQ,SAAS,MAAQ;UAAW;YACrD,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;MAI5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,iBAAiB,MAAQ;UAC/C,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,WAAW,MAAQ;UACzC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS,CAAC,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ;;MAExD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,iBAAiB,MAAQ;UAC/C,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;;MAGhD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU,CAAC,CAAE,OAAS,GAAG,MAAQ,iBAAiB,MAAQ;;MAE9D;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;UACtC,CAAE,OAAS,GAAG,KAAO,GAAG,MAAQ,QAAQ,MAAQ;;QAEpD,OAAS;UACL,CAAE,QAAU,eAAe,MAAQ,cAAc,MAAQ;UACzD,CAAE,QAAU,eAAe,MAAQ,cAAc,MAAQ;;;MAGjE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;UACtC,CAAE,OAAS,GAAG,KAAO,GAAG,MAAQ,QAAQ,MAAQ;;QAEpD,OAAS;UACL,CAAE,QAAU,eAAe,MAAQ,cAAc,MAAQ;UACzD,CAAE,QAAU,eAAe,MAAQ,cAAc,MAAQ;;;MAGjE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,KAAO,GAAG,MAAQ,QAAQ,MAAQ;;QAEpD,OAAS;UACL,CAAE,QAAU,QAAQ,MAAQ,QAAQ,MAAQ;UAC5C,CAAE,QAAU,QAAQ,MAAQ,QAAQ,MAAQ;;;MAGpD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,KAAO,GAAG,
MAAQ,QAAQ,MAAQ;;QAEpD,OAAS;UACL,CAAE,QAAU,QAAQ,MAAQ,QAAQ,MAAQ;UAC5C,CAAE,QAAU,QAAQ,MAAQ,QAAQ,MAAQ;;;MAGpD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,WAAW,MAAQ;UACzC,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;;QAElD,OAAS,CAAC,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;;MAE3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,WAAW,MAAQ;UACzC,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;UAC9C,CAAE,OAAS,GAAG,MAAQ,eAAe,MAAQ;;QAEjD,OAAS,CAAC,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;;MAE3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;UAC9C,CAAE,OAAS,GAAG,MAAQ,WAAW,MAAQ;UACzC,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;;QAElD,OAAS,CAAC,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;;MAE3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;UAC9C,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;;QAElD,OAAS,CAAC,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;;MAE3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;UAC9C,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS,CAAC,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;;MAE3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;UAC9C,CAAE,OAAS,GAAG,MAAQ,eAAe,MAAQ;;QAEjD,OAAS,CAAC,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;;MAE3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;;QAElD,OAAS,CAAC,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;;MAE3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;UAC9C,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;;QAElD,OAAS;UACL,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;UAC7D,CAAE,QAAU,gBAAgB,MAAQ,eAAe,MAAQ;;;MAGnE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;UAC9C,CAAE,OAAS,GAAG,MAAQ,WAAW,MAAQ;;QAE7C,OAAS,CAAC,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;;MAE3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;;QAElD,OAAS;UACL,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;UAC7D,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;;;MAGrE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;UAC9C,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;;QAElD,OAAS,CAAC,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;;MAE3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;UAC9C,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;;;;;;;;ACvUzE;;;;;;;;;;;;;;;;AAgBO,mBAAa;MAChB;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAClD,CAAE,QAAU,WAAW,MAAQ,OAAO,MAAQ;UAAY;YACtD,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,SAAS,MAAQ,cAAc,MAAQ;UACnD,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAClD,CAAE,QAAU,WAAW,MAAQ,OAAO,MAAQ;UAAY;YACtD,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,SAAS,MAAQ,cAAc,MAAQ;UACnD,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAClD,CAAE,QAAU,WAAW,MAAQ,OAAO,MAAQ;UAC9C,CAAE,QAAU,SAAS,MAAQ,cAAc,MAAQ;UAAc;YAC7D,QAAU;YACV,MAAQ;YACR,MAAQ;;UAEZ,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAClD,CAAE,QAAU,WAAW,MAAQ,OAAO,MAAQ;UAAY;YACtD,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,SAAS,MAAQ,cAAc,MAAQ;UACnD,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CA
AE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAClD,CAAE,QAAU,WAAW,MAAQ,OAAO,MAAQ;UAAY;YACtD,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,SAAS,MAAQ,cAAc,MAAQ;UACnD,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,UAAU,MAAQ,UAAU,MAAQ;UAChD,CAAE,QAAU,WAAW,MAAQ,OAAO,MAAQ;UAAY;YACtD,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;UAAQ;YACvE,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;MAI5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;UACnE,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAClD,CAAE,QAAU,WAAW,MAAQ,OAAO,MAAQ;UAC9C,CAAE,QAAU,iBAAiB,MAAQ,iBAAiB,MAAQ;UAAU;YACpE,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,aAAa,MAAQ,aAAa,MAAQ;;;MAG9D;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,KAAK,GAAG,MAAQ,QAAQ,MAAQ;;QAElD,OAAS;UACL,CAAE,QAAU,YAAY,MAAQ,WAAW,MAAQ;UACnD,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;UACnE,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAClD,CAAE,QAAU,WAAW,MAAQ,OAAO,MAAQ;UAC9C;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB,CAAC,GAAG,GAAG,GAAG;;UAE9B;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;MAI5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,eAAe,MAAQ;;QAEjD,OAAS;UACL,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAClD,CAAE,QAAU,WAAW,MAAQ,OAAO,MAAQ;UAC9C;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;MAI5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAClD,CAAE,QAAU,WAAW,MAAQ,OAAO,MAAQ;UAAY;YACtD,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,aAAa,MAAQ,aAAa,MAAQ;;;MAG9D;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAClD,CAAE,QAAU,WAAW,MAAQ,OAAO,MAAQ;UAAY;YACtD,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,aAAa,MAAQ,aAAa,MAAQ;;;MAG9D;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,KAAK,GAAG,MAAQ,QAAQ,MAAQ;;QAElD,OAAS;UACL,CAAE,QAAU,YAAY,MAAQ,WAAW,MAAQ;UACnD,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;UACnE,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAClD,CAAE,QAAU,WAAW,MAAQ,OAAO,MAAQ;UAAY;YACtD,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB,CAAC,GAAG,GAAG,GAAG;;UAE9B;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;MAI5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAClD,CAAE,QAAU,WAAW,MAAQ,OAAO,MAAQ;UAAY;YACtD,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,aAAa,MAAQ,aAAa,MAAQ;;;MAG9D;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAClD,CAAE,QAAU,SAAS,MAAQ,aAAa,MAAQ;UAClD,CAAE,QA
AU,WAAW,MAAQ,OAAO,MAAQ;;;;;;;;ACnV1D;;;;;;;;;;;;;;;;AAgBO,mBAAa;MAChB;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;;QAE3C,OAAS,CAAC,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ;;MAExD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;UACtC,CAAE,OAAS,GAAG,MAAQ,OAAO,MAAQ;;QAEzC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,WAAW,MAAQ;UACzC,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,WAAW,MAAQ,UAAU,cAAgB;UACnE,CAAE,OAAS,GAAG,MAAQ,YAAY,MAAQ,UAAU,cAAgB;;QAExE,OAAS;UACL;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;;QAE3C,OAAS,CAAC,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ;;MAExD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS,CAAC,CAAE,QAAU,SAAS,MAAQ,SAAS,MAAQ;;MAE5D;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;;QAE3C,OAAS;UACL;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,SAAS,MAAQ,SAAS,MAAQ;UAC9C,CAAE,QAAU,QAAQ,MAAQ,QAAQ,MAAQ,UAAU,cAAgB;UAAK;YACvE,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;YAChB,cAAgB;;UAEpB,CAAE,QAAU,KAAK,MAAQ,KAAK,MAAQ,UAAU,cAAgB;;;MAGxE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;UACtC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ,UAAU,cAAgB;;QAEpE,OAAS,CAAC,CAAE,QAAU,QAAQ,MAAQ,SAAS,MAAQ;;MAE3D;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;;QAE3C,OAAS;UACL;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,QAAQ,MAAQ,QAAQ,MAAQ;UAAY;YACpD,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;YAChB,cAAgB;;UAEpB,CAAE,QAAU,SAAS,MAAQ,SAAS,MAAQ;UAC9C,CAAE,QAAU,KAAK,MAAQ,KAAK,MAAQ,UAAU,cAAgB;;;MAGxE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;;QAE3C,OAAS,CAAC,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ;;MAExD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS,CAAC,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ;;MAExD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,cAAc,MAAQ;;QAEhD,OAAS;UACL,CAAE,QAAU,QAAQ,MAAQ,QAAQ,MAAQ;UAC5C,CAAE,QAAU,SAAS,MAAQ,SAAS,MAAQ;UAC9C,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ;UAC1C,CAAE,QAAU,gBAAgB,MAAQ,gBAAgB,MAAQ;;;;;;;;AC1KxE;;;;;;;;;;;;;;;;AAgBO,mBAAa;MAChB;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,iBAAiB,MAAQ;UAC/C,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;;;MAGtD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,iBAAiB,MAAQ;UAC/C,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;UAC9C,CAAE,OAAS,GAAG,MAAQ,kBAAkB,MAAQ;;;MAGxD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,iBAAiB,MAAQ;UAC/C,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;UAC9C,CAAE,OAAS,GAAG,MAAQ,kBAAkB,MAAQ;;QAEpD,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;UAAQ;YACvE,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;;;;MAIpB;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,iBAAiB,MAAQ;UAC/C,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;UAC9C,CAAE,OAAS,GAAG,MAAQ,kBAAkB,MAAQ;UAChD,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;;;MAGtD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,aAAa,MAAQ;;QAE
/C,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS,CAAC;UACF,QAAU;UACV,MAAQ;UACR,MAAQ;UACR,cAAgB;;;;;;;;AC/FhC;;;;;;;;;;;;;;;;AAgBO,mBAAa;MAChB;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS,CAAC,CAAE,QAAU,UAAU,MAAQ,UAAU,MAAQ;;MAE9D;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;;MAG3C;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;;;;;;;;ACtClD;;;;;;;;;;;;;;;;AAgBO,mBAAa;MAChB;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,WAAW,MAAQ;;QAE7C,OAAS;UACL,CAAE,QAAU,SAAS,MAAQ,SAAS,MAAQ;UAC9C,CAAE,QAAU,SAAS,MAAQ,SAAS,MAAQ;;;MAGtD;QACI,UAAY;QACZ,UAAY;QACZ,OAAS;UACL,CAAE,QAAU,SAAS,MAAQ,SAAS,MAAQ;UAC9C,CAAE,QAAU,SAAS,MAAQ,SAAS,MAAQ;;;MAGtD,CAAE,UAAY,SAAS,UAAY;MAAW;QAC1C,UAAY;QACZ,UAAY;QACZ,QAAU,CAAC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;MAElD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU,CAAC,CAAE,OAAS,GAAG,KAAO,GAAG,MAAQ,KAAK,MAAQ;;MAE5D;QACI,UAAY;QACZ,UAAY;QACZ,QAAU,CAAC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;MAElD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU,CAAC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;MAElD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU,CAAC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;MAElD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU,CAAC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;MAElD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU,CAAC,CAAE,OAAS,GAAG,KAAO,GAAG,MAAQ,KAAK,MAAQ;;MAE5D;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;;QAE1C,OAAS;UACL,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAAY;YAC1D,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;MAI5B,CAAE,UAAY,QAAQ,UAAY,SAAS,QAAU;MAAM;QACvD,UAAY;QACZ,UAAY;QACZ,QAAU,CAAC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;MAElD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,OAAO,MAAQ,OAAO,MAAQ;UAC1C,CAAE,QAAU,OAAO,MAAQ,OAAO,MAAQ;;;;;;;;AC1G/C,mBAAa;MAChB;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;QACV,OAAS;UACL,CAAE,QAAU,eAAe,MAAQ,cAAc,MAAQ;UACzD;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;;UAEZ,CAAE,QAAU,aAAa,MAAQ,YAAY,MAAQ;UACrD,CAAE,QAAU,eAAe,MAAQ,cAAc,MAAQ;;;MAGjE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;QACV,OAAS;UACL,CAAE,QAAU,eAAe,MAAQ,cAAc,MAAQ;UACzD;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;;UAEZ,CAAE,QAAU,aAAa,MAAQ,YAAY,MAAQ;UACrD,CAAE,QAAU,eAAe,MAAQ,cAAc,MAAQ;;;MAGjE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,eAAe,MAAQ;UAC7C,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;UACtC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,OAAO,MAAQ,OAAO,MAAQ,SAAS,cAAgB;UAAQ;YACvE,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;MAI5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,eAAe,MAAQ;UAC7C,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;UACtC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,OAAO,MAAQ,OAAO,MAAQ,SAAS,cAAgB;UAAQ;YACvE,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;MAI5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,eAAe,MAAQ;UAC7C,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;UACtC,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;;QAElD,OAAS;UACL,CAAE,QAAU,OAAO,MAAQ,OAAO,MAAQ,SAAS,cAAgB;UAAQ;YACvE,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;MAI5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,eAAe,MAAQ;UAC7C,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;UACtC,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;;QAElD,OAAS;UACL,CAAE,QAAU,OAAO,MAAQ,OAAO,MAAQ,SAAS,cAAgB;UAAQ;YACvE,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;;;;;;AC/FhC;;;;;;;;;;;;;;;;AAgBO,mBAAa;MAChB;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;;QAE1C,OAAS;UACL,CAAE,QAAU,iBAAi
jB,uBAAc,KAAK,UAAU;AAC7B,wBAAgB;AAChB,YAAI,KAAK,UAAU,uBAAuB;AACtC,sBACI,KAAK,UAAU,oBAAoB;;AAE3C,aAAK,UAAU,GAAG,OAAM,SAAS,YAAY,OAAM,SAAS;AAC5D,0BAAkB,cAAiB,KAAK,UAAU,YAAY,KAAK,UAAU;AAC7E,aAAK,WAAW,IAAI,cAAc,gBAAgB,SAAS,eAAe,QAAO;AACjF,aAAK,SAAS,YAAY,KAAK,6BAA6B;AAG5D,aAAK,SAAS,kBAAkB,KAAK;AACrC,YAAI,UAAU,oBAAoB;AAC9B,8BAAoB,gBAAgB,SAAS,eAAe,UAAU;AACtE,eAAK,cAAc,IAAI,cAAc;AACrC,eAAK,YAAY,YAAY,KAAK,SAAS;AAI3C,eAAK,YAAY,kBAAkB,KAAK;AACxC,eAAK,YAAY,aAAa,IAAI;;AAEtC,eAAO;;YA8CL;AACF,YAAI,OAAO,iBAAiB;AACxB,2BAAiB,gBAAmB;AACpC,cAAI,SAAS,WAAW;AACpB,kBAAM,IAAI,MAAM,0CAA0C;qBAErD,SAAS,SAAS;AACvB,kBAAM,IAAI,MAAM,wBAAwB,SAAS,kCACrC;;AAEhB,yBAAe,SAAS;;AAE5B,YAAI,aAAa,QAAQ;AACrB,gBAAM,IAAI,MAAM;;AAGpB,eAAO,aAAa,KAAK,KAAK;;MAwClC;AACI,eAAO,KAAK,QAAQ,QAAQ,KAAK;;MAErC;AACI,YAAI,CAAE,mBAAkB,YAAW,CAAC,MAAM,QAAQ;AAE9C,iBAAO;;AAEX,iBAAS,MAAM,QAAQ,UAAU,SAAS,CAAC;AAC3C,YAAI,OAAO,WAAW,KAAK,WAAW;AAClC,gBAAM,IAAI,MAAM,mDACW,KAAK,WAAW,wCACpB,OAAO;;AAElC,eAAO,KAAK,WAAW,OAAO;AAC1B,cAAI,aAAa,OAAO;AACxB,iBAAO;WACR;;MAEP;AACI,kBAAU,WAAW,KAAK;AAC1B,eAAO,CAAC,MAAM,QAAQ,WAAW,CAAC,WAAW;;MAkBjD;AACI,iBAAS,KAAK,gBAAgB;AAC9B,kBAAU,KAAK,iBAAiB;AAChC,uBAAe,KAAK,SAAS,QAAQ,QAAQ;AAC7C,eAAO,OAAO,SAAS,IAAI,SAAS,OAAO;;YAkBzC;AACF,iBAAS,KAAK,gBAAgB;AAC9B,kBAAU,KAAK,iBAAiB;AAChC,uBAAe,MAAM,KAAK,SAAS,aAAa,QAAQ;AACxD,eAAO,OAAO,SAAS,IAAI,SAAS,OAAO;;MAE/C;AACI,eAAO,OAAO,KAAK,KAAK,OAAO;AAC3B,iBAAO,OAAO,CAAC,IAAI;AACnB,iBAAO;WACR;;MAOP;AACI,aAAK,SAAS;AACd,YAAI,KAAK;AACL,eAAK,YAAY;;AAErB,aAAK,gBAAgB;;;AAiCtB,uDAAkD;AACrD,UAAI,YAAY;AACZ,cAAM,IAAI,MAAM;;AAGpB,UAAI,WAAW;AACX,kBAAU;;AAEd,UAAI,QAAQ;AACR,YAAI,SAAS,QAAQ;AACjB,cAAI,CAAC,SAAS,SAAS;AACnB,uBAAW,WAAW;;AAE1B,qBAAW,GAAG,WAAW,qBAAqB;;;AAGtD,qBAAc,IAAI,WAAW,UAAU;AACvC,YAAM,OAAM;AACZ,aAAO;;ACrXX;AAEK,sBAAW;ACFhB;;;;;;;;;;;;;;;;ACAA;;;;;;;;;;;;;;;;;AAmCO;AACH,aAAO,gBAAgB,QAAO;;AAQlC,mDAA8C,IAAI,qBAAqB,IAAI;AACvE,UAAI,UAAS;AACT,eAAO;;AAEX,UAAI,YAAY,IAAI;AAChB,cAAM,IAAI,MAAM;;AAEpB,UAAI,KAAK,IAAI;AACT,eAAO,KAAK,IAAI;;AAEpB,qBAAe,MAAM;AACrB,UAAI,OAAO,WAAW,OAAO,UAAU;AACnC,cAAM,IAAI,MAAM;;AAEpB,UAAI,CAAC,OAAO;AACR,aAAK,IAAI,QAAO,OAAO;AACvB,eAAO,OAAO;iBAET,aAAW;AAEhB,+BAAuB,MAAM,QAAQ,UAAS,KAAK;AACnD,oBAAY,IAAI;AAChB,wBAAgB;AACZ,wBAAc,OAAM;AACpB,8BAAoB,gBAAgB,OAAO,OAAO,MAAM;AACxD,yBAAe,KAAK;;AAExB,oBAAY,OAAO;AACnB,eAAO;;AAGP,cAAM,IAAI,MAAM,yCAAyC;;;AA2B1D,qCAAiC;AACpC,aAAO,gBAAgB,QAAQ;;AAMnC,0DAAsD,IAAI;AAGtD,qBAAc,OAAO;AACrB,UAAI,YAAY,IAAI;AAChB,cAAM,IAAI,MAAM;;AAEpB,qBAAe,MAAM;AACrB,UAAI,OAAO,WAAW,OAAO,UAAU;AACnC,cAAM,IAAI,MAAM;;AAEpB,UAAI,CAAC,OAAO;AACR,eAAO,OAAO;iBAET,aAAW;AAEhB,+BAAuB,MAAM,QAAQ,UAAS,KAAK;AACnD,oBAAY,IAAI;AAChB,wBAAgB;AACZ,2BAAiB,OAAO,IAAI,OAAK,EAAE;AACnC,8BAAoB,gBAAgB,UAAU,OAAO;AACrD,yBAAe,KAAK;;AAExB,oBAAY,OAAO;AACnB,eAAO;;AAGP,cAAM,IAAI,MAAM,yCAAyC;;;AAI1D;AACH,UAAI,MAAM;AACN,eAAO;;AAGX,UAAI,aAAW,EAAE;AACb,eAAO,CAAE,OAAO,MAAM,SAAS;;AAG/B,eAAO,CAAE,OAAO,GAAG,SAAS;;;AAyB7B;AACH,mBAAa,IAAI;AAEjB,sBAAgB,QAAO,OAAO;AAK9B,wBAAkB,MAAM,KAAK,KAAK;AAC9B,sBAAc,KAAK,IAAI;AACvB,YAAI,WAAkB;AAClB,8BAAoB,MAAM;AAC1B,eAAK,IAAI,KAAK;;;AAMtB,qBAAe,gBAAgB,QAAO,OAAO;AAC7C,aAAO;;AAQJ;AACH,aAAO,OAAO,QAAS,CAAC,YAAY,OAAO,QACtC,OAAM,QAAQ,QACV,OAAO,QAAQ,YAAY,CAAE,gBAAe;;AAWlD;AACH,aAAO,OAAO,QAAQ,YAAY,QAAQ,MAAM,QAAQ,QACnD,OAAO,QAAQ,YAAa,eAAe,WAC5C,cAAqB;;AAM7B;AACI,aAAQ,UAAU,QACb,OAAO,UAAU,YAAY,OAAO,UAAU;;AClOvD;;;;;;;;;;;;;;;;;AAmBO;AACH,aAAO,QAAQ,WAAW;;AAG9B;AACI,UAAI,gBAAgB;AAChB,eAAQ,CAAE,OAAO,KAAK,SAAS,SAAS;iBAEnC,aAAW;AAChB,eAAO,CAAE,OAAO,MAAM,SAAS;;AAG/B,eAAO,CAAE,OAAO,MAAM,SAAS;;;AC/BvC;;;;;;;;;;;;;;;;;;MAyBI;AACI,aAAK,WAAW;AAIhB,aAAK,QAAQ;AACb,aAAK,MAAM;AACX,YAAI,YAAY;AACZ,gBAAM,IAAI,WAAW;;AAEzB,YAAI,
WAAW;AACX,gBAAM,IAAI,WAAW;;AAEzB,aAAK,OAAO,IAAI,MAAM;AACtB,aAAK,kBAAkB,IAAI;;MAK/B;AAEI,eAAO,SAAQ;AACX,oBAAS,KAAK;;AAElB,eAAO,SAAQ,KAAK;;MAExB;AACI,YAAI,SAAQ;AACR,gBAAM,IAAI,WAAW;;AAEzB,eAAO,KAAK,KAAK,SAAQ,KAAK;;MAElC;AACI,YAAI,SAAQ;AACR,gBAAM,IAAI,WAAW;;AAEzB,aAAK,KAAK,SAAQ,KAAK,YAAY;;MAKvC;AACI,qBAAa,KAAK,MAAM,KAAK;AAC7B,YAAI,SAAS;AACT,mBAAS,KAAK,kBAAkB;;AAEpC,eAAO;;MAOX;AACI,eAAO,KAAK,aAAa,KAAK;;MAOlC;AACI,eAAO,KAAK,aAAa;;MAK7B;AACI,YAAI,KAAK;AACL,gBAAM,IAAI,WAAW;;AAEzB,aAAK,IAAI,KAAK,KAAK;AACnB,aAAK,MAAM,KAAK,KAAK,KAAK,MAAM;;MAKpC;AACI,4BAAoB;AAChB,eAAK,KAAK;;;MAMlB;AACI,YAAI,KAAK;AACL,gBAAM,IAAI,WAAW;;AAEzB,aAAK,MAAM,KAAK,KAAK,KAAK,MAAM;AAChC,uBAAe,KAAK,IAAI,KAAK;AAC7B,aAAK,IAAI,KAAK,KAAK;AACnB,eAAO;;MAKX;AACI,YAAI,KAAK;AACL,gBAAM,IAAI,WAAW;;AAEzB,aAAK,QAAQ,KAAK,KAAK,KAAK,QAAQ;AACpC,aAAK,IAAI,KAAK,OAAO;;MAKzB;AACI,YAAI,KAAK;AACL,gBAAM,IAAI,WAAW;;AAEzB,uBAAe,KAAK,IAAI,KAAK;AAC7B,aAAK,IAAI,KAAK,OAAO;AACrB,aAAK,QAAQ,KAAK,KAAK,KAAK,QAAQ;AACpC,eAAO;;MAWX;AACI,YAAI,KAAK;AACL,gBAAM,IAAI,WAAW;;AAEzB,uBAAc,KAAK,KAAK,KAAK,QAAQ;AACrC,uBAAe,KAAK,IAAI;AACxB,aAAK,IAAI,QAAO,KAAK;AACrB,eAAO;;;AC7Jf;;;;;;;;;;;;;;;;;oCAkBuC;MAInC;AACI,cAAM,kBAAkB;;MAE5B;AACI,eAAO;;MAEX;AACI,YAAI,MAAM;AACN,eAAK;;AAET,cAAM,KAAK;;MAEf;AACI,YAAI,MAAM;AACN,eAAK;;AAET,cAAM,QAAQ;;MAKlB;AACI,4BAAoB,KAAK,WAAW;AACpC,wBAAgB,IAAI,MAAM;AAC1B,oBAAY,KAAK;AAGjB,qBAAa,GAAG,IAAI,KAAK;AACrB,kBAAQ,KAAK,KAAK,IAAI,KAAK,KAAK,KAAK,QAAQ;;AAEjD,aAAK,OAAO;AACZ,aAAK,WAAW;AAChB,aAAK,kBAAkB,IAAI,KAAK;AAChC,aAAK,QAAQ;AACb,aAAK,MAAM;;;AAGnB,sBAAkB,mBAAmB;AC3DrC;;;;;;;;;;;;;;;;;AA6BO;AACH,aAAO,IAAI,cAAc;;AAKtB;AACH,cAAQ;AACR,aAAO,qBAAqB,MAAO,EAAE,OAAO,KAAK,MAAM;;AAepD;AACH,aAAO,IAAI,qBAAqB;;AAc7B;AACH,aAAO,IAAI,gBAAgB,eAAe;;AAkBvC;AACH,aAAO,yBAAyB,qBAAqB,cAAc,KAAK,SAAQ;;AA0B7E,0DAAsD,gBAAgB;AACzE,aAAO,IAAI,YAAY,WAAW;;;YAkB5B;AACF,uBAAe;AACf,gBAAQ,MAAM,KAAK;AACnB,eAAO,CAAC,EAAE;AACN,iBAAO,KAAK,EAAE;AACd,cAAI,MAAM,KAAK;;AAEnB,eAAO;;YAaL;AACF,uBAAe,KAAK,SAAS;AAC7B,uBAAe;AACf,gBAAQ,MAAM,OAAO;AACrB,eAAO,CAAC,EAAE;AACN,iBAAO,KAAK,EAAE;AACd,cAAI,MAAM,OAAO;;AAErB,eAAO;;YASL;AACF,gBAAQ,MAAM,KAAK;AACnB,eAAO,CAAC,EAAE;AACN,cAAI,MAAM,KAAK;;;YAUjB;AACF,gBAAQ,MAAM,KAAK;AACnB,6BAAqB,UAAU,EAAE;AACjC,eAAQ,CAAC,EAAE,QAAS;AAChB,cAAI,MAAM,KAAK;AACf,2BAAiB,UAAU,EAAE;;;MAerC;AACI,eAAO,IAAI,0BAA0B,MAAM;;MAW/C;AACI,eAAO,IAAI,eAAe,MAAM;;MAUpC;AACI,eAAO,IAAI,YAAY,MAAM;;MAUjC;AACI,eAAO,IAAI,iBAAiB,MAAM;;MAUtC;AACI,eAAO,IAAI,iBAAiB,MAAM,WAAW;;MAUjD;AACI,eAAO,IAAI,gBAAgB,MAAM;;YAO/B;AACF,eAAO,KAAK,IAAI,GAAG;;YASjB;AACF,eAAO,KAAK,eAAe,GAAG,aAAa,OAAM,MAAM;;MAoB3D,0CAA0C;AACtC,eAAO,IAAI,sBAAsB,MAAM,WAAW;;MAkCtD,6CAA6C,cAErC;AAEJ,2BAAmB,KAAK,cAAc,WAAW;AAGjD,eAAO,WAAW,IAAI,OAAK,QAAQ,GAAG;;MAY1C;AACI,eAAO,IAAI,gBAAgB,kBAAkB,CAAC,MAAM,YAAY;;MASpE;AACI,YAAI,SAAQ,KAAK,UAAS;AACtB,iBAAO;;AAEX,eAAO,IAAI,aAAa,MAAM;;MAQlC;AACI,YAAI,SAAQ,KAAK,UAAS;AACtB,iBAAO;;AAEX,eAAO,IAAI,aAAa,MAAM;;MAWlC;AACI,eAAO,IAAI,iBAAiB,MAAM;;MAWtC;AACI,eAAO,IAAI,gBAAgB,MAAM,YAAY;;MAMjD;AACI,eAAO,IAAI,eAAe;;;gCAUN;MACxB;AACI;AACA,aAAK,QAAQ;AACb,aAAK,OAAO;;MAEhB;AACI,eAAO,YAAY,KAAK,MAAM;;YAE5B;AACF,YAAI,KAAK,QAAQ,KAAK,MAAM;AACxB,iBAAO,CAAE,OAAO,MAAM,MAAM;;AAEhC,qBAAa,KAAK,MAAM,KAAK;AAC7B,aAAK;AACL,eAAO,CAAE,OAAO,UAAU,OAAO,MAAM;;;uCAGZ;MAC/B;AACI;AACA,aAAK,SAAS;;MAElB;AACI,eAAO;;YAEL;AACF;AACI,iBAAO,KAAK;;AAIZ,YAAE,UACE,mDAAmD,EAAE;AACzD,gBAAM;;;;iCAIW;MACzB;AACI;AACA,aAAK,WAAW;AAChB,aAAK,WAAW,QAAQ,QAAQ,CAAE,OAAO,MAAM,MAAM;;MAEzD;AACI,eAAO,GAAG,KAAK,SAAS;;YAEtB;AAKF,aAAK,WAAW,KAAK,SAAS,KAAK,MAAM,KAAK;AAC9C,eAAO,KAAK;;YAEV;AACF,eAAO,KAAK,SAAS;;;+BAGF;MACvB;AACI;AACA,aAAK,WAAW;AAChB,aAAK,WAAW;AAEhB,aAAK,QAAQ;AACb,aAAK,WAAW
,QAAQ,QAAQ,CAAE,OAAO,MAAM,MAAM;;MAEzD;AACI,eAAO,GAAG,KAAK,SAAS;;YAEtB;AAKF,aAAK,WAAW,KAAK,SAAS,KAAK,MAAM,KAAK;AAC9C,eAAO,KAAK;;YAEV;AAKF,eAAO,KAAK,UAAU,KAAK;AACvB,0BAAgB,MAAM,KAAK,SAAS;AAEpC,cAAI,QAAQ;AACR,mBAAO;;AAEX,kBAAW,QAAQ;;AAEvB,eAAO,KAAK,SAAS;;;+BAGF;MACvB;AACI;AACA,aAAK,WAAW;AAChB,aAAK,WAAW;AAChB,aAAK,QAAQ;;MAEjB;AACI,eAAO,GAAG,KAAK,SAAS;;YAEtB;AACF,YAAI,KAAK,WAAW,KAAK;AACrB,iBAAO,CAAE,OAAO,MAAM,MAAM;;AAEhC,eAAO,KAAK,SAAS;;;wCAMO;MAChC,wDAAwD;AACpD;AACA,aAAK,WAAW;AAChB,aAAK,YAAY;AACjB,aAAK,uBAAuB;AAC5B,aAAK,WAAW,QAAQ,QAAQ,CAAE,OAAO,MAAM,MAAM;;MAEzD;AACI,eAAO,GAAG,KAAK,SAAS;;YAEtB;AAKF,aAAK,WAAW,KAAK,SAAS,KAAK,MAAM,KAAK;AAC9C,eAAO,KAAK;;YAEV;AACF,sBAAc;AACd,eAAO,MAAM,SAAS,KAAK;AACvB,uBAAa,MAAM,KAAK,SAAS;AACjC,cAAI,KAAK;AACL,gBAAI,KAAK,wBAAwB,MAAM,SAAS;AAC5C,qBAAO,CAAE,OAAO,OAAO,MAAM;;AAEjC,mBAAO,CAAE,OAAO,MAAM,MAAM;;AAEhC,gBAAM,KAAK,KAAK;;AAEpB,eAAO,CAAE,OAAO,OAAO,MAAM;;;iCAGR;MACzB;AACI;AACA,aAAK,WAAW;AAChB,aAAK,YAAY;AACjB,aAAK,WAAW,QAAQ,QAAQ,CAAE,OAAO,MAAM,MAAM;;MAEzD;AACI,eAAO,GAAG,KAAK,SAAS;;YAEtB;AAKF,aAAK,WAAW,KAAK,SAAS,KAAK,MAAM,KAAK;AAC9C,eAAO,KAAK;;YAEV;AACF,eAAO;AACH,uBAAa,MAAM,KAAK,SAAS;AACjC,cAAI,KAAK,QAAQ,KAAK,UAAU,KAAK;AACjC,mBAAO;;AAEX,kBAAW,KAAK;;;;8BAIF;MACtB;AACI;AACA,aAAK,WAAW;AAChB,aAAK,YAAY;;MAErB;AACI,eAAO,GAAG,KAAK,SAAS;;YAEtB;AACF,qBAAa,MAAM,KAAK,SAAS;AACjC,YAAI,KAAK;AACL,iBAAO,CAAE,OAAO,MAAM,MAAM;;AAEhC,6BAAqB,uBAAqC,KAAK;AAO/D,uBAAe,KAAK,UAAU,KAAK;AACnC,8BAAsB,uBAAqC;AAG3D,wBAAgB;AACZ,cAAI,CAAC,eAA8B,GAAG;AAClC,cAAE;;;AAGV,eAAO,CAAE,OAAO,QAAQ,MAAM;;;4CAGE;MACpC;AACI;AACA,aAAK,WAAW;AAChB,aAAK,UAAU;AACf,aAAK,QAAQ;AACb,aAAK,WAAW,QAAQ,QAAQ,CAAE,OAAO,MAAM,MAAM;;MAEzD;AACI,eAAO,GAAG,KAAK,SAAS;;YAEtB;AAKF,aAAK,WAAW,KAAK,SAAS,KAAK,MAAM,KAAK;AAC9C,eAAO,KAAK;;YAEV;AACF,eAAO;AACH;AACI,mBAAO,MAAM,KAAK,SAAS;;AAG3B,gBAAI,CAAC,KAAK,QAAQ;AACd,qBAAO,CAAE,OAAO,MAAM,MAAM;;;;;;mCAUjB;MAC3B;AACI;AACA,aAAK,WAAW;AAChB,aAAK,YAAY;;MAErB;AACI,eAAO,GAAG,KAAK,SAAS;;YAEtB;AACF,qBAAa,MAAM,KAAK,SAAS;AACjC,YAAI,KAAK;AACL,iBAAO,CAAE,OAAO,MAAM,MAAM;;AAEhC,6BAAqB,uBAAqC,KAAK;AAO/D,uBAAe,MAAM,KAAK,UAAU,KAAK;AACzC,8BAAsB,uBAAqC;AAG3D,wBAAgB;AACZ,cAAI,CAAC,eAA8B,GAAG;AAClC,cAAE;;;AAGV,eAAO,CAAE,OAAO,QAAQ,MAAM;;;oCAaC;MACnC;AACI;AACA,aAAK,cAAc,IAAI;AACvB,aAAK,WAAW,QAAQ,QAAQ,CAAE,OAAO,MAAM,MAAM;;YAEnD;AAKF,aAAK,WAAW,KAAK,SAAS,KAAK,MAAM,KAAK;AAC9C,eAAO,KAAK;;YAEV;AAIF,eAAO,KAAK,YAAY,aAAa;AAEjC,cAAI,CAAC,MAAM,KAAK;AACZ,mBAAO,CAAE,OAAO,MAAM,MAAM;;;AAGpC,eAAO,CAAE,OAAO,KAAK,YAAY,SAAS,MAAM;;;kCAG1B;MAC1B;AACI;AACA,aAAK,WAAW;AAChB,aAAK,YAAY;;MAErB;AACI,eAAO,GAAG,KAAK,SAAS;;YAEtB;AACF,qBAAa,MAAM,KAAK,SAAS;AACjC,YAAI,KAAK;AACL,iBAAO;;AAEX,6BAAqB,uBAAqC,KAAK;AAM/D,4BAAoB,KAAK,UAAU,KAAK;AACxC,8BAAsB,uBAAqC;AAC3D,aAAK,YAAY,QAAQ;AAGzB,wBAAgB;AACZ,cAAI,CAAC,eAA8B,GAAG;AAClC,cAAE;;;AAGV,eAAO;;;kCAYsB;MACjC;AACI;AACA,aAAK,mBAAmB;AAGxB,aAAK,WAAW;AAEhB,aAAK,WAAW;AAChB,aAAK,gBAAgB;;MAEzB;AACI,kCAA0B;AAC1B,eAAO,GAAG;;YAER;AACF,aAAK,WAAW,KAAK,cAAc,KAAK;AACxC,eAAO,KAAK;;YAEV;AAMF,cAAM;AACN,YAAI,KAAK,YAAY;AACjB,iCAAuB,MAAM,KAAK,cAAc;AAChD,cAAI,eAAe;AAEf,mBAAO,CAAE,OAAO,MAAM,MAAM;;AAEhC,eAAK,WAAW,eAAe;AAC/B,cAAI,KAAK,oBAAoB;AACzB,iBAAK,WAAW,KAAK,SAAS,aAAa,KAAK;;;AAGxD,2BAAmB,MAAM,KAAK,SAAS;AACvC,YAAI,WAAW;AACX,eAAK,WAAW;AAChB,iBAAO,KAAK,cAAc;;AAE9B,eAAO;;;AAGR;AACP,IAAC;AACG,uBAAgB,iBAAgB,UAAU,KAAK;AAC/C,uBAAgB,iBAAgB,cAAc,KAAK;AACnD,uBAAgB,iBAAgB,aAAa,KAAK;OACnD,mBAAoB,mBAAkB;8BA8Bf;MACtB,sCAAsC,gBAAgB;AAClD;AACA,aAAK,YAAY;AACjB,aAAK,eAAe;AACpB,aAAK,QAAQ;AACb,aAAK,iBAAiB;;MAE1B;AACI,kCAA0B;AAC1B,eAAO,IAAI;;YAET;AAGF,cAAM;AAGN,2BAAmB;AACnB,4BAAoB;AACpB;AACI,cAAI,qBAAqB;AACrB,2BAAe,UAAU;AACzB,mBAAO;cACH,OAAO,OAAO,KAAK;A
ACf;AACA,oBAAI,EAAE;AACF;;AAEJ,uBAAO,EAAE;;cAEb,SAAS;;;AAIb,mBAAO,CAAE,OAAO,MAAM,SAAS;;;AAGvC,uBAAe,MAAM,mBAAmB,KAAK,WAAW;AACxD,YAAI,iBAAiB;AAEjB,iBAAO,CAAE,OAAO,MAAM,MAAM;;AAEhC,YAAI,gBAAgB;AAChB,kBAAQ,KAAK;iBACJ,gBAAgB;AACjB,oBAAM,IAAI,MAAM,qEACa,KAAK;iBACjC,gBAAgB;AACjB,qBAAO,CAAE,OAAO,MAAM,MAAM;iBAC3B,gBAAgB;;;;AAK7B,aAAK;AACL,eAAO,CAAE,OAAO,QAAQ,MAAM;;YAE5B;AACF,aAAK,iBAAiB,KAAK,UAAU,KAAK;AAC1C,eAAO,KAAK;;;mCAYkB;MAClC;AACI;AACA,aAAK,WAAW;AAChB,aAAK,aAAa;AAClB,aAAK,SAAS,IAAI,WAAW;;MAEjC;AACI,eAAO,GAAG,KAAK,SAAS;;MAM5B;AACI,eAAO,CAAC,KAAK,OAAO;AAChB,oBAAU,KAAK,SAAS;AACxB,eAAK,OAAO,KAAK;;;MAGzB;AACI,aAAK;AAIL,eAAO,KAAK,OAAO;;;kCASU;MACjC;AACI,cAAM,UAAU;AAChB,aAAK,WAAW;AAChB,aAAK,aAAa;AAElB,aAAK,oBAAoB;AACzB,aAAK,SAAS,aAAgB,QAAQ,OAAc;AACpD,aAAK,WAAW,QAAQ,QAAQ,CAAE,OAAO,MAAM,MAAM;;YAEnD;AAKF,aAAK,WAAW,KAAK,SAAS,KAAK,MAAM,KAAK;AAC9C,eAAO,KAAK;;MAEhB;AACI,eAAO,KAAK,MAAM,KAAK,WAAW;;MAEtC;AACI,eAAO,KAAK,UAAU,KAAK,OAAO;;YAEhC;AAEF,YAAI,CAAC,KAAK;AACN,eAAK;;AAET,eAAO,CAAC,KAAK,OAAO;AAChB,8BAAoB,KAAK;AACzB,yBAAe,MAAM,KAAK,OAAO,cAAc;AAC/C,cAAI,OAAO;AACP,iBAAK,oBAAoB;;AAGzB,iBAAK;AACL,mBAAO;;;AAGf,eAAO,CAAE,OAAO,MAAM,MAAM;;;AC1+BpC;;;;;;;;;;;;;;;;;;MAmDI;AACI,aAAK,OAAO;;MA8DhB,kCAAkC;AAC9B,sBAAa;AACb,gBAAe,YAAY,GAAG,MAAM;QACpC;AACA;AACA,YAAI,KAAK,SAAS,YAAY,KAAK,QAAQ;AAGvC,iBAAO,KAAK;mBAEP;AAGL,iBAAO,KAAK,KAAK,KAAK,OAAO;;AAK7B,iBAAO,KAAK,MAAM,KAAK,OAAO;;AAElC,eAAO,sBAAsB;AACzB,iBAAQ,OAAM,MAAK,YACd,iBAAiB,WAAW,gBAAgB;WAClD;;MAiBP;AACI,sBAAa;AACb;AACA,YAAI,KAAK,SAAS,YAAY,QAAQ,SAAS;AAG3C,iBAAO;mBAEF,KAAK,QAAQ,QAAQ,QAAQ,QAAQ;AAG1C,iBAAO,KAAK,OAAO,QAAQ;;AAK3B,iBAAO;;AAEX,eAAO,sBAAsB,YAAa,OAAM,MAAK,YAAY,YAAY,MAAM,QAAQ,aAAa;;MAkB5G;AACI,sBAAa;AACb;AACA,YAAI,KAAK,SAAS;AAEd,iBAAO;;AAKP,iBAAO;;AAEX,eAAO,sBAAsB;AACzB,iBAAQ,OAAM,MAAK,YAAY,OAAO,OAAK,KAAQ,MAAM,UAAU;WACpE;;YAkBD;AACF,eAAQ,OAAM,KAAK,YAAY,aAAa;;MAiBhD;AACI,sBAAa;AACb,eAAO,sBAAsB;AACzB,iBAAQ,OAAM,MAAK,YAAY,IAAI,OAAK,KAAQ,MAAM,UAAU;WACjE,KAAK;;MAyBZ;AACI,sBAAa;AACb,eAAO,sBAAsB;AACzB,iBAAQ,OAAM,MAAK,YAAY,SAAS;WACzC,KAAK;;MAWZ;AACI,YAAI,cAAc;AACd,gBAAM,IAAI,WAAW;;AAEzB,sBAAa;AACb,eAAO,sBAAsB,YAAa,OAAM,MAAK,YAAY,SAAS,aAAa,KAAK;;MAoBhG;AACI,sBAAa;AACb;AACA,YAAI,KAAK,QAAQ,QAAQ,SAAQ;AAI7B,iBAAO,KAAK,OAAO;mBAEd,WAAU;AAEf,iBAAO;mBAEF,KAAK,QAAQ,QAAS,YAAU,UAAa,SAAQ;AAG1D,iBAAO;;AAIP,iBAAO;;AAEX,eAAO,sBAAsB;AACzB,mCAAyB,qBAAqB,YAAa,EAAE,OAAO,MAAM,MAAK,YAAY,MAAM;AACjG,iBAAO,yBAAyB,iBAAiB,KAAK;WACvD;;MAmBP;AACI,sBAAa;AACb;AACA,YAAI,KAAK,QAAQ,QAAQ,UAAS,KAAK,KAAK,QAAQ;AAIhD,iBAAO,KAAK,OAAO;mBAEd,KAAK,QAAQ,QACjB,MAAK,OAAO,UAAS,WAAU,UAAa,SAAQ;AAGrD,iBAAO;;AAIP,iBAAO;;AAEX,eAAO,sBAAsB,YAAa,OAAM,MAAK,YAAY,KAAK,SAAQ;;MAuBlF,mDAAmD;AAC/C,YAAI,cAAc,QAAQ,aAAa;AACnC,cAAI,KAAK,QAAQ;AACb,kBAAM,IAAI,WAAW;;AAGrB,kBAAM,IAAI,WAAW,mNAGkB,KAAK;;;AAGpD,sBAAa;AACb,uBAAe,aAAgB,QAAQ,OAAc;AACrD,eAAO,sBAAsB;AACzB,sBAAY,OAAO;AACnB,cAAI;AACA,qBAAS,OAAO;;AAEpB,iBAAQ,OAAM,MAAK,YAAY,QAAQ,YAAY,MAAM;WAC1D,KAAK;;MAmBZ;AACI,sBAAa;AACb;AACA,YAAI,KAAK,QAAQ,QAAQ,KAAK,OAAO;AAGjC,iBAAO;mBAEF,KAAK,QAAQ,QAAQ,KAAK,QAAQ;AAGvC,iBAAO,KAAK;;AAIZ,iBAAO;;AAEX,eAAO,sBAAsB,YAAa,OAAM,MAAK,YAAY,KAAK,SAAQ;;YAkB5E;AACF,YAAI,KAAK,SAAS;AACd,gBAAM,IAAI,MAAM;;AAEpB,eAAQ,OAAM,KAAK,YAAY;;YAa7B;AACF,YAAI,KAAK,SAAS;AACd,gBAAM,IAAI,MAAM;;AAEpB,eAAQ,OAAM,KAAK,YAAY;;;AAIvC,YAAQ,kBAAkB;AAanB,sDAAkD;AACrD,aAAO,IAAI,cAAc;QACrB;AACI,gBAAM,GAAG;AACT,eAAK,OAAO;;cAMV;AACF,iBAAO;;;;AAsBZ;AACH,aAAO,sBAAsB,YAAY,kBAAkB,QAAQ,MAAM;;AA2CtE;AAEH,UAAI,CAAC,aAAW;AACZ,cAAM,IAAI,MAAM;;AAEpB;AACA,UAAI,MAAM,QAAQ;AACd,qBAAa,GAAG,IAAI,SAAS,QAAQ;AACjC,iBAAO,QAAQ,OAAO,SAAS,GAAG,OAC9B,KAAK,IAAI,MAAM,SAAS,GAAG;;iBAG9B,oBAAoB;AACzB,yBAAiB;AACb,iBAAO,QAAQ,
OAAO,SAAS,IAAI,OAC/B,KAAK,IAAI,MAAM,SAAS,IAAI;;;AAGxC,aAAO,sBAAsB;AACzB,wBAAgB,MAAM,mBAAmB,UAAU;AAC/C,cAAI,aAAa;AACb,mBAAO,CAAE,OAAO,EAAE,YAAY,SAAS;qBAElC,aAAW;AAChB,mBAAO,CAAE,OAAO,MAAM,SAAS;;AAG/B,kBAAM,IAAI,MAAM;;;AAIxB,eAAO,mBAAmB,SAAS,gBAAgB;SACpD;;AAUP;AACI,UAAI,SAAS;AACT,eAAO;;AAGX,yBAAmB,KAAK;AACxB,UAAI,aAAa;AAEb,sBAAc,YAAY;AAC1B,eAAO,CAAE,OAAO,SAAS;;AAG7B,aAAO,CAAE,OAAO,MAAM,SAAS;;AAMnC;AACI,UAAI,OAAO,WAAW;AAElB,cAAM,IAAI,MAAM;;AAEpB,UAAI,OAAO,cAAc;AAErB,eAAO,MAAS;;AAIhB,eAAO,QAAU;;;AC/oBzB;;;;;;;;;;;;;;;;;kCAuBqC;MAMjC;AACI;AACA,aAAK,QAAQ;;YAEX;AACF,8BAAsB,MAAM,KAAK,MAAM;AACvC,6BAAqB,cAAc;AACnC,6BAAqB,aAAa,MAAM,MAAM,IAAI;AAE9C,cAAI,KAAK,SAAS;AACd,mBAAO,KAAK,MAAM,GAAG;;AAEzB,iBAAO;;AAEX,eAAO;;;AC3Cf;;;;;;;;;;;;;;;;;AAoBA,uBAAmB;AACnB,sBAAkB,OAAO;AACzB,wBAAoB,OAAO;AAC3B,wBAAoB,OAAO;AAC3B,oCAAgC,OAAO;AACvC,wCAAoC,OAAO;6BAcX;MAiC5B;AACI;AACA,aAAK,QAAQ;AACb,aAAK,YAAY;AACjB,aAAK,kBAAkB;AACvB,aAAK,uBAAuB;AAC5B,aAAK,gBAAgB;AACrB,aAAK,wBAAwB;AAC7B,aAAK,YAAY;AACjB,aAAK,kBAAkB;AACvB,aAAK,OAAO,IAAI,gBAAgB;AAChC,YAAI,CAAC;AACD,sBAAY;;AAEhB,aAAK,YAAY,UAAU,cAAc,QAAQ,QAAQ;AACzD,aAAK,kBAAkB,UAAU;AACjC,aAAK,gBAAgB,UAAU;AAC/B,aAAK,wBAAwB,UAAU;AACvC,YAAI,UAAU;AACV,kBAAY,UAAU,aAAa,MAAM,MAAM;AAC/C,eAAK,kBAAkB;AACvB,eAAK,YAAY;;AAGjB,eAAK,YAAY,UAAU,YAAY,UAAU,YAAY;;;YAa/D;AACF,YAAI,CAAC,KAAK;AACN,gBAAM,KAAK;;AAEf,eAAO,KAAK,wBAAwB,OAAO,KAAK,KAAK,iBACjD,KAAK;;YAUP;AACF,oCAA4B,MAAM,KAAK;AACvC,YAAI,CAAC,KAAK,mBAAmB,CAAC;AAE1B,gBAAM,IAAI,MAAM;mBAEX,KAAK,mBAAmB;AAE7B,kBAAY,oBAAoB,WAAW,KAAK,gBAAgB,QAAQ,MAAM,yCAC1E,KAAK,gBAAgB,OAAO,aAC5B,oEACW,oBAAoB,OAAO,aAAa;;AAE3D,YAAI,CAAC,KAAK;AACN,eAAK,kBAAkB;;AAG3B,uBAAe,KAAK,gBAAgB,OAAO;AACvC,mBAAS,QAAS,SAAS,QAAQ,KAAM;AACzC,iBAAO;WACR;AACH,+BAAuB,OAAO,KAAK,QAAQ,OAAO,UAAW,OAAO,QAAQ;AAC5E,gBAAY,eAAe,WAAW,GAAG,MAAM,mCAAmC,eAAe;AAEjG,YAAI,KAAK;AACL,4BAAkB,OAAO,KAAK,KAAK;AAC/B,2BAAc,KAAK,gBAAgB,QAAQ;AAC3C,gBAAI,WAAU;AACV,oBAAM,IAAI,MAAM,cAAc,MAC1B,yEACY,KAAK,gBAAgB,aAAa;;;;AAI9D,aAAK,uBAAuB;;YAE1B;AACF,YAAI,KAAK;AACL,uBAAa,MAAM,KAAK,KAAK;AAC7B,+BAAqB,MAAM,KAAK;AAChC,cAAI,aAAa;AACb,kBAAM,IAAI,MAAM;;AAEpB,4BAAkB,aAAa;AAC/B,0BAAgB,KAAK,SAAS,WAAW;AACzC,iBAAO;;AAGP,iBAAO;;;YAGT;AACF,YAAI,CAAC,KAAK;AACN,gBAAM,KAAK;;AAEf,oBAAY,MAAM,KAAK,KAAK;AAC5B,YAAI,KAAK;AAGL,kBAAQ,MAAM,KAAK;;AAEvB,eAAO,MAAM,IAAI,OAAK,KAAK,gBAAgB;;MAE/C;AACI,uBAAe,KAAK,SAAS;AAC7B,yBAAiB;AACjB,uBAAe;AACf,qBAAa,GAAG,IAAI,KAAK,gBAAgB,QAAQ;AAC7C,sBAAY,KAAK,gBAAgB;AACjC,0BAAe,KAAK,gBAAgB,KAAK,cAAc,OAAO;AAC9D,cAAI,KAAK,yBAAyB,CAAC;AAE/B;;AAGA,0BAAc,OAAO;AACrB,8BAAkB;AAClB,gBAAI,UAAU;AAGV,kBAAI,WAAU,QAAO,YAAY;AAC7B,8BAAc,QAAO;yBAEhB,WAAW,SAAO,YAAY,QAAO;AAC1C,sBAAM,IAAI,MAAM,mBAAmB,8BAA8B;;AAGjE,8BAAc;;;AAKlB,iCAAmB,OAAO;AAC1B,kBAAI,MAAM;AAGN,oBAAI,WAAU,QAAO,UAAU;AAC3B,gCAAc,KAAK,WAAW;;AAI9B,gCAAc;;yBAGb,CAAC,WAAU,CAAC,QAAO;AAGxB,8BAAc;;AAKd,wBAAQ,QAAO;uBACN;AACD,kCAAc;AACd;uBACC;AACD,kCAAc,KAAK,MAAM;AACzB;uBACC;AACD,kCAAc,KAAK,WAAW;AAC9B;;AAEA,kCAAc;;;;AAK9B,YAAC,WAAU,QAAO,UAAW,OAAO,OAAO,cACvC,SAAS,OAAO;;;AAK5B,YAAI,OAAO,KAAK,QAAQ,WAAW;AAC/B,iBAAO;;AAGP,iBAAO,CAAE,IAAI,UAAU,IAAI;;;MAGnC;AACI,YAAI,UAAU,OAAO,MAAM,kBAAkB;AACzC,iBAAO;;AAGP,iBAAO;;;MAIf,sCAAsC;AAClC,uBAAe;AACf,yBAAiB;AACjB,2BAAmB,KAAK;AACxB,2BAAmB;AAEnB,qBAAa,GAAG,IAAI,YAAY;AAC5B,kBAAQ;iBAEC;AACD,sBAAQ,KAAK,OAAO;qBAEX;AACD,+BAAa,IAAI;AACjB,iCAAe;AACf;qBAEC,KAAK;AACN,+BAAa,IAAI;AAGjB,sBAAI,KAAK,cAAc,OAAO,KAAK;AAC/B;;AAEJ,yBAAO,KAAK;AACZ,iCAAe;AACf;;AAGA,iCAAe;AACf,+BAAa;AACb;;AAER;iBAEC;AACD,sBAAQ,KAAK,OAAO;qBAEX,KAAK;AACN,yBAAO,KAAK,KAAK,UAAU,YAAY;AACvC,iCAAe;AACf,+BAAa,IAAI;AACjB;;;AAGR;iBAEC;AACD,sBAAQ,KAAK,OAAO;qBAEX;AACD,iCAAe;AACf;;;AAGR;iBAEC;AACD,sBAAQ
,KAAK,OAAO;qBAEX,KAAK;AACN,yBAAO,KAAK,KAAK,UAAU,YAAY,IAAI;AAC3C,iCAAe;AACf,+BAAa,IAAI;AACjB;qBAEC;AACD,iCAAe;AACf;;AAGA,iCAAe;AACf;;AAER;iBACC;AACD,sBAAQ,KAAK,OAAO;qBAEX;AACD,iCAAe;AACf;;;AAGR;;;;AAKZ,YAAI,iBAAiB;AACjB,iBAAO,KAAK,KAAK,UAAU,YAAY,aAAa;;AAGpD,iBAAO,KAAK,KAAK,UAAU;;AAG/B,YAAI,wBAAwB,OAAO,WAAW,KAAK,gBAAgB;AAC/D,gBAAM,IAAI,MAAM,wCAAwC,KAAK,gBAAgB,qCAAqC;;AAEtH,eAAO;;;ACpXf;;;;;;;;;;;;;;;;;qCAyBwC;MACpC;AACI;AACA,aAAK,mBAAmB;AACxB,aAAK,WAAW;AAChB,aAAK,UAAU,iBAAiB,WAAW;AAC3C,4BAAoB,KAAK,KAAK,KAAK;AACnC,YAAI,KAAK,UAAU,KAAK,cAAc,KAAK,cAAc,MACrD,CAAC,OAAO,UAAU;AAClB,gBAAM,IAAI,MAAM,gFACmB,KAAK;;AAE5C,aAAK,YAAY,iBAAiB,2BAA2B;AAC7D,aAAK,eAAe,iBAAiB;AACrC,aAAK,uBACD,iBAAiB,wBAAwB,KAAK;AAClD,aAAK,wBAAwB,iBAAiB;AAC9C,aAAK,wBAAwB,iBAAiB,yBAAyB;AACvE,aAAK,qBACD,iBAAiB,uBAAuB,QAAQ,QAAQ;AAC5D,aAAK,kBACD,iBAAiB,oBAAoB,OAAO,OAAO;AACvD,YAAI,CAAC,KAAK,sBAAsB,CAAC,KAAK;AAClC,gBAAM,IAAI,MAAM;;;MAIxB;AACI,eAAO;;mBAGE,0BAA0B;AACnC,YAAI,OAAM,IAAI;AACV,gBAAM,IAAI,MAAM;;AAEpB,mCAA2B,IAAI,mBAAmB;AAElD,cAAM,mBAAmB;AACzB,eAAO;;YAGL;AACF;AACI,eAAK,SAAS,MAAM,UAAU,aAAa,aAAa;YACpD,OAAO,KAAK,yBAAyB,OAAO,OACxC,KAAK;YACT,OAAO;;;AAIX,gBAAM,IAAI,MAAM,iDAAiD,EAAE;;AAEvE,YAAI,CAAC,KAAK;AACN,gBAAM,IAAI,MAAM;;AAEpB,+BAEA,OAAO,gBAAgB,OAAO;AAC9B,aAAK,eAAe,IAAI;AACxB,YAAI,CAAC,KAAK;AAGN,eAAK,eAAe,KAAK,aAAa;mBAEjC,KAAK,aAAa,eAAe,KAAK;AAC3C,gBAAM,IAAI,MAAM,wCACC,KAAK,yBACP,KAAK,aAAa;;AAErC,6BAAqB,KAAK,aAAa,wBAAwB,KAAK;AACpE,aAAK,WAAW,KAAK,aAAa;AAClC,aAAK,SAAS,UAAU,KAAK,UAAU;AACvC,aAAK,SAAS,wBAAwB,KAAK;AAC3C,qBAAa,QAAQ,KAAK;AAC1B,aAAK,WAAW,IAAI,aAAa,KAAK;AACtC,aAAK,WAAW,IAAI,aAAa,KAAK;AACtC;;YAEE;AACF,YAAI,KAAK;AACL,iBAAO,CAAE,OAAO,MAAM,MAAM;;AAEhC;AACA;AACA,+BAAuB,MAAM,KAAK;AAClC,YAAI,KAAK;AACL,2BAAiB,KAAK,aAAa,eAAe;AAClD,8BAAoB,KAAK,4BAA4B,UAAU,CAAC,KAAK,WAAW,KAAK,sBAAsB;;AAE/G,YAAI,KAAK;AACL,2BAAiB,KAAK,aAAa,eAAe;AAClD,2BAAiB,KAAK,4BAA4B,UAAU,CAAC,KAAK,YAAY,KAAK,SAAS;;AAEhG,eAAO;UACH,OAAO,CAAE,aAAe,mBAAmB,UAAY;UACvD,MAAM;;;YAKR;AACF,eAAQ,OAAM,KAAK,QAAQ;;YAEzB;AACF,8BAAsB;AACtB,8BAAsB;AACtB,4BAAoB;AACpB,eAAO,IAAI,QAAQ;AACf,6BAAmB,YAAY;AAC3B,gBAAI,KAAK;AACL,mBAAK,SAAS,sBAAsB,KAAK;AAEzC,kBAAI,KAAK,SAAS,OAAO;AACrB,wBAAQ,CAAE,eAAe;;AAE7B,4BAAc,KAAK,KAAK,SAAS,MAAM,GAAG,KAAK;;AAEnD,gBAAI,KAAK;AACL,mBAAK,SAAS,uBAAuB,KAAK;AAC1C,4BAAc,KAAK,KAAK,SAAS;;AAGrC,gBAAI,EAAE,kBAAkB,KAAK;AACzB,4BAAc;AACd,sBAAQ,CAAE,eAAe;;aAE9B,KAAK,UAAU,KAAK,eAAe;;;MAI9C;AACI,YAAI,CAAC,KAAK;AACN,eAAK,WAAW;AAChB,eAAK,SAAS;AACd,eAAK,aAAa;AAClB,cAAI,KAAK,UAAU,QAAQ,KAAK,OAAO,YAAY,SAAS;AACxD,iBAAK,OAAO,YAAY,GAAG;;;;MAKvC;AACI,cAAM,IAAI,MAAM;;MAGpB;AACI,eAAO,KAAK;;MAEhB;AACI,0BAAkB,MAAM,GAAG;AAC3B,yBAAiB,IAAI,aAAa,MAAM,SAAS;AACjD,cAAM,QAAQ,cAAa,SAAS,IAAI,OAAM,IAAI;AAClD,eAAO;;MAEX;AACI,qBAAa,IAAI,aAAa,eAAmB;AAEjD,aAAK,IAAI,UAAU,KAAK,SAAS,SAAS;AAC1C,eAAO,QAAO,MAAM;;;ACvL5B;;;;;;;;;;;;;;;;;iCAuBoC;MAChC;AACI;AACA,aAAK,qBAAqB;AAC1B,aAAK,eAAe;AACpB,aAAK,WAAW;AAChB,aAAK,SAAS;AACd,YAAI,KAAK;AACL,eAAK,SAAS;AACd,eAAK,WACD,CAAC,KAAK,aAAa,cAAc,KAAK,aAAa;AACvD,eAAK,aAAa,UAAS,CAAC,IAAI;AAChC,cAAI,KAAK,aAAa;AAElB,uCAA2B,KAAK,aAAa,cAAc,IAAM,KAAK,mBAAmB;AACzF,wCAA4B,KAAK,aAAa,eAAe,IACzD,KAAK,mBAAmB;AAC5B,mCAAwB,KAAI,sBAAsB;AAClD,oCAAyB,KAAI,uBAAuB;AACpD,iCAAqB,iBAAiB;AACtC,kCAAsB,sBAAsB;AAC5C,iBAAK,UAAU,SAAS,CAAC,iBAAiB,gBAAgB,eAAe,eAAe,CAAC,GAAG;;AAG5F,iBAAK,UAAU,SAAS,CAAC,GAAG,GAAG,GAAG,IAAI,CAAC,GAAG;;;;MAItD;AACI,eAAO;;mBAGE,0CAA0C;AACnD,YAAI,OAAM,IAAI;AACV,gBAAM,IAAI,MAAM;;AAEpB,YAAI,CAAC;AAGD,+BAAqB,SAAS,cAAc;AAC5C,cAAI,CAAC,aAAa,eAAe,CAAC,aAAa;AAC3C,kBAAM,IAAI,MAAM;;AAGpB,6BAAmB,QAAQ,aAAa;AACxC,6BAAmB,SAAS,aAAa;;AAE7C,+BAAuB,IAAI,eAAe,oBAAoB;AAE9D,cAAM,eAAe;AACrB,eAAO;;YA
GL;AACF,YAAI,KAAK,aAAa;AAClB,kBAAa,KAAK,aAAa,eAAe,UACzC,KAAK,aAAa,eAAe,eAAgB,MAAM,+BAA+B,KAAK,aAAa;;AAGjH;AACI,eAAK,SAAS,MAAM,UAAU,aAAa,aAAa;YACpD,OAAO;cACH,UAAU,KAAK,aAAa;cAC5B,YAAY,KAAK,aAAa,aAC1B,KAAK,aAAa,aAClB;cACJ,OAAO,KAAK,mBAAmB;cAC/B,QAAQ,KAAK,mBAAmB;;;;AAMxC,YAAE,UAAU,iDAAiD,EAAE;AAC/D,gBAAM;;AAEV,YAAI,CAAC,KAAK;AACN,gBAAM,IAAI,MAAM;;AAGpB;AACI,eAAK,mBAAmB,YAAY,KAAK;;AAGzC,kBAAQ,IAAI;AACZ,eAAK,mBAAmB,MAAM,OAAO,IAAI,gBAAgB,KAAK;;AAGlE,aAAK,mBAAmB;AACxB,aAAK,WAAW;AAChB,eAAO,IAAI,QAAQ;AAEf,eAAK,mBAAmB,mBAAmB;AACvC;;;;YAIN;AACF,YAAI,KAAK;AACL,iBAAO,CAAE,OAAO,MAAM,MAAM;;AAEhC;AACA;AACI,gBAAM,WAAmB,KAAK;;AAG9B,gBAAM,IAAI,MAAM,4CAA4C,KAAK,UAAU;;AAE/E,YAAI,KAAK;AACL;AACI,mBAAO,CAAE,OAAO,KAAK,mBAAmB,MAAM,MAAM;;AAGpD,kBAAM,IAAI,MAAM,oCAAoC,EAAE;;AAGtD,gBAAI;;;AAIR,iBAAO,CAAE,OAAO,KAAK,MAAM;;;MAGnC;AAII,YAAI,KAAK,aAAa,eAAe,KAAK,aAAa,gBAClD,MAAK,mBAAmB,UAAU,KAAK,aAAa,eACjD,KAAK,mBAAmB,WAAW,KAAK,aAAa;AACzD,iBAAO;;AAEX,eAAO;;MAGX;AACI,eAAO,KAAK;AACR,gCAAsB,IAAI,UAAU,WAAW;AAC/C;AACA,yBAAe,OAAM,cAAc,eAAe,KAAK,SAAS,KAAK,YAAY,KAAK,UAAU;AAEhG,wBAAc,aAAa;AAC3B,iBAAO,aAAa,QAAQ,MAAM,MAAM;;;YAK1C;AACF,eAAQ,OAAM,KAAK,QAAQ;;MAG/B;AACI,uBAAe,KAAK,OAAO;AAC3B,eAAO,QAAQ,WAAS,MAAM;AAC9B;AACI,eAAK,mBAAmB,YAAY;;AAGpC,kBAAQ,IAAI;AACZ,eAAK,mBAAmB,MAAM;;AAElC,aAAK,WAAW;;MAGpB;AACI,cAAM,IAAI,MAAM;;;AC5LxB;;;;;;;;;;;;;;;;;;;ACAA;;;;;;;;;;;;;;;;;iCAkBoC;MAmBhC;AACI,eAAO,IAAI,cAAc,MAAM;;;gCAYX;MACxB;AACI;AACA,aAAK,WAAW;AAChB,aAAK,OAAO,IAAI,kBAAkB,UAAU;;MAEhD;AACI,eAAO,KAAK,KAAK;;YAEf;AACF,eAAO,KAAK,KAAK;;;oCAGO;MAC5B;AACI;AACA,aAAK,WAAW;AAChB,aAAK,YAAY;AAEjB,aAAK,YAAY;;MAErB;AACI,eAAO,GAAG,KAAK,SAAS,uBAAuB,KAAK;;YAElD;AACF,4BAAoB,MAAM,KAAK,SAAS;AACxC,YAAI,YAAY;AACZ,cAAI,KAAK,cAAc;AACnB,mBAAO;;AAIX,eAAK,YAAY,KAAK,KAAK;AAC3B,eAAK,YAAY;AACjB,iBAAO;;AAEX,sBAAc,YAAY,MAAM,MAAM,KAAK;AAI3C,cAAM,KAAK,KAAK,YAAY,MAAM;AAClC,2BAAmB,MAAM,MAAM,GAAG;AAC9B,eAAK,YAAY,KAAK;;AAE1B,aAAK,YAAY,MAAM,MAAM,SAAS;AACtC,eAAO;;;AC/Ff;;;;;;;;;;;;;;;;;oCAoBuC;MAUnC;AACI,eAAO,IAAI,aAAa;;;+BAYL;MACvB;AACI;AACA,aAAK,WAAW;AAChB,aAAK,OAAO,IAAI,iBAAiB;;MAErC;AACI,eAAO,KAAK,KAAK;;YAEf;AACF,eAAO,KAAK,KAAK;;;mCAyBM;MAC3B;AACI;AACA,aAAK,WAAW;AAChB,YAAI,OAAM,IAAI;AACV,eAAK,UAAU,IAAI,YAAY;;AAI/B,iBAAQ,iBAAkB;AAC1B,eAAK,UAAU,IAAI,cAAc;;;MAGzC;AACI,eAAO,GAAG,KAAK,SAAS;;YAEtB;AACF,4BAAoB,MAAM,KAAK,SAAS;AACxC;AACA,YAAI,YAAY;AACZ,iBAAO;;AAGP,kBAAQ,YAAY;;AAExB;AACA,YAAI,OAAM,IAAI;AACV,iBAAO,KAAK,QAAQ,OAAO,OAAO,CAAE,QAAQ;;AAG5C,iBAAO,KAAK,QAAQ,MAAM,OAAO,KAAK,MAAM;;AAEhD,aAAK,YAAY,KAAK;AACtB,eAAO;;;AC/Gf;;;;;;;;;;;;;;;;;oCA2BuC;MACnC,4BAA4B;AACxB;AACA,aAAK,OAAO;AACZ,aAAK,UAAU;AACf,gBAAa,gBAAgB,cACxB,QAAM,IAAI,gBACN,gBAAgB,QAAQ,gBAAgB,OACzC,QAAQ,MAAM;AAEtB,aAAK,SAAS,QAAQ,UAAU;AAEhC,aAAK,YAAY,QAAQ,aAAa,OAAO;;MAEjD;AACI,eAAO,cAAc,KAAK;;YAExB;AACF,YAAI,KAAK,UAAY,MAAK,gBAAgB,aACtC,KAAK,KAAK,aACV,KAAK,KAAK;AACV,iBAAO,CAAE,OAAO,MAAM,MAAM;;AAEhC,sBAAc,IAAI,QAAQ;AACtB,sBAAY,KAAK,SAAS,KAAK;AAC/B,cAAI,KAAK,gBAAgB;AAGrB,oBAAQ,IAAI,WAAW,KAAK,KAAK,MAAM,KAAK,QAAQ;;AAMpD,+BAAmB,IAAI;AACvB,uBAAW,SAAS;AAChB,0BAAW,WAAW;AAItB,kBAAI,iBAAgB;AAChB,wBAAO,IAAI,WAAW;;AAE1B,kBAAI,CAAE,kBAAgB;AAClB,uBAAO,OAAO,IAAI,UAAU;;AAEhC,sBAAQ;;AAEZ,uBAAW,UAAU;AACjB,qBAAO,OAAO,IAAI,MAAM;;AAE5B,uBAAW,UAAU;AACjB,qBAAO,OAAO,IAAI,MAAM,MAAM;;AAIlC,2BAAc,KAAK,KAAK,MAAM,KAAK,QAAQ;AAG3C,uBAAW,kBAAkB;;AAEjC,eAAK,SAAS;;AAElB,eAAO,CAAE,OAAQ,MAAM,OAAQ,MAAM;;;AC1F7C;;;;;;;;;;;;;;;;;AA0BO,mDAA+C;AAClD;AACA;AACA,UAAK,OAAO,QAAS;AACjB,oBAAY;;AAGZ,oBAAY,IAAI;AAChB,sBAAc,0BAA0B;;AAE5C,uBAAiB,MAAM,QAAW,WAAW;AAC7C,UAAI,SAAS;AACT,2BAAmB,IAAI,WAAW,MAAM,SAAS;AACjD,eAAO,IAAI,kBAAkB,YAAY;;AAGzC,cAAM,IAAI,MAAM,SAAS;;;AAIjC,sCAAkC;AAC9B
,oBAAa;QACT,QAAQ,QAAQ;QAChB,SAAS,QAAQ;QACjB,MAAM,QAAQ;QACd,MAAM,QAAQ;QACd,aAAa,QAAQ;QACrB,OAAO,QAAQ;QACf,UAAU,QAAQ;QAClB,UAAU,QAAQ;QAClB,WAAW,QAAQ;;AAEvB,aAAO;;AC1DX;;;;;;;;;;;;;;;;;AAoBO;AACH,aAAQ,OAAO,WAAW,YAAa,OAAO,OAAO,GAAG,OAAO;;ACrBnE;;;;;;;;;;;;;;;;;iCAyBoC;MAShC,8BAA6B;AACzB;AACA,aAAK,QAAQ;AACb,aAAK,UAAU;;YAEb;AACF,YAAI,YAAY,KAAK,UAAU,OAAM,IAAI;AAErC,qBAAW;AACX,eAAK,QAAQ,GAAG,aAAa,KAAK,MAAM,OAAO;;AAInD,eAAO,IAAI,kBAAkB,KAAK,OAAO,KAAK;;;AC/CtD;;;;;;;;;;;;;;;;;gCAwBmC;MAQ/B,+BAA+B;AAC3B;AACA,aAAK,MAAM;AACX,aAAK,cAAc;;YAMjB;AACF,YAAI,YAAY,KAAK;AACjB,iBAAQ,IAAI,eAAe,KAAK,KAAK,KAAK,aACrC;;AAGL,iBAAO,iBAAiB,KAAK,KAAK,KAAK;;;;AC/CnD;;;;;;;;;;;;;;;;;AAqGO,qCAAiC;AACpC,aAAO,IAAI,WAAW,IAAI,cAAc,SAAS;;AA0B9C;AACH,mBAAa,qBAAqB;AAClC,aAAO,sBAAsB,YAAY;;AA8DtC;AACH,aAAO,sBAAsB;AACzB,oBAAY,MAAM;AAClB,eAAO,qBAAqB,MAAM,IAAI;;;AAiCvC;AACH,aAAO,eAAe,OAAO,oBAAoB;;AAoC9C;AACH,aAAO,mBAAmB,OAAO;;AC1QrC;AAEA,sBAAgB;ACFhB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACAA;;;;;;;;;;;;;;;;AAiBO;AACH,UAAI,CAAC,MAAM,QAAQ;AACf,kBAAS,CAAC;;AAEd,cAAO,QAAQ;AACX,YAAI,KAAK;AACL,kBAAY,EAAE,UAAU,aAAa,MAAM,GAAG;;;;ACvB1D;;;;;;;;;;;;;;;;AAkBA,sCAAgC;AAChC,oBAAc;AACd,mBAAa;AACb,uBAAiB;AACjB,wBAAkB;iCAGkB;MAChC;AACI;AACA,aAAK,YAAY;AACjB,aAAK,WAAW;AAChB,aAAK,OAAO,IAAI,aAAY,MAAM;;MAEtC;AACI,YAAI,KAAK;AACL,eAAK,WAAW;AAChB,cAAI,OAAM,IAAI;AACV,kBAAkB;;;AAY1B,uBAAe;AACf,aAAK,KAAK,IAAI,QAAQ,CAAE,QAAQ,OAAO,UAAU;AACjD,eAAO;;MAQX;AACI;AACA,YAAI,UAAU,YAAY,UAAU,QAAQ,OAAO,SAAS,KACxD,UAAc,OAAO;AACrB,gCAAsB,OAAO,IAAI,OAAK,cAAkB;AACxD,kBAAQ,KAAK,MAAM,eAAe,OAAO;;AAGzC,kBAAQ,KAAK,MAAM,QAAQ,OAAO;;AAEtC,eAAO,CAAE,QAAQ,OAAO,OAAO;;MAGnC;AACI,2BAAmB,KAAK,KAAK,IAAI;AACjC,mBAAW;;MAGf;AACI,YAAI,KAAK,KAAK,IAAI;AACd,6BAAmB,KAAK,KAAK,IAAI;AACjC,qBAAW;;;MAGnB;AACI,aAAK,KAAK,IAAI,QAAQ,CAAE,QAAQ,OAAO,UAAU;;MAErD;AACI,eAAO,KAAK,KAAK;;YAEf;AACF,eAAO,KAAK,SAAS;;MAEzB;AACI,eAAQ,OAAO,sBAAuB,KAAK,KAAK,IAAI;AACpD,YAAI,UAAU;AACV,6BAAmB,KAAK,SAAS,mBAAmB,KAAK;AACzD,6BAAmB,KAAK,SAAS,mBAAmB,KAAK;AACzD,iBAAO,wBAAoC,YAAY;;AAE3D,eAAO,KAAK,KAAK,IAAI,QAAQ;;MAEjC;AACI,sBAAa,KAAK,SAAS,EAAE;AAC7B,0BAAkB;AAClB,YAAI,EAAE,UAAU;AACZ;AAEI,0BAAc,MAAK,IAAI,OAAK,cAAkB;;AAG9C,kBAAM,IAAI,MAAM;;;AAGxB,eAAO,QAAU,EAAE,OAAO,EAAE,OAAO;;MAEvC;AACI,uBAAe,KAAK,MAAM,QAAQ,OAAO;AACzC,eAAO,WAAS,qBAAqB,QAAQ,OAAO,OAAO;;MAE/D;AACI,YAAI,KAAK,KAAK,IAAI;AACd,iBAAQ,sBAAuB,KAAK,KAAK,IAAI;AAC7C,cAAI,sBAAsB;AACtB,iBAAK,YAAY,mBAAmB,KAAK;AACzC,iBAAK,YAAY,mBAAmB,KAAK;;AAE7C,eAAK,KAAK,OAAO;;;MAGzB;AACI,uBAAe,WAAW;AAC1B,YAAI,KAAK,KAAK,IAAI;AACd,6BAAmB,KAAK,KAAK,IAAI;AACjC,qBAAW;AACX,cAAI,WAAW,WAAW;AACtB,iBAAK,YAAY;;;;YAIvB;AACF,sBAAc;AACd;AACA,yBAAiB,SAAa;AAC9B,eAAO,CAAE;;MAEb;AACI,eAAO;UAEH,YAAY;UACZ,SAAS,CAAC;;;MAIlB;AACI,yBAAiB,GAAG;AACpB,yBAAiB,iBAA2B,OAAO,KAAK;AACxD,YAAI,SAAS,KAAK,UAAQ,SAAS;AAC/B,iBAAO,QAAU,IAAI;;AAEzB,yBAAe,QAAU,UAAU,EAAE;AACrC,qBAAa,KAAK,WAAW;AAC7B,qBAAa,GAAG,IAAI,SAAO,MAAM;AAC7B,sBAAY,SAAO,WAAW;AAC9B,yBAAe,IAAI,MAAM,IAAI;AAC7B,uBAAa,GAAG,IAAI,OAAO,QAAQ;AAC/B,mBAAO,KAAK,IAAI,KAAK,QAAQ,KAAK,MAAM;;AAE5C,mBAAO,IAAI,KAAK,IAAI,GAAG,SAAS,GAAG;;AAEvC,eAAO,SAAO;;MAElB;AACI,sBAAc,KAAK,SAAS,EAAE;AAC9B,yBAAe,QAAU,CAAC,EAAE,MAAM,EAAE,OAAO,EAAE;AAC7C,qBAAa,SAAO;AACpB,qBAAa,GAAG,IAAI,MAAM,QAAQ;AAC9B,eAAK,IAAI,EAAE,OAAO,KAAK,MAAM;;AAEjC,eAAO,SAAO;;MAElB;AACI,oBAAY,EAAE,MAAM;AACpB,yBAAiB,IAAI,MAAM,EAAE,OAAO;AACpC,uBAAe;AACf,qBAAa,GAAG,IAAI,EAAE,MAAM;AACxB,cAAI,MAAM;AACN,qBAAS,cAAc,EAAE,MAAM;;;AAGvC,sBAAc,IAAI,MAAM,EAAE,MAAM,KAAK;AACrC,qBAAa,EAAE,MAAM;AACrB,aAAK,QAAQ;AACb,oBAAY,IAAI,MAAM;AACtB,qBAAa,GAAG,IAAI,IAAI,QAAQ;AAC5B,gBAAM,QAAQ;AACd,cAAI,KAAK,OAAS,GAAG,OAAO,MAAM,QAAQ;;AAE9C,eAAO;;MAEX;AACI,yBAAiB,GAAG;AACpB,yBA
Ae,QAAU,EAAE,OAAO,EAAE;AACpC,qBAAa,KAAK,WAAW;AAC7B,qBAAa,GAAG,IAAI,SAAO,MAAM;AAC7B,yBAAe,SAAO,WAAW;AACjC,wBAAc,OAAO;AACrB,eAAK,QAAQ,QAAM,MAAM,MAAM,EAAE,MAAM,MAAM,IAAI,MAAM;AACvD,mBAAO,IAAI,KAAK,IAAI,GAAG,QAAQ,GAAG;;AAEtC,eAAO,SAAO;;MAElB;AACI,yBAAiB,GAAG;AAEpB,eAAO,KAAO,QAAU,KAAK;;MAEjC;AACI,yBAAiB,SAAS;AAC1B,qBAAa,QAAQ,IAAI,OAAK,KAAK,SAAS,EAAE;AAC9C,uBAAe,QAAU,QAAQ,GAAG,OAAO,QAAQ,GAAG;AACtD,2BAAmB,OAAO;AAC1B,qBAAa,GAAG,IAAI,QAAQ,QAAQ;AAChC,2BAAiB,KAAK;AACtB,uBAAa,GAAG,IAAI,WAAW,QAAQ;AACnC,uBAAW,MAAM,SAAS;;;AAGlC,eAAO,OAAO;;MAElB;AACI,qBAAa,gBAAoB,CAAC,MAAM,OAAO;AAG/C,yBAAiB,KAAI,QAAQ;AAC7B,8BAAsB,sBAAkC,SAAS,OAAO;AAExE,kBAAU,IAAO,QAAQ,SAAS,QAAQ;AAC1C,kBAAU,IAAO;AACjB,uBAAe,KAAK,IAAI,GAAG,MAAM,QAAQ;AAGzC,eAAO,IAAO,GAAG;;MAErB;AACI,yBAAiB,CAAC,GAAG,IAAI;AACzB,eAAO,KAAK,oBAAoB,GAAG,GAAG,EAAE,OAAO,oBAAoB,KAAK,IAAI,QAAQ;;MAExF;AACI,yBAAiB,CAAC,GAAG,IAAI;AACzB,oBAAW,YAAU,KAAK,MAAM,KAAI;AACpC,4BAAoB;AACpB,eAAO,KAAK,oBAAoB,GAAG,GAAG,aAAa;;MAEvD;AACI,yBAAiB,GAAG;AACpB,oCAAwC,OAAO,MAAM,EAAE;AACvD,wCAAgC,2BAAuC,EAAE,OAAO;AAChF,4BAAoB,YAAW,EAAE,OAAO;AACxC,uBAAe,OAAS,UAAU;AAClC,2BAAmB,eAAmB;AACtC,qBAAa,KAAK,SAAS,OAAO;AAClC,sBAAc,KAAK,SAAS,EAAE;AAC9B,qBAAa,GAAG,IAAI,KAAK,QAAQ,EAAE;AAC/B,yBAAe,IAAI;AACnB,qBAAU;AACV,uBAAa,GAAG,IAAI,YAAY,EAAE;AAC9B,oBAAO,MAAM,SAAS;;AAE1B,eAAK,KAAK;;AAEd,eAAO;;MAEX;AACI,yBAAiB,GAAG;AACpB,wCAAgC,2BAAuC,EAAE,OAAO;AAChF,4BAAoB,YAAW,EAAE,OAAO;AACxC,uBAAe,OAAS,UAAU;AAClC,2BAAmB,eAAmB;AACtC,qBAAa,KAAK,SAAS,OAAO;AAClC,sBAAc,KAAK,SAAS,EAAE;AAC9B,qBAAa,GAAG,IAAI,KAAK,QAAQ,EAAE;AAC/B,yBAAe,IAAI;AACnB,sBAAW;AACX,uBAAa,GAAG,IAAI,YAAY,EAAE;AAC9B,qBAAQ,MAAM,SAAS;;AAE3B,eAAK,KAAK;;AAEd,eAAO;;MAEX;AACI,yBAAiB,GAAG;AACpB,oBAAY;AAGZ,yBAAiB,EAAE,OAAO,WAAW;AACrC,qBAAa,GAAG,IAAI,UAAU,EAAE;AAC5B,uBAAa,WAAW,WAAW,IAAI;;AAE3C,qBAAa,GAAG,IAAI,aAAa,EAAE;AAC/B,4BAAkB,QAAU,GAAG;AAC/B,uBAAa,MAAS,WAAW,YAAY,OAAO;AACpD,uBAAY,KAAK,IAAI,GAAG,IAAI;AAC5B,cAAI,KAAK;;AAEb,eAAO,MAAS;;MAEpB;AACI,yBAAiB,GAAG;AACpB,qBAAa,CAAC;AACd,oCAAwC,UAAU,MAAM,EAAE;AAC1D,wCAAgC,2BAAuC,EAAE,OAAO;AAChF,uBAAe,OAAS,UAAU;AAClC,2BAAmB,eAAmB;AACtC,qBAAa,KAAK,SAAS,OAAO;AAClC,sBAAc,KAAK,SAAS,EAAE;AAC9B,qBAAa,GAAG,IAAI,KAAK,QAAQ,EAAE;AAC/B,yBAAe,IAAI;AACnB,qBAAU,MAAM;AAChB,yBAAe;AACf,uBAAa,GAAG,IAAI,YAAY,EAAE;AAC9B,0BAAc,MAAM,SAAS;AAC7B,gBAAI,QAAQ;AACR,qBAAM;AACN,yBAAW;;;AAGnB,eAAK,KAAK;;AAEd,eAAO;;MAEX;AACI,yBAAiB,GAAG;AACpB,qBAAa,CAAC;AACd,oCAAwC,UAAU,MAAM,EAAE;AAC1D,wCAAgC,2BAAuC,EAAE,OAAO;AAChF,uBAAe,OAAS,UAAU;AAClC,2BAAmB,eAAmB;AACtC,qBAAa,KAAK,SAAS,OAAO;AAClC,sBAAc,KAAK,SAAS,EAAE;AAC9B,qBAAa,GAAG,IAAI,KAAK,QAAQ,EAAE;AAC/B,yBAAe,IAAI;AACnB,qBAAU,MAAM;AAChB,yBAAe;AACf,uBAAa,GAAG,IAAI,YAAY,EAAE;AAC9B,0BAAc,MAAM,SAAS;AAC7B,gBAAI,QAAQ;AACR,qBAAM;AACN,yBAAW;;;AAGnB,eAAK,KAAK;;AAEd,eAAO;;MAEX;AACI,yBAAiB,GAAG;AACpB,YAAI,SAAS,EAAE,OAAO;AAClB,gBAAM,IAAI,MAAM,oDAAoD,EAAE,OAAO,kBACzD;;AAExB,4BAAoB,YAAW,EAAE,OAAO;AACxC,uBAAe,OAAS,EAAE,OAAO;AACjC,qBAAa,KAAK,SAAS,OAAO;AAClC,sBAAc,KAAK,SAAS,EAAE;AAC9B,yBAAiB,EAAE,MAAM,EAAE,OAAO;AAClC,8BAAsB,WAClB,UAAU,IAAI,WAAW,IAAI,IAC7B,UAAU,IAAI;AAClB,qBAAa,GAAG,IAAI,MAAM,QAAQ,KAAK;AACnC,uBAAa,GAAG,IAAI,UAAU;AAC1B,wBAAY,cAAc,GAAG;AAC7B,gBAAI,MAAM;AACN,mBAAK,OAAO,YAAY,IAAI,MAAM;;AAGlC,8BAAgB,cAAc,GAAG,IAAI;AACrC,mBAAK,OAAO,YAAY,MAAM,WAAW,KAAK,WAC1C,MAAM,OAAO,KAAK;;;;AAIlC,eAAO;;MAEX;AACI,yBAAiB,CAAC,GAAG,IAAI;AACzB,eAAO,KAAK,oBAAoB,GAAG,GAAG,QAAQ;AAC1C,iBAAQ,SAAS,OAAQ,IAAI;;;MAGrC;AACI,yBAAiB,CAAC,GAAG,IAAI;AACzB,eAAO,KAAK,oBAAoB,GAAG,GAAG,QAAQ;AAC1C,iBAAQ,SAAS,OAAQ,IAAI;;;MAGrC;AACI,yBAAiB,CAAC,GAAG,IAAI;AACzB,eAAO,KAAK,oBAAoB,GAAG,GAAG,QAAQ;AAC1C,iBAAQ,OAAO,OAAQ,IAAI;;;MAGnC;AACI,yBAAiB,CAAC,GAAG,IAAI;AACzB,eA
AO,KAAK,oBAAoB,GAAG,GAAG,QAAQ;AAC1C,iBAAQ,QAAQ,OAAQ,IAAI;;;MAGpC;AACI,yBAAiB,CAAC,GAAG,IAAI;AACzB,eAAO,KAAK,oBAAoB,GAAG,GAAG,QAAQ;AAC1C,iBAAQ,OAAO,OAAQ,IAAI;;;MAGnC;AACI,yBAAiB,CAAC,GAAG,IAAI;AACzB,eAAO,KAAK,oBAAoB,GAAG,GAAG,QAAQ;AAC1C,iBAAQ,QAAQ,OAAQ,IAAI;;;MAGpC;AACI,yBAAiB,CAAC,GAAG,IAAI;AACzB,eAAO,KAAK,oBAAoB,GAAG,GAAG,QAAQ;AAC1C,iBAAO,QAAQ;;;MAGvB;AACI,yBAAiB,CAAC,GAAG,IAAI;AACzB,eAAO,KAAK,oBAAoB,GAAG,GAAG,QAAQ;AAC1C,iBAAO,QAAQ;;;MAGvB;AACI,yBAAiB,CAAC,WAAW,GAAG,IAAI;AACpC,uBAAe,KAAK,SAAS,UAAU;AACvC,wBAAgB,KAAK,SAAS,EAAE;AAChC,wBAAgB,KAAK,SAAS,EAAE;AAChC,uBAAe,OAAS,EAAE,OAAO,YAAW,EAAE,OAAO,EAAE;AACvD,0BAAkB,KAAK,SAAS,OAAO;AACvC,qBAAY;AACZ,uBAAe,UAAU,SAAS,KAAK,UAAU,OAAO,KAAK,EAAE,SAAS,IACpE,IACA,eAAmB,EAAE,MAAM,MAAM;AACrC,qBAAa,GAAG,IAAI,OAAO,QAAQ;AAC/B,uBAAa,GAAG,IAAI,QAAQ;AACxB,gBAAI,OAAO,OAAO;AACd,wBAAU,YAAW,QAAQ;;AAG7B,wBAAU,YAAW,QAAQ;;;;AAIzC,eAAO;;MAEX;AACI,yBAAiB,CAAC,YAAY;AAC9B,yBAAiB,KAAK,SAAS,UAAU;AACzC,eAAO,YAAU,UAAU,OAAO;;MAEtC;AACI,yBAAiB,GAAG;AACpB,sBAAc,KAAK,SAAS,EAAE;AAC9B,eAAO,WAAS,OAAO,EAAE,OAAO,EAAE,OAAO,GAAG;;MAEhD;AACI,yBAAiB,GAAG;AACpB,oCAAwC,OAAO,MAAM,EAAE;AACvD,wCAAgC,2BAAuC,EAAE,OAAO;AAChF,uBAAe,OAAS,UAAU,EAAE;AACpC,2BAAmB,eAAmB;AACtC,qBAAa,KAAK,SAAS,OAAO;AAClC,sBAAc,KAAK,SAAS,EAAE;AAC9B,qBAAa,GAAG,IAAI,KAAK,QAAQ,EAAE;AAC/B,yBAAe,IAAI;AACnB,qBAAU,MAAM;AAChB,uBAAa,GAAG,IAAI,YAAY,EAAE;AAC9B,0BAAc,MAAM,SAAS;AAC7B,gBAAI,QAAQ;AACR,qBAAM;;;AAGd,eAAK,KAAK;;AAEd,eAAO;;MAEX;AACI,yBAAiB,CAAC,GAAG,IAAI;AACzB,eAAO,KAAK,oBAAoB,GAAG,GAAG,EAAE,OAAO,gBAAgB,KAAK,IAAI,MAAM;;MAElF;AACI,yBAAiB,CAAC,GAAG,IAAI;AACzB,eAAO,KAAK,oBAAoB,GAAG,GAAG,EAAE,OAAO;AAC3C,sBAAY,OAAO;AACnB,cAAK,OAAO,KAAK,OAAO,KAAO,QAAQ,KAAK,QAAQ;AAChD,mBAAO;;AAGP,mBAAQ,OAAM,QAAQ;;;;MAIlC;AACI,yBAAiB,CAAC,GAAG,IAAI;AACzB,eAAO,KAAK,oBAAoB,GAAG,GAAG,EAAE,OAAO,gBAAgB,KAAK,IAAI,MAAM;;MAElF;AACI,yBAAiB,GAAG;AACpB,oCAAwC,OAAO,MAAM,EAAE;AACvD,wCAAgC,2BAAuC,EAAE,OAAO;AAChF,uBAAe,OAAS,UAAU,EAAE;AACpC,2BAAmB,eAAmB;AACtC,qBAAa,KAAK,SAAS,OAAO;AAClC,sBAAc,KAAK,SAAS,EAAE;AAC9B,qBAAa,GAAG,IAAI,KAAK,QAAQ,EAAE;AAC/B,yBAAe,IAAI;AACnB,qBAAU,MAAM;AAChB,uBAAa,GAAG,IAAI,YAAY,EAAE;AAC9B,0BAAc,MAAM,SAAS;AAC7B,mBAAM,QAAO;;AAEjB,eAAK,KAAK;;AAEd,eAAO;;MAEX;AACI,yBAAiB,GAAG;AACpB,oCAAwC,OAAO,MAAM,EAAE;AACvD,wCAAgC,2BAAuC,EAAE,OAAO;AAChF,uBAAe,OAAS,UAAU,EAAE;AACpC,2BAAmB,eAAmB;AACtC,qBAAa,KAAK,SAAS,OAAO;AAClC,sBAAc,KAAK,SAAS,EAAE;AAC9B,qBAAa,GAAG,IAAI,KAAK,QAAQ,EAAE;AAC/B,yBAAe,IAAI;AACnB,uBAAa,MAAM;AACnB,uBAAa,GAAG,IAAI,YAAY,EAAE;AAC9B,0BAAc,MAAM,SAAS;AAC7B,qBAAS,UAAU;;AAEvB,eAAK,KAAK;;AAEd,eAAO;;MAEX;AACI,yBAAiB,CAAC,GAAG,IAAI;AACzB,eAAO,KAAK,oBAAoB,GAAG,GAAG,EAAE,OAAO;AAC3C,uBAAa,OAAO;AACpB,iBAAO,OAAO;;;MAGtB;AACI,yBAAiB,CAAC,IAAI,IAAI;AAC1B,6BAAqB,IAAI,aAAa,EAAE;AACxC,uBAAe,KAAK,SAAS,EAAE;AAC/B,yBAAiB,KAAK,SAAS,GAAG;AAClC,qBAAa,GAAG,IAAI,OAAO,QAAQ,EAAE;AACjC,oBAAU,OAAO;AACjB,cAAI,KAAK;AACL,yBAAa,KAAK,SAAS;;AAG3B,yBAAa,KAAK,SAAS,KAAM,KAAI;;;AAG7C,eAAO,KAAK,WAAW,cAAc,EAAE,OAAO;;MAElD;AACI,yBAAiB,CAAC,GAAG,IAAI;AACzB,eAAO,KAAK,oBAAoB,GAAG,GAAG,EAAE,OAAO,oBAAoB,KAAK,MAAM,QAAQ;;MAE1F;AACI,yBAAiB,GAAG;AACpB,eAAO,OAAK,KAAK,WAAW,IAAI;;MAEpC;AACI,yBAAiB,CAAC,GAAG,UAAU;AAC/B,yBAAiB,EAAE,MAAM;AACzB,8BAAsB,KAAK,SAAS,QAAQ;AAC5C,iBAAS,QAAQ,cAAc;AAC/B,uBAAe,QAAU,UAAU,EAAE;AACrC,qBAAa,KAAK,WAAW;AAC7B,qBAAa,GAAG,IAAI,OAAO,MAAM,EAAE;AAC/B,yBAAe,OAAO,WAAW;AACjC,8BAAoB,OAAO;AAC3B,sBAAY,QAAQ,cAAc,OAAO;AACzC,gCAAsB,KAAK,WAAW;AACtC,iBAAO,OAAO,KAAK,KAAK,OAAO;;AAEnC,eAAO,OAAO;;MAElB;AACI,yBAAiB,CAAC,IAAI;AACtB,sBAAa,WAAW,OAAO,UAAU,IAAI;AAC7C,yBAAiB,aAAyB,EAAE,OAAO,YAAY;AAC/D,yBAAiB,aAAyB,SAAS,QAAQ,WAAW;AACtE,iCAAyB,qBAAiC,EAAE,OAAO,YAAY;AAC/E,iCAAyB,qBAAiC,OAAO,WAA
W;AAC5E,0BAAkB,cAA0B,kBAAkB,OAAO,WAAW;AAChF,eAAO,WAAa,EAAE,QAAQ,WAAW,UACpC,QAAQ,kBACR,MAAM,kBAAkB;;MAEjC;AACI,yBAAiB,GAAG;AACpB,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,8BAAsB,SAAS;AAC/B,+BAAuB,SAAS;AAChC,8BAAsB,SAAS;AAC/B,qCAA6B,SAAS;AACtC,sCAA8B,SAAS;AACvC,qCAA6B,SAAS;AACtC,yBAAiB,SAAS,QAAQ;AAClC,uBAAe,SAAS,QAAQ;AAChC,wBAAgB,SAAS,QAAQ;AACjC,6BAAsB,aAAa,QAAQ,OAAO,oBAC9C,OAAO;AACX,wBAAgB,KAAK,SAAS,EAAE;AAChC,uBAAe,QAAU,SAAS,UAAU,EAAE;AAC9C,2BAAmB,OAAO;AAC1B,mCAA2B,SAAS,SAAS,KAAK,SAAS,SAAS,KAChE,SAAS,SAAS,KAAK,SAAS,SAAS;AAC7C,mCAA2B,SAAS,SAAS,KAAK,SAAS,SAAS,KAAK,SAAS,SAAS;AAC3F,iCAAyB,SAAS,SAAS,KAAK,SAAS,SAAS;AAClE,iCAAyB,SAAS,SAAS;AAC3C,yBAAiB,GAAG,QAAQ,SAAS,WAAW,EAAE;AAC9C,oCAA0B,QAAQ;AAClC,mCAAyB,QAAQ,EAAE,QAAQ;AAC3C,6BAAmB,GAAG,UAAU,SAAS,YAAY,EAAE;AACnD,8BAAkB,GAAG,SAAS,SAAS,UAAU,EAAE;AAC/C,mCAAqB,SAAS,cAAc;AAC5C,8BAAgB;AAChB,qBAAO,YAAY;AACf,6BAAa;;AAEjB,gCAAkB,KAAK,IAAI,SAAS,SAAS,uBAAuB;AACpE,wCAA0B,oBAAoB,SAAS;AACvD,8BAAgB,GAAG,OAAO,SAAS,WAAW,EAAE;AAC5C,mCAAmB,OAAO,eAAe;AACzC,8BAAc;AACd,uBAAO,UAAU;AACb,6BAAW;;AAEf,gCAAgB,KAAK,IAAI,SAAS,UAAU,wBAAwB;AACpE,wCAAwB,oBAAoB,OAAO;AACnD,gCAAgB,GAAG,OAAO,SAAS,UAAU,EAAE;AAC3C,qCAAmB,OAAO,cAAc;AACxC,gCAAc;AACd,yBAAO,UAAU;AACb,+BAAW;;AAEf,kCAAgB,KAAK,IAAI,SAAS,SAAS,uBAAuB;AAElE,0CAAwB,kBAAkB,OAAO;AACjD,oCAAkB;AAClB,iCAAe;AACf,+BAAY;AACZ,oCAAkB,WAAW,SAAS,WAAW,UAAU;AACvD,yCAAqB,mBAAmB,SAAS,EAAE,QAAQ;AAC3D,oCAAgB,SAAS,OAAO,SAAS,QAAQ;AAC7C,yCAAmB,eAAe,OAAO,EAAE,QAAQ;AACnD,sCAAgB,SAAS,OAAO,SAAS,QAAQ;AAC7C,2CAAmB,aAAa,OAAO,EAAE,QAAQ;AACjD,sCAAc,QAAQ,aAAa;AACnC,4BAAK,aAAa,SAAS,QAAQ;AAC/B,wCAAc;mCAET,aAAa;AAClB,sCAAY;AACZ;;AAEJ,4BAAI,MAAM;AACN;;;AAGR,0BAAI,MAAM;AACN;;;AAGR,wBAAI,MAAM;AACN;;;AAGR,uCAAqB,kBAAkB;AACvC,6BAAW,gBACP,aAAa,QAAQ,WAAW,SAAQ;;;;;;AAMhE,eAAO,OAAO;;MAElB;AACI,yBAAiB,GAAG;AACpB,eAAO,KAAK,OAAO,GAAG,UAAU,OAAO;;MAE3C;AACI,yBAAiB,CAAC,IAAI,IAAI;AAC1B,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,8BAAsB,SAAS;AAC/B,+BAAuB,SAAS;AAChC,8BAAsB,SAAS;AAC/B,qCAA6B,SAAS;AACtC,sCAA8B,SAAS;AACvC,qCAA6B,SAAS;AACtC,yBAAiB,uBAAuB,IAAI,SAAS,QAAQ;AAC7D,wBAAgB,uBAAuB,IAAI,SAAS,QAAQ;AAC5D,uBAAe,wBAAwB,IAAI,SAAS,QAAQ;AAC5D,mBAAW,QAAU,EAAE,OAAO;AAC9B,8BAAsB,IAAK,eAAc,eAAe;AACxD,sBAAc,KAAK,WAAW;AAC9B,yBAAiB,GAAG,QAAQ,SAAS,WAAW,EAAE;AAC9C,6BAAmB,GAAG,UAAU,SAAS,YAAY,EAAE;AACnD,+BAAmB,GAAG,UAAU,SAAS,SAAS,EAAE;AAChD,+BAAiB,GAAG,QAAQ,SAAS,UAAU,EAAE;AAC7C,iCAAiB,GAAG,QAAQ,SAAS,SAAS,EAAE;AAE5C,wCAAsB,UAAU;AAChC,sCAAoB,QAAQ;AAC5B,sCAAoB,QAAQ;AAC5B,gCAAc;AACd,oCAAkB,GAAG,SAAS,sBAAsB,UAAU;AAC1D,oCAAiB,iBAAgB,UAAU;AAC3C,wBAAI,UAAU,KAAK,WAAW,SAAS,YACnC,KAAK,MAAM,aAAa;AACxB;;AAEJ,oCAAgB,GAAG,OAAO,uBAAuB,QAAQ;AACrD,oCAAe,eAAc,QAAQ;AACrC,0BAAI,QAAQ,KAAK,SAAS,SAAS,aAC/B,KAAK,MAAM,WAAW;AACtB;;AAEJ,sCAAgB,GAAG,OAAO,sBAAsB,QAAQ;AACpD,sCAAe,eAAc,QAAQ;AACrC,4BAAI,QAAQ,KAAK,SAAS,SAAS,YAC/B,KAAK,MAAM,WAAW;AACtB;;AAEJ,sCAAc,MAAM,IAAI,OAAO,SAAS,OAAO,OAAO;AACtD,mCAAW;;;;AAIvB,qBAAG,IAAI,UAAU,eAAe,OAAO,SAAS,OAAO,OAAO;;;;;;AAMlF,eAAO,GAAG;;MAEd;AACI,yBAAiB,GAAG;AACpB,eAAO,KAAK,OAAO,GAAG,UAAU,OAAO;;MAE3C;AACI,6BAAqB,QAAU,SAAS,UAAU;AAClD,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,8BAAsB,SAAS;AAC/B,+BAAuB,SAAS;AAChC,8BAAsB,SAAS;AAC/B,qCAA6B,SAAS;AACtC,sCAA8B,SAAS;AACvC,qCAA6B,SAAS;AACtC,yBAAiB,SAAS,QAAQ;AAClC,uBAAe,SAAS,QAAQ;AAChC,wBAAgB,SAAS,QAAQ;AACjC,qBAAa,KAAK,WAAW;AAC7B,yBAAiB,GAAG,QAAQ,SAAS,WAAW,EAAE;AAC9C,6BAAmB,GAAG,UAAU,SAAS,YAAY,EAAE;AACnD,8BAAkB,GAAG,SAAS,SAAS,UAAU,EAAE;AAC/C,mCAAqB,SAAS,cAAc;AAC5C,8BAAgB;AAChB,qBAAO,YAAY;AACf,6BAAa;;AAEjB,gCAAkB,KAAK,IAAI,SAAS,SAAS,uBAAuB;AACpE,8BAAgB,GAAG,OAAO,SA
AS,WAAW,EAAE;AAC5C,mCAAmB,OAAO,eAAe;AACzC,8BAAc;AACd,uBAAO,UAAU;AACb,6BAAW;;AAEf,gCAAgB,KAAK,IAAI,SAAS,UAAU,wBAAwB;AACpE,gCAAgB,GAAG,OAAO,SAAS,UAAU,EAAE;AAC3C,qCAAmB,OAAO,cAAc;AACxC,gCAAc;AACd,yBAAO,UAAU;AACb,+BAAW;;AAEf,kCAAgB,KAAK,IAAI,SAAS,SAAS,uBAAuB;AAElE,iCAAe,OAAO;AACtB,oCAAkB;AAClB,oCAAkB,WAAW,SAAS,WAAW,UAAU;AACvD,mCAAe,SAAS;AACxB,oCAAgB,SAAS,OAAO,SAAS,QAAQ;AAC7C,mCAAa,OAAO;AACpB,sCAAgB,SAAS,OAAO,SAAS,QAAQ;AAC7C,qCAAa,OAAO;AACpB,sCAAc,KAAK,IAAI,OAAO,QAAQ,MAAM,MAAM;AAClD,4BAAI,SAAS;AACT,qCAAW;AACX,wCAAc,SAAS,wBACnB,uBACA,OAAO,wBAAwB;;;;;AAKnD,+BAAa,IAAI,aAAa,OAAO,QAAQ,MAAM,MAAM;;;;;;AAM7E,eAAO,aAAa;;MAExB;AACI,yBAAiB,CAAC,GAAG,IAAI;AACzB,6BAAqB,KAAK,mBAAmB,GAAG;AAChD,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,8BAAsB,SAAS;AAC/B,+BAAuB,SAAS;AAChC,8BAAsB,SAAS;AAC/B,qCAA6B,SAAS;AACtC,sCAA8B,SAAS;AACvC,qCAA6B,SAAS;AACtC,yBAAiB,uBAAuB,IAAI,SAAS,QAAQ;AAC7D,wBAAgB,uBAAuB,IAAI,SAAS,QAAQ;AAC5D,uBAAe,wBAAwB,IAAI,SAAS,QAAQ;AAC5D,mBAAW,QAAU,EAAE,OAAO;AAC9B,0BAAkB,KAAK,WAAW;AAClC,sBAAc,KAAK,WAAW;AAC9B,yBAAiB,GAAG,QAAQ,SAAS,WAAW,EAAE;AAC9C,6BAAmB,GAAG,UAAU,SAAS,YAAY,EAAE;AACnD,+BAAmB,GAAG,UAAU,SAAS,SAAS,EAAE;AAChD,+BAAiB,GAAG,QAAQ,SAAS,UAAU,EAAE;AAC7C,iCAAiB,GAAG,QAAQ,SAAS,SAAS,EAAE;AAE5C,wCAAsB,UAAU;AAChC,sCAAoB,QAAQ;AAC5B,sCAAoB,QAAQ;AAC5B,gCAAc;AACd,oCAAkB,GAAG,SAAS,sBAAsB,UAAU;AAC1D,oCAAiB,iBAAgB,UAAU;AAC3C,wBAAI,UAAU,KAAK,WAAW,SAAS,YACnC,KAAK,MAAM,aAAa;AACxB;;AAEJ,oCAAgB,GAAG,OAAO,uBAAuB,QAAQ;AACrD,oCAAe,eAAc,QAAQ;AACrC,0BAAI,QAAQ,KAAK,SAAS,SAAS,aAC/B,KAAK,MAAM,WAAW;AACtB;;AAEJ,sCAAgB,GAAG,OAAO,sBAAsB,QAAQ;AACpD,sCAAe,eAAc,QAAQ;AACrC,4BAAI,QAAQ,KAAK,SAAS,SAAS,YAC/B,KAAK,MAAM,WAAW;AACtB;;AAEJ,uCAAe,uBACX,wBAAwB,uBACxB,IACA,UAAU,IAAI,OAAO,SAAS,OAAO,OAAO;AAChD,uCAAe,SAAS,wBAAwB,uBAC5C,OAAO,uBAAuB;AAClC,qCAAa,WAAW,SAAS,IAAI;AACrC,4BAAI,SAAS;AACT;;AAEJ,sCAAc,MAAM,IAAI,OAAO,SAAS,OAAO,OAAO;AACtD,mCAAW,QAAQ;;;;AAI/B,qBAAG,IAAI,SAAS,OAAO,SAAS,OAAO,OAAO;;;;;;AAMlE,eAAO,GAAG;;MAEd;AACI,yBAAiB,GAAG;AACpB,0DAAkD,EAAE;AACpD,wBAAgB,KAAK,SAAS,EAAE;AAChC,uBAAe,IAAI,aAAa,eAAmB,CAAC,OAAO,WAAW,UAAU;AAChF,mCAA2B;UACtB,gBAAgB,YAAY,IAAK,YAAY,IAAI;UACjD,gBAAgB,WAAW,IAAK,WAAW,IAAI;;AAEpD,oCAA4B;UACvB,gBAAgB,YAAY,IAAK,YAAY,IAAI;UACjD,gBAAgB,WAAW,IAAK,WAAW,IAAI;;AAEpD,wBAAgB;AAChB,sCAA8B,mBAAmB,KAAK,oBAAoB;AAC1E,sCAA8B,mBAAmB,KAAK,oBAAoB;AAC1E,qBAAa,GAAG,IAAI,OAAO;AACvB,uBAAa,GAAG,IAAI,WAAW;AAC3B,kCAAsB,wBAAwB;AAC9C,mCAAuB,KAAK,MAAM;AAClC,4BAAgB,gBAAgB;AAChC,kCAAsB,KAAK,IAAI,YAAY,GAAG,KAAK,KAAK;AACxD,iCAAqB,IAAI,EAAE,QAAQ,KAAK,iBAAiB,EAAE,QAAQ;AACnE,iCAAqB,IAAI,EAAE,QAAQ,KAAK,gBAAgB,EAAE,QAAQ;AAClE,yBAAa,GAAG,IAAI,UAAU;AAC1B,oCAAsB,wBAAwB;AAC9C,qCAAuB,KAAK,MAAM;AAClC,8BAAgB,gBAAgB;AAChC,oCAAsB,KAAK,IAAI,WAAW,GAAG,KAAK,KAAK;AACvD,oCAAsB,eAAe,iBAAiB,EAAE,QAAQ;AAChE,oCAAsB,eAAe,iBAAiB,EAAE,QAAQ;AAChE,qCAAuB,eAAe,gBAAgB,EAAE,QAAQ;AAChE,qCAAuB,eAAe,gBAAgB,EAAE,QAAQ;AAChE,2BAAa,GAAG,IAAI,aAAa;AAG7B,gCAAgB,QAAQ,gBAAgB;AACxC,mCAAmB,QAAQ,gBAAgB;AAC3C,iCAAiB,QAAQ,iBAAiB;AAC1C,oCAAoB,QAAQ,iBAAiB;AAC7C,4BAAY,UAAW,YAAW,WAAW;AAC7C,+BAAe,aAAc,eAAc,cAAc;AACzD,iCAAiB,MAAO,UAAS,OAAO;AACxC,uBAAO,eAAe;;;;;AAKtC,eAAO,QAAU,QAAQ,CAAC,OAAO,WAAW,UAAU;;MAE1D;AACI,yBAAiB,CAAC,IAAI,IAAI;AAC1B,gDAAwC,EAAE;AAC1C,oCAA4B,GAAG;AAC/B,uBAAe,IAAI,aAAa,QAAQ,UAAU,SAAS;AAK3D,+BAAuB;UAClB,gBAAgB,UAAU,IAAK,UAAU,IAAI;UAC7C,gBAAgB,SAAS,IAAK,SAAS,IAAI;;AAEhD,+BAAuB;UAClB,gBAAgB,UAAU,IAAK,UAAU,IAAI;UAC7C,gBAAgB,SAAS,IAAK,SAAS,IAAI;;AAEhD,4BAAoB,eAAe,KAAK,eAAe;AACvD,2BAAmB,eAAe,KAAK,eAAe;AAItD,yBAAiB,KAAK,SAAS,GAAG;AAClC,qBAAa;AACb,qBAAa,GAAG,IAAI,OAAO;AACvB,0BAAgB,IAAI,EAAE,QAAQ;AAC9B,uBAAa,GAAG,IAAI,SAAS;AACzB,wBAAY,IAAI;AAChB,gCAAoB,KAAK,MAAM;AAC/B,mC
AAuB,KAAK,IAAI,KAAK,KAAK,MAAM,UAAU;AAC1D,iCAAqB,UAAU,cAAc,EAAE,QAAQ;AACvD,oCAAwB,UAAU,iBAAiB,EAAE,QAAQ;AAC7D,4BAAgB,MAAM;AACtB,mCAAuB,IAAM;AAC7B,yBAAa,GAAG,IAAI,QAAQ;AACxB,0BAAY,IAAI;AAChB,mCAAqB,KAAK,MAAM;AAChC,oCAAsB,KAAK,IAAI,KAAK,KAAK,MAAM,SAAS;AACxD,8BAAgB,MAAM;AACtB,qCAAuB,IAAM;AAC7B,sCAAwB,eAAe,eAAe,EAAE,QAAQ;AAChE,uCAAyB,eAAe,gBAAgB,EAAE,QAAQ;AAClE,yCAA2B,kBAAkB,eAAe,EAAE,QAAQ;AACtE,0CAA4B,kBAAkB,gBAAgB,EAAE,QAAQ;AACxE,wDAA0C,iBAAiB;AAC3D,iDAAmC,iBAAiB;AACpD,iDAAmC,UAAU;AAC7C,0CAA4B,UAAU;AACtC,2BAAa,GAAG,IAAI,OAAO;AACvB,8BAAc,SAAS;AACvB,uBAAO,kBAAkB,MACrB,QAAQ;AACZ,uBAAO,mBAAmB,MAAM,QAAQ;AACxC,uBAAO,qBAAqB,MACxB,QAAQ;AACZ,uBAAO,sBAAsB,MAAM,QAAQ;;;;;AAK3D,eAAO,SAAY,QAAQ,CAAC,OAAO,QAAQ,SAAS,QAAQ,EAAE;;MAElE;AACI,yBAAiB,GAAG;AACpB,0DAAkD,EAAE;AACpD,wBAAgB,KAAK,SAAS,EAAE;AAChC,uBAAe,IAAI,aAAa,QAAQ,YAAY,WAAW;AAC/D,mCAA2B;UACtB,gBAAgB,YAAY,IAAK,YAAY,IAAI;UACjD,gBAAgB,WAAW,IAAK,WAAW,IAAI;;AAEpD,oCAA4B;UACvB,gBAAgB,YAAY,IAAK,YAAY,IAAI;UACjD,gBAAgB,WAAW,IAAK,WAAW,IAAI;;AAEpD,sCAA8B,mBAAmB,KAAK,oBAAoB;AAC1E,sCAA8B,mBAAmB,KAAK,oBAAoB;AAC1E,2BAAmB;AACnB,qBAAa,GAAG,IAAI,OAAO;AACvB,8BAAoB,IAAI,EAAE,QAAQ;AAClC,uBAAa,GAAG,IAAI,WAAW;AAC3B,kCAAsB,wBAAwB;AAC9C,qCAAyB,KAAK,IAAI,YAAY,GAAG,eAAe,KAAK,MAAM,iBACvE,KAAK,MAAM;AACf,8BAAkB,cAAc,mBAAmB,EAAE,QAAQ;AAC7D,yBAAa,GAAG,IAAI,UAAU;AAC1B,oCAAsB,wBAAwB;AAC9C,uCAAyB,KAAK,IAAI,WAAW,GAAG,eAAe,KAAK,MAAM,iBACtE,KAAK,MAAM;AACf,gCAAkB,YAAY,mBAAmB,EAAE,QAAQ;AAC3D,2BAAa,GAAG,IAAI,aAAa;AAG7B,+BAAe,QAAQ,YAAY;AACnC,uBAAO,kBAAkB;;;;;AAKzC,eAAO,QAAU,QAAQ,CAAC,OAAO,WAAW,UAAU,cAAc,EAAE;;MAE1E;AACI,yBAAiB,CAAC,IAAI,IAAI;AAC1B,gDAAwC,EAAE;AAC1C,oCAA4B,GAAG;AAC/B,uBAAe,IAAI,aAAa,QAAQ,UAAU,SAAS;AAC3D,yBAAiB,KAAK,SAAS,GAAG;AAGlC,+BAAuB;UAClB,gBAAgB,UAAU,IAAK,UAAU,IAAI;UAC7C,gBAAgB,SAAS,IAAK,SAAS,IAAI;;AAEhD,+BAAuB;UAClB,gBAAgB,UAAU,IAAK,UAAU,IAAI;UAC7C,gBAAgB,SAAS,IAAK,SAAS,IAAI;;AAEhD,4BAAoB,eAAe,KAAK,eAAe;AACvD,2BAAmB,eAAe,KAAK,eAAe;AACtD,+BAAuB,IAAI;AAC3B,8BAAsB,IAAI;AAG1B,0BAAmB,KAAK,KAAK,kBAAkB,IAAK;AACpD,yBAAkB,KAAK,KAAK,iBAAiB,IAAK;AAElD,qBAAa,GAAG,IAAI,OAAO;AACvB,8BAAoB,IAAI,EAAE,QAAQ;AAClC,uBAAa,GAAG,IAAI,SAAS;AACzB,8BAAkB,cAAc,IAAI,EAAE,QAAQ;AAE9C,+BAAmB,KAAK,MAAM,IAAI;AAClC,6BAAiB,KAAK,MAAM,aAAc,YAAY;AACtD,yBAAa,GAAG,IAAI,QAAQ;AACxB,gCAAkB,YAAY,IAAI,EAAE,QAAQ;AAE5C,iCAAmB,KAAK,MAAM,IAAI;AAClC,+BAAiB,KAAK,MAAM,aAAc,WAAW;AACrD,2BAAa,GAAG,IAAI,OAAO;AACvB,4BAAY;AAEZ,oCAAoB,GAAG,WAAW,WAAW;AACzC,8BAAY,WAAW;AAEvB,sBAAI,MAAM,KAAK,OAAO;AAClB;;AAEJ,oCAAkB,cAAc,MAAM,GAAG,QAAQ;AACjD,wCAAsB,MAAM;AAC5B,2CAAyB,KAAK,IAAI,UAAU,GAAG,eAAe,KAAK,MAAM,iBACrE,KAAK,MAAM;AACf,sBAAI,MAAM;AACN;;AAEJ,sCAAoB,GAAG,WAAW,UAAU;AACxC,gCAAY,WAAW;AAEvB,wBAAI,MAAM,KAAK,OAAO;AAClB;;AAEJ,sCAAkB,YAAY,MAAM,GAAG,QAAQ;AAC/C,0CAAsB,MAAM;AAC5B,6CAAyB,KAAK,IAAI,SAAS,GAAG,eAAe,KAAK,MAAM,iBACpE,KAAK,MAAM;AACf,wBAAI,MAAM;AACN,+BAAS,SAAS,YAAY;;;;AAI1C,uBAAO,YAAY,KAAK;;;;;AAKxC,eAAO,SAAY,QAAQ,EAAE,OAAO,EAAE;;MAE1C;AACI,yBAAiB,GAAG;AACpB,yBAAiB,EAAE,MAAM;AACzB,qBAAa,WAAW;AACxB,wBAAgB,KAAK,SAAS,EAAE;AAChC,qBAAa,EAAE;AACf,uBAAe,IAAI,aAAa;AAChC;AACI,iCAAuB,SAAS;AAChC,+BAAqB,SAAS,iBAAiB,KAAK,IAAI,GAAG,iBAAiB;AAC5E,+BAAqB,SAAS,iBAC1B,KAAK,IAAI,iBAAiB,aAAa;AAC3C,qBAAU;AACV,iBAAO,kBAAkB,cAAc;AACnC,sBAAU,QAAQ;AAClB,oBAAO,IAAI;;AAEf,iBAAO;;AAEX,0BAAkB,GAAG,SAAS,MAAM;AAChC,uBAAY,kBAAkB;AAC9B,sBAAY,QAAQ,UAAU,KAAK,IAAI,OAAO,QAAQ,MAAK,CAAC;AAC5D,iBAAO,UAAU;;AAErB,eAAO,SAAY,QAAQ,EAAE;;MAEjC;AACI,yBAAiB,IAAI;AACrB,yBAAiB,GAAG,MAAM;AAC1B,yBAAiB,KAAK,SAAS,GAAG;AAClC,iCAAyB,KAAK,SAAS,WAAW;AAClD,kCAA0B,KAAK,SAAS,YAAY;AACpD,uBAAe,IAAI,aAAa,GAAG;AACnC,qBAAa,GAAG;AAChB,0BAAkB,GAAG,SAAS,MAAM;AAChC,iCAAuB,SAAS;AAChC,6BAAoB,SAAS,iBAAkB,KAAK,
IAAI,GAAG,iBAAiB;AAC5E,2BAAkB,SAAS,iBACvB,KAAK,IAAI,UAAU,iBAAiB,cAAc;AACtD,sBAAW;AACX,uBAAa,YAAY,IAAI,UAAU;AACnC,qBAAQ,KAAK,IAAI,iBAAiB,IAAI;;AAE1C,kBAAO,QAAQ,QAAO;AACtB,uBAAa,YAAY,IAAI,UAAU;AACnC,sBAAU,KAAK,QAAQ,OAAO,iBAAiB,KAC3C,kBAAkB,UAAU;AAChC,gBAAI,WAAW;AACX,qBAAO,KAAK,IAAI,OAAM,CAAC;;AAE3B,mBAAO,SAAS;AAChB,mBAAO,MAAM;;;AAGrB,eAAO,SAAY,QAAQ,GAAG;;MAElC;AACI,yBAAiB,QAAQ;AACzB,8BAAsB,aAAa,SAAS,SAAW;AACvD,0BAAkB,cAAc,MAAM;AACtC,0BAAkB,cAAc,MAAM;AACtC,oBAAY,OAAS,CAAC,WAAW,aAAa;AAC9C,wBAAgB,KAAK,SAAS,IAAI;AAClC,yBAAiB,KAAK,SAAS,cAAc;AAC7C,qBAAa,GAAG,IAAI,WAAW,EAAE;AAC7B,yBAAe,IAAI;AAGnB,sBAAY,IAAI,aAAa,YAAY;AACzC,cAAI,KAAK,SAAS;AAClB,2BAAiB,GAAG,QAAQ,IAAI,QAAQ,EAAE;AACtC,gBAAI,SAAS,IAAI,QAAQ,KAAK,SAAS,SAAS;;AAEpD,yBAAe,aAAgB,KAAK;AACpC,4BAAkB,IAAI;AACtB,8BAAoB,GAAG,WAAW,YAAY,EAAE;AAC5C,sBAAU;AAEV,oBAAQ,YAAY,YAAY,IAAI;AACpC,6BAAiB,GAAG,QAAQ,IAAI,QAAQ;AACpC,kBAAI,IAAI,IAAI;AACR,wBAAQ,YAAY,YAAY;AAChC;;;;;AAKhB,eAAO;;MAEX;AACI,yBAAiB,SAAS;AAC1B,oBAAY,IAAI,aAAa,QAAQ,OAAO;AAC5C,YAAI,KAAK;AACT,2BAAmB,KAAK,SAAS,QAAQ;AACzC,yBAAiB,GAAG,QAAQ,QAAQ,MAAM,EAAE;AACxC,cAAI,WAAW,UAAU,KAAK,WAAW,SAAS;AAC9C,gBAAI,QAAQ,QAAQ,WAAW,UAAU;;;AAGjD,eAAO,SAAY,KAAK,CAAC,QAAQ,MAAM,QAAQ;;MAEnD;AACI,yBAAiB,OAAO;AACxB,0BAAkB,KAAK,SAAS,MAAM;AACtC,2BAAmB,KAAK,SAAS,OAAO;AACxC,eAAO,0BAAwB,WAAW,YAAY,eAAe,cAAc;;MAEvF;AACI,gBAAY,eAAe,QAAQ,MAAM,+DAA+D;AACxG,gBAAY,YAAY,GAAG,MAAM,sDAAsD;AACvF,0BAAkB,EAAE,MAAM;AAC1B,4BAAoB,EAAE,MAAM;AAC5B,2BAAmB,EAAE,MAAM;AAC3B,2BAAmB,EAAE,MAAM;AAC3B,6BAAqB,cAAc;AACnC,4BAAoB,aAAa;AACjC,4BAAoB,aAAc,aAAY;AAC9C,wBAAgB,KAAK,SAAS,EAAE;AAChC,uBAAe,IAAI,aAAa,YAAY,eAAe,cAAc;AACzE,wBAAgB;AAChB,qBAAa,GAAG,IAAI,WAAW,EAAE;AAC7B,uBAAa,GAAG,IAAI,cAAc,EAAE;AAChC,wBAAY,KAAK,MAAM,IAAI;AAC3B,4BAAiB,IAAI;AACrB,yBAAa,GAAG,IAAI,aAAa,EAAE;AAC/B,0BAAY,KAAK,MAAM,IAAI;AAC3B,8BAAiB,IAAI;AACrB,8BAAiB,WAAU,YAAY,WAAW;AAClD,2BAAa,GAAG,IAAI,aAAa,EAAE;AAC/B,4BAAY,IAAI;AAChB,iCAAiB,MAAM,aAAc,OAAM,aAAc,OAAM,cAAc;AAC7E,uBAAO,eAAe,QAAQ;;;;;AAK9C,eAAO,SAAY,QAAQ,CAAC,WAAW,cAAc,aAAa;;MAEtE;AACI,yBAAiB,4BAAwC,EAAE,OAAO,EAAE;AACpE,uBAAe,QAAU,UAAU;AACnC,sBAAc,KAAK,SAAS,EAAE;AAC9B,sBAAc,KAAK,SAAS,EAAE;AAC9B,+BAAuB,kBAA8B,EAAE,OAAO;AAC9D,+BAAuB,kBAA8B,EAAE,OAAO;AAC9D,wBAAgB,OAAO;AACvB,YAAI,eAAe,SAAS,eAAe,WAAW;AAClD,uBAAa,GAAG,IAAI,QAAQ,QAAQ,EAAE;AAClC,oBAAQ,KAAK,IAAG,MAAM,IAAI,MAAM,SAAS,MAAM,IAAI,MAAM;;;AAI7D,uBAAa,KAAK,WAAW;AAC7B,uBAAa,KAAK,WAAW;AAC7B,uBAAa,GAAG,IAAI,QAAQ,QAAQ,EAAE;AAClC,wBAAY,OAAO,WAAW;AAC9B,yBAAa,IAAI,MAAM,CAAC,EAAE;AAC1B,2BAAe,QAAQ,OAAK,KAAK,KAAK;AACtC,2BAAe,KAAK,WAAW;AAC/B,yBAAa,IAAI,MAAM,CAAC,EAAE;AAC1B,2BAAe,QAAQ,OAAK,KAAK,KAAK;AACtC,2BAAe,KAAK,WAAW;AAC/B,oBAAQ,KAAK,IAAG,MAAM,SAAS,MAAM;;;AAG7C,eAAO,OAAO;;MAElB;AACI,eAAO,QAAM,GAAG,YAAY;;MAEhC;;MACA;AACI,eAAO;;MAGX;AACI,eAAO,MAAM;;MAEjB;AACI,8DAAsD,OAAO;AAC7D,yBAAiB,MAAM,MAAM;AAC7B,wCAAgC;AAChC,uBAAe,QAAU,CAAC,UAAU,YAAY,WAAW,cAAc;AACzE,wBAAgB,KAAK,SAAS,MAAM;AACpC,2BAAmB,KAAK,SAAS,SAAS;AAC1C,0BAAkB,KAAK,SAAS,OAAO;AACvC,yBAAiB,OAAO;AACxB,0BAAkB,OAAO;AAIzB,qBAAa,GAAG,IAAI,UAAU;AAC1B,2BAAiB,IAAI;AACrB,qBAAW,QAAQ;AACnB,qBAAW,QAAQ,WAAW;AAC9B,qBAAW,QAAQ,WAAW;AAC9B,qBAAW,QAAQ,WAAW;AAC9B,uBAAa,WAAW;AACxB,cAAI,QAAQ;AACR;;AAEJ,8BAAqB,aAAa,IAC7B,MAAK,MAAO,eAAc,KAAM,cAAa,KAC9C;AACJ,6BAAoB,YAAY,IAAM,MAAK,MAAO,cAAa,KAAM,aAAY,KAAK;AACtF,uBAAa,GAAG,IAAI,YAAY;AAC5B,yBAAc,aAAa,IACvB,KAAM,eAAc,KAAK,IAAK,cAC9B,MAAO,MAAK,MAAO,eAAc;AACrC,gBAAI,OAAO,KAAK,OAAO,cAAc;AACjC,2BAAa,GAAG,IAAI,WAAW;AAC3B,6BAAa,GAAG,IAAI,aAAa;AAC7B,8BAAY,IAAI,IAAI,UAAU,KAAK,IAAI,UAAU,KAAK,IAAI,UAAU;AACpE,yBAAO,OAAO,OAAO;;;AAG7B;;AAEJ,gBAAI,WAAW;AACX,6BAAe,KAAK,MAAM;AAC1B,gCAAkB,KAAK,KAAK;AAC5B,4BAAc,OAAO;AACrB,2BAAa,GAAG,
AoB;AAC7D,kCAA0B,sBAAsB,GAAG,iBAAiB;AACpE,cAAM,IAAI,MAAM;;AAEpB,aAAO;;AAEX,4BAAwB;AACxB;AACI,oCAA8B,gBAAgB,KAAK;AACnD,UAAI,yBAAyB;AACzB,gBAAQ,IAAI,wCAAwC;AACpD,gBAAQ,IAAI;AACZ;;AAEJ,yBAAmB,CAAC,sBAAsB;AAC1C,0BAAoB,aAAa,MAAM;AACvC,mBAAY,YAAY,OAAO,WAAW,SAAS;AACnD,mCAA6B,YAAY,IAAI,uBAAsB,UAAe,eAAa,GAAG,YAAY,QAAO;AACrH,0BAAoB;AACpB,mBAAa,GAAG,IAAI,qBAAqB,QAAQ;AAC7C,wBAAgB,KAAK,IAAI,qBAAqB,GAAG,QAAQ;;AAE7D,+BAAyB,qBAAqB,MAAM,GAAG,aAAa;AACpE,wBAAkB,qBAAqB,MAAM,aAAa,GAAG;AAC7D,8BAAwB,qBAAqB,MAAM;AACnD,cAAQ,IAAI,iBAAiB,KAAK;AAClC,cAAQ,IAAI,cAAc,MAAM,MAAM;AACtC,cAAQ,IAAI,MAAM,UAAc,UAAU,IAAI,kBAAkB;AAChE,cAAQ,IAAI,gBAAgB,KAAK;;AAE9B;AACH,aAAO,YAAY,IAAI,MAAM,GAAG,iBAAiB;;AAE9C;AACH,mBAAa,IAAI,MAAM,GAAG,YAAY;AACtC,UAAI,GAAG,oBAAoB,SAAS,GAAG,iBAAiB;AACpD,gBAAQ,IAAI,GAAG,kBAAkB;AACjC,cAAM,IAAI,MAAM;;;AAGjB;AACH,mBAAa,IAAI,MAAM,GAAG,gBAAgB;AAC1C,UAAI,GAAG,oBAAoB,SAAS,GAAG,qBAAqB;AACxD,gBAAQ,IAAI,GAAG,kBAAkB;AACjC,cAAM,IAAI,MAAM;;;AAGjB;AACH,sBAAe,YAAY,IAAI,MAAM,GAAG,gBAAgB;AACxD,mBAAa,IAAI,MAAM,GAAG,WAAW,GAAG,cAAc;AACtD,mBAAa,IAAI,MAAM,GAAG,WAAW,GAAG,cAAc,OAAM,GAAG;AAC/D,aAAO;;AAEJ;AACH,sBAAe,YAAY,IAAI,MAAM,GAAG,gBAAgB;AACxD,mBAAa,IAAI,MAAM,GAAG,WAAW,GAAG,sBAAsB;AAC9D,mBAAa,IAAI,MAAM,GAAG,WAAW,GAAG,sBAAsB,OAAM,GAAG;AACvE,aAAO;;AAEJ;AACH,UAAI,OAAM,UAAU,qBAAqB;AACrC,eAAO;;AAEX,aAAO;;AAEJ;AACH,aAAO,YAAY,IAAI,MAAM,GAAG,iBAAiB;;AAE9C;AACH,6BAAuB,OAAM,UAAU;AACvC,UAAK,SAAS,KAAO,UAAU;AAC3B,0BAAkB,IAAI,SAAS;AAC/B,cAAM,IAAI,MAAM,4BAA4B,YAAY;;AAE5D,UAAK,QAAQ,kBAAoB,SAAS;AACtC,0BAAkB,IAAI,SAAS;AAC/B,qBAAY,IAAI,kBAAkB;AAClC,cAAM,IAAI,MAAM,4BAA4B,YACxC,uDAAuD,OAAM;;;AAGlE;AACH,aAAO,YAAY,IAAI,MAAM,GAAG,qBAAqB;;AAElD;AACH,kBAAY,GAAG,kBAAkB,SAAS;AAC1C,UAAI,QAAQ;AAGR,eAAO;;AAEX,mBAAa,IAAI,MAAM,GAAG,WAAW,GAAG,cAAc;AACtD,mBAAa,IAAI,MAAM,GAAG,oBAAoB,KAAK,qBAAqB,GAAG,OAAO,OAAO,mBAAmB;AAC5G,mBAAa,IAAI,MAAM,GAAG,wBAAwB;AAClD,aAAO;;AAEJ;AACH,0BAAoB,IAAI;AACxB,mBAAa,IAAI,MAAM,GAAG,cAAc,GAAG,WAAW;AACtD,mBAAa,IAAI,MAAM,GAAG,YAAY,GAAG,YAAY;;AAElD;AACH,0BAAoB,IAAI;AACxB,mBAAa,IAAI,MAAM,GAAG,cAAc,GAAG,WAAW;AACtD,mBAAa,IAAI,MAAM,GAAG,YAAY,GAAG,YAAY;;AAElD;AACH,aAAO,YAAY,IAAI,MAAM,GAAG,mBAAmB,SAAS,cAAc,cAAc,cAAc;;AAEnG;AACH,aAAO,GAAG,mBAAmB,SAAS;;AAEnC;AACH,mBAAa,IAAI,MAAM,gBAAgB,IAAI,SAAS;AACpD,mBAAa,IAAI,MAAM,GAAG,UAAU,wBAAwB;;AAEzD;AACH,mBAAa,IAAI,MAAM,GAAG,gBAAgB,GAAG,aAAa;AAC1D,mBAAa,IAAI,MAAM,GAAG,SAAS,GAAG,GAAG,GAAG,OAAO,OAAO,GAAG,OAAO;AACpE,mBAAa,IAAI,MAAM,GAAG,QAAQ,GAAG,GAAG,GAAG,OAAO,OAAO,GAAG,OAAO;;AAEhE;AACH,mBAAa,IAAI,MAAM,GAAG,gBAAgB,GAAG,aAAa;AAC1D,mBAAa,IAAI,MAAM,GAAG,qBAAqB,GAAG,aAAa,GAAG,mBAAmB,GAAG,YAAY,SAAS;;AAE1G;AACH,mBAAa,IAAI,MAAM,GAAG,gBAAgB,GAAG,aAAa;AAC1D,mBAAa,IAAI,MAAM,GAAG,qBAAqB,GAAG,aAAa,GAAG,mBAAmB,GAAG,YAAY,MAAM;;AAEvG;AACH,qBAAe,GAAG,uBAAuB,GAAG;AAC5C,UAAI,WAAW,GAAG;AACd,cAAM,IAAI,MAAM,gCAAgC,2BAA2B,IAAI;;;AAGhF;AACH,cAAQ;aACC,GAAG;AACJ,iBAAO;aACN,GAAG;AACJ,iBAAO;aACN,GAAG;AACJ,iBAAO;aACN,GAAG;AACJ,iBAAO;;AAEP,iBAAO,iBAAiB;;;AAGpC;AACI,sBAAgB,aAAa,IAAI,MAAM;AACvC,UAAI,WAAW;AACX,cAAM,IAAI,MAAM;;AAEpB,aAAO;;AAEX;AACI,6BAAuB,GAAG,mCAAmC;AAC7D,4BAAsB,cAAc,GAAG;AACvC,UAAI,gBAAgB,GAAG,YAAY,gBAAgB;AAC/C,iCAAyB,2BAA2B;AACpD,cAAM,IAAI,MAAM,0BAA0B;;;AAG3C,6CAAyC;AAC5C,aAAO,eAAmB,MAAM,MAAM,GAAG,MAAM,SAAS;;AAErD;AACH,UAAI,MAAM,WAAW;AACjB,cAAM,MAAM;;AAEhB,aAAO;QACH,MAAM,SAAS,IAAI,MAAM,MAAM,SAAS,KAAK;QAAG,MAAM,MAAM,SAAS;;;AAGtE;AACH,sBAAgB,CAAC,GAAG,GAAG;AACvB,uBAAiB,MAAM,WAAW,KAAM,MAAM,WAAW,KAAK,MAAM,OAAO;AAC3E,UAAI,CAAC;AACD,oBACI,CAAC,YAAY,QAAQ,GAAG,YAAY;;AAE5C,aAAO;;AAEJ,kEAA8D;AACjE,uBAAiB,OAAM,UAAU;AACjC,UAAI;AACA,qBAAa,aAAa;AAM1B,mBAAW,SAAS,IAAI,UAAU,KAAK,SAAS,SAAS,IACrD,mBAAuB,SAAS,MAChC,SAAS;AAGb,YAAI,SAAS,W
AAW;AACpB,qBAAW,CAAC,GAAG,SAAS;;;AAIhC,UAAI,SAAS,WAAW;AACpB,8BAAsB,cAAkB;AACxC,mBAAW,cAAc;;AAE7B,iBAAW,eAAmB;AAC9B,UAAI,SAAS,UAAU,KAAK,QAAQ;AAChC,eAAO,CAAC,GAAG;iBAEN,SAAS,WAAW,KAAK,SAAS,MAAM,cAC7C,SAAS,MAAM;AACf,eAAO;iBAEF,SAAS,WAAW,KAAK,SAAS,KAAK,SAAS,MAAM,cAC3D,SAAS,MAAM;AACf,eAAO,CAAC,SAAS,KAAK,SAAS,IAAI,SAAS;iBAEvC,SAAS,WAAW,KAAK,SAAS,MAAM,cAC7C,SAAS,KAAK,SAAS,MAAM;AAC7B,eAAO,CAAC,SAAS,IAAI,SAAS,KAAK,SAAS;iBAEvC,SAAS,WAAW,KACzB,SAAS,KAAK,SAAS,KAAK,SAAS,MAAM,cAC3C,SAAS,MAAM;AACf,eAAO,CAAC,SAAS,KAAK,SAAS,KAAK,SAAS,IAAI,SAAS;iBAErD,SAAS,WAAW,KAAK,SAAS,MAAM,cAC7C,SAAS,KAAK,SAAS,KAAK,SAAS,MAAM;AAC3C,eAAO,CAAC,SAAS,IAAI,SAAS,KAAK,SAAS,KAAK,SAAS;;AAG1D,YAAI;AAMA,2BAAiB,YAAY;AAC7B,qBAAW,UAAU;AACrB,cAAI,SAAS;AACT,aAAC,MAAM,QAAQ,YAAY;;AAE/B,iBAAO,WAAY,QAAO,KAAM,QAAO;AACvC,iBAAO,qBAAyB,MAAM,IAAI,OAAK,IAAI;;AAEvD,eAAO,qBAAyB;;;AAGxC;AACI,aAAO,IAAI,MAAM;;AAMd;AACH,eAAS,OAAO,MAAM;AACtB,eAAS,OAAO,MAAM;AACtB,UAAI,aAAiB,QAAQ;AACzB,eAAO;;AAEX,UAAI,CAAC,OAAO,UAAU,CAAC,OAAO;AAC1B,eAAO;;AAEX,UAAI,OAAO,OAAO,KAAK,OAAO,OAAO,KAAK,OAAO,OAAO,KACpD,OAAO,OAAO;AACd,eAAO;;AAEX,UAAI,OAAO,WAAW,OAAO;AACzB,2BAAmB,OAAO,MAAM,IAAI;AACpC,2BAAmB,OAAO,MAAM,IAAI;AACpC,YAAI,eAAe;AACf,iBAAO;;AAEX,YAAI,OAAO,eAAe,OAAO,eAC5B,QAAO,OAAO,KAAK,OAAO,OAAO;AAClC,iBAAO;;;AAGf,aAAO,OAAO,OAAO,OAAO,MAAM,OAAO,OAAO,OAAO,OAAO,OAAO;;AAKzE;AACA;AACO;AACH,UAAI,oBAAoB;AACpB,mBAAW,gBAAgB;AAC3B,2BAAmB,GAAG,aAAa,GAAG;;AAE1C,aAAO;;AAEJ;AACH,yBAAmB;;AAEhB;AACH,+BAAyB;;AAEtB;AACH,UAAI,0BAA0B;AAC1B,mBAAW,gBAAgB;AAC3B,iCAAyB,GAAG,aAAa,GAAG;;AAGhD,aAAO,KAAK,IAAI,IAAI;;AAEjB;AACH,UAAI,iBAAiB;AACjB,eAAO;;AAEX;AACA,iBAAW,gBAAgB;AAC3B,UAAI,aAAa,IAAI,sCACjB,iBAAiB;AACjB,4BAAoB;iBAEf,aAAa,IAAI;AACtB,4BAAoB;;AAGpB,4BAAoB;;AAExB,aAAO;;AAEJ;AACH,kBAAY,GAAG,aAAa;AAC5B,aAAO,OAAO;;AAEX;AACH;AACI,mBAAW,gBAAgB;AAC3B,YAAI,MAAM;AACN,iBAAO;;;AAIX,gBAAQ,IAAI,sCAAsC;AAClD,eAAO;;AAEX,aAAO;;AAEJ;AACH,UAAI,iBAAiB;AACjB,eAAO;;AAEX,iBAAW,gBAAgB;AAC3B,UAAI,iBAAiB;AACjB,YAAI,CAAC,aAAa,IAAI;AAClB,iBAAO;;;AAIX,YAAI,CAAC,aAAa,IAAI;AAClB,iBAAO;;;AAGf,oCAA8B,uCAAuC;AACrE,aAAO;;AAWJ;AACH,UAAI,iBAAiB;AACjB,eAAO;;AAEX,iBAAW,gBAAgB;AAC3B,UAAI,iBAAiB;AACjB,YAAI,CAAC,aAAa,IAAI;AAClB,iBAAO;;AAEX,YAAI,CAAC,aAAa,IAAI;AAClB,iBAAO;;;AAIX,YAAI,aAAa,IAAI;AACjB,iBAAO,uCAAuC;;AAElD,wCAAgC;AAChC,YAAI,aAAa,IAAI;AACjB,4CAAkC,GAAG,aAAa;AAClD,iBAAO,2CAA2C,IAAI;;AAE1D,eAAO;;AAEX,oCAA8B,uCAAuC;AACrE,aAAO;;AAEX;AACI,wBAAkB,iBAAiB;AACnC,sBAAgB,GAAG;AACnB,SAAG,YAAY,GAAG,YAAY;AAC9B,oBAAc;AACd,qBAAe;AACf,SAAG,WAAW,GAAG,YAAY,GAAG,UAAU,qBAAqB,OAAO,QAAQ,GAAG,UAAU,oBAAoB,UAAU,kBAAkB;AAC3I,0BAAoB,GAAG;AACvB,SAAG,gBAAgB,GAAG,aAAa;AACnC,SAAG,qBAAqB,GAAG,aAAa,GAAG,mBAAmB,GAAG,YAAY,SAAS;AACtF,oCAA8B,GAAG,uBAAuB,GAAG,iBAAiB,GAAG;AAC/E,SAAG,YAAY,GAAG,YAAY;AAC9B,SAAG,gBAAgB,GAAG,aAAa;AACnC,SAAG,cAAc;AACjB,SAAG,kBAAkB;AACrB,aAAO;;AAEX;AAGI,wBAAkB,iBAAiB,IAAI;AACvC,sBAAgB,GAAG;AACnB,SAAG,YAAY,GAAG,YAAY;AAC9B,oBAAc;AACd,qBAAe;AACf,SAAG,WAAW,GAAG,YAAY,GAAG,UAAU,yBAAyB,OAAO,QAAQ,GAAG,UAAU,oBAAoB,UAAU,sBAAsB;AACnJ,0BAAoB,GAAG;AACvB,SAAG,gBAAgB,GAAG,aAAa;AACnC,SAAG,qBAAqB,GAAG,aAAa,GAAG,mBAAmB,GAAG,YAAY,SAAS;AACtF,oCAA8B,GAAG,uBAAuB,GAAG,iBAAiB,GAAG;AAC/E,SAAG,YAAY,GAAG,YAAY;AAC9B,SAAG,gBAAgB,GAAG,aAAa;AACnC,SAAG,cAAc;AACjB,SAAG,kBAAkB;AACrB,aAAO;;AAEJ;AACH,UAAI,iBAAiB;AACjB,eAAO;;AAEX,iBAAW,gBAAgB;AAE3B,wBAAkB,GAAG,aAAa;AAClC,aAAO;;AAEJ;AACH,UAAI,CAAC,MAAM,QAAQ;AACf,kBAAS,CAAC;;AAEd,cAAO,QAAQ;AACX,YAAI,KAAK;AACL,kBAAY,EAAE,UAAU,aAAa,MAAM,GAAG;;;;AChhB1D;;;;;;;;;;;;;;;;AAkBA,kBAAY;AAOZ,UAAI,aAAa,aAAa,MAAM,MAAI,UAAU,mBAAmB;AAErE,UAAI,aAAa,iBAAiB;AAC9B,UAAI,sBAAsB;AACtB,eAAO;iBAEF,sBAAsB;AAC3B,eAAO;;AAEX,aAAO;;AAGX,UAAI,aAAa,kCAAkC,MAA
M;AACzD,UAAI,aAAa,0BAA0B,MAAM,MAAI,IAAI,qBAAqB;AAE9E,UAAI,aAAa,qBAAqB,MAAM;AAE5C,UAAI,aAAa,4BAA4B,MAAM;AAEnD,UAAI,aAAa,cAAc,MAAM,MAAI,QAAQ;AAEjD,UAAI,aAAa,4BAA4B,MAAM,MAAI,QAAQ;AAE/D,UAAI,aAAa,mBAAmB,MAAM,MAAI,QAAQ;AAGtD,UAAI,aAAa,4BAA4B,MAAM;AAEnD,UAAI,aAAa,gCAAgC,MAAM,MAAI,QAAQ;AAEnE,UAAI,aAAa,+BAA+B,MAAM,MAAI,QAAQ;AAElE,UAAI,aAAa,+BAA+B,MAAM,MAAI,QAAQ;AAElE,UAAI,aAAa,+BAA+B,MAAM,MAAI,QAAQ;AAElE,UAAI,aAAa,qBAAqB,MAAM,MAAI,QAAQ;AAExD,UAAI,aAAa,uBAAuB,MAAM,MAAI,QAAQ;AAE1D,UAAI,aAAa,qBAAqB,MAAM,MAAI,QAAQ;AAExD,UAAI,aAAa,0BAA0B,MAAM,uBAAuB,MAAI,UAAU;AAEtF,UAAI,aAAa,gCAAgC,MAAM,uBAAuB,MAAI,UAAU;AAS5F,UAAI,aAAa,gDAAgD;AAC7D,2BAAqB,MAAI,UAAU;AACnC,UAAI,iBAAiB;AACjB,eAAO;;AAEX,aAAO,kCAAkC;;AAM7C,UAAI,aAAa,iDAAiD,MAAM,MAAI,UAAU,kDAAkD,KACpI,CAAC;AAIL,UAAI,aAAa,gCAAgC,MAAM,mCAAmC,MAAI,UAAU;AAKxG,UAAI,aAAa,gCAAgC;AAC7C,aAAO,MAAI,QAAQ,8BACf,QACA,MAAI,QAAQ;;AAMpB,UAAI,aAAa,gCAAgC,MAAM,8BAA8B,MAAI,UAAU;AAEnG,UAAI,aAAa,2BAA2B,MAAM,oBAAoB,MAAI,UAAU;AAIpF,UAAI,aAAa,6BAA6B;AAK1C,0BAAoB,MAAI,QAAQ;AAChC,aAAO,cAAc,IAAI;;AAS7B,UAAI,aAAa,kCAAkC;AAC/C,aAAO;OACR;AACC,UAAI,aAAY,KAAK,eAAc;AAC/B,cAAM,IAAI,MAAM,8FACsB;;;ACtI9C;;;;;;;;;;;;;;;;AAoBA,WAAQ,iCAAiC,qBAAqB,uBAAuB,qBAAqB,yBAAyB,yBAAyB,qBAAqB,qBAAqB,+BAA+B,yBAAyB,yBAAyB,qBAAqB,iCAAiC,6BAA+B;ACpB5W;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,cAAc;AACnB,aAAK,cAAc;AACnB,aAAK,gBAAgB,OAAO,IAAI,UAAU,IAAI;AAC9C,yBAAiB;AAEjB,aAAK,cAAc,QAAQ;AACvB,mBAAS,KAAK,UAAU,kBAAiB;;AAG7C,4BAAkB,KAAK,cAClB,IAAI;AACL,iBAAO,IAAI;WAEV,KAAK;AACV,aAAK,WAAW;;UAEd,SAAS,KAAK;;yBAEC;;;;;;ACpCzB;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,cAAc;AACnB,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,aAAK,cAAc;AACnB,aAAK,gBAAgB,OAAO,IAAI,UAAU,IAAI;AAC9C,yBAAiB;AAEjB,aAAK,cAAc,QAAQ;AACvB,mBAAS,KAAK,SAAS,kBAAiB;;AAG5C,4BAAkB,KAAK,cAClB,IAAI;AACL,iBAAO,IAAI;WAEV,KAAK;AACV,aAAK,WAAW;;UAEd,SAAS,KAAK;;wBAEA;;;;;;ACtCxB;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC;AACtB,eAAQ,YAAY,WAAW,WAAY;AAC3C,YAAI,CAAC;AACD,eAAK,cAAc,KAAK;;AAE5B,aAAK,cAAc,CAAC,WAAW;AAC/B,uBAAgB,QAAO,QAAS,MAAM;AACtC,6BAAqB,YACjB,kBACA;AACJ,aAAK,WAAW;;;;;kCAKU;;;;;8BAKJ;wBACN;;0BAEE;;;;;;;;;;ACzC1B;;;;;;;;;;;;;;;;AAgBO;AACH,aAAO,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK,MAAM,GAAG,MAAM,IAAI,OAAK,GAAG,QAAQ;;AAEtE;AACH,UAAI,SAAS;AACT,eAAO,CAAC;;AAEZ,aAAO,eAAe,MAAM;;AAEzB;AACH,UAAI,SAAS;AACT,eAAO;;AAEX,oBAAa;AACb,mBAAa,GAAG,IAAI,MAAM;AACtB,mBAAU,KAAK;AACf,YAAI,IAAI,OAAO;AACX,qBAAU;;;AAGlB,aAAO;;ACpCX;;;;;;;;;;;;;;;;AAiBO;AACH;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,UAAI,OAAM,UAAU,qBAAqB;AACrC,mBAAU;AACV,oBAAY;AACZ,oBAAY;AACZ,oBAAY;AACZ,oBAAY;AACZ,iBAAS;AACT,uBAAe;AAKf,2BAAmB;;;;;;;;;;;;AAcnB,2BAAmB;AACnB,sBAAc;;;;;;;;;;;AAYd,mBAAU;AACV,oBAAY;AACZ,oBAAY;AACZ,oBAAY;AACZ,oBAAY;AACZ,iBAAS;AACT,uBAAe;AAEf,2BAAmB;;;;;;;;;AASnB,2BAAmB;;;;;;;;;;AAUnB,sBAAc;;;;;;;;;;AAUlB,aAAO;QACH;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;;;AClHR;;;;;;;;;;;;;;;;AAsBO,yEAAmE;AACtE,sBAAgB,gBAAoB;AACpC,aAAO,QACF,IAAI;AACL,sBAAc,OAAO,QAAO,QAAQ,YAAW;AAC/C,sBAAc,MAAM,QAAQ,SAAS,IACjC,OAAO,QAAO,IAAI,QAAQ,YAAW,QAAO,QAAQ,WACpD,YAAY,QAAO,QAAQ;AAC/B,eAAO,GAAG,UAAU;SAEnB,KAAK;;AAEd;AACI,UAAI,EAAE,WAAW;AACb,eAAO,GAAG,EAAE;;AAEhB,aAAO,MAAM,EAAE,UAAU,EAAE,KAAK;;AAM7B;AACH,UAAI,EAAE,WAAW,EAAE;AACf,cAAM,IAAI,MAAM,wDACL,EAAE,cAAc,EAAE;;AAEjC,qBAAe;AACf,0BAAoB,KAAK,MAAM,EAAE,SAAS;AAC1C,mCAA6B,EAAE,SAAS;AACxC,mBAAa,GAAG,IAAI,aAAa;AAC7B,uBAAe,EAAE,MAAM,IAAI,GAAG,IAAI,IAAI;AACtC,uBAAe,EAAE,MAAM,IAAI,GAAG,IAAI,IAAI;AACtC,eAAO,KAAK,GAAG,SAAS,YAAY,SAAS;;AAEjD,UAAI,yBAAyB;AACzB,qBAAa,EAAE,MAAM,cAAc;AACnC,qBAAa,EAAE,MAAM,cAAc;AACnC,YAAI,OAAO,WAAW;AAClB,mBAAS,OAAO,IAAI,OAAK,SAAS;AAClC,mBAAS,OAAO,IAAI,OAAK,SAAS;;AAEtC,eAAO,KA
AK,GAAG,SAAS,YAAY,SAAS;;AAEjD,aAAO,OAAO,IAAI,UAAU,OAAO,MAAM,KAAK;;AAK3C;AACH,sBAAgB,gBAAoB,OAAO,IAAI,OAAK,EAAE;AACtD,aAAO;;wBAEa,QAAQ,mBAAmB,QAAQ;;;;AAIpD,iCAA6B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AC/EpC;;;;;;;;;;;;;;;;AAiBA,WAAM,wCAAuB;AAGtB;AACH,6BAAuB;AACvB,iBAAW,QAAQ;AACf,qBAAa,eAAmB,EAAE,UAAU;AAE5C,YAAI,EAAE,UAAU;AACZ,yBAAe,KAAK,iBAAiB,EAAE,OAAO,OAAO,IAAI,IAAI,UAAU;;AAGvE,yBAAe,KAAK,qBAAqB,EAAE;AAC3C,yBAAe,KAAK,qBAAqB,EAAE;;;AAGnD,iCAA2B,eAAe,KAAK;AAC/C,mCAA6B,WACxB,IAAI,OAAK,wBAAwB,GAAG,aAAa,qBACjD,KAAK;AACV,0BAAoB,YAAY;AAChC,mBAAa;AACb,wCAAkC,6BAA6B;AAC/D;AACA;AACA,yBAAmB,gBAAgB;AACnC,UAAI,YAAY;AACZ,gCACI,+BAA+B,YAAY,cAAc;AAC7D,uCAA+B,8BAA8B;;AAG7D,gCACI,yBAAyB,YAAY,cAAc;AACvD,uCAA+B,2BAA2B;;AAE9D,UAAI;AACA,wBAAgB;;AAEpB,qBAAe;QACX;QAAc;QAA2B;QACzC;QAAoB;QAAuB;QAAsB;QACnE,KAAK;AACP,aAAO;;AAEX;AACI,oBAAc,OAAO,UAAU;AAC/B,cAAQ,MAAM;aACL;AACD,iBAAO,iBAAiB;aACvB;AACD,iBAAO,aAAa;aACnB;AACD,iBAAO,aAAa;aACnB;AACD,iBAAO,aAAa;aACnB;AACD,iBAAO,aAAa;aACnB;AACD,iBAAO,aAAa;aACnB;AACD,iBAAO,aAAa;;AAEpB,gBAAM,IAAI,MAAM,GAAG,MAAM;;;AAIrC;AACI,oBAAc,OAAO,UAAU;AAC/B,cAAQ,MAAM;aACL;AACD,iBAAO,uBAAuB;aAC7B;AACD,iBAAO,mBAAmB;aACzB;AACD,iBAAO,mBAAmB;aACzB;AACD,iBAAO,mBAAmB;;AAE1B,iBAAO,mBAAmB;;;AAGtC,gFAA4E;AACxE,gBAAU;AACV,UAAI;AACA,eAAO,2BAA2B;;AAGlC,eAAO,qBAAqB;;AAEhC,sBAAgB,OAAO,UAAU;AACjC,uBAAiB,aAAa;AAC9B,UAAI,QAAQ,UAAU,SAAS;AAC3B,YAAI;AACA,iBAAO,+BAA+B,QAAQ;;AAG9C,iBAAO,yBAAyB,QAAQ;;;AAGhD,aAAO;;AAEX;AACI,cAAQ,SAAS;aACR;AACD,iBAAO;aACN;AACD,iBAAO,wBAAwB,UAAU;aACxC;AACD,iBAAO,wBAAwB,UAAU;aACxC;AACD,iBAAO,wBAAwB,UAAU;;AAEzC,iBAAO,wBAAwB,UAAU;;;AAGrD;AACI,cAAQ,SAAS;aACR;AACD,iBAAO;aACN;AACD,iBAAO,kBAAkB,UAAU;aAClC;AACD,iBAAO,kBAAkB,UAAU;aAClC;AACD,iBAAO,kBAAkB,UAAU;aAClC;AACD,iBAAO,kBAAkB,UAAU;aAClC;AACD,iBAAO,kBAAkB,UAAU;aAClC;AACD,iBAAO,kBAAkB,UAAU;;AAEnC,gBAAM,IAAI,MAAM,GAAG,SAAS;;;AAGxC;AACI,aAAO;;eAEI,KAAK;;;;AAIpB;AACI,aAAO;;QAEH,KAAK;;;;AAIb;AACI,aAAO;;QAEH,KAAK;;;;AAIb;AACI,4BAAsB,GAAG,KAAK;;;;MAI5B,KAAK;MACL,KAAK;;;;;;;;;;;;;;;;;;;;;;;MAuBL,KAAK;MACL,KAAK;MACL,KAAK;;;;;;;;;;;;;;;;;;;;;;;;;MAyBL;MACA;MACA;;AAEF,aAAO;;AAEX,8BAA0B;;;;;;;;;;;;;AAa1B,8BAA0B;;;;;;;;;AAS1B,8BAA0B;;;;;;;;;;AAU1B,iCAA6B;;;;;;;;;;;;AAY7B;AACI,aAAO;;;;;;AAMX;AACI,6BAAuB,CAAC,KAAK,KAAK,SAAS,KAAK,IAAI,KAAK,KAAK,SAAS,KAAK;AAC5E,UAAI,eAAe,OAAO;AACtB,eAAO;;sCAEuB,eAAe;;;;AAIjD,UAAI,eAAe,OAAO;AACtB,eAAO;;sCAEuB,eAAe;;;;AAIjD,aAAO;;;oCAGyB,eAAe,OAAO,eAAe;iCACxC,eAAe;;;;AAIhD;AACI,UAAI,SAAS,OAAO;AAChB,eAAO;;kCAEmB,SAAS;;;;AAIvC,UAAI,SAAS,OAAO;AAChB,eAAO;;kCAEmB,SAAS;;;;AAIvC,aAAO;;;oCAGyB,SAAS,OAAO,SAAS;4BACjC,SAAS;;;;AAIrC;AACI,6BAAuB,CAAC,KAAK,KAAK,SAAS,KAAK,IAAI,KAAK,KAAK,SAAS,KAAK;AAC5E,iCAA2B,KAAK,KAAK,MAAM,KAAK;AAChD,4BAAsB,qBAAqB,KAAK,KAAK,MAAM,KAAK;AAChE,aAAO;;;oCAGyB,eAAe,OAAO,eAAe;iCACxC,eAAe;;wBAExB;qBACH;;6BAEQ;4BACD;;;;;;AAM5B;AACI,qCAA+B,mCAA+C,CAAC,KAAK,KAAK,MAAM;AAC/F,aAAO;;;oCAGyB,SAAS,OAAO,SAAS;iCAC5B,SAAS;QAClC;;;;;AAKR;AACI,6BAAuB,CAAC,KAAK,KAAK,SAAS,KAAK,IAAI,KAAK,KAAK,SAAS,KAAK;AAC5E,iCAA2B,KAAK,KAAK,MAAM,MAAM,SAAS,KAAK;AAC/D,4BAAsB,qBAAqB,KAAK,KAAK,MAAM,MAAM,SAAS,KAAK;AAC/E,2BAAqB;AACrB,oBAAc;AACd,oBAAa;AACb,mBAAa,GAAG,IAAI,MAAM,SAAS,GAAG;AAClC,0BAAkB,MAAM,MAAM,SAAS,IAAI;AAC3C,kBAAU;aACL,eAAe;kBACV,OAAO;QACjB;AACA,kBAAS,IAAI,QAAQ;;AAEzB,aAAO;UACD,MAAM;;oCAEoB,eAAe,OAAO,eAAe;iCACxC,eAAe;;QAExC;;wBAEgB;qBACH;;6BAEQ;4BACD;;mBAET,MAAM,UAAU;;;;AAInC;AACI,qCAA+B,mCAA+C,CAAC,KAAK,KAAK,KAAK,OAAO;AACrG,aAAO;;;eAGI,SAAS,OAAO,SAAS;iCACP,SAAS;QAClC;;;;;AAKR;AACI,qCAA+B,mCAA+C,CAAC,KAAK,KAAK,KAAK,MAAM,OAAO;AAC3G,aAAO;;kDAEuC,SAAS;+BAC5B,SAAS;;iCAEP,SAAS;;QAElC;;;;;;;AAOR;AACI,qCAA+B,mCAA+C,CAAC,KAAK,KAAK,KAAK,MAAM,
MAAM,OAAO;AACjH,aAAO;;;eAGI,SAAS,OAAO,SAAS;iCACP,SAAS;;QAElC;;;;;;;AAOR;AACI,6BAAuB,CAAC,KAAK,KAAK,SAAS,KAAK,IAAI,KAAK,KAAK,SAAS,KAAK;AAC5E,UAAI,aAAiB,OAAO;AACxB,eAAO;;8CAE+B,eAAe,OAAO,eAAe;;;;AAK/E,iCAA2B,KAAK,KAAK,MAAM,KAAK;AAUhD,aAAO;;;oCAGyB,eAAe,OAAO,eAAe;;iCAExC,eAAe;6BACnB;4BACD;;;;;;AAM5B;AACI,UAAI,aAAiB,OAAO;AACxB,eAAO;;0CAE2B,SAAS,OAAO,SAAS;;;;AAI/D,UAAI,MAAM,OAAO;AACb,eAAO;;;sCAGuB,SAAS,OAAO,SAAS;mCAC5B,SAAS;;;;;AAKxC,UAAI,MAAM,OAAO;AACb,eAAO;;;sCAGuB,SAAS,OAAO,SAAS;mCAC5B,SAAS;;;;;AAKxC,aAAO;;;oCAGyB,SAAS,OAAO,SAAS;iCAC5B,SAAS;wBAClB,MAAM;4BACF,MAAM;;;;;AAKlC;AACI,aAAO,SAAS;;AAEpB;AACI,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,mBAAa;AACb,aAAO;WACA;eACI,KAAK,aAAa;;;;AAIjC;AACI,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,UAAI,UAAU,UAAU;AACpB,eAAO,SAAS,sBAAsB;;AAE1C,iCAA2B,UAAU,UAAU;AAC/C,UAAI,YAAY,KAAK,YAAY;AAC7B,eAAO;cACD;+BACiB;;;;AAI3B,6BAAuB,UAAU,UAAU;AAC3C,qBAAe,yBAAyB;AACxC,aAAO;YACC;6BACiB,UAAU,UAAU;6BACpB;;;;AAI7B;AACI,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,uBAAiB,UAAU,UAAU;AACrC,6BAAuB,CAAC,KAAK,KAAK,SAAS,KAAK,IAAI,KAAK,KAAK,SAAS,KAAK;AAC5E,mBAAa;AACb,aAAO;WACA;;UAED,eAAe,OAAO,eAAe;eAChC,KAAK,aAAa;;;;AAIjC;AACI,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,UAAI,UAAU,UAAU;AAEpB,eAAO;cACD;UACJ,kBAAkB;;;;AAIxB,uBAAiB,UAAU,UAAU;AACrC,oBAAc,SAAS;AACvB,oBAAc,SAAS;AACvB,UAAI,UAAU,KAAK,UAAU;AACzB,eAAO;cACD;+BACiB;;;;AAI3B,qBAAe,yBAAyB;AACxC,UAAI,UAAU;AACV,eAAO;cACD;6CAC+B,oBAAoB;+BAClC;;;;AAI3B,UAAI,UAAU;AACV,eAAO;cACD;wCAC0B,oBAAoB;+BAC7B;;;;AAI3B,aAAO;YACC;6BACiB,UAAU,kBAAkB;6BAC5B;;;;AAI7B;AACI,oBAAc,UAAU,UAAU;AAClC,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,uBAAiB,UAAU,UAAU;AACrC,sBAAgB,SAAS;AACzB,sBAAgB,SAAS;AACzB,mBAAa;AACb,UAAI,YAAY,QAAQ,aAAiB,OAAO;AAC5C,eAAO;aACF;qDACwC,cAAc;;iBAElD,KAAK,aAAa;;;;AAI/B,6BAAuB,CAAC,KAAK,KAAK,SAAS,KAAK,IAAI,KAAK,KAAK,SAAS,KAAK;AAC5E,2BAAqB,KAAK,KAAK,MAAM,KAAK;AAC1C,aAAO;WACA;iCACsB,iBAAiB,eAAe,OAAO,eAAe;eACxE,KAAK,aAAa;;;;AAIjC;AACI,oBAAc,UAAU,UAAU;AAClC,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,uBAAiB,UAAU,UAAU;AACrC,UAAI,YAAY,QAAQ,aAAiB,OAAO;AAC5C,yBAAgB,SAAS;AACzB,yBAAgB,SAAS;AACzB,eAAO;YACH;mDACuC,eAAc;6BACpC;;;;AAIzB,aAAQ,UAAU,YAAa,cAAkB;AACjD,4BAAsB;AACtB,UAAI,cAAc,SAAS,MAAM;AAC7B,6BAAqB,iBAAiB,WAAW;AACjD,uBAAe,CAAC,OAAO;AACvB,eAAO;QACP,qBAAqB;cACf;iBACG,YAAY,kBAAkB,QAAQ;;;;AAInD,UAAI,UAAU,UAAU;AAEpB,eAAO;cACD;qDACuC,MAAM;UACjD,kBAAkB;;;;AAIxB,sBAAgB,SAAS;AACzB,sBAAgB,SAAS;AACzB,qBAAe,yBAAyB;AACxC,UAAI,YAAY;AAEZ,eAAO;YACH;yCAC6B,iBAAiB,MAAM;4CACpB;6BACf;;;;AAIzB,UAAI,YAAY;AAEZ,eAAO;YACH;yCAC6B,iBAAiB,MAAM;uCACzB;6BACV;;;;AAIzB,aAAO;UACD;;wBAEc,MAAM,cAAc;2BACjB,YAAY;2BACZ;;;;AAI3B;AACI,oBAAc,UAAU,UAAU;AAClC,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,uBAAiB,UAAU,UAAU;AACrC,6BAAuB,CAAC,KAAK,KAAK,SAAS,KAAK,IAAI,KAAK,KAAK,SAAS,KAAK;AAC5E,UAAI,MAAM,OAAO;AACb,8BAAsB,MAAM,MAAM;AAClC,yBAAiB,CAAC,GAAG;AACrB,6BAAqB,iBAAiB,WAAW;AACjD,uBAAe,CAAC,KAAK,OAAO;AAC5B,eAAO;UACL,2BAA2B;eACtB;mBACI,YAAY,kBAAkB,QAAQ;;;;AAIrD,sBAAgB,eAAe;AAC/B,sBAAgB,eAAe;AAC/B,2BAAqB,KAAK,KAAK,MAAM,KAAK;AAC1C,4BAAsB,eAAe,KAAK,KAAK,MAAM,KAAK;AAC1D,mBAAa;AACb,aAAO;WACA;;UAED,YAAY,YAAY,kBAAkB;eACrC,KAAK,aAAa;;;;AAIjC;AACI,oBAAc,UAAU,UAAU;AAClC,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,sBAAgB,MAAM,KAAK,MAAM;AACjC,sBAAgB,MAAM;AACtB,aAAQ,UAAU,YAAa,cAAkB;AACjD,4BAAsB;AACtB,UAAI,cAAc,SAAS,MAAM;AAC7B,6BAAqB,iBAAiB,WAAW;AACjD,uBAAe,CAAC,OAAO,OAAO;AAC9B,eAAO;UACL,qBAAqB;gBACf;mBACG,YAAY
,kBAAkB,QAAQ;;;;AAIrD,UAAI,UAAU,UAAU;AAEpB,eAAO;cACD;;iCAEmB,YAAY;UACnC,kBAAkB;;;;AAIxB,uBAAiB,UAAU,UAAU;AACrC,sBAAgB,SAAS;AACzB,sBAAgB,SAAS;AACzB,yBAAmB,UAAU,UAAU;AACvC,UAAI,YAAY,WAAW,cAAc;AAErC,eAAO;gBACC;;oDAEoC;;4BAExB,cAAc;iCACT;;;;AAI7B,UAAI,YAAY,WAAW,cAAc;AAErC,eAAO;YACH;8CACkC,MAAM;;qDAEC,cAAc;6BACtC;;;;AAIzB,qBAAe,yBAAyB;AACxC,aAAO;cACG;;4BAEc,mBAAmB,qBAAqB;+BACrC,YAAY;+BACZ;;;;AAI/B;AACI,oBAAc,UAAU,UAAU;AAClC,mBAAa,MAAM;AACnB,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,uBAAiB,UAAU,UAAU;AACrC,6BAAuB,CAAC,KAAK,KAAK,SAAS,KAAK,IAAI,KAAK,KAAK,SAAS,KAAK;AAC5E,sBAAgB,eAAe;AAC/B,sBAAgB,eAAe;AAC/B,2BAAqB,KAAK,KAAK,MAAM,OAAO,KAAK;AACjD,0BAAoB,eAAe,KAAK,KAAK,MAAM,OAAO,KAAK;AAC/D,mBAAa;AACb,mBAAY,OAAO,+BAA+B;AAClD,mBAAa,GAAG,IAAI,OAAO,GAAG;AAC1B,iBAAS,QAAQ,QAAQ;AACzB,yBAAiB,MAAM,OAAO,IAAI;AAClC,iBAAQ,IAAI,OAAO,qBAAqB;;AAE5C,mBAAa;AACb,aAAO;WACA,YAAY;oBACH;2BACO;kCACO;qDACmB,YAAY;eAClD,KAAK,aAAa;;;;AAIjC;AACI,oBAAc,UAAU,UAAU;AAClC,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,sBAAgB,MAAM;AACtB,sBAAgB,MAAM,KAAK;AAC3B,sBAAgB,MAAM,KAAK;AAC3B,aAAQ,UAAU,YAAa,cAAkB;AACjD,UAAI,SAAS,SAAS,MAAM;AACxB,6BAAqB,iBAAiB,WAAW;AACjD,uBAAe,CAAC,OAAO,OAAO,SAAS;AACvC,eAAO;QACP,qBAAqB;cACf;iBACG,YAAY,kBAAkB,QAAQ;;;;AAInD,UAAI,UAAU,UAAU;AAEpB,eAAO;cACD;;iCAEmB,YAAY,YAAY;UAC/C,kBAAkB;;;;AAIxB,yBAAmB,UAAU,UAAU;AACvC,uBAAiB,UAAU,UAAU;AACrC,sBAAgB,SAAS;AACzB,sBAAgB,SAAS;AACzB,UAAI,YAAY,WAAW,cAAc;AAErC,eAAO;cACD;;;;uBAIS,YAAY;;0BAET,cAAc;+BACT;;;;AAI3B,UAAI,YAAY,WAAW,cAAc;AAErC,eAAO;cACD;;gCAEkB,MAAM,KAAK,MAAM,OAAO,MAAM;;;yBAGrC,cAAc;+BACR;;;;AAI3B,qBAAe,yBAAyB;AACxC,aAAO;YACC;;0BAEc,mBAAmB;oBACzB;6BACS,YAAY,oBAAoB;6BAChC;;;;AAI7B;AACI,oBAAc,UAAU,UAAU;AAClC,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,sBAAgB,MAAM;AACtB,sBAAgB,MAAM,KAAK;AAC3B,sBAAgB,MAAM,KAAK;AAC3B,sBAAgB,MAAM,KAAK;AAC3B,aAAQ,UAAU,YAAa,cAAkB;AACjD,UAAI,SAAS,SAAS,MAAM;AACxB,6BAAqB,iBAAiB,WAAW;AACjD,uBAAe,CAAC,OAAO,OAAO,SAAS,UAAU;AACjD,eAAO;QACP,qBAAqB;cACf;iBACG,YAAY,kBAAkB,QAAQ;;;;AAInD,UAAI,UAAU,UAAU;AAEpB,eAAO;cACD;;;iBAGG,YAAY,YAAY,YAAY;;UAE3C,kBAAkB;;;;AAIxB,yBAAmB,UAAU,UAAU;AACvC,uBAAiB,UAAU,UAAU;AACrC,sBAAgB,SAAS;AACzB,sBAAgB,SAAS;AACzB,UAAI,YAAY,WAAW,cAAc;AAErC,eAAO;cACD;;;gCAGkB,YAAY,YAAY;;0BAE9B,cAAc;+BACT;;;;AAI3B,UAAI,YAAY,WAAW,cAAc;AAErC,eAAO;cACD;;;iBAGG,MAAM,KAAK,MAAM,KAAK,MAAM;iBAC5B,MAAM,KAAK,MAAM,OAAO,MAAM;;;yBAGtB,cAAc;+BACR;;;;AAI3B,qBAAe,yBAAyB;AACxC,aAAO;YACC;;0BAEc,mBAAmB,qBAAqB;qBAC7C,sBAAsB;6BACd,YAAY;6BACZ;;;;AAI7B;AACI,oBAAc,UAAU,UAAU;AAClC,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,aAAQ,UAAU,YAAa,cAAkB;AACjD,UAAI,SAAS,SAAS,MAAM;AACxB,6BAAqB,iBAAiB,WAAW;AACjD,uBAAe,CAAC,OAAO,OAAO,SAAS,UAAU,UAAU;AAC3D,eAAO;QACP,qBAAqB;cACf;;iBAEG,YAAY,kBAAkB,QAAQ;;;;AAInD,sBAAgB,MAAM;AACtB,sBAAgB,MAAM,KAAK;AAC3B,sBAAgB,MAAM,KAAK;AAC3B,sBAAgB,MAAM,KAAK;AAC3B,sBAAgB,MAAM,KAAK;AAC3B,UAAI,UAAU,UAAU;AAEpB,eAAO;cACD;;;;iBAIG,YAAY,YAAY,YAAY;;;mBAGlC;UACT,kBAAkB;;;;AAIxB,yBAAmB,UAAU,UAAU;AACvC,uBAAiB,UAAU,UAAU;AACrC,sBAAgB,SAAS;AACzB,sBAAgB,SAAS;AACzB,UAAI,YAAY,WAAW,cAAc;AAErC,eAAO;cACD;;;;iBAIG,YAAY,YAAY,YAAY;;;0BAG3B,cAAc;+BACT;;;;AAI3B,UAAI,YAAY,WAAW,cAAc;AAErC,eAAO;cACD;;;iBAGG,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM;iBACvC,MAAM,KAAK,MAAM,KAAK,MAAM;iBAC5B,MAAM,KAAK,MAAM;iBACjB,MAAM;;;yBAGE,cAAc;+BACR;;;;AAI3B,qBAAe,yBAAyB;AACxC,aAAO;YACC;;;0BAGc,mBAAmB,qBAAqB;qBAC7C,sBAAsB,sBAAsB;6BACpC,YAAY;6BACZ;;;;AAI7B;AACI,sBAAgB,UAAU;AAC1B,qBAAe,eAAmB,UAAU,UAAU;AACtD,UAAI,SAAS;AACT,eAAO,UAAU;;AAErB,aAAO;0BACe;;iBAET;;;;;AAKjB;AACI,sBAAgB,UAAU;AAC1B,6BAAuB,QAAQ,OAAO,GAAG,gBAA
gB,QAAQ,MAAM;AACvE,uBAAiB,QAAQ,iBAAiB;AAC1C,qBAAe,UAAU,UAAU,aAAa;AAChD,sBAAgB,aAAa,aAAa;AAC1C,4BAAsB,mBAAiB,UAAU,UAAU,cAAc,aAAa;AACtF,mBAAa,kBAAkB;AAC/B,uBAAiB,UAAU;AAC3B;AACA,qBAAe,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK;AACzC,UAAI,WAAW;AACX,wBAAgB;iBAEX,UAAU,KAAK,cAAc,UAAU;AAC5C,wBAAgB;;AAGhB,wBACI,cAAc,IAAI,OAAK,UAAU,OAAO,IAAI,kBACvC,KAAK;;AAElB,kCAA4B;AAC5B,UAAI,UAAU,KAAK,SAAS;AACxB,gCAAwB;;AAGxB,gCAAwB,UAAU,UAAU,aACvC,IAAI,UAAU,UAAU,OAAO,IAAI,aACnC,KAAK;;AAEd,mBAAa;AACb,qBAAe,eAAmB,UAAU,UAAU;AACtD,4BAAsB,WAAW;AACjC,sBAAgB,eAAmB,aAAa;AAChD,6BAAuB,YAAY;AACnC,UAAI,WAAW,KAAK,CAAC,iBAAiB,CAAC;AACnC,iBAAS;;;iBAIJ,iBAAiB,CAAC;AACvB,YAAI,YAAY;AACZ,mBAAS;;;;AAKT,mBAAS;;;;iBAKR,cAAc;AACnB,qBAAa,SAAS;AACtB,qBAAa,SAAS;AACtB,YAAI,cAAc,QAAQ,QAAQ,MAAM,cAAc,QAAQ,QAAQ;AAClE,mBAAS;mBAEJ,cAAc,QAAQ,QAAQ;AACnC,mBAAS;mBAGJ,cAAc,QAAQ,QAAQ;AACnC,mBAAS;;;AAGjB,aAAO;WACA;QACH;QACA;8BACsB,kBAAkB;QACxC;;;;AAIR;AACI,sBAAgB,UAAU;AAC1B,6BAAuB,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACvE,uBAAiB,QAAQ,iBAAiB;AAC1C,0BAAoB,aAAa;AACjC,yBAAmB,UAAU,UAAU;AACvC,qBAAe,UAAU,UAAU,aAAa;AAChD,sBAAgB,aAAa,aAAa;AAC1C,UAAI,CAAC,UAAU,UAAU,aAAa,WAAW,WAC7C,UAAU,UAAU,cAAc,QAClC,aAAiB,YAAY;AAC7B,eAAO;cACD;+BACiB;;;;AAI3B,mBAAa,kBAAkB;AAC/B,4BAAsB,mBAAiB,UAAU,UAAU,cAAc,aAAa;AACtF,uBAAiB,UAAU;AAC3B;AACA,qBAAe,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK;AACzC,UAAI,WAAW;AACX,wBAAgB;iBAEX,UAAU,KAAK,cAAc,UAAU;AAC5C,wBAAgB;;AAGhB,wBACI,cAAc,IAAI,OAAK,UAAU,OAAO,IAAI,kBACvC,KAAK;;AAElB,kCAA4B;AAC5B,UAAI,UAAU,KAAK,SAAS;AACxB,gCAAwB;;AAGxB,gCAAwB,UAAU,UAAU,aACvC,IAAI,UAAU,UAAU,OAAO,IAAI,aACnC,KAAK;;AAEd,aAAO;YACC;QACJ;QACA;kBACU,kBAAkB;;;;AAI7B;AACH,UAAI,QAAQ;AACR,eAAO;iBAEF,SAAS;AACd,eAAO;iBAEF,SAAS;AACd,eAAO;iBAEF,SAAS;AACd,eAAO;iBAEF,SAAS;AACd,eAAO;iBAEF,SAAS;AACd,eAAO;;AAGP,cAAM,MAAM,gBAAgB;;;AAIpC;AAEI,2BAAqB,KAAK,MAAM,KAAK,UAAU;AAC/C,mBAAa,UAAU,eAAe;AACtC,aAAO;;AAEX;AACI,aAAO,SAAS,IAAI,OAAK,OAAO,IAAI,KAAK;;AC3sC7C;;;;;;;;;;;;;;;;;MAoBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,gBAAY,MAAM,SAAS,GAAG,MAAM,aAAa,IAAG,OAAO,GAAG,gBAC1D,IAAG,MAAM;AACb,uBAAe,MAAM,MAAM,SAAS;AACpC,wBAAgB,KAAK,KAAK,SAAS;AACnC,aAAK,cAAc,MAAM,MAAM,GAAG;AAClC,YAAI,UAAU;AACV,eAAK,YAAY,KAAK;;AAE1B,YAAI,CAAC;AACD,eAAK,cAAc,KAAK;;AAE5B,yBAAiB,KAAK;AACtB,qBAAa,SAAS;AACtB,sBAAc,kBAAkB;AAChC,wBAAe,YAAY,UAAU;AACrC;AACA;AACA,YAAI,YAAY;AACZ,uBAAa,OAAO;AACpB,iCAAuB,kBAAkB;AACzC,2BAAiB;UACnB,+BAA+B,kBAAkB,QAAO;YACtD,QAAO,OAAO;UAChB,+BAA+B,kBAAkB,QAAO;YACtD,QAAO,OAAO;UAChB,+BAA+B,kBAAkB,QAAO;YACtD,QAAO,OAAO;UAChB,+BAA+B,kBAAkB,QAAO;YACtD,QAAO,OAAO;;AAGd,uBAAa;AACb,2BAAiB;UACnB;YACE,QAAO,OAAO;UAChB;YACE,QAAO,OAAO;UAChB;YACE,QAAO,OAAO;UAChB;YACE,QAAO,OAAO;;AAElB,yBAAiB,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK,MAAM,GAAG;AACzD,0BAAkB,MAAM,SAAS,aAAa;AAC9C,4BAAoB,SAAS,IAAI,OAAK,SAAS;AAC/C,2BAAmB,YAAY,cAAc,aAAa,GAAG,OAAO;AACpE,2BAAmB,YAAY,cAAc,aAAa,GAAG,OAAO;AACpE,2BAAmB,YAAY,cAAc,aAAa,GAAG,OAAO;AACpE,2BAAmB,YAAY,cAAc,aAAa,GAAG,OAAO;AACpE,uBAAgB,QAAO,QAAS,gBAAgB;AAChD,kCAA0B,YAAY,KAAK;sDACG,WAAW;sDACX,WAAW;sDACX,WAAW;sDACX,WAAW;AACzD,2BAAmB;0BACD,WAAW;uCACE,WAAW;uCACX,WAAW;qDACG,WAAW;AACxD,8CAAsC,YAAY,KAAK;qCAC1B,YAAY;4CACL,SAAS;iDACJ,SAAS,MAAM,IAAI;;AAE5D,aAAK,WAAW;0BACE,YAAY;iCACL,SAAS;sCACJ,SAAS,MAAM,IAAI;;QAEjD;;UAEE;4BACkB,QAAO,OAAO,QAAQ,SAAS,OAAO,KAAK;4BAC3C,QAAO,OAAO,QAAQ,SAAS,OAAO,KAAK;UAC7D;yCAC+B,wBAAwB;sBAC3C,wBAAwB,gBAAgB;;;2BAGnC;;8BAEG;;YAElB;6BACiB;;;mBAGV;;;;;;;;;;;;;;AChHnB;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc,SAAS;AAC5B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,+BAAuB,SAAS;AAChC,8BAAsB,SAAS;AAC/B,sCAA8B,SAAS;AACvC,qCAA6B,SAAS;AACtC,uBAAe,wBAAw
B,IAAI,SAAS,QAAQ;AAC5D,wBAAgB,uBAAuB,IAAI,SAAS,QAAQ;AAC5D,8BAAsB,IAAK,gBAAe;AAC1C,aAAK,WAAW;iCACS,WAAW;0CACF;;;;;;;;;;;;;;gCAcV;oBACZ;gDAC4B;;oCAEZ,SAAS;;;;;kCAKX;mBACf;kDAC+B;;sCAEZ,SAAS;;;;;;;;;;;;;;;;;MAiB3C;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc,SAAS;AAC5B,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,8BAAsB,SAAS;AAC/B,+BAAuB,SAAS;AAChC,8BAAsB,SAAS;AAC/B,qCAA6B,SAAS;AACtC,sCAA8B,SAAS;AACvC,qCAA6B,SAAS;AACtC,yBAAiB,uBAAuB,IAAI,SAAS,QAAQ;AAC7D,uBAAe,wBAAwB,IAAI,SAAS,QAAQ;AAC5D,wBAAgB,uBAAuB,IAAI,SAAS,QAAQ;AAC5D,8BAAsB,IAAK,eAAc,eAAe;AACxD,aAAK,WAAW;iCACS,aAAa,WAAW;0CACf;;;;;;;;;;;;;;;;;gCAiBV;oBACZ;gDAC4B;;oCAEZ,SAAS;;;;;kCAKX;sBACZ;kDAC4B;;sCAEZ,SAAS;;;;;;oCAMX;wBACZ;oDAC4B;;wCAEZ,SAAS;;;;;;;;;;;;;;;;;AC1IjD;;;;;;;;;;;;;;;;AAiBA,8BAA0B;;;;AAQnB,oBAAgB;;;;;;;;;;;AAWhB,gBAAY;;;;;;;;;;AAUZ,+BAA2B;AAC3B,kBAAc;AACd,iBAAa;AACb,uBAAmB;AACnB,oBAAgB;AAChB,0BAAsB;AACtB,wBAAoB;AACpB,uBAAmB;AACnB,gBAAY,oBAAoB;;;AAGhC,gBAAY,oBAAoB;;;AAGhC,gBAAY;;AAEZ,oBAAgB;AAChB,kBAAc;;MAEjB;AACI,aAAK,gBAAgB,CAAC,KAAK;AAC3B,aAAK,cAAc,4BAAwC,QAAQ;AACnE,aAAK,WAAW;;UAEd;;;;;;;;;;;ACtEV;;;;;;;;;;;;;;;;AAmBA,gCAA0B;;;;;;AAMnB,sBAAgB;;;;;;;;;;;;;;;;;;;;;;AAsBhB,kBAAY;;;;;;;;;;;;;;MAef,sBAAoB;;;AAGjB,oBAAc;;;;AAId,sBAAgB;;;;AAIhB,oBAAc;;;AAGd,sBAAkB;;;AAGlB,mBAAa;;;AAGb,yBAAmB;;;AAGnB,sBAAgB;;;AAGhB,4BAAsB;;;AAGtB,0BAAoB;;;;;AAKpB,yBAAmB;;;;;;AAMnB,kBAAY;;;MAIf,sBAAoB;;;AAGjB,kBAAY;;;MAIf,sBAAoB;;;AAGjB,kBAAY;;;MAIf,sBAAoB;;;;MAIpB,oDAAmD;AAC/C,aAAK,gBAAgB,CAAC,KAAK;AAC3B,aAAK,uBAAuB;AAC5B,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,aAAK,cAAc,4BAAwC,QAAQ;AACnE,qBAAa,KAAK,YAAY;AAC9B,qCAA6B;AAC7B,YAAI;AACA,cAAI,SAAS,KAAK,eAAmB,KAAK,iBAAiB;AACvD,qCAAyB;;;;;;AAOzB,0BAAc,kBAAkB;AAChC,qCAAyB;YAC7B;;AAEI,gBAAI,SAAS;AACT,wCAA0B;yCACL,KAAK,YAAY;;;;;AAMtC,+BAAiB,YAAY,UAAU;AACvC,wCAA0B;;iBAE7B,SAAS,OAAO,cAAc,KAAK,YAAY,OAAO;;iBAEtD,SAAS,OAAO,cAAc,KAAK,YAAY,OAAO;;;;;;;;AAQ/D,aAAK,WAAW;;UAEd;;;;;;;;UAQA;;;;;;;AChLV;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc;AACnB,aAAK,WAAW;;;;;;;;;;;;;;;MAepB;AACI,eAAO;AACH,cAAI,KAAK,UAAU;AACf,iBAAK,SAAS,MAAM,0BAA0B,cAAc;AAC5D,iBAAK,SAAS,MAAM,0BAA0B,cAAc;;AAEhE,gBAAM,GAAG,UAAU,KAAK,QAAQ;AAChC,gBAAM,GAAG,UAAU,KAAK,QAAQ;;;;AC1C5C;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,aAAK,cAAc;AACnB,aAAK,WAAW;;;;;;;;;;;;;;;;MAgBpB;AACI,eAAO;AACH,cAAI,KAAK,UAAU;AACf,iBAAK,SAAS,MAAM,0BAA0B,cAAc;AAC5D,iBAAK,SAAS,MAAM,0BAA0B,cAAc;;AAEhE,gBAAM,GAAG,UAAU,KAAK,QAAQ;AAChC,gBAAM,GAAG,UAAU,KAAK,QAAQ;;;;AC7C5C;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC,QAAQ;AAC9B,aAAK,cAAc;AACnB,aAAK,WAAW;;;;;;;;;;;;;;;;ACpBxB;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC,KAAK;AAC3B,aAAK,cAAc,SAAS;AAC5B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,uBAAe,SAAS,QAAQ;AAChC,wBAAgB,SAAS,QAAQ;AACjC,+BAAuB,SAAS,eAAe;AAC/C,aAAK,WAAW;;;;;;;;;;;;8BAYM,SAAS;kCACL,SAAS;iCACV,kBAAkB;;kCAEjB,SAAS;;;;oCAIP,SAAS;mCACV,iBAAiB;;oCAEhB,SAAS;;;;oBAIzB;;;;;;;;;;;;;;;;;;;MAmBhB;AACI,aAAK,gBAAgB,CAAC,MAAM;AAC5B,aAAK,cAAc,SAAS;AAC5B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,+BAAuB,SAAS,eAAe;AAC/C,uBAAe,eAAe,IAAI,SAAS,QAAQ;AACnD,wBAAgB,cAAc,IAAI,SAAS,QAAQ;AACnD,uBAAe,iBAAiB,IAAI;AACpC,uBAAe,iBAAiB,IAAI;AACpC,2BAAmB,iBAAiB,IAAI;AACxC,aAAK,WAAW;iCACS,WAAW;;;;;0BAKlB;;wCAEc,mBAAmB;;;;;;;gCAO3B;gDACgB;;oCAEZ,SAAS;;;;;yBAKpB;;kCAES;kDACgB;;sCAEZ,SAAS;;;;;;2BAMpB;;oCAES,SAAS;;oBAEzB;;;;;;;;;;;;;;;;;;;MAmBhB;AACI,aAAK,gBAAgB,CAAC,KAAK;AAC3B,aAAK,cAAc,SAAS;AAC5B,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,yBAAiB,SAAS,QAAQ;AAClC,uBAAe,SAAS,QAAQ;A
AChC,wBAAgB,SAAS,QAAQ;AACjC,aAAK,WAAW;;;;;;;;;;;8BAWM,SAAS;kCACL,SAAS;iCACV,iBAAiB;;kCAEhB,SAAS;;;;oCAIP,SAAS;mCACV,kBAAkB;;oCAEjB,SAAS;;;;sCAIP,SAAS;qCACV,iBAAiB;;sCAEhB,SAAS;;;;;;;;;;;;;;;;;MAiB3C;AACI,aAAK,gBAAgB,CAAC,MAAM;AAC5B,aAAK,cAAc,SAAS;AAC5B,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,yBAAiB,cAAc,IAAI,SAAS,QAAQ;AACpD,uBAAe,eAAe,IAAI,SAAS,QAAQ;AACnD,wBAAgB,cAAc,IAAI,SAAS,QAAQ;AACnD,aAAK,WAAW;iCACS,aAAa,WAAW;;;;;;;;;;;;;;gCAczB;gDACgB;;oCAEZ,SAAS;;;;;yBAKpB;;kCAES;kDACgB;;sCAEZ,SAAS;;;;;;2BAMpB;;oCAES;oDACgB;;wCAEZ,SAAS;;;;;;6BAMpB;;sCAES,SAAS;;;;;;;;;;;;;AC/P/C;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC,KAAK;AAC3B,aAAK,cAAc,SAAS;AAC5B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,uBAAe,SAAS,QAAQ;AAChC,wBAAgB,SAAS,QAAQ;AACjC,2BAAmB,SAAS,cAAc,SAAS;AACnD,aAAK,WAAW;;;;;;;wBAOA;;;;;8BAKM,SAAS;kCACL,SAAS;iCACV,kBAAkB;;kCAEjB,SAAS;;;;oCAIP,SAAS;mCACV,iBAAiB;;oCAEhB,SAAS;;;;;;;;;;;;;;;;MAgBzC;AACI,aAAK,gBAAgB,CAAC,MAAM;AAC5B,aAAK,cAAc,SAAS;AAC5B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,uBAAe,eAAe,IAAI,SAAS,QAAQ;AACnD,wBAAgB,cAAc,IAAI,SAAS,QAAQ;AACnD,2BAAmB,SAAS,cAAc,SAAS;AACnD,aAAK,WAAW;iCACS,WAAW;;;;;;;;;;;;gCAYZ;gDACgB;;oCAEZ,SAAS;;;;;yBAKpB;;kCAES;kDACgB;;sCAEZ,SAAS;;;;;;2BAMpB;;;oCAGS;8BACN;;;;;;;;;;;;AC9G9B;;;;;;;;;;;;;;;;;MAiBI,gCAAgC,qBAAoB,kCAAkC;AAClF,aAAK,gBAAgB,CAAC,KAAK;AAC3B,aAAK,cAAc,SAAS;AAC5B,uBAAe,SAAS,QAAQ;AAChC,wBAAgB,SAAS,QAAQ;AACjC,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,+BAAuB,SAAS;AAChC,8BAAsB,SAAS;AAC/B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,sCAA8B,KAAK,MAAM,SAAS,aAAa,KAAK;AACpE,wCAAgC,SAAS,aAAa;AACtD,+BAAuB,SAAS,eAAe;AAC/C,uBAAe,iBAAiB,IAAI;AACpC,uBAAe,iBAAiB,IAAI;AACpC,2BAAmB,iBAAiB,IAAI;AACxC,gCAAwB,6BAA6B;AACrD,YAAI;AACA,cAAI;AACA,gCAAoB;;YAExB;;;AAII,gCAAoB;;cAEtB;;;;AAIF,mCAAyB;;AAE7B,+BAAuB,UAAU,oCAAoC;AACrE,YAAI;AACA,eAAK,cAAc,KAAK;;AAE5B,YAAI;AACA,eAAK,cAAc,KAAK;;AAE5B,aAAK,WAAW;QAChB;;oCAE4B,iBAAiB;iCACpB,WAAW;;;;;0BAKlB;;;2BAGC,mBAAmB;;;;;;;gCAOd;qCACK;;gCAEL,SAAS;;;;kCAIP;uCACK;;kCAEL,SAAS;;;;oCAIP;;;;;;;;oBAQhB;;;;;;;;;;;;;;;;;;;kBAmBF,4BAA4B;;oBAE1B;;0CAEsB;mCACP;;;kCAGD;mCACC;;;yBAGV,4BAA4B;;+BAEtB;+BACA;;;oBAGX;;wCAEoB;wCACA;;;;;gCAKR;gCACA;;;;;yBAKP,4BAA4B;;+BAEtB;+BACA;+BACA;;;oBAGX;;wCAEoB;wCACA;wCACA;;;;;gCAKR;gCACA;gCACA;;;;;;;;;;UAUtB;UACA;;;;;;;MAON;AACI,aAAK,gBAAgB,CAAC,KAAK;AAC3B,aAAK,cAAc,SAAS;AAC5B,yBAAiB,SAAS,QAAQ;AAClC,uBAAe,SAAS,QAAQ;AAChC,wBAAgB,SAAS,QAAQ;AACjC,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,8BAAsB,SAAS;AAC/B,+BAAuB,SAAS;AAChC,8BAAsB,SAAS;AAC/B,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,sCAA8B,KAAK,MAAM,SAAS,aAAa,KAAK;AACpE,wCAAgC,SAAS,aAAa;AACtD,aAAK,WAAW;oCACY,gBAAgB,iBAAiB;iCACpC,aAAa,WAAW;;;;;;;;;;;;;;;;gCAgBzB;qCACK;;gCAEL,SAAS;;;;kCAIP;uCACK;;kCAEL,SAAS;;;;oCAIP;yCACK;;oCAEL,SAAS;;;;sCAIP;;;;;;;;;;;;;;;;;oBAiBlB,4BAA4B;;4CAEJ;qCACP;2BACV,4BAA4B;;4CAEX;4CACA;;;qCAGP;qCACA;;;2BAGV,4BAA4B;;4CAEX;4CACA;4CACA;;;qCAGP;qCACA;qCACA;;;;;;;;;;;;AC1RrC;;;;;;;;;;;;;;;;;MAiBI,gCAAgC,qBAAoB,2BAA2B;AAC3E,aAAK,gBAAgB,CAAC,KAAK;AAC3B,aAAK,cAAc,SAAS;AAC5B,yBAAiB,SAAS;AAC1B,yBAAiB,SAAS;AAC1B,uBAAe,SAAS,QAAQ;AAChC,wBAAgB,SAAS,QAAQ;AACjC,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,+BAAuB,SAAS;AAChC,8BAAsB,SAAS;AAC/B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,2BAAmB,SAAS,cAAc,SAAS;AACnD,gCAAwB,6BAA6B;AACrD,YAAI;AACA,cAAI;AACA,gCAAoB;;YAExB;;;AAII,gCAAoB;;cAEtB;;;;AAIF,mCAAyB;;AAE7B,+BAAuB,UAAU,oCAAoC;AACrE,YAAI;AACA,eAAK,cAAc,KAAK;;AAE5B,YAAI;AACA,eAAK,cAAc,KAAK;;AAE5B,aAAK,WAAW;QAChB;;oCAE4B,iBAAiB;iCACpB,WAAW;;;;;;;wBAOpB;4BACI;;;;;;;;;gCASI;qCACK;;gCA
EL;;;;kCAIE;uCACK;;kCAEL;;;;;;;;;;;UAWxB;UACA;;;;;;AClGV;;;;;;;;;;;;;;;;;MAkBI,gCAAgC,qBAAoB,2BAA2B;AAC3E,aAAK,gBAAgB,CAAC,KAAK;AAC3B,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,aAAK,cAAc,SAAS;AAC5B,yBAAiB,SAAS;AAC1B,yBAAiB,SAAS;AAC1B,uBAAe,SAAS,QAAQ;AAChC,wBAAgB,SAAS,QAAQ;AACjC,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,+BAAuB,SAAS;AAChC,8BAAsB,SAAS;AAC/B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,6BAAqB;AACrB,uBAAe;AACf,qBAAa,GAAG,IAAI,cAAc;AAC9B,uBAAa,GAAG,IAAI,aAAa;AAC7B,wBAAY;wBACJ,KAAK,IAAI;mBACd,KAAK;mBACL,KAAK;;;AAWhB,qBAAa,GAAG,IAAI,cAAc;AAC9B,4BAAkB,GAAG,SAAS,cAAc;AACxC,sBAAU,SAAS;AACnB,wBAAY;4BACA,IAAI;4BACJ,IAAI;;AAEhB,gBAAI,gBAAgB;AAChB,kBAAI,IAAI;AAEJ,oBAAI,UAAU,MAAM;AAQhB,8BAAY;;qCAEH,2CAA2C;2BACrD,KAAK;;;;uCAIO;6BACV,KAAK;;;2BAGP,KAAK;;;;qCAIK,2CAA2C;;;;;uCAKzC;;;;sBAIjB,KAAK,gCAAgC,KAAK;;sBAE1C,KAAK,yBAAyB,KAAK;;;;AAM7B,8BAAY;qCACH,+BAA+B;2BACzC,KAAK;;2BAEL,KAAK;;;oBAGZ,KAAK,cAAc,KAAK;;;AAGpB,oBAAI,IAAI,IAAI;AAMR,0CAAwB,UAAU,MAAM,IACpC,mBAAuB,iBACvB;AACJ,sBAAK,gBAAgB,MAAM,KAAK,UAAU,MAAM,KAC3C,gBAAgB,MAAM,KAAK,UAAU,MAAM;AAC5C,gCAAY;oCACR,UAAU,OAAO;;uCAEd;kDACW;6BACrB,KAAK,IAAI;;;AAKN,wBAAI,gBAAgB;AAChB,kCAAY;;yCAEP;oDACW;+BACrB,KAAK;;+BAEL,KAAK;;;;AAIJ,gCAAY;sBACtB,KAAK,IAAI;6BACF,KAAK,gBAAgB,KAAK,IAAI;;;AAI3B,gCAAY;oCACR;;uCAEG;kDACW;6BACrB,KAAK,IAAI;;;sBAGhB,KAAK,IAAI,cAAc,KAAK,IAAI;;;;;;AAOlC,kBAAI,IAAI;AACJ,4BAAY;mCACD;;AAQX,oBAAI,UAAU,MAAM;AAChB,8BAAY;sCACF;iDACW;2BACtB,KAAK;;2BAEL,KAAK;;;6CAGa;2BAClB,KAAK,IAAI;;2BAET,KAAK,IAAI;;;oBAGhB,KAAK;2BACE,KAAK,gBAAgB,KAAK,IAAI;;AAE7B,sBAAI,IAAI,IAAI;AACR,gCAAY;;wCAEJ;mDACW;;;sBAG7B,KAAK,IAAI,mBAAmB,KAAK,IAAI;;;;AAK/B,8BAAY;qCACH;2BACV,KAAK;;2BAEL,KAAK;;;kCAGE;iDACe;2BACtB,KAAK,IAAI;;2BAET,KAAK,IAAI;;;oBAGhB,KAAK;2BACE,KAAK,gBAAgB,KAAK,IAAI;;AAE7B,sBAAI,IAAI,IAAI;AACR,gCAAY;sBACtB,KAAK,IAAI;6BACF,KAAK,gBAAgB,KAAK,IAAI;;;;AAInC,4BAAY;;;AAGpB,gBAAI,IAAI;AACJ,0BAAY;0BACN,KAAK,YAAY,MAAM;gBACjC,KAAK,mBAAmB,KAAK,gBAAgB,KAAK;;AAE9C,kBAAI,IAAI,IAAI;AACR,4BAAY;4BACR,KAAK,IAAI,YAAY,MAAM,IAAI;kBACzC,KAAK,IAAI;8BACG,KAAK,IAAI,gBAAgB,KAAK,IAAI;;;;;AAKxD,qBAAa,GAAG,IAAI,cAAc;AAC9B,uBAAa,GAAG,IAAI,aAAa;AAC7B,wBAAY,gBAAgB,KAAK,SAAS,KAAK;;;AAGvD,gCAAwB,6BAA6B;AACrD,YAAI;AACA,cAAI;AACA,gCAAoB;;YAExB;;;AAII,gCAAoB;YACxB;;;AAGA,mCAAyB;;AAE7B,+BAAuB,UAAU,oCAAoC;AACrE,YAAI;AACA,eAAK,cAAc,KAAK;;AAE5B,YAAI;AACA,eAAK,cAAc,KAAK;;AAE5B,aAAK,WAAW;QAChB;;oCAE4B,iBAAiB;iCACpB,WAAW;;;;;;;;;;;;;;;UAelC;;;UAGA;UACA;;;;;;ACvSV;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC,SAAS,SAAS;AACxC,aAAK,cAAc;AACnB,wDAAgD;AAChD,2BAAoB;AACpB,wCAAgC;AAChC,aAAK,cAAc,CAAC,UAAU,YAAY,WAAW;AACrD,yBAAiB,WAAW,aAAa,IAAI;AAC7C,oDAA4C,CAAC,GAAG,cAAc,OAAO,GAAG,aAAa;AACrF,gDAAwC,aAAa,IACjD;UACI,GAAI,eAAc,KAAM,cAAa;UACrC;UACA,MAAM;YAEV;UACI;UACA;UACA,mBAAmB;;AAE3B,8CAAsC,YAAY,IAC9C;UACI,GAAI,cAAa,KAAM,aAAY;UACnC;UACA,MAAM;YAEV;UACI;UACA;UACA,mBAAmB;;AAK3B,aAAK,WAAW;yCACiB;wCACD;;;;;;;;;;;;;;;;iCAgBP;;;;+BAIF;8BACD;;uBAEP;mCACY;4BACP;;;uBAGL;mCACY;4BACP;;;;;aAKf;;;;;;;;;;;;;;;;;;;;;;;;;;;;MCtFT;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc;AACnB,qBAAa,MAAM;AACnB,oBAAY,YAAY,QAAQ,QAAQ,UAAU,MAAM;AACxD,uBAAe,MAAM,MAAM,SAAS;AACpC,wBAAgB;AAChB,wBAAgB;AAIhB,YAAI;AACA,sBAAY,WAAU,UAAU,SAAS,MAAM;AAC/C,sBAAY,WAAU,YAAY;;AAGlC,sBAAY,WAAU,gBAAgB,WAAW;AACjD,sBAAa,WAAU,eAAe;;AAE1C,aAAK,WAAW;;;UAGd,kBAAkB;oBACR,cAAc,MAAM;sBAClB;;cAER;sBACQ;YACV,cAAc,MAAM;wBACR,UAAU,MAAM;;;;;;MAMpC;AACI,eAAO;AACH,cAAI,KAAK,SAAS;AACd,iBAAK,QAAQ,MAAM,mBAAmB,cAAc;;AAExD,gBAAM,GAAG,UAAU,KAAK,OAAO;;;;AAI3C;AACI,UAAI,SAAS;AACT,eAAO,GAAG;iBAEL,SAAS;AACd,eAAO,GAAG,WAAW;iBAEhB,SAAS;AACd,eAAO,GAAG,WAAW,WAAW;iBAE3B,SAAS;AACd,eAAO,GAAG,WAAW,WAAW,WAAW;;AAG3C,cAAM,MAAM,2BA
A2B;;;AAG/C;AACI,UAAI,SAAS;AACT,eAAO,GAAG;iBAEL,SAAS;AACd,eAAO,GAAG;iBAEL,SAAS;AACd,eAAO,GAAG;iBAEL,SAAS;AACd,eAAO,GAAG;;AAGV,cAAM,MAAM,2BAA2B;;;AC7E/C;;;;;;;;;;;;;;;;;MAoBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,aAAK,mBAAmB,cAAc;AACtC,yBAAiB,iBAAiB;AAClC,qBAAa;AACb,aAAK,cAAc;AACnB,aAAK,WAAW;;UAEd,mCAA+C,CAAC,KAAK,KAAK,MAAM;;;;;;iBAMzD,SAAS,OAAO,SAAS;wCACF,SAAS;;;;;;;;;;UAUvC,KAAK;;;;;AC/Cf;;;;;;;;;;;;;;;;;MAoBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,aAAK,mBAAmB,cAAc;AACtC,yBAAiB,iBAAiB;AAClC,qBAAa;AACb,aAAK,cAAc;AACnB,aAAK,WAAW;;UAEd,mCAA+C,CAAC,KAAK,KAAK,MAAM;;;;;;iBAMzD,SAAS,OAAO,SAAS;wCACF,SAAS;;;;;;;;;;UAUvC,KAAK;;;;;AC/Cf;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc;AACnB,aAAK,cAAc;AACnB,aAAK,YAAY;AACjB,aAAK,aAAa;AAClB,aAAK,WAAW;;;;gBAIR,KAAK;gBACL,KAAK;gBACL,KAAK;;uBAEE;+BACQ;uBACR;+BACQ;mCACI;UACzB,KAAK;;;uBAGQ,KAAK;;;;;MAKxB;AACI,YAAI,KAAK,eAAe;AACpB,iBAAO;;AAGP,iBAAO;;;MAGf;AACI,YAAI,KAAK,eAAe;AACpB,iBAAO;;AAGP,iBAAO;;;MAGf;AACI,YAAI,KAAK,eAAe;AACpB,iBAAO;;AAGP,iBAAO;;;MAGf;AACI,YAAI,KAAK,eAAe;AACpB,iBAAO,KAAK,YAAY;;AAGxB,iBAAO,KAAK,YAAY;;;MAGhC;AACI,YAAI,KAAK,eAAe;AACpB,iBAAO;;AAGP,iBAAO;;;;ACjFnB;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc,CAAC,MAAM;AAC1B,aAAK,WAAW;;;;;;;;;ACpBxB;;;;;;;;;;;;;;;;;MAoBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc,aAAa;AAChC,qBAAa;AACb,aAAK,cAAc;AACnB,aAAK,WAAW;QAChB;;;;UAIE,KAAK;;;;;AC9Bf;;;;;;;;;;;;;;;;;MAoBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,aAAK,cAAc,aAAa;AAChC,qBAAa;AACb,aAAK,cAAc;AACnB,aAAK,WAAW;QAChB;;;;;UAKE,KAAK;;;;;ACjCf;;;;;;;;;;;;;;;;;MAmBI,yDAAyD;AACrD,aAAK,gBAAgB,CAAC;AACtB,qBAAa;AACb,gCAAwB;AACxB,aAAK,cAAc;AACnB,qBAAa;AACb,YAAI;AACA,mBAAS;;AAEb,aAAK,WAAW;QAChB,mBAA+B;;;;;;;;;;8BAUT;kCACI;iDACe,YAAY;wBACrC,KAAK;;;;;;;;;;;;;;UAcnB,KAAK,iBAAiB;;;;;ACxDhC;;;;;;;;;;;;;;;;;MAkCI,yDAAyD;AACrD,aAAK,gBAAgB,CAAC;AACtB,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,qBAAa;AACb,gCAAwB;AACxB,aAAK,cAAc;AACnB,uBAAe;AACf,qBAAa;AACb,YAAI;AACA,mBAAS;;AAEb,uBAAe,GAAG,OAAO,GAAG;AACxB,yBAAe,GAAG,OAAO,GAAG;AACxB,4BAAgB,MAAM,IAAI;AAC1B,wBAAY;;gCAEI,SAAS,YAAY;gCACrB;kCACE,SAAS,YAAY;kCACrB;;;;;;;gCAOF;oCACI;kDACc,YAAY;yBACrC,KAAK;;;yBAGL;;yBAEA;;yBAEA;;yBAEA;;;;;;;AAOjB,aAAK,WAAW;QAChB,mBAA+B;;;;;;;;;;;UAW7B;;UAEA,KAAK,YAAY;;;;;AC9F3B;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,cAAc;AACnB,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc;AACnB,aAAK,WAAW;;;;;;;;MAQpB;AACI,eAAO;AACH,cAAI,KAAK,YAAY;AACjB,iBAAK,WAAW,MAAM,0BAA0B,cAAc;;AAElE,gBAAM,GAAG,UAAU,KAAK,UAAU;;;;AClC9C;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,gBAAgB,CAAC,KAAK;AAC3B,4BAAoB,OAAO;AAC3B,oBAAY,QAAQ;AACpB,aAAK,cAAc;AACnB,aAAK,OAAO,YAAY;AACxB,sBAAc,kBAAkB,KAAK;AACrC,6BAAqB,kBAAgB,QAAQ;AAC7C,aAAK,WAAW;;UAEd;yBACe;;;;;AAKzB;AACI,mBAAa,OAAO;AACpB,UAAI,OAAO;AACP,cAAM,MAAM,mBAAmB;;AAEnC,UAAI,SAAS;AACT,eAAO;;AAEX,4BAAsB,CAAC,WAAW,WAAW,WAAW;AACxD,2BAAqB;AACrB,mBAAa,GAAG,IAAI,OAAO,QAAQ;AAC/B,YAAI,MAAM;AACN,uBAAa,KAAK,kBAAkB,cAAc;;AAGlD,uBAAa,KAAK,GAAG,cAAc;;;AAG3C,aAAO,aAAa;;;MClDpB;AACI,aAAK,WAAW;AAChB,aAAK,UAAU;AACf,aAAK,gBAAgB,CAAC,KAAK;AAC3B,aAAK,cAAc;AACnB,4BAAoB,kBAAkB,QAAQ;AAC9C,sBAAc,kBAAkB,MAAM;AACtC,6BAAqB,KAAK,WAAW,IAAI,eAAe;AACxD,aAAK,WAAW;UACd,yBAAyB,eAAe,KAAK;;YAE3C;;gCAEoB,KAAK;;sCAEC;;;;;;;ACjBtC;;;;;;;;;;;;;;;;AAmBO;AACH,mBAAa;AACb,iCAA2B,GAAG,KAAK;;MAEjC,KAAK;MACL,KAAK;MACL,KAAK;;;;;;AAMP,aAAO,mBAA8B,IAAI;;AAEtC;AAEH,0BAAoB,IAAI,aAAa,CAAC,IAAI,GAAG,GAAG,GAAG,GAAG,IAAI,IAAI,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,IAAI,GAAG,GAAG;AACnG,aAAO,yBAAoC,IAAI;;AAE5C;AAEH,oCAA8B,IAAI,YAAY,CAAC,GAAG,GAAG,GAAG,GAAG,GAAG;AAC9D,aAAO,wBAAmC,IAAI;;AAElD;AACI,0
BAA+B,OAAO;AACtC,sBAAgB,cAAyB;AACzC,oBAAc,GAAG;AACjB,mBAAwB,IAAI,MAAM,GAAG,YAAY,OAAO;AACxD,mBAAwB,IAAI,MAAM,GAAG,cAAc,OAAO,GAAG,gBAAgB,GAAG;AAChF,mBAAwB,IAAI,MAAM,GAAG,cAAc,OAAO,GAAG,gBAAgB,GAAG;AAChF,mBAAwB,IAAI,MAAM,GAAG,cAAc,OAAO,GAAG,oBAAoB,GAAG;AACpF,mBAAwB,IAAI,MAAM,GAAG,cAAc,OAAO,GAAG,oBAAoB,GAAG;AACpF,mBAAwB,IAAI,MAAM,GAAG,WAAW,OAAO,GAAG,gBAAgB,OAAO,QAAQ,GAAG,eAAe,aAAa;AACxH,mBAAwB,IAAI,MAAM,GAAG,YAAY,GAAG,YAAY;AAChE,aAAO;;AAEJ;AACH,aAAO,cAAc;;AAElB;AACH,8BAAwB,yCAAkD,MAAM;AAChF,aAAO,0BAA0B,IAAI,OAAO,QAAQ,yCAAyC,gBAAgB,cAAc,oBAAoB,GAAG;;AAE/I;AACH,aAAO,cAAc;;AAElB;AACH,8BAAwB,yCAAkD,MAAM;AAChF,aAAO,0BAA0B,IAAI,OAAO,QAAQ,yCAAyC,gBAAgB,cAAc,oBAAoB,cAAc;;AAE1J;AACH,aAAO,cAAc;;AAElB;AACH,8BAAwB,yCAAkD,MAAM;AAChF,aAAO,0BAA0B,IAAI,OAAO,QAAQ,+CAA+C,gBAAgB,GAAG,MAAM,GAAG;;AAE5H;AACH,aAAO,cAAc;;AAElB;AACH,8BAAwB,uCAAgD,MAAM;AAC9E,aAAO,0BAA0B,IAAI,OAAO,QAAQ,wCAAwC,gBAAgB,GAAG,MAAM,GAAG;;AAErH;AACH,aAAO,cAAc;;AAElB;AACH,8BAAwB,uCAAgD,MAAM;AAC9E,aAAO,0BAA0B,IAAI,OAAO,QAAQ,+CAA+C,gBAAgB,GAAG,MAAM,cAAc;;AAEvI;AACH,wBAAkB;AAClB,uBAAiB,IAAI;AACrB,qBAAgB,IAAI,IAAM,IAAI;AAC9B,mBAAwB,IAAI,MAAM,GAAG,WAAW,GAAG,cAAc;AACjE,sBAAgB,mCAA8C,IAAI,SAAS,gBAAgB,cAAc,GAAG,QAAQ;AACpH,aAAO,WACH,mCAA8C,IAAI,SAAS,MAAM,cAAc,GAAG,QAAQ;;AAE3F;AACH,mBAAwB,IAAI,MAAM,GAAG,YAAY,GAAG,YAAY;AAChE;AACA,UAAI,iBAAgB;AAChB,wBAAgB,IAAI,WAAW,QAAQ,SAAS;AAChD,wBAAgB,GAAG;AACnB,yBAAiB,GAAG;;AAGpB,wBAAgB,IAAI,aAAa,QAAQ,SAAS;AAClD,wBAAgB,GAAG;AACnB,yBAAiB,cAAc;;AAEnC,oBAAc,IAAI;AAClB,mBAAwB,IAAI,MAAM,GAAG,WAAW,GAAG,YAAY,GAAG,gBAAgB,OAAO,QAAQ,GAAG,GAAG,MAAM,eAAe;AAC5H,mBAAwB,IAAI,MAAM,GAAG,YAAY,GAAG,YAAY;;AAE7D;AACH,mBAAwB,IAAI,MAAM,GAAG,YAAY,GAAG,YAAY;AAChE,UAAI,OAAO,gBAAgB;AACvB,qBAAwB,IAAI,MAAM,GAAG,WAAW,GAAG,YAAY,GAAG,GAAG,MAAM,OAAO,OAAO,OAAO,QAAQ,GAAG,GAAG,MAAM,GAAG,eAAe,OAAO;;AAG7I,qBAAwB,IAAI,MAAM,GAAG,WAAW,GAAG,YAAY,GAAG,GAAG,MAAM,GAAG,MAAM,GAAG,eAAe;;AAE1G,mBAAwB,IAAI,MAAM,GAAG,YAAY,GAAG,YAAY;;AAE7D;AAEH,sBAAe,IAAI;AACnB,mBAAwB,KAAK,MAAM,IAAI,WAAW,IAAI,mBAAmB;AAEzE,4BAAsB;AACtB,6BAAuB;AACvB,8BAAwB,gBAAgB,iBAAiB,OAAO;AAChE,mBAAwB,KAAK,MAAM,IAAI,WAAW,IAAI,mBAAmB,iBAAiB,IAAI;AAG9F,mBAAwB,KAAK,MAAM,IAAI,WAAW,GAAG,GAAG,SAAS,MAAM,IAAI,MAAM,IAAI,OAAO;AAC5F,mBAAwB,KAAK,MAAM,IAAI,WAAW,IAAI,mBAAmB;AACzE,aAAO;;AAEJ;AACH,kBAAY;AACZ,6BAAuB,IAAI,aAAa;AACxC,UAAI,WAAW,IAAI,mBAAmB;AACtC,UAAI,iBAAiB,IAAI,mBAAmB,GAAG;AAC/C,UAAI,WAAW,IAAI,mBAAmB;AACtC,aAAO;;AAEJ;AACH,qBAAe,yCAAkD,MAAM;AACvE,0BAAoB;AACpB,6BAAuB,IAAI,WAAW,mCAA4C,OAAO,SAAS;AAClG,mBAAwB,IAAI,MAAM,GAAG,WAAW,GAAG,GAAG,GAAG,GAAG,cAAc,uBAAuB,GAAG,eAAe;AAGnH,aAAO,IAAI,aAAa,eAAe;;AAEpC;AACH,kBAAY;AACZ,6BAAuB,IAAI,aAAa,sCAA+C,cAAc;AACrG,UAAI,WAAW,IAAI,mBAAmB;AACtC,UAAI,iBAAiB,IAAI,mBAAmB,GAAG;AAC/C,UAAI,WAAW,IAAI,mBAAmB;AACtC,aAAO;;AAEJ;AACH,yBAAmB,IAAI,aAAa,eAAe,eAAe;AAClE,mBAAwB,IAAI,MAAM,GAAG,WAAW,GAAG,GAAG,cAAc,cAAc,GAAG,MAAM,GAAG,OAAO;AACrG,aAAO;;AC1KX;;;;;;;;;;;;;;;;;MAsBI;AACI,aAAK,gBAAgB;AACrB,aAAK,UAAU;AACf,aAAK,WAAW;AAChB,aAAK,sBAAsB;AAC3B,aAAK,cAAc;AACnB,0BAAkB,OAAM,UAAU;AAClC,YAAI,MAAM;AACN,eAAK,KAAK;AACV,0BAAgB,WAAW;;AAG3B,eAAK,KAAK,gBAAgB;;AAG9B,iCAAyB;AACzB,wCAAgC;AAChC,YAAI,OAAM,UAAU,qBAAqB;AACrC,gCAAsB;AACtB,qCAA2B;AAC3B,eAAK,wBACD,oBAA+B,KAAK,IAAI;AAC5C,cAAI,aAAwB,KAAK,IAAI;AACjC,iBAAK,4BACD,oBAA+B,KAAK,IAAI;qBAEvC,OAAM,IAAI;AACf,kBAAM,IAAI,MAAM;;AAGpB,eAAK,4BAA4B,KAAK,GAAG,aAAa;AACtD,cAAI,aAAwB,KAAK,IAAI;AACjC,iBAAK,gCACD,oBAA+B,KAAK,IAAI;qBAEvC,OAAM,IAAI;AACf,kBAAM,IAAI,MAAM;;;AAKpB,+BAAqB;AACrB,cAAI,aAAwB,KAAK,IAAI;AACjC,iBAAK,4BACD,KAAK,GAAG,aAAa;qBAEpB,aAAwB,KAAK,IAAI;AACtC,iBAAK,gCACD,KAAK,GAAG,aAAa;;AAGzB,kBAAM,IAAI,MAAM;;;AAGxB,aAAK,eAAe,mBAA8B,KAAK;AACvD,aAAK,cAAc,kBA
A6B,KAAK;AACrD,aAAK,cAAc,kBAA6B,KAAK;AACrD,aAAK,gBACD,iBAA0B,KAAK,IAAI,KAAK;;UAE5C;AACA,eAAO,OAAM,QAAQ;;MAEzB;AACI,YAAI,KAAK;AACL;;AAEJ,YAAI,KAAK,WAAW;AAChB,kBAAQ,KAAK;;AAIjB,YAAI,KAAK,iBAAiB;AACtB,kBAAQ,KAAK;;AAKjB,mBAAW,KAAK;AAChB,qBAAwB,IAAI,MAAM,GAAG;AACrC,qBAAwB,IAAI,MAAM,GAAG,gBAAgB,GAAG,aAAa;AACrE,qBAAwB,IAAI,MAAM,GAAG,kBAAkB,KAAK;AAC5D,qBAAwB,IAAI,MAAM,GAAG,WAAW,GAAG,cAAc;AACjE,qBAAwB,IAAI,MAAM,GAAG,WAAW,GAAG,sBAAsB;AACzE,qBAAwB,IAAI,MAAM,GAAG,aAAa,KAAK;AACvD,aAAK,WAAW;;MAEpB;AACI,aAAK;AACL,eAAO,2BAAsC,KAAK,IAAI,MAAM,SAAS,KAAK;;MAE9E;AACI,aAAK;AACL,eAAO,2BAAsC,KAAK,IAAI,MAAM,SAAS,KAAK;;MAE9E;AACI,aAAK;AACL,eAAO,iCAA4C,KAAK,IAAI,MAAM,SAAS,KAAK;;MAEpF;AACI,aAAK;AACL,iCAAoC,KAAK,IAAI,SAAS;;MAE1D;AACI,aAAK;AACL,mCAAsC,KAAK,IAAI,SAAS,OAAO,QAAQ,OAAM,KAAK;;MAEtF;AACI,aAAK;AACL,eAAO,iCAA4C,KAAK,IAAI,MAAM,SAAS,KAAK;;MAEpF;AACI,aAAK;AACL,eAAO,0BAAqC,KAAK,IAAI,MAAM,SAAS,KAAK;;MAE7E;AACI,aAAK;AACL,YAAI,KAAK,kBAAkB;AACvB,4CAA6C,KAAK,IAAI,KAAK;AAC3D,eAAK,gBAAgB;;AAEzB,qBAAwB,KAAK,IAAI,MAAM,KAAK,GAAG,cAAc;;MAEjE;AACI,eAAO,KAAK,qBAAqB,SAAS,MAAM,gDAA2D,KAAK,IAAI,MAAM,SAAS,KAAK;;MAE5I;AACI,eAAO,+BAA0C,KAAK,IAAI,SAAQ,OAAO,MAAM,SAAS,cAAc,cAAc,KAAK;;MAE7H;AACI,eAAO,gCAA2C,KAAK,IAAI,SAAQ;;MAEvE;AACI,aAAK,yBAAyB;AAC9B,uBAAe,8BAAyC,KAAK,IAAI,MAAM,SAAS,KAAK;AACrF,aAAK;AACL,eAAO;;MAEX;AACI,6BAAqB,KAAK,YAAY,KAAK;AAC3C,eAAO,KAAK,UAAU;;MAE1B;AACI;AACA;AACA,YAAI,OAAM,QAAQ;AACd,sBAAY;AACZ,uBAAa,IAAI,UAAU,IAAI,4BAA4B;AAC3D,aAAG;AACH,0BAAgB;AACZ,2BAAe,IAAI,eAAe,MAAM,GAAG;AAC3C,mBAAO,WAAW,IAAI,oBAClB,WAAW,IAAI;;AAEvB,kBAAQ;mBAEH,OAAM,UAAU,kDAAkD;AACvE,kBAAQ,KAAK;AACb,eAAK;AACL,0BAAgB,MAAM,KAAK,iBAAiB,OAAO,OAAM,UAAU;;AAOnE,0BAAgB,MAAM;;AAE1B,eAAO,CAAE,OAAO;;MAEpB;AACI,eAAO,KAAK,qBAAqB,SAAS,MAAM,sCAAiD,KAAK,IAAI,cAAc;;MAE5H;AACI,aAAK;AACL,mBAAW,KAAK;AAChB,+BAAuB,qBAAgC,IAAI;AAC3D,6BAAqB,qBAA8B;AACnD,wBAAgB,cAAyB;AACzC,qBAAwB,IAAI,MAAM,GAAG,aAAa,SAAS;AAC3D,qBAAwB,IAAI,MAAM,GAAG,aAAa,SAAS;AAC3D,oBAAuB,IAAI;AAC3B,YAAI,KAAK;AACL,0BAA2B,IAAI;;AAEnC,YAAI,CAAC,KAAK;AACN,eAAK,WAAW;AAChB,eAAK,sBAAsB,kCAA6C,IAAI,KAAK,SAAS,KAAK;;AAEnG,eAAO;;MAEX;AACI,aAAK;AACL,YAAI,YAAY,KAAK;AACjB,eAAK,UAAU;;AAEnB,YAAI,WAAW;AACX,uBAAwB,KAAK,IAAI,MAAM,KAAK,GAAG,cAAc;;;MAGrE;AACI,aAAK;AACL,aAAK,UAAU;AACf,YAAK,KAAK,WAAW,QAAS,KAAK;AAC/B,0BAA2B,KAAK,IAAI,KAAK;;AAE7C,qBAAwB,KAAK,IAAI,MAAM,KAAK,GAAG,WAAW;;MAE9D,uDAAuD;AACnD,aAAK;AACL,YAAI;AACA,iBAAO,iCAA4C,KAAK,IAAI,SAAS;;AAGrE,iBAAO,0BAAqC,KAAK,IAAI,SAAS;;;MAGtE;AACI,aAAK;AACL,eAAO,aAAwB,KAAK,IAAI,MAAM,KAAK,GAAG,kBAAkB,SAAS;;MAErF;AACI,aAAK;AACL,eAAO,KAAK,GAAG,mBAAmB,SAAS;;MAE/C;AACI,aAAK;AACL,aAAK;AACL,2CAA8C,KAAK,IAAI,oBAAoB,iBAAiB;;MAEhG;AACI,aAAK,6BAA6B,qBAAqB,SAAS;;MAEpE;AACI,aAAK;AACL,gCAAwB,uCAAgD,MAAM;AAC9E,aAAK,6BAA6B,2BAA2B,OAAO;;MAExE;AACI,aAAK,iCAAiC,aAAa,UAAU,YAAY;;MAE7E;AACI,cAAM,IAAI,MAAM;;MAEpB;AACI,YAAI,KAAK,WAAW;AAChB,0BAA2B,KAAK,IAAI,KAAK;;AAE7C,4BAA+B,KAAK;;MAExC;AACI,aAAK;AACL,aAAK;AACL,mBAAW,KAAK;AAChB,YAAI,KAAK;AACL,eAAK;;AAET,qBAAwB,IAAI,MAAM,GAAG,aAAa,GAAG,WAAW,GAAG,GAAG,gBAAgB;;MAE1F;AACI,aAAK;AACL,qBAAwB,KAAK,IAAI,MAAM,KAAK,GAAG;;MAEnD;AACI,YAAI,KAAK,+BAA+B;AACpC,eAAK,8BACD,oBAA+B,KAAK,IAAI,OAAM,UAAU,oDAAoD,IACxG,oCACA;;AAEZ,eAAO,KAAK;;MAEhB;AACI,eAAO,KAAK;;MAEhB;AACI,eAAO,KAAK;;MAEhB;AACI,YAAI,OAAM,UAAU,oDAAoD;AACpE,sBAAY,KAAK;AACjB,uBAAY,KAAK;AACjB,yBAAc,IAAI;AAClB,cAAI,WAAW,KAAI,kBAAkB;AACrC,iBAAO;;AAEX,oBAAY,KAAK;AACjB,sBAAc,IAAI;AAClB,YAAI,cAAc,IAAI,kBAAkB;AACxC,eAAO;;MAEX;AACI,YAAI,OAAM,UAAU,oDAAoD;AACpE,sBAAY,KAAK;AACjB,uBAAY,KAAK;AACjB,cAAI,SAAS,KAAI;AACjB;;AAEJ,oBAAY,KAAK;AACjB,YAAI,YAAY,IAAI;;YAElB;AACF,cAAM,aAAiB,MAAM,KAAK,YAG9B,KAAK,iBAAiB,OAAO,OAAM,UAAU;AACjD,eA
AO,KAAK,aAAa,OAAO,OAAM,UAAU;;MAEpD;AACI,YAAI,sBAAsB;AACtB,iBAAO;;AAEX,YAAI,sBAAsB;AACtB,sBAAY,KAAK;AACjB,mCAAyB,IAAI,kBAAkB,OAAO,IAAI;AAE1D,iBAAO,mBAAmB;;AAG1B,sBAAY,KAAK;AACjB,mCAAyB,IAAI,kBAAkB,OAAO,IAAI;AAE1D,iBAAO,mBAAmB;;;MAGlC;AACI,YAAI,sBAAsB;AACtB,iBAAO;;AAEX,YAAI,sBAAsB;AACtB,sBAAY,KAAK;AACjB,sBAAY,KAAK;AACjB,4BAAkB,IAAI,kBAAkB,OAAO,IAAI;AACnD,cAAI,KAAK,YAAY;AACjB,iBAAK,WAAW,KAAK,GAAG,aAAa,IAAI;;AAE7C,iBAAO,aAAa,CAAC,KAAK;;AAG1B,sBAAY,KAAK;AACjB,4BAAkB,IAAI,kBAAkB,OAAO,IAAI;AACnD,cAAI,KAAK,YAAY;AACjB,iBAAK,WAAW,KAAK,GAAG,aAAa,IAAI;;AAE7C,iBAAO,aAAa,CAAC,KAAK;;;MAGlC;AACI,eAAO,IAAI,QAAQ;AACf,eAAK,cAAc,MAAM,aAAa,iBAAiB,MAAM;;;MAGrE;AAEI,uBAAc,qBAAqB,KAAK,YAAY,IAAI,OAAK,EAAE;AAC/D,qBAAa,GAAG,KAAK,QAAO,EAAE;AAC1B,iBAAQ,aAAc,KAAK,YAAY;AACvC;;AAEJ,aAAK,cAAc,KAAK,YAAY,MAAM,SAAQ;;MAEtD;AACI,aAAK,YAAY,KAAK,CAAE,UAAU;AAClC,YAAI,KAAK,YAAY,SAAS;AAE1B;;AAGJ,qBAAiB;AACb,eAAK;AAEL,iBAAO,KAAK,YAAY,WAAW;;;MAG3C;AACI,aAAK;AACL,sCAAyC,KAAK,IAAI,SAAS,KAAK;AAChE,YAAI,KAAK;AACL,8BAA+B,KAAK;;;MAG5C;AACI,YAAI,KAAK,iBAAiB;AACtB,wCAAyC,KAAK,IAAI,KAAK,eAAe,KAAK;AAC3E,cAAI,KAAK;AACL,gCAA+B,KAAK;;;AAIxC,4CAA6C,KAAK,IAAI,KAAK;;;MAGnE;AACI,aAAK,yBAAyB;AAC9B,uBAAe;AACf,aAAK;AACL,eAAO;;MAEX;AACI,aAAK;AACL,mBAAW,KAAK;AAChB,sCAAyC,IAAI,gCAAgC,KAAK;AAClF,YAAI,KAAK;AACL,8BAA+B;;AAEnC,aAAK,gBAAgB;AACrB,qBAAwB,IAAI,MAAM,GAAG,SAAS,GAAG,GAAG,OAAO;AAC3D,qBAAwB,IAAI,MAAM,GAAG,QAAQ,GAAG,GAAG,OAAO;;MAE9D;AACI,aAAK;AACL,qBAAwB,KAAK,IAAI,MAAM,KAAK,GAAG,QAAQ,GAAG,GAAG,OAAO;;MAExE;AACI,YAAI,KAAK;AACL,gBAAM,IAAI,MAAM;;;MAGxB;AACI,YAAI,KAAK,WAAW;AAChB,gBAAM,IAAI,MAAM;;;;AAUrB;AACH,cAAQ;AACR,aAAO,IAAI,IAAI,QAAQ,EAAE;AACrB,uBAAe,IAAI;AACnB,YAAI,CAAC;AACD;;;AAGR,aAAO,IAAI;;AC5cf;;;;;;;;;;;;;;;;AAkBO;AACH,uBAAiB,QAAQ;AACzB,yBAAmB,OAAO,IAAI;AAC1B,0BAAkB;UACd,cAAc,OAAM;UACpB,UAAU,OAAM,YAAY,OAAO,OAAM,QAAQ;UACjD,WAAW,OAAM;UACjB,UAAU,OAAM,YAAY,QAAQ,OAAM,QAAQ;UAClD,YAAY;;AAEhB,YAAI,OAAM,WAAW,QAAQ,OAAM,QAAQ,SAAS,QAChD,OAAM,QAAQ,MAAM,aAAa;AACjC,oBAAU,aAAa,OAAM,QAAQ,MAAM;;AAE/C,eAAO,CAAE,MAAM,QAAQ,cAAc,IAAI;;AAE7C,2BAAqB,WAAW,IAAI,OAAK,EAAE;AAC3C,2BAAqB;QACjB,cAAc,OAAO;QACrB,UAAU,OAAO,QAAQ;QACzB,WAAW;QACX,UAAU,OAAO,QAAQ;QACzB,YAAY;;AAEhB,qBAAe,WAA2B,YAAY,cAAc,UAAU,QAAQ;AACtF,2BAAqB,MAAM,cAAc;AAEzC,mBAAa;AACb,qBAAe,MAAM,mBAAmB,cAAc,OAAO;AAC7D,UAAI,OAAM,UAAU,qBAAqB;AACrC,iBAAS,MAAM,mBAAmB,cAAc,YAAY;;AAGhE,+BAAyB;AACzB,mBAAa,GAAG,IAAI,QAAQ,cAAc,QAAQ;AAC9C,wBAAgB,QAAQ,cAAc;AACtC,4BAAoB;AACpB,yBAAiB,WACb,MAAM,mBAAmB,cAAc,SAAS;AACpD,yBAAiB,SAAS,aACtB,MAAM,mBAAmB,cAAc,SAAS,WAAW;;AAEnE,aAAO;QACH;QACA;QACA;QACA;QACA;QACA;QACA;QACA;;;AAGR;AACI,UAAI,WAAW,WAAW,OAAO;AAC7B,cAAM,MAAM,4BAA4B,WAAW,wCAC1B,OAAO;;AAEpC,iBAAW,QAAQ;AACf,uBAAe,EAAE;AACjB,uBAAc,OAAO;AACrB,uBAAe,OAAM;AACrB,YAAI,CAAC,aAAiB,QAAQ;AAC1B,gBAAM,MAAM,2EACoB,cAAc;;AAGlD,YAAI,EAAE,aAAa,OAAM;AACrB;;AAEJ,0BAAkB,EAAE;AACpB,0BAAkB,OAAM,YAAY,OAAO,OAAM,QAAQ;AACzD,YAAI,CAAC,aAAiB,WAAW;AAC7B,gBAAM,MAAM,kFACgB,iBAAiB;;;;AAIlD;AACH,+BAAyB,OAAO,cAAc;AAC9C,+BAAyB,CAAC,OAAO,eAAe,CAAC;AACjD,qBAAe,OAAO,QAAQ;AAC9B,0BAAoB,OAAO,QAAQ;AACnC,UAAI,OAAO,QAAQ;AACf,cAAM,6BAA6B,QAAQ,YAAY,IAAI,YAAY;;AAGvE,cAAM,uBAAuB,QAAQ,YAAY,IAAI,YAAY;;AAErE,YAAM,WAAW,OAAO;AAExB,UAAI,OAAM,UAAU,qBAAqB;AACrC,YAAI,OAAO,WAAW;AAClB,gBAAM,GAAG,UAAU,OAAO,QAAQ;;;AAG1C,UAAI,OAAO,WAAW;AAClB,cAAM,GAAG,UAAU,OAAO,QAAQ;;AAGtC,aAAO,QAAQ;AACX,wBAAgB,OAAO,QAAQ,cAAc;AAC7C,uBAAe,OAAO,iBAAiB;AACvC,6BAAqB,OAAO,iBAAiB,SAAS;AACtD,YAAI,UAAU;AAEV;;AAEJ,YAAI,OAAM;AAEN,cAAI,eAAmB,OAAM,SAAS;AAClC,kBAAM,GAAG,UAAU,QAAQ,OAAM,cAAc;;AAG/C,uBAAW,OAAM;AACjB,gBAAI,CAAE,iBAAgB;AAClB,qBAAO,IAAI,aAAa;;AAE5B,kBAAM,GAAG,WAAW,QAAQ;;AAEhC;;AAGJ,YAAI,OAAM,QAAQ,SAAS,QAAQ,gBAAgB;A
AAS,OAAO;AAC7D,eAAO,cAAc,GAAG,QAAQ;;AAEpC,mBAAa,OAAO,EAAE;AACtB,aAAO,CAAE,QAAQ,EAAE,QAAQ,OAAO,QAAQ,OAAO,EAAE;;AAEhD,4BAAsB;MACzB,YAAY;MACZ,aAAa;MACb,YAAY;;ACzChB;;;;;;;;;;;;;;;;AAuBO;AACH,oBAAc,OAAO,GAAG;AACxB,UAAI,UAAU;AACV,sBAAc,OAAO,IAAI,OAAO,OAAK,CAAE,QAAQ,CAAE,OAAO,IAAK;AAC7D,sBAAc,OAAO,IAAI,OAAO,OAAK,CAAE,QAAQ,CAAE,OAAO,IAAK;AAC7D,6BAAqB,WAAW,OAAO,MAAM;AAC7C,6BAAqB,WAAW,OAAO,MAAM;AAC7C,wBAAe,UAAQ,CAAE,QAAQ,CAAE,MAAM,cAAc,MAAM,eAAgB;AAC7E,cAAM,QAAQ,OAAK,SAAQ,8BAA8B;AACzD,cAAM,QAAQ,OAAK,SAAQ,8BAA8B;AACzD,iBAAQ,8BAA8B;AACtC,iBAAQ,8BAA8B;AACtC,eAAO;;AAEX,UAAI,OAAO,SAAS,OAAM,UAAU;AAChC,yBAAiB,KAAK,MAAM,OAAO,SAAS;AAC5C,yBAAiB,WAAW,OAAO,MAAM,GAAG,WAAW,MAAM;AAC7D,0BAAkB,WAAW,OAAO,MAAM,WAAW,MAAM;AAC3D,wBAAe,WAAW,CAAC,UAAU,YAAY,MAAM;AACvD,iBAAQ,8BAA8B;AACtC,iBAAQ,8BAA8B;AACtC,eAAO;;AAEX,UAAI,OAAM,QAAQ,kCACd,OAAO,GAAG,MAAM,SAAS;AACzB,yBAAgB,IAAI,oBAAoB,OAAO,IAAI,OAAK,EAAE,QAAQ;AAClE,eAAO,SAAQ,gBAAgB,UAAS,QAAQ;;AASpD,uBAAiB,kBAA6B,OAAO,IAAI,OAAK,EAAE,QAAQ;AACxE,wBAAkB,OAAO,IAAI,OAAK,UAAQ;QACtC,QAAQ,CAAE;QACV,OAAO,CAAE,OAAO,CAAC,IAAI,eAAmB,EAAE,MAAM,MAAM;QACtD;;AAEJ,sBAAgB,IAAI,cAAc,UAAU,IAAI,OAAK,EAAE;AACvD,qBAAe,SAAQ,gBAAgB,SAAS,WAAW;AAC3D,gBAAU,QAAQ,OAAK,SAAQ,8BAA8B;AAC7D,6BAAuB,UAAQ,CAAE,QAAQ,CAAE,GAAG,SAAU,OAAO,CAAE,OAAO,WAAY;AACpF,eAAQ,8BAA8B;AACtC,aAAO;;ACrEX;;;;;;;;;;;;;;;;AAkBO;AACH,aAAQ,QAAQ,mBAAS,SAAU;AACnC,aAAQ,QAAS;AACjB,oBAAc,gBAAoB,MAAM,OAAO,GAAG,OAAO;AACzD,uBAAiB,kBAA6B,OAAO,IAAI,OAAK,EAAE,QAAQ;AACxE,UAAI,eAAmB,cAAc;AACjC,eAAO,SAAQ,eAAe,UAAU,OAAO,GAAG,OAAO;;AAG7D,sBAAgB,OAAO,OAAO,OAAK,eAAmB,EAAE,SAAS;AACjE,UAAI,QAAQ,WAAW;AACnB,eAAO,QAAQ;;AAEnB,qBAAe,QAAQ,IAAI,OAAK,EAAE;AAClC,8BAAoC,QAAQ;AAC5C,aAAO,WAAW,SAAS,OAAO;;AAE/B,2BAAqB;MACxB,YAAY;MACZ,aAAa;MACb,YAAY;;ACtChB;;;;;;;;;;;;;;;;AAkBA,gBAAY,0BAA0B;;;AAG/B,kBAAY,kBAAgB;AAC5B,wBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY;;ACzBhB;;;;;;;;;;;;;;;;AAoBA,gBAAY;;;;;AAOZ,uBAAmB;;;;;;;;;;;;;;;;;;;AAmBZ,kBAAY,mBAAiB,CAAE,WAAW,KAAK,iBAAiB,YAAY,kBAAkB;AAC9F,wBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY;;AClDhB;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC,QAAQ;AAC9B,yBAAiB,WAAW;AAC5B,aAAK,cAAc;AACnB,0CAAkC,UAAU,SAAS,KAAK,OAAO,UAAU,KAAK;AAChF,kCAA0B,UAAU,GAAG,eAAe;AACtD;AACA,YAAI,cAAc;AACd,qBAAW;mBAEN,cAAc;AACnB,qBAAW;;AAGX,gBAAM,IAAI,MAAM,sDAAsD;;AAE1E,aAAK,WAAW;yCACiB;;;UAG/B;;;;kDAIwC;;;;;;8BAMpB;;;;;;;;;yDAS2B;;;;;;;;;;;;;ACxDzD;;;;;;;;;;;;;;;;AAoBO;AACH,oBAAc,SAAQ,QAAQ,IAAI,EAAE;AACpC,wBAAkB,eAAmB,EAAE;AAEvC,iCAA2B,EAAE,MAAM,EAAE,MAAM,SAAS;AACpD,oBAAc,YAAY;AAC1B,sBAAgB,UAAQ,CAAE,QAAQ,CAAE,IAAK,mBAAS,OAAO,CAAE,OAAO,CAAC,OAAO;AAC1E,qBAAe,QAAQ;AACvB,0BAAoB,IAAI,WAAW,QAAQ,QAAQ;AACnD,0BAAoB,IAAI,WAAW,QAAQ,QAAQ;AACnD,qBAAe;QACX;UACI,QAAQ,MAAM,mBAAmB,KAAK;UACtC,OAAO,MAAM,mBAAmB,KAAK;UACrC,OAAO;;QAEX;UACI,QAAQ,MAAM,mBAAmB,KAAK;UACtC,OAAO,MAAM,mBAAmB,KAAK;UACrC,OAAO;;;AAGf,uBAAiB,SAAQ,gBAAgB,aAAa,QAAQ;AAC9D,uBAAiB,SAAQ,gBAAgB,aAAa,QAAQ;AAC9D,4BAAsB,UAAQ,CAAE,QAAQ,CAAE,MAAM,UAAU,MAAM,WAAY;AAC5E,eAAQ,8BAA8B;AACtC,eAAQ,8BAA8B;AACtC,oCAA8B,UAAQ,CAAE,QAAQ,CAAE,GAAG,gBAAiB,mBAAS,OAAO,CAAE,OAAO,EAAE;AACjG,eAAQ,8BAA8B;AACtC,aAAO;;ACjDX;;;;;;;;;;;;;;;;AAkBO;AACH,aAAQ,QAAQ,qBAAY;AAC5B,aAAQ,iBAAU;AAClB,aAAO,UAAQ,QAAO,OAAqB;;AAExC,wBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY;;AC1BhB;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc;AACnB,2BAAmB,WAAW;AAC9B,aAAK,cAAc;AACnB,aAAK,WAAW;;;;;yBAKC;;uCAEc;;;;;;;;;;AC7BvC;;;;;;;;;;;;;;;;AAkBO,kCAA4B;MAC/B,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ;AACnB,eAAQ,iBAAU;AAClB,6BAAqB;AACrB,wBAAgB,IAAI,qBAAqB,OAAM;AAC/C,uBAAe,aAAa,gBAAgB,SAAS,CAAC,SAAQ,OAAM;AACpE,eAAO;;;AC1Bf;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,gBAAgB,CAAC;AACtB,qBAAa;AACb,gCAAyB;AACzB,a
AAK,cAAc;AACnB,aAAK,WAAW;;;;;;uDAM+B,YAAY;;wBAE3C,KAAK;;;;;;;;;;;;;;;;;AC/B7B;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,qBAAa;AACb,gCAAyB;AACzB,aAAK,cAAc;AACnB,aAAK,WAAW;;;;;;;;;;;;;;;8BAeM,YAAY;4BACd,KAAK;;;;;;;;;;;;;;;;UAgBvB,KAAK;;;;;ACzDf;;;;;;;;;;;;;;;;AAqBO,6BAAyB;MAC5B,YAAY;MACZ,aAAa;MACb,YAAY;;AAEhB;AACA;AACI,aAAQ,QAAQ,mBAAS,SAAU;AACnC,WAAM,UAAW;AACjB,aAAQ,eAAgB;AACxB,sBAAgB,OAAQ,qBAAsB,eAC1C,kBAAkB;AACtB,sBAAgB,OAAQ,qBAAsB,eAC1C,kBAAkB;AACtB,8BAAwB,UACpB;QACI,OAAO;QACP,OAAO;UAEX,CAAC,OAAO,OAAO,OAAO;AAC1B,uBAAiB,CAAC,QAAQ;AAC1B,uBAAiB,CAAC,QAAQ,OAAO;AACjC,UAAI,WAAW;AACX,YAAI,yBAAuB;AACvB,kCAAsB,SAAS,cAAc,UAAU,WAAW;;AAEtE,8BAAoB,OAAO,QAAQ;AACnC,8BAAoB,OAAO,SAAS;AACpC,8BAAoB,UAAU,QAAQ,GAAG,GAAG,OAAO;AACnD,iBAAS,sBAAoB;;AAEjC,8BAAwB,SAAQ,eAAe,UAAU;AAEzD,eAAQ,QAAQ,IAAI,gBAAgB,QAAQ,QAAQ,aAAa;AACjE,eAAQ,MAAM,yBAAyB,SAAQ,WAAW,gBAAgB,SAAS;AACnF,sBAAgB,OAAM,QAAQ,gBAC1B,IAAI,wBAAwB,YAC5B,IAAI,kBAAkB;AAC1B,kBAAY,SAAQ,gBAAgB,SAAS,CAAC,kBAAkB;AAChE,eAAQ,YAAY,gBAAgB;AACpC,aAAO;;AC7DX;;;;;;;;;;;;;;;;AAkBO;AACH,aAAQ,QAAQ,qBAAY;AAC5B,aAAQ,iBAAU;AAClB,aAAO,UAAQ,QAAO,MAAoB;;AAEvC,yBAAmB;MACtB,YAAY;MACZ,aAAa;MACb,YAAY;;AC1BhB;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,gBAAgB,CAAC;AACtB,eAAQ,YAAY,WAAW,QAAQ,WAAY;AACnD,aAAK,cAAc,CAAC,WAAW;AAC/B,sCAA8B,KAAK,MAAM,aAAa,KAAK;AAC3D,wCAAgC,aAAa;AAC7C,4BAAoB;AACpB,YAAI,WAAW;AACX,8BAAoB,IAAI;AACxB,0BAAgB,4BAA4B,OAAW,eAAe,YAAY,YAAY,KAC1F;;AAER,+BAAuB;AACvB,YAAI,SAAS,aAAa;AACtB,6BAAmB;oCACK;;;;;AAK5B,aAAK,WAAW;;;;UAId;;;;;;;;kCAQwB;;;;8BAIJ;;;;;;;;;YASlB;;;iCAGqB;cACnB,4BAA4B;;;YAG9B;qBACS,4BAA4B;;;;;YAKrC;qBACS,4BAA4B;;;;;;YAMrC;;;;;;;ACnFZ;;;;;;;;;;;;;;;;AAqBA;AACI,qBAAe;AACf,aAAO,OAAO,WAAW,KAAK,OAAO,OAAO,SAAS,GAAG,YAAY;AAChE,wBAAgB,OAAO,SAAS,OAAO,OAAO,SAAS,GAAG,UAAU,QAAQ;AAC5E,2BAAmB,0BAAsC;AACzD,eAAO,KAAK;UACR,QAAQ;UACR;UACA,SAAS,KAAK,KAAK,UAAU;;;AAGrC,aAAO;;AAEJ;AACH,8BAAwB,mBAAmB,EAAE;AAC7C,mBAAa;AACb,mBAAa,GAAG,IAAI,gBAAgB,QAAQ;AACxC,eAAQ,QAAQ,YAAY,WAAY,gBAAgB;AACxD;AACA;AACA,YAAI,kBAAkB;AAClB,oBAAU,MAAM,IACZ,IAAI,YAAY,CAAE,YAAY,QAAQ,WAAW,EAAE,MAAM,IAAI,UAAW,UACxE,IAAI,YAAY,CAAE,YAAY,QAAQ,WAAW,EAAE,MAAM,IAAI;;AAGjE,oBAAU,IAAI,cAAc,CAAE,YAAY,QAAQ,WAAW,EAAE,MAAM,IAAI,UAAW;;AAExF,yBAAiB;AACjB,iBAAS,SAAQ,gBAAgB,SAAS,CAAC,SAAS;AACpD,YAAI,eAAe,WAAW,EAAE;AAC5B,mBAAQ,8BAA8B;;;AAG9C,aAAO;;ACvDX;;;;;;;;;;;;;;;;AAmBO;AACH,qBAAe,eAAmB;AAClC,oBAAc,eAAmB,EAAE;AACnC,wBAAkB,QAAQ;AAC1B,4BAAsB,UAAQ,CAAE,QAAQ,CAAE,IAAK,OAAO,CAAE,OAAO,CAAC,WAAW,UAAW;AACtF,sBAAgB,OAAO,eAAe,EAAE,OAAO,OAAO;AACtD,6BAAuB,UAAQ,CAAE,QAAQ,CAAE,GAAG,UAAW,OAAO,CAAE,OAAO,WAAY;AACrF,eAAQ,8BAA8B;AACtC,eAAQ,8BAA8B;AACtC,aAAO;;AC5BX;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,gBAAgB,CAAC;AACtB,4BAAoB,IAAI,MAAM,OAAO;AACrC,qBAAa,GAAG,IAAI,YAAY,QAAQ;AACpC,sBAAY,KAAK,OAAO,OAAO;;AAEnC,aAAK,cAAc;AACnB,aAAK,OAAO,YAAY;AACxB,sBAAc,kBAAkB,KAAK;AACrC,yBAAiB,kBAAkB;AACnC,aAAK,WAAW;;QAEhB;uBACe;;;;;AAKvB;AACI,mBAAa,OAAO;AACpB,UAAI,OAAO;AACP,cAAM,MAAM,sBAAsB;;AAEtC,4BAAsB,CAAC,WAAW,WAAW,WAAW,WAAW,WAAW;AAC9E,6BAAuB,IAAI,MAAM;AACjC,mBAAa,GAAG,IAAI,OAAO,QAAQ;AAC/B,uBAAe,OAAO,MAAM,cAAc;;AAE9C,aAAO,eAAe;;AC9C1B;;;;;;;;;;;;;;;;;MAmBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,4BAAoB,IAAI,MAAM,OAAO;AACrC,qBAAa,GAAG,IAAI,YAAY,QAAQ;AACpC,sBAAY,KAAK,OAAO,OAAO;;AAEnC,aAAK,cAAc;AACnB,aAAK,OAAO,YAAY;AACxB,YAAI,KAAK,OAAO;AACZ,gBAAM,MAAM,6BAA6B,KAAK;;AAElD,sBAAc,kBAAkB,KAAK;AACrC,4BAAoB,eAAe,MAAM,KAAK;AAC9C,8BAAsB,IAAI,MAAM,KAAK;AACrC,qBAAa,GAAG,IAAI,OAAO,QAAQ;AAC/B,wBAAc,OAAO,MAAM,YAAY;;AAE3C,0BAAkB,QAAQ,cAAc,MAAM,IAAI;AAClD,2BAAmB,KAAK,YAAY,KAAK,OAAO,QAAQ,YAAY,KAAK,OAAO;AAChF,qBAAa,mBAAmB,cAAc,YAAY;AAC
1D,aAAK,WAAW;;QAEhB;;oBAEY;WACT;sBACW;;UAEZ,YAAY,KAAK,OAAO;aACrB,YAAY,KAAK,OAAO,QAAQ,YAAY,KAAK,OAAO;sBAC/C;aACT;wBACW;;;;;;;;ACrDxB;;;;;;;;;;;;;;;;AAoBO;AACH,sBAAgB,OAAM,QAAQ,iCAC1B,IAAI,uBAAuB,EAAE,OAAO,QACpC,IAAI,iBAAiB,EAAE,OAAO;AAClC,aAAO,SAAQ,gBAAgB,SAAS,CAAC,IAAI,EAAE;;ACxBnD;;;;;;;;;;;;;;;;AAqBO,wBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,OAAO;AAC1B,eAAQ,KAAM;AACd,eAAQ,kBAAkB,YAAa;AACvC,6BAAqB;AACrB,sBAAc,EAAE,MAAM;AACtB,yBAAiB,gBAAoB,kBAAkB,EAAE;AACzD,mBAAW;AACX,6BAAqB,oBAAgC,MAAM;AAC3D,qCAA6B,gBAAgB;AAC7C,mCAA2B,aAAa,mBAAmB,CAAC;AAC5D,uBAAe;AACf,YAAI;AACA,cAAI;AACA,6BAAiB,aAAa,QAAQ,IAAI,SAAS;AACnD,2BAAe,SAAS;AACxB,6BAAiB,IAAI,MAAM;AAC3B,yBAAa,GAAG,IAAI,SAAS,QAAQ;AACjC,uBAAS,KAAK,EAAE,MAAM,aAAa;;AAEvC,mCAAuB,iBAAiB,QAAQ,EAAE,OAAO,EAAE,OAAO,cAAc;AAChF,uBAAW,aAAa,eAAe,UAAU,EAAE;AACnD,iCAAqB,aAAa,QAAQ,IAAI,SAAS;AACvD,yBAAa,SAAS;;AAGtB,uBAAW,gBAAc,GAAG,cAAc;;AAE9C,iBAAO,kBAA8B,KAAK,QAAQ;;AAEtD,oCAAwC,OAAO,MAAM;AACrD,2CAAmC,2BAAuC,SAAS,OAAO;AAC1F,uBAAe;AACf,YAAI;AAEA,qBAAW,sBAAkC,aAAa;;AAE9D;AACA,YAAI;AACA,2BAAiB,aAAa,QAAQ,IAAI,SAAS;AACnD,yBAAe,SAAS;AACxB,4BAAkB,WAAW,QAAQ,eAAmB,cAAc,UAAU,EAAE;AAClF,gBAAM,aAAa,eAAe,UAAU,EAAE;AAC9C,0BAAgB,aAAa,QAAQ,IAAI,IAAI;AAC7C,kBAAQ,SAAS;;AAGjB,gBAAM,UAAQ,UAAU,aAAa,UAAU;;AAEnD,YAAI;AACA,uBAAa,8BAA8B;;AAE/C,eAAO;;;AC3Ef;;;;;;;;;;;;;;;;AAoBO;AACH,aAAQ,QAAQ,mBAAS,SAAU;AACnC,aAAQ,KAAM;AACd,yBAAiB,GAAG;AACpB,aAAQ,YAAY,SAAS,WAAK,mBAAoB;AACtD,wBAAkB;AAClB,cAAY,gCAA4C,SAAS,YAAY,MAAM,wEAChE,0BAA0B;AAC7C,uBAAiB,mBAA+B,EAAE,OAAO,YAAY,SAAS,WAAW,MAAK;AAC9F,UAAI,SAAS,gBAAgB,KAAK,SAAS,iBAAiB,KACxD,aAAiB,SAAS,SAAS,SAAS;AAC5C,eAAO,WAAS,CAAE,QAAQ,CAAE,IAAK;;AAErC,6BAAuB,IAAI,cAAc,UAAU,OAAO;AAC1D,aAAO,SAAQ,gBAAgB,gBAAgB,CAAC,IAAI,EAAE;;AAEnD,4BAAsB;MACzB,YAAY;MACZ,aAAa;MACb,YAAY;;ACvChB;;;;;;;;;;;;;;;;AAoBO;AACH,aAAQ,QAAQ,mBAAS,SAAU;AACnC,aAAQ,IAAI,eAAO,UAAW;AAC9B,gBAAU;AACV,yBAAiB,CAAC,QAAO,SAAS;AAClC,aAAQ,YAAY,SAAS,WAAK,mBAAoB;AACtD,uBAAiB,mBAA+B,EAAE,OAAO,YAAY,SAAS,GAAmB,MAAK;AACtG,2BAAqB;AACrB,sCAAgC,IAAI,cAAc,UAAU,OAAO;AACnE,gCAAyB,SAAQ,gBAAgB,yBAAyB,CAAC,IAAI,EAAE;AACjF,qCAA+B,IAAI,yBAAyB;AAC5D,qBAAe,SAAQ,gBAAgB,wBAAwB,CAAC,IAAI,oBAAmB,EAAE;AACzF,eAAQ,8BAA8B;AACtC,aAAO;;AAEJ,oCAA8B;MACjC,YAAY;MACZ,aAAa;MACb,YAAY;;ACtChB;;;;;;;;;;;;;;;;AAiBO;AACH,oBAAc,IAAI,cAAc,UAAU,OAAO;AACjD,yBAAmB,SAAQ,gBAAgB,SAAS,CAAC,IAAI;AACzD,gBAAU,IAAI,cAAc,UAAU,OAAO,MAAM,MAAM;AACzD,0BAAoB,SAAQ,gBAAgB,SAAS,CAAC,IAAI;AAC1D,aAAO,CAAC,YAAY;;ACtBxB;;;;;;;;;;;;;;;;AAmBO,sCAAgC;MACnC,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,OAAO;AAC1B,eAAQ,KAAM;AACd,eAAQ,YAAY,SAAS,WAAK,uBAAwB;AAC1D,6BAAqB;AACrB,gBAAY,EAAE,MAAM,WAAW,GAAG,MAAM,uDAAuD,EAAE,MAAM;AACvG,0BAAkB,CAAC,GAAG;AACtB,gBAAY,gCAA4C,SAAS,YAAY,MAAM,wEAChE,0BAA0B;AAC7C,yBAAiB,mBAA+B,EAAE,OAAO,YAAY,SAAS,WAAW;AACzF,kCAA0B,wBAAsB,GAAG,qBAAqB,UAAU;AAClF,eAAO,CAAC,QAAQ;;;AChCxB;;;;;;;;;;;;;;;;AAmBO;AACH,qBAAe,eAAmB;AAClC,oBAAc,eAAmB,EAAE;AACnC,wBAAkB,QAAQ;AAC1B,4BAAsB,UAAQ,CAAE,QAAQ,CAAE,IAAK,OAAO,CAAE,OAAO,CAAC,WAAW,UAAW;AACtF,sBAAgB,OAAO,eAAe,WAAW,QAAQ;AACzD,6BAAuB,UAAQ,CAAE,QAAQ,CAAE,GAAG,UAAW,OAAO,CAAE,OAAO,WAAY;AACrF,eAAQ,8BAA8B;AACtC,eAAQ,8BAA8B;AACtC,aAAO;;AC5BX;;;;;;;;;;;;;;;;AAmBO,uBAAmB;MACtB,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,OAAO;AAC1B,eAAQ,KAAM;AACd,eAAQ,UAAU,QAAS;AAC3B,6BAAqB;AACrB,sBAAc,EAAE,MAAM;AACtB,yBAAiB,gBAAoB,MAAM,EAAE;AAC7C,mBAAW;AACX,6BAAqB,oBAAgC,MAAM;AAC3D,sCAA8B,gBAAgB;AAC9C,mCAA2B,aAAa,mBAAmB,CAAC;AAC5D,8BAAsB;AACtB,wBAAgB;AAChB,YAAI;AACA,cAAI;AACA,6BAAiB,aAAa,QAAQ,IAAI,UAAU;AACpD,2BAAe,SAAS;AACxB,6BAAiB,IAAI,MAAM;AAC3B,yBAAa,GAAG,IAAI,SAAS,QAAQ;AACjC,uBAAS,KAAK,EAAE,MAAM,aAAa;;AAEvC,oCAAwB,iBAAiB,QAAQ,EAAE,OAAO,EAAE,OAAO,cAAc;
AACjF,wBAAY,aAAa,eAAe,UAAU,EAAE;AACpD,kCAAsB,aAAa,QAAQ,IAAI,UAAU;AACzD,0BAAc,SAAS;;AAGvB,wBAAY,gBAAc,GAAG,cAAc;;AAE/C,wBAAc,KAAK;AACnB,iBAAO,kBAA8B,KAAK,QAAQ;;AAEtD,oCAAwC,OAAO,MAAM;AACrD,4CAAoC,2BAAuC,UAAU,OAAO;AAC5F,uBAAe;AACf,YAAI;AAEA,qBAAW,sBAAkC,cAAc;;AAE/D,oBAAY,SAAS,WAAW,aAAa,UAAU;AACvD,wBAAgB;AACZ,uBAAa,8BAA8B;;AAE/C,eAAO;;;AChEf;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc,SAAS,IAAI,WAAU,GAAE,KAAqB,OAAO,KAAK,GAAE;AAC/E,qBAAa,OAAO;AACpB,sBAAc,kBAAkB;AAChC,sBAAc,SAAS,IAAI,QAAK,GAAE,IAAI,KAAK;AAC3C,oBAAY,SAAS,IAAI,WAAU,GAAE,KAAK,OAAO,IAAI,KAAK;AAC1D,+BAAuB,CAAC,aAAa,aAAa,aAAa,aAAa,MAAM,GAAG;AACrF,uBAAe,SAAS,YAAY,IAAI;AACxC,YAAI,SAAS;AACT,eAAK,WAAW;sBACN;oBACF;;;;;wCAKoB;;4CAEI;;;;;AAKhC;;AAEJ,aAAK,WAAW;QAChB,iBAAiB,SAAS;QAC1B,eAAe,SAAS;;;UAGtB;8BACoB;;iDAEmB;;qDAEI;;;UAG3C;yBACe;;;;;AC1DzB;;;;;;;;;;;;;;;;;MA0DI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,aAAK,cAAc,SAAS,IAAI,WAAU,GAAE,KAAqB,OAAO,KAAK,GAAE;AAC/E,qBAAa,OAAO;AACpB,sBAAc,kBAAkB;AAChC,sBAAc,SAAS,IAAI,QAAK,GAAE,IAAI,KAAK;AAC3C,oBAAY,SAAS,IAAI,WAAU,GAAE,KAAK,OAAO,IAAI,KAAK;AAC1D,wBAAe,YAAY,MAAM;AACjC,uBAAe,YAAY,UAAU;AACrC,uBAAe,GAAG,QAAO,OAAO,QAAQ,KAAK,YAAY,OAAO;AAChE,0BAAkB,SAAS,IAAI,WAAW,QAAQ,OAAO,MAAM,IAAI;AACnE,uBAAe,SAAS,YAAY,IAAI;AACxC,uBAAe;AACf,YAAI,SAAS;AACT,2BAAiB;UACnB;;0CAEgC;;8CAEI;;;;AAIlC,qBAAW;UACb;UACA;sCAC4B,OAAO,YAAY;UAC/C,QAAO,OAAO;aACX;YACD;wCAC4B,OAAO,YAAY;;;;AAK/C,2BAAiB;UACnB;UACA,cAAc;UACd,eAAe;UACf;;6CAEmC;kDACK;;;AAGtC,qBAAW;UACb;UACA;sCAC4B,OAAO,YAAY;UAC/C,QAAO,OAAO;aACX;YACD;wCAC4B,OAAO,YAAY;;;UAGjD,QAAO,OAAO;aACX,QAAO,OAAO,QAAQ,KAAK,YAAY,OAAO;YAC/C;wCAC4B,OAAO,YAAY;YAC/C,QAAO,OAAO;eACX;cACD;0CAC4B,OAAO,YAAY;;;;;AAKrD,aAAK,WAAW;cACV,iBAAiB,SAAS;cAC1B,eAAe,SAAS;;;UAG5B;;UAEA;;;;;;ACtIV;;;;;;;;;;;;;;;;AAmBO,gCAA4B,EAAG,QAAQ,mBAAS;AACnD,aAAQ,KAAM;AACd,aAAQ,UAAU,QAAS;AAC3B,sBAAgB,OAAM,QAAQ,iCAC1B,IAAI,uBAAuB,EAAE,OAAO,UAAU,QAC9C,IAAI,iBAAiB,EAAE,OAAO,UAAU;AAC5C,qBAAe,SAAQ,gBAAgB,SAAS,CAAC,IAAI,EAAE;AACvD,aAAO;;AAEJ,8BAAwB;MAC3B,YAAY;MACZ,aAAa;MACb,YAAY;;AC/BhB;;;;;;;;;;;;;;;;AAqBO,6BAAyB;MAC5B,MAAM;MACN,MAAM;;;MAGN;AACI,aAAK,gBAAgB,CAAC,SAAS,SAAS,SAAS;AACjD,aAAK,cAAc,4BAAwC,QAAQ;AACnE,aAAK,WAAW;;;UAGd;;;;;;;;;;;;;AChCV;;;;;;;;;;;;;;;;AAuBA,gBAAY;AACL;AACH,aAAQ,QAAQ,qBAAY;AAC5B,aAAQ,GAAG,KAAM;AACjB,oBAAc,YAAwB,EAAE,OAAO,EAAE;AACjD,UAAI,EAAE,UAAU;AACZ,sBAAc,SAAQ,QAAQ,IAAI,EAAE;AACpC,sBAAc,SAAQ,QAAQ,IAAI,EAAE;AACpC,4BAAoB,IAAI,uBAAuB,iBAAsC,MAAM,EAAE,OAAO,EAAE;AACtG,4BAAoB,IAAI,uBAAuB,iBAAsC,MAAM,EAAE,OAAO,EAAE;AACtG,wBAAe;UACX;YACI,QAAQ,MAAM,mBAAmB,KAAK;YACtC,OAAO,MAAM,mBAAmB,KAAK;YACrC,OAAO,EAAE;;UAEb;YACI,QAAQ,MAAM,mBAAmB,KAAK;YACtC,OAAO,MAAM,mBAAmB,KAAK;YACrC,OAAO,EAAE;;UAEb;YACI,QAAQ,MAAM,mBAAmB,KAAK;YACtC,OAAO,MAAM,mBAAmB,KAAK;YACrC,OAAO,EAAE;;UAEb;YACI,QAAQ,MAAM,mBAAmB,KAAK;YACtC,OAAO,MAAM,mBAAmB,KAAK;YACrC,OAAO,EAAE;;;AAGjB,yBAAiB,SAAQ,gBAAgB,aAAa,SAAQ;AAC9D,yBAAiB,SAAQ,gBAAgB,aAAa,SAAQ;AAC9D,8BAAsB,UAAQ,CAAE,QAAQ,CAAE,MAAM,UAAU,MAAM,WAAY;AAC5E,iBAAQ,8BAA8B;AACtC,iBAAQ,8BAA8B;AAEtC,eAAO;;AAEX,UAAI,SAAQ,mBAAmB,CAAC,GAAG;AAC/B,sBAAc,SAAQ,QAAQ,IAAI,EAAE;AACpC,sBAAc,SAAQ,QAAQ,IAAI,EAAE;AACpC,sCAA8B,gBAAY,EAAE,OAAO,EAAE,OAAO,MAAM,QAAQ,MAAM,QAAQ;AACxF,oBAAY,SAAQ,eAAe,UAAU;AAC7C,wBAAgB,SAAQ,QAAQ,IAAI,IAAI;AACxC,gBAAQ,SAAS;AACjB,eAAO;;AAEX;AACA,UAAI,OAAM,QAAQ;AACd,kBAAU,IAAI,sBAAsB,KAAK,EAAE,OAAO,EAAE;;AAGpD,kBAAU,IAAI,gBAAgB,KAAK,EAAE,OAAO,EAAE;;AAElD,aAAO,SAAQ,gBAAgB,SAAS,CAAC,GAAG,IAAI;;AAE7C,6BAAuB;MAC1B,YAAY;MACZ,aAAa;MACb,YAAY;;ACpFhB;;;;;;;;;;;;;;;;AAiBO,uCAAkC;MACrC,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,mBAAS;AAC5B,cAAkB;AAElB,eAAQ,OAAO,UAAW;AAC1B,eAAQ,eAAe,cAAc,kBAAm
B;AACxD,2BAAmB;AACnB,0BAAkB,WAAW,SAAS,MAAM;AAC5C,2BAAmB,WAAW,SAAS,OAAO;AAC9C,iCAAyB;AACzB,gCAAwB;AACxB,kCAA0B;AAC1B,eAAO,wBAAqC,WAAW,YAAY,kBAAkB,iBAAiB;;;AC/B9G;;;;;;;;;;;;;;;;AAiBA,sCAAgC;AACzB,wCAAkC;MACrC,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,mBAAS;AAC5B,cAAkB;AAElB,eAAQ,OAAO,UAAW;AAC1B,eAAQ,eAAe,cAAc,gBAAgB,sBAAuB;AAC5E,2BAAmB;AACnB,0BAAkB,WAAW,SAAS,MAAM;AAC5C,2BAAmB,WAAW,SAAS,OAAO;AAC9C,eAAQ,iBAAiB,gBAAiB,0BAAwB,WAAW,YAAY,eAAe,cAAc,gBAAgB;AACtI,eAAO,CAAC,iBAAiB;;;AC9BjC;;;;;;;;;;;;;;;;AAkBA,sCAAgC;AACzB,wCAAkC;MACrC,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,mBAAS;AAC5B,cAAkB;AAElB,eAAQ,OAAO,UAAW;AAC1B,eAAQ,eAAe,cAAc,gBAAgB,gBAAiB;AACtE,2BAAmB;AACnB,0BAAkB,WAAW,SAAS,MAAM;AAC5C,2BAAmB,WAAW,SAAS,OAAO;AAC9C,iCAAyB;AACzB,gCAAwB;AACxB,kCAA0B;AAC1B,gCAAwB;AACxB,eAAQ,iBAAiB,kBAAmB,0BAAwB,WAAW,YAAY,kBAAkB,iBAAiB,mBAAmB;AACjJ,eAAO,CAAC,iBAAiB;;;ACnCjC;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc;AACnB,4BAAoB,WAAW;AAC/B,2BAAmB,WAAW;AAC9B,0BAAkB,KAAK,IAAI,SAAS,QAAQ;AAC5C,0BAAkB,KAAK,IAAI,SAAS,QAAQ;AAC5C,aAAK,cAAc;AACnB,mCAA2B,gBAA4B,QAAQ,aAAa;AAC5E,8BAAsB,QAAQ,QAAQ;AACtC,8BAAsB,QAAQ,QAAQ;AACtC,0BAAkB;AAClB,YAAI,OAAO,cAAc;AACrB,wBAAc,uBAAuB,UAAU,QAAQ;;AAGvD,wBAAc;2BACC,UAAU,KAAK;;;AAGlC,aAAK,WAAW;;;;;4CAKoB,oBAAoB,2BAA2B,oBAAoB;4CACnE,oBAAoB,2BAA2B,oBAAoB;iDAC9D;iDACA;YACrC;uCAC2B,yCAAyC;;;;;;;;AChDhF;;;;;;;;;;;;;;;;AAkBO,qCAA+B;MAClC,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,OAAO;AAC1B,eAAQ,iBAAU;AAClB,eAAQ,SAAS,WAAW,UAAW;AACvC,6BAAqB;AACrB,wBAAgB,IAAI,cAAc,OAAM,OAAO,SAAS,WAAW;AACnE,uBAAe,aAAa,gBAAgB,SAAS,CAAC,SAAQ,OAAM;AACpE,eAAO;;;AC3Bf;;;;;;;;;;;;;;;;AAkBA,gBAAY,0BAA0B;;;AAG/B,kBAAY,kBAAgB;AAC5B,wBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY;;ACzBhB;;;;;;;;;;;;;;;;AAkBA,mBAAe;AACR,qBAAe,kBAAgB;AAC/B,2BAAqB;MACxB,YAAY;MACZ,aAAa;MACb,YAAY;;ACvBhB;;;;;;;;;;;;;;;;AAkBA,iCAA2B;AACpB,gCAA0B,mBAAiB,CAAE,WAAW,sBAAoB,iBAAiB;AAC7F,sCAAgC;MACnC,YAAY;MACZ,aAAa;MACb,YAAY;;ACvBhB;;;;;;;;;;;;;;;;AAmBA,gBAAY;AACL,0BAAsB,mBAAiB;MAC1C,WAAW;MACX,iBAAiB;MACjB,iBAAiB;MACjB,eAAe;;AAEZ,wBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY;;AC7BhB;;;;;;;;;;;;;;;;AAkBA,gBAAY;AACL,kBAAY,kBAAgB;AAC5B,wBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY;;ACvBhB;;;;;;;;;;;;;;;;AAmBO,8BAAwB;MAC3B,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,OAAO;AAC1B,eAAQ,KAAM;AACd,eAAQ,QAAS;AACjB,6BAAqB;AACrB,sBAAc,EAAE,MAAM;AACtB,yBAAiB,IAAI,MAAM;AAC3B,qBAAa,GAAG,IAAI,SAAS,QAAQ;AACjC,mBAAS,KAAK,EAAE,MAAM,KAAK;;AAE/B;AACA,YAAI,aAAa,mBAAmB,CAAC;AACjC,2BAAiB,aAAa,QAAQ,IAAI,EAAE;AAC5C,yBAAe,SAAS;AACxB,4BAAkB,iBAAa,QAAQ,EAAE,OAAO,EAAE,OAAO,MAAM;AAC/D,gBAAM,aAAa,eAAe,UAAU,EAAE;AAC9C,0BAAgB,aAAa,QAAQ,IAAI,IAAI;AAC7C,kBAAQ,SAAS;;AAGjB,gBAAM,gBAAc,GAAG,MAAM;;AAEjC,eAAO;;;AC3Cf;;;;;;;;;;;;;;;;AAmBO;AACH,aAAQ,QAAQ,OAAO,qBAAY;AACnC,aAAQ,QAAS;AACjB,aAAQ,KAAM;AACd,yBAAiB,GAAG;AAEpB,cAAQ,KAAK,aAAa;AAC1B,qBAAe,SAAQ,SAAS,EAAE;AAClC,aAAQ,cAAc,aAAa,WAAY,cAAc,QAAQ,MAAM,EAAE,OAAO,EAAE;AACtF,aAAO;QACH,SAAQ,eAAe,aAAa,EAAE,OAAO;QAC7C,SAAQ,eAAe,CAAC,QAAQ,SAAS,SAAS;;;AAGnD,2BAAqB;MACxB,YAAY;MACZ,aAAa;MACb,YAAY;;ACpChB;;;;;;;;;;;;;;;;AAuDA,4BAAsB;MAClB;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;;AAEJ,+BAA2B;AACvB,sBAAe;;AC/FnB;;;;;;;;;;;;;;;;ACAA;AAGA,sBAAgB;ACHhB;;;;;;;;;;;;;;;;sBAsCuB;MACrB,aAAa;MACb,oBAAoB;MACpB,sBAAsB;MACtB,aAAa;MACb,eAAe;MACf,kBAAkB;MAClB,MAAQ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AC7CV,IAAA;AAAA;;;ACAA,IAAA;AAAA;;;ACAA,IAAA;AAAA;;;ACAA,IAAA;AACA,sCAAqC;AACnC,qBAAiB,OAAO,aAAa,eAAe,SAAS,gBAAgB,SAAS,cAAc,MAAM;AAC1G,QAAI,OAAO,eAAe;AAAa,mBAAa,cAAc;AAClE,WACF;AACE,uCAAgC,kCAAiC;AAEnE;AAA4B,YAAG,WAAW,UAAQ;AAAQ,qCAA2B,WAAW;AAAA;AAAQ,eAAO;AAAA;AAAM;AAA4B,YAAG,WAAW,UAAQ;AAAQ,qCAA2B,WAAW;AAAA;AAAQ,eAAO;AAAA;AAAO;AAA6B,YAAG,WAAW,UAAQ;AAAQ,qCAA2B,WAAW;AAAA;AAAQ,eAAO;AAAA;AAAO;AAA6B,YAAG,WAAW,UAAQ;AAAQ,qCAA2B,WAAW;AAAA;AAAQ,eAAO;AAAA;AAAQ;AAA6B,YAAG,WAAW,UAAQ;AAAQ,qCAA2B,WAAW;AAAA;AAAQ,eAAO;AAAA;AAAQ,mBAAW,OAAO,mCAAgC,cAAY,iCAA8B;AAAG,4BAAoB;AAAG;AAAQ,WAAI,OAAO;AAAQ,YAAG,OAAO,eAAe;AAAM,0BAAgB,OAAK,OAAO;AAAA;AAAA;AAAM,uBAAe;AAAG,wBAAgB;AAAiB,kBAAU;AAAyB,cAAM;AAAA;AAAS,+BAAuB;AAAM,kCAA0B;AAAM,gCAAwB;AAAM,iCAAyB;AAAM,2BAAmB,OAAO,WAAS;AAAS,8BAAsB,OAAO,kBAAgB;AAAW,4BAAoB,OAAO,YAAU,YAAU,OAAO,QAAQ,aAAW,YAAU,OAAO,QAAQ,SAAS,SAAO;AAAS,6BAAqB,CAAC,sBAAoB,CAAC,uBAAqB,CAAC;AAAsB,mCAA2B,OAAO,6BAA2B;AAAM,UAAG;AAAwB,kBAAO,OAAO;AAAU,uBAAa,OAAO;AAAgB,yBAAe,OAAO;AAAA;AAAkB,4BAAoB;AAAG;AAA0B,YAAG,OAAO;AAAe,iBAAO,OAAO,cAAc,MAAK;AAAA;AAAiB,eAAO,kBAAgB;AAAA;AAAK;AAA8C;AAAW;AAAa,UAAG;AAAqB,YAAG;AAAuB,4BAAgB,eAAgB,QAAQ,mBAAiB;AAAA;AAAS,4BAAgB,YAAU;AAAA;AAAI,gBAAM;AAAqC,cAAG,CAAC;AAAO,qBAAO;AAAc,cAAG,CAAC;AAAS,uBAAS;AAAgB,qBAAS,SAAS,aAAa;AAAU,iBAAO,OAAO,gBAAgB,UAAS,SAAO,OAAK;AAAA;AAAS,qBAAW;AAA8B,oBAAQ,MAAM,UAAS;AAAM,cAAG,CAAC,IAAI;AAAQ,kBAAI,IAAI,WAAW;AAAA;AAAK,kBAAO,IAAI;AAAQ,iBAAO;AAAA;AAAK,YAAG,QAAQ,QAAQ,SAAO;AAAG,wBAAY,QAAQ,QAAQ,GAAG,QAAQ,OAAM;AAAA;AAAK,qBAAW,QAAQ,QAAQ,MAAM;AAAG,gBAAQ,MAAM,qBAAoB;AAAa,cAAG,CAAE,eAAc;AAAa,kBAAM;AAAA;AAAA;AAAM,gBAAQ,MAAM,sBAAqB;AAAO,gBAAM;AAAiB,kBAAQ,QAAQ;AAAA;AAAS,eAAO,aAAW;AAAW,iBAAM;AAAA;AAA8B;AAAsB;AAAI,8BAAkB;AAAA;AAAmC,kBAAQ,MAAM;AAA2G,gBAAM;AAAA;AAAE,iBAAO,kBAAkB;AAAA,iBAAe;AAAsB,YAAG,OAAO,QAAM;AAAa,kBAAM;AAAuB,mBAAO,KAAK;AAAA;AAAA;AAAI,qBAAW;AAAuB;AAAS,cAAG,OAAO,eAAa;AAAY,mBAAO,IAAI,WAAW,WAAW;AAAA;AAAI,kBAAK,KAAK,GAAE;AAAU,kBAAO,OAAO,UAAO;AAAU,iBAAO;AAAA;AAAM,YAAG,OAAO,cAAY;AAAa,uBAAW;AAAA,mBAAmB,OAAO,aAAW;AAAa,uBAAW;AAAA;AAAU,YAAG,OAAO,SAAO;AAAY,kBAAM;AAAiB,iBAAK;AAAA;AAAA;AAAS,YAAG,OAAO,UAAQ;AAAa,cAAG,OAAO,YAAU;AAAY,sBAAQ;AAAG,kBAAQ,MAAI;AAAM,kBAAQ,OAAK,QAAQ,QAAM,OAAO,aAAW,cAAY,WAAS;AAAA;AAAA,iBAAe,sBAAoB;AAAuB,YAAG;AAAuB,4BAAgB,KAAK,SAAS;AAAA,mBAAa,SAAS;AAAe,4BAAgB,SAAS,cAAc;AAAA;AAAI,YAAG;AAAY,4BAAgB;AAAA;AAAW,YAAG,gBAAgB,QAAQ,aAAW;AAAG,4BAAgB,gBAAgB,OAAO,GAAE,gBAAgB,YAAY,OAAK;AAAA;AAAQ,4BAAgB;AAAA;AAAG,YAAG;AAAqB,kBAAM;AAAqC,gBAAG,CAAC;AAAO,uBAAO;AAAc,gBAAG,CAAC;AAAS,yBAAS;AAAgB,uBAAS,SAAS,aAAa;AAAU,mBAAO,OAAO,gBAAgB,UAAS,SAAO,OAAK;AAAA;AAAS,uBAAW;AAA8B,sBAAQ,MAAM,UAAS;AAAM,gBAAG,CAAC,IAAI;AAAQ,oBAAI,IAAI,WAAW;AAAA;AAAK,oBAAO,IAAI;AAAQ,mBAAO;AAAA;AAAA;AAAU,kBAAM;AAAyB,sBAAQ,IAAI;AAAe,gBAAI,KAAK,OAAM,KAAI;AAAO,gBAAI,KAAK;AAAM,mBAAO,IAAI;AAAA;AAAc,cAAG;AAAuB,yBAAW;AAAyB,wBAAQ,IAAI;AAAe,kBAAI,KAAK,OAAM,KAAI;AAAO,kBAAI,eAAa;AAAc,kBAAI,KAAK;AAAM,qBAAO,IAAI,WAAW,IAAI;AAAA;AAAA;AAAW,sBAAU;AAAuC,sBAAQ,IAAI;AAAe,gBAAI,KAAK,OAAM,KAAI;AAAM,gBAAI,eAAa;AAAc,gBAAI,SAAO;AAAsB,kBAAG,IAAI,UAAQ,OAAK,IAAI,UAAQ,KAAG,IAAI;AAAU,uBAAO,IAAI;AAAU;AAAA;AAAO;AAAA;AAAW,gBAAI,UAAQ;AAAQ,gBAAI,KAAK;AAAA;AAAA;AAAO,yBAAe;AAAgB,mBAAS,QAAM;AAAA;AAAA;AAAA;AAAa,UAAG;AAAqB,YAAG,OAAO,gBAAc;AAAa,wBAAY,qBAAsB;AAAA;AAAA;AAAa,gBAAQ,OAAO,YAAU,QAAQ,IAAI,KAAK;AAAS,gBAAQ,OAAO,eAAa,QAAQ,KAAK,KAAK;AAAS,WAAI,OAAO;AAAiB,YAAG,gBAAgB,eAAe;AAAM,iBAAO,OAAK,gBAAgB;AAA
A;AAAA;AAAM,wBAAgB;AAAK,UAAG,OAAO;AAAa,qBAAW,OAAO;AAAa,UAAG,OAAO;AAAe,sBAAY,OAAO;AAAe,UAAG,OAAO;AAAQ,gBAAM,OAAO;AAAQ,yBAAiB,QAAQ;AAAK,0BAAkB,QAAQ;AAAM,oCAA4B,QAAQ;AAAgB;AAAe,UAAG,OAAO;AAAc,qBAAW,OAAO;AAAc;AAAkB,UAAG,OAAO;AAAiB,wBAAc,OAAO;AAAiB,UAAG,OAAO,gBAAc;AAAU,YAAI;AAAA;AAAmC;AAAe,sBAAc,IAAI,YAAY,MAAM,CAAC,SAAU,KAAI,SAAU,MAAI,GAAE,SAAU;AAAY;AAAe,6BAAqB;AAAE,yBAAiB;AAAE,kBAAU;AAAM,uBAAe;AAAE;AAAgC,YAAG,CAAC;AAAW,gBAAM,uBAAqB;AAAA;AAAA;AAAO;AAAyB,mBAAS,OAAO,MAAI;AAAO,gBAAO,MAAK,kCAAgC,QAAM;AAA8B,eAAO;AAAA;AAAK;AAAoD,kBAAQ,CAAC,QAAS;AAAc,qBAAQ;AAAE,cAAG,QAAM,QAAM,QAAM,UAAW,QAAM;AAAG,sBAAS,KAAI,UAAQ,KAAG;AAAE,mBAAI,WAAW;AAAK,yBAAa,KAAI,MAAI;AAAA;AAAK,iBAAO;AAAA,WAAK,OAAQ;AAAc,qBAAQ,WAAW,IAAI;AAAQ,6BAAmB,KAAI;AAAK,iBAAO;AAAA;AAAM;AAAiC,cAAG,eAAa;AAAS,mBAAO,aAAa;AAAK,cAAG,eAAa;AAAU,mBAAO,QAAQ;AAAK,iBAAO;AAAA;AAAI,mBAAS,SAAS;AAAO,oBAAU;AAAG,oBAAU;AAAE,YAAG;AAAM,uBAAU,GAAE,IAAE,KAAK,QAAO;AAAK,4BAAc,IAAI,SAAS;AAAI,gBAAG;AAAW,kBAAG,UAAQ;AAAE,wBAAM;AAAY,oBAAM,KAAG,UAAU,KAAK;AAAA;AAAS,oBAAM,KAAG,KAAK;AAAA;AAAA;AAAA;AAAK,kBAAQ,KAAK,MAAM,MAAK;AAAO,cAAI,mBAAmB;AAAK,YAAG,UAAQ;AAAE,uBAAa;AAAO,eAAO;AAAA;AAAI;AAA+C,mBAAS,YAAU;AAAG,0BAAgB,SAAS,MAAM;AAAe,iBAAO,SAAO;AAAA;AAAW,yBAAe,eAAa;AAAS,YAAG,cAAY,eAAa,CAAC;AAAM,iBAAO,SAAS;AAAA;AAAO,eAAO;AAAW,iBAAO,MAAM,OAAM,YAAW,UAAS,WAAU;AAAA;AAAA;AAAO;AAAoD,qBAAW,MAAI;AAAe,kBAAQ;AAAG,eAAM,CAAE,QAAK;AAAS,mBAAO,KAAK;AAAO,cAAG,CAAC;AAAG,mBAAO;AAAI,cAAG,CAAE,MAAG;AAAM,mBAAK,OAAO,aAAa;AAAI;AAAA;AAAS,mBAAO,KAAK,SAAO;AAAG,cAAI,MAAG,QAAM;AAAK,mBAAK,OAAO,aAAc,MAAG,OAAK,IAAE;AAAI;AAAA;AAAS,mBAAO,KAAK,SAAO;AAAG,cAAI,MAAG,QAAM;AAAK,iBAAI,MAAG,OAAK,KAAG,MAAI,IAAE;AAAA;AAAQ,iBAAI,MAAG,MAAI,KAAG,MAAI,KAAG,MAAI,IAAE,KAAK,SAAO;AAAA;AAAG,cAAG,KAAG;AAAO,mBAAK,OAAO,aAAa;AAAA;AAAS,qBAAO,KAAG;AAAM,mBAAK,OAAO,aAAa,QAAM,MAAI,IAAG,QAAM,KAAG;AAAA;AAAA;AAAO,eAAO;AAAA;AAAI;AAA0C,eAAO,MAAI,kBAAkB,oBAAmB,KAAI,kBAAgB;AAAA;AAAG;AAA4D,YAAG,CAAE,mBAAgB;AAAG,iBAAO;AAAE,uBAAa;AAAO,qBAAW,SAAO,kBAAgB;AAAE,qBAAU,GAAE,IAAE,IAAI,QAAO,EAAE;AAAG,kBAAM,IAAI,WAAW;AAAG,cAAG,KAAG,SAAO,KAAG;AAAO,qBAAO,IAAI,WAAW,EAAE;AAAG,gBAAE,QAAQ,MAAE,SAAO,MAAI,KAAG;AAAA;AAAK,cAAG,KAAG;AAAK,gBAAG,UAAQ;AAAO;AAAM,iBAAK,YAAU;AAAA,qBAAU,KAAG;AAAM,gBAAG,SAAO,KAAG;AAAO;AAAM,iBAAK,YAAU,MAAI,KAAG;AAAE,iBAAK,YAAU,MAAI,IAAE;AAAA,qBAAW,KAAG;AAAO,gBAAG,SAAO,KAAG;AAAO;AAAM,iBAAK,YAAU,MAAI,KAAG;AAAG,iBAAK,YAAU,MAAI,KAAG,IAAE;AAAG,iBAAK,YAAU,MAAI,IAAE;AAAA;AAAQ,gBAAG,SAAO,KAAG;AAAO;AAAM,iBAAK,YAAU,MAAI,KAAG;AAAG,iBAAK,YAAU,MAAI,KAAG,KAAG;AAAG,iBAAK,YAAU,MAAI,KAAG,IAAE;AAAG,iBAAK,YAAU,MAAI,IAAE;AAAA;AAAA;AAAI,aAAK,UAAQ;AAAE,eAAO,SAAO;AAAA;AAAS;AAAkD,eAAO,kBAAkB,KAAI,oBAAmB,QAAO;AAAA;AAAiB;AAA8B,kBAAQ;AAAE,qBAAU,GAAE,IAAE,IAAI,QAAO,EAAE;AAAG,kBAAM,IAAI,WAAW;AAAG,cAAG,KAAG,SAAO,KAAG;AAAM,gBAAE,QAAQ,MAAE,SAAO,MAAI,IAAI,WAAW,EAAE,KAAG;AAAK,cAAG,KAAG;AAAI,cAAE;AAAA,mBAAY,KAAG;AAAK,mBAAK;AAAA,mBAAU,KAAG;AAAM,mBAAK;AAAA;AAAO,mBAAK;AAAA;AAAE,eAAO;AAAA;AAAI;AAA0C,2BAAmB,IAAI,OAAM;AAAA;AAAQ,2BAAmB;AAAM;AAA6B,YAAG,IAAE,WAAS;AAAG,eAAG,WAAS,IAAE;AAAA;AAAS,eAAO;AAAA;AAAE;AAAsE;AAAyC,kBAAO;AAAI,eAAO,WAAS,QAAM,IAAI,UAAU;AAAK,eAAO,YAAU,SAAO,IAAI,WAAW;AAAK,eAAO,YAAU,SAAO,IAAI,WAAW;AAAK,eAAO,YAAU,SAAO,IAAI,WAAW;AAAK,eAAO,aAAW,UAAQ,IAAI,YAAY;AAAK,eAAO,aAAW,UAAQ,IAAI,YAAY;AAAK,eAAO,aAAW,UAAQ,IAAI,aAAa;AAAK,eAAO,aAAW,UAAQ,IAAI,aAAa;AAAA;AAAK,uBAAe,oBAAiB,wBAAqB,sBAAmB,0BAAuB;AAAM,UAAG;AAAA;AAAyB,mCAA2B,OAAO,qBAAmB;AAAS,UAAG;AAAwB,qBAAW,OAAO;AAAc,kBAAO,OAAO;AAAA;AAAe,YAAG,OAAO;AAAe,uBAAW,OAAO;AAAA;AAAmB,uBAAW,IAAI,YAAY,OAAO,CAAC,SAAU,yBAAuB,gBAAe,SAAU,aAAW,gBAAe,QAAS;AAAO,cAAG,CAAE,YAAW,kBAAkB;AAAoB,gBAAI;AAA+N,gBAAG;AAAqB,sBAAQ,IAAI;AAAA;AAAqH,kBAAM,MAAM;AAAA;AAAA;AA
AA;AAAgB,UAAG;AAAY,kBAAO,WAAW;AAAA;AAAO,+BAAuB,QAAO;AAAW,iCAA2B;AAAQ,UAAG,CAAC;AAAwB,4BAAoB,kBAAgB,KAAG;AAAA;AAAa;AAAyC,eAAM,UAAU,SAAO;AAAG,yBAAa,UAAU;AAAQ,cAAG,OAAO,YAAU;AAAY,qBAAS;AAAQ;AAAA;AAAS,qBAAS,SAAS;AAAK,cAAG,OAAO,SAAO;AAAU,gBAAG,SAAS,QAAM;AAAW,qBAAO,aAAa;AAAA;AAAW,qBAAO,cAAc,MAAK,SAAS;AAAA;AAAA;AAAW,iBAAK,SAAS,QAAM,SAAU,OAAK,SAAS;AAAA;AAAA;AAAA;AAAO,yBAAiB;AAAG,uBAAe;AAAG,uBAAe;AAAG,uBAAe;AAAG,0BAAkB;AAAG,+BAAuB;AAAM,UAAG;AAAuB,6BAAmB;AAAK;AAAkB,YAAG;AAAuB;AAAO,YAAG,OAAO;AAAW,cAAG,OAAO,OAAO,aAAW;AAAW,mBAAO,YAAU,CAAC,OAAO;AAAW,iBAAM,OAAO,UAAU;AAAQ,wBAAY,OAAO,UAAU;AAAA;AAAA;AAAU,6BAAqB;AAAA;AAAc;AAAuB,6BAAmB;AAAK,6BAAqB;AAAA;AAAY;AAAmB,YAAG;AAAuB;AAAO,6BAAqB;AAAA;AAAY;AAAmB,YAAG;AAAuB;AAAO,YAAG,OAAO;AAAY,cAAG,OAAO,OAAO,cAAY;AAAW,mBAAO,aAAW,CAAC,OAAO;AAAY,iBAAM,OAAO,WAAW;AAAQ,yBAAa,OAAO,WAAW;AAAA;AAAA;AAAU,6BAAqB;AAAA;AAAe;AAAyB,qBAAa,QAAQ;AAAA;AAAI;AAA0B,sBAAc,QAAQ;AAAA;AAAI,sBAAc,KAAK;AAAK,uBAAe,KAAK;AAAM,4BAAoB;AAAE,iCAAyB;AAAK,kCAA0B;AAAK;AAA8B,gBAAO,CAAC,wBAAuB;AAAuD;AAAkB,YAAG,OAAO;AAA2B,iBAAO,0BAA0B;AAAA;AAAA;AAAkB;AAAiC;AAAkB,YAAG,OAAO;AAA2B,iBAAO,0BAA0B;AAAA;AAAiB,YAAG,mBAAiB;AAAG,cAAG,yBAAuB;AAAM,0BAAc;AAAsB,mCAAqB;AAAA;AAAK,cAAG;AAAuB,2BAAa;AAAsB,oCAAsB;AAAK;AAAA;AAAA;AAAA;AAAa,aAAO,qBAAmB;AAAG,aAAO,qBAAmB;AAAG;AAAqB,YAAG,OAAO;AAAY,iBAAO,WAAW;AAAA;AAAM,YAAG;AAAuB,kBAAQ,MAAM,yBAAwB,IAAI,QAAO;AAAO,gBAAM;AAAG,YAAI;AAAM,YAAI;AAAM,gBAAM;AAAK,qBAAW;AAAE,eAAK,WAAS,OAAK;AAA+C,cAAM,IAAI,YAAY,aAAa;AAAA;AAAM;AAA+B,eAAO,OAAO,UAAU,aAAW,IAAI,WAAW,UAAQ,IAAI,QAAQ,YAAU;AAAA;AAAE,0BAAkB;AAAwC;AAA6B,eAAO,UAAU,UAAS;AAAA;AAAe,0BAAkB;AAAU;AAA6B,eAAO,UAAU,UAAS;AAAA;AAAe,2BAAmB;AAAuC,UAAG,CAAC,UAAU;AAAiB,yBAAe,WAAW;AAAA;AAAgB;AAAqB;AAAI,cAAG;AAAY,mBAAO,IAAI,WAAW;AAAA;AAAY,cAAG;AAAY,mBAAO,WAAW;AAAA;AAAqB,kBAAK;AAAA;AAAA;AAA8D,gBAAM;AAAA;AAAA;AAAM;AAA4B,YAAG,CAAC,cAAa,uBAAoB,0BAAwB,OAAO,UAAQ,cAAY,CAAC,UAAU;AAAiB,iBAAO,MAAM,gBAAe,CAAC,aAAY,gBAAgB,KAAK;AAAmB,gBAAG,CAAC,SAAS;AAAO,oBAAK,yCAAuC,iBAAe;AAAA;AAAI,mBAAO,SAAS;AAAA,aAAmB,MAAM;AAAW,mBAAO;AAAA;AAAA;AAAc,eAAO,IAAI,QAAQ;AAAyB,kBAAQ;AAAA;AAAA;AAAe;AAAsB,mBAAS,CAAC,GAAI;AAAe;AAA0C,yBAAY,SAAS;AAAQ,iBAAO,SAAO;AAAQ,uBAAW;AAAO,cAAG,CAAC;AAAwB,mCAAqB,QAAQ,cAAc;AAAO,oBAAQ,cAAc,QAAQ;AAAY,sBAAQ,uBAAuB,GAAE;AAAW,oBAAG,CAAC,EAAE;AAAiB,sCAAoB;AAAA;AAAA;AAAA;AAAA;AAAyB,YAAG,CAAC;AAAwB,2BAAiB;AAAA;AAAoB;AAA2C,0BAAgB,OAAO,aAAY,OAAO;AAAA;AAAW;AAA0C,iBAAO,mBAAmB,KAAK;AAAiB,mBAAO,YAAY,YAAY,QAAO;AAAA,aAAQ,KAAK,UAAS;AAAiB,gBAAI,4CAA0C;AAAQ,kBAAM;AAAA;AAAA;AAAU;AAA4B,cAAG,CAAC,cAAY,OAAO,YAAY,yBAAuB,cAAY,CAAC,UAAU,mBAAiB,CAAC,UAAU,mBAAiB,OAAO,UAAQ;AAAY,kBAAM,gBAAe,CAAC,aAAY,gBAAgB,KAAK;AAAmB,2BAAW,YAAY,qBAAqB,UAAS;AAAM,qBAAO,OAAO,KAAK,2BAA0B;AAAiB,oBAAI,oCAAkC;AAAQ,oBAAI;AAA6C,uCAAuB;AAAA;AAAA;AAAA;AAAoC,mBAAO,uBAAuB;AAAA;AAAA;AAA4B,YAAG,OAAO;AAAoB;AAAI,2BAAY,OAAO,mBAAmB,MAAK;AAAiB,mBAAO;AAAA;AAAiB,gBAAI,wDAAsD;AAAG,mBAAO;AAAA;AAAA;AAAO;AAAmB,eAAM;AAAA;AAAG,uBAAe;AAAG;AAA0B,gBAAQ;AAAA;AAAc,UAAG,CAAC;AAAuB,mBAAW,KAAK,CAAC,MAAK;AAAW;AAAA;AAAwB,0BAAkB;AAAE,6CAAqC;AAAE,6CAAqC;AAAE;AAAoF,qBAAW,aAAW;AAAE,8BAAoB,sBAAoB;AAAE,8BAAoB,sBAAoB;AAAE,wBAAc;AAAW,2CAAiC;AAAoB,2CAAiC;AAAA;AAAoB,aAAO,4BAA0B;AAAuB,wBAAgB,CAAC,OAAM,IAAG,QAAO,IAAG,OAAM,IAAG,OAAM,IAAG,KAAI,IAAG,OAAM,IAAG,OAAM,GAAE,SAAQ,IAAG,OAAM,GAAE,QAAO,IAAG,QAAO,GAAE,aAAY,GAAE,QAAO,IAAG,QAAO,GAAE,QAAO,IAAG,SAAQ,KAAI,OAAM,IAAG,QAAO,IAAG,OAAM,IAAG,QAAO,IAAG,SAAQ,IAAG,QAAO,IAAG,QAAO,IAAG,QAAO,IAAG,QAAO,IAAG,QAAO,IAAG,SAAQ,IAAG,OAAM,IAAG,QAAO,IAAG,QAAO,IAAG,OAAM,IAAG,QAAO,IAAG,OAAM,IAAG,MAAK,IAAG,QAAO,IAAG,QAAO,IAAG,OAAM,IAAG,QAAO,KAAI,UAAS,KAAI,QAAO,KAAI,QAAO,KAAI,QAAO,KAAI,SAAQ,KAAI,QAAO,KAAI,QAAO,KAAI,SAAQ,IAAG,QAAO,IAAG,O
AAM,KAAI,OAAM,KAAI,QAAO,KAAI,QAAO,KAAI,SAAQ,KAAI,SAAQ,KAAI,WAAU,IAAG,QAAO,KAAI,QAAO,KAAI,SAAQ,KAAI,OAAM,KAAI,OAAM,KAAI,QAAO,KAAI,QAAO,KAAI,SAAQ,KAAI,SAAQ,IAAG,MAAK,KAAI,QAAO,KAAI,OAAM,KAAI,QAAO,IAAG,WAAU,IAAG,SAAQ,KAAI,SAAQ,GAAE,UAAS,KAAI,QAAO,KAAI,SAAQ,KAAI,SAAQ,KAAI,SAAQ,KAAI,SAAQ,KAAI,SAAQ,KAAI,UAAS,KAAI,QAAO,IAAG,WAAU,IAAG,cAAa,IAAG,OAAM,IAAG,YAAW,KAAI,cAAa,KAAI,YAAW,IAAG,SAAQ,IAAG,cAAa,GAAE,YAAW,IAAG,UAAS,IAAG,aAAY,IAAG,WAAU,KAAI,cAAa,IAAG,YAAW,GAAE,cAAa,IAAG,aAAY,IAAG,UAAS,IAAG,WAAU,IAAG,WAAU,KAAI,cAAa,IAAG,aAAY,IAAG,UAAS,GAAE,cAAa,IAAG,UAAS,IAAG,iBAAgB,IAAG,iBAAgB,KAAI,eAAc,GAAE,WAAU,IAAG,SAAQ,IAAG,UAAS,IAAG,cAAa,KAAI,QAAO,KAAI,QAAO,IAAG,QAAO,IAAG,SAAQ,KAAI,WAAU,KAAI,QAAO,IAAG,WAAU,IAAG,WAAU,IAAG,iBAAgB,IAAG,YAAW,IAAG,UAAS;AAAK,6CAAqC;AAAM;AAA4C,YAAG,QAAM,KAAG,OAAK,mBAAmB,UAAQ,OAAK,QAAM,QAAM;AAAE,iBAAM;AAAI,YAAG,SAAO;AAAE,iBAAO;AAAE,YAAG,SAAO;AAAW,kBAAM;AAAS,oCAA0B,QAAQ,KAAK,qBAAoB,oCAAkC;AAAG,8BAAoB;AAAE,YAAG,yBAAuB;AAAM,2BAAe,QAAQ,gBAAgB,qBAAoB,oCAAkC,GAAE,uBAAsB;AAAG,cAAG,cAAY;AAAuB,cAAE;AAAM,8BAAgB;AAAE,gBAAG,SAAO;AAAE,qBAAO;AAAA;AAAA;AAAG,kBAAQ,QAAQ,OAAO,qBAAoB,QAAM,GAAE;AAAO,YAAG,OAAK;AAAE,iBAAO,MAAI;AAAgB,cAAK,iDAA+C;AAAA;AAAI,aAAO,4BAA0B;AAAuB;AAAoC,YAAG;AAAuB,gBAAK;AAAuF,YAAG,CAAC;AAAY,gBAAK;AAAoD,4BAAoB,cAAY,MAAI,KAAG;AAAE,sBAAY,QAAQ,SAAS;AAAa,gBAAQ,OAAO;AAAY,gBAAQ,eAAe;AAAS,gBAAQ,eAAe,OAAO,QAAQ,eAAe,QAAQ,QAAQ,SAAQ;AAAG,gBAAQ,OAAO,UAAQ;AAAA;AAAU;AAAsC,YAAG;AAAuB,gBAAK;AAAyF,YAAG,CAAC;AAAY,gBAAK;AAAsD,sBAAY,QAAQ,SAAS;AAAa,gBAAQ,OAAO,YAAY,CAAC,KAAM;AAAA;AAAW;AAAuC,YAAG;AAAuB,gBAAK;AAA0F,YAAG,CAAC;AAAY,gBAAK;AAAuD,4BAAoB,cAAY,MAAI,KAAG;AAAE,sBAAY,QAAQ,SAAS;AAAa,YAAG;AAAS,uBAAW,QAAQ;AAAO,kBAAQ,mBAAmB;AAAA;AAAA;AAAS,oBAAY,CAAC,gBAAe,GAAE,gBAAe,CAAC,aAAY,GAAE,WAAU,IAAG,eAAc,IAAG,gBAAe,IAAG,aAAY;AAAW,+BAAuB,QAAQ,iBAAgB,CAAC,uBAAsB;AAAG,oDAA4C,QAAQ;AAAA,SAAkB,qBAAoB;AAAW,8BAAoB;AAAE,qBAAU,GAAE,IAAE,iBAAgB,EAAE;AAAG,kBAAQ;AAAA;AAAuB,gBAAQ,kBAAgB;AAAM,qBAAU,GAAE,IAAE,MAAI,GAAE,EAAE;AAAE,8BAAoB,QAAQ,kBAAgB,IAAE,KAAG;AAAE,4BAAoB,QAAQ,kBAAgB,MAAI,KAAG,QAAQ;AAAgB,sBAAY,QAAQ,kBAAgB;AAAI,4BAAoB,WAAS,KAAG;AAAQ,wBAAc;AAAM,qBAAU,GAAE,IAAE,KAAI,EAAE;AAAE,8BAAoB,YAAU,IAAE,KAAG;AAAE,gBAAQ,MAAM,qBAAoB,QAAQ,kBAAgB,OAAK,GAAE;AAAW,gBAAQ,MAAM,qBAAoB,QAAQ,kBAAgB,MAAI,GAAE,QAAQ;AAAiB,gBAAQ,MAAM,qBAAoB,QAAQ,kBAAgB,MAAI,GAAE;AAAA,SAAK,YAAW;AAAA,SAAa,UAAS,IAAG,cAAa,MAAK,iBAAgB;AAAA,SAAa,iBAAgB;AAAW,YAAG,QAAQ,iBAAe;AAAM,iBAAM,QAAQ,aAAa,SAAO;AAAG,oBAAQ,aAAa;AAAA;AAAQ,kBAAQ,eAAa;AAAA;AAAK,YAAG,0BAAwB;AAAiB;AAAA,SAA4B,YAAW;AAAmB,iBAAO;AAAgB,YAAG;AAAI,kBAAQ,MAAM,qBAAoB,KAAG,KAAG,GAAE;AAAU,kBAAQ,MAAM,qBAAoB,KAAG,KAAG,GAAE;AAAG,kBAAQ,MAAM,qBAAoB,KAAG,MAAI,GAAE;AAAG,kBAAQ,MAAM,qBAAoB,KAAG,MAAI,GAAE;AAAG,kBAAQ;AAAkB,iCAAuB,KAAG,GAAE;AAAY,iCAAuB,GAAE,GAAE;AAAG,6BAAiB;AAAE,cAAG;AAAwB,wBAAY,CAAC,KAAM;AAAA;AAAA;AAAA,SAAY,cAAa;AAAW,gBAAQ;AAAkB,gBAAQ,MAAM,qBAAoB,mBAAiB,KAAG,GAAE;AAAI,gBAAQ,MAAM,qBAAoB,mBAAiB,KAAG,GAAE;AAAG,+BAAuB,mBAAiB,GAAE;AAAY,2BAAiB,eAAa;AAAE,+BAAuB,GAAE,GAAE;AAAG,oBAAY,CAAC,KAAM;AAAA,SAAgB,qBAAoB;AAAW,sBAAa,QAAQ;AAAU,wBAAY,QAAQ,SAAS;AAAG,cAAG,WAAS,QAAQ;AAAQ,oBAAQ,mBAAmB,QAAQ;AAAA;AAAA;AAAS,gBAAQ,WAAS;AAAG,qBAAU,GAAE,IAAE,QAAQ,cAAc,QAAO,EAAE;AAAG,uBAAW,QAAQ,cAAc;AAAG,iBAAO;AAAA;AAAY,gBAAQ,gBAAc;AAAG,qBAAU,GAAE,IAAE,QAAQ,eAAe,QAAO,EAAE;AAAG,uBAAW,QAAQ,eAAe;AAAG,wBAAY,OAAO;AAAQ,kBAAQ,eAAe;AAAS,iBAAO;AAAA;AAAY,gBAAQ,iBAAe;AAAA,SAAI,gBAAe;AAAkB,YAAG,CAAC;AAAQ;AAAO,YAAG,QAAQ;AAAkB,0BAAc,oBAAoB,QAAQ,mBAAiB,OAAK;AAAG,8BAAoB,QAAQ,mBAAiB,OAAK,KAAG;AAAE,gBAAM;AAAW,gBAAM,QAAQ;AAAA;AAAkB,gBAAQ,mBAAiB;AAAE,YAAG,QAAQ,qBAAmB,QAAQ;AAAU,gBAAM,QAAQ;AAAW,gBAAQ,YAAU;AAAE,YAAG,QAAQ;AAAO,kBAAQ,OAAO,UAAQ;AAAA,SAAM,oBAAmB;
AAAiB,eAAO,QAAQ,SAAS,OAAO,QAAQ;AAAQ,gBAAQ,cAAc,KAAK;AAAQ,gBAAQ,eAAe,OAAO,QAAQ,eAAe,QAAQ,SAAQ;AAAG,gBAAQ,eAAe,OAAO;AAAS,eAAO,UAAQ;AAAA,SAAW,uBAAsB;AAAA,SAAiB,wBAAuB;AAAmC,eAAO,YAAU;AAAY,kBAAM,EAAE;AAAQ,oBAAQ,EAAE;AAAO,cAAG,OAAO;AAAQ,oBAAQ,sCAAoC,OAAO,QAAQ;AAAiB,cAAG,EAAE,mBAAiB,EAAE,mBAAiB;AAAiB,yBAAW,QAAQ,SAAS,EAAE;AAAc,gBAAG;AAAQ,qBAAO,OAAO,YAAY,EAAE,MAAK,EAAE;AAAA;AAAsB,sBAAQ,MAAM,4CAA0C,MAAI,yBAAuB,EAAE,kBAAgB;AAAA;AAAuC,oBAAQ,sCAAoC;AAAU;AAAA;AAAO,cAAG,QAAM;AAA+B;AAAA,qBAAuD,QAAM;AAAe,2BAAe,EAAE;AAAA,qBAAc,QAAM;AAAiB,6BAAiB,EAAE;AAAA,qBAAmB,QAAM;AAAc,0BAAc,EAAE;AAAA,qBAAmB,QAAM;AAAgB,4BAAgB,EAAE;AAAA,qBAAmB,QAAM;AAAU,mBAAO,SAAO;AAAK,gBAAG;AAAkB,gCAAkB;AAAQ,gBAAG,OAAO;AAAY,qBAAO;AAAa,qBAAO,OAAO;AAAA;AAAA,qBAAoB,QAAM;AAAS,gBAAI,YAAU,EAAE,cAAY,OAAK,EAAE;AAAA,qBAAiB,QAAM;AAAY,gBAAI,YAAU,EAAE,cAAY,OAAK,EAAE;AAAA,qBAAiB,QAAM;AAAS,kBAAM,YAAU,EAAE,cAAY,OAAK,EAAE;AAAA,qBAAiB,QAAM;AAAQ,2BAAa,OAAO,WAAS,QAAQ,KAAK,qBAAoB,OAAO,QAAQ,SAAO,MAAI;AAAG,gBAAG;AAAU,sBAAQ,mBAAmB;AAAA;AAAA,qBAAiB,QAAM;AAAc,oBAAQ,mBAAmB;AAAA,qBAAgB,QAAM;AAAkB,oBAAQ,sBAAsB,EAAE;AAAA,qBAAc,EAAE,KAAK,WAAS;AAAgB,mBAAO,YAAY,EAAE;AAAA;AAAW,gBAAI,oCAAkC;AAAA;AAAK,kBAAQ,sCAAoC;AAAA;AAAW,eAAO,UAAQ;AAAY,cAAI,4BAA0B,EAAE,WAAS,MAAI,EAAE,SAAO,OAAK,EAAE;AAAA;AAAU,YAAG;AAAqB,iBAAO,GAAG,WAAU;AAAe,mBAAO,UAAU,CAAC,MAAK;AAAA;AAAS,iBAAO,GAAG,SAAQ;AAAe,mBAAO,QAAQ;AAAA;AAAQ,iBAAO,GAAG,QAAO;AAAe,oBAAQ,IAAI;AAAA;AAAA;AAAoD,eAAO,YAAY,CAAC,KAAM,QAAO,WAAY,OAAO,0BAAwB,YAAW,YAAwB,YAAwB,cAA4B;AAAA,SAAmC,sBAAqB;AAAW,4BAAkB,WAAW;AAA6C,gBAAQ,cAAc,KAAK,IAAI,OAAO;AAAA,SAAiB,cAAa;AAAW,YAAG,QAAQ,cAAc,UAAQ;AAAG,kBAAQ;AAAuB,kBAAQ,uBAAuB,QAAQ,cAAc;AAAA;AAAI,YAAG,QAAQ,cAAc,SAAO;AAAE,iBAAO,QAAQ,cAAc;AAAA;AAAW,iBAAO;AAAA,SAAM,cAAa;AAAgB,gBAAM,YAAY,QAAM;AAAM,eAAM,YAAY,QAAM;AAAA;AAAA;AAAO;AAAgD,qBAAW,WAAS;AAAS,oBAAU;AAAS,qBAAa;AAAA;AAAU,aAAO,yBAAuB;AAAoB;AAA4B,eAAO;AAAA;AAAc,aAAO,sBAAoB;AAAiB;AAAsD,cAAM,uBAAqB,aAAa,aAAW,WAAS,CAAC,WAAS,aAAa,YAAU,oBAAmB,MAAK,OAAK,aAAa,QAAM;AAAA;AAAqB;AAAiC,yBAAe,MAAM,MAAK;AAAA;AAAM;AAAwB,UAAG;AAAqB,8BAAoB;AAAW,kBAAM,QAAQ;AAAY,iBAAO,EAAE,KAAG,MAAI,EAAE,KAAG;AAAA;AAAA,iBAAa;AAAwB,8BAAoB;AAAW,iBAAO,YAAY,QAAM,OAAO;AAAA;AAAA,iBAA0C,OAAO,YAAU;AAAa,8BAAoB;AAAA;AAAa,8BAAoB;AAAW,iBAAO,YAAY;AAAA;AAAO;AAAyB,4BAAoB,uBAAqB,KAAG;AAAM,eAAO;AAAA;AAAM;AAA2B,YAAG;AAAuB,iBAAO,oCAAoC,GAAE,GAAE,MAAK;AAAK,mBAAW,QAAQ,CAAC,MAAU;AAAA;AAAU;AAAuE,YAAG,kBAAgB;AAAc,sBAAY,CAAC,KAAM;AAAA,mBAAwC;AAAwB,sBAAY,CAAC,cAAe,gBAAe,KAAM;AAAA;AAA4B,wBAAY,QAAQ,SAAS;AAAgB,uBAAW,WAAS,QAAQ;AAAO,cAAG,CAAC;AAAQ;AAAA;AAAO,iBAAO,YAAY,CAAC,KAAM;AAAA;AAAuB,eAAO;AAAA;AAAE;AAAkB;AAAA;AAAQ;AAAqF,yBAAe,iBAAe;AAAE,oBAAU,YAAU;AAAA;AAAE;AAAkD,YAAG,QAAM,KAAG,OAAK,mBAAmB,UAAQ,OAAK;AAAK,iBAAM;AAAI,YAAG;AAAuB,oBAAQ,QAAQ,KAAK,qBAAoB,QAAM,GAAE,KAAI;AAAS,cAAG,QAAM;AAAY,mBAAM;AAAI,cAAG,QAAM;AAAY,mBAAM;AAAG,cAAG,QAAM;AAAK,mBAAO;AAAE,gBAAK,+CAA6C;AAAA;AAAS,0BAAc,QAAQ,KAAK,qBAAoB,QAAM;AAAG,cAAG,OAAK;AAAU,mBAAM;AAAG,qBAAS,YAAY;AAAM,qBAAS,OAAK;AAAQ,kBAAQ,MAAM,qBAAoB,oCAAkC,GAAE;AAAM,+BAAmB;AAAK,iBAAM,QAAM;AAAgB,mBAAK,YAAY;AAAM,gBAAG,OAAK;AAAM,qBAAM;AAAA;AAAI;AAA+C,mBAAK,QAAQ,KAAK,qBAAoB,oCAAkC;AAAA;AAAG,iBAAO;AAAA;AAAA;AAAG;AAA8C,eAAO,mCAAiC;AAAA;AAAE;AAA8C,eAAO,mCAAiC;AAAA;AAAE;AAA8C,2BAAmB,WAAW,MAAK,KAAI,MAAI;AAAA;AAAK;AAAyC,eAAO,UAAU;AAAA;AAAuB;AAAyD,0BAAgB,UAAU,SAAO;AAAE,oBAAU;AAAY,mBAAS,WAAW,cAAY;AAAG,gBAAM,QAAM;AAAE,qBAAU,GAAE,IAAE,aAAY;AAAK,8BAAoB,IAAE,KAAG,UAAU,IAAE;AAAA;AAAG,kBAAQ,0CAA0C,OAAM,aAAY,MAAK;AAAM,qBAAa;AAAO,eAAO;AAAA;AAAI,2DAAmD;AAAG;AAAsC,YAAG,CAAC,iBAAiB;AAAO,2BAAiB,QAAM;AAAA;AAAG,mBAAS,iBAAiB;AAAM,aAAK,SAAO;AAAE;AAAO,eAAM,KAAG,mBAAmB;AAAW,cAAG,OAAK,OAAK,OAAK;AAAK,kBAAI,MAAI,IAAE,CAAC;AAAE
,iBAAK,KAAK,oBAAoB,OAAK;AAAI,mBAAK;AAAA;AAAO,kBAAI,MAAI,IAAE,CAAC;AAAE,iBAAK,KAAK,oBAAoB,OAAK;AAAI,mBAAK;AAAA;AAAA;AAAG,eAAO;AAAA;AAAK;AAAuE,uDAA+C,SAAO;AAAY,gBAAM,QAAM;AAAE,qBAAU,GAAE,IAAE,aAAY;AAAK,yDAA+C,KAAG,oBAAoB,IAAE;AAAA;AAAG,2BAAiB,QAAM;AAAE,mBAAS,CAAC,eAAa,qBAAqB,SAAO,WAAW,CAAC,QAAM;AAAG,YAAG;AAAc,uBAAW,+CAA+C;AAAG,0BAAc,+CAA+C;AAAG,0BAAc,iBAAiB,QAAO;AAAW,iBAAO,KAAK,MAAM,MAAK;AAAA;AAAW,eAAO,KAAK,MAAM,MAAK;AAAA;AAAgD;AAAqC,eAAO,mBAAmB;AAAA;AAAO;AAAyC;AAAI,qBAAW,KAAK,OAAK,QAAO,aAAW,UAAQ;AAAI,qCAA2B,WAAW;AAAQ,iBAAO;AAAA;AAAA;AAAA;AAAa;AAAgD,wBAAc,kBAAgB;AAAE,sBAAY;AAA4B,YAAG,iBAAe;AAAS,iBAAO;AAAA;AAAM,4BAAkB;AAAM,0BAAgB;AAAW,YAAG,gBAAc;AAAa,iBAAO;AAAA;AAAM,0BAAgB;AAAS,2BAAgB,GAAE,WAAS,GAAE,WAAS;AAAG,kCAAsB,UAAS,KAAE,MAAG;AAAS,8BAAkB,KAAK,IAAI,mBAAkB,gBAAc;AAAW,wBAAY,KAAK,IAAI,aAAY,QAAQ,KAAK,IAAI,aAAY,eAAc,oBAAmB;AAAgB,4BAAgB,0BAA0B;AAAS,cAAG;AAAa,mBAAO;AAAA;AAAA;AAAM,eAAO;AAAA;AAAM,qBAAa,CAAC,UAAS,GAAE,YAAW,GAAE,YAAW,GAAE,SAAQ,GAAE,YAAW,GAAE,wBAAuB,GAAE,mBAAkB,GAAE,uBAAsB,GAAE,wBAAuB,GAAE,uBAAsB,GAAE,YAAW,GAAE,2BAA0B,MAAK,iBAAgB,MAAK,iBAAgB,MAAK,gCAA+B,OAAM,yBAAwB;AAAW,qBAAU,SAAS,cAAc,SAAO,GAAE,KAAG,GAAE,EAAE;AAAG,mBAAS,eAAe;AAAA;AAAG,iBAAS,gBAAc;AAAG,iBAAS,gBAAc;AAAA,SAAI,8BAA6B;AAAW,YAAG,CAAC,SAAS;AAAgC,qBAAW,KAAK,SAAS;AAAyB,mBAAS,iCAA+B;AAAA;AAAA,SAAO,eAAc,IAAG,WAAU;AAA6C;AAA2C,cAAG,KAAK,UAAQ,KAAK;AAAO,mBAAO;AAAM,yBAAa;AAAM,gBAAG,KAAK,OAAI,KAAK;AAAG,qBAAO;AAAA;AAAM,iBAAO;AAAA;AAAK,sBAAa,SAAS;AAAe,qBAAS,SAAS,cAAc;AAAG,cAAG,KAAK,kBAAgB,kBAAgB,uBAAuB,KAAK,UAAS;AAAW;AAAA;AAAA;AAAQ,iBAAS,cAAc,KAAK,CAAC,gBAA8B,YAAsB;AAAoB,iBAAS,cAAc,KAAK;AAAc,iBAAO,EAAE,aAAW,EAAE;AAAA;AAAA,SAAc,qBAAoB;AAAyB,qBAAU,GAAE,IAAE,SAAS,cAAc,QAAO,EAAE;AAAG,cAAG,SAAS,cAAc,GAAG,kBAAgB;AAAgB,qBAAS,cAAc,OAAO,GAAE;AAAG,cAAE;AAAA;AAAA;AAAA,SAAK,gCAA+B;AAAW,eAAO,SAAS,kBAAgB,SAAS,oBAAoB;AAAA,SAAqB,kBAAiB;AAAW,YAAG,CAAC,SAAS;AAAkC;AAAA;AAAO,qBAAU,GAAE,IAAE,SAAS,cAAc,QAAO,EAAE;AAAG,qBAAS,SAAS,cAAc;AAAG,mBAAS,cAAc,OAAO,GAAE;AAAG,YAAE;AAAE,eAAK,eAAe,MAAM,MAAK,KAAK;AAAA;AAAA,SAAY,gBAAe,GAAE,qBAAoB,MAAK,eAAc,IAAG,2BAA0B;AAAiC,qBAAU,GAAE,IAAE,SAAS,cAAc,QAAO,EAAE;AAAG,cAAG,SAAS,cAAc,GAAG,UAAQ,UAAS,EAAC,mBAAiB,mBAAiB,SAAS,cAAc,GAAG;AAAkB,qBAAS,eAAe;AAAA;AAAA;AAAA,SAAQ,gBAAe;AAAY,gBAAM,SAAS,cAAc;AAAG,UAAE,OAAO,oBAAoB,EAAE,iBAAgB,EAAE,mBAAkB,EAAE;AAAY,iBAAS,cAAc,OAAO,GAAE;AAAA,SAAI,yBAAwB;AAAuB,6BAAmB;AAA+B,YAAE,SAAS;AAAe,mBAAS,sBAAoB;AAAa,mBAAS;AAAmB,uBAAa,YAAY;AAAO,mBAAS;AAAmB,YAAE,SAAS;AAAA;AAAgB,YAAG,aAAa;AAAc,uBAAa,oBAAkB;AAAe,uBAAa,OAAO,iBAAiB,aAAa,iBAAgB,gBAAe,aAAa;AAAY,mBAAS,cAAc,KAAK;AAAc,mBAAS;AAAA;AAAoC,uBAAU,GAAE,IAAE,SAAS,cAAc,QAAO,EAAE;AAAG,gBAAG,SAAS,cAAc,GAAG,UAAQ,aAAa,UAAQ,SAAS,cAAc,GAAG,mBAAiB,aAAa;AAAiB,uBAAS,eAAe;AAAA;AAAA;AAAA;AAAA,SAAS,gCAA+B;AAAuE,uBAAa;AAAY,sBAAY,WAAW;AAAI,4BAAoB,WAAS,KAAG;AAAY,4BAAoB,UAAQ,KAAG,KAAG;AAAU,4BAAoB,UAAQ,KAAG,KAAG;AAAS,2CAAmC,cAAa,WAAU,kBAAiB,WAAU;AAAS,qBAAa;AAAA,SAAW,iCAAgC;AAAuB,gBAAO;AAAA,eAAmB;AAAE,mBAAO;AAAA,eAAO;AAAE,mBAAO,QAAQ;AAAA;AAA4C,mBAAO;AAAA;AAAA,SAAe,sBAAqB;AAAiB,YAAG,CAAC;AAAO,iBAAM;AAAG,YAAG,UAAQ;AAAO,iBAAM;AAAU,YAAG,UAAQ;AAAO,iBAAM;AAAU,eAAO,UAAQ,OAAO,WAAS,OAAO,WAAS;AAAA,SAAI,mBAAkB;AAAW,eAAO,SAAS,qBAAmB,SAAS;AAAA;AAA0B;AAAmC,qBAAW,gBAAgB,YAAU;AAAE,sBAAY,QAAQ;AAAQ,qBAAa,UAAS,SAAQ;AAAQ,eAAO;AAAA;AAAQ;AAA0G,uBAAa;AAAY,sBAAY,WAAW;AAAI,8BAAoB;AAAE,YAAG;AAAc,4BAAgB,gBAAgB;AAAA;AAAc,4BAAoB,WAAS,KAAG;AAAgB,4BAAoB,UAAQ,KAAG,KAAG;AAAM,4BAAoB,UAAQ,KAAG,KAAG;AAAO,2CAAmC,cAAa,WAAU,GAAE,iBAAgB;AAAS,qBAAa;AAAA;AAAU;AAAuG,uBAAa,eAAa,aAAa,gBAAc;AAAG,iEAAyD,cAAa,cAAa,OAAM;AAAA;AAAQ;AAA2C,eAAO,UAAQ,IAAE,aAAa,WAAS;AAAA;AAAQ,+BAAuB,CAAC,GAAE,OAAO,aAAW,cAAY,WAAS,GAAE,OAAO,WAAS,cAAY
,SAAO;AAAG;AAAmC,iBAAO,yBAAyB;AAAQ,yBAAe,mBAAmB,WAAU,QAAO,aAAW,cAAY,SAAS,cAAc,UAAQ;AAAW,eAAO;AAAA;AAAW;AAAyC,eAAO,kBAAkB;AAAA;AAAQ;AAAiF,qBAAW,wBAAwB;AAAQ,YAAG,CAAC;AAAO,iBAAM;AAAG,YAAG,OAAO;AAAiB,8BAAoB,OAAO,mBAAiB,KAAG;AAAM,8BAAoB,OAAO,kBAAgB,KAAG,KAAG;AAAA;AAAO,YAAG,OAAO,mBAAiB,CAAC,OAAO;AAA6B,cAAG,OAAO;AAAgB,qBAAO,OAAO;AAAgB,mCAAuB;AAAM,cAAG,OAAO,eAAa,OAAO,YAAY;AAAO,+BAAiB,OAAO,YAAY,MAAM,aAAa;AAAM,iCAAmB,aAAa,OAAK,KAAG,aAAa,OAAK,KAAG,aAAa,OAAK,OAAO,SAAO,aAAa,OAAK,OAAO;AAAA;AAAO,iBAAO,QAAM;AAAM,iBAAO,SAAO;AAAO,cAAG;AAAoB,mBAAO,YAAY,MAAM,SAAS,GAAE,GAAE,OAAM;AAAA;AAAA,mBAAiB,OAAO;AAAiB,6BAAiB,oBAAoB,OAAO,kBAAgB,KAAG;AAAG,gEAAsD,cAAa,QAAO,OAAM;AAAQ,iBAAO;AAAA;AAAO,iBAAM;AAAA;AAAG,eAAO;AAAA;AAAE;AAA8E,YAAG;AAAuB,iBAAO,oCAAoC,GAAE,GAAE,QAAO,OAAM;AAAQ,eAAO,mDAAmD,QAAO,OAAM;AAAA;AAAQ;AAAkE,qBAAW,wBAAwB;AAAQ,YAAG;AAAQ,iBAAO,mDAAmD,QAAO,OAAM;AAAA;AAAa,iBAAO,gDAAgD,QAAO,OAAM;AAAA;AAAA;AAAS;AAA0D,oBAAU,YAAU;AAAA;AAAE;AAAoD,mBAAS,WAAS;AAAE,eAAK,OAAK;AAAA;AAAE;AAAoD,kBAAQ,IAAI,aAAa;AAA0B,YAAG;AAAK,cAAI,yBAAuB;AAAwB,gBAAI,4BAA4B,OAAM;AAAA;AAAU,cAAI,yBAAuB;AAAqC,gBAAI,4BAA4B,MAAK,OAAM,OAAM;AAAA;AAAY,cAAI,2BAAyB;AAA4C,gBAAI,8BAA8B,MAAK,OAAM,MAAK,SAAQ;AAAA;AAAY,iBAAO;AAAA;AAAA;AAAG;AAAqD,kBAAQ,IAAI,aAAa;AAA2B,YAAG;AAAK,cAAI,uBAAqB;AAAW,mBAAO,IAAI;AAAA;AAA2B,cAAI,uBAAqB;AAAc,gBAAI,wBAAwB;AAAA;AAAM,cAAI,qBAAmB;AAAc,gBAAI,sBAAsB;AAAA;AAAM,cAAI,mBAAiB;AAAc,mBAAO,IAAI,oBAAoB;AAAA;AAAM,iBAAO;AAAA;AAAA;AAAG;AAAgD,kBAAQ,IAAI,aAAa;AAAsB,YAAG;AAAK,cAAI,iBAAe;AAAiB,gBAAI,oBAAoB,GAAE;AAAA;AAAO,iBAAO;AAAA;AAAA;AAAG,eAAO,CAAC,SAAQ,GAAE,WAAU,GAAE,SAAQ,IAAG,eAAc,IAAG,UAAS,IAAG,cAAa,IAAG,eAAc,IAAG,UAAS,IAAG,UAAS,IAAG,SAAQ,IAAG,MAAK,IAAG,UAAS,IAAG,gBAAe,MAAK,mBAAkB,IAAG,iBAAgB,IAAG,cAAa,IAAG,aAAY,IAAG,iBAAgB,GAAE,MAAK;AAAW,kCAAwB,IAAI,aAAa,GAAG;AAAuB,qBAAU,GAAE,IAAE,GAAG,uBAAsB;AAAK,aAAG,yBAAyB,KAAG,oBAAoB,SAAS,GAAE,IAAE;AAAA;AAAG,gCAAsB,IAAI,WAAW,GAAG;AAAuB,qBAAU,GAAE,IAAE,GAAG,uBAAsB;AAAK,aAAG,uBAAuB,KAAG,kBAAkB,SAAS,GAAE,IAAE;AAAA;AAAA,SAAK,aAAY;AAAgC,YAAG,CAAC,GAAG;AAAW,aAAG,YAAU;AAAA;AAAA,SAAY,UAAS;AAAgB,kBAAQ,GAAG;AAAU,qBAAU,MAAM,QAAO,IAAE,KAAI;AAAK,gBAAM,KAAG;AAAA;AAAK,eAAO;AAAA,SAAK,uBAAsB,KAAI,0BAAyB,CAAC,IAAG,wBAAuB,CAAC,IAAG,WAAU;AAAqC,qBAAW;AAAG,qBAAU,GAAE,IAAE,OAAM,EAAE;AAAG,oBAAQ,SAAO,oBAAoB,SAAO,IAAE,KAAG,KAAG;AAAG,oBAAQ,aAAa,oBAAoB,SAAO,IAAE,KAAG,IAAG,MAAI,IAAE,SAAU;AAAA;AAAK,eAAO;AAAA,SAAQ,eAAc;AAAwC,kBAAQ,OAAO,WAAW,SAAQ;AAAwB,YAAG,CAAC;AAAI,iBAAO;AAAE,qBAAW,GAAG,gBAAgB,KAAI;AAAwB,eAAO;AAAA,SAAQ,iBAAgB;AAAqC,qBAAW,QAAQ;AAAG,4BAAoB,SAAO,KAAG,KAAG;AAAgB,sBAAY,CAAC,QAAc,YAAW,wBAAuB,SAAQ,uBAAuB,cAAa,OAAM;AAAK,YAAG,IAAI;AAAO,cAAI,OAAO,cAAY;AAAQ,WAAG,SAAS,UAAQ;AAAQ,YAAG,OAAO,uBAAuB,8BAA4B,eAAa,uBAAuB;AAA2B,aAAG,eAAe;AAAA;AAAS,eAAO;AAAA,SAAQ,oBAAmB;AAAwB,WAAG,iBAAe,GAAG,SAAS;AAAe,eAAO,MAAI,QAAM,GAAG,kBAAgB,GAAG,eAAe;AAAM,eAAM,CAAE,kBAAe,CAAC;AAAA,SAAQ,YAAW;AAAwB,eAAO,GAAG,SAAS;AAAA,SAAgB,eAAc;AAAwB,YAAG,GAAG,mBAAiB,GAAG,SAAS;AAAe,aAAG,iBAAe;AAAK,YAAG,OAAO,aAAW;AAAS,mBAAS,0BAA0B,GAAG,SAAS,eAAe,MAAM;AAAQ,YAAG,GAAG,SAAS,kBAAgB,GAAG,SAAS,eAAe,MAAM;AAAO,aAAG,SAAS,eAAe,MAAM,OAAO,cAAY;AAAU,cAAM,GAAG,SAAS,eAAe;AAAQ,WAAG,SAAS,iBAAe;AAAA,SAAM,gBAAe;AAAkB,YAAG,CAAC;AAAQ,oBAAQ,GAAG;AAAe,YAAG,QAAQ;AAAmB;AAAO,gBAAQ,qBAAmB;AAAK,qBAAU,QAAQ;AAAM,8CAAsC;AAAO,+CAAuC;AAAO,0CAAkC;AAAO,eAAM,wBAAsB,OAAM,aAAa;AAA4B,6CAAmC,CAAC,qBAAoB,0BAAyB,4BAA2B,2BAA0B,iCAAgC,uBAAsB,0BAAyB,kCAAiC,kBAAiB,sBAAqB,0BAAyB,4BAA2B,iCAAgC,oBAAmB,0BAAyB,sBAAqB,kCAAiC,+BAA8B,4BAA2B,YAAW,iCAAgC,4BAA2B,gCAA+B,iCAAgC,0BAAyB,sCAAqC,mCAAkC;AAAyC,mBAAS,OAAM,4BAA0B;AAAG,aAAK,QAAQ;AAAc,cAAG,+BAA+B,QAAQ,QAAM;AAAI,mBAAM,aAAa;AAAA;AAAA;AAAA,SAAS,sBAAqB;AAAkB,gBAAM,GAAG,S
AAS;AAAS,qBAAW,GAAG,aAAa,WAAS,CAAC,UAAS,IAAG,kBAAiB,GAAE,oBAAmB,IAAG,2BAA0B;AAAI,qBAAW,OAAO;AAAS,0BAAgB,MAAM,oBAAoB,GAAE;AAAO,qBAAU,GAAE,IAAE,aAAY,EAAE;AAAG,kBAAM,MAAM,iBAAiB,GAAE;AAAG,qBAAS,EAAE;AAAK,iBAAO,mBAAiB,KAAK,IAAI,OAAO,kBAAiB,KAAK,SAAO;AAAG,cAAG,KAAK,MAAM,OAAK;AAAK,mBAAK,KAAK,MAAM,GAAE,KAAK,YAAY;AAAA;AAAM,oBAAQ,MAAM,mBAAmB,GAAE;AAAM,cAAG;AAAK,qBAAO,GAAG,SAAS,GAAG;AAAU,mBAAO,QAAM,CAAC,EAAE,MAAK;AAAI,eAAG,SAAS,MAAI;AAAI,yBAAU,GAAE,IAAE,EAAE,MAAK,EAAE;AAAG,sBAAM,OAAK,MAAI,IAAE;AAAI,oBAAI,MAAM,mBAAmB,GAAE;AAAG,mBAAG,GAAG,SAAS,GAAG;AAAU,iBAAG,SAAS,MAAI;AAAA;AAAA;AAAA;AAAA;AAAS,iDAAyC,CAAC,WAAU,aAAY;AAAoB;AAAgE,gCAAsB;AAAG,gBAAM,cAAY;AAAE,0BAAkB,WAAS,CAAC,CAAC,oBAAoB,IAAG,MAAG;AAAI,0BAAkB,WAAS,CAAC,CAAC,oBAAoB,IAAG,MAAG;AAAI,0BAAkB,aAAW,CAAC,CAAC,oBAAoB,IAAG,MAAG;AAAI,0BAAkB,eAAa,CAAC,CAAC,oBAAoB,IAAG,OAAI;AAAI,0BAAkB,wBAAsB,CAAC,CAAC,oBAAoB,IAAG,OAAI;AAAI,0BAAkB,2BAAyB,CAAC,CAAC,oBAAoB,IAAG,OAAI;AAAI,8BAAoB,oBAAoB,IAAG,OAAI;AAAI,0BAAkB,qBAAmB,qCAAqC;AAAiB,0BAAkB,kCAAgC,CAAC,CAAC,oBAAoB,IAAG,OAAI;AAAI,0BAAkB,eAAa,oBAAoB,IAAG,OAAI;AAAI,0BAAkB,eAAa,oBAAoB,IAAG,OAAI;AAAI,0BAAkB,4BAA0B,oBAAoB,IAAG,OAAI;AAAI,0BAAkB,sBAAoB,oBAAoB,IAAG,OAAI;AAAI,0BAAkB,2BAAyB,oBAAoB,IAAG,OAAI;AAAI,0BAAkB,+BAA6B,oBAAoB,IAAG,OAAI;AAAI,qBAAW,wBAAwB;AAAQ,YAAG,CAAC;AAAQ,iBAAM;AAAA;AAAG,YAAG,kBAAkB;AAAqB,iBAAM;AAAA;AAAG,4BAAkB,GAAG,cAAc,QAAO;AAAmB,eAAO;AAAA;AAAc;AAAiD,eAAO,oCAAoC,IAAG;AAAA;AAAI,iBAAS,CAAC,WAAU;AAAmB,0BAAgB;AAAgE,eAAO,YAAY,KAAK,UAAU,MAAM;AAAA,SAAI,gBAAe;AAA+B,iBAAO;AAAE,qBAAU,MAAM,SAAO,GAAE,KAAG,GAAE;AAAK,qBAAS,MAAM;AAAG,cAAG,SAAO;AAAK,kBAAM,OAAO,GAAE;AAAA,qBAAW,SAAO;AAAM,kBAAM,OAAO,GAAE;AAAG;AAAA,qBAAa;AAAI,kBAAM,OAAO,GAAE;AAAG;AAAA;AAAA;AAAM,YAAG;AAAgB,iBAAK,IAAG;AAAM,kBAAM,QAAQ;AAAA;AAAA;AAAO,eAAO;AAAA,SAAO,WAAU;AAAe,yBAAe,KAAK,OAAO,OAAK,qBAAkB,KAAK,OAAO,QAAM;AAAI,eAAK,KAAK,eAAe,KAAK,MAAM,KAAK,OAAO;AAAY,iBAAM,CAAC,CAAC;AAAA,YAAI,CAAC,YAAY,KAAK;AAAK,YAAG,CAAC,QAAM,CAAC;AAAY,iBAAK;AAAA;AAAI,YAAG,QAAM;AAAe,kBAAM;AAAA;AAAI,eAAO,cAAW,MAAI,MAAI;AAAA,SAAM,SAAQ;AAAe,qBAAW,KAAK,UAAU,cAAW,OAAO,UAAO,OAAO;AAAG,YAAG,CAAC,QAAM,CAAC;AAAK,iBAAM;AAAA;AAAI,YAAG;AAAK,gBAAI,IAAI,OAAO,GAAE,IAAI,SAAO;AAAA;AAAG,eAAO,OAAK;AAAA,SAAK,UAAS;AAAe,YAAG,SAAO;AAAI,iBAAM;AAAI,wBAAc,KAAK,YAAY;AAAK,YAAG,cAAY;AAAG,iBAAO;AAAK,eAAO,KAAK,OAAO,YAAU;AAAA,SAAI,SAAQ;AAAe,eAAO,KAAK,UAAU,MAAM;AAAA,SAAI,MAAK;AAAW,oBAAU,MAAM,UAAU,MAAM,KAAK,WAAU;AAAG,eAAO,KAAK,UAAU,MAAM,KAAK;AAAA,SAAO,OAAM;AAAc,eAAO,KAAK,UAAU,IAAE,MAAI;AAAA;AAAK,qBAAa,CAAC,UAAS,IAAG,SAAQ,CAAC,MAAK,IAAG,KAAI,WAAU;AAAsB,sBAAW,SAAS,QAAQ;AAAQ,YAAG,SAAO,KAAG,SAAO;AAAI,UAAC,YAAS,IAAE,MAAI,KAAK,kBAAkB,SAAO;AAAI,kBAAO,SAAO;AAAA;AAAO,kBAAO,KAAK;AAAA;AAAA,SAAQ,SAAQ,QAAU,KAAI;AAAW,iBAAS,WAAS;AAAE,kBAAQ,oBAAoB,SAAS,UAAQ,KAAG;AAAG,eAAO;AAAA,SAAK,QAAO;AAAc,kBAAQ,aAAa;AAAK,eAAO;AAAA,SAAK,OAAM;AAAmB,eAAO;AAAA;AAAM;AAAuB,YAAG;AAAuB,iBAAO,oCAAoC,GAAE,GAAE;AAAI,eAAO;AAAA;AAAE;AAA8D,YAAG;AAAuB,iBAAO,oCAAoC,GAAE,GAAE,IAAG,YAAW,aAAY,QAAO;AAAA;AAAW;AAAuC,YAAG;AAAuB,iBAAO,oCAAoC,GAAE,GAAE,IAAG,KAAI,QAAO;AAAM,kBAAQ;AAAE,qBAAU,GAAE,IAAE,QAAO;AAAK,oBAAQ,oBAAoB,MAAI,IAAE,KAAG;AAAG,oBAAQ,oBAAoB,MAAK,KAAE,IAAE,MAAI;AAAG,uBAAU,GAAE,IAAE,KAAI;AAAK,qBAAS,UAAU,IAAG,mBAAmB,MAAI;AAAA;AAAI,iBAAK;AAAA;AAAI,4BAAoB,QAAM,KAAG;AAAI,eAAO;AAAA;AAAE;AAAuC,sBAAY,QAAQ,aAAa;AAAM,YAAG;AAAQ;AAAA;AAAU;AAA4C,YAAG,QAAQ,iBAAe;AAAM,kBAAQ,eAAa;AAAA;AAAG,gBAAQ,aAAa,KAAK;AAAW,qBAAW,SAAQ;AAAA;AAAA;AAAO;AAAsC,YAAG;AAAuB,gBAAK;AAAwF,qBAAW,QAAQ;AAAe,YAAG,OAAO,YAAU;AAAU,gBAAK;AAAkB,YAAG,CAAC,aAAa;AAAY,gBAAK;AAAkC,gBAAQ,eAAe,KAAK;AAAQ,wBAAc,QAAQ,MAAI;AAAG,qBAAU,GAAE,IAAE,KAAI,EAAE;AAAG,8BAAoB,YAAU,IAAE,KAAG,KAAG;AAAA;AAAE,wBAAc,aAAa,YAAU,aA
Aa;AAAU,sBAAY,QAAQ,SAAS,aAAa,eAAa,CAAC,QAAc,WAAU,aAAa,WAAU,WAAU,aAAa,WAAU,mBAAkB,aAAa,mBAAkB,QAAO,aAAa,aAAY,kBAAiB,aAAa;AAAa,kBAAQ,QAAQ,oBAAkB;AAAE,gBAAQ,MAAM,qBAAoB,MAAK,MAAG,IAAG;AAAG,gBAAQ,MAAM,qBAAoB,MAAK,MAAG,IAAG;AAAG,gBAAQ,MAAM,qBAAoB,MAAK,MAAG,IAAG;AAAG,gBAAQ,MAAM,qBAAoB,MAAK,OAAI,IAAG,aAAa;AAAU,gBAAQ,MAAM,qBAAoB,MAAK,QAAK,IAAG;AAAW,gBAAQ,MAAM,qBAAoB,MAAK,OAAI,IAAG;AAAG,gBAAQ,MAAM,qBAAoB,MAAK,OAAI,IAAG,QAAQ;AAAkB,gBAAQ,MAAM,qBAAoB,MAAK,OAAI,IAAG;AAAI,gBAAQ,MAAM,qBAAoB,MAAK,QAAK,IAAG,aAAa;AAAW,gBAAQ,MAAM,qBAAoB,MAAK,OAAI,IAAG,aAAa;AAAW,gBAAQ,MAAM,qBAAoB,MAAK,OAAI,IAAG;AAAW,gBAAQ,MAAM,qBAAoB,MAAK,OAAI,KAAG,IAAG;AAAW,gBAAQ,MAAM,qBAAoB,MAAK,OAAI,MAAI,IAAG,aAAa;AAAU,gBAAQ,MAAM,qBAAoB,MAAK,OAAI,MAAI,IAAG,aAAa;AAAa,gBAAQ,MAAM,qBAAoB,MAAK,OAAI,MAAI,IAAG,aAAa;AAAW,0BAAgB;AAA8B,4BAAkB,cAAY;AAAG,gBAAQ,MAAM,qBAAoB,MAAK,QAAK,IAAG;AAAe,eAAO,UAAQ;AAAQ,kBAAQ,CAAC,KAAM,OAAM,eAAgB,aAAa,cAAa,KAAM,aAAa,KAAI,kBAAmB,aAAa,aAAY,cAAe,aAAa,aAAY,gBAAiB,aAAa,oBAAmB,WAAY,aAAa,WAAU,WAAY,aAAa;AAAW,eAAO,aAAW;AAAW,cAAI,OAAK,YAAY;AAAM,iBAAO,YAAY,KAAI,aAAa;AAAA;AAAe,YAAG,OAAO;AAAQ,iBAAO;AAAa,iBAAO,OAAO;AAAA;AAAA;AAAY;AAA0D,YAAG,CAAC,UAAQ,CAAC;AAAW,iBAAO,YAAY;AAAO,YAAG,CAAC;AAAQ,cAAI;AAA4D,iBAAO,YAAY;AAAA;AAAM,oBAAS,oBAAoB,SAAO,MAAI;AAAG,YAAG,UAAO;AAAQ,cAAI,+CAA6C,SAAO;AAAwE,iBAAO,YAAY;AAAA;AAAM,0BAAgB,QAAQ,KAAK,qBAAoB,SAAO,MAAI,MAAI;AAAG,wBAAc,QAAQ,KAAK,qBAAoB,SAAO,MAAI,MAAI;AAAG,YAAG;AAAO,8BAAoB,UAAQ,KAAG;AAAY,YAAG;AAAW,8BAAoB,cAAY,KAAG;AAAU,eAAO;AAAA;AAAE;AAAyB,eAAO,gBAAc;AAAA;AAAE,aAAO,mBAAiB;AAAc;AAA6D,YAAG,OAAO,sBAAoB;AAAa,cAAI;AAAuF,iBAAO;AAAA;AAAE,YAAG,CAAC;AAAa,cAAI;AAAqD,iBAAO;AAAA;AAAG,2BAAiB;AAAG,oBAAU;AAAE,YAAG,0BAAyB,cAAa,WAAS,KAAG;AAAQ,iBAAO,sCAAsC,WAAU,aAAY,MAAK,eAAc;AAAA;AAAK,YAAG;AAAM,iBAAO;AAAM,wBAAc;AAAE,wBAAc;AAAE,uBAAa;AAAE,0BAAgB;AAAE,wBAAc;AAAE,YAAG;AAAM,sBAAU,oBAAoB,QAAM;AAAG,uBAAW;AAAM,sBAAU,oBAAoB,OAAK,KAAG;AAAG,qBAAS,oBAAoB,OAAK,MAAI,OAAK;AAAE,6BAAiB,oBAAoB,OAAK,MAAI,OAAK;AAAE,cAAG;AAAc,kCAAoB,oBAAoB,OAAK,MAAI;AAAG,gCAAkB,oBAAoB,OAAK,MAAI;AAAG,kCAAoB,QAAQ,sCAAoC,QAAQ,sCAAoC;AAAgB,mCAAuB,iBAAgB,OAAK,IAAG,OAAK;AAAI,0BAAY,oBAAoB,OAAK,MAAI;AAAG,wBAAU,oBAAoB,OAAK,MAAI;AAAG,gCAAoB,OAAK,MAAI,KAAG;AAAgB,gCAAoB,OAAK,MAAI,KAAG;AAAA;AAAmB,0BAAY,oBAAoB,OAAK,MAAI;AAAG,wBAAU,oBAAoB,OAAK,MAAI;AAAA;AAAA;AAAS,sBAAU;AAAA;AAAQ,gCAAsB,aAAW;AAAE,YAAG;AAAmB,sBAAU,UAAU,IAAG;AAAA;AAAgB,uBAAW;AAAU,kBAAO,YAAU;AAAA;AAAG,gCAAqB,QAAQ;AAAK,qBAAU,GAAE,IAAE,OAAK,GAAE,EAAE;AAAE,8BAAqB,sBAAkB,KAAG,KAAG;AAAE,4BAAoB,eAAa,KAAG;AAAiB,4BAAoB,oBAAiB,MAAI,KAAG;AAAiB,sBAAY,oBAAiB;AAAI,4BAAoB,WAAS,KAAG;AAAQ,2BAAiB,CAAC,WAAoB,WAAoB,mBAAoC,aAAwB,WAAoB,UAAkB,cAAa,eAAc,aAAY,mBAAiB,oBAAmB,iBAAgB,KAAQ;AAA2B,YAAG;AAAwB,uBAAa,MAAI;AAAc,sBAAY,cAAa;AAAA;AAAmB,yBAAe;AAAA;AAAc,eAAO;AAAA;AAAE;AAAoB,YAAE,CAAC;AAAE,eAAO,KAAG,IAAG,CAAC,WAAW,IAAG,OAAK,CAAC,UAAU,IAAE;AAAA;AAAK;AAAwB,YAAG;AAAuB,iBAAO,oCAAoC,GAAE,GAAE;AAAM,gBAAO;AAAA,eAAW;AAAG,mBAAO;AAAA,eAAW;AAAG,8BAAgB;AAAW,mBAAO,cAAY;AAAA,eAAW;AAAA,eAAS;AAAA,eAAS;AAAA,eAAQ;AAAA,eAAS;AAAA,eAAS;AAAA,eAAQ;AAAA,eAAS;AAAA,eAAQ;AAAA,eAAQ;AAAA,eAAQ;AAAA,eAAQ;AAAA,eAAQ;AAAA,eAAS;AAAA,eAAQ;AAAA,eAAQ;AAAA,eAAS;AAAA,eAAS;AAAA,eAAO;AAAA,eAAQ;AAAA,eAAQ;AAAA,eAAS;AAAA,eAAS;AAAA,eAAQ;AAAA,eAAQ;AAAA,eAAQ;AAAA,eAAS;AAAA,eAAQ;AAAA,eAAQ;AAAA,eAAQ;AAAA,eAAQ;AAAA,eAAQ;AAAA,eAAS;AAAA,eAAQ;AAAA,eAAQ;AAAA,eAAQ;AAAA,eAAQ;AAAA,eAAQ;AAAA,eAAQ;AAAA,eAAQ;AAAA,eAAQ;AAAG,mBAAO;AAAA,eAAY;AAAA,eAAQ;AAAA,eAAS;AAAA,eAAS;AAAA,eAAS;AAAA,eAAQ;AAAA,eAAQ;AAAA,eAAS;AAAA,eAAS;AAAA,eAAS;AAAA,eAAS;AAAA,eAAS;AAAA,eAAS;AAAA,eAAS;AAAA,eAAS;AAAA,eAAS;AAAA,eAAS;AAAA,eAAS;AAAA,eAAS;AAAA,eAAS;AAAA,eAAQ;AAAA,eAAQ;AAAA,eAAS;AAAA,eAAS;AAAA,eAAS;AAAA,eA
,EAAE,IAAI,EAAE,GAAG,GAAG,EAAE,IAAI,EAAE;AAAA;AAEpC,UAAQ,aAAa;AAErB;AACE,WAAO,CAAE,GAAG,OAAM,EAAE,GAAG,MAAK,OAAM,GAAG,OAAM,EAAE,GAAG,MAAK;AAAA;AAEvD,UAAQ,cAAc;AAAA;;;ACnDtB,IAAA;AAAA,oBAA2B;AAC3B,kBAAyB;AAEzB,+BAA6B,AAAU,oBAAU,IAAI,qCAAsC,CAAC,AAAU,kBAAQ,iBAAiB,AAAU,kBAAQ;AACjJ,6BAA2B,qBAAqB,IAAI,sBAAsB;AAC1E,6BAA2B,qBAAqB,IAAI,qBAAqB;AACzE;AACE,qBAAiB,cAAc,MAAM,KAAK;AAC1C,WAAO;AAAA,MACL,GAAG,cAAc,IAAI,MAAM,GAAG,MAAM,GAAG;AAAA,MACvC,GAAG,cAAc,IAAI,MAAM,GAAG,MAAM,GAAG,WAAW;AAAA;AAAA;AAGtD;AACE,WAAO;AAAA,MACL,GAAG,AAAQ,cAAM,KAAK,MAAM,MAAM,IAAI,eAAe,GAAG,SAAS;AAAA,MACjE,GAAG,AAAQ,cAAM,KAAK,MAAM,MAAM,IAAI,eAAe,GAAG,QAAQ;AAAA;AAAA;AAUpE,qJAAmJ;AACjJ,4BAAwB,aAAa;AAErC,kCAA8B,yBAAyB,eAAe,UAAU,cAAc,QAAQ;AACtG,yBAAqB,gBAAgB,QAAQ,uBAAuB;AACpE,2BAAuB,AAAQ,mBAAW,eAAe,UAAU;AACnE,yBAAqB;AACrB,iBAAa,GAAG,IAAI,kBAAkB;AACpC,oCAA8B,yBAAyB,gBAAgB,cAAc,QAAQ;AAC7F,0BAAoB,AAAQ,uBAAe,sBAAsB,GAAG,sBAAsB,GAAG,kBAAkB;AAC/G,uBAAiB,AAAQ,mBAAW;AAAA,QAClC,GAAG,sBAAsB,IAAI;AAAA,QAC7B,GAAG,sBAAsB,IAAI;AAAA,SAC5B,CAAE,GAAG,YAAY,GAAG,GAAG,YAAY;AAAA;AAExC,kCAA8B,yBAAyB,gBAAgB,cAAc,QAAQ;AAC7F,kBAAc,aAAa,IAAI,sBAAsB,GAAG,sBAAsB,GAAG;AACjF,WAAO,CAAE,UAAU,gBAAgB,MAAM,AAAU,oBAAU,mBAAmB;AAAA;AAQlF;AACE,qBAAiB,OAAO,MAAM;AAC9B,qBAAiB,mBAAmB;AACpC,8BAA0B,IAAI,MAAM;AAEpC,WAAQ,gBAAgB,oBAAqB;AAC7C,sBAAkB,AAAQ,uBAAe,UAAU,cAAc;AACjE,sBAAkB,SAAS,MAAM;AAAA,MAC/B,OAAO;AAAA,MACP,MAAM,AAAU,oBAAU,SAAS;AAAA,MACnC,UAAU;AAAA;AAIZ,oBAAgB,WAAW,GAAG,QAAQ,GAAG,EAAE;AACzC,+BAAyB,mBAAmB;AAC5C,+BAAyB,mBAAmB;AAC5C,UAAI,kBAAkB,qBAAqB,CAAC,kBAAkB;AAC5D,0BAAkB,oBAAoB,yBAAyB,MAAM,kBAAkB,mBAAmB,kBAAkB,QAAQ,SAAS,cAAc;AAAA;AAAA;AAK/J,oBAAgB,GAAG,OAAO,UAAU,EAAE;AACpC,+BAAyB,mBAAmB;AAC5C,+BAAyB,mBAAmB;AAC5C,UAAI,kBAAkB,qBAAqB,CAAC,kBAAkB;AAC5D,0BAAkB,oBAAoB,yBAAyB,MAAM,kBAAkB,mBAAmB,kBAAkB,QAAQ,SAAS,cAAc;AAAA;AAAA;AAG/J,WAAO;AAAA;AAET,UAAQ,aAAa;AAAA;;;ACnFrB,IAAA;AAAA,qBAA4B;AAC5B,qBAA4B;AAC5B,kBAAyB;AAEzB,yEAAwE,GAAG;AACzE,WAAO,MAAM,KAAK,EAAG;AACnB,oCAA8B,UAAU,YAAY;AACpD,aAAO,AAAQ,wBAAgB,GAAG,GAAG,sBAAsB,GAAG,sBAAsB,MAAM;AAAA;AAAA;AAO9F;AACE,wCAAoC,kBAAkB,OAAO,UAAW,UAAU;AAChF,UAAI,CAAC,oCAAoC,eAAe,kBAAkB,UAAU;AAClF,kBAAU;AAAA;AAEZ,aAAO;AAAA,OACN;AACH,WAAO,8BAA8B,kBAAkB;AAAA;AAKzD,8BAA4B;AAwD5B,8JAA4J,iBAAiB;AAC3K,kBAAc;AACd,kBAAc,AAAW,mCAAwB,gBAAgB,qBAAqB;AACtF,6BAAyB,YAAY;AAGrC,WAAO,MAAM,SAAS,qBAAqB,CAAC,MAAM;AAEhD,mBAAa,MAAM;AAInB,8BAAwB,AAAQ,uBAAe,KAAK,MAAM,cAAc;AACxE,UAAI,oCAAoC,OAAO,kBAAkB,iBAAiB,KAAK,KAAK;AAAK;AAEjG,wBAAkB,AAAW,sBAAW,MAAM,cAAc,eAAe,cAAc,wBAAwB;AACjH,oBAAc,iBAAiB,OAAO,kBAAkB;AACxD,YAAM,KAAK,CAAE,WAAW;AAAA;AAE1B,WAAO;AAAA;AAET,UAAQ,sBAAsB;AAAA;;;ACvG9B,IAAA;AAAA,cAAqB;AAErB;AACE,WAAQ,IAAI,iBAAiB,IAAI;AAAA;AAGnC;AACE,WAAO,AAAI,yBAAqB,OAAO;AACrC,UAAI,gCAAgC,UAAU,WAAW,OAAO,UAAU,YAAY,OAAO;AAC3F,eAAO;AAAA;AAET,aAAO,KAAK,CAAC,UAAU,YAAY,UAAU;AAC7C,aAAO;AAAA,OACN;AAAA;AAEL,UAAQ,uBAAuB;AAE/B,SAAQ,mBAAmB,qBAAsB;AACjD;AACE,WAAO,UAAU,OAAO,EAAG,MAAM,MAAM,MAAM,QAAU,WAAY,GAAG,QAAW;AAAA,MAC/E,MAAM,KAAK,IAAI,MAAM;AAAA,MACrB,MAAM,KAAK,IAAI,MAAM;AAAA,MACrB,MAAM,KAAK,IAAI,MAAM;AAAA,MACrB,MAAM,KAAK,IAAI,MAAM;AAAA,QACnB;AAAA,MACF,MAAM;AAAA,MACN,MAAM;AAAA,MACN,MAAM;AAAA,MACN,MAAM;AAAA;AAAA;AAGV,UAAQ,iBAAiB;AAEzB;AACE,WAAQ,MAAM,MAAM,MAAM,QAAS,eAAe;AAClD,WAAO,CAAC,CAAE,GAAG,MAAM,GAAG,OAAQ,CAAE,GAAG,MAAM,GAAG,OAAQ,CAAE,GAAG,MAAM,GAAG,OAAQ,CAAE,GAAG,MAAM,GAAG;AAAA;AAE1F,UAAQ,uBAAuB;AAE/B;AACE,WAAO,QAAQ,IAAI,QAAQ,IAAI,aAAY,QAAO;AAAA;AAEpD,UAAQ,oBAAoB;AAE5B;AACE,WAAO;AAAA,MACL,OAAO,KAAK;AAAA,MACZ,WAAW,KAAK,UAAU,IAAI,EAAG,OAAO,MAAM,cAAgB;AAAA,QAC5D;AAAA,QACA;AAAA,QACA,UAAU,CAAE,GAAG,SAAS,IAAI,QAAQ,GAAG,SAAS,IAAI;AAAA;AAAA;AAAA;AAI1D,UAAQ,YAAY;AAEpB;AACE,kBAAc,OAAM,QAAQ;AA
C5B,oBAAgB,MAAM,eAAe,CAAC,SAAS;AAC/C,UAAM;AACN,WAAO;AAAA;AAET,UAAQ,WAAW;AAEnB;AACE,wBAAoB,MAAM,IAAI,UAAU,UAAU,MAAM,SAAS,uBAAuB,QAAQ;AAChG,WAAO;AAAA;AAET,UAAQ,oBAAoB;AAAA;;;ACpE5B,IAAA;AACA,yBAAgC;AAChC,yBAAgC;AAChC,iBAAsB;AAHtB;AAAA,IAME;AACE,WAAK,YAAY;AACjB,WAAK,eAAe;AAAA;AAAA,UAGhB;AACJ,aAAO,IAAI,QAAQ;AACjB,uBAAe,MAAM,MAAM;AAC3B,sBAAc,MAAM,MAAM;AAC1B,wBAAgB,AAAK,gBAAS,OAAO,CAAC,QAAO,KAAK,WAAW,QAAO,KAAK;AACzE,oBAAY,KAAK,UAAU,QAAQ;AACnC,iCAAyB,MAAM,AAAK,yBAAkB,CAAC,IAAI,eAAe,IAAI,SAAS,IAAI,iBAAiB,IAAI;AAChH,6BAAqB,iBAAiB;AACtC,8BAAsB,iBAAiB;AACvC,uCAA+B,iBAAiB;AAChD,uCAA+B,iBAAiB;AAChD,sBAAc,MAAM,AAAe,mCAAoB,cAAc,eAAe,wBAAwB,wBAAwB,KAAK,cAAc,QAAO,KAAK,eAAe,QAAO,KAAK,gBAAgB,QAAO,KAAK;AAC1N,4BAAoB,AAAK,yBAAkB,OAAO,CAAC,QAAQ,QAAQ,CAAC,QAAO,KAAK,WAAW,QAAO,KAAK;AACvG,YAAI,cAAc;AAClB,YAAI,QAAQ;AACZ,YAAI,gBAAgB;AACpB,YAAI,gBAAgB;AACpB,gBAAQ;AACR,gBAAQ;AAAA;AAAA;AAAA,IAIZ;AACE,WAAK,UAAU;AAAA;AAAA;AAGnB,UAAQ,UAAU;AAElB;AACE,uBAAmB,MAAM,eAAe,QAAO,KAAK;AACpD,sBAAkB,IAAmB,yBAAU,YAAY,KAAK;AAEhE,YAAQ,IAAI,sBAAsB,QAAO,KAAK,UAAU,MAAM,YAAY;AAC1E,WAAO,IAAI,QAAQ;AAAA;AAErB,UAAQ,OAAO;AAAA;;;AC9Cf,IAAA;AAAA,yBAAgC;AAChC,uBAA8B;AAC9B,yBAAgC;AAChC,oBAA2B;AAC3B,iBAAsB;AAEtB,UAAQ,OAAoB;AAC5B,UAAQ,UAAuB;AAE/B,UAAQ,YAA2B;AACnC,UAAQ,sBAAqC;AAC7C,UAAQ,eAAyB;AACjC,UAAQ,UAAoB;AAC5B,UAAQ,YAAsB;AAC9B,UAAQ,YAAsB;AAC9B,UAAQ,uBAA4B;AACpC,UAAQ,iBAAsB;AAC9B,UAAQ,uBAA4B;AACpC,UAAQ,oBAAyB;AACjC,UAAQ,YAAiB;AAAA;;;ACnBzB,IAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAqBE;AACE,WAAK,QAAQ;AACb,WAAK,UAAU,iBAAiB,IAAI,YAAY,CAAC,OAAO,UAAU,OAAO;AACzE,WAAK,gBAAgB,GAAG,SAAS,KAAK;AACtC,WAAK,kBAAkB,GAAG,SAAS,CAAC,WAAW;AAC/C,WAAK,wBAAwB,GAAG,SAAS,CAAC,YAAY,GAAG,YAAY;AAAA;AAAA,IAGvE;AACE,aAAO,GAAG,KAAK;AACb,2BAAmB,GAAG,MAAM,OAAO,CAAC,GAAG,IAAI,CAAC,IAAI;AAChD,yBAAiB,GAAG,MAAM,OAAO,CAAC,GAAG,IAAI,CAAC,IAAI;AAC9C,gCAAwB,GAAG,IAAI,GAAG,IAAI,YAAY,KAAK,kBAAkB,KAAK;AAC9E,6BAAqB,GAAG,IAAI,UAAU,KAAK;AAC3C,4BAAoB,GAAG,IAAI,GAAG,IAAI,iBAAiB,eAAe,KAAK;AACvE,0BAAkB,GAAG,IAAI,GAAG,IAAI,iBAAiB,eAAe,KAAK;AACrE,eAAO,GAAG,SAAS,CAAC,aAAa,YAAY;AAAA;AAAA;AAAA,IAIjD;AACE,aAAO,GAAG,KAAK;AACb,0BAAkB,GAAG,IAAI,GAAG,IAAI,iBAAiB,QAAQ,CAAC,IAAI,GAAG,KAAK,KAAK,kBAAkB,KAAK,QAAQ;AAC1G,eAAO,GAAG,IAAI,WAAW,KAAK;AAAA;AAAA;AAAA,UAI5B;AACJ,sBAAgB,KAAK,MAAM,QAAQ;AACnC,0BAAoB,QAAQ;AAC5B,cAAQ;AACR,qBAAe,GAAG,KAAK,MAAM,GAAG,QAAQ,GAAG,MAAM,aAAa,CAAC,GAAG,IAAI,CAAC,IAAI,KAAK;AAChF,wBAAkB,OAAO;AACzB,uBAAiB,GAAG,MAAM,aAAa,CAAC,GAAG,IAAI,CAAC,IAAI;AACpD,oBAAc,KAAK,eAAe;AAClC,eAAS;AACT,wBAAkB,MAAM,GAAG,MAAM,uBAAuB,OAAO,QAAQ,QAAO,UAAU,QAAO,cAAc,QAAO;AACpH,uBAAiB,UAAU;AAE3B,aAAO;AACP,gBAAU;AACV,oBAAc;AACd,6BAAuB;AACrB,YAAI,UAAU,aAAa,QAAO;AAChC,8BAAoB,GAAG,MAAM,OAAO,CAAC,UAAU,IAAI,CAAC,GAAG;AACvD,mCAAyB,GAAG,MAAM,aAAa,CAAC,UAAU,IAAI,CAAC,GAAG;AAClE,gCAAsB,GAAG,KAAK,MAAM,KAAK,mBAAmB,kBAAkB,UAAU,QAAQ,CAAC,IAAI;AACrG,2BAAiB;AACjB,gBAAM,KAAK,CAAE,KAAK,aAAa,eAAe,YAAY,UAAU;AAAA;AAAA;AAGxE,kBAAY;AACZ,YAAM;AACN,aAAO;AAAA;AAAA,UAGH;AACJ,0BAAoB,MAAM,MAAM;AAChC,yBAAmB,MAAM,MAAM;AAC/B,qBAAc,GAAG,KAAK,MAAM,MAAM,eAAe,CAAC,QAAO,WAAW,QAAO,YAAY,IAAI,OAAO,IAAI;AACtG,0BAAoB,MAAM,KAAK,SAAS,QAAO;AAC/C,aAAM;AACN,UAAI,CAAC,eAAe,YAAY,WAAW;AAAG,eAAO;AACrD,oBAAc;AACd,+BAAyB;AACvB,sBAAc,WAAW,IAAI;AAC7B,2BAAmB,MAAM,MAAM,GAAG;AAClC,yBAAiB,MAAM,MAAM,GAAG;AAChC,8BAAsB,WAAW,cAAc;AAC/C,mBAAW,IAAI;AACf,mBAAW,cAAc;AACzB,cAAM,KAAK,AAAI,oBAAoB,CAAE,YAAY,UAAU,eAAe,YAAY,WAAW,aAAc,CAAC,aAAa,QAAO,WAAW,cAAc,QAAO;AAAA;AAEtK,aAAO;AAAA;AAAA;AAGX,UAAQ,eAAe;AAAA;;;ACjGvB,IAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAqBA,gCAA8B,CAAC,GAAG;AAClC,kCAAgC
;AAChC,gCAA8B,CAAC,GAAG;AAClC,kCAAgC;AAChC,4BAA0B,CAAC,GAAG,GAAG,GAAG,IAAI,IAAI,GAAG;AAC/C,4CAA0C;AAC1C,qDAAmD;AA3BnD;AAAA,IA8BE;AACE,WAAK,cAAc;AACnB,WAAK,eAAe;AACpB,WAAK,YAAY;AACjB,WAAK,cAAc;AACnB,WAAK,UAAU;AACf,WAAK,gBAAgB;AAAA;AAAA,IAGvB;AACE,mCAA6B,cAAc,IAAI;AAC7C,sCAA8B,CAAC,GAAG,OAAO;AACzC,eAAO,AAAK,YAAY,uBAAuB;AAAA;AAEjD,4BAAsB,KAAK,8BAA8B;AACzD,aAAO,AAAI,WAAW,AAAI,YAAY,AAAI,SAAS,eAAe,yBAAyB;AAAA;AAAA,IAG7F;AACE,0BAAoB,KAAK,8BAA8B;AACvD,4BAAsB,AAAI,WAAW,AAAI,YAAY,AAAI,SAAS,aAAa,yBAAyB;AACxG,4BAAsB;AACtB,mBAAa,GAAG,IAAI,kBAAkB,QAAQ;AAC5C,sBAAc,KAAK,UAAU,kBAAkB,IAAI,MAAM,GAAG;AAAA;AAE9D,oBAAc,gBAAgB;AAC9B,aAAO;AAAA;AAAA,IAGT;AACE,sBAAgB,AAAI,WAAW;AAC/B,0BAAoB,CAAC,QAAQ,KAAK,KAAK,WAAW,QAAQ,KAAK,KAAK;AACpE,2BAAqB,UAAU,IAAI,WAAW;AAAA,QAC5C,YAAY,KAAM,OAAM,KAAK,KAAK,YAAY;AAAA,QAC9C,YAAY,KAAM,OAAM,KAAK,KAAK,YAAY;AAAA,QAC9C,MAAM;AAAA;AAER,mCAA6B,AAAK,oBAAoB,OAAO,CAAC,GAAG;AACjE,4BAAsB,aAAa,IAAI;AACrC,wBAAgB,AAAK,YAAY,OAAO;AACxC,eAAO,CAAC,GAAG,SAAS,MAAM;AAAA;AAE5B,oCAA8B,AAAK,sBAAsB;AACzD,wBAAkB,CAAC,GAAG,AAAI,aAAa,OAAO;AAC9C,gCAA0B;AAAA,QACxB,AAAK,IAAI,WAAW,sBAAsB;AAAA,QAC1C,AAAK,IAAI,WAAW,sBAAsB;AAAA;AAE5C,aAAO,cAAc,IAAI,WAAW;AAAA,QAClC,MAAM,KAAK,kBAAkB;AAAA,QAC7B,MAAM,KAAK,kBAAkB;AAAA,QAC7B,MAAM;AAAA;AAAA;AAAA,UAIJ;AACJ,WAAK;AACL,wBAAkB;AAGlB;AACA,UAAK,KAAK,UAAU,QAAO,cAAe,CAAC,QAAO;AAChD,gBAAQ,MAAM,KAAK,YAAY,mBAAmB,QAAO;AAEzD,YAAK,OAAM,MAAM,OAAO,OAAS,OAAM,MAAM,OAAO;AAAM,eAAK,UAAU;AAAA;AAI3E,UAAI,SAAU,MAAM,SAAS,KAAQ,OAAM,WAAW,KAAK,iBAAmB,KAAK,kBAAkB,QAAO,YAAa,CAAC,QAAO;AAC/H,aAAK,cAAc;AACnB,aAAK,gBAAgB;AACrB,+BAAuB;AAAO,eAAK,YAAY,KAAK;AACpD,YAAI,KAAK,YAAY,SAAS;AAAG,wBAAc;AAAA;AAEjD,oBAAc;AAId,sBAAgB,KAAK;AACnB,2BAAmB,KAAK,YAAY;AACpC,YAAI,CAAC;AAAY;AACjB,YAAI,QAAO;AACT,wBAAc,AAAK,gBAAgB,WAAW,cAAc,oCAAoC,WAAW,cAAc;AACzH,6BAAmB,AAAI,aAAa;AACpC,uCAA6B,CAAC,WAAW,KAAK,OAAM,MAAM,IAAI,WAAW,KAAK,OAAM,MAAM;AAC1F,+BAAqB,GAAG,MAAM,iBAAiB,QAAO,OAAO,GAAG;AAChE,iCAAuB,AAAK,oBAAoB,CAAC,OAAO;AACxD,yBAAe,cAAc,KAAK,uBAAuB,WAAW,eAAe,kBAAkB;AACrG,+BAAqB,AAAI,yBAAyB,QAAQ,cAAc,CAAC,KAAK,WAAW,KAAK;AAC9F,4BAAkB,aAAa,IAAI;AACnC,uBAAa;AACb,uBAAa;AACb,0CAAgC,MAAM,KAAK,aAAa,QAAQ;AAChE,oBAAU;AACV,kCAAwB,WAAW,WAAW;AAC9C,qBAAW;AACX,cAAI,mBAAmB,QAAO;AAC5B,sCAA0B,GAAG,QAAQ,WAAW,CAAC,IAAI;AACrD,8BAAkB,kBAAkB;AACpC,sBAAU;AACV,8BAAkB;AAClB,2BAAe,KAAK,mBAAmB,WAAW,QAAQ,OAAO;AACjE,oCAAwB,KAAK,uBAAuB;AACpD,iBAAK,YAAY,KAAK;AACtB,2BAAe;AAAA,cACb,WAAW;AAAA,cACX,YAAY;AAAA,cACZ,KAAK;AAAA,gBACH,SAAS,gBAAgB;AAAA,gBACzB,aAAa,gBAAgB;AAAA;AAAA;AAGjC,kBAAM,KAAK;AAAA;AAEX,iBAAK,YAAY,KAAK;AAAA;AAExB,oBAAU;AAAA;AAEV,2BAAiB,AAAI,WAAW,AAAI,YAAY,AAAI,SAAS,YAAY,yBAAyB;AAClG,yBAAe;AAAA,YACb,YAAY,WAAW;AAAA,YACvB,KAAK;AAAA,cACH,SAAS,SAAS;AAAA,cAClB,aAAa,SAAS;AAAA;AAAA;AAG1B,gBAAM,KAAK;AAAA;AAAA;AAGf,WAAK,cAAc,KAAK,YAAY,OAAO,OAAO,MAAM;AACxD,WAAK,gBAAgB,MAAM;AAC3B,aAAO;AAAA;AAAA,IAIT;AACE,iBAAW,UAAU,IAAI,OAAO,EAAE;AAClC,iBAAW,UAAU,IAAI,OAAO,EAAE;AAClC,yBAAmB,CAAC,KAAK,IAAI,GAAG,KAAK,KAAK,IAAI,GAAG;AACjD,uBAAiB,CAAC,KAAK,IAAI,GAAG,KAAK,KAAK,IAAI,GAAG;AAC/C,aAAO,CAAE,YAAY;AAAA;AAAA;AAIzB,UAAQ,eAAe;AAAA;;;AC9KvB,IAAA;AAAA,UAAQ,UAAU;AAAA,IAChB;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ
;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG
;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU
;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG
;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU
;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ
;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG
;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU
;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG
;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU
;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ
;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG
[auto-generated source-map mappings (base64 VLQ) from the bundled dist output — omitted]
;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ
;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG
;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU
;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG
;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU
;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ
;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG
;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU
;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG
;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU
;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ
;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG
;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU
;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG
[generated source-map mappings (base64 VLQ data) for the bundled output omitted]
;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA;AAAA;;;AC/viBd,IAAA;AAmBA,uBAA8B;AAC9B,mBAA0B;AAC1B,kBAAyB;AArBzB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAA
A;AAuBA,2BAAyB;AAAA,IACvB,OAAO,CAAC,GAAG,GAAG,GAAG;AAAA,IACjB,aAAa,CAAC,GAAG,GAAG,GAAG;AAAA,IACvB,cAAc,CAAC,GAAG,IAAI,IAAI;AAAA,IAC1B,YAAY,CAAC,IAAI,IAAI,IAAI;AAAA,IACzB,OAAO,CAAC,IAAI,IAAI,IAAI;AAAA,IACpB,UAAU,CAAC;AAAA;AA7Bb;AAAA,IAiCE;AACE,WAAK,WAAW;AAAA;AAAA,WAGX;AACL,aAAO;AAAA;AAAA,UAGH;AACJ,0BAAoB,MAAM,KAAK,SAAS,cAAc,OAAO;AAC7D,UAAI,CAAC;AAAa,eAAO;AACzB,oBAAc;AACd,+BAAyB;AACvB,4BAAoB;AACpB,YAAI,WAAW;AACb,4BAAkB,OAAO,KAAK;AAC5B,wBAAY,OAAO,iBAAiB,KAAK,IAAI,WAAW,WAAW,UAAU;AAAA;AAAA;AAGjF,cAAM,KAAK;AAAA,UACT,YAAY,WAAW;AAAA,UACvB,KAAK,WAAW,MAAM;AAAA,YACpB,WAAW,IAAI,QAAQ;AAAA,YACvB,WAAW,IAAI,QAAQ;AAAA,YACvB,WAAW,IAAI,YAAY,KAAK,WAAW,IAAI,QAAQ;AAAA,YACvD,WAAW,IAAI,YAAY,KAAK,WAAW,IAAI,QAAQ;AAAA,cACrD;AAAA,UACJ,WAAW,WAAW;AAAA,UACtB;AAAA;AAAA;AAGJ,aAAO;AAAA;AAAA;AAGX,UAAQ,WAAW;AAEnB;AACE,+CAA2C,MAAM,QAAQ,IAAI;AAAA,MAC3D,eAAe,QAAO,SAAS,WAAW,CAAE,WAAW,QAAO,SAAS,UAAU,SAAS;AAAA,MAC1F,eAAe,QAAO,SAAS,WAAW,CAAE,WAAW,QAAO,SAAS,UAAU,SAAS;AAAA;AAE5F,qBAAiB,IAAiB,0BAAa,mBAAmB,QAAO,WAAmB;AAC5F,iBAAa,IAAa,sBAAa,UAAU,eAAe,QAAO;AACvE,sBAAiB,IAAI,SAAS;AAE9B,YAAQ,IAAI,sBAAsB,QAAO,SAAS,UAAU,MAAM,YAAY;AAE9E,YAAQ,IAAI,sBAAsB,QAAO,SAAS,UAAU,MAAM,YAAY;AAC9E,WAAO;AAAA;AAET,UAAQ,OAAO;AAAA;;;ACnFf,IAAA;AAAA,UAAQ,OAAO;AACb,QAAI,CAAC;AAAK,aAAO;AACjB,qBAAiB;AACjB,uBAAmB;AAEjB,wBAAkB,KAAK,UAAU,KAAK,OAAQ,EAAE,SAAS;AACzD,yBAAmB,KAAK,UAAU,KAAK,OAAQ,EAAE,SAAS;AAC1D,mBAAa,KAAK,UAAU,KAAK,OAAQ,EAAE,SAAS;AACpD,UAAI,QAAQ,aAAa,cAAe,UAAU,SAAS,IAAI,KAAK,SAAS,KAAO,WAAW,SAAS,IAAI,KAAK,SAAS;AAAI,iBAAS,KAAK;AAAA,eACnI,QAAQ,aAAc,UAAU,SAAS,IAAI,KAAK,SAAS;AAAI,iBAAS,KAAK;AAAA,eAC7E,QAAQ,cAAe,WAAW,SAAS,IAAI,KAAK,SAAS;AAAI,iBAAS,KAAK;AAGxF,2BAAqB,KAAK,UAAU,KAAK,OAAQ,EAAE,SAAS;AAC5D,4BAAsB,KAAK,UAAU,KAAK,OAAQ,EAAE,SAAS;AAC7D,UAAI,gBAAgB;AAAe,iBAAS,KAAK,WAAY,aAAa,SAAS,IAAI,cAAc,SAAS,IAAK,SAAS;AAAA;AAE9H,WAAO;AAAA;AAGT,UAAQ,OAAO;AACb,QAAI,CAAC;AAAK,aAAO;AACjB,qBAAiB;AACjB,wBAAmB;AAIjB,UAAI,MAAK,QAAQ,MAAK,KAAK,SAAS;AAClC,0BAAkB,MAAK,KAAK,IAAI,KAAK,MAAK,KAAK,KAAK;AACpD,YAAI,KAAK,IAAI,aAAa;AAAI,mBAAS,KAAK;AAAA;AACvC,mBAAS,KAAK,UAAU,YAAY,IAAI,UAAU;AACvD,yBAAiB,KAAK,IAAI,MAAK,KAAK,KAAK,KAAK,MAAK,KAAK,KAAK,MAAM,KAAK,IAAI,MAAK,KAAK,KAAK,KAAK,MAAK,KAAK,KAAK;AAC/G,YAAI,WAAW;AAAK,mBAAS,KAAK;AAClC,0BAAkB,KAAK,IAAI,MAAK,KAAK,KAAK,KAAK,MAAK,KAAK,KAAK,MAAM,KAAK,IAAI,MAAK,KAAK,KAAK,KAAK,MAAK,KAAK,KAAK;AAChH,YAAI,YAAY;AAAK,mBAAS,KAAK;AACnC,0BAAkB,KAAK,IAAI,KAAK,MAAM,KAAK,IAAI,MAAK,KAAK,IAAI,KAAK,MAAK,KAAK,IAAI,MAAM,KAAK,IAAI,MAAK,KAAK,IAAI,KAAK,MAAK,KAAK,KAAK;AACjI,YAAI,YAAY;AAAI,mBAAS,KAAK,SAAS,KAAK,MAAM;AACtD,0BAAkB,MAAK,KAAK,KAAK;AACjC,YAAI,KAAK,IAAI,aAAa;AAAI,mBAAS,KAAK,QAAQ,YAAY,IAAI,OAAO;AAAA;AAAA;AAG/E,WAAO;AAAA;AAGT,UAAQ,OAAO;AACb,QAAI,CAAC;AAAK,aAAO;AACjB,qBAAiB;AACjB,wBAAmB;AACjB,sBAAgB;AAChB,kCAA4B,OAAO,QAAQ,MAAK;AAC9C,YAAI,WAAW;AAAY,kBAAQ,KAAK,CAAE,MAAM,OAAO,eAAe,UAAU,IAAI;AAAA;AAEtF,UAAI,WAAW,QAAQ,SAAS;AAC9B,wBAAgB,QAAQ,OAAO,aAAc,KAAK,SAAS,KAAK,EAAE,SAAS,KAAK,OAAO;AACvF,wBAAgB,QAAQ,OAAO,aAAc,KAAK,SAAS,KAAK,EAAE,SAAS,KAAK,OAAO;AACvF,iBAAS,KAAK,GAAG,QAAQ,gBAAgB,QAAQ;AAAA;AAAA;AAGrD,WAAO;AAAA;AAAA;;;AC1DT,IAAA;AAOA,uBAAqB;AACnB,qBAAiB;AACf,gBAAU,IAAI,OAAO,QAAQ,SAAS,gBAAgB;AACtD,aAAO,QAAQ,GAAG;AAChB,mBAAW,QAAQ;AACnB,eAAO;AAAA;AAAA;AAIX,qBAAiB;AACf,qBAAe,GAAG,aAAa;AAC/B,SAAG,aAAa,QAAQ;AACxB,SAAG,cAAc;AAEjB,UAAI,CAAC,GAAG,mBAAmB,QAAQ,GAAG;AACpC,cAAM,IAAI,MAAM,6BAA6B,GAAG,iBAAiB;AAAA;AAEnE,aAAO;AAAA;AAGT,SAAK,UAAU;AACf,SAAK,YAAY;AAEjB,iBAAa,SAAS,cAAc,GAAG;AACvC,iBAAa,SAAS,gBAAgB,GAAG;AAEzC,SAAK,KAAK,GAAG;AACb,OAAG,aAAa,KAAK,IAAI;AACzB,OAAG,aAAa,KAAK,IAAI;AACzB,OAAG,YAAY,KAAK;AAEpB,QAAI,CAAC,GAAG,oBAAoB,KAAK,IAAI,GAAG;AACtC,YAAM,IAAI,MAAM,0BAA0B,GAAG,kBAAkB,
KAAK;AAAA;AAGtE,OAAG,WAAW,KAAK;AAGnB,aAAS,cAAc,aAAa,KAAK;AACzC,oBAAgB,KAAK;AACnB,WAAK,UAAU,KAAK,GAAG,kBAAkB,KAAK,IAAI;AAAA;AAIpD,aAAS,cAAc,WAAW,KAAK;AACvC,aAAS,gBAAgB,WAAW,KAAK;AACzC,oBAAgB,KAAK;AACnB,WAAK,QAAQ,KAAK,GAAG,mBAAmB,KAAK,IAAI;AAAA;AAAA;AAIrD,2BAAyB;AACvB,QAAI,CAAC;AAAQ,eAAS;AACtB,qBAAiB;AACjB,yBAAqB;AACrB,uBAAmB;AACnB,mCAA+B;AAC/B,4BAAwB,CAAC,MAAM;AAC/B,uBAAmB;AACnB,iBAAa;AACb,kBAAc;AACd,wBAAoB;AACpB,0BAAsB;AACtB,oBAAgB,OAAO,UAAU,SAAS,cAAc;AAGxD,gCAA4B;AAE5B,eAAW,QAAQ,WAAW;AAC9B,QAAI,CAAC;AAAI,YAAM,IAAI,MAAM;AAEzB,SAAK,YAAY;AAEf,mBAAa,MAAM,UAAU,MAAM,KAAK,WAAW;AACnD,qBAAe,QAAQ;AAEvB,mBAAa,KAAK,CAAE,MAAM,QAAQ;AAAA;AAGpC,SAAK,QAAQ;AACX,qBAAe;AAAA;AAGjB,SAAK,QAAQ;AACX,cAAQ,OAAM,OAAO,OAAM;AAC3B,mBAAa;AAGb,UAAI,CAAC;AAAgB,yBAAiB,GAAG;AACzC,SAAG,YAAY,GAAG,YAAY;AAC9B,SAAG,cAAc,GAAG,YAAY,GAAG,gBAAgB,GAAG;AACtD,SAAG,cAAc,GAAG,YAAY,GAAG,gBAAgB,GAAG;AACtD,SAAG,cAAc,GAAG,YAAY,GAAG,oBAAoB,GAAG;AAC1D,SAAG,cAAc,GAAG,YAAY,GAAG,oBAAoB,GAAG;AAC1D,SAAG,WAAW,GAAG,YAAY,GAAG,GAAG,MAAM,GAAG,MAAM,GAAG,eAAe;AAGpE,UAAI,aAAa,WAAW;AAE1B;AACA,eAAO;AAAA;AAGT,mBAAa,GAAG,IAAI,aAAa,QAAQ;AACvC,uBAAgB,MAAM,aAAa,SAAS;AAC5C,kBAAU,aAAa;AACvB,UAAE,KAAK,MAAM,MAAM,EAAE,QAAQ;AAAA;AAG/B,aAAO;AAAA;AAGT,oBAAgB;AAEd,UAAI,UAAU,UAAU,WAAW;AAAW;AAAA;AAE9C,cAAQ,QAAQ;AAChB,eAAS;AACT,cAAQ,SAAS;AACjB,gBAAU;AAGV,UAAI,CAAC;AAEH,yBAAiB,IAAI,aAAa;AAAA,UAChC;AAAA,UAAI;AAAA,UAAI;AAAA,UAAG;AAAA,UAAG;AAAA,UAAG;AAAA,UAAI;AAAA,UAAG;AAAA,UAAG;AAAA,UAAI;AAAA,UAAG;AAAA,UAAG;AAAA,UACrC;AAAA,UAAI;AAAA,UAAG;AAAA,UAAG;AAAA,UAAG;AAAA,UAAG;AAAA,UAAI;AAAA,UAAG;AAAA,UAAG;AAAA,UAAG;AAAA,UAAG;AAAA,UAAG;AAAA;AAGrC,QAAC,gBAAgB,GAAG,gBAAgB,GAAG,WAAW,GAAG,cAAc;AACnE,WAAG,WAAW,GAAG,cAAc,UAAU,GAAG;AAI5C,WAAG,YAAY,GAAG,gCAAgC;AAAA;AAGpD,SAAG,SAAS,GAAG,GAAG,QAAQ;AAG1B,0BAAoB,CAAC,MAAM;AAAA;AAG7B,gCAA4B;AAC1B,wBAAkB,SAAS,kBAAkB,UAC1C,0BAA0B,QAAQ;AAErC,aAAO,kBAAkB;AAAA;AAG3B,sCAAkC;AAChC,kBAAY,GAAG;AACf,SAAG,gBAAgB,GAAG,aAAa;AAEnC,2BAAqB,GAAG;AACxB,SAAG,iBAAiB,GAAG,cAAc;AAErC,sBAAgB,GAAG;AACnB,SAAG,YAAY,GAAG,YAAY;AAC9B,SAAG,WAAW,GAAG,YAAY,GAAG,GAAG,MAAM,OAAO,QAAQ,GAAG,GAAG,MAAM,GAAG,eAAe;AAEtF,SAAG,cAAc,GAAG,YAAY,GAAG,oBAAoB,GAAG;AAC1D,SAAG,cAAc,GAAG,YAAY,GAAG,oBAAoB,GAAG;AAC1D,SAAG,cAAc,GAAG,YAAY,GAAG,gBAAgB,GAAG;AACtD,SAAG,cAAc,GAAG,YAAY,GAAG,gBAAgB,GAAG;AAEtD,SAAG,qBAAqB,GAAG,aAAa,GAAG,mBAAmB,GAAG,YAAY,SAAS;AAEtF,SAAG,YAAY,GAAG,YAAY;AAC9B,SAAG,gBAAgB,GAAG,aAAa;AAEnC,aAAO,CAAE,KAAK;AAAA;AAGhB,kBAAc;AACZ,mBAAa;AACb,mBAAa;AACb,kBAAY;AAGZ,UAAI,eAAe;AAEjB,iBAAS;AAAA;AAGT,iBAAS,oBAAoB,0BAA0B;AAAA;AAEzD;AAGA,UAAI,gBAAgB,CAAE,SAAQ,KAAK;AAGjC,iBAAS;AACT,gBAAQ,aAAa,MAAM;AAAA;AAG3B,mCAA4B,4BAA2B,KAAK;AAC5D,iBAAS,oBAAoB,0BAA0B;AAAA;AAIzD,SAAG,YAAY,GAAG,YAAY;AAC9B,SAAG,gBAAgB,GAAG,aAAa;AAEnC,SAAG,UAAU,gBAAgB,QAAQ,OAAQ,QAAQ,KAAK;AAC1D,SAAG,WAAW,GAAG,WAAW,GAAG;AAAA;AAGjC,2BAAuB;AACrB,UAAI,oBAAoB;AACtB,0BAAkB,oBAAoB;AACtC,WAAG,WAAW,gBAAgB;AAC9B,eAAO;AAAA;AAIT,wBAAkB,IAAI,aAAa,IAAI,OAAO,iBAAiB;AAE/D,wBAAkB,aAAa;AAC/B,uBAAiB,IAAI;AACrB,SAAG,wBAAwB,gBAAgB,UAAU;AACrD,SAAG,oBAAoB,gBAAgB,UAAU,KAAK,GAAG,GAAG,OAAO,OAAO,UAAU,IAAI;AACxF,SAAG,wBAAwB,gBAAgB,UAAU;AACrD,SAAG,oBAAoB,gBAAgB,UAAU,IAAI,GAAG,GAAG,OAAO,OAAO,UAAU,IAAI;AAEvF,0BAAoB,kBAAkB;AACtC,aAAO;AAAA;AAGT,eAAW,CAAE,cAAc;AAE3B,iBAAa;AACb,WAAO,kBAAkB;AAAA,MACvB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MAEA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,KAAK;AAEP,WAAO,oBAAoB;AAAA,MACzB;AAAA,MACA;AAAA,MACA;AAAA,MAEA;AAAA,MACA;AAAA,MACA;AAAA,MACA,KAAK;AAEP,kBAAc;AAKd,YAAQ,cAAc;AAEpB,gBAAU,IAAI,aAAa;AAC3B,QAAE,MAAM;AACR,QAAE,MAAM;AACR,QAAE,OAAO;AACT,QAAE,OAAO;AAGT,qBAAgB,EAAE,QAAQ,KAAK,EAAE,OAAO,KAAK,EAAE,OAAO,KAAK,EAAE,QAAQ,KAAK,EAAE,QA
AQ,KAAK,EAAE,QAAQ,KAAK,EAAE,QAAQ,KAAK,EAAE,QAAQ,IAC7H,QAAQ,YAAY,OAAO,gBAC3B,QAAQ,YAAY,OAAO;AAE/B,sBAAgB,eAAe;AAC/B,SAAG,WAAW,QAAQ,QAAQ,GAAG;AACjC;AAAA;AAGF,YAAQ,YAAY,SAAS;AAC7B,YAAQ,YAAY,OAAO,aAAa;AAAA,MACtC;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MAEA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,KAAK;AACP,YAAQ,YAAY,OAAO,gBAAgB;AAAA,MACzC;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MAEA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,KAAK;AAEP,YAAQ,aAAa;AACnB,gBAAW,eAAc,KAAK;AAC9B,cAAQ,YAAY;AAAA,QAClB;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QACZ;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QACZ;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QACZ;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA;AAAA;AAIhB,YAAQ,aAAa;AACnB,gBAAW,WAAU,KAAK,IAAI,IAAI;AAClC,gBAAY,KAAI,KAAK;AACrB,cAAQ,YAAY;AAAA,QAClB;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QACZ;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QACZ;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QACZ;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA;AAAA;AAIhB,YAAQ,aAAa;AACnB,cAAQ,WAAW;AAAA;AAGrB,YAAQ,WAAW;AACjB,gBAAW,WAAU,KAAK;AAC1B,gBAAU,OAAQ,KAAI;AAEtB,cAAQ,YAAY;AAAA,QAClB;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QACZ;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QACZ;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QACZ;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA;AAAA;AAIhB,YAAQ,WAAW;AACjB,cAAQ,SAAS;AAAA;AAGnB,YAAQ,MAAM;AACZ,iBAAY,aAAY,KAAK,MAAM,KAAK;AACxC,kBAAY,KAAK,IAAI;AACrB,kBAAY,KAAK,IAAI;AACrB,mBAAa;AACb,mBAAa;AACb,mBAAa;AAEb,cAAQ,YAAY;AAAA,QAClB,OAAO,MAAO,KAAI,QAAQ,MAAO,CAAC;AAAA,QAAO,OAAO,MAAO,CAAC,OAAQ,MAAO,CAAC;AAAA,QAAO,OAAO,MAAO,CAAC,OAAQ,MAAO,KAAI;AAAA,QAAO;AAAA,QAAG;AAAA,QAC3H,OAAO,MAAO,CAAC,OAAQ,MAAO;AAAA,QAAQ,OAAO,MAAO,KAAI,QAAQ,MAAO;AAAA,QAAQ,OAAO,MAAO,CAAC,OAAQ,MAAO;AAAA,QAAS;AAAA,QAAG;AAAA,QACzH,OAAO,MAAO,CAAC,OAAQ,MAAO,CAAE,KAAI;AAAA,QAAQ,OAAO,MAAO,CAAC,OAAQ,MAAO;AAAA,QAAO,OAAO,MAAO,KAAI,QAAQ,MAAO;AAAA,QAAO;AAAA,QAAG;AAAA,QAC5H;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA;AAAA;AAIhB,YAAQ,sBAAsB;AAC5B,cAAQ,YAAY;AAAA,QAClB;AAAA,QAAW;AAAA,QAAW;AAAA,QAAW;AAAA,QAAG;AAAA,QACpC;AAAA,QAAW;AAAA,QAAW;AAAA,QAAW;AAAA,QAAG;AAAA,QACpC;AAAA,QAAW;AAAA,QAAW;AAAA,QAAW;AAAA,QAAG;AAAA,QACpC;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA;AAAA;AAIhB,YAAQ,QAAQ;AACd,cAAQ,YAAY;AAAA,QAClB;AAAA,QAAO;AAAA,QAAW;AAAA,QAAY;AAAA,QAAG;AAAA,QACjC;AAAA,QAAO;AAAA,QAAW;AAAA,QAAY;AAAA,QAAG;AAAA,QACjC;AAAA,QAAO;AAAA,QAAW;AAAA,QAAY;AAAA,QAAG;AAAA,QACjC;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA;AAAA;AAIhB,YAAQ,UAAU;AAChB,cAAQ,YAAY;AAAA,QAClB;AAAA,QAAoB;AAAA,QAAqB;AAAA,QAAqB;AAAA,QAAG;AAAA,QACjE;AAAA,QAAuB;AAAA,QAAoB;AAAA,QAAqB;AAAA,QAAG;AAAA,QACnE;AAAA,QAAqB;AAAA,QAAsB;AAAA,QAAqB;AAAA,QAAG;AAAA,QACnE;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA;AAAA;AAIhB,YAAQ,iBAAiB;AACvB,cAAQ,YAAY;AAAA,QAClB;AAAA,QAAoB;AAAA,QAAoB;AAAA,QAAsB;AAAA,QAAG;AAAA,QACjE;AAAA,QAAqB;AAAA,QAAoB;AAAA,QAAqB;AAAA,QAAG;AAAA,QACjE;AAAA,QAAoB;AAAA,QAAqB;AAAA,QAAoB;AAAA,QAAG;AAAA,QAChE;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA;AAAA;AAIhB,YAAQ,aAAa;AACnB,cAAQ,YAAY;AAAA,QAClB;AAAA,QAAoB;AAAA,QAAqB;AAAA,QAAsB;AAAA,QAAG;AAAA,QAClE;AAAA,QAAsB;AAAA,QAAoB;AAAA,QAAsB;AAAA,QAAG;AAAA,QACnE;AAAA,QAAsB;AAAA,QAAqB;AAAA,QAAoB;AAAA,QAAG;AAAA,QAClE;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA;AAAA;AAIhB,YAAQ,cAAc;AACpB,cAAQ,YAAY;AAAA,QAClB;AAAA,QAAoB;AAAA,QAAqB;AAAA,QAAsB;AAAA,QAAG;AAAA,QAClE;AAAA,QAAqB;AAAA,QAAoB;AAAA,QAAsB;AAAA,QAAG;AAAA,QAClE;AAAA,QAAoB;AAAA,QAAqB;AAAA,QAAmB;AAAA,QAAG;AAAA,QAC/D;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA;AAAA;AAIhB,YAAQ,WAAW;AACjB,cAAQ,YAAY;AAAA,Q
AClB;AAAA,QAAO;AAAA,QAAQ;AAAA,QAAQ;AAAA,QAAG;AAAA,QAC1B;AAAA,QAAQ;AAAA,QAAO;AAAA,QAAQ;AAAA,QAAG;AAAA,QAC1B;AAAA,QAAQ;AAAA,QAAQ;AAAA,QAAO;AAAA,QAAG;AAAA,QAC1B;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA;AAAA;AAIhB,YAAQ,aAAa;AACnB,cAAQ,YAAY;AAAA,QAClB;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QACZ;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QACZ;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QACZ;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA;AAAA;AAOhB,YAAQ,cAAc;AACpB,gBAAU,IAAI,aAAa;AAC3B,yBAAmB,IAAI;AACvB,yBAAmB,IAAI;AAEvB,sBAAgB,eAAe,QAAQ,YAAY;AACnD,SAAG,WAAW,QAAQ,QAAQ,GAAG;AACjC,SAAG,UAAU,QAAQ,QAAQ,IAAI,YAAY;AAC7C;AAAA;AAGF,YAAQ,YAAY,SAAS;AAAA,MAC3B;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MAEA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MAEA;AAAA,MACA;AAAA,MACA;AAAA,MAEA;AAAA,MACA;AAAA,MACA;AAAA,MAEA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,KAAK;AAEP,YAAQ,cAAc;AACpB,cAAQ,YAAY,KAAK,MAAM;AAAA,QAC7B;AAAA,QAAG;AAAA,QAAG;AAAA,QACN;AAAA,QAAG;AAAA,QAAI;AAAA,QACP;AAAA,QAAG;AAAA,QAAG;AAAA;AAAA;AAIV,YAAQ,SAAS;AACf,cAAQ,YAAY,KAAK,MAAM;AAAA,QAC7B;AAAA,QAAI;AAAA,QAAG;AAAA,QACP;AAAA,QAAI;AAAA,QAAG;AAAA,QACP;AAAA,QAAI;AAAA,QAAG;AAAA;AAAA;AAIX,YAAQ,SAAS;AACf,cAAQ,YAAY,KAAK,MAAM;AAAA,QAC7B;AAAA,QAAI;AAAA,QAAI;AAAA,QACR;AAAA,QAAG;AAAA,QAAG;AAAA,QACN;AAAA,QAAG;AAAA,QAAG;AAAA;AAAA;AAIV,YAAQ,UAAU;AAChB,gBAAU,UAAU;AACpB,cAAQ,YAAY,KAAK,MAAM;AAAA,QAC7B;AAAA,QAAG,KAAK;AAAA,QAAG;AAAA,QACX,KAAK;AAAA,QAAG,IAAI,IAAI;AAAA,QAAG,KAAK;AAAA,QACxB;AAAA,QAAG,KAAK;AAAA,QAAG;AAAA;AAAA;AAIf,YAAQ,SAAS;AACf,gBAAU,QAAQ;AAClB,cAAQ,YAAY,KAAK,MAAM;AAAA,QAC7B,KAAK;AAAA,QAAG,KAAK;AAAA,QAAG;AAAA,QAChB,KAAK;AAAA,QAAG;AAAA,QAAG,IAAI;AAAA,QACf;AAAA,QAAG,IAAI;AAAA,QAAG,IAAI;AAAA;AAAA;AAOlB,YAAQ,OAAO;AACb,wBAAmB,OAAO,IAAK;AAC/B,wBAAmB,OAAO,IAAK;AAE/B,sBAAgB,eAAe,QAAQ,KAAK;AAG5C,SAAG,UAAU,QAAQ,QAAQ,IAAI,GAAG;AACpC,YAAM,KAAK;AAGX,SAAG,UAAU,QAAQ,QAAQ,IAAI,WAAW;AAC5C;AAAA;AAGF,YAAQ,KAAK,SAAS;AAAA,MACpB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MAEA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,KAAK;AAKP,YAAQ,WAAW;AACjB,wBAAmB,OAAQ;AAC3B,wBAAmB,OAAQ;AAE3B,sBAAgB,eAAe,QAAQ,SAAS;AAGhD,SAAG,UAAU,QAAQ,QAAQ,MAAM,WAAW;AAC9C;AAAA;AAGF,YAAQ,SAAS,SAAS;AAAA,MACxB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MAEA;AAAA,MACA;AAAA,MACA;AAAA,MAEA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,KAAK;AAAA;AAGT,UAAQ,SAAS;AAAA;;;AC7lBjB,IAAA;AACA,kBAAyB;AAGzB,iBAAe;AACf,kBAAgB;AAKhB;AACE;AACA,QAAI,iBAAiB,GAAG;AACtB,gBAAS,GAAG,MAAM;AAAA;AAElB,4BAAsB,MAAM,gBAAgB,MAAM,cAAc,MAAM,SAAU,MAAM,SAAU,MAAM,MAAM,KAAK;AACjH,6BAAuB,MAAM,iBAAiB,MAAM,eAAe,MAAM,UAAW,MAAM,SAAU,MAAM,MAAM,KAAK;AACrH,wBAAkB;AAClB,yBAAmB;AACnB,UAAI,QAAO,OAAO,QAAQ;AAAG,sBAAc,QAAO,OAAO;AAAA,eAChD,QAAO,OAAO,SAAS;AAAG,sBAAc,gBAAiB,SAAO,OAAO,SAAS;AACzF,UAAI,QAAO,OAAO,SAAS;AAAG,uBAAe,QAAO,OAAO;AAAA,eAClD,QAAO,OAAO,QAAQ;AAAG,uBAAe,iBAAkB,SAAO,OAAO,QAAQ;AACzF,UAAI,CAAC,YAAa,SAAS,UAAU,eAAiB,SAAS,WAAW;AACxE,mBAAY,OAAO,oBAAoB,cAAe,IAAI,gBAAgB,aAAa,gBAAgB,SAAS,cAAc;AAC9H,YAAI,SAAS,UAAU;AAAa,mBAAS,QAAQ;AACrD,YAAI,SAAS,WAAW;AAAc,mBAAS,SAAS;AAAA;AAE1D,kBAAY,SAAS,WAAW;AAChC,UAAI,iBAAiB;AAAW,YAAI,aAAa,OAAO,GAAG;AAAA;AACtD,YAAI,UAAU,OAAO,GAAG,GAAG,eAAe,gBAAgB,GAAG,GAAG,SAAS,OAAO,SAAS;AAC9F,UAAI,QAAO,OAAO;AAChB,YAAI,CAAC,KAAK,MAAM,CAAC,aAAc,SAAS,UAAU,UAAU,SAAW,SAAS,WAAW,UAAU;AACnG,sBAAa,OAAO,oBAAoB,cAAe,IAAI,gBAAgB,SAAS,OAAO,SAAS,UAAU,SAAS,cAAc;AACrI,cAAI,UAAU,UAAU,SAAS;AAAO,sBAAU,QAAQ,SAAS;AACnE,cAAI,UAAU,WAAW,SAAS;AAAQ,sBAAU,SAAS,SAAS;AACtE,eAAK,KAAK,GAAG,IAAI,MAAM,aAAa,IAAY,eAAO,CAAE,Q
AAQ,cAAe;AAAA;AAElF,aAAK,GAAG;AACR,aAAK,GAAG,UAAU,cAAc,QAAO,OAAO;AAC9C,YAAI,QAAO,OAAO,aAAa;AAAG,eAAK,GAAG,UAAU,YAAY,QAAO,OAAO;AAC9E,YAAI,QAAO,OAAO,cAAc;AAAG,eAAK,GAAG,UAAU,WAAW,QAAO,OAAO;AAC9E,YAAI,QAAO,OAAO,SAAS;AAAG,eAAK,GAAG,UAAU,QAAQ,QAAO,OAAO;AACtE,YAAI,QAAO,OAAO,eAAe;AAAG,eAAK,GAAG,UAAU,cAAc,QAAO,OAAO;AAClF,YAAI,QAAO,OAAO,QAAQ;AAAG,eAAK,GAAG,UAAU,OAAO,QAAO,OAAO;AACpE,YAAI,QAAO,OAAO;AAAU,eAAK,GAAG,UAAU;AAC9C,YAAI,QAAO,OAAO;AAAO,eAAK,GAAG,UAAU;AAC3C,YAAI,QAAO,OAAO;AAAS,eAAK,GAAG,UAAU;AAC7C,YAAI,QAAO,OAAO;AAAO,eAAK,GAAG,UAAU;AAC3C,YAAI,QAAO,OAAO;AAAY,eAAK,GAAG,UAAU;AAChD,YAAI,QAAO,OAAO;AAAa,eAAK,GAAG,UAAU;AACjD,YAAI,QAAO,OAAO;AAAU,eAAK,GAAG,UAAU;AAC9C,YAAI,QAAO,OAAO,aAAa;AAAG,eAAK,GAAG,UAAU,YAAY,QAAO,OAAO;AAC9E,aAAK,GAAG,MAAM;AAGd,mBAAW;AACX,YAAI;AACF,2BAAiB,IAAI,WAAW,UAAU,QAAQ,UAAU,SAAS;AACrE,4BAAkB,IAAI,WAAW,UAAU,QAAQ,UAAU,SAAS;AACtE,aAAG,WAAW,GAAG,GAAG,UAAU,OAAO,UAAU,QAAQ,GAAG,MAAM,GAAG,eAAe;AAGlF,kBAAQ;AACR,uBAAa,UAAU,SAAS,GAAG,KAAK,GAAG;AACzC,yBAAa,GAAG,IAAI,UAAU,OAAO;AACnC,4BAAe,KAAI,IAAI,UAAU,SAAS;AAC1C,wBAAU,OAAO,SAAS,QAAQ;AAClC,wBAAU,OAAO,SAAS,QAAQ;AAClC,wBAAU,OAAO,SAAS,QAAQ;AAAA;AAAA;AAGtC,oBAAU,OAAO;AAAA;AAAA;AAGnB,oBAAY;AAAA;AAEd;AACA,UAAI,UAAU;AACZ,sBAAc,CAAC,UAAU,QAAQ,UAAU,OAAO;AAClD,iBAAS,GAAG,SAAS,UAAU,MAAM,OAAO;AAAA,iBAClC,QAAO,YAAY,WAAa,qBAAqB;AAE/D,iBAAS,GAAG,QAAQ,WAAW;AAAA;AAG/B,2BAAoB,OAAO,oBAAoB,cAAe,IAAI,gBAAgB,aAAa,gBAAgB,SAAS,cAAc;AACtI,mBAAW,QAAQ;AACnB,mBAAW,SAAS;AACpB,wBAAgB,WAAW,WAAW;AACtC,gBAAQ,UAAU,WAAW,GAAG;AAChC,sBAAa,QAAQ,aAAa,GAAG,GAAG,aAAa;AACrD,iBAAS,GAAG,QAAQ,WAAW;AAAA;AAEjC,qBAAe,OAAO;AACtB,gBAAS,OAAO,WAAW;AAC3B,aAAO;AACP,aAAO;AAAA;AAET,WAAO,CAAE,iBAAQ,QAAQ,QAAO,OAAO,SAAS,YAAY;AAAA;AAG9D,UAAQ,UAAU;AAAA;;;AC5FlB,MAAA,KAAoB;;;ACVpB;;;;;;;;;;;;;;;;AAsBO,MAAM,kBAAkB;AACxB,wBAAwB;AAvB/B;EAiDE;AAAoB,SAAA,UAAA;AAAgC,SAAA,YAAA;AAH5C,SAAA,OAAO,IAAI;AACX,SAAA,eAAe;;EAIvB;AACE,QAAI,CAAC,KAAK,KAAK,IAAI;AACjB,WAAK,UAAU,SAAS,KAAK,SAAS;;AAExC,WAAO,KAAK,KAAK,IAAI;;EAGvB;AACE,SAAK;AACL,SAAK,KAAK,IAAI,QAAQ;;EAGxB;AACE,WAAO,KAAK,KAAK,IAAI;;EAGvB;AACE,SAAK;AACL,WAAO,KAAK,KAAK,OAAO;;EAG1B;AACE,WAAO,KAAK;;;AAzEhB;EAiGE;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AAEE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,KAAK,qBAAqB,KAAK,kBAAkB;;EAG1D;AAGE,WAAO,kBAAkB;;EAG3B,kBACK,GAAG,GAAG,YAAY,YAAY,MAAM,YAAY;AAEnD,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AAEE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AAEE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,k
BAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B,aACK,OAAO,QAAQ,UAAU,MAAM,YAAY;AAE9C,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AAEE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B,sBACK,OAAO,QAAQ,UAAU,MAAM,YAAY;AAE9C,WAAO,kBAAkB;;EAG3B;AAEE,WAAO,kBAAkB;;EAE3B;AAEE,WAAO,kBAAkB;;EAE3B;AAEE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AAEE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AAEE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AAEE,WAAO,kBAAkB;;EAG3B;AAEE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AAEE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AAEE,WAAO,kBAAkB;;EAG3B;AAEE,WAAO,kBAAkB;;EAG3B;AAEE,WAAO,kBAAkB;;EAG3B;AAGE,WAAO,kBAAkB;;EAG3B;AAEE,WAAO,kBAAkB;;EAG3B;AAGE,WAAO,kBAAkB;;EAG3B;AAEE,WAAO,kBAAkB;;EAG3B;AAIE,WAAO,kBAAkB;;EAG3B;AAGE,WAAO,kBAAkB;;EAG3B;AAGE,WAAO,kBAAkB;;EAG3B;AAGE,WAAO,kBAAkB;;EAG3B;AAEE,WAAO,kBAAkB;;EAG3B;AAEE,WAAO,kBAAkB;;EAG3B;AAGE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AAIE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAI3B;AACE,WAAO,kBAAkB;;EAG3B;AAGE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AAEE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;;AAI7B;AACE,QAAM,IAAI,MACN,IAAI;;;;ACzpBV;;;;;;;;;;;;;;;;AAiCM,iBAAkB;AAEtB,gBAAc,MAAM;AACpB,aAAW;AACX,cAAY;AAEZ,SAAO,UAAU;AAEf,YAAS,KAAK,WAAW,UAAW;AAEpC;AAEA,WAAO,MAAM;AACb,UAAM,WAAW,MAAM;AACvB,UAAM,SAAS;;;AAKb;AACJ,SAAO,KAAK,IAAI,MAAK,KAAK,IAAI,GAAG;;AAG7B;AACJ,SAAO,MAAM,MAAM,IAAI,MAAM,MAAM;;AAG/B;AACJ,aAAU;AACV,eAAa,GAAG,IAAI,IAAI,QAAQ;AAC9B,YAAO,IAAI;;AAEb,SAAO;;AAUH;AACJ,YAAU,KAAK;AACf,SAAQ,IAAI,IAAM,KAAI,KAAK;;AAIvB;AACJ,eAAa;AACb,eAAa,GAAG,IAAI,EAAE,QAAQ;AAC5B,iBAAa,OAAO,EAAE,MAAM,OAAO,EAAE;AACrC,cAAU,OAAO;;AAEnB,SAAO;;AAkBH;AACJ,MAAI,CAAC;AACH,UAAM,IAAI,MAAM,OAAO,QAAQ,WAAW,MAAM;;;AAI9C,gEACuD;AAC3D,SACI,YAAY,QAAQ,SACpB,MAAM,qBAAqB,WAAW,cAAc;;AAGpD;AACJ,SACI,KAAK,MACL,MAAM;;AAsBN,+BAEsC,qBAAqB;AAC/D,MAAI,UAAU;AACZ,aAAS;;AAEX,MAAI,MAAM,QAAQ,QAAQ,aAAa,QAAQ,CAAC;AAC9C,iBAAa,GAAG,IAAI,IAAI,QAAQ,EAAE;AAChC,cAAQ,IAAI,IAAI,QAAQ;;;AAG1B,WAAO,KAAK;;AAEd,SAAO;;AAcH;AACJ,MAAI,MAAM,WAAW;AAEnB,WAAO;;AAET,aAAW,MAAM;AACjB,eAAa,GAAG,IAAI,MAAM,QAAQ;AAChC,YAAQ,MAAM;;AAEhB,SAAO;;AAGH;AACJ,SAAO,MAAM,WAAW;;AAGpB;AACJ,MAAI,OAAO;AACT,WAAO;;AAET,MAAI,MAAM,QAAQ,MAAM;AACtB,WAAO;;AAGT,MAAI,GAAG,WAAW,GAAG;AACnB,WAAO;;AAET,eAAa,GAAG,IAAI,GAAG,QAAQ;AAC7B,QAAI,GAAG,OAAO,GAAG;AACf,aAAO;;;AAGX,SAAO;;AAGH;AACJ,SAAO,IAAI,MAAM;;AAGb;AAEJ,MAAK,KAAa,QAAQ;AAExB,WAAQ,KAAa,KAAK;;AAE5B,MAAI,MAAM;AACR,WAAO;aACE,MAAM;AACf,WAAO;;AAEP,gBAAY,KAAK,IAAI,IAAI;AACzB,WAAQ,OAAM,KAAM,OAAM;;;AAIxB;
AACJ,gBAAc,KAAK,KAAK,KAAK,KAAK;AAClC,SAAO,CAAC,OAAO,KAAK,KAAK,OAAO;;AAe5B;AACJ,0BAAwB,IAAI,YAAY;AACxC,eAAa,GAAG,IAAI,GAAG,EAAE;AACvB,oBAAgB,KAAK;;AAEvB,UAAQ;AACR,SAAO;;AAGH;AACJ,MAAI,QAAQ,EAAE;AACZ,WAAO;;AAET,SAAO,IAAI,IAAI,OAAO,OAAO,EAAE;;AAG3B,wCACgC,aAAqB;AAEzD,SAAO,IAAI,QAAc;AACvB,mBAAe;AAEf,kBAAc;AACZ,UAAI;AACF;AACA;;AAGF;AAEA,0BAAoB,QAAQ;AAE5B,UAAI,cAAc,QAAQ,YAAY;AACpC;AACA;;AAEF,iBAAW,OAAO;;AAGpB;;;AAaE;AAEJ,kBAAgB;AAChB,oBAAkB;AAElB,eAAa,GAAG,IAAI,MAAM,QAAQ,EAAE;AAClC,QAAI,MAAM,MAAM;AACd,mBAAa,MAAM;eACV,MAAM,OAAO;AACtB,UAAI,gBAAgB;AAClB,cAAM,MACF,yDACmB,uBAAuB;;AAEhD,oBAAc;eACL,MAAM,KAAK;AACpB,YAAM,MAAM,gCAAgC,MAAM,aAAa;;;AAInE,MAAI,gBAAgB;AAClB,QAAI,OAAO,KAAK,SAAS;AACvB,YAAM,MAAM,QAAQ,yCAAyC;;AAE/D,WAAO;;AAGT,MAAI,cAAc;AAChB,UAAM,MACF,qCAAqC;;AAG3C,MAAI,OAAO,cAAc;AACvB,UAAM,MACF,wDACO,UAAU;;AAGvB,mBAAiB,MAAM;AACvB,WAAS,eAAe,OAAO;AAC/B,SAAO;;AAGH;AAEJ,eAAa,MAAM;AAGnB,SAAO,QAAQ,OAAO,MAAM,IAAI,UAAU,KAAK,GAAG,OAAO;AAGzD,SACI,KAAK,MAAM,QAAM,MAAM,CAAC,QAAQ,KAAK,OACrC,MACI,+CAA+C,SAAS,sBAC5C;AAGpB,SACI,KAAK,MAAM,QAAM,MAAM,MACvB,MAAM,0DACU;AAGpB,SAAO,KAAK,IAAI,OAAK,IAAI,IAAI,OAAO,IAAI;;AAIpC;AAEJ,mBAA2B;AAC3B,mBAA2B;AAC3B,uBAAqB,QAAQ,QAAQ,MAAM,QAAQ,SAAS,KAAK,WAAW;AAC5E,eAAc,QAAQ,QAAQ,eAC1B,OACA,eAAe,MAAM,OAAO;AAChC,UAAQ;AACR,eAAa,GAAG,IAAI,MAAM,QAAQ,EAAE;AAClC,QAAI,QAAQ;AACV,UAAI,KAAK,OAAO,KAAK,MAAM,OAAO;AAChC,cAAM,IAAI,MACN,sBAAsB,oBAAoB,MAAM;;AAEtD,UAAK,MAAK,MAAM,QAAQ,KAAK,KAAK,MAAM,MAAM,OAAO;AACnD,iBAAS,KAAK,MAAM;AACpB,iBAAS,KAAK;;AAEhB,UAAI,KAAK,MAAM;AACb;;;AAGJ,QAAI,MAAM,OAAO;AACf,eAAS,KAAK,MAAM;AACpB,eAAS,KAAK;;;AAGlB,SAAO,CAAC,UAAU;;AAGd;AAEJ,eAAa;AACb,MAAI,SAAS,QAAQ,UAAU;AAC7B,aAAS,IAAI,aAAa;aACjB,UAAU;AACnB,aAAS,IAAI,WAAW;aACf,UAAU;AACnB,aAAS,IAAI,WAAW;;AAExB,UAAM,IAAI,MAAM,qBAAqB;;AAEvC,SAAO;;AAGH;AAEJ,eAAa;AACb,MAAI,SAAS,QAAQ,UAAU;AAC7B,aAAS,IAAI,aAAa;aACjB,UAAU;AACnB,aAAS,IAAI,WAAW;aACf,UAAU;AACnB,aAAS,IAAI,WAAW;aACf,UAAU;AACnB,aAAS,IAAI,MAAgB;;AAE7B,UAAM,IAAI,MAAM,qBAAqB;;AAEvC,SAAO;;AAGH;AAEJ,eAAa,GAAG,IAAI,KAAK,QAAQ;AAC/B,gBAAY,KAAK;AACjB,QAAI,MAAM,QAAQ,CAAC,SAAS;AAC1B,YAAM,MAAM,oBAAoB,iCAAiC;;;;AAMjE;AACJ,SAAO,UAAU,UAAU,UAAU,eAAe,UAAU,aAC1D,UAAU,WAAW,UAAU;;AAO/B;AACJ,MAAI,YAAY;AACd,WAAO;;AAET,MAAI,YAAY,aAAa,YAAY;AACvC,WAAO;;AAET,MAAI,YAAY,WAAW,YAAY,aAAa,YAAY;AAC9D,WAAO;;AAET,MAAI,YAAY,UAAU,YAAY;AACpC,WAAO;;AAET,SAAO;;AAGH;AACJ,SAAO,aAAa,gBAAgB,aAAa,cAC7C,aAAa;;AAGb;AACJ,MAAI,UAAU,aAAa,UAAU;AACnC,WAAO;aACE,UAAU;AACnB,WAAO;aACE,UAAU;AACnB,WAAO;;AAEP,UAAM,IAAI,MAAM,iBAAiB;;;AAU/B;AACJ,MAAI,OAAO;AACT,WAAO;;AAET,cAAY;AACZ,MAAI,QAAQ,OAAK,SAAS,EAAE;AAC5B,SAAO;;AAIH;AACJ,SAAO,OAAO,UAAU,YAAY,iBAAiB;;AAGjD;AACJ,SAAO,OAAO,UAAU;;AAGpB;AACJ,SAAO,OAAO,UAAU;;AAGpB;AACJ,MAAI,MAAM,QAAQ;AAChB,WAAO,WAAW,OAAO;;AAE3B,MAAI,kBAAkB;AACpB,WAAO;aACE,kBAAkB,cAAc,kBAAkB;AAC3D,WAAO;aACE,SAAS;AAClB,WAAO;aACE,SAAS;AAClB,WAAO;aACE,UAAU;AACnB,WAAO;;AAET,SAAO;;AAGH;AACJ,SAAO,CAAC,CAAE,MAAK,EAAE,eAAe,EAAE,QAAQ,EAAE;;AAGxC;AACJ,eAAa,OAAO,IAAI,MAAM,EAAE;AAC9B,QAAI,OAAO,MAAM;AACf,aAAO;;;AAGX,SAAO;;AAGH;AACJ,eAAa,MAAM;AACnB,MAAI,OAAO;AACT,WAAO;;AAKT,kBAAgB,IAAI,MAAM,OAAO;AACjC,UAAQ,OAAO,KAAK,MAAM,OAAO;AACjC,eAAa,OAAO,GAAG,KAAK,GAAG,EAAE;AAC/B,YAAQ,KAAK,QAAQ,IAAI,KAAK,MAAM,IAAI;;AAE1C,SAAO;;AAGT;AACE,cAAY,IAAI;AAChB,MAAI,MAAM,WAAW;AACnB,cAAU,MAAM;AAChB,iBAAa,GAAG,IAAI,GAAG;AACrB,UAAI,KAAK,EAAE,SAAS;;;AAGtB,cAAU,MAAM;AAChB,iBAAa,MAAM,MAAM;AACzB,gBAAY,KAAK,OAAO,YAAY,MAAM;AAC1C,iBAAa,GAAG,IAAI,GAAG;AACrB,UAAI,KAAK,kBAAkB,SAAS,IAAI,KAAK,MAAM;;;AAGvD,SAAO;;AAIH;AACJ,MAAI,MAAM,WAAW;AAEnB,WAAO,EAAE;;AAEX,eAAa,MAAM,OAAO,YAAY,MAAM;AAC5C,MAAI,SAAS;AAEX,WAAO;;AAET,MAAI,SAAS,EAAE;AACb,UAAM,IAAI,MAAM,IAAI,wCAAwC,EAAE;;AAGhE,S
AAO,kBAAkB,GAAG,OAAO;;AAG/B;AAEJ,gBAAc,oBAAoB,MAAM;AACxC,eAAa,GAAG,IAAI,MAAM,QAAQ;AAChC,UAAM,KAAK;;AAEb,SAAO;;AAGH;AAEJ,MAAI,SAAS,QAAQ,UAAU,aAAa,UAAU;AACpD,WAAO,IAAI,aAAa;aACf,UAAU;AACnB,WAAO,IAAI,WAAW;aACb,UAAU;AACnB,WAAO,IAAI,WAAW;;AAEtB,UAAM,IAAI,MAAM,qBAAqB;;;AASnC;AAEJ,eAAa,MAAM,OAAO,gBAAgB,OAAO,MAAM;AACvD,MAAI,SAAS,QAAQ,UAAU;AAC7B,WAAO,cAAc,OAAO,IAAI,aAAa;aACpC,UAAU;AACnB,WAAO,cAAc,OAAO,IAAI,WAAW;aAClC,UAAU;AACnB,WAAO,cAAc,OAAO,IAAI,WAAW;;AAE3C,UAAM,IAAI,MAAM,qBAAqB;;;AAInC;AACJ,QAAM,QAAQ;AACZ,WACI,OAAO,UAAU,YAAY,WAAW,GACxC,MACI,0EACU;;;AAYhB;AAEJ,MAAI,SAAS;AACX,WAAO;aACE,SAAS;AAClB,WAAO,KAAK;;AAEd,cAAY,KAAK,KAAK,SAAS;AAC/B,eAAa,GAAG,IAAI,KAAK,SAAS,GAAG,EAAE;AACrC,aAAS,QAAQ,KAAK,KAAK;;AAE7B,SAAO;;AAWH;AAEJ,MAAI,SAAS;AACX,WAAO;aACE,SAAS;AAClB,WAAO,CAAC;;AAEV,eAAuB,IAAI,MAAM;AACjC,eAAa,GAAG,IAAI,KAAK,SAAS,GAAG,EAAE;AACrC,SAAK,KAAK,KAAK,MAAM,QAAQ,QAAQ;AACrC,aAAS,KAAK,KAAK,QAAQ;;AAE7B,OAAK,KAAK,SAAS,KAAK;AACxB,SAAO;;AAQH;AAOJ,SAAO,UAAU,OAAO,QAAQ,OAAO,OAAO,SAAS;;;;AC1rBzD;;;;;;;;;;;;;;;;AAqBA,MAAM,4BAA4B;AArBlC;EAkDE;AAAmB,SAAA,SAAA;AATX,SAAA,QAAe;AACf,SAAA,eAAwD;AAExD,SAAA,WAAkB;AAOxB,SAAK;;EAGP;AACE,QAAI,KAAK,YAAY;AACnB,cAAQ,KACJ,YAAY,KAAK,oEACgB;;AAEvC,SAAK,eAAe;AACpB,SAAK,WAAW;;EAGlB;AAGE,SAAK,aAAa,YAAY,CAAC,cAAc;AAI7C,QAAI,KAAK,SAAS,aAAa;AAC7B,wBAAkB,KAAK,SAAS;AAChC,cAAQ,KACJ,qCAAqC,aAAa;AACtD,WAAK,IAAI,UAAU;;;QAIjB;AACJ,QAAI,YAAY,KAAK;AACnB,aAAO,KAAK,MAAM;;AAGpB,SAAK,MAAM,YAAY,MAAM,KAAK,aAAa;AAC/C,WAAO,KAAK,MAAM;;EAGpB;AACE,QAAI,YAAY,KAAK;AACnB,aAAO,KAAK,MAAM;;AAGpB,sBAAkB,KAAK,aAAa;AACpC,QAAI,UAAU;AACZ,YAAM,IAAI,MACN,QAAQ;;AAId,SAAK,MAAM,YAAY;AAEvB,WAAO,KAAK,MAAM;;EAGpB;AACE,WAAO,KAAK,IAAI;;EAGlB;AACE,WAAO,KAAK,IAAI;;EAGlB;AACE,WAAO,KAAK;;MAGV;AACF,WAAO,KAAK;;EAGd;AACE,QAAI,KAAK,aAAa,aAAa;AACjC,YAAM,IAAI,MACN,mBAAmB;;AAEzB,SAAK,MAAM,YAAY;AACvB,QAAI,KAAK,aAAa,UAAU,WAAW;AACzC,WAAK,aAAa,UAAU,QAAQ;;;EAIhC;AACN,QAAI,KAAK,aAAa,aAAa;AACjC,YAAM,IAAI,MACN,yBAAyB;;AAE/B,WAAO,KAAK,aAAa,UAAU;;EAGrC;AACE,SAAK,QAAQ,OAAO,OAAO,IAAI;;EAGjC;AACE,SAAK,QAAQ;AACb,SAAK,WAAW;AAChB,SAAK;;EAGC;AACN,QAAI,OAAO,KAAK,WAAW,eACvB,OAAO,KAAK,OAAO,aAAa,eAChC,OAAO,KAAK,OAAO,SAAS,WAAW;AACzC;;AAGF,sBAAkB,eAAe,KAAK,OAAO,SAAS;AACtD,QAAI,6BAA6B;AAC/B,wBAAkB,UAAU,2BAA2B,MAAM;AAC7D,gBAAU,QAAQ;AAChB,6BAAqB,SAAS,MAAM;AACpC,aAAK,SAAS,OAAO,WAAW,KAAK;;;;;AAMvC;AACJ,iBAAe;AACf,cAAY,QAAQ,+BAA+B;AACjD,gBAAY,QAAQ,EAAE,IAAI,EAAE;AAC5B,WAAO,EAAE,KAAK;;AAEhB,SAAO;;AAGT;AAEE,SAAO,mBAAmB,SAAS,mBAAmB,SAAS;;AAGjE;AACE,UAAQ,MAAM;AACd,MAAI,UAAU,UAAU,UAAU;AAChC,WAAO,UAAU;aACR,GAAG,CAAE,YAAY;AAC1B,WAAO,CAAC;;AAEV,QAAM,IAAI,MACN,oCAAoC,kBAAkB;;AAWtD;AACJ,SAAO;;AAGF,UAAuB;AACxB;AACJ,QAAM;;;;AC/MR;;;;;;;;;;;;;;;;AAqBA,IAAI;AAEE;AACJ,MAAI,mBAAmB;AAErB;AACA,QAAI,OAAQ,WAAY;AACtB,WAAK;eACI,OAAQ,WAAY;AAC7B,WAAK;eACI,OAAQ,YAAa;AAC9B,WAAK;eACI,OAAQ,SAAU;AAC3B,WAAK;;AAEL,YAAM,IAAI,MAAM;;AAElB,sBAAkB;;AAEpB,SAAO;;AAIT;AACE,aAAW;AACX,MAAI,GAAG,cAAc;AACnB,OAAG,aAAa,IAAI;;AAEtB,SAAO,GAAG;;AAUN;AACJ,oBAAkB;AAClB,MAAI,UAAU,IAAI;AAChB,WAAO,UAAU,IAAI;;AAErB,sBAAkB;AAClB,cAAU,IAAI,KAAK;AACnB,WAAO,UAAU,IAAI;;;;;ACzClB,MAAM,MAAM;ACSZ,MAAM,MAAM;AAGZ,aAAa;ACiBb,MAAM,SAAS;AC2Bf,MAAM,UAAU;ACsChB,MAAM,cAAc;ACuBpB,MAAM,OAAO;ACSb,MAAM,cAAc;AAOpB,gBAAgB;AAGhB,eAAe;AAMf,eAAe;ACoBf,MAAM,sBAAsB;ACoC5B,MAAM,MAAM;ACMZ,MAAM,SAAS;AAQf,sBAAsB;AAStB,qBAAqB;AAOrB,8BAA8B;ACsD9B,MAAM,MAAM;AAGZ,YAAY;ACSZ,MAAM,QAAQ;AAGd,YAAY;ACSZ,MAAM,OAAO;AAOb,sBAAsB;ACMtB,MAAM,WAAW;AAGjB,uBAAuB;AAOvB,iBAAiB;AAMjB,iBAAiB;AAGjB,gBAAgB;AAGhB,qBAAqB;AAGrB,iBAAiB;ACkBjB,MAAM,OAAO;AAGb,kBAAkB;ACSlB,MAAM,MAAM;ACMZ,MAAM,aAAa;ACiCnB,MAAM,MAAM;AAOZ,gBAAgB;AAGhB,gBAAgB;ACyDhB,MAAM,MAAM;AAOZ,gBAAgB;ACahB,MAAM,WAA
W;AAGjB,eAAe;AAGf,iBAAiB;AAGjB,4BAA4B;AAS5B,4BAA4B;AAU5B,4BAA4B;AAU5B,iBAAiB;AAGjB,eAAe;AAQf,cAAc;ACUd,MAAM,MAAM;AAGZ,cAAc;ACwBd,MAAM,OAAO;AAGb,gBAAgB;ACiBhB,MAAM,iBAAiB;ACUvB,MAAM,QAAQ;AAGd,gBAAgB;ACShB,MAAM,QAAQ;AAGd,kBAAkB;AAMlB,iBAAiB;ACMjB,MAAM,QAAQ;AAMd,YAAY;ACSZ,MAAM,UAAU;ACMhB,MAAM,OAAO;AAGb,YAAY;ACcZ,MAAM,SAAS;AAOf,gBAAgB;AAMhB,0BAA0B;AAG1B,eAAe;AAGf,YAAY;ACUZ,MAAM,eAAe;ACgBrB,MAAM,OAAO;AAGb,aAAa;ACab,MAAM,YAAY;ACclB,MAAM,SAAS;ACaf,MAAM,YAAY;AAMlB,aAAa;ACeb,MAAM,mBAAmB;AAQzB,qBAAqB;AAerB,oBAAoB;AAgBpB,6BAA6B;;;ACt1BpC;;;;;;;;;;;;;;;;AAuBA,MAAM,iBACF,UAAU,kBAAkB,MAAM,IAAI;AAC1C,qBACI,UAAU,gBAAgB,MAAM,IAAI;AAoElC;AAEJ,cAAY,QAAQ,YAAY;AAChC,SAAO,eAAe,IAAI;;AAOtB;AACJ,SAAO,aAAa,IAAI;;AAGpB;AACJ,aAAW,eAAe;AAC1B,iBAA+B;AAE/B,SAAO;AACL,WAAO,MAAM,SAAS,GAAG;AACzB,QAAI;AACF;;AAEF,2BAAsB;AACtB,uBAAoB,IAAI,MAAM;AAC9B,QAAI,aAAY;AACd,aAAO,KAAK;;;AAGhB,SAAO;;AAcH;AACJ,SAAO,YAAY,eAAe;AAClC,cAAY,QAAQ,YAAY;AAChC,MAAI,eAAe,IAAI;AACrB,YAAQ,KACJ,eAAe,4BACX;;AAEV,iBAAe,IAAI,KAAK;;ACmE1B,iBAAiB;AACf,SAAO,GAAG,eAAe;;;;ACrN3B,MAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACAA;;;;;;;;;;;;;;;;AAyBM,2BACF;AACF,MAAI,UAAU;AACZ,WAAO,aAAa;;AAGtB,SAAO,aAAa,CAAC,QAAQ;;AAG/B;AACE,SAAQ,aAAa,gBAAgB,UAAU,aAC1C,aAAa,cAAc,UAAU,WACrC,aAAa,cAAc,UAAU;;AAGtC;AACJ,MAAI,UAAU;AACZ,UAAM,IAAI,MAAM;;AAElB,MAAI,MAAM,QAAQ;AAChB,QAAI,AAAK,QAAQ;;AAGnB,MAAI,MAAM,QAAQ;AAChB,IAAK,yBAAyB,GAAe;;AAE/C,MAAI,mBAAmB,GAAG;AACxB,WAAO;;AAET,MAAI,SAAS,QAAQ,UAAU,aAAa,UAAU;AACpD,WAAO,IAAI,aAAa;aACf,UAAU;AACnB,WAAO,IAAI,WAAW;aACb,UAAU;AACnB,iBAAa,IAAI,WAAY,EAAe;AAC5C,iBAAa,GAAG,IAAI,KAAK,QAAQ,EAAE;AACjC,UAAI,KAAK,MAAO,EAAe,QAAQ;AACrC,aAAK,KAAK;;;AAGd,WAAO;;AAEP,UAAM,IAAI,MAAM,qBAAqB;;;AAenC;AACJ,SAAO,MAAM,SAAS;;AAmBlB;AAEJ,SAAO,MAAM,SAAS,MAAM,MAAM;;AAW9B,oCAA6C;AACjD,aAAW,YAAY;AACvB,SAAO,MAAM,SAAS,OAAO,GAAG;;AAW5B,wCAAqD;AACzD,aAAW,YAAY;AACvB,SAAO,MAAM,SAAS,OAAO,OAAO;;;;AClItC;;;;;;;;;;;;;;;;;EAgCE,YAAoB;AAAA,SAAA,eAAA;AAAoC,SAAA,SAAA;AACtD,QAAI,UAAU;AACZ,WAAK,SAAS,IAAI;;;EAItB;AAEE;AACA,gCAA4B;AAC1B,gBAAU;;AAEZ,kBAAc,KAAK,aAAa,KAAK;AAErC,iBAAa,GAAG,IAAI,QAAQ,QAAQ;AAClC,qBAAe,QAAQ;AAGvB,aAAO,OAAO,KAAK;AACjB,kCAA0B,YAAY,OAAO,OAAO;;;AAIxD,0BAAsB;MACpB;MACA;MACA;MACA,QAAQ,MAAM,KAAK,YAAU,OAAO;MACpC,WAAW,MAAM,KACb,YAAU,OAAO,uBAAuB,OACpC,OAAO,wBACP;;AAEV,WAAO;;EAGT;AACE,WAAO,YAAY,SAAS,QAAQ,QAAQ,aAAa;AAEzD,YAAQ,QAAQ;AACd,cAAQ,IAAI,CAAC,OAAO,QAAQ,QAAQ,YAAY,KAAK;AACnD,aAAK,OAAO,iBACR,YAAY,QAAQ,eAAe,IAAI,eAAe,IAAI,QAC1D,eAAe;;;;;AAMrB;AAEJ,MAAI,UAAU;AAEZ,WAAO;;AAET,eAAa,GAAG,IAAI,KAAK,QAAQ;AAC/B,gBAAY,KAAK;AACjB,QAAI,MAAM,QAAQ,CAAC,SAAS;AAE1B,cAAQ,KAAK,SAAS,yBAAyB;AAC/C,aAAO;;;AAGX,SAAO;;AA/FT;EAmGE;AAIE,iBAAa,OAAO,WAAW,WAAW,AAAK,SAAS,GAAG,YAAY,KAC7B,OAAO;AACjD,uBAAmB,AAAK,SAAS,MAAM;AACvC,iBAAa,OAAO;AACpB,iBAAa,OAAO;AACpB,kBAAc,AAAK,SAAS,OAAO,MAAM,YAAY;AACrD,iCAA6B;AAE7B,wBAAmB;AACjB,oBAAc,OAAO;AACrB,UAAI,SAAS;AAGX,2BAAmB,MAAM,SAAS,OAAO;AACzC,0BAAkB,WAAW;AAC7B,kCACI,GAAG,UAAS,cAAc,YAAY,IAAI,aAAa;;;AAI/D,YAAQ,IACJ,KAAK,gBAAiB,UAAW,SAAS,WAAY,UAClD,4BAA6B,aACjC,oBAAoB,aAAa,cAAc,iBAC/C,gBAAgB;;;;;AC/HxB;;;;;;;;;;;;;;;;AA2CM,8BACF;AAGF,uBAAoD;AACpD,qBAAgD;AAChD,eAAa,GAAG,IAAI,GAAG,QAAQ;AAC7B,iBAAa,GAAG,GAAG,MAAM;;AAG3B,eAAa,GAAG,IAAI,MAAK,QAAQ;AAC/B,iBAAa,MAAK;AAClB,uBAAmB,KAAK;AACxB,4BAAwB;AACtB,oBAAc,WAAW;AAEzB,0BAAoB;AACpB,mBAAa,GAAG,IAAI,GAAG,QAAQ;AAC7B,YAAI,aAAa,MAAM;AACrB,eAAK,QAAQ,QAAQ,YAAU,aAAa,OAAO,MAAM;AACzD,0BAAgB;AAChB,qBAAW,KAAK,MAAM;AACtB;;;AAIJ,UAAI;AACF;;;;AAMN,yBAAsD;AACtD,iBAAe,EAAE,MAAM;AACvB,mBAA8C;AAE9C,eAAa,MAAK,SAAS,GAAG,KAAK,GAAG;AACpC,iBAAa,MAAK;AAClB,uBAAmB,KAAK;AAGxB,iBAAa,GAAG,IAAI,KAAK,QAAQ,QAAQ;AACvC,UAAI,eAAe,KAAK,QAAQ,GAAG
;AACjC,gCAAwB;AACtB,yBAAe,WAAW,WAAW,MAAM;AAC3C,mBAAS,KAAK,MAAM;;AAEtB;;;;AAMN,uBAAiC;AACjC,eAAa,GAAG,IAAI,MAAK,QAAQ;AAC/B,iBAAa,MAAK;AAElB,QAAI,WAAW,KAAK,OAAO,SAAS,KAAK;AAEvC,2BAAoD;AACpD,8BAAwB,KAAK;AAC3B,0BAAkB,KAAK,OAAO;AAC9B,YAAI,aAAa,UAAU;AACzB,uBAAa,aAAa;;;AAK9B,yBAAmB,OAAO,OAAO,IAAI;AACrC,iBAAW,SAAS;AACpB,iBAAW,UAAU,KAAK;AAE1B,mBAAa,KAAK;;;AAItB,SAAO;;AAUH;AAKJ,eAAa,aAAa,SAAS,GAAG,KAAK,GAAG;AAC5C,iBAAa,aAAa;AAE1B,gBAAsB;AACtB,SAAK,QAAQ,QAAQ;AACnB,yBAAmB,6BAA6B,EAAE;AAClD,UAAI,cAAc;AAChB,YAAI,KAAK;;AAIT,YAAI,KAAK;;;AAIb,QAAI,KAAK,YAAY;AACnB,YAAM,IAAI,MACN,4DACO,KAAK;;AAIlB,2BAAuB,KAAK,SAAS;AAErC,4BAAwB,KAAK;AAC3B,UAAI,CAAE,cAAa;AACjB,cAAM,IAAI,MACN,iCAAiC,yCACH,OAAO,KAAK;;AAIhD,iBAAW,KAAK,MAAM,eAAe;AACrC,UAAI,GAAG,UAAU;AACf,cAAM,IAAI,MACN,4BACI,KAAK,qCACN,iDAAiD,GAAG;;AAE7D,gBAAU,KAAK,OAAO;AACtB,UAAI,CAAC,AAAK,YAAY,GAAG,OAAO,EAAE;AAChC,cAAM,IAAI,MACN,4BACI,KAAK,sCACL,yBAAyB,GAAG,wDACL,EAAE;;AAGnC,UAAI,6BAA6B,EAAE,OAAO;AACxC,qCAA6B,EAAE,MAAM;;AAErC,4BAAoB,6BAA6B,EAAE;AACnD,qCAA6B,EAAE,MAAM,KAAI,aAAa;AACtD,oBAAY;;;;;;;AC5LpB;;;;;;;;;;;;;;;;AAqBA,MAAM,wBAAwB;AAE9B,mCAAmC;AAEnC,8BAA8B;AAExB;AAGJ,kBAAgB,eAAe;AAC/B,oBAAkB,wBAAwB,MAAM,OAAO,OAAO;AAC9D,eAAa,MAAM;AACnB,oBAAkB,kBAAkB,MAAM,OAAO,OAAO,SAAS;AACjE,gBAAc,CAAC;AACf,MAAI;AACF,UAAM,KAAK,YAAY;AACvB,UAAM,KAAK,WAAW;AACtB,UAAM,KAAK,aAAa;AACxB,UAAM,KAAK;;AAEb,QAAM,KAAK,UAAU,IAAI,OAAK,SAAS,GAAG,KAAK;AAC/C,SAAO,MAAM,KAAK;;AAGpB;AAGE,YAAU,cAAc;AACxB,kBAAgB,QAAQ,QAAQ,SAAS;AACzC,oBAAkB,IAAI,MAAM,SAAS,KAAK;AAC1C,eAAa,MAAM;AACnB,yBACI,UAAU,cAAc,oBAAoB,QAAQ;AAExD,MAAI,OAAO;AACT,mBAAe,GAAG,MAAM,IAAI,SAAS;AACnC,qBAAe,MAAM;AACrB,mBAAa,GAAG,IAAI,SAAS;AAC3B,kBAAU,KAAK,KAAK,IAChB,UAAU,IACV,YAAY,eAAe,SAAS,IAAI,GAAG,OAAO;;;;AAI5D,SAAO;;AAGT;AAEE;AACA,MAAI,MAAM,QAAQ;AAChB,aAAS,GAAG,WAAW,IAAI,GAAG,QAAQ,6BAC/B,WAAW,IAAI,GAAG,QAAQ;aACxB,SAAS;AAClB,aAAS,IAAI;aACJ,UAAU;AACnB,aAAS,gBAAgB;;AAEzB,aAAS,WAAW,IAAI,QAAQ,wBAAwB;;AAG1D,SAAO,SAAS,QAAQ;;AAG1B;AACE,SAAO,MAAM,IAAI,UAAU;;AAG7B,4EAEqD;AACnD,4BAA0B,UAAU,cAAc,IAAI;AAEtD,eAAa,MAAM;AACnB,eAAa,MAAM;AACnB,MAAI,SAAS;AACX,QAAI,UAAU;AACZ,2BAAqB,oBAAoB;AACzC,aAAO,CAAC,YAAY,aAAa,IAAI,GAAG;;AAE1C,QAAI,UAAU;AACZ,aAAO,CAAC,gBAAgB,KAAK;;AAE/B,WAAO,CAAC,KAAK,GAAG;;AAGlB,MAAI,SAAS;AACX,QAAI,OAAO;AACT,4BAAsB,6BAA6B;AAEnD,sBAAgB,MAAM,KAClB,KAAK,MAAM,GAAG;AAClB,qBAAe,MAAM,KAAqC,KAAK,MAC1D,QAAO,8BAA8B,mBACtC,OAAO;AACX,UAAI,UAAU;AACZ,oBAAY,oBAAoB;AAChC,mBAAW,oBAAoB;;AAEjC,aAAO;QACL,MACA,UAAU,IAAI,UAAU,YAAY,GAAG,UAAU,IAAI,QAChD,KAAK,QACV,YACA,SACK,IACG,UAAU,YACN,GAAG,UAAU,OAAO,6BAA6B,IAAI,QAC5D,KAAK,QACV;;;AAGJ,wBACI,UAAU,cAAc,oBAAoB,QACpB,MAAM,KAAoB;AAEtD,WAAO;MACL,MACA,YAAY,IAAI,UAAU,YAAY,GAAG,UAAU,IAAI,QAClD,KAAK,QACV;;;AAKJ,mBAAiB,MAAM,MAAM;AAC7B,qBAAmB,QAAQ,MAAM;AACjC,iBAAe,QAAQ,KAAK;AAC5B,gBAAwB;AACxB,MAAI,OAAO;AACT,iBAAa,GAAG,IAAI,4BAA4B;AAC9C,oBAAc,IAAI;AAClB,kBAAY,QAAQ;AACpB,YAAM,KAAK,GAAG,kBACV,KAAK,MAAM,OAAO,MAAM,UAAU,OAAO,YAAY,WACrD;;AAEN,UAAM,KAAK;AACX,iBAAa,OAAO,4BAA4B,IAAI,MAAM;AACxD,oBAAc,IAAI;AAClB,kBAAY,QAAQ;AACpB,YAAM,KAAK,GAAG,kBACV,KAAK,MAAM,OAAO,MAAM,UAAU,OAAO,YAAY,WACrD,MAAM,OAAO;;;AAGnB,iBAAa,GAAG,IAAI,MAAM;AACxB,oBAAc,IAAI;AAClB,kBAAY,QAAQ;AACpB,YAAM,KAAK,GAAG,kBACV,KAAK,MAAM,OAAO,MAAM,UAAU,OAAO,YAAY,WACrD,MAAM,OAAO;;;AAGrB,cAAY,SAAS,IAAI,MAAM;AAC/B,QAAM,KAAK,MAAM,MAAM,KAAK;AAC5B,eAAa,GAAG,IAAI,MAAM,SAAS,GAAG;AACpC,UAAM,KAAK,MAAM,MAAM,KAAK;;AAE9B,mBAAiB;AACjB,eAAa,GAAG,IAAI,MAAM;AACxB,kBAAc;;AAEhB,QAAM,MAAM,SAAS,KACjB,MAAM,MAAM,MAAM,SAAS,KAAK,MAAO,UAAS,KAAK;AACzD,SAAO;;AAGT;AAEE,wBAA+C;AAC/C,eAAa,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,kBAAc,KAAK,CAAC,KAAK,IAAI,KAAK,IAAI;;AAExC,SAAO;;;;ACnMT;;;;;;;;;;;;;;;;;EA6CE,YAAY;
AAA2B,SAAA,QAAA;AACrC,SAAK,QAAQ,MAAM;AACnB,SAAK,OAAO,AAAK,cAAc;AAE/B,QAAI,UAAU;AACZ,gBAAU,OAAO;AACjB,MAAK,OACD,MAAM,KAAK,MACX,MAAM,qBAAqB,qDACG,KAAK;;AAEzC,QAAI,UAAU;AACZ,YAAM,IAAI,MACN;;AAIN,SAAK,SAAS,UAAU,AAAK,kBAAkB,OAAO,KAAK;AAC3D,SAAK,UAAU,eAAe;;EAWhC;AACE,QAAI,KAAK,WAAW;AAClB,aAAO,CAAC;;AAEV,IAAK,OACD,KAAK,WAAW,KAAK,MACrB,MAAM,uCAAuC,KAAK,gCAC3B,KAAK;AAEhC,kBAAc,KAAK,WAAW;AAC9B,SAAK,OAAO,SAAS;;EAUvB;AACE,QAAI,KAAK,WAAW;AAClB,aAAO,CAAC;;AAEV,YAAQ;AACR,sBAAkB;AAChB,UAAI,MAAM,KAAK,OAAO,KAAK,MAAM;AAC/B,oBAAY,qCAAqC,wBAC3B,KAAK;AAC3B,cAAM,IAAI,MAAM;;AAElB;;AAEF,gBAAY,KAAK,KAAK,SAAS;AAC/B,kBAAa,GAAG,KAAI,KAAK,SAAS,GAAG,EAAE;AACrC,eAAS,KAAK,QAAQ,MAAK,KAAK;;AAElC,WAAO,KAAK,OAAO;;EAGrB;AACE,QAAI,KAAK,SAAS;AAChB,aAAO;eACE,KAAK,SAAS;AACvB,aAAO,KAAK;;AAEd,gBAAY,KAAK,KAAK,SAAS;AAC/B,iBAAa,GAAG,IAAI,KAAK,SAAS,GAAG,EAAE;AACrC,eAAS,KAAK,QAAQ,KAAK,KAAK;;AAElC,WAAO;;EAGT;AACE,QAAI,KAAK,SAAS;AAChB,aAAO;eACE,KAAK,SAAS;AACvB,aAAO,CAAC;;AAEV,iBAAuB,IAAI,MAAM,KAAK,MAAM;AAC5C,iBAAa,GAAG,IAAI,KAAK,SAAS,GAAG,EAAE;AACrC,WAAK,KAAK,KAAK,MAAM,QAAQ,KAAK,QAAQ;AAC1C,eAAS,KAAK,KAAK,KAAK,QAAQ;;AAElC,SAAK,KAAK,SAAS,KAAK;AACxB,WAAO;;MAGL;AACF,WAAO,KAAK,MAAM;;EAQpB;AACE,WAAO,YAAY,WAAW,KAAK,QAAQ,KAAK,OAAO,KAAK;;;AAiChE,gBAAqC;AAErC,gBAA2B;AAE3B,2BAAkD;ACW5C,0BAA2B;AAC/B,cAAY;;ACgBR,iCAAkC;AACtC,yBAAuB;;AA1NzB;EA8QE;AAXA,SAAA,OAAO;AAoIG,SAAA,qBAAqB;AAxH7B,SAAK,QAAQ,MAAM;AACnB,SAAK,QAAQ,SAAS;AACtB,SAAK,OAAO,AAAK,cAAc;AAC/B,SAAK,UAAU,eAAe;AAC9B,SAAK,SAAS;AACd,SAAK,KAAK;AACV,SAAK,WAAY,KAAK,OAAO,IAAI,KAAK,KAAK,aAAa;;MAGtD;AACF,WAAO,KAAK,MAAM;;QAQd;AACJ,iBAAa,MAAM,KAAK;AACxB,WAAO,UAAU,OAAO,KAAK,OAAO,KAAK,OAAY;;EAOvD;AACE,WAAO,UAAU,OAAO,KAAK,OAAO,KAAK,OAAY,KAAK;;QAStD;AACJ,iBAAa,MAAM,KAAK;AACxB,WAAO,cAAc,KAAK,OAAO;;EASnC;AACE,WAAO,cAAc,KAAK,OAAO,KAAK;;QASlC;AACJ,SAAK;AACL,kBAAa,YAAY,KAAK,KAAK;AACnC,QAAI,KAAK,UAAU;AACjB,oBAAc,MAAM;AACpB;AACE,eAAO,MAAM,IAAI,OAAK,AAAK,aAAa;;AAExC,cAAM,IAAI,MACN;;;AAIR,WAAO;;EAST;AACE,SAAK;AACL,kBAAa,YAAY,SAAS,KAAK;AACvC,QAAI,KAAK,UAAU;AACjB;AACE,eAAQ,MAAsB,IAAI,OAAK,AAAK,aAAa;;AAGzD,cAAM,IAAI,MACN;;;AAIR,WAAO;;QAIH;AACJ,SAAK;AACL,kBAAa,MAAM,YAAY,KAAK,KAAK;AACzC,QAAI,KAAK,UAAU;AACjB,aAAO;;AAEP,aAAO,IAAI,WAAY,MAAoB;;;EAS/C;AACE,QAAI,KAAK;AACP;;AAEF,gBAAY,cAAc;AAC1B,SAAK,qBAAqB;;MAIxB;AACF,WAAO,KAAK;;EAGd;AACE,QAAI,KAAK;AACP,YAAM,IAAI,MAAM;;;EAYpB,gBAAgB;AACd,WAAO,UAAU,MAAM,MAAM;;EAO/B;AACE,SAAK;AACL,WAAO,UAAU,MAAM;;EAQzB,mBAAmB;AACjB,iBAAa,KAAK;AAClB,WAAO,eAAe,MAAM,KAAK,OAAO,KAAK,OAAO;;EAGtD;AACE,SAAK;AACL,WAAO,UAAU,KAAK,MAAW;;EAEnC,qBAAqB;AACnB,SAAK;AACL,WAAO,YAAY,aAAa,MAAM,WAAW,MAAM;;;AAI3D,OAAO,eAAe,QAAQ,OAAO,aAAa;EAChD,OAAO;AAML,WAAO,CAAC,CAAC,YAAY,SAAS,QAAQ,QAAQ,SAAS,YAAY,QAC/D,SAAS,mBAAmB;;;AAncpC,uBAueqD;EAGnD;AAGE,UACI,aAAa,OAAO,aAAa,OAAO,aAAa,QAAQ;AAH/B,SAAA,YAAA;AAIlC,SAAK,OAAO;;EAWd;AACE,QAAI,SAAS,UAAU,KAAK;AAC1B,YAAM,IAAI,MACN,2BAA2B,SAAS,8BACjB,KAAK;;AAE9B,QAAI,CAAC,AAAK,YAAY,SAAS,OAAO,KAAK;AACzC,YAAM,IAAI,MACN,2BAA2B,SAAS,8BACjB,KAAK;;AAE9B,gBAAY,cAAc;AAC1B,SAAK,SAAS,SAAS;AACvB,gBAAY,OAAO,MAAM;;EAG3B;AACE,gBAAY,gBAAgB;AAC5B,SAAK,qBAAqB;;;AAI9B,OAAO,eAAe,UAAU,OAAO,aAAa;EAClD,OAAO;AACL,WAAO,oBAAoB,UAAU,SAAS,UAAU,QACpD,SAAS,kBAAkB;;;;;ACnhBnC;;;;;;;;;;;;;;;;AAgEA,IAAY;AAAZ,AAAA;AACE,QAAA,QAAA;AACA,QAAA,QAAA;AACA,QAAA,QAAA;AACA,QAAA,QAAA;AACA,QAAA,QAAA;AACA,QAAA,QAAA;AACA,QAAA,QAAA;GAPU,QAAA,QAAI;AAqBhB;AAAA,AAAA;AACE,qBAAA,aAAA;AACA,qBAAA,WAAA;AACA,qBAAA,UAAA;AACA,qBAAA,eAAA;GAJG,qBAAA,qBAAiB;AAOtB;AAAA,AAAA;AACE,oBAAA,aAAA;AACA,oBAAA,WAAA;AACA,oBAAA,UAAA;AACA,oBAAA,eAAA;GAJG,oBAAA,oBAAgB;AAOrB;AAAA,AAAA;AACE,uBAAA,aAAA;AACA,uBAAA,WAAA;AACA,uBAAA,UAAA;AACA,uBAAA,eAAA;GAJG,uBAAA,uBAAmB;AAOxB
;AAAA,AAAA;AACE,yBAAA,aAAA;AACA,yBAAA,WAAA;AACA,yBAAA,UAAA;AACA,yBAAA,eAAA;GAJG,yBAAA,yBAAqB;AAO1B,sBAAsB;EACpB,SAAW;EACX,OAAS;EACT,MAAQ;EACR,WAAa;;AAGT;AACJ,MAAI,UAAU,YAAY,UAAU;AAClC,QAAI,UAAU,YAAY,UAAU;AAClC,aAAO;;AAET,UAAM,IAAI,MAAM,kBAAkB,cAAc;;AAElD,SAAO,cAAc,OAAO;;;;AC/H9B;;;;;;;;;;;;;;;;AAsBM,wBAA2C;AAC/C,MAAI,EAAE,UAAU,EAAE;AAChB,WAAO,CAAC,GAAG;;AAEb,gBAAc,WAAW,EAAE,OAAO,EAAE;AACpC,SAAO,CAAC,EAAE,KAAK,QAAQ,EAAE,KAAK;;AC0B1B,+BAAgC;AACpC,eAAuB;AACvB,eAAa,IAAI;AACjB,sBAAoB,QAAQ,MAAM;AAClC,SAAO;;AAGT;AAEE,MAAI,aAAa;AACf;;AAEF,MAAI,qBAAqB;AACvB,SAAK,KAAK;AACV;;AAEF,MAAI,CAAC,WAAW;AACd;;AAGF,mBAAiB;AACjB,kBAAgB;AACd,gBAAY,SAAS;AACrB,QAAI,CAAC,KAAK,IAAI;AACZ,WAAK,IAAI;AACT,0BAAoB,KAAK,MAAM;;;;AAMrC;AACE,SAAO,MAAM,QAAQ,QAAQ,OAAO,QAAQ;;;;ACrF9C;;;;;;;;;;;;;;;;;EAmFA;AAEE,SAAA,sBAAwC;AAExC,SAAA,iBAAiB;AACjB,SAAA,WAAW;AACX,SAAA,aAAa;AACb,SAAA,mBAAmB;AACnB,SAAA,iBAAiB;AAMjB,SAAA,gBAAgB;AAGhB,SAAA,cAAc;AAId,SAAA,aAA2B;AAK3B,SAAA,oBAA8B;AAC9B,SAAA,cAAc;AAEd,SAAA,aAAa,IAAI;AAQjB,SAAA,YAAY;AACZ,SAAA,gBACI,CAAC,UAAU,GAAG,YAAY,GAAG,WAAW,GAAG,SAAS,IAAI,QAAQ;;EAEpE;AACE,eAAW,gBAAgB,KAAK;AAC9B,WAAK,oBAAoB,cAAc;;;;AA9H7C;EAmJE;AAAmB,SAAA,MAAA;AAbnB,SAAA,WAA0C;AAC1C,SAAA,kBAKI;AAKI,SAAA,uBAAuB;AAG7B,SAAK,QAAQ,IAAI;;QAGb;AACJ,QAAI,KAAK,sBAAsB;AAC7B,aAAO,KAAK,mBAAmB,KAAK;;;AAEtC,QAAI,KAAK,mBAAmB;AAC1B;;AAEF,2BAAuB,KAAK;AAE5B,iBAAa,GAAG,IAAI,eAAe,QAAQ;AACzC,0BAAoB,eAAe;AACnC,sBAAgB,MAAM,KAAK,kBAAkB,aAAa;AAC1D,UAAI;AACF,cAAM,KAAK,WAAW;AACtB;;;AAIJ,UAAM,IAAI,MACN;;MAIF;AACF,QAAI,KAAK,sBAAsB;AAC7B,YAAM,IAAI,MACN,YAAY,KAAK;;AAIvB,QAAI,KAAK,mBAAmB;AAC1B,aAAO,MAAM,aAAa,KAAK;AAC/B,UAAI;AACF,cAAM,IAAI,MACN,iCAAiC;;AAIvC,WAAK,WAAW;;AAElB,WAAO,KAAK;;EAGd;AACE,WAAO,OAAO,KAAK,KAAK;;EAG1B;AACE,QAAI,CAAE,gBAAe,KAAK;AAGxB,UAAI,eAAe,KAAK;AACtB,eAAO,aAAa,KAAK,kBAAkB;AAC3C,YAAI;AAEF,iBAAO;;;AAGT,eAAO;;;AAGX,WAAO,KAAK,SAAS;;EAGvB;AAEE,QAAI,CAAE,gBAAe,KAAK;AACxB,aAAO;;AAET,WAAO,KAAK,gBAAgB,aAAa;;EAG3C,iDAGe;AACb,QAAI,eAAe,KAAK;AACtB,cAAQ,KACJ,GAAG;AAEP,aAAO;;AAET,SAAK,gBAAgB,eAAe,CAAC,SAAS;AAC9C,WAAO;;QAGH;AACJ,QAAI,KAAK,gBAAgB,gBAAgB;AACvC,YAAM,IAAI,MAAM,iBAAiB;;AAEnC,SAAK,cAAc;AACnB,QAAI,KAAK,SAAS,gBAAgB;AAChC,WAAK,kBAAkB;AACvB,aAAO,SAAS,aAAa,KAAK,kBAAkB;AACpD,qBAAe,YAAY,MAAM,UAAU;AAC3C,UAAI,CAAC;AACH,eAAO;;;AAGX,SAAK,kBAAkB,KAAK,SAAS;AACrC,SAAK;AAEL,SAAK,WAAW,IAAI,SAAS,KAAK;AAElC,WAAO;;EAGD;AACN,oBAAgB,qBAAqB,KAAK;AAC1C,YAAQ,QAAQ;AACd,UAAI,OAAO,aAAa;AACtB,eAAO,UAAU,KAAK;;;;EAKpB;AACN,oBAAgB,qBAAqB;AACrC,YAAQ,QAAQ;AACd,UAAI,OAAO,eAAe;AACxB,eAAO,YAAY,KAAK,SAAS;;;;EAW/B;AAEN,iCAA6B,KAAK,gBAAgB;AAClD,QAAI,wBAAwB;AAC1B,YAAM,IAAI,MACN,6BAA6B;;AAGnC;AACE,uBAAgB,qBAAqB;AAMrC,UAAI,YAAW,CAAE,qBAAmB,kBAC7B,OAAO,SAAQ,SAAS;AAC7B,0BAAkB,EAAE,KAAK;AACzB,wBACI,SACK,KAAK;AAEJ,cAAI,YAAY,KAAK;AACnB,mBAAO;;AAET,eAAK,SAAS,eAAe;AAC7B,eAAK,qBAAqB;AAC1B,iBAAO;WAER,MAAM;AAEL,cAAI,YAAY,KAAK;AACnB,mBAAO;;AAET,eAAK,qBAAqB;AAC1B,kBAAQ,KACJ,6BAA6B;AACjC,kBAAQ,KAAK,IAAI,SAAS,IAAI;AAC9B,iBAAO;;AAEjB,aAAK,qBAAqB;AAC1B,eAAO,CAAC,SAAS,WAAW;;AAE5B,aAAK,SAAS,eAAe;AAC7B,eAAO,CAAC,SAAS,MAAM,WAAW;;;AAGpC,cAAQ,KAAK,6BAA6B;AAC1C,cAAQ,KAAK,IAAI,SAAS,IAAI;AAC9B,aAAO,CAAC,SAAS,OAAO,WAAW;;;EAIvC;AACE,QAAI,CAAE,gBAAe,KAAK;AACxB,YAAM,IAAI,MAAM,GAAG;;AAErB,QAAI,KAAK,gBAAgB,eAAe,KAAK,sBAAsB;AAGjE,WAAK;;AAGP,QAAI,eAAe,KAAK;AACtB,WAAK,yBAAyB;AAC9B,WAAK,SAAS,aAAa;AAC3B,aAAO,KAAK,SAAS;;AAGvB,WAAO,KAAK,gBAAgB;AAG5B,QAAI,KAAK,gBAAgB;AACvB,WAAK,qBAAqB;AAC1B,WAAK,cAAc;AACnB,WAAK,kBAAkB;;;EAInB;AACN,QAAI,OAAO,KAAK,KAAK,iBAAiB,WAAW;AAC/C,YAAM,IAAI,MAAM;;AAElB,WAAO,OAAO,KAAK,KAAK,iBAAiB,KAAK;AAE5C,aAAO,KAAK,gBAAgB,GAAG,WAC3B,KAAK,gBAAgB,GAAG;;;EAIxB;AAEN,2BAAuB,KAAK;AAE5B,iBAAa,GAAG,
IAAI,eAAe,QAAQ;AACzC,0BAAoB,eAAe;AACnC,aAAO,SAAS,aAAa,KAAK,kBAAkB;AACpD,UAAI,aAAa;AACf,eAAO,CAAC,MAAM,aAAa;;;AAG/B,UAAM,IAAI,MACN;;EAIN;AACE,iBAAa,KAAK,MAAM,WAAW,IAAI;AACvC,uBAAmB,KAAK;AACxB,mBAAe,KAAK,SAAS;AAG7B,eAAW,YAAY;AACvB,SAAK,UAAU;AACf,aAAQ,KAAK,QAAQ,QAAQ,KAAK,OAAO,KAAK;AAC9C,QAAI,KAAK;AAGP,WAAK,MAAM,kBAAkB,KAAK,MAAM,kBAAkB,SAAS;;;EAIvE;AAEE,eAAmB;AACnB,QAAI,MAAM;AAER,UAAI,OAAO,aAAa;AACtB,cAAM,IAAI,MAAM;;AAElB,WAAK;;AAGL,UAAI,OAAO,aAAa,YAAY,CAAE,qBAAoB;AACxD,cAAM,IAAI,MACN;;AAGN,UAAI,OAAO,OAAO;AAChB,cAAM,IAAI,MACN;;AAGN,aAAO;;AAIT;AACA,WAAO,KAAK,UACR,MAAM,KAAK,WAAW,OAAO,MAAM,KAAK,SAAS,SAAS;AACxD,eAAS;AACT,UAAI,kBAAkB;AACpB,gBAAQ,MAAM;;AAEhB,aAAO;;;EAIP;AACN;AACA;AACE,kBAAY;AACZ;AACA,aAAO;;AAEP;AACA,YAAM;;;EAKF;AACN,WAAO,OAAO;;EAIR;AACN,WAAO,OAAO;;EAYR;AACN,cAAU,KAAK,qBAAqB,EAAE,QAAQ,EAAE,OAAO,EAAE;AACzD,mBAAe,CAAC;AAChB,iBAAa,QAAiB;MAC5B,GAAG;AACD,sBAAc;AACd,2BAAmB,CAAC,GAAG;AACvB,sBAAc,CAAC;AAEf,eAAO,OAAO,cACV,cAAW,SAAQ,KAAK,IAAI,QAC5B,YAAoC,MAAiB,MACrD;;;AAGR,kBAAwB;AACxB,SAAK,YAAY,KAAK,MAAM,YAAY,MAAM,QAAQ,CAAC,IAAI,MAAM,OAAO;AACxE,WAAO;;EAgBT;AAGE,wBAA0B;AAC1B,0BAA4B;AAI5B,WAAO,KAAK,cACR,aAAa,QAAQ,eAAe,YAAY,OAAO,cACvD;;EAGE;AACN,WAAO,KAAK,IAAI,QAAQ;;EAGlB;AAGN,4BAAwB,KAAK,QAAQ;AAGrC,2BAAuB;AACvB,aAAS,QAAQ;AAGf,0BAAqB,KAAK,UAAU,cAAc,IAAI;;AAQxD,qBACI,KAAK,MAAM,kBAAkB,KAAK,MAAM,kBAAkB,SAAS;AACvE,0BACI,kBAAkB,mBAAmB,mBAAmB;AAC5D,QAAI,gBAAgB;AAClB,YAAM,IAAI,MACN,YAAY,KAAK,6CACb,0CAA0C;;;EAQtD;AAKE;AACA,gBAAsB;AACtB,qBAAiB,KAAK;AACtB,QAAI,cAAc;AAChB,mBACI,KAAK,MAAM,eAAe,OAAO,KAAK,MAAM,YAAY,OAAO;;AAGrE,8BAA0B,KAAK,MAAM;AACrC,+BAA2B,KAAK,MAAM;AAEtC,QAAI,KAAK;AACP,WAAK,MAAM,kBAAkB,KAAK;;AAGpC;AACA,mBAAe,UAAU,YAAY,KAAK;AAC1C;AACA,QAAI,UAAU;AACZ,oBAAa;AACX,iCAAyB,KAAK,QAAQ;AACtC,cAAM,OAAO,WAAW,CAAC,QAAQ,OAAO,SAAS,KAAK;AACtD,yBAAiB,MAAM,QAAQ,OAAO,MAAM,CAAC;AAC7C,YAAI,KAAK;AACP,eAAK,sBAAsB,YAAY,kBAAkB;;AAE3D,2BAAmB,SAAS,IACxB,EAAE,QAAQ,OAAO,WACb,KAAK,qBAAqB,QAAQ,OAAO;AAMjD,YAAI;AACF,8BACI,KAAK,sBAAsB,YAAY,QAAQ;AACnD,cAAI,iBAAiB;AAKnB,gBAAI,iBAAiB;AACnB,8BAAgB;;AAElB,+BAAmB,WAAW,OAAO,UAAU,cAAc;AAC7D,4BAAiB,iBAAgB,IAAI,QAAQ,OAAO;;AAEtD,kBAAQ,KAAK,2BAA2B;;AAE1C,eAAO;;;AAGT,uBAA+B;AAI7B,YAAI,CAAC;AACH;;AAEF,gBAAQ,QAAQ,IAAI,aAAU,KAAK,KAAK,KAAK,MAAM;;AAGrD,oBAAa;AACX,iCAAyB,KAAK,QAAQ;AACtC,cAAM,KAAK,KAAK,MAAM,YAAY,KAAK,SAAS;AAChD,qBAAc,MAAM,QAAQ,OAAO,MAAM,CAAC;AAC1C,YAAI,KAAK;AACP,eAAK,sBAAsB,YAAY,kBAAkB;;AAE3D,eAAO;;;AAKX;AACA,SAAK,UACD,MAAM,KAAK,MAAM,eAAe,MAAM,KAAK,MAAM,eAAe;AAC9D,UAAI,CAAC,KAAK,IAAI,QAAQ,YAAY,CAAC,KAAK,MAAM;AAC5C,kBAAU;;AAEV,wBAAgB,KAAK,SAAS,cAC1B,YAAY,QAAQ,MAAM;AAC9B,YAAI,KAAK,IAAI,QAAQ;AACnB,eAAK,SAAS,iBAAiB;;AAEjC,kBAAU,cAAc;;;AAIhC,QAAI;AACF,WAAK,YACD,YAAY,QAAQ,SAAS,eAAe,OAAO;;AAGzD,QAAI,KAAK,MAAM;AACb,WAAK,MAAM,cAAc,QAAQ,KAAK;QACpC,MAAM;QACN,YAAY,KAAK,MAAM,WAAW;QAClC,oBAAoB,KAAK,MAAM;QAC/B,cAAc,KAAK,MAAM,aAAa;QACtC,sBAAsB,KAAK,MAAM;QACjC,aAAa,OAAO,KAAK,QAAQ,IAC7B,SAAO,OAAO,QAAQ,OAAO,OAAO,KAAK,QAAQ;QACrD,cAAc,QAAQ,IAAI,UAAQ,KAAK;QACvC,cAAc,cAAc;QAC5B,WAAW,cAAc;;;AAG7B,WAAQ,MAAM,QAAQ,OAAO,UAAU,QAAQ;;EAQzC;AACN,kBAAc,QAAQ,IAAI,aAAU,KAAK,KAAK,KAAK,MAAM;AACzD,WAAO;;EAaD;AAGN,uBAAmB,YAAY;AAC/B,QAAI,cAAc;AAChB,2BAA+B,WAAW,gBAAgB;AAC1D,4BAAiC,WAAW,iBAAiB;AAI7D;AACA,UAAI,WAAW;AACb,QAAK,OACD,MAAM,QAAQ,SACd,MAAM;AAEV,6BAAqB,OAAO,KAAK,QAAQ,IAAI,SAAS,OAAO;;AAE7D,6BAAqB,aAAa,IAAI,eAAe,OAAO;;AAG9D,kCACI,QAAQ,OAAO,UAAU,cAAc;AAE3C,aAAO,mBAAmB,OAAO;;AAInC,WAAO;;EAQT;AAGE,QAAI,UAAU;AACZ,YAAM,IAAI,MAAM;;AAElB,YAAQ,SAAS;AACjB,eAAU,YAAW,KAAK;AAC1B,sBAAkB;AAClB,QAAI,UAAU,YAAY,AAAK,SAAS,OAAO;AAC7C,oBAAe,OAAoB,IAAI,OAAK,AAAK,aAAa;;AAEhE,mBAAe,SAAQ,MAAM,aAAa,OAAO;AACjD,cAAU,IAAI,OAAO,OAAO,OAAO,QAAQ
,SAAQ,KAAK,MAAM,SAAS,MAAiB;IACtD;IACA;IACA;IACA;;;AAIJ;AAGE,SAAO,mBAAS,SAAS,SAAS,QAAQ,QAAQ;AAClD,cAAY,SAAQ,UAAU,IAAI,OAAO,QAAQ;AACjD,cAAY,SAAQ,WAAW,OAAO,OAAO,OAAO;AACpD,gBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAEhD,mBAAiB,OAAO,MAAM;AAC9B,gBAAc,aAAK,cAAc,OAAO,SAAS;AAGjD,MAAI,aAAK,cAAc,IAAI,WAAW;AACpC,WAAO;;AAGT,YAAS,KAAK,OAAO,UAAU;AAC/B,SAAO;;AAGF,sBAAoC;EACzC,YAAY;EACZ,aAAa;EACb,WAAW;EACX,YAAY;;;;ACzDd;;;;;;;;;;;;;;;;AAwBM,eACF;AACF,SAAO,QAAQ,OAAO,qBAAW;AACjC,SAAO,KAAK;AACZ,SAAO,iBAAiB,QAAQ;AAEhC,gBAAc,aAAK,eAAe,MAAM,EAAE,OAAO;AAEjD,qBAAmB,qBAAa,iBAAiB,GAAG,iBAAiB;AACrE,gBAAc,IAAI,MAAM,EAAE,MAAM,QAAQ,KAAK;AAC7C,eAAa,EAAE,MAAM;AACrB,SAAO,WAAW,IAAI;AACpB,uBAAmB,CAAC,GAAG;AACvB,eAAW,SAAS;AACpB,mBACI,MAAM,CAAC,QAAQ,CAAC,IAAI,OAAO,CAAC,OAAO,MAAM,aAAa;AAC1D,UAAM,UAAU;AAChB,WAAO;;;AAIJ,qBAAmC;EACxC,YAAY;EACZ,aAAa;EACb,YAAY;;;;AChDd;;;;;;;;;;;;;;;;AAoBO,MAAM,aAA2B,wBAAwB;;;ACpBhE;;;;;;;;;;;;;;;;AAmBO,MAAM,eAA6B,wBAAwB;;;ACnBlE;;;;;;;;;;;;;;;;AAkBA,MAAM,0BAAwB;AACvB,gCACH,yBAAyB,mBAAmB;;;ACpBhD;;;;;;;;;;;;;;;;AAuBA,IAAI;AAMJ;AACE,qBAAmB,SAAQ,KAAK,MAAM,cAAc,MAAe;IACjE;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;;;AAIE;AAKJ,SAAO,mBAAS,QAAQ,SAAS;AACjC,SAAO,KAAK;AAEZ,OAAK,OAAO,KAAK,WAAW;AAC5B,MAAI,WAAW;AACb,cAAU,IAAI,MAAM,MAAM;;AAG5B,SAAO,WAAW,SAAS,cAAc,aAAa,kBAAkB;AAExE,uBAAqB,qBAAa,WAAW,WAAW;AACxD,MAAI,aAAa,SAAS;AACxB,UAAM,IAAI,MAAM;;AAGlB,MAAI,iBAAiB,KAAK,gBAAgB;AACxC,UAAM,IAAI,MACN;;AAGN,MAAI,iBAAiB,KAAK,mBAAmB;AAC3C,UAAM,IAAI,MACN;;AAGN,8BAA4B,EAAE,MAAM,SAAS,MAAM;AAGnD,qBAAmB,qBAAa,WAAW,WAAW;AACtD,mBAAiB,EAAE,MAAM;AACzB,aAAW,QAAQ;AACjB,UAAM,QAAQ;AACd,QAAI,QAAQ;AACZ,aAAS,OAAO,MAAM,GAAG;;AAG3B,oBAAkB,SAAQ,CAAC,QAAQ,CAAC,IAAI,OAAO,CAAC,OAAO,WAAW;AAElE,SACE,wBACA,oBACA,8BAEE,qBAAa,WAAW,kBACpB,UAAU,OAAO,cAAc,qBAAqB,OAAO,KAC3D,SAAS,WAAW,SAAS;AACrC,UAAQ;AACR,QAAM;AACN,YAAU;AAEV,qBAAmB,qBAAa,WAAW,WAAW;AAEtD,aAAW,QAAQ;AACjB,QAAI,QAAQ,MAAM,QAAQ;AAC1B,YAAQ,QAAQ;;AAIlB,eAAa,qBAAa,WAAW,gBAAgB,OAAO,KAAK;AAEjE,mBAAiB,KAAK,OAAO,aAAa,WAAW,QAAQ,UAAU;AAEvE,qBAAmB,QAAQ,MAAM,OAAK,MAAM;AAC5C,MAAI;AACF,oBAAgB,MAAM,CAAC,QAAQ,CAAC,IAAI,OAAO,CAAC,OAAO,OAAO;AAC1D,WAAO,SAAQ,CAAC,QAAQ,CAAC,GAAG,UAAU,OAAO,CAAC,OAAO,WAAW;;AAGlE,cAAY,SAAQ,WAAW,UAAU;AACzC,MAAI,CAAC,SAAS,KAAK,UAAQ,SAAS;AAClC,gBAAY,SAAQ,UAAU,IAAI,UAAU,QAAQ;AACpD,0BAAsB,IAAI,WACtB,IAAI,WAAW,aAAK,eAAe,UAAU,QAAQ;AACzD,uBAAmB,IAAI,WAAW,IAAI,WAAW,OAAO;AACxD,qBAAiB,IAAI,WAAW,IAAI,WAAW,KAAK;AACpD,yBAAqB,IAAI,WAAW,IAAI,WAAW,SAAS;AAE5D,6BAAyB,IAAI,WAAW,IAAI,WAAW,UAAU;AACjE,4BACI,IAAI,WAAW,IAAI,WAAW,aAAK,eAAe,WAAW;AACjE,kBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAEhD,qBACI,KAAK,eAAe,UAAU,MAAM,QAAQ,YAAY,UACxD,cAAc,kBAAkB,iBAAiB,SAAS,QAC1D;;AAGN,SAAO,SAAQ,CAAC,QAAQ,CAAC,GAAG,MAAM,OAAO,CAAC,OAAO,WAAW;;AAGvD,2BAAyC;EAC9C,YAAY;EACZ,aAAa;EACb,WAAW;EACX,YAAY;;;;AChJd;;;;;;;;;;;;;;;;AAkBA,MAAM,0BAAwB;AACvB,kBACH,yBAAyB,KAAK;;;ACpBlC;;;;;;;;;;;;;;;;AAuBA,IAAI;AAEJ;AACE,YAAU,SAAQ,KAAK,MAAM,KAAK,MAAe,CAAC;;AAGpD;AAEE,SAAO,mBAAS,QAAQ,SAAS;AACjC,SAAO,MAAM,YAAY;AACzB,SAAO,KAAK;AACZ,cAAY,SAAQ,UAAU,IAAI,EAAE,QAAQ;AAC5C,gBAAc;AACd,cAAY;AAEZ,SAAO,YAAY,MAAM,cAAc,sBACnC,wBAAwB,GAAG,MAAM;AAErC,sBAAoB;AACpB,MAAI;AACF,yBAAqB,SAAQ,UAAU,IAAI,WAAW,QAAQ;AAC9D,QAAI,iBAAiB;AAGnB,cAAQ;AACR,gBAAU;AACV,sBAAgB,qBAAa,iBACzB,cAAc,QAAQ,MAAM,MAAM;;;AAI1C,uBAAa,2BACT,OAAO,eAAe,MAAM,MAAM;AACtC,kCACI,qBAAa,0BAA0B,MAAM,OAAO;AACxD,qBAAmB,aAAK,cAAc;AAEtC,cAAY,SAAQ,WAAW,UAAU,MAAM;AAC/C,MAAI,aAAK,cAAc,MAAM,WAAW;AACtC,kBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAChD,YAAQ,SAAS,YAAY;;AAG/B,MAAI;AAEF,aAAQ,YAAY,WAAW;;AAGjC,MAAI;AAEF,qBAAiB,qBAAa,qBAAqB,IAAI,OAAO;AAC9D,QAAI,QAAQ;;AAGd,SAAO;;AAGF,kBAAgC;EACrC,YAAY;EACZ,aAAa;EACb,WAAW;EACX,YAAY;;;;ACpFd;;;;;
;;;;;;;;;;;AAmBO,MAAM,aAA2B,wBAAwB;;;ACnBhE;;;;;;;;;;;;;;;;AAuBA,IAAI;AAIJ;AACE,aAAW,SAAQ,KAAK,MAAM,MAAM,MAAiB;IACnD;IACA;IACA;IACA;IACA;IACA;;;AAIJ;AAEE,SAAO,QAAQ,mBAAS,SAAS;AACjC,SAAO,KAAK;AACZ,cAAY,SAAQ,UAAU,IAAI,EAAE,QAAQ;AAC5C,SAAO,QAAQ;AAEf,mBAA2B,IAAI,MAAM,EAAE,MAAM;AAC7C,eAAa,GAAG,IAAI,SAAS,QAAQ;AACnC,aAAS,KAAK,EAAE,MAAM,KAAK,KAAK;;AAElC,sBAAoB,IAAI,WAAW,IAAI,WAAW,EAAE,OAAO;AAC3D,wBAAsB,IAAI,WAAW,IAAI,WAAW,UAAU;AAE9D,cAAY,SAAQ,WAAW,UAAU,EAAE;AAC3C,gBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAChD,WACI,KAAK,aAAa,EAAE,MAAM,QAAQ,eAAe,SAAS,QAC1D,SAAS,IAAI,QAAQ;AACzB,SAAO;;AAGF,mBAAiC;EACtC,YAAY;EACZ,aAAa;EACb,WAAW;EACX,YAAY;;;;AChEd;;;;;;;;;;;;;;;;AAuBA,gBACI;AAEF,SAAO,QAAQ,mBAAS,SAAS;AACjC,SAAO,SAAS;AAChB,SAAO,QAAQ;AACf,qBAAmB,MAAM,MAAM;AAC/B,eAAa,MAAM,MAAM;AACzB,mBAA2B,IAAI,MAAM,OAAO;AAC5C,iBAAe;AACf,eAAa,GAAG,IAAI,MAAM;AACxB,QAAI,MAAM;AACR,eAAS,cAAc,MAAM,MAAM;;;AAGvC,eAA2B,IAAI,MAAM;AACrC,gBAAc,IAAI,MAAM,MAAM,KAAK;AACnC,eAAa,MAAM,MAAM;AACzB,OAAK,QAAQ;AACb,eAAa,GAAG,IAAI,KAAK,QAAQ;AAC/B,UAAM,QAAQ;AACd,SAAK,KAAK,MAAM,CAAC,QAAQ,CAAC,GAAG,QAAQ,OAAO,CAAC,OAAO,OAAO;;AAE7D,SAAO,KAAK,IAAI,EAAE,QAAQ,WAAY,EAAC,QAAQ,OAAO,OAAO;;AAGxD,qBAAmC;EACxC,YAAY;EACZ,aAAa;EACb,YAAY;;;;ACpDd;;;;;;;;;;;;;;;;AAqBA,mBAAmB;AACjB,SAAO,SAAS,IAAI,qBAAW;AAC/B,cAAY,SAAQ,WAAW,EAAE,OAAO,EAAE;AAC1C,kBAAgB,SAAQ,mBAAmB;AAC3C,UAAQ,KAAK;AACb,SAAO;;AAGF,wBAAsC;EAC3C,YAAY;EACZ,aAAa;EACb,YAAY;;;;AChCd;;;;;;;;;;;;;;;;AAmGA,MAAM,gBAAgC;EACpC;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;;AAGF,2BAA2B;AACzB,iBAAe;;;;ACnLjB;;;;;;;;;;;;;;;;AAmBA,MAAM,OAAM;AAMZ,KAAI,aAIA,yBAAyB,YAAY,YAAY,SAAS,IAAI,WAAW;EACvE;EAAG;EAAI;EAAK;EAAK;EAAG;EAAG;EAAG;EAAG;EAAI;EAAG;EAAK;EAAI;EAAI;EAAG;EACpD;EAAG;EAAI;EAAK;EAAK;EAAG;EAAG;EAAG;EAAG;EAAI;EAAG;EAAK;EAAI;EAAI;;AAOvD,KAAI,aAAa,gCAAgC;AAG/C,MAAI,KAAI,IAAI;AACV,WAAO;;AAGT;AAGE,QAAI,iBAAiB,MAAM,YAAY,IAAI,kBAAkB;AAG7D,WAAO,YAAY,SAAS,IAAI,WAAW;MACzC;MAAG;MAAI;MAAK;MAAK;MAAG;MAAI;MAAI;MAAG;MAAG;MAAG;MAAI;MAAI;MAAK;MAAI;MAAG;MAAG;MAAI;MAAG;MACnE;MAAG;MAAI;MAAK;MAAK;MAAG;MAAI;MAAI;MAAG;MAAG;MAAG;MAAI;MAAI;MAAK;MAAI;MAAG;MAAG;MAAI;;;AAGlE,WAAO;;;;;ACnCX,MAAA,kCAAoC;;;ACrB7B,MAAM,qBAAqB;;;ACwBlC,MAAA,oBAAwB;AAxBxB;;;;;;;;;;;;;;;;AA0BA,sBAAsB;AA1BtB,0BAuCiC;EAK/B;AACE;AADiB,SAAA,OAAA;AAHX,SAAA,mBAAmB;AAKzB,SAAK,KAAK,KAAK;AACf,SAAK,YAAY,IAAI,YAAY,MAAM;;EAGzC;AAEE,mBAAe;AACf,SAAK,KAAK,QAAQ,QAAQ,OAAO;AACjC,WAAO;;EAGT;AACE,WAAO,KAAK,UAAU;;QAGlB;AACJ,kBAAc,aAAK;AACnB;AACA,qBAAiB,aAAK,QAAQ;AAC9B,WAAO,CAAC;;EAGV;AAGE,eAAW,KAAK;AAChB,QAAI,UAAU;AACZ,0BAAoB;AACpB,WAAK,UAAU,IACX,QAAQ,CAAC,IAAI,aAAa,OAAO,OAAO,cAAc;AAC1D;;AAGF,iBAAa,aAAK,cAAc;AAChC,qBAAiB,OAAO,aAAK,gBAAgB;AAC7C,yBAAqB,KAAK,KAAK,QAAQ;AAEvC,SAAK,UAAU,IAAI,QAAQ,CAAC,IAAI,cAAc,OAAO;AAErD,SAAK,KAAK,KAAK,eAAe,IAAI,MAAM;AAExC,QAAI,UAAU;AACZ,WAAK,KAAK,OAAO,IACb,IAAI,WACC,OAAmC,QACnC,OAAmC,YAAY,WACpD;;;QAIF;AACJ,WAAO,KAAK,SAAS;;EAGvB;AACE,WAAO,cAAc,OAAO,OAAO,eAC/B,KAAK,UAAU,IAAI;AACvB,QAAI,UAAU;AACZ,aAAO;;AAET,kBAAc,KAAK,KAAK,OAAO,MAC3B,cACA,eAAe,aAAK,cAAc,SAAS,aAAK,gBAAgB;AACpE,WAAO,qBAAqB,MAAM,QAAQ;;EAG5C;AACE,kBAAa,KAAK,UAAU,IAAI;AAChC,SAAK,KAAK,MAAM,MAAK;AACrB,SAAK,KAAK,KAAK,YAAY,MAAK;AAChC,SAAK,UAAU,OAAO;;EAGxB;AACE,WAAO;;EAKT;AACE,WAAO,KAAK,UAAU,IAAI,QAAQ;;EAGpC;AACE,SAAK,KAAK,KAAK;AACf,SAAK,OAAO;;EAGd;AACE,WAAO,CAAC,YAAY;;EAStB;AAEE;AACA,QAAI,gB
AAgB;AAClB,eAAS,KAAK,MAAM,MAAmB,OAAO;;AAE9C,eAAS;AACT,iBAAW,KAAK;AAChB,WAAK,UAAU,IAAI,QAAQ,CAAC,IAAI,cAAc,OAAO;AACrD,mBAAa,aAAK,cAAc;AAChC,WAAK,KAAK,KAAK,eAAe,IAAI,MAAM;;AAE1C,WAAO,CAAC,QAAQ,OAAO;;EAGzB,oBAAoB,OAAO,OAAO;AAEhC,oBAAe,KAAK,KAAK,OAAO;AAChC,WAAO,gBAAgB,KAAK,UAAU,IAAI;AAC1C,iBAAa,aAAK,cAAc;AAChC,YAAQ;WACD;AACH,eAAO,IAAI,aAAa,SAAQ,cAAc;WAC3C;AACH,eAAO,IAAI,WAAW,SAAQ,cAAc;WACzC;AACH,eAAO,IAAI,WAAW,SAAQ,cAAc;;AAE5C,cAAM,IAAI,MAAM,iBAAiB;;;;AAKzC,gBAAgB,QAAQ;AACtB,SAAO,QAAQ,MAAM;AACrB,SAAO,IAAI,YAAY;GACtB;AAEH;AAEE,SAAO;AACL,iBAAK,MAAM,MAAM,CAAC,aAAa,gBAAgB,KAAK;AAClD,UAAI,CAAC,SAAS;AACZ,gBAAQ,IAAI,EAAE,uCAAuC;;AAEvD,eAAS,cAAc,KAAK;AAC1B,oBAAY,YAAY,QAAQ,SAAS,KAAK;AAC5C,mBAAS,OAAO;;;;AAItB,WAAO;;;AAUX;AAGE,MAAI,YAAY;AAGd,WAAO;;AAGT,aAA2B;AAC3B,MAAI,iBAAiB;AACnB,WAAO;aACE;AACT,WAAO;;AAGT,MAAI,eAAe;AACjB,QAAI,YAAY,SAAS;AACvB,aAAO,YAAY;;;AAIvB,SAAO,mBAAmB;;AAU5B;AACE,4CAA0C,MAAM,QAAQ,IAAI;IAC1D,MAAM,SAAS;IACf,MAAM,SAAS;;AAGjB,SAAO,IAAI,QAAQ;AACjB,0BAAyC;AAOzC,kBAAc,aAAa;AACzB,UAAI,KAAK,SAAS;AAChB,yBAAiB;AACjB,qBAAa,IAAI,KAAK,CAAC,WAAW,CAAC,MAAM;AACzC,eAAO,IAAI,gBAAgB;;AAG7B,UAAI,KAAK,SAAS;AAChB,eAAO,oBACH,eAA0B,kBAC1B,kBAAkB,OAAO,iBAAiB;;AAEhD,aAAO,SAAS;;AAMlB,QAAI;AACF,oBAAc,kBACV,0BAA0B,oBACtB,eAA0B,kBAC1B,kBAAkB,OAAO,iBAAiB;;AAEpD;AAEA,QAAI,oBAAoB,iBAAiB,YAAY;AACnD,aAAO,wCAAwB;AAC/B,WAAK,sBAAsB,IAAI,KAC3B,CAAC,qEACA,wCAAwB,aACzB,CAAC,MAAM;;AAGX,aAAO,0BAAY;;AAGrB,2BAA+B;AAE/B,SAAK,OAAO;MACV,MAAM,KAAK,MAAM,QAAQ,MAAM;MAC/B,gBAAgB,KAAK,MACjB,mBAAmB,MACnB;QACE;QACA;QACA;;MAEN,aAAa,KAAK,MAAM,gBAAgB,gBAAgB,CAAC;MACzD,SAAS,KAAK,MAAM,WAAW,gBAAgB;;AAEjD,sBAAkB;AAClB,SAAK,uBAAuB;AAC1B,oBAAc;AACd,oBAAc;AACd,cAAQ,CAAC;;AAEX,SAAK,UAAU;AACb,UAAI;AAEF;;AAEF,UAAI;AAGF;;AAEF,oBAAc;AACd,wBACI;AAEJ,aAAO,CAAC,SAAS;;;;AAKvB;AAEE,UAAQ;SACD;AACH,aAAO,IAAI,aAAa;SACrB;AACH,aAAO,IAAI,WAAW;SACnB;AACH,aAAO,IAAI,WAAW;;AAEtB,YAAM,IAAI,MAAM,iBAAiB;;;AAIvC,wBAAwB;EACtB;EAA0B;EAC1B;;AAIF,eAAuB;AACvB,qBAA6B;AAC7B,kBAAsD;AACtD,kBAAkB;AAClB,kBAAkB;ACoDZ,sBACF,oCACmB;AACrB,MAAI;AACF,UAAM,IAAI,MACN;;AAKN,MAAI,OAAO,oBAAoB;AAC7B,qBAAiB;;AAEjB,kBAAc;AACd,yBACI,gBAAgB,OAAO,UAAQ,YAAY,SAAS;AACxD,QAAI,aAAa,SAAS;AACxB,YAAM,IAAI,MACN,2DACG,aAAa,KAAK;;;AAM7B,gBAAc;;;;AC5ahB;;;;;;;;;;;;;;;;;;ACAA;;;;;;;;;;;;;;;;;;ACaA,MAAM,iBAAoB;;;ACb1B,IAAO,wBAAQ;AAAA,EACb;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAG;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EACvE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAC1E;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EACrE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EACzE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAC1E;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EACxE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EACpE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAA
A,EAAK;AAAA,EAAK;AAAA,EAAG;AAAA,EACpE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACzE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EACrE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EACtE;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EACrE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACxE;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EACxE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EACvE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EACxE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACrE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EACvE;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EACrE;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAG;AAAA,EAAG;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAG;AAAA,EACrE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EACxE;AAAA,EAAG;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAG;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EACxE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EACvE;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EACpE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAC1E;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EACzE;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EACrE;AAAA,EAAK;AAAA,E
AAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACrE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EACtE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EACxE;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EACrE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAG;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACzE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAG;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EACvE;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACzE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EACvE;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACxE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAG;AAAA,EACvE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACpE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACpE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACrE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAG;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACvE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EACzE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EACpE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK
;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAG;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACxE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EACxE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EACvE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACpE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACrE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EACpE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACpE;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACrE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;A
AAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACpE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACpE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACxE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EACrE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EACrE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACpE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACxE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EACvE;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACvE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAG;AAAA,EAAI;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACxE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EACpE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EACxE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACrE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAA
A,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EACxE;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAG;AAAA,EAAI;AAAA,EAAI;AAAA,EAAG;AAAA,EAAK;AAAA,EACrE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EACvE;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EACxE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EACvE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EACxE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EACxE;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EACzE;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACvE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACrE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAG;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EACrE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACvE;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACrE;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EACrE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAC1E;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EACvE;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACvE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACzE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EACvE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EACxE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EA
CvE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAC1E;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EACrE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EACpE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACpE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAG;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAG;AAAA,EAAI;AAAA,EACvE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAG;AAAA,EACpE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACpE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EACrE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACrE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EACrE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAA
K;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACpE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACrE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACpE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EACrE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EACxE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACrE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACvE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACxE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;
AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA;;;ACvKnE,MAAA,WAA0B;AAC1B,YAAqB;AACrB,eAAwB;AACxB,gBAAyB;AACzB,gBAAyB;;;ACLzB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAkBA,oBAAoB;AAClB,SAAO;AAAA,IACL,KAAK,IAAI,IAAI,SAAS,KAAK,IAAI,WAAW;AAAA,IAC1C,KAAK,IAAI,IAAI,SAAS,KAAK,IAAI,WAAW;AAAA;AAAA;AAG9C;AACE,SAAO;AAAA,IACL,IAAI,WAAW,KAAM,KAAI,SAAS,KAAK,IAAI,WAAW,MAAM;AAAA,IAC5D,IAAI,WAAW,KAAM,KAAI,SAAS,KAAK,IAAI,WAAW,MAAM;AAAA;AAAA;AAGhE;AACE,YAAU,OAAM,MAAM;AACtB,YAAU,OAAM,MAAM;AACtB,gBAAc,CAAC;AAAA,IACb,IAAI,WAAW,KAAK;AAAA,IACpB,IAAI,WAAW,KAAK;AAAA,IACpB,IAAI,SAAS,KAAK;AAAA,IAClB,IAAI,SAAS,KAAK;AAAA;AAEpB,SAAO,GAAG,MAAM,cAAc,QAAO,OAAO,CAAC,IAAI;AAAA;AAEnD;AACE,qBAAmB,CAAC,IAAI,WAAW,KAAK,OAAO,IAAI,IAAI,WAAW,KAAK,OAAO;AAC9E,mBAAiB,CAAC,IAAI,SAAS,KAAK,OAAO,IAAI,IAAI,SAAS,KAAK,OAAO;AACxE,wBAAsB,IAAI,cAAc,IAAI;AAC1C,wBAAoB,CAAC,MAAM,KAAK,OAAO,IAAI,MAAM,KAAK,OAAO;AAC7D,WAAO;AAAA;AAET,SAAO,CAAE,YAAY,UAAU,eAAe,YAAY,IAAI;AAAA;AAEhE,kCAAkC;AAChC,iBAAe,aAAa;AAC5B,eAAa,WAAW;AACxB,sBAAoB,CAAC,SAAS,KAAK,KAAK,GAAG,SAAS,KAAK,KAAK;AAC9D,qBAAmB,CAAC,OAAO,KAAK,YAAY,IAAI,OAAO,KAAK,YAAY;AACxE,mBAAiB,CAAC,OAAO,KAAK,YAAY,IAAI,OAAO,KAAK,YAAY;AACtE,SAAO,CAAE,YAAY,UAAU,eAAe,IAAI;AAAA;AAEpD;AACE,kBAAgB,aAAa;AAC7B,eAAa,WAAW;AACxB,kBAAgB,KAAK,IAAI,GAAG;AAC5B,mBAAiB,UAAU;AAC3B,qBAAmB,CAAC,QAAQ,KAAK,UAAU,QAAQ,KAAK;AACxD,mBAAiB,CAAC,QAAQ,KAAK,UAAU,QAAQ,KAAK;AACtD,SAAO,CAAE,YAAY,UAAU,eAAe,IAAI;AAAA;AAEpD;AACE,kBAAgB;AAAA,IACd,IAAI,SAAS,KAAK,IAAI,WAAW;AAAA,IACjC,IAAI,SAAS,KAAK,IAAI,WAAW;AAAA;AAEnC,sBAAoB,CAAC,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,YAAY;AAC3E,qBAAmB,CAAC,IAAI,WAAW,KAAK,YAAY,IAAI,IAAI,WAAW,KAAK,YAAY;AACxF,mBAAiB,CAAC,IAAI,SAAS,KAAK,YAAY,IAAI,IAAI,SAAS,KAAK,YAAY;AAClF,SAAO,CAAE,YAAY,UAAU,eAAe,IAAI;AAAA;;;AC3EpD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAgBA,0BAA0B;AACxB,SAAO,QAAQ,IAAI,KAAK,KAAK,KAAK,MAAO,SAAQ,KAAK,MAAO,KAAI,KAAK;AAAA;AAExE;AACE,kBAAgB,KAAK,KAAK,IAAI,KAAK,MAAM,CAAE,QAAO,KAAK,OAAO,KAAK,OAAO,KAAK,OAAO;AACtF,SAAO,iBAAiB;AAAA;AAE1B,+BAA+B,UAAU,CAAC,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,GAAG;AACvE;AACE,gBAAc;AACd,eAAa,GAAG,IAAI,GAAG,QAAQ;AAC7B,eAAW,GAAG,KAAK,GAAG;AAAA;AAExB,SAAO;AAAA;AAET;AACE,iBAAe;AACf,eAAa,GAAG,IAAI,IAAI,QAAQ;AAC9B,WAAO,KAAK,IAAI,GAAG;AAAA;AAErB,SAAO;AAAA;AAET;AACE,kBAAgB;AAChB,eAAa,KAAK;AAClB,iBAAe,GAAG,MAAM,MAAM;AAC5B,YAAQ,KAAK;AACb,mBAAe,GAAG,MAAM,MAAM;AAC5B,cAAQ,KAAK,KAAK,IAAI,KAAK,MAAM,mBAAmB,MAAM;AAAA;AAAA;AAG9D,SAAO;AAAA;AAET;AACE,eAAa,KAAK,IAAI;AACtB,eAAa,KAAK,IAAI;AACtB,yBAAuB,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,MAAM,MAAM,IAAI,CAAC,GAAG,GAAG;AAClE,4BAA0B,uBAAuB,OAAO,IAAI,OAAO;AACnE,mCAAiC,0BAA0B,mBAAmB;AAC9E,oCAAkC,uBAAuB,CAAC,OAAO,IAAI,CAAC,OAAO;AAC7E,SAAO,0BAA0B,0BAA0B;AAAA;AAE7D;AACE,4BAA0B,CAAC,CAAC,OAAO,GAAG,IAAI,OAAO,GAAG,KAAK,CAAC,OAAO,GAAG,IAAI,OAAO,GAAG;AAClF,+BAA6B,CAAC,OAAO,GAAG,IAAI,OAAO,GAAG;AACtD,8BAA4B;AAAA,IAC1B,CAAC,IAAI,kBAAkB,IAAI;AAAA,IAC3B,CAAC,IAAI,kBAAkB,IAAI;AAAA;AAE7B,SAAO;AAAA,IACL,kBAAkB,GAAG,OAAO,oBAAoB;AAAA,IAChD,kBAAkB,GAAG,OAAO,oBAAoB;AAAA,IAChD,CAAC,GAAG,GAAG;AAAA;AAAA;AAGX;AACE,SAAO;AAAA,IACL,IAAI,uBAAuB,eAAe;AAAA,IAC1C,IAAI,uBAAuB,eAAe;AAAA;AAAA;;;ACpE9C,MAAA,WAA0B;AAC1B,gBAAyB;AACzB,cAAuB;AACvB,gBAAyB;;;ACNzB,IAAO,iBAAQ;AAAA,EACb,SAAS;AAAA,EACT,UAAU;A
AAA,EAEV,SAAS;AAAA,EACT,OAAO;AAAA,EAIP,SAAS;AAAA,EAIT,YAAY;AAAA,EAKZ,QAAQ;AAAA,EAIR,gBAAgB;AAAA,EAKhB,QAAQ;AAAA,IACN,SAAS;AAAA,IACT,OAAO;AAAA,IACP,QAAQ;AAAA,IAIR,QAAQ;AAAA,IACR,YAAY;AAAA,IACZ,UAAU;AAAA,IACV,WAAW;AAAA,IACX,MAAM;AAAA,IACN,YAAY;AAAA,IACZ,KAAK;AAAA,IACL,UAAU;AAAA,IACV,OAAO;AAAA,IACP,SAAS;AAAA,IACT,YAAY;AAAA,IACZ,aAAa;AAAA,IACb,UAAU;AAAA,IACV,UAAU;AAAA;AAAA,EAGZ,SAAS;AAAA,IACP,SAAS;AAAA;AAAA,EAGX,MAAM;AAAA,IACJ,SAAS;AAAA,IAIT,UAAU;AAAA,MACR,WAAW;AAAA,MAIX,WAAW;AAAA,MACX,UAAU;AAAA,MAEV,YAAY;AAAA,MAKZ,eAAe;AAAA,MACf,cAAc;AAAA,MAEd,gBAAgB;AAAA;AAAA,IAKlB,MAAM;AAAA,MACJ,SAAS;AAAA,MACT,WAAW;AAAA,MACX,WAAW;AAAA;AAAA,IAGb,MAAM;AAAA,MACJ,SAAS;AAAA,MACT,WAAW;AAAA,MACX,WAAW;AAAA;AAAA,IAGb,KAAK;AAAA,MACH,SAAS;AAAA,MACT,WAAW;AAAA,MAEX,WAAW;AAAA,MACX,YAAY;AAAA;AAAA,IAId,QAAQ;AAAA,MACN,SAAS;AAAA,MACT,eAAe;AAAA,MACf,WAAW;AAAA,MACX,WAAW;AAAA,MACX,YAAY;AAAA;AAAA,IAId,SAAS;AAAA,MACP,SAAS;AAAA,MACT,WAAW;AAAA,MACX,eAAe;AAAA,MACf,YAAY;AAAA,MACZ,WAAW;AAAA;AAAA;AAAA,EAIf,MAAM;AAAA,IACJ,SAAS;AAAA,IACT,WAAW;AAAA,IACX,WAAW;AAAA,IACX,eAAe;AAAA,IAEf,gBAAgB;AAAA,IAEhB,WAAW;AAAA;AAAA,EAGb,MAAM;AAAA,IACJ,SAAS;AAAA,IACT,WAAW;AAAA,IACX,YAAY;AAAA,IAKZ,eAAe;AAAA,IACf,cAAc;AAAA,IAEd,gBAAgB;AAAA,IAEhB,UAAU;AAAA,IAEV,WAAW;AAAA,IACX,UAAU;AAAA,MACR,WAAW;AAAA;AAAA,IAEb,UAAU;AAAA,MACR,WAAW;AAAA;AAAA;AAAA;;;;;;AC3IjB,MAAM,oBAAoB;AAAA,EACxB,MAAM,CAAE,UAAU,CAAE,YAAY,IAAK,KAAK,CAAE,YAAY,IAAK,QAAQ,CAAE,YAAY,IAAK,SAAS,CAAE,YAAY;AAAA,EAAO,MAAM,CAAE,YAAY;AAAA;AAI5I,aAAY;AACV,MAAI,OAAO,gBAAgB;AAAa,WAAO,YAAY;AAC3D,SAAO,SAAS,OAAO,QAAQ,OAAO,YAAY,MAAO;AAAA;AAI3D;AACE,mBAAiB,SAAS,OAAO,OAAO,QAAQ;AAChD,SAAO,QAAQ,OAAO;AACpB,WAAO,KAAK,OAAO,IAAI,QAAQ;AAC7B,mBAAa,KAAK;AAClB,mBAAa,IAAI;AACjB,UAAI,MAAM,QAAQ,SAAS,MAAM,QAAQ;AACvC,aAAK,OAAO,KAAK,OAAO,GAAG;AAAA,iBAClB,SAAS,SAAS,SAAS;AACpC,aAAK,OAAO,UAAU,MAAM;AAAA;AAE5B,aAAK,OAAO;AAAA;AAAA;AAGhB,WAAO;AAAA,KACN;AAAA;AAxCL;AAAA,EA4CE,yBAAyB;AACvB,SAAK,KAAK;AACV,SAAK,UAAc;AACnB,SAAK,SAAS,UAAiB,gBAAS;AACxC,SAAK,KAAK;AACV,SAAK,QAAQ;AACb,SAAK,aAAa;AAClB,SAAK,qBAAqB;AAC1B,SAAK,cAAc;AACnB,SAAK,WAAW;AAChB,SAAK,OAAO;AAEZ,SAAK,SAAS;AAAA,MACZ,UAAU;AAAA,MACV,SAAS;AAAA,MACT,UAAU;AAAA,MACV,MAAM;AAAA,MACN,KAAK;AAAA,MACL,QAAQ;AAAA,MACR,SAAS;AAAA;AAGX,SAAK,WAAW;AAChB,SAAK,MAAM;AACX,SAAK,SAAS;AACd,SAAK,UAAU;AACf,SAAK,OAAO;AACZ,SAAK,OAAO;AAAA;AAAA,EAId;AAEE,QAAI,OAAO,KAAK,OAAO;AAAS,cAAQ,IAAI,UAAU,GAAG;AAAA;AAAA,EAG3D;AACE,QAAI,KAAK,OAAO;AAAS,aAAe;AACxC,WAAO;AAAA;AAAA,EAIT;AACE,QAAI,CAAC,KAAK;AAAoB;AAC9B,oBAAgB,GAAG,SAAS,MAAM;AAClC,qBAAiB,KAAK;AACtB,SAAK,aAAa;AAClB,mBAAe,UAAU;AACzB,QAAI,WAAW;AAAG,WAAK,IAAI,GAAG,KAAK;AAAA;AAAA,EAIrC;AACE,QAAI,CAAC,KAAK;AAAa,aAAO;AAC9B,QAAI,CAAC;AAAO,aAAO;AACnB,QAAI,GAAG,IAAI,MAAM,WAAW,CAAE,kBAAiB,GAAG;AAChD,aAAO;AAAA;AAET;AACE,SAAG;AAAA;AAEH,aAAO;AAAA;AAET,WAAO;AAAA;AAAA,QAIH;AACJ,SAAK,QAAQ;AACb,sBAAkB;AAClB,QAAI;AAAY,WAAK,SAAS,UAAU,KAAK,QAAQ;AAErD,QAAI,KAAK;AACP,WAAK,aAAa;AAClB,WAAK,IAAI,YAAY,KAAK,kCAAkC,GAAG;AAC/D,WAAK,IAAI,kBAAkB,KAAK;AAChC,WAAK,IAAI,UAAU,GAAG,IAAI;AAC1B,WAAK,WAAW;AAAA;AAElB,QAAI,KAAK,OAAO;AACd;AAAA,QACE,KAAK,OAAO;AAAA,QACZ,KAAK,OAAO;AAAA,QACZ,KAAK,OAAO;AAAA,QACZ,KAAK,OAAO;AAAA,QACZ,KAAK,OAAO;AAAA,QACZ,KAAK,OAAO;AAAA,UACV,MAAM,QAAQ,IAAI;AAAA,QACpB,KAAK,OAAO,YAAa,MAAK,OAAO,KAAK,UAAU,AAAS,cAAK,KAAK,OAAO,QAAQ;AAAA,QACtF,KAAK,OAAO,OAAS,MAAK,OAAO,KAAK,WAAW,KAAK,OAAO,KAAK,IAAI,UAAW,AAAI,SAAK,KAAK,UAAU;AAAA,QACzG,KAAK,OAAO,UAAY,MAAK,OAAO,KAAK,WAAW,KAAK,OAAO,KAAK,OAAO,UAAW,AAAO,YAAK,KAAK,UAAU;AAAA,QAClH,KAAK,OAAO,WAAa,MAAK,OAAO,KAAK,WAAW,KAAK,OAAO,KAAK,QAAQ,UAAW,AAAQ,aAAK,KAAK,UAAU;AAAA,QACrH,KAAK,OAAO,WAAY,MAAK,OAAO,KAAK,UAAU,AAAQ,aAAK,KAAK,UAAU;AAAA,QAC/E,KAAK,OAAO,YAAa,
MAAK,OAAO,KAAK,UAAU,AAAS,cAAK,KAAK,OAAO,QAAQ;AAAA;AAAA;AAGxF,UAAI,KAAK,OAAO,KAAK,WAAW,CAAC,KAAK,OAAO;AAAU,aAAK,OAAO,WAAW,MAAM,AAAS,cAAK,KAAK,OAAO;AAC9G,UAAI,KAAK,OAAO,KAAK,WAAW,KAAK,OAAO,KAAK,IAAI,WAAW,CAAC,KAAK,OAAO;AAAK,aAAK,OAAO,MAAM,MAAM,AAAI,SAAK,KAAK;AACxH,UAAI,KAAK,OAAO,KAAK,WAAW,KAAK,OAAO,KAAK,OAAO,WAAW,CAAC,KAAK,OAAO;AAAQ,aAAK,OAAO,SAAS,MAAM,AAAO,YAAK,KAAK;AACpI,UAAI,KAAK,OAAO,KAAK,WAAW,KAAK,OAAO,KAAK,QAAQ,WAAW,CAAC,KAAK,OAAO;AAAS,aAAK,OAAO,UAAU,MAAM,AAAQ,aAAK,KAAK;AACxI,UAAI,KAAK,OAAO,KAAK,WAAW,CAAC,KAAK,OAAO;AAAS,aAAK,OAAO,UAAU,MAAM,AAAQ,aAAK,KAAK;AACpG,UAAI,KAAK,OAAO,KAAK,WAAW,CAAC,KAAK,OAAO;AAAU,aAAK,OAAO,WAAW,MAAM,AAAS,cAAK,KAAK,OAAO;AAAA;AAEhH,oBAAgB,KAAK,MAAM,SAAQ;AACnC,QAAI,UAAW,MAAK,KAAK,QAAQ;AAAI,WAAK,KAAK,OAAO;AAAA;AAAA,QAIlD;AACJ,sBAAkB;AAClB,QAAI,KAAK,OAAO,WAAY,KAAK,OAAO,YAAY,MAAO,SAAU,GAAG,iBAAiB,KAAK,OAAO;AACnG,WAAK,QAAQ;AAWb,WAAK,IAAI,oBAAoB,KAAK,OAAO;AAEzC,UAAI,KAAK,OAAO,YAAY;AAC1B,aAAK,IAAI,uBAAuB,KAAK,OAAO;AAC5C,qBAAa,KAAK,OAAO;AACzB,qBAAa,MAAM,GAAG,MAAM,SAAS;AACrC,YAAI,CAAC;AAAM,eAAK,IAAI;AAAA;AAGtB,YAAM,GAAG,WAAW,KAAK,OAAO;AAChC,SAAG;AAIH,UAAI,KAAK,OAAO,YAAY;AAC1B,YAAI,KAAK,OAAO;AACd,eAAK,IAAI,mDAAmD,KAAK,OAAO;AACxE,aAAG,IAAI,IAAI,kCAAkC,KAAK,OAAO,aAAa,IAAI;AAAA;AAG5E,WAAG,IAAI,IAAI,4BAA4B;AAAA;AAEzC,YAAM,GAAG;AAAA;AAEX,oBAAgB,KAAK,MAAM,SAAQ;AACnC,QAAI,UAAW,MAAK,KAAK,WAAW;AAAI,WAAK,KAAK,UAAU;AAAA;AAAA,QAGxD;AAGJ;AACA;AACA;AACA;AACA,oBAAgB;AAChB,SAAK,QAAQ;AACb,gBAAY;AACZ,kBAAc,MAAM,KAAK,OAAO,SAAS,cAAc,OAAO,KAAK,OAAO;AAC1E,SAAK,KAAK,OAAO,KAAK,MAAM,SAAQ;AACpC,wBAAmB;AACjB,WAAK,QAAQ;AAEb,UAAI,CAAC,MAAK,SAAS,MAAK,MAAM;AAC5B,aAAK,IAAI,4BAA4B,MAAK;AAC1C;AAAA;AAGF,WAAK,QAAQ;AACb,UAAI,KAAK,OAAO;AACd,iBAAS,KAAK,OAAO,KAAK,IAAI,UAAU,AAAI,YAAQ,MAAK,OAAO,KAAK,UAAU;AAAA;AAE/E,aAAK,QAAQ;AACb,oBAAY;AACZ,iBAAS,KAAK,OAAO,KAAK,IAAI,UAAU,MAAM,AAAI,YAAQ,MAAK,OAAO,KAAK,UAAU;AACrF,aAAK,KAAK,MAAM,KAAK,MAAM,SAAQ;AAAA;AAIrC,WAAK,QAAQ;AACb,UAAI,KAAK,OAAO;AACd,oBAAY,KAAK,OAAO,KAAK,OAAO,UAAU,AAAO,eAAQ,MAAK,OAAO,KAAK,UAAU;AAAA;AAExF,aAAK,QAAQ;AACb,oBAAY;AACZ,oBAAY,KAAK,OAAO,KAAK,OAAO,UAAU,MAAM,AAAO,eAAQ,MAAK,OAAO,KAAK,UAAU;AAC9F,aAAK,KAAK,SAAS,KAAK,MAAM,SAAQ;AAAA;AAGxC,WAAK,QAAQ;AACb,UAAI,KAAK,OAAO;AACd,qBAAa,KAAK,OAAO,KAAK,QAAQ,UAAU,AAAQ,gBAAQ,MAAK,OAAO,KAAK,UAAU;AAAA;AAE3F,aAAK,QAAQ;AACb,oBAAY;AACZ,qBAAa,KAAK,OAAO,KAAK,QAAQ,UAAU,MAAM,AAAQ,gBAAQ,MAAK,OAAO,KAAK,UAAU;AACjG,aAAK,KAAK,UAAU,KAAK,MAAM,SAAQ;AAAA;AAEzC,WAAK,QAAQ;AAGb,UAAI,KAAK,OAAO;AACd,SAAC,QAAQ,WAAW,cAAc,MAAM,QAAQ,IAAI,CAAC,QAAQ,WAAW;AAAA;AAG1E,WAAK,QAAQ;AAEb,YAAK,MAAM;AAIX,uBAAkB,MAAK,YAAY,eAAe,MAAK,YAAY,eAE/D,OAAO,KAAK,IAAI,KAAK,IAAI,MAAK,YAAY,YAAY,GAAG,KAAK,MAAK,YAAY,YAAY,GAAG,KAAK,KAAK,IAAI,MAAK,YAAY,aAAa,GAAG,KAAK,MAAK,YAAY,aAAa,GAAG,OACnL;AAGJ,cAAQ,KAAK;AAAA,QACX,YAAY,MAAK;AAAA,QACjB,KAAK,MAAK;AAAA,QACV,MAAM,MAAK;AAAA,QACX,aAAa,MAAK;AAAA,QAClB,KAAK,OAAO;AAAA,QACZ,QAAQ,UAAU;AAAA,QAClB,kBAAkB,UAAU;AAAA,QAC5B,SAAS;AAAA,QACT,MAAO,aAAa,IAAK,KAAK,MAAM,YAAY,MAAM;AAAA;AAExD,WAAK,QAAQ;AAAA;AAEf,SAAK,QAAQ;AACb,QAAI,KAAK,OAAO;AACd,UAAI,KAAK,KAAK;AAAM,eAAO,KAAK,KAAK;AACrC,UAAI,KAAK,KAAK;AAAK,eAAO,KAAK,KAAK;AACpC,UAAI,KAAK,KAAK;AAAQ,eAAO,KAAK,KAAK;AACvC,UAAI,KAAK,KAAK;AAAS,eAAO,KAAK,KAAK;AAAA;AAE1C,WAAO;AAAA;AAAA,QAIH,2BAA2B;AAC/B,SAAK,QAAQ;AACb;AAGA,SAAK,SAAS,UAAU,KAAK,QAAQ;AACrC,QAAI,CAAC,KAAK,OAAO;AAAgB,WAAK,SAAS,UAAU,KAAK,QAAQ;AAGtE,SAAK,QAAQ;AACb,kBAAc,KAAK,OAAO;AAC1B,QAAI;AACF,WAAK,IAAI,OAAO;AAChB,aAAO,CAAE;AAAA;AAIX,WAAO,IAAI,QAAQ;AACjB;AACA;AACA;AAEA,wBAAkB;AAGlB,YAAM,KAAK;AAGX,YAAM,KAAK;AAEX,UAAI,KAAK,OAAO;AAAQ,WAAG,SAAS;AACpC,WAAK,QAAQ;AAEb,kBAAY;AACZ,uBAAgB,AAAM,cAAQ,OAAO,KAAK;AAC1C,WAAK,KAAK,QAAQ,KAAK,MAAM,SAAQ;AACrC,WAAK,QA
AQ;AAGb,UAAI,KAAK,OAAO;AACd,kBAAU,KAAK,OAAO,KAAK,UAAU,KAAK,WAAW,SAAQ,UAAU;AACvE,YAAI,KAAK,KAAK;AAAM,iBAAO,KAAK,KAAK;AAAA;AAErC,aAAK,QAAQ;AACb,oBAAY;AACZ,kBAAU,KAAK,OAAO,KAAK,UAAU,MAAM,KAAK,WAAW,SAAQ,UAAU;AAC7E,aAAK,KAAK,OAAO,KAAK,MAAM,SAAQ;AAAA;AAItC,WAAK,QAAQ;AACb,UAAI,KAAK,OAAO;AACd,kBAAU,KAAK,OAAO,KAAK,UAAU,KAAK,OAAO,QAAQ,cAAc,SAAQ,QAAQ,KAAK,UAAU;AACtG,YAAI,KAAK,KAAK;AAAM,iBAAO,KAAK,KAAK;AAAA;AAErC,aAAK,QAAQ;AACb,oBAAY;AACZ,kBAAU,KAAK,OAAO,KAAK,UAAU,MAAM,KAAK,OAAO,QAAQ,cAAc,SAAQ,QAAQ,KAAK,UAAU;AAC5G,aAAK,KAAK,OAAO,KAAK,MAAM,SAAQ;AAAA;AAEtC,WAAK,QAAQ;AAGb,WAAK,QAAQ;AACb,UAAI,KAAK,OAAO;AACd,kBAAU,KAAK,OAAO,KAAK,UAAU,KAAK,OAAO,SAAS,cAAc,SAAQ,QAAQ,KAAK,OAAO,QAAQ;AAC5G,YAAI,KAAK,KAAK;AAAM,iBAAO,KAAK,KAAK;AAAA;AAErC,aAAK,QAAQ;AACb,oBAAY;AACZ,kBAAU,KAAK,OAAO,KAAK,UAAU,MAAM,KAAK,OAAO,SAAS,cAAc,SAAQ,QAAQ,KAAK,OAAO,QAAQ;AAClH,aAAK,KAAK,OAAO,KAAK,MAAM,SAAQ;AAAA;AAKtC,UAAI,KAAK,OAAO;AACd,SAAC,SAAS,SAAS,WAAW,MAAM,QAAQ,IAAI,CAAC,SAAS,SAAS;AAAA;AAErE,eAAQ,OAAO;AAEf,UAAI,KAAK,OAAO;AAAQ,WAAG,SAAS;AACpC,WAAK,QAAQ;AAEb,uBAAiB;AACjB,UAAI,KAAK,OAAO,QAAQ;AACtB,oBAAY;AACZ,qBAAa,CAAE,MAAM,AAAQ,aAAK,UAAU,MAAM,AAAQ,aAAK,UAAU,MAAM,AAAQ,aAAK;AAC5F,YAAI,CAAC,KAAK,OAAO;AAAO,eAAK,KAAK,UAAU,KAAK,MAAM,SAAQ;AAAA,iBACtD,KAAK,KAAK;AAAS,iBAAO,KAAK,KAAK;AAAA;AAG/C,WAAK,KAAK,QAAQ,KAAK,MAAM,SAAQ;AACrC,WAAK,QAAQ;AACb,cAAQ,CAAE,MAAM,SAAS,MAAM,SAAS,MAAM,SAAS,SAAS,YAAY,aAAa,KAAK,MAAM,QAAQ,SAAQ;AAAA;AAAA;AAAA,QAIlH;AACJ,mBAAe,IAAI,UAAU,KAAK;AAClC,UAAM,KAAK,OAAO,QAAQ;AAC1B,SAAK,IAAI;AAAA;AAAA;",
+ "sourcesContent": ["\"use strict\";\n\n// ref: https://github.com/tc39/proposal-global\nvar getGlobal = function () {\n\t// the only reliable means to get the global object is\n\t// `Function('return this')()`\n\t// However, this causes CSP violations in Chrome apps.\n\tif (typeof self !== 'undefined') { return self; }\n\tif (typeof window !== 'undefined') { return window; }\n\tif (typeof global !== 'undefined') { return global; }\n\tthrow new Error('unable to locate global object');\n}\n\nvar global = getGlobal();\n\nmodule.exports = exports = global.fetch;\n\n// Needed for TypeScript and Webpack.\nif (global.fetch) {\n\texports.default = global.fetch.bind(global);\n}\n\nexports.Headers = global.Headers;\nexports.Request = global.Request;\nexports.Response = global.Response;", "/* eslint-disable node/no-deprecated-api */\nvar buffer = require('buffer')\nvar Buffer = buffer.Buffer\n\n// alternative to using Object.keys for old browsers\nfunction copyProps (src, dst) {\n for (var key in src) {\n dst[key] = src[key]\n }\n}\nif (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) {\n module.exports = buffer\n} else {\n // Copy properties from require('buffer')\n copyProps(buffer, exports)\n exports.Buffer = SafeBuffer\n}\n\nfunction SafeBuffer (arg, encodingOrOffset, length) {\n return Buffer(arg, encodingOrOffset, length)\n}\n\n// Copy static methods from Buffer\ncopyProps(Buffer, SafeBuffer)\n\nSafeBuffer.from = function (arg, encodingOrOffset, length) {\n if (typeof arg === 'number') {\n throw new TypeError('Argument must not be a number')\n }\n return Buffer(arg, encodingOrOffset, length)\n}\n\nSafeBuffer.alloc = function (size, fill, encoding) {\n if (typeof size !== 'number') {\n throw new TypeError('Argument must be a number')\n }\n var buf = Buffer(size)\n if (fill !== undefined) {\n if (typeof encoding === 'string') {\n buf.fill(fill, encoding)\n } else {\n buf.fill(fill)\n }\n } else {\n buf.fill(0)\n }\n return buf\n}\n\nSafeBuffer.allocUnsafe = function (size) {\n if (typeof size !== 'number') {\n throw new TypeError('Argument must be a number')\n }\n return Buffer(size)\n}\n\nSafeBuffer.allocUnsafeSlow = function (size) {\n if (typeof size !== 'number') {\n throw new TypeError('Argument must be a number')\n }\n return buffer.SlowBuffer(size)\n}\n", "// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n'use strict';\n\n/**/\n\nvar Buffer = require('safe-buffer').Buffer;\n/* */\n\nvar isEncoding = Buffer.isEncoding || function (encoding) {\n encoding = '' + encoding;\n switch (encoding && encoding.toLowerCase()) {\n case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw':\n return true;\n default:\n return false;\n }\n};\n\nfunction _normalizeEncoding(enc) {\n if (!enc) return 'utf8';\n var retried;\n while (true) {\n switch (enc) {\n case 'utf8':\n case 'utf-8':\n return 'utf8';\n case 'ucs2':\n case 'ucs-2':\n case 'utf16le':\n case 'utf-16le':\n return 'utf16le';\n case 'latin1':\n case 'binary':\n return 'latin1';\n case 'base64':\n case 'ascii':\n case 'hex':\n return enc;\n default:\n if (retried) return; // undefined\n enc = ('' + enc).toLowerCase();\n retried = true;\n }\n }\n};\n\n// Do not cache `Buffer.isEncoding` when checking encoding names as some\n// modules monkey-patch it to support additional encodings\nfunction normalizeEncoding(enc) {\n var nenc = _normalizeEncoding(enc);\n if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc);\n return nenc || enc;\n}\n\n// StringDecoder provides an interface for efficiently splitting a series of\n// buffers into a series of JS strings without breaking apart multi-byte\n// characters.\nexports.StringDecoder = StringDecoder;\nfunction StringDecoder(encoding) {\n this.encoding = normalizeEncoding(encoding);\n var nb;\n switch (this.encoding) {\n case 'utf16le':\n this.text = utf16Text;\n this.end = utf16End;\n nb = 4;\n break;\n case 'utf8':\n this.fillLast = utf8FillLast;\n nb = 4;\n break;\n case 'base64':\n this.text = base64Text;\n this.end = base64End;\n nb = 3;\n break;\n default:\n this.write = simpleWrite;\n this.end = simpleEnd;\n return;\n }\n this.lastNeed = 0;\n this.lastTotal = 0;\n this.lastChar = Buffer.allocUnsafe(nb);\n}\n\nStringDecoder.prototype.write = function (buf) {\n if (buf.length === 0) return '';\n var r;\n var i;\n if (this.lastNeed) {\n r = this.fillLast(buf);\n if (r === undefined) return '';\n i = this.lastNeed;\n this.lastNeed = 0;\n } else {\n i = 0;\n }\n if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i);\n return r || '';\n};\n\nStringDecoder.prototype.end = utf8End;\n\n// Returns only complete characters in a Buffer\nStringDecoder.prototype.text = utf8Text;\n\n// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer\nStringDecoder.prototype.fillLast = function (buf) {\n if (this.lastNeed <= buf.length) {\n buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed);\n return this.lastChar.toString(this.encoding, 0, this.lastTotal);\n }\n buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length);\n this.lastNeed -= buf.length;\n};\n\n// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a\n// continuation byte. If an invalid byte is detected, -2 is returned.\nfunction utf8CheckByte(byte) {\n if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4;\n return byte >> 6 === 0x02 ? 
-1 : -2;\n}\n\n// Checks at most 3 bytes at the end of a Buffer in order to detect an\n// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4)\n// needed to complete the UTF-8 character (if applicable) are returned.\nfunction utf8CheckIncomplete(self, buf, i) {\n var j = buf.length - 1;\n if (j < i) return 0;\n var nb = utf8CheckByte(buf[j]);\n if (nb >= 0) {\n if (nb > 0) self.lastNeed = nb - 1;\n return nb;\n }\n if (--j < i || nb === -2) return 0;\n nb = utf8CheckByte(buf[j]);\n if (nb >= 0) {\n if (nb > 0) self.lastNeed = nb - 2;\n return nb;\n }\n if (--j < i || nb === -2) return 0;\n nb = utf8CheckByte(buf[j]);\n if (nb >= 0) {\n if (nb > 0) {\n if (nb === 2) nb = 0;else self.lastNeed = nb - 3;\n }\n return nb;\n }\n return 0;\n}\n\n// Validates as many continuation bytes for a multi-byte UTF-8 character as\n// needed or are available. If we see a non-continuation byte where we expect\n// one, we \"replace\" the validated continuation bytes we've seen so far with\n// a single UTF-8 replacement character ('\\ufffd'), to match v8's UTF-8 decoding\n// behavior. The continuation byte check is included three times in the case\n// where all of the continuation bytes for a character exist in the same buffer.\n// It is also done this way as a slight performance increase instead of using a\n// loop.\nfunction utf8CheckExtraBytes(self, buf, p) {\n if ((buf[0] & 0xC0) !== 0x80) {\n self.lastNeed = 0;\n return '\\ufffd';\n }\n if (self.lastNeed > 1 && buf.length > 1) {\n if ((buf[1] & 0xC0) !== 0x80) {\n self.lastNeed = 1;\n return '\\ufffd';\n }\n if (self.lastNeed > 2 && buf.length > 2) {\n if ((buf[2] & 0xC0) !== 0x80) {\n self.lastNeed = 2;\n return '\\ufffd';\n }\n }\n }\n}\n\n// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer.\nfunction utf8FillLast(buf) {\n var p = this.lastTotal - this.lastNeed;\n var r = utf8CheckExtraBytes(this, buf, p);\n if (r !== undefined) return r;\n if (this.lastNeed <= buf.length) {\n buf.copy(this.lastChar, p, 0, this.lastNeed);\n return this.lastChar.toString(this.encoding, 0, this.lastTotal);\n }\n buf.copy(this.lastChar, p, 0, buf.length);\n this.lastNeed -= buf.length;\n}\n\n// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a\n// partial character, the character's bytes are buffered until the required\n// number of bytes are available.\nfunction utf8Text(buf, i) {\n var total = utf8CheckIncomplete(this, buf, i);\n if (!this.lastNeed) return buf.toString('utf8', i);\n this.lastTotal = total;\n var end = buf.length - (total - this.lastNeed);\n buf.copy(this.lastChar, 0, end);\n return buf.toString('utf8', i, end);\n}\n\n// For UTF-8, a replacement character is added when ending on a partial\n// character.\nfunction utf8End(buf) {\n var r = buf && buf.length ? this.write(buf) : '';\n if (this.lastNeed) return r + '\\ufffd';\n return r;\n}\n\n// UTF-16LE typically needs two bytes per character, but even if we have an even\n// number of bytes available, we need to check if we end on a leading/high\n// surrogate. 
In that case, we need to wait for the next two bytes in order to\n// decode the last character properly.\nfunction utf16Text(buf, i) {\n if ((buf.length - i) % 2 === 0) {\n var r = buf.toString('utf16le', i);\n if (r) {\n var c = r.charCodeAt(r.length - 1);\n if (c >= 0xD800 && c <= 0xDBFF) {\n this.lastNeed = 2;\n this.lastTotal = 4;\n this.lastChar[0] = buf[buf.length - 2];\n this.lastChar[1] = buf[buf.length - 1];\n return r.slice(0, -1);\n }\n }\n return r;\n }\n this.lastNeed = 1;\n this.lastTotal = 2;\n this.lastChar[0] = buf[buf.length - 1];\n return buf.toString('utf16le', i, buf.length - 1);\n}\n\n// For UTF-16LE we do not explicitly append special replacement characters if we\n// end on a partial character, we simply let v8 handle that.\nfunction utf16End(buf) {\n var r = buf && buf.length ? this.write(buf) : '';\n if (this.lastNeed) {\n var end = this.lastTotal - this.lastNeed;\n return r + this.lastChar.toString('utf16le', 0, end);\n }\n return r;\n}\n\nfunction base64Text(buf, i) {\n var n = (buf.length - i) % 3;\n if (n === 0) return buf.toString('base64', i);\n this.lastNeed = 3 - n;\n this.lastTotal = 3;\n if (n === 1) {\n this.lastChar[0] = buf[buf.length - 1];\n } else {\n this.lastChar[0] = buf[buf.length - 2];\n this.lastChar[1] = buf[buf.length - 1];\n }\n return buf.toString('base64', i, buf.length - n);\n}\n\nfunction base64End(buf) {\n var r = buf && buf.length ? this.write(buf) : '';\n if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed);\n return r;\n}\n\n// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex)\nfunction simpleWrite(buf) {\n return buf.toString(this.encoding);\n}\n\nfunction simpleEnd(buf) {\n return buf && buf.length ? this.write(buf) : '';\n}", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const EPSILON_FLOAT32 = 1e-7;\nexport const EPSILON_FLOAT16 = 1e-4;\n/** Convenient class for storing tensor-related data. */\nexport class DataStorage {\n constructor(backend, dataMover) {\n this.backend = backend;\n this.dataMover = dataMover;\n this.data = new WeakMap();\n this.dataIdsCount = 0;\n }\n get(dataId) {\n if (!this.data.has(dataId)) {\n this.dataMover.moveData(this.backend, dataId);\n }\n return this.data.get(dataId);\n }\n set(dataId, value) {\n this.dataIdsCount++;\n this.data.set(dataId, value);\n }\n has(dataId) {\n return this.data.has(dataId);\n }\n delete(dataId) {\n this.dataIdsCount--;\n return this.data.delete(dataId);\n }\n numDataIds() {\n return this.dataIdsCount;\n }\n}\n/**\n * The interface that defines the kernels that should be implemented when\n * adding a new backend. 
New backends don't need to implement every one of the\n * methods, this can be done gradually (throw an error for unimplemented\n * methods).\n */\nexport class KernelBackend {\n time(f) {\n return notYetImplemented('time');\n }\n read(dataId) {\n return notYetImplemented('read');\n }\n readSync(dataId) {\n return notYetImplemented('readSync');\n }\n numDataIds() {\n return notYetImplemented('numDataIds');\n }\n disposeData(dataId) {\n return notYetImplemented('disposeData');\n }\n write(values, shape, dtype) {\n return notYetImplemented('write');\n }\n move(dataId, values, shape, dtype) {\n return notYetImplemented('move');\n }\n memory() {\n return notYetImplemented('memory');\n }\n /** Returns the highest precision for floats in bits (e.g. 16 or 32) */\n floatPrecision() {\n return notYetImplemented('floatPrecision');\n }\n /** Returns the smallest representable number. */\n epsilon() {\n return this.floatPrecision() === 32 ? EPSILON_FLOAT32 : EPSILON_FLOAT16;\n }\n batchMatMul(a, b, transposeA, transposeB) {\n return notYetImplemented('batchMatMul');\n }\n fusedBatchMatMul({ a, b, transposeA, transposeB, bias, activation, preluActivationWeights }) {\n return notYetImplemented('fusedBatchMatMul');\n }\n slice(x, begin, size) {\n return notYetImplemented('slice');\n }\n stridedSlice(x, begin, end, strides) {\n return notYetImplemented('stridedSlice');\n }\n unstack(x, axis) {\n return notYetImplemented('unstack');\n }\n reverse(a, axis) {\n return notYetImplemented('reverse');\n }\n concat(tensors, axis) {\n return notYetImplemented('concat');\n }\n neg(a) {\n return notYetImplemented('neg');\n }\n add(a, b) {\n return notYetImplemented('add');\n }\n addN(tensors) {\n return notYetImplemented('addN');\n }\n subtract(a, b) {\n return notYetImplemented('subtract');\n }\n multiply(a, b) {\n return notYetImplemented('multiply');\n }\n realDivide(a, b) {\n return notYetImplemented('realDivide');\n }\n floorDiv(a, b) {\n return notYetImplemented('floorDiv');\n }\n sum(x, axes) {\n return notYetImplemented('sum');\n }\n prod(x, axes) {\n return notYetImplemented('prod');\n }\n unsortedSegmentSum(x, segmentIds, numSegments) {\n return notYetImplemented('unsortedSegmentSum');\n }\n argMin(x, axis) {\n return notYetImplemented('argMin');\n }\n argMax(x, axis) {\n return notYetImplemented('argMax');\n }\n equal(a, b) {\n return notYetImplemented('equal');\n }\n notEqual(a, b) {\n return notYetImplemented('notEqual');\n }\n less(a, b) {\n return notYetImplemented('less');\n }\n lessEqual(a, b) {\n return notYetImplemented('lessEqual');\n }\n greater(a, b) {\n return notYetImplemented('greater');\n }\n greaterEqual(a, b) {\n return notYetImplemented('greaterEqual');\n }\n logicalNot(a) {\n return notYetImplemented('logicalNot');\n }\n logicalAnd(a, b) {\n return notYetImplemented('logicalAnd');\n }\n logicalOr(a, b) {\n return notYetImplemented('logicalOr');\n }\n where(condition) {\n return notYetImplemented('where');\n }\n select(condition, a, b) {\n return notYetImplemented('select');\n }\n topk(x, k, sorted) {\n return notYetImplemented('topk');\n }\n min(x, axes) {\n return notYetImplemented('min');\n }\n minimum(a, b) {\n return notYetImplemented('minimum');\n }\n mod(a, b) {\n return notYetImplemented('mod');\n }\n max(x, axes) {\n return notYetImplemented('max');\n }\n maximum(a, b) {\n return notYetImplemented('maximum');\n }\n all(x, axes) {\n return notYetImplemented('all');\n }\n any(x, axes) {\n return notYetImplemented('any');\n }\n squaredDifference(a, b) {\n return 
notYetImplemented('squaredDifference');\n }\n ceil(x) {\n return notYetImplemented('ceil');\n }\n floor(x) {\n return notYetImplemented('floor');\n }\n round(x) {\n return notYetImplemented('round');\n }\n sign(x) {\n return notYetImplemented('sign');\n }\n isNaN(x) {\n return notYetImplemented('isNaN');\n }\n isInf(x) {\n return notYetImplemented('isInf');\n }\n isFinite(x) {\n return notYetImplemented('isFinite');\n }\n pow(a, b) {\n return notYetImplemented('pow');\n }\n exp(x) {\n return notYetImplemented('exp');\n }\n expm1(x) {\n return notYetImplemented('expm1');\n }\n softmax(x, dim) {\n return notYetImplemented('softmax');\n }\n log(x) {\n return notYetImplemented('log');\n }\n log1p(x) {\n return notYetImplemented('log1p');\n }\n sqrt(x) {\n return notYetImplemented('sqrt');\n }\n rsqrt(x) {\n return notYetImplemented('rsqrt');\n }\n square(x) {\n return notYetImplemented('square');\n }\n reciprocal(x) {\n return notYetImplemented('reciprocal');\n }\n relu(x) {\n return notYetImplemented('relu');\n }\n relu6(x) {\n return notYetImplemented('relu6');\n }\n prelu(x, a) {\n return notYetImplemented('prelu');\n }\n elu(x) {\n return notYetImplemented('elu');\n }\n eluDer(dy, y) {\n return notYetImplemented('eluDer');\n }\n selu(x) {\n return notYetImplemented('selu');\n }\n int(x) {\n return notYetImplemented('int');\n }\n clip(x, min, max) {\n return notYetImplemented('clip');\n }\n abs(x) {\n return notYetImplemented('abs');\n }\n complexAbs(x) {\n return notYetImplemented('complexAbs');\n }\n sigmoid(x) {\n return notYetImplemented('sigmoid');\n }\n softplus(x) {\n return notYetImplemented('softplus');\n }\n sin(x) {\n return notYetImplemented('sin');\n }\n cos(x) {\n return notYetImplemented('cos');\n }\n tan(x) {\n return notYetImplemented('tan');\n }\n asin(x) {\n return notYetImplemented('asin');\n }\n acos(x) {\n return notYetImplemented('acos');\n }\n atan(x) {\n return notYetImplemented('atan');\n }\n atan2(a, b) {\n return notYetImplemented('atan2');\n }\n sinh(x) {\n return notYetImplemented('sinh');\n }\n cosh(x) {\n return notYetImplemented('cosh');\n }\n tanh(x) {\n return notYetImplemented('tanh');\n }\n asinh(x) {\n return notYetImplemented('asinh');\n }\n acosh(x) {\n return notYetImplemented('acosh');\n }\n atanh(x) {\n return notYetImplemented('atanh');\n }\n erf(x) {\n return notYetImplemented('erf');\n }\n step(x, alpha) {\n return notYetImplemented('step');\n }\n fusedConv2d({ input, filter, convInfo, bias, activation, preluActivationWeights }) {\n return notYetImplemented('fusedConv2d');\n }\n conv2d(x, filter, convInfo) {\n return notYetImplemented('conv2d');\n }\n conv2dDerInput(dy, filter, convInfo) {\n return notYetImplemented('conv2dDerInput');\n }\n conv2dDerFilter(x, dY, convInfo) {\n return notYetImplemented('conv2dDerFilter');\n }\n fusedDepthwiseConv2D({ input, filter, convInfo, bias, activation, preluActivationWeights }) {\n return notYetImplemented('fusedDepthwiseConv2D');\n }\n depthwiseConv2D(input, filter, convInfo) {\n return notYetImplemented('depthwiseConv2D');\n }\n depthwiseConv2DDerInput(dy, filter, convInfo) {\n return notYetImplemented('depthwiseConv2DDerInput');\n }\n depthwiseConv2DDerFilter(x, dY, convInfo) {\n return notYetImplemented('depthwiseConv2DDerFilter');\n }\n conv3d(x, filter, convInfo) {\n return notYetImplemented('conv3d');\n }\n conv3dDerInput(dy, filter, convInfo) {\n return notYetImplemented('conv3dDerInput');\n }\n conv3dDerFilter(x, dY, convInfo) {\n return notYetImplemented('conv3dDerFilter');\n }\n maxPool(x, 
convInfo) {\n return notYetImplemented('maxPool');\n }\n maxPoolBackprop(dy, x, y, convInfo) {\n return notYetImplemented('maxPoolBackprop');\n }\n avgPool(x, convInfo) {\n return notYetImplemented('avgPool');\n }\n avgPoolBackprop(dy, x, convInfo) {\n return notYetImplemented('avgPoolBackprop');\n }\n avgPool3d(x, convInfo) {\n return notYetImplemented('avgPool3d');\n }\n avgPool3dBackprop(dy, x, convInfo) {\n return notYetImplemented('avgPool3dBackprop');\n }\n maxPool3d(x, convInfo) {\n return notYetImplemented('maxPool3d');\n }\n maxPool3dBackprop(dy, x, y, convInfo) {\n return notYetImplemented('maxPool3dBackprop');\n }\n reshape(x, shape) {\n return notYetImplemented('reshape');\n }\n cast(x, dtype) {\n return notYetImplemented('cast');\n }\n tile(x, reps) {\n return notYetImplemented('tile');\n }\n pad(x, paddings, constantValue) {\n return notYetImplemented('pad');\n }\n transpose(x, perm) {\n return notYetImplemented('transpose');\n }\n gather(x, indices, axis) {\n return notYetImplemented('gather');\n }\n gatherND(x, indices) {\n return notYetImplemented('gatherND');\n }\n scatterND(indices, updates, shape) {\n return notYetImplemented('scatterND');\n }\n batchToSpaceND(x, blockShape, crops) {\n return notYetImplemented('batchToSpaceND');\n }\n spaceToBatchND(x, blockShape, paddings) {\n return notYetImplemented('spaceToBatchND');\n }\n resizeBilinear(x, newHeight, newWidth, alignCorners) {\n return notYetImplemented('resizeBilinear');\n }\n resizeBilinearBackprop(dy, x, alignCorners) {\n return notYetImplemented('resizeBilinearBackprop');\n }\n resizeNearestNeighbor(x, newHEight, newWidth, alignCorners) {\n return notYetImplemented('resizeNearestNeighbor');\n }\n resizeNearestNeighborBackprop(dy, x, alignCorners) {\n return notYetImplemented('resizeNearestNeighborBackprop');\n }\n batchNorm(x, mean, variance, offset, scale, varianceEpsilon) {\n return notYetImplemented('batchNorm');\n }\n localResponseNormalization4D(x, radius, bias, alpha, beta) {\n return notYetImplemented('localResponseNormalization4D');\n }\n LRNGrad(dy, inputImage, outputImage, radius, bias, alpha, beta) {\n return notYetImplemented('LRNGrad');\n }\n multinomial(logits, normalized, numSamples, seed) {\n return notYetImplemented('multinomial');\n }\n oneHot(indices, depth, onValue, offValue) {\n return notYetImplemented('oneHot');\n }\n cumsum(x, axis, exclusive, reverse) {\n return notYetImplemented('cumsum');\n }\n nonMaxSuppression(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold) {\n return notYetImplemented('nonMaxSuppression');\n }\n fft(x) {\n return notYetImplemented('fft');\n }\n ifft(x) {\n return notYetImplemented('ifft');\n }\n complex(real, imag) {\n return notYetImplemented('complex');\n }\n real(input) {\n return notYetImplemented('real');\n }\n imag(input) {\n return notYetImplemented('imag');\n }\n cropAndResize(image, boxes, boxIndex, cropSize, method, extrapolationValue) {\n return notYetImplemented('cropAndResize');\n }\n depthToSpace(x, blockSize, dataFormat) {\n return notYetImplemented('depthToSpace');\n }\n // Aligns with the \"SplitV\" kernel in TensorFlow.\n split(value, sizeSplits, axis) {\n return notYetImplemented('split');\n }\n sparseToDense(sparseIndices, sparseValues, outputShape, defaultValue) {\n return notYetImplemented('sparseToDense');\n }\n diag(x) {\n return notYetImplemented('diag');\n }\n fill(shape, value, dtype) {\n return notYetImplemented('fill');\n }\n onesLike(x) {\n return notYetImplemented('onesLike');\n }\n zerosLike(x) {\n return 
notYetImplemented('zerosLike');\n }\n linspace(start, stop, num) {\n return notYetImplemented('linspace');\n }\n dispose() {\n return notYetImplemented('dispose');\n }\n}\nfunction notYetImplemented(kernelName) {\n throw new Error(`'${kernelName}' not yet implemented or not found in the registry. ` +\n `This kernel may not be supported by the tfjs backend you have chosen`);\n}\n//# sourceMappingURL=backend.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * Shuffles the array in-place using Fisher-Yates algorithm.\n *\n * ```js\n * const a = [1, 2, 3, 4, 5];\n * tf.util.shuffle(a);\n * console.log(a);\n * ```\n *\n * @param array The array to shuffle in-place.\n *\n * @doc {heading: 'Util', namespace: 'util'}\n */\n// tslint:disable-next-line:no-any\nexport function shuffle(array) {\n let counter = array.length;\n let temp = 0;\n let index = 0;\n // While there are elements in the array\n while (counter > 0) {\n // Pick a random index\n index = (Math.random() * counter) | 0;\n // Decrease counter by 1\n counter--;\n // And swap the last element with it\n temp = array[counter];\n array[counter] = array[index];\n array[index] = temp;\n }\n}\n/** Clamps a value to a specified range. */\nexport function clamp(min, x, max) {\n return Math.max(min, Math.min(x, max));\n}\nexport function nearestLargerEven(val) {\n return val % 2 === 0 ? val : val + 1;\n}\nexport function sum(arr) {\n let sum = 0;\n for (let i = 0; i < arr.length; i++) {\n sum += arr[i];\n }\n return sum;\n}\n/**\n * Returns a sample from a uniform [a, b) distribution.\n *\n * @param a The minimum support (inclusive).\n * @param b The maximum support (exclusive).\n * @return A pseudorandom number on the half-open interval [a,b).\n */\nexport function randUniform(a, b) {\n const r = Math.random();\n return (b * r) + (1 - r) * a;\n}\n/** Returns the squared Euclidean distance between two vectors. */\nexport function distSquared(a, b) {\n let result = 0;\n for (let i = 0; i < a.length; i++) {\n const diff = Number(a[i]) - Number(b[i]);\n result += diff * diff;\n }\n return result;\n}\n/**\n * Asserts that the expression is true. Otherwise throws an error with the\n * provided message.\n *\n * ```js\n * const x = 2;\n * tf.util.assert(x === 2, 'x is not 2');\n * ```\n *\n * @param expr The expression to assert (as a boolean).\n * @param msg A function that returns the message to report when throwing an\n * error. We use a function for performance reasons.\n *\n * @doc {heading: 'Util', namespace: 'util'}\n */\nexport function assert(expr, msg) {\n if (!expr) {\n throw new Error(typeof msg === 'string' ? 
msg : msg());\n }\n}\nexport function assertShapesMatch(shapeA, shapeB, errorMessagePrefix = '') {\n assert(arraysEqual(shapeA, shapeB), () => errorMessagePrefix + ` Shapes ${shapeA} and ${shapeB} must match`);\n}\nexport function assertNonNull(a) {\n assert(a != null, () => `The input to the tensor constructor must be a non-null value.`);\n}\n// NOTE: We explicitly type out what T extends instead of any so that\n// util.flatten on a nested array of number doesn't try to infer T as a\n// number[][], causing us to explicitly type util.flatten().\n/**\n * Flattens an arbitrarily nested array.\n *\n * ```js\n * const a = [[1, 2], [3, 4], [5, [6, [7]]]];\n * const flat = tf.util.flatten(a);\n * console.log(flat);\n * ```\n *\n * @param arr The nested array to flatten.\n * @param result The destination array which holds the elements.\n * @param skipTypedArray If true, avoids flattening the typed arrays. Defaults\n * to false.\n *\n * @doc {heading: 'Util', namespace: 'util'}\n */\nexport function flatten(arr, result = [], skipTypedArray = false) {\n if (result == null) {\n result = [];\n }\n if (Array.isArray(arr) || isTypedArray(arr) && !skipTypedArray) {\n for (let i = 0; i < arr.length; ++i) {\n flatten(arr[i], result, skipTypedArray);\n }\n }\n else {\n result.push(arr);\n }\n return result;\n}\n/**\n * Returns the size (number of elements) of the tensor given its shape.\n *\n * ```js\n * const shape = [3, 4, 2];\n * const size = tf.util.sizeFromShape(shape);\n * console.log(size);\n * ```\n *\n * @doc {heading: 'Util', namespace: 'util'}\n */\nexport function sizeFromShape(shape) {\n if (shape.length === 0) {\n // Scalar.\n return 1;\n }\n let size = shape[0];\n for (let i = 1; i < shape.length; i++) {\n size *= shape[i];\n }\n return size;\n}\nexport function isScalarShape(shape) {\n return shape.length === 0;\n}\nexport function arraysEqual(n1, n2) {\n if (n1 === n2) {\n return true;\n }\n if (n1 == null || n2 == null) {\n return false;\n }\n if (n1.length !== n2.length) {\n return false;\n }\n for (let i = 0; i < n1.length; i++) {\n if (n1[i] !== n2[i]) {\n return false;\n }\n }\n return true;\n}\nexport function isInt(a) {\n return a % 1 === 0;\n}\nexport function tanh(x) {\n // tslint:disable-next-line:no-any\n if (Math.tanh != null) {\n // tslint:disable-next-line:no-any\n return Math.tanh(x);\n }\n if (x === Infinity) {\n return 1;\n }\n else if (x === -Infinity) {\n return -1;\n }\n else {\n const e2x = Math.exp(2 * x);\n return (e2x - 1) / (e2x + 1);\n }\n}\nexport function sizeToSquarishShape(size) {\n const width = Math.ceil(Math.sqrt(size));\n return [width, Math.ceil(size / width)];\n}\n/**\n * Creates a new array with randomized indicies to a given quantity.\n *\n * ```js\n * const randomTen = tf.util.createShuffledIndices(10);\n * console.log(randomTen);\n * ```\n *\n * @param number Quantity of how many shuffled indicies to create.\n *\n * @doc {heading: 'Util', namespace: 'util'}\n */\nexport function createShuffledIndices(n) {\n const shuffledIndices = new Uint32Array(n);\n for (let i = 0; i < n; ++i) {\n shuffledIndices[i] = i;\n }\n shuffle(shuffledIndices);\n return shuffledIndices;\n}\nexport function rightPad(a, size) {\n if (size <= a.length) {\n return a;\n }\n return a + ' '.repeat(size - a.length);\n}\nexport function repeatedTry(checkFn, delayFn = (counter) => 0, maxCounter) {\n return new Promise((resolve, reject) => {\n let tryCount = 0;\n const tryFn = () => {\n if (checkFn()) {\n resolve();\n return;\n }\n tryCount++;\n const nextBackoff = 
delayFn(tryCount);\n if (maxCounter != null && tryCount >= maxCounter) {\n reject();\n return;\n }\n setTimeout(tryFn, nextBackoff);\n };\n tryFn();\n });\n}\n/**\n * Given the full size of the array and a shape that may contain -1 as the\n * implicit dimension, returns the inferred shape where -1 is replaced.\n * E.g. For shape=[2, -1, 3] and size=24, it will return [2, 4, 3].\n *\n * @param shape The shape, which may contain -1 in some dimension.\n * @param size The full size (number of elements) of the array.\n * @return The inferred shape where -1 is replaced with the inferred size.\n */\nexport function inferFromImplicitShape(shape, size) {\n let shapeProd = 1;\n let implicitIdx = -1;\n for (let i = 0; i < shape.length; ++i) {\n if (shape[i] >= 0) {\n shapeProd *= shape[i];\n }\n else if (shape[i] === -1) {\n if (implicitIdx !== -1) {\n throw Error(`Shapes can only have 1 implicit size. ` +\n `Found -1 at dim ${implicitIdx} and dim ${i}`);\n }\n implicitIdx = i;\n }\n else if (shape[i] < 0) {\n throw Error(`Shapes can not be < 0. Found ${shape[i]} at dim ${i}`);\n }\n }\n if (implicitIdx === -1) {\n if (size > 0 && size !== shapeProd) {\n throw Error(`Size(${size}) must match the product of shape ${shape}`);\n }\n return shape;\n }\n if (shapeProd === 0) {\n throw Error(`Cannot infer the missing size in [${shape}] when ` +\n `there are 0 elements`);\n }\n if (size % shapeProd !== 0) {\n throw Error(`The implicit shape can't be a fractional number. ` +\n `Got ${size} / ${shapeProd}`);\n }\n const newShape = shape.slice();\n newShape[implicitIdx] = size / shapeProd;\n return newShape;\n}\nexport function parseAxisParam(axis, shape) {\n const rank = shape.length;\n // Normalize input\n axis = axis == null ? shape.map((s, i) => i) : [].concat(axis);\n // Check for valid range\n assert(axis.every(ax => ax >= -rank && ax < rank), () => `All values in axis param must be in range [-${rank}, ${rank}) but ` +\n `got axis ${axis}`);\n // Check for only integers\n assert(axis.every(ax => isInt(ax)), () => `All values in axis param must be integers but ` +\n `got axis ${axis}`);\n // Handle negative axis.\n return axis.map(a => a < 0 ? rank + a : a);\n}\n/** Reduces the shape by removing all dimensions of shape 1. 
*/\nexport function squeezeShape(shape, axis) {\n const newShape = [];\n const keptDims = [];\n const isEmptyArray = axis != null && Array.isArray(axis) && axis.length === 0;\n const axes = (axis == null || isEmptyArray) ?\n null :\n parseAxisParam(axis, shape).sort();\n let j = 0;\n for (let i = 0; i < shape.length; ++i) {\n if (axes != null) {\n if (axes[j] === i && shape[i] !== 1) {\n throw new Error(`Can't squeeze axis ${i} since its dim '${shape[i]}' is not 1`);\n }\n if ((axes[j] == null || axes[j] > i) && shape[i] === 1) {\n newShape.push(shape[i]);\n keptDims.push(i);\n }\n if (axes[j] <= i) {\n j++;\n }\n }\n if (shape[i] !== 1) {\n newShape.push(shape[i]);\n keptDims.push(i);\n }\n }\n return { newShape, keptDims };\n}\nexport function getTypedArrayFromDType(dtype, size) {\n let values = null;\n if (dtype == null || dtype === 'float32') {\n values = new Float32Array(size);\n }\n else if (dtype === 'int32') {\n values = new Int32Array(size);\n }\n else if (dtype === 'bool') {\n values = new Uint8Array(size);\n }\n else {\n throw new Error(`Unknown data type ${dtype}`);\n }\n return values;\n}\nexport function getArrayFromDType(dtype, size) {\n let values = null;\n if (dtype == null || dtype === 'float32') {\n values = new Float32Array(size);\n }\n else if (dtype === 'int32') {\n values = new Int32Array(size);\n }\n else if (dtype === 'bool') {\n values = new Uint8Array(size);\n }\n else if (dtype === 'string') {\n values = new Array(size);\n }\n else {\n throw new Error(`Unknown data type ${dtype}`);\n }\n return values;\n}\nexport function checkConversionForErrors(vals, dtype) {\n for (let i = 0; i < vals.length; i++) {\n const num = vals[i];\n if (isNaN(num) || !isFinite(num)) {\n throw Error(`A tensor of type ${dtype} being uploaded contains ${num}.`);\n }\n }\n}\n/** Returns true if the dtype is valid. */\nexport function isValidDtype(dtype) {\n return dtype === 'bool' || dtype === 'complex64' || dtype === 'float32' ||\n dtype === 'int32' || dtype === 'string';\n}\n/**\n * Returns true if the new type can't encode the old type without loss of\n * precision.\n */\nexport function hasEncodingLoss(oldType, newType) {\n if (newType === 'complex64') {\n return false;\n }\n if (newType === 'float32' && oldType !== 'complex64') {\n return false;\n }\n if (newType === 'int32' && oldType !== 'float32' && oldType !== 'complex64') {\n return false;\n }\n if (newType === 'bool' && oldType === 'bool') {\n return false;\n }\n return true;\n}\nexport function isTypedArray(a) {\n return a instanceof Float32Array || a instanceof Int32Array ||\n a instanceof Uint8Array;\n}\nexport function bytesPerElement(dtype) {\n if (dtype === 'float32' || dtype === 'int32') {\n return 4;\n }\n else if (dtype === 'complex64') {\n return 8;\n }\n else if (dtype === 'bool') {\n return 1;\n }\n else {\n throw new Error(`Unknown dtype ${dtype}`);\n }\n}\n/**\n * Returns the approximate number of bytes allocated in the string array - 2\n * bytes per character. Computing the exact bytes for a native string in JS is\n * not possible since it depends on the encoding of the html page that serves\n * the website.\n */\nexport function bytesFromStringArray(arr) {\n if (arr == null) {\n return 0;\n }\n let bytes = 0;\n arr.forEach(x => bytes += x.length);\n return bytes;\n}\n/** Returns true if the value is a string. 
*/\nexport function isString(value) {\n return typeof value === 'string' || value instanceof String;\n}\nexport function isBoolean(value) {\n return typeof value === 'boolean';\n}\nexport function isNumber(value) {\n return typeof value === 'number';\n}\nexport function inferDtype(values) {\n if (Array.isArray(values)) {\n return inferDtype(values[0]);\n }\n if (values instanceof Float32Array) {\n return 'float32';\n }\n else if (values instanceof Int32Array || values instanceof Uint8Array) {\n return 'int32';\n }\n else if (isNumber(values)) {\n return 'float32';\n }\n else if (isString(values)) {\n return 'string';\n }\n else if (isBoolean(values)) {\n return 'bool';\n }\n return 'float32';\n}\nexport function isFunction(f) {\n return !!(f && f.constructor && f.call && f.apply);\n}\nexport function nearestDivisor(size, start) {\n for (let i = start; i < size; ++i) {\n if (size % i === 0) {\n return i;\n }\n }\n return size;\n}\nexport function computeStrides(shape) {\n const rank = shape.length;\n if (rank < 2) {\n return [];\n }\n // Last dimension has implicit stride of 1, thus having D-1 (instead of D)\n // strides.\n const strides = new Array(rank - 1);\n strides[rank - 2] = shape[rank - 1];\n for (let i = rank - 3; i >= 0; --i) {\n strides[i] = strides[i + 1] * shape[i + 1];\n }\n return strides;\n}\nfunction createNestedArray(offset, shape, a) {\n const ret = new Array();\n if (shape.length === 1) {\n const d = shape[0];\n for (let i = 0; i < d; i++) {\n ret[i] = a[offset + i];\n }\n }\n else {\n const d = shape[0];\n const rest = shape.slice(1);\n const len = rest.reduce((acc, c) => acc * c);\n for (let i = 0; i < d; i++) {\n ret[i] = createNestedArray(offset + i * len, rest, a);\n }\n }\n return ret;\n}\n// Provide a nested array of TypedArray in given shape.\nexport function toNestedArray(shape, a) {\n if (shape.length === 0) {\n // Scalar type should return a single number.\n return a[0];\n }\n const size = shape.reduce((acc, c) => acc * c);\n if (size === 0) {\n // A tensor with shape zero should be turned into empty list.\n return [];\n }\n if (size !== a.length) {\n throw new Error(`[${shape}] does not match the input size ${a.length}.`);\n }\n return createNestedArray(0, shape, a);\n}\nexport function makeOnesTypedArray(size, dtype) {\n const array = makeZerosTypedArray(size, dtype);\n for (let i = 0; i < array.length; i++) {\n array[i] = 1;\n }\n return array;\n}\nexport function makeZerosTypedArray(size, dtype) {\n if (dtype == null || dtype === 'float32' || dtype === 'complex64') {\n return new Float32Array(size);\n }\n else if (dtype === 'int32') {\n return new Int32Array(size);\n }\n else if (dtype === 'bool') {\n return new Uint8Array(size);\n }\n else {\n throw new Error(`Unknown data type ${dtype}`);\n }\n}\n/**\n * Make nested `TypedArray` filled with zeros.\n * @param shape The shape information for the nested array.\n * @param dtype dtype of the array element.\n */\nexport function makeZerosNestedTypedArray(shape, dtype) {\n const size = shape.reduce((prev, curr) => prev * curr, 1);\n if (dtype == null || dtype === 'float32') {\n return toNestedArray(shape, new Float32Array(size));\n }\n else if (dtype === 'int32') {\n return toNestedArray(shape, new Int32Array(size));\n }\n else if (dtype === 'bool') {\n return toNestedArray(shape, new Uint8Array(size));\n }\n else {\n throw new Error(`Unknown data type ${dtype}`);\n }\n}\nexport function assertNonNegativeIntegerDimensions(shape) {\n shape.forEach(dimSize => {\n assert(Number.isInteger(dimSize) && dimSize >= 0, 
() => `Tensor must have a shape comprised of positive integers but got ` +\n `shape [${shape}].`);\n });\n}\n/**\n * Computes flat index for a given location (multidimentionsal index) in a\n * Tensor/multidimensional array.\n *\n * @param locs Location in the tensor.\n * @param rank Rank of the tensor.\n * @param strides Tensor strides.\n */\nexport function locToIndex(locs, rank, strides) {\n if (rank === 0) {\n return 0;\n }\n else if (rank === 1) {\n return locs[0];\n }\n let index = locs[locs.length - 1];\n for (let i = 0; i < locs.length - 1; ++i) {\n index += strides[i] * locs[i];\n }\n return index;\n}\n/**\n * Computes the location (multidimensional index) in a tensor/multidimentional\n * array for a given flat index.\n *\n * @param index Index in flat array.\n * @param rank Rank of tensor.\n * @param strides Strides of tensor.\n */\nexport function indexToLoc(index, rank, strides) {\n if (rank === 0) {\n return [];\n }\n else if (rank === 1) {\n return [index];\n }\n const locs = new Array(rank);\n for (let i = 0; i < locs.length - 1; ++i) {\n locs[i] = Math.floor(index / strides[i]);\n index -= locs[i] * strides[i];\n }\n locs[locs.length - 1] = index;\n return locs;\n}\n/**\n * This method asserts whether an object is a Promise instance.\n * @param object\n */\n// tslint:disable-next-line: no-any\nexport function isPromise(object) {\n // We chose to not use 'obj instanceOf Promise' for two reasons:\n // 1. It only reliably works for es6 Promise, not other Promise\n // implementations.\n // 2. It doesn't work with framework that uses zone.js. zone.js monkey patch\n // the async calls, so it is possible the obj (patched) is comparing to a\n // pre-patched Promise.\n return object && object.then && typeof object.then === 'function';\n}\n//# sourceMappingURL=util_base.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { isPromise } from './util_base';\n// Expects flags from URL in the format ?tfjsflags=FLAG1:1,FLAG2:true.\nconst TENSORFLOWJS_FLAGS_PREFIX = 'tfjsflags';\n/**\n * The environment contains evaluated flags as well as the registered platform.\n * This is always used as a global singleton and can be retrieved with\n * `tf.env()`.\n *\n * @doc {heading: 'Environment'}\n */\nexport class Environment {\n // tslint:disable-next-line: no-any\n constructor(global) {\n this.global = global;\n this.flags = {};\n this.flagRegistry = {};\n this.urlFlags = {};\n this.populateURLFlags();\n }\n setPlatform(platformName, platform) {\n if (this.platform != null) {\n console.warn(`Platform ${this.platformName} has already been set. ` +\n `Overwriting the platform with ${platform}.`);\n }\n this.platformName = platformName;\n this.platform = platform;\n }\n registerFlag(flagName, evaluationFn, setHook) {\n this.flagRegistry[flagName] = { evaluationFn, setHook };\n // Override the flag value from the URL. 
This has to happen here because the\n // environment is initialized before flags get registered.\n if (this.urlFlags[flagName] != null) {\n const flagValue = this.urlFlags[flagName];\n console.warn(`Setting feature override from URL ${flagName}: ${flagValue}.`);\n this.set(flagName, flagValue);\n }\n }\n async getAsync(flagName) {\n if (flagName in this.flags) {\n return this.flags[flagName];\n }\n this.flags[flagName] = await this.evaluateFlag(flagName);\n return this.flags[flagName];\n }\n get(flagName) {\n if (flagName in this.flags) {\n return this.flags[flagName];\n }\n const flagValue = this.evaluateFlag(flagName);\n if (isPromise(flagValue)) {\n throw new Error(`Flag ${flagName} cannot be synchronously evaluated. ` +\n `Please use getAsync() instead.`);\n }\n this.flags[flagName] = flagValue;\n return this.flags[flagName];\n }\n getNumber(flagName) {\n return this.get(flagName);\n }\n getBool(flagName) {\n return this.get(flagName);\n }\n getFlags() {\n return this.flags;\n }\n // For backwards compatibility.\n get features() {\n return this.flags;\n }\n set(flagName, value) {\n if (this.flagRegistry[flagName] == null) {\n throw new Error(`Cannot set flag ${flagName} as it has not been registered.`);\n }\n this.flags[flagName] = value;\n if (this.flagRegistry[flagName].setHook != null) {\n this.flagRegistry[flagName].setHook(value);\n }\n }\n evaluateFlag(flagName) {\n if (this.flagRegistry[flagName] == null) {\n throw new Error(`Cannot evaluate flag '${flagName}': no evaluation function found.`);\n }\n return this.flagRegistry[flagName].evaluationFn();\n }\n setFlags(flags) {\n this.flags = Object.assign({}, flags);\n }\n reset() {\n this.flags = {};\n this.urlFlags = {};\n this.populateURLFlags();\n }\n populateURLFlags() {\n if (typeof this.global === 'undefined' ||\n typeof this.global.location === 'undefined' ||\n typeof this.global.location.search === 'undefined') {\n return;\n }\n const urlParams = getQueryParams(this.global.location.search);\n if (TENSORFLOWJS_FLAGS_PREFIX in urlParams) {\n const keyValues = urlParams[TENSORFLOWJS_FLAGS_PREFIX].split(',');\n keyValues.forEach(keyValue => {\n const [key, value] = keyValue.split(':');\n this.urlFlags[key] = parseValue(key, value);\n });\n }\n }\n}\nexport function getQueryParams(queryString) {\n const params = {};\n queryString.replace(/[?&]([^=?&]+)(?:=([^&]*))?/g, (s, ...t) => {\n decodeParam(params, t[0], t[1]);\n return t.join('=');\n });\n return params;\n}\nfunction decodeParam(params, name, value) {\n params[decodeURIComponent(name)] = decodeURIComponent(value || '');\n}\nfunction parseValue(flagName, value) {\n value = value.toLowerCase();\n if (value === 'true' || value === 'false') {\n return value === 'true';\n }\n else if (`${+value}` === value) {\n return +value;\n }\n throw new Error(`Could not parse value flag value ${value} for flag ${flagName}.`);\n}\n/**\n * Returns the current environment (a global singleton).\n *\n * The environment object contains the evaluated feature values as well as the\n * active platform.\n *\n * @doc {heading: 'Environment'}\n */\nexport function env() {\n return ENV;\n}\nexport let ENV = null;\nexport function setEnvironmentGlobal(environment) {\n ENV = environment;\n}\n//# sourceMappingURL=environment.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// Note that the identifier globalNameSpace is scoped to this module, but will\n// always resolve to the same global object regardless of how the module is\n// resolved.\n// tslint:disable-next-line:no-any\nlet globalNameSpace;\n// tslint:disable-next-line:no-any\nexport function getGlobalNamespace() {\n if (globalNameSpace == null) {\n // tslint:disable-next-line:no-any\n let ns;\n if (typeof (window) !== 'undefined') {\n ns = window;\n }\n else if (typeof (global) !== 'undefined') {\n ns = global;\n }\n else if (typeof (process) !== 'undefined') {\n ns = process;\n }\n else if (typeof (self) !== 'undefined') {\n ns = self;\n }\n else {\n throw new Error('Could not find a global object');\n }\n globalNameSpace = ns;\n }\n return globalNameSpace;\n}\n// tslint:disable-next-line:no-any\nfunction getGlobalMap() {\n const ns = getGlobalNamespace();\n if (ns._tfGlobals == null) {\n ns._tfGlobals = new Map();\n }\n return ns._tfGlobals;\n}\n/**\n * Returns a globally accessible 'singleton' object.\n *\n * @param key the name of the object\n * @param init a function to initialize to initialize this object\n * the first time it is fetched.\n */\nexport function getGlobal(key, init) {\n const globalMap = getGlobalMap();\n if (globalMap.has(key)) {\n return globalMap.get(key);\n }\n else {\n const singleton = init();\n globalMap.set(key, singleton);\n return globalMap.get(key);\n }\n}\n//# sourceMappingURL=global_util.js.map", "export const Abs = 'Abs';\nexport const Acos = 'Acos';\nexport const Acosh = 'Acosh';\nexport const Add = 'Add';\nexport const AddN = 'AddN';\nexport const All = 'All';\nexport const Any = 'Any';\nexport const ArgMax = 'ArgMax';\nexport const ArgMin = 'ArgMin';\nexport const Asin = 'Asin';\nexport const Asinh = 'Asinh';\nexport const Atan = 'Atan';\nexport const Atanh = 'Atanh';\nexport const Atan2 = 'Atan2';\nexport const AvgPool = 'AvgPool';\nexport const AvgPoolBackprop = 'AvgPoolBackprop';\nexport const AvgPool3D = 'AvgPool3D';\nexport const AvgPool3DBackprop = 'AvgPool3DBackprop';\nexport const BatchMatMul = 'BatchMatMul';\nexport const BatchToSpaceND = 'BatchToSpaceND';\nexport const BroadcastTo = 'BroadcastTo';\nexport const Cast = 'Cast';\nexport const Ceil = 'Ceil';\nexport const ClipByValue = 'ClipByValue';\nexport const Complex = 'Complex';\nexport const Concat = 'Concat';\nexport const Conv2D = 'Conv2D';\nexport const Conv2DBackpropFilter = 'Conv2DBackpropFilter';\nexport const Conv2DBackpropInput = 'Conv2DBackpropInput';\nexport const Conv3D = 'Conv3D';\nexport const Conv3DBackpropFilterV2 = 'Conv3DBackpropFilterV2';\nexport const Conv3DBackpropInputV2 = 'Conv3DBackpropInputV2';\nexport const Cos = 'Cos';\nexport const Cosh = 'Cosh';\nexport const Cumsum = 'Cumsum';\nexport const CropAndResize = 'CropAndResize';\nexport const DepthToSpace = 'DepthToSpace';\nexport const DepthwiseConv2dNative = 
'DepthwiseConv2dNative';\nexport const DepthwiseConv2dNativeBackpropFilter = 'DepthwiseConv2dNativeBackpropFilter';\nexport const DepthwiseConv2dNativeBackpropInput = 'DepthwiseConv2dNativeBackpropInput';\nexport const Diag = 'Diag';\nexport const Dilation2D = 'Dilation2D';\nexport const Dilation2DBackpropInput = 'Dilation2DBackpropInput';\nexport const Dilation2DBackpropFilter = 'Dilation2DBackpropFilter';\nexport const Div = 'Div';\nexport const Elu = 'Elu';\nexport const EluGrad = 'EluGrad';\nexport const Erf = 'Erf';\nexport const Equal = 'Equal';\nexport const Exp = 'Exp';\nexport const Expm1 = 'Expm1';\nexport const FFT = 'FFT';\nexport const Fill = 'Fill';\nexport const FlipLeftRight = 'FlipLeftRight';\nexport const Floor = 'Floor';\nexport const FloorDiv = 'FloorDiv';\nexport const FusedBatchNorm = 'FusedBatchNorm';\nexport const GatherV2 = 'GatherV2';\nexport const GatherNd = 'GatherNd';\nexport const Greater = 'Greater';\nexport const GreaterEqual = 'GreaterEqual';\nexport const Identity = 'Identity';\nexport const IFFT = 'IFFT';\nexport const Imag = 'Imag';\nexport const IsFinite = 'IsFinite';\nexport const IsInf = 'IsInf';\nexport const IsNan = 'IsNan';\nexport const Less = 'Less';\nexport const LessEqual = 'LessEqual';\nexport const LinSpace = 'LinSpace';\nexport const Log = 'Log';\nexport const Log1p = 'Log1p';\nexport const LogicalAnd = 'LogicalAnd';\nexport const LogicalNot = 'LogicalNot';\nexport const LogicalOr = 'LogicalOr';\nexport const LogSoftmax = 'LogSoftmax';\nexport const LRN = 'LRN';\nexport const LRNBackprop = 'LRNBackprop';\nexport const Max = 'Max';\nexport const Maximum = 'Maximum';\nexport const MaxPool = 'MaxPool';\nexport const MaxPoolBackprop = 'MaxPoolBackprop';\nexport const MaxPool3D = 'MaxPool3D';\nexport const MaxPool3DBackprop = 'MaxPool3DBackprop';\nexport const MaxPoolWithArgmax = 'MaxPoolWithArgmax';\nexport const Mean = 'Mean';\nexport const Min = 'Min';\nexport const Minimum = 'Minimum';\nexport const MirrorPad = 'MirrorPad';\nexport const Mod = 'Mod';\nexport const Multiply = 'Multiply';\nexport const Negate = 'Negate';\nexport const NotEqual = 'NotEqual';\nexport const NonMaxSuppressionV3 = 'NonMaxSuppressionV3';\nexport const NonMaxSuppressionV4 = 'NonMaxSuppressionV4';\nexport const NonMaxSuppressionV5 = 'NonMaxSuppressionV5';\nexport const OnesLike = 'OnesLike';\nexport const OneHot = 'OneHot';\nexport const PadV2 = 'PadV2';\nexport const Pool = 'Pool';\nexport const Pow = 'Pow';\nexport const Prelu = 'Prelu';\nexport const Prod = 'Prod';\nexport const Range = 'Range';\nexport const Real = 'Real';\nexport const Reciprocal = 'Reciprocal';\nexport const Relu = 'Relu';\nexport const Reshape = 'Reshape';\nexport const ResizeNearestNeighbor = 'ResizeNearestNeighbor';\nexport const ResizeNearestNeighborGrad = 'ResizeNearestNeighborGrad';\nexport const ResizeBilinear = 'ResizeBilinear';\nexport const ResizeBilinearGrad = 'ResizeBilinearGrad';\nexport const Relu6 = 'Relu6';\nexport const Reverse = 'Reverse';\nexport const Round = 'Round';\nexport const Rsqrt = 'Rsqrt';\nexport const ScatterNd = 'ScatterNd';\nexport const SelectV2 = 'SelectV2';\nexport const Selu = 'Selu';\nexport const Slice = 'Slice';\nexport const Sin = 'Sin';\nexport const Sinh = 'Sinh';\nexport const Sign = 'Sign';\nexport const Sigmoid = 'Sigmoid';\nexport const Softplus = 'Softplus';\nexport const Sqrt = 'Sqrt';\nexport const Sum = 'Sum';\nexport const SpaceToBatchND = 'SpaceToBatchND';\nexport const SplitV = 'SplitV';\nexport const Softmax = 'Softmax';\nexport const 
SquaredDifference = 'SquaredDifference';\nexport const Square = 'Square';\nexport const Sub = 'Sub';\nexport const SparseToDense = 'SparseToDense';\nexport const StridedSlice = 'StridedSlice';\nexport const Tan = 'Tan';\nexport const Tanh = 'Tanh';\nexport const Tile = 'Tile';\nexport const TopK = 'TopK';\nexport const Transpose = 'Transpose';\nexport const Unique = 'Unique';\nexport const Unpack = 'Unpack';\nexport const UnsortedSegmentSum = 'UnsortedSegmentSum';\nexport const ZerosLike = 'ZerosLike';\n/**\n * TensorFlow.js-only kernels\n */\nexport const Step = 'Step';\nexport const FromPixels = 'FromPixels';\nexport const RotateWithOffset = 'RotateWithOffset';\nexport const _FusedMatMul = '_FusedMatMul';\nexport const FusedConv2D = 'FusedConv2D';\nexport const FusedDepthwiseConv2D = 'FusedDepthwiseConv2D';\n//# sourceMappingURL=kernel_names.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from './environment';\nimport { getGlobal } from './global_util';\nconst kernelRegistry = getGlobal('kernelRegistry', () => new Map());\nconst gradRegistry = getGlobal('gradRegistry', () => new Map());\n/**\n * Returns the kernel function (code) associated with the provided names.\n *\n * @param kernelName The official name of the kernel.\n * @param backendName The official name of the backend.\n */\nexport function getKernel(kernelName, backendName) {\n const key = makeKey(kernelName, backendName);\n return kernelRegistry.get(key);\n}\n/**\n * Returns the registered gradient info associated with the provided kernel.\n * @param kernelName The official TF kernel name.\n */\nexport function getGradient(kernelName) {\n return gradRegistry.get(kernelName);\n}\nexport function getKernelsForBackend(backendName) {\n const it = kernelRegistry.entries();\n const result = [];\n while (true) {\n const { done, value } = it.next();\n if (done) {\n break;\n }\n const [key, config] = value;\n const [backend,] = key.split('_');\n if (backend === backendName) {\n result.push(config);\n }\n }\n return result;\n}\n/**\n * Registers the function (forward pass) for the kernel in a global registry.\n *\n * @param config A config object with the following properties:\n * - `kernelName` The official name of the kernel.\n * - `backendName` The official name of the backend.\n * - `kernelFunc` The function to run during the forward pass of the kernel.\n * - `setupFunc` Optional. Gets called once, after the backend initializes.\n * - `disposeFunc` Optional. 
Gets called once, right before the backend is\n * disposed.\n */\nexport function registerKernel(config) {\n const { kernelName, backendName } = config;\n const key = makeKey(kernelName, backendName);\n if (kernelRegistry.has(key)) {\n console.warn(`The kernel '${kernelName}' for backend ` +\n `'${backendName}' is already registered`);\n }\n kernelRegistry.set(key, config);\n}\n/**\n * Registers a gradient function for a given kernel in the global registry,\n * to be used during the back-propagation of that kernel.\n *\n * @param config An object with the following properties:\n * - `kernelName` The name of the kernel that the gradient function is for.\n * - `gradFunc` The function to run during back-propagation.\n */\nexport function registerGradient(config) {\n const { kernelName } = config;\n if (gradRegistry.has(kernelName)) {\n // TODO (yassogba) after 3.0 assess whether we need to keep this gated\n // to debug mode.\n if (env().getBool('DEBUG')) {\n console.warn(`Overriding the gradient for '${kernelName}'`);\n }\n }\n gradRegistry.set(kernelName, config);\n}\n/**\n * Removes the kernel function from the registry.\n *\n * @param kernelName The official name of the kernel.\n * @param backendName The official name of the backend.\n *\n */\nexport function unregisterKernel(kernelName, backendName) {\n const key = makeKey(kernelName, backendName);\n if (!kernelRegistry.has(key)) {\n throw new Error(`The kernel '${kernelName}' for backend ` +\n `'${backendName}' is not registered`);\n }\n kernelRegistry.delete(key);\n}\n/** Removes the registered gradient from the global registry. */\nexport function unregisterGradient(kernelName) {\n if (!gradRegistry.has(kernelName)) {\n throw new Error(`The gradient '${kernelName}' for backend is not registered`);\n }\n gradRegistry.delete(kernelName);\n}\n/**\n * Finds kernels that have already been registered to a backend and re-registers\n * them for a new backend. Useful for registering custom backends.\n * @param registeredBackendName Already registered backend.\n * @param newBackendName New backend.\n */\nexport function copyRegisteredKernels(registeredBackendName, newBackendName) {\n const kernels = getKernelsForBackend(registeredBackendName);\n kernels.forEach(kernelConfig => {\n const newKernelConfig = Object.assign({}, kernelConfig, { backendName: newBackendName });\n registerKernel(newKernelConfig);\n });\n}\nfunction makeKey(kernelName, backendName) {\n return `${backendName}_${kernelName}`;\n}\n//# sourceMappingURL=kernel_registry.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from './environment';\nimport * as base from './util_base';\nexport * from './util_base';\n/**\n * Create typed array for scalar value. 
Used for storing in `DataStorage`.\n */\nexport function createScalarValue(value, dtype) {\n if (dtype === 'string') {\n return encodeString(value);\n }\n return toTypedArray([value], dtype);\n}\nfunction noConversionNeeded(a, dtype) {\n return (a instanceof Float32Array && dtype === 'float32') ||\n (a instanceof Int32Array && dtype === 'int32') ||\n (a instanceof Uint8Array && dtype === 'bool');\n}\nexport function toTypedArray(a, dtype) {\n if (dtype === 'string') {\n throw new Error('Cannot convert a string[] to a TypedArray');\n }\n if (Array.isArray(a)) {\n a = base.flatten(a);\n }\n if (env().getBool('DEBUG')) {\n base.checkConversionForErrors(a, dtype);\n }\n if (noConversionNeeded(a, dtype)) {\n return a;\n }\n if (dtype == null || dtype === 'float32' || dtype === 'complex64') {\n return new Float32Array(a);\n }\n else if (dtype === 'int32') {\n return new Int32Array(a);\n }\n else if (dtype === 'bool') {\n const bool = new Uint8Array(a.length);\n for (let i = 0; i < bool.length; ++i) {\n if (Math.round(a[i]) !== 0) {\n bool[i] = 1;\n }\n }\n return bool;\n }\n else {\n throw new Error(`Unknown data type ${dtype}`);\n }\n}\n/**\n * Returns the current high-resolution time in milliseconds relative to an\n * arbitrary time in the past. It works across different platforms (node.js,\n * browsers).\n *\n * ```js\n * console.log(tf.util.now());\n * ```\n *\n * @doc {heading: 'Util', namespace: 'util'}\n */\nexport function now() {\n return env().platform.now();\n}\n/**\n * Returns a platform-specific implementation of\n * [`fetch`](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API).\n *\n * If `fetch` is defined on the global object (`window`, `process`, etc.),\n * `tf.util.fetch` returns that function.\n *\n * If not, `tf.util.fetch` returns a platform-specific solution.\n *\n * ```js\n * const resource = await tf.util.fetch('https://unpkg.com/@tensorflow/tfjs');\n * // handle response\n * ```\n *\n * @doc {heading: 'Util'}\n */\nexport function fetch(path, requestInits) {\n return env().platform.fetch(path, requestInits);\n}\n/**\n * Encodes the provided string into bytes using the provided encoding scheme.\n *\n * @param s The string to encode.\n * @param encoding The encoding scheme. Defaults to utf-8.\n *\n * @doc {heading: 'Util'}\n */\nexport function encodeString(s, encoding = 'utf-8') {\n encoding = encoding || 'utf-8';\n return env().platform.encode(s, encoding);\n}\n/**\n * Decodes the provided bytes into a string using the provided encoding scheme.\n * @param bytes The bytes to decode.\n *\n * @param encoding The encoding scheme. Defaults to utf-8.\n *\n * @doc {heading: 'Util'}\n */\nexport function decodeString(bytes, encoding = 'utf-8') {\n encoding = encoding || 'utf-8';\n return env().platform.decode(bytes, encoding);\n}\n//# sourceMappingURL=util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as util from './util';\nexport class Profiler {\n constructor(backendTimer, logger) {\n this.backendTimer = backendTimer;\n this.logger = logger;\n if (logger == null) {\n this.logger = new Logger();\n }\n }\n profileKernel(kernelName, inputs, f) {\n let outputs;\n const holdResultWrapperFn = () => {\n outputs = f();\n };\n const timer = this.backendTimer.time(holdResultWrapperFn);\n for (let i = 0; i < outputs.length; i++) {\n const output = outputs[i];\n // Dangling promise here because we don't want to propagate up\n // asynchronicity.\n output.data().then(tensorVals => {\n checkComputationForErrors(tensorVals, output.dtype, kernelName);\n });\n }\n const kernelProfile = {\n kernelName,\n outputs,\n inputs,\n timeMs: timer.then(timing => timing.kernelMs),\n extraInfo: timer.then(timing => timing.getExtraProfileInfo != null ?\n timing.getExtraProfileInfo() :\n '')\n };\n return kernelProfile;\n }\n logKernelProfile(kernelProfile) {\n const { kernelName, outputs, timeMs, inputs, extraInfo } = kernelProfile;\n outputs.forEach(result => {\n Promise.all([result.data(), timeMs, extraInfo]).then(valueContainer => {\n this.logger.logKernelProfile(kernelName, result, valueContainer[0], valueContainer[1], inputs, valueContainer[2]);\n });\n });\n }\n}\nexport function checkComputationForErrors(vals, dtype, kernelName) {\n if (dtype !== 'float32') {\n // Only floating point computations will generate NaN values\n return false;\n }\n for (let i = 0; i < vals.length; i++) {\n const num = vals[i];\n if (isNaN(num) || !isFinite(num)) {\n // Throwing custom exception so behavior is testable.\n console.warn(`Found ${num} in the result of '${kernelName}'`);\n return true;\n }\n }\n return false;\n}\nexport class Logger {\n logKernelProfile(name, result, vals, timeMs, inputs, extraInfo) {\n const time = typeof timeMs === 'number' ? util.rightPad(`${timeMs}ms`, 9) :\n timeMs['error'];\n const paddedName = util.rightPad(name, 25);\n const rank = result.rank;\n const size = result.size;\n const shape = util.rightPad(result.shape.toString(), 14);\n let inputShapesDescription = '';\n for (const name in inputs) {\n const input = inputs[name];\n if (input != null) {\n // The input might be a non-tensor (e.g HTMLImageElement), in which case\n // we claim the output shape as input shape.\n const inputShape = input.shape || result.shape;\n const inputRank = inputShape.length;\n inputShapesDescription +=\n `${name}: ${inputRank}D ${inputRank > 0 ? inputShape : ''} `;\n }\n }\n console.log(`%c${paddedName}\\t%c${time}\\t%c${rank}D ${shape}\\t%c${size}\\t%c${inputShapesDescription}\\t%c${extraInfo}`, 'font-weight:bold', 'color:red', 'color:blue', 'color: orange', 'color: green', 'color: steelblue');\n }\n}\n//# sourceMappingURL=profiler.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as util from './util';\n/**\n * Computes a list of TapeNodes that connect x to y, filtering everything else\n * out and preserving the order of the original tape elements.\n *\n * @param tape The tape elements to filter.\n * @param xs The input Tensors.\n * @param y The output Tensor.\n */\nexport function getFilteredNodesXToY(tape, xs, y) {\n // Forward pass to compute all the nodes and Tensors that are transitively a\n // function of x.\n const tensorsFromX = {};\n const nodesFromX = {};\n for (let i = 0; i < xs.length; i++) {\n tensorsFromX[xs[i].id] = true;\n }\n for (let i = 0; i < tape.length; i++) {\n const node = tape[i];\n const nodeInputs = node.inputs;\n for (const inputName in nodeInputs) {\n const input = nodeInputs[inputName];\n let anyInputFromX = false;\n for (let j = 0; j < xs.length; j++) {\n if (tensorsFromX[input.id]) {\n node.outputs.forEach(output => tensorsFromX[output.id] = true);\n anyInputFromX = true;\n nodesFromX[node.id] = true;\n break;\n }\n }\n if (anyInputFromX) {\n break;\n }\n }\n }\n // Backward pass to find all of the nodes and Tensors that lead to y.\n const tensorsLeadToY = {};\n tensorsLeadToY[y.id] = true;\n const nodesToY = {};\n for (let i = tape.length - 1; i >= 0; i--) {\n const node = tape[i];\n const nodeInputs = node.inputs;\n // If any of the outputs lead to y, mark all of the inputs as leading to y.\n for (let j = 0; j < node.outputs.length; j++) {\n if (tensorsLeadToY[node.outputs[j].id]) {\n for (const inputName in nodeInputs) {\n tensorsLeadToY[nodeInputs[inputName].id] = true;\n nodesToY[node.id] = true;\n }\n break;\n }\n }\n }\n // Return the paths that come from x and lead to y.\n const filteredTape = [];\n for (let i = 0; i < tape.length; i++) {\n const node = tape[i];\n if (nodesFromX[node.id] && nodesToY[node.id]) {\n // Prune the inputs from the node that aren't a function of x.\n const prunedInputs = {};\n for (const inputName in node.inputs) {\n const nodeInput = node.inputs[inputName];\n if (tensorsFromX[nodeInput.id]) {\n prunedInputs[inputName] = nodeInput;\n }\n }\n // Copy the node and overwrite inputsAndArgs to the pruned version.\n const prunedNode = Object.assign({}, node);\n prunedNode.inputs = prunedInputs;\n prunedNode.outputs = node.outputs;\n filteredTape.push(prunedNode);\n }\n }\n return filteredTape;\n}\n/**\n * Backpropagate gradients through the filtered TapeNodes.\n *\n * @param tensorAccumulatedGradientMap A map of Tensor to its gradient. 
This map\n * is mutated by this method.\n * @param filteredTape The filtered TapeNodes to backprop through.\n */\nexport function backpropagateGradients(tensorAccumulatedGradientMap, filteredTape, tidy, add) {\n // Walk the tape backward and keep a map of Tensor to its gradient.\n for (let i = filteredTape.length - 1; i >= 0; i--) {\n const node = filteredTape[i];\n const dys = [];\n node.outputs.forEach(o => {\n const gradTensor = tensorAccumulatedGradientMap[o.id];\n if (gradTensor != null) {\n dys.push(gradTensor);\n }\n else {\n // This particular output is not in the back-propagation subgraph, so it\n // does not affect the final output, thus we put null for its dy.\n dys.push(null);\n }\n });\n if (node.gradient == null) {\n throw new Error(`Cannot compute gradient: gradient function not found ` +\n `for ${node.kernelName}.`);\n }\n // Backprop dy through this node and accumulate gradients over the inputs.\n const inputGradients = node.gradient(dys);\n for (const inputName in node.inputs) {\n if (!(inputName in inputGradients)) {\n throw new Error(`Cannot backprop through input ${inputName}. ` +\n `Available gradients found: ${Object.keys(inputGradients)}.`);\n }\n // Call the gradient function.\n const dx = tidy(() => inputGradients[inputName]());\n if (dx.dtype !== 'float32') {\n throw new Error(`Error in gradient for op ${node.kernelName}. The gradient of input ` +\n `${inputName} must have 'float32' dtype, but has '${dx.dtype}'`);\n }\n const x = node.inputs[inputName];\n if (!util.arraysEqual(dx.shape, x.shape)) {\n throw new Error(`Error in gradient for op ${node.kernelName}. The gradient of input ` +\n `'${inputName}' has shape '${dx.shape}', which does not match ` +\n `the shape of the input '${x.shape}'`);\n }\n if (tensorAccumulatedGradientMap[x.id] == null) {\n tensorAccumulatedGradientMap[x.id] = dx;\n }\n else {\n const curGradient = tensorAccumulatedGradientMap[x.id];\n tensorAccumulatedGradientMap[x.id] = add(curGradient, dx);\n curGradient.dispose();\n }\n }\n }\n}\n//# sourceMappingURL=tape.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
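/*
 * The tape filtering and backpropagateGradients helpers above power the public
 * gradient API; a minimal sketch (assumes `@tensorflow/tfjs`).
 */
import * as tf from '@tensorflow/tfjs';

const f = x => x.square();           // y = x^2
const g = tf.grad(f);                // dy/dx = 2x, computed by walking the filtered tape backward
g(tf.tensor1d([2, 3])).print();      // [4, 6]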
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { computeStrides, isString, rightPad, sizeFromShape } from './util';\n// Maximum number of values before we decide to show ellipsis.\nconst FORMAT_LIMIT_NUM_VALS = 20;\n// Number of first and last values to show when displaying a, b,...,y, z.\nconst FORMAT_NUM_FIRST_LAST_VALS = 3;\n// Number of significant digits to show.\nconst FORMAT_NUM_SIG_DIGITS = 7;\nexport function tensorToString(vals, shape, dtype, verbose) {\n const strides = computeStrides(shape);\n const padPerCol = computeMaxSizePerColumn(vals, shape, dtype, strides);\n const rank = shape.length;\n const valsLines = subTensorToString(vals, shape, dtype, strides, padPerCol);\n const lines = ['Tensor'];\n if (verbose) {\n lines.push(` dtype: ${dtype}`);\n lines.push(` rank: ${rank}`);\n lines.push(` shape: [${shape}]`);\n lines.push(` values:`);\n }\n lines.push(valsLines.map(l => ' ' + l).join('\\n'));\n return lines.join('\\n');\n}\nfunction computeMaxSizePerColumn(vals, shape, dtype, strides) {\n const n = sizeFromShape(shape);\n const numCols = strides[strides.length - 1];\n const padPerCol = new Array(numCols).fill(0);\n const rank = shape.length;\n const valuesOrTuples = dtype === 'complex64' ? createComplexTuples(vals) : vals;\n if (rank > 1) {\n for (let row = 0; row < n / numCols; row++) {\n const offset = row * numCols;\n for (let j = 0; j < numCols; j++) {\n padPerCol[j] = Math.max(padPerCol[j], valToString(valuesOrTuples[offset + j], 0, dtype).length);\n }\n }\n }\n return padPerCol;\n}\nfunction valToString(val, pad, dtype) {\n let valStr;\n if (Array.isArray(val)) {\n valStr = `${parseFloat(val[0].toFixed(FORMAT_NUM_SIG_DIGITS))} + ` +\n `${parseFloat(val[1].toFixed(FORMAT_NUM_SIG_DIGITS))}j`;\n }\n else if (isString(val)) {\n valStr = `'${val}'`;\n }\n else if (dtype === 'bool') {\n valStr = boolNumToString(val);\n }\n else {\n valStr = parseFloat(val.toFixed(FORMAT_NUM_SIG_DIGITS)).toString();\n }\n return rightPad(valStr, pad);\n}\nfunction boolNumToString(v) {\n return v === 0 ? 'false' : 'true';\n}\nfunction subTensorToString(vals, shape, dtype, strides, padPerCol, isLast = true) {\n const storagePerElement = dtype === 'complex64' ? 
2 : 1;\n const size = shape[0];\n const rank = shape.length;\n if (rank === 0) {\n if (dtype === 'complex64') {\n const complexTuple = createComplexTuples(vals);\n return [valToString(complexTuple[0], 0, dtype)];\n }\n if (dtype === 'bool') {\n return [boolNumToString(vals[0])];\n }\n return [vals[0].toString()];\n }\n if (rank === 1) {\n if (size > FORMAT_LIMIT_NUM_VALS) {\n const firstValsSize = FORMAT_NUM_FIRST_LAST_VALS * storagePerElement;\n let firstVals = Array.from(vals.slice(0, firstValsSize));\n let lastVals = Array.from(vals.slice((size - FORMAT_NUM_FIRST_LAST_VALS) * storagePerElement, size * storagePerElement));\n if (dtype === 'complex64') {\n firstVals = createComplexTuples(firstVals);\n lastVals = createComplexTuples(lastVals);\n }\n return [\n '[' +\n firstVals.map((x, i) => valToString(x, padPerCol[i], dtype))\n .join(', ') +\n ', ..., ' +\n lastVals\n .map((x, i) => valToString(x, padPerCol[size - FORMAT_NUM_FIRST_LAST_VALS + i], dtype))\n .join(', ') +\n ']'\n ];\n }\n const displayVals = dtype === 'complex64' ? createComplexTuples(vals) :\n Array.from(vals);\n return [\n '[' +\n displayVals.map((x, i) => valToString(x, padPerCol[i], dtype))\n .join(', ') +\n ']'\n ];\n }\n // The array is rank 2 or more.\n const subshape = shape.slice(1);\n const substrides = strides.slice(1);\n const stride = strides[0] * storagePerElement;\n const lines = [];\n if (size > FORMAT_LIMIT_NUM_VALS) {\n for (let i = 0; i < FORMAT_NUM_FIRST_LAST_VALS; i++) {\n const start = i * stride;\n const end = start + stride;\n lines.push(...subTensorToString(vals.slice(start, end), subshape, dtype, substrides, padPerCol, false /* isLast */));\n }\n lines.push('...');\n for (let i = size - FORMAT_NUM_FIRST_LAST_VALS; i < size; i++) {\n const start = i * stride;\n const end = start + stride;\n lines.push(...subTensorToString(vals.slice(start, end), subshape, dtype, substrides, padPerCol, i === size - 1 /* isLast */));\n }\n }\n else {\n for (let i = 0; i < size; i++) {\n const start = i * stride;\n const end = start + stride;\n lines.push(...subTensorToString(vals.slice(start, end), subshape, dtype, substrides, padPerCol, i === size - 1 /* isLast */));\n }\n }\n const sep = rank === 2 ? ',' : '';\n lines[0] = '[' + lines[0] + sep;\n for (let i = 1; i < lines.length - 1; i++) {\n lines[i] = ' ' + lines[i] + sep;\n }\n let newLineSep = ',\\n';\n for (let i = 2; i < rank; i++) {\n newLineSep += '\\n';\n }\n lines[lines.length - 1] =\n ' ' + lines[lines.length - 1] + ']' + (isLast ? '' : newLineSep);\n return lines;\n}\nfunction createComplexTuples(vals) {\n const complexTuples = [];\n for (let i = 0; i < vals.length; i += 2) {\n complexTuples.push([vals[i], vals[i + 1]]);\n }\n return complexTuples;\n}\n//# sourceMappingURL=tensor_format.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
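/*
 * tensorToString above is what `tensor.print()` ultimately calls; a small
 * sketch of verbose printing (assumes `@tensorflow/tfjs`).
 */
import * as tf from '@tensorflow/tfjs';

const t = tf.tensor2d([[1.123456789, 2], [3, 4]]);
t.print(true);   // verbose: also prints dtype, rank and shape; values shown to 7 significant digits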
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { tensorToString } from './tensor_format';\nimport * as util from './util';\nimport { computeStrides, toNestedArray } from './util';\n/**\n * A mutable object, similar to `tf.Tensor`, that allows users to set values\n * at locations before converting to an immutable `tf.Tensor`.\n *\n * See `tf.buffer` for creating a tensor buffer.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nexport class TensorBuffer {\n constructor(shape, dtype, values) {\n this.dtype = dtype;\n this.shape = shape.slice();\n this.size = util.sizeFromShape(shape);\n if (values != null) {\n const n = values.length;\n util.assert(n === this.size, () => `Length of values '${n}' does not match the size ` +\n `inferred by the shape '${this.size}'.`);\n }\n if (dtype === 'complex64') {\n throw new Error(`complex64 dtype TensorBuffers are not supported. Please create ` +\n `a TensorBuffer for the real and imaginary parts separately and ` +\n `call tf.complex(real, imag).`);\n }\n this.values = values || util.getArrayFromDType(dtype, this.size);\n this.strides = computeStrides(shape);\n }\n /**\n * Sets a value in the buffer at a given location.\n *\n * @param value The value to set.\n * @param locs The location indices.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\n set(value, ...locs) {\n if (locs.length === 0) {\n locs = [0];\n }\n util.assert(locs.length === this.rank, () => `The number of provided coordinates (${locs.length}) must ` +\n `match the rank (${this.rank})`);\n const index = this.locToIndex(locs);\n this.values[index] = value;\n }\n /**\n * Returns the value in the buffer at the provided location.\n *\n * @param locs The location indices.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\n get(...locs) {\n if (locs.length === 0) {\n locs = [0];\n }\n let i = 0;\n for (const loc of locs) {\n if (loc < 0 || loc >= this.shape[i]) {\n const msg = `Requested out of range element at ${locs}. 
` +\n ` Buffer shape=${this.shape}`;\n throw new Error(msg);\n }\n i++;\n }\n let index = locs[locs.length - 1];\n for (let i = 0; i < locs.length - 1; ++i) {\n index += this.strides[i] * locs[i];\n }\n return this.values[index];\n }\n locToIndex(locs) {\n if (this.rank === 0) {\n return 0;\n }\n else if (this.rank === 1) {\n return locs[0];\n }\n let index = locs[locs.length - 1];\n for (let i = 0; i < locs.length - 1; ++i) {\n index += this.strides[i] * locs[i];\n }\n return index;\n }\n indexToLoc(index) {\n if (this.rank === 0) {\n return [];\n }\n else if (this.rank === 1) {\n return [index];\n }\n const locs = new Array(this.shape.length);\n for (let i = 0; i < locs.length - 1; ++i) {\n locs[i] = Math.floor(index / this.strides[i]);\n index -= locs[i] * this.strides[i];\n }\n locs[locs.length - 1] = index;\n return locs;\n }\n get rank() {\n return this.shape.length;\n }\n /**\n * Creates an immutable `tf.Tensor` object from the buffer.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\n toTensor() {\n return trackerFn().makeTensor(this.values, this.shape, this.dtype);\n }\n}\n// For tracking tensor creation and disposal.\nlet trackerFn = null;\n// Used by chaining methods to call into ops.\nlet opHandler = null;\n// Used to warn about deprecated methods.\nlet deprecationWarningFn = null;\n// This here so that we can use this method on dev branches and keep the\n// functionality at master.\n// tslint:disable-next-line:no-unused-expression\n[deprecationWarningFn];\n/**\n * An external consumer can register itself as the tensor tracker. This way\n * the Tensor class can notify the tracker for every tensor created and\n * disposed.\n */\nexport function setTensorTracker(fn) {\n trackerFn = fn;\n}\n/**\n * An external consumer can register itself as the op handler. This way the\n * Tensor class can have chaining methods that call into ops via the op\n * handler.\n */\nexport function setOpHandler(handler) {\n opHandler = handler;\n}\n/**\n * Sets the deprecation warning function to be used by this file. This way the\n * Tensor class can be a leaf but still use the environment.\n */\nexport function setDeprecationWarningFn(fn) {\n deprecationWarningFn = fn;\n}\n/**\n * A `tf.Tensor` object represents an immutable, multidimensional array of\n * numbers that has a shape and a data type.\n *\n * See `tf.tensor` for details on how to create a `tf.Tensor`.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nexport class Tensor {\n constructor(shape, dtype, dataId, id) {\n /** Whether this tensor has been globally kept. */\n this.kept = false;\n this.isDisposedInternal = false;\n this.shape = shape.slice();\n this.dtype = dtype || 'float32';\n this.size = util.sizeFromShape(shape);\n this.strides = computeStrides(shape);\n this.dataId = dataId;\n this.id = id;\n this.rankType = (this.rank < 5 ? this.rank.toString() : 'higher');\n }\n get rank() {\n return this.shape.length;\n }\n /**\n * Returns a promise of `tf.TensorBuffer` that holds the underlying data.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n async buffer() {\n const vals = await this.data();\n return opHandler.buffer(this.shape, this.dtype, vals);\n }\n /**\n * Returns a `tf.TensorBuffer` that holds the underlying data.\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n bufferSync() {\n return opHandler.buffer(this.shape, this.dtype, this.dataSync());\n }\n /**\n * Returns the tensor data as a nested array. 
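/*
 * A minimal sketch of the mutable TensorBuffer defined above, created through
 * the public `tf.buffer` helper (assumes `@tensorflow/tfjs`).
 */
import * as tf from '@tensorflow/tfjs';

const buf = tf.buffer([2, 2], 'float32');   // zero-filled 2x2 buffer
buf.set(5, 0, 1);                           // write 5 at row 0, column 1
buf.set(7, 1, 0);                           // write 7 at row 1, column 0
buf.toTensor().print();                     // [[0, 5], [7, 0]], now immutable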
The transfer of data is done\n * asynchronously.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n async array() {\n const vals = await this.data();\n return toNestedArray(this.shape, vals);\n }\n /**\n * Returns the tensor data as a nested array. The transfer of data is done\n * synchronously.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n arraySync() {\n return toNestedArray(this.shape, this.dataSync());\n }\n /**\n * Asynchronously downloads the values from the `tf.Tensor`. Returns a\n * promise of `TypedArray` that resolves when the computation has finished.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n async data() {\n this.throwIfDisposed();\n const data = trackerFn().read(this.dataId);\n if (this.dtype === 'string') {\n const bytes = await data;\n try {\n return bytes.map(b => util.decodeString(b));\n }\n catch (_a) {\n throw new Error('Failed to decode the string bytes into utf-8. ' +\n 'To get the original bytes, call tensor.bytes().');\n }\n }\n return data;\n }\n /**\n * Synchronously downloads the values from the `tf.Tensor`. This blocks the\n * UI thread until the values are ready, which can cause performance issues.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n dataSync() {\n this.throwIfDisposed();\n const data = trackerFn().readSync(this.dataId);\n if (this.dtype === 'string') {\n try {\n return data.map(b => util.decodeString(b));\n }\n catch (_a) {\n throw new Error('Failed to decode the string bytes into utf-8. ' +\n 'To get the original bytes, call tensor.bytes().');\n }\n }\n return data;\n }\n /** Returns the underlying bytes of the tensor's data. */\n async bytes() {\n this.throwIfDisposed();\n const data = await trackerFn().read(this.dataId);\n if (this.dtype === 'string') {\n return data;\n }\n else {\n return new Uint8Array(data.buffer);\n }\n }\n /**\n * Disposes `tf.Tensor` from memory.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n dispose() {\n if (this.isDisposed) {\n return;\n }\n trackerFn().disposeTensor(this);\n this.isDisposedInternal = true;\n }\n get isDisposed() {\n return this.isDisposedInternal;\n }\n throwIfDisposed() {\n if (this.isDisposed) {\n throw new Error(`Tensor is disposed.`);\n }\n }\n /**\n * Prints the `tf.Tensor`. See `tf.print` for details.\n *\n * @param verbose Whether to print verbose information about the tensor,\n * including dtype and size.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n print(verbose = false) {\n return opHandler.print(this, verbose);\n }\n /**\n * Returns a copy of the tensor. See `tf.clone` for details.\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n clone() {\n this.throwIfDisposed();\n return opHandler.clone(this);\n }\n /**\n * Returns a human-readable description of the tensor. 
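/*
 * Sketch of the asynchronous vs synchronous download paths documented above
 * (assumes `@tensorflow/tfjs`).
 */
import * as tf from '@tensorflow/tfjs';

async function readBack() {
  const t = tf.tensor2d([[1, 2], [3, 4]]);
  const flat = await t.data();       // TypedArray, non-blocking
  const nested = await t.array();    // nested JS array, non-blocking
  const sync = t.dataSync();         // TypedArray, but blocks the UI thread
  console.log(flat, nested, sync);
  t.dispose();                       // release backend memory when done
}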
Useful for logging.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n toString(verbose = false) {\n const vals = this.dataSync();\n return tensorToString(vals, this.shape, this.dtype, verbose);\n }\n cast(dtype) {\n this.throwIfDisposed();\n return opHandler.cast(this, dtype);\n }\n variable(trainable = true, name, dtype) {\n this.throwIfDisposed();\n return trackerFn().makeVariable(this, trainable, name, dtype);\n }\n}\nObject.defineProperty(Tensor, Symbol.hasInstance, {\n value: (instance) => {\n // Implementation note: we should use properties of the object that will be\n // defined before the constructor body has finished executing (methods).\n // This is because when this code is transpiled by babel, babel will call\n // classCallCheck before the constructor body is run.\n // See https://github.com/tensorflow/tfjs/issues/3384 for backstory.\n return !!instance && instance.data != null && instance.dataSync != null &&\n instance.throwIfDisposed != null;\n }\n});\n/**\n * A mutable `tf.Tensor`, useful for persisting state, e.g. for training.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nexport class Variable extends Tensor {\n constructor(initialValue, trainable, name, tensorId) {\n super(initialValue.shape, initialValue.dtype, initialValue.dataId, tensorId);\n this.trainable = trainable;\n this.name = name;\n }\n /**\n * Assign a new `tf.Tensor` to this variable. The new `tf.Tensor` must have\n * the same shape and dtype as the old `tf.Tensor`.\n *\n * @param newValue New tensor to be assigned to this variable.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\n assign(newValue) {\n if (newValue.dtype !== this.dtype) {\n throw new Error(`dtype of the new value (${newValue.dtype}) and ` +\n `previous value (${this.dtype}) must match`);\n }\n if (!util.arraysEqual(newValue.shape, this.shape)) {\n throw new Error(`shape of the new value (${newValue.shape}) and ` +\n `previous value (${this.shape}) must match`);\n }\n trackerFn().disposeTensor(this);\n this.dataId = newValue.dataId;\n trackerFn().incRef(this, null /* backend */);\n }\n dispose() {\n trackerFn().disposeVariable(this);\n this.isDisposedInternal = true;\n }\n}\nObject.defineProperty(Variable, Symbol.hasInstance, {\n value: (instance) => {\n return instance instanceof Tensor && instance.assign != null &&\n instance.assign instanceof Function;\n }\n});\n//# sourceMappingURL=tensor.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport var Rank;\n(function (Rank) {\n Rank[\"R0\"] = \"R0\";\n Rank[\"R1\"] = \"R1\";\n Rank[\"R2\"] = \"R2\";\n Rank[\"R3\"] = \"R3\";\n Rank[\"R4\"] = \"R4\";\n Rank[\"R5\"] = \"R5\";\n Rank[\"R6\"] = \"R6\";\n})(Rank || (Rank = {}));\n// Looks for upcasting types. 
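/*
 * Sketch of the mutable Variable class above, created through `tf.variable`
 * (assumes `@tensorflow/tfjs`).
 */
import * as tf from '@tensorflow/tfjs';

const v = tf.variable(tf.tensor1d([1, 2, 3]));  // trainable by default
v.assign(tf.tensor1d([4, 5, 6]));               // new value must match shape and dtype
v.print();                                      // [4, 5, 6]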
Used, for example, in operations with mixed dtype\n// inputs.\nvar UpcastInt32AndMap;\n(function (UpcastInt32AndMap) {\n UpcastInt32AndMap[\"float32\"] = \"float32\";\n UpcastInt32AndMap[\"int32\"] = \"int32\";\n UpcastInt32AndMap[\"bool\"] = \"int32\";\n UpcastInt32AndMap[\"complex64\"] = \"complex64\";\n})(UpcastInt32AndMap || (UpcastInt32AndMap = {}));\nvar UpcastBoolAndMap;\n(function (UpcastBoolAndMap) {\n UpcastBoolAndMap[\"float32\"] = \"float32\";\n UpcastBoolAndMap[\"int32\"] = \"int32\";\n UpcastBoolAndMap[\"bool\"] = \"bool\";\n UpcastBoolAndMap[\"complex64\"] = \"complex64\";\n})(UpcastBoolAndMap || (UpcastBoolAndMap = {}));\nvar UpcastFloat32AndMap;\n(function (UpcastFloat32AndMap) {\n UpcastFloat32AndMap[\"float32\"] = \"float32\";\n UpcastFloat32AndMap[\"int32\"] = \"float32\";\n UpcastFloat32AndMap[\"bool\"] = \"float32\";\n UpcastFloat32AndMap[\"complex64\"] = \"complex64\";\n})(UpcastFloat32AndMap || (UpcastFloat32AndMap = {}));\nvar UpcastComplex64AndMap;\n(function (UpcastComplex64AndMap) {\n UpcastComplex64AndMap[\"float32\"] = \"complex64\";\n UpcastComplex64AndMap[\"int32\"] = \"complex64\";\n UpcastComplex64AndMap[\"bool\"] = \"complex64\";\n UpcastComplex64AndMap[\"complex64\"] = \"complex64\";\n})(UpcastComplex64AndMap || (UpcastComplex64AndMap = {}));\nconst upcastTypeMap = {\n 'float32': UpcastFloat32AndMap,\n 'int32': UpcastInt32AndMap,\n 'bool': UpcastBoolAndMap,\n 'complex64': UpcastComplex64AndMap\n};\nexport function upcastType(typeA, typeB) {\n if (typeA === 'string' || typeB === 'string') {\n if (typeA === 'string' && typeB === 'string') {\n return 'string';\n }\n throw new Error(`Can not upcast ${typeA} with ${typeB}`);\n }\n return upcastTypeMap[typeA][typeB];\n}\n/** Returns the output type after summation. */\nexport function sumOutType(type) {\n return upcastType(type, 'int32');\n}\n//# sourceMappingURL=types.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Tensor } from './tensor';\nimport { upcastType } from './types';\nimport { assert } from './util';\nexport function makeTypesMatch(a, b) {\n if (a.dtype === b.dtype) {\n return [a, b];\n }\n const dtype = upcastType(a.dtype, b.dtype);\n return [a.cast(dtype), b.cast(dtype)];\n}\nexport function assertTypesMatch(a, b) {\n assert(a.dtype === b.dtype, () => `The dtypes of the first(${a.dtype}) and` +\n ` second(${b.dtype}) input must match`);\n}\nexport function isTensorInList(tensor, tensorList) {\n return tensorList.some(x => x.id === tensor.id);\n}\n/**\n * Extracts any `Tensor`s found within the provided object.\n *\n * @param container an object that may be a `Tensor` or may directly contain\n * `Tensor`s, such as a `Tensor[]` or `{key: Tensor, ...}`. In general it\n * is safe to pass any object here, except that `Promise`s are not\n * supported.\n * @returns An array of `Tensors` found within the passed object. 
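/*
 * Sketch of the dtype upcasting rules encoded above: mixing int32 and float32
 * inputs yields a float32 result (assumes `@tensorflow/tfjs`).
 */
import * as tf from '@tensorflow/tfjs';

const a = tf.tensor1d([1, 2], 'int32');
const b = tf.tensor1d([0.5, 0.5], 'float32');
console.log(a.add(b).dtype);   // 'float32' -- int32 is upcast to float32 before the op runs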
If the\n * argument is simply a `Tensor', a list containing that `Tensor` is\n * returned. If the object is not a `Tensor` or does not\n * contain `Tensors`, an empty list is returned.\n */\nexport function getTensorsInContainer(result) {\n const list = [];\n const seen = new Set();\n walkTensorContainer(result, list, seen);\n return list;\n}\nfunction walkTensorContainer(container, list, seen) {\n if (container == null) {\n return;\n }\n if (container instanceof Tensor) {\n list.push(container);\n return;\n }\n if (!isIterable(container)) {\n return;\n }\n // Iteration over keys works also for arrays.\n const iterable = container;\n for (const k in iterable) {\n const val = iterable[k];\n if (!seen.has(val)) {\n seen.add(val);\n walkTensorContainer(val, list, seen);\n }\n }\n}\n// tslint:disable-next-line:no-any\nfunction isIterable(obj) {\n return Array.isArray(obj) || typeof obj === 'object';\n}\n//# sourceMappingURL=tensor_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { KernelBackend } from './backends/backend';\nimport { Environment, setEnvironmentGlobal } from './environment';\nimport { getGlobalNamespace } from './global_util';\nimport { Add, Cast } from './kernel_names';\nimport { getGradient, getKernel, getKernelsForBackend } from './kernel_registry';\nimport { Profiler } from './profiler';\nimport { backpropagateGradients, getFilteredNodesXToY } from './tape';\nimport { setTensorTracker, Tensor, Variable } from './tensor';\nimport { getTensorsInContainer } from './tensor_util';\nimport * as util from './util';\nimport { bytesFromStringArray, makeOnesTypedArray, now, sizeFromShape } from './util';\nclass EngineState {\n constructor() {\n // Public since optimizers will use it.\n this.registeredVariables = {};\n this.nextTapeNodeId = 0;\n this.numBytes = 0;\n this.numTensors = 0;\n this.numStringTensors = 0;\n this.numDataBuffers = 0;\n // Number of nested tf.grad() statements when computing higher-order\n // gradients. E.g. `1` for first-order gradients and `2` for second-order\n // gradients. Used to track if the tape should be removed after a backprop.\n this.gradientDepth = 0;\n // Number of nested kernel calls. When kernel depth is greater than 1, we turn\n // off the tape.\n this.kernelDepth = 0;\n this.scopeStack = [];\n /**\n * Keeps track of the number of data moves during a kernel execution. 
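/*
 * Sketch of container-aware disposal, which walks nested structures with the
 * getTensorsInContainer helper above (assumes `@tensorflow/tfjs`).
 */
import * as tf from '@tensorflow/tfjs';

const state = { weights: tf.ones([2, 2]), history: [tf.scalar(1), tf.scalar(2)] };
tf.dispose(state);                      // finds and disposes every tensor inside the container
console.log(tf.memory().numTensors);    // the three tensors above are gone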
We\n * maintain a stack since kernels can call other kernels, recursively.\n */\n this.numDataMovesStack = [];\n this.nextScopeId = 0;\n this.tensorInfo = new WeakMap();\n this.profiling = false;\n this.activeProfile = { newBytes: 0, newTensors: 0, peakBytes: 0, kernels: [], result: null };\n }\n dispose() {\n for (const variableName in this.registeredVariables) {\n this.registeredVariables[variableName].dispose();\n }\n }\n}\nexport class Engine {\n constructor(ENV) {\n this.ENV = ENV;\n this.registry = {};\n this.registryFactory = {};\n this.pendingBackendInitId = 0;\n this.state = new EngineState();\n }\n async ready() {\n if (this.pendingBackendInit != null) {\n return this.pendingBackendInit.then(() => { });\n }\n if (this.backendInstance != null) {\n return;\n }\n const sortedBackends = this.getSortedBackends();\n for (let i = 0; i < sortedBackends.length; i++) {\n const backendName = sortedBackends[i];\n const success = await this.initializeBackend(backendName).success;\n if (success) {\n await this.setBackend(backendName);\n return;\n }\n }\n throw new Error(`Could not initialize any backends, all backend initializations ` +\n `failed.`);\n }\n get backend() {\n if (this.pendingBackendInit != null) {\n throw new Error(`Backend '${this.backendName}' has not yet been initialized. Make ` +\n `sure to await tf.ready() or await tf.setBackend() before calling ` +\n `other methods`);\n }\n if (this.backendInstance == null) {\n const { name, asyncInit } = this.initializeBackendsAndReturnBest();\n if (asyncInit) {\n throw new Error(`The highest priority backend '${name}' has not yet been ` +\n `initialized. Make sure to await tf.ready() or ` +\n `await tf.setBackend() before calling other methods`);\n }\n this.setBackend(name);\n }\n return this.backendInstance;\n }\n backendNames() {\n return Object.keys(this.registryFactory);\n }\n findBackend(backendName) {\n if (!(backendName in this.registry)) {\n // If the backend hasn't been initialized but we have a registry entry for\n // it, initialize it and return it.\n if (backendName in this.registryFactory) {\n const { asyncInit } = this.initializeBackend(backendName);\n if (asyncInit) {\n // Backend is not ready yet.\n return null;\n }\n }\n else {\n return null;\n }\n }\n return this.registry[backendName];\n }\n findBackendFactory(backendName) {\n if (!(backendName in this.registryFactory)) {\n return null;\n }\n return this.registryFactory[backendName].factory;\n }\n registerBackend(backendName, factory, priority = 1) {\n if (backendName in this.registryFactory) {\n console.warn(`${backendName} backend was already registered. ` +\n `Reusing existing backend factory.`);\n return false;\n }\n this.registryFactory[backendName] = { factory, priority };\n return true;\n }\n async setBackend(backendName) {\n if (this.registryFactory[backendName] == null) {\n throw new Error(`Backend name '${backendName}' not found in registry`);\n }\n this.backendName = backendName;\n if (this.registry[backendName] == null) {\n this.backendInstance = null;\n const { success, asyncInit } = this.initializeBackend(backendName);\n const result = asyncInit ? 
await success : success;\n if (!result) {\n return false;\n }\n }\n this.backendInstance = this.registry[backendName];\n this.setupRegisteredKernels();\n // Reset the profiler.\n this.profiler = new Profiler(this.backendInstance);\n return true;\n }\n setupRegisteredKernels() {\n const kernels = getKernelsForBackend(this.backendName);\n kernels.forEach(kernel => {\n if (kernel.setupFunc != null) {\n kernel.setupFunc(this.backendInstance);\n }\n });\n }\n disposeRegisteredKernels(backendName) {\n const kernels = getKernelsForBackend(backendName);\n kernels.forEach(kernel => {\n if (kernel.disposeFunc != null) {\n kernel.disposeFunc(this.registry[backendName]);\n }\n });\n }\n /**\n * Initializes a backend by looking up the backend name in the factory\n * registry and calling the factory method. Returns a boolean representing\n * whether the initialization of the backend suceeded. Throws an error if\n * there is no backend in the factory registry.\n */\n initializeBackend(backendName) {\n const registryFactoryEntry = this.registryFactory[backendName];\n if (registryFactoryEntry == null) {\n throw new Error(`Cannot initialize backend ${backendName}, no registration found.`);\n }\n try {\n const backend = registryFactoryEntry.factory();\n /* Test if the factory returns a promise.\n Done in a more liberal way than\n previous 'Promise.resolve(backend)===backend'\n as we needed to account for custom Promise\n implementations (e.g. Angular) */\n if (backend && !(backend instanceof KernelBackend)\n && typeof backend.then === 'function') {\n const promiseId = ++this.pendingBackendInitId;\n const success = backend\n .then(backendInstance => {\n // Outdated promise. Another backend was set in the meantime.\n if (promiseId < this.pendingBackendInitId) {\n return false;\n }\n this.registry[backendName] = backendInstance;\n this.pendingBackendInit = null;\n return true;\n })\n .catch(err => {\n // Outdated promise. Another backend was set in the meantime.\n if (promiseId < this.pendingBackendInitId) {\n return false;\n }\n this.pendingBackendInit = null;\n console.warn(`Initialization of backend ${backendName} failed`);\n console.warn(err.stack || err.message);\n return false;\n });\n this.pendingBackendInit = success;\n return { success, asyncInit: true };\n }\n else {\n this.registry[backendName] = backend;\n return { success: true, asyncInit: false };\n }\n }\n catch (err) {\n console.warn(`Initialization of backend ${backendName} failed`);\n console.warn(err.stack || err.message);\n return { success: false, asyncInit: false };\n }\n }\n removeBackend(backendName) {\n if (!(backendName in this.registryFactory)) {\n throw new Error(`${backendName} backend not found in registry`);\n }\n if (this.backendName === backendName && this.pendingBackendInit != null) {\n // There is a pending promise of the backend we want to remove. 
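/*
 * Sketch of backend selection against the registry logic above (assumes
 * `@tensorflow/tfjs`, which registers 'webgl' and 'cpu' factories).
 */
import * as tf from '@tensorflow/tfjs';

async function pickBackend() {
  const ok = await tf.setBackend('cpu');  // resolves to false if initialization fails
  await tf.ready();                       // waits for any asynchronous backend initialization
  console.log(ok, tf.getBackend());       // true 'cpu'
}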
Make it\n // obsolete.\n this.pendingBackendInitId++;\n }\n if (backendName in this.registry) {\n this.disposeRegisteredKernels(backendName);\n this.registry[backendName].dispose();\n delete this.registry[backendName];\n }\n delete this.registryFactory[backendName];\n // Unset the backend if it is active.\n if (this.backendName === backendName) {\n this.pendingBackendInit = null;\n this.backendName = null;\n this.backendInstance = null;\n }\n }\n getSortedBackends() {\n if (Object.keys(this.registryFactory).length === 0) {\n throw new Error('No backend found in registry.');\n }\n return Object.keys(this.registryFactory).sort((a, b) => {\n // Highest priority comes first.\n return this.registryFactory[b].priority -\n this.registryFactory[a].priority;\n });\n }\n initializeBackendsAndReturnBest() {\n const sortedBackends = this.getSortedBackends();\n for (let i = 0; i < sortedBackends.length; i++) {\n const backendName = sortedBackends[i];\n const { success, asyncInit } = this.initializeBackend(backendName);\n if (asyncInit || success) {\n return { name: backendName, asyncInit };\n }\n }\n throw new Error(`Could not initialize any backends, all backend initializations ` +\n `failed.`);\n }\n moveData(backend, dataId) {\n const info = this.state.tensorInfo.get(dataId);\n const srcBackend = info.backend;\n const values = this.readSync(dataId);\n // Delete the tensor from the old backend and move it to the new\n // backend.\n srcBackend.disposeData(dataId);\n info.backend = backend;\n backend.move(dataId, values, info.shape, info.dtype);\n if (this.shouldCheckForMemLeaks()) {\n // Track the number of moves during a kernel execution to correctly\n // detect memory leaks.\n this.state.numDataMovesStack[this.state.numDataMovesStack.length - 1]++;\n }\n }\n tidy(nameOrFn, fn) {\n let name = null;\n if (fn == null) {\n // Called with only 1 argument.\n if (typeof nameOrFn !== 'function') {\n throw new Error('Please provide a function to tidy()');\n }\n fn = nameOrFn;\n }\n else {\n // Called with 2 arguments.\n if (typeof nameOrFn !== 'string' && !(nameOrFn instanceof String)) {\n throw new Error('When calling with two arguments, the first argument ' +\n 'to tidy() must be a string');\n }\n if (typeof fn !== 'function') {\n throw new Error('When calling with two arguments, the 2nd argument ' +\n 'to tidy() must be a function');\n }\n name = nameOrFn;\n // TODO(nsthorat,smilkov): Do operation logging and performance\n // profiling.\n }\n let result;\n return this.scopedRun(() => this.startScope(name), () => this.endScope(result), () => {\n result = fn();\n if (result instanceof Promise) {\n console.error('Cannot return a Promise inside of tidy.');\n }\n return result;\n });\n }\n scopedRun(start, end, f) {\n start();\n try {\n const res = f();\n end();\n return res;\n }\n catch (ex) {\n end();\n throw ex;\n }\n }\n nextTensorId() {\n return Engine.nextTensorId++;\n }\n nextVariableId() {\n return Engine.nextVariableId++;\n }\n /**\n * This method is called instead of the public-facing tensor.clone() when\n * saving a tensor for backwards pass. 
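/*
 * Sketch of scope-based memory management implemented by tidy/startScope/
 * endScope above (assumes `@tensorflow/tfjs`).
 */
import * as tf from '@tensorflow/tfjs';

const result = tf.tidy(() => {
  const a = tf.tensor1d([1, 2, 3]);
  const b = a.square();            // intermediate tensor, disposed when tidy ends
  return b.sum();                  // the returned tensor survives the scope
});
result.print();                    // 14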
It makes sure to add the clone\n * operation to the tape regardless of being called inside a kernel\n * execution.\n *\n * This method will go away once all kernels are modularized since we won't\n * need to turn off the tape inside runKernel().\n */\n clone(x) {\n const y = this.makeTensorFromDataId(x.dataId, x.shape, x.dtype);\n const inputs = { x };\n const grad = (dy) => ({\n x: () => {\n const dtype = 'float32';\n const gradInputs = { x: dy };\n const attrs = { dtype };\n return ENGINE.runKernelFunc(backend => backend.cast(dy, dtype), gradInputs, null /* grad */, Cast, attrs);\n }\n });\n const saved = [];\n this.addTapeNode(this.state.activeScope.name, inputs, [y], grad, saved, {});\n return y;\n }\n /**\n * Execute a kernel with the given name and return the output tensor.\n *\n * @param kernelName The name of the kernel to execute.\n * @param inputs A map of input names to tensors.\n * @param attrs A map of attribute names to their values. An attribute is a\n * primitive (non-tensor) input to the kernel.\n * @param inputsToSave A list of tensors, inputs to save for the backprop\n * computation.\n * @param outputsToSave A list of booleans, specifying which output to save\n * for the backprop computation. These are booleans since the output\n * tensors are not visible to the user.\n */\n runKernel(kernelName, inputs, attrs, inputsToSave, outputsToSave) {\n const forwardFunc = null;\n const backwardsFunc = null;\n // Call runKernel as a stop-gap until we modularize all kernels.\n // Once we modularize all kernels, we will remove the existing\n // `runKernelFunc`.\n return this.runKernelFunc(forwardFunc, inputs, backwardsFunc, kernelName, attrs, inputsToSave, outputsToSave);\n }\n shouldCheckForMemLeaks() {\n return this.ENV.getBool('IS_TEST');\n }\n checkKernelForMemLeak(kernelName, numDataIdsBefore, outInfos) {\n const numDataIdsAfter = this.backend.numDataIds();\n // Count the number of data ids associated with the result of the kernel.\n let numOutputDataIds = 0;\n outInfos.forEach(info => {\n // Complex numbers allocate 3 data ids, one for 'real', one for\n // 'imaginary', and one for the container that holds the former two.\n numOutputDataIds += (info.dtype === 'complex64' ? 3 : 1);\n });\n // Account for the number of moves during kernel execution. A \"data move\"\n // can happen in the middle of a kernel execution, placing a new (key,value)\n // pair in the data storage. Since data moves have net zero effect (we\n // always remove the data from the old backend), we have to cancel them out\n // when detecting memory leaks.\n const numMoves = this.state.numDataMovesStack[this.state.numDataMovesStack.length - 1];\n const dataIdsLeaked = numDataIdsAfter - numDataIdsBefore - numOutputDataIds - numMoves;\n if (dataIdsLeaked > 0) {\n throw new Error(`Backend '${this.backendName}' has an internal memory leak ` +\n `(${dataIdsLeaked} data ids) after running '${kernelName}'`);\n }\n }\n /**\n * @deprecated Use `runKernel` for newly added kernels. Keep using this method\n * only for kernels that are not yet fully modularized.\n */\n runKernelFunc(forwardFunc, inputs, backwardsFunc, kernelName, attrs, inputsToSave, outputsToSave) {\n let outputs;\n let saved = [];\n const isTapeOn = this.isTapeOn();\n if (kernelName == null) {\n kernelName =\n this.state.activeScope != null ? 
this.state.activeScope.name : '';\n }\n const startingBytecount = this.state.numBytes;\n const startingNumTensors = this.state.numTensors;\n if (this.shouldCheckForMemLeaks()) {\n this.state.numDataMovesStack.push(0);\n }\n let kernelFunc;\n const kernel = getKernel(kernelName, this.backendName);\n let out;\n if (kernel != null) {\n kernelFunc = () => {\n const numDataIdsBefore = this.backend.numDataIds();\n out = kernel.kernelFunc({ inputs, attrs, backend: this.backend });\n const outInfos = Array.isArray(out) ? out : [out];\n if (this.shouldCheckForMemLeaks()) {\n this.checkKernelForMemLeak(kernelName, numDataIdsBefore, outInfos);\n }\n const outTensors = outInfos.map(({ dataId, shape, dtype }) => this.makeTensorFromDataId(dataId, shape, dtype));\n // Save the inputs and outputs.\n // Do not save unless we are recording to the tape. Otherwise it would\n // cause a mem leak since we would never run backprop, which disposes\n // the kept tensors.\n if (isTapeOn) {\n let tensorsToSave = this.getTensorsForGradient(kernelName, inputs, outTensors);\n if (tensorsToSave == null) {\n // Fallback for ops that call runKernelFunc and pass in\n // inputsToSave and outputsToSave. Currently this is the set of ops\n // with kernel support in the WASM backend. Once those ops and\n // respective gradients are modularised we can remove this path.\n if (outputsToSave == null) {\n outputsToSave = [];\n }\n const outsToSave = outTensors.filter((_, i) => outputsToSave[i]);\n tensorsToSave = (inputsToSave || []).slice().concat(outsToSave);\n }\n saved = this.saveTensorsForBackwardMode(tensorsToSave);\n }\n return outTensors;\n };\n }\n else {\n const saveFunc = (tensors) => {\n // Do not save unless we are recording to the tape. Otherwise it would\n // cause a mem leak since we would never run backprop, which disposes\n // the kept tensors.\n if (!isTapeOn) {\n return;\n }\n saved = tensors.map(tensor => this.keep(this.clone(tensor)));\n };\n kernelFunc = () => {\n const numDataIdsBefore = this.backend.numDataIds();\n out = this.tidy(() => forwardFunc(this.backend, saveFunc));\n const outs = (Array.isArray(out) ? out : [out]);\n if (this.shouldCheckForMemLeaks()) {\n this.checkKernelForMemLeak(kernelName, numDataIdsBefore, outs);\n }\n return outs;\n };\n }\n // Stop recording to a tape when running a kernel.\n let kernelProfile;\n this.scopedRun(() => this.state.kernelDepth++, () => this.state.kernelDepth--, () => {\n if (!this.ENV.getBool('DEBUG') && !this.state.profiling) {\n outputs = kernelFunc();\n }\n else {\n kernelProfile = this.profiler.profileKernel(kernelName, inputs, () => kernelFunc());\n if (this.ENV.getBool('DEBUG')) {\n this.profiler.logKernelProfile(kernelProfile);\n }\n outputs = kernelProfile.outputs;\n }\n });\n if (isTapeOn) {\n this.addTapeNode(kernelName, inputs, outputs, backwardsFunc, saved, attrs);\n }\n if (this.state.profiling) {\n this.state.activeProfile.kernels.push({\n name: kernelName,\n bytesAdded: this.state.numBytes - startingBytecount,\n totalBytesSnapshot: this.state.numBytes,\n tensorsAdded: this.state.numTensors - startingNumTensors,\n totalTensorsSnapshot: this.state.numTensors,\n inputShapes: Object.keys(inputs).map(key => inputs[key] != null ? inputs[key].shape : null),\n outputShapes: outputs.map(item => item.shape),\n kernelTimeMs: kernelProfile.timeMs,\n extraInfo: kernelProfile.extraInfo\n });\n }\n return (Array.isArray(out) ? 
outputs : outputs[0]);\n }\n /**\n * Saves tensors used in forward mode for use in backward mode.\n *\n * @param tensors the list of tensors to save.\n */\n saveTensorsForBackwardMode(tensors) {\n const saved = tensors.map(tensor => this.keep(this.clone(tensor)));\n return saved;\n }\n /**\n * Returns a list of tensors to save for a given gradient calculation.\n *\n * Returns undefined if their is no registered gradient for this kernel in the\n * gradient registry.\n *\n * @param kernelName name of kernel to look up gradient for.\n * @param inputs a map of input tensors.\n * @param outputs an array of output tensors from forward mode of kernel.\n */\n getTensorsForGradient(kernelName, inputs, outputs) {\n const gradConfig = getGradient(kernelName);\n if (gradConfig != null) {\n const inputsToSave = gradConfig.inputsToSave || [];\n const outputsToSave = gradConfig.outputsToSave || [];\n // If saveAllInputs is true, all inputs will be saved. Otherwise, inputs\n // specified in inputsToSave will be saved.\n let inputTensorsToSave;\n if (gradConfig.saveAllInputs) {\n util.assert(Array.isArray(inputs), () => 'saveAllInputs is true, expected inputs to be an array.');\n inputTensorsToSave = Object.keys(inputs).map((key) => inputs[key]);\n }\n else {\n inputTensorsToSave = inputsToSave.map((inputName) => inputs[inputName]);\n }\n const outputTensorsToSave = outputs.filter((_, i) => outputsToSave[i]);\n return inputTensorsToSave.concat(outputTensorsToSave);\n }\n // TODO(yassogba) throw exception here once all runkernelFunc calls with\n // inputsToSave/outputsToSave are removed\n return null;\n }\n /**\n * Internal method used by public APIs for tensor creation. Makes a new\n * tensor with the provided shape, dtype and values. It always\n * creates a new data id and writes the values to the underlying backend.\n */\n makeTensor(values, shape, dtype, backend) {\n if (values == null) {\n throw new Error('Values passed to engine.makeTensor() are null');\n }\n dtype = dtype || 'float32';\n backend = backend || this.backend;\n let backendVals = values;\n if (dtype === 'string' && util.isString(values[0])) {\n backendVals = values.map(d => util.encodeString(d));\n }\n const dataId = backend.write(backendVals, shape, dtype);\n const t = new Tensor(shape, dtype, dataId, this.nextTensorId());\n this.incRef(t, backend);\n // Count bytes for string tensors.\n if (dtype === 'string') {\n const info = this.state.tensorInfo.get(dataId);\n const newBytes = bytesFromStringArray(backendVals);\n this.state.numBytes += newBytes - info.bytes;\n info.bytes = newBytes;\n }\n return t;\n }\n /**\n * Internal method used by backends. Makes a new tensor\n * that is a wrapper around an existing data id. 
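/*
 * makeTensor above is internal; user code goes through `tf.tensor`, which
 * accepts an explicit shape and dtype (assumes `@tensorflow/tfjs`).
 */
import * as tf from '@tensorflow/tfjs';

const ints = tf.tensor([1, 2, 3], [3], 'int32');   // values, shape, dtype
const words = tf.tensor(['a', 'bc']);              // string tensors are UTF-8 encoded on write
console.log(ints.dtype, words.dtype);              // 'int32' 'string'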
It doesn't create\n * a new data id, only increments the ref count used in memory tracking.\n */\n makeTensorFromDataId(dataId, shape, dtype, backend) {\n dtype = dtype || 'float32';\n const t = new Tensor(shape, dtype, dataId, this.nextTensorId());\n this.incRef(t, backend);\n return t;\n }\n makeVariable(initialValue, trainable = true, name, dtype) {\n name = name || this.nextVariableId().toString();\n if (dtype != null && dtype !== initialValue.dtype) {\n initialValue = initialValue.cast(dtype);\n }\n const v = new Variable(initialValue, trainable, name, this.nextTensorId());\n if (this.state.registeredVariables[v.name] != null) {\n throw new Error(`Variable with name ${v.name} was already registered`);\n }\n this.state.registeredVariables[v.name] = v;\n this.incRef(v, this.backend);\n return v;\n }\n incRef(a, backend) {\n const refCount = this.state.tensorInfo.has(a.dataId) ?\n this.state.tensorInfo.get(a.dataId).refCount :\n 0;\n this.state.numTensors++;\n if (a.dtype === 'string') {\n this.state.numStringTensors++;\n }\n if (refCount === 0) {\n this.state.numDataBuffers++;\n // Bytes for complex numbers are counted by their components. Bytes for\n // string tensors are counted when writing values.\n let bytes = 0;\n if (a.dtype !== 'complex64' && a.dtype !== 'string') {\n bytes = a.size * util.bytesPerElement(a.dtype);\n }\n this.state.tensorInfo.set(a.dataId, {\n backend: backend || this.backend,\n dtype: a.dtype,\n shape: a.shape,\n bytes,\n refCount: 0\n });\n this.state.numBytes += bytes;\n }\n this.state.tensorInfo.get(a.dataId).refCount++;\n if (!(a instanceof Variable)) {\n this.track(a);\n }\n }\n disposeTensor(a) {\n if (!this.state.tensorInfo.has(a.dataId)) {\n return;\n }\n this.state.numTensors--;\n if (a.dtype === 'string') {\n this.state.numStringTensors--;\n }\n const info = this.state.tensorInfo.get(a.dataId);\n const refCount = info.refCount;\n if (refCount <= 1) {\n // Don't count bytes for complex numbers as they are counted by their\n // components.\n if (a.dtype !== 'complex64') {\n this.state.numBytes -= info.bytes;\n }\n this.state.numDataBuffers--;\n info.backend.disposeData(a.dataId);\n this.state.tensorInfo.delete(a.dataId);\n }\n else {\n this.state.tensorInfo.get(a.dataId).refCount--;\n }\n // TODO(nsthorat): Construct an error and save the stack trace for\n // debugging when in debug mode. 
Creating a stack trace is too expensive\n // to do unconditionally.\n }\n disposeVariables() {\n for (const varName in this.state.registeredVariables) {\n const v = this.state.registeredVariables[varName];\n this.disposeVariable(v);\n }\n }\n disposeVariable(v) {\n this.disposeTensor(v);\n if (this.state.registeredVariables[v.name] != null) {\n delete this.state.registeredVariables[v.name];\n }\n }\n memory() {\n const info = this.backend.memory();\n info.numTensors = this.state.numTensors;\n info.numDataBuffers = this.state.numDataBuffers;\n info.numBytes = this.state.numBytes;\n if (this.state.numStringTensors > 0) {\n info.unreliable = true;\n if (info.reasons == null) {\n info.reasons = [];\n }\n info.reasons.push('Memory usage by string tensors is approximate ' +\n '(2 bytes per character)');\n }\n return info;\n }\n async profile(query) {\n this.state.profiling = true;\n const startBytes = this.state.numBytes;\n const startNumTensors = this.state.numTensors;\n this.state.activeProfile.kernels = [];\n this.state.activeProfile.result = await query();\n this.state.profiling = false;\n this.state.activeProfile.peakBytes = Math.max(...this.state.activeProfile.kernels.map(d => d.totalBytesSnapshot));\n this.state.activeProfile.newBytes = this.state.numBytes - startBytes;\n this.state.activeProfile.newTensors =\n this.state.numTensors - startNumTensors;\n for (const kernel of this.state.activeProfile.kernels) {\n kernel.kernelTimeMs = await kernel.kernelTimeMs;\n kernel.extraInfo = await kernel.extraInfo;\n }\n return this.state.activeProfile;\n }\n isTapeOn() {\n return this.state.gradientDepth > 0 && this.state.kernelDepth === 0;\n }\n addTapeNode(kernelName, inputs, outputs, gradientsFunc, saved, attrs) {\n const tapeNode = { id: this.state.nextTapeNodeId++, kernelName, inputs, outputs, saved };\n const gradConfig = getGradient(kernelName);\n if (gradConfig != null) {\n gradientsFunc = gradConfig.gradFunc;\n }\n if (gradientsFunc != null) {\n tapeNode.gradient = (dys) => {\n // TODO(smilkov): To optimize back-prop, pass dys that are not used in\n // the backprop graph to the user as null instead of zeros\n dys = dys.map((dy, i) => {\n if (dy == null) {\n const output = outputs[i];\n const vals = util.makeZerosTypedArray(output.size, output.dtype);\n return this.makeTensor(vals, output.shape, output.dtype);\n }\n return dy;\n });\n // Grad functions of ops with single outputs expect a dy, while ops\n // with multiple outputs expect dys (array of dy).\n return gradientsFunc(dys.length > 1 ? dys : dys[0], saved, attrs);\n };\n }\n this.state.activeTape.push(tapeNode);\n }\n keep(result) {\n result.kept = true;\n return result;\n }\n startTape() {\n if (this.state.gradientDepth === 0) {\n this.state.activeTape = [];\n }\n this.state.gradientDepth++;\n }\n endTape() {\n this.state.gradientDepth--;\n }\n /**\n * Start a scope. Use this with endScope() to achieve the same functionality\n * as scope() without the need for a function closure.\n */\n startScope(name) {\n const scopeInfo = {\n track: [],\n name: 'unnamed scope',\n id: this.state.nextScopeId++\n };\n if (name) {\n scopeInfo.name = name;\n }\n this.state.scopeStack.push(scopeInfo);\n this.state.activeScope = scopeInfo;\n }\n /**\n * End a scope. 
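/*
 * Sketch of the profiling and memory-tracking entry points implemented by
 * Engine.profile and Engine.memory above (assumes `@tensorflow/tfjs`).
 */
import * as tf from '@tensorflow/tfjs';

async function profileMatMul() {
  const a = tf.randomNormal([128, 128]);
  const info = await tf.profile(() => tf.matMul(a, a));
  console.log(info.newBytes, info.newTensors, info.kernels.length);
  console.log(tf.memory());        // current tensor, buffer, and byte counts
}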
Use this with startScope() to achieve the same functionality\n * as scope() without the need for a function closure.\n */\n endScope(result) {\n const tensorsToTrackInParent = getTensorsInContainer(result);\n const tensorsToTrackInParentSet = new Set(tensorsToTrackInParent.map(t => t.id));\n // Dispose the arrays tracked in this scope.\n for (let i = 0; i < this.state.activeScope.track.length; i++) {\n const tensor = this.state.activeScope.track[i];\n if (!tensor.kept && !tensorsToTrackInParentSet.has(tensor.id)) {\n tensor.dispose();\n }\n }\n const oldScope = this.state.scopeStack.pop();\n this.state.activeScope = this.state.scopeStack.length === 0 ?\n null :\n this.state.scopeStack[this.state.scopeStack.length - 1];\n // Track the current result in the parent scope.\n tensorsToTrackInParent.forEach(tensor => {\n // Only track the tensor if was allocated in the inner scope and is not\n // globally kept.\n if (!tensor.kept && tensor.scopeId === oldScope.id) {\n this.track(tensor);\n }\n });\n }\n /**\n * Returns gradients of `f` with respect to each of the `xs`. The gradients\n * returned are of the same length as `xs`, but some might be null if `f`\n * was not a function of that `x`. It also takes optional dy to multiply the\n * gradient, which defaults to `1`.\n */\n gradients(f, xs, dy, allowNoGradients = false) {\n util.assert(xs.length > 0, () => 'gradients() received an empty list of xs.');\n if (dy != null && dy.dtype !== 'float32') {\n throw new Error(`dy must have 'float32' dtype, but has '${dy.dtype}'`);\n }\n const y = this.scopedRun(() => this.startTape(), () => this.endTape(), () => this.tidy('forward', f));\n util.assert(y instanceof Tensor, () => 'The result y returned by f() must be a tensor.');\n // Filter out the nodes that don't connect x => y.\n const filteredTape = getFilteredNodesXToY(this.state.activeTape, xs, y);\n if (!allowNoGradients && filteredTape.length === 0 && xs.length > 0) {\n throw new Error('Cannot compute gradient of y=f(x) with respect to x. Make sure ' +\n 'that the f you passed encloses all operations that lead from x ' +\n 'to y.');\n }\n return this.tidy('backward', () => {\n const accumulatedGradientMap = {};\n accumulatedGradientMap[y.id] = (dy == null) ? ones(y.shape) : dy;\n // Backprop gradients through the filtered nodes.\n backpropagateGradients(accumulatedGradientMap, filteredTape, \n // Pass the tidy function to avoid circular dep with `tape.ts`.\n f => this.tidy(f), \n // Pass an add function to avoide a circular dep with `tape.ts`.\n add);\n const grads = xs.map(x => accumulatedGradientMap[x.id]);\n if (this.state.gradientDepth === 0) {\n // This means that we are not computing higher-order gradients\n // and can clean up the tape.\n this.state.activeTape.forEach(node => {\n for (const tensor of node.saved) {\n tensor.dispose();\n }\n });\n this.state.activeTape = null;\n }\n return { value: y, grads };\n });\n }\n customGrad(f) {\n util.assert(util.isFunction(f), () => 'The f passed in customGrad(f) must be a function.');\n return (...inputs) => {\n util.assert(inputs.every(t => t instanceof Tensor), () => 'The args passed in customGrad(f)(x1, x2,...) 
must all be ' +\n 'tensors');\n let res;\n const inputMap = {};\n inputs.forEach((input, i) => {\n inputMap[i] = input;\n });\n return this.runKernelFunc((_, save) => {\n res = f(...[...inputs, save]);\n util.assert(res.value instanceof Tensor, () => 'The function f passed in customGrad(f) must return an ' +\n 'object where `obj.value` is a tensor');\n util.assert(util.isFunction(res.gradFunc), () => 'The function f passed in customGrad(f) must return an ' +\n 'object where `obj.gradFunc` is a function.');\n return res.value;\n }, inputMap, (dy, saved) => {\n const gradRes = res.gradFunc(dy, saved);\n const grads = Array.isArray(gradRes) ? gradRes : [gradRes];\n util.assert(grads.length === inputs.length, () => 'The function f passed in customGrad(f) must return an ' +\n 'object where `obj.gradFunc` is a function that returns ' +\n 'the same number of tensors as inputs passed to f(...).');\n util.assert(grads.every(t => t instanceof Tensor), () => 'The function f passed in customGrad(f) must return an ' +\n 'object where `obj.gradFunc` is a function that returns ' +\n 'a list of only tensors.');\n const gradMap = {};\n grads.forEach((grad, i) => {\n gradMap[i] = () => grad;\n });\n return gradMap;\n });\n };\n }\n readSync(dataId) {\n // Route the read to the correct backend.\n const info = this.state.tensorInfo.get(dataId);\n return info.backend.readSync(dataId);\n }\n read(dataId) {\n // Route the read to the correct backend.\n const info = this.state.tensorInfo.get(dataId);\n return info.backend.read(dataId);\n }\n async time(query) {\n const start = now();\n const timingInfo = await this.backend.time(query);\n timingInfo.wallMs = now() - start;\n return timingInfo;\n }\n /**\n * Tracks a Tensor in the current scope to be automatically cleaned up\n * when the current scope ends, and returns the value.\n *\n * @param result The Tensor to track in the current scope.\n */\n track(result) {\n if (this.state.activeScope != null) {\n result.scopeId = this.state.activeScope.id;\n this.state.activeScope.track.push(result);\n }\n return result;\n }\n get registeredVariables() {\n return this.state.registeredVariables;\n }\n /**\n * Resets the engine state. 
Removes all backends but does not remove\n * registered backend factories.\n */\n reset() {\n // Make any pending promise obsolete.\n this.pendingBackendInitId++;\n this.state.dispose();\n this.ENV.reset();\n this.state = new EngineState();\n for (const backendName in this.registry) {\n this.disposeRegisteredKernels(backendName);\n this.registry[backendName].dispose();\n delete this.registry[backendName];\n }\n this.backendName = null;\n this.backendInstance = null;\n this.pendingBackendInit = null;\n }\n}\nEngine.nextTensorId = 0;\nEngine.nextVariableId = 0;\nfunction ones(shape) {\n const values = makeOnesTypedArray(sizeFromShape(shape), 'float32');\n return ENGINE.makeTensor(values, shape, 'float32');\n}\nexport function getOrMakeEngine() {\n const ns = getGlobalNamespace();\n if (ns._tfengine == null) {\n const environment = new Environment(ns);\n ns._tfengine = new Engine(environment);\n }\n setEnvironmentGlobal(ns._tfengine.ENV);\n // Tell the current tensor interface that the global engine is responsible\n // for tracking.\n setTensorTracker(() => ns._tfengine);\n return ns._tfengine;\n}\nexport const ENGINE = getOrMakeEngine();\n/**\n * A implementation of the add op for use within engine and tape.\n *\n * This allows us to avoid a circular dependency between add.ts and engine.\n * It is exported to be available in tape tests.\n */\nexport function add(a, b) {\n // We duplicate Add here to avoid a circular dependency with add.ts.\n const inputs = { a, b };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.add(a, b);\n save([a, b]);\n return res;\n }, inputs, null /* gradient */, Add);\n}\n//# sourceMappingURL=engine.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// tslint:disable-next-line:no-any\nfunction _isNavigatorDefined() {\n return typeof navigator !== 'undefined' && navigator != null;\n}\nexport function isMobile() {\n if (_isNavigatorDefined()) {\n // tslint:disable-next-line:no-any\n const a = navigator.userAgent || navigator.vendor || window.opera;\n // tslint:disable-next-line:max-line-length\n return /(android|bb\\d+|meego).+mobile|avantgo|bada\\/|blackberry|blazer|compal|elaine|fennec|hiptop|iemobile|ip(hone|od)|iris|kindle|lge |maemo|midp|mmp|mobile.+firefox|netfront|opera m(ob|in)i|palm( os)?|phone|p(ixi|re)\\/|plucker|pocket|psp|series(4|6)0|symbian|treo|up\\.(browser|link)|vodafone|wap|windows ce|xda|xiino/i\n .test(a) ||\n // tslint:disable-next-line:max-line-length\n /1207|6310|6590|3gso|4thp|50[1-6]i|770s|802s|a wa|abac|ac(er|oo|s\\-)|ai(ko|rn)|al(av|ca|co)|amoi|an(ex|ny|yw)|aptu|ar(ch|go)|as(te|us)|attw|au(di|\\-m|r |s 
)|avan|be(ck|ll|nq)|bi(lb|rd)|bl(ac|az)|br(e|v)w|bumb|bw\\-(n|u)|c55\\/|capi|ccwa|cdm\\-|cell|chtm|cldc|cmd\\-|co(mp|nd)|craw|da(it|ll|ng)|dbte|dc\\-s|devi|dica|dmob|do(c|p)o|ds(12|\\-d)|el(49|ai)|em(l2|ul)|er(ic|k0)|esl8|ez([4-7]0|os|wa|ze)|fetc|fly(\\-|_)|g1 u|g560|gene|gf\\-5|g\\-mo|go(\\.w|od)|gr(ad|un)|haie|hcit|hd\\-(m|p|t)|hei\\-|hi(pt|ta)|hp( i|ip)|hs\\-c|ht(c(\\-| |_|a|g|p|s|t)|tp)|hu(aw|tc)|i\\-(20|go|ma)|i230|iac( |\\-|\\/)|ibro|idea|ig01|ikom|im1k|inno|ipaq|iris|ja(t|v)a|jbro|jemu|jigs|kddi|keji|kgt( |\\/)|klon|kpt |kwc\\-|kyo(c|k)|le(no|xi)|lg( g|\\/(k|l|u)|50|54|\\-[a-w])|libw|lynx|m1\\-w|m3ga|m50\\/|ma(te|ui|xo)|mc(01|21|ca)|m\\-cr|me(rc|ri)|mi(o8|oa|ts)|mmef|mo(01|02|bi|de|do|t(\\-| |o|v)|zz)|mt(50|p1|v )|mwbp|mywa|n10[0-2]|n20[2-3]|n30(0|2)|n50(0|2|5)|n7(0(0|1)|10)|ne((c|m)\\-|on|tf|wf|wg|wt)|nok(6|i)|nzph|o2im|op(ti|wv)|oran|owg1|p800|pan(a|d|t)|pdxg|pg(13|\\-([1-8]|c))|phil|pire|pl(ay|uc)|pn\\-2|po(ck|rt|se)|prox|psio|pt\\-g|qa\\-a|qc(07|12|21|32|60|\\-[2-7]|i\\-)|qtek|r380|r600|raks|rim9|ro(ve|zo)|s55\\/|sa(ge|ma|mm|ms|ny|va)|sc(01|h\\-|oo|p\\-)|sdk\\/|se(c(\\-|0|1)|47|mc|nd|ri)|sgh\\-|shar|sie(\\-|m)|sk\\-0|sl(45|id)|sm(al|ar|b3|it|t5)|so(ft|ny)|sp(01|h\\-|v\\-|v )|sy(01|mb)|t2(18|50)|t6(00|10|18)|ta(gt|lk)|tcl\\-|tdg\\-|tel(i|m)|tim\\-|t\\-mo|to(pl|sh)|ts(70|m\\-|m3|m5)|tx\\-9|up(\\.b|g1|si)|utst|v400|v750|veri|vi(rg|te)|vk(40|5[0-3]|\\-v)|vm40|voda|vulc|vx(52|53|60|61|70|80|81|83|85|98)|w3c(\\-| )|webc|whit|wi(g |nc|nw)|wmlb|wonu|x700|yas\\-|your|zeto|zte\\-/i\n .test(a.substr(0, 4));\n }\n return false;\n}\nexport function isBrowser() {\n return (typeof window !== 'undefined' && window.document != null) ||\n //@ts-ignore\n (typeof WorkerGlobalScope !== 'undefined');\n}\n//# sourceMappingURL=device_util.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport './engine';\nimport * as device_util from './device_util';\nimport { env } from './environment';\nconst ENV = env();\n/**\n * This file contains environment-related flag registrations.\n */\n/** Whether to enable debug mode. */\nENV.registerFlag('DEBUG', () => false, debugValue => {\n if (debugValue) {\n console.warn('Debugging mode is ON. The output of every math call will ' +\n 'be downloaded to CPU and checked for NaNs. ' +\n 'This significantly impacts performance.');\n }\n});\n/** Whether we are in a browser (as versus, say, node.js) environment. */\nENV.registerFlag('IS_BROWSER', () => device_util.isBrowser());\n/** Whether we are in a browser (as versus, say, node.js) environment. */\nENV.registerFlag('IS_NODE', () => (typeof process !== 'undefined') &&\n (typeof process.versions !== 'undefined') &&\n (typeof process.versions.node !== 'undefined'));\n/** Whether this browser is Chrome. 
*/\nENV.registerFlag('IS_CHROME', () => typeof navigator !== 'undefined' && navigator != null &&\n navigator.userAgent != null && /Chrome/.test(navigator.userAgent) &&\n /Google Inc/.test(navigator.vendor));\n/**\n * True when the environment is \"production\" where we disable safety checks\n * to gain performance.\n */\nENV.registerFlag('PROD', () => false);\n/**\n * Whether to do sanity checks when inferring a shape from user-provided\n * values, used when creating a new tensor.\n */\nENV.registerFlag('TENSORLIKE_CHECK_SHAPE_CONSISTENCY', () => ENV.getBool('DEBUG'));\n/** Whether deprecation warnings are enabled. */\nENV.registerFlag('DEPRECATION_WARNINGS_ENABLED', () => true);\n/** True if running unit tests. */\nENV.registerFlag('IS_TEST', () => false);\n//# sourceMappingURL=flags.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from './engine';\nimport { env } from './environment';\nimport { Tensor } from './tensor';\nimport { assert, flatten, inferDtype, isTypedArray, toTypedArray } from './util';\nexport function inferShape(val, dtype) {\n let firstElem = val;\n if (isTypedArray(val)) {\n return dtype === 'string' ? 
[] : [val.length];\n }\n if (!Array.isArray(val)) {\n return []; // Scalar.\n }\n const shape = [];\n while (Array.isArray(firstElem) ||\n isTypedArray(firstElem) && dtype !== 'string') {\n shape.push(firstElem.length);\n firstElem = firstElem[0];\n }\n if (Array.isArray(val) &&\n env().getBool('TENSORLIKE_CHECK_SHAPE_CONSISTENCY')) {\n deepAssertShapeConsistency(val, shape, []);\n }\n return shape;\n}\nfunction deepAssertShapeConsistency(val, shape, indices) {\n indices = indices || [];\n if (!(Array.isArray(val)) && !isTypedArray(val)) {\n assert(shape.length === 0, () => `Element arr[${indices.join('][')}] is a primitive, ` +\n `but should be an array/TypedArray of ${shape[0]} elements`);\n return;\n }\n assert(shape.length > 0, () => `Element arr[${indices.join('][')}] should be a primitive, ` +\n `but is an array of ${val.length} elements`);\n assert(val.length === shape[0], () => `Element arr[${indices.join('][')}] should have ${shape[0]} ` +\n `elements, but has ${val.length} elements`);\n const subShape = shape.slice(1);\n for (let i = 0; i < val.length; ++i) {\n deepAssertShapeConsistency(val[i], subShape, indices.concat(i));\n }\n}\nfunction assertDtype(expectedDtype, actualDType, argName, functionName) {\n if (expectedDtype == null) {\n return;\n }\n if (expectedDtype !== 'numeric' && expectedDtype !== actualDType ||\n expectedDtype === 'numeric' && actualDType === 'string') {\n throw new Error(`Argument '${argName}' passed to '${functionName}' must ` +\n `be ${expectedDtype} tensor, but got ${actualDType} tensor`);\n }\n}\nexport function convertToTensor(x, argName, functionName, parseAsDtype = 'numeric') {\n if (x instanceof Tensor) {\n assertDtype(parseAsDtype, x.dtype, argName, functionName);\n return x;\n }\n let inferredDtype = inferDtype(x);\n // If the user expects a bool/int/float, use that info to update the\n // inferredDtype when it is not a string.\n if (inferredDtype !== 'string' &&\n ['bool', 'int32', 'float32'].indexOf(parseAsDtype) >= 0) {\n inferredDtype = parseAsDtype;\n }\n assertDtype(parseAsDtype, inferredDtype, argName, functionName);\n if ((x == null) ||\n (!isTypedArray(x) && !Array.isArray(x) && typeof x !== 'number' &&\n typeof x !== 'boolean' && typeof x !== 'string')) {\n const type = x == null ? 'null' : x.constructor.name;\n throw new Error(`Argument '${argName}' passed to '${functionName}' must be a ` +\n `Tensor or TensorLike, but got '${type}'`);\n }\n const inferredShape = inferShape(x, inferredDtype);\n if (!isTypedArray(x) && !Array.isArray(x)) {\n x = [x];\n }\n const skipTypedArray = true;\n const values = inferredDtype !== 'string' ?\n toTypedArray(x, inferredDtype) :\n flatten(x, [], skipTypedArray);\n return ENGINE.makeTensor(values, inferredShape, inferredDtype);\n}\nexport function convertToTensorArray(arg, argName, functionName, parseAsDtype = 'numeric') {\n if (!Array.isArray(arg)) {\n throw new Error(`Argument ${argName} passed to ${functionName} must be a ` +\n '`Tensor[]` or `TensorLike[]`');\n }\n const tensors = arg;\n return tensors.map((t, i) => convertToTensor(t, `${argName}[${i}]`, functionName), parseAsDtype);\n}\n//# sourceMappingURL=tensor_util_env.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { isPromise } from '../util';\nexport const OP_SCOPE_SUFFIX = '__op';\n/**\n * Used for wrapping functions that perform math operations on\n * Tensors. The function will be wrapped in a named scope that cleans all\n * memory usage after the function is done.\n */\nexport function op(f) {\n const keys = Object.keys(f);\n if (keys.length !== 1) {\n throw new Error(`Please provide an object with a single key ` +\n `(operation name) mapping to a function. Got an object with ` +\n `${keys.length} keys.`);\n }\n let opName = keys[0];\n const fn = f[opName];\n // Strip the underscore from the end of the function name.\n if (opName.endsWith('_')) {\n opName = opName.substring(0, opName.length - 1);\n }\n // add an __op suffix to distinguish ops from kernels in tf.profile\n opName = opName + OP_SCOPE_SUFFIX;\n // tslint:disable-next-line:no-any\n const f2 = (...args) => {\n ENGINE.startScope(opName);\n try {\n const result = fn(...args);\n if (isPromise(result)) {\n console.error('Cannot return a Promise inside of tidy.');\n }\n ENGINE.endScope(result);\n return result;\n }\n catch (ex) {\n ENGINE.endScope(null);\n throw ex;\n }\n };\n Object.defineProperty(f2, 'name', { value: opName, configurable: true });\n // tslint:disable-next-line:no-any\n return f2;\n}\n//# sourceMappingURL=operation.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Complex } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * Converts two real numbers to a complex number.\n *\n * Given a tensor `real` representing the real part of a complex number, and a\n * tensor `imag` representing the imaginary part of a complex number, this\n * operation returns complex numbers elementwise of the form [r0, i0, r1, i1],\n * where r represents the real part and i represents the imag part.\n *\n * The input tensors real and imag must have the same shape.\n *\n * ```js\n * const real = tf.tensor1d([2.25, 3.25]);\n * const imag = tf.tensor1d([4.75, 5.75]);\n * const complex = tf.complex(real, imag);\n *\n * complex.print();\n * ```\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nfunction complex_(real, imag) {\n const $real = convertToTensor(real, 'real', 'complex');\n const $imag = convertToTensor(imag, 'imag', 'complex');\n util.assertShapesMatch($real.shape, $imag.shape, `real and imag shapes, ${$real.shape} and ${$imag.shape}, ` +\n `must match in call to tf.complex().`);\n const forward = (backend) => {\n return backend.complex($real, $imag);\n };\n const inputs = { real: $real, imag: $imag };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Complex);\n}\nexport const complex = op({ complex_ });\n//# sourceMappingURL=complex.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { assert, assertNonNegativeIntegerDimensions, flatten, inferDtype, isTypedArray, sizeFromShape, toTypedArray } from '../util';\n/** This is shared code across all tensor creation methods. */\nexport function makeTensor(values, shape, inferredShape, dtype) {\n if (dtype == null) {\n dtype = inferDtype(values);\n }\n if (dtype === 'complex64') {\n throw new Error(`Cannot construct a complex64 tensor directly. 
` +\n `Please use tf.complex(real, imag).`);\n }\n if (!isTypedArray(values) && !Array.isArray(values) &&\n typeof values !== 'number' && typeof values !== 'boolean' &&\n typeof values !== 'string') {\n throw new Error('values passed to tensor(values) must be a number/boolean/string or ' +\n 'an array of numbers/booleans/strings, or a TypedArray');\n }\n if (shape != null) {\n assertNonNegativeIntegerDimensions(shape);\n const providedSize = sizeFromShape(shape);\n const inferredSize = sizeFromShape(inferredShape);\n assert(providedSize === inferredSize, () => `Based on the provided shape, [${shape}], the tensor should have ` +\n `${providedSize} values but has ${inferredSize}`);\n for (let i = 0; i < inferredShape.length; ++i) {\n const inferred = inferredShape[i];\n const flatDimsDontMatch = i === inferredShape.length - 1 ?\n inferred !== sizeFromShape(shape.slice(i)) :\n true;\n assert(inferredShape[i] === shape[i] || !flatDimsDontMatch, () => `Error creating a new Tensor. Inferred shape ` +\n `(${inferredShape}) does not match the provided ` +\n `shape (${shape}). `);\n }\n }\n if (!isTypedArray(values) && !Array.isArray(values)) {\n values = [values];\n }\n shape = shape || inferredShape;\n values = dtype !== 'string' ?\n toTypedArray(values, dtype) :\n flatten(values, [], true);\n return ENGINE.makeTensor(values, shape, dtype);\n}\n//# sourceMappingURL=tensor_ops_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { inferShape } from '../tensor_util_env';\nimport { makeTensor } from './tensor_ops_util';\n/**\n * Creates a `tf.Tensor` with the provided values, shape and dtype.\n *\n * ```js\n * // Pass an array of values to create a vector.\n * tf.tensor([1, 2, 3, 4]).print();\n * ```\n *\n * ```js\n * // Pass a nested array of values to make a matrix or a higher\n * // dimensional tensor.\n * tf.tensor([[1, 2], [3, 4]]).print();\n * ```\n *\n * ```js\n * // Pass a flat array and specify a shape yourself.\n * tf.tensor([1, 2, 3, 4], [2, 2]).print();\n * ```\n *\n * @param values The values of the tensor. Can be nested array of numbers,\n * or a flat array, or a `TypedArray`. If the values are strings,\n * they will be encoded as utf-8 and kept as `Uint8Array[]`.\n * @param shape The shape of the tensor. Optional. If not provided,\n * it is inferred from `values`.\n * @param dtype The data type.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function tensor(values, shape, dtype) {\n const inferredShape = inferShape(values, dtype);\n return makeTensor(values, shape, inferredShape, dtype);\n}\n//# sourceMappingURL=tensor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/* Type definitions for exporting and importing of models. */\n/**\n * A map from Tensor dtype to number of bytes per element of the Tensor.\n */\nexport const DTYPE_VALUE_SIZE_MAP = {\n 'float32': 4,\n 'float16': 2,\n 'int32': 4,\n 'uint16': 2,\n 'uint8': 1,\n 'bool': 1,\n 'complex64': 8\n};\n//# sourceMappingURL=types.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { complex } from '../ops/complex';\nimport { tensor } from '../ops/tensor';\nimport { sizeFromShape } from '../util';\nimport { DTYPE_VALUE_SIZE_MAP } from './types';\n/** Number of bytes reserved for the length of the string. (32bit integer). */\nconst NUM_BYTES_STRING_LENGTH = 4;\n/**\n * Encode a map from names to weight values as an ArrayBuffer, along with an\n * `Array` of `WeightsManifestEntry` as specification of the encoded weights.\n *\n * This function does not perform sharding.\n *\n * This function is the reverse of `decodeWeights`.\n *\n * @param tensors A map (\"dict\") from names to tensors.\n * @param group Group to which the weights belong (optional).\n * @returns A `Promise` of\n * - A flat `ArrayBuffer` with all the binary values of the `Tensor`s\n * concatenated.\n * - An `Array` of `WeightManifestEntry`s, carrying information including\n * tensor names, `dtype`s and shapes.\n * @throws Error: on unsupported tensor `dtype`.\n */\nexport async function encodeWeights(tensors, group) {\n // TODO(adarob, cais): Support quantization.\n const specs = [];\n const dataPromises = [];\n const names = Array.isArray(tensors) ?\n tensors.map(tensor => tensor.name) :\n Object.keys(tensors);\n for (let i = 0; i < names.length; ++i) {\n const name = names[i];\n const t = Array.isArray(tensors) ? 
tensors[i].tensor : tensors[name];\n if (t.dtype !== 'float32' && t.dtype !== 'int32' && t.dtype !== 'bool' &&\n t.dtype !== 'string' && t.dtype !== 'complex64') {\n throw new Error(`Unsupported dtype in weight '${name}': ${t.dtype}`);\n }\n const spec = { name, shape: t.shape, dtype: t.dtype };\n if (t.dtype === 'string') {\n const utf8bytes = new Promise(async (resolve) => {\n const vals = await t.bytes();\n const totalNumBytes = vals.reduce((p, c) => p + c.length, 0) +\n NUM_BYTES_STRING_LENGTH * vals.length;\n const bytes = new Uint8Array(totalNumBytes);\n let offset = 0;\n for (let i = 0; i < vals.length; i++) {\n const val = vals[i];\n const bytesOfLength = new Uint8Array(new Uint32Array([val.length]).buffer);\n bytes.set(bytesOfLength, offset);\n offset += NUM_BYTES_STRING_LENGTH;\n bytes.set(val, offset);\n offset += val.length;\n }\n resolve(bytes);\n });\n dataPromises.push(utf8bytes);\n }\n else {\n dataPromises.push(t.data());\n }\n if (group != null) {\n spec.group = group;\n }\n specs.push(spec);\n }\n const tensorValues = await Promise.all(dataPromises);\n return { data: concatenateTypedArrays(tensorValues), specs };\n}\n/**\n * Decode flat ArrayBuffer as weights.\n *\n * This function does not handle sharding.\n *\n * This function is the reverse of `encodeWeights`.\n *\n * @param buffer A flat ArrayBuffer carrying the binary values of the tensors\n * concatenated in the order specified in `specs`.\n * @param specs Specifications of the names, dtypes and shapes of the tensors\n * whose value are encoded by `buffer`.\n * @return A map from tensor name to tensor value, with the names corresponding\n * to names in `specs`.\n * @throws Error, if any of the tensors has unsupported dtype.\n */\nexport function decodeWeights(buffer, specs) {\n // TODO(adarob, cais): Support quantization.\n const out = {};\n let float16Decode;\n let offset = 0;\n for (const spec of specs) {\n const name = spec.name;\n const dtype = spec.dtype;\n const shape = spec.shape;\n const size = sizeFromShape(shape);\n let values;\n if ('quantization' in spec) {\n const quantization = spec.quantization;\n if (quantization.dtype === 'uint8' || quantization.dtype === 'uint16') {\n if (!('min' in quantization && 'scale' in quantization)) {\n throw new Error(`Weight ${spec.name} with quantization ${quantization.dtype} ` +\n `doesn't have corresponding metadata min and scale.`);\n }\n }\n else if (quantization.dtype === 'float16') {\n if (dtype !== 'float32') {\n throw new Error(`Weight ${spec.name} is quantized with ${quantization.dtype} ` +\n `which only supports weights of type float32 not ${dtype}.`);\n }\n }\n else {\n throw new Error(`Weight ${spec.name} has unknown ` +\n `quantization dtype ${quantization.dtype}. 
` +\n `Supported quantization dtypes are: ` +\n `'uint8', 'uint16', and 'float16'.`);\n }\n const quantizationSizeFactor = DTYPE_VALUE_SIZE_MAP[quantization.dtype];\n const byteBuffer = buffer.slice(offset, offset + size * quantizationSizeFactor);\n const quantizedArray = (quantization.dtype === 'uint8') ?\n new Uint8Array(byteBuffer) :\n new Uint16Array(byteBuffer);\n if (dtype === 'float32') {\n if (quantization.dtype === 'uint8' || quantization.dtype === 'uint16') {\n values = new Float32Array(quantizedArray.length);\n for (let i = 0; i < quantizedArray.length; i++) {\n const v = quantizedArray[i];\n values[i] = v * quantization.scale + quantization.min;\n }\n }\n else if (quantization.dtype === 'float16') {\n if (float16Decode === undefined) {\n float16Decode = getFloat16Decoder();\n }\n values = float16Decode(quantizedArray);\n }\n else {\n throw new Error(`Unsupported quantization type ${quantization.dtype} ` +\n `for weight type float32.`);\n }\n }\n else if (dtype === 'int32') {\n if (quantization.dtype !== 'uint8' && quantization.dtype !== 'uint16') {\n throw new Error(`Unsupported quantization type ${quantization.dtype} ` +\n `for weight type int32.`);\n }\n values = new Int32Array(quantizedArray.length);\n for (let i = 0; i < quantizedArray.length; i++) {\n const v = quantizedArray[i];\n values[i] = Math.round(v * quantization.scale + quantization.min);\n }\n }\n else {\n throw new Error(`Unsupported dtype in weight '${name}': ${dtype}`);\n }\n offset += size * quantizationSizeFactor;\n }\n else if (dtype === 'string') {\n const size = sizeFromShape(spec.shape);\n values = [];\n for (let i = 0; i < size; i++) {\n const byteLength = new Uint32Array(buffer.slice(offset, offset + NUM_BYTES_STRING_LENGTH))[0];\n offset += NUM_BYTES_STRING_LENGTH;\n const bytes = new Uint8Array(buffer.slice(offset, offset + byteLength));\n values.push(bytes);\n offset += byteLength;\n }\n }\n else {\n const dtypeFactor = DTYPE_VALUE_SIZE_MAP[dtype];\n const byteBuffer = buffer.slice(offset, offset + size * dtypeFactor);\n if (dtype === 'float32') {\n values = new Float32Array(byteBuffer);\n }\n else if (dtype === 'int32') {\n values = new Int32Array(byteBuffer);\n }\n else if (dtype === 'bool') {\n values = new Uint8Array(byteBuffer);\n }\n else if (dtype === 'complex64') {\n values = new Float32Array(byteBuffer);\n const real = new Float32Array(values.length / 2);\n const image = new Float32Array(values.length / 2);\n for (let i = 0; i < real.length; i++) {\n real[i] = values[i * 2];\n image[i] = values[i * 2 + 1];\n }\n const realTensor = tensor(real, shape, 'float32');\n const imageTensor = tensor(image, shape, 'float32');\n out[name] = complex(realTensor, imageTensor);\n realTensor.dispose();\n imageTensor.dispose();\n }\n else {\n throw new Error(`Unsupported dtype in weight '${name}': ${dtype}`);\n }\n offset += size * dtypeFactor;\n }\n if (dtype !== 'complex64') {\n out[name] = tensor(values, shape, dtype);\n }\n }\n return out;\n}\n/**\n * Concatenate TypedArrays into an ArrayBuffer.\n */\nexport function concatenateTypedArrays(xs) {\n // TODO(adarob, cais): Support quantization.\n if (xs === null) {\n throw new Error(`Invalid input value: ${JSON.stringify(xs)}`);\n }\n let totalByteLength = 0;\n // `normalizedXs` is here for this reason: a `TypedArray`'s `buffer'\n // can have a different byte length from that of the `TypedArray` itself,\n // for example, when the `TypedArray` is created from an offset in an\n // `ArrayBuffer`. 
`normliazedXs` holds `TypedArray`s whose `buffer`s match\n // the `TypedArray` in byte length. If an element of `xs` does not show\n // this property, a new `TypedArray` that satisfy this property will be\n // constructed and pushed into `normalizedXs`.\n const normalizedXs = [];\n xs.forEach((x) => {\n totalByteLength += x.byteLength;\n // tslint:disable:no-any\n normalizedXs.push(x.byteLength === x.buffer.byteLength ? x :\n new x.constructor(x));\n if (!(x instanceof Float32Array || x instanceof Int32Array ||\n x instanceof Uint8Array)) {\n throw new Error(`Unsupported TypedArray subtype: ${x.constructor.name}`);\n }\n // tslint:enable:no-any\n });\n const y = new Uint8Array(totalByteLength);\n let offset = 0;\n normalizedXs.forEach((x) => {\n y.set(new Uint8Array(x.buffer), offset);\n offset += x.byteLength;\n });\n return y.buffer;\n}\n// Use Buffer on Node.js instead of Blob/atob/btoa\nconst useNodeBuffer = typeof Buffer !== 'undefined' &&\n (typeof Blob === 'undefined' || typeof atob === 'undefined' ||\n typeof btoa === 'undefined');\n/**\n * Calculate the byte length of a JavaScript string.\n *\n * Note that a JavaScript string can contain wide characters, therefore the\n * length of the string is not necessarily equal to the byte length.\n *\n * @param str Input string.\n * @returns Byte length.\n */\nexport function stringByteLength(str) {\n if (useNodeBuffer) {\n return Buffer.byteLength(str);\n }\n return new Blob([str]).size;\n}\n/**\n * Encode an ArrayBuffer as a base64 encoded string.\n *\n * @param buffer `ArrayBuffer` to be converted.\n * @returns A string that base64-encodes `buffer`.\n */\nexport function arrayBufferToBase64String(buffer) {\n if (useNodeBuffer) {\n return Buffer.from(buffer).toString('base64');\n }\n const buf = new Uint8Array(buffer);\n let s = '';\n for (let i = 0, l = buf.length; i < l; i++) {\n s += String.fromCharCode(buf[i]);\n }\n return btoa(s);\n}\n/**\n * Decode a base64 string as an ArrayBuffer.\n *\n * @param str Base64 string.\n * @returns Decoded `ArrayBuffer`.\n */\nexport function base64StringToArrayBuffer(str) {\n if (useNodeBuffer) {\n const buf = Buffer.from(str, 'base64');\n return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n }\n const s = atob(str);\n const buffer = new Uint8Array(s.length);\n for (let i = 0; i < s.length; ++i) {\n buffer.set([s.charCodeAt(i)], i);\n }\n return buffer.buffer;\n}\n/**\n * Concatenate a number of ArrayBuffers into one.\n *\n * @param buffers A number of array buffers to concatenate.\n * @returns Result of concatenating `buffers` in order.\n */\nexport function concatenateArrayBuffers(buffers) {\n if (buffers.length === 1) {\n return buffers[0];\n }\n let totalByteLength = 0;\n buffers.forEach((buffer) => {\n totalByteLength += buffer.byteLength;\n });\n const temp = new Uint8Array(totalByteLength);\n let offset = 0;\n buffers.forEach((buffer) => {\n temp.set(new Uint8Array(buffer), offset);\n offset += buffer.byteLength;\n });\n return temp.buffer;\n}\n/**\n * Get the basename of a path.\n *\n * Behaves in a way analogous to Linux's basename command.\n *\n * @param path\n */\nexport function basename(path) {\n const SEPARATOR = '/';\n path = path.trim();\n while (path.endsWith(SEPARATOR)) {\n path = path.slice(0, path.length - 1);\n }\n const items = path.split(SEPARATOR);\n return items[items.length - 1];\n}\n/**\n * Populate ModelArtifactsInfo fields for a model with JSON topology.\n * @param modelArtifacts\n * @returns A ModelArtifactsInfo object.\n */\nexport function 
getModelArtifactsInfoForJSON(modelArtifacts) {\n if (modelArtifacts.modelTopology instanceof ArrayBuffer) {\n throw new Error('Expected JSON model topology, received ArrayBuffer.');\n }\n return {\n dateSaved: new Date(),\n modelTopologyType: 'JSON',\n modelTopologyBytes: modelArtifacts.modelTopology == null ?\n 0 :\n stringByteLength(JSON.stringify(modelArtifacts.modelTopology)),\n weightSpecsBytes: modelArtifacts.weightSpecs == null ?\n 0 :\n stringByteLength(JSON.stringify(modelArtifacts.weightSpecs)),\n weightDataBytes: modelArtifacts.weightData == null ?\n 0 :\n modelArtifacts.weightData.byteLength,\n };\n}\n/**\n * Computes mantisa table for casting Float16 to Float32\n * See http://www.fox-toolkit.org/ftp/fasthalffloatconversion.pdf\n *\n * @returns Uint32Array, 2048 mantissa lookup values.\n */\nfunction computeFloat16MantisaTable() {\n const convertMantissa = (i) => {\n let m = i << 13;\n let e = 0;\n while ((m & 0x00800000) === 0) {\n e -= 0x00800000;\n m <<= 1;\n }\n m &= ~0x00800000;\n e += 0x38800000;\n return m | e;\n };\n const mantisaTable = new Uint32Array(2048);\n mantisaTable[0] = 0;\n for (let i = 1; i < 1024; i++) {\n mantisaTable[i] = convertMantissa(i);\n }\n for (let i = 1024; i < 2048; i++) {\n mantisaTable[i] = 0x38000000 + ((i - 1024) << 13);\n }\n return mantisaTable;\n}\n/**\n * Computes exponent table for casting Float16 to Float32\n * See http://www.fox-toolkit.org/ftp/fasthalffloatconversion.pdf\n *\n * @returns Uint32Array, 64 exponent lookup values.\n */\nfunction computeFloat16ExponentTable() {\n const exponentTable = new Uint32Array(64);\n exponentTable[0] = 0;\n exponentTable[31] = 0x47800000;\n exponentTable[32] = 0x80000000;\n exponentTable[63] = 0xc7800000;\n for (let i = 1; i < 31; i++) {\n exponentTable[i] = i << 23;\n }\n for (let i = 33; i < 63; i++) {\n exponentTable[i] = 0x80000000 + ((i - 32) << 23);\n }\n return exponentTable;\n}\n/**\n * Computes offset table for casting Float16 to Float32\n * See http://www.fox-toolkit.org/ftp/fasthalffloatconversion.pdf\n *\n * @returns Uint32Array, 6d offset values.\n */\nfunction computeFloat16OffsetTable() {\n const offsetTable = new Uint32Array(64);\n for (let i = 0; i < 64; i++) {\n offsetTable[i] = 1024;\n }\n offsetTable[0] = offsetTable[32] = 0;\n return offsetTable;\n}\n/**\n * Retrieve a Float16 decoder which will decode a ByteArray of Float16 values\n * to a Float32Array.\n *\n * @returns Function (buffer: Uint16Array) => Float32Array which decodes\n * the Uint16Array of Float16 bytes to a Float32Array.\n */\nexport function getFloat16Decoder() {\n // Algorithm is based off of\n // http://www.fox-toolkit.org/ftp/fasthalffloatconversion.pdf\n // Cache lookup tables\n const mantisaTable = computeFloat16MantisaTable();\n const exponentTable = computeFloat16ExponentTable();\n const offsetTable = computeFloat16OffsetTable();\n return (quantizedArray) => {\n const buffer = new ArrayBuffer(4 * quantizedArray.length);\n const bufferUint32View = new Uint32Array(buffer);\n for (let index = 0; index < quantizedArray.length; index++) {\n const float16Bits = quantizedArray[index];\n const float32Bits = mantisaTable[offsetTable[float16Bits >> 10] + (float16Bits & 0x3ff)] +\n exponentTable[float16Bits >> 10];\n bufferUint32View[index] = float32Bits;\n }\n return new Float32Array(buffer);\n };\n}\n//# sourceMappingURL=io_utils.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class IORouterRegistry {\n constructor() {\n this.saveRouters = [];\n this.loadRouters = [];\n }\n static getInstance() {\n if (IORouterRegistry.instance == null) {\n IORouterRegistry.instance = new IORouterRegistry();\n }\n return IORouterRegistry.instance;\n }\n /**\n * Register a save-handler router.\n *\n * @param saveRouter A function that maps a URL-like string onto an instance\n * of `IOHandler` with the `save` method defined or `null`.\n */\n static registerSaveRouter(saveRouter) {\n IORouterRegistry.getInstance().saveRouters.push(saveRouter);\n }\n /**\n * Register a load-handler router.\n *\n * @param loadRouter A function that maps a URL-like string onto an instance\n * of `IOHandler` with the `load` method defined or `null`.\n */\n static registerLoadRouter(loadRouter) {\n IORouterRegistry.getInstance().loadRouters.push(loadRouter);\n }\n /**\n * Look up IOHandler for saving, given a URL-like string.\n *\n * @param url\n * @returns If only one match is found, an instance of IOHandler with the\n * `save` method defined. If no match is found, `null`.\n * @throws Error, if more than one match is found.\n */\n static getSaveHandlers(url) {\n return IORouterRegistry.getHandlers(url, 'save');\n }\n /**\n * Look up IOHandler for loading, given a URL-like string.\n *\n * @param url\n * @param loadOptions Optional, custom load options.\n * @returns All valid handlers for `url`, given the currently registered\n * handler routers.\n */\n static getLoadHandlers(url, loadOptions) {\n return IORouterRegistry.getHandlers(url, 'load', loadOptions);\n }\n static getHandlers(url, handlerType, loadOptions) {\n const validHandlers = [];\n const routers = handlerType === 'load' ?\n IORouterRegistry.getInstance().loadRouters :\n IORouterRegistry.getInstance().saveRouters;\n routers.forEach(router => {\n const handler = router(url, loadOptions);\n if (handler !== null) {\n validHandlers.push(handler);\n }\n });\n return validHandlers;\n }\n}\nexport const registerSaveRouter = (loudRouter) => IORouterRegistry.registerSaveRouter(loudRouter);\nexport const registerLoadRouter = (loudRouter) => IORouterRegistry.registerLoadRouter(loudRouter);\nexport const getSaveHandlers = (url) => IORouterRegistry.getSaveHandlers(url);\nexport const getLoadHandlers = (url, loadOptions) => IORouterRegistry.getLoadHandlers(url, loadOptions);\n//# sourceMappingURL=router_registry.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport '../flags';\nimport { env } from '../environment';\nimport { getModelArtifactsInfoForJSON } from './io_utils';\nimport { IORouterRegistry } from './router_registry';\nconst DATABASE_NAME = 'tensorflowjs';\nconst DATABASE_VERSION = 1;\n// Model data and ModelArtifactsInfo (metadata) are stored in two separate\n// stores for efficient access of the list of stored models and their metadata.\n// 1. The object store for model data: topology, weights and weight manifests.\nconst MODEL_STORE_NAME = 'models_store';\n// 2. The object store for ModelArtifactsInfo, including meta-information such\n// as the type of topology (JSON vs binary), byte size of the topology, byte\n// size of the weights, etc.\nconst INFO_STORE_NAME = 'model_info_store';\n/**\n * Delete the entire database for tensorflow.js, including the models store.\n */\nexport async function deleteDatabase() {\n const idbFactory = getIndexedDBFactory();\n return new Promise((resolve, reject) => {\n const deleteRequest = idbFactory.deleteDatabase(DATABASE_NAME);\n deleteRequest.onsuccess = () => resolve();\n deleteRequest.onerror = error => reject(error);\n });\n}\nfunction getIndexedDBFactory() {\n if (!env().getBool('IS_BROWSER')) {\n // TODO(cais): Add more info about what IOHandler subtypes are available.\n // Maybe point to a doc page on the web and/or automatically determine\n // the available IOHandlers and print them in the error message.\n throw new Error('Failed to obtain IndexedDB factory because the current environment' +\n 'is not a web browser.');\n }\n // tslint:disable-next-line:no-any\n const theWindow = typeof window === 'undefined' ? 
self : window;\n const factory = theWindow.indexedDB || theWindow.mozIndexedDB ||\n theWindow.webkitIndexedDB || theWindow.msIndexedDB ||\n theWindow.shimIndexedDB;\n if (factory == null) {\n throw new Error('The current browser does not appear to support IndexedDB.');\n }\n return factory;\n}\nfunction setUpDatabase(openRequest) {\n const db = openRequest.result;\n db.createObjectStore(MODEL_STORE_NAME, { keyPath: 'modelPath' });\n db.createObjectStore(INFO_STORE_NAME, { keyPath: 'modelPath' });\n}\n/**\n * IOHandler subclass: Browser IndexedDB.\n *\n * See the doc string of `browserIndexedDB` for more details.\n */\nexport class BrowserIndexedDB {\n constructor(modelPath) {\n this.indexedDB = getIndexedDBFactory();\n if (modelPath == null || !modelPath) {\n throw new Error('For IndexedDB, modelPath must not be null, undefined or empty.');\n }\n this.modelPath = modelPath;\n }\n async save(modelArtifacts) {\n // TODO(cais): Support saving GraphDef models.\n if (modelArtifacts.modelTopology instanceof ArrayBuffer) {\n throw new Error('BrowserLocalStorage.save() does not support saving model topology ' +\n 'in binary formats yet.');\n }\n return this.databaseAction(this.modelPath, modelArtifacts);\n }\n async load() {\n return this.databaseAction(this.modelPath);\n }\n /**\n * Perform database action to put model artifacts into or read model artifacts\n * from IndexedDB object store.\n *\n * Whether the action is put or get depends on whether `modelArtifacts` is\n * specified. If it is specified, the action will be put; otherwise the action\n * will be get.\n *\n * @param modelPath A unique string path for the model.\n * @param modelArtifacts If specified, it will be the model artifacts to be\n * stored in IndexedDB.\n * @returns A `Promise` of `SaveResult`, if the action is put, or a `Promise`\n * of `ModelArtifacts`, if the action is get.\n */\n databaseAction(modelPath, modelArtifacts) {\n return new Promise((resolve, reject) => {\n const openRequest = this.indexedDB.open(DATABASE_NAME, DATABASE_VERSION);\n openRequest.onupgradeneeded = () => setUpDatabase(openRequest);\n openRequest.onsuccess = () => {\n const db = openRequest.result;\n if (modelArtifacts == null) {\n // Read model out from object store.\n const modelTx = db.transaction(MODEL_STORE_NAME, 'readonly');\n const modelStore = modelTx.objectStore(MODEL_STORE_NAME);\n const getRequest = modelStore.get(this.modelPath);\n getRequest.onsuccess = () => {\n if (getRequest.result == null) {\n db.close();\n return reject(new Error(`Cannot find model with path '${this.modelPath}' ` +\n `in IndexedDB.`));\n }\n else {\n resolve(getRequest.result.modelArtifacts);\n }\n };\n getRequest.onerror = error => {\n db.close();\n return reject(getRequest.error);\n };\n modelTx.oncomplete = () => db.close();\n }\n else {\n // Put model into object store.\n const modelArtifactsInfo = getModelArtifactsInfoForJSON(modelArtifacts);\n // First, put ModelArtifactsInfo into info store.\n const infoTx = db.transaction(INFO_STORE_NAME, 'readwrite');\n let infoStore = infoTx.objectStore(INFO_STORE_NAME);\n const putInfoRequest = infoStore.put({ modelPath: this.modelPath, modelArtifactsInfo });\n let modelTx;\n putInfoRequest.onsuccess = () => {\n // Second, put model data into model store.\n modelTx = db.transaction(MODEL_STORE_NAME, 'readwrite');\n const modelStore = modelTx.objectStore(MODEL_STORE_NAME);\n const putModelRequest = modelStore.put({\n modelPath: this.modelPath,\n modelArtifacts,\n modelArtifactsInfo\n });\n putModelRequest.onsuccess = () => 
resolve({ modelArtifactsInfo });\n putModelRequest.onerror = error => {\n // If the put-model request fails, roll back the info entry as\n // well.\n infoStore = infoTx.objectStore(INFO_STORE_NAME);\n const deleteInfoRequest = infoStore.delete(this.modelPath);\n deleteInfoRequest.onsuccess = () => {\n db.close();\n return reject(putModelRequest.error);\n };\n deleteInfoRequest.onerror = error => {\n db.close();\n return reject(putModelRequest.error);\n };\n };\n };\n putInfoRequest.onerror = error => {\n db.close();\n return reject(putInfoRequest.error);\n };\n infoTx.oncomplete = () => {\n if (modelTx == null) {\n db.close();\n }\n else {\n modelTx.oncomplete = () => db.close();\n }\n };\n }\n };\n openRequest.onerror = error => reject(openRequest.error);\n });\n }\n}\nBrowserIndexedDB.URL_SCHEME = 'indexeddb://';\nexport const indexedDBRouter = (url) => {\n if (!env().getBool('IS_BROWSER')) {\n return null;\n }\n else {\n if (!Array.isArray(url) && url.startsWith(BrowserIndexedDB.URL_SCHEME)) {\n return browserIndexedDB(url.slice(BrowserIndexedDB.URL_SCHEME.length));\n }\n else {\n return null;\n }\n }\n};\nIORouterRegistry.registerSaveRouter(indexedDBRouter);\nIORouterRegistry.registerLoadRouter(indexedDBRouter);\n/**\n * Creates a browser IndexedDB IOHandler for saving and loading models.\n *\n * ```js\n * const model = tf.sequential();\n * model.add(\n * tf.layers.dense({units: 1, inputShape: [100], activation: 'sigmoid'}));\n *\n * const saveResult = await model.save('indexeddb://MyModel'));\n * console.log(saveResult);\n * ```\n *\n * @param modelPath A unique identifier for the model to be saved. Must be a\n * non-empty string.\n * @returns An instance of `BrowserIndexedDB` (sublcass of `IOHandler`),\n * which can be used with, e.g., `tf.Model.save`.\n */\nexport function browserIndexedDB(modelPath) {\n return new BrowserIndexedDB(modelPath);\n}\nfunction maybeStripScheme(key) {\n return key.startsWith(BrowserIndexedDB.URL_SCHEME) ?\n key.slice(BrowserIndexedDB.URL_SCHEME.length) :\n key;\n}\nexport class BrowserIndexedDBManager {\n constructor() {\n this.indexedDB = getIndexedDBFactory();\n }\n async listModels() {\n return new Promise((resolve, reject) => {\n const openRequest = this.indexedDB.open(DATABASE_NAME, DATABASE_VERSION);\n openRequest.onupgradeneeded = () => setUpDatabase(openRequest);\n openRequest.onsuccess = () => {\n const db = openRequest.result;\n const tx = db.transaction(INFO_STORE_NAME, 'readonly');\n const store = tx.objectStore(INFO_STORE_NAME);\n // tslint:disable:max-line-length\n // Need to cast `store` as `any` here because TypeScript's DOM\n // library does not have the `getAll()` method even though the\n // method is supported in the latest version of most mainstream\n // browsers:\n // https://developer.mozilla.org/en-US/docs/Web/API/IDBObjectStore/getAll\n // tslint:enable:max-line-length\n // tslint:disable-next-line:no-any\n const getAllInfoRequest = store.getAll();\n getAllInfoRequest.onsuccess = () => {\n const out = {};\n for (const item of getAllInfoRequest.result) {\n out[item.modelPath] = item.modelArtifactsInfo;\n }\n resolve(out);\n };\n getAllInfoRequest.onerror = error => {\n db.close();\n return reject(getAllInfoRequest.error);\n };\n tx.oncomplete = () => db.close();\n };\n openRequest.onerror = error => reject(openRequest.error);\n });\n }\n async removeModel(path) {\n path = maybeStripScheme(path);\n return new Promise((resolve, reject) => {\n const openRequest = this.indexedDB.open(DATABASE_NAME, DATABASE_VERSION);\n 
openRequest.onupgradeneeded = () => setUpDatabase(openRequest);\n openRequest.onsuccess = () => {\n const db = openRequest.result;\n const infoTx = db.transaction(INFO_STORE_NAME, 'readwrite');\n const infoStore = infoTx.objectStore(INFO_STORE_NAME);\n const getInfoRequest = infoStore.get(path);\n let modelTx;\n getInfoRequest.onsuccess = () => {\n if (getInfoRequest.result == null) {\n db.close();\n return reject(new Error(`Cannot find model with path '${path}' ` +\n `in IndexedDB.`));\n }\n else {\n // First, delete the entry in the info store.\n const deleteInfoRequest = infoStore.delete(path);\n const deleteModelData = () => {\n // Second, delete the entry in the model store.\n modelTx = db.transaction(MODEL_STORE_NAME, 'readwrite');\n const modelStore = modelTx.objectStore(MODEL_STORE_NAME);\n const deleteModelRequest = modelStore.delete(path);\n deleteModelRequest.onsuccess = () => resolve(getInfoRequest.result.modelArtifactsInfo);\n deleteModelRequest.onerror = error => reject(getInfoRequest.error);\n };\n // Proceed with deleting model data regardless of whether deletion\n // of info data succeeds or not.\n deleteInfoRequest.onsuccess = deleteModelData;\n deleteInfoRequest.onerror = error => {\n deleteModelData();\n db.close();\n return reject(getInfoRequest.error);\n };\n }\n };\n getInfoRequest.onerror = error => {\n db.close();\n return reject(getInfoRequest.error);\n };\n infoTx.oncomplete = () => {\n if (modelTx == null) {\n db.close();\n }\n else {\n modelTx.oncomplete = () => db.close();\n }\n };\n };\n openRequest.onerror = error => reject(openRequest.error);\n });\n }\n}\n//# sourceMappingURL=indexed_db.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport '../flags';\nimport { env } from '../environment';\nimport { assert } from '../util';\nimport { arrayBufferToBase64String, base64StringToArrayBuffer, getModelArtifactsInfoForJSON } from './io_utils';\nimport { IORouterRegistry } from './router_registry';\nconst PATH_SEPARATOR = '/';\nconst PATH_PREFIX = 'tensorflowjs_models';\nconst INFO_SUFFIX = 'info';\nconst MODEL_TOPOLOGY_SUFFIX = 'model_topology';\nconst WEIGHT_SPECS_SUFFIX = 'weight_specs';\nconst WEIGHT_DATA_SUFFIX = 'weight_data';\nconst MODEL_METADATA_SUFFIX = 'model_metadata';\n/**\n * Purge all tensorflow.js-saved model artifacts from local storage.\n *\n * @returns Paths of the models purged.\n */\nexport function purgeLocalStorageArtifacts() {\n if (!env().getBool('IS_BROWSER') || typeof window === 'undefined' ||\n typeof window.localStorage === 'undefined') {\n throw new Error('purgeLocalStorageModels() cannot proceed because local storage is ' +\n 'unavailable in the current environment.');\n }\n const LS = window.localStorage;\n const purgedModelPaths = [];\n for (let i = 0; i < LS.length; ++i) {\n const key = LS.key(i);\n const prefix = PATH_PREFIX + PATH_SEPARATOR;\n if 
(key.startsWith(prefix) && key.length > prefix.length) {\n LS.removeItem(key);\n const modelName = getModelPathFromKey(key);\n if (purgedModelPaths.indexOf(modelName) === -1) {\n purgedModelPaths.push(modelName);\n }\n }\n }\n return purgedModelPaths;\n}\nfunction getModelKeys(path) {\n return {\n info: [PATH_PREFIX, path, INFO_SUFFIX].join(PATH_SEPARATOR),\n topology: [PATH_PREFIX, path, MODEL_TOPOLOGY_SUFFIX].join(PATH_SEPARATOR),\n weightSpecs: [PATH_PREFIX, path, WEIGHT_SPECS_SUFFIX].join(PATH_SEPARATOR),\n weightData: [PATH_PREFIX, path, WEIGHT_DATA_SUFFIX].join(PATH_SEPARATOR),\n modelMetadata: [PATH_PREFIX, path, MODEL_METADATA_SUFFIX].join(PATH_SEPARATOR)\n };\n}\n/**\n * Get model path from a local-storage key.\n *\n * E.g., 'tensorflowjs_models/my/model/1/info' --> 'my/model/1'\n *\n * @param key\n */\nfunction getModelPathFromKey(key) {\n const items = key.split(PATH_SEPARATOR);\n if (items.length < 3) {\n throw new Error(`Invalid key format: ${key}`);\n }\n return items.slice(1, items.length - 1).join(PATH_SEPARATOR);\n}\nfunction maybeStripScheme(key) {\n return key.startsWith(BrowserLocalStorage.URL_SCHEME) ?\n key.slice(BrowserLocalStorage.URL_SCHEME.length) :\n key;\n}\n/**\n * IOHandler subclass: Browser Local Storage.\n *\n * See the doc string to `browserLocalStorage` for more details.\n */\nexport class BrowserLocalStorage {\n constructor(modelPath) {\n if (!env().getBool('IS_BROWSER') || typeof window === 'undefined' ||\n typeof window.localStorage === 'undefined') {\n // TODO(cais): Add more info about what IOHandler subtypes are\n // available.\n // Maybe point to a doc page on the web and/or automatically determine\n // the available IOHandlers and print them in the error message.\n throw new Error('The current environment does not support local storage.');\n }\n this.LS = window.localStorage;\n if (modelPath == null || !modelPath) {\n throw new Error('For local storage, modelPath must not be null, undefined or empty.');\n }\n this.modelPath = modelPath;\n this.keys = getModelKeys(this.modelPath);\n }\n /**\n * Save model artifacts to browser local storage.\n *\n * See the documentation to `browserLocalStorage` for details on the saved\n * artifacts.\n *\n * @param modelArtifacts The model artifacts to be stored.\n * @returns An instance of SaveResult.\n */\n async save(modelArtifacts) {\n if (modelArtifacts.modelTopology instanceof ArrayBuffer) {\n throw new Error('BrowserLocalStorage.save() does not support saving model topology ' +\n 'in binary formats yet.');\n }\n else {\n const topology = JSON.stringify(modelArtifacts.modelTopology);\n const weightSpecs = JSON.stringify(modelArtifacts.weightSpecs);\n const modelArtifactsInfo = getModelArtifactsInfoForJSON(modelArtifacts);\n try {\n this.LS.setItem(this.keys.info, JSON.stringify(modelArtifactsInfo));\n this.LS.setItem(this.keys.topology, topology);\n this.LS.setItem(this.keys.weightSpecs, weightSpecs);\n this.LS.setItem(this.keys.weightData, arrayBufferToBase64String(modelArtifacts.weightData));\n this.LS.setItem(this.keys.modelMetadata, JSON.stringify({\n format: modelArtifacts.format,\n generatedBy: modelArtifacts.generatedBy,\n convertedBy: modelArtifacts.convertedBy,\n userDefinedMetadata: modelArtifacts.userDefinedMetadata\n }));\n return { modelArtifactsInfo };\n }\n catch (err) {\n // If saving failed, clean up all items saved so far.\n this.LS.removeItem(this.keys.info);\n this.LS.removeItem(this.keys.topology);\n this.LS.removeItem(this.keys.weightSpecs);\n this.LS.removeItem(this.keys.weightData);\n 
this.LS.removeItem(this.keys.modelMetadata);\n throw new Error(`Failed to save model '${this.modelPath}' to local storage: ` +\n `size quota being exceeded is a possible cause of this failure: ` +\n `modelTopologyBytes=${modelArtifactsInfo.modelTopologyBytes}, ` +\n `weightSpecsBytes=${modelArtifactsInfo.weightSpecsBytes}, ` +\n `weightDataBytes=${modelArtifactsInfo.weightDataBytes}.`);\n }\n }\n }\n /**\n * Load a model from local storage.\n *\n * See the documentation to `browserLocalStorage` for details on the saved\n * artifacts.\n *\n * @returns The loaded model (if loading succeeds).\n */\n async load() {\n const info = JSON.parse(this.LS.getItem(this.keys.info));\n if (info == null) {\n throw new Error(`In local storage, there is no model with name '${this.modelPath}'`);\n }\n if (info.modelTopologyType !== 'JSON') {\n throw new Error('BrowserLocalStorage does not support loading non-JSON model ' +\n 'topology yet.');\n }\n const out = {};\n // Load topology.\n const topology = JSON.parse(this.LS.getItem(this.keys.topology));\n if (topology == null) {\n throw new Error(`In local storage, the topology of model '${this.modelPath}' ` +\n `is missing.`);\n }\n out.modelTopology = topology;\n // Load weight specs.\n const weightSpecs = JSON.parse(this.LS.getItem(this.keys.weightSpecs));\n if (weightSpecs == null) {\n throw new Error(`In local storage, the weight specs of model '${this.modelPath}' ` +\n `are missing.`);\n }\n out.weightSpecs = weightSpecs;\n // Load meta-data fields.\n const metadataString = this.LS.getItem(this.keys.modelMetadata);\n if (metadataString != null) {\n const metadata = JSON.parse(metadataString);\n out.format = metadata['format'];\n out.generatedBy = metadata['generatedBy'];\n out.convertedBy = metadata['convertedBy'];\n out.userDefinedMetadata = metadata['userDefinedMetadata'];\n }\n // Load weight data.\n const weightDataBase64 = this.LS.getItem(this.keys.weightData);\n if (weightDataBase64 == null) {\n throw new Error(`In local storage, the binary weight values of model ` +\n `'${this.modelPath}' are missing.`);\n }\n out.weightData = base64StringToArrayBuffer(weightDataBase64);\n return out;\n }\n}\nBrowserLocalStorage.URL_SCHEME = 'localstorage://';\nexport const localStorageRouter = (url) => {\n if (!env().getBool('IS_BROWSER')) {\n return null;\n }\n else {\n if (!Array.isArray(url) && url.startsWith(BrowserLocalStorage.URL_SCHEME)) {\n return browserLocalStorage(url.slice(BrowserLocalStorage.URL_SCHEME.length));\n }\n else {\n return null;\n }\n }\n};\nIORouterRegistry.registerSaveRouter(localStorageRouter);\nIORouterRegistry.registerLoadRouter(localStorageRouter);\n/**\n * Factory function for local storage IOHandler.\n *\n * This `IOHandler` supports both `save` and `load`.\n *\n * For each model's saved artifacts, four items are saved to local storage.\n * - `${PATH_SEPARATOR}/${modelPath}/info`: Contains meta-info about the\n * model, such as date saved, type of the topology, size in bytes, etc.\n * - `${PATH_SEPARATOR}/${modelPath}/topology`: Model topology. 
For Keras-\n * style models, this is a stringized JSON.\n * - `${PATH_SEPARATOR}/${modelPath}/weight_specs`: Weight specs of the\n * model, can be used to decode the saved binary weight values (see\n * item below).\n * - `${PATH_SEPARATOR}/${modelPath}/weight_data`: Concatenated binary\n * weight values, stored as a base64-encoded string.\n *\n * Saving may throw an `Error` if the total size of the artifacts exceed the\n * browser-specific quota.\n *\n * @param modelPath A unique identifier for the model to be saved. Must be a\n * non-empty string.\n * @returns An instance of `IOHandler`, which can be used with, e.g.,\n * `tf.Model.save`.\n */\nexport function browserLocalStorage(modelPath) {\n return new BrowserLocalStorage(modelPath);\n}\nexport class BrowserLocalStorageManager {\n constructor() {\n assert(env().getBool('IS_BROWSER'), () => 'Current environment is not a web browser');\n assert(typeof window === 'undefined' ||\n typeof window.localStorage !== 'undefined', () => 'Current browser does not appear to support localStorage');\n this.LS = window.localStorage;\n }\n async listModels() {\n const out = {};\n const prefix = PATH_PREFIX + PATH_SEPARATOR;\n const suffix = PATH_SEPARATOR + INFO_SUFFIX;\n for (let i = 0; i < this.LS.length; ++i) {\n const key = this.LS.key(i);\n if (key.startsWith(prefix) && key.endsWith(suffix)) {\n const modelPath = getModelPathFromKey(key);\n out[modelPath] = JSON.parse(this.LS.getItem(key));\n }\n }\n return out;\n }\n async removeModel(path) {\n path = maybeStripScheme(path);\n const keys = getModelKeys(path);\n if (this.LS.getItem(keys.info) == null) {\n throw new Error(`Cannot find model at path '${path}'`);\n }\n const info = JSON.parse(this.LS.getItem(keys.info));\n this.LS.removeItem(keys.info);\n this.LS.removeItem(keys.topology);\n this.LS.removeItem(keys.weightSpecs);\n this.LS.removeItem(keys.weightData);\n return info;\n }\n}\n//# sourceMappingURL=local_storage.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * Classes and functions for model management across multiple storage mediums.\n *\n * Supported client actions:\n * - Listing models on all registered storage mediums.\n * - Remove model by URL from any registered storage mediums, by using URL\n * string.\n * - Moving or copying model from one path to another in the same medium or from\n * one medium to another, by using URL strings.\n */\nimport { assert } from '../util';\nimport { IORouterRegistry } from './router_registry';\nconst URL_SCHEME_SUFFIX = '://';\nexport class ModelStoreManagerRegistry {\n constructor() {\n this.managers = {};\n }\n static getInstance() {\n if (ModelStoreManagerRegistry.instance == null) {\n ModelStoreManagerRegistry.instance = new ModelStoreManagerRegistry();\n }\n return ModelStoreManagerRegistry.instance;\n }\n /**\n * Register a save-handler router.\n *\n * @param saveRouter A function that maps a URL-like string onto an instance\n * of `IOHandler` with the `save` method defined or `null`.\n */\n static registerManager(scheme, manager) {\n assert(scheme != null, () => 'scheme must not be undefined or null.');\n if (scheme.endsWith(URL_SCHEME_SUFFIX)) {\n scheme = scheme.slice(0, scheme.indexOf(URL_SCHEME_SUFFIX));\n }\n assert(scheme.length > 0, () => 'scheme must not be an empty string.');\n const registry = ModelStoreManagerRegistry.getInstance();\n assert(registry.managers[scheme] == null, () => `A model store manager is already registered for scheme '${scheme}'.`);\n registry.managers[scheme] = manager;\n }\n static getManager(scheme) {\n const manager = this.getInstance().managers[scheme];\n if (manager == null) {\n throw new Error(`Cannot find model manager for scheme '${scheme}'`);\n }\n return manager;\n }\n static getSchemes() {\n return Object.keys(this.getInstance().managers);\n }\n}\n/**\n * Helper method for parsing a URL string into a scheme and a path.\n *\n * @param url E.g., 'localstorage://my-model'\n * @returns A dictionary with two fields: scheme and path.\n * Scheme: e.g., 'localstorage' in the example above.\n * Path: e.g., 'my-model' in the example above.\n */\nfunction parseURL(url) {\n if (url.indexOf(URL_SCHEME_SUFFIX) === -1) {\n throw new Error(`The url string provided does not contain a scheme. 
` +\n `Supported schemes are: ` +\n `${ModelStoreManagerRegistry.getSchemes().join(',')}`);\n }\n return {\n scheme: url.split(URL_SCHEME_SUFFIX)[0],\n path: url.split(URL_SCHEME_SUFFIX)[1],\n };\n}\nasync function cloneModelInternal(sourceURL, destURL, deleteSource = false) {\n assert(sourceURL !== destURL, () => `Old path and new path are the same: '${sourceURL}'`);\n const loadHandlers = IORouterRegistry.getLoadHandlers(sourceURL);\n assert(loadHandlers.length > 0, () => `Copying failed because no load handler is found for source URL ${sourceURL}.`);\n assert(loadHandlers.length < 2, () => `Copying failed because more than one (${loadHandlers.length}) ` +\n `load handlers for source URL ${sourceURL}.`);\n const loadHandler = loadHandlers[0];\n const saveHandlers = IORouterRegistry.getSaveHandlers(destURL);\n assert(saveHandlers.length > 0, () => `Copying failed because no save handler is found for destination ` +\n `URL ${destURL}.`);\n assert(saveHandlers.length < 2, () => `Copying failed because more than one (${saveHandlers.length}) ` +\n `save handlers for destination URL ${destURL}.`);\n const saveHandler = saveHandlers[0];\n const sourceScheme = parseURL(sourceURL).scheme;\n const sourcePath = parseURL(sourceURL).path;\n const sameMedium = sourceScheme === parseURL(destURL).scheme;\n const modelArtifacts = await loadHandler.load();\n // If moving within the same storage medium, remove the old model as soon as\n // the loading is done. Without doing this, it is possible that the combined\n // size of the two models will cause the cloning to fail.\n if (deleteSource && sameMedium) {\n await ModelStoreManagerRegistry.getManager(sourceScheme)\n .removeModel(sourcePath);\n }\n const saveResult = await saveHandler.save(modelArtifacts);\n // If moving between mediums, the deletion is done after the save succeeds.\n // This guards against the case in which saving to the destination medium\n // fails.\n if (deleteSource && !sameMedium) {\n await ModelStoreManagerRegistry.getManager(sourceScheme)\n .removeModel(sourcePath);\n }\n return saveResult.modelArtifactsInfo;\n}\n/**\n * List all models stored in registered storage mediums.\n *\n * For a web browser environment, the registered mediums are Local Storage and\n * IndexedDB.\n *\n * ```js\n * // First create and save a model.\n * const model = tf.sequential();\n * model.add(tf.layers.dense(\n * {units: 1, inputShape: [10], activation: 'sigmoid'}));\n * await model.save('localstorage://demo/management/model1');\n *\n * // Then list existing models.\n * console.log(JSON.stringify(await tf.io.listModels()));\n *\n * // Delete the model.\n * await tf.io.removeModel('localstorage://demo/management/model1');\n *\n * // List models again.\n * console.log(JSON.stringify(await tf.io.listModels()));\n * ```\n *\n * @returns A `Promise` of a dictionary mapping URLs of existing models to\n * their model artifacts info. URLs include medium-specific schemes, e.g.,\n * 'indexeddb://my/model/1'. 
Model artifacts info include type of the\n * model's topology, byte sizes of the topology, weights, etc.\n *\n * @doc {\n * heading: 'Models',\n * subheading: 'Management',\n * namespace: 'io',\n * ignoreCI: true\n * }\n */\nasync function listModels() {\n const schemes = ModelStoreManagerRegistry.getSchemes();\n const out = {};\n for (const scheme of schemes) {\n const schemeOut = await ModelStoreManagerRegistry.getManager(scheme).listModels();\n for (const path in schemeOut) {\n const url = scheme + URL_SCHEME_SUFFIX + path;\n out[url] = schemeOut[path];\n }\n }\n return out;\n}\n/**\n * Remove a model specified by URL from a registered storage medium.\n *\n * ```js\n * // First create and save a model.\n * const model = tf.sequential();\n * model.add(tf.layers.dense(\n * {units: 1, inputShape: [10], activation: 'sigmoid'}));\n * await model.save('localstorage://demo/management/model1');\n *\n * // Then list existing models.\n * console.log(JSON.stringify(await tf.io.listModels()));\n *\n * // Delete the model.\n * await tf.io.removeModel('localstorage://demo/management/model1');\n *\n * // List models again.\n * console.log(JSON.stringify(await tf.io.listModels()));\n * ```\n *\n * @param url A URL to a stored model, with a scheme prefix, e.g.,\n * 'localstorage://my-model-1', 'indexeddb://my/model/2'.\n * @returns ModelArtifactsInfo of the deleted model (if and only if deletion\n * is successful).\n * @throws Error if deletion fails, e.g., if no model exists at `path`.\n *\n * @doc {\n * heading: 'Models',\n * subheading: 'Management',\n * namespace: 'io',\n * ignoreCI: true\n * }\n */\nasync function removeModel(url) {\n const schemeAndPath = parseURL(url);\n const manager = ModelStoreManagerRegistry.getManager(schemeAndPath.scheme);\n return manager.removeModel(schemeAndPath.path);\n}\n/**\n * Copy a model from one URL to another.\n *\n * This function supports:\n *\n * 1. Copying within a storage medium, e.g.,\n * `tf.io.copyModel('localstorage://model-1', 'localstorage://model-2')`\n * 2. 
Copying between two storage mediums, e.g.,\n * `tf.io.copyModel('localstorage://model-1', 'indexeddb://model-1')`\n *\n * ```js\n * // First create and save a model.\n * const model = tf.sequential();\n * model.add(tf.layers.dense(\n * {units: 1, inputShape: [10], activation: 'sigmoid'}));\n * await model.save('localstorage://demo/management/model1');\n *\n * // Then list existing models.\n * console.log(JSON.stringify(await tf.io.listModels()));\n *\n * // Copy the model, from Local Storage to IndexedDB.\n * await tf.io.copyModel(\n * 'localstorage://demo/management/model1',\n * 'indexeddb://demo/management/model1');\n *\n * // List models again.\n * console.log(JSON.stringify(await tf.io.listModels()));\n *\n * // Remove both models.\n * await tf.io.removeModel('localstorage://demo/management/model1');\n * await tf.io.removeModel('indexeddb://demo/management/model1');\n * ```\n *\n * @param sourceURL Source URL of copying.\n * @param destURL Destination URL of copying.\n * @returns ModelArtifactsInfo of the copied model (if and only if copying\n * is successful).\n * @throws Error if copying fails, e.g., if no model exists at `sourceURL`, or\n * if `oldPath` and `newPath` are identical.\n *\n * @doc {\n * heading: 'Models',\n * subheading: 'Management',\n * namespace: 'io',\n * ignoreCI: true\n * }\n */\nasync function copyModel(sourceURL, destURL) {\n const deleteSource = false;\n return cloneModelInternal(sourceURL, destURL, deleteSource);\n}\n/**\n * Move a model from one URL to another.\n *\n * This function supports:\n *\n * 1. Moving within a storage medium, e.g.,\n * `tf.io.moveModel('localstorage://model-1', 'localstorage://model-2')`\n * 2. Moving between two storage mediums, e.g.,\n * `tf.io.moveModel('localstorage://model-1', 'indexeddb://model-1')`\n *\n * ```js\n * // First create and save a model.\n * const model = tf.sequential();\n * model.add(tf.layers.dense(\n * {units: 1, inputShape: [10], activation: 'sigmoid'}));\n * await model.save('localstorage://demo/management/model1');\n *\n * // Then list existing models.\n * console.log(JSON.stringify(await tf.io.listModels()));\n *\n * // Move the model, from Local Storage to IndexedDB.\n * await tf.io.moveModel(\n * 'localstorage://demo/management/model1',\n * 'indexeddb://demo/management/model1');\n *\n * // List models again.\n * console.log(JSON.stringify(await tf.io.listModels()));\n *\n * // Remove the moved model.\n * await tf.io.removeModel('indexeddb://demo/management/model1');\n * ```\n *\n * @param sourceURL Source URL of moving.\n * @param destURL Destination URL of moving.\n * @returns ModelArtifactsInfo of the copied model (if and only if copying\n * is successful).\n * @throws Error if moving fails, e.g., if no model exists at `sourceURL`, or\n * if `oldPath` and `newPath` are identical.\n *\n * @doc {\n * heading: 'Models',\n * subheading: 'Management',\n * namespace: 'io',\n * ignoreCI: true\n * }\n */\nasync function moveModel(sourceURL, destURL) {\n const deleteSource = true;\n return cloneModelInternal(sourceURL, destURL, deleteSource);\n}\nexport { moveModel, copyModel, removeModel, listModels };\n//# sourceMappingURL=model_management.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport '../flags';\nimport { env } from '../environment';\nimport { BrowserIndexedDB, BrowserIndexedDBManager } from '../io/indexed_db';\nimport { BrowserLocalStorage, BrowserLocalStorageManager } from '../io/local_storage';\nimport { ModelStoreManagerRegistry } from '../io/model_management';\nexport class PlatformBrowser {\n fetch(path, init) {\n return fetch(path, init);\n }\n now() {\n return performance.now();\n }\n encode(text, encoding) {\n if (encoding !== 'utf-8' && encoding !== 'utf8') {\n throw new Error(`Browser's encoder only supports utf-8, but got ${encoding}`);\n }\n if (this.textEncoder == null) {\n this.textEncoder = new TextEncoder();\n }\n return this.textEncoder.encode(text);\n }\n decode(bytes, encoding) {\n return new TextDecoder(encoding).decode(bytes);\n }\n}\nif (env().get('IS_BROWSER')) {\n env().setPlatform('browser', new PlatformBrowser());\n // Register LocalStorage IOHandler\n try {\n ModelStoreManagerRegistry.registerManager(BrowserLocalStorage.URL_SCHEME, new BrowserLocalStorageManager());\n }\n catch (err) {\n }\n // Register IndexedDB IOHandler\n try {\n ModelStoreManagerRegistry.registerManager(BrowserIndexedDB.URL_SCHEME, new BrowserIndexedDBManager());\n }\n catch (err) {\n }\n}\n//# sourceMappingURL=platform_browser.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from '../environment';\n// We are wrapping this within an object so it can be stubbed by Jasmine.\nexport const getNodeFetch = {\n // tslint:disable-next-line:no-require-imports\n importFetch: () => require('node-fetch')\n};\nlet systemFetch;\n// These getters and setters are for testing so we don't export a mutable\n// variable.\nexport function resetSystemFetch() {\n systemFetch = null;\n}\nexport function setSystemFetch(fetchFn) {\n systemFetch = fetchFn;\n}\nexport function getSystemFetch() {\n return systemFetch;\n}\nexport class PlatformNode {\n constructor() {\n // tslint:disable-next-line:no-require-imports\n this.util = require('util');\n // According to the spec, the built-in encoder can do only UTF-8 encoding.\n // https://developer.mozilla.org/en-US/docs/Web/API/TextEncoder/TextEncoder\n this.textEncoder = new this.util.TextEncoder();\n }\n fetch(path, requestInits) {\n if (env().global.fetch != null) {\n return env().global.fetch(path, requestInits);\n }\n if (systemFetch == null) {\n systemFetch = getNodeFetch.importFetch();\n }\n return systemFetch(path, requestInits);\n }\n now() {\n const time = process.hrtime();\n return time[0] * 1000 + time[1] / 1000000;\n }\n encode(text, encoding) {\n if (encoding !== 'utf-8' && encoding !== 'utf8') {\n throw new Error(`Node built-in encoder only supports utf-8, but got ${encoding}`);\n }\n return this.textEncoder.encode(text);\n }\n decode(bytes, encoding) {\n if (bytes.length === 0) {\n return '';\n }\n return new this.util.TextDecoder(encoding).decode(bytes);\n }\n}\nif (env().get('IS_NODE')) {\n env().setPlatform('node', new PlatformNode());\n}\n//# sourceMappingURL=platform_node.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { TensorBuffer } from '../tensor';\nimport * as util from '../util';\n/**\n * Creates an empty `tf.TensorBuffer` with the specified `shape` and `dtype`.\n *\n * The values are stored in CPU as `TypedArray`. 
Fill the buffer using\n * `buffer.set()`, or by modifying directly `buffer.values`.\n *\n * When done, call `buffer.toTensor()` to get an immutable `tf.Tensor` with\n * those values.\n *\n * ```js\n * // Create a buffer and set values at particular indices.\n * const buffer = tf.buffer([2, 2]);\n * buffer.set(3, 0, 0);\n * buffer.set(5, 1, 0);\n *\n * // Convert the buffer back to a tensor.\n * buffer.toTensor().print();\n * ```\n *\n * @param shape An array of integers defining the output tensor shape.\n * @param dtype The dtype of the buffer. Defaults to 'float32'.\n * @param values The values of the buffer as `TypedArray`. Defaults to\n * zeros.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function buffer(shape, dtype = 'float32', values) {\n dtype = dtype || 'float32';\n util.assertNonNegativeIntegerDimensions(shape);\n return new TensorBuffer(shape, dtype, values);\n}\n//# sourceMappingURL=buffer.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Cast } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * Casts a `tf.Tensor` to a new dtype.\n *\n * ```js\n * const x = tf.tensor1d([1.5, 2.5, 3]);\n * tf.cast(x, 'int32').print();\n * ```\n * @param x The input tensor to be casted.\n * @param dtype The dtype to cast the input tensor to.\n *\n * @doc {heading: 'Tensors', subheading: 'Transformations'}\n */\nfunction cast_(x, dtype) {\n const $x = convertToTensor(x, 'x', 'cast');\n // Sanity checks.\n if (!util.isValidDtype(dtype)) {\n throw new Error(`Failed to cast to unknown dtype ${dtype}`);\n }\n if (dtype === 'string' && $x.dtype !== 'string' ||\n dtype !== 'string' && $x.dtype === 'string') {\n throw new Error('Only strings can be casted to strings');\n }\n const inputs = { x: $x };\n const attrs = { dtype };\n return ENGINE.runKernelFunc(backend => backend.cast($x, dtype), inputs, null /* grad */, Cast, attrs);\n}\nexport const cast = op({ cast_ });\n//# sourceMappingURL=cast.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Identity } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Creates a new tensor with the same values and shape as the specified\n * tensor.\n *\n * ```js\n * const x = tf.tensor([1, 2]);\n *\n * x.clone().print();\n * ```\n *\n * @param x The tensor to clone.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nfunction clone_(x) {\n const $x = convertToTensor(x, 'x', 'clone', null);\n const forward = () => ENGINE.makeTensorFromDataId($x.dataId, $x.shape, $x.dtype);\n const inputs = { x: $x };\n // Note this op is called tf.identity in python. Hence the kernel name used\n // here.\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Identity);\n}\nexport const clone = op({ clone_ });\n//# sourceMappingURL=clone.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * Prints information about the `tf.Tensor` including its data.\n *\n * ```js\n * const verbose = true;\n * tf.tensor2d([1, 2, 3, 4], [2, 2]).print(verbose);\n * ```\n * @param x The tensor to be printed.\n * @param verbose Whether to print verbose information about the ` Tensor`,\n * including dtype and size.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function print(x, verbose = false) {\n console.log(x.toString(verbose));\n}\n//# sourceMappingURL=print.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// Required side effectful code for tfjs-core\n// Set up Engine and ENV\nimport { getOrMakeEngine } from './engine';\ngetOrMakeEngine();\n// Register backend-agnostic flags.\nimport './flags';\n// Register platforms\nimport './platforms/platform_browser';\nimport './platforms/platform_node';\n// Set up OpHandler\nimport { buffer } from './ops/buffer';\nimport { cast } from './ops/cast';\nimport { clone } from './ops/clone';\nimport { print } from './ops/print';\nimport { setOpHandler } from './tensor';\nconst opHandler = {\n buffer,\n cast,\n clone,\n print\n};\nsetOpHandler(opHandler);\n//# sourceMappingURL=base_side_effects.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * IOHandlers related to files, such as browser-triggered file downloads,\n * user-selected files in browser.\n */\nimport '../flags';\nimport { env } from '../environment';\nimport { basename, concatenateArrayBuffers, getModelArtifactsInfoForJSON } from './io_utils';\nimport { IORouterRegistry } from './router_registry';\nconst DEFAULT_FILE_NAME_PREFIX = 'model';\nconst DEFAULT_JSON_EXTENSION_NAME = '.json';\nconst DEFAULT_WEIGHT_DATA_EXTENSION_NAME = '.weights.bin';\nfunction defer(f) {\n return new Promise(resolve => setTimeout(resolve)).then(f);\n}\nexport class BrowserDownloads {\n constructor(fileNamePrefix) {\n if (!env().getBool('IS_BROWSER')) {\n // TODO(cais): Provide info on what IOHandlers are available under the\n // current environment.\n throw new Error('browserDownloads() cannot proceed because the current environment ' +\n 'is not a browser.');\n }\n if (fileNamePrefix.startsWith(BrowserDownloads.URL_SCHEME)) {\n fileNamePrefix = fileNamePrefix.slice(BrowserDownloads.URL_SCHEME.length);\n }\n if (fileNamePrefix == null || fileNamePrefix.length === 0) {\n fileNamePrefix = DEFAULT_FILE_NAME_PREFIX;\n }\n this.modelTopologyFileName = fileNamePrefix + DEFAULT_JSON_EXTENSION_NAME;\n this.weightDataFileName =\n fileNamePrefix + DEFAULT_WEIGHT_DATA_EXTENSION_NAME;\n }\n async save(modelArtifacts) {\n if (typeof (document) === 'undefined') {\n throw new Error('Browser downloads are not supported in ' +\n 'this environment since `document` is not present');\n }\n const 
weightsURL = window.URL.createObjectURL(new Blob([modelArtifacts.weightData], { type: 'application/octet-stream' }));\n if (modelArtifacts.modelTopology instanceof ArrayBuffer) {\n throw new Error('BrowserDownloads.save() does not support saving model topology ' +\n 'in binary formats yet.');\n }\n else {\n const weightsManifest = [{\n paths: ['./' + this.weightDataFileName],\n weights: modelArtifacts.weightSpecs\n }];\n const modelTopologyAndWeightManifest = {\n modelTopology: modelArtifacts.modelTopology,\n format: modelArtifacts.format,\n generatedBy: modelArtifacts.generatedBy,\n convertedBy: modelArtifacts.convertedBy,\n weightsManifest\n };\n const modelTopologyAndWeightManifestURL = window.URL.createObjectURL(new Blob([JSON.stringify(modelTopologyAndWeightManifest)], { type: 'application/json' }));\n // If anchor elements are not provided, create them without attaching them\n // to parents, so that the downloaded file names can be controlled.\n const jsonAnchor = this.jsonAnchor == null ? document.createElement('a') :\n this.jsonAnchor;\n jsonAnchor.download = this.modelTopologyFileName;\n jsonAnchor.href = modelTopologyAndWeightManifestURL;\n // Trigger downloads by evoking a click event on the download anchors.\n // When multiple downloads are started synchronously, Firefox will only\n // save the last one.\n await defer(() => jsonAnchor.dispatchEvent(new MouseEvent('click')));\n if (modelArtifacts.weightData != null) {\n const weightDataAnchor = this.weightDataAnchor == null ?\n document.createElement('a') :\n this.weightDataAnchor;\n weightDataAnchor.download = this.weightDataFileName;\n weightDataAnchor.href = weightsURL;\n await defer(() => weightDataAnchor.dispatchEvent(new MouseEvent('click')));\n }\n return { modelArtifactsInfo: getModelArtifactsInfoForJSON(modelArtifacts) };\n }\n }\n}\nBrowserDownloads.URL_SCHEME = 'downloads://';\nclass BrowserFiles {\n constructor(files) {\n if (files == null || files.length < 1) {\n throw new Error(`When calling browserFiles, at least 1 file is required, ` +\n `but received ${files}`);\n }\n this.files = files;\n }\n async load() {\n const jsonFile = this.files[0];\n const weightFiles = this.files.slice(1);\n return new Promise((resolve, reject) => {\n const jsonReader = new FileReader();\n jsonReader.onload = (event) => {\n // tslint:disable-next-line:no-any\n const modelJSON = JSON.parse(event.target.result);\n const modelTopology = modelJSON.modelTopology;\n if (modelTopology == null) {\n reject(new Error(`modelTopology field is missing from file ${jsonFile.name}`));\n return;\n }\n if (weightFiles.length === 0) {\n resolve({ modelTopology });\n }\n const weightsManifest = modelJSON.weightsManifest;\n if (weightsManifest == null) {\n reject(new Error(`weightManifest field is missing from file ${jsonFile.name}`));\n return;\n }\n let pathToFile;\n try {\n pathToFile =\n this.checkManifestAndWeightFiles(weightsManifest, weightFiles);\n }\n catch (err) {\n reject(err);\n return;\n }\n const weightSpecs = [];\n const paths = [];\n const perFileBuffers = [];\n weightsManifest.forEach(weightsGroup => {\n weightsGroup.paths.forEach(path => {\n paths.push(path);\n perFileBuffers.push(null);\n });\n weightSpecs.push(...weightsGroup.weights);\n });\n weightsManifest.forEach(weightsGroup => {\n weightsGroup.paths.forEach(path => {\n const weightFileReader = new FileReader();\n weightFileReader.onload = (event) => {\n // tslint:disable-next-line:no-any\n const weightData = event.target.result;\n const index = paths.indexOf(path);\n 
perFileBuffers[index] = weightData;\n if (perFileBuffers.indexOf(null) === -1) {\n resolve({\n modelTopology,\n weightSpecs,\n weightData: concatenateArrayBuffers(perFileBuffers),\n format: modelJSON.format,\n generatedBy: modelJSON.generatedBy,\n convertedBy: modelJSON.convertedBy,\n userDefinedMetadata: modelJSON.userDefinedMetadata\n });\n }\n };\n weightFileReader.onerror = error => reject(`Failed to read weights data from file of path '${path}'.`);\n weightFileReader.readAsArrayBuffer(pathToFile[path]);\n });\n });\n };\n jsonReader.onerror = error => reject(`Failed to read model topology and weights manifest JSON ` +\n `from file '${jsonFile.name}'. BrowserFiles supports loading ` +\n `Keras-style tf.Model artifacts only.`);\n jsonReader.readAsText(jsonFile);\n });\n }\n /**\n * Check the compatibility between weights manifest and weight files.\n */\n checkManifestAndWeightFiles(manifest, files) {\n const basenames = [];\n const fileNames = files.map(file => basename(file.name));\n const pathToFile = {};\n for (const group of manifest) {\n group.paths.forEach(path => {\n const pathBasename = basename(path);\n if (basenames.indexOf(pathBasename) !== -1) {\n throw new Error(`Duplicate file basename found in weights manifest: ` +\n `'${pathBasename}'`);\n }\n basenames.push(pathBasename);\n if (fileNames.indexOf(pathBasename) === -1) {\n throw new Error(`Weight file with basename '${pathBasename}' is not provided.`);\n }\n else {\n pathToFile[path] = files[fileNames.indexOf(pathBasename)];\n }\n });\n }\n if (basenames.length !== files.length) {\n throw new Error(`Mismatch in the number of files in weights manifest ` +\n `(${basenames.length}) and the number of weight files provided ` +\n `(${files.length}).`);\n }\n return pathToFile;\n }\n}\nexport const browserDownloadsRouter = (url) => {\n if (!env().getBool('IS_BROWSER')) {\n return null;\n }\n else {\n if (!Array.isArray(url) && url.startsWith(BrowserDownloads.URL_SCHEME)) {\n return browserDownloads(url.slice(BrowserDownloads.URL_SCHEME.length));\n }\n else {\n return null;\n }\n }\n};\nIORouterRegistry.registerSaveRouter(browserDownloadsRouter);\n/**\n * Creates an IOHandler that triggers file downloads from the browser.\n *\n * The returned `IOHandler` instance can be used with model exporting methods such\n * as `tf.Model.save` and supports only saving.\n *\n * ```js\n * const model = tf.sequential();\n * model.add(tf.layers.dense(\n * {units: 1, inputShape: [10], activation: 'sigmoid'}));\n * const saveResult = await model.save('downloads://mymodel');\n * // This will trigger downloading of two files:\n * // 'mymodel.json' and 'mymodel.weights.bin'.\n * console.log(saveResult);\n * ```\n *\n * @param fileNamePrefix Prefix name of the files to be downloaded. For use with\n * `tf.Model`, `fileNamePrefix` should follow either of the following two\n * formats:\n * 1. `null` or `undefined`, in which case the default file\n * names will be used:\n * - 'model.json' for the JSON file containing the model topology and\n * weights manifest.\n * - 'model.weights.bin' for the binary file containing the binary weight\n * values.\n * 2. 
A single string or an Array of a single string, as the file name prefix.\n * For example, if `'foo'` is provided, the downloaded JSON\n * file and binary weights file will be named 'foo.json' and\n * 'foo.weights.bin', respectively.\n * @param config Additional configuration for triggering downloads.\n * @returns An instance of `BrowserDownloads` `IOHandler`.\n *\n * @doc {\n * heading: 'Models',\n * subheading: 'Loading',\n * namespace: 'io',\n * ignoreCI: true\n * }\n */\nexport function browserDownloads(fileNamePrefix = 'model') {\n return new BrowserDownloads(fileNamePrefix);\n}\n/**\n * Creates an IOHandler that loads model artifacts from user-selected files.\n *\n * This method can be used for loading from files such as user-selected files\n * in the browser.\n * When used in conjunction with `tf.loadLayersModel`, an instance of\n * `tf.LayersModel` (Keras-style) can be constructed from the loaded artifacts.\n *\n * ```js\n * // Note: This code snippet won't run properly without the actual file input\n * // elements in the HTML DOM.\n *\n * // Suppose there are two HTML file input (` `)\n * // elements.\n * const uploadJSONInput = document.getElementById('upload-json');\n * const uploadWeightsInput = document.getElementById('upload-weights');\n * const model = await tf.loadLayersModel(tf.io.browserFiles(\n * [uploadJSONInput.files[0], uploadWeightsInput.files[0]]));\n * ```\n *\n * @param files `File`s to load from. Currently, this function supports only\n * loading from files that contain Keras-style models (i.e., `tf.Model`s), for\n * which an `Array` of `File`s is expected (in that order):\n * - A JSON file containing the model topology and weight manifest.\n * - Optionally, One or more binary files containing the binary weights.\n * These files must have names that match the paths in the `weightsManifest`\n * contained by the aforementioned JSON file, or errors will be thrown\n * during loading. These weights files have the same format as the ones\n * generated by `tensorflowjs_converter` that comes with the `tensorflowjs`\n * Python PIP package. If no weights files are provided, only the model\n * topology will be loaded from the JSON file above.\n * @returns An instance of `Files` `IOHandler`.\n *\n * @doc {\n * heading: 'Models',\n * subheading: 'Loading',\n * namespace: 'io',\n * ignoreCI: true\n * }\n */\nexport function browserFiles(files) {\n return new BrowserFiles(files);\n}\n//# sourceMappingURL=browser_files.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { assert } from '../util';\n/**\n * Monitor Promise.all progress, fire onProgress callback function.\n *\n * @param promises Promise list going to be monitored\n * @param onProgress Callback function. Fired when a promise resolved.\n * @param startFraction Optional fraction start. Default to 0.\n * @param endFraction Optional fraction end. 
Default to 1.\n */\nexport function monitorPromisesProgress(promises, onProgress, startFraction, endFraction) {\n checkPromises(promises);\n startFraction = startFraction == null ? 0 : startFraction;\n endFraction = endFraction == null ? 1 : endFraction;\n checkFraction(startFraction, endFraction);\n let resolvedPromise = 0;\n const registerMonitor = (promise) => {\n promise.then(value => {\n const fraction = startFraction +\n ++resolvedPromise / promises.length * (endFraction - startFraction);\n // pass fraction as parameter to callback function.\n onProgress(fraction);\n return value;\n });\n return promise;\n };\n function checkPromises(promises) {\n assert(promises != null && Array.isArray(promises) && promises.length > 0, () => 'promises must be a none empty array');\n }\n function checkFraction(startFraction, endFraction) {\n assert(startFraction >= 0 && startFraction <= 1, () => `Progress fraction must be in range [0, 1], but ` +\n `got startFraction ${startFraction}`);\n assert(endFraction >= 0 && endFraction <= 1, () => `Progress fraction must be in range [0, 1], but ` +\n `got endFraction ${endFraction}`);\n assert(endFraction >= startFraction, () => `startFraction must be no more than endFraction, but ` +\n `got startFraction ${startFraction} and endFraction ` +\n `${endFraction}`);\n }\n return Promise.all(promises.map(registerMonitor));\n}\n//# sourceMappingURL=progress.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from '../environment';\nimport * as util from '../util';\nimport { decodeWeights } from './io_utils';\nimport { monitorPromisesProgress } from './progress';\nimport { DTYPE_VALUE_SIZE_MAP } from './types';\n/**\n * Reads binary weights data from a number of URLs.\n *\n * @param fetchURLs URLs to send the HTTP requests at, using `fetch` calls.\n * @param requestOptions RequestInit (options) for the HTTP requests.\n * @param fetchFunc Optional overriding value for the `window.fetch` function.\n * @param onProgress Optional, progress callback function, fired periodically\n * before the load is completed.\n * @returns A `Promise` of an Array of `ArrayBuffer`. The Array has the same\n * length as `fetchURLs`.\n */\nexport async function loadWeightsAsArrayBuffer(fetchURLs, loadOptions) {\n if (loadOptions == null) {\n loadOptions = {};\n }\n const fetchFunc = loadOptions.fetchFunc == null ? 
env().platform.fetch :\n loadOptions.fetchFunc;\n // Create the requests for all of the weights in parallel.\n const requests = fetchURLs.map(fetchURL => fetchFunc(fetchURL, loadOptions.requestInit, { isBinary: true }));\n const fetchStartFraction = 0;\n const fetchEndFraction = 0.5;\n const responses = loadOptions.onProgress == null ?\n await Promise.all(requests) :\n await monitorPromisesProgress(requests, loadOptions.onProgress, fetchStartFraction, fetchEndFraction);\n const bufferPromises = responses.map(response => response.arrayBuffer());\n const bufferStartFraction = 0.5;\n const bufferEndFraction = 1;\n const buffers = loadOptions.onProgress == null ?\n await Promise.all(bufferPromises) :\n await monitorPromisesProgress(bufferPromises, loadOptions.onProgress, bufferStartFraction, bufferEndFraction);\n return buffers;\n}\n/**\n * Reads a weights manifest JSON configuration, fetches the weights and\n * returns them as `Tensor`s.\n *\n * @param manifest The weights manifest JSON.\n * @param filePathPrefix The path prefix for filenames given in the manifest.\n * Defaults to the empty string.\n * @param weightNames The names of the weights to be fetched.\n */\nexport async function loadWeights(manifest, filePathPrefix = '', weightNames, requestInit) {\n // TODO(nsthorat): Groups are currently fetched atomically. If you need a\n // single weight from a group, the whole group will be fetched. At a future\n // date, we should support fetching only the individual shards within a\n // group that are needed to reconstruct the requested weight.\n // TODO(cais): Use `decodeWeights` for implementation.\n const fetchWeights = (fetchUrls) => loadWeightsAsArrayBuffer(fetchUrls, { requestInit });\n const loadWeights = weightsLoaderFactory(fetchWeights);\n return loadWeights(manifest, filePathPrefix, weightNames);\n}\n/**\n * Creates a function, which reads a weights manifest JSON configuration,\n * fetches the weight files using the specified function and returns them as\n * `Tensor`s.\n *\n * ```js\n * // example for creating a nodejs weight loader, which reads the weight files\n * // from disk using fs.readFileSync\n *\n * import * as fs from 'fs'\n *\n * const fetchWeightsFromDisk = (filePaths: string[]) =>\n * filePaths.map(filePath => fs.readFileSync(filePath).buffer)\n *\n * const loadWeights = tf.io.weightsLoaderFactory(fetchWeightsFromDisk)\n *\n * const manifest = JSON.parse(\n * fs.readFileSync('./my_model-weights_manifest').toString()\n * )\n * const weightMap = await loadWeights(manifest, './')\n * ```\n * @param fetchWeightsFunction The function used for fetching the weight files.\n * @returns Weight loading function.\n */\nexport function weightsLoaderFactory(fetchWeightsFunction) {\n return async (manifest, filePathPrefix = '', weightNames) => {\n // Collect all the groups, weights, and their relative offsets to be\n // fetched.\n const groupIndicesToFetchMap = manifest.map(() => false);\n const groupWeightsToFetch = {};\n const weightsFound = weightNames != null ? 
weightNames.map(() => false) : [];\n const allManifestWeightNames = [];\n manifest.forEach((manifestGroupConfig, groupIndex) => {\n let groupOffset = 0;\n manifestGroupConfig.weights.forEach(weightsEntry => {\n const rawDtype = ('quantization' in weightsEntry) ?\n weightsEntry.quantization.dtype :\n weightsEntry.dtype;\n const weightsBytes = DTYPE_VALUE_SIZE_MAP[rawDtype] *\n util.sizeFromShape(weightsEntry.shape);\n const enqueueWeightsForFetchingFn = () => {\n groupIndicesToFetchMap[groupIndex] = true;\n if (groupWeightsToFetch[groupIndex] == null) {\n groupWeightsToFetch[groupIndex] = [];\n }\n groupWeightsToFetch[groupIndex].push({\n manifestEntry: weightsEntry,\n groupOffset,\n sizeBytes: weightsBytes\n });\n };\n if (weightNames != null) {\n weightNames.forEach((weightName, weightIndex) => {\n if (weightName === weightsEntry.name) {\n enqueueWeightsForFetchingFn();\n weightsFound[weightIndex] = true;\n }\n });\n }\n else {\n enqueueWeightsForFetchingFn();\n }\n allManifestWeightNames.push(weightsEntry.name);\n groupOffset += weightsBytes;\n });\n });\n if (!weightsFound.every(found => found)) {\n const weightsNotFound = weightNames.filter((_, i) => !weightsFound[i]);\n throw new Error(`Could not find weights in manifest with names: ` +\n `${weightsNotFound.join(', ')}. \\n` +\n `Manifest JSON has weights with names: ` +\n `${allManifestWeightNames.join(', ')}.`);\n }\n // Convert the one-hot boolean groupId => shouldFetch map to a list of group\n // IDs.\n const groupIndicesToFetch = groupIndicesToFetchMap.reduce((accumulator, shouldFetch, i) => {\n if (shouldFetch) {\n accumulator.push(i);\n }\n return accumulator;\n }, []);\n const fetchUrls = [];\n groupIndicesToFetch.forEach(i => {\n manifest[i].paths.forEach(filepath => {\n const fetchUrl = filePathPrefix +\n (!filePathPrefix.endsWith('/') ? '/' : '') + filepath;\n fetchUrls.push(fetchUrl);\n });\n });\n const buffers = await fetchWeightsFunction(fetchUrls);\n const weightsTensorMap = {};\n let bufferIndexOffset = 0;\n groupIndicesToFetch.forEach(i => {\n const numBuffers = manifest[i].paths.length;\n let groupBytes = 0;\n for (let i = 0; i < numBuffers; i++) {\n groupBytes += buffers[bufferIndexOffset + i].byteLength;\n }\n // Create a buffer for the whole group.\n const groupBuffer = new ArrayBuffer(groupBytes);\n const groupByteBuffer = new Uint8Array(groupBuffer);\n let groupBufferOffset = 0;\n for (let i = 0; i < numBuffers; i++) {\n const buffer = new Uint8Array(buffers[bufferIndexOffset + i]);\n groupByteBuffer.set(buffer, groupBufferOffset);\n groupBufferOffset += buffer.byteLength;\n }\n const weightsEntries = groupWeightsToFetch[i];\n weightsEntries.forEach(weightsEntry => {\n const byteBuffer = groupBuffer.slice(weightsEntry.groupOffset, weightsEntry.groupOffset + weightsEntry.sizeBytes);\n const nameToTensorMap = decodeWeights(byteBuffer, [weightsEntry.manifestEntry]);\n for (const name in nameToTensorMap) {\n weightsTensorMap[name] = nameToTensorMap[name];\n }\n });\n bufferIndexOffset += numBuffers;\n });\n return weightsTensorMap;\n };\n}\n//# sourceMappingURL=weights_loader.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * IOHandler implementations based on HTTP requests in the web browser.\n *\n * Uses [`fetch`](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API).\n */\nimport { env } from '../environment';\nimport { assert } from '../util';\nimport { concatenateArrayBuffers, getModelArtifactsInfoForJSON } from './io_utils';\nimport { IORouterRegistry } from './router_registry';\nimport { loadWeightsAsArrayBuffer } from './weights_loader';\nconst OCTET_STREAM_MIME_TYPE = 'application/octet-stream';\nconst JSON_TYPE = 'application/json';\nexport class HTTPRequest {\n constructor(path, loadOptions) {\n this.DEFAULT_METHOD = 'POST';\n if (loadOptions == null) {\n loadOptions = {};\n }\n this.weightPathPrefix = loadOptions.weightPathPrefix;\n this.onProgress = loadOptions.onProgress;\n this.weightUrlConverter = loadOptions.weightUrlConverter;\n if (loadOptions.fetchFunc != null) {\n assert(typeof loadOptions.fetchFunc === 'function', () => 'Must pass a function that matches the signature of ' +\n '`fetch` (see ' +\n 'https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API)');\n this.fetch = loadOptions.fetchFunc;\n }\n else {\n this.fetch = env().platform.fetch;\n }\n assert(path != null && path.length > 0, () => 'URL path for http must not be null, undefined or ' +\n 'empty.');\n if (Array.isArray(path)) {\n assert(path.length === 2, () => 'URL paths for http must have a length of 2, ' +\n `(actual length is ${path.length}).`);\n }\n this.path = path;\n if (loadOptions.requestInit != null &&\n loadOptions.requestInit.body != null) {\n throw new Error('requestInit is expected to have no pre-existing body, but has one.');\n }\n this.requestInit = loadOptions.requestInit || {};\n }\n async save(modelArtifacts) {\n if (modelArtifacts.modelTopology instanceof ArrayBuffer) {\n throw new Error('BrowserHTTPRequest.save() does not support saving model topology ' +\n 'in binary formats yet.');\n }\n const init = Object.assign({ method: this.DEFAULT_METHOD }, this.requestInit);\n init.body = new FormData();\n const weightsManifest = [{\n paths: ['./model.weights.bin'],\n weights: modelArtifacts.weightSpecs,\n }];\n const modelTopologyAndWeightManifest = {\n modelTopology: modelArtifacts.modelTopology,\n format: modelArtifacts.format,\n generatedBy: modelArtifacts.generatedBy,\n convertedBy: modelArtifacts.convertedBy,\n userDefinedMetadata: modelArtifacts.userDefinedMetadata,\n weightsManifest\n };\n init.body.append('model.json', new Blob([JSON.stringify(modelTopologyAndWeightManifest)], { type: JSON_TYPE }), 'model.json');\n if (modelArtifacts.weightData != null) {\n init.body.append('model.weights.bin', new Blob([modelArtifacts.weightData], { type: OCTET_STREAM_MIME_TYPE }), 'model.weights.bin');\n }\n const response = await this.fetch(this.path, init);\n if (response.ok) {\n return {\n modelArtifactsInfo: 
getModelArtifactsInfoForJSON(modelArtifacts),\n responses: [response],\n };\n }\n else {\n throw new Error(`BrowserHTTPRequest.save() failed due to HTTP response status ` +\n `${response.status}.`);\n }\n }\n /**\n * Load model artifacts via HTTP request(s).\n *\n * See the documentation to `tf.io.http` for details on the saved\n * artifacts.\n *\n * @returns The loaded model artifacts (if loading succeeds).\n */\n async load() {\n const modelConfigRequest = await this.fetch(this.path, this.requestInit);\n if (!modelConfigRequest.ok) {\n throw new Error(`Request to ${this.path} failed with status code ` +\n `${modelConfigRequest.status}. Please verify this URL points to ` +\n `the model JSON of the model to load.`);\n }\n let modelConfig;\n try {\n modelConfig = await modelConfigRequest.json();\n }\n catch (e) {\n let message = `Failed to parse model JSON of response from ${this.path}.`;\n // TODO(nsthorat): Remove this after some time when we're comfortable that\n // .pb files are mostly gone.\n if (this.path.endsWith('.pb')) {\n message += ' Your path contains a .pb file extension. ' +\n 'Support for .pb models have been removed in TensorFlow.js 1.0 ' +\n 'in favor of .json models. You can re-convert your Python ' +\n 'TensorFlow model using the TensorFlow.js 1.0 conversion scripts ' +\n 'or you can convert your.pb models with the \\'pb2json\\'' +\n 'NPM script in the tensorflow/tfjs-converter repository.';\n }\n else {\n message += ' Please make sure the server is serving valid ' +\n 'JSON for this request.';\n }\n throw new Error(message);\n }\n const modelTopology = modelConfig.modelTopology;\n const weightsManifest = modelConfig.weightsManifest;\n const generatedBy = modelConfig.generatedBy;\n const convertedBy = modelConfig.convertedBy;\n const format = modelConfig.format;\n const userDefinedMetadata = modelConfig.userDefinedMetadata;\n // We do not allow both modelTopology and weightsManifest to be missing.\n if (modelTopology == null && weightsManifest == null) {\n throw new Error(`The JSON from HTTP path ${this.path} contains neither model ` +\n `topology or manifest for weights.`);\n }\n let weightSpecs;\n let weightData;\n if (weightsManifest != null) {\n const results = await this.loadWeights(weightsManifest);\n [weightSpecs, weightData] = results;\n }\n const artifacts = {\n modelTopology,\n weightSpecs,\n weightData,\n userDefinedMetadata,\n generatedBy,\n convertedBy,\n format\n };\n const initializer = modelConfig.modelInitializer;\n if (initializer) {\n artifacts.modelInitializer = initializer;\n }\n return artifacts;\n }\n async loadWeights(weightsManifest) {\n const weightPath = Array.isArray(this.path) ? 
this.path[1] : this.path;\n const [prefix, suffix] = parseUrl(weightPath);\n const pathPrefix = this.weightPathPrefix || prefix;\n const weightSpecs = [];\n for (const entry of weightsManifest) {\n weightSpecs.push(...entry.weights);\n }\n const fetchURLs = [];\n const urlPromises = [];\n for (const weightsGroup of weightsManifest) {\n for (const path of weightsGroup.paths) {\n if (this.weightUrlConverter != null) {\n urlPromises.push(this.weightUrlConverter(path));\n }\n else {\n fetchURLs.push(pathPrefix + path + suffix);\n }\n }\n }\n if (this.weightUrlConverter) {\n fetchURLs.push(...await Promise.all(urlPromises));\n }\n const buffers = await loadWeightsAsArrayBuffer(fetchURLs, {\n requestInit: this.requestInit,\n fetchFunc: this.fetch,\n onProgress: this.onProgress\n });\n return [weightSpecs, concatenateArrayBuffers(buffers)];\n }\n}\nHTTPRequest.URL_SCHEME_REGEX = /^https?:\\/\\//;\n/**\n * Extract the prefix and suffix of the url, where the prefix is the path before\n * the last file, and suffix is the search params after the last file.\n * ```\n * const url = 'http://tfhub.dev/model/1/tensorflowjs_model.pb?tfjs-format=file'\n * [prefix, suffix] = parseUrl(url)\n * // prefix = 'http://tfhub.dev/model/1/'\n * // suffix = '?tfjs-format=file'\n * ```\n * @param url the model url to be parsed.\n */\nexport function parseUrl(url) {\n const lastSlash = url.lastIndexOf('/');\n const lastSearchParam = url.lastIndexOf('?');\n const prefix = url.substring(0, lastSlash);\n const suffix = lastSearchParam > lastSlash ? url.substring(lastSearchParam) : '';\n return [prefix + '/', suffix];\n}\nexport function isHTTPScheme(url) {\n return url.match(HTTPRequest.URL_SCHEME_REGEX) != null;\n}\nexport const httpRouter = (url, loadOptions) => {\n if (typeof fetch === 'undefined' &&\n (loadOptions == null || loadOptions.fetchFunc == null)) {\n // `http` uses `fetch` or `node-fetch`, if one wants to use it in\n // an environment that is not the browser or node they have to setup a\n // global fetch polyfill.\n return null;\n }\n else {\n let isHTTP = true;\n if (Array.isArray(url)) {\n isHTTP = url.every(urlItem => isHTTPScheme(urlItem));\n }\n else {\n isHTTP = isHTTPScheme(url);\n }\n if (isHTTP) {\n return http(url, loadOptions);\n }\n }\n return null;\n};\nIORouterRegistry.registerSaveRouter(httpRouter);\nIORouterRegistry.registerLoadRouter(httpRouter);\n/**\n * Creates an IOHandler subtype that sends model artifacts to HTTP server.\n *\n * An HTTP request of the `multipart/form-data` mime type will be sent to the\n * `path` URL. The form data includes artifacts that represent the topology\n * and/or weights of the model. 
In the case of Keras-style `tf.Model`, two\n * blobs (files) exist in form-data:\n * - A JSON file consisting of `modelTopology` and `weightsManifest`.\n * - A binary weights file consisting of the concatenated weight values.\n * These files are in the same format as the one generated by\n * [tfjs_converter](https://js.tensorflow.org/tutorials/import-keras.html).\n *\n * The following code snippet exemplifies the client-side code that uses this\n * function:\n *\n * ```js\n * const model = tf.sequential();\n * model.add(\n * tf.layers.dense({units: 1, inputShape: [100], activation: 'sigmoid'}));\n *\n * const saveResult = await model.save(tf.io.http(\n * 'http://model-server:5000/upload', {requestInit: {method: 'PUT'}}));\n * console.log(saveResult);\n * ```\n *\n * If the default `POST` method is to be used, without any custom parameters\n * such as headers, you can simply pass an HTTP or HTTPS URL to `model.save`:\n *\n * ```js\n * const saveResult = await model.save('http://model-server:5000/upload');\n * ```\n *\n * The following GitHub Gist\n * https://gist.github.com/dsmilkov/1b6046fd6132d7408d5257b0976f7864\n * implements a server based on [flask](https://github.com/pallets/flask) that\n * can receive the request. Upon receiving the model artifacts via the requst,\n * this particular server reconsistutes instances of [Keras\n * Models](https://keras.io/models/model/) in memory.\n *\n *\n * @param path A URL path to the model.\n * Can be an absolute HTTP path (e.g.,\n * 'http://localhost:8000/model-upload)') or a relative path (e.g.,\n * './model-upload').\n * @param requestInit Request configurations to be used when sending\n * HTTP request to server using `fetch`. It can contain fields such as\n * `method`, `credentials`, `headers`, `mode`, etc. See\n * https://developer.mozilla.org/en-US/docs/Web/API/Request/Request\n * for more information. `requestInit` must not have a body, because the\n * body will be set by TensorFlow.js. File blobs representing the model\n * topology (filename: 'model.json') and the weights of the model (filename:\n * 'model.weights.bin') will be appended to the body. If `requestInit` has a\n * `body`, an Error will be thrown.\n * @param loadOptions Optional configuration for the loading. It includes the\n * following fields:\n * - weightPathPrefix Optional, this specifies the path prefix for weight\n * files, by default this is calculated from the path param.\n * - fetchFunc Optional, custom `fetch` function. E.g., in Node.js,\n * the `fetch` from node-fetch can be used here.\n * - onProgress Optional, progress callback function, fired periodically\n * before the load is completed.\n * @returns An instance of `IOHandler`.\n *\n * @doc {\n * heading: 'Models',\n * subheading: 'Loading',\n * namespace: 'io',\n * ignoreCI: true\n * }\n */\nexport function http(path, loadOptions) {\n return new HTTPRequest(path, loadOptions);\n}\n/**\n * Deprecated. Use `tf.io.http`.\n * @param path\n * @param loadOptions\n */\nexport function browserHTTPRequest(path, loadOptions) {\n return http(path, loadOptions);\n}\n//# sourceMappingURL=http.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nclass PassthroughLoader {\n constructor(modelArtifacts) {\n this.modelArtifacts = modelArtifacts;\n }\n async load() {\n return this.modelArtifacts;\n }\n}\nclass PassthroughSaver {\n constructor(saveHandler) {\n this.saveHandler = saveHandler;\n }\n async save(modelArtifacts) {\n return this.saveHandler(modelArtifacts);\n }\n}\n/**\n * Creates an IOHandler that loads model artifacts from memory.\n *\n * When used in conjunction with `tf.loadLayersModel`, an instance of\n * `tf.LayersModel` (Keras-style) can be constructed from the loaded artifacts.\n *\n * ```js\n * const model = await tf.loadLayersModel(tf.io.fromMemory(\n * modelTopology, weightSpecs, weightData));\n * ```\n *\n * @param modelArtifacts a object containing model topology (i.e., parsed from\n * the JSON format).\n * @param weightSpecs An array of `WeightsManifestEntry` objects describing the\n * names, shapes, types, and quantization of the weight data.\n * @param weightData A single `ArrayBuffer` containing the weight data,\n * concatenated in the order described by the weightSpecs.\n * @param trainingConfig Model training configuration. Optional.\n *\n * @returns A passthrough `IOHandler` that simply loads the provided data.\n */\nexport function fromMemory(modelArtifacts, weightSpecs, weightData, trainingConfig) {\n if (arguments.length === 1) {\n const isModelArtifacts = modelArtifacts.modelTopology != null ||\n modelArtifacts.weightSpecs != null;\n if (isModelArtifacts) {\n return new PassthroughLoader(modelArtifacts);\n }\n else {\n // Legacy support: with only modelTopology.\n // TODO(cais): Remove this deprecated API.\n console.warn('Please call tf.io.fromMemory() with only one argument. ' +\n 'The argument should be of type ModelArtifacts. ' +\n 'The multi-argument signature of tf.io.fromMemory() has been ' +\n 'deprecated and will be removed in a future release.');\n return new PassthroughLoader({ modelTopology: modelArtifacts });\n }\n }\n else {\n // Legacy support.\n // TODO(cais): Remove this deprecated API.\n console.warn('Please call tf.io.fromMemory() with only one argument. ' +\n 'The argument should be of type ModelArtifacts. ' +\n 'The multi-argument signature of tf.io.fromMemory() has been ' +\n 'deprecated and will be removed in a future release.');\n return new PassthroughLoader({\n modelTopology: modelArtifacts,\n weightSpecs,\n weightData,\n trainingConfig\n });\n }\n}\n/**\n * Creates an IOHandler that passes saved model artifacts to a callback.\n *\n * ```js\n * function handleSave(artifacts) {\n * // ... 
do something with the artifacts ...\n * return {modelArtifactsInfo: {...}, ...};\n * }\n *\n * const saveResult = model.save(tf.io.withSaveHandler(handleSave));\n * ```\n *\n * @param saveHandler A function that accepts a `ModelArtifacts` and returns a\n * `SaveResult`.\n */\nexport function withSaveHandler(saveHandler) {\n return new PassthroughSaver(saveHandler);\n}\n//# sourceMappingURL=passthrough.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// Importing local_storage and indexed_db is necessary for the routers to be\n// registered.\nimport './indexed_db';\nimport './local_storage';\nimport { browserFiles } from './browser_files';\nimport { browserHTTPRequest, http, isHTTPScheme } from './http';\nimport { concatenateArrayBuffers, decodeWeights, encodeWeights, getModelArtifactsInfoForJSON } from './io_utils';\nimport { fromMemory, withSaveHandler } from './passthrough';\nimport { getLoadHandlers, getSaveHandlers, registerLoadRouter, registerSaveRouter } from './router_registry';\nimport { loadWeights, weightsLoaderFactory } from './weights_loader';\nexport { copyModel, listModels, moveModel, removeModel } from './model_management';\nexport { browserFiles, browserHTTPRequest, concatenateArrayBuffers, decodeWeights, encodeWeights, fromMemory, getLoadHandlers, getModelArtifactsInfoForJSON, getSaveHandlers, http, isHTTPScheme, loadWeights, registerLoadRouter, registerSaveRouter, weightsLoaderFactory, withSaveHandler };\n//# sourceMappingURL=io.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Reshape } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * Reshapes a `tf.Tensor` to a given shape.\n *\n * Given an input tensor, returns a new tensor with the same values as the\n * input tensor with shape `shape`.\n *\n * If one component of shape is the special value -1, the size of that\n * dimension is computed so that the total size remains constant. In\n * particular, a shape of [-1] flattens into 1-D. 
At most one component of\n * shape can be -1.\n *\n * If shape is 1-D or higher, then the operation returns a tensor with shape\n * shape filled with the values of tensor. In this case, the number of\n * elements implied by shape must be the same as the number of elements in\n * tensor.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4]);\n * x.reshape([2, 2]).print();\n * ```\n *\n * @param x The input tensor to be reshaped.\n * @param shape An array of integers defining the output tensor shape.\n *\n * @doc {heading: 'Tensors', subheading: 'Transformations'}\n */\nfunction reshape_(x, shape) {\n const $x = convertToTensor(x, 'x', 'reshape', null);\n const inputs = { x: $x };\n const attrs = { shape };\n const forward = (backend, save) => {\n shape = util.inferFromImplicitShape(shape, $x.size);\n util.assert($x.size === util.sizeFromShape(shape), () => 'new shape and old shape must have the same number of elements.');\n save([$x]);\n return backend.reshape($x, shape);\n };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Reshape, attrs);\n}\nexport const reshape = op({ reshape_ });\n//# sourceMappingURL=reshape.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { BatchMatMul } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the dot product of two matrices, A * B. These must be matrices.\n *\n * ```js\n * const a = tf.tensor2d([1, 2], [1, 2]);\n * const b = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * a.matMul(b).print(); // or tf.matMul(a, b)\n * ```\n * @param a First matrix in dot product operation.\n * @param b Second matrix in dot product operation.\n * @param transposeA If true, `a` is transposed before multiplication.\n * @param transposeB If true, `b` is transposed before multiplication.\n *\n * @doc {heading: 'Operations', subheading: 'Matrices'}\n */\nfunction matMul_(a, b, transposeA = false, transposeB = false) {\n let $a = convertToTensor(a, 'a', 'matMul');\n let $b = convertToTensor(b, 'b', 'matMul');\n [$a, $b] = makeTypesMatch($a, $b);\n const forward = (backend, save) => {\n save([$a, $b]);\n const innerShapeA = transposeA ? $a.shape[$a.rank - 2] : $a.shape[$a.rank - 1];\n const innerShapeB = transposeB ? $b.shape[$b.rank - 1] : $b.shape[$b.rank - 2];\n const outerShapeA = transposeA ? $a.shape[$a.rank - 1] : $a.shape[$a.rank - 2];\n const outerShapeB = transposeB ? 
$b.shape[$b.rank - 2] : $b.shape[$b.rank - 1];\n const outerDimsA = $a.shape.slice(0, -2);\n const outerDimsB = $b.shape.slice(0, -2);\n const batchDimA = util.sizeFromShape(outerDimsA);\n const batchDimB = util.sizeFromShape(outerDimsB);\n const batchDimsCompatible = batchDimA === batchDimB || batchDimA === 1 || batchDimB === 1;\n util.assert($a.rank >= 2 && $b.rank >= 2 && batchDimsCompatible, () => `Error in matMul: the input batch dimensions must either be the ` +\n `same or at least one input batch dimension must be 1. Got input ` +\n `batch dimensions of (${outerDimsA}) and (${outerDimsB}).`);\n util.assert(innerShapeA === innerShapeB, () => `Error in matMul: inner shapes (${innerShapeA}) and (` +\n `${innerShapeB}) of Tensors with shapes ${$a.shape} and ` +\n `${$b.shape} and transposeA=${transposeA}` +\n ` and transposeB=${transposeB} must match.`);\n const outShapeOuterDims = batchDimA > batchDimB ? outerDimsA : outerDimsB;\n const outShape = outShapeOuterDims.concat([outerShapeA, outerShapeB]);\n const a3D = transposeA ?\n reshape($a, [batchDimA, innerShapeA, outerShapeA]) :\n reshape($a, [batchDimA, outerShapeA, innerShapeA]);\n const b3D = transposeB ?\n reshape($b, [batchDimB, outerShapeB, innerShapeB]) :\n reshape($b, [batchDimB, innerShapeB, outerShapeB]);\n const res3d = backend.batchMatMul(a3D, b3D, transposeA, transposeB);\n return reshape(res3d, outShape);\n };\n const inputs = { a: $a, b: $b };\n const attrs = { transposeA, transposeB };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, BatchMatMul, attrs);\n}\nexport const matMul = op({ matMul_ });\n//# sourceMappingURL=mat_mul.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { OneHot } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Creates a one-hot `tf.Tensor`. The locations represented by `indices` take\n * value `onValue` (defaults to 1), while all other locations take value\n * `offValue` (defaults to 0). 
If `indices` is rank `R`, the output has rank\n * `R+1` with the last axis of size `depth`.\n *\n * ```js\n * tf.oneHot(tf.tensor1d([0, 1], 'int32'), 3).print();\n * ```\n *\n * @param indices `tf.Tensor` of indices with dtype `int32`.\n * @param depth The depth of the one hot dimension.\n * @param onValue A number used to fill in the output when the index matches\n * the location.\n * @param offValue A number used to fill in the output when the index does\n * not match the location.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nfunction oneHot_(indices, depth, onValue = 1, offValue = 0) {\n if (depth < 2) {\n throw new Error(`Error in oneHot: depth must be >=2, but it is ${depth}`);\n }\n const $indices = convertToTensor(indices, 'indices', 'oneHot', 'int32');\n const outShape = [...$indices.shape, depth];\n const forward = (backend, save) => {\n save([$indices]);\n return reshape(backend.oneHot(reshape($indices, [$indices.size]), depth, onValue, offValue), outShape);\n };\n const inputs = { indices: $indices };\n const attrs = { depth, onValue, offValue };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, OneHot, attrs);\n}\nexport const oneHot = op({ oneHot_ });\n//# sourceMappingURL=one_hot.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Transpose } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * Transposes the `tf.Tensor`. Permutes the dimensions according to `perm`.\n *\n * The returned `tf.Tensor`'s dimension `i` will correspond to the input\n * dimension `perm[i]`. If `perm` is not given, it is set to `[n-1...0]`,\n * where `n` is the rank of the input `tf.Tensor`. 
Hence by default, this\n * operation performs a regular matrix transpose on 2-D input `tf.Tensor`s.\n *\n * ```js\n * const a = tf.tensor2d([1, 2, 3, 4, 5, 6], [2, 3]);\n *\n * a.transpose().print(); // or tf.transpose(a)\n * ```\n *\n * @param x The tensor to transpose.\n * @param perm The permutation of the dimensions of a.\n *\n * @doc {heading: 'Operations', subheading: 'Matrices'}\n */\nfunction transpose_(x, perm) {\n const $x = convertToTensor(x, 'x', 'transpose');\n if (perm == null) {\n perm = $x.shape.map((s, i) => i).reverse();\n }\n util.assert($x.rank === perm.length, () => `Error in transpose: rank of input ${$x.rank} ` +\n `must match length of perm ${perm}.`);\n perm.forEach(axis => {\n util.assert(axis >= 0 && axis < $x.rank, () => `All entries in 'perm' must be between 0 and ${$x.rank - 1}` +\n ` but got ${perm}`);\n });\n if ($x.rank <= 1) {\n return $x.clone();\n }\n const inputs = { x: $x };\n const attrs = { perm };\n return ENGINE.runKernelFunc(backend => backend.transpose($x, perm), inputs, null /* gradient */, Transpose, attrs);\n}\nexport const transpose = op({ transpose_ });\n//# sourceMappingURL=transpose.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { cast } from './cast';\nimport { matMul } from './mat_mul';\nimport { oneHot } from './one_hot';\nimport { op } from './operation';\nimport { transpose } from './transpose';\n/**\n * Computes the confusion matrix from true labels and predicted labels.\n *\n * ```js\n * const labels = tf.tensor1d([0, 1, 2, 1, 0], 'int32');\n * const predictions = tf.tensor1d([0, 2, 2, 1, 0], 'int32');\n * const numClasses = 3;\n * const out = tf.math.confusionMatrix(labels, predictions, numClasses);\n * out.print();\n * // Expected output matrix:\n * // [[2, 0, 0],\n * // [0, 1, 1],\n * // [0, 0, 1]]\n * ```\n *\n * @param labels The target labels, assumed to be 0-based integers\n * for the classes. The shape is `[numExamples]`, where\n * `numExamples` is the number of examples included.\n * @param predictions The predicted classes, assumed to be\n * 0-based integers for the classes. Must have the same shape as `labels`.\n * @param numClasses Number of all classes, as an integer.\n * Its value must be larger than the largest element in `labels` and\n * `predictions`.\n * @returns The confusion matrix as a int32-type 2D tensor. 
The value at\n * row `r` and column `c` is the number of times examples of actual class\n * `r` were predicted as class `c`.\n *\n * @doc {heading: 'Operations', subheading: 'Evaluation'}\n */\nexport function confusionMatrix_(labels, predictions, numClasses) {\n const $labels = convertToTensor(labels, 'labels', 'confusionMatrix');\n const $predictions = convertToTensor(predictions, 'predictions', 'confusionMatrix');\n util.assert(numClasses == null || numClasses > 0 && Number.isInteger(numClasses), () => `If provided, numClasses must be a positive integer, ` +\n `but got ${numClasses}`);\n util.assert($labels.rank === 1, () => `Expected the rank of labels to be 1, but got ${$labels.rank}`);\n util.assert($predictions.rank === 1, () => `Expected the rank of predictions to be 1, ` +\n `but got ${$predictions.rank}`);\n util.assert($labels.shape[0] === $predictions.shape[0], () => `Mismatch in the number of examples: ` +\n `${$labels.shape[0]} vs. ${$predictions.shape[0]}. ` +\n `Labels and predictions should have the same number of elements.`);\n util.assert(numClasses > 0 && Number.isInteger(numClasses), () => `numClasses is required to be a positive integer, but got ` +\n `${numClasses}`);\n // TODO(cais): In the future, if oneHot supports tensors inputs for\n // `numClasses`, `confusionMatrix` can make `numClasses` optional.\n const oneHotLabels = oneHot(cast($labels, 'int32'), numClasses);\n const oneHotPredictions = oneHot(cast($predictions, 'int32'), numClasses);\n const oneHotLabelsT = transpose(oneHotLabels);\n const product = matMul(oneHotLabelsT, oneHotPredictions);\n return cast(product, 'int32');\n}\nexport const confusionMatrix = op({ confusionMatrix_ });\n//# sourceMappingURL=confusion_matrix.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * Exports under the tf.math.* namespace.\n */\nimport { confusionMatrix } from './ops/confusion_matrix';\nexport { confusionMatrix };\n//# sourceMappingURL=math.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { inferShape } from '../tensor_util_env';\nimport { assertNonNull } from '../util';\nimport { makeTensor } from './tensor_ops_util';\n/**\n * Creates rank-3 `tf.Tensor` with the provided values, shape and dtype.\n *\n * The same functionality can be achieved with `tf.tensor`, but in general\n * we recommend using `tf.tensor3d` as it makes the code more readable.\n *\n * ```js\n * // Pass a nested array.\n * tf.tensor3d([[[1], [2]], [[3], [4]]]).print();\n * ```\n * ```js\n * // Pass a flat array and specify a shape.\n * tf.tensor3d([1, 2, 3, 4], [2, 2, 1]).print();\n * ```\n *\n * @param values The values of the tensor. Can be nested array of numbers,\n * or a flat array, or a `TypedArray`.\n * @param shape The shape of the tensor. If not provided, it is inferred from\n * `values`.\n * @param dtype The data type.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function tensor3d(values, shape, dtype) {\n assertNonNull(values);\n if (shape != null && shape.length !== 3) {\n throw new Error('tensor3d() requires shape to have three numbers');\n }\n const inferredShape = inferShape(values, dtype);\n if (inferredShape.length !== 3 && inferredShape.length !== 1) {\n throw new Error('tensor3d() requires values to be number[][][] or flat/TypedArray');\n }\n if (inferredShape.length === 1 && shape == null) {\n throw new Error('tensor3d() requires shape to be provided when `values` ' +\n 'are a flat array');\n }\n return makeTensor(values, shape, inferredShape, dtype);\n}\n//# sourceMappingURL=tensor3d.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { FromPixels } from '../kernel_names';\nimport { getKernel } from '../kernel_registry';\nimport { Tensor } from '../tensor';\nimport { convertToTensor } from '../tensor_util_env';\nimport { cast } from './cast';\nimport { op } from './operation';\nimport { tensor3d } from './tensor3d';\nlet fromPixels2DContext;\n/**\n * Creates a `tf.Tensor` from an image.\n *\n * ```js\n * const image = new ImageData(1, 1);\n * image.data[0] = 100;\n * image.data[1] = 150;\n * image.data[2] = 200;\n * image.data[3] = 255;\n *\n * tf.browser.fromPixels(image).print();\n * ```\n *\n * @param pixels The input image to construct the tensor from. The\n * supported image types are all 4-channel. You can also pass in an image\n * object with following attributes:\n * `{data: Uint8Array; width: number; height: number}`\n * @param numChannels The number of channels of the output tensor. A\n * numChannels value less than 4 allows you to ignore channels. Defaults to\n * 3 (ignores alpha channel of input image).\n *\n * @doc {heading: 'Browser', namespace: 'browser', ignoreCI: true}\n */\nfunction fromPixels_(pixels, numChannels = 3) {\n // Sanity checks.\n if (numChannels > 4) {\n throw new Error('Cannot construct Tensor with more than 4 channels from pixels.');\n }\n if (pixels == null) {\n throw new Error('pixels passed to tf.browser.fromPixels() can not be null');\n }\n let isPixelData = false;\n let isImageData = false;\n let isVideo = false;\n let isImage = false;\n let isCanvasLike = false;\n if (pixels.data instanceof Uint8Array) {\n isPixelData = true;\n }\n else if (typeof (ImageData) !== 'undefined' && pixels instanceof ImageData) {\n isImageData = true;\n }\n else if (typeof (HTMLVideoElement) !== 'undefined' &&\n pixels instanceof HTMLVideoElement) {\n isVideo = true;\n }\n else if (typeof (HTMLImageElement) !== 'undefined' &&\n pixels instanceof HTMLImageElement) {\n isImage = true;\n // tslint:disable-next-line: no-any\n }\n else if (pixels.getContext != null) {\n isCanvasLike = true;\n }\n else {\n throw new Error('pixels passed to tf.browser.fromPixels() must be either an ' +\n `HTMLVideoElement, HTMLImageElement, HTMLCanvasElement, ImageData ` +\n `in browser, or OffscreenCanvas, ImageData in webworker` +\n ` or {data: Uint32Array, width: number, height: number}, ` +\n `but was ${pixels.constructor.name}`);\n }\n if (isVideo) {\n const HAVE_CURRENT_DATA_READY_STATE = 2;\n if (isVideo &&\n pixels.readyState <\n HAVE_CURRENT_DATA_READY_STATE) {\n throw new Error('The video element has not loaded data yet. 
Please wait for ' +\n '`loadeddata` event on the element.');\n }\n }\n // If the current backend has 'FromPixels' registered, it has a more\n // efficient way of handling pixel uploads, so we call that.\n const kernel = getKernel(FromPixels, ENGINE.backendName);\n if (kernel != null) {\n const inputs = { pixels };\n const attrs = { numChannels };\n return ENGINE.runKernel(FromPixels, inputs, attrs);\n }\n const [width, height] = isVideo ?\n [\n pixels.videoWidth,\n pixels.videoHeight\n ] :\n [pixels.width, pixels.height];\n let vals;\n if (isCanvasLike) {\n vals =\n // tslint:disable-next-line:no-any\n pixels.getContext('2d').getImageData(0, 0, width, height).data;\n }\n else if (isImageData || isPixelData) {\n vals = pixels.data;\n }\n else if (isImage || isVideo) {\n if (fromPixels2DContext == null) {\n fromPixels2DContext = document.createElement('canvas').getContext('2d');\n }\n fromPixels2DContext.canvas.width = width;\n fromPixels2DContext.canvas.height = height;\n fromPixels2DContext.drawImage(pixels, 0, 0, width, height);\n vals = fromPixels2DContext.getImageData(0, 0, width, height).data;\n }\n let values;\n if (numChannels === 4) {\n values = new Int32Array(vals);\n }\n else {\n const numPixels = width * height;\n values = new Int32Array(numPixels * numChannels);\n for (let i = 0; i < numPixels; i++) {\n for (let channel = 0; channel < numChannels; ++channel) {\n values[i * numChannels + channel] = vals[i * 4 + channel];\n }\n }\n }\n const outShape = [height, width, numChannels];\n return tensor3d(values, outShape, 'int32');\n}\n/**\n * Draws a `tf.Tensor` of pixel values to a byte array or optionally a\n * canvas.\n *\n * When the dtype of the input is 'float32', we assume values in the range\n * [0-1]. Otherwise, when input is 'int32', we assume values in the range\n * [0-255].\n *\n * Returns a promise that resolves when the canvas has been drawn to.\n *\n * @param img A rank-2 or rank-3 tensor. If rank-2, draws grayscale. If\n * rank-3, must have depth of 1, 3 or 4. When depth of 1, draws\n * grayscale. When depth of 3, we draw with the first three components of\n * the depth dimension corresponding to r, g, b and alpha = 1. When depth of\n * 4, all four components of the depth dimension correspond to r, g, b, a.\n * @param canvas The canvas to draw to.\n *\n * @doc {heading: 'Browser', namespace: 'browser'}\n */\nexport async function toPixels(img, canvas) {\n let $img = convertToTensor(img, 'img', 'toPixels');\n if (!(img instanceof Tensor)) {\n // Assume int32 if user passed a native array.\n const originalImgTensor = $img;\n $img = cast(originalImgTensor, 'int32');\n originalImgTensor.dispose();\n }\n if ($img.rank !== 2 && $img.rank !== 3) {\n throw new Error(`toPixels only supports rank 2 or 3 tensors, got rank ${$img.rank}.`);\n }\n const [height, width] = $img.shape.slice(0, 2);\n const depth = $img.rank === 2 ? 1 : $img.shape[2];\n if (depth > 4 || depth === 2) {\n throw new Error(`toPixels only supports depth of size ` +\n `1, 3 or 4 but got ${depth}`);\n }\n if ($img.dtype !== 'float32' && $img.dtype !== 'int32') {\n throw new Error(`Unsupported type for toPixels: ${$img.dtype}.` +\n ` Please use float32 or int32 tensors.`);\n }\n const data = await $img.data();\n const multiplier = $img.dtype === 'float32' ? 
255 : 1;\n const bytes = new Uint8ClampedArray(width * height * 4);\n for (let i = 0; i < height * width; ++i) {\n const rgba = [0, 0, 0, 255];\n for (let d = 0; d < depth; d++) {\n const value = data[i * depth + d];\n if ($img.dtype === 'float32') {\n if (value < 0 || value > 1) {\n throw new Error(`Tensor values for a float32 Tensor must be in the ` +\n `range [0 - 1] but encountered ${value}.`);\n }\n }\n else if ($img.dtype === 'int32') {\n if (value < 0 || value > 255) {\n throw new Error(`Tensor values for a int32 Tensor must be in the ` +\n `range [0 - 255] but encountered ${value}.`);\n }\n }\n if (depth === 1) {\n rgba[0] = value * multiplier;\n rgba[1] = value * multiplier;\n rgba[2] = value * multiplier;\n }\n else {\n rgba[d] = value * multiplier;\n }\n }\n const j = i * 4;\n bytes[j + 0] = Math.round(rgba[0]);\n bytes[j + 1] = Math.round(rgba[1]);\n bytes[j + 2] = Math.round(rgba[2]);\n bytes[j + 3] = Math.round(rgba[3]);\n }\n if (canvas != null) {\n canvas.width = width;\n canvas.height = height;\n const ctx = canvas.getContext('2d');\n const imageData = new ImageData(bytes, width, height);\n ctx.putImageData(imageData, 0, 0);\n }\n if ($img !== img) {\n $img.dispose();\n }\n return bytes;\n}\nexport const fromPixels = op({ fromPixels_ });\n//# sourceMappingURL=browser.js.map", "import { computeStrides } from '../util';\n/**\n * Validate gather nd inputs.\n *\n * @param tensor The tensor contains the source values.\n * @param indices The tensor contains the indices to slice the source.\n *\n * @returns [resultShape, numUpdates, sliceSize, strides]\n */\nexport function prepareAndValidate(tensor, indices) {\n if (tensor.rank < 1) {\n throw new Error('tf.gatherND() expects the input to be rank 1 or higher,' +\n ` but the rank was ${tensor.rank}.`);\n }\n if (indices.rank < 1) {\n throw new Error('tf.gatherND() expects the indices to be rank 1 or higher,' +\n ` but the rank was ${indices.rank}.`);\n }\n if (indices.dtype !== 'int32') {\n throw new Error('tf.gatherND() expects the indices to be int32 type,' +\n ` but the dtype was ${indices.dtype}.`);\n }\n if (indices.shape[indices.rank - 1] > tensor.rank) {\n throw new Error('index innermost dimension length must be <= tensor rank; saw: ' +\n `${indices.shape[indices.rank - 1]} vs. ${tensor.rank}`);\n }\n if (tensor.size === 0) {\n throw new Error('Requested more than 0 entries, but input is empty.' +\n ` Input shape: ${tensor.shape}.`);\n }\n const indicesShape = indices.shape;\n const sliceRank = indicesShape[indicesShape.length - 1];\n // The result shape is\n // indices.shape[:-1] + params.shape[indices.shape[-1]:]\n let nResult = 1;\n for (let i = 0; i < indicesShape.length - 1; ++i) {\n nResult *= indicesShape[i];\n }\n const inputShape = tensor.shape;\n const resultShape = indicesShape.slice();\n resultShape.pop();\n let sliceSize = 1;\n for (let i = sliceRank; i < tensor.rank; ++i) {\n sliceSize *= inputShape[i];\n resultShape.push(inputShape[i]);\n }\n const strides = [...computeStrides(tensor.shape).map(stride => stride / sliceSize),\n 1].slice(0, sliceRank);\n return [resultShape, nResult, sliceSize, strides];\n}\n//# sourceMappingURL=gather_nd_util.js.map", "import { computeStrides, sizeFromShape } from '../util';\n/**\n * Check whether updates.shape = indices.shape[:batchDim] +\n * shape[sliceDim:]\n *\n * @param x The input tensor.\n */\nexport function validateUpdateShape(shape, indices, updates) {\n const sliceDim = (indices.rank > 1) ? 
indices.shape[indices.rank - 1] : 1;\n const batchDim = (indices.rank > 1) ? indices.rank - 1 : 1;\n const shapeError = 'Must have updates.shape = indices.shape[:batchDim] + ' +\n `shape[sliceDim:], got updates.shape: ${updates.shape}` +\n `, indices.shape: ${indices.shape}, shape: ${shape}` +\n `, sliceDim: ${sliceDim}, and batchDim: ${batchDim}.`;\n if (updates.rank < batchDim) {\n throw new Error(shapeError + ` update.rank < ${batchDim}. `);\n }\n if (shape.length < sliceDim + (updates.rank - batchDim)) {\n throw new Error(shapeError +\n ` Output shape length < ${sliceDim + (updates.rank - batchDim)}`);\n }\n if (updates.rank !== batchDim + shape.length - sliceDim) {\n throw new Error(shapeError + ` update.rank != ${batchDim + shape.length - sliceDim}`);\n }\n for (let d = 0; d < batchDim; ++d) {\n if (updates.shape[d] !== indices.shape[d]) {\n throw new Error(shapeError +\n ` updates.shape[${d}] (${updates.shape[d]}) != indices.shape[${d}] (${indices.shape[d]}).`);\n }\n }\n for (let d = 0; d < updates.rank - batchDim; ++d) {\n if (updates.shape[d + batchDim] !== shape[d + sliceDim]) {\n throw new Error(shapeError +\n ` updates.shape[${d + batchDim}] (${updates.shape[d + batchDim]}) != shape[${d + batchDim}] (${shape[d + batchDim]})`);\n }\n }\n}\n/**\n * Validate scatter nd inputs.\n *\n * @param update The tensor contains the update values.\n * @param indices The tensor contains the indices for the update values.\n * @param shape The shape of the output tensor.\n */\nexport function validateInput(updates, indices, shape) {\n if (indices.rank < 1) {\n throw new Error('tf.scatterND() expects the indices to be rank 1 or higher,' +\n ` but the rank was ${indices.rank}.`);\n }\n if (updates.rank < 1) {\n throw new Error('tf.scatterND() expects the updates to be rank 1 or higher,' +\n ` but the rank was ${updates.rank}.`);\n }\n if (indices.dtype !== 'int32') {\n throw new Error(`The dtype of 'indices' should be int32, but got dtype: ${indices.dtype}`);\n }\n if (shape.length < 1) {\n throw new Error(`Output rank must be greater or equal to 1, but got shape: ${shape}`);\n }\n if (shape.length === 0) {\n if (indices.size === 0) {\n throw new Error(`Indices specified for empty output. indices shape: ${indices.shape}`);\n }\n if (updates.size === 0) {\n throw new Error(`Updates specified for empty output. updates shape: ${updates.shape}`);\n }\n }\n validateUpdateShape(shape, indices, updates);\n}\n/**\n * Calculate the shape information for the output.\n *\n * @param update The tensor contains the update values.\n * @param indices The tensor contains the indices for the update values.\n * @param shape The shape of the output tensor.\n *\n * @returns ScatterShapeInfo\n */\nexport function calculateShapes(updates, indices, shape) {\n // Calculate the number of dimensions in indices\n const indicesRank = indices.shape.length;\n const sliceRank = (indicesRank > 1) ? indices.shape[indicesRank - 1] : 1;\n // Calculate the number of elements that make up each slice of our updated\n // tensor. This allows us to work with flattened tensors and copy over whole\n // slices at a time.\n const totalNd = shape.length;\n let sliceSize = 1;\n for (let i = sliceRank; i < totalNd; ++i) {\n sliceSize *= shape[i];\n }\n const safeSliceDim = (sliceRank < 1) ? 
1 : sliceRank;\n const numUpdates = sizeFromShape(indices.shape) / safeSliceDim;\n const strides = [...computeStrides(shape.slice(0, sliceRank)), 1];\n const outputSize = sizeFromShape(shape);\n return { sliceRank, numUpdates, sliceSize, strides, outputSize };\n}\n//# sourceMappingURL=scatter_nd_util.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as util from '../util';\nexport function assertParamsValid(input, begin, size) {\n const inputRank = input.shape.length;\n util.assert(inputRank === begin.length, () => `Error in slice${inputRank}D: Length of begin ${begin} must ` +\n `match the rank of the array (${inputRank}).`);\n util.assert(inputRank === size.length, () => `Error in slice${inputRank}D: Length of size ${size} must ` +\n `match the rank of the array (${inputRank}).`);\n for (let i = 0; i < inputRank; ++i) {\n util.assert(begin[i] + size[i] <= input.shape[i], () => `Error in slice${inputRank}D: begin[${i}] + size[${i}] ` +\n `(${begin[i] + size[i]}) would overflow input.shape[${i}] (${input.shape[i]})`);\n }\n}\n/** Converts a binary mask to an array of axes. Used in stridedSlice(). */\nexport function maskToAxes(mask) {\n const axes = [];\n let axis = 0;\n while (mask > 0) {\n if (mask & 1) {\n axes.push(axis);\n }\n mask /= 2;\n axis++;\n }\n return axes;\n}\n/** Computes the output shape given the strided slice params. */\nexport function computeOutShape(begin, end, strides) {\n const size = [];\n for (let axis = 0; axis < begin.length; axis++) {\n size[axis] = Math.ceil((end[axis] - begin[axis]) / strides[axis]);\n }\n return size;\n}\n// Creates full selection at the elided dimensions. If the dimension matches\n// the ellipsis mask, override the current stride value. 
Otherwise, insert.\nexport function stridesWithElidedDims(strides, ellipsisInsertionIndex, numElidedAxes, inputShape) {\n const newStrides = [...strides];\n for (let i = newStrides.length; i < inputShape.length; i++) {\n newStrides.push(1);\n }\n for (let i = 0; i < numElidedAxes; i++) {\n if (i === 0) {\n newStrides[ellipsisInsertionIndex] = 1;\n }\n else {\n newStrides.splice(ellipsisInsertionIndex, 0 /* num elements to delete */, 1 /* element to add */);\n newStrides.pop();\n }\n }\n return newStrides;\n}\nfunction unnormalizeAxis(ellipsisInsertionIndex, numElidedAxes, normalizedAxis) {\n if (normalizedAxis <= ellipsisInsertionIndex) {\n return normalizedAxis;\n }\n return normalizedAxis - (numElidedAxes - 1);\n}\nfunction getElidedAxes(numElidedAxes, ellipsisInsertionIndex) {\n const elidedAxes = [];\n for (let i = 0; i < numElidedAxes; i++) {\n elidedAxes.push(ellipsisInsertionIndex + i);\n }\n return elidedAxes;\n}\n// Normalize the start, end and strides.\nexport function getNormalizedAxes(inputShape, ellipsisAxes, numInterpolatedAxes, begin, end, strides, beginMask, endMask, ellipsisMask) {\n const inputRank = inputShape.length;\n let normalizedBegin = new Array(inputRank), normalizedEnd = new Array(inputRank), normalizedStrides = new Array(inputRank);\n if (ellipsisAxes.length && numInterpolatedAxes > 0) {\n const fullIndex = ellipsisAxes[0];\n // The ellipsis applies to the masked index as well as any dimensions\n // that are interpolated.\n const numElidedAxes = numInterpolatedAxes + 1;\n normalizedBegin = startIndicesWithElidedDims(beginMask, fullIndex, numElidedAxes, begin, inputShape);\n normalizedEnd = stopIndicesWithElidedDims(endMask, fullIndex, numElidedAxes, end, inputShape);\n normalizedStrides =\n stridesWithElidedDims(strides, fullIndex, numElidedAxes, inputShape);\n }\n else {\n for (let axis = 0; axis < inputRank; axis++) {\n normalizedBegin[axis] = startForAxis(beginMask, begin, strides, inputShape, axis, ellipsisMask);\n normalizedEnd[axis] =\n stopForAxis(endMask, end, strides, inputShape, axis, ellipsisMask);\n normalizedStrides[axis] = stridesForAxis(strides, axis, ellipsisMask);\n }\n }\n return {\n begin: normalizedBegin,\n end: normalizedEnd,\n strides: normalizedStrides\n };\n}\n// Creates full selection at the elided dimensions. If the dimension matches\n// the ellipsis mask, override the current start value. Otherwise, insert.\nexport function startIndicesWithElidedDims(beginMask, ellipsisInsertionIndex, numElidedAxes, originalBegin, inputShape) {\n const newIndices = [...inputShape];\n const elidedAxes = getElidedAxes(numElidedAxes, ellipsisInsertionIndex);\n for (let axis = 0; axis < newIndices.length; axis++) {\n if (elidedAxes.indexOf(axis) > -1) {\n newIndices[axis] = 0;\n }\n else {\n const originalAxis = unnormalizeAxis(ellipsisInsertionIndex, numElidedAxes, axis);\n let originalValue = originalBegin[originalAxis];\n if (beginMask & 1 << originalAxis) {\n originalValue = 0;\n }\n newIndices[axis] = originalValue;\n }\n }\n return newIndices;\n}\n// Creates full selection at the elided dimensions. If the dimension matches\n// the ellipsis mask, override the current stop value. 
Otherwise, insert.\nexport function stopIndicesWithElidedDims(endMask, ellipsisInsertionIndex, numElidedAxes, originalEnd, inputShape) {\n const newIndices = [...inputShape];\n const elidedAxes = getElidedAxes(numElidedAxes, ellipsisInsertionIndex);\n for (let axis = 0; axis < newIndices.length; axis++) {\n if (elidedAxes.indexOf(axis) > -1) {\n newIndices[axis] = Number.MAX_SAFE_INTEGER;\n }\n else {\n const originalAxis = unnormalizeAxis(ellipsisInsertionIndex, numElidedAxes, axis);\n let originalValue = originalEnd[originalAxis];\n if (endMask & 1 << originalAxis) {\n originalValue = Number.MAX_SAFE_INTEGER;\n }\n newIndices[axis] = originalValue;\n }\n }\n for (let i = 0; i < newIndices.length; i++) {\n // Handle negative indices\n const axisSize = inputShape[i];\n if (newIndices[i] < 0) {\n newIndices[i] += axisSize;\n }\n newIndices[i] = util.clamp(0, newIndices[i], inputShape[i]);\n }\n return newIndices;\n}\nexport function stridesForAxis(strides, axis, ellipsisMask) {\n let stride = strides[axis];\n if (ellipsisMask & (1 << axis) || stride == null) {\n stride = 1;\n }\n return stride;\n}\nexport function startForAxis(beginMask, startIndices, strides, inputShape, axis, ellipsisMask) {\n // Begin with the specified index\n let start = startIndices[axis];\n const stride = strides[axis] || 1;\n // Check the axis bit from right of masked axes, or the begin index is not set\n // for the axis.\n if (beginMask & 1 << axis || ellipsisMask & 1 << axis || start == null) {\n if (stride > 0) {\n // Forward iteration - use the first element. These values will get\n // clamped below (Note: We could have set them to 0 and axis_size-1, but\n // use lowest() and max() to maintain symmetry with StopForAxis())\n start = Number.MIN_SAFE_INTEGER;\n }\n else {\n // Backward iteration - use the last element.\n start = Number.MAX_SAFE_INTEGER;\n }\n }\n // Handle negative indices\n const axisSize = inputShape[axis];\n if (start < 0) {\n start += axisSize;\n }\n // Clamping\n start = util.clamp(0, start, axisSize - 1);\n return start;\n}\nexport function stopForAxis(endMask, stopIndices, strides, inputShape, axis, ellipsisMask) {\n // Begin with the specified index\n let stop = stopIndices[axis];\n const stride = strides[axis] || 1;\n // Check the axis bit from right of masked axes, or if the stop index is not\n // set for this axis.\n if (endMask & (1 << axis) || ellipsisMask & (1 << axis) || stop == null) {\n if (stride > 0) {\n // Forward iteration - use the last element. 
These values will get\n // clamped below\n stop = Number.MAX_SAFE_INTEGER;\n }\n else {\n // Backward iteration - use the first element.\n stop = Number.MIN_SAFE_INTEGER;\n }\n }\n // Handle negative indices\n const axisSize = inputShape[axis];\n if (stop < 0) {\n stop += axisSize;\n }\n // Clamping\n // Because the end index points one past the last element, we need slightly\n // different clamping ranges depending on the direction.\n if (stride > 0) {\n // Forward iteration\n stop = util.clamp(0, stop, axisSize);\n }\n else {\n // Backward iteration\n stop = util.clamp(-1, stop, axisSize - 1);\n }\n return stop;\n}\n/**\n * Returns true if the slice occupies a continous set of elements in the\n * 'flat' space.\n */\nexport function isSliceContinous(shape, begin, size) {\n // Index of the first axis that has size > 1.\n let firstNonOneAxis = size.length;\n for (let i = 0; i < size.length; i++) {\n if (size[i] > 1) {\n firstNonOneAxis = i;\n break;\n }\n }\n for (let i = firstNonOneAxis + 1; i < size.length; i++) {\n if (begin[i] > 0 || size[i] !== shape[i]) {\n return false;\n }\n }\n return true;\n}\nexport function computeFlatOffset(begin, strides) {\n let flatOffset = begin.length > 0 ? begin[begin.length - 1] : 1;\n for (let i = 0; i < begin.length - 1; i++) {\n flatOffset += begin[i] * strides[i];\n }\n return flatOffset;\n}\nexport function parseSliceParams(x, begin, size) {\n // The following logic allows for more ergonomic calls.\n let begin_;\n const xRank = x.shape.length;\n if (typeof begin === 'number') {\n begin_ = [begin, ...new Array(xRank - 1).fill(0)];\n }\n else if (begin.length < xRank) {\n begin_ = begin.concat(new Array(xRank - begin.length).fill(0));\n }\n else {\n begin_ = begin.slice();\n }\n begin_.forEach(d => {\n util.assert(d !== -1, () => 'slice() does not support negative begin indexing.');\n });\n let size_;\n if (size == null) {\n size_ = new Array(xRank).fill(-1);\n }\n else if (typeof size === 'number') {\n size_ = [size, ...new Array(xRank - 1).fill(-1)];\n }\n else if (size.length < xRank) {\n size_ = size.concat(new Array(xRank - size.length).fill(-1));\n }\n else {\n size_ = size;\n }\n size_ = size_.map((d, i) => {\n if (d >= 0) {\n return d;\n }\n else {\n util.assert(d === -1, () => `Negative size values should be exactly -1 but got ` +\n `${d} for the slice() size at index ${i}.`);\n return x.shape[i] - begin_[i];\n }\n });\n return [begin_, size_];\n}\n//# sourceMappingURL=slice_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { assert } from './util';\n/**\n * Serializable defines the serialization contract.\n *\n * TFJS requires serializable classes to return their className when asked\n * to avoid issues with minification.\n */\nexport class Serializable {\n /**\n * Return the class name for this class to use in serialization contexts.\n *\n * Generally speaking this will be the same thing that constructor.name\n * would have returned. However, the class name needs to be robust\n * against minification for serialization/deserialization to work properly.\n *\n * There's also places such as initializers.VarianceScaling, where\n * implementation details between different languages led to different\n * class hierarchies and a non-leaf node is used for serialization purposes.\n */\n getClassName() {\n return this.constructor\n .className;\n }\n /**\n * Creates an instance of T from a ConfigDict.\n *\n * This works for most descendants of serializable. A few need to\n * provide special handling.\n * @param cls A Constructor for the class to instantiate.\n * @param config The Configuration for the object.\n */\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls(config);\n }\n}\n/**\n * Maps string keys to class constructors.\n *\n * Used during (de)serialization from the cross-language JSON format, which\n * requires the class name in the serialization format matches the class\n * names as used in Python, should it exist.\n */\nexport class SerializationMap {\n constructor() {\n this.classNameMap = {};\n }\n /**\n * Returns the singleton instance of the map.\n */\n static getMap() {\n if (SerializationMap.instance == null) {\n SerializationMap.instance = new SerializationMap();\n }\n return SerializationMap.instance;\n }\n /**\n * Registers the class as serializable.\n */\n static register(cls) {\n SerializationMap.getMap().classNameMap[cls.className] =\n [cls, cls.fromConfig];\n }\n}\n/**\n * Register a class with the serialization map of TensorFlow.js.\n *\n * This is often used for registering custom Layers, so they can be\n * serialized and deserialized.\n *\n * Example:\n *\n * ```js\n * class MyCustomLayer extends tf.layers.Layer {\n * static className = 'MyCustomLayer';\n *\n * constructor(config) {\n * super(config);\n * }\n * }\n * tf.serialization.registerClass(MyCustomLayer);\n * ```\n *\n * @param cls The class to be registered. 
It must have a public static member\n * called `className` defined and the value must be a non-empty string.\n *\n * @doc {heading: 'Models', subheading: 'Serialization', ignoreCI: true}\n */\nexport function registerClass(cls) {\n assert(cls.className != null, () => `Class being registered does not have the static className ` +\n `property defined.`);\n assert(typeof cls.className === 'string', () => `className is required to be a string, but got type ` +\n typeof cls.className);\n assert(cls.className.length > 0, () => `Class being registered has an empty-string as its className, ` +\n `which is disallowed.`);\n SerializationMap.register(cls);\n}\n//# sourceMappingURL=serialization.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from './engine';\nimport { inferShape } from './tensor_util_env';\nimport { arraysEqual, flatten, isString, isTypedArray } from './util';\nconst TEST_EPSILON_FLOAT32 = 1e-3;\nexport const TEST_EPSILON_FLOAT16 = 1e-1;\nexport function expectArraysClose(actual, expected, epsilon) {\n if (epsilon == null) {\n epsilon = testEpsilon();\n }\n return expectArraysPredicate(actual, expected, (a, b) => areClose(a, b, epsilon));\n}\nexport function testEpsilon() {\n return ENGINE.backend.floatPrecision() === 32 ? TEST_EPSILON_FLOAT32 :\n TEST_EPSILON_FLOAT16;\n}\nfunction expectArraysPredicate(actual, expected, predicate) {\n let checkClassType = true;\n if (isTypedArray(actual) || isTypedArray(expected)) {\n checkClassType = false;\n }\n if (isTypedArray(actual) && isTypedArray(expected)) {\n checkClassType = true;\n }\n if (checkClassType) {\n const aType = actual.constructor.name;\n const bType = expected.constructor.name;\n if (aType !== bType) {\n throw new Error(`Arrays are of different type. Actual: ${aType}. ` +\n `Expected: ${bType}`);\n }\n }\n if (Array.isArray(actual) && Array.isArray(expected)) {\n const actualShape = inferShape(actual);\n const expectedShape = inferShape(expected);\n if (!arraysEqual(actualShape, expectedShape)) {\n throw new Error(`Arrays have different shapes. ` +\n `Actual: [${actualShape}]. Expected: [${expectedShape}]`);\n }\n }\n const actualFlat = isTypedArray(actual) ? 
actual : flatten(actual);\n const expectedFlat = isTypedArray(expected) ?\n expected :\n flatten(expected);\n if (actualFlat.length !== expectedFlat.length) {\n throw new Error(`Arrays have different lengths actual: ${actualFlat.length} vs ` +\n `expected: ${expectedFlat.length}.\\n` +\n `Actual: ${actualFlat}.\\n` +\n `Expected: ${expectedFlat}.`);\n }\n for (let i = 0; i < expectedFlat.length; ++i) {\n const a = actualFlat[i];\n const e = expectedFlat[i];\n if (!predicate(a, e)) {\n throw new Error(`Arrays differ: actual[${i}] = ${a}, expected[${i}] = ${e}.\\n` +\n `Actual: ${actualFlat}.\\n` +\n `Expected: ${expectedFlat}.`);\n }\n }\n}\nexport function expectPromiseToFail(fn, done) {\n fn().then(() => done.fail(), () => done());\n}\nexport function expectArraysEqual(actual, expected) {\n const exp = typeof expected === 'string' || typeof expected === 'number' ||\n typeof expected === 'boolean' ?\n [expected] :\n expected;\n if (isString(actual) || isString(actual[0]) ||\n isString(expected) || isString(expected[0])) {\n // tslint:disable-next-line: triple-equals\n return expectArraysPredicate(actual, exp, (a, b) => a == b);\n }\n return expectArraysPredicate(actual, expected, (a, b) => areClose(a, b, 0));\n}\nexport function expectNumbersClose(a, e, epsilon) {\n if (epsilon == null) {\n epsilon = testEpsilon();\n }\n if (!areClose(a, e, epsilon)) {\n throw new Error(`Numbers differ: actual === ${a}, expected === ${e}`);\n }\n}\nfunction areClose(a, e, epsilon) {\n if (!isFinite(a) && !isFinite(e)) {\n return true;\n }\n if (isNaN(a) || isNaN(e) || Math.abs(a - e) > epsilon) {\n return false;\n }\n return true;\n}\nexport function expectValuesInRange(actual, low, high) {\n for (let i = 0; i < actual.length; i++) {\n if (actual[i] < low || actual[i] > high) {\n throw new Error(`Value out of range:${actual[i]} low: ${low}, high: ${high}`);\n }\n }\n}\nexport function expectArrayBuffersEqual(actual, expected) {\n // Safari & Jasmine don't like comparing ArrayBuffers directly. Wrapping in\n // a Float32Array solves this issue.\n expect(new Float32Array(actual)).toEqual(new Float32Array(expected));\n}\n//# sourceMappingURL=test_util.js.map", "/** @license See the LICENSE file. */\n// This code is auto-generated, do not modify this file!\nconst version = '2.7.0';\nexport { version };\n//# sourceMappingURL=version.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from './engine';\nimport { env } from './environment';\nimport { setDeprecationWarningFn } from './tensor';\nimport { getTensorsInContainer } from './tensor_util';\n/**\n * Enables production mode which disables correctness checks in favor of\n * performance.\n *\n * @doc {heading: 'Environment'}\n */\nexport function enableProdMode() {\n env().set('PROD', true);\n}\n/**\n * Enables debug mode which will log information about all executed kernels:\n * the elapsed time of the kernel execution, as well as the rank, shape, and\n * size of the output tensor.\n *\n * Debug mode will significantly slow down your application as it will\n * download the result of every operation to the CPU. This should not be used in\n * production. Debug mode does not affect the timing information of the kernel\n * execution as we do not measure download time in the kernel execution time.\n *\n * See also: `tf.profile`, `tf.memory`.\n *\n * @doc {heading: 'Environment'}\n */\nexport function enableDebugMode() {\n env().set('DEBUG', true);\n}\n/** Globally disables deprecation warnings */\nexport function disableDeprecationWarnings() {\n env().set('DEPRECATION_WARNINGS_ENABLED', false);\n console.warn(`TensorFlow.js deprecation warnings have been disabled.`);\n}\n/** Warn users about deprecated functionality. */\nexport function deprecationWarn(msg) {\n if (env().getBool('DEPRECATION_WARNINGS_ENABLED')) {\n console.warn(msg + ' You can disable deprecation warnings with ' +\n 'tf.disableDeprecationWarnings().');\n }\n}\nsetDeprecationWarningFn(deprecationWarn);\n/**\n * Dispose all variables kept in backend engine.\n *\n * @doc {heading: 'Environment'}\n */\nexport function disposeVariables() {\n ENGINE.disposeVariables();\n}\n/**\n * It returns the global engine that keeps track of all tensors and backends.\n *\n * @doc {heading: 'Environment'}\n */\nexport function engine() {\n return ENGINE;\n}\n/**\n * Returns memory info at the current time in the program. The result is an\n * object with the following properties:\n *\n * - `numBytes`: Number of bytes allocated (undisposed) at this time.\n * - `numTensors`: Number of unique tensors allocated.\n * - `numDataBuffers`: Number of unique data buffers allocated\n * (undisposed) at this time, which is \u2264 the number of tensors\n * (e.g. `a.reshape(newShape)` makes a new Tensor that shares the same\n * data buffer with `a`).\n * - `unreliable`: True if the memory usage is unreliable. 
See `reasons` when\n * `unreliable` is true.\n * - `reasons`: `string[]`, reasons why the memory is unreliable, present if\n * `unreliable` is true.\n *\n * WebGL Properties:\n * - `numBytesInGPU`: Number of bytes allocated (undisposed) in the GPU only at\n * this time.\n *\n * @doc {heading: 'Performance', subheading: 'Memory'}\n */\nexport function memory() {\n return ENGINE.memory();\n}\n/**\n * Executes the provided function `f()` and returns a promise that resolves\n * with information about the function's memory use:\n * - `newBytes`: the number of new bytes allocated\n * - `newTensors`: the number of new tensors created\n * - `peakBytes`: the peak number of bytes allocated\n * - `kernels`: an array of objects for each kernel involved that reports\n * their input and output shapes, number of bytes used, and number of new\n * tensors created.\n *\n * ```js\n * const profile = await tf.profile(() => {\n * const x = tf.tensor1d([1, 2, 3]);\n * let x2 = x.square();\n * x2.dispose();\n * x2 = x.square();\n * x2.dispose();\n * return x;\n * });\n *\n * console.log(`newBytes: ${profile.newBytes}`);\n * console.log(`newTensors: ${profile.newTensors}`);\n * console.log(`byte usage over all kernels: ${profile.kernels.map(k =>\n * k.totalBytesSnapshot)}`);\n * ```\n *\n *\n * @doc {heading: 'Performance', subheading: 'Profile'}\n */\nexport function profile(f) {\n return ENGINE.profile(f);\n}\n/**\n * Executes the provided function `fn` and after it is executed, cleans up all\n * intermediate tensors allocated by `fn` except those returned by `fn`.\n * `fn` must not return a Promise (async functions not allowed). The returned\n * result can be a complex object.\n *\n * Using this method helps avoid memory leaks. In general, wrap calls to\n * operations in `tf.tidy` for automatic memory cleanup.\n *\n * NOTE: Variables do *not* get cleaned up when inside a tidy(). If you want to\n * dispose variables, please use `tf.disposeVariables` or call dispose()\n * directly on variables.\n *\n * ```js\n * // y = 2 ^ 2 + 1\n * const y = tf.tidy(() => {\n * // a, b, and one will be cleaned up when the tidy ends.\n * const one = tf.scalar(1);\n * const a = tf.scalar(2);\n * const b = a.square();\n *\n * console.log('numTensors (in tidy): ' + tf.memory().numTensors);\n *\n * // The value returned inside the tidy function will return\n * // through the tidy, in this case to the variable y.\n * return b.add(one);\n * });\n *\n * console.log('numTensors (outside tidy): ' + tf.memory().numTensors);\n * y.print();\n * ```\n *\n * @param nameOrFn The name of the closure, or the function to execute.\n * If a name is provided, the 2nd argument should be the function.\n * If debug mode is on, the timing and the memory usage of the function\n * will be tracked and displayed on the console using the provided name.\n * @param fn The function to execute.\n *\n * @doc {heading: 'Performance', subheading: 'Memory'}\n */\nexport function tidy(nameOrFn, fn) {\n return ENGINE.tidy(nameOrFn, fn);\n}\n/**\n * Disposes any `tf.Tensor`s found within the provided object.\n *\n * @param container an object that may be a `tf.Tensor` or may directly\n * contain `tf.Tensor`s, such as a `Tensor[]` or `{key: Tensor, ...}`. If\n * the object is not a `tf.Tensor` or does not contain `Tensors`, nothing\n * happens. 
In general it is safe to pass any object here, except that\n * `Promise`s are not supported.\n *\n * @doc {heading: 'Performance', subheading: 'Memory'}\n */\nexport function dispose(container) {\n const tensors = getTensorsInContainer(container);\n tensors.forEach(tensor => tensor.dispose());\n}\n/**\n * Keeps a `tf.Tensor` generated inside a `tf.tidy` from being disposed\n * automatically.\n *\n * ```js\n * let b;\n * const y = tf.tidy(() => {\n * const one = tf.scalar(1);\n * const a = tf.scalar(2);\n *\n * // b will not be cleaned up by the tidy. a and one will be cleaned up\n * // when the tidy ends.\n * b = tf.keep(a.square());\n *\n * console.log('numTensors (in tidy): ' + tf.memory().numTensors);\n *\n * // The value returned inside the tidy function will return\n * // through the tidy, in this case to the variable y.\n * return b.add(one);\n * });\n *\n * console.log('numTensors (outside tidy): ' + tf.memory().numTensors);\n * console.log('y:');\n * y.print();\n * console.log('b:');\n * b.print();\n * ```\n *\n * @param result The tensor to keep from being disposed.\n *\n * @doc {heading: 'Performance', subheading: 'Memory'}\n */\nexport function keep(result) {\n return ENGINE.keep(result);\n}\n/**\n * Executes `f()` and returns a promise that resolves with timing\n * information.\n *\n * The result is an object with the following properties:\n *\n * - `wallMs`: Wall execution time.\n * - `kernelMs`: Kernel execution time, ignoring data transfer. If using the\n * WebGL backend and the query timer extension is not available, this will\n * return an error object.\n * - On `WebGL` The following additional properties exist:\n * - `uploadWaitMs`: CPU blocking time on texture uploads.\n * - `downloadWaitMs`: CPU blocking time on texture downloads (readPixels).\n *\n * ```js\n * const x = tf.randomNormal([20, 20]);\n * const time = await tf.time(() => x.matMul(x));\n *\n * console.log(`kernelMs: ${time.kernelMs}, wallTimeMs: ${time.wallMs}`);\n * ```\n *\n * @param f The function to execute and time.\n *\n * @doc {heading: 'Performance', subheading: 'Timing'}\n */\nexport function time(f) {\n return ENGINE.time(f);\n}\n/**\n * Sets the backend (cpu, webgl, wasm, etc) responsible for creating tensors and\n * executing operations on those tensors. Returns a promise that resolves\n * to a boolean if the backend initialization was successful.\n *\n * Note this disposes the current backend, if any, as well as any tensors\n * associated with it. A new backend is initialized, even if it is of the\n * same type as the previous one.\n *\n * @param backendName The name of the backend. Currently supports\n * `'webgl'|'cpu'` in the browser, `'tensorflow'` under node.js\n * (requires tfjs-node), and `'wasm'` (requires tfjs-backend-wasm).\n *\n * @doc {heading: 'Backends'}\n */\nexport function setBackend(backendName) {\n return ENGINE.setBackend(backendName);\n}\n/**\n * Returns a promise that resolves when the currently selected backend (or the\n * highest priority one) has initialized. Await this promise when you are using\n * a backend that has async initialization.\n *\n * @doc {heading: 'Backends'}\n */\nexport function ready() {\n return ENGINE.ready();\n}\n/**\n * Returns the current backend name (cpu, webgl, etc). 
The backend is\n * responsible for creating tensors and executing operations on those tensors.\n *\n * @doc {heading: 'Backends'}\n */\nexport function getBackend() {\n return ENGINE.backendName;\n}\n/**\n * Removes a backend and the registered factory.\n *\n * @doc {heading: 'Backends'}\n */\nexport function removeBackend(name) {\n ENGINE.removeBackend(name);\n}\n/**\n * Finds the backend registered under the provided name. Returns null if the\n * name is not in the registry, or the registration hasn't finished yet.\n */\nexport function findBackend(name) {\n return ENGINE.findBackend(name);\n}\n/**\n * Finds the backend factory registered under the provided name. Returns a\n * function that produces a new backend when called. Returns null if the name\n * is not in the registry.\n */\nexport function findBackendFactory(name) {\n return ENGINE.findBackendFactory(name);\n}\n/**\n * Registers a global backend. The registration should happen when importing\n * a module file (e.g. when importing `backend_webgl.ts`), and is used for\n * modular builds (e.g. custom tfjs bundle with only webgl support).\n *\n * @param factory The backend factory function. When called, it should\n * return a backend instance, or a promise of an instance.\n * @param priority The priority of the backend (higher = more important).\n * In case multiple backends are registered, the priority is used to find\n * the best backend. Defaults to 1.\n * @return False if there is already a registered backend under this name, true\n * if not.\n *\n * @doc {heading: 'Backends'}\n */\nexport function registerBackend(name, factory, priority = 1) {\n return ENGINE.registerBackend(name, factory, priority);\n}\n/**\n * Gets the current backend. If no backends have been initialized, this will\n * attempt to initialize the best backend. Will throw an error if the highest\n * priority backend has async initialization, in which case, you should call\n * 'await tf.ready()' before running other code.\n *\n * @doc {heading: 'Backends'}\n */\nexport function backend() {\n return ENGINE.backend;\n}\n/**\n * Sets the global platform.\n *\n * @param platformName The name of this platform.\n * @param platform A platform implementation.\n */\nexport function setPlatform(platformName, platform) {\n env().setPlatform(platformName, platform);\n}\n//# sourceMappingURL=globals.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Add } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Adds two `tf.Tensor`s element-wise, A + B. 
Supports broadcasting.\n *\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3, 4]);\n * const b = tf.tensor1d([10, 20, 30, 40]);\n *\n * a.add(b).print(); // or tf.add(a, b)\n * ```\n *\n * ```js\n * // Broadcast add a with b.\n * const a = tf.scalar(5);\n * const b = tf.tensor1d([10, 20, 30, 40]);\n *\n * a.add(b).print(); // or tf.add(a, b)\n * ```\n * @param a The first `tf.Tensor` to add.\n * @param b The second `tf.Tensor` to add. Must have the same type as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction add_(a, b) {\n let $a = convertToTensor(a, 'a', 'add');\n let $b = convertToTensor(b, 'b', 'add');\n [$a, $b] = makeTypesMatch($a, $b);\n const forward = (backend, save) => {\n const res = backend.add($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Add);\n}\nexport const add = op({ add_ });\n//# sourceMappingURL=add.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { FloorDiv } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Divides two `tf.Tensor`s element-wise, A / B. Supports broadcasting.\n * The result is rounded with floor function.\n *\n *\n * ```js\n * const a = tf.tensor1d([1, 4, 9, 16]);\n * const b = tf.tensor1d([1, 2, 3, 4]);\n *\n * a.floorDiv(b).print(); // or tf.div(a, b)\n * ```\n *\n * ```js\n * // Broadcast div a with b.\n * const a = tf.tensor1d([2, 4, 6, 8]);\n * const b = tf.scalar(2);\n *\n * a.floorDiv(b).print(); // or tf.floorDiv(a, b)\n * ```\n *\n * @param a The first tensor as the numerator.\n * @param b The second tensor as the denominator. Must have the same dtype as\n * `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction floorDiv_(a, b) {\n let $a = convertToTensor(a, 'a', 'floorDiv');\n let $b = convertToTensor(b, 'b', 'floorDiv');\n [$a, $b] = makeTypesMatch($a, $b);\n const forward = (backend, save) => {\n const res = backend.floorDiv($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, FloorDiv);\n}\nexport const floorDiv = op({ floorDiv_ });\n//# sourceMappingURL=floorDiv.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Div } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { floorDiv } from './floorDiv';\nimport { op } from './operation';\n/**\n * Divides two `tf.Tensor`s element-wise, A / B. Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([1, 4, 9, 16]);\n * const b = tf.tensor1d([1, 2, 3, 4]);\n *\n * a.div(b).print(); // or tf.div(a, b)\n * ```\n *\n * ```js\n * // Broadcast div a with b.\n * const a = tf.tensor1d([2, 4, 6, 8]);\n * const b = tf.scalar(2);\n *\n * a.div(b).print(); // or tf.div(a, b)\n * ```\n *\n * @param a The first tensor as the numerator.\n * @param b The second tensor as the denominator. Must have the same dtype as\n * `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction div_(a, b) {\n let $a = convertToTensor(a, 'a', 'div');\n let $b = convertToTensor(b, 'b', 'div');\n [$a, $b] = makeTypesMatch($a, $b);\n if ($a.dtype === 'int32' && $b.dtype === 'int32') {\n return floorDiv($a, $b);\n }\n const forward = (backend, save) => {\n const res = backend.realDivide($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n const attrs = {};\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Div, attrs);\n}\nexport const div = op({ div_ });\n//# sourceMappingURL=div.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Multiply } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Multiplies two `tf.Tensor`s element-wise, A * B. 
Supports broadcasting.\n *\n * We also expose `tf.mulStrict` which has the same signature as this op and\n * asserts that `a` and `b` are the same shape (does not broadcast).\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3, 4]);\n * const b = tf.tensor1d([2, 3, 4, 5]);\n *\n * a.mul(b).print(); // or tf.mul(a, b)\n * ```\n *\n * ```js\n * // Broadcast mul a with b.\n * const a = tf.tensor1d([1, 2, 3, 4]);\n * const b = tf.scalar(5);\n *\n * a.mul(b).print(); // or tf.mul(a, b)\n * ```\n * @param a The first tensor to multiply.\n * @param b The second tensor to multiply. Must have the same dtype as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction mul_(a, b) {\n let $a = convertToTensor(a, 'a', 'mul');\n let $b = convertToTensor(b, 'b', 'mul');\n [$a, $b] = makeTypesMatch($a, $b);\n const forward = (backend, save) => {\n const res = backend.multiply($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Multiply);\n}\nexport const mul = op({ mul_ });\n//# sourceMappingURL=mul.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Abs } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes absolute value element-wise: `abs(x)`\n *\n * ```js\n * const x = tf.tensor1d([-1, 2, -3, 4]);\n *\n * x.abs().print(); // or tf.abs(x)\n * ```\n * @param x The input `tf.Tensor`.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction abs_(x) {\n const $x = convertToTensor(x, 'x', 'abs');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n save([$x]);\n if ($x.dtype === 'complex64') {\n return backend.complexAbs($x);\n }\n return backend.abs($x);\n }, inputs, null /* grad */, Abs);\n}\nexport const abs = op({ abs_ });\n//# sourceMappingURL=abs.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Acos } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes acos of the input `tf.Tensor` element-wise: `acos(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, .7]);\n *\n * x.acos().print(); // or tf.acos(x)\n * ```\n * @param x The input tensor.\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction acos_(x) {\n const $x = convertToTensor(x, 'x', 'acos');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.acos($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Acos);\n}\nexport const acos = op({ acos_ });\n//# sourceMappingURL=acos.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Acosh } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes the inverse hyperbolic cos of the input `tf.Tensor` element-wise:\n * `acosh(x)`\n *\n * ```js\n * const x = tf.tensor1d([10, 1, 3, 5.7]);\n *\n * x.acosh().print(); // or tf.acosh(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction acosh_(x) {\n const $x = convertToTensor(x, 'x', 'acosh');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.acosh($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Acosh);\n}\nexport const acosh = op({ acosh_ });\n//# sourceMappingURL=acosh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { AddN } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * Adds a list of `tf.Tensor`s element-wise, each with the same shape and dtype.\n *\n * ```js\n * const a = tf.tensor1d([1, 2]);\n * const b = tf.tensor1d([3, 4]);\n * const c = tf.tensor1d([5, 6]);\n *\n * tf.addN([a, b, c]).print();\n * ```\n * @param tensors A list of tensors with the same shape and dtype.\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction addN_(tensors) {\n util.assert(Array.isArray(tensors), () => 'The argument passed to tf.addN() must be a list of tensors');\n util.assert(tensors.length >= 1, () => `Must pass at least one tensor to tf.addN(), but got ` +\n `${tensors.length}`);\n const $tensors = tensors.map((t, i) => convertToTensor(t, `tensors${i}`, 'addN'));\n const firstTensor = $tensors[0];\n $tensors.forEach(t => {\n if (t.dtype !== firstTensor.dtype) {\n throw new Error('All tensors passed to tf.addN() must have the same dtype');\n }\n });\n $tensors.forEach(t => {\n if (!util.arraysEqual(t.shape, firstTensor.shape)) {\n throw new Error('All tensors passed to tf.addN() must have the same shape');\n }\n });\n const forward = (backend, save) => {\n const res = backend.addN($tensors);\n save($tensors);\n return res;\n };\n const inputs = $tensors;\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, AddN);\n}\nexport const addN = op({ addN_ });\n//# sourceMappingURL=add_n.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as util from '../util';\n/**\n * Returns true if the axis specifies the inner most dimensions of the\n * array.\n */\nexport function axesAreInnerMostDims(axes, rank) {\n for (let i = 0; i < axes.length; ++i) {\n if (axes[axes.length - i - 1] !== rank - 1 - i) {\n return false;\n }\n }\n return true;\n}\nexport function combineLocations(outputLoc, reduceLoc, axes) {\n const rank = outputLoc.length + reduceLoc.length;\n const loc = [];\n let outIdx = 0;\n let reduceIdx = 0;\n for (let dim = 0; dim < rank; dim++) {\n if (axes.indexOf(dim) === -1) {\n loc.push(outputLoc[outIdx++]);\n }\n else {\n loc.push(reduceLoc[reduceIdx++]);\n }\n }\n return loc;\n}\nexport function computeOutAndReduceShapes(aShape, axes) {\n const outShape = [];\n const rank = aShape.length;\n for (let dim = 0; dim < rank; dim++) {\n if (axes.indexOf(dim) === -1) {\n outShape.push(aShape[dim]);\n }\n }\n const reduceShape = axes.map(dim => aShape[dim]);\n return [outShape, reduceShape];\n}\nexport function expandShapeToKeepDim(shape, axes) {\n const reduceSubShape = axes.map(x => 1);\n return combineLocations(shape, reduceSubShape, axes);\n}\nexport function assertAxesAreInnerMostDims(msg, axes, rank) {\n util.assert(axesAreInnerMostDims(axes, rank), () => `${msg} supports only inner-most axes for now. ` +\n `Got axes ${axes} and rank-${rank} input.`);\n}\n/**\n * Returns the axes permutation to be used with `tf.transpose`, if such\n * permutation is necessary. Otherwise it returns null. This method is used by\n * operations that operate only on inner-most axes.\n */\nexport function getAxesPermutation(axes, rank) {\n if (axesAreInnerMostDims(axes, rank)) {\n return null;\n }\n const result = [];\n for (let i = 0; i < rank; ++i) {\n if (axes.indexOf(i) === -1) {\n result.push(i);\n }\n }\n axes.forEach(axis => result.push(axis));\n return result;\n}\n/** Returns the axes permutation that undoes the original permutation. */\nexport function getUndoAxesPermutation(axes) {\n return axes.map((axis, i) => [i, axis])\n .sort((a, b) => a[1] - b[1])\n .map(x => x[0]);\n}\nexport function getInnerMostAxes(numAxes, rank) {\n const res = [];\n for (let i = rank - numAxes; i < rank; ++i) {\n res.push(i);\n }\n return res;\n}\n//# sourceMappingURL=axis_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { All } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam } from '../util';\nimport { expandShapeToKeepDim, getAxesPermutation, getInnerMostAxes } from './axis_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { transpose } from './transpose';\n/**\n * Computes the logical and of elements across dimensions of a `tf.Tensor`.\n *\n * Reduces the input along the dimensions given in `axes`. Unless `keepDims`\n * is true, the rank of the `tf.Tensor` is reduced by 1 for each entry in\n * `axes`. If `keepDims` is true, the reduced dimensions are retained with\n * length 1. If `axes` has no entries, all dimensions are reduced, and an\n * `tf.Tensor` with a single element is returned.\n *\n * ```js\n * const x = tf.tensor1d([1, 1, 1], 'bool');\n *\n * x.all().print(); // or tf.all(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 1, 0, 0], [2, 2], 'bool');\n *\n * const axis = 1;\n * x.all(axis).print(); // or tf.all(x, axis)\n * ```\n *\n * @param x The input tensor. Must be of dtype bool.\n * @param axis The dimension(s) to reduce. By default it reduces\n * all dimensions.\n * @param keepDims If true, retains reduced dimensions with size 1.\n *\n * @doc {heading: 'Operations', subheading: 'Reduction'}\n */\nfunction all_(x, axis = null, keepDims = false) {\n let $x = convertToTensor(x, 'x', 'all', 'bool');\n const forward = (backend) => {\n const origAxes = parseAxisParam(axis, $x.shape);\n let axes = origAxes;\n const permutedAxes = getAxesPermutation(axes, $x.rank);\n if (permutedAxes != null) {\n $x = transpose($x, permutedAxes);\n axes = getInnerMostAxes(axes.length, $x.rank);\n }\n const res = backend.all($x, axes);\n if (keepDims) {\n const newShape = expandShapeToKeepDim(res.shape, origAxes);\n return reshape(res, newShape);\n }\n return res;\n };\n const inputs = { x: $x };\n const attrs = { axis, keepDims };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, All, attrs);\n}\nexport const all = op({ all_ });\n//# sourceMappingURL=all.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Any } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam } from '../util';\nimport { expandShapeToKeepDim, getAxesPermutation, getInnerMostAxes } from './axis_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { transpose } from './transpose';\n/**\n * Computes the logical or of elements across dimensions of a `tf.Tensor`.\n *\n * Reduces the input along the dimensions given in `axes`. Unless `keepDims`\n * is true, the rank of the `tf.Tensor` is reduced by 1 for each entry in\n * `axes`. If `keepDims` is true, the reduced dimensions are retained with\n * length 1. If `axes` has no entries, all dimensions are reduced, and an\n * `tf.Tensor` with a single element is returned.\n *\n * ```js\n * const x = tf.tensor1d([1, 1, 1], 'bool');\n *\n * x.any().print(); // or tf.any(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 1, 0, 0], [2, 2], 'bool');\n *\n * const axis = 1;\n * x.any(axis).print(); // or tf.any(x, axis)\n * ```\n *\n * @param x The input tensor. Must be of dtype bool.\n * @param axis The dimension(s) to reduce. By default it reduces\n * all dimensions.\n * @param keepDims If true, retains reduced dimensions with size 1.\n *\n * @doc {heading: 'Operations', subheading: 'Reduction'}\n */\nfunction any_(x, axis = null, keepDims = false) {\n let $x = convertToTensor(x, 'x', 'any', 'bool');\n const forward = (backend) => {\n const origAxes = parseAxisParam(axis, $x.shape);\n let axes = origAxes;\n const permutedAxes = getAxesPermutation(axes, $x.rank);\n if (permutedAxes != null) {\n $x = transpose($x, permutedAxes);\n axes = getInnerMostAxes(axes.length, $x.rank);\n }\n const res = backend.any($x, axes);\n if (keepDims) {\n const newShape = expandShapeToKeepDim(res.shape, origAxes);\n return reshape(res, newShape);\n }\n return res;\n };\n const inputs = { x: $x };\n const attrs = { axis, keepDims };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Any, attrs);\n}\n// tslint:disable-next-line:variable-name\nexport const any = op({ any_ });\n//# sourceMappingURL=any.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { ArgMax } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as axis_util from './axis_util';\nimport { op } from './operation';\nimport { transpose } from './transpose';\n/**\n * Returns the indices of the maximum values along an `axis`.\n *\n * The result has the same shape as `input` with the dimension along `axis`\n * removed.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3]);\n *\n * x.argMax().print(); // or tf.argMax(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 4, 3], [2, 2]);\n *\n * const axis = 1;\n * x.argMax(axis).print(); // or tf.argMax(x, axis)\n * ```\n *\n * @param x The input tensor.\n * @param axis The dimension to reduce. Defaults to 0 (outer-most dimension).\n *\n * @doc {heading: 'Operations', subheading: 'Reduction'}\n */\nfunction argMax_(x, axis = 0) {\n let $x = convertToTensor(x, 'x', 'argMax');\n const forward = (backend, save) => {\n save([$x]);\n let axes = util.parseAxisParam(axis, $x.shape);\n const permutedAxes = axis_util.getAxesPermutation(axes, $x.rank);\n if (permutedAxes != null) {\n $x = transpose($x, permutedAxes);\n axes = axis_util.getInnerMostAxes(axes.length, $x.rank);\n }\n return backend.argMax($x, axes[0]);\n };\n const inputs = { x: $x };\n const attrs = { axis };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, ArgMax, attrs);\n}\nexport const argMax = op({ argMax_ });\n//# sourceMappingURL=arg_max.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { ArgMin } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as axis_util from './axis_util';\nimport { op } from './operation';\nimport { transpose } from './transpose';\n/**\n * Returns the indices of the minimum values along an `axis`.\n *\n * The result has the same shape as `input` with the dimension along `axis`\n * removed.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3]);\n *\n * x.argMin().print(); // or tf.argMin(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 4, 3], [2, 2]);\n *\n * const axis = 1;\n * x.argMin(axis).print(); // or tf.argMin(x, axis)\n * ```\n *\n * @param x The input tensor.\n * @param axis The dimension to reduce. Defaults to 0 (outer-most dimension).\n *\n * @doc {heading: 'Operations', subheading: 'Reduction'}\n */\nfunction argMin_(x, axis = 0) {\n let $x = convertToTensor(x, 'x', 'argMin');\n const forward = (backend, save) => {\n save([$x]);\n if (axis == null) {\n axis = 0;\n }\n let axes = util.parseAxisParam(axis, $x.shape);\n const permutedAxes = axis_util.getAxesPermutation(axes, $x.rank);\n if (permutedAxes != null) {\n $x = transpose($x, permutedAxes);\n axes = axis_util.getInnerMostAxes(axes.length, $x.rank);\n }\n return backend.argMin($x, axes[0]);\n };\n const inputs = { x: $x };\n const attrs = { axis };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, ArgMin, attrs);\n}\nexport const argMin = op({ argMin_ });\n//# sourceMappingURL=arg_min.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Asin } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes asin of the input `tf.Tensor` element-wise: `asin(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, .7]);\n *\n * x.asin().print(); // or tf.asin(x)\n * ```\n * @param x The input tensor.\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction asin_(x) {\n const $x = convertToTensor(x, 'x', 'asin');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.asin($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Asin);\n}\nexport const asin = op({ asin_ });\n//# sourceMappingURL=asin.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Asinh } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes inverse hyperbolic sin of the input `tf.Tensor` element-wise:\n * `asinh(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, .7]);\n *\n * x.asinh().print(); // or tf.asinh(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction asinh_(x) {\n const $x = convertToTensor(x, 'x', 'asinh');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.asinh($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Asinh);\n}\nexport const asinh = op({ asinh_ });\n//# sourceMappingURL=asinh.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Atan } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes atan of the input `tf.Tensor` element-wise: `atan(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, .7]);\n *\n * x.atan().print(); // or tf.atan(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction atan_(x) {\n const $x = convertToTensor(x, 'x', 'atan');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.atan($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Atan);\n}\nexport const atan = op({ atan_ });\n//# sourceMappingURL=atan.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Atan2 } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes arctangent of `tf.Tensor`s a / b element-wise: `atan2(a, b)`.\n * Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([1.0, 1.0, -1.0, .7]);\n * const b = tf.tensor1d([2.0, 13.0, 3.5, .21]);\n *\n * tf.atan2(a, b).print()\n * ```\n *\n * @param a The first tensor.\n * @param b The second tensor. Must have the same dtype as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction atan2_(a, b) {\n let $a = convertToTensor(a, 'a', 'atan2');\n let $b = convertToTensor(b, 'b', 'atan2');\n [$a, $b] = makeTypesMatch($a, $b);\n const forward = (backend, save) => {\n const res = backend.atan2($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Atan2);\n}\nexport const atan2 = op({ atan2_ });\n//# sourceMappingURL=atan2.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Atanh } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes inverse hyperbolic tan of the input `tf.Tensor` element-wise:\n * `atanh(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, .1, -.1, .7]);\n *\n * x.atanh().print(); // or tf.atanh(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction atanh_(x) {\n const $x = convertToTensor(x, 'x', 'atanh');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.atanh($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Atanh);\n}\nexport const atanh = op({ atanh_ });\n//# sourceMappingURL=atanh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as util from '../util';\n/**\n *\n * @param inputShape Input tensor shape is of the following dimensions:\n * `[batch, height, width, inChannels]`.\n * @param filterShape The filter shape is of the following dimensions:\n * `[filterHeight, filterWidth, depth]`.\n * @param strides The strides of the sliding window for each dimension of the\n * input tensor: `[strideHeight, strideWidth]`.\n * If `strides` is a single number,\n * then `strideHeight == strideWidth`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1*1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dataFormat The data format of the input and output data.\n * Defaults to 'NHWC'.\n * @param dilations The dilation rates: `[dilationHeight, dilationWidth]`.\n * Defaults to `[1, 1]`. 
If `dilations` is a single number, then\n * `dilationHeight == dilationWidth`.\n */\nexport function computeDilation2DInfo(inputShape, filterShape, strides, pad, dataFormat = 'NHWC', dilations) {\n // `computerConv2DInfo` require filterShape to be in the dimension of:\n // `[filterHeight, filterWidth, depth, outDepth]`, dilation2d doesn't have\n // outDepth, it should have the same depth as the input.\n // Input shape: [batch, height, width, inChannels]\n const inputChannels = inputShape[3];\n const $filterShape = [...filterShape, inputChannels];\n const $dataFormat = convertConv2DDataFormat(dataFormat);\n return computeConv2DInfo(inputShape, $filterShape, strides, dilations, pad, null /* roundingMode */, null /* depthWise */, $dataFormat);\n}\nexport function computePool2DInfo(inShape, filterSize, strides, dilations, pad, roundingMode, dataFormat = 'channelsLast') {\n const [filterHeight, filterWidth] = parseTupleParam(filterSize);\n let filterShape;\n if (dataFormat === 'channelsLast') {\n filterShape = [filterHeight, filterWidth, inShape[3], inShape[3]];\n }\n else if (dataFormat === 'channelsFirst') {\n filterShape = [filterHeight, filterWidth, inShape[1], inShape[1]];\n }\n else {\n throw new Error(`Unknown dataFormat ${dataFormat}`);\n }\n return computeConv2DInfo(inShape, filterShape, strides, dilations, pad, roundingMode, false, dataFormat);\n}\n/**\n * Computes the information for a forward pass of a pooling3D operation.\n */\nexport function computePool3DInfo(inShape, filterSize, strides, dilations, pad, roundingMode, dataFormat = 'NDHWC') {\n const [filterDepth, filterHeight, filterWidth] = parse3TupleParam(filterSize);\n let filterShape;\n let $dataFormat;\n if (dataFormat === 'NDHWC') {\n $dataFormat = 'channelsLast';\n filterShape =\n [filterDepth, filterHeight, filterWidth, inShape[4], inShape[4]];\n }\n else if (dataFormat === 'NCDHW') {\n $dataFormat = 'channelsFirst';\n filterShape =\n [filterDepth, filterHeight, filterWidth, inShape[1], inShape[1]];\n }\n else {\n throw new Error(`Unknown dataFormat ${dataFormat}`);\n }\n return computeConv3DInfo(inShape, filterShape, strides, dilations, pad, false, $dataFormat, roundingMode);\n}\n/**\n * Computes the information for a forward pass of a convolution/pooling\n * operation.\n */\nexport function computeConv2DInfo(inShape, filterShape, strides, dilations, pad, roundingMode, depthwise = false, dataFormat = 'channelsLast') {\n let [batchSize, inHeight, inWidth, inChannels] = [-1, -1, -1, -1];\n if (dataFormat === 'channelsLast') {\n [batchSize, inHeight, inWidth, inChannels] = inShape;\n }\n else if (dataFormat === 'channelsFirst') {\n [batchSize, inChannels, inHeight, inWidth] = inShape;\n }\n else {\n throw new Error(`Unknown dataFormat ${dataFormat}`);\n }\n const [filterHeight, filterWidth, , filterChannels] = filterShape;\n const [strideHeight, strideWidth] = parseTupleParam(strides);\n const [dilationHeight, dilationWidth] = parseTupleParam(dilations);\n const effectiveFilterHeight = getEffectiveFilterSize(filterHeight, dilationHeight);\n const effectiveFilterWidth = getEffectiveFilterSize(filterWidth, dilationWidth);\n const { padInfo, outHeight, outWidth } = getPadAndOutInfo(pad, inHeight, inWidth, strideHeight, strideWidth, effectiveFilterHeight, effectiveFilterWidth, roundingMode, dataFormat);\n const outChannels = depthwise ? 
filterChannels * inChannels : filterChannels;\n let outShape;\n if (dataFormat === 'channelsFirst') {\n outShape = [batchSize, outChannels, outHeight, outWidth];\n }\n else if (dataFormat === 'channelsLast') {\n outShape = [batchSize, outHeight, outWidth, outChannels];\n }\n return {\n batchSize,\n dataFormat,\n inHeight,\n inWidth,\n inChannels,\n outHeight,\n outWidth,\n outChannels,\n padInfo,\n strideHeight,\n strideWidth,\n filterHeight,\n filterWidth,\n effectiveFilterHeight,\n effectiveFilterWidth,\n dilationHeight,\n dilationWidth,\n inShape,\n outShape,\n filterShape\n };\n}\n/**\n * Computes the information for a forward pass of a 3D convolution/pooling\n * operation.\n */\nexport function computeConv3DInfo(inShape, filterShape, strides, dilations, pad, depthwise = false, dataFormat = 'channelsLast', roundingMode) {\n let [batchSize, inDepth, inHeight, inWidth, inChannels] = [-1, -1, -1, -1, -1];\n if (dataFormat === 'channelsLast') {\n [batchSize, inDepth, inHeight, inWidth, inChannels] = inShape;\n }\n else if (dataFormat === 'channelsFirst') {\n [batchSize, inChannels, inDepth, inHeight, inWidth] = inShape;\n }\n else {\n throw new Error(`Unknown dataFormat ${dataFormat}`);\n }\n const [filterDepth, filterHeight, filterWidth, , filterChannels] = filterShape;\n const [strideDepth, strideHeight, strideWidth] = parse3TupleParam(strides);\n const [dilationDepth, dilationHeight, dilationWidth] = parse3TupleParam(dilations);\n const effectiveFilterDepth = getEffectiveFilterSize(filterDepth, dilationDepth);\n const effectiveFilterHeight = getEffectiveFilterSize(filterHeight, dilationHeight);\n const effectiveFilterWidth = getEffectiveFilterSize(filterWidth, dilationWidth);\n const { padInfo, outDepth, outHeight, outWidth } = get3DPadAndOutInfo(pad, inDepth, inHeight, inWidth, strideDepth, strideHeight, strideWidth, effectiveFilterDepth, effectiveFilterHeight, effectiveFilterWidth, roundingMode);\n const outChannels = depthwise ? filterChannels * inChannels : filterChannels;\n let outShape;\n if (dataFormat === 'channelsFirst') {\n outShape = [batchSize, outChannels, outDepth, outHeight, outWidth];\n }\n else if (dataFormat === 'channelsLast') {\n outShape = [batchSize, outDepth, outHeight, outWidth, outChannels];\n }\n return {\n batchSize,\n dataFormat,\n inDepth,\n inHeight,\n inWidth,\n inChannels,\n outDepth,\n outHeight,\n outWidth,\n outChannels,\n padInfo,\n strideDepth,\n strideHeight,\n strideWidth,\n filterDepth,\n filterHeight,\n filterWidth,\n effectiveFilterDepth,\n effectiveFilterHeight,\n effectiveFilterWidth,\n dilationDepth,\n dilationHeight,\n dilationWidth,\n inShape,\n outShape,\n filterShape\n };\n}\nfunction computeOutputShape2D(inShape, fieldSize, stride, zeroPad, roundingMode) {\n if (zeroPad == null) {\n zeroPad = computeDefaultPad(inShape, fieldSize, stride);\n }\n const inputRows = inShape[0];\n const inputCols = inShape[1];\n const outputRows = conditionalRound((inputRows - fieldSize + 2 * zeroPad) / stride + 1, roundingMode);\n util.assert(util.isInt(outputRows), () => `The output # of rows (${outputRows}) must be an integer. ` +\n `Change the stride and/or zero pad parameters`);\n const outputCols = conditionalRound((inputCols - fieldSize + 2 * zeroPad) / stride + 1, roundingMode);\n util.assert(util.isInt(outputCols), () => `The output # of columns (${outputCols}) must be an integer. 
` +\n `Change the stride and/or zero pad parameters`);\n return [outputRows, outputCols];\n}\nfunction computeOutputShape4D(inShape, fieldSize, outChannels, stride, zeroPad, roundingMode) {\n if (zeroPad == null) {\n zeroPad = computeDefaultPad(inShape, fieldSize, stride);\n }\n const inputDepth = inShape[0];\n const inputRows = inShape[1];\n const inputCols = inShape[2];\n const outputDepths = conditionalRound((inputDepth - fieldSize + 2 * zeroPad) / stride + 1, roundingMode);\n util.assert(util.isInt(outputDepths), () => `The output # of depths (${outputDepths}) must be an integer. ` +\n `Change the stride and/or zero pad parameters`);\n const outputRows = conditionalRound((inputRows - fieldSize + 2 * zeroPad) / stride + 1, roundingMode);\n util.assert(util.isInt(outputRows), () => `The output # of rows (${outputRows}) must be an integer. ` +\n `Change the stride and/or zero pad parameters`);\n const outputCols = conditionalRound((inputCols - fieldSize + 2 * zeroPad) / stride + 1, roundingMode);\n util.assert(util.isInt(outputCols), () => `The output # of columns (${outputCols}) must be an integer. ` +\n `Change the stride and/or zero pad parameters`);\n return [outputDepths, outputRows, outputCols, outChannels];\n}\nexport function computeDefaultPad(inputShape, fieldSize, stride, dilation = 1) {\n const effectiveFieldSize = getEffectiveFilterSize(fieldSize, dilation);\n return Math.floor((inputShape[0] * (stride - 1) - stride + effectiveFieldSize) / 2);\n}\nfunction parseTupleParam(param) {\n if (typeof param === 'number') {\n return [param, param, param];\n }\n if (param.length === 2) {\n return [param[0], param[1], 1];\n }\n return param;\n}\nfunction parse3TupleParam(param) {\n return typeof param === 'number' ? [param, param, param] : param;\n}\n/* See https://www.tensorflow.org/api_docs/python/tf/nn/atrous_conv2d\n * Atrous convolution is equivalent to standard convolution with upsampled\n * filters with effective_filter_height =\n * filter_height + (filter_height - 1) * (dilation - 1)\n * and effective_filter_width =\n * filter_width + (filter_width - 1) * (dilation - 1),\n * produced by inserting dilation - 1 zeros along consecutive elements across\n * the filters' spatial dimensions.\n * When there is a dilation, this converts a filter dimension to the\n * effective filter dimension, so it can be used in a standard convolution.\n */\nfunction getEffectiveFilterSize(filterSize, dilation) {\n if (dilation <= 1) {\n return filterSize;\n }\n return filterSize + (filterSize - 1) * (dilation - 1);\n}\nfunction getPadAndOutInfo(pad, inHeight, inWidth, strideHeight, strideWidth, filterHeight, filterWidth, roundingMode, dataFormat) {\n let padInfo;\n let outHeight;\n let outWidth;\n if (typeof pad === 'number') {\n const padType = (pad === 0) ? 
'VALID' : 'NUMBER';\n padInfo = { top: pad, bottom: pad, left: pad, right: pad, type: padType };\n const outShape = computeOutputShape2D([inHeight, inWidth], filterHeight, strideHeight, pad, roundingMode);\n outHeight = outShape[0];\n outWidth = outShape[1];\n }\n else if (pad === 'same') {\n outHeight = Math.ceil(inHeight / strideHeight);\n outWidth = Math.ceil(inWidth / strideWidth);\n const padAlongHeight = Math.max(0, (outHeight - 1) * strideHeight + filterHeight - inHeight);\n const padAlongWidth = Math.max(0, (outWidth - 1) * strideWidth + filterWidth - inWidth);\n const top = Math.floor(padAlongHeight / 2);\n const bottom = padAlongHeight - top;\n const left = Math.floor(padAlongWidth / 2);\n const right = padAlongWidth - left;\n padInfo = { top, bottom, left, right, type: 'SAME' };\n }\n else if (pad === 'valid') {\n padInfo = { top: 0, bottom: 0, left: 0, right: 0, type: 'VALID' };\n outHeight = Math.ceil((inHeight - filterHeight + 1) / strideHeight);\n outWidth = Math.ceil((inWidth - filterWidth + 1) / strideWidth);\n }\n else if (typeof pad === 'object') {\n const top = dataFormat === 'channelsLast' ? pad[1][0] : pad[2][0];\n const bottom = dataFormat === 'channelsLast' ? pad[1][1] : pad[2][1];\n const left = dataFormat === 'channelsLast' ? pad[2][0] : pad[3][0];\n const right = dataFormat === 'channelsLast' ? pad[2][1] : pad[3][1];\n const padType = (top === 0 && bottom === 0 && left === 0 && right === 0) ?\n 'VALID' :\n 'EXPLICIT';\n padInfo = { top, bottom, left, right, type: padType };\n outHeight = conditionalRound((inHeight - filterHeight + top + bottom) / strideHeight + 1, roundingMode);\n outWidth = conditionalRound((inWidth - filterWidth + left + right) / strideWidth + 1, roundingMode);\n }\n else {\n throw Error(`Unknown padding parameter: ${pad}`);\n }\n return { padInfo, outHeight, outWidth };\n}\nfunction get3DPadAndOutInfo(pad, inDepth, inHeight, inWidth, strideDepth, strideHeight, strideWidth, filterDepth, filterHeight, filterWidth, roundingMode) {\n let padInfo;\n let outDepth;\n let outHeight;\n let outWidth;\n if (typeof pad === 'number') {\n const padType = (pad === 0) ? 
'VALID' : 'NUMBER';\n padInfo = {\n top: pad,\n bottom: pad,\n left: pad,\n right: pad,\n front: pad,\n back: pad,\n type: padType\n };\n const outShape = computeOutputShape4D([inDepth, inHeight, inWidth, 1], filterDepth, 1, strideDepth, pad, roundingMode);\n outDepth = outShape[0];\n outHeight = outShape[1];\n outWidth = outShape[2];\n }\n else if (pad === 'same') {\n outDepth = Math.ceil(inDepth / strideDepth);\n outHeight = Math.ceil(inHeight / strideHeight);\n outWidth = Math.ceil(inWidth / strideWidth);\n const padAlongDepth = (outDepth - 1) * strideDepth + filterDepth - inDepth;\n const padAlongHeight = (outHeight - 1) * strideHeight + filterHeight - inHeight;\n const padAlongWidth = (outWidth - 1) * strideWidth + filterWidth - inWidth;\n const front = Math.floor(padAlongDepth / 2);\n const back = padAlongDepth - front;\n const top = Math.floor(padAlongHeight / 2);\n const bottom = padAlongHeight - top;\n const left = Math.floor(padAlongWidth / 2);\n const right = padAlongWidth - left;\n padInfo = { top, bottom, left, right, front, back, type: 'SAME' };\n }\n else if (pad === 'valid') {\n padInfo = {\n top: 0,\n bottom: 0,\n left: 0,\n right: 0,\n front: 0,\n back: 0,\n type: 'VALID'\n };\n outDepth = Math.ceil((inDepth - filterDepth + 1) / strideDepth);\n outHeight = Math.ceil((inHeight - filterHeight + 1) / strideHeight);\n outWidth = Math.ceil((inWidth - filterWidth + 1) / strideWidth);\n }\n else {\n throw Error(`Unknown padding parameter: ${pad}`);\n }\n return { padInfo, outDepth, outHeight, outWidth };\n}\n/**\n * Rounds a value depending on the rounding mode\n * @param value\n * @param roundingMode\n */\nfunction conditionalRound(value, roundingMode) {\n if (!roundingMode) {\n return value;\n }\n switch (roundingMode) {\n case 'round':\n // used for Caffe Conv\n return Math.round(value);\n case 'ceil':\n // used for Caffe Pool\n return Math.ceil(value);\n case 'floor':\n return Math.floor(value);\n default:\n throw new Error(`Unknown roundingMode ${roundingMode}`);\n }\n}\nexport function tupleValuesAreOne(param) {\n const [dimA, dimB, dimC] = parseTupleParam(param);\n return dimA === 1 && dimB === 1 && dimC === 1;\n}\nexport function eitherStridesOrDilationsAreOne(strides, dilations) {\n return tupleValuesAreOne(strides) || tupleValuesAreOne(dilations);\n}\n/**\n * Convert Conv2D dataFormat from 'NHWC'|'NCHW' to\n * 'channelsLast'|'channelsFirst'\n * @param dataFormat in 'NHWC'|'NCHW' mode\n * @return dataFormat in 'channelsLast'|'channelsFirst' mode\n * @throws unknown dataFormat\n */\nexport function convertConv2DDataFormat(dataFormat) {\n if (dataFormat === 'NHWC') {\n return 'channelsLast';\n }\n else if (dataFormat === 'NCHW') {\n return 'channelsFirst';\n }\n else {\n throw new Error(`Unknown dataFormat ${dataFormat}`);\n }\n}\n//# sourceMappingURL=conv_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
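// --- Editor's illustrative sketch (not part of the bundled tfjs sources above) ---
// The conv_util.js module quoted above derives output size and padding from the
// input size, stride and effective (dilated) filter size. A minimal standalone
// version of its 'same'-padding branch, assuming a single square spatial axis:
function samePadOutInfo(inSize, filterSize, stride, dilation = 1) {
  // effective filter size after dilation, as in getEffectiveFilterSize()
  const effectiveFilter = filterSize + (filterSize - 1) * (dilation - 1);
  const outSize = Math.ceil(inSize / stride);
  const padAlong = Math.max(0, (outSize - 1) * stride + effectiveFilter - inSize);
  const before = Math.floor(padAlong / 2);   // "top" / "left"
  const after = padAlong - before;           // "bottom" / "right"
  return { outSize, before, after };
}
// e.g. a 5-pixel axis, 3-wide filter, stride 2 -> output 3, one pixel of padding per side
console.log(samePadOutInfo(5, 3, 2));        // { outSize: 3, before: 1, after: 1 }
// --- end editor's sketch ---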
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { AvgPool } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { cast } from './cast';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the 2D average pooling of an image.\n *\n * @param x The input tensor, of rank 4 or rank 3 of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is assumed.\n * @param filterSize The filter size: `[filterHeight, filterWidth]`. If\n * `filterSize` is a single number, then `filterHeight == filterWidth`.\n * @param strides The strides of the pooling: `[strideHeight, strideWidth]`. If\n * `strides` is a single number, then `strideHeight == strideWidth`.\n * @param pad The type of padding algorithm:\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. If none is provided, it will not round\n * and error if the output is of fractional size.\n */\nfunction avgPool_(x, filterSize, strides, pad, dimRoundingMode) {\n const $x = convertToTensor(x, 'x', 'avgPool', 'float32');\n const dilations = 1;\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in avgPool: Either strides or dilations must be 1. 
' +\n `Got strides ${strides} and dilations '${dilations}'`);\n let x4D = $x;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n reshapedTo4D = true;\n x4D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2]]);\n }\n util.assert(x4D.rank === 4, () => `Error in avgPool: x must be rank 4 but got rank ${x4D.rank}.`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in avgPool: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const forward = (backend, save) => {\n const convInfo = conv_util.computePool2DInfo(x4D.shape, filterSize, strides, 1 /* dilations */, pad, dimRoundingMode);\n save([x4D]);\n if (convInfo.filterWidth === 1 && convInfo.filterHeight === 1 &&\n util.arraysEqual(convInfo.inShape, convInfo.outShape)) {\n return x4D.clone();\n }\n return backend.avgPool(x4D, convInfo);\n };\n const inputs = { x: x4D };\n const attrs = { filterSize, strides, pad, dimRoundingMode };\n let res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, AvgPool, attrs);\n res = cast(res, $x.dtype);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const avgPool = op({ avgPool_ });\n//# sourceMappingURL=avg_pool.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { deprecationWarn } from '../globals';\nimport { AvgPool3D } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { cast } from './cast';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the 3D average pooling.\n *\n * ```js\n * const x = tf.tensor5d([1, 2, 3, 4, 5, 6, 7, 8], [1, 2, 2, 2, 1]);\n * const result = tf.avgPool3d(x, 2, 1, 'valid');\n * result.print();\n * ```\n *\n * @param x The input tensor, of rank 5 or rank 4 of shape\n * `[batch, depth, height, width, inChannels]`.\n * @param filterSize The filter size:\n * `[filterDepth, filterHeight, filterWidth]`.\n * If `filterSize` is a single number,\n * then `filterDepth == filterHeight == filterWidth`.\n * @param strides The strides of the pooling:\n * `[strideDepth, strideHeight, strideWidth]`.\n * If `strides` is a single number,\n * then `strideDepth == strideHeight == strideWidth`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1*1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is 
a number. If none is provided, it will not round\n * and error if the output is of fractional size.\n * @param dataFormat An optional string from: \"NDHWC\", \"NCDHW\". Defaults to\n * \"NDHWC\". Specify the data format of the input and output data. With the\n * default format \"NDHWC\", the data is stored in the order of: [batch,\n * depth, height, width, channels]. Only \"NDHWC\" is currently supported.\n * @param dilations Deprecated, this field will be gone in v3.0.0.\n * The dilation rates:\n * `[dilationDepth, dilationHeight, dilationWidth]`\n * in which we sample input values across the depth, height and width\n * dimensions in dilated pooling.\n * Defaults to `[1, 1, 1]`. If `dilations` is a single number,\n * then `dilationDepth == dilationHeight == dilationWidth`.\n * If it is greater than 1, then all values of `strides` must be 1.\n *\n * @doc {heading: 'Operations', subheading: 'Convolution'}\n */\nfunction avgPool3d_(x, filterSize, strides, pad, dimRoundingMode, dataFormat = 'NDHWC', dilations) {\n if (dilations == null) {\n dilations = [1, 1, 1];\n }\n else {\n deprecationWarn('dilations is deprecated, this field will be gone in ' +\n 'v3.0.0.');\n }\n const $x = convertToTensor(x, 'x', 'avgPool3d', 'float32');\n let x5D = $x;\n let reshapedTo5D = false;\n if ($x.rank === 4) {\n reshapedTo5D = true;\n x5D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2], $x.shape[3]]);\n }\n util.assert(x5D.rank === 5, () => `Error in avgPool3d: x must be rank 5 but got rank ${x5D.rank}.`);\n util.assert(dataFormat === 'NDHWC', () => `Error in avgPool3d: Only NDHWC is currently supported, ` +\n `but got dataFormat of ${dataFormat}`);\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in avgPool3d: Either strides or dilations must be 1. ' +\n `Got strides ${strides} and dilations '${dilations}'`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in avgPool3d: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const forward = (backend, save) => {\n if (dilations == null) {\n dilations = [1, 1, 1];\n }\n const convInfo = conv_util.computePool3DInfo(x5D.shape, filterSize, strides, dilations, pad, dimRoundingMode, dataFormat);\n save([x5D]);\n return backend.avgPool3d(x5D, convInfo);\n };\n const inputs = { x: x5D };\n const attrs = { filterSize, strides, pad, dimRoundingMode, dataFormat, dilations };\n let res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, AvgPool3D, attrs);\n res = cast(res, x5D.dtype);\n if (reshapedTo5D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3], res.shape[4]]);\n }\n return res;\n}\nexport const avgPool3d = op({ avgPool3d_ });\n//# sourceMappingURL=avg_pool_3d.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
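// --- Editor's illustrative sketch (not part of the bundled tfjs sources above) ---
// Usage of the avgPool / avgPool3d ops quoted above, assuming the standard
// @tensorflow/tfjs package (the bundle exposes the same API via human.tf):
import * as tf from '@tensorflow/tfjs';

// 2D: average over a 2x2 window of a single-channel 2x2 image -> one value
const img = tf.tensor3d([1, 2, 3, 4], [2, 2, 1]);
tf.avgPool(img, 2, 1, 'valid').print();          // [[[2.5]]]

// 3D: same pattern over [batch, depth, height, width, channels]
const vol = tf.tensor5d([1, 2, 3, 4, 5, 6, 7, 8], [1, 2, 2, 2, 1]);
tf.avgPool3d(vol, 2, 1, 'valid').print();        // single value 4.5, shape [1, 1, 1, 1, 1]
// --- end editor's sketch ---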
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as util from '../util';\nexport function assertParamsConsistent(shapes, axis) {\n const rank = shapes[0].length;\n shapes.forEach((shape, i) => {\n util.assert(shape.length === rank, () => `Error in concat${rank}D: rank of tensors[${i}] must be the same ` +\n `as the rank of the rest (${rank})`);\n });\n util.assert(axis >= 0 && axis < rank, () => `Error in concat${rank}D: axis must be between 0 and ${rank - 1}.`);\n const firstShape = shapes[0];\n shapes.forEach((shape, i) => {\n for (let r = 0; r < rank; r++) {\n util.assert((r === axis) || (shape[r] === firstShape[r]), () => `Error in concat${rank}D: Shape of tensors[${i}] (${shape}) ` +\n `does not match the shape of the rest (${firstShape}) ` +\n `along the non-concatenated axis ${i}.`);\n }\n });\n}\nexport function computeOutShape(shapes, axis) {\n const outputShape = shapes[0].slice();\n for (let i = 1; i < shapes.length; i++) {\n outputShape[axis] += shapes[i][axis];\n }\n return outputShape;\n}\n//# sourceMappingURL=concat_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Concat } from '../kernel_names';\nimport { convertToTensorArray } from '../tensor_util_env';\nimport { assert, parseAxisParam, sizeFromShape } from '../util';\nimport { assertParamsConsistent, computeOutShape } from './concat_util';\nimport { op } from './operation';\nimport { tensor } from './tensor';\n/**\n * Concatenates a list of `tf.Tensor`s along a given axis.\n *\n * The tensors ranks and types must match, and their sizes must match in all\n * dimensions except `axis`.\n *\n * Also available are stricter rank-specific methods that assert that\n * `tensors` are of the given rank:\n * - `tf.concat1d`\n * - `tf.concat2d`\n * - `tf.concat3d`\n * - `tf.concat4d`\n *\n * Except `tf.concat1d` (which does not have axis param), all methods have\n * same signature as this method.\n *\n * ```js\n * const a = tf.tensor1d([1, 2]);\n * const b = tf.tensor1d([3, 4]);\n * a.concat(b).print(); // or a.concat(b)\n * ```\n *\n * ```js\n * const a = tf.tensor1d([1, 2]);\n * const b = tf.tensor1d([3, 4]);\n * const c = tf.tensor1d([5, 6]);\n * tf.concat([a, b, c]).print();\n * ```\n 
*\n * ```js\n * const a = tf.tensor2d([[1, 2], [10, 20]]);\n * const b = tf.tensor2d([[3, 4], [30, 40]]);\n * const axis = 1;\n * tf.concat([a, b], axis).print();\n * ```\n * @param tensors A list of tensors to concatenate.\n * @param axis The axis to concate along. Defaults to 0 (the first dim).\n *\n * @doc {heading: 'Tensors', subheading: 'Slicing and Joining'}\n */\nfunction concat_(tensors, axis = 0) {\n assert(tensors.length >= 1, () => 'Pass at least one tensor to concat');\n let $tensors = convertToTensorArray(tensors, 'tensors', 'concat');\n if ($tensors[0].dtype === 'complex64') {\n $tensors.forEach(tensor => {\n if (tensor.dtype !== 'complex64') {\n throw new Error(`Cannot concatenate complex64 tensors with a tensor\n with dtype ${tensor.dtype}. `);\n }\n });\n }\n const forward = (backend, save) => {\n const $axis = parseAxisParam(axis, $tensors[0].shape)[0];\n const outShape = computeOutShape($tensors.map(t => t.shape), $axis);\n if (sizeFromShape(outShape) === 0) {\n return tensor([], outShape);\n }\n // Keep only non-empty tensors (ignore tensors with 0 in their shape).\n $tensors = $tensors.filter(t => t.size > 0);\n if ($tensors.length === 1) {\n return $tensors[0];\n }\n const shapes = $tensors.map(t => t.shape);\n assertParamsConsistent(shapes, $axis);\n const res = backend.concat($tensors, $axis);\n save($tensors);\n return res;\n };\n const inputs = $tensors;\n const attr = { axis };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Concat, attr);\n}\nexport const concat = op({ concat_ });\n//# sourceMappingURL=concat.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Sigmoid } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes sigmoid element-wise, `1 / (1 + exp(-x))`\n *\n * ```js\n * const x = tf.tensor1d([0, -1, 2, -3]);\n *\n * x.sigmoid().print(); // or tf.sigmoid(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction sigmoid_(x) {\n const $x = convertToTensor(x, 'x', 'sigmoid');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.sigmoid($x);\n save([res]);\n return res;\n }, inputs, null /* grad */, Sigmoid);\n}\nexport const sigmoid = op({ sigmoid_ });\n//# sourceMappingURL=sigmoid.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
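// --- Editor's illustrative sketch (not part of the bundled tfjs sources above) ---
// concat joins tensors along one axis; sigmoid is applied element-wise.
// Assumes the standard @tensorflow/tfjs package:
import * as tf from '@tensorflow/tfjs';

const a = tf.tensor2d([[1, 2], [3, 4]]);
const b = tf.tensor2d([[5, 6], [7, 8]]);
tf.concat([a, b], 0).print();                    // [[1,2],[3,4],[5,6],[7,8]]
tf.concat([a, b], 1).print();                    // [[1,2,5,6],[3,4,7,8]]

tf.sigmoid(tf.tensor1d([-2, 0, 2])).print();     // ~[0.119, 0.5, 0.881]
// --- end editor's sketch ---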
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Slice } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\nimport * as slice_util from './slice_util';\n/**\n * Extracts a slice from a `tf.Tensor` starting at coordinates `begin`\n * and is of size `size`.\n *\n * Also available are stricter rank-specific methods with the same signature\n * as this method that assert that `x` is of the given rank:\n * - `tf.slice1d`\n * - `tf.slice2d`\n * - `tf.slice3d`\n * - `tf.slice4d`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4]);\n *\n * x.slice([1], [2]).print();\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * x.slice([1, 0], [1, 2]).print();\n * ```\n * @param x The input `tf.Tensor` to slice from.\n * @param begin The coordinates to start the slice from. The length can be\n * less than the rank of x - the rest of the axes will have implicit 0 as\n * start. Can also be a single number, in which case it specifies the\n * first axis.\n * @param size The size of the slice. The length can be less than the rank of\n * x - the rest of the axes will have implicit -1. A value of -1 requests\n * the rest of the dimensions in the axis. Can also be a single number,\n * in which case it specifies the size of the first axis.\n *\n * @doc {heading: 'Tensors', subheading: 'Slicing and Joining'}\n */\nfunction slice_(x, begin, size) {\n const $x = convertToTensor(x, 'x', 'slice');\n if ($x.rank === 0) {\n throw new Error('Slicing scalar is not possible');\n }\n const forward = (backend, save) => {\n const [begin_, size_] = slice_util.parseSliceParams($x, begin, size);\n slice_util.assertParamsValid($x, begin_, size_);\n save([$x]);\n return backend.slice($x, begin_, size_);\n };\n const inputs = { x: $x };\n const attrs = { begin, size };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Slice, attrs);\n}\nexport const slice = op({ slice_ });\n//# sourceMappingURL=slice.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Tanh } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes hyperbolic tangent of the input `tf.Tensor` element-wise: `tanh(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, 70]);\n *\n * x.tanh().print(); // or tf.tanh(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction tanh_(x) {\n const $x = convertToTensor(x, 'x', 'tanh');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const y = backend.tanh($x);\n save([y]);\n return y;\n }, inputs, null /* grad */, Tanh);\n}\nexport const tanh = op({ tanh_ });\n//# sourceMappingURL=tanh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport { add } from './add';\nimport { concat } from './concat';\nimport { matMul } from './mat_mul';\nimport { mul } from './mul';\nimport { op } from './operation';\nimport { sigmoid } from './sigmoid';\nimport { slice } from './slice';\nimport { tanh } from './tanh';\n/**\n * Computes the next state and output of a BasicLSTMCell.\n *\n * Returns `[newC, newH]`.\n *\n * Derived from tf.contrib.rnn.BasicLSTMCell.\n *\n * @param forgetBias Forget bias for the cell.\n * @param lstmKernel The weights for the cell.\n * @param lstmBias The bias for the cell.\n * @param data The input to the cell.\n * @param c Previous cell state.\n * @param h Previous cell output.\n *\n * @doc {heading: 'Operations', subheading: 'RNN'}\n */\nfunction basicLSTMCell_(forgetBias, lstmKernel, lstmBias, data, c, h) {\n const $forgetBias = convertToTensor(forgetBias, 'forgetBias', 'basicLSTMCell');\n const $lstmKernel = convertToTensor(lstmKernel, 'lstmKernel', 'basicLSTMCell');\n const $lstmBias = convertToTensor(lstmBias, 'lstmBias', 'basicLSTMCell');\n const $data = convertToTensor(data, 'data', 'basicLSTMCell');\n const $c = convertToTensor(c, 'c', 'basicLSTMCell');\n const $h = convertToTensor(h, 'h', 'basicLSTMCell');\n const combined = concat([$data, $h], 1);\n const weighted = matMul(combined, $lstmKernel);\n const res = 
add(weighted, $lstmBias);\n // i = input_gate, j = new_input, f = forget_gate, o = output_gate\n const batchSize = res.shape[0];\n const sliceCols = res.shape[1] / 4;\n const sliceSize = [batchSize, sliceCols];\n const i = slice(res, [0, 0], sliceSize);\n const j = slice(res, [0, sliceCols], sliceSize);\n const f = slice(res, [0, sliceCols * 2], sliceSize);\n const o = slice(res, [0, sliceCols * 3], sliceSize);\n const newC = add(mul(sigmoid(i), tanh(j)), mul($c, sigmoid(add($forgetBias, f))));\n const newH = mul(tanh(newC), sigmoid(o));\n return [newC, newH];\n}\nexport const basicLSTMCell = op({ basicLSTMCell_ });\n//# sourceMappingURL=basic_lstm_cell.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { BatchToSpaceND } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * This operation reshapes the \"batch\" dimension 0 into `M + 1` dimensions of\n * shape `blockShape + [batch]`, interleaves these blocks back into the grid\n * defined by the spatial dimensions `[1, ..., M]`, to obtain a result with\n * the same rank as the input. The spatial dimensions of this intermediate\n * result are then optionally cropped according to `crops` to produce the\n * output. This is the reverse of `tf.spaceToBatchND`. See below for a precise\n * description.\n *\n * ```js\n * const x = tf.tensor4d([1, 2, 3, 4], [4, 1, 1, 1]);\n * const blockShape = [2, 2];\n * const crops = [[0, 0], [0, 0]];\n *\n * x.batchToSpaceND(blockShape, crops).print();\n * ```\n *\n * @param x A `tf.Tensor`. N-D with `x.shape` = `[batch] + spatialShape +\n * remainingShape`, where spatialShape has `M` dimensions.\n * @param blockShape A 1-D array. Must have shape `[M]`, all values must\n * be >= 1.\n * @param crops A 2-D array. Must have shape `[M, 2]`, all values must be >= 0.\n * `crops[i] = [cropStart, cropEnd]` specifies the amount to crop from input\n * dimension `i + 1`, which corresponds to spatial dimension `i`. It is required\n * that `cropStart[i] + cropEnd[i] <= blockShape[i] * inputShape[i + 1]`\n *\n * This operation is equivalent to the following steps:\n *\n * 1. Reshape `x` to `reshaped` of shape: `[blockShape[0], ...,\n * blockShape[M-1], batch / prod(blockShape), x.shape[1], ...,\n * x.shape[N-1]]`\n *\n * 2. Permute dimensions of `reshaped`to produce `permuted` of shape `[batch /\n * prod(blockShape),x.shape[1], blockShape[0], ..., x.shape[M],\n * blockShape[M-1],x.shape[M+1], ..., x.shape[N-1]]`\n *\n * 3. Reshape `permuted` to produce `reshapedPermuted` of shape `[batch /\n * prod(blockShape),x.shape[1] * blockShape[0], ..., x.shape[M] *\n * blockShape[M-1],x.shape[M+1], ..., x.shape[N-1]]`\n *\n * 4. 
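// --- Editor's illustrative sketch (not part of the bundled tfjs sources above) ---
// basicLSTMCell (quoted above) combines matMul, slice, sigmoid and tanh into one
// LSTM step. A minimal invocation with random weights; the kernel layout is
// [inputSize + hiddenSize, 4 * hiddenSize]. Assumes the standard @tensorflow/tfjs package:
import * as tf from '@tensorflow/tfjs';

const batch = 1, inputSize = 3, hiddenSize = 2;
const forgetBias = tf.scalar(1.0);
const lstmKernel = tf.randomNormal([inputSize + hiddenSize, 4 * hiddenSize]);
const lstmBias = tf.zeros([4 * hiddenSize]);
const data = tf.randomNormal([batch, inputSize]);
const c = tf.zeros([batch, hiddenSize]);         // previous cell state
const h = tf.zeros([batch, hiddenSize]);         // previous cell output
const [newC, newH] = tf.basicLSTMCell(forgetBias, lstmKernel, lstmBias, data, c, h);
newC.print();                                    // next cell state, shape [1, 2]
newH.print();                                    // next output, shape [1, 2]
// --- end editor's sketch ---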
Crop the start and end of dimensions `[1, ..., M]` of `reshapedPermuted`\n * according to `crops` to produce the output of shape: `[batch /\n * prod(blockShape),x.shape[1] * blockShape[0] - crops[0,0] - crops[0,1],\n * ..., x.shape[M] * blockShape[M-1] - crops[M-1,0] -\n * crops[M-1,1],x.shape[M+1], ..., x.shape[N-1]]`\n *\n * @doc {heading: 'Tensors', subheading: 'Transformations'}\n */\nfunction batchToSpaceND_(x, blockShape, crops) {\n const $x = convertToTensor(x, 'x', 'batchToSpaceND');\n const prod = blockShape.reduce((a, b) => a * b);\n util.assert($x.rank >= 1 + blockShape.length, () => `input rank is ${$x.rank} but should be > than blockShape.length ${blockShape.length}`);\n util.assert(crops.length === blockShape.length, () => `crops.length is ${crops.length} but should be equal to blockShape.length ${blockShape.length}`);\n util.assert($x.shape[0] % prod === 0, () => `input tensor batch is ${$x.shape[0]} but is not divisible by the product of ` +\n `the elements of blockShape ${blockShape.join(' * ')} === ${prod}`);\n const forward = backend => {\n return backend.batchToSpaceND($x, blockShape, crops);\n };\n const inputs = { x: $x };\n const attrs = { blockShape, crops };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, BatchToSpaceND, attrs);\n}\nexport const batchToSpaceND = op({ batchToSpaceND_ });\n//# sourceMappingURL=batch_to_space_nd.js.map", "import { reshape } from './reshape';\nexport function xAs4D(x) {\n let x4D;\n if (x.rank === 0 || x.rank === 1) {\n x4D = reshape(x, [1, 1, 1, x.size]);\n }\n else if (x.rank === 2) {\n x4D = reshape(x, [1, 1, x.shape[0], x.shape[1]]);\n }\n else if (x.rank === 3) {\n x4D = reshape(x, [1, x.shape[0], x.shape[1], x.shape[2]]);\n }\n else {\n x4D = x;\n }\n return x4D;\n}\n//# sourceMappingURL=batchnorm_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
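// --- Editor's illustrative sketch (not part of the bundled tfjs sources above) ---
// batchToSpaceND (quoted above) folds the batch dimension back into the spatial
// grid; with blockShape [2, 2], a batch of 4 single-pixel images becomes one 2x2 image:
import * as tf from '@tensorflow/tfjs';

const x = tf.tensor4d([1, 2, 3, 4], [4, 1, 1, 1]);
tf.batchToSpaceND(x, [2, 2], [[0, 0], [0, 0]]).print();  // shape [1, 2, 2, 1]
// --- end editor's sketch ---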
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { FusedBatchNorm } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { xAs4D } from './batchnorm_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Batch normalization.\n *\n * As described in\n * [http://arxiv.org/abs/1502.03167](http://arxiv.org/abs/1502.03167).\n *\n * Mean, variance, scale, and offset can be of two shapes:\n * - The same shape as the input.\n * - In the common case, the depth dimension is the last dimension of x, so\n * the values would be an `tf.Tensor1D` of shape [depth].\n *\n * Also available are stricter rank-specific methods with the same signature\n * as this method that assert that parameters passed are of given rank\n * - `tf.batchNorm2d`\n * - `tf.batchNorm3d`\n * - `tf.batchNorm4d`\n *\n * @param x The input Tensor.\n * @param mean A mean Tensor.\n * @param variance A variance Tensor.\n * @param offset An offset Tensor.\n * @param scale A scale Tensor.\n * @param varianceEpsilon A small float number to avoid dividing by 0.\n *\n * @doc {heading: 'Operations', subheading: 'Normalization'}\n */\nfunction batchNorm_(x, mean, variance, offset, scale, varianceEpsilon) {\n if (varianceEpsilon == null) {\n varianceEpsilon = 0.001;\n }\n const $x = convertToTensor(x, 'x', 'batchNorm');\n const $mean = convertToTensor(mean, 'mean', 'batchNorm');\n const $variance = convertToTensor(variance, 'variance', 'batchNorm');\n let $scale;\n if (scale != null) {\n $scale = convertToTensor(scale, 'scale', 'batchNorm');\n }\n let $offset;\n if (offset != null) {\n $offset = convertToTensor(offset, 'offset', 'batchNorm');\n }\n util.assert($mean.rank === $variance.rank, () => 'Batch normalization gradient requires mean and variance to have ' +\n 'equal ranks.');\n util.assert($offset == null || $mean.rank === $offset.rank, () => 'Batch normalization gradient requires mean and offset to have ' +\n 'equal ranks.');\n util.assert($scale == null || $mean.rank === $scale.rank, () => 'Batch normalization gradient requires mean and scale to have ' +\n 'equal ranks.');\n const x4D = xAs4D($x);\n const forward = (backend, save) => {\n save([x4D, $mean, $variance, $scale]);\n return backend.batchNorm(x4D, as1DOr4D($mean), as1DOr4D($variance), as1DOr4D($offset), as1DOr4D($scale), varianceEpsilon);\n };\n const inputs = {\n x: x4D,\n scale: $scale,\n offset: $offset,\n mean: $mean,\n variance: $variance\n };\n const attrs = { varianceEpsilon };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* gradient */, FusedBatchNorm, attrs);\n return reshape(res, $x.shape);\n}\nfunction as1DOr4D(x) {\n if (x == null) {\n return null;\n }\n if (x.rank === 0) {\n // tslint:disable-next-line:no-unnecessary-type-assertion\n return reshape(x, [x.size]);\n }\n else if (x.rank === 1) {\n 
return x;\n }\n else if (x.rank === 2) {\n // tslint:disable-next-line:no-unnecessary-type-assertion\n return reshape(x, [1, 1, x.shape[0], x.shape[1]]);\n }\n else if (x.rank === 3) {\n // tslint:disable-next-line:no-unnecessary-type-assertion\n return reshape(x, [1, x.shape[0], x.shape[1], x.shape[2]]);\n }\n return x;\n}\nexport const batchNorm = op({ batchNorm_ });\n//# sourceMappingURL=batchnorm.js.map", "import { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { batchNorm } from './batchnorm';\nimport { op } from './operation';\n/**\n * Batch normalization, strictly for 2D. For the more relaxed version, see\n * `tf.batchNorm`.\n *\n * @param x The input Tensor.\n * @param mean A mean Tensor.\n * @param variance A variance Tensor.\n * @param offset An offset Tensor.\n * @param scale A scale Tensor.\n * @param varianceEpsilon A small float number to avoid dividing by 0.\n */\nfunction batchNorm2d_(x, mean, variance, offset, scale, varianceEpsilon) {\n const $x = convertToTensor(x, 'x', 'batchNorm');\n const $mean = convertToTensor(mean, 'mean', 'batchNorm');\n const $variance = convertToTensor(variance, 'variance', 'batchNorm');\n let $scale;\n if (scale != null) {\n $scale = convertToTensor(scale, 'scale', 'batchNorm');\n }\n let $offset;\n if (offset != null) {\n $offset = convertToTensor(offset, 'offset', 'batchNorm');\n }\n util.assert($x.rank === 2, () => `Error in batchNorm2D: x must be rank 2 but got rank ` +\n `${$x.rank}.`);\n util.assert($mean.rank === 2 || $mean.rank === 1, () => `Error in batchNorm2D: mean must be rank 2 or rank 1 but ` +\n `got rank ${$mean.rank}.`);\n util.assert($variance.rank === 2 || $variance.rank === 1, () => `Error in batchNorm2D: variance must be rank 2 or rank 1 ` +\n `but got rank ${$variance.rank}.`);\n if ($scale != null) {\n util.assert($scale.rank === 2 || $scale.rank === 1, () => `Error in batchNorm2D: scale must be rank 2 or rank 1 ` +\n `but got rank ${$scale.rank}.`);\n }\n if ($offset != null) {\n util.assert($offset.rank === 2 || $offset.rank === 1, () => `Error in batchNorm2D: offset must be rank 2 or rank 1 ` +\n `but got rank ${$offset.rank}.`);\n }\n return batchNorm($x, $mean, $variance, $offset, $scale, varianceEpsilon);\n}\nexport const batchNorm2d = op({ batchNorm2d_ });\n//# sourceMappingURL=batchnorm2d.js.map", "import { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { batchNorm } from './batchnorm';\nimport { op } from './operation';\n/**\n * Batch normalization, strictly for 3D. 
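// --- Editor's illustrative sketch (not part of the bundled tfjs sources above) ---
// batchNorm (quoted above) normalizes with precomputed mean/variance plus optional
// scale/offset; the rank-specific batchNorm2d wrapper only adds shape assertions.
// Assumes the standard @tensorflow/tfjs package:
import * as tf from '@tensorflow/tfjs';

const x = tf.tensor2d([[1, 2], [3, 4]]);         // [batch, depth]
const mean = tf.tensor1d([2, 3]);
const variance = tf.tensor1d([1, 1]);
const offset = tf.tensor1d([0, 0]);
const scale = tf.tensor1d([1, 1]);
// (x - mean) / sqrt(variance + eps) * scale + offset, per depth channel
tf.batchNorm2d(x, mean, variance, offset, scale, 1e-3).print();  // ~[[-1, -1], [1, 1]]
// --- end editor's sketch ---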
For the more relaxed version, see\n * `tf.batchNorm`.\n *\n * @param x The input Tensor.\n * @param mean A mean Tensor.\n * @param variance A variance Tensor.\n * @param offset An offset Tensor.\n * @param scale A scale Tensor.\n * @param varianceEpsilon A small float number to avoid dividing by 0.\n */\nfunction batchNorm3d_(x, mean, variance, offset, scale, varianceEpsilon) {\n const $x = convertToTensor(x, 'x', 'batchNorm');\n const $mean = convertToTensor(mean, 'mean', 'batchNorm');\n const $variance = convertToTensor(variance, 'variance', 'batchNorm');\n let $scale;\n if (scale != null) {\n $scale = convertToTensor(scale, 'scale', 'batchNorm');\n }\n let $offset;\n if (offset != null) {\n $offset = convertToTensor(offset, 'offset', 'batchNorm');\n }\n util.assert($x.rank === 3, () => `Error in batchNorm3D: x must be rank 3 but got rank ` +\n `${$x.rank}.`);\n util.assert($mean.rank === 3 || $mean.rank === 1, () => `Error in batchNorm3D: mean must be rank 3 or rank 1 but ` +\n `got rank ${$mean.rank}.`);\n util.assert($variance.rank === 3 || $variance.rank === 1, () => `Error in batchNorm3D: variance must be rank 3 or rank 1 ` +\n `but got rank ${$variance.rank}.`);\n if ($scale != null) {\n util.assert($scale.rank === 3 || $scale.rank === 1, () => `Error in batchNorm3D: scale must be rank 3 or rank 1 ` +\n `but got rank ${$scale.rank}.`);\n }\n if ($offset != null) {\n util.assert($offset.rank === 3 || $offset.rank === 1, () => `Error in batchNorm3D: offset must be rank 3 or rank 1 ` +\n `but got rank ${$offset.rank}.`);\n }\n return batchNorm($x, $mean, $variance, $offset, $scale, varianceEpsilon);\n}\nexport const batchNorm3d = op({ batchNorm3d_ });\n//# sourceMappingURL=batchnorm3d.js.map", "import { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { batchNorm } from './batchnorm';\nimport { op } from './operation';\n/**\n * Batch normalization, strictly for 4D. 
For the more relaxed version, see\n * `tf.batchNorm`.\n *\n * @param x The input Tensor.\n * @param mean A mean Tensor.\n * @param variance A variance Tensor.\n * @param offset An offset Tensor.\n * @param scale A scale Tensor.\n * @param varianceEpsilon A small float number to avoid dividing by 0.\n */\nfunction batchNorm4d_(x, mean, variance, offset, scale, varianceEpsilon) {\n const $x = convertToTensor(x, 'x', 'batchNorm');\n const $mean = convertToTensor(mean, 'mean', 'batchNorm');\n const $variance = convertToTensor(variance, 'variance', 'batchNorm');\n let $scale;\n if (scale != null) {\n $scale = convertToTensor(scale, 'scale', 'batchNorm');\n }\n let $offset;\n if (offset != null) {\n $offset = convertToTensor(offset, 'offset', 'batchNorm');\n }\n util.assert($x.rank === 4, () => `Error in batchNorm4D: x must be rank 4 but got rank ` +\n `${$x.rank}.`);\n util.assert($mean.rank === 4 || $mean.rank === 1, () => `Error in batchNorm4D: mean must be rank 4 or rank 1 but ` +\n `got rank ${$mean.rank}.`);\n util.assert($variance.rank === 4 || $variance.rank === 1, () => `Error in batchNorm4D: variance must be rank 4 or rank 1 ` +\n `but got rank ${$variance.rank}.`);\n if ($scale != null) {\n util.assert($scale.rank === 4 || $scale.rank === 1, () => `Error in batchNorm4D: scale must be rank 4 or rank 1 ` +\n `but got rank ${$scale.rank}.`);\n }\n if ($offset != null) {\n util.assert($offset.rank === 4 || $offset.rank === 1, () => `Error in batchNorm4D: offset must be rank 4 or rank 1 ` +\n `but got rank ${$offset.rank}.`);\n }\n return batchNorm($x, $mean, $variance, $offset, $scale, varianceEpsilon);\n}\nexport const batchNorm4d = op({ batchNorm4d_ });\n//# sourceMappingURL=batchnorm4d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { BroadcastTo } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { clone } from './clone';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Broadcast an array to a compatible shape NumPy-style.\n *\n * The tensor's shape is compared to the broadcast shape from end to beginning.\n * Ones are prepended to the tensor's shape until is has the same length as\n * the broadcast shape. If input.shape[i]==shape[i], the (i+1)-th axis is\n * already broadcast-compatible. 
If input.shape[i]==1 and shape[i]==N, then\n * the input tensor is tiled N times along that axis (using tf.tile).\n *\n * @param input The tensor that is to be broadcasted.\n * @param shape The input is to be broadcast to this shape.\n *\n * @doc {heading: 'Tensors', subheading: 'Transformations'}\n */\nfunction broadcastTo_(x, shape) {\n let input = convertToTensor(x, 'broadcastTo', 'x');\n const xShape = input.shape;\n if (shape.some(d => !(d > 0) || d % 1 !== 0)) {\n throw new Error(`broadcastTo(): Invalid broadcast shape [${shape}].`);\n }\n if (shape.length < input.rank) {\n throw new Error(`broadcastTo(): shape.length=${shape.length} < input.rank=${input.rank}.`);\n }\n if (shape.length > input.rank) {\n const newShape = input.shape.slice();\n while (newShape.length < shape.length) {\n newShape.unshift(1);\n }\n input = reshape(input, newShape);\n }\n const inputShape = input.shape;\n const reps = Array.from(shape);\n for (let i = shape.length - 1; i >= 0; i--) {\n if (inputShape[i] === shape[i]) {\n reps[i] = 1;\n }\n else if (input.shape[i] !== 1) {\n throw new Error(`broadcastTo(): [${xShape}] cannot be broadcast to [${shape}].`);\n }\n }\n const axes = reps.map((n, i) => n > 1 ? i : -1).filter(i => i >= 0);\n if (axes.length === 0) {\n return clone(input);\n }\n const forward = (backend) => backend.tile(input, reps);\n const inputs = { x: input };\n const attrs = { shape, inputShape };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, BroadcastTo, attrs);\n}\nexport const broadcastTo = op({ broadcastTo_ });\n//# sourceMappingURL=broadcast_to.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Ceil } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes ceiling of input `tf.Tensor` element-wise: `ceil(x)`\n *\n * ```js\n * const x = tf.tensor1d([.6, 1.1, -3.3]);\n *\n * x.ceil().print(); // or tf.ceil(x)\n * ```\n * @param x The input Tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction ceil_(x) {\n const $x = convertToTensor(x, 'x', 'ceil');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(backend => backend.ceil($x), inputs, null /* grad */, Ceil);\n}\nexport const ceil = op({ ceil_ });\n//# sourceMappingURL=ceil.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
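// --- Editor's illustrative sketch (not part of the bundled tfjs sources above) ---
// broadcastTo tiles a tensor up to a compatible NumPy-style shape; ceil rounds up
// element-wise. Assumes the standard @tensorflow/tfjs package:
import * as tf from '@tensorflow/tfjs';

tf.broadcastTo(tf.tensor1d([1, 2, 3]), [2, 3]).print();  // [[1, 2, 3], [1, 2, 3]]
tf.ceil(tf.tensor1d([0.6, 1.1, -3.3])).print();          // [1, 2, -3]
// --- end editor's sketch ---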
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { ClipByValue } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * Clips values element-wise. `max(min(x, clipValueMax), clipValueMin)`\n *\n * ```js\n * const x = tf.tensor1d([-1, 2, -3, 4]);\n *\n * x.clipByValue(-2, 3).print(); // or tf.clipByValue(x, -2, 3)\n * ```\n * @param x The input tensor.\n * @param clipValueMin Lower-bound of range to be clipped to.\n * @param clipValueMax Upper-bound of range to be clipped to.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction clipByValue_(x, clipValueMin, clipValueMax) {\n const $x = convertToTensor(x, 'x', 'clipByValue');\n util.assert((clipValueMin <= clipValueMax), () => `Error in clip: min (${clipValueMin}) must be ` +\n `less than or equal to max (${clipValueMax}).`);\n const inputs = { x: $x };\n const attrs = { clipValueMin, clipValueMax };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.clip($x, clipValueMin, clipValueMax);\n save([$x]);\n return res;\n }, inputs, null /* grad */, ClipByValue, attrs);\n}\nexport const clipByValue = op({ clipByValue_ });\n//# sourceMappingURL=clip_by_value.js.map", "import { concat } from './concat';\nimport { op } from './operation';\n/**\n * Concatenates a list of`tf.Tensor1D`s along an axis. See `concat` for details.\n *\n * For example, if:\n * A: shape(3) = |r1, g1, b1|\n * B: shape(2) = |r2, g2|\n * C = tf.concat1d([A, B]) == |r1, g1, b1, r2, g2|\n *\n * @param tensors A list of`tf.Tensor`s to concatenate.\n * @return The concatenated array.\n */\nfunction concat1d_(tensors) {\n return concat(tensors, 0 /* axis */);\n}\nexport const concat1d = op({ concat1d_ });\n//# sourceMappingURL=concat_1d.js.map", "import { concat } from './concat';\nimport { op } from './operation';\n/**\n * Concatenates a list of`tf.Tensor2D`s along an axis. 
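// --- Editor's illustrative sketch (not part of the bundled tfjs sources above) ---
// clipByValue bounds every element to [clipValueMin, clipValueMax].
// Assumes the standard @tensorflow/tfjs package:
import * as tf from '@tensorflow/tfjs';

tf.clipByValue(tf.tensor1d([-1, 2, -3, 4]), -2, 3).print();  // [-1, 2, -2, 3]
// --- end editor's sketch ---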
See `concat` for details.\n *\n * For example, if:\n * A: shape(2, 3) = | r1, g1, b1 |\n * | r2, g2, b2 |\n *\n * B: shape(2, 3) = | r3, g3, b3 |\n * | r4, g4, b4 |\n *\n * C = tf.concat2d([A, B], axis)\n *\n * if axis = 0:\n * C: shape(4, 3) = | r1, g1, b1 |\n * | r2, g2, b2 |\n * | r3, g3, b3 |\n * | r4, g4, b4 |\n *\n * if axis = 1:\n * C = shape(2, 6) = | r1, g1, b1, r3, g3, b3 |\n * | r2, g2, b2, r4, g4, b4 |\n *\n *\n * @param tensors A list of `tf.Tensor`s to concatenate.\n * @param axis The axis to concatenate along.\n * @return The concatenated array.\n */\nfunction concat2d_(tensors, axis) {\n return concat(tensors, axis);\n}\nexport const concat2d = op({ concat2d_ });\n//# sourceMappingURL=concat_2d.js.map", "import { concat } from './concat';\nimport { op } from './operation';\n/**\n * Concatenates a list of `tf.Tensor3D`s along an axis.\n * See `concat` for details.\n *\n * For example, if:\n * A: shape(2, 1, 3) = | r1, g1, b1 |\n * | r2, g2, b2 |\n *\n * B: shape(2, 1, 3) = | r3, g3, b3 |\n * | r4, g4, b4 |\n *\n * C = tf.concat3d([A, B], axis)\n *\n * if axis = 0:\n * C: shape(4, 1, 3) = | r1, g1, b1 |\n * | r2, g2, b2 |\n * | r3, g3, b3 |\n * | r4, g4, b4 |\n *\n * if axis = 1:\n * C: shape(2, 2, 3) = | r1, g1, b1, r3, g3, b3 |\n * | r2, g2, b2, r4, g4, b4 |\n *\n * if axis = 2:\n * C = shape(2, 1, 6) = | r1, g1, b1, r3, g3, b3 |\n * | r2, g2, b2, r4, g4, b4 |\n *\n * @param tensors A list of`tf.Tensor`s to concatenate.\n * @param axis The axis to concate along.\n * @return The concatenated array.\n */\nfunction concat3d_(tensors, axis) {\n return concat(tensors, axis);\n}\nexport const concat3d = op({ concat3d_ });\n//# sourceMappingURL=concat_3d.js.map", "import { concat } from './concat';\nimport { op } from './operation';\n/**\n * Concatenates a list of `tf.Tensor4D`s along an axis.\n * See `concat` for details.\n *\n * @param tensors A list of `tf.Tensor`s to concatenate.\n * @param axis The axis to concate along.\n * @return The concatenated array.\n */\nfunction concat4d_(tensors, axis) {\n return concat(tensors, axis);\n}\nexport const concat4d = op({ concat4d_ });\n//# sourceMappingURL=concat_4d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Conv2D } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes a 2D convolution over the input x.\n *\n * @param x The input tensor, of rank 4 or rank 3, of shape\n * `[batch, height, width, inChannels]`. 
If rank 3, batch of 1 is\n * assumed.\n * @param filter The filter, rank 4, of shape\n * `[filterHeight, filterWidth, inDepth, outDepth]`.\n * @param strides The strides of the convolution: `[strideHeight,\n * strideWidth]`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dataFormat: An optional string from: \"NHWC\", \"NCHW\". Defaults to\n * \"NHWC\". Specify the data format of the input and output data. With the\n * default format \"NHWC\", the data is stored in the order of: [batch,\n * height, width, channels].\n * @param dilations The dilation rates: `[dilationHeight, dilationWidth]`\n * in which we sample input values across the height and width dimensions\n * in atrous convolution. Defaults to `[1, 1]`. If `dilations` is a single\n * number, then `dilationHeight == dilationWidth`. If it is greater than\n * 1, then all values of `strides` must be 1.\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. If none is provided, it will not round\n * and error if the output is of fractional size.\n *\n * @doc {heading: 'Operations', subheading: 'Convolution'}\n */\nfunction conv2d_(x, filter, strides, pad, dataFormat = 'NHWC', dilations = [1, 1], dimRoundingMode) {\n const $x = convertToTensor(x, 'x', 'conv2d');\n const $filter = convertToTensor(filter, 'filter', 'conv2d');\n let x4D = $x;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n reshapedTo4D = true;\n x4D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2]]);\n }\n util.assert(x4D.rank === 4, () => `Error in conv2d: input must be rank 4, but got rank ${x4D.rank}.`);\n util.assert($filter.rank === 4, () => `Error in conv2d: filter must be rank 4, but got rank ` +\n `${$filter.rank}.`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in conv2d: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const inDepth = dataFormat === 'NHWC' ? x4D.shape[3] : x4D.shape[1];\n util.assert(inDepth === $filter.shape[2], () => `Error in conv2d: depth of input (${inDepth}) must match ` +\n `input depth for filter ${$filter.shape[2]}.`);\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in conv2D: Either strides or dilations must be 1. 
' +\n `Got strides ${strides} and dilations '${dilations}'`);\n const forward = (backend, save) => {\n const $dataFormat = conv_util.convertConv2DDataFormat(dataFormat);\n const convInfo = conv_util.computeConv2DInfo(x4D.shape, $filter.shape, strides, dilations, pad, dimRoundingMode, false, $dataFormat);\n const res = backend.conv2d(x4D, $filter, convInfo);\n save([x4D, $filter]);\n return res;\n };\n const inputs = { x: x4D, filter: $filter };\n const attrs = { strides, pad, dataFormat, dilations, dimRoundingMode };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, Conv2D, attrs);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const conv2d = op({ conv2d_ });\n//# sourceMappingURL=conv2d.js.map", "import { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { conv2d } from './conv2d';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes a 1D convolution over the input x.\n *\n * @param x The input tensor, of rank 3 or rank 2, of shape\n * `[batch, width, inChannels]`. If rank 2, batch of 1 is assumed.\n * @param filter The filter, rank 3, of shape\n * `[filterWidth, inDepth, outDepth]`.\n * @param stride The number of entries by which the filter is moved right at\n * each step.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dataFormat An optional string from \"NWC\", \"NCW\". Defaults to \"NWC\",\n * the data is stored in the order of [batch, in_width, in_channels]. Only\n * \"NWC\" is currently supported.\n * @param dilation The dilation rate in which we sample input values in\n * atrous convolution. Defaults to `1`. If it is greater than 1, then\n * stride must be `1`.\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. If none is provided, it will not round\n * and error if the output is of fractional size.\n *\n * @doc {heading: 'Operations', subheading: 'Convolution'}\n */\nfunction conv1d_(x, filter, stride, pad, dataFormat = 'NWC', dilation = 1, dimRoundingMode) {\n const $x = convertToTensor(x, 'x', 'conv1d');\n const $filter = convertToTensor(filter, 'filter', 'conv1d');\n let x3D = $x;\n let reshapedTo3D = false;\n if ($x.rank === 2) {\n reshapedTo3D = true;\n x3D = reshape($x, [1, $x.shape[0], $x.shape[1]]);\n }\n util.assert(x3D.rank === 3, () => `Error in conv1d: input must be rank 3, but got rank ${x3D.rank}.`);\n util.assert($filter.rank === 3, () => `Error in conv1d: filter must be rank 3, but got rank ` +\n `${$filter.rank}.`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in conv1d: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n util.assert(x3D.shape[2] === $filter.shape[1], () => `Error in conv1d: depth of input (${x3D.shape[2]}) must match ` +\n `input depth for filter ${$filter.shape[1]}.`);\n util.assert(conv_util.eitherStridesOrDilationsAreOne(stride, dilation), () => 'Error in conv1D: Either stride or dilation must be 1. 
' +\n `Got stride ${stride} and dilation '${dilation}'`);\n util.assert(dataFormat === 'NWC', () => `Error in conv1d: got dataFormat of ${dataFormat} but only NWC is currently supported.`);\n const filter4D = reshape($filter, [1, $filter.shape[0], $filter.shape[1], $filter.shape[2]]);\n const input4D = reshape(x3D, [x3D.shape[0], 1, x3D.shape[1], x3D.shape[2]]);\n const strides = [1, stride];\n const dilations = [1, dilation];\n const conv2dDataFormat = 'NHWC';\n const res = conv2d(input4D, filter4D, strides, pad, conv2dDataFormat, dilations, dimRoundingMode);\n if (reshapedTo3D) {\n return reshape(res, [res.shape[2], res.shape[3]]);\n }\n return reshape(res, [res.shape[0], res.shape[2], res.shape[3]]);\n}\nexport const conv1d = op({ conv1d_ });\n//# sourceMappingURL=conv1d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Conv2DBackpropInput } from '../kernel_names';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the derivative of the input of a 2D convolution.\n *\n * @param xShape The shape of the input: [batch, height, width, inDepth].\n * If length of 3, batch of 1 is assumed.\n * @param dy The derivative of the output, of rank 4 or rank 3 of shape\n * `[batch, outHeight, outWidth, outDepth]`. If rank 3, batch of 1 is\n * assumed.\n * @param filter The filter, rank 4, of shape\n * `[filterHeight, filterWidth, inDepth, outDepth]`.\n * @param strides The strides of the convolution: `[strideHeight,\n * strideWidth]`.\n * @param pad The type of padding algorithm used:\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * @param dataFormat: An optional string from: \"NHWC\", \"NCHW\". Defaults to\n * \"NHWC\". Specify the data format of the input and output data. With the\n * default format \"NHWC\", the data is stored in the order of: [batch,\n * height, width, channels].\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. 
If none is provided, it will not round\n * and error if the output is of fractional size.\n */\nfunction conv2DBackpropInput_(xShape, dy, filter, strides, pad, dataFormat = 'NHWC', dimRoundingMode) {\n util.assert(xShape.length === dy.rank, () => `Length of inShape ` +\n `(${xShape.length}) and rank of dy (${dy.rank}) must match`);\n let xShape4D = xShape;\n let dy4D = dy;\n let reshapedTo4D = false;\n if (dy.rank === 3) {\n reshapedTo4D = true;\n dy4D = reshape(dy, [1, dy.shape[0], dy.shape[1], dy.shape[2]]);\n xShape4D = [1, xShape[0], xShape[1], xShape[2]];\n }\n util.assert(xShape4D.length === 4, () => `Error in conv2dDerInput: inShape must be length 4, but got length ` +\n `${xShape4D.length}.`);\n util.assert(dy4D.rank === 4, () => `Error in conv2dDerInput: dy must be rank 4, but got ` +\n `rank ${dy4D.rank}`);\n util.assert(filter.rank === 4, () => `Error in conv2dDerInput: filter must be rank 4, but got ` +\n `rank ${filter.rank}`);\n const inDepth = dataFormat === 'NHWC' ? xShape4D[3] : xShape4D[1];\n const outDepth = dataFormat === 'NHWC' ? dy4D.shape[3] : dy4D.shape[1];\n util.assert(inDepth === filter.shape[2], () => `Error in conv2dDerInput: depth of input (${inDepth}) must ` +\n `match input depth for filter ${filter.shape[2]}.`);\n util.assert(outDepth === filter.shape[3], () => `Error in conv2dDerInput: depth of output (${outDepth}) must ` +\n `match output depth for filter ${filter.shape[3]}.`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in conv2dDerInput: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const forward = (backend, save) => {\n const dilations = 1;\n const $dataFormat = conv_util.convertConv2DDataFormat(dataFormat);\n const convInfo = conv_util.computeConv2DInfo(xShape4D, filter.shape, strides, dilations, pad, dimRoundingMode, false, $dataFormat);\n const res = backend.conv2dDerInput(dy4D, filter, convInfo);\n save([dy4D, filter]);\n return res;\n };\n const inputs = { dy: dy4D, filter };\n const attrs = { strides, pad, dataFormat, dimRoundingMode, inputShape: xShape4D };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, Conv2DBackpropInput, attrs);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const conv2DBackpropInput = op({ conv2DBackpropInput_ });\n//# sourceMappingURL=conv2d_backprop_input.js.map", "import { convertToTensor } from '../tensor_util_env';\nimport { conv2DBackpropInput } from './conv2d_backprop_input';\nimport { op } from './operation';\n/**\n * Computes the transposed 2D convolution of an image, also known as a\n * deconvolution.\n *\n * @param x The input image, of rank 4 or rank 3, of shape\n * `[batch, height, width, inDepth]`. If rank 3, batch of 1 is assumed.\n * @param filter The filter, rank 4, of shape\n * `[filterHeight, filterWidth, outDepth, inDepth]`.\n * `inDepth` must match `inDepth` in `x`.\n * @param outputShape Output shape, of rank 4 or rank 3:\n * `[batch, height, width, outDepth]`. If rank 3, batch of 1 is assumed.\n * @param strides The strides of the original convolution:\n * `[strideHeight, strideWidth]`.\n * @param pad The type of padding algorithm used in the non-transpose version\n * of the op.\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. 
If none is provided, it will not round\n * and error if the output is of fractional size.\n *\n * @doc {heading: 'Operations', subheading: 'Convolution'}\n */\nfunction conv2dTranspose_(x, filter, outputShape, strides, pad, dimRoundingMode) {\n const $x = convertToTensor(x, 'x', 'conv2dTranspose');\n const $filter = convertToTensor(filter, 'filter', 'conv2dTranspose');\n return conv2DBackpropInput(outputShape, $x, $filter, strides, pad, 'NHWC', dimRoundingMode);\n}\nexport const conv2dTranspose = op({ conv2dTranspose_ });\n//# sourceMappingURL=conv2d_transpose.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Conv3D } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { eitherStridesOrDilationsAreOne } from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes a 3D convolution over the input x.\n *\n * @param x The input tensor, of rank 5 or rank 4, of shape\n * `[batch, depth, height, width, channels]`. If rank 4,\n * batch of 1 is assumed.\n * @param filter The filter, rank 5, of shape\n * `[filterDepth, filterHeight, filterWidth, inChannels, outChannels]`.\n * inChannels must match between input and filter.\n * @param strides The strides of the convolution: `[strideDepth, strideHeight,\n * strideWidth]`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dataFormat: An optional string from: \"NDHWC\", \"NCDHW\". Defaults to\n * \"NDHWC\". Specify the data format of the input and output data. With the\n * default format \"NDHWC\", the data is stored in the order of: [batch,\n * depth, height, width, channels]. Only \"NDHWC\" is currently supported.\n * @param dilations The dilation rates: `[dilationDepth, dilationHeight,\n * dilationWidth]` in which we sample input values across the height\n * and width dimensions in atrous convolution. Defaults to `[1, 1, 1]`.\n * If `dilations` is a single number, then\n * `dilationDepth == dilationHeight == dilationWidth`. 
If it is greater\n * than 1, then all values of `strides` must be 1.\n *\n * @doc {heading: 'Operations', subheading: 'Convolution'}\n */\nfunction conv3d_(x, filter, strides, pad, dataFormat = 'NDHWC', dilations = [1, 1, 1]) {\n const $x = convertToTensor(x, 'x', 'conv3d');\n const $filter = convertToTensor(filter, 'filter', 'conv3d');\n let x5D = $x;\n let reshapedTo5D = false;\n if ($x.rank === 4) {\n reshapedTo5D = true;\n x5D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2], $x.shape[3]]);\n }\n util.assert(x5D.rank === 5, () => `Error in conv3d: input must be rank 5, but got rank ${x5D.rank}.`);\n util.assert($filter.rank === 5, () => `Error in conv3d: filter must be rank 5, but got rank ` +\n `${$filter.rank}.`);\n util.assert(x5D.shape[4] === $filter.shape[3], () => `Error in conv3d: depth of input (${x5D.shape[4]}) must match ` +\n `input depth for filter ${$filter.shape[3]}.`);\n util.assert(eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in conv3D: Either strides or dilations must be 1. ' +\n `Got strides ${strides} and dilations '${dilations}'`);\n util.assert(dataFormat === 'NDHWC', () => `Error in conv3d: got dataFormat of ${dataFormat} but only NDHWC is currently supported.`);\n const forward = (backend, save) => {\n const convInfo = conv_util.computeConv3DInfo(x5D.shape, $filter.shape, strides, dilations, pad);\n const res = backend.conv3d(x5D, $filter, convInfo);\n save([x5D, $filter]);\n return res;\n };\n const inputs = { x: x5D, filter: $filter };\n const attrs = { strides, pad, dataFormat, dilations };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, Conv3D, attrs);\n if (reshapedTo5D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3], res.shape[4]]);\n }\n return res;\n}\nexport const conv3d = op({ conv3d_ });\n//# sourceMappingURL=conv3d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Conv3DBackpropInputV2 } from '../kernel_names';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the derivative of the input of a 3D convolution.\n *\n * @param xShape The shape of the input: [batch, depth, height, width,\n * in_channels]. 
If length of 4, batch of 1 is assumed.\n * @param dy The derivative of the output, of rank 5 or rank 4 of shape\n * `[batch, outDepth, outHeight, outWidth, in_channels]`.\n * If rank 4, batch of 1 is assumed.\n * @param filter The filter, rank 5, of shape\n * `[filterDepth, filterHeight, filterWidth, inDepth, outDepth]`.\n * @param strides The strides of the convolution: `[strideDepth, strideHeight,\n * strideWidth]`.\n * @param pad The type of padding algorithm used:\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n */\nfunction conv3DBackpropInput_(xShape, dy, filter, strides, pad) {\n util.assert(xShape.length === dy.rank, () => `Length of inShape ` +\n `(${xShape.length}) and rank of dy (${dy.rank}) must match`);\n let xShape5D = xShape;\n let dy5D = dy;\n let reshapedTo5D = false;\n if (dy.rank === 4) {\n reshapedTo5D = true;\n dy5D = reshape(dy, [1, dy.shape[0], dy.shape[1], dy.shape[2], dy.shape[3]]);\n xShape5D = [1, xShape[0], xShape[1], xShape[2], xShape[3]];\n }\n const inDepth = xShape5D[4];\n const outDepth = dy5D.shape[4];\n util.assert(xShape5D.length === 5, () => `Error in conv3dDerInput: inShape must be length 5, but got length ` +\n `${xShape5D.length}.`);\n util.assert(dy5D.rank === 5, () => `Error in conv3dDerInput: dy must be rank 5, but got ` +\n `rank ${dy5D.rank}`);\n util.assert(filter.rank === 5, () => `Error in conv3dDerInput: filter must be rank 5, but got ` +\n `rank ${filter.rank}`);\n util.assert(inDepth === filter.shape[3], () => `Error in conv3dDerInput: depth of input (${inDepth}) must ` +\n `match input depth for filter ${filter.shape[3]}.`);\n util.assert(outDepth === filter.shape[4], () => `Error in conv3dDerInput: depth of output (${outDepth}) must ` +\n `match output depth for filter ${filter.shape[4]}.`);\n const forward = backend => {\n const dilations = 1;\n const convInfo = conv_util.computeConv3DInfo(xShape5D, filter.shape, strides, dilations, pad);\n return backend.conv3dDerInput(dy5D, filter, convInfo);\n };\n const inputs = { dy: dy5D, filter };\n const attrs = { pad, strides, inputShape: xShape5D };\n const res = ENGINE.runKernelFunc(forward, inputs, null, Conv3DBackpropInputV2, attrs);\n if (reshapedTo5D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3], res.shape[4]]);\n }\n return res;\n}\nexport const conv3DBackpropInput = op({ conv3DBackpropInput_ });\n//# sourceMappingURL=conv3d_backprop_input.js.map", "import { convertToTensor } from '../tensor_util_env';\nimport { conv3DBackpropInput } from './conv3d_backprop_input';\nimport { op } from './operation';\n/**\n * Computes the transposed 3D convolution of a volume, also known as a\n * deconvolution.\n *\n * @param x The input image, of rank 5 or rank 4, of shape\n * `[batch, depth, height, width, inDepth]`. If rank 4, batch of 1 is assumed.\n * @param filter The filter, rank 4, of shape\n * `[depth, filterHeight, filterWidth, outDepth, inDepth]`.\n * `inDepth` must match `inDepth` in `x`.\n * @param outputShape Output shape, of rank 5 or rank 4:\n * `[batch, depth, height, width, outDepth]`. 
If rank 3, batch of 1 is\n * assumed.\n * @param strides The strides of the original convolution:\n * `[strideDepth, strideHeight, strideWidth]`.\n * @param pad The type of padding algorithm used in the non-transpose version\n * of the op.\n *\n * @doc {heading: 'Operations', subheading: 'Convolution'}\n */\nfunction conv3dTranspose_(x, filter, outputShape, strides, pad) {\n const $x = convertToTensor(x, 'x', 'conv3dTranspose');\n const $filter = convertToTensor(filter, 'filter', 'conv3dTranspose');\n return conv3DBackpropInput(outputShape, $x, $filter, strides, pad);\n}\nexport const conv3dTranspose = op({ conv3dTranspose_ });\n//# sourceMappingURL=conv3d_transpose.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Cos } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes cos of the input `tf.Tensor` element-wise: `cos(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, Math.PI / 2, Math.PI * 3 / 4]);\n *\n * x.cos().print(); // or tf.cos(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction cos_(x) {\n const $x = convertToTensor(x, 'x', 'cos');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.cos($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Cos);\n}\nexport const cos = op({ cos_ });\n//# sourceMappingURL=cos.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Cosh } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes hyperbolic cos of the input `tf.Tensor` element-wise: `cosh(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, .7]);\n *\n * x.cosh().print(); // or tf.cosh(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction cosh_(x) {\n const $x = convertToTensor(x, 'x', 'cosh');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.cosh($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Cosh);\n}\nexport const cosh = op({ cosh_ });\n//# sourceMappingURL=cosh.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Cumsum } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { getAxesPermutation, getInnerMostAxes, getUndoAxesPermutation } from './axis_util';\nimport { op } from './operation';\nimport { transpose } from './transpose';\n/**\n * Computes the cumulative sum of a `tf.Tensor` along `axis`.\n *\n * ```js\n * const x = tf.tensor([1, 2, 3, 4]);\n * x.cumsum().print();\n * ```\n * ```js\n * const x = tf.tensor([[1, 2], [3, 4]]);\n * x.cumsum().print();\n * ```\n *\n * @param x The input tensor to be summed.\n * @param axis The axis along which to sum. Optional. Defaults to 0.\n * @param exclusive Whether to perform exclusive cumulative sum. Optional.\n * Defaults to false. If set to true then the sum of each tensor entry\n * does not include its own value, but only the values previous to it\n * along the specified axis.\n * @param reverse Whether to sum in the opposite direction. 
Optional.\n * Defaults to false.\n *\n * @doc {heading: 'Operations', subheading: 'Scan'}\n */\nfunction cumsum_(x, axis = 0, exclusive = false, reverse = false) {\n const $x = convertToTensor(x, 'x', 'cumsum');\n const forward = (backend, save) => {\n const permutation = getAxesPermutation([axis], $x.rank);\n let permutedX = $x;\n if (permutation != null) {\n permutedX = transpose($x, permutation);\n }\n const permutedAxis = getInnerMostAxes(1, $x.rank)[0];\n let value = backend.cumsum(permutedX, permutedAxis, exclusive, reverse);\n save([$x]);\n if (permutation != null) {\n const reversePermutation = getUndoAxesPermutation(permutation);\n value = transpose(value, reversePermutation);\n }\n return value;\n };\n const inputs = { x: $x };\n const attrs = { axis, exclusive, reverse };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Cumsum, attrs);\n}\nexport const cumsum = op({ cumsum_ });\n//# sourceMappingURL=cumsum.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { DepthToSpace } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * Rearranges data from depth into blocks of spatial data. More specifically,\n * this op outputs a copy of the input tensor where values from the `depth`\n * dimension are moved in spatial blocks to the `height` and `width` dimensions.\n * The attr `blockSize` indicates the input block size and how the data is\n * moved.\n *\n * - Chunks of data of size `blockSize * blockSize` from depth are rearranged\n * into non-overlapping blocks of size `blockSize x blockSize`\n *\n * - The width the output tensor is `inputWidth * blockSize`, whereas the\n * height is `inputHeight * blockSize`\n *\n * - The Y, X coordinates within each block of the output image are determined\n * by the high order component of the input channel index\n *\n * - The depth of the input tensor must be divisible by `blockSize *\n * blockSize`\n *\n * The `dataFormat` attr specifies the layout of the input and output tensors\n * with the following options: \"NHWC\": [ `batch, height, width, channels` ]\n * \"NCHW\": [ `batch, channels, height, width` ]\n *\n * ```js\n * const x = tf.tensor4d([1, 2, 3, 4], [1, 1, 1, 4]);\n * const blockSize = 2;\n * const dataFormat = \"NHWC\";\n *\n * tf.depthToSpace(x, blockSize, dataFormat).print();\n * ```\n *\n * @param x The input tensor of rank 4\n * @param blockSIze An `int` that is `>= 2`. The size of the spatial block\n * @param dataFormat An optional string from: \"NHWC\", \"NCHW\". 
Defaults to \"NHWC\"\n *\n * @doc {heading: 'Tensors', subheading: 'Transformations'}\n */\nfunction depthToSpace_(x, blockSize, dataFormat = 'NHWC') {\n const $x = convertToTensor(x, 'x', 'depthToSpace');\n const inputHeight = (dataFormat === 'NHWC') ? $x.shape[1] : $x.shape[2];\n const inputWidth = (dataFormat === 'NHWC') ? $x.shape[2] : $x.shape[3];\n const inputDepth = (dataFormat === 'NHWC') ? $x.shape[3] : $x.shape[1];\n util.assert(inputHeight * blockSize >= 0, () => `Negative dimension size caused by overflow when multiplying\n ${inputHeight} and ${blockSize} for depthToSpace with input shape\n ${$x.shape}`);\n util.assert(inputWidth * blockSize >= 0, () => `Negative dimension size caused by overflow when multiplying\n ${inputWidth} and ${blockSize} for depthToSpace with input shape\n ${$x.shape}`);\n util.assert((inputDepth % (blockSize * blockSize) === 0), () => `Dimension size must be evenly divisible by ${blockSize * blockSize} but is ${inputDepth} for depthToSpace with input shape ${$x.shape}`);\n const forward = backend => backend.depthToSpace($x, blockSize, dataFormat);\n const inputs = { x: $x };\n const attrs = { blockSize, dataFormat };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, DepthToSpace, attrs);\n}\nexport const depthToSpace = op({ depthToSpace_ });\n//# sourceMappingURL=depth_to_space.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { DepthwiseConv2dNative } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Depthwise 2D convolution.\n *\n * Given a 4D `input` array and a `filter` array of shape\n * `[filterHeight, filterWidth, inChannels, channelMultiplier]` containing\n * `inChannels` convolutional filters of depth 1, this op applies a\n * different filter to each input channel (expanding from 1 channel to\n * `channelMultiplier` channels for each), then concatenates the results\n * together. The output has `inChannels * channelMultiplier` channels.\n *\n * See\n * [https://www.tensorflow.org/api_docs/python/tf/nn/depthwise_conv2d](\n * https://www.tensorflow.org/api_docs/python/tf/nn/depthwise_conv2d)\n * for more details.\n *\n * @param x The input tensor, of rank 4 or rank 3, of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is\n * assumed.\n * @param filter The filter tensor, rank 4, of shape\n * `[filterHeight, filterWidth, inChannels, channelMultiplier]`.\n * @param strides The strides of the convolution: `[strideHeight,\n * strideWidth]`. 
If strides is a single number, then `strideHeight ==\n * strideWidth`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dilations The dilation rates: `[dilationHeight, dilationWidth]`\n * in which we sample input values across the height and width dimensions\n * in atrous convolution. Defaults to `[1, 1]`. If `rate` is a single\n * number, then `dilationHeight == dilationWidth`. If it is greater than\n * 1, then all values of `strides` must be 1.\n * @param dataFormat: An optional string from: \"NHWC\", \"NCHW\". Defaults to\n * \"NHWC\". Specify the data format of the input and output data. With the\n * default format \"NHWC\", the data is stored in the order of: [batch,\n * height, width, channels]. Only \"NHWC\" is currently supported.\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. If none is provided, it will not round\n * and error if the output is of fractional size.\n *\n * @doc {heading: 'Operations', subheading: 'Convolution'}\n */\nfunction depthwiseConv2d_(x, filter, strides, pad, dataFormat = 'NHWC', dilations = [1, 1], dimRoundingMode) {\n const $x = convertToTensor(x, 'x', 'depthwiseConv2d');\n const $filter = convertToTensor(filter, 'filter', 'depthwiseConv2d');\n let x4D = $x;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n reshapedTo4D = true;\n x4D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2]]);\n }\n util.assert(x4D.rank === 4, () => `Error in depthwiseConv2d: input must be rank 4, but got ` +\n `rank ${x4D.rank}.`);\n util.assert($filter.rank === 4, () => `Error in depthwiseConv2d: filter must be rank 4, but got rank ` +\n `${$filter.rank}.`);\n util.assert(x4D.shape[3] === $filter.shape[2], () => `Error in depthwiseConv2d: number of input channels ` +\n `(${x4D.shape[3]}) must match the inChannels dimension in ` +\n `filter ${$filter.shape[2]}.`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in depthwiseConv2d: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const forward = (backend, save) => {\n if (dilations == null) {\n dilations = [1, 1];\n }\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in depthwiseConv2d: Either strides or dilations must be ' +\n `1. Got strides ${strides} and dilations '${dilations}'`);\n const convInfo = conv_util.computeConv2DInfo(x4D.shape, $filter.shape, strides, dilations, pad, dimRoundingMode, true /* depthwise */);\n const res = backend.depthwiseConv2D(x4D, $filter, convInfo);\n save([x4D, $filter]);\n return res;\n };\n const inputs = { x: x4D, filter: $filter };\n const attrs = { strides, pad, dataFormat, dilations, dimRoundingMode };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, DepthwiseConv2dNative, attrs);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const depthwiseConv2d = op({ depthwiseConv2d_ });\n//# sourceMappingURL=depthwise_conv2d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Diag } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Returns a diagonal tensor with a given diagonal values.\n *\n * Given a diagonal, this operation returns a tensor with the diagonal and\n * everything else padded with zeros.\n *\n * Assume the input has dimensions `[D1,..., Dk]`, then the output is a tensor\n * of rank 2k with dimensions `[D1,..., Dk, D1,..., Dk]`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4]);\n *\n * tf.diag(x).print()\n * ```\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4, 5, 6, 6, 8], [4, 2])\n *\n * tf.diag(x).print()\n * ```\n * @param x The input tensor.\n */\nfunction diag_(x) {\n const $x = convertToTensor(x, 'x', 'diag');\n const forward = backend => {\n const flat = reshape($x, [$x.size]);\n const result = backend.diag(flat);\n const outShape = [...x.shape, ...x.shape];\n return reshape(result, outShape);\n };\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Diag);\n}\nexport const diag = op({ diag_ });\n//# sourceMappingURL=diag.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Dilation2D } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the grayscale dilation over the input `x`.\n *\n * @param x The input tensor, rank 3 or rank 4 of shape\n * `[batch, height, width, inChannels]`. 
If rank 3, batch of 1 is assumed.\n * @param filter The filter tensor, rank 3, of shape\n * `[filterHeight, filterWidth, depth]`.\n * @param strides The strides of the sliding window for each dimension of the\n * input tensor: `[strideHeight, strideWidth]`.\n * If `strides` is a single number,\n * then `strideHeight == strideWidth`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1*1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dataFormat Specify the data format of the input and output data.\n * Defaults to 'NHWC'. Only 'NHWC' is currently supported. With the\n * default format \"NHWC\", the data is stored in the order of: [batch,\n * height, width, channels].\n * @param dilations The dilation rates: `[dilationHeight, dilationWidth]`\n * in which we sample input values across the height and width dimensions\n * for atrous morphological dilation. Defaults to `[1, 1]`. If `dilations`\n * is a single number, then `dilationHeight == dilationWidth`. If it is\n * greater than 1, then all values of `strides` must be 1.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction dilation2d_(x, filter, strides, pad, dilations = [1, 1], dataFormat = 'NHWC') {\n const $x = convertToTensor(x, 'x', 'dilation2d');\n const $filter = convertToTensor(filter, 'filter', 'dilation2d');\n util.assert($x.rank === 3 || $x.rank === 4, () => `Error in dilation2d: input must be rank 3 or 4, but got rank ` +\n `${$x.rank}.`);\n util.assert($filter.rank === 3, () => `Error in dilation2d: filter must be rank 3, but got rank ` +\n `${$filter.rank}.`);\n util.assert(dataFormat === 'NHWC', () => `Error in dilation2d: Only NHWC is currently supported, ` +\n `but got dataFormat of ${dataFormat}`);\n let x4D = $x;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n x4D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2]]);\n reshapedTo4D = true;\n }\n const inputs = { x: x4D, filter: $filter };\n const attrs = { strides, pad, dilations };\n const res = ENGINE.runKernel(Dilation2D, inputs, attrs);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const dilation2d = op({ dilation2d_ });\n//# sourceMappingURL=dilation2d.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * Returns the dimensions in the input shape that are broadcasted to\n * produce the provided output shape.\n *\n * The returned dimensions are 0-indexed and sorted. An example:\n * inShape = [4, 1, 3]\n * outShape = [5, 4, 3, 3]\n * result = [1]. 
Dimension 1 (2nd dimension of input) gets broadcasted 1 => 3.\n */\nexport function getBroadcastDims(inShape, outShape) {\n const inRank = inShape.length;\n const dims = [];\n for (let i = 0; i < inRank; i++) {\n const dim = inRank - 1 - i;\n const a = inShape[dim] || 1;\n const b = outShape[outShape.length - 1 - i] || 1;\n if (b > 1 && a === 1) {\n dims.unshift(dim);\n }\n }\n return dims;\n}\n/**\n * Returns the axes in the output space that should be reduced to produce\n * the input space.\n */\nexport function getReductionAxes(inShape, outShape) {\n const result = [];\n for (let i = 0; i < outShape.length; i++) {\n const inDim = inShape[inShape.length - i - 1];\n const outAxis = outShape.length - i - 1;\n const outDim = outShape[outAxis];\n if (inDim == null || (inDim === 1 && outDim > 1)) {\n result.unshift(outAxis);\n }\n }\n return result;\n}\nexport function assertAndGetBroadcastShape(shapeA, shapeB) {\n const result = [];\n const l = Math.max(shapeA.length, shapeB.length);\n for (let i = 0; i < l; i++) {\n let a = shapeA[shapeA.length - i - 1];\n if (a == null) {\n a = 1;\n }\n let b = shapeB[shapeB.length - i - 1];\n if (b == null) {\n b = 1;\n }\n if (a === 1) {\n result.unshift(b);\n }\n else if (b === 1) {\n result.unshift(a);\n }\n else if (a !== b) {\n const errMsg = `Operands could not be broadcast together with shapes ` +\n `${shapeA} and ${shapeB}.`;\n throw Error(errMsg);\n }\n else {\n result.unshift(a);\n }\n }\n return result;\n}\n//# sourceMappingURL=broadcast_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Equal } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { op } from './operation';\n/**\n * Returns the truth value of (a == b) element-wise. Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3]);\n * const b = tf.tensor1d([2, 2, 2]);\n *\n * a.equal(b).print();\n * ```\n *\n * @param a The first input tensor.\n * @param b The second input tensor. Must have the same dtype as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Logical'}\n */\nfunction equal_(a, b) {\n let $a = convertToTensor(a, 'a', 'equal');\n let $b = convertToTensor(b, 'b', 'equal');\n [$a, $b] = makeTypesMatch($a, $b);\n assertAndGetBroadcastShape($a.shape, $b.shape);\n const forward = backend => backend.equal($a, $b);\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null, Equal);\n}\nexport const equal = op({ equal_ });\n//# sourceMappingURL=equal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { SelectV2 } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assert, assertShapesMatch } from '../util';\nimport { broadcastTo } from './broadcast_to';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { op } from './operation';\n/**\n * Returns the elements, either `a` or `b` depending on the `condition`.\n *\n * If the condition is true, select from `a`, otherwise select from `b`.\n *\n * ```js\n * const cond = tf.tensor1d([false, false, true], 'bool');\n * const a = tf.tensor1d([1 , 2, 3]);\n * const b = tf.tensor1d([-1, -2, -3]);\n *\n * a.where(cond, b).print();\n * ```\n *\n * @param condition The input condition. Must be of dtype bool.\n * @param a If `condition` is rank 1, `a` may have a higher rank but\n * its first dimension must match the size of `condition`.\n * @param b A tensor with the same dtype as `a` and with shape that is\n * compatible with `a`.\n * @return A tensor with same dtype as `a` and `b`, and shape that is\n * broadcastable from `a` and `b`.\n *\n * @doc {heading: 'Operations', subheading: 'Logical'}\n */\nfunction where_(condition, a, b) {\n const $a = convertToTensor(a, 'a', 'where');\n const $b = convertToTensor(b, 'b', 'where');\n const $condition = convertToTensor(condition, 'condition', 'where', 'bool');\n // TODO: move this logic to forward function when the broadcastTo op is\n // implemented in WASM.\n // Find the broadcastable shape for $a and $b.\n const broadcastShape = assertAndGetBroadcastShape($a.shape, $b.shape);\n const $broadcastedA = broadcastTo($a, broadcastShape);\n const $broadcastedB = broadcastTo($b, broadcastShape);\n if ($condition.rank === 1) {\n // If condition rank is 1, then the first dimension must match the size of\n // condition.\n assert($condition.shape[0] === $a.shape[0], () => 'The first dimension of `a` must match the size of `condition`.');\n }\n if ($condition.rank !== 1) {\n // A must have the same shape as condition.\n assertShapesMatch($condition.shape, $broadcastedB.shape, 'Error in where: ');\n }\n const forward = (backend, save) => {\n const res = backend.select($condition, $broadcastedA, $broadcastedB);\n save([$condition]);\n return res;\n };\n const inputs = {\n condition: $condition,\n t: $broadcastedA,\n e: $broadcastedB\n };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, SelectV2);\n}\nexport const where = op({ where_ });\n//# sourceMappingURL=where.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { ZerosLike } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Creates a `tf.Tensor` with all elements set to 0 with the same shape as the\n * given tensor.\n *\n * ```js\n * const x = tf.tensor([1, 2]);\n * tf.zerosLike(x).print();\n * ```\n *\n * @param x The tensor of required shape.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nfunction zerosLike_(x) {\n const $x = convertToTensor(x, 'x', 'zerosLike');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(backend => backend.zerosLike($x), inputs, null /* grad */, ZerosLike);\n}\nexport const zerosLike = op({ zerosLike_ });\n//# sourceMappingURL=zeros_like.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { div } from './div';\nimport { equal } from './equal';\nimport { op } from './operation';\nimport { where } from './where';\nimport { zerosLike } from './zeros_like';\n/**\n * Divides two `tf.Tensor`s element-wise, A / B. Supports broadcasting. Return 0\n * if denominator is 0.\n *\n *\n * ```js\n * const a = tf.tensor1d([1, 4, 9, 16]);\n * const b = tf.tensor1d([1, 2, 3, 4]);\n * const c = tf.tensor1d([0, 0, 0, 0]);\n *\n * a.divNoNan(b).print(); // or tf.divNoNan(a, b)\n * a.divNoNan(c).print(); // or tf.divNoNan(a, c)\n * ```\n *\n * ```js\n * // Broadcast div a with b.\n * const a = tf.tensor1d([2, 4, 6, 8]);\n * const b = tf.scalar(2);\n * const c = tf.scalar(0);\n *\n * a.divNoNan(b).print(); // or tf.divNoNan(a, b)\n * a.divNoNan(c).print(); // or tf.divNoNan(a, c)\n * ```\n *\n * @param a The first tensor as the numerator.\n * @param b The second tensor as the denominator. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Sub } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Subtracts two `tf.Tensor`s element-wise, A - B. Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([10, 20, 30, 40]);\n * const b = tf.tensor1d([1, 2, 3, 4]);\n *\n * a.sub(b).print(); // or tf.sub(a, b)\n * ```\n *\n * ```js\n * // Broadcast subtract a with b.\n * const a = tf.tensor1d([10, 20, 30, 40]);\n * const b = tf.scalar(5);\n *\n * a.sub(b).print(); // or tf.sub(a, b)\n * ```\n * @param a The first `tf.Tensor` to subtract from.\n * @param b The second `tf.Tensor` to be subtracted. Must have the same dtype as\n * `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction sub_(a, b) {\n let $a = convertToTensor(a, 'a', 'sub');\n let $b = convertToTensor(b, 'b', 'sub');\n [$a, $b] = makeTypesMatch($a, $b);\n const forward = (backend, save) => {\n const res = backend.subtract($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Sub);\n}\nexport const sub = op({ sub_ });\n//# sourceMappingURL=sub.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Sum } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam } from '../util';\nimport { expandShapeToKeepDim, getAxesPermutation, getInnerMostAxes } from './axis_util';\nimport { cast } from './cast';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { transpose } from './transpose';\n/**\n * Computes the sum of elements across dimensions of a `tf.Tensor`.\n *\n * Reduces the input along the dimensions given in `axes`. Unless `keepDims`\n * is true, the rank of the `tf.Tensor` is reduced by 1 for each entry in\n * `axes`. If `keepDims` is true, the reduced dimensions are retained with\n * length 1. 
If axes has no entries, all dimensions are reduced, and a\n * `tf.Tensor` with a single element is returned.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3]);\n *\n * x.sum().print(); // or tf.sum(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * const axis = 1;\n * x.sum(axis).print(); // or tf.sum(x, axis)\n * ```\n *\n * @param x The input tensor to compute the sum over. If the dtype is `bool`\n * it will be converted to `int32` and the output dtype will be `int32`.\n * @param axis The dimension(s) to reduce. By default it reduces\n * all dimensions.\n * @param keepDims If true, retains reduced dimensions with size 1.\n *\n * @doc {heading: 'Operations', subheading: 'Reduction'}\n */\nfunction sum_(x, axis = null, keepDims = false) {\n let $x = convertToTensor(x, 'x', 'sum');\n if ($x.dtype === 'bool') {\n $x = cast($x, 'int32');\n }\n const forward = (backend, save) => {\n save([$x]);\n const axes = parseAxisParam(axis, $x.shape);\n const permutation = getAxesPermutation(axes, $x.rank);\n let reductionAxes = axes;\n let permutedX = $x;\n if (permutation != null) {\n permutedX = transpose($x, permutation);\n reductionAxes = getInnerMostAxes(reductionAxes.length, $x.rank);\n }\n let value = backend.sum(permutedX, reductionAxes);\n if (keepDims) {\n const newShape = expandShapeToKeepDim(value.shape, axes);\n value = reshape(value, newShape);\n }\n return value;\n };\n const inputs = { x: $x };\n const attrs = { axis, keepDims };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Sum, attrs);\n}\nexport const sum = op({ sum_ });\n//# sourceMappingURL=sum.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { LogSoftmax } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { cast } from './cast';\nimport { exp } from './exp';\nimport { log } from './log';\nimport { max } from './max';\nimport { op } from './operation';\nimport { sub } from './sub';\nimport { sum } from './sum';\n/**\n * Computes the log softmax.\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3]);\n *\n * a.logSoftmax().print(); // or tf.logSoftmax(a)\n * ```\n *\n * ```js\n * const a = tf.tensor2d([2, 4, 6, 1, 2, 3], [2, 3]);\n *\n * a.logSoftmax().print(); // or tf.logSoftmax(a)\n * ```\n *\n * @param logits The logits array.\n * @param axis The dimension softmax would be performed on. Defaults to `-1`\n * which indicates the last dimension.\n *\n * @doc {heading: 'Operations', subheading: 'Normalization'}\n */\nfunction logSoftmax_(logits, axis = -1) {\n const $logits = convertToTensor(logits, 'logits', 'logSoftmax');\n if (axis === -1) {\n axis = $logits.rank - 1;\n }\n if (axis !== $logits.rank - 1) {\n throw Error('Log Softmax along a non-last dimension is not yet supported. 
' +\n `Logits was rank ${$logits.rank} and axis was ${axis}`);\n }\n const forward = (backend, save) => {\n const keepDims = true;\n const xMax = max(logits, axis, true);\n const shifted = sub(logits, xMax);\n const value = sub(cast(shifted, 'float32'), log(sum(exp(shifted), axis, keepDims)));\n save([value]);\n return value;\n };\n const inputs = { logits: $logits };\n const attrs = { axis };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, LogSoftmax, attrs);\n}\nexport const logSoftmax = op({ logSoftmax_ });\n//# sourceMappingURL=log_softmax.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam } from '../util';\nimport { add } from './add';\nimport { expandShapeToKeepDim } from './axis_util';\nimport { exp } from './exp';\nimport { log } from './log';\nimport { max } from './max';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { sub } from './sub';\nimport { sum } from './sum';\n/**\n * Computes the log(sum(exp(elements across the reduction dimensions)).\n *\n * Reduces the input along the dimensions given in `axis`. Unless `keepDims`\n * is true, the rank of the array is reduced by 1 for each entry in `axis`.\n * If `keepDims` is true, the reduced dimensions are retained with length 1.\n * If `axis` has no entries, all dimensions are reduced, and an array with a\n * single element is returned.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3]);\n *\n * x.logSumExp().print(); // or tf.logSumExp(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * const axis = 1;\n * x.logSumExp(axis).print(); // or tf.logSumExp(a, axis)\n * ```\n * @param x The input tensor.\n * @param axis The dimension(s) to reduce. If null (the default),\n * reduces all dimensions.\n * @param keepDims If true, retains reduced dimensions with length\n * of 1. Defaults to false.\n *\n * @doc {heading: 'Operations', subheading: 'Reduction'}\n */\nfunction logSumExp_(x, axis = null, keepDims = false) {\n const $x = convertToTensor(x, 'x', 'logSumExp');\n const axes = parseAxisParam(axis, $x.shape);\n const xMax = max($x, axes, true /* keepDims */);\n const a = sub($x, xMax);\n const b = exp(a);\n const c = sum(b, axes);\n const d = log(c);\n const res = add(reshape(xMax, d.shape), d);\n if (keepDims) {\n const newShape = expandShapeToKeepDim(res.shape, axes);\n return reshape(res, newShape);\n }\n return res;\n}\nexport const logSumExp = op({ logSumExp_ });\n//# sourceMappingURL=log_sum_exp.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { LogicalAnd } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { op } from './operation';\n/**\n * Returns the truth value of `a AND b` element-wise. Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([false, false, true, true], 'bool');\n * const b = tf.tensor1d([false, true, false, true], 'bool');\n *\n * a.logicalAnd(b).print();\n * ```\n *\n * @param a The first input tensor. Must be of dtype bool.\n * @param b The second input tensor. Must be of dtype bool.\n *\n * @doc {heading: 'Operations', subheading: 'Logical'}\n */\nfunction logicalAnd_(a, b) {\n const $a = convertToTensor(a, 'a', 'logicalAnd', 'bool');\n const $b = convertToTensor(b, 'b', 'logicalAnd', 'bool');\n assertAndGetBroadcastShape($a.shape, $b.shape);\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(backend => backend.logicalAnd($a, $b), inputs, null /* grad */, LogicalAnd);\n}\nexport const logicalAnd = op({ logicalAnd_ });\n//# sourceMappingURL=logical_and.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { LogicalNot } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Returns the truth value of `NOT x` element-wise.\n *\n * ```js\n * const a = tf.tensor1d([false, true], 'bool');\n *\n * a.logicalNot().print();\n * ```\n *\n * @param x The input tensor. Must be of dtype 'bool'.\n *\n * @doc {heading: 'Operations', subheading: 'Logical'}\n */\nfunction logicalNot_(x) {\n const $x = convertToTensor(x, 'x', 'logicalNot', 'bool');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(backend => backend.logicalNot($x), inputs, null /* grad */, LogicalNot);\n}\nexport const logicalNot = op({ logicalNot_ });\n//# sourceMappingURL=logical_not.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { LogicalOr } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { op } from './operation';\n/**\n * Returns the truth value of `a OR b` element-wise. Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([false, false, true, true], 'bool');\n * const b = tf.tensor1d([false, true, false, true], 'bool');\n *\n * a.logicalOr(b).print();\n * ```\n * @param a The first input tensor. Must be of dtype bool.\n * @param b The second input tensor. Must be of dtype bool.\n *\n * @doc {heading: 'Operations', subheading: 'Logical'}\n */\nfunction logicalOr_(a, b) {\n const $a = convertToTensor(a, 'a', 'logicalOr', 'bool');\n const $b = convertToTensor(b, 'b', 'logicalOr', 'bool');\n assertAndGetBroadcastShape($a.shape, $b.shape);\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(backend => backend.logicalOr($a, $b), inputs, null /* grad */, LogicalOr);\n}\nexport const logicalOr = op({ logicalOr_ });\n//# sourceMappingURL=logical_or.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { logicalAnd } from './logical_and';\nimport { logicalNot } from './logical_not';\nimport { logicalOr } from './logical_or';\nimport { op } from './operation';\n/**\n * Returns the truth value of `a XOR b` element-wise. Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([false, false, true, true], 'bool');\n * const b = tf.tensor1d([false, true, false, true], 'bool');\n *\n * a.logicalXor(b).print();\n * ```\n *\n * @param a The first input tensor. Must be of dtype bool.\n * @param b The second input tensor. 
Must be of dtype bool.\n *\n * @doc {heading: 'Operations', subheading: 'Logical'}\n */\nfunction logicalXor_(a, b) {\n const $a = convertToTensor(a, 'a', 'logicalXor', 'bool');\n const $b = convertToTensor(b, 'b', 'logicalXor', 'bool');\n assertAndGetBroadcastShape($a.shape, $b.shape);\n // x ^ y = (x | y) & ~(x & y)\n return logicalAnd(logicalOr(a, b), logicalNot(logicalAnd(a, b)));\n}\nexport const logicalXor = op({ logicalXor_ });\n//# sourceMappingURL=logical_xor.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { MaxPool } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the 2D max pooling of an image.\n *\n * @param x The input tensor, of rank 4 or rank 3 of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is assumed.\n * @param filterSize The filter size: `[filterHeight, filterWidth]`. If\n * `filterSize` is a single number, then `filterHeight == filterWidth`.\n * @param strides The strides of the pooling: `[strideHeight, strideWidth]`. If\n * `strides` is a single number, then `strideHeight == strideWidth`.\n * @param dilations The dilation rates: `[dilationHeight, dilationWidth]`\n * in which we sample input values across the height and width dimensions\n * in dilated pooling. Defaults to `[1, 1]`. If `dilations` is a single\n * number, then `dilationHeight == dilationWidth`. If it is greater than\n * 1, then all values of `strides` must be 1.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. If none is provided, it will not round\n * and error if the output is of fractional size.\n */\nfunction maxPool_(x, filterSize, strides, pad, dimRoundingMode) {\n const $x = convertToTensor(x, 'x', 'maxPool');\n const dilations = 1;\n let x4D = $x;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n reshapedTo4D = true;\n x4D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2]]);\n }\n util.assert(x4D.rank === 4, () => `Error in maxPool: input must be rank 4 but got rank ${x4D.rank}.`);\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in maxPool: Either strides or dilations must be 1. 
' +\n `Got strides ${strides} and dilations '${dilations}'`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in maxPool: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const forward = (backend, save) => {\n const convInfo = conv_util.computePool2DInfo(x4D.shape, filterSize, strides, 1 /* dilations */, pad, dimRoundingMode);\n let y;\n if (convInfo.filterWidth === 1 && convInfo.filterHeight === 1 &&\n util.arraysEqual(convInfo.inShape, convInfo.outShape)) {\n y = x4D.clone();\n }\n else {\n y = backend.maxPool(x4D, convInfo);\n }\n save([x4D, y]);\n return y;\n };\n const inputs = { x: x4D };\n const attrs = { filterSize, strides, pad, dimRoundingMode };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, MaxPool, attrs);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const maxPool = op({ maxPool_ });\n//# sourceMappingURL=max_pool.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { deprecationWarn } from '../globals';\nimport { MaxPool3D } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the 3D max pooling.\n *\n * ```js\n * const x = tf.tensor5d([1, 2, 3, 4, 5, 6, 7, 8], [1, 2, 2, 2, 1]);\n * const result = tf.maxPool3d(x, 2, 1, 'valid');\n * result.print();\n * ```\n *\n * @param x The input tensor, of rank 5 or rank 4 of shape\n * `[batch, depth, height, width, inChannels]`.\n * @param filterSize The filter size:\n * `[filterDepth, filterHeight, filterWidth]`.\n * If `filterSize` is a single number,\n * then `filterDepth == filterHeight == filterWidth`.\n * @param strides The strides of the pooling:\n * `[strideDepth, strideHeight, strideWidth]`.\n * If `strides` is a single number,\n * then `strideDepth == strideHeight == strideWidth`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1*1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. If none is provided, it will not round\n * and error if the output is of fractional size.\n * @param dataFormat An optional string from: \"NDHWC\", \"NCDHW\". Defaults to\n * \"NDHWC\". Specify the data format of the input and output data. 
With the\n * default format \"NDHWC\", the data is stored in the order of: [batch,\n * depth, height, width, channels]. Only \"NDHWC\" is currently supported.\n * @param dilations Deprecated, this field will be gone in v3.0.0.\n * The dilation rates: `[dilationDepth, dilationHeight, dilationWidth]`\n * in which we sample input values across the depth, height and width\n * dimensions in dilated pooling.\n * Defaults to `[1, 1, 1]`. If `dilations` is a single number,\n * then `dilationDepth == dilationHeight == dilationWidth`.\n * If it is greater than 1, then all values of `strides` must be 1.\n *\n * @doc {heading: 'Operations', subheading: 'Convolution'}\n */\nfunction maxPool3d_(x, filterSize = [1, 1, 1], strides, pad, dimRoundingMode, dataFormat = 'NDHWC', dilations) {\n if (dilations == null) {\n dilations = [1, 1, 1];\n }\n else {\n deprecationWarn('dilations is deprecated, this field will be gone in ' +\n 'v3.0.0.');\n }\n const $x = convertToTensor(x, 'x', 'maxPool3d');\n let x5D = $x;\n let reshapedTo5D = false;\n if ($x.rank === 4) {\n reshapedTo5D = true;\n x5D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2], $x.shape[3]]);\n }\n util.assert(x5D.rank === 5, () => `Error in maxPool3d: x must be rank 5 but got rank ${x5D.rank}.`);\n util.assert(dataFormat === 'NDHWC', () => `Error in maxPool3d: Only NDHWC is currently supported, ` +\n `but got dataFormat of ${dataFormat}`);\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in maxPool3d: Either strides or dilations must be 1. ' +\n `Got strides ${strides} and dilations '${dilations}'`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in maxPool3d: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const forward = (backend, save) => {\n if (dilations == null) {\n dilations = [1, 1, 1];\n }\n const convInfo = conv_util.computePool3DInfo(x5D.shape, filterSize, strides, dilations, pad, dimRoundingMode, dataFormat);\n const y = backend.maxPool3d(x5D, convInfo);\n save([x5D, y]);\n return y;\n };\n const inputs = { x: x5D };\n const attrs = { filterSize, strides, pad, dimRoundingMode, dataFormat, dilations };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, MaxPool3D, attrs);\n if (reshapedTo5D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3], res.shape[4]]);\n }\n return res;\n}\nexport const maxPool3d = op({ maxPool3d_ });\n//# sourceMappingURL=max_pool_3d.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { MaxPoolWithArgmax } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes the 2D max pooling of an image with Argmax index.\n * The indices in argmax are flattened, so that a maximum value at position `[b,\n * y, x, c]` becomes flattened index: `(y * width + x) * channels + c` if\n * include_batch_in_index is False; `((b * height + y) * width + x) * channels\n * +c` if include_batch_in_index is True.\n *\n * The indices returned are always in `[0, height) x [0, width)` before\n * flattening.\n *\n * @param x The input tensor, of rank 4 or rank 3 of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is assumed.\n * @param filterSize The filter size: `[filterHeight, filterWidth]`. If\n * `filterSize` is a single number, then `filterHeight == filterWidth`.\n * @param strides The strides of the pooling: `[strideHeight, strideWidth]`. If\n * `strides` is a single number, then `strideHeight == strideWidth`.\n * @param dataFormat An optional string from: \"NDHWC\", \"NCDHW\". Defaults to\n * \"NDHWC\". Specify the data format of the input and output data. With the\n * default format \"NDHWC\", the data is stored in the order of: [batch,\n * depth, height, width, channels]. Only \"NDHWC\" is currently supported.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param includeBatchIndex Defaults to False. Whether to include batch\n * dimension in flattened index of argmax.\n *\n * @doc {heading: 'Operations', subheading: 'Convolution'}\n */\nfunction maxPoolWithArgmax_(x, filterSize, strides, pad, includeBatchInIndex = false) {\n const $x = convertToTensor(x, 'x', 'maxPoolWithArgmax');\n const inputs = { x: $x };\n const attrs = { filterSize, strides, pad, includeBatchInIndex };\n const result = ENGINE.runKernel(MaxPoolWithArgmax, inputs, attrs);\n return { result: result[0], indexes: result[1] };\n}\nexport const maxPoolWithArgmax = op({ maxPoolWithArgmax_ });\n//# sourceMappingURL=max_pool_with_argmax.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { makeZerosTypedArray, sizeFromShape } from '../util';\nimport { complex } from './complex';\n/**\n * Creates a `tf.Tensor` with all elements set to 0.\n *\n * ```js\n * tf.zeros([2, 2]).print();\n * ```\n *\n * @param shape An array of integers defining the output tensor shape.\n * @param dtype The type of an element in the resulting tensor. Can\n * be 'float32', 'int32' or 'bool'. Defaults to 'float'.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function zeros(shape, dtype = 'float32') {\n if (dtype === 'complex64') {\n const real = zeros(shape, 'float32');\n const imag = zeros(shape, 'float32');\n return complex(real, imag);\n }\n const values = makeZerosTypedArray(sizeFromShape(shape), dtype);\n return ENGINE.makeTensor(values, shape, dtype);\n}\n//# sourceMappingURL=zeros.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { makeOnesTypedArray, sizeFromShape } from '../util';\nimport { complex } from './complex';\nimport { zeros } from './zeros';\n/**\n * Creates a `tf.Tensor` with all elements set to 1.\n *\n * ```js\n * tf.ones([2, 2]).print();\n * ```\n *\n * @param shape An array of integers defining the output tensor shape.\n * @param dtype The type of an element in the resulting tensor. Defaults to\n * 'float'.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function ones(shape, dtype = 'float32') {\n if (dtype === 'complex64') {\n const real = ones(shape, 'float32');\n const imag = zeros(shape, 'float32');\n return complex(real, imag);\n }\n const values = makeOnesTypedArray(sizeFromShape(shape), dtype);\n return ENGINE.makeTensor(values, shape, dtype);\n}\n//# sourceMappingURL=ones.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { customGrad } from '../gradients';\nimport { Mean } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam, sizeFromShape } from '../util';\nimport { computeOutAndReduceShapes } from './axis_util';\nimport { cast } from './cast';\nimport { div } from './div';\nimport { mul } from './mul';\nimport { ones } from './ones';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { scalar } from './scalar';\nimport { sum } from './sum';\n/**\n * Computes the mean of elements across dimensions of a `tf.Tensor`.\n *\n * Reduces `x` along the dimensions given in `axis`. Unless `keepDims` is\n * true, the rank of the `tf.Tensor` is reduced by 1 for each entry in `axis`.\n * If `keepDims` is true, the reduced dimensions are retained with length 1.\n * If `axis` has no entries, all dimensions are reduced, and a `tf.Tensor` with\n * a single element is returned.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3]);\n *\n * x.mean().print(); // or tf.mean(a)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * const axis = 1;\n * x.mean(axis).print(); // or tf.mean(x, axis)\n * ```\n *\n * @param x The input tensor.\n * @param axis The dimension(s) to reduce. 
By default it reduces\n * all dimensions.\n * @param keepDims If true, retains reduced dimensions with size 1.\n *\n * @doc {heading: 'Operations', subheading: 'Reduction'}\n */\nfunction mean_(x, axis = null, keepDims = false) {\n const $x = convertToTensor(x, 'x', 'mean');\n const axes = parseAxisParam(axis, $x.shape);\n const shapes = computeOutAndReduceShapes($x.shape, axes);\n const reduceShape = shapes[1];\n const reduceSize = sizeFromShape(reduceShape);\n const inputs = { x: $x };\n const attrs = { axis, keepDims };\n const forward = () => {\n const reduceSizeScalar = scalar(reduceSize);\n // Cast if needed.\n const xReduce = reduceSizeScalar.dtype === $x.dtype ?\n $x :\n cast($x, reduceSizeScalar.dtype);\n const res = div(xReduce, reduceSizeScalar);\n return sum(res, axis, keepDims);\n };\n // Use a custom gradient to bypass 2 gradient backprops since mean is used\n // extremely often.\n const customOp = customGrad((x) => {\n const value = ENGINE.runKernelFunc(forward, inputs, null /* grad */, Mean, attrs);\n const gradFunc = (dy) => {\n const expandedDyShape = x.shape.slice();\n axes.forEach(axis => {\n expandedDyShape[axis] = 1;\n });\n const expandedDy = reshape(dy, expandedDyShape);\n const derX = div(mul(expandedDy, ones(x.shape, 'float32')), reduceSize);\n return derX;\n };\n return { value, gradFunc };\n });\n return customOp($x);\n}\nexport const mean = op({ mean_ });\n//# sourceMappingURL=mean.js.map", "import { ENGINE } from '../engine';\nimport { Min } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam } from '../util';\nimport * as axis_util from './axis_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { transpose } from './transpose';\n/**\n * Computes the minimum value from the input.\n *\n * Reduces the input along the dimensions given in `axes`. Unless `keepDims`\n * is true, the rank of the array is reduced by 1 for each entry in `axes`.\n * If `keepDims` is true, the reduced dimensions are retained with length 1.\n * If `axes` has no entries, all dimensions are reduced, and an array with a\n * single element is returned.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3]);\n *\n * x.min().print(); // or tf.min(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * const axis = 1;\n * x.min(axis).print(); // or tf.min(x, axis)\n * ```\n *\n * @param x The input Tensor.\n * @param axis The dimension(s) to reduce. 
By default it reduces\n * all dimensions.\n * @param keepDims If true, retains reduced dimensions with size 1.\n *\n * @doc {heading: 'Operations', subheading: 'Reduction'}\n */\nfunction min_(x, axis = null, keepDims = false) {\n const $x = convertToTensor(x, 'x', 'min');\n const forward = (backend, save) => {\n const origAxes = parseAxisParam(axis, $x.shape);\n let axes = origAxes;\n const permutedAxes = axis_util.getAxesPermutation(axes, $x.rank);\n let minInput = $x;\n if (permutedAxes != null) {\n minInput = transpose($x, permutedAxes);\n axes = axis_util.getInnerMostAxes(axes.length, $x.rank);\n }\n const y = backend.min(minInput, axes);\n if (permutedAxes != null) {\n minInput.dispose();\n }\n let res = y;\n if (keepDims) {\n const expandedShape = axis_util.expandShapeToKeepDim(res.shape, origAxes);\n res = reshape(y, expandedShape);\n y.dispose();\n }\n save([$x, res]);\n return res;\n };\n const inputs = { x: $x };\n const attrs = { axis, keepDims };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Min, attrs);\n}\nexport const min = op({ min_ });\n//# sourceMappingURL=min.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Minimum } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { cast } from './cast';\nimport { op } from './operation';\n/**\n * Returns the min of a and b (`a < b ? a : b`) element-wise.\n * Supports broadcasting.\n *\n * We also expose `minimumStrict` which has the same signature as this op and\n * asserts that `a` and `b` are the same shape (does not broadcast).\n *\n * ```js\n * const a = tf.tensor1d([1, 4, 3, 16]);\n * const b = tf.tensor1d([1, 2, 9, 4]);\n *\n * a.minimum(b).print(); // or tf.minimum(a, b)\n * ```\n *\n * ```js\n * // Broadcast minimum a with b.\n * const a = tf.tensor1d([2, 4, 6, 8]);\n * const b = tf.scalar(5);\n *\n * a.minimum(b).print(); // or tf.minimum(a, b)\n * ```\n *\n * @param a The first tensor.\n * @param b The second tensor. 
Must have the same type as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction minimum_(a, b) {\n let $a = convertToTensor(a, 'a', 'minimum');\n let $b = convertToTensor(b, 'b', 'minimum');\n [$a, $b] = makeTypesMatch($a, $b);\n if ($a.dtype === 'bool') {\n $a = cast($a, 'int32');\n $b = cast($b, 'int32');\n }\n assertAndGetBroadcastShape($a.shape, $b.shape);\n const forward = (backend, save) => {\n const res = backend.minimum($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Minimum);\n}\nexport const minimum = op({ minimum_ });\n//# sourceMappingURL=minimum.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { MirrorPad } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * Pads a `tf.Tensor` using mirror padding.\n *\n * This operation implements the `REFLECT` and `SYMMETRIC` modes of pad.\n *\n * ```js\n * const x = tf.range(0, 9).reshape([1, 1, 3, 3]);\n * x.mirrorPad([[0, 0], [0, 0], [2, 2], [2, 2]], 'reflect').print();\n * ```\n * @param x The tensor to pad.\n * @param paddings An array of length `R` (the rank of the tensor), where\n * each element is a length-2 tuple of ints `[padBefore, padAfter]`,\n * specifying how much to pad along each dimension of the tensor.\n * In \"reflect\" mode, the padded regions do not include the borders,\n * while in \"symmetric\" mode the padded regions do include the borders.\n * For example, if the input is `[1, 2, 3]` and paddings is `[0, 2]`,\n * then the output is `[1, 2, 3, 2, 1]` in \"reflect\" mode, and\n * `[1, 2, 3, 3, 2]` in \"symmetric\" mode.\n * If `mode` is \"reflect\" then both `paddings[D, 0]` and `paddings[D, 1]`\n * must be no greater than `x.shape[D] - 1`. If mode is \"symmetric\"\n * then both `paddings[D, 0]` and `paddings[D, 1]` must be no greater than\n * `x.shape[D]`\n * @param mode String to specify padding mode. Can be `'reflect' | 'symmetric'`\n */\n/** @doc {heading: 'Tensors', subheading: 'Transformations'} */\nfunction mirrorPad_(x, paddings, mode) {\n util.assert(mode === 'reflect' || mode === 'symmetric', () => `Invalid mode. Mode must be either reflect or symmetric. ` +\n `Got ${mode}.`);\n const $x = convertToTensor(x, 'x', 'mirrorPad');\n if ($x.rank === 0) {\n throw new Error('mirrorPad(scalar) is not defined. ' +\n 'Pass non-scalar to mirrorPad');\n }\n util.assert(paddings.length === $x.rank, () => `Padding doesn't match input. Must be ${$x.rank}. ` +\n `Got ${paddings.length}.`);\n const shapeOffset = mode === 'reflect' ? 1 : 0;\n for (let i = 0; i < $x.rank; i++) {\n util.assert(paddings[i].length === 2, () => `Invalid number of paddings. 
Must be length of 2 each.`);\n util.assert(paddings[i][0] >= 0 && paddings[i][0] <= $x.shape[i] - shapeOffset &&\n paddings[i][1] >= 0 && paddings[i][1] <= $x.shape[i] - shapeOffset, () => `Padding in dimension ${i} cannot be greater than or equal ` +\n `to ${$x.shape[i] - shapeOffset} or less than 0 for input of ` +\n `shape ${$x.shape}`);\n }\n const attrs = { paddings, mode };\n const inputs = { x: $x };\n return ENGINE.runKernel(MirrorPad, inputs, attrs);\n}\nexport const mirrorPad = op({ mirrorPad_ });\n//# sourceMappingURL=mirror_pad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Mod } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Returns the mod of a and b element-wise.\n * `floor(x / y) * y + mod(x, y) = x`\n * Supports broadcasting.\n *\n * We also expose `tf.modStrict` which has the same signature as this op and\n * asserts that `a` and `b` are the same shape (does not broadcast).\n *\n * ```js\n * const a = tf.tensor1d([1, 4, 3, 16]);\n * const b = tf.tensor1d([1, 2, 9, 4]);\n *\n * a.mod(b).print(); // or tf.mod(a, b)\n * ```\n *\n * ```js\n * // Broadcast a mod b.\n * const a = tf.tensor1d([2, 4, 6, 8]);\n * const b = tf.scalar(5);\n *\n * a.mod(b).print(); // or tf.mod(a, b)\n * ```\n *\n * @param a The first tensor.\n * @param b The second tensor. Must have the same type as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction mod_(a, b) {\n let $a = convertToTensor(a, 'a', 'mod');\n let $b = convertToTensor(b, 'b', 'mod');\n [$a, $b] = makeTypesMatch($a, $b);\n const forward = (backend, save) => {\n const res = backend.mod($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Mod);\n}\nexport const mod = op({ mod_ });\n//# sourceMappingURL=mod.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes square of `x` element-wise: `x ^ 2`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, Math.sqrt(2), -1]);\n *\n * x.square().print(); // or tf.square(x)\n * ```\n * @param x The input Tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction square_(x) {\n const $x = convertToTensor(x, 'x', 'square');\n const attrs = {};\n const inputsToSave = [$x];\n const outputsToSave = [];\n return ENGINE.runKernelFunc((backend, save) => {\n save([$x]);\n return backend.square($x);\n }, { x: $x }, null /* grad */, 'Square', attrs, inputsToSave, outputsToSave);\n}\nexport const square = op({ square_ });\n//# sourceMappingURL=square.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam } from '../util';\nimport { expandShapeToKeepDim } from './axis_util';\nimport { cast } from './cast';\nimport { mean } from './mean';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { square } from './square';\nimport { sub } from './sub';\n/**\n * Calculates the mean and variance of `x`. The mean and variance are\n * calculated by aggregating the contents of `x` across `axes`. If `x` is\n * 1-D and `axes = [0]` this is just the mean and variance of a vector.\n *\n * @param x The input tensor.\n * @param axis The dimension(s) along with to compute mean and\n * variance. 
By default it reduces all dimensions.\n * @param keepDims If true, the moments have the same dimensionality as the\n * input.\n * @return An object with two keys: `mean` and `variance`.\n *\n * @doc {heading: 'Operations', subheading: 'Normalization'}\n */\nfunction moments_(x, axis = null, keepDims = false) {\n x = convertToTensor(x, 'x', 'moments');\n const axes = parseAxisParam(axis, x.shape);\n const xMean = mean(x, axes, keepDims);\n let keepDimsShape = xMean.shape;\n if (!keepDims) {\n keepDimsShape = expandShapeToKeepDim(xMean.shape, axes);\n }\n const devSquared = square(sub(cast(x, 'float32'), reshape(xMean, keepDimsShape)));\n const variance = mean(devSquared, axes, keepDims);\n return { mean: xMean, variance };\n}\nexport const moments = op({ moments_ });\n//# sourceMappingURL=moments.js.map", "import { convertToTensor, convertToTensorArray } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes the next states and outputs of a stack of LSTMCells.\n *\n * Each cell output is used as input to the next cell.\n *\n * Returns `[cellState, cellOutput]`.\n *\n * Derived from tf.contrib.rn.MultiRNNCell.\n *\n * @param lstmCells Array of LSTMCell functions.\n * @param data The input to the cell.\n * @param c Array of previous cell states.\n * @param h Array of previous cell outputs.\n *\n * @doc {heading: 'Operations', subheading: 'RNN'}\n */\nfunction multiRNNCell_(lstmCells, data, c, h) {\n const $data = convertToTensor(data, 'data', 'multiRNNCell');\n const $c = convertToTensorArray(c, 'c', 'multiRNNCell');\n const $h = convertToTensorArray(h, 'h', 'multiRNNCell');\n let input = $data;\n const newStates = [];\n for (let i = 0; i < lstmCells.length; i++) {\n const output = lstmCells[i](input, $c[i], $h[i]);\n newStates.push(output[0]);\n newStates.push(output[1]);\n input = output[1];\n }\n const newC = [];\n const newH = [];\n for (let i = 0; i < newStates.length; i += 2) {\n newC.push(newStates[i]);\n newH.push(newStates[i + 1]);\n }\n return [newC, newH];\n}\nexport const multiRNNCell = op({ multiRNNCell_ });\n//# sourceMappingURL=multi_rnn_cell.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Creates a `tf.Tensor` with values drawn from a multinomial distribution.\n *\n * ```js\n * const probs = tf.tensor([.75, .25]);\n * tf.multinomial(probs, 3).print();\n * ```\n *\n * @param logits 1D array with unnormalized log-probabilities, or\n * 2D array of shape `[batchSize, numOutcomes]`. 
See the `normalized`\n * parameter.\n * @param numSamples Number of samples to draw for each row slice.\n * @param seed The seed number.\n * @param normalized Whether the provided `logits` are normalized true\n * probabilities (sum to 1). Defaults to false.\n * @return 1D array of shape `[numSamples]`, or 2D array of shape\n * `[batchSize, numSamples]`, depending on the rank of the input.\n *\n * @doc {heading: 'Tensors', subheading: 'Random'}\n */\nfunction multinomial_(logits, numSamples, seed, normalized = false) {\n const $logits = convertToTensor(logits, 'logits', 'multinomial');\n const numOutcomes = $logits.size;\n const origRank = $logits.rank;\n if (numOutcomes < 2) {\n throw new Error(`Error in multinomial: you need at least 2 outcomes, but got ` +\n `${numOutcomes}.`);\n }\n if (origRank > 2) {\n throw new Error(`Rank of probabilities must be 1 or 2, but is ${origRank}`);\n }\n seed = seed || Math.random();\n const logits2D = origRank === 1 ? reshape($logits, [1, -1]) : $logits;\n const res = ENGINE.runKernelFunc(backend => backend.multinomial(logits2D, normalized, numSamples, seed), { logits2D });\n // tslint:disable-next-line:no-unnecessary-type-assertion\n return origRank === 1 ? reshape(res, [res.size]) : res;\n}\nexport const multinomial = op({ multinomial_ });\n//# sourceMappingURL=multinomial.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { NotEqual } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { op } from './operation';\n/**\n * Returns the truth value of (a != b) element-wise. Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3]);\n * const b = tf.tensor1d([0, 2, 3]);\n *\n * a.notEqual(b).print();\n * ```\n * @param a The first input tensor.\n * @param b The second input tensor. Must have the same dtype as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Logical'}\n */\nfunction notEqual_(a, b) {\n let $a = convertToTensor(a, 'a', 'notEqual');\n let $b = convertToTensor(b, 'b', 'notEqual');\n [$a, $b] = makeTypesMatch($a, $b);\n assertAndGetBroadcastShape($a.shape, $b.shape);\n const forward = (backend) => backend.notEqual($a, $b);\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, NotEqual);\n}\nexport const notEqual = op({ notEqual_ });\n//# sourceMappingURL=not_equal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Real } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Returns the real part of a complex (or real) tensor.\n *\n * Given a tensor input, this operation returns a tensor of type float that is\n * the real part of each element in input considered as a complex number.\n *\n * If the input is real, it simply makes a clone.\n *\n * ```js\n * const x = tf.complex([-2.25, 3.25], [4.75, 5.75]);\n * tf.real(x).print();\n * ```\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nfunction real_(input) {\n const $input = convertToTensor(input, 'input', 'real');\n const forward = (backend) => {\n return backend.real($input);\n };\n const inputs = { input: $input };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Real);\n}\nexport const real = op({ real_ });\n//# sourceMappingURL=real.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { OnesLike } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { complex } from './complex';\nimport { imag } from './imag';\nimport { op } from './operation';\nimport { real } from './real';\nimport { zerosLike } from './zeros_like';\n/**\n * Creates a `tf.Tensor` with all elements set to 1 with the same shape as the\n * given tensor.\n *\n * ```js\n * const x = tf.tensor([1, 2]);\n * tf.onesLike(x).print();\n * ```\n * @param x A tensor.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nfunction onesLike_(x) {\n const $x = convertToTensor(x, 'x', 'onesLike');\n const forward = (backend, save) => {\n if ($x.dtype === 'complex64') {\n const r = onesLike(real($x));\n const i = zerosLike(imag($x));\n return complex(r, i);\n }\n return backend.onesLike($x);\n };\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, OnesLike);\n}\nexport const onesLike = op({ onesLike_ });\n//# sourceMappingURL=ones_like.js.map", "import { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { matMul } from './mat_mul';\nimport { op } from 
'./operation';\nimport { reshape } from './reshape';\n/**\n * Computes the outer product of two vectors, `v1` and `v2`.\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3]);\n * const b = tf.tensor1d([3, 4, 5]);\n *\n * tf.outerProduct(a, b).print();\n * ```\n * @param v1 The first vector in the outer product operation.\n * @param v2 The second vector in the outer product operation.\n *\n * @doc {heading: 'Operations', subheading: 'Matrices'}\n */\nfunction outerProduct_(v1, v2) {\n const $v1 = convertToTensor(v1, 'v1', 'outerProduct');\n const $v2 = convertToTensor(v2, 'v2', 'outerProduct');\n util.assert($v1.rank === 1 && $v2.rank === 1, () => `Error in outerProduct: inputs must be rank 1, but got ranks ` +\n `${$v1.rank} and ${$v2.rank}.`);\n const v12D = reshape($v1, [-1, 1]);\n const v22D = reshape($v2, [1, -1]);\n return matMul(v12D, v22D);\n}\nexport const outerProduct = op({ outerProduct_ });\n//# sourceMappingURL=outer_product.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { PadV2 } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Pads a `tf.Tensor` with a given value and paddings.\n *\n * This operation implements `CONSTANT` mode. For `REFLECT` and `SYMMETRIC`,\n * refer to `tf.mirrorPad`\n *\n * Also available are stricter rank-specific methods with the same signature\n * as this method that assert that `paddings` is of given length.\n * - `tf.pad1d`\n * - `tf.pad2d`\n * - `tf.pad3d`\n * - `tf.pad4d`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4]);\n * x.pad([[1, 2]]).print();\n * ```\n * @param x The tensor to pad.\n * @param paddings An array of length `R` (the rank of the tensor), where\n * each element is a length-2 tuple of ints `[padBefore, padAfter]`,\n * specifying how much to pad along each dimension of the tensor.\n * @param constantValue The pad value to use. Defaults to 0.\n *\n * @doc {heading: 'Tensors', subheading: 'Transformations'}\n */\nfunction pad_(x, paddings, constantValue = 0) {\n const $x = convertToTensor(x, 'x', 'pad');\n if ($x.rank === 0) {\n throw new Error('pad(scalar) is not defined. Pass non-scalar to pad');\n }\n const forward = (backend, save) => {\n save([$x]);\n return backend.pad($x, paddings, constantValue);\n };\n const attrs = { paddings, constantValue };\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, PadV2, attrs);\n}\nexport const pad = op({ pad_ });\n//# sourceMappingURL=pad.js.map", "import { assert } from '../util';\nimport { op } from './operation';\nimport { pad } from './pad';\n/**\n * Pads a `tf.Tensor1D` with a given value and paddings. See `pad` for details.\n */\nfunction pad1d_(x, paddings, constantValue = 0) {\n assert(paddings.length === 2, () => 'Invalid number of paddings. 
Must be length of 2.');\n return pad(x, [paddings], constantValue);\n}\nexport const pad1d = op({ pad1d_ });\n//# sourceMappingURL=pad1d.js.map", "import { assert } from '../util';\nimport { op } from './operation';\nimport { pad } from './pad';\n/**\n * Pads a `tf.Tensor2D` with a given value and paddings. See `pad` for details.\n */\nfunction pad2d_(x, paddings, constantValue = 0) {\n assert(paddings.length === 2 && paddings[0].length === 2 &&\n paddings[1].length === 2, () => 'Invalid number of paddings. Must be length of 2 each.');\n return pad(x, paddings, constantValue);\n}\nexport const pad2d = op({ pad2d_ });\n//# sourceMappingURL=pad2d.js.map", "import { assert } from '../util';\nimport { op } from './operation';\nimport { pad } from './pad';\n/**\n * Pads a `tf.Tensor3D` with a given value and paddings. See `pad` for details.\n */\nfunction pad3d_(x, paddings, constantValue = 0) {\n assert(paddings.length === 3 && paddings[0].length === 2 &&\n paddings[1].length === 2 && paddings[2].length === 2, () => 'Invalid number of paddings. Must be length of 2 each.');\n return pad(x, paddings, constantValue);\n}\nexport const pad3d = op({ pad3d_ });\n//# sourceMappingURL=pad3d.js.map", "import { assert } from '../util';\nimport { op } from './operation';\nimport { pad } from './pad';\n/**\n * Pads a `tf.Tensor4D` with a given value and paddings. See `pad` for details.\n */\nfunction pad4d_(x, paddings, constantValue = 0) {\n assert(paddings.length === 4 && paddings[0].length === 2 &&\n paddings[1].length === 2 && paddings[2].length === 2 &&\n paddings[3].length === 2, () => 'Invalid number of paddings. Must be length of 2 each.');\n return pad(x, paddings, constantValue);\n}\nexport const pad4d = op({ pad4d_ });\n//# sourceMappingURL=pad4d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { SpaceToBatchND } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * This operation divides \"spatial\" dimensions `[1, ..., M]` of the input into\n * a grid of blocks of shape `blockShape`, and interleaves these blocks with\n * the \"batch\" dimension (0) such that in the output, the spatial\n * dimensions `[1, ..., M]` correspond to the position within the grid,\n * and the batch dimension combines both the position within a spatial block\n * and the original batch position. Prior to division into blocks,\n * the spatial dimensions of the input are optionally zero padded\n * according to `paddings`. See below for a precise description.\n *\n * ```js\n * const x = tf.tensor4d([1, 2, 3, 4], [1, 2, 2, 1]);\n * const blockShape = [2, 2];\n * const paddings = [[0, 0], [0, 0]];\n *\n * x.spaceToBatchND(blockShape, paddings).print();\n * ```\n *\n * @param x A `tf.Tensor`. 
N-D with `x.shape` = `[batch] + spatialShape +\n * remainingShape`, where spatialShape has `M` dimensions.\n * @param blockShape A 1-D array. Must have shape `[M]`, all values must\n * be >= 1.\n * @param paddings A 2-D array. Must have shape `[M, 2]`, all values must be >=\n * 0. `paddings[i] = [padStart, padEnd]` specifies the amount to zero-pad\n * from input dimension `i + 1`, which corresponds to spatial dimension `i`. It\n * is required that\n * `(inputShape[i + 1] + padStart + padEnd) % blockShape[i] === 0`\n *\n * This operation is equivalent to the following steps:\n *\n * 1. Zero-pad the start and end of dimensions `[1, ..., M]` of the input\n * according to `paddings` to produce `padded` of shape paddedShape.\n *\n * 2. Reshape `padded` to `reshapedPadded` of shape:\n * `[batch] + [paddedShape[1] / blockShape[0], blockShape[0], ...,\n * paddedShape[M] / blockShape[M-1], blockShape[M-1]] + remainingShape`\n *\n * 3. Permute dimensions of `reshapedPadded` to produce `permutedReshapedPadded`\n * of shape: `blockShape + [batch] + [paddedShape[1] / blockShape[0], ...,\n * paddedShape[M] / blockShape[M-1]] + remainingShape`\n *\n * 4. Reshape `permutedReshapedPadded` to flatten `blockShape` into the\n * batch dimension, producing an output tensor of shape:\n * `[batch * prod(blockShape)] + [paddedShape[1] / blockShape[0], ...,\n * paddedShape[M] / blockShape[M-1]] + remainingShape`\n *\n * @doc {heading: 'Tensors', subheading: 'Transformations'}\n */\nfunction spaceToBatchND_(x, blockShape, paddings) {\n const $x = convertToTensor(x, 'x', 'spaceToBatchND');\n util.assert($x.rank >= 1 + blockShape.length, () => `input rank ${$x.rank} should be > than [blockShape] ${blockShape.length}`);\n util.assert(paddings.length === blockShape.length, () => `paddings.shape[0] ${paddings.length} must be equal to [blockShape] ${blockShape.length}`);\n util.assert($x.shape.reduce((a, b, i) => {\n if (i > 0 && i <= blockShape.length) {\n return a &&\n ((b + paddings[i - 1][0] + paddings[i - 1][1]) %\n blockShape[i - 1] ===\n 0);\n }\n return a;\n }, true), () => `input spatial dimensions ${$x.shape.slice(1)} with paddings ${paddings.toString()} must be divisible by blockShapes ${blockShape.toString()}`);\n const forward = backend => backend.spaceToBatchND($x, blockShape, paddings);\n const inputs = { x: $x };\n const attrs = { blockShape, paddings };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, SpaceToBatchND, attrs);\n}\nexport const spaceToBatchND = op({ spaceToBatchND_ });\n//# sourceMappingURL=space_to_batch_nd.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { avgPool } from './avg_pool';\nimport { batchToSpaceND } from './batch_to_space_nd';\nimport * as conv_util from './conv_util';\nimport { maxPool } from './max_pool';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { spaceToBatchND } from './space_to_batch_nd';\n/**\n * Performs an N-D pooling operation\n *\n * @param input The input tensor, of rank 4 or rank 3 of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is assumed.\n * @param windowShape The filter size: `[filterHeight, filterWidth]`. If\n * `filterSize` is a single number, then `filterHeight == filterWidth`.\n * @param poolingType The type of pooling, either 'max' or 'avg'.\n * @param pad The type of padding algorithm:\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dilations The dilation rates: `[dilationHeight, dilationWidth]`\n * in which we sample input values across the height and width dimensions\n * in dilated pooling. Defaults to `[1, 1]`. If `dilationRate` is a single\n * number, then `dilationHeight == dilationWidth`. If it is greater than\n * 1, then all values of `strides` must be 1.\n * @param strides The strides of the pooling: `[strideHeight, strideWidth]`. If\n * `strides` is a single number, then `strideHeight == strideWidth`.\n *\n * @doc {heading: 'Operations', subheading: 'Convolution'}\n */\nfunction pool_(input, windowShape, poolingType, pad, dilations, strides) {\n if (dilations == null) {\n dilations = [1, 1];\n }\n if (strides == null) {\n strides = 1;\n }\n if (pad === 0) {\n pad = 'valid';\n }\n const $x = convertToTensor(input, 'x', 'maxPool');\n let x4D = $x;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n reshapedTo4D = true;\n x4D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2]]);\n }\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in pool: Either strides or dilations must be 1. 
' +\n `Got strides ${strides} and dilations '${dilations}'`);\n const convInfo = conv_util.computePool2DInfo(x4D.shape, windowShape, strides, dilations, pad);\n const dilation = [convInfo.dilationHeight, convInfo.dilationWidth];\n // The following implementation does batchToSpace(pool(spaceToBatch(x)))\n // whenever dilation > 1 since the TF kernels do not support dilation > 1.\n // tslint:disable-next-line:max-line-length\n // https://github.com/tensorflow/tensorflow/blob/50f6bb67dc98c9b74630b6047aae7a4f8a40fd02/tensorflow/python/ops/nn_ops.py#L1037\n let basePadding;\n if (pad === 'same') {\n basePadding = withSpaceToBatchBasePaddings([convInfo.filterHeight, convInfo.filterWidth], dilation);\n }\n else {\n basePadding = [[0, 0], [0, 0]];\n }\n const isDilationOne = dilation[0] === 1 && dilation[1] === 1;\n const [adjustedPadding, adjustedCrops] = requiredSpaceToBatchPaddings([convInfo.inHeight, convInfo.inWidth], dilation, basePadding);\n const convertedPad = isDilationOne ? pad : 'valid';\n const convertedX = isDilationOne ? x4D : spaceToBatchND(x4D, dilation, adjustedPadding);\n const forwardOp = poolingType === 'avg' ?\n () => avgPool(convertedX, windowShape, strides, convertedPad) :\n () => maxPool(convertedX, windowShape, strides, convertedPad);\n const y = forwardOp();\n const res = isDilationOne ? y : batchToSpaceND(y, dilation, adjustedCrops);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\n// Helper function to compute crops and paddings for pool with dilation > 1.\n// tslint:disable-next-line:max-line-length\n// https://github.com/tensorflow/tensorflow/blob/50f6bb67dc98c9b74630b6047aae7a4f8a40fd02/tensorflow/python/ops/array_ops.py#L2184\nfunction requiredSpaceToBatchPaddings(inputShape, blockShape, basePadding) {\n const padStart = basePadding.map(b => b[0]);\n const origPadEnd = basePadding.map(b => b[1]);\n const fullInputShape = inputShape.concat(padStart, origPadEnd);\n const padEndExtra = blockShape.map((b, i) => (b - fullInputShape[i] % b) % b);\n const padEnd = origPadEnd.map((s, i) => s + padEndExtra[i]);\n const paddings = blockShape.map((_, i) => [padStart[i], padEnd[i]]);\n const crops = blockShape.map((_, i) => [0, padEndExtra[i]]);\n return [paddings, crops];\n}\n// Helper function to compute base paddings for pool with dilation > 1.\n// tslint:disable-next-line:max-line-length\n// https://github.com/tensorflow/tensorflow/blob/50f6bb67dc98c9b74630b6047aae7a4f8a40fd02/tensorflow/python/ops/nn_ops.py#L524\nfunction withSpaceToBatchBasePaddings(filterShape, dilation) {\n // Spatial dimensions of the filters and the upsampled filters in which we\n // introduce (rate - 1) zeros between consecutive filter values.\n const dilatedFilterShape = filterShape.map((s, i) => {\n return s + (s - 1) * (dilation[i] - 1);\n });\n const padExtraShape = dilatedFilterShape.map(s => s - 1);\n // When padding is odd, we pad more at end, following the same\n // convention as conv2d.\n const padExtraStart = padExtraShape.map(s => Math.floor(s / 2));\n const padExtraEnd = padExtraShape.map((s, i) => s - padExtraStart[i]);\n return padExtraShape.map((_, i) => {\n return [padExtraStart[i], padExtraEnd[i]];\n });\n}\nexport const pool = op({ pool_ });\n//# sourceMappingURL=pool.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Pow } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes the power of one `tf.Tensor` to another. Supports broadcasting.\n *\n * Given a `tf.Tensor` x and a `tf.Tensor` y, this operation computes x^y for\n * corresponding elements in x and y. The result's dtype will be the upcasted\n * type of the `base` and `exp` dtypes.\n *\n * ```js\n * const a = tf.tensor([[2, 3], [4, 5]])\n * const b = tf.tensor([[1, 2], [3, 0]]).toInt();\n *\n * a.pow(b).print(); // or tf.pow(a, b)\n * ```\n *\n * ```js\n * const a = tf.tensor([[1, 2], [3, 4]])\n * const b = tf.tensor(2).toInt();\n *\n * a.pow(b).print(); // or tf.pow(a, b)\n * ```\n * We also expose `powStrict` which has the same signature as this op and\n * asserts that `base` and `exp` are the same shape (does not broadcast).\n *\n * @param base The base `tf.Tensor` to pow element-wise.\n * @param exp The exponent `tf.Tensor` to pow element-wise.\n *\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction pow_(base, exp) {\n let $base = convertToTensor(base, 'base', 'pow');\n let $exp = convertToTensor(exp, 'exp', 'pow');\n [$base, $exp] = makeTypesMatch($base, $exp);\n const inputs = { a: $base, b: $exp };\n const forward = (backend, save) => {\n const y = backend.pow($base, $exp);\n save([$base, $exp, y]);\n return y;\n };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Pow);\n}\nexport const pow = op({ pow_ });\n//# sourceMappingURL=pow.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Prelu } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes leaky rectified linear element-wise with parametric alphas.\n *\n * `x < 0 ? 
alpha * x : f(x) = x`\n *\n * ```js\n * const x = tf.tensor1d([-1, 2, -3, 4]);\n * const alpha = tf.scalar(0.1);\n *\n * x.prelu(alpha).print(); // or tf.prelu(x, alpha)\n * ```\n * @param x The input tensor.\n * @param alpha Scaling factor for negative values.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction prelu_(x, alpha) {\n const $x = convertToTensor(x, 'x', 'prelu');\n const $alpha = convertToTensor(alpha, 'alpha', 'prelu');\n const forward = (backend, save) => {\n const res = backend.prelu($x, $alpha);\n save([$x, $alpha]);\n return res;\n };\n const inputs = { x: $x, alpha: $alpha };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Prelu);\n}\nexport const prelu = op({ prelu_ });\n//# sourceMappingURL=prelu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Prod } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam } from '../util';\nimport { expandShapeToKeepDim, getAxesPermutation, getInnerMostAxes } from './axis_util';\nimport { cast } from './cast';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { transpose } from './transpose';\n/**\n * Computes the product of elements across dimensions of a `tf.Tensor`.\n *\n * Reduces the input along the dimensions given in `axes`. Unless `keepDims`\n * is true, the rank of the `tf.Tensor` is reduced by 1 for each entry in\n * `axes`. If `keepDims` is true, the reduced dimensions are retained with\n * length 1. If `axes` has no entries, all dimensions are reduced, and a\n * `tf.Tensor` with a single element is returned.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3]);\n *\n * x.prod().print(); // or tf.prod(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * const axis = 1;\n * x.prod(axis).print(); // or tf.prod(x, axis)\n * ```\n *\n * @param x The input tensor to compute the product over. If the dtype is `bool`\n * it will be converted to `int32` and the output dtype will be `int32`.\n * @param axis The dimension(s) to reduce. 
By default it reduces\n * all dimensions.\n * @param keepDims If true, retains reduced dimensions with size 1.\n *\n * @doc {heading: 'Operations', subheading: 'Reduction'}\n */\nfunction prod_(x, axis = null, keepDims = false) {\n let $x = convertToTensor(x, 'x', 'prod');\n if ($x.dtype === 'bool') {\n // bool is not an allowed type for the underlying kernel.\n $x = cast($x, 'int32');\n }\n const forward = (backend) => {\n const axes = parseAxisParam(axis, $x.shape);\n const permutation = getAxesPermutation(axes, $x.rank);\n let reductionAxes = axes;\n let permutedX = $x;\n if (permutation != null) {\n permutedX = transpose($x, permutation);\n reductionAxes = getInnerMostAxes(reductionAxes.length, $x.rank);\n }\n let value = backend.prod(permutedX, reductionAxes);\n if (keepDims) {\n const newShape = expandShapeToKeepDim(value.shape, axes);\n value = reshape(value, newShape);\n }\n return value;\n };\n const inputs = { x: $x };\n const attrs = { axis, keepDims };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Prod, attrs);\n}\nexport const prod = op({ prod_ });\n//# sourceMappingURL=prod.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { sizeFromShape } from '../util';\nimport { op } from './operation';\n/**\n * Creates a `tf.Tensor` with values sampled from a random number generator\n * function defined by the user.\n *\n * @param shape An array of integers defining the output tensor shape.\n * @param randFunction A random number generator function which is called\n * for each element in the output tensor.\n * @param dtype The data type of the output tensor. 
Defaults to 'float32'.\n */\nfunction rand_(shape, randFunction, dtype) {\n const size = sizeFromShape(shape);\n let values = null;\n if (dtype == null || dtype === 'float32') {\n values = new Float32Array(size);\n }\n else if (dtype === 'int32') {\n values = new Int32Array(size);\n }\n else if (dtype === 'bool') {\n values = new Uint8Array(size);\n }\n else {\n throw new Error(`Unknown data type ${dtype}`);\n }\n for (let i = 0; i < size; i++) {\n values[i] = randFunction();\n }\n return ENGINE.makeTensor(values, shape, dtype);\n}\nexport const rand = op({ rand_ });\n//# sourceMappingURL=rand.js.map", "// A port of an algorithm by Johannes Baag\u00F8e , 2010\n// http://baagoe.com/en/RandomMusings/javascript/\n// https://github.com/nquinlan/better-random-numbers-for-javascript-mirror\n// Original work is under MIT license -\n\n// Copyright (C) 2010 by Johannes Baag\u00F8e \n//\n// Permission is hereby granted, free of charge, to any person obtaining a copy\n// of this software and associated documentation files (the \"Software\"), to deal\n// in the Software without restriction, including without limitation the rights\n// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n// copies of the Software, and to permit persons to whom the Software is\n// furnished to do so, subject to the following conditions:\n// \n// The above copyright notice and this permission notice shall be included in\n// all copies or substantial portions of the Software.\n// \n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n// THE SOFTWARE.\n\n\n\n(function(global, module, define) {\n\nfunction Alea(seed) {\n var me = this, mash = Mash();\n\n me.next = function() {\n var t = 2091639 * me.s0 + me.c * 2.3283064365386963e-10; // 2^-32\n me.s0 = me.s1;\n me.s1 = me.s2;\n return me.s2 = t - (me.c = t | 0);\n };\n\n // Apply the seeding algorithm from Baagoe.\n me.c = 1;\n me.s0 = mash(' ');\n me.s1 = mash(' ');\n me.s2 = mash(' ');\n me.s0 -= mash(seed);\n if (me.s0 < 0) { me.s0 += 1; }\n me.s1 -= mash(seed);\n if (me.s1 < 0) { me.s1 += 1; }\n me.s2 -= mash(seed);\n if (me.s2 < 0) { me.s2 += 1; }\n mash = null;\n}\n\nfunction copy(f, t) {\n t.c = f.c;\n t.s0 = f.s0;\n t.s1 = f.s1;\n t.s2 = f.s2;\n return t;\n}\n\nfunction impl(seed, opts) {\n var xg = new Alea(seed),\n state = opts && opts.state,\n prng = xg.next;\n prng.int32 = function() { return (xg.next() * 0x100000000) | 0; }\n prng.double = function() {\n return prng() + (prng() * 0x200000 | 0) * 1.1102230246251565e-16; // 2^-53\n };\n prng.quick = prng;\n if (state) {\n if (typeof(state) == 'object') copy(state, xg);\n prng.state = function() { return copy(xg, {}); }\n }\n return prng;\n}\n\nfunction Mash() {\n var n = 0xefc8249d;\n\n var mash = function(data) {\n data = data.toString();\n for (var i = 0; i < data.length; i++) {\n n += data.charCodeAt(i);\n var h = 0.02519603282416938 * n;\n n = h >>> 0;\n h -= n;\n h *= n;\n n = h >>> 0;\n h -= n;\n n += h * 0x100000000; // 2^32\n }\n return (n >>> 0) * 2.3283064365386963e-10; // 2^-32\n };\n\n return mash;\n}\n\n\nif (module && module.exports) {\n module.exports = impl;\n} 
else if (define && define.amd) {\n define(function() { return impl; });\n} else {\n this.alea = impl;\n}\n\n})(\n this,\n (typeof module) == 'object' && module, // present in node.js\n (typeof define) == 'function' && define // present with an AMD loader\n);\n\n\n", "// A Javascript implementaion of the \"xor128\" prng algorithm by\n// George Marsaglia. See http://www.jstatsoft.org/v08/i14/paper\n\n(function(global, module, define) {\n\nfunction XorGen(seed) {\n var me = this, strseed = '';\n\n me.x = 0;\n me.y = 0;\n me.z = 0;\n me.w = 0;\n\n // Set up generator function.\n me.next = function() {\n var t = me.x ^ (me.x << 11);\n me.x = me.y;\n me.y = me.z;\n me.z = me.w;\n return me.w ^= (me.w >>> 19) ^ t ^ (t >>> 8);\n };\n\n if (seed === (seed | 0)) {\n // Integer seed.\n me.x = seed;\n } else {\n // String seed.\n strseed += seed;\n }\n\n // Mix in string seed, then discard an initial batch of 64 values.\n for (var k = 0; k < strseed.length + 64; k++) {\n me.x ^= strseed.charCodeAt(k) | 0;\n me.next();\n }\n}\n\nfunction copy(f, t) {\n t.x = f.x;\n t.y = f.y;\n t.z = f.z;\n t.w = f.w;\n return t;\n}\n\nfunction impl(seed, opts) {\n var xg = new XorGen(seed),\n state = opts && opts.state,\n prng = function() { return (xg.next() >>> 0) / 0x100000000; };\n prng.double = function() {\n do {\n var top = xg.next() >>> 11,\n bot = (xg.next() >>> 0) / 0x100000000,\n result = (top + bot) / (1 << 21);\n } while (result === 0);\n return result;\n };\n prng.int32 = xg.next;\n prng.quick = prng;\n if (state) {\n if (typeof(state) == 'object') copy(state, xg);\n prng.state = function() { return copy(xg, {}); }\n }\n return prng;\n}\n\nif (module && module.exports) {\n module.exports = impl;\n} else if (define && define.amd) {\n define(function() { return impl; });\n} else {\n this.xor128 = impl;\n}\n\n})(\n this,\n (typeof module) == 'object' && module, // present in node.js\n (typeof define) == 'function' && define // present with an AMD loader\n);\n\n\n", "// A Javascript implementaion of the \"xorwow\" prng algorithm by\n// George Marsaglia. 
See http://www.jstatsoft.org/v08/i14/paper\n\n(function(global, module, define) {\n\nfunction XorGen(seed) {\n var me = this, strseed = '';\n\n // Set up generator function.\n me.next = function() {\n var t = (me.x ^ (me.x >>> 2));\n me.x = me.y; me.y = me.z; me.z = me.w; me.w = me.v;\n return (me.d = (me.d + 362437 | 0)) +\n (me.v = (me.v ^ (me.v << 4)) ^ (t ^ (t << 1))) | 0;\n };\n\n me.x = 0;\n me.y = 0;\n me.z = 0;\n me.w = 0;\n me.v = 0;\n\n if (seed === (seed | 0)) {\n // Integer seed.\n me.x = seed;\n } else {\n // String seed.\n strseed += seed;\n }\n\n // Mix in string seed, then discard an initial batch of 64 values.\n for (var k = 0; k < strseed.length + 64; k++) {\n me.x ^= strseed.charCodeAt(k) | 0;\n if (k == strseed.length) {\n me.d = me.x << 10 ^ me.x >>> 4;\n }\n me.next();\n }\n}\n\nfunction copy(f, t) {\n t.x = f.x;\n t.y = f.y;\n t.z = f.z;\n t.w = f.w;\n t.v = f.v;\n t.d = f.d;\n return t;\n}\n\nfunction impl(seed, opts) {\n var xg = new XorGen(seed),\n state = opts && opts.state,\n prng = function() { return (xg.next() >>> 0) / 0x100000000; };\n prng.double = function() {\n do {\n var top = xg.next() >>> 11,\n bot = (xg.next() >>> 0) / 0x100000000,\n result = (top + bot) / (1 << 21);\n } while (result === 0);\n return result;\n };\n prng.int32 = xg.next;\n prng.quick = prng;\n if (state) {\n if (typeof(state) == 'object') copy(state, xg);\n prng.state = function() { return copy(xg, {}); }\n }\n return prng;\n}\n\nif (module && module.exports) {\n module.exports = impl;\n} else if (define && define.amd) {\n define(function() { return impl; });\n} else {\n this.xorwow = impl;\n}\n\n})(\n this,\n (typeof module) == 'object' && module, // present in node.js\n (typeof define) == 'function' && define // present with an AMD loader\n);\n\n\n", "// A Javascript implementaion of the \"xorshift7\" algorithm by\n// Fran\u00E7ois Panneton and Pierre L'ecuyer:\n// \"On the Xorgshift Random Number Generators\"\n// http://saluc.engr.uconn.edu/refs/crypto/rng/panneton05onthexorshift.pdf\n\n(function(global, module, define) {\n\nfunction XorGen(seed) {\n var me = this;\n\n // Set up generator function.\n me.next = function() {\n // Update xor generator.\n var X = me.x, i = me.i, t, v, w;\n t = X[i]; t ^= (t >>> 7); v = t ^ (t << 24);\n t = X[(i + 1) & 7]; v ^= t ^ (t >>> 10);\n t = X[(i + 3) & 7]; v ^= t ^ (t >>> 3);\n t = X[(i + 4) & 7]; v ^= t ^ (t << 7);\n t = X[(i + 7) & 7]; t = t ^ (t << 13); v ^= t ^ (t << 9);\n X[i] = v;\n me.i = (i + 1) & 7;\n return v;\n };\n\n function init(me, seed) {\n var j, w, X = [];\n\n if (seed === (seed | 0)) {\n // Seed state array using a 32-bit integer.\n w = X[0] = seed;\n } else {\n // Seed state using a string.\n seed = '' + seed;\n for (j = 0; j < seed.length; ++j) {\n X[j & 7] = (X[j & 7] << 15) ^\n (seed.charCodeAt(j) + X[(j + 1) & 7] << 13);\n }\n }\n // Enforce an array length of 8, not all zeroes.\n while (X.length < 8) X.push(0);\n for (j = 0; j < 8 && X[j] === 0; ++j);\n if (j == 8) w = X[7] = -1; else w = X[j];\n\n me.x = X;\n me.i = 0;\n\n // Discard an initial 256 values.\n for (j = 256; j > 0; --j) {\n me.next();\n }\n }\n\n init(me, seed);\n}\n\nfunction copy(f, t) {\n t.x = f.x.slice();\n t.i = f.i;\n return t;\n}\n\nfunction impl(seed, opts) {\n if (seed == null) seed = +(new Date);\n var xg = new XorGen(seed),\n state = opts && opts.state,\n prng = function() { return (xg.next() >>> 0) / 0x100000000; };\n prng.double = function() {\n do {\n var top = xg.next() >>> 11,\n bot = (xg.next() >>> 0) / 0x100000000,\n result = (top + 
bot) / (1 << 21);\n } while (result === 0);\n return result;\n };\n prng.int32 = xg.next;\n prng.quick = prng;\n if (state) {\n if (state.x) copy(state, xg);\n prng.state = function() { return copy(xg, {}); }\n }\n return prng;\n}\n\nif (module && module.exports) {\n module.exports = impl;\n} else if (define && define.amd) {\n define(function() { return impl; });\n} else {\n this.xorshift7 = impl;\n}\n\n})(\n this,\n (typeof module) == 'object' && module, // present in node.js\n (typeof define) == 'function' && define // present with an AMD loader\n);\n\n", "// A Javascript implementaion of Richard Brent's Xorgens xor4096 algorithm.\n//\n// This fast non-cryptographic random number generator is designed for\n// use in Monte-Carlo algorithms. It combines a long-period xorshift\n// generator with a Weyl generator, and it passes all common batteries\n// of stasticial tests for randomness while consuming only a few nanoseconds\n// for each prng generated. For background on the generator, see Brent's\n// paper: \"Some long-period random number generators using shifts and xors.\"\n// http://arxiv.org/pdf/1004.3115v1.pdf\n//\n// Usage:\n//\n// var xor4096 = require('xor4096');\n// random = xor4096(1); // Seed with int32 or string.\n// assert.equal(random(), 0.1520436450538547); // (0, 1) range, 53 bits.\n// assert.equal(random.int32(), 1806534897); // signed int32, 32 bits.\n//\n// For nonzero numeric keys, this impelementation provides a sequence\n// identical to that by Brent's xorgens 3 implementaion in C. This\n// implementation also provides for initalizing the generator with\n// string seeds, or for saving and restoring the state of the generator.\n//\n// On Chrome, this prng benchmarks about 2.1 times slower than\n// Javascript's built-in Math.random().\n\n(function(global, module, define) {\n\nfunction XorGen(seed) {\n var me = this;\n\n // Set up generator function.\n me.next = function() {\n var w = me.w,\n X = me.X, i = me.i, t, v;\n // Update Weyl generator.\n me.w = w = (w + 0x61c88647) | 0;\n // Update xor generator.\n v = X[(i + 34) & 127];\n t = X[i = ((i + 1) & 127)];\n v ^= v << 13;\n t ^= t << 17;\n v ^= v >>> 15;\n t ^= t >>> 12;\n // Update Xor generator array state.\n v = X[i] = v ^ t;\n me.i = i;\n // Result is the combination.\n return (v + (w ^ (w >>> 16))) | 0;\n };\n\n function init(me, seed) {\n var t, v, i, j, w, X = [], limit = 128;\n if (seed === (seed | 0)) {\n // Numeric seeds initialize v, which is used to generates X.\n v = seed;\n seed = null;\n } else {\n // String seeds are mixed into v and X one character at a time.\n seed = seed + '\\0';\n v = 0;\n limit = Math.max(limit, seed.length);\n }\n // Initialize circular array and weyl value.\n for (i = 0, j = -32; j < limit; ++j) {\n // Put the unicode characters into the array, and shuffle them.\n if (seed) v ^= seed.charCodeAt((j + 32) % seed.length);\n // After 32 shuffles, take v as the starting w value.\n if (j === 0) w = v;\n v ^= v << 10;\n v ^= v >>> 15;\n v ^= v << 4;\n v ^= v >>> 13;\n if (j >= 0) {\n w = (w + 0x61c88647) | 0; // Weyl.\n t = (X[j & 127] ^= (v + w)); // Combine xor and weyl to init array.\n i = (0 == t) ? i + 1 : 0; // Count zeroes.\n }\n }\n // We have detected all zeroes; make the key nonzero.\n if (i >= 128) {\n X[(seed && seed.length || 0) & 127] = -1;\n }\n // Run the generator 512 times to further mix the state before using it.\n // Factoring this as a function slows the main generator, so it is just\n // unrolled here. 
The weyl generator is not advanced while warming up.\n i = 127;\n for (j = 4 * 128; j > 0; --j) {\n v = X[(i + 34) & 127];\n t = X[i = ((i + 1) & 127)];\n v ^= v << 13;\n t ^= t << 17;\n v ^= v >>> 15;\n t ^= t >>> 12;\n X[i] = v ^ t;\n }\n // Storing state as object members is faster than using closure variables.\n me.w = w;\n me.X = X;\n me.i = i;\n }\n\n init(me, seed);\n}\n\nfunction copy(f, t) {\n t.i = f.i;\n t.w = f.w;\n t.X = f.X.slice();\n return t;\n};\n\nfunction impl(seed, opts) {\n if (seed == null) seed = +(new Date);\n var xg = new XorGen(seed),\n state = opts && opts.state,\n prng = function() { return (xg.next() >>> 0) / 0x100000000; };\n prng.double = function() {\n do {\n var top = xg.next() >>> 11,\n bot = (xg.next() >>> 0) / 0x100000000,\n result = (top + bot) / (1 << 21);\n } while (result === 0);\n return result;\n };\n prng.int32 = xg.next;\n prng.quick = prng;\n if (state) {\n if (state.X) copy(state, xg);\n prng.state = function() { return copy(xg, {}); }\n }\n return prng;\n}\n\nif (module && module.exports) {\n module.exports = impl;\n} else if (define && define.amd) {\n define(function() { return impl; });\n} else {\n this.xor4096 = impl;\n}\n\n})(\n this, // window object or global\n (typeof module) == 'object' && module, // present in node.js\n (typeof define) == 'function' && define // present with an AMD loader\n);\n", "// A Javascript implementaion of the \"Tyche-i\" prng algorithm by\n// Samuel Neves and Filipe Araujo.\n// See https://eden.dei.uc.pt/~sneves/pubs/2011-snfa2.pdf\n\n(function(global, module, define) {\n\nfunction XorGen(seed) {\n var me = this, strseed = '';\n\n // Set up generator function.\n me.next = function() {\n var b = me.b, c = me.c, d = me.d, a = me.a;\n b = (b << 25) ^ (b >>> 7) ^ c;\n c = (c - d) | 0;\n d = (d << 24) ^ (d >>> 8) ^ a;\n a = (a - b) | 0;\n me.b = b = (b << 20) ^ (b >>> 12) ^ c;\n me.c = c = (c - d) | 0;\n me.d = (d << 16) ^ (c >>> 16) ^ a;\n return me.a = (a - b) | 0;\n };\n\n /* The following is non-inverted tyche, which has better internal\n * bit diffusion, but which is about 25% slower than tyche-i in JS.\n me.next = function() {\n var a = me.a, b = me.b, c = me.c, d = me.d;\n a = (me.a + me.b | 0) >>> 0;\n d = me.d ^ a; d = d << 16 ^ d >>> 16;\n c = me.c + d | 0;\n b = me.b ^ c; b = b << 12 ^ d >>> 20;\n me.a = a = a + b | 0;\n d = d ^ a; me.d = d = d << 8 ^ d >>> 24;\n me.c = c = c + d | 0;\n b = b ^ c;\n return me.b = (b << 7 ^ b >>> 25);\n }\n */\n\n me.a = 0;\n me.b = 0;\n me.c = 2654435769 | 0;\n me.d = 1367130551;\n\n if (seed === Math.floor(seed)) {\n // Integer seed.\n me.a = (seed / 0x100000000) | 0;\n me.b = seed | 0;\n } else {\n // String seed.\n strseed += seed;\n }\n\n // Mix in string seed, then discard an initial batch of 64 values.\n for (var k = 0; k < strseed.length + 20; k++) {\n me.b ^= strseed.charCodeAt(k) | 0;\n me.next();\n }\n}\n\nfunction copy(f, t) {\n t.a = f.a;\n t.b = f.b;\n t.c = f.c;\n t.d = f.d;\n return t;\n};\n\nfunction impl(seed, opts) {\n var xg = new XorGen(seed),\n state = opts && opts.state,\n prng = function() { return (xg.next() >>> 0) / 0x100000000; };\n prng.double = function() {\n do {\n var top = xg.next() >>> 11,\n bot = (xg.next() >>> 0) / 0x100000000,\n result = (top + bot) / (1 << 21);\n } while (result === 0);\n return result;\n };\n prng.int32 = xg.next;\n prng.quick = prng;\n if (state) {\n if (typeof(state) == 'object') copy(state, xg);\n prng.state = function() { return copy(xg, {}); }\n }\n return prng;\n}\n\nif (module && module.exports) {\n 
module.exports = impl;\n} else if (define && define.amd) {\n define(function() { return impl; });\n} else {\n this.tychei = impl;\n}\n\n})(\n this,\n (typeof module) == 'object' && module, // present in node.js\n (typeof define) == 'function' && define // present with an AMD loader\n);\n\n\n", "/*\nCopyright 2014 David Bau.\n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n\"Software\"), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\nSOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n*/\n\n(function (pool, math) {\n//\n// The following constants are related to IEEE 754 limits.\n//\nvar global = this,\n width = 256, // each RC4 output is 0 <= x < 256\n chunks = 6, // at least six RC4 outputs for each double\n digits = 52, // there are 52 significant digits in a double\n rngname = 'random', // rngname: name for Math.random and Math.seedrandom\n startdenom = math.pow(width, chunks),\n significance = math.pow(2, digits),\n overflow = significance * 2,\n mask = width - 1,\n nodecrypto; // node.js crypto module, initialized at the bottom.\n\n//\n// seedrandom()\n// This is the seedrandom function described above.\n//\nfunction seedrandom(seed, options, callback) {\n var key = [];\n options = (options == true) ? { entropy: true } : (options || {});\n\n // Flatten the seed string or build one from local entropy if needed.\n var shortseed = mixkey(flatten(\n options.entropy ? [seed, tostring(pool)] :\n (seed == null) ? 
autoseed() : seed, 3), key);\n\n // Use the seed to initialize an ARC4 generator.\n var arc4 = new ARC4(key);\n\n // This function returns a random double in [0, 1) that contains\n // randomness in every bit of the mantissa of the IEEE 754 value.\n var prng = function() {\n var n = arc4.g(chunks), // Start with a numerator n < 2 ^ 48\n d = startdenom, // and denominator d = 2 ^ 48.\n x = 0; // and no 'extra last byte'.\n while (n < significance) { // Fill up all significant digits by\n n = (n + x) * width; // shifting numerator and\n d *= width; // denominator and generating a\n x = arc4.g(1); // new least-significant-byte.\n }\n while (n >= overflow) { // To avoid rounding up, before adding\n n /= 2; // last byte, shift everything\n d /= 2; // right using integer math until\n x >>>= 1; // we have exactly the desired bits.\n }\n return (n + x) / d; // Form the number within [0, 1).\n };\n\n prng.int32 = function() { return arc4.g(4) | 0; }\n prng.quick = function() { return arc4.g(4) / 0x100000000; }\n prng.double = prng;\n\n // Mix the randomness into accumulated entropy.\n mixkey(tostring(arc4.S), pool);\n\n // Calling convention: what to return as a function of prng, seed, is_math.\n return (options.pass || callback ||\n function(prng, seed, is_math_call, state) {\n if (state) {\n // Load the arc4 state from the given state if it has an S array.\n if (state.S) { copy(state, arc4); }\n // Only provide the .state method if requested via options.state.\n prng.state = function() { return copy(arc4, {}); }\n }\n\n // If called as a method of Math (Math.seedrandom()), mutate\n // Math.random because that is how seedrandom.js has worked since v1.0.\n if (is_math_call) { math[rngname] = prng; return seed; }\n\n // Otherwise, it is a newer calling convention, so return the\n // prng directly.\n else return prng;\n })(\n prng,\n shortseed,\n 'global' in options ? options.global : (this == math),\n options.state);\n}\nmath['seed' + rngname] = seedrandom;\n\n//\n// ARC4\n//\n// An ARC4 implementation. The constructor takes a key in the form of\n// an array of at most (width) integers that should be 0 <= x < (width).\n//\n// The g(count) method returns a pseudorandom integer that concatenates\n// the next (count) outputs from ARC4. Its return value is a number x\n// that is in the range 0 <= x < (width ^ count).\n//\nfunction ARC4(key) {\n var t, keylen = key.length,\n me = this, i = 0, j = me.i = me.j = 0, s = me.S = [];\n\n // The empty key [] is treated as [0].\n if (!keylen) { key = [keylen++]; }\n\n // Set up S using the standard key scheduling algorithm.\n while (i < width) {\n s[i] = i++;\n }\n for (i = 0; i < width; i++) {\n s[i] = s[j = mask & (j + key[i % keylen] + (t = s[i]))];\n s[j] = t;\n }\n\n // The \"g\" method returns the next (count) outputs as one number.\n (me.g = function(count) {\n // Using instance members instead of closure state nearly doubles speed.\n var t, r = 0,\n i = me.i, j = me.j, s = me.S;\n while (count--) {\n t = s[i = mask & (i + 1)];\n r = r * width + s[mask & ((s[i] = s[j = mask & (j + t)]) + (s[j] = t))];\n }\n me.i = i; me.j = j;\n return r;\n // For robust unpredictability, the function call below automatically\n // discards an initial batch of values. 
This is called RC4-drop[256].\n // See http://google.com/search?q=rsa+fluhrer+response&btnI\n })(width);\n}\n\n//\n// copy()\n// Copies internal state of ARC4 to or from a plain object.\n//\nfunction copy(f, t) {\n t.i = f.i;\n t.j = f.j;\n t.S = f.S.slice();\n return t;\n};\n\n//\n// flatten()\n// Converts an object tree to nested arrays of strings.\n//\nfunction flatten(obj, depth) {\n var result = [], typ = (typeof obj), prop;\n if (depth && typ == 'object') {\n for (prop in obj) {\n try { result.push(flatten(obj[prop], depth - 1)); } catch (e) {}\n }\n }\n return (result.length ? result : typ == 'string' ? obj : obj + '\\0');\n}\n\n//\n// mixkey()\n// Mixes a string seed into a key that is an array of integers, and\n// returns a shortened string seed that is equivalent to the result key.\n//\nfunction mixkey(seed, key) {\n var stringseed = seed + '', smear, j = 0;\n while (j < stringseed.length) {\n key[mask & j] =\n mask & ((smear ^= key[mask & j] * 19) + stringseed.charCodeAt(j++));\n }\n return tostring(key);\n}\n\n//\n// autoseed()\n// Returns an object for autoseeding, using window.crypto and Node crypto\n// module if available.\n//\nfunction autoseed() {\n try {\n var out;\n if (nodecrypto && (out = nodecrypto.randomBytes)) {\n // The use of 'out' to remember randomBytes makes tight minified code.\n out = out(width);\n } else {\n out = new Uint8Array(width);\n (global.crypto || global.msCrypto).getRandomValues(out);\n }\n return tostring(out);\n } catch (e) {\n var browser = global.navigator,\n plugins = browser && browser.plugins;\n return [+new Date, global, plugins, global.screen, tostring(pool)];\n }\n}\n\n//\n// tostring()\n// Converts an array of charcodes to a string\n//\nfunction tostring(a) {\n return String.fromCharCode.apply(0, a);\n}\n\n//\n// When seedrandom.js is loaded, we immediately mix a few bits\n// from the built-in RNG into the entropy pool. Because we do\n// not want to interfere with deterministic PRNG state later,\n// seedrandom will not call math.random on its own again after\n// initialization.\n//\nmixkey(math.random(), pool);\n\n//\n// Nodejs and AMD support: export the implementation as a module using\n// either convention.\n//\nif ((typeof module) == 'object' && module.exports) {\n module.exports = seedrandom;\n // When in node.js, try using crypto package for autoseeding.\n try {\n nodecrypto = require('crypto');\n } catch (ex) {}\n} else if ((typeof define) == 'function' && define.amd) {\n define(function() { return seedrandom; });\n}\n\n// End anonymous scope, and pass initial values.\n})(\n [], // pool: entropy pool starts empty\n Math // math: package containing random, pow, and seedrandom\n);\n", "// A library of seedable RNGs implemented in Javascript.\n//\n// Usage:\n//\n// var seedrandom = require('seedrandom');\n// var random = seedrandom(1); // or any seed.\n// var x = random(); // 0 <= x < 1. Every bit is random.\n// var x = random.quick(); // 0 <= x < 1. 
32 bits of randomness.\n\n// alea, a 53-bit multiply-with-carry generator by Johannes Baag\u00F8e.\n// Period: ~2^116\n// Reported to pass all BigCrush tests.\nvar alea = require('./lib/alea');\n\n// xor128, a pure xor-shift generator by George Marsaglia.\n// Period: 2^128-1.\n// Reported to fail: MatrixRank and LinearComp.\nvar xor128 = require('./lib/xor128');\n\n// xorwow, George Marsaglia's 160-bit xor-shift combined plus weyl.\n// Period: 2^192-2^32\n// Reported to fail: CollisionOver, SimpPoker, and LinearComp.\nvar xorwow = require('./lib/xorwow');\n\n// xorshift7, by Fran\u00E7ois Panneton and Pierre L'ecuyer, takes\n// a different approach: it adds robustness by allowing more shifts\n// than Marsaglia's original three. It is a 7-shift generator\n// with 256 bits, that passes BigCrush with no systmatic failures.\n// Period 2^256-1.\n// No systematic BigCrush failures reported.\nvar xorshift7 = require('./lib/xorshift7');\n\n// xor4096, by Richard Brent, is a 4096-bit xor-shift with a\n// very long period that also adds a Weyl generator. It also passes\n// BigCrush with no systematic failures. Its long period may\n// be useful if you have many generators and need to avoid\n// collisions.\n// Period: 2^4128-2^32.\n// No systematic BigCrush failures reported.\nvar xor4096 = require('./lib/xor4096');\n\n// Tyche-i, by Samuel Neves and Filipe Araujo, is a bit-shifting random\n// number generator derived from ChaCha, a modern stream cipher.\n// https://eden.dei.uc.pt/~sneves/pubs/2011-snfa2.pdf\n// Period: ~2^127\n// No systematic BigCrush failures reported.\nvar tychei = require('./lib/tychei');\n\n// The original ARC4-based prng included in this library.\n// Period: ~2^1600\nvar sr = require('./seedrandom');\n\nsr.alea = alea;\nsr.xor128 = xor128;\nsr.xorwow = xorwow;\nsr.xorshift7 = xorshift7;\nsr.xor4096 = xor4096;\nsr.tychei = tychei;\n\nmodule.exports = sr;\n", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as seedrandom from 'seedrandom';\nimport { expectNumbersClose, testEpsilon } from '../test_util';\n// https://en.wikipedia.org/wiki/Marsaglia_polar_method\nexport class MPRandGauss {\n constructor(mean, stdDeviation, dtype, truncated, seed) {\n this.mean = mean;\n this.stdDev = stdDeviation;\n this.dtype = dtype;\n this.nextVal = NaN;\n this.truncated = truncated;\n if (this.truncated) {\n this.upper = this.mean + this.stdDev * 2;\n this.lower = this.mean - this.stdDev * 2;\n }\n const seedValue = seed ? seed : Math.random();\n this.random = seedrandom.alea(seedValue.toString());\n }\n /** Returns next sample from a Gaussian distribution. 
*/\n nextValue() {\n if (!isNaN(this.nextVal)) {\n const value = this.nextVal;\n this.nextVal = NaN;\n return value;\n }\n let resultX, resultY;\n let isValid = false;\n while (!isValid) {\n let v1, v2, s;\n do {\n v1 = 2 * this.random() - 1;\n v2 = 2 * this.random() - 1;\n s = v1 * v1 + v2 * v2;\n } while (s >= 1 || s === 0);\n const mul = Math.sqrt(-2.0 * Math.log(s) / s);\n resultX = this.mean + this.stdDev * v1 * mul;\n resultY = this.mean + this.stdDev * v2 * mul;\n if (!this.truncated || this.isValidTruncated(resultX)) {\n isValid = true;\n }\n }\n if (!this.truncated || this.isValidTruncated(resultY)) {\n this.nextVal = this.convertValue(resultY);\n }\n return this.convertValue(resultX);\n }\n /** Handles proper rounding for non-floating-point numbers. */\n convertValue(value) {\n if (this.dtype == null || this.dtype === 'float32') {\n return value;\n }\n return Math.round(value);\n }\n /** Returns true if less than 2-standard-deviations from the mean. */\n isValidTruncated(value) {\n return value <= this.upper && value >= this.lower;\n }\n}\n// Marsaglia, George, and Wai Wan Tsang. 2000. \"A Simple Method for Generating\n// Gamma Variables.\"\nexport class RandGamma {\n constructor(alpha, beta, dtype, seed) {\n this.alpha = alpha;\n this.beta = 1 / beta; // convert rate to scale parameter\n this.dtype = dtype;\n const seedValue = seed ? seed : Math.random();\n this.randu = seedrandom.alea(seedValue.toString());\n this.randn = new MPRandGauss(0, 1, dtype, false, this.randu());\n if (alpha < 1) {\n this.d = alpha + (2 / 3);\n }\n else {\n this.d = alpha - (1 / 3);\n }\n this.c = 1 / Math.sqrt(9 * this.d);\n }\n /** Returns next sample from a gamma distribution. */\n nextValue() {\n let x2, v0, v1, x, u, v;\n while (true) {\n do {\n x = this.randn.nextValue();\n v = 1 + (this.c * x);\n } while (v <= 0);\n v *= v * v;\n x2 = x * x;\n v0 = 1 - (0.331 * x2 * x2);\n v1 = (0.5 * x2) + (this.d * (1 - v + Math.log(v)));\n u = this.randu();\n if (u < v0 || Math.log(u) < v1) {\n break;\n }\n }\n v = (1 / this.beta) * this.d * v;\n if (this.alpha < 1) {\n v *= Math.pow(this.randu(), 1 / this.alpha);\n }\n return this.convertValue(v);\n }\n /** Handles proper rounding for non-floating-point numbers. */\n convertValue(value) {\n if (this.dtype === 'float32') {\n return value;\n }\n return Math.round(value);\n }\n}\nexport class UniformRandom {\n constructor(min = 0, max = 1, dtype, seed) {\n /** Handles proper rounding for non floating point numbers. 
*/\n this.canReturnFloat = () => (this.dtype == null || this.dtype === 'float32');\n this.min = min;\n this.range = max - min;\n this.dtype = dtype;\n if (seed == null) {\n seed = Math.random();\n }\n if (typeof seed === 'number') {\n seed = seed.toString();\n }\n if (!this.canReturnFloat() && this.range <= 1) {\n throw new Error(`The difference between ${min} - ${max} <= 1 and dtype is not float`);\n }\n this.random = seedrandom.alea(seed);\n }\n convertValue(value) {\n if (this.canReturnFloat()) {\n return value;\n }\n return Math.round(value);\n }\n nextValue() {\n return this.convertValue(this.min + this.range * this.random());\n }\n}\nexport function jarqueBeraNormalityTest(values) {\n // https://en.wikipedia.org/wiki/Jarque%E2%80%93Bera_test\n const n = values.length;\n const s = skewness(values);\n const k = kurtosis(values);\n const jb = n / 6 * (Math.pow(s, 2) + 0.25 * Math.pow(k - 3, 2));\n // JB test requires 2-degress of freedom from Chi-Square @ 0.95:\n // http://www.itl.nist.gov/div898/handbook/eda/section3/eda3674.htm\n const CHI_SQUARE_2DEG = 5.991;\n if (jb > CHI_SQUARE_2DEG) {\n throw new Error(`Invalid p-value for JB: ${jb}`);\n }\n}\nexport function expectArrayInMeanStdRange(actual, expectedMean, expectedStdDev, epsilon) {\n if (epsilon == null) {\n epsilon = testEpsilon();\n }\n const actualMean = mean(actual);\n expectNumbersClose(actualMean, expectedMean, epsilon);\n expectNumbersClose(standardDeviation(actual, actualMean), expectedStdDev, epsilon);\n}\nfunction mean(values) {\n let sum = 0;\n for (let i = 0; i < values.length; i++) {\n sum += values[i];\n }\n return sum / values.length;\n}\nfunction standardDeviation(values, mean) {\n let squareDiffSum = 0;\n for (let i = 0; i < values.length; i++) {\n const diff = values[i] - mean;\n squareDiffSum += diff * diff;\n }\n return Math.sqrt(squareDiffSum / values.length);\n}\nfunction kurtosis(values) {\n // https://en.wikipedia.org/wiki/Kurtosis\n const valuesMean = mean(values);\n const n = values.length;\n let sum2 = 0;\n let sum4 = 0;\n for (let i = 0; i < n; i++) {\n const v = values[i] - valuesMean;\n sum2 += Math.pow(v, 2);\n sum4 += Math.pow(v, 4);\n }\n return (1 / n) * sum4 / Math.pow((1 / n) * sum2, 2);\n}\nfunction skewness(values) {\n // https://en.wikipedia.org/wiki/Skewness\n const valuesMean = mean(values);\n const n = values.length;\n let sum2 = 0;\n let sum3 = 0;\n for (let i = 0; i < n; i++) {\n const v = values[i] - valuesMean;\n sum2 += Math.pow(v, 2);\n sum3 += Math.pow(v, 3);\n }\n return (1 / n) * sum3 / Math.pow((1 / (n - 1)) * sum2, 3 / 2);\n}\n//# sourceMappingURL=rand_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { buffer } from './buffer';\nimport { op } from './operation';\nimport { RandGamma } from './rand_util';\n/**\n * Creates a `tf.Tensor` with values sampled from a gamma distribution.\n *\n * ```js\n * tf.randomGamma([2, 2], 1).print();\n * ```\n *\n * @param shape An array of integers defining the output tensor shape.\n * @param alpha The shape parameter of the gamma distribution.\n * @param beta The inverse scale parameter of the gamma distribution. Defaults\n * to 1.\n * @param dtype The data type of the output. Defaults to float32.\n * @param seed The seed for the random number generator.\n *\n * @doc {heading: 'Tensors', subheading: 'Random'}\n */\nfunction randomGamma_(shape, alpha, beta = 1, dtype = 'float32', seed) {\n if (beta == null) {\n beta = 1;\n }\n if (dtype == null) {\n dtype = 'float32';\n }\n if (dtype !== 'float32' && dtype !== 'int32') {\n throw new Error(`Unsupported data type ${dtype}`);\n }\n const rgamma = new RandGamma(alpha, beta, dtype, seed);\n const res = buffer(shape, dtype);\n for (let i = 0; i < res.values.length; i++) {\n res.values[i] = rgamma.nextValue();\n }\n return res.toTensor();\n}\nexport const randomGamma = op({ randomGamma_ });\n//# sourceMappingURL=random_gamma.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { buffer } from './buffer';\nimport { op } from './operation';\nimport { MPRandGauss } from './rand_util';\n/**\n * Creates a `tf.Tensor` with values sampled from a normal distribution.\n *\n * ```js\n * tf.randomNormal([2, 2]).print();\n * ```\n *\n * @param shape An array of integers defining the output tensor shape.\n * @param mean The mean of the normal distribution.\n * @param stdDev The standard deviation of the normal distribution.\n * @param dtype The data type of the output.\n * @param seed The seed for the random number generator.\n *\n * @doc {heading: 'Tensors', subheading: 'Random'}\n */\nfunction randomNormal_(shape, mean = 0, stdDev = 1, dtype, seed) {\n if (dtype != null && dtype === 'bool') {\n throw new Error(`Unsupported data type ${dtype}`);\n }\n const randGauss = new MPRandGauss(mean, stdDev, dtype, false /* truncated */, seed);\n const res = buffer(shape, dtype);\n for (let i = 0; i < res.values.length; i++) {\n res.values[i] = randGauss.nextValue();\n }\n return res.toTensor();\n}\nexport const randomNormal = op({ randomNormal_ });\n//# sourceMappingURL=random_normal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { buffer } from './buffer';\nimport { op } from './operation';\nimport { UniformRandom } from './rand_util';\n/**\n * Creates a `tf.Tensor` with values sampled from a uniform distribution.\n *\n * The generated values follow a uniform distribution in the range [minval,\n * maxval). The lower bound minval is included in the range, while the upper\n * bound maxval is excluded.\n *\n * ```js\n * tf.randomUniform([2, 2]).print();\n * ```\n *\n * @param shape An array of integers defining the output tensor shape.\n * @param minval The lower bound on the range of random values to generate.\n * Defaults to 0.\n * @param maxval The upper bound on the range of random values to generate.\n * Defaults to 1.\n * @param dtype The data type of the output tensor. 
Defaults to 'float32'.\n *\n * @doc {heading: 'Tensors', subheading: 'Random'}\n */\nfunction randomUniform_(shape, minval = 0, maxval = 1, dtype = 'float32', seed) {\n const res = buffer(shape, dtype);\n const random = new UniformRandom(minval, maxval, null, seed);\n for (let i = 0; i < res.values.length; i++) {\n res.values[i] = random.nextValue();\n }\n return res.toTensor();\n}\nexport const randomUniform = op({ randomUniform_ });\n//# sourceMappingURL=random_uniform.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { inferShape } from '../tensor_util_env';\nimport { assertNonNull } from '../util';\nimport { makeTensor } from './tensor_ops_util';\n/**\n * Creates rank-1 `tf.Tensor` with the provided values, shape and dtype.\n *\n * The same functionality can be achieved with `tf.tensor`, but in general\n * we recommend using `tf.tensor1d` as it makes the code more readable.\n *\n * ```js\n * tf.tensor1d([1, 2, 3]).print();\n * ```\n *\n * @param values The values of the tensor. Can be array of numbers,\n * or a `TypedArray`.\n * @param dtype The data type.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function tensor1d(values, dtype) {\n assertNonNull(values);\n const inferredShape = inferShape(values, dtype);\n if (inferredShape.length !== 1) {\n throw new Error('tensor1d() requires values to be a flat/TypedArray');\n }\n const shape = null;\n return makeTensor(values, shape, inferredShape, dtype);\n}\n//# sourceMappingURL=tensor1d.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Range } from '../kernel_names';\nimport { makeZerosTypedArray } from '../util';\nimport { tensor1d } from './tensor1d';\nimport { zeros } from './zeros';\n/**\n * Creates a new `tf.Tensor1D` filled with the numbers in the range provided.\n *\n * The tensor is a is half-open interval meaning it includes start, but\n * excludes stop. 
Decrementing ranges and negative step values are also\n * supported.sv\n *\n *\n * ```js\n * tf.range(0, 9, 2).print();\n * ```\n *\n * @param start An integer start value\n * @param stop An integer stop value\n * @param step An integer increment (will default to 1 or -1)\n * @param dtype The data type of the output tensor. Defaults to 'float32'.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function range(start, stop, step = 1, dtype = 'float32') {\n if (step === 0) {\n throw new Error('Cannot have a step of zero');\n }\n const forward = () => {\n const sameStartStop = start === stop;\n const increasingRangeNegativeStep = start < stop && step < 0;\n const decreasingRangePositiveStep = stop < start && step > 1;\n if (sameStartStop || increasingRangeNegativeStep ||\n decreasingRangePositiveStep) {\n return zeros([0], dtype);\n }\n const numElements = Math.abs(Math.ceil((stop - start) / step));\n const values = makeZerosTypedArray(numElements, dtype);\n if (stop < start && step === 1) {\n // Auto adjust the step's sign if it hasn't been set\n // (or was set to 1)\n step = -1;\n }\n values[0] = start;\n for (let i = 1; i < values.length; i++) {\n values[i] = values[i - 1] + step;\n }\n return tensor1d(values, dtype);\n };\n const attrs = { start, stop, step, dtype };\n return ENGINE.runKernelFunc(forward, {} /* inputs */, null /* grad */, Range, attrs);\n}\n//# sourceMappingURL=range.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Reciprocal } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes reciprocal of x element-wise: `1 / x`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, 2]);\n *\n * x.reciprocal().print(); // or tf.reciprocal(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction reciprocal_(x) {\n const $x = convertToTensor(x, 'x', 'reciprocal');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.reciprocal($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Reciprocal);\n}\nexport const reciprocal = op({ reciprocal_ });\n//# sourceMappingURL=reciprocal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Relu } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\nimport { cast } from './cast';\n/**\n * Computes rectified linear element-wise: `max(x, 0)`.\n *\n * ```js\n * const x = tf.tensor1d([-1, 2, -3, 4]);\n *\n * x.relu().print(); // or tf.relu(x)\n * ```\n * @param x The input tensor. If the dtype is `bool`, the output dtype will be\n * `int32'.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction relu_(x) {\n const $x = convertToTensor(x, 'x', 'relu');\n const forward = (backend, save) => {\n save([$x]);\n if ($x.dtype === 'bool') {\n return cast($x, 'int32');\n }\n return backend.relu($x);\n };\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Relu);\n}\nexport const relu = op({ relu_ });\n//# sourceMappingURL=relu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Relu6 } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { cast } from './cast';\nimport { op } from './operation';\n/**\n * Computes rectified linear 6 element-wise: `min(max(x, 0), 6)`.\n *\n * ```js\n * const x = tf.tensor1d([-1, 2, -3, 8]);\n *\n * x.relu6().print(); // or tf.relu6(x)\n * ```\n * @param x The input tensor. If the dtype is `bool`, the output dtype will be\n * `int32'.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction relu6_(x) {\n const $x = convertToTensor(x, 'x', 'relu6');\n const forward = (backend, save) => {\n save([$x]);\n if ($x.dtype === 'bool') {\n return cast($x, 'int32');\n }\n return backend.relu6($x);\n };\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Relu6);\n}\nexport const relu6 = op({ relu6_ });\n//# sourceMappingURL=relu6.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Reverse } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam } from '../util';\nimport { clone } from './clone';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Reverses a `tf.Tensor` along a specified axis.\n *\n * Also available are stricter rank-specific methods that assert that `x` is\n * of the given rank:\n * - `tf.reverse1d`\n * - `tf.reverse2d`\n * - `tf.reverse3d`\n * - `tf.reverse4d`\n *\n * Except `tf.reverse1d` (which does not have axis param), all methods have\n * same signature as this method.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4]);\n *\n * x.reverse().print();\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * const axis = 1;\n * x.reverse(axis).print();\n * ```\n * @param x The input tensor to be reversed.\n * @param axis The set of dimensions to reverse. Must be in the\n * range [-rank(x), rank(x)). Defaults to all axes.\n *\n * @doc {heading: 'Tensors', subheading: 'Slicing and Joining'}\n */\nfunction reverse_(x, axis) {\n const $x = convertToTensor(x, 'x', 'reverse');\n const forward = (backend) => {\n const axes = parseAxisParam(axis, $x.shape);\n if ($x.rank === 0) {\n return clone($x);\n }\n const res = backend.reverse($x, axes);\n return reshape(res, $x.shape);\n };\n const inputs = { x: $x };\n const attrs = { dims: axis };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, Reverse, attrs);\n}\nexport const reverse = op({ reverse_ });\n//# sourceMappingURL=reverse.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\nimport { reverse } from './reverse';\n/**\n * Reverses a `tf.Tensor1D`.\n *\n * @param x The input tensor.\n */\nfunction reverse1d_(x) {\n const $x = convertToTensor(x, 'x', 'reverse');\n util.assert($x.rank === 1, () => `Error in reverse1D: x must be rank 1 but got rank ${$x.rank}.`);\n return reverse($x, 0);\n}\nexport const reverse1d = op({ reverse1d_ });\n//# sourceMappingURL=reverse_1d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\nimport { reverse } from './reverse';\n/**\n * Reverses a `tf.Tensor2D` along a specified axis.\n *\n * @param x The input tensor.\n * @param axis The set of dimensions to reverse. Must be in the\n * range [-rank(x), rank(x)). Defaults to all axes.\n */\nfunction reverse2d_(x, axis) {\n const $x = convertToTensor(x, 'x', 'reverse');\n util.assert($x.rank === 2, () => `Error in reverse2D: x must be rank 2 but got rank ${$x.rank}.`);\n return reverse($x, axis);\n}\nexport const reverse2d = op({ reverse2d_ });\n//# sourceMappingURL=reverse_2d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\nimport { reverse } from './reverse';\n/**\n * Reverses a `tf.Tensor3D` along a specified axis.\n *\n * @param x The input tensor.\n * @param axis The set of dimensions to reverse. Must be in the\n * range [-rank(x), rank(x)). Defaults to all axes.\n */\nfunction reverse3d_(x, axis) {\n const $x = convertToTensor(x, 'x', 'reverse');\n util.assert($x.rank === 3, () => `Error in reverse3D: x must be rank 3 but got rank ${$x.rank}.`);\n return reverse($x, axis);\n}\nexport const reverse3d = op({ reverse3d_ });\n//# sourceMappingURL=reverse_3d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\nimport { reverse } from './reverse';\n/**\n * Reverses a `tf.Tensor4D` along a specified axis.\n *\n * @param x The input tensor.\n * @param axis The set of dimensions to reverse. Must be in the\n * range [-rank(x), rank(x)). Defaults to all axes.\n */\nfunction reverse4d_(x, axis) {\n const $x = convertToTensor(x, 'x', 'reverse');\n util.assert($x.rank === 4, () => `Error in reverse4D: x must be rank 4 but got rank ${$x.rank}.`);\n return reverse($x, axis);\n}\nexport const reverse4d = op({ reverse4d_ });\n//# sourceMappingURL=reverse_4d.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Round } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes round of input `tf.Tensor` element-wise: `round(x)`.\n * It implements banker's rounding.\n *\n * ```js\n * const x = tf.tensor1d([.6, 1.1, -3.3]);\n *\n * x.round().print(); // or tf.round(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction round_(x) {\n const $x = convertToTensor(x, 'x', 'round');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend) => backend.round($x), inputs, null /* grad */, Round);\n}\nexport const round = op({ round_ });\n//# sourceMappingURL=round.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Rsqrt } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes reciprocal of square root of the input `tf.Tensor` element-wise:\n * `y = 1 / sqrt(x)`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 4, -1]);\n *\n * x.rsqrt().print(); // or tf.rsqrt(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction rsqrt_(x) {\n const $x = convertToTensor(x, 'x', 'rsqrt');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.rsqrt($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Rsqrt);\n}\nexport const rsqrt = op({ rsqrt_ });\n//# sourceMappingURL=rsqrt.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Selu } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes scaled exponential linear element-wise.\n *\n * `x < 0 ? scale * alpha * (exp(x) - 1) : x`\n *\n * ```js\n * const x = tf.tensor1d([-1, 2, -3, 4]);\n *\n * x.selu().print(); // or tf.selu(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction selu_(x) {\n const $x = convertToTensor(x, 'x', 'selu');\n const forward = (backend, save) => {\n const res = backend.selu($x);\n save([$x]);\n return res;\n };\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Selu);\n}\nexport const selu = op({ selu_ });\n//# sourceMappingURL=selu.js.map", "import { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { conv2d } from './conv2d';\nimport { depthwiseConv2d } from './depthwise_conv2d';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * 2-D convolution with separable filters.\n *\n * Performs a depthwise convolution that acts separately on channels followed\n * by a pointwise convolution that mixes channels. Note that this is\n * separability between dimensions [1, 2] and 3, not spatial separability\n * between dimensions 1 and 2.\n *\n * See\n * [https://www.tensorflow.org/api_docs/python/tf/nn/separable_conv2d](\n * https://www.tensorflow.org/api_docs/python/tf/nn/separable_conv2d)\n * for more details.\n *\n * @param x The input tensor, of rank 4 or rank 3, of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is\n * assumed.\n * @param depthwiseFilter The depthwise filter tensor, rank 4, of shape\n * `[filterHeight, filterWidth, inChannels, channelMultiplier]`. This is\n * the filter used in the first step.\n * @param pointwiseFilter The pointwise filter tensor, rank 4, of shape\n * `[1, 1, inChannels * channelMultiplier, outChannels]`. This is\n * the filter used in the second step.\n * @param strides The strides of the convolution: `[strideHeight,\n * strideWidth]`. If strides is a single number, then `strideHeight ==\n * strideWidth`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dilations The dilation rates: `[dilationHeight, dilationWidth]`\n * in which we sample input values across the height and width dimensions\n * in atrous convolution. Defaults to `[1, 1]`. 
If `rate` is a single\n * number, then `dilationHeight == dilationWidth`. If it is greater than\n * 1, then all values of `strides` must be 1.\n * @param dataFormat: An optional string from: \"NHWC\", \"NCHW\". Defaults to\n * \"NHWC\". Specify the data format of the input and output data. With the\n * default format \"NHWC\", the data is stored in the order of: [batch,\n * height, width, channels]. Only \"NHWC\" is currently supported.\n *\n * @doc {heading: 'Operations', subheading: 'Convolution'}\n */\nfunction separableConv2d_(x, depthwiseFilter, pointwiseFilter, strides, pad, dilation = [1, 1], dataFormat = 'NHWC') {\n const $x = convertToTensor(x, 'x', 'separableConv2d');\n const $depthwiseFilter = convertToTensor(depthwiseFilter, 'depthwiseFilter', 'separableConv2d');\n const $pointwiseFilter = convertToTensor(pointwiseFilter, 'pointwiseFilter', 'separableConv2d');\n let x4D = $x;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n reshapedTo4D = true;\n x4D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2]]);\n }\n if (dataFormat === 'NCHW') {\n throw new Error('separableConv2d currently does not support dataFormat NCHW; only ' +\n 'NHWC is supported');\n }\n util.assert(x4D.rank === 4, () => `Error in separableConv2d: input must be rank 4, but got ` +\n `rank ${x4D.rank}.`);\n util.assert($depthwiseFilter.rank === 4, () => `Error in separableConv2d: depthwise filter must be rank 4, but ` +\n `got rank ${$depthwiseFilter.rank}.`);\n util.assert($pointwiseFilter.rank === 4, () => `Error in separableConv2d: pointwise filter must be rank 4, but ` +\n `got rank ${$depthwiseFilter.rank}.`);\n util.assert($pointwiseFilter.shape[0] === 1, () => `Error in separableConv2d: the first dimension of pointwise filter ` +\n ` must be 1, but got ${$pointwiseFilter.shape[0]}.`);\n util.assert($pointwiseFilter.shape[1] === 1, () => `Error in separableConv2d: the second dimension of pointwise ` +\n `filter must be 1, but got ${$pointwiseFilter.shape[1]}.`);\n const inChannels = $depthwiseFilter.shape[2];\n const channelMultiplier = $depthwiseFilter.shape[3];\n util.assert($pointwiseFilter.shape[2] === inChannels * channelMultiplier, () => `Error in separableConv2d: the third dimension of pointwise filter ` +\n `must be ${inChannels * channelMultiplier}, ` +\n `but got ${$pointwiseFilter.shape[2]}.`);\n const depthwise = depthwiseConv2d(x4D, $depthwiseFilter, strides, pad, dataFormat, dilation);\n const pointwiseStride = 1;\n const res = conv2d(depthwise, $pointwiseFilter, pointwiseStride, 'valid', dataFormat);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const separableConv2d = op({ separableConv2d_ });\n//# sourceMappingURL=separable_conv2d.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { TensorBuffer } from '../tensor';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\n/**\n * Computes the difference between two lists of numbers.\n *\n * Given a Tensor `x` and a Tensor `y`, this operation returns a Tensor `out`\n * that represents all values that are in `x` but not in `y`. The returned\n * Tensor `out` is sorted in the same order that the numbers appear in `x`\n * (duplicates are preserved). This operation also returns a Tensor indices that\n * represents the position of each out element in `x`. In other words:\n *\n * `out[i] = x[idx[i]] for i in [0, 1, ..., out.length - 1]`\n *\n * ```js\n * const x = [1, 2, 3, 4, 5, 6];\n * const y = [1, 3, 5];\n *\n * const [out, indices] = await tf.setdiff1dAsync(x, y);\n * out.print(); // [2, 4, 6]\n * indices.print(); // [1, 3, 5]\n * ```\n *\n * @param x 1-D Tensor. Values to keep.\n * @param y 1-D Tensor. Must have the same type as x. Values to exclude in the\n * output.\n * @returns Promise of Tensor tuple [out, indices].\n * out: Tensor with the same type as x.\n * indices: A Tensor of type int32.\n *\n * @doc {heading: 'Tensors', subheading: 'Transformations'}\n */\nasync function setdiff1dAsync_(x, y) {\n const $x = convertToTensor(x, 'x', 'setdiff1d');\n const $y = convertToTensor(y, 'y', 'setdiff1d');\n util.assert($x.dtype === $y.dtype, () => `x and y should have the same dtype, but got x (${$x.dtype}) and y (${$y.dtype}).`);\n util.assert($x.rank === 1, () => `x should be 1D tensor, but got x (${$x.shape}).`);\n util.assert($y.rank === 1, () => `y should be 1D tensor, but got y (${$y.shape}).`);\n const xVals = await $x.data();\n const yVals = await $y.data();\n const ySet = new Set(yVals);\n let outputSize = 0;\n for (let i = 0; i < xVals.length; i++) {\n if (!ySet.has(xVals[i])) {\n outputSize++;\n }\n }\n const buffer = new TensorBuffer([outputSize], $x.dtype);\n const indices = new TensorBuffer([outputSize], 'int32');\n for (let i = 0, p = 0; i < xVals.length; i++) {\n if (!ySet.has(xVals[i])) {\n buffer.values[p] = xVals[i];\n indices.values[p] = i;\n p++;\n }\n }\n return [buffer.toTensor(), indices.toTensor()];\n}\nexport const setdiff1dAsync = setdiff1dAsync_;\n//# sourceMappingURL=setdiff1d_async.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Sign } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Returns an element-wise indication of the sign of a number.\n *\n * ```js\n * const x = tf.tensor1d([.6, 1.1, -3.3, NaN, 0]);\n *\n * x.sign().print(); // or tf.sign(x)\n * ```\n * @param x The input Tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction sign_(x) {\n const $x = convertToTensor(x, 'x', 'sign');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc(backend => backend.sign($x), inputs, null /* grad */, Sign);\n}\nexport const sign = op({ sign_ });\n//# sourceMappingURL=sign.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Sin } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes sin of the input Tensor element-wise: `sin(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, Math.PI / 2, Math.PI * 3 / 4]);\n *\n * x.sin().print(); // or tf.sin(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction sin_(x) {\n const $x = convertToTensor(x, 'x', 'sin');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.sin($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Sin);\n}\nexport const sin = op({ sin_ });\n//# sourceMappingURL=sin.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Sinh } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes hyperbolic sin of the input `tf.Tensor` element-wise: `sinh(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, .7]);\n *\n * x.sinh().print(); // or tf.sinh(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction sinh_(x) {\n const $x = convertToTensor(x, 'x', 'sinh');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.sinh($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Sinh);\n}\nexport const sinh = op({ sinh_ });\n//# sourceMappingURL=sinh.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\nimport { slice } from './slice';\n/**\n * Extracts a 1D slice from 1D array starting at coordinates `begin` and is\n * of length `size`. See `slice` for details.\n */\nfunction slice1d_(x, begin, size) {\n const $x = convertToTensor(x, 'x', 'slice1d');\n util.assert($x.rank === 1, () => `slice1d expects a rank-1 tensor, but got a rank-${$x.rank} tensor`);\n return slice($x, [begin], [size]);\n}\nexport const slice1d = op({ slice1d_ });\n//# sourceMappingURL=slice1d.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\nimport { slice } from './slice';\n/**\n * Extracts a 2D slice from a 2D array starting at coordinates `begin` and\n * is of size `size`. See `slice` for details.\n */\nfunction slice2d_(x, begin, size) {\n const $x = convertToTensor(x, 'x', 'slice2d');\n util.assert($x.rank === 2, () => `slice2d expects a rank-2 tensor, but got a rank-${$x.rank} tensor`);\n return slice($x, begin, size);\n}\nexport const slice2d = op({ slice2d_ });\n//# sourceMappingURL=slice2d.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\nimport { slice } from './slice';\n/**\n * Extracts a 3D slice from a 3D array starting at coordinates `begin` and\n * is of size `size`. See `slice` for details.\n */\nfunction slice3d_(x, begin, size) {\n const $x = convertToTensor(x, 'x', 'slice3d');\n util.assert($x.rank === 3, () => `slice3d expects a rank-3 tensor, but got a rank-${$x.rank} tensor`);\n return slice($x, begin, size);\n}\nexport const slice3d = op({ slice3d_ });\n//# sourceMappingURL=slice3d.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\nimport { slice } from './slice';\n/**\n * Extracts a 4D slice from a 4D array starting at coordinates `begin` and\n * is of size `size`. See `slice` for details.\n */\nfunction slice4d_(x, begin, size) {\n const $x = convertToTensor(x, 'x', 'slice4d');\n util.assert($x.rank === 4, () => `slice4d expects a rank-4 tensor, but got a rank-${$x.rank} tensor`);\n return slice($x, begin, size);\n}\nexport const slice4d = op({ slice4d_ });\n//# sourceMappingURL=slice4d.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Softmax } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes the softmax normalized vector given the logits.\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3]);\n *\n * a.softmax().print(); // or tf.softmax(a)\n * ```\n *\n * ```js\n * const a = tf.tensor2d([2, 4, 6, 1, 2, 3], [2, 3]);\n *\n * a.softmax().print(); // or tf.softmax(a)\n * ```\n *\n * @param logits The logits array.\n * @param dim The dimension softmax would be performed on. Defaults to `-1`\n * which indicates the last dimension.\n *\n * @doc {heading: 'Operations', subheading: 'Normalization'}\n */\nfunction softmax_(logits, dim = -1) {\n const $logits = convertToTensor(logits, 'logits', 'softmax', 'float32');\n if (dim === -1) {\n dim = $logits.rank - 1;\n }\n if (dim !== $logits.rank - 1) {\n throw Error('Softmax along a non-last dimension is not yet supported. ' +\n `Logits was rank ${$logits.rank} and dim was ${dim}`);\n }\n const inputs = { logits: $logits };\n const attrs = { dim };\n return ENGINE.runKernelFunc((backend, save) => {\n const y = backend.softmax($logits, dim);\n save([y]);\n return y;\n }, inputs, null /* grad */, Softmax, attrs);\n}\nexport const softmax = op({ softmax_ });\n//# sourceMappingURL=softmax.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { FFT } from '../../kernel_names';\nimport { assert } from '../../util';\nimport { op } from '../operation';\n/**\n * Fast Fourier transform.\n *\n * Computes the 1-dimensional discrete Fourier transform over the inner-most\n * dimension of input.\n *\n * ```js\n * const real = tf.tensor1d([1, 2, 3]);\n * const imag = tf.tensor1d([1, 2, 3]);\n * const x = tf.complex(real, imag);\n *\n * x.fft().print(); // tf.spectral.fft(x).print();\n * ```\n * @param input The complex input to compute an fft over.\n *\n * @doc {heading: 'Operations', subheading: 'Spectral', namespace: 'spectral'}\n */\nfunction fft_(input) {\n assert(input.dtype === 'complex64', () => `The dtype for tf.spectral.fft() must be complex64 ` +\n `but got ${input.dtype}.`);\n const inputs = { input };\n return ENGINE.runKernelFunc(backend => {\n // Collapse all outer dimensions to a single batch dimension.\n const innerDimensionSize = input.shape[input.shape.length - 1];\n const batch = input.size / innerDimensionSize;\n const input2D = input.as2D(batch, innerDimensionSize);\n const result = backend.fft(input2D);\n return result.reshape(input.shape);\n }, inputs, null /* gradient */, FFT);\n}\nexport const fft = op({ fft_ });\n//# sourceMappingURL=fft.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { IFFT } from '../../kernel_names';\nimport { assert } from '../../util';\nimport { op } from '../operation';\nimport { reshape } from '../reshape';\n/**\n * Inverse fast Fourier transform.\n *\n * Computes the inverse 1-dimensional discrete Fourier transform over the\n * inner-most dimension of input.\n *\n * ```js\n * const real = tf.tensor1d([1, 2, 3]);\n * const imag = tf.tensor1d([1, 2, 3]);\n * const x = tf.complex(real, imag);\n *\n * x.ifft().print(); // tf.spectral.ifft(x).print();\n * ```\n * @param input The complex input to compute an ifft over.\n *\n * @doc {heading: 'Operations', subheading: 'Spectral', namespace: 'spectral'}\n */\nfunction ifft_(input) {\n assert(input.dtype === 'complex64', () => `The dtype for tf.spectral.ifft() must be complex64 ` +\n `but got ${input.dtype}.`);\n const inputs = { input };\n return ENGINE.runKernelFunc(backend => {\n // Collapse all outer dimensions to a single batch dimension.\n const innerDimensionSize = input.shape[input.shape.length - 1];\n const batch = input.size / innerDimensionSize;\n const input2D = reshape(input, [batch, innerDimensionSize]);\n const result = backend.ifft(input2D);\n return reshape(result, input.shape);\n }, inputs, null /* gradient */, IFFT);\n}\nexport const ifft = op({ ifft_ });\n//# sourceMappingURL=ifft.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { complex } from '../complex';\nimport { concat } from '../concat';\nimport { imag } from '../imag';\nimport { mul } from '../mul';\nimport { op } from '../operation';\nimport { real } from '../real';\nimport { reshape } from '../reshape';\nimport { reverse } from '../reverse';\nimport { scalar } from '../scalar';\nimport { slice } from '../slice';\nimport { ifft } from './ifft';\n/**\n * Inversed real value input fast Fourier transform.\n *\n * Computes the 1-dimensional inversed discrete Fourier transform over the\n * inner-most dimension of the real input.\n *\n * ```js\n * const real = tf.tensor1d([1, 2, 3]);\n * const imag = tf.tensor1d([0, 0, 0]);\n * const x = tf.complex(real, imag);\n *\n * x.irfft().print();\n * ```\n * @param input The real value input to compute an irfft over.\n *\n * @doc {heading: 'Operations', subheading: 'Spectral', namespace: 'spectral'}\n */\nfunction irfft_(input) {\n const innerDimensionSize = input.shape[input.shape.length - 1];\n const batch = input.size / innerDimensionSize;\n let ret;\n if (innerDimensionSize <= 2) {\n const complexInput = reshape(input, [batch, innerDimensionSize]);\n ret = ifft(complexInput);\n }\n else {\n // The length of unique components of the DFT of a real-valued signal\n // is 2 * (input_len - 1)\n const outputShape = [batch, 2 * (innerDimensionSize - 1)];\n const realInput = reshape(real(input), [batch, innerDimensionSize]);\n const imagInput = reshape(imag(input), [batch, innerDimensionSize]);\n const realConjugate = reverse(slice(realInput, [0, 1], [batch, innerDimensionSize - 2]), 1);\n const imagConjugate = mul(reverse(slice(imagInput, [0, 1], [batch, innerDimensionSize - 2]), 1), scalar(-1));\n const r = concat([realInput, realConjugate], 1);\n const i = concat([imagInput, imagConjugate], 1);\n const complexInput = reshape(complex(r, i), [outputShape[0], outputShape[1]]);\n ret = ifft(complexInput);\n }\n ret = real(ret);\n // reshape the result if the input is 3D tensor.\n if (input.rank === 3 && input.shape[0] !== 0) {\n const temp = ret;\n const batch = input.shape[0];\n ret = reshape(ret, [batch, ret.shape[0] / batch, ret.shape[1]]);\n temp.dispose();\n }\n return ret;\n}\nexport const irfft = op({ irfft_ });\n//# sourceMappingURL=irfft.js.map", "import { assert } from '../util';\n/**\n * Prepare the split size array. When the input is a number, the axis is evenly\n * divided among the split size. 
When the input contains the negative value, the\n * rest of the axis is allocated toward that.\n */\nexport function prepareSplitSize(x, numOrSizeSplits, axis = 0) {\n let splitSizes = [];\n if (typeof (numOrSizeSplits) === 'number') {\n assert(x.shape[axis] % numOrSizeSplits === 0, () => 'Number of splits must evenly divide the axis.');\n splitSizes =\n new Array(numOrSizeSplits).fill(x.shape[axis] / numOrSizeSplits);\n }\n else {\n const numOfNegs = numOrSizeSplits.reduce((count, value) => {\n if (value === -1) {\n count += 1;\n }\n return count;\n }, 0);\n assert(numOfNegs <= 1, () => 'There should be only one negative value in split array.');\n const negIndex = numOrSizeSplits.indexOf(-1);\n // Allow the number of split array to be -1, which indicates the rest\n // of dimension is allocated to that split.\n if (negIndex !== -1) {\n const total = numOrSizeSplits.reduce((a, b) => b > 0 ? a + b : a);\n numOrSizeSplits[negIndex] = x.shape[axis] - total;\n }\n assert(x.shape[axis] === numOrSizeSplits.reduce((a, b) => a + b), () => 'The sum of sizes must match the size of the axis dimension.');\n splitSizes = numOrSizeSplits;\n }\n return splitSizes;\n}\n//# sourceMappingURL=split_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { SplitV } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam } from '../util';\nimport { op } from './operation';\nimport { prepareSplitSize } from './split_util';\n/**\n * Splits a `tf.Tensor` into sub tensors.\n *\n * If `numOrSizeSplits` is a number, splits `x` along dimension `axis`\n * into `numOrSizeSplits` smaller tensors.\n * Requires that `numOrSizeSplits` evenly divides `x.shape[axis]`.\n *\n * If `numOrSizeSplits` is a number array, splits `x` into\n * `numOrSizeSplits.length` pieces. The shape of the `i`-th piece has the\n * same size as `x` except along dimension `axis` where the size is\n * `numOrSizeSplits[i]`.\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4, 5, 6, 7, 8], [2, 4]);\n * const [a, b] = tf.split(x, 2, 1);\n * a.print();\n * b.print();\n *\n * const [c, d, e] = tf.split(x, [1, 2, 1], 1);\n * c.print();\n * d.print();\n * e.print();\n * ```\n *\n * @param x The input tensor to split.\n * @param numOrSizeSplits Either an integer indicating the number of\n * splits along the axis or an array of integers containing the sizes of\n * each output tensor along the axis. If a number then it must evenly divide\n * `x.shape[axis]`; otherwise the sum of sizes must match `x.shape[axis]`.\n * Can contain one -1 indicating that dimension is to be inferred.\n * @param axis The dimension along which to split. 
Defaults to 0 (the first\n * dim).\n *\n * @doc {heading: 'Tensors', subheading: 'Slicing and Joining'}\n */\nfunction split_(x, numOrSizeSplits, axis = 0) {\n const $x = convertToTensor(x, 'x', 'split');\n const forward = (backend, _) => {\n const $axis = parseAxisParam(axis, $x.shape)[0];\n const splitSizes = prepareSplitSize($x, numOrSizeSplits, $axis);\n return backend.split($x, splitSizes, $axis);\n };\n const inputs = { x: $x };\n const attr = { numOrSizeSplits, axis };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, SplitV, attr);\n}\nexport const split = op({ split_ });\n//# sourceMappingURL=split.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { assert } from '../../util';\nimport { complex } from '../complex';\nimport { concat } from '../concat';\nimport { imag } from '../imag';\nimport { op } from '../operation';\nimport { real } from '../real';\nimport { reshape } from '../reshape';\nimport { slice } from '../slice';\nimport { split } from '../split';\nimport { zeros } from '../zeros';\nimport { zerosLike } from '../zeros_like';\nimport { fft } from './fft';\n/**\n * Real value input fast Fourier transform.\n *\n * Computes the 1-dimensional discrete Fourier transform over the\n * inner-most dimension of the real input.\n *\n * ```js\n * const real = tf.tensor1d([1, 2, 3]);\n *\n * real.rfft().print();\n * ```\n * @param input The real value input to compute an rfft over.\n *\n * @doc {heading: 'Operations', subheading: 'Spectral', namespace: 'spectral'}\n */\nfunction rfft_(input, fftLength) {\n assert(input.dtype === 'float32', () => `The dtype for rfft() must be real value but got ${input.dtype}`);\n let innerDimensionSize = input.shape[input.shape.length - 1];\n const batch = input.size / innerDimensionSize;\n let adjustedInput;\n if (fftLength != null && fftLength < innerDimensionSize) {\n // Need to crop\n const begin = input.shape.map(v => 0);\n const size = input.shape.map(v => v);\n size[input.shape.length - 1] = fftLength;\n adjustedInput = slice(input, begin, size);\n innerDimensionSize = fftLength;\n }\n else if (fftLength != null && fftLength > innerDimensionSize) {\n // Need to pad with zeros\n const zerosShape = input.shape.map(v => v);\n zerosShape[input.shape.length - 1] = fftLength - innerDimensionSize;\n adjustedInput = concat([input, zeros(zerosShape)], input.shape.length - 1);\n innerDimensionSize = fftLength;\n }\n else {\n adjustedInput = input;\n }\n // Complement the input with zero imaginary numbers.\n const zerosInput = zerosLike(adjustedInput);\n const complexInput = reshape(complex(adjustedInput, zerosInput), [batch, innerDimensionSize]);\n const ret = fft(complexInput);\n // Exclude complex conjugations. 
These conjugations are put symmetrically.\n const half = Math.floor(innerDimensionSize / 2) + 1;\n const realValues = real(ret);\n const imagValues = imag(ret);\n const realComplexConjugate = split(realValues, [half, innerDimensionSize - half], realValues.shape.length - 1);\n const imagComplexConjugate = split(imagValues, [half, innerDimensionSize - half], imagValues.shape.length - 1);\n const outputShape = adjustedInput.shape.slice();\n outputShape[adjustedInput.shape.length - 1] = half;\n return reshape(complex(realComplexConjugate[0], imagComplexConjugate[0]), outputShape);\n}\nexport const rfft = op({ rfft_ });\n//# sourceMappingURL=rfft.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Sqrt } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes square root of the input `tf.Tensor` element-wise: `y = sqrt(x)`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 4, -1]);\n *\n * x.sqrt().print(); // or tf.sqrt(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction sqrt_(x) {\n const $x = convertToTensor(x, 'x', 'sqrt');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.sqrt($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Sqrt);\n}\nexport const sqrt = op({ sqrt_ });\n//# sourceMappingURL=sqrt.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { SquaredDifference } from '../kernel_names';\nimport { makeTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertAndGetBroadcastShape } from './broadcast_util';\nimport { op } from './operation';\n/**\n * Returns (a - b) * (a - b) element-wise.\n * Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([1, 4, 3, 16]);\n * const b = tf.tensor1d([1, 2, 9, 4]);\n *\n * a.squaredDifference(b).print(); // or tf.squaredDifference(a, b)\n * ```\n *\n * ```js\n * // Broadcast squared difference a with b.\n * const a = tf.tensor1d([2, 4, 6, 8]);\n * const b = tf.scalar(5);\n *\n * a.squaredDifference(b).print(); // or tf.squaredDifference(a, b)\n * ```\n *\n * @param a The first tensor.\n * @param b The second tensor. Must have the same type as `a`.\n *\n * @doc {heading: 'Operations', subheading: 'Arithmetic'}\n */\nfunction squaredDifference_(a, b) {\n let $a = convertToTensor(a, 'a', 'squaredDifference');\n let $b = convertToTensor(b, 'b', 'squaredDifference');\n [$a, $b] = makeTypesMatch($a, $b);\n assertAndGetBroadcastShape($a.shape, $b.shape);\n const forward = (backend, save) => {\n const res = backend.squaredDifference($a, $b);\n save([$a, $b]);\n return res;\n };\n const inputs = { a: $a, b: $b };\n const attrs = {};\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, SquaredDifference, attrs);\n}\nexport const squaredDifference = op({ squaredDifference_ });\n//# sourceMappingURL=squared_difference.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport { squeezeShape } from '../util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Removes dimensions of size 1 from the shape of a `tf.Tensor`.\n *\n * ```js\n * const x = tf.tensor([1, 2, 3, 4], [1, 1, 4]);\n * x.squeeze().print();\n * ```\n *\n * @param x The input tensor to be squeezed.\n * @param axis An optional list of numbers. If specified, only\n * squeezes the dimensions listed. The dimension index starts at 0. 
It\n * is an error to squeeze a dimension that is not 1.\n *\n * @doc {heading: 'Tensors', subheading: 'Transformations'}\n */\nfunction squeeze_(x, axis) {\n const $x = convertToTensor(x, 'x', 'squeeze');\n return reshape($x, squeezeShape($x.shape, axis).newShape);\n}\nexport const squeeze = op({ squeeze_ });\n//# sourceMappingURL=squeeze.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensorArray } from '../tensor_util_env';\nimport * as util from '../util';\nimport { concat } from './concat';\nimport { expandDims } from './expand_dims';\nimport { op } from './operation';\n/**\n * Stacks a list of rank-`R` `tf.Tensor`s into one rank-`(R+1)` `tf.Tensor`.\n *\n * ```js\n * const a = tf.tensor1d([1, 2]);\n * const b = tf.tensor1d([3, 4]);\n * const c = tf.tensor1d([5, 6]);\n * tf.stack([a, b, c]).print();\n * ```\n *\n * @param tensors A list of tensor objects with the same shape and dtype.\n * @param axis The axis to stack along. Defaults to 0 (the first dim).\n *\n * @doc {heading: 'Tensors', subheading: 'Slicing and Joining'}\n */\nfunction stack_(tensors, axis = 0) {\n const $tensors = convertToTensorArray(tensors, 'tensors', 'stack');\n util.assert($tensors.length >= 1, () => 'Pass at least one tensor to tf.stack');\n if ($tensors.length === 1) {\n return expandDims($tensors[0], axis);\n }\n const rank = $tensors[0].rank;\n const shape = $tensors[0].shape;\n const dtype = $tensors[0].dtype;\n util.assert(axis <= rank, () => 'Axis must be <= rank of the tensor');\n $tensors.forEach(t => {\n util.assertShapesMatch(shape, t.shape, 'All tensors passed to stack must have matching shapes');\n util.assert(dtype === t.dtype, () => 'All tensors passed to stack must have matching dtypes');\n });\n const expandedTensors = $tensors.map(t => expandDims(t, axis));\n // Stack exists in the TensorFlow C++ API\n // (https://www.tensorflow.org/api_docs/cc/class/tensorflow/ops/stack) but not\n // in\n // https://raw.githubusercontent.com/tensorflow/tensorflow/master/tensorflow/core/ops/ops.pbtxt.\n // Therefore we are treating it like a high-level op rather than\n // creating a dedicated stack kernel.\n return concat(expandedTensors, axis);\n}\nexport const stack = op({ stack_ });\n//# sourceMappingURL=stack.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Step } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes step of the input `tf.Tensor` element-wise: `x > 0 ? 1 : alpha * x`\n *\n * ```js\n * const x = tf.tensor1d([0, 2, -1, -3]);\n *\n * x.step(.5).print(); // or tf.step(x, .5)\n * ```\n * @param x The input tensor.\n * @param alpha The gradient when input is negative.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction step_(x, alpha = 0.0) {\n const $x = convertToTensor(x, 'x', 'step');\n const inputs = { x: $x };\n const attrs = { alpha };\n return ENGINE.runKernelFunc(backend => backend.step($x, alpha), inputs, null /* grad */, Step, attrs);\n}\nexport const step = op({ step_ });\n//# sourceMappingURL=step.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { StridedSlice } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nimport { slice } from './slice';\nimport { computeOutShape, getNormalizedAxes, maskToAxes } from './slice_util';\n/**\n * Extracts a strided slice of a tensor.\n *\n * Roughly speaking, this op extracts a slice of size (end-begin)/stride from\n * the given input tensor (x). Starting at the location specified by begin the\n * slice continues by adding stride to the index until all dimensions are not\n * less than end. 
Note that a stride can be negative, which causes a reverse\n * slice.\n *\n * ```js\n * const t = tf.tensor3d([1, 1, 1 ,2, 2, 2, 3, 3, 3, 4, 4, 4, 5, 5, 5, 6, 6, 6],\n * [3, 2, 3]);\n * t.stridedSlice([1, 0, 0], [2, 1, 3], [1, 1, 1]).print() // [[[3, 3, 3]]]\n * t.stridedSlice([1, 0, 0], [2, 2, 3], [1, 1, 1]).print() // [[[3, 3, 3],\n * // [4, 4, 4]]]\n * t.stridedSlice([1, -1, 0], [2, -3, 3], [1, -1, 1]).print() // [[[4, 4, 4],\n * // [3, 3, 3]]]\n * ```\n *\n * @param x The tensor to stride slice.\n * @param begin The coordinates to start the slice from.\n * @param end: The coordinates to end the slice at.\n * @param strides: The size of the slice.\n * @param beginMask: If the ith bit of beginMask is set, begin[i] is ignored\n * and the fullest possible range in that dimension is used instead.\n * @param endMask: If the ith bit of endMask is set, end[i] is ignored\n * and the fullest possible range in that dimension is used instead.\n * @param shrinkAxisMask: a bitmask where bit i implies that\n * the ith specification should shrink the dimensionality. begin and end must\n * imply a slice of size 1 in the dimension.\n *\n * @doc {heading: 'Operations', subheading: 'Slicing and Joining'}\n */\nfunction stridedSlice_(x, begin, end, strides, beginMask = 0, endMask = 0, ellipsisMask = 0, newAxisMask = 0, shrinkAxisMask = 0) {\n let $x = convertToTensor(x, 'x', 'stridedSlice');\n const forward = (backend) => {\n if (strides == null) {\n strides = new Array(begin.length);\n }\n const ellipsisAxes = maskToAxes(ellipsisMask);\n if (ellipsisAxes.length > 1) {\n throw new Error('Multiple ellipses in slice is not allowed.');\n }\n if (ellipsisMask !== 0 && newAxisMask !== 0) {\n throw new Error('Using both ellipsisMask and newAxisMask is not yet supported.');\n }\n if (ellipsisMask !== 0 && shrinkAxisMask !== 0) {\n throw new Error('Using both ellipsisMask and shrinkAxisMask is not yet supported.');\n }\n const numInterpolatedAxes = $x.rank - begin.length;\n // Expand the dims of x based on the newAxisMask.\n const expandAxes = maskToAxes(newAxisMask);\n const newShape = $x.shape.slice();\n expandAxes.forEach(axis => {\n begin[axis] = 0;\n end[axis] = 1;\n newShape.splice(axis, 0, 1);\n });\n $x = reshape($x, newShape);\n const { begin: normalizedBegin, end: normalizedEnd, strides: normalizedStrides } = getNormalizedAxes($x.shape, ellipsisAxes, numInterpolatedAxes, begin, end, strides, beginMask, endMask, ellipsisMask);\n begin = normalizedBegin;\n end = normalizedEnd;\n strides = normalizedStrides;\n const shrinkAxes = maskToAxes(shrinkAxisMask);\n // Adjust the ends based on the shrink mask.\n shrinkAxes.forEach(axis => {\n end[axis] = begin[axis] + 1;\n strides[axis] = 1;\n });\n // Figure out the output shape.\n const size = computeOutShape(begin, end, strides);\n // Remove the axes based on shrinkMask.\n const outShape = size.filter((_, axis) => shrinkAxes.indexOf(axis) === -1);\n const nonStrided = strides.every(v => v === 1);\n if (nonStrided) {\n return reshape(slice($x, begin, size), outShape);\n }\n const res = backend.stridedSlice($x, begin, end, strides);\n return reshape(res, outShape);\n };\n const inputs = { x: $x };\n const attrs = {\n begin,\n end,\n strides,\n beginMask,\n endMask,\n ellipsisMask,\n newAxisMask,\n shrinkAxisMask\n };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, StridedSlice, attrs);\n}\nexport const stridedSlice = op({ stridedSlice_ });\n//# sourceMappingURL=strided_slice.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Tan } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Computes tan of the input `tf.Tensor` element-wise, `tan(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, Math.PI / 2, Math.PI * 3 / 4]);\n *\n * x.tan().print(); // or tf.tan(x)\n * ```\n * @param x The input tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Basic math'}\n */\nfunction tan_(x) {\n const $x = convertToTensor(x, 'x', 'tan');\n const inputs = { x: $x };\n return ENGINE.runKernelFunc((backend, save) => {\n const res = backend.tan($x);\n save([$x]);\n return res;\n }, inputs, null /* grad */, Tan);\n}\nexport const tan = op({ tan_ });\n//# sourceMappingURL=tan.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { inferShape } from '../tensor_util_env';\nimport { assertNonNull } from '../util';\nimport { makeTensor } from './tensor_ops_util';\n/**\n * Creates rank-2 `tf.Tensor` with the provided values, shape and dtype.\n *\n * The same functionality can be achieved with `tf.tensor`, but in general\n * we recommend using `tf.tensor2d` as it makes the code more readable.\n *\n * ```js\n * // Pass a nested array.\n * tf.tensor2d([[1, 2], [3, 4]]).print();\n * ```\n * ```js\n * // Pass a flat array and specify a shape.\n * tf.tensor2d([1, 2, 3, 4], [2, 2]).print();\n * ```\n *\n * @param values The values of the tensor. Can be nested array of numbers,\n * or a flat array, or a `TypedArray`.\n * @param shape The shape of the tensor. 
If not provided, it is inferred from\n * `values`.\n * @param dtype The data type.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function tensor2d(values, shape, dtype) {\n assertNonNull(values);\n if (shape != null && shape.length !== 2) {\n throw new Error('tensor2d() requires shape to have two numbers');\n }\n const inferredShape = inferShape(values, dtype);\n if (inferredShape.length !== 2 && inferredShape.length !== 1) {\n throw new Error('tensor2d() requires values to be number[][] or flat/TypedArray');\n }\n if (inferredShape.length === 1 && shape == null) {\n throw new Error('tensor2d() requires shape to be provided when `values` ' +\n 'are a flat/TypedArray');\n }\n return makeTensor(values, shape, inferredShape, dtype);\n}\n//# sourceMappingURL=tensor2d.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { inferShape } from '../tensor_util_env';\nimport { assertNonNull } from '../util';\nimport { makeTensor } from './tensor_ops_util';\n/**\n * Creates rank-4 `tf.Tensor` with the provided values, shape and dtype.\n *\n * The same functionality can be achieved with `tf.tensor`, but in general\n * we recommend using `tf.tensor4d` as it makes the code more readable.\n *\n * ```js\n * // Pass a nested array.\n * tf.tensor4d([[[[1], [2]], [[3], [4]]]]).print();\n * ```\n * ```js\n * // Pass a flat array and specify a shape.\n * tf.tensor4d([1, 2, 3, 4], [1, 2, 2, 1]).print();\n * ```\n *\n * @param values The values of the tensor. Can be nested array of numbers,\n * or a flat array, or a `TypedArray`.\n * @param shape The shape of the tensor. Optional. If not provided,\n * it is inferred from `values`.\n * @param dtype The data type.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function tensor4d(values, shape, dtype) {\n assertNonNull(values);\n if (shape != null && shape.length !== 4) {\n throw new Error('tensor4d() requires shape to have four numbers');\n }\n const inferredShape = inferShape(values, dtype);\n if (inferredShape.length !== 4 && inferredShape.length !== 1) {\n throw new Error('tensor4d() requires values to be number[][][][] or flat/TypedArray');\n }\n if (inferredShape.length === 1 && shape == null) {\n throw new Error('tensor4d() requires shape to be provided when `values` ' +\n 'are a flat array');\n }\n return makeTensor(values, shape, inferredShape, dtype);\n}\n//# sourceMappingURL=tensor4d.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { inferShape } from '../tensor_util_env';\nimport { assertNonNull } from '../util';\nimport { makeTensor } from './tensor_ops_util';\n/**\n * Creates rank-5 `tf.Tensor` with the provided values, shape and dtype.\n *\n * The same functionality can be achieved with `tf.tensor`, but in general\n * we recommend using `tf.tensor5d` as it makes the code more readable.\n *\n * ```js\n * // Pass a nested array.\n * tf.tensor5d([[[[[1], [2]], [[3], [4]]]]]).print();\n * ```\n * ```js\n * // Pass a flat array and specify a shape.\n * tf.tensor5d([1, 2, 3, 4, 5, 6, 7, 8], [1, 2, 2, 2, 1]).print();\n * ```\n *\n * @param values The values of the tensor. Can be nested array of numbers,\n * or a flat array, or a `TypedArray`.\n * @param shape The shape of the tensor. Optional. If not provided,\n * it is inferred from `values`.\n * @param dtype The data type.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function tensor5d(values, shape, dtype) {\n assertNonNull(values);\n if (shape != null && shape.length !== 5) {\n throw new Error('tensor5d() requires shape to have five numbers');\n }\n const inferredShape = inferShape(values, dtype);\n if (inferredShape.length !== 5 && inferredShape.length !== 1) {\n throw new Error('tensor5d() requires values to be ' +\n 'number[][][][][] or flat/TypedArray');\n }\n if (inferredShape.length === 1 && shape == null) {\n throw new Error('tensor5d() requires shape to be provided when `values` ' +\n 'are a flat array');\n }\n return makeTensor(values, shape, inferredShape, dtype);\n}\n//# sourceMappingURL=tensor5d.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { inferShape } from '../tensor_util_env';\nimport { assertNonNull } from '../util';\nimport { makeTensor } from './tensor_ops_util';\n/**\n * Creates rank-6 `tf.Tensor` with the provided values, shape and dtype.\n *\n * The same functionality can be achieved with `tf.tensor`, but in general\n * we recommend using `tf.tensor6d` as it makes the code more readable.\n *\n * ```js\n * // Pass a nested array.\n * tf.tensor6d([[[[[[1],[2]],[[3],[4]]],[[[5],[6]],[[7],[8]]]]]]).print();\n * ```\n * ```js\n * // Pass a flat array and specify a shape.\n * tf.tensor6d([1, 2, 3, 4, 5, 6, 7, 8], [1, 1, 2, 2, 2, 1]).print();\n * ```\n *\n * @param values The values of the tensor. Can be nested array of numbers,\n * or a flat array, or a `TypedArray`.\n * @param shape The shape of the tensor. Optional. If not provided,\n * it is inferred from `values`.\n * @param dtype The data type.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function tensor6d(values, shape, dtype) {\n assertNonNull(values);\n if (shape != null && shape.length !== 6) {\n throw new Error('tensor6d() requires shape to have six numbers');\n }\n const inferredShape = inferShape(values, dtype);\n if (inferredShape.length !== 6 && inferredShape.length !== 1) {\n throw new Error('tensor6d() requires values to be number[][][][][][] or ' +\n 'flat/TypedArray');\n }\n if (inferredShape.length === 1 && shape == null) {\n throw new Error('tensor6d() requires shape to be provided when `values` ' +\n 'are a flat array');\n }\n shape = shape ||\n inferredShape;\n return makeTensor(values, shape, inferredShape, dtype);\n}\n//# sourceMappingURL=tensor6d.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { TopK } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Finds the values and indices of the `k` largest entries along the last\n * dimension.\n *\n * If the input is a vector (rank=1), finds the k largest entries in the vector\n * and outputs their values and indices as vectors. 
Thus values[j] is the j-th\n * largest entry in input, and its index is indices[j].\n * For higher rank inputs, computes the top k entries along the last dimension.\n *\n * If two elements are equal, the lower-index element appears first.\n *\n * ```js\n * const a = tf.tensor2d([[1, 5], [4, 3]]);\n * const {values, indices} = tf.topk(a);\n * values.print();\n * indices.print();\n * ```\n * @param x 1-D or higher `tf.Tensor` with last dimension being at least `k`.\n * @param k Number of top elements to look for along the last dimension.\n * @param sorted If true, the resulting `k` elements will be sorted by the\n * values in descending order.\n *\n * @doc {heading: 'Operations', subheading: 'Evaluation'}\n */\nfunction topk_(x, k = 1, sorted = true) {\n const $x = convertToTensor(x, 'x', 'topk');\n if ($x.rank === 0) {\n throw new Error('topk() expects the input to be of rank 1 or higher');\n }\n const lastDim = $x.shape[$x.shape.length - 1];\n if (k > lastDim) {\n throw new Error(`'k' passed to topk() must be <= the last dimension (${lastDim}) ` +\n `but got ${k}`);\n }\n const inputs = { x: $x };\n const attrs = { k, sorted };\n const [values, indices] = ENGINE.runKernelFunc(b => b.topk($x, k, sorted), inputs, null /* grad */, TopK, attrs);\n return { values, indices };\n}\nexport const topk = op({ topk_ });\n//# sourceMappingURL=topk.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { buffer } from './buffer';\nimport { op } from './operation';\nimport { MPRandGauss } from './rand_util';\n/**\n * Creates a `tf.Tensor` with values sampled from a truncated normal\n * distribution.\n *\n * ```js\n * tf.truncatedNormal([2, 2]).print();\n * ```\n *\n * The generated values follow a normal distribution with specified mean and\n * standard deviation, except that values whose magnitude is more than 2\n * standard deviations from the mean are dropped and re-picked.\n *\n * @param shape An array of integers defining the output tensor shape.\n * @param mean The mean of the normal distribution.\n * @param stdDev The standard deviation of the normal distribution.\n * @param dtype The data type of the output tensor.\n * @param seed The seed for the random number generator.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nfunction truncatedNormal_(shape, mean = 0, stdDev = 1, dtype, seed) {\n if (dtype != null && dtype === 'bool') {\n throw new Error(`Unsupported data type $ { dtype }`);\n }\n const randGauss = new MPRandGauss(mean, stdDev, dtype, true /* truncated */, seed);\n const res = buffer(shape, dtype);\n for (let i = 0; i < res.values.length; i++) {\n res.values[i] = randGauss.nextValue();\n }\n return res.toTensor();\n}\nexport const truncatedNormal = op({ truncatedNormal_ });\n//# sourceMappingURL=truncated_normal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Unique } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assert } from '../util';\nimport { op } from './operation';\n/**\n * Finds unique elements along an axis of a tensor.\n *\n * It returns a tensor `values` containing all of the unique elements along the\n * `axis` of the given tensor `x` in the same order that they occur along the\n * `axis` in `x`; `x` does not need to be sorted. It also returns a tensor\n * `indices` the same size as the number of the elements in `x` along the `axis`\n * dimension. It contains the index in the unique output `values`.\n *\n * ```js\n * // A 1-D tensor\n * const a = tf.tensor1d([1, 1, 2, 4, 4, 4, 7, 8, 8]);\n * const {values, indices} = tf.unique(a);\n * values.print(); // [1, 2, 4, 7, 8,]\n * indices.print(); // [0, 0, 1, 2, 2, 2, 3, 4, 4]\n * ```\n *\n * ```js\n * // A 2-D tensor with axis=0\n * //\n * // 'a' is: [[1, 0, 0],\n * // [1, 0, 0],\n * // [2, 0, 0]]\n * const a = tf.tensor2d([[1, 0, 0], [1, 0, 0], [2, 0, 0]]);\n * const {values, indices} = tf.unique(a, 0)\n * values.print(); // [[1, 0, 0],\n * // [2, 0, 0]]\n * indices.print(); // [0, 0, 1]\n * ```\n *\n * ```js\n * // A 2-D tensor with axis=1\n * //\n * // 'a' is: [[1, 0, 0],\n * // [1, 0, 0],\n * // [2, 0, 0]]\n * const a = tf.tensor2d([[1, 0, 0], [1, 0, 0], [2, 0, 0]]);\n * const {values, indices} = tf.unique(a, 1)\n * values.print(); // [[1, 0],\n * // [1, 0],\n * // [2, 0]]\n * indices.print(); // [0, 1, 1]\n * ```\n * @param x A tensor (int32, string, bool).\n * @param axis The axis of the tensor to find the unique elements.\n * @returns [uniqueElements, indices] (see above for details)\n *\n * @doc {heading: 'Operations', subheading: 'Evaluation'}\n */\nfunction unique_(x, axis = 0) {\n // x can be of any dtype, thus null as the last argument.\n const $x = convertToTensor(x, 'x', 'unique', null);\n assert($x.rank > 0, () => 'The input tensor must be at least 1D');\n const inputs = { x: $x };\n const attrs = { axis };\n const [values, indices] = ENGINE.runKernel(Unique, inputs, attrs);\n return { values, indices };\n}\nexport const unique = op({ unique_ });\n//# sourceMappingURL=unique.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { UnsortedSegmentSum } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assert, isInt } from '../util';\nimport { op } from './operation';\n/**\n * Computes the sum along segments of a `tf.Tensor`.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4]);\n * const segmentIds = tf.tensor1d([1, 2, 0, 1], 'int32');\n * const numSegments = 3;\n *\n * x.unsortedSegmentSum(segmentIds, numSegments).print()\n * //or tf.unsortedSegmentSum(x, segmentIds, numSegments)\n * ```\n * @param x The `tf.Tensor` that will be summed along its segments.\n * @param segmentIds A `tf.Tensor1D` whose rank is equal to the rank of `x`'s\n * dimension along the `axis`. Maps each element of `x` to a segment.\n * @param numSegments The number of distinct `segmentIds`.\n *\n * @doc {heading: 'Operations', subheading: 'Segment'}\n */\nfunction unsortedSegmentSum_(x, segmentIds, numSegments) {\n const $x = convertToTensor(x, 'x', 'unsortedSegmentSum');\n const $segmentIds = convertToTensor(segmentIds, 'segmentIds', 'unsortedSegmentSum', 'int32');\n assert(isInt(numSegments), () => 'numSegments must be of dtype int');\n const inputs = { x: $x, segmentIds: $segmentIds };\n const attrs = { numSegments };\n const forward = (backend, save) => {\n const res = backend.unsortedSegmentSum($x, $segmentIds, numSegments);\n save([$segmentIds]);\n return res;\n };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, UnsortedSegmentSum, attrs);\n}\nexport const unsortedSegmentSum = op({ unsortedSegmentSum_ });\n//# sourceMappingURL=unsorted_segment_sum.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Unpack } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { op } from './operation';\n/**\n * Unstacks a `tf.Tensor` of rank-`R` into a list of rank-`(R-1)` `tf.Tensor`s.\n *\n * ```js\n * const a = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * tf.unstack(a).forEach(tensor => tensor.print());\n * ```\n *\n * @param x A tensor object.\n * @param axis The axis to unstack along. 
Defaults to 0 (the first dim).\n *\n * @doc {heading: 'Tensors', subheading: 'Slicing and Joining'}\n */\nfunction unstack_(x, axis = 0) {\n const $x = convertToTensor(x, 'x', 'unstack');\n util.assert(axis >= -$x.shape.length && axis < $x.shape.length, () => `Axis = ${axis} is not in [-${$x.shape.length}, ${$x.shape.length})`);\n if (axis < 0) {\n axis += $x.shape.length;\n }\n const inputs = { value: $x };\n const attrs = { axis };\n const forward = (backend) => backend.unstack($x, axis);\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, Unpack, attrs);\n}\nexport const unstack = op({ unstack_ });\n//# sourceMappingURL=unstack.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\n/**\n * Creates a new variable with the provided initial value.\n * ```js\n * const x = tf.variable(tf.tensor([1, 2, 3]));\n * x.assign(tf.tensor([4, 5, 6]));\n *\n * x.print();\n * ```\n *\n * @param initialValue Initial value for the tensor.\n * @param trainable If true, optimizers are allowed to update it.\n * @param name Name of the variable. Defaults to a unique id.\n * @param dtype If set, initialValue will be converted to the given type.\n *\n * @doc {heading: 'Tensors', subheading: 'Creation'}\n */\nexport function variable(initialValue, trainable = true, name, dtype) {\n return ENGINE.makeVariable(initialValue, trainable, name, dtype);\n}\n//# sourceMappingURL=variable.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/** An implementation of the Where kernel shared between cpu and webgl */\nimport { buffer } from '../ops/buffer';\nexport function whereImpl(condShape, condVals) {\n const indices = [];\n for (let i = 0; i < condVals.length; i++) {\n if (condVals[i]) {\n indices.push(i);\n }\n }\n const inBuffer = buffer(condShape, 'int32');\n const out = buffer([indices.length, condShape.length], 'int32');\n for (let i = 0; i < indices.length; i++) {\n const loc = inBuffer.indexToLoc(indices[i]);\n const offset = i * condShape.length;\n out.values.set(loc, offset);\n }\n return out.toTensor();\n}\n//# sourceMappingURL=where_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { whereImpl } from '../backends/where_impl';\nimport { convertToTensor } from '../tensor_util_env';\n/**\n * Returns the coordinates of true elements of condition.\n *\n * The coordinates are returned in a 2-D tensor where the first dimension (rows)\n * represents the number of true elements, and the second dimension (columns)\n * represents the coordinates of the true elements. Keep in mind, the shape of\n * the output tensor can vary depending on how many true values there are in\n * input. Indices are output in row-major order. The resulting tensor has the\n * shape `[numTrueElems, condition.rank]`.\n *\n * This is analogous to calling the python `tf.where(cond)` without an x or y.\n *\n * ```js\n * const cond = tf.tensor1d([false, false, true], 'bool');\n * const result = await tf.whereAsync(cond);\n * result.print();\n * ```\n *\n * @doc {heading: 'Operations', subheading: 'Logical'}\n */\nasync function whereAsync_(condition) {\n const $condition = convertToTensor(condition, 'condition', 'whereAsync', 'bool');\n const vals = await $condition.data();\n const res = whereImpl($condition.shape, vals);\n if (condition !== $condition) {\n $condition.dispose();\n }\n return res;\n}\nexport const whereAsync = whereAsync_;\n//# sourceMappingURL=where_async.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { gather } from './gather';\nimport { reshape } from './reshape';\nimport { squeeze } from './squeeze';\nimport { whereAsync } from './where_async';\n/**\n * Apply boolean mask to tensor.\n *\n * ```js\n * const tensor = tf.tensor2d([1, 2, 3, 4, 5, 6], [3, 2]);\n * const mask = tf.tensor1d([1, 0, 1], 'bool');\n * const result = await tf.booleanMaskAsync(tensor, mask);\n * result.print();\n * ```\n *\n * @param tensor N-D tensor.\n * @param mask K-D boolean tensor, K <= N and K must be known statically.\n * @param axis A 0-D int Tensor representing the axis in tensor to mask from.\n * By default, axis is 0 which will mask from the first dimension.\n * Otherwise K + axis <= N.\n *\n * @doc {heading: 'Tensors', subheading: 'Slicing and Joining'}\n */\nasync function booleanMaskAsync_(tensor, mask, axis) {\n const $tensor = convertToTensor(tensor, 'tensor', 'boolMask');\n const $mask = convertToTensor(mask, 'mask', 'boolMask', 'bool');\n const axisFrom = axis == null ? 0 : axis;\n const maskDim = $mask.rank;\n const tensorShape = $tensor.shape;\n util.assert(maskDim > 0, () => 'mask cannot be scalar');\n util.assertShapesMatch(tensorShape.slice(axisFrom, axisFrom + maskDim), $mask.shape, `mask's shape must match the first K dimensions of tensor's shape,`);\n let leadingSize = 1;\n for (let i = axisFrom; i < axisFrom + maskDim; i++) {\n leadingSize *= tensorShape[i];\n }\n const targetTensorShape = tensorShape.slice(0, axisFrom)\n .concat([leadingSize], tensorShape.slice(axisFrom + maskDim));\n const reshapedTensor = reshape($tensor, targetTensorShape);\n const reshapedMask = reshape($mask, [-1]);\n const positivePositions = await whereAsync(reshapedMask);\n const indices = squeeze(positivePositions, [1]);\n const res = gather(reshapedTensor, indices, axisFrom);\n // Ensure no memory leak.\n if (tensor !== $tensor) {\n $tensor.dispose();\n }\n if (mask !== $mask) {\n $mask.dispose();\n }\n indices.dispose();\n reshapedTensor.dispose();\n reshapedMask.dispose();\n positivePositions.dispose();\n return res;\n}\nexport const booleanMaskAsync = booleanMaskAsync_;\n//# sourceMappingURL=boolean_mask.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { deprecationWarn } from '../globals';\nimport { convertToTensor } from '../tensor_util_env';\nimport { assertShapesMatch } from '../util';\nimport { equal } from './equal';\nimport { greater } from './greater';\nimport { greaterEqual } from './greater_equal';\nimport { less } from './less';\nimport { lessEqual } from './less_equal';\nimport { notEqual } from './not_equal';\nimport { op } from './operation';\n/**\n * @deprecated\n * Strict version of `tf.notEqual` that forces `a` and `b` to be of the same\n * shape.\n *\n * @param a The first input tensor.\n * @param b The second input tensor. Must have the same shape and dtype as\n * `a`.\n */\nfunction notEqualStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'notEqualStrict');\n const $b = convertToTensor(b, 'b', 'notEqualStrict');\n assertShapesMatch($a.shape, $b.shape, 'Error in notEqualStrict: ');\n return notEqual($a, $b);\n}\n/**\n * @deprecated\n * Strict version of `tf.less` that forces `a` and `b` to be of the same\n * shape.\n *\n * @param a The first input tensor.\n * @param b The second input tensor. 
Must have the same shape and dtype as\n * `a`.\n */\nfunction lessStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'lessStrict');\n const $b = convertToTensor(b, 'b', 'lessStrict');\n assertShapesMatch($a.shape, $b.shape, 'Error in lessStrict: ');\n return less($a, $b);\n}\nfunction equalStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'equalStrict');\n const $b = convertToTensor(b, 'b', 'equalStrict');\n assertShapesMatch($a.shape, $b.shape, 'Error in equalStrict: ');\n return equal($a, $b);\n}\nfunction lessEqualStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'lessEqualStrict');\n const $b = convertToTensor(b, 'b', 'lessEqualStrict');\n assertShapesMatch($a.shape, $b.shape, 'Error in lessEqualStrict: ');\n return lessEqual($a, $b);\n}\nfunction greaterStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'greaterStrict');\n const $b = convertToTensor(b, 'b', 'greaterStrict');\n assertShapesMatch($a.shape, $b.shape, 'Error in greaterStrict: ');\n return greater($a, $b);\n}\nfunction greaterEqualStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'greaterEqualStrict');\n const $b = convertToTensor(b, 'b', 'greaterEqualStrict');\n assertShapesMatch($a.shape, $b.shape, 'Error in greaterEqualStrict: ');\n return greaterEqual($a, $b);\n}\nexport const equalStrict = op({ equalStrict_ });\nexport const greaterEqualStrict = op({ greaterEqualStrict_ });\nexport const greaterStrict = op({ greaterStrict_ });\nexport const lessEqualStrict = op({ lessEqualStrict_ });\nexport const lessStrict = op({ lessStrict_ });\nexport const notEqualStrict = op({ notEqualStrict_ });\n//# sourceMappingURL=compare.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { deprecationWarn } from '../globals';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { add } from './add';\nimport { div } from './div';\nimport { maximum } from './maximum';\nimport { minimum } from './minimum';\nimport { mod } from './mod';\nimport { mul } from './mul';\nimport { op } from './operation';\nimport { pow } from './pow';\nimport { squaredDifference } from './squared_difference';\nimport { sub } from './sub';\n/**\n * @deprecated\n * Adds two `tf.Tensor`s element-wise, A + B.\n *\n * Inputs must be the same shape. 
For broadcasting support, use add() instead.\n *\n * @param a The first Tensor to add element-wise.\n * @param b The second Tensor to add element-wise.\n */\nfunction addStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'addStrict');\n const $b = convertToTensor(b, 'b', 'addStrict');\n util.assertShapesMatch($a.shape, $b.shape, 'Error in addStrict: ');\n return add($a, $b);\n}\n/**\n * @deprecated\n * Subtracts two `tf.Tensor`s element-wise, A - B. Inputs must\n * be the same shape.\n *\n * For broadcasting support, use `tf.sub` instead.\n *\n * @param a The first Tensor to subtract element-wise.\n * @param b The second Tensor to subtract element-wise.\n */\nfunction subStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'subStrict');\n const $b = convertToTensor(b, 'b', 'subStrict');\n util.assertShapesMatch($a.shape, $b.shape, 'Error in subStrict: ');\n return sub($a, $b);\n}\n/**\n * @deprecated\n * Computes the power of one `tf.Tensor` to another. Inputs must\n * be the same shape.\n *\n * For broadcasting support, use `tf.pow` instead.\n *\n * @param base The base tensor to pow element-wise.\n * @param exp The exponent tensor to pow element-wise.\n */\nfunction powStrict_(base, exp) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n util.assertShapesMatch(base.shape, exp.shape, 'Error in powStrict: ');\n return pow(base, exp);\n}\n/**\n * @deprecated\n * Multiplies two `tf.Tensor`s element-wise, A * B.\n *\n * Inputs must be the same shape. For broadcasting support, use `tf.mul`.\n *\n * @param a The first tensor to multiply.\n * @param b The first tensor to multiply. Must have the same\n * dtype as `a`.\n */\nfunction mulStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'mul');\n const $b = convertToTensor(b, 'b', 'mul');\n util.assertShapesMatch($a.shape, $b.shape, 'Error in multiplyStrict: ');\n return mul($a, $b);\n}\n/**\n * @deprecated\n * Divides two `tf.Tensor`s element-wise, A / B. Inputs must\n * be the same shape.\n *\n * @param a The first tensor as the numerator for element-wise division.\n * @param b The second tensor as the denominator for element-wise division.\n */\nfunction divStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'div');\n const $b = convertToTensor(b, 'b', 'div');\n util.assertShapesMatch($a.shape, $b.shape, 'Error in divideStrict: ');\n return div($a, $b);\n}\n/**\n * @deprecated\n * Returns the mod of a and b (`a < b ? a : b`) element-wise. Inputs must\n * be the same shape. For broadcasting support, use mod().\n *\n * @param a The first tensor.\n * @param b The second tensor. Must have the same dtype as `a`.\n */\nfunction modStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'modStrict');\n const $b = convertToTensor(b, 'b', 'modStrict');\n util.assertShapesMatch($a.shape, $b.shape, 'Error in modStrict: ');\n return mod($a, $b);\n}\n/**\n * @deprecated\n * Returns the min of a and b (`a < b ? a : b`) element-wise. Inputs must\n * be the same shape. 
For broadcasting support, use minimum().\n *\n * @param a The first tensor.\n * @param b The second tensor. Must have the same dtype as `a`.\n */\nfunction minimumStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'minimumStrict');\n const $b = convertToTensor(b, 'b', 'minimumStrict');\n util.assertShapesMatch($a.shape, $b.shape, 'Error in minimumStrict: ');\n return minimum($a, $b);\n}\n/**\n * @deprecated\n * Returns the max of a and b (`a > b ? a : b`) element-wise. Inputs must\n * be the same shape. For broadcasting support, use maximum().\n *\n * @param a The first tensor.\n * @param b The second tensor. Must have the same dtype as `a`.\n */\nfunction maximumStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'maximumStrict');\n const $b = convertToTensor(b, 'b', 'maximumStrict');\n util.assertShapesMatch($a.shape, $b.shape, 'Error in maximumStrict: ');\n return maximum($a, $b);\n}\n/**\n * @deprecated\n * Returns (a - b) * (a - b) element-wise.\n *\n * Inputs must be the same shape. For broadcasting support, use\n * `tf.squaredDifference` instead.\n *\n * @param a The first tensor.\n * @param b The second tensor. Must have the same type as `a`.\n */\nfunction squaredDifferenceStrict_(a, b) {\n deprecationWarn('strict variants of ops have been deprecated ' +\n 'and will be removed in future');\n const $a = convertToTensor(a, 'a', 'squaredDifferenceStrict');\n const $b = convertToTensor(b, 'b', 'squaredDifferenceStrict');\n util.assertShapesMatch($a.shape, $b.shape, 'Error in squaredDifferenceStrict: ');\n return squaredDifference($a, $b);\n}\nexport const addStrict = op({ addStrict_ });\nexport const divStrict = op({ divStrict_ });\nexport const maximumStrict = op({ maximumStrict_ });\nexport const minimumStrict = op({ minimumStrict_ });\nexport const modStrict = op({ modStrict_ });\nexport const mulStrict = op({ mulStrict_ });\nexport const powStrict = op({ powStrict_ });\nexport const squaredDifferenceStrict = op({ squaredDifferenceStrict_ });\nexport const subStrict = op({ subStrict_ });\n//# sourceMappingURL=binary_ops.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport { parseAxisParam } from '../util';\nimport { abs } from './abs';\nimport * as axis_util from './axis_util';\nimport { max } from './max';\nimport { min } from './min';\nimport { op } from './operation';\nimport { pow } from './pow';\nimport { reshape } from './reshape';\nimport { scalar } from './scalar';\nimport { sqrt } from './sqrt';\nimport { square } from './square';\nimport { sum } from './sum';\n/**\n * Computes the norm of scalar, vectors, and matrices.\n * This function can compute several different vector norms (the 1-norm, the\n * Euclidean or 2-norm, the inf-norm, and in general the p-norm for p > 0)\n * and matrix norms (Frobenius, 1-norm, and inf-norm).\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4]);\n *\n * x.norm().print(); // or tf.norm(x)\n * ```\n *\n * @param x The input array.\n * @param ord Optional. Order of the norm. Supported norm types are\n * following:\n *\n * | ord | norm for matrices | norm for vectors\n * |------------|---------------------------|---------------------\n * |'euclidean' |Frobenius norm |2-norm\n * |'fro' |Frobenius norm\t |\n * |Infinity |max(sum(abs(x), axis=1)) |max(abs(x))\n * |-Infinity |min(sum(abs(x), axis=1)) |min(abs(x))\n * |1 |max(sum(abs(x), axis=0)) |sum(abs(x))\n * |2 | |sum(abs(x)^2)^1/2*\n *\n * @param axis Optional. If axis is null (the default), the input is\n * considered a vector and a single vector norm is computed over the entire\n * set of values in the Tensor, i.e. norm(x, ord) is equivalent\n * to norm(x.reshape([-1]), ord). If axis is a integer, the input\n * is considered a batch of vectors, and axis determines the axis in x\n * over which to compute vector norms. If axis is a 2-tuple of integer it is\n * considered a batch of matrices and axis determines the axes in NDArray\n * over which to compute a matrix norm.\n * @param keepDims Optional. 
If true, the norm have the same dimensionality\n * as the input.\n *\n * @doc {heading: 'Operations', subheading: 'Matrices'}\n */\nfunction norm_(x, ord = 'euclidean', axis = null, keepDims = false) {\n x = convertToTensor(x, 'x', 'norm');\n const norm = normImpl(x, ord, axis);\n let keepDimsShape = norm.shape;\n if (keepDims) {\n const axes = parseAxisParam(axis, x.shape);\n keepDimsShape = axis_util.expandShapeToKeepDim(norm.shape, axes);\n }\n return reshape(norm, keepDimsShape);\n}\nfunction normImpl(x, p, axis = null) {\n if (x.rank === 0) {\n return abs(x);\n }\n // consider vector when no axis is specified\n if (x.rank !== 1 && axis === null) {\n return normImpl(reshape(x, [-1]), p, axis);\n }\n // vector\n if (x.rank === 1 || typeof axis === 'number' ||\n Array.isArray(axis) && axis.length === 1) {\n if (p === 1) {\n return sum(abs(x), axis);\n }\n if (p === Infinity) {\n return max(abs(x), axis);\n }\n if (p === -Infinity) {\n return min(abs(x), axis);\n }\n if (p === 'euclidean' || p === 2) {\n // norm(x, 2) = sum(abs(xi) ^ 2) ^ 1/2\n return sqrt(sum(pow(abs(x), scalar(2, 'int32')), axis));\n }\n throw new Error(`Error in norm: invalid ord value: ${p}`);\n }\n // matrix (assumption axis[0] < axis[1])\n if (Array.isArray(axis) && axis.length === 2) {\n if (p === 1) {\n return max(sum(abs(x), axis[0]), axis[1] - 1);\n }\n if (p === Infinity) {\n return max(sum(abs(x), axis[1]), axis[0]);\n }\n if (p === -Infinity) {\n return min(sum(abs(x), axis[1]), axis[0]);\n }\n if (p === 'fro' || p === 'euclidean') {\n // norm(x) = sqrt(sum(pow(x, 2)))\n return sqrt(sum(square(x), axis));\n }\n throw new Error(`Error in norm: invalid ord value: ${p}`);\n }\n throw new Error(`Error in norm: invalid axis: ${axis}`);\n}\nexport const norm = op({ norm_ });\n//# sourceMappingURL=norm.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { assertTypesMatch } from '../tensor_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { add } from './add';\nimport { div } from './div';\nimport { mul } from './mul';\nimport { op } from './operation';\nimport { pow } from './pow';\nimport { scalar } from './scalar';\nimport { sub } from './sub';\n/**\n * Compute the moving average of a variable.\n *\n * Without zeroDebias, the moving average operation is defined by:\n * `v += delta`\n * where\n * `delta = (1 - decay) * (x - v)`\n *\n * With zeroDebias (default), the `delta` term is scaled to debias the\n * effect of the (assumed) zero-initialization of `v`.\n * `delta /= (1 - decay ^ step)`\n *\n * For more details on the zero-debiasing algorithm, see:\n * https://arxiv.org/abs/1412.6980\n *\n * Note that this function is completely stateless and does not keep track of\n * step count. 
The step count needs to be maintained by the caller and passed\n * in as `step`.\n *\n * @param v The current moving average value.\n * @param x New input value, must have the same shape and dtype as `v`.\n * @param decay The decay factor. Typical values are 0.95 and 0.99.\n * @param step Step count.\n * @param zeroDebias: Whether zeroDebias is to be performed (default: `true`).\n * @returns The new moving average value.\n *\n * @doc {heading: 'Operations', subheading: 'Moving Average'}\n */\nfunction movingAverage_(v, x, decay, step, zeroDebias = true) {\n const $v = convertToTensor(v, 'v', 'movingAverage');\n const $x = convertToTensor(x, 'x', 'movingAverage');\n const $decay = convertToTensor(decay, 'decay', 'movingAverage');\n assertTypesMatch($v, $x);\n util.assert(util.arraysEqual($v.shape, $x.shape), () => 'Shape mismatch in v and x');\n const one = scalar(1);\n const oneMinusDecay = sub(one, $decay);\n let update = mul(sub($x, $v), oneMinusDecay);\n if (zeroDebias) {\n util.assert(step != null, () => 'When using zeroDebias: true, step is required.');\n const $step = convertToTensor(step, 'step', 'movingAverage');\n update = div(update, sub(one, pow($decay, $step)));\n }\n return add($v, update);\n}\nexport const movingAverage = op({ movingAverage_ });\n//# sourceMappingURL=moving_average.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { ScatterNd } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\nimport * as scatter_nd_util from './scatter_nd_util';\n/**\n * Creates a new tensor by applying sparse updates to individual\n * values or slices within a zero tensor of the given shape tensor according to\n * indices. 
This operator is the inverse of the `tf.gatherND` operator which\n * extracts values or slices from a given tensor.\n *\n * ```js\n * const indices = tf.tensor2d([4, 3, 1, 7], [4, 1], 'int32');\n * const updates = tf.tensor1d([9, 10, 11, 12]);\n * const shape = [8];\n * tf.scatterND(indices, updates, shape).print() //[0, 11, 0, 10, 9, 0, 0, 12]\n * ```\n *\n * @param indices The tensor contains the indices into the output tensor.\n * @param updates The tensor contains the value for the indices.\n * @param shape: The shape of the output tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Slicing and Joining'}\n */\nfunction scatterND_(indices, updates, shape) {\n const $indices = convertToTensor(indices, 'indices', 'scatterND', 'int32');\n const $updates = convertToTensor(updates, 'updates', 'scatterND');\n scatter_nd_util.validateInput($updates, $indices, shape);\n const forward = (backend) => {\n return backend.scatterND($indices, $updates, shape);\n };\n const inputs = { indices: $indices, updates: $updates };\n const attrs = { shape };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, ScatterNd, attrs);\n}\nexport const scatterND = op({ scatterND_ });\n//# sourceMappingURL=scatter_nd.js.map", "/**\n * Validate sparseToDense inputs.\n *\n * @param sparseIndices A 0-D, 1-D, or 2-D Tensor of type int32.\n * sparseIndices[i] contains the complete index where sparseValues[i] will be\n * placed.\n * @param sparseValues A 0-D or 1-D Tensor. Values\n * corresponding to each row of sparseIndices, or a scalar value to be used for\n * all sparse indices.\n * @param outputShape number[]. Shape of the dense output tensor.\n * @param validateIndices boolean. indice validation is not supported, error\n * will be thrown if it is set.\n */\nexport function validateInput(sparseIndices, sparseValues, outputShape, defaultValues) {\n if (sparseIndices.dtype !== 'int32') {\n throw new Error('tf.sparseToDense() expects the indices to be int32 type,' +\n ` but the dtype was ${sparseIndices.dtype}.`);\n }\n if (sparseIndices.rank > 2) {\n throw new Error('sparseIndices should be a scalar, vector, or matrix,' +\n ` but got shape ${sparseIndices.shape}.`);\n }\n const numElems = sparseIndices.rank > 0 ? sparseIndices.shape[0] : 1;\n const numDims = sparseIndices.rank > 1 ? sparseIndices.shape[1] : 1;\n if (outputShape.length !== numDims) {\n throw new Error('outputShape has incorrect number of elements:,' +\n ` ${outputShape.length}, should be: ${numDims}.`);\n }\n const numValues = sparseValues.size;\n if (!(sparseValues.rank === 0 ||\n sparseValues.rank === 1 && numValues === numElems)) {\n throw new Error('sparseValues has incorrect shape ' +\n `${sparseValues.shape}, should be [] or [${numElems}]`);\n }\n if (sparseValues.dtype !== defaultValues.dtype) {\n throw new Error('sparseValues.dtype must match defaultValues.dtype');\n }\n}\n//# sourceMappingURL=sparse_to_dense_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { SparseToDense } from '../kernel_names';\nimport * as sparse_to_dense from '../ops/sparse_to_dense_util';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Converts a sparse representation into a dense tensor.\n *\n * Builds an array dense with shape outputShape such that:\n *\n * // If sparseIndices is scalar\n * dense[i] = (i == sparseIndices ? sparseValues : defaultValue)\n *\n * // If sparseIndices is a vector, then for each i\n * dense[sparseIndices[i]] = sparseValues[i]\n *\n * // If sparseIndices is an n by d matrix, then for each i in [0, n)\n * dense[sparseIndices[i][0], ..., sparseIndices[i][d-1]] = sparseValues[i]\n * All other values in dense are set to defaultValue. If sparseValues is a\n * scalar, all sparse indices are set to this single value.\n *\n * If indices are repeated the final value is summed over all values for those\n * indices.\n *\n * ```js\n * const indices = tf.tensor1d([4, 5, 6, 1, 2, 3], 'int32');\n * const values = tf.tensor1d([10, 11, 12, 13, 14, 15], 'float32');\n * const shape = [8];\n * tf.sparseToDense(indices, values, shape).print();\n * ```\n *\n * @param sparseIndices A 0-D, 1-D, or 2-D Tensor of type int32.\n * sparseIndices[i] contains the complete index where sparseValues[i] will be\n * placed.\n * @param sparseValues A 0-D or 1-D Tensor. Values\n * corresponding to each row of sparseIndices, or a scalar value to be used for\n * all sparse indices.\n * @param outputShape Shape of the dense output tensor. the type is inferred.\n * @param defaultValue Scalar. Value to set for indices not specified in\n * sparseIndices. Defaults to zero.\n *\n * @doc {heading: 'Operations', subheading: 'Normalization'}\n */\nfunction sparseToDense_(sparseIndices, sparseValues, outputShape, defaultValue = 0) {\n const $sparseIndices = convertToTensor(sparseIndices, 'sparseIndices', 'sparseToDense', 'int32');\n const $sparseValues = convertToTensor(sparseValues, 'sparseValues', 'sparseToDense');\n const $defaultValue = convertToTensor(defaultValue, 'defaultValue', 'sparseToDense', $sparseValues.dtype);\n sparse_to_dense.validateInput($sparseIndices, $sparseValues, outputShape, $defaultValue);\n const inputs = {\n sparseIndices: $sparseIndices,\n sparseValues: $sparseValues,\n defaultValue: $defaultValue\n };\n const attrs = { outputShape };\n return ENGINE.runKernelFunc(backend => backend.sparseToDense($sparseIndices, $sparseValues, outputShape, $defaultValue), inputs, null /* grad */, SparseToDense, attrs);\n}\nexport const sparseToDense = op({ sparseToDense_ });\n//# sourceMappingURL=sparse_to_dense.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { GatherNd } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport { op } from './operation';\n/**\n * Gather slices from input tensor into a Tensor with shape specified by\n * `indices`.\n *\n * `indices` is an K-dimensional integer tensor, best thought of as a\n * (K-1)-dimensional tensor of indices into input, where each element defines a\n * slice of input:\n * output[\\\\(i_0, ..., i_{K-2}\\\\)] = input[indices[\\\\(i_0, ..., i_{K-2}\\\\)]]\n *\n * Whereas in `tf.gather`, `indices` defines slices into the first dimension of\n * input, in `tf.gatherND`, `indices` defines slices into the first N dimensions\n * of input, where N = indices.shape[-1].\n *\n * The last dimension of indices can be at most the rank of input:\n * indices.shape[-1] <= input.rank\n *\n * The last dimension of `indices` corresponds to elements\n * (if indices.shape[-1] == input.rank) or slices\n * (if indices.shape[-1] < input.rank) along dimension indices.shape[-1] of\n * input.\n * The output tensor has shape\n * indices.shape[:-1] + input.shape[indices.shape[-1]:]\n *\n * Note that on CPU, if an out of bound index is found, an error is returned. On\n * GPU, if an out of bound index is found, a 0 is stored in the corresponding\n * output value.\n *\n * ```js\n * const indices = tf.tensor2d([0, 1, 1, 0], [2,2], 'int32');\n * const input = tf.tensor2d([9, 10, 11, 12], [2, 2]);\n * tf.gatherND(input, indices).print() // [10, 11]\n * ```\n *\n * @param x The tensor from which to gather values.\n * @param indices Index tensor, must be of type int32.\n *\n * @doc {heading: 'Operations', subheading: 'Slicing and Joining'}\n */\nfunction gatherND_(x, indices) {\n const $indices = convertToTensor(indices, 'indices', 'gatherND', 'int32');\n const $x = convertToTensor(x, 'x', 'gatherND');\n const forward = (backend) => {\n return backend.gatherND($x, $indices);\n };\n const inputs = { params: $x, indices: $indices };\n return ENGINE.runKernelFunc(forward, inputs, null /* gradient */, GatherNd);\n}\nexport const gatherND = op({ gatherND_ });\n//# sourceMappingURL=gather_nd.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as util from '../util';\n/**\n * Normalize noise shape based on provided tensor and noise shape.\n *\n * @param x Tensor.\n * @param noiseShape The shape for the randomly generated keep/drop flags, as\n * an array of numbers. Optional.\n * @returns Normalized noise shape.\n */\nexport function getNoiseShape(x, noiseShape) {\n if (noiseShape == null) {\n return x.shape.slice();\n }\n if (util.arraysEqual(x.shape, noiseShape)) {\n return noiseShape;\n }\n if (x.shape.length === noiseShape.length) {\n const newDimension = [];\n for (let i = 0; i < x.shape.length; i++) {\n if (noiseShape[i] == null && x.shape[i] != null) {\n newDimension.push(x.shape[i]);\n }\n else {\n newDimension.push(noiseShape[i]);\n }\n }\n return newDimension;\n }\n return noiseShape;\n}\n//# sourceMappingURL=dropout_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Tensor } from '../tensor';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport { add } from './add';\nimport { div } from './div';\nimport { getNoiseShape } from './dropout_util';\nimport { floor } from './floor';\nimport { mul } from './mul';\nimport { op } from './operation';\nimport { randomUniform } from './random_uniform';\n/**\n * Computes dropout.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 2, 1]);\n * const rate = 0.75;\n * const output = tf.dropout(x, rate);\n * output.print();\n * ```\n *\n * @param x A floating point Tensor or TensorLike.\n * @param rate A float in the range [0, 1). The probability that each element\n * of x is discarded.\n * @param noiseShape An array of numbers of type int32, representing the\n * shape for randomly generated keep/drop flags. If the noiseShape has null\n * value, it will be automatically replaced with the x's relative dimension\n * size. Optional.\n * @param seed Used to create random seeds. 
Optional.\n * @returns A Tensor of the same shape of x.\n *\n * @doc {heading: 'Operations', subheading: 'Dropout'}\n */\nfunction dropout_(x, rate, noiseShape, seed) {\n const $x = convertToTensor(x, 'x', 'dropout');\n util.assert($x.dtype === 'float32', () => `x has to be a floating point tensor since it's going to be ` +\n `scaled, but got a ${$x.dtype} tensor instead.`);\n util.assert(rate >= 0 && rate < 1, () => `rate must be a float in the range [0, 1), but got ${rate}.`);\n if (rate === 0) {\n return x instanceof Tensor ? $x.clone() : $x;\n }\n const $noiseShape = getNoiseShape($x, noiseShape);\n const keepProb = 1 - rate;\n const multiplier = div(floor(add(randomUniform($noiseShape, 0, 1, 'float32', seed), keepProb)), keepProb);\n return mul($x, multiplier);\n}\nexport const dropout = op({ dropout_ });\n//# sourceMappingURL=dropout.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { tensor1d } from './tensor1d';\nexport function enclosingPowerOfTwo(value) {\n // Return 2**N for integer N such that 2**N >= value.\n return Math.floor(Math.pow(2, Math.ceil(Math.log(value) / Math.log(2.0))));\n}\nexport function cosineWindow(windowLength, a, b) {\n const even = 1 - windowLength % 2;\n const newValues = new Float32Array(windowLength);\n for (let i = 0; i < windowLength; ++i) {\n const cosArg = (2.0 * Math.PI * i) / (windowLength + even - 1);\n newValues[i] = a - b * Math.cos(cosArg);\n }\n return tensor1d(newValues, 'float32');\n}\n//# sourceMappingURL=signal_ops_util.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../tensor_util_env';\nimport { assert, assertShapesMatch, getTypedArrayFromDType } from '../util';\nimport { tensor } from './tensor';\n/**\n * Returns whether the targets are in the top K predictions.\n *\n * ```js\n * const predictions = tf.tensor2d([[20, 10, 40, 30], [30, 50, -20, 10]]);\n * const targets = tf.tensor1d([2, 0]);\n * const precision = await tf.inTopKAsync(predictions, targets);\n * precision.print();\n * ```\n * @param predictions 2-D or higher `tf.Tensor` with last dimension being\n * at least `k`.\n * @param targets 1-D or higher `tf.Tensor`.\n * @param k Optional Number of top elements to look at for computing precision,\n * default to 1.\n *\n * @doc {heading: 'Operations', subheading: 'Evaluation'}\n */\nasync function inTopKAsync_(predictions, targets, k = 1) {\n const $predictions = convertToTensor(predictions, 'predictions', 'inTopK');\n const $targets = convertToTensor(targets, 'targets', 'inTopK');\n assert($predictions.rank > 1, () => 'inTopK() expects the predictions to be of rank 2 or higher, ' +\n `but got ${$predictions.rank}`);\n assert($predictions.rank - 1 === $targets.rank, () => `predictions rank should be 1 larger than ` +\n `targets rank, but got predictions rank ` +\n `${$predictions.rank} and targets rank ${$targets.rank}`);\n assertShapesMatch($predictions.shape.slice(0, $predictions.shape.length - 1), $targets.shape, `predictions's shape should be align with the targets' shape, ` +\n 'except the last dimension.');\n const lastDim = $predictions.shape[$predictions.shape.length - 1];\n assert(k > 0 && k <= lastDim, () => `'k' passed to inTopK() must be > 0 && <= the predictions last ` +\n `dimension (${lastDim}), but got ${k}`);\n const predictionsVals = await $predictions.data();\n const targetsVals = await $targets.data();\n // Reshape predictionsVals into a 2d tensor [batch, lastDim]\n // and look up topK along lastDim.\n const [batch, size] = [predictionsVals.length / lastDim, lastDim];\n const precision = getTypedArrayFromDType('bool', batch);\n for (let b = 0; b < batch; b++) {\n const offset = b * size;\n const vals = predictionsVals.subarray(offset, offset + size);\n const valAndInd = [];\n for (let i = 0; i < vals.length; i++) {\n valAndInd.push({ value: vals[i], index: i });\n }\n valAndInd.sort((a, b) => b.value - a.value);\n precision[b] = 0;\n for (let i = 0; i < k; i++) {\n if (valAndInd[i].index === targetsVals[b]) {\n precision[b] = 1;\n break;\n }\n }\n }\n if (predictions !== $predictions) {\n $predictions.dispose();\n }\n if (targets !== $targets) {\n $targets.dispose();\n }\n // Output precision has the same shape as targets.\n return tensor(precision, $targets.shape, 'bool');\n}\nexport const inTopKAsync = inTopKAsync_;\n//# sourceMappingURL=in_top_k.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Conv2DBackpropFilter } from '../kernel_names';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the derivative of the filter of a 2D convolution.\n *\n * @param x The input tensor, of rank 4 or rank 3 of shape\n * [batch, height, width, inChannels]. If rank 3, batch of 1 is assumed.\n * @param dy The dy image, of rank 4 or rank 3, of shape\n * [batch, height, width, outDepth]. If rank 3, batch of 1 is assumed.\n * @param filterShape The shape of the filter, length 4,\n * [filterHeight, filterWidth, inDepth, outDepth].\n * @param strides The strides of the convolution: [strideHeight,\n * strideWidth].\n * @param pad A string from: 'same', 'valid'. The type of padding algorithm\n * used in the forward prop of the op.\n * @param dataFormat: An optional string from: \"NHWC\", \"NCHW\". Defaults to\n * \"NHWC\". Specify the data format of the input and output data. With the\n * default format \"NHWC\", the data is stored in the order of: [batch,\n * height, width, channels].\n * @param dimRoundingMode A string from: 'ceil', 'round', 'floor'. The\n * rounding mode used when computing output dimensions if pad is a\n * number. If none is provided, it will not round and error if the output\n * is of fractional size.\n */\nfunction conv2DBackpropFilter_(x, dy, filterShape, strides, pad, dataFormat = 'NHWC', dimRoundingMode) {\n let x4D = x;\n if (x.rank === 3) {\n x4D = reshape(x, [1, x.shape[0], x.shape[1], x.shape[2]]);\n }\n let dy4D = dy;\n if (dy4D.rank === 3) {\n dy4D = reshape(dy, [1, dy.shape[0], dy.shape[1], dy.shape[2]]);\n }\n util.assert(x4D.rank === 4, () => `Error in conv2dDerFilter: input must be rank 4, but got shape ` +\n `${x4D.shape}.`);\n util.assert(dy4D.rank === 4, () => `Error in conv2dDerFilter: dy must be rank 4, but got shape ` +\n `${dy4D.shape}.`);\n util.assert(filterShape.length === 4, () => `Error in conv2dDerFilter: filterShape must be length 4, but got ` +\n `${filterShape}.`);\n const inDepth = dataFormat === 'NHWC' ? x4D.shape[3] : x4D.shape[1];\n const outDepth = dataFormat === 'NHWC' ? 
dy4D.shape[3] : dy4D.shape[1];\n util.assert(inDepth === filterShape[2], () => `Error in conv2dDerFilter: depth of input ${inDepth}) must ` +\n `match input depth in filter (${filterShape[2]}.`);\n util.assert(outDepth === filterShape[3], () => `Error in conv2dDerFilter: depth of dy (${outDepth}) must ` +\n `match output depth for filter (${filterShape[3]}).`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in conv2dDerFilter: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const forward = backend => {\n const dilations = 1;\n const $dataFormat = conv_util.convertConv2DDataFormat(dataFormat);\n const convInfo = conv_util.computeConv2DInfo(x4D.shape, filterShape, strides, dilations, pad, dimRoundingMode, false, $dataFormat);\n return backend.conv2dDerFilter(x4D, dy4D, convInfo);\n };\n const inputs = { x: x4D, dy: dy4D };\n const attrs = { strides, pad, dataFormat, dimRoundingMode, filterShape };\n return ENGINE.runKernelFunc(forward, inputs, null, Conv2DBackpropFilter, attrs);\n}\nexport const conv2DBackpropFilter = op({ conv2DBackpropFilter_ });\n//# sourceMappingURL=conv2d_backprop_filter.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as broadcast_util from './broadcast_util';\nimport { elu } from './elu';\nimport { mul } from './mul';\nimport { prelu } from './prelu';\nimport { relu } from './relu';\nimport { relu6 } from './relu6';\nimport { reshape } from './reshape';\nimport { step } from './step';\nimport { sum } from './sum';\n// Returns gradient for fused activation.\nexport function getFusedDyActivation(dy, y, activation) {\n if (activation == null || activation === 'linear') {\n return dy;\n }\n if (activation === 'relu') {\n return mul(dy, step(y));\n }\n throw new Error(`Cannot compute gradient for fused activation ${activation}.`);\n}\n// Returns gradient for fused bias.\nexport function getFusedBiasGradient(bias, dyActivation) {\n let res = dyActivation;\n const reduceAxes = broadcast_util.getReductionAxes(bias.shape, dyActivation.shape);\n if (reduceAxes.length > 0) {\n res = sum(res, reduceAxes);\n }\n return reshape(res, bias.shape);\n}\nexport function applyActivation(x, activation, preluActivationWeights) {\n if (activation === 'linear') {\n return x;\n }\n else if (activation === 'relu') {\n return relu(x);\n }\n else if (activation === 'elu') {\n return elu(x);\n }\n else if (activation === 'relu6') {\n return relu6(x);\n }\n else if (activation === 'prelu') {\n return prelu(x, preluActivationWeights);\n }\n throw new Error(`Unknown fused activation ${activation}.`);\n}\n// Whether we should call fused ops.\nexport const shouldFuse = (gradientDepth, activation) => {\n const gradientMode = gradientDepth > 0;\n return !gradientMode || activation === 'linear';\n};\n//# 
sourceMappingURL=fused_util.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { customGrad } from '../../gradients';\nimport { FusedConv2D } from '../../kernel_names';\nimport { makeTypesMatch } from '../../tensor_util';\nimport { convertToTensor } from '../../tensor_util_env';\nimport * as util from '../../util';\nimport { add } from '../add';\nimport * as broadcast_util from '../broadcast_util';\nimport { conv2d as unfusedConv2d } from '../conv2d';\nimport { conv2DBackpropFilter } from '../conv2d_backprop_filter';\nimport { conv2DBackpropInput } from '../conv2d_backprop_input';\nimport * as conv_util from '../conv_util';\nimport { applyActivation, getFusedBiasGradient, getFusedDyActivation, shouldFuse } from '../fused_util';\nimport { op } from '../operation';\nimport { reshape } from '../reshape';\n/**\n * Computes a 2D convolution over the input x, optionally fused with adding a\n * bias and applying an activation.\n *\n * ```js\n * const inputDepth = 2;\n * const inShape = [2, 2, 2, inputDepth];\n * const outputDepth = 2;\n * const fSize = 1;\n * const pad = 0;\n * const strides = 1;\n *\n * const x = tf.tensor4d( [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,\n * 16], inShape);\n * const w = tf.tensor4d([-1, 1, -2, 0.5], [fSize, fSize, inputDepth,\n * outputDepth]);\n *\n * tf.fused.conv2d({ x, filter: w, strides, pad, dataFormat: 'NHWC',\n * dilations: [1, 1], bias: tf.scalar(5), activation: 'relu' }).print();\n * ```\n *\n * @param obj An object with the following properties:\n * @param x The input tensor, of rank 4 or rank 3, of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is\n * assumed.\n * @param filter The filter, rank 4, of shape\n * `[filterHeight, filterWidth, inDepth, outDepth]`.\n * @param strides The strides of the convolution: `[strideHeight,\n * strideWidth]`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid` output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dataFormat An optional string from: \"NHWC\", \"NCHW\". Defaults to\n * \"NHWC\". Specify the data format of the input and output data. With the\n * default format \"NHWC\", the data is stored in the order of: [batch,\n * height, width, channels]. Only \"NHWC\" is currently supported.\n * @param dilations The dilation rates: `[dilationHeight, dilationWidth]`\n * in which we sample input values across the height and width dimensions\n * in atrous convolution. Defaults to `[1, 1]`. If `dilations` is a single\n * number, then `dilationHeight == dilationWidth`. 
If it is greater than\n * 1, then all values of `strides` must be 1.\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. If none is provided, it will not round\n * and error if the output is of fractional size.\n * @param bias Tensor to be added to the result.\n * @param activation Name of activation kernel (defaults to `linear`) to be\n * applied\n * after biasAdd.\n * @param preluActivationWeights Tensor of prelu weights to be applied as part\n * of a `prelu` activation, typically the same shape as `x`.\n */\nfunction fusedConv2d_({ x, filter, strides, pad, dataFormat = 'NHWC', dilations = [1, 1], dimRoundingMode, bias, activation = 'linear', preluActivationWeights }) {\n activation = activation || 'linear';\n if (shouldFuse(ENGINE.state.gradientDepth, activation) === false) {\n let result = unfusedConv2d(x, filter, strides, pad, dataFormat, dilations, dimRoundingMode);\n if (bias != null) {\n result = add(result, bias);\n }\n return applyActivation(result, activation, preluActivationWeights);\n }\n const $x = convertToTensor(x, 'x', 'conv2d');\n const $filter = convertToTensor(filter, 'filter', 'conv2d');\n let x4D = $x;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n reshapedTo4D = true;\n x4D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2]]);\n }\n util.assert(x4D.rank === 4, () => `Error in fused conv2d: input must be rank 4, but got rank ` +\n `${x4D.rank}.`);\n util.assert($filter.rank === 4, () => `Error in fused conv2d: filter must be rank 4, but got rank ` +\n `${$filter.rank}.`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in fused conv2d: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n util.assert(x4D.shape[3] === $filter.shape[2], () => `Error in conv2d: depth of input (${x4D.shape[3]}) must match ` +\n `input depth for filter ${$filter.shape[2]}.`);\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in conv2D: Either strides or dilations must be 1. ' +\n `Got strides ${strides} and dilations '${dilations}'`);\n util.assert(dataFormat === 'NHWC', () => `Error in conv2d: got dataFormat of ${dataFormat} but only NHWC is currently supported.`);\n const convInfo = conv_util.computeConv2DInfo(x4D.shape, $filter.shape, strides, dilations, pad, dimRoundingMode);\n let $bias;\n if (bias != null) {\n $bias = convertToTensor(bias, 'bias', 'fused conv2d');\n [$bias] = makeTypesMatch($bias, $x);\n broadcast_util.assertAndGetBroadcastShape(convInfo.outShape, $bias.shape);\n }\n let $preluActivationWeights;\n if (preluActivationWeights != null) {\n $preluActivationWeights = convertToTensor(preluActivationWeights, 'prelu weights', 'fused conv2d');\n }\n const grad = (dy, saved) => {\n const [$filter, x4D, y, $bias] = saved;\n const dyActivation = getFusedDyActivation(dy, y, activation);\n util.assert(conv_util.tupleValuesAreOne(dilations), () => 'Error in gradient of fused conv2D: ' +\n `dilation rates greater than 1 ` +\n `are not yet supported in gradients. 
Got dilations '${dilations}'`);\n const xDer = conv2DBackpropInput(x4D.shape, dyActivation, $filter, strides, pad);\n const filterDer = conv2DBackpropFilter(x4D, dyActivation, $filter.shape, strides, pad);\n const der = [xDer, filterDer];\n if ($bias != null) {\n const biasDer = getFusedBiasGradient($bias, dyActivation);\n der.push(biasDer);\n }\n return der;\n };\n const forward = (backend) => {\n const res = backend.fusedConv2d({\n input: x4D,\n filter: $filter,\n convInfo,\n bias: $bias,\n activation,\n preluActivationWeights: $preluActivationWeights\n });\n return res;\n };\n const inputs = {\n x: x4D,\n filter: $filter,\n bias: $bias,\n preluActivationWeights: $preluActivationWeights\n };\n const attrs = { strides, pad, dataFormat, dilations, dimRoundingMode, activation };\n // Depending on the the params passed in we will have different number of\n // inputs and thus a a different number of elements in the gradient.\n if (bias == null) {\n const customOp = customGrad((x4D, filter, save) => {\n let res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, FusedConv2D, attrs);\n save([filter, x4D, res]);\n if (reshapedTo4D) {\n res = reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return { value: res, gradFunc: grad };\n });\n return customOp(x4D, $filter);\n }\n else {\n const customOpWithBias = customGrad((x4D, filter, bias, save) => {\n let res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, FusedConv2D, attrs);\n save([filter, x4D, res, bias]);\n if (reshapedTo4D) {\n res = reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return { value: res, gradFunc: grad };\n });\n return customOpWithBias(x4D, $filter, $bias);\n }\n}\nexport const conv2d = op({ fusedConv2d_ });\n//# sourceMappingURL=conv2d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { DepthwiseConv2dNativeBackpropFilter } from '../kernel_names';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nfunction depthwiseConv2dNativeBackpropFilter_(x, dy, filterShape, strides, pad, dilations = [1, 1], dimRoundingMode) {\n let x4D = x;\n if (x.rank === 3) {\n x4D = reshape(x, [1, x.shape[0], x.shape[1], x.shape[2]]);\n }\n let dy4D = dy;\n if (dy4D.rank === 3) {\n dy4D = reshape(dy, [1, dy.shape[0], dy.shape[1], dy.shape[2]]);\n }\n const forward = backend => {\n const convInfo = conv_util.computeConv2DInfo(x.shape, filterShape, strides, dilations, pad, dimRoundingMode, true /* depthwise */);\n return backend.depthwiseConv2DDerFilter(x4D, dy4D, convInfo);\n };\n const inputs = { x: x4D, dy: dy4D };\n const attrs = { strides, pad, dimRoundingMode, dilations, filterShape };\n return ENGINE.runKernelFunc(forward, inputs, null, DepthwiseConv2dNativeBackpropFilter, attrs);\n}\nexport const depthwiseConv2dNativeBackpropFilter = op({ depthwiseConv2dNativeBackpropFilter_ });\n//# sourceMappingURL=depthwise_conv2d_native_backprop_filter.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { DepthwiseConv2dNativeBackpropInput } from '../kernel_names';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\nfunction depthwiseConv2dNativeBackpropInput_(xShape, dy, filter, strides, pad, dilations = [1, 1], dimRoundingMode) {\n let dy4D = dy;\n let reshapedTo4D = false;\n if (dy.rank === 3) {\n reshapedTo4D = true;\n dy4D = reshape(dy, [1, dy.shape[0], dy.shape[1], dy.shape[2]]);\n }\n const forward = backend => {\n const convInfo = conv_util.computeConv2DInfo(xShape, filter.shape, strides, dilations, pad, dimRoundingMode, true /* depthwise */);\n return backend.depthwiseConv2DDerInput(dy4D, filter, convInfo);\n };\n const inputs = { dy: dy4D, filter };\n const attrs = { strides, pad, dimRoundingMode, dilations, inputShape: xShape };\n const res = ENGINE.runKernelFunc(forward, inputs, null, DepthwiseConv2dNativeBackpropInput, attrs);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const depthwiseConv2dNativeBackpropInput = op({ depthwiseConv2dNativeBackpropInput_ });\n//# sourceMappingURL=depthwise_conv2d_native_backprop_input.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { customGrad } from '../../gradients';\nimport { FusedDepthwiseConv2D } from '../../kernel_names';\nimport { makeTypesMatch } from '../../tensor_util';\nimport { convertToTensor } from '../../tensor_util_env';\nimport * as util from '../../util';\nimport { add } from '../add';\nimport * as broadcast_util from '../broadcast_util';\nimport * as conv_util from '../conv_util';\nimport { depthwiseConv2d as unfusedDepthwiseConv2d } from '../depthwise_conv2d';\nimport { depthwiseConv2dNativeBackpropFilter } from '../depthwise_conv2d_native_backprop_filter';\nimport { depthwiseConv2dNativeBackpropInput } from '../depthwise_conv2d_native_backprop_input';\nimport { applyActivation, getFusedBiasGradient, getFusedDyActivation, shouldFuse } from '../fused_util';\nimport { op } from '../operation';\nimport { reshape } from '../reshape';\n/**\n * Computes depthwise 2D convolution, optionally fused with adding a\n * bias and applying an activation.\n *\n * Given a 4D `input` array and a `filter` array of shape\n * `[filterHeight, filterWidth, inChannels, channelMultiplier]` containing\n * `inChannels` convolutional filters of depth 1, this op applies a\n * different filter to each input channel (expanding from 1 channel to\n * `channelMultiplier` channels for each), then concatenates the results\n * together. The output has `inChannels * channelMultiplier` channels.\n *\n * See\n * [https://www.tensorflow.org/api_docs/python/tf/nn/depthwise_conv2d](\n * https://www.tensorflow.org/api_docs/python/tf/nn/depthwise_conv2d)\n * for more details.\n *\n * @param obj An object with the following properties:\n * @param x The input tensor, of rank 4 or rank 3, of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is\n * assumed.\n * @param filter The filter tensor, rank 4, of shape\n * `[filterHeight, filterWidth, inChannels, channelMultiplier]`.\n * @param strides The strides of the convolution: `[strideHeight,\n * strideWidth]`. If strides is a single number, then `strideHeight ==\n * strideWidth`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dilations The dilation rates: `[dilationHeight, dilationWidth]`\n * in which we sample input values across the height and width dimensions\n * in atrous convolution. Defaults to `[1, 1]`. If `rate` is a single\n * number, then `dilationHeight == dilationWidth`. If it is greater than\n * 1, then all values of `strides` must be 1.\n * @param dataFormat: An optional string from: \"NHWC\", \"NCHW\". 
Defaults to\n * \"NHWC\". Specify the data format of the input and output data. With the\n * default format \"NHWC\", the data is stored in the order of: [batch,\n * height, width, channels]. Only \"NHWC\" is currently supported.\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. If none is provided, it will not round\n * and error if the output is of fractional size.\n * @param bias Tensor to be added to the result.\n * @param activation Name of activation kernel (defaults to `linear`).\n * @param preluActivationWeights Tensor of prelu weights to be applied as part\n * of a `prelu` activation, typically the same shape as `x`.\n */\nfunction fusedDepthwiseConv2d_({ x, filter, strides, pad, dataFormat = 'NHWC', dilations = [1, 1], dimRoundingMode, bias, activation = 'linear', preluActivationWeights }) {\n if (shouldFuse(ENGINE.state.gradientDepth, activation) === false) {\n let result = unfusedDepthwiseConv2d(x, filter, strides, pad, dataFormat, dilations, dimRoundingMode);\n if (bias != null) {\n result = add(result, bias);\n }\n return applyActivation(result, activation, preluActivationWeights);\n }\n const $x = convertToTensor(x, 'x', 'depthwiseConv2d');\n const $filter = convertToTensor(filter, 'filter', 'depthwiseConv2d');\n let x4D = $x;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n reshapedTo4D = true;\n x4D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2]]);\n }\n util.assert(x4D.rank === 4, () => `Error in fused depthwiseConv2d: input must be rank 4, but got ` +\n `rank ${x4D.rank}.`);\n util.assert($filter.rank === 4, () => `Error in fused depthwiseConv2d: filter must be rank 4, ` +\n `but got rank ${$filter.rank}.`);\n util.assert(x4D.shape[3] === $filter.shape[2], () => `Error in fused depthwiseConv2d: number of input channels ` +\n `(${x4D.shape[3]}) must match the inChannels dimension in ` +\n `filter ${$filter.shape[2]}.`);\n if (dilations == null) {\n dilations = [1, 1];\n }\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in fused depthwiseConv2d: Either strides or dilations must ' +\n `be 1. Got strides ${strides} and dilations '${dilations}'`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in fused depthwiseConv2d: pad must be an integer when ` +\n `using dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const convInfo = conv_util.computeConv2DInfo(x4D.shape, $filter.shape, strides, dilations, pad, dimRoundingMode, true /* depthwise */);\n let $bias;\n if (bias != null) {\n $bias = convertToTensor(bias, 'bias', 'fused conv2d');\n [$bias] = makeTypesMatch($bias, $x);\n broadcast_util.assertAndGetBroadcastShape(convInfo.outShape, $bias.shape);\n }\n let $preluActivationWeights;\n if (preluActivationWeights != null) {\n $preluActivationWeights = convertToTensor(preluActivationWeights, 'prelu weights', 'fused depthwiseConv2d');\n }\n const grad = (dy, saved) => {\n util.assert(conv_util.tupleValuesAreOne(dilations), () => 'Error in gradient of fused depthwiseConv2d: dilation rates ' +\n `greater than 1 are not yet supported. 
Got dilations ` +\n `'${dilations}'`);\n const [$filter, x4D, y, bias] = saved;\n const dyActivation = getFusedDyActivation(dy, y, activation);\n const xDer = depthwiseConv2dNativeBackpropInput(x4D.shape, dyActivation, $filter, strides, pad, dilations, dimRoundingMode);\n const filterDer = depthwiseConv2dNativeBackpropFilter(x4D, dyActivation, $filter.shape, strides, pad, dilations, dimRoundingMode);\n if (bias != null) {\n const biasDer = getFusedBiasGradient($bias, dyActivation);\n return [xDer, filterDer, biasDer];\n }\n return [xDer, filterDer];\n };\n const forward = (backend) => {\n const res = backend.fusedDepthwiseConv2D({\n input: x4D,\n filter: $filter,\n convInfo,\n bias: $bias,\n activation,\n preluActivationWeights: $preluActivationWeights\n });\n return res;\n };\n const inputs = {\n x: x4D,\n filter: $filter,\n bias: $bias,\n preluActivationWeights: $preluActivationWeights\n };\n const attrs = { strides, pad, dataFormat, dilations, dimRoundingMode, activation };\n // Depending on the the params passed in we will have different number of\n // inputs and thus a a different number of elements in the gradient.\n if (bias == null) {\n const customOp = customGrad((x4D, filter, save) => {\n let res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, FusedDepthwiseConv2D, attrs);\n save([filter, x4D, res]);\n if (reshapedTo4D) {\n res = reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return { value: res, gradFunc: grad };\n });\n return customOp(x4D, $filter);\n }\n else {\n const customOpWithBias = customGrad((x4D, filter, bias, save) => {\n let res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, FusedDepthwiseConv2D, attrs);\n save([filter, x4D, res, bias]);\n if (reshapedTo4D) {\n res = reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return { value: res, gradFunc: grad };\n });\n return customOpWithBias(x4D, $filter, $bias);\n }\n}\nexport const depthwiseConv2d = op({ fusedDepthwiseConv2d_ });\n//# sourceMappingURL=depthwise_conv2d.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { customGrad } from '../../gradients';\nimport { _FusedMatMul } from '../../kernel_names';\nimport { makeTypesMatch } from '../../tensor_util';\nimport { convertToTensor } from '../../tensor_util_env';\nimport * as util from '../../util';\nimport { add } from '../add';\nimport * as broadcast_util from '../broadcast_util';\nimport { applyActivation, getFusedBiasGradient, getFusedDyActivation, shouldFuse } from '../fused_util';\nimport { matMul as unfusedMatMul } from '../mat_mul';\nimport { op } from '../operation';\nimport { reshape } from '../reshape';\n/**\n * Computes the dot product of two matrices with optional activation and bias.\n *\n * ```js\n * const a = tf.tensor2d([-1, -2], [1, 2]);\n * const b = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n * const bias = tf.tensor2d([1, 2], [1, 2]);\n *\n * tf.fused.matMul({a, b, bias, activation: 'relu'}).print();\n * ```\n *\n * @param obj An object with the following properties:\n * - `a` First matrix in dot product operation.\n * - `b` Second matrix in dot product operation.\n * - `transposeA` If true, `a` is transposed before multiplication.\n * - `transposeB` If true, `b` is transposed before multiplication.\n * - `bias` Matrix to be added to the result.\n * - `activation` Name of activation kernel (defaults to `linear`).\n * - `preluActivationWeights` Tensor of prelu weights.\n */\nfunction fusedMatMul_({ a, b, transposeA = false, transposeB = false, bias, activation = 'linear', preluActivationWeights }) {\n if (shouldFuse(ENGINE.state.gradientDepth, activation) === false) {\n let result = unfusedMatMul(a, b, transposeA, transposeB);\n if (bias != null) {\n result = add(result, bias);\n }\n return applyActivation(result, activation, preluActivationWeights);\n }\n let $a = convertToTensor(a, 'a', 'fused matMul');\n let $b = convertToTensor(b, 'b', 'fused matMul');\n [$a, $b] = makeTypesMatch($a, $b);\n const innerShapeA = transposeA ? $a.shape[$a.rank - 2] : $a.shape[$a.rank - 1];\n const innerShapeB = transposeB ? $b.shape[$b.rank - 1] : $b.shape[$b.rank - 2];\n const outerShapeA = transposeA ? $a.shape[$a.rank - 1] : $a.shape[$a.rank - 2];\n const outerShapeB = transposeB ? 
$b.shape[$b.rank - 2] : $b.shape[$b.rank - 1];\n const outerDimsA = $a.shape.slice(0, -2);\n const outerDimsB = $b.shape.slice(0, -2);\n const batchDimA = util.sizeFromShape(outerDimsA);\n const batchDimB = util.sizeFromShape(outerDimsB);\n util.assert($a.rank >= 2 && $b.rank >= 2 && $a.rank === $b.rank, () => `Error in fused matMul: inputs must have the same rank of at least ` +\n `2, got ranks ${$a.rank} and ${$b.rank}.`);\n util.assert(util.arraysEqual(outerDimsA, outerDimsB), () => `Error in fused matMul: outer dimensions (${outerDimsA}) and (` +\n `${outerDimsB}) of Tensors with shapes ${$a.shape} and ` +\n `${$b.shape} must match.`);\n util.assert(innerShapeA === innerShapeB, () => `Error in fused matMul: inner shapes (${innerShapeA}) and (` +\n `${innerShapeB}) of Tensors with shapes ${$a.shape} and ` +\n `${$b.shape} and transposeA=${transposeA}` +\n ` and transposeB=${transposeB} must match.`);\n const outShape = $a.shape.slice(0, -2).concat([outerShapeA, outerShapeB]);\n const a3D = transposeA ?\n reshape($a, [batchDimA, innerShapeA, outerShapeA]) :\n reshape($a, [batchDimA, outerShapeA, innerShapeA]);\n const b3D = transposeB ?\n reshape($b, [batchDimB, outerShapeB, innerShapeB]) :\n reshape($b, [batchDimB, innerShapeB, outerShapeB]);\n let $bias;\n if (bias != null) {\n $bias = convertToTensor(bias, 'bias', 'fused matMul');\n [$bias] = makeTypesMatch($bias, $a);\n broadcast_util.assertAndGetBroadcastShape(outShape, $bias.shape);\n }\n let $preluActivationWeights;\n if (preluActivationWeights != null) {\n $preluActivationWeights = convertToTensor(preluActivationWeights, 'prelu weights', 'fused matMul');\n }\n const grad = (dy, saved) => {\n const [a3D, b3D, y, $bias] = saved;\n // we reshape dy because the result of the forward is not\n // necessarily going to be a 3d tensor due to a reshape done at the end of\n // the customOp.\n const dyActivation = getFusedDyActivation(reshape(dy, y.shape), y, activation);\n let aDer;\n let bDer;\n if (!transposeA && !transposeB) {\n aDer = unfusedMatMul(dyActivation, b3D, false, true);\n bDer = unfusedMatMul(a3D, dyActivation, true, false);\n }\n else if (!transposeA && transposeB) {\n aDer = unfusedMatMul(dyActivation, b3D, false, false);\n bDer = unfusedMatMul(dyActivation, a3D, true, false);\n }\n else if (transposeA && !transposeB) {\n aDer = unfusedMatMul(b3D, dyActivation, false, true);\n bDer = unfusedMatMul(a3D, dyActivation, false, false);\n }\n else {\n aDer = unfusedMatMul(b3D, dyActivation, true, true);\n bDer = unfusedMatMul(dyActivation, a3D, true, true);\n }\n if (bias != null) {\n const biasDer = getFusedBiasGradient($bias, dyActivation);\n return [aDer, bDer, biasDer];\n }\n else {\n return [aDer, bDer];\n }\n };\n const forward = (backend) => {\n const y = backend.fusedBatchMatMul({\n a: a3D,\n b: b3D,\n transposeA,\n transposeB,\n bias: $bias,\n activation,\n preluActivationWeights: $preluActivationWeights\n });\n return y;\n };\n const inputs = {\n a: a3D,\n b: b3D,\n bias: $bias,\n preluActivationWeights: $preluActivationWeights\n };\n const attrs = { transposeA, transposeB, activation };\n // Depending on the the params passed in we will have different number of\n // inputs and thus a a different number of elements in the gradient.\n if (bias == null) {\n const customOp = customGrad((a3D, b3D, save) => {\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, _FusedMatMul, attrs);\n save([a3D, b3D, res]);\n return { value: reshape(res, outShape), gradFunc: grad };\n });\n return customOp(a3D, b3D);\n }\n 
else {\n const customOpWithBias = customGrad((a3D, b3D, $bias, save) => {\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, _FusedMatMul, attrs);\n save([a3D, b3D, res, $bias]);\n return { value: reshape(res, outShape), gradFunc: grad };\n });\n return customOpWithBias(a3D, b3D, $bias);\n }\n}\nexport const matMul = op({ fusedMatMul_ });\n//# sourceMappingURL=mat_mul.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { conv2d } from './fused/conv2d';\nimport { depthwiseConv2d } from './fused/depthwise_conv2d';\nimport { matMul } from './fused/mat_mul';\nexport { conv2d, depthwiseConv2d, matMul };\n//# sourceMappingURL=fused_ops.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { op } from '../operation';\nimport { cosineWindow } from '../signal_ops_util';\n/**\n * Generate a hamming window.\n *\n * See: https://en.wikipedia.org/wiki/Window_function#Hann_and_Hamming_windows\n *\n * ```js\n * tf.signal.hammingWindow(10).print();\n * ```\n * @param The length of window\n *\n * @doc {heading: 'Operations', subheading: 'Signal', namespace: 'signal'}\n */\nfunction hammingWindow_(windowLength) {\n return cosineWindow(windowLength, 0.54, 0.46);\n}\nexport const hammingWindow = op({ hammingWindow_ });\n//# sourceMappingURL=hamming_window.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { op } from '../operation';\nimport { cosineWindow } from '../signal_ops_util';\n/**\n * Generate a Hann window.\n *\n * See: https://en.wikipedia.org/wiki/Window_function#Hann_and_Hamming_windows\n *\n * ```js\n * tf.signal.hannWindow(10).print();\n * ```\n * @param The length of window\n *\n * @doc {heading: 'Operations', subheading: 'Signal', namespace: 'signal'}\n */\nfunction hannWindow_(windowLength) {\n return cosineWindow(windowLength, 0.5, 0.5);\n}\nexport const hannWindow = op({ hannWindow_ });\n//# sourceMappingURL=hann_window.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { concat } from '../concat';\nimport { fill } from '../fill';\nimport { op } from '../operation';\nimport { reshape } from '../reshape';\nimport { slice } from '../slice';\nimport { tensor2d } from '../tensor2d';\n/**\n * Expands input into frames of frameLength.\n * Slides a window size with frameStep.\n *\n * ```js\n * tf.signal.frame([1, 2, 3], 2, 1).print();\n * ```\n * @param signal The input tensor to be expanded\n * @param frameLength Length of each frame\n * @param frameStep The frame hop size in samples.\n * @param padEnd Whether to pad the end of signal with padValue.\n * @param padValue An number to use where the input signal does\n * not exist when padEnd is True.\n *\n * @doc {heading: 'Operations', subheading: 'Signal', namespace: 'signal'}\n */\nfunction frame_(signal, frameLength, frameStep, padEnd = false, padValue = 0) {\n let start = 0;\n const output = [];\n while (start + frameLength <= signal.size) {\n output.push(slice(signal, start, frameLength));\n start += frameStep;\n }\n if (padEnd) {\n while (start < signal.size) {\n const padLen = (start + frameLength) - signal.size;\n const pad = concat([\n slice(signal, start, frameLength - padLen), fill([padLen], padValue)\n ]);\n output.push(pad);\n start += frameStep;\n }\n }\n if (output.length === 0) {\n return tensor2d([], [0, frameLength]);\n }\n return reshape(concat(output), [output.length, frameLength]);\n}\nexport const frame = op({ frame_ });\n//# sourceMappingURL=frame.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { concat } from '../concat';\nimport { mul } from '../mul';\nimport { op } from '../operation';\nimport { enclosingPowerOfTwo } from '../signal_ops_util';\nimport { slice } from '../slice';\nimport { rfft } from '../spectral/rfft';\nimport { frame } from './frame';\nimport { hannWindow } from './hann_window';\n/**\n * Computes the Short-time Fourier Transform of signals\n * See: https://en.wikipedia.org/wiki/Short-time_Fourier_transform\n *\n * ```js\n * const input = tf.tensor1d([1, 1, 1, 1, 1])\n * tf.signal.stft(input, 3, 1).print();\n * ```\n * @param signal 1-dimensional real value tensor.\n * @param frameLength The window length of samples.\n * @param frameStep The number of samples to step.\n * @param fftLength The size of the FFT to apply.\n * @param windowFn A callable that takes a window length and returns 1-d tensor.\n *\n * @doc {heading: 'Operations', subheading: 'Signal', namespace: 'signal'}\n */\nfunction stft_(signal, frameLength, frameStep, fftLength, windowFn = hannWindow) {\n if (fftLength == null) {\n fftLength = enclosingPowerOfTwo(frameLength);\n }\n const framedSignal = frame(signal, frameLength, frameStep);\n const windowedSignal = mul(framedSignal, windowFn(frameLength));\n const output = [];\n for (let i = 0; i < framedSignal.shape[0]; i++) {\n output.push(rfft(slice(windowedSignal, [i, 0], [1, frameLength]), fftLength));\n }\n return concat(output);\n}\nexport const stft = op({ stft_ });\n//# sourceMappingURL=stft.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { CropAndResize } from '../../kernel_names';\nimport { convertToTensor } from '../../tensor_util_env';\nimport * as util from '../../util';\nimport { op } from '../operation';\n/**\n * Extracts crops from the input image tensor and resizes them using bilinear\n * sampling or nearest neighbor sampling (possibly with aspect ratio change)\n * to a common output size specified by cropSize.\n *\n * @param image 4d tensor of shape `[batch,imageHeight,imageWidth, depth]`,\n * where imageHeight and imageWidth must be positive, specifying the\n * batch of images from which to take crops\n * @param boxes 2d float32 tensor of shape `[numBoxes, 4]`. Each entry is\n * `[y1, x1, y2, x2]`, where `(y1, x1)` and `(y2, x2)` are the normalized\n * coordinates of the box in the boxInd[i]'th image in the batch\n * @param boxInd 1d int32 tensor of shape `[numBoxes]` with values in range\n * `[0, batch)` that specifies the image that the `i`-th box refers to.\n * @param cropSize 1d int32 tensor of 2 elements `[cropHeigh, cropWidth]`\n * specifying the size to which all crops are resized to.\n * @param method Optional string from `'bilinear' | 'nearest'`,\n * defaults to bilinear, which specifies the sampling method for resizing\n * @param extrapolationValue A threshold for deciding when to remove boxes based\n * on score. 
Defaults to 0.\n * @return A 4D tensor of the shape `[numBoxes,cropHeight,cropWidth,depth]`\n *\n * @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'}\n */\nfunction cropAndResize_(image, boxes, boxInd, cropSize, method, extrapolationValue) {\n const $image = convertToTensor(image, 'image', 'cropAndResize');\n const $boxes = convertToTensor(boxes, 'boxes', 'cropAndResize', 'float32');\n const $boxInd = convertToTensor(boxInd, 'boxInd', 'cropAndResize', 'int32');\n method = method || 'bilinear';\n extrapolationValue = extrapolationValue || 0;\n const numBoxes = $boxes.shape[0];\n util.assert($image.rank === 4, () => 'Error in cropAndResize: image must be rank 4,' +\n `but got rank ${$image.rank}.`);\n util.assert($boxes.rank === 2 && $boxes.shape[1] === 4, () => `Error in cropAndResize: boxes must be have size [${numBoxes},4] ` +\n `but had shape ${$boxes.shape}.`);\n util.assert($boxInd.rank === 1 && $boxInd.shape[0] === numBoxes, () => `Error in cropAndResize: boxInd must be have size [${numBoxes}] ` +\n `but had shape ${$boxes.shape}.`);\n util.assert(cropSize.length === 2, () => `Error in cropAndResize: cropSize must be of length 2, but got ` +\n `length ${cropSize.length}.`);\n util.assert(cropSize[0] >= 1 && cropSize[1] >= 1, () => `cropSize must be atleast [1,1], but was ${cropSize}`);\n util.assert(method === 'bilinear' || method === 'nearest', () => `method must be bilinear or nearest, but was ${method}`);\n const forward = (backend) => backend.cropAndResize($image, $boxes, $boxInd, cropSize, method, extrapolationValue);\n const inputs = { image: $image, boxes: $boxes, boxInd: $boxInd };\n const attrs = { method, extrapolationValue, cropSize };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, CropAndResize, attrs);\n return res;\n}\nexport const cropAndResize = op({ cropAndResize_ });\n//# sourceMappingURL=crop_and_resize.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { FlipLeftRight } from '../../kernel_names';\nimport { convertToTensor } from '../../tensor_util_env';\nimport * as util from '../../util';\nimport { op } from '../operation';\n/**\n * Flips the image left to right. 
Currently available in the CPU, WebGL, and\n * WASM backends.\n *\n * @param image 4d tensor of shape `[batch, imageHeight, imageWidth, depth]`.\n */\n/** @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'} */\nfunction flipLeftRight_(image) {\n const $image = convertToTensor(image, 'image', 'flipLeftRight', 'float32');\n util.assert($image.rank === 4, () => 'Error in flipLeftRight: image must be rank 4,' +\n `but got rank ${$image.rank}.`);\n const inputs = { image: $image };\n const res = ENGINE.runKernel(FlipLeftRight, inputs, {});\n return res;\n}\nexport const flipLeftRight = op({ flipLeftRight_ });\n//# sourceMappingURL=flip_left_right.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { RotateWithOffset } from '../../kernel_names';\nimport { convertToTensor } from '../../tensor_util_env';\nimport * as util from '../../util';\nimport { op } from '../operation';\n/**\n * Rotates the input image tensor counter-clockwise with an optional offset\n * center of rotation. Currently available in the CPU, WebGL, and WASM backends.\n *\n * @param image 4d tensor of shape `[batch, imageHeight, imageWidth, depth]`.\n * @param radians The amount of rotation.\n * @param fillValue The value to fill in the empty space leftover\n * after rotation. Can be either a single grayscale value (0-255), or an\n * array of three numbers `[red, green, blue]` specifying the red, green,\n * and blue channels. Defaults to `0` (black).\n * @param center The center of rotation. Can be either a single value (0-1), or\n * an array of two numbers `[centerX, centerY]`. Defaults to `0.5` (rotates\n * the image around its center).\n *\n * @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'}\n */\nfunction rotateWithOffset_(image, radians, fillValue = 0, center = 0.5) {\n const $image = convertToTensor(image, 'image', 'rotateWithOffset', 'float32');\n util.assert($image.rank === 4, () => 'Error in rotateWithOffset: image must be rank 4,' +\n `but got rank ${$image.rank}.`);\n const inputs = { image: $image };\n const attrs = { radians, fillValue, center };\n const res = ENGINE.runKernel(RotateWithOffset, inputs, attrs);\n return res;\n}\nexport const rotateWithOffset = op({ rotateWithOffset_ });\n//# sourceMappingURL=rotate_with_offset.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as util from '../util';\nfunction nonMaxSuppSanityCheck(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma) {\n if (iouThreshold == null) {\n iouThreshold = 0.5;\n }\n if (scoreThreshold == null) {\n scoreThreshold = Number.NEGATIVE_INFINITY;\n }\n if (softNmsSigma == null) {\n softNmsSigma = 0.0;\n }\n const numBoxes = boxes.shape[0];\n maxOutputSize = Math.min(maxOutputSize, numBoxes);\n util.assert(0 <= iouThreshold && iouThreshold <= 1, () => `iouThreshold must be in [0, 1], but was '${iouThreshold}'`);\n util.assert(boxes.rank === 2, () => `boxes must be a 2D tensor, but was of rank '${boxes.rank}'`);\n util.assert(boxes.shape[1] === 4, () => `boxes must have 4 columns, but 2nd dimension was ${boxes.shape[1]}`);\n util.assert(scores.rank === 1, () => 'scores must be a 1D tensor');\n util.assert(scores.shape[0] === numBoxes, () => `scores has incompatible shape with boxes. Expected ${numBoxes}, ` +\n `but was ${scores.shape[0]}`);\n util.assert(0 <= softNmsSigma && softNmsSigma <= 1, () => `softNmsSigma must be in [0, 1], but was '${softNmsSigma}'`);\n return { maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma };\n}\nexport { nonMaxSuppSanityCheck };\n//# sourceMappingURL=nonmax_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { NonMaxSuppressionV3 } from '../../kernel_names';\nimport { convertToTensor } from '../../tensor_util_env';\nimport { nonMaxSuppSanityCheck } from '../nonmax_util';\nimport { op } from '../operation';\nfunction nonMaxSuppression_(boxes, scores, maxOutputSize, iouThreshold = 0.5, scoreThreshold = Number.NEGATIVE_INFINITY) {\n const $boxes = convertToTensor(boxes, 'boxes', 'nonMaxSuppression');\n const $scores = convertToTensor(scores, 'scores', 'nonMaxSuppression');\n const inputs = nonMaxSuppSanityCheck($boxes, $scores, maxOutputSize, iouThreshold, scoreThreshold);\n maxOutputSize = inputs.maxOutputSize;\n iouThreshold = inputs.iouThreshold;\n scoreThreshold = inputs.scoreThreshold;\n const attrs = { maxOutputSize, iouThreshold, scoreThreshold };\n return ENGINE.runKernelFunc(b => b.nonMaxSuppression($boxes, $scores, maxOutputSize, iouThreshold, scoreThreshold), { boxes: $boxes, scores: $scores }, null /* grad */, NonMaxSuppressionV3, attrs);\n}\nexport const nonMaxSuppression = op({ nonMaxSuppression_ });\n//# sourceMappingURL=non_max_suppression.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * Inserts a value into a sorted array. This method allows duplicate, meaning it\n * allows inserting duplicate value, in which case, the element will be inserted\n * at the lowest index of the value.\n * @param arr The array to modify.\n * @param element The element to insert.\n * @param comparator Optional. If no comparator is specified, elements are\n * compared using array_util.defaultComparator, which is suitable for Strings\n * and Numbers in ascending arrays. If the array contains multiple instances of\n * the target value, the left-most instance will be returned. To provide a\n * comparator, it should take 2 arguments to compare and return a negative,\n * zero, or a positive number.\n */\nexport function binaryInsert(arr, element, comparator) {\n const index = binarySearch(arr, element, comparator);\n const insertionPoint = index < 0 ? 
-(index + 1) : index;\n arr.splice(insertionPoint, 0, element);\n}\n/**\n * Searches the array for the target using binary search, returns the index\n * of the found element, or position to insert if element not found. If no\n * comparator is specified, elements are compared using array_\n * util.defaultComparator, which is suitable for Strings and Numbers in\n * ascending arrays. If the array contains multiple instances of the target\n * value, the left-most instance will be returned.\n * @param arr The array to be searched in.\n * @param target The target to be searched for.\n * @param comparator Should take 2 arguments to compare and return a negative,\n * zero, or a positive number.\n * @return Lowest index of the target value if found, otherwise the insertion\n * point where the target should be inserted, in the form of\n * (-insertionPoint - 1).\n */\nexport function binarySearch(arr, target, comparator) {\n return binarySearch_(arr, target, comparator || defaultComparator);\n}\n/**\n * Compares its two arguments for order.\n * @param a The first element to be compared.\n * @param b The second element to be compared.\n * @return A negative number, zero, or a positive number as the first\n * argument is less than, equal to, or greater than the second.\n */\nfunction defaultComparator(a, b) {\n return a > b ? 1 : a < b ? -1 : 0;\n}\nfunction binarySearch_(arr, target, comparator) {\n let left = 0;\n let right = arr.length;\n let middle = 0;\n let found = false;\n while (left < right) {\n middle = left + ((right - left) >>> 1);\n const compareResult = comparator(target, arr[middle]);\n if (compareResult > 0) {\n left = middle + 1;\n }\n else {\n right = middle;\n // If compareResult is 0, the value is found. We record it is found,\n // and then keep looking because there may be duplicate.\n found = !compareResult;\n }\n }\n return found ? left : -left - 1;\n}\n//# sourceMappingURL=array_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * Implementation of the NonMaxSuppression kernel shared between webgl and cpu.\n */\nimport { scalar } from '../ops/scalar';\nimport { tensor1d } from '../ops/tensor1d';\nimport { binaryInsert } from './array_util';\nexport function nonMaxSuppressionV3Impl(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold) {\n return nonMaxSuppressionImpl_(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold, 0 /* softNmsSigma */)\n .selectedIndices;\n}\nexport function nonMaxSuppressionV4Impl(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold, padToMaxOutputSize) {\n return nonMaxSuppressionImpl_(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold, 0 /* softNmsSigma */, false /* returnScoresTensor */, padToMaxOutputSize /* padToMaxOutputSize */, true\n /* returnValidOutputs */ );\n}\nexport function nonMaxSuppressionV5Impl(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma) {\n return nonMaxSuppressionImpl_(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma, true /* returnScoresTensor */);\n}\nfunction nonMaxSuppressionImpl_(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma, returnScoresTensor = false, padToMaxOutputSize = false, returnValidOutputs = false) {\n // The list is sorted in ascending order, so that we can always pop the\n // candidate with the largest score in O(1) time.\n const candidates = [];\n for (let i = 0; i < scores.length; i++) {\n if (scores[i] > scoreThreshold) {\n candidates.push({ score: scores[i], boxIndex: i, suppressBeginIndex: 0 });\n }\n }\n candidates.sort(ascendingComparator);\n // If softNmsSigma is 0, the outcome of this algorithm is exactly same as\n // before.\n const scale = softNmsSigma > 0 ? (-0.5 / softNmsSigma) : 0.0;\n const selectedIndices = [];\n const selectedScores = [];\n while (selectedIndices.length < maxOutputSize && candidates.length > 0) {\n const candidate = candidates.pop();\n const { score: originalScore, boxIndex, suppressBeginIndex } = candidate;\n if (originalScore < scoreThreshold) {\n break;\n }\n // Overlapping boxes are likely to have similar scores, therefore we\n // iterate through the previously selected boxes backwards in order to\n // see if candidate's score should be suppressed. We use\n // suppressBeginIndex to track and ensure a candidate can be suppressed\n // by a selected box no more than once. 
Also, if the overlap exceeds\n // iouThreshold, we simply ignore the candidate.\n let ignoreCandidate = false;\n for (let j = selectedIndices.length - 1; j >= suppressBeginIndex; --j) {\n const iou = intersectionOverUnion(boxes, boxIndex, selectedIndices[j]);\n if (iou >= iouThreshold) {\n ignoreCandidate = true;\n break;\n }\n candidate.score =\n candidate.score * suppressWeight(iouThreshold, scale, iou);\n if (candidate.score <= scoreThreshold) {\n break;\n }\n }\n // At this point, if `candidate.score` has not dropped below\n // `scoreThreshold`, then we know that we went through all of the\n // previous selections and can safely update `suppressBeginIndex` to the\n // end of the selected array. Then we can re-insert the candidate with\n // the updated score and suppressBeginIndex back in the candidate list.\n // If on the other hand, `candidate.score` has dropped below the score\n // threshold, we will not add it back to the candidates list.\n candidate.suppressBeginIndex = selectedIndices.length;\n if (!ignoreCandidate) {\n // Candidate has passed all the tests, and is not suppressed, so\n // select the candidate.\n if (candidate.score === originalScore) {\n selectedIndices.push(boxIndex);\n selectedScores.push(candidate.score);\n }\n else if (candidate.score > scoreThreshold) {\n // Candidate's score is suppressed but is still high enough to be\n // considered, so add back to the candidates list.\n binaryInsert(candidates, candidate, ascendingComparator);\n }\n }\n }\n // NonMaxSuppressionV4 feature: padding output to maxOutputSize.\n const validOutputs = selectedIndices.length;\n const elemsToPad = maxOutputSize - validOutputs;\n if (padToMaxOutputSize && elemsToPad > 0) {\n selectedIndices.push(...new Array(elemsToPad).fill(0));\n selectedScores.push(...new Array(elemsToPad).fill(0.0));\n }\n const result = { selectedIndices: tensor1d(selectedIndices, 'int32') };\n if (returnScoresTensor) {\n result['selectedScores'] = tensor1d(selectedScores, 'float32');\n }\n if (returnValidOutputs) {\n result['validOutputs'] = scalar(validOutputs, 'int32');\n }\n return result;\n}\nfunction intersectionOverUnion(boxes, i, j) {\n const iCoord = boxes.subarray(i * 4, i * 4 + 4);\n const jCoord = boxes.subarray(j * 4, j * 4 + 4);\n const yminI = Math.min(iCoord[0], iCoord[2]);\n const xminI = Math.min(iCoord[1], iCoord[3]);\n const ymaxI = Math.max(iCoord[0], iCoord[2]);\n const xmaxI = Math.max(iCoord[1], iCoord[3]);\n const yminJ = Math.min(jCoord[0], jCoord[2]);\n const xminJ = Math.min(jCoord[1], jCoord[3]);\n const ymaxJ = Math.max(jCoord[0], jCoord[2]);\n const xmaxJ = Math.max(jCoord[1], jCoord[3]);\n const areaI = (ymaxI - yminI) * (xmaxI - xminI);\n const areaJ = (ymaxJ - yminJ) * (xmaxJ - xminJ);\n if (areaI <= 0 || areaJ <= 0) {\n return 0.0;\n }\n const intersectionYmin = Math.max(yminI, yminJ);\n const intersectionXmin = Math.max(xminI, xminJ);\n const intersectionYmax = Math.min(ymaxI, ymaxJ);\n const intersectionXmax = Math.min(xmaxI, xmaxJ);\n const intersectionArea = Math.max(intersectionYmax - intersectionYmin, 0.0) *\n Math.max(intersectionXmax - intersectionXmin, 0.0);\n return intersectionArea / (areaI + areaJ - intersectionArea);\n}\n// A Gaussian penalty function, this method always returns values in [0, 1].\n// The weight is a function of similarity, the more overlap two boxes are, the\n// smaller the weight is, meaning highly overlapping boxe will be significantly\n// penalized. 
On the other hand, a non-overlapping box will not be penalized.\nfunction suppressWeight(iouThreshold, scale, iou) {\n const weight = Math.exp(scale * iou * iou);\n return iou <= iouThreshold ? weight : 0.0;\n}\nfunction ascendingComparator(c1, c2) {\n // For objects with same scores, we make the object with the larger index go\n // first. In an array that pops from the end, this means that the object with\n // the smaller index will be popped first. This ensures the same output as\n // the TensorFlow python version.\n return (c1.score - c2.score) ||\n ((c1.score === c2.score) && (c2.boxIndex - c1.boxIndex));\n}\n//# sourceMappingURL=non_max_suppression_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { nonMaxSuppressionV3Impl } from '../../backends/non_max_suppression_impl';\nimport { convertToTensor } from '../../tensor_util_env';\nimport { nonMaxSuppSanityCheck } from '../nonmax_util';\n/**\n * Performs non maximum suppression of bounding boxes based on\n * iou (intersection over union).\n *\n * This is the async version of `nonMaxSuppression`\n *\n * @param boxes a 2d tensor of shape `[numBoxes, 4]`. Each entry is\n * `[y1, x1, y2, x2]`, where `(y1, x1)` and `(y2, x2)` are the corners of\n * the bounding box.\n * @param scores a 1d tensor providing the box scores of shape `[numBoxes]`.\n * @param maxOutputSize The maximum number of boxes to be selected.\n * @param iouThreshold A float representing the threshold for deciding whether\n * boxes overlap too much with respect to IOU. Must be between [0, 1].\n * Defaults to 0.5 (50% box overlap).\n * @param scoreThreshold A threshold for deciding when to remove boxes based\n * on score. 
Defaults to -inf, which means any score is accepted.\n * @return A 1D tensor with the selected box indices.\n *\n * @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'}\n */\nasync function nonMaxSuppressionAsync_(boxes, scores, maxOutputSize, iouThreshold = 0.5, scoreThreshold = Number.NEGATIVE_INFINITY) {\n const $boxes = convertToTensor(boxes, 'boxes', 'nonMaxSuppressionAsync');\n const $scores = convertToTensor(scores, 'scores', 'nonMaxSuppressionAsync');\n const inputs = nonMaxSuppSanityCheck($boxes, $scores, maxOutputSize, iouThreshold, scoreThreshold);\n maxOutputSize = inputs.maxOutputSize;\n iouThreshold = inputs.iouThreshold;\n scoreThreshold = inputs.scoreThreshold;\n const boxesAndScores = await Promise.all([$boxes.data(), $scores.data()]);\n const boxesVals = boxesAndScores[0];\n const scoresVals = boxesAndScores[1];\n // We call a cpu based impl directly with the typedarray data here rather\n // than a kernel because all kernels are synchronous (and thus cannot await\n // .data()).\n const res = nonMaxSuppressionV3Impl(boxesVals, scoresVals, maxOutputSize, iouThreshold, scoreThreshold);\n if ($boxes !== boxes) {\n $boxes.dispose();\n }\n if ($scores !== scores) {\n $scores.dispose();\n }\n return res;\n}\nexport const nonMaxSuppressionAsync = nonMaxSuppressionAsync_;\n//# sourceMappingURL=non_max_suppression_async.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { NonMaxSuppressionV5 } from '../../kernel_names';\nimport { convertToTensor } from '../../tensor_util_env';\nimport { nonMaxSuppSanityCheck } from '../nonmax_util';\nimport { op } from '../operation';\n/**\n * Performs non maximum suppression of bounding boxes based on\n * iou (intersection over union).\n *\n * This op also supports a Soft-NMS mode (c.f.\n * Bodla et al, https://arxiv.org/abs/1704.04503) where boxes reduce the score\n * of other overlapping boxes, therefore favoring different regions of the image\n * with high scores. To enable this Soft-NMS mode, set the `softNmsSigma`\n * parameter to be larger than 0.\n *\n * @param boxes a 2d tensor of shape `[numBoxes, 4]`. Each entry is\n * `[y1, x1, y2, x2]`, where `(y1, x1)` and `(y2, x2)` are the corners of\n * the bounding box.\n * @param scores a 1d tensor providing the box scores of shape `[numBoxes]`.\n * @param maxOutputSize The maximum number of boxes to be selected.\n * @param iouThreshold A float representing the threshold for deciding whether\n * boxes overlap too much with respect to IOU. Must be between [0, 1].\n * Defaults to 0.5 (50% box overlap).\n * @param scoreThreshold A threshold for deciding when to remove boxes based\n * on score. 
Defaults to -inf, which means any score is accepted.\n * @param softNmsSigma A float representing the sigma parameter for Soft NMS.\n * When sigma is 0, it falls back to nonMaxSuppression.\n * @return A map with the following properties:\n * - selectedIndices: A 1D tensor with the selected box indices.\n * - selectedScores: A 1D tensor with the corresponding scores for each\n * selected box.\n *\n * @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'}\n */\nfunction nonMaxSuppressionWithScore_(boxes, scores, maxOutputSize, iouThreshold = 0.5, scoreThreshold = Number.NEGATIVE_INFINITY, softNmsSigma = 0.0) {\n const $boxes = convertToTensor(boxes, 'boxes', 'nonMaxSuppression');\n const $scores = convertToTensor(scores, 'scores', 'nonMaxSuppression');\n const params = nonMaxSuppSanityCheck($boxes, $scores, maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma);\n maxOutputSize = params.maxOutputSize;\n iouThreshold = params.iouThreshold;\n scoreThreshold = params.scoreThreshold;\n softNmsSigma = params.softNmsSigma;\n const inputs = { boxes: $boxes, scores: $scores };\n const attrs = { maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma };\n const result = ENGINE.runKernel(NonMaxSuppressionV5, inputs, attrs);\n return { selectedIndices: result[0], selectedScores: result[1] };\n}\nexport const nonMaxSuppressionWithScore = op({ nonMaxSuppressionWithScore_ });\n//# sourceMappingURL=non_max_suppression_with_score.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { nonMaxSuppressionV5Impl } from '../../backends/non_max_suppression_impl';\nimport { convertToTensor } from '../../tensor_util_env';\nimport { nonMaxSuppSanityCheck } from '../nonmax_util';\n/**\n * Asynchronously performs non maximum suppression of bounding boxes based on\n * iou (intersection over union).\n *\n * This op also supports a Soft-NMS mode (c.f.\n * Bodla et al, https://arxiv.org/abs/1704.04503) where boxes reduce the score\n * of other overlapping boxes, therefore favoring different regions of the image\n * with high scores. To enable this Soft-NMS mode, set the `softNmsSigma`\n * parameter to be larger than 0.\n *\n * @param boxes a 2d tensor of shape `[numBoxes, 4]`. Each entry is\n * `[y1, x1, y2, x2]`, where `(y1, x1)` and `(y2, x2)` are the corners of\n * the bounding box.\n * @param scores a 1d tensor providing the box scores of shape `[numBoxes]`.\n * @param maxOutputSize The maximum number of boxes to be selected.\n * @param iouThreshold A float representing the threshold for deciding whether\n * boxes overlap too much with respect to IOU. Must be between [0, 1].\n * Defaults to 0.5 (50% box overlap).\n * @param scoreThreshold A threshold for deciding when to remove boxes based\n * on score. 
Defaults to -inf, which means any score is accepted.\n * @param softNmsSigma A float representing the sigma parameter for Soft NMS.\n * When sigma is 0, it falls back to nonMaxSuppression.\n * @return A map with the following properties:\n * - selectedIndices: A 1D tensor with the selected box indices.\n * - selectedScores: A 1D tensor with the corresponding scores for each\n * selected box.\n *\n * @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'}\n */\nasync function nonMaxSuppressionWithScoreAsync_(boxes, scores, maxOutputSize, iouThreshold = 0.5, scoreThreshold = Number.NEGATIVE_INFINITY, softNmsSigma = 0.0) {\n const $boxes = convertToTensor(boxes, 'boxes', 'nonMaxSuppressionAsync');\n const $scores = convertToTensor(scores, 'scores', 'nonMaxSuppressionAsync');\n const params = nonMaxSuppSanityCheck($boxes, $scores, maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma);\n maxOutputSize = params.maxOutputSize;\n iouThreshold = params.iouThreshold;\n scoreThreshold = params.scoreThreshold;\n softNmsSigma = params.softNmsSigma;\n const boxesAndScores = await Promise.all([$boxes.data(), $scores.data()]);\n const boxesVals = boxesAndScores[0];\n const scoresVals = boxesAndScores[1];\n // We call a cpu based impl directly with the typedarray data here rather\n // than a kernel because all kernels are synchronous (and thus cannot await\n // .data()).\n const res = nonMaxSuppressionV5Impl(boxesVals, scoresVals, maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma);\n if ($boxes !== boxes) {\n $boxes.dispose();\n }\n if ($scores !== scores) {\n $scores.dispose();\n }\n return res;\n}\nexport const nonMaxSuppressionWithScoreAsync = nonMaxSuppressionWithScoreAsync_;\n//# sourceMappingURL=non_max_suppression_with_score_async.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { NonMaxSuppressionV4 } from '../../kernel_names';\nimport { convertToTensor } from '../../tensor_util_env';\nimport { nonMaxSuppSanityCheck } from '../nonmax_util';\nimport { op } from '../operation';\n/**\n * Asynchronously performs non maximum suppression of bounding boxes based on\n * iou (intersection over union), with an option to pad results.\n *\n * @param boxes a 2d tensor of shape `[numBoxes, 4]`. Each entry is\n * `[y1, x1, y2, x2]`, where `(y1, x1)` and `(y2, x2)` are the corners of\n * the bounding box.\n * @param scores a 1d tensor providing the box scores of shape `[numBoxes]`.\n * @param maxOutputSize The maximum number of boxes to be selected.\n * @param iouThreshold A float representing the threshold for deciding whether\n * boxes overlap too much with respect to IOU. Must be between [0, 1].\n * Defaults to 0.5 (50% box overlap).\n * @param scoreThreshold A threshold for deciding when to remove boxes based\n * on score. 
Defaults to -inf, which means any score is accepted.\n * @param padToMaxOutputSize Defalts to false. If true, size of output\n * `selectedIndices` is padded to maxOutputSize.\n * @return A map with the following properties:\n * - selectedIndices: A 1D tensor with the selected box indices.\n * - validOutputs: A scalar denoting how many elements in `selectedIndices`\n * are valid. Valid elements occur first, then padding.\n *\n * @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'}\n */\nfunction nonMaxSuppressionPadded_(boxes, scores, maxOutputSize, iouThreshold = 0.5, scoreThreshold = Number.NEGATIVE_INFINITY, padToMaxOutputSize = false) {\n const $boxes = convertToTensor(boxes, 'boxes', 'nonMaxSuppression');\n const $scores = convertToTensor(scores, 'scores', 'nonMaxSuppression');\n const params = nonMaxSuppSanityCheck($boxes, $scores, maxOutputSize, iouThreshold, scoreThreshold, null /* softNmsSigma */);\n const $maxOutputSize = params.maxOutputSize;\n const $iouThreshold = params.iouThreshold;\n const $scoreThreshold = params.scoreThreshold;\n const inputs = { boxes: $boxes, scores: $scores };\n const attrs = {\n maxOutputSize: $maxOutputSize,\n iouThreshold: $iouThreshold,\n scoreThreshold: $scoreThreshold,\n padToMaxOutputSize\n };\n const result = ENGINE.runKernel(NonMaxSuppressionV4, inputs, attrs);\n return { selectedIndices: result[0], validOutputs: result[1] };\n}\nexport const nonMaxSuppressionPadded = op({ nonMaxSuppressionPadded_ });\n//# sourceMappingURL=non_max_suppression_padded.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { nonMaxSuppressionV4Impl } from '../../backends/non_max_suppression_impl';\nimport { convertToTensor } from '../../tensor_util_env';\nimport { nonMaxSuppSanityCheck } from '../nonmax_util';\n/**\n * Asynchronously performs non maximum suppression of bounding boxes based on\n * iou (intersection over union), with an option to pad results.\n *\n * @param boxes a 2d tensor of shape `[numBoxes, 4]`. Each entry is\n * `[y1, x1, y2, x2]`, where `(y1, x1)` and `(y2, x2)` are the corners of\n * the bounding box.\n * @param scores a 1d tensor providing the box scores of shape `[numBoxes]`.\n * @param maxOutputSize The maximum number of boxes to be selected.\n * @param iouThreshold A float representing the threshold for deciding whether\n * boxes overlap too much with respect to IOU. Must be between [0, 1].\n * Defaults to 0.5 (50% box overlap).\n * @param scoreThreshold A threshold for deciding when to remove boxes based\n * on score. Defaults to -inf, which means any score is accepted.\n * @param padToMaxOutputSize Defalts to false. 
If true, size of output\n * `selectedIndices` is padded to maxOutputSize.\n * @return A map with the following properties:\n * - selectedIndices: A 1D tensor with the selected box indices.\n * - validOutputs: A scalar denoting how many elements in `selectedIndices`\n * are valid. Valid elements occur first, then padding.\n *\n * @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'}\n */\nasync function nonMaxSuppressionPaddedAsync_(boxes, scores, maxOutputSize, iouThreshold = 0.5, scoreThreshold = Number.NEGATIVE_INFINITY, padToMaxOutputSize = false) {\n const $boxes = convertToTensor(boxes, 'boxes', 'nonMaxSuppressionAsync');\n const $scores = convertToTensor(scores, 'scores', 'nonMaxSuppressionAsync');\n const params = nonMaxSuppSanityCheck($boxes, $scores, maxOutputSize, iouThreshold, scoreThreshold, null /* softNmsSigma */);\n const $maxOutputSize = params.maxOutputSize;\n const $iouThreshold = params.iouThreshold;\n const $scoreThreshold = params.scoreThreshold;\n const [boxesVals, scoresVals] = await Promise.all([$boxes.data(), $scores.data()]);\n // We call a cpu based impl directly with the typedarray data here rather\n // than a kernel because all kernels are synchronous (and thus cannot await\n // .data()).\n const res = nonMaxSuppressionV4Impl(boxesVals, scoresVals, $maxOutputSize, $iouThreshold, $scoreThreshold, padToMaxOutputSize);\n if ($boxes !== boxes) {\n $boxes.dispose();\n }\n if ($scores !== scores) {\n $scores.dispose();\n }\n return res;\n}\nexport const nonMaxSuppressionPaddedAsync = nonMaxSuppressionPaddedAsync_;\n//# sourceMappingURL=non_max_suppression_padded_async.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { ResizeBilinear } from '../../kernel_names';\nimport { convertToTensor } from '../../tensor_util_env';\nimport * as util from '../../util';\nimport { op } from '../operation';\nimport { reshape } from '../reshape';\n/**\n * Bilinear resize a single 3D image or a batch of 3D images to a new shape.\n *\n * @param images The images, of rank 4 or rank 3, of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is assumed.\n * @param size The new shape `[newHeight, newWidth]` to resize the\n * images to. Each channel is resized individually.\n * @param alignCorners Defaults to False. If true, rescale\n * input by `(new_height - 1) / (height - 1)`, which exactly aligns the 4\n * corners of images and resized images. If false, rescale by\n * `new_height / height`. 
Treat similarly the width dimension.\n *\n * @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'}\n */\nfunction resizeBilinear_(images, size, alignCorners = false) {\n const $images = convertToTensor(images, 'images', 'resizeBilinear');\n util.assert($images.rank === 3 || $images.rank === 4, () => `Error in resizeBilinear: x must be rank 3 or 4, but got ` +\n `rank ${$images.rank}.`);\n util.assert(size.length === 2, () => `Error in resizeBilinear: new shape must be 2D, but got shape ` +\n `${size}.`);\n let batchImages = $images;\n let reshapedTo4D = false;\n if ($images.rank === 3) {\n reshapedTo4D = true;\n batchImages = reshape($images, [1, $images.shape[0], $images.shape[1], $images.shape[2]]);\n }\n const [newHeight, newWidth] = size;\n const forward = (backend, save) => {\n save([batchImages]);\n return backend.resizeBilinear(batchImages, newHeight, newWidth, alignCorners);\n };\n const inputs = { images: batchImages };\n const attrs = { alignCorners, size };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* gradient */, ResizeBilinear, attrs);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const resizeBilinear = op({ resizeBilinear_ });\n//# sourceMappingURL=resize_bilinear.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { ResizeNearestNeighbor } from '../../kernel_names';\nimport { convertToTensor } from '../../tensor_util_env';\nimport * as util from '../../util';\nimport { op } from '../operation';\nimport { reshape } from '../reshape';\n/**\n * NearestNeighbor resize a batch of 3D images to a new shape.\n *\n * @param images The images, of rank 4 or rank 3, of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is assumed.\n * @param size The new shape `[newHeight, newWidth]` to resize the\n * images to. Each channel is resized individually.\n * @param alignCorners Defaults to False. If true, rescale\n * input by `(new_height - 1) / (height - 1)`, which exactly aligns the 4\n * corners of images and resized images. If false, rescale by\n * `new_height / height`. 
Treat similarly the width dimension.\n *\n * @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'}\n */\nfunction resizeNearestNeighbor_(images, size, alignCorners = false) {\n const $images = convertToTensor(images, 'images', 'resizeNearestNeighbor');\n util.assert($images.rank === 3 || $images.rank === 4, () => `Error in resizeNearestNeighbor: x must be rank 3 or 4, but got ` +\n `rank ${$images.rank}.`);\n util.assert(size.length === 2, () => `Error in resizeNearestNeighbor: new shape must be 2D, but got shape ` +\n `${size}.`);\n util.assert($images.dtype === 'float32' || $images.dtype === 'int32', () => '`images` must have `int32` or `float32` as dtype');\n let batchImages = $images;\n let reshapedTo4D = false;\n if ($images.rank === 3) {\n reshapedTo4D = true;\n batchImages = reshape($images, [1, $images.shape[0], $images.shape[1], $images.shape[2]]);\n }\n const [newHeight, newWidth] = size;\n const inputs = { images: batchImages };\n const attrs = { alignCorners, size };\n const forward = (backend, save) => {\n save([batchImages]);\n return backend.resizeNearestNeighbor(batchImages, newHeight, newWidth, alignCorners);\n };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* gradient */, ResizeNearestNeighbor, attrs);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const resizeNearestNeighbor = op({ resizeNearestNeighbor_ });\n//# sourceMappingURL=resize_nearest_neighbor.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../../tensor_util_env';\nimport { assert } from '../../util';\nimport { greaterEqual } from '../greater_equal';\nimport { lessEqual } from '../less_equal';\nimport { logicalAnd } from '../logical_and';\nimport { op } from '../operation';\nimport { range } from '../range';\nimport { reshape } from '../reshape';\nimport { scalar } from '../scalar';\nimport { stack } from '../stack';\nimport { sub } from '../sub';\nimport { unstack } from '../unstack';\nimport { where } from '../where';\nimport { zeros } from '../zeros';\n/**\n * Copy a tensor setting everything outside a central band in each innermost\n * matrix to zero.\n *\n * The band part is computed as follows: Assume input has `k` dimensions\n * `[I, J, K, ..., M, N]`, then the output is a tensor with the same shape where\n * `band[i, j, k, ..., m, n] = in_band(m, n) * input[i, j, k, ..., m, n]`.\n * The indicator function\n * `in_band(m, n) = (num_lower < 0 || (m-n) <= num_lower)`\n * `&& (num_upper < 0 || (n-m) <= num_upper)`\n *\n * ```js\n * const x = tf.tensor2d([[ 0, 1, 2, 3],\n * [-1, 0, 1, 2],\n * [-2, -1, 0, 1],\n * [-3, -2, -1, 0]]);\n * let y = tf.linalg.bandPart(x, 1, -1);\n * y.print(); // [[ 0, 1, 2, 3],\n * // [-1, 0, 1, 2],\n * // [ 0, -1, 0, 1],\n * // [ 0, 0 , -1, 0]]\n * let z = tf.linalg.bandPart(x, 
2, 1);\n * z.print(); // [[ 0, 1, 0, 0],\n * // [-1, 0, 1, 0],\n * // [-2, -1, 0, 1],\n * // [ 0, -2, -1, 0]]\n * ```\n *\n * @param x Rank `k` tensor\n * @param numLower Number of subdiagonals to keep.\n * If negative, keep entire lower triangle.\n * @param numUpper Number of subdiagonals to keep.\n * If negative, keep entire upper triangle.\n * @returns Rank `k` tensor of the same shape as input.\n * The extracted banded tensor.\n *\n * @doc {heading:'Operations', subheading:'Linear Algebra', namespace:'linalg'}\n */\nfunction bandPart_(a, numLower, numUpper) {\n assert(numLower % 1 === 0, () => `bandPart(): numLower must be an integer, got ${numLower}.`);\n assert(numUpper % 1 === 0, () => `bandPart(): numUpper must be an integer, got ${numUpper}.`);\n const $a = convertToTensor(a, 'a', 'bandPart');\n assert($a.rank >= 2, () => `bandPart(): Rank must be at least 2, got ${$a.rank}.`);\n const shape = $a.shape;\n const [M, N] = $a.shape.slice(-2);\n if (!(numLower <= M)) {\n throw new Error(`bandPart(): numLower (${numLower})` +\n ` must not be greater than the number of rows (${M}).`);\n }\n if (!(numUpper <= N)) {\n throw new Error(`bandPart(): numUpper (${numUpper})` +\n ` must not be greater than the number of columns (${N}).`);\n }\n if (numLower < 0) {\n numLower = M;\n }\n if (numUpper < 0) {\n numUpper = N;\n }\n const i = reshape(range(0, M, 1, 'int32'), [-1, 1]);\n const j = range(0, N, 1, 'int32');\n const ij = sub(i, j);\n const inBand = logicalAnd(lessEqual(ij, scalar(+numLower, 'int32')), greaterEqual(ij, scalar(-numUpper, 'int32')));\n const zero = zeros([M, N], $a.dtype);\n return reshape(stack(unstack(reshape($a, [-1, M, N]))\n .map(mat => where(inBand, mat, zero))), shape);\n}\nexport const bandPart = op({ bandPart_ });\n//# sourceMappingURL=band_part.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { assert } from '../../util';\nimport { div } from '../div';\nimport { mul } from '../mul';\nimport { norm } from '../norm';\nimport { op } from '../operation';\nimport { split } from '../split';\nimport { squeeze } from '../squeeze';\nimport { stack } from '../stack';\nimport { sub } from '../sub';\nimport { sum } from '../sum';\n/**\n * Gram-Schmidt orthogonalization.\n *\n * ```js\n * const x = tf.tensor2d([[1, 2], [3, 4]]);\n * let y = tf.linalg.gramSchmidt(x);\n * y.print();\n * console.log('Orthogonalized:');\n * y.dot(y.transpose()).print(); // should be nearly the identity matrix.\n * console.log('First row direction maintained:');\n * const data = await y.array();\n * console.log(data[0][1] / data[0][0]); // should be nearly 2.\n * ```\n *\n * @param xs The vectors to be orthogonalized, in one of the two following\n * formats:\n * - An Array of `tf.Tensor1D`.\n * - A `tf.Tensor2D`, i.e., a matrix, in which case the vectors are the rows\n * of `xs`.\n * In each case, all the vectors must have the same length and the length\n * must be greater than or equal to the number of vectors.\n * @returns The orthogonalized and normalized vectors or matrix.\n * Orthogonalization means that the vectors or the rows of the matrix\n * are orthogonal (zero inner products). Normalization means that each\n * vector or each row of the matrix has an L2 norm that equals `1`.\n *\n * @doc {heading:'Operations', subheading:'Linear Algebra', namespace:'linalg'}\n */\nfunction gramSchmidt_(xs) {\n let inputIsTensor2D;\n if (Array.isArray(xs)) {\n inputIsTensor2D = false;\n assert(xs != null && xs.length > 0, () => 'Gram-Schmidt process: input must not be null, undefined, or ' +\n 'empty');\n const dim = xs[0].shape[0];\n for (let i = 1; i < xs.length; ++i) {\n assert(xs[i].shape[0] === dim, () => 'Gram-Schmidt: Non-unique lengths found in the input vectors: ' +\n `(${xs[i].shape[0]} vs. ${dim})`);\n }\n }\n else {\n inputIsTensor2D = true;\n xs = split(xs, xs.shape[0], 0).map(x => squeeze(x, [0]));\n }\n assert(xs.length <= xs[0].shape[0], () => `Gram-Schmidt: Number of vectors (${xs.length}) exceeds ` +\n `number of dimensions (${xs[0].shape[0]}).`);\n const ys = [];\n const xs1d = xs;\n for (let i = 0; i < xs.length; ++i) {\n ys.push(ENGINE.tidy(() => {\n let x = xs1d[i];\n if (i > 0) {\n for (let j = 0; j < i; ++j) {\n const proj = mul(sum(mul(ys[j], x)), ys[j]);\n x = sub(x, proj);\n }\n }\n return div(x, norm(x, 'euclidean'));\n }));\n }\n if (inputIsTensor2D) {\n return stack(ys, 0);\n }\n else {\n return ys;\n }\n}\nexport const gramSchmidt = op({ gramSchmidt_ });\n//# sourceMappingURL=gram_schmidt.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../../engine';\nimport { dispose } from '../../globals';\nimport { assert } from '../../util';\nimport { clone } from '../clone';\nimport { concat } from '../concat';\nimport { div } from '../div';\nimport { eye } from '../eye';\nimport { greater } from '../greater';\nimport { matMul } from '../mat_mul';\nimport { mul } from '../mul';\nimport { neg } from '../neg';\nimport { norm } from '../norm';\nimport { op } from '../operation';\nimport { reshape } from '../reshape';\nimport { slice } from '../slice';\nimport { stack } from '../stack';\nimport { sub } from '../sub';\nimport { tensor2d } from '../tensor2d';\nimport { transpose } from '../transpose';\nimport { unstack } from '../unstack';\nimport { where } from '../where';\n/**\n * Compute QR decomposition of m-by-n matrix using Householder transformation.\n *\n * Implementation based on\n * [http://www.cs.cornell.edu/~bindel/class/cs6210-f09/lec18.pdf]\n * (http://www.cs.cornell.edu/~bindel/class/cs6210-f09/lec18.pdf)\n *\n * ```js\n * const a = tf.tensor2d([[1, 2], [3, 4]]);\n * let [q, r] = tf.linalg.qr(a);\n * console.log('Q');\n * q.print();\n * console.log('R');\n * r.print();\n * console.log('Orthogonalized');\n * q.dot(q.transpose()).print() // should be nearly the identity matrix.\n * console.log('Reconstructed');\n * q.dot(r).print(); // should be nearly [[1, 2], [3, 4]];\n * ```\n *\n * @param x The `tf.Tensor` to be QR-decomposed. Must have rank >= 2. Suppose\n * it has the shape `[..., M, N]`.\n * @param fullMatrices An optional boolean parameter. Defaults to `false`.\n * If `true`, compute full-sized `Q`. If `false` (the default),\n * compute only the leading N columns of `Q` and `R`.\n * @returns An `Array` of two `tf.Tensor`s: `[Q, R]`. `Q` is a unitary matrix,\n * i.e., its columns all have unit norm and are mutually orthogonal.\n * If `M >= N`,\n * If `fullMatrices` is `false` (default),\n * - `Q` has a shape of `[..., M, N]`,\n * - `R` has a shape of `[..., N, N]`.\n * If `fullMatrices` is `true` (default),\n * - `Q` has a shape of `[..., M, M]`,\n * - `R` has a shape of `[..., M, N]`.\n * If `M < N`,\n * - `Q` has a shape of `[..., M, M]`,\n * - `R` has a shape of `[..., M, N]`.\n * @throws If the rank of `x` is less than 2.\n *\n * @doc {heading:'Operations',\n * subheading:'Linear Algebra',\n * namespace:'linalg'}\n */\nfunction qr_(x, fullMatrices = false) {\n assert(x.rank >= 2, () => `qr() requires input tensor to have a rank >= 2, but got rank ${x.rank}`);\n if (x.rank === 2) {\n return qr2d(x, fullMatrices);\n }\n else {\n // Rank > 2.\n // TODO(cais): Below we split the input into individual 2D tensors,\n // perform QR decomposition on them and then stack the results back\n // together. 
We should explore whether this can be parallelized.\n const outerDimsProd = x.shape.slice(0, x.shape.length - 2)\n .reduce((value, prev) => value * prev);\n const x2ds = unstack(reshape(x, [\n outerDimsProd, x.shape[x.shape.length - 2],\n x.shape[x.shape.length - 1]\n ]), 0);\n const q2ds = [];\n const r2ds = [];\n x2ds.forEach(x2d => {\n const [q2d, r2d] = qr2d(x2d, fullMatrices);\n q2ds.push(q2d);\n r2ds.push(r2d);\n });\n const q = reshape(stack(q2ds, 0), x.shape);\n const r = reshape(stack(r2ds, 0), x.shape);\n return [q, r];\n }\n}\nfunction qr2d(x, fullMatrices = false) {\n return ENGINE.tidy(() => {\n assert(x.shape.length === 2, () => `qr2d() requires a 2D Tensor, but got a ${x.shape.length}D Tensor.`);\n const m = x.shape[0];\n const n = x.shape[1];\n let q = eye(m); // Orthogonal transform so far.\n let r = clone(x); // Transformed matrix so far.\n const one2D = tensor2d([[1]], [1, 1]);\n let w = clone(one2D);\n const iters = m >= n ? n : m;\n for (let j = 0; j < iters; ++j) {\n // This tidy within the for-loop ensures we clean up temporary\n // tensors as soon as they are no longer needed.\n const rTemp = r;\n const wTemp = w;\n const qTemp = q;\n [w, r, q] = ENGINE.tidy(() => {\n // Find H = I - tau * w * w', to put zeros below R(j, j).\n const rjEnd1 = slice(r, [j, j], [m - j, 1]);\n const normX = norm(rjEnd1);\n const rjj = slice(r, [j, j], [1, 1]);\n // The sign() function returns 0 on 0, which causes division by zero.\n const s = where(greater(rjj, 0), tensor2d([[-1]]), tensor2d([[1]]));\n const u1 = sub(rjj, mul(s, normX));\n const wPre = div(rjEnd1, u1);\n if (wPre.shape[0] === 1) {\n w = clone(one2D);\n }\n else {\n w = concat([\n one2D,\n slice(wPre, [1, 0], [wPre.shape[0] - 1, wPre.shape[1]])\n ], 0);\n }\n const tau = neg(div(matMul(s, u1), normX));\n // -- R := HR, Q := QH.\n const rjEndAll = slice(r, [j, 0], [m - j, n]);\n const tauTimesW = mul(tau, w);\n const wT = transpose(w);\n if (j === 0) {\n r = sub(rjEndAll, matMul(tauTimesW, matMul(wT, rjEndAll)));\n }\n else {\n const rTimesTau = sub(rjEndAll, matMul(tauTimesW, matMul(wT, rjEndAll)));\n r = concat([slice(r, [0, 0], [j, n]), rTimesTau], 0);\n }\n const tawTimesWT = transpose(tauTimesW);\n const qAllJEnd = slice(q, [0, j], [m, q.shape[1] - j]);\n if (j === 0) {\n q = sub(qAllJEnd, matMul(matMul(qAllJEnd, w), tawTimesWT));\n }\n else {\n const qTimesTau = sub(qAllJEnd, matMul(matMul(qAllJEnd, w), tawTimesWT));\n q = concat([slice(q, [0, 0], [m, j]), qTimesTau], 1);\n }\n return [w, r, q];\n });\n dispose([rTemp, wTemp, qTemp]);\n }\n if (!fullMatrices && m > n) {\n q = slice(q, [0, 0], [m, n]);\n r = slice(r, [0, 0], [n, n]);\n }\n return [q, r];\n });\n}\nexport const qr = op({ qr_ });\n//# sourceMappingURL=qr.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport var Reduction;\n(function (Reduction) {\n Reduction[Reduction[\"NONE\"] = 0] = \"NONE\";\n Reduction[Reduction[\"MEAN\"] = 1] = \"MEAN\";\n Reduction[Reduction[\"SUM\"] = 2] = \"SUM\";\n Reduction[Reduction[\"SUM_BY_NONZERO_WEIGHTS\"] = 3] = \"SUM_BY_NONZERO_WEIGHTS\";\n})(Reduction || (Reduction = {}));\n//# sourceMappingURL=loss_ops_utils.js.map", "import { convertToTensor } from '../../tensor_util_env';\nimport { cast } from '../cast';\nimport { div } from '../div';\nimport { Reduction } from '../loss_ops_utils';\nimport { mean } from '../mean';\nimport { mul } from '../mul';\nimport { notEqual } from '../not_equal';\nimport { ones } from '../ones';\nimport { op } from '../operation';\nimport { scalar } from '../scalar';\nimport { sum } from '../sum';\n/**\n * Computes the weighted loss between two tensors.\n *\n * @param losses Tensor of shape `[batch_size, d1, ... dN]`.\n * @param weights Tensor whose rank is either 0, or the same rank as\n * `losses`, and must be broadcastable to `losses` (i.e., all\n * dimensions must be either `1`, or the same as the corresponding\n * `losses` dimension).\n *\n * @doc {heading: 'Training', subheading: 'Losses', namespace: 'losses'}\n */\nfunction computeWeightedLoss_(losses, weights, reduction = Reduction.SUM_BY_NONZERO_WEIGHTS) {\n const $losses = convertToTensor(losses, 'losses', 'computeWeightedLoss');\n let $weights = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'computeWeightedLoss');\n }\n const weightedLoss = ($weights == null) ? $losses : mul($losses, $weights);\n if (reduction === Reduction.NONE) {\n return weightedLoss;\n }\n if (reduction === Reduction.SUM) {\n return sum(weightedLoss);\n }\n if (reduction === Reduction.MEAN) {\n if ($weights == null) {\n return mean(weightedLoss);\n }\n else {\n const broadcastFactor = $losses.size / $weights.size;\n const result = div(sum(weightedLoss), sum($weights));\n return broadcastFactor > 1 ? div(result, scalar(broadcastFactor)) :\n result;\n }\n }\n if (reduction === Reduction.SUM_BY_NONZERO_WEIGHTS) {\n if ($weights == null) {\n return div(sum(weightedLoss), scalar($losses.size));\n }\n else {\n const broadcastedWeights = mul($weights, ones($losses.shape));\n const numNonZeros = cast(sum(notEqual(broadcastedWeights, scalar(0))), 'float32');\n return div(sum(weightedLoss), numNonZeros);\n }\n }\n throw Error(`Unknown reduction: ${reduction}`);\n}\nexport const computeWeightedLoss = op({ computeWeightedLoss_ });\n//# sourceMappingURL=compute_weighted_loss.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../../tensor_util_env';\nimport { assertShapesMatch } from '../../util';\nimport { abs } from '../abs';\nimport { Reduction } from '../loss_ops_utils';\nimport { op } from '../operation';\nimport { sub } from '../sub';\nimport { computeWeightedLoss } from './compute_weighted_loss';\n/**\n * Computes the absolute difference loss between two tensors.\n *\n * @param labels The ground truth output tensor, same dimensions as\n * 'predictions'.\n * @param predictions The predicted outputs.\n * @param weights Tensor whose rank is either 0, or the same rank as\n * `labels`, and must be broadcastable to `labels` (i.e., all dimensions\n * must be either `1`, or the same as the corresponding `losses`\n * dimension).\n * @param reduction Type of reduction to apply to loss. Should be of type\n * `Reduction`\n *\n * @doc {heading: 'Training', subheading: 'Losses', namespace: 'losses'}\n */\nfunction absoluteDifference_(labels, predictions, weights, reduction = Reduction.SUM_BY_NONZERO_WEIGHTS) {\n const $labels = convertToTensor(labels, 'labels', 'absoluteDifference');\n const $predictions = convertToTensor(predictions, 'predictions', 'absoluteDifference');\n let $weights = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'absoluteDifference');\n }\n assertShapesMatch($labels.shape, $predictions.shape, 'Error in absoluteDifference: ');\n const losses = abs(sub($labels, $predictions));\n return computeWeightedLoss(losses, $weights, reduction);\n}\nexport const absoluteDifference = op({ absoluteDifference_ });\n//# sourceMappingURL=absolute_difference.js.map", "import { convertToTensor } from '../../tensor_util_env';\nimport { assertShapesMatch } from '../../util';\nimport { Reduction } from '../loss_ops_utils';\nimport { mul } from '../mul';\nimport { op } from '../operation';\nimport { scalar } from '../scalar';\nimport { sub } from '../sub';\nimport { sum } from '../sum';\nimport { computeWeightedLoss } from './compute_weighted_loss';\n/**\n * Computes the cosine distance loss between two tensors.\n *\n * @param labels The ground truth output tensor, same dimensions as\n * 'predictions'.\n * @param predictions The predicted outputs.\n * @param axis The dimension along which the cosine distance is computed.\n * @param weights Tensor whose rank is either 0, or the same rank as\n * `labels`, and must be broadcastable to `labels` (i.e., all dimensions\n * must be either `1`, or the same as the corresponding `losses`\n * dimension).\n * @param reduction Type of reduction to apply to loss. 
Should be of type\n * `Reduction`\n *\n * @doc {heading: 'Training', subheading: 'Losses', namespace: 'losses'}\n */\nfunction cosineDistance_(labels, predictions, axis, weights, reduction = Reduction.SUM_BY_NONZERO_WEIGHTS) {\n const $labels = convertToTensor(labels, 'labels', 'cosineDistance');\n const $predictions = convertToTensor(predictions, 'predictions', 'cosineDistance');\n let $weights = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'cosineDistance');\n }\n assertShapesMatch($labels.shape, $predictions.shape, 'Error in cosineDistance: ');\n const one = scalar(1);\n const losses = sub(one, sum(mul($labels, $predictions), axis, true));\n return computeWeightedLoss(losses, $weights, reduction);\n}\nexport const cosineDistance = op({ cosineDistance_ });\n//# sourceMappingURL=cosine_distance.js.map", "import { convertToTensor } from '../../tensor_util_env';\nimport { assertShapesMatch } from '../../util';\nimport { Reduction } from '../loss_ops_utils';\nimport { mul } from '../mul';\nimport { op } from '../operation';\nimport { relu } from '../relu';\nimport { scalar } from '../scalar';\nimport { sub } from '../sub';\nimport { computeWeightedLoss } from './compute_weighted_loss';\n/**\n * Computes the Hinge loss between two tensors.\n *\n * @param labels The ground truth output tensor, same dimensions as\n * 'predictions'.\n * @param predictions The predicted outputs.\n * @param weights Tensor whose rank is either 0, or the same rank as\n * `labels`, and must be broadcastable to `labels` (i.e., all dimensions\n * must be either `1`, or the same as the corresponding `losses`\n * dimension).\n * @param reduction Type of reduction to apply to loss. Should be of type\n * `Reduction`\n *\n * @doc {heading: 'Training', subheading: 'Losses', namespace: 'losses'}\n */\nfunction hingeLoss_(labels, predictions, weights, reduction = Reduction.SUM_BY_NONZERO_WEIGHTS) {\n let $labels = convertToTensor(labels, 'labels', 'hingeLoss');\n const $predictions = convertToTensor(predictions, 'predictions', 'hingeLoss');\n let $weights = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'hingeLoss');\n }\n assertShapesMatch($labels.shape, $predictions.shape, 'Error in hingeLoss: ');\n const one = scalar(1);\n // Convert binary labels to (-1, 1)\n $labels = sub(mul(scalar(2), $labels), one);\n const losses = relu(sub(one, mul($labels, $predictions)));\n return computeWeightedLoss(losses, $weights, reduction);\n}\nexport const hingeLoss = op({ hingeLoss_ });\n//# sourceMappingURL=hinge_loss.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../../tensor_util_env';\nimport { assertShapesMatch } from '../../util';\nimport { abs } from '../abs';\nimport { add } from '../add';\nimport { Reduction } from '../loss_ops_utils';\nimport { minimum } from '../minimum';\nimport { mul } from '../mul';\nimport { op } from '../operation';\nimport { scalar } from '../scalar';\nimport { square } from '../square';\nimport { sub } from '../sub';\nimport { computeWeightedLoss } from './compute_weighted_loss';\n/**\n * Computes the huber loss between two tensors.\n *\n * @param labels The ground truth output tensor, same dimensions as\n * 'predictions'.\n * @param predictions The predicted outputs.\n * @param weights Tensor whose rank is either 0, or the same rank as\n * `labels`, and must be broadcastable to `labels` (i.e., all dimensions\n * must be either `1`, or the same as the corresponding `losses`\n * dimension).\n * @param delta Point where huber loss changes from quadratic to linear.\n * @param reduction Type of reduction to apply to loss. Should be of type\n * `Reduction`.\n *\n * @doc {heading: 'Training', subheading: 'Losses', namespace: 'losses'}\n */\nfunction huberLoss_(labels, predictions, weights, delta = 1.0, reduction = Reduction.SUM_BY_NONZERO_WEIGHTS) {\n const $labels = convertToTensor(labels, 'labels', 'huberLoss');\n const $predictions = convertToTensor(predictions, 'predictions', 'huberLoss');\n let $weights = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'huberLoss');\n }\n assertShapesMatch($labels.shape, $predictions.shape, 'Error in huberLoss: ');\n const deltaScalar = scalar(delta);\n const error = abs(sub($predictions, $labels));\n const quadratic = minimum(error, deltaScalar);\n const linear = sub(error, quadratic);\n const losses = add(mul(scalar(0.5), square(quadratic)), mul(deltaScalar, linear));\n return computeWeightedLoss(losses, $weights, reduction);\n}\nexport const huberLoss = op({ huberLoss_ });\n//# sourceMappingURL=huber_loss.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../../tensor_util_env';\nimport { assertShapesMatch } from '../../util';\nimport { add } from '../add';\nimport { log } from '../log';\nimport { Reduction } from '../loss_ops_utils';\nimport { mul } from '../mul';\nimport { neg } from '../neg';\nimport { op } from '../operation';\nimport { scalar } from '../scalar';\nimport { sub } from '../sub';\nimport { computeWeightedLoss } from './compute_weighted_loss';\n/**\n * Computes the log loss between two tensors.\n *\n * @param labels The ground truth output tensor, same dimensions as\n * 'predictions'.\n * @param predictions The predicted outputs.\n * @param weights Tensor whose rank is either 0, or the same rank as\n * `labels`, and must be broadcastable to `labels` (i.e., all dimensions\n * must be either `1`, or the same as the corresponding `losses`\n * dimension).\n * @param epsilon A small increment to avoid taking log of zero\n * @param reduction Type of reduction to apply to loss. Should be of type\n * `Reduction`\n *\n * @doc {heading: 'Training', subheading: 'Losses', namespace: 'losses'}\n */\nfunction logLoss_(labels, predictions, weights, epsilon = 1e-7, reduction = Reduction.SUM_BY_NONZERO_WEIGHTS) {\n const $labels = convertToTensor(labels, 'labels', 'logLoss');\n const $predictions = convertToTensor(predictions, 'predictions', 'logLoss');\n let $weights = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'logLoss');\n }\n assertShapesMatch($labels.shape, $predictions.shape, 'Error in logLoss: ');\n const one = scalar(1);\n const epsilonScalar = scalar(epsilon);\n const l1 = neg(mul($labels, log(add($predictions, epsilonScalar))));\n const l2 = mul(sub(one, $labels), log(add(sub(one, $predictions), epsilonScalar)));\n const losses = sub(l1, l2);\n return computeWeightedLoss(losses, $weights, reduction);\n}\nexport const logLoss = op({ logLoss_ });\n//# sourceMappingURL=log_loss.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../../tensor_util_env';\nimport { assertShapesMatch } from '../../util';\nimport { Reduction } from '../loss_ops_utils';\nimport { op } from '../operation';\nimport { squaredDifference } from '../squared_difference';\nimport { computeWeightedLoss } from './compute_weighted_loss';\n/**\n * Computes the mean squared error between two tensors.\n *\n * @param labels The ground truth output tensor, same dimensions as\n * 'predictions'.\n * @param predictions The predicted outputs.\n * @param weights Tensor whose rank is either 0, or the same rank as\n * `labels`, and must be broadcastable to `labels` (i.e., all dimensions\n * must be either `1`, or the same as the corresponding `losses`\n * dimension).\n * @param reduction Type of reduction to apply to loss. Should be of type\n * `Reduction`\n *\n * @doc {heading: 'Training', subheading: 'Losses', namespace: 'losses'}\n */\nfunction meanSquaredError_(labels, predictions, weights, reduction = Reduction.SUM_BY_NONZERO_WEIGHTS) {\n const $labels = convertToTensor(labels, 'labels', 'meanSquaredError');\n const $predictions = convertToTensor(predictions, 'predictions', 'meanSquaredError');\n let $weights = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'meanSquaredError');\n }\n assertShapesMatch($labels.shape, $predictions.shape, 'Error in meanSquaredError: ');\n const losses = squaredDifference($labels, $predictions);\n return computeWeightedLoss(losses, $weights, reduction);\n}\nexport const meanSquaredError = op({ meanSquaredError_ });\n//# sourceMappingURL=mean_squared_error.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { convertToTensor } from '../../tensor_util_env';\nimport { assertShapesMatch } from '../../util';\nimport { abs } from '../abs';\nimport { add } from '../add';\nimport { exp } from '../exp';\nimport { log1p } from '../log1p';\nimport { Reduction } from '../loss_ops_utils';\nimport { mul } from '../mul';\nimport { neg } from '../neg';\nimport { op } from '../operation';\nimport { relu } from '../relu';\nimport { scalar } from '../scalar';\nimport { sub } from '../sub';\nimport { computeWeightedLoss } from './compute_weighted_loss';\nfunction sigmoidCrossEntropyWithLogits_(labels, logits) {\n const $labels = convertToTensor(labels, 'labels', 'sigmoidCrossEntropyWithLogits');\n const $logits = convertToTensor(logits, 'logits', 'sigmoidCrossEntropyWithLogits');\n assertShapesMatch($labels.shape, $logits.shape, 'Error in sigmoidCrossEntropyWithLogits: ');\n /**\n * Implementation Details:\n *\n * For brevity, let `x = logits`, `z = labels`. The logistic loss is\n * z * -log(sigmoid(x)) + (1 - z) * -log(1 - sigmoid(x))\n * = z * -log(1 / (1 + exp(-x))) + (1 - z) * -log(exp(-x) / (1 + exp(-x)))\n * = z * log(1 + exp(-x)) + (1 - z) * (-log(exp(-x)) + log(1 + exp(-x)))\n * = z * log(1 + exp(-x)) + (1 - z) * (x + log(1 + exp(-x))\n * = (1 - z) * x + log(1 + exp(-x))\n * = x - x * z + log(1 + exp(-x))\n *\n * For x < 0, to avoid overflow in exp(-x), we reformulate the above\n * x - x * z + log(1 + exp(-x))\n * = log(exp(x)) - x * z + log(1 + exp(-x))\n * = - x * z + log(1 + exp(x))\n *\n * Hence, to ensure stability and avoid overflow, the implementation uses\n * this equivalent formulation:\n * max(x, 0) - x * z + log(1 + exp(-abs(x)))\n */\n const maxOutput = relu($logits);\n const outputXTarget = mul($logits, $labels);\n const sigmoidOutput = log1p(exp(neg(abs($logits))));\n return add(sub(maxOutput, outputXTarget), sigmoidOutput);\n}\n/**\n * Computes the sigmoid cross entropy loss between two tensors.\n *\n * If labelSmoothing is nonzero, smooth the labels towards 1/2:\n *\n * newMulticlassLabels = multiclassLabels * (1 - labelSmoothing)\n * + 0.5 * labelSmoothing\n *\n * @param multiClassLabels The ground truth output tensor of shape\n * [batch_size, num_classes], same dimensions as 'predictions'.\n * @param logits The predicted outputs.\n * @param weights Tensor whose rank is either 0, or the same rank as\n * `labels`, and must be broadcastable to `labels` (i.e., all dimensions\n * must be either `1`, or the same as the corresponding `losses`\n * dimension).\n * @param labelSmoothing If greater than 0, then smooth the labels.\n * @param reduction Type of reduction to apply to loss. 
Should be of type\n * `Reduction`\n *\n * @doc { heading: 'Training', subheading: 'Losses', namespace: 'losses' }\n */\nfunction sigmoidCrossEntropy_(multiClassLabels, logits, weights, labelSmoothing = 0, reduction = Reduction.SUM_BY_NONZERO_WEIGHTS) {\n let $multiClassLabels = convertToTensor(multiClassLabels, 'multiClassLabels', 'sigmoidCrossEntropy');\n const $logits = convertToTensor(logits, 'logits', 'sigmoidCrossEntropy');\n let $weights = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'sigmoidCrossEntropy');\n }\n assertShapesMatch($multiClassLabels.shape, $logits.shape, 'Error in sigmoidCrossEntropy: ');\n if (labelSmoothing > 0) {\n const labelSmoothingScalar = scalar(labelSmoothing);\n const one = scalar(1);\n const half = scalar(0.5);\n $multiClassLabels =\n add(mul($multiClassLabels, sub(one, labelSmoothingScalar)), mul(half, labelSmoothingScalar));\n }\n const losses = sigmoidCrossEntropyWithLogits_($multiClassLabels, $logits);\n return computeWeightedLoss(losses, $weights, reduction);\n}\nexport const sigmoidCrossEntropy = op({ sigmoidCrossEntropy_ });\n//# sourceMappingURL=sigmoid_cross_entropy.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { customGrad } from '../../gradients';\nimport { convertToTensor } from '../../tensor_util_env';\nimport { assertShapesMatch } from '../../util';\nimport { add } from '../add';\nimport { expandShapeToKeepDim } from '../axis_util';\nimport { cast } from '../cast';\nimport { div } from '../div';\nimport { exp } from '../exp';\nimport { logSumExp } from '../log_sum_exp';\nimport { Reduction } from '../loss_ops_utils';\nimport { mul } from '../mul';\nimport { neg } from '../neg';\nimport { op } from '../operation';\nimport { reshape } from '../reshape';\nimport { scalar } from '../scalar';\nimport { sub } from '../sub';\nimport { sum } from '../sum';\nimport { computeWeightedLoss } from './compute_weighted_loss';\n/**\n * Computes softmax cross entropy between logits and labels.\n *\n * Measures the probability error in discrete classification tasks in which\n * the classes are mutually exclusive (each entry is in exactly one class).\n * For example, each CIFAR-10 image is labeled with one and only one label: an\n * image can be a dog or a truck, but not both.\n *\n * `NOTE`: While the classes are mutually exclusive, their probabilities need\n * not be. All that is required is that each row of labels is a valid\n * probability distribution. If they are not, the computation of the gradient\n * will be incorrect.\n *\n * `WARNING`: This op expects unscaled logits, since it performs a softmax on\n * logits internally for efficiency. Do not call this op with the output of\n * softmax, as it will produce incorrect results.\n *\n * logits and labels must have the same shape, e.g. 
[batch_size, num_classes]\n * and the same dtype.\n * @param labels The labels array.\n * @param logits The logits array.\n * @param dim The dimension softmax would be performed on. Defaults to `-1`\n * which indicates the last dimension.\n */\nfunction softmaxCrossEntropyWithLogits_(labels, logits, dim = -1) {\n if (dim === -1) {\n dim = logits.rank - 1;\n }\n if (dim !== logits.rank - 1) {\n throw Error(`Softmax cross entropy along a non-last dimension is not yet ` +\n `supported. Labels / logits was rank ${logits.rank} ` +\n `and dim was ${dim}`);\n }\n // Use a custom gradient for numerical stability.\n const customOp = customGrad((labels, logits, save) => {\n // Reference:\n // 1. http://cs231n.github.io/linear-classify/#softmax\n // 2. https://blog.feedly.com/tricks-of-the-trade-logsumexp/\n const keepDims = true;\n const lse = logSumExp(logits, [dim], keepDims);\n const logResult = sub(cast(logits, 'float32'), lse);\n save([labels, logResult]);\n const costVector = neg(mul(logResult, labels));\n const value = sum(costVector, [dim]);\n const gradFunc = (dy, saved) => {\n const [labels, logResult] = saved;\n const dyShape = expandShapeToKeepDim(dy.shape, [dim]);\n return [\n mul(reshape(dy, dyShape), sub(cast(labels, 'float32'), exp(logResult))),\n mul(reshape(dy, dyShape), sub(exp(logResult), cast(labels, 'float32'))),\n ];\n };\n return { value, gradFunc };\n });\n return customOp(labels, logits);\n}\n/**\n * Computes the softmax cross entropy loss between two tensors.\n *\n * If labelSmoothing is nonzero, smooth the labels towards 1/2:\n *\n * newOnehotLabels = onehotLabels * (1 - labelSmoothing)\n * + labelSmoothing / numClasses\n *\n * @param onehotLabels One hot encoded labels\n * [batch_size, num_classes], same dimensions as 'predictions'.\n * @param logits The predicted outputs.\n * @param weights Tensor whose rank is either 0, or 1, and must be\n * broadcastable to `loss` of shape [batch_size]\n * @param labelSmoothing If greater than 0, then smooth the labels.\n * @param reduction Type of reduction to apply to loss. Should be of type\n * `Reduction`\n *\n * @doc { heading: 'Training', subheading: 'Losses', namespace: 'losses' }\n */\nfunction softmaxCrossEntropy_(onehotLabels, logits, weights, labelSmoothing = 0, reduction = Reduction.SUM_BY_NONZERO_WEIGHTS) {\n let $onehotLabels = convertToTensor(onehotLabels, 'onehotLabels', 'softmaxCrossEntropy');\n const $logits = convertToTensor(logits, 'logits', 'softmaxCrossEntropy');\n let $weights = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'softmaxCrossEntropy');\n }\n assertShapesMatch($onehotLabels.shape, $logits.shape, 'Error in softmaxCrossEntropy: ');\n if (labelSmoothing > 0) {\n const labelSmoothingScalar = scalar(labelSmoothing);\n const one = scalar(1);\n const numClasses = scalar($onehotLabels.shape[1]);\n $onehotLabels =\n add(mul($onehotLabels, sub(one, labelSmoothingScalar)), div(labelSmoothingScalar, numClasses));\n }\n const losses = softmaxCrossEntropyWithLogits_($onehotLabels, $logits);\n return computeWeightedLoss(losses, $weights, reduction);\n}\nexport const softmaxCrossEntropy = op({ softmaxCrossEntropy_ });\n//# sourceMappingURL=softmax_cross_entropy.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// Modularized ops.\nexport { abs } from './abs';\nexport { acos } from './acos';\nexport { acosh } from './acosh';\nexport { add } from './add';\nexport { addN } from './add_n';\nexport { all } from './all';\nexport { any } from './any';\nexport { argMax } from './arg_max';\nexport { argMin } from './arg_min';\nexport { asin } from './asin';\nexport { asinh } from './asinh';\nexport { atan } from './atan';\nexport { atan2 } from './atan2';\nexport { atanh } from './atanh';\nexport { avgPool } from './avg_pool';\nexport { avgPool3d } from './avg_pool_3d';\nexport { basicLSTMCell } from './basic_lstm_cell';\nexport { batchToSpaceND } from './batch_to_space_nd';\nexport { batchNorm } from './batchnorm';\nexport { batchNorm2d } from './batchnorm2d';\nexport { batchNorm3d } from './batchnorm3d';\nexport { batchNorm4d } from './batchnorm4d';\nexport { broadcastTo } from './broadcast_to';\nexport { buffer } from './buffer';\nexport { cast } from './cast';\nexport { ceil } from './ceil';\nexport { clipByValue } from './clip_by_value';\nexport { clone } from './clone';\nexport { complex } from './complex';\nexport { concat } from './concat';\nexport { concat1d } from './concat_1d';\nexport { concat2d } from './concat_2d';\nexport { concat3d } from './concat_3d';\nexport { concat4d } from './concat_4d';\nexport { conv1d } from './conv1d';\nexport { conv2d } from './conv2d';\nexport { conv2dTranspose } from './conv2d_transpose';\nexport { conv3d } from './conv3d';\nexport { conv3dTranspose } from './conv3d_transpose';\nexport { cos } from './cos';\nexport { cosh } from './cosh';\nexport { cumsum } from './cumsum';\nexport { depthToSpace } from './depth_to_space';\nexport { depthwiseConv2d } from './depthwise_conv2d';\nexport { diag } from './diag';\nexport { dilation2d } from './dilation2d';\nexport { div } from './div';\nexport { divNoNan } from './div_no_nan';\nexport { dot } from './dot';\nexport { elu } from './elu';\nexport { equal } from './equal';\nexport { erf } from './erf';\nexport { exp } from './exp';\nexport { expandDims } from './expand_dims';\nexport { expm1 } from './expm1';\nexport { eye } from './eye';\nexport { fill } from './fill';\nexport { floor } from './floor';\nexport { floorDiv } from './floorDiv';\nexport { gather } from './gather';\nexport { greater } from './greater';\nexport { greaterEqual } from './greater_equal';\nexport { imag } from './imag';\nexport { isFinite } from './is_finite';\nexport { isInf } from './is_inf';\nexport { isNaN } from './is_nan';\nexport { leakyRelu } from './leaky_relu';\nexport { less } from './less';\nexport { lessEqual } from './less_equal';\nexport { linspace } from './linspace';\nexport { localResponseNormalization } from './local_response_normalization';\nexport { log } from './log';\nexport { log1p } from './log1p';\nexport { logSigmoid } from './log_sigmoid';\nexport { 
logSoftmax } from './log_softmax';\nexport { logSumExp } from './log_sum_exp';\nexport { logicalAnd } from './logical_and';\nexport { logicalNot } from './logical_not';\nexport { logicalOr } from './logical_or';\nexport { logicalXor } from './logical_xor';\nexport { matMul } from './mat_mul';\nexport { max } from './max';\nexport { maxPool } from './max_pool';\nexport { maxPool3d } from './max_pool_3d';\nexport { maxPoolWithArgmax } from './max_pool_with_argmax';\nexport { maximum } from './maximum';\nexport { mean } from './mean';\nexport { min } from './min';\nexport { minimum } from './minimum';\nexport { mirrorPad } from './mirror_pad';\nexport { mod } from './mod';\nexport { moments } from './moments';\nexport { mul } from './mul';\nexport { multiRNNCell } from './multi_rnn_cell';\nexport { multinomial } from './multinomial';\nexport { neg } from './neg';\nexport { notEqual } from './not_equal';\nexport { oneHot } from './one_hot';\nexport { ones } from './ones';\nexport { onesLike } from './ones_like';\nexport { outerProduct } from './outer_product';\nexport { pad } from './pad';\nexport { pad1d } from './pad1d';\nexport { pad2d } from './pad2d';\nexport { pad3d } from './pad3d';\nexport { pad4d } from './pad4d';\nexport { pool } from './pool';\nexport { pow } from './pow';\nexport { prelu } from './prelu';\nexport { print } from './print';\nexport { prod } from './prod';\nexport { rand } from './rand';\nexport { randomGamma } from './random_gamma';\nexport { randomNormal } from './random_normal';\nexport { randomUniform } from './random_uniform';\nexport { range } from './range';\nexport { real } from './real';\nexport { reciprocal } from './reciprocal';\nexport { relu } from './relu';\nexport { relu6 } from './relu6';\nexport { reshape } from './reshape';\nexport { reverse } from './reverse';\nexport { reverse1d } from './reverse_1d';\nexport { reverse2d } from './reverse_2d';\nexport { reverse3d } from './reverse_3d';\nexport { reverse4d } from './reverse_4d';\nexport { round } from './round';\nexport { rsqrt } from './rsqrt';\nexport { scalar } from './scalar';\nexport { selu } from './selu';\nexport { separableConv2d } from './separable_conv2d';\nexport { setdiff1dAsync } from './setdiff1d_async';\nexport { sigmoid } from './sigmoid';\nexport { sign } from './sign';\nexport { sin } from './sin';\nexport { sinh } from './sinh';\nexport { slice } from './slice';\nexport { slice1d } from './slice1d';\nexport { slice2d } from './slice2d';\nexport { slice3d } from './slice3d';\nexport { slice4d } from './slice4d';\nexport { softmax } from './softmax';\nexport { softplus } from './softplus';\nexport { spaceToBatchND } from './space_to_batch_nd';\nexport { fft } from './spectral/fft';\nexport { ifft } from './spectral/ifft';\nexport { irfft } from './spectral/irfft';\nexport { rfft } from './spectral/rfft';\nexport { split } from './split';\nexport { sqrt } from './sqrt';\nexport { square } from './square';\nexport { squaredDifference } from './squared_difference';\nexport { squeeze } from './squeeze';\nexport { stack } from './stack';\nexport { step } from './step';\nexport { stridedSlice } from './strided_slice';\nexport { sub } from './sub';\nexport { sum } from './sum';\nexport { tan } from './tan';\nexport { tanh } from './tanh';\nexport { tensor } from './tensor';\nexport { tensor1d } from './tensor1d';\nexport { tensor2d } from './tensor2d';\nexport { tensor3d } from './tensor3d';\nexport { tensor4d } from './tensor4d';\nexport { tensor5d } from './tensor5d';\nexport { tensor6d 
} from './tensor6d';\nexport { tile } from './tile';\nexport { topk } from './topk';\nexport { truncatedNormal } from './truncated_normal';\nexport { unique } from './unique';\nexport { unsortedSegmentSum } from './unsorted_segment_sum';\nexport { unstack } from './unstack';\nexport { variable } from './variable';\nexport { where } from './where';\nexport { whereAsync } from './where_async';\nexport { zeros } from './zeros';\nexport { zerosLike } from './zeros_like';\nexport * from './boolean_mask';\nexport * from './compare';\nexport * from './binary_ops';\nexport * from './transpose';\nexport * from './norm';\nexport * from './moving_average';\nexport * from './scatter_nd';\nexport * from './sparse_to_dense';\nexport * from './gather_nd';\nexport * from './dropout';\nexport * from './signal_ops_util';\nexport * from './in_top_k';\nexport { op, OP_SCOPE_SUFFIX } from './operation';\nimport { rfft } from './spectral/rfft';\nimport { fft } from './spectral/fft';\nimport { ifft } from './spectral/ifft';\nimport { irfft } from './spectral/irfft';\nconst spectral = {\n fft,\n ifft,\n rfft,\n irfft\n};\nimport * as fused from './fused_ops';\nimport { hammingWindow } from './signal/hamming_window';\nimport { hannWindow } from './signal/hann_window';\nimport { frame } from './signal/frame';\nimport { stft } from './signal/stft';\nconst signal = {\n hammingWindow,\n hannWindow,\n frame,\n stft,\n};\n// Image Ops namespace\nimport { cropAndResize } from './image/crop_and_resize';\nimport { flipLeftRight } from './image/flip_left_right';\nimport { rotateWithOffset } from './image/rotate_with_offset';\nimport { nonMaxSuppression } from './image/non_max_suppression';\nimport { nonMaxSuppressionAsync } from './image/non_max_suppression_async';\nimport { nonMaxSuppressionWithScore } from './image/non_max_suppression_with_score';\nimport { nonMaxSuppressionWithScoreAsync } from './image/non_max_suppression_with_score_async';\nimport { nonMaxSuppressionPadded } from './image/non_max_suppression_padded';\nimport { nonMaxSuppressionPaddedAsync } from './image/non_max_suppression_padded_async';\nimport { resizeBilinear } from './image/resize_bilinear';\nimport { resizeNearestNeighbor } from './image/resize_nearest_neighbor';\nconst image = {\n flipLeftRight,\n resizeNearestNeighbor,\n resizeBilinear,\n rotateWithOffset,\n cropAndResize,\n nonMaxSuppression,\n nonMaxSuppressionAsync,\n nonMaxSuppressionWithScore,\n nonMaxSuppressionWithScoreAsync,\n nonMaxSuppressionPadded,\n nonMaxSuppressionPaddedAsync\n};\n// linalg namespace\nimport { bandPart } from './linalg/band_part';\nimport { gramSchmidt } from './linalg/gram_schmidt';\nimport { qr } from './linalg/qr';\nconst linalg = {\n bandPart,\n gramSchmidt,\n qr\n};\n// losses namespace;\nimport { absoluteDifference } from './losses/absolute_difference';\nimport { computeWeightedLoss } from './losses/compute_weighted_loss';\nimport { cosineDistance } from './losses/cosine_distance';\nimport { hingeLoss } from './losses/hinge_loss';\nimport { huberLoss } from './losses/huber_loss';\nimport { logLoss } from './losses/log_loss';\nimport { meanSquaredError } from './losses/mean_squared_error';\nimport { sigmoidCrossEntropy } from './losses/sigmoid_cross_entropy';\nimport { softmaxCrossEntropy } from './losses/softmax_cross_entropy';\nconst losses = {\n absoluteDifference,\n computeWeightedLoss,\n cosineDistance,\n hingeLoss,\n huberLoss,\n logLoss,\n meanSquaredError,\n sigmoidCrossEntropy,\n softmaxCrossEntropy\n};\n// Second level exports.\nexport { image, 
linalg, losses, spectral, fused, signal };\n//# sourceMappingURL=ops.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { dispose } from '../globals';\nimport { variableGrads } from '../gradients';\nimport { scalar } from '../ops/ops';\nimport { Serializable } from '../serialization';\n/** @doc {heading: 'Training', subheading: 'Classes', namespace: 'train'} */\nexport class Optimizer extends Serializable {\n /**\n * Executes `f()` and minimizes the scalar output of `f()` by computing\n * gradients of y with respect to the list of trainable variables provided by\n * `varList`. If no list is provided, it defaults to all trainable variables.\n *\n * @param f The function to execute and whose output to minimize.\n * @param returnCost Whether to return the scalar cost value produced by\n * executing `f()`.\n * @param varList An optional list of variables to update. If specified, only\n * the trainable variables in varList will be updated by minimize. Defaults to\n * all trainable variables.\n *\n * @doc {heading: 'Training', subheading: 'Optimizers'}\n */\n minimize(f, returnCost = false, varList) {\n const { value, grads } = this.computeGradients(f, varList);\n if (varList != null) {\n const gradArray = varList.map(v => ({ name: v.name, tensor: grads[v.name] }));\n this.applyGradients(gradArray);\n }\n else {\n this.applyGradients(grads);\n }\n // Dispose gradients.\n dispose(grads);\n if (returnCost) {\n return value;\n }\n else {\n value.dispose();\n return null;\n }\n }\n /**\n * The number of iterations that this optimizer instance has been invoked for.\n */\n get iterations() {\n if (this.iterations_ == null) {\n this.iterations_ = 0;\n }\n return this.iterations_;\n }\n incrementIterations() {\n this.iterations_ = this.iterations + 1;\n }\n /**\n * Executes f() and computes the gradient of the scalar output of f() with\n * respect to the list of trainable variables provided by `varList`. If no\n * list is provided, it defaults to all trainable variables.\n *\n * @param f The function to execute and whose output to use for computing\n * gradients with respect to variables.\n * @param varList An optional list of variables to compute gradients with\n * respect to. If specified, only the trainable variables in varList will have\n * gradients computed with respect to. 
Defaults to all trainable variables.\n *\n * @doc {heading: 'Training', subheading: 'Optimizers'}\n */\n computeGradients(f, varList) {\n return variableGrads(f, varList);\n }\n /**\n * Dispose the variables (if any) owned by this optimizer instance.\n */\n dispose() {\n if (this.iterations_ != null) {\n dispose(this.iterations_);\n }\n }\n async saveIterations() {\n if (this.iterations_ == null) {\n this.iterations_ = 0;\n }\n return {\n name: 'iter',\n // TODO(cais): Use 'int64' type when available.\n tensor: scalar(this.iterations_, 'int32')\n };\n }\n async getWeights() {\n throw new Error('getWeights() is not implemented for this optimizer yet.');\n }\n async setWeights(weightValues) {\n throw new Error(`setWeights() is not implemented for this optimizer class ` +\n `${this.getClassName()}`);\n }\n /**\n * Extract the first element of the weight values and set it\n * as the iterations counter variable of this instance of optimizer.\n *\n * @param weightValues\n * @returns Weight values with the first element consumed and excluded.\n */\n async extractIterations(weightValues) {\n this.iterations_ = (await weightValues[0].tensor.data())[0];\n return weightValues.slice(1);\n }\n}\nObject.defineProperty(Optimizer, Symbol.hasInstance, {\n value: (instance) => {\n return instance.minimize != null && instance.computeGradients != null &&\n instance.applyGradients != null;\n }\n});\n//# sourceMappingURL=optimizer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { dispose, tidy } from '../globals';\nimport { add } from '../ops/add';\nimport { div } from '../ops/div';\nimport { mul } from '../ops/mul';\nimport { sqrt } from '../ops/ops';\nimport { square } from '../ops/square';\nimport { zerosLike } from '../ops/zeros_like';\nimport { registerClass } from '../serialization';\nimport { Optimizer } from './optimizer';\n/** @doclink Optimizer */\nexport class AdadeltaOptimizer extends Optimizer {\n constructor(learningRate, rho, epsilon = null) {\n super();\n this.learningRate = learningRate;\n this.rho = rho;\n this.epsilon = epsilon;\n this.accumulatedGrads = [];\n this.accumulatedUpdates = [];\n if (epsilon == null) {\n this.epsilon = ENGINE.backend.epsilon();\n }\n }\n applyGradients(variableGradients) {\n const variableNames = Array.isArray(variableGradients) ?\n variableGradients.map(item => item.name) :\n Object.keys(variableGradients);\n variableNames.forEach((name, i) => {\n const value = ENGINE.registeredVariables[name];\n const trainable = false;\n if (this.accumulatedGrads[i] == null) {\n this.accumulatedGrads[i] = {\n originalName: `${name}/accum_grad`,\n variable: tidy(() => zerosLike(value).variable(trainable))\n };\n }\n if (this.accumulatedUpdates[i] == null) {\n this.accumulatedUpdates[i] = {\n originalName: `${name}/accum_var`,\n variable: tidy(() => 
zerosLike(value).variable(trainable))\n };\n }\n const gradient = Array.isArray(variableGradients) ?\n variableGradients[i].tensor :\n variableGradients[name];\n if (gradient == null) {\n return;\n }\n const accumulatedGrad = this.accumulatedGrads[i].variable;\n const accumulatedUpdate = this.accumulatedUpdates[i].variable;\n tidy(() => {\n const newAccumulatedGrad = add(mul(accumulatedGrad, this.rho), mul(square(gradient), 1 - this.rho));\n const updates = mul(div(sqrt(add(accumulatedUpdate, this.epsilon)), sqrt(add(accumulatedGrad, this.epsilon))), gradient);\n const newAccumulatedUpdate = add(mul(accumulatedUpdate, this.rho), mul(square(updates), 1 - this.rho));\n accumulatedGrad.assign(newAccumulatedGrad);\n accumulatedUpdate.assign(newAccumulatedUpdate);\n const newValue = add(mul(updates, -this.learningRate), value);\n value.assign(newValue);\n });\n });\n this.incrementIterations();\n }\n dispose() {\n if (this.accumulatedUpdates != null) {\n dispose(this.accumulatedGrads.map(v => v.variable));\n dispose(this.accumulatedUpdates.map(v => v.variable));\n }\n }\n async getWeights() {\n // Order matters for Python compatibility.\n const variables = [...this.accumulatedGrads, ...this.accumulatedUpdates];\n return [await this.saveIterations()].concat(variables.map(v => ({ name: v.originalName, tensor: v.variable })));\n }\n async setWeights(weightValues) {\n weightValues = await this.extractIterations(weightValues);\n const variableCount = weightValues.length / 2;\n const trainable = false;\n this.accumulatedGrads =\n weightValues.slice(0, variableCount).map(v => ({\n originalName: v.name,\n variable: v.tensor.variable(trainable)\n }));\n this.accumulatedUpdates =\n weightValues.slice(variableCount, variableCount * 2)\n .map(v => ({\n originalName: v.name,\n variable: v.tensor.variable(trainable)\n }));\n }\n getConfig() {\n return {\n 'learningRate': this.learningRate,\n 'rho': this.rho,\n 'epsilon': this.epsilon\n };\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls(config['learningRate'], config['rho'], config['epsilon']);\n }\n}\n/** @nocollapse */\nAdadeltaOptimizer.className = 'Adadelta'; // Name matters for Python compatibility.\nregisterClass(AdadeltaOptimizer);\n//# sourceMappingURL=adadelta_optimizer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { dispose, tidy } from '../globals';\nimport { add } from '../ops/add';\nimport { div } from '../ops/div';\nimport { fill } from '../ops/fill';\nimport { mul } from '../ops/mul';\nimport { sqrt } from '../ops/sqrt';\nimport { square } from '../ops/square';\nimport { registerClass } from '../serialization';\nimport { Optimizer } from './optimizer';\n/** @doclink Optimizer */\nexport class AdagradOptimizer extends Optimizer {\n constructor(learningRate, initialAccumulatorValue = 0.1) {\n super();\n this.learningRate = learningRate;\n this.initialAccumulatorValue = initialAccumulatorValue;\n this.accumulatedGrads = [];\n }\n applyGradients(variableGradients) {\n const variableNames = Array.isArray(variableGradients) ?\n variableGradients.map(item => item.name) :\n Object.keys(variableGradients);\n variableNames.forEach((name, i) => {\n const value = ENGINE.registeredVariables[name];\n if (this.accumulatedGrads[i] == null) {\n const trainable = false;\n this.accumulatedGrads[i] = {\n originalName: `${name}/accumulator`,\n variable: tidy(() => fill(value.shape, this.initialAccumulatorValue)\n .variable(trainable))\n };\n }\n const gradient = Array.isArray(variableGradients) ?\n variableGradients[i].tensor :\n variableGradients[name];\n if (gradient == null) {\n return;\n }\n const accumulatedGrad = this.accumulatedGrads[i].variable;\n tidy(() => {\n const newAccumulatedGrad = add(accumulatedGrad, square(gradient));\n accumulatedGrad.assign(newAccumulatedGrad);\n const newValue = add(mul(div(gradient, sqrt(add(newAccumulatedGrad, ENGINE.backend.epsilon()))), -this.learningRate), value);\n value.assign(newValue);\n });\n });\n this.incrementIterations();\n }\n dispose() {\n if (this.accumulatedGrads != null) {\n dispose(this.accumulatedGrads.map(v => v.variable));\n }\n }\n async getWeights() {\n // Order matters for Python compatibility.\n return [await this.saveIterations()].concat(this.accumulatedGrads.map(v => ({ name: v.originalName, tensor: v.variable })));\n }\n async setWeights(weightValues) {\n weightValues = await this.extractIterations(weightValues);\n const trainable = false;\n this.accumulatedGrads = weightValues.map(v => ({ originalName: v.name, variable: v.tensor.variable(trainable) }));\n }\n getConfig() {\n return {\n 'learningRate': this.learningRate,\n 'initialAccumulatorValue': this.initialAccumulatorValue,\n };\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls(config['learningRate'], config['initialAccumulatorValue']);\n }\n}\n/** @nocollapse */\nAdagradOptimizer.className = 'Adagrad'; // Note: Name matters for Python compatibility.\nregisterClass(AdagradOptimizer);\n//# sourceMappingURL=adagrad_optimizer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { dispose, tidy } from '../globals';\nimport { add } from '../ops/add';\nimport { div } from '../ops/div';\nimport { mul } from '../ops/mul';\nimport { pow } from '../ops/pow';\nimport { scalar } from '../ops/scalar';\nimport { sqrt } from '../ops/sqrt';\nimport { square } from '../ops/square';\nimport { sub } from '../ops/sub';\nimport { zerosLike } from '../ops/zeros_like';\nimport { registerClass } from '../serialization';\nimport { Optimizer } from './optimizer';\nexport class AdamOptimizer extends Optimizer {\n constructor(learningRate, beta1, beta2, epsilon = null) {\n super();\n this.learningRate = learningRate;\n this.beta1 = beta1;\n this.beta2 = beta2;\n this.epsilon = epsilon;\n this.accumulatedFirstMoment = [];\n this.accumulatedSecondMoment = [];\n tidy(() => {\n // accB* will be updated by batch.\n this.accBeta1 = scalar(beta1).variable();\n this.accBeta2 = scalar(beta2).variable();\n });\n if (epsilon == null) {\n this.epsilon = ENGINE.backend.epsilon();\n }\n }\n applyGradients(variableGradients) {\n const varNames = Array.isArray(variableGradients) ?\n variableGradients.map(v => v.name) :\n Object.keys(variableGradients);\n tidy(() => {\n const oneMinusAccBeta1 = sub(1, this.accBeta1);\n const oneMinusAccBeta2 = sub(1, this.accBeta2);\n varNames.forEach((name, i) => {\n const value = ENGINE.registeredVariables[name];\n const trainable = false;\n if (this.accumulatedFirstMoment[i] == null) {\n this.accumulatedFirstMoment[i] = {\n originalName: `${name}/m`,\n variable: tidy(() => zerosLike(value).variable(trainable))\n };\n }\n if (this.accumulatedSecondMoment[i] == null) {\n this.accumulatedSecondMoment[i] = {\n originalName: `${name}/v`,\n variable: tidy(() => zerosLike(value).variable(trainable))\n };\n }\n const gradient = Array.isArray(variableGradients) ?\n variableGradients[i].tensor :\n variableGradients[name];\n if (gradient == null) {\n return;\n }\n const firstMoment = this.accumulatedFirstMoment[i].variable;\n const secondMoment = this.accumulatedSecondMoment[i].variable;\n const newFirstMoment = add(mul(firstMoment, this.beta1), mul(gradient, 1 - this.beta1));\n const newSecondMoment = add(mul(secondMoment, this.beta2), mul(square(gradient), 1 - this.beta2));\n const biasCorrectedFirstMoment = div(newFirstMoment, oneMinusAccBeta1);\n const biasCorrectedSecondMoment = div(newSecondMoment, oneMinusAccBeta2);\n firstMoment.assign(newFirstMoment);\n secondMoment.assign(newSecondMoment);\n const newValue = add(mul(div(biasCorrectedFirstMoment, add(sqrt(biasCorrectedSecondMoment), this.epsilon)), -this.learningRate), value);\n value.assign(newValue);\n });\n this.accBeta1.assign(mul(this.accBeta1, this.beta1));\n this.accBeta2.assign(mul(this.accBeta2, this.beta2));\n });\n this.incrementIterations();\n }\n dispose() {\n this.accBeta1.dispose();\n 
this.accBeta2.dispose();\n if (this.accumulatedFirstMoment != null) {\n dispose(this.accumulatedFirstMoment.map(v => v.variable));\n }\n if (this.accumulatedSecondMoment != null) {\n dispose(this.accumulatedSecondMoment.map(v => v.variable));\n }\n }\n async getWeights() {\n // Order matters for Python compatibility.\n const variables = [...this.accumulatedFirstMoment, ...this.accumulatedSecondMoment];\n return [await this.saveIterations()].concat(variables.map(v => ({ name: v.originalName, tensor: v.variable })));\n }\n async setWeights(weightValues) {\n weightValues = await this.extractIterations(weightValues);\n tidy(() => {\n this.accBeta1.assign(pow(this.beta1, this.iterations_ + 1));\n this.accBeta2.assign(pow(this.beta2, this.iterations_ + 1));\n });\n const variableCount = weightValues.length / 2;\n const trainable = false;\n this.accumulatedFirstMoment =\n weightValues.slice(0, variableCount).map(v => ({\n originalName: v.name,\n variable: v.tensor.variable(trainable)\n }));\n this.accumulatedSecondMoment =\n weightValues.slice(variableCount, variableCount * 2)\n .map(v => ({\n originalName: v.name,\n variable: v.tensor.variable(trainable)\n }));\n }\n getConfig() {\n return {\n 'learningRate': this.learningRate,\n 'beta1': this.beta1,\n 'beta2': this.beta2,\n 'epsilon': this.epsilon,\n };\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls(config['learningRate'], config['beta1'], config['beta2'], config['epsilon']);\n }\n}\n/** @nocollapse */\nAdamOptimizer.className = 'Adam'; // Note: Name matters for Python compatibility.\nregisterClass(AdamOptimizer);\n//# sourceMappingURL=adam_optimizer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { dispose, tidy } from '../globals';\nimport { abs } from '../ops/abs';\nimport { add } from '../ops/add';\nimport { div } from '../ops/div';\nimport { maximum } from '../ops/maximum';\nimport { mul } from '../ops/mul';\nimport { scalar } from '../ops/scalar';\nimport { sub } from '../ops/sub';\nimport { zerosLike } from '../ops/zeros_like';\nimport { registerClass } from '../serialization';\nimport { Optimizer } from './optimizer';\nexport class AdamaxOptimizer extends Optimizer {\n constructor(learningRate, beta1, beta2, epsilon = null, decay = 0.0) {\n super();\n this.learningRate = learningRate;\n this.beta1 = beta1;\n this.beta2 = beta2;\n this.epsilon = epsilon;\n this.decay = decay;\n this.accumulatedFirstMoment = [];\n this.accumulatedWeightedInfNorm = [];\n tidy(() => {\n this.iteration = scalar(0).variable();\n this.accBeta1 = scalar(beta1).variable();\n });\n if (epsilon == null) {\n this.epsilon = ENGINE.backend.epsilon();\n }\n }\n applyGradients(variableGradients) {\n const variableNames = Array.isArray(variableGradients) ?\n variableGradients.map(item => item.name) :\n 
Object.keys(variableGradients);\n tidy(() => {\n const oneMinusAccBeta1 = sub(1, this.accBeta1);\n const lr = div(-this.learningRate, add(mul(this.iteration, this.decay), 1));\n variableNames.forEach((name, i) => {\n const value = ENGINE.registeredVariables[name];\n const trainable = false;\n if (this.accumulatedFirstMoment[i] == null) {\n this.accumulatedFirstMoment[i] = {\n originalName: `${name}/m`,\n variable: zerosLike(value).variable(trainable)\n };\n }\n if (this.accumulatedWeightedInfNorm[i] == null) {\n this.accumulatedWeightedInfNorm[i] = {\n originalName: `${name}/v`,\n variable: zerosLike(value).variable(trainable)\n };\n }\n const gradient = Array.isArray(variableGradients) ?\n variableGradients[i].tensor :\n variableGradients[name];\n if (gradient == null) {\n return;\n }\n const firstMoment = this.accumulatedFirstMoment[i].variable;\n const weightedInfNorm = this.accumulatedWeightedInfNorm[i].variable;\n const newFirstMoment = add(mul(firstMoment, this.beta1), mul(gradient, 1 - this.beta1));\n const ut0 = mul(weightedInfNorm, this.beta2);\n const ut1 = abs(gradient);\n const newWeightedInfNorm = maximum(ut0, ut1);\n firstMoment.assign(newFirstMoment);\n weightedInfNorm.assign(newWeightedInfNorm);\n const newValue = add(mul(div(lr, oneMinusAccBeta1), div(newFirstMoment, add(newWeightedInfNorm, this.epsilon))), value);\n value.assign(newValue);\n });\n this.iteration.assign(add(this.iteration, 1));\n this.accBeta1.assign(mul(this.accBeta1, this.beta1));\n });\n this.incrementIterations();\n }\n dispose() {\n this.accBeta1.dispose();\n this.iteration.dispose();\n if (this.accumulatedFirstMoment != null) {\n dispose(this.accumulatedFirstMoment.map(v => v.variable));\n }\n if (this.accumulatedWeightedInfNorm != null) {\n dispose(this.accumulatedWeightedInfNorm.map(v => v.variable));\n }\n }\n async getWeights() {\n throw new Error('getWeights() is not implemented for Adamax yet.');\n }\n async setWeights(weightValues) {\n throw new Error('setWeights() is not implemented for Adamax yet.');\n }\n getConfig() {\n return {\n 'learningRate': this.learningRate,\n 'beta1': this.beta1,\n 'beta2': this.beta2,\n 'epsilon': this.epsilon,\n 'decay': this.decay\n };\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls(config['learningRate'], config['beta1'], config['beta2'], config['epsilon'], config['decay']);\n }\n}\n/** @nocollapse */\nAdamaxOptimizer.className = 'Adamax'; // Note: Name matters for Python compatbility.\nregisterClass(AdamaxOptimizer);\n//# sourceMappingURL=adamax_optimizer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { keep, tidy } from '../globals';\nimport { add } from '../ops/add';\nimport { mul } from '../ops/mul';\nimport { scalar } from '../ops/scalar';\nimport { registerClass } from '../serialization';\nimport { Optimizer } from './optimizer';\n/** @doclink Optimizer */\nexport class SGDOptimizer extends Optimizer {\n constructor(learningRate) {\n super();\n this.learningRate = learningRate;\n this.setLearningRate(learningRate);\n }\n applyGradients(variableGradients) {\n const varNames = Array.isArray(variableGradients) ?\n variableGradients.map(v => v.name) :\n Object.keys(variableGradients);\n varNames.forEach((name, i) => {\n const gradient = Array.isArray(variableGradients) ?\n variableGradients[i].tensor :\n variableGradients[name];\n if (gradient == null) {\n return;\n }\n const value = ENGINE.registeredVariables[name];\n tidy(() => {\n const newValue = add(mul(this.c, gradient), value);\n value.assign(newValue);\n });\n });\n this.incrementIterations();\n }\n /**\n * Sets the learning rate of the optimizer.\n */\n setLearningRate(learningRate) {\n this.learningRate = learningRate;\n if (this.c != null) {\n this.c.dispose();\n }\n this.c = keep(scalar(-learningRate));\n }\n dispose() {\n this.c.dispose();\n }\n async getWeights() {\n return [await this.saveIterations()];\n }\n async setWeights(weightValues) {\n weightValues = await this.extractIterations(weightValues);\n if (weightValues.length !== 0) {\n throw new Error('SGD optimizer does not have settable weights.');\n }\n }\n getConfig() {\n return { 'learningRate': this.learningRate };\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls(config['learningRate']);\n }\n}\n/** @nocollapse */\nSGDOptimizer.className = 'SGD'; // Note: Name matters for Python compatibility.\nregisterClass(SGDOptimizer);\n//# sourceMappingURL=sgd_optimizer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { dispose, tidy } from '../globals';\nimport { add } from '../ops/add';\nimport { mul } from '../ops/mul';\nimport { scalar } from '../ops/scalar';\nimport { zerosLike } from '../ops/zeros_like';\nimport { registerClass } from '../serialization';\nimport { SGDOptimizer } from './sgd_optimizer';\n/** @doclink Optimizer */\nexport class MomentumOptimizer extends SGDOptimizer {\n constructor(learningRate, momentum, useNesterov = false) {\n super(learningRate);\n this.learningRate = learningRate;\n this.momentum = momentum;\n this.useNesterov = useNesterov;\n this.accumulations = [];\n this.m = scalar(this.momentum);\n }\n applyGradients(variableGradients) {\n const variableNames = Array.isArray(variableGradients) ?\n variableGradients.map(item => item.name) :\n Object.keys(variableGradients);\n variableNames.forEach((name, i) => {\n const value = ENGINE.registeredVariables[name];\n if (this.accumulations[i] == null) {\n const trainable = false;\n this.accumulations[i] = {\n originalName: `${name}/momentum`,\n variable: tidy(() => zerosLike(value).variable(trainable))\n };\n }\n const accumulation = this.accumulations[i].variable;\n const gradient = Array.isArray(variableGradients) ?\n variableGradients[i].tensor :\n variableGradients[name];\n if (gradient == null) {\n return;\n }\n tidy(() => {\n let newValue;\n const newAccumulation = add(mul(this.m, accumulation), gradient);\n if (this.useNesterov) {\n newValue = add(mul(this.c, add(gradient, mul(newAccumulation, this.m))), value);\n }\n else {\n newValue = add(mul(this.c, newAccumulation), value);\n }\n accumulation.assign(newAccumulation);\n value.assign(newValue);\n });\n });\n this.incrementIterations();\n }\n dispose() {\n this.m.dispose();\n if (this.accumulations != null) {\n dispose(this.accumulations.map(v => v.variable));\n }\n }\n /**\n * Sets the momentum of the optimizer.\n *\n * @param momentum\n */\n setMomentum(momentum) {\n this.momentum = momentum;\n }\n async getWeights() {\n // Order matters for Python compatibility.\n return [await this.saveIterations()].concat(this.accumulations.map(v => ({ name: v.originalName, tensor: v.variable })));\n }\n async setWeights(weightValues) {\n weightValues = await this.extractIterations(weightValues);\n const trainable = false;\n this.accumulations = weightValues.map(v => ({ originalName: v.name, variable: v.tensor.variable(trainable) }));\n }\n getConfig() {\n return {\n 'learningRate': this.learningRate,\n 'momentum': this.momentum,\n 'useNesterov': this.useNesterov\n };\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls(config['learningRate'], config['momentum'], config['useNesterov']);\n }\n}\n/** @nocollapse */\nMomentumOptimizer.className = 'Momentum'; // Name matters for Python compatibility.\nregisterClass(MomentumOptimizer);\n//# 
sourceMappingURL=momentum_optimizer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { dispose, tidy } from '../globals';\nimport { add } from '../ops/add';\nimport { div } from '../ops/div';\nimport { mul } from '../ops/mul';\nimport { sqrt } from '../ops/sqrt';\nimport { square } from '../ops/square';\nimport { sub } from '../ops/sub';\nimport { zerosLike } from '../ops/zeros_like';\nimport { registerClass } from '../serialization';\nimport { Optimizer } from './optimizer';\n/** @doclink Optimizer */\nexport class RMSPropOptimizer extends Optimizer {\n constructor(learningRate, decay = 0.9, momentum = 0.0, epsilon = null, centered = false) {\n super();\n this.learningRate = learningRate;\n this.decay = decay;\n this.momentum = momentum;\n this.epsilon = epsilon;\n this.accumulatedMeanSquares = [];\n this.accumulatedMoments = [];\n this.accumulatedMeanGrads = [];\n this.centered = centered;\n if (epsilon == null) {\n this.epsilon = ENGINE.backend.epsilon();\n }\n if (learningRate == null) {\n throw new Error(`learningRate for RMSPropOptimizer must be defined.`);\n }\n }\n applyGradients(variableGradients) {\n const variableNames = Array.isArray(variableGradients) ?\n variableGradients.map(item => item.name) :\n Object.keys(variableGradients);\n variableNames.forEach((name, i) => {\n const value = ENGINE.registeredVariables[name];\n const trainable = false;\n if (this.accumulatedMeanSquares[i] == null) {\n this.accumulatedMeanSquares[i] = {\n originalName: `${name}/rms`,\n variable: tidy(() => zerosLike(value).variable(trainable))\n };\n }\n if (this.accumulatedMoments[i] == null) {\n this.accumulatedMoments[i] = {\n originalName: `${name}/momentum`,\n variable: tidy(() => zerosLike(value).variable(trainable))\n };\n }\n if (this.accumulatedMeanGrads[i] == null && this.centered) {\n this.accumulatedMeanGrads[i] = {\n originalName: `${name}/mg`,\n variable: tidy(() => zerosLike(value).variable(trainable))\n };\n }\n const gradient = Array.isArray(variableGradients) ?\n variableGradients[i].tensor :\n variableGradients[name];\n if (gradient == null) {\n return;\n }\n const accumulatedMeanSquare = this.accumulatedMeanSquares[i].variable;\n const accumulatedMoments = this.accumulatedMoments[i].variable;\n tidy(() => {\n const newAccumulatedMeanSquare = add(mul(accumulatedMeanSquare, this.decay), mul(square(gradient), 1 - this.decay));\n if (this.centered) {\n const accumulatedMeanGrad = this.accumulatedMeanGrads[i].variable;\n // Centered gradient\n const newAccumulatedMeanGrad = add(mul(accumulatedMeanGrad, this.decay), mul(gradient, 1 - this.decay));\n const gradContribution = div(mul(gradient, this.learningRate), sqrt(sub(newAccumulatedMeanSquare, add(square(newAccumulatedMeanGrad), this.epsilon))));\n const newAccumulatedMoments = add(mul(accumulatedMoments, this.momentum), 
gradContribution);\n accumulatedMeanSquare.assign(newAccumulatedMeanSquare);\n accumulatedMeanGrad.assign(newAccumulatedMeanGrad);\n accumulatedMoments.assign(newAccumulatedMoments);\n const newValue = sub(value, newAccumulatedMoments);\n value.assign(newValue);\n }\n else {\n // Plain gradient\n const newAccumulatedMeanSquare = add(mul(accumulatedMeanSquare, this.decay), mul(square(gradient), 1 - this.decay));\n const newAccumulatedMoments = add(mul(accumulatedMoments, this.momentum), div(mul(gradient, this.learningRate), sqrt(add(newAccumulatedMeanSquare, this.epsilon))));\n accumulatedMeanSquare.assign(newAccumulatedMeanSquare);\n accumulatedMoments.assign(newAccumulatedMoments);\n const newValue = sub(value, newAccumulatedMoments);\n value.assign(newValue);\n }\n });\n });\n this.incrementIterations();\n }\n dispose() {\n if (this.accumulatedMeanSquares != null) {\n dispose(this.accumulatedMeanSquares.map(v => v.variable));\n }\n if (this.accumulatedMeanGrads != null && this.centered) {\n dispose(this.accumulatedMeanGrads.map(v => v.variable));\n }\n if (this.accumulatedMoments != null) {\n dispose(this.accumulatedMoments.map(v => v.variable));\n }\n }\n async getWeights() {\n // Order matters for Python compatibility.\n const variables = [...this.accumulatedMeanSquares, ...this.accumulatedMoments];\n if (this.centered) {\n variables.push(...this.accumulatedMeanGrads);\n }\n return [await this.saveIterations()].concat(variables.map(v => ({ name: v.originalName, tensor: v.variable })));\n }\n async setWeights(weightValues) {\n weightValues = await this.extractIterations(weightValues);\n const variableCount = this.centered ? weightValues.length / 3 : weightValues.length / 2;\n const trainable = false;\n this.accumulatedMeanSquares =\n weightValues.slice(0, variableCount).map(v => ({\n originalName: v.name,\n variable: v.tensor.variable(trainable)\n }));\n this.accumulatedMoments =\n weightValues.slice(variableCount, variableCount * 2)\n .map(v => ({\n originalName: v.name,\n variable: v.tensor.variable(trainable)\n }));\n if (this.centered) {\n this.accumulatedMeanGrads =\n weightValues.slice(variableCount * 2, variableCount * 3)\n .map(v => ({\n originalName: v.name,\n variable: v.tensor.variable(trainable)\n }));\n }\n }\n getConfig() {\n return {\n 'learningRate': this.learningRate,\n 'decay': this.decay,\n 'momentum': this.momentum,\n 'epsilon': this.epsilon,\n 'centered': this.centered\n };\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls(config['learningRate'], config['decay'], config['momentum'], config['epsilon'], config['centered']);\n }\n}\n/** @nocollapse */\nRMSPropOptimizer.className = 'RMSProp'; // Note: Name matters for Python compatibility.\nregisterClass(RMSPropOptimizer);\n//# sourceMappingURL=rmsprop_optimizer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { AdadeltaOptimizer } from './adadelta_optimizer';\nimport { AdagradOptimizer } from './adagrad_optimizer';\nimport { AdamOptimizer } from './adam_optimizer';\nimport { AdamaxOptimizer } from './adamax_optimizer';\nimport { MomentumOptimizer } from './momentum_optimizer';\nimport { RMSPropOptimizer } from './rmsprop_optimizer';\nimport { SGDOptimizer } from './sgd_optimizer';\nexport class OptimizerConstructors {\n /**\n * Constructs a `tf.SGDOptimizer` that uses stochastic gradient descent.\n *\n * ```js\n * // Fit a quadratic function by learning the coefficients a, b, c.\n * const xs = tf.tensor1d([0, 1, 2, 3]);\n * const ys = tf.tensor1d([1.1, 5.9, 16.8, 33.9]);\n *\n * const a = tf.scalar(Math.random()).variable();\n * const b = tf.scalar(Math.random()).variable();\n * const c = tf.scalar(Math.random()).variable();\n *\n * // y = a * x^2 + b * x + c.\n * const f = x => a.mul(x.square()).add(b.mul(x)).add(c);\n * const loss = (pred, label) => pred.sub(label).square().mean();\n *\n * const learningRate = 0.01;\n * const optimizer = tf.train.sgd(learningRate);\n *\n * // Train the model.\n * for (let i = 0; i < 10; i++) {\n * optimizer.minimize(() => loss(f(xs), ys));\n * }\n *\n * // Make predictions.\n * console.log(\n * `a: ${a.dataSync()}, b: ${b.dataSync()}, c: ${c.dataSync()}`);\n * const preds = f(xs).dataSync();\n * preds.forEach((pred, i) => {\n * console.log(`x: ${i}, pred: ${pred}`);\n * });\n * ```\n *\n * @param learningRate The learning rate to use for the SGD algorithm.\n *\n * @doc {heading: 'Training', subheading: 'Optimizers', namespace: 'train'}\n */\n static sgd(learningRate) {\n return new SGDOptimizer(learningRate);\n }\n /**\n * Constructs a `tf.MomentumOptimizer` that uses momentum gradient\n * descent.\n *\n * See\n * [http://proceedings.mlr.press/v28/sutskever13.pdf](\n * http://proceedings.mlr.press/v28/sutskever13.pdf)\n *\n * @param learningRate The learning rate to use for the Momentum gradient\n * descent algorithm.\n * @param momentum The momentum to use for the momentum gradient descent\n * algorithm.\n *\n * @doc {heading: 'Training', subheading: 'Optimizers', namespace: 'train'}\n */\n static momentum(learningRate, momentum, useNesterov = false) {\n return new MomentumOptimizer(learningRate, momentum, useNesterov);\n }\n /**\n * Constructs a `tf.RMSPropOptimizer` that uses RMSProp gradient\n * descent. 
This implementation uses plain momentum and is not centered\n * version of RMSProp.\n *\n * See\n * [http://www.cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf](\n * http://www.cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf)\n *\n * @param learningRate The learning rate to use for the RMSProp gradient\n * descent algorithm.\n * @param decay The discounting factor for the history/coming gradient.\n * @param momentum The momentum to use for the RMSProp gradient descent\n * algorithm.\n * @param epsilon Small value to avoid zero denominator.\n * @param centered If true, gradients are normalized by the estimated\n * variance of the gradient.\n *\n * @doc {heading: 'Training', subheading: 'Optimizers', namespace: 'train'}\n */\n static rmsprop(learningRate, decay = .9, momentum = 0.0, epsilon = null, centered = false) {\n return new RMSPropOptimizer(learningRate, decay, momentum, epsilon, centered);\n }\n /**\n * Constructs a `tf.AdamOptimizer` that uses the Adam algorithm.\n * See [https://arxiv.org/abs/1412.6980](https://arxiv.org/abs/1412.6980)\n *\n * @param learningRate The learning rate to use for the Adam gradient\n * descent algorithm.\n * @param beta1 The exponential decay rate for the 1st moment estimates.\n * @param beta2 The exponential decay rate for the 2nd moment estimates.\n * @param epsilon A small constant for numerical stability.\n *\n * @doc {heading: 'Training', subheading: 'Optimizers', namespace: 'train'}\n */\n static adam(learningRate = 0.001, beta1 = 0.9, beta2 = 0.999, epsilon = null) {\n return new AdamOptimizer(learningRate, beta1, beta2, epsilon);\n }\n /**\n * Constructs a `tf.AdadeltaOptimizer` that uses the Adadelta algorithm.\n * See [https://arxiv.org/abs/1212.5701](https://arxiv.org/abs/1212.5701)\n *\n * @param learningRate The learning rate to use for the Adadelta gradient\n * descent algorithm.\n * @param rho The learning rate decay over each update.\n * @param epsilon A constant epsilon used to better condition the grad\n * update.\n *\n * @doc {heading: 'Training', subheading: 'Optimizers', namespace: 'train'}\n */\n static adadelta(learningRate = .001, rho = .95, epsilon = null) {\n return new AdadeltaOptimizer(learningRate, rho, epsilon);\n }\n /**\n * Constructs a `tf.AdamaxOptimizer` that uses the Adamax algorithm.\n * See [https://arxiv.org/abs/1412.6980](https://arxiv.org/abs/1412.6980)\n *\n * @param learningRate The learning rate to use for the Adamax gradient\n * descent algorithm.\n * @param beta1 The exponential decay rate for the 1st moment estimates.\n * @param beta2 The exponential decay rate for the 2nd moment estimates.\n * @param epsilon A small constant for numerical stability.\n * @param decay The learning rate decay over each update.\n *\n * @doc {heading: 'Training', subheading: 'Optimizers', namespace: 'train'}\n */\n static adamax(learningRate = 0.002, beta1 = 0.9, beta2 = 0.999, epsilon = null, decay = 0.0) {\n return new AdamaxOptimizer(learningRate, beta1, beta2, epsilon, decay);\n }\n /**\n * Constructs a `tf.AdagradOptimizer` that uses the Adagrad algorithm.\n * See\n * [http://www.jmlr.org/papers/volume12/duchi11a/duchi11a.pdf](\n * http://www.jmlr.org/papers/volume12/duchi11a/duchi11a.pdf)\n * or\n * [http://ruder.io/optimizing-gradient-descent/index.html#adagrad](\n * http://ruder.io/optimizing-gradient-descent/index.html#adagrad)\n *\n * @param learningRate The learning rate to use for the Adagrad gradient\n * descent algorithm.\n * @param initialAccumulatorValue Starting value for the 
accumulators, must be\n * positive.\n *\n * @doc {heading: 'Training', subheading: 'Optimizers', namespace: 'train'}\n */\n static adagrad(learningRate, initialAccumulatorValue = 0.1) {\n return new AdagradOptimizer(learningRate, initialAccumulatorValue);\n }\n}\n//# sourceMappingURL=optimizer_constructors.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// So typings can propagate.\nimport { AdadeltaOptimizer } from './optimizers/adadelta_optimizer';\nimport { AdagradOptimizer } from './optimizers/adagrad_optimizer';\nimport { AdamOptimizer } from './optimizers/adam_optimizer';\nimport { AdamaxOptimizer } from './optimizers/adamax_optimizer';\nimport { MomentumOptimizer } from './optimizers/momentum_optimizer';\nimport { OptimizerConstructors } from './optimizers/optimizer_constructors';\nimport { RMSPropOptimizer } from './optimizers/rmsprop_optimizer';\nimport { SGDOptimizer } from './optimizers/sgd_optimizer';\n// tslint:disable-next-line:no-unused-expression\n[MomentumOptimizer, SGDOptimizer, AdadeltaOptimizer, AdagradOptimizer,\n RMSPropOptimizer, AdamaxOptimizer, AdamOptimizer];\nexport const train = {\n sgd: OptimizerConstructors.sgd,\n momentum: OptimizerConstructors.momentum,\n adadelta: OptimizerConstructors.adadelta,\n adagrad: OptimizerConstructors.adagrad,\n rmsprop: OptimizerConstructors.rmsprop,\n adamax: OptimizerConstructors.adamax,\n adam: OptimizerConstructors.adam\n};\n//# sourceMappingURL=train.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nconst delayCallback = (() => {\n if (typeof requestAnimationFrame !== 'undefined') {\n return requestAnimationFrame;\n }\n else if (typeof setImmediate !== 'undefined') {\n return setImmediate;\n }\n return (f) => f(); // no delays\n})();\n/**\n * Returns a promise that resolve when a requestAnimationFrame has completed.\n *\n * On Node.js this uses setImmediate instead of requestAnimationFrame.\n *\n * This is simply a sugar method so that users can do the following:\n * `await tf.nextFrame();`\n *\n * @doc {heading: 'Performance', subheading: 'Timing'}\n */\nfunction nextFrame() {\n return new Promise(resolve => delayCallback(() => resolve()));\n}\nexport { nextFrame };\n//# sourceMappingURL=browser_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// Returns the image center in pixels.\nexport function getImageCenter(center, imageHeight, imageWidth) {\n const centerX = imageWidth * (typeof center === 'number' ? center : center[0]);\n const centerY = imageHeight * (typeof center === 'number' ? center : center[1]);\n return [centerX, centerY];\n}\n//# sourceMappingURL=rotate_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * Gets the new shape of the input Tensor after it's been reshaped\n * to:\n * [blockShape[0], ..., blockShape[M-1], batch / prod(blockShape),\n * inputShape[1], ..., inputShape[N-1]]\n *\n * See step 1: https://www.tensorflow.org/api_docs/python/tf/batch_to_space_nd\n */\nexport function getReshaped(inputShape, blockShape, prod, batchToSpace = true) {\n let reshaped = [];\n if (batchToSpace) {\n reshaped = reshaped.concat(blockShape.slice(0));\n reshaped.push(inputShape[0] / prod);\n reshaped = reshaped.concat(inputShape.slice(1));\n }\n else {\n reshaped = reshaped.concat(inputShape[0]);\n const spatialLength = blockShape.length;\n for (let i = 0; i < spatialLength; ++i) {\n reshaped =\n reshaped.concat([inputShape[i + 1] / blockShape[i], blockShape[i]]);\n }\n reshaped = reshaped.concat(inputShape.slice(spatialLength + 1));\n }\n return reshaped;\n}\n/**\n * Gets the permutation that will transpose the dimensions of the\n * reshaped tensor to shape:\n *\n * [batch / prod(block_shape),inputShape[1], blockShape[0], ...,\n * inputShape[M], blockShape[M-1],inputShape[M+1], ..., inputShape[N-1]]\n *\n * see step 2: https://www.tensorflow.org/api_docs/python/tf/batch_to_space_nd\n */\nexport function getPermuted(reshapedRank, blockShapeRank, batchToSpace = true) {\n const permuted = [];\n if (batchToSpace) {\n permuted.push(blockShapeRank);\n for (let i = blockShapeRank + 1; i < reshapedRank; ++i) {\n if (i <= 2 * blockShapeRank) {\n permuted.push(i);\n permuted.push(i - (blockShapeRank + 1));\n }\n else {\n permuted.push(i);\n }\n }\n }\n else {\n const permutedBeforeBatch = [];\n const permutedAfterBatch = [];\n for (let i = 1; i < reshapedRank; ++i) {\n if (i >= blockShapeRank * 2 + 1 || i % 2 === 1) {\n permutedAfterBatch.push(i);\n }\n else {\n permutedBeforeBatch.push(i);\n }\n }\n permuted.push(...permutedBeforeBatch);\n permuted.push(0);\n permuted.push(...permutedAfterBatch);\n }\n return permuted;\n}\n/**\n * Gets the shape of the reshaped and permuted input Tensor before any cropping\n * is applied. 
The new shape will be:\n *\n * [batch / prod(blockShape),inputShape[1] * blockShape[0], ...,\n * inputShape[M] * blockShape[M-1],inputShape[M+1], ..., inputShape[N-1]]\n *\n * See step 3: https://www.tensorflow.org/api_docs/python/tf/batch_to_space_nd\n */\nexport function getReshapedPermuted(inputShape, blockShape, prod, batchToSpace = true) {\n const reshapedPermuted = [];\n if (batchToSpace) {\n reshapedPermuted.push(inputShape[0] / prod);\n }\n else {\n reshapedPermuted.push(inputShape[0] * prod);\n }\n for (let i = 1; i < inputShape.length; ++i) {\n if (i <= blockShape.length) {\n if (batchToSpace) {\n reshapedPermuted.push(blockShape[i - 1] * inputShape[i]);\n }\n else {\n reshapedPermuted.push(inputShape[i] / blockShape[i - 1]);\n }\n }\n else {\n reshapedPermuted.push(inputShape[i]);\n }\n }\n return reshapedPermuted;\n}\n/**\n * Converts the crops argument into the beginning coordinates of a slice\n * operation.\n */\nexport function getSliceBeginCoords(crops, blockShape) {\n const sliceBeginCoords = [0];\n for (let i = 0; i < blockShape; ++i) {\n sliceBeginCoords.push(crops[i][0]);\n }\n return sliceBeginCoords;\n}\n/**\n * Converts the crops argument into the size of a slice operation. When\n * combined with getSliceBeginCoords this function allows the reshaped and\n * permuted Tensor to be cropped to its final output shape of:\n *\n * inputShape[1] * blockShape[0] - crops[0,0] - crops[0,1], ...,\n * inputShape[M] * blockShape[M-1] -crops[M-1,0] -\n * crops[M-1,1],inputShape[M+1], ..., inputShape[N-1]]\n *\n * See step 4: https://www.tensorflow.org/api_docs/python/tf/batch_to_space_nd\n */\nexport function getSliceSize(uncroppedShape, crops, blockShape) {\n const sliceSize = uncroppedShape.slice(0, 1);\n for (let i = 0; i < blockShape; ++i) {\n sliceSize.push(uncroppedShape[i + 1] - crops[i][0] - crops[i][1]);\n }\n return sliceSize;\n}\n//# sourceMappingURL=array_ops_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const SELU_SCALEALPHA = 1.7580993408473768599402175208123;\nexport const SELU_SCALE = 1.0507009873554804934193349852946;\n//# sourceMappingURL=selu_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const ERF_P = 0.3275911;\nexport const ERF_A1 = 0.254829592;\nexport const ERF_A2 = -0.284496736;\nexport const ERF_A3 = 1.421413741;\nexport const ERF_A4 = -1.453152027;\nexport const ERF_A5 = 1.061405429;\n//# sourceMappingURL=erf_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from './environment';\nexport function warn(...msg) {\n if (!env().getBool('IS_TEST')) {\n console.warn(...msg);\n }\n}\nexport function log(...msg) {\n if (!env().getBool('IS_TEST')) {\n console.log(...msg);\n }\n}\n//# sourceMappingURL=log.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * Merges real and imaginary Float32Arrays into a single complex Float32Array.\n *\n * The memory layout is interleaved as follows:\n * real: [r0, r1, r2]\n * imag: [i0, i1, i2]\n * complex: [r0, i0, r1, i1, r2, i2]\n *\n * This is the inverse of splitRealAndImagArrays.\n *\n * @param real The real values of the complex tensor values.\n * @param imag The imag values of the complex tensor values.\n * @returns A complex tensor as a Float32Array with merged values.\n */\nexport function mergeRealAndImagArrays(real, imag) {\n if (real.length !== imag.length) {\n throw new Error(`Cannot merge real and imag arrays of different lengths. 
real:` +\n `${real.length}, imag: ${imag.length}.`);\n }\n const result = new Float32Array(real.length * 2);\n for (let i = 0; i < result.length; i += 2) {\n result[i] = real[i / 2];\n result[i + 1] = imag[i / 2];\n }\n return result;\n}\n/**\n * Splits a complex Float32Array into real and imag parts.\n *\n * The memory layout is interleaved as follows:\n * complex: [r0, i0, r1, i1, r2, i2]\n * real: [r0, r1, r2]\n * imag: [i0, i1, i2]\n *\n * This is the inverse of mergeRealAndImagArrays.\n *\n * @param complex The complex tensor values.\n * @returns An object with real and imag Float32Array components of the complex\n * tensor.\n */\nexport function splitRealAndImagArrays(complex) {\n const real = new Float32Array(complex.length / 2);\n const imag = new Float32Array(complex.length / 2);\n for (let i = 0; i < complex.length; i += 2) {\n real[i / 2] = complex[i];\n imag[i / 2] = complex[i + 1];\n }\n return { real, imag };\n}\n/**\n * Extracts even indexed complex values in the given array.\n * @param complex The complex tensor values\n */\nexport function complexWithEvenIndex(complex) {\n const len = Math.ceil(complex.length / 4);\n const real = new Float32Array(len);\n const imag = new Float32Array(len);\n for (let i = 0; i < complex.length; i += 4) {\n real[Math.floor(i / 4)] = complex[i];\n imag[Math.floor(i / 4)] = complex[i + 1];\n }\n return { real, imag };\n}\n/**\n * Extracts odd indexed comple values in the given array.\n * @param complex The complex tensor values\n */\nexport function complexWithOddIndex(complex) {\n const len = Math.floor(complex.length / 4);\n const real = new Float32Array(len);\n const imag = new Float32Array(len);\n for (let i = 2; i < complex.length; i += 4) {\n real[Math.floor(i / 4)] = complex[i];\n imag[Math.floor(i / 4)] = complex[i + 1];\n }\n return { real, imag };\n}\n/**\n * Get the map representing a complex value in the given array.\n * @param complex The complex tensor values.\n * @param index An index of the target complex value.\n */\nexport function getComplexWithIndex(complex, index) {\n const real = complex[index * 2];\n const imag = complex[index * 2 + 1];\n return { real, imag };\n}\n/**\n * Insert a given complex value into the TypedArray.\n * @param data The array in which the complex value is inserted.\n * @param c The complex value to be inserted.\n * @param index An index of the target complex value.\n */\nexport function assignToTypedArray(data, real, imag, index) {\n data[index * 2] = real;\n data[index * 2 + 1] = imag;\n}\n/**\n * Make the list of exponent terms used by FFT.\n */\nexport function exponents(n, inverse) {\n const real = new Float32Array(n / 2);\n const imag = new Float32Array(n / 2);\n for (let i = 0; i < Math.ceil(n / 2); i++) {\n const x = (inverse ? 2 : -2) * Math.PI * (i / n);\n real[i] = Math.cos(x);\n imag[i] = Math.sin(x);\n }\n return { real, imag };\n}\n/**\n * Make the exponent term used by FFT.\n */\nexport function exponent(k, n, inverse) {\n const x = (inverse ? 2 : -2) * Math.PI * (k / n);\n const real = Math.cos(x);\n const imag = Math.sin(x);\n return { real, imag };\n}\n//# sourceMappingURL=complex_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { cast } from '../ops/cast';\nimport { scalar } from '../ops/scalar';\nimport { tensor1d } from '../ops/tensor1d';\nimport { zeros } from '../ops/zeros';\nimport { hasEncodingLoss, makeZerosTypedArray } from '../util';\n// Utilities needed by backend consumers of tf-core.\nexport * from '../ops/axis_util';\nexport * from '../ops/broadcast_util';\nexport * from '../ops/concat_util';\nexport * from '../ops/conv_util';\nexport * from '../ops/fused_util';\nexport * from '../ops/reduce_util';\nimport * as slice_util from '../ops/slice_util';\nexport { slice_util };\nexport { upcastType } from '../types';\nexport * from '../ops/rotate_util';\nexport * from '../ops/array_ops_util';\nexport * from '../ops/gather_nd_util';\nexport * from '../ops/scatter_nd_util';\nexport * from '../ops/selu_util';\nexport * from '../ops/fused_util';\nexport * from '../ops/erf_util';\nexport * from '../log';\nexport * from '../backends/complex_util';\nexport * from '../ops/split_util';\nimport * as segment_util from '../ops/segment_util';\nexport { segment_util };\nexport function castTensor(x, dtype, backend) {\n if (dtype === 'complex64') {\n if (x.dtype === 'complex64') {\n return x.clone();\n }\n const zerosTensor = zeros(x.shape);\n const floatX = cast(x, 'float32');\n const result = backend.complex(floatX, zerosTensor);\n zerosTensor.dispose();\n floatX.dispose();\n return result;\n }\n if (!hasEncodingLoss(x.dtype, dtype)) {\n // We don't change the underlying data, since we cast to higher\n // precision.\n return ENGINE.makeTensorFromDataId(x.dataId, x.shape, dtype);\n }\n if (x.dtype === 'complex64') {\n const real = backend.real(x);\n const result = cast(real, dtype);\n real.dispose();\n return result;\n }\n if (dtype === 'int32') {\n return backend.int(x);\n }\n else if (dtype === 'bool') {\n const zero = scalar(0, x.dtype);\n const result = backend.notEqual(x, zero);\n zero.dispose();\n return result;\n }\n else {\n throw new Error(`Error in Cast: failed to cast ${x.dtype} to ${dtype}`);\n }\n}\nexport function reshapeTensor(x, shape) {\n return ENGINE.makeTensorFromDataId(x.dataId, shape, x.dtype);\n}\nexport function linspaceImpl(start, stop, num) {\n const step = (stop - start) / (num - 1);\n const values = makeZerosTypedArray(num, 'float32');\n values[0] = start;\n for (let i = 1; i < values.length; i++) {\n values[i] = values[i - 1] + step;\n }\n return tensor1d(values, 'float32');\n}\n//# sourceMappingURL=backend_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { slice } from '../ops/slice';\n// TODO(annxingyuan): Use this helper in WASM Split kernel once intermediate\n// kernels have been modularized in WebGL and CPU\n// https://github.com/tensorflow/tfjs/issues/2822.\n/** Shared implementation of the split kernel across WebGL and CPU. */\nexport function split(x, sizeSplits, axis) {\n const begin = new Array(x.rank).fill(0);\n const size = x.shape.slice();\n return sizeSplits.map(s => {\n const sliceSize = [...size];\n sliceSize[axis] = s;\n const sliceT = slice(x, begin, sliceSize);\n begin[axis] += s;\n return sliceT;\n });\n}\n//# sourceMappingURL=split_shared.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * An implementation of the tile kernel shared between webgl and cpu for string\n * tensors only.\n */\nimport { buffer } from '../ops/buffer';\nexport function tile(xBuf, reps) {\n const newShape = new Array(xBuf.rank);\n for (let i = 0; i < newShape.length; i++) {\n newShape[i] = xBuf.shape[i] * reps[i];\n }\n const result = buffer(newShape, xBuf.dtype);\n for (let i = 0; i < result.values.length; ++i) {\n const newLoc = result.indexToLoc(i);\n const originalLoc = new Array(xBuf.rank);\n for (let j = 0; j < originalLoc.length; j++) {\n originalLoc[j] = newLoc[j] % xBuf.shape[j];\n }\n const originalIndex = xBuf.locToIndex(originalLoc);\n result.values[i] = xBuf.values[originalIndex];\n }\n return result.toTensor();\n}\n//# sourceMappingURL=tile_impl.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
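// Editorial sketch: the bookkeeping done by the shared split kernel above — each output
// slice begins where the previous one ended along the split axis. Shown on a flat array
// (axis 0) in plain JS rather than on tensors.
function splitSizesSketch(values, sizeSplits) {
  const out = [];
  let begin = 0;
  for (const s of sizeSplits) {
    out.push(values.slice(begin, begin + s));
    begin += s; // advance the start of the next slice
  }
  return out;
}
console.log(splitSizesSketch([1, 2, 3, 4, 5, 6], [2, 1, 3])); // [[1,2],[3],[4,5,6]]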
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/** An implementation of the TopK kernel shared between webgl and cpu. */\nimport { tensor } from '../ops/tensor';\nimport { getTypedArrayFromDType } from '../util';\nexport function topkImpl(x, xShape, xDtype, k, sorted) {\n // Reshape into a 2d tensor [batch, lastDim] and compute topk along lastDim.\n const lastDim = xShape[xShape.length - 1];\n const [batch, size] = [x.length / lastDim, lastDim];\n const allTopKVals = getTypedArrayFromDType(xDtype, batch * k);\n const allTopKIndices = getTypedArrayFromDType('int32', batch * k);\n for (let b = 0; b < batch; b++) {\n const offset = b * size;\n const vals = x.subarray(offset, offset + size);\n const valAndInd = [];\n for (let i = 0; i < vals.length; i++) {\n valAndInd.push({ value: vals[i], index: i });\n }\n valAndInd.sort((a, b) => b.value - a.value);\n const outOffset = b * k;\n const topKVals = allTopKVals.subarray(outOffset, outOffset + k);\n const topKIndices = allTopKIndices.subarray(outOffset, outOffset + k);\n for (let i = 0; i < k; i++) {\n topKVals[i] = valAndInd[i].value;\n topKIndices[i] = valAndInd[i].index;\n }\n }\n // Reshape back to the original input shape, except that the last\n // dimension is k.\n const outputShape = xShape.slice();\n outputShape[outputShape.length - 1] = k;\n return [\n tensor(allTopKVals, outputShape, xDtype),\n tensor(allTopKIndices, outputShape, 'int32')\n ];\n}\n//# sourceMappingURL=topk_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport { nonMaxSuppressionV3Impl, nonMaxSuppressionV4Impl, nonMaxSuppressionV5Impl } from './non_max_suppression_impl';\nexport { split } from './split_shared';\nexport { tile } from './tile_impl';\nexport { topkImpl } from './topk_impl';\nexport { whereImpl } from './where_impl';\n//# sourceMappingURL=kernel_impls.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. 
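// Editorial sketch: the per-batch work inside topkImpl above — pair each value with its
// index, sort descending by value, keep the first k values and their original indices.
// Plain JS, single batch, purely illustrative.
function topkSketch(values, k) {
  const valAndInd = values.map((value, index) => ({ value, index }));
  valAndInd.sort((a, b) => b.value - a.value);
  return {
    values: valAndInd.slice(0, k).map(v => v.value),
    indices: valAndInd.slice(0, k).map(v => v.index),
  };
}
console.log(topkSketch([3, 1, 4, 1, 5], 2)); // { values: [5, 4], indices: [4, 2] }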
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// base.ts is tfjs-core without auto registration of things like flags,\n// gradients, chained ops or the opHandler. See base_side_effects.ts for parts\n// tfjs core that are required side effects.\n/**\n * @fileoverview\n * @suppress {partialAlias} Optimization disabled due to passing the module\n * object into a function below:\n *\n * import * as ops from './ops/ops';\n * setOpHandler(ops);\n */\n// Serialization.\nimport * as io from './io/io';\nimport * as math from './math';\nimport * as browser from './ops/browser';\nimport * as gather_util from './ops/gather_nd_util';\nimport * as scatter_util from './ops/scatter_nd_util';\nimport * as slice_util from './ops/slice_util';\nimport * as serialization from './serialization';\nimport * as tensor_util from './tensor_util';\nimport * as test_util from './test_util';\nimport * as util from './util';\nimport { version } from './version';\n// Optimizers.\nexport { AdadeltaOptimizer } from './optimizers/adadelta_optimizer';\nexport { AdagradOptimizer } from './optimizers/adagrad_optimizer';\nexport { AdamOptimizer } from './optimizers/adam_optimizer';\nexport { AdamaxOptimizer } from './optimizers/adamax_optimizer';\nexport { MomentumOptimizer } from './optimizers/momentum_optimizer';\nexport { Optimizer } from './optimizers/optimizer';\nexport { RMSPropOptimizer } from './optimizers/rmsprop_optimizer';\nexport { SGDOptimizer } from './optimizers/sgd_optimizer';\nexport { Tensor, TensorBuffer, Variable } from './tensor';\nexport { Rank, sumOutType, upcastType } from './types';\nexport * from './ops/ops';\nexport { Reduction } from './ops/loss_ops_utils';\nexport * from './train';\nexport * from './globals';\nexport * from './kernel_registry';\nexport { customGrad, grad, grads, valueAndGrad, valueAndGrads, variableGrads } from './gradients';\nexport { Environment, env, ENV } from './environment';\nexport { version as version_core };\n// Top-level method exports.\nexport { nextFrame } from './browser_util';\n// Second level exports.\nimport * as backend_util from './backends/backend_util';\nimport * as device_util from './device_util';\nexport { browser, io, math, serialization, test_util, util, backend_util, tensor_util, slice_util, gather_util, scatter_util, device_util };\nimport * as kernel_impls from './backends/kernel_impls';\nexport { kernel_impls };\n// Backend specific.\nexport { KernelBackend, DataStorage } from './backends/backend';\n// Export all kernel names / info.\nexport * from './kernel_names';\n//# sourceMappingURL=base.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Abs } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { mul } from '../ops/mul';\nimport { step } from '../ops/step';\nexport const absGradConfig = {\n kernelName: Abs,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => mul(dy, step(cast(x, 'float32'), -1)) };\n }\n};\n//# sourceMappingURL=Abs_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Acos } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { div } from '../ops/div';\nimport { neg } from '../ops/neg';\nimport { scalar } from '../ops/scalar';\nimport { sqrt } from '../ops/sqrt';\nimport { square } from '../ops/square';\nimport { sub } from '../ops/sub';\nexport const acosGradConfig = {\n kernelName: Acos,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return {\n x: () => {\n const a = square(cast(x, 'float32'));\n const b = sqrt(sub(scalar(1), a));\n return neg(div(dy, b));\n }\n };\n }\n};\n//# sourceMappingURL=Acos_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
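// Editorial sketch: the gradient configs in these modules all share one shape — a kernel
// name, which inputs/outputs to save during the forward pass, and a gradFunc returning one
// thunk per saved tensor. The 'Square' kernel and plain-number arithmetic below are only
// illustrative; the real configs operate on tensors via ops such as mul/div shown above.
const squareGradConfigSketch = {
  kernelName: 'Square',
  inputsToSave: ['x'],
  gradFunc: (dy, saved) => {
    const [x] = saved;
    return { x: () => dy * 2 * x }; // chain rule: d(x^2)/dx = 2x
  }
};
console.log(squareGradConfigSketch.gradFunc(0.5, [3]).x()); // 3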
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Acosh } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { div } from '../ops/div';\nimport { sqrt } from '../ops/sqrt';\nimport { square } from '../ops/square';\nimport { sub } from '../ops/sub';\nexport const acoshGradConfig = {\n kernelName: Acosh,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return {\n x: () => {\n const a = sqrt(sub(square(cast(x, 'float32')), 1));\n return div(dy, a);\n }\n };\n }\n};\n//# sourceMappingURL=Acosh_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Add } from '../kernel_names';\nimport * as broadcast_util from '../ops/broadcast_util';\nimport { reshape } from '../ops/reshape';\nimport { sum } from '../ops/sum';\nexport const addGradConfig = {\n kernelName: Add,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved) => {\n const [a, b] = saved;\n const outShape = broadcast_util.assertAndGetBroadcastShape(a.shape, b.shape);\n const derA = () => {\n let res = dy;\n const reduceAxes = broadcast_util.getReductionAxes(a.shape, outShape);\n if (reduceAxes.length > 0) {\n res = sum(res, reduceAxes);\n }\n return reshape(res, a.shape);\n };\n const derB = () => {\n let res = dy;\n const reduceAxes = broadcast_util.getReductionAxes(b.shape, outShape);\n if (reduceAxes.length > 0) {\n res = sum(res, reduceAxes);\n }\n return reshape(res, b.shape);\n };\n return { a: derA, b: derB };\n }\n};\n//# sourceMappingURL=Add_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
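// Editorial sketch: why the Add gradient above sums over `reduceAxes` — when an input was
// broadcast to the output shape, several output elements share one input element, so their
// upstream gradients must be accumulated back into it. Plain JS for a shape-[1] input
// broadcast against a shape-[3] input:
const dyAdd = [0.1, 0.2, 0.3];                          // upstream gradient, shape [3]
const dA = [dyAdd.reduce((s, v) => s + v, 0)];          // a was broadcast: sum back, shape [1]
const dB = dyAdd.slice();                               // b was not broadcast: passes through
console.log(dA, dB);                                    // [~0.6] [0.1, 0.2, 0.3]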
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { AddN } from '../kernel_names';\nexport const addNGradConfig = {\n kernelName: AddN,\n saveAllInputs: true,\n gradFunc: (dy, saved) => {\n const ders = {};\n saved.forEach((_, i) => {\n ders[i] = () => dy.clone();\n });\n return ders;\n }\n};\n//# sourceMappingURL=AddN_grad.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ArgMax } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const argMaxGradConfig = {\n kernelName: ArgMax,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => zerosLike(x) };\n }\n};\n//# sourceMappingURL=ArgMax_grad.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ArgMin } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const argMinGradConfig = {\n kernelName: ArgMin,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => zerosLike(x) };\n }\n};\n//# sourceMappingURL=ArgMin_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Asin } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { div } from '../ops/div';\nimport { scalar } from '../ops/scalar';\nimport { sqrt } from '../ops/sqrt';\nimport { square } from '../ops/square';\nimport { sub } from '../ops/sub';\nexport const asinGradConfig = {\n kernelName: Asin,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => div(dy, sqrt(sub(scalar(1), square(cast(x, 'float32'))))) };\n }\n};\n//# sourceMappingURL=Asin_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Asinh } from '../kernel_names';\nimport { add } from '../ops/add';\nimport { cast } from '../ops/cast';\nimport { div } from '../ops/div';\nimport { scalar } from '../ops/scalar';\nimport { sqrt } from '../ops/sqrt';\nimport { square } from '../ops/square';\nexport const asinhGradConfig = {\n kernelName: Asinh,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return {\n x: () => {\n const a = sqrt(add(scalar(1), square(cast(x, 'float32'))));\n return div(dy, a);\n }\n };\n }\n};\n//# sourceMappingURL=Asinh_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Atan2 } from '../kernel_names';\nimport { add } from '../ops/add';\nimport { assertAndGetBroadcastShape, getReductionAxes } from '../ops/broadcast_util';\nimport { div } from '../ops/div';\nimport { mul } from '../ops/mul';\nimport { neg } from '../ops/neg';\nimport { reshape } from '../ops/reshape';\nimport { square } from '../ops/square';\nimport { sum } from '../ops/sum';\nexport const atan2GradConfig = {\n kernelName: Atan2,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved) => {\n const [a, b] = saved;\n const outShape = assertAndGetBroadcastShape(a.shape, b.shape);\n const derA = () => {\n const d = add(square(a), square(b));\n let res = mul(dy, div(b, d));\n const reduceAxes = getReductionAxes(a.shape, outShape);\n if (reduceAxes.length > 0) {\n res = sum(res, reduceAxes);\n }\n return reshape(res, a.shape);\n };\n const derB = () => {\n const d = add(square(a), square(b));\n let res = neg(mul(dy, div(a, d)));\n const reduceAxes = getReductionAxes(b.shape, outShape);\n if (reduceAxes.length > 0) {\n res = sum(res, reduceAxes);\n }\n return reshape(res, b.shape);\n };\n return { a: derA, b: derB };\n }\n};\n//# sourceMappingURL=Atan2_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Atan } from '../kernel_names';\nimport { add } from '../ops/add';\nimport { cast } from '../ops/cast';\nimport { div } from '../ops/div';\nimport { square } from '../ops/square';\nexport const atanGradConfig = {\n kernelName: Atan,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => div(dy, add(square(cast(x, 'float32')), 1)) };\n }\n};\n//# sourceMappingURL=Atan_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Atanh } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { div } from '../ops/div';\nimport { square } from '../ops/square';\nimport { sub } from '../ops/sub';\nimport { scalar } from '../ops/scalar';\nexport const atanhGradConfig = {\n kernelName: Atanh,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => div(dy, sub(scalar(1), square(cast(x, 'float32')))) };\n }\n};\n//# sourceMappingURL=Atanh_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { AvgPool3DBackprop } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the backprop of a 3d avg pool.\n *\n * @param dy The dy error, of rank 5 of shape\n * [batchSize, depth, height, width, channels].\n * assumed.\n * @param input The original input image, of rank 5 or rank4 of shape\n * [batchSize, depth, height, width, channels].\n * @param filterSize The filter size:\n * `[filterDepth, filterHeight, filterWidth]`.\n * `filterSize` is a single number,\n * then `filterDepth == filterHeight == filterWidth`.\n * @param strides The strides of the pooling:\n * `[strideDepth, strideHeight, strideWidth]`. If\n * `strides` is a single number, then `strideHeight == strideWidth`.\n * @param dilations Deprecated, this field will be gone in v3.0.0. The dilation\n * rates: `[dilationDepth, dilationHeight, dilationWidth]`\n * in which we sample input values across the depth, height and width\n * dimensions in dilated pooling.\n * Defaults to `[1, 1, 1]`. If `dilations` is a single number,\n * then `dilationDepth == dilationHeight == dilationWidth`.\n * If it is greater than 1, then all values of `strides` must be 1.\n * @param pad A string from: 'same', 'valid'. The type of padding algorithm\n * used in the forward prop of the op.\n * @param dimRoundingMode A string from: 'ceil', 'round', 'floor'. The\n * rounding mode used when computing output dimensions if pad is a\n * number. 
If none is provided, it will not round and error if the output\n * is of fractional size.\n */\nfunction avgPool3dBackprop_(dy, input, filterSize, strides, dilations = [1, 1, 1], pad, dimRoundingMode) {\n const $dy = convertToTensor(dy, 'dy', 'avgPool3dBackprop');\n const $input = convertToTensor(input, 'input', 'avgPool3dBackprop');\n let dy5D = $dy;\n let input5D = $input;\n let reshapedTo5D = false;\n if ($input.rank === 4) {\n reshapedTo5D = true;\n dy5D = reshape($dy, [1, $dy.shape[0], $dy.shape[1], $dy.shape[2], $dy.shape[3]]);\n input5D = reshape($input, [\n 1, $input.shape[0], $input.shape[1], $input.shape[2], $input.shape[3]\n ]);\n }\n util.assert(dy5D.rank === 5, () => `Error in avgPool3dBackprop: dy must be rank 5 but got rank ` +\n `${dy5D.rank}.`);\n util.assert(input5D.rank === 5, () => `Error in avgPool3dBackprop: input must be rank 5 but got rank ` +\n `${input5D.rank}.`);\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in avgPool3dBackprop: Either strides or dilations ' +\n `must be 1. Got strides ${strides} and dilations '${dilations}'`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in maxPool3dBackprop: pad must be an integer when ` +\n `using, dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const forward = backend => {\n const convInfo = conv_util.computePool3DInfo(input5D.shape, filterSize, strides, dilations, pad, dimRoundingMode);\n return backend.avgPool3dBackprop(dy5D, input5D, convInfo);\n };\n const inputs = { dy: dy5D, input: input5D };\n const attrs = { filterSize, strides, dilations, pad, dimRoundingMode };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, AvgPool3DBackprop, attrs);\n if (reshapedTo5D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3], res.shape[4]]);\n }\n return res;\n}\nexport const avgPool3dBackprop = op({ avgPool3dBackprop_ });\n//# sourceMappingURL=avg_pool_3d_backprop.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { AvgPool3D } from '../kernel_names';\nimport { avgPool3dBackprop } from '../ops/avg_pool_3d_backprop';\nexport const avgPool3DGradConfig = {\n kernelName: AvgPool3D,\n inputsToSave: ['x'],\n gradFunc: (dy, saved, attrs) => {\n const [x] = saved;\n const { filterSize, strides, dilations, pad, dimRoundingMode } = attrs;\n const $dilations = dilations == null ? [1, 1, 1] : dilations;\n return {\n x: () => avgPool3dBackprop(dy, x, filterSize, strides, $dilations, pad, dimRoundingMode)\n };\n }\n};\n//# sourceMappingURL=AvgPool3D_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { AvgPoolBackprop } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the backprop of an 2D avg pool.\n *\n * @param dy The dy error, of rank 4 or rank 3 of shape\n * [batchSize, height, width, channels]. If rank 3, batch of 1 is\n * assumed.\n * @param input The input image, of rank 4 or rank 3 of shape\n * [batchSize, height, width, channels]. If rank 3, batch of 1 is\n * assumed.\n * @param filterSize The filter size: `[filterHeight, filterWidth]`. If\n * `filterSize` is a single number, then `filterHeight == filterWidth`.\n * @param strides The strides of the pooling: `[strideHeight, strideWidth]`. If\n * `strides` is a single number, then `strideHeight == strideWidth`.\n * @param pad A string from: 'same', 'valid'. The type of padding algorithm\n * used in the forward prop of the op.\n */\nfunction avgPoolBackprop_(dy, input, filterSize, strides, pad) {\n const $dy = convertToTensor(dy, 'dy', 'avgPoolBackprop');\n const $input = convertToTensor(input, 'input', 'avgPoolBackprop');\n util.assert($input.rank === $dy.rank, () => `Rank of input (${$input.rank}) does not match rank of dy (${$dy.rank})`);\n let input4D = $input;\n let dy4D = $dy;\n let reshapedTo4D = false;\n if ($input.rank === 3) {\n reshapedTo4D = true;\n input4D =\n reshape($input, [1, $input.shape[0], $input.shape[1], $input.shape[2]]);\n dy4D = reshape($dy, [1, $dy.shape[0], $dy.shape[1], $dy.shape[2]]);\n }\n util.assert(dy4D.rank === 4, () => `Error in avgPoolBackprop: dy must be rank 4 but got rank ` +\n `${dy4D.rank}.`);\n util.assert(input4D.rank === 4, () => `Error in avgPoolBackprop: input must be rank 4 but got rank ` +\n `${input4D.rank}.`);\n const forward = backend => {\n const convInfo = conv_util.computePool2DInfo(input4D.shape, filterSize, strides, 1 /* dilations */, pad);\n return backend.avgPoolBackprop(dy4D, input4D, convInfo);\n };\n const inputs = { dy: dy4D, input: input4D };\n const attrs = { filterSize, strides, pad };\n const res = ENGINE.runKernelFunc(forward, inputs, null, AvgPoolBackprop, attrs);\n if (reshapedTo4D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3]]);\n }\n return res;\n}\nexport const avgPoolBackprop = op({ avgPoolBackprop_ });\n//# sourceMappingURL=avg_pool_backprop.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
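// Editorial sketch: the reshape pattern used by the pooling backprops above is the usual
// rank-normalization idiom — promote a rank-3 (or rank-4) input by adding a leading batch
// dimension of 1, run the kernel, then strip that dimension from the result. Shape-only:
const shape3 = [28, 28, 3];
const shape4 = [1, ...shape3];   // promoted shape handed to the kernel: [1, 28, 28, 3]
const back3 = shape4.slice(1);   // result reshaped back for the caller: [28, 28, 3]
console.log(shape4, back3);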
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { AvgPool } from '../kernel_names';\nimport { avgPoolBackprop } from '../ops/avg_pool_backprop';\nexport const avgPoolGradConfig = {\n kernelName: AvgPool,\n inputsToSave: ['x'],\n gradFunc: (dy, saved, attrs) => {\n const [x] = saved;\n const { filterSize, strides, pad } = attrs;\n return {\n x: () => avgPoolBackprop(dy, x, filterSize, strides, pad)\n };\n }\n};\n//# sourceMappingURL=AvgPool_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { BatchMatMul } from '../kernel_names';\nimport { matMul } from '../ops/mat_mul';\nexport const batchMatMulGradConfig = {\n kernelName: BatchMatMul,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved, attrs) => {\n const [a, b] = saved;\n const { transposeA, transposeB } = attrs;\n if (!transposeA && !transposeB) {\n return {\n a: () => matMul(dy, b, false, true),\n b: () => matMul(a, dy, true, false)\n };\n }\n else if (!transposeA && transposeB) {\n return {\n a: () => matMul(dy, b, false, false),\n b: () => matMul(dy, a, true, false)\n };\n }\n else if (transposeA && !transposeB) {\n return {\n a: () => matMul(b, dy, false, true),\n b: () => matMul(a, dy, false, false)\n };\n }\n else {\n return {\n a: () => matMul(b, dy, true, true),\n b: () => matMul(dy, a, true, true)\n };\n }\n }\n};\n//# sourceMappingURL=BatchMatMul_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
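// Editorial sketch: the four branches of the BatchMatMul gradient above are the standard
// matmul identities — for C = A·B (no transposes), dA = dY·Bᵀ and dB = Aᵀ·dY, with the
// other branches obtained by moving the transpose onto the corresponding factor.
// A tiny scalar (1x1 matrix) sanity check:
const Am = 2, Bm = 3, dYm = 1;       // C = A*B = 6
console.log(dYm * Bm, Am * dYm);     // dA = 3 (= dC/dA), dB = 2 (= dC/dB)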
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { BatchToSpaceND } from '../kernel_names';\nimport { spaceToBatchND } from '../ops/space_to_batch_nd';\nexport const batchToSpaceNDGradConfig = {\n kernelName: BatchToSpaceND,\n gradFunc: (dy, saved, attrs) => {\n const { blockShape, crops } = attrs;\n return { x: () => spaceToBatchND(dy, blockShape, crops) };\n }\n};\n//# sourceMappingURL=BatchToSpaceND_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { BroadcastTo } from '../kernel_names';\nimport { sum } from '../ops/sum';\nexport const broadcastToGradConfig = {\n kernelName: BroadcastTo,\n gradFunc: (dy, saved, attrs) => {\n const broadCastToAttrs = attrs;\n const inputShape = broadCastToAttrs.inputShape;\n const outputShape = broadCastToAttrs.shape;\n const reps = Array.from(outputShape);\n for (let i = inputShape.length - 1; i >= 0; i--) {\n if (inputShape[i] === outputShape[i]) {\n reps[i] = 1;\n }\n else if (inputShape[i] !== 1) {\n throw new Error(`broadcastTo(): [${inputShape}] cannot be broadcast to [${outputShape}].`);\n }\n }\n const axes = [];\n for (let i = 0; i < reps.length; i++) {\n if (reps[i] > 1) {\n axes.push(i);\n }\n }\n return { x: () => sum(dy, axes, true /* keepDims */) };\n }\n};\n//# sourceMappingURL=BroadcastTo_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Cast } from '../kernel_names';\nexport const castGradConfig = {\n kernelName: Cast,\n gradFunc: (dy) => {\n return { x: () => dy.clone() };\n }\n};\n//# sourceMappingURL=Cast_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Ceil } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const ceilGradConfig = {\n kernelName: Ceil,\n gradFunc: (dy) => {\n // TODO(manrajgrover): Return null for gradients when backprop supports it.\n return { x: () => zerosLike(dy) };\n }\n};\n//# sourceMappingURL=Ceil_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ClipByValue } from '../kernel_names';\nimport { greaterEqual } from '../ops/greater_equal';\nimport { lessEqual } from '../ops/less_equal';\nimport { logicalAnd } from '../ops/logical_and';\nimport { where } from '../ops/where';\nimport { zerosLike } from '../ops/zeros_like';\nexport const clipByValueGradConfig = {\n kernelName: ClipByValue,\n inputsToSave: ['x'],\n gradFunc: (dy, saved, attrs) => {\n const [x] = saved;\n const { clipValueMin, clipValueMax } = attrs;\n return {\n x: () => where(logicalAnd(greaterEqual(x, clipValueMin), lessEqual(x, clipValueMax)), dy, zerosLike(dy)),\n };\n }\n};\n//# sourceMappingURL=ClipByValue_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Concat } from '../kernel_names';\nimport { split } from '../ops/split';\nimport { parseAxisParam } from '../util';\nexport const concatGradConfig = {\n kernelName: Concat,\n saveAllInputs: true,\n gradFunc: (dy, saved, attrs) => {\n const shapes = saved.map(t => t.shape);\n const { axis } = attrs;\n const $axis = parseAxisParam(axis, saved[0].shape)[0];\n const sizeSplits = shapes.map(s => s[$axis]);\n const derTensors = split(dy, sizeSplits, $axis);\n return derTensors.map(t => () => t);\n }\n};\n//# sourceMappingURL=Concat_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Conv2D } from '../kernel_names';\nimport { conv2DBackpropFilter } from '../ops/conv2d_backprop_filter';\nimport { conv2DBackpropInput } from '../ops/conv2d_backprop_input';\nimport * as conv_util from '../ops/conv_util';\nimport * as util from '../util';\nexport const conv2DGradConfig = {\n kernelName: Conv2D,\n inputsToSave: ['x', 'filter'],\n gradFunc: (dy, saved, attrs) => {\n const [x4D, $filter] = saved;\n const { dilations, strides, pad, dataFormat } = attrs;\n util.assert(conv_util.tupleValuesAreOne(dilations), () => 'Error in gradient of conv2D: dilation rates greater than 1 ' +\n `are not yet supported in gradients. Got dilations '${dilations}'`);\n return {\n x: () => conv2DBackpropInput(x4D.shape, dy, $filter, strides, pad, dataFormat),\n filter: () => conv2DBackpropFilter(x4D, dy, $filter.shape, strides, pad, dataFormat)\n };\n }\n};\n//# sourceMappingURL=Conv2D_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Conv2DBackpropInput } from '../kernel_names';\nimport { conv2d } from '../ops/conv2d';\nimport { conv2DBackpropFilter } from '../ops/conv2d_backprop_filter';\nexport const conv2DBackpropInputGradConfig = {\n kernelName: Conv2DBackpropInput,\n inputsToSave: ['dy', 'filter'],\n gradFunc: (ddx, saved, attrs) => {\n const [dy, filter] = saved;\n const { strides, pad, dataFormat, dimRoundingMode } = attrs;\n return {\n dy: () => conv2d(ddx, filter, strides, pad, dataFormat, 1 /* dilations */, dimRoundingMode),\n filter: () => conv2DBackpropFilter(ddx, dy, filter.shape, strides, pad, dataFormat, dimRoundingMode)\n };\n }\n};\n//# sourceMappingURL=Conv2DBackpropInput_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Conv3DBackpropFilterV2 } from '../kernel_names';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the derivative of the filter of a 3D convolution.\n *\n * @param x The input tensor, of rank 5 or rank 4 of shape\n * [batch, depth, height, width, inChannels]. If rank 4, batch of 1 is\n * assumed.\n * @param dy The dy image, of rank 5 or rank 4, of shape\n * [batch, depth, height, width, outDepth]. If rank 4, batch of 1 is\n * assumed.\n * @param filterShape The shape of the filter, length 5,\n * [filterDepth, filterHeight, filterWidth, inDepth, outDepth].\n * @param strides The strides of the convolution: [strideDepth, strideHeight,\n * strideWidth].\n * @param pad A string from: 'same', 'valid'. 
The type of padding algorithm\n * used in the forward prop of the op.\n */\nfunction conv3DBackpropFilter_(x, dy, filterShape, strides, pad) {\n let x5D = x;\n if (x.rank === 4) {\n x5D = reshape(x, [1, x.shape[0], x.shape[1], x.shape[2], x.shape[3]]);\n }\n let dy5D = dy;\n if (dy5D.rank === 4) {\n dy5D = reshape(dy, [1, dy.shape[0], dy.shape[1], dy.shape[2], dy.shape[3]]);\n }\n util.assert(x5D.rank === 5, () => `Error in conv3dDerFilter: input must be rank 5, but got shape ` +\n `${x5D.shape}.`);\n util.assert(dy5D.rank === 5, () => `Error in conv3dDerFilter: dy must be rank 5, but got shape ` +\n `${dy5D.shape}.`);\n util.assert(filterShape.length === 5, () => `Error in conv3dDerFilter: filterShape must be length 5, but got ` +\n `${filterShape}.`);\n util.assert(x5D.shape[4] === filterShape[3], () => `Error in conv3dDerFilter: depth of input ${x5D.shape[4]}) must ` +\n `match input depth in filter (${filterShape[3]}.`);\n util.assert(dy5D.shape[4] === filterShape[4], () => `Error in conv3dDerFilter: depth of dy (${dy5D.shape[4]}) must ` +\n `match output depth for filter (${filterShape[4]}).`);\n const forward = backend => {\n const dilations = 1;\n const convInfo = conv_util.computeConv3DInfo(x5D.shape, filterShape, strides, dilations, pad);\n return backend.conv3dDerFilter(x5D, dy5D, convInfo);\n };\n const inputs = { x: x5D, dy: dy5D };\n const attrs = { strides, pad, filterShape };\n return ENGINE.runKernelFunc(forward, inputs, null, Conv3DBackpropFilterV2, attrs);\n}\nexport const conv3DBackpropFilter = op({ conv3DBackpropFilter_ });\n//# sourceMappingURL=conv3d_backprop_filter.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Conv3D } from '../kernel_names';\nimport { conv3DBackpropFilter } from '../ops/conv3d_backprop_filter';\nimport { conv3DBackpropInput } from '../ops/conv3d_backprop_input';\nimport { tupleValuesAreOne } from '../ops/conv_util';\nimport * as util from '../util';\nexport const conv3DGradConfig = {\n kernelName: Conv3D,\n inputsToSave: ['x', 'filter'],\n gradFunc: (dy, saved, attrs) => {\n const { dilations, strides, pad } = attrs;\n util.assert(tupleValuesAreOne(dilations), () => 'Error in gradient of conv3D: dilation rates greater than 1 are ' +\n `not yet supported in gradients. Got dilations '${dilations}'`);\n const [x5D, $filter] = saved;\n return {\n x: () => conv3DBackpropInput(x5D.shape, dy, $filter, strides, pad),\n filter: () => conv3DBackpropFilter(x5D, dy, $filter.shape, strides, pad)\n };\n }\n};\n//# sourceMappingURL=Conv3D_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Cos } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { mul } from '../ops/mul';\nimport { neg } from '../ops/neg';\nimport { sin } from '../ops/sin';\nexport const cosGradConfig = {\n kernelName: Cos,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => mul(neg(sin(cast(x, 'float32'))), dy) };\n }\n};\n//# sourceMappingURL=Cos_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Cosh } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { mul } from '../ops/mul';\nimport { sinh } from '../ops/sinh';\nexport const coshGradConfig = {\n kernelName: Cosh,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => mul(sinh(cast(x, 'float32')), dy) };\n }\n};\n//# sourceMappingURL=Cosh_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Cumsum } from '../kernel_names';\nimport { getAxesPermutation } from '../ops/axis_util';\nimport { cumsum } from '../ops/cumsum';\nimport { transpose } from '../ops/transpose';\nexport const cumsumGradConfig = {\n kernelName: Cumsum,\n inputsToSave: ['x'],\n gradFunc: (dy, saved, attrs) => {\n const [x] = saved;\n const { axis, exclusive, reverse } = attrs;\n return {\n x: () => {\n const permutation = getAxesPermutation([axis], x.rank);\n let out = cumsum(dy, axis, exclusive, !reverse);\n if (permutation != null) {\n out = transpose(out, permutation);\n }\n return out;\n }\n };\n }\n};\n//# sourceMappingURL=Cumsum_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { DepthwiseConv2dNative } from '../kernel_names';\nimport * as conv_util from '../ops/conv_util';\nimport { depthwiseConv2dNativeBackpropFilter } from '../ops/depthwise_conv2d_native_backprop_filter';\nimport { depthwiseConv2dNativeBackpropInput } from '../ops/depthwise_conv2d_native_backprop_input';\nimport * as util from '../util';\nexport const depthwiseConv2dNativeGradConfig = {\n kernelName: DepthwiseConv2dNative,\n inputsToSave: ['x', 'filter'],\n gradFunc: (dy, saved, attrs) => {\n const { dilations, strides, pad, dimRoundingMode } = attrs;\n const $dilations = dilations == null ? [1, 1] : dilations;\n util.assert(conv_util.tupleValuesAreOne($dilations), () => 'Error in gradient of depthwiseConv2dNative: dilation rates ' +\n `greater than 1 are not yet supported. 
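// Editorial sketch: the Cumsum gradient above is a cumulative sum of dy in the opposite
// direction — input element j contributes to every output i >= j, so its gradient is the
// tail sum of dy. Plain-JS check along one axis (exclusive=false, reverse=false):
const dyCumsum = [1, 2, 3];
const gradCumsum = dyCumsum.map((_, j) => dyCumsum.slice(j).reduce((s, v) => s + v, 0));
console.log(gradCumsum); // [6, 5, 3]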
Got dilations ` +\n `'${$dilations}'`);\n const [x, filter] = saved;\n util.assert(x.rank === 4, () => `Error in gradient of depthwiseConv2dNative: input must be ` +\n `rank 4, but got rank ${x.rank}.`);\n util.assert(filter.rank === 4, () => `Error in gradient of depthwiseConv2dNative: filter must be ` +\n `rank 4, but got rank ${filter.rank}.`);\n util.assert(x.shape[3] === filter.shape[2], () => `Error in gradient of depthwiseConv2d: number of input ` +\n `channels (${x.shape[3]}) must match the inChannels dimension ` +\n `in filter ${filter.shape[2]}.`);\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, $dilations), () => 'Error in gradient of depthwiseConv2d: Either strides or ' +\n `dilations must be 1. Got strides ${strides} and dilations ` +\n `'${$dilations}'.`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in depthwiseConv2d: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n return {\n x: () => depthwiseConv2dNativeBackpropInput(x.shape, dy, filter, strides, pad, dilations, dimRoundingMode),\n filter: () => depthwiseConv2dNativeBackpropFilter(x, dy, filter.shape, strides, pad, dilations, dimRoundingMode),\n };\n }\n};\n//# sourceMappingURL=DepthwiseConv2dNative_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Dilation2D, Dilation2DBackpropFilter, Dilation2DBackpropInput } from '../kernel_names';\nexport const dilation2dGradConfig = {\n kernelName: Dilation2D,\n inputsToSave: ['x', 'filter'],\n gradFunc: (dy, saved, attrs) => {\n const [x, filter] = saved;\n const inputInputs = { x, filter, dy };\n const filterInputs = { x, filter, dy };\n return {\n x: () => ENGINE.runKernel(Dilation2DBackpropInput, inputInputs, attrs),\n filter: () => ENGINE.runKernel(Dilation2DBackpropFilter, filterInputs, attrs)\n };\n }\n};\n//# sourceMappingURL=Dilation2D_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Div } from '../kernel_names';\nimport * as broadcast_util from '../ops/broadcast_util';\nimport { cast } from '../ops/cast';\nimport { div } from '../ops/div';\nimport { mul } from '../ops/mul';\nimport { neg } from '../ops/neg';\nimport { reshape } from '../ops/reshape';\nimport { square } from '../ops/square';\nimport { sum } from '../ops/sum';\nexport const divGradConfig = {\n kernelName: Div,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved) => {\n const [a, b] = saved;\n const outShape = broadcast_util.assertAndGetBroadcastShape(a.shape, b.shape);\n const derA = () => {\n const res = div(dy, cast(b, 'float32'));\n const reduceAxes = broadcast_util.getReductionAxes(a.shape, outShape);\n if (reduceAxes.length > 0) {\n return reshape(sum(res, reduceAxes), a.shape);\n }\n return res;\n };\n const derB = () => {\n let res = mul(dy, cast(a, 'float32'));\n const reduceAxes = broadcast_util.getReductionAxes(b.shape, outShape);\n if (reduceAxes.length > 0) {\n res = reshape(sum(res, reduceAxes), b.shape);\n }\n const tmp = square(b);\n return neg(div(res, cast(tmp, 'float32')));\n };\n return { a: derA, b: derB };\n }\n};\n//# sourceMappingURL=Div_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { Elu, EluGrad } from '../kernel_names';\nexport const eluGradConfig = {\n kernelName: Elu,\n outputsToSave: [true],\n gradFunc: (dy, saved) => {\n const [y] = saved;\n const backPropKernelFunc = (backend) => {\n return backend.eluDer(dy, y);\n };\n const inputs = { dy, y };\n return {\n x: () => ENGINE.runKernelFunc(backPropKernelFunc, inputs, null /* grad */, EluGrad)\n };\n }\n};\n//# sourceMappingURL=Elu_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Erf } from '../kernel_names';\nimport { exp } from '../ops/exp';\nimport { mul } from '../ops/mul';\nimport { neg } from '../ops/neg';\nimport { square } from '../ops/square';\nexport const erfGradConfig = {\n kernelName: Erf,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n const a = mul(exp(neg(square(x))), 2 / Math.sqrt(Math.PI));\n return { x: () => mul(dy, a) };\n }\n};\n//# sourceMappingURL=Erf_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Exp } from '../kernel_names';\nimport { mul } from '../ops/mul';\nexport const expGradConfig = {\n kernelName: Exp,\n outputsToSave: [true],\n gradFunc: (dy, saved) => {\n const [y] = saved;\n return { x: () => mul(dy, y) };\n }\n};\n//# sourceMappingURL=Exp_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Expm1 } from '../kernel_names';\nimport { exp } from '../ops/exp';\nimport { mul } from '../ops/mul';\nexport const expm1GradConfig = {\n kernelName: Expm1,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => mul(dy, exp(x)) };\n }\n};\n//# sourceMappingURL=Expm1_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Floor } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const floorGradConfig = {\n kernelName: Floor,\n gradFunc: (dy) => {\n return { x: () => zerosLike(dy) };\n }\n};\n//# sourceMappingURL=Floor_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { FloorDiv } from '../kernel_names';\nimport { assertAndGetBroadcastShape, getReductionAxes } from '../ops/broadcast_util';\nimport { cast } from '../ops/cast';\nimport { div } from '../ops/div';\nimport { mul } from '../ops/mul';\nimport { neg } from '../ops/neg';\nimport { reshape } from '../ops/reshape';\nimport { square } from '../ops/square';\nimport { sum } from '../ops/sum';\nexport const floorDivGradConfig = {\n kernelName: FloorDiv,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved) => {\n const [a, b] = saved;\n const outShape = assertAndGetBroadcastShape(a.shape, b.shape);\n const derA = () => {\n const res = div(dy, cast(b, 'float32'));\n const reduceAxes = getReductionAxes(a.shape, outShape);\n if (reduceAxes.length > 0) {\n return reshape(sum(res, reduceAxes), a.shape);\n }\n return res;\n };\n const derB = () => {\n let res = mul(dy, cast(a, 'float32'));\n const reduceAxes = getReductionAxes(b.shape, outShape);\n if (reduceAxes.length > 0) {\n res = reshape(sum(res, reduceAxes), b.shape);\n }\n const tmp = square(b);\n return neg(div(res, cast(tmp, 'float32')));\n };\n return { a: derA, b: derB };\n }\n};\n//# sourceMappingURL=FloorDiv_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { FusedBatchNorm } from '../kernel_names';\nimport { add } from '../ops/add';\nimport { getReductionAxes } from '../ops/broadcast_util';\nimport { mul } from '../ops/mul';\nimport { reshape } from '../ops/reshape';\nimport { rsqrt } from '../ops/rsqrt';\nimport { scalar } from '../ops/scalar';\nimport { sub } from '../ops/sub';\nimport { sum } from '../ops/sum';\nimport { tile } from '../ops/tile';\nexport const fusedBatchNormGradConfig = {\n kernelName: FusedBatchNorm,\n inputsToSave: ['x', 'mean', 'variance', 'scale'],\n gradFunc: (dy, saved, attrs) => {\n const { varianceEpsilon } = attrs;\n const [x, mean, variance, scale] = saved;\n const scaleValue = scale == null ? scalar(1) : scale;\n const reductionAxes = getReductionAxes(mean.shape, x.shape);\n const tileShape = [];\n if (mean.rank === 1) {\n for (let i = 0; i < x.shape.length - 1; ++i) {\n tileShape.push(x.shape[i]);\n }\n tileShape.push(1);\n }\n const xMinusMean = sub(x, mean);\n const dyTimesScaleValue = mul(dy, scaleValue);\n const oneOverSqrtVariance = rsqrt(add(variance, scalar(varianceEpsilon)));\n const minusHalfRCube = mul(mul(mul(oneOverSqrtVariance, oneOverSqrtVariance), oneOverSqrtVariance), scalar(-0.5));\n const derX = () => {\n if (mean.rank === 1) {\n return reshape(mul(mul(dy, tile(reshape(oneOverSqrtVariance, [1, 1, 1, mean.shape[0]]), tileShape)), scaleValue), x.shape);\n }\n else {\n return reshape(mul(mul(dy, oneOverSqrtVariance), scaleValue), x.shape);\n }\n };\n const derMean = () => {\n let meanDer = mul(mul(oneOverSqrtVariance, scalar(-1)), dyTimesScaleValue);\n if (mean.rank === 1) {\n meanDer = sum(meanDer, reductionAxes);\n }\n return reshape(meanDer, mean.shape);\n };\n const derVariance = () => {\n let varianceDer = mul(mul(minusHalfRCube, xMinusMean), dyTimesScaleValue);\n if (mean.rank === 1) {\n varianceDer = sum(varianceDer, reductionAxes);\n }\n return reshape(varianceDer, mean.shape);\n };\n const derScale = () => {\n const xMinusMean2TimesRsqrt = mul(xMinusMean, oneOverSqrtVariance);\n let scaleDer = mul(dy, xMinusMean2TimesRsqrt);\n if (mean.rank === 1) {\n scaleDer = sum(scaleDer, reductionAxes);\n }\n return reshape(scaleDer, mean.shape);\n };\n const derOffset = () => {\n let offsetDer = dy;\n if (mean.rank === 1) {\n offsetDer = sum(offsetDer, reductionAxes);\n }\n return reshape(offsetDer, mean.shape);\n };\n return {\n x: derX,\n mean: derMean,\n variance: derVariance,\n scale: derScale,\n offset: derOffset\n };\n }\n};\n//# sourceMappingURL=FusedBatchNorm_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { GatherV2 } from '../kernel_names';\nimport { getUndoAxesPermutation } from '../ops/axis_util';\nimport { reshape } from '../ops/reshape';\nimport { transpose } from '../ops/transpose';\nimport { unsortedSegmentSum } from '../ops/unsorted_segment_sum';\nimport { parseAxisParam } from '../util';\nexport const gatherGradConfig = {\n kernelName: GatherV2,\n inputsToSave: ['x', 'indices'],\n gradFunc: (dy, saved, attrs) => {\n const [x, indices] = saved;\n const { axis } = attrs;\n const parsedAxis = parseAxisParam(axis, x.shape)[0];\n const derX = () => {\n const paramsShape = x.shape;\n const indicesSize = indices.size;\n const outerShape = paramsShape.slice(0, parsedAxis);\n const outerDims = outerShape.length;\n const innerShape = paramsShape.slice(axis, paramsShape.length).slice(1);\n const innerDims = innerShape.length;\n const outerAxesIndices = arrayRange(0, outerDims);\n const innerAxesIndices = arrayRange(outerDims + 1, outerDims + 1 + innerDims);\n const valuesShape = arrayConcat([outerShape, [indicesSize], innerShape]);\n const values = reshape(dy, valuesShape);\n const reshapedIndices = reshape(indices, [indicesSize]);\n const transposeDims = arrayConcat([[outerDims], outerAxesIndices, innerAxesIndices]);\n const valuesTranspose = transpose(values, transposeDims);\n let paramsGrad = unsortedSegmentSum(valuesTranspose, reshapedIndices, x.shape[parsedAxis]);\n const invertTransposeDims = getUndoAxesPermutation(transposeDims);\n paramsGrad = transpose(paramsGrad, invertTransposeDims);\n return paramsGrad;\n };\n return { x: derX, indices: () => indices };\n }\n};\nfunction arrayRange(start, stop) {\n const result = [];\n for (let i = start; i < stop; ++i) {\n result.push(i);\n }\n return result;\n}\nfunction arrayConcat(arrays) {\n const result = [];\n for (let i = 0; i < arrays.length; ++i) {\n for (let j = 0; j < arrays[i].length; ++j) {\n result.push(arrays[i][j]);\n }\n }\n return result;\n}\n//# sourceMappingURL=GatherV2_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { GreaterEqual } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const greaterEqualGradConfig = {\n kernelName: GreaterEqual,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved) => {\n const [a, b] = saved;\n return { a: () => zerosLike(a), b: () => zerosLike(b) };\n }\n};\n//# sourceMappingURL=GreaterEqual_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Identity } from '../kernel_names';\nimport { cast } from '../ops/cast';\nexport const identityGradConfig = {\n kernelName: Identity,\n gradFunc: (dy) => {\n return { x: () => cast(dy, 'float32') };\n }\n};\n//# sourceMappingURL=Identity_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { IsFinite } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const isFiniteGradConfig = {\n kernelName: IsFinite,\n gradFunc: (dy) => {\n // TODO(nsthorat): Let gradients be null for cases where we want to stop\n // backpropgation.\n return { x: () => zerosLike(dy) };\n }\n};\n//# sourceMappingURL=IsFinite_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { IsInf } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const isInfGradConfig = {\n kernelName: IsInf,\n gradFunc: (dy) => {\n // TODO(nsthorat): Let gradients be null for cases where we want to stop\n // backpropgation.\n return { x: () => zerosLike(dy) };\n }\n};\n//# sourceMappingURL=IsInf_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { IsNan } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const isNanGradConfig = {\n kernelName: IsNan,\n gradFunc: (dy) => {\n // TODO(nsthorat): Let gradients be null for cases where we want to stop\n // backpropgation.\n return { x: () => zerosLike(dy) };\n }\n};\n//# sourceMappingURL=IsNan_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Log1p } from '../kernel_names';\nimport { add } from '../ops/add';\nimport { div } from '../ops/div';\nexport const log1pGradConfig = {\n kernelName: Log1p,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => div(dy, add(x, 1)) };\n }\n};\n//# sourceMappingURL=Log1p_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Log } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { div } from '../ops/div';\nexport const logGradConfig = {\n kernelName: Log,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => div(dy, cast(x, 'float32')) };\n }\n};\n//# sourceMappingURL=Log_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { LogSoftmax } from '../kernel_names';\nimport { exp } from '../ops/exp';\nimport { mul } from '../ops/mul';\nimport { sub } from '../ops/sub';\nimport { sum } from '../ops/sum';\nexport const logSoftmaxGradConfig = {\n kernelName: LogSoftmax,\n inputsToSave: [],\n outputsToSave: [true],\n gradFunc: (dy, saved, attrs) => {\n const [value] = saved;\n const { axis } = attrs;\n return {\n logits: () => {\n const keepDims = true;\n const softmax = exp(value);\n return sub(dy, mul(sum(dy, axis, keepDims), softmax));\n }\n };\n }\n};\n//# sourceMappingURL=LogSoftmax_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { LRNBackprop } from '../kernel_names';\nimport { op } from './operation';\nfunction localResponseNormalizationBackprop_(x, y, dy, depthRadius = 5, bias = 1, alpha = 1, beta = 0.5) {\n const forward = backend => backend.LRNGrad(dy, x, y, depthRadius, bias, alpha, beta);\n const inputs = { x, y, dy };\n const attrs = { depthRadius, bias, alpha, beta };\n return ENGINE.runKernelFunc(forward, inputs, null /* grad */, LRNBackprop, attrs);\n}\nexport const localResponseNormalizationBackprop = op({ localResponseNormalizationBackprop_ });\n//# sourceMappingURL=local_response_normalization_backprop.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { LRN } from '../kernel_names';\nimport { localResponseNormalizationBackprop } from '../ops/local_response_normalization_backprop';\nexport const lrnGradConfig = {\n kernelName: LRN,\n inputsToSave: ['x'],\n outputsToSave: [true],\n gradFunc: (dy, saved, attrs) => {\n const [x, y] = saved;\n const { depthRadius, bias, alpha, beta } = attrs;\n return {\n x: () => localResponseNormalizationBackprop(x, y, dy, depthRadius, bias, alpha, beta)\n };\n }\n};\n//# sourceMappingURL=LRN_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as axis_util from '../ops/axis_util';\nimport { cast } from '../ops/cast';\nimport { equal } from '../ops/equal';\nimport { mul } from '../ops/mul';\nimport { reshape } from '../ops/reshape';\n/**\n * Gradient helper function for the min and max operations.\n */\nexport function gradForMinAndMax(dy, y, xOrig, origAxes) {\n if (y.rank < xOrig.rank) {\n y = reshape(y, axis_util.expandShapeToKeepDim(y.shape, origAxes));\n }\n if (dy.rank < xOrig.rank) {\n dy = reshape(dy, axis_util.expandShapeToKeepDim(dy.shape, origAxes));\n }\n return {\n x: () => {\n const dx = mul(dy, cast(equal(xOrig, y), dy.dtype));\n return dx;\n }\n };\n}\n//# sourceMappingURL=min_max_grad_util.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Max } from '../kernel_names';\nimport * as util from '../util';\nimport { gradForMinAndMax } from './min_max_grad_util';\nexport const maxGradConfig = {\n kernelName: Max,\n inputsToSave: ['x'],\n outputsToSave: [true],\n gradFunc: (dy, saved, attrs) => {\n const maxAttrs = attrs;\n const { reductionIndices } = maxAttrs;\n const x = saved[0];\n const y = saved[1];\n const origAxes = util.parseAxisParam(reductionIndices, x.shape);\n const maxGrad = gradForMinAndMax(dy, y, x, origAxes);\n return {\n x: () => {\n return maxGrad['x']();\n }\n };\n }\n};\n//# sourceMappingURL=Max_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Maximum } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { greaterEqual } from '../ops/greater_equal';\nimport { less } from '../ops/less';\nimport { mul } from '../ops/mul';\nexport const maximumGradConfig = {\n kernelName: Maximum,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved) => {\n const [a, b] = saved;\n const derA = () => mul(dy, cast(greaterEqual(a, b), 'float32'));\n const derB = () => mul(dy, cast(less(a, b), 'float32'));\n return { a: derA, b: derB };\n }\n};\n//# sourceMappingURL=Maximum_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { MaxPool3DBackprop } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\nimport { reshape } from './reshape';\n/**\n * Computes the backprop of a 3d max pool.\n *\n * @param dy The dy error, of rank 5 of shape\n * [batchSize, depth, height, width, channels].\n * assumed.\n * @param input The original input image, of rank 5 or rank 4 of shape\n * [batchSize, depth, height, width, channels].\n * @param output The original output image, of rank 5 of shape\n * [batchSize, outDepth, outHeight, outWidth, channels].\n * @param filterSize The filter size:\n * `[filterDepth, filterHeight, filterWidth]`.\n * `filterSize` is a single number,\n * then `filterDepth == filterHeight == filterWidth`.\n * @param strides The strides of the pooling:\n * `[strideDepth, strideHeight, strideWidth]`. If\n * `strides` is a single number, then `strideHeight == strideWidth`.\n * @param dilations Deprecated, this field will be gone in v3.0.0.\n * The dilation rates: `[dilationDepth, dilationHeight, dilationWidth]`\n * in which we sample input values across the depth, height and width\n * dimensions in dilated pooling.\n * Defaults to `[1, 1, 1]`. If `dilations` is a single number,\n * then `dilationDepth == dilationHeight == dilationWidth`.\n * If it is greater than 1, then all values of `strides` must be 1.\n * @param pad A string from: 'same', 'valid'. 
The type of padding algorithm\n * used in the forward prop of the op.\n * @param dimRoundingMode A string from: 'ceil', 'round', 'floor'. The\n * rounding mode used when computing output dimensions if pad is a\n * number. If none is provided, it will not round and error if the output\n * is of fractional size.\n */\nfunction maxPool3dBackprop_(dy, input, output, filterSize, strides, dilations = [1, 1, 1], pad, dimRoundingMode) {\n const $dy = convertToTensor(dy, 'dy', 'maxPool3dBackprop');\n const $input = convertToTensor(input, 'input', 'maxPool3dBackprop');\n const $output = convertToTensor(output, 'output', 'maxPool3dBackprop');\n let dy5D = $dy;\n let input5D = $input;\n let output5D = $output;\n let reshapedTo5D = false;\n if ($input.rank === 4) {\n reshapedTo5D = true;\n dy5D = reshape($dy, [1, $dy.shape[0], $dy.shape[1], $dy.shape[2], $dy.shape[3]]);\n input5D = reshape($input, [\n 1, $input.shape[0], $input.shape[1], $input.shape[2], $input.shape[3]\n ]);\n output5D = reshape($output, [\n 1, $output.shape[0], $output.shape[1], $output.shape[2], $output.shape[3]\n ]);\n }\n util.assert(dy5D.rank === 5, () => `Error in maxPool3dBackprop: dy must be rank 5 but got rank ` +\n `${dy5D.rank}.`);\n util.assert(input5D.rank === 5, () => `Error in maxPool3dBackprop: input must be rank 5 but got rank ` +\n `${input5D.rank}.`);\n util.assert(output5D.rank === 5, () => `Error in maxPool3dBackprop: output must be rank 5 but got rank ` +\n `${output5D.rank}.`);\n util.assert(conv_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in maxPool3dBackprop: Either strides or dilations ' +\n `must be 1. Got strides ${strides} and dilations '${dilations}'`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in maxPool3dBackprop: pad must be an integer when ` +\n `using, dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const forward = backend => {\n const convInfo = conv_util.computePool3DInfo(input5D.shape, filterSize, strides, dilations, pad, dimRoundingMode);\n return backend.maxPool3dBackprop(dy5D, input5D, output5D, convInfo);\n };\n const inputs = { dy: dy5D, input: input5D, output: output5D };\n const attrs = { filterSize, strides, dilations, pad, dimRoundingMode };\n const res = ENGINE.runKernelFunc(forward, inputs, null /* grad */, MaxPool3DBackprop, attrs);\n if (reshapedTo5D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3], res.shape[4]]);\n }\n return res;\n}\nexport const maxPool3dBackprop = op({ maxPool3dBackprop_ });\n//# sourceMappingURL=max_pool_3d_backprop.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { MaxPool3D } from '../kernel_names';\nimport { maxPool3dBackprop } from '../ops/max_pool_3d_backprop';\nexport const maxPool3DGradConfig = {\n kernelName: MaxPool3D,\n inputsToSave: ['x'],\n outputsToSave: [true],\n gradFunc: (dy, saved, attrs) => {\n const [x, y] = saved;\n const { filterSize, strides, dilations, pad, dimRoundingMode } = attrs;\n const $dilations = dilations == null ? [1, 1, 1] : dilations;\n return {\n x: () => maxPool3dBackprop(dy, x, y, filterSize, strides, $dilations, pad, dimRoundingMode)\n };\n }\n};\n//# sourceMappingURL=MaxPool3D_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { MaxPoolBackprop } from '../kernel_names';\nimport { convertToTensor } from '../tensor_util_env';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport { op } from './operation';\n/**\n * Computes the backprop of a 2D max pool.\n *\n * @param dy The dy error, of rank 4 or rank 3 of shape\n * [batchSize, height, width, channels]. If rank 3, batch of 1 is\n * assumed.\n * @param input The original input image, of rank 4, of shape\n * [batchSize, height, width, channels].\n * @param output The original output image, of rank 4, of shape\n * [batchSize, outHeight, outWidth, channels].\n * @param filterSize The filter size: `[filterHeight, filterWidth]`. If\n * `filterSize` is a single number, then `filterHeight == filterWidth`.\n * @param strides The strides of the pooling: `[strideHeight, strideWidth]`. If\n * `strides` is a single number, then `strideHeight == strideWidth`.\n * @param pad A string from: 'same', 'valid'. The type of padding algorithm\n * used in the forward prop of the op.\n * @param dimRoundingMode A string from: 'ceil', 'round', 'floor'. The\n * rounding mode used when computing output dimensions if pad is a\n * number. 
If none is provided, it will not round and error if the output\n * is of fractional size.\n */\nfunction maxPoolBackprop_(dy, input, output, filterSize, strides, pad, dimRoundingMode) {\n const $dy = convertToTensor(dy, 'dy', 'maxPoolBackprop');\n const $input = convertToTensor(input, 'input', 'maxPoolBackprop');\n const $output = convertToTensor(output, 'output', 'maxPoolBackprop');\n util.assert($input.rank === $dy.rank, () => `Rank of input (${$input.rank}) does not match rank of dy ` +\n `(${$dy.rank})`);\n util.assert($dy.rank === 4, () => `Error in maxPoolBackprop: dy must be rank 4 but got rank ` +\n `${$dy.rank}.`);\n util.assert($input.rank === 4, () => `Error in maxPoolBackprop: input must be rank 4 but got rank ` +\n `${$input.rank}.`);\n if (dimRoundingMode != null) {\n util.assert(util.isInt(pad), () => `Error in maxPoolBackprop: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const forward = backend => {\n const convInfo = conv_util.computePool2DInfo($input.shape, filterSize, strides, 1 /* dilations */, pad, dimRoundingMode);\n return backend.maxPoolBackprop($dy, $input, $output, convInfo);\n };\n const inputs = { dy: $dy, input: $input, output: $output };\n const attrs = { filterSize, strides, pad, dimRoundingMode };\n return ENGINE.runKernelFunc(forward, inputs, null, MaxPoolBackprop, attrs);\n}\nexport const maxPoolBackprop = op({ maxPoolBackprop_ });\n//# sourceMappingURL=max_pool_backprop.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { MaxPool } from '../kernel_names';\nimport { maxPoolBackprop } from '../ops/max_pool_backprop';\nexport const maxPoolGradConfig = {\n kernelName: MaxPool,\n inputsToSave: ['x'],\n outputsToSave: [true],\n gradFunc: (dy, saved, attrs) => {\n const [x, y] = saved;\n const { filterSize, strides, pad } = attrs;\n return {\n x: () => maxPoolBackprop(dy, x, y, filterSize, strides, pad)\n };\n }\n};\n//# sourceMappingURL=MaxPool_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Min } from '../kernel_names';\nimport * as util from '../util';\nimport { gradForMinAndMax } from './min_max_grad_util';\nexport const minGradConfig = {\n kernelName: Min,\n inputsToSave: ['x'],\n outputsToSave: [true],\n gradFunc: (dy, saved, attrs) => {\n const minAttrs = attrs;\n const { axis } = minAttrs;\n const [x, y] = saved;\n const origAxes = util.parseAxisParam(axis, x.shape);\n const minGrad = gradForMinAndMax(dy, y, x, origAxes);\n return {\n x: () => {\n return minGrad['x']();\n }\n };\n }\n};\n//# sourceMappingURL=Min_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Minimum } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { greater } from '../ops/greater';\nimport { lessEqual } from '../ops/less_equal';\nimport { mul } from '../ops/mul';\nexport const minimumGradConfig = {\n kernelName: Minimum,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved) => {\n const [a, b] = saved;\n const derA = () => mul(dy, cast(lessEqual(a, b), 'float32'));\n const derB = () => mul(dy, cast(greater(a, b), 'float32'));\n return { a: derA, b: derB };\n }\n};\n//# sourceMappingURL=Minimum_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { MirrorPad } from '../kernel_names';\nimport { slice } from '../ops/slice';\nexport const mirrorPadGradConfig = {\n kernelName: MirrorPad,\n inputsToSave: ['x'],\n gradFunc: (dy, saved, attrs) => {\n // Pad introduces values around the original tensor, so the gradient\n // slices the original shape out of the gradient.\n const x = saved[0];\n const { paddings } = attrs;\n const begin = paddings.map(p => p[0]);\n return { x: () => slice(dy, begin, x.shape) };\n }\n};\n//# sourceMappingURL=MirrorPad_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Mod } from '../kernel_names';\nimport { assertAndGetBroadcastShape, getReductionAxes } from '../ops/broadcast_util';\nimport { div } from '../ops/div';\nimport { floor } from '../ops/floor';\nimport { mul } from '../ops/mul';\nimport { neg } from '../ops/neg';\nimport { reshape } from '../ops/reshape';\nimport { sum } from '../ops/sum';\nexport const modGradConfig = {\n kernelName: Mod,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved) => {\n const [a, b] = saved;\n const outShape = assertAndGetBroadcastShape(a.shape, b.shape);\n const derA = () => {\n const reduceAxes = getReductionAxes(a.shape, outShape);\n if (reduceAxes.length > 0) {\n return reshape(sum(dy, reduceAxes), a.shape);\n }\n return dy;\n };\n const derB = () => {\n const res = mul(dy, neg(floor(div(a, b))));\n const reduceAxes = getReductionAxes(b.shape, outShape);\n if (reduceAxes.length > 0) {\n return reshape(sum(res, reduceAxes), b.shape);\n }\n return res;\n };\n return { a: derA, b: derB };\n }\n};\n//# sourceMappingURL=Mod_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Multiply } from '../kernel_names';\nimport { assertAndGetBroadcastShape, getReductionAxes } from '../ops/broadcast_util';\nimport { cast } from '../ops/cast';\nimport { mul } from '../ops/mul';\nimport { reshape } from '../ops/reshape';\nimport { sum } from '../ops/sum';\nexport const multiplyGradConfig = {\n kernelName: Multiply,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved) => {\n const [a, b] = saved;\n const outShape = assertAndGetBroadcastShape(a.shape, b.shape);\n const derA = () => {\n const res = mul(dy, cast(b, 'float32'));\n const reduceAxes = getReductionAxes(a.shape, outShape);\n if (reduceAxes.length > 0) {\n return reshape(sum(res, reduceAxes), a.shape);\n }\n return res;\n };\n const derB = () => {\n const res = mul(dy, cast(a, 'float32'));\n const reduceAxes = getReductionAxes(b.shape, outShape);\n if (reduceAxes.length > 0) {\n return reshape(sum(res, reduceAxes), b.shape);\n }\n return res;\n };\n return { a: derA, b: derB };\n }\n};\n//# sourceMappingURL=Multiply_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Negate } from '../kernel_names';\nimport { neg } from '../ops/neg';\nexport const negateGradConfig = {\n kernelName: Negate,\n gradFunc: (dy) => {\n return { x: () => neg(dy) };\n }\n};\n//# sourceMappingURL=Negate_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { OneHot } from '../kernel_names';\nimport { zeros } from '../ops/zeros';\nexport const oneHotGradConfig = {\n kernelName: OneHot,\n inputsToSave: ['indices'],\n gradFunc: (dy, saved) => {\n const indices = saved[0];\n return { indices: () => zeros(indices.shape, 'float32') };\n }\n};\n//# sourceMappingURL=OneHot_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { OnesLike } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const onesLikeGradConfig = {\n kernelName: OnesLike,\n gradFunc: (dy) => {\n return { x: () => zerosLike(dy) };\n }\n};\n//# sourceMappingURL=OnesLike_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { PadV2 } from '../kernel_names';\nimport { slice } from '../ops/slice';\nexport const padV2GradConfig = {\n kernelName: PadV2,\n inputsToSave: ['x'],\n gradFunc: (dy, saved, attrs) => {\n // Pad introduces values around the original tensor, so the gradient\n // slices the original shape out of the gradient.\n const x = saved[0];\n const { paddings } = attrs;\n const begin = paddings.map(p => p[0]);\n return { x: () => slice(dy, begin, x.shape) };\n }\n};\n//# sourceMappingURL=PadV2_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Pow } from '../kernel_names';\nimport * as broadcast_util from '../ops/broadcast_util';\nimport { cast } from '../ops/cast';\nimport { greater } from '../ops/greater';\nimport { log } from '../ops/log';\nimport { mul } from '../ops/mul';\nimport { pow } from '../ops/pow';\nimport { reshape } from '../ops/reshape';\nimport { scalar } from '../ops/scalar';\nimport { sub } from '../ops/sub';\nimport { sum } from '../ops/sum';\nimport { where } from '../ops/where';\nimport { zerosLike } from '../ops/zeros_like';\nexport const powGradConfig = {\n kernelName: Pow,\n inputsToSave: ['a', 'b'],\n outputsToSave: [true],\n gradFunc: (dy, saved) => {\n const [a, b, y] = saved;\n const base = a;\n const exp = b;\n const outShape = broadcast_util.assertAndGetBroadcastShape(base.shape, exp.shape);\n const derBase = () => {\n const expFloat = cast(exp, 'float32');\n let res = mul(dy, mul(expFloat, pow(base, sub(expFloat, scalar(1)))));\n const reduceAxes = broadcast_util.getReductionAxes(base.shape, outShape);\n if (reduceAxes.length > 0) {\n res = sum(res, reduceAxes);\n }\n return reshape(res, base.shape);\n };\n const derExp = () => {\n const condition = greater(base, 0);\n const logBase = where(condition, log(base), zerosLike(base));\n let res = mul(dy, mul(y, logBase));\n const reduceAxes = broadcast_util.getReductionAxes(exp.shape, outShape);\n if (reduceAxes.length > 0) {\n res = sum(res, reduceAxes);\n }\n return reshape(res, exp.shape);\n };\n return { a: derBase, b: derExp };\n }\n};\n//# sourceMappingURL=Pow_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Prelu } from '../kernel_names';\nimport { getReductionAxes } from '../ops/broadcast_util';\nimport { greater } from '../ops/greater';\nimport { mul } from '../ops/mul';\nimport { reshape } from '../ops/reshape';\nimport { sum } from '../ops/sum';\nimport { where } from '../ops/where';\nimport { zerosLike } from '../ops/zeros_like';\nexport const preluGradConfig = {\n kernelName: Prelu,\n inputsToSave: ['x', 'alpha'],\n gradFunc: (dy, saved) => {\n const [x, alpha] = saved;\n const mask = greater(x, 0);\n return {\n x: () => where(mask, dy, mul(dy, alpha)),\n alpha: () => {\n let res = where(mask, zerosLike(dy), mul(dy, x));\n const reduceAxes = getReductionAxes(alpha.shape, dy.shape);\n if (reduceAxes.length > 0) {\n res = sum(res, reduceAxes);\n }\n return reshape(res, alpha.shape);\n }\n };\n }\n};\n//# sourceMappingURL=Prelu_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Reciprocal } from '../kernel_names';\nimport { div } from '../ops/div';\nimport { neg } from '../ops/neg';\nimport { square } from '../ops/square';\nexport const reciprocalGradConfig = {\n kernelName: Reciprocal,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => div(dy, neg(square(x))) };\n }\n};\n//# sourceMappingURL=Reciprocal_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Relu6 } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { lessEqual } from '../ops/less_equal';\nimport { mul } from '../ops/mul';\nimport { step } from '../ops/step';\nexport const relu6GradConfig = {\n kernelName: Relu6,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n const mask = mul(lessEqual(x, 6), step(x));\n return { x: () => mul(dy, cast(mask, 'float32')) };\n }\n};\n//# sourceMappingURL=Relu6_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Relu } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { mul } from '../ops/mul';\nimport { step } from '../ops/step';\nexport const reluGradConfig = {\n kernelName: Relu,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => mul(dy, cast(step(x), 'float32')) };\n }\n};\n//# sourceMappingURL=Relu_grad.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Reshape } from '../kernel_names';\nimport { reshape } from '../ops/reshape';\nexport const reshapeGradConfig = {\n kernelName: Reshape,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => reshape(dy, x.shape) };\n }\n};\n//# sourceMappingURL=Reshape_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { ResizeBilinear, ResizeBilinearGrad } from '../kernel_names';\nexport const resizeBilinearGradConfig = {\n kernelName: ResizeBilinear,\n inputsToSave: ['images'],\n gradFunc: (dy, saved, attrs) => {\n const [images] = saved;\n const backPropKernelFunc = (backend) => {\n const { alignCorners } = attrs;\n return backend.resizeBilinearBackprop(dy, images, alignCorners);\n };\n const inputs = { images };\n const imagesDer = () => ENGINE.runKernelFunc(backPropKernelFunc, inputs, null /* gradient */, ResizeBilinearGrad, attrs);\n return { images: imagesDer };\n }\n};\n//# sourceMappingURL=ResizeBilinear_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ENGINE } from '../engine';\nimport { ResizeNearestNeighbor, ResizeNearestNeighborGrad } from '../kernel_names';\nexport const resizeNearestNeighborGradConfig = {\n kernelName: ResizeNearestNeighbor,\n inputsToSave: ['images'],\n gradFunc: (dy, saved, attrs) => {\n const [images] = saved;\n const backPropKernelFunc = (backend) => {\n const { alignCorners } = attrs;\n return backend.resizeNearestNeighborBackprop(dy, images, alignCorners);\n };\n const inputs = { images };\n const imagesDer = () => ENGINE.runKernelFunc(backPropKernelFunc, inputs, null /* gradient */, ResizeNearestNeighborGrad, attrs);\n return { images: imagesDer };\n }\n};\n//# sourceMappingURL=ResizeNearestNeighbor_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Reverse } from '../kernel_names';\nimport { reverse } from '../ops/reverse';\nimport { parseAxisParam } from '../util';\nexport const reverseGradConfig = {\n kernelName: Reverse,\n gradFunc: (dy, saved, attrs) => {\n const { dims } = attrs;\n const axes = parseAxisParam(dims, dy.shape);\n return { x: () => reverse(dy, axes) };\n }\n};\n//# sourceMappingURL=Reverse_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Round } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const roundGradConfig = {\n kernelName: Round,\n gradFunc: (dy) => {\n // TODO(nsthorat): Let gradients be null for cases where we want to stop\n // backpropgation.\n return { x: () => zerosLike(dy) };\n }\n};\n//# sourceMappingURL=Round_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Rsqrt } from '../kernel_names';\nimport { div } from '../ops/div';\nimport { mul } from '../ops/mul';\nimport { neg } from '../ops/neg';\nimport { pow } from '../ops/pow';\nexport const rsqrtGradConfig = {\n kernelName: Rsqrt,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => neg(div(dy, mul(pow(x, 1.5), 2))) };\n }\n};\n//# sourceMappingURL=Rsqrt_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { SelectV2 } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { logicalNot } from '../ops/logical_not';\nimport { mul } from '../ops/mul';\nimport { zerosLike } from '../ops/zeros_like';\nexport const selectV2PoolGradConfig = {\n kernelName: SelectV2,\n inputsToSave: ['condition'],\n gradFunc: (dy, saved) => {\n const [condition] = saved;\n return {\n // TODO(julianoks): Return null for condition gradient\n // when backprop supports it.\n condition: () => cast(zerosLike(condition), 'float32'),\n t: () => mul(dy, cast(condition, dy.dtype)),\n e: () => mul(dy, cast(logicalNot(condition), dy.dtype))\n };\n }\n};\n//# sourceMappingURL=SelectV2_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Selu } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { exp } from '../ops/exp';\nimport { greater } from '../ops/greater';\nimport { mul } from '../ops/mul';\nimport { scalar } from '../ops/scalar';\nimport { SELU_SCALE, SELU_SCALEALPHA } from '../ops/selu_util';\nimport { where } from '../ops/where';\nexport const seluGradConfig = {\n kernelName: Selu,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return {\n x: () => {\n const mask = greater(x, scalar(0));\n const scaleAlpha = scalar(SELU_SCALEALPHA);\n const scale = scalar(SELU_SCALE);\n const greaterThanZeroDer = mul(dy, scale);\n const lessEqualZeroDer = mul(mul(dy, scaleAlpha), exp(cast(x, 'float32')));\n return where(mask, greaterThanZeroDer, lessEqualZeroDer);\n }\n };\n }\n};\n//# sourceMappingURL=Selu_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sigmoid } from '../kernel_names';\nimport { mul } from '../ops/mul';\nimport { scalar } from '../ops/scalar';\nimport { sub } from '../ops/sub';\nexport const sigmoidGradConfig = {\n kernelName: Sigmoid,\n outputsToSave: [true],\n gradFunc: (dy, saved) => {\n const [y] = saved;\n return { x: () => mul(dy, mul(y, sub(scalar(1), y))) };\n }\n};\n//# sourceMappingURL=Sigmoid_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sign } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const signGradConfig = {\n kernelName: Sign,\n gradFunc: (dy) => {\n return { x: () => zerosLike(dy) };\n }\n};\n//# sourceMappingURL=Sign_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sin } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { cos } from '../ops/cos';\nimport { mul } from '../ops/mul';\nexport const sinGradConfig = {\n kernelName: Sin,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => mul(cos(cast(x, 'float32')), dy) };\n }\n};\n//# sourceMappingURL=Sin_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sinh } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { cosh } from '../ops/cosh';\nimport { mul } from '../ops/mul';\nexport const sinhGradConfig = {\n kernelName: Sinh,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => mul(cosh(cast(x, 'float32')), dy) };\n }\n};\n//# sourceMappingURL=Sinh_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Slice } from '../kernel_names';\nimport { pad } from '../ops/pad';\nimport { parseSliceParams } from '../ops/slice_util';\nexport const sliceGradConfig = {\n kernelName: Slice,\n inputsToSave: ['x'],\n gradFunc: (dy, saved, attrs) => {\n const [x] = saved;\n const { begin, size } = attrs;\n const inputShape = x.shape;\n const [begin_, size_] = parseSliceParams(x, begin, size);\n // Create an Nx2 padding where the first column represents how many\n // zeros are prepended (at start) for each dimension, and the second\n // column indicates how many zeros are appended (at end).\n // The number of zeros to append is the shape of the input\n // elementwise-subtracted by both the begin vector and sizes vector.\n const paddings = [];\n for (let i = 0; i < dy.rank; i++) {\n paddings.push([begin_[i], inputShape[i] - begin_[i] - size_[i]]);\n }\n return { x: () => pad(dy, paddings) };\n }\n};\n//# sourceMappingURL=Slice_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Softmax } from '../kernel_names';\nimport { mul } from '../ops/mul';\nimport { sub } from '../ops/sub';\nimport { sum } from '../ops/sum';\nexport const softmaxGradConfig = {\n kernelName: Softmax,\n outputsToSave: [true],\n gradFunc: (dy, saved, attrs) => {\n const [y] = saved;\n const { dim } = attrs;\n const keepDims = true;\n const dyTimesY = mul(dy, y);\n return {\n logits: () => sub(dyTimesY, mul(sum(dyTimesY, [dim], keepDims), y))\n };\n }\n};\n//# sourceMappingURL=Softmax_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Softplus } from '../kernel_names';\nimport { mul } from '../ops/mul';\nimport { sigmoid } from '../ops/sigmoid';\nexport const softplusGradConfig = {\n kernelName: Softplus,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => mul(dy, sigmoid(x)) };\n }\n};\n//# sourceMappingURL=Softplus_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { SpaceToBatchND } from '../kernel_names';\nimport { batchToSpaceND } from '../ops/batch_to_space_nd';\nexport const spaceToBatchNDGradConfig = {\n kernelName: SpaceToBatchND,\n gradFunc: (dy, saved, attrs) => {\n const { blockShape, paddings } = attrs;\n return { x: () => batchToSpaceND(dy, blockShape, paddings) };\n }\n};\n//# sourceMappingURL=SpaceToBatchND_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { SplitV } from '../kernel_names';\nimport { concat } from '../ops/concat';\nexport const splitVGradConfig = {\n kernelName: SplitV,\n gradFunc: (dy, saved, attrs) => {\n const { axis } = attrs;\n return { x: () => concat(dy, axis) };\n }\n};\n//# sourceMappingURL=SplitV_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sqrt } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { div } from '../ops/div';\nimport { mul } from '../ops/mul';\nimport { sqrt } from '../ops/sqrt';\nexport const sqrtGradConfig = {\n kernelName: Sqrt,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => div(dy, mul(sqrt(cast(x, 'float32')), 2)) };\n }\n};\n//# sourceMappingURL=Sqrt_grad.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Square } from '../kernel_names';\nimport { cast } from '../ops/cast';\nimport { mul } from '../ops/mul';\nexport const squareGradConfig = {\n kernelName: Square,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => mul(dy, mul(cast(x, 'float32'), 2)) };\n }\n};\n//# sourceMappingURL=Square_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { SquaredDifference } from '../kernel_names';\nimport { mul } from '../ops/mul';\nimport { scalar } from '../ops/scalar';\nimport { sub } from '../ops/sub';\nexport const squaredDifferenceGradConfig = {\n kernelName: SquaredDifference,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved) => {\n const [a, b] = saved;\n const two = scalar(2);\n const derA = () => mul(dy, mul(two, sub(a, b)));\n const derB = () => mul(dy, mul(two, sub(b, a)));\n return { a: derA, b: derB };\n }\n};\n//# sourceMappingURL=SquaredDifference_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Step } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const stepGradConfig = {\n kernelName: Step,\n gradFunc: (dy) => {\n // TODO(manrajgrover): Return null for gradients when backprop supports\n // it.\n return { x: () => zerosLike(dy) };\n }\n};\n//# sourceMappingURL=Step_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sub } from '../kernel_names';\nimport * as broadcast_util from '../ops/broadcast_util';\nimport { neg } from '../ops/neg';\nimport { reshape } from '../ops/reshape';\nimport { sum } from '../ops/sum';\nexport const subGradConfig = {\n kernelName: Sub,\n inputsToSave: ['a', 'b'],\n gradFunc: (dy, saved) => {\n const [a, b] = saved;\n const outShape = broadcast_util.assertAndGetBroadcastShape(a.shape, b.shape);\n const derA = () => {\n let res = dy;\n const reduceAxes = broadcast_util.getReductionAxes(a.shape, outShape);\n if (reduceAxes.length > 0) {\n res = sum(res, reduceAxes);\n }\n return reshape(res, a.shape);\n };\n const derB = () => {\n let res = dy;\n const reduceAxes = broadcast_util.getReductionAxes(b.shape, outShape);\n if (reduceAxes.length > 0) {\n res = sum(res, reduceAxes);\n }\n return reshape(neg(res), b.shape);\n };\n return { a: derA, b: derB };\n }\n};\n//# sourceMappingURL=Sub_grad.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sum } from '../kernel_names';\nimport { mul } from '../ops/mul';\nimport { ones } from '../ops/ones';\nimport { reshape } from '../ops/reshape';\nimport { parseAxisParam } from '../util';\nexport const sumGradConfig = {\n kernelName: Sum,\n inputsToSave: ['x'],\n gradFunc: (dy, saved, attrs) => {\n const [x] = saved;\n const expandedDyShape = x.shape.slice();\n const { axis } = attrs;\n const axes = parseAxisParam(axis, x.shape);\n axes.forEach(axis => {\n expandedDyShape[axis] = 1;\n });\n const expandedDy = reshape(dy, expandedDyShape);\n const derX = mul(expandedDy, ones(x.shape, 'float32'));\n return { x: () => derX };\n }\n};\n//# sourceMappingURL=Sum_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Tan } from '../kernel_names';\nimport { cos } from '../ops/cos';\nimport { div } from '../ops/div';\nimport { square } from '../ops/square';\nexport const tanGradConfig = {\n kernelName: Tan,\n inputsToSave: ['x'],\n gradFunc: (dy, saved) => {\n const [x] = saved;\n return { x: () => div(dy, square(cos(x))) };\n }\n};\n//# sourceMappingURL=Tan_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Tanh } from '../kernel_names';\nimport { mul } from '../ops/mul';\nimport { scalar } from '../ops/scalar';\nimport { square } from '../ops/square';\nimport { sub } from '../ops/sub';\nexport const tanhGradConfig = {\n kernelName: Tanh,\n outputsToSave: [true],\n gradFunc: (dy, saved) => {\n const [y] = saved;\n return { x: () => mul(sub(scalar(1), square(y)), dy) };\n }\n};\n//# sourceMappingURL=Tanh_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Tile } from '../kernel_names';\nimport { add } from '../ops/add';\nimport { slice } from '../ops/slice';\nimport { zerosLike } from '../ops/zeros_like';\nexport const tileGradConfig = {\n kernelName: Tile,\n inputsToSave: ['x'],\n gradFunc: (dy, saved, attrs) => {\n const [x] = saved;\n const { reps } = attrs;\n const derX = () => {\n let xGrad = zerosLike(x);\n // TODO(cais): Maybe reduce memory footprint by avoiding repeated\n // slicing.\n if (x.rank === 1) {\n for (let i = 0; i < reps[0]; ++i) {\n xGrad = add(xGrad, slice(dy, [i * x.shape[0]], [x.shape[0]]));\n }\n }\n else if (x.rank === 2) {\n for (let i = 0; i < reps[0]; ++i) {\n for (let j = 0; j < reps[1]; ++j) {\n xGrad = add(xGrad, slice(dy, [i * x.shape[0], j * x.shape[1]], [\n x.shape[0], x.shape[1]\n ]));\n }\n }\n }\n else if (x.rank === 3) {\n for (let i = 0; i < reps[0]; ++i) {\n for (let j = 0; j < reps[1]; ++j) {\n for (let k = 0; k < reps[2]; ++k) {\n xGrad =\n add(xGrad, slice(dy, [i * x.shape[0], j * x.shape[1], k * x.shape[2]], [x.shape[0], x.shape[1], x.shape[2]]));\n }\n }\n }\n }\n else if (x.rank === 4) {\n for (let i = 0; i < reps[0]; ++i) {\n for (let j = 0; j < reps[1]; ++j) {\n for (let k = 0; k < reps[2]; ++k) {\n for (let l = 0; l < reps[3]; ++l) {\n xGrad =\n add(xGrad, slice(dy, [\n i * x.shape[0], j * x.shape[1], k * x.shape[2],\n l * x.shape[3]\n ], [x.shape[0], x.shape[1], x.shape[2], x.shape[3]]));\n }\n }\n }\n }\n }\n else {\n throw new Error(`Gradient for tile operation is not implemented for rank-` +\n `${x.rank} tensors yet.`);\n }\n return xGrad;\n };\n return { x: derX };\n },\n};\n//# sourceMappingURL=Tile_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Transpose } from '../kernel_names';\nimport * as axis_util from '../ops/axis_util';\nimport { transpose } from '../ops/transpose';\nexport const transposeGradConfig = {\n kernelName: Transpose,\n gradFunc: (dy, saved, attrs) => {\n const transposeAttrs = attrs;\n const { perm } = transposeAttrs;\n const undoPerm = axis_util.getUndoAxesPermutation(perm);\n return { x: () => transpose(dy, undoPerm) };\n }\n};\n//# sourceMappingURL=Transpose_grad.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Unpack } from '../kernel_names';\nimport { stack } from '../ops/stack';\nexport const unpackGradConfig = {\n kernelName: Unpack,\n gradFunc: (dy, saved, attrs) => {\n const unpackAttrs = attrs;\n const { axis } = unpackAttrs;\n return { value: () => stack(dy, axis) };\n }\n};\n//# sourceMappingURL=Unpack_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { UnsortedSegmentSum } from '../kernel_names';\nimport { expandDims } from '../ops/expand_dims';\nimport { gather } from '../ops/gather';\nimport { greaterEqual } from '../ops/greater_equal';\nimport { logicalAnd } from '../ops/logical_and';\nimport { maximum } from '../ops/maximum';\nimport { ones } from '../ops/ones';\nimport { scalar } from '../ops/scalar';\nimport { where } from '../ops/where';\nimport { zerosLike } from '../ops/zeros_like';\nexport const unsortedSegmentSumGradConfig = {\n kernelName: UnsortedSegmentSum,\n inputsToSave: ['segmentIds'],\n gradFunc: (dy, saved) => {\n const [segmentIds] = saved;\n const derX = () => {\n return gatherDropNegatives(dy, segmentIds);\n };\n return { x: derX };\n }\n};\nfunction gatherDropNegatives(x, indices) {\n // Helper function for unsorted segment ops. Gathers params for\n // positive segment ids and gathers 0 for inputs with negative segment id.\n // Mirrors _GatherDropNegatives from tensorflow/python/ops/math_grad.py\n const zeroClippedIndices = maximum(indices, zerosLike(indices));\n const gathered = gather(x, zeroClippedIndices);\n let isPositive = greaterEqual(indices, scalar(0, 'int32'));\n const numIters = gathered.rank - isPositive.rank;\n for (let i = 0; i < numIters; ++i) {\n isPositive = expandDims(isPositive, i + 1);\n }\n isPositive = logicalAnd(isPositive, ones(gathered.shape, 'bool'));\n const zeroSlice = zerosLike(gathered);\n return where(isPositive, gathered, zeroSlice);\n}\n//# sourceMappingURL=UnsortedSegmentSum_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ZerosLike } from '../kernel_names';\nimport { zerosLike } from '../ops/zeros_like';\nexport const zerosLikeGradConfig = {\n kernelName: ZerosLike,\n gradFunc: (dy) => {\n return { x: () => zerosLike(dy) };\n }\n};\n//# sourceMappingURL=ZerosLike_grad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { absGradConfig } from './gradients/Abs_grad';\nimport { acosGradConfig } from './gradients/Acos_grad';\nimport { acoshGradConfig } from './gradients/Acosh_grad';\nimport { addGradConfig } from './gradients/Add_grad';\nimport { addNGradConfig } from './gradients/AddN_grad';\nimport { argMaxGradConfig } from './gradients/ArgMax_grad';\nimport { argMinGradConfig } from './gradients/ArgMin_grad';\nimport { asinGradConfig } from './gradients/Asin_grad';\nimport { asinhGradConfig } from './gradients/Asinh_grad';\nimport { atan2GradConfig } from './gradients/Atan2_grad';\nimport { atanGradConfig } from './gradients/Atan_grad';\nimport { atanhGradConfig } from './gradients/Atanh_grad';\nimport { avgPool3DGradConfig } from './gradients/AvgPool3D_grad';\nimport { avgPoolGradConfig } from './gradients/AvgPool_grad';\nimport { batchMatMulGradConfig } from './gradients/BatchMatMul_grad';\nimport { batchToSpaceNDGradConfig } from './gradients/BatchToSpaceND_grad';\nimport { broadcastToGradConfig } from './gradients/BroadcastTo_grad';\nimport { castGradConfig } from './gradients/Cast_grad';\nimport { ceilGradConfig } from './gradients/Ceil_grad';\nimport { clipByValueGradConfig } from './gradients/ClipByValue_grad';\nimport { concatGradConfig } from './gradients/Concat_grad';\nimport { conv2DGradConfig } from './gradients/Conv2D_grad';\nimport { conv2DBackpropInputGradConfig } from './gradients/Conv2DBackpropInput_grad';\nimport { conv3DGradConfig } from './gradients/Conv3D_grad';\nimport { cosGradConfig } from './gradients/Cos_grad';\nimport { coshGradConfig } from './gradients/Cosh_grad';\nimport { cumsumGradConfig } from './gradients/Cumsum_grad';\nimport { depthwiseConv2dNativeGradConfig } from './gradients/DepthwiseConv2dNative_grad';\nimport { dilation2dGradConfig } from './gradients/Dilation2D_grad';\nimport { divGradConfig } from './gradients/Div_grad';\nimport { eluGradConfig } from './gradients/Elu_grad';\nimport { erfGradConfig } from './gradients/Erf_grad';\nimport { expGradConfig } from './gradients/Exp_grad';\nimport { expm1GradConfig } from './gradients/Expm1_grad';\nimport { floorGradConfig } from './gradients/Floor_grad';\nimport { floorDivGradConfig } from './gradients/FloorDiv_grad';\nimport { fusedBatchNormGradConfig } from './gradients/FusedBatchNorm_grad';\nimport { gatherGradConfig } from './gradients/GatherV2_grad';\nimport { greaterEqualGradConfig } from './gradients/GreaterEqual_grad';\nimport { identityGradConfig } from './gradients/Identity_grad';\nimport { isFiniteGradConfig } from './gradients/IsFinite_grad';\nimport { isInfGradConfig } from './gradients/IsInf_grad';\nimport { isNanGradConfig } from './gradients/IsNan_grad';\nimport { log1pGradConfig } from './gradients/Log1p_grad';\nimport { logGradConfig } from './gradients/Log_grad';\nimport { logSoftmaxGradConfig } from 
'./gradients/LogSoftmax_grad';\nimport { lrnGradConfig } from './gradients/LRN_grad';\nimport { maxGradConfig } from './gradients/Max_grad';\nimport { maximumGradConfig } from './gradients/Maximum_grad';\nimport { maxPool3DGradConfig } from './gradients/MaxPool3D_grad';\nimport { maxPoolGradConfig } from './gradients/MaxPool_grad';\nimport { minGradConfig } from './gradients/Min_grad';\nimport { minimumGradConfig } from './gradients/Minimum_grad';\nimport { mirrorPadGradConfig } from './gradients/MirrorPad_grad';\nimport { modGradConfig } from './gradients/Mod_grad';\nimport { multiplyGradConfig } from './gradients/Multiply_grad';\nimport { negateGradConfig } from './gradients/Negate_grad';\nimport { oneHotGradConfig } from './gradients/OneHot_grad';\nimport { onesLikeGradConfig } from './gradients/OnesLike_grad';\nimport { padV2GradConfig } from './gradients/PadV2_grad';\nimport { powGradConfig } from './gradients/Pow_grad';\nimport { preluGradConfig } from './gradients/Prelu_grad';\nimport { reciprocalGradConfig } from './gradients/Reciprocal_grad';\nimport { relu6GradConfig } from './gradients/Relu6_grad';\nimport { reluGradConfig } from './gradients/Relu_grad';\nimport { reshapeGradConfig } from './gradients/Reshape_grad';\nimport { resizeBilinearGradConfig } from './gradients/ResizeBilinear_grad';\nimport { resizeNearestNeighborGradConfig } from './gradients/ResizeNearestNeighbor_grad';\nimport { reverseGradConfig } from './gradients/Reverse_grad';\nimport { roundGradConfig } from './gradients/Round_grad';\nimport { rsqrtGradConfig } from './gradients/Rsqrt_grad';\nimport { selectV2PoolGradConfig } from './gradients/SelectV2_grad';\nimport { seluGradConfig } from './gradients/Selu_grad';\nimport { sigmoidGradConfig } from './gradients/Sigmoid_grad';\nimport { signGradConfig } from './gradients/Sign_grad';\nimport { sinGradConfig } from './gradients/Sin_grad';\nimport { sinhGradConfig } from './gradients/Sinh_grad';\nimport { sliceGradConfig } from './gradients/Slice_grad';\nimport { softmaxGradConfig } from './gradients/Softmax_grad';\nimport { softplusGradConfig } from './gradients/Softplus_grad';\nimport { spaceToBatchNDGradConfig } from './gradients/SpaceToBatchND_grad';\nimport { splitVGradConfig } from './gradients/SplitV_grad';\nimport { sqrtGradConfig } from './gradients/Sqrt_grad';\nimport { squareGradConfig } from './gradients/Square_grad';\nimport { squaredDifferenceGradConfig } from './gradients/SquaredDifference_grad';\nimport { stepGradConfig } from './gradients/Step_grad';\nimport { subGradConfig } from './gradients/Sub_grad';\nimport { sumGradConfig } from './gradients/Sum_grad';\nimport { tanGradConfig } from './gradients/Tan_grad';\nimport { tanhGradConfig } from './gradients/Tanh_grad';\nimport { tileGradConfig } from './gradients/Tile_grad';\nimport { transposeGradConfig } from './gradients/Transpose_grad';\nimport { unpackGradConfig } from './gradients/Unpack_grad';\nimport { unsortedSegmentSumGradConfig } from './gradients/UnsortedSegmentSum_grad';\nimport { zerosLikeGradConfig } from './gradients/ZerosLike_grad';\nimport { registerGradient } from './kernel_registry';\n// Export all kernel configs here so that the package can auto register them\nconst gradConfigs = [\n absGradConfig,\n acosGradConfig,\n acoshGradConfig,\n addGradConfig,\n addNGradConfig,\n argMaxGradConfig,\n argMinGradConfig,\n asinGradConfig,\n asinhGradConfig,\n atan2GradConfig,\n atanGradConfig,\n atanhGradConfig,\n avgPool3DGradConfig,\n avgPoolGradConfig,\n batchMatMulGradConfig,\n 
batchToSpaceNDGradConfig,\n broadcastToGradConfig,\n castGradConfig,\n ceilGradConfig,\n clipByValueGradConfig,\n concatGradConfig,\n conv2DBackpropInputGradConfig,\n conv2DGradConfig,\n conv3DGradConfig,\n cosGradConfig,\n coshGradConfig,\n cumsumGradConfig,\n depthwiseConv2dNativeGradConfig,\n dilation2dGradConfig,\n divGradConfig,\n eluGradConfig,\n erfGradConfig,\n expGradConfig,\n expm1GradConfig,\n floorDivGradConfig,\n floorGradConfig,\n fusedBatchNormGradConfig,\n gatherGradConfig,\n greaterEqualGradConfig,\n identityGradConfig,\n isFiniteGradConfig,\n isInfGradConfig,\n isNanGradConfig,\n log1pGradConfig,\n logGradConfig,\n logSoftmaxGradConfig,\n lrnGradConfig,\n maxGradConfig,\n maxGradConfig,\n maximumGradConfig,\n maxPool3DGradConfig,\n maxPoolGradConfig,\n minGradConfig,\n minimumGradConfig,\n mirrorPadGradConfig,\n modGradConfig,\n multiplyGradConfig,\n negateGradConfig,\n oneHotGradConfig,\n onesLikeGradConfig,\n padV2GradConfig,\n padV2GradConfig,\n powGradConfig,\n preluGradConfig,\n reciprocalGradConfig,\n relu6GradConfig,\n reluGradConfig,\n reshapeGradConfig,\n resizeBilinearGradConfig,\n resizeNearestNeighborGradConfig,\n reverseGradConfig,\n roundGradConfig,\n rsqrtGradConfig,\n selectV2PoolGradConfig,\n seluGradConfig,\n sigmoidGradConfig,\n signGradConfig,\n sinGradConfig,\n sinhGradConfig,\n sliceGradConfig,\n softmaxGradConfig,\n softplusGradConfig,\n spaceToBatchNDGradConfig,\n spaceToBatchNDGradConfig,\n splitVGradConfig,\n splitVGradConfig,\n sqrtGradConfig,\n squaredDifferenceGradConfig,\n squareGradConfig,\n stepGradConfig,\n subGradConfig,\n sumGradConfig,\n tanGradConfig,\n tanhGradConfig,\n tileGradConfig,\n transposeGradConfig,\n unpackGradConfig,\n unsortedSegmentSumGradConfig,\n zerosLikeGradConfig\n];\nfor (const gradientConfig of gradConfigs) {\n registerGradient(gradientConfig);\n}\n//# sourceMappingURL=register_all_gradients.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { abs } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.abs = function () {\n this.throwIfDisposed();\n return abs(this);\n};\n//# sourceMappingURL=abs.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { acos } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.acos = function () {\n this.throwIfDisposed();\n return acos(this);\n};\n//# sourceMappingURL=acos.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { acosh } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.acosh = function () {\n this.throwIfDisposed();\n return acosh(this);\n};\n//# sourceMappingURL=acosh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { addStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated strict variants of ops have been deprecated\n */\nTensor.prototype.addStrict = function (x) {\n this.throwIfDisposed();\n return addStrict(this, x);\n};\n//# sourceMappingURL=add_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { add } from '../../ops/add';\nimport { Tensor } from '../../tensor';\nTensor.prototype.add = function (b) {\n this.throwIfDisposed();\n return add(this, b);\n};\n//# sourceMappingURL=add.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { all } from '../../ops/all';\nimport { Tensor } from '../../tensor';\nTensor.prototype.all = function (axis, keepDims) {\n this.throwIfDisposed();\n return all(this, axis, keepDims);\n};\n//# sourceMappingURL=all.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { any } from '../../ops/any';\nimport { Tensor } from '../../tensor';\nTensor.prototype.any = function (axis, keepDims) {\n this.throwIfDisposed();\n return any(this, axis, keepDims);\n};\n//# sourceMappingURL=any.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { argMax } from '../../ops/arg_max';\nimport { Tensor } from '../../tensor';\nTensor.prototype.argMax = function (axis) {\n this.throwIfDisposed();\n return argMax(this, axis);\n};\n//# sourceMappingURL=arg_max.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { argMin } from '../../ops/arg_min';\nimport { Tensor } from '../../tensor';\nTensor.prototype.argMin = function (axis) {\n this.throwIfDisposed();\n return argMin(this, axis);\n};\n//# sourceMappingURL=arg_min.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { reshape } from '../../ops/reshape';\nimport { Tensor } from '../../tensor';\nimport { assert } from '../../util';\n/** Converts a size-1 `tf.Tensor` to a `tf.Scalar`.\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.asScalar = function () {\n this.throwIfDisposed();\n assert(this.size === 1, () => 'The array must have only 1 element.');\n return reshape(this, []);\n};\n//# sourceMappingURL=as_scalar.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { cast } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * Casts a `tf.Tensor` to a specified dtype.\n *\n * @param dtype Data-type to cast the tensor to.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.asType = function (dtype) {\n this.throwIfDisposed();\n return cast(this, dtype);\n};\n//# sourceMappingURL=as_type.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { reshape } from '../../ops/reshape';\nimport { Tensor } from '../../tensor';\n/** Converts a `tf.Tensor` to a `tf.Tensor1D`.\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.as1D = function () {\n this.throwIfDisposed();\n return reshape(this, [this.size]);\n};\n//# sourceMappingURL=as1d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { reshape } from '../../ops/reshape';\nimport { Tensor } from '../../tensor';\n/**\n * Converts a `tf.Tensor` to a `tf.Tensor2D`.\n *\n * @param rows Number of rows in `tf.Tensor2D`.\n * @param columns Number of columns in `tf.Tensor2D`.\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.as2D = function (rows, columns) {\n this.throwIfDisposed();\n return reshape(this, [rows, columns]);\n};\n//# sourceMappingURL=as2d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { reshape } from '../../ops/reshape';\nimport { Tensor } from '../../tensor';\n/**\n * Converts a `tf.Tensor` to a `tf.Tensor3D`.\n *\n * @param rows Number of rows in `tf.Tensor3D`.\n * @param columns Number of columns in `tf.Tensor3D`.\n * @param depth Depth of `tf.Tensor3D`.\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.as3D = function (rows, columns, depth) {\n this.throwIfDisposed();\n return reshape(this, [rows, columns, depth]);\n};\n//# sourceMappingURL=as3d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { reshape } from '../../ops/reshape';\nimport { Tensor } from '../../tensor';\n/**\n * Converts a `tf.Tensor` to a `tf.Tensor4D`.\n *\n * @param rows Number of rows in `tf.Tensor4D`.\n * @param columns Number of columns in `tf.Tensor4D`.\n * @param depth Depth of `tf.Tensor4D`.\n * @param depth2 4th dimension of `tf.Tensor4D`.\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.as4D = function (rows, columns, depth, depth2) {\n this.throwIfDisposed();\n return reshape(this, [rows, columns, depth, depth2]);\n};\n//# sourceMappingURL=as4d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { reshape } from '../../ops/reshape';\nimport { Tensor } from '../../tensor';\n/**\n * Converts a `tf.Tensor` to a `tf.Tensor5D`.\n *\n * @param rows Number of rows in `tf.Tensor5D`.\n * @param columns Number of columns in `tf.Tensor5D`.\n * @param depth Depth of `tf.Tensor5D`.\n * @param depth2 4th dimension of `tf.Tensor5D`.\n * @param depth3 5th dimension of 'tf.Tensor5D'\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.as5D = function (rows, columns, depth, depth2, depth3) {\n this.throwIfDisposed();\n return reshape(this, [rows, columns, depth, depth2, depth3]);\n};\n//# sourceMappingURL=as5d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { asin } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.asin = function () {\n this.throwIfDisposed();\n return asin(this);\n};\n//# sourceMappingURL=asin.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { asinh } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.asinh = function () {\n this.throwIfDisposed();\n return asinh(this);\n};\n//# sourceMappingURL=asinh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { atan } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.atan = function () {\n this.throwIfDisposed();\n return atan(this);\n};\n//# sourceMappingURL=atan.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { atan2 } from '../../ops/atan2';\nimport { Tensor } from '../../tensor';\nTensor.prototype.atan2 = function (b) {\n this.throwIfDisposed();\n return atan2(this, b);\n};\n//# sourceMappingURL=atan2.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { atanh } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.atanh = function () {\n this.throwIfDisposed();\n return atanh(this);\n};\n//# sourceMappingURL=atanh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { avgPool } from '../../ops/avg_pool';\nimport { Tensor } from '../../tensor';\nTensor.prototype.avgPool = function (filterSize, strides, pad, dimRoundingMode) {\n this.throwIfDisposed();\n return avgPool(this, filterSize, strides, pad, dimRoundingMode);\n};\n//# sourceMappingURL=avg_pool.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { batchToSpaceND } from '../../ops/batch_to_space_nd';\nimport { Tensor } from '../../tensor';\nTensor.prototype.batchToSpaceND = function (blockShape, crops) {\n this.throwIfDisposed();\n return batchToSpaceND(this, blockShape, crops);\n};\n//# sourceMappingURL=batch_to_space_nd.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { batchNorm } from '../../ops/batchnorm';\nimport { Tensor } from '../../tensor';\nTensor.prototype.batchNorm = function (mean, variance, offset, scale, varianceEpsilon) {\n this.throwIfDisposed();\n return batchNorm(this, mean, variance, offset, scale, varianceEpsilon);\n};\n//# sourceMappingURL=batchnorm.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { broadcastTo } from '../../ops/broadcast_to';\nimport { Tensor } from '../../tensor';\nTensor.prototype.broadcastTo = function (shape) {\n this.throwIfDisposed();\n return broadcastTo(this, shape);\n};\n//# sourceMappingURL=broadcast_to.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { cast } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.cast = function (dtype) {\n this.throwIfDisposed();\n return cast(this, dtype);\n};\n//# sourceMappingURL=cast.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { ceil } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.ceil = function () {\n this.throwIfDisposed();\n return ceil(this);\n};\n//# sourceMappingURL=ceil.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { clipByValue } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.clipByValue = function (min, max) {\n this.throwIfDisposed();\n return clipByValue(this, min, max);\n};\n//# sourceMappingURL=clip_by_value.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { concat } from '../../ops/concat';\nimport { Tensor } from '../../tensor';\nTensor.prototype.concat = function (x, axis) {\n this.throwIfDisposed();\n if (x instanceof Tensor) {\n x = [x];\n }\n return concat([this, ...x], axis);\n};\n//# sourceMappingURL=concat.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { conv1d } from '../../ops/conv1d';\nimport { Tensor } from '../../tensor';\nTensor.prototype.conv1d = function (filter, stride, pad, dataFormat, dilation, dimRoundingMode) {\n this.throwIfDisposed();\n return conv1d(this, filter, stride, pad, dataFormat, dilation, dimRoundingMode);\n};\n//# sourceMappingURL=conv1d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { conv2dTranspose } from '../../ops/conv2d_transpose';\nimport { Tensor } from '../../tensor';\nTensor.prototype.conv2dTranspose = function (filter, outputShape, strides, pad, dimRoundingMode) {\n this.throwIfDisposed();\n return conv2dTranspose(this, filter, outputShape, strides, pad, dimRoundingMode);\n};\n//# sourceMappingURL=conv2d_transpose.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { conv2d } from '../../ops/conv2d';\nimport { Tensor } from '../../tensor';\nTensor.prototype.conv2d = function (filter, strides, pad, dataFormat, dilations, dimRoundingMode) {\n this.throwIfDisposed();\n return conv2d(this, filter, strides, pad, dataFormat, dilations, dimRoundingMode);\n};\n//# sourceMappingURL=conv2d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { cos } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.cos = function () {\n this.throwIfDisposed();\n return cos(this);\n};\n//# sourceMappingURL=cos.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { cosh } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.cosh = function () {\n this.throwIfDisposed();\n return cosh(this);\n};\n//# sourceMappingURL=cosh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { cumsum } from '../../ops/cumsum';\nimport { Tensor } from '../../tensor';\nTensor.prototype.cumsum = function (axis, exclusive, reverse) {\n this.throwIfDisposed();\n return cumsum(this, axis, exclusive, reverse);\n};\n//# sourceMappingURL=cumsum.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { depthToSpace } from '../../ops/depth_to_space';\nimport { Tensor } from '../../tensor';\nTensor.prototype.depthToSpace = function (blockSize, dataFormat) {\n this.throwIfDisposed();\n return depthToSpace(this, blockSize, dataFormat);\n};\n//# sourceMappingURL=depth_to_space.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { deprecationWarn } from '../../globals';\nimport { depthwiseConv2d } from '../../ops/depthwise_conv2d';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated Use `depthwiseConv2d` instead.\n */\nTensor.prototype.depthwiseConv2D = function (filter, strides, pad, dataFormat, dilations, dimRoundingMode) {\n deprecationWarn('depthwiseConv2D is deprecated, use depthwiseConv2d instead');\n this.throwIfDisposed();\n return depthwiseConv2d(this, filter, strides, pad, dataFormat, dilations, dimRoundingMode);\n};\n//# sourceMappingURL=depthwise_conv2D_deprecated.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { depthwiseConv2d } from '../../ops/depthwise_conv2d';\nimport { Tensor } from '../../tensor';\nTensor.prototype.depthwiseConv2d = function (filter, strides, pad, dataFormat, dilations, dimRoundingMode) {\n this.throwIfDisposed();\n return depthwiseConv2d(this, filter, strides, pad, dataFormat, dilations, dimRoundingMode);\n};\n//# sourceMappingURL=depthwise_conv2d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { dilation2d } from '../../ops/dilation2d';\nimport { Tensor } from '../../tensor';\nTensor.prototype.dilation2d = function (filter, strides, pad, dilations, dataFormat) {\n this.throwIfDisposed();\n return dilation2d(this, filter, strides, pad, dilations, dataFormat);\n};\n//# sourceMappingURL=dilation2d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { divNoNan } from '../../ops/div_no_nan';\nimport { Tensor } from '../../tensor';\nTensor.prototype.divNoNan = function (b) {\n this.throwIfDisposed();\n return divNoNan(this, b);\n};\n//# sourceMappingURL=div_no_nan.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { divStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.divStrict = function (x) {\n this.throwIfDisposed();\n return divStrict(this, x);\n};\n//# sourceMappingURL=div_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { div } from '../../ops/div';\nimport { Tensor } from '../../tensor';\nTensor.prototype.div = function (b) {\n this.throwIfDisposed();\n return div(this, b);\n};\n//# sourceMappingURL=div.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { dot } from '../../ops/dot';\nimport { Tensor } from '../../tensor';\nTensor.prototype.dot = function (b) {\n this.throwIfDisposed();\n return dot(this, b);\n};\n//# sourceMappingURL=dot.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { elu } from '../../ops/elu';\nimport { Tensor } from '../../tensor';\nTensor.prototype.elu = function () {\n this.throwIfDisposed();\n return elu(this);\n};\n//# sourceMappingURL=elu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { equalStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated strict variants of ops have been deprecated\n */\nTensor.prototype.equalStrict = function (x) {\n this.throwIfDisposed();\n return equalStrict(this, x);\n};\n//# sourceMappingURL=equal_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { equal } from '../../ops/equal';\nimport { Tensor } from '../../tensor';\nTensor.prototype.equal = function (b) {\n this.throwIfDisposed();\n return equal(this, b);\n};\n//# sourceMappingURL=equal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { erf } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.erf = function () {\n this.throwIfDisposed();\n return erf(this);\n};\n//# sourceMappingURL=erf.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { exp } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.exp = function () {\n this.throwIfDisposed();\n return exp(this);\n};\n//# sourceMappingURL=exp.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { expandDims } from '../../ops/expand_dims';\nimport { Tensor } from '../../tensor';\nTensor.prototype.expandDims = function (axis) {\n this.throwIfDisposed();\n return expandDims(this, axis);\n};\n//# sourceMappingURL=expand_dims.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { expm1 } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.expm1 = function () {\n this.throwIfDisposed();\n return expm1(this);\n};\n//# sourceMappingURL=expm1.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { fft } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.fft = function () {\n this.throwIfDisposed();\n return fft(this);\n};\n//# sourceMappingURL=fft.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { reshape } from '../../ops/reshape';\nimport { Tensor } from '../../tensor';\n/** Flatten a Tensor to a 1D array.\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.flatten = function () {\n this.throwIfDisposed();\n return reshape(this, [this.size]);\n};\n//# sourceMappingURL=flatten.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { floor } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.floor = function () {\n this.throwIfDisposed();\n return floor(this);\n};\n//# sourceMappingURL=floor.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { floorDiv } from '../../ops/floorDiv';\nimport { Tensor } from '../../tensor';\nTensor.prototype.floorDiv = function (b) {\n this.throwIfDisposed();\n return floorDiv(this, b);\n};\n//# sourceMappingURL=floorDiv.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { gather } from '../../ops/gather';\nimport { Tensor } from '../../tensor';\nTensor.prototype.gather = function (indices, axis) {\n this.throwIfDisposed();\n return gather(this, indices, axis);\n};\n//# sourceMappingURL=gather.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { greaterEqualStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated strict variants of ops have been deprecated\n */\nTensor.prototype.greaterEqualStrict = function (x) {\n this.throwIfDisposed();\n return greaterEqualStrict(this, x);\n};\n//# sourceMappingURL=greater_equal_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { greaterEqual } from '../../ops/greater_equal';\nimport { Tensor } from '../../tensor';\nTensor.prototype.greaterEqual = function (b) {\n this.throwIfDisposed();\n return greaterEqual(this, b);\n};\n//# sourceMappingURL=greater_equal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { greaterStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated strict variants of ops have been deprecated\n */\nTensor.prototype.greaterStrict = function (x) {\n this.throwIfDisposed();\n return greaterStrict(this, x);\n};\n//# sourceMappingURL=greater_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { greater } from '../../ops/greater';\nimport { Tensor } from '../../tensor';\nTensor.prototype.greater = function (b) {\n this.throwIfDisposed();\n return greater(this, b);\n};\n//# sourceMappingURL=greater.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { ifft } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.ifft = function () {\n this.throwIfDisposed();\n return ifft(this);\n};\n//# sourceMappingURL=ifft.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { irfft } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.irfft = function () {\n this.throwIfDisposed();\n return irfft(this);\n};\n//# sourceMappingURL=irfft.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { isFinite } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.isFinite = function () {\n this.throwIfDisposed();\n return isFinite(this);\n};\n//# sourceMappingURL=is_finite.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { isInf } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.isInf = function () {\n this.throwIfDisposed();\n return isInf(this);\n};\n//# sourceMappingURL=is_inf.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { isNaN } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.isNaN = function () {\n this.throwIfDisposed();\n return isNaN(this);\n};\n//# sourceMappingURL=is_nan.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { leakyRelu } from '../../ops/leaky_relu';\nimport { Tensor } from '../../tensor';\nTensor.prototype.leakyRelu = function (alpha) {\n this.throwIfDisposed();\n return leakyRelu(this, alpha);\n};\n//# sourceMappingURL=leaky_relu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { lessEqualStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated strict variants of ops have been deprecated\n */\nTensor.prototype.lessEqualStrict = function (x) {\n this.throwIfDisposed();\n return lessEqualStrict(this, x);\n};\n//# sourceMappingURL=less_equal_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { lessEqual } from '../../ops/less_equal';\nimport { Tensor } from '../../tensor';\nTensor.prototype.lessEqual = function (b) {\n this.throwIfDisposed();\n return lessEqual(this, b);\n};\n//# sourceMappingURL=less_equal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { lessStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.lessStrict = function (x) {\n this.throwIfDisposed();\n return lessStrict(this, x);\n};\n//# sourceMappingURL=less_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { less } from '../../ops/less';\nimport { Tensor } from '../../tensor';\nTensor.prototype.less = function (b) {\n this.throwIfDisposed();\n return less(this, b);\n};\n//# sourceMappingURL=less.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { localResponseNormalization } from '../../ops/local_response_normalization';\nimport { Tensor } from '../../tensor';\nTensor.prototype.localResponseNormalization = function (depthRadius, bias, alpha, beta) {\n this.throwIfDisposed();\n return localResponseNormalization(this, depthRadius, bias, alpha, beta);\n};\n//# sourceMappingURL=local_response_normalization.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { logSigmoid } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.logSigmoid = function () {\n this.throwIfDisposed();\n return logSigmoid(this);\n};\n//# sourceMappingURL=log_sigmoid.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { logSoftmax } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.logSoftmax = function (axis) {\n this.throwIfDisposed();\n return logSoftmax(this, axis);\n};\n//# sourceMappingURL=log_softmax.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { logSumExp } from '../../ops/log_sum_exp';\nimport { Tensor } from '../../tensor';\nTensor.prototype.logSumExp = function (axis, keepDims) {\n this.throwIfDisposed();\n return logSumExp(this, axis, keepDims);\n};\n//# sourceMappingURL=log_sum_exp.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { log } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.log = function () {\n this.throwIfDisposed();\n return log(this);\n};\n//# sourceMappingURL=log.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { log1p } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.log1p = function () {\n this.throwIfDisposed();\n return log1p(this);\n};\n//# sourceMappingURL=log1p.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { logicalAnd } from '../../ops/logical_and';\nimport { Tensor } from '../../tensor';\nTensor.prototype.logicalAnd = function (b) {\n this.throwIfDisposed();\n return logicalAnd(this, b);\n};\n//# sourceMappingURL=logical_and.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { logicalNot } from '../../ops/logical_not';\nimport { Tensor } from '../../tensor';\nTensor.prototype.logicalNot = function () {\n this.throwIfDisposed();\n return logicalNot(this);\n};\n//# sourceMappingURL=logical_not.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { logicalOr } from '../../ops/logical_or';\nimport { Tensor } from '../../tensor';\nTensor.prototype.logicalOr = function (b) {\n this.throwIfDisposed();\n return logicalOr(this, b);\n};\n//# sourceMappingURL=logical_or.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { logicalXor } from '../../ops/logical_xor';\nimport { Tensor } from '../../tensor';\nTensor.prototype.logicalXor = function (b) {\n this.throwIfDisposed();\n return logicalXor(this, b);\n};\n//# sourceMappingURL=logical_xor.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { matMul } from '../../ops/mat_mul';\nimport { Tensor } from '../../tensor';\nTensor.prototype.matMul = function (b, transposeA, transposeB) {\n this.throwIfDisposed();\n return matMul(this, b, transposeA, transposeB);\n};\n//# sourceMappingURL=mat_mul.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { maxPool } from '../../ops/max_pool';\nimport { Tensor } from '../../tensor';\nTensor.prototype.maxPool = function (filterSize, strides, pad, dimRoundingMode) {\n this.throwIfDisposed();\n return maxPool(this, filterSize, strides, pad, dimRoundingMode);\n};\n//# sourceMappingURL=max_pool.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { max } from '../../ops/max';\nimport { Tensor } from '../../tensor';\nTensor.prototype.max = function (axis, keepDims) {\n this.throwIfDisposed();\n return max(this, axis, keepDims);\n};\n//# sourceMappingURL=max.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { maximumStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated strict variants of ops have been deprecated\n */\nTensor.prototype.maximumStrict = function (x) {\n this.throwIfDisposed();\n return maximumStrict(this, x);\n};\n//# sourceMappingURL=maximum_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { maximum } from '../../ops/maximum';\nimport { Tensor } from '../../tensor';\nTensor.prototype.maximum = function (b) {\n this.throwIfDisposed();\n return maximum(this, b);\n};\n//# sourceMappingURL=maximum.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { mean } from '../../ops/mean';\nimport { Tensor } from '../../tensor';\nTensor.prototype.mean = function (axis, keepDims) {\n this.throwIfDisposed();\n return mean(this, axis, keepDims);\n};\n//# sourceMappingURL=mean.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { min } from '../../ops/min';\nimport { Tensor } from '../../tensor';\nTensor.prototype.min = function (axis, keepDims) {\n this.throwIfDisposed();\n return min(this, axis, keepDims);\n};\n//# sourceMappingURL=min.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { minimumStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated strict variants of ops have been deprecated\n */\nTensor.prototype.minimumStrict = function (x) {\n this.throwIfDisposed();\n return minimumStrict(this, x);\n};\n//# sourceMappingURL=minimum_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { minimum } from '../../ops/minimum';\nimport { Tensor } from '../../tensor';\nTensor.prototype.minimum = function (b) {\n this.throwIfDisposed();\n return minimum(this, b);\n};\n//# sourceMappingURL=minimum.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { mirrorPad } from '../../ops/mirror_pad';\nimport { Tensor } from '../../tensor';\nTensor.prototype.mirrorPad = function (paddings, mode) {\n this.throwIfDisposed();\n return mirrorPad(this, paddings, mode);\n};\n//# sourceMappingURL=mirror_pad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { modStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated strict variants of ops have been deprecated\n */\nTensor.prototype.modStrict = function (x) {\n this.throwIfDisposed();\n return modStrict(this, x);\n};\n//# sourceMappingURL=mod_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { mod } from '../../ops/mod';\nimport { Tensor } from '../../tensor';\nTensor.prototype.mod = function (b) {\n this.throwIfDisposed();\n return mod(this, b);\n};\n//# sourceMappingURL=mod.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { mulStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated strict variants of ops have been deprecated\n */\nTensor.prototype.mulStrict = function (x) {\n this.throwIfDisposed();\n return mulStrict(this, x);\n};\n//# sourceMappingURL=mul_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { mul } from '../../ops/mul';\nimport { Tensor } from '../../tensor';\nTensor.prototype.mul = function (b) {\n this.throwIfDisposed();\n return mul(this, b);\n};\n//# sourceMappingURL=mul.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { neg } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.neg = function () {\n this.throwIfDisposed();\n return neg(this);\n};\n//# sourceMappingURL=neg.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { norm } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.norm = function (ord, axis, keepDims) {\n this.throwIfDisposed();\n return norm(this, ord, axis, keepDims);\n};\n//# sourceMappingURL=norm.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { notEqualStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated strict variants of ops have been deprecated\n */\nTensor.prototype.notEqualStrict = function (x) {\n this.throwIfDisposed();\n return notEqualStrict(this, x);\n};\n//# sourceMappingURL=not_equal_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { notEqual } from '../../ops/not_equal';\nimport { Tensor } from '../../tensor';\nTensor.prototype.notEqual = function (b) {\n this.throwIfDisposed();\n return notEqual(this, b);\n};\n//# sourceMappingURL=not_equal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { oneHot } from '../../ops/one_hot';\nimport { Tensor } from '../../tensor';\nTensor.prototype.oneHot = function (depth, onValue = 1, offValue = 0) {\n this.throwIfDisposed();\n return oneHot(this, depth, onValue, offValue);\n};\n//# sourceMappingURL=one_hot.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { onesLike } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.onesLike = function () {\n this.throwIfDisposed();\n return onesLike(this);\n};\n//# sourceMappingURL=ones_like.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { pad } from '../../ops/pad';\nimport { Tensor } from '../../tensor';\nTensor.prototype.pad = function (paddings, constantValue) {\n this.throwIfDisposed();\n return pad(this, paddings, constantValue);\n};\n//# sourceMappingURL=pad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { pool } from '../../ops/pool';\nimport { Tensor } from '../../tensor';\nTensor.prototype.pool = function (windowShape, poolingType, padding, dilationRate, strides) {\n this.throwIfDisposed();\n return pool(this, windowShape, poolingType, padding, dilationRate, strides);\n};\n//# sourceMappingURL=pool.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { powStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated strict variants of ops have been deprecated\n */\nTensor.prototype.powStrict = function (exp) {\n this.throwIfDisposed();\n return powStrict(this, exp);\n};\n//# sourceMappingURL=pow_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { pow } from '../../ops/pow';\nimport { Tensor } from '../../tensor';\nTensor.prototype.pow = function (exp) {\n this.throwIfDisposed();\n return pow(this, exp);\n};\n//# sourceMappingURL=pow.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { prelu } from '../../ops/prelu';\nimport { Tensor } from '../../tensor';\nTensor.prototype.prelu = function (alpha) {\n this.throwIfDisposed();\n return prelu(this, alpha);\n};\n//# sourceMappingURL=prelu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { prod } from '../../ops/prod';\nimport { Tensor } from '../../tensor';\nTensor.prototype.prod = function (axis, keepDims) {\n this.throwIfDisposed();\n return prod(this, axis, keepDims);\n};\n//# sourceMappingURL=prod.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { reciprocal } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.reciprocal = function () {\n this.throwIfDisposed();\n return reciprocal(this);\n};\n//# sourceMappingURL=reciprocal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { relu } from '../../ops/relu';\nimport { Tensor } from '../../tensor';\nTensor.prototype.relu = function () {\n this.throwIfDisposed();\n return relu(this);\n};\n//# sourceMappingURL=relu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { relu6 } from '../../ops/relu6';\nimport { Tensor } from '../../tensor';\nTensor.prototype.relu6 = function () {\n this.throwIfDisposed();\n return relu6(this);\n};\n//# sourceMappingURL=relu6.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { reshape } from '../../ops/reshape';\nimport { Tensor } from '../../tensor';\n/**\n * Reshapes the tensor into the shape of the provided tensor.\n *\n * @param x The tensor of required shape.\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.reshapeAs = function (x) {\n this.throwIfDisposed();\n return reshape(this, x.shape);\n};\n//# sourceMappingURL=reshape_as.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { reshape } from '../../ops/reshape';\nimport { Tensor } from '../../tensor';\nTensor.prototype.reshape = function (shape) {\n this.throwIfDisposed();\n return reshape(this, shape);\n};\n//# sourceMappingURL=reshape.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { resizeBilinear } from '../../ops/image/resize_bilinear';\nimport { Tensor } from '../../tensor';\nTensor.prototype.resizeBilinear = function (newShape2D, alignCorners) {\n this.throwIfDisposed();\n return resizeBilinear(this, newShape2D, alignCorners);\n};\n//# sourceMappingURL=resize_bilinear.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { resizeNearestNeighbor } from '../../ops/image/resize_nearest_neighbor';\nimport { Tensor } from '../../tensor';\nTensor.prototype.resizeNearestNeighbor = function (newShape2D, alignCorners) {\n this.throwIfDisposed();\n return resizeNearestNeighbor(this, newShape2D, alignCorners);\n};\n//# sourceMappingURL=resize_nearest_neighbor.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { reverse } from '../../ops/reverse';\nimport { Tensor } from '../../tensor';\nTensor.prototype.reverse = function (axis) {\n this.throwIfDisposed();\n return reverse(this, axis);\n};\n//# sourceMappingURL=reverse.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { rfft } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.rfft = function () {\n this.throwIfDisposed();\n return rfft(this);\n};\n//# sourceMappingURL=rfft.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { round } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.round = function () {\n this.throwIfDisposed();\n return round(this);\n};\n//# sourceMappingURL=round.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { rsqrt } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.rsqrt = function () {\n this.throwIfDisposed();\n return rsqrt(this);\n};\n//# sourceMappingURL=rsqrt.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { selu } from '../../ops/selu';\nimport { Tensor } from '../../tensor';\nTensor.prototype.selu = function () {\n this.throwIfDisposed();\n return selu(this);\n};\n//# sourceMappingURL=selu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { separableConv2d } from '../../ops/separable_conv2d';\nimport { Tensor } from '../../tensor';\nTensor.prototype.separableConv2d = function (depthwiseFilter, pointwiseFilter, strides, pad, dilation, dataFormat) {\n this.throwIfDisposed();\n return separableConv2d(this, depthwiseFilter, pointwiseFilter, strides, pad, dilation, dataFormat);\n};\n//# sourceMappingURL=separable_conv2d.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { sigmoid } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.sigmoid = function () {\n this.throwIfDisposed();\n return sigmoid(this);\n};\n//# sourceMappingURL=sigmoid.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { sign } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.sign = function () {\n this.throwIfDisposed();\n return sign(this);\n};\n//# sourceMappingURL=sign.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { sin } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.sin = function () {\n this.throwIfDisposed();\n return sin(this);\n};\n//# sourceMappingURL=sin.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { sinh } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.sinh = function () {\n this.throwIfDisposed();\n return sinh(this);\n};\n//# sourceMappingURL=sinh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { slice } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.slice = function (begin, size) {\n this.throwIfDisposed();\n return slice(this, begin, size);\n};\n//# sourceMappingURL=slice.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { softmax } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.softmax = function (dim) {\n this.throwIfDisposed();\n return softmax(this, dim);\n};\n//# sourceMappingURL=softmax.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { softplus } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.softplus = function () {\n this.throwIfDisposed();\n return softplus(this);\n};\n//# sourceMappingURL=softplus.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { spaceToBatchND } from '../../ops/space_to_batch_nd';\nimport { Tensor } from '../../tensor';\nTensor.prototype.spaceToBatchND = function (blockShape, paddings) {\n this.throwIfDisposed();\n return spaceToBatchND(this, blockShape, paddings);\n};\n//# sourceMappingURL=space_to_batch_nd.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { split } from '../../ops/split';\nimport { Tensor } from '../../tensor';\nTensor.prototype.split = function (numOrSizeSplits, axis) {\n this.throwIfDisposed();\n return split(this, numOrSizeSplits, axis);\n};\n//# sourceMappingURL=split.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { sqrt } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.sqrt = function () {\n this.throwIfDisposed();\n return sqrt(this);\n};\n//# sourceMappingURL=sqrt.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { square } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.square = function () {\n this.throwIfDisposed();\n return square(this);\n};\n//# sourceMappingURL=square.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { squaredDifference } from '../../ops/squared_difference';\nimport { Tensor } from '../../tensor';\nTensor.prototype.squaredDifference = function (b) {\n this.throwIfDisposed();\n return squaredDifference(this, b);\n};\n//# sourceMappingURL=squared_difference.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { squaredDifferenceStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated strict variants of ops have been deprecated\n */\nTensor.prototype.squaredDifferenceStrict = function (x) {\n this.throwIfDisposed();\n return squaredDifferenceStrict(this, x);\n};\n//# sourceMappingURL=squared_difference_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { squeeze } from '../../ops/squeeze';\nimport { Tensor } from '../../tensor';\nTensor.prototype.squeeze = function (axis) {\n this.throwIfDisposed();\n return squeeze(this, axis);\n};\n//# sourceMappingURL=squeeze.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { stack } from '../../ops/stack';\nimport { Tensor } from '../../tensor';\nTensor.prototype.stack = function (x, axis) {\n this.throwIfDisposed();\n const tensorsToBeStacked = x instanceof Tensor ? [this, x] : [this, ...x];\n return stack(tensorsToBeStacked, axis);\n};\n//# sourceMappingURL=stack.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { step } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.step = function (alpha) {\n this.throwIfDisposed();\n return step(this, alpha);\n};\n//# sourceMappingURL=step.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { stridedSlice } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.stridedSlice = function (begin, end, strides, beginMask, endMask, ellipsisMask, newAxisMask, shrinkAxisMask) {\n this.throwIfDisposed();\n return stridedSlice(this, begin, end, strides, beginMask, endMask, ellipsisMask, newAxisMask, shrinkAxisMask);\n};\n//# sourceMappingURL=strided_slice.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { subStrict } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/**\n * @deprecated strict variants of ops have been deprecated\n */\nTensor.prototype.subStrict = function (x) {\n this.throwIfDisposed();\n return subStrict(this, x);\n};\n//# sourceMappingURL=sub_strict.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { sub } from '../../ops/sub';\nimport { Tensor } from '../../tensor';\nTensor.prototype.sub = function (b) {\n this.throwIfDisposed();\n return sub(this, b);\n};\n//# sourceMappingURL=sub.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { sum } from '../../ops/sum';\nimport { Tensor } from '../../tensor';\nTensor.prototype.sum = function (axis, keepDims) {\n this.throwIfDisposed();\n return sum(this, axis, keepDims);\n};\n//# sourceMappingURL=sum.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { tan } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.tan = function () {\n this.throwIfDisposed();\n return tan(this);\n};\n//# sourceMappingURL=tan.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { tanh } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.tanh = function () {\n this.throwIfDisposed();\n return tanh(this);\n};\n//# sourceMappingURL=tanh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { tile } from '../../ops/tile';\nimport { Tensor } from '../../tensor';\nTensor.prototype.tile = function (reps) {\n this.throwIfDisposed();\n return tile(this, reps);\n};\n//# sourceMappingURL=tile.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { cast } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/** Casts the array to type `bool`\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.toBool = function () {\n this.throwIfDisposed();\n return cast(this, 'bool');\n};\n//# sourceMappingURL=to_bool.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { cast } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/** Casts the array to type `float32`\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.toFloat = function () {\n this.throwIfDisposed();\n return cast(this, 'float32');\n};\n//# sourceMappingURL=to_float.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { cast } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\n/** Casts the array to type `int32`\n *\n * @doc {heading: 'Tensors', subheading: 'Classes'}\n */\nTensor.prototype.toInt = function () {\n this.throwIfDisposed();\n return cast(this, 'int32');\n};\n//# sourceMappingURL=to_int.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { topk } from '../../ops/topk';\nimport { Tensor } from '../../tensor';\nTensor.prototype.topk = function (k, sorted) {\n this.throwIfDisposed();\n return topk(this, k, sorted);\n};\n//# sourceMappingURL=topk.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { transpose } from '../../ops/transpose';\nimport { Tensor } from '../../tensor';\nTensor.prototype.transpose = function (perm) {\n this.throwIfDisposed();\n return transpose(this, perm);\n};\n//# sourceMappingURL=transpose.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { unique } from '../../ops/unique';\nimport { Tensor } from '../../tensor';\nTensor.prototype.unique = function (axis) {\n this.throwIfDisposed();\n return unique(this, axis);\n};\n//# sourceMappingURL=unique.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { unsortedSegmentSum } from '../../ops/unsorted_segment_sum';\nimport { Tensor } from '../../tensor';\nTensor.prototype.unsortedSegmentSum = function (segmentIds, numSegments) {\n this.throwIfDisposed();\n return unsortedSegmentSum(this, segmentIds, numSegments);\n};\n//# sourceMappingURL=unsorted_segment_sum.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { unstack } from '../../ops/unstack';\nimport { Tensor } from '../../tensor';\nTensor.prototype.unstack = function (axis) {\n this.throwIfDisposed();\n return unstack(this, axis);\n};\n//# sourceMappingURL=unstack.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { where } from '../../ops/where';\nimport { Tensor } from '../../tensor';\nTensor.prototype.where = function (condition, x) {\n this.throwIfDisposed();\n return where(condition, this, x);\n};\n//# sourceMappingURL=where.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// TODO update import path once op is modularized.\nimport { zerosLike } from '../../ops/ops';\nimport { Tensor } from '../../tensor';\nTensor.prototype.zerosLike = function () {\n this.throwIfDisposed();\n return zerosLike(this);\n};\n//# sourceMappingURL=zeros_like.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport './abs';\nimport './acos';\nimport './acosh';\nimport './add_strict';\nimport './add';\nimport './all';\nimport './any';\nimport './arg_max';\nimport './arg_min';\nimport './as_scalar';\nimport './as_type';\nimport './as1d';\nimport './as2d';\nimport './as3d';\nimport './as4d';\nimport './as5d';\nimport './asin';\nimport './asinh';\nimport './atan';\nimport './atan2';\nimport './atanh';\nimport './avg_pool';\nimport './batch_to_space_nd';\nimport './batchnorm';\nimport './broadcast_to';\nimport './cast';\nimport './ceil';\nimport './clip_by_value';\nimport './concat';\nimport './conv1d';\nimport './conv2d_transpose';\nimport './conv2d';\nimport './cos';\nimport './cosh';\nimport './cumsum';\nimport './depth_to_space';\nimport './depthwise_conv2D_deprecated';\nimport './depthwise_conv2d';\nimport './dilation2d';\nimport './div_no_nan';\nimport './div_strict';\nimport './div';\nimport './dot';\nimport './elu';\nimport './equal_strict';\nimport './equal';\nimport './erf';\nimport './exp';\nimport './expand_dims';\nimport './expm1';\nimport './fft';\nimport './flatten';\nimport './floor';\nimport './floorDiv';\nimport './gather';\nimport './greater_equal_strict';\nimport './greater_equal';\nimport './greater_strict';\nimport './greater';\nimport './ifft';\nimport './irfft';\nimport './is_finite';\nimport './is_inf';\nimport './is_nan';\nimport './leaky_relu';\nimport './less_equal_strict';\nimport './less_equal';\nimport './less_strict';\nimport './less';\nimport './local_response_normalization';\nimport './log_sigmoid';\nimport './log_softmax';\nimport './log_sum_exp';\nimport './log';\nimport './log1p';\nimport './logical_and';\nimport './logical_not';\nimport './logical_or';\nimport './logical_xor';\nimport './mat_mul';\nimport './max_pool';\nimport './max';\nimport './maximum_strict';\nimport './maximum';\nimport './mean';\nimport 
'./min';\nimport './minimum_strict';\nimport './minimum';\nimport './mirror_pad';\nimport './mod_strict';\nimport './mod';\nimport './mul_strict';\nimport './mul';\nimport './neg';\nimport './norm';\nimport './not_equal_strict';\nimport './not_equal';\nimport './one_hot';\nimport './ones_like';\nimport './pad';\nimport './pool';\nimport './pow_strict';\nimport './pow';\nimport './prelu';\nimport './prod';\nimport './reciprocal';\nimport './relu';\nimport './relu6';\nimport './reshape_as';\nimport './reshape';\nimport './resize_bilinear';\nimport './resize_nearest_neighbor';\nimport './reverse';\nimport './rfft';\nimport './round';\nimport './rsqrt';\nimport './selu';\nimport './separable_conv2d';\nimport './sigmoid';\nimport './sign';\nimport './sin';\nimport './sinh';\nimport './slice';\nimport './softmax';\nimport './softplus';\nimport './space_to_batch_nd';\nimport './split';\nimport './sqrt';\nimport './square';\nimport './squared_difference';\nimport './squared_difference_strict';\nimport './squeeze';\nimport './stack';\nimport './step';\nimport './strided_slice';\nimport './sub_strict';\nimport './sub';\nimport './sum';\nimport './tan';\nimport './tanh';\nimport './tile';\nimport './to_bool';\nimport './to_float';\nimport './to_int';\nimport './topk';\nimport './transpose';\nimport './unique';\nimport './unsorted_segment_sum';\nimport './unstack';\nimport './where';\nimport './zeros_like';\n//# sourceMappingURL=register_all_chained_ops.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// Required side effectful code.\nimport './base_side_effects';\n// All exports from this package should be in base.\nexport * from './base';\n// Register all the gradients.\nimport './register_all_gradients';\n// Import all op chainers and add type info to Tensor.\nimport './public/chained_ops/register_all_chained_ops';\n//# sourceMappingURL=index.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport { backend } from '@tensorflow/tfjs-core';\nlet _epsilon;\n/**\n * Returns the value of the fuzz factor used in numeric expressions.\n */\nexport function epsilon() {\n if (_epsilon == null) {\n _epsilon = backend().epsilon();\n }\n return _epsilon;\n}\n/**\n * Sets the value of the fuzz factor used in numeric expressions.\n * @param e New value of epsilon.\n */\nexport function setEpsilon(e) {\n _epsilon = e;\n}\n/**\n * Returns the default image data format convention.\n */\nexport function imageDataFormat() {\n return 'channelsLast';\n}\n//# sourceMappingURL=common.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * 
license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Explicit error types.\n *\n * See the following link for more information about why the code includes\n * calls to setPrototypeOf:\n *\n * https://github.com/Microsoft/TypeScript-wiki/blob/master/Breaking-Changes.md#extending-built-ins-like-error-array-and-map-may-no-longer-work\n */\n// tslint:enable\n/**\n * Equivalent of Python's AttributeError.\n */\nexport class AttributeError extends Error {\n constructor(message) {\n super(message);\n // Set the prototype explicitly.\n Object.setPrototypeOf(this, AttributeError.prototype);\n }\n}\n/**\n * Equivalent of Python's RuntimeError.\n */\nexport class RuntimeError extends Error {\n constructor(message) {\n super(message);\n // Set the prototype explicitly.\n Object.setPrototypeOf(this, RuntimeError.prototype);\n }\n}\n/**\n * Equivalent of Python's ValueError.\n */\nexport class ValueError extends Error {\n constructor(message) {\n super(message);\n // Set the prototype explicitly.\n Object.setPrototypeOf(this, ValueError.prototype);\n }\n}\n/**\n * Equivalent of Python's NotImplementedError.\n */\nexport class NotImplementedError extends Error {\n constructor(message) {\n super(message);\n // Set the prototype explicitly.\n Object.setPrototypeOf(this, NotImplementedError.prototype);\n }\n}\n/**\n * Equivalent of Python's AssertionError.\n */\nexport class AssertionError extends Error {\n constructor(message) {\n super(message);\n // Set the prototype explicitly.\n Object.setPrototypeOf(this, AssertionError.prototype);\n }\n}\n/**\n * Equivalent of Python's IndexError.\n */\nexport class IndexError extends Error {\n constructor(message) {\n super(message);\n // Set the prototype explicitly.\n Object.setPrototypeOf(this, IndexError.prototype);\n }\n}\n//# sourceMappingURL=errors.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* Original source: utils/generic_utils.py */\nimport { util } from '@tensorflow/tfjs-core';\nimport { AssertionError, ValueError } from '../errors';\n// tslint:enable\n/**\n * If `value` is an Array, equivalent to Python's `value * numValues`.\n * If `value` is not an Array, equivalent to Python's `[value] * numValues`\n */\n// tslint:disable-next-line:no-any\nexport function pyListRepeat(value, numValues) {\n if (Array.isArray(value)) {\n // tslint:disable-next-line:no-any\n let newArray = [];\n for (let i = 0; i < numValues; i++) {\n newArray = newArray.concat(value);\n }\n return newArray;\n }\n else {\n const newArray = new Array(numValues);\n newArray.fill(value);\n return newArray;\n }\n}\nexport function assert(val, message) {\n if (!val) {\n throw new AssertionError(message);\n }\n}\n/**\n * Count the number of elements of the `array` that are equal to `reference`.\n */\nexport function count(array, refernce) {\n let counter = 0;\n for (const item of array) {\n if (item === refernce) {\n counter++;\n }\n }\n return counter;\n}\n/**\n * If an array is of length 1, just return the first element. 
Otherwise, return\n * the full array.\n * @param tensors\n */\nexport function singletonOrArray(xs) {\n if (xs.length === 1) {\n return xs[0];\n }\n return xs;\n}\n/**\n * Normalizes a list/tensor into a list.\n *\n * If a tensor is passed, we return\n * a list of size 1 containing the tensor.\n *\n * @param x target object to be normalized.\n */\n// tslint:disable-next-line:no-any\nexport function toList(x) {\n if (Array.isArray(x)) {\n return x;\n }\n return [x];\n}\n/**\n * Generate a UID for a list\n */\n// tslint:disable-next-line:no-any\nexport function objectListUid(objs) {\n const objectList = toList(objs);\n let retVal = '';\n for (const obj of objectList) {\n if (obj.id == null) {\n throw new ValueError(`Object ${obj} passed to objectListUid without an id`);\n }\n if (retVal !== '') {\n retVal = retVal + ', ';\n }\n retVal = `${retVal}${Math.abs(obj.id)}`;\n }\n return retVal;\n}\n/**\n * Converts string to snake-case.\n * @param name\n */\nexport function toSnakeCase(name) {\n const intermediate = name.replace(/(.)([A-Z][a-z0-9]+)/g, '$1_$2');\n const insecure = intermediate.replace(/([a-z])([A-Z])/g, '$1_$2').toLowerCase();\n /*\n If the class is private the name starts with \"_\" which is not secure\n for creating scopes. We prefix the name with \"private\" in this case.\n */\n if (insecure[0] !== '_') {\n return insecure;\n }\n return 'private' + insecure;\n}\nexport function toCamelCase(identifier) {\n // quick return for empty string or single character strings\n if (identifier.length <= 1) {\n return identifier;\n }\n // Check for the underscore indicating snake_case\n if (identifier.indexOf('_') === -1) {\n return identifier;\n }\n return identifier.replace(/[_]+(\\w|$)/g, (m, p1) => p1.toUpperCase());\n}\n// tslint:disable-next-line:no-any\nlet _GLOBAL_CUSTOM_OBJECTS = {};\nexport function serializeKerasObject(instance) {\n if (instance === null || instance === undefined) {\n return null;\n }\n const dict = {};\n dict['className'] = instance.getClassName();\n dict['config'] = instance.getConfig();\n return dict;\n}\n/**\n * Replace ndarray-style scalar objects in serialization objects with numbers.\n *\n * Background: In some versions of tf.keras, certain scalar values in the HDF5\n * model save file can be serialized as: `{'type': 'ndarray', 'value': num}`,\n * where in `num` is a plain number. 
This method converts such serialization\n * to a `number`.\n *\n * @param config The keras-format serialization object to be processed\n * (in place).\n */\nfunction convertNDArrayScalarsInConfig(config) {\n if (config == null || typeof config !== 'object') {\n return;\n }\n else if (Array.isArray(config)) {\n config.forEach(configItem => convertNDArrayScalarsInConfig(configItem));\n }\n else {\n const fields = Object.keys(config);\n for (const field of fields) {\n const value = config[field];\n if (value != null && typeof value === 'object') {\n if (!Array.isArray(value) && value['type'] === 'ndarray' &&\n typeof value['value'] === 'number') {\n config[field] = value['value'];\n }\n else {\n convertNDArrayScalarsInConfig(value);\n }\n }\n }\n }\n}\n/**\n * Deserialize a saved Keras Object\n * @param identifier either a string ID or a saved Keras dictionary\n * @param moduleObjects a list of Python class names to object constructors\n * @param customObjects a list of Python class names to object constructors\n * @param printableModuleName debug text for the object being reconstituted\n * @param fastWeightInit Optional flag to use fast weight initialization\n * during deserialization. This is applicable to cases in which\n * the initialization will be immediately overwritten by loaded weight\n * values. Default: `false`.\n * @returns a TensorFlow.js Layers object\n */\n// tslint:disable:no-any\nexport function deserializeKerasObject(identifier, moduleObjects = {}, customObjects = {}, printableModuleName = 'object', fastWeightInit = false) {\n // tslint:enable\n if (typeof identifier === 'string') {\n const functionName = identifier;\n let fn;\n if (functionName in customObjects) {\n fn = customObjects[functionName];\n }\n else if (functionName in _GLOBAL_CUSTOM_OBJECTS) {\n fn = _GLOBAL_CUSTOM_OBJECTS[functionName];\n }\n else {\n fn = moduleObjects[functionName];\n if (fn == null) {\n throw new ValueError(`Unknown ${printableModuleName}: ${identifier}. ` +\n `This may be due to one of the following reasons:\\n` +\n `1. The ${printableModuleName} is defined in Python, in which ` +\n `case it needs to be ported to TensorFlow.js or your JavaScript ` +\n `code.\\n` +\n `2. The custom ${printableModuleName} is defined in JavaScript, ` +\n `but is not registered properly with ` +\n `tf.serialization.registerClass().`);\n // TODO(cais): Add link to tutorial page on custom layers.\n }\n }\n return fn;\n }\n else {\n // In this case we are dealing with a Keras config dictionary.\n const config = identifier;\n if (config['className'] == null || config['config'] == null) {\n throw new ValueError(`${printableModuleName}: Improper config format: ` +\n `${JSON.stringify(config)}.\\n` +\n `'className' and 'config' must set.`);\n }\n const className = config['className'];\n let cls, fromConfig;\n if (className in customObjects) {\n [cls, fromConfig] = customObjects[className];\n }\n else if (className in _GLOBAL_CUSTOM_OBJECTS) {\n [cls, fromConfig] = _GLOBAL_CUSTOM_OBJECTS['className'];\n }\n else if (className in moduleObjects) {\n [cls, fromConfig] = moduleObjects[className];\n }\n if (cls == null) {\n throw new ValueError(`Unknown ${printableModuleName}: ${className}. ` +\n `This may be due to one of the following reasons:\\n` +\n `1. The ${printableModuleName} is defined in Python, in which ` +\n `case it needs to be ported to TensorFlow.js or your JavaScript ` +\n `code.\\n` +\n `2. 
The custom ${printableModuleName} is defined in JavaScript, ` +\n `but is not registered properly with ` +\n `tf.serialization.registerClass().`);\n // TODO(cais): Add link to tutorial page on custom layers.\n }\n if (fromConfig != null) {\n // Porting notes: Instead of checking to see whether fromConfig accepts\n // customObjects, we create a customObjects dictionary and tack it on to\n // config['config'] as config['config'].customObjects. Objects can use it,\n // if they want.\n // tslint:disable-next-line:no-any\n const customObjectsCombined = {};\n for (const key of Object.keys(_GLOBAL_CUSTOM_OBJECTS)) {\n customObjectsCombined[key] = _GLOBAL_CUSTOM_OBJECTS[key];\n }\n for (const key of Object.keys(customObjects)) {\n customObjectsCombined[key] = customObjects[key];\n }\n // Add the customObjects to config\n const nestedConfig = config['config'];\n nestedConfig['customObjects'] = customObjectsCombined;\n const backupCustomObjects = Object.assign({}, _GLOBAL_CUSTOM_OBJECTS);\n for (const key of Object.keys(customObjects)) {\n _GLOBAL_CUSTOM_OBJECTS[key] = customObjects[key];\n }\n convertNDArrayScalarsInConfig(config['config']);\n const returnObj = fromConfig(cls, config['config'], customObjects, fastWeightInit);\n _GLOBAL_CUSTOM_OBJECTS = Object.assign({}, backupCustomObjects);\n return returnObj;\n }\n else {\n // Then `cls` may be a function returning a class.\n // In this case by convention `config` holds\n // the kwargs of the function.\n const backupCustomObjects = Object.assign({}, _GLOBAL_CUSTOM_OBJECTS);\n for (const key of Object.keys(customObjects)) {\n _GLOBAL_CUSTOM_OBJECTS[key] = customObjects[key];\n }\n // In python this is **config['config'], for tfjs-layers we require\n // classes that use this fall-through construction method to take\n // a config interface that mimics the expansion of named parameters.\n const returnObj = new cls(config['config']);\n _GLOBAL_CUSTOM_OBJECTS = Object.assign({}, backupCustomObjects);\n return returnObj;\n }\n }\n}\n/**\n * Compares two numbers for sorting.\n * @param a\n * @param b\n */\nexport function numberCompare(a, b) {\n return (a < b) ? -1 : ((a > b) ? 
1 : 0);\n}\n/**\n * Comparison of two numbers for reverse sorting.\n * @param a\n * @param b\n */\nexport function reverseNumberCompare(a, b) {\n return -1 * numberCompare(a, b);\n}\n/**\n * Convert a string into the corresponding DType.\n * @param dtype\n * @returns An instance of DType.\n */\nexport function stringToDType(dtype) {\n switch (dtype) {\n case 'float32':\n return 'float32';\n default:\n throw new ValueError(`Invalid dtype: ${dtype}`);\n }\n}\n/**\n * Test the element-by-element equality of two Arrays of strings.\n * @param xs First array of strings.\n * @param ys Second array of strings.\n * @returns Wether the two arrays are all equal, element by element.\n */\nexport function stringsEqual(xs, ys) {\n if (xs == null || ys == null) {\n return xs === ys;\n }\n if (xs.length !== ys.length) {\n return false;\n }\n for (let i = 0; i < xs.length; ++i) {\n if (xs[i] !== ys[i]) {\n return false;\n }\n }\n return true;\n}\n/**\n * Get the unique elements of an array.\n * @param xs Array.\n * @returns An Array consisting of the unique elements in `xs`.\n */\nexport function unique(xs) {\n if (xs == null) {\n return xs;\n }\n const out = [];\n // TODO(cais): Maybe improve performance by sorting.\n for (const x of xs) {\n if (out.indexOf(x) === -1) {\n out.push(x);\n }\n }\n return out;\n}\n/**\n * Determine if an Object is empty (i.e., does not have own properties).\n * @param obj Object\n * @returns Whether the Object is empty.\n * @throws ValueError: If object is `null` or `undefined`.\n */\nexport function isObjectEmpty(obj) {\n if (obj == null) {\n throw new ValueError(`Invalid value in obj: ${JSON.stringify(obj)}`);\n }\n for (const key in obj) {\n if (obj.hasOwnProperty(key)) {\n return false;\n }\n }\n return true;\n}\n/**\n * Helper function used to build type union/enum run-time checkers.\n * @param values The list of allowed values.\n * @param label A string name for the type\n * @param value The value to test.\n * @throws ValueError: If the value is not in values nor `undefined`/`null`.\n */\nexport function checkStringTypeUnionValue(values, label, value) {\n if (value == null) {\n return;\n }\n if (values.indexOf(value) < 0) {\n throw new ValueError(`${value} is not a valid ${label}. Valid values are ${values} or null/undefined.`);\n }\n}\n/**\n * Helper function for verifying the types of inputs.\n *\n * Ensures that the elements of `x` are all of type `expectedType`.\n * Also verifies that the length of `x` is within bounds.\n *\n * @param x Object to test.\n * @param expectedType The string expected type of all of the elements in the\n * Array.\n * @param minLength Return false if x.length is less than this.\n * @param maxLength Return false if x.length is greater than this.\n * @returns true if and only if `x` is an `Array` with\n * length >= `minLength` and <= `maxLength`.\n */\n// tslint:disable:no-any\nexport function checkArrayTypeAndLength(x, expectedType, minLength = 0, maxLength = Infinity) {\n assert(minLength >= 0);\n assert(maxLength >= minLength);\n return (Array.isArray(x) && x.length >= minLength && x.length <= maxLength &&\n x.every(e => typeof e === expectedType));\n}\n// tslint:enable:no-any\n/**\n * Assert that a value or an array of value are positive integer.\n *\n * @param value The value being asserted on. 
May be a single number or an array\n * of numbers.\n * @param name Name of the value, used to make the error message.\n */\nexport function assertPositiveInteger(value, name) {\n if (Array.isArray(value)) {\n util.assert(value.length > 0, () => `${name} is unexpectedly an empty array.`);\n value.forEach((v, i) => assertPositiveInteger(v, `element ${i + 1} of ${name}`));\n }\n else {\n util.assert(Number.isInteger(value) && value > 0, () => `Expected ${name} to be a positive integer, but got ` +\n `${formatAsFriendlyString(value)}.`);\n }\n}\n/**\n * Format a value into a display-friendly, human-readable fashion.\n *\n * - `null` is formatted as `'null'`\n * - Strings are formated with flanking pair of quotes.\n * - Arrays are formatted with flanking pair of square brackets.\n *\n * @param value The value to display.\n * @return Formatted string.\n */\n// tslint:disable-next-line:no-any\nexport function formatAsFriendlyString(value) {\n if (value === null) {\n return 'null';\n }\n else if (Array.isArray(value)) {\n return '[' + value.map(v => formatAsFriendlyString(v)).join(',') + ']';\n }\n else if (typeof value === 'string') {\n return `\"${value}\"`;\n }\n else {\n return `${value}`;\n }\n}\n/**\n * Returns a function `f2` (decorator) which wraps the original function\n * `f`. `f2` guarantees that `f` can be called at most once\n * every `waitMs` ms. If `f2` is called more often, it will return\n * the last returned result of `f`.\n *\n * @param f The original function `f` to wrap.\n * @param waitMs The time between two consecutive calls to `f` in ms.\n */\nexport function debounce(f, waitMs) {\n let lastTime = util.now();\n let lastResult;\n const f2 = (...args) => {\n const now = util.now();\n if (now - lastTime < waitMs) {\n return lastResult;\n }\n lastTime = now;\n lastResult = f(...args);\n return lastResult;\n };\n return f2;\n}\n/**\n * Returns the fusable activation given a layers identifier.\n *\n * @param activationName The layers identifier string.\n * @return The name of the fusable activation.\n */\nexport function mapActivationToFusedKernel(activationName) {\n if (activationName === 'relu') {\n return 'relu';\n }\n if (activationName === 'linear') {\n return 'linear';\n }\n if (activationName === 'elu') {\n return 'elu';\n }\n return null;\n}\n/**\n * Returns the cartesian product of sets of values.\n * This works the same as itertools.product in Python.\n *\n * Example:\n *\n * filters = [128, 256, 512]\n * paddings = ['same', 'valid']\n *\n * product = [ [128, 'same'], [128, 'valid'], [256, 'same'], [256, 'valid'],\n * [512, 'same'], [512, 'valid']]\n *\n * @param arrayOfValues List/array of values.\n * @return The cartesian product.\n */\nexport function getCartesianProductOfValues(...arrayOfValues) {\n assert(arrayOfValues.length > 0, 'arrayOfValues is empty');\n for (const values of arrayOfValues) {\n assert(Array.isArray(values), 'one of the values is not an array');\n assert(values.length > 0, 'one of the values is empty');\n }\n return arrayOfValues.reduce((products, values) => {\n if (products.length === 0) {\n return values.map(value => [value]);\n }\n return values\n .map(value => {\n return products.map((prevValue) => [...prevValue, value]);\n })\n .reduce((flattenedProduct, unflattenedProduct) => {\n return flattenedProduct.concat(unflattenedProduct);\n }, []);\n }, []);\n}\n//# sourceMappingURL=generic_utils.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the 
LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* Original source: keras/contraints.py */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { serialization, tidy } from '@tensorflow/tfjs-core';\nimport { epsilon } from './backend/common';\nimport { deserializeKerasObject, serializeKerasObject } from './utils/generic_utils';\n/**\n * Helper function used by many of the Constraints to find the L2Norms.\n */\nfunction calcL2Norms(w, axis) {\n return tidy(() => tfc.sqrt(tfc.sum(tfc.mul(w, w), axis, true)));\n}\n/**\n * Base class for functions that impose constraints on weight values\n *\n * @doc {\n * heading: 'Constraints',\n * subheading: 'Classes',\n * namespace: 'constraints'\n * }\n */\nexport class Constraint extends serialization.Serializable {\n getConfig() {\n return {};\n }\n}\nexport class MaxNorm extends Constraint {\n constructor(args) {\n super();\n this.defaultMaxValue = 2;\n this.defaultAxis = 0;\n this.maxValue =\n args.maxValue != null ? args.maxValue : this.defaultMaxValue;\n this.axis = args.axis != null ? args.axis : this.defaultAxis;\n }\n apply(w) {\n return tidy(() => {\n const norms = calcL2Norms(w, this.axis);\n const desired = tfc.clipByValue(norms, 0, this.maxValue);\n return tfc.mul(w, tfc.div(desired, tfc.add(epsilon(), norms)));\n });\n }\n getConfig() {\n return { maxValue: this.maxValue, axis: this.axis };\n }\n}\n/** @nocollapse */\nMaxNorm.className = 'MaxNorm';\nserialization.registerClass(MaxNorm);\nexport class UnitNorm extends Constraint {\n constructor(args) {\n super();\n this.defaultAxis = 0;\n this.axis = args.axis != null ? args.axis : this.defaultAxis;\n }\n apply(w) {\n return tidy(() => tfc.div(w, tfc.add(epsilon(), calcL2Norms(w, this.axis))));\n }\n getConfig() {\n return { axis: this.axis };\n }\n}\n/** @nocollapse */\nUnitNorm.className = 'UnitNorm';\nserialization.registerClass(UnitNorm);\nexport class NonNeg extends Constraint {\n apply(w) {\n return tfc.relu(w);\n }\n}\n/** @nocollapse */\nNonNeg.className = 'NonNeg';\nserialization.registerClass(NonNeg);\nexport class MinMaxNorm extends Constraint {\n constructor(args) {\n super();\n this.defaultMinValue = 0.0;\n this.defaultMaxValue = 1.0;\n this.defaultRate = 1.0;\n this.defaultAxis = 0;\n this.minValue =\n args.minValue != null ? args.minValue : this.defaultMinValue;\n this.maxValue =\n args.maxValue != null ? args.maxValue : this.defaultMaxValue;\n this.rate = args.rate != null ? args.rate : this.defaultRate;\n this.axis = args.axis != null ? 
args.axis : this.defaultAxis;\n }\n apply(w) {\n return tidy(() => {\n const norms = calcL2Norms(w, this.axis);\n const desired = tfc.add(tfc.mul(this.rate, tfc.clipByValue(norms, this.minValue, this.maxValue)), tfc.mul(1.0 - this.rate, norms));\n return tfc.mul(w, tfc.div(desired, tfc.add(epsilon(), norms)));\n });\n }\n getConfig() {\n return {\n minValue: this.minValue,\n maxValue: this.maxValue,\n rate: this.rate,\n axis: this.axis\n };\n }\n}\n/** @nocollapse */\nMinMaxNorm.className = 'MinMaxNorm';\nserialization.registerClass(MinMaxNorm);\n// Maps the JavaScript-like identifier keys to the corresponding registry\n// symbols.\nexport const CONSTRAINT_IDENTIFIER_REGISTRY_SYMBOL_MAP = {\n 'maxNorm': 'MaxNorm',\n 'minMaxNorm': 'MinMaxNorm',\n 'nonNeg': 'NonNeg',\n 'unitNorm': 'UnitNorm'\n};\nexport function serializeConstraint(constraint) {\n return serializeKerasObject(constraint);\n}\nexport function deserializeConstraint(config, customObjects = {}) {\n return deserializeKerasObject(config, serialization.SerializationMap.getMap().classNameMap, customObjects, 'constraint');\n}\nexport function getConstraint(identifier) {\n if (identifier == null) {\n return null;\n }\n if (typeof identifier === 'string') {\n const className = identifier in CONSTRAINT_IDENTIFIER_REGISTRY_SYMBOL_MAP ?\n CONSTRAINT_IDENTIFIER_REGISTRY_SYMBOL_MAP[identifier] :\n identifier;\n const config = { className, config: {} };\n return deserializeConstraint(config);\n }\n else if (identifier instanceof Constraint) {\n return identifier;\n }\n else {\n return deserializeConstraint(identifier);\n }\n}\n//# sourceMappingURL=constraints.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n// tslint:disable-next-line:max-line-length\nimport { MaxNorm, MinMaxNorm, NonNeg, UnitNorm } from './constraints';\n/**\n * MaxNorm weight constraint.\n *\n * Constrains the weights incident to each hidden unit\n * to have a norm less than or equal to a desired value.\n *\n * References\n * - [Dropout: A Simple Way to Prevent Neural Networks from Overfitting\n * Srivastava, Hinton, et al.\n * 2014](http://www.cs.toronto.edu/~rsalakhu/papers/srivastava14a.pdf)\n *\n * @doc {heading: 'Constraints',namespace: 'constraints'}\n */\nexport function maxNorm(args) {\n return new MaxNorm(args);\n}\n/**\n * Constrains the weights incident to each hidden unit to have unit norm.\n *\n * @doc {heading: 'Constraints', namespace: 'constraints'}\n */\nexport function unitNorm(args) {\n return new UnitNorm(args);\n}\n/**\n * Constains the weight to be non-negative.\n *\n * @doc {heading: 'Constraints', namespace: 'constraints'}\n */\nexport function nonNeg() {\n return new NonNeg();\n}\n/** @doc {heading: 'Constraints', namespace: 'constraints'} */\nexport function minMaxNorm(config) {\n return new MinMaxNorm(config);\n}\n//# sourceMappingURL=exports_constraints.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nexport const VALID_DATA_FORMAT_VALUES = ['channelsFirst', 'channelsLast'];\nexport const VALID_PADDING_MODE_VALUES = ['valid', 'same', 
'causal'];\nexport const VALID_POOL_MODE_VALUES = ['max', 'avg'];\nexport const VALID_BIDIRECTIONAL_MERGE_MODES = ['sum', 'mul', 'concat', 'ave'];\nexport const VALID_SAMPLE_WEIGHT_MODES = ['temporal'];\n//# sourceMappingURL=common.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Common functions for TensorFlow.js Layers.\n */\nimport { VALID_DATA_FORMAT_VALUES, VALID_PADDING_MODE_VALUES, VALID_POOL_MODE_VALUES } from './keras_format/common';\nimport { checkStringTypeUnionValue } from './utils/generic_utils';\n// A map from the requested scoped name of a Tensor to the number of Tensors\n// wanting that name so far. This allows enforcing name uniqueness by appending\n// an incrementing index, e.g. scope/name, scope/name_1, scope/name_2, etc.\nconst nameMap = new Map();\nexport function checkDataFormat(value) {\n checkStringTypeUnionValue(VALID_DATA_FORMAT_VALUES, 'DataFormat', value);\n}\nexport function checkPaddingMode(value) {\n checkStringTypeUnionValue(VALID_PADDING_MODE_VALUES, 'PaddingMode', value);\n}\nexport function checkPoolMode(value) {\n checkStringTypeUnionValue(VALID_POOL_MODE_VALUES, 'PoolMode', value);\n}\nconst _nameScopeStack = [];\nconst _nameScopeDivider = '/';\n/**\n * Enter namescope, which can be nested.\n */\nexport function nameScope(name, fn) {\n _nameScopeStack.push(name);\n try {\n const val = fn();\n _nameScopeStack.pop();\n return val;\n }\n catch (e) {\n _nameScopeStack.pop();\n throw e;\n }\n}\n/**\n * Get the current namescope as a flat, concatenated string.\n */\nfunction currentNameScopePrefix() {\n if (_nameScopeStack.length === 0) {\n return '';\n }\n else {\n return _nameScopeStack.join(_nameScopeDivider) + _nameScopeDivider;\n }\n}\n/**\n * Get the name a Tensor (or Variable) would have if not uniqueified.\n * @param tensorName\n * @return Scoped name string.\n */\nexport function getScopedTensorName(tensorName) {\n if (!isValidTensorName(tensorName)) {\n throw new Error('Not a valid tensor name: \\'' + tensorName + '\\'');\n }\n return currentNameScopePrefix() + tensorName;\n}\n/**\n * Get unique names for Tensors and Variables.\n * @param scopedName The fully-qualified name of the Tensor, i.e. as produced by\n * `getScopedTensorName()`.\n * @return A unique version of the given fully scoped name.\n * If this is the first time that the scoped name is seen in this session,\n * then the given `scopedName` is returned unaltered. 
If the same name is\n * seen again (producing a collision), an incrementing suffix is added to the\n * end of the name, so it takes the form 'scope/name_1', 'scope/name_2', etc.\n */\nexport function getUniqueTensorName(scopedName) {\n if (!isValidTensorName(scopedName)) {\n throw new Error('Not a valid tensor name: \\'' + scopedName + '\\'');\n }\n if (!nameMap.has(scopedName)) {\n nameMap.set(scopedName, 0);\n }\n const index = nameMap.get(scopedName);\n nameMap.set(scopedName, nameMap.get(scopedName) + 1);\n if (index > 0) {\n const result = `${scopedName}_${index}`;\n // Mark the composed name as used in case someone wants\n // to call getUniqueTensorName(\"name_1\").\n nameMap.set(result, 1);\n return result;\n }\n else {\n return scopedName;\n }\n}\nconst tensorNameRegex = new RegExp(/^[A-Za-z0-9][-A-Za-z0-9\\._\\/]*$/);\n/**\n * Determine whether a string is a valid tensor name.\n * @param name\n * @returns A Boolean indicating whether `name` is a valid tensor name.\n */\nexport function isValidTensorName(name) {\n return !!name.match(tensorNameRegex);\n}\n//# sourceMappingURL=common.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Math utility functions.\n *\n * This file contains some frequently used math function that operates on\n * number[] or Float32Array and return a number. Many of these functions are\n * not-so-thick wrappers around TF.js Core functions. But they offer the\n * convenience of\n * 1) not having to convert the inputs into Tensors,\n * 2) not having to convert the returned Tensors to numbers.\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { scalar, tensor1d } from '@tensorflow/tfjs-core';\nimport { ValueError } from '../errors';\n/**\n * Determine if a number is an integer.\n */\nexport function isInteger(x) {\n return x === parseInt(x.toString(), 10);\n}\n/**\n * Calculate the product of an array of numbers.\n * @param array The array to calculate the product over.\n * @param begin Beginning index, inclusive.\n * @param end Ending index, exclusive.\n * @return The product.\n */\nexport function arrayProd(array, begin, end) {\n if (begin == null) {\n begin = 0;\n }\n if (end == null) {\n end = array.length;\n }\n let prod = 1;\n for (let i = begin; i < end; ++i) {\n prod *= array[i];\n }\n return prod;\n}\n/**\n * A helper function transforms the two input types to an instance of Tensor1D,\n * so the return value can be fed directly into various TF.js Core functions.\n * @param array\n */\nfunction toArray1D(array) {\n array = Array.isArray(array) ? 
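The scoped-name uniquification described above (scope/name, scope/name_1, scope/name_2, ...) is internal to tfjs-layers; the following standalone sketch only mirrors the documented behavior and is not the library's API:

// Hypothetical re-implementation of the naming scheme, for illustration only.
const nameCounts = new Map();
function uniqueName(scopedName) {
  const index = nameCounts.get(scopedName) || 0;
  nameCounts.set(scopedName, index + 1);
  if (index === 0) return scopedName;        // first use: returned unaltered
  const result = `${scopedName}_${index}`;   // collision: append incrementing suffix
  nameCounts.set(result, 1);                 // reserve the composed name as well
  return result;
}
// uniqueName('dense/kernel') -> 'dense/kernel'
// uniqueName('dense/kernel') -> 'dense/kernel_1'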
new Float32Array(array) : array;\n return tensor1d(array);\n}\n/**\n * Compute minimum value.\n * @param array\n * @return minimum value.\n */\nexport function min(array) {\n return tfc.min(toArray1D(array)).dataSync()[0];\n}\n/**\n * Compute maximum value.\n * @param array\n * @return maximum value\n */\nexport function max(array) {\n return tfc.max(toArray1D(array)).dataSync()[0];\n}\n/**\n * Compute sum of array.\n * @param array\n * @return The sum.\n */\nexport function sum(array) {\n return tfc.sum(toArray1D(array)).dataSync()[0];\n}\n/**\n * Compute mean of array.\n * @param array\n * @return The mean.\n */\nexport function mean(array) {\n return sum(array) / array.length;\n}\n/**\n * Compute variance of array.\n * @param array\n * @return The variance.\n */\nexport function variance(array) {\n const demeaned = tfc.sub(toArray1D(array), scalar(mean(array)));\n const sumSquare = tfc.sum(tfc.mul(demeaned, demeaned)).dataSync()[0];\n return sumSquare / array.length;\n}\n/**\n * Compute median of array.\n * @param array\n * @return The median value.\n */\nexport function median(array) {\n const arraySorted = array.slice().sort((a, b) => a - b);\n const lowIdx = Math.floor((arraySorted.length - 1) / 2);\n const highIdx = Math.ceil((arraySorted.length - 1) / 2);\n if (lowIdx === highIdx) {\n return arraySorted[lowIdx];\n }\n return (arraySorted[lowIdx] + arraySorted[highIdx]) / 2;\n}\n/**\n * Generate an array of integers in [begin, end).\n * @param begin Beginning integer, inclusive.\n * @param end Ending integer, exclusive.\n * @returns Range array.\n * @throws ValueError, iff `end` < `begin`.\n */\nexport function range(begin, end) {\n if (end < begin) {\n throw new ValueError(`end (${end}) < begin (${begin}) is forbidden.`);\n }\n const out = [];\n for (let i = begin; i < end; ++i) {\n out.push(i);\n }\n return out;\n}\n//# sourceMappingURL=math_utils.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * deeplearn.js backend.\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { onesLike as coreOnesLike, scalar, tensor1d, tidy, where, zerosLike as coreZerosLike } from '@tensorflow/tfjs-core';\nimport { checkDataFormat } from '../common';\nimport { NotImplementedError, ValueError } from '../errors';\nimport * as math_utils from '../utils/math_utils';\nimport { imageDataFormat } from './common';\n// tslint:enable\n/* Setting and getting backend from deeplearn.js. */\n// Default deeplearn.js backend is WebGL (GPU).\nlet backend = 'webgl';\nexport function setBackend(requestedBackend) {\n tfc.setBackend(requestedBackend);\n backend = requestedBackend;\n}\nexport function getBackend() {\n return backend;\n}\n/**\n * Indicates whether the backend is operating symbolically.\n *\n * This function will be used to determine how to interpret user code. 
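The math_utils helpers above operate on plain number arrays so callers avoid Tensor round-trips. A quick check of the median/variance semantics in ordinary JavaScript (no library calls assumed):

const xs = [3, 1, 4, 1, 5, 9];
// median of an even-length array averages the two middle values
const sorted = xs.slice().sort((a, b) => a - b);                      // [1, 1, 3, 4, 5, 9]
const med = (sorted[2] + sorted[3]) / 2;                              // 3.5
// variance is the mean of squared deviations from the mean
const avg = xs.reduce((a, b) => a + b, 0) / xs.length;                // ~3.83
const varc = xs.reduce((a, b) => a + (b - avg) ** 2, 0) / xs.length;  // ~7.47
console.log(med, avg.toFixed(2), varc.toFixed(2));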
If\n * it returns true, calls to the backend construct a symbolic graph; if\n * it returns false, calls to the backend execute immediately.\n */\nexport function isBackendSymbolic() {\n return false;\n}\n/**\n * Get the number of elements in a Tensor.\n * @param x The Tensor.\n * @return Number of elements in `x`.\n */\nexport function countParams(x) {\n const shape = x.shape;\n if (shape.length > 0) {\n return shape.reduce((a, b) => a * b);\n }\n else {\n // Scalar.\n return 1;\n }\n}\n/**\n * Casts a tensor to a different dtype and returns it.\n * @param x Input tensor.\n * @param dtype String: 'float32'|'int32'|'bool'.\n * @returns Tensor of the specified `dtype`.\n */\nexport function cast(x, dtype) {\n return x.asType(dtype);\n}\n/**\n * Adds a 1-sized dimension at index \"axis\".\n * @param x Input tensor.\n * @param axis Position where to add the new axis.\n * @returns Result of the dimension expansion.\n */\nexport function expandDims(x, axis = -1) {\n const outShape = x.shape.slice();\n if (axis < 0) {\n axis = outShape.length + axis + 1;\n }\n outShape.splice(axis, 0, 1);\n return x.reshape(outShape);\n}\n/**\n * Repeats a 2D tensor.\n *\n * If `x` has shape `[samples, dim]` and `n` is 2, for example, the output\n * will have shape `[samples, 2, dim]`.\n *\n * @param x Input tensor.\n * @param n Integer, number of times to repeat.\n * @returns The result of the repeat operation.\n * @throws ValueError: If input tensor is not 2D.\n */\nexport function repeat(x, n) {\n return tidy(() => {\n if (x.shape.length !== 2) {\n throw new ValueError(`repeat() expects a rank-2 tensor, but received a ` +\n `rank-${x.shape.length} tensor.`);\n }\n const y = expandDims(x, 1);\n return tile(y, [1, n, 1]);\n });\n}\n/**\n * Flatten a Tensor into 1D.\n * @param x Input tensor.\n * @return The result of the flattening `x`.\n */\nexport function flatten(x) {\n const newShape = [math_utils.arrayProd(x.shape)];\n return x.reshape(newShape);\n}\n/**\n * Turn a nD tensor into a 2D tensor with same 0th dimension.\n * In other words, it flattens each data samples of a batch.\n *\n * @param x The tensor to flatten. The rank of this tensor is required to be 2\n * or higher.\n * @return The result of the flattening.\n */\nexport function batchFlatten(x) {\n if (x.rank <= 1) {\n throw new ValueError(`batchFlatten requires a minimum rank of 2. 
Got rank: ${x.rank}.`);\n }\n const newShape = [x.shape[0], math_utils.arrayProd(x.shape, 1)];\n return x.reshape(newShape);\n}\n/**\n * Do slicing along the first axis.\n * @param array input `tf.Tensor`.\n * @param start starting index, inclusive.\n * @param size size of the slice along the first axis.\n * @returns result of the slicing.\n * @throws ValueError: If `array` is of an unsupported subtype of `tf.Tensor`.\n */\nexport function sliceAlongFirstAxis(array, start, size) {\n return tidy(() => {\n switch (array.rank) {\n case 1:\n return tfc.slice1d(array, start, size);\n case 2:\n return tfc.slice2d(array, [start, 0], [size, array.shape[1]]);\n case 3:\n return tfc.slice3d(array, [start, 0, 0], [size, array.shape[1], array.shape[2]]);\n case 4:\n return tfc.slice4d(array, [start, 0, 0, 0], [size, array.shape[1], array.shape[2], array.shape[3]]);\n case 5:\n return tfc.slice(array, [start, 0, 0, 0, 0], [\n size, array.shape[1], array.shape[2], array.shape[3], array.shape[4]\n ]);\n case 6:\n return tfc.slice(array, [start, 0, 0, 0, 0, 0], [\n size, array.shape[1], array.shape[2], array.shape[3], array.shape[4],\n array.shape[5]\n ]);\n default:\n throw new ValueError(`sliceAlongFirstAxis() received an unsupported tensor rank: ` +\n `${array.rank}`);\n }\n });\n}\n/**\n * Do slicing along the last axis.\n * @param array input `tf.Tensor`.\n * @param start starting index, inclusive.\n * @param size size of the slice along the last axis.\n * @returns result of the slicing.\n * @throws ValueError: If `array` is of an unsupported subtype of `tf.Tensor`.\n */\nexport function sliceAlongLastAxis(array, start, size) {\n return tidy(() => {\n switch (array.rank) {\n case 1:\n return tfc.slice1d(array, start, size);\n case 2:\n return tfc.slice2d(array, [0, start], [array.shape[0], size]);\n case 3:\n return tfc.slice3d(array, [0, 0, start], [array.shape[0], array.shape[1], size]);\n case 4:\n return tfc.slice4d(array, [0, 0, 0, start], [array.shape[0], array.shape[1], array.shape[2], size]);\n default:\n throw new ValueError(`sliceAlongLastAxis() received an unsupported tensor rank: ` +\n `${array.rank}`);\n }\n });\n}\n/**\n * Do slicing along the sepcified axis.\n * @param array input `tf.Tensor`.\n * @param start starting index, inclusive.\n * @param size of the slice along the chosen axis.\n * @param choose an axis.\n * @returns result of the slicing.\n * @throws ValueError: If `array` is of an unsupported subtype of `tf.Tensor`.\n */\nexport function sliceAlongAxis(array, start, size, axis) {\n return tidy(() => {\n switch (array.rank) {\n case 1:\n return tfc.slice1d(array, start, size);\n case 2:\n switch (axis) {\n case 1:\n return sliceAlongFirstAxis(array, start, size);\n case 2:\n return sliceAlongLastAxis(array, start, size);\n default:\n throw new ValueError(`The axis is not within the rank of the tensor ` +\n `${axis}`);\n }\n case 3:\n switch (axis) {\n case 1:\n return sliceAlongFirstAxis(array, start, size);\n case 2:\n return tfc.slice3d(array, [0, start, 0], [array.shape[0], size, array.shape[2]]);\n case 3:\n return sliceAlongLastAxis(array, start, size);\n default:\n throw new ValueError(`The axis is not within the rank of the tensor ` +\n `${axis}`);\n }\n case 4:\n switch (axis) {\n case 1:\n return sliceAlongFirstAxis(array, start, size);\n case 2:\n return tfc.slice4d(array, [0, start, 0, 0], [array.shape[0], size, array.shape[2], array.shape[3]]);\n case 3:\n return tfc.slice4d(array, [0, 0, start, 0], [array.shape[0], array.shape[1], size, array.shape[3]]);\n case 
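The shape bookkeeping of expandDims/repeat/batchFlatten and the first-axis slicing above can be reproduced with public tfjs-core ops rather than these internal backend helpers; a sketch, assuming tf is @tensorflow/tfjs:

const tf = require('@tensorflow/tfjs');
const x = tf.tensor2d([[1, 2, 3], [4, 5, 6]]);       // shape [2, 3]
const r = tf.tile(x.expandDims(1), [1, 4, 1]);       // like repeat(x, 4) -> [2, 4, 3]
const flat = r.reshape([r.shape[0], -1]);            // like batchFlatten(r) -> [2, 12]
const head = tf.slice(r, [0, 0, 0], [1, 4, 3]);      // like sliceAlongFirstAxis(r, 0, 1)
console.log(r.shape, flat.shape, head.shape);        // [2,4,3] [2,12] [1,4,3]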
4:\n return sliceAlongLastAxis(array, start, size);\n default:\n throw new ValueError(`The axis is not within the rank of the tensor ` +\n `${axis}`);\n }\n default:\n throw new ValueError(`sliceAlongLastAxis() received an unsupported tensor rank: ` +\n `${array.rank}`);\n }\n });\n}\n/**\n * Concatenates a list of tensors alongside the specified axis.\n * @param tensors `Array` of tensors to concatenate.\n * @param axis Concatenation axis.\n * @returns The result of the concatenation.\n */\nexport function concatenate(tensors, axis = -1) {\n let rank;\n if (axis < 0) {\n rank = tensors[0].rank;\n if (rank !== 0) {\n axis = rank;\n }\n else {\n axis = 0;\n }\n }\n if (axis === tensors[0].rank) {\n // Porting Note: This is necessary because tfc.concat() requires axis to be\n // in the interval [-rank, rank).\n axis = -1;\n }\n // Porting Note: Sparse concat is not supported yet.\n return tfc.concat(tensors, axis);\n}\n/**\n * Concatenate two arrays along the first dimension.\n * @param a The 1st `tf.Tensor` to concatenate.\n * @param b The 2nd `tf.Tensor` to concatenate.\n * @returns Result of the concatenation.\n * @throws ValueError: If `a` is of an unsupported subtype of `tf.Tensor`.\n */\nexport function concatAlongFirstAxis(a, b) {\n switch (a.rank) {\n case 1:\n return tfc.concat1d([a, b]);\n case 2:\n return tfc.concat2d([a, b], 0);\n case 3:\n return tfc.concat3d([a, b], 0);\n case 4:\n return tfc.concat4d([a, b], 0);\n default:\n throw new ValueError(`concatAlongFirstAxis() received an unsupported ` +\n `tensor rank: ${a.rank}`);\n }\n}\n/**\n * Creates a tensor by tiling `x` by `n`.\n * @param x A tensor.\n * @param n An Array of integers or a single integer. If an Array, the length\n * must be the same as the number of dimensions in `x`. If a single integer,\n * it will be treated as an Array of length 1.\n */\nexport function tile(x, n) {\n if (!Array.isArray(n)) {\n n = [n];\n }\n if (x.rank !== n.length) {\n throw new ValueError(`The length of input n (${n.length}) does not match ` +\n `the number of dimensions in input x (${x.rank})`);\n }\n return tfc.tile(x, n);\n}\n/* Creation of random tensors. */\n/**\n * Get a tensor with normal distribution of values.\n *\n * @param shape Shape of the tensor.\n * @param mean mean value of the normal distribution.\n * @param stddev standard deviation of the normal distribution.\n * @param dtype\n * @param seed\n * @return The normal tensor.\n */\nexport function randomNormal(shape, mean = 0.0, stddev = 1.0, dtype, seed) {\n return tfc.randomNormal(shape, mean, stddev, dtype, seed);\n}\n/* Linear Algebra */\n/**\n * Multiply two tensors and returns the result as a tensor.\n *\n * For 2D tensors, this is equivalent to matrix multiplication (matMul).\n * For tensors of higher ranks, it follows the Theano behavior,\n * (e.g. `(2, 3) * (4, 3, 5) -> (2, 4, 5)`). 
From the Theano documentation:\n *\n * For N dimensions it is a sum product over the last axis of x and the\n * second-to-last of y:\n *\n * @param a A tensor of at least rank 2.\n * @param b A tensor of at least rank 2.\n * @param activation (optional) A string identifying the activation\n * function.\n * @return Result of the dot operation.\n */\nexport function dot(a, b, activation, bias) {\n if ((a.rank < 2) || (b.rank < 2)) {\n throw new NotImplementedError(`dot requires both inputs to be rank >= 2` +\n ` but got x shape = ${a.shape} and y shape = ${b.shape}`);\n }\n if (b.rank >= 3) {\n const xLastDim = a.shape.slice(-1)[0];\n const ySecondLastDim = b.shape.slice(-2)[0];\n if (xLastDim !== ySecondLastDim) {\n throw new NotImplementedError(`If rank y >= 3, then the second last dim` +\n ` of y must equal the last dim of x but got x shape = ${a.shape} and ` +\n ` y shape = ${b.shape}`);\n }\n }\n // Handle basic 2D x 2D case.\n if ((a.rank === 2) && (b.rank === 2)) {\n const transposeA = false;\n const transposeB = false;\n // tfc.fused.matMul only fuses certain activation functions. Unsupported\n // activation functions are treated as 'linear' activations, which is\n // equivalent to a no-op.\n return tfc.fused.matMul({\n a,\n b: b,\n transposeA,\n transposeB,\n bias: bias ? reshapeBias(a.rank, bias, imageDataFormat()) : null,\n activation\n });\n }\n else {\n // Reshape x into the analogous 2D Tensor.\n const aFirstDims = a.shape.slice(); // Holds all but the last dim of x.\n const aLastDim = aFirstDims.pop();\n a = a.reshape([-1, aLastDim]);\n // Reshape y into the analogous 2D Tensor, and keep track of the\n // required dimensions to reproduce the output shape.\n const bShape = b.shape.slice();\n const bLastDim = bShape.pop();\n const ySecondLastDim = bShape.pop();\n const yOtherDims = [...bShape, bLastDim];\n // permutation should be like [r-2, 0, 1, 2, ... r-4, r-3, r-1]\n // where r is the rank of y.\n const perm = Array.from({ length: b.rank }, (_, i) => {\n if (i === 0) {\n return b.rank - 2;\n }\n else if (i <= b.rank - 2) {\n return i - 1;\n }\n return i;\n });\n b = b.transpose(perm).reshape([ySecondLastDim, -1]);\n // Multiply x and y as 2D Tensors, and then reshape back to original.\n const outputShape = [...aFirstDims, ...yOtherDims];\n const transposeA = false;\n const transposeB = false;\n return tfc.fused\n .matMul({\n a,\n b,\n transposeA,\n transposeB,\n bias: bias ? reshapeBias(a.rank, bias, imageDataFormat()) : null,\n activation\n })\n .reshape(outputShape);\n }\n}\n/**\n * Compute the sign Tensor of an input Tensor.\n *\n * Elements of the input `tf.Tensor` that are === 0 are mapped to 0.\n * Elements of the input `tf.Tensor` that are > 0 are mapped to 1.\n * Elements of the input `tf.Tensor` that are < 0 are mapped to -1.\n *\n * @param x Input `tf.Tensor`.\n * @return The sign `tf.Tensor`.\n */\nexport function sign(x) {\n // TODO(cais): Move to the core.\n return tidy(() => {\n const zerosLikeX = coreZerosLike(x);\n const onesLikeX = coreOnesLike(x);\n return where(tfc.equal(x, zerosLikeX), zerosLikeX, where(tfc.greater(x, coreZerosLike(x)), onesLikeX, tfc.mul(-1, onesLikeX)));\n });\n}\n/**\n * Computes the one-hot representation of an integer tensor.\n * @param indices nD integer tensor of shape\n * `(batch_size, dim1, dim2, ... dim(n-1))`\n * @param numClasses Integer, number of classes to consider.\n * @returns (n + 1)D one hot representation of the input\n * with shape `(batch_size, dim1, dim2, ... 
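dot() above falls back to a fused matrix multiply in the 2D case and a reshape/transpose dance for higher ranks. A small shape check using the public matMul op (assuming tf is @tensorflow/tfjs):

const tf = require('@tensorflow/tfjs');
// 2D case: dot() is ordinary matrix multiplication.
const a = tf.randomNormal([2, 3]);
const b = tf.randomNormal([3, 5]);
console.log(tf.matMul(a, b).shape);   // [2, 5]
// For rank >= 3, the docstring's rule contracts x's last axis with y's
// second-to-last axis, e.g. shapes [2, 3] x [4, 3, 5] -> [2, 4, 5].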
dim(n-1), num_classes)`\n */\nexport function oneHot(indices, numClasses) {\n return tidy(() => {\n if (indices.rank !== 1) {\n throw new Error('Only 1D one-hot tensors are supported in the ' +\n 'deeplearn backend, at present.');\n }\n indices = indices.toInt();\n return tfc.oneHot(indices, numClasses).toFloat();\n });\n}\n/* Elementary math functions. */\n/**\n * Retrieves the elements of indices `indices` in the tensor `reference`.\n * @param reference A tensor.\n * @param indices An integer tensor of indices or an `Array` of integers.\n * @param axis Axis along which to perform the gather operation.\n * @returns The result of the gathering as a tensor.\n */\nexport function gather(reference, indices, axis) {\n return tidy(() => {\n if (Array.isArray(indices)) {\n indices = tensor1d(indices, 'int32');\n }\n else {\n indices = indices.toInt();\n }\n return tfc.gather(reference, indices, axis);\n });\n}\n/**\n * Element-wise square.\n * @param x Input tensor.\n * @return element-wise x^2\n */\nexport function square(x) {\n return tfc.mul(x, x);\n}\n/**\n * Element-wise exponentiation.\n *\n * Porting Note: In PyKeras, `a` (the exponent) is a Python integer, which\n * takes advatnage of the backend's (e.g., TensorFlow's) automatic\n * conversion to tensor. Here we allow `a` to be either a number or a tensor.\n *\n * @param x The base tensor.\n * @param a The exponent, tensor or number. If a number, it is rounded to the\n * nearest integer and converted to a tensor.\n * @returns A tensor of the same shape as `x`.\n */\nexport function pow(x, a) {\n return tidy(() => {\n if (typeof (a) === 'number') {\n a = scalar(Math.round(a), 'int32');\n }\n if (a.dtype !== 'int32') {\n throw new NotImplementedError(`Non-int32 dtype (${a.dtype}) is not supported by pow() yet`);\n }\n return tfc.pow(x, a);\n });\n}\n/**\n * Reshapes bias tensor according to rank of x.\n */\nfunction reshapeBias(xRank, bias, dataFormat) {\n const biasShape = bias.shape;\n if (bias.rank !== 1 && bias.rank !== xRank) {\n throw new ValueError(`Unexpected bias dimensions: ${bias.rank}` +\n `; expected it to be 1 or ${xRank}`);\n }\n if (xRank === 5) {\n if (dataFormat === 'channelsFirst') {\n if (biasShape.length === 1) {\n return bias.reshape([1, biasShape[0], 1, 1, 1]);\n }\n else {\n return bias.reshape([1, biasShape[3], biasShape[0], biasShape[1], biasShape[2]]);\n }\n }\n else if (dataFormat === 'channelsLast') {\n if (biasShape.length === 1) {\n return bias.reshape([1, 1, 1, 1, biasShape[0]]);\n }\n else {\n return bias.reshape([1].concat(biasShape));\n }\n }\n }\n else if (xRank === 4) {\n if (dataFormat === 'channelsFirst') {\n if (biasShape.length === 1) {\n return bias.reshape([1, biasShape[0], 1, 1]);\n }\n else {\n return bias.reshape([1, biasShape[2], biasShape[0], biasShape[1]]);\n }\n }\n else if (dataFormat === 'channelsLast') {\n if (biasShape.length === 1) {\n return bias.reshape([1, 1, 1, biasShape[0]]);\n }\n else {\n return bias.reshape([1].concat(biasShape));\n }\n }\n }\n else if (xRank === 3) {\n if (dataFormat === 'channelsFirst') {\n if (biasShape.length === 1) {\n return bias.reshape([1, biasShape[0], 1]);\n }\n else {\n return bias.reshape([1, biasShape[1], biasShape[0]]);\n }\n }\n else if (dataFormat === 'channelsLast') {\n if (biasShape.length === 1) {\n return bias.reshape([1, 1, biasShape[0]]);\n }\n else {\n return bias.reshape([1].concat(biasShape));\n }\n }\n }\n else if (xRank < 3) {\n return bias;\n }\n throw new ValueError(`Unsupported input rank by biasAdd: ${bias.rank}`);\n}\n/* 
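oneHot() and gather() above are thin wrappers over the core ops; the same behavior with public tfjs calls (assuming tf is @tensorflow/tfjs):

const tf = require('@tensorflow/tfjs');
// oneHot: rank-1 class indices become a rank-2 tensor with a single 1 per row.
tf.oneHot(tf.tensor1d([0, 2, 1], 'int32'), 3).print();   // [[1,0,0],[0,0,1],[0,1,0]]
// gather: pick rows 0 and 2 along the default axis 0.
const ref = tf.tensor2d([[1, 2], [3, 4], [5, 6]]);
tf.gather(ref, tf.tensor1d([0, 2], 'int32')).print();    // [[1,2],[5,6]]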
Neural-network operations. */\n/**\n * Add a bias to a tensor.\n *\n * @param x The tensor to add the bias to.\n * @param bias The bias to add to `x`. Must be 1D or the same rank as `x`.\n * @return Result of the bias adding.\n * @throws ValueError: If the rank of `bias` is incorrect.\n */\nexport function biasAdd(x, bias, dataFormat) {\n return tidy(() => {\n if (dataFormat == null) {\n dataFormat = imageDataFormat();\n }\n checkDataFormat(dataFormat);\n return x.add(reshapeBias(x.rank, bias, dataFormat));\n });\n}\n/**\n * Exponential linear unit (ELU).\n * @param x A tensor or variable to compute the activation function for.\n * @param alpha: A scalar, a scaling factor for the negative section.\n * @return Output of the ELU operation.\n */\nexport function elu(x, alpha = 1) {\n // TODO(cais): Add support for alpha values other than 1.\n if (alpha !== 1) {\n throw new NotImplementedError(`Support for alpha values other than 1 (${alpha}) is not implemented ` +\n `yet.`);\n }\n return tfc.elu(x);\n}\n/**\n * Softsign of a tensor.\n *\n * Defined as x / (abs(x) + 1), element-wise.\n *\n * @param x: Input.\n * @returns Output.\n */\nexport function softsign(x) {\n return tidy(() => tfc.div(x, tfc.abs(x).add(1)));\n}\n/**\n * Sets entries in `x` to zero at random, while scaling the entire tensor.\n *\n * @param x input tensor.\n * @param level fraction of the entries in the tensor that will be set to 0.\n * @param noiseShape shape of randomly generated keep/drop flags, must be\n * broadcastable to the shape of `x`. Optional.\n * @param seed random seed to ensure determinism. Optional.\n * @returns Result of the dropout operation.\n */\nexport function dropout(x, level, noiseShape, seed) {\n return tidy(() => tfc.dropout(x, level, noiseShape, seed));\n}\n/**\n * Element-wise, segment-wise linear approximation of sigmoid.\n *\n * Returns `0.` if `x < -2.5`, `1.` if `x > 2.5`.\n * In `-2.5 <= x <= 2.5`, returns `0.2 * x + 0.5`.\n *\n * @param x Input tensor.\n * @returns Output tensor.\n */\nexport function hardSigmoid(x) {\n return tidy(() => {\n const y = tfc.add(.5, tfc.mul(.2, x));\n return tfc.clipByValue(y, 0, 1);\n });\n}\n/**\n * Invoke `x` in the training phase, and `alt` otherwise.\n *\n * Porting Note: We do not create placeholder tensors for the `training`\n * boolean flag here, because there is no such thing in the TF.js imperative\n * backend.\n *\n * @param x The function to invoke iff `training` is `true`.\n * @param alt The function to invoke iff `training` is `false`.\n * @param training Boolean flag for whether training phase is active.\n * @returns The return value of `x()` if `training` is `true`, or the return\n * value of `alt()` if `training` is `false`.\n */\nexport function inTrainPhase(x, alt, training = false) {\n return training ? 
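The element-wise definitions of hardSigmoid and softsign above are easy to sanity-check with core ops (assuming tf is @tensorflow/tfjs):

const tf = require('@tensorflow/tfjs');
const x = tf.tensor1d([-3, -1, 0, 1, 3]);
// hardSigmoid: clip(0.2 * x + 0.5, 0, 1) -> [0, 0.3, 0.5, 0.7, 1]
tf.clipByValue(tf.add(0.5, tf.mul(0.2, x)), 0, 1).print();
// softsign: x / (|x| + 1) -> [-0.75, -0.5, 0, 0.5, 0.75]
tf.div(x, tf.abs(x).add(1)).print();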
x() : alt();\n}\n//# sourceMappingURL=tfjs_backend.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nexport const VALID_FAN_MODE_VALUES = ['fanIn', 'fanOut', 'fanAvg'];\nexport const VALID_DISTRIBUTION_VALUES = ['normal', 'uniform', 'truncatedNormal'];\n// We can't easily extract a string[] from the string union type, but we can\n// recapitulate the list, enforcing at compile time that the values are valid\n// and that we have the right number of them.\n/**\n * A string array of valid Initializer class names.\n *\n * This is guaranteed to match the `InitializerClassName` union type.\n */\nexport const initializerClassNames = [\n 'Zeros', 'Ones', 'Constant', 'RandomNormal', 'RandomUniform',\n 'TruncatedNormal', 'VarianceScaling', 'Orthogonal', 'Identity'\n];\n//# sourceMappingURL=initializer_config.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport { eye, linalg, mul, ones, randomUniform, scalar, serialization, tidy, truncatedNormal, zeros } from '@tensorflow/tfjs-core';\nimport * as K from './backend/tfjs_backend';\nimport { checkDataFormat } from './common';\nimport { NotImplementedError, ValueError } from './errors';\nimport { VALID_DISTRIBUTION_VALUES, VALID_FAN_MODE_VALUES } from './keras_format/initializer_config';\nimport { checkStringTypeUnionValue, deserializeKerasObject, serializeKerasObject } from './utils/generic_utils';\nimport { arrayProd } from './utils/math_utils';\nexport function checkFanMode(value) {\n checkStringTypeUnionValue(VALID_FAN_MODE_VALUES, 'FanMode', value);\n}\nexport function checkDistribution(value) {\n checkStringTypeUnionValue(VALID_DISTRIBUTION_VALUES, 'Distribution', value);\n}\n/**\n * Initializer base class.\n *\n * @doc {\n * heading: 'Initializers', subheading: 'Classes', namespace: 'initializers'}\n */\nexport class Initializer extends serialization.Serializable {\n fromConfigUsesCustomObjects() {\n return false;\n }\n getConfig() {\n return {};\n }\n}\nexport class Zeros extends Initializer {\n apply(shape, dtype) {\n return zeros(shape, dtype);\n }\n}\n/** @nocollapse */\nZeros.className = 'Zeros';\nserialization.registerClass(Zeros);\nexport class Ones extends Initializer {\n apply(shape, dtype) {\n return ones(shape, dtype);\n }\n}\n/** @nocollapse */\nOnes.className = 'Ones';\nserialization.registerClass(Ones);\nexport class Constant extends Initializer {\n constructor(args) {\n super();\n if (typeof args !== 'object') {\n throw new ValueError(`Expected argument of type ConstantConfig but got ${args}`);\n }\n if (args.value === undefined) {\n throw new ValueError(`config must have value set but got ${args}`);\n }\n this.value = args.value;\n }\n apply(shape, dtype) {\n return tidy(() => mul(scalar(this.value), ones(shape, dtype)));\n }\n getConfig() {\n return {\n value: this.value,\n };\n }\n}\n/** @nocollapse */\nConstant.className = 'Constant';\nserialization.registerClass(Constant);\nexport class RandomUniform extends Initializer {\n constructor(args) {\n super();\n this.DEFAULT_MINVAL = -0.05;\n this.DEFAULT_MAXVAL = 0.05;\n this.minval = 
args.minval || this.DEFAULT_MINVAL;\n this.maxval = args.maxval || this.DEFAULT_MAXVAL;\n this.seed = args.seed;\n }\n apply(shape, dtype) {\n return randomUniform(shape, this.minval, this.maxval, dtype);\n }\n getConfig() {\n return { minval: this.minval, maxval: this.maxval, seed: this.seed };\n }\n}\n/** @nocollapse */\nRandomUniform.className = 'RandomUniform';\nserialization.registerClass(RandomUniform);\nexport class RandomNormal extends Initializer {\n constructor(args) {\n super();\n this.DEFAULT_MEAN = 0.;\n this.DEFAULT_STDDEV = 0.05;\n this.mean = args.mean || this.DEFAULT_MEAN;\n this.stddev = args.stddev || this.DEFAULT_STDDEV;\n this.seed = args.seed;\n }\n apply(shape, dtype) {\n dtype = dtype || 'float32';\n if (dtype !== 'float32' && dtype !== 'int32') {\n throw new NotImplementedError(`randomNormal does not support dType ${dtype}.`);\n }\n return K.randomNormal(shape, this.mean, this.stddev, dtype, this.seed);\n }\n getConfig() {\n return { mean: this.mean, stddev: this.stddev, seed: this.seed };\n }\n}\n/** @nocollapse */\nRandomNormal.className = 'RandomNormal';\nserialization.registerClass(RandomNormal);\nexport class TruncatedNormal extends Initializer {\n constructor(args) {\n super();\n this.DEFAULT_MEAN = 0.;\n this.DEFAULT_STDDEV = 0.05;\n this.mean = args.mean || this.DEFAULT_MEAN;\n this.stddev = args.stddev || this.DEFAULT_STDDEV;\n this.seed = args.seed;\n }\n apply(shape, dtype) {\n dtype = dtype || 'float32';\n if (dtype !== 'float32' && dtype !== 'int32') {\n throw new NotImplementedError(`truncatedNormal does not support dType ${dtype}.`);\n }\n return truncatedNormal(shape, this.mean, this.stddev, dtype, this.seed);\n }\n getConfig() {\n return { mean: this.mean, stddev: this.stddev, seed: this.seed };\n }\n}\n/** @nocollapse */\nTruncatedNormal.className = 'TruncatedNormal';\nserialization.registerClass(TruncatedNormal);\nexport class Identity extends Initializer {\n constructor(args) {\n super();\n this.gain = args.gain != null ? 
args.gain : 1.0;\n }\n apply(shape, dtype) {\n return tidy(() => {\n if (shape.length !== 2 || shape[0] !== shape[1]) {\n throw new ValueError('Identity matrix initializer can only be used for' +\n ' 2D square matrices.');\n }\n else {\n return mul(this.gain, eye(shape[0]));\n }\n });\n }\n getConfig() {\n return { gain: this.gain };\n }\n}\n/** @nocollapse */\nIdentity.className = 'Identity';\nserialization.registerClass(Identity);\n/**\n * Computes the number of input and output units for a weight shape.\n * @param shape Shape of weight.\n * @param dataFormat data format to use for convolution kernels.\n * Note that all kernels in Keras are standardized on the\n * CHANNEL_LAST ordering (even when inputs are set to CHANNEL_FIRST).\n * @return An length-2 array: fanIn, fanOut.\n */\nfunction computeFans(shape, dataFormat = 'channelsLast') {\n let fanIn;\n let fanOut;\n checkDataFormat(dataFormat);\n if (shape.length === 2) {\n fanIn = shape[0];\n fanOut = shape[1];\n }\n else if ([3, 4, 5].indexOf(shape.length) !== -1) {\n if (dataFormat === 'channelsFirst') {\n const receptiveFieldSize = arrayProd(shape, 2);\n fanIn = shape[1] * receptiveFieldSize;\n fanOut = shape[0] * receptiveFieldSize;\n }\n else if (dataFormat === 'channelsLast') {\n const receptiveFieldSize = arrayProd(shape, 0, shape.length - 2);\n fanIn = shape[shape.length - 2] * receptiveFieldSize;\n fanOut = shape[shape.length - 1] * receptiveFieldSize;\n }\n }\n else {\n const shapeProd = arrayProd(shape);\n fanIn = Math.sqrt(shapeProd);\n fanOut = Math.sqrt(shapeProd);\n }\n return [fanIn, fanOut];\n}\nexport class VarianceScaling extends Initializer {\n /**\n * Constructor of VarianceScaling.\n * @throws ValueError for invalid value in scale.\n */\n constructor(args) {\n super();\n if (args.scale < 0.0) {\n throw new ValueError(`scale must be a positive float. Got: ${args.scale}`);\n }\n this.scale = args.scale == null ? 1.0 : args.scale;\n this.mode = args.mode == null ? 'fanIn' : args.mode;\n checkFanMode(this.mode);\n this.distribution =\n args.distribution == null ? 'normal' : args.distribution;\n checkDistribution(this.distribution);\n this.seed = args.seed;\n }\n apply(shape, dtype) {\n const fans = computeFans(shape);\n const fanIn = fans[0];\n const fanOut = fans[1];\n let scale = this.scale;\n if (this.mode === 'fanIn') {\n scale /= Math.max(1, fanIn);\n }\n else if (this.mode === 'fanOut') {\n scale /= Math.max(1, fanOut);\n }\n else {\n scale /= Math.max(1, (fanIn + fanOut) / 2);\n }\n if (this.distribution === 'normal') {\n const stddev = Math.sqrt(scale);\n dtype = dtype || 'float32';\n if (dtype !== 'float32' && dtype !== 'int32') {\n throw new NotImplementedError(`${this.getClassName()} does not support dType ${dtype}.`);\n }\n return truncatedNormal(shape, 0, stddev, dtype, this.seed);\n }\n else {\n const limit = Math.sqrt(3 * scale);\n return randomUniform(shape, -limit, limit, dtype);\n }\n }\n getConfig() {\n return {\n scale: this.scale,\n mode: this.mode,\n distribution: this.distribution,\n seed: this.seed\n };\n }\n}\n/** @nocollapse */\nVarianceScaling.className = 'VarianceScaling';\nserialization.registerClass(VarianceScaling);\nexport class GlorotUniform extends VarianceScaling {\n /**\n * Constructor of GlorotUniform\n * @param scale\n * @param mode\n * @param distribution\n * @param seed\n */\n constructor(args) {\n super({\n scale: 1.0,\n mode: 'fanAvg',\n distribution: 'uniform',\n seed: args == null ? 
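VarianceScaling above first rescales `scale` by the chosen fan mode (via computeFans) and then samples from either a truncated normal or a uniform distribution. A sketch of that arithmetic for a hypothetical dense kernel of shape [256, 128], alongside the public factory call (assuming tf is @tensorflow/tfjs):

const tf = require('@tensorflow/tfjs');
const scale = 1.0, fanIn = 256, fanOut = 128;   // computeFans([256, 128]) -> [256, 128]
const stddevFanIn = Math.sqrt(scale / Math.max(1, fanIn));                     // 'normal', mode 'fanIn'
const limitFanAvg = Math.sqrt(3 * scale / Math.max(1, (fanIn + fanOut) / 2));  // 'uniform', mode 'fanAvg'
const init = tf.initializers.varianceScaling({ scale: 1.0, mode: 'fanAvg', distribution: 'uniform' });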
null : args.seed\n });\n }\n getClassName() {\n // In Python Keras, GlorotUniform is not a class, but a helper method\n // that creates a VarianceScaling object. Use 'VarianceScaling' as\n // class name to be compatible with that.\n return VarianceScaling.className;\n }\n}\n/** @nocollapse */\nGlorotUniform.className = 'GlorotUniform';\nserialization.registerClass(GlorotUniform);\nexport class GlorotNormal extends VarianceScaling {\n /**\n * Constructor of GlorotNormal.\n * @param scale\n * @param mode\n * @param distribution\n * @param seed\n */\n constructor(args) {\n super({\n scale: 1.0,\n mode: 'fanAvg',\n distribution: 'normal',\n seed: args == null ? null : args.seed\n });\n }\n getClassName() {\n // In Python Keras, GlorotNormal is not a class, but a helper method\n // that creates a VarianceScaling object. Use 'VarianceScaling' as\n // class name to be compatible with that.\n return VarianceScaling.className;\n }\n}\n/** @nocollapse */\nGlorotNormal.className = 'GlorotNormal';\nserialization.registerClass(GlorotNormal);\nexport class HeNormal extends VarianceScaling {\n constructor(args) {\n super({\n scale: 2.0,\n mode: 'fanIn',\n distribution: 'normal',\n seed: args == null ? null : args.seed\n });\n }\n getClassName() {\n // In Python Keras, HeNormal is not a class, but a helper method\n // that creates a VarianceScaling object. Use 'VarianceScaling' as\n // class name to be compatible with that.\n return VarianceScaling.className;\n }\n}\n/** @nocollapse */\nHeNormal.className = 'HeNormal';\nserialization.registerClass(HeNormal);\nexport class HeUniform extends VarianceScaling {\n constructor(args) {\n super({\n scale: 2.0,\n mode: 'fanIn',\n distribution: 'uniform',\n seed: args == null ? null : args.seed\n });\n }\n getClassName() {\n // In Python Keras, HeUniform is not a class, but a helper method\n // that creates a VarianceScaling object. Use 'VarianceScaling' as\n // class name to be compatible with that.\n return VarianceScaling.className;\n }\n}\n/** @nocollapse */\nHeUniform.className = 'HeUniform';\nserialization.registerClass(HeUniform);\nexport class LeCunNormal extends VarianceScaling {\n constructor(args) {\n super({\n scale: 1.0,\n mode: 'fanIn',\n distribution: 'normal',\n seed: args == null ? null : args.seed\n });\n }\n getClassName() {\n // In Python Keras, LeCunNormal is not a class, but a helper method\n // that creates a VarianceScaling object. Use 'VarianceScaling' as\n // class name to be compatible with that.\n return VarianceScaling.className;\n }\n}\n/** @nocollapse */\nLeCunNormal.className = 'LeCunNormal';\nserialization.registerClass(LeCunNormal);\nexport class LeCunUniform extends VarianceScaling {\n constructor(args) {\n super({\n scale: 1.0,\n mode: 'fanIn',\n distribution: 'uniform',\n seed: args == null ? null : args.seed\n });\n }\n getClassName() {\n // In Python Keras, LeCunUniform is not a class, but a helper method\n // that creates a VarianceScaling object. Use 'VarianceScaling' as\n // class name to be compatible with that.\n return VarianceScaling.className;\n }\n}\n/** @nocollapse */\nLeCunUniform.className = 'LeCunNormal';\nserialization.registerClass(LeCunUniform);\nexport class Orthogonal extends Initializer {\n constructor(args) {\n super();\n this.DEFAULT_GAIN = 1;\n this.gain = args.gain == null ? 
this.DEFAULT_GAIN : args.gain;\n this.seed = args.seed;\n if (this.seed != null) {\n throw new NotImplementedError('Random seed is not implemented for Orthogonal Initializer yet.');\n }\n }\n apply(shape, dtype) {\n return tidy(() => {\n if (shape.length < 2) {\n throw new NotImplementedError('Shape must be at least 2D.');\n }\n if (shape[0] * shape[1] > 2000) {\n console.warn(`Orthogonal initializer is being called on a matrix with more ` +\n `than 2000 (${shape[0] * shape[1]}) elements: ` +\n `Slowness may result.`);\n }\n // TODO(cais): Add seed support.\n const normalizedShape = shape[0] > shape[1] ? [shape[1], shape[0]] : shape;\n const a = K.randomNormal(normalizedShape, 0, 1, 'float32');\n let q = linalg.gramSchmidt(a);\n if (shape[0] > shape[1]) {\n q = q.transpose();\n }\n return mul(this.gain, q);\n });\n }\n getConfig() {\n return {\n gain: this.gain,\n seed: this.seed,\n };\n }\n}\n/** @nocollapse */\nOrthogonal.className = 'Orthogonal';\nserialization.registerClass(Orthogonal);\n// Maps the JavaScript-like identifier keys to the corresponding registry\n// symbols.\nexport const INITIALIZER_IDENTIFIER_REGISTRY_SYMBOL_MAP = {\n 'constant': 'Constant',\n 'glorotNormal': 'GlorotNormal',\n 'glorotUniform': 'GlorotUniform',\n 'heNormal': 'HeNormal',\n 'heUniform': 'HeUniform',\n 'identity': 'Identity',\n 'leCunNormal': 'LeCunNormal',\n 'leCunUniform': 'LeCunUniform',\n 'ones': 'Ones',\n 'orthogonal': 'Orthogonal',\n 'randomNormal': 'RandomNormal',\n 'randomUniform': 'RandomUniform',\n 'truncatedNormal': 'TruncatedNormal',\n 'varianceScaling': 'VarianceScaling',\n 'zeros': 'Zeros'\n};\nfunction deserializeInitializer(config, customObjects = {}) {\n return deserializeKerasObject(config, serialization.SerializationMap.getMap().classNameMap, customObjects, 'initializer');\n}\nexport function serializeInitializer(initializer) {\n return serializeKerasObject(initializer);\n}\nexport function getInitializer(identifier) {\n if (typeof identifier === 'string') {\n const className = identifier in INITIALIZER_IDENTIFIER_REGISTRY_SYMBOL_MAP ?\n INITIALIZER_IDENTIFIER_REGISTRY_SYMBOL_MAP[identifier] :\n identifier;\n /* We have four 'helper' classes for common initializers that\n all get serialized as 'VarianceScaling' and shouldn't go through\n the deserializeInitializer pathway. 
*/\n if (className === 'GlorotNormal') {\n return new GlorotNormal();\n }\n else if (className === 'GlorotUniform') {\n return new GlorotUniform();\n }\n else if (className === 'HeNormal') {\n return new HeNormal();\n }\n else if (className === 'HeUniform') {\n return new HeUniform();\n }\n else if (className === 'LeCunNormal') {\n return new LeCunNormal();\n }\n else if (className === 'LeCunUniform') {\n return new LeCunUniform();\n }\n else {\n const config = {};\n config['className'] = className;\n config['config'] = {};\n return deserializeInitializer(config);\n }\n }\n else if (identifier instanceof Initializer) {\n return identifier;\n }\n else {\n return deserializeInitializer(identifier);\n }\n}\n//# sourceMappingURL=initializers.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n// tslint:disable-next-line:max-line-length\nimport { Constant, GlorotNormal, GlorotUniform, HeNormal, HeUniform, Identity, LeCunNormal, LeCunUniform, Ones, Orthogonal, RandomNormal, RandomUniform, TruncatedNormal, VarianceScaling, Zeros } from './initializers';\n/**\n * Initializer that generates tensors initialized to 0.\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function zeros() {\n return new Zeros();\n}\n/**\n * Initializer that generates tensors initialized to 1.\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function ones() {\n return new Ones();\n}\n/**\n * Initializer that generates values initialized to some constant.\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function constant(args) {\n return new Constant(args);\n}\n/**\n * Initializer that generates random values initialized to a uniform\n * distribution.\n *\n * Values will be distributed uniformly between the configured minval and\n * maxval.\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function randomUniform(args) {\n return new RandomUniform(args);\n}\n/**\n * Initializer that generates random values initialized to a normal\n * distribution.\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function randomNormal(args) {\n return new RandomNormal(args);\n}\n/**\n * Initializer that generates random values initialized to a truncated normal.\n * distribution.\n *\n * These values are similar to values from a `RandomNormal` except that values\n * more than two standard deviations from the mean are discarded and re-drawn.\n * This is the recommended initializer for neural network weights and filters.\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function truncatedNormal(args) {\n return new TruncatedNormal(args);\n}\n/**\n * Initializer that generates the identity matrix.\n * Only use for square 2D matrices.\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function identity(args) {\n return new Identity(args);\n}\n/**\n * Initializer capable of adapting its scale to the shape of weights.\n * With distribution=NORMAL, samples are drawn from a truncated normal\n * distribution centered on zero, with `stddev = sqrt(scale / n)` where n is:\n * - number of input units in the weight tensor, if mode = FAN_IN.\n * - number of output units, if mode = FAN_OUT.\n * - average of the 
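As the comment above notes, the Glorot/He/LeCun helpers are thin wrappers around VarianceScaling and report 'VarianceScaling' as their class name for Keras compatibility, while string identifiers are resolved through INITIALIZER_IDENTIFIER_REGISTRY_SYMBOL_MAP. A quick check (assuming tf is @tensorflow/tfjs):

const tf = require('@tensorflow/tfjs');
const he = tf.initializers.heNormal({ seed: 42 });
console.log(he.getClassName());   // 'VarianceScaling', per the getClassName() override above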
numbers of input and output units, if mode = FAN_AVG.\n * With distribution=UNIFORM,\n * samples are drawn from a uniform distribution\n * within [-limit, limit], with `limit = sqrt(3 * scale / n)`.\n *\n * @doc {heading: 'Initializers',namespace: 'initializers'}\n */\nexport function varianceScaling(config) {\n return new VarianceScaling(config);\n}\n/**\n * Glorot uniform initializer, also called Xavier uniform initializer.\n * It draws samples from a uniform distribution within [-limit, limit]\n * where `limit` is `sqrt(6 / (fan_in + fan_out))`\n * where `fan_in` is the number of input units in the weight tensor\n * and `fan_out` is the number of output units in the weight tensor\n *\n * Reference:\n * Glorot & Bengio, AISTATS 2010\n * http://jmlr.org/proceedings/papers/v9/glorot10a/glorot10a.pdf.\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function glorotUniform(args) {\n return new GlorotUniform(args);\n}\n/**\n * Glorot normal initializer, also called Xavier normal initializer.\n * It draws samples from a truncated normal distribution centered on 0\n * with `stddev = sqrt(2 / (fan_in + fan_out))`\n * where `fan_in` is the number of input units in the weight tensor\n * and `fan_out` is the number of output units in the weight tensor.\n *\n * Reference:\n * Glorot & Bengio, AISTATS 2010\n * http://jmlr.org/proceedings/papers/v9/glorot10a/glorot10a.pdf\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function glorotNormal(args) {\n return new GlorotNormal(args);\n}\n/**\n * He normal initializer.\n *\n * It draws samples from a truncated normal distribution centered on 0\n * with `stddev = sqrt(2 / fanIn)`\n * where `fanIn` is the number of input units in the weight tensor.\n *\n * Reference:\n * He et al., http://arxiv.org/abs/1502.01852\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function heNormal(args) {\n return new HeNormal(args);\n}\n/**\n * He uniform initializer.\n *\n * It draws samples from a uniform distribution within [-limit, limit]\n * where `limit` is `sqrt(6 / fan_in)`\n * where `fanIn` is the number of input units in the weight tensor.\n *\n * Reference:\n * He et al., http://arxiv.org/abs/1502.01852\n *\n * @doc {heading: 'Initializers',namespace: 'initializers'}\n */\nexport function heUniform(args) {\n return new HeUniform(args);\n}\n/**\n * LeCun normal initializer.\n *\n * It draws samples from a truncated normal distribution centered on 0\n * with `stddev = sqrt(1 / fanIn)`\n * where `fanIn` is the number of input units in the weight tensor.\n *\n * References:\n * [Self-Normalizing Neural Networks](https://arxiv.org/abs/1706.02515)\n * [Efficient Backprop](http://yann.lecun.com/exdb/publis/pdf/lecun-98b.pdf)\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function leCunNormal(args) {\n return new LeCunNormal(args);\n}\n/**\n * LeCun uniform initializer.\n *\n * It draws samples from a uniform distribution in the interval\n * `[-limit, limit]` with `limit = sqrt(3 / fanIn)`,\n * where `fanIn` is the number of input units in the weight tensor.\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function leCunUniform(args) {\n return new LeCunUniform(args);\n}\n/**\n * Initializer that generates a random orthogonal matrix.\n *\n * Reference:\n * [Saxe et al., http://arxiv.org/abs/1312.6120](http://arxiv.org/abs/1312.6120)\n *\n * @doc {heading: 'Initializers', namespace: 'initializers'}\n */\nexport function 
orthogonal(args) {\n return new Orthogonal(args);\n}\n//# sourceMappingURL=exports_initializers.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Utilities related to persistent state in the backend.\n */\n/**\n * An ID to track `tf.SymbolicTensor`s and derived classes.\n * Required in different places in engine/topology.ts to identify unique\n * tensors.\n */\nlet _nextUniqueTensorId = 0;\nexport function getNextUniqueTensorId() {\n return _nextUniqueTensorId++;\n}\nconst _uidPrefixes = {};\n/**\n * Provides a unique UID given a string prefix.\n *\n * @param prefix\n */\nexport function getUid(prefix = '') {\n if (!(prefix in _uidPrefixes)) {\n _uidPrefixes[prefix] = 0;\n }\n _uidPrefixes[prefix] += 1;\n return prefix + _uidPrefixes[prefix].toString();\n}\n//# sourceMappingURL=state.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport { ValueError } from '../errors';\n// tslint:enable\n/**\n * Determine whether the input is an Array of Shapes.\n */\nexport function isArrayOfShapes(x) {\n return Array.isArray(x) && Array.isArray(x[0]);\n}\n/**\n * Special case of normalizing shapes to lists.\n *\n * @param x A shape or list of shapes to normalize into a list of Shapes.\n * @return A list of Shapes.\n */\nexport function normalizeShapeList(x) {\n if (x.length === 0) {\n return [];\n }\n if (!Array.isArray(x[0])) {\n return [x];\n }\n return x;\n}\n/**\n * Helper function to obtain exactly one Tensor.\n * @param xs: A single `tf.Tensor` or an `Array` of `tf.Tensor`s.\n * @return A single `tf.Tensor`. If `xs` is an `Array`, return the first one.\n * @throws ValueError: If `xs` is an `Array` and its length is not 1.\n */\nexport function getExactlyOneTensor(xs) {\n let x;\n if (Array.isArray(xs)) {\n if (xs.length !== 1) {\n throw new ValueError(`Expected Tensor length to be 1; got ${xs.length}`);\n }\n x = xs[0];\n }\n else {\n x = xs;\n }\n return x;\n}\n/**\n * Helper function to obtain exactly on instance of Shape.\n *\n * @param shapes Input single `Shape` or Array of `Shape`s.\n * @returns If input is a single `Shape`, return it unchanged. 
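The exported initializer factories above (or their string identifiers) are normally handed to layers. A minimal sketch, assuming tf is @tensorflow/tfjs; the layer sizes are illustrative only:

const tf = require('@tensorflow/tfjs');
const model = tf.sequential();
model.add(tf.layers.dense({
  units: 16,
  inputShape: [8],
  kernelInitializer: tf.initializers.truncatedNormal({ mean: 0, stddev: 0.05 }),
  biasInitializer: 'zeros',   // resolved via getInitializer()
}));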
If the input is\n * an `Array` containing exactly one instance of `Shape`, return the instance.\n * Otherwise, throw a `ValueError`.\n * @throws ValueError: If input is an `Array` of `Shape`s, and its length is not\n * 1.\n */\nexport function getExactlyOneShape(shapes) {\n if (Array.isArray(shapes) && Array.isArray(shapes[0])) {\n if (shapes.length === 1) {\n shapes = shapes;\n return shapes[0];\n }\n else {\n throw new ValueError(`Expected exactly 1 Shape; got ${shapes.length}`);\n }\n }\n else {\n return shapes;\n }\n}\n//# sourceMappingURL=types_utils.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Count the elements in an Array of LayerVariables.\n *\n * @param weights: The LayerVariables of which the constituent numbers are to\n * be counted.\n * @returns A count of the elements in all the LayerVariables\n */\nexport function countParamsInWeights(weights) {\n let count = 0;\n for (const weight of weights) {\n if (weight.shape.length === 0) {\n count += 1;\n }\n else {\n count += weight.shape.reduce((a, b) => a * b);\n }\n }\n return count;\n}\n//# sourceMappingURL=variable_utils.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { variableGrads } from '@tensorflow/tfjs-core';\nimport { getNextUniqueTensorId } from './backend/state';\nimport { getScopedTensorName, getUniqueTensorName } from './common';\nimport { NotImplementedError } from './errors';\nconst DEFAULT_VARIABLE_NAME_PREFIX = 'Variable';\n/**\n * A `tf.layers.LayerVariable` is similar to a `tf.Tensor` in that it has a\n * dtype and shape, but its value is mutable. The value is itself represented\n * as a`tf.Tensor`, and can be read with the `read()` method and updated with\n * the `write()` method.\n */\nexport class LayerVariable {\n /**\n * Construct Variable from a `tf.Tensor`.\n *\n * If not explicitly named, the Variable will be given a name with the\n * prefix 'Variable'. Variable names are unique. In the case of name\n * collision, suffixies '_' will be added to the name.\n *\n * @param val Initial value of the Variable.\n * @param name Name of the variable. If `null` or `undefined` is provided, it\n * will default a name with the prefix 'Variable'.\n * @param constraint Optional, projection function to be applied to the\n * variable after optimize updates\n * @throws ValueError if `name` is `null` or `undefined`.\n */\n constructor(val, dtype = 'float32', name = DEFAULT_VARIABLE_NAME_PREFIX, trainable = true, constraint = null) {\n this.dtype = dtype == null ? 'float32' : dtype;\n this.shape = val.shape;\n this.id = getNextUniqueTensorId();\n name = name == null ? 
DEFAULT_VARIABLE_NAME_PREFIX : name;\n this.originalName = getScopedTensorName(name);\n this.name = getUniqueTensorName(this.originalName);\n this.trainable_ = trainable;\n this.constraint = constraint;\n this.val = tfc.variable(val, this.trainable_, this.name, this.dtype);\n }\n /**\n * Get a snapshot of the Variable's value.\n *\n * The returned value is a snapshot of the Variable's value at the time of\n * the invocation. Future mutations in the value of the tensor will only\n * be reflected by future calls to this method.\n */\n read() {\n this.assertNotDisposed();\n return this.val;\n }\n /**\n * Update the value of the Variable.\n *\n * @param newVal: The new value to update to. Must be consistent with the\n * dtype and shape of the Variable.\n * @return This Variable.\n */\n write(newVal) {\n // TODO(cais): Once TF.js Core supports Tensor.dtype, check dtype match.\n this.assertNotDisposed();\n checkShapesMatch(this.val, newVal);\n // Skip updating if this is the exact same tensor.\n if (this.val.id !== newVal.id) {\n this.val.assign(newVal);\n if (this.constraint != null) {\n this.val.assign(this.constraint.apply(this.val));\n }\n }\n return this;\n }\n /**\n * Dispose this LayersVariable instance from memory.\n */\n dispose() {\n this.assertNotDisposed();\n this.val.dispose();\n }\n assertNotDisposed() {\n if (this.val.isDisposed) {\n throw new Error(`LayersVariable ${this.name} is already disposed.`);\n }\n }\n get trainable() {\n return this.trainable_;\n }\n set trainable(trainable) {\n this.trainable_ = trainable;\n this.val.trainable = trainable;\n }\n}\nfunction checkShapesMatch(x, y) {\n if (x.shape.toString() !== y.shape.toString()) {\n throw new Error('Shape mismatch: ' + JSON.stringify(x.shape) + ' vs. ' +\n JSON.stringify(y.shape));\n }\n}\n/**\n * Create a Variable.\n * @param x The initial value of the `Variable`.\n * @param dtype optional, the type of the variable.\n * @param name optional, the name of the variable, default provided by\n * Variable.\n * @param constraint optional, a constraint to be applied after every update.\n * @return The newly instantiated `Variable`.\n */\nexport function variable(x, dtype, name, constraint) {\n return new LayerVariable(x, dtype, name, true, constraint);\n}\n/**\n * Instantiates an all-zeros Variable and returns it.\n *\n * @param shape Shape of the tensor.\n * @param dtype DType of the tensor.\n * @param name Name of the tensor.\n * @return An all-zero Variable.\n */\nexport function zerosVariable(shape, dtype, name) {\n // TODO(cais): Implement logic for dtype.\n return new LayerVariable(tfc.zeros(shape), dtype, name);\n}\n/**\n * Instantiates an all-zeros tensor of the same shape as another tensor.\n *\n * @param x The other tensor.\n * @param dtype DType of the tensor.\n * @param name Name of the tensor.\n * @return A newly instantiated Variable.\n */\nexport function zerosLike(x, dtype, name) {\n return new LayerVariable(tfc.zerosLike(x), dtype, name);\n}\n/**\n * Instantiates an all-ones tensor and returns it.\n *\n * @param shape Shape of the tensor.\n * @param dtype DType of the tensor.\n * @param name Name of the tensor.\n * @return An all-ones Variable.\n */\nexport function onesVariable(shape, dtype, name) {\n // TODO(cais): Implement logic for dtype.\n const allocated = tfc.ones(shape);\n return new LayerVariable(allocated, dtype, name);\n}\n/**\n * Instantiates an all-ones tensor of the same shape as another tensor.\n *\n * @param x The other tensor.\n * @param dtype DType of the tensor.\n * @param name Name of the 
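LayerVariable above wraps a core variable, so its read()/write()/dispose() semantics can be approximated with the public tf.variable API; this is a sketch, not the internal class (assuming tf is @tensorflow/tfjs):

const tf = require('@tensorflow/tfjs');
const v = tf.variable(tf.zeros([2, 2]), /* trainable */ true, 'demo/kernel');
v.assign(tf.ones([2, 2]));    // like write(): shape and dtype must match
console.log(v.dataSync());    // Float32Array [1, 1, 1, 1]
v.dispose();                  // like dispose(): frees the backing tensor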
tensor.\n * @return A newly instantiated Variable.\n */\nexport function onesLike(x, dtype, name) {\n const allocated = tfc.onesLike(x);\n return new LayerVariable(allocated, dtype, name);\n}\n/**\n * Instantiate an identity matrix and returns it, as a Variable\n *\n * @param size Number of rows/columns.\n * @param dtype Data type of returned Variable.\n * @param name Name of returned Variable.\n * @return A Variable, an identity matrix.\n */\nexport function eyeVariable(size, dtype, name) {\n return new LayerVariable(tfc.eye(size), dtype, name);\n}\n/**\n * Get a Variable with uniform distribution of values.\n * @param shape Shape of the tensor.\n * @param minval Lower bound of the uniform distribution.\n * @param maxval Upper bound of the uniform distribution.\n * @param dtype\n * @param seed\n * @param name Optional name.\n * @return The uniform-random Variable.\n */\nexport function randomUniformVariable(shape, minval, maxval, dtype, seed, name = 'randomUniform') {\n return new LayerVariable(tfc.randomUniform(shape, minval, maxval, dtype), dtype, name);\n}\n/**\n * Get a Variable with truncated-normal distribution of values.\n * @param shape Shape of the tensor.\n * @param mean mean value of the normal distribution.\n * @param stddev standard deviation of the normal distribution.\n * @param dtype\n * @param seed\n * @param name Optional name.\n * @return The truncated-normal-random Variable.\n */\nexport function truncatedNormalVariable(shape, mean = 0.0, stddev = 1.0, dtype, seed, name = 'truncatedNormal') {\n // TODO(cais): Implement logic for dtype and seed once they are supported\n // by deeplearn.js.\n dtype = dtype || 'float32';\n if (dtype !== 'float32' && dtype !== 'int32') {\n throw new NotImplementedError(`randomNormal does not support dType ${dtype}.`);\n }\n return new LayerVariable(tfc.truncatedNormal(shape, mean, stddev, dtype, seed), dtype, name);\n}\n/**\n * Get a Variable with normal distribution of values.\n * @param shape Shape of the tensor.\n * @param mean mean value of the normal distribution.\n * @param stddev standard deviation of the normal distribution.\n * @param dtype\n * @param seed\n * @param name Optional name.\n * @return The truncated-normal-random Variable.\n */\nexport function randomNormalVariable(shape, mean = 0.0, stddev = 1.0, dtype, seed, name = 'randomNormal') {\n dtype = dtype || 'float32';\n if (dtype !== 'float32' && dtype !== 'int32') {\n throw new NotImplementedError(`randomNormalVariable does not support dType ${dtype}.`);\n }\n return new LayerVariable(tfc.randomNormal(shape, mean, stddev, dtype, seed), dtype, name);\n}\n/**\n * Update the value of a Variable.\n * @param x The Variable to be updated.\n * @param xNew The new value to update to.\n * @return The Variable updated.\n */\nexport function update(x, xNew) {\n return x.write(xNew);\n}\n/**\n * Update the value of a Variable by adding an increment.\n * @param x The Variable to be updated.\n * @param increment The incrment to add to `x`.\n * @return The Variable updated.\n */\nexport function updateAdd(x, increment) {\n return x.write(tfc.add(x.read(), increment));\n}\n/**\n * Update the value of a Variable by subtracting a decrement.\n * @param x The Variable to be updated.\n * @param decrement The decrement to subtract from `x`.\n * @return The Variable updated.\n */\nexport function updateSub(x, decrement) {\n return x.write(tfc.sub(x.read(), decrement));\n}\n/**\n * Get the values of an array of Variables.\n *\n * @param tensors An `Array` of `Variable`s to get the values of.\n 
* @return The values of the inputs, as an `Array` of `tf.Tensor`s.\n */\nexport function batchGetValue(xs) {\n return xs.map(x => x.read());\n}\n/**\n * Update the value of multiple Variables at once.\n *\n * @param variablesAndValues An `Array`, each element is of type\n * [Variable, Tensor]. The first item is the\n * `Variable` of which the value is to be updated. The second item\n * carries the new value.\n */\nexport function batchSetValue(variablesAndValues) {\n variablesAndValues.forEach(variableAndValue => {\n const variable = variableAndValue[0];\n variable.write(variableAndValue[1]);\n });\n}\n/**\n * Returns the gradients of `variables` w.r.t. the return value of `lossFn`.\n * @param lossFn A function which returns a Scalar to be used as the function\n * value (i.e., numerator) for differentiation.\n * @param variables List of variables to be used as the independent variables\n * (i.e., denominator) for differentiation.\n * @returns An Array of gradient tensors.\n */\nexport function gradients(lossFn, variables) {\n // TODO(cais): The return type signature can be simplified if deeplearn makes\n // the corresponding type public.\n const variableList = variables.map(variable => variable.read());\n const valueAndGrads = variableGrads(lossFn, variableList);\n return variables.map(variable => valueAndGrads.grads[variable.name]);\n}\n//# sourceMappingURL=variables.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* Original source: keras/engine/topology.py */\nimport { serialization, tidy, util } from '@tensorflow/tfjs-core';\nimport { getNextUniqueTensorId, getUid } from '../backend/state';\nimport { getScopedTensorName, getUniqueTensorName, nameScope } from '../common';\nimport { AttributeError, NotImplementedError, RuntimeError, ValueError } from '../errors';\nimport { getInitializer } from '../initializers';\nimport * as generic_utils from '../utils/generic_utils';\nimport * as types_utils from '../utils/types_utils';\nimport * as variable_utils from '../utils/variable_utils';\nimport { batchGetValue, batchSetValue, LayerVariable } from '../variables';\n/**\n * Specifies the ndim, dtype and shape of every input to a layer.\n *\n * Every layer should expose (if appropriate) an `inputSpec` attribute:\n * a list of instances of InputSpec (one per input tensor).\n *\n * A null entry in a shape is compatible with any dimension,\n * a null shape is compatible with any shape.\n */\nexport class InputSpec {\n constructor(args) {\n this.dtype = args.dtype;\n this.shape = args.shape;\n /*\n TODO(michaelterry): Could throw error if ndim and shape are both defined\n (then backport).\n */\n if (args.shape != null) {\n this.ndim = args.shape.length;\n }\n else {\n this.ndim = args.ndim;\n }\n this.maxNDim = args.maxNDim;\n this.minNDim = args.minNDim;\n this.axes = args.axes || {};\n }\n}\n/**\n * `tf.SymbolicTensor` is a placeholder for a Tensor without any concrete value.\n *\n * They are most often encountered when building a graph of `Layer`s for a\n * `tf.LayersModel`, when the input data's shape, but not its values, is known.\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\nexport class SymbolicTensor {\n /**\n *\n * @param dtype\n * @param shape\n * @param sourceLayer The Layer that produced this symbolic tensor.\n * @param 
inputs The inputs passed to sourceLayer's __call__() method.\n * @param nodeIndex\n * @param tensorIndex\n * @param callArgs The keyword arguments passed to the __call__() method.\n * @param name\n * @param outputTensorIndex The index of this tensor in the list of outputs\n * returned by apply().\n */\n constructor(dtype, shape, sourceLayer, inputs, callArgs, name, outputTensorIndex) {\n this.dtype = dtype;\n this.shape = shape;\n this.sourceLayer = sourceLayer;\n this.inputs = inputs;\n this.callArgs = callArgs;\n this.outputTensorIndex = outputTensorIndex;\n this.id = getNextUniqueTensorId();\n if (name != null) {\n this.originalName = getScopedTensorName(name);\n this.name = getUniqueTensorName(this.originalName);\n }\n this.rank = shape.length;\n }\n}\nlet _nextNodeID = 0;\n/**\n * A `Node` describes the connectivity between two layers.\n *\n * Each time a layer is connected to some new input,\n * a node is added to `layer.inboundNodes`.\n *\n * Each time the output of a layer is used by another layer,\n * a node is added to `layer.outboundNodes`.\n *\n * `nodeIndices` and `tensorIndices` are basically fine-grained coordinates\n * describing the origin of the `inputTensors`, verifying the following:\n *\n * `inputTensors[i] ==\n * inboundLayers[i].inboundNodes[nodeIndices[i]].outputTensors[\n * tensorIndices[i]]`\n *\n * A node from layer A to layer B is added to:\n * A.outboundNodes\n * B.inboundNodes\n */\nexport class Node {\n constructor(args, \n // TODO(michaelterry): Define actual type for this.\n callArgs) {\n this.callArgs = callArgs;\n this.id = _nextNodeID++;\n /*\n Layer instance (NOT a list).\n this is the layer that takes a list of input tensors\n and turns them into a list of output tensors.\n the current node will be added to\n the inboundNodes of outboundLayer.\n */\n this.outboundLayer = args.outboundLayer;\n /*\n The following 3 properties describe where\n the input tensors come from: which layers,\n and for each layer, which node and which\n tensor output of each node.\n */\n // List of layer instances.\n this.inboundLayers = args.inboundLayers;\n // List of integers, 1:1 mapping with inboundLayers.\n this.nodeIndices = args.nodeIndices;\n // List of integers, 1:1 mapping with inboundLayers.\n this.tensorIndices = args.tensorIndices;\n /*\n Following 2 properties:\n tensor inputs and outputs of outboundLayer.\n */\n // List of tensors. 1:1 mapping with inboundLayers.\n this.inputTensors = args.inputTensors;\n // List of tensors, created by outboundLayer.call().\n this.outputTensors = args.outputTensors;\n /*\n Following 2 properties: input and output masks.\n List of tensors, 1:1 mapping with inputTensor.\n */\n this.inputMasks = args.inputMasks;\n // List of tensors, created by outboundLayer.computeMask().\n this.outputMasks = args.outputMasks;\n // Following 2 properties: input and output shapes.\n // List of shape tuples, shapes of inputTensors.\n this.inputShapes = args.inputShapes;\n // List of shape tuples, shapes of outputTensors.\n this.outputShapes = args.outputShapes;\n // Add nodes to all layers involved.\n for (const layer of args.inboundLayers) {\n if (layer != null) {\n layer.outboundNodes.push(this);\n }\n }\n args.outboundLayer.inboundNodes.push(this);\n }\n getConfig() {\n const inboundNames = [];\n for (const layer of this.inboundLayers) {\n if (layer != null) {\n inboundNames.push(layer.name);\n }\n else {\n inboundNames.push(null);\n }\n }\n return {\n outboundLayer: this.outboundLayer ? 
this.outboundLayer.name : null,\n inboundLayers: inboundNames,\n nodeIndices: this.nodeIndices,\n tensorIndices: this.tensorIndices\n };\n }\n}\nlet _nextLayerID = 0;\n/**\n * A layer is a grouping of operations and weights that can be composed to\n * create a `tf.LayersModel`.\n *\n * Layers are constructed by using the functions under the\n * [tf.layers](#Layers-Basic) namespace.\n *\n * @doc {heading: 'Layers', subheading: 'Classes', namespace: 'layers'}\n */\nexport class Layer extends serialization.Serializable {\n constructor(args = {}) {\n super();\n this._callHook = null;\n this._addedWeightNames = [];\n // Porting Notes: PyKeras does not have this property in this base Layer\n // class. Instead lets Layer subclass set it dynamically and checks the\n // value with `hasattr`. In tfjs-layers, we let this be a member of this\n // base class.\n this._stateful = false;\n this.id = _nextLayerID++;\n this.activityRegularizer = null;\n this.inputSpec = null;\n this.supportsMasking = false;\n // These properties will be set upon call of this.build()\n this._trainableWeights = [];\n this._nonTrainableWeights = [];\n this._losses = [];\n this._updates = [];\n this._built = false;\n /*\n These lists will be filled via successive calls\n to this.addInboundNode().\n */\n this.inboundNodes = [];\n this.outboundNodes = [];\n let name = args.name;\n if (!name) {\n const prefix = this.getClassName();\n name = generic_utils.toSnakeCase(prefix) + '_' + getUid(prefix);\n }\n this.name = name;\n this.trainable_ = args.trainable == null ? true : args.trainable;\n if (args.inputShape != null || args.batchInputShape != null) {\n /*\n In this case we will later create an input layer\n to insert before the current layer\n */\n let batchInputShape;\n if (args.batchInputShape != null) {\n batchInputShape = args.batchInputShape;\n }\n else if (args.inputShape != null) {\n let batchSize = null;\n if (args.batchSize != null) {\n batchSize = args.batchSize;\n }\n batchInputShape = [batchSize].concat(args.inputShape);\n }\n this.batchInputShape = batchInputShape;\n // Set dtype.\n let dtype = args.dtype;\n if (dtype == null) {\n dtype = args.inputDType;\n }\n if (dtype == null) {\n dtype = 'float32';\n }\n this.dtype = dtype;\n }\n if (args.weights != null) {\n this.initialWeights = args.weights;\n }\n else {\n this.initialWeights = null;\n }\n // The value of `_refCount` is initialized to null. When the layer is used\n // in a symbolic way for the first time, it will be set to 1.\n this._refCount = null;\n this.fastWeightInitDuringBuild = false;\n }\n /**\n * Converts a layer and its index to a unique (immutable type) name.\n * This function is used internally with `this.containerNodes`.\n * @param layer The layer.\n * @param nodeIndex The layer's position (e.g. 
via enumerate) in a list of\n * nodes.\n *\n * @returns The unique name.\n */\n static nodeKey(layer, nodeIndex) {\n return layer.name + '_ib-' + nodeIndex.toString();\n }\n /**\n * Returns this.inboundNode at index nodeIndex.\n *\n * Porting note: This is a replacement for _get_node_attribute_at_index()\n * @param nodeIndex\n * @param attrName The name of the attribute related to request for this node.\n */\n getNodeAtIndex(nodeIndex, attrName) {\n if (this.inboundNodes.length === 0) {\n throw new RuntimeError('The layer has never been called ' +\n `and thus has no defined ${attrName}.`);\n }\n if (this.inboundNodes.length <= nodeIndex) {\n throw new ValueError(`Asked to get ${attrName} at node ${nodeIndex}, ` +\n `but the layer has only ${this.inboundNodes.length} inbound nodes.`);\n }\n return this.inboundNodes[nodeIndex];\n }\n /**\n * Retrieves the input tensor(s) of a layer at a given node.\n *\n * @param nodeIndex Integer, index of the node from which to retrieve the\n * attribute. E.g. `nodeIndex=0` will correspond to the first time the layer\n * was called.\n *\n * @return A tensor (or list of tensors if the layer has multiple inputs).\n */\n getInputAt(nodeIndex) {\n return generic_utils.singletonOrArray(this.getNodeAtIndex(nodeIndex, 'input').inputTensors);\n }\n /**\n * Retrieves the output tensor(s) of a layer at a given node.\n *\n * @param nodeIndex Integer, index of the node from which to retrieve the\n * attribute. E.g. `nodeIndex=0` will correspond to the first time the layer\n * was called.\n *\n * @return A tensor (or list of tensors if the layer has multiple outputs).\n */\n getOutputAt(nodeIndex) {\n return generic_utils.singletonOrArray(this.getNodeAtIndex(nodeIndex, 'output').outputTensors);\n }\n // Properties\n /**\n * Retrieves the input tensor(s) of a layer.\n *\n * Only applicable if the layer has exactly one inbound node,\n * i.e. if it is connected to one incoming layer.\n *\n * @return Input tensor or list of input tensors.\n *\n * @exception AttributeError if the layer is connected to more than one\n * incoming layers.\n */\n get input() {\n if (this.inboundNodes.length > 1) {\n throw new AttributeError(`Layer ${this.name}` +\n ' has multiple inbound nodes, ' +\n 'hence the notion of \"layer input\" ' +\n 'is ill-defined. ' +\n 'Use `getInputAt(nodeIndex)` instead.');\n }\n else if (this.inboundNodes.length === 0) {\n throw new AttributeError(`Layer ${this.name}` +\n ' is not connected, no input to return.');\n }\n return generic_utils.singletonOrArray(this.getNodeAtIndex(0, 'input').inputTensors);\n }\n /**\n * Retrieves the output tensor(s) of a layer.\n *\n * Only applicable if the layer has exactly one inbound node,\n * i.e. if it is connected to one incoming layer.\n *\n * @return Output tensor or list of output tensors.\n *\n * @exception AttributeError if the layer is connected to more than one\n * incoming layers.\n */\n get output() {\n if (this.inboundNodes.length === 0) {\n throw new AttributeError(`Layer ${this.name}` +\n ' has no inbound nodes.');\n }\n if (this.inboundNodes.length > 1) {\n throw new AttributeError(`Layer ${this.name}` +\n ' has multiple inbound nodes, ' +\n 'hence the notion of \"layer output\" ' +\n 'is ill-defined. 
' +\n 'Use `getOutputAt(nodeIndex)` instead.');\n }\n return generic_utils.singletonOrArray(this.getNodeAtIndex(0, 'output').outputTensors);\n }\n get losses() {\n return this._losses;\n }\n /**\n * Retrieves the Layer's current loss values.\n *\n * Used for regularizers during training.\n */\n calculateLosses() {\n // Porting Node: This is an augmentation to Layer.loss in PyKeras.\n // In PyKeras, Layer.loss returns symbolic tensors. Here a concrete\n // Tensor (specifically Scalar) values are returned. This is due to the\n // imperative backend.\n return this.losses.map(lossFn => lossFn());\n }\n get updates() {\n return this._updates;\n }\n get built() {\n return this._built;\n }\n set built(built) {\n this._built = built;\n }\n get trainable() {\n return this.trainable_;\n }\n set trainable(trainable) {\n this._trainableWeights.forEach(w => w.trainable = trainable);\n this.trainable_ = trainable;\n }\n get trainableWeights() {\n if (this.trainable_) {\n return this._trainableWeights.filter(w => w.trainable);\n }\n else {\n return [];\n }\n }\n set trainableWeights(weights) {\n this._trainableWeights = weights;\n }\n get nonTrainableWeights() {\n if (this.trainable) {\n return this._trainableWeights.filter(w => !w.trainable)\n .concat(this._nonTrainableWeights);\n }\n else {\n return this._trainableWeights.concat(this._nonTrainableWeights);\n }\n }\n set nonTrainableWeights(weights) {\n this._nonTrainableWeights = weights;\n }\n /**\n * The concatenation of the lists trainableWeights and nonTrainableWeights\n * (in this order).\n */\n get weights() {\n return this.trainableWeights.concat(this.nonTrainableWeights);\n }\n get stateful() {\n return this._stateful;\n }\n /**\n * Reset the states of the layer.\n *\n * This method of the base Layer class is essentially a no-op.\n * Subclasses that are stateful (e.g., stateful RNNs) should override this\n * method.\n */\n resetStates() {\n if (!this.stateful) {\n throw new Error('Cannot call the resetStates() method of a non-stateful Layer ' +\n 'object.');\n }\n }\n /**\n * Checks compatibility between the layer and provided inputs.\n *\n * This checks that the tensor(s) `input`\n * verify the input assumptions of the layer\n * (if any). If not, exceptions are raised.\n *\n * @param inputs Input tensor or list of input tensors.\n *\n * @exception ValueError in case of mismatch between\n * the provided inputs and the expectations of the layer.\n */\n assertInputCompatibility(inputs) {\n inputs = generic_utils.toList(inputs);\n if (this.inputSpec == null || this.inputSpec.length === 0) {\n return;\n }\n const inputSpec = generic_utils.toList(this.inputSpec);\n if (inputs.length !== inputSpec.length) {\n throw new ValueError(`Layer ${this.name} expects ${inputSpec.length} inputs, ` +\n `but it received ${inputs.length} input tensors. 
` +\n `Input received: ${inputs}`);\n }\n for (let inputIndex = 0; inputIndex < inputs.length; inputIndex++) {\n const x = inputs[inputIndex];\n const spec = inputSpec[inputIndex];\n if (spec == null) {\n continue;\n }\n // Check ndim.\n const ndim = x.rank;\n if (spec.ndim != null) {\n if (ndim !== spec.ndim) {\n throw new ValueError(`Input ${inputIndex} is incompatible with layer ${this.name}: ` +\n `expected ndim=${spec.ndim}, found ndim=${ndim}`);\n }\n }\n if (spec.maxNDim != null) {\n if (ndim > spec.maxNDim) {\n throw new ValueError(`Input ${inputIndex} is incompatible with layer ${this.name}` +\n `: expected max_ndim=${spec.maxNDim}, found ndim=${ndim}`);\n }\n }\n if (spec.minNDim != null) {\n if (ndim < spec.minNDim) {\n throw new ValueError(`Input ${inputIndex} is incompatible with layer ${this.name}` +\n `: expected min_ndim=${spec.minNDim}, found ndim=${ndim}.`);\n }\n }\n // Check dtype.\n if (spec.dtype != null) {\n if (x.dtype !== spec.dtype) {\n throw new ValueError(`Input ${inputIndex} is incompatible with layer ${this.name} ` +\n `: expected dtype=${spec.dtype}, found dtype=${x.dtype}.`);\n }\n }\n // Check specific shape axes.\n if (spec.axes) {\n const xShape = x.shape;\n for (const key in spec.axes) {\n const axis = Number(key);\n const value = spec.axes[key];\n // Perform Python-style slicing in case axis < 0;\n // TODO(cais): Use https://github.com/alvivi/typescript-underscore to\n // ensure type safety through Underscore calls.\n const xShapeAtAxis = axis >= 0 ? xShape[axis] : xShape[xShape.length + axis];\n if (value != null && [value, null].indexOf(xShapeAtAxis) === -1) {\n throw new ValueError(`Input ${inputIndex} is incompatible with layer ` +\n `${this.name}: expected axis ${axis} of input shape to ` +\n `have value ${value} but got shape ${xShape}.`);\n }\n }\n }\n // Check shape.\n if (spec.shape != null) {\n for (let i = 0; i < spec.shape.length; ++i) {\n const specDim = spec.shape[i];\n const dim = x.shape[i];\n if (specDim != null && dim != null) {\n if (specDim !== dim) {\n throw new ValueError(`Input ${inputIndex} is incompatible with layer ` +\n `${this.name}: expected shape=${spec.shape}, ` +\n `found shape=${x.shape}.`);\n }\n }\n }\n }\n }\n }\n /**\n * This is where the layer's logic lives.\n *\n * @param inputs Input tensor, or list/tuple of input tensors.\n * @param kwargs Additional keyword arguments.\n *\n * @return A tensor or list/tuple of tensors.\n */\n call(inputs, kwargs) {\n return inputs;\n }\n invokeCallHook(inputs, kwargs) {\n if (this._callHook != null) {\n this._callHook(inputs, kwargs);\n }\n }\n /**\n * Set call hook.\n * This is currently used for testing only.\n * @param callHook\n */\n setCallHook(callHook) {\n this._callHook = callHook;\n }\n /**\n * Clear call hook.\n * This is currently used for testing only.\n */\n clearCallHook() {\n this._callHook = null;\n }\n /**\n * Builds or executes a `Layer's logic.\n *\n * When called with `tf.Tensor`(s), execute the `Layer`s computation and\n * return Tensor(s). 
For example:\n *\n * ```js\n * const denseLayer = tf.layers.dense({\n * units: 1,\n * kernelInitializer: 'zeros',\n * useBias: false\n * });\n *\n * // Invoke the layer's apply() method with a `tf.Tensor` (with concrete\n * // numeric values).\n * const input = tf.ones([2, 2]);\n * const output = denseLayer.apply(input);\n *\n * // The output's value is expected to be [[0], [0]], due to the fact that\n * // the dense layer has a kernel initialized to all-zeros and does not have\n * // a bias.\n * output.print();\n * ```\n *\n * When called with `tf.SymbolicTensor`(s), this will prepare the layer for\n * future execution. This entails internal book-keeping on shapes of\n * expected Tensors, wiring layers together, and initializing weights.\n *\n * Calling `apply` with `tf.SymbolicTensor`s are typically used during the\n * building of non-`tf.Sequential` models. For example:\n *\n * ```js\n * const flattenLayer = tf.layers.flatten();\n * const denseLayer = tf.layers.dense({units: 1});\n *\n * // Use tf.layers.input() to obtain a SymbolicTensor as input to apply().\n * const input = tf.input({shape: [2, 2]});\n * const output1 = flattenLayer.apply(input);\n *\n * // output1.shape is [null, 4]. The first dimension is the undetermined\n * // batch size. The second dimension comes from flattening the [2, 2]\n * // shape.\n * console.log(JSON.stringify(output1.shape));\n *\n * // The output SymbolicTensor of the flatten layer can be used to call\n * // the apply() of the dense layer:\n * const output2 = denseLayer.apply(output1);\n *\n * // output2.shape is [null, 1]. The first dimension is the undetermined\n * // batch size. The second dimension matches the number of units of the\n * // dense layer.\n * console.log(JSON.stringify(output2.shape));\n *\n * // The input and output and be used to construct a model that consists\n * // of the flatten and dense layers.\n * const model = tf.model({inputs: input, outputs: output2});\n * ```\n *\n * @param inputs a `tf.Tensor` or `tf.SymbolicTensor` or an Array of them.\n * @param kwargs Additional keyword arguments to be passed to `call()`.\n *\n * @return Output of the layer's `call` method.\n *\n * @exception ValueError error in case the layer is missing shape information\n * for its `build` call.\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n // Porting Note: This is a replacement for __call__() in Python.\n apply(inputs, kwargs) {\n kwargs = kwargs || {};\n this.assertNotDisposed();\n // Ensure inputs are all the same type.\n const inputsList = generic_utils.toList(inputs);\n let allAreSymbolic = true;\n for (const input of inputsList) {\n if (!(input instanceof SymbolicTensor)) {\n allAreSymbolic = false;\n break;\n }\n }\n let noneAreSymbolic = true;\n for (const input of inputsList) {\n if (input instanceof SymbolicTensor) {\n noneAreSymbolic = false;\n break;\n }\n }\n if (allAreSymbolic === noneAreSymbolic) {\n throw new ValueError('Arguments to apply() must be all ' +\n 'SymbolicTensors or all Tensors');\n }\n // TODO(michaelterry): nameScope() may not be necessary.\n return nameScope(this.name, () => {\n // Handle laying building (weight creating, input spec locking).\n if (!this.built) {\n /*\n Throw exceptions in case the input is not compatible\n with the inputSpec specified in the layer constructor.\n */\n this.assertInputCompatibility(inputs);\n // Collect input shapes to build layer.\n const inputShapes = [];\n for (const xElem of generic_utils.toList(inputs)) {\n inputShapes.push(xElem.shape);\n }\n 
this.build(generic_utils.singletonOrArray(inputShapes));\n this.built = true;\n // Load weights that were specified at layer instantiation.\n if (this.initialWeights) {\n this.setWeights(this.initialWeights);\n }\n if (this._refCount === null && noneAreSymbolic) {\n // The first use of this layer is a non-symbolic call, set ref count\n // to 1 so the Layer can be properly disposed if its dispose() method\n // is called.\n this._refCount = 1;\n }\n }\n /*\n Throw exceptions in case the input is not compatible\n with the inputSpec set at build time.\n */\n this.assertInputCompatibility(inputs);\n // Handle mask propagation.\n // TODO(michaelterry): Mask propagation not currently implemented.\n // Actually call the layer, collecting output(s), mask(s), and shape(s).\n if (noneAreSymbolic) {\n let output = this.call(inputs, kwargs);\n // TODO(michaelterry): Compute the outputMask\n // If the layer returns tensors from its inputs, unmodified,\n // we copy them to avoid loss of tensor metadata.\n const outputList = generic_utils.toList(output);\n const outputListCopy = [];\n // TODO(michaelterry): This copying may not be necessary given our eager\n // backend.\n for (let x of outputList) {\n if (inputsList.indexOf(x) !== -1) {\n x = x.clone();\n }\n outputListCopy.push(x);\n }\n output = generic_utils.singletonOrArray(outputListCopy);\n if (this.activityRegularizer != null) {\n throw new NotImplementedError('Layer invocation in the presence of activity ' +\n 'regularizer(s) is not supported yet.');\n }\n // TODO(michaelterry): Call addInboundNode()?\n return output;\n }\n else {\n const inputShape = collectInputShape(inputs);\n const outputShape = this.computeOutputShape(inputShape);\n let output;\n const outputDType = guessOutputDType(inputs);\n this.warnOnIncompatibleInputShape(Array.isArray(inputs) ? inputShape[0] :\n inputShape);\n if (outputShape != null && outputShape.length > 0 &&\n Array.isArray(outputShape[0])) {\n // We have multiple output shapes. 
Create multiple output tensors.\n output = outputShape\n .map((shape, index) => new SymbolicTensor(outputDType, shape, this, generic_utils.toList(inputs), kwargs, this.name, index));\n }\n else {\n output = new SymbolicTensor(outputDType, outputShape, this, generic_utils.toList(inputs), kwargs, this.name);\n }\n /*\n Add an inbound node to the layer, so that it keeps track\n of the call and of all new variables created during the call.\n This also updates the layer history of the output tensor(s).\n If the input tensor(s) had no previous history,\n this does nothing.\n */\n this.addInboundNode(inputs, output, null, null, inputShape, outputShape, kwargs);\n this._refCount++;\n if (this.activityRegularizer != null) {\n throw new NotImplementedError('Layer invocation in the presence of activity ' +\n 'regularizer(s) is not supported yet.');\n }\n return output;\n }\n });\n }\n /**\n * Check compatibility between input shape and this layer's batchInputShape.\n *\n * Print warning if any incompatibility is found.\n *\n * @param inputShape Input shape to be checked.\n */\n warnOnIncompatibleInputShape(inputShape) {\n if (this.batchInputShape == null) {\n return;\n }\n else if (inputShape.length !== this.batchInputShape.length) {\n console.warn(`The rank of the input tensor provided (shape: ` +\n `${JSON.stringify(inputShape)}) does not match that of the ` +\n `batchInputShape (${JSON.stringify(this.batchInputShape)}) ` +\n `of the layer ${this.name}`);\n }\n else {\n let dimMismatch = false;\n this.batchInputShape.forEach((dimension, i) => {\n if (dimension != null && inputShape[i] != null &&\n inputShape[i] !== dimension) {\n dimMismatch = true;\n }\n });\n if (dimMismatch) {\n console.warn(`The shape of the input tensor ` +\n `(${JSON.stringify(inputShape)}) does not ` +\n `match the expectation of layer ${this.name}: ` +\n `${JSON.stringify(this.batchInputShape)}`);\n }\n }\n }\n /**\n * Retrieves the output shape(s) of a layer.\n *\n * Only applicable if the layer has only one inbound node, or if all inbound\n * nodes have the same output shape.\n *\n * @returns Output shape or shapes.\n * @throws AttributeError: if the layer is connected to more than one incoming\n * nodes.\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n get outputShape() {\n if (this.inboundNodes == null || this.inboundNodes.length === 0) {\n throw new AttributeError(`The layer ${this.name} has never been called and thus has no ` +\n `defined output shape.`);\n }\n const allOutputShapes = [];\n for (const node of this.inboundNodes) {\n const shapeString = JSON.stringify(node.outputShapes);\n if (allOutputShapes.indexOf(shapeString) === -1) {\n allOutputShapes.push(shapeString);\n }\n }\n if (allOutputShapes.length === 1) {\n const outputShapes = this.inboundNodes[0].outputShapes;\n if (Array.isArray(outputShapes) && Array.isArray(outputShapes[0]) &&\n outputShapes.length === 1) {\n return outputShapes[0];\n }\n else {\n return outputShapes;\n }\n }\n else {\n throw new AttributeError(`The layer ${this.name} has multiple inbound nodes with different ` +\n `output shapes. 
Hence the notion of \"output shape\" is ill-defined ` +\n `for the layer.`);\n // TODO(cais): Implement getOutputShapeAt().\n }\n }\n /**\n * Counts the total number of numbers (e.g., float32, int32) in the\n * weights.\n *\n * @returns An integer count.\n * @throws RuntimeError: If the layer is not built yet (in which case its\n * weights are not defined yet.)\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n countParams() {\n if (!this.built) {\n throw new RuntimeError(`You tried to call countParams() on ${this.name}, ` +\n `but the layer is not built yet. Build it first by calling ` +\n `build(batchInputShape).`);\n }\n return variable_utils.countParamsInWeights(this.weights);\n }\n /**\n * Creates the layer weights.\n *\n * Must be implemented on all layers that have weights.\n *\n * Called when apply() is called to construct the weights.\n *\n * @param inputShape A `Shape` or array of `Shape` (unused).\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n build(inputShape) {\n this.built = true;\n }\n /**\n * Returns the current values of the weights of the layer.\n *\n * @param trainableOnly Whether to get the values of only trainable weights.\n * @returns Weight values as an `Array` of `tf.Tensor`s.\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n getWeights(trainableOnly = false) {\n return batchGetValue(trainableOnly ? this.trainableWeights : this.weights);\n }\n /**\n * Sets the weights of the layer, from Tensors.\n *\n * @param weights a list of Tensors. The number of arrays and their shape\n * must match number of the dimensions of the weights of the layer (i.e.\n * it should match the output of `getWeights`).\n *\n * @exception ValueError If the provided weights list does not match the\n * layer's specifications.\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n setWeights(weights) {\n tidy(() => {\n const params = this.weights;\n if (params.length !== weights.length) {\n // TODO(cais): Restore the following and use `providedWeights`, instead\n // of `weights` in the error message, once the deeplearn.js bug is\n // fixed: https://github.com/PAIR-code/deeplearnjs/issues/498 const\n // providedWeights = JSON.stringify(weights).substr(0, 50);\n throw new ValueError(`You called setWeights(weights) on layer \"${this.name}\" ` +\n `with a weight list of length ${weights.length}, ` +\n `but the layer was expecting ${params.length} weights. 
` +\n `Provided weights: ${weights}...`);\n }\n if (params.length === 0) {\n return;\n }\n const weightValueTuples = [];\n const paramValues = batchGetValue(params);\n for (let i = 0; i < paramValues.length; ++i) {\n const pv = paramValues[i];\n const p = params[i];\n const w = weights[i];\n if (!util.arraysEqual(pv.shape, w.shape)) {\n throw new ValueError(`Layer weight shape ${pv.shape} ` +\n `not compatible with provided weight shape ${w.shape}`);\n }\n weightValueTuples.push([p, w]);\n }\n batchSetValue(weightValueTuples);\n });\n }\n /**\n * Adds a weight variable to the layer.\n *\n * @param name Name of the new weight variable.\n * @param shape The shape of the weight.\n * @param dtype The dtype of the weight.\n * @param initializer An initializer instance.\n * @param regularizer A regularizer instance.\n * @param trainable Whether the weight should be trained via backprop or not\n * (assuming that the layer itself is also trainable).\n * @param constraint An optional constraint.\n * @return The created weight variable.\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n addWeight(name, shape, dtype, initializer, regularizer, trainable, constraint) {\n // Reject duplicate weight names.\n if (this._addedWeightNames.indexOf(name) !== -1) {\n throw new ValueError(`Duplicate weight name ${name} for layer ${this.name}`);\n }\n this._addedWeightNames.push(name);\n if (dtype == null) {\n dtype = 'float32';\n }\n if (this.fastWeightInitDuringBuild) {\n initializer = getInitializer('zeros');\n }\n const initValue = initializer.apply(shape, dtype);\n const weight = new LayerVariable(initValue, dtype, name, trainable, constraint);\n initValue.dispose();\n // Request backend not to dispose the weights of the model on scope() exit.\n if (regularizer != null) {\n this.addLoss(() => regularizer.apply(weight.read()));\n }\n if (trainable == null) {\n trainable = true;\n }\n if (trainable) {\n this._trainableWeights.push(weight);\n }\n else {\n this._nonTrainableWeights.push(weight);\n }\n return weight;\n }\n /**\n * Set the fast-weight-initialization flag.\n *\n * In cases where the initialized weight values will be immediately\n * overwritten by loaded weight values during model loading, setting\n * the flag to `true` saves unnecessary calls to potentially expensive\n * initializers and speeds up the loading process.\n *\n * @param value Target value of the flag.\n */\n setFastWeightInitDuringBuild(value) {\n this.fastWeightInitDuringBuild = value;\n }\n /**\n * Add losses to the layer.\n *\n * The loss may potentially be conditional on some input tensors,\n * for instance activity losses are conditional on the layer's inputs.\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n addLoss(losses) {\n if (losses == null || Array.isArray(losses) && losses.length === 0) {\n return;\n }\n // Update this.losses\n losses = generic_utils.toList(losses);\n if (this._losses !== undefined && this._losses !== null) {\n this.losses.push(...losses);\n }\n }\n /**\n * Computes the output shape of the layer.\n *\n * Assumes that the layer will be built to match the input shape provided.\n *\n * @param inputShape A shape (tuple of integers) or a list of shape tuples\n * (one per output tensor of the layer). 
Shape tuples can include null for\n * free dimensions, instead of an integer.\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n computeOutputShape(inputShape) {\n return inputShape;\n }\n /**\n * Computes an output mask tensor.\n *\n * @param inputs Tensor or list of tensors.\n * @param mask Tensor or list of tensors.\n *\n * @return null or a tensor (or list of tensors, one per output tensor of the\n * layer).\n */\n computeMask(inputs, mask) {\n if (!this.supportsMasking) {\n if (mask != null) {\n if (Array.isArray(mask)) {\n mask.forEach(maskElement => {\n if (maskElement != null) {\n throw new TypeError(`Layer ${this.name} does not support masking, ` +\n 'but was passed an inputMask.');\n }\n });\n }\n else {\n throw new TypeError(`Layer ${this.name} does not support masking, ` +\n 'but was passed an inputMask.');\n }\n }\n // masking not explicitly supported: return null as mask\n return null;\n }\n // if masking is explictly supported, by default\n // carry over the input mask\n return mask;\n }\n /**\n * Internal method to create an inbound node for the layer.\n *\n * @param inputTensors List of input tensors.\n * @param outputTensors List of output tensors.\n * @param inputMasks List of input masks (a mask can be a tensor, or null).\n * @param outputMasks List of output masks (a mask can be a tensor, or null).\n * @param inputShapes List of input shape tuples.\n * @param outputShapes List of output shape tuples.\n * @param kwargs Dictionary of keyword arguments that were passed to the\n * `call` method of the layer at the call that created the node.\n */\n addInboundNode(inputTensors, outputTensors, inputMasks, outputMasks, inputShapes, outputShapes, kwargs = null) {\n const inputTensorList = generic_utils.toList(inputTensors);\n outputTensors = generic_utils.toList(outputTensors);\n inputMasks = generic_utils.toList(inputMasks);\n outputMasks = generic_utils.toList(outputMasks);\n inputShapes = types_utils.normalizeShapeList(inputShapes);\n outputShapes = types_utils.normalizeShapeList(outputShapes);\n // Collect input tensor(s) coordinates.\n const inboundLayers = [];\n const nodeIndices = [];\n const tensorIndices = [];\n for (const x of inputTensorList) {\n /*\n * TODO(michaelterry): Keras adds this value to tensors; it's not\n * clear whether we'll use this or not.\n */\n inboundLayers.push(x.sourceLayer);\n nodeIndices.push(x.nodeIndex);\n tensorIndices.push(x.tensorIndex);\n }\n // Create node, add it to inbound nodes.\n // (This call has side effects.)\n // tslint:disable-next-line:no-unused-expression\n new Node({\n outboundLayer: this,\n inboundLayers,\n nodeIndices,\n tensorIndices,\n inputTensors: inputTensorList,\n outputTensors,\n inputMasks,\n outputMasks,\n inputShapes,\n outputShapes\n }, kwargs);\n // Update tensor history\n for (let i = 0; i < outputTensors.length; i++) {\n // TODO(michaelterry: _uses_learning_phase not tracked.\n outputTensors[i].sourceLayer = this;\n outputTensors[i].nodeIndex = this.inboundNodes.length - 1;\n outputTensors[i].tensorIndex = i;\n }\n }\n /**\n * Returns the config of the layer.\n *\n * A layer config is a TS dictionary (serializable)\n * containing the configuration of a layer.\n * The same layer can be reinstantiated later\n * (without its trained weights) from this configuration.\n *\n * The config of a layer does not include connectivity\n * information, nor the layer class name. 
These are handled\n * by 'Container' (one layer of abstraction above).\n *\n * Porting Note: The TS dictionary follows TS naming standards for\n * keys, and uses tfjs-layers type-safe Enums. Serialization methods\n * should use a helper function to convert to the pythonic storage\n * standard. (see serialization_utils.convertTsToPythonic)\n *\n * @returns TS dictionary of configuration.\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n getConfig() {\n const config = { name: this.name, trainable: this.trainable };\n if (this.batchInputShape != null) {\n config['batchInputShape'] = this.batchInputShape;\n }\n if (this.dtype != null) {\n config['dtype'] = this.dtype;\n }\n return config;\n }\n /**\n * Dispose the weight variables that this Layer instance holds.\n *\n * @returns {number} Number of disposed variables.\n */\n disposeWeights() {\n this.weights.forEach(weight => weight.dispose());\n return this.weights.length;\n }\n assertNotDisposed() {\n if (this._refCount === 0) {\n throw new Error(`Layer '${this.name}' is already disposed.`);\n }\n }\n /**\n * Attempt to dispose layer's weights.\n *\n * This method decreases the reference count of the Layer object by 1.\n *\n * A Layer is reference-counted. Its reference count is incremented by 1\n * the first time its `apply()` method is called and when it becomes a part\n * of a new `Node` (through calling the `apply()` method on a\n * `tf.SymbolicTensor`).\n *\n * If the reference count of a Layer becomes 0, all the weights will be\n * disposed and the underlying memory (e.g., the textures allocated in WebGL)\n * will be freed.\n *\n * Note: If the reference count is greater than 0 after the decrement, the\n * weights of the Layer will *not* be disposed.\n *\n * After a Layer is disposed, it cannot be used in calls such as `apply()`,\n * `getWeights()` or `setWeights()` anymore.\n *\n * @returns A DisposeResult Object with the following fields:\n * - refCountAfterDispose: The reference count of the Container after this\n * `dispose()` call.\n * - numDisposedVariables: Number of `tf.Variable`s (i.e., weights) disposed\n * during this `dispose()` call.\n * @throws {Error} If the layer is not built yet, or if the layer has already\n * been disposed.\n *\n * @doc {heading: 'Models', 'subheading': 'Classes'}\n */\n dispose() {\n if (!this.built) {\n throw new Error(`Cannot dispose Layer ${this.name} because it has not been ` +\n `built yet.`);\n }\n if (this._refCount === null) {\n throw new Error(`Cannot dispose Layer ${this.name} because it has not been used ` +\n `yet.`);\n }\n this.assertNotDisposed();\n let numDisposedVariables = 0;\n if (--this._refCount === 0) {\n numDisposedVariables = this.disposeWeights();\n }\n return { refCountAfterDispose: this._refCount, numDisposedVariables };\n }\n}\n/**\n * Collects the input shape(s) of a list of `tf.Tensor`s or\n * `tf.SymbolicTensor`s.\n *\n * TODO(michaelterry): Update PyKeras docs (backport).\n *\n * @param inputTensors List of input tensors (or single input tensor).\n *\n * @return List of shape tuples (or single tuple), one tuple per input.\n */\nfunction collectInputShape(inputTensors) {\n inputTensors =\n generic_utils.toList(inputTensors);\n const shapes = [];\n for (const x of inputTensors) {\n shapes.push(x.shape);\n }\n return generic_utils.singletonOrArray(shapes);\n}\n/**\n * Guesses output dtype based on inputs.\n *\n * At present, just returns 'float32' for any input.\n *\n * @param inputTensors List of input tensors (or single input tensor).\n *\n * @return The 
guessed DType. At present, always returns 'float32'.\n */\nfunction guessOutputDType(inputTensors) {\n return 'float32';\n}\n/**\n * Returns the list of input tensors necessary to compute `tensor`.\n *\n * Output will always be a list of tensors (potentially with 1 element).\n *\n * @param tensor The tensor to start from.\n * @param layer Origin layer of the tensor.\n * @param nodeIndex Origin node index of the tensor.\n *\n * @return Array of input tensors.\n */\nexport function getSourceInputs(tensor, layer, nodeIndex) {\n if (layer == null || (nodeIndex != null && nodeIndex > 0)) {\n layer = tensor.sourceLayer;\n nodeIndex = tensor.nodeIndex;\n }\n if (layer.inboundNodes.length === 0) {\n return [tensor];\n }\n else {\n const node = layer.inboundNodes[nodeIndex];\n if (node.inboundLayers.length === 0) {\n return node.inputTensors;\n }\n else {\n const sourceTensors = [];\n for (let i = 0; i < node.inboundLayers.length; i++) {\n const x = node.inputTensors[i];\n const layer = node.inboundLayers[i];\n const nodeIndex = node.nodeIndices[i];\n const previousSources = getSourceInputs(x, layer, nodeIndex);\n // Avoid input redundancy.\n for (const x of previousSources) {\n if (sourceTensors.indexOf(x) === -1) {\n sourceTensors.push(x);\n }\n }\n }\n return sourceTensors;\n }\n }\n}\n//# sourceMappingURL=topology.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport { serialization } from '@tensorflow/tfjs-core';\nimport { getUid } from '../backend/state';\nimport { ValueError } from '../errors';\nimport { Layer, Node, SymbolicTensor } from './topology';\nexport class InputLayer extends Layer {\n constructor(args) {\n super({\n dtype: args.dtype,\n name: args.name != null ? 
args.name : getUid('input').toString()\n });\n // Normalize config.batchSize and config.sparse\n if (args.batchSize == null) {\n args.batchSize = null;\n }\n if (args.sparse == null) {\n args.sparse = false;\n }\n this.trainable = false;\n this.built = true;\n this.sparse = args.sparse;\n if (args.inputShape != null && args.batchInputShape != null) {\n throw new ValueError('Only provide the inputShape OR ' +\n 'batchInputShape argument to inputLayer, not both at the same time.');\n }\n let batchInputShape = args.batchInputShape;\n if (batchInputShape == null) {\n if (args.inputShape == null) {\n throw new ValueError('An InputLayer should be passed either a ' +\n '`batchInputShape` or an `inputShape`.');\n }\n else {\n batchInputShape = [args.batchSize].concat(args.inputShape);\n }\n }\n else {\n // TODO(michaelterry): Backport to PyKeras\n if (args.batchSize != null) {\n throw new ValueError('Cannot specify batchSize if batchInputShape is ' +\n 'specified when creating an InputLayer.');\n }\n }\n const dtype = args.dtype || 'float32';\n this.batchInputShape = batchInputShape;\n this.dtype = dtype;\n // TODO(michaelterry): Backport this to PyKeras?\n this.inputSpec = [{ shape: batchInputShape }];\n const inputTensor = new SymbolicTensor(this.dtype, this.batchInputShape, this, [], {}, this.name);\n inputTensor.nodeIndex = 0;\n inputTensor.tensorIndex = 0;\n // Create an input node to add to this.outboundNode.\n // (This call has side effects.)\n // tslint:disable-next-line:no-unused-expression\n new Node({\n outboundLayer: this,\n inboundLayers: [],\n nodeIndices: [],\n tensorIndices: [],\n inputTensors: [inputTensor],\n outputTensors: [inputTensor],\n inputMasks: [null],\n outputMasks: [null],\n inputShapes: [batchInputShape],\n outputShapes: [batchInputShape]\n });\n }\n apply(inputs, kwargs) {\n throw new ValueError('Cannot pass any input to an ' +\n `InputLayer's apply() method. InputLayer name: ${this.name}`);\n }\n dispose() {\n // dispose() for InputLayer is overridden as no-op.\n return { refCountAfterDispose: this._refCount, numDisposedVariables: 0 };\n }\n getConfig() {\n return {\n batchInputShape: this.batchInputShape,\n dtype: this.dtype,\n sparse: this.sparse,\n name: this.name\n };\n }\n}\n/** @nocollapse */\nInputLayer.className = 'InputLayer';\nserialization.registerClass(InputLayer);\nexport function Input(config) {\n if (config.batchShape == null && config.shape == null) {\n throw new Error('Please provide to Input either a `shape`' +\n ' or a `batchShape` argument. 
Note that ' +\n '`shape` does not include the batch ' +\n 'dimension.');\n }\n if (config.batchShape != null && config.shape != null) {\n // TODO(michaelterry): Backport to PyKeras.\n throw new ValueError('Please provide either a `shape` or `batchShape` ' +\n 'argument to Input, but not both.');\n }\n let batchShape = config.batchShape;\n if (config.shape != null && batchShape == null) {\n batchShape = [null].concat(config.shape);\n }\n let dtype = config.dtype;\n if (dtype == null) {\n dtype = 'float32';\n }\n const inputLayer = new InputLayer({\n batchInputShape: batchShape,\n name: config.name,\n dtype,\n sparse: config.sparse\n });\n const outputs = inputLayer.inboundNodes[0].outputTensors;\n return outputs[0];\n}\n//# sourceMappingURL=input_layer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport { dispose } from '@tensorflow/tfjs-core';\n/**\n * Turn any Scalar values in a Logs object into actual number values.\n *\n * @param logs The `Logs` object to be resolved in place.\n */\nexport async function resolveScalarsInLogs(logs) {\n if (logs == null) {\n return;\n }\n const promises = [];\n const keys = [];\n const scalarsToDispose = [];\n for (const key in logs) {\n const value = logs[key];\n if (typeof value !== 'number') {\n const valueScalar = value;\n promises.push(valueScalar.data());\n keys.push(key);\n scalarsToDispose.push(valueScalar);\n }\n }\n if (promises.length > 0) {\n const values = await Promise.all(promises);\n for (let i = 0; i < values.length; ++i) {\n logs[keys[i]] = values[i][0];\n }\n // Dispose the original scalar tensors.\n dispose(scalarsToDispose);\n }\n}\n/**\n * Dispose all Tensors in an UnresolvedLogs object.\n *\n * @param logs An `UnresolvedLogs` object potentially containing `tf.Tensor`s in\n * places where the values can be `tf.Tensor` or `number`.\n */\nexport function disposeTensorsInLogs(logs) {\n if (logs == null) {\n return;\n }\n for (const key in logs) {\n const value = logs[key];\n if (typeof value !== 'number') {\n value.dispose();\n }\n }\n}\n//# sourceMappingURL=logs.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* Original source: keras/callbacks.py */\nimport { add, div, keep, mul, nextFrame, tidy, util } from '@tensorflow/tfjs-core';\nimport { ValueError } from './errors';\nimport { resolveScalarsInLogs } from './logs';\nimport * as generic_utils from './utils/generic_utils';\n/** Verbosity logging level when fitting a model. */\nexport var ModelLoggingVerbosity;\n(function (ModelLoggingVerbosity) {\n ModelLoggingVerbosity[ModelLoggingVerbosity[\"SILENT\"] = 0] = \"SILENT\";\n ModelLoggingVerbosity[ModelLoggingVerbosity[\"VERBOSE\"] = 1] = \"VERBOSE\";\n})(ModelLoggingVerbosity || (ModelLoggingVerbosity = {}));\n/** How often to yield to the main thread when training (in ms). 
*/\nexport const DEFAULT_YIELD_EVERY_MS = 125;\n/**\n * Abstract base class used to build new callbacks.\n *\n * The `logs` dictionary that callback methods take as argument will contain\n * keys for quantities relevant to the current batch or epoch.\n *\n * Currently, the `.fit()` method of the `Sequential` model class\n * will include the following quantities in the `logs` that\n * it passes to its callbacks:\n *\n * onEpochEnd: Logs include `acc` and `loss`, and optionally include `valLoss`\n * (if validation is enabled in `fit`), and `valAcc` (if validation and\n * accuracy monitoring are enabled).\n * onBatchBegin: Logs include `size`, the number of samples in the current\n * batch.\n * onBatchEnd: Logs include `loss`, and optionally `acc` (if accuracy monitoring\n * is enabled).\n */\nexport class BaseCallback {\n constructor() {\n // TODO(michaelterry): This type is a best guess.\n this.validationData = null;\n }\n setParams(params) {\n this.params = params;\n }\n async onEpochBegin(epoch, logs) { }\n async onEpochEnd(epoch, logs) { }\n async onBatchBegin(batch, logs) { }\n async onBatchEnd(batch, logs) { }\n async onTrainBegin(logs) { }\n async onTrainEnd(logs) { }\n // LayersModel needs to call Callback.setModel(), but cannot actually depend\n // on Callback because that creates a cyclic dependency. Providing this no-op\n // method on BaseCallback breaks the cycle: this way LayersModel can depend on\n // BaseCallback but not on Callback. The argument is typed as `Container`\n // (the superclass of LayersModel) to avoid recapitulating the cycle. Callback\n // overrides this method and enforces that the argument is really a\n // LayersModel.\n setModel(model) {\n // Do nothing. Use Callback instead of BaseCallback to track the model.\n }\n}\n/**\n * Container abstracting a list of callbacks.\n */\nexport class CallbackList {\n // TODO(cais): When the need arises, uncomment the following lines and\n // implement the queue for time values.\n // private deltaTBatch: number;\n // private deltaTsBatchBegin: Array;\n // private deltaTsBatchEnd: Array;\n /**\n * Constructor of CallbackList.\n * @param callbacks Array of `Callback` instances.\n * @param queueLength Queue length for keeping running statistics over\n * callback execution time.\n */\n constructor(callbacks, queueLength = 10) {\n // TODO(cais): Make use of queueLength when implementing the queue for time\n // values.\n if (callbacks == null) {\n callbacks = [];\n }\n this.callbacks = callbacks;\n this.queueLength = queueLength;\n }\n append(callback) {\n this.callbacks.push(callback);\n }\n setParams(params) {\n for (const callback of this.callbacks) {\n callback.setParams(params);\n }\n }\n setModel(model) {\n for (const callback of this.callbacks) {\n callback.setModel(model);\n }\n }\n /**\n * Called at the start of an epoch.\n * @param epoch Index of epoch.\n * @param logs Dictionary of logs.\n */\n async onEpochBegin(epoch, logs) {\n if (logs == null) {\n logs = {};\n }\n for (const callback of this.callbacks) {\n await callback.onEpochBegin(epoch, logs);\n }\n }\n /**\n * Called at the end of an epoch.\n * @param epoch Index of epoch.\n * @param logs Dictionary of logs.\n */\n async onEpochEnd(epoch, logs) {\n if (logs == null) {\n logs = {};\n }\n for (const callback of this.callbacks) {\n await callback.onEpochEnd(epoch, logs);\n }\n }\n /**\n * Called right before processing a batch.\n * @param batch Index of batch within the current epoch.\n * @param logs Dictionary of logs.\n */\n async onBatchBegin(batch, logs) {\n 
if (logs == null) {\n logs = {};\n }\n for (const callback of this.callbacks) {\n await callback.onBatchBegin(batch, logs);\n }\n }\n /**\n * Called at the end of a batch.\n * @param batch Index of batch within the current epoch.\n * @param logs Dictionary of logs.\n */\n async onBatchEnd(batch, logs) {\n if (logs == null) {\n logs = {};\n }\n for (const callback of this.callbacks) {\n await callback.onBatchEnd(batch, logs);\n }\n }\n /**\n * Called at the beginning of training.\n * @param logs Dictionary of logs.\n */\n async onTrainBegin(logs) {\n if (logs == null) {\n logs = {};\n }\n for (const callback of this.callbacks) {\n await callback.onTrainBegin(logs);\n }\n }\n /**\n * Called at the end of training.\n * @param logs Dictionary of logs.\n */\n async onTrainEnd(logs) {\n if (logs == null) {\n logs = {};\n }\n for (const callback of this.callbacks) {\n await callback.onTrainEnd(logs);\n }\n }\n}\n/**\n * Callback that accumulates epoch averages of metrics.\n *\n * This callback is automatically applied to every LayersModel.\n */\nexport class BaseLogger extends BaseCallback {\n constructor() {\n super();\n }\n async onEpochBegin(epoch) {\n this.seen = 0;\n this.totals = {};\n }\n async onBatchEnd(batch, logs) {\n if (logs == null) {\n logs = {};\n }\n const batchSize = logs['size'] == null ? 0 : logs['size'];\n this.seen += batchSize;\n for (const key in logs) {\n const value = logs[key];\n if (typeof value === 'number') {\n if (!this.totals.hasOwnProperty(key)) {\n this.totals[key] = 0;\n }\n this.totals[key] = this.totals[key] + value * batchSize;\n }\n else {\n let oldTotalsToDispose;\n if (key in this.totals) {\n oldTotalsToDispose = this.totals[key];\n }\n else {\n this.totals[key] = 0;\n }\n const total = tidy(() => add((this.totals[key]), mul(value, batchSize)));\n this.totals[key] = total;\n if (oldTotalsToDispose != null) {\n oldTotalsToDispose.dispose();\n }\n }\n }\n }\n async onEpochEnd(epoch, logs) {\n if (logs != null) {\n for (const key of this.params['metrics']) {\n if (this.totals[key] == null) {\n continue;\n }\n if (typeof this.totals[key] === 'number') {\n logs[key] = this.totals[key] / this.seen;\n }\n else {\n tidy(() => {\n const log = mul(div(1, this.seen), this.totals[key]);\n logs[key] = log;\n this.totals[key].dispose();\n keep(logs[key]);\n });\n }\n }\n }\n }\n}\n/**\n * Callback that records events into a `History` object. This callback is\n * automatically applied to every TF.js Layers model. 
The `History` object\n * gets returned by the `fit` method of models.\n */\nexport class History extends BaseCallback {\n async onTrainBegin(logs) {\n this.epoch = [];\n this.history = {};\n }\n async onEpochEnd(epoch, logs) {\n if (logs == null) {\n logs = {};\n }\n this.epoch.push(epoch);\n for (const key in logs) {\n if (this.history[key] == null) {\n this.history[key] = [];\n }\n this.history[key].push(logs[key]);\n }\n }\n /**\n * Await the values of all losses and metrics.\n */\n async syncData() {\n const promises = [];\n const keys = [];\n const indices = [];\n for (const key in this.history) {\n const valueArray = this.history[key];\n for (let i = 0; i < valueArray.length; ++i) {\n if (typeof valueArray[i] !== 'number') {\n const valueScalar = valueArray[i];\n promises.push(valueScalar.data());\n keys.push(key);\n indices.push(i);\n }\n }\n }\n const values = await Promise.all(promises);\n for (let n = 0; n < values.length; ++n) {\n const tensorToDispose = this.history[keys[n]][indices[n]];\n tensorToDispose.dispose();\n this.history[keys[n]][indices[n]] = values[n][0];\n }\n }\n}\n/**\n * Custom callback for training.\n */\nexport class CustomCallback extends BaseCallback {\n constructor(args, yieldEvery) {\n super();\n this.currentEpoch = 0;\n this.yieldEvery = yieldEvery || 'auto';\n if (this.yieldEvery === 'auto') {\n this.yieldEvery = DEFAULT_YIELD_EVERY_MS;\n }\n if (this.yieldEvery === 'never' && args.onYield != null) {\n throw new Error('yieldEvery is `never` but you provided an `onYield` callback. ' +\n 'Either change `yieldEvery` or remove the callback');\n }\n if (util.isNumber(this.yieldEvery)) {\n // Decorate `maybeWait` so it will be called at most once every\n // `yieldEvery` ms.\n this.maybeWait = generic_utils.debounce(this.maybeWait.bind(this), this.yieldEvery);\n }\n this.trainBegin = args.onTrainBegin;\n this.trainEnd = args.onTrainEnd;\n this.epochBegin = args.onEpochBegin;\n this.epochEnd = args.onEpochEnd;\n this.batchBegin = args.onBatchBegin;\n this.batchEnd = args.onBatchEnd;\n this.yield = args.onYield;\n }\n async maybeWait(epoch, batch, logs) {\n const ps = [];\n if (this.yield != null) {\n await resolveScalarsInLogs(logs);\n ps.push(this.yield(epoch, batch, logs));\n }\n ps.push(nextFrame());\n await Promise.all(ps);\n }\n async onEpochBegin(epoch, logs) {\n this.currentEpoch = epoch;\n if (this.epochBegin != null) {\n await resolveScalarsInLogs(logs);\n await this.epochBegin(epoch, logs);\n }\n }\n async onEpochEnd(epoch, logs) {\n const ps = [];\n if (this.epochEnd != null) {\n await resolveScalarsInLogs(logs);\n ps.push(this.epochEnd(epoch, logs));\n }\n if (this.yieldEvery === 'epoch') {\n ps.push(nextFrame());\n }\n await Promise.all(ps);\n }\n async onBatchBegin(batch, logs) {\n if (this.batchBegin != null) {\n await resolveScalarsInLogs(logs);\n await this.batchBegin(batch, logs);\n }\n }\n async onBatchEnd(batch, logs) {\n const ps = [];\n if (this.batchEnd != null) {\n await resolveScalarsInLogs(logs);\n ps.push(this.batchEnd(batch, logs));\n }\n if (this.yieldEvery === 'batch') {\n ps.push(nextFrame());\n }\n else if (util.isNumber(this.yieldEvery)) {\n ps.push(this.maybeWait(this.currentEpoch, batch, logs));\n }\n await Promise.all(ps);\n }\n async onTrainBegin(logs) {\n if (this.trainBegin != null) {\n await resolveScalarsInLogs(logs);\n await this.trainBegin(logs);\n }\n }\n async onTrainEnd(logs) {\n if (this.trainEnd != null) {\n await resolveScalarsInLogs(logs);\n await this.trainEnd(logs);\n }\n }\n}\n/**\n * Standardize callbacks or 
configurations of them to an Array of callbacks.\n */\nexport function standardizeCallbacks(callbacks, yieldEvery) {\n if (callbacks == null) {\n callbacks = {};\n }\n if (callbacks instanceof BaseCallback) {\n return [callbacks];\n }\n if (Array.isArray(callbacks) && callbacks[0] instanceof BaseCallback) {\n return callbacks;\n }\n // Convert custom callback configs to custom callback objects.\n const callbackConfigs = generic_utils.toList(callbacks);\n return callbackConfigs.map(callbackConfig => new CustomCallback(callbackConfig, yieldEvery));\n}\n/**\n * A global registry for callback constructors to be used during\n * LayersModel.fit().\n */\nexport class CallbackConstructorRegistry {\n /**\n * Blocks public access to constructor.\n */\n constructor() { }\n /**\n * Register a tf.LayersModel.fit() callback constructor.\n *\n * The registered callback constructor will be used to instantiate\n * callbacks for every tf.LayersModel.fit() call afterwards.\n *\n * @param verbosityLevel Level of verbosity at which the `callbackConstructor`\n * is to be reigstered.\n * @param callbackConstructor A no-arg constructor for `tf.Callback`.\n * @throws Error, if the same callbackConstructor has been registered before,\n * either at the same or a different `verbosityLevel`.\n */\n static registerCallbackConstructor(verbosityLevel, callbackConstructor) {\n util.assert(verbosityLevel >= 0 && Number.isInteger(verbosityLevel), () => `Verbosity level is expected to be an integer >= 0, ` +\n `but got ${verbosityLevel}`);\n CallbackConstructorRegistry.checkForDuplicate(callbackConstructor);\n if (CallbackConstructorRegistry.constructors[verbosityLevel] == null) {\n CallbackConstructorRegistry.constructors[verbosityLevel] = [];\n }\n CallbackConstructorRegistry.constructors[verbosityLevel].push(callbackConstructor);\n }\n static checkForDuplicate(callbackConstructor) {\n for (const levelName in CallbackConstructorRegistry.constructors) {\n const constructors = CallbackConstructorRegistry.constructors[+levelName];\n constructors.forEach(ctor => {\n if (ctor === callbackConstructor) {\n throw new ValueError('Duplicate callback constructor.');\n }\n });\n }\n }\n /**\n * Clear all registered callback constructors.\n */\n static clear() {\n CallbackConstructorRegistry.constructors = {};\n }\n /**\n * Create callbacks using the registered callback constructors.\n *\n * Given `verbosityLevel`, all constructors registered at that level or above\n * will be called and the instantiated callbacks will be used.\n *\n * @param verbosityLevel: Level of verbosity.\n */\n static createCallbacks(verbosityLevel) {\n const constructors = [];\n for (const levelName in CallbackConstructorRegistry.constructors) {\n const level = +levelName;\n if (verbosityLevel >= level) {\n constructors.push(...CallbackConstructorRegistry.constructors[level]);\n }\n }\n return constructors.map(ctor => new ctor());\n }\n}\nCallbackConstructorRegistry.constructors = {};\nexport function configureCallbacks(callbacks, verbose, epochs, initialEpoch, numTrainSamples, stepsPerEpoch, batchSize, doValidation, callbackMetrics) {\n const history = new History();\n const actualCallbacks = [\n new BaseLogger(), ...CallbackConstructorRegistry.createCallbacks(verbose)\n ];\n if (callbacks != null) {\n actualCallbacks.push(...callbacks);\n }\n actualCallbacks.push(history);\n const callbackList = new CallbackList(actualCallbacks);\n // TODO(cais): Figure out when this LayersModel instance can have a\n // dynamically\n // set property called 'callback_model' as 
in PyKeras.\n callbackList.setParams({\n epochs,\n initialEpoch,\n samples: numTrainSamples,\n steps: stepsPerEpoch,\n batchSize,\n verbose,\n doValidation,\n metrics: callbackMetrics,\n });\n return { callbackList, history };\n}\n//# sourceMappingURL=base_callbacks.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* Original Source layers/__init__.py */\nimport { serialization } from '@tensorflow/tfjs-core';\nimport { deserializeKerasObject } from '../utils/generic_utils';\n/**\n * Instantiate a layer from a config dictionary.\n * @param config dict of the form {class_name: str, config: dict}\n * @param customObjects dict mapping class names (or function names)\n * of custom (non-Keras) objects to class/functions\n * @param fastWeightInit Optional flag to use fast weight initialization\n * during deserialization. This is applicable to cases in which\n * the initialization will be immediately overwritten by loaded weight\n * values. Default: `false`.\n * @returns Layer instance (may be LayersModel, Sequential, Layer...)\n */\nexport function deserialize(config, customObjects = {}, fastWeightInit = false) {\n return deserializeKerasObject(config, serialization.SerializationMap.getMap().classNameMap, customObjects, 'layer', fastWeightInit);\n}\n//# sourceMappingURL=serialization.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* Original Source: losses.py */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { tidy, util } from '@tensorflow/tfjs-core';\nimport { epsilon } from './backend/common';\nimport * as K from './backend/tfjs_backend';\nimport { ValueError } from './errors';\n/**\n * Normalizes a tensor wrt the L2 norm alongside the specified axis.\n * @param x\n * @param axis Axis along which to perform normalization.\n */\nexport function l2Normalize(x, axis) {\n return tidy(() => {\n if (x.dtype !== 'float32') {\n x = x.asType('float32');\n }\n const squareSum = tfc.sum(K.square(x), axis, true);\n const epsilonTensor = tfc.fill(squareSum.shape, epsilon());\n const norm = tfc.sqrt(tfc.maximum(squareSum, epsilonTensor));\n return tfc.div(x, norm);\n });\n}\nexport function meanSquaredError(yTrue, yPred) {\n return tidy(() => tfc.mean(K.square(tfc.sub(yPred, yTrue)), -1));\n}\nexport function meanAbsoluteError(yTrue, yPred) {\n return tidy(() => tfc.mean(tfc.abs(tfc.sub(yPred, yTrue)), -1));\n}\nexport function meanAbsolutePercentageError(yTrue, yPred) {\n return tidy(() => {\n const diff = tfc.sub(yTrue, yPred);\n const clippedTrue = tfc.clipByValue(tfc.abs(yTrue), epsilon(), Number.MAX_VALUE);\n const absResult = tfc.abs(tfc.div(diff, clippedTrue));\n return tfc.mul(100, tfc.mean(absResult, -1));\n });\n}\nexport function meanSquaredLogarithmicError(yTrue, yPred) {\n return tidy(() => {\n const clippedPred = tfc.clipByValue(yPred, epsilon(), Number.MAX_VALUE);\n const firstLog = tfc.log(tfc.add(1, clippedPred));\n const clippedTrue = tfc.clipByValue(yTrue, epsilon(), Number.MAX_VALUE);\n const secondLog = tfc.log(tfc.add(1, clippedTrue));\n return tfc.mean(K.square(tfc.sub(firstLog, 
secondLog)), -1);\n });\n}\nexport function squaredHinge(yTrue, yPred) {\n return tidy(() => {\n const maxResult = tfc.maximum(0, tfc.sub(1, tfc.mul(yTrue, yPred)));\n return tfc.mean(K.square(maxResult), -1);\n });\n}\nexport function hinge(yTrue, yPred) {\n return tidy(() => {\n const maxResult = tfc.maximum(0, tfc.sub(1, tfc.mul(yTrue, yPred)));\n return tfc.mean(maxResult, -1);\n });\n}\nexport function categoricalHinge(yTrue, yPred) {\n return tidy(() => {\n const pos = tfc.sum(tfc.mul(yTrue, yPred), -1);\n const neg = tfc.max(tfc.mul(tfc.sub(1, yTrue), yPred), -1);\n return tfc.maximum(0, tfc.add(1, tfc.sub(neg, pos)));\n });\n}\n/**\n * Logarithm of the hyperbolic cosine of the prediction error.\n *\n * `log(cosh(x))` is approximately equal to `(x ** 2) / 2` for small `x` and\n * to `abs(x) - log(2)` for large `x`. This means that 'logcosh' works mostly\n * like the mean squared error, but will not be so strongly affected by the\n * occasional wildly incorrect prediction.\n */\nexport function logcosh(yTrue, yPred) {\n return tidy(() => {\n const log2 = Math.log(2);\n const predictionDiff = tfc.sub(yPred, yTrue);\n const logcoshResult = tfc.sub(tfc.add(predictionDiff, tfc.softplus(tfc.mul(-2, predictionDiff))), log2);\n return tfc.mean(logcoshResult, -1);\n });\n}\nexport function categoricalCrossentropy(target, output, fromLogits = false) {\n return tidy(() => {\n if (fromLogits) {\n output = tfc.softmax(output);\n }\n else {\n // scale preds so that the class probabilities of each sample sum to 1.\n const outputSum = tfc.sum(output, output.shape.length - 1, true);\n output = tfc.div(output, outputSum);\n }\n output = tfc.clipByValue(output, epsilon(), 1 - epsilon());\n return tfc.neg(tfc.sum(tfc.mul(target.toFloat(), tfc.log(output)), output.shape.length - 1));\n });\n}\n/**\n * Categorical crossentropy with integer targets.\n *\n * @param target An integer tensor.\n * @param output A tensor resulting from a softmax (unless `fromLogits` is\n * `true`, in which case `output` is expected to be the logits).\n * @param fromLogits Boolean, whether `output` is the result of a softmax, or is\n * a tensor of logits.\n */\nexport function sparseCategoricalCrossentropy(target, output, fromLogits = false) {\n return tidy(() => {\n const flatTarget = tfc.floor(K.flatten(target)).toInt();\n output = tfc.clipByValue(output, epsilon(), 1 - epsilon());\n const outputShape = output.shape;\n const oneHotTarget = tfc.oneHot(flatTarget, outputShape[outputShape.length - 1])\n .reshape(outputShape);\n return categoricalCrossentropy(oneHotTarget, output, fromLogits);\n });\n}\n/**\n * From TensorFlow's implementation in nn_impl.py:\n *\n * For brevity, let `x = logits`, `z = labels`. 
The logistic loss is\n * z * -log(sigmoid(x)) + (1 - z) * -log(1 - sigmoid(x))\n * = z * -log(1 / (1 + exp(-x))) + (1 - z) * -log(exp(-x) / (1 + exp(-x)))\n * = z * log(1 + exp(-x)) + (1 - z) * (-log(exp(-x)) + log(1 + exp(-x)))\n * = z * log(1 + exp(-x)) + (1 - z) * (x + log(1 + exp(-x))\n * = (1 - z) * x + log(1 + exp(-x))\n * = x - x * z + log(1 + exp(-x))\n * For x < 0, to avoid overflow in exp(-x), we reformulate the above\n * x - x * z + log(1 + exp(-x))\n * = log(exp(x)) - x * z + log(1 + exp(-x))\n * = - x * z + log(1 + exp(x))\n * Hence, to ensure stability and avoid overflow, the implementation uses this\n * equivalent formulation\n * max(x, 0) - x * z + log(1 + exp(-abs(x)))\n *\n * @param labels The labels.\n * @param logits The logits.\n */\nexport function sigmoidCrossEntropyWithLogits(labels, logits) {\n if (!util.arraysEqual(labels.shape, logits.shape)) {\n throw new ValueError(`logits and labels must have the same shape, but got shapes ` +\n `${JSON.stringify(labels.shape)} and ${JSON.stringify(logits.shape)}`);\n }\n return tidy(() => {\n // The logistic loss formula from above is\n // x - x * z + log(1 + exp(-x))\n // For x < 0, a more numerically stable formula is\n // -x * z + log(1 + exp(x))\n // Note that these two expressions can be combined into the following:\n // max(x, 0) - x * z + log(1 + exp(-abs(x)))\n const reluLogits = logits.relu();\n const negAbsLogits = logits.abs().neg();\n return reluLogits.sub(logits.mul(labels)).add(negAbsLogits.exp().log1p());\n });\n}\nexport function binaryCrossentropy(yTrue, yPred) {\n return tidy(() => {\n let y;\n y = tfc.clipByValue(yPred, epsilon(), 1 - epsilon());\n y = tfc.log(tfc.div(y, tfc.sub(1, y)));\n return tfc.mean(sigmoidCrossEntropyWithLogits(yTrue, y), -1);\n });\n}\nexport function kullbackLeiblerDivergence(yTrue, yPred) {\n return tidy(() => {\n const clippedTrue = tfc.clipByValue(yTrue, epsilon(), 1);\n const clippedPred = tfc.clipByValue(yPred, epsilon(), 1);\n return tfc.sum(tfc.mul(yTrue, tfc.log(tfc.div(clippedTrue, clippedPred))), -1);\n });\n}\nexport function poisson(yTrue, yPred) {\n return tidy(() => {\n const logPred = tfc.log(tfc.add(epsilon(), yPred));\n return tfc.mean(tfc.sub(yPred, tfc.mul(yTrue, logPred)), -1);\n });\n}\nexport function cosineProximity(yTrue, yPred) {\n return tidy(() => {\n const trueNormalized = l2Normalize(yTrue, -1);\n const predNormalized = l2Normalize(yPred, -1);\n const trueXPred = tfc.mul(trueNormalized, predNormalized);\n return tfc.neg(tfc.sum(trueXPred, -1));\n });\n}\nexport const mse = meanSquaredError;\nexport const MSE = meanSquaredError;\nexport const mae = meanAbsoluteError;\nexport const MAE = meanAbsoluteError;\nexport const mape = meanAbsolutePercentageError;\nexport const MAPE = meanAbsolutePercentageError;\nexport const msle = meanSquaredLogarithmicError;\nexport const MSLE = meanSquaredLogarithmicError;\nexport const kld = kullbackLeiblerDivergence;\nexport const KLD = kullbackLeiblerDivergence;\nexport const cosine = cosineProximity;\n// TODO(michaelterry): Add deserialize() function.\nexport const lossesMap = {\n meanSquaredError,\n meanAbsoluteError,\n meanAbsolutePercentageError,\n meanSquaredLogarithmicError,\n squaredHinge,\n hinge,\n categoricalHinge,\n logcosh,\n categoricalCrossentropy,\n sparseCategoricalCrossentropy,\n binaryCrossentropy,\n kullbackLeiblerDivergence,\n poisson,\n cosineProximity\n};\n// Porting note: This diverges from the PyKeras implementation and may need to\n// change based on (de)serialization requirements.\nexport function 
get(identifierOrFn) {\n if (typeof identifierOrFn === 'string') {\n if (identifierOrFn in lossesMap) {\n return lossesMap[identifierOrFn];\n }\n let errMsg = `Unknown loss ${identifierOrFn}`;\n if (identifierOrFn.toLowerCase().includes('softmaxcrossentropy')) {\n errMsg = `Unknown loss ${identifierOrFn}. ` +\n 'Use \"categoricalCrossentropy\" as the string name for ' +\n 'tf.losses.softmaxCrossEntropy';\n }\n throw new ValueError(errMsg);\n }\n else {\n return identifierOrFn;\n }\n}\n//# sourceMappingURL=losses.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Built-in metrics.\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { tidy } from '@tensorflow/tfjs-core';\nimport * as K from './backend/tfjs_backend';\nimport { NotImplementedError, ValueError } from './errors';\nimport { categoricalCrossentropy as categoricalCrossentropyLoss, cosineProximity, meanAbsoluteError, meanAbsolutePercentageError, meanSquaredError, sparseCategoricalCrossentropy as sparseCategoricalCrossentropyLoss } from './losses';\nimport { binaryCrossentropy as lossBinaryCrossentropy } from './losses';\nimport { lossesMap } from './losses';\nimport * as util from './utils/generic_utils';\nexport function binaryAccuracy(yTrue, yPred) {\n return tidy(() => {\n const threshold = tfc.mul(.5, tfc.onesLike(yPred));\n const yPredThresholded = K.cast(tfc.greater(yPred, threshold), yTrue.dtype);\n return tfc.mean(tfc.equal(yTrue, yPredThresholded), -1);\n });\n}\nexport function categoricalAccuracy(yTrue, yPred) {\n return tidy(() => K.cast(tfc.equal(tfc.argMax(yTrue, -1), tfc.argMax(yPred, -1)), 'float32'));\n}\nfunction truePositives(yTrue, yPred) {\n return tidy(() => {\n return tfc.logicalAnd(yTrue.equal(1), yPred.equal(1)).sum().cast('float32');\n });\n}\nfunction falseNegatives(yTrue, yPred) {\n return tidy(() => {\n return tfc.logicalAnd(yTrue.equal(1), yPred.equal(0)).sum().cast('float32');\n });\n}\nfunction falsePositives(yTrue, yPred) {\n return tidy(() => {\n return tfc.logicalAnd(yTrue.equal(0), yPred.equal(1)).sum().cast('float32');\n });\n}\nexport function precision(yTrue, yPred) {\n return tidy(() => {\n const tp = truePositives(yTrue, yPred);\n const fp = falsePositives(yTrue, yPred);\n const denominator = tp.add(fp);\n return tfc.where(tfc.greater(denominator, 0), tp.div(denominator), 0)\n .cast('float32');\n });\n}\nexport function recall(yTrue, yPred) {\n return tidy(() => {\n const tp = truePositives(yTrue, yPred);\n const fn = falseNegatives(yTrue, yPred);\n const denominator = tp.add(fn);\n return tfc.where(tfc.greater(denominator, 0), tp.div(denominator), 0)\n .cast('float32');\n });\n}\nexport function binaryCrossentropy(yTrue, yPred) {\n return lossBinaryCrossentropy(yTrue, yPred);\n}\nexport function sparseCategoricalAccuracy(yTrue, yPred) {\n if (yTrue.rank === yPred.rank) {\n yTrue = yTrue.squeeze([yTrue.rank - 1]);\n }\n yPred = yPred.argMax(-1);\n if (yPred.dtype !== yTrue.dtype) {\n yPred = yPred.asType(yTrue.dtype);\n }\n return tfc.equal(yTrue, yPred).asType('float32');\n}\nexport function topKCategoricalAccuracy(yTrue, yPred) {\n throw new NotImplementedError();\n}\nexport function sparseTopKCategoricalAccuracy(yTrue, yPred) {\n throw new NotImplementedError();\n}\n// Aliases.\nexport const mse = meanSquaredError;\nexport const 
MSE = meanSquaredError;\nexport const mae = meanAbsoluteError;\nexport const MAE = meanAbsoluteError;\nexport const mape = meanAbsolutePercentageError;\nexport const MAPE = meanAbsolutePercentageError;\nexport const categoricalCrossentropy = categoricalCrossentropyLoss;\nexport const cosine = cosineProximity;\nexport const sparseCategoricalCrossentropy = sparseCategoricalCrossentropyLoss;\n// TODO(cais, nielsene): Add serialize().\nexport const metricsMap = {\n binaryAccuracy,\n categoricalAccuracy,\n precision,\n categoricalCrossentropy,\n sparseCategoricalCrossentropy,\n mse,\n MSE,\n mae,\n MAE,\n mape,\n MAPE,\n cosine\n};\nexport function get(identifier) {\n if (typeof identifier === 'string' && identifier in metricsMap) {\n return metricsMap[identifier];\n }\n else if (typeof identifier !== 'string' && identifier != null) {\n return identifier;\n }\n else {\n throw new ValueError(`Unknown metric ${identifier}`);\n }\n}\n/**\n * Get the shortcut function name.\n *\n * If the fn name is a string,\n * directly return the string name.\n * If the function is included in metricsMap or lossesMap,\n * return key of the map.\n * - If the function relative to multiple keys,\n * return the first found key as the function name.\n * - If the function exists in both lossesMap and metricsMap,\n * search lossesMap first.\n * If the function is not included in metricsMap or lossesMap,\n * return the function name.\n *\n * @param fn loss function, metric function, or short cut name.\n * @returns Loss or Metric name in string.\n */\nexport function getLossOrMetricName(fn) {\n util.assert(fn !== null, `Unknown LossOrMetricFn ${fn}`);\n if (typeof fn === 'string') {\n return fn;\n }\n else {\n let fnName;\n for (const key of Object.keys(lossesMap)) {\n if (lossesMap[key] === fn) {\n fnName = key;\n break;\n }\n }\n if (fnName !== undefined) {\n return fnName;\n }\n for (const key of Object.keys(metricsMap)) {\n if (metricsMap[key] === fn) {\n fnName = key;\n break;\n }\n }\n if (fnName !== undefined) {\n return fnName;\n }\n return fn.name;\n }\n}\n//# sourceMappingURL=metrics.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Optimizers.\n */\nimport { train } from '@tensorflow/tfjs-core';\nimport { epsilon } from './backend/common';\nimport { ValueError } from './errors';\n// Add (de)serialize()\n// Porting note: This diverges from the PyKeras implementation and may need to\n// change based on (de)serialization requirements.\nexport function getOptimizer(identifier) {\n const optimizerMap = {\n 'Adagrad': () => train.adagrad(0.01),\n 'Adadelta': () => train.adadelta(1, 0.95, epsilon()),\n 'Adam': () => train.adam(0.001, 0.9, 0.999, epsilon()),\n 'Adamax': () => train.adamax(0.002, 0.9, 0.999, epsilon(), 0),\n 'RMSProp': () => train.rmsprop(0.001, 0.9, 0, epsilon()),\n 'SGD': () => train.sgd(0.01)\n };\n optimizerMap['adagrad'] = optimizerMap['Adagrad'];\n optimizerMap['adadelta'] = optimizerMap['Adadelta'];\n optimizerMap['adam'] = optimizerMap['Adam'];\n optimizerMap['adamax'] = optimizerMap['Adamax'];\n optimizerMap['rmsprop'] = optimizerMap['RMSProp'];\n optimizerMap['sgd'] = optimizerMap['SGD'];\n if (identifier in optimizerMap) {\n return optimizerMap[identifier]();\n }\n throw new ValueError(`Unknown Optimizer ${identifier}`);\n}\n//# 
sourceMappingURL=optimizers.js.map", "/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/** Utility functions related to user-defined metadata. */\n// Maximum recommended serialized size for user-defined metadata.\n// Beyond this limit, a warning message will be printed during model loading and\n// saving.\nexport const MAX_USER_DEFINED_METADATA_SERIALIZED_LENGTH = 1 * 1024 * 1024;\n/**\n * Check validity of user-defined metadata.\n *\n * @param userDefinedMetadata\n * @param modelName Name of the model that the user-defined metadata belongs to.\n * Used during construction of error messages.\n * @param checkSize Whether to check the size of the metadata is under\n * recommended limit. Default: `false`. If `true`, will try stringify the\n * JSON object and print a console warning if the serialzied size is above the\n * limit.\n * @throws Error if `userDefinedMetadata` is not a plain JSON object.\n */\nexport function checkUserDefinedMetadata(userDefinedMetadata, modelName, checkSize = false) {\n if (userDefinedMetadata == null ||\n typeof userDefinedMetadata !== 'object' ||\n Object.getPrototypeOf(userDefinedMetadata) !== Object.prototype ||\n !plainObjectCheck(userDefinedMetadata)) {\n throw new Error('User-defined metadata is expected to be a JSON object, but is not.');\n }\n if (checkSize) {\n const out = JSON.stringify(userDefinedMetadata);\n if (out.length > MAX_USER_DEFINED_METADATA_SERIALIZED_LENGTH) {\n console.warn(`User-defined metadata of model \"${modelName}\" is too large in ` +\n `size (length=${out.length} when serialized). It is not ` +\n `recommended to store such large objects in user-defined metadata. ` +\n `Please make sure its serialized length is <= ` +\n `${MAX_USER_DEFINED_METADATA_SERIALIZED_LENGTH}.`);\n }\n }\n}\n/**\n * Check if an input is plain JSON object or any valid subfield of it.\n *\n * @param x The input to be checked.\n * @param assertObject Whether to assert `x` is a JSON object, i.e., reject\n * cases of arrays and primitives.\n * @return Returns `true` if and only if `x` is a plain JSON object,\n * a JSON-valid primitive including string, number, boolean and null,\n * or an array of the said types.\n */\n// tslint:disable-next-line:no-any\nexport function plainObjectCheck(x) {\n if (x === null) {\n // Note: typeof `null` is 'object', and `null` is valid in JSON.\n return true;\n }\n else if (typeof x === 'object') {\n if (Object.getPrototypeOf(x) === Object.prototype) {\n // `x` is a JavaScript object and its prototype is Object.\n const keys = Object.keys(x);\n for (const key of keys) {\n if (typeof key !== 'string') {\n // JSON keys must be strings.\n return false;\n }\n if (!plainObjectCheck(x[key])) { // Recursive call.\n return false;\n }\n }\n return true;\n }\n else {\n // `x` is a JavaScript object but its prototype is not Object.\n if (Array.isArray(x)) {\n // `x` is a JavaScript array.\n for (const item of x) {\n if (!plainObjectCheck(item)) { // Recursive call.\n return false;\n }\n }\n return true;\n }\n else {\n // `x` is a JavaScript object and its prototype is not Object,\n // and it's not an Array. 
I.e., it's a complex object such as\n // `Error` and `Date`.\n return false;\n }\n }\n }\n else {\n // `x` is not a JavaScript object or `null`.\n const xType = typeof x;\n return xType === 'string' || xType === 'number' || xType === 'boolean';\n }\n}\n//# sourceMappingURL=user_defined_metadata.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport { countParamsInWeights } from './variable_utils';\n/**\n * Print the summary of a LayersModel object.\n *\n * @param model tf.LayersModel instance.\n * @param lineLength Total length of printed lines. Set this to adapt to the\n * display to different terminal or console sizes.\n * @param positions Relative or absolute positions of log elements in each\n * line. Each number corresponds to right-most (i.e., ending) position of a\n * column.\n * If not provided, defaults to `[0.45, 0.85, 1]` for sequential-like\n * models and `[0.33, 0.55, 0.67, 1]` for non-sequential like models.\n * @param printFn Print function to use.\n * It will be called on each line of the summary. You can provide a custom\n * function in order to capture the string summary. Defaults to `console.log`.\n */\nexport function printSummary(model, lineLength, positions, \n// tslint:disable-next-line:no-any\nprintFn = console.log) {\n const sequentialLike = isModelSequentialLike(model);\n // Header names for different log elements.\n const toDisplay = ['Layer (type)', 'Output shape', 'Param #'];\n if (sequentialLike) {\n lineLength = lineLength || 65;\n positions = positions || [0.45, 0.85, 1];\n }\n else {\n lineLength = lineLength || 98;\n positions = positions || [0.33, 0.55, 0.67, 1];\n // Header names for different log elements.\n }\n if (positions[positions.length - 1] <= 1) {\n // `positions` is relative. Convert it to absolute positioning.\n positions = positions.map(p => Math.floor(lineLength * p));\n }\n let relevantNodes;\n if (!sequentialLike) {\n toDisplay.push('Receives inputs');\n relevantNodes = [];\n for (const depth in model.nodesByDepth) {\n relevantNodes.push(...model.nodesByDepth[depth]);\n }\n }\n printFn('_'.repeat(lineLength));\n printRow(toDisplay, positions, printFn);\n printFn('='.repeat(lineLength));\n const layers = model.layers;\n for (let i = 0; i < layers.length; ++i) {\n if (sequentialLike) {\n printLayerSummary(layers[i], positions, printFn);\n }\n else {\n printLayerSummaryWithConnections(layers[i], positions, relevantNodes, printFn);\n }\n printFn((i === layers.length - 1 ? 
'=' : '_').repeat(lineLength));\n }\n // tslint:disable-next-line:no-any\n model.checkTrainableWeightsConsistency();\n const trainableCount = countTrainableParams(model);\n const nonTrainableCount = countParamsInWeights(model.nonTrainableWeights);\n printFn(`Total params: ${trainableCount + nonTrainableCount}`);\n printFn(`Trainable params: ${trainableCount}`);\n printFn(`Non-trainable params: ${nonTrainableCount}`);\n printFn('_'.repeat(lineLength));\n}\nfunction countTrainableParams(model) {\n let trainableCount;\n // tslint:disable:no-any\n if (model.collectedTrainableWeights != null) {\n trainableCount =\n countParamsInWeights(model.collectedTrainableWeights);\n }\n else {\n trainableCount = countParamsInWeights(model.trainableWeights);\n }\n // tslint:enable:no-any\n return trainableCount;\n}\nfunction isModelSequentialLike(model) {\n let sequentialLike = true;\n const nodesByDepth = [];\n const nodes = [];\n for (const depth in model.nodesByDepth) {\n nodesByDepth.push(model.nodesByDepth[depth]);\n }\n for (const depthNodes of nodesByDepth) {\n if (depthNodes.length > 1 ||\n depthNodes.length === 1 && depthNodes[0].inboundLayers.length > 1) {\n sequentialLike = false;\n break;\n }\n nodes.push(...depthNodes);\n }\n if (sequentialLike) {\n // Search for shared layers.\n for (const layer of model.layers) {\n let flag = false;\n for (const node of layer.inboundNodes) {\n if (nodes.indexOf(node) !== -1) {\n if (flag) {\n sequentialLike = false;\n break;\n }\n else {\n flag = true;\n }\n }\n }\n if (!sequentialLike) {\n break;\n }\n }\n }\n return sequentialLike;\n}\nfunction printRow(fields, positions, \n// tslint:disable-next-line:no-any\nprintFn = console.log) {\n let line = '';\n for (let i = 0; i < fields.length; ++i) {\n if (i > 0) {\n line = line.slice(0, line.length - 1) + ' ';\n }\n line += fields[i];\n line = line.slice(0, positions[i]);\n line += ' '.repeat(positions[i] - line.length);\n }\n printFn(line);\n}\n/**\n * Prints a summary for a single Layer, without connectivity information.\n *\n * @param layer: Layer instance to print.\n */\nfunction printLayerSummary(layer, positions, \n// tslint:disable-next-line:no-any\nprintFn) {\n let outputShape;\n try {\n outputShape = JSON.stringify(layer.outputShape);\n }\n catch (err) {\n outputShape = 'multiple';\n }\n const name = layer.name;\n const className = layer.getClassName();\n const fields = [`${name} (${className})`, outputShape, layer.countParams().toString()];\n printRow(fields, positions, printFn);\n}\n/**\n * Prints a summary for a single Layer, with connectivity information.\n */\nfunction printLayerSummaryWithConnections(layer, positions, relevantNodes, \n// tslint:disable-next-line:no-any\nprintFn) {\n let outputShape;\n try {\n outputShape = JSON.stringify(layer.outputShape);\n }\n catch (err) {\n outputShape = 'multiple';\n }\n const connections = [];\n for (const node of layer.inboundNodes) {\n if (relevantNodes != null && relevantNodes.length > 0 &&\n relevantNodes.indexOf(node) === -1) {\n continue;\n }\n for (let i = 0; i < node.inboundLayers.length; ++i) {\n const inboundLayer = node.inboundLayers[i].name;\n const inboundLayerIndex = node.nodeIndices[i];\n const inboundTensorIndex = node.tensorIndices[i];\n connections.push(`${inboundLayer}[${inboundLayerIndex}][${inboundTensorIndex}]`);\n }\n }\n const name = layer.name;\n const className = layer.getClassName();\n const firstConnection = connections.length === 0 ? 
'' : connections[0];\n const fields = [\n `${name} (${className})`, outputShape, layer.countParams().toString(),\n firstConnection\n ];\n printRow(fields, positions, printFn);\n for (let i = 1; i < connections.length; ++i) {\n printRow(['', '', '', connections[i]], positions, printFn);\n }\n}\n//# sourceMappingURL=layer_utils.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport * as generic_utils from '../utils/generic_utils';\n// tslint:enable\n/**\n * Test whether a value in an array is the name of a LayersModel or Layer.\n * @param key The key name that the value is found under. Note that the key\n * may not be at the level immediately above the value, if the value is in a\n * nested array.\n * @param index Index of the value in the Array that it is found in.\n * @param value The value object.\n * @returns A boolean indicating whether value is a name.\n */\nfunction isArrayItemInputOrOutputName(key, index, value) {\n return (key === 'inboundNodes' || key === 'outputLayers' ||\n key === 'inputLayers') &&\n index === 0 && typeof value === 'string';\n}\n/**\n * Convert a Pythonic config object to TypeScript config object.\n * @param pythonicConfig The config object to convert.\n * @param key Optional key name of the object being converted.\n * @returns Result of the conversion.\n */\nexport function convertPythonicToTs(pythonicConfig, key) {\n if (pythonicConfig === null) {\n return null;\n }\n else if (typeof pythonicConfig === 'string') {\n return generic_utils.toCamelCase(pythonicConfig);\n }\n else if ((typeof pythonicConfig === 'number') ||\n (typeof pythonicConfig === 'boolean')) {\n return pythonicConfig;\n }\n else if (pythonicConfig instanceof Array) {\n const tsArray = [];\n const arrayLength = pythonicConfig.length;\n for (let i = 0; i < arrayLength; ++i) {\n const item = pythonicConfig[i];\n if (isArrayItemInputOrOutputName(key, i, item)) {\n tsArray.push(item);\n }\n else {\n tsArray.push(convertPythonicToTs(item, key));\n }\n }\n return tsArray;\n }\n else {\n const tsDict = {};\n for (const pythonicKey of Object.keys(pythonicConfig)) {\n const pythonicValue = pythonicConfig[pythonicKey];\n if (pythonicKey === 'name' && typeof pythonicValue === 'string') {\n // Special case the 'name' key with a string value. 
Name values, such as\n // the names of LayersModel and Layer instances, should not undergo the\n // camel-case conversion.\n tsDict[pythonicKey] = pythonicValue;\n }\n else {\n const tsKey = generic_utils.toCamelCase(pythonicKey);\n tsDict[tsKey] = convertPythonicToTs(pythonicValue, tsKey);\n }\n }\n return tsDict;\n }\n}\n/**\n * Convert a TypeScript config object to Python config object.\n * @param tsConfig The config object to convert.\n * @param key Optional key name of the object being converted.\n * @returns Result of the conversion.\n */\nexport function convertTsToPythonic(tsConfig, key) {\n if (tsConfig === null || tsConfig === undefined) {\n return null;\n }\n else if (typeof tsConfig === 'string') {\n return generic_utils.toSnakeCase(tsConfig);\n }\n else if ((typeof tsConfig === 'number') || (typeof tsConfig === 'boolean')) {\n return tsConfig;\n }\n else if (tsConfig instanceof Array) {\n const pyArray = [];\n const arrayLength = tsConfig.length;\n for (let i = 0; i < arrayLength; ++i) {\n const item = tsConfig[i];\n if (isArrayItemInputOrOutputName(key, i, item)) {\n pyArray.push(item);\n }\n else {\n pyArray.push(convertTsToPythonic(item, key));\n }\n }\n return pyArray;\n }\n else {\n const pyDict = {};\n for (const tsKey of Object.keys(tsConfig)) {\n const tsValue = tsConfig[tsKey];\n const pyKey = generic_utils.toSnakeCase(tsKey);\n if ((tsKey === 'name' || tsKey === 'className') &&\n typeof tsValue === 'string') {\n // Special case the 'name' key with a string value. Name values, such as\n // the names of LayersModel and Layer instances, should not undergo the\n // snake-case conversion.\n pyDict[pyKey] = tsValue;\n }\n else {\n pyDict[pyKey] = convertTsToPythonic(tsValue, tsKey);\n }\n }\n return pyDict;\n }\n}\n//# sourceMappingURL=serialization_utils.js.map", "/** @license See the LICENSE file. */\n// This code is auto-generated, do not modify this file!\nconst version = '2.7.0';\nexport { version };\n//# sourceMappingURL=version.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Executor: Evaluates SymbolicTensor based on feeds.\n */\nimport { cast, dispose, memory, util } from '@tensorflow/tfjs-core';\nimport { ValueError } from '../errors';\nimport { toList } from '../utils/generic_utils';\nimport { InputLayer } from './input_layer';\nimport { SymbolicTensor } from './topology';\n/**\n * Helper function to check the dtype and shape compatibility of a feed value.\n */\nfunction assertFeedCompatibility(key, val) {\n // Check dtype compatibility.\n if (key.dtype == null || key.dtype === val.dtype) {\n // a. If types match, return val tensor as is.\n return val;\n }\n try {\n // b. Attempt to convert to expected type.\n return cast(val, key.dtype);\n }\n catch (err) {\n // c. 
If conversion fails, return helpful error.\n throw new ValueError(`The dtype of the feed (${val.dtype}) can not be cast to the dtype ` +\n `of the key '${key.name}' (${key.dtype}).`);\n }\n}\n/**\n * FeedDict: A mapping from unique SymbolicTensors to feed values for them.\n * A feed value is a concrete value represented as an `Tensor`.\n */\nexport class FeedDict {\n /**\n * Constructor, optionally does copy-construction.\n * @param feeds An Array of `Feed`s, or another `FeedDict`, in which case\n * copy-construction will be performed.\n */\n constructor(feeds) {\n this.id2Value = {};\n this.id2Mask = {};\n this.name2Id = {};\n if (feeds instanceof FeedDict) {\n for (const id in feeds.id2Value) {\n this.id2Value[id] = feeds.id2Value[id];\n if (id in feeds.id2Mask) {\n this.id2Mask[id] = feeds.id2Mask[id];\n }\n }\n }\n else {\n if (feeds == null) {\n return;\n }\n for (const feed of feeds) {\n this.add(feed.key, feed.value);\n }\n }\n }\n /**\n * Add a key-value pair to the FeedDict.\n *\n * @param key The key of the feed.\n * @param value The value of the tensor feed.\n * @param mask The value of the mask feed (optional).\n * @returns This `FeedDict`.\n * @throws ValueError: If the key `SymbolicTensor` already exists in the\n * `FeedDict`.\n */\n add(key, value, mask) {\n if (this.id2Value[key.id] == null) {\n this.id2Value[key.id] = assertFeedCompatibility(key, value);\n this.name2Id[key.name] = key.id;\n if (mask != null) {\n this.id2Mask[key.id] = mask;\n }\n }\n else {\n throw new ValueError(`Duplicate key: name=${key.name}, id=${key.id}`);\n }\n return this;\n }\n /**\n * Add a Feed to the FeedDict.\n * @param feed The new `Feed` to add.\n * @returns This `FeedDict`.\n */\n addFeed(feed) {\n this.add(feed.key, feed.value);\n }\n /**\n * Probe whether a key already exists in the FeedDict.\n * @param key\n */\n hasKey(key) {\n return this.id2Value[key.id] != null;\n }\n /**\n * Get all the SymbolicTensor available in this FeedDict.\n */\n names() {\n return Object.keys(this.name2Id);\n }\n /**\n * Get the feed value for given key.\n * @param key The SymbolicTensor, or its name (as a string), of which the\n * value is sought.\n * @returns If `key` exists, the corresponding feed value.\n * @throws ValueError: If `key` does not exist in this `FeedDict`.\n */\n getValue(key) {\n if (key instanceof SymbolicTensor) {\n if (this.id2Value[key.id] == null) {\n throw new ValueError(`Nonexistent key: ${key.name}`);\n }\n else {\n return this.id2Value[key.id];\n }\n }\n else {\n const id = this.name2Id[key];\n if (id == null) {\n throw new ValueError(`Feed dict has no SymbolicTensor name: ${key}`);\n }\n return this.id2Value[id];\n }\n }\n /**\n * Get the feed mask for given key.\n * @param key The SymbolicTensor, or its name (as a string), of which the\n * value is sought.\n * @returns If `key` exists, the corresponding feed mask.\n * @throws ValueError: If `key` does not exist in this `FeedDict`.\n */\n getMask(key) {\n if (key instanceof SymbolicTensor) {\n if (this.id2Value[key.id] == null) {\n throw new ValueError(`Nonexistent key: ${key.name}`);\n }\n else {\n return this.id2Mask[key.id];\n }\n }\n else {\n const id = this.name2Id[key];\n if (id == null) {\n throw new ValueError(`Feed dict has no SymbolicTensor name: ${key}`);\n }\n return this.id2Mask[id];\n }\n }\n /** Dispose all mask Tensors held by this object. 
*/\n disposeMasks() {\n if (this.id2Mask != null) {\n dispose(this.id2Mask);\n }\n }\n}\n// Cache for topologically sorted SymbolicTensors for given execution\n// targets (i.e., fetches).\nconst cachedSorted = {};\n// Cache for recipient count maps for given execution targets (i.e., fetches).\nconst cachedRecipientCounts = {};\n/**\n * Execute a SymbolicTensor by using concrete feed values.\n *\n * A `SymbolicTensor` object is a node in a computation graph of TF.js\n * Layers. The object is backed by a source layer and input\n * `SymbolicTensor`s to the source layer. This method evaluates\n * the `call()` method of the source layer, using concrete values of the\n * inputs obtained from either\n * * `feedDict`, if the input key exists in `feedDict`, or else,\n * * a recursive call to `execute()` itself.\n *\n * @param x: The `SymbolicTensor` to execute.\n * @param feedDict: The feed values, as base condition of the recursion.\n * execution.\n * @param kwargs: Optional keyword arguments.\n * @param probe: A probe object (of interface `ExecutionProbe`) used for\n * testing memory footprint of `execute` calls.\n * @returns Result of the execution.\n * @throws ValueError: If any `SymbolicTensor`s from `InputLayer`s\n * encountered during the execution lacks a feed value in `feedDict`.\n */\nexport function execute(fetches, feedDict, kwargs, probe) {\n const training = kwargs == null ? false : kwargs['training'];\n const arrayFetches = Array.isArray(fetches);\n const fetchArray = arrayFetches ? fetches : [fetches];\n const outputNames = fetchArray.map(t => t.name);\n const finalOutputs = [];\n const feedNames = feedDict.names();\n for (const outputName of outputNames) {\n if (feedNames.indexOf(outputName) !== -1) {\n finalOutputs.push(feedDict.getValue(outputName));\n }\n else {\n finalOutputs.push(null);\n }\n }\n if (probe != null) {\n // For optional probing of memory footprint during execution.\n probe.maxNumTensors = -Infinity;\n probe.minNumTensors = Infinity;\n }\n // Check cache.\n const fetchAndFeedKey = outputNames.join(',') + '|' + feedDict.names().join(',');\n let sorted;\n let recipientCounts;\n if (cachedSorted[fetchAndFeedKey] == null) {\n // Cache doesn't contain the desired combination of fetches. 
Compute\n // topological sort for the combination for the first time.\n const out = getTopologicalSortAndRecipientCounts(fetchArray, feedDict);\n sorted = out.sorted;\n recipientCounts = out.recipientCounts;\n // Store results in cache for future use.\n cachedSorted[fetchAndFeedKey] = sorted;\n cachedRecipientCounts[fetchAndFeedKey] = recipientCounts;\n }\n sorted = cachedSorted[fetchAndFeedKey];\n recipientCounts = {};\n if (!training) {\n Object.assign(recipientCounts, cachedRecipientCounts[fetchAndFeedKey]);\n }\n const internalFeedDict = new FeedDict(feedDict);\n // Start iterative execution on the topologically-sorted SymbolicTensors.\n for (let i = 0; i < sorted.length; ++i) {\n if (probe != null) {\n // For optional probing of memory usage during execution.\n const numTensors = memory().numTensors;\n if (numTensors > probe.maxNumTensors) {\n probe.maxNumTensors = numTensors;\n }\n if (numTensors < probe.minNumTensors) {\n probe.minNumTensors = numTensors;\n }\n }\n const symbolic = sorted[i];\n const srcLayer = symbolic.sourceLayer;\n if (srcLayer instanceof InputLayer) {\n continue;\n }\n const inputValues = [];\n const inputMasks = [];\n const tensorsToDispose = [];\n let maskExists = false;\n for (const input of symbolic.inputs) {\n const value = internalFeedDict.getValue(input);\n const mask = internalFeedDict.getMask(input);\n inputValues.push(value);\n inputMasks.push(mask);\n if (mask != null) {\n maskExists = true;\n }\n if (!training) {\n recipientCounts[input.name]--;\n if (recipientCounts[input.name] === 0 && !feedDict.hasKey(input) &&\n outputNames.indexOf(input.name) === -1 && !value.isDisposed &&\n input.sourceLayer.stateful !== true) {\n tensorsToDispose.push(value);\n }\n }\n }\n if (maskExists) {\n kwargs = kwargs || {};\n kwargs['mask'] = inputMasks[0];\n }\n const outputTensors = toList(srcLayer.apply(inputValues, kwargs));\n let outputMask = null;\n if (srcLayer.supportsMasking) {\n outputMask = srcLayer.computeMask(inputValues, inputMasks);\n }\n const layerOutputs = getNodeOutputs(symbolic);\n const outputSymbolicTensors = Array.isArray(layerOutputs) ? layerOutputs : [layerOutputs];\n for (let i = 0; i < outputSymbolicTensors.length; ++i) {\n if (!internalFeedDict.hasKey(outputSymbolicTensors[i])) {\n internalFeedDict.add(outputSymbolicTensors[i], outputTensors[i], Array.isArray(outputMask) ? outputMask[0] : outputMask);\n }\n const index = outputNames.indexOf(outputSymbolicTensors[i].name);\n if (index !== -1) {\n finalOutputs[index] = outputTensors[i];\n }\n }\n if (!training) {\n // Clean up Tensors that are no longer needed.\n dispose(tensorsToDispose);\n }\n }\n // NOTE(cais): Unlike intermediate tensors, we don't discard mask\n // tensors as we go, because these tensors are sometimes passed over a\n // series of mutliple layers, i.e., not obeying the immediate input\n // relations in the graph. If this becomes a memory-usage concern,\n // we can improve this in the future.\n internalFeedDict.disposeMasks();\n return arrayFetches ? finalOutputs : finalOutputs[0];\n}\n/**\n * Sort the `SymbolicTensor`s topologically, for an array of fetches.\n *\n * This function calls getTopologicalSortAndRecipientCountsForOneFetch and\n * merges their results.\n *\n * @param fetch The array of fetches requested. 
Must be a non-empty array.\n * @param feedDict The dictionary of fed values.\n * @returns sorted: Topologically-sorted array of SymbolicTensors.\n * recipientCounts: Recipient counts for all SymbolicTensors in `sorted`.\n */\nfunction getTopologicalSortAndRecipientCounts(fetches, feedDict) {\n util.assert(fetches != null && fetches.length > 0, () => `Expected at least one fetch, got none`);\n let finalSorted = [];\n let finalRecipientMap = {};\n if (fetches.length === 1) {\n // Special-casing 1 fetch for efficiency.\n const out = getTopologicalSortAndRecipientCountsForOneFetch(fetches[0], feedDict);\n finalSorted = out.sorted;\n finalRecipientMap = out.recipientMap;\n }\n else {\n const visited = new Set();\n for (const fetch of fetches) {\n const { sorted, recipientMap } = getTopologicalSortAndRecipientCountsForOneFetch(fetch, feedDict);\n // Merge sorted SymbolicTensor Arrays.\n for (const symbolicTensor of sorted) {\n if (!visited.has(symbolicTensor.name)) {\n finalSorted.push(symbolicTensor);\n visited.add(symbolicTensor.name);\n }\n }\n // Merge recipient maps.\n for (const name in recipientMap) {\n if (finalRecipientMap[name] == null) {\n finalRecipientMap[name] = new Set();\n }\n recipientMap[name].forEach(recipient => finalRecipientMap[name].add(recipient));\n }\n }\n }\n return {\n sorted: finalSorted,\n recipientCounts: recipientMap2Counts(finalRecipientMap)\n };\n}\nfunction recipientMap2Counts(recipientMap) {\n const recipientCounts = {};\n for (const name in recipientMap) {\n recipientCounts[name] = recipientMap[name].size;\n }\n return recipientCounts;\n}\n/**\n * Sort the `SymbolicTensor`s topologically, for a single fetch.\n *\n * This helper function processes the upstream SymbolicTensors of a single\n * fetch.\n *\n * @param fetch The single fetch requested.\n * @param feedDict The dictionary of fed values.\n * @returns sorted: Topologically-sorted array of SymbolicTensors.\n * recipientMap: Recipient names for all SymbolicTensors in `sorted`.\n */\nexport function getTopologicalSortAndRecipientCountsForOneFetch(fetch, feedDict) {\n const visited = new Set();\n const sorted = [];\n const recipientMap = {};\n // Put keys of the feedDict into visited first, so they don't have to be\n // walked. This is needed in case where there are feeds for intermediate\n // SymbolicTensors of the graph.\n for (const key of feedDict.names()) {\n visited.add(key);\n }\n const stack = [];\n const marks = [];\n // Initial population of stack and marks.\n stack.push(fetch);\n while (stack.length > 0) {\n const top = stack[stack.length - 1];\n if (visited.has(top.name)) {\n stack.pop();\n continue;\n }\n const topIsMarked = marks[marks.length - 1] === stack.length - 1;\n if (top.inputs.length === 0 || topIsMarked) {\n // Input SymbolicTensor or all children have been visited.\n stack.pop();\n sorted.push(top);\n visited.add(top.name);\n if (topIsMarked) {\n marks.pop();\n }\n }\n else {\n // A non-input SymbolicTensor whose upstream SymbolicTensors haven't\n // been visited yet. Push them onto the stack.\n marks.push(stack.length - 1);\n for (const input of top.inputs) {\n // Increment the recipient count. 
Note that this needs to happen\n // regardless of whether the SymbolicTensor has been visited before.\n if (recipientMap[input.name] == null) {\n recipientMap[input.name] = new Set();\n }\n recipientMap[input.name].add(top.name);\n if (visited.has(input.name)) {\n continue; // Avoid repeated visits to the same SymbolicTensor.\n }\n stack.push(input);\n }\n }\n }\n return { sorted, recipientMap };\n}\n/**\n * Get the symbolic output tensors of the node to which a given fetch belongs.\n * @param fetch The fetched symbolic tensor.\n * @returns The Array of symbolic tensors output by the node to which `fetch`\n * belongs.\n */\nfunction getNodeOutputs(fetch) {\n let layerOutputs;\n if (fetch.sourceLayer.inboundNodes.length === 1) {\n layerOutputs = fetch.sourceLayer.output;\n }\n else {\n let nodeIndex = null;\n for (let i = 0; i < fetch.sourceLayer.inboundNodes.length; ++i) {\n for (const outputTensor of fetch.sourceLayer.inboundNodes[i]\n .outputTensors) {\n if (outputTensor.id === fetch.id) {\n nodeIndex = i;\n break;\n }\n }\n }\n layerOutputs = fetch.sourceLayer.getOutputAt(nodeIndex);\n }\n return layerOutputs;\n}\n//# sourceMappingURL=executor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* Original source: keras/engine/topology.py */\nimport { tidy } from '@tensorflow/tfjs-core';\nimport { getUid } from '../backend/state';\nimport { NotImplementedError, RuntimeError, ValueError } from '../errors';\nimport { deserialize as deserializeLayer } from '../layers/serialization';\nimport * as generic_utils from '../utils/generic_utils';\nimport { convertTsToPythonic } from '../utils/serialization_utils';\nimport * as types_utils from '../utils/types_utils';\nimport { batchSetValue } from '../variables';\nimport { version as layersVersion } from '../version';\nimport { execute, FeedDict } from './executor';\nimport { InputLayer } from './input_layer';\nimport { Layer, Node } from './topology';\n/**\n * A Container is a directed acyclic graph of layers.\n *\n * It is the topological form of a \"model\". A LayersModel\n * is simply a Container with added training routines.\n *\n */\nexport class Container extends Layer {\n constructor(args) {\n // No args passed to super's constructor.\n super({});\n this.containerNodes = new Set();\n this.name = args.name;\n if (this.name == null) {\n const prefix = this.getClassName().toLowerCase();\n this.name = getUid(prefix);\n }\n this.supportsMasking = false;\n this.trainable_ = true;\n // TODO(michaelterry): Initialize perInputLosses/Updates here.\n // Container-specific properties.\n if (Array.isArray(args.inputs)) {\n this.inputs = args.inputs.slice();\n }\n else {\n this.inputs = [args.inputs];\n }\n if (Array.isArray(args.outputs)) {\n this.outputs = args.outputs.slice();\n }\n else {\n this.outputs = [args.outputs];\n }\n // Check for redundancy in inputs.\n if (generic_utils.unique(this.inputs).length !== this.inputs.length) {\n throw new ValueError('The list of inputs passed to the model is ' +\n 'redundant. All inputs should only appear once. Found: ' +\n `${this.inputs.map(x => x.name)}`);\n }\n // Check for redundancy in outputs.\n if (generic_utils.unique(this.outputs).length !== this.outputs.length) {\n console.warn('The list of outputs passed to the model is redundant. 
' +\n 'All outputs should only appear once. Found: ' +\n `${this.outputs.map(x => x.name)}`);\n }\n /*\n List of initial layers (1 to 1 mapping with this.inputs, hence the same\n layer might appear twice)\n */\n this.inputLayers = [];\n this.inputLayersNodeIndices = [];\n this.inputLayersTensorIndices = [];\n /*\n List of layers (1 to 1 mapping with this.outputs, hence the same layer\n might appear twice)\n */\n this.outputLayers = [];\n this.outputLayersNodeIndices = [];\n this.outputLayersTensorIndices = [];\n /*\n All layers in order of horizontal graph traversal. Entries are unique.\n Includes input and output layers.\n */\n this.layers = [];\n /*\n References to container layers that were constructed internally. We need\n these to properly dispose of tensors from nested containers.\n */\n this.internalContainerRefs = [];\n // TODO(michaelterry): Determine if caching still needed with eager\n // backend.\n /*\n This is for performance optimization when calling the Container on new\n inputs. Every time the Container is called on a set on input tensors,\n we compute the output tensors, output masks and output shapes in one pass,\n then cache them here. When one of these outputs is queried later,\n we retrieve it from there instead of recomputing it.\n */\n // this.outputTensorCache = {};\n // this.outputShapeCache = {};\n // Build this.outputLayers:\n for (const x of this.outputs) {\n const layer = x.sourceLayer;\n const nodeIndex = x.nodeIndex;\n const tensorIndex = x.tensorIndex;\n this.outputLayers.push(layer);\n this.outputLayersNodeIndices.push(nodeIndex);\n this.outputLayersTensorIndices.push(tensorIndex);\n }\n // TODO(michaelterry): Add output mask cache code.\n // Build this.inputLayers:\n for (const x of this.inputs) {\n const layer = x.sourceLayer;\n const nodeIndex = x.nodeIndex;\n const tensorIndex = x.tensorIndex;\n /*\n It's supposed to be an input layer, so only one node\n and one tensor output.\n */\n generic_utils.assert(nodeIndex === 0, 'input layer has >1 nodes');\n generic_utils.assert(tensorIndex === 0, 'input layer has >1 tensors');\n this.inputLayers.push(layer);\n this.inputLayersNodeIndices.push(nodeIndex);\n this.inputLayersTensorIndices.push(tensorIndex);\n }\n // Build this.inputNames and this.outputNames.\n this.inputNames = [];\n this.outputNames = [];\n this.feedInputShapes = [];\n this.feedInputNames = [];\n this.feedOutputNames = [];\n for (let i = 0; i < this.inputLayers.length; i++) {\n const layer = this.inputLayers[i];\n // Check that layer is an InputLayer.\n if (!(layer instanceof InputLayer)) {\n throw new TypeError('Input layers to a LayersModel must be InputLayer objects. ' +\n `Received inputs: ${args.inputs}. 
` +\n `Input ${i} (0-based) originates ` +\n `from layer type ${layer.getClassName()}.`);\n }\n this.inputNames.push(layer.name);\n this.feedInputShapes.push(layer.batchInputShape);\n this.feedInputNames.push(layer.name);\n }\n for (const layer of this.outputLayers) {\n this.outputNames.push(layer.name);\n }\n this.internalInputShapes = this.inputs.map(x => x.shape);\n this.internalOutputShapes = this.outputs.map(x => x.shape);\n /*\n Container_nodes: set of nodes included in the graph (not all nodes\n included in the layers are relevant to the current graph).\n */\n // ids of all nodes relevant to the Container:\n const nodesDepths = {};\n // To recover nodes from their ID.\n const nodeIDToNode = {};\n const layersDepths = {};\n // To layers from their ID.\n const layerIDToLayer = {};\n const layerIndices = {};\n const nodesInDecreasingDepth = [];\n /**\n * Builds a map of the graph of layers.\n *\n * This recursively updates the map `layerIndices`,\n * the list `nodesInDecreasingDepth` and the set `containerNodes`.\n *\n * @param tensor Some tensor in a graph.\n * @param finishedNodes Set of nodes whose subgraphs have been traversed\n * completely. Useful to prevent duplicated work.\n * @param nodesInProgress Set of nodes that are currently active on the\n * recursion stack. Useful to detect cycles.\n * @param layer Layer from which `tensor` comes from. If not provided,\n * will be obtained from tensor.sourceLayer.\n * @param nodeIndex Node index from which `tensor` comes from.\n * @param tensorIndex TensorIndex from which `tensor` comes from.\n *\n * @exception RuntimeError if a cycle is detected.\n */\n const buildMapOfGraph = (tensor, finishedNodes, nodesInProgress, layer, nodeIndex, tensorIndex) => {\n if (layer == null || nodeIndex == null || tensorIndex == null) {\n layer = tensor.sourceLayer;\n nodeIndex = tensor.nodeIndex;\n tensorIndex = tensor.tensorIndex;\n }\n const node = layer.inboundNodes[nodeIndex];\n // Prevent cycles.\n if (nodesInProgress.indexOf(node) !== -1) {\n throw new RuntimeError(`The tensor ${tensor.name} at layer \"${layer.name}\" ` +\n 'is part of a cycle.');\n }\n // Don't repeat work for shared subgraphs\n if (finishedNodes.indexOf(node) !== -1) {\n return;\n }\n // Update containerNodes.\n this.containerNodes.add(Container.nodeKey(layer, nodeIndex));\n // Store the traversal order for layer sorting.\n if (!(layer.id in layerIndices)) {\n layerIndices[layer.id] = Object.keys(layerIndices).length;\n }\n if (nodesInProgress.indexOf(node) === -1) {\n nodesInProgress.push(node);\n }\n // Propagate to all previous tensors connected to this node.\n const numInboundLayers = node.inboundLayers.length;\n for (let i = 0; i < numInboundLayers; i++) {\n const x = node.inputTensors[i];\n const layer = node.inboundLayers[i];\n const nodeIndex = node.nodeIndices[i];\n const tensorIndex = node.tensorIndices[i];\n buildMapOfGraph(x, finishedNodes, nodesInProgress, layer, nodeIndex, tensorIndex);\n }\n finishedNodes.push(node);\n while (nodesInProgress.indexOf(node) >= 0) {\n nodesInProgress.splice(nodesInProgress.indexOf(node), 1);\n }\n nodesInDecreasingDepth.push(node);\n };\n const finishedNodes = [];\n const nodesInProgress = [];\n for (const x of this.outputs) {\n buildMapOfGraph(x, finishedNodes, nodesInProgress);\n }\n const reversedNodesInDecreasingDepth = nodesInDecreasingDepth.slice().reverse();\n for (const node of reversedNodesInDecreasingDepth) {\n nodeIDToNode[node.id] = node;\n // If the depth is not set, the node has no outbound nodes (depth 0).\n if 
(!(node.id in nodesDepths)) {\n nodesDepths[node.id] = 0;\n }\n let depth = nodesDepths[node.id];\n // Update the depth of the corresponding layer\n const previousDepth = (layersDepths[node.outboundLayer.id] == null ?\n 0 :\n layersDepths[node.outboundLayer.id]);\n /*\n If we've seen this layer before at a higher depth, we should use that\n depth instead of the node depth. This is necessary for shared layers\n that have inputs at different depth levels in the graph.\n */\n depth = Math.max(depth, previousDepth);\n layersDepths[node.outboundLayer.id] = depth;\n layerIDToLayer[node.outboundLayer.id] = node.outboundLayer;\n nodesDepths[node.id] = depth;\n // Update the depth of inbound nodes.\n for (let i = 0; i < node.inboundLayers.length; i++) {\n const inboundLayer = node.inboundLayers[i];\n const nodeIndex = node.nodeIndices[i];\n const inboundNode = inboundLayer.inboundNodes[nodeIndex];\n const previousDepth = (nodesDepths[inboundNode.id] == null ? 0 :\n nodesDepths[inboundNode.id]);\n nodesDepths[inboundNode.id] = Math.max(depth + 1, previousDepth);\n nodeIDToNode[inboundNode.id] = inboundNode;\n }\n }\n // Build a dict {depth: list of nodes with this depth}\n const nodesByDepth = {};\n for (const nodeID in nodesDepths) {\n const depth = nodesDepths[nodeID];\n if (!(depth in nodesByDepth)) {\n nodesByDepth[depth] = [];\n }\n nodesByDepth[depth].push(nodeIDToNode[nodeID]);\n }\n // Build a dict {depth: list of layers with this depth}\n const layersByDepth = {};\n for (const layerID in layersDepths) {\n const depth = layersDepths[layerID];\n if (!(depth in layersByDepth)) {\n layersByDepth[depth] = [];\n }\n layersByDepth[depth].push(layerIDToLayer[layerID]);\n }\n // Get sorted list of layer depths.\n let depthKeys = Object.keys(layersByDepth)\n .map(x => parseInt(x, 10))\n .sort(generic_utils.reverseNumberCompare);\n // Set this.layers and this.layersByDepth.\n this.layers = [];\n for (const depth of depthKeys) {\n const layersForDepth = layersByDepth[depth];\n // Container.layers needs to have a deterministic order:\n // here we order them by traversal order.\n layersForDepth.sort((a, b) => {\n const aIndex = layerIndices[a.id];\n const bIndex = layerIndices[b.id];\n if (aIndex < bIndex) {\n return -1;\n }\n if (aIndex > bIndex) {\n return 1;\n }\n return 0;\n });\n for (const layer of layersForDepth) {\n if (layer instanceof Container) {\n this.internalContainerRefs.push(layer);\n }\n this.layers.push(layer);\n }\n }\n this.layersByDepth = layersByDepth;\n // Get sorted list of node depths;\n depthKeys = Object.keys(nodesByDepth)\n .map(x => parseInt(x, 10))\n .sort(generic_utils.reverseNumberCompare);\n // Check that all tensors required are computable.\n // computable_tensors: all tensors in the graph\n // that can be computed from the inputs provided.\n const computableTensors = this.inputs.slice();\n // To provide a better error msg.\n const layersWithCompleteInput = [];\n for (const depth of depthKeys) {\n for (const node of nodesByDepth[depth]) {\n const layer = node.outboundLayer;\n if (layer != null) {\n for (const x of node.inputTensors) {\n if (computableTensors.indexOf(x) === -1) {\n throw new RuntimeError(`Graph disconnected: cannot obtain value for tensor ${x}` +\n ` at layer \"${layer.name}\". 
` +\n 'The following previous layers were accessed without ' +\n `issue: ${layersWithCompleteInput}`);\n }\n }\n for (const x of node.outputTensors) {\n computableTensors.push(x);\n }\n layersWithCompleteInput.push(layer.name);\n }\n }\n }\n // Set this.containerNodes and this.nodesByDepth.\n this.nodesByDepth = nodesByDepth;\n // Ensure name unicity, which will be crucial for serialization\n // (since serialized nodes refer to layers by their name).\n const allNames = this.layers.map(x => x.name);\n for (const name of allNames) {\n const numOccurrences = allNames.filter(x => x === name).length;\n if (numOccurrences !== 1) {\n throw new RuntimeError(`The name \"${name}\" is used ${numOccurrences} times ` +\n 'in the model. All layer names should be unique. Layer names: ' +\n JSON.stringify(allNames));\n }\n }\n // Layer parameters.\n // The new container starts with a single inbound node\n // for its inputs, and no outbound nodes.\n // Will be appended to by future calls to apply().\n this.outboundNodes = [];\n // Will be appended to below, and by future calls to apply().\n this.inboundNodes = [];\n // Create the node linking internal inputs to internal outputs.\n // (This call has side effects.)\n // tslint:disable-next-line:no-unused-expression\n new Node({\n outboundLayer: this,\n inboundLayers: [],\n nodeIndices: [],\n tensorIndices: [],\n inputTensors: this.inputs,\n outputTensors: this.outputs,\n inputMasks: this.inputs.map(x => null),\n outputMasks: this.outputs.map(x => null),\n inputShapes: this.inputs.map(x => x.shape),\n outputShapes: this.outputs.map(x => x.shape)\n });\n this.built = true;\n this._refCount = 1; // The ref count of a container always start at 1.\n }\n assertNotDisposed() {\n if (this._refCount === 0) {\n throw new Error(`Container '${this.name}' is already disposed.`);\n }\n }\n /**\n * Attempt to dispose a LayersModel's weights.\n *\n * This method decrease the reference count of the LayersModel object by 1.\n *\n * A LayersModel is reference-counted. 
Its reference count is incremented by 1\n * when it is first constructed and when it is used as a Layer of another\n * LayersModel.\n *\n * If the reference count of a LayersModel becomes 0, the `dispose` method of\n * all its constituent `Layer`s will be called.\n *\n * Note: If the reference count is greater than 0 after the decrement, the\n * `dispose` method of its constituent `Layer`s will *not* be called.\n *\n * After a LayersModel is disposed, it cannot be used in calls such as\n * 'predict`, `evaluate` or `fit` anymore.\n *\n * @returns A DisposeResult Object with the following fields:\n * - refCountAfterDispose: The reference count of the LayersModel after this\n * `dispose()` call.\n * - numDisposedVariables: Number of `tf.Variable`s (i.e., weights) disposed\n * during this `dispose()` call.\n * @throws {Error} If the layer is not built yet, or if the LayersModel has\n * already been disposed.\n */\n dispose() {\n this.assertNotDisposed();\n const result = { refCountAfterDispose: null, numDisposedVariables: 0 };\n if (--this._refCount === 0) {\n for (const layer of this.layers) {\n result.numDisposedVariables += layer.dispose().numDisposedVariables;\n }\n // Call dispose on each internally created container layer again to ensure\n // their refCounts hit zero and their tensors are subsequently deleted.\n for (const container of this.internalContainerRefs) {\n result.numDisposedVariables += container.dispose().numDisposedVariables;\n }\n }\n result.refCountAfterDispose = this._refCount;\n return result;\n }\n get trainable() {\n return this.trainable_;\n }\n set trainable(trainable) {\n this.layers.forEach(layer => {\n // tslint:disable-next-line:no-any\n layer._trainableWeights\n .forEach(w => w.trainable = trainable);\n });\n this.trainable_ = trainable;\n }\n get trainableWeights() {\n // Porting Note: This check below is to prevent errors where the\n // _trainableWeights inherited from the parent class (Layer) gets\n // inadvertently used.\n if (this._trainableWeights.length > 0) {\n throw new ValueError('Container instance unexpectedly contains _trainableWeights.' +\n 'The trainable weights of a Container are a union of the ' +\n 'trainable weights of its consituent Layers. Its own ' +\n '_trainableWeights must remain an empty Array.');\n }\n if (!this.trainable) {\n return [];\n }\n let weights = [];\n for (const layer of this.layers) {\n weights = weights.concat(layer.trainableWeights);\n }\n return weights;\n }\n get nonTrainableWeights() {\n const weights = [];\n for (const layer of this.layers) {\n weights.push(...layer.nonTrainableWeights);\n }\n if (!this.trainable) {\n const trainableWeights = [];\n for (const layer of this.layers) {\n trainableWeights.push(...layer.trainableWeights);\n }\n return trainableWeights.concat(weights);\n }\n return weights;\n }\n get weights() {\n return this.trainableWeights.concat(this.nonTrainableWeights);\n }\n /**\n * Loads all layer weights from a JSON object.\n *\n * Porting Note: HDF5 weight files cannot be directly loaded in JavaScript /\n * TypeScript. 
The utility script at `scripts/pykeras.py` offers means\n * to convert them into JSON strings compatible with this method.\n * Porting Note: TensorFlow.js Layers supports only loading by name currently.\n *\n * @param weights A JSON mapping weight names to weight values as nested\n * arrays of numbers, or a `NamedTensorMap`, i.e., a JSON mapping weight\n * names to `tf.Tensor` objects.\n * @param strict Require that the provided weights exactly match those\n * required by the container. Default: `true`. Passing `false` means that\n * extra weights and missing weights will be silently ignored.\n */\n loadWeights(weights, strict = true) {\n const nameToWeight = {};\n let totalWeightsCount = 0;\n for (const layer of this.layers) {\n for (const weight of layer.weights) {\n if (nameToWeight[weight.originalName] != null) {\n throw new ValueError(`Duplicate weight name: ${weight.originalName}`);\n }\n nameToWeight[weight.originalName] = weight;\n totalWeightsCount++;\n }\n }\n const weightValueTuples = [];\n for (const name in weights) {\n // TF 2.2.0 added cell name to the weight name in the format of\n // layer_name/cell_name/weight_name, we need to remove\n // the inner cell name.\n let validatedName = name;\n if (nameToWeight[name] == null) {\n const tokens = name.split('/');\n const shortenNameArray = tokens.slice(0, -2).concat([tokens[tokens.length - 1]]);\n validatedName = shortenNameArray.join('/');\n }\n if (nameToWeight[validatedName] != null) {\n weightValueTuples.push([nameToWeight[validatedName], weights[name]]);\n }\n else if (strict) {\n throw new ValueError(`Provided weight data has no target variable: ${name}`);\n }\n delete nameToWeight[validatedName];\n }\n if (strict) {\n // Check that all weights are set.\n const unsetNames = [];\n for (const name in nameToWeight) {\n unsetNames.push(name);\n }\n if (unsetNames.length > 0) {\n throw new ValueError(`${unsetNames.length} of ${totalWeightsCount} weights are not set: ` +\n `${unsetNames}`);\n }\n }\n batchSetValue(weightValueTuples);\n }\n /**\n * Util shared between different serialization methods.\n * @returns LayersModel config with Keras version information added.\n */\n updatedConfig() {\n const theConfig = this.getConfig();\n const modelConfig = {};\n modelConfig['className'] = this.getClassName();\n modelConfig['config'] = theConfig;\n modelConfig['kerasVersion'] = `tfjs-layers ${layersVersion}`;\n // TODO(nielsene): Replace something like K.backend() once\n // possible.\n modelConfig['backend'] = 'TensorFlow.js';\n return modelConfig;\n }\n /**\n * Returns a JSON string containing the network configuration.\n *\n * To load a network from a JSON save file, use\n * models.modelFromJSON(jsonString);\n * @param extraJsonArgs Unused in tfjs-layers, maintained for PyKeras\n * @param returnString Whether the return value should be stringified\n * (default: `true`).\n * @returns a JSON string if `returnString` (default), or a JSON object if\n * `!returnString`.\n */\n // tslint:disable-next-line:no-any\n toJSON(unused, returnString = true) {\n const modelConfig = convertTsToPythonic(this.updatedConfig());\n return returnString ? JSON.stringify(modelConfig) : modelConfig;\n }\n /**\n * Call the model on new inputs.\n *\n * In this case `call` just reapplies all ops in the graph to the new inputs\n * (e.g. build a new computational graph from the provided inputs).\n *\n * @param inputs A tensor or list of tensors.\n * @param mask A mask or list of masks. 
A mask can be either a tensor or null\n * (no mask).\n *\n * @return A tensor if there is a single output, or a list of tensors if there\n * are more than one outputs.\n */\n call(inputs, kwargs) {\n return tidy(() => {\n inputs = generic_utils.toList(inputs);\n const feedDict = new FeedDict();\n for (let i = 0; i < this.inputs.length; ++i) {\n feedDict.add(this.inputs[i], inputs[i]);\n }\n return execute(this.outputs, feedDict, kwargs);\n });\n }\n /**\n * Computes an output mask tensor.\n *\n * @param inputs Tensor or list of tensors.\n * @param mask Tensor or list of tensors.\n *\n * @return null or a tensor (or list of tensors, one per output tensor of the\n * layer).\n */\n computeMask(inputs, mask) {\n return tidy(() => {\n inputs = generic_utils.toList(inputs);\n let masks;\n if (mask == null) {\n masks = generic_utils.pyListRepeat(null, inputs.length);\n }\n else {\n masks = generic_utils.toList(mask);\n }\n // TODO(michaelterry): Add support for mask caching.\n return this.runInternalGraph(inputs, masks)[1];\n });\n }\n /**\n * Computes the output shape of the layer.\n *\n * Assumes that the layer will be built to match that input shape provided.\n *\n * @param inputShape A shape (tuple of integers) or a list of shape tuples\n * (one per output tensor of the layer). Shape tuples can include null for\n * free dimensions, instead of an integer.\n */\n computeOutputShape(inputShape) {\n const inputShapes = types_utils.normalizeShapeList(inputShape);\n if (inputShapes.length !== this.inputLayers.length) {\n throw new ValueError(`Invalid inputShape argument ${inputShape}: ` +\n `model has ${this.inputLayers.length} tensor inputs.`);\n }\n // TODO(michaelterry): Add caching\n const layersToOutputShapes = {};\n for (let i = 0; i < inputShapes.length; i++) {\n const layer = this.inputLayers[i];\n const inputShape = inputShapes[i];\n // It's an input layer: computeOutputShape is identity,\n // and there is only one node and one tensor output.\n const shapeKey = layer.name + '_0_0';\n layersToOutputShapes[shapeKey] = inputShape;\n }\n const depthKeys = Object.keys(this.nodesByDepth)\n .map(x => parseInt(x, 10))\n .sort(generic_utils.reverseNumberCompare);\n // Iterate over nodes, by depth level.\n if (depthKeys.length > 1) {\n for (const depth of depthKeys) {\n const nodes = this.nodesByDepth[depth];\n for (const node of nodes) {\n // This is always a single layer, never a list.\n const layer = node.outboundLayer;\n if (this.inputLayers.map(x => x.id).indexOf(layer.id) !== -1) {\n // We've already covered the input layers a few lines above.\n continue;\n }\n // Potentially redundant list, same size of node.inputTensors.\n const inputShapes = [];\n for (let j = 0; j < node.inboundLayers.length; j++) {\n const inboundLayer = node.inboundLayers[j];\n const nodeIndex = node.nodeIndices[j];\n const tensorIndex = node.tensorIndices[j];\n const shapeKey = `${inboundLayer.name}_${nodeIndex}_${tensorIndex}`;\n const inputShape = layersToOutputShapes[shapeKey];\n inputShapes.push(inputShape);\n }\n const outputShape = layer.computeOutputShape(generic_utils.singletonOrArray(inputShapes));\n const outputShapes = types_utils.normalizeShapeList(outputShape);\n const nodeIndex = layer.inboundNodes.indexOf(node);\n for (let j = 0; j < outputShapes.length; j++) {\n const shapeKey = `${layer.name}_${nodeIndex}_${j}`;\n layersToOutputShapes[shapeKey] = outputShapes[j];\n }\n }\n }\n }\n // Read final output shapes from layersToOutputShapes.\n const outputShapes = [];\n const outputShapeKeys = [];\n for (let i 
= 0; i < this.outputLayers.length; i++) {\n const layer = this.outputLayers[i];\n const nodeIndex = this.outputLayersNodeIndices[i];\n const tensorIndex = this.outputLayersTensorIndices[i];\n const shapeKey = `${layer.name}_${nodeIndex}_${tensorIndex}`;\n outputShapeKeys.push(shapeKey);\n }\n for (let i = 0; i < outputShapeKeys.length; i++) {\n const key = outputShapeKeys[i];\n generic_utils.assert(key in layersToOutputShapes);\n outputShapes.push(layersToOutputShapes[key]);\n }\n // TODO(michaelterry): Update cache\n return generic_utils.singletonOrArray(outputShapes);\n }\n /**\n * Computes output tensors for new inputs.\n *\n * Note:\n * - Expects `inputs` to be a list (potentially with 1 element).\n *\n * @param inputs List of tensors\n * @param masks List of masks (tensors or null).\n * @return Three lists: outputTensors, outputMasks, outputShapes\n */\n runInternalGraph(inputs, masks) {\n if (masks == null) {\n masks = generic_utils.pyListRepeat(null, inputs.length);\n }\n // Dictionary mapping reference tensors to tuples\n // (computed tensor, compute mask)\n // we assume a 1:1 mapping from tensor to mask\n // TODO: raise exception when a `.computeMask()` call\n // does not return a list the same size as `call`\n const tensorMap = {};\n for (let i = 0; i < this.inputs.length; ++i) {\n const x = this.inputs[i];\n const y = inputs[i];\n const mask = masks[i];\n tensorMap[x.id] = [y, mask];\n }\n const depthKeys = Object.keys(this.nodesByDepth)\n .map(x => parseInt(x, 10))\n .sort(generic_utils.reverseNumberCompare);\n for (const depth of depthKeys) {\n const nodes = this.nodesByDepth[depth];\n for (const node of nodes) {\n // This is always a single layer, never a list.\n const layer = node.outboundLayer;\n const referenceInputTensors = node.inputTensors;\n const referenceOutputTensors = node.outputTensors;\n // If all previous input tensors are available in tensorMap,\n // then call node.inboundLayer on them.\n // List of tuples [input, mask]:\n const computedData = new Array();\n for (const x of referenceInputTensors) {\n if (x.id in tensorMap) {\n computedData.push(tensorMap[x.id]);\n }\n }\n if (computedData.length === referenceInputTensors.length) {\n // TODO(michaelterry): Add K.name_scope here, if we need it.\n let kwargs = {};\n let computedTensors;\n let computedMasks;\n let outputTensors;\n let outputMasks;\n // call layer\n if (node.callArgs != null) {\n kwargs = node.callArgs;\n }\n if (computedData.length === 1) {\n const [computedTensor, computedMask] = computedData[0];\n if (kwargs['mask'] == null) {\n kwargs['mask'] = computedMask;\n }\n outputTensors =\n generic_utils.toList(layer.call(computedTensor, kwargs));\n outputMasks = generic_utils.toList(layer.computeMask(computedTensor, computedMask));\n computedTensors = [computedTensor];\n computedMasks = [computedMask];\n }\n else {\n computedTensors = computedData.map(x => x[0]);\n computedMasks = computedData.map(x => x[1]);\n if (kwargs['mask'] == null) {\n kwargs['mask'] = computedMasks;\n }\n outputTensors =\n generic_utils.toList(layer.call(computedTensors, kwargs));\n outputMasks = generic_utils.toList(layer.computeMask(computedTensors, computedMasks));\n }\n if (layer.activityRegularizer) {\n throw new NotImplementedError('LayersModel invocation with concrete Tensor value(s) in the ' +\n 'presence of activity regularizer(s) is not supported yet.');\n }\n // TODO(michaelterry): Add model updates and losses\n // Update tensor map.\n for (let i = 0; i < referenceOutputTensors.length; ++i) {\n const x = 
referenceOutputTensors[i];\n const y = outputTensors[i];\n const mask = outputMasks[i];\n tensorMap[x.id] = [y, mask];\n }\n }\n }\n }\n const outputTensors = [];\n const outputMasks = [];\n const outputShapes = [];\n for (const x of this.outputs) {\n generic_utils.assert(x.id in tensorMap, `Could not compute output ${x.name} : ${x.id}`);\n const [tensor, mask] = tensorMap[x.id];\n outputShapes.push(tensor.shape);\n outputTensors.push(tensor);\n outputMasks.push(mask);\n }\n // TODO(michaelterry): Add support for caches.\n return [outputTensors, outputMasks, outputShapes];\n }\n /**\n * Builds a map of internal node keys to node ordering.\n * Used in serializaion a node orderings may change as unused nodes are\n * dropped. Porting Note: This helper method was pulled out of getConfig to\n * improve readability.\n * @param layers An array of Layers in the model.\n * @returns Map of Node Keys to index order within the layer.\n */\n buildNodeConversionMap(layers) {\n const nodeConversionMap = {};\n let keptNodes;\n for (const layer of this.layers) {\n keptNodes = layer instanceof Container ? 1 : 0;\n for (let originalNodeIndex = 0; originalNodeIndex < layer.inboundNodes.length; originalNodeIndex++) {\n const nodeKey = Container.nodeKey(layer, originalNodeIndex);\n if (this.containerNodes.has(nodeKey)) {\n // i.e. we mark it to be saved\n nodeConversionMap[nodeKey] = keptNodes;\n keptNodes += 1;\n }\n }\n }\n return nodeConversionMap;\n }\n /**\n * Retrieves a layer based on either its name (unique) or index.\n *\n * Indices are based on order of horizontal graph traversal (bottom-up).\n *\n * If both `name` and `index` are specified, `index` takes precedence.\n *\n * @param name Name of layer.\n * @param index Index of layer.\n * @returns A Layer instance.\n * @throws ValueError: In case of invalid layer name or index.\n *\n * @doc {\n * heading: 'Layers',\n * subheading: 'Classes',\n * namespace: 'layers',\n * subclasses: ['LayersModel']\n * }\n */\n getLayer(name, index) {\n if (index != null) {\n if (this.layers.length <= index) {\n throw new ValueError(`Was asked to retrieve layer at index ${index}, but model only ` +\n `has ${this.layers.length} layer(s).`);\n }\n else {\n return this.layers[index];\n }\n }\n else {\n if (name == null) {\n throw new ValueError('Provide either a layer name or layer index');\n }\n }\n for (const layer of this.layers) {\n if (layer.name === name) {\n return layer;\n }\n }\n throw new ValueError(`No such layer: ${name}`);\n }\n /**\n * Retrieves the Container's current loss values.\n *\n * Used for regularizers during training.\n */\n calculateLosses() {\n // Porting Node: This is an augmentation to Container.loss in PyKeras.\n // In PyKeras, Container.loss returns symbolic tensors. Here a concrete\n // Tensor (specifically Scalar) values are returned. 
This is due to the\n // imperative backend.\n return tidy(() => {\n const losses = [];\n for (const layer of this.layers) {\n for (let nodeIndex = 0; nodeIndex < layer.inboundNodes.length; ++nodeIndex) {\n const nodeKey = Container.nodeKey(layer, nodeIndex);\n if (this.containerNodes.has(nodeKey)) {\n losses.push(...layer.calculateLosses());\n }\n }\n }\n // TODO(cais): Add any unconditional model-level losses?\n return losses;\n });\n }\n getConfig() {\n const config = { name: this.name };\n // Build a map from layer unique name (self._node_key)\n // to the index of the nodes that are saved in the config.\n // Only nodes in container_nodes are saved.\n const nodeConversionMap = this.buildNodeConversionMap(this.layers);\n // Serialize and save the layers in layerConfigs\n const layerConfigs = [];\n for (const layer of this.layers) {\n const layerClassName = layer.getClassName();\n const layerConfig = layer.getConfig();\n const filteredInboundNodes = [];\n for (let originalNodeIndex = 0; originalNodeIndex < layer.inboundNodes.length; originalNodeIndex++) {\n const node = layer.inboundNodes[originalNodeIndex];\n const nodeKey = Container.nodeKey(layer, originalNodeIndex);\n let kwargs = {};\n if (this.containerNodes.has(nodeKey)) {\n // The node is relevant to the model:\n // add to filteredInboundNodes.\n if (node.callArgs) {\n try {\n JSON.stringify(node.callArgs);\n kwargs = node.callArgs;\n }\n catch (err) {\n console.warn(`Layer ${layer.name} was passed ` +\n `non-serializable keyword arguments: ` +\n `${node.callArgs}. They will not be included ` +\n `in the serialized model (and thus will be ` +\n `missing at deserialization time).`);\n kwargs = {};\n }\n }\n if (node.inboundLayers.length > 0) {\n const nodeData = [];\n for (let i = 0; i < node.inboundLayers.length; i++) {\n const inboundLayer = node.inboundLayers[i];\n const nodeIndex = node.nodeIndices[i];\n const tensorIndex = node.tensorIndices[i];\n const nodeKey = Container.nodeKey(inboundLayer, nodeIndex);\n let newNodeIndex = nodeConversionMap[nodeKey];\n if (newNodeIndex == null) {\n newNodeIndex = 0;\n }\n nodeData.push([inboundLayer.name, newNodeIndex, tensorIndex, kwargs]);\n }\n filteredInboundNodes.push(nodeData);\n }\n }\n }\n const dict = {};\n dict['name'] = layer.name;\n dict['className'] = layerClassName;\n dict['config'] = layerConfig;\n dict['inboundNodes'] = filteredInboundNodes;\n layerConfigs.push(dict);\n }\n config['layers'] = layerConfigs;\n // Gather info about inputs and outputs\n const modelInputs = [];\n for (let i = 0; i < this.inputLayers.length; i++) {\n const layer = this.inputLayers[i];\n const nodeIndex = this.inputLayersNodeIndices[i];\n const nodeKey = Container.nodeKey(layer, nodeIndex);\n if (!this.containerNodes.has(nodeKey)) {\n continue;\n }\n let newNodeIndex = nodeConversionMap[nodeKey];\n if (newNodeIndex === null || newNodeIndex === undefined) {\n newNodeIndex = 0;\n }\n const tensorIndex = this.inputLayersTensorIndices[i];\n modelInputs.push([layer.name, newNodeIndex, tensorIndex]);\n }\n config['inputLayers'] = modelInputs;\n const modelOutputs = [];\n for (let i = 0; i < this.outputLayers.length; i++) {\n const layer = this.outputLayers[i];\n const nodeIndex = this.outputLayersNodeIndices[i];\n const nodeKey = Container.nodeKey(layer, nodeIndex);\n if (!this.containerNodes.has(nodeKey)) {\n continue;\n }\n let newNodeIndex = nodeConversionMap[nodeKey];\n if (newNodeIndex === null || newNodeIndex === undefined) {\n newNodeIndex = 0;\n }\n const tensorIndex = 
this.outputLayersTensorIndices[i];\n modelOutputs.push([layer.name, newNodeIndex, tensorIndex]);\n }\n config['outputLayers'] = modelOutputs;\n return config;\n }\n /**\n * Instantiates a LayersModel from its config (output of `get_config()`).\n * @param cls the class to create\n * @param config LayersModel config dictionary.\n * @param customObjects An optional dictionary of custom objects.\n * @param fastWeightInit Optional flag to use fast weight initialization\n * during deserialization. This is applicable to cases in which\n * the initialization will be immediately overwritten by loaded weight\n * values. Default: `false`.\n * @returns A LayersModel instance.\n * @throws ValueError: In case of improperly formatted config dict.\n */\n /** @nocollapse */\n static fromConfig(cls, config, customObjects = {}, fastWeightInit = false) {\n // Layer instances created during\n // the graph reconstruction process\n const createdLayers = {};\n // Dictionary mapping layer instances to\n // node data that specifies a layer call.\n // It acts as a queue that maintains any unprocessed\n // layer call until it becomes possible to process it\n // (i.e. until the input tensors to the call all exist).\n const unprocessedNodes = {};\n function addUnprocessedNode(layer, nodeData) {\n if (!(layer.name in unprocessedNodes)) {\n unprocessedNodes[layer.name] = [nodeData];\n }\n else {\n unprocessedNodes[layer.name].push(nodeData);\n }\n }\n function processNode(layer, nodeData) {\n const inputTensors = [];\n let kwargs;\n for (const inputData of nodeData) {\n const inboundLayerName = inputData[0];\n const inboundNodeIndex = inputData[1];\n const inboundTensorIndex = inputData[2];\n kwargs = inputData[3] == null ?\n {} :\n inputData[3];\n if (!(inboundLayerName in createdLayers)) {\n addUnprocessedNode(layer, nodeData);\n return;\n }\n const inboundLayer = createdLayers[inboundLayerName];\n if (inboundLayer.inboundNodes.length <= inboundNodeIndex) {\n addUnprocessedNode(layer, nodeData);\n return;\n }\n const inboundNode = inboundLayer.inboundNodes[inboundNodeIndex];\n inputTensors.push(inboundNode.outputTensors[inboundTensorIndex]);\n }\n // Call layer on its inputs, thus creating the node\n // and building the layer if needed.\n // Note: This has Eager vs Graph Implications.\n if (inputTensors.length > 0) {\n layer.apply(generic_utils.singletonOrArray(inputTensors), kwargs); // was ** kwargs\n }\n }\n /**\n * Deserialize a layer, then call it on appropriate inputs.\n * @param layerData: layer config dict.\n * @throws ValueError: In case of improperly formatted `layer_data`\n * dict.\n */\n function processLayer(layerData) {\n const layerName = layerData['name'];\n // Instantiate layer.\n const layer = deserializeLayer(layerData, config['customObjects'] != null ?\n config['customObjects'] :\n {});\n layer.setFastWeightInitDuringBuild(fastWeightInit);\n createdLayers[layerName] = layer;\n // Gather layer inputs.\n const inboundNodesData = layerData['inboundNodes'];\n inboundNodesData.forEach(nodeData => {\n if (!(nodeData instanceof Array)) {\n throw new ValueError(`Corrupted configuration, expected array for nodeData: ${nodeData}`);\n }\n // We don't process nodes (i.e. 
make layer calls)\n // on the fly because the inbound node may not yet exist,\n // in case of layer shared at different topological depths\n // (e.g.a model such as A(B(A(B(x)))))\n addUnprocessedNode(layer, nodeData);\n });\n }\n // First, we create all layers and enqueue nodes to be processed.\n const name = config['name'];\n const layersFromConfig = config['layers'];\n for (const layerData of layersFromConfig) {\n processLayer(layerData);\n }\n // Then we process nodes in order of layer depth.\n // Nodes that cannot yet be processed(if the inbound node\n // does not yet exist) are re - enqueued, and the process\n // is repeated until all nodes are processed.\n while (!generic_utils.isObjectEmpty(unprocessedNodes)) {\n for (const layerData of layersFromConfig) {\n const layer = createdLayers[layerData['name']];\n if (layer.name in unprocessedNodes) {\n const currentUnprocessedNodesForLayer = unprocessedNodes[layer.name];\n delete unprocessedNodes[layer.name];\n for (const nodeData of currentUnprocessedNodesForLayer) {\n processNode(layer, nodeData);\n }\n }\n }\n }\n const inputTensors = [];\n const outputTensors = [];\n const inputLayersFromConfig = config['inputLayers'];\n for (const layerData of inputLayersFromConfig) {\n const layerName = layerData[0];\n const nodeIndex = layerData[1];\n const tensorIndex = layerData[2];\n generic_utils.assert(layerName in createdLayers);\n const layer = createdLayers[layerName];\n const layerOutputTensors = layer.inboundNodes[nodeIndex].outputTensors;\n inputTensors.push(layerOutputTensors[tensorIndex]);\n }\n const outputLayersFromConfig = config['outputLayers'];\n for (const layerData of outputLayersFromConfig) {\n const layerName = layerData[0];\n const nodeIndex = layerData[1];\n const tensorIndex = layerData[2];\n generic_utils.assert(layerName in createdLayers);\n const layer = createdLayers[layerName];\n const layerOutputTensors = layer.inboundNodes[nodeIndex].outputTensors;\n outputTensors.push(layerOutputTensors[tensorIndex]);\n }\n return new cls({ inputs: inputTensors, outputs: outputTensors, name });\n }\n /**\n * Determine whether the container is stateful.\n *\n * Porting Note: this is the equivalent of the stateful @property of\n * the Container class in PyKeras.\n */\n get stateful() {\n // Porting Note: This check is to prevent inadvertent setting of the\n // _stateful property of the Container instance.\n if (this._stateful) {\n throw new ValueError('Container instance unexpectedly has _stateful = true. The ' +\n 'statefulness of a Container is determined by the Layers it ' +\n 'contains. 
Its _stateful property must remain the default false.');\n }\n for (const layer of this.layers) {\n if (layer.stateful) {\n return true;\n }\n }\n return false;\n }\n /**\n * Reset the state of all stateful constituent layers (if any).\n *\n * Examples of stateful layers include RNN layers whose `stateful` property\n * is set as `true`.\n */\n resetStates() {\n tidy(() => {\n this.layers.forEach(layer => {\n // tslint:disable:no-any\n if (layer.stateful) {\n layer.resetStates();\n }\n // tslint:enable:no-any\n });\n });\n }\n}\n//# sourceMappingURL=container.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport { dispose, mul, tensor1d, tidy } from '@tensorflow/tfjs-core';\nfunction standardizeSampleOrClassWeights(xWeight, outputNames, weightType) {\n const numOutputs = outputNames.length;\n if (xWeight == null || (Array.isArray(xWeight) && xWeight.length === 0)) {\n return outputNames.map(name => null);\n }\n if (numOutputs === 1) {\n if (Array.isArray(xWeight) && xWeight.length === 1) {\n return xWeight;\n }\n else if (typeof xWeight === 'object' && outputNames[0] in xWeight) {\n return [xWeight[outputNames[0]]];\n }\n else {\n return [xWeight];\n }\n }\n if (Array.isArray(xWeight)) {\n if (xWeight.length !== numOutputs) {\n throw new Error(`Provided ${weightType} is an array of ${xWeight.length} ` +\n `element(s), but the model has ${numOutputs} outputs. ` +\n `Make sure a set of weights is provided for each model output.`);\n }\n return xWeight;\n }\n else if (typeof xWeight === 'object' && Object.keys(xWeight).length > 0 &&\n typeof xWeight[Object.keys(xWeight)[0]] ===\n 'object') {\n const output = [];\n outputNames.forEach(outputName => {\n if (outputName in xWeight) {\n output.push(xWeight[outputName]);\n }\n else {\n output.push(null);\n }\n });\n return output;\n }\n else {\n throw new Error(`The model has multiple (${numOutputs}) outputs, ` +\n `so ${weightType} must be either an array with ` +\n `${numOutputs} elements or an object with ${outputNames} keys. ` +\n `Provided ${weightType} not understood: ${JSON.stringify(xWeight)}`);\n }\n}\n/**\n * Standardize class weighting objects.\n *\n * This function takes a single class-weighting object, an array of them,\n * or a map from output name to class-weighting object. It compares it to the\n * output name(s) of the model, base on which it outputs an array of\n * class-weighting objects of which the length matches the number of outputs.\n *\n * @param classWeight Input class-weighting object(s).\n * @param outputNames All output name(s) of the model.\n * @return An array of class-weighting objects. The length of the array matches\n * the model's number of outputs.\n */\nexport function standardizeClassWeights(classWeight, outputNames) {\n return standardizeSampleOrClassWeights(classWeight, outputNames, 'classWeight');\n}\nexport function standardizeSampleWeights(classWeight, outputNames) {\n return standardizeSampleOrClassWeights(classWeight, outputNames, 'sampleWeight');\n}\n/**\n * Standardize by-sample and/or by-class weights for training.\n *\n * Note that this function operates on one model output at a time. 
For a model\n * with multiple outputs, you must call this function multiple times.\n *\n * @param y The target tensor that the by-sample and/or by-class weight is for.\n * The values of y are assumed to encode the classes, either directly\n * as an integer index, or as one-hot encoding.\n * @param sampleWeight By-sample weights.\n * @param classWeight By-class weights: an object mapping class indices\n * (integers) to a weight (float) to apply to the model's loss for the\n * samples from this class during training. This can be useful to tell the\n * model to \"pay more attention\" to samples from an under-represented class.\n * @param sampleWeightMode The mode for the sample weights.\n * @return A Promise of weight tensor, of which the size of the first dimension\n * matches that of `y`.\n */\nexport async function standardizeWeights(y, sampleWeight, classWeight, sampleWeightMode) {\n if (sampleWeight != null || sampleWeightMode != null) {\n // TODO(cais): Once 'temporal' mode is implemented, document it in the doc\n // string.\n throw new Error('Support sampleWeight is not implemented yet');\n }\n if (classWeight != null) {\n // Apply class weights per sample.\n const yClasses = tidy(() => {\n if (y.shape.length === 1) {\n // Assume class indices.\n return y.clone();\n }\n else if (y.shape.length === 2) {\n if (y.shape[1] > 1) {\n // Assume one-hot encoding of classes.\n const axis = 1;\n return y.argMax(axis);\n }\n else if (y.shape[1] === 1) {\n // Class index.\n return y.reshape([y.shape[0]]);\n }\n else {\n throw new Error(`Encountered unexpected last-dimension size (${y.shape[1]}) ` +\n `during handling of class weights. The size is expected to be ` +\n `>= 1.`);\n }\n }\n else {\n throw new Error(`Unexpected rank of target (y) tensor (${y.rank}) during ` +\n `handling of class weights. The rank is expected to be 1 or 2.`);\n }\n });\n const yClassIndices = Array.from(await yClasses.data());\n dispose(yClasses);\n const classSampleWeight = [];\n yClassIndices.forEach(classIndex => {\n if (classWeight[classIndex] == null) {\n throw new Error(`classWeight must contain all classes in the training data. 
` +\n `The class ${classIndex} exists in the data but not in ` +\n `classWeight`);\n }\n else {\n classSampleWeight.push(classWeight[classIndex]);\n }\n });\n return tensor1d(classSampleWeight, 'float32');\n }\n else {\n return null;\n }\n}\n/**\n * Apply per-sample weights on the loss values from a number of samples.\n *\n * @param losses Loss tensor of shape `[batchSize]`.\n * @param sampleWeights Per-sample weight tensor of shape `[batchSize]`.\n * @returns Tensor of the same shape as`losses`.\n */\nexport function computeWeightedLoss(losses, sampleWeights) {\n return mul(losses, sampleWeights);\n}\n//# sourceMappingURL=training_utils.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Interfaces and methods for training models using TensorFlow.js datasets.\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { scalar } from '@tensorflow/tfjs-core';\nimport { configureCallbacks, standardizeCallbacks } from '../base_callbacks';\nimport { NotImplementedError, ValueError } from '../errors';\nimport { disposeTensorsInLogs } from '../logs';\nimport { singletonOrArray, toList } from '../utils/generic_utils';\nimport { standardizeClassWeights, standardizeWeights } from './training_utils';\n// Default batch size used during tensor-based validation.\nconst DEFAULT_VALIDATION_BATCH_SIZE = 32;\n/**\n * Standardize the output of a dataset iterator for use by\n * LayersModel.fitDataset().\n *\n * @param model: A `tf.LayersModel` object.\n * @param iteratorOut The output of a dataset iterator. It is required to be\n * an object of the form `{xs: TensorOrArrayOrMap, ys:\n * TensorOrArrayOrMap}`, where `TensorOrArrayOrMap` is a single `tf.Tensor`,\n * a `tf.Tensor[]`, or a flat map from string names to `tf.Tensor`s.\n * @returns A flat array of `tf.Tensor` objects: the input `tf.Tensor`s\n * followed by the target `tf.Tensor`s. When `tf.Tensor`s are provided\n * as a map, the order in the resulting array is taken from the `inputNames`\n * and `outputNames` of the model.\n */\nfunction standardizeDataIteratorOutput(\n// Type `model` as `any` here to avoid circular dependency w/\n// training.ts.\n// tslint:disable-next-line:no-any\nmodel, iteratorOut) {\n let xs;\n let ys;\n const iteratorOutObj = iteratorOut;\n xs = iteratorOutObj['xs'];\n ys = iteratorOutObj['ys'];\n tfc.util.assert(xs != null && ys != null, () => 'A Dataset iterator for fitDataset() is expected to generate ' +\n 'objects of the form `{xs: xVal, ys: yVal}`, where the two ' +\n 'values may be `tf.Tensor`, an array of Tensors, or a map of ' +\n 'string to Tensor. The provided Dataset instead generates ' +\n `${iteratorOut}`);\n const flattenedXs = flattenTensorOrArrayOrMap('input', model.inputNames, xs);\n const flattenedYs = flattenTensorOrArrayOrMap('output', model.outputNames, ys);\n const batchSize = flattenedXs[0].shape[0];\n tfc.util.assert(flattenedXs.length === model.inputs.length, () => `LayersModel has ${model.inputs.length} inputs, but the dataset ` +\n `provides ${flattenedXs.length} inputs. (Expected input keys: ` +\n `${JSON.stringify(model.inputNames)})`);\n tfc.util.assert(flattenedYs.length === model.outputs.length, () => `LayersModel has ${model.outputs.length} outputs, but the dataset ` +\n `provides ${flattenedYs.length} outputs. 
(Expected output keys: ` +\n `${JSON.stringify(model.outputNames)})`);\n for (let xIndex = 0; xIndex < flattenedXs.length; xIndex++) {\n tfc.util.assert(flattenedXs[xIndex].shape[0] === batchSize, () => `Batch size mismatch: input ` +\n `${model.inputNames[xIndex]} has ${flattenedXs[xIndex].shape[0]}; ` +\n `expected ${batchSize} based on input ${model.inputNames[0]}.`);\n }\n for (let yIndex = 0; yIndex < flattenedYs.length; yIndex++) {\n tfc.util.assert(flattenedYs[yIndex].shape[0] === batchSize, () => `Batch size mismatch: output ` +\n `${model.outputNames[yIndex]} has ${flattenedYs[yIndex].shape[0]}; ` +\n `expected ${batchSize} based on input ${model.inputNames[0]}.`);\n }\n return { xs: flattenedXs, ys: flattenedYs };\n}\nfunction flattenTensorOrArrayOrMap(inputOrOutput, names, values) {\n if (values instanceof tfc.Tensor) {\n return [values];\n }\n else if (Array.isArray(values)) {\n tfc.util.assert(values.length === names.length, () => `Received an array of ${values.length} Tensors, but expected ${names.length} to match the ${inputOrOutput} keys ${names}.`);\n return values;\n }\n else {\n const result = [];\n // Check that all the required keys are available.\n for (const name of names) {\n if (values[name] == null) {\n throw new ValueError(`The feature data generated by the dataset lacks the required ` +\n `${inputOrOutput} key '${name}'.`);\n }\n result.push(values[name]);\n }\n return result;\n }\n}\nfunction standardizeTensorValidationData(data) {\n if (data.length === 3) {\n throw new NotImplementedError('Validation with sample weights is not implemented yet.');\n }\n return { xs: data[0], ys: data[1] };\n}\nexport async function fitDataset(\n// Type `model` as `any` here to avoid circular dependency w/\n// training.ts.\n// tslint:disable-next-line:no-any\nmodel, dataset, args) {\n const hasBatchesPerEpoch = args.batchesPerEpoch != null;\n tfc.util.assert(model.optimizer != null, () => 'You must compile a model before training/testing. Use ' +\n 'LayersModel.compile(modelCompileConfig).');\n tfc.util.assert(args != null, () => `For fitDataset(), the 2nd argument (config) is required, ` +\n `but it is not provided in this call.`);\n tfc.util.assert(args.epochs != null && args.epochs > 0 && Number.isInteger(args.epochs), () => `For fitDataset(), config.epochs is expected to be a positive ` +\n `integer, but got ${args.epochs}`);\n tfc.util.assert(!hasBatchesPerEpoch ||\n (args.batchesPerEpoch > 0 && Number.isInteger(args.batchesPerEpoch)), () => `For fitDataset(), config.batchesPerEpoch is expected to be a ` +\n `positive integer if specified, but got ${args.batchesPerEpoch}`);\n tfc.util.assert(\n // tslint:disable-next-line:no-any\n args['validationSplit'] == null, () => '`validationSplit` is not supported by `fitDataset()`. 
' +\n 'Use validationData instead.');\n if (model.isTraining) {\n throw new Error('Cannot start training because another fit() call is ongoing.');\n }\n model.isTraining = true;\n try {\n const doValidation = args.validationData != null;\n let valXs;\n let valYs;\n if (doValidation) {\n if (isDatasetObject(args.validationData)) {\n tfc.util.assert(args.validationBatches == null ||\n (args.validationBatches > 0 &&\n Number.isInteger(args.validationBatches)), () => `For fitDataset() with dataset-based validation, ` +\n `config.validationBatches is expected not to be provided, ` +\n `or to be a positive integer, ` +\n `but got ${args.validationBatches}`);\n }\n else {\n const validationData = standardizeTensorValidationData(args.validationData);\n valXs = validationData.xs;\n valYs = validationData.ys;\n }\n }\n const trainFunction = model.makeTrainFunction();\n const outLabels = model.getDedupedMetricsNames();\n let callbackMetrics;\n if (doValidation) {\n callbackMetrics =\n outLabels.slice().concat(outLabels.map(n => 'val_' + n));\n }\n else {\n callbackMetrics = outLabels.slice();\n }\n const callbacks = standardizeCallbacks(args.callbacks, args.yieldEvery);\n const verbose = args.verbose == null ? 1 : args.verbose;\n const { callbackList, history } = configureCallbacks(callbacks, verbose, args.epochs, null, null, getStepsPerEpoch(dataset, args), null, // Batch size determined by the dataset itself.\n doValidation, callbackMetrics);\n callbackList.setModel(model);\n model.history = history;\n await callbackList.onTrainBegin();\n model.stopTraining_ = false;\n let epoch = args.initialEpoch == null ? 0 : args.initialEpoch;\n let dataIterator = await dataset.iterator();\n while (epoch < args.epochs) {\n const epochLogs = {};\n await callbackList.onEpochBegin(epoch);\n let stepsDone = 0;\n let batchIndex = 0;\n if (!hasBatchesPerEpoch) {\n dataIterator = await dataset.iterator();\n }\n while (hasBatchesPerEpoch ? stepsDone < args.batchesPerEpoch : true) {\n const iteratorOut = await dataIterator.next();\n // If `batchesPerEpoch` is specified, the dataset should not be\n // exhausted until all epoches are done.\n if (hasBatchesPerEpoch && iteratorOut.done) {\n console.warn('You provided `batchesPerEpoch` as ' +\n `${args.batchesPerEpoch}, ` +\n 'but your dataset iterator ran out of data after ' +\n `${stepsDone} batches; ` +\n 'interrupting training. Make sure that your ' +\n 'dataset can generate at least `batchesPerEpoch * epochs` ' +\n 'batches (in this case, ' +\n `${args.batchesPerEpoch * args.epochs} batches). 
` +\n 'You may need to use the repeat() function when building ' +\n 'your dataset.');\n break;\n }\n if (iteratorOut.value != null) {\n const { xs, ys } = standardizeDataIteratorOutput(model, iteratorOut.value);\n const batchLogs = {};\n batchLogs['batch'] = batchIndex;\n batchLogs['size'] = xs[0].shape[0];\n await callbackList.onBatchBegin(batchIndex, batchLogs);\n const sampleWeights = [];\n if (args.classWeight != null) {\n const standardClassWeights = standardizeClassWeights(args.classWeight, model.outputNames);\n for (let i = 0; i < standardClassWeights.length; ++i) {\n sampleWeights.push(await standardizeWeights(ys[i], null, standardClassWeights[i]));\n }\n }\n // Train on batch.\n const ins = xs.concat(ys).concat(sampleWeights);\n const outs = trainFunction(ins);\n tfc.dispose(ins);\n for (let i = 0; i < outLabels.length; ++i) {\n const label = outLabels[i];\n const out = outs[i];\n batchLogs[label] = out;\n tfc.keep(out);\n }\n await callbackList.onBatchEnd(batchIndex, batchLogs);\n disposeTensorsInLogs(batchLogs);\n batchIndex++;\n stepsDone++;\n }\n if (hasBatchesPerEpoch ? stepsDone >= args.batchesPerEpoch :\n iteratorOut.done) {\n // Epoch finished. Perform validation.\n if (doValidation) {\n let valOuts;\n if (isDatasetObject(args.validationData)) {\n valOuts = toList(await model.evaluateDataset(args.validationData, { batches: args.validationBatches }));\n }\n else {\n valOuts = toList(model.evaluate(valXs, valYs, {\n batchSize: args.validationBatchSize == null ?\n DEFAULT_VALIDATION_BATCH_SIZE :\n args.validationBatchSize,\n verbose: 0\n }));\n }\n for (let i = 0; i < model.metricsNames.length; ++i) {\n epochLogs[`val_${model.metricsNames[i]}`] = valOuts[i];\n }\n }\n // Call `break` to exit one epoch lopp after validation is done. If\n // config.batchesPerEpoch is specified, an epoch while loop will\n // stop when `stepsDone >= config.batchesPerEpoch`. When\n // config.batchesPerEpoch is not provided, the following `break` is\n // required to exit the while lopp after dataset is exhausted.\n break;\n }\n if (model.stopTraining_) {\n break;\n }\n }\n await callbackList.onEpochEnd(epoch, epochLogs);\n epoch++;\n if (model.stopTraining_) {\n break;\n }\n }\n await callbackList.onTrainEnd();\n await model.history.syncData();\n return model.history;\n }\n finally {\n model.isTraining = false;\n }\n}\n/** Helper function that determines number of steps (batches) per epoch. 
*/\nfunction getStepsPerEpoch(dataset, args) {\n // Attempt to determine # of batches in an epoch.\n let stepsPerEpoch = null;\n if (args.batchesPerEpoch != null) {\n stepsPerEpoch = args.batchesPerEpoch;\n }\n else if (Number.isFinite(dataset.size)) {\n stepsPerEpoch = dataset.size;\n }\n return stepsPerEpoch;\n}\n// Check if provided object is a Dataset object by checking its .iterator\n// element.\nfunction isDatasetObject(dataset) {\n return (typeof dataset.iterator === 'function');\n}\n// Check if provided object is a LazyIterator object by checking it's .next\n// element.\nfunction isLazyIteratorObject(iterator) {\n return (typeof iterator.next === 'function');\n}\nexport async function evaluateDataset(\n// Type `model` as `any` here to avoid circular dependency w/\n// training.ts.\n// tslint:disable-next-line:no-any\nmodel, dataset, args) {\n args = args || {};\n const hasBatches = args.batches != null;\n const f = model.testFunction;\n let outs = [];\n if (args.verbose > 0) {\n throw new NotImplementedError('Verbose mode is not implemented yet.');\n }\n tfc.util.assert(!hasBatches || (args.batches > 0 && Number.isInteger(args.batches)), () => 'Test loop expects `batches` to be a positive integer, but ' +\n `received ${JSON.stringify(args.batches)}`);\n const dataIterator = isLazyIteratorObject(dataset) ?\n dataset :\n await dataset.iterator();\n // Keeps track of number of examples used in this evaluation.\n let numExamples = 0;\n let batch = 0;\n while (hasBatches ? batch < args.batches : true) {\n const iteratorOut = await dataIterator.next();\n outs = tfc.tidy(() => {\n if (iteratorOut.value) {\n // TODO(cais): Once real dataset is available, use\n // `map(x => standardizeDataIteratorOutput(model, x).map(f)`.\n const { xs, ys } = standardizeDataIteratorOutput(model, iteratorOut.value);\n const xsAndYs = xs.concat(ys);\n const batchOuts = tfc.tidy(() => f(xsAndYs));\n tfc.dispose(xsAndYs);\n if (batch === 0) {\n for (let i = 0; i < batchOuts.length; ++i) {\n outs.push(scalar(0));\n }\n }\n const batchSize = xsAndYs[0].shape[0];\n for (let i = 0; i < batchOuts.length; ++i) {\n const batchOut = batchOuts[i];\n const oldScalar = outs[i];\n outs[i] =\n tfc.tidy(() => tfc.add(outs[i], tfc.mul(batchSize, batchOut)));\n if (batch > 0) {\n tfc.dispose(oldScalar);\n }\n }\n tfc.dispose(batchOuts);\n numExamples += batchSize;\n ++batch;\n }\n return outs;\n });\n if (iteratorOut.done) {\n if (hasBatches) {\n console.warn('Your dataset iterator ran out of data during evaluateDataset(). ' +\n 'Interrupting evalution. Make sure that your ' +\n 'dataset can generate at least `batches` ' +\n `batches (in this case, ${args.batches} batches). 
` +\n 'You may need to use the repeat() function when building ' +\n 'your dataset.');\n }\n break;\n }\n }\n for (let i = 0; i < outs.length; ++i) {\n const oldScalar = outs[i];\n outs[i] = tfc.div(outs[i], numExamples);\n tfc.dispose(oldScalar);\n }\n return singletonOrArray(outs);\n}\n//# sourceMappingURL=training_dataset.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Interfaces and methods for training models using tf.Tensor objects.\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { Tensor, tensor1d, util } from '@tensorflow/tfjs-core';\nimport { expandDims, gather, sliceAlongFirstAxis } from '../backend/tfjs_backend';\nimport { configureCallbacks, standardizeCallbacks } from '../base_callbacks';\nimport { NotImplementedError, ValueError } from '../errors';\nimport { disposeTensorsInLogs } from '../logs';\nimport { range } from '../utils/math_utils';\nexport function checkBatchSize(batchSize) {\n tfc.util.assert(batchSize > 0 && Number.isInteger(batchSize), () => `batchSize is required to be a positive integer, but got ${batchSize}`);\n}\n/**\n * Slice a Tensor or an Array of Tensors, by start and stop indices.\n *\n * Porting Note: The `_slice_arrays` function in PyKeras is covered by this\n * function and `sliceArraysByIndices()` together.\n *\n * @param arrays: the input.\n * @param start: the starting index (inclusive).\n * @param stop: the stopping index (exclusive).\n * @returns The result of the slicing. If `arrays` is an `Array` of\n * `tf.Tensor`s, the slicing will be applied to all elements of the `Array`\n * in the same way.\n */\nexport function sliceArrays(arrays, start, stop) {\n if (arrays == null) {\n return [null];\n }\n else if (Array.isArray(arrays)) {\n return arrays.map(array => sliceAlongFirstAxis(array, start, stop - start));\n }\n else { // Tensor.\n return sliceAlongFirstAxis(arrays, start, stop - start);\n }\n}\n/**\n * Slice a Tensor or an Array of Tensors, by random-order indices.\n *\n * Porting Note: The `_slice_arrays` function in PyKeras is covered by this\n * function and `sliceArrays()` together.\n *\n * @param arrays The input `tf.Tensor` or `Array` of `tf.Tensor`s to slice.\n * If an `Array` of `tf.Tensor`s, all `tf.Tensor`s will be sliced in the\n * same fashion.\n * @param indices The indices to use for slicing along the first (batch)\n * dimension.\n * @returns Result(s) of the slicing.\n */\nexport function sliceArraysByIndices(arrays, indices) {\n return tfc.tidy(() => {\n if (arrays == null) {\n return null;\n }\n else if (Array.isArray(arrays)) {\n return arrays.map(array => sliceArraysByIndices(array, indices));\n }\n else {\n // TODO(cais): indices should be a pre-constructed Tensor1D to avoid\n // tensor1d() calls.\n return gather(arrays, indices.dtype === 'int32' ? indices : indices.toInt());\n }\n });\n}\n/**\n * Returns a list of batch indices (tuples of indices).\n * @param size: Integer, total size of the data to slice into batches.\n * @param batchSize: Integer, batch size.\n * @returns An Array of [batchStart, batchEnd] tuples. batchStart is\n * inclusive; batchEnd is exclusive. 
I.e., each batch consists of indices x\n * that satisfy batchStart <= x < batchEnd.\n */\nexport function makeBatches(size, batchSize) {\n const output = [];\n let batchStart = 0;\n let batchEnd = null;\n while (batchStart < size) {\n batchEnd = batchStart + batchSize;\n if (batchEnd >= size) {\n batchEnd = size;\n }\n output.push([batchStart, batchEnd]);\n batchStart = batchEnd;\n }\n return output;\n}\n/**\n * Abstract fit function for `f(ins)`.\n * @param f A Function returning a list of tensors. For training, this\n * function is expected to perform the updates to the variables.\n * @param ins List of tensors to be fed to `f`.\n * @param outLabels List of strings, display names of the outputs of `f`.\n * @param batchSize Integer batch size or `== null` if unknown. Default : 32.\n * @param epochs Number of times to iterate over the data. Default : 1.\n * @param verbose Verbosity mode: 0, 1, or 2. Default: 1.\n * @param callbacks List of callbacks to be called during training.\n * @param valF Function to call for validation.\n * @param valIns List of tensors to be fed to `valF`.\n * @param shuffle Whether to shuffle the data at the beginning of every\n * epoch. Default : true.\n * @param callbackMetrics List of strings, the display names of the metrics\n * passed to the callbacks. They should be the concatenation of the\n * display names of the outputs of `f` and the list of display names\n * of the outputs of `valF`.\n * @param initialEpoch Epoch at which to start training (useful for\n * resuming a previous training run). Default : 0.\n * @param stepsPerEpoch Total number of steps (batches on samples) before\n * declaring one epoch finished and starting the next epoch. Ignored with\n * the default value of `undefined` or `null`.\n * @param validationSteps Number of steps to run validation for (only if\n * doing validation from data tensors). 
Not applicable for tfjs-layers.\n * @returns A `History` object.\n */\nasync function fitLoop(\n// Type `model` as `any` here to avoid circular dependency w/ training.ts.\n// tslint:disable-next-line:no-any\nmodel, f, ins, outLabels, batchSize, epochs, verbose, callbacks, valF, valIns, shuffle, callbackMetrics, initialEpoch, stepsPerEpoch, validationSteps) {\n if (batchSize == null) {\n batchSize = 32;\n }\n if (epochs == null) {\n epochs = 1;\n }\n if (shuffle == null) {\n shuffle = true;\n }\n if (initialEpoch == null) {\n initialEpoch = 0;\n }\n // TODO(cais): Change const to let below when implementing validation.\n let doValidation = false;\n if (valF != null && valIns != null) {\n doValidation = true;\n // TODO(cais): verbose message.\n }\n if (validationSteps != null) {\n doValidation = true;\n if (stepsPerEpoch == null) {\n throw new ValueError('Can only use `validationSteps` when doing step-wise training, ' +\n 'i.e., `stepsPerEpoch` must be set.');\n }\n }\n const numTrainSamples = model.checkNumSamples(ins, batchSize, stepsPerEpoch, 'steps_per_epoch');\n let indexArray;\n if (numTrainSamples != null) {\n indexArray = range(0, numTrainSamples);\n }\n if (verbose == null) {\n verbose = 1;\n }\n const { callbackList, history } = configureCallbacks(callbacks, verbose, epochs, initialEpoch, numTrainSamples, stepsPerEpoch, batchSize, doValidation, callbackMetrics);\n callbackList.setModel(model);\n model.history = history;\n await callbackList.onTrainBegin();\n model.stopTraining_ = false;\n // TODO(cais): Take care of callbacks.validation_data as in PyKeras.\n // TODO(cais): Pre-convert feeds for performance as in PyKeras.\n for (let epoch = initialEpoch; epoch < epochs; ++epoch) {\n await callbackList.onEpochBegin(epoch);\n const epochLogs = {};\n if (stepsPerEpoch != null) {\n throw new NotImplementedError('stepsPerEpoch mode is not implemented yet.');\n }\n else {\n if (shuffle === 'batch') {\n throw new NotImplementedError('batch shuffling is not implemneted yet');\n }\n else if (shuffle) {\n util.shuffle(indexArray);\n }\n // Convert the potentially shuffled indices to Tensor1D, to avoid the\n // cost of repeated creation of Array1Ds later on.\n const epochIndexArray1D = tensor1d(indexArray);\n const batches = makeBatches(numTrainSamples, batchSize);\n for (let batchIndex = 0; batchIndex < batches.length; ++batchIndex) {\n const batchLogs = {};\n await callbackList.onBatchBegin(batchIndex, batchLogs);\n tfc.tidy(() => {\n const batchStart = batches[batchIndex][0];\n const batchEnd = batches[batchIndex][1];\n const batchIds = sliceAlongFirstAxis(epochIndexArray1D, batchStart, batchEnd - batchStart);\n batchLogs['batch'] = batchIndex;\n batchLogs['size'] = batchEnd - batchStart;\n // TODO(cais): In ins, train flag can be a number, instead of an\n // Tensor? 
Do we need to handle this in tfjs-layers?\n const insBatch = sliceArraysByIndices(ins, batchIds);\n const outs = f(insBatch);\n for (let i = 0; i < outLabels.length; ++i) {\n const label = outLabels[i];\n const out = outs[i];\n batchLogs[label] = out;\n tfc.keep(out);\n // TODO(cais): Use scope() to avoid ownership.\n }\n if (batchIndex === batches.length - 1) { // Last batch.\n if (doValidation) {\n const valOuts = model.testLoop(valF, valIns, batchSize);\n // Porting Notes: In tfjs-layers, valOuts is always an Array.\n for (let i = 0; i < outLabels.length; ++i) {\n const label = outLabels[i];\n const out = valOuts[i];\n tfc.keep(out);\n // TODO(cais): Use scope() to avoid ownership.\n epochLogs['val_' + label] = out;\n }\n }\n }\n });\n await callbackList.onBatchEnd(batchIndex, batchLogs);\n disposeTensorsInLogs(batchLogs);\n if (model.stopTraining_) {\n break;\n }\n // TODO(cais): return outs as list of Tensor.\n }\n epochIndexArray1D.dispose();\n }\n // TODO(cais): Run validation at the end of the epoch.\n await callbackList.onEpochEnd(epoch, epochLogs);\n if (model.stopTraining_) {\n break;\n }\n }\n await callbackList.onTrainEnd();\n await model.history.syncData();\n return model.history;\n}\nexport async function fitTensors(\n// Type `model` as `any` here to avoid circular dependency w/ training.ts.\n// tslint:disable-next-line:no-any\nmodel, x, y, args = {}) {\n if (model.isTraining) {\n throw new Error('Cannot start training because another fit() call is ongoing.');\n }\n model.isTraining = true;\n let inputs;\n let targets;\n let inputValX;\n let inputValY;\n let valX;\n let valY;\n let sampleWeights;\n try {\n const batchSize = args.batchSize == null ? 32 : args.batchSize;\n checkBatchSize(batchSize);\n // Validate user data.\n // TODO(cais): Support sampleWeight.\n const checkBatchAxis = false;\n const standardizedOuts = await model.standardizeUserData(x, y, args.sampleWeight, args.classWeight, checkBatchAxis, batchSize);\n inputs = standardizedOuts[0];\n targets = standardizedOuts[1];\n sampleWeights = standardizedOuts[2];\n // Prepare validation data.\n let doValidation = false;\n let valIns;\n if (args.validationData != null && args.validationData.length > 0) {\n doValidation = true;\n if (args.validationData.length === 2) {\n // config.validationData consists of valX and valY.\n inputValX = args.validationData[0];\n inputValY = args.validationData[1];\n }\n else if (args.validationData.length === 3) {\n throw new NotImplementedError('validationData including sample weights is not supported yet.');\n }\n else {\n throw new ValueError(`When passing validation data, it must contain 2 (valX, valY) ` +\n `or 3 (valX, valY, valSampleWeight) items; ` +\n `${args.validationData} is invalid.`);\n }\n const checkBatchAxis = true;\n const valStandardized = await model.standardizeUserData(inputValX, inputValY, null, /** Unused sample weights. */ null, /** Unused class weights. 
*/ checkBatchAxis, batchSize);\n valX = valStandardized[0];\n valY = valStandardized[1];\n valIns = valX.concat(valY);\n // TODO(cais): Add useLearningPhase data properly.\n }\n else if (args.validationSplit != null && args.validationSplit > 0 &&\n args.validationSplit < 1) {\n doValidation = true;\n // Porting Note: In tfjs-layers, inputs[0] is always a Tensor.\n const splitAt = Math.floor(inputs[0].shape[0] * (1 - args.validationSplit));\n const originalBatchSize = inputs[0].shape[0];\n valX = sliceArrays(inputs, splitAt, originalBatchSize);\n inputs = sliceArrays(inputs, 0, splitAt);\n valY = sliceArrays(targets, splitAt, originalBatchSize);\n targets = sliceArrays(targets, 0, splitAt);\n // TODO(cais): Once sampleWeights becomes available, slice it to get\n // valSampleWeights.\n valIns = valX.concat(valY);\n // TODO(cais): Add useLearningPhase data properly.\n }\n else if (args.validationSteps != null) {\n doValidation = true;\n // TODO(cais): Add useLearningPhase.\n }\n const ins = inputs.concat(targets).concat(sampleWeights);\n model.checkTrainableWeightsConsistency();\n // TODO(cais): Handle use_learning_phase and learning_phase?\n // Porting Note: Here we see a key deviation of tfjs-layers from\n // Keras.\n // Due to the imperative nature of tfjs-layers' backend (tfjs-core),\n // we do not construct symbolic computation graphs to embody the\n // training process. Instead, we define a function that performs the\n // training action. In PyKeras, the data (inputs and targets) are fed\n // through graph placeholders. In tfjs-layers, the data are fed as\n // function arguments. Since the function are defined below in the\n // scope, we don't have equivalents of PyKeras's\n // `_make_train_funciton`.\n const trainFunction = model.makeTrainFunction();\n const outLabels = model.getDedupedMetricsNames();\n let valFunction;\n let callbackMetrics;\n if (doValidation) {\n model.makeTestFunction();\n valFunction = model.testFunction;\n callbackMetrics =\n outLabels.slice().concat(outLabels.map(n => 'val_' + n));\n }\n else {\n valFunction = null;\n valIns = [];\n callbackMetrics = outLabels.slice();\n }\n const callbacks = standardizeCallbacks(args.callbacks, args.yieldEvery);\n const out = await fitLoop(model, trainFunction, ins, outLabels, batchSize, args.epochs, args.verbose, callbacks, valFunction, valIns, args.shuffle, callbackMetrics, args.initialEpoch, null, null);\n return out;\n }\n finally {\n model.isTraining = false;\n // Memory clean up.\n disposeNewTensors(inputs, x);\n disposeNewTensors(targets, y);\n disposeNewTensors(valX, inputValX);\n disposeNewTensors(valY, inputValY);\n if (sampleWeights != null) {\n tfc.dispose(sampleWeights);\n }\n }\n // TODO(cais): Add value to outLabels.\n}\n/**\n * Ensure tensors all have a rank of at least 2.\n *\n * If a tensor has a rank of 1, it is dimension-expanded to rank 2.\n * If any tensor has a rank of 0 (i.e., is a scalar), an error will be thrown.\n */\nexport function ensureTensorsRank2OrHigher(tensors) {\n const outs = [];\n if (tensors instanceof Tensor) {\n tensors = [tensors];\n }\n // Make Tensors at least 2D.\n for (let i = 0; i < tensors.length; ++i) {\n const tensor = tensors[i];\n if (tensor.rank === 1) {\n outs.push(expandDims(tensor, 1));\n }\n else if (tensor.rank === 0) {\n throw new Error('Expected tensor to be at least 1D, but received a 0D tensor ' +\n '(scalar).');\n }\n else {\n outs.push(tensor);\n }\n }\n return outs;\n}\n/**\n * Compare a set of tensors with a reference (old) set, discard the ones\n * in the new 
set that are not present in the reference set.\n *\n * This method is used for memory clenaup during calls such as\n * LayersModel.fit().\n *\n * @param tensors New set which may contain Tensors not present in\n * `refTensors`.\n * @param refTensors Reference Tensor set.\n */\n// TODO(cais, kangyizhang): Deduplicate with tfjs-data.\nexport function disposeNewTensors(tensors, refTensors) {\n if (tensors == null) {\n return;\n }\n const oldTensorIds = [];\n if (refTensors instanceof Tensor) {\n oldTensorIds.push(refTensors.id);\n }\n else if (Array.isArray(refTensors)) {\n refTensors.forEach(t => oldTensorIds.push(t.id));\n }\n else if (refTensors != null) {\n // `oldTensors` is a map from string name to Tensor.\n for (const name in refTensors) {\n const oldTensor = refTensors[name];\n oldTensorIds.push(oldTensor.id);\n }\n }\n const tensorsToDispose = [];\n if (tensors instanceof Tensor) {\n if (oldTensorIds.indexOf(tensors.id) === -1) {\n tensorsToDispose.push(tensors);\n }\n }\n else if (Array.isArray(tensors)) {\n tensors.forEach(t => {\n if (oldTensorIds.indexOf(t.id) === -1) {\n tensorsToDispose.push(t);\n }\n });\n }\n else if (tensors != null) {\n // `oldTensors` is a map from string name to Tensor.\n for (const name in tensors) {\n const tensor = tensors[name];\n if (oldTensorIds.indexOf(tensor.id) === -1) {\n tensorsToDispose.push(tensor);\n }\n }\n }\n tensorsToDispose.forEach(t => {\n if (!t.isDisposed) {\n t.dispose();\n }\n });\n}\n//# sourceMappingURL=training_tensors.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* Original Source: engine/training.py */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { io, Optimizer, scalar, serialization, Tensor, tensor1d, util } from '@tensorflow/tfjs-core';\nimport * as K from '../backend/tfjs_backend';\nimport { nameScope } from '../common';\nimport { NotImplementedError, RuntimeError, ValueError } from '../errors';\nimport { deserialize } from '../layers/serialization';\nimport * as losses from '../losses';\nimport * as Metrics from '../metrics';\nimport * as optimizers from '../optimizers';\nimport { checkUserDefinedMetadata } from '../user_defined_metadata';\nimport { count, pyListRepeat, singletonOrArray, toCamelCase, toSnakeCase, unique } from '../utils/generic_utils';\nimport { printSummary } from '../utils/layer_utils';\nimport { range } from '../utils/math_utils';\nimport { convertPythonicToTs } from '../utils/serialization_utils';\nimport { version } from '../version';\nimport { Container } from './container';\nimport { execute, FeedDict } from './executor';\nimport { evaluateDataset, fitDataset } from './training_dataset';\nimport { checkBatchSize, disposeNewTensors, ensureTensorsRank2OrHigher, fitTensors, makeBatches, sliceArrays, sliceArraysByIndices } from './training_tensors';\nimport { computeWeightedLoss, standardizeClassWeights, standardizeWeights } from './training_utils';\n/**\n * Helper function for polymorphic input data: 1. singleton Tensor.\n */\nexport function isDataTensor(x) {\n return x instanceof Tensor;\n}\n/**\n * Helper function for polymorphic input data: 2. Array of Tensor.\n */\nexport function isDataArray(x) {\n return Array.isArray(x);\n}\n/**\n * Helper function for polymorphic input data: 3. 
\"dict\" of Tensor.\n */\nexport function isDataDict(x) {\n return !isDataTensor(x) && !isDataArray(x);\n}\n/**\n * Normalizes inputs and targets provided by users.\n * @param data User-provided input data (polymorphic).\n * @param names An Array of expected Tensor names.\n * @param shapes Optional Array of expected Tensor shapes.\n * @param checkBatchAxis Whether to check that the batch axis of the arrays\n * match the expected value found in `shapes`.\n * @param exceptionPrefix String prefix used for exception formatting.\n * @returns List of standardized input Tensors (one Tensor per model input).\n * @throws ValueError: in case of improperly formatted user data.\n */\nexport function standardizeInputData(data, names, shapes, checkBatchAxis = true, exceptionPrefix = '') {\n if (names == null || names.length === 0) {\n // Check for the case where the model expected no data, but some data got\n // sent.\n if (data != null) {\n let gotUnexpectedData = false;\n if (isDataArray(data) && data.length > 0) {\n gotUnexpectedData = true;\n }\n else if (isDataDict(data)) {\n for (const key in data) {\n if (data.hasOwnProperty(key)) {\n gotUnexpectedData = true;\n break;\n }\n }\n }\n else {\n // `data` is a singleton Tensor in this case.\n gotUnexpectedData = true;\n }\n if (gotUnexpectedData) {\n throw new ValueError(`Error when checking model ${exceptionPrefix} expected no data, ` +\n `but got ${data}`);\n }\n }\n return [];\n }\n if (data == null) {\n return names.map(name => null);\n }\n let arrays;\n if (isDataDict(data)) {\n data = data;\n arrays = [];\n for (const name of names) {\n if (data[name] == null) {\n throw new ValueError(`No data provided for \"${name}\". Need data for each key in: ` +\n `${names}`);\n }\n arrays.push(data[name]);\n }\n }\n else if (isDataArray(data)) {\n data = data;\n if (data.length !== names.length) {\n throw new ValueError(`Error when checking model ${exceptionPrefix}: the Array of ` +\n `Tensors that you are passing to your model is not the size the ` +\n `model expected. Expected to see ${names.length} Tensor(s), but ` +\n `instead got the following list of Tensor(s): ${data}`);\n }\n arrays = data;\n }\n else {\n data = data;\n if (names.length > 1) {\n throw new ValueError(`The model ${exceptionPrefix} expects ${names.length} Tensor(s), ` +\n `but only received one Tensor. Found: Tensor with shape ${data.shape}`);\n }\n arrays = [data];\n }\n arrays = ensureTensorsRank2OrHigher(arrays);\n // Check shape compatibility.\n if (shapes != null) {\n for (let i = 0; i < names.length; ++i) {\n if (shapes[i] == null) {\n continue;\n }\n const array = arrays[i];\n if (array.shape.length !== shapes[i].length) {\n throw new ValueError(`Error when checking ${exceptionPrefix}: expected ${names[i]} ` +\n `to have ${shapes[i].length} dimension(s). 
but got array with ` +\n `shape ${array.shape}`);\n }\n for (let j = 0; j < shapes[i].length; ++j) {\n if (j === 0 && !checkBatchAxis) {\n // Skip the first (batch) axis.\n continue;\n }\n const dim = array.shape[j];\n const refDim = shapes[i][j];\n if (refDim != null && refDim >= 0 && dim !== refDim) {\n throw new ValueError(`Error when checking ${exceptionPrefix}: expected ${names[i]} ` +\n `to have shape [${shapes[i]}], but got array with shape ` +\n `[${array.shape}].`);\n }\n }\n }\n }\n return arrays;\n}\n/**\n * User input validation for Tensors.\n * @param inputs `Array` of `tf.Tensor`s for inputs.\n * @param targets `Array` of `tf.Tensor`s for targets.\n * @param weights Optional `Array` of `tf.Tensor`s for sample weights.\n * @throws ValueError: in case of incorrectly formatted data.\n */\nexport function checkArrayLengths(inputs, targets, weights) {\n const setX = unique(inputs.map(input => input.shape[0]));\n setX.sort();\n const setY = unique(targets.map(target => target.shape[0]));\n setY.sort();\n // TODO(cais): Check `weights` as well.\n if (setX.length > 1) {\n throw new ValueError(`All input Tensors (x) should have the same number of samples. ` +\n `Got array shapes: ` +\n `${JSON.stringify(inputs.map(input => input.shape))}`);\n }\n if (setY.length > 1) {\n throw new ValueError(`All target Tensors (y) should have the same number of samples. ` +\n `Got array shapes: ` +\n `${JSON.stringify(targets.map(target => target.shape))}`);\n }\n if (setX.length > 0 && setY.length > 0 && !util.arraysEqual(setX, setY)) {\n throw new ValueError(`Input Tensors should have the same number of samples as target ` +\n `Tensors. Found ${setX[0]} input sample(s) and ${setY[0]} target ` +\n `sample(s).`);\n }\n}\n/**\n * Validation on the compatibility of targes and loss functions.\n *\n * This helps prevent users from using loss functions incorrectly.\n *\n * @param targets `Array` of `tf.Tensor`s of targets.\n * @param lossFns `Array` of loss functions.\n * @param outputShapes `Array` of shapes of model outputs.\n */\nfunction checkLossAndTargetCompatibility(targets, lossFns, outputShapes) {\n // TODO(cais): Dedicated test coverage?\n const keyLosses = [\n losses.meanSquaredError, losses.binaryCrossentropy,\n losses.categoricalCrossentropy\n ];\n for (let i = 0; i < targets.length; ++i) {\n const y = targets[i];\n const loss = lossFns[i];\n const shape = outputShapes[i];\n if (loss == null) {\n continue;\n }\n if (loss === losses.categoricalCrossentropy) {\n if (y.shape[y.shape.length - 1] === 1) {\n throw new ValueError(`You are passing a target array of shape ${y.shape} while using ` +\n `a loss 'categorical_crossentropy'. 'categorical_crossentropy'` +\n `expects targets to be binary matrices (1s and 0s) of shape ` +\n `[samples, classes].`);\n // TODO(cais): Example code in error message.\n }\n }\n if (keyLosses.indexOf(loss) !== -1) {\n const slicedYShape = y.shape.slice(1);\n const slicedShape = shape.slice(1);\n for (let j = 0; j < slicedYShape.length; ++j) {\n const targetDim = slicedYShape[j];\n const outDim = slicedShape[j];\n if (outDim != null && targetDim !== outDim) {\n throw new ValueError(`A target Tensor with shape ${y.shape} was passed for an ` +\n `output of shape ${shape}, while using a loss function that ` +\n `expects targets to have the same shape as the output.`);\n }\n }\n }\n }\n}\n/**\n * Check inputs provided by the user.\n *\n * Porting Note: This corresponds to _standardize_input_data() in Python\n * Keras. 
Because of the strong typing in TF.js, we do not need to convert\n * the data. Specifically:\n * 1) in PyKeras, `data` can be `DataFrame` instances from pandas, for\n * example. We don't need to worry about that here because there is no\n * widely popular javascript/typesdcript equivalent of pandas (so far).\n * If one becomes available in the future, we can add support.\n * 2) in PyKeras, inputs can be Python dict. But here we are stipulating\n * that the data is either a single `tf.Tensor` or an Array of `tf.Tensor`s. We\n * may add support for `Object` data inputs in the future when the need\n * arises.\n *\n * Instead, we perform basic checks for number of parameters and shapes.\n *\n * @param data: The input data.\n * @param names: Name for the inputs, from the model.\n * @param shapes: Expected shapes for the input data, from the model.\n * @param checkBatchAxis: Whether the size along the batch axis (i.e., the\n * first dimension) will be checked for matching.\n * @param exceptionPrefix: Execption prefix message, used in generating error\n * messages.\n * @throws ValueError: on incorrect number of inputs or mismatches in shapes.\n */\nfunction checkInputData(data, names, shapes, checkBatchAxis = true, exceptionPrefix = '') {\n let arrays;\n if (Array.isArray(data)) {\n if (data.length !== names.length) {\n throw new ValueError(`Error when checking model ${exceptionPrefix}: the Array of ` +\n `Tensors that you are passing to your model is not the size the ` +\n `the model expected. Expected to see ${names.length} Tensor(s),` +\n ` but instead got ${data.length} Tensors(s).`);\n }\n arrays = data;\n }\n else {\n if (names.length > 1) {\n throw new ValueError(`The model expects ${names.length} ${exceptionPrefix} Tensors, ` +\n `but only received one Tensor. Found: array with shape ` +\n `${JSON.stringify(data.shape)}.`);\n }\n arrays = [data];\n }\n if (shapes != null) {\n for (let i = 0; i < names.length; ++i) {\n if (shapes[i] == null) {\n continue;\n }\n const array = arrays[i];\n if (array.shape.length !== shapes[i].length) {\n throw new ValueError(`Error when checking ${exceptionPrefix}: expected ${names[i]} ` +\n `to have ${shapes[i].length} dimension(s), but got array with ` +\n `shape ${JSON.stringify(array.shape)}`);\n }\n for (let j = 0; j < shapes[i].length; ++j) {\n if (j === 0 && !checkBatchAxis) {\n continue;\n }\n const dim = array.shape[j];\n const refDim = shapes[i][j];\n if (refDim != null) {\n if (refDim !== dim) {\n throw new ValueError(`Error when checking ${exceptionPrefix}: expected ` +\n `${names[i]} to have shape ${JSON.stringify(shapes[i])} but ` +\n `got array with shape ${JSON.stringify(array.shape)}.`);\n }\n }\n }\n }\n }\n}\n/**\n * Maps metric functions to model outputs.\n * @param metrics An shortcut strings name, metric function, `Array` or dict\n * (`Object`) of metric functions.\n * @param outputNames An `Array` of the names of model outputs.\n * @returns An `Array` (one entry per model output) of `Array` of metric\n * functions. 
For instance, if the model has 2 outputs, and for the first\n * output we want to compute `binaryAccuracy` and `binaryCrossentropy`,\n * and just `binaryAccuracy` for the second output, the `Array` would look\n * like:\n * `[[binaryAccuracy, binaryCrossentropy], [binaryAccuracy]]`\n * @throws TypeError: incompatible metrics format.\n */\nexport function collectMetrics(metrics, outputNames) {\n if (metrics == null || Array.isArray(metrics) && metrics.length === 0) {\n return outputNames.map(name => []);\n }\n let wrappedMetrics;\n if (typeof metrics === 'string' || typeof metrics === 'function') {\n wrappedMetrics = [metrics];\n }\n else if (Array.isArray(metrics) || typeof metrics === 'object') {\n wrappedMetrics = metrics;\n }\n else {\n throw new TypeError('Type of metrics argument not understood. Expected an string,' +\n `function, Array, or Object, found: ${metrics}`);\n }\n if (Array.isArray(wrappedMetrics)) {\n // We then apply all metrics to all outputs.\n return outputNames.map(name => wrappedMetrics);\n }\n else {\n // In this case, metrics is a dict.\n const nestedMetrics = [];\n for (const name of outputNames) {\n let outputMetrics = wrappedMetrics.hasOwnProperty(name) ? wrappedMetrics[name] : [];\n if (!Array.isArray(outputMetrics)) {\n outputMetrics = [outputMetrics];\n }\n nestedMetrics.push(outputMetrics);\n }\n return nestedMetrics;\n }\n}\nconst LAYERS_MODEL_FORMAT_NAME = 'layers-model';\n/**\n * A `tf.LayersModel` is a directed, acyclic graph of `tf.Layer`s plus methods\n * for training, evaluation, prediction and saving.\n *\n * `tf.LayersModel` is the basic unit of training, inference and evaluation in\n * TensorFlow.js. To create a `tf.LayersModel`, use `tf.LayersModel`.\n *\n * See also:\n * `tf.Sequential`, `tf.loadLayersModel`.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\nexport class LayersModel extends Container {\n constructor(args) {\n super(args);\n this.isTraining = false;\n }\n /**\n * Print a text summary of the model's layers.\n *\n * The summary includes\n * - Name and type of all layers that comprise the model.\n * - Output shape(s) of the layers\n * - Number of weight parameters of each layer\n * - If the model has non-sequential-like topology, the inputs each layer\n * receives\n * - The total number of trainable and non-trainable parameters of the model.\n *\n * ```js\n * const input1 = tf.input({shape: [10]});\n * const input2 = tf.input({shape: [20]});\n * const dense1 = tf.layers.dense({units: 4}).apply(input1);\n * const dense2 = tf.layers.dense({units: 8}).apply(input2);\n * const concat = tf.layers.concatenate().apply([dense1, dense2]);\n * const output =\n * tf.layers.dense({units: 3, activation: 'softmax'}).apply(concat);\n *\n * const model = tf.model({inputs: [input1, input2], outputs: output});\n * model.summary();\n * ```\n *\n * @param lineLength Custom line length, in number of characters.\n * @param positions Custom widths of each of the columns, as either\n * fractions of `lineLength` (e.g., `[0.5, 0.75, 1]`) or absolute number\n * of characters (e.g., `[30, 50, 65]`). Each number corresponds to\n * right-most (i.e., ending) position of a column.\n * @param printFn Custom print function. Can be used to replace the default\n * `console.log`. 
For example, you can use `x => {}` to mute the printed\n * messages in the console.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n summary(lineLength, positions, printFn = console.log) {\n if (!this.built) {\n throw new ValueError(`This model has never been called, thus its weights have not been ` +\n `created yet. So no summary can be displayed. Build the model ` +\n `first (e.g., by calling it on some test data).`);\n }\n printSummary(this, lineLength, positions, printFn);\n }\n /**\n * Configures and prepares the model for training and evaluation. Compiling\n * outfits the model with an optimizer, loss, and/or metrics. Calling `fit`\n * or `evaluate` on an un-compiled model will throw an error.\n *\n * @param args a `ModelCompileArgs` specifying the loss, optimizer, and\n * metrics to be used for fitting and evaluating this model.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n compile(args) {\n if (args.loss == null) {\n args.loss = [];\n }\n this.loss = args.loss;\n if (typeof args.optimizer === 'string') {\n this.optimizer_ = optimizers.getOptimizer(args.optimizer);\n this.isOptimizerOwned = true;\n }\n else {\n if (!(args.optimizer instanceof Optimizer)) {\n throw new ValueError(`User-defined optimizer must be an instance of tf.Optimizer.`);\n }\n this.optimizer_ = args.optimizer;\n this.isOptimizerOwned = false;\n }\n // TODO(cais): Add lossWeights.\n // TODO(cais): Add sampleWeightMode.\n // Prepare loss functions.\n let lossFunctions = [];\n if (!Array.isArray(args.loss) && typeof args.loss !== 'string' &&\n typeof args.loss !== 'function') {\n args.loss = args.loss;\n for (const name in args.loss) {\n if (this.outputNames.indexOf(name) === -1) {\n throw new ValueError(`Unknown entry in loss dictionary: \"${name}\". ` +\n `Only expected the following keys: ${this.outputNames}`);\n }\n }\n for (const name of this.outputNames) {\n if (args.loss[name] == null) {\n console.warn(`Output \"${name}\" is missing from loss dictionary. We assume ` +\n `this was done on purpose, and we will not be expecting data ` +\n `to be passed to ${name} during training`);\n }\n lossFunctions.push(losses.get(args.loss[name]));\n }\n }\n else if (Array.isArray(args.loss)) {\n if (args.loss.length !== this.outputs.length) {\n throw new ValueError(`When passing an Array as loss, it should have one entry per ` +\n `model output. The model has ${this.outputs.length} output(s), ` +\n `but you passed loss=${args.loss}.`);\n }\n const theLosses = args.loss;\n lossFunctions = theLosses.map(l => losses.get(l));\n }\n else {\n const lossFunction = losses.get(args.loss);\n this.outputs.forEach(_ => {\n lossFunctions.push(lossFunction);\n });\n }\n this.lossFunctions = lossFunctions;\n this.feedOutputNames = [];\n this.feedOutputShapes = [];\n this.feedLossFns = [];\n for (let i = 0; i < this.outputs.length; ++i) {\n // TODO(cais): Logic for skipping target(s).\n const shape = this.internalOutputShapes[i];\n const name = this.outputNames[i];\n this.feedOutputNames.push(name);\n this.feedOutputShapes.push(shape);\n this.feedLossFns.push(this.lossFunctions[i]);\n }\n // TODO(cais): Add logic for output masks.\n // TODO(cais): Add logic for sample weights.\n const skipTargetIndices = [];\n // Prepare metrics.\n this.metrics = args.metrics;\n // TODO(cais): Add weightedMetrics.\n this.metricsNames = ['loss'];\n this.metricsTensors = [];\n // Compute total loss.\n // Porting Note: In PyKeras, metrics_tensors are symbolic tensor objects.\n // Here, metricsTensors are TypeScript functions. 
This difference is due\n // to the difference in symbolic/imperative property of the backends.\n nameScope('loss', () => {\n for (let i = 0; i < this.outputs.length; ++i) {\n if (skipTargetIndices.indexOf(i) !== -1) {\n continue;\n }\n // TODO(cais): Add weightedLoss, sampleWeight and mask.\n // The following line should be weightedLoss\n const weightedLoss = this.lossFunctions[i];\n if (this.outputs.length > 1) {\n this.metricsTensors.push([weightedLoss, i]);\n this.metricsNames.push(this.outputNames[i] + '_loss');\n }\n }\n // Porting Note: Due to the imperative nature of the backend, we calculate\n // the regularizer penalties in the totalLossFunction, instead of here.\n });\n const nestedMetrics = collectMetrics(args.metrics, this.outputNames);\n // TODO(cais): Add nestedWeightedMetrics.\n /**\n * Helper function used in loop below.\n */\n const appendMetric = (outputIndex, metricName, metricTensor) => {\n if (this.outputNames.length > 1) {\n metricName = this.outputNames[outputIndex] + '_' + metricName;\n }\n this.metricsNames.push(metricName);\n this.metricsTensors.push([metricTensor, outputIndex]);\n };\n nameScope('metric', () => {\n for (let i = 0; i < this.outputs.length; ++i) {\n if (skipTargetIndices.indexOf(i) !== -1) {\n continue;\n }\n const outputMetrics = nestedMetrics[i];\n // TODO(cais): Add weights and outputWeightedMetrics.\n // TODO(cais): Add optional arg `weights` to the following function.\n const handleMetrics = (metrics) => {\n const metricNamePrefix = '';\n let metricName;\n let accFn;\n let weightedMetricFn;\n // TODO(cais): Use 'weights_' for weighted metrics.\n for (const metric of metrics) {\n if (typeof metric === 'string' &&\n ['accuracy', 'acc', 'crossentropy', 'ce'].indexOf(metric) !==\n -1) {\n const outputShape = this.internalOutputShapes[i];\n if (outputShape[outputShape.length - 1] === 1 ||\n this.lossFunctions[i] === losses.binaryCrossentropy) {\n // case: binary accuracy/crossentropy.\n if (['accuracy', 'acc'].indexOf(metric) !== -1) {\n accFn = Metrics.binaryAccuracy;\n }\n else if (['crossentropy', 'ce'].indexOf(metric) !== -1) {\n accFn = Metrics.binaryCrossentropy;\n }\n }\n else if (this.lossFunctions[i] ===\n losses.sparseCategoricalCrossentropy) {\n // case: categorical accuracy / crossentropy with sparse\n // targets.\n if (['accuracy', 'acc'].indexOf(metric) !== -1) {\n accFn = Metrics.sparseCategoricalAccuracy;\n }\n else if (['crossentropy', 'ce'].indexOf(metric) !== -1) {\n accFn = Metrics.sparseCategoricalCrossentropy;\n }\n }\n else {\n // case: categorical accuracy / crossentropy.\n if (['accuracy', 'acc'].indexOf(metric) !== -1) {\n accFn = Metrics.categoricalAccuracy;\n }\n else if (['crossentropy', 'ce'].indexOf(metric) !== -1) {\n accFn = Metrics.categoricalCrossentropy;\n }\n }\n let suffix;\n if (['accuracy', 'acc'].indexOf(metric) !== -1) {\n suffix = 'acc';\n }\n else if (['crossentropy', 'ce'].indexOf(metric) !== -1) {\n suffix = 'ce';\n }\n // TODO(cais): Add weighting actually.\n weightedMetricFn = accFn;\n metricName = metricNamePrefix + suffix;\n }\n else {\n const metricFn = Metrics.get(metric);\n // TODO(cais): Add weighting actually.\n weightedMetricFn = metricFn;\n metricName =\n metricNamePrefix + Metrics.getLossOrMetricName(metric);\n }\n // TODO(cais): Add weighting and masking to metricResult.\n let metricResult;\n nameScope(metricName, () => {\n metricResult = weightedMetricFn;\n });\n appendMetric(i, metricName, metricResult);\n }\n };\n handleMetrics(outputMetrics);\n // TODO(cais): Call handleMetrics with 
weights.\n }\n });\n // Porting Notes: Given the imperative backend of tfjs-core,\n // there is no need for constructing the symbolic graph and placeholders.\n this.collectedTrainableWeights = this.trainableWeights;\n }\n /**\n * Check trainable weights count consistency.\n *\n * This will raise a warning if `this.trainableWeights` and\n * `this.collectedTrainableWeights` are inconsistent (i.e., have different\n * numbers of parameters).\n * Inconsistency will typically arise when one modifies `model.trainable`\n * without calling `model.compile()` again.\n */\n checkTrainableWeightsConsistency() {\n if (this.collectedTrainableWeights == null) {\n return;\n }\n if (this.trainableWeights.length !==\n this.collectedTrainableWeights.length) {\n console.warn('Discrepancy between trainableweights and collected trainable ' +\n 'weights. Did you set `model.trainable` without calling ' +\n '`model.compile()` afterwards?');\n }\n }\n /**\n * Returns the loss value & metrics values for the model in test mode.\n *\n * Loss and metrics are specified during `compile()`, which needs to happen\n * before calls to `evaluate()`.\n *\n * Computation is done in batches.\n *\n * ```js\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 1, inputShape: [10]})]\n * });\n * model.compile({optimizer: 'sgd', loss: 'meanSquaredError'});\n * const result = model.evaluate(\n * tf.ones([8, 10]), tf.ones([8, 1]), {batchSize: 4});\n * result.print();\n * ```\n *\n * @param x `tf.Tensor` of test data, or an `Array` of `tf.Tensor`s if the\n * model has multiple inputs.\n * @param y `tf.Tensor` of target data, or an `Array` of `tf.Tensor`s if the\n * model has multiple outputs.\n * @param args A `ModelEvaluateArgs`, containing optional fields.\n *\n * @return `Scalar` test loss (if the model has a single output and no\n * metrics) or `Array` of `Scalar`s (if the model has multiple outputs\n * and/or metrics). The attribute `model.metricsNames`\n * will give you the display labels for the scalar outputs.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n evaluate(x, y, args = {}) {\n const batchSize = args.batchSize == null ? 32 : args.batchSize;\n checkBatchSize(batchSize);\n // TODO(cais): Standardize `config.sampleWeights` as well.\n // Validate user data.\n const checkBatchAxis = true;\n const standardizedOuts = this.standardizeUserDataXY(x, y, checkBatchAxis, batchSize);\n try {\n // TODO(cais): If uses `useLearningPhase`, set the corresponding element\n // of the input to 0.\n const ins = standardizedOuts[0].concat(standardizedOuts[1]);\n this.makeTestFunction();\n const f = this.testFunction;\n const testOuts = this.testLoop(f, ins, batchSize, args.verbose, args.steps);\n return singletonOrArray(testOuts);\n }\n finally {\n disposeNewTensors(standardizedOuts[0], x);\n disposeNewTensors(standardizedOuts[1], y);\n }\n }\n // TODO(cais): Add code snippet below once real dataset objects are\n // available.\n /**\n * Evaluate model using a dataset object.\n *\n * Note: Unlike `evaluate()`, this method is asynchronous (`async`);\n *\n * @param dataset A dataset object. Its `iterator()` method is expected\n * to generate a dataset iterator object, the `next()` method of which\n * is expected to produce data batches for evaluation. The return value\n * of the `next()` call ought to contain a boolean `done` field and a\n * `value` field. The `value` field is expected to be an array of two\n * `tf.Tensor`s or an array of two nested `tf.Tensor` structures. 
The former\n * case is for models with exactly one input and one output (e.g..\n * a sequential model). The latter case is for models with multiple\n * inputs and/or multiple outputs. Of the two items in the array, the\n * first is the input feature(s) and the second is the output target(s).\n * @param args A configuration object for the dataset-based evaluation.\n * @returns Loss and metric values as an Array of `Scalar` objects.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n async evaluateDataset(dataset, args) {\n this.makeTestFunction();\n return evaluateDataset(this, dataset, args);\n }\n /**\n * Get number of samples provided for training, evaluation or prediction.\n *\n * @param ins Input `tf.Tensor`.\n * @param batchSize Integer batch size, optional.\n * @param steps Total number of steps (batches of samples) before\n * declaring loop finished. Optional.\n * @param stepsName The public API's parameter name for `steps`.\n * @returns Number of samples provided.\n */\n checkNumSamples(ins, batchSize, steps, stepsName = 'steps') {\n let numSamples;\n if (steps != null) {\n numSamples = null;\n if (batchSize != null) {\n throw new ValueError(`If ${stepsName} is set, batchSize must be null or undefined.` +\n `Got batchSize = ${batchSize}`);\n }\n }\n else if (ins != null) {\n if (Array.isArray(ins)) {\n numSamples = ins[0].shape[0];\n }\n else {\n numSamples = ins.shape[0];\n }\n }\n else {\n throw new ValueError(`Either the input data should have a defined shape, or ` +\n `${stepsName} shoud be specified.`);\n }\n return numSamples;\n }\n /**\n * Execute internal tensors of the model with input data feed.\n * @param inputs Input data feed. Must match the inputs of the model.\n * @param outputs Names of the output tensors to be fetched. Must match\n * names of the SymbolicTensors that belong to the graph.\n * @returns Fetched values for `outputs`.\n */\n execute(inputs, outputs) {\n if (Array.isArray(outputs) && outputs.length === 0) {\n throw new ValueError('`outputs` is an empty Array, which is not allowed.');\n }\n const outputsIsArray = Array.isArray(outputs);\n const outputNames = (outputsIsArray ? outputs : [outputs]);\n const outputSymbolicTensors = this.retrieveSymbolicTensors(outputNames);\n // Format the input into a FeedDict.\n const feedDict = new FeedDict();\n if (inputs instanceof Tensor) {\n inputs = [inputs];\n }\n if (Array.isArray(inputs)) {\n if (inputs.length !== this.inputs.length) {\n throw new ValueError(`The number of inputs provided (${inputs.length}) ` +\n `does not match the number of inputs of this model ` +\n `(${this.inputs.length}).`);\n }\n for (let i = 0; i < this.inputs.length; ++i) {\n feedDict.add(this.inputs[i], inputs[i]);\n }\n }\n else {\n for (const input of this.inputs) {\n const tensorValue = inputs[input.name];\n if (tensorValue == null) {\n throw new ValueError(`No value is provided for the model's input ${input.name}`);\n }\n feedDict.add(input, tensorValue);\n }\n }\n // Run execution.\n const executeOutputs = execute(outputSymbolicTensors, feedDict);\n return outputsIsArray ? executeOutputs : executeOutputs[0];\n }\n /**\n * Retrieve the model's internal symbolic tensors from symbolic-tensor names.\n */\n retrieveSymbolicTensors(symbolicTensorNames) {\n const outputSymbolicTensors = pyListRepeat(null, symbolicTensorNames.length);\n let outputsRemaining = symbolicTensorNames.length;\n for (const layer of this.layers) {\n const layerOutputs = Array.isArray(layer.output) ? 
layer.output : [layer.output];\n const layerOutputNames = layerOutputs.map(output => output.name);\n for (let i = 0; i < symbolicTensorNames.length; ++i) {\n const index = layerOutputNames.indexOf(symbolicTensorNames[i]);\n if (index !== -1) {\n outputSymbolicTensors[i] = layerOutputs[index];\n outputsRemaining--;\n }\n if (outputsRemaining === 0) {\n break;\n }\n }\n if (outputsRemaining === 0) {\n break;\n }\n }\n if (outputsRemaining > 0) {\n const remainingNames = [];\n outputSymbolicTensors.forEach((tensor, i) => {\n if (tensor == null) {\n remainingNames.push(symbolicTensorNames[i]);\n }\n });\n throw new ValueError(`Cannot find SymbolicTensors for output name(s): ` +\n `${JSON.stringify(remainingNames)}`);\n }\n return outputSymbolicTensors;\n }\n /**\n * Helper method to loop over some data in batches.\n *\n * Porting Note: Not using the functional approach in the Python equivalent\n * due to the imperative backend.\n * Porting Note: Does not support step mode currently.\n *\n * @param ins: input data\n * @param batchSize: integer batch size.\n * @param verbose: verbosity model\n * @returns: Predictions as `tf.Tensor` (if a single output) or an `Array` of\n * `tf.Tensor` (if multipe outputs).\n */\n predictLoop(ins, batchSize = 32, verbose = false) {\n return tfc.tidy(() => {\n const numSamples = this.checkNumSamples(ins);\n if (verbose) {\n throw new NotImplementedError('Verbose predictLoop() is not implemented yet.');\n }\n // Sample-based predictions.\n // Porting Note: Tensor currently does not support sliced assignments as\n // in numpy, e.g., x[1:3] = y. Therefore we use concatenation while\n // iterating over the batches.\n const batches = makeBatches(numSamples, batchSize);\n const outsBatches = this.outputs.map(output => []);\n // TODO(cais): Can the scope() be pushed down inside the for loop?\n for (let batchIndex = 0; batchIndex < batches.length; ++batchIndex) {\n const batchOuts = tfc.tidy(() => {\n const batchStart = batches[batchIndex][0];\n const batchEnd = batches[batchIndex][1];\n // TODO(cais): Take care of the case of the last element is a flag for\n // training/test.\n const insBatch = sliceArrays(ins, batchStart, batchEnd);\n // Construct the feeds for execute();\n const feeds = [];\n if (Array.isArray(insBatch)) {\n for (let i = 0; i < insBatch.length; ++i) {\n feeds.push({ key: this.inputs[i], value: insBatch[i] });\n }\n }\n else {\n feeds.push({ key: this.inputs[0], value: insBatch });\n }\n const feedDict = new FeedDict(feeds);\n return execute(this.outputs, feedDict);\n });\n batchOuts.forEach((batchOut, i) => outsBatches[i].push(batchOut));\n }\n return singletonOrArray(outsBatches.map(batches => tfc.concat(batches, 0)));\n });\n }\n /**\n * Generates output predictions for the input samples.\n *\n * Computation is done in batches.\n *\n * Note: the \"step\" mode of predict() is currently not supported.\n * This is because the TensorFlow.js core backend is imperative only.\n *\n * ```js\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 1, inputShape: [10]})]\n * });\n * model.predict(tf.ones([8, 10]), {batchSize: 4}).print();\n * ```\n *\n * @param x The input data, as a Tensor, or an `Array` of `tf.Tensor`s if\n * the model has multiple inputs.\n * @param args A `ModelPredictArgs` object containing optional fields.\n *\n * @return Prediction results as a `tf.Tensor`(s).\n *\n * @exception ValueError In case of mismatch between the provided input data\n * and the model's expectations, or in case a stateful model receives a\n * number 
of samples that is not a multiple of the batch size.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n predict(x, args = {}) {\n const xsRank2OrHigher = ensureTensorsRank2OrHigher(x);\n checkInputData(xsRank2OrHigher, this.inputNames, this.feedInputShapes, false);\n try {\n // TODO(cais): Take care of stateful models.\n // if (this.stateful) ...\n // TODO(cais): Take care of the learning_phase boolean flag.\n // if (this.useLearningPhase) ...\n const batchSize = args.batchSize == null ? 32 : args.batchSize;\n checkBatchSize(batchSize);\n return this.predictLoop(xsRank2OrHigher, batchSize);\n }\n finally {\n disposeNewTensors(xsRank2OrHigher, x);\n }\n }\n /**\n * Returns predictions for a single batch of samples.\n *\n * ```js\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 1, inputShape: [10]})]\n * });\n * model.predictOnBatch(tf.ones([8, 10])).print();\n * ```\n * @param x: Input samples, as a Tensor (for models with exactly one\n * input) or an array of Tensors (for models with more than one input).\n * @return Tensor(s) of predictions\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n predictOnBatch(x) {\n checkInputData(x, this.inputNames, this.feedInputShapes, true);\n // TODO(cais): Take care of the learning_phase boolean flag.\n // if (this.useLearningPhase) ...\n const batchSize = (Array.isArray(x) ? x[0] : x).shape[0];\n return this.predictLoop(x, batchSize);\n }\n standardizeUserDataXY(x, y, checkBatchAxis = true, batchSize) {\n // TODO(cais): Add sampleWeight, classWeight\n if (this.optimizer_ == null) {\n throw new RuntimeError('You must compile a model before training/testing. Use ' +\n 'LayersModel.compile(modelCompileArgs).');\n }\n const outputShapes = [];\n for (let i = 0; i < this.feedOutputShapes.length; ++i) {\n const outputShape = this.feedOutputShapes[i];\n const lossFn = this.feedLossFns[i];\n if (lossFn === losses.sparseCategoricalCrossentropy) {\n outputShapes.push(outputShape.slice(0, outputShape.length - 1).concat([1]));\n }\n else {\n // Porting Note: Because of strong typing `lossFn` must be a function.\n outputShapes.push(outputShape);\n }\n }\n x = standardizeInputData(x, this.feedInputNames, this.feedInputShapes, false, 'input');\n y = standardizeInputData(y, this.feedOutputNames, outputShapes, false, 'target');\n // TODO(cais): Standardize sampleWeights & classWeights.\n checkArrayLengths(x, y, null);\n // TODO(cais): Check sampleWeights as well.\n checkLossAndTargetCompatibility(y, this.feedLossFns, this.feedOutputShapes);\n if (this.stateful && batchSize != null && batchSize > 0) {\n if (x[0].shape[0] % batchSize !== 0) {\n throw new ValueError(`In a stateful network, you should only pass inputs with a ` +\n `number of samples that is divisible by the batch size ` +\n `${batchSize}. 
Found: ${x[0].shape[0]} sample(s).`);\n }\n }\n return [x, y];\n }\n async standardizeUserData(x, y, sampleWeight, classWeight, checkBatchAxis = true, batchSize) {\n const [standardXs, standardYs] = this.standardizeUserDataXY(x, y, checkBatchAxis, batchSize);\n // TODO(cais): Handle sampleWeights.\n if (sampleWeight != null) {\n throw new Error('sample weight is not supported yet.');\n }\n let standardSampleWeights = null;\n if (classWeight != null) {\n const classWeights = standardizeClassWeights(classWeight, this.outputNames);\n standardSampleWeights = [];\n for (let i = 0; i < classWeights.length; ++i) {\n standardSampleWeights.push(await standardizeWeights(standardYs[i], null, classWeights[i]));\n }\n }\n // TODO(cais): Deal with the case of model.stateful == true.\n return [standardXs, standardYs, standardSampleWeights];\n }\n /**\n * Loop over some test data in batches.\n * @param f A Function returning a list of tensors.\n * @param ins Array of tensors to be fed to `f`.\n * @param batchSize Integer batch size or `null` / `undefined`.\n * @param verbose verbosity mode.\n * @param steps Total number of steps (batches of samples) before\n * declaring test finished. Ignored with the default value of `null` /\n * `undefined`.\n * @returns Array of Scalars.\n */\n testLoop(f, ins, batchSize, verbose = 0, steps) {\n return tfc.tidy(() => {\n const numSamples = this.checkNumSamples(ins, batchSize, steps, 'steps');\n const outs = [];\n if (verbose > 0) {\n throw new NotImplementedError('Verbose mode is not implemented yet.');\n }\n // TODO(cais): Use `indicesForConversionToDense' to prevent slow down.\n if (steps != null) {\n throw new NotImplementedError('steps mode in testLoop() is not implemented yet');\n }\n else {\n const batches = makeBatches(numSamples, batchSize);\n const indexArray = tensor1d(range(0, numSamples));\n for (let batchIndex = 0; batchIndex < batches.length; ++batchIndex) {\n const batchStart = batches[batchIndex][0];\n const batchEnd = batches[batchIndex][1];\n const batchIds = K.sliceAlongFirstAxis(indexArray, batchStart, batchEnd - batchStart);\n // TODO(cais): In ins, train flag can be a number, instead of an\n // Tensor? Do we need to handle this in tfjs-layers?\n const insBatch = sliceArraysByIndices(ins, batchIds);\n const batchOuts = f(insBatch);\n if (batchIndex === 0) {\n for (let i = 0; i < batchOuts.length; ++i) {\n outs.push(scalar(0));\n }\n }\n for (let i = 0; i < batchOuts.length; ++i) {\n const batchOut = batchOuts[i];\n outs[i] =\n tfc.add(outs[i], tfc.mul(batchEnd - batchStart, batchOut));\n }\n }\n for (let i = 0; i < outs.length; ++i) {\n outs[i] = tfc.div(outs[i], numSamples);\n }\n }\n return outs;\n });\n }\n getDedupedMetricsNames() {\n const outLabels = this.metricsNames;\n // Rename duplicated metrics names (can happen with an output layer\n // shared among multiple dataflows).\n const dedupedOutLabels = [];\n for (let i = 0; i < outLabels.length; ++i) {\n const label = outLabels[i];\n let newLabel = label;\n if (count(outLabels, label) > 1) {\n const dupIndex = count(outLabels.slice(0, i), label);\n newLabel += `_${dupIndex}`;\n }\n dedupedOutLabels.push(newLabel);\n }\n return dedupedOutLabels;\n }\n /**\n * Creates a function that performs the following actions:\n *\n * 1. computes the losses\n * 2. sums them to get the total loss\n * 3. call the optimizer computes the gradients of the LayersModel's\n * trainable weights w.r.t. the total loss and update the variables\n * 4. calculates the metrics\n * 5. 
returns the values of the losses and metrics.\n */\n makeTrainFunction() {\n return (data) => {\n const lossValues = [];\n const inputs = data.slice(0, this.inputs.length);\n const targets = data.slice(this.inputs.length, this.inputs.length + this.outputs.length);\n const sampleWeights = data.slice(this.inputs.length + this.outputs.length, this.inputs.length + this.outputs.length * 2);\n const metricsValues = [];\n // Create a function that computes the total loss based on the\n // inputs. This function is used for obtaining gradients through\n // backprop.\n const totalLossFunction = () => {\n const feeds = [];\n for (let i = 0; i < this.inputs.length; ++i) {\n feeds.push({ key: this.inputs[i], value: inputs[i] });\n }\n const feedDict = new FeedDict(feeds);\n const outputs = execute(this.outputs, feedDict, { 'training': true });\n // TODO(cais): Take care of the case of multiple outputs from a\n // single layer?\n let totalLoss;\n for (let i = 0; i < this.lossFunctions.length; ++i) {\n const lossFunction = this.lossFunctions[i];\n let loss = lossFunction(targets[i], outputs[i]);\n if (sampleWeights[i] != null) {\n loss = computeWeightedLoss(loss, sampleWeights[i]);\n }\n // TODO(cais): push Scalar instead.\n const meanLoss = tfc.mean(loss);\n // TODO(cais): Use a scope() instead, to avoid ownership.\n lossValues.push(meanLoss);\n if (i === 0) {\n totalLoss = loss;\n }\n else {\n totalLoss = tfc.add(totalLoss, loss);\n }\n }\n // Compute the metrics.\n // TODO(cais): These should probably be calculated outside\n // totalLossFunction to benefit speed?\n for (let i = 0; i < this.metricsTensors.length; ++i) {\n let weightedMetric;\n if (this.outputs.length > 1 && i < this.outputs.length) {\n weightedMetric = lossValues[i];\n }\n else {\n const metric = this.metricsTensors[i][0];\n const outputIndex = this.metricsTensors[i][1];\n weightedMetric =\n tfc.mean(metric(targets[outputIndex], outputs[outputIndex]));\n }\n tfc.keep(weightedMetric);\n // TODO(cais): Use a scope() instead, to avoid ownership.\n metricsValues.push(weightedMetric);\n }\n totalLoss = tfc.mean(totalLoss);\n // Add regularizer penalties.\n this.calculateLosses().forEach(regularizerLoss => {\n totalLoss = tfc.add(totalLoss, regularizerLoss);\n });\n return totalLoss;\n };\n const variables = this.collectedTrainableWeights.map(param => param.read());\n const returnCost = true;\n const totalLossValue = this.optimizer_.minimize(totalLossFunction, returnCost, variables);\n return [totalLossValue].concat(metricsValues);\n };\n }\n /**\n * Create a function which, when invoked with an array of `tf.Tensor`s as a\n * batch of inputs, returns the prespecified loss and metrics of the model\n * under the batch of input data.\n */\n makeTestFunction() {\n this.testFunction = (data) => {\n return tfc.tidy(() => {\n const valOutputs = [];\n let totalLoss;\n const inputs = data.slice(0, this.inputs.length);\n const targets = data.slice(this.inputs.length, this.inputs.length + this.outputs.length);\n const feeds = [];\n for (let i = 0; i < this.inputs.length; ++i) {\n feeds.push({ key: this.inputs[i], value: inputs[i] });\n }\n const feedDict = new FeedDict(feeds);\n const outputs = execute(this.outputs, feedDict);\n // Compute total loss.\n for (let i = 0; i < this.lossFunctions.length; ++i) {\n const lossFunction = this.lossFunctions[i];\n // TODO(cais): Add sample weighting and replace the simple\n // averaging.\n const loss = tfc.mean(lossFunction(targets[i], outputs[i]));\n if (i === 0) {\n totalLoss = loss;\n }\n else {\n totalLoss = 
tfc.add(totalLoss, loss);\n }\n valOutputs.push(totalLoss);\n }\n // Compute the metrics.\n for (let i = 0; i < this.metricsTensors.length; ++i) {\n const metric = this.metricsTensors[i][0];\n const outputIndex = this.metricsTensors[i][1];\n // TODO(cais): Replace K.mean() with a proper weighting function.\n const meanMetric = tfc.mean(metric(targets[outputIndex], outputs[outputIndex]));\n valOutputs.push(meanMetric);\n }\n return valOutputs;\n });\n };\n }\n /**\n * Trains the model for a fixed number of epochs (iterations on a\n * dataset).\n *\n * ```js\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 1, inputShape: [10]})]\n * });\n * model.compile({optimizer: 'sgd', loss: 'meanSquaredError'});\n * for (let i = 1; i < 5 ; ++i) {\n * const h = await model.fit(tf.ones([8, 10]), tf.ones([8, 1]), {\n * batchSize: 4,\n * epochs: 3\n * });\n * console.log(\"Loss after Epoch \" + i + \" : \" + h.history.loss[0]);\n * }\n * ```\n *\n * @param x `tf.Tensor` of training data, or an array of `tf.Tensor`s if the\n * model has multiple inputs. If all inputs in the model are named, you\n * can also pass a dictionary mapping input names to `tf.Tensor`s.\n * @param y `tf.Tensor` of target (label) data, or an array of `tf.Tensor`s if\n * the model has multiple outputs. If all outputs in the model are named,\n * you can also pass a dictionary mapping output names to `tf.Tensor`s.\n * @param args A `ModelFitArgs`, containing optional fields.\n *\n * @return A `History` instance. Its `history` attribute contains all\n * information collected during training.\n *\n * @exception ValueError In case of mismatch between the provided input\n * data and what the model expects.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n async fit(x, y, args = {}) {\n return fitTensors(this, x, y, args);\n }\n // TODO(cais): Add code snippet below when it's possible to instantiate\n // actual dataset objects.\n /**\n * Trains the model using a dataset object.\n *\n * @param dataset A dataset object. Its `iterator()` method is expected\n * to generate a dataset iterator object, the `next()` method of which\n * is expected to produce data batches for training. The return value\n * of the `next()` call ought to contain a boolean `done` field and a\n * `value` field. The `value` field is expected to be an array of two\n * `tf.Tensor`s or an array of two nested `tf.Tensor` structures. The former\n * case is for models with exactly one input and one output (e.g..\n * a sequential model). The latter case is for models with multiple\n * inputs and/or multiple outputs.\n * Of the two items in the array, the first is the input feature(s) and\n * the second is the output target(s).\n * @param args A `ModelFitDatasetArgs`, containing optional fields.\n *\n * @return A `History` instance. Its `history` attribute contains all\n * information collected during training.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n async fitDataset(dataset, args) {\n return fitDataset(this, dataset, args);\n }\n /**\n * Runs a single gradient update on a single batch of data.\n *\n * This method differs from `fit()` and `fitDataset()` in the following\n * regards:\n * - It operates on exactly one batch of data.\n * - It returns only the loss and matric values, instead of\n * returning the batch-by-batch loss and metric values.\n * - It doesn't support fine-grained options such as verbosity and\n * callbacks.\n *\n * @param x Input data. 
It could be one of the following:\n * - A `tf.Tensor`, or an Array of `tf.Tensor`s (in case the model has\n * multiple inputs).\n * - An Object mapping input names to corresponding `tf.Tensor` (if the\n * model has named inputs).\n * @param y Target darta. It could be either a `tf.Tensor` a multiple\n * `tf.Tensor`s. It should be consistent with `x`.\n * @returns Training loss or losses (in case the model has\n * multiple outputs), along with metrics (if any), as numbers.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n async trainOnBatch(x, y) {\n // TODO(cais): Support sampleWeight and classWeight.\n // TODO(cais): Support Dataset objects.\n const standardizeOut = await this.standardizeUserData(x, y);\n const inputs = standardizeOut[0];\n const targets = standardizeOut[1];\n const trainFunction = this.makeTrainFunction();\n const losses = trainFunction(inputs.concat(targets));\n const lossValues = [];\n for (const loss of losses) {\n const v = await loss.data();\n lossValues.push(v[0]);\n }\n tfc.dispose(losses);\n return singletonOrArray(lossValues);\n }\n /**\n * Extract weight values of the model.\n *\n * @param config: An instance of `io.SaveConfig`, which specifies\n * model-saving options such as whether only trainable weights are to be\n * saved.\n * @returns A `NamedTensorMap` mapping original weight names (i.e.,\n * non-uniqueified weight names) to their values.\n */\n getNamedWeights(config) {\n const namedWeights = [];\n const trainableOnly = config != null && config.trainableOnly;\n const weights = trainableOnly ? this.trainableWeights : this.weights;\n const weightValues = this.getWeights(trainableOnly);\n for (let i = 0; i < weights.length; ++i) {\n if (trainableOnly && !weights[i].trainable) {\n // Optionally skip non-trainable weights.\n continue;\n }\n namedWeights.push({ name: weights[i].originalName, tensor: weightValues[i] });\n }\n return namedWeights;\n }\n /**\n * Setter used for force stopping of LayersModel.fit() (i.e., training).\n *\n * Example:\n *\n * ```js\n * const input = tf.input({shape: [10]});\n * const output = tf.layers.dense({units: 1}).apply(input);\n * const model = tf.model({inputs: [input], outputs: [output]});\n * model.compile({loss: 'meanSquaredError', optimizer: 'sgd'});\n * const xs = tf.ones([8, 10]);\n * const ys = tf.zeros([8, 1]);\n *\n * const history = await model.fit(xs, ys, {\n * epochs: 10,\n * callbacks: {\n * onEpochEnd: async (epoch, logs) => {\n * if (epoch === 2) {\n * model.stopTraining = true;\n * }\n * }\n * }\n * });\n *\n * // There should be only 3 values in the loss array, instead of 10\n * values,\n * // due to the stopping after 3 epochs.\n * console.log(history.history.loss);\n * ```\n */\n set stopTraining(stop) {\n this.stopTraining_ = stop;\n }\n get stopTraining() {\n return this.stopTraining_;\n }\n get optimizer() {\n return this.optimizer_;\n }\n set optimizer(optimizer) {\n if (this.optimizer_ !== optimizer) {\n this.optimizer_ = optimizer;\n this.isOptimizerOwned = false;\n }\n }\n dispose() {\n const result = super.dispose();\n if (result.refCountAfterDispose === 0 && this.optimizer != null &&\n this.isOptimizerOwned) {\n const numTensorsBeforeOptmizerDisposal = tfc.memory().numTensors;\n this.optimizer_.dispose();\n result.numDisposedVariables +=\n numTensorsBeforeOptmizerDisposal - tfc.memory().numTensors;\n }\n return result;\n }\n getLossIdentifiers() {\n let lossNames;\n if (typeof this.loss === 'string') {\n lossNames = toSnakeCase(this.loss);\n }\n else if (Array.isArray(this.loss)) {\n 
for (const loss of this.loss) {\n if (typeof loss !== 'string') {\n throw new Error('Serialization of non-string loss is not supported.');\n }\n }\n lossNames = this.loss.map(name => toSnakeCase(name));\n }\n else {\n const outputNames = Object.keys(this.loss);\n lossNames = {};\n const losses = this.loss;\n for (const outputName of outputNames) {\n if (typeof losses[outputName] === 'string') {\n lossNames[outputName] =\n toSnakeCase(losses[outputName]);\n }\n else {\n throw new Error('Serialization of non-string loss is not supported.');\n }\n }\n }\n return lossNames;\n }\n getMetricIdentifiers() {\n if (typeof this.metrics === 'string' ||\n typeof this.metrics === 'function') {\n return [toSnakeCase(Metrics.getLossOrMetricName(this.metrics))];\n }\n else if (Array.isArray(this.metrics)) {\n return this.metrics.map(metric => toSnakeCase(Metrics.getLossOrMetricName(metric)));\n }\n else {\n const metricsIdentifiers = {};\n for (const key in this.metrics) {\n metricsIdentifiers[key] =\n toSnakeCase(Metrics.getLossOrMetricName(this.metrics[key]));\n }\n return metricsIdentifiers;\n }\n }\n getTrainingConfig() {\n return {\n loss: this.getLossIdentifiers(),\n metrics: this.getMetricIdentifiers(),\n optimizer_config: {\n class_name: this.optimizer.getClassName(),\n config: this.optimizer.getConfig()\n }\n };\n // TODO(cais): Add weight_metrics when they are supported.\n // TODO(cais): Add sample_weight_mode when it's supported.\n // TODO(cais): Add loss_weights when it's supported.\n }\n loadTrainingConfig(trainingConfig) {\n if (trainingConfig.weighted_metrics != null) {\n throw new Error('Loading weight_metrics is not supported yet.');\n }\n if (trainingConfig.loss_weights != null) {\n throw new Error('Loading loss_weights is not supported yet.');\n }\n if (trainingConfig.sample_weight_mode != null) {\n throw new Error('Loading sample_weight_mode is not supported yet.');\n }\n const tsConfig = convertPythonicToTs(trainingConfig.optimizer_config);\n const optimizer = deserialize(tsConfig);\n let loss;\n if (typeof trainingConfig.loss === 'string') {\n loss = toCamelCase(trainingConfig.loss);\n }\n else if (Array.isArray(trainingConfig.loss)) {\n loss = trainingConfig.loss.map(lossEntry => toCamelCase(lossEntry));\n }\n else if (trainingConfig.loss != null) {\n loss = {};\n for (const key in trainingConfig.loss) {\n loss[key] = toCamelCase(trainingConfig.loss[key]);\n }\n }\n let metrics;\n if (Array.isArray(trainingConfig.metrics)) {\n metrics = trainingConfig.metrics.map(metric => toCamelCase(metric));\n }\n else if (trainingConfig.metrics != null) {\n metrics = {};\n for (const key in trainingConfig.metrics) {\n metrics[key] = toCamelCase(trainingConfig.metrics[key]);\n }\n }\n this.compile({ loss, metrics, optimizer });\n }\n /**\n * Save the configuration and/or weights of the LayersModel.\n *\n * An `IOHandler` is an object that has a `save` method of the proper\n * signature defined. The `save` method manages the storing or\n * transmission of serialized data (\"artifacts\") that represent the\n * model's topology and weights onto or via a specific medium, such as\n * file downloads, local storage, IndexedDB in the web browser and HTTP\n * requests to a server. TensorFlow.js provides `IOHandler`\n * implementations for a number of frequently used saving mediums, such as\n * `tf.io.browserDownloads` and `tf.io.browserLocalStorage`. 
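A hedged sketch of saving with the `includeOptimizer` option handled in the `save()` implementation below, so training can resume after loading; it assumes a browser environment (for `indexeddb://`), `tf` as the `@tensorflow/tfjs` namespace, an async context, and a placeholder storage key:

```js
// Save topology, weights, training config and optimizer weights together.
const model = tf.sequential({layers: [tf.layers.dense({units: 1, inputShape: [3]})]});
model.compile({optimizer: tf.train.adam(0.01), loss: 'meanSquaredError'});
await model.fit(tf.ones([8, 3]), tf.ones([8, 1]), {epochs: 2});
await model.save('indexeddb://resumable-model', {includeOptimizer: true});

// On load, the stored training config is re-applied and the optimizer weights
// are restored, so further fit() calls continue from the saved optimizer state.
const restored = await tf.loadLayersModel('indexeddb://resumable-model');
await restored.fit(tf.ones([8, 3]), tf.ones([8, 1]), {epochs: 2});
```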
See `tf.io`\n * for more details.\n *\n * This method also allows you to refer to certain types of `IOHandler`s\n * as URL-like string shortcuts, such as 'localstorage://' and\n * 'indexeddb://'.\n *\n * Example 1: Save `model`'s topology and weights to browser [local\n * storage](https://developer.mozilla.org/en-US/docs/Web/API/Window/localStorage);\n * then load it back.\n *\n * ```js\n * const model = tf.sequential(\n * {layers: [tf.layers.dense({units: 1, inputShape: [3]})]});\n * console.log('Prediction from original model:');\n * model.predict(tf.ones([1, 3])).print();\n *\n * const saveResults = await model.save('localstorage://my-model-1');\n *\n * const loadedModel = await tf.loadLayersModel('localstorage://my-model-1');\n * console.log('Prediction from loaded model:');\n * loadedModel.predict(tf.ones([1, 3])).print();\n * ```\n *\n * Example 2. Saving `model`'s topology and weights to browser\n * [IndexedDB](https://developer.mozilla.org/en-US/docs/Web/API/IndexedDB_API);\n * then load it back.\n *\n * ```js\n * const model = tf.sequential(\n * {layers: [tf.layers.dense({units: 1, inputShape: [3]})]});\n * console.log('Prediction from original model:');\n * model.predict(tf.ones([1, 3])).print();\n *\n * const saveResults = await model.save('indexeddb://my-model-1');\n *\n * const loadedModel = await tf.loadLayersModel('indexeddb://my-model-1');\n * console.log('Prediction from loaded model:');\n * loadedModel.predict(tf.ones([1, 3])).print();\n * ```\n *\n * Example 3. Saving `model`'s topology and weights as two files\n * (`my-model-1.json` and `my-model-1.weights.bin`) downloaded from\n * browser.\n *\n * ```js\n * const model = tf.sequential(\n * {layers: [tf.layers.dense({units: 1, inputShape: [3]})]});\n * const saveResults = await model.save('downloads://my-model-1');\n * ```\n *\n * Example 4. 
Send `model`'s topology and weights to an HTTP server.\n * See the documentation of `tf.io.http` for more details\n * including specifying request parameters and implementation of the\n * server.\n *\n * ```js\n * const model = tf.sequential(\n * {layers: [tf.layers.dense({units: 1, inputShape: [3]})]});\n * const saveResults = await model.save('http://my-server/model/upload');\n * ```\n *\n * @param handlerOrURL An instance of `IOHandler` or a URL-like,\n * scheme-based string shortcut for `IOHandler`.\n * @param config Options for saving the model.\n * @returns A `Promise` of `SaveResult`, which summarizes the result of\n * the saving, such as byte sizes of the saved artifacts for the model's\n * topology and weight values.\n *\n * @doc {heading: 'Models', subheading: 'Classes', ignoreCI: true}\n */\n async save(handlerOrURL, config) {\n if (typeof handlerOrURL === 'string') {\n const handlers = io.getSaveHandlers(handlerOrURL);\n if (handlers.length === 0) {\n throw new ValueError(`Cannot find any save handlers for URL '${handlerOrURL}'`);\n }\n else if (handlers.length > 1) {\n throw new ValueError(`Found more than one (${handlers.length}) save handlers for ` +\n `URL '${handlerOrURL}'`);\n }\n handlerOrURL = handlers[0];\n }\n if (handlerOrURL.save == null) {\n throw new ValueError('LayersModel.save() cannot proceed because the IOHandler ' +\n 'provided does not have the `save` attribute defined.');\n }\n const weightDataAndSpecs = await io.encodeWeights(this.getNamedWeights(config));\n const returnString = false;\n const unusedArg = null;\n const modelConfig = this.toJSON(unusedArg, returnString);\n const modelArtifacts = {\n modelTopology: modelConfig,\n format: LAYERS_MODEL_FORMAT_NAME,\n generatedBy: `TensorFlow.js tfjs-layers v${version}`,\n convertedBy: null,\n };\n const includeOptimizer = config == null ? false : config.includeOptimizer;\n if (includeOptimizer && this.optimizer != null) {\n modelArtifacts.trainingConfig = this.getTrainingConfig();\n const weightType = 'optimizer';\n const { data: optimizerWeightData, specs: optimizerWeightSpecs } = await io.encodeWeights(await this.optimizer.getWeights(), weightType);\n weightDataAndSpecs.specs.push(...optimizerWeightSpecs);\n weightDataAndSpecs.data = io.concatenateArrayBuffers([weightDataAndSpecs.data, optimizerWeightData]);\n }\n if (this.userDefinedMetadata != null) {\n // Check serialized size of user-defined metadata.\n const checkSize = true;\n checkUserDefinedMetadata(this.userDefinedMetadata, this.name, checkSize);\n modelArtifacts.userDefinedMetadata = this.userDefinedMetadata;\n }\n modelArtifacts.weightData = weightDataAndSpecs.data;\n modelArtifacts.weightSpecs = weightDataAndSpecs.specs;\n return handlerOrURL.save(modelArtifacts);\n }\n /**\n * Set user-defined metadata.\n *\n * The set metadata will be serialized together with the topology\n * and weights of the model during `save()` calls.\n *\n * @param setUserDefinedMetadata\n */\n setUserDefinedMetadata(userDefinedMetadata) {\n checkUserDefinedMetadata(userDefinedMetadata, this.name);\n this.userDefinedMetadata = userDefinedMetadata;\n }\n /**\n * Get user-defined metadata.\n *\n * The metadata is supplied via one of the two routes:\n * 1. By calling `setUserDefinedMetadata()`.\n * 2. 
Loaded during model loading (if the model is constructed\n * via `tf.loadLayersModel()`.)\n *\n * If no user-defined metadata is available from either of the\n * two routes, this function will return `undefined`.\n */\n getUserDefinedMetadata() {\n return this.userDefinedMetadata;\n }\n}\n// The class name is 'Model' rather than 'LayersModel' for backwards\n// compatibility since this class name shows up in the serialization format.\n/** @nocollapse */\nLayersModel.className = 'Model';\nserialization.registerClass(LayersModel);\n/**\n * A `tf.Functional` is an alias to `tf.LayersModel`.\n *\n * See also:\n * `tf.LayersModel`, `tf.Sequential`, `tf.loadLayersModel`.\n */\n/** @doc {heading: 'Models', subheading: 'Classes'} */\nexport class Functional extends LayersModel {\n}\nFunctional.className = 'Functional';\nserialization.registerClass(Functional);\n//# sourceMappingURL=training.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* Original source keras/models.py */\nimport { dispose, io, serialization, util } from '@tensorflow/tfjs-core';\nimport { getUid } from './backend/state';\nimport { Input } from './engine/input_layer';\nimport { getSourceInputs, Node } from './engine/topology';\nimport { LayersModel } from './engine/training';\nimport { NotImplementedError, RuntimeError, ValueError } from './errors';\nimport { deserialize } from './layers/serialization';\nimport * as generic_utils from './utils/generic_utils';\nimport { convertPythonicToTs } from './utils/serialization_utils';\nimport { getExactlyOneShape } from './utils/types_utils';\n/**\n * Parses a JSON model configuration file and returns a model instance.\n *\n * ```js\n * // This example shows how to serialize a model using `toJSON()` and\n * // deserialize it as another model using `tf.models.modelFromJSON()`.\n * // Note: this example serializes and deserializes only the topology\n * // of the model; the weights of the loaded model will be different\n * // from those of the the original model, due to random weight\n * // initialization.\n * // To load the topology and weights of a model, use `tf.loadLayersModel()`.\n * const model1 = tf.sequential();\n * model1.add(tf.layers.repeatVector({inputShape: [2], n: 4}));\n * // Serialize `model1` as a JSON object.\n * const model1JSON = model1.toJSON(null, false);\n * model1.summary();\n *\n * const model2 = await tf.models.modelFromJSON(model1JSON);\n * model2.summary();\n * ```\n *\n * @param modelAndWeightsConfig JSON object or string encoding a model and\n * weights configuration. 
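A small sketch of the `setUserDefinedMetadata()` / `getUserDefinedMetadata()` pair described above; the storage key and metadata object are placeholders, and a browser with `localstorage://` support plus an async context is assumed:

```js
// Metadata set here is serialized by save() alongside topology and weights.
const model = tf.sequential({layers: [tf.layers.dense({units: 1, inputShape: [3]})]});
model.setUserDefinedMetadata({labels: ['cat', 'dog'], version: 3});  // placeholder metadata
await model.save('localstorage://model-with-metadata');              // placeholder key

const loaded = await tf.loadLayersModel('localstorage://model-with-metadata');
console.log(loaded.getUserDefinedMetadata());  // {labels: ['cat', 'dog'], version: 3}
```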
It can also be only the topology JSON of the\n * model, in which case the weights will not be loaded.\n * @param custom_objects Optional dictionary mapping names\n * (strings) to custom classes or functions to be\n * considered during deserialization.\n * @returns A TensorFlow.js Layers `tf.LayersModel` instance (uncompiled).\n */\nexport async function modelFromJSON(modelAndWeightsConfig, customObjects) {\n if (!('modelTopology' in modelAndWeightsConfig)) {\n modelAndWeightsConfig = { modelTopology: modelAndWeightsConfig };\n }\n modelAndWeightsConfig = modelAndWeightsConfig;\n let modelTopology = modelAndWeightsConfig.modelTopology;\n if (modelTopology['model_config'] != null) {\n // If the model-topology JSON contains a 'model_config' field, then it is\n // a full model JSON (e.g., from `keras.Model.save()`), which contains\n // not only the model's architecture in its 'model_config' field, but\n // additional information such as the model's optimizer. We use only the\n // 'model_config' field currently.\n modelTopology = modelTopology['model_config'];\n }\n const tsConfig = convertPythonicToTs(modelTopology);\n const model = deserialize(tsConfig, customObjects);\n if (modelAndWeightsConfig.weightsManifest != null) {\n // Load the weight values keyed by the original tensor names in the model\n // file that was loaded. These should match the keys of the weight\n // manifest.\n const weightValues = await io.loadWeights(modelAndWeightsConfig.weightsManifest, modelAndWeightsConfig.pathPrefix, model.weights.map(weight => weight.originalName));\n // Map the weights to the unique tensor names generated during model loading\n const uniqueWeightValues = {};\n for (const weight of model.weights) {\n uniqueWeightValues[weight.originalName] =\n weightValues[weight.originalName];\n }\n model.loadWeights(uniqueWeightValues);\n // Dispose temporary weight values.\n dispose(weightValues);\n }\n return model;\n}\n/**\n * Load a model, including its topology and optionally weights. See the\n * Tutorial named \"How to import a Keras Model\" for usage examples.\n *\n * Example 1: Save `model`'s topology and weights to browser [local\n * storage](https://developer.mozilla.org/en-US/docs/Web/API/Window/localStorage);\n * then load it back.\n *\n * ```js\n * const model = tf.sequential(\n * {layers: [tf.layers.dense({units: 1, inputShape: [3]})]});\n * console.log('Prediction from original model:');\n * model.predict(tf.ones([1, 3])).print();\n *\n * const saveResults = await model.save('localstorage://my-model-1');\n *\n * const loadedModel = await tf.loadLayersModel('localstorage://my-model-1');\n * console.log('Prediction from loaded model:');\n * loadedModel.predict(tf.ones([1, 3])).print();\n * ```\n *\n * Example 2. Saving `model`'s topology and weights to browser\n * [IndexedDB](https://developer.mozilla.org/en-US/docs/Web/API/IndexedDB_API);\n * then load it back.\n *\n * ```js\n * const model = tf.sequential(\n * {layers: [tf.layers.dense({units: 1, inputShape: [3]})]});\n * console.log('Prediction from original model:');\n * model.predict(tf.ones([1, 3])).print();\n *\n * const saveResults = await model.save('indexeddb://my-model-1');\n *\n * const loadedModel = await tf.loadLayersModel('indexeddb://my-model-1');\n * console.log('Prediction from loaded model:');\n * loadedModel.predict(tf.ones([1, 3])).print();\n * ```\n *\n * Example 3. 
Load a model from user-selected files from HTML\n * [file input\n * elements](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/input/file).\n *\n * ```js\n * // Note: this code snippet will not work without the HTML elements in the\n * // page\n * const jsonUpload = document.getElementById('json-upload');\n * const weightsUpload = document.getElementById('weights-upload');\n *\n * const model = await tf.loadLayersModel(\n * tf.io.browserFiles([jsonUpload.files[0], weightsUpload.files[0]]));\n * ```\n *\n * Example 4. Load a model from an HTTP server.\n *\n * ```js\n * const model = await\n * tf.loadLayersModel('https://storage.googleapis.com/tfjs-models/tfjs/iris_v1/model.json');\n * model.summary();\n * ```\n *\n * @param pathOrIOHandler Can be either of the two formats\n * 1. A string path to the `ModelAndWeightsConfig` JSON describing\n * the model in the canonical TensorFlow.js format. This path will be\n * interpreted as a relative HTTP path, to which `fetch` will be used to\n * request the model topology and weight manifest JSON.\n * The content of the JSON file is assumed to be a JSON object with the\n * following fields and values:\n * - 'modelTopology': A JSON object that can be either of:\n * 1. a model architecture JSON consistent with the format of the return\n * value of `keras.Model.to_json()`\n * 2. a full model JSON in the format of `keras.models.save_model()`.\n * - 'weightsManifest': A TensorFlow.js weights manifest.\n * See the Python converter function `save_model()` for more details.\n * It is also assumed that model weights can be accessed from relative\n * paths described by the `paths` fields in weights manifest.\n * 2. An `tf.io.IOHandler` object that loads model artifacts with its `load`\n * method.\n * @param options Optional configuration arguments for the model loading,\n * including:\n * - `strict`: Require that the provided weights exactly match those required\n * by the layers. Default true. Passing false means that both extra\n * weights and missing weights will be silently ignored.\n * - `onProgress`: A progress callback of the form:\n * `(fraction: number) => void`. 
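A sketch of the `strict` and `onProgress` loading options listed above; the URL is a placeholder and `tf` is assumed to be the `@tensorflow/tfjs` namespace inside an async context:

```js
// Placeholder URL; `strict` and `onProgress` are the options documented above.
const model = await tf.loadLayersModel('https://example.com/models/model.json', {
  strict: true,  // reject missing or extra weights (the default behavior)
  onProgress: (fraction) => console.log(`model ${(100 * fraction).toFixed(0)}% loaded`),
});
model.summary();
```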
This callback can be used to monitor the\n * model-loading process.\n * @returns A `Promise` of `tf.LayersModel`, with the topology and weights\n * loaded.\n */\nexport async function loadLayersModelInternal(pathOrIOHandler, options) {\n if (options == null) {\n options = {};\n }\n if (typeof pathOrIOHandler === 'string') {\n const handlers = io.getLoadHandlers(pathOrIOHandler, options);\n if (handlers.length === 0) {\n // For backward compatibility: if no load handler can be found,\n // assume it is a relative http path.\n // TODO(cais): Reformat the args into a single `LoadOptions` once the core\n // is refactored.\n handlers.push(io.browserHTTPRequest(pathOrIOHandler, options));\n }\n else if (handlers.length > 1) {\n throw new ValueError(`Found more than one (${handlers.length}) load handlers for ` +\n `URL '${pathOrIOHandler}'`);\n }\n pathOrIOHandler = handlers[0];\n }\n return loadLayersModelFromIOHandler(pathOrIOHandler, undefined, options);\n}\n/**\n * Load a model and optionally its weights, using an IOHandler object.\n *\n * @param handler The instance of `IOHandler` to be used during the model\n * loading.\n * @param customObjects Any optional custom objects to be used during model\n * loading.\n * @param strict Whether the weight loading will be done in strict mode.\n * Default: `true`.\n */\nexport async function loadLayersModelFromIOHandler(handler, customObjects, options) {\n if (options == null) {\n options = {};\n }\n if (handler.load == null) {\n throw new ValueError('Cannot proceed with model loading because the IOHandler provided ' +\n 'does not have the `load` method implemented.');\n }\n const artifacts = await handler.load();\n let modelTopology = artifacts.modelTopology;\n if (modelTopology['model_config'] != null) {\n modelTopology = modelTopology['model_config'];\n }\n const strict = options.strict == null ? true : options.strict;\n // If weights are provided and the weight-loading mode is strict, use\n // fast weight initialization. This skips costly initializers such as\n // 'orthogonal' and saves unnecessary computation in cases where\n // the initialized weight values will immediately be overwritten by\n // loaded weight values.\n const fastWeightInit = artifacts.weightData != null && artifacts.weightSpecs != null && strict;\n const model = deserialize(convertPythonicToTs(modelTopology), customObjects, fastWeightInit);\n const trainingConfig = artifacts.trainingConfig;\n if (trainingConfig != null) {\n model.loadTrainingConfig(trainingConfig);\n }\n if (artifacts.userDefinedMetadata != null) {\n model.setUserDefinedMetadata(artifacts.userDefinedMetadata);\n }\n // If weightData is present, load the weights into the model.\n if (artifacts.weightData != null) {\n // Loading weights requires weightSpecs.\n if (artifacts.weightSpecs == null) {\n throw new ValueError('LayersModel artifacts contains weight data, but not weight specs. 
' +\n 'Therefore loading of weights cannot proceed.');\n }\n const { modelWeights, optimizerWeights } = decodeModelAndOptimizerWeights(artifacts.weightData, artifacts.weightSpecs);\n model.loadWeights(modelWeights, strict);\n if (model.optimizer != null && optimizerWeights.length > 0) {\n await model.optimizer.setWeights(optimizerWeights);\n }\n // Dispose temporary weight values.\n dispose(modelWeights);\n dispose(optimizerWeights.map(w => w.tensor));\n }\n return model;\n}\nfunction decodeModelAndOptimizerWeights(buffer, specs) {\n const name2Tensor = io.decodeWeights(buffer, specs);\n const modelWeights = {};\n const optimizerWeights = [];\n specs.forEach(spec => {\n if (spec.group === 'optimizer') {\n optimizerWeights.push({ name: spec.name, tensor: name2Tensor[spec.name] });\n }\n else {\n modelWeights[spec.name] = name2Tensor[spec.name];\n }\n });\n return { modelWeights, optimizerWeights };\n}\n/**\n * A model with a stack of layers, feeding linearly from one to the next.\n *\n * `tf.sequential` is a factory function that creates an instance of\n * `tf.Sequential`.\n *\n * ```js\n * // Define a model for linear regression.\n * const model = tf.sequential();\n * model.add(tf.layers.dense({units: 1, inputShape: [1]}));\n *\n * // Prepare the model for training: Specify the loss and the optimizer.\n * model.compile({loss: 'meanSquaredError', optimizer: 'sgd'});\n *\n * // Generate some synthetic data for training.\n * const xs = tf.tensor2d([1, 2, 3, 4], [4, 1]);\n * const ys = tf.tensor2d([1, 3, 5, 7], [4, 1]);\n *\n * // Train the model using the data then do inference on a data point the\n * // model hasn't seen:\n * await model.fit(xs, ys);\n * model.predict(tf.tensor2d([5], [1, 1])).print();\n * ```\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\nexport class Sequential extends LayersModel {\n constructor(args) {\n super({ inputs: [], outputs: [] });\n args = args || {};\n this.trainable = true;\n this.built = false;\n // Set model name.\n this.name = (args.name != null) ? 
args.name : getUid('sequential_');\n // Add to the model any layers passed to the constructor.\n if (args.layers != null) {\n for (const layer of args.layers) {\n this.add(layer);\n }\n }\n }\n // Helper function to Sequential.add Throws if the new output shape will be\n // invalid.\n checkShape(layer) {\n const shape = layer.inboundNodes[0].outputTensors[0].shape;\n if (shape.some(x => x < 0)) {\n throw new ValueError('Negative dimension size caused by adding layer ' +\n `${layer.name} with input shape [` +\n `${layer.inboundNodes[0].inputTensors[0].shape}]`);\n }\n }\n /**\n * Adds a layer instance on top of the layer stack.\n *\n * ```js\n * const model = tf.sequential();\n * model.add(tf.layers.dense({units: 8, inputShape: [1]}));\n * model.add(tf.layers.dense({units: 4, activation: 'relu6'}));\n * model.add(tf.layers.dense({units: 1, activation: 'relu6'}));\n * // Note that the untrained model is random at this point.\n * model.predict(tf.randomNormal([10, 1])).print();\n * ```\n * @param layer Layer instance.\n *\n * @exception ValueError In case the `layer` argument does not know its\n * input shape.\n * @exception ValueError In case the `layer` argument has multiple output\n * tensors, or is already connected somewhere else (forbidden in\n * `Sequential` models).\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n add(layer) {\n const isLayerModelInstance = layer instanceof Sequential || layer instanceof LayersModel;\n let modelLayer;\n if (isLayerModelInstance) {\n modelLayer = layer;\n if (modelLayer.outputs.length !== 1) {\n throw new ValueError('All layers in a Sequential model ' +\n 'should have a single output tensor. ' +\n 'For multi-output layers, ' +\n 'use the functional API.');\n }\n if (modelLayer.inputs.length !== 1) {\n throw new ValueError('All layers in a Sequential model ' +\n 'should have a single input tensor. ' +\n 'For multi-input layers, ' +\n 'use the functional API.');\n }\n }\n if (this.outputs.length === 0) {\n // first layer in model: check that it is an input layer\n if (layer.inboundNodes.length === 0) {\n // create an input layer\n if (layer.batchInputShape == null) {\n throw new ValueError('The first layer in a Sequential model must ' +\n 'get an `inputShape` or `batchInputShape` argument.');\n }\n // Instantiate the input layer.\n const x = Input({\n batchShape: layer.batchInputShape,\n dtype: layer.dtype,\n name: layer.name + '_input'\n });\n // This will build the current layer and create the node connecting\n // the current layer to the input layer we just created.\n layer.apply(x);\n }\n if (isLayerModelInstance) {\n this.outputs = modelLayer.outputs;\n this.inputs = modelLayer.inputs;\n }\n else {\n if (layer.inboundNodes.length !== 1) {\n throw new ValueError('A layer added to a Sequential model must not already be ' +\n `connected somewhere else. LayersModel received layer ${layer.name} ` +\n `which has ${layer.inboundNodes.length} pre-existing inbound ` +\n 'connections.');\n }\n if (layer.inboundNodes[0].outputTensors.length !== 1) {\n throw new ValueError('All layers in a Sequential model ' +\n 'should have a single output tensor. 
' +\n 'For multi-output layers, ' +\n 'use the functional API.');\n }\n this.checkShape(layer);\n this.outputs = [layer.inboundNodes[0].outputTensors[0]];\n this.inputs = getSourceInputs(this.outputs[0]);\n }\n this.inboundNodes = [];\n // We create an input node, which we will keep updated\n // as we add more layers.\n // (This call has side effects.)\n // tslint:disable-next-line:no-unused-expression\n new Node({\n outboundLayer: this,\n inboundLayers: [],\n nodeIndices: [],\n tensorIndices: [],\n inputTensors: this.inputs,\n outputTensors: this.outputs,\n // no model-level masking for now\n inputMasks: generic_utils.pyListRepeat(null, this.inputs.length),\n outputMasks: [null],\n inputShapes: this.inputs.map(x => x.shape),\n outputShapes: this.outputs[0].shape\n });\n }\n else {\n const outputTensor = layer.apply(this.outputs[0]);\n if (Array.isArray(outputTensor)) {\n throw new TypeError('All layers in a Sequential model ' +\n 'should have a single output tensor. ' +\n 'For multi-output layers, ' +\n 'use the functional API.');\n }\n this.checkShape(layer);\n this.outputs = [outputTensor];\n // update self.inbound_nodes\n this.inboundNodes[0].outputTensors = this.outputs;\n this.inboundNodes[0].outputShapes = [this.outputs[0].shape];\n }\n this.layers.push(layer);\n this.built = false;\n }\n /**\n * Removes the last layer in the model.\n *\n * @exception TypeError if there are no layers in the model.\n */\n pop() {\n if (this.layers.length === 0) {\n throw new TypeError('There are no layers in the model.');\n }\n this.layers.pop();\n if (this.layers.length === 0) {\n this.outputs = [];\n this.inboundNodes = [];\n this.outboundNodes = [];\n }\n else {\n const lastLayerIndex = this.layers.length - 1;\n this.layers[lastLayerIndex].outboundNodes = [];\n this.outputs = [this.layers[lastLayerIndex].output];\n // update self.inbound_nodes\n this.inboundNodes[0].outputTensors = this.outputs;\n this.inboundNodes[0].outputShapes = [this.outputs[0].shape];\n }\n }\n call(inputs, kwargs) {\n if (this.model == null) {\n this.build();\n }\n return this.model.call(inputs, kwargs);\n }\n build(inputShape) {\n // Call `getExactlyOneShape` without using its return value,\n // to verify that exactly one input shape is provided.\n getExactlyOneShape(inputShape);\n if (this.inputs.length === 0 || this.outputs.length === 0) {\n throw new TypeError('Sequential model cannot be built: model is empty.' 
+\n ' Add some layers first.');\n }\n // actually create the model\n this.model = new LayersModel({\n inputs: this.inputs,\n outputs: this.outputs[0],\n name: this.name + '_model'\n });\n this.model.trainable = this.trainable;\n // mirror model attributes\n this.supportsMasking = this.model.supportsMasking;\n // TODO(michaelterry): Add caches\n this.inputLayers = this.model.inputLayers;\n this.inputLayersNodeIndices = this.model.inputLayersNodeIndices;\n this.inputLayersTensorIndices = this.model.inputLayersTensorIndices;\n this.outputLayers = this.model.outputLayers;\n this.outputLayersNodeIndices = this.model.outputLayersNodeIndices;\n this.outputLayersTensorIndices = this.model.outputLayersTensorIndices;\n this.nodesByDepth = this.model.nodesByDepth;\n this.containerNodes = this.model.containerNodes;\n this.outputNames = this.model.outputNames;\n this.inputNames = this.model.inputNames;\n // TODO(michaelterry): Add feedInputNames, feedInputs, if needed.\n // TODO(michaelterry): Add callbackModel if needed.\n this.built = true;\n }\n countParams() {\n if (!this.built) {\n this.build();\n }\n return super.countParams();\n }\n /**\n * Print a text summary of the Sequential model's layers.\n *\n * The summary includes\n * - Name and type of all layers that comprise the model.\n * - Output shape(s) of the layers\n * - Number of weight parameters of each layer\n * - The total number of trainable and non-trainable parameters of the\n * model.\n *\n * ```js\n * const model = tf.sequential();\n * model.add(\n * tf.layers.dense({units: 100, inputShape: [10], activation: 'relu'}));\n * model.add(tf.layers.dense({units: 1, activation: 'sigmoid'}));\n *\n * model.summary();\n * ```\n *\n * @param lineLength Custom line length, in number of characters.\n * @param positions Custom widths of each of the columns, as either\n * fractions of `lineLength` (e.g., `[0.5, 0.75, 1]`) or absolute number\n * of characters (e.g., `[30, 50, 65]`). Each number corresponds to\n * right-most (i.e., ending) position of a column.\n * @param printFn Custom print function. Can be used to replace the default\n * `console.log`. 
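A sketch of the `printFn` parameter described above, capturing the summary text instead of logging it; layer sizes are arbitrary and `tf` is assumed to be the `@tensorflow/tfjs` namespace:

```js
// Collect the summary lines with a custom printFn instead of console.log.
const model = tf.sequential({
  layers: [
    tf.layers.dense({units: 100, inputShape: [10], activation: 'relu'}),
    tf.layers.dense({units: 1, activation: 'sigmoid'}),
  ],
});
const lines = [];
model.summary(80, undefined, (msg) => lines.push(String(msg)));  // lineLength 80, default positions
console.log(lines.join('\n'));
```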
For example, you can use `x => {}` to mute the printed\n * messages in the console.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n summary(lineLength, positions, printFn = console.log) {\n if (!this.built) {\n this.build();\n }\n super.summary(lineLength, positions, printFn);\n }\n /**\n * Sets the weights of the model.\n *\n * @param weights Should be a list of Tensors with shapes and types matching\n * the output of `model.getWeights()`.\n */\n setWeights(weights) {\n if (this.model == null) {\n this.build();\n }\n this.model.setWeights(weights);\n }\n /**\n * Returns the loss value & metrics values for the model in test mode.\n *\n * Loss and metrics are specified during `compile()`, which needs to happen\n * before calls to `evaluate()`.\n *\n * Computation is done in batches.\n *\n * ```js\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 1, inputShape: [10]})]\n * });\n * model.compile({optimizer: 'sgd', loss: 'meanSquaredError'});\n * const result = model.evaluate(tf.ones([8, 10]), tf.ones([8, 1]), {\n * batchSize: 4,\n * });\n * result.print();\n * ```\n *\n * @param x `tf.Tensor` of test data, or an `Array` of `tf.Tensor`s if the\n * model has multiple inputs.\n * @param y `tf.Tensor` of target data, or an `Array` of `tf.Tensor`s if the\n * model has multiple outputs.\n * @param args A `ModelEvaluateConfig`, containing optional fields.\n *\n * @return `Scalar` test loss (if the model has a single output and no\n * metrics) or `Array` of `Scalar`s (if the model has multiple outputs\n * and/or metrics). The attribute `model.metricsNames`\n * will give you the display labels for the scalar outputs.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n evaluate(x, y, args = {}) {\n if (!this.built) {\n throw new RuntimeError('The model needs to be compiled before being used.');\n }\n return this.model.evaluate(x, y, args);\n }\n // TODO(cais): Add code snippet below once real dataset objects are\n // available.\n /**\n * Evaluate model using a dataset object.\n *\n * Note: Unlike `evaluate()`, this method is asynchronous (`async`);\n *\n * @param dataset A dataset object. Its `iterator()` method is expected\n * to generate a dataset iterator object, the `next()` method of which\n * is expected to produce data batches for evaluation. The return value\n * of the `next()` call ought to contain a boolean `done` field and a\n * `value` field. The `value` field is expected to be an array of two\n * `tf.Tensor`s or an array of two nested `tf.Tensor` structures. The former\n * case is for models with exactly one input and one output (e.g..\n * a sequential model). The latter case is for models with multiple\n * inputs and/or multiple outputs. 
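A hedged sketch of the `evaluateDataset()` method described here, using the same `{xs, ys}` element shape as the `fitDataset()` example further below; it assumes the `@tensorflow/tfjs` union package (for `tf.data`) and an async context:

```js
// Dataset elements are {xs, ys} objects, batched along the first dimension.
const xDataset = tf.data.array([[0, 0], [0, 1], [1, 0], [1, 1]]);
const yDataset = tf.data.array([[0], [1], [1], [0]]);
const dataset = tf.data.zip({xs: xDataset, ys: yDataset}).batch(2);

const model = tf.sequential({layers: [tf.layers.dense({units: 1, inputShape: [2]})]});
model.compile({optimizer: 'sgd', loss: 'meanSquaredError'});
const result = await model.evaluateDataset(dataset);
// A single Scalar for one output with no metrics; otherwise an array of Scalars.
(Array.isArray(result) ? result[0] : result).print();
```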
Of the two items in the array, the\n * first is the input feature(s) and the second is the output target(s).\n * @param args A configuration object for the dataset-based evaluation.\n * @returns Loss and metric values as an Array of `Scalar` objects.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n async evaluateDataset(dataset, args) {\n if (!this.built) {\n throw new RuntimeError('The model needs to be compiled before being used.');\n }\n return this.model.evaluateDataset(dataset, args);\n }\n /**\n * Generates output predictions for the input samples.\n *\n * Computation is done in batches.\n *\n * Note: the \"step\" mode of predict() is currently not supported.\n * This is because the TensorFow.js core backend is imperative only.\n *\n * ```js\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 1, inputShape: [10]})]\n * });\n * model.predict(tf.ones([2, 10])).print();\n * ```\n *\n * @param x The input data, as a Tensor, or an `Array` of `tf.Tensor`s if\n * the model has multiple inputs.\n * @param conifg A `ModelPredictConfig` object containing optional fields.\n *\n * @return `tf.Tensor`(s) of predictions.\n *\n * @exception ValueError In case of mismatch between the provided input data\n * and the model's expectations, or in case a stateful model receives a\n * number of samples that is not a multiple of the batch size.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n predict(x, args = {}) {\n if (this.model == null) {\n this.build();\n }\n return this.model.predict(x, args);\n }\n /**\n * Returns predictions for a single batch of samples.\n *\n * @param x: Input samples, as a Tensor, or list of Tensors (if the model\n * has multiple inputs).\n * @return Tensor(s) of predictions\n */\n predictOnBatch(x) {\n if (this.model == null) {\n this.build();\n }\n return this.model.predictOnBatch(x);\n }\n /**\n * See `LayersModel.compile`.\n *\n * @param args\n */\n compile(args) {\n this.build();\n this.model.compile(args);\n this.optimizer_ = this.model.optimizer;\n // tslint:disable-next-line:no-any\n this.isOptimizerOwned = this.model.isOptimizerOwned;\n this.loss = this.model.loss;\n this.metrics = this.model.metrics;\n // TODO(cais): Add this.lossWeights, this.sampleWeightMode,\n // this.weightedMetrics, this.targets.\n this.metricsTensors = this.model.metricsTensors;\n this.metricsNames = this.model.metricsNames;\n // TODO(cais): Add sampleWeights.\n }\n get optimizer() {\n return this.model == null ? undefined : this.model.optimizer;\n }\n set optimizer(optimizer) {\n this.model.optimizer = optimizer;\n }\n /**\n * Trains the model for a fixed number of epochs (iterations on a dataset).\n *\n * ```js\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 1, inputShape: [10]})]\n * });\n * model.compile({optimizer: 'sgd', loss: 'meanSquaredError'});\n * const history = await model.fit(tf.ones([8, 10]), tf.ones([8, 1]), {\n * batchSize: 4,\n * epochs: 3\n * });\n * console.log(history.history.loss[0]);\n * ```\n *\n * @param x `tf.Tensor` of training data, or an array of `tf.Tensor`s if the\n * model has multiple inputs. If all inputs in the model are named, you can\n * also pass a dictionary mapping input names to `tf.Tensor`s.\n * @param y `tf.Tensor` of target (label) data, or an array of `tf.Tensor`s if\n * the model has multiple outputs. 
If all outputs in the model are named, you\n * can also pass a dictionary mapping output names to `tf.Tensor`s.\n * @param args A `ModelFitConfig`, containing optional fields.\n *\n * @return A `History` instance. Its `history` attribute contains all\n * information collected during training.\n *\n * @exception ValueError In case of mismatch between the provided input data\n * and what the model expects.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n async fit(x, y, args = {}) {\n if (!this.built) {\n throw new RuntimeError('The model needs to be compiled before ' +\n 'being used.');\n }\n return this.model.fit(x, y, args);\n }\n /**\n * Trains the model using a dataset object.\n *\n * ```js\n * const xArray = [\n * [1, 1, 1, 1, 1, 1, 1, 1, 1],\n * [1, 1, 1, 1, 1, 1, 1, 1, 1],\n * [1, 1, 1, 1, 1, 1, 1, 1, 1],\n * [1, 1, 1, 1, 1, 1, 1, 1, 1],\n * ];\n * const yArray = [1, 1, 1, 1];\n * // Create a dataset from the JavaScript array.\n * const xDataset = tf.data.array(xArray);\n * const yDataset = tf.data.array(yArray);\n * // Zip combines the `x` and `y` Datasets into a single Dataset, the\n * // iterator of which will return an object containing of two tensors,\n * // corresponding to `x` and `y`. The call to `batch(4)` will bundle\n * // four such samples into a single object, with the same keys now pointing\n * // to tensors that hold 4 examples, organized along the batch dimension.\n * // The call to `shuffle(4)` causes each iteration through the dataset to\n * // happen in a different order. The size of the shuffle window is 4.\n * const xyDataset = tf.data.zip({xs: xDataset, ys: yDataset})\n * .batch(4)\n * .shuffle(4);\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 1, inputShape: [9]})]\n * });\n * model.compile({optimizer: 'sgd', loss: 'meanSquaredError'});\n * const history = await model.fitDataset(xyDataset, {\n * epochs: 4,\n * callbacks: {onEpochEnd: (epoch, logs) => console.log(logs.loss)}\n * });\n * ```\n *\n * @param dataset A dataset object. Its `iterator()` method is expected to\n * generate a dataset iterator object, the `next()` method of which is\n * expected to produce data batches for evaluation. The return value of the\n * `next()` call ought to contain a boolean `done` field and a `value`\n * field.\n *\n * The `value` field is expected to be an object of with fields\n * `xs` and `ys`, which point to the feature tensor and the target tensor,\n * respectively. This case is for models with exactly one input and one\n * output (e.g.. a sequential model). For example:\n * ```js\n * {value: {xs: xsTensor, ys: ysTensor}, done: false}\n * ```\n *\n * If the model has multiple inputs, the `xs` field of `value` should\n * be an object mapping input names to their respective feature tensors.\n * For example:\n * ```js\n * {\n * value: {\n * xs: {\n * input_1: xsTensor1,\n * input_2: xsTensor2\n * },\n * ys: ysTensor\n * },\n * done: false\n * }\n * ```\n * If the model has multiple outputs, the `ys` field of `value` should\n * be an object mapping output names to their respective target tensors.\n * For example:\n * ```js\n * {\n * value: {\n * xs: xsTensor,\n * ys: {\n * output_1: ysTensor1,\n * output_2: ysTensor2\n * },\n * },\n * done: false\n * }\n * ```\n * @param args A `ModelFitDatasetArgs`, containing optional fields.\n *\n * @return A `History` instance. 
Its `history` attribute contains all\n * information collected during training.\n *\n * @doc {heading: 'Models', subheading: 'Classes', ignoreCI: true}\n */\n async fitDataset(dataset, args) {\n if (!this.built) {\n throw new RuntimeError('The model needs to be compiled before ' +\n 'being used.');\n }\n return this.model.fitDataset(dataset, args);\n }\n /**\n * Runs a single gradient update on a single batch of data.\n *\n * This method differs from `fit()` and `fitDataset()` in the following\n * regards:\n * - It operates on exactly one batch of data.\n * - It returns only the loss and matric values, instead of\n * returning the batch-by-batch loss and metric values.\n * - It doesn't support fine-grained options such as verbosity and\n * callbacks.\n *\n * @param x Input data. It could be one of the following:\n * - A `tf.Tensor`, or an Array of `tf.Tensor`s (in case the model has\n * multiple inputs).\n * - An Object mapping input names to corresponding `tf.Tensor` (if the\n * model has named inputs).\n * @param y Target darta. It could be either a `tf.Tensor` a multiple\n * `tf.Tensor`s. It should be consistent with `x`.\n * @returns Training loss or losses (in case the model has\n * multiple outputs), along with metrics (if any), as numbers.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n async trainOnBatch(x, y) {\n return this.model.trainOnBatch(x, y);\n }\n /* See parent class for JsDoc */\n /** @nocollapse */\n static fromConfig(cls, config, customObjects = {}, fastWeightInit = false) {\n let configArray;\n let extraModelConfig = {};\n if (config instanceof Array) {\n if (!(config[0].className != null) ||\n config[0]['className'] === 'Merge') {\n throw new ValueError('Legacy serialization format not supported yet.');\n }\n configArray = config;\n }\n else {\n util.assert(config['layers'] != null, () => `When the config data for a Sequential model is not an Array, ` +\n `it must be an Object that contains the 'layers' field.`);\n configArray = config['layers'];\n delete config['layers'];\n extraModelConfig = config;\n }\n const model = new cls(extraModelConfig);\n if (!(model instanceof Sequential)) {\n throw new NotImplementedError(`Sequential.fromConfig called on non-Sequential input: ${model}`);\n }\n for (const conf of configArray) {\n const customObjects = undefined;\n const layer = deserialize(conf, customObjects, fastWeightInit);\n if (fastWeightInit) {\n layer.setFastWeightInitDuringBuild(true);\n }\n model.add(layer);\n }\n return model;\n }\n /**\n * Setter used for force stopping of LayersModel.fit() (i.e., training).\n *\n * Example:\n *\n * ```js\n * const model = tf.sequential();\n * model.add(tf.layers.dense({units: 1, inputShape: [10]}));\n * model.compile({loss: 'meanSquaredError', optimizer: 'sgd'});\n * const xs = tf.ones([8, 10]);\n * const ys = tf.zeros([8, 1]);\n *\n * const history = await model.fit(xs, ys, {\n * epochs: 10,\n * callbacks: {\n * onEpochEnd: async (epoch, logs) => {\n * if (epoch === 2) {\n * model.stopTraining = true;\n * }\n * }\n * }\n * });\n *\n * // There should be only 3 values in the loss array, instead of 10 values,\n * // due to the stopping after 3 epochs.\n * console.log(history.history.loss);\n * ```\n */\n set stopTraining(stop) {\n // TODO(cais): When refactoring to remove the composition pattern happens,\n // remove this method overriding.\n if (this.model == null) {\n throw new ValueError('Cannot set the stopTraining property of a sequential model before ' +\n 'it is compiled.');\n }\n this.model.stopTraining = 
stop;\n }\n get stopTraining() {\n if (this.model == null) {\n throw new ValueError('Cannot get the stopTraining property of a sequential model before ' +\n 'it is compiled.');\n }\n return this.model.stopTraining;\n }\n // TODO(cais): Override get trainableWeights() here\n // tslint:disable-next-line:no-any\n getConfig() {\n // NOTE(cais): We override the return type of getConfig() to `any` here,\n // because the `Sequential` class is a special case among `Container`\n // subtypes in that its getConfig() method returns an Array (not a\n // dict).\n const layers = [];\n for (const layer of this.layers) {\n const dict = {};\n dict['className'] = layer.getClassName();\n dict['config'] = layer.getConfig();\n layers.push(dict);\n }\n return { name: this.name, layers };\n }\n}\n/** @nocollapse */\nSequential.className = 'Sequential';\nserialization.registerClass(Sequential);\n//# sourceMappingURL=models.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport { CallbackConstructorRegistry } from './base_callbacks';\nimport { Input, } from './engine/input_layer';\nimport { LayersModel } from './engine/training';\nimport { loadLayersModelInternal, Sequential } from './models';\n// TODO(cais): Add doc string to all the public static functions in this\n// class; include exectuable JavaScript code snippets where applicable\n// (b/74074458).\n// LayersModel and related factory methods.\n/**\n * A model is a data structure that consists of `Layers` and defines inputs\n * and outputs.\n *\n * The key difference between `tf.model` and `tf.sequential` is that\n * `tf.model` is more generic, supporting an arbitrary graph (without\n * cycles) of layers. `tf.sequential` is less generic and supports only a linear\n * stack of layers.\n *\n * When creating a `tf.LayersModel`, specify its input(s) and output(s). Layers\n * are used to wire input(s) to output(s).\n *\n * For example, the following code snippet defines a model consisting of\n * two `dense` layers, with 10 and 4 units, respectively.\n *\n * ```js\n * // Define input, which has a size of 5 (not including batch dimension).\n * const input = tf.input({shape: [5]});\n *\n * // First dense layer uses relu activation.\n * const denseLayer1 = tf.layers.dense({units: 10, activation: 'relu'});\n * // Second dense layer uses softmax activation.\n * const denseLayer2 = tf.layers.dense({units: 4, activation: 'softmax'});\n *\n * // Obtain the output symbolic tensor by applying the layers on the input.\n * const output = denseLayer2.apply(denseLayer1.apply(input));\n *\n * // Create the model based on the inputs.\n * const model = tf.model({inputs: input, outputs: output});\n *\n * // The model can be used for training, evaluation and prediction.\n * // For example, the following line runs prediction with the model on\n * // some fake data.\n * model.predict(tf.ones([2, 5])).print();\n * ```\n * See also:\n * `tf.sequential`, `tf.loadLayersModel`.\n *\n * @doc {heading: 'Models', subheading: 'Creation'}\n */\nexport function model(args) {\n return new LayersModel(args);\n}\n/**\n * Creates a `tf.Sequential` model. A sequential model is any model where the\n * outputs of one layer are the inputs to the next layer, i.e. 
the model\n * topology is a simple 'stack' of layers, with no branching or skipping.\n *\n * This means that the first layer passed to a `tf.Sequential` model should have\n * a defined input shape. What that means is that it should have received an\n * `inputShape` or `batchInputShape` argument, or for some type of layers\n * (recurrent, Dense...) an `inputDim` argument.\n *\n * The key difference between `tf.model` and `tf.sequential` is that\n * `tf.sequential` is less generic, supporting only a linear stack of layers.\n * `tf.model` is more generic and supports an arbitrary graph (without\n * cycles) of layers.\n *\n * Examples:\n *\n * ```js\n * const model = tf.sequential();\n *\n * // First layer must have an input shape defined.\n * model.add(tf.layers.dense({units: 32, inputShape: [50]}));\n * // Afterwards, TF.js does automatic shape inference.\n * model.add(tf.layers.dense({units: 4}));\n *\n * // Inspect the inferred shape of the model's output, which equals\n * // `[null, 4]`. The 1st dimension is the undetermined batch dimension; the\n * // 2nd is the output size of the model's last layer.\n * console.log(JSON.stringify(model.outputs[0].shape));\n * ```\n *\n * It is also possible to specify a batch size (with potentially undetermined\n * batch dimension, denoted by \"null\") for the first layer using the\n * `batchInputShape` key. The following example is equivalent to the above:\n *\n * ```js\n * const model = tf.sequential();\n *\n * // First layer must have a defined input shape\n * model.add(tf.layers.dense({units: 32, batchInputShape: [null, 50]}));\n * // Afterwards, TF.js does automatic shape inference.\n * model.add(tf.layers.dense({units: 4}));\n *\n * // Inspect the inferred shape of the model's output.\n * console.log(JSON.stringify(model.outputs[0].shape));\n * ```\n *\n * You can also use an `Array` of already-constructed `Layer`s to create\n * a `tf.Sequential` model:\n *\n * ```js\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 32, inputShape: [50]}),\n * tf.layers.dense({units: 4})]\n * });\n * console.log(JSON.stringify(model.outputs[0].shape));\n * ```\n *\n * @doc {heading: 'Models', subheading: 'Creation'}\n */\nexport function sequential(config) {\n return new Sequential(config);\n}\n/**\n * Load a model composed of Layer objects, including its topology and optionally\n * weights. See the Tutorial named \"How to import a Keras Model\" for usage\n * examples.\n *\n * This method is applicable to:\n *\n * 1. Models created with the `tf.layers.*`, `tf.sequential`, and\n * `tf.model` APIs of TensorFlow.js and later saved with the\n * `tf.LayersModel.save` method.\n * 2. Models converted from Keras or TensorFlow tf.keras using the\n * [tensorflowjs_converter](https://github.com/tensorflow/tfjs/tree/master/tfjs-converter).\n *\n * This mode is *not* applicable to TensorFlow `SavedModel`s or their converted\n * forms. For those models, use `tf.loadGraphModel`.\n *\n * Example 1. 
Load a model from an HTTP server.\n *\n * ```js\n * const model = await tf.loadLayersModel(\n * 'https://storage.googleapis.com/tfjs-models/tfjs/iris_v1/model.json');\n * model.summary();\n * ```\n *\n * Example 2: Save `model`'s topology and weights to browser [local\n * storage](https://developer.mozilla.org/en-US/docs/Web/API/Window/localStorage);\n * then load it back.\n *\n * ```js\n * const model = tf.sequential(\n * {layers: [tf.layers.dense({units: 1, inputShape: [3]})]});\n * console.log('Prediction from original model:');\n * model.predict(tf.ones([1, 3])).print();\n *\n * const saveResults = await model.save('localstorage://my-model-1');\n *\n * const loadedModel = await tf.loadLayersModel('localstorage://my-model-1');\n * console.log('Prediction from loaded model:');\n * loadedModel.predict(tf.ones([1, 3])).print();\n * ```\n *\n * Example 3. Saving `model`'s topology and weights to browser\n * [IndexedDB](https://developer.mozilla.org/en-US/docs/Web/API/IndexedDB_API);\n * then load it back.\n *\n * ```js\n * const model = tf.sequential(\n * {layers: [tf.layers.dense({units: 1, inputShape: [3]})]});\n * console.log('Prediction from original model:');\n * model.predict(tf.ones([1, 3])).print();\n *\n * const saveResults = await model.save('indexeddb://my-model-1');\n *\n * const loadedModel = await tf.loadLayersModel('indexeddb://my-model-1');\n * console.log('Prediction from loaded model:');\n * loadedModel.predict(tf.ones([1, 3])).print();\n * ```\n *\n * Example 4. Load a model from user-selected files from HTML\n * [file input\n * elements](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/input/file).\n *\n * ```js\n * // Note: this code snippet will not work without the HTML elements in the\n * // page\n * const jsonUpload = document.getElementById('json-upload');\n * const weightsUpload = document.getElementById('weights-upload');\n *\n * const model = await tf.loadLayersModel(\n * tf.io.browserFiles([jsonUpload.files[0], weightsUpload.files[0]]));\n * ```\n *\n * @param pathOrIOHandler Can be either of the two formats\n * 1. A string path to the `ModelAndWeightsConfig` JSON describing\n * the model in the canonical TensorFlow.js format. For file://\n * (tfjs-node-only), http:// and https:// schemas, the path can be\n * either absolute or relative.\n * 2. An `tf.io.IOHandler` object that loads model artifacts with its `load`\n * method.\n * @param options Optional configuration arguments for the model loading,\n * including:\n * - `strict`: Require that the provided weights exactly match those required\n * by the layers. Default true. 
Passing false means that both extra\n * weights and missing weights will be silently ignored.\n * - `onProgress`: A function of the signature `(fraction: number) => void',\n * that can be used as the progress callback for the model loading.\n * @returns A `Promise` of `tf.LayersModel`, with the topology and weights\n * loaded.\n *\n * @doc {heading: 'Models', subheading: 'Loading'}\n */\nexport function loadLayersModel(pathOrIOHandler, options) {\n if (options == null) {\n options = {};\n }\n return loadLayersModelInternal(pathOrIOHandler, options);\n}\n/**\n * Used to instantiate an input to a model as a `tf.SymbolicTensor`.\n *\n * Users should call the `input` factory function for\n * consistency with other generator functions.\n *\n * Example:\n *\n * ```js\n * // Defines a simple logistic regression model with 32 dimensional input\n * // and 3 dimensional output.\n * const x = tf.input({shape: [32]});\n * const y = tf.layers.dense({units: 3, activation: 'softmax'}).apply(x);\n * const model = tf.model({inputs: x, outputs: y});\n * model.predict(tf.ones([2, 32])).print();\n * ```\n *\n * Note: `input` is only necessary when using `model`. When using\n * `sequential`, specify `inputShape` for the first layer or use `inputLayer`\n * as the first layer.\n *\n * @doc {heading: 'Models', subheading: 'Inputs'}\n */\nexport function input(config) {\n return Input(config);\n}\nexport function registerCallbackConstructor(verbosityLevel, callbackConstructor) {\n CallbackConstructorRegistry.registerCallbackConstructor(verbosityLevel, callbackConstructor);\n}\n//# sourceMappingURL=exports.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n// Layer activation functions\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { serialization, tidy } from '@tensorflow/tfjs-core';\nimport * as K from './backend/tfjs_backend';\nimport { deserializeKerasObject } from './utils/generic_utils';\n/**\n * Base class for Activations.\n *\n * Special note: due to cross-language compatibility reasons, the\n * static readonly className field in this family of classes must be set to\n * the initialLowerCamelCase name of the activation.\n */\nexport class Activation extends serialization.Serializable {\n getConfig() {\n return {};\n }\n}\n/**\n * Exponential linear unit (ELU).\n * Reference: https://arxiv.org/abs/1511.07289\n */\nexport class Elu extends Activation {\n /**\n * Calculate the activation function.\n *\n * @param x: Input.\n * @param alpha: Scaling factor the negative section.\n * @return Output of the ELU activation.\n */\n apply(x, alpha = 1) {\n return K.elu(x, alpha);\n }\n}\n/** @nocollapse */\nElu.className = 'elu';\nserialization.registerClass(Elu);\n/**\n * Scaled Exponential Linear Unit. 
(Klambauer et al., 2017).\n * Reference: Self-Normalizing Neural Networks, https://arxiv.org/abs/1706.02515\n * Notes:\n * - To be used together with the initialization \"lecunNormal\".\n * - To be used together with the dropout variant \"AlphaDropout\".\n */\nexport class Selu extends Activation {\n apply(x) {\n return tfc.selu(x);\n }\n}\n/** @nocollapse */\nSelu.className = 'selu';\nserialization.registerClass(Selu);\n/**\n * Rectified linear unit\n */\nexport class Relu extends Activation {\n apply(x) {\n return tfc.relu(x);\n }\n}\n/** @nocollapse */\nRelu.className = 'relu';\nserialization.registerClass(Relu);\n/**\n * Rectified linear unit activation maxing out at 6.0.\n */\nexport class Relu6 extends Activation {\n apply(x) {\n return tidy(() => tfc.minimum(6.0, tfc.relu(x)));\n }\n}\n/** @nocollapse */\nRelu6.className = 'relu6';\nserialization.registerClass(Relu6);\n//* Linear activation (no-op) */\nexport class Linear extends Activation {\n apply(x) {\n return x;\n }\n}\n/** @nocollapse */\nLinear.className = 'linear';\nserialization.registerClass(Linear);\n/**\n * Sigmoid activation function.\n */\nexport class Sigmoid extends Activation {\n apply(x) {\n return tfc.sigmoid(x);\n }\n}\n/** @nocollapse */\nSigmoid.className = 'sigmoid';\nserialization.registerClass(Sigmoid);\n/**\n * Segment-wise linear approximation of sigmoid.\n */\nexport class HardSigmoid extends Activation {\n apply(x) {\n return K.hardSigmoid(x);\n }\n}\n/** @nocollapse */\nHardSigmoid.className = 'hardSigmoid';\nserialization.registerClass(HardSigmoid);\n/**\n * Softplus activation function.\n */\nexport class Softplus extends Activation {\n apply(x) {\n return tfc.softplus(x);\n }\n}\n/** @nocollapse */\nSoftplus.className = 'softplus';\nserialization.registerClass(Softplus);\n/**\n * Softsign activation function.\n */\nexport class Softsign extends Activation {\n apply(x) {\n return K.softsign(x);\n }\n}\n/** @nocollapse */\nSoftsign.className = 'softsign';\nserialization.registerClass(Softsign);\n/**\n * Hyperbolic tangent function.\n */\nexport class Tanh extends Activation {\n apply(x) {\n return tfc.tanh(x);\n }\n}\n/** @nocollapse */\nTanh.className = 'tanh';\nserialization.registerClass(Tanh);\n/**\n * Softmax activation function\n */\nexport class Softmax extends Activation {\n /**\n * Calculate the activation function.\n *\n * @param x Tensor.\n * @param axis Integer, axis along which the softmax normalization is applied.\n * Invalid if < 2, as softmax across 1 (the batch dimension) is assumed to be\n * an error.\n *\n * @returns a Tensor of the same shape as x\n *\n * @throws ValueError: In case `dim(x) < 2`.\n */\n apply(x, axis = (-1)) {\n return tfc.softmax(x, axis);\n }\n}\n/** @nocollapse */\nSoftmax.className = 'softmax';\nserialization.registerClass(Softmax);\n/**\n * Log softmax activation function\n */\nexport class LogSoftmax extends Activation {\n /**\n * Calculate the activation function of log softmax:\n * log( exp(x_i) / sum(exp(x)) )\n *\n * @param x Tensor.\n * @param axis Integer, axis along which the softmax normalization is applied.\n * Invalid if < 2, as softmax across 1 (the batch dimension) is assumed to be\n * an error.\n *\n * @returns a Tensor of the same shape as x\n *\n * @throws ValueError: In case `dim(x) < 2`.\n */\n apply(x, axis = (-1)) {\n return tfc.logSoftmax(x, axis);\n }\n}\n/** @nocollapse */\nLogSoftmax.className = 'logSoftmax';\nserialization.registerClass(LogSoftmax);\n/**\n * Swish activation function\n */\nexport class Swish extends Activation {\n /**\n 
* Calculate the activation function.\n *\n * @param x Tensor.\n * @param alpha Scaling factor for the sigmoid function.\n * @returns a Tensor of the same shape as x\n */\n apply(x, alpha = 1) {\n return tidy(() => tfc.sigmoid(x.mul(alpha)).mul(x));\n }\n}\n/** @nocollapse */\nSwish.className = 'swish';\nserialization.registerClass(Swish);\nexport function serializeActivation(activation) {\n return activation.getClassName();\n}\nexport function deserializeActivation(config, customObjects = {}) {\n return deserializeKerasObject(config, serialization.SerializationMap.getMap().classNameMap, customObjects, 'activation');\n}\nexport function getActivation(identifier) {\n if (identifier == null) {\n const config = {};\n config['className'] = 'linear';\n config['config'] = {};\n return deserializeActivation(config);\n }\n if (typeof identifier === 'string') {\n const config = {};\n config['className'] = identifier;\n config['config'] = {};\n return deserializeActivation(config);\n }\n else if (identifier instanceof Activation) {\n return identifier;\n }\n else {\n return deserializeActivation(identifier);\n }\n}\n//# sourceMappingURL=activations.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* original source: keras/regularizers.py */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { abs, add, serialization, sum, tidy, zeros } from '@tensorflow/tfjs-core';\nimport * as K from './backend/tfjs_backend';\nimport { deserializeKerasObject, serializeKerasObject } from './utils/generic_utils';\nfunction assertObjectArgs(args) {\n if (args != null && typeof args !== 'object') {\n throw new Error(`Argument to L1L2 regularizer's constructor is expected to be an ` +\n `object, but received: ${args}`);\n }\n}\n/**\n * Regularizer base class.\n */\nexport class Regularizer extends serialization.Serializable {\n}\nexport class L1L2 extends Regularizer {\n constructor(args) {\n super();\n assertObjectArgs(args);\n this.l1 = args == null || args.l1 == null ? 0.01 : args.l1;\n this.l2 = args == null || args.l2 == null ? 0.01 : args.l2;\n this.hasL1 = this.l1 !== 0;\n this.hasL2 = this.l2 !== 0;\n }\n /**\n * Porting note: Renamed from __call__.\n * @param x Variable of which to calculate the regularization score.\n */\n apply(x) {\n return tidy(() => {\n let regularization = zeros([1]);\n if (this.hasL1) {\n regularization = add(regularization, sum(tfc.mul(this.l1, abs(x))));\n }\n if (this.hasL2) {\n regularization =\n add(regularization, sum(tfc.mul(this.l2, K.square(x))));\n }\n return regularization.asScalar();\n });\n }\n getConfig() {\n return { 'l1': this.l1, 'l2': this.l2 };\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls({ l1: config['l1'], l2: config['l2'] });\n }\n}\n/** @nocollapse */\nL1L2.className = 'L1L2';\nserialization.registerClass(L1L2);\nexport function l1(args) {\n assertObjectArgs(args);\n return new L1L2({ l1: args != null ? args.l1 : null, l2: 0 });\n}\nexport function l2(args) {\n assertObjectArgs(args);\n return new L1L2({ l2: args != null ? 
args.l2 : null, l1: 0 });\n}\n// Maps the JavaScript-like identifier keys to the corresponding keras symbols.\nexport const REGULARIZER_IDENTIFIER_REGISTRY_SYMBOL_MAP = {\n 'l1l2': 'L1L2'\n};\nexport function serializeRegularizer(constraint) {\n return serializeKerasObject(constraint);\n}\nexport function deserializeRegularizer(config, customObjects = {}) {\n return deserializeKerasObject(config, serialization.SerializationMap.getMap().classNameMap, customObjects, 'regularizer');\n}\nexport function getRegularizer(identifier) {\n if (identifier == null) {\n return null;\n }\n if (typeof identifier === 'string') {\n const className = identifier in REGULARIZER_IDENTIFIER_REGISTRY_SYMBOL_MAP ?\n REGULARIZER_IDENTIFIER_REGISTRY_SYMBOL_MAP[identifier] :\n identifier;\n const config = { className, config: {} };\n return deserializeRegularizer(config);\n }\n else if (identifier instanceof Regularizer) {\n return identifier;\n }\n else {\n return deserializeRegularizer(identifier);\n }\n}\n//# sourceMappingURL=regularizers.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Advanced activation layers.\n */\nimport { clipByValue, elu, leakyRelu, prelu, relu, serialization } from '@tensorflow/tfjs-core';\nimport { Softmax as softmaxActivation } from '../activations';\nimport { cast } from '../backend/tfjs_backend';\nimport { getConstraint, serializeConstraint } from '../constraints';\nimport { InputSpec, Layer } from '../engine/topology';\nimport { NotImplementedError, ValueError } from '../errors';\nimport { getInitializer, serializeInitializer } from '../initializers';\nimport { getRegularizer, serializeRegularizer } from '../regularizers';\nimport { getExactlyOneShape, getExactlyOneTensor } from '../utils/types_utils';\nexport class ReLU extends Layer {\n constructor(args) {\n super(args == null ? {} : args);\n this.supportsMasking = true;\n if (args != null) {\n this.maxValue = args.maxValue;\n }\n }\n call(inputs, kwargs) {\n inputs = getExactlyOneTensor(inputs);\n let output = relu(inputs);\n if (this.maxValue != null) {\n output = clipByValue(output, 0, this.maxValue);\n }\n return output;\n }\n computeOutputShape(inputShape) {\n return inputShape;\n }\n getConfig() {\n const config = { maxValue: this.maxValue };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nReLU.className = 'ReLU';\nserialization.registerClass(ReLU);\nexport class LeakyReLU extends Layer {\n constructor(args) {\n super(args == null ? {} : args);\n this.DEFAULT_ALPHA = 0.3;\n if (args == null) {\n args = {};\n }\n this.alpha = args.alpha == null ? this.DEFAULT_ALPHA : args.alpha;\n }\n call(inputs, kwargs) {\n const x = getExactlyOneTensor(inputs);\n return leakyRelu(x, this.alpha);\n }\n computeOutputShape(inputShape) {\n return inputShape;\n }\n getConfig() {\n const config = { alpha: this.alpha };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nLeakyReLU.className = 'LeakyReLU';\nserialization.registerClass(LeakyReLU);\nexport class PReLU extends Layer {\n constructor(args) {\n super(args == null ? 
{} : args);\n this.DEFAULT_ALPHA_INITIALIZER = 'zeros';\n if (args == null) {\n args = {};\n }\n this.supportsMasking = true;\n this.alphaInitializer =\n getInitializer(args.alphaInitializer || this.DEFAULT_ALPHA_INITIALIZER);\n this.alphaRegularizer = getRegularizer(args.alphaRegularizer);\n this.alphaConstraint = getConstraint(args.alphaConstraint);\n if (args.sharedAxes == null) {\n this.sharedAxes = null;\n }\n else if (Array.isArray(args.sharedAxes)) {\n this.sharedAxes = args.sharedAxes;\n }\n else if (typeof args.sharedAxes === 'number') {\n this.sharedAxes = [args.sharedAxes];\n }\n else {\n throw new ValueError(`Expected sharedAxes to be a number or an array of numbers, ` +\n `but got ${args.sharedAxes}`);\n }\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const paramShape = inputShape.slice(1);\n if (this.sharedAxes != null) {\n for (const i of this.sharedAxes) {\n paramShape[i - 1] = 1;\n }\n }\n this.alpha = this.addWeight('alpha', paramShape, 'float32', this.alphaInitializer, this.alphaRegularizer, true, this.alphaConstraint);\n // Set input spec.\n const axes = {};\n if (this.sharedAxes != null) {\n for (let i = 1; i < inputShape.length; ++i) {\n axes[i] = inputShape[i];\n }\n }\n this.inputSpec = [new InputSpec({\n ndim: inputShape.length,\n axes,\n })];\n this.built = true;\n }\n call(inputs, kwargs) {\n inputs = getExactlyOneTensor(inputs);\n return prelu(inputs, this.alpha.read());\n }\n getConfig() {\n const config = {\n alphaInitializer: serializeInitializer(this.alphaInitializer),\n alphaRegularizer: serializeRegularizer(this.alphaRegularizer),\n alphaConstraint: serializeConstraint(this.alphaConstraint),\n sharedAxes: this.sharedAxes\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nPReLU.className = 'PReLU';\nserialization.registerClass(PReLU);\nexport class ELU extends Layer {\n constructor(args) {\n super(args == null ? {} : args);\n this.DEFAULT_ALPHA = 1.0;\n if (args == null) {\n args = {};\n }\n if (args.alpha != null && args.alpha !== this.DEFAULT_ALPHA) {\n throw new NotImplementedError(`Non-default alpha value (${args.alpha}) is not supported by the ` +\n `ELU layer yet.`);\n }\n this.alpha = args.alpha == null ? this.DEFAULT_ALPHA : args.alpha;\n }\n call(inputs, kwargs) {\n const x = getExactlyOneTensor(inputs);\n return elu(x);\n }\n computeOutputShape(inputShape) {\n return inputShape;\n }\n getConfig() {\n const config = { alpha: this.alpha };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nELU.className = 'ELU';\nserialization.registerClass(ELU);\nexport class ThresholdedReLU extends Layer {\n constructor(args) {\n super(args == null ? {} : args);\n this.DEFAULT_THETA = 1.0;\n if (args == null) {\n args = {};\n }\n this.theta = args.theta == null ? this.DEFAULT_THETA : args.theta;\n }\n call(inputs, kwargs) {\n const x = getExactlyOneTensor(inputs);\n return x.mul(cast(x.greater(this.theta), 'float32'));\n }\n computeOutputShape(inputShape) {\n return inputShape;\n }\n getConfig() {\n const config = { theta: this.theta };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nThresholdedReLU.className = 'ThresholdedReLU';\nserialization.registerClass(ThresholdedReLU);\nexport class Softmax extends Layer {\n constructor(args) {\n super(args == null ? 
{} : args);\n this.DEFAULT_AXIS = 1.0;\n if (args == null) {\n args = {};\n }\n this.softmax = new softmaxActivation().apply;\n this.axis = args.axis == null ? this.DEFAULT_AXIS : args.axis;\n }\n call(inputs, kwargs) {\n const x = getExactlyOneTensor(inputs);\n return this.softmax(x, this.axis);\n }\n computeOutputShape(inputShape) {\n return inputShape;\n }\n getConfig() {\n const config = { axis: this.axis };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nSoftmax.className = 'Softmax';\nserialization.registerClass(Softmax);\n//# sourceMappingURL=advanced_activations.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport { ValueError } from '../errors';\nimport { pyListRepeat } from './generic_utils';\nimport { isInteger, max } from './math_utils';\n/**\n * Transforms a single number of array of numbers into an array of numbers.\n * @param value\n * @param n: The size of the tuple to be returned.\n * @param name: Name of the parameter, used for generating error messages.\n * @returns An array of numbers.\n */\nexport function normalizeArray(value, n, name) {\n if (typeof value === 'number') {\n return pyListRepeat(value, n);\n }\n else {\n if (value.length !== n) {\n throw new ValueError(`The ${name} argument must be an integer or tuple of ${n} integers.` +\n ` Received: ${value.length} elements.`);\n }\n for (let i = 0; i < n; ++i) {\n const singleValue = value[i];\n if (!isInteger(singleValue)) {\n throw new ValueError(`The ${name} argument must be an integer or tuple of ${n}` +\n ` integers. 
Received: ${JSON.stringify(value)} including a` +\n ` non-integer number ${singleValue}`);\n }\n }\n return value;\n }\n}\n/**\n * Determines output length of a convolution given input length.\n * @param inputLength\n * @param filterSize\n * @param padding\n * @param stride\n * @param dilation: dilation rate.\n */\nexport function convOutputLength(inputLength, filterSize, padding, stride, dilation = 1) {\n if (inputLength == null) {\n return inputLength;\n }\n const dilatedFilterSize = filterSize + (filterSize - 1) * (dilation - 1);\n let outputLength;\n if (padding === 'same') {\n outputLength = inputLength;\n }\n else { // VALID\n outputLength = inputLength - dilatedFilterSize + 1;\n }\n return Math.floor((outputLength + stride - 1) / stride);\n}\nexport function deconvLength(dimSize, strideSize, kernelSize, padding) {\n if (dimSize == null) {\n return null;\n }\n if (padding === 'valid') {\n dimSize = dimSize * strideSize + max([kernelSize - strideSize, 0]);\n }\n else if (padding === 'same') {\n dimSize = dimSize * strideSize;\n }\n else {\n throw new ValueError(`Unsupport padding mode: ${padding}.`);\n }\n return dimSize;\n}\n//# sourceMappingURL=conv_utils.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * TensorFlow.js Layers: Convolutional Layers\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { serialization, tidy } from '@tensorflow/tfjs-core';\nimport { getActivation, serializeActivation } from '../activations';\nimport { imageDataFormat } from '../backend/common';\nimport * as K from '../backend/tfjs_backend';\nimport { checkDataFormat, checkPaddingMode } from '../common';\nimport { getConstraint, serializeConstraint } from '../constraints';\nimport { InputSpec, Layer } from '../engine/topology';\nimport { NotImplementedError, ValueError } from '../errors';\nimport { getInitializer, serializeInitializer } from '../initializers';\nimport { getRegularizer, serializeRegularizer } from '../regularizers';\nimport { convOutputLength, deconvLength, normalizeArray } from '../utils/conv_utils';\nimport * as generic_utils from '../utils/generic_utils';\nimport { getExactlyOneShape, getExactlyOneTensor } from '../utils/types_utils';\n/**\n * Transpose and cast the input before the conv2d.\n * @param x Input image tensor.\n * @param dataFormat\n */\nexport function preprocessConv2DInput(x, dataFormat) {\n // TODO(cais): Cast type to float32 if not.\n return tidy(() => {\n checkDataFormat(dataFormat);\n if (dataFormat === 'channelsFirst') {\n return tfc.transpose(x, [0, 2, 3, 1]); // NCHW -> NHWC.\n }\n else {\n return x;\n }\n });\n}\n/**\n * Transpose and cast the input before the conv3d.\n * @param x Input image tensor.\n * @param dataFormat\n */\nexport function preprocessConv3DInput(x, dataFormat) {\n return tidy(() => {\n checkDataFormat(dataFormat);\n if (dataFormat === 'channelsFirst') {\n return tfc.transpose(x, [0, 2, 3, 4, 1]); // NCDHW -> NDHWC.\n }\n else {\n return x;\n }\n });\n}\n/**\n * 1D-convolution with bias added.\n *\n * Porting Note: This function does not exist in the Python Keras backend.\n * It is exactly the same as `conv2d`, except the added `bias`.\n *\n * @param x Input tensor, rank-3, of shape `[batchSize, width, inChannels]`.\n * @param kernel Kernel, rank-3, of shape `[filterWidth, inDepth, 
outDepth]`.\n * @param bias Bias, rank-3, of shape `[outDepth]`.\n * @param strides\n * @param padding Padding mode.\n * @param dataFormat Data format.\n * @param dilationRate\n * @returns The result of the 1D convolution.\n * @throws ValueError, if `x`, `kernel` or `bias` is not of the correct rank.\n */\nexport function conv1dWithBias(x, kernel, bias, strides = 1, padding = 'valid', dataFormat, dilationRate = 1) {\n return tidy(() => {\n if (dataFormat == null) {\n dataFormat = imageDataFormat();\n }\n checkDataFormat(dataFormat);\n // Check the ranks of x, kernel and bias.\n if (x.shape.length !== 3) {\n throw new ValueError(`The input of a conv1dWithBias operation should be 3, but is ` +\n `${x.shape.length} instead.`);\n }\n if (kernel.shape.length !== 3) {\n throw new ValueError(`The kernel for a conv1dWithBias operation should be 3, but is ` +\n `${kernel.shape.length} instead`);\n }\n if (bias != null && bias.shape.length !== 1) {\n throw new ValueError(`The bias for a conv1dWithBias operation should be 1, but is ` +\n `${kernel.shape.length} instead`);\n }\n // TODO(cais): Support CAUSAL padding mode.\n if (dataFormat === 'channelsFirst') {\n x = tfc.transpose(x, [0, 2, 1]); // NCW -> NWC.\n }\n if (padding === 'causal') {\n throw new NotImplementedError('The support for CAUSAL padding mode in conv1dWithBias is not ' +\n 'implemented yet.');\n }\n let y = tfc.conv1d(x, kernel, strides, padding === 'same' ? 'same' : 'valid', 'NWC', dilationRate);\n if (bias != null) {\n y = K.biasAdd(y, bias);\n }\n return y;\n });\n}\n/**\n * 1D-convolution.\n *\n * @param x Input tensor, rank-3, of shape `[batchSize, width, inChannels]`.\n * @param kernel Kernel, rank-3, of shape `[filterWidth, inDepth, outDepth]`.s\n * @param strides\n * @param padding Padding mode.\n * @param dataFormat Data format.\n * @param dilationRate\n * @returns The result of the 1D convolution.\n * @throws ValueError, if `x`, `kernel` or `bias` is not of the correct rank.\n */\nexport function conv1d(x, kernel, strides = 1, padding = 'valid', dataFormat, dilationRate = 1) {\n return tidy(() => {\n checkDataFormat(dataFormat);\n return conv1dWithBias(x, kernel, null, strides, padding, dataFormat, dilationRate);\n });\n}\n/**\n * 2D Convolution\n * @param x\n * @param kernel kernel of the convolution.\n * @param strides strides array.\n * @param padding padding mode. Default to 'valid'.\n * @param dataFormat data format. Defaults to 'channelsLast'.\n * @param dilationRate dilation rate array.\n * @returns Result of the 2D pooling.\n */\nexport function conv2d(x, kernel, strides = [1, 1], padding = 'valid', dataFormat, dilationRate) {\n return tidy(() => {\n checkDataFormat(dataFormat);\n return conv2dWithBiasActivation(x, kernel, null, strides, padding, dataFormat, dilationRate);\n });\n}\n/**\n * 2D Convolution with an added bias and optional activation.\n * Note: This function does not exist in the Python Keras Backend. 
This function\n * is exactly the same as `conv2d`, except the added `bias`.\n */\nexport function conv2dWithBiasActivation(x, kernel, bias, strides = [1, 1], padding = 'valid', dataFormat, dilationRate, activation = null) {\n return tidy(() => {\n if (dataFormat == null) {\n dataFormat = imageDataFormat();\n }\n checkDataFormat(dataFormat);\n if (x.rank !== 3 && x.rank !== 4) {\n throw new ValueError(`conv2dWithBiasActivation expects input to be of rank 3 or 4, ` +\n `but received ${x.rank}.`);\n }\n if (kernel.rank !== 3 && kernel.rank !== 4) {\n throw new ValueError(`conv2dWithBiasActivation expects kernel to be of rank 3 or 4, ` +\n `but received ${x.rank}.`);\n }\n let y = preprocessConv2DInput(x, dataFormat);\n if (padding === 'causal') {\n throw new NotImplementedError('The support for CAUSAL padding mode in conv1dWithBias is not ' +\n 'implemented yet.');\n }\n y = tfc.fused.conv2d({\n x: y,\n filter: kernel,\n strides: strides,\n pad: padding === 'same' ? 'same' : 'valid',\n dilations: dilationRate,\n dataFormat: 'NHWC',\n bias,\n activation\n });\n if (dataFormat === 'channelsFirst') {\n y = tfc.transpose(y, [0, 3, 1, 2]);\n }\n return y;\n });\n}\n/**\n * 3D Convolution.\n * @param x\n * @param kernel kernel of the convolution.\n * @param strides strides array.\n * @param padding padding mode. Default to 'valid'.\n * @param dataFormat data format. Defaults to 'channelsLast'.\n * @param dilationRate dilation rate array.\n * @returns Result of the 3D convolution.\n */\nexport function conv3d(x, kernel, strides = [1, 1, 1], padding = 'valid', dataFormat, dilationRate) {\n return tidy(() => {\n checkDataFormat(dataFormat);\n return conv3dWithBias(x, kernel, null, strides, padding, dataFormat, dilationRate);\n });\n}\n/**\n * 3D Convolution with an added bias.\n * Note: This function does not exist in the Python Keras Backend. This function\n * is exactly the same as `conv3d`, except the added `bias`.\n */\nexport function conv3dWithBias(x, kernel, bias, strides = [1, 1, 1], padding = 'valid', dataFormat, dilationRate) {\n return tidy(() => {\n if (dataFormat == null) {\n dataFormat = imageDataFormat();\n }\n checkDataFormat(dataFormat);\n if (x.rank !== 4 && x.rank !== 5) {\n throw new ValueError(`conv3dWithBias expects input to be of rank 4 or 5, but received ` +\n `${x.rank}.`);\n }\n if (kernel.rank !== 4 && kernel.rank !== 5) {\n throw new ValueError(`conv3dWithBias expects kernel to be of rank 4 or 5, but received ` +\n `${x.rank}.`);\n }\n let y = preprocessConv3DInput(x, dataFormat);\n if (padding === 'causal') {\n throw new NotImplementedError('The support for CAUSAL padding mode in conv3dWithBias is not ' +\n 'implemented yet.');\n }\n y = tfc.conv3d(y, kernel, strides, padding === 'same' ? 
'same' : 'valid', 'NDHWC', dilationRate);\n if (bias != null) {\n y = K.biasAdd(y, bias);\n }\n if (dataFormat === 'channelsFirst') {\n y = tfc.transpose(y, [0, 4, 1, 2, 3]);\n }\n return y;\n });\n}\n/**\n * Abstract convolution layer.\n */\nexport class BaseConv extends Layer {\n constructor(rank, args) {\n super(args);\n this.bias = null;\n this.DEFAULT_KERNEL_INITIALIZER = 'glorotNormal';\n this.DEFAULT_BIAS_INITIALIZER = 'zeros';\n BaseConv.verifyArgs(args);\n this.rank = rank;\n generic_utils.assertPositiveInteger(this.rank, 'rank');\n if (this.rank !== 1 && this.rank !== 2 && this.rank !== 3) {\n throw new NotImplementedError(`Convolution layer for rank other than 1, 2, or 3 (${this.rank}) is ` +\n `not implemented yet.`);\n }\n this.kernelSize = normalizeArray(args.kernelSize, rank, 'kernelSize');\n this.strides = normalizeArray(args.strides == null ? 1 : args.strides, rank, 'strides');\n this.padding = args.padding == null ? 'valid' : args.padding;\n checkPaddingMode(this.padding);\n this.dataFormat =\n args.dataFormat == null ? 'channelsLast' : args.dataFormat;\n checkDataFormat(this.dataFormat);\n this.activation = getActivation(args.activation);\n this.useBias = args.useBias == null ? true : args.useBias;\n this.biasInitializer =\n getInitializer(args.biasInitializer || this.DEFAULT_BIAS_INITIALIZER);\n this.biasConstraint = getConstraint(args.biasConstraint);\n this.biasRegularizer = getRegularizer(args.biasRegularizer);\n this.activityRegularizer = getRegularizer(args.activityRegularizer);\n this.dilationRate = normalizeArray(args.dilationRate == null ? 1 : args.dilationRate, rank, 'dilationRate');\n if (this.rank === 1 &&\n (Array.isArray(this.dilationRate) && this.dilationRate.length !== 1)) {\n throw new ValueError(`dilationRate must be a number or an array of a single number ` +\n `for 1D convolution, but received ` +\n `${JSON.stringify(this.dilationRate)}`);\n }\n else if (this.rank === 2) {\n if (typeof this.dilationRate === 'number') {\n this.dilationRate = [this.dilationRate, this.dilationRate];\n }\n else if (this.dilationRate.length !== 2) {\n throw new ValueError(`dilationRate must be a number or array of two numbers for 2D ` +\n `convolution, but received ${JSON.stringify(this.dilationRate)}`);\n }\n }\n else if (this.rank === 3) {\n if (typeof this.dilationRate === 'number') {\n this.dilationRate =\n [this.dilationRate, this.dilationRate, this.dilationRate];\n }\n else if (this.dilationRate.length !== 3) {\n throw new ValueError(`dilationRate must be a number or array of three numbers for 3D ` +\n `convolution, but received ${JSON.stringify(this.dilationRate)}`);\n }\n }\n }\n static verifyArgs(args) {\n // Check config.kernelSize type and shape.\n generic_utils.assert('kernelSize' in args, `required key 'kernelSize' not in config`);\n if (typeof args.kernelSize !== 'number' &&\n !generic_utils.checkArrayTypeAndLength(args.kernelSize, 'number', 1, 3)) {\n throw new ValueError(`BaseConv expects config.kernelSize to be number or number[] with ` +\n `length 1, 2, or 3, but received ${JSON.stringify(args.kernelSize)}.`);\n }\n }\n getConfig() {\n const config = {\n kernelSize: this.kernelSize,\n strides: this.strides,\n padding: this.padding,\n dataFormat: this.dataFormat,\n dilationRate: this.dilationRate,\n activation: serializeActivation(this.activation),\n useBias: this.useBias,\n biasInitializer: serializeInitializer(this.biasInitializer),\n biasRegularizer: serializeRegularizer(this.biasRegularizer),\n activityRegularizer: 
serializeRegularizer(this.activityRegularizer),\n biasConstraint: serializeConstraint(this.biasConstraint)\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/**\n * Abstract nD convolution layer. Ancestor of convolution layers which reduce\n * across channels, i.e., Conv1D and Conv2D, but not DepthwiseConv2D.\n */\nexport class Conv extends BaseConv {\n constructor(rank, args) {\n super(rank, args);\n this.kernel = null;\n Conv.verifyArgs(args);\n this.filters = args.filters;\n generic_utils.assertPositiveInteger(this.filters, 'filters');\n this.kernelInitializer = getInitializer(args.kernelInitializer || this.DEFAULT_KERNEL_INITIALIZER);\n this.kernelConstraint = getConstraint(args.kernelConstraint);\n this.kernelRegularizer = getRegularizer(args.kernelRegularizer);\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const channelAxis = this.dataFormat === 'channelsFirst' ? 1 : inputShape.length - 1;\n if (inputShape[channelAxis] == null) {\n throw new ValueError(`The channel dimension of the input should be defined. ` +\n `Found ${inputShape[channelAxis]}`);\n }\n const inputDim = inputShape[channelAxis];\n const kernelShape = this.kernelSize.concat([inputDim, this.filters]);\n this.kernel = this.addWeight('kernel', kernelShape, null, this.kernelInitializer, this.kernelRegularizer, true, this.kernelConstraint);\n if (this.useBias) {\n this.bias = this.addWeight('bias', [this.filters], null, this.biasInitializer, this.biasRegularizer, true, this.biasConstraint);\n }\n this.inputSpec = [{ ndim: this.rank + 2, axes: { [channelAxis]: inputDim } }];\n this.built = true;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n inputs = getExactlyOneTensor(inputs);\n let outputs;\n const biasValue = this.bias == null ? null : this.bias.read();\n const fusedActivationName = generic_utils.mapActivationToFusedKernel(this.activation.getClassName());\n if (fusedActivationName != null && this.rank === 2) {\n outputs = conv2dWithBiasActivation(inputs, this.kernel.read(), biasValue, this.strides, this.padding, this.dataFormat, this.dilationRate, fusedActivationName);\n }\n else {\n if (this.rank === 1) {\n outputs = conv1dWithBias(inputs, this.kernel.read(), biasValue, this.strides[0], this.padding, this.dataFormat, this.dilationRate[0]);\n }\n else if (this.rank === 2) {\n // TODO(cais): Move up to constructor.\n outputs = conv2dWithBiasActivation(inputs, this.kernel.read(), biasValue, this.strides, this.padding, this.dataFormat, this.dilationRate);\n }\n else if (this.rank === 3) {\n outputs = conv3dWithBias(inputs, this.kernel.read(), biasValue, this.strides, this.padding, this.dataFormat, this.dilationRate);\n }\n else {\n throw new NotImplementedError('convolutions greater than 3D are not implemented yet.');\n }\n if (this.activation != null) {\n outputs = this.activation.apply(outputs);\n }\n }\n return outputs;\n });\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const newSpace = [];\n const space = (this.dataFormat === 'channelsLast') ?\n inputShape.slice(1, inputShape.length - 1) :\n inputShape.slice(2);\n for (let i = 0; i < space.length; ++i) {\n const newDim = convOutputLength(space[i], this.kernelSize[i], this.padding, this.strides[i], typeof this.dilationRate === 'number' ? 
this.dilationRate :\n this.dilationRate[i]);\n newSpace.push(newDim);\n }\n let outputShape = [inputShape[0]];\n if (this.dataFormat === 'channelsLast') {\n outputShape = outputShape.concat(newSpace);\n outputShape.push(this.filters);\n }\n else {\n outputShape.push(this.filters);\n outputShape = outputShape.concat(newSpace);\n }\n return outputShape;\n }\n getConfig() {\n const config = {\n filters: this.filters,\n kernelInitializer: serializeInitializer(this.kernelInitializer),\n kernelRegularizer: serializeRegularizer(this.kernelRegularizer),\n kernelConstraint: serializeConstraint(this.kernelConstraint)\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n static verifyArgs(args) {\n // Check config.filters type, shape, and value.\n if (!('filters' in args) || typeof args.filters !== 'number' ||\n args.filters < 1) {\n throw new ValueError(`Convolution layer expected config.filters to be a 'number' > 0 ` +\n `but got ${JSON.stringify(args.filters)}`);\n }\n }\n}\nexport class Conv2D extends Conv {\n constructor(args) {\n super(2, args);\n Conv2D.verifyArgs(args);\n }\n getConfig() {\n const config = super.getConfig();\n delete config['rank'];\n return config;\n }\n static verifyArgs(args) {\n // config.kernelSize must be a number or array of numbers.\n if ((typeof args.kernelSize !== 'number') &&\n !generic_utils.checkArrayTypeAndLength(args.kernelSize, 'number', 1, 2)) {\n throw new ValueError(`Conv2D expects config.kernelSize to be number or number[] with ` +\n `length 1 or 2, but received ${JSON.stringify(args.kernelSize)}.`);\n }\n }\n}\n/** @nocollapse */\nConv2D.className = 'Conv2D';\nserialization.registerClass(Conv2D);\nexport class Conv3D extends Conv {\n constructor(args) {\n super(3, args);\n Conv3D.verifyArgs(args);\n }\n getConfig() {\n const config = super.getConfig();\n delete config['rank'];\n return config;\n }\n static verifyArgs(args) {\n // config.kernelSize must be a number or array of numbers.\n if (typeof args.kernelSize !== 'number') {\n if (!(Array.isArray(args.kernelSize) &&\n (args.kernelSize.length === 1 || args.kernelSize.length === 3))) {\n throw new ValueError(`Conv3D expects config.kernelSize to be number or` +\n ` [number, number, number], but received ${JSON.stringify(args.kernelSize)}.`);\n }\n }\n }\n}\n/** @nocollapse */\nConv3D.className = 'Conv3D';\nserialization.registerClass(Conv3D);\nexport class Conv2DTranspose extends Conv2D {\n constructor(args) {\n super(args);\n this.inputSpec = [new InputSpec({ ndim: 4 })];\n if (this.padding !== 'same' && this.padding !== 'valid') {\n throw new ValueError(`Conv2DTranspose currently supports only padding modes 'same' ` +\n `and 'valid', but received padding mode ${this.padding}`);\n }\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n if (inputShape.length !== 4) {\n throw new ValueError('Input should have rank 4; Received input shape: ' +\n JSON.stringify(inputShape));\n }\n const channelAxis = this.dataFormat === 'channelsFirst' ? 1 : inputShape.length - 1;\n if (inputShape[channelAxis] == null) {\n throw new ValueError('The channel dimension of the inputs should be defined. 
' +\n 'Found `None`.');\n }\n const inputDim = inputShape[channelAxis];\n const kernelShape = this.kernelSize.concat([this.filters, inputDim]);\n this.kernel = this.addWeight('kernel', kernelShape, 'float32', this.kernelInitializer, this.kernelRegularizer, true, this.kernelConstraint);\n if (this.useBias) {\n this.bias = this.addWeight('bias', [this.filters], 'float32', this.biasInitializer, this.biasRegularizer, true, this.biasConstraint);\n }\n // Set input spec.\n this.inputSpec =\n [new InputSpec({ ndim: 4, axes: { [channelAxis]: inputDim } })];\n this.built = true;\n }\n call(inputs, kwargs) {\n return tfc.tidy(() => {\n let input = getExactlyOneTensor(inputs);\n if (input.shape.length !== 4) {\n throw new ValueError(`Conv2DTranspose.call() expects input tensor to be rank-4, but ` +\n `received a tensor of rank-${input.shape.length}`);\n }\n const inputShape = input.shape;\n const batchSize = inputShape[0];\n let hAxis;\n let wAxis;\n if (this.dataFormat === 'channelsFirst') {\n hAxis = 2;\n wAxis = 3;\n }\n else {\n hAxis = 1;\n wAxis = 2;\n }\n const height = inputShape[hAxis];\n const width = inputShape[wAxis];\n const kernelH = this.kernelSize[0];\n const kernelW = this.kernelSize[1];\n const strideH = this.strides[0];\n const strideW = this.strides[1];\n // Infer the dynamic output shape.\n const outHeight = deconvLength(height, strideH, kernelH, this.padding);\n const outWidth = deconvLength(width, strideW, kernelW, this.padding);\n // Porting Note: We don't branch based on `this.dataFormat` here,\n // because\n // the tjfs-core function `conv2dTranspose` called below always\n // assumes channelsLast.\n const outputShape = [batchSize, outHeight, outWidth, this.filters];\n if (this.dataFormat !== 'channelsLast') {\n input = tfc.transpose(input, [0, 2, 3, 1]);\n }\n let outputs = tfc.conv2dTranspose(input, this.kernel.read(), outputShape, this.strides, this.padding);\n if (this.dataFormat !== 'channelsLast') {\n outputs = tfc.transpose(outputs, [0, 3, 1, 2]);\n }\n if (this.bias != null) {\n outputs =\n K.biasAdd(outputs, this.bias.read(), this.dataFormat);\n }\n if (this.activation != null) {\n outputs = this.activation.apply(outputs);\n }\n return outputs;\n });\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const outputShape = inputShape.slice();\n let channelAxis;\n let heightAxis;\n let widthAxis;\n if (this.dataFormat === 'channelsFirst') {\n channelAxis = 1;\n heightAxis = 2;\n widthAxis = 3;\n }\n else {\n channelAxis = 3;\n heightAxis = 1;\n widthAxis = 2;\n }\n const kernelH = this.kernelSize[0];\n const kernelW = this.kernelSize[1];\n const strideH = this.strides[0];\n const strideW = this.strides[1];\n outputShape[channelAxis] = this.filters;\n outputShape[heightAxis] =\n deconvLength(outputShape[heightAxis], strideH, kernelH, this.padding);\n outputShape[widthAxis] =\n deconvLength(outputShape[widthAxis], strideW, kernelW, this.padding);\n return outputShape;\n }\n getConfig() {\n const config = super.getConfig();\n delete config['dilationRate'];\n return config;\n }\n}\n/** @nocollapse */\nConv2DTranspose.className = 'Conv2DTranspose';\nserialization.registerClass(Conv2DTranspose);\nexport class SeparableConv extends Conv {\n constructor(rank, config) {\n super(rank, config);\n this.DEFAULT_DEPTHWISE_INITIALIZER = 'glorotUniform';\n this.DEFAULT_POINTWISE_INITIALIZER = 'glorotUniform';\n this.depthwiseKernel = null;\n this.pointwiseKernel = null;\n if (config.filters == null) {\n throw new ValueError('The `filters` 
configuration field is required by SeparableConv, ' +\n 'but is unspecified.');\n }\n if (config.kernelInitializer != null || config.kernelRegularizer != null ||\n config.kernelConstraint != null) {\n throw new ValueError('Fields kernelInitializer, kernelRegularizer and kernelConstraint ' +\n 'are invalid for SeparableConv2D. Use depthwiseInitializer, ' +\n 'depthwiseRegularizer, depthwiseConstraint, pointwiseInitializer, ' +\n 'pointwiseRegularizer and pointwiseConstraint instead.');\n }\n if (config.padding != null && config.padding !== 'same' &&\n config.padding !== 'valid') {\n throw new ValueError(`SeparableConv${this.rank}D supports only padding modes: ` +\n `'same' and 'valid', but received ${JSON.stringify(config.padding)}`);\n }\n this.depthMultiplier =\n config.depthMultiplier == null ? 1 : config.depthMultiplier;\n this.depthwiseInitializer = getInitializer(config.depthwiseInitializer || this.DEFAULT_DEPTHWISE_INITIALIZER);\n this.depthwiseRegularizer = getRegularizer(config.depthwiseRegularizer);\n this.depthwiseConstraint = getConstraint(config.depthwiseConstraint);\n this.pointwiseInitializer = getInitializer(config.depthwiseInitializer || this.DEFAULT_POINTWISE_INITIALIZER);\n this.pointwiseRegularizer = getRegularizer(config.pointwiseRegularizer);\n this.pointwiseConstraint = getConstraint(config.pointwiseConstraint);\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n if (inputShape.length < this.rank + 2) {\n throw new ValueError(`Inputs to SeparableConv${this.rank}D should have rank ` +\n `${this.rank + 2}, but received input shape: ` +\n `${JSON.stringify(inputShape)}`);\n }\n const channelAxis = this.dataFormat === 'channelsFirst' ? 1 : inputShape.length - 1;\n if (inputShape[channelAxis] == null || inputShape[channelAxis] < 0) {\n throw new ValueError(`The channel dimension of the inputs should be defined, ` +\n `but found ${JSON.stringify(inputShape[channelAxis])}`);\n }\n const inputDim = inputShape[channelAxis];\n const depthwiseKernelShape = this.kernelSize.concat([inputDim, this.depthMultiplier]);\n const pointwiseKernelShape = [];\n for (let i = 0; i < this.rank; ++i) {\n pointwiseKernelShape.push(1);\n }\n pointwiseKernelShape.push(inputDim * this.depthMultiplier, this.filters);\n const trainable = true;\n this.depthwiseKernel = this.addWeight('depthwise_kernel', depthwiseKernelShape, 'float32', this.depthwiseInitializer, this.depthwiseRegularizer, trainable, this.depthwiseConstraint);\n this.pointwiseKernel = this.addWeight('pointwise_kernel', pointwiseKernelShape, 'float32', this.pointwiseInitializer, this.pointwiseRegularizer, trainable, this.pointwiseConstraint);\n if (this.useBias) {\n this.bias = this.addWeight('bias', [this.filters], 'float32', this.biasInitializer, this.biasRegularizer, trainable, this.biasConstraint);\n }\n else {\n this.bias = null;\n }\n this.inputSpec =\n [new InputSpec({ ndim: this.rank + 2, axes: { [channelAxis]: inputDim } })];\n this.built = true;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n inputs = getExactlyOneTensor(inputs);\n let output;\n if (this.rank === 1) {\n throw new NotImplementedError('1D separable convolution is not implemented yet.');\n }\n else if (this.rank === 2) {\n if (this.dataFormat === 'channelsFirst') {\n inputs = tfc.transpose(inputs, [0, 2, 3, 1]); // NCHW -> NHWC.\n }\n output = tfc.separableConv2d(inputs, this.depthwiseKernel.read(), this.pointwiseKernel.read(), this.strides, this.padding, this.dilationRate, 'NHWC');\n }\n if (this.useBias) {\n output = 
K.biasAdd(output, this.bias.read(), this.dataFormat);\n }\n if (this.activation != null) {\n output = this.activation.apply(output);\n }\n if (this.dataFormat === 'channelsFirst') {\n output = tfc.transpose(output, [0, 3, 1, 2]); // NHWC -> NCHW.\n }\n return output;\n });\n }\n getConfig() {\n const config = super.getConfig();\n delete config['rank'];\n delete config['kernelInitializer'];\n delete config['kernelRegularizer'];\n delete config['kernelConstraint'];\n config['depthwiseInitializer'] =\n serializeInitializer(this.depthwiseInitializer);\n config['pointwiseInitializer'] =\n serializeInitializer(this.pointwiseInitializer);\n config['depthwiseRegularizer'] =\n serializeRegularizer(this.depthwiseRegularizer);\n config['pointwiseRegularizer'] =\n serializeRegularizer(this.pointwiseRegularizer);\n config['depthwiseConstraint'] =\n serializeConstraint(this.depthwiseConstraint);\n config['pointwiseConstraint'] =\n serializeConstraint(this.pointwiseConstraint);\n return config;\n }\n}\n/** @nocollapse */\nSeparableConv.className = 'SeparableConv';\nexport class SeparableConv2D extends SeparableConv {\n constructor(args) {\n super(2, args);\n }\n}\n/** @nocollapse */\nSeparableConv2D.className = 'SeparableConv2D';\nserialization.registerClass(SeparableConv2D);\nexport class Conv1D extends Conv {\n constructor(args) {\n super(1, args);\n Conv1D.verifyArgs(args);\n this.inputSpec = [{ ndim: 3 }];\n }\n getConfig() {\n const config = super.getConfig();\n delete config['rank'];\n delete config['dataFormat'];\n return config;\n }\n static verifyArgs(args) {\n // config.kernelSize must be a number or array of numbers.\n if (typeof args.kernelSize !== 'number' &&\n !generic_utils.checkArrayTypeAndLength(args.kernelSize, 'number', 1, 1)) {\n throw new ValueError(`Conv1D expects config.kernelSize to be number or number[] with ` +\n `length 1, but received ${JSON.stringify(args.kernelSize)}.`);\n }\n }\n}\n/** @nocollapse */\nConv1D.className = 'Conv1D';\nserialization.registerClass(Conv1D);\nexport class Cropping2D extends Layer {\n constructor(args) {\n super(args);\n if (typeof args.cropping === 'number') {\n this.cropping =\n [[args.cropping, args.cropping], [args.cropping, args.cropping]];\n }\n else if (typeof args.cropping[0] === 'number') {\n this.cropping = [\n [args.cropping[0], args.cropping[0]],\n [args.cropping[1], args.cropping[1]]\n ];\n }\n else {\n this.cropping = args.cropping;\n }\n this.dataFormat =\n args.dataFormat === undefined ? 
'channelsLast' : args.dataFormat;\n this.inputSpec = [{ ndim: 4 }];\n }\n computeOutputShape(inputShape) {\n if (this.dataFormat === 'channelsFirst') {\n return [\n inputShape[0], inputShape[1],\n inputShape[2] - this.cropping[0][0] - this.cropping[0][1],\n inputShape[3] - this.cropping[1][0] - this.cropping[1][1]\n ];\n }\n else {\n return [\n inputShape[0],\n inputShape[1] - this.cropping[0][0] - this.cropping[0][1],\n inputShape[2] - this.cropping[1][0] - this.cropping[1][1], inputShape[3]\n ];\n }\n }\n call(inputs, kwargs) {\n return tidy(() => {\n inputs = getExactlyOneTensor(inputs);\n if (this.dataFormat === 'channelsLast') {\n const hSliced = K.sliceAlongAxis(inputs, this.cropping[0][0], inputs.shape[1] - this.cropping[0][0] - this.cropping[0][1], 2);\n return K.sliceAlongAxis(hSliced, this.cropping[1][0], inputs.shape[2] - this.cropping[1][1] - this.cropping[1][0], 3);\n }\n else {\n const hSliced = K.sliceAlongAxis(inputs, this.cropping[0][0], inputs.shape[2] - this.cropping[0][0] - this.cropping[0][1], 3);\n return K.sliceAlongAxis(hSliced, this.cropping[1][0], inputs.shape[3] - this.cropping[1][1] - this.cropping[1][0], 4);\n }\n });\n }\n getConfig() {\n const config = { cropping: this.cropping, dataFormat: this.dataFormat };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nCropping2D.className = 'Cropping2D';\nserialization.registerClass(Cropping2D);\nexport class UpSampling2D extends Layer {\n constructor(args) {\n super(args);\n this.DEFAULT_SIZE = [2, 2];\n this.inputSpec = [{ ndim: 4 }];\n this.size = args.size == null ? this.DEFAULT_SIZE : args.size;\n this.dataFormat =\n args.dataFormat == null ? 'channelsLast' : args.dataFormat;\n }\n computeOutputShape(inputShape) {\n if (this.dataFormat === 'channelsFirst') {\n const height = inputShape[2] == null ? null : this.size[0] * inputShape[2];\n const width = inputShape[3] == null ? null : this.size[1] * inputShape[3];\n return [inputShape[0], inputShape[1], height, width];\n }\n else {\n const height = inputShape[1] == null ? null : this.size[0] * inputShape[1];\n const width = inputShape[2] == null ? 
null : this.size[1] * inputShape[2];\n return [inputShape[0], height, width, inputShape[3]];\n }\n }\n call(inputs, kwargs) {\n return tfc.tidy(() => {\n let input = getExactlyOneTensor(inputs);\n const inputShape = input.shape;\n if (this.dataFormat === 'channelsFirst') {\n input = tfc.transpose(input, [0, 2, 3, 1]);\n const height = this.size[0] * inputShape[2];\n const width = this.size[1] * inputShape[3];\n const resized = input.resizeNearestNeighbor([height, width]);\n return tfc.transpose(resized, [0, 3, 1, 2]);\n }\n else {\n const height = this.size[0] * inputShape[1];\n const width = this.size[1] * inputShape[2];\n return input.resizeNearestNeighbor([height, width]);\n }\n });\n }\n getConfig() {\n const config = { size: this.size, dataFormat: this.dataFormat };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nUpSampling2D.className = 'UpSampling2D';\nserialization.registerClass(UpSampling2D);\n//# sourceMappingURL=convolutional.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * TensorFlow.js Layers: Depthwise Convolutional Layers\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { serialization, tidy } from '@tensorflow/tfjs-core';\nimport { imageDataFormat } from '../backend/common';\nimport * as K from '../backend/tfjs_backend';\nimport { checkDataFormat } from '../common';\nimport { getConstraint, serializeConstraint } from '../constraints';\nimport { ValueError } from '../errors';\nimport { getInitializer, serializeInitializer } from '../initializers';\nimport { getRegularizer, serializeRegularizer } from '../regularizers';\nimport { convOutputLength } from '../utils/conv_utils';\nimport { getExactlyOneShape, getExactlyOneTensor } from '../utils/types_utils';\nimport { BaseConv, preprocessConv2DInput } from './convolutional';\n/**\n * 2D convolution with separable filters.\n * @param x Input tensor.\n * @param depthwiseKernel Convolution kernel for depthwise convolution.\n * @param strides Strides (Array of two integers).\n * @param padding Padding model.\n * @param dataFormat Data format.\n * @param dilationRate Array of two integers, dilation rates for the separable\n * convolution.\n * @returns Output tensor.\n * @throws ValueError If depthwiseKernel is not a 4D array.\n */\nexport function depthwiseConv2d(x, depthwiseKernel, strides = [1, 1], padding = 'valid', dataFormat, dilationRate) {\n return tidy(() => {\n if (dataFormat == null) {\n dataFormat = imageDataFormat();\n }\n checkDataFormat(dataFormat);\n let y = preprocessConv2DInput(x, dataFormat);\n if (x.rank !== 4) {\n throw new ValueError(`Input for depthwiseConv2d is required to be 4-D, but is instead ` +\n `${x.rank}-D`);\n }\n if (depthwiseKernel.rank !== 4) {\n throw new ValueError(`depthwiseKernel is required to be 4-D, but is instead ` +\n `${depthwiseKernel.rank}-D`);\n }\n y = tfc.depthwiseConv2d(y, depthwiseKernel, strides, padding === 'same' ? 'same' : 'valid', 'NHWC', dilationRate);\n if (dataFormat === 'channelsFirst') {\n y = tfc.transpose(y, [0, 3, 1, 2]);\n }\n return y;\n });\n}\nexport class DepthwiseConv2D extends BaseConv {\n constructor(args) {\n super(2, args);\n this.depthwiseKernel = null;\n this.depthMultiplier =\n args.depthMultiplier == null ? 
1 : args.depthMultiplier;\n this.depthwiseInitializer = getInitializer(args.depthwiseInitializer || this.DEFAULT_KERNEL_INITIALIZER);\n this.depthwiseConstraint = getConstraint(args.depthwiseConstraint);\n this.depthwiseRegularizer = getRegularizer(args.depthwiseRegularizer);\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n if (inputShape.length < 4) {\n throw new ValueError(`Inputs to DepthwiseConv2D should have rank 4. ` +\n `Received input shape: ${JSON.stringify(inputShape)}.`);\n }\n const channelAxis = this.dataFormat === 'channelsFirst' ? 1 : 3;\n if (inputShape[channelAxis] == null || inputShape[channelAxis] < 0) {\n throw new ValueError('The channel dimension of the inputs to DepthwiseConv2D should ' +\n `be defined, but is not (${inputShape[channelAxis]}).`);\n }\n const inputDim = inputShape[channelAxis];\n const depthwiseKernelShape = [\n this.kernelSize[0], this.kernelSize[1], inputDim, this.depthMultiplier\n ];\n this.depthwiseKernel = this.addWeight('depthwise_kernel', depthwiseKernelShape, null, this.depthwiseInitializer, this.depthwiseRegularizer, true, this.depthwiseConstraint);\n if (this.useBias) {\n this.bias = this.addWeight('bias', [inputDim * this.depthMultiplier], null, this.biasInitializer, this.biasRegularizer, true, this.biasConstraint);\n }\n else {\n this.bias = null;\n }\n this.built = true;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n inputs = getExactlyOneTensor(inputs);\n let outputs = depthwiseConv2d(inputs, this.depthwiseKernel.read(), this.strides, this.padding, this.dataFormat, null);\n // TODO(cais): Add support for dilation.\n if (this.useBias) {\n outputs = K.biasAdd(outputs, this.bias.read(), this.dataFormat);\n }\n if (this.activation != null) {\n outputs = this.activation.apply(outputs);\n }\n return outputs;\n });\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const rows = this.dataFormat === 'channelsFirst' ? inputShape[2] : inputShape[1];\n const cols = this.dataFormat === 'channelsFirst' ? 
inputShape[3] : inputShape[2];\n const outFilters = this.dataFormat === 'channelsFirst' ?\n inputShape[1] * this.depthMultiplier :\n inputShape[3] * this.depthMultiplier;\n const outRows = convOutputLength(rows, this.kernelSize[0], this.padding, this.strides[0]);\n const outCols = convOutputLength(cols, this.kernelSize[1], this.padding, this.strides[1]);\n if (this.dataFormat === 'channelsFirst') {\n return [inputShape[0], outFilters, outRows, outCols];\n }\n else {\n // In this case, assume 'channelsLast'.\n return [inputShape[0], outRows, outCols, outFilters];\n }\n }\n getConfig() {\n const config = super.getConfig();\n config['depthMultiplier'] = this.depthMultiplier;\n config['depthwiseInitializer'] =\n serializeInitializer(this.depthwiseInitializer);\n config['depthwiseRegularizer'] =\n serializeRegularizer(this.depthwiseRegularizer);\n config['depthwiseConstraint'] =\n serializeConstraint(this.depthwiseRegularizer);\n return config;\n }\n}\n/** @nocollapse */\nDepthwiseConv2D.className = 'DepthwiseConv2D';\nserialization.registerClass(DepthwiseConv2D);\n//# sourceMappingURL=convolutional_depthwise.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * TensorFlow.js Layers: Recurrent Neural Network Layers.\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { serialization, tidy, util } from '@tensorflow/tfjs-core';\nimport { getActivation, serializeActivation } from '../activations';\nimport * as K from '../backend/tfjs_backend';\nimport { nameScope } from '../common';\nimport { getConstraint, serializeConstraint } from '../constraints';\nimport { InputSpec, SymbolicTensor } from '../engine/topology';\nimport { Layer } from '../engine/topology';\nimport { AttributeError, NotImplementedError, ValueError } from '../errors';\nimport { getInitializer, Initializer, Ones, serializeInitializer } from '../initializers';\nimport { getRegularizer, serializeRegularizer } from '../regularizers';\nimport { assertPositiveInteger } from '../utils/generic_utils';\nimport * as math_utils from '../utils/math_utils';\nimport { getExactlyOneShape, getExactlyOneTensor, isArrayOfShapes } from '../utils/types_utils';\nimport { batchGetValue, batchSetValue } from '../variables';\nimport { deserialize } from './serialization';\n/**\n * Standardize `apply()` args to a single list of tensor inputs.\n *\n * When running a model loaded from file, the input tensors `initialState` and\n * `constants` are passed to `RNN.apply()` as part of `inputs` instead of the\n * dedicated kwargs fields. 
`inputs` consists of\n * `[inputs, initialState0, initialState1, ..., constant0, constant1]` in this\n * case.\n * This method makes sure that arguments are\n * separated and that `initialState` and `constants` are `Array`s of tensors\n * (or None).\n *\n * @param inputs Tensor or `Array` of tensors.\n * @param initialState Tensor or `Array` of tensors or `null`/`undefined`.\n * @param constants Tensor or `Array` of tensors or `null`/`undefined`.\n * @returns An object consisting of\n * inputs: A tensor.\n * initialState: `Array` of tensors or `null`.\n * constants: `Array` of tensors or `null`.\n * @throws ValueError, if `inputs` is an `Array` but either `initialState` or\n * `constants` is provided.\n */\nexport function standardizeArgs(inputs, initialState, constants, numConstants) {\n if (Array.isArray(inputs)) {\n if (initialState != null || constants != null) {\n throw new ValueError('When inputs is an array, neither initialState or constants ' +\n 'should be provided');\n }\n if (numConstants != null) {\n constants = inputs.slice(inputs.length - numConstants, inputs.length);\n inputs = inputs.slice(0, inputs.length - numConstants);\n }\n if (inputs.length > 1) {\n initialState = inputs.slice(1, inputs.length);\n }\n inputs = inputs[0];\n }\n function toListOrNull(x) {\n if (x == null || Array.isArray(x)) {\n return x;\n }\n else {\n return [x];\n }\n }\n initialState = toListOrNull(initialState);\n constants = toListOrNull(constants);\n return { inputs, initialState, constants };\n}\n/**\n * Iterates over the time dimension of a tensor.\n *\n * @param stepFunction RNN step function.\n * Parameters:\n * inputs: tensor with shape `[samples, ...]` (no time dimension),\n * representing input for the batch of samples at a certain time step.\n * states: an Array of tensors.\n * Returns:\n * outputs: tensor with shape `[samples, outputDim]` (no time dimension).\n * newStates: list of tensors, same length and shapes as `states`. The first\n * state in the list must be the output tensor at the previous timestep.\n * @param inputs Tensor of temporal data of shape `[samples, time, ...]` (at\n * least 3D).\n * @param initialStates Tensor with shape `[samples, outputDim]` (no time\n * dimension), containing the initial values of the states used in the step\n * function.\n * @param goBackwards If `true`, do the iteration over the time dimension in\n * reverse order and return the reversed sequence.\n * @param mask Binary tensor with shape `[sample, time, 1]`, with a zero for\n * every element that is masked.\n * @param constants An Array of constant values passed at each step.\n * @param unroll Whether to unroll the RNN or to use a symbolic loop. *Not*\n * applicable to this imperative deeplearn.js backend. Its value is ignored.\n * @param needPerStepOutputs Whether the per-step outputs are to be\n * concatenated into a single tensor and returned (as the second return\n * value). Default: `false`. This arg is included so that the relatively\n * expensive concatenation of the stepwise outputs can be omitted unless\n * the stepwise outputs need to be kept (e.g., for an LSTM layer of which\n * `returnSequence` is `true`.)\n * @returns An Array: `[lastOutput, outputs, newStates]`.\n * lastOutput: the lastest output of the RNN, of shape `[samples, ...]`.\n * outputs: tensor with shape `[samples, time, ...]` where each entry\n * `output[s, t]` is the output of the step function at time `t` for sample\n * `s`. This return value is provided if and only if the\n * `needPerStepOutputs` is set as `true`. 
If it is set as `false`, this\n * return value will be `undefined`.\n * newStates: Array of tensors, latest states returned by the step function,\n * of shape `(samples, ...)`.\n * @throws ValueError If input dimension is less than 3.\n *\n * TODO(nielsene): This needs to be tidy-ed.\n */\nexport function rnn(stepFunction, inputs, initialStates, goBackwards = false, mask, constants, unroll = false, needPerStepOutputs = false) {\n return tfc.tidy(() => {\n const ndim = inputs.shape.length;\n if (ndim < 3) {\n throw new ValueError(`Input should be at least 3D, but is ${ndim}D.`);\n }\n // Transpose to time-major, i.e., from [batch, time, ...] to [time, batch,\n // ...].\n const axes = [1, 0].concat(math_utils.range(2, ndim));\n inputs = tfc.transpose(inputs, axes);\n if (constants != null) {\n throw new NotImplementedError('The rnn() functoin of the deeplearn.js backend does not support ' +\n 'constants yet.');\n }\n // Porting Note: the unroll option is ignored by the imperative backend.\n if (unroll) {\n console.warn('Backend rnn(): the unroll = true option is not applicable to the ' +\n 'imperative deeplearn.js backend.');\n }\n if (mask != null) {\n mask = mask.asType('bool').asType('float32');\n if (mask.rank === ndim - 1) {\n mask = tfc.expandDims(mask, -1);\n }\n mask = tfc.transpose(mask, axes);\n }\n if (goBackwards) {\n inputs = tfc.reverse(inputs, 0);\n if (mask != null) {\n mask = tfc.reverse(mask, 0);\n }\n }\n // Porting Note: PyKeras with TensorFlow backend uses a symbolic loop\n // (tf.while_loop). But for the imperative deeplearn.js backend, we just\n // use the usual TypeScript control flow to iterate over the time steps in\n // the inputs.\n // Porting Note: PyKeras patches a \"_use_learning_phase\" attribute to\n // outputs.\n // This is not idiomatic in TypeScript. 
The info regarding whether we are\n // in a learning (i.e., training) phase for RNN is passed in a different\n // way.\n const perStepOutputs = [];\n let lastOutput;\n let states = initialStates;\n const timeSteps = inputs.shape[0];\n const perStepInputs = tfc.unstack(inputs);\n let perStepMasks;\n if (mask != null) {\n perStepMasks = tfc.unstack(mask);\n }\n for (let t = 0; t < timeSteps; ++t) {\n const currentInput = perStepInputs[t];\n const stepOutputs = tfc.tidy(() => stepFunction(currentInput, states));\n if (mask == null) {\n lastOutput = stepOutputs[0];\n states = stepOutputs[1];\n }\n else {\n const maskedOutputs = tfc.tidy(() => {\n const stepMask = perStepMasks[t];\n const negStepMask = tfc.onesLike(stepMask).sub(stepMask);\n // TODO(cais): Would tfc.where() be better for performance?\n const output = stepOutputs[0].mul(stepMask).add(states[0].mul(negStepMask));\n const newStates = states.map((state, i) => {\n return stepOutputs[1][i].mul(stepMask).add(state.mul(negStepMask));\n });\n return { output, newStates };\n });\n lastOutput = maskedOutputs.output;\n states = maskedOutputs.newStates;\n }\n if (needPerStepOutputs) {\n perStepOutputs.push(lastOutput);\n }\n }\n let outputs;\n if (needPerStepOutputs) {\n const axis = 1;\n outputs = tfc.stack(perStepOutputs, axis);\n }\n return [lastOutput, outputs, states];\n });\n}\nexport class RNN extends Layer {\n constructor(args) {\n super(args);\n let cell;\n if (args.cell == null) {\n throw new ValueError('cell property is missing for the constructor of RNN.');\n }\n else if (Array.isArray(args.cell)) {\n cell = new StackedRNNCells({ cells: args.cell });\n }\n else {\n cell = args.cell;\n }\n if (cell.stateSize == null) {\n throw new ValueError('The RNN cell should have an attribute `stateSize` (tuple of ' +\n 'integers, one integer per RNN state).');\n }\n this.cell = cell;\n this.returnSequences =\n args.returnSequences == null ? false : args.returnSequences;\n this.returnState = args.returnState == null ? false : args.returnState;\n this.goBackwards = args.goBackwards == null ? false : args.goBackwards;\n this._stateful = args.stateful == null ? false : args.stateful;\n this.unroll = args.unroll == null ? false : args.unroll;\n this.supportsMasking = true;\n this.inputSpec = [new InputSpec({ ndim: 3 })];\n this.stateSpec = null;\n this.states_ = null;\n // TODO(cais): Add constantsSpec and numConstants.\n this.numConstants = null;\n // TODO(cais): Look into the use of initial_state in the kwargs of the\n // constructor.\n this.keptStates = [];\n }\n // Porting Note: This is the equivalent of `RNN.states` property getter in\n // PyKeras.\n getStates() {\n if (this.states_ == null) {\n const numStates = Array.isArray(this.cell.stateSize) ? 
this.cell.stateSize.length : 1;\n return math_utils.range(0, numStates).map(x => null);\n }\n else {\n return this.states_;\n }\n }\n // Porting Note: This is the equivalent of the `RNN.states` property setter in\n // PyKeras.\n setStates(states) {\n this.states_ = states;\n }\n computeOutputShape(inputShape) {\n if (isArrayOfShapes(inputShape)) {\n inputShape = inputShape[0];\n }\n inputShape = inputShape;\n // TODO(cais): Remove the casting once stacked RNN cells become supported.\n let stateSize = this.cell.stateSize;\n if (!Array.isArray(stateSize)) {\n stateSize = [stateSize];\n }\n const outputDim = stateSize[0];\n let outputShape;\n if (this.returnSequences) {\n outputShape = [inputShape[0], inputShape[1], outputDim];\n }\n else {\n outputShape = [inputShape[0], outputDim];\n }\n if (this.returnState) {\n const stateShape = [];\n for (const dim of stateSize) {\n stateShape.push([inputShape[0], dim]);\n }\n return [outputShape].concat(stateShape);\n }\n else {\n return outputShape;\n }\n }\n computeMask(inputs, mask) {\n return tfc.tidy(() => {\n if (Array.isArray(mask)) {\n mask = mask[0];\n }\n const outputMask = this.returnSequences ? mask : null;\n if (this.returnState) {\n const stateMask = this.states.map(s => null);\n return [outputMask].concat(stateMask);\n }\n else {\n return outputMask;\n }\n });\n }\n /**\n * Get the current state tensors of the RNN.\n *\n * If the state hasn't been set, return an array of `null`s of the correct\n * length.\n */\n get states() {\n if (this.states_ == null) {\n const numStates = Array.isArray(this.cell.stateSize) ? this.cell.stateSize.length : 1;\n const output = [];\n for (let i = 0; i < numStates; ++i) {\n output.push(null);\n }\n return output;\n }\n else {\n return this.states_;\n }\n }\n set states(s) {\n this.states_ = s;\n }\n build(inputShape) {\n // Note inputShape will be an Array of Shapes of initial states and\n // constants if these are passed in apply().\n const constantShape = null;\n if (this.numConstants != null) {\n throw new NotImplementedError('Constants support is not implemented in RNN yet.');\n }\n if (isArrayOfShapes(inputShape)) {\n inputShape = inputShape[0];\n }\n inputShape = inputShape;\n const batchSize = this.stateful ? inputShape[0] : null;\n const inputDim = inputShape.slice(2);\n this.inputSpec[0] = new InputSpec({ shape: [batchSize, null, ...inputDim] });\n // Allow cell (if RNNCell Layer) to build before we set or validate\n // stateSpec.\n const stepInputShape = [inputShape[0]].concat(inputShape.slice(2));\n if (constantShape != null) {\n throw new NotImplementedError('Constants support is not implemented in RNN yet.');\n }\n else {\n this.cell.build(stepInputShape);\n }\n // Set or validate stateSpec.\n let stateSize;\n if (Array.isArray(this.cell.stateSize)) {\n stateSize = this.cell.stateSize;\n }\n else {\n stateSize = [this.cell.stateSize];\n }\n if (this.stateSpec != null) {\n if (!util.arraysEqual(this.stateSpec.map(spec => spec.shape[spec.shape.length - 1]), stateSize)) {\n throw new ValueError(`An initialState was passed that is not compatible with ` +\n `cell.stateSize. 
Received stateSpec=${this.stateSpec}; ` +\n `However cell.stateSize is ${this.cell.stateSize}`);\n }\n }\n else {\n this.stateSpec =\n stateSize.map(dim => new InputSpec({ shape: [null, dim] }));\n }\n if (this.stateful) {\n this.resetStates();\n }\n }\n /**\n * Reset the state tensors of the RNN.\n *\n * If the `states` argument is `undefined` or `null`, will set the\n * state tensor(s) of the RNN to all-zero tensors of the appropriate\n * shape(s).\n *\n * If `states` is provided, will set the state tensors of the RNN to its\n * value.\n *\n * @param states Optional externally-provided initial states.\n * @param training Whether this call is done during training. For stateful\n * RNNs, this affects whether the old states are kept or discarded. In\n * particular, if `training` is `true`, the old states will be kept so\n * that subsequent backpropgataion through time (BPTT) may work properly.\n * Else, the old states will be discarded.\n */\n resetStates(states, training = false) {\n tidy(() => {\n if (!this.stateful) {\n throw new AttributeError('Cannot call resetStates() on an RNN Layer that is not stateful.');\n }\n const batchSize = this.inputSpec[0].shape[0];\n if (batchSize == null) {\n throw new ValueError('If an RNN is stateful, it needs to know its batch size. Specify ' +\n 'the batch size of your input tensors: \\n' +\n '- If using a Sequential model, specify the batch size by ' +\n 'passing a `batchInputShape` option to your first layer.\\n' +\n '- If using the functional API, specify the batch size by ' +\n 'passing a `batchShape` option to your Input layer.');\n }\n // Initialize state if null.\n if (this.states_ == null) {\n if (Array.isArray(this.cell.stateSize)) {\n this.states_ =\n this.cell.stateSize.map(dim => tfc.zeros([batchSize, dim]));\n }\n else {\n this.states_ = [tfc.zeros([batchSize, this.cell.stateSize])];\n }\n }\n else if (states == null) {\n // Dispose old state tensors.\n tfc.dispose(this.states_);\n // For stateful RNNs, fully dispose kept old states.\n if (this.keptStates != null) {\n tfc.dispose(this.keptStates);\n this.keptStates = [];\n }\n if (Array.isArray(this.cell.stateSize)) {\n this.states_ =\n this.cell.stateSize.map(dim => tfc.zeros([batchSize, dim]));\n }\n else {\n this.states_[0] = tfc.zeros([batchSize, this.cell.stateSize]);\n }\n }\n else {\n if (!Array.isArray(states)) {\n states = [states];\n }\n if (states.length !== this.states_.length) {\n throw new ValueError(`Layer ${this.name} expects ${this.states_.length} state(s), ` +\n `but it received ${states.length} state value(s). Input ` +\n `received: ${states}`);\n }\n if (training === true) {\n // Store old state tensors for complete disposal later, i.e., during\n // the next no-arg call to this method. 
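// A usage sketch (assumption: the public `tf.layers` API exposed by the bundle
// that contains this file) of how a stateful recurrent layer is normally
// combined with resetStates():
//
//   const layer = tf.layers.lstm({
//     units: 8,
//     stateful: true,
//     batchInputShape: [1, 10, 4],   // stateful RNNs require a fixed batch size
//   });
//   // ...run consecutive chunks of one long sequence through a model that
//   // contains `layer`, then clear the carried states before a new sequence:
//   layer.resetStates();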
We do not dispose the old\n // states immediately because that BPTT (among other things) require\n // them.\n this.keptStates.push(this.states_.slice());\n }\n else {\n tfc.dispose(this.states_);\n }\n for (let index = 0; index < this.states_.length; ++index) {\n const value = states[index];\n const dim = Array.isArray(this.cell.stateSize) ?\n this.cell.stateSize[index] :\n this.cell.stateSize;\n const expectedShape = [batchSize, dim];\n if (!util.arraysEqual(value.shape, expectedShape)) {\n throw new ValueError(`State ${index} is incompatible with layer ${this.name}: ` +\n `expected shape=${expectedShape}, received shape=${value.shape}`);\n }\n this.states_[index] = value;\n }\n }\n this.states_ = this.states_.map(state => tfc.keep(state.clone()));\n });\n }\n apply(inputs, kwargs) {\n // TODO(cais): Figure out whether initialState is in kwargs or inputs.\n let initialState = kwargs == null ? null : kwargs['initialState'];\n let constants = kwargs == null ? null : kwargs['constants'];\n if (kwargs == null) {\n kwargs = {};\n }\n const standardized = standardizeArgs(inputs, initialState, constants, this.numConstants);\n inputs = standardized.inputs;\n initialState = standardized.initialState;\n constants = standardized.constants;\n // If any of `initial_state` or `constants` are specified and are\n // `tf.SymbolicTensor`s, then add them to the inputs and temporarily modify\n // the input_spec to include them.\n let additionalInputs = [];\n let additionalSpecs = [];\n if (initialState != null) {\n kwargs['initialState'] = initialState;\n additionalInputs = additionalInputs.concat(initialState);\n this.stateSpec = [];\n for (const state of initialState) {\n this.stateSpec.push(new InputSpec({ shape: state.shape }));\n }\n // TODO(cais): Use the following instead.\n // this.stateSpec = initialState.map(state => new InputSpec({shape:\n // state.shape}));\n additionalSpecs = additionalSpecs.concat(this.stateSpec);\n }\n if (constants != null) {\n kwargs['constants'] = constants;\n additionalInputs = additionalInputs.concat(constants);\n // TODO(cais): Add this.constantsSpec.\n this.numConstants = constants.length;\n }\n const isTensor = additionalInputs[0] instanceof SymbolicTensor;\n if (isTensor) {\n // Compute full input spec, including state and constants.\n const fullInput = [inputs].concat(additionalInputs);\n const fullInputSpec = this.inputSpec.concat(additionalSpecs);\n // Perform the call with temporarily replaced inputSpec.\n const originalInputSpec = this.inputSpec;\n this.inputSpec = fullInputSpec;\n const output = super.apply(fullInput, kwargs);\n this.inputSpec = originalInputSpec;\n return output;\n }\n else {\n return super.apply(inputs, kwargs);\n }\n }\n // tslint:disable-next-line:no-any\n call(inputs, kwargs) {\n // Input shape: `[samples, time (padded with zeros), input_dim]`.\n // Note that the .build() method of subclasses **must** define\n // this.inputSpec and this.stateSpec owith complete input shapes.\n return tidy(() => {\n const mask = kwargs == null ? null : kwargs['mask'];\n const training = kwargs == null ? null : kwargs['training'];\n let initialState = kwargs == null ? null : kwargs['initialState'];\n inputs = getExactlyOneTensor(inputs);\n if (initialState == null) {\n if (this.stateful) {\n initialState = this.states_;\n }\n else {\n initialState = this.getInitialState(inputs);\n }\n }\n const numStates = Array.isArray(this.cell.stateSize) ? 
this.cell.stateSize.length : 1;\n if (initialState.length !== numStates) {\n throw new ValueError(`RNN Layer has ${numStates} state(s) but was passed ` +\n `${initialState.length} initial state(s).`);\n }\n if (this.unroll) {\n console.warn('Ignoring unroll = true for RNN layer, due to imperative backend.');\n }\n const cellCallKwargs = { training };\n // TODO(cais): Add support for constants.\n const step = (inputs, states) => {\n // `inputs` and `states` are concatenated to form a single `Array` of\n // `tf.Tensor`s as the input to `cell.call()`.\n const outputs = this.cell.call([inputs].concat(states), cellCallKwargs);\n // Marshall the return value into output and new states.\n return [outputs[0], outputs.slice(1)];\n };\n // TODO(cais): Add support for constants.\n const rnnOutputs = rnn(step, inputs, initialState, this.goBackwards, mask, null, this.unroll, this.returnSequences);\n const lastOutput = rnnOutputs[0];\n const outputs = rnnOutputs[1];\n const states = rnnOutputs[2];\n if (this.stateful) {\n this.resetStates(states, training);\n }\n const output = this.returnSequences ? outputs : lastOutput;\n // TODO(cais): Porperty set learning phase flag.\n if (this.returnState) {\n return [output].concat(states);\n }\n else {\n return output;\n }\n });\n }\n getInitialState(inputs) {\n return tidy(() => {\n // Build an all-zero tensor of shape [samples, outputDim].\n // [Samples, timeSteps, inputDim].\n let initialState = tfc.zeros(inputs.shape);\n // [Samples].\n initialState = tfc.sum(initialState, [1, 2]);\n initialState = K.expandDims(initialState); // [Samples, 1].\n if (Array.isArray(this.cell.stateSize)) {\n return this.cell.stateSize.map(dim => dim > 1 ? K.tile(initialState, [1, dim]) : initialState);\n }\n else {\n return this.cell.stateSize > 1 ?\n [K.tile(initialState, [1, this.cell.stateSize])] :\n [initialState];\n }\n });\n }\n get trainableWeights() {\n if (!this.trainable) {\n return [];\n }\n // Porting Note: In TypeScript, `this` is always an instance of `Layer`.\n return this.cell.trainableWeights;\n }\n get nonTrainableWeights() {\n // Porting Note: In TypeScript, `this` is always an instance of `Layer`.\n if (!this.trainable) {\n return this.cell.weights;\n }\n return this.cell.nonTrainableWeights;\n }\n setFastWeightInitDuringBuild(value) {\n super.setFastWeightInitDuringBuild(value);\n if (this.cell != null) {\n this.cell.setFastWeightInitDuringBuild(value);\n }\n }\n getConfig() {\n const baseConfig = super.getConfig();\n const config = {\n returnSequences: this.returnSequences,\n returnState: this.returnState,\n goBackwards: this.goBackwards,\n stateful: this.stateful,\n unroll: this.unroll,\n };\n if (this.numConstants != null) {\n config['numConstants'] = this.numConstants;\n }\n const cellConfig = this.cell.getConfig();\n if (this.getClassName() === RNN.className) {\n config['cell'] = {\n 'className': this.cell.getClassName(),\n 'config': cellConfig,\n };\n }\n // this order is necessary, to prevent cell name from replacing layer name\n return Object.assign({}, cellConfig, baseConfig, config);\n }\n /** @nocollapse */\n static fromConfig(cls, config, customObjects = {}) {\n const cellConfig = config['cell'];\n const cell = deserialize(cellConfig, customObjects);\n return new cls(Object.assign(config, { cell }));\n }\n}\n/** @nocollapse */\nRNN.className = 'RNN';\nserialization.registerClass(RNN);\n// Porting Note: This is a common parent class for RNN cells. There is no\n// equivalent of this in PyKeras. 
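// A usage sketch (assumption: the public `tf.layers` factory functions of the
// bundle that contains this file). The RNN wrapper above is combined with the
// concrete cells defined below; an array of cells is wrapped in
// StackedRNNCells automatically by the RNN constructor:
//
//   const layer = tf.layers.rnn({
//     cell: [tf.layers.lstmCell({units: 8}), tf.layers.lstmCell({units: 8})],
//     returnSequences: true,
//     inputShape: [10, 4],           // [timeSteps, inputDim]
//   });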
Having a common parent class forgoes the\n// need for `has_attr(cell, ...)` checks or its TypeScript equivalent.\n/**\n * An RNNCell layer.\n *\n * @doc {heading: 'Layers', subheading: 'Classes'}\n */\nexport class RNNCell extends Layer {\n}\nexport class SimpleRNNCell extends RNNCell {\n constructor(args) {\n super(args);\n this.DEFAULT_ACTIVATION = 'tanh';\n this.DEFAULT_KERNEL_INITIALIZER = 'glorotNormal';\n this.DEFAULT_RECURRENT_INITIALIZER = 'orthogonal';\n this.DEFAULT_BIAS_INITIALIZER = 'zeros';\n this.units = args.units;\n assertPositiveInteger(this.units, `units`);\n this.activation = getActivation(args.activation == null ? this.DEFAULT_ACTIVATION : args.activation);\n this.useBias = args.useBias == null ? true : args.useBias;\n this.kernelInitializer = getInitializer(args.kernelInitializer || this.DEFAULT_KERNEL_INITIALIZER);\n this.recurrentInitializer = getInitializer(args.recurrentInitializer || this.DEFAULT_RECURRENT_INITIALIZER);\n this.biasInitializer =\n getInitializer(args.biasInitializer || this.DEFAULT_BIAS_INITIALIZER);\n this.kernelRegularizer = getRegularizer(args.kernelRegularizer);\n this.recurrentRegularizer = getRegularizer(args.recurrentRegularizer);\n this.biasRegularizer = getRegularizer(args.biasRegularizer);\n this.kernelConstraint = getConstraint(args.kernelConstraint);\n this.recurrentConstraint = getConstraint(args.recurrentConstraint);\n this.biasConstraint = getConstraint(args.biasConstraint);\n this.dropout = math_utils.min([1, math_utils.max([0, args.dropout == null ? 0 : args.dropout])]);\n this.recurrentDropout = math_utils.min([\n 1,\n math_utils.max([0, args.recurrentDropout == null ? 0 : args.recurrentDropout])\n ]);\n this.stateSize = this.units;\n this.dropoutMask = null;\n this.recurrentDropoutMask = null;\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n // TODO(cais): Use regularizer.\n this.kernel = this.addWeight('kernel', [inputShape[inputShape.length - 1], this.units], null, this.kernelInitializer, this.kernelRegularizer, true, this.kernelConstraint);\n this.recurrentKernel = this.addWeight('recurrent_kernel', [this.units, this.units], null, this.recurrentInitializer, this.recurrentRegularizer, true, this.recurrentConstraint);\n if (this.useBias) {\n this.bias = this.addWeight('bias', [this.units], null, this.biasInitializer, this.biasRegularizer, true, this.biasConstraint);\n }\n else {\n this.bias = null;\n }\n this.built = true;\n }\n // Porting Note: PyKeras' equivalent of this method takes two tensor inputs:\n // `inputs` and `states`. Here, the two tensors are combined into an\n // `Tensor[]` Array as the first input argument.\n // Similarly, PyKeras' equivalent of this method returns two values:\n // `output` and `[output]`. Here the two are combined into one length-2\n // `Tensor[]`, consisting of `output` repeated.\n call(inputs, kwargs) {\n return tidy(() => {\n inputs = inputs;\n if (inputs.length !== 2) {\n throw new ValueError(`SimpleRNNCell expects 2 input Tensors, got ${inputs.length}.`);\n }\n let prevOutput = inputs[1];\n inputs = inputs[0];\n const training = kwargs['training'] == null ? 
false : kwargs['training'];\n if (0 < this.dropout && this.dropout < 1 && this.dropoutMask == null) {\n this.dropoutMask = generateDropoutMask({\n ones: () => tfc.onesLike(inputs),\n rate: this.dropout,\n training\n });\n }\n if (0 < this.recurrentDropout && this.recurrentDropout < 1 &&\n this.recurrentDropoutMask == null) {\n this.recurrentDropoutMask = generateDropoutMask({\n ones: () => tfc.onesLike(prevOutput),\n rate: this.recurrentDropout,\n training\n });\n }\n let h;\n const dpMask = this.dropoutMask;\n const recDpMask = this.recurrentDropoutMask;\n if (dpMask != null) {\n h = K.dot(tfc.mul(inputs, dpMask), this.kernel.read());\n }\n else {\n h = K.dot(inputs, this.kernel.read());\n }\n if (this.bias != null) {\n h = K.biasAdd(h, this.bias.read());\n }\n if (recDpMask != null) {\n prevOutput = tfc.mul(prevOutput, recDpMask);\n }\n let output = tfc.add(h, K.dot(prevOutput, this.recurrentKernel.read()));\n if (this.activation != null) {\n output = this.activation.apply(output);\n }\n // TODO(cais): Properly set learning phase on output tensor?\n return [output, output];\n });\n }\n getConfig() {\n const baseConfig = super.getConfig();\n const config = {\n units: this.units,\n activation: serializeActivation(this.activation),\n useBias: this.useBias,\n kernelInitializer: serializeInitializer(this.kernelInitializer),\n recurrentInitializer: serializeInitializer(this.recurrentInitializer),\n biasInitializer: serializeInitializer(this.biasInitializer),\n kernelRegularizer: serializeRegularizer(this.kernelRegularizer),\n recurrentRegularizer: serializeRegularizer(this.recurrentRegularizer),\n biasRegularizer: serializeRegularizer(this.biasRegularizer),\n activityRegularizer: serializeRegularizer(this.activityRegularizer),\n kernelConstraint: serializeConstraint(this.kernelConstraint),\n recurrentConstraint: serializeConstraint(this.recurrentConstraint),\n biasConstraint: serializeConstraint(this.biasConstraint),\n dropout: this.dropout,\n recurrentDropout: this.recurrentDropout,\n };\n return Object.assign({}, baseConfig, config);\n }\n}\n/** @nocollapse */\nSimpleRNNCell.className = 'SimpleRNNCell';\nserialization.registerClass(SimpleRNNCell);\nexport class SimpleRNN extends RNN {\n constructor(args) {\n args.cell = new SimpleRNNCell(args);\n super(args);\n // TODO(cais): Add activityRegularizer.\n }\n call(inputs, kwargs) {\n return tidy(() => {\n if (this.cell.dropoutMask != null) {\n tfc.dispose(this.cell.dropoutMask);\n this.cell.dropoutMask = null;\n }\n if (this.cell.recurrentDropoutMask != null) {\n tfc.dispose(this.cell.recurrentDropoutMask);\n this.cell.recurrentDropoutMask = null;\n }\n const mask = kwargs == null ? null : kwargs['mask'];\n const training = kwargs == null ? null : kwargs['training'];\n const initialState = kwargs == null ? 
null : kwargs['initialState'];\n return super.call(inputs, { mask, training, initialState });\n });\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls(config);\n }\n}\n/** @nocollapse */\nSimpleRNN.className = 'SimpleRNN';\nserialization.registerClass(SimpleRNN);\nexport class GRUCell extends RNNCell {\n constructor(args) {\n super(args);\n this.DEFAULT_ACTIVATION = 'tanh';\n this.DEFAULT_RECURRENT_ACTIVATION = 'hardSigmoid';\n this.DEFAULT_KERNEL_INITIALIZER = 'glorotNormal';\n this.DEFAULT_RECURRENT_INITIALIZER = 'orthogonal';\n this.DEFAULT_BIAS_INITIALIZER = 'zeros';\n if (args.resetAfter) {\n throw new ValueError(`GRUCell does not support reset_after parameter set to true.`);\n }\n this.units = args.units;\n assertPositiveInteger(this.units, 'units');\n this.activation = getActivation(args.activation === undefined ? this.DEFAULT_ACTIVATION :\n args.activation);\n this.recurrentActivation = getActivation(args.recurrentActivation === undefined ?\n this.DEFAULT_RECURRENT_ACTIVATION :\n args.recurrentActivation);\n this.useBias = args.useBias == null ? true : args.useBias;\n this.kernelInitializer = getInitializer(args.kernelInitializer || this.DEFAULT_KERNEL_INITIALIZER);\n this.recurrentInitializer = getInitializer(args.recurrentInitializer || this.DEFAULT_RECURRENT_INITIALIZER);\n this.biasInitializer =\n getInitializer(args.biasInitializer || this.DEFAULT_BIAS_INITIALIZER);\n this.kernelRegularizer = getRegularizer(args.kernelRegularizer);\n this.recurrentRegularizer = getRegularizer(args.recurrentRegularizer);\n this.biasRegularizer = getRegularizer(args.biasRegularizer);\n this.kernelConstraint = getConstraint(args.kernelConstraint);\n this.recurrentConstraint = getConstraint(args.recurrentConstraint);\n this.biasConstraint = getConstraint(args.biasConstraint);\n this.dropout = math_utils.min([1, math_utils.max([0, args.dropout == null ? 0 : args.dropout])]);\n this.recurrentDropout = math_utils.min([\n 1,\n math_utils.max([0, args.recurrentDropout == null ? 0 : args.recurrentDropout])\n ]);\n this.implementation = args.implementation;\n this.stateSize = this.units;\n this.dropoutMask = null;\n this.recurrentDropoutMask = null;\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const inputDim = inputShape[inputShape.length - 1];\n this.kernel = this.addWeight('kernel', [inputDim, this.units * 3], null, this.kernelInitializer, this.kernelRegularizer, true, this.kernelConstraint);\n this.recurrentKernel = this.addWeight('recurrent_kernel', [this.units, this.units * 3], null, this.recurrentInitializer, this.recurrentRegularizer, true, this.recurrentConstraint);\n if (this.useBias) {\n this.bias = this.addWeight('bias', [this.units * 3], null, this.biasInitializer, this.biasRegularizer, true, this.biasConstraint);\n }\n else {\n this.bias = null;\n }\n // Porting Notes: Unlike the PyKeras implementation, we perform slicing\n // of the weights and bias in the call() method, at execution time.\n this.built = true;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n inputs = inputs;\n if (inputs.length !== 2) {\n throw new ValueError(`GRUCell expects 2 input Tensors (inputs, h, c), got ` +\n `${inputs.length}.`);\n }\n const training = kwargs['training'] == null ? 
false : kwargs['training'];\n let hTMinus1 = inputs[1]; // Previous memory state.\n inputs = inputs[0];\n // Note: For superior performance, TensorFlow.js always uses\n // implementation 2, regardless of the actual value of\n // config.implementation.\n if (0 < this.dropout && this.dropout < 1 && this.dropoutMask == null) {\n this.dropoutMask = generateDropoutMask({\n ones: () => tfc.onesLike(inputs),\n rate: this.dropout,\n training,\n count: 3\n });\n }\n if (0 < this.recurrentDropout && this.recurrentDropout < 1 &&\n this.recurrentDropoutMask == null) {\n this.recurrentDropoutMask = generateDropoutMask({\n ones: () => tfc.onesLike(hTMinus1),\n rate: this.recurrentDropout,\n training,\n count: 3\n });\n }\n const dpMask = this.dropoutMask;\n const recDpMask = this.recurrentDropoutMask;\n let z;\n let r;\n let hh;\n if (0 < this.dropout && this.dropout < 1) {\n inputs = tfc.mul(inputs, dpMask[0]);\n }\n let matrixX = K.dot(inputs, this.kernel.read());\n if (this.useBias) {\n matrixX = K.biasAdd(matrixX, this.bias.read());\n }\n if (0 < this.recurrentDropout && this.recurrentDropout < 1) {\n hTMinus1 = tfc.mul(hTMinus1, recDpMask[0]);\n }\n const recurrentKernelValue = this.recurrentKernel.read();\n const [rk1, rk2] = tfc.split(recurrentKernelValue, [2 * this.units, this.units], recurrentKernelValue.rank - 1);\n const matrixInner = K.dot(hTMinus1, rk1);\n const [xZ, xR, xH] = tfc.split(matrixX, 3, matrixX.rank - 1);\n const [recurrentZ, recurrentR] = tfc.split(matrixInner, 2, matrixInner.rank - 1);\n z = this.recurrentActivation.apply(tfc.add(xZ, recurrentZ));\n r = this.recurrentActivation.apply(tfc.add(xR, recurrentR));\n const recurrentH = K.dot(tfc.mul(r, hTMinus1), rk2);\n hh = this.activation.apply(tfc.add(xH, recurrentH));\n const h = tfc.add(tfc.mul(z, hTMinus1), tfc.mul(tfc.add(1, tfc.neg(z)), hh));\n // TODO(cais): Add use_learning_phase flag properly.\n return [h, h];\n });\n }\n getConfig() {\n const baseConfig = super.getConfig();\n const config = {\n units: this.units,\n activation: serializeActivation(this.activation),\n recurrentActivation: serializeActivation(this.recurrentActivation),\n useBias: this.useBias,\n kernelInitializer: serializeInitializer(this.kernelInitializer),\n recurrentInitializer: serializeInitializer(this.recurrentInitializer),\n biasInitializer: serializeInitializer(this.biasInitializer),\n kernelRegularizer: serializeRegularizer(this.kernelRegularizer),\n recurrentRegularizer: serializeRegularizer(this.recurrentRegularizer),\n biasRegularizer: serializeRegularizer(this.biasRegularizer),\n activityRegularizer: serializeRegularizer(this.activityRegularizer),\n kernelConstraint: serializeConstraint(this.kernelConstraint),\n recurrentConstraint: serializeConstraint(this.recurrentConstraint),\n biasConstraint: serializeConstraint(this.biasConstraint),\n dropout: this.dropout,\n recurrentDropout: this.recurrentDropout,\n implementation: this.implementation,\n resetAfter: false\n };\n return Object.assign({}, baseConfig, config);\n }\n}\n/** @nocollapse */\nGRUCell.className = 'GRUCell';\nserialization.registerClass(GRUCell);\nexport class GRU extends RNN {\n constructor(args) {\n if (args.implementation === 0) {\n console.warn('`implementation=0` has been deprecated, and now defaults to ' +\n '`implementation=1`. 
Please update your layer call.');\n }\n args.cell = new GRUCell(args);\n super(args);\n // TODO(cais): Add activityRegularizer.\n }\n call(inputs, kwargs) {\n return tidy(() => {\n if (this.cell.dropoutMask != null) {\n tfc.dispose(this.cell.dropoutMask);\n this.cell.dropoutMask = null;\n }\n if (this.cell.recurrentDropoutMask != null) {\n tfc.dispose(this.cell.recurrentDropoutMask);\n this.cell.recurrentDropoutMask = null;\n }\n const mask = kwargs == null ? null : kwargs['mask'];\n const training = kwargs == null ? null : kwargs['training'];\n const initialState = kwargs == null ? null : kwargs['initialState'];\n return super.call(inputs, { mask, training, initialState });\n });\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n if (config['implmentation'] === 0) {\n config['implementation'] = 1;\n }\n return new cls(config);\n }\n}\n/** @nocollapse */\nGRU.className = 'GRU';\nserialization.registerClass(GRU);\nexport class LSTMCell extends RNNCell {\n constructor(args) {\n super(args);\n this.DEFAULT_ACTIVATION = 'tanh';\n this.DEFAULT_RECURRENT_ACTIVATION = 'hardSigmoid';\n this.DEFAULT_KERNEL_INITIALIZER = 'glorotNormal';\n this.DEFAULT_RECURRENT_INITIALIZER = 'orthogonal';\n this.DEFAULT_BIAS_INITIALIZER = 'zeros';\n this.units = args.units;\n assertPositiveInteger(this.units, 'units');\n this.activation = getActivation(args.activation === undefined ? this.DEFAULT_ACTIVATION :\n args.activation);\n this.recurrentActivation = getActivation(args.recurrentActivation === undefined ?\n this.DEFAULT_RECURRENT_ACTIVATION :\n args.recurrentActivation);\n this.useBias = args.useBias == null ? true : args.useBias;\n this.kernelInitializer = getInitializer(args.kernelInitializer || this.DEFAULT_KERNEL_INITIALIZER);\n this.recurrentInitializer = getInitializer(args.recurrentInitializer || this.DEFAULT_RECURRENT_INITIALIZER);\n this.biasInitializer =\n getInitializer(args.biasInitializer || this.DEFAULT_BIAS_INITIALIZER);\n this.unitForgetBias = args.unitForgetBias;\n this.kernelRegularizer = getRegularizer(args.kernelRegularizer);\n this.recurrentRegularizer = getRegularizer(args.recurrentRegularizer);\n this.biasRegularizer = getRegularizer(args.biasRegularizer);\n this.kernelConstraint = getConstraint(args.kernelConstraint);\n this.recurrentConstraint = getConstraint(args.recurrentConstraint);\n this.biasConstraint = getConstraint(args.biasConstraint);\n this.dropout = math_utils.min([1, math_utils.max([0, args.dropout == null ? 0 : args.dropout])]);\n this.recurrentDropout = math_utils.min([\n 1,\n math_utils.max([0, args.recurrentDropout == null ? 
0 : args.recurrentDropout])\n ]);\n this.implementation = args.implementation;\n this.stateSize = [this.units, this.units];\n this.dropoutMask = null;\n this.recurrentDropoutMask = null;\n }\n build(inputShape) {\n var _a;\n inputShape = getExactlyOneShape(inputShape);\n const inputDim = inputShape[inputShape.length - 1];\n this.kernel = this.addWeight('kernel', [inputDim, this.units * 4], null, this.kernelInitializer, this.kernelRegularizer, true, this.kernelConstraint);\n this.recurrentKernel = this.addWeight('recurrent_kernel', [this.units, this.units * 4], null, this.recurrentInitializer, this.recurrentRegularizer, true, this.recurrentConstraint);\n let biasInitializer;\n if (this.useBias) {\n if (this.unitForgetBias) {\n const capturedBiasInit = this.biasInitializer;\n const capturedUnits = this.units;\n biasInitializer = new (_a = class CustomInit extends Initializer {\n apply(shape, dtype) {\n // TODO(cais): More informative variable names?\n const bI = capturedBiasInit.apply([capturedUnits]);\n const bF = (new Ones()).apply([capturedUnits]);\n const bCAndH = capturedBiasInit.apply([capturedUnits * 2]);\n return K.concatAlongFirstAxis(K.concatAlongFirstAxis(bI, bF), bCAndH);\n }\n },\n /** @nocollapse */\n _a.className = 'CustomInit',\n _a)();\n }\n else {\n biasInitializer = this.biasInitializer;\n }\n this.bias = this.addWeight('bias', [this.units * 4], null, biasInitializer, this.biasRegularizer, true, this.biasConstraint);\n }\n else {\n this.bias = null;\n }\n // Porting Notes: Unlike the PyKeras implementation, we perform slicing\n // of the weights and bias in the call() method, at execution time.\n this.built = true;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n const training = kwargs['training'] == null ? false : kwargs['training'];\n inputs = inputs;\n if (inputs.length !== 3) {\n throw new ValueError(`LSTMCell expects 3 input Tensors (inputs, h, c), got ` +\n `${inputs.length}.`);\n }\n let hTMinus1 = inputs[1]; // Previous memory state.\n const cTMinus1 = inputs[2]; // Previous carry state.\n inputs = inputs[0];\n if (0 < this.dropout && this.dropout < 1 && this.dropoutMask == null) {\n this.dropoutMask = generateDropoutMask({\n ones: () => tfc.onesLike(inputs),\n rate: this.dropout,\n training,\n count: 4\n });\n }\n if (0 < this.recurrentDropout && this.recurrentDropout < 1 &&\n this.recurrentDropoutMask == null) {\n this.recurrentDropoutMask = generateDropoutMask({\n ones: () => tfc.onesLike(hTMinus1),\n rate: this.recurrentDropout,\n training,\n count: 4\n });\n }\n const dpMask = this.dropoutMask;\n const recDpMask = this.recurrentDropoutMask;\n // Note: For superior performance, TensorFlow.js always uses\n // implementation 2 regardless of the actual value of\n // config.implementation.\n let i;\n let f;\n let c;\n let o;\n if (0 < this.dropout && this.dropout < 1) {\n inputs = tfc.mul(inputs, dpMask[0]);\n }\n let z = K.dot(inputs, this.kernel.read());\n if (0 < this.recurrentDropout && this.recurrentDropout < 1) {\n hTMinus1 = tfc.mul(hTMinus1, recDpMask[0]);\n }\n z = tfc.add(z, K.dot(hTMinus1, this.recurrentKernel.read()));\n if (this.useBias) {\n z = K.biasAdd(z, this.bias.read());\n }\n const [z0, z1, z2, z3] = tfc.split(z, 4, z.rank - 1);\n i = this.recurrentActivation.apply(z0);\n f = this.recurrentActivation.apply(z1);\n c = tfc.add(tfc.mul(f, cTMinus1), tfc.mul(i, this.activation.apply(z2)));\n o = this.recurrentActivation.apply(z3);\n const h = tfc.mul(o, this.activation.apply(c));\n // TODO(cais): Add use_learning_phase flag properly.\n return 
[h, h, c];\n });\n }\n getConfig() {\n const baseConfig = super.getConfig();\n const config = {\n units: this.units,\n activation: serializeActivation(this.activation),\n recurrentActivation: serializeActivation(this.recurrentActivation),\n useBias: this.useBias,\n kernelInitializer: serializeInitializer(this.kernelInitializer),\n recurrentInitializer: serializeInitializer(this.recurrentInitializer),\n biasInitializer: serializeInitializer(this.biasInitializer),\n unitForgetBias: this.unitForgetBias,\n kernelRegularizer: serializeRegularizer(this.kernelRegularizer),\n recurrentRegularizer: serializeRegularizer(this.recurrentRegularizer),\n biasRegularizer: serializeRegularizer(this.biasRegularizer),\n activityRegularizer: serializeRegularizer(this.activityRegularizer),\n kernelConstraint: serializeConstraint(this.kernelConstraint),\n recurrentConstraint: serializeConstraint(this.recurrentConstraint),\n biasConstraint: serializeConstraint(this.biasConstraint),\n dropout: this.dropout,\n recurrentDropout: this.recurrentDropout,\n implementation: this.implementation,\n };\n return Object.assign({}, baseConfig, config);\n }\n}\n/** @nocollapse */\nLSTMCell.className = 'LSTMCell';\nserialization.registerClass(LSTMCell);\nexport class LSTM extends RNN {\n constructor(args) {\n if (args.implementation === 0) {\n console.warn('`implementation=0` has been deprecated, and now defaults to ' +\n '`implementation=1`. Please update your layer call.');\n }\n args.cell = new LSTMCell(args);\n super(args);\n // TODO(cais): Add activityRegularizer.\n }\n call(inputs, kwargs) {\n return tidy(() => {\n if (this.cell.dropoutMask != null) {\n tfc.dispose(this.cell.dropoutMask);\n this.cell.dropoutMask = null;\n }\n if (this.cell.recurrentDropoutMask != null) {\n tfc.dispose(this.cell.recurrentDropoutMask);\n this.cell.recurrentDropoutMask = null;\n }\n const mask = kwargs == null ? null : kwargs['mask'];\n const training = kwargs == null ? null : kwargs['training'];\n const initialState = kwargs == null ? null : kwargs['initialState'];\n return super.call(inputs, { mask, training, initialState });\n });\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n if (config['implmentation'] === 0) {\n config['implementation'] = 1;\n }\n return new cls(config);\n }\n}\n/** @nocollapse */\nLSTM.className = 'LSTM';\nserialization.registerClass(LSTM);\nexport class StackedRNNCells extends RNNCell {\n constructor(args) {\n super(args);\n this.cells = args.cells;\n }\n get stateSize() {\n // States are a flat list in reverse order of the cell stack.\n // This allows perserving the requirement `stack.statesize[0] ===\n // outputDim`. 
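// As a concrete sketch of the flattening described in this comment, using the
// classes defined in this file: two LSTM cells with 4 units each yield a
// flattened stateSize of [4, 4, 4, 4] (the sizes of [h2, c2, h1, c1]).
//
//   const stack = new StackedRNNCells({
//     cells: [new LSTMCell({units: 4}), new LSTMCell({units: 4})],
//   });
//   // stack.stateSize => [4, 4, 4, 4]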
E.g., states of a 2-layer LSTM would be `[h2, c2, h1, c1]`,\n // assuming one LSTM has states `[h, c]`.\n const stateSize = [];\n for (const cell of this.cells.slice().reverse()) {\n if (Array.isArray(cell.stateSize)) {\n stateSize.push(...cell.stateSize);\n }\n else {\n stateSize.push(cell.stateSize);\n }\n }\n return stateSize;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n inputs = inputs;\n let states = inputs.slice(1);\n // Recover per-cell states.\n const nestedStates = [];\n for (const cell of this.cells.slice().reverse()) {\n if (Array.isArray(cell.stateSize)) {\n nestedStates.push(states.splice(0, cell.stateSize.length));\n }\n else {\n nestedStates.push(states.splice(0, 1));\n }\n }\n nestedStates.reverse();\n // Call the cells in order and store the returned states.\n const newNestedStates = [];\n let callInputs;\n for (let i = 0; i < this.cells.length; ++i) {\n const cell = this.cells[i];\n states = nestedStates[i];\n // TODO(cais): Take care of constants.\n if (i === 0) {\n callInputs = [inputs[0]].concat(states);\n }\n else {\n callInputs = [callInputs[0]].concat(states);\n }\n callInputs = cell.call(callInputs, kwargs);\n newNestedStates.push(callInputs.slice(1));\n }\n // Format the new states as a flat list in reverse cell order.\n states = [];\n for (const cellStates of newNestedStates.slice().reverse()) {\n states.push(...cellStates);\n }\n return [callInputs[0]].concat(states);\n });\n }\n build(inputShape) {\n if (isArrayOfShapes(inputShape)) {\n // TODO(cais): Take care of input constants.\n // const constantShape = inputShape.slice(1);\n inputShape = inputShape[0];\n }\n inputShape = inputShape;\n let outputDim;\n this.cells.forEach((cell, i) => {\n nameScope(`RNNCell_${i}`, () => {\n // TODO(cais): Take care of input constants.\n cell.build(inputShape);\n if (Array.isArray(cell.stateSize)) {\n outputDim = cell.stateSize[0];\n }\n else {\n outputDim = cell.stateSize;\n }\n inputShape = [inputShape[0], outputDim];\n });\n });\n this.built = true;\n }\n getConfig() {\n const baseConfig = super.getConfig();\n const getCellConfig = (cell) => {\n return {\n 'className': cell.getClassName(),\n 'config': cell.getConfig(),\n };\n };\n const cellConfigs = this.cells.map(getCellConfig);\n const config = { 'cells': cellConfigs };\n return Object.assign({}, baseConfig, config);\n }\n /** @nocollapse */\n static fromConfig(cls, config, customObjects = {}) {\n const cells = [];\n for (const cellConfig of config['cells']) {\n cells.push(deserialize(cellConfig, customObjects));\n }\n return new cls({ cells });\n }\n get trainableWeights() {\n if (!this.trainable) {\n return [];\n }\n const weights = [];\n for (const cell of this.cells) {\n weights.push(...cell.trainableWeights);\n }\n return weights;\n }\n get nonTrainableWeights() {\n const weights = [];\n for (const cell of this.cells) {\n weights.push(...cell.nonTrainableWeights);\n }\n if (!this.trainable) {\n const trainableWeights = [];\n for (const cell of this.cells) {\n trainableWeights.push(...cell.trainableWeights);\n }\n return trainableWeights.concat(weights);\n }\n return weights;\n }\n /**\n * Retrieve the weights of a the model.\n *\n * @returns A flat `Array` of `tf.Tensor`s.\n */\n getWeights() {\n const weights = [];\n for (const cell of this.cells) {\n weights.push(...cell.weights);\n }\n return batchGetValue(weights);\n }\n /**\n * Set the weights of the model.\n *\n * @param weights An `Array` of `tf.Tensor`s with shapes and types matching\n * the output of `getWeights()`.\n */\n setWeights(weights) {\n 
const tuples = [];\n for (const cell of this.cells) {\n const numParams = cell.weights.length;\n const inputWeights = weights.splice(numParams);\n for (let i = 0; i < cell.weights.length; ++i) {\n tuples.push([cell.weights[i], inputWeights[i]]);\n }\n }\n batchSetValue(tuples);\n }\n}\n/** @nocollapse */\nStackedRNNCells.className = 'StackedRNNCells';\nserialization.registerClass(StackedRNNCells);\nexport function generateDropoutMask(args) {\n const { ones, rate, training = false, count = 1 } = args;\n const droppedInputs = () => K.dropout(ones(), rate);\n const createMask = () => K.inTrainPhase(droppedInputs, ones, training);\n // just in case count is provided with null or undefined\n if (!count || count <= 1) {\n return tfc.keep(createMask().clone());\n }\n const masks = Array(count).fill(undefined).map(createMask);\n return masks.map(m => tfc.keep(m.clone()));\n}\n//# sourceMappingURL=recurrent.js.map", "/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nvar __rest = (this && this.__rest) || function (s, e) {\n var t = {};\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\n t[p] = s[p];\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\n t[p[i]] = s[p[i]];\n }\n return t;\n};\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { util } from '@tensorflow/tfjs-core';\nimport * as K from '../backend/tfjs_backend';\nimport { checkDataFormat, checkPaddingMode } from '../common';\nimport { InputSpec } from '../engine/topology';\nimport { AttributeError, NotImplementedError, ValueError } from '../errors';\nimport { Initializer } from '../initializers';\nimport { convOutputLength, normalizeArray } from '../utils/conv_utils';\nimport { assertPositiveInteger } from '../utils/generic_utils';\nimport { getExactlyOneShape } from '../utils/types_utils';\nimport { generateDropoutMask, LSTMCell, RNN, RNNCell } from './recurrent';\nclass ConvRNN2DCell extends RNNCell {\n}\n/**\n * Base class for convolutional-recurrent layers.\n */\nclass ConvRNN2D extends RNN {\n constructor(args) {\n if (args.unroll) {\n throw new NotImplementedError('Unrolling is not possible with convolutional RNNs.');\n }\n if (Array.isArray(args.cell)) {\n throw new NotImplementedError('It is not possible at the moment to stack convolutional cells.');\n }\n super(args);\n this.inputSpec = [new InputSpec({ ndim: 5 })];\n }\n call(inputs, kwargs) {\n return tfc.tidy(() => {\n if (this.cell.dropoutMask != null) {\n tfc.dispose(this.cell.dropoutMask);\n this.cell.dropoutMask = null;\n }\n if (this.cell.recurrentDropoutMask != null) {\n tfc.dispose(this.cell.recurrentDropoutMask);\n this.cell.recurrentDropoutMask = null;\n }\n if (kwargs && kwargs['constants']) {\n throw new ValueError('ConvRNN2D cell does not support constants');\n }\n const mask = kwargs == null ? null : kwargs['mask'];\n const training = kwargs == null ? null : kwargs['training'];\n const initialState = kwargs == null ? 
null : kwargs['initialState'];\n return super.call(inputs, { mask, training, initialState });\n });\n }\n computeOutputShape(inputShape) {\n let outShape = this.computeSingleOutputShape(inputShape);\n if (!this.returnSequences) {\n outShape = [outShape[0], ...outShape.slice(2)];\n }\n if (this.returnState) {\n outShape =\n [outShape, ...Array(2).fill([inputShape[0], ...outShape.slice(-3)])];\n }\n return outShape;\n }\n getInitialState(inputs) {\n return tfc.tidy(() => {\n const { stateSize } = this.cell;\n const inputShape = inputs.shape;\n const outputShape = this.computeSingleOutputShape(inputShape);\n const stateShape = [outputShape[0], ...outputShape.slice(2)];\n const initialState = tfc.zeros(stateShape);\n if (Array.isArray(stateSize)) {\n return Array(stateSize.length).fill(initialState);\n }\n return [initialState];\n });\n }\n resetStates(states, training = false) {\n tfc.tidy(() => {\n if (!this.stateful) {\n throw new AttributeError('Cannot call resetStates() on an RNN Layer that is not stateful.');\n }\n const inputShape = this.inputSpec[0].shape;\n const outputShape = this.computeSingleOutputShape(inputShape);\n const stateShape = [outputShape[0], ...outputShape.slice(2)];\n const batchSize = inputShape[0];\n if (batchSize == null) {\n throw new ValueError('If an RNN is stateful, it needs to know its batch size. Specify ' +\n 'the batch size of your input tensors: \\n' +\n '- If using a Sequential model, specify the batch size by ' +\n 'passing a `batchInputShape` option to your first layer.\\n' +\n '- If using the functional API, specify the batch size by ' +\n 'passing a `batchShape` option to your Input layer.');\n }\n // Initialize state if null.\n if (this.getStates() == null) {\n if (Array.isArray(this.cell.stateSize)) {\n this.states_ = this.cell.stateSize.map(() => tfc.zeros(stateShape));\n }\n else {\n this.states_ = [tfc.zeros(stateShape)];\n }\n }\n else if (states == null) {\n // Dispose old state tensors.\n tfc.dispose(this.states_);\n // For stateful RNNs, fully dispose kept old states.\n if (this.keptStates != null) {\n tfc.dispose(this.keptStates);\n this.keptStates = [];\n }\n if (Array.isArray(this.cell.stateSize)) {\n this.states_ = this.cell.stateSize.map(() => tfc.zeros(stateShape));\n }\n else {\n this.states_[0] = tfc.zeros(stateShape);\n }\n }\n else {\n if (!Array.isArray(states)) {\n states = [states];\n }\n if (states.length !== this.states_.length) {\n throw new ValueError(`Layer ${this.name} expects ${this.states_.length} state(s), ` +\n `but it received ${states.length} state value(s). Input ` +\n `received: ${states}`);\n }\n if (training) {\n // Store old state tensors for complete disposal later, i.e., during\n // the next no-arg call to this method. 
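// A usage sketch (assumption: the bundle exposes this layer through a
// tf.layers.convLstm2d factory). A ConvLSTM2D layer built on this ConvRNN2D
// base consumes 5-D input of shape [batch, time, height, width, channels]:
//
//   const layer = tf.layers.convLstm2d({
//     filters: 8,
//     kernelSize: 3,
//     padding: 'same',
//     returnSequences: true,
//     inputShape: [10, 64, 64, 1],   // [time, height, width, channels]
//   });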
We do not dispose the old\n // states immediately because that BPTT (among other things) require\n // them.\n this.keptStates.push(this.states_.slice());\n }\n else {\n tfc.dispose(this.states_);\n }\n for (let index = 0; index < this.states_.length; ++index) {\n const value = states[index];\n const expectedShape = stateShape;\n if (!util.arraysEqual(value.shape, expectedShape)) {\n throw new ValueError(`State ${index} is incompatible with layer ${this.name}: ` +\n `expected shape=${expectedShape}, received shape=${value.shape}`);\n }\n this.states_[index] = value;\n }\n }\n this.states_ = this.states_.map(state => tfc.keep(state.clone()));\n });\n }\n computeSingleOutputShape(inputShape) {\n const { dataFormat, filters, kernelSize, padding, strides, dilationRate } = this.cell;\n const isChannelsFirst = dataFormat === 'channelsFirst';\n const h = inputShape[isChannelsFirst ? 3 : 2];\n const w = inputShape[isChannelsFirst ? 4 : 3];\n const hOut = convOutputLength(h, kernelSize[0], padding, strides[0], dilationRate[0]);\n const wOut = convOutputLength(w, kernelSize[1], padding, strides[1], dilationRate[1]);\n const outShape = [\n ...inputShape.slice(0, 2),\n ...(isChannelsFirst ? [filters, hOut, wOut] : [hOut, wOut, filters])\n ];\n return outShape;\n }\n}\n/** @nocollapse */\nConvRNN2D.className = 'ConvRNN2D';\nexport class ConvLSTM2DCell extends LSTMCell {\n constructor(args) {\n const { filters, kernelSize, strides, padding, dataFormat, dilationRate, } = args;\n super(Object.assign({}, args, { units: filters }));\n this.filters = filters;\n assertPositiveInteger(this.filters, 'filters');\n this.kernelSize = normalizeArray(kernelSize, 2, 'kernelSize');\n this.kernelSize.forEach(size => assertPositiveInteger(size, 'kernelSize'));\n this.strides = normalizeArray(strides || 1, 2, 'strides');\n this.strides.forEach(stride => assertPositiveInteger(stride, 'strides'));\n this.padding = padding || 'valid';\n checkPaddingMode(this.padding);\n this.dataFormat = dataFormat || 'channelsLast';\n checkDataFormat(this.dataFormat);\n this.dilationRate = normalizeArray(dilationRate || 1, 2, 'dilationRate');\n this.dilationRate.forEach(rate => assertPositiveInteger(rate, 'dilationRate'));\n }\n build(inputShape) {\n var _a;\n inputShape = getExactlyOneShape(inputShape);\n const channelAxis = this.dataFormat === 'channelsFirst' ? 1 : inputShape.length - 1;\n if (inputShape[channelAxis] == null) {\n throw new ValueError(`The channel dimension of the input should be defined. 
` +\n `Found ${inputShape[channelAxis]}`);\n }\n const inputDim = inputShape[channelAxis];\n const numOfKernels = 4;\n const kernelShape = this.kernelSize.concat([inputDim, this.filters * numOfKernels]);\n this.kernel = this.addWeight('kernel', kernelShape, null, this.kernelInitializer, this.kernelRegularizer, true, this.kernelConstraint);\n const recurrentKernelShape = this.kernelSize.concat([this.filters, this.filters * numOfKernels]);\n this.recurrentKernel = this.addWeight('recurrent_kernel', recurrentKernelShape, null, this.recurrentInitializer, this.recurrentRegularizer, true, this.recurrentConstraint);\n if (this.useBias) {\n let biasInitializer;\n if (this.unitForgetBias) {\n const init = this.biasInitializer;\n const filters = this.filters;\n biasInitializer = new (_a = class CustomInit extends Initializer {\n apply(shape, dtype) {\n const biasI = init.apply([filters]);\n const biasF = tfc.ones([filters]);\n const biasCAndO = init.apply([filters * 2]);\n return K.concatenate([biasI, biasF, biasCAndO]);\n }\n },\n /** @nocollapse */\n _a.className = 'CustomInit',\n _a)();\n }\n else {\n biasInitializer = this.biasInitializer;\n }\n this.bias = this.addWeight('bias', [this.filters * numOfKernels], null, biasInitializer, this.biasRegularizer, true, this.biasConstraint);\n }\n this.built = true;\n }\n call(inputs, kwargs) {\n return tfc.tidy(() => {\n if (inputs.length !== 3) {\n throw new ValueError(`ConvLSTM2DCell expects 3 input Tensors (inputs, h, c), got ` +\n `${inputs.length}.`);\n }\n const training = kwargs['training'] || false;\n const x = inputs[0]; // Current input\n const hTMinus1 = inputs[1]; // Previous memory state.\n const cTMinus1 = inputs[2]; // Previous carry state.\n const numOfKernels = 4;\n if (0 < this.dropout && this.dropout < 1 && this.dropoutMask == null) {\n this.dropoutMask = generateDropoutMask({\n ones: () => tfc.onesLike(x),\n rate: this.dropout,\n training,\n count: numOfKernels\n });\n }\n const dropoutMask = this.dropoutMask;\n const applyDropout = (x, mask, index) => {\n if (!mask || !mask[index]) {\n return x;\n }\n return tfc.mul(mask[index], x);\n };\n let xI = applyDropout(x, dropoutMask, 0);\n let xF = applyDropout(x, dropoutMask, 1);\n let xC = applyDropout(x, dropoutMask, 2);\n let xO = applyDropout(x, dropoutMask, 3);\n if (0 < this.recurrentDropout && this.recurrentDropout < 1 &&\n this.recurrentDropoutMask == null) {\n this.recurrentDropoutMask = generateDropoutMask({\n ones: () => tfc.onesLike(hTMinus1),\n rate: this.recurrentDropout,\n training,\n count: numOfKernels\n });\n }\n const recDropoutMask = this.recurrentDropoutMask;\n let hI = applyDropout(hTMinus1, recDropoutMask, 0);\n let hF = applyDropout(hTMinus1, recDropoutMask, 1);\n let hC = applyDropout(hTMinus1, recDropoutMask, 2);\n let hO = applyDropout(hTMinus1, recDropoutMask, 3);\n const kernelChannelAxis = 3;\n const [kernelI, kernelF, kernelC, kernelO] = tfc.split(this.kernel.read(), numOfKernels, kernelChannelAxis);\n const [biasI, biasF, biasC, biasO] = this.useBias ?\n tfc.split(this.bias.read(), numOfKernels) :\n [null, null, null, null];\n xI = this.inputConv(xI, kernelI, biasI, this.padding);\n xF = this.inputConv(xF, kernelF, biasF, this.padding);\n xC = this.inputConv(xC, kernelC, biasC, this.padding);\n xO = this.inputConv(xO, kernelO, biasO, this.padding);\n const [recKernelI, recKernelF, recKernelC, recKernelO] = tfc.split(this.recurrentKernel.read(), numOfKernels, kernelChannelAxis);\n hI = this.recurrentConv(hI, recKernelI);\n hF = this.recurrentConv(hF, 
recKernelF);\n hC = this.recurrentConv(hC, recKernelC);\n hO = this.recurrentConv(hO, recKernelO);\n const i = this.recurrentActivation.apply(tfc.add(xI, hI));\n const f = this.recurrentActivation.apply(tfc.add(xF, hF));\n const c = tfc.add(tfc.mul(f, cTMinus1), tfc.mul(i, this.activation.apply(tfc.add(xC, hC))));\n const h = tfc.mul(this.recurrentActivation.apply(tfc.add(xO, hO)), this.activation.apply(c));\n return [h, h, c];\n });\n }\n getConfig() {\n const _a = super.getConfig(), { 'units': _ } = _a, baseConfig = __rest(_a, ['units']);\n const config = {\n filters: this.filters,\n kernelSize: this.kernelSize,\n padding: this.padding,\n dataFormat: this.dataFormat,\n dilationRate: this.dilationRate,\n strides: this.strides,\n };\n return Object.assign({}, baseConfig, config);\n }\n inputConv(x, w, b, padding) {\n const out = tfc.conv2d(x, w, this.strides, (padding || 'valid'), this.dataFormat === 'channelsFirst' ? 'NCHW' : 'NHWC', this.dilationRate);\n if (b) {\n return K.biasAdd(out, b, this.dataFormat);\n }\n return out;\n }\n recurrentConv(x, w) {\n const strides = 1;\n return tfc.conv2d(x, w, strides, 'same', this.dataFormat === 'channelsFirst' ? 'NCHW' : 'NHWC');\n }\n}\n/** @nocollapse */\nConvLSTM2DCell.className = 'ConvLSTM2DCell';\ntfc.serialization.registerClass(ConvLSTM2DCell);\nexport class ConvLSTM2D extends ConvRNN2D {\n constructor(args) {\n const cell = new ConvLSTM2DCell(args);\n super(Object.assign({}, args, { cell }));\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n return new cls(config);\n }\n}\n/** @nocollapse */\nConvLSTM2D.className = 'ConvLSTM2D';\ntfc.serialization.registerClass(ConvLSTM2D);\n//# sourceMappingURL=convolutional_recurrent.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * TensorFlow.js Layers: Basic Layers.\n */\nimport { any, notEqual, serialization, tidy, transpose, util } from '@tensorflow/tfjs-core';\nimport { getActivation, serializeActivation } from '../activations';\nimport * as K from '../backend/tfjs_backend';\nimport { getConstraint, serializeConstraint } from '../constraints';\nimport { InputSpec, Layer } from '../engine/topology';\nimport { ValueError } from '../errors';\nimport { getInitializer, serializeInitializer } from '../initializers';\nimport { getRegularizer, serializeRegularizer } from '../regularizers';\nimport { assertPositiveInteger, mapActivationToFusedKernel } from '../utils/generic_utils';\nimport { arrayProd, range } from '../utils/math_utils';\nimport { getExactlyOneShape, getExactlyOneTensor } from '../utils/types_utils';\nexport class Dropout extends Layer {\n constructor(args) {\n super(args);\n this.rate = Math.max(Math.min(args.rate, 1), 0);\n // So that the scalar doesn't get tidied up between executions.\n this.noiseShape = args.noiseShape;\n this.seed = args.seed;\n this.supportsMasking = true;\n }\n getNoiseShape(input) {\n if (this.noiseShape == null) {\n return this.noiseShape;\n }\n const inputShape = input.shape;\n const noiseShape = [];\n for (let i = 0; i < this.noiseShape.length; ++i) {\n noiseShape.push(this.noiseShape[i] == null ? 
inputShape[i] : this.noiseShape[i]);\n }\n return noiseShape;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n const input = getExactlyOneTensor(inputs);\n if (0 < this.rate && this.rate < 1) {\n const training = kwargs['training'] == null ? false : kwargs['training'];\n const noiseShape = this.getNoiseShape(input);\n const output = K.inTrainPhase(() => K.dropout(input, this.rate, noiseShape, this.seed), () => input, training);\n return output;\n }\n return inputs;\n });\n }\n getConfig() {\n const config = {\n rate: this.rate,\n noiseShape: this.noiseShape,\n seed: this.seed,\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n dispose() {\n return super.dispose();\n }\n}\n/** @nocollapse */\nDropout.className = 'Dropout';\nserialization.registerClass(Dropout);\nexport class SpatialDropout1D extends Dropout {\n constructor(args) {\n super(args);\n this.inputSpec = [{ ndim: 3 }];\n }\n getNoiseShape(input) {\n const inputShape = input.shape;\n return [inputShape[0], 1, inputShape[2]];\n }\n}\n/** @nocollapse */\nSpatialDropout1D.className = 'SpatialDropout1D';\nserialization.registerClass(SpatialDropout1D);\nexport class Dense extends Layer {\n constructor(args) {\n super(args);\n // Default activation: Linear (none).\n this.activation = null;\n this.useBias = true;\n this.kernel = null;\n this.bias = null;\n this.DEFAULT_KERNEL_INITIALIZER = 'glorotNormal';\n this.DEFAULT_BIAS_INITIALIZER = 'zeros';\n if (args.batchInputShape == null && args.inputShape == null &&\n args.inputDim != null) {\n // This logic is copied from Layer's constructor, since we can't\n // do exactly what the Python constructor does for Dense().\n let batchSize = null;\n if (args.batchSize != null) {\n batchSize = args.batchSize;\n }\n this.batchInputShape = [batchSize, args.inputDim];\n }\n this.units = args.units;\n assertPositiveInteger(this.units, 'units');\n this.activation = getActivation(args.activation);\n if (args.useBias != null) {\n this.useBias = args.useBias;\n }\n this.kernelInitializer = getInitializer(args.kernelInitializer || this.DEFAULT_KERNEL_INITIALIZER);\n this.biasInitializer =\n getInitializer(args.biasInitializer || this.DEFAULT_BIAS_INITIALIZER);\n this.kernelConstraint = getConstraint(args.kernelConstraint);\n this.biasConstraint = getConstraint(args.biasConstraint);\n this.kernelRegularizer = getRegularizer(args.kernelRegularizer);\n this.biasRegularizer = getRegularizer(args.biasRegularizer);\n this.activityRegularizer = getRegularizer(args.activityRegularizer);\n this.supportsMasking = true;\n this.inputSpec = [{ minNDim: 2 }];\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const inputLastDim = inputShape[inputShape.length - 1];\n if (this.kernel == null) {\n this.kernel = this.addWeight('kernel', [inputLastDim, this.units], null, this.kernelInitializer, this.kernelRegularizer, true, this.kernelConstraint);\n if (this.useBias) {\n this.bias = this.addWeight('bias', [this.units], null, this.biasInitializer, this.biasRegularizer, true, this.biasConstraint);\n }\n }\n this.inputSpec = [{ minNDim: 2, axes: { [-1]: inputLastDim } }];\n this.built = true;\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const outputShape = inputShape.slice();\n outputShape[outputShape.length - 1] = this.units;\n return outputShape;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n // Dense layer accepts only a 
single input.\n const input = getExactlyOneTensor(inputs);\n const fusedActivationName = mapActivationToFusedKernel(this.activation.getClassName());\n let output;\n if (fusedActivationName != null) {\n output = K.dot(input, this.kernel.read(), fusedActivationName, this.bias ? this.bias.read() : null);\n }\n else {\n output = K.dot(input, this.kernel.read());\n if (this.bias != null) {\n output = K.biasAdd(output, this.bias.read());\n }\n if (this.activation != null) {\n output = this.activation.apply(output);\n }\n }\n return output;\n });\n }\n getConfig() {\n const config = {\n units: this.units,\n activation: serializeActivation(this.activation),\n useBias: this.useBias,\n kernelInitializer: serializeInitializer(this.kernelInitializer),\n biasInitializer: serializeInitializer(this.biasInitializer),\n kernelRegularizer: serializeRegularizer(this.kernelRegularizer),\n biasRegularizer: serializeRegularizer(this.biasRegularizer),\n activityRegularizer: serializeRegularizer(this.activityRegularizer),\n kernelConstraint: serializeConstraint(this.kernelConstraint),\n biasConstraint: serializeConstraint(this.biasConstraint)\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nDense.className = 'Dense';\nserialization.registerClass(Dense);\nexport class Flatten extends Layer {\n constructor(args) {\n args = args || {};\n super(args);\n this.inputSpec = [{ minNDim: 3 }];\n this.dataFormat = args.dataFormat;\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n for (const dim of inputShape.slice(1)) {\n if (dim == null) {\n throw new ValueError(`The shape of the input to \"Flatten\" is not fully defined ` +\n `(got ${inputShape.slice(1)}). Make sure to pass a complete ` +\n `\"input_shape\" or \"batch_input_shape\" argument to the first ` +\n `layer in your model.`);\n }\n }\n return [inputShape[0], arrayProd(inputShape, 1)];\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n let input = getExactlyOneTensor(inputs);\n if (this.dataFormat === 'channelsFirst' && input.rank > 1) {\n const permutation = [0];\n for (let i = 2; i < input.rank; ++i) {\n permutation.push(i);\n }\n permutation.push(1);\n input = input.transpose(permutation);\n }\n return K.batchFlatten(input);\n });\n }\n getConfig() {\n const config = {};\n if (this.dataFormat != null) {\n config['dataFormat'] = this.dataFormat;\n }\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nFlatten.className = 'Flatten';\nserialization.registerClass(Flatten);\nexport class Activation extends Layer {\n constructor(args) {\n super(args);\n this.supportsMasking = true;\n this.activation = getActivation(args.activation);\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n const input = getExactlyOneTensor(inputs);\n return this.activation.apply(input);\n });\n }\n getConfig() {\n const config = { activation: serializeActivation(this.activation) };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nActivation.className = 'Activation';\nserialization.registerClass(Activation);\nexport class RepeatVector extends Layer {\n constructor(args) {\n super(args);\n this.n = args.n;\n this.inputSpec = [{ ndim: 2 }];\n }\n computeOutputShape(inputShape) {\n return [inputShape[0], this.n, inputShape[1]];\n }\n call(inputs, kwargs) {\n 
return tidy(() => {\n inputs = getExactlyOneTensor(inputs);\n return K.repeat(inputs, this.n);\n });\n }\n getConfig() {\n const config = {\n n: this.n,\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nRepeatVector.className = 'RepeatVector';\nserialization.registerClass(RepeatVector);\nexport class Reshape extends Layer {\n constructor(args) {\n super(args);\n this.targetShape = args.targetShape;\n // Make sure that all unknown dimensions are represented as `null`.\n for (let i = 0; i < this.targetShape.length; ++i) {\n if (this.isUnknown(this.targetShape[i])) {\n this.targetShape[i] = null;\n }\n }\n }\n isUnknown(dim) {\n return dim < 0 || dim == null;\n }\n /**\n * Finds and replaces a missing dimension in output shape.\n *\n * This is a near direct port of the internal Numpy function\n * `_fix_unknown_dimension` in `numpy/core/src/multiarray/shape.c`.\n *\n * @param inputShape: Original shape of array begin reshape.\n * @param outputShape: Target shape of the array, with at most a single\n * `null` or negative number, which indicates an underdetermined dimension\n * that should be derived from `inputShape` and the known dimensions of\n * `outputShape`.\n * @returns: The output shape with `null` replaced with its computed value.\n * @throws: ValueError: If `inputShape` and `outputShape` do not match.\n */\n fixUnknownDimension(inputShape, outputShape) {\n const errorMsg = 'Total size of new array must be unchanged.';\n const finalShape = outputShape.slice();\n let known = 1;\n let unknown = null;\n for (let i = 0; i < finalShape.length; ++i) {\n const dim = finalShape[i];\n if (this.isUnknown(dim)) {\n if (unknown === null) {\n unknown = i;\n }\n else {\n throw new ValueError('Can only specifiy one unknown dimension.');\n }\n }\n else {\n known *= dim;\n }\n }\n const originalSize = arrayProd(inputShape);\n if (unknown !== null) {\n if (known === 0 || originalSize % known !== 0) {\n throw new ValueError(errorMsg);\n }\n finalShape[unknown] = originalSize / known;\n }\n else if (originalSize !== known) {\n throw new ValueError(errorMsg);\n }\n return finalShape;\n }\n computeOutputShape(inputShape) {\n let anyUnknownDims = false;\n for (let i = 0; i < inputShape.length; ++i) {\n if (this.isUnknown(inputShape[i])) {\n anyUnknownDims = true;\n break;\n }\n }\n if (anyUnknownDims) {\n return inputShape.slice(0, 1).concat(this.targetShape);\n }\n else {\n return inputShape.slice(0, 1).concat(this.fixUnknownDimension(inputShape.slice(1), this.targetShape));\n }\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n const input = getExactlyOneTensor(inputs);\n const inputShape = input.shape;\n const outputShape = inputShape.slice(0, 1).concat(this.fixUnknownDimension(inputShape.slice(1), this.targetShape));\n return input.reshape(outputShape);\n });\n }\n getConfig() {\n const config = {\n targetShape: this.targetShape,\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nReshape.className = 'Reshape';\nserialization.registerClass(Reshape);\nexport class Permute extends Layer {\n constructor(args) {\n super(args);\n if (args.dims == null) {\n throw new Error('Required configuration field `dims` is missing during Permute ' +\n 'constructor call.');\n }\n if (!Array.isArray(args.dims)) {\n throw new Error('Permute constructor requires `dims` to be an Array, but received ' +\n `${args.dims} instead.`);\n }\n 
// Check the validity of the permutation indices.\n const expectedSortedIndices = range(1, args.dims.length + 1);\n if (!util.arraysEqual(args.dims.slice().sort(), expectedSortedIndices)) {\n throw new Error('Invalid permutation `dims`: ' + JSON.stringify(args.dims) +\n ' `dims` must contain consecutive integers starting from 1.');\n }\n this.dims = args.dims;\n this.dimsIncludingBatch = [0].concat(this.dims);\n this.inputSpec = [new InputSpec({ ndim: this.dims.length + 1 })];\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const outputShape = inputShape.slice();\n this.dims.forEach((dim, i) => {\n outputShape[i + 1] = inputShape[dim];\n });\n return outputShape;\n }\n call(inputs, kwargs) {\n return transpose(getExactlyOneTensor(inputs), this.dimsIncludingBatch);\n }\n getConfig() {\n const config = {\n dims: this.dims,\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nPermute.className = 'Permute';\nserialization.registerClass(Permute);\nexport class Masking extends Layer {\n constructor(args) {\n super(args == null ? {} : args);\n this.supportsMasking = true;\n if (args != null) {\n this.maskValue = args.maskValue == null ? 0 : args.maskValue;\n }\n else {\n this.maskValue = 0;\n }\n }\n computeOutputShape(inputShape) {\n return inputShape;\n }\n getConfig() {\n const baseConfig = super.getConfig();\n const config = { maskValue: this.maskValue };\n Object.assign(config, baseConfig);\n return config;\n }\n computeMask(inputs, mask) {\n const input = getExactlyOneTensor(inputs);\n const axis = -1;\n return any(notEqual(input, this.maskValue), axis);\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n const input = getExactlyOneTensor(inputs);\n const axis = -1;\n const keepDims = true;\n const booleanMask = any(notEqual(input, this.maskValue), axis, keepDims);\n const output = input.mul(booleanMask.asType(input.dtype));\n return output;\n });\n }\n}\n/** @nocollapse */\nMasking.className = 'Masking';\nserialization.registerClass(Masking);\n//# sourceMappingURL=core.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * TensorFlow.js Layers: Embedding Layer.\n *\n * Original source: keras/constraints.py\n */\nimport { notEqual, serialization, tidy, zerosLike } from '@tensorflow/tfjs-core';\nimport * as K from '../backend/tfjs_backend';\nimport { getConstraint, serializeConstraint } from '../constraints';\nimport { Layer } from '../engine/topology';\nimport { ValueError } from '../errors';\nimport { getInitializer, serializeInitializer } from '../initializers';\nimport { getRegularizer, serializeRegularizer } from '../regularizers';\nimport * as generic_utils from '../utils/generic_utils';\nimport { getExactlyOneShape, getExactlyOneTensor } from '../utils/types_utils';\nexport class Embedding extends Layer {\n constructor(args) {\n super(args);\n this.embeddings = null;\n this.DEFAULT_EMBEDDINGS_INITIALIZER = 'randomUniform';\n if (args.batchInputShape == null && args.inputShape == null) {\n // Porting Note: This logic is copied from Layer's constructor, since we\n // can't do exactly what the Python constructor does for Embedding().\n // Specifically, the super constructor can not be 
called after the\n // mutation of the `config` argument.\n let batchSize = null;\n if (args.batchSize != null) {\n batchSize = args.batchSize;\n }\n if (args.inputLength == null) {\n // Fix super-constructor to what it would have done if\n // 'config.inputShape' were (None, )\n this.batchInputShape = [batchSize, null];\n }\n else {\n // Fix super-constructor to what it would have done if\n // 'config.inputShape' were (config.inputLength, )\n this.batchInputShape =\n [batchSize].concat(generic_utils.toList(args.inputLength));\n }\n }\n this.inputDim = args.inputDim;\n generic_utils.assertPositiveInteger(this.inputDim, 'inputDim');\n this.outputDim = args.outputDim;\n generic_utils.assertPositiveInteger(this.outputDim, 'outputDim');\n this.embeddingsInitializer = getInitializer(args.embeddingsInitializer || this.DEFAULT_EMBEDDINGS_INITIALIZER);\n this.embeddingsRegularizer = getRegularizer(args.embeddingsRegularizer);\n this.activityRegularizer = getRegularizer(args.activityRegularizer);\n this.embeddingsConstraint = getConstraint(args.embeddingsConstraint);\n this.maskZero = args.maskZero;\n this.supportsMasking = args.maskZero;\n this.inputLength = args.inputLength;\n }\n build(inputShape) {\n this.embeddings = this.addWeight('embeddings', [this.inputDim, this.outputDim], this.dtype, this.embeddingsInitializer, this.embeddingsRegularizer, true, this.embeddingsConstraint);\n this.built = true;\n }\n // Override warnOnIncompatibleInputShape because an embedding layer allows\n // the input to have varying ranks.\n warnOnIncompatibleInputShape(inputShape) { }\n computeMask(inputs, mask) {\n return tidy(() => {\n if (!this.maskZero) {\n return null;\n }\n else {\n inputs = getExactlyOneTensor(inputs);\n return notEqual(inputs, zerosLike(inputs));\n }\n });\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n if (this.inputLength == null) {\n return [...inputShape, this.outputDim];\n }\n // inputLength can be an array if input is 3D or higher.\n const inLens = generic_utils.toList(this.inputLength);\n if (inLens.length !== inputShape.length - 1) {\n throw new ValueError(`\"inputLength\" is ${this.inputLength}, but received ` +\n `input shape has shape ${inputShape}`);\n }\n else {\n let i = 0;\n for (let k = 0; k < inLens.length; ++k) {\n const s1 = inLens[k];\n const s2 = inputShape[k + 1];\n if ((s1 != null) && (s2 != null) && (s1 !== s2)) {\n throw new ValueError(`\"inputLength\" is ${this.inputLength}, but received ` +\n `input shape has shape ${inputShape}`);\n }\n else if (s1 == null) {\n inLens[i] = s2;\n }\n i++;\n }\n }\n return [inputShape[0], ...inLens, this.outputDim];\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n // Embedding layer accepts only a single input.\n let input = getExactlyOneTensor(inputs);\n if (input.dtype !== 'int32') {\n input = K.cast(input, 'int32');\n }\n const output = K.gather(this.embeddings.read(), input.as1D());\n return output.reshape(getExactlyOneShape(this.computeOutputShape(input.shape)));\n });\n }\n getConfig() {\n const config = {\n inputDim: this.inputDim,\n outputDim: this.outputDim,\n embeddingsInitializer: serializeInitializer(this.embeddingsInitializer),\n embeddingsRegularizer: serializeRegularizer(this.embeddingsRegularizer),\n activityRegularizer: serializeRegularizer(this.activityRegularizer),\n embeddingsConstraint: serializeConstraint(this.embeddingsConstraint),\n maskZero: this.maskZero,\n inputLength: this.inputLength\n };\n const baseConfig = 
super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nEmbedding.className = 'Embedding';\nserialization.registerClass(Embedding);\n//# sourceMappingURL=embeddings.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * TensorFlow.js Layers: Merge Layers.\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { serialization, tidy, util } from '@tensorflow/tfjs-core';\nimport * as K from '../backend/tfjs_backend';\nimport { Layer } from '../engine/topology';\nimport { NotImplementedError, ValueError } from '../errors';\nimport { l2Normalize } from '../losses';\nimport * as generic_utils from '../utils/generic_utils';\nimport * as mathUtils from '../utils/math_utils';\nimport { getExactlyOneShape } from '../utils/types_utils';\n/**\n * Generic Merge layer for element-wise merge functions.\n *\n * Used to implement `Sum`, `Average`, `Concatenate`, etc.\n */\nexport class Merge extends Layer {\n constructor(args) {\n super(args || {});\n this.supportsMasking = true;\n }\n /**\n * Logic for merging multiple tensors, to be overridden by subclasses.\n * @param inputs\n */\n mergeFunction(inputs) {\n throw new NotImplementedError();\n }\n /**\n * Computes the shape of the result of an elementwise operation.\n *\n * @param shape1: Shape of the first tensor.\n * @param shape2: Shape of the second tensor.\n * @returns Expected output shape when an elementwise operation is carried\n * out on 2 tensors with shapes `shape1` and `shape2`.\n * @throws ValueError: If `shape1` and `shape2` are not compatible for\n * element-wise operations.\n */\n computeElementwiseOpOutputShape(shape1, shape2) {\n if (shape1 == null || shape2 == null) {\n return null;\n }\n else if (shape1.length < shape2.length) {\n return this.computeElementwiseOpOutputShape(shape2, shape1);\n }\n else if (shape2.length === 0) {\n return shape1;\n }\n const outputShape = shape1.slice(0, shape1.length - shape2.length);\n for (let k = 0; k < shape2.length; ++k) {\n const i = shape1[shape1.length - shape2.length + k];\n const j = shape2[k];\n if (i == null || j == null || i < 0 || j < 0) {\n outputShape.push(null);\n }\n else if (i === 1) {\n outputShape.push(j);\n }\n else if (j === 1) {\n outputShape.push(i);\n }\n else {\n if (i !== j) {\n throw new ValueError('Operands could not be broadcast together with shapes ' +\n JSON.stringify(shape1) + ' ' + JSON.stringify(shape2));\n }\n outputShape.push(i);\n }\n }\n return outputShape;\n }\n build(inputShape) {\n // Used purely for shape validation.\n if (Array.isArray(inputShape) && !Array.isArray(inputShape[0])) {\n // Make sure that inputShape is an Array of shape.\n inputShape = [getExactlyOneShape(inputShape)];\n }\n inputShape = inputShape;\n if (inputShape.length < 2) {\n throw new ValueError('A merge layer should be called on an Array of at least 2 inputs.' +\n ` Got ${inputShape.length} input(s).`);\n }\n // Make sure that there is at most one unique batch size among the input\n // shapes.\n let batchSizes = [];\n for (const shape of inputShape) {\n if (shape != null && shape[0] !== null) {\n batchSizes.push(shape[0]);\n }\n }\n batchSizes = generic_utils.unique(batchSizes);\n if (batchSizes.length > 1) {\n throw new ValueError(`Can not merge tensors with different batch sizes. 
` +\n `Got tensors with shapes: ${JSON.stringify(inputShape)}.`);\n }\n let outputShape = inputShape[0] == null ? null : inputShape[0].slice(1);\n for (let i = 1; i < inputShape.length; ++i) {\n const shape = inputShape[i] == null ? null : inputShape[i].slice(1);\n outputShape = this.computeElementwiseOpOutputShape(outputShape, shape);\n }\n // If the inputs have different ranks, we have to reshape them to make them\n // broadcastable.\n const allRanks = inputShape.map(shape => shape.length);\n if (inputShape.indexOf(null) === -1 &&\n generic_utils.unique(allRanks).length === 1) {\n this.reshapeRequired = false;\n }\n else {\n this.reshapeRequired = true;\n }\n }\n call(inputs, kwargs) {\n return tidy(() => {\n inputs = inputs;\n if (this.reshapeRequired) {\n const reshapedInputs = [];\n const inputDims = inputs.map(input => input.rank);\n if (inputDims.indexOf(null) === -1) {\n // If ranks of all inputs are available, we simply expand each of them\n // at axis=1 until all of them have the same rank.\n const maxNDim = mathUtils.max(inputDims);\n for (let x of inputs) {\n const xNDim = x.rank;\n for (let k = 0; k < maxNDim - xNDim; ++k) {\n x = K.expandDims(x, 1);\n }\n reshapedInputs.push(x);\n }\n return this.mergeFunction(reshapedInputs);\n }\n else {\n // Transpose all inputs so that batch size is the last dimension.\n // [batchSize, dim1, dim2, ...] -> [dim1, dim2, ..., batchSize]\n let transposed = false;\n for (const x of inputs) {\n const xNDim = x.rank;\n if (xNDim == null) {\n const xShape = x.shape;\n const batchSize = xShape[0];\n const newShape = xShape.slice(1).concat([batchSize]);\n let xTransposed = x.reshape([batchSize].concat(mathUtils.arrayProd(xShape.slice(1))));\n xTransposed = tfc.transpose(xTransposed, [1, 0]);\n xTransposed = xTransposed.reshape(newShape);\n reshapedInputs.push(xTransposed);\n transposed = true;\n }\n else if (xNDim > 1) {\n const dims = mathUtils.range(1, xNDim).concat([0]);\n reshapedInputs.push(tfc.transpose(x, dims));\n transposed = true;\n }\n else {\n // We don't transpose inputs if they are 1D vectors or scalars.\n reshapedInputs.push(x);\n }\n }\n let y = this.mergeFunction(reshapedInputs);\n const yNDim = y.rank;\n if (transposed) {\n // If inputs have been transposed, we have to transpose the output\n // too.\n if (yNDim == null) {\n const yShape = y.shape;\n const yNDim = yShape.length;\n const batchSize = yShape[yNDim - 1];\n const newShape = [batchSize].concat(yShape.slice(0, yShape.length - 1));\n y = tfc.transpose(y.reshape([-1, batchSize]), [1, 0])\n .reshape(newShape);\n }\n else if (yNDim > 1) {\n const dims = [yNDim - 1].concat(mathUtils.range(0, yNDim - 1));\n y = tfc.transpose(y, dims);\n }\n }\n return y;\n }\n }\n else {\n return this.mergeFunction(inputs);\n }\n });\n }\n computeOutputShape(inputShape) {\n inputShape = inputShape;\n let outputShape;\n if (inputShape[0] == null) {\n outputShape = null;\n }\n else {\n outputShape = inputShape[0].slice(1);\n }\n for (let i = 1; i < inputShape.length; ++i) {\n const shape = inputShape[i] == null ? 
null : inputShape[i].slice(1);\n outputShape = this.computeElementwiseOpOutputShape(outputShape, shape);\n }\n let batchSizes = [];\n for (const shape of inputShape) {\n if (shape != null && shape[0] !== null) {\n batchSizes.push(shape[0]);\n }\n }\n batchSizes = generic_utils.unique(batchSizes);\n if (batchSizes.length === 1) {\n outputShape = batchSizes.concat(outputShape);\n }\n else {\n outputShape = [null].concat(outputShape);\n }\n return outputShape;\n }\n computeMask(inputs, mask) {\n return tfc.tidy(() => {\n if (mask == null) {\n return null;\n }\n if (!Array.isArray(mask)) {\n throw new ValueError('`mask` should be an Array');\n }\n if (!Array.isArray(inputs)) {\n throw new ValueError('`inputs` should be an Array');\n }\n if (mask.length !== inputs.length) {\n throw new ValueError(`The Array 'inputs' and 'mask' are expected to have the same ` +\n `length, but have different lengths ` +\n `(${inputs.length} vs ${mask.length})`);\n }\n if (mask.every(m => m == null)) {\n return null;\n }\n mask = mask.map(m => m == null ? m : tfc.expandDims(m, 0));\n let output = mask[0];\n for (let i = 1; i < mask.length - 1; ++i) {\n output = tfc.logicalAnd(output, mask[i]);\n }\n return output;\n });\n }\n}\nexport class Add extends Merge {\n constructor(args) {\n super(args);\n }\n mergeFunction(inputs) {\n return tidy(() => {\n let output = inputs[0].clone();\n for (let i = 1; i < inputs.length; ++i) {\n output = tfc.add(output, inputs[i]);\n }\n return output;\n });\n }\n}\n/** @nocollapse */\nAdd.className = 'Add';\nserialization.registerClass(Add);\n/**\n * Calculate the element-wise sum of inputs, which all have the same shape.\n *\n * This function can be invoked in three ways.\n *\n * 1. Construct an instance of `Add` layer, by using no input argument\n * or a single configuration argument. The resultant `Add` layer can then\n * be used on `tf.SymbolicTensor`s or `tf.Tensor`s. For example:\n *\n * ```js\n * const addLayer = tf.layers.add();\n *\n * // The layer can be applied to inputs.\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = addLayer.apply([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 2. Invoke directly on an `Array` of `tf.SymbolicTensor`s. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.SymbolicTensor`. For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = tf.layers.add([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 3. Invoke directly on `tf.Tensor`s, i.e., concrete values. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.Tensor` as the result of the computation. 
For\n * example:\n *\n * ```js\n * const input1 = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n * const input2 = tf.tensor2d([10, 20, 30, 40], [2, 2]);\n * tf.layers.add([input1, input2]).print();\n * // Gives [[11, 22], [33, 44]].\n *\n */\nexport function add(config) {\n if (Array.isArray(config)) {\n const layer = new Add({});\n return layer.apply(config);\n }\n else {\n return new Add(config);\n }\n}\nexport class Multiply extends Merge {\n constructor(args) {\n super(args);\n }\n mergeFunction(inputs) {\n return tidy(() => {\n let output = inputs[0].clone();\n for (let i = 1; i < inputs.length; ++i) {\n output = tfc.mul(output, inputs[i]);\n }\n return output;\n });\n }\n}\n/** @nocollapse */\nMultiply.className = 'Multiply';\nserialization.registerClass(Multiply);\n/**\n * Calculate the element-wise product of inputs, which all have the same shape.\n *\n * This function can be invoked in three ways.\n *\n * 1. Construct an instance of `Multiply` layer, by using no input argument\n * or a single configuration argument. The resultant `Multiply` layer can\n * then be used on `tf.SymbolicTensor`s or `tf.Tensor`s. For example:\n *\n * ```js\n * const multiplyLayer = tf.layers.multiply();\n *\n * // The layer can be applied to inputs.\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = multiplyLayer.apply([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 2. Invoke directly on an `Array` of `tf.SymbolicTensor`s. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.SymbolicTensor`. For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = tf.layers.multiply([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 3. Invoke directly on `tf.Tensor`s, i.e., concrete values. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.Tensor` as the result of the computation. For\n * example:\n *\n * ```js\n * const input1 = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n * const input2 = tf.tensor2d([10, 20, 30, 40], [2, 2]);\n * tf.layers.multiply([input1, input2]).print();\n * // Gives [[10, 40], [90, 160]].\n *\n */\nexport function multiply(config) {\n if (Array.isArray(config)) {\n const layer = new Multiply({});\n return layer.apply(config);\n }\n else {\n return new Multiply(config);\n }\n}\nexport class Average extends Merge {\n constructor(args) {\n super(args);\n }\n mergeFunction(inputs) {\n return tidy(() => {\n let output = inputs[0].clone();\n for (let i = 1; i < inputs.length; ++i) {\n output = tfc.add(output, inputs[i]);\n }\n return tfc.mul(1 / inputs.length, output);\n });\n }\n}\n/** @nocollapse */\nAverage.className = 'Average';\nserialization.registerClass(Average);\n/**\n * Calculate the element-wise arithmetic mean of inputs, which all have the same\n * shape.\n *\n * This function can be invoked in three ways.\n *\n * 1. Construct an instance of `Average` layer, by using no input argument\n * or a single configuration argument. The resultant `Average` layer can then\n * be used on `tf.SymbolicTensor`s or `tf.Tensor`s. 
For example:\n *\n * ```js\n * const averageLayer = tf.layers.average();\n *\n * // The layer can be applied to inputs.\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = averageLayer.apply([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 2. Invoke directly on an `Array` of `tf.SymbolicTensor`s. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.SymbolicTensor`. For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = tf.layers.average([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 3. Invoke directly on `tf.Tensor`s, i.e., concrete values. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.Tensor` as the result of the computation. For\n * example:\n *\n * ```js\n * const input1 = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n * const input2 = tf.tensor2d([10, 20, 30, 40], [2, 2]);\n * tf.layers.average([input1, input2]).print();\n * // Gives [[5.5, 11], [16.5, 22]].\n *\n */\nexport function average(config) {\n if (Array.isArray(config)) {\n const layer = new Average({});\n return layer.apply(config);\n }\n else {\n return new Average(config);\n }\n}\nexport class Maximum extends Merge {\n constructor(args) {\n super(args);\n }\n mergeFunction(inputs) {\n return tidy(() => {\n let output = inputs[0];\n for (let i = 1; i < inputs.length; ++i) {\n output = tfc.maximum(output, inputs[i]);\n }\n return output;\n });\n }\n}\n/** @nocollapse */\nMaximum.className = 'Maximum';\nserialization.registerClass(Maximum);\n/**\n * Calculate the element-wise maximum of inputs, which all have the same shape.\n *\n * This function can be invoked in three ways.\n *\n * 1. Construct an instance of `Maximum` layer, by using no input argument\n * or a single configuration argument. The resultant `Maximum` layer can then\n * be used on `tf.SymbolicTensor`s or `tf.Tensor`s. For example:\n *\n * ```js\n * const maximumLayer = tf.layers.maximum();\n *\n * // The layer can be applied to inputs.\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = maximumLayer.apply([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 2. Invoke directly on an `Array` of `tf.SymbolicTensor`s. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.SymbolicTensor`. For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = tf.layers.maximum([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 3. Invoke directly on `tf.Tensor`s, i.e., concrete values. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.Tensor` as the result of the computation. 
For\n * example:\n *\n * ```js\n * const input1 = tf.tensor2d([1, 20, 3, 40], [2, 2]);\n * const input2 = tf.tensor2d([10, 2, 30, 4], [2, 2]);\n * tf.layers.maximum([input1, input2]).print();\n * // Gives [[10, 20], [30, 40]].\n *\n */\nexport function maximum(config) {\n if (Array.isArray(config)) {\n const layer = new Maximum({});\n return layer.apply(config);\n }\n else {\n return new Maximum(config);\n }\n}\nexport class Minimum extends Merge {\n constructor(args) {\n super(args);\n }\n mergeFunction(inputs) {\n return tidy(() => {\n let output = inputs[0];\n for (let i = 1; i < inputs.length; ++i) {\n output = tfc.minimum(output, inputs[i]);\n }\n return output;\n });\n }\n}\n/** @nocollapse */\nMinimum.className = 'Minimum';\nserialization.registerClass(Minimum);\n/**\n * Calculate the element-wise minimum of inputs, which all have the same shape.\n *\n * This function can be invoked in three ways.\n *\n * 1. Construct an instance of `Minimum` layer, by using no input argument\n * or a single configuration argument. The resultant `Minimum` layer can then\n * be used on `tf.SymbolicTensor`s or `tf.Tensor`s. For example:\n *\n * ```js\n * const minimumLayer = tf.layers.minimum();\n *\n * // The layer can be applied to inputs.\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = minimumLayer.apply([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 2. Invoke directly on an `Array` of `tf.SymbolicTensor`s. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.SymbolicTensor`. For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = tf.layers.minimum([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 3. Invoke directly on `tf.Tensor`s, i.e., concrete values. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.Tensor` as the result of the computation. For\n * example:\n *\n * ```js\n * const input1 = tf.tensor2d([1, 20, 3, 40], [2, 2]);\n * const input2 = tf.tensor2d([10, 2, 30, 4], [2, 2]);\n * tf.layers.minimum([input1, input2]).print();\n * // Gives [[1, 2], [3, 4]].\n *\n */\nexport function minimum(config) {\n if (Array.isArray(config)) {\n const layer = new Minimum({});\n return layer.apply(config);\n }\n else {\n return new Minimum(config);\n }\n}\nexport class Concatenate extends Merge {\n constructor(args) {\n super(args);\n this.DEFAULT_AXIS = -1;\n if (args == null) {\n args = {};\n }\n this.axis = args.axis == null ? 
this.DEFAULT_AXIS : args.axis;\n this.supportsMasking = true;\n this.reshapeRequired = false;\n }\n build(inputShape) {\n // Used purely for shape validation.]\n if (!(Array.isArray(inputShape) && Array.isArray(inputShape[0])) ||\n inputShape.length === 1) {\n throw new ValueError('A `Concatenate` layer should be called on a list of at least 2 ' +\n 'inputs');\n }\n inputShape = inputShape;\n let allNoneShape = true;\n for (const shape of inputShape) {\n if (shape != null) {\n allNoneShape = false;\n break;\n }\n }\n if (allNoneShape) {\n return;\n }\n const shapeSet = [];\n for (let i = 0; i < inputShape.length; ++i) {\n const shapeWithoutConcatAxis = inputShape[i].slice();\n shapeWithoutConcatAxis.splice(this.axis, 1);\n let exists = false;\n for (const shape of shapeSet) {\n if (util.arraysEqual(shape, shapeWithoutConcatAxis)) {\n exists = true;\n break;\n }\n }\n if (!exists) {\n shapeSet.push(shapeWithoutConcatAxis);\n }\n }\n if (shapeSet.length > 1) {\n throw new ValueError('A `Concatenate` layer requires inputs with matching shapes ' +\n 'except for the concat axis. Got input shapes: ' +\n JSON.stringify(inputShape));\n }\n }\n mergeFunction(inputs) {\n return tidy(() => {\n return K.concatenate(inputs, this.axis);\n });\n }\n computeOutputShape(inputShape) {\n if (!(Array.isArray(inputShape) && Array.isArray(inputShape[0]))) {\n throw new ValueError('A `Concatenate` layer should be called on a list of inputs.');\n }\n const inputShapes = inputShape;\n const outputShape = inputShapes[0].slice();\n const axis = this.axis < 0 ? outputShape.length + this.axis : this.axis;\n // Porting Note: the line above is because TypeScript doesn't support\n // negative indices.\n for (const shape of inputShapes.slice(1)) {\n if (outputShape[axis] == null || shape[axis] == null) {\n outputShape[axis] = null;\n break;\n }\n outputShape[axis] += shape[axis];\n }\n return outputShape;\n }\n computeMask(inputs, mask) {\n if (mask == null) {\n return null;\n }\n if (!Array.isArray(mask)) {\n throw new ValueError('`mask` should be an array for Concatenate');\n }\n if (!Array.isArray(inputs)) {\n throw new ValueError('`inputs` should be an array for Concatenate');\n }\n if (mask.length !== inputs.length) {\n throw new ValueError(`Mismatch in the length of mask (${mask.length}) ` +\n `and the legnth of inputs (${inputs.length})`);\n }\n return tfc.tidy(() => {\n let allNullMasks = true;\n mask.forEach(m => {\n if (m != null) {\n allNullMasks = false;\n return;\n }\n });\n if (allNullMasks) {\n return null;\n }\n const outputMasks = [];\n for (let i = 0; i < inputs.length; ++i) {\n if (mask[i] == null) {\n // Input is unmasked. Append all 1's to masks.\n outputMasks.push(tfc.onesLike(inputs[i]).asType('bool'));\n }\n else if (mask[i].rank < inputs[i].rank) {\n // Mask is smaller than the input, expand it.\n outputMasks.push(tfc.expandDims(mask[i], -1));\n }\n else {\n outputMasks.push(mask[i]);\n }\n }\n const concatenatedMasks = tfc.concat(outputMasks, this.axis);\n return tfc.all(concatenatedMasks, -1, false);\n });\n }\n getConfig() {\n const config = {\n 'axis': this.axis,\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nConcatenate.className = 'Concatenate';\nserialization.registerClass(Concatenate);\n/**\n * Concatenate an `Array` of inputs.\n *\n * This function can be invoked in three ways.\n *\n * 1. Construct an instance of `Concatenate` layer, by using no input argument\n * or a single configuration argument. 
The resultant `Concatenate` layer can\n * then be used on `tf.SymbolicTensor`s or `tf.Tensor`s. For example:\n *\n * ```js\n * const concatLayer = tf.layers.concatenate();\n *\n * // The layer can be applied to inputs.\n * const input1 = tf.input({shape: [2, 3]});\n * const input2 = tf.input({shape: [2, 4]});\n * const output = concatLayer.apply([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 7], with the first dimension as the undetermined batch\n * // dimension and the last dimension as the result of concatenating the\n * // last dimensions of the two inputs.\n * ```\n *\n * 2. Invoke directly on an `Array` of `tf.SymbolicTensor`s. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.SymbolicTensor`. For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 3]});\n * const input2 = tf.input({shape: [2, 4]});\n * const output = tf.layers.concatenate([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension and the last dimension as the result of concatenating the\n * // last dimensions of the two inputs.\n * ```\n *\n * 3. Invoke directly on `tf.Tensor`s, i.e., concrete values. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.Tensor` as the result of the computation. For\n * example:\n *\n * ```js\n * const input1 = tf.tensor2d([[1, 2], [3, 4]], [2, 2]);\n * const input2 = tf.tensor2d([[10, 20], [30, 40]], [2, 2]);\n * tf.layers.concatenate([input1, input2]).print();\n * // Gives [[1, 2, 10, 20], [3, 4, 30, 40]].\n *\n */\nexport function concatenate(config) {\n if (Array.isArray(config)) {\n const layer = new Concatenate({});\n return layer.apply(config);\n }\n else {\n return new Concatenate(config);\n }\n}\n/**\n * Interpretable potentially negative axis index.\n *\n * For example, given axis = -1, and dim = 3, this function will return 2.\n *\n * @param axis The axis index, may be a positive, zero or negative integer.\n * @param dim Total number of dimensions, a positive integer.\n * @returns A non-negative axis index equivalent to the input `axis`.\n */\nfunction interpretAxis(axis, dim) {\n while (axis < 0) {\n axis += dim;\n }\n return axis;\n}\nfunction batchDot(x, y, axes) {\n if (x.shape.length > 3 || y.shape.length > 3) {\n throw new NotImplementedError('batchDot is not implemented for tensors of 4D or higher rank yet');\n }\n tfc.util.assert(x.shape.length >= 2, () => `batchDot requires the rank of x to be >= 2, ` +\n `but got ${x.shape.length}`);\n tfc.util.assert(x.shape.length >= 2, () => `batchDot requires the rank of y to be >= 2, ` +\n `but got ${y.shape.length}`);\n if (typeof axes === 'number') {\n axes = [axes, axes];\n }\n if (x.dtype === 'complex64' || y.dtype === 'complex64') {\n throw new NotImplementedError('batchDot is not implemented for complex64-type Tensors yet.');\n }\n const xNDim = x.shape.length;\n const yNDim = y.shape.length;\n if (axes == null) {\n // Behave like batchMatmul by default.\n axes = [xNDim - 1, yNDim - 2];\n }\n const axesArray = axes;\n return tfc.tidy(() => {\n let diff;\n if (xNDim > yNDim) {\n diff = xNDim - yNDim;\n const diffShape = [];\n for (let i = 0; i < diff; ++i) {\n diffShape.push(1);\n }\n y = y.reshape(y.shape.concat(diffShape));\n }\n else if (yNDim > xNDim) {\n diff = yNDim - xNDim;\n const diffShape = [];\n for (let i = 0; i < diff; ++i) {\n diffShape.push(1);\n }\n 
x = x.reshape(x.shape.concat(diffShape));\n }\n else {\n diff = 0;\n }\n let out;\n if (x.shape.length === 2 && y.shape.length === 2) {\n if (axesArray[0] === axesArray[1]) {\n out = x.mul(y).sum(axesArray[0]);\n }\n else {\n out = x.transpose([1, 0]).mul(y).sum(axesArray[1]);\n }\n }\n else {\n const adjX = axesArray[0] !== x.shape.length - 1;\n const adjY = axesArray[1] === y.shape.length - 1;\n out = x.matMul(y, adjX, adjY);\n }\n if (diff > 0) {\n let idx;\n if (xNDim > yNDim) {\n idx = xNDim + yNDim - 3;\n }\n else {\n idx = xNDim - 1;\n }\n const squeezeAxes = [];\n for (let i = idx; i < idx + diff; ++i) {\n squeezeAxes.push(i);\n }\n out = out.squeeze(squeezeAxes);\n }\n if (out.shape.length === 1) {\n out = out.expandDims(1);\n }\n return out;\n });\n}\nexport class Dot extends Merge {\n constructor(args) {\n super(args);\n this.axes = args.axes;\n this.normalize = args.normalize == null ? false : args.normalize;\n this.supportsMasking = true;\n this.reshapeRequired = false;\n }\n build(inputShape) {\n tfc.util.assert(Array.isArray(inputShape) && inputShape.length === 2 &&\n Array.isArray(inputShape[0]) && Array.isArray(inputShape[1]), () => 'A `Dot` layer should be called on a list of exactly 2 inputs.');\n const shape1 = inputShape[0];\n const shape2 = inputShape[1];\n if (shape1.length > 3 || shape2.length > 3) {\n throw new NotImplementedError('Dot layer does not support tensors of 4D or higher rank yet.');\n }\n const axes = this.interpretAxes(shape1, shape2);\n if (shape1[axes[0]] !== shape2[axes[1]]) {\n throw new ValueError(`Dimension incompatibility: ` +\n `${shape1[axes[0]]} !== ${shape2[axes[1]]}`);\n }\n }\n mergeFunction(inputs) {\n if (inputs.length !== 2) {\n throw new ValueError('A `Dot` layer must be called on exactly 2 inputs, ' +\n `but received ${inputs.length} input(s).`);\n }\n let x1 = inputs[0];\n let x2 = inputs[1];\n let axes;\n if (!Array.isArray(this.axes)) {\n axes = [\n interpretAxis(this.axes, x1.shape.length),\n interpretAxis(this.axes, x2.shape.length)\n ];\n }\n else {\n axes = this.axes.map((axis, i) => interpretAxis(axis, inputs[i].shape.length));\n }\n if (this.normalize) {\n x1 = l2Normalize(x1, axes[0]);\n x2 = l2Normalize(x2, axes[1]);\n }\n return batchDot(x1, x2, axes);\n }\n interpretAxes(shape1, shape2) {\n let axes;\n if (!Array.isArray(this.axes)) {\n // `this.axes` is a single integer.\n axes = [\n interpretAxis(this.axes, shape1.length),\n interpretAxis(this.axes, shape2.length)\n ];\n }\n else {\n // `this.axes` is an Array of integers.\n axes = this.axes;\n }\n return axes;\n }\n computeOutputShape(inputShape) {\n tfc.util.assert(Array.isArray(inputShape) && inputShape.length === 2 &&\n Array.isArray(inputShape[0]) && Array.isArray(inputShape[1]), () => 'A `Dot` layer should be called on a list of exactly 2 inputs.');\n const shape1 = inputShape[0].slice();\n const shape2 = inputShape[1].slice();\n if (shape1.length > 3 || shape2.length > 3) {\n throw new NotImplementedError('Dot layer does not support tensors of 4D or higher rank yet.');\n }\n const axes = this.interpretAxes(shape1, shape2);\n shape1.splice(axes[0], 1);\n shape2.splice(axes[1], 1);\n shape2.splice(0, 1);\n const outputShape = shape1.concat(shape2);\n if (outputShape.length === 1) {\n outputShape.push(1);\n }\n return outputShape;\n }\n computeMask(inputs, mask) {\n return null;\n }\n getConfig() {\n const config = {\n 'axes': this.axes,\n 'normalize': this.normalize\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n 
}\n}\n/** @nocollapse */\nDot.className = 'Dot';\nserialization.registerClass(Dot);\n// TODO(cais): Add functional interfaces for the merge layers.\n//# sourceMappingURL=merge.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * TensorFlow.js Layers: Noise Layers.\n */\nimport { greaterEqual, randomUniform, serialization, tidy } from '@tensorflow/tfjs-core';\nimport * as K from '../backend/tfjs_backend';\nimport { Layer } from '../engine/topology';\nimport { getExactlyOneTensor } from '../utils/types_utils';\nexport class GaussianNoise extends Layer {\n constructor(args) {\n super(args);\n this.supportsMasking = true;\n this.stddev = args.stddev;\n }\n computeOutputShape(inputShape) {\n return inputShape;\n }\n getConfig() {\n const baseConfig = super.getConfig();\n const config = { stddev: this.stddev };\n Object.assign(config, baseConfig);\n return config;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n const input = getExactlyOneTensor(inputs);\n const noised = () => K.randomNormal(input.shape, 0, this.stddev).add(input);\n const output = K.inTrainPhase(noised, () => input, kwargs['training'] || false);\n return output;\n });\n }\n}\n/** @nocollapse */\nGaussianNoise.className = 'GaussianNoise';\nserialization.registerClass(GaussianNoise);\nexport class GaussianDropout extends Layer {\n constructor(args) {\n super(args);\n this.supportsMasking = true;\n this.rate = args.rate;\n }\n computeOutputShape(inputShape) {\n return inputShape;\n }\n getConfig() {\n const baseConfig = super.getConfig();\n const config = { rate: this.rate };\n Object.assign(config, baseConfig);\n return config;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n const input = getExactlyOneTensor(inputs);\n if (this.rate > 0 && this.rate < 1) {\n const noised = () => {\n const stddev = Math.sqrt(this.rate / (1 - this.rate));\n return input.mul(K.randomNormal(input.shape, 1, stddev));\n };\n return K.inTrainPhase(noised, () => input, kwargs['training'] || false);\n }\n return input;\n });\n }\n}\n/** @nocollapse */\nGaussianDropout.className = 'GaussianDropout';\nserialization.registerClass(GaussianDropout);\n/**\n * Applies Alpha Dropout to the input.\n *\n * As it is a regularization layer, it is only active at training time.\n *\n * Alpha Dropout is a `Dropout` that keeps mean and variance of inputs\n * to their original values, in order to ensure the self-normalizing property\n * even after this dropout.\n * Alpha Dropout fits well to Scaled Exponential Linear Units\n * by randomly setting activations to the negative saturation value.\n *\n * Arguments:\n * - `rate`: float, drop probability (as with `Dropout`).\n * The multiplicative noise will have\n * standard deviation `sqrt(rate / (1 - rate))`.\n * - `noise_shape`: A 1-D `Tensor` of type `int32`, representing the\n * shape for randomly generated keep/drop flags.\n *\n * Input shape:\n * Arbitrary. 
Use the keyword argument `inputShape`\n * (tuple of integers, does not include the samples axis)\n * when using this layer as the first layer in a model.\n *\n * Output shape:\n * Same shape as input.\n *\n * References:\n * - [Self-Normalizing Neural Networks](https://arxiv.org/abs/1706.02515)\n */\nexport class AlphaDropout extends Layer {\n constructor(args) {\n super(args);\n this.supportsMasking = true;\n this.rate = args.rate;\n this.noiseShape = args.noiseShape;\n }\n _getNoiseShape(inputs) {\n return this.noiseShape || getExactlyOneTensor(inputs).shape;\n }\n computeOutputShape(inputShape) {\n return inputShape;\n }\n getConfig() {\n const baseConfig = super.getConfig();\n const config = { rate: this.rate };\n Object.assign(config, baseConfig);\n return config;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n if (this.rate < 1 && this.rate > 0) {\n const noiseShape = this._getNoiseShape(inputs);\n const droppedInputs = () => {\n const input = getExactlyOneTensor(inputs);\n const alpha = 1.6732632423543772848170429916717;\n const scale = 1.0507009873554804934193349852946;\n const alphaP = -alpha * scale;\n let keptIdx = greaterEqual(randomUniform(noiseShape), this.rate);\n keptIdx = K.cast(keptIdx, 'float32'); // get default dtype.\n // Get affine transformation params.\n const a = ((1 - this.rate) * (1 + this.rate * alphaP ** 2)) ** -0.5;\n const b = -a * alphaP * this.rate;\n // Apply mask.\n const x = input.mul(keptIdx).add(keptIdx.add(-1).mul(alphaP));\n return x.mul(a).add(b);\n };\n return K.inTrainPhase(droppedInputs, () => getExactlyOneTensor(inputs), kwargs['training'] || false);\n }\n return inputs;\n });\n }\n}\n/** @nocollapse */\nAlphaDropout.className = 'AlphaDropout';\nserialization.registerClass(AlphaDropout);\n//# sourceMappingURL=noise.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Normalization layers.\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { moments, serialization, tidy, util } from '@tensorflow/tfjs-core';\nimport { getConstraint, serializeConstraint } from '../constraints';\nimport { InputSpec, Layer } from '../engine/topology';\nimport { NotImplementedError, ValueError } from '../errors';\nimport { getInitializer, serializeInitializer } from '../initializers';\nimport { getRegularizer, serializeRegularizer } from '../regularizers';\nimport * as generic_utils from '../utils/generic_utils';\nimport * as math_utils from '../utils/math_utils';\nimport { getExactlyOneShape, getExactlyOneTensor } from '../utils/types_utils';\n/**\n * Applies batch normalization on x given mean, var, beta and gamma.\n *\n * I.e. 
returns:\n * `output = (x - mean) / (sqrt(var) + epsilon) * gamma + beta`\n *\n * @param x Input tensor.\n * @param mean Mean of batch.\n * @param variance Variance of batch.\n * @param beta Tensor with which to center the input.\n * @param gamma Tensor by which to scale the input.\n * @param epsilon Fuzz factor.\n * @returns The result of the batch normalization.\n */\nexport function batchNormalization(x, mean, variance, beta, gamma, epsilon = 1e-3) {\n let out;\n if (x.rank === 2) {\n out = tfc.batchNorm2d(x, mean, variance, beta, gamma, epsilon);\n }\n else if (x.rank === 3) {\n // TODO(cais): Check rank; give proper error message.\n out = tfc.batchNorm3d(x, mean, variance, beta, gamma, epsilon);\n }\n else if (x.rank === 4) {\n out = tfc.batchNorm4d(x, mean, variance, beta, gamma, epsilon);\n }\n else {\n throw new NotImplementedError(`batchNormalization is not implemented for array of rank ${x.rank} ` +\n `yet`);\n }\n return out;\n}\n/**\n * Non-broadcasting batch normalization for use in training (not inference).\n *\n * The input is normalized to zero mean and unit variance along the\n * `reductionAxes`, followed by scaling with `gamma` and shifted by `beta`.\n * The result of that is returned as the first element\n * of the returned `Array`. The other two elements are the mean and variance,\n * respectively.\n *\n * @param x Input tensor to be normalized.\n * @param gamma Tensor by which to scale the input.\n * @param beta Tensor by which to center the input.\n * @param reductionAxes Axes over which to normalize.\n * @param epsilon Fuzz factor.\n * @returns An `Array` of three `Tensors`:\n * [normalized tensor, mean of input, variance of input].\n */\nfunction regularNormalizeBatchInTraining(x, gamma, beta, reductionAxes, epsilon = 1e-3) {\n return tidy(() => {\n const meanAndVariance = tfc.moments(x, reductionAxes);\n const mean = meanAndVariance.mean;\n const variance = meanAndVariance.variance;\n const normed = batchNormalization(x, mean, variance, beta, gamma, epsilon);\n return [normed, mean, variance];\n });\n}\n/**\n * Broadcasting batch normalization for use in training (not inference).\n *\n * The input is normalized to zero mean and unit variance along the\n * `reductionAxes`, followed by scaling with `gamma` and shifted by `beta`.\n * The result of that is returned as the first element\n * of the returned `Array`. The other two elements are the mean and variance,\n * respectively.\n *\n * @param x Input tensor to be normalized.\n * @param gamma Tensor by which to scale the input.\n * @param beta Tensor by which to center the input.\n * @param reductionAxes Axes over which to normalize.\n * @param epsilon Fuzz factor.\n * @returns An `Array` of three `Tensors`:\n * [normalized tensor, mean of input, variance of input].\n */\nfunction broadcastNormalizeBatchInTraining(x, gamma, beta, reductionAxes, epsilon = 1e-3) {\n return tidy(() => {\n const meanAndVariance = tfc.moments(x, reductionAxes);\n const mean = meanAndVariance.mean;\n const variance = meanAndVariance.variance;\n const targetShape = [];\n for (const axis of math_utils.range(0, x.rank)) {\n if (reductionAxes.indexOf(axis) !== -1) {\n targetShape.push(1);\n }\n else {\n targetShape.push(x.shape[axis]);\n }\n }\n const broadcastMean = mean.reshape(targetShape);\n const broadcastVariance = variance.reshape(targetShape);\n const broadcastGamma = gamma == null ? null : gamma.reshape(targetShape);\n const broadcastBeta = beta == null ? 
null : beta.reshape(targetShape);\n const normed = batchNormalization(x, broadcastMean, broadcastVariance, broadcastBeta, broadcastGamma, epsilon);\n return [normed, mean, variance];\n });\n}\n/**\n * Batch normalization for use in training (not inference).\n *\n * @param x Input tensor to be normalized.\n * @param gamma Tensor by which to scale the input.\n * @param beta Tensor by which to center the input.\n * @param reductionAxes Axes over which to normalize.\n * @param epsilon Fuzz factor.\n * @returns An `Array` of three `Tensors`:\n * [normalized tensor, mean of input, variance of input].\n */\nexport function normalizeBatchInTraining(x, gamma, beta, reductionAxes, epsilon = 1e-3) {\n if (util.arraysEqual(reductionAxes.slice().sort(), math_utils.range(0, x.rank - 1))) {\n return regularNormalizeBatchInTraining(x, gamma, beta, reductionAxes, epsilon);\n }\n else {\n return broadcastNormalizeBatchInTraining(x, gamma, beta, reductionAxes, epsilon);\n }\n}\nexport class BatchNormalization extends Layer {\n constructor(args) {\n if (args == null) {\n args = {};\n }\n super(args);\n this.supportsMasking = true;\n this.axis = args.axis == null ? -1 : args.axis;\n this.momentum = args.momentum == null ? 0.99 : args.momentum;\n this.epsilon = args.epsilon == null ? 1e-3 : args.epsilon;\n this.center = args.center == null ? true : args.center;\n this.scale = args.scale == null ? true : args.scale;\n this.betaInitializer = getInitializer(args.betaInitializer || 'zeros');\n this.gammaInitializer = getInitializer(args.gammaInitializer || 'ones');\n this.movingMeanInitializer =\n getInitializer(args.movingMeanInitializer || 'zeros');\n this.movingVarianceInitializer =\n getInitializer(args.movingVarianceInitializer || 'ones');\n this.betaConstraint = getConstraint(args.betaConstraint);\n this.gammaConstraint = getConstraint(args.gammaConstraint);\n this.betaRegularizer = getRegularizer(args.betaRegularizer);\n this.gammaRegularizer = getRegularizer(args.gammaRegularizer);\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const axis = this.axis >= 0 ? this.axis : (this.axis + inputShape.length);\n const dim = inputShape[axis];\n if (dim == null) {\n throw new ValueError(`Axis ${axis} of input tensor should have a defined dimension but ` +\n `the layer received an input with shape ` +\n `${JSON.stringify(inputShape)}.`);\n }\n this.inputSpec =\n [new InputSpec({ ndim: inputShape.length, axes: { [axis]: dim } })];\n const shape = [dim];\n if (this.scale) {\n this.gamma = this.addWeight('gamma', shape, null, this.gammaInitializer, this.gammaRegularizer, true, this.gammaConstraint);\n }\n if (this.center) {\n this.beta = this.addWeight('beta', shape, null, this.betaInitializer, this.betaRegularizer, true, this.betaConstraint);\n }\n this.movingMean = this.addWeight('moving_mean', shape, null, this.movingMeanInitializer, null, false);\n this.movingVariance = this.addWeight('moving_variance', shape, null, this.movingVarianceInitializer, null, false);\n this.built = true;\n }\n call(inputs, kwargs) {\n return tidy(() => {\n const training = kwargs['training'] == null ? false : kwargs['training'];\n const input = getExactlyOneTensor(inputs);\n const inputShape = input.shape;\n const ndim = inputShape.length;\n const reductionAxes = math_utils.range(0, ndim);\n const axis = this.axis >= 0 ? 
this.axis : (this.axis + ndim);\n reductionAxes.splice(axis, 1);\n const broadcastShape = generic_utils.pyListRepeat(1, ndim);\n broadcastShape[axis] = inputShape[axis];\n const sortedReductionAxes = reductionAxes.slice();\n sortedReductionAxes.sort();\n const needsBroadcasting = !util.arraysEqual(sortedReductionAxes, math_utils.range(0, ndim).slice(0, ndim - 1));\n const normalizeInference = () => {\n if (needsBroadcasting) {\n const broadcastMovingMean = this.movingMean.read().reshape(broadcastShape);\n const broadcastMovingVariance = this.movingVariance.read().reshape(broadcastShape);\n const broadcastBeta = this.center ? this.beta.read().reshape(broadcastShape) : null;\n const broadcastGamma = this.scale ? this.gamma.read().reshape(broadcastShape) : null;\n return batchNormalization(input, broadcastMovingMean, broadcastMovingVariance, broadcastBeta, broadcastGamma, this.epsilon);\n }\n else {\n return batchNormalization(input, this.movingMean.read(), this.movingVariance.read(), this.beta == null ? null : this.beta.read(), this.gamma == null ? null : this.gamma.read(), this.epsilon);\n }\n };\n if (!training) {\n return normalizeInference();\n }\n const [normedTraining, mean, variance] = normalizeBatchInTraining(input, this.gamma.read(), this.beta.read(), reductionAxes, this.epsilon);\n const doMovingAverage = (variable, value, momentum) => {\n tfc.tidy(() => {\n const decay = 1 - momentum;\n const origValue = variable.read();\n const updateDelta = origValue.sub(value).mul(decay);\n variable.write(origValue.sub(updateDelta));\n });\n };\n // Perform updates to moving mean and moving variance for training.\n // Porting Note: In PyKeras, these updates to `movingMean` and\n // `movingAverage` are done as a deferred Graph, added to the `Layer`'s\n // `update`s using the `add_update()` method. Here we do it imperatively\n // and encapsulate the updates in a function that is invoked\n // immediately.\n const updateMovingMeanAndVariance = () => {\n doMovingAverage(this.movingMean, mean, this.momentum);\n doMovingAverage(this.movingVariance, variance, this.momentum);\n };\n updateMovingMeanAndVariance();\n return normedTraining;\n });\n }\n getConfig() {\n const config = {\n axis: this.axis,\n momentum: this.momentum,\n epsilon: this.epsilon,\n center: this.center,\n scale: this.scale,\n betaInitializer: serializeInitializer(this.betaInitializer),\n gammaInitializer: serializeInitializer(this.gammaInitializer),\n movingMeanInitializer: serializeInitializer(this.movingMeanInitializer),\n movingVarianceInitializer: serializeInitializer(this.movingVarianceInitializer),\n betaRegularizer: serializeRegularizer(this.betaRegularizer),\n gammaRegularizer: serializeRegularizer(this.gammaRegularizer),\n betaConstraint: serializeConstraint(this.betaConstraint),\n gammaConstraint: serializeConstraint(this.gammaConstraint)\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nBatchNormalization.className = 'BatchNormalization';\nserialization.registerClass(BatchNormalization);\nexport class LayerNormalization extends Layer {\n constructor(args) {\n if (args == null) {\n args = {};\n }\n super(args);\n this.axis = args.axis == null ? 
-1 : args.axis;\n if (typeof this.axis === 'number') {\n if (!Number.isInteger(this.axis)) {\n throw new Error(`Expected axis to be an integer, but received ${this.axis}`);\n }\n }\n else if (Array.isArray(this.axis)) {\n for (const axis of this.axis) {\n if (!Number.isInteger(axis)) {\n throw new Error(`Expected axis to be an array of integers, ` +\n `but received ${JSON.stringify(this.axis)}`);\n }\n }\n }\n else {\n throw new Error(`Expected axis to be an integer or an array of integers, ` +\n `but received ${JSON.stringify(this.axis)}`);\n }\n this.epsilon = args.epsilon == null ? 1e-3 : args.epsilon;\n this.center = args.center == null ? true : args.center;\n this.scale = args.scale == null ? true : args.scale;\n this.betaInitializer = getInitializer(args.betaInitializer || 'zeros');\n this.gammaInitializer = getInitializer(args.gammaInitializer || 'ones');\n this.betaRegularizer = getRegularizer(args.betaRegularizer);\n this.gammaRegularizer = getRegularizer(args.gammaRegularizer);\n this.supportsMasking = true;\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const nDims = inputShape.length;\n // Convert axis to array and resolve negatives.\n if (typeof this.axis === 'number') {\n this.axis = [this.axis];\n }\n for (let i = 0; i < this.axis.length; ++i) {\n if (this.axis[i] < 0) {\n this.axis[i] += nDims;\n }\n }\n // Further validate axes.\n for (const axis of this.axis) {\n if (axis < 0 || axis >= nDims) {\n throw new Error(`Invalid axis: ${axis}`);\n }\n }\n if (this.axis.length !== generic_utils.unique(this.axis).length) {\n throw new Error(`Found duplicate axes in: ${this.axis}`);\n }\n const paramShape = this.axis.map(axis => inputShape[axis]);\n const trainable = true;\n if (this.scale) {\n this.gamma = this.addWeight('gamma', paramShape, 'float32', this.gammaInitializer, this.gammaRegularizer, trainable);\n }\n else {\n this.gamma = null;\n }\n if (this.center) {\n this.beta = this.addWeight('beta', paramShape, 'float32', this.betaInitializer, this.betaRegularizer, trainable);\n }\n else {\n this.beta = null;\n }\n this.built = true;\n }\n call(inputs, kwargs) {\n const input = getExactlyOneTensor(inputs);\n const inputShape = input.shape;\n const nDims = inputShape.length;\n return tidy(() => {\n const keepDims = true;\n let { mean, variance } = moments(input, this.axis, keepDims);\n const broadcastShape = generic_utils.pyListRepeat(1, nDims);\n for (const dim of this.axis) {\n broadcastShape[dim] = inputShape[dim];\n }\n const broadcast = (v) => {\n if (v != null && v.shape.length !== nDims &&\n this.axis !== [nDims - 1]) {\n return v.reshape(broadcastShape);\n }\n else {\n return v;\n }\n };\n let scale = broadcast(this.gamma.read());\n let offset = broadcast(this.beta.read());\n // TODO(https://github.com/tensorflow/tfjs/issues/2120): The tiling below\n // is a workaround for the limitation of core's batchNormalization?d don't\n // support broadcasting in their gradients. In addition, the tiling is\n // necessary to ensure correctness on the browser CPU backend regardless\n // of forward or backward computation. Remove this workaround once the\n // limitation is addressed. 
See .\n const momentsTiling = [];\n const scaleOffsetTiling = [];\n for (let i = 0; i < nDims; ++i) {\n if (this.axis.indexOf(i) !== -1) {\n momentsTiling.push(inputShape[i]);\n scaleOffsetTiling.push(1);\n }\n else {\n momentsTiling.push(1);\n scaleOffsetTiling.push(inputShape[i]);\n }\n }\n mean = mean.tile(momentsTiling);\n variance = variance.tile(momentsTiling);\n scale = scale.tile(scaleOffsetTiling);\n offset = offset.tile(scaleOffsetTiling);\n return batchNormalization(input, mean, variance, offset, scale, this.epsilon);\n });\n }\n getConfig() {\n const config = {\n axis: this.axis,\n epsilon: this.epsilon,\n center: this.center,\n scale: this.scale,\n betaInitializer: serializeInitializer(this.betaInitializer),\n gammaInitializer: serializeInitializer(this.gammaInitializer),\n betaRegularizer: serializeRegularizer(this.betaRegularizer),\n gammaRegularizer: serializeRegularizer(this.gammaRegularizer)\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nLayerNormalization.className = 'LayerNormalization';\nserialization.registerClass(LayerNormalization);\n//# sourceMappingURL=normalization.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Padding Layers.\n */\n// Porting Note: In Python Keras, the padding layers are in convolutional.py,\n// but we decided to put them in a separate file (padding.ts) for clarity.\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { serialization, tidy } from '@tensorflow/tfjs-core';\nimport { imageDataFormat } from '../backend/common';\nimport { InputSpec, Layer } from '../engine/topology';\nimport { ValueError } from '../errors';\nimport { getExactlyOneShape, getExactlyOneTensor } from '../utils/types_utils';\n/**\n * Pads the middle dimension of a 3D tensor.\n *\n * @param x Input `tf.Tensor` to be padded.\n * @param padding `Array` of 2 integers, how many zeros to add at the start and\n * end of the middle dimension (i.e., dimension 1).\n * @return A padded 3D `tf.Tensor`.\n */\nexport function temporalPadding(x, padding) {\n return tidy(() => {\n if (x.rank !== 3) {\n throw new ValueError(`temporalPadding expects input tensor to be 3-D, but received a ` +\n `${x.rank}-D tensor.`);\n }\n if (padding == null) {\n padding = [1, 1];\n }\n if (padding.length !== 2) {\n throw new ValueError(`temporalPadding expects input padding pattern to be a length-2 ` +\n `array, but received a length-${padding.length} array.`);\n }\n const pattern = [[0, 0], padding, [0, 0]];\n return tfc.pad(x, pattern);\n });\n}\n/**\n * Pads the 2nd and 3rd dimensions of a 4D tensor.\n *\n * @param x Input `tf.Tensor` to be padded.\n * @param padding `Array` of two `Array`s, each of which is an `Array` of two\n * integers. 
The amount of padding at the beginning and end of the 2nd and 3rd\n * dimensions, respectively.\n * @param dataFormat 'channelsLast' (default) or 'channelsFirst'.\n * @return Padded 4D `tf.Tensor`.\n */\nexport function spatial2dPadding(x, padding, dataFormat) {\n return tidy(() => {\n if (x.rank !== 4) {\n throw new ValueError(`temporalPadding expects input tensor to be 4-D, but received a ` +\n `${x.rank}-D tensor.`);\n }\n if (padding == null) {\n padding = [[1, 1], [1, 1]];\n }\n if (padding.length !== 2 || padding[0].length !== 2 ||\n padding[1].length !== 2) {\n throw new ValueError('spatial2dPadding expects `padding` to be an Array of two Arrays, ' +\n 'each of which is an Array of two integers.');\n }\n if (dataFormat == null) {\n dataFormat = imageDataFormat();\n }\n if (dataFormat !== 'channelsLast' && dataFormat !== 'channelsFirst') {\n throw new ValueError(`Unknown data format: ${dataFormat}. ` +\n `Supported data formats are 'channelsLast' and 'channelsFirst.`);\n }\n let pattern;\n if (dataFormat === 'channelsFirst') {\n pattern = [[0, 0], [0, 0], padding[0], padding[1]];\n }\n else {\n pattern = [[0, 0], padding[0], padding[1], [0, 0]];\n }\n return tfc.pad(x, pattern);\n });\n}\nexport class ZeroPadding2D extends Layer {\n constructor(args) {\n if (args == null) {\n args = {};\n }\n super(args);\n this.dataFormat =\n args.dataFormat == null ? imageDataFormat() : args.dataFormat;\n // TODO(cais): Maybe refactor the following logic surrounding `padding`\n // into a helper method.\n if (args.padding == null) {\n this.padding = [[1, 1], [1, 1]];\n }\n else if (typeof args.padding === 'number') {\n this.padding =\n [[args.padding, args.padding], [args.padding, args.padding]];\n }\n else {\n args.padding = args.padding;\n if (args.padding.length !== 2) {\n throw new ValueError(`ZeroPadding2D expects padding to be a length-2 array, but ` +\n `received a length-${args.padding.length} array.`);\n }\n let heightPadding;\n let widthPadding;\n if (typeof args.padding[0] === 'number') {\n heightPadding = [args.padding[0], args.padding[0]];\n widthPadding = [args.padding[1], args.padding[1]];\n }\n else {\n args.padding = args.padding;\n if (args.padding[0].length !== 2) {\n throw new ValueError(`ZeroPadding2D expects height padding to be a length-2 array, ` +\n `but received a length-${args.padding[0].length} array.`);\n }\n heightPadding = args.padding[0];\n if (args.padding[1].length !== 2) {\n throw new ValueError(`ZeroPadding2D expects width padding to be a length-2 array, ` +\n `but received a length-${args.padding[1].length} array.`);\n }\n widthPadding = args.padding[1];\n }\n this.padding = [heightPadding, widthPadding];\n }\n this.inputSpec = [new InputSpec({ ndim: 4 })];\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n let rows;\n let cols;\n if (this.dataFormat === 'channelsFirst') {\n if (inputShape[2] != null && inputShape[2] >= 0) {\n rows = inputShape[2] + this.padding[0][0] + this.padding[0][1];\n }\n else {\n rows = null;\n }\n if (inputShape[3] != null && inputShape[3] >= 0) {\n cols = inputShape[3] + this.padding[1][0] + this.padding[1][1];\n }\n else {\n cols = null;\n }\n return [inputShape[0], inputShape[1], rows, cols];\n }\n else {\n if (inputShape[1] != null && inputShape[1] >= 0) {\n rows = inputShape[1] + this.padding[0][0] + this.padding[0][1];\n }\n else {\n rows = null;\n }\n if (inputShape[2] != null && inputShape[2] >= 0) {\n cols = inputShape[2] + this.padding[1][0] + this.padding[1][1];\n }\n else {\n cols = 
null;\n }\n return [inputShape[0], rows, cols, inputShape[3]];\n }\n }\n call(inputs, kwargs) {\n return tidy(() => spatial2dPadding(getExactlyOneTensor(inputs), this.padding, this.dataFormat));\n }\n getConfig() {\n const config = {\n padding: this.padding,\n dataFormat: this.dataFormat,\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n/** @nocollapse */\nZeroPadding2D.className = 'ZeroPadding2D';\nserialization.registerClass(ZeroPadding2D);\n//# sourceMappingURL=padding.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * TensorFlow.js Layers: Pooling Layers.\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { serialization, tidy } from '@tensorflow/tfjs-core';\nimport { imageDataFormat } from '../backend/common';\nimport * as K from '../backend/tfjs_backend';\nimport { checkDataFormat, checkPaddingMode, checkPoolMode } from '../common';\nimport { InputSpec } from '../engine/topology';\nimport { Layer } from '../engine/topology';\nimport { NotImplementedError, ValueError } from '../errors';\nimport { convOutputLength } from '../utils/conv_utils';\nimport { assertPositiveInteger } from '../utils/generic_utils';\nimport { getExactlyOneShape, getExactlyOneTensor } from '../utils/types_utils';\nimport { preprocessConv2DInput, preprocessConv3DInput } from './convolutional';\n/**\n * 2D pooling.\n * @param x\n * @param poolSize\n * @param strides strides. Defaults to [1, 1].\n * @param padding padding. Defaults to 'valid'.\n * @param dataFormat data format. Defaults to 'channelsLast'.\n * @param poolMode Mode of pooling. Defaults to 'max'.\n * @returns Result of the 2D pooling.\n */\nexport function pool2d(x, poolSize, strides, padding, dataFormat, poolMode) {\n return tidy(() => {\n checkDataFormat(dataFormat);\n checkPoolMode(poolMode);\n checkPaddingMode(padding);\n if (strides == null) {\n strides = [1, 1];\n }\n if (padding == null) {\n padding = 'valid';\n }\n if (dataFormat == null) {\n dataFormat = imageDataFormat();\n }\n if (poolMode == null) {\n poolMode = 'max';\n }\n // TODO(cais): Remove the preprocessing step once deeplearn.js supports\n // dataFormat as an input argument.\n x = preprocessConv2DInput(x, dataFormat); // x is NHWC after preprocessing.\n let y;\n const paddingString = (padding === 'same') ? 'same' : 'valid';\n if (poolMode === 'max') {\n // TODO(cais): Rank check?\n y = tfc.maxPool(x, poolSize, strides, paddingString);\n }\n else { // 'avg'\n // TODO(cais): Check the dtype and rank of x and give clear error message\n // if those are incorrect.\n y = tfc.avgPool(\n // TODO(cais): Rank check?\n x, poolSize, strides, paddingString);\n }\n if (dataFormat === 'channelsFirst') {\n y = tfc.transpose(y, [0, 3, 1, 2]); // NHWC -> NCHW.\n }\n return y;\n });\n}\n/**\n * 3D pooling.\n * @param x\n * @param poolSize Pool size. Defaults to [1, 1, 1].\n * @param strides strides. Defaults to [1, 1, 1].\n * @param padding padding. Defaults to 'valid'.\n * @param dataFormat data format. Defaults to 'channelsLast'.\n * @param poolMode Mode of pooling. 
Defaults to 'max'.\n * @returns Result of the 3D pooling.\n */\nexport function pool3d(x, poolSize, strides, padding, dataFormat, poolMode) {\n return tidy(() => {\n checkDataFormat(dataFormat);\n checkPoolMode(poolMode);\n checkPaddingMode(padding);\n if (strides == null) {\n strides = [1, 1, 1];\n }\n if (padding == null) {\n padding = 'valid';\n }\n if (dataFormat == null) {\n dataFormat = imageDataFormat();\n }\n if (poolMode == null) {\n poolMode = 'max';\n }\n // x is NDHWC after preprocessing.\n x = preprocessConv3DInput(x, dataFormat);\n let y;\n const paddingString = (padding === 'same') ? 'same' : 'valid';\n if (poolMode === 'max') {\n y = tfc.maxPool3d(x, poolSize, strides, paddingString);\n }\n else { // 'avg'\n y = tfc.avgPool3d(x, poolSize, strides, paddingString);\n }\n if (dataFormat === 'channelsFirst') {\n y = tfc.transpose(y, [0, 4, 1, 2, 3]); // NDHWC -> NCDHW.\n }\n return y;\n });\n}\n/**\n * Abstract class for different pooling 1D layers.\n */\nexport class Pooling1D extends Layer {\n /**\n *\n * @param args Parameters for the Pooling layer.\n *\n * config.poolSize defaults to 2.\n */\n constructor(args) {\n if (args.poolSize == null) {\n args.poolSize = 2;\n }\n super(args);\n if (typeof args.poolSize === 'number') {\n this.poolSize = [args.poolSize];\n }\n else if (Array.isArray(args.poolSize) &&\n args.poolSize.length === 1 &&\n typeof args.poolSize[0] === 'number') {\n this.poolSize = args.poolSize;\n }\n else {\n throw new ValueError(`poolSize for 1D convolutional layer must be a number or an ` +\n `Array of a single number, but received ` +\n `${JSON.stringify(args.poolSize)}`);\n }\n assertPositiveInteger(this.poolSize, 'poolSize');\n if (args.strides == null) {\n this.strides = this.poolSize;\n }\n else {\n if (typeof args.strides === 'number') {\n this.strides = [args.strides];\n }\n else if (Array.isArray(args.strides) &&\n args.strides.length === 1 &&\n typeof args.strides[0] === 'number') {\n this.strides = args.strides;\n }\n else {\n throw new ValueError(`strides for 1D convolutional layer must be a number or an ` +\n `Array of a single number, but received ` +\n `${JSON.stringify(args.strides)}`);\n }\n }\n assertPositiveInteger(this.strides, 'strides');\n this.padding = args.padding == null ? 
'valid' : args.padding;\n checkPaddingMode(this.padding);\n this.inputSpec = [new InputSpec({ ndim: 3 })];\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const length = convOutputLength(inputShape[1], this.poolSize[0], this.padding, this.strides[0]);\n return [inputShape[0], length, inputShape[2]];\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n // Add dummy last dimension.\n inputs = K.expandDims(getExactlyOneTensor(inputs), 2);\n const output = this.poolingFunction(getExactlyOneTensor(inputs), [this.poolSize[0], 1], [this.strides[0], 1], this.padding, 'channelsLast');\n // Remove dummy last dimension.\n return tfc.squeeze(output, [2]);\n });\n }\n getConfig() {\n const config = {\n poolSize: this.poolSize,\n padding: this.padding,\n strides: this.strides,\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\nexport class MaxPooling1D extends Pooling1D {\n constructor(args) {\n super(args);\n }\n poolingFunction(inputs, poolSize, strides, padding, dataFormat) {\n checkDataFormat(dataFormat);\n checkPaddingMode(padding);\n return pool2d(inputs, poolSize, strides, padding, dataFormat, 'max');\n }\n}\n/** @nocollapse */\nMaxPooling1D.className = 'MaxPooling1D';\nserialization.registerClass(MaxPooling1D);\nexport class AveragePooling1D extends Pooling1D {\n constructor(args) {\n super(args);\n }\n poolingFunction(inputs, poolSize, strides, padding, dataFormat) {\n checkDataFormat(dataFormat);\n checkPaddingMode(padding);\n return pool2d(inputs, poolSize, strides, padding, dataFormat, 'avg');\n }\n}\n/** @nocollapse */\nAveragePooling1D.className = 'AveragePooling1D';\nserialization.registerClass(AveragePooling1D);\n/**\n * Abstract class for different pooling 2D layers.\n */\nexport class Pooling2D extends Layer {\n constructor(args) {\n if (args.poolSize == null) {\n args.poolSize = [2, 2];\n }\n super(args);\n this.poolSize = Array.isArray(args.poolSize) ?\n args.poolSize :\n [args.poolSize, args.poolSize];\n if (args.strides == null) {\n this.strides = this.poolSize;\n }\n else if (Array.isArray(args.strides)) {\n if (args.strides.length !== 2) {\n throw new ValueError(`If the strides property of a 2D pooling layer is an Array, ` +\n `it is expected to have a length of 2, but received length ` +\n `${args.strides.length}.`);\n }\n this.strides = args.strides;\n }\n else {\n // `config.strides` is a number.\n this.strides = [args.strides, args.strides];\n }\n assertPositiveInteger(this.poolSize, 'poolSize');\n assertPositiveInteger(this.strides, 'strides');\n this.padding = args.padding == null ? 'valid' : args.padding;\n this.dataFormat =\n args.dataFormat == null ? 'channelsLast' : args.dataFormat;\n checkDataFormat(this.dataFormat);\n checkPaddingMode(this.padding);\n this.inputSpec = [new InputSpec({ ndim: 4 })];\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n let rows = this.dataFormat === 'channelsFirst' ? inputShape[2] : inputShape[1];\n let cols = this.dataFormat === 'channelsFirst' ? 
inputShape[3] : inputShape[2];\n rows =\n convOutputLength(rows, this.poolSize[0], this.padding, this.strides[0]);\n cols =\n convOutputLength(cols, this.poolSize[1], this.padding, this.strides[1]);\n if (this.dataFormat === 'channelsFirst') {\n return [inputShape[0], inputShape[1], rows, cols];\n }\n else {\n return [inputShape[0], rows, cols, inputShape[3]];\n }\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n return this.poolingFunction(getExactlyOneTensor(inputs), this.poolSize, this.strides, this.padding, this.dataFormat);\n });\n }\n getConfig() {\n const config = {\n poolSize: this.poolSize,\n padding: this.padding,\n strides: this.strides,\n dataFormat: this.dataFormat\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\nexport class MaxPooling2D extends Pooling2D {\n constructor(args) {\n super(args);\n }\n poolingFunction(inputs, poolSize, strides, padding, dataFormat) {\n checkDataFormat(dataFormat);\n checkPaddingMode(padding);\n return pool2d(inputs, poolSize, strides, padding, dataFormat, 'max');\n }\n}\n/** @nocollapse */\nMaxPooling2D.className = 'MaxPooling2D';\nserialization.registerClass(MaxPooling2D);\nexport class AveragePooling2D extends Pooling2D {\n constructor(args) {\n super(args);\n }\n poolingFunction(inputs, poolSize, strides, padding, dataFormat) {\n checkDataFormat(dataFormat);\n checkPaddingMode(padding);\n return pool2d(inputs, poolSize, strides, padding, dataFormat, 'avg');\n }\n}\n/** @nocollapse */\nAveragePooling2D.className = 'AveragePooling2D';\nserialization.registerClass(AveragePooling2D);\n/**\n * Abstract class for different pooling 3D layers.\n */\nexport class Pooling3D extends Layer {\n constructor(args) {\n if (args.poolSize == null) {\n args.poolSize = [2, 2, 2];\n }\n super(args);\n this.poolSize = Array.isArray(args.poolSize) ?\n args.poolSize :\n [args.poolSize, args.poolSize, args.poolSize];\n if (args.strides == null) {\n this.strides = this.poolSize;\n }\n else if (Array.isArray(args.strides)) {\n if (args.strides.length !== 3) {\n throw new ValueError(`If the strides property of a 3D pooling layer is an Array, ` +\n `it is expected to have a length of 3, but received length ` +\n `${args.strides.length}.`);\n }\n this.strides = args.strides;\n }\n else {\n // `config.strides` is a number.\n this.strides = [args.strides, args.strides, args.strides];\n }\n assertPositiveInteger(this.poolSize, 'poolSize');\n assertPositiveInteger(this.strides, 'strides');\n this.padding = args.padding == null ? 'valid' : args.padding;\n this.dataFormat =\n args.dataFormat == null ? 'channelsLast' : args.dataFormat;\n checkDataFormat(this.dataFormat);\n checkPaddingMode(this.padding);\n this.inputSpec = [new InputSpec({ ndim: 5 })];\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n let depths = this.dataFormat === 'channelsFirst' ? inputShape[2] : inputShape[1];\n let rows = this.dataFormat === 'channelsFirst' ? inputShape[3] : inputShape[2];\n let cols = this.dataFormat === 'channelsFirst' ? 
inputShape[4] : inputShape[3];\n depths = convOutputLength(depths, this.poolSize[0], this.padding, this.strides[0]);\n rows =\n convOutputLength(rows, this.poolSize[1], this.padding, this.strides[1]);\n cols =\n convOutputLength(cols, this.poolSize[2], this.padding, this.strides[2]);\n if (this.dataFormat === 'channelsFirst') {\n return [inputShape[0], inputShape[1], depths, rows, cols];\n }\n else {\n return [inputShape[0], depths, rows, cols, inputShape[4]];\n }\n }\n call(inputs, kwargs) {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n return this.poolingFunction(getExactlyOneTensor(inputs), this.poolSize, this.strides, this.padding, this.dataFormat);\n });\n }\n getConfig() {\n const config = {\n poolSize: this.poolSize,\n padding: this.padding,\n strides: this.strides,\n dataFormat: this.dataFormat\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\nexport class MaxPooling3D extends Pooling3D {\n constructor(args) {\n super(args);\n }\n poolingFunction(inputs, poolSize, strides, padding, dataFormat) {\n checkDataFormat(dataFormat);\n checkPaddingMode(padding);\n return pool3d(inputs, poolSize, strides, padding, dataFormat, 'max');\n }\n}\n/** @nocollapse */\nMaxPooling3D.className = 'MaxPooling3D';\nserialization.registerClass(MaxPooling3D);\nexport class AveragePooling3D extends Pooling3D {\n constructor(args) {\n super(args);\n }\n poolingFunction(inputs, poolSize, strides, padding, dataFormat) {\n checkDataFormat(dataFormat);\n checkPaddingMode(padding);\n return pool3d(inputs, poolSize, strides, padding, dataFormat, 'avg');\n }\n}\n/** @nocollapse */\nAveragePooling3D.className = 'AveragePooling3D';\nserialization.registerClass(AveragePooling3D);\n/**\n * Abstract class for different global pooling 1D layers.\n */\nexport class GlobalPooling1D extends Layer {\n constructor(args) {\n super(args);\n this.inputSpec = [new InputSpec({ ndim: 3 })];\n }\n computeOutputShape(inputShape) {\n return [inputShape[0], inputShape[2]];\n }\n call(inputs, kwargs) {\n throw new NotImplementedError();\n }\n}\nexport class GlobalAveragePooling1D extends GlobalPooling1D {\n constructor(args) {\n super(args || {});\n }\n call(inputs, kwargs) {\n return tidy(() => {\n const input = getExactlyOneTensor(inputs);\n return tfc.mean(input, 1);\n });\n }\n}\n/** @nocollapse */\nGlobalAveragePooling1D.className = 'GlobalAveragePooling1D';\nserialization.registerClass(GlobalAveragePooling1D);\nexport class GlobalMaxPooling1D extends GlobalPooling1D {\n constructor(args) {\n super(args || {});\n }\n call(inputs, kwargs) {\n return tidy(() => {\n const input = getExactlyOneTensor(inputs);\n return tfc.max(input, 1);\n });\n }\n}\n/** @nocollapse */\nGlobalMaxPooling1D.className = 'GlobalMaxPooling1D';\nserialization.registerClass(GlobalMaxPooling1D);\n/**\n * Abstract class for different global pooling 2D layers.\n */\nexport class GlobalPooling2D extends Layer {\n constructor(args) {\n super(args);\n this.dataFormat =\n args.dataFormat == null ? 
'channelsLast' : args.dataFormat;\n checkDataFormat(this.dataFormat);\n this.inputSpec = [new InputSpec({ ndim: 4 })];\n }\n computeOutputShape(inputShape) {\n inputShape = inputShape;\n if (this.dataFormat === 'channelsLast') {\n return [inputShape[0], inputShape[3]];\n }\n else {\n return [inputShape[0], inputShape[1]];\n }\n }\n call(inputs, kwargs) {\n throw new NotImplementedError();\n }\n getConfig() {\n const config = { dataFormat: this.dataFormat };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\nexport class GlobalAveragePooling2D extends GlobalPooling2D {\n call(inputs, kwargs) {\n return tidy(() => {\n const input = getExactlyOneTensor(inputs);\n if (this.dataFormat === 'channelsLast') {\n return tfc.mean(input, [1, 2]);\n }\n else {\n return tfc.mean(input, [2, 3]);\n }\n });\n }\n}\n/** @nocollapse */\nGlobalAveragePooling2D.className = 'GlobalAveragePooling2D';\nserialization.registerClass(GlobalAveragePooling2D);\nexport class GlobalMaxPooling2D extends GlobalPooling2D {\n call(inputs, kwargs) {\n return tidy(() => {\n const input = getExactlyOneTensor(inputs);\n if (this.dataFormat === 'channelsLast') {\n return tfc.max(input, [1, 2]);\n }\n else {\n return tfc.max(input, [2, 3]);\n }\n });\n }\n}\n/** @nocollapse */\nGlobalMaxPooling2D.className = 'GlobalMaxPooling2D';\nserialization.registerClass(GlobalMaxPooling2D);\n//# sourceMappingURL=pooling.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/**\n * Layers that augment the functionality of a base layer.\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { serialization, tidy } from '@tensorflow/tfjs-core';\nimport * as K from '../backend/tfjs_backend';\nimport { nameScope } from '../common';\nimport { InputSpec, Layer, SymbolicTensor } from '../engine/topology';\nimport { NotImplementedError, ValueError } from '../errors';\nimport { VALID_BIDIRECTIONAL_MERGE_MODES } from '../keras_format/common';\nimport * as generic_utils from '../utils/generic_utils';\nimport { getExactlyOneShape, getExactlyOneTensor } from '../utils/types_utils';\nimport { rnn, standardizeArgs } from './recurrent';\nimport { deserialize } from './serialization';\n/**\n * Abstract wrapper base class.\n *\n * Wrappers take another layer and augment it in various ways.\n * Do not use this class as a layer, it is only an abstract base class.\n * Two usable wrappers are the `TimeDistributed` and `Bidirectional` wrappers.\n */\nexport class Wrapper extends Layer {\n constructor(args) {\n // Porting Note: In PyKeras, `self.layer` is set prior to the calling\n // `super()`. But we can't do that here due to TypeScript's restriction.\n // See: https://github.com/Microsoft/TypeScript/issues/8277\n // As a result, we have to add checks in `get trainable()` and\n // `set trainable()` below in order to prevent using `this.layer` when\n // its value is `undefined`. 
The super constructor does use the getter\n // and the setter of `this.layer`.\n super(args);\n this.layer = args.layer;\n }\n build(inputShape) {\n this.built = true;\n }\n // TODO(cais): Implement activityRegularizer getter.\n get trainable() {\n // Porting Note: the check of `this.layer` here is necessary due to the\n // way the `constructor` of this class is written (see Porting Note\n // above).\n if (this.layer != null) {\n return this.layer.trainable;\n }\n else {\n return false;\n }\n }\n set trainable(value) {\n // Porting Note: the check of `this.layer` here is necessary due to the\n // way the `constructor` of this class is written (see Porting Note\n // above).\n if (this.layer != null) {\n this.layer.trainable = value;\n }\n }\n get trainableWeights() {\n return this.layer.trainableWeights;\n }\n // TODO(cais): Implement setter for trainableWeights.\n get nonTrainableWeights() {\n return this.layer.nonTrainableWeights;\n }\n // TODO(cais): Implement setter for nonTrainableWeights.\n get updates() {\n // tslint:disable-next-line:no-any\n return this.layer._updates;\n }\n // TODO(cais): Implement getUpdatesFor().\n get losses() {\n return this.layer.losses;\n }\n // TODO(cais): Implement getLossesFor().\n getWeights() {\n return this.layer.getWeights();\n }\n setWeights(weights) {\n this.layer.setWeights(weights);\n }\n getConfig() {\n const config = {\n 'layer': {\n 'className': this.layer.getClassName(),\n 'config': this.layer.getConfig(),\n }\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n setFastWeightInitDuringBuild(value) {\n super.setFastWeightInitDuringBuild(value);\n if (this.layer != null) {\n this.layer.setFastWeightInitDuringBuild(value);\n }\n }\n /** @nocollapse */\n static fromConfig(cls, config, customObjects = {}) {\n const layerConfig = config['layer'];\n const layer = deserialize(layerConfig, customObjects);\n delete config['layer'];\n const newConfig = { layer };\n Object.assign(newConfig, config);\n return new cls(newConfig);\n }\n}\nexport class TimeDistributed extends Wrapper {\n constructor(args) {\n super(args);\n this.supportsMasking = true;\n }\n build(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n if (inputShape.length < 3) {\n throw new ValueError(`TimeDistributed layer expects an input shape >= 3D, but received ` +\n `input shape ${JSON.stringify(inputShape)}`);\n }\n this.inputSpec = [{ shape: inputShape }];\n const childInputShape = [inputShape[0]].concat(inputShape.slice(2));\n if (!this.layer.built) {\n this.layer.build(childInputShape);\n this.layer.built = true;\n }\n super.build(inputShape);\n }\n computeOutputShape(inputShape) {\n inputShape = getExactlyOneShape(inputShape);\n const childInputShape = [inputShape[0]].concat(inputShape.slice(2));\n const childOutputShape = this.layer.computeOutputShape(childInputShape);\n const timesteps = inputShape[1];\n return [childOutputShape[0], timesteps].concat(childOutputShape.slice(1));\n }\n call(inputs, kwargs) {\n return tidy(() => {\n // TODO(cais): Add 'training' and 'useLearningPhase' to kwargs.\n inputs = getExactlyOneTensor(inputs);\n // Porting Note: In tfjs-layers, `inputs` are always concrete tensor\n // values. 
Hence the inputs can't have an undetermined first (batch)\n // dimension, which is why we always use the K.rnn approach here.\n const step = (inputs, states) => {\n // TODO(cais): Add useLearningPhase.\n // NOTE(cais): `layer.call` may return a length-1 array of Tensor in\n // some cases (e.g., `layer` is a `Sequential` instance), which is\n // why `getExactlyOneTensor` is used below.\n const output = getExactlyOneTensor(this.layer.call(inputs, kwargs));\n return [output, []];\n };\n const rnnOutputs = rnn(step, inputs, [], false /* goBackwards */, null /* mask */, null /* constants */, false /* unroll */, true /* needPerStepOutputs */);\n const y = rnnOutputs[1];\n // TODO(cais): Add activity regularization.\n // TODO(cais): Add useLearningPhase.\n return y;\n });\n }\n}\n/** @nocollapse */\nTimeDistributed.className = 'TimeDistributed';\nserialization.registerClass(TimeDistributed);\nexport function checkBidirectionalMergeMode(value) {\n generic_utils.checkStringTypeUnionValue(VALID_BIDIRECTIONAL_MERGE_MODES, 'BidirectionalMergeMode', value);\n}\nconst DEFAULT_BIDIRECTIONAL_MERGE_MODE = 'concat';\nexport class Bidirectional extends Wrapper {\n constructor(args) {\n super(args);\n // Note: When creating `this.forwardLayer`, the original Layer object\n // (`config.layer`) ought to be cloned. This is why we call\n // `getConfig()` followed by `deserialize()`. Without this cloning,\n // the layer names saved during serialization will incorrectly contain\n // the 'forward_' prefix. In Python Keras, this is done using\n // `copy.copy` (shallow copy), which does not have a simple equivalent\n // in JavaScript. JavaScript's `Object.assign()` does not copy\n // methods.\n const layerConfig = args.layer.getConfig();\n const forwDict = {};\n forwDict['className'] = args.layer.getClassName();\n forwDict['config'] = layerConfig;\n this.forwardLayer = deserialize(forwDict);\n layerConfig['goBackwards'] =\n layerConfig['goBackwards'] === true ? 
false : true;\n const backDict = {};\n backDict['className'] = args.layer.getClassName();\n backDict['config'] = layerConfig;\n this.backwardLayer = deserialize(backDict);\n this.forwardLayer.name = 'forward_' + this.forwardLayer.name;\n this.backwardLayer.name = 'backward_' + this.backwardLayer.name;\n this.mergeMode = args.mergeMode === undefined ?\n DEFAULT_BIDIRECTIONAL_MERGE_MODE :\n args.mergeMode;\n checkBidirectionalMergeMode(this.mergeMode);\n if (args.weights) {\n throw new NotImplementedError('weights support is not implemented for Bidirectional layer yet.');\n }\n this._stateful = args.layer.stateful;\n this.returnSequences = args.layer.returnSequences;\n this.returnState = args.layer.returnState;\n this.supportsMasking = true;\n this._trainable = true;\n this.inputSpec = args.layer.inputSpec;\n this.numConstants = null;\n }\n get trainable() {\n return this._trainable;\n }\n set trainable(value) {\n // Porting Note: the check of `this.layer` here is necessary due to the\n // way the `constructor` of this class is written (see Porting Note\n // above).\n this._trainable = value;\n if (this.forwardLayer != null) {\n this.forwardLayer.trainable = value;\n }\n if (this.backwardLayer != null) {\n this.backwardLayer.trainable = value;\n }\n }\n getWeights() {\n return this.forwardLayer.getWeights().concat(this.backwardLayer.getWeights());\n }\n setWeights(weights) {\n const numWeights = weights.length;\n const numeightsOver2 = Math.floor(numWeights / 2);\n this.forwardLayer.setWeights(weights.slice(0, numeightsOver2));\n this.backwardLayer.setWeights(weights.slice(numeightsOver2));\n }\n computeOutputShape(inputShape) {\n let layerShapes = this.forwardLayer.computeOutputShape(inputShape);\n if (!(Array.isArray(layerShapes) && Array.isArray(layerShapes[0]))) {\n layerShapes = [layerShapes];\n }\n layerShapes = layerShapes;\n let outputShape;\n let outputShapes;\n let stateShape;\n if (this.returnState) {\n stateShape = layerShapes.slice(1);\n outputShape = layerShapes[0];\n }\n else {\n outputShape = layerShapes[0];\n }\n outputShape = outputShape;\n if (this.mergeMode === 'concat') {\n outputShape[outputShape.length - 1] *= 2;\n outputShapes = [outputShape];\n }\n else if (this.mergeMode == null) {\n outputShapes = [outputShape, outputShape.slice()];\n }\n else {\n outputShapes = [outputShape];\n }\n if (this.returnState) {\n if (this.mergeMode == null) {\n return outputShapes.concat(stateShape).concat(stateShape.slice());\n }\n return [outputShape].concat(stateShape).concat(stateShape.slice());\n }\n return generic_utils.singletonOrArray(outputShapes);\n }\n apply(inputs, kwargs) {\n let initialState = kwargs == null ? null : kwargs['initialState'];\n let constants = kwargs == null ? 
null : kwargs['constants'];\n if (kwargs == null) {\n kwargs = {};\n }\n const standardized = standardizeArgs(inputs, initialState, constants, this.numConstants);\n inputs = standardized.inputs;\n initialState = standardized.initialState;\n constants = standardized.constants;\n if (Array.isArray(inputs)) {\n initialState = inputs.slice(1);\n inputs = inputs[0];\n }\n if ((initialState == null || initialState.length === 0) &&\n constants == null) {\n return super.apply(inputs, kwargs);\n }\n const additionalInputs = [];\n const additionalSpecs = [];\n if (initialState != null) {\n const numStates = initialState.length;\n if (numStates % 2 > 0) {\n throw new ValueError('When passing `initialState` to a Bidrectional RNN, ' +\n 'the state should be an Array containing the states of ' +\n 'the underlying RNNs.');\n }\n kwargs['initialState'] = initialState;\n additionalInputs.push(...initialState);\n const stateSpecs = initialState\n .map(state => new InputSpec({ shape: state.shape }));\n this.forwardLayer.stateSpec = stateSpecs.slice(0, numStates / 2);\n this.backwardLayer.stateSpec = stateSpecs.slice(numStates / 2);\n additionalSpecs.push(...stateSpecs);\n }\n if (constants != null) {\n throw new NotImplementedError('Support for constants in Bidirectional layers is not ' +\n 'implemented yet.');\n }\n const isSymbolicTensor = additionalInputs[0] instanceof SymbolicTensor;\n for (const tensor of additionalInputs) {\n if (tensor instanceof SymbolicTensor !== isSymbolicTensor) {\n throw new ValueError('The initial state of a Bidirectional layer cannot be ' +\n 'specified as a mix of symbolic and non-symbolic tensors');\n }\n }\n if (isSymbolicTensor) {\n // Compute the full input and specs, including the states.\n const fullInput = [inputs].concat(additionalInputs);\n const fullInputSpec = this.inputSpec.concat(additionalSpecs);\n // Perform the call temporarily and replace inputSpec.\n // Note: with initial states symbolic calls and non-symbolic calls to\n // this method differ in how the initial states are passed. For\n // symbolic calls, the initial states are passed in the first arg, as\n // an Array of SymbolicTensors; for non-symbolic calls, they are\n // passed in the second arg as a part of the kwargs. 
Hence the need to\n // temporarily modify inputSpec here.\n // TODO(cais): Make refactoring so that this hacky code below is no\n // longer needed.\n const originalInputSpec = this.inputSpec;\n this.inputSpec = fullInputSpec;\n const output = super.apply(fullInput, kwargs);\n this.inputSpec = originalInputSpec;\n return output;\n }\n else {\n return super.apply(inputs, kwargs);\n }\n }\n call(inputs, kwargs) {\n return tidy(() => {\n const initialState = kwargs['initialState'];\n let y;\n let yRev;\n if (initialState == null) {\n y = this.forwardLayer.call(inputs, kwargs);\n yRev = this.backwardLayer.call(inputs, kwargs);\n }\n else {\n const forwardState = initialState.slice(0, initialState.length / 2);\n const backwardState = initialState.slice(initialState.length / 2);\n y = this.forwardLayer.call(inputs, Object.assign(kwargs, { initialState: forwardState }));\n yRev = this.backwardLayer.call(inputs, Object.assign(kwargs, { initialState: backwardState }));\n }\n let states;\n if (this.returnState) {\n if (Array.isArray(y)) {\n states = y.slice(1).concat(yRev.slice(1));\n }\n else {\n }\n y = y[0];\n yRev = yRev[0];\n }\n if (this.returnSequences) {\n yRev = tfc.reverse(yRev, 1);\n }\n let output;\n if (this.mergeMode === 'concat') {\n output = K.concatenate([y, yRev]);\n }\n else if (this.mergeMode === 'sum') {\n output = tfc.add(y, yRev);\n }\n else if (this.mergeMode === 'ave') {\n output = tfc.mul(.5, tfc.add(y, yRev));\n }\n else if (this.mergeMode === 'mul') {\n output = tfc.mul(y, yRev);\n }\n else if (this.mergeMode == null) {\n output = [y, yRev];\n }\n // TODO(cais): Properly set learning phase.\n if (this.returnState) {\n if (this.mergeMode == null) {\n return output.concat(states);\n }\n return [output].concat(states);\n }\n return output;\n });\n }\n resetStates(states) {\n this.forwardLayer.resetStates();\n this.backwardLayer.resetStates();\n }\n build(inputShape) {\n nameScope(this.forwardLayer.name, () => {\n this.forwardLayer.build(inputShape);\n });\n nameScope(this.backwardLayer.name, () => {\n this.backwardLayer.build(inputShape);\n });\n this.built = true;\n }\n computeMask(inputs, mask) {\n if (Array.isArray(mask)) {\n mask = mask[0];\n }\n let outputMask;\n if (this.returnSequences) {\n if (this.mergeMode == null) {\n outputMask = [mask, mask];\n }\n else {\n outputMask = mask;\n }\n }\n else {\n if (this.mergeMode == null) {\n outputMask = [null, null];\n }\n else {\n outputMask = null;\n }\n }\n if (this.returnState) {\n const states = this.forwardLayer.states;\n const stateMask = states.map(state => null);\n if (Array.isArray(outputMask)) {\n return outputMask.concat(stateMask).concat(stateMask);\n }\n else {\n return [outputMask].concat(stateMask).concat(stateMask);\n }\n }\n else {\n return outputMask;\n }\n }\n get trainableWeights() {\n return this.forwardLayer.trainableWeights.concat(this.backwardLayer.trainableWeights);\n }\n get nonTrainableWeights() {\n return this.forwardLayer.nonTrainableWeights.concat(this.backwardLayer.nonTrainableWeights);\n }\n // TODO(cais): Implement constraints().\n setFastWeightInitDuringBuild(value) {\n super.setFastWeightInitDuringBuild(value);\n if (this.forwardLayer != null) {\n this.forwardLayer.setFastWeightInitDuringBuild(value);\n }\n if (this.backwardLayer != null) {\n this.backwardLayer.setFastWeightInitDuringBuild(value);\n }\n }\n getConfig() {\n const config = {\n 'mergeMode': this.mergeMode,\n };\n // TODO(cais): Add logic for `numConstants` once the property is added.\n const baseConfig = super.getConfig();\n 
Object.assign(config, baseConfig);\n return config;\n }\n /** @nocollapse */\n static fromConfig(cls, config) {\n const rnnLayer = deserialize(config['layer']);\n delete config['layer'];\n // TODO(cais): Add logic for `numConstants` once the property is added.\n if (config['numConstants'] != null) {\n throw new NotImplementedError(`Deserialization of a Bidirectional layer with numConstants ` +\n `present is not supported yet.`);\n }\n // tslint:disable-next-line:no-any\n const newConfig = config;\n newConfig['layer'] = rnnLayer;\n return new cls(newConfig);\n }\n}\n/** @nocollapse */\nBidirectional.className = 'Bidirectional';\nserialization.registerClass(Bidirectional);\n//# sourceMappingURL=wrappers.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport { InputLayer } from './engine/input_layer';\nimport { Layer } from './engine/topology';\nimport { input } from './exports';\nimport { ELU, LeakyReLU, PReLU, ReLU, Softmax, ThresholdedReLU } from './layers/advanced_activations';\nimport { Conv1D, Conv2D, Conv2DTranspose, Conv3D, Cropping2D, SeparableConv2D, UpSampling2D } from './layers/convolutional';\nimport { DepthwiseConv2D } from './layers/convolutional_depthwise';\nimport { ConvLSTM2D, ConvLSTM2DCell } from './layers/convolutional_recurrent';\nimport { Activation, Dense, Dropout, Flatten, Masking, Permute, RepeatVector, Reshape, SpatialDropout1D } from './layers/core';\nimport { Embedding } from './layers/embeddings';\nimport { Add, Average, Concatenate, Dot, Maximum, Minimum, Multiply } from './layers/merge';\nimport { AlphaDropout, GaussianDropout, GaussianNoise } from './layers/noise';\nimport { BatchNormalization, LayerNormalization } from './layers/normalization';\nimport { ZeroPadding2D } from './layers/padding';\nimport { AveragePooling1D, AveragePooling2D, AveragePooling3D, GlobalAveragePooling1D, GlobalAveragePooling2D, GlobalMaxPooling1D, GlobalMaxPooling2D, MaxPooling1D, MaxPooling2D, MaxPooling3D } from './layers/pooling';\nimport { GRU, GRUCell, LSTM, LSTMCell, RNN, RNNCell, SimpleRNN, SimpleRNNCell, StackedRNNCells } from './layers/recurrent';\nimport { Bidirectional, TimeDistributed } from './layers/wrappers';\n// TODO(cais): Add doc string to all the public static functions in this\n// class; include executable JavaScript code snippets where applicable\n// (b/74074458).\n// Input Layer.\n/**\n * An input layer is an entry point into a `tf.LayersModel`.\n *\n * `InputLayer` is generated automatically for `tf.Sequential` models by\n * specifying the `inputShape` or `batchInputShape` for the first layer. It\n * should not be specified explicitly. However, it can be useful sometimes,\n * e.g., when constructing a sequential model from a subset of another\n * sequential model's layers, as the code snippet below shows.\n *\n * ```js\n * // Define a model which simply adds two inputs.\n * const model1 = tf.sequential();\n * model1.add(tf.layers.dense({inputShape: [4], units: 3, activation: 'relu'}));\n * model1.add(tf.layers.dense({units: 1, activation: 'sigmoid'}));\n * model1.summary();\n * model1.predict(tf.zeros([1, 4])).print();\n *\n * // Construct another model, reusing the second layer of `model1` while\n * // not using the first layer of `model1`. 
Note that you cannot add the second\n * // layer of `model1` directly as the first layer of the new sequential model,\n * // because doing so will lead to an error related to the fact that the layer\n * // is not an input layer. Instead, you need to create an `inputLayer` and add\n * // it to the new sequential model before adding the reused layer.\n * const model2 = tf.sequential();\n * // Use an inputShape that matches the input shape of `model1`'s second\n * // layer.\n * model2.add(tf.layers.inputLayer({inputShape: [3]}));\n * model2.add(model1.layers[1]);\n * model2.summary();\n * model2.predict(tf.zeros([1, 3])).print();\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Inputs', namespace: 'layers'}\n */\nexport function inputLayer(args) {\n return new InputLayer(args);\n}\n// Advanced Activation Layers.\n/**\n * Exponential Linear Unit (ELU).\n *\n * It follows:\n * `f(x) = alpha * (exp(x) - 1.) for x < 0`,\n * `f(x) = x for x >= 0`.\n *\n * Input shape:\n * Arbitrary. Use the configuration `inputShape` when using this layer as the\n * first layer in a model.\n *\n * Output shape:\n * Same shape as the input.\n *\n * References:\n * - [Fast and Accurate Deep Network Learning by Exponential Linear Units\n * (ELUs)](https://arxiv.org/abs/1511.07289v1)\n *\n * @doc {\n * heading: 'Layers',\n * subheading: 'Advanced Activation',\n * namespace: 'layers'\n * }\n */\nexport function elu(args) {\n return new ELU(args);\n}\n/**\n * Rectified Linear Unit activation function.\n *\n * Input shape:\n * Arbitrary. Use the config field `inputShape` (Array of integers, does\n * not include the sample axis) when using this layer as the first layer\n * in a model.\n *\n * Output shape:\n * Same shape as the input.\n *\n * @doc {\n * heading: 'Layers',\n * subheading: 'Advanced Activation',\n * namespace: 'layers'\n * }\n */\nexport function reLU(args) {\n return new ReLU(args);\n}\n/**\n * Leaky version of a rectified linear unit.\n *\n * It allows a small gradient when the unit is not active:\n * `f(x) = alpha * x for x < 0.`\n * `f(x) = x for x >= 0.`\n *\n * Input shape:\n * Arbitrary. Use the configuration `inputShape` when using this layer as the\n * first layer in a model.\n *\n * Output shape:\n * Same shape as the input.\n *\n * @doc {\n * heading: 'Layers',\n * subheading: 'Advanced Activation',\n * namespace: 'layers'\n * }\n */\nexport function leakyReLU(args) {\n return new LeakyReLU(args);\n}\n/**\n * Parameterized version of a leaky rectified linear unit.\n *\n * It follows\n * `f(x) = alpha * x for x < 0.`\n * `f(x) = x for x >= 0.`\n * wherein `alpha` is a trainable weight.\n *\n * Input shape:\n * Arbitrary. Use the configuration `inputShape` when using this layer as the\n * first layer in a model.\n *\n * Output shape:\n * Same shape as the input.\n *\n * @doc {\n * heading: 'Layers',\n * subheading: 'Advanced Activation',\n * namespace: 'layers'\n * }\n */\nexport function prelu(args) {\n return new PReLU(args);\n}\n/**\n * Softmax activation layer.\n *\n * Input shape:\n * Arbitrary. Use the configuration `inputShape` when using this layer as the\n * first layer in a model.\n *\n * Output shape:\n * Same shape as the input.\n *\n * @doc {\n * heading: 'Layers',\n * subheading: 'Advanced Activation',\n * namespace: 'layers'\n * }\n */\nexport function softmax(args) {\n return new Softmax(args);\n}\n/**\n * Thresholded Rectified Linear Unit.\n *\n * It follows:\n * `f(x) = x for x > theta`,\n * `f(x) = 0 otherwise`.\n *\n * Input shape:\n * Arbitrary. 
Use the configuration `inputShape` when using this layer as the\n * first layer in a model.\n *\n * Output shape:\n * Same shape as the input.\n *\n * References:\n * - [Zero-Bias Autoencoders and the Benefits of Co-Adapting\n * Features](http://arxiv.org/abs/1402.3337)\n *\n * @doc {\n * heading: 'Layers',\n * subheading: 'Advanced Activation',\n * namespace: 'layers'\n * }\n */\nexport function thresholdedReLU(args) {\n return new ThresholdedReLU(args);\n}\n// Convolutional Layers.\n/**\n * 1D convolution layer (e.g., temporal convolution).\n *\n * This layer creates a convolution kernel that is convolved\n * with the layer input over a single spatial (or temporal) dimension\n * to produce a tensor of outputs.\n *\n * If `use_bias` is True, a bias vector is created and added to the outputs.\n *\n * If `activation` is not `null`, it is applied to the outputs as well.\n *\n * When using this layer as the first layer in a model, provide an\n * `inputShape` argument `Array` or `null`.\n *\n * For example, `inputShape` would be:\n * - `[10, 128]` for sequences of 10 vectors of 128-dimensional vectors\n * - `[null, 128]` for variable-length sequences of 128-dimensional vectors.\n *\n * @doc {heading: 'Layers', subheading: 'Convolutional', namespace: 'layers'}\n */\nexport function conv1d(args) {\n return new Conv1D(args);\n}\n/**\n * 2D convolution layer (e.g. spatial convolution over images).\n *\n * This layer creates a convolution kernel that is convolved\n * with the layer input to produce a tensor of outputs.\n *\n * If `useBias` is True, a bias vector is created and added to the outputs.\n *\n * If `activation` is not `null`, it is applied to the outputs as well.\n *\n * When using this layer as the first layer in a model,\n * provide the keyword argument `inputShape`\n * (Array of integers, does not include the sample axis),\n * e.g. `inputShape=[128, 128, 3]` for 128x128 RGB pictures\n * in `dataFormat='channelsLast'`.\n *\n * @doc {heading: 'Layers', subheading: 'Convolutional', namespace: 'layers'}\n */\nexport function conv2d(args) {\n return new Conv2D(args);\n}\n/**\n * Transposed convolutional layer (sometimes called Deconvolution).\n *\n * The need for transposed convolutions generally arises\n * from the desire to use a transformation going in the opposite direction of\n * a normal convolution, i.e., from something that has the shape of the output\n * of some convolution to something that has the shape of its input while\n * maintaining a connectivity pattern that is compatible with said\n * convolution.\n *\n * When using this layer as the first layer in a model, provide the\n * configuration `inputShape` (`Array` of integers, does not include the\n * sample axis), e.g., `inputShape: [128, 128, 3]` for 128x128 RGB pictures in\n * `dataFormat: 'channelsLast'`.\n *\n * Input shape:\n * 4D tensor with shape:\n * `[batch, channels, rows, cols]` if `dataFormat` is `'channelsFirst'`.\n * or 4D tensor with shape\n * `[batch, rows, cols, channels]` if `dataFormat` is `'channelsLast`.\n *\n * Output shape:\n * 4D tensor with shape:\n * `[batch, filters, newRows, newCols]` if `dataFormat` is\n * `'channelsFirst'`. 
or 4D tensor with shape:\n * `[batch, newRows, newCols, filters]` if `dataFormat` is `'channelsLast'`.\n *\n * References:\n * - [A guide to convolution arithmetic for deep\n * learning](https://arxiv.org/abs/1603.07285v1)\n * - [Deconvolutional\n * Networks](http://www.matthewzeiler.com/pubs/cvpr2010/cvpr2010.pdf)\n *\n * @doc {heading: 'Layers', subheading: 'Convolutional', namespace: 'layers'}\n */\nexport function conv2dTranspose(args) {\n return new Conv2DTranspose(args);\n}\n/**\n * 3D convolution layer (e.g. spatial convolution over volumes).\n *\n * This layer creates a convolution kernel that is convolved\n * with the layer input to produce a tensor of outputs.\n *\n * If `useBias` is True, a bias vector is created and added to the outputs.\n *\n * If `activation` is not `null`, it is applied to the outputs as well.\n *\n * When using this layer as the first layer in a model,\n * provide the keyword argument `inputShape`\n * (Array of integers, does not include the sample axis),\n * e.g. `inputShape=[128, 128, 128, 1]` for 128x128x128 grayscale volumes\n * in `dataFormat='channelsLast'`.\n *\n * @doc {heading: 'Layers', subheading: 'Convolutional', namespace: 'layers'}\n */\nexport function conv3d(args) {\n return new Conv3D(args);\n}\n/**\n * Depthwise separable 2D convolution.\n *\n * Separable convolution consists of first performing\n * a depthwise spatial convolution\n * (which acts on each input channel separately)\n * followed by a pointwise convolution which mixes together the resulting\n * output channels. The `depthMultiplier` argument controls how many\n * output channels are generated per input channel in the depthwise step.\n *\n * Intuitively, separable convolutions can be understood as\n * a way to factorize a convolution kernel into two smaller kernels,\n * or as an extreme version of an Inception block.\n *\n * Input shape:\n * 4D tensor with shape:\n * `[batch, channels, rows, cols]` if data_format='channelsFirst'\n * or 4D tensor with shape:\n * `[batch, rows, cols, channels]` if data_format='channelsLast'.\n *\n * Output shape:\n * 4D tensor with shape:\n * `[batch, filters, newRows, newCols]` if data_format='channelsFirst'\n * or 4D tensor with shape:\n * `[batch, newRows, newCols, filters]` if data_format='channelsLast'.\n * `rows` and `cols` values might have changed due to padding.\n *\n * @doc {heading: 'Layers', subheading: 'Convolutional', namespace: 'layers'}\n */\nexport function separableConv2d(args) {\n return new SeparableConv2D(args);\n}\n/**\n * Cropping layer for 2D input (e.g., image).\n *\n * This layer can crop an input\n * at the top, bottom, left and right side of an image tensor.\n *\n * Input shape:\n * 4D tensor with shape:\n * - If `dataFormat` is `\"channelsLast\"`:\n * `[batch, rows, cols, channels]`\n * - If `data_format` is `\"channels_first\"`:\n * `[batch, channels, rows, cols]`.\n *\n * Output shape:\n * 4D with shape:\n * - If `dataFormat` is `\"channelsLast\"`:\n * `[batch, croppedRows, croppedCols, channels]`\n * - If `dataFormat` is `\"channelsFirst\"`:\n * `[batch, channels, croppedRows, croppedCols]`.\n *\n * Examples\n * ```js\n *\n * const model = tf.sequential();\n * model.add(tf.layers.cropping2D({cropping:[[2, 2], [2, 2]],\n * inputShape: [128, 128, 3]}));\n * //now output shape is [batch, 124, 124, 3]\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Convolutional', namespace: 'layers'}\n */\nexport function cropping2D(args) {\n return new Cropping2D(args);\n}\n/**\n * Upsampling layer for 2D inputs.\n *\n * 
Repeats the rows and columns of the data\n * by size[0] and size[1] respectively.\n *\n *\n * Input shape:\n * 4D tensor with shape:\n * - If `dataFormat` is `\"channelsLast\"`:\n * `[batch, rows, cols, channels]`\n * - If `dataFormat` is `\"channelsFirst\"`:\n * `[batch, channels, rows, cols]`\n *\n * Output shape:\n * 4D tensor with shape:\n * - If `dataFormat` is `\"channelsLast\"`:\n * `[batch, upsampledRows, upsampledCols, channels]`\n * - If `dataFormat` is `\"channelsFirst\"`:\n * `[batch, channels, upsampledRows, upsampledCols]`\n *\n *\n * @doc {heading: 'Layers', subheading: 'Convolutional', namespace: 'layers'}\n */\nexport function upSampling2d(args) {\n return new UpSampling2D(args);\n}\n// Convolutional(depthwise) Layers.\n/**\n * Depthwise separable 2D convolution.\n *\n * Depthwise separable convolutions consist of performing just the first step\n * of a depthwise spatial convolution (which acts on each input channel\n * separately). The `depthMultiplier` argument controls how many output channels\n * are generated per input channel in the depthwise step.\n *\n * @doc {heading: 'Layers', subheading: 'Convolutional', namespace: 'layers'}\n */\nexport function depthwiseConv2d(args) {\n return new DepthwiseConv2D(args);\n}\n// Basic Layers.\n/**\n * Applies an activation function to an output.\n *\n * This layer applies an element-wise activation function. Other layers, notably\n * `dense`, can also apply activation functions. Use this isolated activation\n * function to extract the values before and after the\n * activation. For instance:\n *\n * ```js\n * const input = tf.input({shape: [5]});\n * const denseLayer = tf.layers.dense({units: 1});\n * const activationLayer = tf.layers.activation({activation: 'relu6'});\n *\n * // Obtain the output symbolic tensors by applying the layers in order.\n * const denseOutput = denseLayer.apply(input);\n * const activationOutput = activationLayer.apply(denseOutput);\n *\n * // Create the model based on the inputs.\n * const model = tf.model({\n * inputs: input,\n * outputs: [denseOutput, activationOutput]\n * });\n *\n * // Collect both outputs and print separately.\n * const [denseOut, activationOut] = model.predict(tf.randomNormal([6, 5]));\n * denseOut.print();\n * activationOut.print();\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Basic', namespace: 'layers'}\n */\nexport function activation(args) {\n return new Activation(args);\n}\n/**\n * Creates a dense (fully connected) layer.\n *\n * This layer implements the operation:\n * `output = activation(dot(input, kernel) + bias)`\n *\n * `activation` is the element-wise activation function\n * passed as the `activation` argument.\n *\n * `kernel` is a weights matrix created by the layer.\n *\n * `bias` is a bias vector created by the layer (only applicable if `useBias`\n * is `true`).\n *\n * **Input shape:**\n *\n * nD `tf.Tensor` with shape: `(batchSize, ..., inputDim)`.\n *\n * The most common situation would be\n * a 2D input with shape `(batchSize, inputDim)`.\n *\n * **Output shape:**\n *\n * nD tensor with shape: `(batchSize, ..., units)`.\n *\n * For instance, for a 2D input with shape `(batchSize, inputDim)`,\n * the output would have shape `(batchSize, units)`.\n *\n * Note: if the input to the layer has a rank greater than 2, then it is\n * flattened prior to the initial dot product with the kernel.\n *\n * @doc {heading: 'Layers', subheading: 'Basic', namespace: 'layers'}\n */\nexport function dense(args) {\n return new Dense(args);\n}\n/**\n * Applies\n * 
[dropout](http://www.cs.toronto.edu/~rsalakhu/papers/srivastava14a.pdf) to\n * the input.\n *\n * Dropout consists in randomly setting a fraction `rate` of input units to 0 at\n * each update during training time, which helps prevent overfitting.\n *\n * @doc {heading: 'Layers', subheading: 'Basic', namespace: 'layers'}\n */\nexport function dropout(args) {\n return new Dropout(args);\n}\n/**\n * Spatial 1D version of Dropout.\n *\n * This Layer type performs the same function as the Dropout layer, but it drops\n * entire 1D feature maps instead of individual elements. For example, if an\n * input example consists of 3 timesteps and the feature map for each timestep\n * has a size of 4, a `spatialDropout1d` layer may zero out the feature maps\n * of the 1st and 2nd timesteps completely while sparing all feature\n * elements of the 3rd timestep.\n *\n * If adjacent frames (timesteps) are strongly correlated (as is normally the\n * case in early convolution layers), regular dropout will not regularize the\n * activation and will otherwise just result in an effective learning\n * rate decrease. In this case, `spatialDropout1d` will help promote\n * independence among feature maps and should be used instead.\n *\n * **Arguments:**\n * rate: A floating-point number >=0 and <=1. Fraction of the input elements\n * to drop.\n *\n * **Input shape:**\n * 3D tensor with shape `(samples, timesteps, channels)`.\n *\n * **Output shape:**\n * Same as the input shape.\n *\n * References:\n * - [Efficient Object Localization Using Convolutional\n * Networks](https://arxiv.org/abs/1411.4280)\n *\n * @doc {heading: 'Layers', subheading: 'Basic', namespace: 'layers'}\n */\nexport function spatialDropout1d(args) {\n return new SpatialDropout1D(args);\n}\n/**\n * Flattens the input. Does not affect the batch size.\n *\n * A `Flatten` layer flattens each batch in its inputs to 1D (making the output\n * 2D).\n *\n * For example:\n *\n * ```js\n * const input = tf.input({shape: [4, 3]});\n * const flattenLayer = tf.layers.flatten();\n * // Inspect the inferred output shape of the flatten layer, which\n * // equals `[null, 12]`. The 2nd dimension is 4 * 3, i.e., the result of the\n * // flattening. (The 1st dimension is the undetermined batch size.)\n * console.log(JSON.stringify(flattenLayer.apply(input).shape));\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Basic', namespace: 'layers'}\n */\nexport function flatten(args) {\n return new Flatten(args);\n}\n/**\n * Repeats the input n times in a new dimension.\n *\n * ```js\n * const model = tf.sequential();\n * model.add(tf.layers.repeatVector({n: 4, inputShape: [2]}));\n * const x = tf.tensor2d([[10, 20]]);\n * // Use the model to do inference on a data point the model hasn't seen\n * model.predict(x).print();\n * // output shape is now [batch, 4, 2]\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Basic', namespace: 'layers'}\n */\nexport function repeatVector(args) {\n return new RepeatVector(args);\n}\n/**\n * Reshapes an input to a certain shape.\n *\n * ```js\n * const input = tf.input({shape: [4, 3]});\n * const reshapeLayer = tf.layers.reshape({targetShape: [2, 6]});\n * // Inspect the inferred output shape of the Reshape layer, which\n * // equals `[null, 2, 6]`. 
(The 1st dimension is the undermined batch size.)\n * console.log(JSON.stringify(reshapeLayer.apply(input).shape));\n * ```\n *\n * Input shape:\n * Arbitrary, although all dimensions in the input shape must be fixed.\n * Use the configuration `inputShape` when using this layer as the\n * first layer in a model.\n *\n *\n * Output shape:\n * [batchSize, targetShape[0], targetShape[1], ...,\n * targetShape[targetShape.length - 1]].\n *\n * @doc {heading: 'Layers', subheading: 'Basic', namespace: 'layers'}\n */\nexport function reshape(args) {\n return new Reshape(args);\n}\n/**\n * Permutes the dimensions of the input according to a given pattern.\n *\n * Useful for, e.g., connecting RNNs and convnets together.\n *\n * Example:\n *\n * ```js\n * const model = tf.sequential();\n * model.add(tf.layers.permute({\n * dims: [2, 1],\n * inputShape: [10, 64]\n * }));\n * console.log(model.outputShape);\n * // Now model's output shape is [null, 64, 10], where null is the\n * // unpermuted sample (batch) dimension.\n * ```\n *\n * Input shape:\n * Arbitrary. Use the configuration field `inputShape` when using this\n * layer as the first layer in a model.\n *\n * Output shape:\n * Same rank as the input shape, but with the dimensions re-ordered (i.e.,\n * permuted) according to the `dims` configuration of this layer.\n *\n * @doc {heading: 'Layers', subheading: 'Basic', namespace: 'layers'}\n */\nexport function permute(args) {\n return new Permute(args);\n}\n/**\n * Maps positive integers (indices) into dense vectors of fixed size.\n * eg. [[4], [20]] -> [[0.25, 0.1], [0.6, -0.2]]\n *\n * **Input shape:** 2D tensor with shape: `[batchSize, sequenceLength]`.\n *\n * **Output shape:** 3D tensor with shape: `[batchSize, sequenceLength,\n * outputDim]`.\n *\n * @doc {heading: 'Layers', subheading: 'Basic', namespace: 'layers'}\n */\nexport function embedding(args) {\n return new Embedding(args);\n}\n// Merge Layers.\n/**\n * Layer that performs element-wise addition on an `Array` of inputs.\n *\n * It takes as input a list of tensors, all of the same shape, and returns a\n * single tensor (also of the same shape). The inputs are specified as an\n * `Array` when the `apply` method of the `Add` layer instance is called. For\n * example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const addLayer = tf.layers.add();\n * const sum = addLayer.apply([input1, input2]);\n * console.log(JSON.stringify(sum.shape));\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Merge', namespace: 'layers'}\n */\nexport function add(args) {\n return new Add(args);\n}\n/**\n * Layer that performs element-wise averaging on an `Array` of inputs.\n *\n * It takes as input a list of tensors, all of the same shape, and returns a\n * single tensor (also of the same shape). 
For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const averageLayer = tf.layers.average();\n * const average = averageLayer.apply([input1, input2]);\n * console.log(JSON.stringify(average.shape));\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Merge', namespace: 'layers'}\n */\nexport function average(args) {\n return new Average(args);\n}\n/**\n * Layer that concatenates an `Array` of inputs.\n *\n * It takes a list of tensors, all of the same shape except for the\n * concatenation axis, and returns a single tensor, the concatenation\n * of all inputs. For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 3]});\n * const concatLayer = tf.layers.concatenate();\n * const output = concatLayer.apply([input1, input2]);\n * console.log(JSON.stringify(output.shape));\n * // You get [null, 2, 5], with the first dimension as the undetermined batch\n * // dimension. The last dimension (5) is the result of concatenating the\n * // last dimensions of the inputs (2 and 3).\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Merge', namespace: 'layers'}\n */\nexport function concatenate(args) {\n return new Concatenate(args);\n}\n/**\n * Layer that computes the element-wise maximum an `Array` of inputs.\n *\n * It takes as input a list of tensors, all of the same shape and returns a\n * single tensor (also of the same shape). For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const maxLayer = tf.layers.maximum();\n * const max = maxLayer.apply([input1, input2]);\n * console.log(JSON.stringify(max.shape));\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Merge', namespace: 'layers'}\n */\nexport function maximum(args) {\n return new Maximum(args);\n}\n/**\n * Layer that computes the element-wise minimum of an `Array` of inputs.\n *\n * It takes as input a list of tensors, all of the same shape and returns a\n * single tensor (also of the same shape). 
For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const minLayer = tf.layers.minimum();\n * const min = minLayer.apply([input1, input2]);\n * console.log(JSON.stringify(min.shape));\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Merge', namespace: 'layers'}\n */\nexport function minimum(args) {\n return new Minimum(args);\n}\n/**\n * Layer that multiplies (element-wise) an `Array` of inputs.\n *\n * It takes as input an Array of tensors, all of the same\n * shape, and returns a single tensor (also of the same shape).\n * For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const input3 = tf.input({shape: [2, 2]});\n * const multiplyLayer = tf.layers.multiply();\n * const product = multiplyLayer.apply([input1, input2, input3]);\n * console.log(product.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n *\n * @doc {heading: 'Layers', subheading: 'Merge', namespace: 'layers'}\n */\nexport function multiply(args) {\n return new Multiply(args);\n}\n/**\n * Layer that computes a dot product between samples in two tensors.\n *\n * E.g., if applied to a list of two tensors `a` and `b` both of shape\n * `[batchSize, n]`, the output will be a tensor of shape `[batchSize, 1]`,\n * where each entry at index `[i, 0]` will be the dot product between\n * `a[i, :]` and `b[i, :]`.\n *\n * Example:\n *\n * ```js\n * const dotLayer = tf.layers.dot({axes: -1});\n * const x1 = tf.tensor2d([[10, 20], [30, 40]]);\n * const x2 = tf.tensor2d([[-1, -2], [-3, -4]]);\n *\n * // Invoke the layer's apply() method in eager (imperative) mode.\n * const y = dotLayer.apply([x1, x2]);\n * y.print();\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Merge', namespace: 'layers'}\n */\nexport function dot(args) {\n return new Dot(args);\n}\n// Normalization Layers.\n/**\n * Batch normalization layer (Ioffe and Szegedy, 2014).\n *\n * Normalize the activations of the previous layer at each batch,\n * i.e. applies a transformation that maintains the mean activation\n * close to 0 and the activation standard deviation close to 1.\n *\n * Input shape:\n * Arbitrary. Use the keyword argument `inputShape` (Array of integers, does\n * not include the sample axis) when calling the constructor of this class,\n * if this layer is used as a first layer in a model.\n *\n * Output shape:\n * Same shape as input.\n *\n * References:\n * - [Batch Normalization: Accelerating Deep Network Training by Reducing\n * Internal Covariate Shift](https://arxiv.org/abs/1502.03167)\n *\n * @doc {heading: 'Layers', subheading: 'Normalization', namespace: 'layers'}\n */\nexport function batchNormalization(args) {\n return new BatchNormalization(args);\n}\n/**\n * Layer-normalization layer (Ba et al., 2016).\n *\n * Normalizes the activations of the previous layer for each given example in a\n * batch independently, instead of across a batch like in `batchNormalization`.\n * In other words, this layer applies a transformation that maintanis the mean\n * activation within each example close to0 and activation variance close to 1.\n *\n * Input shape:\n * Arbitrary. 
Use the argument `inputShape` when using this layer as the first\n * layer in a model.\n *\n * Output shape:\n * Same as input.\n *\n * References:\n * - [Layer Normalization](https://arxiv.org/abs/1607.06450)\n *\n * @doc {heading: 'Layers', subheading: 'Normalization', namespace: 'layers'}\n */\nexport function layerNormalization(args) {\n return new LayerNormalization(args);\n}\n// Padding Layers.\n/**\n * Zero-padding layer for 2D input (e.g., image).\n *\n * This layer can add rows and columns of zeros\n * at the top, bottom, left and right side of an image tensor.\n *\n * Input shape:\n * 4D tensor with shape:\n * - If `dataFormat` is `\"channelsLast\"`:\n * `[batch, rows, cols, channels]`\n * - If `data_format` is `\"channels_first\"`:\n * `[batch, channels, rows, cols]`.\n *\n * Output shape:\n * 4D with shape:\n * - If `dataFormat` is `\"channelsLast\"`:\n * `[batch, paddedRows, paddedCols, channels]`\n * - If `dataFormat` is `\"channelsFirst\"`:\n * `[batch, channels, paddedRows, paddedCols]`.\n *\n * @doc {heading: 'Layers', subheading: 'Padding', namespace: 'layers'}\n */\nexport function zeroPadding2d(args) {\n return new ZeroPadding2D(args);\n}\n// Pooling Layers.\n/**\n * Average pooling operation for spatial data.\n *\n * Input shape: `[batchSize, inLength, channels]`\n *\n * Output shape: `[batchSize, pooledLength, channels]`\n *\n * `tf.avgPool1d` is an alias.\n *\n * @doc {heading: 'Layers', subheading: 'Pooling', namespace: 'layers'}\n */\nexport function averagePooling1d(args) {\n return new AveragePooling1D(args);\n}\nexport function avgPool1d(args) {\n return averagePooling1d(args);\n}\n// For backwards compatibility.\n// See https://github.com/tensorflow/tfjs/issues/152\nexport function avgPooling1d(args) {\n return averagePooling1d(args);\n}\n/**\n * Average pooling operation for spatial data.\n *\n * Input shape:\n * - If `dataFormat === CHANNEL_LAST`:\n * 4D tensor with shape:\n * `[batchSize, rows, cols, channels]`\n * - If `dataFormat === CHANNEL_FIRST`:\n * 4D tensor with shape:\n * `[batchSize, channels, rows, cols]`\n *\n * Output shape\n * - If `dataFormat === CHANNEL_LAST`:\n * 4D tensor with shape:\n * `[batchSize, pooleRows, pooledCols, channels]`\n * - If `dataFormat === CHANNEL_FIRST`:\n * 4D tensor with shape:\n * `[batchSize, channels, pooleRows, pooledCols]`\n *\n * `tf.avgPool2d` is an alias.\n *\n * @doc {heading: 'Layers', subheading: 'Pooling', namespace: 'layers'}\n */\nexport function averagePooling2d(args) {\n return new AveragePooling2D(args);\n}\nexport function avgPool2d(args) {\n return averagePooling2d(args);\n}\n// For backwards compatibility.\n// See https://github.com/tensorflow/tfjs/issues/152\nexport function avgPooling2d(args) {\n return averagePooling2d(args);\n}\n/**\n * Average pooling operation for 3D data.\n *\n * Input shape\n * - If `dataFormat === channelsLast`:\n * 5D tensor with shape:\n * `[batchSize, depths, rows, cols, channels]`\n * - If `dataFormat === channelsFirst`:\n * 4D tensor with shape:\n * `[batchSize, channels, depths, rows, cols]`\n *\n * Output shape\n * - If `dataFormat=channelsLast`:\n * 5D tensor with shape:\n * `[batchSize, pooledDepths, pooledRows, pooledCols, channels]`\n * - If `dataFormat=channelsFirst`:\n * 5D tensor with shape:\n * `[batchSize, channels, pooledDepths, pooledRows, pooledCols]`\n *\n * @doc {heading: 'Layers', subheading: 'Pooling', namespace: 'layers'}\n */\nexport function averagePooling3d(args) {\n return new AveragePooling3D(args);\n}\nexport function avgPool3d(args) {\n return 
averagePooling3d(args);\n}\n// For backwards compatibility.\n// See https://github.com/tensorflow/tfjs/issues/152\nexport function avgPooling3d(args) {\n return averagePooling3d(args);\n}\n/**\n * Global average pooling operation for temporal data.\n *\n * Input Shape: 3D tensor with shape: `[batchSize, steps, features]`.\n *\n * Output Shape:2D tensor with shape: `[batchSize, features]`.\n *\n * @doc {heading: 'Layers', subheading: 'Pooling', namespace: 'layers'}\n */\nexport function globalAveragePooling1d(args) {\n return new GlobalAveragePooling1D(args);\n}\n/**\n * Global average pooling operation for spatial data.\n *\n * Input shape:\n * - If `dataFormat` is `CHANNEL_LAST`:\n * 4D tensor with shape: `[batchSize, rows, cols, channels]`.\n * - If `dataFormat` is `CHANNEL_FIRST`:\n * 4D tensor with shape: `[batchSize, channels, rows, cols]`.\n *\n * Output shape:\n * 2D tensor with shape: `[batchSize, channels]`.\n *\n * @doc {heading: 'Layers', subheading: 'Pooling', namespace: 'layers'}\n */\nexport function globalAveragePooling2d(args) {\n return new GlobalAveragePooling2D(args);\n}\n/**\n * Global max pooling operation for temporal data.\n *\n * Input Shape: 3D tensor with shape: `[batchSize, steps, features]`.\n *\n * Output Shape:2D tensor with shape: `[batchSize, features]`.\n *\n * @doc {heading: 'Layers', subheading: 'Pooling', namespace: 'layers'}\n */\nexport function globalMaxPooling1d(args) {\n return new GlobalMaxPooling1D(args);\n}\n/**\n * Global max pooling operation for spatial data.\n *\n * Input shape:\n * - If `dataFormat` is `CHANNEL_LAST`:\n * 4D tensor with shape: `[batchSize, rows, cols, channels]`.\n * - If `dataFormat` is `CHANNEL_FIRST`:\n * 4D tensor with shape: `[batchSize, channels, rows, cols]`.\n *\n * Output shape:\n * 2D tensor with shape: `[batchSize, channels]`.\n *\n * @doc {heading: 'Layers', subheading: 'Pooling', namespace: 'layers'}\n */\nexport function globalMaxPooling2d(args) {\n return new GlobalMaxPooling2D(args);\n}\n/**\n * Max pooling operation for temporal data.\n *\n * Input shape: `[batchSize, inLength, channels]`\n *\n * Output shape: `[batchSize, pooledLength, channels]`\n *\n * @doc {heading: 'Layers', subheading: 'Pooling', namespace: 'layers'}\n */\nexport function maxPooling1d(args) {\n return new MaxPooling1D(args);\n}\n/**\n * Max pooling operation for spatial data.\n *\n * Input shape\n * - If `dataFormat === CHANNEL_LAST`:\n * 4D tensor with shape:\n * `[batchSize, rows, cols, channels]`\n * - If `dataFormat === CHANNEL_FIRST`:\n * 4D tensor with shape:\n * `[batchSize, channels, rows, cols]`\n *\n * Output shape\n * - If `dataFormat=CHANNEL_LAST`:\n * 4D tensor with shape:\n * `[batchSize, pooleRows, pooledCols, channels]`\n * - If `dataFormat=CHANNEL_FIRST`:\n * 4D tensor with shape:\n * `[batchSize, channels, pooleRows, pooledCols]`\n *\n * @doc {heading: 'Layers', subheading: 'Pooling', namespace: 'layers'}\n */\nexport function maxPooling2d(args) {\n return new MaxPooling2D(args);\n}\n/**\n * Max pooling operation for 3D data.\n *\n * Input shape\n * - If `dataFormat === channelsLast`:\n * 5D tensor with shape:\n * `[batchSize, depths, rows, cols, channels]`\n * - If `dataFormat === channelsFirst`:\n * 5D tensor with shape:\n * `[batchSize, channels, depths, rows, cols]`\n *\n * Output shape\n * - If `dataFormat=channelsLast`:\n * 5D tensor with shape:\n * `[batchSize, pooledDepths, pooledRows, pooledCols, channels]`\n * - If `dataFormat=channelsFirst`:\n * 5D tensor with shape:\n * `[batchSize, channels, pooledDepths, 
pooledRows, pooledCols]`\n *\n * @doc {heading: 'Layers', subheading: 'Pooling', namespace: 'layers'}\n */\nexport function maxPooling3d(args) {\n return new MaxPooling3D(args);\n}\n// Recurrent Layers.\n/**\n * Gated Recurrent Unit - Cho et al. 2014.\n *\n * This is an `RNN` layer consisting of one `GRUCell`. However, unlike\n * the underlying `GRUCell`, the `apply` method of `SimpleRNN` operates\n * on a sequence of inputs. The shape of the input (not including the first,\n * batch dimension) needs to be at least 2-D, with the first dimension being\n * time steps. For example:\n *\n * ```js\n * const rnn = tf.layers.gru({units: 8, returnSequences: true});\n *\n * // Create an input with 10 time steps.\n * const input = tf.input({shape: [10, 20]});\n * const output = rnn.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10, 8]: 1st dimension is unknown batch size; 2nd dimension is the\n * // same as the sequence length of `input`, due to `returnSequences`: `true`;\n * // 3rd dimension is the `GRUCell`'s number of units.\n *\n * @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'}\n */\nexport function gru(args) {\n return new GRU(args);\n}\n/**\n * Cell class for `GRU`.\n *\n * `GRUCell` is distinct from the `RNN` subclass `GRU` in that its\n * `apply` method takes the input data of only a single time step and returns\n * the cell's output at the time step, while `GRU` takes the input data\n * over a number of time steps. For example:\n *\n * ```js\n * const cell = tf.layers.gruCell({units: 2});\n * const input = tf.input({shape: [10]});\n * const output = cell.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10]: This is the cell's output at a single time step. The 1st\n * // dimension is the unknown batch size.\n * ```\n *\n * Instance(s) of `GRUCell` can be used to construct `RNN` layers. The\n * most typical use of this workflow is to combine a number of cells into a\n * stacked RNN cell (i.e., `StackedRNNCell` internally) and use it to create an\n * RNN. For example:\n *\n * ```js\n * const cells = [\n * tf.layers.gruCell({units: 4}),\n * tf.layers.gruCell({units: 8}),\n * ];\n * const rnn = tf.layers.rnn({cell: cells, returnSequences: true});\n *\n * // Create an input with 10 time steps and a length-20 vector at each step.\n * const input = tf.input({shape: [10, 20]});\n * const output = rnn.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10, 8]: 1st dimension is unknown batch size; 2nd dimension is the\n * // same as the sequence length of `input`, due to `returnSequences`: `true`;\n * // 3rd dimension is the last `gruCell`'s number of units.\n * ```\n *\n * To create an `RNN` consisting of only *one* `GRUCell`, use the\n * `tf.layers.gru`.\n *\n * @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'}\n */\nexport function gruCell(args) {\n return new GRUCell(args);\n}\n/**\n * Long-Short Term Memory layer - Hochreiter 1997.\n *\n * This is an `RNN` layer consisting of one `LSTMCell`. However, unlike\n * the underlying `LSTMCell`, the `apply` method of `LSTM` operates\n * on a sequence of inputs. The shape of the input (not including the first,\n * batch dimension) needs to be at least 2-D, with the first dimension being\n * time steps. 
For example:\n *\n * ```js\n * const lstm = tf.layers.lstm({units: 8, returnSequences: true});\n *\n * // Create an input with 10 time steps.\n * const input = tf.input({shape: [10, 20]});\n * const output = lstm.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10, 8]: 1st dimension is unknown batch size; 2nd dimension is the\n * // same as the sequence length of `input`, due to `returnSequences`: `true`;\n * // 3rd dimension is the `LSTMCell`'s number of units.\n *\n * @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'}\n */\nexport function lstm(args) {\n return new LSTM(args);\n}\n/**\n * Cell class for `LSTM`.\n *\n * `LSTMCell` is distinct from the `RNN` subclass `LSTM` in that its\n * `apply` method takes the input data of only a single time step and returns\n * the cell's output at the time step, while `LSTM` takes the input data\n * over a number of time steps. For example:\n *\n * ```js\n * const cell = tf.layers.lstmCell({units: 2});\n * const input = tf.input({shape: [10]});\n * const output = cell.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10]: This is the cell's output at a single time step. The 1st\n * // dimension is the unknown batch size.\n * ```\n *\n * Instance(s) of `LSTMCell` can be used to construct `RNN` layers. The\n * most typical use of this workflow is to combine a number of cells into a\n * stacked RNN cell (i.e., `StackedRNNCell` internally) and use it to create an\n * RNN. For example:\n *\n * ```js\n * const cells = [\n * tf.layers.lstmCell({units: 4}),\n * tf.layers.lstmCell({units: 8}),\n * ];\n * const rnn = tf.layers.rnn({cell: cells, returnSequences: true});\n *\n * // Create an input with 10 time steps and a length-20 vector at each step.\n * const input = tf.input({shape: [10, 20]});\n * const output = rnn.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10, 8]: 1st dimension is unknown batch size; 2nd dimension is the\n * // same as the sequence length of `input`, due to `returnSequences`: `true`;\n * // 3rd dimension is the last `lstmCell`'s number of units.\n * ```\n *\n * To create an `RNN` consisting of only *one* `LSTMCell`, use the\n * `tf.layers.lstm`.\n *\n * @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'}\n */\nexport function lstmCell(args) {\n return new LSTMCell(args);\n}\n/**\n * Fully-connected RNN where the output is to be fed back to input.\n *\n * This is an `RNN` layer consisting of one `SimpleRNNCell`. However, unlike\n * the underlying `SimpleRNNCell`, the `apply` method of `SimpleRNN` operates\n * on a sequence of inputs. The shape of the input (not including the first,\n * batch dimension) needs to be at least 2-D, with the first dimension being\n * time steps. 
For example:\n *\n * ```js\n * const rnn = tf.layers.simpleRNN({units: 8, returnSequences: true});\n *\n * // Create an input with 10 time steps.\n * const input = tf.input({shape: [10, 20]});\n * const output = rnn.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10, 8]: 1st dimension is unknown batch size; 2nd dimension is the\n * // same as the sequence length of `input`, due to `returnSequences`: `true`;\n * // 3rd dimension is the `SimpleRNNCell`'s number of units.\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'}\n */\nexport function simpleRNN(args) {\n return new SimpleRNN(args);\n}\n/**\n * Cell class for `SimpleRNN`.\n *\n * `SimpleRNNCell` is distinct from the `RNN` subclass `SimpleRNN` in that its\n * `apply` method takes the input data of only a single time step and returns\n * the cell's output at the time step, while `SimpleRNN` takes the input data\n * over a number of time steps. For example:\n *\n * ```js\n * const cell = tf.layers.simpleRNNCell({units: 2});\n * const input = tf.input({shape: [10]});\n * const output = cell.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10]: This is the cell's output at a single time step. The 1st\n * // dimension is the unknown batch size.\n * ```\n *\n * Instance(s) of `SimpleRNNCell` can be used to construct `RNN` layers. The\n * most typical use of this workflow is to combine a number of cells into a\n * stacked RNN cell (i.e., `StackedRNNCell` internally) and use it to create an\n * RNN. For example:\n *\n * ```js\n * const cells = [\n * tf.layers.simpleRNNCell({units: 4}),\n * tf.layers.simpleRNNCell({units: 8}),\n * ];\n * const rnn = tf.layers.rnn({cell: cells, returnSequences: true});\n *\n * // Create an input with 10 time steps and a length-20 vector at each step.\n * const input = tf.input({shape: [10, 20]});\n * const output = rnn.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10, 8]: 1st dimension is unknown batch size; 2nd dimension is the\n * // same as the sequence length of `input`, due to `returnSequences`: `true`;\n * // 3rd dimension is the last `SimpleRNNCell`'s number of units.\n * ```\n *\n * To create an `RNN` consisting of only *one* `SimpleRNNCell`, use the\n * `tf.layers.simpleRNN`.\n *\n * @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'}\n */\nexport function simpleRNNCell(args) {\n return new SimpleRNNCell(args);\n}\n/**\n * Convolutional LSTM layer - Xingjian Shi 2015.\n *\n * This is an `ConvRNN2D` layer consisting of one `ConvLSTM2DCell`. However,\n * unlike the underlying `ConvLSTM2DCell`, the `apply` method of `ConvLSTM2D`\n * operates on a sequence of inputs. The shape of the input (not including the\n * first, batch dimension) needs to be 4-D, with the first dimension being time\n * steps. 
For example:\n *\n * ```js\n * const filters = 3;\n * const kernelSize = 3;\n *\n * const batchSize = 4;\n * const sequenceLength = 2;\n * const size = 5;\n * const channels = 3;\n *\n * const inputShape = [batchSize, sequenceLength, size, size, channels];\n * const input = tf.ones(inputShape);\n *\n * const layer = tf.layers.convLstm2d({filters, kernelSize});\n *\n * const output = layer.apply(input);\n * ```\n */\n/** @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'} */\nexport function convLstm2d(args) {\n return new ConvLSTM2D(args);\n}\n/**\n * Cell class for `ConvLSTM2D`.\n *\n * `ConvLSTM2DCell` is distinct from the `ConvRNN2D` subclass `ConvLSTM2D` in\n * that its `call` method takes the input data of only a single time step and\n * returns the cell's output at the time step, while `ConvLSTM2D` takes the\n * input data over a number of time steps. For example:\n *\n * ```js\n * const filters = 3;\n * const kernelSize = 3;\n *\n * const sequenceLength = 1;\n * const size = 5;\n * const channels = 3;\n *\n * const inputShape = [sequenceLength, size, size, channels];\n * const input = tf.ones(inputShape);\n *\n * const cell = tf.layers.convLstm2dCell({filters, kernelSize});\n *\n * cell.build(input.shape);\n *\n * const outputSize = size - kernelSize + 1;\n * const outShape = [sequenceLength, outputSize, outputSize, filters];\n *\n * const initialH = tf.zeros(outShape);\n * const initialC = tf.zeros(outShape);\n *\n * const [o, h, c] = cell.call([input, initialH, initialC], {});\n * ```\n */\n/** @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'} */\nexport function convLstm2dCell(args) {\n return new ConvLSTM2DCell(args);\n}\n/**\n * Base class for recurrent layers.\n *\n * Input shape:\n * 3D tensor with shape `[batchSize, timeSteps, inputDim]`.\n *\n * Output shape:\n * - if `returnState`, an Array of tensors (i.e., `tf.Tensor`s). The first\n * tensor is the output. The remaining tensors are the states at the\n * last time step, each with shape `[batchSize, units]`.\n * - if `returnSequences`, the output will have shape\n * `[batchSize, timeSteps, units]`.\n * - else, the output will have shape `[batchSize, units]`.\n *\n * Masking:\n * This layer supports masking for input data with a variable number\n * of timesteps. To introduce masks to your data,\n * use an embedding layer with the `mask_zero` parameter\n * set to `True`.\n *\n * Notes on using statefulness in RNNs:\n * You can set RNN layers to be 'stateful', which means that the states\n * computed for the samples in one batch will be reused as initial states\n * for the samples in the next batch. This assumes a one-to-one mapping\n * between samples in different successive batches.\n *\n * To enable statefulness:\n * - specify `stateful: true` in the layer constructor.\n * - specify a fixed batch size for your model, by passing\n * if sequential model:\n * `batchInputShape=[...]` to the first layer in your model.\n * else for functional model with 1 or more Input layers:\n * `batchShape=[...]` to all the first layers in your model.\n * This is the expected shape of your inputs *including the batch size*.\n * It should be a tuple of integers, e.g. 
`(32, 10, 100)`.\n * - specify `shuffle=False` when calling fit().\n *\n * To reset the states of your model, call `.resetStates()` on either\n * a specific layer, or on your entire model.\n *\n * Note on specifying the initial state of RNNs\n * You can specify the initial state of RNN layers symbolically by\n * calling them with the option `initialState`. The value of\n * `initialState` should be a tensor or list of tensors representing\n * the initial state of the RNN layer.\n *\n * You can specify the initial state of RNN layers numerically by\n * calling `resetStates` with the keyword argument `states`. The value of\n * `states` should be a numpy array or list of numpy arrays representing\n * the initial state of the RNN layer.\n *\n * Note on passing external constants to RNNs\n * You can pass \"external\" constants to the cell using the `constants`\n * keyword argument of `RNN.call` method. This requires that the `cell.call`\n * method accepts the same keyword argument `constants`. Such constants\n * can be used to conditon the cell transformation on additional static inputs\n * (not changing over time), a.k.a an attention mechanism.\n *\n * @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'}\n */\nexport function rnn(args) {\n return new RNN(args);\n}\n/**\n * Wrapper allowing a stack of RNN cells to behave as a single cell.\n *\n * Used to implement efficient stacked RNNs.\n *\n * @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'}\n */\nexport function stackedRNNCells(args) {\n return new StackedRNNCells(args);\n}\n// Wrapper Layers.\n/** @doc {heading: 'Layers', subheading: 'Wrapper', namespace: 'layers'} */\nexport function bidirectional(args) {\n return new Bidirectional(args);\n}\n/**\n * This wrapper applies a layer to every temporal slice of an input.\n *\n * The input should be at least 3D, and the dimension of the index `1` will be\n * considered to be the temporal dimension.\n *\n * Consider a batch of 32 samples, where each sample is a sequence of 10 vectors\n * of 16 dimensions. 
The batch input shape of the layer is then `[32, 10,\n * 16]`, and the `inputShape`, not including the sample dimension, is\n * `[10, 16]`.\n *\n * You can then use `TimeDistributed` to apply a `Dense` layer to each of the 10\n * timesteps, independently:\n *\n * ```js\n * const model = tf.sequential();\n * model.add(tf.layers.timeDistributed({\n * layer: tf.layers.dense({units: 8}),\n * inputShape: [10, 16],\n * }));\n *\n * // Now model.outputShape = [null, 10, 8].\n * // The output will then have shape `[32, 10, 8]`.\n *\n * // In subsequent layers, there is no need for `inputShape`:\n * model.add(tf.layers.timeDistributed({layer: tf.layers.dense({units: 32})}));\n * console.log(JSON.stringify(model.outputs[0].shape));\n * // Now model.outputShape = [null, 10, 32].\n * ```\n *\n * The output will then have shape `[32, 10, 32]`.\n *\n * `TimeDistributed` can be used with arbitrary layers, not just `Dense`, for\n * instance a `Conv2D` layer.\n *\n * ```js\n * const model = tf.sequential();\n * model.add(tf.layers.timeDistributed({\n * layer: tf.layers.conv2d({filters: 64, kernelSize: [3, 3]}),\n * inputShape: [10, 299, 299, 3],\n * }));\n * console.log(JSON.stringify(model.outputs[0].shape));\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Wrapper', namespace: 'layers'}\n */\nexport function timeDistributed(args) {\n return new TimeDistributed(args);\n}\n// Aliases for pooling.\nexport const globalMaxPool1d = globalMaxPooling1d;\nexport const globalMaxPool2d = globalMaxPooling2d;\nexport const maxPool1d = maxPooling1d;\nexport const maxPool2d = maxPooling2d;\nexport { Layer, RNN, RNNCell, input /* alias for tf.input */ };\n/**\n * Apply additive zero-centered Gaussian noise.\n *\n * As it is a regularization layer, it is only active at training time.\n *\n * This is useful to mitigate overfitting\n * (you could see it as a form of random data augmentation).\n * Gaussian Noise (GS) is a natural choice as corruption process\n * for real valued inputs.\n *\n * # Arguments\n * stddev: float, standard deviation of the noise distribution.\n *\n * # Input shape\n * Arbitrary. Use the keyword argument `input_shape`\n * (tuple of integers, does not include the samples axis)\n * when using this layer as the first layer in a model.\n *\n * # Output shape\n * Same shape as input.\n *\n * @doc {heading: 'Layers', subheading: 'Noise', namespace: 'layers'}\n */\nexport function gaussianNoise(args) {\n return new GaussianNoise(args);\n}\n/**\n * Apply multiplicative 1-centered Gaussian noise.\n *\n * As it is a regularization layer, it is only active at training time.\n *\n * Arguments:\n * - `rate`: float, drop probability (as with `Dropout`).\n * The multiplicative noise will have\n * standard deviation `sqrt(rate / (1 - rate))`.\n *\n * Input shape:\n * Arbitrary. 
Use the keyword argument `inputShape`\n * (tuple of integers, does not include the samples axis)\n * when using this layer as the first layer in a model.\n *\n * Output shape:\n * Same shape as input.\n *\n * References:\n * - [Dropout: A Simple Way to Prevent Neural Networks from Overfitting](\n * http://www.cs.toronto.edu/~rsalakhu/papers/srivastava14a.pdf)\n *\n * @doc {heading: 'Layers', subheading: 'Noise', namespace: 'layers'}\n */\nexport function gaussianDropout(args) {\n return new GaussianDropout(args);\n}\n/**\n * Applies Alpha Dropout to the input.\n *\n * As it is a regularization layer, it is only active at training time.\n *\n * Alpha Dropout is a `Dropout` that keeps mean and variance of inputs\n * to their original values, in order to ensure the self-normalizing property\n * even after this dropout.\n * Alpha Dropout fits well to Scaled Exponential Linear Units\n * by randomly setting activations to the negative saturation value.\n *\n * Arguments:\n * - `rate`: float, drop probability (as with `Dropout`).\n * The multiplicative noise will have\n * standard deviation `sqrt(rate / (1 - rate))`.\n * - `noise_shape`: A 1-D `Tensor` of type `int32`, representing the\n * shape for randomly generated keep/drop flags.\n *\n * Input shape:\n * Arbitrary. Use the keyword argument `inputShape`\n * (tuple of integers, does not include the samples axis)\n * when using this layer as the first layer in a model.\n *\n * Output shape:\n * Same shape as input.\n *\n * References:\n * - [Self-Normalizing Neural Networks](https://arxiv.org/abs/1706.02515)\n *\n * @doc {heading: 'Layers', subheading: 'Noise', namespace: 'layers'}\n */\nexport function alphaDropout(args) {\n return new AlphaDropout(args);\n}\n/**\n * Masks a sequence by using a mask value to skip timesteps.\n *\n * If all features for a given sample timestep are equal to `mask_value`,\n * then the sample timestep will be masked (skipped) in all downstream layers\n * (as long as they support masking).\n *\n * If any downstream layer does not support masking yet receives such\n * an input mask, an exception will be raised.\n *\n * Arguments:\n * - `maskValue`: Either None or mask value to skip.\n *\n * Input shape:\n * Arbitrary. Use the keyword argument `inputShape`\n * (tuple of integers, does not include the samples axis)\n * when using this layer as the first layer in a model.\n *\n * Output shape:\n * Same shape as input.\n *\n * @doc {heading: 'Layers', subheading: 'Mask', namespace: 'layers'}\n */\nexport function masking(args) {\n return new Masking(args);\n}\n//# sourceMappingURL=exports_layers.js.map", "import * as losses from './losses';\nimport * as metrics from './metrics';\n/**\n * Binary accuracy metric function.\n *\n * `yTrue` and `yPred` can have 0-1 values. 
Example:\n * ```js\n * const x = tf.tensor2d([[1, 1, 1, 1], [0, 0, 0, 0]], [2, 4]);\n * const y = tf.tensor2d([[1, 0, 1, 0], [0, 0, 0, 1]], [2, 4]);\n * const accuracy = tf.metrics.binaryAccuracy(x, y);\n * accuracy.print();\n * ```\n *\n * `yTrue` and `yPred` can also have floating-number values between 0 and 1, in\n * which case the values will be thresholded at 0.5 to yield 0-1 values (i.e.,\n * a value >= 0.5 and <= 1.0 is interpreted as 1.\n * )\n * Example:\n * ```js\n * const x = tf.tensor1d([1, 1, 1, 1, 0, 0, 0, 0]);\n * const y = tf.tensor1d([0.2, 0.4, 0.6, 0.8, 0.2, 0.3, 0.4, 0.7]);\n * const accuracy = tf.metrics.binaryAccuracy(x, y);\n * accuracy.print();\n * ```\n *\n * @param yTrue Binary Tensor of truth.\n * @param yPred Binary Tensor of prediction.\n * @return Accuracy Tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function binaryAccuracy(yTrue, yPred) {\n return metrics.binaryAccuracy(yTrue, yPred);\n}\n/**\n * Binary crossentropy metric function.\n *\n * Example:\n * ```js\n * const x = tf.tensor2d([[0], [1], [1], [1]]);\n * const y = tf.tensor2d([[0], [0], [0.5], [1]]);\n * const crossentropy = tf.metrics.binaryCrossentropy(x, y);\n * crossentropy.print();\n * ```\n *\n * @param yTrue Binary Tensor of truth.\n * @param yPred Binary Tensor of prediction, probabilities for the `1` case.\n * @return Accuracy Tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function binaryCrossentropy(yTrue, yPred) {\n return metrics.binaryCrossentropy(yTrue, yPred);\n}\n/**\n * Sparse categorical accuracy metric function.\n *\n * Example:\n * ```js\n *\n * const yTrue = tf.tensor1d([1, 1, 2, 2, 0]);\n * const yPred = tf.tensor2d(\n * [[0, 1, 0], [1, 0, 0], [0, 0.4, 0.6], [0, 0.6, 0.4], [0.7, 0.3, 0]]);\n * const crossentropy = tf.metrics.sparseCategoricalAccuracy(yTrue, yPred);\n * crossentropy.print();\n * ```\n *\n * @param yTrue True labels: indices.\n * @param yPred Predicted probabilities or logits.\n * @returns Accuracy tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function sparseCategoricalAccuracy(yTrue, yPred) {\n return metrics.sparseCategoricalAccuracy(yTrue, yPred);\n}\n/**\n * Categorical accuracy metric function.\n *\n * Example:\n * ```js\n * const x = tf.tensor2d([[0, 0, 0, 1], [0, 0, 0, 1]]);\n * const y = tf.tensor2d([[0.1, 0.8, 0.05, 0.05], [0.1, 0.05, 0.05, 0.8]]);\n * const accuracy = tf.metrics.categoricalAccuracy(x, y);\n * accuracy.print();\n * ```\n *\n * @param yTrue Binary Tensor of truth: one-hot encoding of categories.\n * @param yPred Binary Tensor of prediction: probabilities or logits for the\n * same categories as in `yTrue`.\n * @return Accuracy Tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function categoricalAccuracy(yTrue, yPred) {\n return metrics.categoricalAccuracy(yTrue, yPred);\n}\n/**\n * Categorical crossentropy between an output tensor and a target tensor.\n *\n * @param target A tensor of the same shape as `output`.\n * @param output A tensor resulting from a softmax (unless `fromLogits` is\n * `true`, in which case `output` is expected to be the logits).\n * @param fromLogits Boolean, whether `output` is the result of a softmax, or is\n * a tensor of logits.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function categoricalCrossentropy(yTrue, yPred) {\n return metrics.categoricalCrossentropy(yTrue, yPred);\n}\n/**\n * Computes the precision of the predictions with respect to the labels.\n *\n * 
Example:\n * ```js\n * const x = tf.tensor2d(\n * [\n * [0, 0, 0, 1],\n * [0, 1, 0, 0],\n * [0, 0, 0, 1],\n * [1, 0, 0, 0],\n * [0, 0, 1, 0]\n * ]\n * );\n *\n * const y = tf.tensor2d(\n * [\n * [0, 0, 1, 0],\n * [0, 1, 0, 0],\n * [0, 0, 0, 1],\n * [0, 1, 0, 0],\n * [0, 1, 0, 0]\n * ]\n * );\n *\n * const precision = tf.metrics.precision(x, y);\n * precision.print();\n * ```\n *\n * @param yTrue The ground truth values. Expected to be contain only 0-1 values.\n * @param yPred The predicted values. Expected to be contain only 0-1 values.\n * @return Precision Tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function precision(yTrue, yPred) {\n return metrics.precision(yTrue, yPred);\n}\n/**\n * Computes the recall of the predictions with respect to the labels.\n *\n * Example:\n * ```js\n * const x = tf.tensor2d(\n * [\n * [0, 0, 0, 1],\n * [0, 1, 0, 0],\n * [0, 0, 0, 1],\n * [1, 0, 0, 0],\n * [0, 0, 1, 0]\n * ]\n * );\n *\n * const y = tf.tensor2d(\n * [\n * [0, 0, 1, 0],\n * [0, 1, 0, 0],\n * [0, 0, 0, 1],\n * [0, 1, 0, 0],\n * [0, 1, 0, 0]\n * ]\n * );\n *\n * const recall = tf.metrics.recall(x, y);\n * recall.print();\n * ```\n *\n * @param yTrue The ground truth values. Expected to be contain only 0-1 values.\n * @param yPred The predicted values. Expected to be contain only 0-1 values.\n * @return Recall Tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function recall(yTrue, yPred) {\n return metrics.recall(yTrue, yPred);\n}\n/**\n * Loss or metric function: Cosine proximity.\n *\n * Mathematically, cosine proximity is defined as:\n * `-sum(l2Normalize(yTrue) * l2Normalize(yPred))`,\n * wherein `l2Normalize()` normalizes the L2 norm of the input to 1 and `*`\n * represents element-wise multiplication.\n *\n * ```js\n * const yTrue = tf.tensor2d([[1, 0], [1, 0]]);\n * const yPred = tf.tensor2d([[1 / Math.sqrt(2), 1 / Math.sqrt(2)], [0, 1]]);\n * const proximity = tf.metrics.cosineProximity(yTrue, yPred);\n * proximity.print();\n * ```\n *\n * @param yTrue Truth Tensor.\n * @param yPred Prediction Tensor.\n * @return Cosine proximity Tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function cosineProximity(yTrue, yPred) {\n return losses.cosineProximity(yTrue, yPred);\n}\n/**\n * Loss or metric function: Mean absolute error.\n *\n * Mathematically, mean absolute error is defined as:\n * `mean(abs(yPred - yTrue))`,\n * wherein the `mean` is applied over feature dimensions.\n *\n * ```js\n * const yTrue = tf.tensor2d([[0, 1], [0, 0], [2, 3]]);\n * const yPred = tf.tensor2d([[0, 1], [0, 1], [-2, -3]]);\n * const mse = tf.metrics.meanAbsoluteError(yTrue, yPred);\n * mse.print();\n * ```\n *\n * @param yTrue Truth Tensor.\n * @param yPred Prediction Tensor.\n * @return Mean absolute error Tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function meanAbsoluteError(yTrue, yPred) {\n return losses.meanAbsoluteError(yTrue, yPred);\n}\n/**\n * Loss or metric function: Mean absolute percentage error.\n *\n * ```js\n * const yTrue = tf.tensor2d([[0, 1], [10, 20]]);\n * const yPred = tf.tensor2d([[0, 1], [11, 24]]);\n * const mse = tf.metrics.meanAbsolutePercentageError(yTrue, yPred);\n * mse.print();\n * ```\n *\n * Aliases: `tf.metrics.MAPE`, `tf.metrics.mape`.\n *\n * @param yTrue Truth Tensor.\n * @param yPred Prediction Tensor.\n * @return Mean absolute percentage error Tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function 
meanAbsolutePercentageError(yTrue, yPred) {\n return losses.meanAbsolutePercentageError(yTrue, yPred);\n}\nexport function MAPE(yTrue, yPred) {\n return losses.meanAbsolutePercentageError(yTrue, yPred);\n}\nexport function mape(yTrue, yPred) {\n return losses.meanAbsolutePercentageError(yTrue, yPred);\n}\n/**\n * Loss or metric function: Mean squared error.\n *\n * ```js\n * const yTrue = tf.tensor2d([[0, 1], [3, 4]]);\n * const yPred = tf.tensor2d([[0, 1], [-3, -4]]);\n * const mse = tf.metrics.meanSquaredError(yTrue, yPred);\n * mse.print();\n * ```\n *\n * Aliases: `tf.metrics.MSE`, `tf.metrics.mse`.\n *\n * @param yTrue Truth Tensor.\n * @param yPred Prediction Tensor.\n * @return Mean squared error Tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */\nexport function meanSquaredError(yTrue, yPred) {\n return losses.meanSquaredError(yTrue, yPred);\n}\nexport function MSE(yTrue, yPred) {\n return losses.meanSquaredError(yTrue, yPred);\n}\nexport function mse(yTrue, yPred) {\n return losses.meanSquaredError(yTrue, yPred);\n}\n//# sourceMappingURL=exports_metrics.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nexport { modelFromJSON } from './models';\n//# sourceMappingURL=exports_models.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport * as regularizers from './regularizers';\n// tslint:disable-next-line:max-line-length\nimport { L1L2 } from './regularizers';\n/**\n * Regularizer for L1 and L2 regularization.\n *\n * Adds a term to the loss to penalize large weights:\n * loss += sum(l1 * abs(x)) + sum(l2 * x^2)\n *\n * @doc {heading: 'Regularizers', namespace: 'regularizers'}\n */\nexport function l1l2(config) {\n return new L1L2(config);\n}\n/**\n * Regularizer for L1 regularization.\n *\n * Adds a term to the loss to penalize large weights:\n * loss += sum(l1 * abs(x))\n * @param args l1 config.\n *\n * @doc {heading: 'Regularizers', namespace: 'regularizers'}\n */\nexport function l1(config) {\n return regularizers.l1(config);\n}\n/**\n * Regularizer for L2 regularization.\n *\n * Adds a term to the loss to penalize large weights:\n * loss += sum(l2 * x^2)\n * @param args l2 config.\n *\n * @doc {heading: 'Regularizers', namespace: 'regularizers'}\n */\nexport function l2(config) {\n return regularizers.l2(config);\n}\n//# sourceMappingURL=exports_regularizers.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n/* Original source: keras/callbacks.py */\nimport { BaseCallback } from './base_callbacks';\nimport { LayersModel } from './engine/training';\nimport { NotImplementedError } from './errors';\nimport { resolveScalarsInLogs } from './logs';\nexport class Callback extends BaseCallback {\n constructor() {\n super(...arguments);\n /** Instance of `keras.models.Model`. Reference of the model being trained. 
*/\n this.model = null;\n }\n setModel(model) {\n if (!(model instanceof LayersModel)) {\n throw new Error('model must be a LayersModel, not some other Container');\n }\n this.model = model;\n }\n}\nfunction less(currVal, prevVal) {\n return currVal < prevVal;\n}\nfunction greater(currVal, prevVal) {\n return currVal > prevVal;\n}\n/**\n * A Callback that stops training when a monitored quantity has stopped\n * improving.\n */\nexport class EarlyStopping extends Callback {\n constructor(args) {\n super();\n if (args == null) {\n args = {};\n }\n if (args.restoreBestWeights) {\n throw new NotImplementedError('restoreBestWeights = True is not implemented in EarlyStopping yet.');\n }\n this.monitor = args.monitor || 'val_loss';\n this.minDelta = Math.abs(args.minDelta || 0);\n this.patience = args.patience || 0;\n this.verbose = args.verbose || 0;\n this.mode = args.mode || 'auto';\n this.baseline = args.baseline;\n if (['auto', 'min', 'max'].indexOf(this.mode) === -1) {\n console.warn(`EarlyStopping mode '${this.mode}' is invalid. ` +\n `Falling back to mode 'auto'.`);\n this.mode = 'auto';\n }\n if (this.mode === 'min') {\n this.monitorFunc = less;\n }\n else if (this.mode === 'max') {\n this.monitorFunc = greater;\n }\n else {\n // For mode === 'auto'.\n if (this.monitor.indexOf('acc') !== -1) {\n this.monitorFunc = greater;\n }\n else {\n this.monitorFunc = less;\n }\n }\n if (this.monitorFunc === less) {\n this.minDelta *= -1;\n }\n }\n async onTrainBegin(logs) {\n this.wait = 0;\n this.stoppedEpoch = 0;\n if (this.baseline != null) {\n this.best = this.baseline;\n }\n else {\n this.best = this.monitorFunc === less ? Infinity : -Infinity;\n }\n }\n async onEpochEnd(epoch, logs) {\n await resolveScalarsInLogs(logs);\n const current = this.getMonitorValue(logs);\n if (current == null) {\n return;\n }\n if (this.monitorFunc(current - this.minDelta, this.best)) {\n this.best = current;\n this.wait = 0;\n // TODO(cais): Logic for restoreBestWeights.\n }\n else {\n this.wait++;\n if (this.wait >= this.patience) {\n this.stoppedEpoch = epoch;\n this.model.stopTraining = true;\n }\n // TODO(cais): Logic for restoreBestWeights.\n }\n }\n async onTrainEnd(logs) {\n if (this.stoppedEpoch > 0 && this.verbose) {\n console.log(`Epoch ${this.stoppedEpoch}: early stopping.`);\n }\n }\n getMonitorValue(logs) {\n if (logs == null) {\n logs = {};\n }\n const monitorValue = logs[this.monitor];\n if (monitorValue == null) {\n console.warn(`Metric for EarlyStopping ${this.monitor} is not available. 
` +\n `Available metrics are: ${Object.keys(logs)}`);\n }\n return monitorValue;\n }\n}\n/**\n * Factory function for a Callback that stops training when a monitored\n * quantity has stopped improving.\n *\n * Early stopping is a type of regularization, and protects model against\n * overfitting.\n *\n * The following example based on fake data illustrates how this callback\n * can be used during `tf.LayersModel.fit()`:\n *\n * ```js\n * const model = tf.sequential();\n * model.add(tf.layers.dense({\n * units: 3,\n * activation: 'softmax',\n * kernelInitializer: 'ones',\n * inputShape: [2]\n * }));\n * const xs = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n * const ys = tf.tensor2d([[1, 0, 0], [0, 1, 0]], [2, 3]);\n * const xsVal = tf.tensor2d([4, 3, 2, 1], [2, 2]);\n * const ysVal = tf.tensor2d([[0, 0, 1], [0, 1, 0]], [2, 3]);\n * model.compile(\n * {loss: 'categoricalCrossentropy', optimizer: 'sgd', metrics: ['acc']});\n *\n * // Without the EarlyStopping callback, the val_acc value would be:\n * // 0.5, 0.5, 0.5, 0.5, ...\n * // With val_acc being monitored, training should stop after the 2nd epoch.\n * const history = await model.fit(xs, ys, {\n * epochs: 10,\n * validationData: [xsVal, ysVal],\n * callbacks: tf.callbacks.earlyStopping({monitor: 'val_acc'})\n * });\n *\n * // Expect to see a length-2 array.\n * console.log(history.history.val_acc);\n * ```\n *\n * @doc {\n * heading: 'Callbacks',\n * namespace: 'callbacks'\n * }\n */\nexport function earlyStopping(args) {\n return new EarlyStopping(args);\n}\nexport const callbacks = { earlyStopping };\n//# sourceMappingURL=callbacks.js.map", "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n// This file lists all exports of TensorFlow.js Layers\nimport * as constraints from './exports_constraints';\nimport * as initializers from './exports_initializers';\nimport * as layers from './exports_layers';\nimport * as metrics from './exports_metrics';\nimport * as models from './exports_models';\nimport * as regularizers from './exports_regularizers';\nexport { CallbackList, CustomCallback, History } from './base_callbacks';\nexport { Callback, callbacks, EarlyStopping } from './callbacks';\nexport { InputSpec, SymbolicTensor } from './engine/topology';\nexport { LayersModel } from './engine/training';\nexport { input, loadLayersModel, model, registerCallbackConstructor, sequential } from './exports';\nexport { RNN } from './layers/recurrent';\nexport { Sequential } from './models';\nexport { LayerVariable } from './variables';\nexport { version as version_layers } from './version';\nexport { constraints, initializers, layers, metrics, models, regularizers };\n//# sourceMappingURL=index.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\n/** DataType enum. */\nexport var DataType;\n(function (DataType) {\n DataType[DataType[\"DT_INVALID\"] = 0] = \"DT_INVALID\";\n DataType[DataType[\"DT_FLOAT\"] = 1] = \"DT_FLOAT\";\n DataType[DataType[\"DT_DOUBLE\"] = 2] = \"DT_DOUBLE\";\n DataType[DataType[\"DT_INT32\"] = 3] = \"DT_INT32\";\n DataType[DataType[\"DT_UINT8\"] = 4] = \"DT_UINT8\";\n DataType[DataType[\"DT_INT16\"] = 5] = \"DT_INT16\";\n DataType[DataType[\"DT_INT8\"] = 6] = \"DT_INT8\";\n DataType[DataType[\"DT_STRING\"] = 7] = \"DT_STRING\";\n DataType[DataType[\"DT_COMPLEX64\"] = 8] = \"DT_COMPLEX64\";\n DataType[DataType[\"DT_INT64\"] = 9] = \"DT_INT64\";\n DataType[DataType[\"DT_BOOL\"] = 10] = \"DT_BOOL\";\n DataType[DataType[\"DT_QINT8\"] = 11] = \"DT_QINT8\";\n DataType[DataType[\"DT_QUINT8\"] = 12] = \"DT_QUINT8\";\n DataType[DataType[\"DT_QINT32\"] = 13] = \"DT_QINT32\";\n DataType[DataType[\"DT_BFLOAT16\"] = 14] = \"DT_BFLOAT16\";\n DataType[DataType[\"DT_FLOAT_REF\"] = 101] = \"DT_FLOAT_REF\";\n DataType[DataType[\"DT_DOUBLE_REF\"] = 102] = \"DT_DOUBLE_REF\";\n DataType[DataType[\"DT_INT32_REF\"] = 103] = \"DT_INT32_REF\";\n DataType[DataType[\"DT_UINT8_REF\"] = 104] = \"DT_UINT8_REF\";\n DataType[DataType[\"DT_INT16_REF\"] = 105] = \"DT_INT16_REF\";\n DataType[DataType[\"DT_INT8_REF\"] = 106] = \"DT_INT8_REF\";\n DataType[DataType[\"DT_STRING_REF\"] = 107] = \"DT_STRING_REF\";\n DataType[DataType[\"DT_COMPLEX64_REF\"] = 108] = \"DT_COMPLEX64_REF\";\n DataType[DataType[\"DT_INT64_REF\"] = 109] = \"DT_INT64_REF\";\n DataType[DataType[\"DT_BOOL_REF\"] = 110] = \"DT_BOOL_REF\";\n DataType[DataType[\"DT_QINT8_REF\"] = 111] = \"DT_QINT8_REF\";\n DataType[DataType[\"DT_QUINT8_REF\"] = 112] = \"DT_QUINT8_REF\";\n DataType[DataType[\"DT_QINT32_REF\"] = 113] = \"DT_QINT32_REF\";\n DataType[DataType[\"DT_BFLOAT16_REF\"] = 114] = \"DT_BFLOAT16_REF\";\n})(DataType || (DataType = {}));\nexport var SaverDef;\n(function (SaverDef) {\n /** CheckpointFormatVersion enum. */\n let CheckpointFormatVersion;\n (function (CheckpointFormatVersion) {\n CheckpointFormatVersion[CheckpointFormatVersion[\"LEGACY\"] = 0] = \"LEGACY\";\n CheckpointFormatVersion[CheckpointFormatVersion[\"V1\"] = 1] = \"V1\";\n CheckpointFormatVersion[CheckpointFormatVersion[\"V2\"] = 2] = \"V2\";\n })(CheckpointFormatVersion = SaverDef.CheckpointFormatVersion || (SaverDef.CheckpointFormatVersion = {}));\n})(SaverDef || (SaverDef = {}));\n//# sourceMappingURL=compiled_api.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nconst CUSTOM_OPS = {};\n/**\n * Register an Op for graph model executor. This allow you to register\n * TensorFlow custom op or override existing op.\n *\n * Here is an example of registering a new MatMul Op.\n * ```js\n * const customMatmul = (node) =>\n * tf.matMul(\n * node.inputs[0], node.inputs[1],\n * node.attrs['transpose_a'], node.attrs['transpose_b']);\n *\n * tf.registerOp('MatMul', customMatmul);\n * ```\n * The inputs and attrs of the node object is based on the TensorFlow op\n * registry.\n *\n * @param name The Tensorflow Op name.\n * @param opFunc An op function which is called with the current graph node\n * during execution and needs to return a tensor or a list of tensors. The node\n * has the following attributes:\n * - attr: A map from attribute name to its value\n * - inputs: A list of input tensors\n *\n * @doc {heading: 'Models', subheading: 'Op Registry'}\n */\nexport function registerOp(name, opFunc) {\n const opMapper = {\n tfOpName: name,\n category: 'custom',\n inputs: [],\n attrs: [],\n customExecutor: opFunc\n };\n CUSTOM_OPS[name] = opMapper;\n}\n/**\n * Retrieve the OpMapper object for the registered op.\n *\n * @param name The Tensorflow Op name.\n *\n * @doc {heading: 'Models', subheading: 'Op Registry'}\n */\nexport function getRegisteredOp(name) {\n return CUSTOM_OPS[name];\n}\n/**\n * Deregister the Op for graph model executor.\n *\n * @param name The Tensorflow Op name.\n *\n * @doc {heading: 'Models', subheading: 'Op Registry'}\n */\nexport function deregisterOp(name) {\n delete CUSTOM_OPS[name];\n}\n//# sourceMappingURL=register.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { clone, util } from '@tensorflow/tfjs-core';\nexport function getParamValue(paramName, node, tensorMap, context, resourceManager) {\n const inputParam = node.inputParams[paramName];\n if (inputParam && inputParam.inputIndexStart !== undefined) {\n const start = inputParam.inputIndexStart;\n const end = inputParam.inputIndexEnd === 0 ?\n undefined :\n (inputParam.inputIndexEnd === undefined ? 
start + 1 :\n inputParam.inputIndexEnd);\n if (inputParam.type === 'tensor') {\n return getTensor(node.inputNames[inputParam.inputIndexStart], tensorMap, context, resourceManager);\n }\n if (inputParam.type === 'tensors') {\n const inputs = node.inputNames.slice(start, end);\n return inputs.map(name => getTensor(name, tensorMap, context, resourceManager));\n }\n const tensor = getTensor(node.inputNames.slice(start)[0], tensorMap, context, resourceManager);\n const data = tensor.dataSync();\n return inputParam.type === 'number' ?\n data[0] :\n util.toNestedArray(tensor.shape, data);\n }\n const attrParam = node.attrParams[paramName];\n return attrParam && attrParam.value;\n}\n/**\n * Retrieve the tensor from tensorsMap based on input name.\n * @param name Node input name\n * @param tensorsMap Tensors map keyed by the node\n * @param context contains tensors and information for running the current node.\n * @param resourceManager Optional. Contains global resources of the model.\n */\nexport function getTensor(name, tensorsMap, context, resourceManager) {\n const [nodeName, index] = parseNodeName(name);\n if (resourceManager != null) {\n const tensor = resourceManager.getHashTableHandleByName(nodeName);\n if (tensor != null) {\n return tensor;\n }\n }\n const contextId = context.currentContextIds.find(contextId => {\n return !!tensorsMap[getNodeNameWithContextId(nodeName, contextId)];\n });\n return contextId !== undefined ?\n tensorsMap[getNodeNameWithContextId(nodeName, contextId)][index] :\n undefined;\n}\n/**\n * Retrieve the tensors based on input name for current context.\n * @param name Node input name\n * @param tensorsMap Tensors map keyed by the node\n */\nexport function getTensorsForCurrentContenxt(name, tensorsMap, context) {\n return tensorsMap[getNodeNameWithContextId(name, context.currentContextId)];\n}\n/**\n * Returns the node name and index from the Node input name.\n * @param inputName The input name of the node, in format of\n * node_name:output_index, i.e. MatMul:0, if the output_index is not set, it is\n * default to 0.\n */\nexport function getNodeNameAndIndex(inputName, context) {\n const [nodeName, index] = parseNodeName(inputName);\n return [\n getNodeNameWithContextId(nodeName, context && context.currentContextId),\n index\n ];\n}\nfunction getNodeNameWithContextId(name, contextId) {\n return !!contextId ? `${name}-${contextId}` : name;\n}\nexport function parseNodeName(name) {\n const parts = name.split(':');\n if (parts.length === 1) {\n return [name, 0];\n }\n const nodeName = parts[0];\n return [nodeName, Number(parts[parts.length - 1])];\n}\nexport function split(arr, size) {\n const res = [];\n for (let i = 0; i < arr.length; i += size) {\n res.push(arr.slice(i, i + size));\n }\n return res;\n}\nexport function getPadding(node, tensorMap, context) {\n let pad = getParamValue('pad', node, tensorMap, context);\n if (pad === 'explicit') {\n // This is 1d array, we need to convert it to 2d array\n pad = getParamValue('explicitPaddings', node, tensorMap, context);\n const explicitPadding = [[0, 0], [0, 0], [0, 0], [0, 0]];\n for (let i = 0; i < 4; i++) {\n explicitPadding[i][0] = pad[i * 2];\n explicitPadding[i][1] = pad[i * 2 + 1];\n }\n return explicitPadding;\n }\n return pad;\n}\n/**\n * Reuse the tensor if it is marked as keep, otherwise clone the tensor to\n * avoid disposal. 
This is important for TensorArray and TensorList ops, since\n * internally they use a tensor as the id for TensorArray and TensorList, and\n * to simplify lookup, they also use Tensor.id as the key to the internal map.\n * These id tensors have been marked as kept in the backend, we need avoid clone\n * them in order to create new Tensor.id.\n * @param tensor\n */\nexport function cloneTensor(tensor) {\n return tensor.kept ? tensor : clone(tensor);\n}\n//# sourceMappingURL=utils.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'Add',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'AddV2',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'AddN',\n 'category': 'arithmetic',\n 'inputs': [{ 'start': 0, 'end': 0, 'name': 'tensors', 'type': 'tensors' }]\n },\n {\n 'tfOpName': 'BiasAdd',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Sub',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'RealDiv',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Div',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'DivNoNan',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'FloorDiv',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 
'tfOpName': 'Mul',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Maximum',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' }\n ]\n },\n {\n 'tfOpName': 'Minimum',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' }\n ]\n },\n {\n 'tfOpName': 'Pow',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'SquaredDifference',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Mod',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'FloorMod',\n 'category': 'arithmetic',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n }\n];\n//# sourceMappingURL=arithmetic.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'Abs',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Acos',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Asin',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Atan',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Atan2',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'y', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Ceil',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'ClipByValue',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'clip_value_min', 'name': 'clipValueMin', 'type': 'number' },\n { 'tfName': 'clip_value_max', 'name': 'clipValueMax', 'type': 'number' }\n ]\n },\n {\n 'tfOpName': 'Complex',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'real', 'type': 'tensor' },\n { 'start': 1, 'name': 'imag', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'ComplexAbs',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Cos',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Cosh',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Elu',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 
'Exp',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Floor',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Log',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Imag',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }, {\n 'tfName': 'Tout',\n 'name': 'outputType',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Neg',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Real',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }, {\n 'tfName': 'Tout',\n 'name': 'outputType',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Prelu',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'alpha', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Relu',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Relu6',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }, {\n 'tfName': 'clipValueMin',\n 'name': 'clipValueMin',\n 'type': 'number',\n 'defaultValue': 0\n },\n {\n 'tfName': 'clipValueMax',\n 'name': 'clipValueMax',\n 'type': 'number',\n 'defaultValue': 6\n }\n ]\n },\n {\n 'tfOpName': 'Selu',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Sigmoid',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Sin',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Sinh',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Sqrt',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Rsqrt',\n 'category': 
'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Square',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Tan',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Tanh',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Sign',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Round',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Expm1',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Log1p',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Reciprocal',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Softplus',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Asinh',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Acosh',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Atanh',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Erf',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Prod',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axes', 'type': 'number[]' },\n ],\n 'attrs': [\n {\n 'tfName': 'keep_dims',\n 'name': 'keepDims',\n 'type': 'bool',\n 'notSupported': true\n },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'LeakyRelu',\n 'category': 'basic_math',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n 
],\n 'attrs': [\n {\n 'tfName': 'alpha',\n 'name': 'alpha',\n 'type': 'number',\n 'defaultValue': 0.2\n },\n {\n 'tfName': 'T',\n 'name': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n }\n];\n//# sourceMappingURL=basic_math.js.map", "export const json = [\n {\n 'tfOpName': 'LoopCond',\n 'category': 'control',\n 'inputs': [{ 'start': 0, 'name': 'pred', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'Switch',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'data', 'type': 'tensor' },\n { 'start': 1, 'name': 'pred', 'type': 'tensor' }\n ]\n },\n {\n 'tfOpName': 'Merge',\n 'category': 'control',\n 'inputs': [{ 'start': 0, 'end': 0, 'name': 'tensors', 'type': 'tensors' }]\n },\n {\n 'tfOpName': 'Enter',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensor', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true },\n { 'tfName': 'frame_name', 'name': 'frameName', 'type': 'string' },\n { 'tfName': 'is_constant', 'name': 'isConstant', 'type': 'bool' }\n ]\n },\n {\n 'tfOpName': 'Exit',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensor', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'NextIteration',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensor', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'TensorArrayV3',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'size', 'type': 'number' },\n ],\n 'attrs': [\n { 'tfName': 'dtype', 'name': 'dtype', 'type': 'dtype' },\n { 'tfName': 'element_shape', 'name': 'elementShape', 'type': 'shape' },\n { 'tfName': 'dynamic_size', 'name': 'dynamicSize', 'type': 'bool' },\n { 'tfName': 'clear_after_read', 'name': 'clearAfterRead', 'type': 'bool' },\n {\n 'tfName': 'identical_element_shapes',\n 'name': 'identicalElementShapes',\n 'type': 'bool'\n },\n { 'tfName': 'tensor_array_name', 'name': 'name', 'type': 'string' }\n ]\n },\n {\n 'tfOpName': 'TensorArrayWriteV3',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorArrayId', 'type': 'tensor' },\n { 'start': 1, 'name': 'index', 'type': 'number' },\n { 'start': 2, 'name': 'tensor', 'type': 'tensor' },\n { 'start': 3, 'name': 'flowIn', 'type': 'number' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'TensorArrayReadV3',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorArrayId', 'type': 'tensor' },\n { 'start': 1, 'name': 'index', 'type': 'number' },\n { 'start': 2, 'name': 'flowIn', 'type': 'number' },\n ],\n 'attrs': [{\n 'tfName': 'dtype',\n 'name': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }]\n },\n {\n 'tfOpName': 'TensorArrayGatherV3',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorArrayId', 'type': 'tensor' },\n { 'start': 1, 'name': 'indices', 'type': 'number[]' },\n { 'start': 2, 'name': 'flowIn', 'type': 'number' },\n ],\n 'attrs': [\n { 'tfName': 'dtype', 'name': 'dtype', 'type': 'dtype' },\n { 'tfName': 'element_shape', 'name': 'elementShape', 'type': 'shape' }\n ]\n },\n {\n 'tfOpName': 'TensorArrayScatterV3',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorArrayId', 'type': 'tensor' },\n { 'start': 1, 'name': 'indices', 'type': 'number[]' },\n { 'start': 2, 'name': 'tensor', 'type': 'tensor' },\n 
{ 'start': 3, 'name': 'flowIn', 'type': 'number' },\n ],\n 'attrs': [{ 'tfName': 'T', 'name': 'dtype', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorArrayConcatV3',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorArrayId', 'type': 'tensor' },\n { 'start': 1, 'name': 'flowIn', 'type': 'number' },\n ],\n 'attrs': [\n { 'tfName': 'dtype', 'name': 'dtype', 'type': 'dtype' }, {\n 'tfName': 'element_shape_except0',\n 'name': 'elementShapeExcept0',\n 'type': 'shape',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'TensorArraySplitV3',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorArrayId', 'type': 'tensor' },\n { 'start': 1, 'name': 'tensor', 'type': 'tensor' },\n { 'start': 2, 'name': 'lengths', 'type': 'number[]' },\n { 'start': 3, 'name': 'flowIn', 'type': 'number' },\n ],\n 'attrs': [{ 'tfName': 'T', 'name': 'dtype', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorArraySizeV3',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorArrayId', 'type': 'tensor' },\n { 'start': 1, 'name': 'flowIn', 'type': 'number' }\n ]\n },\n {\n 'tfOpName': 'TensorArrayCloseV3',\n 'category': 'control',\n 'inputs': [{ 'start': 0, 'name': 'tensorArrayId', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'StatelessIf',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'cond', 'type': 'tensor' },\n { 'start': 1, 'end': 0, 'name': 'args', 'type': 'tensors' }\n ],\n 'attrs': [\n { 'tfName': 'then_branch', 'name': 'thenBranch', 'type': 'func' },\n { 'tfName': 'else_branch', 'name': 'elseBranch', 'type': 'func' }\n ]\n },\n {\n 'tfOpName': 'If',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'cond', 'type': 'tensor' },\n { 'start': 1, 'end': 0, 'name': 'args', 'type': 'tensors' }\n ],\n 'attrs': [\n { 'tfName': 'then_branch', 'name': 'thenBranch', 'type': 'func' },\n { 'tfName': 'else_branch', 'name': 'elseBranch', 'type': 'func' }\n ]\n },\n {\n 'tfOpName': 'StatelessWhile',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'end': 0, 'name': 'args', 'type': 'tensors' },\n ],\n 'attrs': [\n { 'tfName': 'cond', 'name': 'cond', 'type': 'func' },\n { 'tfName': 'body', 'name': 'body', 'type': 'func' }\n ]\n },\n {\n 'tfOpName': 'While',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'end': 0, 'name': 'args', 'type': 'tensors' },\n ],\n 'attrs': [\n { 'tfName': 'cond', 'name': 'cond', 'type': 'func' },\n { 'tfName': 'body', 'name': 'body', 'type': 'func' }\n ]\n },\n {\n 'tfOpName': 'TensorListScatter',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensor', 'type': 'tensor' },\n { 'start': 1, 'name': 'indices', 'type': 'number[]' },\n { 'start': 2, 'name': 'elementShape', 'type': 'shape' }\n ],\n 'attrs': [{ 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorListScatterV2',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensor', 'type': 'tensor' },\n { 'start': 1, 'name': 'indices', 'type': 'number[]' },\n { 'start': 2, 'name': 'elementShape', 'type': 'shape' },\n { 'start': 3, 'name': 'numElements', 'type': 'number' },\n ],\n 'attrs': [{ 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorListGather',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorListId', 'type': 'tensor' },\n { 'start': 1, 'name': 'indices', 'type': 'number[]' },\n { 'start': 2, 'name': 'elementShape', 'type': 'shape' },\n ],\n 'attrs': [{ 'tfName': 'element_dtype', 'name': 'elementDType', 
'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorListGetItem',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorListId', 'type': 'tensor' },\n { 'start': 1, 'name': 'index', 'type': 'number' },\n { 'start': 2, 'name': 'elementShape', 'type': 'shape' },\n ],\n 'attrs': [{ 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorListSetItem',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorListId', 'type': 'tensor' },\n { 'start': 1, 'name': 'index', 'type': 'number' },\n { 'start': 2, 'name': 'tensor', 'type': 'tensor' },\n ],\n 'attrs': [{ 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorListReserve',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'elementShape', 'type': 'shape' },\n { 'start': 1, 'name': 'numElements', 'type': 'number' },\n ],\n 'attrs': [{ 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorListFromTensor',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensor', 'type': 'tensor' },\n { 'start': 1, 'name': 'elementShape', 'type': 'shape' }\n ],\n 'attrs': [{ 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorListStack',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorListId', 'type': 'tensor' },\n { 'start': 1, 'name': 'elementShape', 'type': 'shape' },\n ],\n 'attrs': [\n { 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' },\n { 'tfName': 'num_elements', 'name': 'numElements', 'type': 'dtype' }\n ]\n },\n {\n 'tfOpName': 'TensorListSplit',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensor', 'type': 'tensor' },\n { 'start': 1, 'name': 'elementShape', 'type': 'shape' },\n { 'start': 2, 'name': 'lengths', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorListConcat',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorListId', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'element_shape', 'name': 'elementShape', 'type': 'shape' },\n { 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' }\n ]\n },\n {\n 'tfOpName': 'TensorListPopBack',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorListId', 'type': 'tensor' },\n { 'start': 1, 'name': 'elementShape', 'type': 'shape' },\n ],\n 'attrs': [{ 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TensorListPushBack',\n 'category': 'control',\n 'inputs': [\n { 'start': 0, 'name': 'tensorListId', 'type': 'tensor' },\n { 'start': 1, 'name': 'tensor', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'element_dtype', 'name': 'elementDType', 'type': 'dtype' }\n ]\n }\n];\n//# sourceMappingURL=control.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'AvgPool',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' }, {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'notSupported': true\n },\n { 'tfName': 'ksize', 'name': 'kernelSize', 'type': 'number[]' },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'MaxPool',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' }, {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'notSupported': true\n },\n { 'tfName': 'ksize', 'name': 'kernelSize', 'type': 'number[]' },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'MaxPoolWithArgmax',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' },\n { 'tfName': 'ksize', 'name': 'kernelSize', 'type': 'number[]' }, {\n 'tfName': 'include_batch_in_index',\n 'name': 'includeBatchInIndex',\n 'type': 'bool'\n },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'AvgPool3D',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' }, {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'notSupported': true\n },\n { 'tfName': 'ksize', 'name': 'kernelSize', 'type': 'number[]' },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'MaxPool3D',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' }, {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'notSupported': true\n },\n { 'tfName': 'ksize', 'name': 'kernelSize', 'type': 'number[]' },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Conv1D',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'filter', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'stride', 'name': 'stride', 'type': 'number' },\n { 
'tfName': 'padding', 'name': 'pad', 'type': 'string' }, {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'defaultValue': 'NWC'\n },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }, {\n 'tfName': 'dilation',\n 'name': 'dilation',\n 'type': 'number',\n 'defaultValue': 1\n }\n ]\n },\n {\n 'tfOpName': 'Conv2D',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'filter', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true },\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' },\n { 'tfName': 'useCudnnOnGpu', 'name': 'useCudnnOnGpu', 'type': 'bool' }, {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'defaultValue': 'NHWC'\n },\n {\n 'tfName': 'explicit_paddings',\n 'name': 'explicitPaddings',\n 'type': 'number[]',\n 'defaultValue': []\n },\n { 'tfName': 'dilations', 'name': 'dilations', 'type': 'number[]' }\n ]\n },\n {\n 'tfOpName': '_FusedConv2D',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'filter', 'type': 'tensor' },\n { 'start': 2, end: 0, 'name': 'args', 'type': 'tensors' },\n ],\n 'attrs': [\n { 'tfName': 'num_args', 'name': 'numArgs', 'type': 'number' },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true },\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' },\n {\n 'tfName': 'explicit_paddings',\n 'name': 'explicitPaddings',\n 'type': 'number[]',\n 'defaultValue': []\n },\n {\n 'tfName': 'use_cudnn_on_gpu',\n 'name': 'useCudnnOnGpu',\n 'type': 'bool',\n 'defaultValue': true\n },\n {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'defaultValue': 'NHWC'\n },\n {\n 'tfName': 'dilations',\n 'name': 'dilations',\n 'type': 'number[]',\n 'defaultValue': [1, 1, 1, 1]\n },\n {\n 'tfName': 'fused_ops',\n 'name': 'fusedOps',\n 'type': 'string[]',\n 'defaultValue': []\n },\n {\n 'tfName': 'epsilon',\n 'name': 'epsilon',\n 'type': 'number',\n 'defaultValue': 0.0001\n },\n ]\n },\n {\n 'tfOpName': 'Conv2DBackpropInput',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 2, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'filter', 'type': 'tensor' },\n { 'start': 0, 'name': 'outputShape', 'type': 'number[]' },\n ],\n 'attrs': [\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' },\n {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'notSupported': true\n },\n {\n 'tfName': 'explicit_paddings',\n 'name': 'explicitPaddings',\n 'type': 'number[]',\n 'defaultValue': []\n },\n ]\n },\n {\n 'tfOpName': 'DepthwiseConv2d',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'input', 'type': 'tensor' },\n { 'start': 1, 'name': 'filter', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' }, {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'defaultValue': 'NHWC'\n },\n {\n 'tfName': 'explicit_paddings',\n 'name': 'explicitPaddings',\n 'type': 'number[]',\n 'defaultValue': []\n },\n { 'tfName': 'dilations', 'name': 'dilations', 'type': 'number[]' }\n ]\n },\n {\n 'tfOpName': 
'DepthwiseConv2dNative',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'input', 'type': 'tensor' },\n { 'start': 1, 'name': 'filter', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' }, {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'defaultValue': 'NHWC'\n },\n {\n 'tfName': 'explicit_paddings',\n 'name': 'explicitPaddings',\n 'type': 'number[]',\n 'defaultValue': []\n },\n { 'tfName': 'dilations', 'name': 'dilations', 'type': 'number[]' }\n ]\n },\n {\n 'tfOpName': 'FusedDepthwiseConv2dNative',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'filter', 'type': 'tensor' },\n { 'start': 2, end: 0, 'name': 'args', 'type': 'tensors' },\n ],\n 'attrs': [\n { 'tfName': 'num_args', 'name': 'numArgs', 'type': 'number' },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true },\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' }, {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'defaultValue': 'NHWC'\n },\n {\n 'tfName': 'dilations',\n 'name': 'dilations',\n 'type': 'number[]',\n 'defaultValue': [1, 1, 1, 1]\n },\n {\n 'tfName': 'fused_ops',\n 'name': 'fusedOps',\n 'type': 'string[]',\n 'defaultValue': []\n }\n ]\n },\n {\n 'tfOpName': 'Conv3D',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'filter', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' }, {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'defaultValue': 'NHWC'\n },\n { 'tfName': 'dilations', 'name': 'dilations', 'type': 'number[]' }\n ],\n },\n {\n 'tfOpName': 'Dilation2D',\n 'category': 'convolution',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'filter', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'strides', 'name': 'strides', 'type': 'number[]' },\n { 'tfName': 'rates', 'name': 'dilations', 'type': 'number[]' },\n { 'tfName': 'padding', 'name': 'pad', 'type': 'string' }\n ]\n }\n];\n//# sourceMappingURL=convolution.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'Fill',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'shape', 'type': 'number[]' },\n { 'start': 1, 'name': 'value', 'type': 'number' },\n ],\n 'attrs': [{ 'tfName': 'T', 'name': 'dtype', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'LinSpace',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'start', 'type': 'number' },\n { 'start': 1, 'name': 'stop', 'type': 'number' },\n { 'start': 2, 'name': 'num', 'type': 'number' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'OneHot',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'indices', 'type': 'tensor' },\n { 'start': 1, 'name': 'depth', 'type': 'number' },\n { 'start': 2, 'name': 'onValue', 'type': 'number', 'defaultValue': 1 },\n { 'start': 3, 'name': 'offValue', 'type': 'number', 'defaultValue': 0 },\n ],\n 'attrs': [\n {\n 'tfName': 'axis',\n 'name': 'axis',\n 'type': 'number',\n 'notSupported': true\n },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Ones',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'shape', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'T', 'name': 'dtype', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'OnesLike',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [{ 'tfName': 'dtype', 'name': 'dtype', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'RandomUniform',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'shape', 'type': 'number[]' },\n ],\n 'attrs': [\n {\n 'tfName': 'minval',\n 'name': 'minval',\n 'type': 'number',\n 'defaultValue': 0\n },\n {\n 'tfName': 'maxval',\n 'name': 'maxval',\n 'type': 'number',\n 'defaultValue': 1\n },\n { 'tfName': 'dtype', 'name': 'dtype', 'type': 'dtype' },\n { 'tfName': 'seed', 'name': 'seed', 'type': 'number', 'defaultValue': 0 }, {\n 'tfName': 'seed2',\n 'name': 'seed2',\n 'type': 'number',\n 'defaultValue': 0,\n 'notSupported': true\n },\n { 'tfName': 'T', 'name': 'T', 'type': 'number', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Range',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'start', 'type': 'number' },\n { 'start': 1, 'name': 'stop', 'type': 'number' },\n { 'start': 2, 'name': 'step', 'type': 'number', 'defaultValue': 0 },\n ],\n 'attrs': [{ 'tfName': 'Tidx', 'name': 'dtype', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'TruncatedNormal',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'shape', 'type': 'number[]' },\n ],\n 'attrs': [\n {\n 'tfName': 'means',\n 'name': 'mean',\n 'type': 'number',\n 'defaultValue': 0.0\n },\n {\n 'tfName': 'stddev',\n 'name': 'stdDev',\n 'type': 'number',\n 'defaultValue': 1.0\n },\n { 'tfName': 'seed', 'name': 'seed', 'type': 'number' 
}, {\n 'tfName': 'seed2',\n 'name': 'seed2',\n 'type': 'number',\n 'defaultValue': 0,\n 'notSupported': true\n },\n { 'tfName': 'dtype', 'name': 'dtype', 'type': 'dtype' },\n { 'tfName': 'T', 'name': 'T', 'type': 'number', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Zeros',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'shape', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'T', 'name': 'dtype', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'ZerosLike',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [{ 'tfName': 'T', 'name': 'dtype', 'type': 'dtype' }]\n },\n {\n 'tfOpName': 'Multinomial',\n 'category': 'creation',\n 'inputs': [\n { 'start': 0, 'name': 'logits', 'type': 'tensor' },\n { 'start': 1, 'name': 'numSamples', 'type': 'number' },\n ],\n 'attrs': [\n { 'tfName': 'seed', 'name': 'seed', 'type': 'number' },\n { 'tfName': 'seed2', 'name': 'seed2', 'type': 'number' },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype' },\n { 'tfName': 'output_dtype', 'name': 'output_dtype', 'type': 'dtype' }\n ]\n }\n];\n//# sourceMappingURL=creation.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'NonMaxSuppressionV2',\n 'category': 'dynamic',\n 'inputs': [\n { 'start': 0, 'name': 'boxes', 'type': 'tensor' },\n { 'start': 1, 'name': 'scores', 'type': 'tensor' },\n { 'start': 2, 'name': 'maxOutputSize', 'type': 'number' },\n { 'start': 3, 'name': 'iouThreshold', 'type': 'number' }\n ]\n },\n {\n 'tfOpName': 'NonMaxSuppressionV3',\n 'category': 'dynamic',\n 'inputs': [\n { 'start': 0, 'name': 'boxes', 'type': 'tensor' },\n { 'start': 1, 'name': 'scores', 'type': 'tensor' },\n { 'start': 2, 'name': 'maxOutputSize', 'type': 'number' },\n { 'start': 3, 'name': 'iouThreshold', 'type': 'number' },\n { 'start': 4, 'name': 'scoreThreshold', 'type': 'number' }\n ]\n },\n {\n 'tfOpName': 'NonMaxSuppressionV4',\n 'category': 'dynamic',\n 'inputs': [\n { 'start': 0, 'name': 'boxes', 'type': 'tensor' },\n { 'start': 1, 'name': 'scores', 'type': 'tensor' },\n { 'start': 2, 'name': 'maxOutputSize', 'type': 'number' },\n { 'start': 3, 'name': 'iouThreshold', 'type': 'number' },\n { 'start': 4, 'name': 'scoreThreshold', 'type': 'number' }\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }, {\n 'tfName': 'T_threshold',\n 'name': 'threshold',\n 'type': 'dtype',\n 'notSupported': true\n },\n {\n 'tfName': 'pad_to_max_output_size',\n 'name': 'padToMaxOutputSize',\n 'type': 'bool'\n }\n ]\n },\n {\n 'tfOpName': 'NonMaxSuppressionV5',\n 'category': 'dynamic',\n 'inputs': [\n { 'start': 0, 'name': 'boxes', 'type': 'tensor' },\n { 'start': 1, 'name': 'scores', 'type': 'tensor' },\n { 'start': 2, 'name': 'maxOutputSize', 'type': 'number' },\n { 'start': 3, 'name': 'iouThreshold', 'type': 
'number' },\n { 'start': 4, 'name': 'scoreThreshold', 'type': 'number' },\n { 'start': 5, 'name': 'softNmsSigma', 'type': 'number' }\n ]\n },\n {\n 'tfOpName': 'Where',\n 'category': 'dynamic',\n 'inputs': [\n { 'start': 0, 'name': 'condition', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'ListDiff',\n 'category': 'dynamic',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'y', 'type': 'tensor' },\n ],\n 'attrs': [{\n 'tfName': 'T',\n 'name': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }]\n }\n];\n//# sourceMappingURL=dynamic.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'TopKV2',\n 'category': 'evaluation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'k', 'type': 'number' },\n ],\n 'attrs': [{ 'tfName': 'sorted', 'name': 'sorted', 'type': 'bool' }]\n },\n {\n 'tfOpName': 'Unique',\n 'category': 'evaluation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n },\n {\n 'tfOpName': 'UniqueV2',\n 'category': 'evaluation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number' },\n ],\n },\n];\n//# sourceMappingURL=evaluation.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'PlaceholderWithDefault',\n 'category': 'graph',\n 'inputs': [\n { 'start': 0, 'name': 'default', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'shape', 'name': 'shape', 'type': 'shape' },\n { 'tfName': 'dtype', 'name': 'dtype', 'type': 'dtype' }\n ]\n },\n {\n 'tfOpName': 'Placeholder',\n 'category': 'graph',\n 'attrs': [\n { 'tfName': 'shape', 'name': 'shape', 'type': 'shape' },\n { 'tfName': 'dtype', 'name': 'dtype', 'type': 'dtype' }\n ]\n },\n { 'tfOpName': 'Const', 'category': 'graph' }, {\n 'tfOpName': 'Identity',\n 'category': 'graph',\n 'inputs': [{ 'start': 0, 'name': 'x', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'IdentityN',\n 'category': 'graph',\n 'inputs': [{ 'start': 0, 'end': 0, 'name': 'x', 'type': 'tensors' }]\n },\n {\n 'tfOpName': 'Snapshot',\n 'category': 'graph',\n 'inputs': [{ 'start': 0, 'name': 'x', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'Rank',\n 'category': 'graph',\n 'inputs': [{ 'start': 0, 'name': 'x', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'Size',\n 'category': 'graph',\n 'inputs': [{ 'start': 0, 'name': 'x', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'Shape',\n 'category': 'graph',\n 'inputs': [{ 'start': 0, 'name': 'x', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'ShapeN',\n 'category': 'graph',\n 'inputs': [{ 'start': 0, 'end': 0, 'name': 'x', 'type': 'tensors' }]\n },\n {\n 'tfOpName': 'Print',\n 'category': 'graph',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'data', 'type': 'tensors' },\n ],\n 'attrs': [\n { 'tfName': 'message', 'name': 'message', 'type': 'string' }, {\n 'tfName': 'first_n',\n 'name': 'firstN',\n 'type': 'number',\n 'notSupported': true\n },\n {\n 'tfName': 'summarize',\n 'name': 'summarize',\n 'type': 'number',\n 'defaultValue': 3\n }\n ]\n },\n { 'tfOpName': 'NoOp', 'category': 'graph', 'inputs': [] }, {\n 'tfOpName': 'StopGradient',\n 'category': 'graph',\n 'inputs': [{ 'start': 0, 'name': 'x', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'FakeQuantWithMinMaxVars',\n 'category': 'graph',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'min', 'name': 'min', 'type': 'number' },\n { 'tfName': 'max', 'name': 'max', 'type': 'number' }\n ]\n }\n];\n//# sourceMappingURL=graph.js.map", "export const json = [\n {\n 'tfOpName': 'HashTable',\n 'category': 'hash_table',\n 'inputs': [],\n 'attrs': [\n { 'tfName': 'shared_name', 'name': 'sharedName', 'type': 'string' },\n {\n 'tfName': 'use_node_name_sharing',\n 'name': 'useNodeNameSharing',\n 'type': 'bool'\n },\n { 'tfName': 'key_dtype', 'name': 'keyDType', 'type': 'dtype' },\n { 'tfName': 'value_dtype', 'name': 'valueDType', 'type': 'dtype' },\n ]\n },\n {\n 'tfOpName': 'HashTableV2',\n 'category': 'hash_table',\n 'inputs': [],\n 'attrs': [\n { 'tfName': 'shared_name', 'name': 'sharedName', 
'type': 'string' },\n {\n 'tfName': 'use_node_name_sharing',\n 'name': 'useNodeNameSharing',\n 'type': 'bool'\n },\n { 'tfName': 'key_dtype', 'name': 'keyDType', 'type': 'dtype' },\n { 'tfName': 'value_dtype', 'name': 'valueDType', 'type': 'dtype' },\n ]\n },\n {\n 'tfOpName': 'LookupTableImport',\n 'category': 'hash_table',\n 'inputs': [\n { 'start': 0, 'name': 'tableHandle', 'type': 'tensor' },\n { 'start': 1, 'name': 'keys', 'type': 'tensor' },\n { 'start': 2, 'name': 'values', 'type': 'tensor' }\n ],\n 'attrs': [\n { 'tfName': 'Tin', 'name': 'tIn', 'type': 'dtype', 'notSupported': true }, {\n 'tfName': 'Tout',\n 'name': 'tOut',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'LookupTableImportV2',\n 'category': 'hash_table',\n 'inputs': [\n { 'start': 0, 'name': 'tableHandle', 'type': 'tensor' },\n { 'start': 1, 'name': 'keys', 'type': 'tensor' },\n { 'start': 2, 'name': 'values', 'type': 'tensor' }\n ],\n 'attrs': [\n { 'tfName': 'Tin', 'name': 'tIn', 'type': 'dtype', 'notSupported': true }, {\n 'tfName': 'Tout',\n 'name': 'tOut',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'LookupTableFind',\n 'category': 'hash_table',\n 'inputs': [\n { 'start': 0, 'name': 'tableHandle', 'type': 'tensor' },\n { 'start': 1, 'name': 'keys', 'type': 'tensor' },\n { 'start': 2, 'name': 'defaultValue', 'type': 'tensor' }\n ],\n 'attrs': [\n { 'tfName': 'Tin', 'name': 'tIn', 'type': 'dtype', 'notSupported': true }, {\n 'tfName': 'Tout',\n 'name': 'tOut',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'LookupTableFindV2',\n 'category': 'hash_table',\n 'inputs': [\n { 'start': 0, 'name': 'tableHandle', 'type': 'tensor' },\n { 'start': 1, 'name': 'keys', 'type': 'tensor' },\n { 'start': 2, 'name': 'defaultValue', 'type': 'tensor' }\n ],\n 'attrs': [\n { 'tfName': 'Tin', 'name': 'tIn', 'type': 'dtype', 'notSupported': true }, {\n 'tfName': 'Tout',\n 'name': 'tOut',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n }\n];\n//# sourceMappingURL=hash_table.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'ResizeBilinear',\n 'category': 'image',\n 'inputs': [\n { 'start': 0, 'name': 'images', 'type': 'tensor' },\n { 'start': 1, 'name': 'size', 'type': 'number[]' },\n ],\n 'attrs': [\n { 'tfName': 'align_corners', 'name': 'alignCorners', 'type': 'bool' },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'ResizeNearestNeighbor',\n 'category': 'image',\n 'inputs': [\n { 'start': 0, 'name': 'images', 'type': 'tensor' },\n { 'start': 1, 'name': 'size', 'type': 'number[]' },\n ],\n 'attrs': [\n { 'tfName': 'align_corners', 'name': 'alignCorners', 'type': 'bool' },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'CropAndResize',\n 'category': 'image',\n 'inputs': [\n { 'start': 0, 'name': 'image', 'type': 'tensor' },\n { 'start': 1, 'name': 'boxes', 'type': 'tensor' },\n { 'start': 2, 'name': 'boxInd', 'type': 'tensor' },\n { 'start': 3, 'name': 'cropSize', 'type': 'number[]' },\n ],\n 'attrs': [\n { 'tfName': 'method', 'name': 'method', 'type': 'string' }, {\n 'tfName': 'extrapolation_value',\n 'name': 'extrapolationValue',\n 'type': 'number'\n }\n ]\n }\n];\n//# sourceMappingURL=image.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'Equal',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'NotEqual',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Greater',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'GreaterEqual',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Less',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'LessEqual',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'LogicalAnd',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'LogicalNot',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'LogicalOr',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Select',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'condition', 'type': 'tensor' },\n { 'start': 1, 'name': 'a', 'type': 'tensor' },\n { 'start': 2, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'SelectV2',\n 'category': 'logical',\n 'inputs': [\n { 'start': 0, 'name': 'condition', 'type': 'tensor' },\n { 'start': 1, 'name': 
'a', 'type': 'tensor' },\n { 'start': 2, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [{\n 'tfName': 'T',\n 'name': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }]\n }\n];\n//# sourceMappingURL=logical.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': '_FusedMatMul',\n 'category': 'matrices',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n { 'start': 2, end: 0, 'name': 'args', 'type': 'tensors' },\n ],\n 'attrs': [\n { 'tfName': 'num_args', 'name': 'numArgs', 'type': 'number' }, {\n 'tfName': 'fused_ops',\n 'name': 'fusedOps',\n 'type': 'string[]',\n 'defaultValue': []\n },\n {\n 'tfName': 'epsilon',\n 'name': 'epsilon',\n 'type': 'number',\n 'defaultValue': 0.0001\n },\n {\n 'tfName': 'transpose_a',\n 'name': 'transposeA',\n 'type': 'bool',\n 'defaultValue': false\n },\n {\n 'tfName': 'transpose_b',\n 'name': 'transposeB',\n 'type': 'bool',\n 'defaultValue': false\n },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'MatMul',\n 'category': 'matrices',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n {\n 'tfName': 'transpose_a',\n 'name': 'transposeA',\n 'type': 'bool',\n 'defaultValue': false\n },\n {\n 'tfName': 'transpose_b',\n 'name': 'transposeB',\n 'type': 'bool',\n 'defaultValue': false\n },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'BatchMatMul',\n 'category': 'matrices',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n {\n 'tfName': 'adj_x',\n 'name': 'transposeA',\n 'type': 'bool',\n 'defaultValue': false\n },\n {\n 'tfName': 'adj_y',\n 'name': 'transposeB',\n 'type': 'bool',\n 'defaultValue': false\n },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'BatchMatMulV2',\n 'category': 'matrices',\n 'inputs': [\n { 'start': 0, 'name': 'a', 'type': 'tensor' },\n { 'start': 1, 'name': 'b', 'type': 'tensor' },\n ],\n 'attrs': [\n {\n 'tfName': 'adj_x',\n 'name': 'transposeA',\n 'type': 'bool',\n 'defaultValue': false\n },\n {\n 'tfName': 'adj_y',\n 'name': 'transposeB',\n 'type': 'bool',\n 'defaultValue': false\n },\n { 'tfName': 'T', 'name': 'dtype', 'type': 'dtype', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'Transpose',\n 'category': 'matrices',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'perm', 'type': 'number[]' },\n ],\n 'attrs': [{\n 'tfName': 'T',\n 'name': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }]\n }\n];\n//# sourceMappingURL=matrices.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'FusedBatchNorm',\n 'category': 'normalization',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'scale', 'type': 'tensor' },\n { 'start': 2, 'name': 'offset', 'type': 'tensor' },\n { 'start': 3, 'name': 'mean', 'type': 'tensor' },\n { 'start': 4, 'name': 'variance', 'type': 'tensor' },\n ],\n 'attrs': [\n {\n 'tfName': 'epsilon',\n 'name': 'epsilon',\n 'type': 'number',\n 'defaultValue': 0.001\n },\n {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'FusedBatchNormV2',\n 'category': 'normalization',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'scale', 'type': 'tensor' },\n { 'start': 2, 'name': 'offset', 'type': 'tensor' },\n { 'start': 3, 'name': 'mean', 'type': 'tensor' },\n { 'start': 4, 'name': 'variance', 'type': 'tensor' },\n ],\n 'attrs': [\n {\n 'tfName': 'epsilon',\n 'name': 'epsilon',\n 'type': 'number',\n 'defaultValue': 0.001\n },\n {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'FusedBatchNormV3',\n 'category': 'normalization',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'scale', 'type': 'tensor' },\n { 'start': 2, 'name': 'offset', 'type': 'tensor' },\n { 'start': 3, 'name': 'mean', 'type': 'tensor' },\n { 'start': 4, 'name': 'variance', 'type': 'tensor' },\n ],\n 'attrs': [\n {\n 'tfName': 'epsilon',\n 'name': 'epsilon',\n 'type': 'number',\n 'defaultValue': 0.001\n },\n {\n 'tfName': 'data_format',\n 'name': 'dataFormat',\n 'type': 'string',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'LRN',\n 'category': 'normalization',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n {\n 'tfName': 'depth_radius',\n 'name': 'radius',\n 'type': 'number',\n 'defaultValue': 5\n },\n { 'tfName': 'bias', 'name': 'bias', 'type': 'number', 'defaultValue': 1.0 },\n {\n 'tfName': 'alpha',\n 'name': 'alpha',\n 'type': 'number',\n 'defaultValue': 1.0\n },\n {\n 'tfName': 'beta',\n 'name': 'beta',\n 'type': 'number',\n 'defaultValue': 0.5\n }\n ]\n },\n {\n 'tfOpName': 'Softmax',\n 'category': 'normalization',\n 'inputs': [{ 'start': 0, 'name': 'x', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'LogSoftmax',\n 'category': 'normalization',\n 'inputs': [{ 'start': 0, 'name': 'x', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'SparseToDense',\n 'category': 'normalization',\n 'inputs': [\n { 'start': 0, 'name': 'sparseIndices', 'type': 'tensor' },\n { 'start': 1, 'name': 'outputShape', 'type': 'number[]' },\n { 'start': 2, 'name': 'sparseValues', 'type': 'tensor' },\n { 'start': 3, 'name': 'defaultValue', 'type': 'tensor' },\n ],\n 'attrs': [{\n 'tfName': 'validate_indices',\n 'name': 
'validateIndices',\n 'type': 'bool',\n 'defaultValue': true,\n 'notSupported': true\n }]\n }\n];\n//# sourceMappingURL=normalization.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'Max',\n 'category': 'reduction',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'keep_dims', 'name': 'keepDims', 'type': 'bool' }]\n },\n {\n 'tfOpName': 'Mean',\n 'category': 'reduction',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'keep_dims', 'name': 'keepDims', 'type': 'bool' }]\n },\n {\n 'tfOpName': 'Min',\n 'category': 'reduction',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'keep_dims', 'name': 'keepDims', 'type': 'bool' }]\n },\n {\n 'tfOpName': 'Sum',\n 'category': 'reduction',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'keep_dims', 'name': 'keepDims', 'type': 'bool' }]\n },\n {\n 'tfOpName': 'All',\n 'category': 'reduction',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'keep_dims', 'name': 'keepDims', 'type': 'bool' }]\n },\n {\n 'tfOpName': 'Any',\n 'category': 'reduction',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'keep_dims', 'name': 'keepDims', 'type': 'bool' }]\n },\n {\n 'tfOpName': 'ArgMax',\n 'category': 'reduction',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number' }\n ]\n },\n {\n 'tfOpName': 'ArgMin',\n 'category': 'reduction',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number' }\n ]\n },\n {\n 'tfOpName': 'Prod',\n 'category': 'reduction',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'keep_dims', 'name': 'keepDims', 'type': 'bool' }]\n },\n {\n 'tfOpName': 'Cumsum',\n 'category': 'reduction',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number' },\n ],\n 'attrs': [\n { 'tfName': 'exclusive', 'name': 'exclusive', 'type': 'bool' },\n { 'tfName': 'reverse', 'name': 'reverse', 'type': 'bool' }\n ]\n }\n];\n//# sourceMappingURL=reduction.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'ConcatV2',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'end': -1, 'name': 'tensors', 'type': 'tensors' },\n { 'start': -1, 'name': 'axis', 'type': 'number' }\n ],\n 'attrs': [{ 'tfName': 'N', 'name': 'n', 'type': 'number', 'defaultValue': 2 }]\n },\n {\n 'tfOpName': 'Concat',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 1, 'end': 0, 'name': 'tensors', 'type': 'tensors' },\n { 'start': 0, 'name': 'axis', 'type': 'number' }\n ],\n 'attrs': [{ 'tfName': 'N', 'name': 'n', 'type': 'number', 'defaultValue': 2 }]\n },\n {\n 'tfOpName': 'GatherV2',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'indices', 'type': 'tensor' },\n { 'start': 2, 'name': 'axis', 'type': 'number', 'defaultValue': 0 }\n ]\n },\n {\n 'tfOpName': 'Gather',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'indices', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'axis', 'name': 'axis', 'type': 'number', 'defaultValue': 0 }, {\n 'tfName': 'validate_indices',\n 'name': 'validateIndices',\n 'type': 'bool',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Reverse',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'dims', 'type': 'bool', 'notSupported': true }\n ]\n },\n {\n 'tfOpName': 'ReverseV2',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number[]' }\n ]\n },\n {\n 'tfOpName': 'Slice',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'begin', 'type': 'number[]' },\n { 'start': 2, 'name': 'size', 'type': 'number[]' }\n ]\n },\n {\n 'tfOpName': 'StridedSlice',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'begin', 'type': 'number[]' },\n { 'start': 2, 'name': 'end', 'type': 'number[]' },\n { 'start': 3, 'name': 'strides', 'type': 'number[]' },\n ],\n 'attrs': [\n {\n 'tfName': 'begin_mask',\n 'name': 'beginMask',\n 'type': 'number',\n 'defaultValue': 0\n },\n {\n 'tfName': 'end_mask',\n 'name': 'endMask',\n 'type': 'number',\n 'defaultValue': 0\n },\n {\n 'tfName': 'new_axis_mask',\n 'name': 'newAxisMask',\n 'type': 'number',\n 'defaultValue': 0\n },\n {\n 'tfName': 'ellipsis_mask',\n 'name': 'ellipsisMask',\n 'type': 'number',\n 'defaultValue': 0\n },\n {\n 'tfName': 'shrink_axis_mask',\n 'name': 'shrinkAxisMask',\n 'type': 'number',\n 'defaultValue': 0\n }\n ]\n },\n {\n 'tfOpName': 'Pack',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'end': 0, 'name': 'tensors', 'type': 'tensors' },\n ],\n 'attrs': [\n { 'tfName': 'axis', 'name': 'axis', 'type': 'number', 
'defaultValue': 0 }\n ]\n },\n {\n 'tfOpName': 'Unpack',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'tensor', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'axis', 'name': 'axis', 'type': 'number', 'defaultValue': 0 }, {\n 'tfName': 'num',\n 'name': 'num',\n 'type': 'number',\n 'defaultValue': 0,\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Tile',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'reps', 'type': 'number[]' }\n ]\n },\n {\n 'tfOpName': 'Split',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'axis', 'type': 'number', 'defaultValue': 0 },\n { 'start': 1, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [{\n 'tfName': 'num_split',\n 'name': 'numOrSizeSplits',\n 'type': 'number',\n 'defaultValue': 1\n }]\n },\n {\n 'tfOpName': 'SplitV',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'numOrSizeSplits', 'type': 'number[]' },\n { 'start': 2, 'name': 'axis', 'type': 'number', 'defaultValue': 0 }\n ]\n },\n {\n 'tfOpName': 'ScatterNd',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'indices', 'type': 'tensor' },\n { 'start': 1, 'name': 'values', 'type': 'tensor' },\n { 'start': 2, 'name': 'shape', 'type': 'number[]' }\n ]\n },\n {\n 'tfOpName': 'GatherNd',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'indices', 'type': 'tensor' }\n ]\n },\n {\n 'tfOpName': 'SparseToDense',\n 'category': 'slice_join',\n 'inputs': [\n { 'start': 0, 'name': 'sparseIndices', 'type': 'tensor' },\n { 'start': 1, 'name': 'outputShape', 'type': 'number[]' },\n { 'start': 2, 'name': 'sparseValues', 'type': 'tensor' },\n { 'start': 3, 'name': 'defaultValue', 'type': 'tensor' },\n ],\n 'attrs': [{\n 'tfName': 'validate_indices',\n 'name': 'validateIndices',\n 'type': 'bool',\n 'defaultValue': false,\n 'notSupported': true\n }]\n }\n];\n//# sourceMappingURL=slice_join.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'FFT',\n 'category': 'spectral',\n 'inputs': [{ 'start': 0, 'name': 'x', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'IFFT',\n 'category': 'spectral',\n 'inputs': [{ 'start': 0, 'name': 'x', 'type': 'tensor' }]\n },\n {\n 'tfOpName': 'RFFT',\n 'category': 'spectral',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' }, {\n 'start': 1,\n 'name': 'fft_length',\n 'type': 'number',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'IRFFT',\n 'category': 'spectral',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' }, {\n 'start': 1,\n 'name': 'fft_length',\n 'type': 'number',\n 'notSupported': true\n }\n ]\n }\n];\n//# sourceMappingURL=spectral.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const json = [\n {\n 'tfOpName': 'Cast',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n {\n 'tfName': 'SrcT',\n 'name': 'sdtype',\n 'type': 'dtype',\n 'notSupported': true\n },\n { 'tfName': 'DstT', 'name': 'dtype', 'type': 'dtype' }\n ]\n },\n {\n 'tfOpName': 'ExpandDims',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'axis', 'type': 'number' }\n ]\n },\n {\n 'tfOpName': 'MirrorPad',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'padding', 'type': 'number[]' },\n ],\n 'attrs': [{ 'tfName': 'mode', 'name': 'mode', 'type': 'string' }]\n },\n {\n 'tfOpName': 'Pad',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'padding', 'type': 'number[]' },\n ],\n 'attrs': [{\n 'tfName': 'constant_value',\n 'name': 'constantValue',\n 'type': 'number',\n 'defaultValue': 0\n }]\n },\n {\n 'tfOpName': 'PadV2',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'padding', 'type': 'number[]' }, {\n 'start': 2,\n 'name': 'constantValue',\n 'type': 'number',\n 'defaultValue': 0\n }\n ]\n },\n {\n 'tfOpName': 'Reshape',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 
'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'shape', 'type': 'number[]' }\n ]\n },\n {\n 'tfOpName': 'Squeeze',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [{\n 'tfName': 'axis',\n 'tfDeprecatedName': 'squeeze_dims',\n 'name': 'axis',\n 'type': 'number[]'\n }]\n },\n {\n 'tfOpName': 'SpaceToBatchND',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'blockShape', 'type': 'number[]' },\n { 'start': 2, 'name': 'paddings', 'type': 'number[]' }\n ]\n },\n {\n 'tfOpName': 'BatchToSpaceND',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'blockShape', 'type': 'number[]' },\n { 'start': 2, 'name': 'crops', 'type': 'number[]' }\n ]\n },\n {\n 'tfOpName': 'DepthToSpace',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n ],\n 'attrs': [\n { 'tfName': 'block_size', 'name': 'blockSize', 'type': 'number' },\n { 'tfName': 'data_format', 'name': 'dataFormat', 'type': 'string' }\n ]\n },\n {\n 'tfOpName': 'BroadcastTo',\n 'category': 'transformation',\n 'inputs': [\n { 'start': 0, 'name': 'x', 'type': 'tensor' },\n { 'start': 1, 'name': 'shape', 'type': 'number[]' },\n ],\n 'attrs': []\n }\n];\n//# sourceMappingURL=transformation.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from '@tensorflow/tfjs-core';\nimport * as tensorflow from '../data/compiled_api';\nimport { getRegisteredOp } from './custom_op/register';\nimport { getNodeNameAndIndex } from './executors/utils';\nimport * as arithmetic from './op_list/arithmetic';\nimport * as basicMath from './op_list/basic_math';\nimport * as control from './op_list/control';\nimport * as convolution from './op_list/convolution';\nimport * as creation from './op_list/creation';\nimport * as dynamic from './op_list/dynamic';\nimport * as evaluation from './op_list/evaluation';\nimport * as graph from './op_list/graph';\nimport * as hashTable from './op_list/hash_table';\nimport * as image from './op_list/image';\nimport * as logical from './op_list/logical';\nimport * as matrices from './op_list/matrices';\nimport * as normalization from './op_list/normalization';\nimport * as reduction from './op_list/reduction';\nimport * as sliceJoin from './op_list/slice_join';\nimport * as spectral from './op_list/spectral';\nimport * as transformation from './op_list/transformation';\nexport class OperationMapper {\n // Singleton instance for the mapper\n static get Instance() {\n return this._instance || (this._instance = new this());\n }\n // Loads the op mapping from the JSON file.\n constructor() {\n const ops = [\n arithmetic, basicMath, control, convolution, creation, dynamic,\n evaluation, logical, image, graph, 
matrices, normalization, reduction,\n sliceJoin, spectral, transformation, hashTable\n ];\n const mappersJson = [].concat(...ops.map(op => op.json));\n this.opMappers = mappersJson.reduce((map, mapper) => {\n map[mapper.tfOpName] = mapper;\n return map;\n }, {});\n }\n // Converts the model inference graph from Tensorflow GraphDef to local\n // representation for TensorFlow.js API\n transformGraph(graph, signature = {}) {\n const tfNodes = graph.node;\n const placeholders = [];\n const weights = [];\n const initNodes = [];\n const nodes = tfNodes.reduce((map, node) => {\n map[node.name] = this.mapNode(node);\n if (node.op.startsWith('Placeholder')) {\n placeholders.push(map[node.name]);\n }\n else if (node.op === 'Const') {\n weights.push(map[node.name]);\n }\n else if (node.input == null || node.input.length === 0) {\n initNodes.push(map[node.name]);\n }\n return map;\n }, {});\n let inputs = [];\n const outputs = [];\n let inputNodeNameToKey = {};\n let outputNodeNameToKey = {};\n if (signature != null) {\n inputNodeNameToKey = this.mapSignatureEntries(signature.inputs);\n outputNodeNameToKey = this.mapSignatureEntries(signature.outputs);\n }\n const allNodes = Object.keys(nodes);\n allNodes.forEach(key => {\n const node = nodes[key];\n node.inputNames.forEach(name => {\n const [nodeName,] = getNodeNameAndIndex(name);\n node.inputs.push(nodes[nodeName]);\n nodes[nodeName].children.push(node);\n });\n });\n // if signature has not outputs set, add any node that does not have\n // outputs.\n if (Object.keys(outputNodeNameToKey).length === 0) {\n allNodes.forEach(key => {\n const node = nodes[key];\n if (node.children.length === 0) {\n outputs.push(node);\n }\n });\n }\n else {\n Object.keys(outputNodeNameToKey).forEach(name => {\n const [nodeName,] = getNodeNameAndIndex(name);\n const node = nodes[nodeName];\n if (node != null) {\n node.signatureKey = outputNodeNameToKey[name];\n outputs.push(node);\n }\n });\n }\n if (Object.keys(inputNodeNameToKey).length > 0) {\n Object.keys(inputNodeNameToKey).forEach(name => {\n const [nodeName,] = getNodeNameAndIndex(name);\n const node = nodes[nodeName];\n if (node) {\n node.signatureKey = inputNodeNameToKey[name];\n inputs.push(node);\n }\n });\n }\n else {\n inputs = placeholders;\n }\n let functions = {};\n if (graph.library != null && graph.library.function != null) {\n functions = graph.library.function.reduce((functions, func) => {\n functions[func.signature.name] = this.mapFunction(func);\n return functions;\n }, {});\n }\n const result = { nodes, inputs, outputs, weights, placeholders, signature, functions };\n if (initNodes.length > 0) {\n result.initNodes = initNodes;\n }\n return result;\n }\n mapSignatureEntries(entries) {\n return Object.keys(entries || {})\n .reduce((prev, curr) => {\n prev[entries[curr].name] = curr;\n return prev;\n }, {});\n }\n mapNode(node) {\n // Unsupported ops will cause an error at run-time (not parse time), since\n // they may not be used by the actual execution subgraph.\n const mapper = getRegisteredOp(node.op) || this.opMappers[node.op] || {};\n if (node.attr == null) {\n node.attr = {};\n }\n const newNode = {\n name: node.name,\n op: node.op,\n category: mapper.category,\n inputNames: (node.input ||\n []).map(input => input.startsWith('^') ? 
input.substr(1) : input),\n inputs: [],\n children: [],\n inputParams: {},\n attrParams: {},\n rawAttrs: node.attr\n };\n if (mapper.inputs != null) {\n newNode.inputParams =\n mapper.inputs.reduce((map, param) => {\n map[param.name] = {\n type: param.type,\n inputIndexStart: param.start,\n inputIndexEnd: param.end\n };\n return map;\n }, {});\n }\n if (mapper.attrs != null) {\n newNode.attrParams =\n mapper.attrs.reduce((map, param) => {\n const type = param.type;\n let value = undefined;\n switch (param.type) {\n case 'string':\n value = getStringParam(node.attr, param.tfName, param.defaultValue);\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getStringParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'string[]':\n value = getStringArrayParam(node.attr, param.tfName, param.defaultValue);\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getStringArrayParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'number':\n value = getNumberParam(node.attr, param.tfName, (param.defaultValue || 0));\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getNumberParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'number[]':\n value = getNumericArrayParam(node.attr, param.tfName, param.defaultValue);\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getNumericArrayParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'bool':\n value = getBoolParam(node.attr, param.tfName, param.defaultValue);\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getBoolParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'bool[]':\n value = getBoolArrayParam(node.attr, param.tfName, param.defaultValue);\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getBoolArrayParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'shape':\n value = getTensorShapeParam(node.attr, param.tfName, param.defaultValue);\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getTensorShapeParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'shape[]':\n value = getTensorShapeArrayParam(node.attr, param.tfName, param.defaultValue);\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getTensorShapeArrayParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'dtype':\n value = getDtypeParam(node.attr, param.tfName, param.defaultValue);\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getDtypeParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'dtype[]':\n value = getDtypeArrayParam(node.attr, param.tfName, param.defaultValue);\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getDtypeArrayParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'func':\n value = getFuncParam(node.attr, param.tfName, param.defaultValue);\n if (value === undefined && !!param.tfDeprecatedName) {\n value = getFuncParam(node.attr, param.tfDeprecatedName, param.defaultValue);\n }\n break;\n case 'tensor':\n case 'tensors':\n break;\n default:\n throw new Error(`Unsupported param type: ${param.type} for op: ${node.op}`);\n }\n map[param.name] = { value, type };\n return map;\n }, {});\n }\n return newNode;\n }\n // map the TFunctionDef to TFJS graph object\n mapFunction(functionDef) {\n const tfNodes = functionDef.nodeDef;\n 
const placeholders = [];\n const weights = [];\n let nodes = {};\n if (tfNodes != null) {\n nodes = tfNodes.reduce((map, node) => {\n map[node.name] = this.mapNode(node);\n if (node.op === 'Const') {\n weights.push(map[node.name]);\n }\n return map;\n }, {});\n }\n const inputs = [];\n const outputs = [];\n functionDef.signature.inputArg.forEach(arg => {\n const [nodeName,] = getNodeNameAndIndex(arg.name);\n const node = {\n name: nodeName,\n op: 'Placeholder',\n inputs: [],\n inputNames: [],\n category: 'graph',\n inputParams: {},\n attrParams: { dtype: { value: parseDtypeParam(arg.type), type: 'dtype' } },\n children: []\n };\n node.signatureKey = arg.name;\n inputs.push(node);\n nodes[nodeName] = node;\n });\n const allNodes = Object.keys(nodes);\n allNodes.forEach(key => {\n const node = nodes[key];\n node.inputNames.forEach(name => {\n const [nodeName,] = getNodeNameAndIndex(name);\n node.inputs.push(nodes[nodeName]);\n nodes[nodeName].children.push(node);\n });\n });\n const returnNodeMap = functionDef.ret;\n functionDef.signature.outputArg.forEach(output => {\n const [nodeName, index] = getNodeNameAndIndex(returnNodeMap[output.name]);\n const node = nodes[nodeName];\n if (node != null) {\n node.defaultOutput = index;\n outputs.push(node);\n }\n });\n const signature = this.mapArgsToSignature(functionDef);\n return { nodes, inputs, outputs, weights, placeholders, signature };\n }\n mapArgsToSignature(functionDef) {\n return {\n methodName: functionDef.signature.name,\n inputs: functionDef.signature.inputArg.reduce((map, arg) => {\n map[arg.name] = this.mapArgToTensorInfo(arg);\n return map;\n }, {}),\n outputs: functionDef.signature.outputArg.reduce((map, arg) => {\n map[arg.name] = this.mapArgToTensorInfo(arg, functionDef.ret);\n return map;\n }, {}),\n };\n }\n mapArgToTensorInfo(arg, nameMap) {\n let name = arg.name;\n if (nameMap != null) {\n name = nameMap[name];\n }\n return { name, dtype: arg.type };\n }\n}\nexport function decodeBase64(text) {\n const global = env().global;\n if (typeof global.atob !== 'undefined') {\n return global.atob(text);\n }\n else if (typeof Buffer !== 'undefined') {\n return new Buffer(text, 'base64').toString();\n }\n else {\n throw new Error('Unable to decode base64 in this environment. ' +\n 'Missing built-in atob() or Buffer()');\n }\n}\nexport function parseStringParam(s, keepCase) {\n const value = Array.isArray(s) ? String.fromCharCode.apply(null, s) : decodeBase64(s);\n return keepCase ? value : value.toLowerCase();\n}\nexport function getStringParam(attrs, name, def, keepCase = false) {\n const param = attrs[name];\n if (param != null) {\n return parseStringParam(param.s, keepCase);\n }\n return def;\n}\nexport function getBoolParam(attrs, name, def) {\n const param = attrs[name];\n return param ? param.b : def;\n}\nexport function getNumberParam(attrs, name, def) {\n const param = attrs[name] || {};\n const value = param['i'] != null ? param['i'] : (param['f'] != null ? param['f'] : def);\n return (typeof value === 'number') ? 
value : parseInt(value, 10);\n}\nexport function parseDtypeParam(value) {\n if (typeof (value) === 'string') {\n // tslint:disable-next-line:no-any\n value = tensorflow.DataType[value];\n }\n switch (value) {\n case tensorflow.DataType.DT_FLOAT:\n return 'float32';\n case tensorflow.DataType.DT_INT32:\n case tensorflow.DataType.DT_INT64:\n case tensorflow.DataType.DT_INT8:\n case tensorflow.DataType.DT_UINT8:\n return 'int32';\n case tensorflow.DataType.DT_BOOL:\n return 'bool';\n case tensorflow.DataType.DT_DOUBLE:\n return 'float32';\n case tensorflow.DataType.DT_STRING:\n return 'string';\n default:\n // Unknown dtype error will happen at runtime (instead of parse time),\n // since these nodes might not be used by the actual subgraph execution.\n return null;\n }\n}\nexport function getFuncParam(attrs, name, def) {\n const param = attrs[name];\n if (param && param.func) {\n return param.func.name;\n }\n return def;\n}\nexport function getDtypeParam(attrs, name, def) {\n const param = attrs[name];\n if (param && param.type) {\n return parseDtypeParam(param.type);\n }\n return def;\n}\nexport function getDtypeArrayParam(attrs, name, def) {\n const param = attrs[name];\n if (param && param.list && param.list.type) {\n return param.list.type.map(v => parseDtypeParam(v));\n }\n return def;\n}\nexport function parseTensorShapeParam(shape) {\n if (shape.unknownRank) {\n return undefined;\n }\n if (shape.dim != null) {\n return shape.dim.map(dim => (typeof dim.size === 'number') ? dim.size : parseInt(dim.size, 10));\n }\n return [];\n}\nexport function getTensorShapeParam(attrs, name, def) {\n const param = attrs[name];\n if (param && param.shape) {\n return parseTensorShapeParam(param.shape);\n }\n return def;\n}\nexport function getNumericArrayParam(attrs, name, def) {\n const param = attrs[name];\n if (param) {\n return ((param.list.f && param.list.f.length ? param.list.f :\n param.list.i) ||\n [])\n .map(v => (typeof v === 'number') ? v : parseInt(v, 10));\n }\n return def;\n}\nexport function getStringArrayParam(attrs, name, def, keepCase = false) {\n const param = attrs[name];\n if (param && param.list && param.list.s) {\n return param.list.s.map((v) => {\n return parseStringParam(v, keepCase);\n });\n }\n return def;\n}\nexport function getTensorShapeArrayParam(attrs, name, def) {\n const param = attrs[name];\n if (param && param.list && param.list.shape) {\n return param.list.shape.map((v) => {\n return parseTensorShapeParam(v);\n });\n }\n return def;\n}\nexport function getBoolArrayParam(attrs, name, def) {\n const param = attrs[name];\n if (param && param.list && param.list.b) {\n return param.list.b;\n }\n return def;\n}\n//# sourceMappingURL=operation_mapper.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getTensor } from '../executors/utils';\nimport { getBoolArrayParam, getBoolParam, getDtypeArrayParam, getDtypeParam, getNumberParam, getNumericArrayParam, getStringArrayParam, getStringParam, getTensorShapeArrayParam, getTensorShapeParam } from '../operation_mapper';\n/**\n * Helper class for lookup inputs and params for nodes in the model graph.\n */\nexport class NodeValueImpl {\n constructor(node, tensorMap, context) {\n this.node = node;\n this.tensorMap = tensorMap;\n this.context = context;\n this.inputs = [];\n this.attrs = {};\n this.inputs = node.inputNames.map(name => this.getInput(name));\n if (node.rawAttrs != null) {\n this.attrs = Object.keys(node.rawAttrs)\n .reduce((attrs, key) => {\n attrs[key] = this.getAttr(key);\n return attrs;\n }, {});\n }\n }\n /**\n * Return the value of the attribute or input param.\n * @param name String: name of attribute or input param.\n */\n getInput(name) {\n return getTensor(name, this.tensorMap, this.context);\n }\n /**\n * Return the value of the attribute or input param.\n * @param name String: name of attribute or input param.\n */\n getAttr(name, defaultValue) {\n const value = this.node.rawAttrs[name];\n if (value.tensor != null) {\n return getTensor(name, this.tensorMap, this.context);\n }\n if (value.i != null || value.f != null) {\n return getNumberParam(this.node.rawAttrs, name, defaultValue);\n }\n if (value.s != null) {\n return getStringParam(this.node.rawAttrs, name, defaultValue);\n }\n if (value.b != null) {\n return getBoolParam(this.node.rawAttrs, name, defaultValue);\n }\n if (value.shape != null) {\n return getTensorShapeParam(this.node.rawAttrs, name, defaultValue);\n }\n if (value.type != null) {\n return getDtypeParam(this.node.rawAttrs, name, defaultValue);\n }\n if (value.list != null) {\n if (value.list.i != null || value.list.f != null) {\n return getNumericArrayParam(this.node.rawAttrs, name, defaultValue);\n }\n if (value.list.s != null) {\n return getStringArrayParam(this.node.rawAttrs, name, defaultValue);\n }\n if (value.list.shape != null) {\n return getTensorShapeArrayParam(this.node.rawAttrs, name, defaultValue);\n }\n if (value.list.b != null) {\n return getBoolArrayParam(this.node.rawAttrs, name, defaultValue);\n }\n if (value.list.type != null) {\n return getDtypeArrayParam(this.node.rawAttrs, name, defaultValue);\n }\n }\n return defaultValue;\n }\n}\n//# sourceMappingURL=node_value_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * This file exports ops used by the converters executors. By default it\n * re-exports all ops. In a custom build this is aliased to a file that will\n * only exports ops for a given model.json.\n */\nexport * from './ops';\n//# sourceMappingURL=ops_for_converter.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// tslint:disable-next-line: no-imports-from-dist\nimport * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter';\nimport { getParamValue } from './utils';\nexport const executeOp = (node, tensorMap, context) => {\n switch (node.op) {\n case 'BiasAdd':\n case 'AddV2':\n case 'Add': {\n return [tfOps.add(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'AddN': {\n return [tfOps.addN(getParamValue('tensors', node, tensorMap, context))];\n }\n case 'FloorMod':\n case 'Mod':\n return [tfOps.mod(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n case 'Mul':\n return [tfOps.mul(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n case 'RealDiv':\n case 'Div': {\n return [tfOps.div(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'DivNoNan': {\n return [tfOps.divNoNan(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'FloorDiv': {\n return [tfOps.floorDiv(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'Sub': {\n return [tfOps.sub(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'Minimum': {\n return [tfOps.minimum(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'Maximum': {\n return [tfOps.maximum(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'Pow': {\n return [tfOps.pow(getParamValue('a', node, tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n case 'SquaredDifference': {\n return [tfOps.squaredDifference(getParamValue('a', node, 
tensorMap, context), getParamValue('b', node, tensorMap, context))];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'arithmetic';\n//# sourceMappingURL=arithmetic_executor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// tslint:disable-next-line: no-imports-from-dist\nimport * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter';\nimport { getParamValue, getTensor } from './utils';\nexport const executeOp = (node, tensorMap, context) => {\n switch (node.op) {\n case 'Abs':\n case 'ComplexAbs':\n return [tfOps.abs(getParamValue('x', node, tensorMap, context))];\n case 'Acos':\n return [tfOps.acos(getParamValue('x', node, tensorMap, context))];\n case 'Acosh':\n return [tfOps.acosh(getParamValue('x', node, tensorMap, context))];\n case 'Asin':\n return [tfOps.asin(getParamValue('x', node, tensorMap, context))];\n case 'Asinh':\n return [tfOps.asinh(getParamValue('x', node, tensorMap, context))];\n case 'Atan':\n return [tfOps.atan(getParamValue('x', node, tensorMap, context))];\n case 'Atan2':\n return [tfOps.atan2(getParamValue('x', node, tensorMap, context), getParamValue('y', node, tensorMap, context))];\n case 'Atanh':\n return [tfOps.atanh(getParamValue('x', node, tensorMap, context))];\n case 'Ceil':\n return [tfOps.ceil(getParamValue('x', node, tensorMap, context))];\n case 'Complex':\n return [tfOps.complex(getParamValue('real', node, tensorMap, context), getParamValue('imag', node, tensorMap, context))];\n case 'Cos':\n return [tfOps.cos(getParamValue('x', node, tensorMap, context))];\n case 'Cosh':\n return [tfOps.cosh(getParamValue('x', node, tensorMap, context))];\n case 'Elu':\n return [tfOps.elu(getParamValue('x', node, tensorMap, context))];\n case 'Erf':\n return [tfOps.erf(getParamValue('x', node, tensorMap, context))];\n case 'Exp':\n return [tfOps.exp(getParamValue('x', node, tensorMap, context))];\n case 'Expm1': {\n return [tfOps.expm1(getParamValue('x', node, tensorMap, context))];\n }\n case 'Floor':\n return [tfOps.floor(getParamValue('x', node, tensorMap, context))];\n case 'Log':\n return [tfOps.log(getParamValue('x', node, tensorMap, context))];\n case 'Log1p': {\n return [tfOps.log1p(getParamValue('x', node, tensorMap, context))];\n }\n case 'Imag':\n return [tfOps.imag(getParamValue('x', node, tensorMap, context))];\n case 'Neg':\n return [tfOps.neg(getParamValue('x', node, tensorMap, context))];\n case 'Reciprocal': {\n return [tfOps.reciprocal(getParamValue('x', node, tensorMap, context))];\n }\n case 'Real':\n return [tfOps.real(getParamValue('x', node, tensorMap, context))];\n case 'Relu':\n return [tfOps.relu(getParamValue('x', node, tensorMap, context))];\n case 'Round': {\n return [tfOps.round(getParamValue('x', node, tensorMap, context))];\n }\n case 'Selu':\n return [tfOps.selu(getParamValue('x', node, 
tensorMap, context))];\n case 'Sigmoid':\n return [tfOps.sigmoid(getParamValue('x', node, tensorMap, context))];\n case 'Sin':\n return [tfOps.sin(getParamValue('x', node, tensorMap, context))];\n case 'Sign': {\n return [tfOps.sign(getParamValue('x', node, tensorMap, context))];\n }\n case 'Sinh': {\n return [tfOps.sinh(getParamValue('x', node, tensorMap, context))];\n }\n case 'Softplus': {\n return [tfOps.softplus(getParamValue('x', node, tensorMap, context))];\n }\n case 'Sqrt': {\n return [tfOps.sqrt(getParamValue('x', node, tensorMap, context))];\n }\n case 'Square': {\n return [tfOps.square(getParamValue('x', node, tensorMap, context))];\n }\n case 'Tanh': {\n return [tfOps.tanh(getParamValue('x', node, tensorMap, context))];\n }\n case 'Tan':\n return [tfOps.tan(getParamValue('x', node, tensorMap, context))];\n case 'Relu6':\n case 'ClipByValue':\n return [tfOps.clipByValue(getParamValue('x', node, tensorMap, context), getParamValue('clipValueMin', node, tensorMap, context), getParamValue('clipValueMax', node, tensorMap, context))];\n case 'Rsqrt':\n return [tfOps.rsqrt(getTensor(node.inputNames[0], tensorMap, context))];\n case 'Prod':\n return [tfOps.prod(getParamValue('x', node, tensorMap, context), getParamValue('axes', node, tensorMap, context))];\n case 'LeakyRelu':\n return [tfOps.leakyRelu(getParamValue('x', node, tensorMap, context), getParamValue('alpha', node, tensorMap, context))];\n case 'Prelu':\n return [tfOps.prelu(getParamValue('x', node, tensorMap, context), getParamValue('alpha', node, tensorMap, context))];\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'basic_math';\n//# sourceMappingURL=basic_math_executor.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * This differs from util.assertShapesMatch in that it allows values of\n * negative one, an undefined size of a dimensinon, in a shape to match\n * anything.\n */\nimport { util } from '@tensorflow/tfjs-core';\nexport function assertShapesMatchAllowUndefinedSize(shapeA, shapeB, errorMessagePrefix = '') {\n util.assert(shapesEqualAllowUndefinedSize(shapeA, shapeB), () => errorMessagePrefix + ` Shapes ${shapeA} and ${shapeB} must match`);\n}\nexport function shapesEqualAllowUndefinedSize(n1, n2) {\n if (n1.length !== n2.length) {\n return false;\n }\n for (let i = 0; i < n1.length; i++) {\n if (n1[i] !== -1 && n2[i] !== -1 && n1[i] !== n2[i]) {\n return false;\n }\n }\n return true;\n}\n//# sourceMappingURL=tensor_utils.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { concat, keep, reshape, scalar, slice, stack, tensor, tidy, unstack } from '@tensorflow/tfjs-core';\nimport { assertShapesMatchAllowUndefinedSize } from './tensor_utils';\n/**\n * The TensorArray object keeps an array of Tensors. It\n * allows reading from the array and writing to the array.\n */\nexport class TensorArray {\n constructor(name, dtype, maxSize, elementShape, identicalElementShapes, dynamicSize, clearAfterRead) {\n this.name = name;\n this.dtype = dtype;\n this.maxSize = maxSize;\n this.elementShape = elementShape;\n this.identicalElementShapes = identicalElementShapes;\n this.dynamicSize = dynamicSize;\n this.clearAfterRead = clearAfterRead;\n this.tensors = [];\n this.closed_ = false;\n this.idTensor = scalar(0);\n keep(this.idTensor);\n }\n get id() {\n return this.idTensor.id;\n }\n get closed() {\n return this.closed_;\n }\n /**\n * Dispose the tensors and idTensor and mark the TensoryArray as closed.\n */\n clearAndClose(keepIds) {\n this.tensors.forEach(tensor => {\n if (keepIds == null || !keepIds.has(tensor.tensor.id)) {\n tensor.tensor.dispose();\n }\n });\n this.tensors = [];\n this.closed_ = true;\n this.idTensor.dispose();\n }\n size() {\n return this.tensors.length;\n }\n /**\n * Read the value at location index in the TensorArray.\n * @param index Number the index to read from.\n */\n read(index) {\n if (this.closed_) {\n throw new Error(`TensorArray ${this.name} has already been closed.`);\n }\n if (index < 0 || index >= this.size()) {\n throw new Error(`Tried to read from index ${index}, but array size is: ${this.size()}`);\n }\n const tensorWithState = this.tensors[index];\n if (tensorWithState.cleared) {\n throw new Error(`TensorArray ${this.name}: Could not read index ${index} twice because it was cleared after a previous read ` +\n `(perhaps try setting clear_after_read = false?).`);\n }\n if (this.clearAfterRead) {\n tensorWithState.cleared = true;\n }\n tensorWithState.read = true;\n return tensorWithState.tensor;\n }\n /**\n * Helper method to read multiple tensors from the specified indices.\n */\n readMany(indices) {\n return indices.map(index => this.read(index));\n }\n /**\n * Write value into the index of the TensorArray.\n * @param index number the index to write to.\n * @param tensor\n */\n write(index, tensor) {\n if (this.closed_) {\n throw new Error(`TensorArray ${this.name} has already been closed.`);\n }\n if (index < 0 || !this.dynamicSize && index >= this.maxSize) {\n throw new Error(`Tried to write to index ${index}, but array is not resizeable and size is: ${this.maxSize}`);\n }\n const t = this.tensors[index] || {};\n if (tensor.dtype !== this.dtype) {\n throw new Error(`TensorArray ${this.name}: Could not write to TensorArray index ${index},\n because the value dtype is ${tensor.dtype}, but TensorArray dtype is ${this.dtype}.`);\n }\n // Set the shape for the first 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { tidy, util } from '@tensorflow/tfjs-core';\n// tslint:disable-next-line: no-imports-from-dist\nimport * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter';\nimport { getParamValue } from './utils';\nexport const executeOp = (node, tensorMap, context) => {\n switch (node.op) {\n case 'ConcatV2':\n case 'Concat': {\n const n = getParamValue('n', node, tensorMap, context);\n const axis = getParamValue('axis', node, tensorMap, context);\n let inputs = getParamValue('tensors', node, tensorMap, context);\n inputs = inputs.slice(0, n);\n return [tfOps.concat(inputs, axis)];\n }\n case 'GatherV2':\n case 'Gather': {\n const axis = getParamValue('axis', node, tensorMap, context);\n const input = getParamValue('x', node, tensorMap, context);\n const indices = getParamValue('indices', node, tensorMap, context);\n return [tfOps.gather(input, tfOps.cast(indices, 'int32'), axis)];\n }\n case 'ReverseV2':\n case 'Reverse': {\n const axis = getParamValue('axis', node, tensorMap, context);\n const input = getParamValue('x', node, tensorMap, context);\n return [tfOps.reverse(input, axis)];\n }\n case 'Slice': {\n // tslint:disable-next-line:no-any\n const begin = getParamValue('begin', node, tensorMap, context);\n // tslint:disable-next-line:no-any\n const size = getParamValue('size', node, tensorMap, context);\n return [tfOps.slice(getParamValue('x', node, tensorMap, context), begin, size)];\n }\n case 'StridedSlice': {\n const begin = getParamValue('begin', node, tensorMap, context);\n const end = getParamValue('end', node, tensorMap, context);\n const strides = getParamValue('strides', node, tensorMap, context);\n const beginMask = getParamValue('beginMask', node, tensorMap, context);\n const endMask = getParamValue('endMask', node, tensorMap, context);\n const ellipsisMask = getParamValue('ellipsisMask', node, tensorMap, context);\n const newAxisMask = getParamValue('newAxisMask', node, tensorMap, context);\n const shrinkAxisMask = getParamValue('shrinkAxisMask', node, tensorMap, context);\n const tensor = getParamValue('x', node, tensorMap, context);\n return [tfOps.stridedSlice(tensor, begin, end, strides, beginMask, endMask, ellipsisMask, newAxisMask, shrinkAxisMask)];\n }\n case 'Pack': {\n return tidy(() => {\n const axis = getParamValue('axis', node, tensorMap, context);\n const tensors = getParamValue('tensors', node, tensorMap, context);\n // Reshape the tensors to the first tensor's shape if they don't\n // match.\n const shape = tensors[0].shape;\n const squeezedShape = tfOps.squeeze(tensors[0]).shape;\n const mapped = tensors.map(tensor => {\n const sameShape = util.arraysEqual(tensor.shape, shape);\n if (!sameShape &&\n !util.arraysEqual(tfOps.squeeze(tensor).shape, squeezedShape)) {\n throw new Error('the input tensors shape does not match');\n }\n return sameShape ? 
tensor : tfOps.reshape(tensor, shape);\n });\n return [tfOps.stack(mapped, axis)];\n });\n }\n case 'Unpack': {\n const axis = getParamValue('axis', node, tensorMap, context);\n const tensor = getParamValue('tensor', node, tensorMap, context);\n return tfOps.unstack(tensor, axis);\n }\n case 'Tile': {\n const reps = getParamValue('reps', node, tensorMap, context);\n return [tfOps.tile(getParamValue('x', node, tensorMap, context), reps)];\n }\n case 'Split':\n case 'SplitV': {\n const axis = getParamValue('axis', node, tensorMap, context);\n const numOrSizeSplits = getParamValue('numOrSizeSplits', node, tensorMap, context);\n const tensor = getParamValue('x', node, tensorMap, context);\n return tfOps.split(tensor, numOrSizeSplits, axis);\n }\n case 'ScatterNd': {\n const indices = getParamValue('indices', node, tensorMap, context);\n const values = getParamValue('values', node, tensorMap, context);\n const shape = getParamValue('shape', node, tensorMap, context);\n return [tfOps.scatterND(indices, values, shape)];\n }\n case 'GatherNd': {\n const x = getParamValue('x', node, tensorMap, context);\n const indices = getParamValue('indices', node, tensorMap, context);\n return [tfOps.gatherND(x, indices)];\n }\n case 'SparseToDense': {\n const indices = getParamValue('sparseIndices', node, tensorMap, context);\n const shape = getParamValue('outputShape', node, tensorMap, context);\n const sparseValues = getParamValue('sparseValues', node, tensorMap, context);\n const defaultValue = getParamValue('defaultValue', node, tensorMap, context);\n return [tfOps.sparseToDense(indices, sparseValues, shape, sparseValues.dtype === defaultValue.dtype ?\n defaultValue :\n tfOps.cast(defaultValue, sparseValues.dtype))];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'slice_join';\n//# sourceMappingURL=slice_join_executor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// tslint:disable-next-line: no-imports-from-dist\nimport * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter';\nimport { getParamValue } from './utils';\nexport const executeOp = (node, tensorMap, context) => {\n switch (node.op) {\n case 'FFT': {\n return [tfOps.fft(getParamValue('x', node, tensorMap, context))];\n }\n case 'IFFT': {\n return [tfOps.ifft(getParamValue('x', node, tensorMap, context))];\n }\n case 'RFFT': {\n return [tfOps.rfft(getParamValue('x', node, tensorMap, context))];\n }\n case 'IRFFT': {\n return [tfOps.irfft(getParamValue('x', node, tensorMap, context))];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'spectral';\n//# sourceMappingURL=spectral_executor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// tslint:disable-next-line: no-imports-from-dist\nimport * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter';\nimport { getParamValue } from './utils';\nexport const executeOp = (node, tensorMap, context) => {\n switch (node.op) {\n case 'Cast': {\n return [tfOps.cast(getParamValue('x', node, tensorMap, context), getParamValue('dtype', node, tensorMap, context))];\n }\n case 'ExpandDims': {\n const axis = getParamValue('axis', node, tensorMap, context);\n return [tfOps.expandDims(getParamValue('x', node, tensorMap, context), axis)];\n }\n case 'Squeeze': {\n const axis = getParamValue('axis', node, tensorMap, context);\n return [tfOps.squeeze(getParamValue('x', node, tensorMap, context), axis)];\n }\n case 'Reshape': {\n return [tfOps.reshape(getParamValue('x', node, tensorMap, context), getParamValue('shape', node, tensorMap, context))];\n }\n case 'MirrorPad': {\n return [tfOps.mirrorPad(getParamValue('x', node, tensorMap, context), getParamValue('padding', node, tensorMap, context), getParamValue('mode', node, tensorMap, context))];\n }\n case 'PadV2':\n case 'Pad': {\n return [tfOps.pad(getParamValue('x', node, tensorMap, context), getParamValue('padding', node, tensorMap, context), getParamValue('constantValue', node, tensorMap, context))];\n }\n case 'SpaceToBatchND': {\n const blockShape = getParamValue('blockShape', node, tensorMap, context);\n const paddings = getParamValue('paddings', node, tensorMap, context);\n return [tfOps.spaceToBatchND(getParamValue('x', node, tensorMap, context), blockShape, paddings)];\n }\n case 'BatchToSpaceND': {\n const blockShape = getParamValue('blockShape', node, tensorMap, context);\n const crops = getParamValue('crops', node, tensorMap, context);\n return [tfOps.batchToSpaceND(getParamValue('x', node, tensorMap, context), blockShape, crops)];\n }\n case 'DepthToSpace': {\n const blockSize = getParamValue('blockSize', node, tensorMap, context);\n const dataFormat = getParamValue('dataFormat', node, tensorMap, context).toUpperCase();\n return [tfOps.depthToSpace(getParamValue('x', node, tensorMap, context), blockSize, dataFormat)];\n }\n case 'BroadcastTo': {\n return [tfOps.broadcastTo(getParamValue('x', node, tensorMap, context), getParamValue('shape', node, tensorMap, context))];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\nexport const CATEGORY = 'transformation';\n//# sourceMappingURL=transformation_executor.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport { NodeValueImpl } from './custom_op/node_value_impl';\nimport { getRegisteredOp } from './custom_op/register';\nimport * as arithmetic from './executors/arithmetic_executor';\nimport * as basicMath from './executors/basic_math_executor';\nimport * as control from './executors/control_executor';\nimport * as convolution from './executors/convolution_executor';\nimport * as creation from './executors/creation_executor';\nimport * as dynamic from './executors/dynamic_executor';\nimport * as evaluation from './executors/evaluation_executor';\nimport * as graph from './executors/graph_executor';\nimport * as hashTable from './executors/hash_table_executor';\nimport * as image from './executors/image_executor';\nimport * as logical from './executors/logical_executor';\nimport * as matrices from './executors/matrices_executor';\nimport * as normalization from './executors/normalization_executor';\nimport * as reduction from './executors/reduction_executor';\nimport * as sliceJoin from './executors/slice_join_executor';\nimport * as spectral from './executors/spectral_executor';\nimport * as transformation from './executors/transformation_executor';\n/**\n * Executes the op defined by the node object.\n * @param node\n * @param tensorMap contains tensors for executed nodes and weights\n * @param context contains tensors and information for running the current node.\n * @param resourceManager Optional. 
Contains global resources of the model.\n */\nexport function executeOp(node, tensorMap, context, resourceManager) {\n const value = ((node, tensorMap, context) => {\n switch (node.category) {\n case 'arithmetic':\n return tfc.tidy(() => arithmetic.executeOp(node, tensorMap, context));\n case 'basic_math':\n return tfc.tidy(() => basicMath.executeOp(node, tensorMap, context));\n case 'control':\n return control.executeOp(node, tensorMap, context);\n case 'convolution':\n return tfc.tidy(() => convolution.executeOp(node, tensorMap, context));\n case 'creation':\n return tfc.tidy(() => creation.executeOp(node, tensorMap, context));\n case 'dynamic':\n return dynamic.executeOp(node, tensorMap, context);\n case 'evaluation':\n return tfc.tidy(() => evaluation.executeOp(node, tensorMap, context));\n case 'image':\n return tfc.tidy(() => image.executeOp(node, tensorMap, context));\n case 'graph':\n return tfc.tidy(() => graph.executeOp(node, tensorMap, context));\n case 'logical':\n return tfc.tidy(() => logical.executeOp(node, tensorMap, context));\n case 'matrices':\n return tfc.tidy(() => matrices.executeOp(node, tensorMap, context));\n case 'normalization':\n return tfc.tidy(() => normalization.executeOp(node, tensorMap, context));\n case 'reduction':\n return tfc.tidy(() => reduction.executeOp(node, tensorMap, context));\n case 'slice_join':\n return tfc.tidy(() => sliceJoin.executeOp(node, tensorMap, context));\n case 'spectral':\n return tfc.tidy(() => spectral.executeOp(node, tensorMap, context));\n case 'transformation':\n return tfc.tidy(() => transformation.executeOp(node, tensorMap, context));\n case 'hash_table':\n return hashTable.executeOp(node, tensorMap, context, resourceManager);\n case 'custom':\n const opMapper = getRegisteredOp(node.op);\n if (opMapper && opMapper.customExecutor) {\n return opMapper.customExecutor(new NodeValueImpl(node, tensorMap, context));\n }\n else {\n throw TypeError(`Custom op ${node.op} is not registered.`);\n }\n default:\n throw TypeError(`Unknown op '${node.op}'. File an issue at ` +\n `https://github.com/tensorflow/tfjs/issues so we can add it` +\n `, or register a custom execution with tf.registerOp()`);\n }\n })(node, tensorMap, context);\n if (tfc.util.isPromise(value)) {\n return value.then((data) => [].concat(data));\n }\n return [].concat(value);\n}\n//# sourceMappingURL=operation_executor.js.map", "/**\n * ExecutionContext captures the runtime environment of the node. 
It keeps\n * track of the current frame and iteration for the control flow ops.\n *\n * For example, typical Dynamic RNN model may contain loops, for which\n * TensorFlow will generate graphs with Enter/Exit nodes to control the\n * current execution frame, and NextIteration Nodes for iteration id increment.\n * For model with branch logic, TensorFLow will generate Switch/Merge ops.\n */\nexport class ExecutionContext {\n constructor(weightMap = {}, tensorArrayMap = {}, tensorListMap = {}, functionMap = {}) {\n this.weightMap = weightMap;\n this.tensorArrayMap = tensorArrayMap;\n this.tensorListMap = tensorListMap;\n this.functionMap = functionMap;\n this.rootContext = { id: 0, frameName: '', iterationId: 0 };\n this.contexts = [this.rootContext];\n this.lastId = 0;\n this.generateCurrentContextIds();\n }\n newFrame(id, frameName) {\n return { id, frameName, iterationId: 0 };\n }\n /**\n * Set the current context\n * @param contexts: ExecutionContextInfo[] the current path of execution\n * frames\n */\n set currentContext(contexts) {\n if (this.contexts !== contexts) {\n this.contexts = contexts;\n this.generateCurrentContextIds();\n }\n }\n get currentContext() {\n return this.contexts;\n }\n /**\n * Returns the current context in string format.\n */\n get currentContextId() {\n return this._currentContextIds[0];\n }\n /**\n * Returns the current context and all parent contexts in string format.\n * This allow access to the nodes in the current and parent frames.\n */\n get currentContextIds() {\n return this._currentContextIds;\n }\n generateCurrentContextIds() {\n const names = [];\n for (let i = 0; i < this.contexts.length - 1; i++) {\n const contexts = this.contexts.slice(0, this.contexts.length - i);\n names.push(this.contextIdforContexts(contexts));\n }\n names.push('');\n this._currentContextIds = names;\n }\n contextIdforContexts(contexts) {\n return contexts ?\n contexts\n .map(context => (context.id === 0 && context.iterationId === 0) ?\n '' :\n `${context.frameName}-${context.iterationId}`)\n .join('/') :\n '';\n }\n /**\n * Enter a new frame, a new context is pushed on the current context list.\n * @param frameId new frame id\n */\n enterFrame(frameId) {\n if (this.contexts) {\n this.lastId++;\n this.contexts = this.contexts.slice();\n this.contexts.push(this.newFrame(this.lastId, frameId));\n this._currentContextIds.unshift(this.contextIdforContexts(this.contexts));\n }\n }\n /**\n * Exit the current frame, the last context is removed from the current\n * context list.\n */\n exitFrame() {\n if (this.contexts && this.contexts.length > 1) {\n this.contexts = this.contexts.slice();\n this.contexts.splice(-1);\n this.currentContextIds.shift();\n }\n else {\n throw new Error('Cannot exit frame, the context is empty');\n }\n }\n /**\n * Enter the next iteration of a loop, the iteration id of last context is\n * increased.\n */\n nextIteration() {\n if (this.contexts && this.contexts.length > 0) {\n this.contexts = this.contexts.slice();\n this.lastId++;\n const context = Object.assign({}, this.contexts[this.contexts.length - 1]);\n context.iterationId += 1;\n context.id = this.lastId;\n this.contexts.splice(-1, 1, context);\n this._currentContextIds.splice(0, 1, this.contextIdforContexts(this.contexts));\n }\n else {\n throw new Error('Cannot increase frame iteration, the context is empty');\n }\n }\n getWeight(name) {\n return this.weightMap[name];\n }\n addTensorArray(tensorArray) {\n this.tensorArrayMap[tensorArray.id] = tensorArray;\n }\n getTensorArray(id) {\n return 
this.tensorArrayMap[id];\n }\n addTensorList(tensorList) {\n this.tensorListMap[tensorList.id] = tensorList;\n }\n getTensorList(id) {\n return this.tensorListMap[id];\n }\n dispose(keepIds) {\n for (const key in this.tensorArrayMap) {\n this.tensorArrayMap[key].clearAndClose(keepIds);\n }\n for (const key in this.tensorListMap) {\n this.tensorListMap[key].clearAndClose(keepIds);\n }\n }\n}\n//# sourceMappingURL=execution_context.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { parseNodeName } from '../operations/executors/utils';\n/**\n * Given graph inputs and desired outputs, find the minimal set of nodes\n * to execute in order to compute the outputs. In addition return other useful\n * info such:\n * - Missing inputs needed to compute the output.\n * - Whether the subgraph contains dynamic ops (control flow, dynamic shape).\n * - Alternative inputs in order to avoid async (dynamic op) execution.\n */\nexport function getExecutionSubgraph(inputs, outputs, weightMap, initNodes) {\n const usedNodes = new Set();\n const missingInputs = [];\n let dynamicNode = null;\n let syncInputs = null;\n // Start with the outputs, going backwards and find all the nodes that are\n // needed to compute those outputs.\n const seen = new Set();\n const inputNodeNames = Object.keys(inputs).map(name => parseNodeName(name)[0]);\n let initNodeNames = [];\n if (initNodes != null) {\n initNodeNames = initNodes.map(node => parseNodeName(node.name)[0]);\n }\n const frontier = [...outputs];\n while (frontier.length > 0) {\n const node = frontier.pop();\n if (isControlFlow(node) || isDynamicShape(node) || isHashTable(node)) {\n if (dynamicNode == null) {\n dynamicNode = node;\n syncInputs = dynamicNode.children.map(child => child.name)\n .filter(name => usedNodes.has(name));\n }\n }\n usedNodes.add(node.name);\n // Weights are dead end since we already have their values.\n if (weightMap[node.name] != null) {\n continue;\n }\n // This node is a dead end since it's one of the user-provided inputs.\n if (inputNodeNames.indexOf(node.name) !== -1) {\n continue;\n }\n // This node is a dead end since it doesn't have any inputs.\n if (initNodeNames.indexOf(node.name) !== -1) {\n continue;\n }\n if (node.inputs.length === 0) {\n missingInputs.push(node.name);\n continue;\n }\n node.inputs.forEach(input => {\n // Don't add to the frontier if it is already there.\n if (seen.has(input.name)) {\n return;\n }\n seen.add(input.name);\n frontier.push(input);\n });\n }\n return { inputs, outputs, usedNodes, missingInputs, dynamicNode, syncInputs };\n}\n/**\n * Given the execution info, return a list of nodes in topological order that\n * need to be executed to compute the output.\n */\nexport function getNodesInTopologicalOrder(graph, weightMap, executionInfo) {\n const { usedNodes, inputs } = executionInfo;\n const frontier = [];\n const inputNodes = 
Object.keys(inputs)\n .map(name => parseNodeName(name)[0])\n .map(name => graph.nodes[name]);\n const initNodes = graph.initNodes;\n inputNodes.forEach(input => {\n if (usedNodes.has(input.name)) {\n frontier.push(input);\n }\n });\n graph.weights.forEach(weight => {\n if (usedNodes.has(weight.name)) {\n frontier.push(weight);\n }\n });\n if (initNodes != null) {\n initNodes.forEach(node => {\n if (usedNodes.has(node.name)) {\n frontier.push(node);\n }\n });\n }\n const seen = new Set();\n const orderedNodes = [];\n while (frontier.length > 0) {\n const node = frontier.pop();\n seen.add(node.name);\n if (!weightMap[node.name]) {\n orderedNodes.push(node);\n }\n node.children.forEach(child => {\n if (!seen.has(child.name) && usedNodes.has(child.name) &&\n child.inputs.every(input => seen.has(input.name))) {\n frontier.push(child);\n }\n });\n }\n return orderedNodes;\n}\nconst CONTROL_FLOW_OPS = [\n 'Switch', 'Merge', 'Enter', 'Exit', 'NextIteration', 'StatelessIf',\n 'StatelessWhile', 'if', 'While'\n];\nconst DYNAMIC_SHAPE_OPS = [\n 'NonMaxSuppressionV2', 'NonMaxSuppressionV3', 'NonMaxSuppressionV5', 'Where'\n];\nconst HASH_TABLE_OPS = [\n 'HashTable', 'HashTableV2', 'LookupTableImport', 'LookupTableImportV2',\n 'LookupTableFind', 'LookupTableFindV2'\n];\nexport function isControlFlow(node) {\n return CONTROL_FLOW_OPS.indexOf(node.op) >= 0;\n}\nexport function isDynamicShape(node) {\n return DYNAMIC_SHAPE_OPS.indexOf(node.op) >= 0;\n}\nexport function isHashTable(node) {\n return HASH_TABLE_OPS.indexOf(node.op) >= 0;\n}\n//# sourceMappingURL=model_analysis.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { tidy, util } from '@tensorflow/tfjs-core';\nimport { getNodeNameAndIndex, getParamValue, getTensor, getTensorsForCurrentContenxt, parseNodeName } from '../operations/executors/utils';\nimport { executeOp } from '../operations/operation_executor';\nimport { ExecutionContext } from './execution_context';\nimport { getExecutionSubgraph, getNodesInTopologicalOrder, isControlFlow } from './model_analysis';\nexport class GraphExecutor {\n /**\n *\n * @param graph Graph the model or function graph to be executed.\n * @param parent When building function exector you need to set the parent\n * executor. 
Since the weights and function executor maps are set at parant\n * level, that function executor can access the function maps and weight maps\n * through the parent.\n */\n constructor(graph, parent) {\n this.graph = graph;\n this.parent = parent;\n this.compiledMap = new Map();\n this._weightMap = {};\n this.SEPERATOR = ',';\n this._functions = {};\n this._functionExecutorMap = {};\n this._outputs = graph.outputs;\n this._inputs = graph.inputs;\n this._initNodes = graph.initNodes;\n this._signature = graph.signature;\n this._functions = graph.functions;\n // create sub-graph executors\n if (graph.functions != null) {\n Object.keys(graph.functions).forEach(name => {\n this._functionExecutorMap[name] =\n new GraphExecutor(graph.functions[name], this);\n });\n }\n }\n get weightIds() {\n return this.parent ? this.parent.weightIds : this._weightIds;\n }\n get functionExecutorMap() {\n return this.parent ? this.parent.functionExecutorMap :\n this._functionExecutorMap;\n }\n get weightMap() {\n return this.parent ? this.parent.weightMap : this._weightMap;\n }\n set weightMap(weightMap) {\n const weightIds = Object.keys(weightMap).map(key => weightMap[key].map(tensor => tensor.id));\n this._weightIds = [].concat(...weightIds);\n this._weightMap = weightMap;\n }\n /**\n * Set `ResourceManager` shared by executors of a model.\n * @param resourceManager: `ResourceManager` of the `GraphModel`.\n */\n set resourceManager(resourceManager) {\n this._resourceManager = resourceManager;\n }\n get inputs() {\n return this._inputs.map(node => {\n return {\n name: node.name,\n shape: node.attrParams['shape'] ?\n node.attrParams['shape'].value :\n undefined,\n dtype: node.attrParams['dtype'] ?\n node.attrParams['dtype'].value :\n undefined\n };\n });\n }\n get outputs() {\n return this._outputs.map(node => {\n return {\n name: node.name,\n shape: node.attrParams['shape'] ?\n node.attrParams['shape'].value :\n undefined,\n dtype: node.attrParams['dtype'] ?\n node.attrParams['dtype'].value :\n undefined\n };\n });\n }\n get inputNodes() {\n return this._inputs.map(node => node.signatureKey || node.name);\n }\n get outputNodes() {\n return this._outputs.map((node) => {\n const name = node.signatureKey || node.name;\n return node.defaultOutput ? (`${name}:${node.defaultOutput}`) : name;\n });\n }\n get functions() {\n return Object.keys(this._functions).reduce((map, key) => {\n map[key] = this._functions[key].signature;\n return map;\n }, {});\n }\n getCompilationKey(inputs, outputs) {\n const sortedInputs = inputs.map(node => node.name).sort();\n const sortedOutputs = outputs.map(node => node.name).sort();\n return sortedInputs.join(this.SEPERATOR) + '--' +\n sortedOutputs.join(this.SEPERATOR);\n }\n /**\n * Compiles the inference graph and returns the minimal set of nodes that are\n * required for execution, in the correct execution order.\n */\n compile(inputs, outputs) {\n const executionInfo = getExecutionSubgraph(inputs, outputs, this.weightMap, this._initNodes);\n const { missingInputs, dynamicNode, syncInputs } = executionInfo;\n if (dynamicNode != null) {\n throw new Error(`This execution contains the node '${dynamicNode.name}', which has ` +\n `the dynamic op '${dynamicNode.op}'. Please use ` +\n `model.executeAsync() instead. 
Alternatively, to avoid the ` +\n `dynamic ops, specify the inputs [${syncInputs}]`);\n }\n if (missingInputs.length > 0) {\n const outNames = outputs.map(n => n.name);\n const inNames = Object.keys(inputs);\n throw new Error(`Cannot compute the outputs [${outNames}] from the provided inputs ` +\n `[${inNames}]. Missing the following inputs: [${missingInputs}]`);\n }\n return getNodesInTopologicalOrder(this.graph, this.weightMap, executionInfo);\n }\n /**\n * Executes the inference for given input tensors.\n * @param inputs Tensor map for the model inputs, keyed by the input node\n * names.\n * @param outputs Optional. output node name from the Tensorflow model, if\n * no outputs are specified, the default outputs of the model would be used.\n * You can inspect intermediate nodes of the model by adding them to the\n * outputs array.\n */\n execute(inputs, outputs) {\n inputs = this.mapInputs(inputs);\n const names = Object.keys(inputs).sort();\n this.checkInputs(inputs);\n this.checkInputShapeAndType(inputs);\n outputs = this.mapOutputs(outputs);\n this.checkOutputs(outputs);\n const inputNodes = names.map(name => this.graph.nodes[parseNodeName(name)[0]]);\n const outputNodeNames = outputs.map(name => parseNodeName(name)[0]);\n let outputNodes = outputNodeNames.map(name => this.graph.nodes[name]);\n // If no outputs are specified, then use the default outputs of the model.\n if (outputNodes.length === 0) {\n outputNodes = this._outputs;\n }\n const compilationKey = this.getCompilationKey(inputNodes, outputNodes);\n // Do nothing if the compiled graph cache contains the input.\n let orderedNodes = this.compiledMap.get(compilationKey);\n if (orderedNodes == null) {\n orderedNodes = this.compile(inputs, outputNodes);\n this.compiledMap.set(compilationKey, orderedNodes);\n }\n const tensorArrayMap = {};\n const tensorListMap = {};\n return tidy(() => {\n const context = new ExecutionContext(this.weightMap, tensorArrayMap, tensorListMap, this.functionExecutorMap);\n const tensorsMap = Object.assign({}, this.weightMap);\n Object.keys(inputs).forEach(name => {\n const [nodeName, index] = parseNodeName(name);\n const tensors = [];\n tensors[index] = inputs[name];\n tensorsMap[nodeName] = tensors;\n });\n const tensorsToKeep = this.getFrozenTensorIds(tensorsMap);\n const intermediateTensorConsumerCount = {};\n for (let i = 0; i < orderedNodes.length; i++) {\n const node = orderedNodes[i];\n if (!tensorsMap[node.name]) {\n const tensors = executeOp(node, tensorsMap, context, this._resourceManager);\n if (util.isPromise(tensors)) {\n throw new Error(`The execution of the op '${node.op}' returned a promise. 
` +\n `Please use model.executeAsync() instead.`);\n }\n tensorsMap[node.name] = tensors;\n this.checkTensorForDisposal(node.name, node, tensorsMap, context, tensorsToKeep, outputNodeNames, intermediateTensorConsumerCount);\n }\n }\n // dispose the context for the root executor\n if (this.parent == null) {\n context.dispose(tensorsToKeep);\n }\n return outputs.map(name => getTensor(name, tensorsMap, context));\n });\n }\n getFrozenTensorIds(tensorMap) {\n const ids = [].concat.apply([], Object.keys(tensorMap)\n .map(key => tensorMap[key])\n .map(tensors => tensors.map(tensor => tensor.id)));\n return new Set(ids);\n }\n checkTensorForDisposal(nodeName, node, tensorMap, context, tensorsToKeep, outputNames, intermediateTensorConsumerCount) {\n // Skip output nodes and any control flow nodes, since its dependency is\n // tricky to track correctly.\n if (node.category === 'control' || outputNames.indexOf(nodeName) !== -1) {\n return;\n }\n tensorMap[nodeName].forEach(tensor => {\n if (tensor != null) {\n intermediateTensorConsumerCount[tensor.id] =\n (intermediateTensorConsumerCount[tensor.id] || 0) +\n node.children.length;\n }\n });\n node.inputs.forEach(input => {\n // Skip any control flow nodes, since its dependency is tricky to track\n // correctly.\n if (input.category !== 'control') {\n const tensors = getTensorsForCurrentContenxt(input.name, tensorMap, context);\n if (tensors != null) {\n tensors.forEach(tensor => {\n if (tensor && !tensorsToKeep.has(tensor.id)) {\n const count = intermediateTensorConsumerCount[tensor.id];\n if (count === 1) {\n tensor.dispose();\n delete intermediateTensorConsumerCount[tensor.id];\n }\n else if (count != null) {\n // only intermediate nodes has count set, inputs and weights are\n // not.\n intermediateTensorConsumerCount[tensor.id]--;\n }\n }\n });\n }\n }\n });\n }\n /**\n * Executes the inference for given input tensors in Async fashion.\n * @param inputs Tensor map for the model inputs, keyed by the input node\n * names.\n * @param outputs output node name from the Tensorflow model, if no outputs\n * are specified, the default outputs of the model would be used. You can\n * inspect intermediate nodes of the model by adding them to the outputs\n * array.\n */\n async executeAsync(inputs, outputs) {\n return this._executeAsync(inputs, outputs);\n }\n /**\n * Executes the inference for given input tensors in Async fashion.\n * @param inputs Tensor map for the model inputs, keyed by the input node\n * names.\n * @param outputs Optional. output node name from the Tensorflow model,\n * if no outputs are specified, the default outputs of the model would be\n * used. You can inspect intermediate nodes of the model by adding them to the\n * outputs array.\n * @param isFunctionExecution Optional. Flag for executing a function.\n * @param tensorArrayMap Optional, global TensorArray map by id. Used for\n * function execution.\n * @param tensorArrayMap Optinal global TensorList map by id. 
Used for\n * function execution.\n */\n async _executeAsync(inputs, outputs, isFunctionExecution = false, tensorArrayMap = {}, tensorListMap = {}) {\n if (!isFunctionExecution) {\n inputs = this.mapInputs(inputs);\n this.checkInputs(inputs);\n this.checkInputShapeAndType(inputs);\n outputs = this.mapOutputs(outputs);\n this.checkOutputs(outputs);\n }\n const context = new ExecutionContext(this.weightMap, tensorArrayMap, tensorListMap, this.functionExecutorMap);\n // Graph with control flow op requires runtime evaluation of the execution\n // order, while without control flow the execution order is pre-determined\n // in the compile method.\n const tensorMap = await this.executeWithControlFlow(inputs, context, outputs, isFunctionExecution);\n const results = outputs.map(name => getTensor(name, tensorMap, context));\n // dispose all the intermediate tensors\n const outputIds = results.map(t => t.id);\n const inputIds = Object.keys(inputs).map(name => inputs[name].id);\n const keepIds = new Set([...outputIds, ...inputIds, ...this.weightIds]);\n Object.keys(tensorMap).forEach(key => {\n const tensorArray = tensorMap[key];\n tensorArray.forEach(tensor => {\n if (tensor && !tensor.isDisposed && !keepIds.has(tensor.id)) {\n tensor.dispose();\n }\n });\n });\n // dispose the context for the root executor\n if (this.parent == null) {\n context.dispose(keepIds);\n }\n return results;\n }\n async executeFunctionAsync(inputs, tensorArrayMap, tensorListMap) {\n const mappedInputs = inputs.reduce((map, tensor, index) => {\n map[this.inputs[index].name] = tensor;\n return map;\n }, {});\n return this._executeAsync(mappedInputs, this.outputNodes, true, tensorArrayMap, tensorListMap);\n }\n /**\n * When there are control flow nodes in the graph, the graph execution use\n * ExecutionContext to keep track of the frames and loop iterators.\n * @param inputs placeholder tensors for the graph.\n * @param context the execution context object for current execution.\n * @param outputNames Optional. output node name from the Tensorflow model,\n * if no outputs are specified, the default outputs of the model would be\n * used. 
You can inspect intermediate nodes of the model by adding them to the\n * outputs array.\n * @param isFunctionExecution Flag for executing a function.\n */\n async executeWithControlFlow(inputs, context, outputNames, isFunctionExecution) {\n const names = Object.keys(inputs);\n const inputNodes = names.map(name => this.graph.nodes[parseNodeName(name)[0]]);\n const outputNodeNames = outputNames.map(name => parseNodeName(name)[0]);\n let outputNodes = outputNodeNames.map(name => this.graph.nodes[name]);\n // If no outputs are specified, then use the default outputs of the model.\n if (outputNodes.length === 0) {\n outputNodes = this._outputs;\n }\n const { usedNodes, missingInputs, dynamicNode, syncInputs } = getExecutionSubgraph(inputs, outputNodes, this.weightMap, this._initNodes);\n // First nodes to execute include inputNodes, weights, and initNodes.\n const stack = [\n ...inputNodes, ...this.graph.weights, ...(this._initNodes || [])\n ].map(node => {\n return { node, contexts: context.currentContext };\n });\n const tensorsMap = Object.assign({}, this.weightMap);\n Object.keys(inputs).forEach(name => {\n const [nodeName, index] = parseNodeName(name);\n const tensors = [];\n tensors[index] = inputs[name];\n tensorsMap[nodeName] = tensors;\n });\n const intermediateTensorConsumerCount = {};\n const tensorsToKeep = this.getFrozenTensorIds(tensorsMap);\n const added = {};\n while (stack.length > 0) {\n const promises = this.processStack(inputNodes, stack, context, tensorsMap, added, tensorsToKeep, outputNodeNames, intermediateTensorConsumerCount, usedNodes);\n await Promise.all(promises);\n }\n if (dynamicNode == null && !isFunctionExecution) {\n console.warn(`This model execution did not contain any nodes with control flow ` +\n `or dynamic output shapes. You can use model.execute() instead.`);\n }\n const missingOutputs = outputNodes\n .filter(node => !isControlFlow(node) &&\n !getTensor(node.name, tensorsMap, context))\n .map(node => node.name);\n if (missingOutputs.length > 0) {\n let alternativeMsg = '';\n if (dynamicNode != null) {\n alternativeMsg =\n `Alternatively, to avoid the dynamic ops, use model.execute() ` +\n `and specify the inputs [${syncInputs}]`;\n }\n throw new Error(`Cannot compute the outputs [${missingOutputs}] from the provided ` +\n `inputs [${names}]. Consider providing the following inputs: ` +\n `[${missingInputs}]. 
${alternativeMsg}`);\n }\n return tensorsMap;\n }\n processStack(inputNodes, stack, context, tensorMap, added, tensorsToKeep, outputNames, intermediateTensorConsumerCount, usedNodes) {\n const promises = [];\n while (stack.length > 0) {\n const item = stack.pop();\n context.currentContext = item.contexts;\n let nodeName = '';\n // The tensor of the Enter op with isConstant set should be set\n // in the parent scope, so it will be available as constant for the\n // whole loop.\n if (item.node.op === 'Enter' &&\n getParamValue('isConstant', item.node, tensorMap, context)) {\n [nodeName] = getNodeNameAndIndex(item.node.name, context);\n }\n // only process nodes that are not in the tensorMap yet, this include\n // inputNodes and internal initNodes.\n if (tensorMap[item.node.name] == null) {\n const tensors = executeOp(item.node, tensorMap, context, this._resourceManager);\n if (!nodeName) {\n [nodeName] = getNodeNameAndIndex(item.node.name, context);\n }\n const currentContext = context.currentContext;\n if (util.isPromise(tensors)) {\n promises.push(tensors.then(t => {\n tensorMap[nodeName] = t;\n context.currentContext = currentContext;\n this.checkTensorForDisposal(nodeName, item.node, tensorMap, context, tensorsToKeep, outputNames, intermediateTensorConsumerCount);\n this.processChildNodes(item.node, stack, context, tensorMap, added, usedNodes);\n return t;\n }));\n }\n else {\n tensorMap[nodeName] = tensors;\n this.checkTensorForDisposal(nodeName, item.node, tensorMap, context, tensorsToKeep, outputNames, intermediateTensorConsumerCount);\n this.processChildNodes(item.node, stack, context, tensorMap, added, usedNodes);\n }\n }\n else {\n this.processChildNodes(item.node, stack, context, tensorMap, added, usedNodes);\n }\n }\n return promises;\n }\n processChildNodes(node, stack, context, tensorMap, added, usedNodes) {\n node.children.forEach((childNode) => {\n const [nodeName,] = getNodeNameAndIndex(childNode.name, context);\n if (added[nodeName] || !usedNodes.has(childNode.name)) {\n return;\n }\n // Merge op can be pushed if any of its inputs has value.\n if (childNode.op === 'Merge') {\n if (childNode.inputNames.some(name => {\n return !!getTensor(name, tensorMap, context);\n })) {\n added[nodeName] = true;\n stack.push({ contexts: context.currentContext, node: childNode });\n }\n }\n else // Otherwise all inputs must to have value.\n if (childNode.inputNames.every(name => {\n return !!getTensor(name, tensorMap, context);\n })) {\n added[nodeName] = true;\n stack.push({ contexts: context.currentContext, node: childNode });\n }\n });\n }\n /**\n * Releases the memory used by the weight tensors.\n */\n dispose() {\n Object.keys(this.weightMap)\n .forEach(key => this.weightMap[key].forEach(tensor => tensor.dispose()));\n }\n checkInputShapeAndType(inputs) {\n Object.keys(inputs).forEach(name => {\n const input = inputs[name];\n const [nodeName,] = parseNodeName(name);\n const node = this.graph.nodes[nodeName];\n if (node.attrParams['shape'] && node.attrParams['shape'].value) {\n const shape = node.attrParams['shape'].value;\n const match = shape.length === input.shape.length &&\n input.shape.every((dim, index) => shape[index] === -1 || shape[index] === dim);\n util.assert(match, () => `The shape of dict['${node.name}'] provided in ` +\n `model.execute(dict) must be [${shape}], but was ` +\n `[${input.shape}]`);\n }\n if (node.attrParams['dtype'] && node.attrParams['dtype'].value) {\n util.assert(input.dtype === node.attrParams['dtype'].value, () => `The dtype of dict['${node.name}'] 
provided in ` +\n `model.execute(dict) must be ` +\n `${node.attrParams['dtype'].value}, but was ${input.dtype}`);\n }\n });\n }\n mapInputs(inputs) {\n const result = {};\n for (const inputName in inputs) {\n if (this._signature != null && this._signature.inputs != null &&\n this._signature.inputs[inputName] != null) {\n const tensor = this._signature.inputs[inputName];\n result[tensor.name] = inputs[inputName];\n }\n else {\n result[inputName] = inputs[inputName];\n }\n }\n return result;\n }\n checkInputs(inputs) {\n const notInGraph = Object.keys(inputs).filter(name => {\n const [nodeName] = parseNodeName(name);\n return this.graph.nodes[nodeName] == null;\n });\n if (notInGraph.length > 0) {\n throw new Error(`The dict provided in model.execute(dict) has ` +\n `keys: [${notInGraph}] that are not part of graph`);\n }\n }\n mapOutputs(outputs) {\n return outputs.map(name => {\n if (this._signature != null && this._signature.outputs != null &&\n this._signature.outputs[name] != null) {\n const tensor = this._signature.outputs[name];\n return tensor.name;\n }\n return name;\n }, {});\n }\n checkOutputs(outputs) {\n outputs.forEach(name => {\n const [normalizedName] = parseNodeName(name);\n if (!this.graph.nodes[normalizedName]) {\n throw new Error(`The output '${name}' is not found in the graph`);\n }\n });\n }\n}\n//# sourceMappingURL=graph_executor.js.map", "/**\n * Contains global resources of a model.\n */\nexport class ResourceManager {\n constructor(hashTableNameToHandle = {}, hashTableMap = {}) {\n this.hashTableNameToHandle = hashTableNameToHandle;\n this.hashTableMap = hashTableMap;\n }\n /**\n * Register a `HashTable` in the resource manager.\n *\n * The `HashTable` can be retrieved by `resourceManager.getHashTableById`,\n * where id is the table handle tensor's id.\n *\n * @param name Op node name that creates the `HashTable`.\n * @param hashTable The `HashTable` to be added to resource manager.\n */\n addHashTable(name, hashTable) {\n this.hashTableNameToHandle[name] = hashTable.handle;\n this.hashTableMap[hashTable.id] = hashTable;\n }\n /**\n * Get the table handle by node name.\n * @param name Op node name that creates the `HashTable`. This name is also\n * used in the inputs list of lookup and import `HashTable` ops.\n */\n getHashTableHandleByName(name) {\n return this.hashTableNameToHandle[name];\n }\n /**\n * Get the actual `HashTable` by its handle tensor's id.\n * @param id The id of the handle tensor.\n */\n getHashTableById(id) {\n return this.hashTableMap[id];\n }\n /**\n * Dispose `ResourceManager`, including its hashTables and tensors in them.\n */\n dispose() {\n for (const key in this.hashTableMap) {\n this.hashTableMap[key].clearAndClose();\n delete this.hashTableMap[key];\n }\n for (const name in this.hashTableNameToHandle) {\n this.hashTableNameToHandle[name].dispose();\n delete this.hashTableNameToHandle[name];\n }\n }\n}\n//# sourceMappingURL=resource_manager.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { io, Tensor } from '@tensorflow/tfjs-core';\nimport { OperationMapper } from '../operations/operation_mapper';\nimport { GraphExecutor } from './graph_executor';\nimport { ResourceManager } from './resource_manager';\nexport const TFHUB_SEARCH_PARAM = '?tfjs-format=file';\nexport const DEFAULT_MODEL_NAME = 'model.json';\n/**\n * A `tf.GraphModel` is a directed, acyclic graph built from a\n * SavedModel GraphDef and allows inference execution.\n *\n * A `tf.GraphModel` can only be created by loading from a model converted from\n * a [TensorFlow SavedModel](https://www.tensorflow.org/guide/saved_model) using\n * the command line converter tool and loaded via `tf.loadGraphModel`.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\nexport class GraphModel {\n /**\n * @param modelUrl url for the model, or an `io.IOHandler`.\n * @param weightManifestUrl url for the weight file generated by\n * scripts/convert.py script.\n * @param requestOption options for Request, which allows to send credentials\n * and custom headers.\n * @param onProgress Optional, progress callback function, fired periodically\n * before the load is completed.\n */\n constructor(modelUrl, loadOptions = {}) {\n this.modelUrl = modelUrl;\n this.loadOptions = loadOptions;\n this.version = 'n/a';\n if (loadOptions == null) {\n this.loadOptions = {};\n }\n this.resourceManager = new ResourceManager();\n }\n // Returns the version information for the tensorflow model GraphDef.\n get modelVersion() {\n return this.version;\n }\n get inputNodes() {\n return this.executor.inputNodes;\n }\n get outputNodes() {\n return this.executor.outputNodes;\n }\n get inputs() {\n return this.executor.inputs;\n }\n get outputs() {\n return this.executor.outputs;\n }\n get weights() {\n return this.executor.weightMap;\n }\n findIOHandler() {\n const path = this.modelUrl;\n if (path.load != null) {\n // Path is an IO Handler.\n this.handler = path;\n }\n else if (this.loadOptions.requestInit != null) {\n this.handler = io.browserHTTPRequest(path, this.loadOptions);\n }\n else {\n const handlers = io.getLoadHandlers(path, this.loadOptions);\n if (handlers.length === 0) {\n // For backward compatibility: if no load handler can be found,\n // assume it is a relative http path.\n handlers.push(io.browserHTTPRequest(path, this.loadOptions));\n }\n else if (handlers.length > 1) {\n throw new Error(`Found more than one (${handlers.length}) load handlers for ` +\n `URL '${[path]}'`);\n }\n this.handler = handlers[0];\n }\n }\n /**\n * Loads the model and weight files, construct the in memory weight map and\n * compile the inference graph.\n */\n async load() {\n this.findIOHandler();\n if (this.handler.load == null) {\n throw new Error('Cannot proceed with model loading because the IOHandler provided ' +\n 'does not have the `load` method implemented.');\n }\n const artifacts 
= await this.handler.load();\n return this.loadSync(artifacts);\n }\n /**\n * Synchronously construct the in memory weight map and\n * compile the inference graph. Also initialize hashtable if any.\n *\n * @doc {heading: 'Models', subheading: 'Classes', ignoreCI: true}\n */\n loadSync(artifacts) {\n this.artifacts = artifacts;\n const graph = this.artifacts.modelTopology;\n let signature = {};\n if (this.artifacts.userDefinedMetadata != null) {\n signature = // tslint:disable-next-line:no-any\n this.artifacts.userDefinedMetadata.signature;\n }\n this.version = `${graph.versions.producer}.${graph.versions.minConsumer}`;\n const weightMap = io.decodeWeights(this.artifacts.weightData, this.artifacts.weightSpecs);\n this.executor = new GraphExecutor(OperationMapper.Instance.transformGraph(graph, signature));\n this.executor.weightMap = this.convertTensorMapToTensorsMap(weightMap);\n // Attach a model-level resourceManager to each executor to share resources,\n // such as `HashTable`.\n this.executor.resourceManager = this.resourceManager;\n if (artifacts.modelInitializer != null) {\n const initializer = OperationMapper.Instance.transformGraph(artifacts.modelInitializer);\n this.initializer = new GraphExecutor(initializer);\n this.initializer.weightMap = this.executor.weightMap;\n // Attach a model-level resourceManager to the initializer, the\n // hashTables created from when executing the initializer will be stored\n // in the resourceManager.\n this.initializer.resourceManager = this.resourceManager;\n this.initializer.executeAsync({}, []);\n }\n return true;\n }\n /**\n * Save the configuration and/or weights of the GraphModel.\n *\n * An `IOHandler` is an object that has a `save` method of the proper\n * signature defined. The `save` method manages the storing or\n * transmission of serialized data (\"artifacts\") that represent the\n * model's topology and weights onto or via a specific medium, such as\n * file downloads, local storage, IndexedDB in the web browser and HTTP\n * requests to a server. TensorFlow.js provides `IOHandler`\n * implementations for a number of frequently used saving mediums, such as\n * `tf.io.browserDownloads` and `tf.io.browserLocalStorage`. 
See `tf.io`\n * for more details.\n *\n * This method also allows you to refer to certain types of `IOHandler`s\n * as URL-like string shortcuts, such as 'localstorage://' and\n * 'indexeddb://'.\n *\n * Example 1: Save `model`'s topology and weights to browser [local\n * storage](https://developer.mozilla.org/en-US/docs/Web/API/Window/localStorage);\n * then load it back.\n *\n * ```js\n * const modelUrl =\n * 'https://storage.googleapis.com/tfjs-models/savedmodel/mobilenet_v2_1.0_224/model.json';\n * const model = await tf.loadGraphModel(modelUrl);\n * const zeros = tf.zeros([1, 224, 224, 3]);\n * model.predict(zeros).print();\n *\n * const saveResults = await model.save('localstorage://my-model-1');\n *\n * const loadedModel = await tf.loadGraphModel('localstorage://my-model-1');\n * console.log('Prediction from loaded model:');\n * model.predict(zeros).print();\n * ```\n *\n * @param handlerOrURL An instance of `IOHandler` or a URL-like,\n * scheme-based string shortcut for `IOHandler`.\n * @param config Options for saving the model.\n * @returns A `Promise` of `SaveResult`, which summarizes the result of\n * the saving, such as byte sizes of the saved artifacts for the model's\n * topology and weight values.\n *\n * @doc {heading: 'Models', subheading: 'Classes', ignoreCI: true}\n */\n async save(handlerOrURL, config) {\n if (typeof handlerOrURL === 'string') {\n const handlers = io.getSaveHandlers(handlerOrURL);\n if (handlers.length === 0) {\n throw new Error(`Cannot find any save handlers for URL '${handlerOrURL}'`);\n }\n else if (handlers.length > 1) {\n throw new Error(`Found more than one (${handlers.length}) save handlers for ` +\n `URL '${handlerOrURL}'`);\n }\n handlerOrURL = handlers[0];\n }\n if (handlerOrURL.save == null) {\n throw new Error('GraphModel.save() cannot proceed because the IOHandler ' +\n 'provided does not have the `save` attribute defined.');\n }\n return handlerOrURL.save(this.artifacts);\n }\n /**\n * Execute the inference for the input tensors.\n *\n * @param input The input tensors, when there is single input for the model,\n * inputs param should be a `tf.Tensor`. For models with mutliple inputs,\n * inputs params should be in either `tf.Tensor`[] if the input order is\n * fixed, or otherwise NamedTensorMap format.\n *\n * For model with multiple inputs, we recommend you use NamedTensorMap as the\n * input type, if you use `tf.Tensor`[], the order of the array needs to\n * follow the\n * order of inputNodes array. @see {@link GraphModel.inputNodes}\n *\n * You can also feed any intermediate nodes using the NamedTensorMap as the\n * input type. For example, given the graph\n * InputNode => Intermediate => OutputNode,\n * you can execute the subgraph Intermediate => OutputNode by calling\n * model.execute('IntermediateNode' : tf.tensor(...));\n *\n * This is useful for models that uses tf.dynamic_rnn, where the intermediate\n * state needs to be fed manually.\n *\n * For batch inference execution, the tensors for each input need to be\n * concatenated together. For example with mobilenet, the required input shape\n * is [1, 244, 244, 3], which represents the [batch, height, width, channel].\n * If we are provide a batched data of 100 images, the input tensor should be\n * in the shape of [100, 244, 244, 3].\n *\n * @param config Prediction configuration for specifying the batch size and\n * output node names. Currently the batch size option is ignored for graph\n * model.\n *\n * @returns Inference result tensors. 
The output would be single `tf.Tensor`\n * if model has single output node, otherwise Tensor[] or NamedTensorMap[]\n * will be returned for model with multiple outputs.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n predict(inputs, config) {\n return this.execute(inputs, this.outputNodes);\n }\n normalizeInputs(inputs) {\n if (!(inputs instanceof Tensor) && !Array.isArray(inputs)) {\n // The input is already a NamedTensorMap.\n return inputs;\n }\n inputs = Array.isArray(inputs) ? inputs : [inputs];\n if (inputs.length !== this.inputNodes.length) {\n throw new Error('Input tensor count mismatch,' +\n `the graph model has ${this.inputNodes.length} placeholders, ` +\n `while there are ${inputs.length} input tensors.`);\n }\n return this.inputNodes.reduce((map, inputName, i) => {\n map[inputName] = inputs[i];\n return map;\n }, {});\n }\n normalizeOutputs(outputs) {\n outputs = outputs || this.outputNodes;\n return !Array.isArray(outputs) ? [outputs] : outputs;\n }\n /**\n * Executes inference for the model for given input tensors.\n * @param inputs tensor, tensor array or tensor map of the inputs for the\n * model, keyed by the input node names.\n * @param outputs output node name from the Tensorflow model, if no\n * outputs are specified, the default outputs of the model would be used.\n * You can inspect intermediate nodes of the model by adding them to the\n * outputs array.\n *\n * @returns A single tensor if provided with a single output or no outputs\n * are provided and there is only one default output, otherwise return a\n * tensor array. The order of the tensor array is the same as the outputs\n * if provided, otherwise the order of outputNodes attribute of the model.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n execute(inputs, outputs) {\n inputs = this.normalizeInputs(inputs);\n outputs = this.normalizeOutputs(outputs);\n const result = this.executor.execute(inputs, outputs);\n return result.length > 1 ? result : result[0];\n }\n /**\n * Executes inference for the model for given input tensors in async\n * fashion, use this method when your model contains control flow ops.\n * @param inputs tensor, tensor array or tensor map of the inputs for the\n * model, keyed by the input node names.\n * @param outputs output node name from the Tensorflow model, if no outputs\n * are specified, the default outputs of the model would be used. You can\n * inspect intermediate nodes of the model by adding them to the outputs\n * array.\n *\n * @returns A Promise of single tensor if provided with a single output or\n * no outputs are provided and there is only one default output, otherwise\n * return a tensor map.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n async executeAsync(inputs, outputs) {\n inputs = this.normalizeInputs(inputs);\n outputs = this.normalizeOutputs(outputs);\n const result = await this.executor.executeAsync(inputs, outputs);\n return result.length > 1 ? 
result : result[0];\n }\n convertTensorMapToTensorsMap(map) {\n return Object.keys(map).reduce((newMap, key) => {\n newMap[key] = [map[key]];\n return newMap;\n }, {});\n }\n /**\n * Releases the memory used by the weight tensors and resourceManager.\n *\n * @doc {heading: 'Models', subheading: 'Classes'}\n */\n dispose() {\n this.executor.dispose();\n if (this.initializer) {\n this.initializer.dispose();\n }\n this.resourceManager.dispose();\n }\n}\n/**\n * Load a graph model given a URL to the model definition.\n *\n * Example of loading MobileNetV2 from a URL and making a prediction with a\n * zeros input:\n *\n * ```js\n * const modelUrl =\n * 'https://storage.googleapis.com/tfjs-models/savedmodel/mobilenet_v2_1.0_224/model.json';\n * const model = await tf.loadGraphModel(modelUrl);\n * const zeros = tf.zeros([1, 224, 224, 3]);\n * model.predict(zeros).print();\n * ```\n *\n * Example of loading MobileNetV2 from a TF Hub URL and making a prediction with\n * a zeros input:\n *\n * ```js\n * const modelUrl =\n * 'https://tfhub.dev/google/imagenet/mobilenet_v2_140_224/classification/2';\n * const model = await tf.loadGraphModel(modelUrl, {fromTFHub: true});\n * const zeros = tf.zeros([1, 224, 224, 3]);\n * model.predict(zeros).print();\n * ```\n * @param modelUrl The url or an `io.IOHandler` that loads the model.\n * @param options Options for the HTTP request, which allows to send credentials\n * and custom headers.\n *\n * @doc {heading: 'Models', subheading: 'Loading'}\n */\nexport async function loadGraphModel(modelUrl, options = {}) {\n if (modelUrl == null) {\n throw new Error('modelUrl in loadGraphModel() cannot be null. Please provide a url ' +\n 'or an IOHandler that loads the model');\n }\n if (options == null) {\n options = {};\n }\n if (options.fromTFHub) {\n if (modelUrl.load == null) {\n if (!modelUrl.endsWith('/')) {\n modelUrl = modelUrl + '/';\n }\n modelUrl = `${modelUrl}${DEFAULT_MODEL_NAME}${TFHUB_SEARCH_PARAM}`;\n }\n }\n const model = new GraphModel(modelUrl, options);\n await model.load();\n return model;\n}\n//# sourceMappingURL=graph_model.js.map", "/** @license See the LICENSE file. */\n// This code is auto-generated, do not modify this file!\nconst version = '2.7.0';\nexport { version };\n//# sourceMappingURL=version.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport { GraphModel, loadGraphModel } from './executor/graph_model';\nexport { deregisterOp, registerOp } from './operations/custom_op/register';\nexport { version as version_converter } from './version';\n//# sourceMappingURL=index.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
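As a hedged sketch of the `execute()` behaviour documented above: the node names 'input', 'Intermediate' and 'Identity' are hypothetical placeholders that depend on the actual graph, and `modelUrl` is reused from the earlier examples.

```js
// Sketch: execute with default outputs, then request named output nodes.
const model = await tf.loadGraphModel(modelUrl);

// Single input, default output node.
const out = model.execute(tf.zeros([1, 224, 224, 3]));
out.print();

// Feed a named input and request two output nodes (hypothetical names);
// the returned array follows the order of the requested names.
const [intermediate, logits] = model.execute(
    {'input': tf.zeros([1, 224, 224, 3])}, ['Intermediate', 'Identity']);
```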
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport * as tf from '@tensorflow/tfjs-core';\n/**\n * Apply a mapping function to a nested structure in a recursive manner.\n *\n * The result of the mapping is an object with the same nested structure (i.e.,\n * of arrays and dicts) as the input, except that some subtrees are replaced,\n * according to the results of the mapping function.\n *\n * Mappings are memoized. Thus, if the nested structure contains the same\n * object in multiple positions, the output will contain the same mapped object\n * in those positions. Cycles are not supported, however.\n *\n * @param input: The object to which to apply the mapping function.\n * @param mapFn: A function that expects a single node of the object tree, and\n * returns a `DeepMapResult`. The `DeepMapResult` either provides a\n * replacement value for that node (i.e., replacing the subtree), or indicates\n * that the node should be processed recursively.\n */\nexport function deepMap(input, mapFn) {\n return deepMapInternal(input, mapFn);\n}\n/**\n * @param seen: A Map of known object mappings (i.e., memoized results of\n * `mapFn()`)\n * @param containedIn: An set containing objects on the reference path currently\n * being processed (used to detect cycles).\n */\nfunction deepMapInternal(input, mapFn, seen = new Map(), containedIn = new Set()) {\n if (input == null) {\n return null;\n }\n if (containedIn.has(input)) {\n throw new Error('Circular references are not supported.');\n }\n if (seen.has(input)) {\n return seen.get(input);\n }\n const result = mapFn(input);\n if (result.recurse && result.value !== null) {\n throw new Error('A deep map function may not return both a value and recurse=true.');\n }\n if (!result.recurse) {\n seen.set(input, result.value);\n return result.value;\n }\n else if (isIterable(input)) {\n // tslint:disable-next-line:no-any\n const mappedIterable = Array.isArray(input) ? [] : {};\n containedIn.add(input);\n for (const k in input) {\n const child = input[k];\n const childResult = deepMapInternal(child, mapFn, seen, containedIn);\n mappedIterable[k] = childResult;\n }\n containedIn.delete(input);\n return mappedIterable;\n }\n else {\n throw new Error(`Can't recurse into non-iterable type: ${input}`);\n }\n}\n// TODO(soergel, kangyizhang) Reconsider naming of deepZip() to avoid confusion\n// with zip()\n/**\n * Zip nested structures together in a recursive manner.\n *\n * This has the effect of transposing or pivoting data, e.g. converting it from\n * a row-major representation to a column-major representation.\n *\n * For example, `deepZip([{a: 1, b: 2}, {a: 3, b: 4}])` returns\n * `{a: [1, 3], b: [2, 4]}`.\n *\n * The inputs should all have the same nested structure (i.e., of arrays and\n * dicts). 
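A minimal sketch of `deepMap` usage, assuming access to the internal module above (the import path is an assumption and may differ between builds); the mapping function returns the `{value, recurse}` shape described in the doc comment.

```js
// Sketch only: deepMap is an internal tfjs-data utility.
import {deepMap} from './util/deep_map';

// Double every numeric leaf while recursing into arrays and plain objects.
const doubled = deepMap({a: 1, b: [2, 3], c: {d: 4}}, (node) => {
  if (typeof node === 'number') {
    return {value: node * 2, recurse: false};  // replace this leaf
  }
  return {value: null, recurse: true};         // descend into the subtree
});
// doubled is {a: 2, b: [4, 6], c: {d: 8}}
```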
The result is a single object with the same nested structure, where\n * the leaves are arrays collecting the values of the inputs at that location\n * (or, optionally, the result of a custom function applied to those arrays).\n *\n * @param inputs: An array of the objects to zip together.\n * @param zipFn: (optional) A function that expects an array of elements at a\n * single node of the object tree, and returns a `DeepMapResult`. The\n * `DeepMapResult` either provides a result value for that node (i.e.,\n * representing the subtree), or indicates that the node should be processed\n * recursively. The default zipFn recurses as far as possible and places\n * arrays at the leaves.\n */\nexport function deepZip(inputs, zipFn = zipToList) {\n return deepZipInternal(inputs, zipFn);\n}\n/**\n * @param containedIn: An set containing objects on the reference path currently\n * being processed (used to detect cycles).\n */\nfunction deepZipInternal(inputs, zipFn, containedIn = new Set()) {\n // The recursion follows the structure of input 0; it's assumed that all the\n // other inputs have the same structure.\n const input = inputs[0];\n if (containedIn.has(input)) {\n throw new Error('Circular references are not supported.');\n }\n const result = zipFn(inputs);\n if (result.recurse && result.value !== null) {\n throw new Error('A deep zip function may not return both a value and recurse=true.');\n }\n if (!result.recurse) {\n return result.value;\n }\n else if (isIterable(input)) {\n // tslint:disable-next-line:no-any\n const mappedIterable = Array.isArray(input) ? [] : {};\n containedIn.add(input);\n for (const k in input) {\n const children = inputs.map(x => x[k]);\n const childResult = deepZipInternal(children, zipFn, containedIn);\n mappedIterable[k] = childResult;\n }\n containedIn.delete(input);\n return mappedIterable;\n }\n else {\n throw new Error(`Can't recurse into non-iterable type: ${input}`);\n }\n}\n// tslint:disable-next-line:no-any\nexport function zipToList(x) {\n if (x === null) {\n return null;\n }\n // TODO(soergel): validate array type?\n if (isIterable(x[0])) {\n return { value: null, recurse: true };\n }\n else {\n return { value: x, recurse: false };\n }\n}\n/**\n * Apply an async mapping function to a nested structure in a recursive manner.\n *\n * This first creates a nested structure of Promises, and then awaits all of\n * those, resulting in a single Promise for a resolved nested structure.\n *\n * The result of the mapping is an object with the same nested structure (i.e.,\n * of arrays and dicts) as the input, except that some subtrees are replaced,\n * according to the results of the mapping function.\n *\n * Mappings are memoized. Thus, if the nested structure contains the same\n * object in multiple positions, the output will contain the same mapped object\n * in those positions. Cycles are not supported, however.\n *\n * @param input: The object to which to apply the mapping function.\n * @param mapFn: A function that expects a single node of the object tree, and\n * returns a `DeepMapAsyncResult`. The `DeepMapAsyncResult` either provides\n * a `Promise` for a replacement value for that node (i.e., replacing the\n * subtree), or indicates that the node should be processed recursively. 
Note\n * that the decision whether or not to recurse must be made immediately; only\n * the mapped value may be promised.\n */\nexport async function deepMapAndAwaitAll(input, mapFn) {\n const seen = new Map();\n // First do a normal deepMap, collecting Promises in 'seen' as a side effect.\n deepMapInternal(input, mapFn, seen);\n // Replace the Promises in 'seen' in place.\n // Note TypeScript provides no async map iteration, and regular map iteration\n // is broken too, so sadly we have to do Array.from() to make it work.\n // (There's no advantage to Promise.all(), and that would be tricky anyway.)\n for (const key of Array.from(seen.keys())) {\n const value = seen.get(key);\n if (tf.util.isPromise(value)) {\n const mappedValue = await value;\n seen.set(key, mappedValue);\n }\n }\n // Normal deepMap again, this time filling in the resolved values.\n // It's unfortunate that we have to do two passes.\n // TODO(soergel): test performance and think harder about a fast solution.\n const result = deepMapInternal(input, mapFn, seen);\n return result;\n}\n/**\n * Determine whether the argument is iterable.\n *\n * @returns true if the argument is an array or any non-Tensor object.\n */\n// tslint:disable-next-line:no-any\nexport function isIterable(obj) {\n return obj != null && (!ArrayBuffer.isView(obj)) &&\n (Array.isArray(obj) ||\n (typeof obj === 'object' && !(obj instanceof tf.Tensor)));\n}\n/**\n * Determine whether the argument can be converted to Tensor.\n *\n * Tensors, primitives, arrays, and TypedArrays all qualify; anything else does\n * not.\n *\n * @returns true if the argument can be converted to Tensor.\n */\n// tslint:disable-next-line:no-any\nexport function canTensorify(obj) {\n return obj == null || isPrimitive(obj) || Array.isArray(obj) ||\n (typeof obj === 'object' && (obj instanceof tf.Tensor)) ||\n tf.util.isTypedArray(obj);\n}\n/**\n * Returns true if the given `value` is a primitive type. Otherwise returns\n * false. This is equivalant to node util.isPrimitive\n */\nfunction isPrimitive(value) {\n return (value === null ||\n (typeof value !== 'object' && typeof value !== 'function'));\n}\n//# sourceMappingURL=deep_map.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport * as tf from '@tensorflow/tfjs-core';\nimport { deepMap, isIterable } from './deep_map';\nexport function deepClone(container) {\n return deepMap(container, cloneIfTensor);\n}\n// tslint:disable-next-line: no-any\nfunction cloneIfTensor(item) {\n if (item instanceof tf.Tensor) {\n return ({ value: item.clone(), recurse: false });\n }\n else if (isIterable(item)) {\n return { value: null, recurse: true };\n }\n else {\n return { value: item, recurse: false };\n }\n}\n//# sourceMappingURL=deep_clone.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\n/**\n * A ring buffer, providing O(1) FIFO, LIFO, and related operations.\n */\nexport class RingBuffer {\n /**\n * Constructs a `RingBuffer`.\n * @param capacity The number of items that the buffer can accomodate.\n */\n constructor(capacity) {\n this.capacity = capacity;\n // Note we store the indices in the range 0 <= index < 2*capacity.\n // This allows us to distinguish the full from the empty case.\n // See https://www.snellman.net/blog/archive/2016-12-13-ring-buffers/\n this.begin = 0; // inclusive\n this.end = 0; // exclusive\n if (capacity == null) {\n throw new RangeError('Can\\'t create a ring buffer of unknown capacity.');\n }\n if (capacity < 1) {\n throw new RangeError('Can\\'t create ring buffer of capacity < 1.');\n }\n this.data = new Array(capacity);\n this.doubledCapacity = 2 * capacity;\n }\n /**\n * Map any index into the range 0 <= index < 2*capacity.\n */\n wrap(index) {\n // don't trust % on negative numbers\n while (index < 0) {\n index += this.doubledCapacity;\n }\n return index % this.doubledCapacity;\n }\n get(index) {\n if (index < 0) {\n throw new RangeError('Can\\'t get item at a negative index.');\n }\n return this.data[index % this.capacity];\n }\n set(index, value) {\n if (index < 0) {\n throw new RangeError('Can\\'t set item at a negative index.');\n }\n this.data[index % this.capacity] = value;\n }\n /**\n * Returns the current number of items in the buffer.\n */\n length() {\n let length = this.end - this.begin;\n if (length < 0) {\n length = this.doubledCapacity + length;\n }\n return length;\n }\n /**\n * Reports whether the buffer is full.\n * @returns true if the number of items in the buffer equals its capacity, and\n * false otherwise.\n */\n isFull() {\n return this.length() === this.capacity;\n }\n /**\n * Reports whether the buffer is empty.\n * @returns true if the number of items in the buffer equals zero, and\n * false otherwise.\n */\n isEmpty() {\n return this.length() === 0;\n }\n /**\n * Adds an item to the end of the buffer.\n */\n push(value) {\n if (this.isFull()) {\n throw new RangeError('Ring buffer is full.');\n }\n this.set(this.end, value);\n this.end = this.wrap(this.end + 1);\n }\n /**\n * Adds many items to the end of the buffer, in order.\n */\n pushAll(values) {\n for (const value of values) {\n this.push(value);\n }\n }\n /**\n * Removes and returns the last item in the buffer.\n */\n pop() {\n if (this.isEmpty()) {\n throw new RangeError('Ring buffer is empty.');\n }\n this.end = this.wrap(this.end - 1);\n const result = this.get(this.end);\n this.set(this.end, undefined);\n return result;\n }\n /**\n * Adds an item to the beginning of the buffer.\n */\n unshift(value) {\n if (this.isFull()) {\n throw new RangeError('Ring buffer is full.');\n }\n this.begin = this.wrap(this.begin - 1);\n this.set(this.begin, value);\n }\n /**\n * Removes and returns the 
first item in the buffer.\n */\n shift() {\n if (this.isEmpty()) {\n throw new RangeError('Ring buffer is empty.');\n }\n const result = this.get(this.begin);\n this.set(this.begin, undefined);\n this.begin = this.wrap(this.begin + 1);\n return result;\n }\n /**\n * Removes and returns a specific item in the buffer, and moves the last item\n * to the vacated slot. This is useful for implementing a shuffling stream.\n * Note that this operation necessarily scrambles the original order.\n *\n * @param relativeIndex: the index of the item to remove, relative to the\n * first item in the buffer (e.g., hiding the ring nature of the underlying\n * storage).\n */\n shuffleExcise(relativeIndex) {\n if (this.isEmpty()) {\n throw new RangeError('Ring buffer is empty.');\n }\n const index = this.wrap(this.begin + relativeIndex);\n const result = this.get(index);\n this.set(index, this.pop());\n return result;\n }\n}\n//# sourceMappingURL=ring_buffer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { RingBuffer } from './ring_buffer';\nexport class GrowingRingBuffer extends RingBuffer {\n /**\n * Constructs a `GrowingRingBuffer`.\n */\n constructor() {\n super(GrowingRingBuffer.INITIAL_CAPACITY);\n }\n isFull() {\n return false;\n }\n push(value) {\n if (super.isFull()) {\n this.expand();\n }\n super.push(value);\n }\n unshift(value) {\n if (super.isFull()) {\n this.expand();\n }\n super.unshift(value);\n }\n /**\n * Doubles the capacity of the buffer.\n */\n expand() {\n const newCapacity = this.capacity * 2;\n const newData = new Array(newCapacity);\n const len = this.length();\n // Rotate the buffer to start at index 0 again, since we can't just\n // allocate more space at the end.\n for (let i = 0; i < len; i++) {\n newData[i] = this.get(this.wrap(this.begin + i));\n }\n this.data = newData;\n this.capacity = newCapacity;\n this.doubledCapacity = 2 * this.capacity;\n this.begin = 0;\n this.end = len;\n }\n}\nGrowingRingBuffer.INITIAL_CAPACITY = 32;\n//# sourceMappingURL=growing_ring_buffer.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
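A short usage sketch for the ring buffers defined above; both classes are internal utilities, so the import paths are assumptions.

```js
// Sketch only: fixed-capacity FIFO/LIFO access, plus the growing variant.
import {RingBuffer} from './util/ring_buffer';
import {GrowingRingBuffer} from './util/growing_ring_buffer';

const rb = new RingBuffer(3);        // capacity of 3 items
rb.pushAll(['a', 'b', 'c']);
console.log(rb.isFull());            // true
console.log(rb.shift());             // 'a'  (FIFO from the front)
console.log(rb.pop());               // 'c'  (LIFO from the back)

const grb = new GrowingRingBuffer(); // starts at capacity 32 and doubles as needed
for (let i = 0; i < 100; i++) grb.push(i);
console.log(grb.length());           // 100
```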
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport * as tf from '@tensorflow/tfjs-core';\nimport * as seedrandom from 'seedrandom';\nimport { deepClone } from '../util/deep_clone';\nimport { deepMapAndAwaitAll, deepZip, zipToList } from '../util/deep_map';\nimport { GrowingRingBuffer } from '../util/growing_ring_buffer';\nimport { RingBuffer } from '../util/ring_buffer';\n// Here we implement a simple asynchronous iterator.\n// This lets us avoid using either third-party stream libraries or\n// recent TypeScript language support requiring polyfills.\n/**\n * Create a `LazyIterator` from an array of items.\n */\nexport function iteratorFromItems(items) {\n return new ArrayIterator(items);\n}\n/**\n * Create a `LazyIterator` of incrementing integers.\n */\nexport function iteratorFromIncrementing(start) {\n let i = start;\n return iteratorFromFunction(() => ({ value: i++, done: false }));\n}\n/**\n * Create a `LazyIterator` from a function.\n *\n * ```js\n * let i = -1;\n * const func = () =>\n * ++i < 5 ? {value: i, done: false} : {value: null, done: true};\n * const iter = tf.data.iteratorFromFunction(func);\n * await iter.forEachAsync(e => console.log(e));\n * ```\n *\n * @param func A function that produces data on each call.\n */\nexport function iteratorFromFunction(func) {\n return new FunctionCallIterator(func);\n}\n/**\n * Create a `LazyIterator` by concatenating underlying streams, which are\n * themselves provided as a stream.\n *\n * This can also be thought of as a \"stream flatten\" operation.\n *\n * @param baseIterators A stream of streams to be concatenated.\n * @param baseErrorHandler An optional function that can intercept `Error`s\n * raised during a `next()` call on the base stream. This function can decide\n * whether the error should be propagated, whether the error should be\n * ignored, or whether the base stream should be terminated.\n */\nexport function iteratorFromConcatenated(baseIterators, baseErrorHandler) {\n return new ChainedIterator(baseIterators, baseErrorHandler);\n}\n/**\n * Create a `LazyIterator` by concatenating streams produced by calling a\n * stream-generating function a given number of times.\n *\n * Since a `LazyIterator` is read-once, it cannot be repeated, but this\n * function can be used to achieve a similar effect:\n *\n * LazyIterator.ofConcatenatedFunction(() => new MyIterator(), 6);\n *\n * @param iteratorFunc: A function that produces a new stream on each call.\n * @param count: The number of times to call the function.\n * @param baseErrorHandler An optional function that can intercept `Error`s\n * raised during a `next()` call on the base stream. 
This function can decide\n * whether the error should be propagated, whether the error should be\n * ignored, or whether the base stream should be terminated.\n */\nexport function iteratorFromConcatenatedFunction(iteratorFunc, count, baseErrorHandler) {\n return iteratorFromConcatenated(iteratorFromFunction(iteratorFunc).take(count), baseErrorHandler);\n}\n/**\n * Create a `LazyIterator` by zipping together an array, dict, or nested\n * structure of `LazyIterator`s (and perhaps additional constants).\n *\n * The underlying streams must provide elements in a consistent order such\n * that they correspond.\n *\n * Typically, the underlying streams should have the same number of\n * elements. If they do not, the behavior is determined by the\n * `mismatchMode` argument.\n *\n * The nested structure of the `iterators` argument determines the\n * structure of elements in the resulting iterator.\n *\n * @param iterators: An array or object containing LazyIterators at the\n * leaves.\n * @param mismatchMode: Determines what to do when one underlying iterator\n * is exhausted before the others. `ZipMismatchMode.FAIL` (the default)\n * causes an error to be thrown in this case. `ZipMismatchMode.SHORTEST`\n * causes the zipped iterator to terminate with the furst underlying\n * streams, so elements remaining on the longer streams are ignored.\n * `ZipMismatchMode.LONGEST` causes the zipped stream to continue, filling\n * in nulls for the exhausted streams, until all streams are exhausted.\n */\nexport function iteratorFromZipped(iterators, mismatchMode = ZipMismatchMode.FAIL) {\n return new ZipIterator(iterators, mismatchMode);\n}\n/**\n * An asynchronous iterator, providing lazy access to a potentially\n * unbounded stream of elements.\n *\n * Iterator can be obtained from a dataset:\n * `const iter = await dataset.iterator();`\n */\nexport class LazyIterator {\n /**\n * Collect all remaining elements of a bounded stream into an array.\n * Obviously this will succeed only for small streams that fit in memory.\n * Useful for testing.\n *\n * @returns A Promise for an array of stream elements, which will resolve\n * when the stream is exhausted.\n */\n async toArray() {\n const result = [];\n let x = await this.next();\n while (!x.done) {\n result.push(x.value);\n x = await this.next();\n }\n return result;\n }\n /**\n * Collect all elements of this dataset into an array with prefetching 100\n * elements. This is useful for testing, because the prefetch changes the\n * order in which the Promises are resolved along the processing pipeline.\n * This may help expose bugs where results are dependent on the order of\n * Promise resolution rather than on the logical order of the stream (i.e.,\n * due to hidden mutable state).\n *\n * @returns A Promise for an array of stream elements, which will resolve\n * when the stream is exhausted.\n */\n async toArrayForTest() {\n const stream = this.prefetch(100);\n const result = [];\n let x = await stream.next();\n while (!x.done) {\n result.push(x.value);\n x = await stream.next();\n }\n return result;\n }\n /**\n * Draw items from the stream until it is exhausted.\n *\n * This can be useful when the stream has side effects but no output. 
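An illustrative sketch of `iteratorFromZipped` over two item iterators; the import path is an assumption, since these helpers are defined in the lazy_iterator module shown here.

```js
// Sketch only: zipped elements mirror the nested structure of the argument.
import {iteratorFromItems, iteratorFromZipped} from './iterators/lazy_iterator';

const xs = iteratorFromItems([1, 2, 3]);
const ys = iteratorFromItems(['a', 'b', 'c']);

const zipped = iteratorFromZipped({x: xs, y: ys});
await zipped.forEachAsync(e => console.log(e));
// {x: 1, y: 'a'}, {x: 2, y: 'b'}, {x: 3, y: 'c'}
```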
In\n * that case, calling this function guarantees that the stream will be\n * fully processed.\n */\n async resolveFully() {\n let x = await this.next();\n while (!x.done) {\n x = await this.next();\n }\n }\n /**\n * Draw items from the stream until it is exhausted, or a predicate fails.\n *\n * This can be useful when the stream has side effects but no output. In\n * that case, calling this function guarantees that the stream will be\n * fully processed.\n */\n async resolveWhile(predicate) {\n let x = await this.next();\n let shouldContinue = predicate(x.value);\n while ((!x.done) && shouldContinue) {\n x = await this.next();\n shouldContinue = predicate(x.value);\n }\n }\n /**\n * Handles errors thrown on this stream using a provided handler function.\n *\n * @param handler A function that handles any `Error` thrown during a `next()`\n * call and returns true if the stream should continue (dropping the failed\n * call) or false if the stream should quietly terminate. If the handler\n * itself throws (or rethrows) an `Error`, that will be propagated.\n *\n * @returns A `LazyIterator` of elements passed through from upstream,\n * possibly filtering or terminating on upstream `next()` calls that\n * throw an `Error`.\n */\n handleErrors(handler) {\n return new ErrorHandlingLazyIterator(this, handler);\n }\n // TODO(soergel): Implement reduce() etc.\n /**\n * Filters this stream according to `predicate`.\n *\n * @param predicate A function mapping a stream element to a boolean or a\n * `Promise` for one.\n *\n * @returns A `LazyIterator` of elements for which the predicate was true.\n */\n filter(predicate) {\n return new FilterIterator(this, predicate);\n }\n /**\n * Maps this stream through a 1-to-1 transform.\n *\n * @param transform A function mapping a stream element to a transformed\n * element.\n *\n * @returns A `LazyIterator` of transformed elements.\n */\n map(transform) {\n return new MapIterator(this, transform);\n }\n /**\n * Maps this stream through an async 1-to-1 transform.\n *\n * @param transform A function mapping a stream element to a `Promise` for a\n * transformed stream element.\n *\n * @returns A `LazyIterator` of transformed elements.\n */\n mapAsync(transform) {\n return new AsyncMapIterator(this, transform);\n }\n /**\n * Maps this stream through a 1-to-1 transform, forcing serial execution.\n *\n * @param transform A function mapping a stream element to a transformed\n * element.\n *\n * @returns A `LazyIterator` of transformed elements.\n */\n serialMapAsync(transform) {\n return new AsyncMapIterator(this, transform).serial();\n }\n /**\n * Maps this stream through a 1-to-many transform.\n *\n * @param transform A function mapping a stream element to an array of\n * transformed elements.\n *\n * @returns A `DataStream` of transformed elements.\n */\n flatmap(transform) {\n return new FlatmapIterator(this, transform);\n }\n /**\n * Apply a function to every element of the stream.\n *\n * @param f A function to apply to each stream element.\n */\n async forEachAsync(f) {\n return this.map(f).resolveFully();\n }\n /**\n * Apply a function to every element of the stream, forcing serial execution.\n *\n * @param f A function to apply to each stream element. 
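A small chaining sketch for the `filter` and `map` combinators documented above, reusing the `iteratorFromItems` helper imported in the previous sketch.

```js
// Sketch only: keep even values, transform them, then collect the result.
const iter = iteratorFromItems([1, 2, 3, 4, 5, 6])
    .filter(x => x % 2 === 0)   // keep even elements
    .map(x => x * 10);          // transform each remaining element
console.log(await iter.toArray());  // [20, 40, 60]
```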
Should return 'true'\n * to indicate that the stream should continue, or 'false' to cause it to\n * terminate.\n */\n async serialForEach(f) {\n return this.serialMapAsync(f).resolveWhile(x => (x === true));\n }\n /**\n * Groups elements into batches, represented as arrays of elements.\n *\n * We can think of the elements of this iterator as 'rows' (even if they are\n * nested structures). By the same token, consecutive values for a given\n * key within the elements form a 'column'. This matches the usual sense of\n * 'row' and 'column' when processing tabular data (e.g., parsing a CSV).\n *\n * Thus, \"Row-major\" means that the resulting batch is simply a collection of\n * rows: `[row1, row2, row3, ...]`. This is contrast to the column-major\n * form, which is needed for vectorized computation.\n *\n * @param batchSize The number of elements desired per batch.\n * @param smallLastBatch Whether to emit the final batch when it has fewer\n * than batchSize elements. Default true.\n * @returns A `LazyIterator` of batches of elements, represented as arrays\n * of the original element type.\n */\n rowMajorBatch(batchSize, smallLastBatch = true) {\n return new RowMajorBatchIterator(this, batchSize, smallLastBatch);\n }\n /**\n * Groups elements into batches, represented in column-major form.\n *\n * We can think of the elements of this iterator as 'rows' (even if they are\n * nested structures). By the same token, consecutive values for a given\n * key within the elements form a 'column'. This matches the usual sense of\n * 'row' and 'column' when processing tabular data (e.g., parsing a CSV).\n *\n * Thus, \"column-major\" means that the resulting batch is a (potentially\n * nested) structure representing the columns. Each column entry, then,\n * contains a collection of the values found in that column for a range of\n * input elements. This representation allows for vectorized computation, in\n * contrast to the row-major form.\n *\n * The inputs should all have the same nested structure (i.e., of arrays and\n * dicts). The result is a single object with the same nested structure,\n * where the leaves are arrays collecting the values of the inputs at that\n * location (or, optionally, the result of a custom function applied to those\n * arrays).\n *\n * @param batchSize The number of elements desired per batch.\n * @param smallLastBatch Whether to emit the final batch when it has fewer\n * than batchSize elements. Default true.\n * @param zipFn: (optional) A function that expects an array of elements at a\n * single node of the object tree, and returns a `DeepMapResult`. The\n * `DeepMapResult` either provides a result value for that node (i.e.,\n * representing the subtree), or indicates that the node should be processed\n * recursively. 
The default zipFn recurses as far as possible and places\n * arrays at the leaves.\n * @returns A `LazyIterator` of batches of elements, represented as an object\n * with collections at the leaves.\n */\n columnMajorBatch(batchSize, smallLastBatch = true, \n // tslint:disable-next-line:no-any\n zipFn = zipToList) {\n // First collect the desired number of input elements as a row-major batch.\n const rowBatches = this.rowMajorBatch(batchSize, smallLastBatch);\n // Now 'rotate' or 'pivot' the data, collecting all values from each column\n // in the batch (i.e., for each key within the elements) into an array.\n return rowBatches.map(x => deepZip(x, zipFn));\n }\n /**\n * Concatenate this `LazyIterator` with another.\n *\n * @param iterator A `LazyIterator` to be concatenated onto this one.\n * @param baseErrorHandler An optional function that can intercept `Error`s\n * raised during a `next()` call on the base stream. This function can\n * decide whether the error should be propagated, whether the error should\n * be ignored, or whether the base stream should be terminated.\n * @returns A `LazyIterator`.\n */\n concatenate(iterator, baseErrorHandler) {\n return new ChainedIterator(iteratorFromItems([this, iterator]), baseErrorHandler);\n }\n /**\n * Limits this stream to return at most `count` items.\n *\n * @param count The maximum number of items to provide from the stream. If\n * a negative or undefined value is given, the entire stream is returned\n * unaltered.\n */\n take(count) {\n if (count < 0 || count == null) {\n return this;\n }\n return new TakeIterator(this, count);\n }\n /**\n * Skips the first `count` items in this stream.\n *\n * @param count The number of items to skip. If a negative or undefined\n * value is given, the entire stream is returned unaltered.\n */\n skip(count) {\n if (count < 0 || count == null) {\n return this;\n }\n return new SkipIterator(this, count);\n }\n /**\n * Prefetch the first `bufferSize` items in this stream.\n *\n * Note this prefetches Promises, but makes no guarantees about when those\n * Promises resolve.\n *\n * @param bufferSize: An integer specifying the number of elements to be\n * prefetched.\n */\n prefetch(bufferSize) {\n return new PrefetchIterator(this, bufferSize);\n }\n // TODO(soergel): deep sharded shuffle, where supported\n /**\n * Randomly shuffles the elements of this stream.\n *\n * @param bufferSize: An integer specifying the number of elements from\n * this stream from which the new stream will sample.\n * @param seed: (Optional.) An integer specifying the random seed that\n * will be used to create the distribution.\n */\n shuffle(windowSize, seed) {\n return new ShuffleIterator(this, windowSize, seed);\n }\n /**\n * Force an iterator to execute serially: each next() call will await the\n * prior one, so that they cannot execute concurrently.\n */\n serial() {\n return new SerialIterator(this);\n }\n}\n// ============================================================================\n// The following private classes serve to implement the chainable methods\n// on LazyIterator. 
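A sketch combining the ordering-related combinators (`skip`, `take`, `shuffle`, `prefetch`) documented above; `iteratorFromIncrementing` is the factory defined earlier in this module, and the buffer sizes and seed below are arbitrary illustration values.

```js
// Sketch only: window a slice out of an unbounded stream and shuffle it.
const base = iteratorFromIncrementing(0);  // 0, 1, 2, ...
const windowed = base
    .skip(5)           // drop 0..4
    .take(10)          // then yield at most 10 items: 5..14
    .shuffle(4, '42')  // sliding-window shuffle, buffer size 4, fixed seed
    .prefetch(2);      // keep up to 2 pending next() promises in flight
console.log(await windowed.toArray());  // the values 5..14 in shuffled order
```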
Unfortunately they can't be placed in separate files,\n// due to resulting trouble with circular imports.\n// ============================================================================\n// Iterators that just extend LazyIterator directly\n// ============================================================================\nclass ArrayIterator extends LazyIterator {\n constructor(items) {\n super();\n this.items = items;\n this.trav = 0;\n }\n summary() {\n return `Array of ${this.items.length} items`;\n }\n async next() {\n if (this.trav >= this.items.length) {\n return { value: null, done: true };\n }\n const item = this.items[this.trav];\n this.trav++;\n return { value: deepClone(item), done: false };\n }\n}\nclass FunctionCallIterator extends LazyIterator {\n constructor(nextFn) {\n super();\n this.nextFn = nextFn;\n }\n summary() {\n return `Function call`;\n }\n async next() {\n try {\n return this.nextFn();\n }\n catch (e) {\n // Modify the error message but leave the stack trace intact\n e.message =\n `Error thrown while iterating through a dataset: ${e.message}`;\n throw e;\n }\n }\n}\nclass SerialIterator extends LazyIterator {\n constructor(upstream) {\n super();\n this.upstream = upstream;\n this.lastRead = Promise.resolve({ value: null, done: false });\n }\n summary() {\n return `${this.upstream.summary()} -> Serial`;\n }\n async next() {\n // This sets this.lastRead to a new Promise right away, as opposed to\n // saying `await this.lastRead; this.lastRead = this.serialNext();` which\n // would not work because this.nextRead would be updated only after the\n // promise resolves.\n this.lastRead = this.lastRead.then(() => this.serialNext());\n return this.lastRead;\n }\n async serialNext() {\n return this.upstream.next();\n }\n}\nclass SkipIterator extends LazyIterator {\n constructor(upstream, maxCount) {\n super();\n this.upstream = upstream;\n this.maxCount = maxCount;\n // Local state that should not be clobbered by out-of-order execution.\n this.count = 0;\n this.lastRead = Promise.resolve({ value: null, done: false });\n }\n summary() {\n return `${this.upstream.summary()} -> Skip`;\n }\n async next() {\n // This sets this.lastRead to a new Promise right away, as opposed to\n // saying `await this.lastRead; this.lastRead = this.serialNext();` which\n // would not work because this.nextRead would be updated only after the\n // promise resolves.\n this.lastRead = this.lastRead.then(() => this.serialNext());\n return this.lastRead;\n }\n async serialNext() {\n // TODO(soergel): consider tradeoffs of reading in parallel, eg.\n // collecting next() promises in an Array and then waiting for\n // Promise.all() of those. Benefit: pseudo-parallel execution. 
Drawback:\n // maybe delayed GC.\n while (this.count++ < this.maxCount) {\n const skipped = await this.upstream.next();\n // short-circuit if upstream is already empty\n if (skipped.done) {\n return skipped;\n }\n tf.dispose(skipped.value);\n }\n return this.upstream.next();\n }\n}\nclass TakeIterator extends LazyIterator {\n constructor(upstream, maxCount) {\n super();\n this.upstream = upstream;\n this.maxCount = maxCount;\n this.count = 0;\n }\n summary() {\n return `${this.upstream.summary()} -> Take`;\n }\n async next() {\n if (this.count++ >= this.maxCount) {\n return { value: null, done: true };\n }\n return this.upstream.next();\n }\n}\n// Note this batch just groups items into row-wise element arrays.\n// Rotating these to a column-wise representation happens only at the dataset\n// level.\nclass RowMajorBatchIterator extends LazyIterator {\n constructor(upstream, batchSize, enableSmallLastBatch = true) {\n super();\n this.upstream = upstream;\n this.batchSize = batchSize;\n this.enableSmallLastBatch = enableSmallLastBatch;\n this.lastRead = Promise.resolve({ value: null, done: false });\n }\n summary() {\n return `${this.upstream.summary()} -> RowMajorBatch`;\n }\n async next() {\n // This sets this.lastRead to a new Promise right away, as opposed to\n // saying `await this.lastRead; this.lastRead = this.serialNext();` which\n // would not work because this.nextRead would be updated only after the\n // promise resolves.\n this.lastRead = this.lastRead.then(() => this.serialNext());\n return this.lastRead;\n }\n async serialNext() {\n const batch = [];\n while (batch.length < this.batchSize) {\n const item = await this.upstream.next();\n if (item.done) {\n if (this.enableSmallLastBatch && batch.length > 0) {\n return { value: batch, done: false };\n }\n return { value: null, done: true };\n }\n batch.push(item.value);\n }\n return { value: batch, done: false };\n }\n}\nclass FilterIterator extends LazyIterator {\n constructor(upstream, predicate) {\n super();\n this.upstream = upstream;\n this.predicate = predicate;\n this.lastRead = Promise.resolve({ value: null, done: false });\n }\n summary() {\n return `${this.upstream.summary()} -> Filter`;\n }\n async next() {\n // This sets this.lastRead to a new Promise right away, as opposed to\n // saying `await this.lastRead; this.lastRead = this.serialNext();` which\n // would not work because this.nextRead would be updated only after the\n // promise resolves.\n this.lastRead = this.lastRead.then(() => this.serialNext());\n return this.lastRead;\n }\n async serialNext() {\n while (true) {\n const item = await this.upstream.next();\n if (item.done || this.predicate(item.value)) {\n return item;\n }\n tf.dispose(item.value);\n }\n }\n}\nclass MapIterator extends LazyIterator {\n constructor(upstream, transform) {\n super();\n this.upstream = upstream;\n this.transform = transform;\n }\n summary() {\n return `${this.upstream.summary()} -> Map`;\n }\n async next() {\n const item = await this.upstream.next();\n if (item.done) {\n return { value: null, done: true };\n }\n const inputTensors = tf.tensor_util.getTensorsInContainer(item.value);\n // Careful: the transform may mutate the item in place.\n // That's why we have to remember the input Tensors above, and then\n // below dispose only those that were not passed through to the output.\n // Note too that the transform function is responsible for tidying\n // any intermediate Tensors. 
Here we are concerned only about the\n // inputs.\n const mapped = this.transform(item.value);\n const outputTensors = tf.tensor_util.getTensorsInContainer(mapped);\n // TODO(soergel) faster intersection\n // TODO(soergel) move to tf.disposeExcept(in, out)?\n for (const t of inputTensors) {\n if (!tf.tensor_util.isTensorInList(t, outputTensors)) {\n t.dispose();\n }\n }\n return { value: mapped, done: false };\n }\n}\nclass ErrorHandlingLazyIterator extends LazyIterator {\n constructor(upstream, handler) {\n super();\n this.upstream = upstream;\n this.handler = handler;\n this.count = 0;\n this.lastRead = Promise.resolve({ value: null, done: false });\n }\n summary() {\n return `${this.upstream.summary()} -> handleErrors`;\n }\n async next() {\n // This sets this.lastRead to a new Promise right away, as opposed to\n // saying `await this.lastRead; this.lastRead = this.serialNext();` which\n // would not work because this.nextRead would be updated only after the\n // promise resolves.\n this.lastRead = this.lastRead.then(() => this.serialNext());\n return this.lastRead;\n }\n async serialNext() {\n while (true) {\n try {\n return await this.upstream.next();\n }\n catch (e) {\n if (!this.handler(e)) {\n return { value: null, done: true };\n }\n // If the handler returns true, loop and fetch the next upstream item.\n // If the upstream iterator throws an endless stream of errors, and if\n // the handler says to ignore them, then we loop forever here. That is\n // the correct behavior-- it's up to the handler to decide when to stop.\n }\n }\n }\n}\nclass AsyncMapIterator extends LazyIterator {\n constructor(upstream, transform) {\n super();\n this.upstream = upstream;\n this.transform = transform;\n }\n summary() {\n return `${this.upstream.summary()} -> AsyncMap`;\n }\n async next() {\n const item = await this.upstream.next();\n if (item.done) {\n return { value: null, done: true };\n }\n const inputTensors = tf.tensor_util.getTensorsInContainer(item.value);\n // Careful: the transform may mutate the item in place.\n // That's why we have to remember the input Tensors above, and then\n // below dispose only those that were not passed through to the output.\n // Note too that the transform function is responsible for tidying\n // any intermediate Tensors. Here we are concerned only about the\n // inputs.\n const mapped = await this.transform(item.value);\n const outputTensors = tf.tensor_util.getTensorsInContainer(mapped);\n // TODO(soergel) faster intersection\n // TODO(soergel) move to tf.disposeExcept(in, out)?\n for (const t of inputTensors) {\n if (!tf.tensor_util.isTensorInList(t, outputTensors)) {\n t.dispose();\n }\n }\n return { value: mapped, done: false };\n }\n}\n// Iterators that maintain a queue of pending items\n// ============================================================================\n/**\n * A base class for transforming streams that operate by maintaining an\n * output queue of elements that are ready to return via next(). 
This is\n * commonly required when the transformation is 1-to-many: A call to next()\n * may trigger a call to the underlying stream, which will produce many\n * mapped elements of this stream-- of which we need to return only one, so\n * we have to queue the rest.\n */\nexport class OneToManyIterator extends LazyIterator {\n constructor() {\n super();\n this.outputQueue = new GrowingRingBuffer();\n this.lastRead = Promise.resolve({ value: null, done: false });\n }\n async next() {\n // This sets this.lastRead to a new Promise right away, as opposed to\n // saying `await this.lastRead; this.lastRead = this.serialNext();` which\n // would not work because this.nextRead would be updated only after the\n // promise resolves.\n this.lastRead = this.lastRead.then(() => this.serialNext());\n return this.lastRead;\n }\n async serialNext() {\n // Fetch so that the queue contains at least one item if possible.\n // If the upstream source is exhausted, AND there are no items left in\n // the output queue, then this stream is also exhausted.\n while (this.outputQueue.length() === 0) {\n // TODO(soergel): consider parallel reads.\n if (!await this.pump()) {\n return { value: null, done: true };\n }\n }\n return { value: this.outputQueue.shift(), done: false };\n }\n}\nclass FlatmapIterator extends OneToManyIterator {\n constructor(upstream, transform) {\n super();\n this.upstream = upstream;\n this.transform = transform;\n }\n summary() {\n return `${this.upstream.summary()} -> Flatmap`;\n }\n async pump() {\n const item = await this.upstream.next();\n if (item.done) {\n return false;\n }\n const inputTensors = tf.tensor_util.getTensorsInContainer(item.value);\n // Careful: the transform may mutate the item in place.\n // that's why we have to remember the input Tensors above, and then\n // below dispose only those that were not passed through to the output.\n // Note too that the transform function is responsible for tidying any\n // intermediate Tensors. Here we are concerned only about the inputs.\n const mappedArray = this.transform(item.value);\n const outputTensors = tf.tensor_util.getTensorsInContainer(mappedArray);\n this.outputQueue.pushAll(mappedArray);\n // TODO(soergel) faster intersection, and deduplicate outputTensors\n // TODO(soergel) move to tf.disposeExcept(in, out)?\n for (const t of inputTensors) {\n if (!tf.tensor_util.isTensorInList(t, outputTensors)) {\n t.dispose();\n }\n }\n return true;\n }\n}\n/**\n * Provides a `LazyIterator` that concatenates a stream of underlying\n * streams.\n *\n * Doing this in a concurrency-safe way requires some trickery. 
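A sketch of the 1-to-many `flatmap` transform implemented by `FlatmapIterator` above, again reusing the `iteratorFromItems` helper.

```js
// Sketch only: each input element expands into two output elements.
const flat = iteratorFromItems([1, 2, 3]).flatmap(x => [x, x * 10]);
console.log(await flat.toArray());  // [1, 10, 2, 20, 3, 30]
```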
In\n * particular, we want this stream to return the elements from the\n * underlying streams in the correct order according to when next() was\n * called, even if the resulting Promises resolve in a different order.\n */\nexport class ChainedIterator extends LazyIterator {\n constructor(iterators, baseErrorHandler) {\n super();\n this.baseErrorHandler = baseErrorHandler;\n // Strict Promise execution order:\n // a next() call may not even begin until the previous one completes.\n this.lastRead = null;\n // Local state that should not be clobbered by out-of-order execution.\n this.iterator = null;\n this.moreIterators = iterators;\n }\n summary() {\n const upstreamSummaries = 'TODO: fill in upstream of chained summaries';\n return `${upstreamSummaries} -> Chained`;\n }\n async next() {\n this.lastRead = this.readFromChain(this.lastRead);\n return this.lastRead;\n }\n async readFromChain(lastRead) {\n // Must await on the previous read since the previous read may have advanced\n // the stream of streams, from which we need to read.\n // This is unfortunate since we can't parallelize reads. Which means\n // prefetching of chained streams is a no-op.\n // One solution is to prefetch immediately upstream of this.\n await lastRead;\n if (this.iterator == null) {\n const iteratorResult = await this.moreIterators.next();\n if (iteratorResult.done) {\n // No more streams to stream from.\n return { value: null, done: true };\n }\n this.iterator = iteratorResult.value;\n if (this.baseErrorHandler != null) {\n this.iterator = this.iterator.handleErrors(this.baseErrorHandler);\n }\n }\n const itemResult = await this.iterator.next();\n if (itemResult.done) {\n this.iterator = null;\n return this.readFromChain(lastRead);\n }\n return itemResult;\n }\n}\nexport var ZipMismatchMode;\n(function (ZipMismatchMode) {\n ZipMismatchMode[ZipMismatchMode[\"FAIL\"] = 0] = \"FAIL\";\n ZipMismatchMode[ZipMismatchMode[\"SHORTEST\"] = 1] = \"SHORTEST\";\n ZipMismatchMode[ZipMismatchMode[\"LONGEST\"] = 2] = \"LONGEST\"; // use nulls for exhausted streams; use up the longest stream.\n})(ZipMismatchMode || (ZipMismatchMode = {}));\n/**\n * Provides a `LazyIterator` that zips together an array, dict, or nested\n * structure of `LazyIterator`s (and perhaps additional constants).\n *\n * The underlying streams must provide elements in a consistent order such\n * that they correspond.\n *\n * Typically, the underlying streams should have the same number of\n * elements. If they do not, the behavior is determined by the\n * `mismatchMode` argument.\n *\n * The nested structure of the `iterators` argument determines the\n * structure of elements in the resulting iterator.\n *\n * Doing this in a concurrency-safe way requires some trickery. In\n * particular, we want this stream to return the elements from the\n * underlying streams in the correct order according to when next() was\n * called, even if the resulting Promises resolve in a different order.\n *\n * @param iterators: An array or object containing LazyIterators at the\n * leaves.\n * @param mismatchMode: Determines what to do when one underlying iterator\n * is exhausted before the others. `ZipMismatchMode.FAIL` (the default)\n * causes an error to be thrown in this case. 
`ZipMismatchMode.SHORTEST`\n * causes the zipped iterator to terminate with the furst underlying\n * streams, so elements remaining on the longer streams are ignored.\n * `ZipMismatchMode.LONGEST` causes the zipped stream to continue, filling\n * in nulls for the exhausted streams, until all streams are exhausted.\n */\nclass ZipIterator extends LazyIterator {\n constructor(iterators, mismatchMode = ZipMismatchMode.FAIL) {\n super();\n this.iterators = iterators;\n this.mismatchMode = mismatchMode;\n this.count = 0;\n this.currentPromise = null;\n }\n summary() {\n const upstreamSummaries = 'TODO: fill in upstream of zip summaries';\n return `{${upstreamSummaries}} -> Zip`;\n }\n async nextState(afterState) {\n // This chaining ensures that the underlying next() are not even called\n // before the previous ones have resolved.\n await afterState;\n // Collect underlying iterator \"done\" signals as a side effect in\n // getNext()\n let numIterators = 0;\n let iteratorsDone = 0;\n function getNext(container) {\n if (container instanceof LazyIterator) {\n const result = container.next();\n return {\n value: result.then(x => {\n numIterators++;\n if (x.done) {\n iteratorsDone++;\n }\n return x.value;\n }),\n recurse: false\n };\n }\n else {\n return { value: null, recurse: true };\n }\n }\n const mapped = await deepMapAndAwaitAll(this.iterators, getNext);\n if (numIterators === iteratorsDone) {\n // The streams have all ended.\n return { value: null, done: true };\n }\n if (iteratorsDone > 0) {\n switch (this.mismatchMode) {\n case ZipMismatchMode.FAIL:\n throw new Error('Zipped streams should have the same length. ' +\n `Mismatched at element ${this.count}.`);\n case ZipMismatchMode.SHORTEST:\n return { value: null, done: true };\n case ZipMismatchMode.LONGEST:\n default:\n // Continue. The exhausted streams already produced value: null.\n }\n }\n this.count++;\n return { value: mapped, done: false };\n }\n async next() {\n this.currentPromise = this.nextState(this.currentPromise);\n return this.currentPromise;\n }\n}\n// Iterators that maintain a ring buffer of pending promises\n// ============================================================================\n/**\n * A stream that prefetches a given number of items from an upstream source,\n * returning them in FIFO order.\n *\n * Note this prefetches Promises, but makes no guarantees about when those\n * Promises resolve.\n */\nexport class PrefetchIterator extends LazyIterator {\n constructor(upstream, bufferSize) {\n super();\n this.upstream = upstream;\n this.bufferSize = bufferSize;\n this.buffer = new RingBuffer(bufferSize);\n }\n summary() {\n return `${this.upstream.summary()} -> Prefetch`;\n }\n /**\n * Refill the prefetch buffer. Returns only after the buffer is full, or\n * the upstream source is exhausted.\n */\n refill() {\n while (!this.buffer.isFull()) {\n const v = this.upstream.next();\n this.buffer.push(v);\n }\n }\n next() {\n this.refill();\n // This shift will never throw an error because the buffer is always\n // full after a refill. If the stream is exhausted, the buffer will be\n // full of Promises that will resolve to the end-of-stream signal.\n return this.buffer.shift();\n }\n}\n/**\n * A stream that performs a sliding-window random shuffle on an upstream\n * source. This is like a `PrefetchIterator` except that the items are\n * returned in randomized order. 
Mixing naturally improves as the buffer\n * size increases.\n */\nexport class ShuffleIterator extends PrefetchIterator {\n constructor(upstream, windowSize, seed) {\n super(upstream, windowSize);\n this.upstream = upstream;\n this.windowSize = windowSize;\n // Local state that should not be clobbered by out-of-order execution.\n this.upstreamExhausted = false;\n this.random = seedrandom.alea(seed || tf.util.now().toString());\n this.lastRead = Promise.resolve({ value: null, done: false });\n }\n async next() {\n // This sets this.lastRead to a new Promise right away, as opposed to\n // saying `await this.lastRead; this.lastRead = this.serialNext();` which\n // would not work because this.nextRead would be updated only after the\n // promise resolves.\n this.lastRead = this.lastRead.then(() => this.serialNext());\n return this.lastRead;\n }\n randomInt(max) {\n return Math.floor(this.random() * max);\n }\n chooseIndex() {\n return this.randomInt(this.buffer.length());\n }\n async serialNext() {\n // TODO(soergel): consider performance\n if (!this.upstreamExhausted) {\n this.refill();\n }\n while (!this.buffer.isEmpty()) {\n const chosenIndex = this.chooseIndex();\n const result = await this.buffer.shuffleExcise(chosenIndex);\n if (result.done) {\n this.upstreamExhausted = true;\n }\n else {\n this.refill();\n return result;\n }\n }\n return { value: null, done: true };\n }\n}\n//# sourceMappingURL=lazy_iterator.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport * as tf from '@tensorflow/tfjs-core';\nimport * as seedrandom from 'seedrandom';\nimport { iteratorFromConcatenated, iteratorFromFunction, iteratorFromItems, iteratorFromZipped, ZipMismatchMode } from './iterators/lazy_iterator';\nimport { canTensorify, deepMapAndAwaitAll, isIterable } from './util/deep_map';\n// TODO(soergel): consider vectorized operations within the pipeline.\n/**\n * Represents a potentially large list of independent data elements (typically\n * 'samples' or 'examples').\n *\n * A 'data example' may be a primitive, an array, a map from string keys to\n * values, or any nested structure of these.\n *\n * A `Dataset` represents an ordered collection of elements, together with a\n * chain of transformations to be performed on those elements. Each\n * transformation is a method of `Dataset` that returns another `Dataset`, so\n * these may be chained, e.g.\n * `const processedDataset = rawDataset.filter(...).map(...).batch(...)`.\n *\n * Data loading and transformation is done in a lazy, streaming fashion. The\n * dataset may be iterated over multiple times; each iteration starts the data\n * loading anew and recapitulates the transformations.\n *\n * A `Dataset` is typically processed as a stream of unbatched examples --i.e.,\n * its transformations are applied one example at a time. 
Batching produces a\n * new `Dataset` where each element is a batch. Batching should usually come\n * last in a pipeline, because data transformations are easier to express on a\n * per-example basis than on a per-batch basis.\n *\n * The following code examples are calling `await dataset.forEachAsync(...)` to\n * iterate once over the entire dataset in order to print out the data.\n *\n * @doc {heading: 'Data', subheading: 'Classes', namespace: 'data'}\n */\nexport class Dataset {\n constructor() {\n this.size = null;\n }\n // TODO(soergel): Make Datasets report whether repeated iterator() calls\n // produce the same result (e.g., reading from a file) or different results\n // (e.g., from the webcam). Currently we don't make this distinction but it\n // could be important for the user to know.\n // abstract isDeterministic(): boolean;\n /**\n * Groups elements into batches.\n *\n * It is assumed that each of the incoming dataset elements has the same\n * structure-- i.e. the same set of keys at each location in an object\n * hierarchy. For each key, the resulting `Dataset` provides a batched\n * element collecting all of the incoming values for that key.\n *\n * * Incoming primitives are grouped into a 1-D Tensor.\n * * Incoming Tensors are grouped into a new Tensor where the 0'th axis is\n * the batch dimension.\n * * Incoming arrays are converted to Tensor and then batched.\n * * A nested array is interpreted as an n-D Tensor, so the batched result\n * has n+1 dimensions.\n * * An array that cannot be converted to Tensor produces an error.\n *\n * If an array should not be batched as a unit, it should first be converted\n * to an object with integer keys.\n *\n * Here are a few examples:\n *\n * Batch a dataset of numbers:\n * ```js\n * const a = tf.data.array([1, 2, 3, 4, 5, 6, 7, 8]).batch(4);\n * await a.forEachAsync(e => e.print());\n * ```\n *\n * Batch a dataset of arrays:\n * ```js\n * const b = tf.data.array([[1], [2], [3], [4], [5], [6], [7], [8]]).batch(4);\n * await b.forEachAsync(e => e.print());\n * ```\n *\n * Batch a dataset of objects:\n * ```js\n * const c = tf.data.array([{a: 1, b: 11}, {a: 2, b: 12}, {a: 3, b: 13},\n * {a: 4, b: 14}, {a: 5, b: 15}, {a: 6, b: 16}, {a: 7, b: 17},\n * {a: 8, b: 18}]).batch(4);\n * await c.forEachAsync(e => {\n * console.log('{');\n * for(var key in e) {\n * console.log(key+':');\n * e[key].print();\n * }\n * console.log('}');\n * })\n * ```\n *\n * @param batchSize The number of elements desired per batch.\n * @param smallLastBatch Whether to emit the final batch when it has fewer\n * than batchSize elements. 
Default true.\n * @returns A `Dataset`, from which a stream of batches can be obtained.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n batch(batchSize, smallLastBatch = true) {\n const base = this;\n tf.util.assert(batchSize > 0, () => `batchSize needs to be positive, but it is\n ${batchSize}`);\n let size;\n if (this.size === Infinity || this.size == null) {\n // If the size of this dataset is infinity or null, the new size keeps the\n // same.\n size = this.size;\n }\n else if (smallLastBatch) {\n // If the size of this dataset is known and include small last batch, the\n // new size is full batch count plus last batch.\n size = Math.ceil(this.size / batchSize);\n }\n else {\n // If the size of this dataset is known and not include small last batch,\n // the new size is full batch count.\n size = Math.floor(this.size / batchSize);\n }\n return datasetFromIteratorFn(async () => {\n return (await base.iterator())\n .columnMajorBatch(batchSize, smallLastBatch, deepBatchConcat);\n }, size);\n }\n /**\n * Concatenates this `Dataset` with another.\n *\n * ```js\n * const a = tf.data.array([1, 2, 3]);\n * const b = tf.data.array([4, 5, 6]);\n * const c = a.concatenate(b);\n * await c.forEachAsync(e => console.log(e));\n * ```\n *\n * @param dataset A `Dataset` to be concatenated onto this one.\n * @returns A `Dataset`.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n concatenate(dataset) {\n const base = this;\n let size;\n if (this.size === Infinity || dataset.size === Infinity) {\n // If the size of any of these two dataset is infinity, new size is\n // infinity.\n size = Infinity;\n }\n else if (this.size != null && dataset.size != null) {\n // If the size of both datasets are known and not infinity, new size is\n // sum the size of these two datasets.\n size = this.size + dataset.size;\n }\n else {\n // If neither of these two datasets has infinite size and any of these two\n // datasets' size is null, the new size is null.\n size = null;\n }\n return datasetFromIteratorFn(async () => (await base.iterator()).concatenate(await dataset.iterator()), size);\n }\n /**\n * Filters this dataset according to `predicate`.\n *\n * ```js\n * const a = tf.data.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10])\n * .filter(x => x%2 === 0);\n * await a.forEachAsync(e => console.log(e));\n * ```\n *\n * @param predicate A function mapping a dataset element to a boolean or a\n * `Promise` for one.\n *\n * @returns A `Dataset` of elements for which the predicate was true.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n filter(predicate) {\n const base = this;\n let size;\n if (this.size === Infinity) {\n // If the size of this dataset is infinity, new size is infinity\n size = Infinity;\n }\n else {\n // If this dataset has limited elements, new size is null because it might\n // exhausted randomly.\n size = null;\n }\n return datasetFromIteratorFn(async () => {\n return (await base.iterator()).filter(x => tf.tidy(() => predicate(x)));\n }, size);\n }\n /**\n * Apply a function to every element of the dataset.\n *\n * After the function is applied to a dataset element, any Tensors contained\n * within that element are disposed.\n *\n * ```js\n * const a = tf.data.array([1, 2, 3]);\n * await a.forEachAsync(e => console.log(e));\n * ```\n *\n * @param f A function to apply to each dataset element.\n * @returns A `Promise` that resolves after all elements have been processed.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n async forEachAsync(f) {\n return (await 
this.iterator()).forEachAsync(f);\n }\n /**\n * Maps this dataset through a 1-to-1 transform.\n *\n * ```js\n * const a = tf.data.array([1, 2, 3]).map(x => x*x);\n * await a.forEachAsync(e => console.log(e));\n * ```\n *\n * @param transform A function mapping a dataset element to a transformed\n * dataset element.\n *\n * @returns A `Dataset` of transformed elements.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n map(transform) {\n const base = this;\n return datasetFromIteratorFn(async () => {\n return (await base.iterator()).map(x => tf.tidy(() => transform(x)));\n }, this.size);\n }\n /**\n * Maps this dataset through an async 1-to-1 transform.\n *\n * ```js\n * const a =\n * tf.data.array([1, 2, 3]).mapAsync(x => new Promise(function(resolve){\n * setTimeout(() => {\n * resolve(x * x);\n * }, Math.random()*1000 + 500);\n * }));\n * console.log(await a.toArray());\n * ```\n *\n * @param transform A function mapping a dataset element to a `Promise` for a\n * transformed dataset element. This transform is responsible for disposing\n * any intermediate `Tensor`s, i.e. by wrapping its computation in\n * `tf.tidy()`; that cannot be automated here (as it is in the synchronous\n * `map()` case).\n *\n * @returns A `Dataset` of transformed elements.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n mapAsync(transform) {\n const base = this;\n return datasetFromIteratorFn(async () => {\n return (await base.iterator()).mapAsync(transform);\n }, this.size);\n }\n /**\n * Creates a `Dataset` that prefetches elements from this dataset.\n *\n * @param bufferSize: An integer specifying the number of elements to be\n * prefetched.\n * @returns A `Dataset`.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n prefetch(bufferSize) {\n if (bufferSize == null) {\n throw new RangeError('`Dataset.prefetch()` requires bufferSize to be specified.');\n }\n const base = this;\n return datasetFromIteratorFn(async () => (await base.iterator()).prefetch(bufferSize), this.size);\n }\n /**\n * Repeats this dataset `count` times.\n *\n * NOTE: If this dataset is a function of global state (e.g. a random number\n * generator), then different repetitions may produce different elements.\n *\n * ```js\n * const a = tf.data.array([1, 2, 3]).repeat(3);\n * await a.forEachAsync(e => console.log(e));\n * ```\n *\n * @param count: (Optional) An integer, representing the number of times\n * the dataset should be repeated. The default behavior (if `count` is\n * `undefined` or negative) is for the dataset be repeated indefinitely.\n * @returns A `Dataset`.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n repeat(count) {\n const base = this;\n let size;\n if (this.size != null && count > 0) {\n // If this dataset has size and count is positive, new size is current\n // size multiply count. 
This also covers the case that current size is\n // infinity.\n size = this.size * count;\n }\n else if (count === 0) {\n // If count is 0, new size is 0.\n size = 0;\n }\n else if (this.size != null && (count === undefined || count < 0)) {\n // If this dataset has size and count is undefined or negative, the\n // dataset will be repeated indefinitely and new size is infinity.\n size = Infinity;\n }\n else {\n // If the size of this dataset is null, the new dataset's size is null.\n size = null;\n }\n return datasetFromIteratorFn(async () => {\n const iteratorIterator = iteratorFromFunction(async () => ({ value: await base.iterator(), done: false }));\n return iteratorFromConcatenated(iteratorIterator.take(count));\n }, size);\n }\n /**\n * Creates a `Dataset` that skips `count` initial elements from this dataset.\n *\n * ```js\n * const a = tf.data.array([1, 2, 3, 4, 5, 6]).skip(3);\n * await a.forEachAsync(e => console.log(e));\n * ```\n *\n * @param count: The number of elements of this dataset that should be skipped\n * to form the new dataset. If `count` is greater than the size of this\n * dataset, the new dataset will contain no elements. If `count`\n * is `undefined` or negative, skips the entire dataset.\n *\n * @returns A `Dataset`.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n skip(count) {\n const base = this;\n let size;\n if (this.size != null && count >= 0 && this.size >= count) {\n // If the size of this dataset is greater than count, the new dataset's\n // size is current size minus skipped size.This also covers the case that\n // current size is infinity.\n size = this.size - count;\n }\n else if (this.size != null &&\n (this.size < count || count === undefined || count < 0)) {\n // If the size of this dataset is smaller than count, or count is\n // undefined or negative, skips the entire dataset and the new size is 0.\n size = 0;\n }\n else {\n // If the size of this dataset is null, the new dataset's size is null.\n size = null;\n }\n return datasetFromIteratorFn(async () => (await base.iterator()).skip(count), size);\n }\n /**\n * Pseudorandomly shuffles the elements of this dataset. This is done in a\n * streaming manner, by sampling from a given number of prefetched elements.\n *\n * ```js\n * const a = tf.data.array([1, 2, 3, 4, 5, 6]).shuffle(3);\n * await a.forEachAsync(e => console.log(e));\n * ```\n *\n * @param bufferSize: An integer specifying the number of elements from this\n * dataset from which the new dataset will sample.\n * @param seed: (Optional) An integer specifying the random seed that will\n * be used to create the distribution.\n * @param reshuffleEachIteration: (Optional) A boolean, which if true\n * indicates that the dataset should be pseudorandomly reshuffled each time\n * it is iterated over. If false, elements will be returned in the same\n * shuffled order on each iteration. (Defaults to `true`.)\n * @returns A `Dataset`.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n shuffle(bufferSize, seed, reshuffleEachIteration = true) {\n if (bufferSize == null || bufferSize < 0) {\n if (this.size == null) {\n throw new RangeError('`Dataset.shuffle()` requires bufferSize to be specified.');\n }\n else {\n throw new RangeError('`Dataset.shuffle()` requires bufferSize to be specified. 
' +\n 'If your data fits in main memory (for regular JS objects), ' +\n 'and/or GPU memory (for `tf.Tensor`s), consider setting ' +\n `bufferSize to the dataset size (${this.size} elements)`);\n }\n }\n const base = this;\n const random = seedrandom.alea(seed || tf.util.now().toString());\n return datasetFromIteratorFn(async () => {\n let seed2 = random.int32();\n if (reshuffleEachIteration) {\n seed2 += random.int32();\n }\n return (await base.iterator()).shuffle(bufferSize, seed2.toString());\n }, this.size);\n }\n /**\n * Creates a `Dataset` with at most `count` initial elements from this\n * dataset.\n *\n * ```js\n * const a = tf.data.array([1, 2, 3, 4, 5, 6]).take(3);\n * await a.forEachAsync(e => console.log(e));\n * ```\n *\n * @param count: The number of elements of this dataset that should be taken\n * to form the new dataset. If `count` is `undefined` or negative, or if\n * `count` is greater than the size of this dataset, the new dataset will\n * contain all elements of this dataset.\n * @returns A `Dataset`.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n take(count) {\n const base = this;\n let size;\n if (this.size != null && this.size > count) {\n // If the size of this dataset is greater than count, the new dataset's\n // size is count.\n size = count;\n }\n else if (this.size != null && this.size <= count) {\n // If the size of this dataset is equal or smaller than count, the new\n // dataset's size is the size of this dataset.\n size = this.size;\n }\n else {\n // If the size of this dataset is null, the new dataset's size is null.\n size = null;\n }\n return datasetFromIteratorFn(async () => (await base.iterator()).take(count), size);\n }\n /**\n * Collect all elements of this dataset into an array.\n *\n * Obviously this will succeed only for small datasets that fit in memory.\n * Useful for testing and generally should be avoided if possible.\n *\n * ```js\n * const a = tf.data.array([1, 2, 3, 4, 5, 6]);\n * console.log(await a.toArray());\n * ```\n *\n * @returns A Promise for an array of elements, which will resolve\n * when a new stream has been obtained and fully consumed.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n async toArray() {\n if (this.size === Infinity) {\n throw new Error('Can not convert infinite data stream to array.');\n }\n return (await this.iterator()).toArray();\n }\n /**\n * Collect all elements of this dataset into an array with prefetching 100\n * elements. This is useful for testing, because the prefetch changes the\n * order in which the Promises are resolved along the processing pipeline.\n * This may help expose bugs where results are dependent on the order of\n * Promise resolution rather than on the logical order of the stream (i.e.,\n * due to hidden mutable state).\n *\n * @returns A Promise for an array of elements, which will resolve\n * when a new stream has been obtained and fully consumed.\n */\n async toArrayForTest() {\n if (this.size === Infinity) {\n throw new Error('Can not convert infinite data stream to array.');\n }\n return (await this.iterator()).toArrayForTest();\n }\n}\n// TODO(soergel): deep sharded shuffle, where supported\nDataset.MAX_BUFFER_SIZE = 10000;\n/**\n * Create a `Dataset` defined by a provided iterator() function.\n *\n * ```js\n * let i = -1;\n * const func = () =>\n * ++i < 5 ? 
{value: i, done: false} : {value: null, done: true};\n * const iter = tf.data.iteratorFromFunction(func);\n * const ds = tf.data.datasetFromIteratorFn(iter);\n * await ds.forEachAsync(e => console.log(e));\n * ```\n */\nexport function datasetFromIteratorFn(iteratorFn, size = null) {\n return new class extends Dataset {\n constructor() {\n super(...arguments);\n this.size = size;\n }\n /*\n * Provide a new stream of elements. Note this will also start new streams\n * from any underlying `Dataset`s.\n */\n async iterator() {\n return iteratorFn();\n }\n }();\n}\n/**\n * Create a `Dataset` from an array of elements.\n *\n * Create a Dataset from an array of objects:\n * ```js\n * const a = tf.data.array([{'item': 1}, {'item': 2}, {'item': 3}]);\n * await a.forEachAsync(e => console.log(e));\n * ```\n *\n * Create a Dataset from an array of numbers:\n * ```js\n * const a = tf.data.array([4, 5, 6]);\n * await a.forEachAsync(e => console.log(e));\n * ```\n * @param items An array of elements that will be parsed as items in a dataset.\n *\n * @doc {heading: 'Data', subheading: 'Creation', namespace: 'data'}\n */\nexport function array(items) {\n return datasetFromIteratorFn(async () => iteratorFromItems(items), items.length);\n}\n/**\n * Create a `Dataset` by zipping together an array, dict, or nested\n * structure of `Dataset`s (and perhaps additional constants).\n * The underlying datasets must provide elements in a consistent order such that\n * they correspond.\n *\n * The number of elements in the resulting dataset is the same as the size of\n * the smallest dataset in datasets.\n *\n * The nested structure of the `datasets` argument determines the\n * structure of elements in the resulting iterator.\n *\n * Note this means that, given an array of two datasets that produce dict\n * elements, the result is a dataset that produces elements that are arrays\n * of two dicts:\n *\n * Zip an array of datasets:\n * ```js\n * console.log('Zip two datasets of objects:');\n * const ds1 = tf.data.array([{a: 1}, {a: 2}, {a: 3}]);\n * const ds2 = tf.data.array([{b: 4}, {b: 5}, {b: 6}]);\n * const ds3 = tf.data.zip([ds1, ds2]);\n * await ds3.forEachAsync(e => console.log(JSON.stringify(e)));\n *\n * // If the goal is to merge the dicts in order to produce elements like\n * // {a: ..., b: ...}, this requires a second step such as:\n * console.log('Merge the objects:');\n * const ds4 = ds3.map(x => {return {a: x[0].a, b: x[1].b}});\n * await ds4.forEachAsync(e => console.log(e));\n * ```\n *\n * Zip a dict of datasets:\n * ```js\n * const a = tf.data.array([{a: 1}, {a: 2}, {a: 3}]);\n * const b = tf.data.array([{b: 4}, {b: 5}, {b: 6}]);\n * const c = tf.data.zip({c: a, d: b});\n * await c.forEachAsync(e => console.log(JSON.stringify(e)));\n * ```\n *\n * @doc {heading: 'Data', subheading: 'Operations', namespace: 'data'}\n */\nexport function zip(datasets) {\n // manually type-check the argument for JS users\n if (!isIterable(datasets)) {\n throw new Error('The argument to zip() must be an object or array.');\n }\n let size;\n if (Array.isArray(datasets)) {\n for (let i = 0; i < datasets.length; i++) {\n size = size == null ? datasets[i].size :\n Math.min(size, datasets[i].size);\n }\n }\n else if (datasets instanceof Object) {\n for (const ds in datasets) {\n size = size == null ? 
datasets[ds].size :\n Math.min(size, datasets[ds].size);\n }\n }\n return datasetFromIteratorFn(async () => {\n const streams = await deepMapAndAwaitAll(datasets, d => {\n if (d instanceof Dataset) {\n return { value: d.iterator(), recurse: false };\n }\n else if (isIterable(d)) {\n return { value: null, recurse: true };\n }\n else {\n throw new Error('Leaves of the structure passed to zip() must be Datasets, ' +\n 'not primitives.');\n }\n });\n return iteratorFromZipped(streams, ZipMismatchMode.SHORTEST);\n }, size);\n}\n/**\n * A zip function for use with deepZip, passed via the columnMajorBatch call.\n *\n * Accepts an array of identically-structured nested elements and either batches\n * them (if they are primitives, numeric arrays, or Tensors) or requests\n * recursion (if not).\n */\n// tslint:disable-next-line:no-any\nfunction deepBatchConcat(rows) {\n if (rows === null) {\n return null;\n }\n // use the first item to decide whether to recurse or batch here.\n const exampleRow = rows[0];\n if (canTensorify(exampleRow)) {\n // rows is an array of primitives, Tensors, or arrays. Batch them.\n const value = batchConcat(rows);\n return { value, recurse: false };\n }\n // the example row is an object, so recurse into it.\n return { value: null, recurse: true };\n}\n/**\n * Assembles a list of same-shaped numbers, number arrays, or Tensors\n * into a single new Tensor where axis 0 is the batch dimension.\n */\nfunction batchConcat(arrays) {\n if (arrays.length === 0) {\n // We can't return an empty Tensor because we don't know the element shape.\n throw new Error('Can\\'t make a batch of zero elements.');\n }\n if (arrays[0] instanceof tf.Tensor) {\n // Input is an array of Tensors\n return tf.stack(arrays);\n }\n else {\n // Input is a possibly-nested array of numbers.\n return tf.tensor(arrays);\n }\n}\n//# sourceMappingURL=dataset.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { Dataset } from '../dataset';\n/**\n * Represents a potentially large collection of text lines.\n *\n * The results are not batched.\n */\nexport class TextLineDataset extends Dataset {\n /**\n * Create a `TextLineDataset`.\n *\n * @param input A `DataSource` providing a chunked, UTF8-encoded byte stream.\n */\n constructor(input) {\n super();\n this.input = input;\n }\n async iterator() {\n const inputIterator = await this.input.iterator();\n const utf8Iterator = inputIterator.decodeUTF8();\n const lineIterator = utf8Iterator.split('\\n').map(line => {\n // Windows/DOS format text file has extra line breaker at the end of line.\n if (line.endsWith('\\r')) {\n line = line.slice(0, -1);\n }\n return line;\n });\n return lineIterator;\n }\n}\n//# sourceMappingURL=text_line_dataset.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nimport { Dataset } from '../dataset';\nimport { TextLineDataset } from './text_line_dataset';\nconst CODE_QUOTE = '\"';\nconst STATE_OUT = Symbol('out');\nconst STATE_FIELD = Symbol('field');\nconst STATE_QUOTE = Symbol('quote');\nconst STATE_QUOTE_AFTER_QUOTE = Symbol('quoteafterquote');\nconst STATE_WITHIN_QUOTE_IN_QUOTE = Symbol('quoteinquote');\n/**\n * Represents a potentially large collection of delimited text records.\n *\n * The produced `TensorContainer`s each contain one key-value pair for\n * every column of the table. When a field is empty in the incoming data, the\n * resulting value is `undefined`, or throw error if it is required. Values\n * that can be parsed as numbers are emitted as type `number`, other values\n * are parsed as `string`.\n *\n * The results are not batched.\n *\n * @doc {heading: 'Data', subheading: 'Classes', namespace: 'data'}\n */\nexport class CSVDataset extends Dataset {\n /**\n * Create a `CSVDataset`.\n *\n * @param input A `DataSource` providing a chunked, UTF8-encoded byte stream.\n * @param csvConfig (Optional) A CSVConfig object that contains configurations\n * of reading and decoding from CSV file(s).\n *\n * hasHeader: (Optional) A boolean value that indicates whether the first\n * row of provided CSV file is a header line with column names, and should\n * not be included in the data. Defaults to `true`.\n *\n * columnNames: (Optional) A list of strings that corresponds to\n * the CSV column names, in order. If provided, it ignores the column\n * names inferred from the header row. If not provided, infers the column\n * names from the first row of the records. If hasHeader is false and\n * columnNames is not provided, this method throws an error.\n *\n * columnConfigs: (Optional) A dictionary whose key is column names, value\n * is an object stating if this column is required, column's data type,\n * default value, and if this column is label. If provided, keys must\n * correspond to names provided in columnNames or inferred from the file\n * header lines. If isLabel is true any column, returns an array of two\n * items: the first item is a dict of features key/value pairs, the second\n * item is a dict of labels key/value pairs. If no feature is marked as\n * label, returns a dict of features only.\n *\n * configuredColumnsOnly (Optional) If true, only columns provided in\n * columnConfigs will be parsed and provided during iteration.\n *\n * delimiter (Optional) The string used to parse each line of the input\n * file. 
Defaults to `,`.\n */\n constructor(input, csvConfig) {\n super();\n this.input = input;\n this.hasHeader = true;\n this.fullColumnNames = null;\n this.columnNamesValidated = false;\n this.columnConfigs = null;\n this.configuredColumnsOnly = false;\n this.delimiter = ',';\n this.delimWhitespace = false;\n this.base = new TextLineDataset(input);\n if (!csvConfig) {\n csvConfig = {};\n }\n this.hasHeader = csvConfig.hasHeader === false ? false : true;\n this.fullColumnNames = csvConfig.columnNames;\n this.columnConfigs = csvConfig.columnConfigs;\n this.configuredColumnsOnly = csvConfig.configuredColumnsOnly;\n if (csvConfig.delimWhitespace) {\n util.assert(csvConfig.delimiter == null, () => 'Delimiter should not be provided when delimWhitespace is true.');\n this.delimWhitespace = true;\n this.delimiter = ' ';\n }\n else {\n this.delimiter = csvConfig.delimiter ? csvConfig.delimiter : ',';\n }\n }\n /**\n * Returns column names of the csv dataset. If `configuredColumnsOnly` is\n * true, return column names in `columnConfigs`. If `configuredColumnsOnly` is\n * false and `columnNames` is provided, `columnNames`. If\n * `configuredColumnsOnly` is false and `columnNames` is not provided, return\n * all column names parsed from the csv file. For example usage please go to\n * `tf.data.csv`.\n *\n * @doc {heading: 'Data', subheading: 'Classes'}\n */\n async columnNames() {\n if (!this.columnNamesValidated) {\n await this.setColumnNames();\n }\n return this.configuredColumnsOnly ? Object.keys(this.columnConfigs) :\n this.fullColumnNames;\n }\n /* 1) If `columnNames` is provided as string[], use this string[] as output\n * keys in corresponding order. The length must match the number of inferred\n * columns if `hasHeader` is true .\n * 2) If `columnNames` is not provided, parse header line as `columnNames` if\n * hasHeader is true. 
If `hasHeader` is false, throw an error.\n * 3) If `columnConfigs` is provided, all the keys in `columnConfigs` must\n * exist in parsed `columnNames`.\n */\n async setColumnNames() {\n const columnNamesFromFile = await this.maybeReadHeaderLine();\n if (!this.fullColumnNames && !columnNamesFromFile) {\n // Throw an error if columnNames is not provided and no header line.\n throw new Error('Column names must be provided if there is no header line.');\n }\n else if (this.fullColumnNames && columnNamesFromFile) {\n // Check provided columnNames match header line.\n util.assert(columnNamesFromFile.length === this.fullColumnNames.length, () => 'The length of provided columnNames (' +\n this.fullColumnNames.length.toString() +\n ') does not match the length of the header line read from ' +\n 'file (' + columnNamesFromFile.length.toString() + ').');\n }\n if (!this.fullColumnNames) {\n this.fullColumnNames = columnNamesFromFile;\n }\n // Check if there are duplicate column names.\n const counts = this.fullColumnNames.reduce((countAcc, name) => {\n countAcc[name] = (countAcc[name] + 1) || 1;\n return countAcc;\n }, {});\n const duplicateNames = Object.keys(counts).filter((name) => (counts[name] > 1));\n util.assert(duplicateNames.length === 0, () => 'Duplicate column names found: ' + duplicateNames.toString());\n // Check if keys in columnConfigs match columnNames.\n if (this.columnConfigs) {\n for (const key of Object.keys(this.columnConfigs)) {\n const index = this.fullColumnNames.indexOf(key);\n if (index === -1) {\n throw new Error('The key \"' + key +\n '\" provided in columnConfigs does not match any of the column ' +\n 'names (' + this.fullColumnNames.toString() + ').');\n }\n }\n }\n this.columnNamesValidated = true;\n }\n async maybeReadHeaderLine() {\n if (this.hasHeader) {\n const iter = await this.base.iterator();\n const firstElement = await iter.next();\n if (firstElement.done) {\n throw new Error('No data was found for CSV parsing.');\n }\n const firstLine = firstElement.value;\n const headers = this.parseRow(firstLine, false);\n return headers;\n }\n else {\n return null;\n }\n }\n async iterator() {\n if (!this.columnNamesValidated) {\n await this.setColumnNames();\n }\n let lines = await this.base.iterator();\n if (this.hasHeader) {\n // We previously read the first line to get the columnNames.\n // Now that we're providing data, skip it.\n lines = lines.skip(1);\n }\n return lines.map(x => this.makeDataElement(x));\n }\n makeDataElement(line) {\n const values = this.parseRow(line);\n const features = {};\n const labels = {};\n for (let i = 0; i < this.fullColumnNames.length; i++) {\n const key = this.fullColumnNames[i];\n const config = this.columnConfigs ? this.columnConfigs[key] : null;\n if (this.configuredColumnsOnly && !config) {\n // This column is not selected.\n continue;\n }\n else {\n const value = values[i];\n let parsedValue = null;\n if (value === '') {\n // If default value is provided, use it. 
If default value is not\n // provided, set as undefined.\n if (config && config.default !== undefined) {\n parsedValue = config.default;\n }\n else if (config && (config.required || config.isLabel)) {\n throw new Error(`Required column ${key} is empty in this line: ${line}`);\n }\n else {\n parsedValue = undefined;\n }\n }\n else {\n // A value is present, so parse it based on type\n const valueAsNum = Number(value);\n if (isNaN(valueAsNum)) {\n // The value is a string and this column is declared as boolean\n // in config, parse it as boolean.\n if (config && config.dtype === 'bool') {\n parsedValue = this.getBoolean(value);\n }\n else {\n // Set value as string\n parsedValue = value;\n }\n }\n else if (!config || !config.dtype) {\n // If this value is a number and no type config is provided, return\n // it as number.\n parsedValue = valueAsNum;\n }\n else {\n // If this value is a number and data type is provided, parse it\n // according to provided data type.\n switch (config.dtype) {\n case 'float32':\n parsedValue = valueAsNum;\n break;\n case 'int32':\n parsedValue = Math.floor(valueAsNum);\n break;\n case 'bool':\n parsedValue = this.getBoolean(value);\n break;\n default:\n parsedValue = valueAsNum;\n }\n }\n }\n // Check if this column is label.\n (config && config.isLabel) ? labels[key] = parsedValue :\n features[key] = parsedValue;\n }\n }\n // If label exists, return an object of features and labels as {xs:features,\n // ys:labels}, otherwise return features only.\n if (Object.keys(labels).length === 0) {\n return features;\n }\n else {\n return { xs: features, ys: labels };\n }\n }\n getBoolean(value) {\n if (value === '1' || value.toLowerCase() === 'true') {\n return 1;\n }\n else {\n return 0;\n }\n }\n // adapted from https://beta.observablehq.com/@mbostock/streaming-csv\n parseRow(line, validateElementCount = true) {\n const result = [];\n let readOffset = 0;\n const readLength = line.length;\n let currentState = STATE_OUT;\n // Goes through the line to parse quote.\n for (let i = 0; i < readLength; i++) {\n switch (currentState) {\n // Before enter a new field\n case STATE_OUT:\n switch (line.charAt(i)) {\n // Enter a quoted field\n case CODE_QUOTE:\n readOffset = i + 1;\n currentState = STATE_QUOTE;\n break;\n // Read an empty field\n case this.delimiter:\n readOffset = i + 1;\n // If delimiter is white space and configured to collapse\n // multiple white spaces, ignore this white space.\n if (this.delimiter === ' ' && this.delimWhitespace) {\n break;\n }\n result.push('');\n currentState = STATE_OUT;\n break;\n // Enter an unquoted field\n default:\n currentState = STATE_FIELD;\n readOffset = i;\n break;\n }\n break;\n // In an unquoted field\n case STATE_FIELD:\n switch (line.charAt(i)) {\n // Exit an unquoted field, add it to result\n case this.delimiter:\n result.push(line.substring(readOffset, i));\n currentState = STATE_OUT;\n readOffset = i + 1;\n break;\n default:\n }\n break;\n // In a quoted field\n case STATE_QUOTE:\n switch (line.charAt(i)) {\n // Read a quote after a quote\n case CODE_QUOTE:\n currentState = STATE_QUOTE_AFTER_QUOTE;\n break;\n default:\n }\n break;\n // This state means it's right after a second quote in a field\n case STATE_QUOTE_AFTER_QUOTE:\n switch (line.charAt(i)) {\n // Finished a quoted field\n case this.delimiter:\n result.push(line.substring(readOffset, i - 1));\n currentState = STATE_OUT;\n readOffset = i + 1;\n break;\n // Finished a quoted part in a quoted field\n case CODE_QUOTE:\n currentState = STATE_QUOTE;\n break;\n // In a 
quoted part in a quoted field\n default:\n currentState = STATE_WITHIN_QUOTE_IN_QUOTE;\n break;\n }\n break;\n case STATE_WITHIN_QUOTE_IN_QUOTE:\n switch (line.charAt(i)) {\n // Exit a quoted part in a quoted field\n case CODE_QUOTE:\n currentState = STATE_QUOTE;\n break;\n default:\n }\n break;\n default:\n }\n }\n // Adds last item based on if it is quoted.\n if (currentState === STATE_QUOTE_AFTER_QUOTE) {\n result.push(line.substring(readOffset, readLength - 1));\n }\n else {\n result.push(line.substring(readOffset));\n }\n // Check if each row has the same number of elements as column names.\n if (validateElementCount && result.length !== this.fullColumnNames.length) {\n throw new Error(`Invalid row in csv file. Should have ${this.fullColumnNames.length} elements in a row, but got ${result}`);\n }\n return result;\n }\n}\n// TODO(soergel): add more basic datasets for parity with tf.data\n// tf.data.FixedLengthRecordDataset()\n// tf.data.TFRecordDataset()\n//# sourceMappingURL=csv_dataset.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { env, tensor, util } from '@tensorflow/tfjs-core';\nimport { LazyIterator } from './lazy_iterator';\n/**\n * Provide a stream of tensors from microphone audio stream. The tensors are\n * representing audio data as frequency-domain spectrogram generated with\n * browser's native FFT. Tensors representing time-domain waveform is available\n * based on configuration. Only works in browser environment.\n */\nexport class MicrophoneIterator extends LazyIterator {\n constructor(microphoneConfig) {\n super();\n this.microphoneConfig = microphoneConfig;\n this.isClosed = false;\n this.fftSize = microphoneConfig.fftSize || 1024;\n const fftSizeLog2 = Math.log2(this.fftSize);\n if (this.fftSize < 0 || fftSizeLog2 < 4 || fftSizeLog2 > 14 ||\n !Number.isInteger(fftSizeLog2)) {\n throw new Error(`Invalid fftSize: it must be a power of 2 between ` +\n `2 to 4 and 2 to 14, but got ${this.fftSize}`);\n }\n this.numFrames = microphoneConfig.numFramesPerSpectrogram || 43;\n this.sampleRateHz = microphoneConfig.sampleRateHz;\n this.columnTruncateLength =\n microphoneConfig.columnTruncateLength || this.fftSize;\n this.audioTrackConstraints = microphoneConfig.audioTrackConstraints;\n this.smoothingTimeConstant = microphoneConfig.smoothingTimeConstant || 0;\n this.includeSpectrogram =\n microphoneConfig.includeSpectrogram === false ? false : true;\n this.includeWaveform =\n microphoneConfig.includeWaveform === true ? true : false;\n if (!this.includeSpectrogram && !this.includeWaveform) {\n throw new Error('Both includeSpectrogram and includeWaveform are false. 
' +\n 'At least one type of data should be returned.');\n }\n }\n summary() {\n return `microphone`;\n }\n // Construct a MicrophoneIterator and start the audio stream.\n static async create(microphoneConfig = {}) {\n if (env().get('IS_NODE')) {\n throw new Error('microphone API is only supported in browser environment.');\n }\n const microphoneIterator = new MicrophoneIterator(microphoneConfig);\n // Call async function start() to initialize the audio stream.\n await microphoneIterator.start();\n return microphoneIterator;\n }\n // Start the audio stream and FFT.\n async start() {\n try {\n this.stream = await navigator.mediaDevices.getUserMedia({\n audio: this.audioTrackConstraints == null ? true :\n this.audioTrackConstraints,\n video: false\n });\n }\n catch (e) {\n throw new Error(`Error thrown while initializing video stream: ${e.message}`);\n }\n if (!this.stream) {\n throw new Error('Could not obtain audio from microphone.');\n }\n const ctxConstructor = \n // tslint:disable-next-line:no-any\n window.AudioContext || window.webkitAudioContext;\n this.audioContext = new ctxConstructor();\n if (!this.sampleRateHz) {\n // If sample rate is not provided, use the available sample rate on\n // device.\n this.sampleRateHz = this.audioContext.sampleRate;\n }\n else if (this.audioContext.sampleRate !== this.sampleRateHz) {\n throw new Error(`Mismatch in sampling rate: ` +\n `Expected: ${this.sampleRateHz}; ` +\n `Actual: ${this.audioContext.sampleRate}`);\n }\n const streamSource = this.audioContext.createMediaStreamSource(this.stream);\n this.analyser = this.audioContext.createAnalyser();\n this.analyser.fftSize = this.fftSize * 2;\n this.analyser.smoothingTimeConstant = this.smoothingTimeConstant;\n streamSource.connect(this.analyser);\n this.freqData = new Float32Array(this.fftSize);\n this.timeData = new Float32Array(this.fftSize);\n return;\n }\n async next() {\n if (this.isClosed) {\n return { value: null, done: true };\n }\n let spectrogramTensor;\n let waveformTensor;\n const audioDataQueue = await this.getAudioData();\n if (this.includeSpectrogram) {\n const freqData = this.flattenQueue(audioDataQueue.freqDataQueue);\n spectrogramTensor = this.getTensorFromAudioDataArray(freqData, [this.numFrames, this.columnTruncateLength, 1]);\n }\n if (this.includeWaveform) {\n const timeData = this.flattenQueue(audioDataQueue.timeDataQueue);\n waveformTensor = this.getTensorFromAudioDataArray(timeData, [this.numFrames * this.fftSize, 1]);\n }\n return {\n value: { 'spectrogram': spectrogramTensor, 'waveform': waveformTensor },\n done: false\n };\n }\n // Capture one result from the audio stream, and extract the value from\n // iterator.next() result.\n async capture() {\n return (await this.next()).value;\n }\n async getAudioData() {\n const freqDataQueue = [];\n const timeDataQueue = [];\n let currentFrames = 0;\n return new Promise(resolve => {\n const intervalID = setInterval(() => {\n if (this.includeSpectrogram) {\n this.analyser.getFloatFrequencyData(this.freqData);\n // If the audio stream is initializing, return empty queue.\n if (this.freqData[0] === -Infinity) {\n resolve({ freqDataQueue, timeDataQueue });\n }\n freqDataQueue.push(this.freqData.slice(0, this.columnTruncateLength));\n }\n if (this.includeWaveform) {\n this.analyser.getFloatTimeDomainData(this.timeData);\n timeDataQueue.push(this.timeData.slice());\n }\n // Clean interval and return when all frames have been collected\n if (++currentFrames === this.numFrames) {\n clearInterval(intervalID);\n resolve({ freqDataQueue, 
timeDataQueue });\n }\n }, this.fftSize / this.sampleRateHz * 1e3);\n });\n }\n // Stop the audio stream and pause the iterator.\n stop() {\n if (!this.isClosed) {\n this.isClosed = true;\n this.analyser.disconnect();\n this.audioContext.close();\n if (this.stream != null && this.stream.getTracks().length > 0) {\n this.stream.getTracks()[0].stop();\n }\n }\n }\n // Override toArray() function to prevent collecting.\n toArray() {\n throw new Error('Can not convert infinite audio stream to array.');\n }\n // Return audio sampling rate in Hz\n getSampleRate() {\n return this.sampleRateHz;\n }\n flattenQueue(queue) {\n const frameSize = queue[0].length;\n const freqData = new Float32Array(queue.length * frameSize);\n queue.forEach((data, i) => freqData.set(data, i * frameSize));\n return freqData;\n }\n getTensorFromAudioDataArray(freqData, shape) {\n const vals = new Float32Array(util.sizeFromShape(shape));\n // If the data is less than the output shape, the rest is padded with zeros.\n vals.set(freqData, vals.length - freqData.length);\n return tensor(vals, shape);\n }\n}\n//# sourceMappingURL=microphone_iterator.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { browser, env, image, tensor1d, tensor2d, tidy, util } from '@tensorflow/tfjs-core';\nimport { LazyIterator } from './lazy_iterator';\n/**\n * Provide a stream of image tensors from webcam video stream. 
Only works in\n * browser environment.\n */\nexport class WebcamIterator extends LazyIterator {\n constructor(webcamVideoElement, webcamConfig) {\n super();\n this.webcamVideoElement = webcamVideoElement;\n this.webcamConfig = webcamConfig;\n this.isClosed = true;\n this.resize = false;\n if (this.needToResize()) {\n this.resize = true;\n this.cropSize =\n [this.webcamConfig.resizeHeight, this.webcamConfig.resizeWidth];\n this.cropBoxInd = tensor1d([0], 'int32');\n if (this.webcamConfig.centerCrop) {\n // Calculate the box based on resizing shape.\n const widthCroppingRatio = this.webcamConfig.resizeWidth * 1.0 / this.webcamVideoElement.width;\n const heightCroppingRatio = this.webcamConfig.resizeHeight * 1.0 /\n this.webcamVideoElement.height;\n const widthCropStart = (1 - widthCroppingRatio) / 2;\n const heightCropStart = (1 - heightCroppingRatio) / 2;\n const widthCropEnd = widthCropStart + widthCroppingRatio;\n const heightCropEnd = heightCroppingRatio + heightCropStart;\n this.cropBox = tensor2d([heightCropStart, widthCropStart, heightCropEnd, widthCropEnd], [1, 4]);\n }\n else {\n this.cropBox = tensor2d([0, 0, 1, 1], [1, 4]);\n }\n }\n }\n summary() {\n return `webcam`;\n }\n // Construct a WebcamIterator and start it's video stream.\n static async create(webcamVideoElement, webcamConfig = {}) {\n if (env().get('IS_NODE')) {\n throw new Error('tf.data.webcam is only supported in browser environment.');\n }\n if (!webcamVideoElement) {\n // If webcam video element is not provided, create a hidden video element\n // with provided width and height.\n webcamVideoElement = document.createElement('video');\n if (!webcamConfig.resizeWidth || !webcamConfig.resizeHeight) {\n throw new Error('Please provide webcam video element, or resizeWidth and ' +\n 'resizeHeight to create a hidden video element.');\n }\n webcamVideoElement.width = webcamConfig.resizeWidth;\n webcamVideoElement.height = webcamConfig.resizeHeight;\n }\n const webcamIterator = new WebcamIterator(webcamVideoElement, webcamConfig);\n // Call async function to initialize the video stream.\n await webcamIterator.start();\n return webcamIterator;\n }\n // Async function to start video stream.\n async start() {\n if (this.webcamConfig.facingMode) {\n util.assert((this.webcamConfig.facingMode === 'user') ||\n (this.webcamConfig.facingMode === 'environment'), () => `Invalid webcam facing mode: ${this.webcamConfig.facingMode}. 
` +\n `Please provide 'user' or 'environment'`);\n }\n try {\n this.stream = await navigator.mediaDevices.getUserMedia({\n video: {\n deviceId: this.webcamConfig.deviceId,\n facingMode: this.webcamConfig.facingMode ?\n this.webcamConfig.facingMode :\n 'user',\n width: this.webcamVideoElement.width,\n height: this.webcamVideoElement.height\n }\n });\n }\n catch (e) {\n // Modify the error message but leave the stack trace intact\n e.message = `Error thrown while initializing video stream: ${e.message}`;\n throw e;\n }\n if (!this.stream) {\n throw new Error('Could not obtain video from webcam.');\n }\n // Older browsers may not have srcObject\n try {\n this.webcamVideoElement.srcObject = this.stream;\n }\n catch (error) {\n console.log(error);\n this.webcamVideoElement.src = window.URL.createObjectURL(this.stream);\n }\n // Start the webcam video stream\n this.webcamVideoElement.play();\n this.isClosed = false;\n return new Promise(resolve => {\n // Add event listener to make sure the webcam has been fully initialized.\n this.webcamVideoElement.onloadedmetadata = () => {\n resolve();\n };\n });\n }\n async next() {\n if (this.isClosed) {\n return { value: null, done: true };\n }\n let img;\n try {\n img = browser.fromPixels(this.webcamVideoElement);\n }\n catch (e) {\n throw new Error(`Error thrown converting video to pixels: ${JSON.stringify(e)}`);\n }\n if (this.resize) {\n try {\n return { value: this.cropAndResizeFrame(img), done: false };\n }\n catch (e) {\n throw new Error(`Error thrown cropping the video: ${e.message}`);\n }\n finally {\n img.dispose();\n }\n }\n else {\n return { value: img, done: false };\n }\n }\n needToResize() {\n // If resizeWidth and resizeHeight are provided, and different from the\n // width and height of original HTMLVideoElement, then resizing and cropping\n // is required.\n if (this.webcamConfig.resizeWidth && this.webcamConfig.resizeHeight &&\n (this.webcamVideoElement.width !== this.webcamConfig.resizeWidth ||\n this.webcamVideoElement.height !== this.webcamConfig.resizeHeight)) {\n return true;\n }\n return false;\n }\n // Cropping and resizing each frame based on config\n cropAndResizeFrame(img) {\n return tidy(() => {\n const expandedImage = img.toFloat().expandDims(0);\n let resizedImage;\n resizedImage = image.cropAndResize(expandedImage, this.cropBox, this.cropBoxInd, this.cropSize, 'bilinear');\n // Extract image from batch cropping.\n const shape = resizedImage.shape;\n return resizedImage.reshape(shape.slice(1));\n });\n }\n // Capture one frame from the video stream, and extract the value from\n // iterator.next() result.\n async capture() {\n return (await this.next()).value;\n }\n // Stop the video stream and pause webcam iterator.\n stop() {\n const tracks = this.stream.getTracks();\n tracks.forEach(track => track.stop());\n try {\n this.webcamVideoElement.srcObject = null;\n }\n catch (error) {\n console.log(error);\n this.webcamVideoElement.src = null;\n }\n this.isClosed = true;\n }\n // Override toArray() function to prevent collecting.\n toArray() {\n throw new Error('Can not convert infinite video stream to array.');\n }\n}\n//# sourceMappingURL=webcam_iterator.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\n/**\n * Represents a data source readable as a stream of binary data chunks.\n *\n * Because `Dataset`s can be read repeatedly (via `Dataset.iterator()`), this\n * provides a means to repeatedly create streams from the underlying data\n * sources.\n */\nexport class DataSource {\n}\n// TODO(soergel): consider convenience factory functions here\n// in combination with chainable source->dataset above, e.g.:\n// tf.data.url(...).asCsvDataset().shuffle().batch()\n//# sourceMappingURL=datasource.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { LazyIterator, OneToManyIterator } from './lazy_iterator';\nexport class StringIterator extends LazyIterator {\n /**\n * Splits a string stream on a given separator.\n *\n * It is assumed that the incoming chunk boundaries have no semantic meaning,\n * so conceptually the incoming stream is treated simply as the concatenation\n * of its elements.\n *\n * The outgoing stream provides chunks corresponding to the results of the\n * standard string split() operation (even if such a chunk spanned incoming\n * chunks). The separators are not included.\n *\n * A typical usage is to split a text file (represented as a stream with\n * arbitrary chunk boundaries) into lines.\n *\n * @param upstream A readable stream of strings that can be treated as\n * concatenated.\n * @param separator A character to split on.\n */\n split(separator) {\n return new SplitIterator(this, separator);\n }\n}\n// ============================================================================\n// The following private classes serve to implement the chainable methods\n// on StringIterator. 
Unfortunately they can't be placed in separate files, due\n// to resulting trouble with circular imports.\n// ============================================================================\n// We wanted multiple inheritance, e.g.\n// class SplitIterator extends QueueIterator, StringIterator\n// but the TypeScript mixin approach is a bit hacky, so we take this adapter\n// approach instead.\nclass SplitIterator extends StringIterator {\n constructor(upstream, separator) {\n super();\n this.upstream = upstream;\n this.impl = new SplitIteratorImpl(upstream, separator);\n }\n summary() {\n return this.impl.summary();\n }\n async next() {\n return this.impl.next();\n }\n}\nclass SplitIteratorImpl extends OneToManyIterator {\n constructor(upstream, separator) {\n super();\n this.upstream = upstream;\n this.separator = separator;\n // A partial string at the end of an upstream chunk\n this.carryover = '';\n }\n summary() {\n return `${this.upstream.summary()} -> Split('${this.separator}')`;\n }\n async pump() {\n const chunkResult = await this.upstream.next();\n if (chunkResult.done) {\n if (this.carryover === '') {\n return false;\n }\n // Pretend that the pump succeeded in order to emit the small last batch.\n // The next pump() call will actually fail.\n this.outputQueue.push(this.carryover);\n this.carryover = '';\n return true;\n }\n const lines = chunkResult.value.split(this.separator);\n // Note the behavior: \" ab \".split(' ') === ['', 'ab', '']\n // Thus the carryover may be '' if the separator falls on a chunk\n // boundary; this produces the correct result.\n lines[0] = this.carryover + lines[0];\n for (const line of lines.slice(0, -1)) {\n this.outputQueue.push(line);\n }\n this.carryover = lines[lines.length - 1];\n return true;\n }\n}\n//# sourceMappingURL=string_iterator.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { env } from '@tensorflow/tfjs-core';\nimport { LazyIterator, OneToManyIterator } from './lazy_iterator';\nimport { StringIterator } from './string_iterator';\nexport class ByteChunkIterator extends LazyIterator {\n /**\n * Decode a stream of UTF8-encoded byte arrays to a stream of strings.\n *\n * The byte arrays producetd from the ByteChunkIterator on which this is\n * called will be interpreted as concatenated. No assumptions are made about\n * the boundaries of the incoming chunks, so a multi-byte UTF8 encoding of a\n * character may span the boundary between chunks. This naturally happens,\n * for instance, when reading fixed-size byte arrays from a file.\n */\n decodeUTF8() {\n return new Utf8Iterator(this);\n }\n}\n// ============================================================================\n// The following private classes serve to implement the chainable methods\n// on ByteChunkIterator. 
Unfortunately they can't be placed in separate files,\n// due to resulting trouble with circular imports.\n// ============================================================================\n// We wanted multiple inheritance, e.g.\n// class Utf8Iterator extends QueueIterator, StringIterator\n// but the TypeScript mixin approach is a bit hacky, so we take this adapter\n// approach instead.\nclass Utf8Iterator extends StringIterator {\n constructor(upstream) {\n super();\n this.upstream = upstream;\n this.impl = new Utf8IteratorImpl(upstream);\n }\n summary() {\n return this.impl.summary();\n }\n async next() {\n return this.impl.next();\n }\n}\n/**\n * Decode a stream of UTF8-encoded byte arrays to a stream of strings.\n *\n * This is tricky because the incoming byte array boundaries may disrupt a\n * multi-byte UTF8 character. Thus any incomplete character data at the end of\n * a chunk must be carried over and prepended to the next chunk before\n * decoding. Luckily with native decoder, TextDecoder in browser and\n * string_decoder in node, byte array boundaries are handled automatically.\n *\n * In the context of an input pipeline for machine learning, UTF8 decoding is\n * needed to parse text files containing training examples or prediction\n * requests (e.g., formatted as CSV or JSON). We cannot use the built-in\n * decoding provided by FileReader.readAsText() because here we are in a\n * streaming context, which FileReader does not support.\n *\n * @param upstream A `LazyIterator` of `Uint8Arrays` containing UTF8-encoded\n * text, which should be interpreted as concatenated. No assumptions are\n * made about the boundaries of the incoming chunks, so a multi-byte UTF8\n * encoding of a character may span the boundary between chunks. This\n * naturally happens, for instance, when reading fixed-size byte arrays from a\n * file.\n */\nclass Utf8IteratorImpl extends OneToManyIterator {\n constructor(upstream) {\n super();\n this.upstream = upstream;\n if (env().get('IS_BROWSER')) {\n this.decoder = new TextDecoder('utf-8');\n }\n else {\n // tslint:disable-next-line:no-require-imports\n const { StringDecoder } = require('string_decoder');\n this.decoder = new StringDecoder('utf8');\n }\n }\n summary() {\n return `${this.upstream.summary()} -> Utf8`;\n }\n async pump() {\n const chunkResult = await this.upstream.next();\n let chunk;\n if (chunkResult.done) {\n return false;\n }\n else {\n chunk = chunkResult.value;\n }\n let text;\n if (env().get('IS_BROWSER')) {\n text = this.decoder.decode(chunk, { stream: true });\n }\n else {\n text = this.decoder.write(Buffer.from(chunk.buffer));\n }\n this.outputQueue.push(text);\n return true;\n }\n}\n//# sourceMappingURL=byte_chunk_iterator.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\n// inspired by https://github.com/maxogden/filereader-stream\nimport { env, util } from '@tensorflow/tfjs-core';\nimport { ByteChunkIterator } from './byte_chunk_iterator';\n/**\n * Provide a stream of chunks from a File, Blob, or Uint8Array.\n * @param file The source File, Blob or Uint8Array.\n * @param options Optional settings controlling file reading.\n * @returns a lazy Iterator of Uint8Arrays containing sequential chunks of the\n * input File, Blob or Uint8Array.\n */\nexport class FileChunkIterator extends ByteChunkIterator {\n constructor(file, options = {}) {\n super();\n this.file = file;\n this.options = options;\n util.assert((file instanceof Uint8Array) ||\n (env().get('IS_BROWSER') ?\n (file instanceof File || file instanceof Blob) :\n false), () => 'FileChunkIterator only supports File, Blob and Uint8Array ' +\n 'right now.');\n this.offset = options.offset || 0;\n // default 1MB chunk has tolerable perf on large files\n this.chunkSize = options.chunkSize || 1024 * 1024;\n }\n summary() {\n return `FileChunks ${this.file}`;\n }\n async next() {\n if (this.offset >= ((this.file instanceof Uint8Array) ?\n this.file.byteLength :\n this.file.size)) {\n return { value: null, done: true };\n }\n const chunk = new Promise((resolve, reject) => {\n const end = this.offset + this.chunkSize;\n if (this.file instanceof Uint8Array) {\n // Note if end > this.uint8Array.byteLength, we just get a small last\n // chunk.\n resolve(new Uint8Array(this.file.slice(this.offset, end)));\n }\n else {\n // This branch assumes that this.file type is File or Blob, which\n // means it is in the browser environment.\n // TODO(soergel): is this a performance issue?\n const fileReader = new FileReader();\n fileReader.onload = (event) => {\n let data = fileReader.result;\n // Not sure we can trust the return type of\n // FileReader.readAsArrayBuffer See e.g.\n // https://github.com/node-file-api/FileReader/issues/2\n if (data instanceof ArrayBuffer) {\n data = new Uint8Array(data);\n }\n if (!(data instanceof Uint8Array)) {\n return reject(new TypeError('FileReader returned unknown type.'));\n }\n resolve(data);\n };\n fileReader.onabort = (event) => {\n return reject(new Error('Aborted'));\n };\n fileReader.onerror = (event) => {\n return reject(new Error(event.type));\n };\n // TODO(soergel): better handle onabort, onerror\n // Note if end > this.file.size, we just get a small last chunk.\n const slice = this.file.slice(this.offset, end);\n // We can't use readAsText here (even if we know the file is text)\n // because the slice boundary may fall within a multi-byte character.\n fileReader.readAsArrayBuffer(slice);\n }\n this.offset = end;\n });\n return { value: (await chunk), done: false };\n }\n}\n//# sourceMappingURL=file_chunk_iterator.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nimport { FileChunkIterator } from './file_chunk_iterator';\n/**\n * Provide a stream of chunks from a URL.\n *\n * Note this class first downloads the entire file into memory before providing\n * the first element from the stream. This is because the Fetch API does not\n * yet reliably provide a reader stream for the response body.\n */\nexport async function urlChunkIterator(url, options = {}) {\n let urlString;\n let requestInit;\n if ((typeof url) === 'string') {\n urlString = url;\n }\n else {\n urlString = url.url;\n requestInit = getRequestInitFromRequest(url);\n }\n const response = await util.fetch(urlString, requestInit);\n if (response.ok) {\n const uint8Array = new Uint8Array(await response.arrayBuffer());\n return new FileChunkIterator(uint8Array, options);\n }\n else {\n throw new Error(response.statusText);\n }\n}\n// Generate RequestInit from Request to match tf.util.fetch signature.\nconst getRequestInitFromRequest = (request) => {\n const init = {\n method: request.method,\n headers: request.headers,\n body: request.body,\n mode: request.mode,\n credentials: request.credentials,\n cache: request.cache,\n redirect: request.redirect,\n referrer: request.referrer,\n integrity: request.integrity,\n };\n return init;\n};\n//# sourceMappingURL=url_chunk_iterator.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\n// Skip tslint any type check cause this method is aiming to check type of\n// input.\n// tslint:disable-next-line:no-any\nexport function isLocalPath(source) {\n return (typeof source === 'string') && source.substr(0, 7) === 'file://';\n}\n//# sourceMappingURL=source_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { env } from '@tensorflow/tfjs-core';\nimport { DataSource } from '../datasource';\nimport { FileChunkIterator } from '../iterators/file_chunk_iterator';\nimport { isLocalPath } from '../util/source_util';\n/**\n * Represents a file, blob, or Uint8Array readable as a stream of binary data\n * chunks.\n */\nexport class FileDataSource extends DataSource {\n /**\n * Create a `FileDataSource`.\n *\n * @param input Local file path, or `File`/`Blob`/`Uint8Array` object to\n * read. Local file only works in node environment.\n * @param options Options passed to the underlying `FileChunkIterator`s,\n * such as {chunksize: 1024}.\n */\n constructor(input, options = {}) {\n super();\n this.input = input;\n this.options = options;\n }\n async iterator() {\n if (isLocalPath(this.input) && env().get('IS_NODE')) {\n // tslint:disable-next-line:no-require-imports\n const fs = require('fs');\n this.input = fs.readFileSync(this.input.substr(7));\n }\n // TODO(kangyizhang): Add LocalFileChunkIterator to split local streaming\n // with file in browser.\n return new FileChunkIterator(this.input, this.options);\n }\n}\n//# sourceMappingURL=file_data_source.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { DataSource } from '../datasource';\nimport { urlChunkIterator } from '../iterators/url_chunk_iterator';\nimport { isLocalPath } from '../util/source_util';\nimport { FileDataSource } from './file_data_source';\n/*\n * Represents a URL readable as a stream of binary data chunks.\n */\nexport class URLDataSource extends DataSource {\n /**\n * Create a `URLDataSource`.\n *\n * @param url A source URL string, or a `Request` object.\n * @param options Options passed to the underlying `FileChunkIterator`s,\n * such as {chunksize: 1024}.\n */\n constructor(url, fileOptions = {}) {\n super();\n this.url = url;\n this.fileOptions = fileOptions;\n }\n // TODO(soergel): provide appropriate caching options. Currently this\n // will download the URL anew for each call to iterator(). Since we have\n // to treat the downloaded file as a blob/buffer anyway, we may as well retain\n // it-- but that raises GC issues. 
Also we may want a persistent disk cache.\n async iterator() {\n if (isLocalPath(this.url)) {\n return (new FileDataSource(this.url, this.fileOptions))\n .iterator();\n }\n else {\n return urlChunkIterator(this.url, this.fileOptions);\n }\n }\n}\n//# sourceMappingURL=url_data_source.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * =============================================================================\n */\nimport { datasetFromIteratorFn } from './dataset';\nimport { CSVDataset } from './datasets/csv_dataset';\nimport { iteratorFromFunction } from './iterators/lazy_iterator';\nimport { MicrophoneIterator } from './iterators/microphone_iterator';\nimport { WebcamIterator } from './iterators/webcam_iterator';\nimport { URLDataSource } from './sources/url_data_source';\n/**\n * Create a `CSVDataset` by reading and decoding CSV file(s) from provided URL\n * or local path if it's in Node environment.\n *\n * Note: If isLabel in columnConfigs is `true` for at least one column, the\n * element in returned `CSVDataset` will be an object of\n * `{xs:features, ys:labels}`: xs is a dict of features key/value pairs, ys\n * is a dict of labels key/value pairs. If no column is marked as label,\n * returns a dict of features only.\n *\n * ```js\n * const csvUrl =\n * 'https://storage.googleapis.com/tfjs-examples/multivariate-linear-regression/data/boston-housing-train.csv';\n *\n * async function run() {\n * // We want to predict the column \"medv\", which represents a median value of\n * // a home (in $1000s), so we mark it as a label.\n * const csvDataset = tf.data.csv(\n * csvUrl, {\n * columnConfigs: {\n * medv: {\n * isLabel: true\n * }\n * }\n * });\n *\n * // Number of features is the number of column names minus one for the label\n * // column.\n * const numOfFeatures = (await csvDataset.columnNames()).length - 1;\n *\n * // Prepare the Dataset for training.\n * const flattenedDataset =\n * csvDataset\n * .map(({xs, ys}) =>\n * {\n * // Convert xs(features) and ys(labels) from object form (keyed by\n * // column name) to array form.\n * return {xs:Object.values(xs), ys:Object.values(ys)};\n * })\n * .batch(10);\n *\n * // Define the model.\n * const model = tf.sequential();\n * model.add(tf.layers.dense({\n * inputShape: [numOfFeatures],\n * units: 1\n * }));\n * model.compile({\n * optimizer: tf.train.sgd(0.000001),\n * loss: 'meanSquaredError'\n * });\n *\n * // Fit the model using the prepared Dataset\n * return model.fitDataset(flattenedDataset, {\n * epochs: 10,\n * callbacks: {\n * onEpochEnd: async (epoch, logs) => {\n * console.log(epoch + ':' + logs.loss);\n * }\n * }\n * });\n * }\n *\n * await run();\n * ```\n *\n * @param source URL or local path to get CSV file. 
If it's a local path, it\n * must have prefix `file://` and it only works in node environment.\n * @param csvConfig (Optional) A CSVConfig object that contains configurations\n * of reading and decoding from CSV file(s).\n *\n * @doc {\n * heading: 'Data',\n * subheading: 'Creation',\n * namespace: 'data',\n * configParamIndices: [1]\n * }\n */\nexport function csv(source, csvConfig = {}) {\n return new CSVDataset(new URLDataSource(source), csvConfig);\n}\n/**\n * Create a `Dataset` that produces each element by calling a provided function.\n *\n * Note that repeated iterations over this `Dataset` may produce different\n * results, because the function will be called anew for each element of each\n * iteration.\n *\n * Also, beware that the sequence of calls to this function may be out of order\n * in time with respect to the logical order of the Dataset. This is due to the\n * asynchronous lazy nature of stream processing, and depends on downstream\n * transformations (e.g. .shuffle()). If the provided function is pure, this is\n * no problem, but if it is a closure over a mutable state (e.g., a traversal\n * pointer), then the order of the produced elements may be scrambled.\n *\n * ```js\n * let i = -1;\n * const func = () =>\n * ++i < 5 ? {value: i, done: false} : {value: null, done: true};\n * const ds = tf.data.func(func);\n * await ds.forEachAsync(e => console.log(e));\n * ```\n *\n * @param f A function that produces one data element on each call.\n */\nexport function func(f) {\n const iter = iteratorFromFunction(f);\n return datasetFromIteratorFn(async () => iter);\n}\n/**\n * Create a `Dataset` that produces each element from provided JavaScript\n * generator, which is a function*\n * (https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Iterators_and_Generators#Generator_functions),\n * or a function that returns an\n * iterator\n * (https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Iterators_and_Generators#Generator_functions).\n *\n * The returned iterator should have `.next()` function that returns element in\n * format of `{value: TensorContainer, done:boolean}`.\n *\n * Example of creating a dataset from an iterator factory:\n * ```js\n * function makeIterator() {\n * const numElements = 10;\n * let index = 0;\n *\n * const iterator = {\n * next: () => {\n * let result;\n * if (index < numElements) {\n * result = {value: index, done: false};\n * index++;\n * return result;\n * }\n * return {value: index, done: true};\n * }\n * };\n * return iterator;\n * }\n * const ds = tf.data.generator(makeIterator);\n * await ds.forEachAsync(e => console.log(e));\n * ```\n *\n * Example of creating a dataset from a generator:\n * ```js\n * function* dataGenerator() {\n * const numElements = 10;\n * let index = 0;\n * while (index < numElements) {\n * const x = index;\n * index++;\n * yield x;\n * }\n * }\n *\n * const ds = tf.data.generator(dataGenerator);\n * await ds.forEachAsync(e => console.log(e));\n * ```\n *\n * @param generator A Javascript generator function that returns a JavaScript\n * iterator.\n *\n * @doc {\n * heading: 'Data',\n * subheading: 'Creation',\n * namespace: 'data',\n * configParamIndices: [1]\n * }\n */\nexport function generator(generator) {\n return datasetFromIteratorFn(async () => {\n const gen = await generator();\n return iteratorFromFunction(() => gen.next());\n });\n}\n/**\n * Create an iterator that generate `Tensor`s from webcam video stream. 
This API\n * only works in Browser environment when the device has webcam.\n *\n * Note: this code snippet only works when the device has a webcam. It will\n * request permission to open the webcam when running.\n * ```js\n * const videoElement = document.createElement('video');\n * videoElement.width = 100;\n * videoElement.height = 100;\n * const cam = await tf.data.webcam(videoElement);\n * const img = await cam.capture();\n * img.print();\n * cam.stop();\n * ```\n *\n * @param webcamVideoElement A `HTMLVideoElement` used to play video from\n * webcam. If this element is not provided, a hidden `HTMLVideoElement` will\n * be created. In that case, `resizeWidth` and `resizeHeight` must be\n * provided to set the generated tensor shape.\n * @param webcamConfig A `WebcamConfig` object that contains configurations of\n * reading and manipulating data from webcam video stream.\n *\n * @doc {\n * heading: 'Data',\n * subheading: 'Creation',\n * namespace: 'data',\n * ignoreCI: true\n * }\n */\nexport async function webcam(webcamVideoElement, webcamConfig) {\n return WebcamIterator.create(webcamVideoElement, webcamConfig);\n}\n/**\n * Create an iterator that generate frequency-domain spectrogram `Tensor`s from\n * microphone audio stream with browser's native FFT. This API only works in\n * browser environment when the device has microphone.\n *\n * Note: this code snippet only works when the device has a microphone. It will\n * request permission to open the microphone when running.\n * ```js\n * const mic = await tf.data.microphone({\n * fftSize: 1024,\n * columnTruncateLength: 232,\n * numFramesPerSpectrogram: 43,\n * sampleRateHz:44100,\n * includeSpectrogram: true,\n * includeWaveform: true\n * });\n * const audioData = await mic.capture();\n * const spectrogramTensor = audioData.spectrogram;\n * spectrogramTensor.print();\n * const waveformTensor = audioData.waveform;\n * waveformTensor.print();\n * mic.stop();\n * ```\n *\n * @param microphoneConfig A `MicrophoneConfig` object that contains\n * configurations of reading audio data from microphone.\n *\n * @doc {\n * heading: 'Data',\n * subheading: 'Creation',\n * namespace: 'data',\n * ignoreCI: true\n * }\n */\nexport async function microphone(microphoneConfig) {\n return MicrophoneIterator.create(microphoneConfig);\n}\n//# sourceMappingURL=readers.js.map", "/** @license See the LICENSE file. */\n// This code is auto-generated, do not modify this file!\nconst version = '2.7.0';\nexport { version };\n//# sourceMappingURL=version.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport { array, Dataset, zip } from './dataset';\nexport { CSVDataset } from './datasets/csv_dataset';\nexport { TextLineDataset } from './datasets/text_line_dataset';\nexport { csv, func, generator, microphone, webcam } from './readers';\nexport { FileDataSource } from './sources/file_data_source';\nexport { URLDataSource } from './sources/url_data_source';\nexport { version as version_data } from './version';\n//# sourceMappingURL=index.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nexport function assertNotComplex(tensor, opName) {\n if (!Array.isArray(tensor)) {\n tensor = [tensor];\n }\n tensor.forEach(t => {\n if (t != null) {\n util.assert(t.dtype !== 'complex64', () => `${opName} does not support complex64 tensors in the CPU backend.`);\n }\n });\n}\n//# sourceMappingURL=cpu_util.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as tf from '@tensorflow/tfjs-core';\nimport { backend_util, DataStorage, engine, env, kernel_impls, KernelBackend, max, slice_util, TensorBuffer, upcastType, util } from '@tensorflow/tfjs-core';\nconst nonMaxSuppressionV3Impl = kernel_impls.nonMaxSuppressionV3Impl;\nconst split = kernel_impls.split;\nconst tile = kernel_impls.tile;\nconst topkImpl = kernel_impls.topkImpl;\nconst whereImpl = kernel_impls.whereImpl;\nimport * as seedrandom from 'seedrandom';\nimport { assertNotComplex } from './cpu_util';\nexport class MathBackendCPU extends KernelBackend {\n constructor() {\n super();\n this.blockSize = 48;\n this.firstUse = true;\n this.data = new DataStorage(this, engine());\n }\n write(values, shape, dtype) {\n if (this.firstUse) {\n this.firstUse = false;\n if (env().get('IS_NODE')) {\n backend_util.warn('\\n============================\\n' +\n 'Hi there \uD83D\uDC4B. Looks like you are running TensorFlow.js in ' +\n 'Node.js. To speed things up dramatically, install our node ' +\n 'backend, which binds to TensorFlow C++, by running ' +\n 'npm i @tensorflow/tfjs-node, ' +\n 'or npm i @tensorflow/tfjs-node-gpu if you have CUDA. ' +\n 'Then call require(\\'@tensorflow/tfjs-node\\'); (-gpu ' +\n 'suffix for CUDA) at the start of your program. ' +\n 'Visit https://github.com/tensorflow/tfjs-node for more details.' +\n '\\n============================');\n }\n }\n const dataId = {};\n this.data.set(dataId, { values, dtype, refCount: 1 });\n return dataId;\n }\n /**\n * Create a data bucket in cpu backend.\n * @param shape Shape of the `TensorInfo`.\n * @param dtype DType of the `TensorInfo`.\n * @param values The value of the `TensorInfo` stored as a flattened array.\n */\n makeTensorInfo(shape, dtype, values) {\n let outId;\n if (dtype === 'string' && values != null && values.length > 0 &&\n util.isString(values[0])) {\n const encodedValues = values.map(d => util.encodeString(d));\n outId = this.write(encodedValues, shape, dtype);\n }\n else {\n outId = this.write(values, shape, dtype);\n }\n return { dataId: outId, shape, dtype };\n }\n /** Increase refCount of a `TensorData`. */\n incRef(dataId) {\n const tensorData = this.data.get(dataId);\n tensorData.refCount++;\n }\n /** Decrease refCount of a `TensorData`. 
*/\n decRef(dataId) {\n if (this.data.has(dataId)) {\n const tensorData = this.data.get(dataId);\n tensorData.refCount--;\n }\n }\n move(dataId, values, shape, dtype) {\n this.data.set(dataId, { values, dtype, refCount: 1 });\n }\n numDataIds() {\n return this.data.numDataIds();\n }\n async read(dataId) {\n return this.readSync(dataId);\n }\n readSync(dataId) {\n const { dtype, complexTensorInfos } = this.data.get(dataId);\n if (dtype === 'complex64') {\n const realValues = this.readSync(complexTensorInfos.real.dataId);\n const imagValues = this.readSync(complexTensorInfos.imag.dataId);\n return backend_util.mergeRealAndImagArrays(realValues, imagValues);\n }\n return this.data.get(dataId).values;\n }\n bufferSync(t) {\n const data = this.readSync(t.dataId);\n let decodedData = data;\n if (t.dtype === 'string') {\n try {\n // Decode the bytes into string.\n decodedData = data.map(d => util.decodeString(d));\n }\n catch (_a) {\n throw new Error('Failed to decode encoded string bytes into utf-8');\n }\n }\n return tf.buffer(t.shape, t.dtype, decodedData);\n }\n makeOutput(values, shape, dtype) {\n const dataId = this.write(values, shape, dtype);\n return engine().makeTensorFromDataId(dataId, shape, dtype, this);\n }\n disposeData(dataId) {\n if (this.data.has(dataId)) {\n const { complexTensorInfos } = this.data.get(dataId);\n if (complexTensorInfos != null) {\n this.disposeData(complexTensorInfos.real.dataId);\n this.disposeData(complexTensorInfos.imag.dataId);\n }\n this.data.delete(dataId);\n }\n }\n disposeIntermediateTensorInfo(tensorInfo) {\n const dataId = tensorInfo.dataId;\n if (this.data.has(dataId)) {\n const tensorData = this.data.get(dataId);\n tensorData.refCount--;\n if (tensorData.refCount < 1) {\n this.disposeData(dataId);\n }\n }\n }\n async time(f) {\n const start = util.now();\n f();\n const kernelMs = util.now() - start;\n return { kernelMs };\n }\n memory() {\n return {\n // Unreliable due to automatic gc. The numbers above are cumulative.\n unreliable: true,\n reasons: ['The reported memory is an upper bound. 
Due to automatic garbage ' +\n 'collection, the true allocated memory may be less.']\n };\n }\n stridedSlice(x, begin, end, strides) {\n assertNotComplex(x, 'stridedSlice');\n const outShape = slice_util.computeOutShape(begin, end, strides);\n if (outShape.some(axis => axis === 0)) {\n return tf.tensor([], outShape);\n }\n const buffer = tf.buffer(outShape, x.dtype);\n const xBuf = this.bufferSync(x);\n for (let i = 0; i < buffer.size; i++) {\n const loc = buffer.indexToLoc(i);\n const newLoc = new Array(loc.length);\n for (let j = 0; j < newLoc.length; j++) {\n newLoc[j] = loc[j] * strides[j] + begin[j];\n }\n buffer.set(xBuf.get(...newLoc), ...loc);\n }\n return buffer.toTensor();\n }\n diag(x) {\n const xVals = this.readSync(x.dataId);\n const buffer = tf.buffer([x.size, x.size], x.dtype);\n const vals = buffer.values;\n for (let i = 0; i < xVals.length; i++) {\n vals[i * x.size + i] = xVals[i];\n }\n return buffer.toTensor();\n }\n unstack(x, axis) {\n const num = x.shape[axis];\n const outShape = new Array(x.rank - 1);\n let outIndex = 0;\n for (let i = 0; i < x.rank; i++) {\n if (i !== axis) {\n outShape[outIndex++] = x.shape[i];\n }\n }\n const begin = new Array(x.rank).fill(0);\n const size = x.shape.slice();\n size[axis] = 1;\n const res = new Array(num);\n for (let i = 0; i < res.length; i++) {\n begin[axis] = i;\n res[i] = tf.slice(x, begin, size).reshape(outShape);\n }\n return res;\n }\n reverse(x, axis) {\n assertNotComplex(x, 'reverse');\n const buffer = tf.buffer(x.shape, x.dtype);\n const xBuf = this.bufferSync(x);\n for (let i = 0; i < buffer.size; i++) {\n const outLoc = buffer.indexToLoc(i);\n const inLoc = outLoc.slice();\n axis.forEach(ax => inLoc[ax] = x.shape[ax] - 1 - inLoc[ax]);\n buffer.set(xBuf.get(...inLoc), ...outLoc);\n }\n return buffer.toTensor();\n }\n neg(x) {\n assertNotComplex(x, 'neg');\n // TODO(lina128): Use mul directly once neg is modularized.\n return tf.mul(tf.scalar(-1), x);\n }\n addN(tensors) {\n assertNotComplex(tensors, 'addN');\n const vals = tensors.map(t => this.readSync(t.dataId));\n const result = tf.buffer(tensors[0].shape, tensors[0].dtype);\n const resultVals = result.values;\n for (let i = 0; i < tensors.length; i++) {\n const currVals = vals[i];\n for (let j = 0; j < resultVals.length; j++) {\n resultVals[j] += currVals[j];\n }\n }\n return result.toTensor();\n }\n softmax(logits, dim) {\n const axes = util.parseAxisParam([dim], logits.shape);\n // TODO(annxingyuan): Call maxImpl rather than op as part of softmax kernel\n // modularization.\n const maxLogit = max(logits, axes);\n const expandedShape = backend_util.expandShapeToKeepDim(maxLogit.shape, axes);\n // TODO(lina128): Use sub directly once softmax is modularized.\n const a = tf.sub(logits, maxLogit.reshape(expandedShape));\n const b = tf.exp(a);\n const sumExp = this.sum(b, axes).reshape(expandedShape);\n // TODO(annxingyuan): Call divImpl rather than op as part of softmax\n // kernel modularization.\n return tf.div(b, sumExp);\n }\n pow(a, b) {\n assertNotComplex([a, b], 'pow');\n return this.broadcastedBinaryOp(a, b, a.dtype, (aValue, bValue) => Math.pow(aValue, bValue));\n }\n floorDiv(a, b) {\n assertNotComplex([a, b], 'floorDiv');\n const op = (a, b) => Math.floor(a / b);\n const outputDtype = 'int32';\n return this.broadcastedBinaryOp(a, b, outputDtype, op);\n }\n sum(x, axes) {\n assertNotComplex(x, 'sum');\n backend_util.assertAxesAreInnerMostDims('sum', axes, x.rank);\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const 
resultDtype = upcastType(x.dtype, 'int32');\n const result = tf.zeros(outShape, resultDtype);\n const reduceSize = util.sizeFromShape(reduceShape);\n const vals = this.readSync(result.dataId);\n const aVals = this.readSync(x.dataId);\n for (let i = 0; i < vals.length; ++i) {\n const offset = i * reduceSize;\n let sum = 0;\n for (let j = 0; j < reduceSize; ++j) {\n sum += aVals[offset + j];\n }\n vals[i] = sum;\n }\n return result;\n }\n prod(x, axes) {\n assertNotComplex(x, 'sum');\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const resultDtype = upcastType(x.dtype, 'int32');\n const result = tf.zeros(outShape, resultDtype);\n const reduceSize = util.sizeFromShape(reduceShape);\n const vals = this.readSync(result.dataId);\n const aVals = this.readSync(x.dataId);\n for (let i = 0; i < vals.length; ++i) {\n const offset = i * reduceSize;\n let prod = 1;\n for (let j = 0; j < reduceSize; ++j) {\n prod *= aVals[offset + j];\n }\n vals[i] = prod;\n }\n return result;\n }\n unsortedSegmentSum(x, segmentIds, numSegments) {\n assertNotComplex(x, 'unsortedSegmentSum');\n const res = [];\n // Reshape the segment id's so that they can be broadcast with\n // x. The new shape should be [segmentIds.shape, 1, ..., 1]\n const numIters = x.rank - segmentIds.rank;\n for (let i = 0; i < numIters; ++i) {\n segmentIds = segmentIds.expandDims(i + 1);\n }\n for (let i = 0; i < numSegments; ++i) {\n const segmentId = tf.scalar(i, 'int32');\n const mask = tf.equal(segmentId, segmentIds).asType('float32');\n const sum = mask.mul(x).sum(0);\n res.push(sum);\n }\n return tf.stack(res);\n }\n argMin(x, axis) {\n assertNotComplex(x, 'argMin');\n const axes = [axis];\n backend_util.assertAxesAreInnerMostDims('argMin', axes, x.rank);\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const result = tf.zeros(outShape, 'int32');\n const reduceSize = util.sizeFromShape(reduceShape);\n const vals = this.readSync(result.dataId);\n const aVals = this.readSync(x.dataId);\n for (let i = 0; i < vals.length; ++i) {\n const offset = i * reduceSize;\n let min = aVals[offset];\n let minIndex = 0;\n for (let j = 0; j < reduceSize; ++j) {\n const value = aVals[offset + j];\n if (value < min) {\n min = value;\n minIndex = j;\n }\n }\n vals[i] = minIndex;\n }\n return result;\n }\n argMax(x, axis) {\n assertNotComplex(x, 'argMax');\n const axes = [axis];\n backend_util.assertAxesAreInnerMostDims('argMax', axes, x.rank);\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const result = tf.zeros(outShape, 'int32');\n const reduceSize = util.sizeFromShape(reduceShape);\n const vals = this.readSync(result.dataId);\n const aVals = this.readSync(x.dataId);\n for (let i = 0; i < vals.length; ++i) {\n const offset = i * reduceSize;\n let max = aVals[offset];\n let maxIndex = 0;\n for (let j = 0; j < reduceSize; ++j) {\n const value = aVals[offset + j];\n if (value > max) {\n max = value;\n maxIndex = j;\n }\n }\n vals[i] = maxIndex;\n }\n return result;\n }\n cumsum(x, axis, exclusive, reverse) {\n assertNotComplex(x, 'cumsum');\n if (axis !== x.rank - 1) {\n throw new Error(`backend.cumsum in CPU expects an inner-most axis=${x.rank - 1} ` +\n `but got axis=${axis}`);\n }\n const resultDtype = upcastType(x.dtype, 'int32');\n const result = tf.zeros(x.shape, resultDtype);\n const vals = this.readSync(result.dataId);\n const aVals = this.readSync(x.dataId);\n const finalDim = x.shape[x.rank - 1];\n const indexAdjuster = 
reverse ?\n (i, j) => i + finalDim - j - 1 :\n (i, j) => i + j;\n for (let i = 0; i < aVals.length; i += finalDim) {\n for (let j = 0; j < finalDim; j++) {\n const idx = indexAdjuster(i, j);\n if (j === 0) {\n vals[idx] = exclusive ? 0 : aVals[idx];\n }\n else {\n const prevIdx = indexAdjuster(i, j - 1);\n vals[idx] = exclusive ? aVals[prevIdx] + vals[prevIdx] :\n aVals[idx] + vals[prevIdx];\n }\n }\n }\n return result;\n }\n equal(a, b) {\n assertNotComplex([a, b], 'equal');\n return this.broadcastedBinaryOp(a, b, 'bool', (aVal, bVal) => {\n return (aVal === bVal) ? 1 : 0;\n });\n }\n notEqual(a, b) {\n assertNotComplex([a, b], 'notEqual');\n return this.broadcastedBinaryOp(a, b, 'bool', (aVal, bVal) => {\n return (aVal !== bVal) ? 1 : 0;\n });\n }\n less(a, b) {\n assertNotComplex([a, b], 'less');\n return this.broadcastedBinaryOp(a, b, 'bool', (aVal, bVal) => {\n return (aVal < bVal) ? 1 : 0;\n });\n }\n lessEqual(a, b) {\n assertNotComplex([a, b], 'lessEqual');\n return this.broadcastedBinaryOp(a, b, 'bool', (aVal, bVal) => {\n return (aVal <= bVal) ? 1 : 0;\n });\n }\n greater(a, b) {\n assertNotComplex([a, b], 'greater');\n return this.broadcastedBinaryOp(a, b, 'bool', (aVal, bVal) => {\n return (aVal > bVal) ? 1 : 0;\n });\n }\n greaterEqual(a, b) {\n assertNotComplex([a, b], 'greaterEqual');\n return this.broadcastedBinaryOp(a, b, 'bool', (aVal, bVal) => {\n return (aVal >= bVal) ? 1 : 0;\n });\n }\n logicalAnd(a, b) {\n assertNotComplex([a, b], 'logicalAnd');\n return this.broadcastedBinaryOp(a, b, 'bool', (aVal, bVal) => {\n return aVal && bVal;\n });\n }\n logicalOr(a, b) {\n assertNotComplex([a, b], 'logicalOr');\n return this.broadcastedBinaryOp(a, b, 'bool', (aVal, bVal) => {\n return aVal || bVal;\n });\n }\n select(condition, a, b) {\n assertNotComplex([condition, a, b], 'select');\n const values = this.readSync(condition.dataId);\n const aValues = this.readSync(a.dataId);\n const bValues = this.readSync(b.dataId);\n const result = tf.zeros(a.shape, upcastType(a.dtype, b.dtype));\n const newValues = this.readSync(result.dataId);\n let index = 0;\n const offset = condition.rank === 0 || condition.rank > 1 || a.rank === 1 ?\n 1 :\n util.sizeFromShape(a.shape.slice(1));\n for (let i = 0; i < values.length; i++) {\n for (let j = 0; j < offset; j++) {\n if (values[i] === 1) {\n newValues[index++] = aValues[i];\n }\n else {\n newValues[index++] = bValues[i];\n }\n }\n }\n return result;\n }\n where(condition) {\n assertNotComplex([condition], 'where');\n const condVals = this.readSync(condition.dataId);\n return whereImpl(condition.shape, condVals);\n }\n topk(x, k, sorted) {\n assertNotComplex(x, 'topk');\n const xVals = this.readSync(x.dataId);\n return topkImpl(xVals, x.shape, x.dtype, k, sorted);\n }\n min(x, axes) {\n assertNotComplex(x, 'min');\n backend_util.assertAxesAreInnerMostDims('min', axes, x.rank);\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const result = tf.zeros(outShape, x.dtype);\n const reduceSize = util.sizeFromShape(reduceShape);\n const vals = this.readSync(result.dataId);\n const aVals = this.readSync(x.dataId);\n for (let i = 0; i < vals.length; ++i) {\n const offset = i * reduceSize;\n let min = aVals[offset];\n for (let j = 0; j < reduceSize; ++j) {\n const value = aVals[offset + j];\n if (value < min) {\n min = value;\n }\n }\n vals[i] = min;\n }\n return result;\n }\n minimum(a, b) {\n assertNotComplex([a, b], 'minimum');\n return this.broadcastedBinaryOp(a, b, a.dtype, (aVal, bVal) => Math.min(aVal, 
bVal));\n }\n mod(a, b) {\n assertNotComplex([a, b], 'mod');\n return this.broadcastedBinaryOp(a, b, a.dtype, (aVal, bVal) => {\n const rem = aVal % bVal;\n if ((aVal < 0 && bVal < 0) || (aVal >= 0 && bVal >= 0)) {\n return rem;\n }\n else {\n return (rem + bVal) % bVal;\n }\n });\n }\n maximum(a, b) {\n assertNotComplex([a, b], 'maximum');\n return this.broadcastedBinaryOp(a, b, a.dtype, (aVal, bVal) => Math.max(aVal, bVal));\n }\n all(x, axes) {\n assertNotComplex(x, 'all');\n backend_util.assertAxesAreInnerMostDims('all', axes, x.rank);\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const result = tf.zeros(outShape, x.dtype);\n const reduceSize = util.sizeFromShape(reduceShape);\n const vals = this.readSync(result.dataId);\n const aVals = this.readSync(x.dataId);\n for (let i = 0; i < vals.length; ++i) {\n const offset = i * reduceSize;\n let all = aVals[offset];\n for (let j = 0; j < reduceSize; ++j) {\n const value = aVals[offset + j];\n all = all && value;\n }\n vals[i] = all;\n }\n return result;\n }\n any(x, axes) {\n assertNotComplex(x, 'any');\n backend_util.assertAxesAreInnerMostDims('any', axes, x.rank);\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const result = tf.zeros(outShape, x.dtype);\n const reduceSize = util.sizeFromShape(reduceShape);\n const vals = this.readSync(result.dataId);\n const aVals = this.readSync(x.dataId);\n for (let i = 0; i < vals.length; ++i) {\n const offset = i * reduceSize;\n let anyVal = aVals[offset];\n for (let j = 0; j < reduceSize; ++j) {\n const value = aVals[offset + j];\n anyVal = anyVal || value;\n }\n vals[i] = anyVal;\n }\n return result;\n }\n squaredDifference(a, b) {\n assertNotComplex([a, b], 'squaredDifference');\n return this.broadcastedBinaryOp(a, b, a.dtype, (aVal, bVal) => {\n const diff = aVal - bVal;\n return diff * diff;\n });\n }\n eluDer(dy, y) {\n assertNotComplex([dy, y], 'eluDer');\n const resultValues = new Float32Array(y.size);\n const values = this.readSync(y.dataId);\n const dyValues = this.readSync(dy.dataId);\n for (let i = 0; i < values.length; ++i) {\n const v = values[i];\n if (v >= 1) {\n resultValues[i] = dyValues[i];\n }\n else {\n resultValues[i] = dyValues[i] * (v + 1);\n }\n }\n return this.makeOutput(resultValues, y.shape, 'float32');\n }\n atan2(a, b) {\n assertNotComplex([a, b], 'atan2');\n return this.broadcastedBinaryOp(a, b, a.dtype, (aValue, bValue) => Math.atan2(aValue, bValue));\n }\n tile(x, reps) {\n assertNotComplex(x, 'tile');\n return tile(this.bufferSync(x), reps);\n }\n gather(x, indices, axis) {\n assertNotComplex([x, indices], 'gather');\n const newShape = x.shape.slice();\n const indicesValues = this.readSync(indices.dataId);\n newShape[axis] = indicesValues.length;\n const result = tf.buffer(newShape, x.dtype);\n const xBuf = this.bufferSync(x);\n for (let i = 0; i < result.size; ++i) {\n const newLoc = result.indexToLoc(i);\n const originalLoc = newLoc.slice();\n originalLoc[axis] = indicesValues[newLoc[axis]];\n const originalIndex = xBuf.locToIndex(originalLoc);\n result.values[i] = xBuf.values[originalIndex];\n }\n return result.toTensor();\n }\n batchToSpaceND(x, blockShape, crops) {\n assertNotComplex([x], 'batchToSpaceND');\n const prod = blockShape.reduce((a, b) => a * b);\n const reshaped = backend_util.getReshaped(x.shape, blockShape, prod);\n const permuted = backend_util.getPermuted(reshaped.length, blockShape.length);\n const reshapedPermuted = backend_util.getReshapedPermuted(x.shape, 
blockShape, prod);\n const sliceBeginCoords = backend_util.getSliceBeginCoords(crops, blockShape.length);\n const sliceSize = backend_util.getSliceSize(reshapedPermuted, crops, blockShape.length);\n return tf.transpose(x.reshape(reshaped), permuted)\n .reshape(reshapedPermuted)\n .slice(sliceBeginCoords, sliceSize);\n }\n pool3d(x, convInfo, poolType) {\n assertNotComplex(x, 'pool3d');\n const strideDepth = convInfo.strideDepth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationDepth = convInfo.dilationDepth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterDepth = convInfo.effectiveFilterDepth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padFront = convInfo.padInfo.front;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n const initialValue = (poolType === 'max' ? Number.NEGATIVE_INFINITY :\n Number.POSITIVE_INFINITY);\n const xValues = this.readSync(x.dataId);\n const output = tf.buffer(convInfo.outShape, x.dtype);\n const outputVals = output.values;\n const outputBatchStrides = convInfo.outShape[1] * convInfo.outShape[2] *\n convInfo.outShape[3] * convInfo.outShape[4];\n const outputDepthStrides = convInfo.outShape[2] * convInfo.outShape[3] * convInfo.outShape[4];\n const outputRowStrides = convInfo.outShape[3] * convInfo.outShape[4];\n const outputColStrides = convInfo.outShape[4];\n for (let batch = 0; batch < convInfo.batchSize; ++batch) {\n const outputBatchOffset = batch * outputBatchStrides;\n const inputBatchOffset = batch * x.strides[0];\n for (let channel = 0; channel < convInfo.inChannels; ++channel) {\n for (let yDepth = 0; yDepth < convInfo.outDepth; ++yDepth) {\n const xDepthCorner = yDepth * strideDepth - padFront;\n let xDepthMin = xDepthCorner;\n while (xDepthMin < 0) {\n xDepthMin += dilationDepth;\n }\n const xDepthMax = Math.min(convInfo.inDepth, effectiveFilterDepth + xDepthCorner);\n const outputDepthOffset = outputBatchOffset + yDepth * outputDepthStrides;\n for (let yRow = 0; yRow < convInfo.outHeight; ++yRow) {\n const xRowCorner = yRow * strideHeight - padTop;\n let xRowMin = xRowCorner;\n while (xRowMin < 0) {\n xRowMin += dilationHeight;\n }\n const xRowMax = Math.min(convInfo.inHeight, effectiveFilterHeight + xRowCorner);\n const outputRowOffset = outputDepthOffset + yRow * outputRowStrides;\n for (let yCol = 0; yCol < convInfo.outWidth; ++yCol) {\n const xColCorner = yCol * strideWidth - padLeft;\n let xColMin = xColCorner;\n while (xColMin < 0) {\n xColMin += dilationWidth;\n }\n const xColMax = Math.min(convInfo.inWidth, effectiveFilterWidth + xColCorner);\n // Shader code begins\n const outputColOffset = outputRowOffset + yCol * outputColStrides;\n let minMaxValue = initialValue;\n let avgValue = 0;\n let count = 0;\n for (let xDepth = xDepthMin; xDepth < xDepthMax; xDepth += dilationDepth) {\n const xDepthOffset = inputBatchOffset + xDepth * x.strides[1];\n for (let xRow = xRowMin; xRow < xRowMax; xRow += dilationHeight) {\n const xRowOffset = xDepthOffset + xRow * x.strides[2];\n for (let xCol = xColMin; xCol < xColMax; xCol += dilationWidth) {\n const xColOffset = xRowOffset + xCol * x.strides[3];\n const pixel = xValues[xColOffset + channel];\n if ((poolType === 'max' && pixel > minMaxValue)) {\n minMaxValue = pixel;\n }\n else if (poolType === 'avg') {\n avgValue += pixel;\n count++;\n }\n if 
(isNaN(minMaxValue)) {\n break;\n }\n }\n if (isNaN(minMaxValue)) {\n break;\n }\n }\n if (isNaN(minMaxValue)) {\n break;\n }\n }\n const outputOffset = outputColOffset + channel;\n outputVals[outputOffset] =\n poolType === 'avg' ? avgValue / count : minMaxValue;\n }\n }\n }\n }\n }\n return output.toTensor();\n }\n avgPool3d(x, convInfo) {\n assertNotComplex(x, 'avgPool3d');\n return this.pool3d(x, convInfo, 'avg').toFloat();\n }\n avgPool3dBackprop(dy, x, convInfo) {\n assertNotComplex([dy, x], 'avgPool3dBackprop');\n const strideDepth = convInfo.strideDepth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const filterDepth = convInfo.filterDepth;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const dilationDepth = convInfo.dilationDepth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterDepth = convInfo.effectiveFilterDepth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padFront = effectiveFilterDepth - 1 - convInfo.padInfo.front;\n const padLeft = effectiveFilterWidth - 1 - convInfo.padInfo.left;\n const padTop = effectiveFilterHeight - 1 - convInfo.padInfo.top;\n const dx = tf.buffer(x.shape, 'float32');\n const avgMultiplier = 1 / (filterDepth * filterHeight * filterWidth);\n const dyBuf = this.bufferSync(dy);\n for (let batch = 0; batch < convInfo.batchSize; ++batch) {\n for (let channel = 0; channel < convInfo.inChannels; ++channel) {\n for (let dxDepth = 0; dxDepth < convInfo.inDepth; ++dxDepth) {\n for (let dxRow = 0; dxRow < convInfo.inHeight; ++dxRow) {\n for (let dxCol = 0; dxCol < convInfo.inWidth; ++dxCol) {\n // Shader code begins.\n const dyDepthCorner = dxDepth - padFront;\n const dyRowCorner = dxRow - padTop;\n const dyColCorner = dxCol - padLeft;\n let dotProd = 0;\n for (let wDepth = 0; wDepth < effectiveFilterDepth; wDepth += dilationDepth) {\n const dyDepth = (dyDepthCorner + wDepth) / strideDepth;\n if (dyDepth < 0 || dyDepth >= convInfo.outDepth ||\n Math.floor(dyDepth) !== dyDepth) {\n continue;\n }\n for (let wRow = 0; wRow < effectiveFilterHeight; wRow += dilationHeight) {\n const dyRow = (dyRowCorner + wRow) / strideHeight;\n if (dyRow < 0 || dyRow >= convInfo.outHeight ||\n Math.floor(dyRow) !== dyRow) {\n continue;\n }\n for (let wCol = 0; wCol < effectiveFilterWidth; wCol += dilationWidth) {\n const dyCol = (dyColCorner + wCol) / strideWidth;\n if (dyCol < 0 || dyCol >= convInfo.outWidth ||\n Math.floor(dyCol) !== dyCol) {\n continue;\n }\n const pixel = dyBuf.get(batch, dyDepth, dyRow, dyCol, channel);\n dotProd += pixel;\n }\n }\n }\n dx.set(dotProd * avgMultiplier, batch, dxDepth, dxRow, dxCol, channel);\n }\n }\n }\n }\n }\n return dx.toTensor();\n }\n maxPool3d(x, convInfo) {\n assertNotComplex(x, 'maxPool3d');\n return this.pool3d(x, convInfo, 'max').toFloat();\n }\n maxPool3dPositions(x, convInfo) {\n const maxPositions = tf.buffer(convInfo.outShape, 'int32');\n const strideDepth = convInfo.strideDepth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationDepth = convInfo.dilationDepth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterDepth = convInfo.effectiveFilterDepth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = 
convInfo.effectiveFilterWidth;\n const padFront = convInfo.padInfo.front;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n const xBuf = this.bufferSync(x);\n for (let batch = 0; batch < convInfo.batchSize; ++batch) {\n for (let channel = 0; channel < convInfo.inChannels; ++channel) {\n for (let yDepth = 0; yDepth < convInfo.outDepth; ++yDepth) {\n const xDepthCorner = yDepth * strideDepth - padFront;\n let xDepthMin = xDepthCorner;\n while (xDepthMin < 0) {\n xDepthMin += dilationDepth;\n }\n const xDepthMax = Math.min(convInfo.inDepth, effectiveFilterDepth + xDepthCorner);\n for (let yRow = 0; yRow < convInfo.outHeight; ++yRow) {\n const xRowCorner = yRow * strideHeight - padTop;\n let xRowMin = xRowCorner;\n while (xRowMin < 0) {\n xRowMin += dilationHeight;\n }\n const xRowMax = Math.min(convInfo.inHeight, effectiveFilterHeight + xRowCorner);\n for (let yCol = 0; yCol < convInfo.outWidth; ++yCol) {\n const xColCorner = yCol * strideWidth - padLeft;\n let xColMin = xColCorner;\n while (xColMin < 0) {\n xColMin += dilationWidth;\n }\n const xColMax = Math.min(convInfo.inWidth, effectiveFilterWidth + xColCorner);\n // Shader code begins\n let maxValue = Number.NEGATIVE_INFINITY;\n let maxPosition = -1;\n for (let xDepth = xDepthMin; xDepth < xDepthMax; xDepth += dilationDepth) {\n const wDepth = xDepth - xDepthCorner;\n for (let xRow = xRowMin; xRow < xRowMax; xRow += dilationHeight) {\n const wRow = xRow - xRowCorner;\n for (let xCol = xColMin; xCol < xColMax; xCol += dilationWidth) {\n const wCol = xCol - xColCorner;\n const pixel = xBuf.get(batch, xDepth, xRow, xCol, channel);\n if (pixel >= maxValue) {\n maxValue = pixel;\n maxPosition = wDepth * effectiveFilterHeight *\n effectiveFilterWidth +\n wRow * effectiveFilterHeight + wCol;\n }\n }\n }\n }\n maxPositions.set(maxPosition, batch, yDepth, yRow, yCol, channel);\n }\n }\n }\n }\n }\n return maxPositions.toTensor();\n }\n maxPool3dBackprop(dy, x, y, convInfo) {\n assertNotComplex([x, y], 'maxPool3dBackprop');\n const maxPositions = this.maxPool3dPositions(x, convInfo);\n const strideDepth = convInfo.strideDepth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationDepth = convInfo.dilationDepth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterDepth = convInfo.effectiveFilterDepth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padFront = effectiveFilterDepth - 1 - convInfo.padInfo.front;\n const padLeft = effectiveFilterWidth - 1 - convInfo.padInfo.left;\n const padTop = effectiveFilterHeight - 1 - convInfo.padInfo.top;\n const dx = tf.buffer(x.shape, 'float32');\n const maxPosBuf = this.bufferSync(maxPositions);\n const dyBuf = this.bufferSync(dy);\n for (let batch = 0; batch < convInfo.batchSize; ++batch) {\n for (let channel = 0; channel < convInfo.inChannels; ++channel) {\n for (let dxDepth = 0; dxDepth < convInfo.inDepth; ++dxDepth) {\n for (let dxRow = 0; dxRow < convInfo.inHeight; ++dxRow) {\n for (let dxCol = 0; dxCol < convInfo.inWidth; ++dxCol) {\n // Shader code begins\n const dyDepthCorner = dxDepth - padFront;\n const dyRowCorner = dxRow - padTop;\n const dyColCorner = dxCol - padLeft;\n let dotProd = 0;\n for (let wDepth = 0; wDepth < effectiveFilterDepth; wDepth += dilationDepth) {\n const dyDepth = (dyDepthCorner + wDepth) / strideDepth;\n if (dyDepth < 0 || dyDepth >= 
convInfo.outDepth ||\n Math.floor(dyDepth) !== dyDepth) {\n continue;\n }\n for (let wRow = 0; wRow < effectiveFilterHeight; wRow += dilationHeight) {\n const dyRow = (dyRowCorner + wRow) / strideHeight;\n if (dyRow < 0 || dyRow >= convInfo.outHeight ||\n Math.floor(dyRow) !== dyRow) {\n continue;\n }\n for (let wCol = 0; wCol < effectiveFilterWidth; wCol += dilationWidth) {\n const dyCol = (dyColCorner + wCol) / strideWidth;\n if (dyCol < 0 || dyCol >= convInfo.outWidth ||\n Math.floor(dyCol) !== dyCol) {\n continue;\n }\n const maxPos = effectiveFilterDepth *\n effectiveFilterHeight * effectiveFilterWidth -\n 1 -\n maxPosBuf.get(batch, dyDepth, dyRow, dyCol, channel);\n const curPos = wDepth * effectiveFilterHeight * effectiveFilterWidth +\n wRow * effectiveFilterWidth + wCol;\n const mask = maxPos === curPos ? 1 : 0;\n if (mask === 0) {\n continue;\n }\n const pixel = dyBuf.get(batch, dyDepth, dyRow, dyCol, channel);\n dotProd += pixel * mask;\n }\n }\n }\n dx.set(dotProd, batch, dxDepth, dxRow, dxCol, channel);\n }\n }\n }\n }\n }\n return dx.toTensor();\n }\n resizeBilinear(x, newHeight, newWidth, alignCorners) {\n assertNotComplex(x, 'resizeBilinear');\n const [batch, oldHeight, oldWidth, numChannels] = x.shape;\n const xValues = this.readSync(x.dataId);\n const result = new Float32Array(util.sizeFromShape([batch, newHeight, newWidth, numChannels]));\n const effectiveInputSize = [\n (alignCorners && newHeight > 1) ? oldHeight - 1 : oldHeight,\n (alignCorners && newWidth > 1) ? oldWidth - 1 : oldWidth\n ];\n const effectiveOutputSize = [\n (alignCorners && newHeight > 1) ? newHeight - 1 : newHeight,\n (alignCorners && newWidth > 1) ? newWidth - 1 : newWidth\n ];\n let outputIdx = 0;\n const effectiveRowSizeRatio = effectiveInputSize[0] / effectiveOutputSize[0];\n const effectiveColSizeRatio = effectiveInputSize[1] / effectiveOutputSize[1];\n for (let b = 0; b < batch; b++) {\n for (let r = 0; r < newHeight; r++) {\n const sourceFracRow = effectiveRowSizeRatio * r;\n const sourceRowFloor = Math.floor(sourceFracRow);\n const rowFrac = sourceFracRow - sourceRowFloor;\n const sourceRowCeil = Math.min(oldHeight - 1, Math.ceil(sourceFracRow));\n const topRowOffset = b * x.strides[0] + sourceRowFloor * x.strides[1];\n const botRowOffset = b * x.strides[0] + sourceRowCeil * x.strides[1];\n for (let c = 0; c < newWidth; c++) {\n const sourceFracCol = effectiveColSizeRatio * c;\n const sourceColFloor = Math.floor(sourceFracCol);\n const colFrac = sourceFracCol - sourceColFloor;\n const sourceColCeil = Math.min(oldWidth - 1, Math.ceil(sourceFracCol));\n const topLeftOffest = topRowOffset + sourceColFloor * x.strides[2];\n const botLeftOffset = botRowOffset + sourceColFloor * x.strides[2];\n const topRightOffset = topRowOffset + sourceColCeil * x.strides[2];\n const botRightOffest = botRowOffset + sourceColCeil * x.strides[2];\n for (let d = 0; d < numChannels; d++) {\n // Begin shader.\n // Compute the fractional index of the source.\n const topLeft = xValues[topLeftOffest + d];\n const bottomLeft = xValues[botLeftOffset + d];\n const topRight = xValues[topRightOffset + d];\n const bottomRight = xValues[botRightOffest + d];\n const top = topLeft + (topRight - topLeft) * colFrac;\n const bottom = bottomLeft + (bottomRight - bottomLeft) * colFrac;\n const newValue = top + (bottom - top) * rowFrac;\n result[outputIdx++] = newValue;\n }\n }\n }\n }\n return tf.tensor(result, [batch, newHeight, newWidth, numChannels]);\n }\n resizeBilinearBackprop(dy, x, alignCorners) {\n assertNotComplex([dy, x], 
'resizeBilinearBackprop');\n const [batch, xHeight, xWidth, depth] = x.shape;\n const [, yHeight, yWidth] = dy.shape;\n const output = new Float32Array(batch * xHeight * xWidth * depth);\n // In the backwards pass, we want to find the pixels that were generated\n // for each pixel in the input image the forward pass and add the\n // corresponding coefficient from dy to the gradient (with some\n // interpolation).\n const effectiveXSize = [\n (alignCorners && yHeight > 1) ? xHeight - 1 : xHeight,\n (alignCorners && yWidth > 1) ? xWidth - 1 : xWidth\n ];\n const effectiveYSize = [\n (alignCorners && yHeight > 1) ? yHeight - 1 : yHeight,\n (alignCorners && yWidth > 1) ? yWidth - 1 : yWidth\n ];\n const heightScale = effectiveXSize[0] / effectiveYSize[0];\n const widthScale = effectiveXSize[1] / effectiveYSize[1];\n // Reference implementation\n // tslint:disable-next-line:max-line-length\n // https://github.com/tensorflow/tensorflow/blob/3039375c86a5bbc9610c7725dcaa95d635f87ba2/tensorflow/core/kernels/resize_bilinear_op.cc#L275\n const dyValues = this.readSync(dy.dataId);\n let offset = 0;\n for (let b = 0; b < batch; b++) {\n const bOffset = b * x.strides[0];\n for (let r = 0; r < yHeight; r++) {\n const dxR = r * heightScale;\n const topDxRIndex = Math.floor(dxR);\n const bottomDxRIndex = Math.min(Math.ceil(dxR), xHeight - 1);\n const topDxROffset = bOffset + topDxRIndex * x.strides[1];\n const bottomDxROffset = bOffset + bottomDxRIndex * x.strides[1];\n const dxRLerp = dxR - topDxRIndex;\n const inverseDxRLerp = 1.0 - dxRLerp;\n for (let c = 0; c < yWidth; c++) {\n const dxC = c * widthScale;\n const leftDxCIndex = Math.floor(dxC);\n const rightDxCIndex = Math.min(Math.ceil(dxC), xWidth - 1);\n const dxCLerp = dxC - leftDxCIndex;\n const inverseDxCLerp = 1.0 - dxCLerp;\n const topLeftRCOffset = topDxROffset + leftDxCIndex * x.strides[2];\n const topRightRCOffset = topDxROffset + rightDxCIndex * x.strides[2];\n const bottomLeftRCOffset = bottomDxROffset + leftDxCIndex * x.strides[2];\n const bottomRightRCOffset = bottomDxROffset + rightDxCIndex * x.strides[2];\n const inverseDxRLerpTimesInverseDxCLerp = inverseDxRLerp * inverseDxCLerp;\n const inverseDxRLerpTimesDxCLerp = inverseDxRLerp * dxCLerp;\n const dxRLerpTimesInverseDxCLerp = dxRLerp * inverseDxCLerp;\n const dxRLerpTimesDxCLerp = dxRLerp * dxCLerp;\n for (let d = 0; d < depth; d++) {\n const dyVal = dyValues[offset++];\n output[topLeftRCOffset + d] +=\n dyVal * inverseDxRLerpTimesInverseDxCLerp;\n output[topRightRCOffset + d] += dyVal * inverseDxRLerpTimesDxCLerp;\n output[bottomLeftRCOffset + d] +=\n dyVal * dxRLerpTimesInverseDxCLerp;\n output[bottomRightRCOffset + d] += dyVal * dxRLerpTimesDxCLerp;\n }\n }\n }\n }\n return tf.tensor4d(output, [batch, xWidth, xHeight, depth], x.dtype);\n }\n resizeNearestNeighbor(x, newHeight, newWidth, alignCorners) {\n assertNotComplex(x, 'resizeNearestNeighbor');\n const [batch, oldHeight, oldWidth, numChannels] = x.shape;\n const xValues = this.readSync(x.dataId);\n const output = new Float32Array(batch * newHeight * newWidth * numChannels);\n const effectiveInputSize = [\n (alignCorners && newHeight > 1) ? oldHeight - 1 : oldHeight,\n (alignCorners && newWidth > 1) ? oldWidth - 1 : oldWidth\n ];\n const effectiveOutputSize = [\n (alignCorners && newHeight > 1) ? newHeight - 1 : newHeight,\n (alignCorners && newWidth > 1) ? 
newWidth - 1 : newWidth\n ];\n const effectiveRowSizeRatio = effectiveInputSize[0] / effectiveOutputSize[0];\n const effectiveColSizeRatio = effectiveInputSize[1] / effectiveOutputSize[1];\n let outputOffset = 0;\n for (let b = 0; b < batch; b++) {\n const batchOffset = b * x.strides[0];\n for (let r = 0; r < newHeight; r++) {\n const sourceFracRow = effectiveRowSizeRatio * r;\n const sourceNearestRow = Math.min(oldHeight - 1, alignCorners ? Math.round(sourceFracRow) :\n Math.floor(sourceFracRow));\n const rowOffset = batchOffset + sourceNearestRow * x.strides[1];\n for (let c = 0; c < newWidth; c++) {\n const sourceFracCol = effectiveColSizeRatio * c;\n const sourceNearestCol = Math.min(oldWidth - 1, alignCorners ? Math.round(sourceFracCol) :\n Math.floor(sourceFracCol));\n const colOffset = rowOffset + sourceNearestCol * x.strides[2];\n for (let d = 0; d < numChannels; d++) {\n // Begin shader.\n // Compute the fractional index of the source.\n const newVal = xValues[colOffset + d];\n output[outputOffset++] = newVal;\n }\n }\n }\n }\n return tf.tensor(output, [batch, newHeight, newWidth, numChannels], x.dtype);\n }\n resizeNearestNeighborBackprop(dy, x, alignCorners) {\n assertNotComplex([dy, x], 'resizeNearestNeighborBackprop');\n const [batch, xHeight, xWidth, depth] = x.shape;\n const [, yHeight, yWidth] = dy.shape;\n const output = new Float32Array(batch * xHeight * xWidth * depth);\n const dyValues = this.readSync(dy.dataId);\n // In the backwards pass, we want to find the pixels that were generated\n // for each pixel in the input image the forward pass\n const effectiveXSize = [\n (alignCorners && yHeight > 1) ? xHeight - 1 : xHeight,\n (alignCorners && yWidth > 1) ? xWidth - 1 : xWidth\n ];\n const effectiveYSize = [\n (alignCorners && yHeight > 1) ? yHeight - 1 : yHeight,\n (alignCorners && yWidth > 1) ? yWidth - 1 : yWidth\n ];\n const heightScale = effectiveXSize[0] / effectiveYSize[0];\n const widthScale = effectiveXSize[1] / effectiveYSize[1];\n const invHeightScale = 1 / heightScale;\n const invWidthScale = 1 / widthScale;\n // This defines the size of the window of values around a particular\n // index in dy that we want to search for contributions to dx.\n const winHeight = (Math.ceil(invHeightScale) * 2) + 2;\n const winWidth = (Math.ceil(invWidthScale) * 2) + 2;\n // Loop over the output space.\n for (let b = 0; b < batch; b++) {\n const batchOffset = b * x.strides[0];\n for (let r = 0; r < xHeight; r++) {\n const rowOffset = batchOffset + r * x.strides[1];\n // Compute bounds for where in dy we will look\n const startRLerp = Math.floor(r * invHeightScale);\n const startDyR = Math.floor(startRLerp - (winHeight / 2));\n for (let c = 0; c < xWidth; c++) {\n const colOffset = rowOffset + c * x.strides[2];\n // Compute bounds for where in dy we will look\n const startCLerp = Math.floor(c * invWidthScale);\n const startDyC = Math.floor(startCLerp - (winWidth / 2));\n for (let d = 0; d < depth; d++) {\n let accum = 0;\n // loop over dy\n for (let dyRIndex = 0; dyRIndex < winHeight; dyRIndex++) {\n const dyR = dyRIndex + startDyR;\n // Guard against the window exceeding the bounds of dy\n if (dyR < 0 || dyR >= yHeight) {\n continue;\n }\n const dyROffset = batchOffset + dyR * dy.strides[1];\n const sourceFracRow = dyR * heightScale;\n const sourceNearestRow = Math.min(xHeight - 1, alignCorners ? 
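// Illustrative sketch (assumed helper, not part of the bundle): the index
// mapping used by resizeNearestNeighbor above, shown for one dimension.
// With alignCorners the source coordinate is rounded; otherwise it is floored.
function nearestSourceIndex(outIndex, oldSize, newSize, alignCorners) {
  const effectiveIn = (alignCorners && newSize > 1) ? oldSize - 1 : oldSize;
  const effectiveOut = (alignCorners && newSize > 1) ? newSize - 1 : newSize;
  const sourceFrac = (effectiveIn / effectiveOut) * outIndex;
  const nearest = alignCorners ? Math.round(sourceFrac) : Math.floor(sourceFrac);
  return Math.min(oldSize - 1, nearest);
}
// e.g. upscaling 2 -> 4 without alignCorners maps outputs [0,1,2,3] to sources [0,0,1,1]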
Math.round(sourceFracRow) :\n Math.floor(sourceFracRow));\n if (r !== sourceNearestRow) {\n continue;\n }\n for (let dyCIndex = 0; dyCIndex < winWidth; dyCIndex++) {\n const dyC = dyCIndex + startDyC;\n // Guard against the window exceeding the bounds of dy\n if (dyC < 0 || dyC >= yWidth) {\n continue;\n }\n const dyCOffset = dyROffset + dyC * dy.strides[2];\n const sourceFracCol = dyC * widthScale;\n const sourceNearestCol = Math.min(xWidth - 1, alignCorners ? Math.round(sourceFracCol) :\n Math.floor(sourceFracCol));\n if (c === sourceNearestCol) {\n accum += dyValues[dyCOffset + d];\n }\n }\n }\n output[colOffset + d] = accum;\n }\n }\n }\n }\n return tf.tensor4d(output, x.shape, x.dtype);\n }\n localResponseNormalization4D(x, depthRadius, bias, alpha, beta) {\n assertNotComplex(x, 'localResponseNormalization4D');\n const channels = x.shape[3];\n const maxD = channels - 1;\n const xValues = this.readSync(x.dataId);\n const size = x.size;\n const result = new Float32Array(size);\n function sumAcrossChannels(offset) {\n const currentChannel = offset % channels;\n let beginSumOffset = offset - currentChannel + Math.max(0, currentChannel - depthRadius);\n const endSumOffset = offset - currentChannel +\n Math.min(currentChannel + depthRadius, maxD);\n let sum = 0.0;\n for (; beginSumOffset <= endSumOffset; beginSumOffset++) {\n const z = xValues[beginSumOffset];\n sum += z * z;\n }\n return sum;\n }\n for (let offset = 0; offset < size; offset++) {\n const sum = sumAcrossChannels(offset);\n const val = xValues[offset] * Math.pow(bias + alpha * sum, -beta);\n result[offset] = val;\n }\n return tf.tensor4d(result, x.shape);\n }\n LRNGrad(dy, inputImage, outputImage, depthRadius, bias, alpha, beta) {\n assertNotComplex(dy, 'LRNGrad');\n const channels = dy.shape[3];\n const dyValues = this.readSync(dy.dataId);\n const inputImageValues = this.readSync(inputImage.dataId);\n const outputImageValues = this.readSync(outputImage.dataId);\n const result = new Float32Array(dy.size);\n const size = dy.size;\n for (let offset = 0; offset < size; offset++) {\n const currentChannel = offset % channels;\n const depthBegin = (offset - currentChannel) + Math.max(0, currentChannel - depthRadius);\n const depthEnd = (offset - currentChannel) +\n Math.min(channels, currentChannel + depthRadius + 1);\n let norm = 0;\n for (let k = depthBegin; k < depthEnd; k++) {\n norm += Math.pow(inputImageValues[k], 2);\n }\n norm = alpha * norm + bias;\n for (let k = depthBegin; k < depthEnd; k++) {\n let dyi = -2 * alpha * beta * inputImageValues[k] *\n outputImageValues[offset] / norm;\n if (offset === k) {\n dyi += Math.pow(norm, -beta);\n }\n dyi *= dyValues[offset];\n result[k] += dyi;\n }\n }\n return tf.tensor4d(result, dy.shape);\n }\n multinomial(logits, normalized, numSamples, seed) {\n assertNotComplex(logits, 'multinomial');\n const probabilities = normalized ? logits : tf.softmax(logits);\n const batchSize = probabilities.shape[0];\n const numEvents = probabilities.shape[1];\n const res = tf.zeros([batchSize, numSamples], 'int32');\n const resVals = this.readSync(res.dataId);\n const probVals = this.readSync(probabilities.dataId);\n for (let b = 0; b < batchSize; ++b) {\n const offset = b * numEvents;\n // The cdf won't include the last event. 
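// Illustrative sketch (assumed helper): the per-pixel channel window that
// localResponseNormalization4D above applies. Each activation is scaled by
// (bias + alpha * sum of squared neighbouring channels) ^ -beta.
function lrnOneChannel(channelVals, c, depthRadius, bias, alpha, beta) {
  const begin = Math.max(0, c - depthRadius);
  const end = Math.min(channelVals.length - 1, c + depthRadius);
  let sumOfSquares = 0;
  for (let k = begin; k <= end; k++) sumOfSquares += channelVals[k] * channelVals[k];
  return channelVals[c] * Math.pow(bias + alpha * sumOfSquares, -beta);
}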
It will be implicit if no other\n // event happened.\n const cdf = new Float32Array(numEvents - 1);\n cdf[0] = probVals[offset];\n for (let event = 1; event < cdf.length; ++event) {\n cdf[event] = cdf[event - 1] + probVals[offset + event];\n }\n const random = seedrandom.alea(seed.toString());\n const outOffset = b * numSamples;\n for (let sampleId = 0; sampleId < numSamples; ++sampleId) {\n const r = random();\n // Assume last event happened by default.\n resVals[outOffset + sampleId] = cdf.length;\n for (let event = 0; event < cdf.length; event++) {\n if (r < cdf[event]) {\n resVals[outOffset + sampleId] = event;\n break;\n }\n }\n }\n }\n return res;\n }\n oneHot(indices, depth, onValue, offValue) {\n assertNotComplex(indices, 'oneHot');\n const res = new Float32Array(indices.size * depth);\n res.fill(offValue);\n const indicesVal = this.readSync(indices.dataId);\n for (let event = 0; event < indices.size; ++event) {\n if (indicesVal[event] >= 0 && indicesVal[event] < depth) {\n res[event * depth + indicesVal[event]] = onValue;\n }\n }\n return tf.tensor2d(res, [indices.size, depth], 'int32');\n }\n nonMaxSuppression(boxes, scores, maxOutputSize, iouThreshold, scoreThreshold) {\n assertNotComplex(boxes, 'nonMaxSuppression');\n const boxesVals = this.readSync(boxes.dataId);\n const scoresVals = this.readSync(scores.dataId);\n return nonMaxSuppressionV3Impl(boxesVals, scoresVals, maxOutputSize, iouThreshold, scoreThreshold);\n }\n depthToSpace(x, blockSize, dataFormat) {\n util.assert(dataFormat === 'NHWC', () => `Only NHWC dataFormat supported on CPU for depthToSpace. Got ${dataFormat}`);\n util.assert(blockSize > 1, () => `blockSize should be > 1 for depthToSpace, but was: ${blockSize}`);\n const batchSize = x.shape[0];\n const inputHeight = x.shape[1];\n const inputWidth = x.shape[2];\n const inputDepth = x.shape[3];\n const outputHeight = inputHeight * blockSize;\n const outputWidth = inputWidth * blockSize;\n const outputDepth = inputDepth / (blockSize * blockSize);\n const xValues = this.readSync(x.dataId);\n const result = new Float32Array(batchSize * outputHeight * outputWidth * outputDepth);\n let outputIdx = 0;\n for (let b = 0; b < batchSize; ++b) {\n for (let h = 0; h < outputHeight; ++h) {\n const inH = Math.floor(h / blockSize);\n const offsetH = (h % blockSize);\n for (let w = 0; w < outputWidth; ++w) {\n const inW = Math.floor(w / blockSize);\n const offsetW = (w % blockSize);\n const offsetD = (offsetH * blockSize + offsetW) * outputDepth;\n for (let d = 0; d < outputDepth; ++d) {\n const inD = d + offsetD;\n const inputIdx = inD + inputDepth * (inW + inputWidth * (inH + inputHeight * b));\n result[outputIdx++] = xValues[inputIdx];\n }\n }\n }\n }\n return tf.tensor4d(result, [batchSize, outputHeight, outputWidth, outputDepth]);\n }\n broadcastedBinaryOp(a, b, dtype, op) {\n const newShape = backend_util.assertAndGetBroadcastShape(a.shape, b.shape);\n const result = tf.buffer(newShape, dtype);\n const aVals = this.readSync(a.dataId);\n const bVals = this.readSync(b.dataId);\n const aBroadcastDims = backend_util.getBroadcastDims(a.shape, newShape);\n const bBroadcastDims = backend_util.getBroadcastDims(b.shape, newShape);\n const resVals = result.values;\n if (aBroadcastDims.length + bBroadcastDims.length === 0) {\n for (let i = 0; i < resVals.length; ++i) {\n resVals[i] = op(aVals[i % aVals.length], bVals[i % bVals.length]);\n }\n }\n else {\n const aBuf = this.bufferSync(a);\n const bBuf = this.bufferSync(b);\n for (let i = 0; i < resVals.length; ++i) {\n const loc = 
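// Illustrative sketch (assumed helper): the inverse-CDF step used by
// multinomial above. A running sum of probabilities is scanned and the first
// bucket the uniform draw falls under wins; the last event is implicit.
function sampleFromProbs(probs, r) {
  let cumulative = 0;
  for (let event = 0; event < probs.length - 1; event++) {
    cumulative += probs[event];
    if (r < cumulative) return event;
  }
  return probs.length - 1; // assume the last event happened
}
// e.g. sampleFromProbs([0.2, 0.5, 0.3], 0.65) === 1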
result.indexToLoc(i);\n const aLoc = loc.slice(-a.rank);\n aBroadcastDims.forEach(d => aLoc[d] = 0);\n const aIndex = aBuf.locToIndex(aLoc);\n const bLoc = loc.slice(-b.rank);\n bBroadcastDims.forEach(d => bLoc[d] = 0);\n const bIndex = bBuf.locToIndex(bLoc);\n resVals[i] = op(aVals[aIndex], bVals[bIndex]);\n }\n }\n return result.toTensor();\n }\n split(x, sizeSplits, axis) {\n return split(x, sizeSplits, axis);\n }\n dispose() { }\n floatPrecision() {\n return 32;\n }\n /** Returns the smallest representable number. */\n epsilon() {\n return super.epsilon();\n }\n cropAndResize(images, boxes, boxIndex, cropSize, method, extrapolationValue) {\n const [batch, imageHeight, imageWidth, numChannels] = images.shape;\n const numBoxes = boxes.shape[0];\n const [cropHeight, cropWidth] = cropSize;\n const output = tf.buffer([numBoxes, cropHeight, cropWidth, numChannels], 'float32');\n const boxVals = this.readSync(boxes.dataId);\n const boxIndVals = this.readSync(boxIndex.dataId);\n const imageVals = this.readSync(images.dataId);\n const inStride = images.strides; // to calculate flat indexes into image\n const outStride = output.strides; // to calculate flat indexes into output\n // Reference implementation\n // tslint:disable-next-line:max-line-length\n // https://github.com/tensorflow/tensorflow/blob/master/tensorflow/core/kernels/crop_and_resize_op.cc\n for (let b = 0; b < numBoxes; b++) {\n const startInd = b * 4;\n const y1 = boxVals[startInd];\n const x1 = boxVals[startInd + 1];\n const y2 = boxVals[startInd + 2];\n const x2 = boxVals[startInd + 3];\n const bInd = boxIndVals[b];\n if (bInd >= batch) {\n continue;\n }\n const heightScale = (cropHeight > 1) ?\n (y2 - y1) * (imageHeight - 1) / (cropHeight - 1) :\n 0;\n const widthScale = (cropWidth > 1) ? 
(x2 - x1) * (imageWidth - 1) / (cropWidth - 1) : 0;\n for (let y = 0; y < cropHeight; y++) {\n const yInd = (cropHeight > 1) ?\n y1 * (imageHeight - 1) + y * (heightScale) :\n 0.5 * (y1 + y2) * (imageHeight - 1);\n if (yInd < 0 || yInd > imageHeight - 1) {\n for (let x = 0; x < cropWidth; x++) {\n for (let c = 0; c < numChannels; c++) {\n const ind = c + x * outStride[2] + y * outStride[1] + b * outStride[0];\n output.values[ind] = extrapolationValue;\n }\n }\n continue;\n }\n if (method === 'bilinear') {\n const topInd = Math.floor(yInd);\n const bottomInd = Math.ceil(yInd);\n const yLerp = yInd - topInd;\n for (let x = 0; x < cropWidth; x++) {\n const xInd = (cropWidth > 1) ?\n x1 * (imageWidth - 1) + x * widthScale :\n 0.5 * (x1 + x2) * (imageWidth - 1);\n if (xInd < 0 || xInd > imageWidth - 1) {\n for (let c = 0; c < numChannels; c++) {\n const ind = c + x * outStride[2] + y * outStride[1] + b * outStride[0];\n output.values[ind] = extrapolationValue;\n }\n continue;\n }\n const leftInd = Math.floor(xInd);\n const rightInd = Math.ceil(xInd);\n const xLerp = xInd - leftInd;\n for (let c = 0; c < numChannels; c++) {\n let ind = c + leftInd * inStride[2] + topInd * inStride[1] +\n bInd * inStride[0];\n const topLeft = imageVals[ind];\n ind = c + rightInd * inStride[2] + topInd * inStride[1] +\n bInd * inStride[0];\n const topRight = imageVals[ind];\n ind = c + leftInd * inStride[2] + bottomInd * inStride[1] +\n bInd * inStride[0];\n const bottomLeft = imageVals[ind];\n ind = c + rightInd * inStride[2] + bottomInd * inStride[1] +\n bInd * inStride[0];\n const bottomRight = imageVals[ind];\n const top = topLeft + (topRight - topLeft) * xLerp;\n const bottom = bottomLeft + (bottomRight - bottomLeft) * xLerp;\n ind = c + x * outStride[2] + y * outStride[1] + b * outStride[0];\n output.values[ind] = top + ((bottom - top) * yLerp);\n }\n }\n }\n else { // method == \"nearest\"\n for (let x = 0; x < cropWidth; ++x) {\n const xInd = (cropWidth > 1) ?\n x1 * (imageWidth - 1) + x * widthScale :\n 0.5 * (x1 + x2) * (imageWidth - 1);\n if (xInd < 0 || xInd > imageWidth - 1) {\n for (let c = 0; c < numChannels; c++) {\n const ind = c + x * outStride[2] + y * outStride[1] + b * outStride[0];\n output.values[ind] = extrapolationValue;\n }\n continue;\n }\n const closestX = Math.round(xInd);\n const closestY = Math.round(yInd);\n for (let c = 0; c < numChannels; c++) {\n const inInd = c + closestX * inStride[2] +\n closestY * inStride[1] + bInd * inStride[0];\n const outInd = c + x * outStride[2] + y * outStride[1] + b * outStride[0];\n output.values[outInd] = imageVals[inInd];\n }\n }\n }\n }\n }\n return output.toTensor();\n }\n sparseToDense(sparseIndices, sparseValues, outputShape, defaultValue) {\n const { sliceRank, numUpdates, sliceSize, strides, outputSize } = backend_util.calculateShapes(sparseValues, sparseIndices, outputShape);\n const sumDupeIndices = false;\n return this.scatter(sparseIndices, sparseValues, outputShape, outputSize, sliceSize, numUpdates, sliceRank, strides, defaultValue, sumDupeIndices);\n }\n gatherND(x, indices) {\n const indicesShape = indices.shape;\n const sliceRank = indicesShape[indicesShape.length - 1];\n const [resultShape, numSlices, sliceSize, strides] = backend_util.prepareAndValidate(x, indices);\n if (numSlices === 0) {\n return tf.tensor([], resultShape, x.dtype);\n }\n const buffer = new TensorBuffer([numSlices, sliceSize], x.dtype);\n const indicesData = this.readSync(indices.dataId);\n const xData = this.readSync(x.dataId);\n for (let i = 0; i < numSlices; 
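// Illustrative sketch (assumed helper): the box-to-image row mapping used by
// cropAndResize above. A normalized box edge pair (y1, y2) is stretched over
// the crop grid; a 1-pixel crop samples the box centre instead.
function cropSourceRow(y1, y2, cropHeight, imageHeight, y) {
  if (cropHeight > 1) {
    const heightScale = (y2 - y1) * (imageHeight - 1) / (cropHeight - 1);
    return y1 * (imageHeight - 1) + y * heightScale;
  }
  return 0.5 * (y1 + y2) * (imageHeight - 1);
}
// rows mapping outside [0, imageHeight - 1] are filled with extrapolationValue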
i++) {\n const index = [];\n let flattenIndex = 0;\n for (let j = 0; j < sliceRank; j++) {\n const dim = indicesData[i * sliceRank + j];\n flattenIndex += dim * strides[j];\n index.push(dim);\n }\n if (flattenIndex < 0 || flattenIndex >= x.size / sliceSize) {\n throw new Error(`Invalid indices: ${index} does not index into ${x.shape}`);\n }\n for (let k = 0; k < sliceSize; k++) {\n buffer.values[i * sliceSize + k] = xData[flattenIndex * sliceSize + k];\n }\n }\n return buffer.toTensor().reshape(resultShape);\n }\n scatterND(indices, updates, shape) {\n const { sliceRank, numUpdates, sliceSize, strides, outputSize } = backend_util.calculateShapes(updates, indices, shape);\n const defaultValue = tf.scalar(0);\n const sumDupeIndices = true;\n return this.scatter(indices, updates, shape, outputSize, sliceSize, numUpdates, sliceRank, strides, defaultValue, sumDupeIndices);\n }\n onesLike(x) {\n if (x.dtype === 'string') {\n throw new Error('onesLike is not supported for string tensors');\n }\n else {\n // TODO(lina128): Use fill kernel directly once this kernel is\n // modularized.\n return tf.fill(x.shape, 1, x.dtype);\n }\n }\n zerosLike(x) {\n const values = util.getArrayFromDType(x.dtype, util.sizeFromShape(x.shape));\n return this.makeOutput(values, x.shape, x.dtype);\n }\n linspace(start, stop, num) {\n return backend_util.linspaceImpl(start, stop, num);\n }\n scatter(indices, updates, shape, outputSize, sliceSize, numUpdates, sliceRank, strides, defaultValue, sumDupeIndices) {\n const flattenShape = [outputSize / sliceSize, sliceSize];\n const indicesData = this.readSync(indices.dataId);\n const updatesData = this.readSync(updates.dataId);\n if (outputSize === 0) {\n return tf.tensor([], shape, updates.dtype);\n }\n const buffer = new TensorBuffer(flattenShape, updates.dtype);\n buffer.values.fill(this.readSync(defaultValue.dataId)[0]);\n for (let i = 0; i < numUpdates; i++) {\n const index = [];\n let flattenIndex = 0;\n for (let j = 0; j < sliceRank; j++) {\n const dim = indicesData[i * sliceRank + j];\n index.push(dim);\n flattenIndex += dim * strides[j];\n }\n if (flattenIndex < 0 || flattenIndex >= outputSize / sliceSize) {\n throw new Error(`Invalid indices: ${index} does not index into ${shape}`);\n }\n for (let k = 0; k < sliceSize; k++) {\n if (sumDupeIndices) {\n buffer.values[flattenIndex * sliceSize + k] +=\n updatesData[i * sliceSize + k];\n }\n else {\n buffer.values[flattenIndex * sliceSize + k] = updates.rank === 0 ?\n updatesData[0] :\n updatesData[i * sliceSize + k];\n }\n }\n }\n return buffer.toTensor().reshape(shape);\n }\n}\n//# sourceMappingURL=backend_cpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
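// Illustrative sketch (assumed helper): how gatherND / scatter above turn an
// index vector into a flat slice offset. Each component is weighted by the
// stride of its dimension, and out-of-range results are rejected.
function flattenSliceIndex(indexVector, strides, numSlices) {
  let flat = 0;
  for (let j = 0; j < indexVector.length; j++) flat += indexVector[j] * strides[j];
  if (flat < 0 || flat >= numSlices) {
    throw new Error(`Invalid indices: ${indexVector} is out of range`);
  }
  return flat;
}
// e.g. flattenSliceIndex([1, 2], [3, 1], 12) === 5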
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Abs, util } from '@tensorflow/tfjs-core';\nexport function simpleAbsImpl(vals) {\n const resultValues = new Float32Array(vals.length);\n for (let i = 0; i < vals.length; ++i) {\n resultValues[i] = Math.abs(vals[i]);\n }\n return resultValues;\n}\nexport const abs = (args) => {\n const { x } = args.inputs;\n const cpuBackend = args.backend;\n let resultValues = new Float32Array(util.sizeFromShape(x.shape));\n if (x.dtype !== 'complex64') {\n const values = cpuBackend.data.get(x.dataId).values;\n resultValues = simpleAbsImpl(values);\n }\n else {\n const complexVals = cpuBackend.data.get(x.dataId);\n const real = complexVals.complexTensorInfos.real;\n const imag = complexVals.complexTensorInfos.imag;\n const realVals = cpuBackend.data.get(real.dataId).values;\n const imagVals = cpuBackend.data.get(imag.dataId).values;\n for (let i = 0; i < realVals.length; i++) {\n const real = realVals[i];\n const imag = imagVals[i];\n resultValues[i] = Math.hypot(real, imag);\n }\n }\n return cpuBackend.makeOutput(resultValues, x.shape, 'float32');\n};\nexport const absConfig = {\n kernelName: Abs,\n backendName: 'cpu',\n kernelFunc: abs,\n};\n//# sourceMappingURL=Abs.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, util } from '@tensorflow/tfjs-core';\n/**\n * Template that creates implementation for binary ops. 
Supports broadcast.\n */\nexport function createSimpleBinaryKernelImpl(op) {\n return (aShape, bShape, aVals, bVals, dtype) => {\n const newShape = backend_util.assertAndGetBroadcastShape(aShape, bShape);\n const resultRank = newShape.length;\n const resultStrides = util.computeStrides(newShape);\n const resultSize = util.sizeFromShape(newShape);\n const result = util.getTypedArrayFromDType(dtype, resultSize);\n const aRank = aShape.length;\n const bRank = bShape.length;\n const aStrides = util.computeStrides(aShape);\n const bStrides = util.computeStrides(bShape);\n const aBroadcastDims = backend_util.getBroadcastDims(aShape, newShape);\n const bBroadcastDims = backend_util.getBroadcastDims(bShape, newShape);\n if (aBroadcastDims.length + bBroadcastDims.length === 0) {\n for (let i = 0; i < result.length; ++i) {\n result[i] = op(aVals[i % aVals.length], bVals[i % bVals.length]);\n }\n }\n else {\n for (let i = 0; i < result.length; ++i) {\n const loc = util.indexToLoc(i, resultRank, resultStrides);\n const aLoc = loc.slice(-aRank);\n aBroadcastDims.forEach(d => aLoc[d] = 0);\n const aIndex = util.locToIndex(aLoc, aRank, aStrides);\n const bLoc = loc.slice(-bRank);\n bBroadcastDims.forEach(d => bLoc[d] = 0);\n const bIndex = util.locToIndex(bLoc, bRank, bStrides);\n result[i] = op(aVals[aIndex], bVals[bIndex]);\n }\n }\n return [result, newShape];\n };\n}\n//# sourceMappingURL=binary_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Complex } from '@tensorflow/tfjs-core';\nexport function complex(args) {\n const { inputs, backend } = args;\n const { real, imag } = inputs;\n const realVals = backend.data.get(real.dataId).values;\n const imagVals = backend.data.get(imag.dataId).values;\n const complexInfo = backend.makeTensorInfo(real.shape, 'complex64');\n const complex = backend.data.get(complexInfo.dataId);\n // The complex tensor owns the underlying real and imag tensorInfos, only the\n // complex tensor tracks refCount, when complexData is disposed the\n // underlying tensorData will be disposed.\n complex.complexTensorInfos = {\n real: backend.makeTensorInfo(real.shape, 'float32', realVals),\n imag: backend.makeTensorInfo(imag.shape, 'float32', imagVals)\n };\n return complexInfo;\n}\nexport const complexConfig = {\n kernelName: Complex,\n backendName: 'cpu',\n kernelFunc: complex\n};\n//# sourceMappingURL=Complex.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
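// Illustrative sketch (assumed helper): the broadcast lookup inside
// createSimpleBinaryKernelImpl above. Dimensions the input was broadcast along
// are pinned to 0 before the location is flattened back into that input.
function broadcastIndex(outLoc, inShape, broadcastDims) {
  const inLoc = outLoc.slice(outLoc.length - inShape.length);
  for (const d of broadcastDims) inLoc[d] = 0; // broadcast dims always read element 0
  let index = 0;
  for (let i = 0; i < inShape.length; i++) index = index * inShape[i] + inLoc[i];
  return index;
}
// e.g. adding shapes [2,3] and [1,3]: output location [1,2] reads the second
// operand at flat index broadcastIndex([1, 2], [1, 3], [0]) === 2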
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Identity } from '@tensorflow/tfjs-core';\nexport function identity(args) {\n const { inputs, backend } = args;\n const { x } = inputs;\n backend.incRef(x.dataId);\n return { dataId: x.dataId, shape: x.shape, dtype: x.dtype };\n}\nexport const identityConfig = {\n kernelName: Identity,\n backendName: 'cpu',\n kernelFunc: identity\n};\n//# sourceMappingURL=Identity.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Real } from '@tensorflow/tfjs-core';\nexport function real(args) {\n const { inputs, backend } = args;\n const { input } = inputs;\n const real = backend.data.get(input.dataId).complexTensorInfos.real;\n const realVal = backend.data.get(real.dataId).values;\n // When complex tensor is disposed, its underlying parts will be disposed too.\n // Make new tensor out of the real value of the complex. This makes sure the\n // value is still accessible even if complex tensor is disposed.\n return backend.makeTensorInfo(real.shape, real.dtype, realVal);\n}\nexport const realConfig = {\n kernelName: Real,\n backendName: 'cpu',\n kernelFunc: real\n};\n//# sourceMappingURL=Real.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as tf from '@tensorflow/tfjs-core';\nimport { Cast, util } from '@tensorflow/tfjs-core';\nimport { createSimpleBinaryKernelImpl } from '../utils/binary_impl';\nimport { complex } from './Complex';\nimport { identity } from './Identity';\nimport { real } from './Real';\nexport function cast(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n const { dtype } = attrs;\n // Casting to complex64.\n if (dtype === 'complex64') {\n if (x.dtype === 'complex64') {\n return identity({ inputs: { x }, backend });\n }\n // TODO(lina128): Import kernel function once zeros is modularized.\n const zerosTensor = tf.zeros(x.shape);\n const floatX = cast({ inputs: { x }, backend, attrs: { dtype: 'float32' } });\n const result = complex({ inputs: { real: floatX, imag: zerosTensor }, backend });\n zerosTensor.dispose();\n backend.disposeIntermediateTensorInfo(floatX);\n return result;\n }\n // Casting from complex64\n if (x.dtype === 'complex64') {\n const realPart = real({ inputs: { input: x }, backend });\n const result = cast({ inputs: { x: realPart }, backend, attrs: { dtype } });\n backend.disposeIntermediateTensorInfo(realPart);\n return result;\n }\n if (!util.hasEncodingLoss(x.dtype, dtype)) {\n // We don't change the underlying data, since we cast to higher\n // precision.\n const result = identity({ inputs: { x }, backend });\n return { dataId: result.dataId, shape: result.shape, dtype };\n }\n if (dtype === 'int32') {\n const values = backend.data.get(x.dataId).values;\n const resultValues = Int32Array.from(values);\n return backend.makeTensorInfo(x.shape, 'int32', resultValues);\n }\n if (dtype === 'bool') {\n // This is essentially the result of notEqual(x, 0). We avoid using\n // kernel notEqual to avoid circular dependency, i.e. binary_utils ->\n // cast -> notEqual -> binary_utils.\n const xVals = backend.data.get(x.dataId).values;\n const zero = util.toTypedArray([0], x.dtype);\n const [resultData, resultShape] = createSimpleBinaryKernelImpl((a, b) => (a !== b) ? 1 : 0)(x.shape, [], xVals, zero, 'bool');\n return backend.makeTensorInfo(resultShape, 'bool', resultData);\n }\n throw new Error(`Error in Cast: failed to cast ${x.dtype} to ${dtype}`);\n}\nexport const castConfig = {\n kernelName: Cast,\n backendName: 'cpu',\n kernelFunc: cast\n};\n//# sourceMappingURL=Cast.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
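// Illustrative sketch (assumed helper, simplified): the dtype rules the Cast
// kernel above applies to plain typed arrays. int32 truncates via Int32Array,
// and bool is effectively notEqual(x, 0).
function castValues(values, dtype) {
  if (dtype === 'int32') return Int32Array.from(values);
  if (dtype === 'bool') return Uint8Array.from(values, (v) => (v !== 0 ? 1 : 0));
  return Float32Array.from(values);
}
// castValues([0, 2.7, -1], 'bool') -> Uint8Array [0, 1, 1]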
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { cast } from '../kernels/Cast';\nimport { complex } from '../kernels/Complex';\n/**\n * Template that creates a `KernelFunc` for binary ops.\n * @param name Kernel name.\n * @param binaryKernelImpl A `SimpleBinaryKernelImpl` for the kernel.\n * @param binaryKernelComplexImpl Optional. If exists, represents a\n * `ComplexBinaryKernelImpl` for the kernel, will be used when input dtype\n * is `complex64`.\n * @param dtype Optional. If set, the result has this dtype. Otherwise, the\n * result has the same dtype as the first input. This is mainly used in\n * comparison kernels, such as Equal, Less, Greater, etc.\n */\nexport function binaryKernelFunc(name, simpleImpl, complexImpl, dtype) {\n if (complexImpl == null) {\n return ({ inputs, backend }) => {\n const { a, b } = inputs;\n const cpuBackend = backend;\n assertNotComplex([a, b], name);\n const aVals = cpuBackend.data.get(a.dataId).values;\n const bVals = cpuBackend.data.get(b.dataId).values;\n const $dtype = dtype || a.dtype;\n const [resultData, resultShape] = simpleImpl(a.shape, b.shape, aVals, bVals, $dtype);\n return cpuBackend.makeTensorInfo(resultShape, $dtype, resultData);\n };\n }\n return ({ inputs, backend }) => {\n const { a, b } = inputs;\n const cpuBackend = backend;\n if (a.dtype === 'complex64' || b.dtype === 'complex64') {\n const $aComplex = cast({ inputs: { x: a }, backend: cpuBackend, attrs: { dtype: 'complex64' } });\n const $aComplexVals = cpuBackend.data.get($aComplex.dataId);\n const aReal = $aComplexVals.complexTensorInfos.real;\n const aImag = $aComplexVals.complexTensorInfos.imag;\n const aRealVals = cpuBackend.data.get(aReal.dataId).values;\n const aImagVals = cpuBackend.data.get(aImag.dataId).values;\n const $bComplex = cast({ inputs: { x: b }, backend: cpuBackend, attrs: { dtype: 'complex64' } });\n const $bComplexVals = cpuBackend.data.get($bComplex.dataId);\n const bReal = $bComplexVals.complexTensorInfos.real;\n const bImag = $bComplexVals.complexTensorInfos.imag;\n const bRealVals = cpuBackend.data.get(bReal.dataId).values;\n const bImagVals = cpuBackend.data.get(bImag.dataId).values;\n const [resultRealData, resultImagData, resultShape] = complexImpl(a.shape, b.shape, aRealVals, aImagVals, bRealVals, bImagVals);\n const resultReal = cpuBackend.makeTensorInfo(resultShape, 'float32', resultRealData);\n const resultImag = cpuBackend.makeTensorInfo(resultShape, 'float32', resultImagData);\n const result = complex({ inputs: { real: resultReal, imag: resultImag }, backend: cpuBackend });\n cpuBackend.disposeIntermediateTensorInfo($aComplex);\n cpuBackend.disposeIntermediateTensorInfo($bComplex);\n cpuBackend.disposeIntermediateTensorInfo(resultReal);\n cpuBackend.disposeIntermediateTensorInfo(resultImag);\n return result;\n }\n else 
{\n const aVals = cpuBackend.data.get(a.dataId).values;\n const bVals = cpuBackend.data.get(b.dataId).values;\n const $dtype = dtype || a.dtype;\n const [resultData, resultShape] = simpleImpl(a.shape, b.shape, aVals, bVals, $dtype);\n return cpuBackend.makeTensorInfo(resultShape, $dtype, resultData);\n }\n };\n}\n/**\n * Template that creates the complex type implementation for binary ops.\n * Supports broadcast.\n */\nexport function createComplexBinaryKernelImpl(op) {\n return (aShape, bShape, aRealVals, aImagVals, bRealVals, bImagVals) => {\n const resultShape = backend_util.assertAndGetBroadcastShape(aShape, bShape);\n const resultSize = util.sizeFromShape(resultShape);\n const resultRank = resultShape.length;\n const resultStrides = util.computeStrides(resultShape);\n const resultRealVals = util.getTypedArrayFromDType('float32', resultSize);\n const resultImagVals = util.getTypedArrayFromDType('float32', resultSize);\n const aBroadcastDims = backend_util.getBroadcastDims(aShape, resultShape);\n const bBroadcastDims = backend_util.getBroadcastDims(bShape, resultShape);\n const aVals = backend_util.mergeRealAndImagArrays(aRealVals, aImagVals);\n const bVals = backend_util.mergeRealAndImagArrays(bRealVals, bImagVals);\n const aRank = aShape.length;\n const aStrides = util.computeStrides(aShape);\n const bRank = bShape.length;\n const bStrides = util.computeStrides(bShape);\n if (aBroadcastDims.length + bBroadcastDims.length === 0) {\n for (let i = 0; i < resultRealVals.length; i++) {\n const aIdx = i % aVals.length;\n const bIdx = i % bVals.length;\n const result = op(aVals[aIdx * 2], aVals[aIdx * 2 + 1], bVals[bIdx * 2], bVals[bIdx * 2 + 1]);\n resultRealVals[i] = result.real;\n resultImagVals[i] = result.imag;\n }\n }\n else {\n for (let i = 0; i < resultRealVals.length; i++) {\n const loc = util.indexToLoc(i, resultRank, resultStrides);\n const aLoc = loc.slice(-aRank);\n aBroadcastDims.forEach(d => aLoc[d] = 0);\n const aIndex = util.locToIndex(aLoc, aRank, aStrides);\n const bLoc = loc.slice(-bRank);\n bBroadcastDims.forEach(d => bLoc[d] = 0);\n const bIndex = util.locToIndex(bLoc, bRank, bStrides);\n const opResult = op(aVals[aIndex * 2], aVals[aIndex * 2 + 1], bVals[bIndex * 2], bVals[bIndex * 2 + 1]);\n resultRealVals[i] = opResult.real;\n resultImagVals[i] = opResult.imag;\n }\n }\n return [resultRealVals, resultImagVals, resultShape];\n };\n}\n//# sourceMappingURL=kernel_utils.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
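// Illustrative sketch (assumed helper): the interleaved layout the complex
// binary template above relies on. Real and imaginary parts are merged as
// [re0, im0, re1, im1, ...], so element i lives at slots 2*i and 2*i + 1.
function mergeRealAndImag(realVals, imagVals) {
  const merged = new Float32Array(realVals.length * 2);
  for (let i = 0; i < realVals.length; i++) {
    merged[2 * i] = realVals[i];
    merged[2 * i + 1] = imagVals[i];
  }
  return merged;
}
// element i of the merged array: { real: merged[2 * i], imag: merged[2 * i + 1] }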
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Add } from '@tensorflow/tfjs-core';\nimport { createSimpleBinaryKernelImpl } from '../utils/binary_impl';\nimport { binaryKernelFunc, createComplexBinaryKernelImpl } from '../utils/kernel_utils';\nexport const addImpl = createSimpleBinaryKernelImpl(((a, b) => a + b));\nexport const addComplexImpl = createComplexBinaryKernelImpl(((aReal, aImag, bReal, bImag) => {\n return { real: aReal + bReal, imag: aImag + bImag };\n}));\nexport const add = binaryKernelFunc(Add, addImpl, addComplexImpl);\nexport const addConfig = {\n kernelName: Add,\n backendName: 'cpu',\n kernelFunc: add\n};\n//# sourceMappingURL=Add.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\n/**\n * Template that creates implementation for unary op.\n */\nexport function createSimpleUnaryImpl(op) {\n return (values, dtype, attrs) => {\n const newValues = util.getTypedArrayFromDType(dtype, values.length);\n for (let i = 0; i < values.length; ++i) {\n newValues[i] = op(values[i], attrs);\n }\n return newValues;\n };\n}\n//# sourceMappingURL=unary_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\n/**\n * Template that creates a `KernelFunc` for unary ops.\n * @param name Kernel name.\n * @param op A `SimpleUnaryOperation` for the kernel.\n * @param dtype Optional. If set, the result has this dtype. Otherwise, the\n * result has the same dtype as the input. 
This is mainly used in certain\n * kernels that return bool type, such as isFinite, isInf, etc.\n */\nexport function unaryKernelFunc(name, op, dtype) {\n return ({ inputs, attrs, backend }) => {\n const { x } = inputs;\n assertNotComplex(x, name);\n if (x.dtype === 'string' || dtype === 'string') {\n throw new Error('unaryKernelFunc does not support string input/output');\n }\n const cpuBackend = backend;\n const values = cpuBackend.data.get(x.dataId).values;\n const xSize = util.sizeFromShape(x.shape);\n const $dtype = dtype || x.dtype;\n const newValues = util.getArrayFromDType($dtype, xSize);\n for (let i = 0; i < xSize; ++i) {\n newValues[i] = op(values[i], attrs);\n }\n return cpuBackend.makeTensorInfo(x.shape, $dtype, newValues);\n };\n}\n/**\n * Template that creates a `KernelFunc` for unary ops from the given\n * `SimpleUnaryImpl`..\n * @param name Kernel name.\n * @param unaryImpl A `SimpleUnaryImpl` that implements the op.\n * @param dtype Optional. If set, the result has this dtype. Otherwise, the\n * result has the same dtype as the input. This is mainly used in certain\n * kernels that return bool type, such as isFinite, isInf, etc.\n */\nexport function unaryKernelFuncFromImpl(name, unaryImpl, dtype) {\n return ({ inputs, attrs, backend }) => {\n const { x } = inputs;\n assertNotComplex(x, name);\n if (x.dtype === 'string' || dtype === 'string') {\n throw new Error('unaryKernelFunc does not support string input/output');\n }\n const cpuBackend = backend;\n const values = cpuBackend.data.get(x.dataId).values;\n const $dtype = dtype || x.dtype;\n const newValues = unaryImpl(values, $dtype, attrs);\n return cpuBackend.makeTensorInfo(x.shape, $dtype, newValues);\n };\n}\n//# sourceMappingURL=unary_utils.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Ceil } from '@tensorflow/tfjs-core';\nimport { createSimpleUnaryImpl } from '../utils/unary_impl';\nimport { unaryKernelFuncFromImpl } from '../utils/unary_utils';\nexport const ceilImpl = createSimpleUnaryImpl((xi) => Math.ceil(xi));\nexport const ceil = unaryKernelFuncFromImpl(Ceil, ceilImpl);\nexport const ceilConfig = {\n kernelName: Ceil,\n backendName: 'cpu',\n kernelFunc: ceil,\n};\n//# sourceMappingURL=Ceil.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Exp } from '@tensorflow/tfjs-core';\nimport { createSimpleUnaryImpl } from '../utils/unary_impl';\nimport { unaryKernelFuncFromImpl } from '../utils/unary_utils';\nexport const expImpl = createSimpleUnaryImpl((xi) => Math.exp(xi));\nexport const exp = unaryKernelFuncFromImpl(Exp, expImpl);\nexport const expConfig = {\n kernelName: Exp,\n backendName: 'cpu',\n kernelFunc: exp,\n};\n//# sourceMappingURL=Exp.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Expm1 } from '@tensorflow/tfjs-core';\nimport { createSimpleUnaryImpl } from '../utils/unary_impl';\nimport { unaryKernelFuncFromImpl } from '../utils/unary_utils';\nexport const expm1Impl = createSimpleUnaryImpl((xi) => Math.expm1(xi));\nexport const expm1 = unaryKernelFuncFromImpl(Expm1, expm1Impl);\nexport const expm1Config = {\n kernelName: Expm1,\n backendName: 'cpu',\n kernelFunc: expm1,\n};\n//# sourceMappingURL=Expm1.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Floor } from '@tensorflow/tfjs-core';\nimport { createSimpleUnaryImpl } from '../utils/unary_impl';\nimport { unaryKernelFuncFromImpl } from '../utils/unary_utils';\nexport const floorImpl = createSimpleUnaryImpl((xi) => Math.floor(xi));\nexport const floor = unaryKernelFuncFromImpl(Floor, floorImpl);\nexport const floorConfig = {\n kernelName: Floor,\n backendName: 'cpu',\n kernelFunc: floor,\n};\n//# sourceMappingURL=Floor.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Log } from '@tensorflow/tfjs-core';\nimport { createSimpleUnaryImpl } from '../utils/unary_impl';\nimport { unaryKernelFuncFromImpl } from '../utils/unary_utils';\nexport const logImpl = createSimpleUnaryImpl((xi) => Math.log(xi));\nexport const log = unaryKernelFuncFromImpl(Log, logImpl);\nexport const logConfig = {\n kernelName: Log,\n backendName: 'cpu',\n kernelFunc: log,\n};\n//# sourceMappingURL=Log.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nexport function maxImpl(aVals, reduceSize, outShape, dtype) {\n const vals = util.getTypedArrayFromDType(dtype, util.sizeFromShape(outShape));\n for (let i = 0; i < vals.length; ++i) {\n const offset = i * reduceSize;\n let max = aVals[offset];\n for (let j = 0; j < reduceSize; ++j) {\n const value = aVals[offset + j];\n if (value > max) {\n max = value;\n }\n }\n vals[i] = max;\n }\n return vals;\n}\n//# sourceMappingURL=Max_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Multiply } from '@tensorflow/tfjs-core';\nimport { createSimpleBinaryKernelImpl } from '../utils/binary_impl';\nimport { binaryKernelFunc, createComplexBinaryKernelImpl } from '../utils/kernel_utils';\nexport const multiplyImpl = createSimpleBinaryKernelImpl(((aValue, bValue) => aValue * bValue));\nexport const multiplyComplexImpl = createComplexBinaryKernelImpl(((aReal, aImag, bReal, bImag) => {\n return {\n real: aReal * bReal - aImag * bImag,\n imag: aReal * bImag + aImag * bReal\n };\n}));\nexport const multiply = binaryKernelFunc(Multiply, multiplyImpl, multiplyComplexImpl);\nexport const multiplyConfig = {\n kernelName: Multiply,\n backendName: 'cpu',\n kernelFunc: multiply\n};\n//# sourceMappingURL=Multiply.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { NotEqual } from '@tensorflow/tfjs-core';\nimport { createSimpleBinaryKernelImpl } from '../utils/binary_impl';\nimport { binaryKernelFunc } from '../utils/kernel_utils';\nexport const notEqualImpl = createSimpleBinaryKernelImpl(((a, b) => (a !== b) ? 1 : 0));\nexport const notEqual = binaryKernelFunc(NotEqual, notEqualImpl, null /* complexOp */, 'bool');\nexport const notEqualConfig = {\n kernelName: NotEqual,\n backendName: 'cpu',\n kernelFunc: notEqual\n};\n//# sourceMappingURL=NotEqual.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Rsqrt } from '@tensorflow/tfjs-core';\nimport { createSimpleUnaryImpl } from '../utils/unary_impl';\nimport { unaryKernelFuncFromImpl } from '../utils/unary_utils';\nexport const rsqrtImpl = createSimpleUnaryImpl((xi) => 1 / Math.sqrt(xi));\nexport const rsqrt = unaryKernelFuncFromImpl(Rsqrt, rsqrtImpl);\nexport const rsqrtConfig = {\n kernelName: Rsqrt,\n backendName: 'cpu',\n kernelFunc: rsqrt,\n};\n//# sourceMappingURL=Rsqrt.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Slice, slice_util, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function sliceImpl(vals, begin, size, shape, dtype) {\n const isContinous = slice_util.isSliceContinous(shape, begin, size);\n const length = util.sizeFromShape(size);\n const xStrides = util.computeStrides(shape);\n if (isContinous) {\n const flatOffset = slice_util.computeFlatOffset(begin, xStrides);\n return vals.subarray(flatOffset, flatOffset + length);\n }\n const outVals = util.getTypedArrayFromDType(dtype, length);\n for (let i = 0; i < length; ++i) {\n const rank = size.length;\n const strides = util.computeStrides(size);\n const loc = util.indexToLoc(i, rank, strides);\n const xLoc = loc.map((idx, j) => idx + begin[j]);\n const xIndex = util.locToIndex(xLoc, shape.length, xStrides);\n outVals[i] = vals[xIndex];\n }\n return outVals;\n}\nexport function slice(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n const { begin, size } = attrs;\n assertNotComplex(x, 'slice');\n const [$begin, $size] = slice_util.parseSliceParams(x, begin, size);\n slice_util.assertParamsValid(x, $begin, $size);\n const vals = backend.data.get(x.dataId).values;\n const outVals = sliceImpl(vals, $begin, $size, x.shape, x.dtype);\n return backend.makeTensorInfo($size, x.dtype, outVals);\n}\nexport const sliceConfig = {\n kernelName: Slice,\n backendName: 'cpu',\n kernelFunc: slice\n};\n//# sourceMappingURL=Slice.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
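// Illustrative sketch (assumed helper, simplified to the case where only the
// outermost dimension is cut): the contiguous fast path in sliceImpl above,
// which returns a subarray view instead of copying element by element.
function contiguousSlice(vals, shape, begin, size) {
  const innerStride = shape.slice(1).reduce((a, b) => a * b, 1);
  const flatOffset = begin[0] * innerStride;
  return vals.subarray(flatOffset, flatOffset + size[0] * innerStride);
}
// e.g. contiguousSlice(new Float32Array(12), [4, 3], [1, 0], [2, 3]) views elements 3..8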
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { SquaredDifference } from '@tensorflow/tfjs-core';\nimport { createSimpleBinaryKernelImpl } from '../utils/binary_impl';\nimport { binaryKernelFunc } from '../utils/kernel_utils';\nexport const squaredDifferenceImpl = createSimpleBinaryKernelImpl(((a, b) => {\n const diff = a - b;\n return diff * diff;\n}));\nexport const squaredDifference = binaryKernelFunc(SquaredDifference, squaredDifferenceImpl);\nexport const squaredDifferenceConfig = {\n kernelName: SquaredDifference,\n backendName: 'cpu',\n kernelFunc: squaredDifference\n};\n//# sourceMappingURL=SquaredDifference.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sub } from '@tensorflow/tfjs-core';\nimport { createSimpleBinaryKernelImpl } from '../utils/binary_impl';\nimport { binaryKernelFunc, createComplexBinaryKernelImpl } from '../utils/kernel_utils';\nexport const subImpl = createSimpleBinaryKernelImpl(((aValue, bValue) => aValue - bValue));\nexport const subComplexImpl = createComplexBinaryKernelImpl(((aReal, aImag, bReal, bImag) => {\n return { real: aReal - bReal, imag: aImag - bImag };\n}));\nexport const sub = binaryKernelFunc(Sub, subImpl, subComplexImpl);\nexport const subConfig = {\n kernelName: Sub,\n backendName: 'cpu',\n kernelFunc: sub\n};\n//# sourceMappingURL=Sub.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nexport function transposeImpl(xVals, xShape, dtype, perm, newShape) {\n const xRank = xShape.length;\n const xSize = util.sizeFromShape(xShape);\n const xStrides = util.computeStrides(xShape);\n const newStrides = util.computeStrides(newShape);\n const result = util.getTypedArrayFromDType(dtype, util.sizeFromShape(newShape));\n for (let i = 0; i < xSize; ++i) {\n const loc = util.indexToLoc(i, xRank, xStrides);\n // Permute location.\n const newLoc = new Array(loc.length);\n for (let i = 0; i < newLoc.length; i++) {\n newLoc[i] = loc[perm[i]];\n }\n const newIndex = util.locToIndex(newLoc, xRank, newStrides);\n result[newIndex] = xVals[i];\n }\n return result;\n}\n//# sourceMappingURL=Transpose_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { TensorBuffer, util } from '@tensorflow/tfjs-core';\nexport function uniqueImpl(values, axis, shape, dtype) {\n // Normalize and validate axis.\n const $axis = util.parseAxisParam(axis, shape)[0];\n // Calculate the new shape that is suitable for extracting data along the\n // given axis.\n //\n // The rank is 3.\n // The size of the 1st dimension is the size of all the axes < the given axis.\n // The size of the 2nd dimension is the same as the size of the given axis.\n // The size of the 3rd dimension is the size of all the axes > the given axis.\n //\n // For example, for a 4D tensor with shape=[2, 3, 5, 4] and axis=2, the\n // newShape would be: [2*3, 5, 4].\n //\n // Note that this is not the final output shape. This will be the shape for an\n // intermediate TensorBuffer (see inputBuffer below) to allow us to extract\n // values along the given axis. To demonstrate how it works, consider the\n // following example:\n //\n // Input: a 3D tensor, with shape [1, 2, 3]\n // [\n // [\n // [1,2,3],\n // [4,5,6]\n // ]\n // ]\n // Axis: 2 (the last axis).\n // Along axis 2, we expect to extract 3 tensors: [1,4], [2,5], [3,6].\n //\n // For this example, newShape would be: [2, 3, 1], where 2 is calculated from\n // 1*2. 
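// Illustrative sketch (assumed helper): the location permutation at the heart
// of transposeImpl above. Each source location is re-ordered by `perm` before
// being flattened into the transposed output.
function permuteLoc(loc, perm) {
  return perm.map((sourceDim) => loc[sourceDim]); // newLoc[i] = loc[perm[i]]
}
// transposing a matrix (perm = [1, 0]): the element at [row, col] lands at [col, row]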
The re-shaped data would look like:\n //\n // [\n // [\n // [1], [2], [3]\n // ],\n // [\n // [4], [5], [6]\n // ]\n // ]\n //\n // Then, we can construct a 3-level nested loop by the following dimension\n // order to extract the values along the axis (dimension1):\n // i: dimension1 // 0,1,2 (newShape[1])\n // m: dimension0 // 0,1 (newShape[0])\n // n: dimension2 // 0 (newShape[2])\n //\n // m, i, n\n // ---------\n // Iteration 0: data at [0, 0, 0] => \"1\"\n // Iteration 1: data at [1, 0, 0] => \"4\"\n // We got [1,4].\n // Iteration 2: data at [0, 1, 0] => \"2\"\n // Iteration 3: data at [1, 1, 0] => \"5\"\n // We got [2,5].\n // Iteration 4: data at [0, 2, 0] => \"3\"\n // Iteration 5: data at [1, 2, 0] => \"6\"\n // We got [3,6].\n const newShape = [1, shape[0], 1];\n for (let i = 0; i < $axis; i++) {\n newShape[0] *= shape[i];\n }\n newShape[1] = shape[$axis];\n for (let i = $axis + 1; i < shape.length; i++) {\n newShape[2] *= shape[i];\n }\n // A map from unique elements (their string representations) to their values\n // in \"indices\" (below).\n const uniqueElements = {};\n // The indices of each unique element in the original tensor along the given\n // axis. It is 1D and has the same size as the given axis.\n const indices = new Int32Array(shape[$axis]);\n // Create a buffer so we can easily extract value at a given location.\n const inputBuffer = new TensorBuffer(newShape, dtype, values);\n // The indices along the given axis that have unique elements. This is a\n // de-duped version of \"indices\" above.\n const uniqueIndices = [];\n const is1DTensor = newShape[0] === 1 && newShape[2] === 1;\n for (let i = 0; i < shape[$axis]; i++) {\n // Extract values along the axis.\n let element;\n if (is1DTensor) {\n // Fast path for 1D tensor input.\n element = values[i].toString();\n }\n else {\n const axisValues = [];\n for (let m = 0; m < newShape[0]; m++) {\n for (let n = 0; n < newShape[2]; n++) {\n axisValues.push(inputBuffer.get(m, i, n));\n }\n }\n element = axisValues.join(',');\n }\n // Dedup and update various indices.\n if (uniqueElements[element] !== undefined) {\n indices[i] = uniqueElements[element];\n }\n else {\n const uniqueIndex = Object.keys(uniqueElements).length;\n uniqueElements[element] = uniqueIndex;\n indices[i] = uniqueIndex;\n uniqueIndices.push(i);\n }\n }\n // Now we know where each of the unique elements are located along the axis\n // (uniqueIndices). Extract them from input buffer and store them in the\n // output buffer.\n const outputTmpShape = newShape.slice();\n outputTmpShape[1] = Object.keys(uniqueElements).length;\n const outputBuffer = new TensorBuffer(outputTmpShape, dtype);\n uniqueIndices.forEach((uniqueElementIndex, i) => {\n for (let m = 0; m < newShape[0]; m++) {\n for (let n = 0; n < newShape[2]; n++) {\n outputBuffer.set(inputBuffer.get(m, uniqueElementIndex, n), m, i, n);\n }\n }\n });\n // The output shape can be calculated from the input shape with the size of\n // the given axis replaced by the number of unique elements along that axis.\n const outputShape = shape.slice();\n outputShape[$axis] = outputTmpShape[1];\n return {\n outputValues: outputBuffer.values,\n outputShape,\n indices,\n };\n}\n//# sourceMappingURL=Unique_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
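// Editor's note: an illustrative sketch, not part of the bundled sources above. It mirrors the
// 1-D fast path of uniqueImpl: deduplicate the values while recording, for every input element,
// the position of its first occurrence in the deduped output.
function unique1d(values) {
  const seen = new Map();          // element -> position in outputValues
  const outputValues = [];
  const indices = new Int32Array(values.length);
  values.forEach((v, i) => {
    if (!seen.has(v)) { seen.set(v, outputValues.length); outputValues.push(v); }
    indices[i] = seen.get(v);
  });
  return { outputValues, indices };
}
console.log(unique1d([1, 1, 2, 4, 4, 4, 7, 8, 8]));
// { outputValues: [1, 2, 4, 7, 8], indices: Int32Array [0, 0, 1, 2, 2, 2, 3, 4, 4] }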
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// Shared functionality among backends.\nexport { simpleAbsImpl } from './kernels/Abs';\nexport { addImpl } from './kernels/Add';\nexport { ceilImpl } from './kernels/Ceil';\nexport { expImpl } from './kernels/Exp';\nexport { expm1Impl } from './kernels/Expm1';\nexport { floorImpl } from './kernels/Floor';\nexport { logImpl } from './kernels/Log';\nexport { maxImpl } from './kernels/Max_impl';\nexport { multiplyImpl } from './kernels/Multiply';\nexport { notEqualImpl } from './kernels/NotEqual';\nexport { rsqrtImpl } from './kernels/Rsqrt';\nexport { sliceImpl } from './kernels/Slice';\nexport { squaredDifferenceImpl } from './kernels/SquaredDifference';\nexport { subImpl } from './kernels/Sub';\nexport { transposeImpl } from './kernels/Transpose_impl';\nexport { uniqueImpl } from './kernels/Unique_impl';\n//# sourceMappingURL=shared.js.map", "/** @license See the LICENSE file. */\n// This code is auto-generated, do not modify this file!\nconst version = '2.7.0';\nexport { version };\n//# sourceMappingURL=version.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/*\n * base.ts contains all the exports from tfjs-backend-cpu\n * without auto-kernel registration\n */\nimport { registerBackend } from '@tensorflow/tfjs-core';\nimport { MathBackendCPU } from './backend_cpu';\nimport * as shared from './shared';\nexport { MathBackendCPU } from './backend_cpu';\nexport { version as version_cpu } from './version';\nexport { shared };\n// Side effects for default initialization of MathBackendCPU\nregisterBackend('cpu', () => new MathBackendCPU(), 1 /* priority */);\n//# sourceMappingURL=base.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
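// Editor's note: an illustrative sketch, not part of the bundled sources above. base.ts registers
// only the backend itself ("without auto-kernel registration"), so the kernel configs defined in
// this bundle are attached separately. Assuming the standard registerKernel entry point from
// @tensorflow/tfjs-core is available to the consuming code, that would look roughly like:
import { registerKernel } from '@tensorflow/tfjs-core';
import { eluConfig } from './kernels/Elu';   // relative path only valid inside the package; shown for illustration
registerKernel(eluConfig);                   // 'Elu' now resolves on the 'cpu' backend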
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Elu } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const elu = unaryKernelFunc(Elu, (xi) => xi >= 0 ? xi : (Math.exp(xi) - 1));\nexport const eluConfig = {\n kernelName: Elu,\n backendName: 'cpu',\n kernelFunc: elu,\n};\n//# sourceMappingURL=Elu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Prelu } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { createSimpleBinaryKernelImpl } from '../utils/binary_impl';\nconst preluImpl = createSimpleBinaryKernelImpl((xValue, aValue) => xValue < 0 ? aValue * xValue : xValue);\nexport function prelu(args) {\n const { inputs, backend } = args;\n const { x, alpha } = inputs;\n assertNotComplex([x, alpha], 'prelu');\n const aVals = backend.data.get(x.dataId).values;\n const bVals = backend.data.get(alpha.dataId).values;\n const [resultData, resultShape] = preluImpl(x.shape, alpha.shape, aVals, bVals, x.dtype);\n return backend.makeTensorInfo(resultShape, x.dtype, resultData);\n}\nexport const preluConfig = {\n kernelName: Prelu,\n backendName: 'cpu',\n kernelFunc: prelu,\n};\n//# sourceMappingURL=Prelu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
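// Editor's note: an illustrative sketch, not part of the bundled sources above, showing the
// scalar math behind the Elu and Prelu kernels in plain JavaScript.
const eluScalar = (x) => (x >= 0 ? x : Math.exp(x) - 1);
const preluScalar = (x, alpha) => (x < 0 ? alpha * x : x);
console.log(eluScalar(1));          // 1
console.log(eluScalar(-1));         // -0.6321205588285577  (e^-1 - 1)
console.log(preluScalar(-2, 0.2));  // -0.4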
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Relu } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const relu = unaryKernelFunc(Relu, (xi) => Math.max(0, xi));\nexport const reluConfig = {\n kernelName: Relu,\n backendName: 'cpu',\n kernelFunc: relu,\n};\n//# sourceMappingURL=Relu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Relu6 } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const relu6 = unaryKernelFunc(Relu6, (xi) => Math.min(Math.max(0, xi), 6));\nexport const relu6Config = {\n kernelName: Relu6,\n backendName: 'cpu',\n kernelFunc: relu6,\n};\n//# sourceMappingURL=Relu6.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
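// Editor's note: an illustrative sketch, not part of the bundled sources above: the Relu and
// Relu6 kernels clamp each element as follows.
const reluScalar = (x) => Math.max(0, x);
const relu6Scalar = (x) => Math.min(Math.max(0, x), 6);
console.log([-1, 3, 10].map(reluScalar));  // [0, 3, 10]
console.log([-1, 3, 10].map(relu6Scalar)); // [0, 3, 6]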
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { elu } from '../kernels/Elu';\nimport { identity } from '../kernels/Identity';\nimport { prelu } from '../kernels/Prelu';\nimport { relu } from '../kernels/Relu';\nimport { relu6 } from '../kernels/Relu6';\nexport function applyActivation(backend, x, activation, preluActivationWeights) {\n if (activation === 'linear') {\n return identity({ inputs: { x }, backend });\n }\n else if (activation === 'relu') {\n return relu({ inputs: { x }, backend });\n }\n else if (activation === 'elu') {\n return elu({ inputs: { x }, backend });\n }\n else if (activation === 'relu6') {\n return relu6({ inputs: { x }, backend });\n }\n else if (activation === 'prelu') {\n return prelu({ inputs: { x, alpha: preluActivationWeights }, backend });\n }\n throw new Error(`Activation ${activation} has not been implemented for the CPU backend.`);\n}\n//# sourceMappingURL=fused_utils.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Reshape, util } from '@tensorflow/tfjs-core';\nexport function reshape(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n const { shape } = attrs;\n const xSize = util.sizeFromShape(x.shape);\n const $shape = util.inferFromImplicitShape(shape, xSize);\n const $xSize = util.sizeFromShape($shape);\n util.assert(xSize === $xSize, () => `The new shape (${$shape}) has ${$xSize} elements and the old ` +\n `shape (${x.shape}) has ${xSize} elements. The new shape and old ` +\n `shape must have the same number of elements.`);\n backend.incRef(x.dataId);\n const xData = backend.data.get(x.dataId);\n if (xData.complexTensorInfos != null) {\n const real = xData.complexTensorInfos.real;\n const imag = xData.complexTensorInfos.imag;\n real.shape = $shape;\n imag.shape = $shape;\n }\n return { dataId: x.dataId, shape: $shape, dtype: x.dtype };\n}\nexport const reshapeConfig = {\n kernelName: Reshape,\n backendName: 'cpu',\n kernelFunc: reshape\n};\n//# sourceMappingURL=Reshape.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
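// Editor's note: an illustrative sketch, not part of the bundled sources above. The reshape
// kernel delegates implicit-shape handling to util.inferFromImplicitShape; the essential
// behaviour (error handling omitted) is that a single -1 dimension is filled in so the element
// count is preserved.
function inferShape(shape, size) {
  const idx = shape.indexOf(-1);
  if (idx === -1) return shape;
  const known = shape.reduce((prod, d) => (d === -1 ? prod : prod * d), 1);
  const inferred = shape.slice();
  inferred[idx] = size / known;
  return inferred;
}
console.log(inferShape([-1, 3], 6)); // [2, 3]
console.log(inferShape([2, 3], 6));  // [2, 3]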
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { BatchMatMul, buffer, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { reshape } from './Reshape';\nexport function batchMatMul(args) {\n const { inputs, backend, attrs } = args;\n const { a, b } = inputs;\n const { transposeA, transposeB } = attrs;\n assertNotComplex([a, b], 'matMul');\n const aRank = a.shape.length;\n const bRank = b.shape.length;\n const innerShapeA = transposeA ? a.shape[aRank - 2] : a.shape[aRank - 1];\n const innerShapeB = transposeB ? b.shape[bRank - 1] : b.shape[bRank - 2];\n const outerShapeA = transposeA ? a.shape[aRank - 1] : a.shape[aRank - 2];\n const outerShapeB = transposeB ? b.shape[bRank - 2] : b.shape[bRank - 1];\n const outerDimsA = a.shape.slice(0, -2);\n const outerDimsB = b.shape.slice(0, -2);\n const batchDimA = util.sizeFromShape(outerDimsA);\n const batchDimB = util.sizeFromShape(outerDimsB);\n const batchDimsCompatible = batchDimA === batchDimB || batchDimA === 1 || batchDimB === 1;\n util.assert(aRank >= 2 && bRank >= 2 && batchDimsCompatible, () => `Error in matMul: the input batch dimensions must either be the ` +\n `same or at least one input batch dimension must be 1. Got input ` +\n `batch dimensions of (${outerDimsA}) and (${outerDimsB}).`);\n const outShapeOuterDims = batchDimA > batchDimB ? a.shape.slice(0, -2) : b.shape.slice(0, -2);\n const outShape = outShapeOuterDims.concat([outerShapeA, outerShapeB]);\n util.assert(innerShapeA === innerShapeB, () => `Error in matMul: inner shapes (${innerShapeA}) and (` +\n `${innerShapeB}) of Tensors with shapes ${a.shape} and ` +\n `${b.shape} and transposeA=${transposeA}` +\n ` and transposeB=${transposeB} must match.`);\n const a3dShape = transposeA ? [batchDimA, innerShapeA, outerShapeA] :\n [batchDimA, outerShapeA, innerShapeA];\n const b3dShape = transposeB ? [batchDimB, outerShapeB, innerShapeB] :\n [batchDimB, innerShapeB, outerShapeB];\n // The rest of the implementation is designed to operate on rank-3 tensors\n const a3d = reshape({ inputs: { x: a }, backend, attrs: { shape: a3dShape } });\n const b3d = reshape({ inputs: { x: b }, backend, attrs: { shape: b3dShape } });\n const sharedDim = transposeA ? a3d.shape[1] : a3d.shape[2];\n const leftDim = transposeA ? a3d.shape[2] : a3d.shape[1];\n const rightDim = transposeB ? 
b3d.shape[1] : b3d.shape[2];\n const batchDim = Math.max(batchDimA, batchDimB);\n const a3dValues = backend.data.get(a3d.dataId).values;\n const b3dValues = backend.data.get(b3d.dataId).values;\n const a3dStrides = util.computeStrides(a3d.shape);\n const b3dStrides = util.computeStrides(b3d.shape);\n const [aBatch, aOuterStep, aInnerStep] = transposeA ?\n [a3dStrides[0], 1, a3dStrides[1]] :\n [a3dStrides[0], a3dStrides[1], 1];\n const [bInnerStep, bOuterStep, bBatch] = transposeB ?\n [1, b3dStrides[1], b3dStrides[0]] :\n [b3dStrides[1], 1, b3dStrides[0]];\n const size = leftDim * rightDim;\n const result = buffer([batchDim, leftDim, rightDim], a3d.dtype);\n const resVals = result.values;\n const blockSize = backend.blockSize;\n for (let bi = 0; bi < batchDim; bi++) {\n for (let i0 = 0; i0 < leftDim; i0 += blockSize) {\n for (let j0 = 0; j0 < rightDim; j0 += blockSize) {\n for (let k0 = 0; k0 < sharedDim; k0 += blockSize) {\n // for when blockSize doesn't evenly divide the input\n const iBlock = Math.min(i0 + blockSize, leftDim);\n const jBlock = Math.min(j0 + blockSize, rightDim);\n const kBlock = Math.min(k0 + blockSize, sharedDim);\n for (let i = i0; i < iBlock; i++) {\n for (let j = j0; j < jBlock; j++) {\n let sum = 0.0;\n for (let k = k0; k < kBlock; k++) {\n const batchOffsetA = Math.min(bi, batchDimA - 1) * aBatch;\n const batchOffsetB = Math.min(bi, batchDimB - 1) * bBatch;\n const aVal = a3dValues[batchOffsetA + i * aOuterStep + k * aInnerStep];\n const bVal = b3dValues[k * bInnerStep + j * bOuterStep + batchOffsetB];\n sum += aVal * bVal;\n }\n resVals[bi * size + (i * rightDim + j)] += sum;\n }\n }\n }\n }\n }\n }\n backend.disposeIntermediateTensorInfo(a3d);\n backend.disposeIntermediateTensorInfo(b3d);\n // set correct shape on output.\n return backend.makeTensorInfo(outShape, result.dtype, result.values);\n}\nexport const batchMatMulConfig = {\n kernelName: BatchMatMul,\n backendName: 'cpu',\n kernelFunc: batchMatMul,\n};\n//# sourceMappingURL=BatchMatMul.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
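// Editor's note: an illustrative sketch, not part of the bundled sources above. Per batch, the
// blocked loop in batchMatMul reduces to an ordinary row-by-column dot product; a naive
// reference makes the expected output concrete.
function matmulNaive(a, b, m, k, n) {   // a: m*k, b: k*n, flat row-major arrays
  const out = new Float32Array(m * n);
  for (let i = 0; i < m; i++) {
    for (let j = 0; j < n; j++) {
      let sum = 0;
      for (let p = 0; p < k; p++) sum += a[i * k + p] * b[p * n + j];
      out[i * n + j] = sum;
    }
  }
  return out;
}
console.log(matmulNaive([1, 2, 3, 4, 5, 6], [7, 8, 9, 10, 11, 12], 2, 3, 2));
// Float32Array [58, 64, 139, 154]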
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { _FusedMatMul } from '@tensorflow/tfjs-core';\nimport { applyActivation } from '../utils/fused_utils';\nimport { add } from './Add';\nimport { batchMatMul } from './BatchMatMul';\nexport function _fusedMatMul(args) {\n const { inputs, backend, attrs } = args;\n const { a, b, bias, preluActivationWeights } = inputs;\n const { transposeA, transposeB, activation } = attrs;\n let current;\n let addRes;\n let activationRes;\n const intermediates = [];\n const matMulRes = batchMatMul({ inputs: { a, b }, attrs: { transposeA, transposeB }, backend });\n current = matMulRes;\n if (bias) {\n addRes = add({ inputs: { a: current, b: bias }, backend });\n intermediates.push(current);\n current = addRes;\n }\n if (activation) {\n activationRes =\n applyActivation(backend, current, activation, preluActivationWeights);\n intermediates.push(current);\n current = activationRes;\n }\n for (const i of intermediates) {\n backend.disposeIntermediateTensorInfo(i);\n }\n return current;\n}\nexport const _fusedMatMulConfig = {\n kernelName: _FusedMatMul,\n backendName: 'cpu',\n kernelFunc: _fusedMatMul,\n};\n//# sourceMappingURL=_FusedMatMul.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Acos } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const acos = unaryKernelFunc(Acos, (xi) => Math.acos(xi));\nexport const acosConfig = {\n kernelName: Acos,\n backendName: 'cpu',\n kernelFunc: acos,\n};\n//# sourceMappingURL=Acos.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
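// Editor's note: an illustrative sketch, not part of the bundled sources above. _fusedMatMul is
// matMul -> optional bias add -> optional activation folded into a single kernel, so for a 1x1
// case its output equals the unfused composition.
const aVal = 2, bVal = 3, biasVal = 1;       // 1x1 "matrices"
const reluAct = (x) => Math.max(0, x);
console.log(reluAct(aVal * bVal + biasVal)); // 7, matching matMul + bias + relu done separately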
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Acosh } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const acosh = unaryKernelFunc(Acosh, (xi) => Math.acosh(xi));\nexport const acoshConfig = {\n kernelName: Acosh,\n backendName: 'cpu',\n kernelFunc: acosh,\n};\n//# sourceMappingURL=Acosh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Asin } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const asin = unaryKernelFunc(Asin, (xi) => Math.asin(xi));\nexport const asinConfig = {\n kernelName: Asin,\n backendName: 'cpu',\n kernelFunc: asin,\n};\n//# sourceMappingURL=Asin.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Asinh } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const asinh = unaryKernelFunc(Asinh, (xi) => Math.asinh(xi));\nexport const asinhConfig = {\n kernelName: Asinh,\n backendName: 'cpu',\n kernelFunc: asinh,\n};\n//# sourceMappingURL=Asinh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Atan } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const atan = unaryKernelFunc(Atan, (xi) => Math.atan(xi));\nexport const atanConfig = {\n kernelName: Atan,\n backendName: 'cpu',\n kernelFunc: atan,\n};\n//# sourceMappingURL=Atan.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Atanh } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const atanh = unaryKernelFunc(Atanh, (xi) => Math.atanh(xi));\nexport const atanhConfig = {\n kernelName: Atanh,\n backendName: 'cpu',\n kernelFunc: atanh,\n};\n//# sourceMappingURL=Atanh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { buffer } from '@tensorflow/tfjs-core';\nexport function pool(xValues, xShape, dtype, strides, convInfo, poolType) {\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n const initialValue = (poolType === 'max' ? 
Number.NEGATIVE_INFINITY :\n Number.POSITIVE_INFINITY);\n const output = buffer(convInfo.outShape, dtype);\n const outputVals = output.values;\n const outputBatchStrides = convInfo.outShape[1] * convInfo.outShape[2] * convInfo.outShape[3];\n const outputRowStrides = convInfo.outShape[2] * convInfo.outShape[3];\n const outputColStrides = convInfo.outShape[3];\n for (let b = 0; b < convInfo.batchSize; ++b) {\n const outputBatchOffset = b * outputBatchStrides;\n const inputBatchOffset = b * strides[0];\n for (let d = 0; d < convInfo.inChannels; ++d) {\n for (let yR = 0; yR < convInfo.outHeight; ++yR) {\n const xRCorner = yR * strideHeight - padTop;\n const xRMin = Math.max(0, xRCorner);\n const xRMax = Math.min(convInfo.inHeight, effectiveFilterHeight + xRCorner);\n const outputRowOffset = outputBatchOffset + yR * outputRowStrides;\n for (let yC = 0; yC < convInfo.outWidth; ++yC) {\n const xCCorner = yC * strideWidth - padLeft;\n const xCMin = Math.max(0, xCCorner);\n const xCMax = Math.min(convInfo.inWidth, effectiveFilterWidth + xCCorner);\n let minMaxValue = initialValue;\n let avgValue = 0;\n let count = 0;\n for (let xR = xRMin; xR < xRMax; xR += dilationHeight) {\n const xROffset = inputBatchOffset + xR * strides[1];\n for (let xC = xCMin; xC < xCMax; xC += dilationWidth) {\n const xCOffset = xROffset + xC * strides[2];\n const pixel = xValues[xCOffset + d];\n if ((poolType === 'max' && pixel > minMaxValue)) {\n minMaxValue = pixel;\n }\n else if (poolType === 'avg') {\n avgValue += pixel;\n count++;\n }\n }\n if (isNaN(minMaxValue)) {\n break;\n }\n }\n const outputOffset = outputRowOffset + yC * outputColStrides + d;\n outputVals[outputOffset] =\n poolType === 'avg' ? avgValue / count : minMaxValue;\n }\n }\n }\n }\n return output;\n}\nexport function maxPoolPositions(xValues, xShape, dtype, convInfo, flattenPositions = false, includeBatchInIndex = false) {\n const maxPositions = buffer(convInfo.outShape, 'int32');\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n const xBuf = buffer(xShape, dtype, xValues);\n for (let b = 0; b < convInfo.batchSize; ++b) {\n for (let d = 0; d < convInfo.inChannels; ++d) {\n for (let yR = 0; yR < convInfo.outHeight; ++yR) {\n const xRCorner = yR * strideHeight - padTop;\n let xRMin = xRCorner;\n while (xRMin < 0) {\n xRMin += dilationHeight;\n }\n // const xRMin = Math.max(0, xRCorner);\n const xRMax = Math.min(convInfo.inHeight, effectiveFilterHeight + xRCorner);\n for (let yC = 0; yC < convInfo.outWidth; ++yC) {\n const xCCorner = yC * strideWidth - padLeft;\n let xCMin = xCCorner;\n while (xCMin < 0) {\n xCMin += dilationWidth;\n }\n const xCMax = Math.min(convInfo.inWidth, effectiveFilterWidth + xCCorner);\n let maxValue = Number.NEGATIVE_INFINITY;\n let maxPosition = -1;\n for (let xR = xRMin; xR < xRMax; xR += dilationHeight) {\n const wR = xR - xRCorner;\n for (let xC = xCMin; xC < xCMax; xC += dilationWidth) {\n const wC = xC - xCCorner;\n const pixel = xBuf.get(b, xR, xC, d);\n if (pixel > maxValue) {\n maxValue = pixel;\n if (flattenPositions) {\n maxPosition = includeBatchInIndex ?\n ((b * convInfo.inHeight + xR) * convInfo.inWidth + xC) *\n convInfo.inChannels +\n d :\n (xR * convInfo.inWidth + xC) * 
convInfo.inChannels + d;\n }\n else {\n maxPosition = wR * effectiveFilterWidth + wC;\n }\n }\n }\n }\n maxPositions.set(maxPosition, b, yR, yC, d);\n }\n }\n }\n }\n return maxPositions;\n}\n//# sourceMappingURL=pool_utils.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { AvgPool, backend_util, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { pool } from '../utils/pool_utils';\nimport { identity } from './Identity';\nexport function avgPool(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n assertNotComplex(x, 'avgPool');\n const { filterSize, strides, pad, dimRoundingMode } = attrs;\n const dilations = 1;\n util.assert(backend_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in avgPool: Either strides or dilations must be 1. ' +\n `Got strides ${strides} and dilations '${dilations}'`);\n const convInfo = backend_util.computePool2DInfo(x.shape, filterSize, strides, dilations, pad, dimRoundingMode);\n let res;\n if (convInfo.filterWidth === 1 && convInfo.filterHeight === 1 &&\n util.arraysEqual(convInfo.inShape, convInfo.outShape)) {\n res = identity({ inputs: { x }, backend });\n }\n else {\n const xValues = backend.data.get(x.dataId).values;\n const strides = util.computeStrides(x.shape);\n const buffer = pool(xValues, x.shape, x.dtype, strides, convInfo, 'avg');\n res = backend.makeTensorInfo(convInfo.outShape, x.dtype, buffer.values);\n }\n return res;\n}\nexport const avgPoolConfig = {\n kernelName: AvgPool,\n backendName: 'cpu',\n kernelFunc: avgPool\n};\n//# sourceMappingURL=AvgPool.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
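// Editor's note: an illustrative sketch, not part of the bundled sources above: the 'avg' branch
// of pool(), specialised to a single channel, a 2x2 window, stride 2 and no padding.
function avgPool2x2(x) {                 // x: rows of a matrix with even height/width
  const out = [];
  for (let r = 0; r < x.length; r += 2) {
    const row = [];
    for (let c = 0; c < x[0].length; c += 2) {
      row.push((x[r][c] + x[r][c + 1] + x[r + 1][c] + x[r + 1][c + 1]) / 4);
    }
    out.push(row);
  }
  return out;
}
console.log(avgPool2x2([[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12], [13, 14, 15, 16]]));
// [[3.5, 5.5], [11.5, 13.5]]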
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { AvgPoolBackprop, backend_util, buffer } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function avgPoolBackprop(args) {\n const { inputs, backend, attrs } = args;\n const { dy, input } = inputs;\n const x = input;\n assertNotComplex([dy, input], 'avgPoolBackprop');\n const { filterSize, strides, pad } = attrs;\n const convInfo = backend_util.computePool2DInfo(x.shape, filterSize, strides, 1 /* dilations */, pad);\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padLeft = effectiveFilterWidth - 1 - convInfo.padInfo.left;\n const padTop = effectiveFilterHeight - 1 - convInfo.padInfo.top;\n const dx = buffer(x.shape, 'float32');\n const avgMultiplier = 1 / (filterHeight * filterWidth);\n const dyData = backend.data.get(dy.dataId).values;\n const dyBuf = buffer(dy.shape, 'float32', dyData);\n for (let b = 0; b < convInfo.batchSize; ++b) {\n for (let d = 0; d < convInfo.inChannels; ++d) {\n for (let dxR = 0; dxR < convInfo.inHeight; ++dxR) {\n for (let dxC = 0; dxC < convInfo.inWidth; ++dxC) {\n // Shader code begins.\n const dyRCorner = dxR - padTop;\n const dyCCorner = dxC - padLeft;\n let dotProd = 0;\n for (let wR = 0; wR < effectiveFilterHeight; wR += dilationHeight) {\n const dyR = (dyRCorner + wR) / strideHeight;\n if (dyR < 0 || dyR >= convInfo.outHeight ||\n Math.floor(dyR) !== dyR) {\n continue;\n }\n for (let wC = 0; wC < effectiveFilterWidth; wC += dilationWidth) {\n const dyC = (dyCCorner + wC) / strideWidth;\n if (dyC < 0 || dyC >= convInfo.outWidth ||\n Math.floor(dyC) !== dyC) {\n continue;\n }\n const pixel = dyBuf.get(b, dyR, dyC, d);\n dotProd += pixel;\n }\n }\n dx.set(dotProd * avgMultiplier, b, dxR, dxC, d);\n }\n }\n }\n }\n return backend.makeTensorInfo(dx.shape, dx.dtype, dx.values);\n}\nexport const avgPoolBackpropConfig = {\n kernelName: AvgPoolBackprop,\n backendName: 'cpu',\n kernelFunc: avgPoolBackprop\n};\n//# sourceMappingURL=AvgPoolBackprop.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { FusedBatchNorm, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function batchNorm(args) {\n const { inputs, backend, attrs } = args;\n const { x, scale, offset, mean, variance } = inputs;\n util.assert(mean.shape.length === variance.shape.length, () => 'Batch normalization gradient requires mean and variance to have ' +\n 'equal ranks.');\n util.assert(offset == null || mean.shape.length === offset.shape.length, () => 'Batch normalization gradient requires mean and offset to have ' +\n 'equal ranks.');\n util.assert(scale == null || mean.shape.length === scale.shape.length, () => 'Batch normalization gradient requires mean and scale to have ' +\n 'equal ranks.');\n assertNotComplex([x, mean, variance, scale, offset], 'batchNorm');\n let { varianceEpsilon } = attrs;\n if (varianceEpsilon == null) {\n varianceEpsilon = 0.001;\n }\n const xVals = backend.data.get(x.dataId).values;\n const mVals = backend.data.get(mean.dataId).values;\n const varVals = backend.data.get(variance.dataId).values;\n const sVals = scale ? backend.data.get(scale.dataId).values :\n new Float32Array([1]);\n const offVals = offset ?\n backend.data.get(offset.dataId).values :\n new Float32Array([0]);\n const outVals = new Float32Array(xVals.length);\n const offValsLength = offVals.length;\n const sValsLength = sVals.length;\n const varValsLength = varVals.length;\n const mValsLength = mVals.length;\n let offi = 0;\n let mi = 0;\n let si = 0;\n let vi = 0;\n for (let i = 0; i < xVals.length; ++i) {\n outVals[i] = offVals[offi++] +\n (xVals[i] - mVals[mi++]) * sVals[si++] /\n Math.sqrt(varVals[vi++] + varianceEpsilon);\n if (offi >= offValsLength) {\n offi = 0;\n }\n if (mi >= mValsLength) {\n mi = 0;\n }\n if (si >= sValsLength) {\n si = 0;\n }\n if (vi >= varValsLength) {\n vi = 0;\n }\n }\n return backend.makeTensorInfo(x.shape, x.dtype, outVals);\n}\nexport const batchNormConfig = {\n kernelName: FusedBatchNorm,\n backendName: 'cpu',\n kernelFunc: batchNorm,\n};\n//# sourceMappingURL=BatchNorm.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
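// Editor's note: an illustrative sketch, not part of the bundled sources above, of the
// per-element formula applied by the batchNorm kernel (scale and offset broadcast over x).
const batchNormScalar = (x, mean, variance, scale = 1, offset = 0, eps = 0.001) =>
  offset + ((x - mean) * scale) / Math.sqrt(variance + eps);
console.log(batchNormScalar(3, 1, 4, 1, 0, 0)); // 1, i.e. (3 - 1) / sqrt(4)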
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ClipByValue } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const clip = unaryKernelFunc(ClipByValue, (xi, attrs) => {\n const clipAttrs = attrs;\n if (xi > clipAttrs.clipValueMax) {\n return clipAttrs.clipValueMax;\n }\n return xi < clipAttrs.clipValueMin ? clipAttrs.clipValueMin : xi;\n});\nexport const clipConfig = {\n kernelName: ClipByValue,\n backendName: 'cpu',\n kernelFunc: clip,\n};\n//# sourceMappingURL=Clip.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Imag } from '@tensorflow/tfjs-core';\nexport function imag(args) {\n const { inputs, backend } = args;\n const { input } = inputs;\n const imag = backend.data.get(input.dataId).complexTensorInfos.imag;\n const imagVal = backend.data.get(imag.dataId).values;\n // When complex tensor is disposed, its underlying parts will be disposed too.\n // Make new tensor out of the imag value of the complex. This makes sure the\n // value is still accessible even if complex tensor is disposed.\n return backend.makeTensorInfo(imag.shape, imag.dtype, imagVal);\n}\nexport const imagConfig = {\n kernelName: Imag,\n backendName: 'cpu',\n kernelFunc: imag\n};\n//# sourceMappingURL=Imag.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Concat, util } from '@tensorflow/tfjs-core';\nimport { complex } from './Complex';\nimport { imag } from './Imag';\nimport { real } from './Real';\nimport { reshape } from './Reshape';\nexport function concat(args) {\n const { inputs, backend, attrs } = args;\n const { axis } = attrs;\n const $axis = util.parseAxisParam(axis, inputs[0].shape)[0];\n let outShape = backend_util.computeOutShape(inputs.map(t => t.shape), $axis);\n if (util.sizeFromShape(outShape) === 0) {\n return backend.makeTensorInfo(outShape, inputs[0].dtype, []);\n }\n // Keep only non-empty tensors (ignore tensors with 0 in their shape).\n const $inputs = inputs.filter(t => util.sizeFromShape(t.shape) > 0);\n if ($inputs.length === 1) {\n return $inputs[0];\n }\n const shapes = $inputs.map(t => t.shape);\n backend_util.assertParamsConsistent(shapes, $axis);\n if ($inputs[0].dtype === 'complex64') {\n const reals = $inputs.map((t) => real({ inputs: { input: t }, backend }));\n const imags = $inputs.map((t) => imag({ inputs: { input: t }, backend }));\n const realConcated = concat({ inputs: reals, backend, attrs: { axis: $axis } });\n const imagConcated = concat({ inputs: imags, backend, attrs: { axis: $axis } });\n const result = complex({ inputs: { real: realConcated, imag: imagConcated }, backend });\n reals.forEach(r => backend.disposeIntermediateTensorInfo(r));\n imags.forEach(i => backend.disposeIntermediateTensorInfo(i));\n backend.disposeIntermediateTensorInfo(realConcated);\n backend.disposeIntermediateTensorInfo(imagConcated);\n return result;\n }\n // Any concat of n-dimensional tensors across any axis can be reduced to\n // a concatenation of two-dimensional tensors across the axis 1 by first\n // partitioning the axes of the original tensors into those less than the\n // axis to be concatenated and the rest. 
Then reshape the tensors\n // into a two-dimensional tensor by collapsing these two sets of axes and\n // concatenate the resulting matrices across the axis 1, finally reshaping\n // the result to have the proper shape.\n const inputs2D = $inputs.map(t => {\n const innerSize = util.sizeFromShape(t.shape.slice($axis));\n const shape = [-1, innerSize];\n return reshape({ inputs: { x: t }, backend, attrs: { shape } });\n });\n // Concats 2d tensors along axis=1.\n outShape =\n backend_util.computeOutShape(inputs2D.map(t => t.shape), 1 /* axis */);\n const outVals = util.getTypedArrayFromDType($inputs[0].dtype, util.sizeFromShape(outShape));\n if (inputs2D[0].shape[0] === 1) {\n // Use built-in TypedArray.set() method for speed.\n let offset = 0;\n inputs2D.forEach(t => {\n const val = backend.data.get(t.dataId).values;\n const size = util.sizeFromShape(t.shape);\n outVals.set(val, offset);\n offset += size;\n });\n }\n else {\n let colOffset = 0;\n inputs2D.forEach(t => {\n const tVals = backend.data.get(t.dataId).values;\n let tIdx = 0;\n for (let row = 0; row < t.shape[0]; ++row) {\n const resIdx = row * outShape[1] + colOffset;\n for (let col = 0; col < t.shape[1]; ++col) {\n outVals[resIdx + col] = tVals[tIdx++];\n }\n }\n colOffset += t.shape[1];\n });\n }\n const finalOutShape = backend_util.computeOutShape($inputs.map(t => t.shape), $axis);\n const outInfo = backend.makeTensorInfo(finalOutShape, inputs[0].dtype, outVals);\n inputs2D.forEach(t => backend.disposeIntermediateTensorInfo(t));\n return outInfo;\n}\nexport const concatConfig = {\n kernelName: Concat,\n backendName: 'cpu',\n kernelFunc: concat\n};\n//# sourceMappingURL=Concat.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Conv2D, TensorBuffer, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function conv2D(args) {\n const { inputs, backend, attrs } = args;\n const { x, filter } = inputs;\n const { strides, pad, dataFormat, dilations, dimRoundingMode } = attrs;\n assertNotComplex([x, filter], 'conv2d');\n const $dataFormat = backend_util.convertConv2DDataFormat(dataFormat);\n const convInfo = backend_util.computeConv2DInfo(x.shape, filter.shape, strides, dilations, pad, dimRoundingMode, false /* depthwise */, $dataFormat);\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const padLeft = convInfo.padInfo.left;\n const padTop = convInfo.padInfo.top;\n const isChannelsLast = convInfo.dataFormat === 'channelsLast';\n const y = new TensorBuffer(convInfo.outShape, x.dtype);\n const xStrides = util.computeStrides(x.shape);\n const filterStrides = util.computeStrides(filter.shape);\n const xBatchStride = xStrides[0];\n const xRowStride = isChannelsLast ? 
xStrides[1] : xStrides[2];\n const xColStride = isChannelsLast ? xStrides[2] : 1;\n const xChannelStride = isChannelsLast ? 1 : xStrides[1];\n const yBatchStride = y.strides[0];\n const yRowStride = isChannelsLast ? y.strides[1] : y.strides[2];\n const yColStride = isChannelsLast ? y.strides[2] : 1;\n const yChannelStride = isChannelsLast ? 1 : y.strides[1];\n const xVals = backend.data.get(x.dataId).values;\n const wVals = backend.data.get(filter.dataId).values;\n const yVals = y.values;\n for (let b = 0; b < convInfo.batchSize; ++b) {\n const xOffset1 = b * xBatchStride;\n const yOffset1 = b * yBatchStride;\n for (let yR = 0; yR < convInfo.outHeight; ++yR) {\n const yOffset2 = yOffset1 + yR * yRowStride;\n const xRCorner = yR * convInfo.strideHeight - padTop;\n for (let wR = 0; wR < filterHeight; ++wR) {\n const xR = xRCorner + wR * dilationHeight;\n if (xR < 0 || xR >= convInfo.inHeight) {\n continue;\n }\n const wOffset1 = wR * filterStrides[0];\n const xOffset2 = xOffset1 + xR * xRowStride;\n for (let yC = 0; yC < convInfo.outWidth; ++yC) {\n const yOffset3 = yOffset2 + yC * yColStride;\n const xCCorner = yC * convInfo.strideWidth - padLeft;\n for (let wC = 0; wC < filterWidth; ++wC) {\n const xC = xCCorner + wC * dilationWidth;\n if (xC < 0 || xC >= convInfo.inWidth) {\n continue;\n }\n const wOffset2 = wOffset1 + wC * filterStrides[1];\n const xOffset3 = xOffset2 + xC * xColStride;\n let wOffset3 = wOffset2;\n for (let d1 = 0; d1 < convInfo.inChannels; ++d1) {\n const xVal = xVals[xOffset3 + d1 * xChannelStride];\n for (let d2 = 0; d2 < convInfo.outChannels; ++d2) {\n yVals[yOffset3 + d2 * yChannelStride] +=\n xVal * wVals[wOffset3 + d2];\n }\n wOffset3 += convInfo.outChannels;\n }\n }\n }\n }\n }\n }\n return backend.makeTensorInfo(y.shape, y.dtype, yVals);\n}\nexport const conv2DConfig = {\n kernelName: Conv2D,\n backendName: 'cpu',\n kernelFunc: conv2D\n};\n//# sourceMappingURL=Conv2D.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
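// Editor's note: an illustrative sketch, not part of the bundled sources above: the inner loops
// of conv2D specialised to one batch, one input channel, one output channel, stride 1 and
// 'valid' padding.
function conv2dValid(x, w) {              // x: H x W input, w: fH x fW filter
  const outH = x.length - w.length + 1;
  const outW = x[0].length - w[0].length + 1;
  const y = [];
  for (let r = 0; r < outH; r++) {
    const row = [];
    for (let c = 0; c < outW; c++) {
      let sum = 0;
      for (let i = 0; i < w.length; i++) {
        for (let j = 0; j < w[0].length; j++) sum += x[r + i][c + j] * w[i][j];
      }
      row.push(sum);
    }
    y.push(row);
  }
  return y;
}
console.log(conv2dValid([[1, 2, 3], [4, 5, 6], [7, 8, 9]], [[1, 0], [0, 1]]));
// [[6, 8], [12, 14]]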
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Conv2DBackpropFilter, TensorBuffer } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function conv2DBackpropFilter(args) {\n const { inputs, backend, attrs } = args;\n const { x, dy } = inputs;\n const { strides, pad, dataFormat, dimRoundingMode, filterShape } = attrs;\n assertNotComplex([x, dy], 'conv2dBackpropFilter');\n const $dataFormat = backend_util.convertConv2DDataFormat(dataFormat);\n const convInfo = backend_util.computeConv2DInfo(x.shape, filterShape, strides, 1 /* dilations */, pad, dimRoundingMode, false /* depthwise */, $dataFormat);\n const { strideHeight, strideWidth, filterHeight, filterWidth } = convInfo;\n const isChannelsLast = convInfo.dataFormat === 'channelsLast';\n const dW = new TensorBuffer(convInfo.filterShape, 'float32');\n const leftPad = convInfo.padInfo.left;\n const topPad = convInfo.padInfo.top;\n const xVals = backend.data.get(x.dataId).values;\n const dyVals = backend.data.get(dy.dataId).values;\n const xBuf = new TensorBuffer(x.shape, x.dtype, xVals);\n const dyBuf = new TensorBuffer(dy.shape, dy.dtype, dyVals);\n for (let wR = 0; wR < filterHeight; ++wR) {\n const yRMin = Math.max(0, Math.ceil((topPad - wR) / strideHeight));\n const yRMax = Math.min(convInfo.outHeight, (convInfo.inHeight + topPad - wR) / strideHeight);\n for (let wC = 0; wC < filterWidth; ++wC) {\n const yCMin = Math.max(0, Math.ceil((leftPad - wC) / strideWidth));\n const yCMax = Math.min(convInfo.outWidth, (convInfo.inWidth + leftPad - wC) / strideWidth);\n for (let d1 = 0; d1 < convInfo.inChannels; ++d1) {\n for (let d2 = 0; d2 < convInfo.outChannels; ++d2) {\n let dotProd = 0;\n for (let b = 0; b < convInfo.batchSize; ++b) {\n for (let yR = yRMin; yR < yRMax; ++yR) {\n const xR = wR + yR * strideHeight - topPad;\n for (let yC = yCMin; yC < yCMax; ++yC) {\n const xC = wC + yC * strideWidth - leftPad;\n if (isChannelsLast) {\n dotProd += xBuf.get(b, xR, xC, d1) *\n dyBuf.get(b, yR, yC, d2);\n }\n else {\n dotProd += xBuf.get(b, d1, xR, xC) *\n dyBuf.get(b, d2, yR, yC);\n }\n }\n }\n }\n dW.set(dotProd, wR, wC, d1, d2);\n }\n }\n }\n }\n return backend.makeTensorInfo(dW.shape, dW.dtype, dW.values);\n}\nexport const conv2DBackpropFilterConfig = {\n kernelName: Conv2DBackpropFilter,\n backendName: 'cpu',\n kernelFunc: conv2DBackpropFilter\n};\n//# sourceMappingURL=Conv2DBackpropFilter.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Conv2DBackpropInput, TensorBuffer, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function conv2DBackpropInput(args) {\n const { inputs, backend, attrs } = args;\n const { dy, filter } = inputs;\n const { inputShape, strides, pad, dataFormat, dimRoundingMode } = attrs;\n assertNotComplex([dy, filter], 'conv2dBackpropInput');\n const filterStrides = util.computeStrides(filter.shape);\n const dyStrides = util.computeStrides(dy.shape);\n let $dataFormat = backend_util.convertConv2DDataFormat(dataFormat);\n const convInfo = backend_util.computeConv2DInfo(inputShape, filter.shape, strides, 1 /* dilations */, pad, dimRoundingMode, false, $dataFormat);\n const dx = new TensorBuffer(convInfo.inShape, 'float32');\n const dxValues = dx.values;\n const dyValues = backend.data.get(dy.dataId).values;\n const fltValues = backend.data.get(filter.dataId).values;\n const [fltS0, fltS1, fltS2] = filterStrides;\n const { batchSize, filterHeight, filterWidth, inChannels, inHeight, inWidth, outChannels, outHeight, outWidth, strideHeight, strideWidth } = convInfo;\n $dataFormat = convInfo.dataFormat;\n const topPad = filterHeight - 1 - convInfo.padInfo.top;\n const leftPad = filterWidth - 1 - convInfo.padInfo.left;\n const isChannelsLast = $dataFormat === 'channelsLast';\n const xBatchStride = dx.strides[0];\n const xRowStride = isChannelsLast ? dx.strides[1] : dx.strides[2];\n const xColStride = isChannelsLast ? dx.strides[2] : 1;\n const xChannelStride = isChannelsLast ? 1 : dx.strides[1];\n const yBatchStride = dyStrides[0];\n const yRowStride = isChannelsLast ? dyStrides[1] : dyStrides[2];\n const yColStride = isChannelsLast ? dyStrides[2] : 1;\n const yChannelStride = isChannelsLast ? 
1 : dyStrides[1];\n for (let b = 0; b < batchSize; ++b) {\n for (let d1 = 0; d1 < inChannels; ++d1) {\n for (let xR = 0; xR < inHeight; ++xR) {\n const xRCorner = xR - topPad;\n const xRMin = Math.max(0, Math.ceil(xRCorner / strideHeight));\n const yRMax = Math.min(outHeight, (filterHeight + xRCorner) / strideHeight);\n for (let xC = 0; xC < inWidth; ++xC) {\n const xCCorner = xC - leftPad;\n const xCMin = Math.max(0, Math.ceil(xCCorner / strideWidth));\n const yCMax = Math.min(outWidth, (filterWidth + xCCorner) / strideWidth);\n let dotProd = 0;\n for (let yR = xRMin; yR < yRMax; ++yR) {\n const wR = yR * strideHeight - xRCorner;\n for (let yC = xCMin; yC < yCMax; ++yC) {\n const wC = yC * strideWidth - xCCorner;\n const dyOffset = yBatchStride * b + yRowStride * yR + yColStride * yC;\n const fltOffset = fltS0 * (filterHeight - 1 - wR) +\n fltS1 * (filterWidth - 1 - wC) + fltS2 * d1;\n for (let d2 = 0; d2 < outChannels; ++d2) {\n const pixel = dyValues[dyOffset + yChannelStride * d2];\n const weight = fltValues[fltOffset + d2];\n dotProd += pixel * weight;\n }\n }\n }\n const dxOffset = xBatchStride * b + xRowStride * xR +\n xColStride * xC + xChannelStride * d1;\n dxValues[dxOffset] = dotProd;\n }\n }\n }\n }\n return backend.makeTensorInfo(dx.shape, dx.dtype, dx.values);\n}\nexport const conv2DBackpropInputConfig = {\n kernelName: Conv2DBackpropInput,\n backendName: 'cpu',\n kernelFunc: conv2DBackpropInput\n};\n//# sourceMappingURL=Conv2DBackpropInput.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Conv3D, TensorBuffer, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function conv3D(args) {\n const { inputs, backend, attrs } = args;\n const { x, filter } = inputs;\n const { strides, pad, dilations } = attrs;\n assertNotComplex([x, filter], 'conv3d');\n const convInfo = backend_util.computeConv3DInfo(x.shape, filter.shape, strides, dilations, pad);\n const { filterDepth, filterHeight, filterWidth, dilationDepth, dilationHeight, dilationWidth, padInfo } = convInfo;\n const padFront = padInfo.front;\n const padLeft = padInfo.left;\n const padTop = padInfo.top;\n const y = new TensorBuffer(convInfo.outShape, x.dtype);\n const xVals = backend.data.get(x.dataId).values;\n const wVals = backend.data.get(filter.dataId).values;\n const yVals = y.values;\n const xStrides = util.computeStrides(x.shape);\n const filterStrides = util.computeStrides(filter.shape);\n for (let b = 0; b < convInfo.batchSize; ++b) {\n const xOffset1 = b * xStrides[0];\n const yOffset1 = b * y.strides[0];\n for (let yF = 0; yF < convInfo.outDepth; ++yF) {\n const yOffset2 = yOffset1 + yF * y.strides[1];\n const xFCorner = yF * convInfo.strideDepth - padFront;\n for (let wF = 0; wF < filterDepth; ++wF) {\n const xF = xFCorner + wF * dilationDepth;\n if (xF < 0 || xF >= 
convInfo.inDepth) {\n continue;\n }\n const wOffset1 = wF * filterStrides[0];\n const xOffset2 = xOffset1 + xF * xStrides[1];\n for (let yR = 0; yR < convInfo.outHeight; ++yR) {\n const yOffset3 = yOffset2 + yR * y.strides[2];\n const xRCorner = yR * convInfo.strideHeight - padTop;\n for (let wR = 0; wR < filterHeight; ++wR) {\n const xR = xRCorner + wR * dilationHeight;\n if (xR < 0 || xR >= convInfo.inHeight) {\n continue;\n }\n const wOffset2 = wOffset1 + wR * filterStrides[1];\n const xOffset3 = xOffset2 + xR * xStrides[2];\n for (let yC = 0; yC < convInfo.outWidth; ++yC) {\n const yOffset4 = yOffset3 + yC * convInfo.outChannels;\n const xCCorner = yC * convInfo.strideWidth - padLeft;\n for (let wC = 0; wC < filterWidth; ++wC) {\n const xC = xCCorner + wC * dilationWidth;\n if (xC < 0 || xC >= convInfo.inWidth) {\n continue;\n }\n const wOffset3 = wOffset2 + wC * filterStrides[2];\n const xOffset4 = xOffset3 + xC * convInfo.inChannels;\n let wOffset4 = wOffset3;\n for (let d1 = 0; d1 < convInfo.inChannels; ++d1) {\n const xVal = xVals[xOffset4 + d1];\n for (let d2 = 0; d2 < convInfo.outChannels; ++d2) {\n yVals[yOffset4 + d2] += xVal * wVals[wOffset4 + d2];\n }\n wOffset4 += convInfo.outChannels;\n }\n }\n }\n }\n }\n }\n }\n }\n return backend.makeTensorInfo(y.shape, y.dtype, y.values);\n}\nexport const conv3DConfig = {\n kernelName: Conv3D,\n backendName: 'cpu',\n kernelFunc: conv3D\n};\n//# sourceMappingURL=Conv3D.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Conv3DBackpropFilterV2, TensorBuffer, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function conv3DBackpropFilterV2(args) {\n const { inputs, backend, attrs } = args;\n const { x, dy } = inputs;\n const { strides, pad, filterShape } = attrs;\n assertNotComplex([x, dy], 'conv3dBackpropFilterV2');\n const xStrides = util.computeStrides(x.shape);\n const dyStrides = util.computeStrides(dy.shape);\n const convInfo = backend_util.computeConv3DInfo(x.shape, filterShape, strides, 1 /* dilations */, pad);\n const strideDepth = convInfo.strideDepth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const filterDepth = convInfo.filterDepth;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const dw = new TensorBuffer(convInfo.filterShape, 'float32');\n const dwValues = dw.values;\n const [dwS0, dwS1, dwS2, dwS3] = dw.strides;\n const dyValues = backend.data.get(dy.dataId).values;\n const [dyS0, dyS1, dyS2, dyS3] = dyStrides;\n const xValues = backend.data.get(x.dataId).values;\n const [xS0, xS1, xS2, xS3] = xStrides;\n const frontPad = convInfo.padInfo.front;\n const leftPad = convInfo.padInfo.left;\n const topPad = convInfo.padInfo.top;\n for (let wF = 0; wF < filterDepth; ++wF) {\n const yFMin = Math.max(0, 
Math.ceil((frontPad - wF) / strideDepth));\n const yFMax = Math.min(convInfo.outDepth, (convInfo.inDepth + frontPad - wF) / strideDepth);\n const wOffset1 = wF * dwS0;\n for (let wR = 0; wR < filterHeight; ++wR) {\n const yRMin = Math.max(0, Math.ceil((topPad - wR) / strideHeight));\n const yRMax = Math.min(convInfo.outHeight, (convInfo.inHeight + topPad - wR) / strideHeight);\n const wOffset2 = wR * dwS1 + wOffset1;\n for (let wC = 0; wC < filterWidth; ++wC) {\n const yCMin = Math.max(0, Math.ceil((leftPad - wC) / strideWidth));\n const yCMax = Math.min(convInfo.outWidth, (convInfo.inWidth + leftPad - wC) / strideWidth);\n const wOffset3 = wC * dwS2 + wOffset2;\n for (let d1 = 0; d1 < convInfo.inChannels; ++d1) {\n const wOffset4 = d1 * dwS3 + wOffset3;\n for (let d2 = 0; d2 < convInfo.outChannels; ++d2) {\n let dotProd = 0;\n for (let b = 0; b < convInfo.batchSize; ++b) {\n const xOffset1 = b * xS0;\n const yOffset1 = b * dyS0;\n for (let yF = yFMin; yF < yFMax; ++yF) {\n const xF = wF + yF * strideDepth - frontPad;\n const xOffset2 = xF * xS1 + xOffset1;\n const yOffset2 = yF * dyS1 + yOffset1;\n for (let yR = yRMin; yR < yRMax; ++yR) {\n const xR = wR + yR * strideHeight - topPad;\n const xOffset3 = xR * xS2 + xOffset2;\n const yOffset3 = yR * dyS2 + yOffset2;\n for (let yC = yCMin; yC < yCMax; ++yC) {\n const xC = wC + yC * strideWidth - leftPad;\n const xOffset4 = xC * xS3 + xOffset3;\n const yOffset4 = yC * dyS3 + yOffset3;\n dotProd += xValues[xOffset4 + d1] * dyValues[yOffset4 + d2];\n }\n }\n }\n }\n dwValues[wOffset4 + d2] = dotProd;\n }\n }\n }\n }\n }\n return backend.makeTensorInfo(dw.shape, dw.dtype, dw.values);\n}\nexport const conv3DBackpropFilterV2Config = {\n kernelName: Conv3DBackpropFilterV2,\n backendName: 'cpu',\n kernelFunc: conv3DBackpropFilterV2\n};\n//# sourceMappingURL=Conv3DBackpropFilterV2.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Conv3DBackpropInputV2, TensorBuffer, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function conv3DBackpropInputV2(args) {\n const { inputs, backend, attrs } = args;\n const { dy, filter } = inputs;\n const { pad, strides, inputShape } = attrs;\n assertNotComplex([dy], 'conv3dBackpropInputV2');\n const dyStrides = util.computeStrides(dy.shape);\n const filterStrides = util.computeStrides(filter.shape);\n const convInfo = backend_util.computeConv3DInfo(inputShape, filter.shape, strides, 1 /* dilations */, pad);\n const dx = new TensorBuffer(convInfo.inShape, 'float32');\n const dxValues = dx.values;\n const [dxS0, dxS1, dxS2, dxS3] = dx.strides;\n const dyValues = backend.data.get(dy.dataId).values;\n const [dyS0, dyS1, dyS2, dyS3] = dyStrides;\n const fltValues = backend.data.get(filter.dataId).values;\n const [fltS0, fltS1, fltS2, fltS3] = filterStrides;\n const { batchSize, filterDepth, filterHeight, filterWidth, inChannels, inDepth, inHeight, inWidth, outChannels, outDepth, outHeight, outWidth, strideDepth, strideHeight, strideWidth } = convInfo;\n const frontPad = filterDepth - 1 - convInfo.padInfo.front;\n const topPad = filterHeight - 1 - convInfo.padInfo.top;\n const leftPad = filterWidth - 1 - convInfo.padInfo.left;\n for (let b = 0; b < batchSize; ++b) {\n for (let d1 = 0; d1 < inChannels; ++d1) {\n // Frames of depth\n for (let xF = 0; xF < inDepth; ++xF) {\n const xFCorner = xF - frontPad;\n const xFMin = Math.max(0, Math.ceil(xFCorner / strideDepth));\n const yFMax = Math.min(outDepth, (filterDepth + xFCorner) / strideDepth);\n // Rows as per standard 2d matrix notation\n for (let xR = 0; xR < inHeight; ++xR) {\n const xRCorner = xR - topPad;\n const xRMin = Math.max(0, Math.ceil(xRCorner / strideHeight));\n const yRMax = Math.min(outHeight, (filterHeight + xRCorner) / strideHeight);\n // Columns as per standard 2d matrix notation\n for (let xC = 0; xC < inWidth; ++xC) {\n const xCCorner = xC - leftPad;\n const xCMin = Math.max(0, Math.ceil(xCCorner / strideWidth));\n const yCMax = Math.min(outWidth, (filterWidth + xCCorner) / strideWidth);\n let dotProd = 0;\n for (let yF = xFMin; yF < yFMax; ++yF) {\n const wF = yF * strideDepth - xFCorner;\n for (let yR = xRMin; yR < yRMax; ++yR) {\n const wR = yR * strideHeight - xRCorner;\n for (let yC = xCMin; yC < yCMax; ++yC) {\n const wC = yC * strideWidth - xCCorner;\n const dyOffset = dyS0 * b + dyS1 * yF + dyS2 * yR + dyS3 * yC;\n const fltOffset = fltS0 * (filterDepth - 1 - wF) +\n fltS1 * (filterHeight - 1 - wR) +\n fltS2 * (filterWidth - 1 - wC) + fltS3 * d1;\n for (let d2 = 0; d2 < outChannels; ++d2) {\n const pixel = dyValues[dyOffset + d2];\n const weight = fltValues[fltOffset + d2];\n dotProd += pixel * weight;\n }\n }\n }\n }\n dxValues[dxS0 * b + dxS1 * xF + dxS2 * xR + dxS3 * xC + d1] =\n 
dotProd;\n }\n }\n }\n }\n }\n return backend.makeTensorInfo(dx.shape, dx.dtype, dx.values);\n}\nexport const conv3DBackpropInputV2Config = {\n kernelName: Conv3DBackpropInputV2,\n backendName: 'cpu',\n kernelFunc: conv3DBackpropInputV2\n};\n//# sourceMappingURL=Conv3DBackpropInputV2.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Cos } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const cos = unaryKernelFunc(Cos, (xi) => Math.cos(xi));\nexport const cosConfig = {\n kernelName: Cos,\n backendName: 'cpu',\n kernelFunc: cos,\n};\n//# sourceMappingURL=Cos.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Cosh } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const cosh = unaryKernelFunc(Cosh, (xi) => Math.cosh(xi));\nexport const coshConfig = {\n kernelName: Cosh,\n backendName: 'cpu',\n kernelFunc: cosh,\n};\n//# sourceMappingURL=Cosh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, DepthwiseConv2dNative, TensorBuffer, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function depthwiseConv2dNative(args) {\n const { inputs, backend, attrs } = args;\n const { x, filter } = inputs;\n const { strides, pad, dilations, dimRoundingMode } = attrs;\n assertNotComplex([x, filter], 'depthwiseConv2DNative');\n const xStrides = util.computeStrides(x.shape);\n const filterStrides = util.computeStrides(filter.shape);\n let $dilations = dilations;\n if ($dilations == null) {\n $dilations = [1, 1];\n }\n util.assert(backend_util.eitherStridesOrDilationsAreOne(strides, $dilations), () => 'Error in depthwiseConv2d: Either strides or dilations must be ' +\n `1. Got strides ${strides} and dilations '${$dilations}'`);\n const convInfo = backend_util.computeConv2DInfo(x.shape, filter.shape, strides, $dilations, pad, dimRoundingMode, true /* depthwise */);\n const { filterHeight, filterWidth, dilationHeight, dilationWidth, padInfo } = convInfo;\n const padLeft = padInfo.left;\n const padTop = padInfo.top;\n const chMul = convInfo.outChannels / convInfo.inChannels;\n const y = new TensorBuffer(convInfo.outShape, x.dtype);\n const xVals = backend.data.get(x.dataId).values;\n const wVals = backend.data.get(filter.dataId).values;\n const yVals = y.values;\n for (let b = 0; b < convInfo.batchSize; ++b) {\n const xOffset1 = b * xStrides[0];\n const yOffset1 = b * y.strides[0];\n for (let yR = 0; yR < convInfo.outHeight; ++yR) {\n const yOffset2 = yOffset1 + yR * y.strides[1];\n const xRCorner = yR * convInfo.strideHeight - padLeft;\n for (let wR = 0; wR < filterHeight; ++wR) {\n const xR = xRCorner + wR * dilationHeight;\n if (xR < 0 || xR >= convInfo.inHeight) {\n continue;\n }\n const wOffset1 = wR * filterStrides[0];\n const xOffset2 = xOffset1 + xR * xStrides[1];\n for (let yC = 0; yC < convInfo.outWidth; ++yC) {\n const yOffset3 = yOffset2 + yC * y.strides[2];\n const xCCorner = yC * convInfo.strideWidth - padTop;\n for (let wC = 0; wC < filterWidth; ++wC) {\n const xC = xCCorner + wC * dilationWidth;\n if (xC < 0 || xC >= convInfo.inWidth) {\n continue;\n }\n const wOffset2 = wOffset1 + wC * filterStrides[1];\n const xOffset3 = xOffset2 + xC * convInfo.inChannels;\n let yOffset4 = yOffset3;\n let wOffset3 = wOffset2;\n for (let d1 = 0; d1 < convInfo.inChannels; ++d1) {\n const xVal = xVals[xOffset3 + d1];\n for (let q = 0; q < chMul; ++q) {\n yVals[yOffset4 + q] += xVal * wVals[wOffset3 + q];\n }\n yOffset4 += chMul;\n wOffset3 += chMul;\n }\n }\n }\n }\n }\n }\n return backend.makeTensorInfo(y.shape, y.dtype, y.values);\n}\nexport const depthwiseConv2dNativeConfig = {\n kernelName: DepthwiseConv2dNative,\n backendName: 'cpu',\n kernelFunc: depthwiseConv2dNative\n};\n//# sourceMappingURL=DepthwiseConv2dNative.js.map", "/**\n * @license\n * Copyright 2020 Google 
LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, DepthwiseConv2dNativeBackpropFilter, TensorBuffer } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function depthwiseConv2dNativeBackpropFilter(args) {\n const { inputs, backend, attrs } = args;\n const { x, dy } = inputs;\n const { strides, dilations, pad, dimRoundingMode, filterShape } = attrs;\n assertNotComplex([x, dy], 'depthwiseConv2dNativeBackpropFilter');\n const convInfo = backend_util.computeConv2DInfo(x.shape, filterShape, strides, dilations, pad, dimRoundingMode, true /* depthwise */);\n const { strideHeight, strideWidth, filterHeight, filterWidth } = convInfo;\n const dW = new TensorBuffer(convInfo.filterShape, 'float32');\n const leftPad = convInfo.padInfo.left;\n const topPad = convInfo.padInfo.top;\n const chMul = convInfo.outChannels / convInfo.inChannels;\n const xVals = backend.data.get(x.dataId).values;\n const xBuf = new TensorBuffer(x.shape, x.dtype, xVals);\n const dyVals = backend.data.get(dy.dataId).values;\n const dyBuf = new TensorBuffer(dy.shape, dy.dtype, dyVals);\n for (let wR = 0; wR < filterHeight; ++wR) {\n const yRMin = Math.max(0, Math.ceil((topPad - wR) / strideHeight));\n const yRMax = Math.min(convInfo.outHeight, (convInfo.inHeight + topPad - wR) / strideHeight);\n for (let wC = 0; wC < filterWidth; ++wC) {\n const yCMin = Math.max(0, Math.ceil((leftPad - wC) / strideWidth));\n const yCMax = Math.min(convInfo.outWidth, (convInfo.inWidth + leftPad - wC) / strideWidth);\n for (let d2 = 0; d2 < convInfo.outChannels; ++d2) {\n const d1 = Math.trunc(d2 / chMul);\n const dm = d2 % chMul;\n let dotProd = 0;\n for (let b = 0; b < convInfo.batchSize; ++b) {\n for (let yR = yRMin; yR < yRMax; ++yR) {\n const xR = wR + yR * strideHeight - topPad;\n for (let yC = yCMin; yC < yCMax; ++yC) {\n const xC = wC + yC * strideWidth - leftPad;\n dotProd += xBuf.get(b, xR, xC, d1) *\n dyBuf.get(b, yR, yC, d2);\n }\n }\n }\n dW.set(dotProd, wR, wC, d1, dm);\n }\n }\n }\n return backend.makeTensorInfo(dW.shape, dW.dtype, dW.values);\n}\nexport const depthwiseConv2dNativeBackpropFilterConfig = {\n kernelName: DepthwiseConv2dNativeBackpropFilter,\n backendName: 'cpu',\n kernelFunc: depthwiseConv2dNativeBackpropFilter\n};\n//# sourceMappingURL=DepthwiseConv2dNativeBackpropFilter.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, DepthwiseConv2dNativeBackpropInput, TensorBuffer, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function depthwiseConv2dNativeBackpropInput(args) {\n const { inputs, backend, attrs } = args;\n const { dy, filter } = inputs;\n const { strides, dilations, pad, dimRoundingMode, inputShape } = attrs;\n assertNotComplex([dy, filter], 'depthwiseConv2DNativeBackpropInput');\n const dyStrides = util.computeStrides(dy.shape);\n const filterStrides = util.computeStrides(filter.shape);\n const convInfo = backend_util.computeConv2DInfo(inputShape, filter.shape, strides, dilations, pad, dimRoundingMode, true /* depthwise */);\n const dx = new TensorBuffer(convInfo.inShape, 'float32');\n const dxValues = dx.values;\n const [dxS0, dxS1, dxS2] = dx.strides;\n const dyValues = backend.data.get(dy.dataId).values;\n const [dyS0, dyS1, dyS2] = dyStrides;\n const fltValues = backend.data.get(filter.dataId).values;\n const [fltS0, fltS1, fltS2] = filterStrides;\n const { batchSize, filterHeight, filterWidth, inChannels, inHeight, inWidth, outChannels, outHeight, outWidth, strideHeight, strideWidth } = convInfo;\n const topPad = filterHeight - 1 - convInfo.padInfo.top;\n const leftPad = filterWidth - 1 - convInfo.padInfo.left;\n const chMul = outChannels / inChannels;\n for (let b = 0; b < batchSize; ++b) {\n for (let d1 = 0; d1 < inChannels; ++d1) {\n for (let xR = 0; xR < inHeight; ++xR) {\n const xRCorner = xR - topPad;\n const xRMin = Math.max(0, Math.ceil(xRCorner / strideHeight));\n const yRMax = Math.min(outHeight, (filterHeight + xRCorner) / strideHeight);\n for (let xC = 0; xC < inWidth; ++xC) {\n const xCCorner = xC - leftPad;\n const xCMin = Math.max(0, Math.ceil(xCCorner / strideWidth));\n const yCMax = Math.min(outWidth, (filterWidth + xCCorner) / strideWidth);\n let dotProd = 0;\n for (let yR = xRMin; yR < yRMax; ++yR) {\n const wR = yR * strideHeight - xRCorner;\n for (let yC = xCMin; yC < yCMax; ++yC) {\n const wC = yC * strideWidth - xCCorner;\n const dyOffset = dyS0 * b + dyS1 * yR + dyS2 * yC;\n const fltOffset = fltS0 * (filterHeight - 1 - wR) +\n fltS1 * (filterWidth - 1 - wC) + fltS2 * d1;\n for (let dm = 0; dm < chMul; ++dm) {\n const d2 = d1 * chMul + dm;\n const pixel = dyValues[dyOffset + d2];\n const weight = fltValues[fltOffset + dm];\n dotProd += pixel * weight;\n }\n }\n }\n dxValues[dxS0 * b + dxS1 * xR + dxS2 * xC + d1] = dotProd;\n }\n }\n }\n }\n return backend.makeTensorInfo(dx.shape, dx.dtype, dx.values);\n}\nexport const depthwiseConv2dNativeBackpropInputConfig = {\n kernelName: DepthwiseConv2dNativeBackpropInput,\n backendName: 'cpu',\n kernelFunc: depthwiseConv2dNativeBackpropInput\n};\n//# sourceMappingURL=DepthwiseConv2dNativeBackpropInput.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Dilation2D, util } from '@tensorflow/tfjs-core';\nexport const dilation2dConfig = {\n kernelName: Dilation2D,\n backendName: 'cpu',\n kernelFunc: ({ inputs, backend, attrs }) => {\n const { x, filter } = inputs;\n const { strides, pad, dilations } = attrs;\n const cpuBackend = backend;\n const xVals = cpuBackend.data.get(x.dataId).values;\n const xRank = x.shape.length;\n const filterVals = cpuBackend.data.get(filter.dataId).values;\n const filterRank = filter.shape.length;\n const { batchSize, inHeight, inWidth, inChannels, outHeight, outWidth, padInfo, strideHeight, strideWidth, filterHeight, filterWidth, dilationHeight, dilationWidth, outShape } = backend_util.computeDilation2DInfo(x.shape, filter.shape, strides, pad, 'NHWC' /* dataFormat */, dilations);\n const outSize = util.sizeFromShape(outShape);\n const outRank = outShape.length;\n const outputVals = util.getArrayFromDType(x.dtype, outSize);\n // Upsampling the input by fill in `dilation size - 1` values between each\n // input value.\n // This implementation follows the TF c++ implementation:\n // https://github.com/tensorflow/tensorflow/blob/d9a3a849edc198e90172bc58eb293de457f9d986/tensorflow/core/kernels/dilation_ops.cc\n for (let b = 0; b < batchSize; ++b) {\n for (let hOut = 0; hOut < outHeight; ++hOut) {\n const hBeg = hOut * strideHeight - padInfo.top;\n for (let wOut = 0; wOut < outWidth; ++wOut) {\n const wBeg = wOut * strideWidth - padInfo.left;\n for (let d = 0; d < inChannels; ++d) {\n let curVal = Number.MIN_SAFE_INTEGER;\n for (let h = 0; h < filterHeight; ++h) {\n const hIn = hBeg + h * dilationHeight;\n if (hIn >= 0 && hIn < inHeight) {\n for (let w = 0; w < filterWidth; ++w) {\n const wIn = wBeg + w * dilationWidth;\n if (wIn >= 0 && wIn < inWidth) {\n const xIndex = util.locToIndex([b, hIn, wIn, d], xRank, util.computeStrides(x.shape));\n const filterIndex = util.locToIndex([h, w, d], filterRank, util.computeStrides(filter.shape));\n const val = xVals[xIndex] + filterVals[filterIndex];\n if (val > curVal) {\n curVal = val;\n }\n }\n }\n }\n }\n const outputIndex = util.locToIndex([b, hOut, wOut, d], outRank, util.computeStrides(outShape));\n outputVals[outputIndex] = curVal;\n }\n }\n }\n }\n const dataId = cpuBackend.write(util.toTypedArray(outputVals, x.dtype), outShape, x.dtype);\n return { dataId, shape: outShape, dtype: x.dtype };\n }\n};\n//# sourceMappingURL=Dilation2D.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Dilation2DBackpropFilter, util } from '@tensorflow/tfjs-core';\nexport const dilation2dBackpropFilterConfig = {\n kernelName: Dilation2DBackpropFilter,\n backendName: 'cpu',\n kernelFunc: ({ inputs, backend, attrs }) => {\n const { x, filter, dy } = inputs;\n const { strides, pad, dilations } = attrs;\n const cpuBackend = backend;\n const $x = util.toNestedArray(x.shape, cpuBackend.data.get(x.dataId).values);\n const $filter = util.toNestedArray(filter.shape, cpuBackend.data.get(filter.dataId).values);\n const { batchSize, inHeight, inWidth, inChannels, outHeight, outWidth, padInfo, strideHeight, strideWidth, filterHeight, filterWidth, dilationHeight, dilationWidth, outShape } = backend_util.computeDilation2DInfo(x.shape, filter.shape, strides, pad, 'NHWC' /* dataFormat */, dilations);\n util.assert(dy.rank === outShape.length, () => `Error in ${Dilation2DBackpropFilter}, dy ` +\n `must have the same rank as output ${outShape.length}, but got ` +\n `${dy.rank}`);\n const $dy = util.toNestedArray(outShape, cpuBackend.data.get(dy.dataId).values);\n // The computed filter gradients has the same dimensions as the filter:\n // [filterHeight, filterWidth, depth]\n const gradients = util.makeZerosNestedTypedArray(filter.shape, filter.dtype);\n // In the case of multiple argmax branches, we only back-propagate along the\n // last branch, i.e., the one with largest value of `h * filter_cols + w`,\n // similarly to the max-pooling backward routines.\n // This implementation follows the TF c++ implementation:\n // https://github.com/tensorflow/tensorflow/blob/d9a3a849edc198e90172bc58eb293de457f9d986/tensorflow/core/kernels/dilation_ops.cc\n for (let b = 0; b < batchSize; ++b) {\n for (let hOut = 0; hOut < outHeight; ++hOut) {\n const hBeg = hOut * strideHeight - padInfo.top;\n for (let wOut = 0; wOut < outWidth; ++wOut) {\n const wBeg = wOut * strideWidth - padInfo.left;\n for (let d = 0; d < inChannels; ++d) {\n let curVal = Number.MIN_SAFE_INTEGER;\n let hMax = 0;\n let wMax = 0;\n for (let h = 0; h < filterHeight; ++h) {\n const hIn = hBeg + h * dilationHeight;\n if (hIn >= 0 && hIn < inHeight) {\n for (let w = 0; w < filterWidth; ++w) {\n const wIn = wBeg + w * dilationWidth;\n if (wIn >= 0 && wIn < inWidth) {\n const val = $x[b][hIn][wIn][d] + $filter[h][w][d];\n if (val > curVal) {\n curVal = val;\n hMax = h;\n wMax = w;\n }\n }\n }\n }\n }\n gradients[hMax][wMax][d] += $dy[b][hOut][wOut][d];\n }\n }\n }\n }\n const dataId = cpuBackend.write(util.toTypedArray(gradients, x.dtype), filter.shape, filter.dtype);\n return { dataId, shape: filter.shape, dtype: filter.dtype };\n }\n};\n//# sourceMappingURL=Dilation2DBackpropFilter.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Dilation2DBackpropInput, util } from '@tensorflow/tfjs-core';\nexport const dilation2dBackpropInputConfig = {\n kernelName: Dilation2DBackpropInput,\n backendName: 'cpu',\n kernelFunc: ({ inputs, backend, attrs }) => {\n const { x, filter, dy } = inputs;\n const { strides, pad, dilations } = attrs;\n const cpuBackend = backend;\n const $x = util.toNestedArray(x.shape, cpuBackend.data.get(x.dataId).values);\n const $filter = util.toNestedArray(filter.shape, cpuBackend.data.get(filter.dataId).values);\n const { batchSize, inHeight, inWidth, inChannels, outHeight, outWidth, padInfo, strideHeight, strideWidth, filterHeight, filterWidth, dilationHeight, dilationWidth, outShape } = backend_util.computeDilation2DInfo(x.shape, filter.shape, strides, pad, 'NHWC' /* dataFormat */, dilations);\n util.assert(dy.rank === outShape.length, () => `Error in ${Dilation2DBackpropInput}, dy ` +\n `must have the same rank as output ${outShape.length}, but got ` +\n `${dy.rank}`);\n const $dy = util.toNestedArray(outShape, cpuBackend.data.get(dy.dataId).values);\n // The computed gradients has the same dimensions as the input:\n // [batch, inputHeight, inputCols, inChannel]\n const gradients = util.makeZerosNestedTypedArray(x.shape, x.dtype);\n // In the case of multiple argmax branches, we only back-propagate along the\n // last branch, i.e., the one with largest value of `h * filter_cols + w`,\n // similarly to the max-pooling backward routines.\n // This implementation follows the TF c++ implementation:\n // https://github.com/tensorflow/tensorflow/blob/d9a3a849edc198e90172bc58eb293de457f9d986/tensorflow/core/kernels/dilation_ops.cc\n for (let b = 0; b < batchSize; ++b) {\n for (let hOut = 0; hOut < outHeight; ++hOut) {\n const hBeg = hOut * strideHeight - padInfo.top;\n for (let wOut = 0; wOut < outWidth; ++wOut) {\n const wBeg = wOut * strideWidth - padInfo.left;\n for (let d = 0; d < inChannels; ++d) {\n let curVal = Number.MIN_SAFE_INTEGER;\n let hInMax = (hBeg < 0) ? 0 : hBeg;\n let wInMax = (wBeg < 0) ? 0 : wBeg;\n for (let h = 0; h < filterHeight; ++h) {\n const hIn = hBeg + h * dilationHeight;\n if (hIn >= 0 && hIn < inHeight) {\n for (let w = 0; w < filterWidth; ++w) {\n const wIn = wBeg + w * dilationWidth;\n if (wIn >= 0 && wIn < inWidth) {\n const val = $x[b][hIn][wIn][d] + $filter[h][w][d];\n if (val > curVal) {\n curVal = val;\n hInMax = hIn;\n wInMax = wIn;\n }\n }\n }\n }\n }\n gradients[b][hInMax][wInMax][d] += $dy[b][hOut][wOut][d];\n }\n }\n }\n }\n const dataId = cpuBackend.write(util.toTypedArray(gradients, x.dtype), x.shape, x.dtype);\n return { dataId, shape: x.shape, dtype: x.dtype };\n }\n};\n//# sourceMappingURL=Dilation2DBackpropInput.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Div } from '@tensorflow/tfjs-core';\nimport { createSimpleBinaryKernelImpl } from '../utils/binary_impl';\nimport { binaryKernelFunc } from '../utils/kernel_utils';\nexport const divImpl = createSimpleBinaryKernelImpl((a, b) => a / b);\nexport const div = binaryKernelFunc(Div, divImpl);\nexport const divConfig = {\n kernelName: Div,\n backendName: 'cpu',\n kernelFunc: div\n};\n//# sourceMappingURL=Div.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Erf } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nconst p = backend_util.ERF_P;\nconst a1 = backend_util.ERF_A1;\nconst a2 = backend_util.ERF_A2;\nconst a3 = backend_util.ERF_A3;\nconst a4 = backend_util.ERF_A4;\nconst a5 = backend_util.ERF_A5;\nexport const erf = unaryKernelFunc(Erf, (xi) => {\n const sign = Math.sign(xi);\n const v = Math.abs(xi);\n const t = 1.0 / (1.0 + p * v);\n return sign *\n (1.0 -\n (((((a5 * t + a4) * t) + a3) * t + a2) * t + a1) * t *\n Math.exp(-v * v));\n});\nexport const erfConfig = {\n kernelName: Erf,\n backendName: 'cpu',\n kernelFunc: erf,\n};\n//# sourceMappingURL=Erf.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, util } from '@tensorflow/tfjs-core';\nimport { add } from '../kernels/Add';\nimport { complex } from '../kernels/Complex';\nimport { concat } from '../kernels/Concat';\nimport { divConfig } from '../kernels/Div';\nimport { identity } from '../kernels/Identity';\nimport { imag } from '../kernels/Imag';\nimport { multiply } from '../kernels/Multiply';\nimport { real } from '../kernels/Real';\nimport { slice } from '../kernels/Slice';\nimport { sub } from '../kernels/Sub';\n/**\n * Calculate FFT of inner most elements of batch tensor.\n */\nexport function fftBatch(input, inverse, cpuBackend) {\n const inputShape = input.shape;\n const batch = inputShape[0];\n const innerDim = inputShape[1];\n const inputVals = cpuBackend.data.get(input.dataId);\n const real2D = inputVals.complexTensorInfos.real;\n const imag2D = inputVals.complexTensorInfos.imag;\n // Collects real and imaginary values separately.\n const resultShape = [batch, innerDim];\n const resultSize = util.sizeFromShape(resultShape);\n const resultReal = util.getTypedArrayFromDType('float32', resultSize);\n const resultImag = util.getTypedArrayFromDType('float32', resultSize);\n for (let b = 0; b < batch; b++) {\n // TODO: Support slice ops for complex type.\n const r = slice({\n inputs: { x: real2D },\n backend: cpuBackend,\n attrs: { begin: [b, 0], size: [1, innerDim] }\n });\n const i = slice({\n inputs: { x: imag2D },\n backend: cpuBackend,\n attrs: { begin: [b, 0], size: [1, innerDim] }\n });\n const input = complex({ inputs: { real: r, imag: i }, backend: cpuBackend });\n // Run FFT by batch element.\n const { real, imag } = fftImpl(input, inverse, cpuBackend);\n const res = backend_util.mergeRealAndImagArrays(real, imag);\n for (let d = 0; d < innerDim; d++) {\n const c = backend_util.getComplexWithIndex(res, d);\n resultReal[b * innerDim + d] = c.real;\n resultImag[b * innerDim + d] = c.imag;\n }\n cpuBackend.disposeIntermediateTensorInfo(r);\n cpuBackend.disposeIntermediateTensorInfo(i);\n cpuBackend.disposeIntermediateTensorInfo(input);\n }\n const $realInfo = cpuBackend.makeTensorInfo(resultShape, 'float32', resultReal);\n const $imagInfo = cpuBackend.makeTensorInfo(resultShape, 'float32', resultImag);\n const result = complex({ inputs: { real: $realInfo, imag: $imagInfo }, backend: cpuBackend });\n cpuBackend.disposeIntermediateTensorInfo($realInfo);\n cpuBackend.disposeIntermediateTensorInfo($imagInfo);\n return result;\n}\nexport function fftImpl(input, inverse, cpuBackend) {\n const inputSize = util.sizeFromShape(input.shape);\n const inputVals = cpuBackend.data.get(input.dataId);\n const realVals = cpuBackend.data.get(inputVals.complexTensorInfos.real.dataId).values;\n const imagVals = cpuBackend.data.get(inputVals.complexTensorInfos.imag.dataId).values;\n if (isExponentOf2(inputSize)) {\n const result = fftRadix2(realVals, imagVals, 
inputSize, inverse, cpuBackend);\n const resultShape = [input.shape[0], input.shape[1]];\n if (inverse) {\n const realInfo = cpuBackend.makeTensorInfo(resultShape, 'float32', result.real);\n const imagInfo = cpuBackend.makeTensorInfo(resultShape, 'float32', result.imag);\n const sizeInfo = cpuBackend.makeTensorInfo([], 'float32', util.createScalarValue(inputSize, 'float32'));\n const sizeInfoCopy = identity({ inputs: { x: sizeInfo }, backend: cpuBackend });\n const divRealInfo = divConfig.kernelFunc({ inputs: { a: realInfo, b: sizeInfo }, backend: cpuBackend });\n const divImagInfo = divConfig.kernelFunc({ inputs: { a: imagInfo, b: sizeInfoCopy }, backend: cpuBackend });\n const divRealVals = cpuBackend.data.get(divRealInfo.dataId).values;\n const divImagVals = cpuBackend.data.get(divImagInfo.dataId).values;\n cpuBackend.disposeIntermediateTensorInfo(realInfo);\n cpuBackend.disposeIntermediateTensorInfo(imagInfo);\n cpuBackend.disposeIntermediateTensorInfo(sizeInfo);\n cpuBackend.disposeIntermediateTensorInfo(sizeInfoCopy);\n cpuBackend.disposeIntermediateTensorInfo(divRealInfo);\n cpuBackend.disposeIntermediateTensorInfo(divImagInfo);\n return { real: divRealVals, imag: divImagVals };\n }\n return result;\n }\n else {\n const data = backend_util.mergeRealAndImagArrays(realVals, imagVals);\n const rawOutput = fourierTransformByMatmul(data, inputSize, inverse);\n return backend_util.splitRealAndImagArrays(rawOutput);\n }\n}\nfunction isExponentOf2(size) {\n return (size & size - 1) === 0;\n}\n// FFT using Cooley-Tukey algorithm on radix 2 dimensional input.\nfunction fftRadix2(realVals, imagVals, size, inverse, cpuBackend) {\n if (size === 1) {\n return { real: realVals, imag: imagVals };\n }\n const data = backend_util.mergeRealAndImagArrays(realVals, imagVals);\n const half = size / 2;\n const evenComplex = backend_util.complexWithEvenIndex(data);\n const evenRealVals = evenComplex.real;\n const evenImagVals = evenComplex.imag;\n const evenShape = [evenRealVals.length];\n const evenRealInfo = cpuBackend.makeTensorInfo(evenShape, 'float32', evenRealVals);\n const evenImagInfo = cpuBackend.makeTensorInfo(evenShape, 'float32', evenImagVals);\n const evenTensorInfo = complex({ inputs: { real: evenRealInfo, imag: evenImagInfo }, backend: cpuBackend });\n const oddComplex = backend_util.complexWithOddIndex(data);\n const oddRealVals = oddComplex.real;\n const oddImagVals = oddComplex.imag;\n const oddShape = [oddRealVals.length];\n const oddRealInfo = cpuBackend.makeTensorInfo(oddShape, 'float32', oddRealVals);\n const oddImagInfo = cpuBackend.makeTensorInfo(oddShape, 'float32', oddImagVals);\n const oddTensorInfo = complex({ inputs: { real: oddRealInfo, imag: oddImagInfo }, backend: cpuBackend });\n // Recursive call for half part of original input.\n const $evenComplex = fftRadix2(evenRealVals, evenImagVals, half, inverse, cpuBackend);\n const $evenRealVals = $evenComplex.real;\n const $evenImagVals = $evenComplex.imag;\n const $evenShape = [$evenRealVals.length];\n const $evenRealInfo = cpuBackend.makeTensorInfo($evenShape, 'float32', $evenRealVals);\n const $evenImagInfo = cpuBackend.makeTensorInfo($evenShape, 'float32', $evenImagVals);\n const $evenTensorInfo = complex({\n inputs: { real: $evenRealInfo, imag: $evenImagInfo },\n backend: cpuBackend\n });\n const $oddComplex = fftRadix2(oddRealVals, oddImagVals, half, inverse, cpuBackend);\n const $oddRealVals = $oddComplex.real;\n const $oddImagVals = $oddComplex.imag;\n const $oddShape = [$oddRealVals.length];\n const $oddRealInfo = 
cpuBackend.makeTensorInfo($oddShape, 'float32', $oddRealVals);\n const $oddImagInfo = cpuBackend.makeTensorInfo($oddShape, 'float32', $oddImagVals);\n const $oddTensorInfo = complex({ inputs: { real: $oddRealInfo, imag: $oddImagInfo }, backend: cpuBackend });\n const e = backend_util.exponents(size, inverse);\n const eShape = [e.real.length];\n const eRealInfo = cpuBackend.makeTensorInfo(eShape, 'float32', e.real);\n const eImagInfo = cpuBackend.makeTensorInfo(eShape, 'float32', e.imag);\n const complexInfo = complex({ inputs: { real: eRealInfo, imag: eImagInfo }, backend: cpuBackend });\n const exponentInfo = multiply({ inputs: { a: complexInfo, b: $oddTensorInfo }, backend: cpuBackend });\n const addPart = add({\n inputs: { a: $evenTensorInfo, b: exponentInfo },\n backend: cpuBackend\n });\n const subPart = sub({\n inputs: { a: $evenTensorInfo, b: exponentInfo },\n backend: cpuBackend\n });\n const addPartReal = real({ inputs: { input: addPart }, backend: cpuBackend });\n const subPartReal = real({ inputs: { input: subPart }, backend: cpuBackend });\n const addPartImag = imag({ inputs: { input: addPart }, backend: cpuBackend });\n const subPartImag = imag({ inputs: { input: subPart }, backend: cpuBackend });\n const $real = concat({\n inputs: [addPartReal, subPartReal],\n backend: cpuBackend,\n attrs: { axis: 0 }\n });\n const $imag = concat({\n inputs: [addPartImag, subPartImag],\n backend: cpuBackend,\n attrs: { axis: 0 }\n });\n const $realVals = cpuBackend.data.get($real.dataId).values;\n const $imagVals = cpuBackend.data.get($imag.dataId).values;\n cpuBackend.disposeIntermediateTensorInfo(evenRealInfo);\n cpuBackend.disposeIntermediateTensorInfo(evenImagInfo);\n cpuBackend.disposeIntermediateTensorInfo(evenTensorInfo);\n cpuBackend.disposeIntermediateTensorInfo(oddRealInfo);\n cpuBackend.disposeIntermediateTensorInfo(oddImagInfo);\n cpuBackend.disposeIntermediateTensorInfo(oddTensorInfo);\n cpuBackend.disposeIntermediateTensorInfo($evenRealInfo);\n cpuBackend.disposeIntermediateTensorInfo($evenImagInfo);\n cpuBackend.disposeIntermediateTensorInfo($evenTensorInfo);\n cpuBackend.disposeIntermediateTensorInfo($oddRealInfo);\n cpuBackend.disposeIntermediateTensorInfo($oddImagInfo);\n cpuBackend.disposeIntermediateTensorInfo($oddTensorInfo);\n cpuBackend.disposeIntermediateTensorInfo(eRealInfo);\n cpuBackend.disposeIntermediateTensorInfo(eImagInfo);\n cpuBackend.disposeIntermediateTensorInfo(complexInfo);\n cpuBackend.disposeIntermediateTensorInfo(exponentInfo);\n cpuBackend.disposeIntermediateTensorInfo(addPart);\n cpuBackend.disposeIntermediateTensorInfo(subPart);\n cpuBackend.disposeIntermediateTensorInfo(addPartReal);\n cpuBackend.disposeIntermediateTensorInfo(addPartImag);\n cpuBackend.disposeIntermediateTensorInfo(subPartReal);\n cpuBackend.disposeIntermediateTensorInfo(subPartImag);\n cpuBackend.disposeIntermediateTensorInfo($real);\n cpuBackend.disposeIntermediateTensorInfo($imag);\n return { real: $realVals, imag: $imagVals };\n}\n// Calculate fourier transform by multplying sinusoid matrix.\nfunction fourierTransformByMatmul(data, size, inverse) {\n const ret = new Float32Array(size * 2);\n // TODO: Use matmul instead once it supports complex64 type.\n for (let r = 0; r < size; r++) {\n let real = 0.0;\n let imag = 0.0;\n for (let c = 0; c < size; c++) {\n const e = backend_util.exponent(r * c, size, inverse);\n const term = backend_util.getComplexWithIndex(data, c);\n real += term.real * e.real - term.imag * e.imag;\n imag += term.real * e.imag + term.imag * e.real;\n }\n if 
(inverse) {\n real /= size;\n imag /= size;\n }\n backend_util.assignToTypedArray(ret, real, imag, r);\n }\n return ret;\n}\n//# sourceMappingURL=fft_utils.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { FFT, util } from '@tensorflow/tfjs-core';\nimport { fftBatch } from '../utils/fft_utils';\nimport { reshape } from './Reshape';\nexport function fft(args) {\n const { inputs, backend } = args;\n const { input } = inputs;\n const inputSize = util.sizeFromShape(input.shape);\n // Collapse all outer dimensions to a single batch dimension.\n const innerDimensionSize = input.shape[input.shape.length - 1];\n const batch = inputSize / innerDimensionSize;\n const input2D = reshape({\n inputs: { x: input },\n backend,\n attrs: { shape: [batch, innerDimensionSize] }\n });\n const result = fftBatch(input2D, false, backend);\n const resultReshaped = reshape({ inputs: { x: result }, backend, attrs: { shape: input.shape } });\n backend.disposeIntermediateTensorInfo(input2D);\n backend.disposeIntermediateTensorInfo(result);\n return resultReshaped;\n}\nexport const fftConfig = {\n kernelName: FFT,\n backendName: 'cpu',\n kernelFunc: fft\n};\n//# sourceMappingURL=FFT.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Fill, util } from '@tensorflow/tfjs-core';\nexport function fill(args) {\n const { backend, attrs } = args;\n const { shape, value, dtype } = attrs;\n const $dtype = dtype || util.inferDtype(value);\n const values = util.getArrayFromDType($dtype, util.sizeFromShape(shape));\n fillValues(values, value, $dtype);\n return backend.makeTensorInfo(shape, $dtype, values);\n}\nexport const fillConfig = {\n kernelName: Fill,\n backendName: 'cpu',\n kernelFunc: fill\n};\nfunction fillValues(values, value, dtype) {\n if (dtype === 'string') {\n values.fill(value);\n }\n else {\n values.fill(value);\n }\n}\n//# sourceMappingURL=Fill.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { FlipLeftRight, util } from '@tensorflow/tfjs-core';\nexport const flipLeftRightConfig = {\n kernelName: FlipLeftRight,\n backendName: 'cpu',\n kernelFunc: ({ inputs, attrs, backend }) => {\n const { image } = inputs;\n const cpuBackend = backend;\n const output = util.getTypedArrayFromDType(image.dtype, util.sizeFromShape(image.shape));\n const [batch, imageHeight, imageWidth, numChannels] = image.shape;\n const imageVals = cpuBackend.data.get(image.dataId).values;\n for (let batchIdx = 0; batchIdx < batch; batchIdx++) {\n const batchOffset = batchIdx * imageWidth * imageHeight * numChannels;\n for (let row = 0; row < imageHeight; row++) {\n const rowOffset = row * (imageWidth * numChannels);\n for (let col = 0; col < imageWidth; col++) {\n const colOffset = col * numChannels;\n for (let channel = 0; channel < numChannels; channel++) {\n const coords = [batch, row, col, channel];\n const x = coords[2];\n const coordX = Math.round(imageWidth - x);\n const outIdx = batchOffset + rowOffset + colOffset + channel;\n let outputValue = imageVals[outIdx];\n // If the coordinate position falls within the image boundaries...\n if (coordX >= 0 && coordX < imageWidth) {\n // set the output to the image value at the coordinate position.\n const rotatedColOffset = coordX * numChannels;\n const imageIdx = batchOffset + rowOffset + rotatedColOffset + channel;\n outputValue = imageVals[imageIdx];\n }\n output[outIdx] = outputValue;\n }\n }\n }\n }\n const dataId = cpuBackend.write(output, image.shape, image.dtype);\n return { dataId, shape: image.shape, dtype: image.dtype };\n }\n};\n//# sourceMappingURL=FlipLeftRight.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { FusedConv2D } from '@tensorflow/tfjs-core';\nimport { applyActivation } from '../utils/fused_utils';\nimport { add } from './Add';\nimport { conv2D } from './Conv2D';\nexport function fusedConv2D(args) {\n const { inputs, backend, attrs } = args;\n const { x, filter, bias, preluActivationWeights } = inputs;\n const { strides, pad, dataFormat, dilations, dimRoundingMode, activation } = attrs;\n let result = conv2D({\n inputs: { x, filter },\n backend,\n attrs: { strides, pad, dataFormat, dilations, dimRoundingMode }\n });\n if (bias) {\n const resultOld = result;\n result = add({ inputs: { a: result, b: bias }, backend });\n backend.disposeIntermediateTensorInfo(resultOld);\n }\n if (activation) {\n const resultOld = result;\n result =\n applyActivation(backend, result, activation, preluActivationWeights);\n backend.disposeIntermediateTensorInfo(resultOld);\n }\n return result;\n}\nexport const fusedConv2DConfig = {\n kernelName: FusedConv2D,\n backendName: 'cpu',\n kernelFunc: fusedConv2D\n};\n//# sourceMappingURL=FusedConv2D.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { FusedDepthwiseConv2D } from '@tensorflow/tfjs-core';\nimport { applyActivation } from '../utils/fused_utils';\nimport { add } from './Add';\nimport { depthwiseConv2dNative } from './DepthwiseConv2dNative';\nexport function fusedDepthwiseConv2D(args) {\n const { inputs, backend, attrs } = args;\n const { x, filter, bias, preluActivationWeights } = inputs;\n const { strides, pad, dataFormat, dilations, dimRoundingMode, activation } = attrs;\n let result = depthwiseConv2dNative({\n inputs: { x, filter },\n backend,\n attrs: { strides, pad, dataFormat, dilations, dimRoundingMode }\n });\n if (bias) {\n const oldResult = result;\n result = add({ inputs: { a: result, b: bias }, backend });\n backend.disposeIntermediateTensorInfo(oldResult);\n }\n if (activation) {\n const oldResult = result;\n result =\n applyActivation(backend, result, activation, preluActivationWeights);\n backend.disposeIntermediateTensorInfo(oldResult);\n }\n return result;\n}\nexport const fusedDepthwiseConv2DConfig = {\n kernelName: FusedDepthwiseConv2D,\n backendName: 'cpu',\n kernelFunc: fusedDepthwiseConv2D\n};\n//# sourceMappingURL=FusedDepthwiseConv2D.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { IFFT, util } from '@tensorflow/tfjs-core';\nimport { fftBatch } from '../utils/fft_utils';\nimport { reshape } from './Reshape';\nexport function ifft(args) {\n const { inputs, backend } = args;\n const { input } = inputs;\n const inputSize = util.sizeFromShape(input.shape);\n // Collapse all outer dimensions to a single batch dimension.\n const innerDimensionSize = input.shape[input.shape.length - 1];\n const batch = inputSize / innerDimensionSize;\n const input2D = reshape({\n inputs: { x: input },\n backend,\n attrs: { shape: [batch, innerDimensionSize] }\n });\n const result = fftBatch(input2D, true, backend);\n const resultReshaped = reshape({ inputs: { x: result }, backend, attrs: { shape: input.shape } });\n backend.disposeIntermediateTensorInfo(input2D);\n backend.disposeIntermediateTensorInfo(result);\n return resultReshaped;\n}\nexport const ifftConfig = {\n kernelName: IFFT,\n backendName: 'cpu',\n kernelFunc: ifft\n};\n//# sourceMappingURL=IFFT.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { IsFinite } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const isFinite = unaryKernelFunc(IsFinite, (xi) => Number.isFinite(xi) ? 1 : 0, 'bool');\nexport const isFiniteConfig = {\n kernelName: IsFinite,\n backendName: 'cpu',\n kernelFunc: isFinite,\n};\n//# sourceMappingURL=IsFinite.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { IsInf } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const isInf = unaryKernelFunc(IsInf, (xi) => Math.abs(xi) === Infinity ? 1 : 0, 'bool');\nexport const isInfConfig = {\n kernelName: IsInf,\n backendName: 'cpu',\n kernelFunc: isInf,\n};\n//# sourceMappingURL=IsInf.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { IsNan } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const isNaN = unaryKernelFunc(IsNan, (xi) => Number.isNaN(xi) ? 1 : 0, 'bool');\nexport const isNaNConfig = {\n kernelName: IsNan,\n backendName: 'cpu',\n kernelFunc: isNaN,\n};\n//# sourceMappingURL=IsNaN.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Log1p } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const log1p = unaryKernelFunc(Log1p, (xi) => Math.log1p(xi));\nexport const log1pConfig = {\n kernelName: Log1p,\n backendName: 'cpu',\n kernelFunc: log1p,\n};\n//# sourceMappingURL=Log1p.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { LogicalNot } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const logicalNot = unaryKernelFunc(LogicalNot, (xi) => xi ? 0 : 1, 'bool');\nexport const logicalNotConfig = {\n kernelName: LogicalNot,\n backendName: 'cpu',\n kernelFunc: logicalNot,\n};\n//# sourceMappingURL=LogicalNot.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Max } from '@tensorflow/tfjs-core';\nimport { backend_util } from '@tensorflow/tfjs-core';\nimport { util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { maxImpl } from './Max_impl';\nimport { transposeImpl } from './Transpose_impl';\nexport const maxConfig = {\n kernelName: Max,\n backendName: 'cpu',\n kernelFunc: ({ inputs, attrs, backend }) => {\n const { x } = inputs;\n const { reductionIndices, keepDims } = attrs;\n const cpuBackend = backend;\n let xShape = x.shape;\n const xRank = xShape.length;\n const origAxes = util.parseAxisParam(reductionIndices, xShape);\n let axes = origAxes;\n const permutedAxes = backend_util.getAxesPermutation(axes, xRank);\n let xVals = cpuBackend.data.get(x.dataId).values;\n if (permutedAxes != null) {\n const newShape = new Array(xRank);\n for (let i = 0; i < newShape.length; i++) {\n newShape[i] = xShape[permutedAxes[i]];\n }\n xVals = transposeImpl(xVals, xShape, x.dtype, permutedAxes, newShape);\n axes = backend_util.getInnerMostAxes(axes.length, xRank);\n xShape = newShape;\n }\n assertNotComplex(x, 'max');\n backend_util.assertAxesAreInnerMostDims('max', axes, xRank);\n const [maxOutShape, reduceShape] = backend_util.computeOutAndReduceShapes(xShape, axes);\n const reduceSize = util.sizeFromShape(reduceShape);\n const result = maxImpl(xVals, reduceSize, maxOutShape, x.dtype);\n const dataId = cpuBackend.write(result, maxOutShape, x.dtype);\n let outShape = maxOutShape;\n if (keepDims) {\n // reshape\n const newShape = backend_util.expandShapeToKeepDim(maxOutShape, origAxes);\n outShape = newShape;\n }\n return { dataId, shape: outShape, dtype: x.dtype };\n }\n};\n//# sourceMappingURL=Max.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
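// Sketch (assumes `tf` from @tensorflow/tfjs): the Max kernel above first permutes the reduction
// axes to the innermost positions, reduces, and only re-expands the shape when keepDims is set.
const m = tf.tensor2d([[1, 5, 2], [7, 0, 3]]);
tf.max(m, 1).print();       // [5, 7]     -> shape [2]
tf.max(m, 1, true).print(); // [[5], [7]] -> shape [2, 1], keepDims re-expands the reduced axis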
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, MaxPool, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { pool } from '../utils/pool_utils';\nimport { identity } from './Identity';\nexport function maxPool(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n assertNotComplex(x, 'maxPool');\n const { filterSize, strides, pad, dimRoundingMode } = attrs;\n const dilations = 1;\n util.assert(backend_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in maxPool: Either strides or dilations must be 1. ' +\n `Got strides ${strides} and dilations '${dilations}'`);\n const convInfo = backend_util.computePool2DInfo(x.shape, filterSize, strides, dilations, pad, dimRoundingMode);\n let res;\n if (convInfo.filterWidth === 1 && convInfo.filterHeight === 1 &&\n util.arraysEqual(convInfo.inShape, convInfo.outShape)) {\n res = identity({ inputs: { x }, backend });\n }\n else {\n const xValues = backend.data.get(x.dataId).values;\n const strides = util.computeStrides(x.shape);\n const buffer = pool(xValues, x.shape, x.dtype, strides, convInfo, 'max');\n res = backend.makeTensorInfo(convInfo.outShape, x.dtype, buffer.values);\n }\n return res;\n}\nexport const maxPoolConfig = {\n kernelName: MaxPool,\n backendName: 'cpu',\n kernelFunc: maxPool\n};\n//# sourceMappingURL=MaxPool.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
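// Sketch (assumes `tf`): maxPool above short-circuits to the Identity kernel when the filter is
// 1x1 and the output shape equals the input shape; every other case runs the generic pool loop.
const img = tf.tensor4d([1, 2, 3, 4], [1, 2, 2, 1]);
tf.maxPool(img, 2, 2, 'valid').print(); // [[[[4]]]] – the single 2x2 window keeps its maximum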
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, buffer, MaxPoolBackprop } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { maxPoolPositions } from '../utils/pool_utils';\nexport function maxPoolBackprop(args) {\n const { inputs, backend, attrs } = args;\n const { dy, input, output } = inputs;\n const x = input;\n assertNotComplex([input, output], 'maxPoolBackprop');\n const { filterSize, strides, pad, dimRoundingMode } = attrs;\n const convInfo = backend_util.computePool2DInfo(x.shape, filterSize, strides, 1 /* dilations */, pad, dimRoundingMode);\n const xValues = backend.data.get(x.dataId).values;\n const maxPosBuf = buffer(convInfo.outShape, x.dtype, maxPoolPositions(xValues, x.shape, x.dtype, convInfo).values);\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padLeft = effectiveFilterWidth - 1 - convInfo.padInfo.left;\n const padTop = effectiveFilterHeight - 1 - convInfo.padInfo.top;\n const dx = buffer(x.shape, 'float32');\n const dyData = backend.data.get(dy.dataId).values;\n const dyBuf = buffer(dy.shape, 'float32', dyData);\n for (let b = 0; b < convInfo.batchSize; ++b) {\n for (let d = 0; d < convInfo.inChannels; ++d) {\n for (let dxR = 0; dxR < convInfo.inHeight; ++dxR) {\n for (let dxC = 0; dxC < convInfo.inWidth; ++dxC) {\n // Shader code begins.\n const dyRCorner = dxR - padTop;\n const dyCCorner = dxC - padLeft;\n let dotProd = 0;\n for (let wR = 0; wR < effectiveFilterHeight; wR += dilationHeight) {\n const dyR = (dyRCorner + wR) / strideHeight;\n if (dyR < 0 || dyR >= convInfo.outHeight ||\n Math.floor(dyR) !== dyR) {\n continue;\n }\n for (let wC = 0; wC < effectiveFilterWidth; wC += dilationWidth) {\n const dyC = (dyCCorner + wC) / strideWidth;\n if (dyC < 0 || dyC >= convInfo.outWidth ||\n Math.floor(dyC) !== dyC) {\n continue;\n }\n const maxPos = effectiveFilterHeight * effectiveFilterWidth - 1 -\n maxPosBuf.get(b, dyR, dyC, d);\n const curPos = wR * effectiveFilterWidth + wC;\n const mask = maxPos === curPos ? 1 : 0;\n if (mask === 0) {\n continue;\n }\n const pixel = dyBuf.get(b, dyR, dyC, d);\n dotProd += pixel * mask;\n }\n }\n dx.set(dotProd, b, dxR, dxC, d);\n }\n }\n }\n }\n return backend.makeTensorInfo(dx.shape, dx.dtype, dx.values);\n}\nexport const maxPoolBackpropConfig = {\n kernelName: MaxPoolBackprop,\n backendName: 'cpu',\n kernelFunc: maxPoolBackprop\n};\n//# sourceMappingURL=MaxPoolBackprop.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
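// Sketch (assumes `tf`): MaxPoolBackprop routes each incoming gradient to the input position that
// held the window maximum (mask === 1 above) and leaves every other position at zero.
const xp = tf.tensor4d([1, 2, 3, 4], [1, 2, 2, 1]);
const dMaxPool = tf.grad((t) => tf.maxPool(t, 2, 2, 'valid'));
dMaxPool(xp).print(); // 1 at the position of the max (value 4), 0 elsewhere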
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nimport { maxPoolPositions, pool } from '../utils/pool_utils';\nexport function maxPoolWithArgmaxImpl(xValues, xShape, dtype, includeBatchInIndex, convInfo) {\n const strides = util.computeStrides(xShape);\n const maxPools = pool(xValues, xShape, dtype, strides, convInfo, 'max');\n const maxPositions = maxPoolPositions(xValues, xShape, dtype, convInfo, true, includeBatchInIndex);\n return [maxPools.values, maxPositions.values];\n}\n//# sourceMappingURL=MaxPoolWithArgmax_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { MaxPoolWithArgmax } from '@tensorflow/tfjs-core';\nimport { backend_util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { maxPoolWithArgmaxImpl } from './MaxPoolWithArgmax_impl';\nexport const maxPoolWithArgmaxConfig = {\n kernelName: MaxPoolWithArgmax,\n backendName: 'cpu',\n kernelFunc: ({ inputs, attrs, backend }) => {\n const { x } = inputs;\n const { filterSize, strides, pad, includeBatchInIndex } = attrs;\n const cpuBackend = backend;\n assertNotComplex(x, 'MaxPoolWithArgmax');\n const values = cpuBackend.data.get(x.dataId).values;\n const convInfo = backend_util.computePool2DInfo(x.shape, filterSize, strides, [1, 1], pad);\n const [pooled, indexes] = maxPoolWithArgmaxImpl(values, x.shape, x.dtype, includeBatchInIndex, convInfo);\n const pooledDataId = cpuBackend.write(pooled, convInfo.outShape, x.dtype);\n const indexesDataId = cpuBackend.write(indexes, convInfo.outShape, x.dtype);\n return [\n { dataId: pooledDataId, shape: convInfo.outShape, dtype: x.dtype },\n { dataId: indexesDataId, shape: convInfo.outShape, dtype: 'int32' }\n ];\n }\n};\n//# sourceMappingURL=MaxPoolWithArgmax.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
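// Sketch (assumes `tf`): maxPoolWithArgmax returns the pooled values plus the flattened NHWC index
// of each maximum, which is what maxPoolPositions encodes above.
const xa = tf.tensor4d([1, 9, 3, 4], [1, 2, 2, 1]);
const { result, indexes } = tf.maxPoolWithArgmax(xa, [2, 2], [2, 2], 'valid');
result.print();  // [[[[9]]]]
indexes.print(); // [[[[1]]]] – flat index of the max inside the input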
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { MirrorPad, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function mirrorPad(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n const { paddings, mode } = attrs;\n assertNotComplex(x, 'mirrorPad');\n const outShape = paddings.map((p, i) => p[0] /* beforePad */ + x.shape[i] + p[1] /* afterPad */);\n const start = paddings.map(p => p[0]);\n const end = paddings.map((p, i) => p[0] + x.shape[i]);\n const offset = mode === 'reflect' ? 0 : 1;\n const xVals = backend.data.get(x.dataId).values;\n const xRank = x.shape.length;\n const xStrides = util.computeStrides(x.shape);\n const resultSize = util.sizeFromShape(outShape);\n const resultRank = outShape.length;\n const resultStrides = util.computeStrides(outShape);\n const resVals = util.getTypedArrayFromDType(x.dtype, resultSize);\n for (let i = 0; i < resultSize; i++) {\n let coords = util.indexToLoc(i, resultRank, resultStrides);\n for (let i = 0; i < resultRank; i++) {\n if (coords[i] < start[i]) {\n coords[i] = start[i] * 2 - coords[i] - offset;\n }\n else if (coords[i] >= end[i]) {\n coords[i] = (end[i] - 1) * 2 - coords[i] + offset;\n }\n }\n coords = coords.map((c, i) => c - start[i]);\n const inIndex = util.locToIndex(coords, xRank, xStrides);\n resVals[i] = xVals[inIndex];\n }\n const outId = backend.write(resVals, outShape, x.dtype);\n return { dataId: outId, shape: outShape, dtype: x.dtype };\n}\nexport const mirrorPadConfig = {\n kernelName: MirrorPad,\n backendName: 'cpu',\n kernelFunc: mirrorPad\n};\n//# sourceMappingURL=MirrorPad.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
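// Sketch (assumes `tf`): the offset above is 0 for 'reflect' (edge value not repeated) and 1 for
// 'symmetric' (edge value mirrored once), which produces the two padding styles below.
const v = tf.tensor1d([1, 2, 3]);
tf.mirrorPad(v, [[2, 2]], 'reflect').print();   // [3, 2, 1, 2, 3, 2, 1]
tf.mirrorPad(v, [[2, 2]], 'symmetric').print(); // [2, 1, 1, 2, 3, 3, 2]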
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { NonMaxSuppressionV4 } from '@tensorflow/tfjs-core';\nimport { kernel_impls } from '@tensorflow/tfjs-core';\nconst nonMaxSuppressionV4Impl = kernel_impls.nonMaxSuppressionV4Impl;\nimport { assertNotComplex } from '../cpu_util';\nexport const nonMaxSuppressionV4Config = {\n kernelName: NonMaxSuppressionV4,\n backendName: 'cpu',\n kernelFunc: ({ inputs, backend, attrs }) => {\n const { boxes, scores } = inputs;\n const { maxOutputSize, iouThreshold, scoreThreshold, padToMaxOutputSize } = attrs;\n const cpuBackend = backend;\n assertNotComplex(boxes, 'NonMaxSuppressionPadded');\n const boxesVals = cpuBackend.data.get(boxes.dataId).values;\n const scoresVals = cpuBackend.data.get(scores.dataId).values;\n const { selectedIndices, validOutputs } = nonMaxSuppressionV4Impl(boxesVals, scoresVals, maxOutputSize, iouThreshold, scoreThreshold, padToMaxOutputSize);\n return [selectedIndices, validOutputs];\n }\n};\n//# sourceMappingURL=NonMaxSuppressionV4.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { NonMaxSuppressionV5 } from '@tensorflow/tfjs-core';\nimport { kernel_impls } from '@tensorflow/tfjs-core';\nconst nonMaxSuppressionV5Impl = kernel_impls.nonMaxSuppressionV5Impl;\nimport { assertNotComplex } from '../cpu_util';\nexport const nonMaxSuppressionV5Config = {\n kernelName: NonMaxSuppressionV5,\n backendName: 'cpu',\n kernelFunc: ({ inputs, backend, attrs }) => {\n const { boxes, scores } = inputs;\n const { maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma } = attrs;\n const cpuBackend = backend;\n assertNotComplex(boxes, 'NonMaxSuppressionWithScore');\n const boxesVals = cpuBackend.data.get(boxes.dataId).values;\n const scoresVals = cpuBackend.data.get(scores.dataId).values;\n const maxOutputSizeVal = maxOutputSize;\n const iouThresholdVal = iouThreshold;\n const scoreThresholdVal = scoreThreshold;\n const softNmsSigmaVal = softNmsSigma;\n const { selectedIndices, selectedScores } = nonMaxSuppressionV5Impl(boxesVals, scoresVals, maxOutputSizeVal, iouThresholdVal, scoreThresholdVal, softNmsSigmaVal);\n return [selectedIndices, selectedScores];\n }\n};\n//# sourceMappingURL=NonMaxSuppressionV5.js.map", "/**\n * @license\n * Copyright 
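// Sketch (assumes `tf`; the op name is taken from tfjs-core, not from this file): the
// NonMaxSuppressionV5 kernel backs nonMaxSuppressionWithScore, which also returns the kept scores.
const boxes = tf.tensor2d([[0, 0, 1, 1], [0, 0.1, 1, 1.1], [0, 2, 1, 3]]);
const scores = tf.tensor1d([0.9, 0.8, 0.7]);
const { selectedIndices, selectedScores } =
    tf.image.nonMaxSuppressionWithScore(boxes, scores, 2, 0.5, 0, 0);
selectedIndices.print(); // [0, 2] – the heavily overlapping second box is suppressed
selectedScores.print();  // [0.9, 0.7]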
2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { PadV2, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport function padV2(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n const { paddings, constantValue } = attrs;\n assertNotComplex(x, 'pad');\n const outShape = paddings.map((p, i) => p[0] /* beforePad */ + x.shape[i] + p[1] /* afterPad */);\n const start = paddings.map(p => p[0]);\n const xVals = backend.data.get(x.dataId).values;\n const xSize = util.sizeFromShape(x.shape);\n const xRank = x.shape.length;\n const xStrides = util.computeStrides(x.shape);\n const resultSize = util.sizeFromShape(outShape);\n const resultRank = outShape.length;\n const resultStrides = util.computeStrides(outShape);\n const resVals = util.getTypedArrayFromDType(x.dtype, resultSize);\n if (constantValue !== 0) {\n resVals.fill(constantValue);\n }\n for (let i = 0; i < xSize; i++) {\n const coords = util.indexToLoc(i, xRank, xStrides);\n const outCoords = coords.map((c, i) => c + start[i]);\n const outIndex = util.locToIndex(outCoords, resultRank, resultStrides);\n resVals[outIndex] = xVals[i];\n }\n const outId = backend.write(resVals, outShape, x.dtype);\n return { dataId: outId, shape: outShape, dtype: x.dtype };\n}\nexport const padV2Config = {\n kernelName: PadV2,\n backendName: 'cpu',\n kernelFunc: padV2\n};\n//# sourceMappingURL=PadV2.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Reciprocal } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const reciprocal = unaryKernelFunc(Reciprocal, (xi) => 1 / xi);\nexport const reciprocalConfig = {\n kernelName: Reciprocal,\n backendName: 'cpu',\n kernelFunc: reciprocal,\n};\n//# sourceMappingURL=Reciprocal.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
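// Sketch (assumes `tf`): padV2 above pre-fills the whole output with constantValue and then
// scatters the original values at their shifted coordinates, instead of looping over the output.
const p = tf.tensor2d([[1, 2], [3, 4]]);
tf.pad(p, [[1, 1], [1, 1]], 9).print();
// [[9, 9, 9, 9],
//  [9, 1, 2, 9],
//  [9, 3, 4, 9],
//  [9, 9, 9, 9]]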
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, RotateWithOffset, util } from '@tensorflow/tfjs-core';\nexport const rotateWithOffsetConfig = {\n kernelName: RotateWithOffset,\n backendName: 'cpu',\n kernelFunc: ({ inputs, attrs, backend }) => {\n const { image } = inputs;\n const { radians, fillValue, center } = attrs;\n const cpuBackend = backend;\n const output = util.getTypedArrayFromDType(image.dtype, util.sizeFromShape(image.shape));\n const [batch, imageHeight, imageWidth, numChannels] = image.shape;\n const [centerX, centerY] = backend_util.getImageCenter(center, imageHeight, imageWidth);\n const fullOpacityValue = 255;\n const sinFactor = Math.sin(radians);\n const cosFactor = Math.cos(radians);\n const imageVals = cpuBackend.data.get(image.dataId).values;\n for (let batchIdx = 0; batchIdx < batch; batchIdx++) {\n const batchOffset = batchIdx * imageWidth * imageHeight * numChannels;\n for (let row = 0; row < imageHeight; row++) {\n const rowOffset = row * (imageWidth * numChannels);\n for (let col = 0; col < imageWidth; col++) {\n const colOffset = col * numChannels;\n for (let channel = 0; channel < numChannels; channel++) {\n const coords = [batch, row, col, channel];\n const x = coords[2];\n const y = coords[1];\n // coordX/coordY are the result of rotating and translating x/y.\n let coordX = (x - centerX) * cosFactor - (y - centerY) * sinFactor;\n let coordY = (x - centerX) * sinFactor + (y - centerY) * cosFactor;\n coordX = Math.round(coordX + centerX);\n coordY = Math.round(coordY + centerY);\n let outputValue = fillValue;\n if (typeof fillValue !== 'number') {\n if (channel === 3) {\n outputValue = fullOpacityValue;\n }\n else {\n outputValue = fillValue[channel];\n }\n }\n // If the coordinate position falls within the image boundaries...\n if (coordX >= 0 && coordX < imageWidth && coordY >= 0 &&\n coordY < imageHeight) {\n // set the output to the image value at the coordinate position.\n const rotatedRowOffset = coordY * (imageWidth * numChannels);\n const rotatedColOffset = coordX * numChannels;\n const imageIdx = batchOffset + rotatedRowOffset + rotatedColOffset + channel;\n outputValue = imageVals[imageIdx];\n }\n const outIdx = batchOffset + rowOffset + colOffset + channel;\n output[outIdx] = outputValue;\n }\n }\n }\n }\n const dataId = cpuBackend.write(output, image.shape, image.dtype);\n return { dataId, shape: image.shape, dtype: image.dtype };\n }\n};\n//# sourceMappingURL=RotateWithOffset.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
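// Sketch: rotateWithOffset uses inverse mapping – every output pixel (x, y) is rotated around
// (centerX, centerY) to find the source pixel, falling back to fillValue outside the image.
// The same coordinate math in plain JS:
function sourceCoord(x, y, centerX, centerY, radians) {
  const cx = (x - centerX) * Math.cos(radians) - (y - centerY) * Math.sin(radians);
  const cy = (x - centerX) * Math.sin(radians) + (y - centerY) * Math.cos(radians);
  return [Math.round(cx + centerX), Math.round(cy + centerY)];
}
console.log(sourceCoord(99, 50, 50, 50, Math.PI / 2)); // [50, 99] – output (99, 50) samples input (50, 99)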
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Round } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const round = unaryKernelFunc(Round, (xi) => {\n // The algorithm is based on banker's rounding.\n const base = Math.floor(xi);\n if (xi - base < 0.5) {\n return Math.floor(xi);\n }\n else if (xi - base > 0.5) {\n return Math.ceil(xi);\n }\n else {\n if (base % 2.0 === 0.0) {\n return base;\n }\n else {\n return base + 1.0;\n }\n }\n});\nexport const roundConfig = {\n kernelName: Round,\n backendName: 'cpu',\n kernelFunc: round,\n};\n//# sourceMappingURL=Round.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Selu } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nconst scaleAlpha = backend_util.SELU_SCALEALPHA;\nconst scale = backend_util.SELU_SCALE;\nexport const selu = unaryKernelFunc(Selu, (xi) => {\n if (xi >= 0) {\n return scale * xi;\n }\n else {\n return scaleAlpha * (Math.exp(xi) - 1);\n }\n});\nexport const seluConfig = {\n kernelName: Selu,\n backendName: 'cpu',\n kernelFunc: selu,\n};\n//# sourceMappingURL=Selu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
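// Sketch (assumes `tf`): the Round kernel above implements banker's rounding, so exact .5 ties go
// to the nearest even integer instead of always rounding up.
tf.tensor1d([0.5, 1.5, 2.5, -0.5, 2.4]).round().print(); // [0, 2, 2, 0, 2]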
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sigmoid } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const sigmoid = unaryKernelFunc(Sigmoid, (xi) => 1 / (1 + Math.exp(-xi)));\nexport const sigmoidConfig = {\n kernelName: Sigmoid,\n backendName: 'cpu',\n kernelFunc: sigmoid,\n};\n//# sourceMappingURL=Sigmoid.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sign } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const sign = unaryKernelFunc(Sign, (xi) => {\n if (xi < 0) {\n return -1;\n }\n else if (xi > 0) {\n return 1;\n }\n else {\n return 0;\n }\n});\nexport const signConfig = {\n kernelName: Sign,\n backendName: 'cpu',\n kernelFunc: sign,\n};\n//# sourceMappingURL=Sign.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sin } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const sin = unaryKernelFunc(Sin, (xi) => Math.sin(xi));\nexport const sinConfig = {\n kernelName: Sin,\n backendName: 'cpu',\n kernelFunc: sin,\n};\n//# sourceMappingURL=Sin.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sinh } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const sinh = unaryKernelFunc(Sinh, (xi) => Math.sinh(xi));\nexport const sinhConfig = {\n kernelName: Sinh,\n backendName: 'cpu',\n kernelFunc: sinh,\n};\n//# sourceMappingURL=Sinh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Softplus } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\n// mirrors the implementation of tf.nn.softplus: https://goo.gl/vkcvwX\n// epsilon is the difference between 1.0 and the next representable float.\n// For a single precision 32 bit float this should be 2^-23, see:\n// https://math.byu.edu/~schow/work/IEEEFloatingPoint.htm\nconst epsilon = 1.1920928955078125e-7;\nconst threshold = Math.log(epsilon) + 2.0;\nexport const softplus = unaryKernelFunc(Softplus, (xi) => {\n // Value above which exp(x) may overflow, but softplus(x) == x\n // is within machine epsilon.\n const tooLarge = xi > -threshold;\n // Value below which exp(x) may underflow, but softplus(x) == exp(x)\n // is within machine epsilon.\n const tooSmall = xi < threshold;\n const expX = Math.exp(xi);\n let result;\n if (tooSmall) {\n result = expX;\n }\n else if (tooLarge) {\n result = xi;\n }\n else {\n result = Math.log(1.0 + expX);\n }\n return result;\n});\nexport const softplusConfig = {\n kernelName: Softplus,\n backendName: 'cpu',\n kernelFunc: softplus,\n};\n//# sourceMappingURL=Softplus.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
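// Sketch: the Softplus kernel above switches formulas to stay inside float range – a standalone
// version of the same threshold logic in plain JS:
const EPSILON = 1.1920928955078125e-7;     // 2^-23, float32 machine epsilon
const THRESHOLD = Math.log(EPSILON) + 2.0; // ≈ -13.8
function softplus(x) {
  if (x < THRESHOLD) return Math.exp(x);   // exp(x) may underflow, but equals softplus(x) here
  if (x > -THRESHOLD) return x;            // exp(x) may overflow, but softplus(x) ≈ x here
  return Math.log(1.0 + Math.exp(x));
}
console.log(softplus(0), softplus(50), softplus(-50)); // ≈ 0.6931, 50, ≈ 1.9e-22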
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Transpose } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { transposeImpl } from './Transpose_impl';\nexport function transpose(args) {\n const { inputs, attrs, backend } = args;\n const { x } = inputs;\n const { perm } = attrs;\n assertNotComplex(x, 'transpose');\n const xRank = x.shape.length;\n const newShape = new Array(xRank);\n for (let i = 0; i < newShape.length; i++) {\n newShape[i] = x.shape[perm[i]];\n }\n const values = backend.data.get(x.dataId).values;\n const result = transposeImpl(values, x.shape, x.dtype, perm, newShape);\n const dataId = backend.write(result, newShape, x.dtype);\n return { dataId, shape: newShape, dtype: x.dtype };\n}\nexport const transposeConfig = {\n kernelName: Transpose,\n backendName: 'cpu',\n kernelFunc: transpose\n};\n//# sourceMappingURL=Transpose.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, SpaceToBatchND, util } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { padV2Config } from './PadV2';\nimport { reshape } from './Reshape';\nimport { transpose } from './Transpose';\nexport function spaceToBatchND(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n const { blockShape, paddings } = attrs;\n assertNotComplex([x], 'spaceToBatchND');\n const prod = util.sizeFromShape(blockShape);\n const completePaddings = [[0, 0]];\n completePaddings.push(...paddings);\n for (let i = 1 + blockShape.length; i < x.shape.length; ++i) {\n completePaddings.push([0, 0]);\n }\n const paddedX = padV2Config.kernelFunc({\n inputs: { x },\n backend,\n attrs: { paddings: completePaddings, constantValue: 0 }\n });\n const reshapedPaddedShape = backend_util.getReshaped(paddedX.shape, blockShape, prod, false);\n const permutedReshapedPaddedPermutation = backend_util.getPermuted(reshapedPaddedShape.length, blockShape.length, false);\n const flattenShape = backend_util.getReshapedPermuted(paddedX.shape, blockShape, prod, false);\n const reshapeInputs = { x: paddedX };\n const reshapeAttrs = { shape: reshapedPaddedShape };\n const paddedXReshaped = reshape({ inputs: reshapeInputs, backend, attrs: reshapeAttrs 
});\n const transposeInputs = { x: paddedXReshaped };\n const transposeAttrs = { perm: permutedReshapedPaddedPermutation };\n const paddedXT = transpose({ inputs: transposeInputs, backend, attrs: transposeAttrs });\n const resultReshapeInputs = { x: paddedXT };\n const resultReshapeAttrs = { shape: flattenShape };\n const result = reshape({ inputs: resultReshapeInputs, backend, attrs: resultReshapeAttrs });\n backend.disposeIntermediateTensorInfo(paddedX);\n backend.disposeIntermediateTensorInfo(paddedXReshaped);\n backend.disposeIntermediateTensorInfo(paddedXT);\n return result;\n}\nexport const spaceToBatchNDConfig = {\n kernelName: SpaceToBatchND,\n backendName: 'cpu',\n kernelFunc: spaceToBatchND\n};\n//# sourceMappingURL=SpaceToBatchND.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sqrt } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const sqrt = unaryKernelFunc(Sqrt, (xi) => Math.sqrt(xi));\nexport const sqrtConfig = {\n kernelName: Sqrt,\n backendName: 'cpu',\n kernelFunc: sqrt,\n};\n//# sourceMappingURL=Sqrt.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Square } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nexport const squareConfig = {\n kernelName: Square,\n backendName: 'cpu',\n kernelFunc: ({ inputs, backend }) => {\n const { x } = inputs;\n const cpuBackend = backend;\n assertNotComplex(x, 'square');\n const values = cpuBackend.data.get(x.dataId).values;\n const newValues = new Float32Array(values.length);\n for (let i = 0; i < values.length; ++i) {\n const value = values[i];\n newValues[i] = value * value;\n }\n const dataId = cpuBackend.write(newValues, x.shape, x.dtype);\n return { dataId, shape: x.shape, dtype: x.dtype };\n }\n};\n//# sourceMappingURL=Square.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
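// Sketch (assumes `tf`): spaceToBatchND above is composed from existing kernels – pad, reshape,
// transpose, reshape – rather than a dedicated loop; spatial blocks move into the batch dimension.
const s = tf.tensor4d([1, 2, 3, 4], [1, 2, 2, 1]);
tf.spaceToBatchND(s, [2, 2], [[0, 0], [0, 0]]).print(); // shape [4, 1, 1, 1]: batches [1], [2], [3], [4]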
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Step } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const step = unaryKernelFunc(Step, (xi, attrs) => {\n const stepAttrs = attrs;\n if (isNaN(xi)) {\n return NaN;\n }\n else {\n return xi > 0 ? 1 : stepAttrs.alpha;\n }\n});\nexport const stepConfig = {\n kernelName: Step,\n backendName: 'cpu',\n kernelFunc: step,\n};\n//# sourceMappingURL=Step.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Tan } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const tan = unaryKernelFunc(Tan, (xi) => Math.tan(xi));\nexport const tanConfig = {\n kernelName: Tan,\n backendName: 'cpu',\n kernelFunc: tan,\n};\n//# sourceMappingURL=Tan.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Tanh } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../utils/unary_utils';\nexport const tanh = unaryKernelFunc(Tanh, (xi) => Math.tanh(xi));\nexport const tanhConfig = {\n kernelName: Tanh,\n backendName: 'cpu',\n kernelFunc: tanh,\n};\n//# sourceMappingURL=Tanh.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Unique } from '@tensorflow/tfjs-core';\nimport { assertNotComplex } from '../cpu_util';\nimport { uniqueImpl } from './Unique_impl';\nexport function unique(args) {\n const { inputs, attrs, backend } = args;\n const { axis } = attrs;\n const { x } = inputs;\n assertNotComplex(x, 'unique');\n const values = backend.data.get(x.dataId).values;\n const { outputValues, outputShape, indices } = uniqueImpl(values, axis, x.shape, x.dtype);\n return [\n backend.makeTensorInfo(outputShape, x.dtype, outputValues),\n backend.makeTensorInfo([indices.length], 'int32', indices),\n ];\n}\nexport const uniqueConfig = {\n kernelName: Unique,\n backendName: 'cpu',\n kernelFunc: unique,\n};\n//# sourceMappingURL=Unique.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// We explicitly import the modular kernels so they get registered in the\n// global registry when we compile the library. 
A modular build would replace\n// the contents of this file and import only the kernels that are needed.\nimport { registerKernel } from '@tensorflow/tfjs-core';\nimport { _fusedMatMulConfig } from './kernels/_FusedMatMul';\nimport { absConfig } from './kernels/Abs';\nimport { acosConfig } from './kernels/Acos';\nimport { acoshConfig } from './kernels/Acosh';\nimport { addConfig } from './kernels/Add';\nimport { asinConfig } from './kernels/Asin';\nimport { asinhConfig } from './kernels/Asinh';\nimport { atanConfig } from './kernels/Atan';\nimport { atanhConfig } from './kernels/Atanh';\nimport { avgPoolConfig } from './kernels/AvgPool';\nimport { avgPoolBackpropConfig } from './kernels/AvgPoolBackprop';\nimport { batchMatMulConfig } from './kernels/BatchMatMul';\nimport { batchNormConfig } from './kernels/BatchNorm';\nimport { castConfig } from './kernels/Cast';\nimport { ceilConfig } from './kernels/Ceil';\nimport { clipConfig } from './kernels/Clip';\nimport { complexConfig } from './kernels/Complex';\nimport { concatConfig } from './kernels/Concat';\nimport { conv2DConfig } from './kernels/Conv2D';\nimport { conv2DBackpropFilterConfig } from './kernels/Conv2DBackpropFilter';\nimport { conv2DBackpropInputConfig } from './kernels/Conv2DBackpropInput';\nimport { conv3DConfig } from './kernels/Conv3D';\nimport { conv3DBackpropFilterV2Config } from './kernels/Conv3DBackpropFilterV2';\nimport { conv3DBackpropInputV2Config } from './kernels/Conv3DBackpropInputV2';\nimport { cosConfig } from './kernels/Cos';\nimport { coshConfig } from './kernels/Cosh';\nimport { depthwiseConv2dNativeConfig } from './kernels/DepthwiseConv2dNative';\nimport { depthwiseConv2dNativeBackpropFilterConfig } from './kernels/DepthwiseConv2dNativeBackpropFilter';\nimport { depthwiseConv2dNativeBackpropInputConfig } from './kernels/DepthwiseConv2dNativeBackpropInput';\nimport { dilation2dConfig } from './kernels/Dilation2D';\nimport { dilation2dBackpropFilterConfig } from './kernels/Dilation2DBackpropFilter';\nimport { dilation2dBackpropInputConfig } from './kernels/Dilation2DBackpropInput';\nimport { divConfig } from './kernels/Div';\nimport { eluConfig } from './kernels/Elu';\nimport { erfConfig } from './kernels/Erf';\nimport { expConfig } from './kernels/Exp';\nimport { expm1Config } from './kernels/Expm1';\nimport { fftConfig } from './kernels/FFT';\nimport { fillConfig } from './kernels/Fill';\nimport { flipLeftRightConfig } from './kernels/FlipLeftRight';\nimport { floorConfig } from './kernels/Floor';\nimport { fusedConv2DConfig } from './kernels/FusedConv2D';\nimport { fusedDepthwiseConv2DConfig } from './kernels/FusedDepthwiseConv2D';\nimport { identityConfig } from './kernels/Identity';\nimport { ifftConfig } from './kernels/IFFT';\nimport { imagConfig } from './kernels/Imag';\nimport { isFiniteConfig } from './kernels/IsFinite';\nimport { isInfConfig } from './kernels/IsInf';\nimport { isNaNConfig } from './kernels/IsNaN';\nimport { logConfig } from './kernels/Log';\nimport { log1pConfig } from './kernels/Log1p';\nimport { logicalNotConfig } from './kernels/LogicalNot';\nimport { maxConfig } from './kernels/Max';\nimport { maxPoolConfig } from './kernels/MaxPool';\nimport { maxPoolBackpropConfig } from './kernels/MaxPoolBackprop';\nimport { maxPoolWithArgmaxConfig } from './kernels/MaxPoolWithArgmax';\nimport { mirrorPadConfig } from './kernels/MirrorPad';\nimport { multiplyConfig } from './kernels/Multiply';\nimport { nonMaxSuppressionV4Config } from './kernels/NonMaxSuppressionV4';\nimport { 
nonMaxSuppressionV5Config } from './kernels/NonMaxSuppressionV5';\nimport { notEqualConfig } from './kernels/NotEqual';\nimport { padV2Config } from './kernels/PadV2';\nimport { preluConfig } from './kernels/Prelu';\nimport { realConfig } from './kernels/Real';\nimport { reciprocalConfig } from './kernels/Reciprocal';\nimport { reluConfig } from './kernels/Relu';\nimport { relu6Config } from './kernels/Relu6';\nimport { reshapeConfig } from './kernels/Reshape';\nimport { rotateWithOffsetConfig } from './kernels/RotateWithOffset';\nimport { roundConfig } from './kernels/Round';\nimport { rsqrtConfig } from './kernels/Rsqrt';\nimport { seluConfig } from './kernels/Selu';\nimport { sigmoidConfig } from './kernels/Sigmoid';\nimport { signConfig } from './kernels/Sign';\nimport { sinConfig } from './kernels/Sin';\nimport { sinhConfig } from './kernels/Sinh';\nimport { sliceConfig } from './kernels/Slice';\nimport { softplusConfig } from './kernels/Softplus';\nimport { spaceToBatchNDConfig } from './kernels/SpaceToBatchND';\nimport { sqrtConfig } from './kernels/Sqrt';\nimport { squareConfig } from './kernels/Square';\nimport { squaredDifferenceConfig } from './kernels/SquaredDifference';\nimport { stepConfig } from './kernels/Step';\nimport { subConfig } from './kernels/Sub';\nimport { tanConfig } from './kernels/Tan';\nimport { tanhConfig } from './kernels/Tanh';\nimport { transposeConfig } from './kernels/Transpose';\nimport { uniqueConfig } from './kernels/Unique';\n// List all kernel configs here\nconst kernelConfigs = [\n _fusedMatMulConfig,\n absConfig,\n acosConfig,\n acoshConfig,\n addConfig,\n asinConfig,\n asinhConfig,\n atanConfig,\n atanhConfig,\n avgPoolConfig,\n avgPoolBackpropConfig,\n batchMatMulConfig,\n batchNormConfig,\n castConfig,\n ceilConfig,\n clipConfig,\n complexConfig,\n concatConfig,\n conv2DBackpropFilterConfig,\n conv2DBackpropInputConfig,\n conv2DConfig,\n conv3DBackpropFilterV2Config,\n conv3DBackpropInputV2Config,\n conv3DConfig,\n cosConfig,\n coshConfig,\n depthwiseConv2dNativeConfig,\n depthwiseConv2dNativeBackpropFilterConfig,\n depthwiseConv2dNativeBackpropInputConfig,\n dilation2dConfig,\n dilation2dBackpropInputConfig,\n dilation2dBackpropFilterConfig,\n divConfig,\n eluConfig,\n erfConfig,\n expConfig,\n expm1Config,\n fftConfig,\n fillConfig,\n flipLeftRightConfig,\n floorConfig,\n fusedConv2DConfig,\n fusedDepthwiseConv2DConfig,\n identityConfig,\n ifftConfig,\n imagConfig,\n isFiniteConfig,\n isInfConfig,\n isNaNConfig,\n logConfig,\n log1pConfig,\n logicalNotConfig,\n maxPoolConfig,\n maxPoolBackpropConfig,\n maxPoolWithArgmaxConfig,\n maxConfig,\n mirrorPadConfig,\n multiplyConfig,\n nonMaxSuppressionV4Config,\n nonMaxSuppressionV5Config,\n notEqualConfig,\n padV2Config,\n preluConfig,\n realConfig,\n reciprocalConfig,\n reluConfig,\n relu6Config,\n reshapeConfig,\n rotateWithOffsetConfig,\n roundConfig,\n rsqrtConfig,\n seluConfig,\n sigmoidConfig,\n signConfig,\n sinConfig,\n sinhConfig,\n sliceConfig,\n softplusConfig,\n spaceToBatchNDConfig,\n sqrtConfig,\n squareConfig,\n squaredDifferenceConfig,\n stepConfig,\n subConfig,\n tanConfig,\n tanhConfig,\n transposeConfig,\n uniqueConfig,\n];\nfor (const kernelConfig of kernelConfigs) {\n registerKernel(kernelConfig);\n}\n//# sourceMappingURL=register_all_kernels.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
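// Sketch: register_all_kernels.js above simply calls registerKernel once per config. A custom
// kernel (hypothetical example, not part of the library) is registered the same way:
import { registerKernel } from '@tensorflow/tfjs-core';

const doubleItConfig = {
  kernelName: 'DoubleIt',                             // hypothetical op name
  backendName: 'cpu',
  kernelFunc: ({ inputs, backend }) => {
    const { x } = inputs;
    const values = backend.data.get(x.dataId).values; // same backend internals used above
    const out = new Float32Array(values.length);
    for (let i = 0; i < values.length; ++i) out[i] = 2 * values[i];
    const dataId = backend.write(out, x.shape, x.dtype);
    return { dataId, shape: x.shape, dtype: x.dtype };
  },
};
registerKernel(doubleItConfig);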
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// All exports from this package should be in base.\nexport * from './base';\nimport './register_all_kernels';\n//# sourceMappingURL=index.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nconst contexts = {};\nconst WEBGL_ATTRIBUTES = {\n alpha: false,\n antialias: false,\n premultipliedAlpha: false,\n preserveDrawingBuffer: false,\n depth: false,\n stencil: false,\n failIfMajorPerformanceCaveat: true\n};\nexport function clearWebGLContext(webGLVersion) {\n delete contexts[webGLVersion];\n}\nexport function setWebGLContext(webGLVersion, gl) {\n contexts[webGLVersion] = gl;\n}\nexport function getWebGLContext(webGLVersion) {\n if (!(webGLVersion in contexts)) {\n const newCtx = getWebGLRenderingContext(webGLVersion);\n if (newCtx !== null) {\n contexts[webGLVersion] = newCtx;\n }\n else {\n console.log('Could not get context for WebGL version', webGLVersion);\n return null;\n }\n }\n const gl = contexts[webGLVersion];\n if (gl.isContextLost()) {\n delete contexts[webGLVersion];\n return getWebGLContext(webGLVersion);\n }\n gl.disable(gl.DEPTH_TEST);\n gl.disable(gl.STENCIL_TEST);\n gl.disable(gl.BLEND);\n gl.disable(gl.DITHER);\n gl.disable(gl.POLYGON_OFFSET_FILL);\n gl.disable(gl.SAMPLE_COVERAGE);\n gl.enable(gl.SCISSOR_TEST);\n gl.enable(gl.CULL_FACE);\n gl.cullFace(gl.BACK);\n return contexts[webGLVersion];\n}\nfunction createCanvas(webGLVersion) {\n if (typeof OffscreenCanvas !== 'undefined' && webGLVersion === 2) {\n return new OffscreenCanvas(300, 150);\n }\n else if (typeof document !== 'undefined') {\n return document.createElement('canvas');\n }\n else {\n throw new Error('Cannot create a canvas in this context');\n }\n}\nfunction getWebGLRenderingContext(webGLVersion) {\n if (webGLVersion !== 1 && webGLVersion !== 2) {\n throw new Error('Cannot get WebGL rendering context, WebGL is disabled.');\n }\n const canvas = createCanvas(webGLVersion);\n canvas.addEventListener('webglcontextlost', (ev) => {\n ev.preventDefault();\n delete contexts[webGLVersion];\n }, false);\n if (webGLVersion === 1) {\n return (canvas.getContext('webgl', WEBGL_ATTRIBUTES) ||\n canvas.getContext('experimental-webgl', WEBGL_ATTRIBUTES));\n }\n return 
canvas.getContext('webgl2', WEBGL_ATTRIBUTES);\n}\n//# sourceMappingURL=canvas_util.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env, util } from '@tensorflow/tfjs-core';\nexport var PackingScheme;\n(function (PackingScheme) {\n /**\n * All values in a single texel are densely packed without any constraints.\n *\n * This is how the shader encodes a tensor with shape = [2, 3, 4]\n * (indices are [batch, row, col]).\n *\n * 000|001 010|011 020|021\n * ------- ------- -------\n * 002|003 012|013 022|023\n *\n * 100|101 110|111 120|121\n * ------- ------- -------\n * 102|103 112|113 122|123\n *\n */\n PackingScheme[PackingScheme[\"DENSE\"] = 0] = \"DENSE\";\n /**\n * Single texels contain only values from the same batch, and from adjacent\n * rows and columns.\n *\n * This is how the shader encodes a tensor with shape = [2, 3, 5]\n * (indices are [batch, row, col]).\n *\n * 000|001 002|003 004|xxx 020|021 022|023 024|xxx\n * ------- ------- ------- ------- ------- -------\n * 010|011 012|013 014|xxx xxx|xxx xxx|xxx xxx|xxx\n *\n * 100|101 102|103 104|xxx 120|121 122|123 124|xxx\n * ------- ------- ------- ------- ------- -------\n * 110|111 112|113 114|xxx xxx|xxx xxx|xxx xxx|xxx\n *\n */\n PackingScheme[PackingScheme[\"SHARED_BATCH\"] = 1] = \"SHARED_BATCH\";\n})(PackingScheme || (PackingScheme = {}));\nexport var TextureUsage;\n(function (TextureUsage) {\n TextureUsage[TextureUsage[\"RENDER\"] = 0] = \"RENDER\";\n TextureUsage[TextureUsage[\"UPLOAD\"] = 1] = \"UPLOAD\";\n TextureUsage[TextureUsage[\"PIXELS\"] = 2] = \"PIXELS\";\n TextureUsage[TextureUsage[\"DOWNLOAD\"] = 3] = \"DOWNLOAD\";\n})(TextureUsage || (TextureUsage = {}));\nexport var PhysicalTextureType;\n(function (PhysicalTextureType) {\n PhysicalTextureType[PhysicalTextureType[\"UNPACKED_FLOAT16\"] = 0] = \"UNPACKED_FLOAT16\";\n PhysicalTextureType[PhysicalTextureType[\"UNPACKED_FLOAT32\"] = 1] = \"UNPACKED_FLOAT32\";\n PhysicalTextureType[PhysicalTextureType[\"PACKED_4X1_UNSIGNED_BYTE\"] = 2] = \"PACKED_4X1_UNSIGNED_BYTE\";\n PhysicalTextureType[PhysicalTextureType[\"PACKED_2X2_FLOAT32\"] = 3] = \"PACKED_2X2_FLOAT32\";\n PhysicalTextureType[PhysicalTextureType[\"PACKED_2X2_FLOAT16\"] = 4] = \"PACKED_2X2_FLOAT16\";\n})(PhysicalTextureType || (PhysicalTextureType = {}));\nexport function getUnpackedMatrixTextureShapeWidthHeight(rows, columns) {\n return [columns, rows];\n}\nexport function getUnpackedArraySizeFromMatrixSize(matrixSize, channelsPerTexture) {\n return matrixSize * channelsPerTexture;\n}\nexport function getColorMatrixTextureShapeWidthHeight(rows, columns) {\n return [columns * 4, rows];\n}\n/**\n * Get shape for densely packed RGBA texture.\n */\nexport function getDenseTexShape(shape) {\n const size = util.sizeFromShape(shape);\n const texelsNeeded = Math.ceil(size / 4);\n return util.sizeToSquarishShape(texelsNeeded);\n}\nexport 
function getMatrixSizeFromUnpackedArraySize(unpackedSize, channelsPerTexture) {\n if (unpackedSize % channelsPerTexture !== 0) {\n throw new Error(`unpackedSize (${unpackedSize}) must be a multiple of ` +\n `${channelsPerTexture}`);\n }\n return unpackedSize / channelsPerTexture;\n}\nexport function decodeMatrixFromUnpackedColorRGBAArray(unpackedArray, matrix, channels) {\n const requiredSize = unpackedArray.length * channels / 4;\n if (matrix.length < requiredSize) {\n throw new Error(`matrix length (${matrix.length}) must be >= ${requiredSize}`);\n }\n let dst = 0;\n for (let src = 0; src < unpackedArray.length; src += 4) {\n for (let c = 0; c < channels; c++) {\n matrix[dst++] = unpackedArray[src + c];\n }\n }\n}\nexport function getPackedMatrixTextureShapeWidthHeight(rows, columns) {\n return [\n Math.max(1, Math.ceil(columns / 2)), Math.max(1, Math.ceil(rows / 2))\n ];\n}\nexport function getPackedRGBAArraySizeFromMatrixShape(rows, columns) {\n const [w, h] = getPackedMatrixTextureShapeWidthHeight(rows, columns);\n return w * h * 4;\n}\nexport function getTextureConfig(\n// tslint:disable-next-line:no-any\ngl, textureHalfFloatExtension) {\n // tslint:disable-next-line:no-any\n const glany = gl;\n let internalFormatFloat;\n let internalFormatHalfFloat;\n let internalFormatPackedHalfFloat;\n let internalFormatPackedFloat;\n let textureFormatFloat;\n let downloadTextureFormat;\n let downloadUnpackNumChannels;\n let defaultNumChannels;\n let textureTypeHalfFloat;\n let textureTypeFloat;\n if (env().getNumber('WEBGL_VERSION') === 2) {\n internalFormatFloat = glany.R32F;\n internalFormatHalfFloat = glany.R16F;\n internalFormatPackedHalfFloat = glany.RGBA16F;\n internalFormatPackedFloat = glany.RGBA32F;\n textureFormatFloat = glany.RED;\n downloadUnpackNumChannels = 4;\n defaultNumChannels = 1;\n textureTypeHalfFloat = glany.HALF_FLOAT;\n textureTypeFloat = glany.FLOAT;\n }\n else {\n internalFormatFloat = gl.RGBA;\n internalFormatHalfFloat = gl.RGBA;\n internalFormatPackedHalfFloat = gl.RGBA;\n internalFormatPackedFloat = glany.RGBA;\n textureFormatFloat = gl.RGBA;\n downloadUnpackNumChannels = 4;\n defaultNumChannels = 4;\n textureTypeHalfFloat = textureHalfFloatExtension != null ?\n textureHalfFloatExtension.HALF_FLOAT_OES :\n null;\n textureTypeFloat = gl.FLOAT;\n }\n downloadTextureFormat = gl.RGBA;\n return {\n internalFormatFloat,\n internalFormatHalfFloat,\n internalFormatPackedHalfFloat,\n internalFormatPackedFloat,\n textureFormatFloat,\n downloadTextureFormat,\n downloadUnpackNumChannels,\n defaultNumChannels,\n textureTypeHalfFloat,\n textureTypeFloat\n };\n}\n//# sourceMappingURL=tex_util.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env, util } from '@tensorflow/tfjs-core';\nimport { getWebGLContext } from './canvas_util';\nimport { getTextureConfig } from './tex_util';\nexport function callAndCheck(gl, func) {\n const returnValue = func();\n if (env().getBool('DEBUG')) {\n checkWebGLError(gl);\n }\n return returnValue;\n}\nfunction checkWebGLError(gl) {\n const error = gl.getError();\n if (error !== gl.NO_ERROR) {\n throw new Error('WebGL Error: ' + getWebGLErrorMessage(gl, error));\n }\n}\n// https://en.wikipedia.org/wiki/Half-precision_floating-point_format\nconst MIN_FLOAT16 = 5.96e-8;\nconst MAX_FLOAT16 = 65504;\nexport function canBeRepresented(num) {\n if (env().getBool('WEBGL_RENDER_FLOAT32_ENABLED') || num === 0 ||\n (MIN_FLOAT16 < Math.abs(num) && Math.abs(num) < MAX_FLOAT16)) {\n return true;\n }\n return false;\n}\nexport function getWebGLErrorMessage(gl, status) {\n switch (status) {\n case gl.NO_ERROR:\n return 'NO_ERROR';\n case gl.INVALID_ENUM:\n return 'INVALID_ENUM';\n case gl.INVALID_VALUE:\n return 'INVALID_VALUE';\n case gl.INVALID_OPERATION:\n return 'INVALID_OPERATION';\n case gl.INVALID_FRAMEBUFFER_OPERATION:\n return 'INVALID_FRAMEBUFFER_OPERATION';\n case gl.OUT_OF_MEMORY:\n return 'OUT_OF_MEMORY';\n case gl.CONTEXT_LOST_WEBGL:\n return 'CONTEXT_LOST_WEBGL';\n default:\n return `Unknown error code ${status}`;\n }\n}\nexport function getExtensionOrThrow(gl, extensionName) {\n return throwIfNull(gl, () => gl.getExtension(extensionName), 'Extension \"' + extensionName + '\" not supported on this browser.');\n}\nexport function createVertexShader(gl, vertexShaderSource) {\n const vertexShader = throwIfNull(gl, () => gl.createShader(gl.VERTEX_SHADER), 'Unable to create vertex WebGLShader.');\n callAndCheck(gl, () => gl.shaderSource(vertexShader, vertexShaderSource));\n callAndCheck(gl, () => gl.compileShader(vertexShader));\n if (gl.getShaderParameter(vertexShader, gl.COMPILE_STATUS) === false) {\n console.log(gl.getShaderInfoLog(vertexShader));\n throw new Error('Failed to compile vertex shader.');\n }\n return vertexShader;\n}\nexport function createFragmentShader(gl, fragmentShaderSource) {\n const fragmentShader = throwIfNull(gl, () => gl.createShader(gl.FRAGMENT_SHADER), 'Unable to create fragment WebGLShader.');\n callAndCheck(gl, () => gl.shaderSource(fragmentShader, fragmentShaderSource));\n callAndCheck(gl, () => gl.compileShader(fragmentShader));\n if (gl.getShaderParameter(fragmentShader, gl.COMPILE_STATUS) === false) {\n logShaderSourceAndInfoLog(fragmentShaderSource, gl.getShaderInfoLog(fragmentShader));\n throw new Error('Failed to compile fragment shader.');\n }\n return fragmentShader;\n}\nconst lineNumberRegex = /ERROR: [0-9]+:([0-9]+):/g;\nfunction logShaderSourceAndInfoLog(shaderSource, shaderInfoLog) {\n const lineNumberRegexResult = lineNumberRegex.exec(shaderInfoLog);\n if (lineNumberRegexResult == null) 
{\n console.log(`Couldn't parse line number in error: ${shaderInfoLog}`);\n console.log(shaderSource);\n return;\n }\n const lineNumber = +lineNumberRegexResult[1];\n const shaderLines = shaderSource.split('\\n');\n const pad = shaderLines.length.toString().length + 2;\n const linesWithLineNumbers = shaderLines.map((line, lineNumber) => util.rightPad((lineNumber + 1).toString(), pad) + line);\n let maxLineLength = 0;\n for (let i = 0; i < linesWithLineNumbers.length; i++) {\n maxLineLength = Math.max(linesWithLineNumbers[i].length, maxLineLength);\n }\n const beforeErrorLines = linesWithLineNumbers.slice(0, lineNumber - 1);\n const errorLine = linesWithLineNumbers.slice(lineNumber - 1, lineNumber);\n const afterErrorLines = linesWithLineNumbers.slice(lineNumber);\n console.log(beforeErrorLines.join('\\n'));\n console.log(shaderInfoLog.split('\\n')[0]);\n console.log(`%c ${util.rightPad(errorLine[0], maxLineLength)}`, 'border:1px solid red; background-color:#e3d2d2; color:#a61717');\n console.log(afterErrorLines.join('\\n'));\n}\nexport function createProgram(gl) {\n return throwIfNull(gl, () => gl.createProgram(), 'Unable to create WebGLProgram.');\n}\nexport function linkProgram(gl, program) {\n callAndCheck(gl, () => gl.linkProgram(program));\n if (gl.getProgramParameter(program, gl.LINK_STATUS) === false) {\n console.log(gl.getProgramInfoLog(program));\n throw new Error('Failed to link vertex and fragment shaders.');\n }\n}\nexport function validateProgram(gl, program) {\n callAndCheck(gl, () => gl.validateProgram(program));\n if (gl.getProgramParameter(program, gl.VALIDATE_STATUS) === false) {\n console.log(gl.getProgramInfoLog(program));\n throw new Error('Shader program validation failed.');\n }\n}\nexport function createStaticVertexBuffer(gl, data) {\n const buffer = throwIfNull(gl, () => gl.createBuffer(), 'Unable to create WebGLBuffer');\n callAndCheck(gl, () => gl.bindBuffer(gl.ARRAY_BUFFER, buffer));\n callAndCheck(gl, () => gl.bufferData(gl.ARRAY_BUFFER, data, gl.STATIC_DRAW));\n return buffer;\n}\nexport function createStaticIndexBuffer(gl, data) {\n const buffer = throwIfNull(gl, () => gl.createBuffer(), 'Unable to create WebGLBuffer');\n callAndCheck(gl, () => gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, buffer));\n callAndCheck(gl, () => gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, data, gl.STATIC_DRAW));\n return buffer;\n}\nexport function getNumChannels() {\n if (env().getNumber('WEBGL_VERSION') === 2) {\n return 1;\n }\n return 4;\n}\nexport function createTexture(gl) {\n return throwIfNull(gl, () => gl.createTexture(), 'Unable to create WebGLTexture.');\n}\nexport function validateTextureSize(width, height) {\n const maxTextureSize = env().getNumber('WEBGL_MAX_TEXTURE_SIZE');\n if ((width <= 0) || (height <= 0)) {\n const requested = `[${width}x${height}]`;\n throw new Error('Requested texture size ' + requested + ' is invalid.');\n }\n if ((width > maxTextureSize) || (height > maxTextureSize)) {\n const requested = `[${width}x${height}]`;\n const max = `[${maxTextureSize}x${maxTextureSize}]`;\n throw new Error('Requested texture size ' + requested +\n ' greater than WebGL maximum on this browser / GPU ' + max + '.');\n }\n}\nexport function createFramebuffer(gl) {\n return throwIfNull(gl, () => gl.createFramebuffer(), 'Unable to create WebGLFramebuffer.');\n}\nexport function bindVertexBufferToProgramAttribute(gl, program, attribute, buffer, arrayEntriesPerItem, itemStrideInBytes, itemOffsetInBytes) {\n const loc = gl.getAttribLocation(program, attribute);\n if (loc === -1) {\n 
// The GPU compiler decided to strip out this attribute because it's unused,\n // thus no need to bind.\n return false;\n }\n callAndCheck(gl, () => gl.bindBuffer(gl.ARRAY_BUFFER, buffer));\n callAndCheck(gl, () => gl.vertexAttribPointer(loc, arrayEntriesPerItem, gl.FLOAT, false, itemStrideInBytes, itemOffsetInBytes));\n callAndCheck(gl, () => gl.enableVertexAttribArray(loc));\n return true;\n}\nexport function bindTextureUnit(gl, texture, textureUnit) {\n validateTextureUnit(gl, textureUnit);\n callAndCheck(gl, () => gl.activeTexture(gl.TEXTURE0 + textureUnit));\n callAndCheck(gl, () => gl.bindTexture(gl.TEXTURE_2D, texture));\n}\nexport function unbindTextureUnit(gl, textureUnit) {\n validateTextureUnit(gl, textureUnit);\n callAndCheck(gl, () => gl.activeTexture(gl.TEXTURE0 + textureUnit));\n callAndCheck(gl, () => gl.bindTexture(gl.TEXTURE_2D, null));\n}\nexport function getProgramUniformLocationOrThrow(gl, program, uniformName) {\n return throwIfNull(gl, () => gl.getUniformLocation(program, uniformName), 'uniform \"' + uniformName + '\" not present in program.');\n}\nexport function getProgramUniformLocation(gl, program, uniformName) {\n return gl.getUniformLocation(program, uniformName);\n}\nexport function bindTextureToProgramUniformSampler(gl, texture, uniformSamplerLocation, textureUnit) {\n callAndCheck(gl, () => bindTextureUnit(gl, texture, textureUnit));\n callAndCheck(gl, () => gl.uniform1i(uniformSamplerLocation, textureUnit));\n}\nexport function bindCanvasToFramebuffer(gl) {\n callAndCheck(gl, () => gl.bindFramebuffer(gl.FRAMEBUFFER, null));\n callAndCheck(gl, () => gl.viewport(0, 0, gl.canvas.width, gl.canvas.height));\n callAndCheck(gl, () => gl.scissor(0, 0, gl.canvas.width, gl.canvas.height));\n}\nexport function bindColorTextureToFramebuffer(gl, texture, framebuffer) {\n callAndCheck(gl, () => gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer));\n callAndCheck(gl, () => gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0));\n}\nexport function unbindColorTextureFromFramebuffer(gl, framebuffer) {\n callAndCheck(gl, () => gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer));\n callAndCheck(gl, () => gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, null, 0));\n}\nexport function validateFramebuffer(gl) {\n const status = gl.checkFramebufferStatus(gl.FRAMEBUFFER);\n if (status !== gl.FRAMEBUFFER_COMPLETE) {\n throw new Error('Error binding framebuffer: ' + getFramebufferErrorMessage(gl, status));\n }\n}\nexport function getFramebufferErrorMessage(gl, status) {\n switch (status) {\n case gl.FRAMEBUFFER_INCOMPLETE_ATTACHMENT:\n return 'FRAMEBUFFER_INCOMPLETE_ATTACHMENT';\n case gl.FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT:\n return 'FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT';\n case gl.FRAMEBUFFER_INCOMPLETE_DIMENSIONS:\n return 'FRAMEBUFFER_INCOMPLETE_DIMENSIONS';\n case gl.FRAMEBUFFER_UNSUPPORTED:\n return 'FRAMEBUFFER_UNSUPPORTED';\n default:\n return `unknown error ${status}`;\n }\n}\nfunction throwIfNull(gl, returnTOrNull, failureMessage) {\n const tOrNull = callAndCheck(gl, () => returnTOrNull());\n if (tOrNull == null) {\n throw new Error(failureMessage);\n }\n return tOrNull;\n}\nfunction validateTextureUnit(gl, textureUnit) {\n const maxTextureUnit = gl.MAX_COMBINED_TEXTURE_IMAGE_UNITS - 1;\n const glTextureUnit = textureUnit + gl.TEXTURE0;\n if (glTextureUnit < gl.TEXTURE0 || glTextureUnit > maxTextureUnit) {\n const textureUnitRange = `[gl.TEXTURE0, gl.TEXTURE${maxTextureUnit}]`;\n throw new 
Error(`textureUnit must be in ${textureUnitRange}.`);\n }\n}\nexport function getBatchDim(shape, dimsToSkip = 2) {\n return util.sizeFromShape(shape.slice(0, shape.length - dimsToSkip));\n}\nexport function getRowsCols(shape) {\n if (shape.length === 0) {\n throw Error('Cannot get rows and columns of an empty shape array.');\n }\n return [\n shape.length > 1 ? shape[shape.length - 2] : 1, shape[shape.length - 1]\n ];\n}\nexport function getShapeAs3D(shape) {\n let shapeAs3D = [1, 1, 1];\n const isScalar = shape.length === 0 || (shape.length === 1 && shape[0] === 1);\n if (!isScalar) {\n shapeAs3D =\n [getBatchDim(shape), ...getRowsCols(shape)];\n }\n return shapeAs3D;\n}\nexport function getTextureShapeFromLogicalShape(logShape, isPacked = false) {\n let maxTexSize = env().getNumber('WEBGL_MAX_TEXTURE_SIZE');\n if (isPacked) {\n maxTexSize = maxTexSize * 2;\n // This logic ensures we accurately count the number of packed texels needed\n // to accommodate the tensor. We can only pack values in the same texel if\n // they are from adjacent pairs of rows/cols within the same batch. So if a\n // tensor has 3 rows, we pretend it has 4 rows in order to account for the\n // fact that the texels containing the third row are half empty.\n logShape = logShape.map((d, i) => i >= logShape.length - 2 ?\n util.nearestLargerEven(logShape[i]) :\n logShape[i]);\n // Packed texture height is at least 2 (the channel height of a single\n // texel).\n if (logShape.length === 1) {\n logShape = [2, logShape[0]];\n }\n }\n // If logical shape is 2, we don't squeeze, since we want to match physical.\n if (logShape.length !== 2) {\n const squeezeResult = util.squeezeShape(logShape);\n logShape = squeezeResult.newShape;\n }\n let size = util.sizeFromShape(logShape);\n if (logShape.length <= 1 && size <= maxTexSize) {\n return [1, size];\n }\n else if (logShape.length === 2 && logShape[0] <= maxTexSize &&\n logShape[1] <= maxTexSize) {\n return logShape;\n }\n else if (logShape.length === 3 && logShape[0] * logShape[1] <= maxTexSize &&\n logShape[2] <= maxTexSize) {\n return [logShape[0] * logShape[1], logShape[2]];\n }\n else if (logShape.length === 3 && logShape[0] <= maxTexSize &&\n logShape[1] * logShape[2] <= maxTexSize) {\n return [logShape[0], logShape[1] * logShape[2]];\n }\n else if (logShape.length === 4 &&\n logShape[0] * logShape[1] * logShape[2] <= maxTexSize &&\n logShape[3] <= maxTexSize) {\n return [logShape[0] * logShape[1] * logShape[2], logShape[3]];\n }\n else if (logShape.length === 4 && logShape[0] <= maxTexSize &&\n logShape[1] * logShape[2] * logShape[3] <= maxTexSize) {\n return [logShape[0], logShape[1] * logShape[2] * logShape[3]];\n }\n else {\n if (isPacked) {\n // For packed textures size equals the number of channels required to\n // accommodate the texture data. However in order to squarify such that\n // inner dimensions stay even, we rewrite size to equal the number of\n // texels. 
Then in the return statement we rehydrate the squarified\n // dimensions to channel units.\n const batchDim = getBatchDim(logShape);\n let rows = 2, cols = 2;\n if (logShape.length) {\n [rows, cols] = getRowsCols(logShape);\n }\n size = batchDim * (rows / 2) * (cols / 2);\n return util.sizeToSquarishShape(size).map(d => d * 2);\n }\n return util.sizeToSquarishShape(size);\n }\n}\nfunction isEven(n) {\n return n % 2 === 0;\n}\n/**\n * This determines whether reshaping a packed texture requires rearranging\n * the data within the texture, assuming 2x2 packing.\n */\nexport function isReshapeFree(shape1, shape2) {\n shape1 = shape1.slice(-2);\n shape2 = shape2.slice(-2);\n if (util.arraysEqual(shape1, shape2)) {\n return true;\n }\n if (!shape1.length || !shape2.length) { // One of the shapes is a scalar.\n return true;\n }\n if (shape1[0] === 0 || shape1[1] === 0 || shape2[0] === 0 ||\n shape2[1] === 0) {\n return true;\n }\n if (shape1.length !== shape2.length) { // One of the shapes is a vector.\n const shape1Cols = shape1.slice(-1)[0];\n const shape2Cols = shape2.slice(-1)[0];\n if (shape1Cols === shape2Cols) {\n return true;\n }\n if (isEven(shape1Cols) && isEven(shape2Cols) &&\n (shape1[0] === 1 || shape2[0] === 1)) {\n return true;\n }\n }\n return shape1[1] === shape2[1] && isEven(shape1[0]) && isEven(shape2[0]);\n}\n// We cache webgl params because the environment gets reset between\n// unit tests and we don't want to constantly query the WebGLContext for\n// MAX_TEXTURE_SIZE.\nlet MAX_TEXTURE_SIZE;\nlet MAX_TEXTURES_IN_SHADER;\nexport function getWebGLMaxTextureSize(webGLVersion) {\n if (MAX_TEXTURE_SIZE == null) {\n const gl = getWebGLContext(webGLVersion);\n MAX_TEXTURE_SIZE = gl.getParameter(gl.MAX_TEXTURE_SIZE);\n }\n return MAX_TEXTURE_SIZE;\n}\nexport function resetMaxTextureSize() {\n MAX_TEXTURE_SIZE = null;\n}\nexport function resetMaxTexturesInShader() {\n MAX_TEXTURES_IN_SHADER = null;\n}\nexport function getMaxTexturesInShader(webGLVersion) {\n if (MAX_TEXTURES_IN_SHADER == null) {\n const gl = getWebGLContext(webGLVersion);\n MAX_TEXTURES_IN_SHADER = gl.getParameter(gl.MAX_TEXTURE_IMAGE_UNITS);\n }\n // We cap at 16 to avoid spurious runtime \"memory exhausted\" error.\n return Math.min(16, MAX_TEXTURES_IN_SHADER);\n}\nexport function getWebGLDisjointQueryTimerVersion(webGLVersion) {\n if (webGLVersion === 0) {\n return 0;\n }\n let queryTimerVersion;\n const gl = getWebGLContext(webGLVersion);\n if (hasExtension(gl, 'EXT_disjoint_timer_query_webgl2') &&\n webGLVersion === 2) {\n queryTimerVersion = 2;\n }\n else if (hasExtension(gl, 'EXT_disjoint_timer_query')) {\n queryTimerVersion = 1;\n }\n else {\n queryTimerVersion = 0;\n }\n return queryTimerVersion;\n}\nexport function hasExtension(gl, extensionName) {\n const ext = gl.getExtension(extensionName);\n return ext != null;\n}\nexport function isWebGLVersionEnabled(webGLVersion) {\n try {\n const gl = getWebGLContext(webGLVersion);\n if (gl != null) {\n return true;\n }\n }\n catch (e) {\n console.log('Error when getting WebGL context: ', e);\n return false;\n }\n return false;\n}\nexport function isCapableOfRenderingToFloatTexture(webGLVersion) {\n if (webGLVersion === 0) {\n return false;\n }\n const gl = getWebGLContext(webGLVersion);\n if (webGLVersion === 1) {\n if (!hasExtension(gl, 'OES_texture_float')) {\n return false;\n }\n }\n else {\n if (!hasExtension(gl, 'EXT_color_buffer_float')) {\n return false;\n }\n }\n const isFrameBufferComplete = createFloatTextureAndBindToFramebuffer(gl);\n return 
isFrameBufferComplete;\n}\n/**\n * Check if we can download values from a float/half-float texture.\n *\n * Note that for performance reasons we use binding a texture to a framebuffer\n * as a proxy for ability to download float values later using readPixels. The\n * texture params of this texture will not match those in readPixels exactly\n * but if we are unable to bind some kind of float texture to the frameBuffer\n * then we definitely will not be able to read float values from it.\n */\nexport function isDownloadFloatTextureEnabled(webGLVersion) {\n if (webGLVersion === 0) {\n return false;\n }\n const gl = getWebGLContext(webGLVersion);\n if (webGLVersion === 1) {\n if (!hasExtension(gl, 'OES_texture_float')) {\n return false;\n }\n if (!hasExtension(gl, 'WEBGL_color_buffer_float')) {\n return false;\n }\n }\n else {\n if (hasExtension(gl, 'EXT_color_buffer_float')) {\n return createFloatTextureAndBindToFramebuffer(gl);\n }\n const COLOR_BUFFER_HALF_FLOAT = 'EXT_color_buffer_half_float';\n if (hasExtension(gl, COLOR_BUFFER_HALF_FLOAT)) {\n const textureHalfFloatExtension = gl.getExtension(COLOR_BUFFER_HALF_FLOAT);\n return createHalfFloatTextureAndBindToFramebuffer(gl, textureHalfFloatExtension);\n }\n return false;\n }\n const isFrameBufferComplete = createFloatTextureAndBindToFramebuffer(gl);\n return isFrameBufferComplete;\n}\nfunction createFloatTextureAndBindToFramebuffer(gl) {\n const texConfig = getTextureConfig(gl);\n const texture = gl.createTexture();\n gl.bindTexture(gl.TEXTURE_2D, texture);\n const width = 1;\n const height = 1;\n gl.texImage2D(gl.TEXTURE_2D, 0, texConfig.internalFormatFloat, width, height, 0, texConfig.textureFormatFloat, texConfig.textureTypeFloat, null);\n const frameBuffer = gl.createFramebuffer();\n gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer);\n gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);\n const isFrameBufferComplete = gl.checkFramebufferStatus(gl.FRAMEBUFFER) === gl.FRAMEBUFFER_COMPLETE;\n gl.bindTexture(gl.TEXTURE_2D, null);\n gl.bindFramebuffer(gl.FRAMEBUFFER, null);\n gl.deleteTexture(texture);\n gl.deleteFramebuffer(frameBuffer);\n return isFrameBufferComplete;\n}\nfunction createHalfFloatTextureAndBindToFramebuffer(\n// tslint:disable-next-line:no-any\ngl, textureHalfFloatExtension) {\n const texConfig = getTextureConfig(gl, textureHalfFloatExtension);\n const texture = gl.createTexture();\n gl.bindTexture(gl.TEXTURE_2D, texture);\n const width = 1;\n const height = 1;\n gl.texImage2D(gl.TEXTURE_2D, 0, texConfig.internalFormatHalfFloat, width, height, 0, texConfig.textureFormatFloat, texConfig.textureTypeHalfFloat, null);\n const frameBuffer = gl.createFramebuffer();\n gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer);\n gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);\n const isFrameBufferComplete = gl.checkFramebufferStatus(gl.FRAMEBUFFER) === gl.FRAMEBUFFER_COMPLETE;\n gl.bindTexture(gl.TEXTURE_2D, null);\n gl.bindFramebuffer(gl.FRAMEBUFFER, null);\n gl.deleteTexture(texture);\n gl.deleteFramebuffer(frameBuffer);\n return isFrameBufferComplete;\n}\nexport function isWebGLFenceEnabled(webGLVersion) {\n if (webGLVersion !== 2) {\n return false;\n }\n const gl = getWebGLContext(webGLVersion);\n // tslint:disable-next-line:no-any\n const isEnabled = gl.fenceSync != null;\n return isEnabled;\n}\nexport function assertNotComplex(tensor, opName) {\n if (!Array.isArray(tensor)) {\n tensor = [tensor];\n }\n tensor.forEach(t => {\n if (t != null) {\n 
util.assert(t.dtype !== 'complex64', () => `${opName} does not support complex64 tensors ` +\n 'in the WebGL backend.');\n }\n });\n}\n//# sourceMappingURL=webgl_util.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { device_util, env } from '@tensorflow/tfjs-core';\nimport { getMaxTexturesInShader, getWebGLDisjointQueryTimerVersion, getWebGLMaxTextureSize, isCapableOfRenderingToFloatTexture, isDownloadFloatTextureEnabled, isWebGLFenceEnabled, isWebGLVersionEnabled } from './webgl_util';\nconst ENV = env();\n/**\n * This file contains WebGL-specific flag registrations.\n */\n/**\n * True if WebGL is supported.\n */\nENV.registerFlag('HAS_WEBGL', () => ENV.getNumber('WEBGL_VERSION') > 0);\n/** 0: No WebGL, 1: WebGL 1.0, 2: WebGL 2.0. */\nENV.registerFlag('WEBGL_VERSION', () => {\n if (isWebGLVersionEnabled(2)) {\n return 2;\n }\n else if (isWebGLVersionEnabled(1)) {\n return 1;\n }\n return 0;\n});\n/** Whether to check for numerical representation problems. */\nENV.registerFlag('WEBGL_CHECK_NUMERICAL_PROBLEMS', () => false);\nENV.registerFlag('WEBGL_BUFFER_SUPPORTED', () => ENV.get('WEBGL_VERSION') === 2);\n/** Whether the WebGL backend will sometimes forward ops to the CPU. */\nENV.registerFlag('WEBGL_CPU_FORWARD', () => true);\n/** Whether the WebGL backend will always use f16 textures for rendering. */\nENV.registerFlag('WEBGL_FORCE_F16_TEXTURES', () => false);\n/** Whether to turn all packing related flags on. */\nENV.registerFlag('WEBGL_PACK', () => ENV.getBool('HAS_WEBGL'));\n/** Whether we will pack the batchnormalization op. */\nENV.registerFlag('WEBGL_PACK_NORMALIZATION', () => ENV.getBool('WEBGL_PACK'));\n/** Whether we will pack the clip op. */\nENV.registerFlag('WEBGL_PACK_CLIP', () => ENV.getBool('WEBGL_PACK'));\n/** Whether we will pack the depthwise conv op. */\n// TODO: https://github.com/tensorflow/tfjs/issues/1679\nENV.registerFlag('WEBGL_PACK_DEPTHWISECONV', () => false);\n/** Whether we will pack binary ops. */\nENV.registerFlag('WEBGL_PACK_BINARY_OPERATIONS', () => ENV.getBool('WEBGL_PACK'));\n/** Whether we will pack unary ops. */\nENV.registerFlag('WEBGL_PACK_UNARY_OPERATIONS', () => ENV.getBool('WEBGL_PACK'));\n/** Whether we will pack array ops. */\nENV.registerFlag('WEBGL_PACK_ARRAY_OPERATIONS', () => ENV.getBool('WEBGL_PACK'));\n/** Whether we will pack image ops. */\nENV.registerFlag('WEBGL_PACK_IMAGE_OPERATIONS', () => ENV.getBool('WEBGL_PACK'));\n/** Whether we will pack reduce ops. */\nENV.registerFlag('WEBGL_PACK_REDUCE', () => ENV.getBool('WEBGL_PACK'));\n/** Whether packed WebGL kernels lazily unpack their outputs. */\nENV.registerFlag('WEBGL_LAZILY_UNPACK', () => ENV.getBool('WEBGL_PACK'));\n/** Whether we will use the im2col algorithm to speed up convolutions. */\nENV.registerFlag('WEBGL_CONV_IM2COL', () => ENV.getBool('WEBGL_PACK'));\n/** The maximum texture dimension. 
*/\nENV.registerFlag('WEBGL_MAX_TEXTURE_SIZE', () => getWebGLMaxTextureSize(ENV.getNumber('WEBGL_VERSION')));\n/** The maximum texture dimension. */\nENV.registerFlag('WEBGL_MAX_TEXTURES_IN_SHADER', () => getMaxTexturesInShader(ENV.getNumber('WEBGL_VERSION')));\n/**\n * The disjoint_query_timer extension version.\n * 0: disabled, 1: EXT_disjoint_timer_query, 2:\n * EXT_disjoint_timer_query_webgl2.\n * In Firefox with WebGL 2.0,\n * EXT_disjoint_timer_query_webgl2 is not available, so we must use the\n * WebGL 1.0 extension.\n */\nENV.registerFlag('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION', () => {\n const webGLVersion = ENV.getNumber('WEBGL_VERSION');\n if (webGLVersion === 0) {\n return 0;\n }\n return getWebGLDisjointQueryTimerVersion(webGLVersion);\n});\n/**\n * Whether the timer object from the disjoint_query_timer extension gives\n * timing information that is reliable.\n */\nENV.registerFlag('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_RELIABLE', () => ENV.getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION') > 0 &&\n !device_util.isMobile());\n/**\n * Whether the device is physically capable of rendering to float32 textures.\n */\nENV.registerFlag('WEBGL_RENDER_FLOAT32_CAPABLE', () => isCapableOfRenderingToFloatTexture(ENV.getNumber('WEBGL_VERSION')));\n/**\n * Whether rendering to float32 textures is enabled. If disabled, renders to\n * float16 textures.\n */\nENV.registerFlag('WEBGL_RENDER_FLOAT32_ENABLED', () => {\n return ENV.getBool('WEBGL_FORCE_F16_TEXTURES') ?\n false :\n ENV.getBool('WEBGL_RENDER_FLOAT32_CAPABLE');\n});\n/**\n * Whether downloading float textures is enabled (16 or 32 bit). If disabled,\n * uses IEEE 754 encoding of the float32 values to 4 uint8 when downloading.\n */\nENV.registerFlag('WEBGL_DOWNLOAD_FLOAT_ENABLED', () => isDownloadFloatTextureEnabled(ENV.getNumber('WEBGL_VERSION')));\n/** Whether the fence API is available. */\nENV.registerFlag('WEBGL_FENCE_API_ENABLED', () => isWebGLFenceEnabled(ENV.getNumber('WEBGL_VERSION')));\n/**\n * Tensors with size <= than this will be uploaded as uniforms, not textures.\n */\nENV.registerFlag('WEBGL_SIZE_UPLOAD_UNIFORM', () => {\n // Use uniform uploads only when 32bit floats are supported. In\n // 16bit\n // environments there are problems with comparing a 16bit texture value\n // with a 32bit uniform value.\n const useUniforms = ENV.getBool('WEBGL_RENDER_FLOAT32_ENABLED');\n return useUniforms ? 4 : 0;\n});\n/**\n * If the total number of bytes allocated on the GPU is greater than this\n * number, we will aggressively delete textures upon disposal with\n * gl.deleteMatrixTexture, rather than making them available for reuse.\n *\n * Default value -1 indicates that we will never aggressively delete textures.\n */\nENV.registerFlag('WEBGL_DELETE_TEXTURE_THRESHOLD', () => {\n return -1;\n}, threshold => {\n if (threshold < 0 && threshold !== -1) {\n throw new Error(`WEBGL_DELETE_TEXTURE_THRESHOLD must be -1 (indicating never ` +\n `delete) or at least 0, but got ${threshold}.`);\n }\n});\n//# sourceMappingURL=flags_webgl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// Import shared functionality from tfjs-backend-cpu without triggering\n// side effects.\n// tslint:disable-next-line: no-imports-from-dist\nimport * as shared from '@tensorflow/tfjs-backend-cpu/dist/shared';\nconst { simpleAbsImpl: simpleAbsImplCPU, addImpl: addImplCPU, ceilImpl: ceilImplCPU, expImpl: expImplCPU, expm1Impl: expm1ImplCPU, floorImpl: floorImplCPU, logImpl: logImplCPU, maxImpl: maxImplCPU, multiplyImpl: multiplyImplCPU, rsqrtImpl: rsqrtImplCPU, sliceImpl: sliceImplCPU, subImpl: subImplCPU, transposeImpl: transposeImplCPU, uniqueImpl: uniqueImplCPU, } = shared;\nexport { simpleAbsImplCPU, addImplCPU, ceilImplCPU, expImplCPU, expm1ImplCPU, logImplCPU, multiplyImplCPU, sliceImplCPU, subImplCPU, floorImplCPU, maxImplCPU, rsqrtImplCPU, transposeImplCPU, uniqueImplCPU, };\n//# sourceMappingURL=shared.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class AddNProgram {\n constructor(outputShape, shapes) {\n this.outputShape = [];\n this.outputShape = outputShape;\n this.variableNames = shapes.map((_, i) => `T${i}`);\n const snippets = [];\n // Get target elements from every input tensor.\n this.variableNames.forEach(variable => {\n snippets.push(`float v${variable} = get${variable}AtOutCoords();`);\n });\n // Calculate the sum of all elements.\n const operation = this.variableNames\n .map(variable => {\n return `v${variable}`;\n })\n .join(' + ');\n this.userCode = `\n void main() {\n ${snippets.join('\\n ')}\n\n float result = ${operation};\n setOutput(result);\n }\n `;\n }\n}\n//# sourceMappingURL=addn_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class AddNPackedProgram {\n constructor(outputShape, shapes) {\n this.outputShape = [];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = outputShape;\n this.variableNames = shapes.map((_, i) => `T${i}`);\n const snippets = [];\n // Get target elements from every input tensor.\n this.variableNames.forEach(variable => {\n snippets.push(`vec4 v${variable} = get${variable}AtOutCoords();`);\n });\n // Calculate the sum of all elements.\n const operation = this.variableNames\n .map(variable => {\n return `v${variable}`;\n })\n .join(' + ');\n this.userCode = `\n void main() {\n ${snippets.join('\\n ')}\n\n vec4 result = ${operation};\n setOutput(result);\n }\n `;\n }\n}\n//# sourceMappingURL=addn_packed_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class ArgMinMaxProgram {\n constructor(reduceInfo, op, firstPass) {\n this.variableNames = ['A'];\n const { windowSize, batchSize, outSize } = reduceInfo;\n if (!firstPass) {\n this.variableNames.push('bestIndicesA');\n }\n this.outputShape = [batchSize, outSize];\n const compOp = (op === 'max') ? '>' : '<';\n const indexSnippet = firstPass ?\n 'inOffset + i;' :\n 'round(getBestIndicesA(batch, inOffset + i));';\n this.userCode = `\n void main() {\n ivec2 coords = getOutputCoords();\n int batch = coords[0];\n int outIdx = coords[1];\n int inOffset = outIdx * ${windowSize};\n\n int bestIndex = inOffset;\n float bestValue = getA(batch, bestIndex);\n\n for (int i = 0; i < ${windowSize}; i++) {\n int inIdx = ${indexSnippet};\n float candidate = getA(batch, inIdx);\n if (candidate ${compOp} bestValue) {\n bestValue = candidate;\n bestIndex = inIdx;\n }\n }\n setOutput(float(bestIndex));\n }\n `;\n }\n}\n//# sourceMappingURL=argminmax_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport function getVecChannels(name, rank) {\n return ['x', 'y', 'z', 'w', 'u', 'v'].slice(0, rank).map(d => `${name}.${d}`);\n}\nexport function getChannels(name, rank) {\n if (rank === 1) {\n return [name];\n }\n return getVecChannels(name, rank);\n}\nexport function getSourceCoords(rank, dims) {\n if (rank === 1) {\n return 'rc';\n }\n let coords = '';\n for (let i = 0; i < rank; i++) {\n coords += dims[i];\n if (i < rank - 1) {\n coords += ',';\n }\n }\n return coords;\n}\n//# sourceMappingURL=packing_util.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from '@tensorflow/tfjs-core';\nexport function getGlslDifferences() {\n let version;\n let attribute;\n let varyingVs;\n let varyingFs;\n let texture2D;\n let output;\n let defineOutput;\n let defineSpecialNaN;\n let defineSpecialInf;\n let defineRound;\n if (env().getNumber('WEBGL_VERSION') === 2) {\n version = '#version 300 es';\n attribute = 'in';\n varyingVs = 'out';\n varyingFs = 'in';\n texture2D = 'texture';\n output = 'outputColor';\n defineOutput = 'out vec4 outputColor;';\n // Use custom isnan definition to work across differences between\n // implementations on various platforms. While this should happen in ANGLE\n // we still see differences between android and windows (on chrome) when\n // using isnan directly.\n defineSpecialNaN = `\n bool isnan_custom(float val) {\n return (val > 0.0 || val < 0.0) ? 
false : val != 0.0;\n }\n\n bvec4 isnan_custom(vec4 val) {\n return bvec4(isnan_custom(val.x),\n isnan_custom(val.y), isnan_custom(val.z), isnan_custom(val.w));\n }\n\n #define isnan(value) isnan_custom(value)\n `;\n // In webgl 2 we do not need to specify a custom isinf so there is no\n // need for a special INFINITY constant.\n defineSpecialInf = ``;\n defineRound = `\n #define round(value) newRound(value)\n int newRound(float value) {\n return int(floor(value + 0.5));\n }\n\n ivec4 newRound(vec4 value) {\n return ivec4(floor(value + vec4(0.5)));\n }\n `;\n }\n else {\n version = '';\n attribute = 'attribute';\n varyingVs = 'varying';\n varyingFs = 'varying';\n texture2D = 'texture2D';\n output = 'gl_FragColor';\n defineOutput = '';\n // WebGL1 has no built in isnan so we define one here.\n defineSpecialNaN = `\n #define isnan(value) isnan_custom(value)\n bool isnan_custom(float val) {\n return (val > 0. || val < 1. || val == 0.) ? false : true;\n }\n bvec4 isnan_custom(vec4 val) {\n return bvec4(isnan(val.x), isnan(val.y), isnan(val.z), isnan(val.w));\n }\n `;\n defineSpecialInf = `\n uniform float INFINITY;\n\n bool isinf(float val) {\n return abs(val) == INFINITY;\n }\n bvec4 isinf(vec4 val) {\n return equal(abs(val), vec4(INFINITY));\n }\n `;\n defineRound = `\n int round(float value) {\n return int(floor(value + 0.5));\n }\n\n ivec4 round(vec4 value) {\n return ivec4(floor(value + vec4(0.5)));\n }\n `;\n }\n return {\n version,\n attribute,\n varyingVs,\n varyingFs,\n texture2D,\n output,\n defineOutput,\n defineSpecialNaN,\n defineSpecialInf,\n defineRound\n };\n}\n//# sourceMappingURL=glsl_version.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\n/**\n * Produces GLSL code that derives logical coordinates from a flat\n * index. The code performs integer division with each stride and decrements\n * the index until the index equals the final dimension coordinate.\n */\nexport function getLogicalCoordinatesFromFlatIndex(coords, shape, index = 'index') {\n const strides = util.computeStrides(shape);\n return strides\n .map((stride, i) => {\n const line1 = `int ${coords[i]} = ${index} / ${stride}`;\n const line2 = i === strides.length - 1 ?\n `int ${coords[i + 1]} = ${index} - ${coords[i]} * ${stride}` :\n `index -= ${coords[i]} * ${stride}`;\n return `${line1}; ${line2};`;\n })\n .join('');\n}\nfunction buildVec(x) {\n if (x.length === 1) {\n return `${x[0]}`;\n }\n return `vec${x.length}(${x.join(',')})`;\n}\n/**\n * Produces GLSL code that computes the dot product of the input x and y\n * vectors. 
Handles splitting inputs into increments of vec4s when necessary.\n */\nexport function dotify(x, y) {\n if (x.length !== y.length) {\n throw new Error(`Vectors to be dotted must be of the same length -` +\n `got ${x.length} and ${y.length}`);\n }\n const slices = [];\n const nearestVec4 = Math.floor(x.length / 4);\n const nearestVec4Remainder = x.length % 4;\n for (let i = 0; i < nearestVec4; i++) {\n const xSlice = x.slice(i * 4, i * 4 + 4);\n const ySlice = y.slice(i * 4, i * 4 + 4);\n slices.push(`${buildVec(xSlice)}, ${buildVec(ySlice)}`);\n }\n if (nearestVec4Remainder !== 0) {\n let xSlice = x.slice(nearestVec4 * 4);\n let ySlice = y.slice(nearestVec4 * 4);\n if (xSlice.length === 1) {\n xSlice = xSlice.map(d => `float(${d})`);\n ySlice = ySlice.map(d => `float(${d})`);\n }\n slices.push(`${buildVec(xSlice)}, ${buildVec(ySlice)}`);\n }\n return slices.map((d, i) => `dot(${d})`).join('+');\n}\n/**\n * Produces GLSL that computes the flat index from 3D coordinates.\n */\nexport function getFlatIndexFrom3D(shape) {\n const strides = util.computeStrides(shape).map(d => d.toString());\n return `\n int getFlatIndex(ivec3 coords) {\n return coords.x * ${strides[0]} + coords.y * ${strides[1]} + coords.z;\n }\n`;\n}\nexport const ENCODE_FLOAT_SNIPPET = `\n const float FLOAT_MAX = 1.70141184e38;\n const float FLOAT_MIN = 1.17549435e-38;\n\n lowp vec4 encode_float(highp float v) {\n if (isnan(v)) {\n return vec4(255, 255, 255, 255);\n }\n\n highp float av = abs(v);\n\n if(av < FLOAT_MIN) {\n return vec4(0.0, 0.0, 0.0, 0.0);\n } else if(v > FLOAT_MAX) {\n return vec4(0.0, 0.0, 128.0, 127.0) / 255.0;\n } else if(v < -FLOAT_MAX) {\n return vec4(0.0, 0.0, 128.0, 255.0) / 255.0;\n }\n\n highp vec4 c = vec4(0,0,0,0);\n\n highp float e = floor(log2(av));\n highp float m = exp2(fract(log2(av))) - 1.0;\n\n c[2] = floor(128.0 * m);\n m -= c[2] / 128.0;\n c[1] = floor(32768.0 * m);\n m -= c[1] / 32768.0;\n c[0] = floor(8388608.0 * m);\n\n highp float ebias = e + 127.0;\n c[3] = floor(ebias / 2.0);\n ebias -= c[3] * 2.0;\n c[2] += floor(ebias) * 128.0;\n\n c[3] += 128.0 * step(0.0, -v);\n\n return c / 255.0;\n }\n`;\n//# sourceMappingURL=shader_compiler_util.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, util } from '@tensorflow/tfjs-core';\nconst { getBroadcastDims } = backend_util;\nimport { getGlslDifferences } from './glsl_version';\nimport * as shader_util from './shader_compiler_util';\nexport function makeShader(inputsInfo, outputShape, userCode, usesPackedTextures) {\n const prefixSnippets = [];\n inputsInfo.forEach(x => {\n const size = util.sizeFromShape(x.shapeInfo.logicalShape);\n // Snippet when we decided to upload the values as uniform.\n if (x.shapeInfo.isUniform) {\n prefixSnippets.push(`uniform float ${x.name}${size > 1 ? 
`[${size}]` : ''};`);\n }\n else {\n prefixSnippets.push(`uniform sampler2D ${x.name};`);\n prefixSnippets.push(`uniform int offset${x.name};`);\n }\n });\n const inputPrefixSnippet = prefixSnippets.join('\\n');\n const inputSamplingSnippet = inputsInfo\n .map(x => getInputSamplingSnippet(x, outputShape, usesPackedTextures))\n .join('\\n');\n const outTexShape = outputShape.texShape;\n const glsl = getGlslDifferences();\n const floatTextureSampleSnippet = getFloatTextureSampleSnippet(glsl);\n let outputSamplingSnippet;\n let floatTextureSetOutputSnippet;\n let shaderPrefix = getShaderPrefix(glsl);\n if (outputShape.isPacked) {\n outputSamplingSnippet =\n getPackedOutputSamplingSnippet(outputShape.logicalShape, outTexShape);\n floatTextureSetOutputSnippet = getFloatTextureSetRGBASnippet(glsl);\n }\n else {\n outputSamplingSnippet =\n getOutputSamplingSnippet(outputShape.logicalShape, outTexShape);\n floatTextureSetOutputSnippet = getFloatTextureSetRSnippet(glsl);\n }\n if (usesPackedTextures) {\n shaderPrefix += SHADER_PACKED_PREFIX;\n }\n const source = [\n shaderPrefix, floatTextureSampleSnippet, floatTextureSetOutputSnippet,\n inputPrefixSnippet, outputSamplingSnippet, inputSamplingSnippet, userCode\n ].join('\\n');\n return source;\n}\nfunction getSamplerFromInInfo(inInfo) {\n const shape = inInfo.shapeInfo.logicalShape;\n switch (shape.length) {\n case 0:\n return getSamplerScalar(inInfo);\n case 1:\n return getSampler1D(inInfo);\n case 2:\n return getSampler2D(inInfo);\n case 3:\n return getSampler3D(inInfo);\n case 4:\n return getSampler4D(inInfo);\n case 5:\n return getSampler5D(inInfo);\n case 6:\n return getSampler6D(inInfo);\n default:\n throw new Error(`${shape.length}-D input sampling` +\n ` is not yet supported`);\n }\n}\nfunction getPackedSamplerFromInInfo(inInfo) {\n const shape = inInfo.shapeInfo.logicalShape;\n switch (shape.length) {\n case 0:\n return getPackedSamplerScalar(inInfo);\n case 1:\n return getPackedSampler1D(inInfo);\n case 2:\n return getPackedSampler2D(inInfo);\n case 3:\n return getPackedSampler3D(inInfo);\n default:\n return getPackedSamplerND(inInfo);\n }\n}\nfunction getInputSamplingSnippet(inInfo, outShapeInfo, usesPackedTextures = false) {\n let res = '';\n if (usesPackedTextures) {\n res += getPackedSamplerFromInInfo(inInfo);\n }\n else {\n res += getSamplerFromInInfo(inInfo);\n }\n const inShape = inInfo.shapeInfo.logicalShape;\n const outShape = outShapeInfo.logicalShape;\n if (inShape.length <= outShape.length) {\n if (usesPackedTextures) {\n res += getPackedSamplerAtOutputCoords(inInfo, outShapeInfo);\n }\n else {\n res += getSamplerAtOutputCoords(inInfo, outShapeInfo);\n }\n }\n return res;\n}\nfunction getPackedOutputSamplingSnippet(outShape, outTexShape) {\n switch (outShape.length) {\n case 0:\n return getOutputScalarCoords();\n case 1:\n return getOutputPacked1DCoords(outShape, outTexShape);\n case 2:\n return getOutputPacked2DCoords(outShape, outTexShape);\n case 3:\n return getOutputPacked3DCoords(outShape, outTexShape);\n default:\n return getOutputPackedNDCoords(outShape, outTexShape);\n }\n}\nfunction getOutputSamplingSnippet(outShape, outTexShape) {\n switch (outShape.length) {\n case 0:\n return getOutputScalarCoords();\n case 1:\n return getOutput1DCoords(outShape, outTexShape);\n case 2:\n return getOutput2DCoords(outShape, outTexShape);\n case 3:\n return getOutput3DCoords(outShape, outTexShape);\n case 4:\n return getOutput4DCoords(outShape, outTexShape);\n case 5:\n return getOutput5DCoords(outShape, outTexShape);\n case 6:\n 
return getOutput6DCoords(outShape, outTexShape);\n default:\n throw new Error(`${outShape.length}-D output sampling is not yet supported`);\n }\n}\nfunction getFloatTextureSampleSnippet(glsl) {\n return `\n float sampleTexture(sampler2D textureSampler, vec2 uv) {\n return ${glsl.texture2D}(textureSampler, uv).r;\n }\n `;\n}\nfunction getFloatTextureSetRSnippet(glsl) {\n return `\n void setOutput(float val) {\n ${glsl.output} = vec4(val, 0, 0, 0);\n }\n `;\n}\nfunction getFloatTextureSetRGBASnippet(glsl) {\n return `\n void setOutput(vec4 val) {\n ${glsl.output} = val;\n }\n `;\n}\nfunction getShaderPrefix(glsl) {\n const SHADER_PREFIX = `${glsl.version}\n precision highp float;\n precision highp int;\n precision highp sampler2D;\n ${glsl.varyingFs} vec2 resultUV;\n ${glsl.defineOutput}\n const vec2 halfCR = vec2(0.5, 0.5);\n\n struct ivec5\n {\n int x;\n int y;\n int z;\n int w;\n int u;\n };\n\n struct ivec6\n {\n int x;\n int y;\n int z;\n int w;\n int u;\n int v;\n };\n\n uniform float NAN;\n ${glsl.defineSpecialNaN}\n ${glsl.defineSpecialInf}\n ${glsl.defineRound}\n\n int imod(int x, int y) {\n return x - y * (x / y);\n }\n\n int idiv(int a, int b, float sign) {\n int res = a / b;\n int mod = imod(a, b);\n if (sign < 0. && mod != 0) {\n res -= 1;\n }\n return res;\n }\n\n //Based on the work of Dave Hoskins\n //https://www.shadertoy.com/view/4djSRW\n #define HASHSCALE1 443.8975\n float random(float seed){\n vec2 p = resultUV * seed;\n vec3 p3 = fract(vec3(p.xyx) * HASHSCALE1);\n p3 += dot(p3, p3.yzx + 19.19);\n return fract((p3.x + p3.y) * p3.z);\n }\n\n ${SAMPLE_1D_SNIPPET}\n ${SAMPLE_2D_SNIPPET}\n ${SAMPLE_3D_SNIPPET}\n `;\n return SHADER_PREFIX;\n}\nconst SAMPLE_1D_SNIPPET = `\nvec2 uvFromFlat(int texNumR, int texNumC, int index) {\n int texR = index / texNumC;\n int texC = index - texR * texNumC;\n return (vec2(texC, texR) + halfCR) / vec2(texNumC, texNumR);\n}\nvec2 packedUVfrom1D(int texNumR, int texNumC, int index) {\n int texelIndex = index / 2;\n int texR = texelIndex / texNumC;\n int texC = texelIndex - texR * texNumC;\n return (vec2(texC, texR) + halfCR) / vec2(texNumC, texNumR);\n}\n`;\nconst SAMPLE_2D_SNIPPET = `\nvec2 packedUVfrom2D(int texelsInLogicalRow, int texNumR,\n int texNumC, int row, int col) {\n int texelIndex = (row / 2) * texelsInLogicalRow + (col / 2);\n int texR = texelIndex / texNumC;\n int texC = texelIndex - texR * texNumC;\n return (vec2(texC, texR) + halfCR) / vec2(texNumC, texNumR);\n}\n`;\nconst SAMPLE_3D_SNIPPET = `\nvec2 packedUVfrom3D(int texNumR, int texNumC,\n int texelsInBatch, int texelsInLogicalRow, int b,\n int row, int col) {\n int index = b * texelsInBatch + (row / 2) * texelsInLogicalRow + (col / 2);\n int texR = index / texNumC;\n int texC = index - texR * texNumC;\n return (vec2(texC, texR) + halfCR) / vec2(texNumC, texNumR);\n}\n`;\nconst SHADER_PACKED_PREFIX = `\n float getChannel(vec4 frag, vec2 innerDims) {\n vec2 modCoord = mod(innerDims, 2.);\n return modCoord.x == 0. ?\n (modCoord.y == 0. ? frag.r : frag.g) :\n (modCoord.y == 0. ? frag.b : frag.a);\n }\n float getChannel(vec4 frag, int dim) {\n float modCoord = mod(float(dim), 2.);\n return modCoord == 0. ? 
frag.r : frag.g;\n }\n`;\nfunction getOutputScalarCoords() {\n return `\n int getOutputCoords() {\n return 0;\n }\n `;\n}\nfunction getOutputPacked1DCoords(shape, texShape) {\n const packedTexShape = [Math.ceil(texShape[0] / 2), Math.ceil(texShape[1] / 2)];\n if (packedTexShape[0] === 1) {\n return `\n int getOutputCoords() {\n return 2 * int(resultUV.x * ${packedTexShape[1]}.0);\n }\n `;\n }\n if (packedTexShape[1] === 1) {\n return `\n int getOutputCoords() {\n return 2 * int(resultUV.y * ${packedTexShape[0]}.0);\n }\n `;\n }\n return `\n int getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${packedTexShape[0]}, ${packedTexShape[1]}));\n return 2 * (resTexRC.x * ${packedTexShape[1]} + resTexRC.y);\n }\n `;\n}\nfunction getOutput1DCoords(shape, texShape) {\n if (texShape[0] === 1) {\n return `\n int getOutputCoords() {\n return int(resultUV.x * ${texShape[1]}.0);\n }\n `;\n }\n if (texShape[1] === 1) {\n return `\n int getOutputCoords() {\n return int(resultUV.y * ${texShape[0]}.0);\n }\n `;\n }\n return `\n int getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${texShape[0]}, ${texShape[1]}));\n return resTexRC.x * ${texShape[1]} + resTexRC.y;\n }\n `;\n}\nfunction getOutputPacked3DCoords(shape, texShape) {\n const packedTexShape = [Math.ceil(texShape[0] / 2), Math.ceil(texShape[1] / 2)];\n const texelsInLogicalRow = Math.ceil(shape[2] / 2);\n const texelsInBatch = texelsInLogicalRow * Math.ceil(shape[1] / 2);\n return `\n ivec3 getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${packedTexShape[0]}, ${packedTexShape[1]}));\n int index = resTexRC.x * ${packedTexShape[1]} + resTexRC.y;\n\n int b = index / ${texelsInBatch};\n index -= b * ${texelsInBatch};\n\n int r = 2 * (index / ${texelsInLogicalRow});\n int c = imod(index, ${texelsInLogicalRow}) * 2;\n\n return ivec3(b, r, c);\n }\n `;\n}\nfunction getOutput3DCoords(shape, texShape) {\n const coordsFromIndexSnippet = shader_util.getLogicalCoordinatesFromFlatIndex(['r', 'c', 'd'], shape);\n return `\n ivec3 getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = resTexRC.x * ${texShape[1]} + resTexRC.y;\n ${coordsFromIndexSnippet}\n return ivec3(r, c, d);\n }\n `;\n}\nfunction getOutputPackedNDCoords(shape, texShape) {\n const packedTexShape = [Math.ceil(texShape[0] / 2), Math.ceil(texShape[1] / 2)];\n const texelsInLogicalRow = Math.ceil(shape[shape.length - 1] / 2);\n const texelsInBatch = texelsInLogicalRow * Math.ceil(shape[shape.length - 2] / 2);\n let texelsInBatchN = texelsInBatch;\n let batches = ``;\n let coords = 'b, r, c';\n for (let b = 2; b < shape.length - 1; b++) {\n texelsInBatchN *= shape[shape.length - b - 1];\n batches = `\n int b${b} = index / ${texelsInBatchN};\n index -= b${b} * ${texelsInBatchN};\n ` + batches;\n coords = `b${b}, ` + coords;\n }\n return `\n ivec${shape.length} getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${packedTexShape[0]}, ${packedTexShape[1]}));\n int index = resTexRC.x * ${packedTexShape[1]} + resTexRC.y;\n\n ${batches}\n\n int b = index / ${texelsInBatch};\n index -= b * ${texelsInBatch};\n\n int r = 2 * (index / ${texelsInLogicalRow});\n int c = imod(index, ${texelsInLogicalRow}) * 2;\n\n return ivec${shape.length}(${coords});\n }\n `;\n}\nfunction getOutput4DCoords(shape, texShape) {\n const coordsFromIndexSnippet = shader_util.getLogicalCoordinatesFromFlatIndex(['r', 'c', 'd', 'd2'], shape);\n return `\n ivec4 getOutputCoords() {\n ivec2 resTexRC = 
ivec2(resultUV.yx *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = resTexRC.x * ${texShape[1]} + resTexRC.y;\n ${coordsFromIndexSnippet}\n return ivec4(r, c, d, d2);\n }\n `;\n}\nfunction getOutput5DCoords(shape, texShape) {\n const coordsFromIndexSnippet = shader_util.getLogicalCoordinatesFromFlatIndex(['r', 'c', 'd', 'd2', 'd3'], shape);\n return `\n ivec5 getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx * vec2(${texShape[0]},\n ${texShape[1]}));\n\n int index = resTexRC.x * ${texShape[1]} + resTexRC.y;\n\n ${coordsFromIndexSnippet}\n\n ivec5 outShape = ivec5(r, c, d, d2, d3);\n return outShape;\n }\n `;\n}\nfunction getOutput6DCoords(shape, texShape) {\n const coordsFromIndexSnippet = shader_util.getLogicalCoordinatesFromFlatIndex(['r', 'c', 'd', 'd2', 'd3', 'd4'], shape);\n return `\n ivec6 getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = resTexRC.x * ${texShape[1]} + resTexRC.y;\n\n ${coordsFromIndexSnippet}\n\n ivec6 result = ivec6(r, c, d, d2, d3, d4);\n return result;\n }\n `;\n}\nfunction getOutputPacked2DCoords(shape, texShape) {\n const packedTexShape = [Math.ceil(texShape[0] / 2), Math.ceil(texShape[1] / 2)];\n if (util.arraysEqual(shape, texShape)) {\n return `\n ivec2 getOutputCoords() {\n return 2 * ivec2(resultUV.yx * vec2(${packedTexShape[0]}, ${packedTexShape[1]}));\n }\n `;\n }\n // texels needed to accommodate a logical row\n const texelsInLogicalRow = Math.ceil(shape[1] / 2);\n /**\n * getOutputCoords\n *\n * resTexRC: The rows and columns of the texels. If you move over one\n * texel to the right in the packed texture, you are moving over one column\n * (not two).\n *\n * index: The texel index\n */\n return `\n ivec2 getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${packedTexShape[0]}, ${packedTexShape[1]}));\n\n int index = resTexRC.x * ${packedTexShape[1]} + resTexRC.y;\n int r = 2 * (index / ${texelsInLogicalRow});\n int c = imod(index, ${texelsInLogicalRow}) * 2;\n\n return ivec2(r, c);\n }\n `;\n}\nfunction getOutput2DCoords(shape, texShape) {\n if (util.arraysEqual(shape, texShape)) {\n return `\n ivec2 getOutputCoords() {\n return ivec2(resultUV.yx * vec2(${texShape[0]}, ${texShape[1]}));\n }\n `;\n }\n if (shape[1] === 1) {\n return `\n ivec2 getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = resTexRC.x * ${texShape[1]} + resTexRC.y;\n return ivec2(index, 0);\n }\n `;\n }\n if (shape[0] === 1) {\n return `\n ivec2 getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = resTexRC.x * ${texShape[1]} + resTexRC.y;\n return ivec2(0, index);\n }\n `;\n }\n return `\n ivec2 getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = resTexRC.x * ${texShape[1]} + resTexRC.y;\n int r = index / ${shape[1]};\n int c = index - r * ${shape[1]};\n return ivec2(r, c);\n }\n `;\n}\nfunction getFlatOffsetUniformName(texName) {\n return `offset${texName}`;\n}\nfunction getPackedSamplerScalar(inputInfo) {\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n const glsl = getGlslDifferences();\n return `\n vec4 ${funcName}() {\n return ${glsl.texture2D}(${texName}, halfCR);\n }\n `;\n}\nfunction getSamplerScalar(inputInfo) {\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n if 
(inputInfo.shapeInfo.isUniform) {\n return `float ${funcName}() {return ${texName};}`;\n }\n const [texNumR, texNumC] = inputInfo.shapeInfo.texShape;\n if (texNumR === 1 && texNumC === 1) {\n return `\n float ${funcName}() {\n return sampleTexture(${texName}, halfCR);\n }\n `;\n }\n const [tNumR, tNumC] = inputInfo.shapeInfo.texShape;\n const offset = getFlatOffsetUniformName(texName);\n return `\n float ${funcName}() {\n vec2 uv = uvFromFlat(${tNumR}, ${tNumC}, ${offset});\n return sampleTexture(${texName}, uv);\n }\n `;\n}\nfunction getPackedSampler1D(inputInfo) {\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n const texShape = inputInfo.shapeInfo.texShape;\n const packedTexShape = [Math.ceil(texShape[0] / 2), Math.ceil(texShape[1] / 2)];\n const glsl = getGlslDifferences();\n return `\n vec4 ${funcName}(int index) {\n vec2 uv = packedUVfrom1D(\n ${packedTexShape[0]}, ${packedTexShape[1]}, index);\n return ${glsl.texture2D}(${texName}, uv);\n }\n `;\n}\nfunction getSampler1D(inputInfo) {\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n if (inputInfo.shapeInfo.isUniform) {\n // Uniform arrays will be less than 65505 (no risk of float16 overflow).\n return `\n float ${funcName}(int index) {\n ${getUniformSampler(inputInfo)}\n }\n `;\n }\n const texShape = inputInfo.shapeInfo.texShape;\n const tNumR = texShape[0];\n const tNumC = texShape[1];\n if (tNumC === 1 && tNumR === 1) {\n return `\n float ${funcName}(int index) {\n return sampleTexture(${texName}, halfCR);\n }\n `;\n }\n const offset = getFlatOffsetUniformName(texName);\n if (tNumC === 1) {\n return `\n float ${funcName}(int index) {\n vec2 uv = vec2(0.5, (float(index + ${offset}) + 0.5) / ${tNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n if (tNumR === 1) {\n return `\n float ${funcName}(int index) {\n vec2 uv = vec2((float(index + ${offset}) + 0.5) / ${tNumC}.0, 0.5);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n return `\n float ${funcName}(int index) {\n vec2 uv = uvFromFlat(${tNumR}, ${tNumC}, index + ${offset});\n return sampleTexture(${texName}, uv);\n }\n `;\n}\nfunction getPackedSampler2D(inputInfo) {\n const shape = inputInfo.shapeInfo.logicalShape;\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n const texShape = inputInfo.shapeInfo.texShape;\n const texNumR = texShape[0];\n const texNumC = texShape[1];\n const glsl = getGlslDifferences();\n if (texShape != null && util.arraysEqual(shape, texShape)) {\n return `\n vec4 ${funcName}(int row, int col) {\n vec2 uv = (vec2(col, row) + halfCR) / vec2(${texNumC}.0, ${texNumR}.0);\n\n return ${glsl.texture2D}(${texName}, uv);\n }\n `;\n }\n const packedTexShape = [Math.ceil(texShape[0] / 2), Math.ceil(texShape[1] / 2)];\n const valuesPerRow = Math.ceil(shape[1] / 2);\n return `\n vec4 ${funcName}(int row, int col) {\n vec2 uv = packedUVfrom2D(${valuesPerRow}, ${packedTexShape[0]}, ${packedTexShape[1]}, row, col);\n return ${glsl.texture2D}(${texName}, uv);\n }\n `;\n}\nfunction getSampler2D(inputInfo) {\n const shape = inputInfo.shapeInfo.logicalShape;\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n const texShape = inputInfo.shapeInfo.texShape;\n if (texShape != null && util.arraysEqual(shape, texShape)) {\n const texNumR = texShape[0];\n const texNumC = texShape[1];\n return `\n float 
${funcName}(int row, int col) {\n vec2 uv = (vec2(col, row) + halfCR) / vec2(${texNumC}.0, ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n const { newShape, keptDims } = util.squeezeShape(shape);\n const squeezedShape = newShape;\n if (squeezedShape.length < shape.length) {\n const newInputInfo = squeezeInputInfo(inputInfo, squeezedShape);\n const params = ['row', 'col'];\n return `\n ${getSamplerFromInInfo(newInputInfo)}\n float ${funcName}(int row, int col) {\n return ${funcName}(${getSqueezedParams(params, keptDims)});\n }\n `;\n }\n if (inputInfo.shapeInfo.isUniform) {\n // Uniform arrays will be less than 65505 (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col) {\n int index = round(dot(vec2(row, col), vec2(${shape[1]}, 1)));\n ${getUniformSampler(inputInfo)}\n }\n `;\n }\n const texNumR = texShape[0];\n const texNumC = texShape[1];\n const offset = getFlatOffsetUniformName(texName);\n if (texNumC === 1) {\n // index is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col) {\n float index = dot(vec3(row, col, ${offset}), vec3(${shape[1]}, 1, 1));\n vec2 uv = vec2(0.5, (index + 0.5) / ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n if (texNumR === 1) {\n // index is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col) {\n float index = dot(vec3(row, col, ${offset}), vec3(${shape[1]}, 1, 1));\n vec2 uv = vec2((index + 0.5) / ${texNumC}.0, 0.5);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n return `\n float ${funcName}(int row, int col) {\n // Explicitly use integer operations as dot() only works on floats.\n int index = row * ${shape[1]} + col + ${offset};\n vec2 uv = uvFromFlat(${texNumR}, ${texNumC}, index);\n return sampleTexture(${texName}, uv);\n }\n`;\n}\nfunction getPackedSampler3D(inputInfo) {\n const shape = inputInfo.shapeInfo.logicalShape;\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n const texShape = inputInfo.shapeInfo.texShape;\n const packedTexShape = [Math.ceil(texShape[0] / 2), Math.ceil(texShape[1] / 2)];\n if (shape[0] === 1) {\n const squeezedShape = shape.slice(1);\n const keptDims = [1, 2];\n const newInputInfo = squeezeInputInfo(inputInfo, squeezedShape);\n const params = ['b', 'row', 'col'];\n return `\n ${getPackedSamplerFromInInfo(newInputInfo)}\n vec4 ${funcName}(int b, int row, int col) {\n return ${funcName}(${getSqueezedParams(params, keptDims)});\n }\n `;\n }\n const texNumR = packedTexShape[0];\n const texNumC = packedTexShape[1];\n const valuesPerRow = Math.ceil(shape[2] / 2);\n const texelsInBatch = valuesPerRow * Math.ceil(shape[1] / 2);\n const glsl = getGlslDifferences();\n return `\n vec4 ${funcName}(int b, int row, int col) {\n vec2 uv = packedUVfrom3D(\n ${texNumR}, ${texNumC}, ${texelsInBatch}, ${valuesPerRow}, b, row, col);\n return ${glsl.texture2D}(${texName}, uv);\n }\n `;\n}\nfunction getSampler3D(inputInfo) {\n const shape = inputInfo.shapeInfo.logicalShape;\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n const stride0 = shape[1] * shape[2];\n const stride1 = shape[2];\n const { newShape, keptDims } = util.squeezeShape(shape);\n const squeezedShape = newShape;\n if (squeezedShape.length < shape.length) {\n const newInputInfo = squeezeInputInfo(inputInfo, squeezedShape);\n const params = ['row', 'col', 'depth'];\n return `\n 
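// The dense samplers above funnel the general case through uvFromFlat(texNumR, texNumC, index),
// which lives in the shared shader prelude rather than in this chunk. Judging by its call
// sites it converts a flat element index into the normalized center of the physical texel
// holding that element; a hedged CPU-side equivalent (an assumption, not the prelude source):
function uvFromFlat(texNumR, texNumC, index) {
  const texR = Math.floor(index / texNumC);
  const texC = index - texR * texNumC;
  // The +0.5 samples the texel center (the shader's halfCR), normalized to [0, 1).
  return [(texC + 0.5) / texNumC, (texR + 0.5) / texNumR];
}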
${getSamplerFromInInfo(newInputInfo)}\n float ${funcName}(int row, int col, int depth) {\n return ${funcName}(${getSqueezedParams(params, keptDims)});\n }\n `;\n }\n if (inputInfo.shapeInfo.isUniform) {\n // Uniform arrays will be less than 65505 (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth) {\n int index = round(dot(vec3(row, col, depth),\n vec3(${stride0}, ${stride1}, 1)));\n ${getUniformSampler(inputInfo)}\n }\n `;\n }\n const texShape = inputInfo.shapeInfo.texShape;\n const texNumR = texShape[0];\n const texNumC = texShape[1];\n const flatOffset = inputInfo.shapeInfo.flatOffset;\n if (texNumC === stride0 && flatOffset == null) {\n // texC is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth) {\n float texR = float(row);\n float texC = dot(vec2(col, depth), vec2(${stride1}, 1));\n vec2 uv = (vec2(texC, texR) + halfCR) /\n vec2(${texNumC}.0, ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n if (texNumC === stride1 && flatOffset == null) {\n // texR is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth) {\n float texR = dot(vec2(row, col), vec2(${shape[1]}, 1));\n float texC = float(depth);\n vec2 uv = (vec2(texC, texR) + halfCR) / vec2(${texNumC}.0, ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n const offset = getFlatOffsetUniformName(texName);\n return `\n float ${funcName}(int row, int col, int depth) {\n // Explicitly use integer operations as dot() only works on floats.\n int index = row * ${stride0} + col * ${stride1} + depth + ${offset};\n vec2 uv = uvFromFlat(${texNumR}, ${texNumC}, index);\n return sampleTexture(${texName}, uv);\n }\n `;\n}\nfunction getPackedSamplerND(inputInfo) {\n const shape = inputInfo.shapeInfo.logicalShape;\n const rank = shape.length;\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n const texShape = inputInfo.shapeInfo.texShape;\n const packedTexShape = [Math.ceil(texShape[0] / 2), Math.ceil(texShape[1] / 2)];\n const texNumR = packedTexShape[0];\n const texNumC = packedTexShape[1];\n const valuesPerRow = Math.ceil(shape[rank - 1] / 2);\n let texelsInBatch = valuesPerRow * Math.ceil(shape[rank - 2] / 2);\n let params = `int b, int row, int col`;\n let index = `b * ${texelsInBatch} + (row / 2) * ${valuesPerRow} + (col / 2)`;\n for (let b = 2; b < rank - 1; b++) {\n params = `int b${b}, ` + params;\n texelsInBatch *= shape[rank - b - 1];\n index = `b${b} * ${texelsInBatch} + ` + index;\n }\n const glsl = getGlslDifferences();\n return `\n vec4 ${funcName}(${params}) {\n int index = ${index};\n int texR = index / ${texNumC};\n int texC = index - texR * ${texNumC};\n vec2 uv = (vec2(texC, texR) + halfCR) / vec2(${texNumC}, ${texNumR});\n return ${glsl.texture2D}(${texName}, uv);\n }\n `;\n}\nfunction getSampler4D(inputInfo) {\n const shape = inputInfo.shapeInfo.logicalShape;\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n const stride2 = shape[3];\n const stride1 = shape[2] * stride2;\n const stride0 = shape[1] * stride1;\n const { newShape, keptDims } = util.squeezeShape(shape);\n if (newShape.length < shape.length) {\n const newInputInfo = squeezeInputInfo(inputInfo, newShape);\n const params = ['row', 'col', 'depth', 'depth2'];\n return `\n ${getSamplerFromInInfo(newInputInfo)}\n float ${funcName}(int row, int 
col, int depth, int depth2) {\n return ${funcName}(${getSqueezedParams(params, keptDims)});\n }\n `;\n }\n if (inputInfo.shapeInfo.isUniform) {\n // Uniform arrays will be less than 65505 (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth, int depth2) {\n int index = round(dot(vec4(row, col, depth, depth2),\n vec4(${stride0}, ${stride1}, ${stride2}, 1)));\n ${getUniformSampler(inputInfo)}\n }\n `;\n }\n const flatOffset = inputInfo.shapeInfo.flatOffset;\n const texShape = inputInfo.shapeInfo.texShape;\n const texNumR = texShape[0];\n const texNumC = texShape[1];\n if (texNumC === stride0 && flatOffset == null) {\n // texC is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth, int depth2) {\n float texR = float(row);\n float texC =\n dot(vec3(col, depth, depth2),\n vec3(${stride1}, ${stride2}, 1));\n vec2 uv = (vec2(texC, texR) + halfCR) /\n vec2(${texNumC}.0, ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n if (texNumC === stride2 && flatOffset == null) {\n // texR is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth, int depth2) {\n float texR = dot(vec3(row, col, depth),\n vec3(${shape[1] * shape[2]}, ${shape[2]}, 1));\n float texC = float(depth2);\n vec2 uv = (vec2(texC, texR) + halfCR) /\n vec2(${texNumC}.0, ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n const offset = getFlatOffsetUniformName(texName);\n return `\n float ${funcName}(int row, int col, int depth, int depth2) {\n // Explicitly use integer operations as dot() only works on floats.\n int index = row * ${stride0} + col * ${stride1} +\n depth * ${stride2} + depth2;\n vec2 uv = uvFromFlat(${texNumR}, ${texNumC}, index + ${offset});\n return sampleTexture(${texName}, uv);\n }\n `;\n}\nfunction getSampler5D(inputInfo) {\n const shape = inputInfo.shapeInfo.logicalShape;\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n const stride3 = shape[4];\n const stride2 = shape[3] * stride3;\n const stride1 = shape[2] * stride2;\n const stride0 = shape[1] * stride1;\n const { newShape, keptDims } = util.squeezeShape(shape);\n if (newShape.length < shape.length) {\n const newInputInfo = squeezeInputInfo(inputInfo, newShape);\n const params = ['row', 'col', 'depth', 'depth2', 'depth3'];\n return `\n ${getSamplerFromInInfo(newInputInfo)}\n float ${funcName}(int row, int col, int depth, int depth2, int depth3) {\n return ${funcName}(${getSqueezedParams(params, keptDims)});\n }\n `;\n }\n if (inputInfo.shapeInfo.isUniform) {\n // Uniform arrays will be less than 65505 (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth, int depth2, int depth3) {\n float index = dot(\n vec4(row, col, depth, depth2),\n vec4(${stride0}, ${stride1}, ${stride2}, ${stride3})) +\n depth3;\n ${getUniformSampler(inputInfo)}\n }\n `;\n }\n const flatOffset = inputInfo.shapeInfo.flatOffset;\n const texShape = inputInfo.shapeInfo.texShape;\n const texNumR = texShape[0];\n const texNumC = texShape[1];\n if (texNumC === stride0 && flatOffset == null) {\n // texC is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth, int depth2, int depth3) {\n int texR = row;\n float texC = dot(vec4(col, depth, depth2, depth3),\n vec4(${stride1}, ${stride2}, ${stride3}, 1));\n vec2 uv = (vec2(texC, texR) + halfCR) 
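// getSampler4D/5D/6D above bake row-major strides into the generated GLSL so a coordinate
// tuple collapses to one flat index (plus the optional flat offset uniform). The same
// arithmetic on the CPU for rank 4, as an illustration only:
function flatIndex4D([row, col, depth, depth2], shape) {
  const stride2 = shape[3];
  const stride1 = shape[2] * stride2;
  const stride0 = shape[1] * stride1;
  return row * stride0 + col * stride1 + depth * stride2 + depth2;
}
// e.g. flatIndex4D([1, 0, 2, 3], [2, 4, 5, 6]) === 1*120 + 0*30 + 2*6 + 3 === 135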
/\n vec2(${texNumC}.0, ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n if (texNumC === stride3 && flatOffset == null) {\n // texR is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth, int depth2, int depth3) {\n float texR = dot(\n vec4(row, col, depth, depth2),\n vec4(${shape[1] * shape[2] * shape[3]},\n ${shape[2] * shape[3]}, ${shape[3]}, 1));\n int texC = depth3;\n vec2 uv = (vec2(texC, texR) + halfCR) /\n vec2(${texNumC}.0, ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n const offset = getFlatOffsetUniformName(texName);\n return `\n float ${funcName}(int row, int col, int depth, int depth2, int depth3) {\n // Explicitly use integer operations as dot() only works on floats.\n int index = row * ${stride0} + col * ${stride1} + depth * ${stride2} +\n depth2 * ${stride3} + depth3 + ${offset};\n vec2 uv = uvFromFlat(${texNumR}, ${texNumC}, index);\n return sampleTexture(${texName}, uv);\n }\n `;\n}\nfunction getSampler6D(inputInfo) {\n const shape = inputInfo.shapeInfo.logicalShape;\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n const { newShape, keptDims } = util.squeezeShape(shape);\n if (newShape.length < shape.length) {\n const newInputInfo = squeezeInputInfo(inputInfo, newShape);\n const params = ['row', 'col', 'depth', 'depth2', 'depth3', 'depth4'];\n return `\n ${getSamplerFromInInfo(newInputInfo)}\n float ${funcName}(int row, int col, int depth,\n int depth2, int depth3, int depth4) {\n return ${funcName}(${getSqueezedParams(params, keptDims)});\n }\n `;\n }\n const stride4 = shape[5];\n const stride3 = shape[4] * stride4;\n const stride2 = shape[3] * stride3;\n const stride1 = shape[2] * stride2;\n const stride0 = shape[1] * stride1;\n if (inputInfo.shapeInfo.isUniform) {\n // Uniform arrays will be less than 65505 (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth,\n int depth2, int depth3, int depth4) {\n int index = round(dot(\n vec4(row, col, depth, depth2),\n vec4(${stride0}, ${stride1}, ${stride2}, ${stride3})) +\n dot(\n vec2(depth3, depth4),\n vec2(${stride4}, 1)));\n ${getUniformSampler(inputInfo)}\n }\n `;\n }\n const flatOffset = inputInfo.shapeInfo.flatOffset;\n const texShape = inputInfo.shapeInfo.texShape;\n const texNumR = texShape[0];\n const texNumC = texShape[1];\n if (texNumC === stride0 && flatOffset == null) {\n // texC is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth,\n int depth2, int depth3, int depth4) {\n int texR = row;\n float texC = dot(vec4(col, depth, depth2, depth3),\n vec4(${stride1}, ${stride2}, ${stride3}, ${stride4})) +\n float(depth4);\n vec2 uv = (vec2(texC, texR) + halfCR) /\n vec2(${texNumC}.0, ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n if (texNumC === stride4 && flatOffset == null) {\n // texR is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth,\n int depth2, int depth3, int depth4) {\n float texR = dot(vec4(row, col, depth, depth2),\n vec4(${shape[1] * shape[2] * shape[3] * shape[4]},\n ${shape[2] * shape[3] * shape[4]},\n ${shape[3] * shape[4]},\n ${shape[4]})) + float(depth3);\n int texC = depth4;\n vec2 uv = (vec2(texC, texR) + halfCR) /\n vec2(${texNumC}.0, ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n const offset = 
getFlatOffsetUniformName(texName);\n return `\n float ${funcName}(int row, int col, int depth,\n int depth2, int depth3, int depth4) {\n // Explicitly use integer operations as dot() only works on floats.\n int index = row * ${stride0} + col * ${stride1} + depth * ${stride2} +\n depth2 * ${stride3} + depth3 * ${stride4} + depth4 + ${offset};\n vec2 uv = uvFromFlat(${texNumR}, ${texNumC}, index);\n return sampleTexture(${texName}, uv);\n }\n `;\n}\nfunction getUniformSampler(inputInfo) {\n const texName = inputInfo.name;\n const inSize = util.sizeFromShape(inputInfo.shapeInfo.logicalShape);\n if (inSize < 2) {\n return `return ${texName};`;\n }\n return `\n for (int i = 0; i < ${inSize}; i++) {\n if (i == index) {\n return ${texName}[i];\n }\n }\n `;\n}\nfunction getPackedSamplerAtOutputCoords(inputInfo, outShapeInfo) {\n const texName = inputInfo.name;\n const texFuncSnippet = texName.charAt(0).toUpperCase() + texName.slice(1);\n const funcName = 'get' + texFuncSnippet + 'AtOutCoords';\n const inRank = inputInfo.shapeInfo.logicalShape.length;\n const outRank = outShapeInfo.logicalShape.length;\n const broadcastDims = getBroadcastDims(inputInfo.shapeInfo.logicalShape, outShapeInfo.logicalShape);\n const type = getCoordsDataType(outRank);\n const rankDiff = outRank - inRank;\n let coordsSnippet;\n const fields = ['x', 'y', 'z', 'w', 'u', 'v'];\n if (inRank === 0) {\n coordsSnippet = '';\n }\n else if (outRank < 2 && broadcastDims.length >= 1) {\n coordsSnippet = 'coords = 0;';\n }\n else {\n coordsSnippet =\n broadcastDims.map(d => `coords.${fields[d + rankDiff]} = 0;`)\n .join('\\n');\n }\n let unpackedCoordsSnippet = '';\n if (outRank < 2 && inRank > 0) {\n unpackedCoordsSnippet = 'coords';\n }\n else {\n unpackedCoordsSnippet = inputInfo.shapeInfo.logicalShape\n .map((s, i) => `coords.${fields[i + rankDiff]}`)\n .join(', ');\n }\n let output = `return outputValue;`;\n const inSize = util.sizeFromShape(inputInfo.shapeInfo.logicalShape);\n const isInputScalar = inSize === 1;\n const outSize = util.sizeFromShape(outShapeInfo.logicalShape);\n const isOutputScalar = outSize === 1;\n if (inRank === 1 && !isInputScalar && !isOutputScalar) {\n output = `\n return vec4(outputValue.xy, outputValue.xy);\n `;\n }\n else if (isInputScalar && !isOutputScalar) {\n if (outRank === 1) {\n output = `\n return vec4(outputValue.x, outputValue.x, 0., 0.);\n `;\n }\n else {\n output = `\n return vec4(outputValue.x);\n `;\n }\n }\n else if (broadcastDims.length) {\n const rows = inRank - 2;\n const cols = inRank - 1;\n if (broadcastDims.indexOf(rows) > -1 && broadcastDims.indexOf(cols) > -1) {\n output = `return vec4(outputValue.x);`;\n }\n else if (broadcastDims.indexOf(rows) > -1) {\n output = `return vec4(outputValue.x, outputValue.y, ` +\n `outputValue.x, outputValue.y);`;\n }\n else if (broadcastDims.indexOf(cols) > -1) {\n output = `return vec4(outputValue.xx, outputValue.zz);`;\n }\n }\n return `\n vec4 ${funcName}() {\n ${type} coords = getOutputCoords();\n ${coordsSnippet}\n vec4 outputValue = get${texFuncSnippet}(${unpackedCoordsSnippet});\n ${output}\n }\n `;\n}\nfunction getSamplerAtOutputCoords(inputInfo, outShapeInfo) {\n const texName = inputInfo.name;\n const texFuncSnippet = texName.charAt(0).toUpperCase() + texName.slice(1);\n const funcName = 'get' + texFuncSnippet + 'AtOutCoords';\n const outTexShape = outShapeInfo.texShape;\n const inTexShape = inputInfo.shapeInfo.texShape;\n const inRank = inputInfo.shapeInfo.logicalShape.length;\n const outRank = outShapeInfo.logicalShape.length;\n if 
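// The get*AtOutCoords samplers above handle broadcasting by zeroing the output coordinates
// that correspond to size-1 input axes before delegating to the plain sampler, so a broadcast
// axis always reads element 0. A sketch of what getBroadcastDims plausibly computes, aligned
// from the right as in NumPy-style broadcasting (an assumption based on its use here, not the
// tfjs-core source):
function broadcastDims(inShape, outShape) {
  const dims = [];
  for (let i = 0; i < inShape.length; i++) {
    const inDim = inShape[inShape.length - 1 - i];
    const outDim = outShape[outShape.length - 1 - i];
    if (inDim === 1 && outDim > 1) dims.unshift(inShape.length - 1 - i);
  }
  return dims; // e.g. broadcastDims([1, 4], [3, 4]) -> [0]
}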
(!inputInfo.shapeInfo.isUniform && inRank === outRank &&\n inputInfo.shapeInfo.flatOffset == null &&\n util.arraysEqual(inTexShape, outTexShape)) {\n return `\n float ${funcName}() {\n return sampleTexture(${texName}, resultUV);\n }\n `;\n }\n const type = getCoordsDataType(outRank);\n const broadcastDims = getBroadcastDims(inputInfo.shapeInfo.logicalShape, outShapeInfo.logicalShape);\n const rankDiff = outRank - inRank;\n let coordsSnippet;\n const fields = ['x', 'y', 'z', 'w', 'u', 'v'];\n if (inRank === 0) {\n coordsSnippet = '';\n }\n else if (outRank < 2 && broadcastDims.length >= 1) {\n coordsSnippet = 'coords = 0;';\n }\n else {\n coordsSnippet =\n broadcastDims.map(d => `coords.${fields[d + rankDiff]} = 0;`)\n .join('\\n');\n }\n let unpackedCoordsSnippet = '';\n if (outRank < 2 && inRank > 0) {\n unpackedCoordsSnippet = 'coords';\n }\n else {\n unpackedCoordsSnippet = inputInfo.shapeInfo.logicalShape\n .map((s, i) => `coords.${fields[i + rankDiff]}`)\n .join(', ');\n }\n return `\n float ${funcName}() {\n ${type} coords = getOutputCoords();\n ${coordsSnippet}\n return get${texFuncSnippet}(${unpackedCoordsSnippet});\n }\n `;\n}\nexport function getCoordsDataType(rank) {\n if (rank <= 1) {\n return 'int';\n }\n else if (rank === 2) {\n return 'ivec2';\n }\n else if (rank === 3) {\n return 'ivec3';\n }\n else if (rank === 4) {\n return 'ivec4';\n }\n else if (rank === 5) {\n return 'ivec5';\n }\n else if (rank === 6) {\n return 'ivec6';\n }\n else {\n throw Error(`GPU for rank ${rank} is not yet supported`);\n }\n}\n/** Returns a new input info (a copy) that has a squeezed logical shape. */\nfunction squeezeInputInfo(inInfo, squeezedShape) {\n // Deep copy.\n const newInputInfo = JSON.parse(JSON.stringify(inInfo));\n newInputInfo.shapeInfo.logicalShape = squeezedShape;\n return newInputInfo;\n}\nfunction getSqueezedParams(params, keptDims) {\n return keptDims.map(d => params[d]).join(', ');\n}\n//# sourceMappingURL=shader_compiler.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nimport { getChannels } from './packing_util';\nimport { getCoordsDataType } from './shader_compiler';\nexport class ArgMinMaxPackedProgram {\n constructor(shape, windowSize, op, firstPass) {\n this.variableNames = ['A'];\n this.packedInputs = true;\n this.packedOutput = true;\n util.assert(shape.length > 2, () => `Packed arg${op.charAt(0).toUpperCase() +\n op.slice(1)} supports only inputs with rank above 2.`);\n const inSize = shape[shape.length - 1];\n const outSize = Math.ceil(inSize / windowSize);\n this.outputShape = shape.slice(0, -1);\n if (outSize > 1) {\n this.outputShape.push(outSize);\n }\n if (!firstPass) {\n this.variableNames.push('bestIndicesA');\n }\n const outShape = this.outputShape;\n const rank = outShape.length;\n const dtype = getCoordsDataType(rank);\n const coords = getChannels('coords', rank);\n let sourceLocSetup;\n let sourceRank;\n if (outSize === 1) {\n sourceRank = rank + 1;\n const sourceLocDType = getCoordsDataType(sourceRank);\n sourceLocSetup = `\n ${sourceLocDType} sourceLocR = ${sourceLocDType}(${coords.join()}, 0);\n ++${coords[rank - 1]};\n ${sourceLocDType} sourceLocG = ${sourceLocDType}(${coords.join()}, 0);\n ++${coords[rank - 2]};\n ${sourceLocDType} sourceLocA = ${sourceLocDType}(${coords.join()}, 0);\n --${coords[rank - 1]};\n ${sourceLocDType} sourceLocB = ${sourceLocDType}(${coords.join()}, 0);\n --${coords[rank - 2]};`;\n }\n else {\n sourceRank = rank;\n sourceLocSetup = `\n ${dtype} sourceLocR = coords;\n ++${coords[rank - 1]};\n ${dtype} sourceLocG = coords;\n ++${coords[rank - 2]};\n ${dtype} sourceLocA = coords;\n --${coords[rank - 1]};\n ${dtype} sourceLocB = coords;\n --${coords[rank - 2]};`;\n }\n const channels = ['x', 'y', 'z', 'w', 'u', 'v'].slice(0, sourceRank);\n const inChannel = '.' + channels[sourceRank - 1]; // e.g. \".b\" for rank 3.\n const intChannels = channels.map(x => 'int ' + x);\n const srcRCoords = getChannels('sourceLocR', sourceRank - 1).concat('inIdx.r');\n const srcGCoords = getChannels('sourceLocG', sourceRank - 1).concat('inIdx.g');\n const srcBCoords = getChannels('sourceLocB', sourceRank - 1).concat('inIdx.b');\n const srcACoords = getChannels('sourceLocA', sourceRank - 1).concat('inIdx.a');\n const compOp = (op === 'max') ? 'greaterThan' : 'lessThan';\n const fetchCandidateIdx = firstPass ? '' : `\n inIdx = round(vec4(getBestIndicesAChannel(${srcRCoords.join()}),\n getBestIndicesAChannel(${srcGCoords.join()}),\n getBestIndicesAChannel(${srcBCoords.join()}),\n getBestIndicesAChannel(${srcACoords.join()})));`;\n const fetchValue = `vec4(\n getAChannel(${srcRCoords.join()}),\n hasNextCol ? getAChannel(${srcGCoords.join()}) : 0.,\n hasNextRow ? getAChannel(${srcBCoords.join()}) : 0.,\n hasNextRow && hasNextCol ? getAChannel(${srcACoords.join()}) : 0.)`;\n const getBestIndicesAChannelSnippet = firstPass ? 
'' : `\n float getBestIndicesAChannel(${intChannels.join()}) {\n return getChannel(getBestIndicesA(${channels.join()}),\n vec2(${channels.slice(-2).join()}));\n }`;\n this.userCode = `\n float getAChannel(${intChannels.join()}) {\n return getChannel(getA(${channels.join()}),\n vec2(${channels.slice(-2).join()}));\n }\n ${getBestIndicesAChannelSnippet}\n void main() {\n ${dtype} coords = getOutputCoords();\n bool hasNextCol = ${coords[rank - 1]} < ${outShape[rank - 1] - 1};\n bool hasNextRow = ${coords[rank - 2]} < ${outShape[rank - 2] - 1};\n ${sourceLocSetup}\n ivec4 srcIdx = ivec4(sourceLocR${inChannel}, sourceLocG${inChannel},\n sourceLocB${inChannel}, sourceLocA${inChannel}) * ${windowSize};\n ivec4 inIdx = srcIdx;\n vec4 bestIndex = vec4(inIdx);\n vec4 bestValue = ${fetchValue};\n\n for (int i = 0; i < ${windowSize}; i++) {\n inIdx = srcIdx;\n ${fetchCandidateIdx}\n vec4 candidate = ${fetchValue};\n bvec4 nan = isnan(candidate);\n bvec4 replace = bvec4(\n vec4(${compOp}(candidate, bestValue)) * (vec4(1.0) - vec4(nan)));\n\n bestValue = vec4(replace.x ? candidate.x : bestValue.x,\n replace.y ? candidate.y : bestValue.y,\n replace.z ? candidate.z : bestValue.z,\n replace.w ? candidate.w : bestValue.w);\n bestIndex = mix(bestIndex, vec4(inIdx), vec4(replace));\n srcIdx++;\n }\n setOutput(bestIndex);\n }\n `;\n }\n}\n//# sourceMappingURL=argminmax_packed_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class AvgPool2DBackpropProgram {\n constructor(convInfo) {\n this.variableNames = ['dy'];\n this.outputShape = convInfo.inShape;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padTop = effectiveFilterHeight - 1 - convInfo.padInfo.top;\n const padLeft = effectiveFilterWidth - 1 - convInfo.padInfo.left;\n const avgMultiplier = 1 / (filterHeight * filterWidth);\n this.userCode = `\n const ivec2 pads = ivec2(${padTop}, ${padLeft});\n const float avgMultiplier = float(${avgMultiplier});\n\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int d = coords[3];\n\n ivec2 dyRCCorner = coords.yz - pads;\n int dyRCorner = dyRCCorner.x;\n int dyCCorner = dyRCCorner.y;\n\n // Convolve dy(?, ?, d) with pos mask(:, :, d) to get dx(xR, xC, d).\n // ? = to be determined. 
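// ArgMinMaxPackedProgram above reduces the last axis in windows of `windowSize`; when more
// than one pass is needed, the winning indices are carried between passes through the
// bestIndicesA input. A scalar CPU reference for a single max pass over one row, for
// intuition only (edge cases such as an all-NaN window are handled differently on the GPU):
function argMaxWindowed(row, windowSize) {
  const outSize = Math.ceil(row.length / windowSize);
  const bestIndices = [];
  for (let o = 0; o < outSize; o++) {
    let bestValue = -Infinity;
    let bestIndex = o * windowSize;
    for (let w = 0; w < windowSize; w++) {
      const i = o * windowSize + w;
      if (i >= row.length || Number.isNaN(row[i])) continue; // NaN candidates never win, as in the shader
      if (row[i] > bestValue) {
        bestValue = row[i];
        bestIndex = i;
      }
    }
    bestIndices.push(bestIndex);
  }
  return bestIndices;
}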
: = across all values in that axis.\n float dotProd = 0.0;\n for (int wR = 0; wR < ${effectiveFilterHeight};\n wR += ${dilationHeight}) {\n float dyR = float(dyRCorner + wR) / ${strideHeight}.0;\n\n if (dyR < 0.0 || dyR >= ${convInfo.outHeight}.0 || fract(dyR) > 0.0) {\n continue;\n }\n int idyR = int(dyR);\n\n for (int wC = 0; wC < ${effectiveFilterWidth};\n wC+= ${dilationWidth}) {\n float dyC = float(dyCCorner + wC) / ${strideWidth}.0;\n\n if (dyC < 0.0 || dyC >= ${convInfo.outWidth}.0 ||\n fract(dyC) > 0.0) {\n continue;\n }\n int idyC = int(dyC);\n\n float dyValue = getDy(b, idyR, idyC, d);\n\n dotProd += dyValue * avgMultiplier;\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\nexport class AvgPool3DBackpropProgram {\n constructor(convInfo) {\n this.variableNames = ['dy'];\n this.outputShape = convInfo.inShape;\n const filterDepth = convInfo.filterDepth;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const strideDepth = convInfo.strideDepth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationDepth = convInfo.dilationDepth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterDepth = convInfo.effectiveFilterDepth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padFront = effectiveFilterDepth - 1 - convInfo.padInfo.front;\n const padTop = effectiveFilterHeight - 1 - convInfo.padInfo.top;\n const padLeft = effectiveFilterWidth - 1 - convInfo.padInfo.left;\n const avgMultiplier = 1 / (filterDepth * filterHeight * filterWidth);\n this.userCode = `\n const ivec3 pads = ivec3(${padFront}, ${padTop}, ${padLeft});\n const float avgMultiplier = float(${avgMultiplier});\n\n void main() {\n ivec5 coords = getOutputCoords();\n int batch = coords.x;\n int ch = coords.u;\n\n ivec3 dyCorner = ivec3(coords.y, coords.z, coords.w) - pads;\n int dyDCorner = dyCorner.x;\n int dyRCorner = dyCorner.y;\n int dyCCorner = dyCorner.z;\n\n // Convolve dy(?, ?, ?, d) with pos mask(:, :, :, ch) to get\n // dx(xD, xR, xC, ch).\n // ? = to be determined. : = across all values in that axis.\n float dotProd = 0.0;\n\n for (int wD = 0; wD < ${effectiveFilterDepth};\n wD += ${dilationDepth}) {\n float dyD = float(dyDCorner + wD) / ${strideDepth}.0;\n\n if (dyD < 0.0 || dyD >= ${convInfo.outDepth}.0 || fract(dyD) > 0.0) {\n continue;\n }\n int idyD = int(dyD);\n\n for (int wR = 0; wR < ${effectiveFilterHeight};\n wR += ${dilationHeight}) {\n float dyR = float(dyRCorner + wR) / ${strideHeight}.0;\n\n if (dyR < 0.0 || dyR >= ${convInfo.outHeight}.0 ||\n fract(dyR) > 0.0) {\n continue;\n }\n int idyR = int(dyR);\n\n for (int wC = 0; wC < ${effectiveFilterWidth};\n wC += ${dilationWidth}) {\n float dyC = float(dyCCorner + wC) / ${strideWidth}.0;\n\n if (dyC < 0.0 || dyC >= ${convInfo.outWidth}.0 ||\n fract(dyC) > 0.0) {\n continue;\n }\n int idyC = int(dyC);\n\n float dyValue = getDy(batch, idyD, idyR, idyC, ch);\n\n dotProd += dyValue * avgMultiplier;\n }\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\n//# sourceMappingURL=avg_pool_backprop_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
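// AvgPool2DBackpropProgram above spreads each upstream gradient dy[r][c] evenly over the
// window that produced it; from the input's point of view that means summing every dy whose
// window covered the cell and scaling by 1/(filterH*filterW), which is what the stride
// division plus fract() checks select. A single-channel CPU reference, for intuition only
// (padTop/padLeft are the forward-pass paddings; dilation is assumed to be 1):
function avgPool2dBackprop(dy, inH, inW, filterH, filterW, strideH, strideW, padTop, padLeft) {
  const dx = Array.from({ length: inH }, () => new Array(inW).fill(0));
  const avgMultiplier = 1 / (filterH * filterW);
  for (let yR = 0; yR < dy.length; yR++) {
    for (let yC = 0; yC < dy[0].length; yC++) {
      for (let wR = 0; wR < filterH; wR++) {
        for (let wC = 0; wC < filterW; wC++) {
          const xR = yR * strideH - padTop + wR;
          const xC = yC * strideW - padLeft + wC;
          if (xR < 0 || xR >= inH || xC < 0 || xC >= inW) continue;
          dx[xR][xC] += dy[yR][yC] * avgMultiplier;
        }
      }
    }
  }
  return dx;
}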
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util } from '@tensorflow/tfjs-core';\nconst CHECK_NAN_SNIPPET = `\n if (isnan(a)) return a;\n if (isnan(b)) return b;\n`;\n// We use native integer division to deal with floating point imprecision. Since\n// we implement floor division and glsl implements truncated division, we\n// correct for this by subtracting 1 from result when the result is negative and\n// there is a remainder.\nexport const INT_DIV = `\n float s = sign(a) * sign(b);\n int ia = round(a);\n int ib = round(b);\n if (ib != 0) {\n // Windows (D3D) wants guaranteed non-zero int division at compile-time.\n return float(idiv(ia, ib, s));\n } else {\n return NAN;\n }\n`;\nexport const POW = `\nif(a < 0.0 && floor(b) < b){\n return NAN;\n}\nif (b == 0.0) {\n return 1.0;\n}\nreturn (round(mod(b, 2.0)) != 1) ?\n pow(abs(a), b) : sign(a) * pow(abs(a), b);\n`;\nexport const SQUARED_DIFFERENCE = 'return (a - b) * (a - b);';\nexport const EQUAL = `return float(a == b);`;\nexport const LESS = `return float(a < b);`;\nexport const LESS_EQUAL = `return float(a <= b);`;\nexport const GREATER = `return float(a > b);`;\nexport const GREATER_EQUAL = `return float(a >= b);`;\nexport const LOGICAL_AND = `return float(a >= 1.0 && b >= 1.0);`;\nexport const LOGICAL_OR = `return float(a >= 1.0 || b >= 1.0);`;\nexport const MAX = CHECK_NAN_SNIPPET + `\n return max(a, b);\n`;\nexport const MIN = CHECK_NAN_SNIPPET + `\n return min(a, b);\n`;\nexport const MOD = `if (b == 0.0) return NAN;\n return mod(a, b);`;\nexport const ELU_DER = `return (b >= 1.0) ? a : a * (b + 1.0);`;\nexport const PRELU = `return (a < 0.) ? b * a : a;`;\nexport class BinaryOpProgram {\n constructor(op, aShape, bShape) {\n this.variableNames = ['A', 'B'];\n this.outputShape = backend_util.assertAndGetBroadcastShape(aShape, bShape);\n this.userCode = `\n float binaryOperation(float a, float b) {\n ${op}\n }\n\n void main() {\n float a = getAAtOutCoords();\n float b = getBAtOutCoords();\n setOutput(binaryOperation(a, b));\n }\n `;\n }\n}\n//# sourceMappingURL=binaryop_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
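// INT_DIV above builds floor division on top of GLSL's truncate-toward-zero integer division:
// when the operands have opposite signs and there is a remainder, the truncated quotient is
// one too large, so idiv (defined in the shader prelude, not shown in this chunk) subtracts 1.
// A JS sketch of that correction, assuming that conventional behaviour for idiv:
function floorDiv(a, b) {
  let res = Math.trunc(a / b); // what GLSL's `/` yields for ints
  if (a % b !== 0 && Math.sign(a) !== Math.sign(b)) {
    res -= 1; // floor division rounds toward -Infinity instead
  }
  return res;
}
// e.g. Math.trunc(-7 / 2) === -3, but floorDiv(-7, 2) === -4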
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, util } from '@tensorflow/tfjs-core';\nimport { getChannels } from './packing_util';\nimport { getCoordsDataType } from './shader_compiler';\nconst CHECK_NAN_SNIPPET = `\n result.r = isNaN.r > 0. ? NAN : result.r;\n result.g = isNaN.g > 0. ? NAN : result.g;\n result.b = isNaN.b > 0. ? NAN : result.b;\n result.a = isNaN.a > 0. ? NAN : result.a;\n`;\nexport const INT_DIV = `\n ivec4 ia = round(a);\n ivec4 ib = round(b);\n bvec4 cond = notEqual(ib, ivec4(0));\n ivec4 result = ivec4(0);\n vec4 s = sign(a) * sign(b);\n\n // Windows (D3D) wants guaranteed non-zero int division at compile-time.\n if (cond[0]) {\n result[0] = idiv(ia[0], ib[0], s[0]);\n }\n if (cond[1]) {\n result[1] = idiv(ia[1], ib[1], s[1]);\n }\n if (cond[2]) {\n result[2] = idiv(ia[2], ib[2], s[2]);\n }\n if (cond[3]) {\n result[3] = idiv(ia[3], ib[3], s[3]);\n }\n return vec4(result);\n`;\nexport const POW = `\n // isModRound1 has 1 for components with round(mod(b, 2.0)) == 1, 0 otherwise.\n vec4 isModRound1 = vec4(equal(round(mod(b, 2.0)), ivec4(1)));\n vec4 multiplier = sign(a) * isModRound1 + (vec4(1.0) - isModRound1);\n vec4 result = multiplier * pow(abs(a), b);\n\n // Ensure that a^0 = 1, including 0^0 = 1 as this correspond to TF and JS\n bvec4 isExpZero = equal(b, vec4(0.0));\n result.r = isExpZero.r ? 1.0 : result.r;\n result.g = isExpZero.g ? 1.0 : result.g;\n result.b = isExpZero.b ? 1.0 : result.b;\n result.a = isExpZero.a ? 
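// Both POW snippets (the scalar one earlier and this packed version) avoid GLSL's undefined
// pow() for negative bases: a negative base with a fractional exponent yields NaN, an exponent
// of 0 yields 1, and otherwise the sign is reattached only for odd integer exponents. The same
// rule as a scalar JS function, for reference only:
function powCompat(a, b) {
  if (a < 0 && Math.floor(b) < b) return NaN; // negative base, fractional exponent
  if (b === 0) return 1;                      // a^0 === 1, including 0^0
  const magnitude = Math.pow(Math.abs(a), b);
  const isOddInt = Math.round(((b % 2) + 2) % 2) === 1; // mirrors round(mod(b, 2.0)) == 1
  return isOddInt ? Math.sign(a) * magnitude : magnitude;
}
// powCompat(-2, 3) === -8, powCompat(-2, 2) === 4, powCompat(-2, 0.5) is NaN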
1.0 : result.a;\n\n vec4 isNaN = vec4(lessThan(a, vec4(0.0))) * vec4(lessThan(floor(b), b));\n ` +\n CHECK_NAN_SNIPPET + `\n return result;\n`;\nexport const PRELU = `\n vec4 aLessThanZero = vec4(lessThan(a, vec4(0.)));\n return (aLessThanZero * (b * a)) + ((vec4(1.0) - aLessThanZero) * a);\n`;\nexport const ELU_DER = `\n vec4 bGTEZero = vec4(greaterThanEqual(b, vec4(0.)));\n return (bGTEZero * a) + ((vec4(1.0) - bGTEZero) * (a * (b + vec4(1.0))));\n`;\nexport const EQUAL = `\n return vec4(equal(a, b));\n`;\nexport const NOT_EQUAL = `\n return vec4(notEqual(a, b));\n`;\nexport const LESS = `\n return vec4(lessThan(a, b));\n`;\nexport const LESS_EQUAL = `\n return vec4(lessThanEqual(a, b));\n`;\nexport const GREATER = `\n return vec4(greaterThan(a, b));\n`;\nexport const GREATER_EQUAL = `\n return vec4(greaterThanEqual(a, b));\n`;\nexport const LOGICAL_AND = `\n return vec4(\n vec4(greaterThanEqual(a, vec4(1.0))) *\n vec4(greaterThanEqual(b, vec4(1.0))));\n`;\nexport const LOGICAL_OR = `\n return min(\n vec4(greaterThanEqual(a, vec4(1.0))) +\n vec4(greaterThanEqual(b, vec4(1.0))),\n vec4(1.0));\n`;\nexport const MAX = `\n vec4 result = vec4(max(a, b));\n vec4 isNaN = min(vec4(isnan(a)) + vec4(isnan(b)), vec4(1.0));\n ` +\n CHECK_NAN_SNIPPET + `\n return result;\n`;\nexport const MIN = `\n vec4 result = vec4(min(a, b));\n vec4 isNaN = min(vec4(isnan(a)) + vec4(isnan(b)), vec4(1.0));\n ` +\n CHECK_NAN_SNIPPET + `\n return result;\n`;\nexport const MOD = `\n vec4 result = mod(a, b);\n vec4 isNaN = vec4(equal(b, vec4(0.0)));\n ` +\n CHECK_NAN_SNIPPET + `\n return result;\n`;\nexport class BinaryOpPackedProgram {\n constructor(op, aShape, bShape, checkOutOfBounds = false) {\n this.variableNames = ['A', 'B'];\n this.supportsBroadcasting = true;\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = backend_util.assertAndGetBroadcastShape(aShape, bShape);\n const rank = this.outputShape.length;\n let checkOutOfBoundsString = '';\n if (checkOutOfBounds) {\n if (rank === 0 || util.sizeFromShape(this.outputShape) === 1) {\n checkOutOfBoundsString = `\n result.y = 0.;\n result.z = 0.;\n result.w = 0.;\n `;\n }\n else {\n const dtype = getCoordsDataType(rank);\n checkOutOfBoundsString = `\n ${dtype} coords = getOutputCoords();\n `;\n if (rank === 1) {\n checkOutOfBoundsString += `\n result.y = (coords + 1) >= ${this.outputShape[0]} ? 0. : result.y;\n result.z = 0.;\n result.w = 0.;\n `;\n }\n else {\n const channels = getChannels('coords', rank);\n checkOutOfBoundsString += `\n bool nextRowOutOfBounds =\n (${channels[rank - 2]} + 1) >= ${this.outputShape[rank - 2]};\n bool nextColOutOfBounds =\n (${channels[rank - 1]} + 1) >= ${this.outputShape[rank - 1]};\n result.y = nextColOutOfBounds ? 0. : result.y;\n result.z = nextRowOutOfBounds ? 0. : result.z;\n result.w = nextColOutOfBounds || nextRowOutOfBounds ? 0. : result.w;\n `;\n }\n }\n }\n this.userCode = `\n vec4 binaryOperation(vec4 a, vec4 b) {\n ${op}\n }\n\n void main() {\n vec4 a = getAAtOutCoords();\n vec4 b = getBAtOutCoords();\n\n vec4 result = binaryOperation(a, b);\n ${checkOutOfBoundsString}\n\n setOutput(result);\n }\n `;\n }\n}\n//# sourceMappingURL=binaryop_packed_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
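// BinaryOpPackedProgram's checkOutOfBounds path above zeroes the texel lanes that fall past
// the logical edge when the output's trailing dimensions are odd, so the padding lanes of a
// packed texture never carry stale values. A sketch of which lanes survive for the 2x2 block
// anchored at (row, col) of an [rows, cols] output (illustration only):
function validLanes(row, col, rows, cols) {
  const hasNextCol = col + 1 < cols;
  const hasNextRow = row + 1 < rows;
  return {
    x: true,                     // (row, col) always exists
    y: hasNextCol,               // (row, col+1)
    z: hasNextRow,               // (row+1, col)
    w: hasNextRow && hasNextCol, // (row+1, col+1)
  };
}
// e.g. for a [3, 3] output, the edge block at (2, 2) keeps only lane x.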
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class ClipProgram {\n constructor(aShape) {\n this.variableNames = ['A'];\n this.outputShape = aShape;\n this.userCode = `\n uniform float minVal;\n uniform float maxVal;\n\n void main() {\n float value = getAAtOutCoords();\n if (isnan(value)) {\n setOutput(value);\n return;\n }\n\n setOutput(clamp(value, minVal, maxVal));\n }\n `;\n }\n getCustomSetupFunc(min, max) {\n return (gpgpu, webGLProgram) => {\n if (this.minLoc == null) {\n this.minLoc = gpgpu.getUniformLocationNoThrow(webGLProgram, 'minVal');\n this.maxLoc = gpgpu.getUniformLocationNoThrow(webGLProgram, 'maxVal');\n }\n gpgpu.gl.uniform1f(this.minLoc, min);\n gpgpu.gl.uniform1f(this.maxLoc, max);\n };\n }\n}\n//# sourceMappingURL=clip_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class ClipPackedProgram {\n constructor(aShape) {\n this.variableNames = ['A'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = aShape;\n this.userCode = `\n uniform float minVal;\n uniform float maxVal;\n\n void main() {\n vec4 value = getAAtOutCoords();\n\n if (any(isnan(value))) {\n setOutput(value);\n return;\n }\n\n setOutput(clamp(value, vec4(minVal), vec4(maxVal)));\n }\n `;\n }\n getCustomSetupFunc(min, max) {\n return (gpgpu, webGLProgram) => {\n if (this.minLoc == null) {\n this.minLoc = gpgpu.getUniformLocationNoThrow(webGLProgram, 'minVal');\n this.maxLoc = gpgpu.getUniformLocationNoThrow(webGLProgram, 'maxVal');\n }\n gpgpu.gl.uniform1f(this.minLoc, min);\n gpgpu.gl.uniform1f(this.maxLoc, max);\n };\n }\n}\n//# sourceMappingURL=clip_packed_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
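// ClipProgram and ClipPackedProgram above pass minVal/maxVal as uniforms (set through
// getCustomSetupFunc) so one compiled shader can be reused for any clip bounds, and they let
// NaN pass through rather than clamping it. A scalar CPU reference of the same semantics:
function clipValue(value, minVal, maxVal) {
  if (Number.isNaN(value)) return value;            // NaN is propagated, not clamped
  return Math.min(Math.max(value, minVal), maxVal); // clamp(value, minVal, maxVal)
}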
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class ComplexAbsProgram {\n constructor(shape) {\n this.variableNames = ['real', 'imag'];\n this.outputShape = shape;\n this.userCode = `\n void main() {\n float re = abs(getRealAtOutCoords());\n float im = abs(getImagAtOutCoords());\n float mx = max(re, im);\n\n // sadly the length function in glsl is not underflow-safe\n // (at least not on Intel GPUs). So the safe solution is\n // to ensure underflow-safety in all cases.\n setOutput(\n mx == 0.0 ? 0.0 : mx * length(vec2(1, min(re, im)/mx))\n );\n }\n `;\n }\n}\n//# sourceMappingURL=complex_abs_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class Conv2DDerFilterProgram {\n constructor(convInfo) {\n this.variableNames = ['x', 'dy'];\n this.outputShape = convInfo.filterShape;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n const isChannelsLast = convInfo.dataFormat === 'channelsLast';\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int wR = coords.x;\n int wC = coords.y;\n int d1 = coords.z;\n int d2 = coords.w;\n\n // Convolve x(?, ?, d1) with dy(:, :, d2) to get dw(wR, wC, d1, d2).\n // ? = to be determined. 
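// ComplexAbsProgram above computes |re + i*im| as mx * length(vec2(1, min/mx)) instead of
// length(vec2(re, im)) because squaring small components can underflow to zero on some GPUs.
// The same rescaled formulation in JS, for reference:
function complexAbs(re, im) {
  const a = Math.abs(re);
  const b = Math.abs(im);
  const mx = Math.max(a, b);
  const mn = Math.min(a, b);
  return mx === 0 ? 0 : mx * Math.sqrt(1 + (mn / mx) * (mn / mx)); // equivalent to Math.hypot(a, b)
}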
: = across all values in that axis.\n float dotProd = 0.0;\n\n for (int b = 0; b < ${convInfo.batchSize}; b++) {\n for (int yR = 0; yR < ${convInfo.outHeight}; yR++) {\n int xR = wR + yR * ${strideHeight} - ${padTop};\n\n if (xR < 0 || xR >= ${convInfo.inHeight}) {\n continue;\n }\n\n for (int yC = 0; yC < ${convInfo.outWidth}; yC++) {\n int xC = wC + yC * ${strideWidth} - ${padLeft};\n\n if (xC < 0 || xC >= ${convInfo.inWidth}) {\n continue;\n }\n\n if (${isChannelsLast}) {\n float dyValue = getDy(b, yR, yC, d2);\n float xValue = getX(b, xR, xC, d1);\n dotProd += (xValue * dyValue);\n } else {\n float dyValue = getDy(b, d2, yR, yC);\n float xValue = getX(b, d1, xR, xC);\n dotProd += (xValue * dyValue);\n }\n\n }\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\nexport class Conv2DDerInputProgram {\n constructor(convInfo) {\n this.variableNames = ['dy', 'W'];\n this.outputShape = convInfo.inShape;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const isChannelsLast = convInfo.dataFormat === 'channelsLast';\n const padTop = filterHeight - 1 - convInfo.padInfo.top;\n const padLeft = filterWidth - 1 - convInfo.padInfo.left;\n const rowDim = isChannelsLast ? 1 : 2;\n const colDim = isChannelsLast ? 2 : 3;\n const channelDim = isChannelsLast ? 3 : 1;\n this.userCode = `\n const ivec2 pads = ivec2(${padTop}, ${padLeft});\n\n void main() {\n ivec4 coords = getOutputCoords();\n int batch = coords[0];\n int d1 = coords[${channelDim}];\n\n ivec2 dyCorner = ivec2(coords[${rowDim}], coords[${colDim}]) - pads;\n int dyRCorner = dyCorner.x;\n int dyCCorner = dyCorner.y;\n\n // Convolve dy(?, ?, d2) with w(:, :, d1, d2) to compute dx(xR, xC, d1).\n // ? = to be determined. 
: = across all values in that axis.\n float dotProd = 0.0;\n for (int wR = 0; wR < ${filterHeight}; wR++) {\n float dyR = float(dyRCorner + wR) / ${strideHeight}.0;\n\n if (dyR < 0.0 || dyR >= ${convInfo.outHeight}.0 || fract(dyR) > 0.0) {\n continue;\n }\n int idyR = int(dyR);\n\n int wRPerm = ${filterHeight} - 1 - wR;\n\n for (int wC = 0; wC < ${filterWidth}; wC++) {\n float dyC = float(dyCCorner + wC) / ${strideWidth}.0;\n\n if (dyC < 0.0 || dyC >= ${convInfo.outWidth}.0 ||\n fract(dyC) > 0.0) {\n continue;\n }\n int idyC = int(dyC);\n\n int wCPerm = ${filterWidth} - 1 - wC;\n\n for (int d2 = 0; d2 < ${convInfo.outChannels}; d2++) {\n\n if (${isChannelsLast}) {\n float xValue = getDy(batch, idyR, idyC, d2);\n float wValue = getW(wRPerm, wCPerm, d1, d2);\n dotProd += xValue * wValue;\n } else {\n float xValue = getDy(batch, d2, idyR, idyC);\n float wValue = getW(wRPerm, wCPerm, d1, d2);\n dotProd += xValue * wValue;\n }\n\n }\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\nexport class Conv3DDerFilterProgram {\n constructor(convInfo) {\n this.variableNames = ['x', 'dy'];\n this.outputShape = convInfo.filterShape;\n const strideDepth = convInfo.strideDepth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const padFront = convInfo.padInfo.front;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n this.userCode = `\n void main() {\n ivec5 coords = getOutputCoords();\n int wF = coords.x;\n int wR = coords.y;\n int wC = coords.z;\n int d1 = coords.w;\n int d2 = coords.u;\n\n float dotProd = 0.0;\n\n for (int b = 0; b < ${convInfo.batchSize}; b++) {\n for (int yF = 0; yF < ${convInfo.outDepth}; yF++) {\n int xF = wF + yF * ${strideDepth} - ${padFront};\n\n if (xF < 0 || xF >= ${convInfo.inDepth}) {\n continue;\n }\n\n for (int yR = 0; yR < ${convInfo.outHeight}; yR++) {\n int xR = wR + yR * ${strideHeight} - ${padTop};\n\n if (xR < 0 || xR >= ${convInfo.inHeight}) {\n continue;\n }\n\n for (int yC = 0; yC < ${convInfo.outWidth}; yC++) {\n int xC = wC + yC * ${strideWidth} - ${padLeft};\n\n if (xC < 0 || xC >= ${convInfo.inWidth}) {\n continue;\n }\n\n float dyValue = getDy(b, yF, yR, yC, d2);\n float xValue = getX(b, xF, xR, xC, d1);\n dotProd += (xValue * dyValue);\n }\n }\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\nexport class Conv3DDerInputProgram {\n constructor(convInfo) {\n this.variableNames = ['dy', 'W'];\n this.outputShape = convInfo.inShape;\n const filterDepth = convInfo.filterDepth;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const strideDepth = convInfo.strideDepth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const padFront = filterDepth - 1 - convInfo.padInfo.front;\n const padTop = filterHeight - 1 - convInfo.padInfo.top;\n const padLeft = filterWidth - 1 - convInfo.padInfo.left;\n this.userCode = `\n const ivec3 pads = ivec3(${padFront}, ${padTop}, ${padLeft});\n\n void main() {\n ivec5 coords = getOutputCoords();\n int batch = coords.x;\n int d1 = coords.u;\n\n\n ivec3 dyCorner = ivec3(coords.y, coords.z, coords.w) - pads;\n int dyFCorner = dyCorner.x;\n int dyRCorner = dyCorner.y;\n int dyCCorner = dyCorner.z;\n\n float dotProd = 0.0;\n for (int wF = 0; wF < ${filterDepth}; wF++) {\n float dyF = float(dyFCorner + wF) / ${strideDepth}.0;\n\n if (dyF < 0.0 || dyF >= ${convInfo.outDepth}.0 || fract(dyF) > 0.0) {\n continue;\n }\n int idyF = int(dyF);\n\n int wFPerm = ${filterDepth} - 1 - wF;\n\n for (int wR = 
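// Conv2DDerInputProgram (and its 3D counterpart) above compute the gradient w.r.t. the input
// as a correlation of dy with the spatially flipped filter; the float division by the stride
// plus the fract() test skips dy positions that no output actually produced. One dx element
// for a single-channel case, as a CPU sketch with illustrative names (not code from this bundle):
function dxAt(dy, W, xR, xC, filterH, filterW, strideH, strideW, padTop, padLeft) {
  let dotProd = 0;
  const dyRCorner = xR - (filterH - 1 - padTop);
  const dyCCorner = xC - (filterW - 1 - padLeft);
  for (let wR = 0; wR < filterH; wR++) {
    const dyR = (dyRCorner + wR) / strideH;
    if (dyR < 0 || dyR >= dy.length || !Number.isInteger(dyR)) continue;
    for (let wC = 0; wC < filterW; wC++) {
      const dyC = (dyCCorner + wC) / strideW;
      if (dyC < 0 || dyC >= dy[0].length || !Number.isInteger(dyC)) continue;
      // flipped filter taps, exactly like wRPerm/wCPerm in the shader
      dotProd += dy[dyR][dyC] * W[filterH - 1 - wR][filterW - 1 - wC];
    }
  }
  return dotProd;
}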
0; wR < ${filterHeight}; wR++) {\n float dyR = float(dyRCorner + wR) / ${strideHeight}.0;\n\n if (dyR < 0.0 || dyR >= ${convInfo.outHeight}.0 ||\n fract(dyR) > 0.0) {\n continue;\n }\n int idyR = int(dyR);\n\n int wRPerm = ${filterHeight} - 1 - wR;\n\n for (int wC = 0; wC < ${filterWidth}; wC++) {\n float dyC = float(dyCCorner + wC) / ${strideWidth}.0;\n\n if (dyC < 0.0 || dyC >= ${convInfo.outWidth}.0 ||\n fract(dyC) > 0.0) {\n continue;\n }\n int idyC = int(dyC);\n\n int wCPerm = ${filterWidth} - 1 - wC;\n\n for (int d2 = 0; d2 < ${convInfo.outChannels}; d2++) {\n float xValue = getDy(batch, idyF, idyR, idyC, d2);\n float wValue = getW(wFPerm, wRPerm, wCPerm, d1, d2);\n dotProd += xValue * wValue;\n }\n }\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\n//# sourceMappingURL=conv_backprop_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class DepthwiseConv2DDerFilterProgram {\n constructor(convInfo) {\n this.variableNames = ['x', 'dy'];\n this.outputShape = convInfo.filterShape;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n const channelMul = convInfo.outChannels / convInfo.inChannels;\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int wR = coords.x;\n int wC = coords.y;\n int d1 = coords.z;\n int dm = coords.w;\n int d2 = d1 * ${channelMul} + dm;\n\n float dotProd = 0.0;\n\n // TO DO: Vec4 over the batch size\n for (int b = 0; b < ${convInfo.batchSize}; b++) {\n for (int yR = 0; yR < ${convInfo.outHeight}; yR++) {\n int xR = wR + yR * ${strideHeight} - ${padTop};\n\n if (xR < 0 || xR >= ${convInfo.inHeight}) {\n continue;\n }\n\n for (int yC = 0; yC < ${convInfo.outWidth}; yC++) {\n int xC = wC + yC * ${strideWidth} - ${padLeft};\n\n if (xC < 0 || xC >= ${convInfo.inWidth}) {\n continue;\n }\n\n float dyValue = getDy(b, yR, yC, d2);\n float xValue = getX(b, xR, xC, d1);\n dotProd += (xValue * dyValue);\n }\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\nexport class DepthwiseConv2DDerInputProgram {\n constructor(convInfo) {\n this.variableNames = ['dy', 'W'];\n this.outputShape = convInfo.inShape;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const padTop = filterHeight - 1 - convInfo.padInfo.top;\n const padLeft = filterWidth - 1 - convInfo.padInfo.left;\n const channelMul = convInfo.outChannels / convInfo.inChannels;\n this.userCode = `\n const ivec2 pads = ivec2(${padTop}, ${padLeft});\n\n void main() {\n ivec4 coords = getOutputCoords();\n int batch = coords[0];\n int d1 = coords[3];\n ivec2 dyCorner = coords.yz - pads;\n int dyRCorner = dyCorner.x;\n int dyCCorner = dyCorner.y;\n\n float dotProd = 0.0;\n\n for (int wR = 
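// The depthwise gradient programs above index the filter through a channel multiplier:
// output channel d2 is derived from input channel d1 and multiplier index dm as
// d2 = d1 * channelMul + dm, with channelMul = outChannels / inChannels. The mapping in both
// directions, as a small sketch:
function depthwiseChannels(inChannels, outChannels) {
  const channelMul = outChannels / inChannels;
  return {
    toOutput: (d1, dm) => d1 * channelMul + dm,
    fromOutput: (d2) => ({ d1: Math.floor(d2 / channelMul), dm: d2 % channelMul }),
  };
}
// e.g. with inChannels === 3 and channelMul === 2, output channel 5 is input channel 2, multiplier 1.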
0; wR < ${filterHeight}; wR++) {\n float dyR = float(dyRCorner + wR) / ${strideHeight}.0;\n\n if (dyR < 0.0 || dyR >= ${convInfo.outHeight}.0 || fract(dyR) > 0.0) {\n continue;\n }\n int idyR = int(dyR);\n\n int wRPerm = ${filterHeight} - 1 - wR;\n\n for (int wC = 0; wC < ${filterWidth}; wC++) {\n float dyC = float(dyCCorner + wC) / ${strideWidth}.0;\n\n if (dyC < 0.0 || dyC >= ${convInfo.outWidth}.0 ||\n fract(dyC) > 0.0) {\n continue;\n }\n int idyC = int(dyC);\n\n int wCPerm = ${filterWidth} - 1 - wC;\n\n // TO DO: Vec4 over the channelMul\n for (int dm = 0; dm < ${channelMul}; dm++) {\n int d2 = d1 * ${channelMul} + dm;\n float xValue = getDy(batch, idyR, idyC, d2);\n float wValue = getW(wRPerm, wCPerm, d1, dm);\n dotProd += xValue * wValue;\n }\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\n//# sourceMappingURL=conv_backprop_gpu_depthwise.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class Conv2DProgram {\n constructor(convInfo, addBias = false, activation = null, hasPreluActivationWeights = false) {\n this.variableNames = ['x', 'W'];\n this.outputShape = convInfo.outShape;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const inputDepthNearestVec4 = Math.floor(convInfo.inChannels / 4) * 4;\n const inputDepthVec4Remainder = convInfo.inChannels % 4;\n const isChannelsLast = convInfo.dataFormat === 'channelsLast';\n const rowDim = isChannelsLast ? 1 : 2;\n const colDim = isChannelsLast ? 2 : 3;\n const channelDim = isChannelsLast ? 3 : 1;\n let activationSnippet = '', applyActivationSnippet = '';\n if (activation) {\n if (hasPreluActivationWeights) {\n activationSnippet = `float activation(float a) {\n float b = getPreluActivationWeightsAtOutCoords();\n ${activation}\n }`;\n }\n else {\n activationSnippet = `\n float activation(float x) {\n ${activation}\n }\n `;\n }\n applyActivationSnippet = `result = activation(result);`;\n }\n const addBiasSnippet = addBias ? 
'result += getBiasAtOutCoords();' : '';\n if (addBias) {\n this.variableNames.push('bias');\n }\n if (hasPreluActivationWeights) {\n this.variableNames.push('preluActivationWeights');\n }\n this.userCode = `\n ${activationSnippet}\n\n const ivec2 strides = ivec2(${strideHeight}, ${strideWidth});\n const ivec2 pads = ivec2(${padTop}, ${padLeft});\n\n void main() {\n ivec4 coords = getOutputCoords();\n int batch = coords[0];\n int d2 = coords[${channelDim}];\n\n ivec2 xRCCorner =\n ivec2(coords[${rowDim}], coords[${colDim}]) * strides - pads;\n int xRCorner = xRCCorner.x;\n int xCCorner = xRCCorner.y;\n\n // Convolve x(?, ?, d1) with w(:, :, d1, d2) to get y(yR, yC, d2).\n // ? = to be determined. : = across all values in that axis.\n float dotProd = 0.0;\n for (int wR = 0; wR < ${filterHeight}; wR++) {\n int xR = xRCorner + wR * ${dilationHeight};\n\n if (xR < 0 || xR >= ${convInfo.inHeight}) {\n continue;\n }\n\n for (int wC = 0; wC < ${filterWidth}; wC++) {\n int xC = xCCorner + wC * ${dilationWidth};\n\n if (xC < 0 || xC >= ${convInfo.inWidth}) {\n continue;\n }\n\n for (int d1 = 0; d1 < ${inputDepthNearestVec4}; d1 += 4) {\n vec4 wValues = vec4(\n getW(wR, wC, d1, d2),\n getW(wR, wC, d1 + 1, d2),\n getW(wR, wC, d1 + 2, d2),\n getW(wR, wC, d1 + 3, d2)\n );\n\n if (${isChannelsLast}) {\n vec4 xValues = vec4(\n getX(batch, xR, xC, d1),\n getX(batch, xR, xC, d1 + 1),\n getX(batch, xR, xC, d1 + 2),\n getX(batch, xR, xC, d1 + 3)\n );\n dotProd += dot(xValues, wValues);\n } else {\n vec4 xValues = vec4(\n getX(batch, d1, xR, xC),\n getX(batch, d1 + 1, xR, xC),\n getX(batch, d1 + 2, xR, xC),\n getX(batch, d1 + 3, xR, xC)\n );\n dotProd += dot(xValues, wValues);\n }\n }\n\n if (${inputDepthVec4Remainder === 1}) {\n\n if (${isChannelsLast}) {\n dotProd +=\n getX(batch, xR, xC, ${inputDepthNearestVec4}) *\n getW(wR, wC, ${inputDepthNearestVec4}, d2);\n } else {\n dotProd +=\n getX(batch, ${inputDepthNearestVec4}, xR, xC) *\n getW(wR, wC, ${inputDepthNearestVec4}, d2);\n }\n\n } else if (${inputDepthVec4Remainder === 2}) {\n vec2 wValues = vec2(\n getW(wR, wC, ${inputDepthNearestVec4}, d2),\n getW(wR, wC, ${inputDepthNearestVec4} + 1, d2)\n );\n\n if (${isChannelsLast}) {\n vec2 xValues = vec2(\n getX(batch, xR, xC, ${inputDepthNearestVec4}),\n getX(batch, xR, xC, ${inputDepthNearestVec4} + 1)\n );\n dotProd += dot(xValues, wValues);\n } else {\n vec2 xValues = vec2(\n getX(batch, ${inputDepthNearestVec4}, xR, xC),\n getX(batch, ${inputDepthNearestVec4} + 1, xR, xC)\n );\n dotProd += dot(xValues, wValues);\n }\n\n } else if (${inputDepthVec4Remainder === 3}) {\n vec3 wValues = vec3(\n getW(wR, wC, ${inputDepthNearestVec4}, d2),\n getW(wR, wC, ${inputDepthNearestVec4} + 1, d2),\n getW(wR, wC, ${inputDepthNearestVec4} + 2, d2)\n );\n\n if (${isChannelsLast}) {\n vec3 xValues = vec3(\n getX(batch, xR, xC, ${inputDepthNearestVec4}),\n getX(batch, xR, xC, ${inputDepthNearestVec4} + 1),\n getX(batch, xR, xC, ${inputDepthNearestVec4} + 2)\n );\n dotProd += dot(xValues, wValues);\n } else {\n vec3 xValues = vec3(\n getX(batch, ${inputDepthNearestVec4}, xR, xC),\n getX(batch, ${inputDepthNearestVec4} + 1, xR, xC),\n getX(batch, ${inputDepthNearestVec4} + 2, xR, xC)\n );\n dotProd += dot(xValues, wValues);\n }\n\n }\n }\n }\n\n float result = dotProd;\n ${addBiasSnippet}\n ${applyActivationSnippet}\n setOutput(result);\n }\n `;\n }\n}\nexport class Conv3DProgram {\n constructor(convInfo) {\n this.variableNames = ['x', 'W'];\n this.outputShape = convInfo.outShape;\n const padFront = convInfo.padInfo.front;\n 
const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n const strideDepth = convInfo.strideDepth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationDepth = convInfo.dilationDepth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const filterDepth = convInfo.filterDepth;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const inputDepthNearestVec4 = Math.floor(convInfo.inChannels / 4) * 4;\n const inputDepthVec4Remainder = convInfo.inChannels % 4;\n this.userCode = `\n const ivec3 strides = ivec3(${strideDepth}, ${strideHeight}, ${strideWidth});\n const ivec3 pads = ivec3(${padFront}, ${padTop}, ${padLeft});\n\n void main() {\n ivec5 coords = getOutputCoords();\n int batch = coords.x;\n int d2 = coords.u;\n\n ivec3 xFRCCorner = ivec3(coords.y, coords.z, coords.w) * strides - pads;\n int xFCorner = xFRCCorner.x;\n int xRCorner = xFRCCorner.y;\n int xCCorner = xFRCCorner.z;\n\n // Convolve x(?, ?, ?, d1) with w(:, :, :, d1, d2) to get\n // y(yF, yR, yC, d2). ? = to be determined. : = across all\n // values in that axis.\n float dotProd = 0.0;\n for (int wF = 0; wF < ${filterDepth}; wF++) {\n int xF = xFCorner + wF * ${dilationDepth};\n\n if (xF < 0 || xF >= ${convInfo.inDepth}) {\n continue;\n }\n\n for (int wR = 0; wR < ${filterHeight}; wR++) {\n int xR = xRCorner + wR * ${dilationHeight};\n\n if (xR < 0 || xR >= ${convInfo.inHeight}) {\n continue;\n }\n\n for (int wC = 0; wC < ${filterWidth}; wC++) {\n int xC = xCCorner + wC * ${dilationWidth};\n\n if (xC < 0 || xC >= ${convInfo.inWidth}) {\n continue;\n }\n\n for (int d1 = 0; d1 < ${inputDepthNearestVec4}; d1 += 4) {\n vec4 xValues = vec4(\n getX(batch, xF, xR, xC, d1),\n getX(batch, xF, xR, xC, d1 + 1),\n getX(batch, xF, xR, xC, d1 + 2),\n getX(batch, xF, xR, xC, d1 + 3)\n );\n vec4 wValues = vec4(\n getW(wF, wR, wC, d1, d2),\n getW(wF, wR, wC, d1 + 1, d2),\n getW(wF, wR, wC, d1 + 2, d2),\n getW(wF, wR, wC, d1 + 3, d2)\n );\n\n dotProd += dot(xValues, wValues);\n }\n\n if (${inputDepthVec4Remainder === 1}) {\n dotProd +=\n getX(batch, xF, xR, xC, ${inputDepthNearestVec4}) *\n getW(wF, wR, wC, ${inputDepthNearestVec4}, d2);\n } else if (${inputDepthVec4Remainder === 2}) {\n vec2 xValues = vec2(\n getX(batch, xF, xR, xC, ${inputDepthNearestVec4}),\n getX(batch, xF, xR, xC, ${inputDepthNearestVec4} + 1)\n );\n vec2 wValues = vec2(\n getW(wF, wR, wC, ${inputDepthNearestVec4}, d2),\n getW(wF, wR, wC, ${inputDepthNearestVec4} + 1, d2)\n );\n dotProd += dot(xValues, wValues);\n } else if (${inputDepthVec4Remainder === 3}) {\n vec3 xValues = vec3(\n getX(batch, xF, xR, xC, ${inputDepthNearestVec4}),\n getX(batch, xF, xR, xC, ${inputDepthNearestVec4} + 1),\n getX(batch, xF, xR, xC, ${inputDepthNearestVec4} + 2)\n );\n vec3 wValues = vec3(\n getW(wF, wR, wC, ${inputDepthNearestVec4}, d2),\n getW(wF, wR, wC, ${inputDepthNearestVec4} + 1, d2),\n getW(wF, wR, wC, ${inputDepthNearestVec4} + 2, d2)\n );\n dotProd += dot(xValues, wValues);\n }\n }\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\n//# sourceMappingURL=conv_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class DepthwiseConv2DProgram {\n constructor(convInfo, addBias = false, activation = null, hasPreluActivation = false) {\n this.variableNames = ['x', 'W'];\n this.outputShape = convInfo.outShape;\n const xNumRows = convInfo.inHeight;\n const xNumCols = convInfo.inWidth;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const channelMul = convInfo.outChannels / convInfo.inChannels;\n let activationSnippet = '', applyActivationSnippet = '';\n if (activation) {\n if (hasPreluActivation) {\n activationSnippet = `float activation(float a) {\n float b = getPreluActivationWeightsAtOutCoords();\n ${activation}\n }`;\n }\n else {\n activationSnippet = `\n float activation(float x) {\n ${activation}\n }\n `;\n }\n applyActivationSnippet = `result = activation(result);`;\n }\n const addBiasSnippet = addBias ? 'result += getBiasAtOutCoords();' : '';\n if (addBias) {\n this.variableNames.push('bias');\n }\n if (hasPreluActivation) {\n this.variableNames.push('preluActivationWeights');\n }\n this.userCode = `\n ${activationSnippet}\n\n const ivec2 strides = ivec2(${strideHeight}, ${strideWidth});\n const ivec2 pads = ivec2(${padTop}, ${padLeft});\n\n void main() {\n ivec4 coords = getOutputCoords();\n int batch = coords.x;\n ivec2 xRCCorner = coords.yz * strides - pads;\n int d2 = coords.w;\n int d1 = d2 / ${channelMul};\n int q = d2 - d1 * ${channelMul};\n\n int xRCorner = xRCCorner.x;\n int xCCorner = xRCCorner.y;\n\n // Convolve x(?, ?, d1) with w(:, :, d1, q) to get y(yR, yC, d2).\n // ? = to be determined. : = across all values in that axis.\n float dotProd = 0.0;\n // TO DO(dsmilkov): Flatten the two for loops and vec4 the operations.\n for (int wR = 0; wR < ${filterHeight}; wR++) {\n int xR = xRCorner + wR * ${dilationHeight};\n\n if (xR < 0 || xR >= ${xNumRows}) {\n continue;\n }\n\n for (int wC = 0; wC < ${filterWidth}; wC++) {\n int xC = xCCorner + wC * ${dilationWidth};\n\n if (xC < 0 || xC >= ${xNumCols}) {\n continue;\n }\n\n float xVal = getX(batch, xR, xC, d1);\n float wVal = getW(wR, wC, d1, q);\n dotProd += xVal * wVal;\n }\n }\n\n float result = dotProd;\n ${addBiasSnippet}\n ${applyActivationSnippet}\n setOutput(result);\n }\n `;\n }\n}\n//# sourceMappingURL=conv_gpu_depthwise.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nexport class DepthwiseConvPacked2DProgram {\n constructor(convInfo, addBias = false, activation = null, hasPreluActivation = false) {\n this.variableNames = ['x', 'W'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = convInfo.outShape;\n const xNumRows = convInfo.inHeight;\n const xNumCols = convInfo.inWidth;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const texelsAcross = filterWidth;\n let mainLoop = `int xR; int xC; int xCOffset;`;\n for (let r = 0; r < filterHeight; r++) {\n for (let c = 0; c < filterWidth; c++) {\n mainLoop += `\n vec4 xTexelR${r}C${c * 2} = vec4(0.);\n vec4 wR${r}C${c} = vec4(0.);\n vec4 xR${r}C${c} = vec4(0.);`;\n }\n }\n /**\n * This vectorized implementation works by gathering the values needed for\n * each output channel's dot product into vec4's and then multiplying them\n * all together (this happens in the final double for-loop below). 
Most of\n * the main loop consists of constructing these vec4's with the minimum\n * number of texture2D calls, which means making use of all four returned\n * values from a texture2D call at once.\n */\n for (let r = 0; r < filterHeight; r++) {\n for (let texelC = 0; texelC < texelsAcross; texelC++) {\n const c = texelC * 2;\n mainLoop += `\n xR = xRCorner + ${r * dilationHeight};\n xC = xCCorner + ${c * dilationWidth};\n `;\n if (strideWidth === 1) {\n if (c < filterWidth) {\n // If padding is odd, the outer texels have to be composed.\n if (padLeft % 2 === 1) {\n // TODO: Ensure vec4 previous does not result in redundant sample,\n // and avoid setting xTexelRC's that exceed the boundary in the\n // first place rather than resetting them to vec4(0)).\n // To compute xCOffset:\n // - If padding is odd, we must add 1 to ensure we ask for an\n // even-numbered row.\n // - We subtract 2 to access the previous texel.\n mainLoop += `\n xCOffset = xC + 1;\n if(xR >= 0 && xR < ${xNumRows} && xCOffset >= 0 && xCOffset < ${xNumCols}) {\n xTexelR${r}C${c} = getX(batch, xR, xCOffset, d1);\n\n // Need to manually clear unused channels in case\n // we're reading from recycled texture.\n if(xCOffset + 1 >= ${xNumCols}) {\n xTexelR${r}C${c}.zw = vec2(0.);\n }\n } else {\n xTexelR${r}C${c} = vec4(0.);\n }\n\n xCOffset = xC + 1 - 2;\n if(xR >= 0 && xR < ${xNumRows} && xCOffset >= 0 && xCOffset < ${xNumCols}) {\n vec4 previous = getX(batch, xR, xCOffset, d1);\n\n // Need to manually clear unused channels in case\n // we're reading from recycled texture.\n if(xCOffset + 1 >= ${xNumCols}) {\n previous.zw = vec2(0.);\n }\n\n xR${r}C${c} = vec4(previous.zw, xTexelR${r}C${c}.xy);\n } else {\n xR${r}C${c} = vec4(0, 0, xTexelR${r}C${c}.xy);\n }\n `;\n }\n else {\n // Padding is even, so xRC corresponds to a single texel.\n mainLoop += `\n if(xR >= 0 && xR < ${xNumRows} && xC >= 0 && xC < ${xNumCols}) {\n xTexelR${r}C${c} = getX(batch, xR, xC, d1);\n } else {\n xTexelR${r}C${c} = vec4(0.);\n }\n\n xR${r}C${c} = xTexelR${r}C${c};\n `;\n }\n if (c + 1 < filterWidth) {\n // If dilation is even, the second entry should match the first\n // (either both are composed or both are single samples). 
But if\n // dilation is odd, then the second entry should be the opposite\n // of the first (if the first is composed, the second is a single\n // sample, and vice versa.)\n const nextTexelOffset = padLeft % 2 === 0 ?\n util.nearestLargerEven(dilationWidth) :\n dilationWidth;\n if ((dilationWidth % 2 === 0 && padLeft % 2 === 1) ||\n (dilationWidth % 2 !== 0 && padLeft % 2 !== 1)) {\n mainLoop += `\n xCOffset = xC + ${padLeft % 2} + ${nextTexelOffset};\n\n if(xR >= 0 && xR < ${xNumRows} &&\n xCOffset >= 0 && xCOffset < ${xNumCols}) {\n xTexelR${r}C${c + 2} = getX(batch, xR, xCOffset, d1);\n }\n `;\n // If dilation > 1 then the xRC's will not be able to share any\n // values, so each xRC will require two unique calls to getX.\n if (dilationWidth > 1) {\n mainLoop += `\n xCOffset -= 2;\n if(xR >= 0 && xR < ${xNumRows} &&\n xCOffset >= 0 && xCOffset < ${xNumCols}) {\n xTexelR${r}C${c} = getX(batch, xR, xCOffset, d1);\n } else {\n xTexelR${r}C${c} = vec4(0.);\n }\n `;\n }\n mainLoop += `\n xR${r}C${c + 1} = vec4(\n xTexelR${r}C${c}.zw, xTexelR${r}C${c + 2}.xy);\n `;\n }\n else {\n mainLoop += `\n xCOffset = xC + ${nextTexelOffset};\n\n if(xR >= 0 && xR < ${xNumRows} &&\n xCOffset >= 0 && xCOffset < ${xNumCols}) {\n xTexelR${r}C${c + 2} = getX(batch, xR, xCOffset, d1);\n }\n\n xR${r}C${c + 1} = xTexelR${r}C${c + 2};\n `;\n }\n }\n }\n }\n else { // stride > 1\n if (c < filterWidth) {\n mainLoop += `\n if(xR >= 0 && xR < ${xNumRows}) {\n `;\n // Depending on whether padLeft is even or odd, we want either the\n // xy or zw channels from X texels for xR${r}C${c}. If padLeft is\n // even, xR${r}C${c + 1} is simply the zw channels of texels we've\n // already sampled. But if padLeft is odd, xR${r}C{$c + 1}.zw will\n // need to come from the xy channels of a new texel, hence the `vec4\n // final` initialized below.\n if (padLeft % 2 === 1) {\n mainLoop += `\n xCOffset = xC + 1 - ${strideWidth};\n if(xCOffset >= 0 && xCOffset < ${xNumCols}) {\n xTexelR${r}C${c} = getX(batch, xR, xCOffset, d1);\n } else {\n xTexelR${r}C${c} = vec4(0.);\n }\n\n if(xC + 1 >= 0 && xC + 1 < ${xNumCols}) {\n xTexelR${r}C${c + 2} = getX(batch, xR, xC + 1, d1);\n } else {\n xTexelR${r}C${c + 2} = vec4(0.);\n }\n\n xR${r}C${c} = vec4(\n xTexelR${r}C${c}.zw, xTexelR${r}C${c + 2}.zw);\n `;\n if (c + 1 < filterWidth) {\n mainLoop += `\n vec4 final = vec4(0.);\n xCOffset = xC + 1 + ${strideWidth};\n if(xCOffset >= 0 && xCOffset < ${xNumCols}) {\n final = getX(batch, xR, xCOffset, d1);\n }\n xR${r}C${c + 1} = vec4(xTexelR${r}C${c + 2}.xy, final.xy);\n `;\n }\n }\n else {\n mainLoop += `\n if(xC >= 0 && xC < ${xNumCols}) {\n xTexelR${r}C${c} = getX(batch, xR, xC, d1);\n } else {\n xTexelR${r}C${c} = vec4(0.);\n }\n\n xCOffset = xC + ${strideWidth};\n if(xCOffset >= 0 && xCOffset < ${xNumCols}) {\n xTexelR${r}C${c + 2} = getX(batch, xR, xCOffset, d1);\n } else {\n xTexelR${r}C${c + 2} = vec4(0.);\n }\n\n xR${r}C${c} = vec4(\n xTexelR${r}C${c}.xy, xTexelR${r}C${c + 2}.xy);\n `;\n if (c + 1 < filterWidth) {\n mainLoop += `\n xR${r}C${c + 1} = vec4(\n xTexelR${r}C${c}.zw, xTexelR${r}C${c + 2}.zw);\n `;\n }\n }\n mainLoop += `}`;\n }\n }\n if (c < filterWidth) {\n mainLoop += `\n vec4 wTexelR${r}C${c} = getW(${r}, ${c}, d1, q);\n wR${r}C${c} = vec4(wTexelR${r}C${c}.xz, wTexelR${r}C${c}.xz);\n `;\n if (c + 1 < filterWidth) {\n mainLoop += `\n vec4 wTexelR${r}C${c + 1} = getW(${r}, ${c + 1}, d1, q);\n wR${r}C${c + 1} =\n vec4(wTexelR${r}C${c + 1}.xz, wTexelR${r}C${c + 1}.xz);`;\n }\n }\n }\n }\n for (let r = 0; r < filterHeight; r++) {\n 
for (let c = 0; c < filterWidth; c++) {\n mainLoop += `dotProd += xR${r}C${c} * wR${r}C${c};`;\n }\n }\n let activationSnippet = '', applyActivationSnippet = '';\n if (activation) {\n if (hasPreluActivation) {\n activationSnippet = `vec4 activation(vec4 a) {\n vec4 b = getPreluActivationWeightsAtOutCoords();\n ${activation}\n }`;\n }\n else {\n activationSnippet = `vec4 activation(vec4 x) {\n ${activation}\n }`;\n }\n applyActivationSnippet = `result = activation(result);`;\n }\n const addBiasSnippet = addBias ? 'result += getBiasAtOutCoords();' : '';\n if (addBias) {\n this.variableNames.push('bias');\n }\n if (hasPreluActivation) {\n this.variableNames.push('preluActivationWeights');\n }\n this.userCode = `\n ${activationSnippet}\n\n const ivec2 strides = ivec2(${strideHeight}, ${strideWidth});\n const ivec2 pads = ivec2(${padTop}, ${padLeft});\n\n void main() {\n\n ivec4 coords = getOutputCoords();\n int batch = coords.x;\n ivec2 xRCCorner = coords.yz * strides - pads;\n int d2 = coords.w;\n int d1 = d2;\n int q = 0;\n int xRCorner = xRCCorner.x;\n int xCCorner = xRCCorner.y;\n\n vec4 dotProd = vec4(0.);\n\n ${mainLoop}\n\n vec4 result = dotProd;\n ${addBiasSnippet}\n ${applyActivationSnippet}\n setOutput(result);\n }\n `;\n }\n}\n//# sourceMappingURL=conv_packed_gpu_depthwise.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class CropAndResizeProgram {\n constructor(imageShape, boxShape, cropSize, method, extrapolationValue) {\n this.variableNames = ['Image', 'Boxes', 'BoxInd'];\n this.outputShape = [];\n const [batch, imageHeight, imageWidth, depth] = imageShape;\n const [numBoxes,] = boxShape;\n const [cropHeight, cropWidth] = cropSize;\n this.outputShape = [numBoxes, cropHeight, cropWidth, depth];\n const methodId = method === 'bilinear' ? 
1 : 0;\n const [inputHeightFloat, inputWidthFloat] = [`${imageHeight - 1}.0`, `${imageWidth - 1}.0`];\n const [heightRatio, heightScale, inY] = cropHeight > 1 ?\n [\n `${(imageHeight - 1) / (cropHeight - 1)}`,\n '(y2-y1) * height_ratio',\n `y1*${inputHeightFloat} + float(y)*(height_scale)`,\n ] :\n [\n '0.0',\n '0.0',\n `0.5 * (y1+y2) * ${inputHeightFloat}`,\n ];\n const [widthRatio, widthScale, inX] = cropWidth > 1 ?\n [\n `${(imageWidth - 1) / (cropWidth - 1)}`,\n '(x2-x1) * width_ratio',\n `x1*${inputWidthFloat} + float(x)*(width_scale)`,\n ] :\n [\n '0.0',\n '0.0',\n `0.5 * (x1+x2) * ${inputWidthFloat}`,\n ];\n // Reference implementation\n // tslint:disable-next-line:max-line-length\n // https://github.com/tensorflow/tensorflow/blob/master/tensorflow/core/kernels/crop_and_resize_op_gpu.cu.cc\n this.userCode = `\n const float height_ratio = float(${heightRatio});\n const float width_ratio = float(${widthRatio});\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int y = coords[1];\n int x = coords[2];\n int d = coords[3];\n\n // get box vals\n float y1 = getBoxes(b,0);\n float x1 = getBoxes(b,1);\n float y2 = getBoxes(b,2);\n float x2 = getBoxes(b,3);\n\n // get image in batch index\n int bInd = round(getBoxInd(b));\n if(bInd < 0 || bInd >= ${batch}) {\n return;\n }\n\n float height_scale = ${heightScale};\n float width_scale = ${widthScale};\n\n float in_y = ${inY};\n if( in_y < 0.0 || in_y > ${inputHeightFloat} ) {\n setOutput(float(${extrapolationValue}));\n return;\n }\n float in_x = ${inX};\n if( in_x < 0.0 || in_x > ${inputWidthFloat} ) {\n setOutput(float(${extrapolationValue}));\n return;\n }\n\n vec2 sourceFracIndexCR = vec2(in_x,in_y);\n if(${methodId} == 1) {\n // Compute the four integer indices.\n ivec2 sourceFloorCR = ivec2(sourceFracIndexCR);\n ivec2 sourceCeilCR = ivec2(ceil(sourceFracIndexCR));\n\n float topLeft = getImage(b, sourceFloorCR.y, sourceFloorCR.x, d);\n float bottomLeft = getImage(b, sourceCeilCR.y, sourceFloorCR.x, d);\n float topRight = getImage(b, sourceFloorCR.y, sourceCeilCR.x, d);\n float bottomRight = getImage(b, sourceCeilCR.y, sourceCeilCR.x, d);\n\n vec2 fracCR = sourceFracIndexCR - vec2(sourceFloorCR);\n\n float top = topLeft + (topRight - topLeft) * fracCR.x;\n float bottom = bottomLeft + (bottomRight - bottomLeft) * fracCR.x;\n float newValue = top + (bottom - top) * fracCR.y;\n setOutput(newValue);\n } else {\n // Compute the coordinators of nearest neighbor point.\n ivec2 sourceNearestCR = ivec2(floor(\n sourceFracIndexCR + vec2(0.5,0.5)));\n float newValue = getImage(b, sourceNearestCR.y, sourceNearestCR.x, d);\n setOutput(newValue);\n }\n }\n `;\n }\n}\n//# sourceMappingURL=crop_and_resize_gpu.js.map", "import { getCoordsDataType } from './shader_compiler';\nexport class CumSumProgram {\n constructor(shape, exclusive, reverse) {\n this.variableNames = ['x'];\n this.outputShape = shape;\n const rank = shape.length;\n const val = exclusive ? '0.0' : `getX(${getCoords(rank, 'coords')})`;\n const length = shape[shape.length - 1];\n let condition = '';\n let idxString = '';\n // When exclusive is set, the cumsum op becomes roll op that copies the\n // value from the previous index based on the direction specified by the\n // reverse flag.\n if (exclusive) {\n condition = reverse ? `end != ${length - 1}` : 'end != 0';\n idxString = reverse ? 'end + 1' : 'end - 1';\n }\n else {\n condition = reverse ? `end + pow2 < ${length}` : 'end >= pow2';\n idxString = (reverse ? 
'end + pow2' : 'end - pow2');\n }\n this.userCode = `\n uniform float index;\n void main() {\n ${getCoordsDataType(rank)} coords = getOutputCoords();\n int end = ${getFinalCoord(rank, 'coords')};\n float val = ${val};\n int pow2 = int(pow(2.0, index));\n if (${condition}) {\n int idx = ${idxString};\n ${getFinalCoord(rank, 'coords')} = idx;\n val += getX(${getCoords(rank, 'coords')});\n }\n setOutput(val);\n }\n `;\n }\n getCustomSetupFunc(index) {\n return (gpgpu, webGLProgram) => {\n if (this.index == null) {\n this.index = gpgpu.getUniformLocation(webGLProgram, 'index');\n }\n gpgpu.gl.uniform1f(this.index, index);\n };\n }\n}\nfunction getCoords(rank, name) {\n if (rank === 1) {\n return `${name}`;\n }\n else if (rank === 2) {\n return `${name}.x, ${name}.y`;\n }\n else if (rank === 3) {\n return `${name}.x, ${name}.y, ${name}.z`;\n }\n else if (rank === 4) {\n return `${name}.x, ${name}.y, ${name}.z, ${name}.w`;\n }\n else {\n throw Error(`Cumulative sum for rank ${rank} is not yet supported`);\n }\n}\nfunction getFinalCoord(rank, name) {\n if (rank === 1) {\n return `${name}`;\n }\n else if (rank === 2) {\n return `${name}.y`;\n }\n else if (rank === 3) {\n return `${name}.z`;\n }\n else if (rank === 4) {\n return `${name}.w`;\n }\n else {\n throw Error(`Cumulative sum for rank ${rank} is not yet supported`);\n }\n}\n//# sourceMappingURL=cumsum_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getGlslDifferences } from './glsl_version';\nimport * as shader_util from './shader_compiler_util';\nimport { getDenseTexShape, PackingScheme } from './tex_util';\nexport class DecodeMatrixProgram {\n constructor(outputShape) {\n this.variableNames = ['A'];\n this.packedInputs = false;\n this.packedOutput = true;\n this.outPackingScheme = PackingScheme.DENSE;\n const texShape = getDenseTexShape(outputShape);\n const glsl = getGlslDifferences();\n this.outputShape = outputShape;\n this.userCode = `\n ivec3 outCoordsFromFlatIndex(int index) {\n ${shader_util.getLogicalCoordinatesFromFlatIndex(['r', 'c', 'd'], outputShape)}\n return ivec3(r, c, d);\n }\n\n void main() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = 4 * (resTexRC.x * ${texShape[1]} + resTexRC.y);\n\n vec4 result = vec4(0.);\n\n for (int i=0; i<4; i++) {\n int flatIndex = index + i;\n ivec3 rc = outCoordsFromFlatIndex(flatIndex);\n result[i] = getA(rc.x, rc.y, rc.z);\n }\n\n ${glsl.output} = result;\n }\n `;\n }\n}\n//# sourceMappingURL=decode_matrix_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getGlslDifferences } from './glsl_version';\nimport * as shader_util from './shader_compiler_util';\nimport { getDenseTexShape, PackingScheme } from './tex_util';\nexport class DecodeMatrixPackedProgram {\n constructor(outputShape) {\n this.variableNames = ['A'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outPackingScheme = PackingScheme.DENSE;\n const texShape = getDenseTexShape(outputShape);\n const glsl = getGlslDifferences();\n this.outputShape = outputShape;\n this.userCode = `\n ivec3 outCoordsFromFlatIndex(int index) {\n ${shader_util.getLogicalCoordinatesFromFlatIndex(['r', 'c', 'd'], outputShape)}\n return ivec3(r, c, d);\n }\n\n void main() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = 4 * (resTexRC.x * ${texShape[1]} + resTexRC.y);\n\n vec4 result = vec4(0.);\n\n for (int i=0; i<4; i++) {\n int flatIndex = index + i;\n ivec3 rc = outCoordsFromFlatIndex(flatIndex);\n result[i] = getChannel(getA(rc.x, rc.y, rc.z), vec2(rc.y, rc.z));\n }\n\n ${glsl.output} = result;\n }\n `;\n }\n}\n//# sourceMappingURL=decode_matrix_packed_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class DepthToSpaceProgram {\n constructor(outputShape, blockSize, dataFormat) {\n this.variableNames = ['x'];\n this.outputShape = [];\n this.outputShape = outputShape;\n this.blockSize = blockSize;\n this.dataFormat = dataFormat;\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int h = ${this.getHeightCoordString()};\n int w = ${this.getWidthCoordString()};\n int d = ${this.getDepthCoordString()};\n\n int in_h = h / ${blockSize};\n int offset_h = imod(h, ${blockSize});\n int in_w = w / ${blockSize};\n int offset_w = imod(w, ${blockSize});\n int offset_d = (offset_h * ${blockSize} + offset_w) *\n ${this.getOutputDepthSize()};\n int in_d = d + offset_d;\n\n float result = ${this.getInputSamplingString()};\n setOutput(result);\n }\n `;\n }\n getHeightCoordString() {\n if (this.dataFormat === 'NHWC') {\n return `coords[1]`;\n }\n else {\n return `coords[2]`;\n }\n }\n getWidthCoordString() {\n if (this.dataFormat === 'NHWC') {\n return `coords[2]`;\n }\n else {\n return `coords[3]`;\n }\n }\n getDepthCoordString() {\n if (this.dataFormat === 'NHWC') {\n return `coords[3]`;\n }\n else {\n return `coords[1]`;\n }\n }\n getOutputDepthSize() {\n if (this.dataFormat === 'NHWC') {\n return this.outputShape[3];\n }\n else {\n return this.outputShape[1];\n }\n }\n getInputSamplingString() {\n if (this.dataFormat === 'NHWC') {\n return `getX(b, in_h, in_w, in_d)`;\n }\n else {\n return `getX(b, in_d, in_h, in_w)`;\n }\n }\n}\n//# sourceMappingURL=depth_to_space_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class DiagProgram {\n constructor(size) {\n this.variableNames = ['X'];\n this.outputShape = [size, size];\n this.userCode = `\n void main() {\n ivec2 coords = getOutputCoords();\n float val = coords[0] == coords[1] ? getX(coords[0]) : 0.0;\n setOutput(val);\n }\n `;\n }\n}\n//# sourceMappingURL=diag_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getGlslDifferences } from './glsl_version';\nimport { ENCODE_FLOAT_SNIPPET } from './shader_compiler_util';\nimport { TextureUsage } from './tex_util';\nexport class EncodeFloatProgram {\n constructor(outputShape) {\n this.variableNames = ['A'];\n this.outTexUsage = TextureUsage.DOWNLOAD;\n const glsl = getGlslDifferences();\n this.outputShape = outputShape;\n this.userCode = `\n ${ENCODE_FLOAT_SNIPPET}\n\n void main() {\n float x = getAAtOutCoords();\n ${glsl.output} = encode_float(x);\n }\n `;\n }\n}\n//# sourceMappingURL=encode_float_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getGlslDifferences } from './glsl_version';\nimport { ENCODE_FLOAT_SNIPPET } from './shader_compiler_util';\nimport { TextureUsage } from './tex_util';\nexport class EncodeFloatPackedProgram {\n constructor(outputShape) {\n this.variableNames = ['A'];\n this.packedInputs = true;\n this.packedOutput = false;\n this.outTexUsage = TextureUsage.DOWNLOAD;\n const glsl = getGlslDifferences();\n this.outputShape = outputShape;\n this.userCode = `\n ${ENCODE_FLOAT_SNIPPET}\n\n void main() {\n ivec3 coords = getOutputCoords();\n float x = getChannel(getAAtOutCoords(), vec2(coords.y, coords.z));\n ${glsl.output} = encode_float(x);\n }\n `;\n }\n}\n//# sourceMappingURL=encode_float_packed_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getGlslDifferences } from './glsl_version';\nimport * as shader_util from './shader_compiler_util';\nexport class EncodeMatrixProgram {\n constructor(outputShape, texShape, inputIsUnsignedByte = false) {\n this.variableNames = ['A'];\n const glsl = getGlslDifferences();\n const [height, width] = texShape;\n this.outputShape = outputShape;\n let output = `result`;\n if (inputIsUnsignedByte) {\n output = `floor(result * 255. + 0.5)`;\n }\n this.userCode = `\n ${shader_util.getFlatIndexFrom3D(outputShape)}\n\n void main() {\n ivec3 coords = getOutputCoords();\n\n int flatIndex = getFlatIndex(coords);\n int offset = imod(flatIndex, 4);\n\n flatIndex = idiv(flatIndex, 4, 1.);\n\n int r = flatIndex / ${width};\n int c = imod(flatIndex, ${width});\n vec2 uv = (vec2(c, r) + halfCR) / vec2(${width}.0, ${height}.0);\n vec4 values = ${glsl.texture2D}(A, uv);\n\n float result;\n\n if(offset == 0) {\n result = values[0];\n } else if(offset == 1) {\n result = values[1];\n } else if(offset == 2) {\n result = values[2];\n } else {\n result = values[3];\n }\n\n ${glsl.output} = vec4(${output}, 0., 0., 0.);\n }\n `;\n }\n}\n//# sourceMappingURL=encode_matrix_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getGlslDifferences } from './glsl_version';\nimport * as shader_util from './shader_compiler_util';\n/*\nThis is how the shader encodes a tensor with shape = [2, 3, 5]\n(indices are [batch, row, col]).\n\n000|001 002|003 004|xxx 020|021 022|023 024|xxx\n------- ------- ------- ------- ------- -------\n010|011 012|013 014|xxx xxx|xxx xxx|xxx xxx|xxx\n\n100|101 102|103 104|xxx 120|121 122|123 124|xxx\n------- ------- ------- ------- ------- -------\n110|111 112|113 114|xxx xxx|xxx xxx|xxx xxx|xxx\n\nSingle texels contain only values from the same batch, and from adjacent rows\nand columns.\n */\nexport class EncodeMatrixPackedProgram {\n constructor(outputShape, texShape, inputIsUnsignedByte = false) {\n this.variableNames = ['A'];\n this.packedInputs = false;\n this.packedOutput = true;\n const glsl = getGlslDifferences();\n const [height, width] = texShape;\n this.outputShape = outputShape;\n let mainLoop = '';\n let output = 'result';\n if (inputIsUnsignedByte) {\n output = 'floor(result * 255. + 0.5)';\n }\n for (let row = 0; row <= 1; row++) {\n for (let col = 0; col <= 1; col++) {\n const channel = row * 2 + col;\n mainLoop += `\n localCoords = coords;\n if(localCoords[2] + ${col} < ${outputShape[2]}) {\n localCoords[2] += ${col};\n if(localCoords[1] + ${row} < ${outputShape[1]}) {\n localCoords[1] += ${row};\n\n flatIndex = getFlatIndex(localCoords);\n offset = imod(flatIndex, 4);\n\n flatIndex = idiv(flatIndex, 4, 1.);\n\n r = flatIndex / ${width};\n c = imod(flatIndex, ${width});\n uv = (vec2(c, r) + halfCR) / vec2(${width}.0, ${height}.0);\n values = ${glsl.texture2D}(A, uv);\n\n if(offset == 0) {\n result[${channel}] = values[0];\n } else if(offset == 1) {\n result[${channel}] = values[1];\n } else if(offset == 2) {\n result[${channel}] = values[2];\n } else {\n result[${channel}] = values[3];\n }\n }\n }\n `;\n }\n }\n this.userCode = `\n ${shader_util.getFlatIndexFrom3D(outputShape)}\n\n void main() {\n ivec3 coords = getOutputCoords();\n\n vec4 result = vec4(0.);\n int flatIndex, r, c, offset;\n ivec3 localCoords;\n vec2 uv;\n vec4 values;\n\n ${mainLoop}\n\n ${glsl.output} = ${output};\n }\n `;\n }\n}\n//# sourceMappingURL=encode_matrix_packed_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class FillProgram {\n constructor(shape, value) {\n this.outputShape = [];\n this.variableNames = ['x'];\n this.outputShape = shape;\n this.userCode = `\n uniform float value;\n void main() {\n // Input can be obtained from uniform value.\n setOutput(value);\n }\n `;\n }\n getCustomSetupFunc(value) {\n return (gpgpu, webGLProgram) => {\n if (this.valueLoc == null) {\n this.valueLoc = gpgpu.getUniformLocationNoThrow(webGLProgram, 'value');\n }\n gpgpu.gl.uniform1f(this.valueLoc, value);\n };\n }\n}\n//# sourceMappingURL=fill_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getCoordsDataType } from './shader_compiler';\nexport class GatherProgram {\n constructor(aShape, indicesLength, axis) {\n this.variableNames = ['A', 'indices'];\n const outputShape = aShape.slice();\n outputShape[axis] = indicesLength;\n this.outputShape = outputShape;\n this.rank = outputShape.length;\n const dtype = getCoordsDataType(this.rank);\n const sourceCoords = getSourceCoords(aShape, axis);\n this.userCode = `\n void main() {\n ${dtype} resRC = getOutputCoords();\n setOutput(getA(${sourceCoords}));\n }\n `;\n }\n}\nfunction getSourceCoords(aShape, axis) {\n const rank = aShape.length;\n if (rank > 4) {\n throw Error(`Gather for rank ${rank} is not yet supported`);\n }\n if (rank === 1) {\n return `int(getIndices(resRC))`;\n }\n const currentCoords = ['resRC.x', 'resRC.y', 'resRC.z', 'resRC.w'];\n const sourceCoords = [];\n for (let i = 0; i < aShape.length; i++) {\n if (i === axis) {\n sourceCoords.push(`int(getIndices(${currentCoords[i]}))`);\n }\n else {\n sourceCoords.push(`${currentCoords[i]}`);\n }\n }\n return sourceCoords.join();\n}\n//# sourceMappingURL=gather_gpu.js.map", "import { getCoordsDataType } from './shader_compiler';\nexport class GatherNDProgram {\n constructor(sliceDim, strides, shape) {\n this.sliceDim = sliceDim;\n this.strides = strides;\n this.variableNames = ['x', 'indices'];\n this.outputShape = shape;\n const stridesType = getCoordsDataType(strides.length);\n const dtype = getCoordsDataType(shape.length);\n const strideString = this.sliceDim > 1 ? 
'strides[j]' : 'strides';\n this.userCode = `\n ${stridesType} strides = ${stridesType}(${this.strides});\n void main() {\n ${dtype} coords = getOutputCoords();\n int flattenIndex = 0;\n for (int j = 0; j < ${this.sliceDim}; j++) {\n int index = round(getIndices(coords[0], j));\n flattenIndex += index * ${strideString};\n }\n setOutput(getX(flattenIndex, coords[1]));\n }\n `;\n }\n}\n//# sourceMappingURL=gather_nd_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getGlslDifferences } from './glsl_version';\nimport * as tex_util from './tex_util';\nimport * as webgl_util from './webgl_util';\nexport function createVertexShader(gl) {\n const glsl = getGlslDifferences();\n const vertexShaderSource = `${glsl.version}\n precision highp float;\n ${glsl.attribute} vec3 clipSpacePos;\n ${glsl.attribute} vec2 uv;\n ${glsl.varyingVs} vec2 resultUV;\n\n void main() {\n gl_Position = vec4(clipSpacePos, 1);\n resultUV = uv;\n }`;\n return webgl_util.createVertexShader(gl, vertexShaderSource);\n}\nexport function createVertexBuffer(gl) {\n // [x y z u v] * [upper-left, lower-left, upper-right, lower-right]\n const vertexArray = new Float32Array([-1, 1, 0, 0, 1, -1, -1, 0, 0, 0, 1, 1, 0, 1, 1, 1, -1, 0, 1, 0]);\n return webgl_util.createStaticVertexBuffer(gl, vertexArray);\n}\nexport function createIndexBuffer(gl) {\n // OpenGL (and WebGL) have \"CCW == front\" winding\n const triangleVertexIndices = new Uint16Array([0, 1, 2, 2, 1, 3]);\n return webgl_util.createStaticIndexBuffer(gl, triangleVertexIndices);\n}\nfunction createAndConfigureTexture(gl, width, height, internalFormat, textureFormat, textureType) {\n webgl_util.validateTextureSize(width, height);\n const texture = webgl_util.createTexture(gl);\n const tex2d = gl.TEXTURE_2D;\n webgl_util.callAndCheck(gl, () => gl.bindTexture(tex2d, texture));\n webgl_util.callAndCheck(gl, () => gl.texParameteri(tex2d, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE));\n webgl_util.callAndCheck(gl, () => gl.texParameteri(tex2d, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE));\n webgl_util.callAndCheck(gl, () => gl.texParameteri(tex2d, gl.TEXTURE_MIN_FILTER, gl.NEAREST));\n webgl_util.callAndCheck(gl, () => gl.texParameteri(tex2d, gl.TEXTURE_MAG_FILTER, gl.NEAREST));\n webgl_util.callAndCheck(gl, () => gl.texImage2D(tex2d, 0, internalFormat, width, height, 0, textureFormat, textureType, null));\n webgl_util.callAndCheck(gl, () => gl.bindTexture(gl.TEXTURE_2D, null));\n return texture;\n}\nexport function getInternalFormatForFloat32MatrixTexture(textureConfig) {\n return textureConfig.internalFormatFloat;\n}\nexport function createFloat32MatrixTexture(gl, rows, columns, textureConfig) {\n const [width, height] = tex_util.getUnpackedMatrixTextureShapeWidthHeight(rows, columns);\n return createAndConfigureTexture(gl, width, height, getInternalFormatForFloat32MatrixTexture(textureConfig), 
textureConfig.textureFormatFloat, gl.FLOAT);\n}\nexport function getInternalFormatForFloat16MatrixTexture(textureConfig) {\n return textureConfig.internalFormatHalfFloat;\n}\nexport function createFloat16MatrixTexture(gl, rows, columns, textureConfig) {\n const [width, height] = tex_util.getUnpackedMatrixTextureShapeWidthHeight(rows, columns);\n return createAndConfigureTexture(gl, width, height, getInternalFormatForFloat16MatrixTexture(textureConfig), textureConfig.textureFormatFloat, textureConfig.textureTypeHalfFloat);\n}\nexport function getInternalFormatForUnsignedBytesMatrixTexture(textureConfig) {\n return textureConfig.downloadTextureFormat;\n}\nexport function createUnsignedBytesMatrixTexture(gl, rows, columns, textureConfig) {\n const [width, height] = tex_util.getUnpackedMatrixTextureShapeWidthHeight(rows, columns);\n return createAndConfigureTexture(gl, width, height, getInternalFormatForUnsignedBytesMatrixTexture(textureConfig), gl.RGBA, gl.UNSIGNED_BYTE);\n}\nexport function getInternalFormatForPackedMatrixTexture(textureConfig) {\n return textureConfig.internalFormatPackedFloat;\n}\nexport function createPackedMatrixTexture(gl, rows, columns, textureConfig) {\n const [width, height] = tex_util.getPackedMatrixTextureShapeWidthHeight(rows, columns);\n return createAndConfigureTexture(gl, width, height, getInternalFormatForPackedMatrixTexture(textureConfig), gl.RGBA, gl.FLOAT);\n}\nexport function getInternalFormatForFloat16PackedMatrixTexture(textureConfig) {\n return textureConfig.internalFormatPackedHalfFloat;\n}\nexport function createFloat16PackedMatrixTexture(gl, rows, columns, textureConfig) {\n const [width, height] = tex_util.getPackedMatrixTextureShapeWidthHeight(rows, columns);\n return createAndConfigureTexture(gl, width, height, getInternalFormatForFloat16PackedMatrixTexture(textureConfig), gl.RGBA, textureConfig.textureTypeHalfFloat);\n}\nexport function bindVertexProgramAttributeStreams(gl, program, vertexBuffer) {\n const posOffset = 0; // x is the first buffer element\n const uvOffset = 3 * 4; // uv comes after [x y z]\n const stride = (3 * 4) + (2 * 4); // xyz + uv, each entry is 4-byte float.\n webgl_util.callAndCheck(gl, () => gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer));\n const success = webgl_util.bindVertexBufferToProgramAttribute(gl, program, 'clipSpacePos', vertexBuffer, 3, stride, posOffset);\n return success &&\n webgl_util.bindVertexBufferToProgramAttribute(gl, program, 'uv', vertexBuffer, 2, stride, uvOffset);\n}\nexport function uploadDenseMatrixToTexture(gl, texture, width, height, data, textureConfig) {\n webgl_util.callAndCheck(gl, () => gl.bindTexture(gl.TEXTURE_2D, texture));\n let dataForUpload, texelDataType, internalFormat;\n if (data instanceof Uint8Array) {\n dataForUpload = new Uint8Array(width * height * 4);\n texelDataType = gl.UNSIGNED_BYTE;\n internalFormat = gl.RGBA;\n }\n else {\n dataForUpload = new Float32Array(width * height * 4);\n texelDataType = gl.FLOAT;\n internalFormat = textureConfig.internalFormatPackedFloat;\n }\n dataForUpload.set(data);\n webgl_util.callAndCheck(gl, () => gl.texImage2D(gl.TEXTURE_2D, 0, internalFormat, width, height, 0, gl.RGBA, texelDataType, dataForUpload));\n webgl_util.callAndCheck(gl, () => gl.bindTexture(gl.TEXTURE_2D, null));\n}\nexport function uploadPixelDataToTexture(gl, texture, pixels) {\n webgl_util.callAndCheck(gl, () => gl.bindTexture(gl.TEXTURE_2D, texture));\n if (pixels.data instanceof Uint8Array) {\n webgl_util.callAndCheck(gl, () => gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 
pixels.width, pixels.height, 0, gl.RGBA, gl.UNSIGNED_BYTE, pixels.data));\n }\n else {\n webgl_util.callAndCheck(gl, () => gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, pixels));\n }\n webgl_util.callAndCheck(gl, () => gl.bindTexture(gl.TEXTURE_2D, null));\n}\nexport function createBufferFromOutputTexture(gl2, rows, columns, textureConfig) {\n // Create and bind the buffer.\n const buffer = gl2.createBuffer();\n webgl_util.callAndCheck(gl2, () => gl2.bindBuffer(gl2.PIXEL_PACK_BUFFER, buffer));\n // Initialize the buffer to the size of the texture in bytes.\n const bytesPerFloat = 4;\n const valuesPerTexel = 4;\n const bufferSizeBytes = bytesPerFloat * valuesPerTexel * rows * columns;\n webgl_util.callAndCheck(gl2, () => gl2.bufferData(gl2.PIXEL_PACK_BUFFER, bufferSizeBytes, gl2.STREAM_READ));\n // Enqueue a command on the GPU command queue to copy of texture into the\n // buffer.\n webgl_util.callAndCheck(gl2, () => gl2.readPixels(0, 0, columns, rows, gl2.RGBA, gl2.FLOAT, 0));\n webgl_util.callAndCheck(gl2, () => gl2.bindBuffer(gl2.PIXEL_PACK_BUFFER, null));\n return buffer;\n}\nexport function downloadFloat32MatrixFromBuffer(gl, buffer, size) {\n const gl2 = gl;\n const downloadTarget = new Float32Array(size);\n gl2.bindBuffer(gl2.PIXEL_PACK_BUFFER, buffer);\n gl2.getBufferSubData(gl2.PIXEL_PACK_BUFFER, 0, downloadTarget);\n gl2.bindBuffer(gl2.PIXEL_PACK_BUFFER, null);\n return downloadTarget;\n}\nexport function downloadByteEncodedFloatMatrixFromOutputTexture(gl, rows, columns, textureConfig) {\n const [w, h] = tex_util.getUnpackedMatrixTextureShapeWidthHeight(rows, columns);\n const numChannels = 4;\n const downloadTarget = new Uint8Array(tex_util.getUnpackedArraySizeFromMatrixSize(rows * columns, numChannels));\n webgl_util.callAndCheck(gl, () => gl.readPixels(0, 0, w, h, textureConfig.downloadTextureFormat, gl.UNSIGNED_BYTE, downloadTarget));\n // By wrapping the buffer in a Float32Array, we use native browser IEEE 754\n // decoding of the 4 bytes that back each 32 bit float.\n return new Float32Array(downloadTarget.buffer);\n}\nexport function downloadPackedMatrixFromBuffer(gl, buffer, batch, rows, cols, physicalRows, physicalCols, textureConfig) {\n const gl2 = gl;\n const downloadTarget = new Float32Array(tex_util.getPackedRGBAArraySizeFromMatrixShape(physicalRows, physicalCols));\n gl2.bindBuffer(gl2.PIXEL_PACK_BUFFER, buffer);\n gl2.getBufferSubData(gl2.PIXEL_PACK_BUFFER, 0, downloadTarget);\n gl2.bindBuffer(gl2.PIXEL_PACK_BUFFER, null);\n return downloadTarget;\n}\nexport function downloadMatrixFromPackedOutputTexture(gl, physicalRows, physicalCols) {\n const packedRGBA = new Float32Array(physicalRows * physicalCols * 4);\n webgl_util.callAndCheck(gl, () => gl.readPixels(0, 0, physicalCols, physicalRows, gl.RGBA, gl.FLOAT, packedRGBA));\n return packedRGBA;\n}\n//# sourceMappingURL=gpgpu_util.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env, util } from '@tensorflow/tfjs-core';\nimport { getWebGLContext, setWebGLContext } from './canvas_util';\nimport * as gpgpu_util from './gpgpu_util';\nimport * as tex_util from './tex_util';\nimport * as webgl_util from './webgl_util';\nexport class GPGPUContext {\n constructor(gl) {\n this.outputTexture = null;\n this.program = null;\n this.disposed = false;\n this.vertexAttrsAreBound = false;\n this.itemsToPoll = [];\n const glVersion = env().getNumber('WEBGL_VERSION');\n if (gl != null) {\n this.gl = gl;\n setWebGLContext(glVersion, gl);\n }\n else {\n this.gl = getWebGLContext(glVersion);\n }\n // WebGL 2.0 enables texture floats without an extension.\n let COLOR_BUFFER_FLOAT = 'WEBGL_color_buffer_float';\n const COLOR_BUFFER_HALF_FLOAT = 'EXT_color_buffer_half_float';\n if (env().getNumber('WEBGL_VERSION') === 1) {\n const TEXTURE_FLOAT = 'OES_texture_float';\n const TEXTURE_HALF_FLOAT = 'OES_texture_half_float';\n this.textureFloatExtension =\n webgl_util.getExtensionOrThrow(this.gl, TEXTURE_FLOAT);\n if (webgl_util.hasExtension(this.gl, TEXTURE_HALF_FLOAT)) {\n this.textureHalfFloatExtension =\n webgl_util.getExtensionOrThrow(this.gl, TEXTURE_HALF_FLOAT);\n }\n else if (env().get('WEBGL_FORCE_F16_TEXTURES')) {\n throw new Error('GL context does not support half float textures, yet the ' +\n 'environment flag WEBGL_FORCE_F16_TEXTURES is set to true.');\n }\n this.colorBufferFloatExtension = this.gl.getExtension(COLOR_BUFFER_FLOAT);\n if (webgl_util.hasExtension(this.gl, COLOR_BUFFER_HALF_FLOAT)) {\n this.colorBufferHalfFloatExtension =\n webgl_util.getExtensionOrThrow(this.gl, COLOR_BUFFER_HALF_FLOAT);\n }\n else if (env().get('WEBGL_FORCE_F16_TEXTURES')) {\n throw new Error('GL context does not support color renderable half floats, yet ' +\n 'the environment flag WEBGL_FORCE_F16_TEXTURES is set to true.');\n }\n }\n else {\n COLOR_BUFFER_FLOAT = 'EXT_color_buffer_float';\n if (webgl_util.hasExtension(this.gl, COLOR_BUFFER_FLOAT)) {\n this.colorBufferFloatExtension =\n this.gl.getExtension(COLOR_BUFFER_FLOAT);\n }\n else if (webgl_util.hasExtension(this.gl, COLOR_BUFFER_HALF_FLOAT)) {\n this.colorBufferHalfFloatExtension =\n this.gl.getExtension(COLOR_BUFFER_HALF_FLOAT);\n }\n else {\n throw new Error('GL context does not support color renderable floats');\n }\n }\n this.vertexBuffer = gpgpu_util.createVertexBuffer(this.gl);\n this.indexBuffer = gpgpu_util.createIndexBuffer(this.gl);\n this.framebuffer = webgl_util.createFramebuffer(this.gl);\n this.textureConfig =\n tex_util.getTextureConfig(this.gl, this.textureHalfFloatExtension);\n }\n get debug() {\n return env().getBool('DEBUG');\n }\n dispose() {\n if (this.disposed) {\n return;\n }\n if (this.program != null) {\n console.warn('Disposing a GPGPUContext that still has a bound WebGLProgram.' 
+\n ' This is probably a resource leak, delete the program with ' +\n 'GPGPUContext.deleteProgram before disposing.');\n }\n if (this.outputTexture != null) {\n console.warn('Disposing a GPGPUContext that still has a bound output matrix ' +\n 'texture. This is probably a resource leak, delete the output ' +\n 'matrix texture with GPGPUContext.deleteMatrixTexture before ' +\n 'disposing.');\n }\n const gl = this.gl;\n webgl_util.callAndCheck(gl, () => gl.finish());\n webgl_util.callAndCheck(gl, () => gl.bindFramebuffer(gl.FRAMEBUFFER, null));\n webgl_util.callAndCheck(gl, () => gl.deleteFramebuffer(this.framebuffer));\n webgl_util.callAndCheck(gl, () => gl.bindBuffer(gl.ARRAY_BUFFER, null));\n webgl_util.callAndCheck(gl, () => gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null));\n webgl_util.callAndCheck(gl, () => gl.deleteBuffer(this.indexBuffer));\n this.disposed = true;\n }\n createFloat32MatrixTexture(rows, columns) {\n this.throwIfDisposed();\n return gpgpu_util.createFloat32MatrixTexture(this.gl, rows, columns, this.textureConfig);\n }\n createFloat16MatrixTexture(rows, columns) {\n this.throwIfDisposed();\n return gpgpu_util.createFloat16MatrixTexture(this.gl, rows, columns, this.textureConfig);\n }\n createUnsignedBytesMatrixTexture(rows, columns) {\n this.throwIfDisposed();\n return gpgpu_util.createUnsignedBytesMatrixTexture(this.gl, rows, columns, this.textureConfig);\n }\n uploadPixelDataToTexture(texture, pixels) {\n this.throwIfDisposed();\n gpgpu_util.uploadPixelDataToTexture(this.gl, texture, pixels);\n }\n uploadDenseMatrixToTexture(texture, width, height, data) {\n this.throwIfDisposed();\n gpgpu_util.uploadDenseMatrixToTexture(this.gl, texture, width, height, data, this.textureConfig);\n }\n createFloat16PackedMatrixTexture(rows, columns) {\n this.throwIfDisposed();\n return gpgpu_util.createFloat16PackedMatrixTexture(this.gl, rows, columns, this.textureConfig);\n }\n createPackedMatrixTexture(rows, columns) {\n this.throwIfDisposed();\n return gpgpu_util.createPackedMatrixTexture(this.gl, rows, columns, this.textureConfig);\n }\n deleteMatrixTexture(texture) {\n this.throwIfDisposed();\n if (this.outputTexture === texture) {\n webgl_util.unbindColorTextureFromFramebuffer(this.gl, this.framebuffer);\n this.outputTexture = null;\n }\n webgl_util.callAndCheck(this.gl, () => this.gl.deleteTexture(texture));\n }\n downloadByteEncodedFloatMatrixFromOutputTexture(texture, rows, columns) {\n return this.downloadMatrixDriver(texture, () => gpgpu_util.downloadByteEncodedFloatMatrixFromOutputTexture(this.gl, rows, columns, this.textureConfig));\n }\n downloadPackedMatrixFromBuffer(buffer, batch, rows, columns, physicalRows, physicalCols) {\n return gpgpu_util.downloadPackedMatrixFromBuffer(this.gl, buffer, batch, rows, columns, physicalRows, physicalCols, this.textureConfig);\n }\n downloadFloat32MatrixFromBuffer(buffer, size) {\n return gpgpu_util.downloadFloat32MatrixFromBuffer(this.gl, buffer, size);\n }\n createBufferFromTexture(texture, rows, columns) {\n this.bindTextureToFrameBuffer(texture);\n const result = gpgpu_util.createBufferFromOutputTexture(this.gl, rows, columns, this.textureConfig);\n this.unbindTextureToFrameBuffer();\n return result;\n }\n createAndWaitForFence() {\n const fenceContext = this.createFence(this.gl);\n return this.pollFence(fenceContext);\n }\n createFence(gl) {\n let query;\n let isFencePassed;\n if (env().getBool('WEBGL_FENCE_API_ENABLED')) {\n const gl2 = gl;\n const sync = gl2.fenceSync(gl2.SYNC_GPU_COMMANDS_COMPLETE, 0);\n gl.flush();\n 
isFencePassed = () => {\n const status = gl2.clientWaitSync(sync, 0, 0);\n return status === gl2.ALREADY_SIGNALED ||\n status === gl2.CONDITION_SATISFIED;\n };\n query = sync;\n }\n else if (env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION') > 0) {\n query = this.beginQuery();\n this.endQuery();\n isFencePassed = () => this.isQueryAvailable(query, env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION'));\n }\n else {\n // If we have no way to fence, return true immediately. This will fire in\n // WebGL 1.0 when there is no disjoint query timer. In this case, because\n // the fence passes immediately, we'll immediately ask for a download of\n // the texture, which will cause the UI thread to hang.\n isFencePassed = () => true;\n }\n return { query, isFencePassed };\n }\n downloadMatrixFromPackedTexture(texture, physicalRows, physicalCols) {\n return this.downloadMatrixDriver(texture, () => gpgpu_util.downloadMatrixFromPackedOutputTexture(this.gl, physicalRows, physicalCols));\n }\n createProgram(fragmentShaderSource) {\n this.throwIfDisposed();\n const gl = this.gl;\n const fragmentShader = webgl_util.createFragmentShader(gl, fragmentShaderSource);\n const vertexShader = gpgpu_util.createVertexShader(gl);\n const program = webgl_util.createProgram(gl);\n webgl_util.callAndCheck(gl, () => gl.attachShader(program, vertexShader));\n webgl_util.callAndCheck(gl, () => gl.attachShader(program, fragmentShader));\n webgl_util.linkProgram(gl, program);\n if (this.debug) {\n webgl_util.validateProgram(gl, program);\n }\n if (!this.vertexAttrsAreBound) {\n this.setProgram(program);\n this.vertexAttrsAreBound = gpgpu_util.bindVertexProgramAttributeStreams(gl, this.program, this.vertexBuffer);\n }\n return program;\n }\n deleteProgram(program) {\n this.throwIfDisposed();\n if (program === this.program) {\n this.program = null;\n }\n if (program != null) {\n webgl_util.callAndCheck(this.gl, () => this.gl.deleteProgram(program));\n }\n }\n setProgram(program) {\n this.throwIfDisposed();\n this.program = program;\n if ((this.program != null) && this.debug) {\n webgl_util.validateProgram(this.gl, this.program);\n }\n webgl_util.callAndCheck(this.gl, () => this.gl.useProgram(program));\n }\n getUniformLocation(program, uniformName, shouldThrow = true) {\n this.throwIfDisposed();\n if (shouldThrow) {\n return webgl_util.getProgramUniformLocationOrThrow(this.gl, program, uniformName);\n }\n else {\n return webgl_util.getProgramUniformLocation(this.gl, program, uniformName);\n }\n }\n getAttributeLocation(program, attribute) {\n this.throwIfDisposed();\n return webgl_util.callAndCheck(this.gl, () => this.gl.getAttribLocation(program, attribute));\n }\n getUniformLocationNoThrow(program, uniformName) {\n this.throwIfDisposed();\n return this.gl.getUniformLocation(program, uniformName);\n }\n setInputMatrixTexture(inputMatrixTexture, uniformLocation, textureUnit) {\n this.throwIfDisposed();\n this.throwIfNoProgram();\n webgl_util.bindTextureToProgramUniformSampler(this.gl, inputMatrixTexture, uniformLocation, textureUnit);\n }\n setOutputMatrixTexture(outputMatrixTexture, rows, columns) {\n this.setOutputMatrixTextureDriver(outputMatrixTexture, columns, rows);\n }\n setOutputPackedMatrixTexture(outputPackedMatrixTexture, rows, columns) {\n this.throwIfDisposed();\n const [width, height] = tex_util.getPackedMatrixTextureShapeWidthHeight(rows, columns);\n this.setOutputMatrixTextureDriver(outputPackedMatrixTexture, width, height);\n }\n setOutputMatrixWriteRegion(startRow, numRows, startColumn, 
numColumns) {\n this.setOutputMatrixWriteRegionDriver(startColumn, startRow, numColumns, numRows);\n }\n setOutputPackedMatrixWriteRegion(startRow, numRows, startColumn, numColumns) {\n throw new Error('setOutputPackedMatrixWriteRegion not implemented.');\n }\n debugValidate() {\n if (this.program != null) {\n webgl_util.validateProgram(this.gl, this.program);\n }\n webgl_util.validateFramebuffer(this.gl);\n }\n executeProgram() {\n this.throwIfDisposed();\n this.throwIfNoProgram();\n const gl = this.gl;\n if (this.debug) {\n this.debugValidate();\n }\n webgl_util.callAndCheck(gl, () => gl.drawElements(gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0));\n }\n blockUntilAllProgramsCompleted() {\n this.throwIfDisposed();\n webgl_util.callAndCheck(this.gl, () => this.gl.finish());\n }\n getQueryTimerExtension() {\n if (this.disjointQueryTimerExtension == null) {\n this.disjointQueryTimerExtension =\n webgl_util.getExtensionOrThrow(this.gl, env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION') === 2 ?\n 'EXT_disjoint_timer_query_webgl2' :\n 'EXT_disjoint_timer_query');\n }\n return this.disjointQueryTimerExtension;\n }\n getQueryTimerExtensionWebGL2() {\n return this.getQueryTimerExtension();\n }\n getQueryTimerExtensionWebGL1() {\n return this.getQueryTimerExtension();\n }\n beginQuery() {\n if (env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION') === 2) {\n const gl2 = this.gl;\n const ext = this.getQueryTimerExtensionWebGL2();\n const query = gl2.createQuery();\n gl2.beginQuery(ext.TIME_ELAPSED_EXT, query);\n return query;\n }\n const ext = this.getQueryTimerExtensionWebGL1();\n const query = ext.createQueryEXT();\n ext.beginQueryEXT(ext.TIME_ELAPSED_EXT, query);\n return query;\n }\n endQuery() {\n if (env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION') === 2) {\n const gl2 = this.gl;\n const ext = this.getQueryTimerExtensionWebGL2();\n gl2.endQuery(ext.TIME_ELAPSED_EXT);\n return;\n }\n const ext = this.getQueryTimerExtensionWebGL1();\n ext.endQueryEXT(ext.TIME_ELAPSED_EXT);\n }\n async waitForQueryAndGetTime(query) {\n await util.repeatedTry(() => this.disposed || // while testing contexts are created / disposed\n // in rapid succession, so without this check we\n // may poll for the query timer indefinitely\n this.isQueryAvailable(query, env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION')));\n return this.getQueryTime(query, env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION'));\n }\n getQueryTime(query, queryTimerVersion) {\n if (queryTimerVersion === 0) {\n return null;\n }\n if (queryTimerVersion === 2) {\n const gl2 = this.gl;\n const timeElapsedNanos = gl2.getQueryParameter(query, gl2.QUERY_RESULT);\n // Return milliseconds.\n return timeElapsedNanos / 1000000;\n }\n else {\n const ext = this.getQueryTimerExtensionWebGL1();\n const timeElapsedNanos = ext.getQueryObjectEXT(query, ext.QUERY_RESULT_EXT);\n // Return milliseconds.\n return timeElapsedNanos / 1000000;\n }\n }\n isQueryAvailable(query, queryTimerVersion) {\n if (queryTimerVersion === 0) {\n return true;\n }\n if (queryTimerVersion === 2) {\n const gl2 = this.gl;\n const ext = this.getQueryTimerExtensionWebGL2();\n const available = gl2.getQueryParameter(query, gl2.QUERY_RESULT_AVAILABLE);\n if (this.disjoint == null) {\n this.disjoint = this.gl.getParameter(ext.GPU_DISJOINT_EXT);\n }\n return available && !this.disjoint;\n }\n else {\n const ext = this.getQueryTimerExtensionWebGL1();\n const available = ext.getQueryObjectEXT(query, ext.QUERY_RESULT_AVAILABLE_EXT);\n if 
(this.disjoint == null) {\n this.disjoint = this.gl.getParameter(ext.GPU_DISJOINT_EXT);\n }\n return available && !this.disjoint;\n }\n }\n pollFence(fenceContext) {\n return new Promise(resolve => {\n this.addItemToPoll(() => fenceContext.isFencePassed(), () => resolve());\n });\n }\n pollItems() {\n // Find the last query that has finished.\n const index = linearSearchLastTrue(this.itemsToPoll.map(x => x.isDoneFn));\n for (let i = 0; i <= index; ++i) {\n const { resolveFn } = this.itemsToPoll[i];\n resolveFn();\n }\n this.itemsToPoll = this.itemsToPoll.slice(index + 1);\n }\n addItemToPoll(isDoneFn, resolveFn) {\n this.itemsToPoll.push({ isDoneFn, resolveFn });\n if (this.itemsToPoll.length > 1) {\n // We already have a running loop that polls.\n return;\n }\n // Start a new loop that polls.\n util.repeatedTry(() => {\n this.pollItems();\n // End the loop if no more items to poll.\n return this.itemsToPoll.length === 0;\n });\n }\n bindTextureToFrameBuffer(texture) {\n this.throwIfDisposed();\n webgl_util.bindColorTextureToFramebuffer(this.gl, texture, this.framebuffer);\n if (this.debug) {\n webgl_util.validateFramebuffer(this.gl);\n }\n }\n unbindTextureToFrameBuffer() {\n if (this.outputTexture != null) {\n webgl_util.bindColorTextureToFramebuffer(this.gl, this.outputTexture, this.framebuffer);\n if (this.debug) {\n webgl_util.validateFramebuffer(this.gl);\n }\n }\n else {\n webgl_util.unbindColorTextureFromFramebuffer(this.gl, this.framebuffer);\n }\n }\n downloadMatrixDriver(texture, downloadAndDecode) {\n this.bindTextureToFrameBuffer(texture);\n const result = downloadAndDecode();\n this.unbindTextureToFrameBuffer();\n return result;\n }\n setOutputMatrixTextureDriver(outputMatrixTextureMaybePacked, width, height) {\n this.throwIfDisposed();\n const gl = this.gl;\n webgl_util.bindColorTextureToFramebuffer(gl, outputMatrixTextureMaybePacked, this.framebuffer);\n if (this.debug) {\n webgl_util.validateFramebuffer(gl);\n }\n this.outputTexture = outputMatrixTextureMaybePacked;\n webgl_util.callAndCheck(gl, () => gl.viewport(0, 0, width, height));\n webgl_util.callAndCheck(gl, () => gl.scissor(0, 0, width, height));\n }\n setOutputMatrixWriteRegionDriver(x, y, width, height) {\n this.throwIfDisposed();\n webgl_util.callAndCheck(this.gl, () => this.gl.scissor(x, y, width, height));\n }\n throwIfDisposed() {\n if (this.disposed) {\n throw new Error('Attempted to use disposed GPGPUContext.');\n }\n }\n throwIfNoProgram() {\n if (this.program == null) {\n throw new Error('No GPU program is currently set.');\n }\n }\n}\n/**\n * Finds the index of the last true element using linear search.\n * Note: We can't do binary search because Chrome expects us to explicitly\n * test all fences before download:\n * https://github.com/tensorflow/tfjs/issues/1145\n */\nexport function linearSearchLastTrue(arr) {\n let i = 0;\n for (; i < arr.length; ++i) {\n const isDone = arr[i]();\n if (!isDone) {\n break;\n }\n }\n return i - 1;\n}\n//# sourceMappingURL=gpgpu_context.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env, util } from '@tensorflow/tfjs-core';\nimport * as shader_compiler from './shader_compiler';\nexport function compileProgram(gpgpu, program, inputs, output) {\n const userCode = program.userCode;\n const inputInfos = inputs.map((input, i) => {\n const shapeInfo = {\n logicalShape: input.shape,\n texShape: input.isUniform ? null : input.texData.texShape,\n isUniform: input.isUniform,\n isPacked: input.isUniform ? false : input.texData.isPacked,\n flatOffset: null\n };\n if (input.texData != null && input.texData.slice != null &&\n input.texData.slice.flatOffset > 0) {\n shapeInfo.flatOffset = input.texData.slice.flatOffset;\n }\n return { name: program.variableNames[i], shapeInfo };\n });\n const inShapeInfos = inputInfos.map(x => x.shapeInfo);\n const outShapeInfo = {\n logicalShape: output.shape,\n texShape: output.texData.texShape,\n isUniform: false,\n isPacked: output.texData.isPacked,\n flatOffset: null\n };\n const source = shader_compiler.makeShader(inputInfos, outShapeInfo, userCode, program.packedInputs);\n const webGLProgram = gpgpu.createProgram(source);\n // Add special uniforms (NAN, INFINITY)\n let infLoc = null;\n const nanLoc = gpgpu.getUniformLocation(webGLProgram, 'NAN', false);\n if (env().getNumber('WEBGL_VERSION') === 1) {\n infLoc = gpgpu.getUniformLocation(webGLProgram, 'INFINITY', false);\n }\n // Add user-defined uniforms\n const uniformLocations = {};\n for (let i = 0; i < program.variableNames.length; i++) {\n const varName = program.variableNames[i];\n const shouldThrow = false;\n uniformLocations[varName] =\n gpgpu.getUniformLocation(webGLProgram, varName, shouldThrow);\n uniformLocations[`offset${varName}`] =\n gpgpu.getUniformLocation(webGLProgram, `offset${varName}`, shouldThrow);\n }\n return {\n program,\n source,\n webGLProgram,\n uniformLocations,\n inShapeInfos,\n outShapeInfo,\n infLoc,\n nanLoc,\n };\n}\nfunction validateBinaryAndProgram(shapeInfos, inputs) {\n if (shapeInfos.length !== inputs.length) {\n throw Error(`Binary was compiled with ${shapeInfos.length} inputs, but ` +\n `was executed with ${inputs.length} inputs`);\n }\n shapeInfos.forEach((s, i) => {\n const shapeA = s.logicalShape;\n const input = inputs[i];\n const shapeB = input.shape;\n if (!util.arraysEqual(shapeA, shapeB)) {\n throw Error(`Binary was compiled with different shapes than ` +\n `the current args. Shapes ${shapeA} and ${shapeB} must match`);\n }\n // The input is uploaded as uniform.\n if (s.isUniform && input.isUniform) {\n return;\n }\n const texShapeA = s.texShape;\n const texShapeB = input.isUniform ? null : input.texData.texShape;\n if (!util.arraysEqual(texShapeA, texShapeB)) {\n throw Error(`Binary was compiled with different texture shapes than the` +\n ` current args. 
Shape ${texShapeA} and ${texShapeB} must match`);\n }\n });\n}\nexport function runProgram(gpgpu, binary, inputs, output, customSetup) {\n validateBinaryAndProgram(binary.inShapeInfos, inputs);\n validateBinaryAndProgram([binary.outShapeInfo], [output]);\n const outTex = output.texData.texture;\n const outTexShape = output.texData.texShape;\n if (output.texData.isPacked) {\n gpgpu.setOutputPackedMatrixTexture(outTex, outTexShape[0], outTexShape[1]);\n }\n else {\n gpgpu.setOutputMatrixTexture(outTex, outTexShape[0], outTexShape[1]);\n }\n gpgpu.setProgram(binary.webGLProgram);\n // Set special uniforms (NAN, INFINITY)\n if (env().getNumber('WEBGL_VERSION') === 1) {\n if (binary.infLoc !== null) {\n gpgpu.gl.uniform1f(binary.infLoc, Infinity);\n }\n }\n if (binary.nanLoc !== null) {\n gpgpu.gl.uniform1f(binary.nanLoc, NaN);\n }\n // Set user-defined inputs\n inputs.forEach((input, i) => {\n const varName = binary.program.variableNames[i];\n const varLoc = binary.uniformLocations[varName];\n const varOffsetLoc = binary.uniformLocations[`offset${varName}`];\n if (varLoc == null) {\n // The compiler inferred that this variable is not used in this shader.\n return;\n }\n if (input.isUniform) {\n // Upload the values of the tensor as uniform.\n if (util.sizeFromShape(input.shape) < 2) {\n gpgpu.gl.uniform1f(varLoc, input.uniformValues[0]);\n }\n else {\n let vals = input.uniformValues;\n if (!(vals instanceof Float32Array)) {\n vals = new Float32Array(vals);\n }\n gpgpu.gl.uniform1fv(varLoc, vals);\n }\n return;\n }\n // If the input was sliced, upload the flat offset index.\n if (input.texData.slice != null && varOffsetLoc != null) {\n gpgpu.gl.uniform1i(varOffsetLoc, input.texData.slice.flatOffset);\n }\n gpgpu.setInputMatrixTexture(input.texData.texture, varLoc, i);\n });\n if (customSetup != null) {\n customSetup(gpgpu, binary.webGLProgram);\n }\n gpgpu.executeProgram();\n}\nexport function makeShaderKey(program, inputs, output) {\n let keyInputs = '';\n inputs.concat(output).forEach(x => {\n const hasOffset = x.texData != null && x.texData.slice != null &&\n x.texData.slice.flatOffset > 0;\n const texShape = x.isUniform ? 'uniform' : x.texData.texShape;\n keyInputs += `${x.shape}_${texShape}_${hasOffset}`;\n });\n const keyUserCode = program.userCode;\n let key = program.constructor.name;\n // Fast string concat. See https://jsperf.com/string-concatenation/14.\n key += '_' + keyInputs + '_' + keyUserCode;\n return key;\n}\n//# sourceMappingURL=gpgpu_math.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getGlslDifferences } from './glsl_version';\nexport class Im2ColPackedProgram {\n constructor(outputShape, inputShape, convInfo) {\n this.variableNames = ['A'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = outputShape;\n const { filterWidth, inChannels, strideWidth, strideHeight, padInfo, outWidth, dilationWidth, dilationHeight, dataFormat } = convInfo;\n const { left, top } = padInfo;\n const itemsPerBlockRow = inChannels * filterWidth;\n const glsl = getGlslDifferences();\n const isChannelsLast = dataFormat === 'channelsLast';\n const rowDim = isChannelsLast ? 0 : 1;\n const colDim = isChannelsLast ? 1 : 2;\n let unrolled = ``;\n for (let row = 0; row <= 1; row++) {\n for (let col = 0; col <= 1; col++) {\n unrolled += `\n blockIndex = rc.y + ${col};\n pos = rc.x + ${row};\n\n if(blockIndex < ${outputShape[1]} && pos < ${outputShape[0]}) {\n offsetY = int(blockIndex / (${outWidth})) * ${strideHeight} - ${top};\n d0 = offsetY + ${dilationHeight} * (pos / ${itemsPerBlockRow});\n\n if(d0 < ${inputShape[rowDim]} && d0 >= 0) {\n\n offsetX = int(mod(float(blockIndex), ${outWidth}.) * ${strideWidth}. - ${left}.);\n d1 = offsetX + ${dilationWidth} * (int(mod(float(pos), ${itemsPerBlockRow}.) / ${inChannels}.));\n\n if(d1 < ${inputShape[colDim]} && d1 >= 0) {\n\n ch = int(mod(float(pos), ${inChannels}.));\n\n if (${isChannelsLast}) {\n innerDims = vec2(d1, ch);\n result[${row * 2 + col}] = getChannel(\n getA(d0, int(innerDims.x),\n int(innerDims.y)), innerDims);\n } else {\n innerDims = vec2(d0, d1);\n result[${row * 2 + col}] = getChannel(\n getA(ch, int(innerDims.x),\n int(innerDims.y)), innerDims);\n }\n }\n }\n }\n `;\n }\n }\n this.userCode = `\n void main() {\n ivec2 rc = getOutputCoords();\n\n vec4 result = vec4(0);\n\n int blockIndex, pos, offsetY, d0, offsetX, d1, ch;\n vec2 innerDims;\n\n ${unrolled}\n\n ${glsl.output} = result;\n }\n `;\n }\n}\n//# sourceMappingURL=im2col_packed_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class LRNProgram {\n constructor(xShape, radius, bias, alpha, beta) {\n this.variableNames = ['x'];\n this.outputShape = [];\n const rad = radius;\n const maxD = xShape[3] - 1;\n this.outputShape = xShape;\n // optimize pow(bias + alpha * sum, -beta)\n // src: https://github.com/tensorflow/tensorflow/..\n // blob/26033a1644a9c4a5fbe3170ab2e864b6a4ccd4ca/..\n // tensorflow/core/kernels/mkl_lrn_op.cc#L320\n let powOperator;\n const basis = `float(${bias}) + float(${alpha}) * sum`;\n if (beta === 0.5) {\n powOperator = `inversesqrt(${basis})`;\n }\n else if (beta === 1.0) {\n powOperator = `1.0/(${basis})`;\n }\n else {\n powOperator = `exp(log(${basis}) * float(-${beta}));`;\n }\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int r = coords[1];\n int c = coords[2];\n int d = coords[3];\n float x = getX(b, r, c, d);\n float sum = 0.0;\n for (int j = -${rad}; j <= ${rad}; j++) {\n int idx = d + j;\n if (idx >= 0 && idx <= ${maxD}) {\n float z = getX(b, r, c, idx);\n sum += z * z;\n }\n }\n float val = x * ${powOperator};\n setOutput(val);\n }\n `;\n }\n}\n//# sourceMappingURL=lrn_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class LRNGradProgram {\n constructor(inputShape, depthRadius, bias, alpha, beta) {\n this.variableNames = ['inputImage', 'outputImage', 'dy'];\n this.outputShape = [];\n this.outputShape = inputShape;\n this.depth = inputShape[3];\n this.depthRadius = depthRadius;\n this.bias = bias;\n this.alpha = alpha;\n this.beta = beta;\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int r = coords[1];\n int c = coords[2];\n\n float result = 0.0;\n for (int d = 0; d < ${this.depth}; ++d) {\n int depthBegin = int(max(0.0, float(d - ${depthRadius})));\n int depthEnd = int(min(float(${this.depth}),\n float(d + ${depthRadius} + 1)));\n\n const int MIN_DEPTH_BEGIN = 0;\n const int MAX_DEPTH_END = ${this.depth};\n\n float norm = 0.0;\n for (int k = MIN_DEPTH_BEGIN; k < MAX_DEPTH_END; ++k) {\n if (k < depthBegin){\n continue;\n }\n else if (k >= depthBegin && k < depthEnd) {\n norm += getInputImage(b, r, c, k) * getInputImage(b, r, c, k);\n }\n else {\n break;\n }\n }\n\n norm = float(${alpha}) * norm + float(${bias});\n\n for(int k = MIN_DEPTH_BEGIN; k < MAX_DEPTH_END; ++k){\n if (k < depthBegin){\n continue;\n }\n else if (k >= depthBegin && k < depthEnd){\n float dyi = -2.0 * float(${alpha})\n * float(${beta})\n * getInputImage(b ,r ,c, k) * getOutputImage(b, r, c, d)\n / norm;\n if (k == d) {\n dyi += pow(norm, -1.0 * ${beta});\n }\n if (k == coords[3]) {\n dyi *= getDy(b, r, c, d);\n result += dyi;\n }\n }\n else {\n break;\n }\n }\n }\n setOutput(result);\n }\n `;\n }\n}\n//# sourceMappingURL=lrn_grad_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class LRNPackedProgram {\n constructor(xShape, radius, bias, alpha, beta) {\n this.variableNames = ['x'];\n this.outputShape = [];\n this.packedInputs = true;\n this.packedOutput = true;\n const rad = radius;\n const maxD = xShape[3] - 1;\n this.outputShape = xShape;\n // optimize pow(bias + alpha * sum, -beta)\n // src: https://github.com/tensorflow/tensorflow/..\n // blob/26033a1644a9c4a5fbe3170ab2e864b6a4ccd4ca/..\n // tensorflow/core/kernels/mkl_lrn_op.cc#L320\n let powOperator;\n const basis = `float(${bias}) + float(${alpha}) * sum`;\n if (beta === 0.5) {\n powOperator = `inversesqrt(${basis})`;\n }\n else if (beta === 1.0) {\n powOperator = `1.0/(${basis})`;\n }\n else {\n powOperator = `exp(log(${basis}) * float(-${beta}));`;\n }\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords.x;\n int r = coords.y;\n int c = coords.z;\n int d = coords.w;\n\n bool hasNextCol = d < ${this.outputShape[3]};\n bool hasNextRow = c < ${this.outputShape[2]};\n\n vec4 sum = vec4(0.);\n vec4 xFragAtOutputCoords = getX(b, r, c, d);\n\n vec4 xAtOutputCoords = vec4(\n getChannel(xFragAtOutputCoords, vec2(c, d)),\n hasNextCol ?\n getChannel(xFragAtOutputCoords, vec2(c, d + 1)) : 0.0,\n hasNextRow ?\n getChannel(xFragAtOutputCoords , vec2(c + 1, d)) : 0.0,\n (hasNextRow && hasNextCol) ?\n getChannel(xFragAtOutputCoords, vec2(c + 1, d + 1)) : 0.0\n );\n\n int firstChannel = d - ${rad};\n vec2 cache = vec2(0.);\n if(firstChannel >= 0){\n vec4 firstChannelFrag = getX(b, r, c, firstChannel);\n cache.x = getChannel(firstChannelFrag, vec2(c, firstChannel));\n if(hasNextRow){\n cache.y = getChannel(firstChannelFrag, vec2(c + 1, firstChannel));\n }\n }\n\n ivec2 depth = ivec2(d, d + 1);\n for (int j = - ${rad}; j <= ${rad}; j++) {\n ivec2 idx = depth + j;\n bvec2 aboveLowerBound = greaterThanEqual(idx, ivec2(0));\n bvec2 belowUpperBound = lessThanEqual(idx, ivec2(${maxD}));\n\n bool depthInRange = aboveLowerBound.x && belowUpperBound.x;\n bool depthPlusOneInRange = aboveLowerBound.y && belowUpperBound.y;\n\n if(depthInRange || depthPlusOneInRange){\n vec4 z = vec4(0.);\n vec4 xFragAtCurrentDepth;\n z.xz = cache.xy;\n if(depthPlusOneInRange && hasNextCol){\n xFragAtCurrentDepth = idx.y != d ?\n getX(b, r, c, idx.y) : xFragAtOutputCoords;\n z.y = getChannel(xFragAtCurrentDepth, vec2(c, idx.y));\n if(hasNextRow){\n z.w = getChannel(xFragAtCurrentDepth, vec2(c + 1, idx.y));\n }\n }\n cache.xy = z.yw;\n sum += z * z;\n }\n }\n vec4 result = xAtOutputCoords * ${powOperator};\n setOutput(result);\n }\n `;\n }\n}\n//# sourceMappingURL=lrn_packed_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class MaxPool2DBackpropProgram {\n constructor(convInfo) {\n this.variableNames = ['dy', 'maxPos'];\n this.outputShape = convInfo.inShape;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationHeight = convInfo.dilationHeight;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padTop = effectiveFilterHeight - 1 - convInfo.padInfo.top;\n const padLeft = effectiveFilterWidth - 1 - convInfo.padInfo.left;\n const lastIndex = effectiveFilterHeight * effectiveFilterWidth - 1;\n this.userCode = `\n const ivec2 pads = ivec2(${padTop}, ${padLeft});\n\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int d = coords[3];\n\n ivec2 dyRCCorner = coords.yz - pads;\n int dyRCorner = dyRCCorner.x;\n int dyCCorner = dyRCCorner.y;\n\n // Convolve dy(?, ?, d) with pos mask(:, :, d) to get dx(xR, xC, d).\n // ? = to be determined. : = across all values in that axis.\n float dotProd = 0.0;\n for (int wR = 0; wR < ${effectiveFilterHeight};\n wR += ${dilationHeight}) {\n float dyR = float(dyRCorner + wR) / ${strideHeight}.0;\n\n if (dyR < 0.0 || dyR >= ${convInfo.outHeight}.0 || fract(dyR) > 0.0) {\n continue;\n }\n int idyR = int(dyR);\n\n for (int wC = 0; wC < ${effectiveFilterWidth}; wC++) {\n float dyC = float(dyCCorner + wC) / ${strideWidth}.0;\n\n if (dyC < 0.0 || dyC >= ${convInfo.outWidth}.0 ||\n fract(dyC) > 0.0) {\n continue;\n }\n int idyC = int(dyC);\n\n float dyValue = getDy(b, idyR, idyC, d);\n int maxPosValue = ${lastIndex} - int(getMaxPos(b, idyR, idyC, d));\n\n // Get the current value, check it against the value from the\n // position matrix.\n int curPosValue = wR * ${effectiveFilterWidth} + wC;\n float mask = float(maxPosValue == curPosValue ? 
1.0 : 0.0);\n\n dotProd += dyValue * mask;\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\nexport class MaxPool3DBackpropProgram {\n constructor(convInfo) {\n this.variableNames = ['dy', 'maxPos'];\n this.outputShape = convInfo.inShape;\n const strideDepth = convInfo.strideDepth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationDepth = convInfo.dilationDepth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterDepth = convInfo.effectiveFilterDepth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padFront = effectiveFilterDepth - 1 - convInfo.padInfo.front;\n const padTop = effectiveFilterHeight - 1 - convInfo.padInfo.top;\n const padLeft = effectiveFilterWidth - 1 - convInfo.padInfo.left;\n const lastIndex = effectiveFilterDepth * effectiveFilterHeight * effectiveFilterWidth - 1;\n this.userCode = `\n const ivec3 pads = ivec3(${padFront}, ${padTop}, ${padLeft});\n\n void main() {\n ivec5 coords = getOutputCoords();\n int batch = coords.x;\n int ch = coords.u;\n\n ivec3 dyCorner = ivec3(coords.y, coords.z, coords.w) - pads;\n int dyDCorner = dyCorner.x;\n int dyRCorner = dyCorner.y;\n int dyCCorner = dyCorner.z;\n\n // Convolve dy(?, ?, ?, ch) with pos mask(:, :, :, d) to get\n // dx(xD, xR, xC, ch).\n // ? = to be determined. : = across all values in that axis.\n float dotProd = 0.0;\n\n for (int wD = 0; wD < ${effectiveFilterDepth};\n wD += ${dilationDepth}) {\n float dyD = float(dyDCorner + wD) / ${strideDepth}.0;\n\n if (dyD < 0.0 || dyD >= ${convInfo.outDepth}.0 || fract(dyD) > 0.0) {\n continue;\n }\n int idyD = int(dyD);\n\n for (int wR = 0; wR < ${effectiveFilterHeight};\n wR += ${dilationHeight}) {\n float dyR = float(dyRCorner + wR) / ${strideHeight}.0;\n\n if (dyR < 0.0 || dyR >= ${convInfo.outHeight}.0 ||\n fract(dyR) > 0.0) {\n continue;\n }\n int idyR = int(dyR);\n\n for (int wC = 0; wC < ${effectiveFilterWidth};\n wC += ${dilationWidth}) {\n float dyC = float(dyCCorner + wC) / ${strideWidth}.0;\n\n if (dyC < 0.0 || dyC >= ${convInfo.outWidth}.0 ||\n fract(dyC) > 0.0) {\n continue;\n }\n int idyC = int(dyC);\n\n float dyValue = getDy(batch, idyD, idyR, idyC, ch);\n int maxPosValue = ${lastIndex} -\n int(getMaxPos(batch, idyD, idyR, idyC, ch));\n\n // Get the current value, check it against the value from the\n // position matrix.\n int curPosValue =\n wD * ${effectiveFilterHeight} * ${effectiveFilterWidth} +\n wR * ${effectiveFilterWidth} + wC;\n float mask = float(maxPosValue == curPosValue ? 1.0 : 0.0);\n\n dotProd += dyValue * mask;\n }\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\n//# sourceMappingURL=max_pool_backprop_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class MatMulPackedProgram {\n constructor(aShape, bShape, outputShape, transposeA = false, transposeB = false, addBias = false, activation = null, hasPreluActivation = false) {\n this.variableNames = ['matrixA', 'matrixB'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = outputShape;\n const sharedDim = transposeA ? aShape[1] : aShape[2];\n const sharedDimensionPacked = Math.ceil(sharedDim / 2);\n const aSample = transposeA ? 'i * 2, rc.y' : 'rc.y, i * 2';\n const bSample = transposeB ? 'rc.z, i * 2' : 'i * 2, rc.z';\n const aSwizzle = transposeA ? ['a.xxyy', 'a.zzww'] : ['a.xxzz', 'a.yyww'];\n const bSwizzle = transposeB ? ['b.xzxz', 'b.ywyw'] : ['b.xyxy', 'b.zwzw'];\n let activationSnippet = '', applyActivationSnippet = '';\n if (activation) {\n if (hasPreluActivation) {\n activationSnippet = `vec4 activation(vec4 a) {\n vec4 b = getPreluActivationWeightsAtOutCoords();\n ${activation}\n }`;\n }\n else {\n activationSnippet = `vec4 activation(vec4 x) {\n ${activation}\n }`;\n }\n applyActivationSnippet = `result = activation(result);`;\n }\n const addBiasSnippet = addBias ? 'result += getBiasAtOutCoords();' : '';\n if (addBias) {\n this.variableNames.push('bias');\n }\n if (hasPreluActivation) {\n this.variableNames.push('preluActivationWeights');\n }\n let batchASnippet = 'rc.x';\n let batchBSnippet = 'rc.x';\n if (aShape[0] < bShape[0]) {\n batchASnippet = `int(min(float(rc.x), ${aShape[0] - 1}.))`;\n }\n else if (bShape[0] < aShape[0]) {\n batchBSnippet = `int(min(float(rc.x), ${bShape[0] - 1}.))`;\n }\n this.userCode = `\n ${activationSnippet}\n\n const float sharedDimension = ${sharedDimensionPacked}.0;\n\n vec4 dot2x2ARowBCol(ivec3 rc) {\n vec4 result = vec4(0);\n for (int i = 0; i < ${sharedDimensionPacked}; i++) {\n int batchA = ${batchASnippet};\n int batchB = ${batchBSnippet};\n vec4 a = getMatrixA(batchA, ${aSample});\n vec4 b = getMatrixB(batchB, ${bSample});\n\n // These swizzled products need to be separately added.\n // See: https://github.com/tensorflow/tfjs/issues/1735\n result += (${aSwizzle[0]} * ${bSwizzle[0]});\n result += (${aSwizzle[1]} * ${bSwizzle[1]});\n }\n return result;\n }\n\n void main() {\n ivec3 rc = getOutputCoords();\n vec4 result = dot2x2ARowBCol(rc);\n\n ${addBiasSnippet}\n\n ${applyActivationSnippet}\n\n setOutput(result);\n }\n `;\n }\n}\n//# sourceMappingURL=mulmat_packed_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class MultinomialProgram {\n constructor(batchSize, numOutcomes, numSamples) {\n this.variableNames = ['probs'];\n this.outputShape = [batchSize, numSamples];\n this.userCode = `\n uniform float seed;\n\n void main() {\n ivec2 coords = getOutputCoords();\n int batch = coords[0];\n\n float r = random(seed);\n float cdf = 0.0;\n\n for (int i = 0; i < ${numOutcomes - 1}; i++) {\n cdf += getProbs(batch, i);\n\n if (r < cdf) {\n setOutput(float(i));\n return;\n }\n }\n\n // If no other event happened, last event happened.\n setOutput(float(${numOutcomes - 1}));\n }\n `;\n }\n getCustomSetupFunc(seed) {\n return (gpgpu, webGLProgram) => {\n if (this.seedLoc == null) {\n this.seedLoc = gpgpu.getUniformLocation(webGLProgram, 'seed');\n }\n gpgpu.gl.uniform1f(this.seedLoc, seed);\n };\n }\n}\n//# sourceMappingURL=multinomial_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class OneHotProgram {\n constructor(numIndices, depth, onValue, offValue) {\n this.variableNames = ['indices'];\n this.outputShape = [numIndices, depth];\n this.userCode = `\n void main() {\n ivec2 coords = getOutputCoords();\n int index = round(getIndices(coords.x));\n setOutput(mix(float(${offValue}), float(${onValue}),\n float(index == coords.y)));\n }\n `;\n }\n}\n//# sourceMappingURL=onehot_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getChannels } from './packing_util';\nimport { getCoordsDataType } from './shader_compiler';\nexport class PackProgram {\n constructor(outputShape) {\n this.variableNames = ['A'];\n this.packedInputs = false;\n this.packedOutput = true;\n // Only input / output 3D tensors.\n this.outputShape = outputShape;\n const rank = outputShape.length;\n if (rank === 0) {\n this.userCode = `\n void main() {\n setOutput(vec4(getA(), 0., 0., 0.));\n }\n `;\n }\n else {\n const channels = getChannels('rc', rank);\n const dtype = getCoordsDataType(rank);\n const outOfBoundsCondition = getOutOfBoundsCondition(rank, outputShape, channels);\n const setup = getSetup(rank, outputShape[outputShape.length - 1], outputShape[outputShape.length - 2], channels);\n const output = getOutput(outputShape, channels);\n this.userCode = `\n void main() {\n ${dtype} rc = getOutputCoords();\n\n if(${outOfBoundsCondition}) {\n setOutput(vec4(0));\n } else {\n ${setup}\n\n setOutput(vec4(${output}));\n }\n }\n `;\n }\n }\n}\nfunction getSourceCoordsArr(rank, dims) {\n const coords = [];\n for (let row = 0; row <= 1; row++) {\n for (let col = 0; col <= 1; col++) {\n let coord = `${row === 0 ? 'r' : 'rp1'}, ${col === 0 ? 'c' : 'cp1'}`;\n for (let d = 2; d < rank; d++) {\n coord = `${dims[dims.length - 1 - d]},` + coord;\n }\n coords.push(coord);\n }\n }\n return coords;\n}\nfunction getOutOfBoundsCondition(rank, shape, dims) {\n if (rank === 1) {\n return `rc > ${shape[0]}`;\n }\n let cond = '';\n for (let i = rank - 2; i < rank; i++) {\n cond += `${dims[i]} >= ${shape[i]}`;\n if (i < rank - 1) {\n cond += '||';\n }\n }\n return cond;\n}\nfunction getSetup(rank, cols, rows, dims) {\n if (rank === 1) {\n return '';\n }\n const innerDims = dims.slice(-2);\n return `\n int r = ${innerDims[0]};\n int c = ${innerDims[1]};\n int rp1 = r + 1;\n int cp1 = c + 1;\n\n bool cEdge = cp1 >= ${cols};\n bool rEdge = rp1 >= ${rows};\n `;\n}\nfunction getOutput(shape, dims) {\n const rank = shape.length;\n const sourceCoords = getSourceCoordsArr(rank, dims);\n if (rank === 1) {\n return `getA(rc),\n rc + 1 >= ${shape[0]} ? 0. : getA(rc + 1),\n 0, 0`;\n }\n return `getA(${sourceCoords[0]}),\n cEdge ? 0. : getA(${sourceCoords[1]}),\n rEdge ? 0. : getA(${sourceCoords[2]}),\n rEdge || cEdge ? 0. : getA(${sourceCoords[3]})`;\n}\n//# sourceMappingURL=pack_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getCoordsDataType } from './shader_compiler';\nexport class PadProgram {\n constructor(xShape, paddings, constantValue) {\n this.variableNames = ['x'];\n this.outputShape = paddings.map((p, i) => p[0] /* beforePad */ + xShape[i] + p[1] /* afterPad */);\n const rank = xShape.length;\n const type = getCoordsDataType(rank);\n const start = paddings.map(p => p[0]).join(',');\n const end = paddings.map((p, i) => p[0] + xShape[i]).join(',');\n const unpackedCoords = ['coords[0]', 'coords[1]', 'coords[2]', 'coords[3]'].slice(0, rank);\n if (rank === 1) {\n this.userCode = `\n int start = ${start};\n int end = ${end};\n\n void main() {\n int outC = getOutputCoords();\n if (outC < start || outC >= end) {\n setOutput(float(${constantValue}));\n } else {\n setOutput(getX(outC - start));\n }\n }\n `;\n return;\n }\n this.userCode = `\n ${type} start = ${type}(${start});\n ${type} end = ${type}(${end});\n\n void main() {\n ${type} outC = getOutputCoords();\n if (any(lessThan(outC, start)) || any(greaterThanEqual(outC, end))) {\n setOutput(float(${constantValue}));\n } else {\n ${type} coords = outC - start;\n setOutput(getX(${unpackedCoords}));\n }\n }\n `;\n }\n}\n//# sourceMappingURL=pad_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getChannels } from './packing_util';\nimport { getCoordsDataType } from './shader_compiler';\nexport class PadPackedProgram {\n constructor(xShape, paddings, constantValue) {\n this.variableNames = ['x'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = paddings.map((p, i) => p[0] /* beforePad */ + xShape[i] + p[1] /* afterPad */);\n const rank = xShape.length;\n const dtype = getCoordsDataType(rank);\n const start = paddings.map(p => p[0]).join(',');\n const end = paddings.map((p, i) => p[0] + xShape[i]).join(',');\n const coords = getChannels('rc', rank);\n const source = getChannels('source', rank);\n const cLimit = `${coords[rank - 1]} < ${this.outputShape[rank - 1]}`;\n const innerDims = rank === 1 ? 'source' : `vec2(${source.slice(-2).join()})`;\n const componentSetup = [\n `${dtype} rc = outputLoc;`, `${coords[rank - 1]} += 1;\n if(${cLimit}) {\n `,\n rank === 1 ? 
'' : `}\n rc = outputLoc;\n ${coords[rank - 2]} += 1;\n if(${coords[rank - 2]} < ${this.outputShape[rank - 2]}) {`,\n rank === 1 ? '' : ` ${coords[rank - 1]} += 1;\n if(${cLimit}) {`\n ];\n const paddingArea = rank === 1 ?\n 'rc < start || rc >= end' :\n 'any(lessThan(rc, start)) || any(greaterThanEqual(rc, end))';\n let mainLoop = '';\n for (let i = 0, j = rank === 1 ? 2 : 4; i < j; i++) {\n mainLoop += `\n ${componentSetup[i]}\n if (${paddingArea}) {\n result[${i}] = float(${constantValue});\n } else {\n ${dtype} source = rc - start;\n result[${i}] = getChannel(getX(${source.join()}), ${innerDims});\n }\n `;\n }\n mainLoop += (rank === 1 ? `} ` : `}}`);\n this.userCode = `\n const ${dtype} start = ${dtype}(${start});\n const ${dtype} end = ${dtype}(${end});\n\n void main() {\n ${dtype} outputLoc = getOutputCoords();\n vec4 result = vec4(0.);\n ${mainLoop}\n setOutput(result);\n }\n `;\n }\n}\n//# sourceMappingURL=pad_packed_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class Pool2DProgram {\n constructor(convInfo, poolType, computePositions, flattenPositions = false, includeBatchInIndex = false) {\n this.variableNames = ['x'];\n if (poolType === 'avg' && computePositions) {\n throw new Error('Cannot compute positions for average pool.');\n }\n const filterWidth = convInfo.filterWidth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n this.outputShape = convInfo.outShape;\n const isAvgPool = poolType === 'avg';\n const batchFlattenPositionStr = `((batch * ${convInfo.inHeight} + xR) * ${convInfo.inWidth} + xC) * ${convInfo.inChannels} + d`;\n const flattenPositionStr = `(xR * ${convInfo.inWidth} + xC) * ${convInfo.inChannels} + d`;\n let initializationValue = '0.0';\n if (!isAvgPool) {\n // WebGL on Firefox Linux can't compile 1/0 so we do 1/eps.\n initializationValue = '-1.0 / 1e-20';\n }\n if (computePositions) {\n const compareOp = '>=';\n this.userCode = `\n const ivec2 strides = ivec2(${strideHeight}, ${strideWidth});\n const ivec2 pads = ivec2(${padTop}, ${padLeft});\n\n void main() {\n ivec4 coords = getOutputCoords();\n int batch = coords[0];\n int d = coords[3];\n\n ivec2 xRCCorner = coords.yz * strides - pads;\n int xRCorner = xRCCorner.x;\n int xCCorner = xRCCorner.y;\n\n // max/min x(?, ?, d) to get y(yR, yC, d).\n // ? 
= to be determined\n float minMaxValue = 0.0;\n float minMaxValueFound = 0.0;\n int minMaxPosition = 0;\n float avgValue = 0.0;\n\n for (int wR = 0; wR < ${effectiveFilterHeight};\n wR += ${dilationHeight}) {\n int xR = xRCorner + wR;\n\n if (xR < 0 || xR >= ${convInfo.inHeight}) {\n continue;\n }\n\n for (int wC = 0; wC < ${effectiveFilterWidth};\n wC += ${dilationWidth}) {\n int xC = xCCorner + wC;\n\n if (xC < 0 || xC >= ${convInfo.inWidth}) {\n continue;\n }\n\n float value = getX(batch, xR, xC, d);\n\n // If a min / max value has already been found, use it. If not,\n // use the current value.\n float currMinMaxValue = mix(\n value, minMaxValue, minMaxValueFound);\n if (value ${compareOp} currMinMaxValue) {\n minMaxValue = value;\n minMaxValueFound = 1.0;\n minMaxPosition = ${flattenPositions ? (includeBatchInIndex ? batchFlattenPositionStr :\n flattenPositionStr) :\n `wR * ${effectiveFilterWidth} + wC`};\n }\n }\n }\n setOutput(float(minMaxPosition));\n }\n `;\n return;\n }\n const compareOp = 'max';\n let returnValue = `${poolType}(${poolType}(${poolType}(` +\n 'minMaxValue[0], minMaxValue[1]), minMaxValue[2]), minMaxValue[3])';\n if (poolType === 'avg') {\n returnValue = `avgValue / count`;\n }\n const filterWidthNearestVec4 = Math.floor(filterWidth / 4) * 4;\n const filterWidthVec4Remainder = filterWidth % 4;\n const updateSnippet = `\n if (${isAvgPool}) {\n avgValue += dot(values, ones);\n } else {\n minMaxValue = ${compareOp}(values, minMaxValue);\n }\n `;\n this.userCode = `\n const ivec2 strides = ivec2(${strideHeight}, ${strideWidth});\n const ivec2 pads = ivec2(${padTop}, ${padLeft});\n const float initializationValue = ${initializationValue};\n const vec4 ones = vec4(1.0, 1.0, 1.0, 1.0);\n\n float count = 0.0;\n\n float getValue(int batch, int xR, int xC, int d) {\n if (xC < 0 || xC >= ${convInfo.inWidth}) {\n return initializationValue;\n }\n count += 1.0;\n return getX(batch, xR, xC, d);\n }\n\n void main() {\n ivec4 coords = getOutputCoords();\n int batch = coords[0];\n int d = coords[3];\n\n ivec2 xRCCorner = coords.yz * strides - pads;\n int xRCorner = xRCCorner.x;\n int xCCorner = xRCCorner.y;\n\n // max/min x(?, ?, d) to get y(yR, yC, d).\n // ? 
= to be determined\n vec4 minMaxValue = vec4(${initializationValue});\n float avgValue = 0.0;\n count = 0.0;\n\n for (int wR = 0; wR < ${effectiveFilterHeight};\n wR += ${dilationHeight}) {\n int xR = xRCorner + wR;\n\n if (xR < 0 || xR >= ${convInfo.inHeight}) {\n continue;\n }\n\n for (int wC = 0; wC < ${filterWidthNearestVec4}; wC += 4) {\n int xC = xCCorner + wC * ${dilationWidth};\n\n vec4 values = vec4(\n getValue(batch, xR, xC, d),\n getValue(batch, xR, xC + ${dilationWidth}, d),\n getValue(batch, xR, xC + 2 * ${dilationWidth}, d),\n getValue(batch, xR, xC + 3 * ${dilationWidth}, d)\n );\n\n ${updateSnippet}\n }\n\n int xC = xCCorner + ${filterWidthNearestVec4};\n if (${filterWidthVec4Remainder === 1}) {\n vec4 values = vec4(\n getValue(batch, xR, xC, d),\n initializationValue,\n initializationValue,\n initializationValue\n );\n\n ${updateSnippet}\n } else if (${filterWidthVec4Remainder === 2}) {\n vec4 values = vec4(\n getValue(batch, xR, xC, d),\n getValue(batch, xR, xC + ${dilationWidth}, d),\n initializationValue,\n initializationValue\n );\n\n ${updateSnippet}\n } else if (${filterWidthVec4Remainder === 3}) {\n vec4 values = vec4(\n getValue(batch, xR, xC, d),\n getValue(batch, xR, xC + ${dilationWidth}, d),\n getValue(batch, xR, xC + 2 * ${dilationWidth}, d),\n initializationValue\n );\n\n ${updateSnippet}\n }\n }\n setOutput(${returnValue});\n }\n `;\n }\n}\nexport class Pool3DProgram {\n constructor(convInfo, poolType, computePositions, flattenPositions = false, includeBatchInIndex = false) {\n this.variableNames = ['x'];\n if (poolType === 'avg' && computePositions) {\n throw new Error('Cannot compute positions for average pool.');\n }\n const filterWidth = convInfo.filterWidth;\n const strideDepth = convInfo.strideDepth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationDepth = convInfo.dilationDepth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterDepth = convInfo.effectiveFilterDepth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padFront = convInfo.padInfo.front;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n this.outputShape = convInfo.outShape;\n const isAvgPool = poolType === 'avg';\n let initializationValue = '0.0';\n if (!isAvgPool) {\n // WebGL on Firefox Linux can't compile 1/0 so we do 1/eps.\n initializationValue = '-1.0 / 1e-20';\n }\n if (computePositions) {\n const compareOp = '>=';\n this.userCode = `\n const ivec3 strides =\n ivec3(${strideDepth}, ${strideHeight}, ${strideWidth});\n const ivec3 pads = ivec3(${padFront}, ${padTop}, ${padLeft});\n\n void main() {\n ivec5 coords = getOutputCoords();\n int batch = coords.x;\n int ch = coords.u;\n\n ivec3 xCorner = ivec3(coords.y, coords.z, coords.w) * strides - pads;\n int xDCorner = xCorner.x;\n int xRCorner = xCorner.y;\n int xCCorner = xCorner.z;\n\n // max/min x(?, ?, ?, ch) to get y(yD, yR, yC, ch).\n // ? 
= to be determined\n float minMaxValue = 0.0;\n float minMaxValueFound = 0.0;\n int minMaxPosition = 0;\n\n for (int wD = 0; wD < ${effectiveFilterDepth};\n wD += ${dilationDepth}) {\n int xD = xDCorner + wD;\n\n if (xD < 0 || xD >= ${convInfo.inDepth}) {\n continue;\n }\n\n for (int wR = 0; wR < ${effectiveFilterHeight};\n wR += ${dilationHeight}) {\n int xR = xRCorner + wR;\n\n if (xR < 0 || xR >= ${convInfo.inHeight}) {\n continue;\n }\n\n for (int wC = 0; wC < ${effectiveFilterWidth};\n wC += ${dilationWidth}) {\n int xC = xCCorner + wC;\n\n if (xC < 0 || xC >= ${convInfo.inWidth}) {\n continue;\n }\n\n float value = getX(batch, xD, xR, xC, ch);\n\n // If a min / max value has already been found, use it. If not,\n // use the current value.\n float currMinMaxValue = mix(\n value, minMaxValue, minMaxValueFound);\n if (value ${compareOp} currMinMaxValue) {\n minMaxValue = value;\n minMaxValueFound = 1.0;\n minMaxPosition = ${flattenPositions ?\n (includeBatchInIndex ?\n `(((batch * ${convInfo.inDepth} + xD) * ${convInfo.inHeight} + xR) * ${convInfo.inWidth} + xC) * ${convInfo.inChannels} + ch` :\n `((xD * ${convInfo.inHeight} + xR) * ${convInfo.inWidth} + xC) * ${convInfo.inChannels} + ch`) :\n `wD * ${effectiveFilterHeight} * ${effectiveFilterWidth} +\n wR * ${effectiveFilterWidth} + wC`};\n }\n }\n }\n }\n setOutput(float(minMaxPosition));\n }\n `;\n return;\n }\n const compareOp = 'max';\n let returnValue = `${poolType}(${poolType}(${poolType}(` +\n 'minMaxValue[0], minMaxValue[1]), minMaxValue[2]), minMaxValue[3])';\n if (poolType === 'avg') {\n returnValue = `avgValue / count`;\n }\n const filterWidthNearestVec4 = Math.floor(filterWidth / 4) * 4;\n const filterWidthVec4Remainder = filterWidth % 4;\n const updateSnippet = `\n if (${isAvgPool}) {\n avgValue += dot(values, ones);\n } else {\n minMaxValue = ${compareOp}(values, minMaxValue);\n }\n `;\n this.userCode = `\n const ivec3 strides =\n ivec3(${strideDepth}, ${strideHeight}, ${strideWidth});\n const ivec3 pads = ivec3(${padFront}, ${padTop}, ${padLeft});\n const float initializationValue = ${initializationValue};\n const vec4 ones = vec4(1.0, 1.0, 1.0, 1.0);\n\n float count = 0.0;\n\n float getValue(int batch, int xD, int xR, int xC, int ch) {\n if (xC < 0 || xC >= ${convInfo.inWidth}) {\n return initializationValue;\n }\n count += 1.0;\n return getX(batch, xD, xR, xC, ch);\n }\n\n void main() {\n ivec5 coords = getOutputCoords();\n int batch = coords.x;\n int ch = coords.u;\n\n ivec3 xCorner = ivec3(coords.y, coords.z, coords.w) * strides - pads;\n int xDCorner = xCorner.x;\n int xRCorner = xCorner.y;\n int xCCorner = xCorner.z;\n\n // max/min x(?, ?, ?, d) to get y(yD, yR, yC, ch).\n // ? 
= to be determined\n vec4 minMaxValue = vec4(${initializationValue});\n float avgValue = 0.0;\n count = 0.0;\n\n for (int wD = 0; wD < ${effectiveFilterDepth};\n wD += ${dilationDepth}) {\n int xD = xDCorner + wD;\n\n if (xD < 0 || xD >= ${convInfo.inDepth}) {\n continue;\n }\n\n for (int wR = 0; wR < ${effectiveFilterHeight};\n wR += ${dilationHeight}) {\n int xR = xRCorner + wR;\n\n if (xR < 0 || xR >= ${convInfo.inHeight}) {\n continue;\n }\n\n for (int wC = 0; wC < ${filterWidthNearestVec4}; wC += 4) {\n int xC = xCCorner + wC * ${dilationWidth};\n\n vec4 values = vec4(\n getValue(batch, xD, xR, xC, ch),\n getValue(batch, xD, xR, xC + ${dilationWidth}, ch),\n getValue(batch, xD, xR, xC + 2 * ${dilationWidth}, ch),\n getValue(batch, xD, xR, xC + 3 * ${dilationWidth}, ch)\n );\n\n ${updateSnippet}\n }\n\n int xC = xCCorner + ${filterWidthNearestVec4};\n if (${filterWidthVec4Remainder === 1}) {\n vec4 values = vec4(\n getValue(batch, xD, xR, xC, ch),\n initializationValue,\n initializationValue,\n initializationValue\n );\n\n ${updateSnippet}\n } else if (${filterWidthVec4Remainder === 2}) {\n vec4 values = vec4(\n getValue(batch, xD, xR, xC, ch),\n getValue(batch, xD, xR, xC + ${dilationWidth}, ch),\n initializationValue,\n initializationValue\n );\n\n ${updateSnippet}\n } else if (${filterWidthVec4Remainder === 3}) {\n vec4 values = vec4(\n getValue(batch, xD, xR, xC, ch),\n getValue(batch, xD, xR, xC + ${dilationWidth}, ch),\n getValue(batch, xD, xR, xC + 2 * ${dilationWidth}, ch),\n initializationValue\n );\n\n ${updateSnippet}\n }\n }\n setOutput(${returnValue});\n }\n }\n `;\n }\n}\n//# sourceMappingURL=pool_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class ReduceProgram {\n constructor(reduceInfo, reduceType) {\n this.variableNames = ['x'];\n const { windowSize, batchSize, inSize, outSize } = reduceInfo;\n this.outputShape = [batchSize, outSize];\n let initializationValue = '0.0';\n let compareOp = ``;\n if (reduceType === 'prod') {\n initializationValue = '1.0';\n }\n else if (reduceType === 'min') {\n // WebGL on Firefox Linux can't compile 1/0 so we do 1/eps.\n initializationValue = '1.0 / 1e-20';\n compareOp = `min`;\n }\n else if (reduceType === 'max') {\n // WebGL on Firefox Linux can't compile 1/0 so we do 1/eps.\n initializationValue = '-1.0 / 1e-20';\n compareOp = `max`;\n }\n let returnValue = `${reduceType}(${reduceType}(${reduceType}(` +\n 'minMaxValue[0], minMaxValue[1]), minMaxValue[2]), minMaxValue[3])';\n if (reduceType === 'sum') {\n returnValue = `sumValue`;\n }\n else if (reduceType === 'prod') {\n returnValue = `prodValue`;\n }\n else if (reduceType === 'all') {\n returnValue = `allValue`;\n }\n else if (reduceType === 'any') {\n returnValue = `anyValue`;\n }\n const windowSizeNearestVec4 = Math.floor(windowSize / 4) * 4;\n const windowSizeVec4Remainder = windowSize % 4;\n 
let updateSnippet = `\n if (${reduceType === 'sum'}) {\n sumValue += dot(values, ones);\n } else if (${reduceType === 'prod'}) {\n vec2 tmp = vec2(values[0], values[1]) * vec2(values[2], values[3]);\n prodValue *= tmp[0] * tmp[1];\n } else {\n minMaxValue = ${compareOp}(values, minMaxValue);\n }\n `;\n let vecType = `vec4`;\n if (reduceType === 'all') {\n initializationValue = '1.0';\n updateSnippet = `\n bool reducedAllValue = all(values);\n float floatedReducedAllValue = float(reducedAllValue);\n allValue = float(allValue >= 1.0 && floatedReducedAllValue >= 1.0);\n `;\n vecType = `bvec4`;\n }\n else if (reduceType === 'any') {\n initializationValue = '0.0';\n updateSnippet = `\n bool reducedAnyValue = any(values);\n float floatedReducedAnyValue = float(reducedAnyValue);\n anyValue = float(anyValue >= 1.0 || floatedReducedAnyValue >= 1.0);\n `;\n vecType = `bvec4`;\n }\n let checkOutOfBounds = '';\n if (inSize % windowSize > 0) {\n checkOutOfBounds = `\n if (inIdx < 0 || inIdx >= ${inSize}) {\n return initializationValue;\n }\n `;\n }\n this.userCode = `\n const float initializationValue = ${initializationValue};\n const vec4 ones = vec4(1.0, 1.0, 1.0, 1.0);\n\n float getValue(int batch, int inIdx) {\n ${checkOutOfBounds}\n return getX(batch, inIdx);\n }\n\n void main() {\n ivec2 coords = getOutputCoords();\n int batch = coords[0];\n int outIdx = coords[1];\n int inOffset = outIdx * ${windowSize};\n\n vec4 minMaxValue = vec4(${initializationValue});\n float prodValue = 1.0;\n float sumValue = 0.0;\n float allValue = 1.0;\n float anyValue = 0.0;\n\n for (int i = 0; i < ${windowSizeNearestVec4}; i += 4) {\n int inIdx = inOffset + i;\n ${vecType} values = ${vecType}(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1),\n getValue(batch, inIdx + 2),\n getValue(batch, inIdx + 3)\n );\n\n ${updateSnippet}\n }\n\n int inIdx = inOffset + ${windowSizeNearestVec4};\n if (${windowSizeVec4Remainder === 1}) {\n ${vecType} values = ${vecType}(\n getValue(batch, inIdx),\n initializationValue,\n initializationValue,\n initializationValue\n );\n\n ${updateSnippet}\n } else if (${windowSizeVec4Remainder === 2}) {\n ${vecType} values = ${vecType}(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1),\n initializationValue,\n initializationValue\n );\n\n ${updateSnippet}\n } else if (${windowSizeVec4Remainder === 3}) {\n ${vecType} values = ${vecType}(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1),\n getValue(batch, inIdx + 2),\n initializationValue\n );\n\n ${updateSnippet}\n }\n setOutput(${returnValue});\n }\n `;\n }\n}\n//# sourceMappingURL=reduce_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
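Editor's note: the ReduceProgram embedded above reduces each window of `windowSize` inputs in vec4 chunks and pads the remainder lanes with the initialization value. As a hedged illustration only (not part of the bundle; the helper name `reduceWindow` is invented for this note), the same loop structure over a flat JS array looks roughly like this:

// Illustrative sketch of the ReduceProgram windowing scheme (hypothetical helper, not library code).
function reduceWindow(x, inOffset, windowSize, inSize, op, initValue) {
  const nearestVec4 = Math.floor(windowSize / 4) * 4;           // full vec4 chunks
  const get = (i) => (i < 0 || i >= inSize) ? initValue : x[i]; // out-of-bounds reads become the init value
  let acc = initValue;
  for (let i = 0; i < nearestVec4; i += 4) {
    const idx = inOffset + i;
    acc = op(op(op(op(acc, get(idx)), get(idx + 1)), get(idx + 2)), get(idx + 3));
  }
  for (let i = nearestVec4; i < windowSize; i++) {              // remainder lanes (shader pads them with initValue)
    acc = op(acc, get(inOffset + i));
  }
  return acc;
}
// e.g. a max reduction over one window: reduceWindow(data, 0, windowSize, data.length, Math.max, -Infinity)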
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as shader_util from './shader_compiler_util';\nexport class ReshapePackedProgram {\n constructor(outputShape, inputShape) {\n this.variableNames = ['A'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = outputShape;\n let mainLoop = ``;\n for (let i = 0; i < 4; i++) {\n let thisRC = `thisRC = rc;`;\n if (i % 2 === 1) {\n thisRC += `thisRC.z += 1;`;\n }\n if (i > 1) {\n thisRC += `thisRC.y += 1;`;\n }\n mainLoop += `\n ${thisRC}\n ${i > 0 ? `if(thisRC.y < rows && thisRC.z < cols){` : ''}\n int flatIndex = getFlatIndex(thisRC);\n\n ivec3 inputRC = inputCoordsFromReshapedOutCoords(flatIndex);\n vec2 inputRCInnerDims = vec2(float(inputRC.y),float(inputRC.z));\n\n result[${i}] =\n getChannel(getA(inputRC.x, inputRC.y, inputRC.z), inputRCInnerDims);\n ${i > 0 ? '}' : ''}\n `;\n }\n this.userCode = `\n ${getReshapedInputCoords(inputShape)}\n ${shader_util.getFlatIndexFrom3D(outputShape)}\n\n void main() {\n ivec3 rc = getOutputCoords();\n\n vec4 result = vec4(0.);\n\n ivec3 thisRC;\n int rows = ${outputShape[1]};\n int cols = ${outputShape[2]};\n\n ${mainLoop}\n\n setOutput(result);\n }\n `;\n }\n}\nfunction getReshapedInputCoords(shape) {\n const coordsFromIndexSnippet = shader_util.getLogicalCoordinatesFromFlatIndex(['r', 'c', 'd'], shape);\n return `\n ivec3 inputCoordsFromReshapedOutCoords(int index) {\n ${coordsFromIndexSnippet}\n return ivec3(r, c, d);\n }\n `;\n}\n//# sourceMappingURL=reshape_packed_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class ResizeBilinearBackpropProgram {\n constructor(dy, x, alignCorners) {\n this.variableNames = ['dy'];\n this.outputShape = [];\n this.outputShape = x.shape;\n const [, xHeight, xWidth,] = x.shape;\n const [, yHeight, yWidth] = dy.shape;\n // In the backwards pass, we want to find the pixels that were generated for\n // each pixel in the input image the forward pass and add the corresponding\n // coefficient from dy to the gradient (with some interpolation).\n const effectiveXSize = [\n (alignCorners && yHeight > 1) ? xHeight - 1 : xHeight,\n (alignCorners && yWidth > 1) ? xWidth - 1 : xWidth\n ];\n const effectiveYSize = [\n (alignCorners && yHeight > 1) ? 
yHeight - 1 : yHeight,\n (alignCorners && yWidth > 1) ? yWidth - 1 : yWidth\n ];\n const heightScale = effectiveXSize[0] / effectiveYSize[0];\n const widthScale = effectiveXSize[1] / effectiveYSize[1];\n const invHeightScale = 1 / heightScale;\n const invWidthScale = 1 / widthScale;\n // This defines the size of the window of values around a particular\n // index in dy that we want to search for contributions to dx.\n const winHeight = (Math.ceil(invHeightScale) * 2) + 2;\n const winWidth = (Math.ceil(invWidthScale) * 2) + 2;\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int d = coords[3];\n int r = coords[1];\n int c = coords[2];\n\n float accumulator = 0.0;\n\n const float heightScale = float(${heightScale});\n const float widthScale = float(${widthScale});\n\n const float invHeightScale = float(${invHeightScale});\n const float invWidthScale = float(${invWidthScale});\n\n const int winHeight = int(${winHeight});\n const int winWidth = int(${winWidth});\n\n // Compute bounds for where in dy we will look\n float startRLerp = floor(float(r) * invHeightScale);\n int startDyR = int(startRLerp - float(winHeight / 2));\n\n float startCLerp = floor(float(c) * invWidthScale);\n int startDyC = int(startCLerp - float(winWidth / 2));\n\n // Loop over dy\n for (int dyROffset = 0; dyROffset < winHeight; dyROffset++) {\n int dyR = dyROffset + startDyR;\n\n // Guard against the window exceeding the bounds of dy\n if (dyR < 0 || dyR >= ${yHeight}) {\n continue;\n }\n\n for (int dyCOffset = 0; dyCOffset < winWidth; dyCOffset++) {\n int dyC = dyCOffset + startDyC;\n\n // Guard against the window exceeding the bounds of dy\n if (dyC < 0 || dyC >= ${yWidth}) {\n continue;\n }\n\n float dxR = float(dyR) * heightScale;\n int topDxRIndex = int(floor(dxR));\n int bottomDxRIndex = int(min(ceil(dxR), ${xHeight - 1}.0));\n float dxRLerp = dxR - float(topDxRIndex);\n float inverseDxRLerp = 1.0 - dxRLerp;\n\n float dxC = float(dyC) * widthScale;\n int leftDxCIndex = int(floor(dxC));\n int rightDxCIndex = int(min(ceil(dxC), ${xWidth - 1}.0));\n float dxCLerp = dxC - float(leftDxCIndex);\n float inverseDxCLerp = 1.0 - dxCLerp;\n\n if (r == topDxRIndex && c == leftDxCIndex) {\n // topLeft\n accumulator +=\n getDy(b, dyR, dyC, d) * inverseDxRLerp * inverseDxCLerp;\n }\n\n if (r == topDxRIndex && c == rightDxCIndex) {\n // topRight\n accumulator += getDy(b, dyR, dyC, d) * inverseDxRLerp * dxCLerp;\n }\n\n if (r == bottomDxRIndex && c == leftDxCIndex) {\n // bottomLeft\n accumulator += getDy(b, dyR, dyC, d) * dxRLerp * inverseDxCLerp;\n }\n\n if (r == bottomDxRIndex && c == rightDxCIndex) {\n // bottomRight\n accumulator += getDy(b, dyR, dyC, d) * dxRLerp * dxCLerp;\n }\n }\n }\n // End loop over dy\n\n setOutput(accumulator);\n }\n `;\n }\n}\n//# sourceMappingURL=resize_bilinear_backprop_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
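Editor's note: ResizeBilinearBackpropProgram above gathers into dx instead of scattering from dy, so for each dx pixel it scans a fixed window of dy large enough to cover every dy pixel that could have sampled it. A hedged sketch of that window arithmetic (the helper name `dyBounds` is invented for this note; out-of-range rows are simply skipped in the shader):

// Illustrative sketch only, not library code.
function dyBounds(r, heightScale) {
  const invHeightScale = 1 / heightScale;
  const winHeight = Math.ceil(invHeightScale) * 2 + 2;                  // search window size in dy rows
  const startDyR = Math.floor(r * invHeightScale) - Math.floor(winHeight / 2);
  return { startDyR, winHeight };                                       // scan dyR in [startDyR, startDyR + winHeight)
}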
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class ResizeBilinearProgram {\n constructor(inputShape, newHeight, newWidth, alignCorners) {\n this.variableNames = ['A'];\n this.outputShape = [];\n const [batch, oldHeight, oldWidth, depth] = inputShape;\n this.outputShape = [batch, newHeight, newWidth, depth];\n const effectiveInSize = [\n (alignCorners && newHeight > 1) ? oldHeight - 1 : oldHeight,\n (alignCorners && newWidth > 1) ? oldWidth - 1 : oldWidth\n ];\n const effectiveOutSize = [\n (alignCorners && newHeight > 1) ? newHeight - 1 : newHeight,\n (alignCorners && newWidth > 1) ? newWidth - 1 : newWidth\n ];\n this.userCode = `\n const vec2 effectiveInputOverOutputRatioRC = vec2(\n ${effectiveInSize[0] / effectiveOutSize[0]},\n ${effectiveInSize[1] / effectiveOutSize[1]});\n const vec2 inputShapeRC = vec2(${oldHeight}.0, ${oldWidth}.0);\n\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int d = coords[3];\n ivec2 yRC = coords.yz;\n\n // Fractional source index.\n vec2 sourceFracIndexRC = vec2(yRC) * effectiveInputOverOutputRatioRC;\n\n // Compute the four integer indices.\n ivec2 sourceFloorRC = ivec2(sourceFracIndexRC);\n ivec2 sourceCeilRC = ivec2(\n min(inputShapeRC - 1.0, ceil(sourceFracIndexRC)));\n\n float topLeft = getA(b, sourceFloorRC.x, sourceFloorRC.y, d);\n float bottomLeft = getA(b, sourceCeilRC.x, sourceFloorRC.y, d);\n float topRight = getA(b, sourceFloorRC.x, sourceCeilRC.y, d);\n float bottomRight = getA(b, sourceCeilRC.x, sourceCeilRC.y, d);\n\n vec2 fracRC = sourceFracIndexRC - vec2(sourceFloorRC);\n\n float top = topLeft + (topRight - topLeft) * fracRC.y;\n float bottom = bottomLeft + (bottomRight - bottomLeft) * fracRC.y;\n float newValue = top + (bottom - top) * fracRC.x;\n\n setOutput(newValue);\n }\n `;\n }\n}\n//# sourceMappingURL=resize_bilinear_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
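Editor's note: ResizeBilinearProgram above maps each output pixel to a fractional source index and blends the four surrounding texels. As an illustration only (single-channel plane, invented helper name `sampleBilinear`), the same arithmetic in plain JS:

// Illustrative sketch only, not library code. src is a flat [oldH, oldW] array.
function sampleBilinear(src, oldH, oldW, newH, newW, alignCorners, yR, yC) {
  const effInH = (alignCorners && newH > 1) ? oldH - 1 : oldH;
  const effInW = (alignCorners && newW > 1) ? oldW - 1 : oldW;
  const effOutH = (alignCorners && newH > 1) ? newH - 1 : newH;
  const effOutW = (alignCorners && newW > 1) ? newW - 1 : newW;
  const fracR = yR * (effInH / effOutH);                      // fractional source row
  const fracC = yC * (effInW / effOutW);                      // fractional source column
  const r0 = Math.floor(fracR), r1 = Math.min(oldH - 1, Math.ceil(fracR));
  const c0 = Math.floor(fracC), c1 = Math.min(oldW - 1, Math.ceil(fracC));
  const at = (r, c) => src[r * oldW + c];
  const top = at(r0, c0) + (at(r0, c1) - at(r0, c0)) * (fracC - c0);
  const bottom = at(r1, c0) + (at(r1, c1) - at(r1, c0)) * (fracC - c0);
  return top + (bottom - top) * (fracR - r0);
}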
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class ResizeBilinearPackedProgram {\n constructor(inputShape, newHeight, newWidth, alignCorners) {\n this.variableNames = ['A'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = [];\n const [batch, oldHeight, oldWidth, depth] = inputShape;\n this.outputShape = [batch, newHeight, newWidth, depth];\n const effectiveInSize = [\n (alignCorners && newHeight > 1) ? oldHeight - 1 : oldHeight,\n (alignCorners && newWidth > 1) ? oldWidth - 1 : oldWidth\n ];\n const effectiveOutSize = [\n (alignCorners && newHeight > 1) ? newHeight - 1 : newHeight,\n (alignCorners && newWidth > 1) ? newWidth - 1 : newWidth\n ];\n this.userCode = `\n const vec3 effectiveInputOverOutputRatioRC = vec3(\n ${effectiveInSize[0] / effectiveOutSize[0]},\n ${effectiveInSize[1] / effectiveOutSize[1]},\n ${effectiveInSize[1] / effectiveOutSize[1]});\n const vec3 inputShapeRC = vec3(${oldHeight}.0, ${oldWidth}.0,\n ${oldWidth}.0);\n\n float getAValue(int b, int r, int c, int d) {\n return getChannel(getA(b, r, c, d), vec2(c, d));\n }\n\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int d = coords[3];\n // Calculate values for next column in yRC.z.\n ivec3 yRC = coords.yzz + ivec3(0, 0, 1);\n\n // Fractional source index.\n vec3 sourceFracIndexRC = vec3(yRC) * effectiveInputOverOutputRatioRC;\n\n // Compute the four integer indices.\n ivec3 sourceFloorRC = ivec3(sourceFracIndexRC);\n ivec3 sourceCeilRC = ivec3(\n min(inputShapeRC - 1.0, ceil(sourceFracIndexRC)));\n\n // Should we calculate next column and row elements in 2x2 packed cell.\n bool hasNextCol = d < ${depth - 1};\n bool hasNextRow = coords.z < ${newWidth - 1};\n\n // In parallel, construct four corners for all four components in\n // packed 2x2 cell.\n vec4 topLeft = vec4(\n getAValue(b, sourceFloorRC.x, sourceFloorRC.y, d),\n hasNextCol ? getAValue(b, sourceFloorRC.x, sourceFloorRC.y, d + 1)\n : 0.0,\n hasNextRow ? getAValue(b, sourceFloorRC.x, sourceFloorRC.z, d)\n : 0.0,\n (hasNextRow && hasNextCol) ?\n getAValue(b, sourceFloorRC.x, sourceFloorRC.z, d + 1) : 0.0);\n\n vec4 bottomLeft = vec4(\n getAValue(b, sourceCeilRC.x, sourceFloorRC.y, d),\n hasNextCol ? getAValue(b, sourceCeilRC.x, sourceFloorRC.y, d + 1)\n : 0.0,\n hasNextRow ? getAValue(b, sourceCeilRC.x, sourceFloorRC.z, d)\n : 0.0,\n (hasNextRow && hasNextCol) ?\n getAValue(b, sourceCeilRC.x, sourceFloorRC.z, d + 1) : 0.0);\n\n vec4 topRight = vec4(\n getAValue(b, sourceFloorRC.x, sourceCeilRC.y, d),\n hasNextCol ? getAValue(b, sourceFloorRC.x, sourceCeilRC.y, d + 1)\n : 0.0,\n hasNextRow ? getAValue(b, sourceFloorRC.x, sourceCeilRC.z, d)\n : 0.0,\n (hasNextRow && hasNextCol) ?\n getAValue(b, sourceFloorRC.x, sourceCeilRC.z, d + 1) : 0.0);\n\n vec4 bottomRight = vec4(\n getAValue(b, sourceCeilRC.x, sourceCeilRC.y, d),\n hasNextCol ? 
getAValue(b, sourceCeilRC.x, sourceCeilRC.y, d + 1)\n : 0.0,\n hasNextRow ? getAValue(b, sourceCeilRC.x, sourceCeilRC.z, d)\n : 0.0,\n (hasNextRow && hasNextCol) ?\n getAValue(b, sourceCeilRC.x, sourceCeilRC.z, d + 1) : 0.0);\n\n vec3 fracRC = sourceFracIndexRC - vec3(sourceFloorRC);\n\n vec4 top = mix(topLeft, topRight, fracRC.yyzz);\n vec4 bottom = mix(bottomLeft, bottomRight, fracRC.yyzz);\n vec4 newValue = mix(top, bottom, fracRC.x);\n\n setOutput(newValue);\n }\n `;\n }\n}\n//# sourceMappingURL=resize_bilinear_packed_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class ResizeNearestNeigborBackpropProgram {\n constructor(dy, x, alignCorners) {\n this.variableNames = ['dy'];\n this.outputShape = [];\n this.outputShape = x.shape;\n const [, xHeight, xWidth,] = x.shape;\n const [, yHeight, yWidth] = dy.shape;\n // In the backwards pass, we want to find the pixels that were generated for\n // each pixel in the input image the forward pass and add the corresponding\n // coefficient from dy to the gradient (with some interpolation).\n const effectiveXSize = [\n (alignCorners && yHeight > 1) ? xHeight - 1 : xHeight,\n (alignCorners && yWidth > 1) ? xWidth - 1 : xWidth\n ];\n const effectiveYSize = [\n (alignCorners && yHeight > 1) ? yHeight - 1 : yHeight,\n (alignCorners && yWidth > 1) ? 
yWidth - 1 : yWidth\n ];\n const heightScale = effectiveXSize[0] / effectiveYSize[0];\n const widthScale = effectiveXSize[1] / effectiveYSize[1];\n const invHeightScale = 1 / heightScale;\n const invWidthScale = 1 / widthScale;\n // This defines the size of the window of values around a particular\n // index in dy that we want to search for contributions to dx.\n const winHeight = (Math.ceil(invHeightScale) * 2) + 2;\n const winWidth = (Math.ceil(invWidthScale) * 2) + 2;\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int d = coords[3];\n int r = coords[1];\n int c = coords[2];\n\n float accumulator = 0.0;\n\n const float heightScale = float(${heightScale});\n const float widthScale = float(${widthScale});\n\n const float invHeightScale = float(${invHeightScale});\n const float invWidthScale = float(${invWidthScale});\n\n const int winHeight = int(${winHeight});\n const int winWidth = int(${winWidth});\n\n // Compute bounds for where in dy we will look\n float startRLerp = floor(float(r) * invHeightScale);\n int startDyR = int(floor(startRLerp - float(winHeight / 2)));\n\n float startCLerp = floor(float(c) * invWidthScale);\n int startDyC = int(floor(startCLerp - float(winWidth / 2)));\n\n // Loop over dy\n for (int dyROffset = 0; dyROffset < winHeight; dyROffset++) {\n int dyR = dyROffset + startDyR;\n\n // Guard against the window exceeding the bounds of dy\n if (dyR < 0 || dyR >= ${yHeight}) {\n continue;\n }\n\n for (int dyCOffset = 0; dyCOffset < winWidth; dyCOffset++) {\n int dyC = dyCOffset + startDyC;\n\n // Guard against the window exceeding the bounds of dy\n if (dyC < 0 || dyC >= ${yWidth}) {\n continue;\n }\n\n float sourceFracRow =\n float(${effectiveXSize[0]}) *\n (float(dyR) / float(${effectiveYSize[0]}));\n\n float sourceFracCol =\n float(${effectiveXSize[1]}) *\n (float(dyC) / float(${effectiveYSize[1]}));\n\n int sourceNearestRow = int(min(\n float(int(${xHeight}) - 1),\n ${alignCorners} ? float(round(sourceFracRow)) :\n float(floor(sourceFracRow))));\n\n int sourceNearestCol = int(min(\n float(int(${xWidth}) - 1),\n ${alignCorners} ? float(round(sourceFracCol)) :\n float(floor(sourceFracCol))));\n\n if (r == sourceNearestRow && c == sourceNearestCol) {\n accumulator += getDy(b, dyR, dyC, d);\n }\n }\n }\n // End loop over dy\n\n setOutput(accumulator);\n }\n `;\n }\n}\n//# sourceMappingURL=resize_nearest_neighbor_backprop_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class ResizeNearestNeighborProgram {\n constructor(inputShape, newHeight, newWidth, alignCorners) {\n this.variableNames = ['A'];\n this.outputShape = [];\n const [batch, oldHeight, oldWidth, depth] = inputShape;\n this.outputShape = [batch, newHeight, newWidth, depth];\n const effectiveInSize = [\n (alignCorners && newHeight > 1) ? 
oldHeight - 1 : oldHeight,\n (alignCorners && newWidth > 1) ? oldWidth - 1 : oldWidth\n ];\n const effectiveOutSize = [\n (alignCorners && newHeight > 1) ? newHeight - 1 : newHeight,\n (alignCorners && newWidth > 1) ? newWidth - 1 : newWidth\n ];\n // When align corners is false, we rounds the value with floor.\n const roundBase = alignCorners ? '0.5' : '0.0';\n this.userCode = `\n const vec2 effectiveInputOverOutputRatioRC = vec2(\n ${effectiveInSize[0] / effectiveOutSize[0]},\n ${effectiveInSize[1] / effectiveOutSize[1]});\n const vec2 inputShapeRC = vec2(${oldHeight}.0, ${oldWidth}.0);\n\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int d = coords[3];\n ivec2 yRC = coords.yz;\n\n // Fractional source index.\n vec2 sourceFracIndexRC = vec2(yRC) * effectiveInputOverOutputRatioRC;\n\n // Compute the coordinators of nearest neighbor point.\n ivec2 sourceNearestRC = ivec2(\n min(inputShapeRC - 1.0, floor(sourceFracIndexRC + ${roundBase})));\n\n float newValue = getA(b, sourceNearestRC.x, sourceNearestRC.y, d);\n\n setOutput(newValue);\n }\n `;\n }\n}\n//# sourceMappingURL=resize_nearest_neighbor_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getCoordsDataType } from './shader_compiler';\nexport class ReverseProgram {\n constructor(xShape, axis) {\n this.variableNames = ['x'];\n const rank = xShape.length;\n if (rank > 4) {\n throw new Error(`WebGL backend: Reverse of rank-${rank} tensor is not yet supported`);\n }\n this.outputShape = xShape;\n if (rank === 1) {\n this.userCode = `\n void main() {\n int coord = getOutputCoords();\n setOutput(getX(${xShape[0]} - coord - 1));\n }\n `;\n return;\n }\n const getInCoord = (i) => {\n if (axis.indexOf(i) !== -1 && xShape[i] !== 1) {\n return `${xShape[i]} - coords[${i}] - 1`;\n }\n return `coords[${i}]`;\n };\n const inCoords = xShape.map((_, i) => getInCoord(i)).join(',');\n const type = getCoordsDataType(rank);\n this.userCode = `\n void main() {\n ${type} coords = getOutputCoords();\n setOutput(getX(${inCoords}));\n }\n `;\n }\n}\n//# sourceMappingURL=reverse_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
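Editor's note: ResizeNearestNeighborProgram above picks the source index by scaling the output coordinate and flooring, adding a 0.5 rounding offset only when alignCorners is true. A hedged one-axis sketch (invented helper name `nearestSourceIndex`):

// Illustrative sketch only, not library code.
function nearestSourceIndex(outIdx, inSize, outSize, alignCorners) {
  const effIn = (alignCorners && outSize > 1) ? inSize - 1 : inSize;
  const effOut = (alignCorners && outSize > 1) ? outSize - 1 : outSize;
  const roundBase = alignCorners ? 0.5 : 0.0;   // round when aligning corners, floor otherwise
  const frac = outIdx * (effIn / effOut);
  return Math.min(inSize - 1, Math.floor(frac + roundBase));
}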
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getChannels } from './packing_util';\nimport { getCoordsDataType } from './shader_compiler';\nexport class ReversePackedProgram {\n constructor(xShape, axis) {\n this.variableNames = ['x'];\n this.packedInputs = true;\n this.packedOutput = true;\n const rank = xShape.length;\n if (rank > 4) {\n throw new Error(`WebGL backend: Reverse of rank-${rank} tensor is not yet supported`);\n }\n this.outputShape = xShape;\n const channels = getChannels('rc', rank);\n const nextColumn = `${channels[rank - 1]} + 1 < ${this.outputShape[rank - 1]}`;\n const nextRow = `${channels[rank - 2]} + 1 < ${this.outputShape[rank - 2]}`;\n const type = getCoordsDataType(rank);\n if (rank === 1) {\n this.userCode = `\n void main(){\n int rc = getOutputCoords();\n vec4 result = vec4(0.);\n result.r = getChannel(getX(${xShape[0]} - rc - 1),\n ${xShape[0]} - rc - 1);\n if(${nextColumn}){\n result.g = getChannel(getX(${xShape[0]} - (rc + 1) - 1),\n ${xShape[0]} - (rc + 1) - 1);\n }\n setOutput(result);\n }\n `;\n }\n else {\n this.userCode = `\n void main() {\n ${type} rc = getOutputCoords();\n vec4 result = vec4(0.);\n result.r = ${getR(channels.slice())};\n if(${nextColumn}){\n result.g = ${getG(channels.slice())};\n }\n if(${nextRow}) {\n result.b = ${getB(channels.slice())};\n if(${nextColumn}) {\n result.a = ${getA(channels.slice())};\n }\n }\n setOutput(result);\n }\n `;\n }\n function getR(channels) {\n return getChannel(channels);\n }\n function getG(channels) {\n channels[rank - 1] = '(' + channels[rank - 1] + ` + 1)`;\n return getChannel(channels);\n }\n function getB(channels) {\n channels[rank - 2] = '(' + channels[rank - 2] + ` + 1)`;\n return getChannel(channels);\n }\n function getA(channels) {\n channels[rank - 1] = '(' + channels[rank - 1] + ` + 1)`;\n channels[rank - 2] = '(' + channels[rank - 2] + ` + 1)`;\n return getChannel(channels);\n }\n function getChannel(channels) {\n const inCoordsArray = xShape.map((_, i) => getInCoord(i, channels));\n const inCoords = inCoordsArray.join(',');\n const innerDims = inCoordsArray.slice(-2).join(',');\n return `getChannel(getX(${inCoords}), vec2(${innerDims}))`;\n }\n function getInCoord(i, channels1) {\n if (axis.indexOf(i) !== -1 && xShape[i] !== 1) {\n return `${xShape[i]} - ${channels1[i]} - 1`;\n }\n else {\n return `${channels1[i]}`;\n }\n }\n }\n}\n//# sourceMappingURL=reverse_packed_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
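Editor's note: both ReverseProgram and ReversePackedProgram above reduce to one coordinate transform: flip only the axes being reversed (and only when that dimension is larger than 1). A hedged sketch (invented helper name `reverseInCoord`):

// Illustrative sketch only, not library code.
function reverseInCoord(coords, shape, axes) {
  return coords.map((c, i) =>
    (axes.includes(i) && shape[i] !== 1) ? shape[i] - c - 1 : c);
}
// e.g. reverseInCoord([0, 2], [4, 5], [1]) -> [0, 2] becomes [0, 2] flipped on axis 1: [0, 2] -> [0, 2]? no: [0, 5 - 2 - 1] = [0, 2]... use [0, 1]: -> [0, 3]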
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getCoordsDataType } from './shader_compiler';\nexport class ScatterProgram {\n constructor(updateSize, sliceDim, indicesRank, updatesRank, strides, shape, summingDupeIndex = true) {\n this.variableNames = ['updates', 'indices', 'defaultValue'];\n this.outputShape = shape;\n const stridesType = getCoordsDataType(strides.length);\n const dtype = getCoordsDataType(shape.length);\n let indicesString = '';\n if (indicesRank === 1) {\n indicesString = 'i';\n }\n else if (indicesRank === 2) {\n indicesString = 'i, j';\n }\n const indicesSnippet = `getIndices(${indicesString})`;\n let updatesString = '';\n if (updatesRank === 1) {\n updatesString = 'i';\n }\n else if (updatesRank === 2) {\n updatesString = 'i, coords[1]';\n }\n const updatesSnippet = `getUpdates(${updatesString})`;\n const strideString = sliceDim > 1 ? 'strides[j]' : 'strides';\n this.userCode = `\n ${stridesType} strides = ${stridesType}(${strides});\n\n void main() {\n ${dtype} coords = getOutputCoords();\n float sum = 0.0;\n bool found = false;\n for (int i = 0; i < ${updateSize}; i++) {\n int flattenedIndex = 0;\n for (int j = 0; j < ${sliceDim}; j++) {\n int index = round(${indicesSnippet});\n flattenedIndex += index * ${strideString};\n }\n if (flattenedIndex == coords[0]) {\n sum += ${updatesSnippet};\n found = true;\n }\n }\n setOutput(mix(getDefaultValue(), sum, float(found)));\n }\n `;\n }\n}\n//# sourceMappingURL=scatter_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
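Editor's note: ScatterProgram above walks every update for every output row, sums those whose flattened index matches, and otherwise falls back to the default value. A hedged sketch of that inner loop for the rank-2 indices / rank-1 updates case (invented helper name `scatterRow`; `strides` assumed to be an array):

// Illustrative sketch only, not library code.
function scatterRow(outRow, indices, updates, strides, sliceDim, defaultValue) {
  let sum = 0;
  let found = false;
  for (let i = 0; i < updates.length; i++) {
    let flat = 0;
    for (let j = 0; j < sliceDim; j++) {
      flat += Math.round(indices[i * sliceDim + j]) * strides[j];  // flatten the index slice
    }
    if (flat === outRow) { sum += updates[i]; found = true; }      // duplicate indices are summed
  }
  return found ? sum : defaultValue;
}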
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class SegmentOpProgram {\n constructor(segOpInfo, segOpType) {\n this.variableNames = ['x', 'segmentIds'];\n const windowSize = segOpInfo.windowSize;\n const batchSize = segOpInfo.batchSize;\n const inSize = segOpInfo.inSize;\n const numSegments = segOpInfo.numSegments;\n const outSize = numSegments * Math.ceil(inSize / windowSize);\n this.outputShape = [batchSize, outSize];\n const initializationValue = '0.0';\n const returnValue = `sumValue`;\n const windowSizeNearestVec4 = Math.floor(windowSize / 4) * 4;\n const windowSizeVec4Remainder = windowSize % 4;\n const updateSnippet = `\n sumValue += dot(values, segFilter);\n `;\n let checkValueOutOfBounds = '';\n if (inSize % windowSize > 0) {\n checkValueOutOfBounds = `\n if (inIdx < 0 || inIdx >= ${inSize}) {\n return initializationValue;\n }\n `;\n }\n let checkSegmentIdOutOfBounds = '';\n if (inSize % windowSize > 0) {\n checkSegmentIdOutOfBounds = `\n if (inIdx < 0 || inIdx >= ${inSize}) {\n return -1.0;\n }\n `;\n }\n this.userCode = `\n const float initializationValue = ${initializationValue};\n\n float getValue(int batch, int inIdx) {\n ${checkValueOutOfBounds}\n return getX(batch, inIdx);\n }\n\n float getSegmentIdAtIndex(int inIdx) {\n ${checkSegmentIdOutOfBounds}\n return getSegmentIds(inIdx);\n }\n\n void main() {\n ivec2 coords = getOutputCoords();\n int batch = coords[0];\n int outIdx = coords[1];\n int inOffset = int(floor(float(outIdx) / float(\n ${numSegments})) * float(${windowSize}));\n int currentSeg = int(mod(float(outIdx), float(${numSegments})));\n\n float sumValue = 0.0;\n\n for (int i = 0; i < ${windowSizeNearestVec4}; i += 4) {\n int inIdx = inOffset + i;\n vec4 values = vec4(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1),\n getValue(batch, inIdx + 2),\n getValue(batch, inIdx + 3)\n );\n\n vec4 segFilter = vec4(\n int(getSegmentIdAtIndex(inIdx)) == currentSeg ? 1 : 0,\n int(getSegmentIdAtIndex(inIdx + 1)) == currentSeg ? 1 : 0,\n int(getSegmentIdAtIndex(inIdx + 2)) == currentSeg ? 1 : 0,\n int(getSegmentIdAtIndex(inIdx + 3)) == currentSeg ? 1 : 0\n );\n\n ${updateSnippet}\n }\n\n int inIdx = inOffset + ${windowSizeNearestVec4};\n if (${windowSizeVec4Remainder === 1}) {\n vec4 values = vec4(\n getValue(batch, inIdx),\n initializationValue,\n initializationValue,\n initializationValue\n );\n\n int inIdxSeg = int(getSegmentIdAtIndex(inIdx));\n\n vec4 segFilter = vec4(\n int(getSegmentIdAtIndex(inIdx)) == currentSeg ? 1 : 0,\n 0,\n 0,\n 0\n );\n\n ${updateSnippet}\n } else if (${windowSizeVec4Remainder === 2}) {\n vec4 values = vec4(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1),\n initializationValue,\n initializationValue\n );\n\n vec4 segFilter = vec4(\n int(getSegmentIdAtIndex(inIdx)) == currentSeg ? 1 : 0,\n int(getSegmentIdAtIndex(inIdx + 1)) == currentSeg ? 
1 : 0,\n 0,\n 0\n );\n\n ${updateSnippet}\n } else if (${windowSizeVec4Remainder === 3}) {\n vec4 values = vec4(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1),\n getValue(batch, inIdx + 2),\n initializationValue\n );\n\n vec4 segFilter = vec4(\n int(getSegmentIdAtIndex(inIdx)) == currentSeg ? 1 : 0,\n int(getSegmentIdAtIndex(inIdx + 1)) == currentSeg ? 1 : 0,\n int(getSegmentIdAtIndex(inIdx + 2)) == currentSeg ? 1 : 0,\n 0\n );\n\n ${updateSnippet}\n }\n setOutput(${returnValue});\n }\n `;\n }\n}\n//# sourceMappingURL=segment_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getCoordsDataType } from './shader_compiler';\nexport class SelectProgram {\n constructor(cRank, shape, rank) {\n this.variableNames = ['c', 'a', 'b'];\n this.outputShape = shape;\n let cCoords;\n let abCoords;\n if (rank > 4) {\n throw Error(`Where for rank ${rank} is not yet supported`);\n }\n if (rank === 1) {\n abCoords = `resRC`;\n cCoords = `resRC`;\n }\n else {\n const currentCoords = ['resRC.x', 'resRC.y', 'resRC.z', 'resRC.w'];\n const cCoordVars = [];\n const abCoordVars = [];\n for (let i = 0; i < shape.length; i++) {\n abCoordVars.push(`${currentCoords[i]}`);\n if (i < cRank) {\n cCoordVars.push(`${currentCoords[i]}`);\n }\n }\n cCoords = cCoordVars.join();\n abCoords = abCoordVars.join();\n }\n const dtype = getCoordsDataType(rank);\n this.userCode = `\n void main() {\n ${dtype} resRC = getOutputCoords();\n float cVal = getC(${cCoords});\n if (cVal >= 1.0) {\n setOutput(getA(${abCoords}));\n } else {\n setOutput(getB(${abCoords}));\n }\n }\n `;\n }\n}\n//# sourceMappingURL=select_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
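Editor's note: SegmentOpProgram above masks each window element with "does its segment id match the segment this output element belongs to" and dot-products against that mask, so repeated passes converge to per-segment sums. The end result for one batch row is, roughly, the following (illustration only; invented helper name `segmentSum`):

// Illustrative sketch only, not library code.
function segmentSum(x, segmentIds, numSegments) {
  const out = new Float32Array(numSegments);
  for (let i = 0; i < x.length; i++) {
    const seg = segmentIds[i];
    if (seg >= 0 && seg < numSegments) out[seg] += x[i];  // ids outside [0, numSegments) are dropped
  }
  return out;
}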
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getCoordsDataType } from './shader_compiler';\nexport class SliceProgram {\n constructor(destSize) {\n this.variableNames = ['source'];\n this.outputShape = destSize;\n this.rank = destSize.length;\n const dtype = getCoordsDataType(this.rank);\n const uniformPart = `uniform int start[${this.rank}];`;\n const sourceCoords = getCoords(this.rank);\n let body;\n const coordSum = destSize.map((_, i) => {\n return `sourceLoc.${coords[i]} = start[${i}] + coords.${coords[i]};`;\n });\n body = `\n ${dtype} sourceLoc;\n ${dtype} coords = getOutputCoords();\n ${coordSum.join('\\n')}\n `;\n this.userCode = `\n ${uniformPart}\n void main() {\n ${body}\n setOutput(getSource(${sourceCoords}));\n }\n `;\n }\n getCustomSetupFunc(start) {\n if (start.length !== this.rank) {\n throw Error(`The rank (${this.rank}) of the program must match the ` +\n `length of start (${start.length})`);\n }\n return (gpgpu, webGLProgram) => {\n if (this.startLoc == null) {\n this.startLoc = gpgpu.getUniformLocationNoThrow(webGLProgram, 'start');\n if (this.startLoc == null) {\n // This means the compiler has optimized and realized it doesn't need\n // the uniform.\n return;\n }\n }\n gpgpu.gl.uniform1iv(this.startLoc, start);\n };\n }\n}\nconst coords = ['x', 'y', 'z', 'w', 'u', 'v'];\nfunction getCoords(rank) {\n if (rank === 1) {\n return 'sourceLoc';\n }\n else if (rank <= 6) {\n return coords.slice(0, rank).map(x => 'sourceLoc.' + x).join(',');\n }\n else {\n throw Error(`Slicing for rank ${rank} is not yet supported`);\n }\n}\n//# sourceMappingURL=slice_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getChannels } from './packing_util';\nimport { getCoordsDataType } from './shader_compiler';\nexport class SlicePackedProgram {\n constructor(destSize) {\n this.variableNames = ['source'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = destSize;\n this.rank = destSize.length;\n const dtype = getCoordsDataType(this.rank);\n const coords = getChannels('coords', this.rank);\n const sourceLoc = getChannels('sourceLoc', this.rank);\n const innerDims = this.rank === 1 ? 
'sourceLoc' : `vec2(${sourceLoc.slice(-2).join()})`;\n const getChannel = `getChannel(getSource(${sourceLoc.join()}), ${innerDims})`;\n const upperRow = `\n result.x = ${getChannel};\n if (++${coords[this.rank - 1]} < ${destSize[this.rank - 1]}) {\n ++${sourceLoc[this.rank - 1]};\n result.y = ${getChannel};\n --${sourceLoc[this.rank - 1]};\n }\n `;\n const lowerRow = this.rank === 1 ? '' : `\n --${coords[this.rank - 1]};\n if (++${coords[this.rank - 2]} < ${destSize[this.rank - 2]}) {\n ++${sourceLoc[this.rank - 2]};\n result.z = ${getChannel};\n if (++${coords[this.rank - 1]} < ${destSize[this.rank - 1]}) {\n ++${sourceLoc[this.rank - 1]};\n result.w = ${getChannel};\n }\n }\n `;\n const sourceLocSetup = this.rank <= 4 ?\n `sourceLoc = coords +\n ${dtype}(${destSize.map((_, i) => `start[${i}]`).join()});` :\n destSize.map((_, i) => `${sourceLoc[i]} = ${coords[i]} + start[${i}];`)\n .join('\\n');\n this.userCode = `\n uniform int start[${this.rank}];\n void main() {\n ${dtype} coords = getOutputCoords();\n ${dtype} sourceLoc;\n ${sourceLocSetup}\n vec4 result = vec4(0.);\n ${upperRow}\n ${lowerRow}\n setOutput(result);\n }\n `;\n }\n getCustomSetupFunc(start) {\n if (start.length !== this.rank) {\n throw Error(`The rank (${this.rank}) of the program must match the ` +\n `length of start (${start.length})`);\n }\n return (gpgpu, webGLProgram) => {\n if (this.startLoc == null) {\n this.startLoc = gpgpu.getUniformLocationNoThrow(webGLProgram, 'start');\n if (this.startLoc == null) {\n // This means the compiler has optimized and realized it doesn't need\n // the uniform.\n return;\n }\n }\n gpgpu.gl.uniform1iv(this.startLoc, start);\n };\n }\n}\n//# sourceMappingURL=slice_packed_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getCoordsDataType } from './shader_compiler';\nexport class StridedSliceProgram {\n constructor(begin, strides, size) {\n this.variableNames = ['x'];\n this.outputShape = size;\n const rank = size.length;\n const inputDtype = getCoordsDataType(size.length);\n const dtype = getCoordsDataType(size.length);\n let newCoords = '';\n if (rank === 1) {\n newCoords = 'coords * strides + begin';\n }\n else {\n let outputAxis = 0;\n newCoords =\n size.map((_, i) => {\n outputAxis++;\n return size.length === 1 ?\n `coords * strides[${i}] + begin[${i}]` :\n `coords[${outputAxis - 1}] * strides[${i}] + begin[${i}]`;\n })\n .join(',');\n }\n this.userCode = `\n ${inputDtype} begin = ${inputDtype}(${begin});\n ${inputDtype} strides = ${inputDtype}(${strides});\n\n void main() {\n ${dtype} coords = getOutputCoords();\n setOutput(getX(${newCoords}));\n }\n `;\n }\n}\n//# sourceMappingURL=strided_slice_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
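Editor's note: SliceProgram, SlicePackedProgram and StridedSliceProgram above all come down to one coordinate transform: every output coordinate maps to begin + coord * stride in the input (stride 1 for a plain slice). A hedged sketch (invented helper name `sliceSourceCoords`):

// Illustrative sketch only, not library code.
function sliceSourceCoords(outCoords, begin, strides) {
  return outCoords.map((c, i) => begin[i] + c * strides[i]);
}
// e.g. a [2:8:3] slice along one axis: sliceSourceCoords([1], [2], [3]) -> [5]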
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from '@tensorflow/tfjs-core';\nimport { getInternalFormatForFloat16MatrixTexture, getInternalFormatForFloat16PackedMatrixTexture, getInternalFormatForFloat32MatrixTexture, getInternalFormatForPackedMatrixTexture, getInternalFormatForUnsignedBytesMatrixTexture } from './gpgpu_util';\nimport { getPackedMatrixTextureShapeWidthHeight, getUnpackedMatrixTextureShapeWidthHeight, PhysicalTextureType, TextureUsage } from './tex_util';\nexport class TextureManager {\n constructor(gpgpu) {\n this.gpgpu = gpgpu;\n this.numUsedTextures = 0;\n this.numFreeTextures = 0;\n this._numBytesAllocated = 0;\n this._numBytesFree = 0; // How many bytes that have been allocated\n // are available for reuse.\n this.freeTextures = {};\n this.logEnabled = false;\n this.usedTextures = {};\n }\n acquireTexture(shapeRC, usage, isPacked) {\n const physicalTexType = getPhysicalFromLogicalTextureType(usage, isPacked);\n const shapeKey = getKeyFromTextureShape(shapeRC, physicalTexType, isPacked);\n if (!(shapeKey in this.freeTextures)) {\n this.freeTextures[shapeKey] = [];\n }\n if (!(shapeKey in this.usedTextures)) {\n this.usedTextures[shapeKey] = [];\n }\n const texBytes = computeBytes(shapeRC, physicalTexType, this.gpgpu.gl, this.gpgpu.textureConfig, isPacked);\n if (this.freeTextures[shapeKey].length > 0) {\n this.numFreeTextures--;\n this.numUsedTextures++;\n this._numBytesFree -= texBytes;\n this.log();\n const newTexture = this.freeTextures[shapeKey].shift();\n this.usedTextures[shapeKey].push(newTexture);\n return newTexture;\n }\n let newTexture;\n if (physicalTexType === PhysicalTextureType.PACKED_2X2_FLOAT32) {\n newTexture = this.gpgpu.createPackedMatrixTexture(shapeRC[0], shapeRC[1]);\n }\n else if (physicalTexType === PhysicalTextureType.PACKED_2X2_FLOAT16) {\n newTexture =\n this.gpgpu.createFloat16PackedMatrixTexture(shapeRC[0], shapeRC[1]);\n }\n else if (physicalTexType === PhysicalTextureType.UNPACKED_FLOAT32) {\n newTexture =\n this.gpgpu.createFloat32MatrixTexture(shapeRC[0], shapeRC[1]);\n }\n else if (physicalTexType === PhysicalTextureType.UNPACKED_FLOAT16) {\n newTexture =\n this.gpgpu.createFloat16MatrixTexture(shapeRC[0], shapeRC[1]);\n }\n else if (physicalTexType === PhysicalTextureType.PACKED_4X1_UNSIGNED_BYTE) {\n newTexture =\n this.gpgpu.createUnsignedBytesMatrixTexture(shapeRC[0], shapeRC[1]);\n }\n this.usedTextures[shapeKey].push(newTexture);\n this.numUsedTextures++;\n this._numBytesAllocated += texBytes;\n this.log();\n return newTexture;\n }\n releaseTexture(texture, shape, logicalTexType, isPacked) {\n if (this.freeTextures == null) {\n // Already disposed.\n return;\n }\n const physicalTexType = getPhysicalFromLogicalTextureType(logicalTexType, isPacked);\n const shapeKey = getKeyFromTextureShape(shape, physicalTexType, isPacked);\n if (!(shapeKey in this.freeTextures)) {\n this.freeTextures[shapeKey] = 
[];\n }\n const texBytes = computeBytes(shape, physicalTexType, this.gpgpu.gl, this.gpgpu.textureConfig, isPacked);\n const deleteTexThreshold = env().get('WEBGL_DELETE_TEXTURE_THRESHOLD');\n if (deleteTexThreshold !== -1 &&\n this._numBytesAllocated > deleteTexThreshold) {\n this.gpgpu.deleteMatrixTexture(texture);\n this._numBytesAllocated -= texBytes;\n }\n else {\n this.freeTextures[shapeKey].push(texture);\n this.numFreeTextures++;\n this._numBytesFree += texBytes;\n }\n this.numUsedTextures--;\n const texList = this.usedTextures[shapeKey];\n const texIndex = texList.indexOf(texture);\n if (texIndex < 0) {\n throw new Error('Cannot release a texture that was never provided by this ' +\n 'texture manager');\n }\n texList.splice(texIndex, 1);\n this.log();\n }\n log() {\n if (!this.logEnabled) {\n return;\n }\n const total = this.numFreeTextures + this.numUsedTextures;\n console.log('Free/Used', `${this.numFreeTextures} / ${this.numUsedTextures}`, `(${total})`);\n const freeRatio = this._numBytesFree / this._numBytesAllocated;\n console.log(`Bytes allocated: ${this._numBytesAllocated}`);\n console.log(`Bytes unused: ${this._numBytesFree} (${Math.round(100 * freeRatio)}%)`);\n }\n get numBytesAllocated() {\n return this._numBytesAllocated;\n }\n get numBytesFree() {\n return this._numBytesFree;\n }\n getNumUsedTextures() {\n return this.numUsedTextures;\n }\n getNumFreeTextures() {\n return this.numFreeTextures;\n }\n dispose() {\n if (this.freeTextures == null) {\n // Already disposed.\n return;\n }\n for (const texShape in this.freeTextures) {\n this.freeTextures[texShape].forEach(tex => {\n this.gpgpu.deleteMatrixTexture(tex);\n });\n }\n for (const texShape in this.usedTextures) {\n this.usedTextures[texShape].forEach(tex => {\n this.gpgpu.deleteMatrixTexture(tex);\n });\n }\n this.freeTextures = null;\n this.usedTextures = null;\n this.numUsedTextures = 0;\n this.numFreeTextures = 0;\n this._numBytesAllocated = 0;\n this._numBytesFree = 0;\n }\n}\nfunction numBytesForInternalFormat(gl, internalFormat) {\n // tslint:disable-next-line:no-any\n const glany = gl;\n if (internalFormat === glany.R32F) {\n return 4;\n }\n else if (internalFormat === glany.R16F) {\n return 2;\n }\n else if (internalFormat === glany.RGBA32F) {\n return 16;\n }\n else if (internalFormat === gl.RGBA) {\n return 16;\n }\n else if (internalFormat === glany.RGBA16F) {\n return 8;\n }\n throw new Error(`Unknown internal format ${internalFormat}`);\n}\nexport function computeBytes(shape, physicalTexType, gl, textureConfig, isPacked) {\n // It is not possible to infer packed status from the texture type because\n // depending on the textureConfig, different texture types may resolve to the\n // same internal format (e.g. in WebGL1, the internal format for\n // UNPACKED_FLOAT16 textures is gl.RGBA). 
Therefore we pass in `isPacked`\n // explicitly.\n const internalFormat = internalFormatForPhysicalTexType(physicalTexType, textureConfig);\n let numElements;\n if (isPacked) {\n const [packedWidth, packedHeight] = getPackedMatrixTextureShapeWidthHeight(shape[0], shape[1]);\n numElements = packedWidth * packedHeight;\n }\n else {\n const [width, height] = getUnpackedMatrixTextureShapeWidthHeight(shape[0], shape[1]);\n numElements = width * height;\n }\n const bytesPerElement = numBytesForInternalFormat(gl, internalFormat);\n return numElements * bytesPerElement;\n}\nfunction internalFormatForPhysicalTexType(physicalTexType, textureConfig) {\n switch (physicalTexType) {\n case PhysicalTextureType.PACKED_2X2_FLOAT32:\n return getInternalFormatForPackedMatrixTexture(textureConfig);\n case PhysicalTextureType.PACKED_2X2_FLOAT16:\n return getInternalFormatForFloat16PackedMatrixTexture(textureConfig);\n case PhysicalTextureType.UNPACKED_FLOAT32:\n return getInternalFormatForFloat32MatrixTexture(textureConfig);\n case PhysicalTextureType.UNPACKED_FLOAT16:\n return getInternalFormatForFloat16MatrixTexture(textureConfig);\n case PhysicalTextureType.PACKED_4X1_UNSIGNED_BYTE:\n return getInternalFormatForUnsignedBytesMatrixTexture(textureConfig);\n default:\n throw new Error(`Unknown physical texture type ${physicalTexType}`);\n }\n}\nfunction getPhysicalTextureForRendering(isPacked) {\n if (env().getBool('WEBGL_RENDER_FLOAT32_ENABLED')) {\n if (isPacked) {\n return PhysicalTextureType.PACKED_2X2_FLOAT32;\n }\n return PhysicalTextureType.UNPACKED_FLOAT32;\n }\n if (isPacked) {\n return PhysicalTextureType.PACKED_2X2_FLOAT16;\n }\n return PhysicalTextureType.UNPACKED_FLOAT16;\n}\nfunction getPhysicalFromLogicalTextureType(logicalTexType, isPacked) {\n if (logicalTexType === TextureUsage.UPLOAD) {\n return PhysicalTextureType.PACKED_2X2_FLOAT32;\n }\n else if (logicalTexType === TextureUsage.RENDER || logicalTexType == null) {\n return getPhysicalTextureForRendering(isPacked);\n }\n else if (logicalTexType === TextureUsage.DOWNLOAD ||\n logicalTexType === TextureUsage.PIXELS) {\n return PhysicalTextureType.PACKED_4X1_UNSIGNED_BYTE;\n }\n throw new Error(`Unknown logical texture type ${logicalTexType}`);\n}\nfunction getKeyFromTextureShape(shapeRowsCol, physicalTexType, isPacked) {\n return `${shapeRowsCol[0]}_${shapeRowsCol[1]}_${physicalTexType}_${isPacked}`;\n}\n//# sourceMappingURL=texture_manager.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
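Editor's note: the TextureManager above pools textures in free lists keyed by shape, physical type and packed flag, so a released texture is only reused for an exactly matching request, and it tracks bytes per texture to decide when to delete instead of pool. A hedged sketch of the keying and (assuming the usual 2x2 packing, which is not shown in this excerpt) the packed texel count:

// Illustrative sketch only, not library code.
function textureKey(rows, cols, physicalTexType, isPacked) {
  return `${rows}_${cols}_${physicalTexType}_${isPacked}`;   // mirrors getKeyFromTextureShape above
}
function packedTexelCount(rows, cols) {
  // assumption: each packed texel stores a 2x2 block, so both dimensions are halved (rounded up)
  return Math.ceil(rows / 2) * Math.ceil(cols / 2);
}
// e.g. bytes for a packed RGBA32F texture: packedTexelCount(rows, cols) * 16  // 16 bytes per RGBA32F texel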
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getCoordsDataType } from './shader_compiler';\nexport class TileProgram {\n constructor(aShape, reps) {\n this.variableNames = ['A'];\n const outputShape = new Array(aShape.length);\n for (let i = 0; i < outputShape.length; i++) {\n outputShape[i] = aShape[i] * reps[i];\n }\n this.outputShape = outputShape;\n this.rank = outputShape.length;\n const dtype = getCoordsDataType(this.rank);\n const sourceCoords = getSourceCoords(aShape);\n this.userCode = `\n void main() {\n ${dtype} resRC = getOutputCoords();\n setOutput(getA(${sourceCoords}));\n }\n `;\n }\n}\nfunction getSourceCoords(aShape) {\n const rank = aShape.length;\n if (rank > 5) {\n throw Error(`Tile for rank ${rank} is not yet supported`);\n }\n if (rank === 1) {\n return `imod(resRC, ${aShape[0]})`;\n }\n const currentCoords = ['resRC.x', 'resRC.y', 'resRC.z', 'resRC.w', 'resRC.u'];\n const sourceCoords = [];\n for (let i = 0; i < aShape.length; i++) {\n sourceCoords.push(`imod(${currentCoords[i]}, ${aShape[i]})`);\n }\n return sourceCoords.join();\n}\n//# sourceMappingURL=tile_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util } from '@tensorflow/tfjs-core';\nexport class UnaryOpProgram {\n constructor(aShape, opSnippet) {\n this.variableNames = ['A'];\n this.outputShape = aShape;\n this.userCode = `\n float unaryOperation(float x) {\n ${opSnippet}\n }\n\n void main() {\n float x = getAAtOutCoords();\n float y = unaryOperation(x);\n\n setOutput(y);\n }\n `;\n }\n}\nconst CHECK_NAN_SNIPPET = `if (isnan(x)) return x;`;\nexport const LINEAR = `return x;`;\nexport const ABS = `return abs(x);`;\nexport const RELU = CHECK_NAN_SNIPPET + `\n return (x < 0.0) ? 0.0 : x;\n`;\nexport const RELU6 = CHECK_NAN_SNIPPET + `\n return (x < 0.0) ? 0.0 : min(6.0, x);\n`;\nexport const ELU = `return (x >= 0.0) ? x : (exp(x) - 1.0);`;\nexport const SELU = `\n // Stable and Attracting Fixed Point (0, 1) for Normalized Weights.\n // see: https://arxiv.org/abs/1706.02515\n float scaleAlpha = ${backend_util.SELU_SCALEALPHA};\n float scale = ${backend_util.SELU_SCALE};\n return (x >= 0.0) ? 
scale * x : scaleAlpha * (exp(x) - 1.0);\n`;\nexport function STEP(alpha = 0.0) {\n return CHECK_NAN_SNIPPET + `\n return x > 0.0 ? 1.0 : float(${alpha});\n `;\n}\nexport const NEG = `return -x;`;\nexport const CEIL = `return ceil(x);`;\nexport const FLOOR = `return floor(x);`;\nexport const SIGN = `\n if (isnan(x)) { return 0.0; }\n return sign(x);\n`;\nexport const IS_NAN = `return float(isnan(x));`;\nexport const IS_INF = `return float(isinf(x));`;\nexport const IS_FINITE = `return float(!isnan(x) && !isinf(x));`;\nexport const ROUND = `\n // OpenGL ES does not support round function.\n // The algorithm is based on banker's rounding.\n float base = floor(x);\n if ((x - base) < 0.5) {\n return floor(x);\n } else if ((x - base) > 0.5) {\n return ceil(x);\n } else {\n if (mod(base, 2.0) == 0.0) {\n return base;\n } else {\n return base + 1.0;\n }\n }\n`;\nexport const EXP = `return exp(x);`;\nexport const EXPM1 = `return exp(x) - 1.0;`;\nexport const LOG = `if (x < 0.0) return NAN;\n return log(x);`;\nexport const LOG1P = `return log(1.0 + x);`;\nexport const SQRT = `return sqrt(x);`;\nexport const RSQRT = `return inversesqrt(x);`;\nexport const SIGMOID = `return 1.0 / (1.0 + exp(-1.0 * x));`;\n/**\n * mirrors the implementation of tf.nn.softplus: https://goo.gl/vkcvwX\n *\n * epsilon is the difference between 1.0 and the next representable\n * float. For a single precision 32 bit float this should be 2^-23, see:\n * https://math.byu.edu/~schow/work/IEEEFloatingPoint.htm\n *\n * too_large = (x > -threshold) is value above which exp(x) may overflow\n * but softplus(x) == x is within machine epsilon\n *\n * too_small = (x < threshold) is value below which exp(x) may underflow,\n * but softplus(x) == exp(x) is within machine epsilon.\n */\nexport const SOFTPLUS = `\n float epsilon = 1.1920928955078125e-7;\n float threshold = log(epsilon) + 2.0;\n\n bool too_large = x > -threshold;\n bool too_small = x < threshold;\n\n float result;\n float exp_x = exp(x);\n\n if (too_large){\n result = x;\n }\n else if (too_small){\n result = exp_x;\n }\n else{\n result = log(exp_x + 1.0);\n }\n return result;\n`;\nexport const ASIN = CHECK_NAN_SNIPPET + `\n if (abs(x) > 1.) {\n return NAN;\n }\n return asin(x);\n`;\nexport const ACOS = CHECK_NAN_SNIPPET + `\n if (abs(x) > 1.) 
{\n return NAN;\n }\n return acos(x);\n`;\nexport const ATAN = CHECK_NAN_SNIPPET + `\n return atan(x);\n`;\nexport const SINH = `\n float e2x = exp(x);\n return (e2x - 1.0 / e2x) / 2.0;\n`;\nexport const COSH = `\n float e2x = exp(-x);\n return (e2x + 1.0 / e2x) / 2.0;\n`;\nexport const TANH = `\n float e2x = exp(-2.0 * abs(x));\n return sign(x) * (1.0 - e2x) / (1.0 + e2x);\n`;\nexport const ASINH = CHECK_NAN_SNIPPET + `return log(x + sqrt(x * x + 1.0));`;\nexport const ACOSH = CHECK_NAN_SNIPPET + `\n if (x < 1.0) return NAN;\n return log(x + sqrt(x * x - 1.0));`;\nexport const ATANH = CHECK_NAN_SNIPPET + `\n if ((x < -1.0) || (x > 1.0)) return NAN;\n return (log(1.0 + x) - log(1.0 - x)) / 2.0;`;\nexport const ERF = `\n // Error function is calculated approximately with elementary function.\n // See \"Handbook of Mathematical Functions with Formulas,\n // Graphs, and Mathematical Tables\", Abramowitz and Stegun.\n float p = ${backend_util.ERF_P};\n float a1 = ${backend_util.ERF_A1};\n float a2 = ${backend_util.ERF_A2};\n float a3 = ${backend_util.ERF_A3};\n float a4 = ${backend_util.ERF_A4};\n float a5 = ${backend_util.ERF_A5};\n\n float sign = sign(x);\n x = abs(x);\n float t = 1.0 / (1.0 + p * x);\n return sign * (1.0 - (((((a5*t + a4)*t) + a3)*t + a2)*t + a1)*t*exp(-x*x));\n`;\nexport const RECIPROCAL = `return 1.0 / x;`;\nexport const LOGICAL_NOT = `return float(!(x >= 1.0));`;\nexport const CLONE = 'return x;';\n//# sourceMappingURL=unaryop_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport const LINEAR = `return x;`;\nexport const LOG = `\n vec4 result = log(x);\n vec4 isNaN = vec4(lessThan(x, vec4(0.0)));\n result.r = isNaN.r == 1.0 ? NAN : result.r;\n result.g = isNaN.g == 1.0 ? NAN : result.g;\n result.b = isNaN.b == 1.0 ? NAN : result.b;\n result.a = isNaN.a == 1.0 ? NAN : result.a;\n\n return result;\n`;\nexport const RELU = `\n vec4 result = x * vec4(greaterThanEqual(x, vec4(0.0)));\n bvec4 isNaN = isnan(x);\n\n result.r = isNaN.r ? x.r : result.r;\n result.g = isNaN.g ? x.g : result.g;\n result.b = isNaN.b ? x.b : result.b;\n result.a = isNaN.a ? x.a : result.a;\n\n return result;\n`;\nexport const RELU6 = `\n vec4 result = min(x, vec4(6.)) * vec4(greaterThanEqual(x, vec4(0.0)));\n bvec4 isNaN = isnan(x);\n\n result.r = isNaN.r ? x.r : result.r;\n result.g = isNaN.g ? x.g : result.g;\n result.b = isNaN.b ? x.b : result.b;\n result.a = isNaN.a ? x.a : result.a;\n\n return result;\n`;\nexport const ELU = `\n vec4 result;\n\n result.r = (x.r >= 0.0) ? x.r : (exp(x.r) - 1.0);\n result.g = (x.g >= 0.0) ? x.g : (exp(x.g) - 1.0);\n result.b = (x.b >= 0.0) ? x.b : (exp(x.b) - 1.0);\n result.a = (x.a >= 0.0) ? 
x.a : (exp(x.a) - 1.0);\n\n return result;\n`;\nexport class UnaryOpPackedProgram {\n constructor(aShape, opSnippet) {\n this.variableNames = ['A'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = aShape;\n this.userCode = `\n vec4 unaryOperation(vec4 x) {\n ${opSnippet}\n }\n\n void main() {\n vec4 x = getAAtOutCoords();\n vec4 y = unaryOperation(x);\n\n setOutput(y);\n }\n `;\n }\n}\n//# sourceMappingURL=unaryop_packed_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getChannels, getSourceCoords } from './packing_util';\nimport { getCoordsDataType } from './shader_compiler';\nexport class UnpackProgram {\n constructor(outputShape) {\n this.variableNames = ['A'];\n this.packedInputs = true;\n this.packedOutput = false;\n this.outputShape = outputShape;\n const rank = outputShape.length;\n const channels = getChannels('rc', rank);\n const dtype = getCoordsDataType(rank);\n const sourceCoords = getSourceCoords(rank, channels);\n const innerDims = channels.slice(-2);\n const coords = rank <= 1 ? 'rc' : `vec2(${innerDims.join(',')})`;\n this.userCode = `\n void main() {\n ${dtype} rc = getOutputCoords();\n vec4 packedInput = getA(${sourceCoords});\n\n setOutput(getChannel(packedInput, ${coords}));\n }\n `;\n }\n}\n//# sourceMappingURL=unpack_gpu.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// Import webgl flags.\nimport './flags_webgl';\nimport * as tf from '@tensorflow/tfjs-core';\nimport { div, engine, env, max, range, reshape, scalar, softmax, tensor, tidy, transpose } from '@tensorflow/tfjs-core';\nimport { backend_util, buffer, kernel_impls, slice_util, util } from '@tensorflow/tfjs-core';\nimport { DataStorage, KernelBackend, upcastType } from '@tensorflow/tfjs-core';\nimport { ceilImplCPU, expImplCPU, expm1ImplCPU, floorImplCPU, logImplCPU, rsqrtImplCPU, simpleAbsImplCPU, sliceImplCPU } from './kernel_utils/shared';\nconst { segment_util } = backend_util;\nconst split = kernel_impls.split;\nconst tile = kernel_impls.tile;\nconst topkImpl = kernel_impls.topkImpl;\nconst whereImpl = kernel_impls.whereImpl;\nimport { AddNProgram } from './addn_gpu';\nimport { AddNPackedProgram } from './addn_packed_gpu';\nimport { ArgMinMaxProgram } from './argminmax_gpu';\nimport { ArgMinMaxPackedProgram } from './argminmax_packed_gpu';\nimport { AvgPool3DBackpropProgram } from './avg_pool_backprop_gpu';\nimport * as binaryop_gpu from './binaryop_gpu';\nimport { BinaryOpProgram } from './binaryop_gpu';\nimport * as binaryop_packed_gpu from './binaryop_packed_gpu';\nimport { BinaryOpPackedProgram } from './binaryop_packed_gpu';\nimport { getWebGLContext } from './canvas_util';\nimport { ClipProgram } from './clip_gpu';\nimport { ClipPackedProgram } from './clip_packed_gpu';\nimport { ComplexAbsProgram } from './complex_abs_gpu';\nimport { Conv2DDerFilterProgram, Conv2DDerInputProgram, Conv3DDerFilterProgram, Conv3DDerInputProgram } from './conv_backprop_gpu';\nimport { DepthwiseConv2DDerFilterProgram, DepthwiseConv2DDerInputProgram } from './conv_backprop_gpu_depthwise';\nimport { Conv2DProgram, Conv3DProgram } from './conv_gpu';\nimport { DepthwiseConv2DProgram } from './conv_gpu_depthwise';\nimport { DepthwiseConvPacked2DProgram } from './conv_packed_gpu_depthwise';\nimport { CropAndResizeProgram } from './crop_and_resize_gpu';\nimport { CumSumProgram } from './cumsum_gpu';\nimport { DecodeMatrixProgram } from './decode_matrix_gpu';\nimport { DecodeMatrixPackedProgram } from './decode_matrix_packed_gpu';\nimport { DepthToSpaceProgram } from './depth_to_space_gpu';\nimport { DiagProgram } from './diag_gpu';\nimport { EncodeFloatProgram } from './encode_float_gpu';\nimport { EncodeFloatPackedProgram } from './encode_float_packed_gpu';\nimport { EncodeMatrixProgram } from './encode_matrix_gpu';\nimport { EncodeMatrixPackedProgram } from './encode_matrix_packed_gpu';\nimport { FillProgram } from './fill_gpu';\nimport { GatherProgram } from './gather_gpu';\nimport { GatherNDProgram } from './gather_nd_gpu';\nimport { GPGPUContext } from './gpgpu_context';\nimport * as gpgpu_math from './gpgpu_math';\nimport { Im2ColPackedProgram } from './im2col_packed_gpu';\nimport { LRNProgram } from './lrn_gpu';\nimport { LRNGradProgram } 
from './lrn_grad_gpu';\nimport { LRNPackedProgram } from './lrn_packed_gpu';\nimport { MaxPool3DBackpropProgram } from './max_pool_backprop_gpu';\nimport { MatMulPackedProgram } from './mulmat_packed_gpu';\nimport { MultinomialProgram } from './multinomial_gpu';\nimport { OneHotProgram } from './onehot_gpu';\nimport { PackProgram } from './pack_gpu';\nimport { PadProgram } from './pad_gpu';\nimport { PadPackedProgram } from './pad_packed_gpu';\nimport { Pool3DProgram } from './pool_gpu';\nimport { ReduceProgram } from './reduce_gpu';\nimport { ReshapePackedProgram } from './reshape_packed_gpu';\nimport { ResizeBilinearBackpropProgram } from './resize_bilinear_backprop_gpu';\nimport { ResizeBilinearProgram } from './resize_bilinear_gpu';\nimport { ResizeBilinearPackedProgram } from './resize_bilinear_packed_gpu';\nimport { ResizeNearestNeigborBackpropProgram } from './resize_nearest_neighbor_backprop_gpu';\nimport { ResizeNearestNeighborProgram } from './resize_nearest_neighbor_gpu';\nimport { ReverseProgram } from './reverse_gpu';\nimport { ReversePackedProgram } from './reverse_packed_gpu';\nimport { ScatterProgram } from './scatter_gpu';\nimport { SegmentOpProgram } from './segment_gpu';\nimport { SelectProgram } from './select_gpu';\nimport { SliceProgram } from './slice_gpu';\nimport { SlicePackedProgram } from './slice_packed_gpu';\nimport { StridedSliceProgram } from './strided_slice_gpu';\nimport * as tex_util from './tex_util';\nimport { TextureUsage } from './tex_util';\nimport { TextureManager } from './texture_manager';\nimport { TileProgram } from './tile_gpu';\nimport * as unary_op from './unaryop_gpu';\nimport { UnaryOpProgram } from './unaryop_gpu';\nimport * as unary_packed_op from './unaryop_packed_gpu';\nimport { UnaryOpPackedProgram } from './unaryop_packed_gpu';\nimport { UnpackProgram } from './unpack_gpu';\nimport * as webgl_util from './webgl_util';\nexport const EPSILON_FLOAT32 = 1e-7;\nexport const EPSILON_FLOAT16 = 1e-4;\nconst binaryCaches = {};\nexport function getBinaryCache(webGLVersion) {\n if (webGLVersion in binaryCaches) {\n return binaryCaches[webGLVersion];\n }\n binaryCaches[webGLVersion] = {};\n return binaryCaches[webGLVersion];\n}\nfunction mapActivationToShaderProgram(activation, packed = false) {\n if (activation === 'linear') {\n if (packed) {\n return unary_packed_op.LINEAR;\n }\n return unary_op.LINEAR;\n }\n else if (activation === 'relu') {\n if (packed) {\n return unary_packed_op.RELU;\n }\n return unary_op.RELU;\n }\n else if (activation === 'elu') {\n if (packed) {\n return unary_packed_op.ELU;\n }\n return unary_op.ELU;\n }\n else if (activation === 'relu6') {\n if (packed) {\n return unary_packed_op.RELU6;\n }\n return unary_op.RELU6;\n }\n else if (activation === 'prelu') {\n if (packed) {\n return binaryop_packed_gpu.PRELU;\n }\n return binaryop_gpu.PRELU;\n }\n throw new Error(`Activation ${activation} has not been implemented for the WebGL backend.`);\n}\n// Empirically determined constant used to determine size threshold for handing\n// off execution to the CPU.\nconst CPU_HANDOFF_SIZE_THRESHOLD = 128;\n// Empirically determined constant used to decide the number of MB on GPU\n// before we warn about high memory use. 
The MB are this constant * screen area\n// * dpi / 1024 / 1024.\nconst BEFORE_PAGING_CONSTANT = 600;\nfunction numMBBeforeWarning() {\n if (env().global.screen == null) {\n return 1024; // 1 GB.\n }\n return (env().global.screen.height * env().global.screen.width *\n window.devicePixelRatio) *\n BEFORE_PAGING_CONSTANT / 1024 / 1024;\n}\n// Empirically determined minimal shared dimension in matmul before we forward\n// to a.mul(b).sum() in order to take advantage of GPU parallelism. See\n// https://github.com/tensorflow/tfjs-core/pull/1379 for benchmarks.\nexport const MATMUL_SHARED_DIM_THRESHOLD = 1000;\nexport class MathBackendWebGL extends KernelBackend {\n constructor(gpgpu) {\n super();\n // Maps data ids that have a pending read operation, to list of subscribers.\n this.pendingRead = new WeakMap();\n // List of data ids that are scheduled for disposal, but are waiting on a\n // pending read operation.\n this.pendingDisposal = new WeakSet();\n // Used to count the number of 'shallow' sliced tensors that point to the\n // same data id.\n this.dataRefCount = new WeakMap();\n this.numBytesInGPU = 0;\n // Accumulated time spent (including blocking) in uploading data to webgl.\n this.uploadWaitMs = 0;\n // Accumulated time spent (including blocking in downloading data from webgl.\n this.downloadWaitMs = 0;\n this.warnedAboutMemory = false;\n this.warnedAboutCPUBackend = false;\n this.pendingDeletes = 0;\n this.disposed = false;\n if (!env().getBool('HAS_WEBGL')) {\n throw new Error('WebGL is not supported on this device');\n }\n if (gpgpu == null) {\n const gl = getWebGLContext(env().getNumber('WEBGL_VERSION'));\n this.binaryCache = getBinaryCache(env().getNumber('WEBGL_VERSION'));\n this.gpgpu = new GPGPUContext(gl);\n this.canvas = gl.canvas;\n this.gpgpuCreatedLocally = true;\n }\n else {\n this.gpgpu = gpgpu;\n this.binaryCache = {};\n this.gpgpuCreatedLocally = false;\n this.canvas = gpgpu.gl.canvas;\n }\n this.textureManager = new TextureManager(this.gpgpu);\n this.numMBBeforeWarning = numMBBeforeWarning();\n this.texData = new DataStorage(this, engine());\n }\n numDataIds() {\n return this.texData.numDataIds() +\n (this.cpuBackend ? this.cpuBackend.numDataIds() : 0) -\n this.pendingDeletes;\n }\n write(values, shape, dtype) {\n if (env().getBool('WEBGL_CHECK_NUMERICAL_PROBLEMS') ||\n env().getBool('DEBUG')) {\n this.checkNumericalProblems(values);\n }\n if (dtype === 'complex64' && values != null) {\n throw new Error(`Cannot write to a complex64 dtype. ` +\n `Please use tf.complex(real, imag).`);\n }\n const dataId = {};\n this.texData.set(dataId, {\n shape,\n dtype,\n values,\n usage: TextureUsage.UPLOAD,\n refCount: 1,\n complexParentRefCount: 0\n });\n return dataId;\n }\n /** Increase refCount of a `TextureData`. */\n incRef(dataId) {\n const texData = this.texData.get(dataId);\n texData.refCount++;\n }\n /** Decrease refCount of a `TextureData`. */\n decRef(dataId) {\n if (this.texData.has(dataId)) {\n const texData = this.texData.get(dataId);\n texData.refCount--;\n }\n }\n move(dataId, values, shape, dtype) {\n if (env().getBool('DEBUG')) {\n this.checkNumericalProblems(values);\n }\n if (dtype === 'complex64') {\n throw new Error(`Cannot write to a complex64 dtype. 
` +\n `Please use tf.complex(real, imag).`);\n }\n this.texData.set(dataId, {\n shape,\n dtype,\n values,\n usage: TextureUsage.UPLOAD,\n refCount: 1,\n complexParentRefCount: 0\n });\n }\n disposeIntermediateTensorInfo(tensorInfo) {\n const dataId = tensorInfo.dataId;\n if (this.texData.has(dataId)) {\n const textureData = this.texData.get(dataId);\n textureData.refCount--;\n if (textureData.refCount < 1) {\n this.disposeData(dataId);\n }\n }\n }\n readSync(dataId) {\n const texData = this.texData.get(dataId);\n const { values, dtype, complexTensorInfos, slice, shape, isPacked } = texData;\n // The presence of `slice` indicates this tensor is a shallow slice of a\n // different tensor, and is using that original tensor's texture. Run\n // `clone` in order to copy that texture and read from it.\n if (slice != null) {\n let program;\n if (isPacked) {\n program = new UnaryOpPackedProgram(shape, unary_op.CLONE);\n }\n else {\n program = new UnaryOpProgram(shape, unary_op.CLONE);\n }\n const res = this.runWebGLProgram(program, [{ dataId, shape, dtype }], dtype);\n const data = this.readSync(res.dataId);\n this.disposeIntermediateTensorInfo(res);\n return data;\n }\n if (values != null) {\n return this.convertAndCacheOnCPU(dataId);\n }\n if (dtype === 'string') {\n return values;\n }\n const shouldTimeProgram = this.activeTimers != null;\n let start;\n if (shouldTimeProgram) {\n start = util.now();\n }\n let result;\n if (dtype === 'complex64') {\n const realValues = this.readSync(complexTensorInfos.real.dataId);\n const imagValues = this.readSync(complexTensorInfos.imag.dataId);\n result = backend_util.mergeRealAndImagArrays(realValues, imagValues);\n }\n else {\n result = this.getValuesFromTexture(dataId);\n }\n if (shouldTimeProgram) {\n this.downloadWaitMs += util.now() - start;\n }\n return this.convertAndCacheOnCPU(dataId, result);\n }\n async read(dataId) {\n if (this.pendingRead.has(dataId)) {\n const subscribers = this.pendingRead.get(dataId);\n return new Promise(resolve => subscribers.push(resolve));\n }\n const texData = this.texData.get(dataId);\n const { values, shape, slice, dtype, complexTensorInfos, isPacked } = texData;\n // The presence of `slice` indicates this tensor is a shallow slice of a\n // different tensor, and is using that original tensor's texture. 
Run\n // `clone` in order to copy that texture and read from it.\n if (slice != null) {\n let program;\n if (isPacked) {\n program = new UnaryOpPackedProgram(shape, unary_op.CLONE);\n }\n else {\n program = new UnaryOpProgram(shape, unary_op.CLONE);\n }\n const res = this.runWebGLProgram(program, [{ dataId, shape, dtype }], dtype);\n const data = this.read(res.dataId);\n this.disposeIntermediateTensorInfo(res);\n return data;\n }\n if (values != null) {\n return this.convertAndCacheOnCPU(dataId);\n }\n if (!env().getBool('WEBGL_DOWNLOAD_FLOAT_ENABLED') &&\n env().getNumber('WEBGL_VERSION') === 2) {\n throw new Error(`tensor.data() with WEBGL_DOWNLOAD_FLOAT_ENABLED=false and ` +\n `WEBGL_VERSION=2 not yet supported.`);\n }\n let buffer = null;\n let tmpDownloadTarget;\n if (dtype !== 'complex64' && env().get('WEBGL_BUFFER_SUPPORTED')) {\n // Possibly copy the texture into a buffer before inserting a fence.\n tmpDownloadTarget = this.decode(dataId);\n const tmpData = this.texData.get(tmpDownloadTarget.dataId);\n buffer = this.gpgpu.createBufferFromTexture(tmpData.texture, ...tex_util.getDenseTexShape(shape));\n }\n this.pendingRead.set(dataId, []);\n if (dtype !== 'complex64') {\n // Create a fence and wait for it to resolve.\n await this.gpgpu.createAndWaitForFence();\n }\n // Download the values from the GPU.\n let vals;\n if (dtype === 'complex64') {\n const ps = await Promise.all([\n this.read(complexTensorInfos.real.dataId),\n this.read(complexTensorInfos.imag.dataId)\n ]);\n const realValues = ps[0];\n const imagValues = ps[1];\n vals = backend_util.mergeRealAndImagArrays(realValues, imagValues);\n }\n else if (buffer == null) {\n vals = this.getValuesFromTexture(dataId);\n }\n else {\n const size = util.sizeFromShape(shape);\n vals = this.gpgpu.downloadFloat32MatrixFromBuffer(buffer, size);\n }\n if (tmpDownloadTarget != null) {\n this.disposeIntermediateTensorInfo(tmpDownloadTarget);\n }\n const dTypeVals = this.convertAndCacheOnCPU(dataId, vals);\n const subscribers = this.pendingRead.get(dataId);\n this.pendingRead.delete(dataId);\n // Notify all pending reads.\n subscribers.forEach(resolve => resolve(dTypeVals));\n if (this.pendingDisposal.has(dataId)) {\n this.pendingDisposal.delete(dataId);\n this.disposeData(dataId);\n this.pendingDeletes--;\n }\n return dTypeVals;\n }\n checkNumericalProblems(values) {\n if (values == null) {\n return;\n }\n for (let i = 0; i < values.length; i++) {\n const num = values[i];\n if (!webgl_util.canBeRepresented(num)) {\n if (env().getBool('WEBGL_RENDER_FLOAT32_CAPABLE')) {\n throw Error(`The value ${num} cannot be represented with your ` +\n `current settings. Consider enabling float32 rendering: ` +\n `'tf.env().set('WEBGL_RENDER_FLOAT32_ENABLED', true);'`);\n }\n throw Error(`The value ${num} cannot be represented on this device.`);\n }\n }\n }\n getValuesFromTexture(dataId) {\n const { shape, dtype, isPacked } = this.texData.get(dataId);\n const size = util.sizeFromShape(shape);\n if (env().getBool('WEBGL_DOWNLOAD_FLOAT_ENABLED')) {\n const tmpTarget = this.decode(dataId);\n const tmpData = this.texData.get(tmpTarget.dataId);\n const vals = this.gpgpu\n .downloadMatrixFromPackedTexture(tmpData.texture, ...tex_util.getDenseTexShape(shape))\n .subarray(0, size);\n this.disposeIntermediateTensorInfo(tmpTarget);\n return vals;\n }\n const shouldUsePackedProgram = env().getBool('WEBGL_PACK') && isPacked === true;\n const outputShape = shouldUsePackedProgram ? 
webgl_util.getShapeAs3D(shape) : shape;\n const program = shouldUsePackedProgram ?\n new EncodeFloatPackedProgram(outputShape) :\n new EncodeFloatProgram(outputShape);\n const output = this.runWebGLProgram(program, [{ shape: outputShape, dtype, dataId }], 'float32');\n const tmpData = this.texData.get(output.dataId);\n const vals = this.gpgpu\n .downloadByteEncodedFloatMatrixFromOutputTexture(tmpData.texture, tmpData.texShape[0], tmpData.texShape[1])\n .subarray(0, size);\n this.disposeIntermediateTensorInfo(output);\n return vals;\n }\n async time(f) {\n const oldActiveTimers = this.activeTimers;\n const newActiveTimers = [];\n let outerMostTime = false;\n if (this.programTimersStack == null) {\n this.programTimersStack = newActiveTimers;\n outerMostTime = true;\n }\n else {\n this.activeTimers.push(newActiveTimers);\n }\n this.activeTimers = newActiveTimers;\n f();\n // needing to split these up because util.flatten only accepts certain types\n const flattenedActiveTimerQueries = util.flatten(this.activeTimers.map((d) => d.query))\n .filter(d => d != null);\n const flattenedActiveTimerNames = util.flatten(this.activeTimers.map((d) => d.name))\n .filter(d => d != null);\n this.activeTimers = oldActiveTimers;\n if (outerMostTime) {\n this.programTimersStack = null;\n }\n const res = {\n uploadWaitMs: this.uploadWaitMs,\n downloadWaitMs: this.downloadWaitMs,\n kernelMs: null,\n wallMs: null // will be filled by the engine\n };\n if (env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_RELIABLE') > 0) {\n const kernelMs = await Promise.all(flattenedActiveTimerQueries);\n res['kernelMs'] = util.sum(kernelMs);\n res['getExtraProfileInfo'] = () => kernelMs.map((d, i) => ({ name: flattenedActiveTimerNames[i], ms: d }))\n .map(d => `${d.name}: ${d.ms}`)\n .join(', ');\n }\n else {\n res['kernelMs'] = {\n error: 'WebGL query timers are not supported in this environment.'\n };\n }\n this.uploadWaitMs = 0;\n this.downloadWaitMs = 0;\n return res;\n }\n memory() {\n return {\n unreliable: false,\n numBytesInGPU: this.numBytesInGPU,\n numBytesInGPUAllocated: this.textureManager.numBytesAllocated,\n numBytesInGPUFree: this.textureManager.numBytesFree\n };\n }\n startTimer() {\n if (env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_RELIABLE') > 0) {\n return this.gpgpu.beginQuery();\n }\n return { startMs: util.now(), endMs: null };\n }\n endTimer(query) {\n if (env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_RELIABLE') > 0) {\n this.gpgpu.endQuery();\n return query;\n }\n query.endMs = util.now();\n return query;\n }\n async getQueryTime(query) {\n if (env().getNumber('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_RELIABLE') > 0) {\n return this.gpgpu.waitForQueryAndGetTime(query);\n }\n const timerQuery = query;\n return timerQuery.endMs - timerQuery.startMs;\n }\n disposeData(dataId) {\n if (this.pendingDisposal.has(dataId)) {\n return;\n }\n if (this.pendingRead.has(dataId)) {\n this.pendingDisposal.add(dataId);\n this.pendingDeletes++;\n return;\n }\n // No-op if already disposed.\n if (!this.texData.has(dataId)) {\n return;\n }\n // Trying to dispose a textureData that has a 'kept' refCount, e.g. trying\n // to dispose a tensor whose data bucket is shared with a complex tensor. 
In\n // this case we are removing a reference to the textureData, but we\n // shouldn't actually dispose the texture.\n if (this.texData.get(dataId).complexParentRefCount > 0) {\n this.texData.get(dataId).refCount--;\n return;\n }\n this.releaseGPUData(dataId);\n const { complexTensorInfos } = this.texData.get(dataId);\n if (complexTensorInfos != null) {\n this.texData.get(complexTensorInfos.real.dataId).complexParentRefCount--;\n this.disposeIntermediateTensorInfo(complexTensorInfos.real);\n this.texData.get(complexTensorInfos.imag.dataId).complexParentRefCount--;\n this.disposeIntermediateTensorInfo(complexTensorInfos.imag);\n }\n this.texData.delete(dataId);\n }\n releaseGPUData(dataId) {\n const { texture, dtype, texShape, usage, isPacked, slice } = this.texData.get(dataId);\n const key = slice && slice.origDataId || dataId;\n const refCount = this.dataRefCount.get(key);\n if (refCount > 1) {\n this.dataRefCount.set(key, refCount - 1);\n }\n else {\n this.dataRefCount.delete(key);\n if (texture != null) {\n this.numBytesInGPU -= this.computeBytes(texShape, dtype);\n this.textureManager.releaseTexture(texture, texShape, usage, isPacked);\n }\n }\n const texData = this.texData.get(dataId);\n texData.texture = null;\n texData.texShape = null;\n texData.isPacked = false;\n texData.slice = null;\n }\n getTexture(dataId) {\n this.uploadToGPU(dataId);\n return this.texData.get(dataId).texture;\n }\n /**\n * Returns internal information for the specific data bucket. Used in unit\n * tests.\n */\n getDataInfo(dataId) {\n return this.texData.get(dataId);\n }\n getCPUBackend() {\n if (!env().getBool('WEBGL_CPU_FORWARD')) {\n return null;\n }\n if (this.cpuBackend == null) {\n this.cpuBackend = engine().findBackend('cpu');\n }\n return this.cpuBackend;\n }\n /*\n Tests whether all the inputs to an op are small and on the CPU. This heuristic\n determines when it would be faster to execute a kernel on the CPU. WebGL\n kernels opt into running this check and forwarding when appropriate.\n TODO(https://github.com/tensorflow/tfjs/issues/872): Develop a more\n sustainable strategy for optimizing backend execution of ops.\n */\n shouldExecuteOnCPU(inputs, sizeThreshold = CPU_HANDOFF_SIZE_THRESHOLD) {\n const cpuBackend = this.getCPUBackend();\n if (!this.warnedAboutCPUBackend && cpuBackend == null) {\n console.warn('Your application contains ops that are small enough to be ' +\n 'executed on the CPU backend, however the CPU backend cannot ' +\n 'be found. 
Consider importing the CPU backend ' +\n '(@tensorflow/tfjs-backend-cpu) for better performance.');\n this.warnedAboutCPUBackend = true;\n }\n return cpuBackend != null &&\n inputs.every(input => this.texData.get(input.dataId).texture == null &&\n util.sizeFromShape(input.shape) < sizeThreshold);\n }\n getGPGPUContext() {\n return this.gpgpu;\n }\n slice(x, begin, size) {\n if (this.shouldExecuteOnCPU([x])) {\n const outValues = sliceImplCPU(this.texData.get(x.dataId).values, begin, size, x.shape, x.dtype);\n return this.makeOutput(size, x.dtype, outValues);\n }\n // Short-circuit computation if the slice is zero-sized.\n if (util.sizeFromShape(size) === 0) {\n return tensor([], size, x.dtype);\n }\n const { isPacked } = this.texData.get(x.dataId);\n const isContinous = slice_util.isSliceContinous(x.shape, begin, size);\n if (isPacked || !isContinous) {\n const program = env().getBool('WEBGL_PACK_ARRAY_OPERATIONS') ?\n new SlicePackedProgram(size) :\n new SliceProgram(size);\n const customSetup = program.getCustomSetupFunc(begin);\n return this.compileAndRun(program, [x], null, customSetup);\n }\n this.uploadToGPU(x.dataId);\n return this.shallowSlice(x, begin, size);\n }\n shallowSlice(x, begin, size) {\n const xTexData = this.texData.get(x.dataId);\n const t = this.makeOutput(size, x.dtype);\n const newTexData = this.texData.get(t.dataId);\n // Copy texture data from the original tensor.\n Object.assign(newTexData, xTexData);\n newTexData.shape = size;\n newTexData.dtype = x.dtype;\n let flatOffset = slice_util.computeFlatOffset(begin, x.strides);\n if (xTexData.slice) {\n // We are slicing an already sliced tensor, so we have to accumulate\n // the offset.\n flatOffset += xTexData.slice.flatOffset;\n }\n newTexData.slice = {\n flatOffset,\n // Point to the original dataId, which is used to do ref counting.\n origDataId: xTexData.slice && xTexData.slice.origDataId || x.dataId\n };\n // Increase the ref count for that data bucket.\n const refCount = this.dataRefCount.get(newTexData.slice.origDataId) || 1;\n this.dataRefCount.set(newTexData.slice.origDataId, refCount + 1);\n return t;\n }\n stridedSlice(x, begin, end, strides) {\n const cpuRes = this.tryRunOnCpuOrThrow([x], () => this.cpuBackend.stridedSlice(x, begin, end, strides));\n if (cpuRes) {\n return cpuRes;\n }\n const outShape = slice_util.computeOutShape(begin, end, strides);\n if (outShape.some(axis => axis === 0)) {\n return tensor([], outShape);\n }\n const program = new StridedSliceProgram(begin, strides, outShape);\n return this.compileAndRun(program, [x]);\n }\n reverse(x, axis) {\n const program = env().getBool('WEBGL_PACK_ARRAY_OPERATIONS') ?\n new ReversePackedProgram(x.shape, axis) :\n new ReverseProgram(x.shape, axis);\n return this.compileAndRun(program, [x]);\n }\n neg(x) {\n const cpuRes = this.tryRunOnCpuOrThrow([x], () => this.cpuBackend.neg(x));\n if (cpuRes) {\n return cpuRes;\n }\n if (env().getBool('WEBGL_PACK_UNARY_OPERATIONS')) {\n return this.packedUnaryOp(x, unary_op.NEG, x.dtype);\n }\n const program = new UnaryOpProgram(x.shape, unary_op.NEG);\n return this.compileAndRun(program, [x]);\n }\n batchMatMul(a, b, transposeA, transposeB) {\n const outerShapeA = transposeA ? a.shape[2] : a.shape[1];\n const outerShapeB = transposeB ? b.shape[1] : b.shape[2];\n const sharedDim = transposeA ? 
a.shape[1] : a.shape[2];\n const batch = Math.max(a.shape[0], b.shape[0]);\n // Since the matrices are vectors, it is faster to call mul().sum()\n // because sum() is O(sqrt(N)) due to divide-and-conquer.\n if ((outerShapeA === 1 || outerShapeB === 1) &&\n sharedDim > MATMUL_SHARED_DIM_THRESHOLD) {\n if (transposeA) {\n a = transpose(a, [0, 2, 1]);\n }\n if (transposeB) {\n b = transpose(b, [0, 2, 1]);\n }\n const a3D = outerShapeB === 1 ? a : a.as3D(batch, sharedDim, 1);\n const axis = outerShapeB === 1 ? 2 : 1;\n const b3D = outerShapeB === 1 ? b.as3D(batch, 1, sharedDim) : b;\n // TODO(annxingyuan): Call multiply directly as part of batchMatMul\n // modularization.\n const product = tf.mul(a3D, b3D);\n return product.sum(axis, true /* keepDims */);\n }\n const dtype = upcastType(a.dtype, b.dtype);\n const program = new MatMulPackedProgram(a.shape, b.shape, [batch, outerShapeA, outerShapeB], transposeA, transposeB);\n return this.compileAndRun(program, [a, b], dtype);\n }\n fusedBatchMatMul({ a, b, transposeA, transposeB, bias, activation, preluActivationWeights }) {\n const outerShapeA = transposeA ? a.shape[2] : a.shape[1];\n const outerShapeB = transposeB ? b.shape[1] : b.shape[2];\n const batch = Math.max(a.shape[0], b.shape[0]);\n const dtype = upcastType(a.dtype, b.dtype);\n const hasBias = bias != null;\n const hasPreluActivationWeights = preluActivationWeights != null;\n const fusedActivation = activation ? mapActivationToShaderProgram(activation, true) : null;\n const program = new MatMulPackedProgram(a.shape, b.shape, [batch, outerShapeA, outerShapeB], transposeA, transposeB, hasBias, fusedActivation, hasPreluActivationWeights);\n const inputs = [a, b];\n if (bias) {\n inputs.push(bias);\n }\n if (preluActivationWeights) {\n inputs.push(preluActivationWeights);\n }\n return this.compileAndRun(program, inputs, dtype);\n }\n localResponseNormalization4D(x, radius, bias, alpha, beta) {\n const program = env().getBool('WEBGL_PACK_NORMALIZATION') ?\n new LRNPackedProgram(x.shape, radius, bias, alpha, beta) :\n new LRNProgram(x.shape, radius, bias, alpha, beta);\n return this.compileAndRun(program, [x]);\n }\n LRNGrad(dy, inputImage, outputImage, depthRadius, bias, alpha, beta) {\n const program = new LRNGradProgram(inputImage.shape, depthRadius, bias, alpha, beta);\n return this.compileAndRun(program, [inputImage, outputImage, dy]);\n }\n tile(x, reps) {\n if (x.dtype === 'string') {\n const data = this.readSync(x.dataId);\n const decodedData = data.map(d => util.decodeString(d));\n const buf = buffer(x.shape, x.dtype, decodedData);\n return tile(buf, reps);\n }\n const program = new TileProgram(x.shape, reps);\n return this.compileAndRun(program, [x]);\n }\n pad(x, paddings, constantValue) {\n const program = env().getBool('WEBGL_PACK_ARRAY_OPERATIONS') ?\n new PadPackedProgram(x.shape, paddings, constantValue) :\n new PadProgram(x.shape, paddings, constantValue);\n return this.compileAndRun(program, [x]);\n }\n gather(x, indices, axis) {\n const cpuRes = this.tryRunOnCpuOrThrow([x, indices], () => this.cpuBackend.gather(x, indices, axis));\n if (cpuRes) {\n return cpuRes;\n }\n const program = new GatherProgram(x.shape, indices.size, axis);\n return this.compileAndRun(program, [x, indices]);\n }\n batchToSpaceND(x, blockShape, crops) {\n util.assert(x.rank <= 4, () => 'batchToSpaceND for rank > 4 with a WebGL backend not ' +\n 'implemented yet');\n const prod = blockShape.reduce((a, b) => a * b);\n const reshaped = backend_util.getReshaped(x.shape, blockShape, prod);\n const 
permuted = backend_util.getPermuted(reshaped.length, blockShape.length);\n const reshapedPermuted = backend_util.getReshapedPermuted(x.shape, blockShape, prod);\n const sliceBeginCoords = backend_util.getSliceBeginCoords(crops, blockShape.length);\n const sliceSize = backend_util.getSliceSize(reshapedPermuted, crops, blockShape.length);\n return transpose(x.reshape(reshaped), permuted)\n .reshape(reshapedPermuted)\n .slice(sliceBeginCoords, sliceSize);\n }\n spaceToBatchND(x, blockShape, paddings) {\n util.assert(x.rank <= 4, () => 'spaceToBatchND for rank > 4 with a WebGL backend not ' +\n 'implemented yet');\n const prod = blockShape.reduce((a, b) => a * b);\n const completePaddings = [[0, 0]];\n completePaddings.push(...paddings);\n for (let i = 1 + blockShape.length; i < x.shape.length; ++i) {\n completePaddings.push([0, 0]);\n }\n const paddedX = x.pad(completePaddings);\n const reshapedPaddedShape = backend_util.getReshaped(paddedX.shape, blockShape, prod, false);\n const permutedReshapedPaddedPermutation = backend_util.getPermuted(reshapedPaddedShape.length, blockShape.length, false);\n const flattenShape = backend_util.getReshapedPermuted(paddedX.shape, blockShape, prod, false);\n const paddedXT = transpose(paddedX.reshape(reshapedPaddedShape), permutedReshapedPaddedPermutation);\n return reshape(paddedXT, flattenShape);\n }\n reduce(x, reduceType, dtype) {\n const batchSize = x.shape[0];\n const inSize = x.shape[1];\n const windowSize = backend_util.computeOptimalWindowSize(inSize);\n const outSize = Math.ceil(inSize / windowSize);\n const reduceInfo = { windowSize, inSize, batchSize, outSize };\n const program = new ReduceProgram(reduceInfo, reduceType);\n const output = this.compileAndRun(program, [x], dtype);\n // No need to run another GPGPU program.\n if (output.shape[1] === 1) {\n return output;\n }\n return this.reduce(output, reduceType, dtype);\n }\n argReduce(x, reduceType, bestIndicesA = null) {\n let batchSize = x.shape[0];\n let inSize = x.shape[1];\n if (bestIndicesA != null) {\n batchSize = bestIndicesA.shape[0];\n inSize = bestIndicesA.shape[1];\n }\n const windowSize = backend_util.computeOptimalWindowSize(inSize);\n const reduceInfo = {\n windowSize,\n inSize,\n batchSize,\n outSize: Math.ceil(inSize / windowSize)\n };\n const program = new ArgMinMaxProgram(reduceInfo, reduceType, bestIndicesA == null);\n const inputs = [x];\n if (bestIndicesA != null) {\n inputs.push(bestIndicesA);\n }\n const output = this.compileAndRun(program, inputs, 'int32');\n // No need to run another GPGPU program.\n if (output.shape[1] === 1) {\n return output;\n }\n return this.argReduce(x, reduceType, output);\n }\n argReducePacked(x, reduceType, bestIndicesA = null) {\n const inShape = bestIndicesA != null ? bestIndicesA.shape : x.shape;\n const inSize = inShape[inShape.length - 1];\n const windowSize = backend_util.computeOptimalWindowSize(inSize);\n const program = new ArgMinMaxPackedProgram(inShape, windowSize, reduceType, bestIndicesA == null);\n const inputs = bestIndicesA == null ? 
[x] : [x, bestIndicesA];\n const output = this.compileAndRun(program, inputs, 'int32');\n if (output.rank === x.rank) {\n return this.argReducePacked(x, reduceType, output);\n }\n return output;\n }\n sum(x, axes) {\n backend_util.assertAxesAreInnerMostDims('sum', axes, x.rank);\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const inSize = util.sizeFromShape(reduceShape);\n const a2D = x.as2D(-1, inSize);\n const outputDType = tf.sumOutType(x.dtype);\n return this.reduce(a2D, 'sum', outputDType).reshape(outShape);\n }\n prod(x, axes) {\n const cpuRes = this.tryRunOnCpuOrThrow([x], () => this.cpuBackend.prod(x, axes));\n if (cpuRes) {\n return cpuRes;\n }\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const inSize = util.sizeFromShape(reduceShape);\n const a2D = x.as2D(-1, inSize);\n const outputDType = tf.sumOutType(x.dtype);\n return this.reduce(a2D, 'prod', outputDType).reshape(outShape);\n }\n unsortedSegmentSum(x, segmentIds, numSegments) {\n let axis = 0;\n const permutation = backend_util.getAxesPermutation([axis], x.rank);\n let permutedX = x;\n if (permutation != null) {\n permutedX = transpose(x, permutation);\n axis = backend_util.getInnerMostAxes(1, x.rank)[0];\n }\n const outShape = segment_util.computeOutShape(permutedX.shape, axis, numSegments);\n const inSize = util.sizeFromShape([permutedX.shape[axis]]);\n const a2D = permutedX.as2D(-1, inSize);\n const outputDType = tf.sumOutType(x.dtype);\n let result = this.segOpCompute(a2D, 'unsortedSegmentSum', segmentIds, outputDType, numSegments)\n .reshape(outShape);\n if (permutation != null) {\n result =\n transpose(result, backend_util.getUndoAxesPermutation(permutation));\n }\n return result;\n }\n segOpCompute(x, segOpType, segmentIds, dtype, numSegments) {\n const batchSize = x.shape[0];\n const inSize = x.shape[1];\n const windowSize = segment_util.segOpComputeOptimalWindowSize(inSize, numSegments);\n const segOpInfo = { windowSize, inSize, batchSize, numSegments };\n const program = new SegmentOpProgram(segOpInfo, segOpType);\n const output = this.compileAndRun(program, [x, segmentIds], dtype);\n // No need to run another GPGPU program.\n if (output.shape[1] === numSegments) {\n return output;\n }\n segmentIds = range(0, numSegments).tile([inSize / windowSize]);\n return this.segOpCompute(output, segOpType, segmentIds, dtype, numSegments);\n }\n argMinMaxReduce(x, axis, reduceType) {\n const axes = [axis];\n backend_util.assertAxesAreInnerMostDims('arg' + reduceType.charAt(0).toUpperCase() + reduceType.slice(1), axes, x.rank);\n if (!env().getBool('WEBGL_PACK_REDUCE') || x.rank <= 2) {\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const inSize = util.sizeFromShape(reduceShape);\n const a2D = x.as2D(-1, inSize);\n return this.argReduce(a2D, reduceType).reshape(outShape);\n }\n return this.argReducePacked(x, reduceType);\n }\n argMin(x, axis) {\n return this.argMinMaxReduce(x, axis, 'min');\n }\n argMax(x, axis) {\n return this.argMinMaxReduce(x, axis, 'max');\n }\n cumsum(x, axis, exclusive, reverse) {\n if (axis !== x.rank - 1) {\n throw new Error(`WebGL cumsum shader expects an inner-most axis=${x.rank - 1} ` +\n `but got axis=${axis}`);\n }\n const size = x.shape[axis];\n let result = x;\n // Use cumsum parallel algorithm, ref:\n // https://developer.nvidia.com/gpugems/gpugems3/part-vi-gpu-computing/chapter-39-parallel-prefix-sum-scan-cuda\n for (let i = 0; i <= Math.ceil(Math.log2(size)) - 
1; i++) {\n const program = new CumSumProgram(x.shape, false, reverse);\n const customSetup = program.getCustomSetupFunc(i);\n const prevResult = result;\n result = this.compileAndRun(program, [result], result.dtype, customSetup);\n prevResult.dispose();\n }\n // For exclusive cumsum, shift the end result in the direction of sum and\n // add 0 to the front index.\n if (exclusive) {\n const program = new CumSumProgram(x.shape, exclusive, reverse);\n const prevResult = result;\n result = this.compileAndRun(program, [result]);\n prevResult.dispose();\n }\n return result;\n }\n equal(a, b) {\n if (env().getBool('WEBGL_PACK_BINARY_OPERATIONS')) {\n return this.packedBinaryOp(a, b, binaryop_packed_gpu.EQUAL, 'bool');\n }\n const program = new BinaryOpProgram(binaryop_gpu.EQUAL, a.shape, b.shape);\n return this.compileAndRun(program, [a, b], 'bool');\n }\n less(a, b) {\n const cpuRes = this.tryRunOnCpuOrThrow([a, b], () => this.cpuBackend.less(a, b));\n if (cpuRes) {\n return cpuRes;\n }\n if (env().getBool('WEBGL_PACK_BINARY_OPERATIONS')) {\n return this.packedBinaryOp(a, b, binaryop_packed_gpu.LESS, 'bool');\n }\n const program = new BinaryOpProgram(binaryop_gpu.LESS, a.shape, b.shape);\n return this.compileAndRun(program, [a, b], 'bool');\n }\n lessEqual(a, b) {\n if (env().getBool('WEBGL_PACK_BINARY_OPERATIONS')) {\n return this.packedBinaryOp(a, b, binaryop_packed_gpu.LESS_EQUAL, 'bool');\n }\n const program = new BinaryOpProgram(binaryop_gpu.LESS_EQUAL, a.shape, b.shape);\n return this.compileAndRun(program, [a, b], 'bool');\n }\n greater(a, b) {\n const cpuRes = this.tryRunOnCpuOrThrow([a, b], () => this.cpuBackend.greater(a, b));\n if (cpuRes) {\n return cpuRes;\n }\n if (env().getBool('WEBGL_PACK_BINARY_OPERATIONS')) {\n return this.packedBinaryOp(a, b, binaryop_packed_gpu.GREATER, 'bool');\n }\n const program = new BinaryOpProgram(binaryop_gpu.GREATER, a.shape, b.shape);\n return this.compileAndRun(program, [a, b], 'bool');\n }\n greaterEqual(a, b) {\n if (env().getBool('WEBGL_PACK_BINARY_OPERATIONS')) {\n return this.packedBinaryOp(a, b, binaryop_packed_gpu.GREATER_EQUAL, 'bool');\n }\n const program = new BinaryOpProgram(binaryop_gpu.GREATER_EQUAL, a.shape, b.shape);\n return this.compileAndRun(program, [a, b], 'bool');\n }\n logicalNot(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.LOGICAL_NOT);\n return this.compileAndRun(program, [x]);\n }\n logicalAnd(a, b) {\n if (env().getBool('WEBGL_PACK_BINARY_OPERATIONS')) {\n return this.packedBinaryOp(a, b, binaryop_packed_gpu.LOGICAL_AND, 'bool');\n }\n const program = new BinaryOpProgram(binaryop_gpu.LOGICAL_AND, a.shape, b.shape);\n return this.compileAndRun(program, [a, b], 'bool');\n }\n logicalOr(a, b) {\n if (env().getBool('WEBGL_PACK_BINARY_OPERATIONS')) {\n return this.packedBinaryOp(a, b, binaryop_packed_gpu.LOGICAL_OR, 'bool');\n }\n const program = new BinaryOpProgram(binaryop_gpu.LOGICAL_OR, a.shape, b.shape);\n return this.compileAndRun(program, [a, b], 'bool');\n }\n select(condition, a, b) {\n const program = new SelectProgram(condition.rank, a.shape, a.rank);\n return this.compileAndRun(program, [condition, a, b], upcastType(a.dtype, b.dtype));\n }\n where(condition) {\n backend_util.warn('tf.where() in webgl locks the UI thread. 
' +\n 'Call tf.whereAsync() instead');\n const condVals = condition.dataSync();\n return whereImpl(condition.shape, condVals);\n }\n topk(x, k, sorted) {\n const xVals = x.dataSync();\n return topkImpl(xVals, x.shape, x.dtype, k, sorted);\n }\n min(x, axes) {\n backend_util.assertAxesAreInnerMostDims('min', axes, x.rank);\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const inSize = util.sizeFromShape(reduceShape);\n const a2D = x.as2D(-1, inSize);\n return this.reduce(a2D, 'min', a2D.dtype).reshape(outShape);\n }\n minimum(a, b) {\n const cpuRes = this.tryRunOnCpuOrThrow([a, b], () => this.cpuBackend.minimum(a, b));\n if (cpuRes) {\n return cpuRes;\n }\n const program = env().getBool('WEBGL_PACK_BINARY_OPERATIONS') ?\n new BinaryOpPackedProgram(binaryop_packed_gpu.MIN, a.shape, b.shape) :\n new BinaryOpProgram(binaryop_gpu.MIN, a.shape, b.shape);\n return this.compileAndRun(program, [a, b]);\n }\n mod(a, b) {\n const program = env().getBool('WEBGL_PACK_BINARY_OPERATIONS') ?\n new BinaryOpPackedProgram(binaryop_packed_gpu.MOD, a.shape, b.shape) :\n new BinaryOpProgram(binaryop_gpu.MOD, a.shape, b.shape);\n return this.compileAndRun(program, [a, b]);\n }\n maximum(a, b) {\n const cpuRes = this.tryRunOnCpuOrThrow([a, b], () => this.cpuBackend.maximum(a, b));\n if (cpuRes) {\n return cpuRes;\n }\n const program = env().getBool('WEBGL_PACK_BINARY_OPERATIONS') ?\n new BinaryOpPackedProgram(binaryop_packed_gpu.MAX, a.shape, b.shape) :\n new BinaryOpProgram(binaryop_gpu.MAX, a.shape, b.shape);\n return this.compileAndRun(program, [a, b]);\n }\n all(x, axes) {\n backend_util.assertAxesAreInnerMostDims('all', axes, x.rank);\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const inSize = util.sizeFromShape(reduceShape);\n const a2D = x.as2D(-1, inSize);\n return this.reduce(a2D, 'all', a2D.dtype).reshape(outShape);\n }\n any(x, axes) {\n backend_util.assertAxesAreInnerMostDims('any', axes, x.rank);\n const [outShape, reduceShape] = backend_util.computeOutAndReduceShapes(x.shape, axes);\n const inSize = util.sizeFromShape(reduceShape);\n const a2D = x.as2D(-1, inSize);\n return this.reduce(a2D, 'any', a2D.dtype).reshape(outShape);\n }\n floorDiv(a, b) {\n const op = binaryop_gpu.INT_DIV;\n const outputDtype = 'int32';\n if (env().getBool('WEBGL_PACK_BINARY_OPERATIONS')) {\n return this.packedBinaryOp(a, b, binaryop_packed_gpu.INT_DIV, outputDtype);\n }\n const program = new BinaryOpProgram(op, a.shape, b.shape);\n return this.compileAndRun(program, [a, b], outputDtype);\n }\n packedUnaryOp(x, op, dtype) {\n const program = new UnaryOpPackedProgram(x.shape, op);\n return this.compileAndRun(program, [x], dtype);\n }\n packedBinaryOp(a, b, op, dtype, checkOutOfBounds = false) {\n const program = new BinaryOpPackedProgram(op, a.shape, b.shape, checkOutOfBounds);\n return this.compileAndRun(program, [a, b], dtype);\n }\n // Returns a TensorInfo with the complex shape and the dataId of the\n // underlying part. 
We need to do this because a reshaped complex tensor is\n // not reflected in its parts.\n makeComplexComponentTensorInfo(complexTensor, complexPart) {\n return {\n dataId: complexPart.dataId,\n dtype: complexPart.dtype,\n shape: complexTensor.shape\n };\n }\n addN(tensors) {\n if (tensors.length === 1) {\n return tensors[0];\n }\n // Limit the number of uploaded textures for optimization.\n if (tensors.length > env().get('WEBGL_MAX_TEXTURES_IN_SHADER')) {\n const midIndex = Math.floor(tensors.length / 2);\n const leftSide = this.addN(tensors.slice(0, midIndex));\n const rightSide = this.addN(tensors.slice(midIndex));\n return this.addN([leftSide, rightSide]);\n }\n const dtype = tensors.map(t => t.dtype).reduce((d1, d2) => upcastType(d1, d2));\n const shapes = tensors.map(t => t.shape);\n // We can make sure shapes are identical in op level.\n const usePackedOp = env().getBool('WEBGL_PACK');\n const program = usePackedOp ?\n new AddNPackedProgram(tensors[0].shape, shapes) :\n new AddNProgram(tensors[0].shape, shapes);\n return this.compileAndRun(program, tensors, dtype);\n }\n pow(a, b) {\n const usePackedOp = env().getBool('WEBGL_PACK_BINARY_OPERATIONS');\n const program = usePackedOp ?\n new BinaryOpPackedProgram(binaryop_packed_gpu.POW, a.shape, b.shape) :\n new BinaryOpProgram(binaryop_gpu.POW, a.shape, b.shape);\n const dtype = upcastType(a.dtype, b.dtype);\n return this.compileAndRun(program, [a, b], dtype);\n }\n ceil(x) {\n if (this.shouldExecuteOnCPU([x])) {\n const outValues = ceilImplCPU(this.texData.get(x.dataId).values, x.dtype);\n return this.makeOutput(x.shape, x.dtype, outValues);\n }\n if (env().getBool('WEBGL_PACK_UNARY_OPERATIONS')) {\n return this.packedUnaryOp(x, unary_op.CEIL, x.dtype);\n }\n const program = new UnaryOpProgram(x.shape, unary_op.CEIL);\n return this.compileAndRun(program, [x]);\n }\n floor(x) {\n if (this.shouldExecuteOnCPU([x])) {\n const outValues = floorImplCPU(this.texData.get(x.dataId).values, x.dtype);\n return this.makeOutput(x.shape, x.dtype, outValues);\n }\n if (env().getBool('WEBGL_PACK_UNARY_OPERATIONS')) {\n return this.packedUnaryOp(x, unary_op.FLOOR, x.dtype);\n }\n const program = new UnaryOpProgram(x.shape, unary_op.FLOOR);\n return this.compileAndRun(program, [x]);\n }\n sign(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.SIGN);\n return this.compileAndRun(program, [x]);\n }\n isNaN(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.IS_NAN);\n return this.compileAndRun(program, [x], 'bool');\n }\n isInf(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.IS_INF);\n return this.compileAndRun(program, [x], 'bool');\n }\n isFinite(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.IS_FINITE);\n return this.compileAndRun(program, [x], 'bool');\n }\n round(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.ROUND);\n return this.compileAndRun(program, [x]);\n }\n exp(x) {\n if (this.shouldExecuteOnCPU([x])) {\n const outValues = expImplCPU(this.texData.get(x.dataId).values, x.dtype);\n return this.makeOutput(x.shape, x.dtype, outValues);\n }\n if (env().getBool('WEBGL_PACK_UNARY_OPERATIONS')) {\n return this.packedUnaryOp(x, unary_op.EXP, x.dtype);\n }\n const program = new UnaryOpProgram(x.shape, unary_op.EXP);\n return this.compileAndRun(program, [x]);\n }\n expm1(x) {\n if (this.shouldExecuteOnCPU([x])) {\n const outValues = expm1ImplCPU(this.texData.get(x.dataId).values, x.dtype);\n return this.makeOutput(x.shape, x.dtype, outValues);\n }\n if 
(env().getBool('WEBGL_PACK_UNARY_OPERATIONS')) {\n return this.packedUnaryOp(x, unary_op.EXPM1, x.dtype);\n }\n const program = new UnaryOpProgram(x.shape, unary_op.EXPM1);\n return this.compileAndRun(program, [x]);\n }\n softmax(logits, dim) {\n const axes = util.parseAxisParam([dim], logits.shape);\n // TODO(annxingyuan): Call maxImpl rather than op as part of softmax kernel\n // modularization.\n const maxLogit = max(logits, axes);\n const expandedShape = backend_util.expandShapeToKeepDim(maxLogit.shape, axes);\n // TODO(annxingyuan): Call sub directly as part of softmax kernel\n // modularization.\n const a = tf.sub(logits, maxLogit.reshape(expandedShape));\n const b = this.exp(a);\n const sumExp = this.sum(b, axes).reshape(expandedShape);\n // TODO(annxingyuan): Call divImpl rather than op as part of softmax kernel\n // modularization.\n return div(b, sumExp);\n }\n log(x) {\n if (this.shouldExecuteOnCPU([x])) {\n const outValues = logImplCPU(this.texData.get(x.dataId).values, x.dtype);\n return this.makeOutput(x.shape, x.dtype, outValues);\n }\n if (env().getBool('WEBGL_PACK_UNARY_OPERATIONS')) {\n return this.packedUnaryOp(x, unary_packed_op.LOG, x.dtype);\n }\n const program = new UnaryOpProgram(x.shape, unary_op.LOG);\n return this.compileAndRun(program, [x]);\n }\n log1p(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.LOG1P);\n return this.compileAndRun(program, [x]);\n }\n sqrt(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.SQRT);\n return this.compileAndRun(program, [x]);\n }\n rsqrt(x) {\n if (this.shouldExecuteOnCPU([x])) {\n const outValues = rsqrtImplCPU(this.texData.get(x.dataId).values, x.dtype);\n return this.makeOutput(x.shape, x.dtype, outValues);\n }\n const program = new UnaryOpProgram(x.shape, unary_op.RSQRT);\n return this.compileAndRun(program, [x]);\n }\n reciprocal(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.RECIPROCAL);\n return this.compileAndRun(program, [x]);\n }\n relu(x) {\n let program;\n if (env().getBool('WEBGL_PACK')) {\n program = new UnaryOpPackedProgram(x.shape, unary_packed_op.RELU);\n }\n else {\n program = new UnaryOpProgram(x.shape, unary_op.RELU);\n }\n return this.compileAndRun(program, [x]);\n }\n relu6(x) {\n let program;\n if (env().getBool('WEBGL_PACK')) {\n program = new UnaryOpPackedProgram(x.shape, unary_packed_op.RELU6);\n }\n else {\n program = new UnaryOpProgram(x.shape, unary_op.RELU6);\n }\n return this.compileAndRun(program, [x]);\n }\n prelu(x, alpha) {\n const program = env().getBool('WEBGL_PACK_BINARY_OPERATIONS') ?\n new BinaryOpPackedProgram(binaryop_packed_gpu.PRELU, x.shape, alpha.shape) :\n new BinaryOpProgram(binaryop_gpu.PRELU, x.shape, alpha.shape);\n return this.compileAndRun(program, [x, alpha]);\n }\n elu(x) {\n if (env().getBool('WEBGL_PACK_UNARY_OPERATIONS')) {\n return this.packedUnaryOp(x, unary_packed_op.ELU, x.dtype);\n }\n const program = new UnaryOpProgram(x.shape, unary_op.ELU);\n return this.compileAndRun(program, [x]);\n }\n eluDer(dy, y) {\n const program = env().getBool('WEBGL_PACK_BINARY_OPERATIONS') ?\n new BinaryOpPackedProgram(binaryop_packed_gpu.ELU_DER, dy.shape, y.shape) :\n new BinaryOpProgram(binaryop_gpu.ELU_DER, dy.shape, y.shape);\n return this.compileAndRun(program, [dy, y]);\n }\n selu(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.SELU);\n return this.compileAndRun(program, [x]);\n }\n clip(x, min, max) {\n let program;\n if (env().getBool('WEBGL_PACK_CLIP')) {\n program = new ClipPackedProgram(x.shape);\n }\n else {\n program = new 
ClipProgram(x.shape);\n }\n const customSetup = program.getCustomSetupFunc(min, max);\n return this.compileAndRun(program, [x], null, customSetup);\n }\n abs(x) {\n // TODO: handle cases when x is complex.\n if (this.shouldExecuteOnCPU([x]) && x.dtype !== 'complex64') {\n const outValues = simpleAbsImplCPU(this.texData.get(x.dataId).values);\n return this.makeOutput(x.shape, x.dtype, outValues);\n }\n if (env().getBool('WEBGL_PACK_UNARY_OPERATIONS')) {\n return this.packedUnaryOp(x, unary_op.ABS, x.dtype);\n }\n const program = new UnaryOpProgram(x.shape, unary_op.ABS);\n return this.compileAndRun(program, [x]);\n }\n complexAbs(x) {\n const xData = this.texData.get(x.dataId);\n const program = new ComplexAbsProgram(x.shape);\n const inputs = [\n this.makeComplexComponentTensorInfo(x, xData.complexTensorInfos.real),\n this.makeComplexComponentTensorInfo(x, xData.complexTensorInfos.imag),\n ];\n return this.compileAndRun(program, inputs);\n }\n sigmoid(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.SIGMOID);\n return this.compileAndRun(program, [x]);\n }\n softplus(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.SOFTPLUS);\n return this.compileAndRun(program, [x]);\n }\n asin(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.ASIN);\n return this.compileAndRun(program, [x]);\n }\n acos(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.ACOS);\n return this.compileAndRun(program, [x]);\n }\n atan(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.ATAN);\n return this.compileAndRun(program, [x]);\n }\n sinh(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.SINH);\n return this.compileAndRun(program, [x]);\n }\n cosh(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.COSH);\n return this.compileAndRun(program, [x]);\n }\n tanh(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.TANH);\n return this.compileAndRun(program, [x]);\n }\n asinh(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.ASINH);\n return this.compileAndRun(program, [x]);\n }\n acosh(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.ACOSH);\n return this.compileAndRun(program, [x]);\n }\n atanh(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.ATANH);\n return this.compileAndRun(program, [x]);\n }\n erf(x) {\n const program = new UnaryOpProgram(x.shape, unary_op.ERF);\n return this.compileAndRun(program, [x]);\n }\n step(x, alpha) {\n const program = new UnaryOpProgram(x.shape, unary_op.STEP(alpha));\n return this.compileAndRun(program, [x]);\n }\n conv2dByMatMul(x, filter, convInfo, bias, activation, preluActivationWeights) {\n // Reshapes conv2D input to 2D tensors, uses matMul and then reshape the\n // result from 2D to 4D.\n const xShape = x.shape;\n const xTexData = this.texData.get(x.dataId);\n const sharedMatMulDim = convInfo.inChannels;\n const outerShapeX = xShape[0] * xShape[1] * xShape[2];\n const outerShapeFilter = convInfo.outChannels;\n const isChannelsLast = convInfo.dataFormat === 'channelsLast';\n const transposeA = false;\n const transposeB = false;\n // TODO: Once reduction ops are packed, batchMatMul will always be packed\n // and we can remove this condition.\n const batchMatMulWillBeUnpacked = (outerShapeX === 1 || outerShapeFilter === 1) &&\n sharedMatMulDim > MATMUL_SHARED_DIM_THRESHOLD;\n const reshapeWillBeExpensive = xShape[2] % 2 !== 0 && !!xTexData.isPacked;\n if (batchMatMulWillBeUnpacked || !env().getBool('WEBGL_LAZILY_UNPACK') ||\n !env().getBool('WEBGL_PACK_BINARY_OPERATIONS') ||\n 
!reshapeWillBeExpensive) {\n const targetShape = isChannelsLast ? xShape[0] * xShape[1] * xShape[2] :\n xShape[0] * xShape[2] * xShape[3];\n const xReshaped = reshape(x, [1, targetShape, convInfo.inChannels]);\n const filterReshaped = reshape(filter, [1, convInfo.inChannels, convInfo.outChannels]);\n const result = this.fusedBatchMatMul({\n a: xReshaped,\n b: filterReshaped,\n transposeA,\n transposeB,\n bias,\n activation,\n preluActivationWeights\n });\n return reshape(result, convInfo.outShape);\n }\n // Following optimization is specific to packed |x| with odd row count\n // (For example, in channelLast mode, 'row count' refers to x.shape[2]):\n // we avoid expensive packed 2x2 reshape by padding row count to next,\n // even number. When x.shape[2] is odd, the result of packed batchMatMul is\n // the same (has the same texture layout and and values in the texture) as\n // it is for even x.shape[2] + 1. We make the odd-rows tensor to look like\n // even-rows tensor before the operation and, after the batchMatMul,\n // fix the even-rows result to have odd number of rows.\n const targetShape = isChannelsLast ?\n xShape[0] * xShape[1] * (xShape[2] + 1) :\n xShape[0] * xShape[2] * (xShape[3] + 1);\n const xReshaped = {\n dataId: x.dataId,\n shape: [1, targetShape, convInfo.inChannels],\n dtype: x.dtype\n };\n // xTexData.shape gets referenced from GPGPUBinary.inShapeInfos.\n // Decrementing row count, after batchMatMul->...->compileProgram leads to\n // invalid row count within the reference in GPGPUBinary.inShapeInfos.\n // Alternative fix would be to provide a copy to GPGPUBinary.inShapeInfos\n // in compileProgram method, but that would affect compilation of all\n // programs - instead, provide a copy here, with even row count, before\n // calling batchMatMul->...->compileProgram and after that, the original\n // xTexData.shape is restored.\n const originalXTexDataShape = xTexData.shape;\n xTexData.shape = xTexData.shape.slice();\n xTexData.shape[xTexData.shape.length - 2]++;\n util.assert(webgl_util.isReshapeFree(xTexData.shape, xReshaped.shape), () => `packed reshape ${xTexData.shape} to ${xReshaped.shape} isn't free`);\n const filterReshaped = reshape(filter, [1, convInfo.inChannels, convInfo.outChannels]);\n const pointwiseConv = this.fusedBatchMatMul({\n a: xReshaped,\n b: filterReshaped,\n transposeA,\n transposeB,\n bias,\n activation,\n preluActivationWeights\n });\n const pointwiseConvTexData = this.texData.get(pointwiseConv.dataId);\n util.assert(pointwiseConvTexData.isPacked, () => 'batchMatMul result is expected to be packed');\n // Restore the input shape to original.\n xTexData.shape = originalXTexDataShape;\n // Set the output shape - there is no need for expensive reshape as data\n // layout is already correct.\n pointwiseConvTexData.shape = convInfo.outShape;\n return engine().makeTensorFromDataId(pointwiseConv.dataId, convInfo.outShape, pointwiseConv.dtype);\n }\n conv2dWithIm2Row(x, filter, convInfo, bias, activation, preluActivationWeights) {\n // Rearranges conv2d input so each block to be convolved over forms the\n // column of a new matrix with shape [filterWidth * filterHeight *\n // inChannels, outHeight * outWidth]. The filter is also rearranged so each\n // output channel forms a row of a new matrix with shape [outChannels,\n // filterWidth * filterHeight * inChannels]. 
The convolution is then\n // computed by multiplying these matrices and reshaping the result.\n const { filterWidth, filterHeight, inChannels, outWidth, outHeight, dataFormat } = convInfo;\n const isChannelsLast = dataFormat === 'channelsLast';\n const sharedDim = filterWidth * filterHeight * inChannels;\n const numCols = outHeight * outWidth;\n const x2ColShape = [sharedDim, numCols];\n const transposeA = true;\n const transposeB = false;\n const xSqueezed = x.squeeze([0]);\n const w2Row = filter.reshape([1, sharedDim, -1]);\n const im2ColProgram = new Im2ColPackedProgram(x2ColShape, xSqueezed.shape, convInfo);\n const im2Col = this.compileAndRun(im2ColProgram, [xSqueezed]).reshape([\n 1, x2ColShape[0], x2ColShape[1]\n ]);\n const hasBias = bias != null;\n const hasPreluActivationWeights = preluActivationWeights != null;\n const fusedActivation = activation ? mapActivationToShaderProgram(activation, true) : null;\n const matmulProgram = new MatMulPackedProgram(im2Col.shape, w2Row.shape, [1, numCols, convInfo.outChannels], transposeA, transposeB, hasBias, fusedActivation, hasPreluActivationWeights);\n const inputs = [im2Col, w2Row];\n if (bias) {\n inputs.push(bias);\n }\n if (hasPreluActivationWeights) {\n inputs.push(preluActivationWeights);\n }\n const product = this.compileAndRun(matmulProgram, inputs);\n if (isChannelsLast) {\n return product.reshape([1, outHeight, outWidth, convInfo.outChannels]);\n }\n else {\n return product.reshape([1, convInfo.outChannels, outHeight, outWidth]);\n }\n }\n fusedConv2d({ input, filter, convInfo, bias, activation, preluActivationWeights }) {\n if (convInfo.filterHeight === 1 && convInfo.filterWidth === 1 &&\n convInfo.dilationHeight === 1 && convInfo.dilationWidth === 1 &&\n convInfo.strideHeight === 1 && convInfo.strideWidth === 1 &&\n (convInfo.padInfo.type === 'SAME' ||\n convInfo.padInfo.type === 'VALID')) {\n return this.conv2dByMatMul(input, filter, convInfo, bias, activation, preluActivationWeights);\n }\n if (env().getBool('WEBGL_CONV_IM2COL') && input.shape[0] === 1) {\n return this.conv2dWithIm2Row(input, filter, convInfo, bias, activation, preluActivationWeights);\n }\n const hasBias = bias != null;\n const hasPreluActivationWeights = preluActivationWeights != null;\n const fusedActivation = activation ? 
mapActivationToShaderProgram(activation, false) : null;\n const program = new Conv2DProgram(convInfo, hasBias, fusedActivation, hasPreluActivationWeights);\n const inputs = [input, filter];\n if (bias) {\n inputs.push(bias);\n }\n if (preluActivationWeights) {\n inputs.push(preluActivationWeights);\n }\n return this.compileAndRun(program, inputs);\n }\n conv2d(x, filter, convInfo) {\n if (convInfo.filterHeight === 1 && convInfo.filterWidth === 1 &&\n convInfo.dilationHeight === 1 && convInfo.dilationWidth === 1 &&\n convInfo.strideHeight === 1 && convInfo.strideWidth === 1 &&\n (convInfo.padInfo.type === 'SAME' ||\n convInfo.padInfo.type === 'VALID')) {\n return this.conv2dByMatMul(x, filter, convInfo);\n }\n if (env().getBool('WEBGL_CONV_IM2COL') && x.shape[0] === 1) {\n return this.conv2dWithIm2Row(x, filter, convInfo);\n }\n const program = new Conv2DProgram(convInfo);\n return this.compileAndRun(program, [x, filter]);\n }\n conv2dDerInput(dy, filter, convInfo) {\n const program = new Conv2DDerInputProgram(convInfo);\n return this.compileAndRun(program, [dy, filter]);\n }\n conv2dDerFilter(x, dy, convInfo) {\n const program = new Conv2DDerFilterProgram(convInfo);\n return this.compileAndRun(program, [x, dy]);\n }\n fusedDepthwiseConv2D({ input, filter, convInfo, bias, activation, preluActivationWeights }) {\n const shouldPackDepthwiseConv = env().getBool('WEBGL_PACK_DEPTHWISECONV') &&\n convInfo.strideWidth <= 2 &&\n convInfo.outChannels / convInfo.inChannels === 1;\n const fusedActivation = activation ?\n mapActivationToShaderProgram(activation, shouldPackDepthwiseConv) :\n null;\n const inputs = [input, filter];\n const hasBias = bias != null;\n const hasPreluActivationWeights = preluActivationWeights != null;\n if (hasBias) {\n inputs.push(bias);\n }\n if (hasPreluActivationWeights) {\n inputs.push(preluActivationWeights);\n }\n let program;\n if (shouldPackDepthwiseConv) {\n program = new DepthwiseConvPacked2DProgram(convInfo, hasBias, fusedActivation, hasPreluActivationWeights);\n return this.compileAndRun(program, inputs);\n }\n program = new DepthwiseConv2DProgram(convInfo, hasBias, fusedActivation, hasPreluActivationWeights);\n return this.compileAndRun(program, inputs);\n }\n depthwiseConv2D(x, filter, convInfo) {\n let program;\n if (env().getBool('WEBGL_PACK_DEPTHWISECONV') &&\n convInfo.strideWidth <= 2 &&\n convInfo.outChannels / convInfo.inChannels === 1) {\n program = new DepthwiseConvPacked2DProgram(convInfo);\n return this.compileAndRun(program, [x, filter]);\n }\n program = new DepthwiseConv2DProgram(convInfo);\n return this.compileAndRun(program, [x, filter]);\n }\n depthwiseConv2DDerInput(dy, filter, convInfo) {\n const program = new DepthwiseConv2DDerInputProgram(convInfo);\n return this.compileAndRun(program, [dy, filter]);\n }\n depthwiseConv2DDerFilter(x, dy, convInfo) {\n const program = new DepthwiseConv2DDerFilterProgram(convInfo);\n return this.compileAndRun(program, [x, dy]);\n }\n conv3d(x, filter, convInfo) {\n const program = new Conv3DProgram(convInfo);\n return this.compileAndRun(program, [x, filter]);\n }\n conv3dDerInput(dy, filter, convInfo) {\n const program = new Conv3DDerInputProgram(convInfo);\n return this.compileAndRun(program, [dy, filter]);\n }\n conv3dDerFilter(x, dy, convInfo) {\n const program = new Conv3DDerFilterProgram(convInfo);\n return this.compileAndRun(program, [x, dy]);\n }\n unstack(x, axis) {\n const num = x.shape[axis];\n const outShape = new Array(x.rank - 1);\n let outIndex = 0;\n for (let i = 0; i < x.rank; i++) {\n if (i 
!== axis) {\n outShape[outIndex++] = x.shape[i];\n }\n }\n const begin = new Array(x.rank).fill(0);\n const size = x.shape.slice();\n size[axis] = 1;\n const res = new Array(num);\n for (let i = 0; i < res.length; i++) {\n begin[axis] = i;\n res[i] = this.slice(x, begin, size).reshape(outShape);\n }\n return res;\n }\n avgPool3d(x, convInfo) {\n const program = new Pool3DProgram(convInfo, 'avg', false);\n return this.compileAndRun(program, [x], 'float32');\n }\n avgPool3dBackprop(dy, x, convInfo) {\n const avgPool3dBackpropProgram = new AvgPool3DBackpropProgram(convInfo);\n return this.compileAndRun(avgPool3dBackpropProgram, [dy], x.dtype);\n }\n maxPool3d(x, convInfo) {\n const program = new Pool3DProgram(convInfo, 'max', false);\n return this.compileAndRun(program, [x], 'float32');\n }\n maxPool3dBackprop(dy, x, y, convInfo) {\n const getPositions = true;\n const maxPool3dPositionsProgram = new Pool3DProgram(convInfo, 'max', getPositions);\n const maxPool3dPositions = this.compileAndRun(maxPool3dPositionsProgram, [x]);\n const maxPool3dBackPropProgram = new MaxPool3DBackpropProgram(convInfo);\n const result = this.compileAndRun(maxPool3dBackPropProgram, [dy, maxPool3dPositions], x.dtype);\n maxPool3dPositions.dispose();\n return result;\n }\n resizeBilinear(x, newHeight, newWidth, alignCorners) {\n const program = env().getBool('WEBGL_PACK_IMAGE_OPERATIONS') ?\n new ResizeBilinearPackedProgram(x.shape, newHeight, newWidth, alignCorners) :\n new ResizeBilinearProgram(x.shape, newHeight, newWidth, alignCorners);\n return this.compileAndRun(program, [x], 'float32');\n }\n resizeBilinearBackprop(dy, x, alignCorners) {\n const program = new ResizeBilinearBackpropProgram(dy, x, alignCorners);\n return this.compileAndRun(program, [dy]);\n }\n resizeNearestNeighbor(x, newHeight, newWidth, alignCorners) {\n const program = new ResizeNearestNeighborProgram(x.shape, newHeight, newWidth, alignCorners);\n return this.compileAndRun(program, [x]);\n }\n resizeNearestNeighborBackprop(dy, x, alignCorners) {\n const program = new ResizeNearestNeigborBackpropProgram(dy, x, alignCorners);\n return this.compileAndRun(program, [dy]);\n }\n multinomial(logits, normalized, numSamples, seed) {\n const probs = normalized ? logits : softmax(logits);\n const batchSize = probs.shape[0];\n const numOutcomes = probs.shape[1];\n const program = new MultinomialProgram(batchSize, numOutcomes, numSamples);\n const customSetup = program.getCustomSetupFunc(seed);\n return this.compileAndRun(program, [probs], 'int32', customSetup);\n }\n oneHot(indices, depth, onValue, offValue) {\n const program = new OneHotProgram(indices.size, depth, onValue, offValue);\n return this.compileAndRun(program, [indices]);\n }\n diag(x) {\n const program = new DiagProgram(x.size);\n return this.compileAndRun(program, [x]);\n }\n cropAndResize(image, boxes, boxIndex, cropSize, method, extrapolationValue) {\n const program = new CropAndResizeProgram(image.shape, boxes.shape, cropSize, method, extrapolationValue);\n return this.compileAndRun(program, [image, boxes, boxIndex], 'float32');\n }\n depthToSpace(x, blockSize, dataFormat) {\n util.assert(blockSize > 1, () => `blockSize should be > 1 for depthToSpace, but was: ${blockSize}`);\n const batchSize = x.shape[0];\n const inputHeight = (dataFormat === 'NHWC') ? x.shape[1] : x.shape[2];\n const inputWidth = (dataFormat === 'NHWC') ? x.shape[2] : x.shape[3];\n const inputDepth = (dataFormat === 'NHWC') ? 
x.shape[3] : x.shape[1];\n const outputHeight = inputHeight * blockSize;\n const outputWidth = inputWidth * blockSize;\n const outputDepth = inputDepth / (blockSize * blockSize);\n const outputShape = (dataFormat === 'NHWC') ?\n [batchSize, outputHeight, outputWidth, outputDepth] :\n [batchSize, outputDepth, outputHeight, outputWidth];\n const program = new DepthToSpaceProgram(outputShape, blockSize, dataFormat);\n return this.compileAndRun(program, [x]);\n }\n split(x, sizeSplits, axis) {\n return split(x, sizeSplits, axis);\n }\n scatterND(indices, updates, shape) {\n const { sliceRank, numUpdates, sliceSize, strides, outputSize } = backend_util.calculateShapes(updates, indices, shape);\n const flattenShape = [outputSize / sliceSize, sliceSize];\n const flattenIndices = indices.reshape([numUpdates, sliceRank]);\n const flattenX = updates.reshape([numUpdates, sliceSize]);\n if (outputSize === 0) {\n return backend_util.reshapeTensor(tensor([]), shape);\n }\n const defaultValue = scalar(0);\n const program = new ScatterProgram(numUpdates, sliceRank, flattenIndices.rank, flattenX.rank, strides, flattenShape);\n const res = this.compileAndRun(program, [flattenX, flattenIndices, defaultValue]);\n return res.reshape(shape);\n }\n sparseToDense(sparseIndices, sparseValues, outputShape, defaultValue) {\n const { sliceRank, numUpdates, strides, outputSize } = backend_util.calculateShapes(sparseValues, sparseIndices, outputShape);\n const sumDupeIndices = false;\n const program = new ScatterProgram(numUpdates, sliceRank, sparseIndices.rank, sparseValues.rank, strides, [outputSize, 1], sumDupeIndices);\n const res = this.compileAndRun(program, [sparseValues, sparseIndices, defaultValue]);\n return res.reshape(outputShape);\n }\n gatherND(x, indices) {\n const indicesShape = indices.shape;\n const sliceRank = indicesShape[indicesShape.length - 1];\n const [resultShape, numSlices, sliceSize, strides] = backend_util.prepareAndValidate(x, indices);\n const flattenIndices = indices.reshape([numSlices, sliceRank]);\n const flattenX = x.reshape([x.size / sliceSize, sliceSize]);\n const program = new GatherNDProgram(sliceRank, strides, [numSlices, sliceSize]);\n const res = this.compileAndRun(program, [flattenX, flattenIndices]);\n return res.reshape(resultShape);\n }\n fill(shape, value, dtype) {\n dtype = dtype || util.inferDtype(value);\n if (dtype === 'string') {\n // String type should be handled in CPU memory.\n const values = util.getArrayFromDType(dtype, util.sizeFromShape(shape));\n values.fill(value);\n return engine().makeTensor(values, shape, dtype, this);\n }\n else {\n const program = new FillProgram(shape, value);\n const customSetup = program.getCustomSetupFunc(value);\n return this.compileAndRun(program, [], dtype, customSetup);\n }\n }\n onesLike(x) {\n if (x.dtype === 'string') {\n throw new Error('onesLike is not supported under string dtype');\n }\n else {\n // TODO(cais, smilkov): Add WebGL shader for onesLike:\n // https://github.com/tensorflow/tfjs/issues/1293\n return this.fill(x.shape, 1, x.dtype);\n }\n }\n zerosLike(x) {\n return this.fill(x.shape, x.dtype === 'string' ? 
'' : 0, x.dtype);\n }\n linspace(start, stop, num) {\n // TODO: Use CPU implementation due to the precision problem in Safari.\n return backend_util.linspaceImpl(start, stop, num);\n }\n makeTensorInfo(shape, dtype, values) {\n const dataId = this.write(values, shape, dtype);\n this.texData.get(dataId).usage = null;\n return { dataId, shape, dtype };\n }\n makeOutput(shape, dtype, values) {\n const { dataId } = this.makeTensorInfo(shape, dtype, values);\n return engine().makeTensorFromDataId(dataId, shape, dtype, this);\n }\n unpackTensor(input) {\n const program = new UnpackProgram(input.shape);\n return this.runWebGLProgram(program, [input], input.dtype);\n }\n packTensor(input) {\n const program = new PackProgram(input.shape);\n const preventEagerUnpackingOutput = true;\n return this.runWebGLProgram(program, [input], input.dtype, null /* customSetup */, preventEagerUnpackingOutput);\n }\n packedReshape(input, afterShape) {\n const input3DShape = [\n webgl_util.getBatchDim(input.shape),\n ...webgl_util.getRowsCols(input.shape)\n ];\n const input3D = {\n dtype: input.dtype,\n shape: input3DShape,\n dataId: input.dataId\n };\n const afterShapeAs3D = [\n webgl_util.getBatchDim(afterShape), ...webgl_util.getRowsCols(afterShape)\n ];\n const program = new ReshapePackedProgram(afterShapeAs3D, input3DShape);\n const preventEagerUnpackingOfOutput = true;\n const output = this.runWebGLProgram(program, [input3D], input.dtype, null /* customSetup */, preventEagerUnpackingOfOutput);\n return { dataId: output.dataId, shape: afterShape, dtype: output.dtype };\n }\n decode(dataId) {\n const texData = this.texData.get(dataId);\n const { isPacked, shape, dtype } = texData;\n const shapeAs3D = webgl_util.getShapeAs3D(shape);\n let program;\n if (isPacked) {\n program = new DecodeMatrixPackedProgram(shapeAs3D);\n }\n else {\n program = new DecodeMatrixProgram(shapeAs3D);\n }\n const preventEagerUnpackingOfOutput = true;\n const out = this.runWebGLProgram(program, [{ shape: shapeAs3D, dtype, dataId }], dtype, null /* customSetup */, preventEagerUnpackingOfOutput);\n return { dtype, shape, dataId: out.dataId };\n }\n runWebGLProgram(program, inputs, outputDtype, customSetup, preventEagerUnpackingOfOutput = false) {\n const output = this.makeTensorInfo(program.outputShape, outputDtype);\n const outData = this.texData.get(output.dataId);\n if (program.packedOutput) {\n outData.isPacked = true;\n }\n if (program.outPackingScheme === tex_util.PackingScheme.DENSE) {\n const texelShape = tex_util.getDenseTexShape(program.outputShape);\n // For a densely packed output, we explicitly set texShape\n // so it doesn't get assigned later according to our typical packing\n // scheme wherein a single texel can only contain values from adjacent\n // rows/cols.\n outData.texShape = texelShape.map(d => d * 2);\n }\n if (program.outTexUsage != null) {\n outData.usage = program.outTexUsage;\n }\n if (util.sizeFromShape(output.shape) === 0) {\n // Short-circuit the computation since the result is empty (has 0 in its\n // shape).\n outData.values =\n util.getTypedArrayFromDType(output.dtype, 0);\n return output;\n }\n const dataToDispose = [];\n const inputsData = inputs.map(input => {\n if (input.dtype === 'complex64') {\n throw new Error(`GPGPUProgram does not support complex64 input. 
For complex64 ` +\n `dtypes, please separate the program into real and imaginary ` +\n `parts.`);\n }\n let texData = this.texData.get(input.dataId);\n if (texData.texture == null) {\n if (!program.packedInputs &&\n util.sizeFromShape(input.shape) <=\n env().getNumber('WEBGL_SIZE_UPLOAD_UNIFORM')) {\n // Upload small tensors that live on the CPU as uniforms, not as\n // textures. Do this only when the environment supports 32bit floats\n // due to problems when comparing 16bit floats with 32bit floats.\n // TODO(https://github.com/tensorflow/tfjs/issues/821): Make it\n // possible for packed shaders to sample from uniforms.\n return {\n shape: input.shape,\n texData: null,\n isUniform: true,\n uniformValues: texData.values\n };\n }\n // This ensures that if a packed program's inputs have not yet been\n // uploaded to the GPU, they get uploaded as packed right off the bat.\n if (program.packedInputs) {\n texData.isPacked = true;\n texData.shape = input.shape;\n }\n }\n else if (!!texData.isPacked !== !!program.packedInputs) {\n input = texData.isPacked ? this.unpackTensor(input) :\n this.packTensor(input);\n dataToDispose.push(input);\n texData = this.texData.get(input.dataId);\n }\n else if (texData.isPacked &&\n !webgl_util.isReshapeFree(texData.shape, input.shape)) {\n // This is a special case where a texture exists for a tensor\n // but the shapes are incompatible (due to packing constraints) because\n // the tensor did not have a chance to go through the packed reshape\n // shader. This only happens when we reshape the *same* tensor to form\n // *distinct* inputs to an op, e.g. dotting a vector with itself. This\n // case will disappear once packed uploading is the default.\n const savedInput = input;\n const targetShape = input.shape;\n input.shape = texData.shape;\n input = this.packedReshape(input, targetShape);\n dataToDispose.push(input);\n texData = this.texData.get(input.dataId);\n savedInput.shape = targetShape;\n }\n this.uploadToGPU(input.dataId);\n return { shape: input.shape, texData, isUniform: false };\n });\n this.uploadToGPU(output.dataId);\n const outputData = { shape: output.shape, texData: outData, isUniform: false };\n const key = gpgpu_math.makeShaderKey(program, inputsData, outputData);\n const binary = this.getAndSaveBinary(key, () => {\n return gpgpu_math.compileProgram(this.gpgpu, program, inputsData, outputData);\n });\n const shouldTimeProgram = this.activeTimers != null;\n let query;\n if (shouldTimeProgram) {\n query = this.startTimer();\n }\n gpgpu_math.runProgram(this.gpgpu, binary, inputsData, outputData, customSetup);\n dataToDispose.forEach(info => this.disposeIntermediateTensorInfo(info));\n if (shouldTimeProgram) {\n query = this.endTimer(query);\n this.activeTimers.push({ name: program.constructor.name, query: this.getQueryTime(query) });\n }\n if (!env().getBool('WEBGL_LAZILY_UNPACK') && outData.isPacked &&\n preventEagerUnpackingOfOutput === false) {\n const unpacked = this.unpackTensor(output);\n this.disposeIntermediateTensorInfo(output);\n return unpacked;\n }\n return output;\n }\n compileAndRun(program, inputs, outputDtype, customSetup, preventEagerUnpackingOfOutput = false) {\n outputDtype = outputDtype || inputs[0].dtype;\n const outInfo = this.runWebGLProgram(program, inputs, outputDtype, customSetup, preventEagerUnpackingOfOutput);\n return engine().makeTensorFromDataId(outInfo.dataId, outInfo.shape, outInfo.dtype);\n }\n getAndSaveBinary(key, getBinary) {\n if (!(key in this.binaryCache)) {\n this.binaryCache[key] = getBinary();\n }\n 
return this.binaryCache[key];\n }\n getTextureManager() {\n return this.textureManager;\n }\n dispose() {\n if (this.disposed) {\n return;\n }\n // Avoid disposing the compiled webgl programs during unit testing because\n // it slows down test execution.\n if (!env().getBool('IS_TEST')) {\n const allKeys = Object.keys(this.binaryCache);\n allKeys.forEach(key => {\n this.gpgpu.deleteProgram(this.binaryCache[key].webGLProgram);\n delete this.binaryCache[key];\n });\n }\n this.textureManager.dispose();\n if (this.canvas != null &&\n (typeof (HTMLCanvasElement) !== 'undefined' &&\n this.canvas instanceof HTMLCanvasElement)) {\n this.canvas.remove();\n }\n else {\n this.canvas = null;\n }\n if (this.gpgpuCreatedLocally) {\n this.gpgpu.program = null;\n this.gpgpu.dispose();\n }\n this.disposed = true;\n }\n floatPrecision() {\n if (this.floatPrecisionValue == null) {\n this.floatPrecisionValue = tidy(() => {\n if (!env().get('WEBGL_RENDER_FLOAT32_ENABLED')) {\n // Momentarily switching DEBUG flag to false so we don't throw an\n // error trying to upload a small value.\n const debugFlag = env().getBool('DEBUG');\n env().set('DEBUG', false);\n const underflowCheckValue = this.abs(scalar(1e-8)).dataSync()[0];\n env().set('DEBUG', debugFlag);\n if (underflowCheckValue > 0) {\n return 32;\n }\n }\n return 16;\n });\n }\n return this.floatPrecisionValue;\n }\n /** Returns the smallest representable number. */\n epsilon() {\n return this.floatPrecision() === 32 ? EPSILON_FLOAT32 : EPSILON_FLOAT16;\n }\n uploadToGPU(dataId) {\n const texData = this.texData.get(dataId);\n const { shape, dtype, values, texture, usage, isPacked } = texData;\n if (texture != null) {\n // Array is already on GPU. No-op.\n return;\n }\n const shouldTimeProgram = this.activeTimers != null;\n let start;\n if (shouldTimeProgram) {\n start = util.now();\n }\n let texShape = texData.texShape;\n if (texShape == null) {\n texShape = webgl_util.getTextureShapeFromLogicalShape(shape, isPacked);\n texData.texShape = texShape;\n }\n if (values != null) {\n const shapeAs3D = webgl_util.getShapeAs3D(shape);\n let program;\n let width = texShape[1], height = texShape[0];\n const isByteArray = values instanceof Uint8Array;\n if (isPacked) {\n [width, height] = tex_util.getPackedMatrixTextureShapeWidthHeight(texShape[0], texShape[1]);\n program = new EncodeMatrixPackedProgram(shapeAs3D, [height, width], isByteArray);\n }\n else {\n program =\n new EncodeMatrixProgram(shapeAs3D, [height, width], isByteArray);\n }\n const tempDenseInputHandle = this.makeTensorInfo([height, width], dtype);\n if (isByteArray) {\n this.texData.get(tempDenseInputHandle.dataId).usage =\n TextureUsage.PIXELS;\n }\n else {\n this.texData.get(tempDenseInputHandle.dataId).usage =\n TextureUsage.UPLOAD;\n }\n this.gpgpu.uploadDenseMatrixToTexture(this.getTexture(tempDenseInputHandle.dataId), width, height, values);\n // We want the output to remain packed regardless of the value of\n // WEBGL_PACK.\n const preventEagerUnpacking = true;\n const encodedOutputTarget = this.runWebGLProgram(program, [tempDenseInputHandle], dtype, null, preventEagerUnpacking);\n // Have the original texture assume the identity of the encoded output.\n const outputTexData = this.texData.get(encodedOutputTarget.dataId);\n texData.texture = outputTexData.texture;\n texData.texShape = outputTexData.texShape;\n texData.isPacked = outputTexData.isPacked;\n texData.usage = outputTexData.usage;\n this.disposeIntermediateTensorInfo(tempDenseInputHandle);\n 
this.texData.delete(encodedOutputTarget.dataId);\n // Once uploaded, don't store the values on cpu.\n texData.values = null;\n if (shouldTimeProgram) {\n this.uploadWaitMs += util.now() - start;\n }\n }\n else {\n const newTexture = this.acquireTexture(texShape, usage, dtype, isPacked);\n texData.texture = newTexture;\n }\n }\n convertAndCacheOnCPU(dataId, float32Values) {\n const texData = this.texData.get(dataId);\n const { dtype } = texData;\n this.releaseGPUData(dataId);\n if (float32Values != null) {\n texData.values = float32ToTypedArray(float32Values, dtype);\n }\n return texData.values;\n }\n acquireTexture(texShape, texType, dtype, isPacked) {\n this.numBytesInGPU += this.computeBytes(texShape, dtype);\n if (!this.warnedAboutMemory &&\n this.numBytesInGPU > this.numMBBeforeWarning * 1024 * 1024) {\n const mb = (this.numBytesInGPU / 1024 / 1024).toFixed(2);\n this.warnedAboutMemory = true;\n console.warn(`High memory usage in GPU: ${mb} MB, ` +\n `most likely due to a memory leak`);\n }\n return this.textureManager.acquireTexture(texShape, texType, isPacked);\n }\n computeBytes(shape, dtype) {\n return shape[0] * shape[1] * util.bytesPerElement(dtype);\n }\n tryRunOnCpuOrThrow(inputs, fn) {\n if (this.shouldExecuteOnCPU(inputs)) {\n try {\n return fn();\n }\n catch (e) {\n if (env().getBool('IS_TEST')) {\n throw new Error('CPU forwarding failed');\n }\n }\n }\n return null;\n }\n}\nfunction float32ToTypedArray(a, dtype) {\n if (dtype === 'float32' || dtype === 'complex64') {\n return a;\n }\n else if (dtype === 'int32' || dtype === 'bool') {\n const result = (dtype === 'int32') ? new Int32Array(a.length) :\n new Uint8Array(a.length);\n for (let i = 0; i < result.length; ++i) {\n result[i] = Math.round(a[i]);\n }\n return result;\n }\n else {\n throw new Error(`Unknown dtype ${dtype}`);\n }\n}\n//# sourceMappingURL=backend_webgl.js.map", "/** @license See the LICENSE file. */\n// This code is auto-generated, do not modify this file!\nconst version = '2.7.0';\nexport { version };\n//# sourceMappingURL=version.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from '@tensorflow/tfjs-core';\nimport * as gpgpu_util from './gpgpu_util';\nimport * as webgl_util from './webgl_util';\nexport { MathBackendWebGL } from './backend_webgl';\nexport { setWebGLContext } from './canvas_util';\nexport { GPGPUContext } from './gpgpu_context';\n// WebGL specific utils.\nexport { gpgpu_util, webgl_util };\n/**\n * Enforce use of half precision textures if available on the platform.\n *\n * @doc {heading: 'Environment', namespace: 'webgl'}\n */\nexport function forceHalfFloat() {\n env().set('WEBGL_FORCE_F16_TEXTURES', true);\n}\n//# sourceMappingURL=webgl.js.map", "/**\n * @license\n * Copyright 2020 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// base.ts is the webgl backend without auto kernel registration.\nimport { device_util, registerBackend } from '@tensorflow/tfjs-core';\nimport { MathBackendWebGL } from './backend_webgl';\nexport { version as version_webgl } from './version';\nif (device_util.isBrowser()) {\n registerBackend('webgl', () => new MathBackendWebGL(), 2 /* priority */);\n}\n// Export webgl utilities\nexport * from './webgl';\n// Export forceHalfFlost under webgl namespace for the union bundle.\nimport { forceHalfFloat } from './webgl';\nexport const webgl = { forceHalfFloat };\n//# sourceMappingURL=base.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Identity } from '@tensorflow/tfjs-core';\nexport function identity(args) {\n const { inputs, backend } = args;\n const { x } = inputs;\n backend.incRef(x.dataId);\n return { dataId: x.dataId, shape: x.shape, dtype: x.dtype };\n}\nexport const identityConfig = {\n kernelName: Identity,\n backendName: 'webgl',\n kernelFunc: identity\n};\n//# sourceMappingURL=Identity.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Complex } from '@tensorflow/tfjs-core';\nimport { identity } from './Identity';\n/**\n * In WebGL data is stored in GPU textures which can't be efficiently copied, so\n * complex tensors share data with their real and imaginary components. 
Complex\n * tensors increment the `complexParentRefCount` properties of the underlying\n * data buckets to prevent them from being disposed, as the engine's disposal\n * logic does not account for data sharing by complex tensors.\n *\n * When a complex tensor is disposed, it will explicitly decrease the\n * `complexParentRefCount` properties of its underlying components.\n */\nexport function complex(args) {\n const { inputs, backend } = args;\n const { real, imag } = inputs;\n const complexInfo = backend.makeTensorInfo(real.shape, 'complex64');\n const complex = backend.texData.get(complexInfo.dataId);\n const realTensorInfo = identity({ inputs: { x: real }, backend });\n const realData = backend.texData.get(realTensorInfo.dataId);\n realData.complexParentRefCount++;\n const imagTensorInfo = identity({ inputs: { x: imag }, backend });\n const imagData = backend.texData.get(imagTensorInfo.dataId);\n imagData.complexParentRefCount++;\n complex.complexTensorInfos = { real: realTensorInfo, imag: imagTensorInfo };\n return complexInfo;\n}\nexport const complexConfig = {\n kernelName: Complex,\n backendName: 'webgl',\n kernelFunc: complex\n};\n//# sourceMappingURL=Complex.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env, upcastType } from '@tensorflow/tfjs-core';\nimport { BinaryOpProgram } from '../binaryop_gpu';\nimport { BinaryOpPackedProgram } from '../binaryop_packed_gpu';\nimport { complex } from '../kernels/Complex';\nimport { UnaryOpProgram } from '../unaryop_gpu';\nexport const CHECK_NAN_SNIPPET_UNARY = `if (isnan(x)) return x;`;\nexport const CHECK_NAN_SNIPPET_BINARY = `\n if (isnan(a)) return a;\n if (isnan(b)) return b;\n`;\nexport const CHECK_NAN_SNIPPET_BINARY_PACKED = `\n result.r = isNaN.r > 0. ? NAN : result.r;\n result.g = isNaN.g > 0. ? NAN : result.g;\n result.b = isNaN.b > 0. ? NAN : result.b;\n result.a = isNaN.a > 0. ? NAN : result.a;\n`;\n/**\n * Template that creates a `KernelFunc` for unary ops.\n * @param opSnippets Op snippet to create `UnaryOpProgram`.\n */\nexport function unaryKernelFunc(opSnippet) {\n return ({ inputs, backend }) => {\n const { x } = inputs;\n const webglBackend = backend;\n const program = new UnaryOpProgram(x.shape, opSnippet);\n return webglBackend.runWebGLProgram(program, [x], x.dtype);\n };\n}\n/**\n * Template that creates a `KernelFunc` for binary ops.\n * @param opSnippet Op snippet to create `BinaryOpProgram`.\n * @param packedOpSnippet Op snippet to create `BinaryOpPackedProgram`.\n * @param checkOutOfBoundsForPackedProgram Whether to set checkOutOfBounds=true\n * when creating BinaryOpPackedProgram.\n * @param dtype Optional. If set, the result has this dtype. Otherwise, the\n * result has the same dtype as the first input. 
This is mainly used in\n * comparison kernels, such as Equal, Less, Greater, etc.\n */\nexport function binaryKernelFunc({ opSnippet, packedOpSnippet, checkOutOfBounds = false, supportsComplex = false, cpuKernelImpl, dtype }) {\n return ({ inputs, backend }) => {\n const { a, b } = inputs;\n const webglBackend = backend;\n if (supportsComplex && a.dtype === 'complex64') {\n const aData = webglBackend.texData.get(a.dataId);\n const bData = webglBackend.texData.get(b.dataId);\n const [real, imag] = [\n [aData.complexTensorInfos.real, bData.complexTensorInfos.real],\n [aData.complexTensorInfos.imag, bData.complexTensorInfos.imag]\n ].map(complexParts => {\n const [aPart, bPart] = complexParts;\n const aHandle = {\n dataId: aPart.dataId,\n dtype: aPart.dtype,\n shape: a.shape\n };\n const bHandle = {\n dataId: bPart.dataId,\n dtype: bPart.dtype,\n shape: b.shape\n };\n const program = new BinaryOpProgram(opSnippet, a.shape, b.shape);\n return webglBackend.runWebGLProgram(program, [aHandle, bHandle], upcastType(aPart.dtype, bPart.dtype));\n });\n const complexOutput = complex({ inputs: { real, imag }, backend: webglBackend });\n webglBackend.disposeIntermediateTensorInfo(real);\n webglBackend.disposeIntermediateTensorInfo(imag);\n // TODO(annxingyuan): Implement CPU forwarding for complex inputs.\n return complexOutput;\n }\n const $dtype = dtype || upcastType(a.dtype, b.dtype);\n if (webglBackend.shouldExecuteOnCPU([a, b]) && cpuKernelImpl != null) {\n const aData = webglBackend.texData.get(a.dataId);\n const bData = webglBackend.texData.get(b.dataId);\n const [outValues, outShape] = cpuKernelImpl(a.shape, b.shape, aData.values, bData.values, $dtype);\n const out = webglBackend.makeTensorInfo(outShape, $dtype);\n const outData = webglBackend.texData.get(out.dataId);\n outData.values = outValues;\n return out;\n }\n const shouldUsePackedProgram = env().getBool('WEBGL_PACK_BINARY_OPERATIONS') &&\n packedOpSnippet != null;\n let program;\n if (shouldUsePackedProgram) {\n program = new BinaryOpPackedProgram(packedOpSnippet, a.shape, b.shape, checkOutOfBounds);\n }\n else {\n program = new BinaryOpProgram(opSnippet, a.shape, b.shape);\n }\n return webglBackend.runWebGLProgram(program, [a, b], $dtype);\n };\n}\n//# sourceMappingURL=kernel_funcs_utils.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Add } from '@tensorflow/tfjs-core';\nimport { binaryKernelFunc } from '../kernel_utils/kernel_funcs_utils';\nimport { addImplCPU as cpuAdd } from '../kernel_utils/shared';\nconst ADD = 'return a + b;';\nexport const addKernelFunc = binaryKernelFunc({\n opSnippet: ADD,\n packedOpSnippet: ADD,\n supportsComplex: true,\n cpuKernelImpl: cpuAdd\n});\nexport const addConfig = {\n kernelName: Add,\n backendName: 'webgl',\n kernelFunc: addKernelFunc\n};\n//# sourceMappingURL=Add.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Atan2 } from '@tensorflow/tfjs-core';\nimport { binaryKernelFunc, CHECK_NAN_SNIPPET_BINARY, CHECK_NAN_SNIPPET_BINARY_PACKED } from '../kernel_utils/kernel_funcs_utils';\nconst ATAN2 = CHECK_NAN_SNIPPET_BINARY + `\n return atan(a, b);\n`;\nconst ATAN2_PACKED = `\n vec4 result = atan(a, b);\n vec4 isNaN = min(vec4(isnan(a)) + vec4(isnan(b)), vec4(1.0));\n ` +\n CHECK_NAN_SNIPPET_BINARY_PACKED + `\n return result;\n`;\nexport const atan2 = binaryKernelFunc({ opSnippet: ATAN2, packedOpSnippet: ATAN2_PACKED });\nexport const atan2Config = {\n kernelName: Atan2,\n backendName: 'webgl',\n kernelFunc: atan2,\n};\n//# sourceMappingURL=Atan2.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { AvgPool, backend_util, util } from '@tensorflow/tfjs-core';\nimport { Pool2DProgram } from '../pool_gpu';\nimport { assertNotComplex } from '../webgl_util';\nimport { identity } from './Identity';\nexport function avgPool(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n assertNotComplex(x, 'avgPool');\n const { filterSize, strides, pad, dimRoundingMode } = attrs;\n const dilations = 1;\n util.assert(backend_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in avgPool: Either strides or dilations must be 1. ' +\n `Got strides ${strides} and dilations '${dilations}'`);\n const convInfo = backend_util.computePool2DInfo(x.shape, filterSize, strides, dilations, pad, dimRoundingMode);\n if (convInfo.filterWidth === 1 && convInfo.filterHeight === 1 &&\n util.arraysEqual(convInfo.inShape, convInfo.outShape)) {\n return identity({ inputs: { x }, backend });\n }\n const avgPoolProgram = new Pool2DProgram(convInfo, 'avg', false);\n return backend.runWebGLProgram(avgPoolProgram, [x], 'float32');\n}\nexport const avgPoolConfig = {\n kernelName: AvgPool,\n backendName: 'webgl',\n kernelFunc: avgPool\n};\n//# sourceMappingURL=AvgPool.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { AvgPoolBackprop, backend_util } from '@tensorflow/tfjs-core';\nimport { AvgPool2DBackpropProgram } from '../avg_pool_backprop_gpu';\nimport { assertNotComplex } from '../webgl_util';\nexport function avgPoolBackprop(args) {\n const { inputs, backend, attrs } = args;\n const { dy, input } = inputs;\n const x = input;\n assertNotComplex([dy, input], 'avgPoolBackprop');\n const { filterSize, strides, pad } = attrs;\n const convInfo = backend_util.computePool2DInfo(x.shape, filterSize, strides, 1 /* dilations */, pad);\n const avgPoolBackpropProgram = new AvgPool2DBackpropProgram(convInfo);\n return backend.runWebGLProgram(avgPoolBackpropProgram, [dy], x.dtype);\n}\nexport const avgPoolBackpropConfig = {\n kernelName: AvgPoolBackprop,\n backendName: 'webgl',\n kernelFunc: avgPoolBackprop\n};\n//# sourceMappingURL=AvgPoolBackprop.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util } from '@tensorflow/tfjs-core';\nexport class BatchNormProgram {\n constructor(xShape, meanShape, varianceShape, offsetShape, scaleShape, varianceEpsilon) {\n this.outputShape = [];\n this.variableNames = ['x', 'mean', 'variance'];\n backend_util.assertAndGetBroadcastShape(xShape, meanShape);\n backend_util.assertAndGetBroadcastShape(xShape, varianceShape);\n let offsetSnippet = '0.0';\n if (offsetShape != null) {\n backend_util.assertAndGetBroadcastShape(xShape, offsetShape);\n this.variableNames.push('offset');\n offsetSnippet = 'getOffsetAtOutCoords()';\n }\n let scaleSnippet = '1.0';\n if (scaleShape != null) {\n backend_util.assertAndGetBroadcastShape(xShape, scaleShape);\n this.variableNames.push('scale');\n scaleSnippet = 'getScaleAtOutCoords()';\n }\n this.outputShape = xShape;\n this.userCode = `\n void main() {\n float x = getXAtOutCoords();\n float mean = getMeanAtOutCoords();\n float variance = getVarianceAtOutCoords();\n float offset = ${offsetSnippet};\n float scale = ${scaleSnippet};\n float inv = scale * inversesqrt(variance + float(${varianceEpsilon}));\n setOutput(dot(vec3(x, -mean, offset), vec3(inv, inv, 1)));\n }\n `;\n }\n}\n//# sourceMappingURL=batchnorm_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util } from '@tensorflow/tfjs-core';\nexport class BatchNormPackedProgram {\n constructor(xShape, meanShape, varianceShape, offsetShape, scaleShape, varianceEpsilon) {\n this.packedInputs = true;\n this.packedOutput = true;\n this.variableNames = ['x', 'mean', 'variance'];\n backend_util.assertAndGetBroadcastShape(xShape, meanShape);\n backend_util.assertAndGetBroadcastShape(xShape, varianceShape);\n let offsetSnippet = 'vec4(0.0)';\n if (offsetShape != null) {\n backend_util.assertAndGetBroadcastShape(xShape, offsetShape);\n this.variableNames.push('offset');\n offsetSnippet = 'getOffsetAtOutCoords()';\n }\n let scaleSnippet = 'vec4(1.0)';\n if (scaleShape != null) {\n backend_util.assertAndGetBroadcastShape(xShape, scaleShape);\n this.variableNames.push('scale');\n scaleSnippet = 'getScaleAtOutCoords()';\n }\n this.outputShape = xShape;\n this.userCode = `\n void main() {\n vec4 offset = ${offsetSnippet};\n vec4 scale = ${scaleSnippet};\n\n vec4 x = getXAtOutCoords();\n vec4 mean = getMeanAtOutCoords();\n vec4 variance = getVarianceAtOutCoords();\n\n vec4 inv = scale * inversesqrt(variance + vec4(${varianceEpsilon}));\n\n setOutput((x - mean) * inv + offset);\n }\n `;\n }\n}\n//# sourceMappingURL=batchnorm_packed_gpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env, FusedBatchNorm, util } from '@tensorflow/tfjs-core';\nimport { BatchNormProgram } from '../batchnorm_gpu';\nimport { BatchNormPackedProgram } from '../batchnorm_packed_gpu';\nexport const batchNorm = ({ inputs, backend, attrs }) => {\n const { x, mean, variance, offset, scale } = inputs;\n util.assert(mean.shape.length === variance.shape.length, () => 'Batch normalization gradient requires mean and variance to have ' +\n 'equal ranks.');\n util.assert(offset == null || mean.shape.length === offset.shape.length, () => 'Batch normalization gradient requires mean and offset to have ' +\n 'equal ranks.');\n util.assert(scale == null || mean.shape.length === scale.shape.length, () => 'Batch normalization gradient requires mean and scale to have ' +\n 'equal ranks.');\n let { varianceEpsilon } = attrs;\n if (varianceEpsilon == null) {\n varianceEpsilon = 0.001;\n }\n const finalInputs = [x, mean, variance];\n let offsetShape = null;\n if (offset != null) {\n offsetShape = offset.shape;\n finalInputs.push(offset);\n }\n let scaleShape = null;\n if (scale != null) {\n scaleShape = scale.shape;\n finalInputs.push(scale);\n }\n const program = env().getBool('WEBGL_PACK_NORMALIZATION') ?\n new BatchNormPackedProgram(x.shape, mean.shape, variance.shape, offsetShape, scaleShape, varianceEpsilon) :\n new BatchNormProgram(x.shape, mean.shape, variance.shape, offsetShape, scaleShape, varianceEpsilon);\n const output = backend.runWebGLProgram(program, finalInputs, finalInputs[0].dtype);\n return output;\n};\nexport const batchNormConfig = {\n kernelName: FusedBatchNorm,\n backendName: 'webgl',\n kernelFunc: batchNorm,\n};\n//# sourceMappingURL=BatchNorm.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { NotEqual } from '@tensorflow/tfjs-core';\nimport { binaryKernelFunc } from '../kernel_utils/kernel_funcs_utils';\nconst NOT_EQUAL = `return float(a != b);`;\nexport const notEqual = binaryKernelFunc({ opSnippet: NOT_EQUAL, dtype: 'bool' });\nexport const notEqualConfig = {\n kernelName: NotEqual,\n backendName: 'webgl',\n kernelFunc: notEqual,\n};\n//# sourceMappingURL=NotEqual.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Real } from '@tensorflow/tfjs-core';\nimport { identity } from './Identity';\nexport function real(args) {\n const { inputs, backend } = args;\n const { input } = inputs;\n const inputData = backend.texData.get(input.dataId);\n return identity({ inputs: { x: inputData.complexTensorInfos.real }, backend });\n}\nexport const realConfig = {\n kernelName: Real,\n backendName: 'webgl',\n kernelFunc: real\n};\n//# sourceMappingURL=Real.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { UnaryOpProgram } from '../unaryop_gpu';\nconst TO_INT = `return float(int(x));`;\nexport function int(input, backend) {\n const program = new UnaryOpProgram(input.shape, TO_INT);\n const output = backend.runWebGLProgram(program, [input], 'int32');\n return { dataId: output.dataId, shape: output.shape, dtype: output.dtype };\n}\n//# sourceMappingURL=int.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as tf from '@tensorflow/tfjs-core';\nimport { Cast, util } from '@tensorflow/tfjs-core';\nimport { complex } from './Complex';\nimport { identity } from './Identity';\nimport { notEqual } from './NotEqual';\nimport { real } from './Real';\nimport { int } from '../kernel_utils/int';\nexport function cast(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n const { dtype } = attrs;\n // Casting to complex64.\n if (dtype === 'complex64') {\n if (x.dtype === 'complex64') {\n return identity({ inputs: { x }, backend });\n }\n // TODO(annxingyuan): Import kernel function once zeros is modularized.\n const zerosTensor = tf.zeros(x.shape);\n const floatX = cast({ inputs: { x }, backend, attrs: { dtype: 'float32' } });\n const result = complex({ inputs: { real: floatX, imag: zerosTensor }, backend });\n zerosTensor.dispose();\n backend.disposeIntermediateTensorInfo(floatX);\n return result;\n }\n // Casting from complex64\n if (x.dtype === 'complex64') {\n const realPart = real({ inputs: { input: x }, backend });\n const result = cast({ inputs: { x: realPart }, backend, attrs: { dtype } });\n backend.disposeIntermediateTensorInfo(realPart);\n return result;\n }\n if (!util.hasEncodingLoss(x.dtype, dtype)) {\n // We don't change the underlying data, since we cast to higher\n // precision.\n const result = identity({ inputs: { x }, backend });\n return { dataId: result.dataId, shape: result.shape, dtype };\n }\n if (dtype === 'int32') {\n return int(x, backend);\n }\n if (dtype === 'bool') {\n const zerosTensorInfo = backend.makeTensorInfo([], 'bool', util.getTypedArrayFromDType('bool', 1));\n const binaryInputs = { a: x, b: zerosTensorInfo };\n const result = notEqual({ inputs: binaryInputs, backend });\n backend.disposeIntermediateTensorInfo(zerosTensorInfo);\n return result;\n }\n throw new Error(`Error in Cast: failed to cast ${x.dtype} to ${dtype}`);\n}\nexport const castConfig = {\n kernelName: Cast,\n backendName: 'webgl',\n kernelFunc: cast\n};\n//# sourceMappingURL=Cast.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util } from '@tensorflow/tfjs-core';\nexport class ConcatProgram {\n // Concats 2d tensors along axis=1. See comments in MathBackendWebGL.concat().\n constructor(shapes) {\n this.outputShape = [];\n this.outputShape = backend_util.computeOutShape(shapes, 1 /* axis */);\n this.variableNames = shapes.map((_, i) => `T${i}`);\n const offsets = new Array(shapes.length - 1);\n offsets[0] = shapes[0][1];\n for (let i = 1; i < offsets.length; i++) {\n offsets[i] = offsets[i - 1] + shapes[i][1];\n }\n const snippets = [`if (yC < ${offsets[0]}) setOutput(getT0(yR, yC));`];\n for (let i = 1; i < offsets.length; i++) {\n const shift = offsets[i - 1];\n snippets.push(`else if (yC < ${offsets[i]}) ` +\n `setOutput(getT${i}(yR, yC-${shift}));`);\n }\n const lastIndex = offsets.length;\n const lastShift = offsets[offsets.length - 1];\n snippets.push(`else setOutput(getT${lastIndex}(yR, yC-${lastShift}));`);\n this.userCode = `\n void main() {\n ivec2 coords = getOutputCoords();\n int yR = coords.x;\n int yC = coords.y;\n\n ${snippets.join('\\n ')}\n }\n `;\n }\n}\n//# sourceMappingURL=concat_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util } from '@tensorflow/tfjs-core';\nimport { getChannels } from './packing_util';\nimport { getCoordsDataType } from './shader_compiler';\nexport class ConcatPackedProgram {\n constructor(shapes, axis) {\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = [];\n this.outputShape = backend_util.computeOutShape(shapes, axis);\n const shape = this.outputShape;\n const rank = shape.length;\n const dtype = getCoordsDataType(rank);\n const coords = getChannels('coords', rank);\n const channels = ['x', 'y', 'z', 'w', 'u', 'v'].slice(0, rank);\n this.variableNames = shapes.map((_, i) => `T${i}`);\n const offsets = new Array(shapes.length - 1);\n offsets[0] = shapes[0][axis];\n for (let i = 1; i < offsets.length; i++) {\n offsets[i] = offsets[i - 1] + shapes[i][axis];\n }\n const channel = channels[axis];\n const lastChannels = channels.slice(-2);\n const allChannels = channels.join();\n let getValueSnippet = `if (${channel} < ${offsets[0]}) {\n return getChannel(\n getT0(${allChannels}), vec2(${lastChannels.join()}));\n }`;\n for (let i = 1; i < offsets.length; i++) {\n const shift = offsets[i - 1];\n // Note: the >= comparison below may seem unnecessary given the check\n // above but is needed to workaround branch execution issues on some\n // devices. 
It makes all the conditions exclusive without relying on\n // execution order.\n getValueSnippet += `\n if (${channel} < ${offsets[i]} && ${channel} >= ${offsets[i - 1]}) {\n return getChannel(\n getT${i}(${shiftedChannels(channels, channel, shift)}),\n vec2(${shiftedChannels(lastChannels, channel, shift)}));\n }`;\n }\n const lastIndex = offsets.length;\n const shift = offsets[offsets.length - 1];\n getValueSnippet += `\n return getChannel(\n getT${lastIndex}(${shiftedChannels(channels, channel, shift)}),\n vec2(${shiftedChannels(lastChannels, channel, shift)}));`;\n this.userCode = `\n float getValue(${channels.map(x => 'int ' + x)}) {\n ${getValueSnippet}\n }\n\n void main() {\n ${dtype} coords = getOutputCoords();\n vec4 result = vec4(getValue(${coords}), 0., 0., 0.);\n\n ${coords[rank - 1]} = ${coords[rank - 1]} + 1;\n if (${coords[rank - 1]} < ${shape[rank - 1]}) {\n result.g = getValue(${coords});\n }\n\n ${coords[rank - 2]} = ${coords[rank - 2]} + 1;\n if (${coords[rank - 2]} < ${shape[rank - 2]}) {\n result.a = getValue(${coords});\n }\n\n ${coords[rank - 1]} = ${coords[rank - 1]} - 1;\n if (${coords[rank - 2]} < ${shape[rank - 2]} &&\n ${coords[rank - 1]} < ${shape[rank - 1]}) {\n result.b = getValue(${coords});\n }\n setOutput(result);\n }\n `;\n }\n}\n/**\n * Return an expression for coordinates into a vector where a given channel\n * will be offset by [shift].\n *\n * @param channels the channels to consider\n * @param channel the channel we want shifted\n * @param shift the amount to subtract from the channel.\n *\n * @returns a string of the form 'x, y-[shift], z' where any one channel can\n * have the shift applied.\n */\nfunction shiftedChannels(channels, channel, shift) {\n const channelIdx = channels.indexOf(channel);\n const res = channels.map((c, idx) => {\n if (idx === channelIdx) {\n return `${c} - ${shift}`;\n }\n else {\n return c;\n }\n });\n return res.join();\n}\n//# sourceMappingURL=concat_packed_gpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Imag } from '@tensorflow/tfjs-core';\nimport { identity } from './Identity';\nexport function imag(args) {\n const { inputs, backend } = args;\n const { input } = inputs;\n const inputData = backend.texData.get(input.dataId);\n return identity({ inputs: { x: inputData.complexTensorInfos.imag }, backend });\n}\nexport const imagConfig = {\n kernelName: Imag,\n backendName: 'webgl',\n kernelFunc: imag\n};\n//# sourceMappingURL=Imag.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
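// Editor's note: a small usage sketch for the shiftedChannels helper defined above; its body is
// repeated here only so the snippet runs on its own, outside the bundle.
function shiftedChannels(channels, channel, shift) {
  const channelIdx = channels.indexOf(channel);
  return channels.map((c, idx) => idx === channelIdx ? `${c} - ${shift}` : c).join();
}
// Only the concatenated channel gets the shift subtracted:
// console.log(shiftedChannels(['x', 'y', 'z'], 'y', 4)); // "x,y - 4,z"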
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { ReshapePackedProgram } from '../reshape_packed_gpu';\nimport { getBatchDim, getRowsCols } from '../webgl_util';\nexport function packedReshape(input, afterShape, backend) {\n const input3DShape = [getBatchDim(input.shape),\n ...getRowsCols(input.shape)];\n const input3D = {\n dtype: input.dtype,\n shape: input3DShape,\n dataId: input.dataId\n };\n const afterShapeAs3D = [getBatchDim(afterShape),\n ...getRowsCols(afterShape)];\n const program = new ReshapePackedProgram(afterShapeAs3D, input3DShape);\n const preventEagerUnpackingOfOutput = true;\n const output = backend.runWebGLProgram(program, [input3D], input.dtype, null /* customSetup */, preventEagerUnpackingOfOutput);\n return { dataId: output.dataId, shape: afterShape, dtype: output.dtype };\n}\n//# sourceMappingURL=reshape.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Reshape, util } from '@tensorflow/tfjs-core';\nimport { packedReshape } from '../kernel_utils/reshape';\nimport { isReshapeFree } from '../webgl_util';\nexport function reshape(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n const { shape } = attrs;\n const webglBackend = backend;\n const xSize = util.sizeFromShape(x.shape);\n const $shape = util.inferFromImplicitShape(shape, xSize);\n const $xSize = util.sizeFromShape($shape);\n util.assert(xSize === $xSize, () => `The new shape (${$shape}) has ${$xSize} elements and the old ` +\n `shape (${x.shape}) has ${xSize} elements. The new shape and old ` +\n `shape must have the same number of elements.`);\n const xTexData = webglBackend.texData.get(x.dataId);\n if (xTexData.isPacked && !isReshapeFree(x.shape, $shape) &&\n !(xTexData.texture !== null && isReshapeFree(xTexData.shape, $shape))) {\n return packedReshape(x, $shape, webglBackend);\n }\n webglBackend.incRef(x.dataId);\n return { dataId: x.dataId, shape: $shape, dtype: x.dtype };\n}\nexport const reshapeConfig = {\n kernelName: Reshape,\n backendName: 'webgl',\n kernelFunc: reshape\n};\n//# sourceMappingURL=Reshape.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
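// Editor's note: a hedged sketch of how a -1 entry in a requested reshape target can be resolved
// from the input size, in the spirit of util.inferFromImplicitShape used above. `inferShape` is a
// hypothetical stand-alone helper, not the tfjs-core implementation.
function inferShape(shape, size) {
  const inferredIndex = shape.indexOf(-1);
  if (inferredIndex === -1) return shape.slice();
  const known = shape.reduce((prod, d, i) => (i === inferredIndex ? prod : prod * d), 1);
  if (size % known !== 0) throw new Error(`cannot reshape ${size} elements into ${shape}`);
  const result = shape.slice();
  result[inferredIndex] = size / known;
  return result;
}
// console.log(inferShape([-1, 4], 12)); // [3, 4]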
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, env, util } from '@tensorflow/tfjs-core';\nimport { ConcatProgram } from '../concat_gpu';\nimport { ConcatPackedProgram } from '../concat_packed_gpu';\nimport { complex } from './Complex';\nimport { imag } from './Imag';\nimport { real } from './Real';\nimport { reshape } from './Reshape';\nexport function concatImpl(inputs, axis, backend) {\n const dtype = inputs[0].dtype;\n if (dtype === 'complex64') {\n const reals = inputs.map((t) => real({ inputs: { input: t }, backend }));\n const imags = inputs.map((t) => imag({ inputs: { input: t }, backend }));\n const realConcated = concatImpl(reals, axis, backend);\n const imagConcated = concatImpl(imags, axis, backend);\n const result = complex({ inputs: { real: realConcated, imag: imagConcated }, backend });\n reals.forEach(r => backend.disposeIntermediateTensorInfo(r));\n imags.forEach(i => backend.disposeIntermediateTensorInfo(i));\n backend.disposeIntermediateTensorInfo(realConcated);\n backend.disposeIntermediateTensorInfo(imagConcated);\n return result;\n }\n if (inputs.length > env().getNumber('WEBGL_MAX_TEXTURES_IN_SHADER')) {\n const midIndex = Math.floor(inputs.length / 2);\n const leftSide = concatImpl(inputs.slice(0, midIndex), axis, backend);\n const rightSide = concatImpl(inputs.slice(midIndex), axis, backend);\n const result = concatImpl([leftSide, rightSide], axis, backend);\n backend.disposeIntermediateTensorInfo(leftSide);\n backend.disposeIntermediateTensorInfo(rightSide);\n return result;\n }\n if (env().getBool('WEBGL_PACK_ARRAY_OPERATIONS') &&\n inputs[0].shape.length > 1) {\n const program = new ConcatPackedProgram(inputs.map(t => t.shape), axis);\n return backend.runWebGLProgram(program, inputs, dtype);\n }\n // Any concat of n-dimensional tensors across any axis can be reduced to\n // a concatenation of two-dimensional tensors across the axis 1 by first\n // partitioning the axes of the original tensors into those less than the\n // axis to be concatenated and the rest. 
Then reshape the tensors\n // into a two-dimensional tensor by collapsing these two sets of axes and\n // concatenate the resulting matrices across the axis 1, finally reshaping\n // the result to have the proper shape.\n const outShape = backend_util.computeOutShape(inputs.map(t => t.shape), axis);\n const tensors2D = inputs.map(x => reshape({\n inputs: { x },\n attrs: { shape: [-1, util.sizeFromShape(x.shape.slice(axis))] },\n backend\n }));\n const program = new ConcatProgram(tensors2D.map(t => t.shape));\n const result = backend.runWebGLProgram(program, tensors2D, dtype);\n tensors2D.forEach(r => backend.disposeIntermediateTensorInfo(r));\n const reshapedResult = reshape({ inputs: { x: result }, attrs: { shape: outShape }, backend });\n backend.disposeIntermediateTensorInfo(result);\n return reshapedResult;\n}\n//# sourceMappingURL=Concat_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Concat, util } from '@tensorflow/tfjs-core';\nimport { concatImpl } from './Concat_impl';\nexport function concat(args) {\n const { inputs, backend, attrs } = args;\n const { axis } = attrs;\n const $axis = util.parseAxisParam(axis, inputs[0].shape)[0];\n const outShape = backend_util.computeOutShape(inputs.map(t => t.shape), $axis);\n if (util.sizeFromShape(outShape) === 0) {\n return backend.makeTensorInfo(outShape, inputs[0].dtype, []);\n }\n // Keep only non-empty tensors (ignore tensors with 0 in their shape).\n const $inputs = inputs.filter(t => util.sizeFromShape(t.shape) > 0);\n if ($inputs.length === 1) {\n return $inputs[0];\n }\n const shapes = $inputs.map(t => t.shape);\n backend_util.assertParamsConsistent(shapes, $axis);\n return concatImpl($inputs, $axis, backend);\n}\nexport const concatConfig = {\n kernelName: Concat,\n backendName: 'webgl',\n kernelFunc: concat\n};\n//# sourceMappingURL=Concat.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
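// Editor's note: a standalone sketch of the "collapse to 2D" trick described in the comment above:
// each input is viewed as [outerSize, innerSize], where innerSize is the product of the dimensions
// from the concat axis onward, so any concat becomes a column-wise 2D concat plus a final reshape.
function sizeFromShape(shape) {
  return shape.reduce((a, b) => a * b, 1);
}
function as2DForConcat(shape, axis) {
  const inner = sizeFromShape(shape.slice(axis));
  return [sizeFromShape(shape) / inner, inner];
}
// Concatenating [2, 3, 4] and [2, 3, 5] along axis 2 becomes a concat of [6, 4] and [6, 5]
// along axis 1, whose result is then reshaped back to [2, 3, 9]:
// console.log(as2DForConcat([2, 3, 4], 2), as2DForConcat([2, 3, 5], 2)); // [6, 4] [6, 5]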
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Cos } from '@tensorflow/tfjs-core';\nimport { CHECK_NAN_SNIPPET_UNARY, unaryKernelFunc } from '../kernel_utils/kernel_funcs_utils';\nconst COS = CHECK_NAN_SNIPPET_UNARY + `\n return cos(x);\n`;\nexport const cos = unaryKernelFunc(COS);\nexport const cosConfig = {\n kernelName: Cos,\n backendName: 'webgl',\n kernelFunc: cos,\n};\n//# sourceMappingURL=Cos.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Div } from '@tensorflow/tfjs-core';\nimport { binaryKernelFunc } from '../kernel_utils/kernel_funcs_utils';\n// Without the equality check div produces 0.9999 for a = b, which when\n// floored can cause errors.\nconst DIV = `\nif (a == b) {\n return 1.0;\n};\nreturn a / b;`;\n// We do the same as in ./binaryop_gpu, with vec4 and ivec4.\n// On Linux, the vectorized implementation produces NaNs when a and b are 0.\nconst DIV_PACKED = `\n // vec4 one = vec4(equal(a, b));\n // return one + (vec4(1.0) - one) * a / b;\n vec4 result = a / b;\n if(a.x == b.x) {\n result.x = 1.;\n }\n if(a.y == b.y) {\n result.y = 1.;\n }\n if(a.z == b.z) {\n result.z = 1.;\n }\n if(a.w == b.w) {\n result.w = 1.;\n }\n\n return result;\n`;\nexport const div = binaryKernelFunc({ opSnippet: DIV, packedOpSnippet: DIV_PACKED, checkOutOfBounds: true });\nexport const divConfig = {\n kernelName: Div,\n backendName: 'webgl',\n kernelFunc: div,\n};\n//# sourceMappingURL=Div.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
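// Editor's note: a scalar JS sketch of the equality guard used by the DIV snippet above. As the
// comment in the kernel notes, a / b can come out as 0.9999 on the GPU when a equals b, which
// breaks a later floor(); the guard short-circuits that case to exactly 1.0.
function safeDiv(a, b) {
  if (a === b) return 1.0; // avoid 0.999... results that a later floor() would truncate to 0
  return a / b;
}
// console.log(safeDiv(7, 7)); // exactly 1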
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class FFTProgram {\n constructor(component, inputShape, inverse) {\n this.variableNames = ['real', 'imag'];\n const innerDim = inputShape[1];\n this.outputShape = inputShape;\n const exponentMultiplierSnippet = inverse ? `2.0 * ${Math.PI}` : `-2.0 * ${Math.PI}`;\n const resultDenominator = inverse ? `${innerDim}.0` : '1.0';\n let opString;\n if (component === 'real') {\n opString = 'return real * expR - imag * expI;';\n }\n else if (component === 'imag') {\n opString = 'return real * expI + imag * expR;';\n }\n else {\n throw new Error(`FFT component must be either \"real\" or \"imag\", got ${component}.`);\n }\n this.userCode = `\n const float exponentMultiplier = ${exponentMultiplierSnippet};\n\n float unaryOpComplex(float real, float expR, float imag, float expI) {\n ${opString}\n }\n\n float mulMatDFT(int batch, int index) {\n float indexRatio = float(index) / float(${innerDim});\n float exponentMultiplierTimesIndexRatio =\n exponentMultiplier * indexRatio;\n\n float result = 0.0;\n\n for (int i = 0; i < ${innerDim}; i++) {\n // x = (-2|2 * PI / N) * index * i;\n float x = exponentMultiplierTimesIndexRatio * float(i);\n float expR = cos(x);\n float expI = sin(x);\n float real = getReal(batch, i);\n float imag = getImag(batch, i);\n\n result +=\n unaryOpComplex(real, expR, imag, expI) / ${resultDenominator};\n }\n\n return result;\n }\n\n void main() {\n ivec2 coords = getOutputCoords();\n setOutput(mulMatDFT(coords[0], coords[1]));\n }\n `;\n }\n}\n//# sourceMappingURL=fft_gpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
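// Editor's note: a plain JS reference for the sum the FFT shader above evaluates per output
// element. It is a naive O(n^2) DFT matching the shader's mulMatDFT loop (not a fast FFT);
// `inverse` flips the exponent sign and divides by N, exactly as resultDenominator does.
function dft(real, imag, inverse) {
  const n = real.length;
  const sign = inverse ? 2 : -2;
  const outReal = new Array(n).fill(0);
  const outImag = new Array(n).fill(0);
  for (let k = 0; k < n; k++) {
    for (let i = 0; i < n; i++) {
      const x = (sign * Math.PI * k * i) / n; // (-2|2 * PI / N) * index * i
      const expR = Math.cos(x);
      const expI = Math.sin(x);
      // (real + i*imag) * (expR + i*expI)
      outReal[k] += real[i] * expR - imag[i] * expI;
      outImag[k] += real[i] * expI + imag[i] * expR;
    }
    if (inverse) {
      outReal[k] /= n;
      outImag[k] /= n;
    }
  }
  return { real: outReal, imag: outImag };
}
// DFT of a unit impulse is a flat spectrum:
// console.log(dft([1, 0, 0, 0], [0, 0, 0, 0], false)); // real: [1, 1, 1, 1], imag: ~[0, 0, 0, 0]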
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nimport { FFTProgram } from '../fft_gpu';\nimport { complex } from './Complex';\nimport { reshape } from './Reshape';\nexport function fftImpl(x, inverse, backend) {\n const xData = backend.texData.get(x.dataId);\n const inputSize = util.sizeFromShape(x.shape);\n // Collapse all outer dimensions to a single batch dimension.\n const innerDimensionSize = x.shape[x.shape.length - 1];\n const batch = inputSize / innerDimensionSize;\n const input2D = reshape({ inputs: { x }, backend, attrs: { shape: [batch, innerDimensionSize] } });\n const xShape = input2D.shape;\n const realProgram = new FFTProgram('real', xShape, inverse);\n const imagProgram = new FFTProgram('imag', xShape, inverse);\n const inputs = [\n {\n dataId: xData.complexTensorInfos.real.dataId,\n dtype: xData.complexTensorInfos.real.dtype,\n shape: xShape\n },\n {\n dataId: xData.complexTensorInfos.imag.dataId,\n dtype: xData.complexTensorInfos.imag.dtype,\n shape: xShape\n }\n ];\n const realPart = backend.runWebGLProgram(realProgram, inputs, 'float32');\n const imagPart = backend.runWebGLProgram(imagProgram, inputs, 'float32');\n const complexOutput = complex({ inputs: { real: realPart, imag: imagPart }, backend });\n backend.disposeIntermediateTensorInfo(realPart);\n backend.disposeIntermediateTensorInfo(imagPart);\n const complexOutputReshaped = reshape({ inputs: { x: complexOutput }, backend, attrs: { shape: x.shape } });\n backend.disposeIntermediateTensorInfo(complexOutputReshaped);\n return complexOutputReshaped;\n}\n//# sourceMappingURL=FFT_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { FFT } from '@tensorflow/tfjs-core';\nimport { fftImpl } from './FFT_impl';\nexport function fft(args) {\n const { inputs, backend } = args;\n const { input } = inputs;\n return fftImpl(input, false /* inverse */, backend);\n}\nexport const fftConfig = {\n kernelName: FFT,\n backendName: 'webgl',\n kernelFunc: fft\n};\n//# sourceMappingURL=FFT.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nexport class FlipLeftRightProgram {\n constructor(imageShape) {\n this.variableNames = ['Image'];\n this.outputShape = [];\n const imageWidth = imageShape[2];\n this.outputShape = imageShape;\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int x = coords[2];\n\n int coordX = ${imageWidth} - x;\n float outputValue;\n if(coordX >= 0 && coordX < ${imageWidth}) {\n outputValue = getImage(coords[0], coords[1], coordX, coords[3]);\n } else {\n outputValue = getImage(coords[0], coords[1], coords[2], coords[3]);\n }\n setOutput(outputValue);\n }\n `;\n }\n}\n//# sourceMappingURL=flip_left_right_gpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { FlipLeftRight } from '@tensorflow/tfjs-core';\nimport { FlipLeftRightProgram } from '../flip_left_right_gpu';\nexport const flipLeftRightConfig = {\n kernelName: FlipLeftRight,\n backendName: 'webgl',\n kernelFunc: ({ inputs, backend }) => {\n const { image } = inputs;\n const webglBackend = backend;\n const program = new FlipLeftRightProgram(image.shape);\n const output = webglBackend.runWebGLProgram(program, [image], image.dtype);\n return output;\n }\n};\n//# sourceMappingURL=FlipLeftRight.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getGlslDifferences } from '../../glsl_version';\nexport class FromPixelsProgram {\n constructor(outputShape) {\n this.variableNames = ['A'];\n const glsl = getGlslDifferences();\n const [height, width,] = outputShape;\n this.outputShape = outputShape;\n this.userCode = `\n void main() {\n ivec3 coords = getOutputCoords();\n int texR = coords[0];\n int texC = coords[1];\n int depth = coords[2];\n vec2 uv = (vec2(texC, texR) + halfCR) / vec2(${width}.0, ${height}.0);\n\n vec4 values = ${glsl.texture2D}(A, uv);\n float value;\n if (depth == 0) {\n value = values.r;\n } else if (depth == 1) {\n value = values.g;\n } else if (depth == 2) {\n value = values.b;\n } else if (depth == 3) {\n value = values.a;\n }\n\n setOutput(floor(value * 255.0 + 0.5));\n }\n `;\n }\n}\n//# sourceMappingURL=from_pixels_gpu.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getGlslDifferences } from '../../glsl_version';\nexport class FromPixelsPackedProgram {\n constructor(outputShape) {\n this.variableNames = ['A'];\n this.packedInputs = false;\n this.packedOutput = true;\n const glsl = getGlslDifferences();\n const [height, width,] = outputShape;\n this.outputShape = outputShape;\n this.userCode = `\n void main() {\n ivec3 coords = getOutputCoords();\n int texR = coords[0];\n int texC = coords[1];\n int depth = coords[2];\n\n vec4 result = vec4(0.);\n\n for(int row=0; row<=1; row++) {\n for(int col=0; col<=1; col++) {\n texC = coords[1] + row;\n depth = coords[2] + col;\n\n vec2 uv = (vec2(texC, texR) + halfCR) /\n vec2(${width}.0, ${height}.0);\n vec4 values = ${glsl.texture2D}(A, uv);\n float value;\n if (depth == 0) {\n value = values.r;\n } else if (depth == 1) {\n value = values.g;\n } else if (depth == 2) {\n value = values.b;\n } else if (depth == 3) {\n value = values.a;\n }\n\n result[row * 2 + col] = floor(value * 255.0 + 0.5);\n }\n }\n\n ${glsl.output} = result;\n }\n `;\n }\n}\n//# sourceMappingURL=from_pixels_packed_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from '@tensorflow/tfjs-core';\nimport { FromPixels } from '@tensorflow/tfjs-core';\nimport { TextureUsage } from '../tex_util';\nimport { FromPixelsProgram } from './FromPixels_utils/from_pixels_gpu';\nimport { FromPixelsPackedProgram } from './FromPixels_utils/from_pixels_packed_gpu';\nexport const fromPixelsConfig = {\n kernelName: FromPixels,\n backendName: 'webgl',\n kernelFunc: fromPixels,\n};\nlet fromPixels2DContext;\nfunction fromPixels(args) {\n const { inputs, backend, attrs } = args;\n let { pixels } = inputs;\n const { numChannels } = attrs;\n const isVideo = typeof (HTMLVideoElement) !== 'undefined' &&\n pixels instanceof HTMLVideoElement;\n const isImage = typeof (HTMLImageElement) !== 'undefined' &&\n pixels instanceof HTMLImageElement;\n const [width, height] = isVideo ?\n [\n pixels.videoWidth,\n pixels.videoHeight\n ] :\n [pixels.width, pixels.height];\n const texShape = [height, width];\n const outShape = [height, width, numChannels];\n if (isImage || isVideo) {\n if (fromPixels2DContext == null) {\n fromPixels2DContext = document.createElement('canvas').getContext('2d');\n }\n fromPixels2DContext.canvas.width = width;\n fromPixels2DContext.canvas.height = height;\n fromPixels2DContext.drawImage(pixels, 0, 0, width, height);\n pixels = fromPixels2DContext.canvas;\n }\n const tempPixelHandle = backend.makeTensorInfo(texShape, 'int32');\n // This is a byte texture with pixels.\n backend.texData.get(tempPixelHandle.dataId).usage = TextureUsage.PIXELS;\n backend.gpgpu.uploadPixelDataToTexture(backend.getTexture(tempPixelHandle.dataId), pixels);\n const program = env().getBool('WEBGL_PACK') ?\n new FromPixelsPackedProgram(outShape) :\n new FromPixelsProgram(outShape);\n const res = backend.runWebGLProgram(program, [tempPixelHandle], 'int32');\n backend.disposeData(tempPixelHandle.dataId);\n return res;\n}\n//# sourceMappingURL=FromPixels.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
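// Editor's note: a hedged browser-side sketch of the canvas staging step used by fromPixels above
// for HTMLImageElement / HTMLVideoElement inputs: the frame is drawn into a 2D canvas so its pixel
// data can be uploaded as a texture. Standard DOM APIs only; the helper name is illustrative.
function stagePixels(source, width, height) {
  const ctx = document.createElement('canvas').getContext('2d');
  ctx.canvas.width = width;
  ctx.canvas.height = height;
  ctx.drawImage(source, 0, 0, width, height);
  return ctx.canvas; // this staged canvas is what gets uploaded to the GPU
}
// const video = document.querySelector('video');
// const staged = stagePixels(video, video.videoWidth, video.videoHeight);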
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { IFFT } from '@tensorflow/tfjs-core';\nimport { fftImpl } from './FFT_impl';\nexport function ifft(args) {\n const { inputs, backend } = args;\n const { input } = inputs;\n return fftImpl(input, true /* inverse */, backend);\n}\nexport const ifftConfig = {\n kernelName: IFFT,\n backendName: 'webgl',\n kernelFunc: ifft\n};\n//# sourceMappingURL=IFFT.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nexport class MeanProgram {\n constructor(reduceInfo, divisor) {\n this.variableNames = ['x'];\n const { windowSize, batchSize, inSize, outSize } = reduceInfo;\n this.outputShape = [batchSize, outSize];\n const windowSizeNearestVec4 = Math.floor(windowSize / 4) * 4;\n const windowSizeVec4Remainder = windowSize % 4;\n let updateSnippet = `sumValue += dot(values, ones);`;\n if (divisor != null) {\n const denominator = 1 / divisor;\n updateSnippet = `sumValue += dot(values * ${util.isInt(denominator) ? 
denominator.toPrecision(2) :\n denominator}, ones);`;\n }\n let checkOutOfBounds = '';\n if (inSize % windowSize > 0) {\n checkOutOfBounds = `\n if (inIdx < 0 || inIdx >= ${inSize}) {\n return 0.0;\n }\n `;\n }\n this.userCode = `\n const vec4 ones = vec4(1.0, 1.0, 1.0, 1.0);\n\n float getValue(int batch, int inIdx) {\n ${checkOutOfBounds}\n return getX(batch, inIdx);\n }\n\n void main() {\n ivec2 coords = getOutputCoords();\n int batch = coords[0];\n int outIdx = coords[1];\n int inOffset = outIdx * ${windowSize};\n\n float sumValue = 0.0;\n\n for (int i = 0; i < ${windowSizeNearestVec4}; i += 4) {\n int inIdx = inOffset + i;\n vec4 values = vec4(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1),\n getValue(batch, inIdx + 2),\n getValue(batch, inIdx + 3)\n );\n\n ${updateSnippet}\n }\n\n int inIdx = inOffset + ${windowSizeNearestVec4};\n if (${windowSizeVec4Remainder === 1}) {\n vec4 values = vec4(getValue(batch, inIdx), 0.0, 0.0, 0.0);\n\n ${updateSnippet}\n } else if (${windowSizeVec4Remainder === 2}) {\n vec4 values = vec4(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1), 0.0, 0.0);\n\n ${updateSnippet}\n } else if (${windowSizeVec4Remainder === 3}) {\n vec4 values = vec4(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1),\n getValue(batch, inIdx + 2), 0.0);\n\n ${updateSnippet}\n }\n setOutput(sumValue);\n }\n `;\n }\n}\n//# sourceMappingURL=mean_gpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util } from '@tensorflow/tfjs-core';\nimport { MeanProgram } from '../mean_gpu';\nimport { ReduceProgram } from '../reduce_gpu';\n// Returns an array of configuration objects that describe each stage of the\n// reduction.\nfunction getReductionStages(inShape) {\n const stages = [];\n while (stages.length === 0 || stages[stages.length - 1].outSize !== 1) {\n const outSize = stages.length ? 
stages[stages.length - 1].outSize : inShape[1];\n const windowSize = backend_util.computeOptimalWindowSize(outSize);\n stages.push({\n inSize: outSize,\n windowSize,\n outSize: Math.ceil(outSize / windowSize)\n });\n }\n return stages;\n}\nexport function reduce(x, dtype, reductionType, backend) {\n const reductionStages = getReductionStages(x.shape);\n let result = x;\n for (let i = 0; i < reductionStages.length; i++) {\n const { inSize, windowSize, outSize } = reductionStages[i];\n let program;\n let previousResult;\n if (reductionType === 'mean') {\n program = i === 0 ?\n new MeanProgram({ windowSize, inSize, batchSize: x.shape[0], outSize }, inSize) :\n new MeanProgram({ windowSize, inSize, batchSize: x.shape[0], outSize });\n }\n else {\n program = new ReduceProgram({ windowSize, inSize, batchSize: x.shape[0], outSize }, reductionType);\n }\n previousResult = result;\n result = backend.runWebGLProgram(program, [result], dtype);\n if (previousResult.dataId !== x.dataId) {\n backend.disposeIntermediateTensorInfo(previousResult);\n }\n }\n return result;\n}\n//# sourceMappingURL=reduce.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nimport { reduce } from '../kernel_utils/reduce';\nimport { reshape } from '../kernels/Reshape';\nexport function maxImpl(x, reduceShape, outShape, backend) {\n const inSize = util.sizeFromShape(reduceShape);\n const xSize = util.sizeFromShape(x.shape);\n const batchSize = xSize / inSize;\n const reshapedInput = reshape({ inputs: { x }, attrs: { shape: [batchSize, inSize] }, backend });\n const reduced = reduce(reshapedInput, x.dtype, 'max', backend);\n const reshapedOutput = reshape({ inputs: { x: reduced }, attrs: { shape: outShape }, backend });\n backend.disposeIntermediateTensorInfo(reshapedInput);\n backend.disposeIntermediateTensorInfo(reduced);\n return reshapedOutput;\n}\n//# sourceMappingURL=Max_impl.js.map", "/**\n * @license\n * Copyright 2017 Google LLC. 
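// Editor's note: a sketch of the staged-reduction schedule built by getReductionStages above.
// `chooseWindowSize` is a hypothetical stand-in for backend_util.computeOptimalWindowSize; the
// point is only that each stage shrinks a row from inSize to ceil(inSize / windowSize) until a
// single value per batch row remains.
function reductionSchedule(inSize, chooseWindowSize) {
  const stages = [];
  let size = inSize;
  while (stages.length === 0 || stages[stages.length - 1].outSize !== 1) {
    const windowSize = chooseWindowSize(size);
    stages.push({ inSize: size, windowSize, outSize: Math.ceil(size / windowSize) });
    size = stages[stages.length - 1].outSize;
  }
  return stages;
}
// e.g. with a fixed window of 8, reducing 1000 values takes 1000 -> 125 -> 16 -> 2 -> 1:
// console.log(reductionSchedule(1000, () => 8).map(s => s.outSize)); // [125, 16, 2, 1]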
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getCoordsDataType } from './shader_compiler';\nexport class TransposeProgram {\n constructor(aShape, newDim) {\n this.variableNames = ['A'];\n const outputShape = new Array(aShape.length);\n for (let i = 0; i < outputShape.length; i++) {\n outputShape[i] = aShape[newDim[i]];\n }\n this.outputShape = outputShape;\n this.rank = outputShape.length;\n const dtype = getCoordsDataType(this.rank);\n const switched = getSwitchedCoords(newDim);\n this.userCode = `\n void main() {\n ${dtype} resRC = getOutputCoords();\n setOutput(getA(${switched}));\n }\n `;\n }\n}\nfunction getSwitchedCoords(newDim) {\n const rank = newDim.length;\n if (rank > 6) {\n throw Error(`Transpose for rank ${rank} is not yet supported`);\n }\n const originalOrder = ['resRC.x', 'resRC.y', 'resRC.z', 'resRC.w', 'resRC.u', 'resRC.v'];\n const switchedCoords = new Array(rank);\n for (let i = 0; i < newDim.length; i++) {\n switchedCoords[newDim[i]] = originalOrder[i];\n }\n return switchedCoords.join();\n}\n//# sourceMappingURL=transpose_gpu.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getVecChannels } from './packing_util';\nimport { getCoordsDataType } from './shader_compiler';\nexport class TransposePackedProgram {\n constructor(aShape, newDim) {\n this.variableNames = ['A'];\n this.packedInputs = true;\n this.packedOutput = true;\n const outputShape = new Array(aShape.length);\n for (let i = 0; i < outputShape.length; i++) {\n outputShape[i] = aShape[newDim[i]];\n }\n this.outputShape = outputShape;\n this.rank = outputShape.length;\n if (this.rank > 6) {\n throw Error(`Packed transpose for rank ${this.rank} is not yet supported.`);\n }\n const dtype = getCoordsDataType(this.rank);\n const outputOrder = getVecChannels('rc', this.rank);\n const switchedOrder = new Array(this.rank);\n for (let i = 0; i < newDim.length; i++) {\n switchedOrder[newDim[i]] = outputOrder[i];\n }\n const innerDims = `vec2(${switchedOrder.slice(-2).join()})`;\n const nextColumn = `++${outputOrder[this.rank - 1]} < ${outputShape[this.rank - 1]}`;\n const getc = `getChannel(getA(${switchedOrder.join()}), ${innerDims})`;\n this.userCode = `\n void main() {\n ${dtype} rc = getOutputCoords();\n vec4 
result = vec4(0.);\n result[0] = ${getc};\n if(${nextColumn}) {\n result[1] = ${getc};\n }\n --${outputOrder[this.rank - 1]};\n if(++${outputOrder[this.rank - 2]} < ${outputShape[this.rank - 2]}) {\n result[2] = ${getc};\n if(${nextColumn}) {\n result[3] = ${getc};\n }\n }\n setOutput(result);\n }\n `;\n }\n}\n//# sourceMappingURL=transpose_packed_gpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env } from '@tensorflow/tfjs-core';\nimport { transposeImplCPU } from '../kernel_utils/shared';\nimport { TransposeProgram } from '../transpose_gpu';\nimport { TransposePackedProgram } from '../transpose_packed_gpu';\nexport function transposeImpl(x, perm, backend) {\n const program = env().getBool('WEBGL_PACK_ARRAY_OPERATIONS') ?\n new TransposePackedProgram(x.shape, perm) :\n new TransposeProgram(x.shape, perm);\n return backend.runWebGLProgram(program, [x], x.dtype);\n}\nexport { transposeImplCPU };\n//# sourceMappingURL=Transpose_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Max } from '@tensorflow/tfjs-core';\nimport { backend_util, util } from '@tensorflow/tfjs-core';\nimport { maxImplCPU } from '../kernel_utils/shared';\nimport { maxImpl } from './Max_impl';\nimport { transposeImpl, transposeImplCPU } from './Transpose_impl';\nexport const maxConfig = {\n kernelName: Max,\n backendName: 'webgl',\n kernelFunc: ({ inputs, attrs, backend }) => {\n const { x } = inputs;\n const { reductionIndices, keepDims } = attrs;\n const webglBackend = backend;\n const xRank = x.shape.length;\n const origAxes = util.parseAxisParam(reductionIndices, x.shape);\n let axes = origAxes;\n const permutedAxes = backend_util.getAxesPermutation(axes, xRank);\n const maxInputIsTransposed = permutedAxes != null;\n const shouldExecuteOnCPU = webglBackend.shouldExecuteOnCPU([x]);\n let maxInput = x;\n if (maxInputIsTransposed) {\n if (shouldExecuteOnCPU) {\n const xTexData = webglBackend.texData.get(maxInput.dataId);\n const values = xTexData.values;\n const newShape = new Array(xRank);\n for (let i = 0; i < newShape.length; i++) {\n newShape[i] = x.shape[permutedAxes[i]];\n }\n const maxInputValues = transposeImplCPU(values, x.shape, x.dtype, 
permutedAxes, newShape);\n maxInput = webglBackend.makeTensorInfo(newShape, x.dtype);\n const maxInputData = webglBackend.texData.get(maxInput.dataId);\n maxInputData.values = maxInputValues;\n }\n else {\n maxInput = transposeImpl(x, permutedAxes, webglBackend);\n }\n axes = backend_util.getInnerMostAxes(axes.length, xRank);\n }\n backend_util.assertAxesAreInnerMostDims('max', axes, xRank);\n const [maxOutShape, reduceShape] = backend_util.computeOutAndReduceShapes(maxInput.shape, axes);\n let outShape = maxOutShape;\n if (keepDims) {\n // rather than reshape at the end, set the target shape here.\n outShape = backend_util.expandShapeToKeepDim(maxOutShape, origAxes);\n }\n let out;\n if (shouldExecuteOnCPU) {\n const xTexData = webglBackend.texData.get(maxInput.dataId);\n const values = xTexData.values;\n const outValues = maxImplCPU(values, util.sizeFromShape(reduceShape), outShape, x.dtype);\n out = webglBackend.makeTensorInfo(outShape, x.dtype);\n const outData = webglBackend.texData.get(out.dataId);\n outData.values = outValues;\n }\n else {\n out = maxImpl(maxInput, reduceShape, outShape, webglBackend);\n }\n if (maxInputIsTransposed) {\n webglBackend.disposeIntermediateTensorInfo(maxInput);\n }\n return out;\n }\n};\n//# sourceMappingURL=Max.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, MaxPool, util } from '@tensorflow/tfjs-core';\nimport { Pool2DProgram } from '../pool_gpu';\nimport { assertNotComplex } from '../webgl_util';\nimport { identity } from './Identity';\nexport function maxPool(args) {\n const { inputs, backend, attrs } = args;\n const { x } = inputs;\n assertNotComplex(x, 'maxPool');\n const { filterSize, strides, pad, dimRoundingMode } = attrs;\n const dilations = 1;\n util.assert(backend_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in maxPool: Either strides or dilations must be 1. ' +\n `Got strides ${strides} and dilations '${dilations}'`);\n const convInfo = backend_util.computePool2DInfo(x.shape, filterSize, strides, dilations, pad, dimRoundingMode);\n if (convInfo.filterWidth === 1 && convInfo.filterHeight === 1 &&\n util.arraysEqual(convInfo.inShape, convInfo.outShape)) {\n return identity({ inputs: { x }, backend });\n }\n const maxPoolProgram = new Pool2DProgram(convInfo, 'max', false);\n return backend.runWebGLProgram(maxPoolProgram, [x], x.dtype);\n}\nexport const maxPoolConfig = {\n kernelName: MaxPool,\n backendName: 'webgl',\n kernelFunc: maxPool\n};\n//# sourceMappingURL=MaxPool.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
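// Editor's note: a standalone sketch of the axis bookkeeping used by the Max kernel above.
// `innermostPermutation` is a hypothetical helper (not backend_util.getAxesPermutation): it
// returns null when the reduced axes are already innermost, otherwise a permutation that moves
// them to the end so the reduction can run over contiguous inner dimensions. Axes assumed sorted.
function innermostPermutation(axes, rank) {
  const reduced = new Set(axes);
  const kept = [];
  for (let i = 0; i < rank; i++) if (!reduced.has(i)) kept.push(i);
  const alreadyInnermost = axes.every((axis, i) => axis === rank - axes.length + i);
  return alreadyInnermost ? null : kept.concat(axes);
}
// Reducing axis 0 of a rank-3 tensor first transposes with [1, 2, 0]:
// console.log(innermostPermutation([0], 3)); // [1, 2, 0]
// console.log(innermostPermutation([2], 3)); // null (already innermost)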
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, MaxPoolBackprop } from '@tensorflow/tfjs-core';\nimport { MaxPool2DBackpropProgram } from '../max_pool_backprop_gpu';\nimport { Pool2DProgram } from '../pool_gpu';\nimport { assertNotComplex } from '../webgl_util';\nexport function maxPoolBackprop(args) {\n const { inputs, backend, attrs } = args;\n const { dy, input, output } = inputs;\n const x = input;\n assertNotComplex([input, output], 'maxPoolBackprop');\n const { filterSize, strides, pad, dimRoundingMode } = attrs;\n const convInfo = backend_util.computePool2DInfo(x.shape, filterSize, strides, 1 /* dilations */, pad, dimRoundingMode);\n const getPositions = true;\n const maxPoolPositionsProgram = new Pool2DProgram(convInfo, 'max', getPositions);\n const maxPoolPositions = backend.runWebGLProgram(maxPoolPositionsProgram, [x], x.dtype);\n const maxPoolBackPropProgram = new MaxPool2DBackpropProgram(convInfo);\n const result = backend.runWebGLProgram(maxPoolBackPropProgram, [dy, maxPoolPositions], x.dtype);\n backend.disposeIntermediateTensorInfo(maxPoolPositions);\n return result;\n}\nexport const maxPoolBackpropConfig = {\n kernelName: MaxPoolBackprop,\n backendName: 'webgl',\n kernelFunc: maxPoolBackprop\n};\n//# sourceMappingURL=MaxPoolBackprop.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Pool2DProgram } from '../pool_gpu';\nexport function maxPoolWithArgmaxImpl(x, includeBatchInIndex, convInfo, backend) {\n let program = new Pool2DProgram(convInfo, 'max', false);\n const poolOutput = backend.runWebGLProgram(program, [x], 'float32');\n program = new Pool2DProgram(convInfo, 'max', true, true, includeBatchInIndex);\n const indexOutput = backend.runWebGLProgram(program, [x], 'float32');\n return [poolOutput, indexOutput];\n}\n//# sourceMappingURL=MaxPoolWithArgmax_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
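// Editor's note: a 1D CPU sketch of the idea behind maxPoolBackprop above: a first pass records
// the position of each window's maximum, and the incoming gradient dy is routed back to exactly
// that position. Strides, padding and the NHWC layout are simplified away here.
function maxPoolBackprop1d(x, dy, windowSize) {
  const dx = new Array(x.length).fill(0);
  for (let out = 0; out < dy.length; out++) {
    const start = out * windowSize;
    let argmax = start;
    for (let i = start + 1; i < Math.min(start + windowSize, x.length); i++) {
      if (x[i] > x[argmax]) argmax = i;
    }
    dx[argmax] += dy[out]; // only the max position receives the gradient
  }
  return dx;
}
// console.log(maxPoolBackprop1d([1, 5, 2, 4], [10, 10], 2)); // [0, 10, 0, 10]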
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { MaxPoolWithArgmax } from '@tensorflow/tfjs-core';\nimport { backend_util, util } from '@tensorflow/tfjs-core';\nimport { maxPoolWithArgmaxImpl } from './MaxPoolWithArgmax_impl';\nexport const maxPoolWithArgmaxConfig = {\n kernelName: MaxPoolWithArgmax,\n backendName: 'webgl',\n kernelFunc: ({ inputs, attrs, backend }) => {\n const { x } = inputs;\n const { filterSize, strides, pad, includeBatchInIndex } = attrs;\n const webglBackend = backend;\n util.assert(x.shape.length === 4, () => `Error in maxPool: input must be rank 4 but got rank ${x.shape.length}.`);\n const dilations = [1, 1];\n util.assert(backend_util.eitherStridesOrDilationsAreOne(strides, dilations), () => 'Error in maxPool: Either strides or dilations must be 1. ' +\n `Got strides ${strides} and dilations '${dilations}'`);\n const convInfo = backend_util.computePool2DInfo(x.shape, filterSize, strides, dilations, pad);\n const [result, indexes] = maxPoolWithArgmaxImpl(x, includeBatchInIndex, convInfo, webglBackend);\n return [result, indexes];\n }\n};\n//# sourceMappingURL=MaxPoolWithArgmax.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { util } from '@tensorflow/tfjs-core';\nimport { reduce } from '../kernel_utils/reduce';\nimport { reshape } from '../kernels/Reshape';\nexport function meanImpl(x, reduceShape, outShape, backend) {\n const inSize = util.sizeFromShape(reduceShape);\n const xSize = util.sizeFromShape(x.shape);\n const batchSize = xSize / inSize;\n const reshapedInput = reshape({ inputs: { x }, attrs: { shape: [batchSize, inSize] }, backend });\n const reduced = reduce(reshapedInput, 'float32', 'mean', backend);\n const reshapedOutput = reshape({ inputs: { x: reduced }, attrs: { shape: outShape }, backend });\n backend.disposeIntermediateTensorInfo(reshapedInput);\n backend.disposeIntermediateTensorInfo(reduced);\n return reshapedOutput;\n}\n//# sourceMappingURL=Mean_impl.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, Mean, util } from '@tensorflow/tfjs-core';\nimport { meanImpl } from './Mean_impl';\nimport { transposeImpl, transposeImplCPU } from './Transpose_impl';\nexport const meanConfig = {\n kernelName: Mean,\n backendName: 'webgl',\n kernelFunc: ({ inputs, attrs, backend }) => {\n const { x } = inputs;\n const { keepDims, axis } = attrs;\n const webglBackend = backend;\n const xRank = x.shape.length;\n const origAxes = util.parseAxisParam(axis, x.shape);\n let axes = origAxes;\n const permutedAxes = backend_util.getAxesPermutation(axes, xRank);\n const meanInputIsTransposed = permutedAxes != null;\n const shouldExecuteOnCPU = webglBackend.shouldExecuteOnCPU([x]);\n const intermediates = [];\n let meanInput = x;\n if (meanInputIsTransposed) {\n if (shouldExecuteOnCPU) {\n const xTexData = webglBackend.texData.get(meanInput.dataId);\n const values = xTexData.values;\n const newShape = new Array(xRank);\n for (let i = 0; i < newShape.length; i++) {\n newShape[i] = x.shape[permutedAxes[i]];\n }\n const meanInputValues = transposeImplCPU(values, x.shape, x.dtype, permutedAxes, newShape);\n meanInput = webglBackend.makeTensorInfo(newShape, x.dtype);\n const meanInputData = webglBackend.texData.get(meanInput.dataId);\n meanInputData.values = meanInputValues;\n }\n else {\n meanInput = transposeImpl(x, permutedAxes, webglBackend);\n }\n intermediates.push(meanInput);\n axes = backend_util.getInnerMostAxes(axes.length, xRank);\n }\n backend_util.assertAxesAreInnerMostDims('sum', axes, xRank);\n const [meanOutShape, reduceShape] = backend_util.computeOutAndReduceShapes(meanInput.shape, axes);\n let outShape = meanOutShape;\n if (keepDims) {\n // rather than reshape at the end, set the target shape here.\n outShape = backend_util.expandShapeToKeepDim(meanOutShape, origAxes);\n }\n const out = meanImpl(meanInput, reduceShape, outShape, webglBackend);\n for (const i of intermediates) {\n webglBackend.disposeIntermediateTensorInfo(i);\n }\n return out;\n }\n};\n//# sourceMappingURL=Mean.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getCoordsDataType } from './shader_compiler';\nexport class MirrorPadProgram {\n constructor(xShape, paddings, mode) {\n this.variableNames = ['x'];\n this.outputShape = paddings.map((p, i) => p[0] /* beforePad */ + xShape[i] + p[1] /* afterPad */);\n const rank = xShape.length;\n const dtype = getCoordsDataType(rank);\n const start = paddings.map(p => p[0]).join(',');\n const end = paddings.map((p, i) => p[0] + xShape[i]).join(',');\n const unpackedCoords = ['coords[0]', 'coords[1]', 'coords[2]', 'coords[3]'].slice(0, rank);\n const offset = mode === 'reflect' ? 0 : 1;\n if (rank === 1) {\n this.userCode = `\n int start = ${start};\n int end = ${end};\n\n void main() {\n int outC = getOutputCoords();\n if (outC < start) {\n outC = start * 2 - outC - ${offset};\n } else if(outC >= end) {\n outC = (end - 1) * 2 - outC + ${offset};\n }\n setOutput(getX(outC - start));\n }\n `;\n return;\n }\n this.userCode = `\n ${dtype} start = ${dtype}(${start});\n ${dtype} end = ${dtype}(${end});\n\n void main() {\n ${dtype} outC = getOutputCoords();\n for (int i = 0; i < ${rank}; i++) {\n if (outC[i] < start[i]) {\n outC[i] = start[i] * 2 - outC[i] - ${offset};\n } else if(outC[i] >= end[i]) {\n outC[i] = (end[i] - 1) * 2 - outC[i] + ${offset};\n }\n }\n ${dtype} coords = outC - start;\n setOutput(getX(${unpackedCoords}));\n }\n `;\n }\n}\n//# sourceMappingURL=mirror_pad_gpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { getChannels } from './packing_util';\nimport { getCoordsDataType } from './shader_compiler';\n/**\n * Example shader code for\n * `mirrorPad(tf.tensor1d([1, 2, 3], 'int32'), [[2, 2]], 'reflect')`\n * ```\n * const int start = int(2);\n * const int end = int(5);\n *\n * void main() {\n * int outputLoc = getOutputCoords();\n * vec4 result = vec4(0.);\n *\n * int rc = outputLoc;\n *\n * int source = rc;\n * if (source < start) {\n * source = start * 2 - source - 0;\n * } else if (source >= end) {\n * source = (end - 1) * 2 - source + 0;\n * }\n * source -= start;\n *\n * result[0] = getChannel(getX(source), source);\n * rc += 1;\n * if(rc < 6) {\n * int source = rc;\n * if (source < start) {\n * source = start * 2 - source - 0;\n * } else if (source >= end) {\n * source = (end - 1) * 2 - source + 0;\n * }\n * source -= start;\n *\n * result[1] = getChannel(getX(source), source);\n * }\n *\n * setOutput(result);\n * }\n * ```\n */\nexport class MirrorPadPackedProgram {\n constructor(xShape, paddings, mode) {\n this.variableNames = ['x'];\n this.packedInputs = true;\n this.packedOutput = true;\n this.outputShape = paddings.map((p, i) => p[0] /* beforePad */ + xShape[i] + p[1] /* afterPad */);\n const rank = xShape.length;\n const dtype = getCoordsDataType(rank);\n const start = paddings.map(p => p[0]).join(',');\n const end = paddings.map((p, i) => p[0] + xShape[i]).join(',');\n const coords = getChannels('rc', rank);\n const source = getChannels('source', rank);\n const cLimit = `${coords[rank - 1]} < ${this.outputShape[rank - 1]}`;\n const innerDims = rank === 1 ? 'source' : `vec2(${source.slice(-2).join()})`;\n const offset = mode === 'reflect' ? 
0 : 1;\n let mainLoop = '';\n if (rank === 1) {\n const padSetup = `\n ${dtype} source = rc;\n if (source < start) {\n source = start * 2 - source - ${offset};\n } else if (source >= end) {\n source = (end - 1) * 2 - source + ${offset};\n }\n source -= start;\n `;\n mainLoop = `\n ${dtype} rc = outputLoc;\n ${padSetup}\n result[0] = getChannel(getX(${source.join()}), ${innerDims});\n ${coords[rank - 1]} += 1;\n if(${cLimit}) {\n ${padSetup}\n result[1] = getChannel(getX(${source.join()}), ${innerDims});\n }\n `;\n }\n else {\n const padSetup = `\n ${dtype} source = rc;\n ${dtype} lt = ${dtype}(lessThan(source, start));\n ${dtype} gte = ${dtype}(greaterThanEqual(source, end));\n ${dtype} orig = 1 - (lt + gte);\n source = orig * source +\n lt * (start * 2 - source - ${offset}) +\n gte * ((end - 1) * 2 - source + ${offset});\n source -= start;\n `;\n mainLoop = `\n ${dtype} rc = outputLoc;\n ${padSetup}\n result[0] = getChannel(getX(${source.join()}), ${innerDims});\n ${coords[rank - 1]} += 1;\n if(${cLimit}) {\n ${padSetup}\n result[1] = getChannel(getX(${source.join()}), ${innerDims});\n }\n rc = outputLoc;\n ${coords[rank - 2]} += 1;\n if(${coords[rank - 2]} < ${this.outputShape[rank - 2]}) {\n ${padSetup}\n result[2] = getChannel(getX(${source.join()}), ${innerDims});\n ${coords[rank - 1]} += 1;\n if(${cLimit}) {\n ${padSetup}\n result[3] = getChannel(getX(${source.join()}), ${innerDims});\n }\n }\n `;\n }\n this.userCode = `\n const ${dtype} start = ${dtype}(${start});\n const ${dtype} end = ${dtype}(${end});\n\n void main() {\n ${dtype} outputLoc = getOutputCoords();\n vec4 result = vec4(0.);\n ${mainLoop}\n setOutput(result);\n }\n `;\n }\n}\n//# sourceMappingURL=mirror_pad_packed_gpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { env, MirrorPad } from '@tensorflow/tfjs-core';\nimport { MirrorPadProgram } from '../mirror_pad_gpu';\nimport { MirrorPadPackedProgram } from '../mirror_pad_packed_gpu';\nexport const mirrorPadKernelFunc = ({ inputs, backend, attrs }) => {\n const { x } = inputs;\n const { paddings, mode } = attrs;\n const program = env().getBool('WEBGL_PACK_ARRAY_OPERATIONS') ?\n new MirrorPadPackedProgram(x.shape, paddings, mode) :\n new MirrorPadProgram(x.shape, paddings, mode);\n const output = backend.runWebGLProgram(program, [x], x.dtype);\n return output;\n};\nexport const mirrorPadConfig = {\n kernelName: MirrorPad,\n backendName: 'webgl',\n kernelFunc: mirrorPadKernelFunc,\n};\n//# sourceMappingURL=MirrorPad.js.map", "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util } from '@tensorflow/tfjs-core';\n// (Ar + Ai)(Br + Bi) =\n// ArBr + ArBi + AiBr + AiBi = ArBr - AB + ArBi + AiBr\n// Yr = ArBr - AB\n// Yi = ArBi + AiBr\nexport const COMPLEX_MULTIPLY = {\n REAL: 'return areal * breal - aimag * bimag;',\n IMAG: 'return areal * bimag + aimag * breal;'\n};\nexport class BinaryOpComplexProgram {\n constructor(op, aShape, bShape) {\n this.variableNames = ['AReal', 'AImag', 'BReal', 'BImag'];\n this.outputShape = backend_util.assertAndGetBroadcastShape(aShape, bShape);\n this.userCode = `\n float binaryOpComplex(\n float areal, float aimag, float breal, float bimag) {\n ${op}\n }\n\n void main() {\n float areal = getARealAtOutCoords();\n float aimag = getAImagAtOutCoords();\n float breal = getBRealAtOutCoords();\n float bimag = getBImagAtOutCoords();\n setOutput(binaryOpComplex(areal, aimag, breal, bimag));\n }\n `;\n }\n}\n//# sourceMappingURL=binaryop_complex_gpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, env, Multiply } from '@tensorflow/tfjs-core';\nimport * as binaryop_complex_gpu from '../binaryop_complex_gpu';\nimport { BinaryOpComplexProgram } from '../binaryop_complex_gpu';\nimport { BinaryOpProgram } from '../binaryop_gpu';\nimport { BinaryOpPackedProgram } from '../binaryop_packed_gpu';\nimport { multiplyImplCPU as cpuMultiply } from '../kernel_utils/shared';\nimport { complex } from './Complex';\nconst MUL = 'return a * b;';\nexport function multiply(args) {\n const { inputs, backend } = args;\n const { a, b } = inputs;\n const dtype = backend_util.upcastType(a.dtype, b.dtype);\n if (a.dtype === 'complex64') {\n const aData = backend.texData.get(a.dataId);\n const bData = backend.texData.get(b.dataId);\n const realProgram = new BinaryOpComplexProgram(binaryop_complex_gpu.COMPLEX_MULTIPLY.REAL, a.shape, b.shape);\n const imagProgram = new BinaryOpComplexProgram(binaryop_complex_gpu.COMPLEX_MULTIPLY.IMAG, a.shape, b.shape);\n const inputs = [\n {\n dataId: aData.complexTensorInfos.real.dataId,\n dtype: aData.complexTensorInfos.real.dtype,\n shape: a.shape\n },\n {\n dataId: aData.complexTensorInfos.imag.dataId,\n dtype: aData.complexTensorInfos.imag.dtype,\n shape: 
a.shape\n },\n {\n dataId: bData.complexTensorInfos.real.dataId,\n dtype: bData.complexTensorInfos.real.dtype,\n shape: b.shape\n },\n {\n dataId: bData.complexTensorInfos.imag.dataId,\n dtype: bData.complexTensorInfos.imag.dtype,\n shape: b.shape\n }\n ];\n const realPart = backend.runWebGLProgram(realProgram, inputs, 'float32');\n const imagPart = backend.runWebGLProgram(imagProgram, inputs, 'float32');\n const complexOutput = complex({ inputs: { real: realPart, imag: imagPart }, backend });\n backend.disposeIntermediateTensorInfo(realPart);\n backend.disposeIntermediateTensorInfo(imagPart);\n // TODO(annxingyuan): CPU forwarding for complex inputs.\n return complexOutput;\n }\n if (backend.shouldExecuteOnCPU([a, b])) {\n const aData = backend.texData.get(a.dataId);\n const bData = backend.texData.get(b.dataId);\n const [outValues, outShape] = cpuMultiply(a.shape, b.shape, aData.values, bData.values, dtype);\n const out = backend.makeTensorInfo(outShape, dtype);\n const outData = backend.texData.get(out.dataId);\n outData.values = outValues;\n return out;\n }\n let program;\n if (env().getBool('WEBGL_PACK_BINARY_OPERATIONS')) {\n program = new BinaryOpPackedProgram(MUL, a.shape, b.shape);\n }\n else {\n program = new BinaryOpProgram(MUL, a.shape, b.shape);\n }\n return backend.runWebGLProgram(program, [a, b], dtype);\n}\nexport const multiplyConfig = {\n kernelName: Multiply,\n backendName: 'webgl',\n kernelFunc: multiply\n};\n//# sourceMappingURL=Multiply.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, kernel_impls, NonMaxSuppressionV3 } from '@tensorflow/tfjs-core';\nexport const nonMaxSuppressionV3Config = {\n kernelName: NonMaxSuppressionV3,\n backendName: 'webgl',\n kernelFunc: ({ inputs, backend, attrs }) => {\n backend_util.warn('tf.nonMaxSuppression() in webgl locks the UI thread. ' +\n 'Call tf.nonMaxSuppressionAsync() instead');\n const { boxes, scores } = inputs;\n const { maxOutputSize, iouThreshold, scoreThreshold } = attrs;\n const gpuBackend = backend;\n const boxesVals = gpuBackend.readSync(boxes.dataId);\n const scoresVals = gpuBackend.readSync(scores.dataId);\n const maxOutputSizeVal = maxOutputSize;\n const iouThresholdVal = iouThreshold;\n const scoreThresholdVal = scoreThreshold;\n return kernel_impls.nonMaxSuppressionV3Impl(boxesVals, scoresVals, maxOutputSizeVal, iouThresholdVal, scoreThresholdVal);\n }\n};\n//# sourceMappingURL=NonMaxSuppressionV3.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, kernel_impls, NonMaxSuppressionV4 } from '@tensorflow/tfjs-core';\nconst nonMaxSuppressionV4Impl = kernel_impls.nonMaxSuppressionV4Impl;\nexport const nonMaxSuppressionV4Config = {\n kernelName: NonMaxSuppressionV4,\n backendName: 'webgl',\n kernelFunc: ({ inputs, backend, attrs }) => {\n backend_util.warn('tf.nonMaxSuppression() in webgl locks the UI thread. ' +\n 'Call tf.nonMaxSuppressionAsync() instead');\n const { boxes, scores } = inputs;\n const { maxOutputSize, iouThreshold, scoreThreshold, padToMaxOutputSize } = attrs;\n const gpuBackend = backend;\n const boxesVals = gpuBackend.readSync(boxes.dataId);\n const scoresVals = gpuBackend.readSync(scores.dataId);\n const { selectedIndices, validOutputs } = nonMaxSuppressionV4Impl(boxesVals, scoresVals, maxOutputSize, iouThreshold, scoreThreshold, padToMaxOutputSize);\n return [selectedIndices, validOutputs];\n }\n};\n//# sourceMappingURL=NonMaxSuppressionV4.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util, kernel_impls } from '@tensorflow/tfjs-core';\nimport { NonMaxSuppressionV5 } from '@tensorflow/tfjs-core';\nconst nonMaxSuppressionV5Impl = kernel_impls.nonMaxSuppressionV5Impl;\nexport const nonMaxSuppressionV5Config = {\n kernelName: NonMaxSuppressionV5,\n backendName: 'webgl',\n kernelFunc: ({ inputs, backend, attrs }) => {\n backend_util.warn('tf.nonMaxSuppression() in webgl locks the UI thread. 
' +\n 'Call tf.nonMaxSuppressionAsync() instead');\n const { boxes, scores } = inputs;\n const { maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma } = attrs;\n const gpuBackend = backend;\n const boxesVals = gpuBackend.readSync(boxes.dataId);\n const scoresVals = gpuBackend.readSync(scores.dataId);\n const maxOutputSizeVal = maxOutputSize;\n const iouThresholdVal = iouThreshold;\n const scoreThresholdVal = scoreThreshold;\n const softNmsSigmaVal = softNmsSigma;\n const { selectedIndices, selectedScores } = nonMaxSuppressionV5Impl(boxesVals, scoresVals, maxOutputSizeVal, iouThresholdVal, scoreThresholdVal, softNmsSigmaVal);\n return [selectedIndices, selectedScores];\n }\n};\n//# sourceMappingURL=NonMaxSuppressionV5.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { backend_util } from '@tensorflow/tfjs-core';\nexport class RotateProgram {\n constructor(imageShape, radians, fillValue, center) {\n this.variableNames = ['Image'];\n this.outputShape = [];\n const imageHeight = imageShape[1];\n const imageWidth = imageShape[2];\n const sinFactor = Math.sin(radians).toFixed(3);\n const cosFactor = Math.cos(radians).toFixed(3);\n this.outputShape = imageShape;\n const [centerX, centerY] = backend_util.getImageCenter(center, imageHeight, imageWidth);\n const centerXString = centerX.toFixed(3);\n const centerYString = centerY.toFixed(3);\n let fillSnippet = '';\n if (typeof fillValue === 'number') {\n fillSnippet = `float outputValue = ${fillValue.toFixed(2)};`;\n }\n else {\n fillSnippet = `\n vec3 fill = vec3(${fillValue.join(',')});\n float outputValue = fill[coords[3]];`;\n }\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int x = coords[2];\n int y = coords[1];\n float coordXFloat = (float(x) - ${centerXString}) * ${cosFactor} - (float(y) - ${centerYString}) * ${sinFactor};\n float coordYFloat = (float(x) - ${centerXString}) * ${sinFactor} + (float(y) - ${centerYString}) * ${cosFactor};\n int coordX = int(round(coordXFloat + ${centerXString}));\n int coordY = int(round(coordYFloat + ${centerYString}));\n ${fillSnippet}\n if(coordX >= 0 && coordX < ${imageWidth} && coordY >= 0 && coordY < ${imageHeight}) {\n outputValue = getImage(coords[0], coordY, coordX, coords[3]);\n }\n setOutput(outputValue);\n }\n `;\n }\n}\n//# sourceMappingURL=rotate_gpu.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { RotateWithOffset } from '@tensorflow/tfjs-core';\nimport { RotateProgram } from '../rotate_gpu';\nexport const rotateWithOffsetConfig = {\n kernelName: RotateWithOffset,\n backendName: 'webgl',\n kernelFunc: ({ inputs, attrs, backend }) => {\n const { image } = inputs;\n const { radians, fillValue, center } = attrs;\n const webglBackend = backend;\n const program = new RotateProgram(image.shape, radians, fillValue, center);\n const output = webglBackend.runWebGLProgram(program, [image], image.dtype);\n return output;\n }\n};\n//# sourceMappingURL=RotateWithOffset.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sin } from '@tensorflow/tfjs-core';\nimport { CHECK_NAN_SNIPPET_UNARY, unaryKernelFunc } from '../kernel_utils/kernel_funcs_utils';\nconst SIN = CHECK_NAN_SNIPPET_UNARY + `\n return sin(x);\n`;\nexport const sin = unaryKernelFunc(SIN);\nexport const sinConfig = {\n kernelName: Sin,\n backendName: 'webgl',\n kernelFunc: sin,\n};\n//# sourceMappingURL=Sin.js.map", "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Square } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../kernel_utils/kernel_funcs_utils';\nconst SQUARE = `return x * x;`;\nexport const square = unaryKernelFunc(SQUARE);\nexport const squareConfig = {\n kernelName: Square,\n backendName: 'webgl',\n kernelFunc: square,\n};\n//# sourceMappingURL=Square.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { SquaredDifference } from '@tensorflow/tfjs-core';\nimport { binaryKernelFunc } from '../kernel_utils/kernel_funcs_utils';\nconst SQUARED_DIFFERENCE = 'return (a - b) * (a - b);';\nexport const squaredDifference = binaryKernelFunc({ opSnippet: SQUARED_DIFFERENCE, packedOpSnippet: SQUARED_DIFFERENCE });\nexport const squaredDifferenceConfig = {\n kernelName: SquaredDifference,\n backendName: 'webgl',\n kernelFunc: squaredDifference,\n};\n//# sourceMappingURL=SquaredDifference.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Sub } from '@tensorflow/tfjs-core';\nimport { binaryKernelFunc } from '../kernel_utils/kernel_funcs_utils';\nimport { subImplCPU as cpuSub } from '../kernel_utils/shared';\nconst SUB = 'return a - b;';\nexport const subKernelFunc = binaryKernelFunc({\n opSnippet: SUB,\n packedOpSnippet: SUB,\n supportsComplex: true,\n cpuKernelImpl: cpuSub\n});\nexport const subConfig = {\n kernelName: Sub,\n backendName: 'webgl',\n kernelFunc: subKernelFunc\n};\n//# sourceMappingURL=Sub.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Tan } from '@tensorflow/tfjs-core';\nimport { unaryKernelFunc } from '../kernel_utils/kernel_funcs_utils';\nconst TAN = `return tan(x);`;\nexport const tan = unaryKernelFunc(TAN);\nexport const tanConfig = {\n kernelName: Tan,\n backendName: 'webgl',\n kernelFunc: tan,\n};\n//# sourceMappingURL=Tan.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Transpose } from '@tensorflow/tfjs-core';\nimport { transposeImpl } from './Transpose_impl';\nimport { transposeImplCPU as cpuTranspose } from './Transpose_impl';\nexport const transposeConfig = {\n kernelName: Transpose,\n backendName: 'webgl',\n kernelFunc: ({ inputs, attrs, backend }) => {\n const { x } = inputs;\n const { perm } = attrs;\n const webglBackend = backend;\n const xRank = x.shape.length;\n const newShape = new Array(xRank);\n for (let i = 0; i < newShape.length; i++) {\n newShape[i] = x.shape[perm[i]];\n }\n let out;\n if (webglBackend.shouldExecuteOnCPU([x])) {\n const xTexData = webglBackend.texData.get(x.dataId);\n const values = xTexData.values;\n const outValues = cpuTranspose(values, x.shape, x.dtype, perm, newShape);\n out = webglBackend.makeTensorInfo(newShape, x.dtype);\n const outData = webglBackend.texData.get(out.dataId);\n outData.values = outValues;\n }\n else {\n out = transposeImpl(x, perm, webglBackend);\n }\n return out;\n }\n};\n//# sourceMappingURL=Transpose.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the License);\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an AS IS BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { Unique } from '@tensorflow/tfjs-core';\nimport { uniqueImplCPU } from '../kernel_utils/shared';\nimport { assertNotComplex } from '../webgl_util';\nexport function unique(args) {\n const { inputs, attrs, backend } = args;\n const { axis } = attrs;\n const { x } = inputs;\n assertNotComplex(x, 'unique');\n // For now, always forward calculation to the CPU backend.\n console.warn('WARNING: ', 'UI might be locked temporarily as data is being downloaded');\n const values = backend.readSync(x.dataId);\n const { outputValues, outputShape, indices } = uniqueImplCPU(values, axis, x.shape, x.dtype);\n return [\n backend.makeTensorInfo(outputShape, x.dtype, outputValues),\n backend.makeTensorInfo([indices.length], 'int32', indices),\n ];\n}\nexport const uniqueConfig = {\n kernelName: Unique,\n backendName: 'webgl',\n kernelFunc: unique,\n};\n//# sourceMappingURL=Unique.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport { registerKernel } from '@tensorflow/tfjs-core';\nimport { addConfig } from './kernels/Add';\nimport { atan2Config } from './kernels/Atan2';\nimport { avgPoolConfig } from './kernels/AvgPool';\nimport { avgPoolBackpropConfig } from './kernels/AvgPoolBackprop';\nimport { batchNormConfig } from './kernels/BatchNorm';\nimport { castConfig } from './kernels/Cast';\nimport { complexConfig } from './kernels/Complex';\nimport { concatConfig } from './kernels/Concat';\nimport { cosConfig } from './kernels/Cos';\nimport { divConfig } from './kernels/Div';\nimport { fftConfig } from './kernels/FFT';\nimport { flipLeftRightConfig } from './kernels/FlipLeftRight';\nimport { fromPixelsConfig } from './kernels/FromPixels';\nimport { identityConfig } from './kernels/Identity';\nimport { ifftConfig } from './kernels/IFFT';\nimport { imagConfig } from './kernels/Imag';\nimport { maxConfig } from './kernels/Max';\nimport { maxPoolConfig } from './kernels/MaxPool';\nimport { maxPoolBackpropConfig } from './kernels/MaxPoolBackprop';\nimport { maxPoolWithArgmaxConfig } from './kernels/MaxPoolWithArgmax';\nimport { meanConfig } from './kernels/Mean';\nimport { mirrorPadConfig } from './kernels/MirrorPad';\nimport { multiplyConfig } from './kernels/Multiply';\nimport { nonMaxSuppressionV3Config } from './kernels/NonMaxSuppressionV3';\nimport { nonMaxSuppressionV4Config } from './kernels/NonMaxSuppressionV4';\nimport { nonMaxSuppressionV5Config } from './kernels/NonMaxSuppressionV5';\nimport { notEqualConfig } from './kernels/NotEqual';\nimport { realConfig } from './kernels/Real';\nimport { reshapeConfig } from './kernels/Reshape';\nimport { rotateWithOffsetConfig } from './kernels/RotateWithOffset';\nimport { sinConfig } from './kernels/Sin';\nimport { squareConfig } from './kernels/Square';\nimport { squaredDifferenceConfig } from './kernels/SquaredDifference';\nimport { subConfig } from './kernels/Sub';\nimport { tanConfig } from './kernels/Tan';\nimport { transposeConfig } from './kernels/Transpose';\nimport { uniqueConfig } from './kernels/Unique';\n// List all kernel configs here\nconst kernelConfigs = [\n addConfig,\n atan2Config,\n avgPoolConfig,\n avgPoolBackpropConfig,\n batchNormConfig,\n castConfig,\n complexConfig,\n concatConfig,\n cosConfig,\n divConfig,\n fftConfig,\n flipLeftRightConfig,\n fromPixelsConfig,\n identityConfig,\n ifftConfig,\n imagConfig,\n maxConfig,\n maxPoolConfig,\n maxPoolBackpropConfig,\n maxPoolWithArgmaxConfig,\n meanConfig,\n mirrorPadConfig,\n multiplyConfig,\n nonMaxSuppressionV3Config,\n nonMaxSuppressionV4Config,\n nonMaxSuppressionV5Config,\n notEqualConfig,\n realConfig,\n reshapeConfig,\n rotateWithOffsetConfig,\n sinConfig,\n squareConfig,\n subConfig,\n squaredDifferenceConfig,\n tanConfig,\n transposeConfig,\n uniqueConfig,\n];\nfor (const kernelConfig of kernelConfigs) {\n 
registerKernel(kernelConfig);\n}\n//# sourceMappingURL=register_all_kernels.js.map", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// All exports from this package should be in base.\nexport * from './base';\nimport './register_all_kernels';\n//# sourceMappingURL=index.js.map", "/** @license See the LICENSE file. */\n\n// This code is auto-generated, do not modify this file!\nconst version = '2.7.0';\nexport {version};\n", "/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nexport * from '@tensorflow/tfjs-core';\nexport * from '@tensorflow/tfjs-layers';\nexport * from '@tensorflow/tfjs-converter';\n\n// Export data api as tf.data\nimport * as data from '@tensorflow/tfjs-data';\nexport {data};\n\n// Import and register backends.\nimport '@tensorflow/tfjs-backend-cpu';\nimport '@tensorflow/tfjs-backend-webgl';\n\n// Import versions of all sub-packages.\nimport {version_core} from '@tensorflow/tfjs-core';\nimport {version_cpu} from '@tensorflow/tfjs-backend-cpu';\nimport {version_webgl} from '@tensorflow/tfjs-backend-webgl';\nimport {version_data} from '@tensorflow/tfjs-data';\nimport {version_layers} from '@tensorflow/tfjs-layers';\nimport {version_converter} from '@tensorflow/tfjs-converter';\nimport {version as version_union} from './version';\n\nexport const version = {\n 'tfjs-core': version_core,\n 'tfjs-backend-cpu': version_cpu,\n 'tfjs-backend-webgl': version_webgl,\n 'tfjs-data': version_data,\n 'tfjs-layers': version_layers,\n 'tfjs-converter': version_converter,\n 'tfjs': version_union\n};\n", "", "", "", "\nvar WasmBackendModuleThreadedSimd = (function() {\n var _scriptDir = typeof document !== 'undefined' && document.currentScript ? 
document.currentScript.src : undefined;\n if (typeof __filename !== 'undefined') _scriptDir = _scriptDir || __filename;\n return (\nfunction(WasmBackendModuleThreadedSimd) {\n WasmBackendModuleThreadedSimd = WasmBackendModuleThreadedSimd || {};\n\nfunction GROWABLE_HEAP_I8(){if(wasmMemory.buffer!=buffer){updateGlobalBufferAndViews(wasmMemory.buffer)}return HEAP8}function GROWABLE_HEAP_U8(){if(wasmMemory.buffer!=buffer){updateGlobalBufferAndViews(wasmMemory.buffer)}return HEAPU8}function GROWABLE_HEAP_I32(){if(wasmMemory.buffer!=buffer){updateGlobalBufferAndViews(wasmMemory.buffer)}return HEAP32}function GROWABLE_HEAP_U32(){if(wasmMemory.buffer!=buffer){updateGlobalBufferAndViews(wasmMemory.buffer)}return HEAPU32}function GROWABLE_HEAP_F64(){if(wasmMemory.buffer!=buffer){updateGlobalBufferAndViews(wasmMemory.buffer)}return HEAPF64}var Module=typeof WasmBackendModuleThreadedSimd!==\"undefined\"?WasmBackendModuleThreadedSimd:{};var moduleOverrides={};var key;for(key in Module){if(Module.hasOwnProperty(key)){moduleOverrides[key]=Module[key]}}var arguments_=[];var thisProgram=\"./this.program\";var quit_=function(status,toThrow){throw toThrow};var ENVIRONMENT_IS_WEB=false;var ENVIRONMENT_IS_WORKER=false;var ENVIRONMENT_IS_NODE=false;var ENVIRONMENT_IS_SHELL=false;ENVIRONMENT_IS_WEB=typeof window===\"object\";ENVIRONMENT_IS_WORKER=typeof importScripts===\"function\";ENVIRONMENT_IS_NODE=typeof process===\"object\"&&typeof process.versions===\"object\"&&typeof process.versions.node===\"string\";ENVIRONMENT_IS_SHELL=!ENVIRONMENT_IS_WEB&&!ENVIRONMENT_IS_NODE&&!ENVIRONMENT_IS_WORKER;var ENVIRONMENT_IS_PTHREAD=Module[\"ENVIRONMENT_IS_PTHREAD\"]||false;if(ENVIRONMENT_IS_PTHREAD){buffer=Module[\"buffer\"];DYNAMIC_BASE=Module[\"DYNAMIC_BASE\"];DYNAMICTOP_PTR=Module[\"DYNAMICTOP_PTR\"]}var scriptDirectory=\"\";function locateFile(path){if(Module[\"locateFile\"]){return Module[\"locateFile\"](path,scriptDirectory)}return scriptDirectory+path}var read_,readAsync,readBinary,setWindowTitle;var nodeFS;var nodePath;if(ENVIRONMENT_IS_NODE){if(ENVIRONMENT_IS_WORKER){scriptDirectory=require(\"path\").dirname(scriptDirectory)+\"/\"}else{scriptDirectory=__dirname+\"/\"}read_=function shell_read(filename,binary){if(!nodeFS)nodeFS=require(\"fs\");if(!nodePath)nodePath=require(\"path\");filename=nodePath[\"normalize\"](filename);return nodeFS[\"readFileSync\"](filename,binary?null:\"utf8\")};readBinary=function readBinary(filename){var ret=read_(filename,true);if(!ret.buffer){ret=new Uint8Array(ret)}assert(ret.buffer);return ret};if(process[\"argv\"].length>1){thisProgram=process[\"argv\"][1].replace(/\\\\/g,\"/\")}arguments_=process[\"argv\"].slice(2);process[\"on\"](\"uncaughtException\",function(ex){if(!(ex instanceof ExitStatus)){throw ex}});process[\"on\"](\"unhandledRejection\",abort);quit_=function(status){process[\"exit\"](status)};Module[\"inspect\"]=function(){return\"[Emscripten Module object]\"};var nodeWorkerThreads;try{nodeWorkerThreads=require(\"worker_threads\")}catch(e){console.error('The \"worker_threads\" module is not supported in this node.js build - perhaps a newer version is needed?');throw e}Worker=nodeWorkerThreads.Worker}else if(ENVIRONMENT_IS_SHELL){if(typeof read!=\"undefined\"){read_=function shell_read(f){return read(f)}}readBinary=function readBinary(f){var data;if(typeof readbuffer===\"function\"){return new Uint8Array(readbuffer(f))}data=read(f,\"binary\");assert(typeof data===\"object\");return data};if(typeof scriptArgs!=\"undefined\"){arguments_=scriptArgs}else if(typeof 
arguments!=\"undefined\"){arguments_=arguments}if(typeof quit===\"function\"){quit_=function(status){quit(status)}}if(typeof print!==\"undefined\"){if(typeof console===\"undefined\")console={};console.log=print;console.warn=console.error=typeof printErr!==\"undefined\"?printErr:print}}else if(ENVIRONMENT_IS_WEB||ENVIRONMENT_IS_WORKER){if(ENVIRONMENT_IS_WORKER){scriptDirectory=self.location.href}else if(document.currentScript){scriptDirectory=document.currentScript.src}if(_scriptDir){scriptDirectory=_scriptDir}if(scriptDirectory.indexOf(\"blob:\")!==0){scriptDirectory=scriptDirectory.substr(0,scriptDirectory.lastIndexOf(\"/\")+1)}else{scriptDirectory=\"\"}if(ENVIRONMENT_IS_NODE){read_=function shell_read(filename,binary){if(!nodeFS)nodeFS=require(\"fs\");if(!nodePath)nodePath=require(\"path\");filename=nodePath[\"normalize\"](filename);return nodeFS[\"readFileSync\"](filename,binary?null:\"utf8\")};readBinary=function readBinary(filename){var ret=read_(filename,true);if(!ret.buffer){ret=new Uint8Array(ret)}assert(ret.buffer);return ret}}else{read_=function shell_read(url){var xhr=new XMLHttpRequest;xhr.open(\"GET\",url,false);xhr.send(null);return xhr.responseText};if(ENVIRONMENT_IS_WORKER){readBinary=function readBinary(url){var xhr=new XMLHttpRequest;xhr.open(\"GET\",url,false);xhr.responseType=\"arraybuffer\";xhr.send(null);return new Uint8Array(xhr.response)}}readAsync=function readAsync(url,onload,onerror){var xhr=new XMLHttpRequest;xhr.open(\"GET\",url,true);xhr.responseType=\"arraybuffer\";xhr.onload=function xhr_onload(){if(xhr.status==200||xhr.status==0&&xhr.response){onload(xhr.response);return}onerror()};xhr.onerror=onerror;xhr.send(null)}}setWindowTitle=function(title){document.title=title}}else{}if(ENVIRONMENT_IS_NODE){if(typeof performance===\"undefined\"){performance=require(\"perf_hooks\").performance}}var out=Module[\"print\"]||console.log.bind(console);var err=Module[\"printErr\"]||console.warn.bind(console);for(key in moduleOverrides){if(moduleOverrides.hasOwnProperty(key)){Module[key]=moduleOverrides[key]}}moduleOverrides=null;if(Module[\"arguments\"])arguments_=Module[\"arguments\"];if(Module[\"thisProgram\"])thisProgram=Module[\"thisProgram\"];if(Module[\"quit\"])quit_=Module[\"quit\"];var Atomics_load=Atomics.load;var Atomics_store=Atomics.store;var Atomics_compareExchange=Atomics.compareExchange;var wasmBinary;if(Module[\"wasmBinary\"])wasmBinary=Module[\"wasmBinary\"];var noExitRuntime;if(Module[\"noExitRuntime\"])noExitRuntime=Module[\"noExitRuntime\"];if(typeof WebAssembly!==\"object\"){err(\"no native wasm support detected\")}var wasmMemory;var wasmTable=new WebAssembly.Table({\"initial\":165,\"maximum\":165+0,\"element\":\"anyfunc\"});var wasmModule;var threadInfoStruct=0;var selfThreadId=0;var ABORT=false;var EXITSTATUS=0;function assert(condition,text){if(!condition){abort(\"Assertion failed: \"+text)}}function getCFunc(ident){var func=Module[\"_\"+ident];assert(func,\"Cannot call unknown function \"+ident+\", make sure it is exported\");return func}function ccall(ident,returnType,argTypes,args,opts){var toC={\"string\":function(str){var ret=0;if(str!==null&&str!==undefined&&str!==0){var len=(str.length<<2)+1;ret=stackAlloc(len);stringToUTF8(str,ret,len)}return ret},\"array\":function(arr){var ret=stackAlloc(arr.length);writeArrayToMemory(arr,ret);return ret}};function convertReturnValue(ret){if(returnType===\"string\")return UTF8ToString(ret);if(returnType===\"boolean\")return Boolean(ret);return ret}var func=getCFunc(ident);var cArgs=[];var 
stack=0;if(args){for(var i=0;i=endIdx)){var u0=heap[idx++];if(!u0)return str;if(!(u0&128)){str+=String.fromCharCode(u0);continue}var u1=heap[idx++]&63;if((u0&224)==192){str+=String.fromCharCode((u0&31)<<6|u1);continue}var u2=heap[idx++]&63;if((u0&240)==224){u0=(u0&15)<<12|u1<<6|u2}else{u0=(u0&7)<<18|u1<<12|u2<<6|heap[idx++]&63}if(u0<65536){str+=String.fromCharCode(u0)}else{var ch=u0-65536;str+=String.fromCharCode(55296|ch>>10,56320|ch&1023)}}return str}function UTF8ToString(ptr,maxBytesToRead){return ptr?UTF8ArrayToString(GROWABLE_HEAP_U8(),ptr,maxBytesToRead):\"\"}function stringToUTF8Array(str,heap,outIdx,maxBytesToWrite){if(!(maxBytesToWrite>0))return 0;var startIdx=outIdx;var endIdx=outIdx+maxBytesToWrite-1;for(var i=0;i=55296&&u<=57343){var u1=str.charCodeAt(++i);u=65536+((u&1023)<<10)|u1&1023}if(u<=127){if(outIdx>=endIdx)break;heap[outIdx++]=u}else if(u<=2047){if(outIdx+1>=endIdx)break;heap[outIdx++]=192|u>>6;heap[outIdx++]=128|u&63}else if(u<=65535){if(outIdx+2>=endIdx)break;heap[outIdx++]=224|u>>12;heap[outIdx++]=128|u>>6&63;heap[outIdx++]=128|u&63}else{if(outIdx+3>=endIdx)break;heap[outIdx++]=240|u>>18;heap[outIdx++]=128|u>>12&63;heap[outIdx++]=128|u>>6&63;heap[outIdx++]=128|u&63}}heap[outIdx]=0;return outIdx-startIdx}function stringToUTF8(str,outPtr,maxBytesToWrite){return stringToUTF8Array(str,GROWABLE_HEAP_U8(),outPtr,maxBytesToWrite)}function lengthBytesUTF8(str){var len=0;for(var i=0;i=55296&&u<=57343)u=65536+((u&1023)<<10)|str.charCodeAt(++i)&1023;if(u<=127)++len;else if(u<=2047)len+=2;else if(u<=65535)len+=3;else len+=4}return len}function writeArrayToMemory(array,buffer){GROWABLE_HEAP_I8().set(array,buffer)}var WASM_PAGE_SIZE=65536;function alignUp(x,multiple){if(x%multiple>0){x+=multiple-x%multiple}return x}var buffer,HEAP8,HEAPU8,HEAP16,HEAPU16,HEAP32,HEAPU32,HEAPF32,HEAPF64;function updateGlobalBufferAndViews(buf){buffer=buf;Module[\"HEAP8\"]=HEAP8=new Int8Array(buf);Module[\"HEAP16\"]=HEAP16=new Int16Array(buf);Module[\"HEAP32\"]=HEAP32=new Int32Array(buf);Module[\"HEAPU8\"]=HEAPU8=new Uint8Array(buf);Module[\"HEAPU16\"]=HEAPU16=new Uint16Array(buf);Module[\"HEAPU32\"]=HEAPU32=new Uint32Array(buf);Module[\"HEAPF32\"]=HEAPF32=new Float32Array(buf);Module[\"HEAPF64\"]=HEAPF64=new Float64Array(buf)}var STACK_BASE=5256384,STACKTOP=STACK_BASE,STACK_MAX=13504,DYNAMIC_BASE=5256384,DYNAMICTOP_PTR=12576;if(ENVIRONMENT_IS_PTHREAD){}var INITIAL_INITIAL_MEMORY=Module[\"INITIAL_MEMORY\"]||16777216;if(ENVIRONMENT_IS_PTHREAD){wasmMemory=Module[\"wasmMemory\"];buffer=Module[\"buffer\"]}else{if(Module[\"wasmMemory\"]){wasmMemory=Module[\"wasmMemory\"]}else{wasmMemory=new WebAssembly.Memory({\"initial\":INITIAL_INITIAL_MEMORY/WASM_PAGE_SIZE,\"maximum\":2147483648/WASM_PAGE_SIZE,\"shared\":true});if(!(wasmMemory.buffer instanceof SharedArrayBuffer)){err(\"requested a shared WebAssembly.Memory but the returned buffer is not a SharedArrayBuffer, indicating that while the browser has SharedArrayBuffer it does not have WebAssembly threads support - you may need to set a flag\");if(ENVIRONMENT_IS_NODE){console.log(\"(on node you may need: --experimental-wasm-threads --experimental-wasm-bulk-memory and also use a recent version)\")}throw Error(\"bad memory\")}}}if(wasmMemory){buffer=wasmMemory.buffer}INITIAL_INITIAL_MEMORY=buffer.byteLength;updateGlobalBufferAndViews(buffer);if(!ENVIRONMENT_IS_PTHREAD){GROWABLE_HEAP_I32()[DYNAMICTOP_PTR>>2]=DYNAMIC_BASE}function callRuntimeCallbacks(callbacks){while(callbacks.length>0){var callback=callbacks.shift();if(typeof 
callback==\"function\"){callback(Module);continue}var func=callback.func;if(typeof func===\"number\"){if(callback.arg===undefined){Module[\"dynCall_v\"](func)}else{Module[\"dynCall_vi\"](func,callback.arg)}}else{func(callback.arg===undefined?null:callback.arg)}}}var __ATPRERUN__=[];var __ATINIT__=[];var __ATMAIN__=[];var __ATEXIT__=[];var __ATPOSTRUN__=[];var runtimeInitialized=false;if(ENVIRONMENT_IS_PTHREAD)runtimeInitialized=true;function preRun(){if(ENVIRONMENT_IS_PTHREAD)return;if(Module[\"preRun\"]){if(typeof Module[\"preRun\"]==\"function\")Module[\"preRun\"]=[Module[\"preRun\"]];while(Module[\"preRun\"].length){addOnPreRun(Module[\"preRun\"].shift())}}callRuntimeCallbacks(__ATPRERUN__)}function initRuntime(){runtimeInitialized=true;callRuntimeCallbacks(__ATINIT__)}function preMain(){if(ENVIRONMENT_IS_PTHREAD)return;callRuntimeCallbacks(__ATMAIN__)}function postRun(){if(ENVIRONMENT_IS_PTHREAD)return;if(Module[\"postRun\"]){if(typeof Module[\"postRun\"]==\"function\")Module[\"postRun\"]=[Module[\"postRun\"]];while(Module[\"postRun\"].length){addOnPostRun(Module[\"postRun\"].shift())}}callRuntimeCallbacks(__ATPOSTRUN__)}function addOnPreRun(cb){__ATPRERUN__.unshift(cb)}function addOnPostRun(cb){__ATPOSTRUN__.unshift(cb)}var Math_ceil=Math.ceil;var Math_floor=Math.floor;var runDependencies=0;var runDependencyWatcher=null;var dependenciesFulfilled=null;function addRunDependency(id){assert(!ENVIRONMENT_IS_PTHREAD,\"addRunDependency cannot be used in a pthread worker\");runDependencies++;if(Module[\"monitorRunDependencies\"]){Module[\"monitorRunDependencies\"](runDependencies)}}function removeRunDependency(id){runDependencies--;if(Module[\"monitorRunDependencies\"]){Module[\"monitorRunDependencies\"](runDependencies)}if(runDependencies==0){if(runDependencyWatcher!==null){clearInterval(runDependencyWatcher);runDependencyWatcher=null}if(dependenciesFulfilled){var callback=dependenciesFulfilled;dependenciesFulfilled=null;callback()}}}Module[\"preloadedImages\"]={};Module[\"preloadedAudios\"]={};function abort(what){if(Module[\"onAbort\"]){Module[\"onAbort\"](what)}if(ENVIRONMENT_IS_PTHREAD)console.error(\"Pthread aborting at \"+(new Error).stack);what+=\"\";out(what);err(what);ABORT=true;EXITSTATUS=1;what=\"abort(\"+what+\"). 
Build with -s ASSERTIONS=1 for more info.\";throw new WebAssembly.RuntimeError(what)}function hasPrefix(str,prefix){return String.prototype.startsWith?str.startsWith(prefix):str.indexOf(prefix)===0}var dataURIPrefix=\"data:application/octet-stream;base64,\";function isDataURI(filename){return hasPrefix(filename,dataURIPrefix)}var fileURIPrefix=\"file://\";function isFileURI(filename){return hasPrefix(filename,fileURIPrefix)}var wasmBinaryFile=\"tfjs-backend-wasm-threaded-simd.wasm\";if(!isDataURI(wasmBinaryFile)){wasmBinaryFile=locateFile(wasmBinaryFile)}function getBinary(){try{if(wasmBinary){return new Uint8Array(wasmBinary)}if(readBinary){return readBinary(wasmBinaryFile)}else{throw\"both async and sync fetching of the wasm failed\"}}catch(err){abort(err)}}function getBinaryPromise(){if(!wasmBinary&&(ENVIRONMENT_IS_WEB||ENVIRONMENT_IS_WORKER)&&typeof fetch===\"function\"&&!isFileURI(wasmBinaryFile)){return fetch(wasmBinaryFile,{credentials:\"same-origin\"}).then(function(response){if(!response[\"ok\"]){throw\"failed to load wasm binary file at '\"+wasmBinaryFile+\"'\"}return response[\"arrayBuffer\"]()}).catch(function(){return getBinary()})}return new Promise(function(resolve,reject){resolve(getBinary())})}function createWasm(){var info={\"a\":asmLibraryArg};function receiveInstance(instance,module){var exports=instance.exports;Module[\"asm\"]=exports;wasmModule=module;if(!ENVIRONMENT_IS_PTHREAD){var numWorkersToLoad=PThread.unusedWorkers.length;PThread.unusedWorkers.forEach(function(w){PThread.loadWasmModuleToWorker(w,function(){if(!--numWorkersToLoad)removeRunDependency(\"wasm-instantiate\")})})}}if(!ENVIRONMENT_IS_PTHREAD){addRunDependency(\"wasm-instantiate\")}function receiveInstantiatedSource(output){receiveInstance(output[\"instance\"],output[\"module\"])}function instantiateArrayBuffer(receiver){return getBinaryPromise().then(function(binary){return WebAssembly.instantiate(binary,info)}).then(receiver,function(reason){err(\"failed to asynchronously prepare wasm: \"+reason);abort(reason)})}function instantiateAsync(){if(!wasmBinary&&typeof WebAssembly.instantiateStreaming===\"function\"&&!isDataURI(wasmBinaryFile)&&!isFileURI(wasmBinaryFile)&&typeof fetch===\"function\"){fetch(wasmBinaryFile,{credentials:\"same-origin\"}).then(function(response){var result=WebAssembly.instantiateStreaming(response,info);return result.then(receiveInstantiatedSource,function(reason){err(\"wasm streaming compile failed: \"+reason);err(\"falling back to ArrayBuffer instantiation\");instantiateArrayBuffer(receiveInstantiatedSource)})})}else{return instantiateArrayBuffer(receiveInstantiatedSource)}}if(Module[\"instantiateWasm\"]){try{var exports=Module[\"instantiateWasm\"](info,receiveInstance);return exports}catch(e){err(\"Module.instantiateWasm callback failed with error: \"+e);return false}}instantiateAsync();return{}}var ASM_CONSTS={};function initPthreadsJS(){PThread.initRuntime()}if(!ENVIRONMENT_IS_PTHREAD)__ATINIT__.push({func:function(){___wasm_call_ctors()}});var __pthread_ptr=0;var __pthread_is_main_runtime_thread=0;var __pthread_is_main_browser_thread=0;function __register_pthread_ptr(pthreadPtr,isMainBrowserThread,isMainRuntimeThread){pthreadPtr=pthreadPtr|0;isMainBrowserThread=isMainBrowserThread|0;isMainRuntimeThread=isMainRuntimeThread|0;__pthread_ptr=pthreadPtr;__pthread_is_main_browser_thread=isMainBrowserThread;__pthread_is_main_runtime_thread=isMainRuntimeThread}Module[\"__register_pthread_ptr\"]=__register_pthread_ptr;var 
ERRNO_CODES={EPERM:63,ENOENT:44,ESRCH:71,EINTR:27,EIO:29,ENXIO:60,E2BIG:1,ENOEXEC:45,EBADF:8,ECHILD:12,EAGAIN:6,EWOULDBLOCK:6,ENOMEM:48,EACCES:2,EFAULT:21,ENOTBLK:105,EBUSY:10,EEXIST:20,EXDEV:75,ENODEV:43,ENOTDIR:54,EISDIR:31,EINVAL:28,ENFILE:41,EMFILE:33,ENOTTY:59,ETXTBSY:74,EFBIG:22,ENOSPC:51,ESPIPE:70,EROFS:69,EMLINK:34,EPIPE:64,EDOM:18,ERANGE:68,ENOMSG:49,EIDRM:24,ECHRNG:106,EL2NSYNC:156,EL3HLT:107,EL3RST:108,ELNRNG:109,EUNATCH:110,ENOCSI:111,EL2HLT:112,EDEADLK:16,ENOLCK:46,EBADE:113,EBADR:114,EXFULL:115,ENOANO:104,EBADRQC:103,EBADSLT:102,EDEADLOCK:16,EBFONT:101,ENOSTR:100,ENODATA:116,ETIME:117,ENOSR:118,ENONET:119,ENOPKG:120,EREMOTE:121,ENOLINK:47,EADV:122,ESRMNT:123,ECOMM:124,EPROTO:65,EMULTIHOP:36,EDOTDOT:125,EBADMSG:9,ENOTUNIQ:126,EBADFD:127,EREMCHG:128,ELIBACC:129,ELIBBAD:130,ELIBSCN:131,ELIBMAX:132,ELIBEXEC:133,ENOSYS:52,ENOTEMPTY:55,ENAMETOOLONG:37,ELOOP:32,EOPNOTSUPP:138,EPFNOSUPPORT:139,ECONNRESET:15,ENOBUFS:42,EAFNOSUPPORT:5,EPROTOTYPE:67,ENOTSOCK:57,ENOPROTOOPT:50,ESHUTDOWN:140,ECONNREFUSED:14,EADDRINUSE:3,ECONNABORTED:13,ENETUNREACH:40,ENETDOWN:38,ETIMEDOUT:73,EHOSTDOWN:142,EHOSTUNREACH:23,EINPROGRESS:26,EALREADY:7,EDESTADDRREQ:17,EMSGSIZE:35,EPROTONOSUPPORT:66,ESOCKTNOSUPPORT:137,EADDRNOTAVAIL:4,ENETRESET:39,EISCONN:30,ENOTCONN:53,ETOOMANYREFS:141,EUSERS:136,EDQUOT:19,ESTALE:72,ENOTSUP:138,ENOMEDIUM:148,EILSEQ:25,EOVERFLOW:61,ECANCELED:11,ENOTRECOVERABLE:56,EOWNERDEAD:62,ESTRPIPE:135};var __main_thread_futex_wait_address=13488;function _emscripten_futex_wake(addr,count){if(addr<=0||addr>GROWABLE_HEAP_I8().length||addr&3!=0||count<0)return-28;if(count==0)return 0;if(count>=2147483647)count=Infinity;var mainThreadWaitAddress=Atomics.load(GROWABLE_HEAP_I32(),__main_thread_futex_wait_address>>2);var mainThreadWoken=0;if(mainThreadWaitAddress==addr){var loadedAddr=Atomics.compareExchange(GROWABLE_HEAP_I32(),__main_thread_futex_wait_address>>2,mainThreadWaitAddress,0);if(loadedAddr==mainThreadWaitAddress){--count;mainThreadWoken=1;if(count<=0)return 1}}var ret=Atomics.notify(GROWABLE_HEAP_I32(),addr>>2,count);if(ret>=0)return ret+mainThreadWoken;throw\"Atomics.notify returned an unexpected value \"+ret}Module[\"_emscripten_futex_wake\"]=_emscripten_futex_wake;function __kill_thread(pthread_ptr){if(ENVIRONMENT_IS_PTHREAD)throw\"Internal Error! _kill_thread() can only ever be called from main application thread!\";if(!pthread_ptr)throw\"Internal Error! Null pthread_ptr in _kill_thread!\";GROWABLE_HEAP_I32()[pthread_ptr+12>>2]=0;var pthread=PThread.pthreads[pthread_ptr];pthread.worker.terminate();PThread.freeThreadData(pthread);PThread.runningWorkers.splice(PThread.runningWorkers.indexOf(pthread.worker),1);pthread.worker.pthread=undefined}function __cancel_thread(pthread_ptr){if(ENVIRONMENT_IS_PTHREAD)throw\"Internal Error! _cancel_thread() can only ever be called from main application thread!\";if(!pthread_ptr)throw\"Internal Error! Null pthread_ptr in _cancel_thread!\";var pthread=PThread.pthreads[pthread_ptr];pthread.worker.postMessage({\"cmd\":\"cancel\"})}function __cleanup_thread(pthread_ptr){if(ENVIRONMENT_IS_PTHREAD)throw\"Internal Error! _cleanup_thread() can only ever be called from main application thread!\";if(!pthread_ptr)throw\"Internal Error! 
Null pthread_ptr in _cleanup_thread!\";GROWABLE_HEAP_I32()[pthread_ptr+12>>2]=0;var pthread=PThread.pthreads[pthread_ptr];if(pthread){var worker=pthread.worker;PThread.returnWorkerToPool(worker)}}var PThread={MAIN_THREAD_ID:1,mainThreadInfo:{schedPolicy:0,schedPrio:0},unusedWorkers:[],runningWorkers:[],initRuntime:function(){__register_pthread_ptr(PThread.mainThreadBlock,!ENVIRONMENT_IS_WORKER,1);_emscripten_register_main_browser_thread_id(PThread.mainThreadBlock)},initMainThreadBlock:function(){var pthreadPoolSize=8;for(var i=0;i>2]=PThread.mainThreadBlock;var headPtr=PThread.mainThreadBlock+156;GROWABLE_HEAP_I32()[headPtr>>2]=headPtr;var tlsMemory=12976;for(var i=0;i<128;++i)GROWABLE_HEAP_U32()[tlsMemory/4+i]=0;Atomics.store(GROWABLE_HEAP_U32(),PThread.mainThreadBlock+104>>2,tlsMemory);Atomics.store(GROWABLE_HEAP_U32(),PThread.mainThreadBlock+40>>2,PThread.mainThreadBlock);Atomics.store(GROWABLE_HEAP_U32(),PThread.mainThreadBlock+44>>2,42)},initWorker:function(){},pthreads:{},exitHandlers:null,setThreadStatus:function(){},runExitHandlers:function(){if(PThread.exitHandlers!==null){while(PThread.exitHandlers.length>0){PThread.exitHandlers.pop()()}PThread.exitHandlers=null}if(ENVIRONMENT_IS_PTHREAD&&threadInfoStruct)___pthread_tsd_run_dtors()},threadExit:function(exitCode){var tb=_pthread_self();if(tb){Atomics.store(GROWABLE_HEAP_U32(),tb+4>>2,exitCode);Atomics.store(GROWABLE_HEAP_U32(),tb+0>>2,1);Atomics.store(GROWABLE_HEAP_U32(),tb+60>>2,1);Atomics.store(GROWABLE_HEAP_U32(),tb+64>>2,0);PThread.runExitHandlers();_emscripten_futex_wake(tb+0,2147483647);__register_pthread_ptr(0,0,0);threadInfoStruct=0;if(ENVIRONMENT_IS_PTHREAD){postMessage({\"cmd\":\"exit\"})}}},threadCancel:function(){PThread.runExitHandlers();Atomics.store(GROWABLE_HEAP_U32(),threadInfoStruct+4>>2,-1);Atomics.store(GROWABLE_HEAP_U32(),threadInfoStruct+0>>2,1);_emscripten_futex_wake(threadInfoStruct+0,2147483647);threadInfoStruct=selfThreadId=0;__register_pthread_ptr(0,0,0);postMessage({\"cmd\":\"cancelDone\"})},terminateAllThreads:function(){for(var t in PThread.pthreads){var pthread=PThread.pthreads[t];if(pthread&&pthread.worker){PThread.returnWorkerToPool(pthread.worker)}}PThread.pthreads={};for(var i=0;i>2];GROWABLE_HEAP_I32()[pthread.threadInfoStruct+104>>2]=0;_free(tlsMemory);_free(pthread.threadInfoStruct)}pthread.threadInfoStruct=0;if(pthread.allocatedOwnStack&&pthread.stackBase)_free(pthread.stackBase);pthread.stackBase=0;if(pthread.worker)pthread.worker.pthread=null},returnWorkerToPool:function(worker){delete PThread.pthreads[worker.pthread.thread];PThread.unusedWorkers.push(worker);PThread.runningWorkers.splice(PThread.runningWorkers.indexOf(worker),1);PThread.freeThreadData(worker.pthread);worker.pthread=undefined},receiveObjectTransfer:function(data){},loadWasmModuleToWorker:function(worker,onFinishedLoading){worker.onmessage=function(e){var d=e[\"data\"];var cmd=d[\"cmd\"];if(worker.pthread)PThread.currentProxiedOperationCallerThread=worker.pthread.threadInfoStruct;if(d[\"targetThread\"]&&d[\"targetThread\"]!=_pthread_self()){var thread=PThread.pthreads[d.targetThread];if(thread){thread.worker.postMessage(e.data,d[\"transferList\"])}else{console.error('Internal error! 
Worker sent a message \"'+cmd+'\" to target pthread '+d[\"targetThread\"]+\", but that thread no longer exists!\")}PThread.currentProxiedOperationCallerThread=undefined;return}if(cmd===\"processQueuedMainThreadWork\"){_emscripten_main_thread_process_queued_calls()}else if(cmd===\"spawnThread\"){__spawn_thread(e.data)}else if(cmd===\"cleanupThread\"){__cleanup_thread(d[\"thread\"])}else if(cmd===\"killThread\"){__kill_thread(d[\"thread\"])}else if(cmd===\"cancelThread\"){__cancel_thread(d[\"thread\"])}else if(cmd===\"loaded\"){worker.loaded=true;if(onFinishedLoading)onFinishedLoading(worker);if(worker.runPthread){worker.runPthread();delete worker.runPthread}}else if(cmd===\"print\"){out(\"Thread \"+d[\"threadId\"]+\": \"+d[\"text\"])}else if(cmd===\"printErr\"){err(\"Thread \"+d[\"threadId\"]+\": \"+d[\"text\"])}else if(cmd===\"alert\"){alert(\"Thread \"+d[\"threadId\"]+\": \"+d[\"text\"])}else if(cmd===\"exit\"){var detached=worker.pthread&&Atomics.load(GROWABLE_HEAP_U32(),worker.pthread.thread+68>>2);if(detached){PThread.returnWorkerToPool(worker)}}else if(cmd===\"cancelDone\"){PThread.returnWorkerToPool(worker)}else if(cmd===\"objectTransfer\"){PThread.receiveObjectTransfer(e.data)}else if(e.data.target===\"setimmediate\"){worker.postMessage(e.data)}else{err(\"worker sent an unknown command \"+cmd)}PThread.currentProxiedOperationCallerThread=undefined};worker.onerror=function(e){err(\"pthread sent an error! \"+e.filename+\":\"+e.lineno+\": \"+e.message)};if(ENVIRONMENT_IS_NODE){worker.on(\"message\",function(data){worker.onmessage({data:data})});worker.on(\"error\",function(data){worker.onerror(data)});worker.on(\"exit\",function(data){console.log(\"worker exited - TODO: update the worker queue?\")})}worker.postMessage({\"cmd\":\"load\",\"urlOrBlob\":Module[\"mainScriptUrlOrBlob\"]||_scriptDir,\"wasmMemory\":wasmMemory,\"wasmModule\":wasmModule,\"DYNAMIC_BASE\":DYNAMIC_BASE,\"DYNAMICTOP_PTR\":DYNAMICTOP_PTR})},allocateUnusedWorker:function(){var pthreadMainJs=locateFile(\"tfjs-backend-wasm-threaded-simd.worker.js\");PThread.unusedWorkers.push(new Worker(pthreadMainJs))},getNewWorker:function(){if(PThread.unusedWorkers.length==0){PThread.allocateUnusedWorker();PThread.loadWasmModuleToWorker(PThread.unusedWorkers[0])}if(PThread.unusedWorkers.length>0)return PThread.unusedWorkers.pop();else return null},busySpinWait:function(msecs){var t=performance.now()+msecs;while(performance.now()>2]=value;return value}function _atexit(func,arg){if(ENVIRONMENT_IS_PTHREAD)return _emscripten_proxy_to_main_thread_js(1,1,func,arg);__ATEXIT__.unshift({func:func,arg:arg})}function __emscripten_notify_thread_queue(targetThreadId,mainThreadId){if(targetThreadId==mainThreadId){postMessage({\"cmd\":\"processQueuedMainThreadWork\"})}else if(ENVIRONMENT_IS_PTHREAD){postMessage({\"targetThread\":targetThreadId,\"cmd\":\"processThreadQueue\"})}else{var pthread=PThread.pthreads[targetThreadId];var worker=pthread&&pthread.worker;if(!worker){return}worker.postMessage({\"cmd\":\"processThreadQueue\"})}return 1}function _abort(){abort()}function _emscripten_conditional_set_current_thread_status(expectedStatus,newStatus){expectedStatus=expectedStatus|0;newStatus=newStatus|0}function _emscripten_futex_wait(addr,val,timeout){if(addr<=0||addr>GROWABLE_HEAP_I8().length||addr&3!=0)return-28;if(ENVIRONMENT_IS_WORKER){var ret=Atomics.wait(GROWABLE_HEAP_I32(),addr>>2,val,timeout);if(ret===\"timed-out\")return-73;if(ret===\"not-equal\")return-6;if(ret===\"ok\")return 0;throw\"Atomics.wait returned an unexpected value 
\"+ret}else{var loadedVal=Atomics.load(GROWABLE_HEAP_I32(),addr>>2);if(val!=loadedVal)return-6;var tNow=performance.now();var tEnd=tNow+timeout;Atomics.store(GROWABLE_HEAP_I32(),__main_thread_futex_wait_address>>2,addr);var ourWaitAddress=addr;while(addr==ourWaitAddress){tNow=performance.now();if(tNow>tEnd){return-73}_emscripten_main_thread_process_queued_calls();addr=Atomics.load(GROWABLE_HEAP_I32(),__main_thread_futex_wait_address>>2)}return 0}}function _emscripten_is_main_browser_thread(){return __pthread_is_main_browser_thread|0}function _emscripten_is_main_runtime_thread(){return __pthread_is_main_runtime_thread|0}function _emscripten_memcpy_big(dest,src,num){GROWABLE_HEAP_U8().copyWithin(dest,src,src+num)}function _emscripten_num_logical_cores(){return navigator[\"hardwareConcurrency\"]}function _emscripten_proxy_to_main_thread_js(index,sync){var numCallArgs=arguments.length-2;var stack=stackSave();var args=stackAlloc(numCallArgs*8);var b=args>>3;for(var i=0;i>3]);buf+=8}else{buf=buf+3&~3;args.push(GROWABLE_HEAP_I32()[buf>>2]);buf+=4}}return args}function _emscripten_receive_on_main_thread_js(index,numCallArgs,args){_emscripten_receive_on_main_thread_js_callArgs.length=numCallArgs;var b=args>>3;for(var i=0;i>>16);updateGlobalBufferAndViews(wasmMemory.buffer);return 1}catch(e){}}function _emscripten_resize_heap(requestedSize){requestedSize=requestedSize>>>0;var oldSize=_emscripten_get_heap_size();if(requestedSize<=oldSize){return false}var PAGE_MULTIPLE=65536;var maxHeapSize=2147483648;if(requestedSize>maxHeapSize){return false}var minHeapSize=16777216;for(var cutDown=1;cutDown<=4;cutDown*=2){var overGrownHeapSize=oldSize*(1+.2/cutDown);overGrownHeapSize=Math.min(overGrownHeapSize,requestedSize+100663296);var newSize=Math.min(maxHeapSize,alignUp(Math.max(minHeapSize,requestedSize,overGrownHeapSize),PAGE_MULTIPLE));var replacement=emscripten_realloc_buffer(newSize);if(replacement){return true}}return false}var JSEvents={keyEvent:0,mouseEvent:0,wheelEvent:0,uiEvent:0,focusEvent:0,deviceOrientationEvent:0,deviceMotionEvent:0,fullscreenChangeEvent:0,pointerlockChangeEvent:0,visibilityChangeEvent:0,touchEvent:0,previousFullscreenElement:null,previousScreenX:null,previousScreenY:null,removeEventListenersRegistered:false,removeAllEventListeners:function(){for(var i=JSEvents.eventHandlers.length-1;i>=0;--i){JSEvents._removeHandler(i)}JSEvents.eventHandlers=[];JSEvents.deferredCalls=[]},registerRemoveEventListeners:function(){if(!JSEvents.removeEventListenersRegistered){__ATEXIT__.push(JSEvents.removeAllEventListeners);JSEvents.removeEventListenersRegistered=true}},deferredCalls:[],deferCall:function(targetFunction,precedence,argsList){function arraysHaveEqualContent(arrA,arrB){if(arrA.length!=arrB.length)return false;for(var i in arrA){if(arrA[i]!=arrB[i])return false}return true}for(var i in JSEvents.deferredCalls){var call=JSEvents.deferredCalls[i];if(call.targetFunction==targetFunction&&arraysHaveEqualContent(call.argsList,argsList)){return}}JSEvents.deferredCalls.push({targetFunction:targetFunction,precedence:precedence,argsList:argsList});JSEvents.deferredCalls.sort(function(x,y){return x.precedence>2]=eventTypeId;GROWABLE_HEAP_I32()[varargs+4>>2]=eventData;GROWABLE_HEAP_I32()[varargs+8>>2]=userData;_emscripten_async_queue_on_thread_(targetThread,637534208,eventHandlerFunc,eventData,varargs);stackRestore(stackTop)},getTargetThreadForEventCallback:function(targetThread){switch(targetThread){case 1:return 0;case 2:return PThread.currentProxiedOperationCallerThread;default:return 
targetThread}},getNodeNameForTarget:function(target){if(!target)return\"\";if(target==window)return\"#window\";if(target==screen)return\"#screen\";return target&&target.nodeName?target.nodeName:\"\"},fullscreenEnabled:function(){return document.fullscreenEnabled||document.webkitFullscreenEnabled}};function stringToNewUTF8(jsString){var length=lengthBytesUTF8(jsString)+1;var cString=_malloc(length);stringToUTF8(jsString,cString,length);return cString}function _emscripten_set_offscreencanvas_size_on_target_thread_js(targetThread,targetCanvas,width,height){var stackTop=stackSave();var varargs=stackAlloc(12);var targetCanvasPtr=0;if(targetCanvas){targetCanvasPtr=stringToNewUTF8(targetCanvas)}GROWABLE_HEAP_I32()[varargs>>2]=targetCanvasPtr;GROWABLE_HEAP_I32()[varargs+4>>2]=width;GROWABLE_HEAP_I32()[varargs+8>>2]=height;_emscripten_async_queue_on_thread_(targetThread,657457152,0,targetCanvasPtr,varargs);stackRestore(stackTop)}function _emscripten_set_offscreencanvas_size_on_target_thread(targetThread,targetCanvas,width,height){targetCanvas=targetCanvas?UTF8ToString(targetCanvas):\"\";_emscripten_set_offscreencanvas_size_on_target_thread_js(targetThread,targetCanvas,width,height)}function __maybeCStringToJsString(cString){return cString>2?UTF8ToString(cString):cString}var specialHTMLTargets=[0,typeof document!==\"undefined\"?document:0,typeof window!==\"undefined\"?window:0];function __findEventTarget(target){target=__maybeCStringToJsString(target);var domElement=specialHTMLTargets[target]||(typeof document!==\"undefined\"?document.querySelector(target):undefined);return domElement}function __findCanvasEventTarget(target){return __findEventTarget(target)}function _emscripten_set_canvas_element_size_calling_thread(target,width,height){var canvas=__findCanvasEventTarget(target);if(!canvas)return-4;if(canvas.canvasSharedPtr){GROWABLE_HEAP_I32()[canvas.canvasSharedPtr>>2]=width;GROWABLE_HEAP_I32()[canvas.canvasSharedPtr+4>>2]=height}if(canvas.offscreenCanvas||!canvas.controlTransferredOffscreen){if(canvas.offscreenCanvas)canvas=canvas.offscreenCanvas;var autoResizeViewport=false;if(canvas.GLctxObject&&canvas.GLctxObject.GLctx){var prevViewport=canvas.GLctxObject.GLctx.getParameter(2978);autoResizeViewport=prevViewport[0]===0&&prevViewport[1]===0&&prevViewport[2]===canvas.width&&prevViewport[3]===canvas.height}canvas.width=width;canvas.height=height;if(autoResizeViewport){canvas.GLctxObject.GLctx.viewport(0,0,width,height)}}else if(canvas.canvasSharedPtr){var targetThread=GROWABLE_HEAP_I32()[canvas.canvasSharedPtr+8>>2];_emscripten_set_offscreencanvas_size_on_target_thread(targetThread,target,width,height);return 1}else{return-4}return 0}function _emscripten_set_canvas_element_size_main_thread(target,width,height){if(ENVIRONMENT_IS_PTHREAD)return _emscripten_proxy_to_main_thread_js(2,1,target,width,height);return _emscripten_set_canvas_element_size_calling_thread(target,width,height)}function _emscripten_set_canvas_element_size(target,width,height){var canvas=__findCanvasEventTarget(target);if(canvas){return _emscripten_set_canvas_element_size_calling_thread(target,width,height)}else{return _emscripten_set_canvas_element_size_main_thread(target,width,height)}}function _emscripten_set_current_thread_status(newStatus){newStatus=newStatus|0}function _emscripten_set_thread_name(threadId,name){threadId=threadId|0;name=name|0}function __webgl_enable_ANGLE_instanced_arrays(ctx){var 
ext=ctx.getExtension(\"ANGLE_instanced_arrays\");if(ext){ctx[\"vertexAttribDivisor\"]=function(index,divisor){ext[\"vertexAttribDivisorANGLE\"](index,divisor)};ctx[\"drawArraysInstanced\"]=function(mode,first,count,primcount){ext[\"drawArraysInstancedANGLE\"](mode,first,count,primcount)};ctx[\"drawElementsInstanced\"]=function(mode,count,type,indices,primcount){ext[\"drawElementsInstancedANGLE\"](mode,count,type,indices,primcount)};return 1}}function __webgl_enable_OES_vertex_array_object(ctx){var ext=ctx.getExtension(\"OES_vertex_array_object\");if(ext){ctx[\"createVertexArray\"]=function(){return ext[\"createVertexArrayOES\"]()};ctx[\"deleteVertexArray\"]=function(vao){ext[\"deleteVertexArrayOES\"](vao)};ctx[\"bindVertexArray\"]=function(vao){ext[\"bindVertexArrayOES\"](vao)};ctx[\"isVertexArray\"]=function(vao){return ext[\"isVertexArrayOES\"](vao)};return 1}}function __webgl_enable_WEBGL_draw_buffers(ctx){var ext=ctx.getExtension(\"WEBGL_draw_buffers\");if(ext){ctx[\"drawBuffers\"]=function(n,bufs){ext[\"drawBuffersWEBGL\"](n,bufs)};return 1}}var GL={counter:1,lastError:0,buffers:[],mappedBuffers:{},programs:[],framebuffers:[],renderbuffers:[],textures:[],uniforms:[],shaders:[],vaos:[],contexts:{},currentContext:null,offscreenCanvases:{},timerQueriesEXT:[],programInfos:{},stringCache:{},unpackAlignment:4,init:function(){var miniTempFloatBuffer=new Float32Array(GL.MINI_TEMP_BUFFER_SIZE);for(var i=0;i>2]:-1;source+=UTF8ToString(GROWABLE_HEAP_I32()[string+i*4>>2],len<0?undefined:len)}return source},createContext:function(canvas,webGLContextAttributes){var ctx=canvas.getContext(\"webgl\",webGLContextAttributes);if(!ctx)return 0;var handle=GL.registerContext(ctx,webGLContextAttributes);return handle},registerContext:function(ctx,webGLContextAttributes){var handle=_malloc(8);GROWABLE_HEAP_I32()[handle+4>>2]=_pthread_self();var context={handle:handle,attributes:webGLContextAttributes,version:webGLContextAttributes.majorVersion,GLctx:ctx};if(ctx.canvas)ctx.canvas.GLctxObject=context;GL.contexts[handle]=context;if(typeof webGLContextAttributes.enableExtensionsByDefault===\"undefined\"||webGLContextAttributes.enableExtensionsByDefault){GL.initExtensions(context)}return handle},makeContextCurrent:function(contextHandle){GL.currentContext=GL.contexts[contextHandle];Module.ctx=GLctx=GL.currentContext&&GL.currentContext.GLctx;return!(contextHandle&&!GLctx)},getContext:function(contextHandle){return GL.contexts[contextHandle]},deleteContext:function(contextHandle){if(GL.currentContext===GL.contexts[contextHandle])GL.currentContext=null;if(typeof JSEvents===\"object\")JSEvents.removeAllHandlersOnTarget(GL.contexts[contextHandle].GLctx.canvas);if(GL.contexts[contextHandle]&&GL.contexts[contextHandle].GLctx.canvas)GL.contexts[contextHandle].GLctx.canvas.GLctxObject=undefined;_free(GL.contexts[contextHandle].handle);GL.contexts[contextHandle]=null},initExtensions:function(context){if(!context)context=GL.currentContext;if(context.initExtensionsDone)return;context.initExtensionsDone=true;var GLctx=context.GLctx;__webgl_enable_ANGLE_instanced_arrays(GLctx);__webgl_enable_OES_vertex_array_object(GLctx);__webgl_enable_WEBGL_draw_buffers(GLctx);GLctx.disjointTimerQueryExt=GLctx.getExtension(\"EXT_disjoint_timer_query\");var 
automaticallyEnabledExtensions=[\"OES_texture_float\",\"OES_texture_half_float\",\"OES_standard_derivatives\",\"OES_vertex_array_object\",\"WEBGL_compressed_texture_s3tc\",\"WEBGL_depth_texture\",\"OES_element_index_uint\",\"EXT_texture_filter_anisotropic\",\"EXT_frag_depth\",\"WEBGL_draw_buffers\",\"ANGLE_instanced_arrays\",\"OES_texture_float_linear\",\"OES_texture_half_float_linear\",\"EXT_blend_minmax\",\"EXT_shader_texture_lod\",\"EXT_texture_norm16\",\"WEBGL_compressed_texture_pvrtc\",\"EXT_color_buffer_half_float\",\"WEBGL_color_buffer_float\",\"EXT_sRGB\",\"WEBGL_compressed_texture_etc1\",\"EXT_disjoint_timer_query\",\"WEBGL_compressed_texture_etc\",\"WEBGL_compressed_texture_astc\",\"EXT_color_buffer_float\",\"WEBGL_compressed_texture_s3tc_srgb\",\"EXT_disjoint_timer_query_webgl2\",\"WEBKIT_WEBGL_compressed_texture_pvrtc\"];var exts=GLctx.getSupportedExtensions()||[];exts.forEach(function(ext){if(automaticallyEnabledExtensions.indexOf(ext)!=-1){GLctx.getExtension(ext)}})},populateUniformTable:function(program){var p=GL.programs[program];var ptable=GL.programInfos[program]={uniforms:{},maxUniformLength:0,maxAttributeLength:-1,maxUniformBlockNameLength:-1};var utable=ptable.uniforms;var numUniforms=GLctx.getProgramParameter(p,35718);for(var i=0;i>2;contextAttributes[\"alpha\"]=!!GROWABLE_HEAP_I32()[a+(0>>2)];contextAttributes[\"depth\"]=!!GROWABLE_HEAP_I32()[a+(4>>2)];contextAttributes[\"stencil\"]=!!GROWABLE_HEAP_I32()[a+(8>>2)];contextAttributes[\"antialias\"]=!!GROWABLE_HEAP_I32()[a+(12>>2)];contextAttributes[\"premultipliedAlpha\"]=!!GROWABLE_HEAP_I32()[a+(16>>2)];contextAttributes[\"preserveDrawingBuffer\"]=!!GROWABLE_HEAP_I32()[a+(20>>2)];var powerPreference=GROWABLE_HEAP_I32()[a+(24>>2)];contextAttributes[\"powerPreference\"]=__emscripten_webgl_power_preferences[powerPreference];contextAttributes[\"failIfMajorPerformanceCaveat\"]=!!GROWABLE_HEAP_I32()[a+(28>>2)];contextAttributes.majorVersion=GROWABLE_HEAP_I32()[a+(32>>2)];contextAttributes.minorVersion=GROWABLE_HEAP_I32()[a+(36>>2)];contextAttributes.enableExtensionsByDefault=GROWABLE_HEAP_I32()[a+(40>>2)];contextAttributes.explicitSwapControl=GROWABLE_HEAP_I32()[a+(44>>2)];contextAttributes.proxyContextToMainThread=GROWABLE_HEAP_I32()[a+(48>>2)];contextAttributes.renderViaOffscreenBackBuffer=GROWABLE_HEAP_I32()[a+(52>>2)];var canvas=__findCanvasEventTarget(target);if(!canvas){return-4}if(contextAttributes.explicitSwapControl){return-1}var contextHandle=GL.createContext(canvas,contextAttributes);return contextHandle}function _emscripten_webgl_create_context(a0,a1){return _emscripten_webgl_do_create_context(a0,a1)}var PATH={splitPath:function(filename){var splitPathRe=/^(\\/?|)([\\s\\S]*?)((?:\\.{1,2}|[^\\/]+?|)(\\.[^.\\/]*|))(?:[\\/]*)$/;return splitPathRe.exec(filename).slice(1)},normalizeArray:function(parts,allowAboveRoot){var up=0;for(var i=parts.length-1;i>=0;i--){var last=parts[i];if(last===\".\"){parts.splice(i,1)}else if(last===\"..\"){parts.splice(i,1);up++}else if(up){parts.splice(i,1);up--}}if(allowAboveRoot){for(;up;up--){parts.unshift(\"..\")}}return parts},normalize:function(path){var isAbsolute=path.charAt(0)===\"/\",trailingSlash=path.substr(-1)===\"/\";path=PATH.normalizeArray(path.split(\"/\").filter(function(p){return!!p}),!isAbsolute).join(\"/\");if(!path&&!isAbsolute){path=\".\"}if(path&&trailingSlash){path+=\"/\"}return(isAbsolute?\"/\":\"\")+path},dirname:function(path){var result=PATH.splitPath(path),root=result[0],dir=result[1];if(!root&&!dir){return\".\"}if(dir){dir=dir.substr(0,dir.length-1)}return 
root+dir},basename:function(path){if(path===\"/\")return\"/\";var lastSlash=path.lastIndexOf(\"/\");if(lastSlash===-1)return path;return path.substr(lastSlash+1)},extname:function(path){return PATH.splitPath(path)[3]},join:function(){var paths=Array.prototype.slice.call(arguments,0);return PATH.normalize(paths.join(\"/\"))},join2:function(l,r){return PATH.normalize(l+\"/\"+r)}};var SYSCALLS={mappings:{},buffers:[null,[],[]],printChar:function(stream,curr){var buffer=SYSCALLS.buffers[stream];if(curr===0||curr===10){(stream===1?out:err)(UTF8ArrayToString(buffer,0));buffer.length=0}else{buffer.push(curr)}},varargs:undefined,get:function(){SYSCALLS.varargs+=4;var ret=GROWABLE_HEAP_I32()[SYSCALLS.varargs-4>>2];return ret},getStr:function(ptr){var ret=UTF8ToString(ptr);return ret},get64:function(low,high){return low}};function _fd_close(fd){if(ENVIRONMENT_IS_PTHREAD)return _emscripten_proxy_to_main_thread_js(3,1,fd);return 0}function _fd_seek(fd,offset_low,offset_high,whence,newOffset){if(ENVIRONMENT_IS_PTHREAD)return _emscripten_proxy_to_main_thread_js(4,1,fd,offset_low,offset_high,whence,newOffset)}function _fd_write(fd,iov,iovcnt,pnum){if(ENVIRONMENT_IS_PTHREAD)return _emscripten_proxy_to_main_thread_js(5,1,fd,iov,iovcnt,pnum);var num=0;for(var i=0;i>2];var len=GROWABLE_HEAP_I32()[iov+(i*8+4)>>2];for(var j=0;j>2]=num;return 0}function _pthread_cleanup_pop(execute){var routine=PThread.exitHandlers.pop();if(execute)routine()}function _pthread_cleanup_push(routine,arg){if(PThread.exitHandlers===null){PThread.exitHandlers=[]}PThread.exitHandlers.push(function(){dynCall_vi(routine,arg)})}function __spawn_thread(threadParams){if(ENVIRONMENT_IS_PTHREAD)throw\"Internal Error! _spawn_thread() can only ever be called from main application thread!\";var worker=PThread.getNewWorker();if(worker.pthread!==undefined)throw\"Internal error!\";if(!threadParams.pthread_ptr)throw\"Internal error, no pthread ptr!\";PThread.runningWorkers.push(worker);var tlsMemory=_malloc(128*4);for(var i=0;i<128;++i){GROWABLE_HEAP_I32()[tlsMemory+i*4>>2]=0}var stackHigh=threadParams.stackBase+threadParams.stackSize;var pthread=PThread.pthreads[threadParams.pthread_ptr]={worker:worker,stackBase:threadParams.stackBase,stackSize:threadParams.stackSize,allocatedOwnStack:threadParams.allocatedOwnStack,thread:threadParams.pthread_ptr,threadInfoStruct:threadParams.pthread_ptr};var tis=pthread.threadInfoStruct>>2;Atomics.store(GROWABLE_HEAP_U32(),tis+(0>>2),0);Atomics.store(GROWABLE_HEAP_U32(),tis+(4>>2),0);Atomics.store(GROWABLE_HEAP_U32(),tis+(8>>2),0);Atomics.store(GROWABLE_HEAP_U32(),tis+(68>>2),threadParams.detached);Atomics.store(GROWABLE_HEAP_U32(),tis+(104>>2),tlsMemory);Atomics.store(GROWABLE_HEAP_U32(),tis+(48>>2),0);Atomics.store(GROWABLE_HEAP_U32(),tis+(40>>2),pthread.threadInfoStruct);Atomics.store(GROWABLE_HEAP_U32(),tis+(44>>2),42);Atomics.store(GROWABLE_HEAP_U32(),tis+(108>>2),threadParams.stackSize);Atomics.store(GROWABLE_HEAP_U32(),tis+(84>>2),threadParams.stackSize);Atomics.store(GROWABLE_HEAP_U32(),tis+(80>>2),stackHigh);Atomics.store(GROWABLE_HEAP_U32(),tis+(108+8>>2),stackHigh);Atomics.store(GROWABLE_HEAP_U32(),tis+(108+12>>2),threadParams.detached);Atomics.store(GROWABLE_HEAP_U32(),tis+(108+20>>2),threadParams.schedPolicy);Atomics.store(GROWABLE_HEAP_U32(),tis+(108+24>>2),threadParams.schedPrio);var global_libc=_emscripten_get_global_libc();var global_locale=global_libc+40;Atomics.store(GROWABLE_HEAP_U32(),tis+(176>>2),global_locale);worker.pthread=pthread;var 
msg={\"cmd\":\"run\",\"start_routine\":threadParams.startRoutine,\"arg\":threadParams.arg,\"threadInfoStruct\":threadParams.pthread_ptr,\"selfThreadId\":threadParams.pthread_ptr,\"parentThreadId\":threadParams.parent_pthread_ptr,\"stackBase\":threadParams.stackBase,\"stackSize\":threadParams.stackSize};worker.runPthread=function(){msg.time=performance.now();worker.postMessage(msg,threadParams.transferList)};if(worker.loaded){worker.runPthread();delete worker.runPthread}}function _pthread_getschedparam(thread,policy,schedparam){if(!policy&&!schedparam)return ERRNO_CODES.EINVAL;if(!thread){err(\"pthread_getschedparam called with a null thread pointer!\");return ERRNO_CODES.ESRCH}var self=GROWABLE_HEAP_I32()[thread+12>>2];if(self!==thread){err(\"pthread_getschedparam attempted on thread \"+thread+\", which does not point to a valid thread, or does not exist anymore!\");return ERRNO_CODES.ESRCH}var schedPolicy=Atomics.load(GROWABLE_HEAP_U32(),thread+108+20>>2);var schedPrio=Atomics.load(GROWABLE_HEAP_U32(),thread+108+24>>2);if(policy)GROWABLE_HEAP_I32()[policy>>2]=schedPolicy;if(schedparam)GROWABLE_HEAP_I32()[schedparam>>2]=schedPrio;return 0}function _pthread_self(){return __pthread_ptr|0}Module[\"_pthread_self\"]=_pthread_self;function _pthread_create(pthread_ptr,attr,start_routine,arg){if(typeof SharedArrayBuffer===\"undefined\"){err(\"Current environment does not support SharedArrayBuffer, pthreads are not available!\");return 6}if(!pthread_ptr){err(\"pthread_create called with a null thread pointer!\");return 28}var transferList=[];var error=0;if(ENVIRONMENT_IS_PTHREAD&&(transferList.length===0||error)){return _emscripten_sync_run_in_main_thread_4(687865856,pthread_ptr,attr,start_routine,arg)}if(error)return error;var stackSize=0;var stackBase=0;var detached=0;var schedPolicy=0;var schedPrio=0;if(attr){stackSize=GROWABLE_HEAP_I32()[attr>>2];stackSize+=81920;stackBase=GROWABLE_HEAP_I32()[attr+8>>2];detached=GROWABLE_HEAP_I32()[attr+12>>2]!==0;var inheritSched=GROWABLE_HEAP_I32()[attr+16>>2]===0;if(inheritSched){var prevSchedPolicy=GROWABLE_HEAP_I32()[attr+20>>2];var prevSchedPrio=GROWABLE_HEAP_I32()[attr+24>>2];var parentThreadPtr=PThread.currentProxiedOperationCallerThread?PThread.currentProxiedOperationCallerThread:_pthread_self();_pthread_getschedparam(parentThreadPtr,attr+20,attr+24);schedPolicy=GROWABLE_HEAP_I32()[attr+20>>2];schedPrio=GROWABLE_HEAP_I32()[attr+24>>2];GROWABLE_HEAP_I32()[attr+20>>2]=prevSchedPolicy;GROWABLE_HEAP_I32()[attr+24>>2]=prevSchedPrio}else{schedPolicy=GROWABLE_HEAP_I32()[attr+20>>2];schedPrio=GROWABLE_HEAP_I32()[attr+24>>2]}}else{stackSize=2097152}var allocatedOwnStack=stackBase==0;if(allocatedOwnStack){stackBase=_memalign(16,stackSize)}else{stackBase-=stackSize;assert(stackBase>0)}var threadInfoStruct=_malloc(232);for(var i=0;i<232>>2;++i)GROWABLE_HEAP_U32()[(threadInfoStruct>>2)+i]=0;GROWABLE_HEAP_I32()[pthread_ptr>>2]=threadInfoStruct;GROWABLE_HEAP_I32()[threadInfoStruct+12>>2]=threadInfoStruct;var headPtr=threadInfoStruct+156;GROWABLE_HEAP_I32()[headPtr>>2]=headPtr;var threadParams={stackBase:stackBase,stackSize:stackSize,allocatedOwnStack:allocatedOwnStack,schedPolicy:schedPolicy,schedPrio:schedPrio,detached:detached,startRoutine:start_routine,pthread_ptr:threadInfoStruct,parent_pthread_ptr:_pthread_self(),arg:arg,transferList:transferList};if(ENVIRONMENT_IS_PTHREAD){threadParams.cmd=\"spawnThread\";postMessage(threadParams,transferList)}else{__spawn_thread(threadParams)}return 0}function _roundf(d){d=+d;return d>=+0?+Math_floor(d+ 
+.5):+Math_ceil(d-+.5)}function _sysconf(name){if(ENVIRONMENT_IS_PTHREAD)return _emscripten_proxy_to_main_thread_js(6,1,name);switch(name){case 30:return 16384;case 85:var maxHeapSize=2147483648;return maxHeapSize/16384;case 132:case 133:case 12:case 137:case 138:case 15:case 235:case 16:case 17:case 18:case 19:case 20:case 149:case 13:case 10:case 236:case 153:case 9:case 21:case 22:case 159:case 154:case 14:case 77:case 78:case 139:case 80:case 81:case 82:case 68:case 67:case 164:case 11:case 29:case 47:case 48:case 95:case 52:case 51:case 46:case 79:return 200809;case 27:case 246:case 127:case 128:case 23:case 24:case 160:case 161:case 181:case 182:case 242:case 183:case 184:case 243:case 244:case 245:case 165:case 178:case 179:case 49:case 50:case 168:case 169:case 175:case 170:case 171:case 172:case 97:case 76:case 32:case 173:case 35:return-1;case 176:case 177:case 7:case 155:case 8:case 157:case 125:case 126:case 92:case 93:case 129:case 130:case 131:case 94:case 91:return 1;case 74:case 60:case 69:case 70:case 4:return 1024;case 31:case 42:case 72:return 32;case 87:case 26:case 33:return 2147483647;case 34:case 1:return 47839;case 38:case 36:return 99;case 43:case 37:return 2048;case 0:return 2097152;case 3:return 65536;case 28:return 32768;case 44:return 32767;case 75:return 16384;case 39:return 1e3;case 89:return 700;case 71:return 256;case 40:return 255;case 2:return 100;case 180:return 64;case 25:return 20;case 5:return 16;case 6:return 6;case 73:return 4;case 84:{if(typeof navigator===\"object\")return navigator[\"hardwareConcurrency\"]||1;return 1}}setErrNo(28);return-1}if(!ENVIRONMENT_IS_PTHREAD)PThread.initMainThreadBlock();else PThread.initWorker();var GLctx;GL.init();var proxiedFunctionTable=[null,_atexit,_emscripten_set_canvas_element_size_main_thread,_fd_close,_fd_seek,_fd_write,_sysconf];var asmLibraryArg={\"e\":___assert_fail,\"r\":___call_main,\"w\":__emscripten_notify_thread_queue,\"a\":_abort,\"l\":_emscripten_conditional_set_current_thread_status,\"d\":_emscripten_futex_wait,\"c\":_emscripten_futex_wake,\"h\":_emscripten_get_now,\"g\":_emscripten_is_main_browser_thread,\"x\":_emscripten_is_main_runtime_thread,\"q\":_emscripten_memcpy_big,\"B\":_emscripten_num_logical_cores,\"t\":_emscripten_receive_on_main_thread_js,\"A\":_emscripten_resize_heap,\"u\":_emscripten_set_canvas_element_size,\"k\":_emscripten_set_current_thread_status,\"s\":_emscripten_set_thread_name,\"v\":_emscripten_webgl_create_context,\"m\":_fd_close,\"o\":_fd_seek,\"i\":_fd_write,\"p\":initPthreadsJS,\"memory\":wasmMemory||Module[\"wasmMemory\"],\"y\":_pthread_cleanup_pop,\"z\":_pthread_cleanup_push,\"j\":_pthread_create,\"b\":_pthread_self,\"f\":_roundf,\"n\":_sysconf,\"table\":wasmTable};var asm=createWasm();Module[\"asm\"]=asm;var ___wasm_call_ctors=Module[\"___wasm_call_ctors\"]=function(){return(___wasm_call_ctors=Module[\"___wasm_call_ctors\"]=Module[\"asm\"][\"C\"]).apply(null,arguments)};var _init=Module[\"_init\"]=function(){return(_init=Module[\"_init\"]=Module[\"asm\"][\"D\"]).apply(null,arguments)};var _register_tensor=Module[\"_register_tensor\"]=function(){return(_register_tensor=Module[\"_register_tensor\"]=Module[\"asm\"][\"E\"]).apply(null,arguments)};var _dispose_data=Module[\"_dispose_data\"]=function(){return(_dispose_data=Module[\"_dispose_data\"]=Module[\"asm\"][\"F\"]).apply(null,arguments)};var _dispose=Module[\"_dispose\"]=function(){return(_dispose=Module[\"_dispose\"]=Module[\"asm\"][\"G\"]).apply(null,arguments)};var 
_Abs=Module[\"_Abs\"]=function(){return(_Abs=Module[\"_Abs\"]=Module[\"asm\"][\"H\"]).apply(null,arguments)};var _Add=Module[\"_Add\"]=function(){return(_Add=Module[\"_Add\"]=Module[\"asm\"][\"I\"]).apply(null,arguments)};var _AddN=Module[\"_AddN\"]=function(){return(_AddN=Module[\"_AddN\"]=Module[\"asm\"][\"J\"]).apply(null,arguments)};var _ArgMax=Module[\"_ArgMax\"]=function(){return(_ArgMax=Module[\"_ArgMax\"]=Module[\"asm\"][\"K\"]).apply(null,arguments)};var _AvgPool=Module[\"_AvgPool\"]=function(){return(_AvgPool=Module[\"_AvgPool\"]=Module[\"asm\"][\"L\"]).apply(null,arguments)};var _BatchMatMul=Module[\"_BatchMatMul\"]=function(){return(_BatchMatMul=Module[\"_BatchMatMul\"]=Module[\"asm\"][\"M\"]).apply(null,arguments)};var _ClipByValue=Module[\"_ClipByValue\"]=function(){return(_ClipByValue=Module[\"_ClipByValue\"]=Module[\"asm\"][\"N\"]).apply(null,arguments)};var _Conv2D=Module[\"_Conv2D\"]=function(){return(_Conv2D=Module[\"_Conv2D\"]=Module[\"asm\"][\"O\"]).apply(null,arguments)};var _Conv2DBackpropInput=Module[\"_Conv2DBackpropInput\"]=function(){return(_Conv2DBackpropInput=Module[\"_Conv2DBackpropInput\"]=Module[\"asm\"][\"P\"]).apply(null,arguments)};var _Cos=Module[\"_Cos\"]=function(){return(_Cos=Module[\"_Cos\"]=Module[\"asm\"][\"Q\"]).apply(null,arguments)};var _CropAndResize=Module[\"_CropAndResize\"]=function(){return(_CropAndResize=Module[\"_CropAndResize\"]=Module[\"asm\"][\"R\"]).apply(null,arguments)};var _Cumsum=Module[\"_Cumsum\"]=function(){return(_Cumsum=Module[\"_Cumsum\"]=Module[\"asm\"][\"S\"]).apply(null,arguments)};var _DepthToSpace=Module[\"_DepthToSpace\"]=function(){return(_DepthToSpace=Module[\"_DepthToSpace\"]=Module[\"asm\"][\"T\"]).apply(null,arguments)};var _DepthwiseConv2dNative=Module[\"_DepthwiseConv2dNative\"]=function(){return(_DepthwiseConv2dNative=Module[\"_DepthwiseConv2dNative\"]=Module[\"asm\"][\"U\"]).apply(null,arguments)};var _Div=Module[\"_Div\"]=function(){return(_Div=Module[\"_Div\"]=Module[\"asm\"][\"V\"]).apply(null,arguments)};var _Equal=Module[\"_Equal\"]=function(){return(_Equal=Module[\"_Equal\"]=Module[\"asm\"][\"W\"]).apply(null,arguments)};var _Exp=Module[\"_Exp\"]=function(){return(_Exp=Module[\"_Exp\"]=Module[\"asm\"][\"X\"]).apply(null,arguments)};var _FlipLeftRight=Module[\"_FlipLeftRight\"]=function(){return(_FlipLeftRight=Module[\"_FlipLeftRight\"]=Module[\"asm\"][\"Y\"]).apply(null,arguments)};var _FloorDiv=Module[\"_FloorDiv\"]=function(){return(_FloorDiv=Module[\"_FloorDiv\"]=Module[\"asm\"][\"Z\"]).apply(null,arguments)};var _FusedBatchNorm=Module[\"_FusedBatchNorm\"]=function(){return(_FusedBatchNorm=Module[\"_FusedBatchNorm\"]=Module[\"asm\"][\"_\"]).apply(null,arguments)};var _FusedConv2D=Module[\"_FusedConv2D\"]=function(){return(_FusedConv2D=Module[\"_FusedConv2D\"]=Module[\"asm\"][\"$\"]).apply(null,arguments)};var _FusedDepthwiseConv2D=Module[\"_FusedDepthwiseConv2D\"]=function(){return(_FusedDepthwiseConv2D=Module[\"_FusedDepthwiseConv2D\"]=Module[\"asm\"][\"aa\"]).apply(null,arguments)};var _Gather=Module[\"_Gather\"]=function(){return(_Gather=Module[\"_Gather\"]=Module[\"asm\"][\"ba\"]).apply(null,arguments)};var _GatherNd=Module[\"_GatherNd\"]=function(){return(_GatherNd=Module[\"_GatherNd\"]=Module[\"asm\"][\"ca\"]).apply(null,arguments)};var _Greater=Module[\"_Greater\"]=function(){return(_Greater=Module[\"_Greater\"]=Module[\"asm\"][\"da\"]).apply(null,arguments)};var 
_GreaterEqual=Module[\"_GreaterEqual\"]=function(){return(_GreaterEqual=Module[\"_GreaterEqual\"]=Module[\"asm\"][\"ea\"]).apply(null,arguments)};var _Less=Module[\"_Less\"]=function(){return(_Less=Module[\"_Less\"]=Module[\"asm\"][\"fa\"]).apply(null,arguments)};var _LessEqual=Module[\"_LessEqual\"]=function(){return(_LessEqual=Module[\"_LessEqual\"]=Module[\"asm\"][\"ga\"]).apply(null,arguments)};var _Log=Module[\"_Log\"]=function(){return(_Log=Module[\"_Log\"]=Module[\"asm\"][\"ha\"]).apply(null,arguments)};var _LogicalAnd=Module[\"_LogicalAnd\"]=function(){return(_LogicalAnd=Module[\"_LogicalAnd\"]=Module[\"asm\"][\"ia\"]).apply(null,arguments)};var _Max=Module[\"_Max\"]=function(){return(_Max=Module[\"_Max\"]=Module[\"asm\"][\"ja\"]).apply(null,arguments)};var _MaxPool=Module[\"_MaxPool\"]=function(){return(_MaxPool=Module[\"_MaxPool\"]=Module[\"asm\"][\"ka\"]).apply(null,arguments)};var _Maximum=Module[\"_Maximum\"]=function(){return(_Maximum=Module[\"_Maximum\"]=Module[\"asm\"][\"la\"]).apply(null,arguments)};var _Min=Module[\"_Min\"]=function(){return(_Min=Module[\"_Min\"]=Module[\"asm\"][\"ma\"]).apply(null,arguments)};var _Minimum=Module[\"_Minimum\"]=function(){return(_Minimum=Module[\"_Minimum\"]=Module[\"asm\"][\"na\"]).apply(null,arguments)};var _Multiply=Module[\"_Multiply\"]=function(){return(_Multiply=Module[\"_Multiply\"]=Module[\"asm\"][\"oa\"]).apply(null,arguments)};var _Negate=Module[\"_Negate\"]=function(){return(_Negate=Module[\"_Negate\"]=Module[\"asm\"][\"pa\"]).apply(null,arguments)};var _NonMaxSuppressionV3=Module[\"_NonMaxSuppressionV3\"]=function(){return(_NonMaxSuppressionV3=Module[\"_NonMaxSuppressionV3\"]=Module[\"asm\"][\"qa\"]).apply(null,arguments)};var _NonMaxSuppressionV4=Module[\"_NonMaxSuppressionV4\"]=function(){return(_NonMaxSuppressionV4=Module[\"_NonMaxSuppressionV4\"]=Module[\"asm\"][\"ra\"]).apply(null,arguments)};var _NonMaxSuppressionV5=Module[\"_NonMaxSuppressionV5\"]=function(){return(_NonMaxSuppressionV5=Module[\"_NonMaxSuppressionV5\"]=Module[\"asm\"][\"sa\"]).apply(null,arguments)};var _NotEqual=Module[\"_NotEqual\"]=function(){return(_NotEqual=Module[\"_NotEqual\"]=Module[\"asm\"][\"ta\"]).apply(null,arguments)};var _OneHot=Module[\"_OneHot\"]=function(){return(_OneHot=Module[\"_OneHot\"]=Module[\"asm\"][\"ua\"]).apply(null,arguments)};var _PadV2=Module[\"_PadV2\"]=function(){return(_PadV2=Module[\"_PadV2\"]=Module[\"asm\"][\"va\"]).apply(null,arguments)};var _Pow=Module[\"_Pow\"]=function(){return(_Pow=Module[\"_Pow\"]=Module[\"asm\"][\"wa\"]).apply(null,arguments)};var _Prelu=Module[\"_Prelu\"]=function(){return(_Prelu=Module[\"_Prelu\"]=Module[\"asm\"][\"xa\"]).apply(null,arguments)};var _Relu=Module[\"_Relu\"]=function(){return(_Relu=Module[\"_Relu\"]=Module[\"asm\"][\"ya\"]).apply(null,arguments)};var _Relu6=Module[\"_Relu6\"]=function(){return(_Relu6=Module[\"_Relu6\"]=Module[\"asm\"][\"za\"]).apply(null,arguments)};var _ResizeBilinear=Module[\"_ResizeBilinear\"]=function(){return(_ResizeBilinear=Module[\"_ResizeBilinear\"]=Module[\"asm\"][\"Aa\"]).apply(null,arguments)};var _Reverse=Module[\"_Reverse\"]=function(){return(_Reverse=Module[\"_Reverse\"]=Module[\"asm\"][\"Ba\"]).apply(null,arguments)};var _RotateWithOffset=Module[\"_RotateWithOffset\"]=function(){return(_RotateWithOffset=Module[\"_RotateWithOffset\"]=Module[\"asm\"][\"Ca\"]).apply(null,arguments)};var _Rsqrt=Module[\"_Rsqrt\"]=function(){return(_Rsqrt=Module[\"_Rsqrt\"]=Module[\"asm\"][\"Da\"]).apply(null,arguments)};var 
_ScatterNd=Module[\"_ScatterNd\"]=function(){return(_ScatterNd=Module[\"_ScatterNd\"]=Module[\"asm\"][\"Ea\"]).apply(null,arguments)};var _SelectV2=Module[\"_SelectV2\"]=function(){return(_SelectV2=Module[\"_SelectV2\"]=Module[\"asm\"][\"Fa\"]).apply(null,arguments)};var _Sigmoid=Module[\"_Sigmoid\"]=function(){return(_Sigmoid=Module[\"_Sigmoid\"]=Module[\"asm\"][\"Ga\"]).apply(null,arguments)};var _Sin=Module[\"_Sin\"]=function(){return(_Sin=Module[\"_Sin\"]=Module[\"asm\"][\"Ha\"]).apply(null,arguments)};var _Softmax=Module[\"_Softmax\"]=function(){return(_Softmax=Module[\"_Softmax\"]=Module[\"asm\"][\"Ia\"]).apply(null,arguments)};var _Sqrt=Module[\"_Sqrt\"]=function(){return(_Sqrt=Module[\"_Sqrt\"]=Module[\"asm\"][\"Ja\"]).apply(null,arguments)};var _Square=Module[\"_Square\"]=function(){return(_Square=Module[\"_Square\"]=Module[\"asm\"][\"Ka\"]).apply(null,arguments)};var _SquaredDifference=Module[\"_SquaredDifference\"]=function(){return(_SquaredDifference=Module[\"_SquaredDifference\"]=Module[\"asm\"][\"La\"]).apply(null,arguments)};var _StridedSlice=Module[\"_StridedSlice\"]=function(){return(_StridedSlice=Module[\"_StridedSlice\"]=Module[\"asm\"][\"Ma\"]).apply(null,arguments)};var _Sub=Module[\"_Sub\"]=function(){return(_Sub=Module[\"_Sub\"]=Module[\"asm\"][\"Na\"]).apply(null,arguments)};var _Sum=Module[\"_Sum\"]=function(){return(_Sum=Module[\"_Sum\"]=Module[\"asm\"][\"Oa\"]).apply(null,arguments)};var _Tanh=Module[\"_Tanh\"]=function(){return(_Tanh=Module[\"_Tanh\"]=Module[\"asm\"][\"Pa\"]).apply(null,arguments)};var _Tile=Module[\"_Tile\"]=function(){return(_Tile=Module[\"_Tile\"]=Module[\"asm\"][\"Qa\"]).apply(null,arguments)};var _Transpose=Module[\"_Transpose\"]=function(){return(_Transpose=Module[\"_Transpose\"]=Module[\"asm\"][\"Ra\"]).apply(null,arguments)};var __FusedMatMul=Module[\"__FusedMatMul\"]=function(){return(__FusedMatMul=Module[\"__FusedMatMul\"]=Module[\"asm\"][\"Sa\"]).apply(null,arguments)};var _malloc=Module[\"_malloc\"]=function(){return(_malloc=Module[\"_malloc\"]=Module[\"asm\"][\"Ta\"]).apply(null,arguments)};var _free=Module[\"_free\"]=function(){return(_free=Module[\"_free\"]=Module[\"asm\"][\"Ua\"]).apply(null,arguments)};var _emscripten_get_global_libc=Module[\"_emscripten_get_global_libc\"]=function(){return(_emscripten_get_global_libc=Module[\"_emscripten_get_global_libc\"]=Module[\"asm\"][\"Va\"]).apply(null,arguments)};var ___errno_location=Module[\"___errno_location\"]=function(){return(___errno_location=Module[\"___errno_location\"]=Module[\"asm\"][\"Wa\"]).apply(null,arguments)};var ___em_js__initPthreadsJS=Module[\"___em_js__initPthreadsJS\"]=function(){return(___em_js__initPthreadsJS=Module[\"___em_js__initPthreadsJS\"]=Module[\"asm\"][\"Xa\"]).apply(null,arguments)};var _memalign=Module[\"_memalign\"]=function(){return(_memalign=Module[\"_memalign\"]=Module[\"asm\"][\"Ya\"]).apply(null,arguments)};var ___pthread_tsd_run_dtors=Module[\"___pthread_tsd_run_dtors\"]=function(){return(___pthread_tsd_run_dtors=Module[\"___pthread_tsd_run_dtors\"]=Module[\"asm\"][\"Za\"]).apply(null,arguments)};var _emscripten_main_thread_process_queued_calls=Module[\"_emscripten_main_thread_process_queued_calls\"]=function(){return(_emscripten_main_thread_process_queued_calls=Module[\"_emscripten_main_thread_process_queued_calls\"]=Module[\"asm\"][\"_a\"]).apply(null,arguments)};var 
_emscripten_current_thread_process_queued_calls=Module[\"_emscripten_current_thread_process_queued_calls\"]=function(){return(_emscripten_current_thread_process_queued_calls=Module[\"_emscripten_current_thread_process_queued_calls\"]=Module[\"asm\"][\"$a\"]).apply(null,arguments)};var _emscripten_register_main_browser_thread_id=Module[\"_emscripten_register_main_browser_thread_id\"]=function(){return(_emscripten_register_main_browser_thread_id=Module[\"_emscripten_register_main_browser_thread_id\"]=Module[\"asm\"][\"ab\"]).apply(null,arguments)};var _emscripten_main_browser_thread_id=Module[\"_emscripten_main_browser_thread_id\"]=function(){return(_emscripten_main_browser_thread_id=Module[\"_emscripten_main_browser_thread_id\"]=Module[\"asm\"][\"bb\"]).apply(null,arguments)};var _emscripten_async_run_in_main_thread=Module[\"_emscripten_async_run_in_main_thread\"]=function(){return(_emscripten_async_run_in_main_thread=Module[\"_emscripten_async_run_in_main_thread\"]=Module[\"asm\"][\"cb\"]).apply(null,arguments)};var _emscripten_sync_run_in_main_thread=Module[\"_emscripten_sync_run_in_main_thread\"]=function(){return(_emscripten_sync_run_in_main_thread=Module[\"_emscripten_sync_run_in_main_thread\"]=Module[\"asm\"][\"db\"]).apply(null,arguments)};var _emscripten_sync_run_in_main_thread_0=Module[\"_emscripten_sync_run_in_main_thread_0\"]=function(){return(_emscripten_sync_run_in_main_thread_0=Module[\"_emscripten_sync_run_in_main_thread_0\"]=Module[\"asm\"][\"eb\"]).apply(null,arguments)};var _emscripten_sync_run_in_main_thread_1=Module[\"_emscripten_sync_run_in_main_thread_1\"]=function(){return(_emscripten_sync_run_in_main_thread_1=Module[\"_emscripten_sync_run_in_main_thread_1\"]=Module[\"asm\"][\"fb\"]).apply(null,arguments)};var _emscripten_sync_run_in_main_thread_2=Module[\"_emscripten_sync_run_in_main_thread_2\"]=function(){return(_emscripten_sync_run_in_main_thread_2=Module[\"_emscripten_sync_run_in_main_thread_2\"]=Module[\"asm\"][\"gb\"]).apply(null,arguments)};var _emscripten_sync_run_in_main_thread_xprintf_varargs=Module[\"_emscripten_sync_run_in_main_thread_xprintf_varargs\"]=function(){return(_emscripten_sync_run_in_main_thread_xprintf_varargs=Module[\"_emscripten_sync_run_in_main_thread_xprintf_varargs\"]=Module[\"asm\"][\"hb\"]).apply(null,arguments)};var _emscripten_sync_run_in_main_thread_3=Module[\"_emscripten_sync_run_in_main_thread_3\"]=function(){return(_emscripten_sync_run_in_main_thread_3=Module[\"_emscripten_sync_run_in_main_thread_3\"]=Module[\"asm\"][\"ib\"]).apply(null,arguments)};var _emscripten_sync_run_in_main_thread_4=Module[\"_emscripten_sync_run_in_main_thread_4\"]=function(){return(_emscripten_sync_run_in_main_thread_4=Module[\"_emscripten_sync_run_in_main_thread_4\"]=Module[\"asm\"][\"jb\"]).apply(null,arguments)};var _emscripten_sync_run_in_main_thread_5=Module[\"_emscripten_sync_run_in_main_thread_5\"]=function(){return(_emscripten_sync_run_in_main_thread_5=Module[\"_emscripten_sync_run_in_main_thread_5\"]=Module[\"asm\"][\"kb\"]).apply(null,arguments)};var _emscripten_sync_run_in_main_thread_6=Module[\"_emscripten_sync_run_in_main_thread_6\"]=function(){return(_emscripten_sync_run_in_main_thread_6=Module[\"_emscripten_sync_run_in_main_thread_6\"]=Module[\"asm\"][\"lb\"]).apply(null,arguments)};var _emscripten_sync_run_in_main_thread_7=Module[\"_emscripten_sync_run_in_main_thread_7\"]=function(){return(_emscripten_sync_run_in_main_thread_7=Module[\"_emscripten_sync_run_in_main_thread_7\"]=Module[\"asm\"][\"mb\"]).apply(null,arguments)};var 
_emscripten_run_in_main_runtime_thread_js=Module[\"_emscripten_run_in_main_runtime_thread_js\"]=function(){return(_emscripten_run_in_main_runtime_thread_js=Module[\"_emscripten_run_in_main_runtime_thread_js\"]=Module[\"asm\"][\"nb\"]).apply(null,arguments)};var _emscripten_async_queue_on_thread_=Module[\"_emscripten_async_queue_on_thread_\"]=function(){return(_emscripten_async_queue_on_thread_=Module[\"_emscripten_async_queue_on_thread_\"]=Module[\"asm\"][\"ob\"]).apply(null,arguments)};var _emscripten_tls_init=Module[\"_emscripten_tls_init\"]=function(){return(_emscripten_tls_init=Module[\"_emscripten_tls_init\"]=Module[\"asm\"][\"pb\"]).apply(null,arguments)};var stackSave=Module[\"stackSave\"]=function(){return(stackSave=Module[\"stackSave\"]=Module[\"asm\"][\"qb\"]).apply(null,arguments)};var stackAlloc=Module[\"stackAlloc\"]=function(){return(stackAlloc=Module[\"stackAlloc\"]=Module[\"asm\"][\"rb\"]).apply(null,arguments)};var stackRestore=Module[\"stackRestore\"]=function(){return(stackRestore=Module[\"stackRestore\"]=Module[\"asm\"][\"sb\"]).apply(null,arguments)};var dynCall_vi=Module[\"dynCall_vi\"]=function(){return(dynCall_vi=Module[\"dynCall_vi\"]=Module[\"asm\"][\"tb\"]).apply(null,arguments)};var dynCall_v=Module[\"dynCall_v\"]=function(){return(dynCall_v=Module[\"dynCall_v\"]=Module[\"asm\"][\"ub\"]).apply(null,arguments)};var dynCall_ii=Module[\"dynCall_ii\"]=function(){return(dynCall_ii=Module[\"dynCall_ii\"]=Module[\"asm\"][\"vb\"]).apply(null,arguments)};Module[\"asm\"]=asm;Module[\"cwrap\"]=cwrap;Module[\"PThread\"]=PThread;Module[\"PThread\"]=PThread;Module[\"_pthread_self\"]=_pthread_self;Module[\"wasmMemory\"]=wasmMemory;Module[\"ExitStatus\"]=ExitStatus;var calledRun;Module[\"then\"]=function(func){if(calledRun){func(Module)}else{var old=Module[\"onRuntimeInitialized\"];Module[\"onRuntimeInitialized\"]=function(){if(old)old();func(Module)}}return Module};function ExitStatus(status){this.name=\"ExitStatus\";this.message=\"Program terminated with exit(\"+status+\")\";this.status=status}dependenciesFulfilled=function runCaller(){if(!calledRun)run();if(!calledRun)dependenciesFulfilled=runCaller};function run(args){args=args||arguments_;if(runDependencies>0){return}preRun();if(runDependencies>0)return;function doRun(){if(calledRun)return;calledRun=true;Module[\"calledRun\"]=true;if(ABORT)return;initRuntime();preMain();if(Module[\"onRuntimeInitialized\"])Module[\"onRuntimeInitialized\"]();postRun()}if(Module[\"setStatus\"]){Module[\"setStatus\"](\"Running...\");setTimeout(function(){setTimeout(function(){Module[\"setStatus\"](\"\")},1);doRun()},1)}else{doRun()}}Module[\"run\"]=run;if(Module[\"preInit\"]){if(typeof Module[\"preInit\"]==\"function\")Module[\"preInit\"]=[Module[\"preInit\"]];while(Module[\"preInit\"].length>0){Module[\"preInit\"].pop()()}}if(!ENVIRONMENT_IS_PTHREAD)noExitRuntime=true;if(!ENVIRONMENT_IS_PTHREAD)run();\n\n\n return WasmBackendModuleThreadedSimd\n}\n);\n})();\nif (typeof exports === 'object' && typeof module === 'object')\n module.exports = WasmBackendModuleThreadedSimd;\n else if (typeof define === 'function' && define['amd'])\n define([], function() { return WasmBackendModuleThreadedSimd; });\n else if (typeof exports === 'object')\n exports[\"WasmBackendModuleThreadedSimd\"] = WasmBackendModuleThreadedSimd;\n ", "\nvar WasmBackendModule = (function() {\n var _scriptDir = typeof document !== 'undefined' && document.currentScript ? 
document.currentScript.src : undefined;\n if (typeof __filename !== 'undefined') _scriptDir = _scriptDir || __filename;\n return (\nfunction(WasmBackendModule) {\n WasmBackendModule = WasmBackendModule || {};\n\nvar Module=typeof WasmBackendModule!==\"undefined\"?WasmBackendModule:{};var moduleOverrides={};var key;for(key in Module){if(Module.hasOwnProperty(key)){moduleOverrides[key]=Module[key]}}var arguments_=[];var thisProgram=\"./this.program\";var quit_=function(status,toThrow){throw toThrow};var ENVIRONMENT_IS_WEB=false;var ENVIRONMENT_IS_WORKER=false;var ENVIRONMENT_IS_NODE=false;var ENVIRONMENT_IS_SHELL=false;ENVIRONMENT_IS_WEB=typeof window===\"object\";ENVIRONMENT_IS_WORKER=typeof importScripts===\"function\";ENVIRONMENT_IS_NODE=typeof process===\"object\"&&typeof process.versions===\"object\"&&typeof process.versions.node===\"string\";ENVIRONMENT_IS_SHELL=!ENVIRONMENT_IS_WEB&&!ENVIRONMENT_IS_NODE&&!ENVIRONMENT_IS_WORKER;var scriptDirectory=\"\";function locateFile(path){if(Module[\"locateFile\"]){return Module[\"locateFile\"](path,scriptDirectory)}return scriptDirectory+path}var read_,readAsync,readBinary,setWindowTitle;var nodeFS;var nodePath;if(ENVIRONMENT_IS_NODE){if(ENVIRONMENT_IS_WORKER){scriptDirectory=require(\"path\").dirname(scriptDirectory)+\"/\"}else{scriptDirectory=__dirname+\"/\"}read_=function shell_read(filename,binary){if(!nodeFS)nodeFS=require(\"fs\");if(!nodePath)nodePath=require(\"path\");filename=nodePath[\"normalize\"](filename);return nodeFS[\"readFileSync\"](filename,binary?null:\"utf8\")};readBinary=function readBinary(filename){var ret=read_(filename,true);if(!ret.buffer){ret=new Uint8Array(ret)}assert(ret.buffer);return ret};if(process[\"argv\"].length>1){thisProgram=process[\"argv\"][1].replace(/\\\\/g,\"/\")}arguments_=process[\"argv\"].slice(2);process[\"on\"](\"uncaughtException\",function(ex){if(!(ex instanceof ExitStatus)){throw ex}});process[\"on\"](\"unhandledRejection\",abort);quit_=function(status){process[\"exit\"](status)};Module[\"inspect\"]=function(){return\"[Emscripten Module object]\"}}else if(ENVIRONMENT_IS_SHELL){if(typeof read!=\"undefined\"){read_=function shell_read(f){return read(f)}}readBinary=function readBinary(f){var data;if(typeof readbuffer===\"function\"){return new Uint8Array(readbuffer(f))}data=read(f,\"binary\");assert(typeof data===\"object\");return data};if(typeof scriptArgs!=\"undefined\"){arguments_=scriptArgs}else if(typeof arguments!=\"undefined\"){arguments_=arguments}if(typeof quit===\"function\"){quit_=function(status){quit(status)}}if(typeof print!==\"undefined\"){if(typeof console===\"undefined\")console={};console.log=print;console.warn=console.error=typeof printErr!==\"undefined\"?printErr:print}}else if(ENVIRONMENT_IS_WEB||ENVIRONMENT_IS_WORKER){if(ENVIRONMENT_IS_WORKER){scriptDirectory=self.location.href}else if(document.currentScript){scriptDirectory=document.currentScript.src}if(_scriptDir){scriptDirectory=_scriptDir}if(scriptDirectory.indexOf(\"blob:\")!==0){scriptDirectory=scriptDirectory.substr(0,scriptDirectory.lastIndexOf(\"/\")+1)}else{scriptDirectory=\"\"}{read_=function shell_read(url){var xhr=new XMLHttpRequest;xhr.open(\"GET\",url,false);xhr.send(null);return xhr.responseText};if(ENVIRONMENT_IS_WORKER){readBinary=function readBinary(url){var xhr=new XMLHttpRequest;xhr.open(\"GET\",url,false);xhr.responseType=\"arraybuffer\";xhr.send(null);return new Uint8Array(xhr.response)}}readAsync=function readAsync(url,onload,onerror){var xhr=new 
XMLHttpRequest;xhr.open(\"GET\",url,true);xhr.responseType=\"arraybuffer\";xhr.onload=function xhr_onload(){if(xhr.status==200||xhr.status==0&&xhr.response){onload(xhr.response);return}onerror()};xhr.onerror=onerror;xhr.send(null)}}setWindowTitle=function(title){document.title=title}}else{}var out=Module[\"print\"]||console.log.bind(console);var err=Module[\"printErr\"]||console.warn.bind(console);for(key in moduleOverrides){if(moduleOverrides.hasOwnProperty(key)){Module[key]=moduleOverrides[key]}}moduleOverrides=null;if(Module[\"arguments\"])arguments_=Module[\"arguments\"];if(Module[\"thisProgram\"])thisProgram=Module[\"thisProgram\"];if(Module[\"quit\"])quit_=Module[\"quit\"];var wasmBinary;if(Module[\"wasmBinary\"])wasmBinary=Module[\"wasmBinary\"];var noExitRuntime;if(Module[\"noExitRuntime\"])noExitRuntime=Module[\"noExitRuntime\"];if(typeof WebAssembly!==\"object\"){err(\"no native wasm support detected\")}var wasmMemory;var wasmTable=new WebAssembly.Table({\"initial\":147,\"maximum\":147+0,\"element\":\"anyfunc\"});var ABORT=false;var EXITSTATUS=0;function assert(condition,text){if(!condition){abort(\"Assertion failed: \"+text)}}function getCFunc(ident){var func=Module[\"_\"+ident];assert(func,\"Cannot call unknown function \"+ident+\", make sure it is exported\");return func}function ccall(ident,returnType,argTypes,args,opts){var toC={\"string\":function(str){var ret=0;if(str!==null&&str!==undefined&&str!==0){var len=(str.length<<2)+1;ret=stackAlloc(len);stringToUTF8(str,ret,len)}return ret},\"array\":function(arr){var ret=stackAlloc(arr.length);writeArrayToMemory(arr,ret);return ret}};function convertReturnValue(ret){if(returnType===\"string\")return UTF8ToString(ret);if(returnType===\"boolean\")return Boolean(ret);return ret}var func=getCFunc(ident);var cArgs=[];var stack=0;if(args){for(var i=0;i=endIdx))++endPtr;if(endPtr-idx>16&&heap.subarray&&UTF8Decoder){return UTF8Decoder.decode(heap.subarray(idx,endPtr))}else{var str=\"\";while(idx>10,56320|ch&1023)}}}return str}function UTF8ToString(ptr,maxBytesToRead){return ptr?UTF8ArrayToString(HEAPU8,ptr,maxBytesToRead):\"\"}function stringToUTF8Array(str,heap,outIdx,maxBytesToWrite){if(!(maxBytesToWrite>0))return 0;var startIdx=outIdx;var endIdx=outIdx+maxBytesToWrite-1;for(var i=0;i=55296&&u<=57343){var u1=str.charCodeAt(++i);u=65536+((u&1023)<<10)|u1&1023}if(u<=127){if(outIdx>=endIdx)break;heap[outIdx++]=u}else if(u<=2047){if(outIdx+1>=endIdx)break;heap[outIdx++]=192|u>>6;heap[outIdx++]=128|u&63}else if(u<=65535){if(outIdx+2>=endIdx)break;heap[outIdx++]=224|u>>12;heap[outIdx++]=128|u>>6&63;heap[outIdx++]=128|u&63}else{if(outIdx+3>=endIdx)break;heap[outIdx++]=240|u>>18;heap[outIdx++]=128|u>>12&63;heap[outIdx++]=128|u>>6&63;heap[outIdx++]=128|u&63}}heap[outIdx]=0;return outIdx-startIdx}function stringToUTF8(str,outPtr,maxBytesToWrite){return stringToUTF8Array(str,HEAPU8,outPtr,maxBytesToWrite)}function writeArrayToMemory(array,buffer){HEAP8.set(array,buffer)}var buffer,HEAP8,HEAPU8,HEAP16,HEAPU16,HEAP32,HEAPU32,HEAPF32,HEAPF64;function updateGlobalBufferAndViews(buf){buffer=buf;Module[\"HEAP8\"]=HEAP8=new Int8Array(buf);Module[\"HEAP16\"]=HEAP16=new Int16Array(buf);Module[\"HEAP32\"]=HEAP32=new Int32Array(buf);Module[\"HEAPU8\"]=HEAPU8=new Uint8Array(buf);Module[\"HEAPU16\"]=HEAPU16=new Uint16Array(buf);Module[\"HEAPU32\"]=HEAPU32=new Uint32Array(buf);Module[\"HEAPF32\"]=HEAPF32=new Float32Array(buf);Module[\"HEAPF64\"]=HEAPF64=new Float64Array(buf)}var INITIAL_INITIAL_MEMORY=Module[\"INITIAL_MEMORY\"]||16777216;function 
callRuntimeCallbacks(callbacks){while(callbacks.length>0){var callback=callbacks.shift();if(typeof callback==\"function\"){callback(Module);continue}var func=callback.func;if(typeof func===\"number\"){if(callback.arg===undefined){Module[\"dynCall_v\"](func)}else{Module[\"dynCall_vi\"](func,callback.arg)}}else{func(callback.arg===undefined?null:callback.arg)}}}var __ATPRERUN__=[];var __ATINIT__=[];var __ATMAIN__=[];var __ATPOSTRUN__=[];var runtimeInitialized=false;var runtimeExited=false;function preRun(){if(Module[\"preRun\"]){if(typeof Module[\"preRun\"]==\"function\")Module[\"preRun\"]=[Module[\"preRun\"]];while(Module[\"preRun\"].length){addOnPreRun(Module[\"preRun\"].shift())}}callRuntimeCallbacks(__ATPRERUN__)}function initRuntime(){runtimeInitialized=true;callRuntimeCallbacks(__ATINIT__)}function preMain(){callRuntimeCallbacks(__ATMAIN__)}function exitRuntime(){runtimeExited=true}function postRun(){if(Module[\"postRun\"]){if(typeof Module[\"postRun\"]==\"function\")Module[\"postRun\"]=[Module[\"postRun\"]];while(Module[\"postRun\"].length){addOnPostRun(Module[\"postRun\"].shift())}}callRuntimeCallbacks(__ATPOSTRUN__)}function addOnPreRun(cb){__ATPRERUN__.unshift(cb)}function addOnPostRun(cb){__ATPOSTRUN__.unshift(cb)}var Math_ceil=Math.ceil;var Math_floor=Math.floor;var runDependencies=0;var runDependencyWatcher=null;var dependenciesFulfilled=null;function addRunDependency(id){runDependencies++;if(Module[\"monitorRunDependencies\"]){Module[\"monitorRunDependencies\"](runDependencies)}}function removeRunDependency(id){runDependencies--;if(Module[\"monitorRunDependencies\"]){Module[\"monitorRunDependencies\"](runDependencies)}if(runDependencies==0){if(runDependencyWatcher!==null){clearInterval(runDependencyWatcher);runDependencyWatcher=null}if(dependenciesFulfilled){var callback=dependenciesFulfilled;dependenciesFulfilled=null;callback()}}}Module[\"preloadedImages\"]={};Module[\"preloadedAudios\"]={};function abort(what){if(Module[\"onAbort\"]){Module[\"onAbort\"](what)}what+=\"\";out(what);err(what);ABORT=true;EXITSTATUS=1;what=\"abort(\"+what+\"). 
Build with -s ASSERTIONS=1 for more info.\";throw new WebAssembly.RuntimeError(what)}function hasPrefix(str,prefix){return String.prototype.startsWith?str.startsWith(prefix):str.indexOf(prefix)===0}var dataURIPrefix=\"data:application/octet-stream;base64,\";function isDataURI(filename){return hasPrefix(filename,dataURIPrefix)}var fileURIPrefix=\"file://\";function isFileURI(filename){return hasPrefix(filename,fileURIPrefix)}var wasmBinaryFile=\"tfjs-backend-wasm.wasm\";if(!isDataURI(wasmBinaryFile)){wasmBinaryFile=locateFile(wasmBinaryFile)}function getBinary(){try{if(wasmBinary){return new Uint8Array(wasmBinary)}if(readBinary){return readBinary(wasmBinaryFile)}else{throw\"both async and sync fetching of the wasm failed\"}}catch(err){abort(err)}}function getBinaryPromise(){if(!wasmBinary&&(ENVIRONMENT_IS_WEB||ENVIRONMENT_IS_WORKER)&&typeof fetch===\"function\"&&!isFileURI(wasmBinaryFile)){return fetch(wasmBinaryFile,{credentials:\"same-origin\"}).then(function(response){if(!response[\"ok\"]){throw\"failed to load wasm binary file at '\"+wasmBinaryFile+\"'\"}return response[\"arrayBuffer\"]()}).catch(function(){return getBinary()})}return new Promise(function(resolve,reject){resolve(getBinary())})}function createWasm(){var info={\"env\":asmLibraryArg,\"wasi_snapshot_preview1\":asmLibraryArg};function receiveInstance(instance,module){var exports=instance.exports;Module[\"asm\"]=exports;wasmMemory=exports[\"memory\"];updateGlobalBufferAndViews(wasmMemory.buffer);removeRunDependency(\"wasm-instantiate\")}addRunDependency(\"wasm-instantiate\");function receiveInstantiatedSource(output){receiveInstance(output[\"instance\"])}function instantiateArrayBuffer(receiver){return getBinaryPromise().then(function(binary){return WebAssembly.instantiate(binary,info)}).then(receiver,function(reason){err(\"failed to asynchronously prepare wasm: \"+reason);abort(reason)})}function instantiateAsync(){if(!wasmBinary&&typeof WebAssembly.instantiateStreaming===\"function\"&&!isDataURI(wasmBinaryFile)&&!isFileURI(wasmBinaryFile)&&typeof fetch===\"function\"){fetch(wasmBinaryFile,{credentials:\"same-origin\"}).then(function(response){var result=WebAssembly.instantiateStreaming(response,info);return result.then(receiveInstantiatedSource,function(reason){err(\"wasm streaming compile failed: \"+reason);err(\"falling back to ArrayBuffer instantiation\");instantiateArrayBuffer(receiveInstantiatedSource)})})}else{return instantiateArrayBuffer(receiveInstantiatedSource)}}if(Module[\"instantiateWasm\"]){try{var exports=Module[\"instantiateWasm\"](info,receiveInstance);return exports}catch(e){err(\"Module.instantiateWasm callback failed with error: \"+e);return false}}instantiateAsync();return{}}__ATINIT__.push();function _emscripten_notify_memory_growth(memoryIndex){updateGlobalBufferAndViews(wasmMemory.buffer)}var PATH={splitPath:function(filename){var splitPathRe=/^(\\/?|)([\\s\\S]*?)((?:\\.{1,2}|[^\\/]+?|)(\\.[^.\\/]*|))(?:[\\/]*)$/;return splitPathRe.exec(filename).slice(1)},normalizeArray:function(parts,allowAboveRoot){var up=0;for(var i=parts.length-1;i>=0;i--){var last=parts[i];if(last===\".\"){parts.splice(i,1)}else if(last===\"..\"){parts.splice(i,1);up++}else if(up){parts.splice(i,1);up--}}if(allowAboveRoot){for(;up;up--){parts.unshift(\"..\")}}return parts},normalize:function(path){var 
isAbsolute=path.charAt(0)===\"/\",trailingSlash=path.substr(-1)===\"/\";path=PATH.normalizeArray(path.split(\"/\").filter(function(p){return!!p}),!isAbsolute).join(\"/\");if(!path&&!isAbsolute){path=\".\"}if(path&&trailingSlash){path+=\"/\"}return(isAbsolute?\"/\":\"\")+path},dirname:function(path){var result=PATH.splitPath(path),root=result[0],dir=result[1];if(!root&&!dir){return\".\"}if(dir){dir=dir.substr(0,dir.length-1)}return root+dir},basename:function(path){if(path===\"/\")return\"/\";var lastSlash=path.lastIndexOf(\"/\");if(lastSlash===-1)return path;return path.substr(lastSlash+1)},extname:function(path){return PATH.splitPath(path)[3]},join:function(){var paths=Array.prototype.slice.call(arguments,0);return PATH.normalize(paths.join(\"/\"))},join2:function(l,r){return PATH.normalize(l+\"/\"+r)}};var SYSCALLS={mappings:{},buffers:[null,[],[]],printChar:function(stream,curr){var buffer=SYSCALLS.buffers[stream];if(curr===0||curr===10){(stream===1?out:err)(UTF8ArrayToString(buffer,0));buffer.length=0}else{buffer.push(curr)}},varargs:undefined,get:function(){SYSCALLS.varargs+=4;var ret=HEAP32[SYSCALLS.varargs-4>>2];return ret},getStr:function(ptr){var ret=UTF8ToString(ptr);return ret},get64:function(low,high){return low}};function _fd_close(fd){return 0}function _fd_seek(fd,offset_low,offset_high,whence,newOffset){}function _fd_write(fd,iov,iovcnt,pnum){var num=0;for(var i=0;i<iovcnt;i++){var ptr=HEAP32[iov+i*8>>2];var len=HEAP32[iov+(i*8+4)>>2];for(var j=0;j<len;j++){SYSCALLS.printChar(fd,HEAPU8[ptr+j])}num+=len}HEAP32[pnum>>2]=num;return 0}function _exit(status){exit(status)}function _proc_exit(code){_exit(code)}function _roundf(d){d=+d;return d>=+0?+Math_floor(d+ +.5):+Math_ceil(d-+.5)}var asmLibraryArg={\"emscripten_notify_memory_growth\":_emscripten_notify_memory_growth,\"fd_close\":_fd_close,\"fd_seek\":_fd_seek,\"fd_write\":_fd_write,\"proc_exit\":_proc_exit,\"roundf\":_roundf};var asm=createWasm();Module[\"asm\"]=asm;var _init=Module[\"_init\"]=function(){return(_init=Module[\"_init\"]=Module[\"asm\"][\"init\"]).apply(null,arguments)};var _register_tensor=Module[\"_register_tensor\"]=function(){return(_register_tensor=Module[\"_register_tensor\"]=Module[\"asm\"][\"register_tensor\"]).apply(null,arguments)};var _dispose_data=Module[\"_dispose_data\"]=function(){return(_dispose_data=Module[\"_dispose_data\"]=Module[\"asm\"][\"dispose_data\"]).apply(null,arguments)};var _dispose=Module[\"_dispose\"]=function(){return(_dispose=Module[\"_dispose\"]=Module[\"asm\"][\"dispose\"]).apply(null,arguments)};var _Abs=Module[\"_Abs\"]=function(){return(_Abs=Module[\"_Abs\"]=Module[\"asm\"][\"Abs\"]).apply(null,arguments)};var _Add=Module[\"_Add\"]=function(){return(_Add=Module[\"_Add\"]=Module[\"asm\"][\"Add\"]).apply(null,arguments)};var _AddN=Module[\"_AddN\"]=function(){return(_AddN=Module[\"_AddN\"]=Module[\"asm\"][\"AddN\"]).apply(null,arguments)};var _ArgMax=Module[\"_ArgMax\"]=function(){return(_ArgMax=Module[\"_ArgMax\"]=Module[\"asm\"][\"ArgMax\"]).apply(null,arguments)};var _AvgPool=Module[\"_AvgPool\"]=function(){return(_AvgPool=Module[\"_AvgPool\"]=Module[\"asm\"][\"AvgPool\"]).apply(null,arguments)};var _BatchMatMul=Module[\"_BatchMatMul\"]=function(){return(_BatchMatMul=Module[\"_BatchMatMul\"]=Module[\"asm\"][\"BatchMatMul\"]).apply(null,arguments)};var _ClipByValue=Module[\"_ClipByValue\"]=function(){return(_ClipByValue=Module[\"_ClipByValue\"]=Module[\"asm\"][\"ClipByValue\"]).apply(null,arguments)};var _Conv2D=Module[\"_Conv2D\"]=function(){return(_Conv2D=Module[\"_Conv2D\"]=Module[\"asm\"][\"Conv2D\"]).apply(null,arguments)};var 
_Conv2DBackpropInput=Module[\"_Conv2DBackpropInput\"]=function(){return(_Conv2DBackpropInput=Module[\"_Conv2DBackpropInput\"]=Module[\"asm\"][\"Conv2DBackpropInput\"]).apply(null,arguments)};var _Cos=Module[\"_Cos\"]=function(){return(_Cos=Module[\"_Cos\"]=Module[\"asm\"][\"Cos\"]).apply(null,arguments)};var _CropAndResize=Module[\"_CropAndResize\"]=function(){return(_CropAndResize=Module[\"_CropAndResize\"]=Module[\"asm\"][\"CropAndResize\"]).apply(null,arguments)};var _Cumsum=Module[\"_Cumsum\"]=function(){return(_Cumsum=Module[\"_Cumsum\"]=Module[\"asm\"][\"Cumsum\"]).apply(null,arguments)};var _DepthToSpace=Module[\"_DepthToSpace\"]=function(){return(_DepthToSpace=Module[\"_DepthToSpace\"]=Module[\"asm\"][\"DepthToSpace\"]).apply(null,arguments)};var _DepthwiseConv2dNative=Module[\"_DepthwiseConv2dNative\"]=function(){return(_DepthwiseConv2dNative=Module[\"_DepthwiseConv2dNative\"]=Module[\"asm\"][\"DepthwiseConv2dNative\"]).apply(null,arguments)};var _Div=Module[\"_Div\"]=function(){return(_Div=Module[\"_Div\"]=Module[\"asm\"][\"Div\"]).apply(null,arguments)};var _Equal=Module[\"_Equal\"]=function(){return(_Equal=Module[\"_Equal\"]=Module[\"asm\"][\"Equal\"]).apply(null,arguments)};var _Exp=Module[\"_Exp\"]=function(){return(_Exp=Module[\"_Exp\"]=Module[\"asm\"][\"Exp\"]).apply(null,arguments)};var _FlipLeftRight=Module[\"_FlipLeftRight\"]=function(){return(_FlipLeftRight=Module[\"_FlipLeftRight\"]=Module[\"asm\"][\"FlipLeftRight\"]).apply(null,arguments)};var _FloorDiv=Module[\"_FloorDiv\"]=function(){return(_FloorDiv=Module[\"_FloorDiv\"]=Module[\"asm\"][\"FloorDiv\"]).apply(null,arguments)};var _FusedBatchNorm=Module[\"_FusedBatchNorm\"]=function(){return(_FusedBatchNorm=Module[\"_FusedBatchNorm\"]=Module[\"asm\"][\"FusedBatchNorm\"]).apply(null,arguments)};var _FusedConv2D=Module[\"_FusedConv2D\"]=function(){return(_FusedConv2D=Module[\"_FusedConv2D\"]=Module[\"asm\"][\"FusedConv2D\"]).apply(null,arguments)};var _FusedDepthwiseConv2D=Module[\"_FusedDepthwiseConv2D\"]=function(){return(_FusedDepthwiseConv2D=Module[\"_FusedDepthwiseConv2D\"]=Module[\"asm\"][\"FusedDepthwiseConv2D\"]).apply(null,arguments)};var _Gather=Module[\"_Gather\"]=function(){return(_Gather=Module[\"_Gather\"]=Module[\"asm\"][\"Gather\"]).apply(null,arguments)};var _GatherNd=Module[\"_GatherNd\"]=function(){return(_GatherNd=Module[\"_GatherNd\"]=Module[\"asm\"][\"GatherNd\"]).apply(null,arguments)};var _Greater=Module[\"_Greater\"]=function(){return(_Greater=Module[\"_Greater\"]=Module[\"asm\"][\"Greater\"]).apply(null,arguments)};var _GreaterEqual=Module[\"_GreaterEqual\"]=function(){return(_GreaterEqual=Module[\"_GreaterEqual\"]=Module[\"asm\"][\"GreaterEqual\"]).apply(null,arguments)};var _Less=Module[\"_Less\"]=function(){return(_Less=Module[\"_Less\"]=Module[\"asm\"][\"Less\"]).apply(null,arguments)};var _LessEqual=Module[\"_LessEqual\"]=function(){return(_LessEqual=Module[\"_LessEqual\"]=Module[\"asm\"][\"LessEqual\"]).apply(null,arguments)};var _Log=Module[\"_Log\"]=function(){return(_Log=Module[\"_Log\"]=Module[\"asm\"][\"Log\"]).apply(null,arguments)};var _LogicalAnd=Module[\"_LogicalAnd\"]=function(){return(_LogicalAnd=Module[\"_LogicalAnd\"]=Module[\"asm\"][\"LogicalAnd\"]).apply(null,arguments)};var _Max=Module[\"_Max\"]=function(){return(_Max=Module[\"_Max\"]=Module[\"asm\"][\"Max\"]).apply(null,arguments)};var _MaxPool=Module[\"_MaxPool\"]=function(){return(_MaxPool=Module[\"_MaxPool\"]=Module[\"asm\"][\"MaxPool\"]).apply(null,arguments)};var 
_Maximum=Module[\"_Maximum\"]=function(){return(_Maximum=Module[\"_Maximum\"]=Module[\"asm\"][\"Maximum\"]).apply(null,arguments)};var _Min=Module[\"_Min\"]=function(){return(_Min=Module[\"_Min\"]=Module[\"asm\"][\"Min\"]).apply(null,arguments)};var _Minimum=Module[\"_Minimum\"]=function(){return(_Minimum=Module[\"_Minimum\"]=Module[\"asm\"][\"Minimum\"]).apply(null,arguments)};var _Multiply=Module[\"_Multiply\"]=function(){return(_Multiply=Module[\"_Multiply\"]=Module[\"asm\"][\"Multiply\"]).apply(null,arguments)};var _Negate=Module[\"_Negate\"]=function(){return(_Negate=Module[\"_Negate\"]=Module[\"asm\"][\"Negate\"]).apply(null,arguments)};var _NonMaxSuppressionV3=Module[\"_NonMaxSuppressionV3\"]=function(){return(_NonMaxSuppressionV3=Module[\"_NonMaxSuppressionV3\"]=Module[\"asm\"][\"NonMaxSuppressionV3\"]).apply(null,arguments)};var _NonMaxSuppressionV4=Module[\"_NonMaxSuppressionV4\"]=function(){return(_NonMaxSuppressionV4=Module[\"_NonMaxSuppressionV4\"]=Module[\"asm\"][\"NonMaxSuppressionV4\"]).apply(null,arguments)};var _NonMaxSuppressionV5=Module[\"_NonMaxSuppressionV5\"]=function(){return(_NonMaxSuppressionV5=Module[\"_NonMaxSuppressionV5\"]=Module[\"asm\"][\"NonMaxSuppressionV5\"]).apply(null,arguments)};var _NotEqual=Module[\"_NotEqual\"]=function(){return(_NotEqual=Module[\"_NotEqual\"]=Module[\"asm\"][\"NotEqual\"]).apply(null,arguments)};var _OneHot=Module[\"_OneHot\"]=function(){return(_OneHot=Module[\"_OneHot\"]=Module[\"asm\"][\"OneHot\"]).apply(null,arguments)};var _PadV2=Module[\"_PadV2\"]=function(){return(_PadV2=Module[\"_PadV2\"]=Module[\"asm\"][\"PadV2\"]).apply(null,arguments)};var _Pow=Module[\"_Pow\"]=function(){return(_Pow=Module[\"_Pow\"]=Module[\"asm\"][\"Pow\"]).apply(null,arguments)};var _Prelu=Module[\"_Prelu\"]=function(){return(_Prelu=Module[\"_Prelu\"]=Module[\"asm\"][\"Prelu\"]).apply(null,arguments)};var _Relu=Module[\"_Relu\"]=function(){return(_Relu=Module[\"_Relu\"]=Module[\"asm\"][\"Relu\"]).apply(null,arguments)};var _Relu6=Module[\"_Relu6\"]=function(){return(_Relu6=Module[\"_Relu6\"]=Module[\"asm\"][\"Relu6\"]).apply(null,arguments)};var _ResizeBilinear=Module[\"_ResizeBilinear\"]=function(){return(_ResizeBilinear=Module[\"_ResizeBilinear\"]=Module[\"asm\"][\"ResizeBilinear\"]).apply(null,arguments)};var _Reverse=Module[\"_Reverse\"]=function(){return(_Reverse=Module[\"_Reverse\"]=Module[\"asm\"][\"Reverse\"]).apply(null,arguments)};var _RotateWithOffset=Module[\"_RotateWithOffset\"]=function(){return(_RotateWithOffset=Module[\"_RotateWithOffset\"]=Module[\"asm\"][\"RotateWithOffset\"]).apply(null,arguments)};var _Rsqrt=Module[\"_Rsqrt\"]=function(){return(_Rsqrt=Module[\"_Rsqrt\"]=Module[\"asm\"][\"Rsqrt\"]).apply(null,arguments)};var _ScatterNd=Module[\"_ScatterNd\"]=function(){return(_ScatterNd=Module[\"_ScatterNd\"]=Module[\"asm\"][\"ScatterNd\"]).apply(null,arguments)};var _SelectV2=Module[\"_SelectV2\"]=function(){return(_SelectV2=Module[\"_SelectV2\"]=Module[\"asm\"][\"SelectV2\"]).apply(null,arguments)};var _Sigmoid=Module[\"_Sigmoid\"]=function(){return(_Sigmoid=Module[\"_Sigmoid\"]=Module[\"asm\"][\"Sigmoid\"]).apply(null,arguments)};var _Sin=Module[\"_Sin\"]=function(){return(_Sin=Module[\"_Sin\"]=Module[\"asm\"][\"Sin\"]).apply(null,arguments)};var _Softmax=Module[\"_Softmax\"]=function(){return(_Softmax=Module[\"_Softmax\"]=Module[\"asm\"][\"Softmax\"]).apply(null,arguments)};var _Sqrt=Module[\"_Sqrt\"]=function(){return(_Sqrt=Module[\"_Sqrt\"]=Module[\"asm\"][\"Sqrt\"]).apply(null,arguments)};var 
_Square=Module[\"_Square\"]=function(){return(_Square=Module[\"_Square\"]=Module[\"asm\"][\"Square\"]).apply(null,arguments)};var _SquaredDifference=Module[\"_SquaredDifference\"]=function(){return(_SquaredDifference=Module[\"_SquaredDifference\"]=Module[\"asm\"][\"SquaredDifference\"]).apply(null,arguments)};var _StridedSlice=Module[\"_StridedSlice\"]=function(){return(_StridedSlice=Module[\"_StridedSlice\"]=Module[\"asm\"][\"StridedSlice\"]).apply(null,arguments)};var _Sub=Module[\"_Sub\"]=function(){return(_Sub=Module[\"_Sub\"]=Module[\"asm\"][\"Sub\"]).apply(null,arguments)};var _Sum=Module[\"_Sum\"]=function(){return(_Sum=Module[\"_Sum\"]=Module[\"asm\"][\"Sum\"]).apply(null,arguments)};var _Tanh=Module[\"_Tanh\"]=function(){return(_Tanh=Module[\"_Tanh\"]=Module[\"asm\"][\"Tanh\"]).apply(null,arguments)};var _Tile=Module[\"_Tile\"]=function(){return(_Tile=Module[\"_Tile\"]=Module[\"asm\"][\"Tile\"]).apply(null,arguments)};var _Transpose=Module[\"_Transpose\"]=function(){return(_Transpose=Module[\"_Transpose\"]=Module[\"asm\"][\"Transpose\"]).apply(null,arguments)};var __FusedMatMul=Module[\"__FusedMatMul\"]=function(){return(__FusedMatMul=Module[\"__FusedMatMul\"]=Module[\"asm\"][\"_FusedMatMul\"]).apply(null,arguments)};var _malloc=Module[\"_malloc\"]=function(){return(_malloc=Module[\"_malloc\"]=Module[\"asm\"][\"malloc\"]).apply(null,arguments)};var _free=Module[\"_free\"]=function(){return(_free=Module[\"_free\"]=Module[\"asm\"][\"free\"]).apply(null,arguments)};var __start=Module[\"__start\"]=function(){return(__start=Module[\"__start\"]=Module[\"asm\"][\"_start\"]).apply(null,arguments)};var stackSave=Module[\"stackSave\"]=function(){return(stackSave=Module[\"stackSave\"]=Module[\"asm\"][\"stackSave\"]).apply(null,arguments)};var stackAlloc=Module[\"stackAlloc\"]=function(){return(stackAlloc=Module[\"stackAlloc\"]=Module[\"asm\"][\"stackAlloc\"]).apply(null,arguments)};var stackRestore=Module[\"stackRestore\"]=function(){return(stackRestore=Module[\"stackRestore\"]=Module[\"asm\"][\"stackRestore\"]).apply(null,arguments)};Module[\"asm\"]=asm;Module[\"cwrap\"]=cwrap;var calledRun;Module[\"then\"]=function(func){if(calledRun){func(Module)}else{var old=Module[\"onRuntimeInitialized\"];Module[\"onRuntimeInitialized\"]=function(){if(old)old();func(Module)}}return Module};function ExitStatus(status){this.name=\"ExitStatus\";this.message=\"Program terminated with exit(\"+status+\")\";this.status=status}var calledMain=false;dependenciesFulfilled=function runCaller(){if(!calledRun)run();if(!calledRun)dependenciesFulfilled=runCaller};function callMain(args){var entryFunction=Module[\"__start\"];try{entryFunction();var ret=0;exit(ret,true)}catch(e){if(e instanceof ExitStatus){return}else if(e==\"unwind\"){noExitRuntime=true;return}else{var toLog=e;if(e&&typeof e===\"object\"&&e.stack){toLog=[e,e.stack]}err(\"exception thrown: \"+toLog);quit_(1,e)}}finally{calledMain=true}}function run(args){args=args||arguments_;if(runDependencies>0){return}preRun();if(runDependencies>0)return;function doRun(){if(calledRun)return;calledRun=true;Module[\"calledRun\"]=true;if(ABORT)return;initRuntime();preMain();if(Module[\"onRuntimeInitialized\"])Module[\"onRuntimeInitialized\"]();if(shouldRunNow)callMain(args);postRun()}if(Module[\"setStatus\"]){Module[\"setStatus\"](\"Running...\");setTimeout(function(){setTimeout(function(){Module[\"setStatus\"](\"\")},1);doRun()},1)}else{doRun()}}Module[\"run\"]=run;function 
exit(status,implicit){if(implicit&&noExitRuntime&&status===0){return}if(noExitRuntime){}else{ABORT=true;EXITSTATUS=status;exitRuntime();if(Module[\"onExit\"])Module[\"onExit\"](status)}quit_(status,new ExitStatus(status))}if(Module[\"preInit\"]){if(typeof Module[\"preInit\"]==\"function\")Module[\"preInit\"]=[Module[\"preInit\"]];while(Module[\"preInit\"].length>0){Module[\"preInit\"].pop()()}}var shouldRunNow=true;if(Module[\"noInitialRun\"])shouldRunNow=false;noExitRuntime=true;run();\n\n\n return WasmBackendModule\n}\n);\n})();\nif (typeof exports === 'object' && typeof module === 'object')\n module.exports = WasmBackendModule;\n else if (typeof define === 'function' && define['amd'])\n define([], function() { return WasmBackendModule; });\n else if (typeof exports === 'object')\n exports[\"WasmBackendModule\"] = WasmBackendModule;\n ", "import { tf, loadGraphModel } from '../tf.js';\n\nconst NUM_LANDMARKS = 6;\n\nfunction generateAnchors(inputSize) {\n const spec = { strides: [inputSize / 16, inputSize / 8], anchors: [2, 6] };\n const anchors = [];\n for (let i = 0; i < spec.strides.length; i++) {\n const stride = spec.strides[i];\n const gridRows = Math.floor((inputSize + stride - 1) / stride);\n const gridCols = Math.floor((inputSize + stride - 1) / stride);\n const anchorsNum = spec.anchors[i];\n for (let gridY = 0; gridY < gridRows; gridY++) {\n const anchorY = stride * (gridY + 0.5);\n for (let gridX = 0; gridX < gridCols; gridX++) {\n const anchorX = stride * (gridX + 0.5);\n for (let n = 0; n < anchorsNum; n++) {\n anchors.push([anchorX, anchorY]);\n }\n }\n }\n }\n return anchors;\n}\n\nconst disposeBox = (box) => {\n box.startEndTensor.dispose();\n box.startPoint.dispose();\n box.endPoint.dispose();\n};\n\nconst createBox = (startEndTensor) => ({\n startEndTensor,\n startPoint: tf.slice(startEndTensor, [0, 0], [-1, 2]),\n endPoint: tf.slice(startEndTensor, [0, 2], [-1, 2]),\n});\n\nconst scaleBox = (box, factors) => {\n const starts = tf.mul(box.startPoint, factors);\n const ends = tf.mul(box.endPoint, factors);\n const newCoordinates = tf.concat2d([starts, ends], 1);\n return createBox(newCoordinates);\n};\n\nfunction decodeBounds(boxOutputs, anchors, inputSize) {\n const boxStarts = tf.slice(boxOutputs, [0, 1], [-1, 2]);\n const centers = tf.add(boxStarts, anchors);\n const boxSizes = tf.slice(boxOutputs, [0, 3], [-1, 2]);\n const boxSizesNormalized = tf.div(boxSizes, inputSize);\n const centersNormalized = tf.div(centers, inputSize);\n const halfBoxSize = tf.div(boxSizesNormalized, 2);\n const starts = tf.sub(centersNormalized, halfBoxSize);\n const ends = tf.add(centersNormalized, halfBoxSize);\n const startNormalized = tf.mul(starts, inputSize);\n const endNormalized = tf.mul(ends, inputSize);\n const concatAxis = 1;\n return tf.concat2d([startNormalized, endNormalized], concatAxis);\n}\n\nfunction scaleBoxFromPrediction(face, scaleFactor) {\n return tf.tidy(() => {\n const box = face['box'] ? 
face['box'] : face;\n return scaleBox(box, scaleFactor).startEndTensor.squeeze();\n });\n}\n\nclass BlazeFaceModel {\n constructor(model, config) {\n this.blazeFaceModel = model;\n this.width = config.detector.inputSize;\n this.height = config.detector.inputSize;\n this.anchorsData = generateAnchors(config.detector.inputSize);\n this.anchors = tf.tensor2d(this.anchorsData);\n this.inputSize = tf.tensor1d([this.width, this.height]);\n this.config = config;\n this.scaleFaces = 0.8;\n }\n\n async getBoundingBoxes(inputImage) {\n // sanity check on input\n if ((!inputImage) || (inputImage.isDisposedInternal) || (inputImage.shape.length !== 4) || (inputImage.shape[1] < 1) || (inputImage.shape[2] < 1)) return null;\n const [detectedOutputs, boxes, scores] = tf.tidy(() => {\n const resizedImage = inputImage.resizeBilinear([this.width, this.height]);\n // const normalizedImage = tf.mul(tf.sub(resizedImage.div(255), 0.5), 2);\n const normalizedImage = tf.sub(resizedImage.div(127.5), 1);\n const batchedPrediction = this.blazeFaceModel.predict(normalizedImage);\n let prediction;\n // are we using tfhub or pinto converted model?\n if (Array.isArray(batchedPrediction)) {\n const sorted = batchedPrediction.sort((a, b) => a.size - b.size);\n const concat384 = tf.concat([sorted[0], sorted[2]], 2); // dim: 384, 1 + 16\n const concat512 = tf.concat([sorted[1], sorted[3]], 2); // dim: 512, 1 + 16\n const concat = tf.concat([concat512, concat384], 1);\n prediction = concat.squeeze(0);\n } else {\n prediction = batchedPrediction.squeeze(); // when using tfhub model\n }\n const decodedBounds = decodeBounds(prediction, this.anchors, this.inputSize);\n const logits = tf.slice(prediction, [0, 0], [-1, 1]);\n const scoresOut = tf.sigmoid(logits).squeeze();\n return [prediction, decodedBounds, scoresOut];\n });\n const boxIndicesTensor = await tf.image.nonMaxSuppressionAsync(boxes, scores, this.config.detector.maxFaces, this.config.detector.iouThreshold, this.config.detector.scoreThreshold);\n const boxIndices = boxIndicesTensor.arraySync();\n boxIndicesTensor.dispose();\n const boundingBoxesMap = boxIndices.map((boxIndex) => tf.slice(boxes, [boxIndex, 0], [1, -1]));\n const boundingBoxes = boundingBoxesMap.map((boundingBox) => {\n const vals = boundingBox.arraySync();\n boundingBox.dispose();\n return vals;\n });\n\n const scoresVal = scores.dataSync();\n const annotatedBoxes = [];\n for (const i in boundingBoxes) {\n const boxIndex = boxIndices[i];\n const confidence = scoresVal[boxIndex];\n if (confidence > this.config.detector.minConfidence) {\n const box = createBox(boundingBoxes[i]);\n const anchor = this.anchorsData[boxIndex];\n const landmarks = tf.tidy(() => tf.slice(detectedOutputs, [boxIndex, NUM_LANDMARKS - 1], [1, -1]).squeeze().reshape([NUM_LANDMARKS, -1]));\n annotatedBoxes.push({ box, landmarks, anchor, confidence });\n }\n }\n detectedOutputs.dispose();\n boxes.dispose();\n scores.dispose();\n detectedOutputs.dispose();\n return {\n boxes: annotatedBoxes,\n scaleFactor: [inputImage.shape[2] / this.width, inputImage.shape[1] / this.height],\n };\n }\n\n async estimateFaces(input) {\n const { boxes, scaleFactor } = await this.getBoundingBoxes(input);\n const faces = [];\n for (const face of boxes) {\n const landmarkData = face.landmarks.arraySync();\n const scaledBox = scaleBoxFromPrediction(face, scaleFactor);\n const boxData = scaleBox.arraySync();\n const probabilityData = face.probability.arraySync();\n const anchor = face.anchor;\n const [scaleFactorX, scaleFactorY] = scaleFactor;\n const 
scaledLandmarks = landmarkData\n .map((landmark) => ([\n (landmark[0] + anchor[0]) * scaleFactorX,\n (landmark[1] + anchor[1]) * scaleFactorY,\n ]));\n const normalizedFace = {\n topLeft: boxData.slice(0, 2),\n bottomRight: boxData.slice(2),\n landmarks: scaledLandmarks,\n probability: probabilityData,\n };\n disposeBox(face.box);\n face.landmarks.dispose();\n face.probability.dispose();\n scaledBox.dispose();\n faces.push(normalizedFace);\n }\n return faces;\n }\n}\n\nasync function load(config) {\n const blazeface = await loadGraphModel(config.detector.modelPath, { fromTFHub: config.detector.modelPath.includes('tfhub.dev') });\n const model = new BlazeFaceModel(blazeface, config);\n // eslint-disable-next-line no-console\n console.log(`Human: load model: ${config.detector.modelPath.match(/\\/(.*)\\./)[1]}`);\n return model;\n}\n\nexports.load = load;\nexports.BlazeFaceModel = BlazeFaceModel;\nexports.disposeBox = disposeBox;\n", "exports.MESH_ANNOTATIONS = {\n silhouette: [\n 10, 338, 297, 332, 284, 251, 389, 356, 454, 323, 361, 288,\n 397, 365, 379, 378, 400, 377, 152, 148, 176, 149, 150, 136,\n 172, 58, 132, 93, 234, 127, 162, 21, 54, 103, 67, 109,\n ],\n lipsUpperOuter: [61, 185, 40, 39, 37, 0, 267, 269, 270, 409, 291],\n lipsLowerOuter: [146, 91, 181, 84, 17, 314, 405, 321, 375, 291],\n lipsUpperInner: [78, 191, 80, 81, 82, 13, 312, 311, 310, 415, 308],\n lipsLowerInner: [78, 95, 88, 178, 87, 14, 317, 402, 318, 324, 308],\n rightEyeUpper0: [246, 161, 160, 159, 158, 157, 173],\n rightEyeLower0: [33, 7, 163, 144, 145, 153, 154, 155, 133],\n rightEyeUpper1: [247, 30, 29, 27, 28, 56, 190],\n rightEyeLower1: [130, 25, 110, 24, 23, 22, 26, 112, 243],\n rightEyeUpper2: [113, 225, 224, 223, 222, 221, 189],\n rightEyeLower2: [226, 31, 228, 229, 230, 231, 232, 233, 244],\n rightEyeLower3: [143, 111, 117, 118, 119, 120, 121, 128, 245],\n rightEyebrowUpper: [156, 70, 63, 105, 66, 107, 55, 193],\n rightEyebrowLower: [35, 124, 46, 53, 52, 65],\n rightEyeIris: [473, 474, 475, 476, 477],\n leftEyeUpper0: [466, 388, 387, 386, 385, 384, 398],\n leftEyeLower0: [263, 249, 390, 373, 374, 380, 381, 382, 362],\n leftEyeUpper1: [467, 260, 259, 257, 258, 286, 414],\n leftEyeLower1: [359, 255, 339, 254, 253, 252, 256, 341, 463],\n leftEyeUpper2: [342, 445, 444, 443, 442, 441, 413],\n leftEyeLower2: [446, 261, 448, 449, 450, 451, 452, 453, 464],\n leftEyeLower3: [372, 340, 346, 347, 348, 349, 350, 357, 465],\n leftEyebrowUpper: [383, 300, 293, 334, 296, 336, 285, 417],\n leftEyebrowLower: [265, 353, 276, 283, 282, 295],\n leftEyeIris: [468, 469, 470, 471, 472],\n midwayBetweenEyes: [168],\n noseTip: [1],\n noseBottom: [2],\n noseRightCorner: [98],\n noseLeftCorner: [327],\n rightCheek: [205],\n leftCheek: [425],\n};\nexports.MESH_TO_IRIS_INDICES_MAP = [ // A mapping from facemesh model keypoints to iris model keypoints.\n { key: 'EyeUpper0', indices: [9, 10, 11, 12, 13, 14, 15] },\n { key: 'EyeUpper1', indices: [25, 26, 27, 28, 29, 30, 31] },\n { key: 'EyeUpper2', indices: [41, 42, 43, 44, 45, 46, 47] },\n { key: 'EyeLower0', indices: [0, 1, 2, 3, 4, 5, 6, 7, 8] },\n { key: 'EyeLower1', indices: [16, 17, 18, 19, 20, 21, 22, 23, 24] },\n { key: 'EyeLower2', indices: [32, 33, 34, 35, 36, 37, 38, 39, 40] },\n { key: 'EyeLower3', indices: [54, 55, 56, 57, 58, 59, 60, 61, 62] },\n { key: 'EyebrowUpper', indices: [63, 64, 65, 66, 67, 68, 69, 70] },\n { key: 'EyebrowLower', indices: [48, 49, 50, 51, 52, 53] },\n];\n", "import { tf } from '../tf.js';\n\nfunction scaleBoxCoordinates(box, factor) {\n const startPoint = 
[box.startPoint[0] * factor[0], box.startPoint[1] * factor[1]];\n const endPoint = [box.endPoint[0] * factor[0], box.endPoint[1] * factor[1]];\n return { startPoint, endPoint };\n}\nexports.scaleBoxCoordinates = scaleBoxCoordinates;\n\nfunction getBoxSize(box) {\n return [\n Math.abs(box.endPoint[0] - box.startPoint[0]),\n Math.abs(box.endPoint[1] - box.startPoint[1]),\n ];\n}\nexports.getBoxSize = getBoxSize;\n\nfunction getBoxCenter(box) {\n return [\n box.startPoint[0] + (box.endPoint[0] - box.startPoint[0]) / 2,\n box.startPoint[1] + (box.endPoint[1] - box.startPoint[1]) / 2,\n ];\n}\nexports.getBoxCenter = getBoxCenter;\n\nfunction cutBoxFromImageAndResize(box, image, cropSize) {\n const h = image.shape[1];\n const w = image.shape[2];\n const boxes = [[\n box.startPoint[1] / h, box.startPoint[0] / w, box.endPoint[1] / h,\n box.endPoint[0] / w,\n ]];\n return tf.image.cropAndResize(image, boxes, [0], cropSize);\n}\nexports.cutBoxFromImageAndResize = cutBoxFromImageAndResize;\n\nfunction enlargeBox(box, factor = 1.5) {\n const center = getBoxCenter(box);\n const size = getBoxSize(box);\n const newHalfSize = [factor * size[0] / 2, factor * size[1] / 2];\n const startPoint = [center[0] - newHalfSize[0], center[1] - newHalfSize[1]];\n const endPoint = [center[0] + newHalfSize[0], center[1] + newHalfSize[1]];\n return { startPoint, endPoint, landmarks: box.landmarks };\n}\nexports.enlargeBox = enlargeBox;\n\nfunction squarifyBox(box) {\n const centers = getBoxCenter(box);\n const size = getBoxSize(box);\n const maxEdge = Math.max(...size);\n const halfSize = maxEdge / 2;\n const startPoint = [centers[0] - halfSize, centers[1] - halfSize];\n const endPoint = [centers[0] + halfSize, centers[1] + halfSize];\n return { startPoint, endPoint, landmarks: box.landmarks };\n}\nexports.squarifyBox = squarifyBox;\n", "exports.IDENTITY_MATRIX = [[1, 0, 0], [0, 1, 0], [0, 0, 1]];\n/**\n * Normalizes the provided angle to the range -pi to pi.\n * @param angle The angle in radians to be normalized.\n */\nfunction normalizeRadians(angle) {\n return angle - 2 * Math.PI * Math.floor((angle + Math.PI) / (2 * Math.PI));\n}\nexports.normalizeRadians = normalizeRadians;\n/**\n * Computes the angle of rotation between two anchor points.\n * @param point1 First anchor point\n * @param point2 Second anchor point\n */\nfunction computeRotation(point1, point2) {\n const radians = Math.PI / 2 - Math.atan2(-(point2[1] - point1[1]), point2[0] - point1[0]);\n return normalizeRadians(radians);\n}\nexports.computeRotation = computeRotation;\nfunction radToDegrees(rad) {\n return rad * 180 / Math.PI;\n}\nexports.radToDegrees = radToDegrees;\nfunction buildTranslationMatrix(x, y) {\n return [[1, 0, x], [0, 1, y], [0, 0, 1]];\n}\nfunction dot(v1, v2) {\n let product = 0;\n for (let i = 0; i < v1.length; i++) {\n product += v1[i] * v2[i];\n }\n return product;\n}\nexports.dot = dot;\nfunction getColumnFrom2DArr(arr, columnIndex) {\n const column = [];\n for (let i = 0; i < arr.length; i++) {\n column.push(arr[i][columnIndex]);\n }\n return column;\n}\nexports.getColumnFrom2DArr = getColumnFrom2DArr;\nfunction multiplyTransformMatrices(mat1, mat2) {\n const product = [];\n const size = mat1.length;\n for (let row = 0; row < size; row++) {\n product.push([]);\n for (let col = 0; col < size; col++) {\n product[row].push(dot(mat1[row], getColumnFrom2DArr(mat2, col)));\n }\n }\n return product;\n}\nfunction buildRotationMatrix(rotation, center) {\n const cosA = Math.cos(rotation);\n const sinA = Math.sin(rotation);\n const 
rotationMatrix = [[cosA, -sinA, 0], [sinA, cosA, 0], [0, 0, 1]];\n const translationMatrix = buildTranslationMatrix(center[0], center[1]);\n const translationTimesRotation = multiplyTransformMatrices(translationMatrix, rotationMatrix);\n const negativeTranslationMatrix = buildTranslationMatrix(-center[0], -center[1]);\n return multiplyTransformMatrices(translationTimesRotation, negativeTranslationMatrix);\n}\nexports.buildRotationMatrix = buildRotationMatrix;\nfunction invertTransformMatrix(matrix) {\n const rotationComponent = [[matrix[0][0], matrix[1][0]], [matrix[0][1], matrix[1][1]]];\n const translationComponent = [matrix[0][2], matrix[1][2]];\n const invertedTranslation = [\n -dot(rotationComponent[0], translationComponent),\n -dot(rotationComponent[1], translationComponent),\n ];\n return [\n rotationComponent[0].concat(invertedTranslation[0]),\n rotationComponent[1].concat(invertedTranslation[1]),\n [0, 0, 1],\n ];\n}\nexports.invertTransformMatrix = invertTransformMatrix;\nfunction rotatePoint(homogeneousCoordinate, rotationMatrix) {\n return [\n dot(homogeneousCoordinate, rotationMatrix[0]),\n dot(homogeneousCoordinate, rotationMatrix[1]),\n ];\n}\nexports.rotatePoint = rotatePoint;\nfunction xyDistanceBetweenPoints(a, b) {\n return Math.sqrt(((a[0] - b[0]) ** 2) + ((a[1] - b[1]) ** 2));\n}\nexports.xyDistanceBetweenPoints = xyDistanceBetweenPoints;\n", "/* eslint-disable class-methods-use-this */\nimport { tf } from '../tf.js';\nimport * as bounding from './box';\nimport * as keypoints from './keypoints';\nimport * as util from './util';\n\nconst LANDMARKS_COUNT = 468;\nconst MESH_MOUTH_INDEX = 13;\nconst MESH_KEYPOINTS_LINE_OF_SYMMETRY_INDICES = [MESH_MOUTH_INDEX, keypoints.MESH_ANNOTATIONS['midwayBetweenEyes'][0]];\nconst BLAZEFACE_MOUTH_INDEX = 3;\nconst BLAZEFACE_NOSE_INDEX = 2;\nconst BLAZEFACE_KEYPOINTS_LINE_OF_SYMMETRY_INDICES = [BLAZEFACE_MOUTH_INDEX, BLAZEFACE_NOSE_INDEX];\nconst LEFT_EYE_OUTLINE = keypoints.MESH_ANNOTATIONS['leftEyeLower0'];\nconst LEFT_EYE_BOUNDS = [LEFT_EYE_OUTLINE[0], LEFT_EYE_OUTLINE[LEFT_EYE_OUTLINE.length - 1]];\nconst RIGHT_EYE_OUTLINE = keypoints.MESH_ANNOTATIONS['rightEyeLower0'];\nconst RIGHT_EYE_BOUNDS = [RIGHT_EYE_OUTLINE[0], RIGHT_EYE_OUTLINE[RIGHT_EYE_OUTLINE.length - 1]];\nconst IRIS_UPPER_CENTER_INDEX = 3;\nconst IRIS_LOWER_CENTER_INDEX = 4;\nconst IRIS_IRIS_INDEX = 71;\nconst IRIS_NUM_COORDINATES = 76;\n\n// Replace the raw coordinates returned by facemesh with refined iris model coordinates. Update the z coordinate to be an average of the original and the new. 
This produces the best visual effect.\nfunction replaceRawCoordinates(rawCoords, newCoords, prefix, keys) {\n for (let i = 0; i < keypoints.MESH_TO_IRIS_INDICES_MAP.length; i++) {\n const { key, indices } = keypoints.MESH_TO_IRIS_INDICES_MAP[i];\n const originalIndices = keypoints.MESH_ANNOTATIONS[`${prefix}${key}`];\n const shouldReplaceAllKeys = keys == null;\n if (shouldReplaceAllKeys || keys.includes(key)) {\n for (let j = 0; j < indices.length; j++) {\n const index = indices[j];\n rawCoords[originalIndices[j]] = [\n newCoords[index][0], newCoords[index][1],\n (newCoords[index][2] + rawCoords[originalIndices[j]][2]) / 2,\n ];\n }\n }\n }\n}\n// The Pipeline coordinates between the bounding box and skeleton models.\nclass Pipeline {\n constructor(boundingBoxDetector, meshDetector, irisModel, config) {\n // An array of facial bounding boxes.\n this.storedBoxes = [];\n this.runsWithoutFaceDetector = 0;\n this.boundingBoxDetector = boundingBoxDetector;\n this.meshDetector = meshDetector;\n this.irisModel = irisModel;\n this.meshWidth = config.mesh.inputSize;\n this.meshHeight = config.mesh.inputSize;\n this.irisSize = config.iris.inputSize;\n this.irisEnlarge = 2.3;\n this.skipped = 1000;\n this.detectedFaces = 0;\n }\n\n transformRawCoords(rawCoords, box, angle, rotationMatrix) {\n const boxSize = bounding.getBoxSize({ startPoint: box.startPoint, endPoint: box.endPoint });\n const scaleFactor = [boxSize[0] / this.meshWidth, boxSize[1] / this.meshHeight];\n const coordsScaled = rawCoords.map((coord) => ([\n scaleFactor[0] * (coord[0] - this.meshWidth / 2),\n scaleFactor[1] * (coord[1] - this.meshHeight / 2), coord[2],\n ]));\n const coordsRotationMatrix = util.buildRotationMatrix(angle, [0, 0]);\n const coordsRotated = coordsScaled.map((coord) => ([...util.rotatePoint(coord, coordsRotationMatrix), coord[2]]));\n const inverseRotationMatrix = util.invertTransformMatrix(rotationMatrix);\n const boxCenter = [...bounding.getBoxCenter({ startPoint: box.startPoint, endPoint: box.endPoint }), 1];\n const originalBoxCenter = [\n util.dot(boxCenter, inverseRotationMatrix[0]),\n util.dot(boxCenter, inverseRotationMatrix[1]),\n ];\n return coordsRotated.map((coord) => ([\n coord[0] + originalBoxCenter[0],\n coord[1] + originalBoxCenter[1], coord[2],\n ]));\n }\n\n getLeftToRightEyeDepthDifference(rawCoords) {\n const leftEyeZ = rawCoords[LEFT_EYE_BOUNDS[0]][2];\n const rightEyeZ = rawCoords[RIGHT_EYE_BOUNDS[0]][2];\n return leftEyeZ - rightEyeZ;\n }\n\n // Returns a box describing a cropped region around the eye fit for passing to the iris model.\n getEyeBox(rawCoords, face, eyeInnerCornerIndex, eyeOuterCornerIndex, flip = false) {\n const box = bounding.squarifyBox(bounding.enlargeBox(this.calculateLandmarksBoundingBox([rawCoords[eyeInnerCornerIndex], rawCoords[eyeOuterCornerIndex]]), this.irisEnlarge));\n const boxSize = bounding.getBoxSize(box);\n let crop = tf.image.cropAndResize(face, [[\n box.startPoint[1] / this.meshHeight,\n box.startPoint[0] / this.meshWidth, box.endPoint[1] / this.meshHeight,\n box.endPoint[0] / this.meshWidth,\n ]], [0], [this.irisSize, this.irisSize]);\n if (flip) {\n crop = tf.image.flipLeftRight(crop);\n }\n return { box, boxSize, crop };\n }\n\n // Given a cropped image of an eye, returns the coordinates of the contours surrounding the eye and the iris.\n getEyeCoords(eyeData, eyeBox, eyeBoxSize, flip = false) {\n const eyeRawCoords = [];\n for (let i = 0; i < IRIS_NUM_COORDINATES; i++) {\n const x = eyeData[i * 3];\n const y = eyeData[i * 3 + 1];\n const z = eyeData[i 
* 3 + 2];\n eyeRawCoords.push([\n (flip\n ? (1 - (x / this.irisSize))\n : (x / this.irisSize)) * eyeBoxSize[0] + eyeBox.startPoint[0],\n (y / this.irisSize) * eyeBoxSize[1] + eyeBox.startPoint[1], z,\n ]);\n }\n return { rawCoords: eyeRawCoords, iris: eyeRawCoords.slice(IRIS_IRIS_INDEX) };\n }\n\n // The z-coordinates returned for the iris are unreliable, so we take the z values from the surrounding keypoints.\n getAdjustedIrisCoords(rawCoords, irisCoords, direction) {\n const upperCenterZ = rawCoords[keypoints.MESH_ANNOTATIONS[`${direction}EyeUpper0`][IRIS_UPPER_CENTER_INDEX]][2];\n const lowerCenterZ = rawCoords[keypoints.MESH_ANNOTATIONS[`${direction}EyeLower0`][IRIS_LOWER_CENTER_INDEX]][2];\n const averageZ = (upperCenterZ + lowerCenterZ) / 2;\n // Iris indices: 0: center | 1: right | 2: above | 3: left | 4: below\n return irisCoords.map((coord, i) => {\n let z = averageZ;\n if (i === 2) {\n z = upperCenterZ;\n } else if (i === 4) {\n z = lowerCenterZ;\n }\n return [coord[0], coord[1], z];\n });\n }\n\n async predict(input, config) {\n this.skipped++;\n let useFreshBox = false;\n // run new detector every skipFrames unless we only want box to start with\n let detector;\n if ((this.skipped > config.detector.skipFrames) || !config.mesh.enabled) {\n detector = await this.boundingBoxDetector.getBoundingBoxes(input);\n // don't reset on test image\n if ((input.shape[1] !== 255) && (input.shape[2] !== 255)) this.skipped = 0;\n }\n\n // if detector result count doesn't match current working set, use it to reset current working set\n if (detector && detector.boxes && (detector.boxes.length > 0) && (!config.mesh.enabled || (detector.boxes.length !== this.detectedFaces) && (this.detectedFaces !== config.detector.maxFaces))) {\n this.storedBoxes = [];\n this.detectedFaces = 0;\n for (const possible of detector.boxes) {\n this.storedBoxes.push({ startPoint: possible.box.startPoint.dataSync(), endPoint: possible.box.endPoint.dataSync(), landmarks: possible.landmarks, confidence: possible.confidence });\n }\n if (this.storedBoxes.length > 0) useFreshBox = true;\n }\n\n if (useFreshBox) {\n if (!detector || !detector.boxes || (detector.boxes.length === 0)) {\n this.storedBoxes = [];\n this.detectedFaces = 0;\n return null;\n }\n for (const i in this.storedBoxes) {\n const scaledBox = bounding.scaleBoxCoordinates({ startPoint: this.storedBoxes[i].startPoint, endPoint: this.storedBoxes[i].endPoint }, detector.scaleFactor);\n const enlargedBox = bounding.enlargeBox(scaledBox);\n const landmarks = this.storedBoxes[i].landmarks.arraySync();\n const confidence = this.storedBoxes[i].confidence;\n this.storedBoxes[i] = { ...enlargedBox, confidence, landmarks };\n }\n this.runsWithoutFaceDetector = 0;\n }\n if (detector && detector.boxes) {\n detector.boxes.forEach((prediction) => {\n prediction.box.startPoint.dispose();\n prediction.box.endPoint.dispose();\n prediction.landmarks.dispose();\n });\n }\n\n // console.log(this.skipped, config.detector.skipFrames, this.detectedFaces, config.detector.maxFaces, detector?.boxes.length, this.storedBoxes.length);\n\n let results = tf.tidy(() => this.storedBoxes.map((box, i) => {\n let angle = 0;\n // The facial bounding box landmarks could come either from blazeface (if we are using a fresh box), or from the mesh model (if we are reusing an old box).\n const boxLandmarksFromMeshModel = box.landmarks.length >= LANDMARKS_COUNT;\n let [indexOfMouth, indexOfForehead] = MESH_KEYPOINTS_LINE_OF_SYMMETRY_INDICES;\n if (boxLandmarksFromMeshModel === false) {\n [indexOfMouth, 
indexOfForehead] = BLAZEFACE_KEYPOINTS_LINE_OF_SYMMETRY_INDICES;\n }\n angle = util.computeRotation(box.landmarks[indexOfMouth], box.landmarks[indexOfForehead]);\n const faceCenter = bounding.getBoxCenter({ startPoint: box.startPoint, endPoint: box.endPoint });\n const faceCenterNormalized = [faceCenter[0] / input.shape[2], faceCenter[1] / input.shape[1]];\n let rotatedImage = input;\n let rotationMatrix = util.IDENTITY_MATRIX;\n if (angle !== 0) {\n rotatedImage = tf.image.rotateWithOffset(input, angle, 0, faceCenterNormalized);\n rotationMatrix = util.buildRotationMatrix(-angle, faceCenter);\n }\n const boxCPU = { startPoint: box.startPoint, endPoint: box.endPoint };\n const face = bounding.cutBoxFromImageAndResize(boxCPU, rotatedImage, [this.meshHeight, this.meshWidth]).div(255);\n\n // if we're not going to produce mesh, don't spend time with further processing\n if (!config.mesh.enabled) {\n const prediction = {\n coords: null,\n box,\n faceConfidence: null,\n confidence: box.confidence,\n image: face,\n };\n return prediction;\n }\n\n // The first returned tensor represents facial contours, which are included in the coordinates.\n const [, confidence, coords] = this.meshDetector.predict(face);\n const confidenceVal = confidence.dataSync()[0];\n confidence.dispose();\n if (confidenceVal < config.detector.minConfidence) {\n coords.dispose();\n return null;\n }\n const coordsReshaped = tf.reshape(coords, [-1, 3]);\n let rawCoords = coordsReshaped.arraySync();\n if (config.iris.enabled) {\n const { box: leftEyeBox, boxSize: leftEyeBoxSize, crop: leftEyeCrop } = this.getEyeBox(rawCoords, face, LEFT_EYE_BOUNDS[0], LEFT_EYE_BOUNDS[1], true);\n const { box: rightEyeBox, boxSize: rightEyeBoxSize, crop: rightEyeCrop } = this.getEyeBox(rawCoords, face, RIGHT_EYE_BOUNDS[0], RIGHT_EYE_BOUNDS[1]);\n const eyePredictions = (this.irisModel.predict(tf.concat([leftEyeCrop, rightEyeCrop])));\n const eyePredictionsData = eyePredictions.dataSync();\n eyePredictions.dispose();\n const leftEyeData = eyePredictionsData.slice(0, IRIS_NUM_COORDINATES * 3);\n const { rawCoords: leftEyeRawCoords, iris: leftIrisRawCoords } = this.getEyeCoords(leftEyeData, leftEyeBox, leftEyeBoxSize, true);\n const rightEyeData = eyePredictionsData.slice(IRIS_NUM_COORDINATES * 3);\n const { rawCoords: rightEyeRawCoords, iris: rightIrisRawCoords } = this.getEyeCoords(rightEyeData, rightEyeBox, rightEyeBoxSize);\n const leftToRightEyeDepthDifference = this.getLeftToRightEyeDepthDifference(rawCoords);\n if (Math.abs(leftToRightEyeDepthDifference) < 30) { // User is looking straight ahead.\n replaceRawCoordinates(rawCoords, leftEyeRawCoords, 'left');\n replaceRawCoordinates(rawCoords, rightEyeRawCoords, 'right');\n // If the user is looking to the left or to the right, the iris coordinates tend to diverge too much from the mesh coordinates for them to be merged. 
So we only update a single contour line above and below the eye.\n } else if (leftToRightEyeDepthDifference < 1) { // User is looking towards the right.\n replaceRawCoordinates(rawCoords, leftEyeRawCoords, 'left', ['EyeUpper0', 'EyeLower0']);\n } else { // User is looking towards the left.\n replaceRawCoordinates(rawCoords, rightEyeRawCoords, 'right', ['EyeUpper0', 'EyeLower0']);\n }\n const adjustedLeftIrisCoords = this.getAdjustedIrisCoords(rawCoords, leftIrisRawCoords, 'left');\n const adjustedRightIrisCoords = this.getAdjustedIrisCoords(rawCoords, rightIrisRawCoords, 'right');\n rawCoords = rawCoords.concat(adjustedLeftIrisCoords).concat(adjustedRightIrisCoords);\n }\n const transformedCoordsData = this.transformRawCoords(rawCoords, box, angle, rotationMatrix);\n tf.dispose(rawCoords);\n const landmarksBox = bounding.enlargeBox(this.calculateLandmarksBoundingBox(transformedCoordsData));\n const transformedCoords = tf.tensor2d(transformedCoordsData);\n const prediction = {\n coords: transformedCoords,\n box: landmarksBox,\n faceConfidence: confidenceVal,\n confidence: box.confidence,\n image: face,\n };\n this.storedBoxes[i] = { ...landmarksBox, landmarks: transformedCoords.arraySync(), confidence: box.confidence, faceConfidence: confidenceVal };\n return prediction;\n }));\n results = results.filter((a) => a !== null);\n this.detectedFaces = results.length;\n return results;\n }\n\n calculateLandmarksBoundingBox(landmarks) {\n const xs = landmarks.map((d) => d[0]);\n const ys = landmarks.map((d) => d[1]);\n const startPoint = [Math.min(...xs), Math.min(...ys)];\n const endPoint = [Math.max(...xs), Math.max(...ys)];\n return { startPoint, endPoint, landmarks };\n }\n}\nexports.Pipeline = Pipeline;\n", "exports.UV_COORDS = [\n [0.499976992607117, 0.652534008026123],\n [0.500025987625122, 0.547487020492554],\n [0.499974012374878, 0.602371990680695],\n [0.482113003730774, 0.471979022026062],\n [0.500150978565216, 0.527155995368958],\n [0.499909996986389, 0.498252987861633],\n [0.499523013830185, 0.40106201171875],\n [0.289712011814117, 0.380764007568359],\n [0.499954998493195, 0.312398016452789],\n [0.499987006187439, 0.269918978214264],\n [0.500023007392883, 0.107050001621246],\n [0.500023007392883, 0.666234016418457],\n [0.5000159740448, 0.679224014282227],\n [0.500023007392883, 0.692348003387451],\n [0.499976992607117, 0.695277988910675],\n [0.499976992607117, 0.70593398809433],\n [0.499976992607117, 0.719385027885437],\n [0.499976992607117, 0.737019002437592],\n [0.499967992305756, 0.781370997428894],\n [0.499816000461578, 0.562981009483337],\n [0.473773002624512, 0.573909997940063],\n [0.104906998574734, 0.254140973091125],\n [0.365929991006851, 0.409575998783112],\n [0.338757991790771, 0.41302502155304],\n [0.311120003461838, 0.409460008144379],\n [0.274657994508743, 0.389131009578705],\n [0.393361985683441, 0.403706014156342],\n [0.345234006643295, 0.344011008739471],\n [0.370094001293182, 0.346076011657715],\n [0.319321990013123, 0.347265005111694],\n [0.297903001308441, 0.353591024875641],\n [0.24779200553894, 0.410809993743896],\n [0.396889001131058, 0.842755019664764],\n [0.280097991228104, 0.375599980354309],\n [0.106310002505779, 0.399955987930298],\n [0.2099249958992, 0.391353011131287],\n [0.355807989835739, 0.534406006336212],\n [0.471751004457474, 0.65040397644043],\n [0.474155008792877, 0.680191993713379],\n [0.439785003662109, 0.657229006290436],\n [0.414617002010345, 0.66654098033905],\n [0.450374007225037, 0.680860996246338],\n [0.428770989179611, 0.682690978050232],\n 
[0.374971002340317, 0.727805018424988],\n [0.486716985702515, 0.547628998756409],\n [0.485300987958908, 0.527395009994507],\n [0.257764995098114, 0.314490020275116],\n [0.401223003864288, 0.455172002315521],\n [0.429818987846375, 0.548614978790283],\n [0.421351999044418, 0.533740997314453],\n [0.276895999908447, 0.532056987285614],\n [0.483370006084442, 0.499586999416351],\n [0.33721199631691, 0.282882988452911],\n [0.296391993761063, 0.293242990970612],\n [0.169294998049736, 0.193813979625702],\n [0.447580009698868, 0.302609980106354],\n [0.392390012741089, 0.353887975215912],\n [0.354490011930466, 0.696784019470215],\n [0.067304998636246, 0.730105042457581],\n [0.442739009857178, 0.572826027870178],\n [0.457098007202148, 0.584792017936707],\n [0.381974011659622, 0.694710969924927],\n [0.392388999462128, 0.694203019142151],\n [0.277076005935669, 0.271932005882263],\n [0.422551989555359, 0.563233017921448],\n [0.385919004678726, 0.281364023685455],\n [0.383103013038635, 0.255840003490448],\n [0.331431001424789, 0.119714021682739],\n [0.229923993349075, 0.232002973556519],\n [0.364500999450684, 0.189113974571228],\n [0.229622006416321, 0.299540996551514],\n [0.173287004232407, 0.278747975826263],\n [0.472878992557526, 0.666198015213013],\n [0.446828007698059, 0.668527007102966],\n [0.422762006521225, 0.673889994621277],\n [0.445307999849319, 0.580065965652466],\n [0.388103008270264, 0.693961024284363],\n [0.403039008378983, 0.706539988517761],\n [0.403629004955292, 0.693953037261963],\n [0.460041999816895, 0.557139039039612],\n [0.431158006191254, 0.692366003990173],\n [0.452181994915009, 0.692366003990173],\n [0.475387006998062, 0.692366003990173],\n [0.465828001499176, 0.779190003871918],\n [0.472328990697861, 0.736225962638855],\n [0.473087012767792, 0.717857003211975],\n [0.473122000694275, 0.704625964164734],\n [0.473033010959625, 0.695277988910675],\n [0.427942007780075, 0.695277988910675],\n [0.426479011774063, 0.703539967536926],\n [0.423162013292313, 0.711845993995667],\n [0.4183090031147, 0.720062971115112],\n [0.390094995498657, 0.639572978019714],\n [0.013953999616206, 0.560034036636353],\n [0.499913990497589, 0.58014702796936],\n [0.413199990987778, 0.69539999961853],\n [0.409626007080078, 0.701822996139526],\n [0.468080013990402, 0.601534962654114],\n [0.422728985548019, 0.585985004901886],\n [0.463079988956451, 0.593783974647522],\n [0.37211999297142, 0.47341400384903],\n [0.334562003612518, 0.496073007583618],\n [0.411671012639999, 0.546965003013611],\n [0.242175996303558, 0.14767599105835],\n [0.290776997804642, 0.201445996761322],\n [0.327338010072708, 0.256527006626129],\n [0.399509996175766, 0.748921036720276],\n [0.441727995872498, 0.261676013469696],\n [0.429764986038208, 0.187834024429321],\n [0.412198007106781, 0.108901023864746],\n [0.288955003023148, 0.398952007293701],\n [0.218936994671822, 0.435410976409912],\n [0.41278201341629, 0.398970007896423],\n [0.257135003805161, 0.355440020561218],\n [0.427684992551804, 0.437960982322693],\n [0.448339998722076, 0.536936044692993],\n [0.178560003638268, 0.45755398273468],\n [0.247308000922203, 0.457193970680237],\n [0.286267012357712, 0.467674970626831],\n [0.332827985286713, 0.460712015628815],\n [0.368755996227264, 0.447206974029541],\n [0.398963987827301, 0.432654976844788],\n [0.476410001516342, 0.405806005001068],\n [0.189241006970406, 0.523923993110657],\n [0.228962004184723, 0.348950982093811],\n [0.490725994110107, 0.562400996685028],\n [0.404670000076294, 0.485132992267609],\n [0.019469000399113, 
0.401564002037048],\n [0.426243007183075, 0.420431017875671],\n [0.396993011236191, 0.548797011375427],\n [0.266469985246658, 0.376977026462555],\n [0.439121007919312, 0.51895797252655],\n [0.032313998788595, 0.644356966018677],\n [0.419054001569748, 0.387154996395111],\n [0.462783008813858, 0.505746960639954],\n [0.238978996872902, 0.779744982719421],\n [0.198220998048782, 0.831938028335571],\n [0.107550002634525, 0.540755033493042],\n [0.183610007166862, 0.740257024765015],\n [0.134409993886948, 0.333683013916016],\n [0.385764002799988, 0.883153975009918],\n [0.490967005491257, 0.579378008842468],\n [0.382384985685349, 0.508572995662689],\n [0.174399003386497, 0.397670984268188],\n [0.318785011768341, 0.39623498916626],\n [0.343364000320435, 0.400596976280212],\n [0.396100014448166, 0.710216999053955],\n [0.187885001301765, 0.588537991046906],\n [0.430987000465393, 0.944064974784851],\n [0.318993002176285, 0.898285031318665],\n [0.266247987747192, 0.869701027870178],\n [0.500023007392883, 0.190576016902924],\n [0.499976992607117, 0.954452991485596],\n [0.366169989109039, 0.398822009563446],\n [0.393207013607025, 0.39553701877594],\n [0.410373002290726, 0.391080021858215],\n [0.194993004202843, 0.342101991176605],\n [0.388664990663528, 0.362284004688263],\n [0.365961998701096, 0.355970978736877],\n [0.343364000320435, 0.355356991291046],\n [0.318785011768341, 0.35834002494812],\n [0.301414996385574, 0.363156020641327],\n [0.058132998645306, 0.319076001644135],\n [0.301414996385574, 0.387449026107788],\n [0.499987989664078, 0.618434011936188],\n [0.415838003158569, 0.624195992946625],\n [0.445681989192963, 0.566076993942261],\n [0.465844005346298, 0.620640993118286],\n [0.49992299079895, 0.351523995399475],\n [0.288718998432159, 0.819945991039276],\n [0.335278987884521, 0.852819979190826],\n [0.440512001514435, 0.902418971061707],\n [0.128294005990028, 0.791940987110138],\n [0.408771991729736, 0.373893976211548],\n [0.455606997013092, 0.451801002025604],\n [0.499877005815506, 0.908990025520325],\n [0.375436991453171, 0.924192011356354],\n [0.11421000212431, 0.615022003650665],\n [0.448662012815475, 0.695277988910675],\n [0.4480200111866, 0.704632043838501],\n [0.447111994028091, 0.715808033943176],\n [0.444831997156143, 0.730794012546539],\n [0.430011987686157, 0.766808986663818],\n [0.406787008047104, 0.685672998428345],\n [0.400738000869751, 0.681069016456604],\n [0.392399996519089, 0.677703022956848],\n [0.367855995893478, 0.663918972015381],\n [0.247923001646996, 0.601333022117615],\n [0.452769994735718, 0.420849978923798],\n [0.43639200925827, 0.359887003898621],\n [0.416164010763168, 0.368713974952698],\n [0.413385987281799, 0.692366003990173],\n [0.228018000721931, 0.683571994304657],\n [0.468268007040024, 0.352671027183533],\n [0.411361992359161, 0.804327011108398],\n [0.499989002943039, 0.469825029373169],\n [0.479153990745544, 0.442654013633728],\n [0.499974012374878, 0.439637005329132],\n [0.432112008333206, 0.493588984012604],\n [0.499886006116867, 0.866917014122009],\n [0.49991300702095, 0.821729004383087],\n [0.456548988819122, 0.819200992584229],\n [0.344549000263214, 0.745438992977142],\n [0.37890899181366, 0.574010014533997],\n [0.374292999505997, 0.780184984207153],\n [0.319687992334366, 0.570737957954407],\n [0.357154995203018, 0.604269981384277],\n [0.295284003019333, 0.621580958366394],\n [0.447750002145767, 0.862477004528046],\n [0.410986006259918, 0.508723020553589],\n [0.31395098567009, 0.775308012962341],\n [0.354128003120422, 0.812552988529205],\n 
[0.324548006057739, 0.703992962837219],\n [0.189096003770828, 0.646299958229065],\n [0.279776990413666, 0.71465802192688],\n [0.1338230073452, 0.682700991630554],\n [0.336768001317978, 0.644733011722565],\n [0.429883986711502, 0.466521978378296],\n [0.455527991056442, 0.548622965812683],\n [0.437114000320435, 0.558896005153656],\n [0.467287987470627, 0.529924988746643],\n [0.414712011814117, 0.335219979286194],\n [0.37704598903656, 0.322777986526489],\n [0.344107985496521, 0.320150971412659],\n [0.312875986099243, 0.32233202457428],\n [0.283526003360748, 0.333190023899078],\n [0.241245999932289, 0.382785975933075],\n [0.102986000478268, 0.468762993812561],\n [0.267612010240555, 0.424560010433197],\n [0.297879010438919, 0.433175981044769],\n [0.333433985710144, 0.433878004550934],\n [0.366427004337311, 0.426115989685059],\n [0.396012008190155, 0.416696012020111],\n [0.420121014118195, 0.41022801399231],\n [0.007561000064015, 0.480777025222778],\n [0.432949006557465, 0.569517970085144],\n [0.458638995885849, 0.479089021682739],\n [0.473466008901596, 0.545744001865387],\n [0.476087987422943, 0.563830018043518],\n [0.468472003936768, 0.555056989192963],\n [0.433990985155106, 0.582361996173859],\n [0.483518004417419, 0.562983989715576],\n [0.482482999563217, 0.57784903049469],\n [0.42645001411438, 0.389798998832703],\n [0.438998997211456, 0.39649498462677],\n [0.450067013502121, 0.400434017181396],\n [0.289712011814117, 0.368252992630005],\n [0.276670008897781, 0.363372981548309],\n [0.517862021923065, 0.471948027610779],\n [0.710287988185883, 0.380764007568359],\n [0.526226997375488, 0.573909997940063],\n [0.895093023777008, 0.254140973091125],\n [0.634069979190826, 0.409575998783112],\n [0.661242008209229, 0.41302502155304],\n [0.688880026340485, 0.409460008144379],\n [0.725341975688934, 0.389131009578705],\n [0.606630027294159, 0.40370500087738],\n [0.654766023159027, 0.344011008739471],\n [0.629905998706818, 0.346076011657715],\n [0.680678009986877, 0.347265005111694],\n [0.702096998691559, 0.353591024875641],\n [0.75221198797226, 0.410804986953735],\n [0.602918028831482, 0.842862963676453],\n [0.719901978969574, 0.375599980354309],\n [0.893692970275879, 0.399959981441498],\n [0.790081977844238, 0.391354024410248],\n [0.643998026847839, 0.534487962722778],\n [0.528249025344849, 0.65040397644043],\n [0.525849997997284, 0.680191040039062],\n [0.560214996337891, 0.657229006290436],\n [0.585384011268616, 0.66654098033905],\n [0.549625992774963, 0.680860996246338],\n [0.57122802734375, 0.682691991329193],\n [0.624852001667023, 0.72809898853302],\n [0.513050019741058, 0.547281980514526],\n [0.51509702205658, 0.527251958847046],\n [0.742246985435486, 0.314507007598877],\n [0.598631024360657, 0.454979002475739],\n [0.570338010787964, 0.548575043678284],\n [0.578631997108459, 0.533622980117798],\n [0.723087012767792, 0.532054007053375],\n [0.516445994377136, 0.499638974666595],\n [0.662801027297974, 0.282917976379395],\n [0.70362401008606, 0.293271005153656],\n [0.830704987049103, 0.193813979625702],\n [0.552385985851288, 0.302568018436432],\n [0.607609987258911, 0.353887975215912],\n [0.645429015159607, 0.696707010269165],\n [0.932694971561432, 0.730105042457581],\n [0.557260990142822, 0.572826027870178],\n [0.542901992797852, 0.584792017936707],\n [0.6180260181427, 0.694710969924927],\n [0.607590973377228, 0.694203019142151],\n [0.722943007946014, 0.271963000297546],\n [0.577413976192474, 0.563166975975037],\n [0.614082992076874, 0.281386971473694],\n [0.616907000541687, 0.255886018276215],\n 
[0.668509006500244, 0.119913995265961],\n [0.770092010498047, 0.232020974159241],\n [0.635536015033722, 0.189248979091644],\n [0.77039098739624, 0.299556016921997],\n [0.826722025871277, 0.278755009174347],\n [0.527121007442474, 0.666198015213013],\n [0.553171992301941, 0.668527007102966],\n [0.577238023281097, 0.673889994621277],\n [0.554691970348358, 0.580065965652466],\n [0.611896991729736, 0.693961024284363],\n [0.59696102142334, 0.706539988517761],\n [0.596370995044708, 0.693953037261963],\n [0.539958000183105, 0.557139039039612],\n [0.568841993808746, 0.692366003990173],\n [0.547818005084991, 0.692366003990173],\n [0.52461302280426, 0.692366003990173],\n [0.534089982509613, 0.779141008853912],\n [0.527670979499817, 0.736225962638855],\n [0.526912987232208, 0.717857003211975],\n [0.526877999305725, 0.704625964164734],\n [0.526966989040375, 0.695277988910675],\n [0.572058022022247, 0.695277988910675],\n [0.573521018028259, 0.703539967536926],\n [0.57683801651001, 0.711845993995667],\n [0.581691026687622, 0.720062971115112],\n [0.609944999217987, 0.639909982681274],\n [0.986046016216278, 0.560034036636353],\n [0.5867999792099, 0.69539999961853],\n [0.590372025966644, 0.701822996139526],\n [0.531915009021759, 0.601536989212036],\n [0.577268004417419, 0.585934996604919],\n [0.536915004253387, 0.593786001205444],\n [0.627542972564697, 0.473352015018463],\n [0.665585994720459, 0.495950996875763],\n [0.588353991508484, 0.546862006187439],\n [0.757824003696442, 0.14767599105835],\n [0.709249973297119, 0.201507985591888],\n [0.672684013843536, 0.256581008434296],\n [0.600408971309662, 0.74900496006012],\n [0.55826598405838, 0.261672019958496],\n [0.570303976535797, 0.187870979309082],\n [0.588165998458862, 0.109044015407562],\n [0.711045026779175, 0.398952007293701],\n [0.781069993972778, 0.435405015945435],\n [0.587247014045715, 0.398931980133057],\n [0.742869973182678, 0.355445981025696],\n [0.572156012058258, 0.437651991844177],\n [0.55186802148819, 0.536570012569427],\n [0.821442008018494, 0.457556009292603],\n [0.752701997756958, 0.457181990146637],\n [0.71375697851181, 0.467626988887787],\n [0.66711300611496, 0.460672974586487],\n [0.631101012229919, 0.447153985500336],\n [0.6008620262146, 0.432473003864288],\n [0.523481011390686, 0.405627012252808],\n [0.810747981071472, 0.523926019668579],\n [0.771045982837677, 0.348959028720856],\n [0.509127020835876, 0.562718033790588],\n [0.595292985439301, 0.485023975372314],\n [0.980530977249146, 0.401564002037048],\n [0.573499977588654, 0.420000016689301],\n [0.602994978427887, 0.548687994480133],\n [0.733529984951019, 0.376977026462555],\n [0.560611009597778, 0.519016981124878],\n [0.967685997486115, 0.644356966018677],\n [0.580985009670258, 0.387160003185272],\n [0.537728011608124, 0.505385041236877],\n [0.760966002941132, 0.779752969741821],\n [0.801778972148895, 0.831938028335571],\n [0.892440974712372, 0.54076099395752],\n [0.816350996494293, 0.740260004997253],\n [0.865594983100891, 0.333687007427216],\n [0.614073991775513, 0.883246004581451],\n [0.508952975273132, 0.579437971115112],\n [0.617941975593567, 0.508316040039062],\n [0.825608015060425, 0.397674977779388],\n [0.681214988231659, 0.39623498916626],\n [0.656635999679565, 0.400596976280212],\n [0.603900015354156, 0.710216999053955],\n [0.81208598613739, 0.588539004325867],\n [0.56801301240921, 0.944564998149872],\n [0.681007981300354, 0.898285031318665],\n [0.733752012252808, 0.869701027870178],\n [0.633830010890961, 0.398822009563446],\n [0.606792986392975, 0.39553701877594],\n 
[0.589659988880157, 0.391062021255493],\n [0.805015981197357, 0.342108011245728],\n [0.611334979534149, 0.362284004688263],\n [0.634037971496582, 0.355970978736877],\n [0.656635999679565, 0.355356991291046],\n [0.681214988231659, 0.35834002494812],\n [0.698584973812103, 0.363156020641327],\n [0.941866993904114, 0.319076001644135],\n [0.698584973812103, 0.387449026107788],\n [0.584177017211914, 0.624107003211975],\n [0.554318010807037, 0.566076993942261],\n [0.534153997898102, 0.62064003944397],\n [0.711217999458313, 0.819975018501282],\n [0.664629995822906, 0.852871000766754],\n [0.559099972248077, 0.902631998062134],\n [0.871706008911133, 0.791940987110138],\n [0.591234028339386, 0.373893976211548],\n [0.544341027736664, 0.451583981513977],\n [0.624562978744507, 0.924192011356354],\n [0.88577002286911, 0.615028977394104],\n [0.551338016986847, 0.695277988910675],\n [0.551980018615723, 0.704632043838501],\n [0.552887976169586, 0.715808033943176],\n [0.555167973041534, 0.730794012546539],\n [0.569944024085999, 0.767035007476807],\n [0.593203008174896, 0.685675978660583],\n [0.599261999130249, 0.681069016456604],\n [0.607599973678589, 0.677703022956848],\n [0.631937980651855, 0.663500010967255],\n [0.752032995223999, 0.601315021514893],\n [0.547226011753082, 0.420395016670227],\n [0.563543975353241, 0.359827995300293],\n [0.583841025829315, 0.368713974952698],\n [0.586614012718201, 0.692366003990173],\n [0.771915018558502, 0.683578014373779],\n [0.531597018241882, 0.352482974529266],\n [0.588370978832245, 0.804440975189209],\n [0.52079701423645, 0.442565023899078],\n [0.567984998226166, 0.493479013442993],\n [0.543282985687256, 0.819254994392395],\n [0.655317008495331, 0.745514988899231],\n [0.621008992195129, 0.574018001556396],\n [0.625559985637665, 0.78031200170517],\n [0.680198013782501, 0.570719003677368],\n [0.64276397228241, 0.604337990283966],\n [0.704662978649139, 0.621529996395111],\n [0.552012026309967, 0.862591981887817],\n [0.589071989059448, 0.508637011051178],\n [0.685944974422455, 0.775357007980347],\n [0.645735025405884, 0.812640011310577],\n [0.675342977046967, 0.703978002071381],\n [0.810858011245728, 0.646304965019226],\n [0.72012197971344, 0.714666962623596],\n [0.866151988506317, 0.682704985141754],\n [0.663187026977539, 0.644596993923187],\n [0.570082008838654, 0.466325998306274],\n [0.544561982154846, 0.548375964164734],\n [0.562758982181549, 0.558784961700439],\n [0.531987011432648, 0.530140042304993],\n [0.585271000862122, 0.335177004337311],\n [0.622952997684479, 0.32277899980545],\n [0.655896008014679, 0.320163011550903],\n [0.687132000923157, 0.322345972061157],\n [0.716481983661652, 0.333200991153717],\n [0.758756995201111, 0.382786989212036],\n [0.897013008594513, 0.468769013881683],\n [0.732392013072968, 0.424547016620636],\n [0.70211398601532, 0.433162987232208],\n [0.66652500629425, 0.433866024017334],\n [0.633504986763, 0.426087975502014],\n [0.603875994682312, 0.416586995124817],\n [0.579657971858978, 0.409945011138916],\n [0.992439985275269, 0.480777025222778],\n [0.567192018032074, 0.569419980049133],\n [0.54136598110199, 0.478899002075195],\n [0.526564002037048, 0.546118021011353],\n [0.523913025856018, 0.563830018043518],\n [0.531529009342194, 0.555056989192963],\n [0.566035985946655, 0.582329034805298],\n [0.51631098985672, 0.563053965568542],\n [0.5174720287323, 0.577877044677734],\n [0.573594987392426, 0.389806985855103],\n [0.560697972774506, 0.395331978797913],\n [0.549755990505219, 0.399751007556915],\n [0.710287988185883, 0.368252992630005],\n 
[0.723330020904541, 0.363372981548309],\n];\n", "import { tf, loadGraphModel } from '../tf.js';\nimport * as blazeface from './blazeface';\nimport * as keypoints from './keypoints';\nimport * as pipe from './facepipeline';\nimport * as uv_coords from './uvcoords';\nimport * as triangulation from './triangulation';\n\nclass MediaPipeFaceMesh {\n constructor(blazeFace, blazeMeshModel, irisModel, config) {\n this.pipeline = new pipe.Pipeline(blazeFace, blazeMeshModel, irisModel, config);\n if (config) this.config = config;\n }\n\n async estimateFaces(input, config) {\n if (config) this.config = config;\n const predictions = await this.pipeline.predict(input, config);\n const results = [];\n for (const prediction of (predictions || [])) {\n // guard against disposed tensors on long running operations such as pause in middle of processing\n if (prediction.isDisposedInternal) continue;\n const mesh = prediction.coords ? prediction.coords.arraySync() : null;\n const annotations = {};\n if (mesh && mesh.length > 0) {\n for (const key in keypoints.MESH_ANNOTATIONS) {\n if (this.config.iris.enabled || key.includes('Iris') === false) {\n annotations[key] = keypoints.MESH_ANNOTATIONS[key].map((index) => mesh[index]);\n }\n }\n }\n results.push({\n confidence: prediction.confidence || 0,\n box: prediction.box ? [prediction.box.startPoint[0], prediction.box.startPoint[1], prediction.box.endPoint[0] - prediction.box.startPoint[0], prediction.box.endPoint[1] - prediction.box.startPoint[1]] : 0,\n mesh,\n annotations,\n image: prediction.image ? tf.clone(prediction.image) : null,\n });\n if (prediction.coords) prediction.coords.dispose();\n if (prediction.image) prediction.image.dispose();\n }\n return results;\n }\n}\n\nasync function load(config) {\n const models = await Promise.all([\n blazeface.load(config),\n loadGraphModel(config.mesh.modelPath, { fromTFHub: config.mesh.modelPath.includes('tfhub.dev') }),\n loadGraphModel(config.iris.modelPath, { fromTFHub: config.iris.modelPath.includes('tfhub.dev') }),\n ]);\n const faceMesh = new MediaPipeFaceMesh(models[0], models[1], models[2], config);\n // eslint-disable-next-line no-console\n console.log(`Human: load model: ${config.mesh.modelPath.match(/\\/(.*)\\./)[1]}`);\n // eslint-disable-next-line no-console\n console.log(`Human: load model: ${config.iris.modelPath.match(/\\/(.*)\\./)[1]}`);\n return faceMesh;\n}\n\nexports.load = load;\nexports.MediaPipeFaceMesh = MediaPipeFaceMesh;\nexports.uv_coords = uv_coords;\nexports.triangulation = triangulation.default;\n", "const profileData = {};\n\nfunction profile(name, data) {\n if (!data || !data.kernels) return;\n const maxResults = 5;\n const time = data.kernels\n .filter((a) => a.kernelTimeMs > 0)\n .reduce((a, b) => a += b.kernelTimeMs, 0);\n const slowest = data.kernels\n .map((a, i) => { a.id = i; return a; })\n .filter((a) => a.kernelTimeMs > 0)\n .sort((a, b) => b.kernelTimeMs - a.kernelTimeMs);\n const largest = data.kernels\n .map((a, i) => { a.id = i; return a; })\n .filter((a) => a.totalBytesSnapshot > 0)\n .sort((a, b) => b.totalBytesSnapshot - a.totalBytesSnapshot);\n if (slowest.length > maxResults) slowest.length = maxResults;\n if (largest.length > maxResults) largest.length = maxResults;\n const res = { newBytes: data.newBytes, newTensors: data.newTensors, peakBytes: data.peakBytes, numKernelOps: data.kernels.length, timeKernelOps: time, slowestKernelOps: slowest, largestKernelOps: largest };\n profileData[name] = res;\n // eslint-disable-next-line no-console\n console.log('Human 
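The bundled estimateFaces() above converts each prediction's startPoint/endPoint pair into an [x, y, width, height] box. A minimal standalone sketch of that conversion with invented sample points (toBox is a hypothetical helper, not a library API):

// Convert detector start/end corner points into the [left, top, width, height]
// layout produced by the bundled estimateFaces(); sample values are made up.
function toBox(prediction) {
  const [x1, y1] = prediction.startPoint;
  const [x2, y2] = prediction.endPoint;
  return [x1, y1, x2 - x1, y2 - y1];
}

console.log(toBox({ startPoint: [10, 20], endPoint: [110, 170] })); // [10, 20, 100, 150]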
profiler', name, res);\n}\n\nexports.run = profile;\n", "import { tf, loadGraphModel } from '../tf.js';\nimport * as profile from '../profile.js';\n\nconst models = {};\nlet last = { age: 0 };\nlet frame = Number.MAX_SAFE_INTEGER;\n\n// tuning values\nconst zoom = [0, 0]; // 0..1 meaning 0%..100%\n\nasync function load(config) {\n if (!models.age) {\n models.age = await loadGraphModel(config.face.age.modelPath);\n // eslint-disable-next-line no-console\n console.log(`Human: load model: ${config.face.age.modelPath.match(/\\/(.*)\\./)[1]}`);\n }\n return models.age;\n}\n\nasync function predict(image, config) {\n if ((frame < config.face.age.skipFrames) && last.age && (last.age > 0)) {\n frame += 1;\n return last;\n }\n frame = 0;\n return new Promise(async (resolve) => {\n const box = [[\n (image.shape[1] * zoom[0]) / image.shape[1],\n (image.shape[2] * zoom[1]) / image.shape[2],\n (image.shape[1] - (image.shape[1] * zoom[0])) / image.shape[1],\n (image.shape[2] - (image.shape[2] * zoom[1])) / image.shape[2],\n ]];\n const resize = tf.image.cropAndResize(image, box, [0], [config.face.age.inputSize, config.face.age.inputSize]);\n // const resize = tf.image.resizeBilinear(image, [config.face.age.inputSize, config.face.age.inputSize], false);\n const enhance = tf.mul(resize, [255.0]);\n tf.dispose(resize);\n\n let ageT;\n const obj = {};\n\n if (!config.profile) {\n if (config.face.age.enabled) ageT = await models.age.predict(enhance);\n } else {\n const profileAge = config.face.age.enabled ? await tf.profile(() => models.age.predict(enhance)) : {};\n ageT = profileAge.result.clone();\n profileAge.result.dispose();\n profile.run('age', profileAge);\n }\n enhance.dispose();\n\n if (ageT) {\n const data = ageT.dataSync();\n obj.age = Math.trunc(10 * data[0]) / 10;\n }\n ageT.dispose();\n\n last = obj;\n resolve(obj);\n });\n}\n\nexports.predict = predict;\nexports.load = load;\n", "import { tf, loadGraphModel } from '../tf.js';\nimport * as profile from '../profile.js';\n\nconst models = {};\nlet last = { gender: '' };\nlet frame = Number.MAX_SAFE_INTEGER;\nlet alternative = false;\n\n// tuning values\nconst zoom = [0, 0]; // 0..1 meaning 0%..100%\nconst rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when converting to grayscale\n\nasync function load(config) {\n if (!models.gender) {\n models.gender = await loadGraphModel(config.face.gender.modelPath);\n alternative = models.gender.inputs[0].shape[3] === 1;\n // eslint-disable-next-line no-console\n console.log(`Human: load model: ${config.face.gender.modelPath.match(/\\/(.*)\\./)[1]}`);\n }\n return models.gender;\n}\n\nasync function predict(image, config) {\n if ((frame < config.face.gender.skipFrames) && last.gender !== '') {\n frame += 1;\n return last;\n }\n frame = 0;\n return new Promise(async (resolve) => {\n const box = [[\n (image.shape[1] * zoom[0]) / image.shape[1],\n (image.shape[2] * zoom[1]) / image.shape[2],\n (image.shape[1] - (image.shape[1] * zoom[0])) / image.shape[1],\n (image.shape[2] - (image.shape[2] * zoom[1])) / image.shape[2],\n ]];\n const resize = tf.image.cropAndResize(image, box, [0], [config.face.gender.inputSize, config.face.gender.inputSize]);\n let enhance;\n if (alternative) {\n enhance = tf.tidy(() => {\n const [red, green, blue] = tf.split(resize, 3, 3);\n const redNorm = tf.mul(red, rgb[0]);\n const greenNorm = tf.mul(green, rgb[1]);\n const blueNorm = tf.mul(blue, rgb[2]);\n const grayscale = tf.addN([redNorm, greenNorm, blueNorm]);\n return grayscale.sub(0.5).mul(2);\n });\n } else 
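The profile() helper above reduces a tf.profile() result to total kernel time plus the slowest kernels. A hedged sketch of that aggregation over plain objects (summarize and the sample kernel records are illustrative; only the kernelTimeMs field mirrors the bundled code):

// Sum kernel execution times and keep the N slowest entries, as the bundled profiler does.
function summarize(kernels, maxResults = 5) {
  const timed = kernels.filter((k) => k.kernelTimeMs > 0);
  const total = timed.reduce((sum, k) => sum + k.kernelTimeMs, 0);
  const slowest = [...timed].sort((a, b) => b.kernelTimeMs - a.kernelTimeMs).slice(0, maxResults);
  return { numKernelOps: kernels.length, timeKernelOps: total, slowestKernelOps: slowest };
}

console.log(summarize([{ kernelTimeMs: 1.5 }, { kernelTimeMs: 0 }, { kernelTimeMs: 4.5 }, { kernelTimeMs: 2 }]));
// { numKernelOps: 4, timeKernelOps: 8, slowestKernelOps: [ {4.5}, {2}, {1.5} ] }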
{\n enhance = tf.mul(resize, [255.0]);\n }\n // const resize = tf.image.resizeBilinear(image, [config.face.age.inputSize, config.face.age.inputSize], false);\n tf.dispose(resize);\n\n let genderT;\n const obj = {};\n\n if (!config.profile) {\n if (config.face.gender.enabled) genderT = await models.gender.predict(enhance);\n } else {\n const profileGender = config.face.gender.enabled ? await tf.profile(() => models.gender.predict(enhance)) : {};\n genderT = profileGender.result.clone();\n profileGender.result.dispose();\n profile.run('gender', profileGender);\n }\n enhance.dispose();\n\n if (genderT) {\n const data = genderT.dataSync();\n if (alternative) {\n // returns two values 0..1, bigger one is prediction\n const confidence = Math.trunc(100 * Math.abs(data[0] - data[1])) / 100;\n if (confidence > config.face.gender.minConfidence) {\n obj.gender = data[0] > data[1] ? 'female' : 'male';\n obj.confidence = confidence;\n }\n } else {\n // returns one value 0..1, .5 is prediction threshold\n const confidence = Math.trunc(200 * Math.abs((data[0] - 0.5))) / 100;\n if (confidence > config.face.gender.minConfidence) {\n obj.gender = data[0] <= 0.5 ? 'female' : 'male';\n obj.confidence = Math.min(0.99, confidence);\n }\n }\n }\n genderT.dispose();\n\n last = obj;\n resolve(obj);\n });\n}\n\nexports.predict = predict;\nexports.load = load;\n", "import { tf, loadGraphModel } from '../tf.js';\nimport * as profile from '../profile.js';\n\nconst annotations = ['angry', 'disgust', 'fear', 'happy', 'sad', 'surpise', 'neutral'];\nconst models = {};\nlet last = [];\nlet frame = Number.MAX_SAFE_INTEGER;\n\n// tuning values\nconst zoom = [0, 0]; // 0..1 meaning 0%..100%\nconst rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when converting to grayscale\nconst scale = 1; // score multiplication factor\n\nasync function load(config) {\n if (!models.emotion) {\n models.emotion = await loadGraphModel(config.face.emotion.modelPath);\n // eslint-disable-next-line no-console\n console.log(`Human: load model: ${config.face.emotion.modelPath.match(/\\/(.*)\\./)[1]}`);\n }\n return models.emotion;\n}\n\nasync function predict(image, config) {\n if ((frame < config.face.emotion.skipFrames) && (last.length > 0)) {\n frame += 1;\n return last;\n }\n frame = 0;\n return new Promise(async (resolve) => {\n const box = [[\n (image.shape[1] * zoom[0]) / image.shape[1],\n (image.shape[2] * zoom[1]) / image.shape[2],\n (image.shape[1] - (image.shape[1] * zoom[0])) / image.shape[1],\n (image.shape[2] - (image.shape[2] * zoom[1])) / image.shape[2],\n ]];\n const resize = tf.image.cropAndResize(image, box, [0], [config.face.emotion.inputSize, config.face.emotion.inputSize]);\n // const resize = tf.image.resizeBilinear(image, [config.face.emotion.inputSize, config.face.emotion.inputSize], false);\n const [red, green, blue] = tf.split(resize, 3, 3);\n resize.dispose();\n // weighted rgb to grayscale: https://www.mathworks.com/help/matlab/ref/rgb2gray.html\n const redNorm = tf.mul(red, rgb[0]);\n const greenNorm = tf.mul(green, rgb[1]);\n const blueNorm = tf.mul(blue, rgb[2]);\n red.dispose();\n green.dispose();\n blue.dispose();\n const grayscale = tf.addN([redNorm, greenNorm, blueNorm]);\n redNorm.dispose();\n greenNorm.dispose();\n blueNorm.dispose();\n const normalize = tf.tidy(() => grayscale.sub(0.5).mul(2));\n grayscale.dispose();\n const obj = [];\n if (config.face.emotion.enabled) {\n let data;\n if (!config.profile) {\n const emotionT = await models.emotion.predict(normalize);\n data = 
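Both the gender and emotion paths above collapse RGB input to grayscale with the 0.2989/0.5870/0.1140 weights and then rescale to -1..1. A tensor-free sketch of the same per-pixel math, with invented pixel values:

// Weighted RGB-to-grayscale (rgb2gray coefficients) followed by the .sub(0.5).mul(2)
// normalization used in the bundled preprocessing; plain arrays stand in for tensors.
const RGB_WEIGHTS = [0.2989, 0.5870, 0.1140];

function toNormalizedGray(pixels) { // pixels: [[r, g, b], ...] with channels in 0..1
  return pixels.map(([r, g, b]) => {
    const gray = r * RGB_WEIGHTS[0] + g * RGB_WEIGHTS[1] + b * RGB_WEIGHTS[2];
    return (gray - 0.5) * 2; // map 0..1 to -1..1
  });
}

console.log(toNormalizedGray([[1, 1, 1], [0, 0, 0]])); // [ ~1, -1 ]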
emotionT.dataSync();\n tf.dispose(emotionT);\n } else {\n const profileData = await tf.profile(() => models.emotion.predict(normalize));\n data = profileData.result.dataSync();\n profileData.result.dispose();\n profile.run('emotion', profileData);\n }\n for (let i = 0; i < data.length; i++) {\n if (scale * data[i] > config.face.emotion.minConfidence) obj.push({ score: Math.min(0.99, Math.trunc(100 * scale * data[i]) / 100), emotion: annotations[i] });\n }\n obj.sort((a, b) => b.score - a.score);\n }\n normalize.dispose();\n last = obj;\n resolve(obj);\n });\n}\n\nexports.predict = predict;\nexports.load = load;\n", "import { tf } from '../tf.js';\n\nclass BaseModel {\n constructor(model, outputStride) {\n this.model = model;\n this.outputStride = outputStride;\n }\n\n predict(input) {\n return tf.tidy(() => {\n const asFloat = this.preprocessInput(input.toFloat());\n const asBatch = asFloat.expandDims(0);\n const results = this.model.predict(asBatch);\n const results3d = results.map((y) => y.squeeze([0]));\n const namedResults = this.nameOutputResults(results3d);\n return {\n heatmapScores: namedResults.heatmap.sigmoid(),\n offsets: namedResults.offsets,\n displacementFwd: namedResults.displacementFwd,\n displacementBwd: namedResults.displacementBwd,\n };\n });\n }\n\n /**\n * Releases the CPU and GPU memory allocated by the model.\n */\n dispose() {\n this.model.dispose();\n }\n}\nexports.BaseModel = BaseModel;\n", "import { tf } from '../tf.js';\nimport * as modelBase from './modelBase';\n\nclass MobileNet extends modelBase.BaseModel {\n // eslint-disable-next-line class-methods-use-this\n preprocessInput(input) {\n // Normalize the pixels [0, 255] to be between [-1, 1].\n return tf.tidy(() => tf.div(input, 127.5).sub(1.0));\n }\n\n // eslint-disable-next-line class-methods-use-this\n nameOutputResults(results) {\n const [offsets, heatmap, displacementFwd, displacementBwd] = results;\n return { offsets, heatmap, displacementFwd, displacementBwd };\n }\n}\nexports.MobileNet = MobileNet;\n", "// algorithm based on Coursera Lecture from Algorithms, Part 1: https://www.coursera.org/learn/algorithms-part1/lecture/ZjoSM/heapsort\nfunction half(k) {\n return Math.floor(k / 2);\n}\nclass MaxHeap {\n constructor(maxSize, getElementValue) {\n this.priorityQueue = new Array(maxSize);\n this.numberOfElements = -1;\n this.getElementValue = getElementValue;\n }\n\n enqueue(x) {\n this.priorityQueue[++this.numberOfElements] = x;\n this.swim(this.numberOfElements);\n }\n\n dequeue() {\n const max = this.priorityQueue[0];\n this.exchange(0, this.numberOfElements--);\n this.sink(0);\n this.priorityQueue[this.numberOfElements + 1] = null;\n return max;\n }\n\n empty() {\n return this.numberOfElements === -1;\n }\n\n size() {\n return this.numberOfElements + 1;\n }\n\n all() {\n return this.priorityQueue.slice(0, this.numberOfElements + 1);\n }\n\n max() {\n return this.priorityQueue[0];\n }\n\n swim(k) {\n while (k > 0 && this.less(half(k), k)) {\n this.exchange(k, half(k));\n k = half(k);\n }\n }\n\n sink(k) {\n while (2 * k <= this.numberOfElements) {\n let j = 2 * k;\n if (j < this.numberOfElements && this.less(j, j + 1)) j++;\n if (!this.less(k, j)) break;\n this.exchange(k, j);\n k = j;\n }\n }\n\n getValueAt(i) {\n return this.getElementValue(this.priorityQueue[i]);\n }\n\n less(i, j) {\n return this.getValueAt(i) < this.getValueAt(j);\n }\n\n exchange(i, j) {\n const t = this.priorityQueue[i];\n this.priorityQueue[i] = this.priorityQueue[j];\n this.priorityQueue[j] = t;\n }\n}\nexports.MaxHeap = 
MaxHeap;\n", "import * as heapSort from './heapSort';\n\nfunction scoreIsMaximumInLocalWindow(keypointId, score, heatmapY, heatmapX, localMaximumRadius, scores) {\n const [height, width] = scores.shape;\n let localMaximum = true;\n const yStart = Math.max(heatmapY - localMaximumRadius, 0);\n const yEnd = Math.min(heatmapY + localMaximumRadius + 1, height);\n for (let yCurrent = yStart; yCurrent < yEnd; ++yCurrent) {\n const xStart = Math.max(heatmapX - localMaximumRadius, 0);\n const xEnd = Math.min(heatmapX + localMaximumRadius + 1, width);\n for (let xCurrent = xStart; xCurrent < xEnd; ++xCurrent) {\n if (scores.get(yCurrent, xCurrent, keypointId) > score) {\n localMaximum = false;\n break;\n }\n }\n if (!localMaximum) {\n break;\n }\n }\n return localMaximum;\n}\n/**\n * Builds a priority queue with part candidate positions for a specific image in\n * the batch. For this we find all local maxima in the score maps with score\n * values above a threshold. We create a single priority queue across all parts.\n */\nfunction buildPartWithScoreQueue(scoreThreshold, localMaximumRadius, scores) {\n const [height, width, numKeypoints] = scores.shape;\n const queue = new heapSort.MaxHeap(height * width * numKeypoints, ({ score }) => score);\n for (let heatmapY = 0; heatmapY < height; ++heatmapY) {\n for (let heatmapX = 0; heatmapX < width; ++heatmapX) {\n for (let keypointId = 0; keypointId < numKeypoints; ++keypointId) {\n const score = scores.get(heatmapY, heatmapX, keypointId);\n // Only consider parts with score greater or equal to threshold as root candidates.\n if (score < scoreThreshold) continue;\n // Only consider keypoints whose score is maximum in a local window.\n if (scoreIsMaximumInLocalWindow(keypointId, score, heatmapY, heatmapX, localMaximumRadius, scores)) {\n queue.enqueue({ score, part: { heatmapY, heatmapX, id: keypointId } });\n }\n }\n }\n }\n return queue;\n}\nexports.buildPartWithScoreQueue = buildPartWithScoreQueue;\n", "exports.partNames = [\n 'nose', 'leftEye', 'rightEye', 'leftEar', 'rightEar', 'leftShoulder',\n 'rightShoulder', 'leftElbow', 'rightElbow', 'leftWrist', 'rightWrist',\n 'leftHip', 'rightHip', 'leftKnee', 'rightKnee', 'leftAnkle', 'rightAnkle',\n];\nexports.NUM_KEYPOINTS = exports.partNames.length;\nexports.partIds = exports.partNames.reduce((result, jointName, i) => {\n result[jointName] = i;\n return result;\n}, {});\nconst connectedPartNames = [\n ['leftHip', 'leftShoulder'], ['leftElbow', 'leftShoulder'],\n ['leftElbow', 'leftWrist'], ['leftHip', 'leftKnee'],\n ['leftKnee', 'leftAnkle'], ['rightHip', 'rightShoulder'],\n ['rightElbow', 'rightShoulder'], ['rightElbow', 'rightWrist'],\n ['rightHip', 'rightKnee'], ['rightKnee', 'rightAnkle'],\n ['leftShoulder', 'rightShoulder'], ['leftHip', 'rightHip'],\n];\n/*\n * Define the skeleton. This defines the parent->child relationships of our\n * tree. 
Arbitrarily this defines the nose as the root of the tree, however\n * since we will infer the displacement for both parent->child and\n * child->parent, we can define the tree root as any node.\n */\nexports.poseChain = [\n ['nose', 'leftEye'], ['leftEye', 'leftEar'], ['nose', 'rightEye'],\n ['rightEye', 'rightEar'], ['nose', 'leftShoulder'],\n ['leftShoulder', 'leftElbow'], ['leftElbow', 'leftWrist'],\n ['leftShoulder', 'leftHip'], ['leftHip', 'leftKnee'],\n ['leftKnee', 'leftAnkle'], ['nose', 'rightShoulder'],\n ['rightShoulder', 'rightElbow'], ['rightElbow', 'rightWrist'],\n ['rightShoulder', 'rightHip'], ['rightHip', 'rightKnee'],\n ['rightKnee', 'rightAnkle'],\n];\nexports.connectedPartIndices = connectedPartNames.map(([jointNameA, jointNameB]) => ([exports.partIds[jointNameA], exports.partIds[jointNameB]]));\nexports.partChannels = [\n 'left_face',\n 'right_face',\n 'right_upper_leg_front',\n 'right_lower_leg_back',\n 'right_upper_leg_back',\n 'left_lower_leg_front',\n 'left_upper_leg_front',\n 'left_upper_leg_back',\n 'left_lower_leg_back',\n 'right_feet',\n 'right_lower_leg_front',\n 'left_feet',\n 'torso_front',\n 'torso_back',\n 'right_upper_arm_front',\n 'right_upper_arm_back',\n 'right_lower_arm_back',\n 'left_lower_arm_front',\n 'left_upper_arm_front',\n 'left_upper_arm_back',\n 'left_lower_arm_back',\n 'right_hand',\n 'right_lower_arm_front',\n 'left_hand',\n];\n", "import * as kpt from './keypoints';\n\nfunction getOffsetPoint(y, x, keypoint, offsets) {\n return {\n y: offsets.get(y, x, keypoint),\n x: offsets.get(y, x, keypoint + kpt.NUM_KEYPOINTS),\n };\n}\nexports.getOffsetPoint = getOffsetPoint;\n\nfunction getImageCoords(part, outputStride, offsets) {\n const { heatmapY, heatmapX, id: keypoint } = part;\n const { y, x } = getOffsetPoint(heatmapY, heatmapX, keypoint, offsets);\n return {\n x: part.heatmapX * outputStride + x,\n y: part.heatmapY * outputStride + y,\n };\n}\nexports.getImageCoords = getImageCoords;\n\nfunction fillArray(element, size) {\n const result = new Array(size);\n for (let i = 0; i < size; i++) {\n result[i] = element;\n }\n return result;\n}\nexports.fillArray = fillArray;\n\nfunction clamp(a, min, max) {\n if (a < min) return min;\n if (a > max) return max;\n return a;\n}\nexports.clamp = clamp;\n\nfunction squaredDistance(y1, x1, y2, x2) {\n const dy = y2 - y1;\n const dx = x2 - x1;\n return dy * dy + dx * dx;\n}\nexports.squaredDistance = squaredDistance;\n\nfunction addVectors(a, b) {\n return { x: a.x + b.x, y: a.y + b.y };\n}\nexports.addVectors = addVectors;\n\nfunction clampVector(a, min, max) {\n return { y: clamp(a.y, min, max), x: clamp(a.x, min, max) };\n}\nexports.clampVector = clampVector;\n", "import * as keypoints from './keypoints';\nimport * as vectors from './vectors';\n\nconst parentChildrenTuples = keypoints.poseChain.map(([parentJoinName, childJoinName]) => ([keypoints.partIds[parentJoinName], keypoints.partIds[childJoinName]]));\nconst parentToChildEdges = parentChildrenTuples.map(([, childJointId]) => childJointId);\nconst childToParentEdges = parentChildrenTuples.map(([parentJointId]) => parentJointId);\nfunction getDisplacement(edgeId, point, displacements) {\n const numEdges = displacements.shape[2] / 2;\n return {\n y: displacements.get(point.y, point.x, edgeId),\n x: displacements.get(point.y, point.x, numEdges + edgeId),\n };\n}\nfunction getStridedIndexNearPoint(point, outputStride, height, width) {\n return {\n y: vectors.clamp(Math.round(point.y / outputStride), 0, height - 1),\n x: 
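getImageCoords() above maps a heatmap cell back into image space by multiplying the cell index with the output stride and adding the sub-cell offset. A short sketch with invented offsets (outputStride 16 matches the PoseNet default used later in this bundle):

// image position = heatmap index * outputStride + learned offset, as in the bundled getImageCoords().
function toImageCoords(part, outputStride, offset) {
  return {
    x: part.heatmapX * outputStride + offset.x,
    y: part.heatmapY * outputStride + offset.y,
  };
}

console.log(toImageCoords({ heatmapY: 7, heatmapX: 12 }, 16, { x: 3.5, y: -2 })); // { x: 195.5, y: 110 }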
vectors.clamp(Math.round(point.x / outputStride), 0, width - 1),\n };\n}\n/**\n * We get a new keypoint along the `edgeId` for the pose instance, assuming\n * that the position of the `idSource` part is already known. For this, we\n * follow the displacement vector from the source to target part (stored in\n * the `i`-t channel of the displacement tensor). The displaced keypoint\n * vector is refined using the offset vector by `offsetRefineStep` times.\n */\nfunction traverseToTargetKeypoint(edgeId, sourceKeypoint, targetKeypointId, scoresBuffer, offsets, outputStride, displacements, offsetRefineStep = 2) {\n const [height, width] = scoresBuffer.shape;\n // Nearest neighbor interpolation for the source->target displacements.\n const sourceKeypointIndices = getStridedIndexNearPoint(sourceKeypoint.position, outputStride, height, width);\n const displacement = getDisplacement(edgeId, sourceKeypointIndices, displacements);\n const displacedPoint = vectors.addVectors(sourceKeypoint.position, displacement);\n let targetKeypoint = displacedPoint;\n for (let i = 0; i < offsetRefineStep; i++) {\n const targetKeypointIndices = getStridedIndexNearPoint(targetKeypoint, outputStride, height, width);\n const offsetPoint = vectors.getOffsetPoint(targetKeypointIndices.y, targetKeypointIndices.x, targetKeypointId, offsets);\n targetKeypoint = vectors.addVectors({\n x: targetKeypointIndices.x * outputStride,\n y: targetKeypointIndices.y * outputStride,\n }, { x: offsetPoint.x, y: offsetPoint.y });\n }\n const targetKeyPointIndices = getStridedIndexNearPoint(targetKeypoint, outputStride, height, width);\n const score = scoresBuffer.get(targetKeyPointIndices.y, targetKeyPointIndices.x, targetKeypointId);\n return { position: targetKeypoint, part: keypoints.partNames[targetKeypointId], score };\n}\n/**\n * Follows the displacement fields to decode the full pose of the object\n * instance given the position of a part that acts as root.\n *\n * @return An array of decoded keypoints and their scores for a single pose\n */\nfunction decodePose(root, scores, offsets, outputStride, displacementsFwd, displacementsBwd) {\n const numParts = scores.shape[2];\n const numEdges = parentToChildEdges.length;\n const instanceKeypoints = new Array(numParts);\n // Start a new detection instance at the position of the root.\n const { part: rootPart, score: rootScore } = root;\n const rootPoint = vectors.getImageCoords(rootPart, outputStride, offsets);\n instanceKeypoints[rootPart.id] = {\n score: rootScore,\n part: keypoints.partNames[rootPart.id],\n position: rootPoint,\n };\n // Decode the part positions upwards in the tree, following the backward\n // displacements.\n for (let edge = numEdges - 1; edge >= 0; --edge) {\n const sourceKeypointId = parentToChildEdges[edge];\n const targetKeypointId = childToParentEdges[edge];\n if (instanceKeypoints[sourceKeypointId] && !instanceKeypoints[targetKeypointId]) {\n instanceKeypoints[targetKeypointId] = traverseToTargetKeypoint(edge, instanceKeypoints[sourceKeypointId], targetKeypointId, scores, offsets, outputStride, displacementsBwd);\n }\n }\n // Decode the part positions downwards in the tree, following the forward\n // displacements.\n for (let edge = 0; edge < numEdges; ++edge) {\n const sourceKeypointId = childToParentEdges[edge];\n const targetKeypointId = parentToChildEdges[edge];\n if (instanceKeypoints[sourceKeypointId] && !instanceKeypoints[targetKeypointId]) {\n instanceKeypoints[targetKeypointId] = traverseToTargetKeypoint(edge, instanceKeypoints[sourceKeypointId], 
targetKeypointId, scores, offsets, outputStride, displacementsFwd);\n }\n }\n return instanceKeypoints;\n}\nexports.decodePose = decodePose;\n", "import * as buildParts from './buildParts';\nimport * as decodePose from './decodePose';\nimport * as vectors from './vectors';\n\nfunction withinNmsRadiusOfCorrespondingPoint(poses, squaredNmsRadius, { x, y }, keypointId) {\n return poses.some(({ keypoints }) => {\n const correspondingKeypoint = keypoints[keypointId].position;\n return vectors.squaredDistance(y, x, correspondingKeypoint.y, correspondingKeypoint.x) <= squaredNmsRadius;\n });\n}\n/* Score the newly proposed object instance without taking into account\n * the scores of the parts that overlap with any previously detected\n * instance.\n */\nfunction getInstanceScore(existingPoses, squaredNmsRadius, instanceKeypoints) {\n const notOverlappedKeypointScores = instanceKeypoints.reduce((result, { position, score }, keypointId) => {\n if (!withinNmsRadiusOfCorrespondingPoint(existingPoses, squaredNmsRadius, position, keypointId)) {\n result += score;\n }\n return result;\n }, 0.0);\n return notOverlappedKeypointScores / instanceKeypoints.length;\n}\n// A point (y, x) is considered as root part candidate if its score is a\n// maximum in a window |y - y'| <= kLocalMaximumRadius, |x - x'| <=\n// kLocalMaximumRadius.\nconst kLocalMaximumRadius = 1;\n/**\n * Detects multiple poses and finds their parts from part scores and\n * displacement vectors. It returns up to `maxDetections` object instance\n * detections in decreasing root score order. It works as follows: We first\n * create a priority queue with local part score maxima above\n * `scoreThreshold`, considering all parts at the same time. Then we\n * iteratively pull the top element of the queue (in decreasing score order)\n * and treat it as a root candidate for a new object instance. To avoid\n * duplicate detections, we reject the root candidate if it is within a disk\n * of `nmsRadius` pixels from the corresponding part of a previously detected\n * instance, which is a form of part-based non-maximum suppression (NMS). If\n * the root candidate passes the NMS check, we start a new object instance\n * detection, treating the corresponding part as root and finding the\n * positions of the remaining parts by following the displacement vectors\n * along the tree-structured part graph. We assign to the newly detected\n * instance a score equal to the sum of scores of its parts which have not\n * been claimed by a previous instance (i.e., those at least `nmsRadius`\n * pixels away from the corresponding part of all previously detected\n * instances), divided by the total number of parts `numParts`.\n *\n * @param heatmapScores 3-D tensor with shape `[height, width, numParts]`.\n * The value of heatmapScores[y, x, k]` is the score of placing the `k`-th\n * object part at position `(y, x)`.\n *\n * @param offsets 3-D tensor with shape `[height, width, numParts * 2]`.\n * The value of [offsets[y, x, k], offsets[y, x, k + numParts]]` is the\n * short range offset vector of the `k`-th object part at heatmap\n * position `(y, x)`.\n *\n * @param displacementsFwd 3-D tensor of shape\n * `[height, width, 2 * num_edges]`, where `num_edges = num_parts - 1` is the\n * number of edges (parent-child pairs) in the tree. 
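The multi-pose decoder above rejects a new root candidate when a previously detected pose already claims the same part within nmsRadius pixels. A compact sketch of that squared-distance test with made-up poses:

// Part-based NMS check: is `point` within nmsRadius of the same keypoint of any existing pose?
function withinNmsRadius(poses, nmsRadius, point, keypointId) {
  const squared = nmsRadius * nmsRadius;
  return poses.some(({ keypoints }) => {
    const p = keypoints[keypointId].position;
    const dy = p.y - point.y;
    const dx = p.x - point.x;
    return dy * dy + dx * dx <= squared;
  });
}

const existing = [{ keypoints: [{ position: { x: 100, y: 100 } }] }];
console.log(withinNmsRadius(existing, 20, { x: 110, y: 110 }, 0)); // true  (~14 px away)
console.log(withinNmsRadius(existing, 20, { x: 150, y: 100 }, 0)); // false (50 px away)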
It contains the forward\n * displacements between consecutive part from the root towards the leaves.\n *\n * @param displacementsBwd 3-D tensor of shape\n * `[height, width, 2 * num_edges]`, where `num_edges = num_parts - 1` is the\n * number of edges (parent-child pairs) in the tree. It contains the backward\n * displacements between consecutive part from the root towards the leaves.\n *\n * @param outputStride The output stride that was used when feed-forwarding\n * through the PoseNet model. Must be 32, 16, or 8.\n *\n * @param maxPoseDetections Maximum number of returned instance detections per\n * image.\n *\n * @param scoreThreshold Only return instance detections that have root part\n * score greater or equal to this value. Defaults to 0.5.\n *\n * @param nmsRadius Non-maximum suppression part distance. It needs to be\n * strictly positive. Two parts suppress each other if they are less than\n * `nmsRadius` pixels away. Defaults to 20.\n *\n * @return An array of poses and their scores, each containing keypoints and\n * the corresponding keypoint scores.\n */\nfunction decodeMultiplePoses(scoresBuffer, offsetsBuffer, displacementsFwdBuffer, displacementsBwdBuffer, outputStride, maxPoseDetections, scoreThreshold = 0.5, nmsRadius = 20) {\n const poses = [];\n const queue = buildParts.buildPartWithScoreQueue(scoreThreshold, kLocalMaximumRadius, scoresBuffer);\n const squaredNmsRadius = nmsRadius * nmsRadius;\n // Generate at most maxDetections object instances per image in\n // decreasing root part score order.\n while (poses.length < maxPoseDetections && !queue.empty()) {\n // The top element in the queue is the next root candidate.\n const root = queue.dequeue();\n // Part-based non-maximum suppression: We reject a root candidate if it\n // is within a disk of `nmsRadius` pixels from the corresponding part of\n // a previously detected instance.\n const rootImageCoords = vectors.getImageCoords(root.part, outputStride, offsetsBuffer);\n if (withinNmsRadiusOfCorrespondingPoint(poses, squaredNmsRadius, rootImageCoords, root.part.id)) continue;\n // Start a new detection instance at the position of the root.\n const keypoints = decodePose.decodePose(root, scoresBuffer, offsetsBuffer, outputStride, displacementsFwdBuffer, displacementsBwdBuffer);\n const score = getInstanceScore(poses, squaredNmsRadius, keypoints);\n poses.push({ keypoints, score });\n }\n return poses;\n}\nexports.decodeMultiplePoses = decodeMultiplePoses;\n", "import * as kpt from './keypoints';\n\nfunction eitherPointDoesntMeetConfidence(a, b, minConfidence) {\n return (a < minConfidence || b < minConfidence);\n}\n\nfunction getAdjacentKeyPoints(keypoints, minConfidence) {\n return kpt.connectedPartIndices.reduce((result, [leftJoint, rightJoint]) => {\n if (eitherPointDoesntMeetConfidence(keypoints[leftJoint].score, keypoints[rightJoint].score, minConfidence)) {\n return result;\n }\n result.push([keypoints[leftJoint], keypoints[rightJoint]]);\n return result;\n }, []);\n}\nexports.getAdjacentKeyPoints = getAdjacentKeyPoints;\n\nconst { NEGATIVE_INFINITY, POSITIVE_INFINITY } = Number;\nfunction getBoundingBox(keypoints) {\n return keypoints.reduce(({ maxX, maxY, minX, minY }, { position: { x, y } }) => ({\n maxX: Math.max(maxX, x),\n maxY: Math.max(maxY, y),\n minX: Math.min(minX, x),\n minY: Math.min(minY, y),\n }), {\n maxX: NEGATIVE_INFINITY,\n maxY: NEGATIVE_INFINITY,\n minX: POSITIVE_INFINITY,\n minY: POSITIVE_INFINITY,\n });\n}\nexports.getBoundingBox = getBoundingBox;\n\nfunction getBoundingBoxPoints(keypoints) 
{\n const { minX, minY, maxX, maxY } = getBoundingBox(keypoints);\n return [{ x: minX, y: minY }, { x: maxX, y: minY }, { x: maxX, y: maxY }, { x: minX, y: maxY }];\n}\nexports.getBoundingBoxPoints = getBoundingBoxPoints;\n\nasync function toTensorBuffers3D(tensors) {\n return Promise.all(tensors.map((tensor) => tensor.buffer()));\n}\nexports.toTensorBuffers3D = toTensorBuffers3D;\n\nfunction scalePose(pose, scaleY, scaleX) {\n return {\n score: pose.score,\n keypoints: pose.keypoints.map(({ score, part, position }) => ({\n score,\n part,\n position: { x: position.x * scaleX, y: position.y * scaleY },\n })),\n };\n}\nexports.scalePose = scalePose;\n\nfunction resizeTo(image, [targetH, targetW]) {\n const input = image.squeeze(0);\n const resized = input.resizeBilinear([targetH, targetW]);\n input.dispose();\n return resized;\n}\nexports.resizeTo = resizeTo;\n\nfunction scaleAndFlipPoses(poses, [height, width], [inputResolutionHeight, inputResolutionWidth]) {\n const scaledPoses = poses.map((pose) => scalePose(pose, height / inputResolutionHeight, width / inputResolutionWidth));\n return scaledPoses;\n}\nexports.scaleAndFlipPoses = scaleAndFlipPoses;\n", "import { loadGraphModel } from '../tf.js';\nimport * as modelMobileNet from './modelMobileNet';\nimport * as decodeMultiple from './decodeMultiple';\nimport * as util from './util';\n\nclass PoseNet {\n constructor(net) {\n this.baseModel = net;\n this.outputStride = 16;\n }\n\n async estimatePoses(input, config) {\n return new Promise(async (resolve) => {\n const height = input.shape[1];\n const width = input.shape[2];\n const resized = util.resizeTo(input, [config.body.inputSize, config.body.inputSize]);\n const res = this.baseModel.predict(resized);\n const allTensorBuffers = await util.toTensorBuffers3D([res.heatmapScores, res.offsets, res.displacementFwd, res.displacementBwd]);\n const scoresBuffer = allTensorBuffers[0];\n const offsetsBuffer = allTensorBuffers[1];\n const displacementsFwdBuffer = allTensorBuffers[2];\n const displacementsBwdBuffer = allTensorBuffers[3];\n const poses = await decodeMultiple.decodeMultiplePoses(scoresBuffer, offsetsBuffer, displacementsFwdBuffer, displacementsBwdBuffer, this.outputStride, config.body.maxDetections, config.body.scoreThreshold, config.body.nmsRadius);\n const resultPoses = util.scaleAndFlipPoses(poses, [height, width], [config.body.inputSize, config.body.inputSize]);\n res.heatmapScores.dispose();\n res.offsets.dispose();\n res.displacementFwd.dispose();\n res.displacementBwd.dispose();\n resized.dispose();\n resolve(resultPoses);\n });\n }\n\n dispose() {\n this.baseModel.dispose();\n }\n}\nexports.PoseNet = PoseNet;\n\nasync function load(config) {\n const graphModel = await loadGraphModel(config.body.modelPath);\n const mobilenet = new modelMobileNet.MobileNet(graphModel, this.outputStride);\n // eslint-disable-next-line no-console\n console.log(`Human: load model: ${config.body.modelPath.match(/\\/(.*)\\./)[1]}`);\n return new PoseNet(mobilenet);\n}\nexports.load = load;\n", "import * as modelMobileNet from './modelMobileNet';\nimport * as modelPoseNet from './modelPoseNet';\nimport * as decodeMultiple from './decodeMultiple';\nimport * as keypoints from './keypoints';\nimport * as util from './util';\n\nexports.load = modelPoseNet.load;\nexports.PoseNet = modelPoseNet.PoseNet;\n\nexports.MobileNet = modelMobileNet.MobileNet;\nexports.decodeMultiplePoses = decodeMultiple.decodeMultiplePoses;\nexports.partChannels = keypoints.partChannels;\nexports.partIds = 
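estimatePoses() above decodes keypoints at the model's input resolution and then uses scaleAndFlipPoses()/scalePose() to map them back to the original frame size. A minimal sketch of that rescaling with an invented pose and a 1280x720 frame versus a 257x257 model input:

// Scale keypoint positions from model-input space back to image space, as the bundled scalePose() does.
function scalePose(pose, scaleY, scaleX) {
  return {
    score: pose.score,
    keypoints: pose.keypoints.map(({ score, part, position }) => ({
      score,
      part,
      position: { x: position.x * scaleX, y: position.y * scaleY },
    })),
  };
}

const pose = { score: 0.8, keypoints: [{ score: 0.9, part: 'nose', position: { x: 128, y: 64 } }] };
console.log(scalePose(pose, 720 / 257, 1280 / 257).keypoints[0].position); // ≈ { x: 637.5, y: 179.3 }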
keypoints.partIds;\nexports.partNames = keypoints.partNames;\nexports.poseChain = keypoints.poseChain;\nexports.getAdjacentKeyPoints = util.getAdjacentKeyPoints;\nexports.getBoundingBox = util.getBoundingBox;\nexports.getBoundingBoxPoints = util.getBoundingBoxPoints;\nexports.scaleAndFlipPoses = util.scaleAndFlipPoses;\nexports.scalePose = util.scalePose;\n", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport { tf } from '../tf.js';\nimport * as box from './box';\n\nclass HandDetector {\n constructor(model, inputSize, anchorsAnnotated) {\n this.model = model;\n this.anchors = anchorsAnnotated.map((anchor) => [anchor.x_center, anchor.y_center]);\n this.anchorsTensor = tf.tensor2d(this.anchors);\n this.inputSizeTensor = tf.tensor1d([inputSize, inputSize]);\n this.doubleInputSizeTensor = tf.tensor1d([inputSize * 2, inputSize * 2]);\n }\n\n normalizeBoxes(boxes) {\n return tf.tidy(() => {\n const boxOffsets = tf.slice(boxes, [0, 0], [-1, 2]);\n const boxSizes = tf.slice(boxes, [0, 2], [-1, 2]);\n const boxCenterPoints = tf.add(tf.div(boxOffsets, this.inputSizeTensor), this.anchorsTensor);\n const halfBoxSizes = tf.div(boxSizes, this.doubleInputSizeTensor);\n const startPoints = tf.mul(tf.sub(boxCenterPoints, halfBoxSizes), this.inputSizeTensor);\n const endPoints = tf.mul(tf.add(boxCenterPoints, halfBoxSizes), this.inputSizeTensor);\n return tf.concat2d([startPoints, endPoints], 1);\n });\n }\n\n normalizeLandmarks(rawPalmLandmarks, index) {\n return tf.tidy(() => {\n const landmarks = tf.add(tf.div(rawPalmLandmarks.reshape([-1, 7, 2]), this.inputSizeTensor), this.anchors[index]);\n return tf.mul(landmarks, this.inputSizeTensor);\n });\n }\n\n async getBoxes(input, config) {\n const batched = this.model.predict(input);\n const predictions = batched.squeeze();\n batched.dispose();\n const scores = tf.tidy(() => tf.sigmoid(tf.slice(predictions, [0, 0], [-1, 1])).squeeze());\n const scoresVal = scores.dataSync();\n const rawBoxes = tf.slice(predictions, [0, 1], [-1, 4]);\n const boxes = this.normalizeBoxes(rawBoxes);\n rawBoxes.dispose();\n const filteredT = await tf.image.nonMaxSuppressionAsync(boxes, scores, config.maxHands, config.iouThreshold, config.scoreThreshold);\n const filtered = filteredT.arraySync();\n\n scores.dispose();\n filteredT.dispose();\n const hands = [];\n for (const boxIndex of filtered) {\n if (scoresVal[boxIndex] >= config.minConfidence) {\n const matchingBox = tf.slice(boxes, [boxIndex, 0], [1, -1]);\n const rawPalmLandmarks = tf.slice(predictions, [boxIndex, 5], [1, 14]);\n const palmLandmarks = tf.tidy(() => this.normalizeLandmarks(rawPalmLandmarks, boxIndex).reshape([-1, 2]));\n rawPalmLandmarks.dispose();\n hands.push({ box: matchingBox, palmLandmarks, confidence: scoresVal[boxIndex] });\n }\n }\n predictions.dispose();\n boxes.dispose();\n return hands;\n }\n\n async estimateHandBounds(input, 
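normalizeBoxes() above decodes each raw detection relative to its anchor: the predicted offset is added to the anchor centre and the predicted size is halved, all expressed against the detector input size. A hedged single-box sketch of the same arithmetic (decodeBox and its inputs are illustrative, not library API):

// Anchor-relative box decoding: centre = offset/inputSize + anchor, then expand by half the size.
function decodeBox(rawBox, anchor, inputSize) {
  const centerX = rawBox.offsetX / inputSize + anchor.x;
  const centerY = rawBox.offsetY / inputSize + anchor.y;
  const halfW = rawBox.width / (2 * inputSize);
  const halfH = rawBox.height / (2 * inputSize);
  return {
    startPoint: [(centerX - halfW) * inputSize, (centerY - halfH) * inputSize],
    endPoint: [(centerX + halfW) * inputSize, (centerY + halfH) * inputSize],
  };
}

console.log(decodeBox({ offsetX: 8, offsetY: -4, width: 64, height: 64 }, { x: 0.5, y: 0.5 }, 256));
// { startPoint: [104, 92], endPoint: [168, 156] }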
config) {\n const inputHeight = input.shape[1];\n const inputWidth = input.shape[2];\n const image = tf.tidy(() => input.resizeBilinear([config.inputSize, config.inputSize]).div(127.5).sub(1));\n const predictions = await this.getBoxes(image, config);\n image.dispose();\n if (!predictions || predictions.length === 0) return null;\n const hands = [];\n for (const prediction of predictions) {\n const boxes = prediction.box.dataSync();\n const startPoint = boxes.slice(0, 2);\n const endPoint = boxes.slice(2, 4);\n const palmLandmarks = prediction.palmLandmarks.arraySync();\n prediction.box.dispose();\n prediction.palmLandmarks.dispose();\n hands.push(box.scaleBoxCoordinates({ startPoint, endPoint, palmLandmarks, confidence: prediction.confidence }, [inputWidth / config.inputSize, inputHeight / config.inputSize]));\n }\n return hands;\n }\n}\nexports.HandDetector = HandDetector;\n", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport { tf } from '../tf.js';\nimport * as box from './box';\nimport * as util from './util';\n\nconst PALM_BOX_SHIFT_VECTOR = [0, -0.4];\nconst PALM_BOX_ENLARGE_FACTOR = 3;\nconst HAND_BOX_SHIFT_VECTOR = [0, -0.1]; // move detected hand box by x,y to ease landmark detection\nconst HAND_BOX_ENLARGE_FACTOR = 1.65; // increased from model default 1.65;\nconst PALM_LANDMARK_IDS = [0, 5, 9, 13, 17, 1, 2];\nconst PALM_LANDMARKS_INDEX_OF_PALM_BASE = 0;\nconst PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE = 2;\n\nclass HandPipeline {\n constructor(boundingBoxDetector, meshDetector, inputSize) {\n this.boxDetector = boundingBoxDetector;\n this.meshDetector = meshDetector;\n this.inputSize = inputSize;\n this.storedBoxes = [];\n this.skipped = 1000;\n this.detectedHands = 0;\n }\n\n getBoxForPalmLandmarks(palmLandmarks, rotationMatrix) {\n const rotatedPalmLandmarks = palmLandmarks.map((coord) => {\n const homogeneousCoordinate = [...coord, 1];\n return util.rotatePoint(homogeneousCoordinate, rotationMatrix);\n });\n const boxAroundPalm = this.calculateLandmarksBoundingBox(rotatedPalmLandmarks);\n return box.enlargeBox(box.squarifyBox(box.shiftBox(boxAroundPalm, PALM_BOX_SHIFT_VECTOR)), PALM_BOX_ENLARGE_FACTOR);\n }\n\n getBoxForHandLandmarks(landmarks) {\n const boundingBox = this.calculateLandmarksBoundingBox(landmarks);\n const boxAroundHand = box.enlargeBox(box.squarifyBox(box.shiftBox(boundingBox, HAND_BOX_SHIFT_VECTOR)), HAND_BOX_ENLARGE_FACTOR);\n const palmLandmarks = [];\n for (let i = 0; i < PALM_LANDMARK_IDS.length; i++) {\n palmLandmarks.push(landmarks[PALM_LANDMARK_IDS[i]].slice(0, 2));\n }\n boxAroundHand.palmLandmarks = palmLandmarks;\n return boxAroundHand;\n }\n\n transformRawCoords(rawCoords, box2, angle, rotationMatrix) {\n const boxSize = box.getBoxSize(box2);\n const scaleFactor = [boxSize[0] / this.inputSize, boxSize[1] / this.inputSize];\n const coordsScaled = rawCoords.map((coord) => [\n 
scaleFactor[0] * (coord[0] - this.inputSize / 2),\n scaleFactor[1] * (coord[1] - this.inputSize / 2),\n coord[2],\n ]);\n const coordsRotationMatrix = util.buildRotationMatrix(angle, [0, 0]);\n const coordsRotated = coordsScaled.map((coord) => {\n const rotated = util.rotatePoint(coord, coordsRotationMatrix);\n return [...rotated, coord[2]];\n });\n const inverseRotationMatrix = util.invertTransformMatrix(rotationMatrix);\n const boxCenter = [...box.getBoxCenter(box2), 1];\n const originalBoxCenter = [\n util.dot(boxCenter, inverseRotationMatrix[0]),\n util.dot(boxCenter, inverseRotationMatrix[1]),\n ];\n return coordsRotated.map((coord) => [\n coord[0] + originalBoxCenter[0],\n coord[1] + originalBoxCenter[1],\n coord[2],\n ]);\n }\n\n async estimateHands(image, config) {\n this.skipped++;\n let useFreshBox = false;\n\n // run new detector every skipFrames unless we only want box to start with\n let boxes;\n if ((this.skipped > config.skipFrames) || !config.landmarks) {\n boxes = await this.boxDetector.estimateHandBounds(image, config);\n // don't reset on test image\n if ((image.shape[1] !== 255) && (image.shape[2] !== 255)) this.skipped = 0;\n }\n\n // if detector result count doesn't match current working set, use it to reset current working set\n if (boxes && (boxes.length > 0) && ((boxes.length !== this.detectedHands) && (this.detectedHands !== config.maxHands) || !config.landmarks)) {\n this.storedBoxes = [];\n this.detectedHands = 0;\n for (const possible of boxes) this.storedBoxes.push(possible);\n if (this.storedBoxes.length > 0) useFreshBox = true;\n }\n const hands = [];\n // console.log(`skipped: ${this.skipped} max: ${config.maxHands} detected: ${this.detectedHands} stored: ${this.storedBoxes.length} new: ${boxes?.length}`);\n\n // go through working set of boxes\n for (const i in this.storedBoxes) {\n const currentBox = this.storedBoxes[i];\n if (!currentBox) continue;\n if (config.landmarks) {\n const angle = util.computeRotation(currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_PALM_BASE], currentBox.palmLandmarks[PALM_LANDMARKS_INDEX_OF_MIDDLE_FINGER_BASE]);\n const palmCenter = box.getBoxCenter(currentBox);\n const palmCenterNormalized = [palmCenter[0] / image.shape[2], palmCenter[1] / image.shape[1]];\n const rotatedImage = tf.image.rotateWithOffset(image, angle, 0, palmCenterNormalized);\n const rotationMatrix = util.buildRotationMatrix(-angle, palmCenter);\n const newBox = useFreshBox ? 
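estimateHands() above, like the age/gender/emotion predictors earlier in this bundle, only runs the expensive detector once the skip counter exceeds skipFrames and otherwise reuses the previous result. A generic sketch of that caching pattern (makeCachedDetector and expensiveDetect are hypothetical stand-ins, not library functions):

// Run `expensiveDetect` at most once every `skipFrames + 1` calls; return the cached result in between.
function makeCachedDetector(expensiveDetect, skipFrames) {
  let last = null;
  let skipped = Number.MAX_SAFE_INTEGER;
  return async (input) => {
    if (skipped < skipFrames && last) {
      skipped++;
      return last;
    }
    skipped = 0;
    last = await expensiveDetect(input);
    return last;
  };
}

// with skipFrames = 10, only calls 0, 11, 22, ... actually hit the model
const detect = makeCachedDetector(async (frame) => ({ frame, hands: [] }), 10);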
this.getBoxForPalmLandmarks(currentBox.palmLandmarks, rotationMatrix) : currentBox;\n const croppedInput = box.cutBoxFromImageAndResize(newBox, rotatedImage, [this.inputSize, this.inputSize]);\n const handImage = croppedInput.div(255);\n croppedInput.dispose();\n rotatedImage.dispose();\n const [confidence, keypoints] = await this.meshDetector.predict(handImage);\n handImage.dispose();\n const confidenceValue = confidence.dataSync()[0];\n confidence.dispose();\n if (confidenceValue >= config.minConfidence) {\n const keypointsReshaped = tf.reshape(keypoints, [-1, 3]);\n const rawCoords = keypointsReshaped.arraySync();\n keypoints.dispose();\n keypointsReshaped.dispose();\n const coords = this.transformRawCoords(rawCoords, newBox, angle, rotationMatrix);\n const nextBoundingBox = this.getBoxForHandLandmarks(coords);\n this.storedBoxes[i] = nextBoundingBox;\n const result = {\n landmarks: coords,\n confidence: confidenceValue,\n box: {\n topLeft: nextBoundingBox.startPoint,\n bottomRight: nextBoundingBox.endPoint,\n },\n };\n hands.push(result);\n } else {\n this.storedBoxes[i] = null;\n }\n keypoints.dispose();\n } else {\n const enlarged = box.enlargeBox(box.squarifyBox(box.shiftBox(currentBox, HAND_BOX_SHIFT_VECTOR)), HAND_BOX_ENLARGE_FACTOR);\n const result = {\n confidence: currentBox.confidence,\n box: {\n topLeft: enlarged.startPoint,\n bottomRight: enlarged.endPoint,\n },\n };\n hands.push(result);\n }\n }\n this.storedBoxes = this.storedBoxes.filter((a) => a !== null);\n this.detectedHands = hands.length;\n return hands;\n }\n\n // eslint-disable-next-line class-methods-use-this\n calculateLandmarksBoundingBox(landmarks) {\n const xs = landmarks.map((d) => d[0]);\n const ys = landmarks.map((d) => d[1]);\n const startPoint = [Math.min(...xs), Math.min(...ys)];\n const endPoint = [Math.max(...xs), Math.max(...ys)];\n return { startPoint, endPoint };\n }\n}\n\nexports.HandPipeline = HandPipeline;\n", "exports.anchors = [\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.015625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.015625,\n },\n {\n w: 
// ... anchor list continues with the same repeating pattern: two identical anchors
// (w: 1, h: 1) per grid cell, with x_center and y_center stepping through odd
// multiples of 1/64 (0.015625, 0.046875, ..., 0.984375), i.e. a uniform 32x32 grid;
// the entries collapsed here cover y_center values from 0.015625 through 0.546875 ...
y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.546875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.578125,\n },\n {\n w: 
1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n 
y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.578125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.609375,\n },\n {\n w: 
1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.609375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n 
y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.640625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.671875,\n },\n {\n w: 
1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.671875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n 
y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.703125,\n },\n {\n w: 
1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.703125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n 
y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.734375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.765625,\n },\n {\n w: 
1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.765625,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n 
y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.796875,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.828125,\n },\n {\n w: 
1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.734375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.765625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.796875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.828125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.859375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.890625,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n 
y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.921875,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.953125,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.984375,\n y_center: 0.828125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.015625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.046875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.078125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.109375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.140625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.171875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.203125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.234375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.265625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.296875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.328125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.359375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.390625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.421875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.453125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.484375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.515625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.546875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.578125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.609375,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.640625,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.671875,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.859375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.703125,\n y_center: 0.859375,\n },\n {\n w: 
[... generated anchor definitions continue: every entry has the form { w: 1, h: 1, x_center, y_center }, with centers laid out on regular grids - two identical unit-size anchors per cell at 1/32 and 1/16 spacing, and six per cell at 1/8 spacing - the remainder of the dump is omitted here for brevity ...]
x_center: 0.8125,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.8125,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.0625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.1875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.3125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.4375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.5625,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.6875,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.8125,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.9375,\n },\n {\n w: 1,\n h: 1,\n x_center: 0.9375,\n y_center: 0.9375,\n 
},\n];\n", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// https://storage.googleapis.com/tfjs-models/demos/handpose/index.html\n\nimport { loadGraphModel } from '../tf.js';\nimport * as handdetector from './handdetector';\nimport * as pipeline from './handpipeline';\nimport * as anchors from './anchors';\n\nconst MESH_ANNOTATIONS = {\n thumb: [1, 2, 3, 4],\n indexFinger: [5, 6, 7, 8],\n middleFinger: [9, 10, 11, 12],\n ringFinger: [13, 14, 15, 16],\n pinky: [17, 18, 19, 20],\n palmBase: [0],\n};\n\nclass HandPose {\n constructor(pipe) {\n this.pipeline = pipe;\n }\n\n static getAnnotations() {\n return MESH_ANNOTATIONS;\n }\n\n async estimateHands(input, config) {\n const predictions = await this.pipeline.estimateHands(input, config);\n if (!predictions) return [];\n const hands = [];\n for (const prediction of predictions) {\n const annotations = {};\n if (prediction.landmarks) {\n for (const key of Object.keys(MESH_ANNOTATIONS)) {\n annotations[key] = MESH_ANNOTATIONS[key].map((index) => prediction.landmarks[index]);\n }\n }\n hands.push({\n confidence: prediction.confidence,\n box: prediction.box ? 
[\n prediction.box.topLeft[0],\n prediction.box.topLeft[1],\n prediction.box.bottomRight[0] - prediction.box.topLeft[0],\n prediction.box.bottomRight[1] - prediction.box.topLeft[1],\n ] : 0,\n landmarks: prediction.landmarks,\n annotations,\n });\n }\n return hands;\n }\n}\nexports.HandPose = HandPose;\n\nasync function load(config) {\n const [handDetectorModel, handPoseModel] = await Promise.all([\n loadGraphModel(config.detector.modelPath, { fromTFHub: config.detector.modelPath.includes('tfhub.dev') }),\n loadGraphModel(config.skeleton.modelPath, { fromTFHub: config.skeleton.modelPath.includes('tfhub.dev') }),\n ]);\n const detector = new handdetector.HandDetector(handDetectorModel, config.inputSize, anchors.anchors);\n const pipe = new pipeline.HandPipeline(detector, handPoseModel, config.inputSize);\n const handpose = new HandPose(pipe);\n // eslint-disable-next-line no-console\n console.log(`Human: load model: ${config.detector.modelPath.match(/\\/(.*)\\./)[1]}`);\n // eslint-disable-next-line no-console\n console.log(`Human: load model: ${config.skeleton.modelPath.match(/\\/(.*)\\./)[1]}`);\n return handpose;\n}\nexports.load = load;\n", "exports.body = (res) => {\n if (!res) return [];\n const gestures = [];\n for (const pose of res) {\n // raising hands\n const leftWrist = pose.keypoints.find((a) => (a.part === 'leftWrist'));\n const rightWrist = pose.keypoints.find((a) => (a.part === 'rightWrist'));\n const nose = pose.keypoints.find((a) => (a.part === 'nose'));\n if (nose && leftWrist && rightWrist && (leftWrist.position.y < nose.position.y) && (rightWrist.position.y < nose.position.y)) gestures.push('i give up');\n else if (nose && leftWrist && (leftWrist.position.y < nose.position.y)) gestures.push('raise left hand');\n else if (nose && rightWrist && (rightWrist.position.y < nose.position.y)) gestures.push('raise right hand');\n\n // leaning\n const leftShoulder = pose.keypoints.find((a) => (a.part === 'leftShoulder'));\n const rightShoulder = pose.keypoints.find((a) => (a.part === 'rightShoulder'));\n if (leftShoulder && rightShoulder) gestures.push(`leaning ${(leftShoulder.position.y > rightShoulder.position.y) ? 'left' : 'right'}`);\n }\n return gestures;\n};\n\nexports.face = (res) => {\n if (!res) return [];\n const gestures = [];\n for (const face of res) {\n // if (face.annotations['rightCheek'] && face.annotations['leftCheek'] && (face.annotations['rightCheek'].length > 0) && (face.annotations['leftCheek'].length > 0)) {\n // gestures.push(`facing ${((face.annotations['rightCheek'][0][2] > 0) || (face.annotations['leftCheek'][0][2] < 0)) ? 'right' : 'left'}`);\n // }\n if (face.mesh && face.mesh.length > 0) {\n const eyeFacing = face.mesh[35][2] - face.mesh[263][2];\n if (Math.abs(eyeFacing) < 10) gestures.push('facing camera');\n else gestures.push(`facing ${eyeFacing < 0 ? 
'right' : 'left'}`);\n const openLeft = Math.abs(face.mesh[374][1] - face.mesh[386][1]) / Math.abs(face.mesh[443][1] - face.mesh[450][1]); // center of eye inner lid y coord div center of wider eye border y coord\n if (openLeft < 0.2) gestures.push('blink left eye');\n const openRight = Math.abs(face.mesh[145][1] - face.mesh[159][1]) / Math.abs(face.mesh[223][1] - face.mesh[230][1]); // center of eye inner lid y coord div center of wider eye border y coord\n if (openRight < 0.2) gestures.push('blink right eye');\n const mouthOpen = Math.min(100, 500 * Math.abs(face.mesh[13][1] - face.mesh[14][1]) / Math.abs(face.mesh[10][1] - face.mesh[152][1]));\n if (mouthOpen > 10) gestures.push(`mouth ${Math.trunc(mouthOpen)}% open`);\n const chinDepth = face.mesh[152][2];\n if (Math.abs(chinDepth) > 10) gestures.push(`head ${chinDepth < 0 ? 'up' : 'down'}`);\n }\n }\n return gestures;\n};\n\nexports.hand = (res) => {\n if (!res) return [];\n const gestures = [];\n for (const hand of res) {\n const fingers = [];\n for (const [finger, pos] of Object.entries(hand['annotations'])) {\n if (finger !== 'palmBase') fingers.push({ name: finger.toLowerCase(), position: pos[0] }); // get tip of each finger\n }\n if (fingers && fingers.length > 0) {\n const closest = fingers.reduce((best, a) => (best.position[2] < a.position[2] ? best : a));\n const highest = fingers.reduce((best, a) => (best.position[1] < a.position[1] ? best : a));\n gestures.push(`${closest.name} forward ${highest.name} up`);\n }\n }\n return gestures;\n};\n", "/* eslint-disable no-use-before-define */\n/*\nWebGLImageFilter - MIT Licensed\n2013, Dominic Szablewski - phoboslab.org\n\n*/\n\nconst WebGLProgram = function (gl, vertexSource, fragmentSource) {\n const _collect = function (source, prefix, collection) {\n const r = new RegExp('\\\\b' + prefix + ' \\\\w+ (\\\\w+)', 'ig');\n source.replace(r, (match, name) => {\n collection[name] = 0;\n return match;\n });\n };\n\n const _compile = function (source, type) {\n const shader = gl.createShader(type);\n gl.shaderSource(shader, source);\n gl.compileShader(shader);\n\n if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {\n throw new Error('Filter: GL compile failed', gl.getShaderInfoLog(shader));\n }\n return shader;\n };\n\n this.uniform = {};\n this.attribute = {};\n\n const _vsh = _compile(vertexSource, gl.VERTEX_SHADER);\n const _fsh = _compile(fragmentSource, gl.FRAGMENT_SHADER);\n\n this.id = gl.createProgram();\n gl.attachShader(this.id, _vsh);\n gl.attachShader(this.id, _fsh);\n gl.linkProgram(this.id);\n\n if (!gl.getProgramParameter(this.id, gl.LINK_STATUS)) {\n throw new Error('Filter: GL link failed', gl.getProgramInfoLog(this.id));\n }\n\n gl.useProgram(this.id);\n\n // Collect attributes\n _collect(vertexSource, 'attribute', this.attribute);\n for (const a in this.attribute) {\n this.attribute[a] = gl.getAttribLocation(this.id, a);\n }\n\n // Collect uniforms\n _collect(vertexSource, 'uniform', this.uniform);\n _collect(fragmentSource, 'uniform', this.uniform);\n for (const u in this.uniform) {\n this.uniform[u] = gl.getUniformLocation(this.id, u);\n }\n};\n\nconst WebGLImageFilter = function (params) {\n if (!params) params = { };\n let _drawCount = 0;\n let _sourceTexture = null;\n let _lastInChain = false;\n let _currentFramebufferIndex = -1;\n let _tempFramebuffers = [null, null];\n let _filterChain = [];\n let _width = -1;\n let _height = -1;\n let _vertexBuffer = null;\n let _currentProgram = null;\n const _canvas = params.canvas || document.createElement('canvas');\n\n 
// key is the shader program source, value is the compiled program\n const _shaderProgramCache = { };\n\n const gl = _canvas.getContext('webgl');\n if (!gl) throw new Error('Filter: getContext() failed');\n\n this.addFilter = function (name) {\n // eslint-disable-next-line prefer-rest-params\n const args = Array.prototype.slice.call(arguments, 1);\n const filter = _filter[name];\n\n _filterChain.push({ func: filter, args });\n };\n\n this.reset = function () {\n _filterChain = [];\n };\n\n this.apply = function (image) {\n _resize(image.width, image.height);\n _drawCount = 0;\n\n // Create the texture for the input image if we haven't yet\n if (!_sourceTexture) _sourceTexture = gl.createTexture();\n gl.bindTexture(gl.TEXTURE_2D, _sourceTexture);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);\n gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);\n\n // No filters? Just draw\n if (_filterChain.length === 0) {\n // const program = _compileShader(SHADER.FRAGMENT_IDENTITY);\n _draw();\n return _canvas;\n }\n\n for (let i = 0; i < _filterChain.length; i++) {\n _lastInChain = (i === _filterChain.length - 1);\n const f = _filterChain[i];\n f.func.apply(this, f.args || []);\n }\n\n return _canvas;\n };\n\n const _resize = function (width, height) {\n // Same width/height? Nothing to do here\n if (width === _width && height === _height) { return; }\n\n _canvas.width = width;\n _width = width;\n _canvas.height = height;\n _height = height;\n\n // Create the context if we don't have it yet\n if (!_vertexBuffer) {\n // Create the vertex buffer for the two triangles [x, y, u, v] * 6\n const vertices = new Float32Array([\n -1, -1, 0, 1, 1, -1, 1, 1, -1, 1, 0, 0,\n -1, 1, 0, 0, 1, -1, 1, 1, 1, 1, 1, 0,\n ]);\n // eslint-disable-next-line no-unused-expressions\n (_vertexBuffer = gl.createBuffer(), gl.bindBuffer(gl.ARRAY_BUFFER, _vertexBuffer));\n gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW);\n\n // Note sure if this is a good idea; at least it makes texture loading\n // in Ejecta instant.\n gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, true);\n }\n\n gl.viewport(0, 0, _width, _height);\n\n // Delete old temp framebuffers\n _tempFramebuffers = [null, null];\n };\n\n const _getTempFramebuffer = function (index) {\n _tempFramebuffers[index] = _tempFramebuffers[index]\n || _createFramebufferTexture(_width, _height);\n\n return _tempFramebuffers[index];\n };\n\n const _createFramebufferTexture = function (width, height) {\n const fbo = gl.createFramebuffer();\n gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);\n\n const renderbuffer = gl.createRenderbuffer();\n gl.bindRenderbuffer(gl.RENDERBUFFER, renderbuffer);\n\n const texture = gl.createTexture();\n gl.bindTexture(gl.TEXTURE_2D, texture);\n gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);\n\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);\n\n gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);\n\n gl.bindTexture(gl.TEXTURE_2D, null);\n gl.bindFramebuffer(gl.FRAMEBUFFER, null);\n\n 
return { fbo, texture };\n };\n\n const _draw = function (flags) {\n let source = null;\n let target = null;\n let flipY = false;\n\n // Set up the source\n if (_drawCount === 0) {\n // First draw call - use the source texture\n source = _sourceTexture;\n } else {\n // All following draw calls use the temp buffer last drawn to\n source = _getTempFramebuffer(_currentFramebufferIndex).texture;\n }\n _drawCount++;\n\n // Set up the target\n if (_lastInChain && !(flags & DRAW.INTERMEDIATE)) {\n // Last filter in our chain - draw directly to the WebGL Canvas. We may\n // also have to flip the image vertically now\n target = null;\n flipY = _drawCount % 2 === 0;\n } else {\n // Intermediate draw call - get a temp buffer to draw to\n _currentFramebufferIndex = (_currentFramebufferIndex + 1) % 2;\n target = _getTempFramebuffer(_currentFramebufferIndex).fbo;\n }\n\n // Bind the source and target and draw the two triangles\n gl.bindTexture(gl.TEXTURE_2D, source);\n gl.bindFramebuffer(gl.FRAMEBUFFER, target);\n\n gl.uniform1f(_currentProgram.uniform.flipY, (flipY ? -1 : 1));\n gl.drawArrays(gl.TRIANGLES, 0, 6);\n };\n\n const _compileShader = function (fragmentSource) {\n if (_shaderProgramCache[fragmentSource]) {\n _currentProgram = _shaderProgramCache[fragmentSource];\n gl.useProgram(_currentProgram.id);\n return _currentProgram;\n }\n\n // Compile shaders\n _currentProgram = new WebGLProgram(gl, SHADER.VERTEX_IDENTITY, fragmentSource);\n\n const floatSize = Float32Array.BYTES_PER_ELEMENT;\n const vertSize = 4 * floatSize;\n gl.enableVertexAttribArray(_currentProgram.attribute.pos);\n gl.vertexAttribPointer(_currentProgram.attribute.pos, 2, gl.FLOAT, false, vertSize, 0 * floatSize);\n gl.enableVertexAttribArray(_currentProgram.attribute.uv);\n gl.vertexAttribPointer(_currentProgram.attribute.uv, 2, gl.FLOAT, false, vertSize, 2 * floatSize);\n\n _shaderProgramCache[fragmentSource] = _currentProgram;\n return _currentProgram;\n };\n\n let DRAW = { INTERMEDIATE: 1 };\n\n let SHADER = {};\n SHADER.VERTEX_IDENTITY = [\n 'precision highp float;',\n 'attribute vec2 pos;',\n 'attribute vec2 uv;',\n 'varying vec2 vUv;',\n 'uniform float flipY;',\n\n 'void main(void) {',\n 'vUv = uv;',\n 'gl_Position = vec4(pos.x, pos.y*flipY, 0.0, 1.);',\n '}',\n ].join('\\n');\n\n SHADER.FRAGMENT_IDENTITY = [\n 'precision highp float;',\n 'varying vec2 vUv;',\n 'uniform sampler2D texture;',\n\n 'void main(void) {',\n 'gl_FragColor = texture2D(texture, vUv);',\n '}',\n ].join('\\n');\n\n let _filter = {};\n\n // -------------------------------------------------------------------------\n // Color Matrix Filter\n\n _filter.colorMatrix = function (matrix) {\n // Create a Float32 Array and normalize the offset component to 0-1\n const m = new Float32Array(matrix);\n m[4] /= 255;\n m[9] /= 255;\n m[14] /= 255;\n m[19] /= 255;\n\n // Can we ignore the alpha value? Makes things a bit faster.\n const shader = (m[18] === 1 && m[3] === 0 && m[8] === 0 && m[13] === 0 && m[15] === 0 && m[16] === 0 && m[17] === 0 && m[19] === 0)\n ? 
_filter.colorMatrix.SHADER.WITHOUT_ALPHA\n : _filter.colorMatrix.SHADER.WITH_ALPHA;\n\n const program = _compileShader(shader);\n gl.uniform1fv(program.uniform.m, m);\n _draw();\n };\n\n _filter.colorMatrix.SHADER = {};\n _filter.colorMatrix.SHADER.WITH_ALPHA = [\n 'precision highp float;',\n 'varying vec2 vUv;',\n 'uniform sampler2D texture;',\n 'uniform float m[20];',\n\n 'void main(void) {',\n 'vec4 c = texture2D(texture, vUv);',\n 'gl_FragColor.r = m[0] * c.r + m[1] * c.g + m[2] * c.b + m[3] * c.a + m[4];',\n 'gl_FragColor.g = m[5] * c.r + m[6] * c.g + m[7] * c.b + m[8] * c.a + m[9];',\n 'gl_FragColor.b = m[10] * c.r + m[11] * c.g + m[12] * c.b + m[13] * c.a + m[14];',\n 'gl_FragColor.a = m[15] * c.r + m[16] * c.g + m[17] * c.b + m[18] * c.a + m[19];',\n '}',\n ].join('\\n');\n _filter.colorMatrix.SHADER.WITHOUT_ALPHA = [\n 'precision highp float;',\n 'varying vec2 vUv;',\n 'uniform sampler2D texture;',\n 'uniform float m[20];',\n\n 'void main(void) {',\n 'vec4 c = texture2D(texture, vUv);',\n 'gl_FragColor.r = m[0] * c.r + m[1] * c.g + m[2] * c.b + m[4];',\n 'gl_FragColor.g = m[5] * c.r + m[6] * c.g + m[7] * c.b + m[9];',\n 'gl_FragColor.b = m[10] * c.r + m[11] * c.g + m[12] * c.b + m[14];',\n 'gl_FragColor.a = c.a;',\n '}',\n ].join('\\n');\n\n _filter.brightness = function (brightness) {\n const b = (brightness || 0) + 1;\n _filter.colorMatrix([\n b, 0, 0, 0, 0,\n 0, b, 0, 0, 0,\n 0, 0, b, 0, 0,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.saturation = function (amount) {\n const x = (amount || 0) * 2 / 3 + 1;\n const y = ((x - 1) * -0.5);\n _filter.colorMatrix([\n x, y, y, 0, 0,\n y, x, y, 0, 0,\n y, y, x, 0, 0,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.desaturate = function () {\n _filter.saturation(-1);\n };\n\n _filter.contrast = function (amount) {\n const v = (amount || 0) + 1;\n const o = -128 * (v - 1);\n\n _filter.colorMatrix([\n v, 0, 0, 0, o,\n 0, v, 0, 0, o,\n 0, 0, v, 0, o,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.negative = function () {\n _filter.contrast(-2);\n };\n\n _filter.hue = function (rotation) {\n rotation = (rotation || 0) / 180 * Math.PI;\n const cos = Math.cos(rotation);\n const sin = Math.sin(rotation);\n const lumR = 0.213;\n const lumG = 0.715;\n const lumB = 0.072;\n\n _filter.colorMatrix([\n lumR + cos * (1 - lumR) + sin * (-lumR), lumG + cos * (-lumG) + sin * (-lumG), lumB + cos * (-lumB) + sin * (1 - lumB), 0, 0,\n lumR + cos * (-lumR) + sin * (0.143), lumG + cos * (1 - lumG) + sin * (0.140), lumB + cos * (-lumB) + sin * (-0.283), 0, 0,\n lumR + cos * (-lumR) + sin * (-(1 - lumR)), lumG + cos * (-lumG) + sin * (lumG), lumB + cos * (1 - lumB) + sin * (lumB), 0, 0,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.desaturateLuminance = function () {\n _filter.colorMatrix([\n 0.2764723, 0.9297080, 0.0938197, 0, -37.1,\n 0.2764723, 0.9297080, 0.0938197, 0, -37.1,\n 0.2764723, 0.9297080, 0.0938197, 0, -37.1,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.sepia = function () {\n _filter.colorMatrix([\n 0.393, 0.7689999, 0.18899999, 0, 0,\n 0.349, 0.6859999, 0.16799999, 0, 0,\n 0.272, 0.5339999, 0.13099999, 0, 0,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.brownie = function () {\n _filter.colorMatrix([\n 0.5997023498159715, 0.34553243048391263, -0.2708298674538042, 0, 47.43192855600873,\n -0.037703249837783157, 0.8609577587992641, 0.15059552388459913, 0, -36.96841498319127,\n 0.24113635128153335, -0.07441037908422492, 0.44972182064877153, 0, -7.562075277591283,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.vintagePinhole = function () {\n _filter.colorMatrix([\n 0.6279345635605994, 
0.3202183420819367, -0.03965408211312453, 0, 9.651285835294123,\n 0.02578397704808868, 0.6441188644374771, 0.03259127616149294, 0, 7.462829176470591,\n 0.0466055556782719, -0.0851232987247891, 0.5241648018700465, 0, 5.159190588235296,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.kodachrome = function () {\n _filter.colorMatrix([\n 1.1285582396593525, -0.3967382283601348, -0.03992559172921793, 0, 63.72958762196502,\n -0.16404339962244616, 1.0835251566291304, -0.05498805115633132, 0, 24.732407896706203,\n -0.16786010706155763, -0.5603416277695248, 1.6014850761964943, 0, 35.62982807460946,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.technicolor = function () {\n _filter.colorMatrix([\n 1.9125277891456083, -0.8545344976951645, -0.09155508482755585, 0, 11.793603434377337,\n -0.3087833385928097, 1.7658908555458428, -0.10601743074722245, 0, -70.35205161461398,\n -0.231103377548616, -0.7501899197440212, 1.847597816108189, 0, 30.950940869491138,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.polaroid = function () {\n _filter.colorMatrix([\n 1.438, -0.062, -0.062, 0, 0,\n -0.122, 1.378, -0.122, 0, 0,\n -0.016, -0.016, 1.483, 0, 0,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n _filter.shiftToBGR = function () {\n _filter.colorMatrix([\n 0, 0, 1, 0, 0,\n 0, 1, 0, 0, 0,\n 1, 0, 0, 0, 0,\n 0, 0, 0, 1, 0,\n ]);\n };\n\n // -------------------------------------------------------------------------\n // Convolution Filter\n\n _filter.convolution = function (matrix) {\n const m = new Float32Array(matrix);\n const pixelSizeX = 1 / _width;\n const pixelSizeY = 1 / _height;\n\n const program = _compileShader(_filter.convolution.SHADER);\n gl.uniform1fv(program.uniform.m, m);\n gl.uniform2f(program.uniform.px, pixelSizeX, pixelSizeY);\n _draw();\n };\n\n _filter.convolution.SHADER = [\n 'precision highp float;',\n 'varying vec2 vUv;',\n 'uniform sampler2D texture;',\n 'uniform vec2 px;',\n 'uniform float m[9];',\n\n 'void main(void) {',\n 'vec4 c11 = texture2D(texture, vUv - px);', // top left\n 'vec4 c12 = texture2D(texture, vec2(vUv.x, vUv.y - px.y));', // top center\n 'vec4 c13 = texture2D(texture, vec2(vUv.x + px.x, vUv.y - px.y));', // top right\n\n 'vec4 c21 = texture2D(texture, vec2(vUv.x - px.x, vUv.y) );', // mid left\n 'vec4 c22 = texture2D(texture, vUv);', // mid center\n 'vec4 c23 = texture2D(texture, vec2(vUv.x + px.x, vUv.y) );', // mid right\n\n 'vec4 c31 = texture2D(texture, vec2(vUv.x - px.x, vUv.y + px.y) );', // bottom left\n 'vec4 c32 = texture2D(texture, vec2(vUv.x, vUv.y + px.y) );', // bottom center\n 'vec4 c33 = texture2D(texture, vUv + px );', // bottom right\n\n 'gl_FragColor = ',\n 'c11 * m[0] + c12 * m[1] + c22 * m[2] +',\n 'c21 * m[3] + c22 * m[4] + c23 * m[5] +',\n 'c31 * m[6] + c32 * m[7] + c33 * m[8];',\n 'gl_FragColor.a = c22.a;',\n '}',\n ].join('\\n');\n\n _filter.detectEdges = function () {\n _filter.convolution.call(this, [\n 0, 1, 0,\n 1, -4, 1,\n 0, 1, 0,\n ]);\n };\n\n _filter.sobelX = function () {\n _filter.convolution.call(this, [\n -1, 0, 1,\n -2, 0, 2,\n -1, 0, 1,\n ]);\n };\n\n _filter.sobelY = function () {\n _filter.convolution.call(this, [\n -1, -2, -1,\n 0, 0, 0,\n 1, 2, 1,\n ]);\n };\n\n _filter.sharpen = function (amount) {\n const a = amount || 1;\n _filter.convolution.call(this, [\n 0, -1 * a, 0,\n -1 * a, 1 + 4 * a, -1 * a,\n 0, -1 * a, 0,\n ]);\n };\n\n _filter.emboss = function (size) {\n const s = size || 1;\n _filter.convolution.call(this, [\n -2 * s, -1 * s, 0,\n -1 * s, 1, 1 * s,\n 0, 1 * s, 2 * s,\n ]);\n };\n\n // 
-------------------------------------------------------------------------\n // Blur Filter\n\n _filter.blur = function (size) {\n const blurSizeX = (size / 7) / _width;\n const blurSizeY = (size / 7) / _height;\n\n const program = _compileShader(_filter.blur.SHADER);\n\n // Vertical\n gl.uniform2f(program.uniform.px, 0, blurSizeY);\n _draw(DRAW.INTERMEDIATE);\n\n // Horizontal\n gl.uniform2f(program.uniform.px, blurSizeX, 0);\n _draw();\n };\n\n _filter.blur.SHADER = [\n 'precision highp float;',\n 'varying vec2 vUv;',\n 'uniform sampler2D texture;',\n 'uniform vec2 px;',\n\n 'void main(void) {',\n 'gl_FragColor = vec4(0.0);',\n 'gl_FragColor += texture2D(texture, vUv + vec2(-7.0*px.x, -7.0*px.y))*0.0044299121055113265;',\n 'gl_FragColor += texture2D(texture, vUv + vec2(-6.0*px.x, -6.0*px.y))*0.00895781211794;',\n 'gl_FragColor += texture2D(texture, vUv + vec2(-5.0*px.x, -5.0*px.y))*0.0215963866053;',\n 'gl_FragColor += texture2D(texture, vUv + vec2(-4.0*px.x, -4.0*px.y))*0.0443683338718;',\n 'gl_FragColor += texture2D(texture, vUv + vec2(-3.0*px.x, -3.0*px.y))*0.0776744219933;',\n 'gl_FragColor += texture2D(texture, vUv + vec2(-2.0*px.x, -2.0*px.y))*0.115876621105;',\n 'gl_FragColor += texture2D(texture, vUv + vec2(-1.0*px.x, -1.0*px.y))*0.147308056121;',\n 'gl_FragColor += texture2D(texture, vUv )*0.159576912161;',\n 'gl_FragColor += texture2D(texture, vUv + vec2( 1.0*px.x, 1.0*px.y))*0.147308056121;',\n 'gl_FragColor += texture2D(texture, vUv + vec2( 2.0*px.x, 2.0*px.y))*0.115876621105;',\n 'gl_FragColor += texture2D(texture, vUv + vec2( 3.0*px.x, 3.0*px.y))*0.0776744219933;',\n 'gl_FragColor += texture2D(texture, vUv + vec2( 4.0*px.x, 4.0*px.y))*0.0443683338718;',\n 'gl_FragColor += texture2D(texture, vUv + vec2( 5.0*px.x, 5.0*px.y))*0.0215963866053;',\n 'gl_FragColor += texture2D(texture, vUv + vec2( 6.0*px.x, 6.0*px.y))*0.00895781211794;',\n 'gl_FragColor += texture2D(texture, vUv + vec2( 7.0*px.x, 7.0*px.y))*0.0044299121055113265;',\n '}',\n ].join('\\n');\n\n // -------------------------------------------------------------------------\n // Pixelate Filter\n\n _filter.pixelate = function (size) {\n const blurSizeX = (size) / _width;\n const blurSizeY = (size) / _height;\n\n const program = _compileShader(_filter.pixelate.SHADER);\n\n // Horizontal\n gl.uniform2f(program.uniform.size, blurSizeX, blurSizeY);\n _draw();\n };\n\n _filter.pixelate.SHADER = [\n 'precision highp float;',\n 'varying vec2 vUv;',\n 'uniform vec2 size;',\n 'uniform sampler2D texture;',\n\n 'vec2 pixelate(vec2 coord, vec2 size) {',\n 'return floor( coord / size ) * size;',\n '}',\n\n 'void main(void) {',\n 'gl_FragColor = vec4(0.0);',\n 'vec2 coord = pixelate(vUv, size);',\n 'gl_FragColor += texture2D(texture, coord);',\n '}',\n ].join('\\n');\n};\n\nexports.Canvas = WebGLImageFilter;\n", "import { tf } from './tf.js';\nimport * as fxImage from './imagefx.js';\n\n// internal temp canvases\nlet inCanvas = null;\nlet outCanvas = null;\n\n// process input image and return tensor\n// input can be tensor, imagedata, htmlimageelement, htmlvideoelement\n// input is resized and run through imagefx filter\nfunction process(input, config) {\n let tensor;\n if (input instanceof tf.Tensor) {\n tensor = tf.clone(input);\n } else {\n const originalWidth = input.naturalWidth || input.videoWidth || input.width || (input.shape && (input.shape[1] > 0));\n const originalHeight = input.naturalHeight || input.videoHeight || input.height || (input.shape && (input.shape[2] > 0));\n let targetWidth = originalWidth;\n let targetHeight 
= originalHeight;\n if (config.filter.width > 0) targetWidth = config.filter.width;\n else if (config.filter.height > 0) targetWidth = originalWidth * (config.filter.height / originalHeight);\n if (config.filter.height > 0) targetHeight = config.filter.height;\n else if (config.filter.width > 0) targetHeight = originalHeight * (config.filter.width / originalWidth);\n if (!inCanvas || (inCanvas.width !== targetWidth) || (inCanvas.height !== targetHeight)) {\n inCanvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement('canvas');\n if (inCanvas.width !== targetWidth) inCanvas.width = targetWidth;\n if (inCanvas.height !== targetHeight) inCanvas.height = targetHeight;\n }\n const ctx = inCanvas.getContext('2d');\n if (input instanceof ImageData) ctx.putImageData(input, 0, 0);\n else ctx.drawImage(input, 0, 0, originalWidth, originalHeight, 0, 0, inCanvas.width, inCanvas.height);\n if (config.filter.enabled) {\n if (!this.fx || !outCanvas || (inCanvas.width !== outCanvas.width) || (inCanvas.height !== outCanvas.height)) {\n outCanvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(inCanvas.width, inCanvas.height) : document.createElement('canvas');\n if (outCanvas.width !== inCanvas.width) outCanvas.width = inCanvas.width;\n if (outCanvas.height !== inCanvas.height) outCanvas.height = inCanvas.height;\n this.fx = tf.ENV.flags.IS_BROWSER ? new fxImage.Canvas({ canvas: outCanvas }) : null; // && (typeof document !== 'undefined')\n }\n this.fx.reset();\n this.fx.addFilter('brightness', config.filter.brightness); // must have at least one filter enabled\n if (config.filter.contrast !== 0) this.fx.addFilter('contrast', config.filter.contrast);\n if (config.filter.sharpness !== 0) this.fx.addFilter('sharpen', config.filter.sharpness);\n if (config.filter.blur !== 0) this.fx.addFilter('blur', config.filter.blur);\n if (config.filter.saturation !== 0) this.fx.addFilter('saturation', config.filter.saturation);\n if (config.filter.hue !== 0) this.fx.addFilter('hue', config.filter.hue);\n if (config.filter.negative) this.fx.addFilter('negative');\n if (config.filter.sepia) this.fx.addFilter('sepia');\n if (config.filter.vintage) this.fx.addFilter('brownie');\n if (config.filter.sepia) this.fx.addFilter('sepia');\n if (config.filter.kodachrome) this.fx.addFilter('kodachrome');\n if (config.filter.technicolor) this.fx.addFilter('technicolor');\n if (config.filter.polaroid) this.fx.addFilter('polaroid');\n if (config.filter.pixelate !== 0) this.fx.addFilter('pixelate', config.filter.pixelate);\n this.fx.apply(inCanvas);\n // read pixel data\n // const gl = outCanvas.getContext('webgl');\n const gl = false;\n if (gl) {\n const glBuffer = new Uint8Array(outCanvas.width * outCanvas.height * 4);\n const pixBuffer = new Uint8Array(outCanvas.width * outCanvas.height * 3);\n gl.readPixels(0, 0, outCanvas.width, outCanvas.height, gl.RGBA, gl.UNSIGNED_BYTE, glBuffer);\n // gl returns rbga while we only need rgb, so discarding alpha channel\n // gl returns starting point as lower left, so need to invert vertical\n let i = 0;\n for (let y = outCanvas.height - 1; y >= 0; y--) {\n for (let x = 0; x < outCanvas.width; x++) {\n const index = (x + y * outCanvas.width) * 4;\n pixBuffer[i++] = glBuffer[index + 0];\n pixBuffer[i++] = glBuffer[index + 1];\n pixBuffer[i++] = glBuffer[index + 2];\n }\n }\n outCanvas.data = pixBuffer;\n }\n } else {\n outCanvas = inCanvas;\n }\n let pixels;\n if (outCanvas.data) {\n const shape = [outCanvas.height, 
outCanvas.width, 3];\n pixels = tf.tensor3d(outCanvas.data, shape, 'int32');\n } else if ((config.backend === 'webgl') || (outCanvas instanceof ImageData)) {\n // tf kernel-optimized method to get imagedata, also if input is imagedata, just use it\n pixels = tf.browser.fromPixels(outCanvas);\n } else {\n // cpu and wasm kernel does not implement efficient fromPixels method nor we can use canvas as-is, so we do a silly one more canvas\n const tempCanvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(targetWidth, targetHeight) : document.createElement('canvas');\n tempCanvas.width = targetWidth;\n tempCanvas.height = targetHeight;\n const tempCtx = tempCanvas.getContext('2d');\n tempCtx.drawImage(outCanvas, 0, 0);\n const data = tempCtx.getImageData(0, 0, targetWidth, targetHeight);\n pixels = tf.browser.fromPixels(data);\n }\n const casted = pixels.toFloat();\n tensor = casted.expandDims(0);\n pixels.dispose();\n casted.dispose();\n }\n return { tensor, canvas: config.filter.return ? outCanvas : null };\n}\n\nexports.process = process;\n", "// custom: bundle 3.4M\n/*\nimport * as tf from '../../../dev-clone/tfjs/tfjs/dist/tf.esnext.js';\nimport { setWasmPaths } from '@tensorflow/tfjs-backend-wasm/dist/index.js';\n\nconst loadGraphModel = tf.loadGraphModel;\nexport { tf, setWasmPaths, loadGraphModel };\n*/\n\n// monolithic: bundle 3.4M\nimport * as tf from '@tensorflow/tfjs/dist/tf.es2017.js';\nimport { setWasmPaths } from '@tensorflow/tfjs-backend-wasm/dist/index.js';\n\nconst loadGraphModel = tf.loadGraphModel;\nexport { tf, setWasmPaths, loadGraphModel };\n\n// modular: bundle 4.2M\n/*\nimport * as tf from '@tensorflow/tfjs-core/dist/tf-core.es2017.js';\nimport { loadGraphModel } from '@tensorflow/tfjs-converter/dist/tf-converter.es2017.js';\nimport * as tfCPU from '@tensorflow/tfjs-backend-cpu/dist/tf-backend-cpu.es2017.js';\nimport * as tfWebGL from '@tensorflow/tfjs-backend-webgl/dist/tf-backend-webgl.es2017.js';\nimport { setWasmPaths, version_wasm } from '@tensorflow/tfjs-backend-wasm/dist/index.js';\n\nconst version = { core: tf.version, cpu: tfCPU.version_cpu, webgl: tfWebGL.version_webgl, wasm: version_wasm };\n\nexport { tf, setWasmPaths, loadGraphModel, version };\n*/\n", null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, "export const wasmWorkerContents = 'var threadInfoStruct=0;var selfThreadId=0;var parentThreadId=0;var Module={};function threadPrintErr(){var text=Array.prototype.slice.call(arguments).join(\" 
\");console.error(text)}function threadAlert(){var text=Array.prototype.slice.call(arguments).join(\" \");postMessage({cmd:\"alert\",text:text,threadId:selfThreadId})}var err=threadPrintErr;this.alert=threadAlert;Module[\"instantiateWasm\"]=function(info,receiveInstance){var instance=new WebAssembly.Instance(Module[\"wasmModule\"],info);Module[\"wasmModule\"]=null;receiveInstance(instance);return instance.exports};this.onmessage=function(e){try{if(e.data.cmd===\"load\"){Module[\"DYNAMIC_BASE\"]=e.data.DYNAMIC_BASE;Module[\"DYNAMICTOP_PTR\"]=e.data.DYNAMICTOP_PTR;Module[\"wasmModule\"]=e.data.wasmModule;Module[\"wasmMemory\"]=e.data.wasmMemory;Module[\"buffer\"]=Module[\"wasmMemory\"].buffer;Module[\"ENVIRONMENT_IS_PTHREAD\"]=true;if(typeof e.data.urlOrBlob===\"string\"){importScripts(e.data.urlOrBlob)}else{var objectUrl=URL.createObjectURL(e.data.urlOrBlob);importScripts(objectUrl);URL.revokeObjectURL(objectUrl)}Module=WasmBackendModuleThreadedSimd(Module);postMessage({\"cmd\":\"loaded\"})}else if(e.data.cmd===\"objectTransfer\"){Module[\"PThread\"].receiveObjectTransfer(e.data)}else if(e.data.cmd===\"run\"){Module[\"__performance_now_clock_drift\"]=performance.now()-e.data.time;threadInfoStruct=e.data.threadInfoStruct;Module[\"__register_pthread_ptr\"](threadInfoStruct,0,0);selfThreadId=e.data.selfThreadId;parentThreadId=e.data.parentThreadId;var max=e.data.stackBase;var top=e.data.stackBase+e.data.stackSize;Module[\"establishStackSpace\"](top,max);Module[\"_emscripten_tls_init\"]();Module[\"PThread\"].receiveObjectTransfer(e.data);Module[\"PThread\"].setThreadStatus(Module[\"_pthread_self\"](),1);try{var result=Module[\"dynCall_ii\"](e.data.start_routine,e.data.arg);if(!Module[\"getNoExitRuntime\"]())Module[\"PThread\"].threadExit(result)}catch(ex){if(ex===\"Canceled!\"){Module[\"PThread\"].threadCancel()}else if(ex!=\"unwind\"){Atomics.store(Module[\"HEAPU32\"],threadInfoStruct+4>>2,ex instanceof Module[\"ExitStatus\"]?ex.status:-2);Atomics.store(Module[\"HEAPU32\"],threadInfoStruct+0>>2,1);Module[\"_emscripten_futex_wake\"](threadInfoStruct+0,2147483647);if(!(ex instanceof Module[\"ExitStatus\"]))throw ex}}}else if(e.data.cmd===\"cancel\"){if(threadInfoStruct){Module[\"PThread\"].threadCancel()}}else if(e.data.target===\"setimmediate\"){}else if(e.data.cmd===\"processThreadQueue\"){if(threadInfoStruct){Module[\"_emscripten_current_thread_process_queued_calls\"]()}}else{err(\"worker.js received unknown command \"+e.data.cmd);err(e.data)}}catch(ex){err(\"worker.js onmessage() captured an uncaught exception: \"+ex);if(ex.stack)err(ex.stack);throw ex}};if(typeof process===\"object\"&&typeof process.versions===\"object\"&&typeof process.versions.node===\"string\"){self={location:{href:__filename}};var onmessage=this.onmessage;var nodeWorkerThreads=require(\"worker_threads\");Worker=nodeWorkerThreads.Worker;var parentPort=nodeWorkerThreads.parentPort;parentPort.on(\"message\",function(data){onmessage({data:data})});var nodeFS=require(\"fs\");var nodeRead=function(filename){return nodeFS.readFileSync(filename,\"utf8\")};function globalEval(x){global.require=require;global.Module=Module;eval.call(null,x)}importScripts=function(f){globalEval(nodeRead(f))};postMessage=function(msg){parentPort.postMessage(msg)};if(typeof performance===\"undefined\"){performance={now:function(){return Date.now()}}}}';", null, null, null, null, "// custom: bundle 3.4M\n/*\nimport * as tf from '../../../dev-clone/tfjs/tfjs/dist/tf.esnext.js';\nimport { setWasmPaths } from 
'@tensorflow/tfjs-backend-wasm/dist/index.js';\n\nconst loadGraphModel = tf.loadGraphModel;\nexport { tf, setWasmPaths, loadGraphModel };\n*/\n\n// monolithic: bundle 3.4M\nimport * as tf from '@tensorflow/tfjs/dist/tf.es2017.js';\nimport { setWasmPaths } from '@tensorflow/tfjs-backend-wasm/dist/index.js';\n\nconst loadGraphModel = tf.loadGraphModel;\nexport { tf, setWasmPaths, loadGraphModel };\n\n// modular: bundle 4.2M\n/*\nimport * as tf from '@tensorflow/tfjs-core/dist/tf-core.es2017.js';\nimport { loadGraphModel } from '@tensorflow/tfjs-converter/dist/tf-converter.es2017.js';\nimport * as tfCPU from '@tensorflow/tfjs-backend-cpu/dist/tf-backend-cpu.es2017.js';\nimport * as tfWebGL from '@tensorflow/tfjs-backend-webgl/dist/tf-backend-webgl.es2017.js';\nimport { setWasmPaths, version_wasm } from '@tensorflow/tfjs-backend-wasm/dist/index.js';\n\nconst version = { core: tf.version, cpu: tfCPU.version_cpu, webgl: tfWebGL.version_webgl, wasm: version_wasm };\n\nexport { tf, setWasmPaths, loadGraphModel, version };\n*/\n", "export default [\n 127, 34, 139, 11, 0, 37, 232, 231, 120, 72, 37, 39, 128, 121, 47, 232, 121,\n 128, 104, 69, 67, 175, 171, 148, 157, 154, 155, 118, 50, 101, 73, 39, 40, 9,\n 151, 108, 48, 115, 131, 194, 204, 211, 74, 40, 185, 80, 42, 183, 40, 92,\n 186, 230, 229, 118, 202, 212, 214, 83, 18, 17, 76, 61, 146, 160, 29, 30, 56,\n 157, 173, 106, 204, 194, 135, 214, 192, 203, 165, 98, 21, 71, 68, 51, 45, 4,\n 144, 24, 23, 77, 146, 91, 205, 50, 187, 201, 200, 18, 91, 106, 182, 90, 91,\n 181, 85, 84, 17, 206, 203, 36, 148, 171, 140, 92, 40, 39, 193, 189, 244,\n 159, 158, 28, 247, 246, 161, 236, 3, 196, 54, 68, 104, 193, 168, 8, 117,\n 228, 31, 189, 193, 55, 98, 97, 99, 126, 47, 100, 166, 79, 218, 155, 154, 26,\n 209, 49, 131, 135, 136, 150, 47, 126, 217, 223, 52, 53, 45, 51, 134, 211,\n 170, 140, 67, 69, 108, 43, 106, 91, 230, 119, 120, 226, 130, 247, 63, 53,\n 52, 238, 20, 242, 46, 70, 156, 78, 62, 96, 46, 53, 63, 143, 34, 227, 173,\n 155, 133, 123, 117, 111, 44, 125, 19, 236, 134, 51, 216, 206, 205, 154, 153,\n 22, 39, 37, 167, 200, 201, 208, 36, 142, 100, 57, 212, 202, 20, 60, 99, 28,\n 158, 157, 35, 226, 113, 160, 159, 27, 204, 202, 210, 113, 225, 46, 43, 202,\n 204, 62, 76, 77, 137, 123, 116, 41, 38, 72, 203, 129, 142, 64, 98, 240, 49,\n 102, 64, 41, 73, 74, 212, 216, 207, 42, 74, 184, 169, 170, 211, 170, 149,\n 176, 105, 66, 69, 122, 6, 168, 123, 147, 187, 96, 77, 90, 65, 55, 107, 89,\n 90, 180, 101, 100, 120, 63, 105, 104, 93, 137, 227, 15, 86, 85, 129, 102,\n 49, 14, 87, 86, 55, 8, 9, 100, 47, 121, 145, 23, 22, 88, 89, 179, 6, 122,\n 196, 88, 95, 96, 138, 172, 136, 215, 58, 172, 115, 48, 219, 42, 80, 81, 195,\n 3, 51, 43, 146, 61, 171, 175, 199, 81, 82, 38, 53, 46, 225, 144, 163, 110,\n 246, 33, 7, 52, 65, 66, 229, 228, 117, 34, 127, 234, 107, 108, 69, 109, 108,\n 151, 48, 64, 235, 62, 78, 191, 129, 209, 126, 111, 35, 143, 163, 161, 246,\n 117, 123, 50, 222, 65, 52, 19, 125, 141, 221, 55, 65, 3, 195, 197, 25, 7,\n 33, 220, 237, 44, 70, 71, 139, 122, 193, 245, 247, 130, 33, 71, 21, 162,\n 153, 158, 159, 170, 169, 150, 188, 174, 196, 216, 186, 92, 144, 160, 161, 2,\n 97, 167, 141, 125, 241, 164, 167, 37, 72, 38, 12, 145, 159, 160, 38, 82, 13,\n 63, 68, 71, 226, 35, 111, 158, 153, 154, 101, 50, 205, 206, 92, 165, 209,\n 198, 217, 165, 167, 97, 220, 115, 218, 133, 112, 243, 239, 238, 241, 214,\n 135, 169, 190, 173, 133, 171, 208, 32, 125, 44, 237, 86, 87, 178, 85, 86,\n 179, 84, 85, 180, 83, 84, 181, 201, 83, 182, 137, 93, 132, 76, 62, 183, 61,\n 76, 184, 57, 61, 185, 212, 
[generated sourcemap "sourcesContent" data: remainder of the facemesh triangulation index array, plus embedded verbatim copies of src/human.js (repeated three times), src/hand/box.js, src/hand/util.js and config.js],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,IAAA;AAAA;AAGA,mBAAgB;AAIf,QAAI,OAAO,SAAS;AAAe,aAAO;AAAA;AAC1C,QAAI,OAAO,WAAW;AAAe,aAAO;AAAA;AAC5C,QAAI,OAAO,YAAW;AAAe,aAAO;AAAA;AAC5C,UAAM,IAAI,MAAM;AAAA;AAGjB,gBAAa;AAEb,SAAO,UAAU,UAAU,QAAO;AAGlC,MAAI,QAAO;AACV,YAAQ,UAAU,QAAO,MAAM,KAAK;AAAA;AAGrC,UAAQ,UAAU,QAAO;AACzB,UAAQ,UAAU,QAAO;AACzB,UAAQ,WAAW,QAAO;AAAA;;;ACxB1B,IAAA;AACA,gBAAa;AACb,gBAAa,QAAO;AAGpB;AACE,oBAAgB;AACd,UAAI,OAAO,IAAI;AAAA;AAAA;AAGnB,MAAI,QAAO,QAAQ,QAAO,SAAS,QAAO,eAAe,QAAO;AAC9D,WAAO,UAAU;AAAA;AAGjB,cAAU,SAAQ;AAClB,YAAQ,SAAS;AAAA;AAGnB;AACE,WAAO,QAAO,KAAK,kBAAkB;AAAA;AAIvC,YAAU,SAAQ;AAElB,aAAW,OAAO;AAChB,QAAI,OAAO,QAAQ;AACjB,YAAM,IAAI,UAAU;AAAA;AAEtB,WAAO,QAAO,KAAK,kBAAkB;AAAA;AAGvC,aAAW,QAAQ;AACjB,QAAI,OAAO,SAAS;AAClB,YAAM,IAAI,UAAU;AAAA;AAEtB,cAAU,QAAO;AACjB,QAAI,UAAS;AACX,UAAI,OAAO,aAAa;AACtB,YAAI,KAAK,OAAM;AAAA;AAEf,YAAI,KAAK;AAAA;AAAA;AAGX,UAAI,KAAK;AAAA;AAEX,WAAO;AAAA;AAGT,aAAW,cAAc;AACvB,QAAI,OAAO,SAAS;AAClB,YAAM,IAAI,UAAU;AAAA;AAEtB,WAAO,QAAO;AAAA;AAGhB,aAAW,kBAAkB;AAC3B,QAAI,OAAO,SAAS;AAClB,YAAM,IAAI,UAAU;AAAA;AAEtB,WAAO,QAAO,WAAW;AAAA;AAAA;;;AC5D3B,IAAA;AAqBA;AAIA,gBAAa,sBAAuB;AAGpC,mBAAiB,QAAO,cAAc;AACpC,eAAW,KAAK;AAChB,YAAQ,YAAY,SAAS;AAAA,WACtB;AAAA,WAAW;AAAA,WAAY;AAAA,WAAa;AAAA,WAAa;AAAA,WAAc;AAAA,WAAc;AAAA,WAAY;AAAA,WAAa;AAAA,WAAe;AAAA,WAAgB;AACxI,eAAO;AAAA;AAEP,eAAO;AAAA;AAAA;AAIb;AACE,QAAI,CAAC;AAAK,aAAO;AACjB;AACA,WAAO;AACL,cAAQ;AAAA,aACD;AAAA,aACA;AACH,iBAAO;AAAA,aACJ;AAAA,aACA;AAAA,aACA;AAAA,aACA;AACH,iBAAO;AAAA,aACJ;AAAA,aACA;AACH,iBAAO;AAAA,aACJ;AAAA,aACA;AAAA,aACA;AACH,iBAAO;AAAA;AAEP,cAAI;AAAS;AACb,gBAAO,MAAK,KAAK;AACjB,oBAAU;AAAA;AAAA;AAAA;AAOlB;AACE,eAAW,mBAAmB;AAC9B,QAAI,OAAO,SAAS,YAAa,SAAO,eAAe,cAAc,CAAC,WAAW;AAAO,YAAM,IAAI,MAAM,uBAAuB;AAC/H,WAAO,QAAQ;AAAA;AAMjB,UAAQ,gBAAgB;AACxB;AACE,SAAK,WAAW,kBAAkB;AAClC;AACA,YAAQ,KAAK;AAAA,WACN;AACH,aAAK,OAAO;AACZ,aAAK,MAAM;AACX,aAAK;AACL;AAAA,WACG;AACH,aAAK,WAAW;AAChB,aAAK;AACL;AAAA,WACG;AACH,aAAK,OAAO;AACZ,aAAK,MAAM;AACX,aAAK;AACL;AAAA;AAEA,aAAK,QAAQ;AACb,aAAK,MAAM;AACX;AAAA;AAEJ,SAAK,WAAW;AAChB,SAAK,YAAY;AACjB,SAAK,WAAW,QAAO,YAAY;AAAA;AAGrC,gBAAc,UAAU,QAAQ;AAC9B,QAAI,IAAI,WAAW;AAAG,aAAO;AAC7B;AACA;AACA,QAAI,KAAK;AACP,UAAI,KAAK,SAAS;AAClB,UAAI,MAAM;AAAW,eAAO;AAC5B,UAAI,KAAK;AACT,WAAK,WAAW;AAAA;AAEhB,UAAI;AAAA;AAEN,QAAI,IAAI,IAAI;AAAQ,aAAO,IAAI,IAAI,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK;AACtE,WAAO,KAAK;AAAA;AAGd,gBAAc,UAAU,MAAM;AAG9B,gBAAc,UAAU,OAAO;AAG/B,gBAAc,UAAU,WAAW;AACjC,QAAI,KAAK,YAAY,IAAI;AACvB,UAAI,KAAK,KAAK,UAAU,KAAK,YAAY,KAAK,UAAU,GAAG,KAAK;AAChE,aAAO,KAAK,SAAS,SAAS,KAAK,UAAU,GAAG,KAAK;AAAA;AAEvD,QAAI,KAAK,KAAK,UAAU,KAAK,YAAY,KAAK,UAAU,GAAG,IAAI;AAC/D,SAAK,YAAY,IAAI;AAAA;AAKvB;AACE,QAAI,QAAQ;AAAM,aAAO;AAAA,aAAW,QAAQ,MAAM;AAAM,aAAO;AAAA,aAAW,QAAQ,MAAM;AAAM,aAAO;AAAA,aAAW,QAAQ,MAAM;AAAM,aAAO;AAC3I,WAAO,QAAQ,MAAM,IAAO,KAAK;AAAA;AAMnC;AACE,YAAQ,IAAI,SAAS;AACrB,QAAI,IAAI;AAAG,aAAO;AAClB,aAAS,cAAc,IAAI;AAC3B,QAAI,MAAM;AACR,UAAI,KAAK;AAAG,cAAK,WAAW,KAAK;AACjC,aAAO;AAAA;AAET,QAAI,EAAE,IAAI,KAAK,OAAO;AAAI,aAAO;AACjC,SAAK,cAAc,IAAI;AACvB,QAAI,MAAM;AACR,UAAI,KAAK;AAAG,cAAK,WAAW,KAAK;AACjC,aAAO;AAAA;AAET,QAAI,EAAE,IAAI,KAAK,OAAO;AAAI,aAAO;AACjC,SAAK,cAAc,IAAI;AACvB,QAAI,MAAM;AACR,UAAI,KAAK;AACP,YAAI,OAAO;AAAG,eAAK;AAAA;AAAO,gBAAK,WAAW,KAAK;AAAA;AAEjD,aAAO;AAAA;AAET,WAAO;AAAA;AAWT;AACE,QAAK,KAAI,KAAK,SAAU;AACtB,YAAK,WAAW;AAChB,aAAO;AAAA;AAET,QAAI,MAAK,WAAW,KAAK,IAAI,SAAS;AACpC,UAAK,KAAI,KAAK,SAAU;AACtB,cAAK,WAAW;AAChB,eAAO;AAAA;AAET,UAAI,MAAK,WAAW,KAAK,IAAI,SAAS;AACpC,YAAK,KAAI,KAAK,SAAU;AACtB,gBAAK,WAAW;AAChB,iBAAO;AAAA;AAAA;AAAA;AAAA;AAOf;AACE,YAAQ,KAAK,YAAY,KAAK;AAC9B,YAAQ,oBAAoB,MAAM,KAAK;AACvC,QAAI,MAAM
;AAAW,aAAO;AAC5B,QAAI,KAAK,YAAY,IAAI;AACvB,UAAI,KAAK,KAAK,UAAU,GAAG,GAAG,KAAK;AACnC,aAAO,KAAK,SAAS,SAAS,KAAK,UAAU,GAAG,KAAK;AAAA;AAEvD,QAAI,KAAK,KAAK,UAAU,GAAG,GAAG,IAAI;AAClC,SAAK,YAAY,IAAI;AAAA;AAMvB;AACE,gBAAY,oBAAoB,MAAM,KAAK;AAC3C,QAAI,CAAC,KAAK;AAAU,aAAO,IAAI,SAAS,QAAQ;AAChD,SAAK,YAAY;AACjB,cAAU,IAAI,SAAU,SAAQ,KAAK;AACrC,QAAI,KAAK,KAAK,UAAU,GAAG;AAC3B,WAAO,IAAI,SAAS,QAAQ,GAAG;AAAA;AAKjC;AACE,YAAQ,OAAO,IAAI,SAAS,KAAK,MAAM,OAAO;AAC9C,QAAI,KAAK;AAAU,aAAO,IAAI;AAC9B,WAAO;AAAA;AAOT;AACE,QAAK,KAAI,SAAS,KAAK,MAAM;AAC3B,cAAQ,IAAI,SAAS,WAAW;AAChC,UAAI;AACF,gBAAQ,EAAE,WAAW,EAAE,SAAS;AAChC,YAAI,KAAK,SAAU,KAAK;AACtB,eAAK,WAAW;AAChB,eAAK,YAAY;AACjB,eAAK,SAAS,KAAK,IAAI,IAAI,SAAS;AACpC,eAAK,SAAS,KAAK,IAAI,IAAI,SAAS;AACpC,iBAAO,EAAE,MAAM,GAAG;AAAA;AAAA;AAGtB,aAAO;AAAA;AAET,SAAK,WAAW;AAChB,SAAK,YAAY;AACjB,SAAK,SAAS,KAAK,IAAI,IAAI,SAAS;AACpC,WAAO,IAAI,SAAS,WAAW,GAAG,IAAI,SAAS;AAAA;AAKjD;AACE,YAAQ,OAAO,IAAI,SAAS,KAAK,MAAM,OAAO;AAC9C,QAAI,KAAK;AACP,gBAAU,KAAK,YAAY,KAAK;AAChC,aAAO,IAAI,KAAK,SAAS,SAAS,WAAW,GAAG;AAAA;AAElD,WAAO;AAAA;AAGT;AACE,YAAS,KAAI,SAAS,KAAK;AAC3B,QAAI,MAAM;AAAG,aAAO,IAAI,SAAS,UAAU;AAC3C,SAAK,WAAW,IAAI;AACpB,SAAK,YAAY;AACjB,QAAI,MAAM;AACR,WAAK,SAAS,KAAK,IAAI,IAAI,SAAS;AAAA;AAEpC,WAAK,SAAS,KAAK,IAAI,IAAI,SAAS;AACpC,WAAK,SAAS,KAAK,IAAI,IAAI,SAAS;AAAA;AAEtC,WAAO,IAAI,SAAS,UAAU,GAAG,IAAI,SAAS;AAAA;AAGhD;AACE,YAAQ,OAAO,IAAI,SAAS,KAAK,MAAM,OAAO;AAC9C,QAAI,KAAK;AAAU,aAAO,IAAI,KAAK,SAAS,SAAS,UAAU,GAAG,IAAI,KAAK;AAC3E,WAAO;AAAA;AAIT;AACE,WAAO,IAAI,SAAS,KAAK;AAAA;AAG3B;AACE,WAAO,OAAO,IAAI,SAAS,KAAK,MAAM,OAAO;AAAA;AAAA;;;;;;;;;;;;;;;;;;;;;;;;ACtS/C;;;;;;;;;;;;;;;;AAgBO,6BAAwB;AACxB,6BAAwB;;MAG3B;AACI,aAAK,UAAU;AACf,aAAK,YAAY;AACjB,aAAK,OAAO,IAAI;AAChB,aAAK,eAAe;;MAExB;AACI,YAAI,CAAC,KAAK,KAAK,IAAI;AACf,eAAK,UAAU,SAAS,KAAK,SAAS;;AAE1C,eAAO,KAAK,KAAK,IAAI;;MAEzB;AACI,aAAK;AACL,aAAK,KAAK,IAAI,QAAQ;;MAE1B;AACI,eAAO,KAAK,KAAK,IAAI;;MAEzB;AACI,aAAK;AACL,eAAO,KAAK,KAAK,OAAO;;MAE5B;AACI,eAAO,KAAK;;;;MAUhB;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAG7B;AACI,eAAO,mBAAkB;;MAG7B;AACI,eAAO,KAAK,qBAAqB,KAAK,mBAAkB;;MAE5D;AACI,eAAO,mBAAkB;;MAE7B,kBAAmB,GAAG,GAAG,YAAY,YAAY,MAAM,yBAAY;AAC/D,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAk
B;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B,aAAc,eAAO,QAAQ,UAAU,MAAM,yBAAY;AACrD,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B,sBAAuB,eAAO,QAAQ,UAAU,MAAM,yBAAY;AAC9D,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAG7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;MAE7B;AACI,eAAO,mBAAkB;;;AAGjC;AACI,YAAM,IAAI,MAAM,IAAI;;ACvfxB;;;;;;;;;;;;;;;;AA8BO;AACH,oBAAc,OAAM;AACpB,iBAAW;AACX,mBAAY;AAEZ,aAAO,UAAU;AAEb,iBAAS,KAAK,WAAW,UAAW;AAEpC;AAEA,eAAO,OAAM;AACb,eAAM,WAAW,OAAM;AACvB,eAAM,UAAS;;;AAIhB;AACH,aAAO,KAAK,IAAI,MAAK,KAAK,IAAI,GAAG;;AAE9B;AACH,aAAO,MAAM,MAAM,IAAI,MAAM,MAAM;;AAEhC;AACH,iBAAU;AACV,mBAAa,GAAG,IAAI,IAAI,QAAQ;AAC5B,gBAAO,IAAI;;AAEf,aAAO;;AASJ;AACH,gBAAU,KAAK;AACf,aAAQ,IAAI,IAAM,KAAI,KAAK;;AAGxB;AACH,mBAAa;AACb,mBAAa,GAAG,IAAI,EAAE,QAAQ;AAC1B,qBAAa,OAAO,EAAE,MAAM,OAAO,EAAE;AACrC,kBAAU,OAAO;;AAErB,aAAO;;AAiBJ;AACH,UAAI,CAAC;AACD,cAAM,IAAI,MAAM,OAAO,QAAQ,WAAW,MAAM;;;AAGjD,qEAAgE;AACnE,cAAO,aAAY,QAAQ,SAAS,MAAM,qBAAqB,WAAW,cAAc;;AAErF;AACH,cAAO,KAAK,MAAM,MAAM;;AAqBrB,oCAA+B,qBAAqB;AACvD,UAAI,UAAU;AACV,iBAAS;;AAEb,UAAI,MAAM,QAAQ,QAAQ,cAAa,QAAQ,CAAC;AAC5C,qBAAa,GAAG,IAAI,IAAI,QAAQ,EAAE;AAC9B,mBAAQ,IAAI,IAAI,QAAQ;;;AAI5B,eAAO,KAAK;;AAEhB,aAAO;;AAaJ;AACH,UAAI,MAAM,WAAW;AAEjB,eAAO;;AAEX,iBAAW,MAAM;AACjB,mBAAa,GAAG,IAAI,MAAM,QAAQ;AAC9B,gBAAQ,MAAM;;AAElB,aAAO;;AAEJ;AACH,aAAO,MAAM,WAAW;;AAErB;AACH,UAAI,OAAO;AACP,eAAO;;AAEX,UAAI,MAAM,QAAQ,MAAM;AACpB,eAAO;;AAEX,UAAI,GAAG,WAAW,GAAG;AACjB,eAAO;;AAEX,mBAAa,GAAG,IAAI,GAAG,QAAQ;AAC3B,YAAI,GAAG,OAAO,GAAG;AACb,iBAAO;;;AAGf,aAAO;;AAEJ;AACH,aAAO,IAAI,MAAM;;AAEd;AAEH,UAAI,KAAK,QAAQ;AAEb,eAAO,KAAK,KAAK;;AAErB,UAAI,MAAM;AACN,eAAO;iBAEF,MAAM;AACX,eAAO;;AAGP,oBAAY,KAAK,IAAI,IAAI;AACzB,eAAQ,OAAM,KAAM,OAAM;;;AAG3B;AACH,oBAAc,KAAK,KAAK,KAAK,KAAK;AAClC,aAAO,CAAC,OAAO,KAAK,KAAK,OAAO;;AAc7B;AACH,8BAAwB,IAAI,YAAY;AACxC,mBAAa,GAAG,IAAI,GAAG,EAAE;AACrB,wB
AAgB,KAAK;;AAEzB,eAAQ;AACR,aAAO;;AAEJ;AACH,UAAI,QAAQ,EAAE;AACV,eAAO;;AAEX,aAAO,IAAI,IAAI,OAAO,OAAO,EAAE;;AAE5B,6CAAwC,aAAa;AACxD,aAAO,IAAI,QAAQ;AACf,uBAAe;AACf,sBAAc;AACV,cAAI;AACA;AACA;;AAEJ;AACA,8BAAoB,QAAQ;AAC5B,cAAI,cAAc,QAAQ,YAAY;AAClC;AACA;;AAEJ,qBAAW,OAAO;;AAEtB;;;AAYD;AACH,sBAAgB;AAChB,wBAAkB;AAClB,mBAAa,GAAG,IAAI,MAAM,QAAQ,EAAE;AAChC,YAAI,MAAM,MAAM;AACZ,uBAAa,MAAM;mBAEd,MAAM,OAAO;AAClB,cAAI,gBAAgB;AAChB,kBAAM,MAAM,yDACW,uBAAuB;;AAElD,wBAAc;mBAET,MAAM,KAAK;AAChB,gBAAM,MAAM,gCAAgC,MAAM,aAAa;;;AAGvE,UAAI,gBAAgB;AAChB,YAAI,OAAO,KAAK,SAAS;AACrB,gBAAM,MAAM,QAAQ,yCAAyC;;AAEjE,eAAO;;AAEX,UAAI,cAAc;AACd,cAAM,MAAM,qCAAqC;;AAGrD,UAAI,OAAO,cAAc;AACrB,cAAM,MAAM,wDACD,UAAU;;AAEzB,uBAAiB,MAAM;AACvB,eAAS,eAAe,OAAO;AAC/B,aAAO;;AAEJ;AACH,mBAAa,MAAM;AAEnB,aAAO,QAAQ,OAAO,MAAM,IAAI,UAAU,KAAK,GAAG,OAAO;AAEzD,cAAO,KAAK,MAAM,QAAM,MAAM,CAAC,QAAQ,KAAK,OAAO,MAAM,+CAA+C,SAAS,sBACjG;AAEhB,cAAO,KAAK,MAAM,QAAM,OAAM,MAAM,MAAM,0DAC1B;AAEhB,aAAO,KAAK,IAAI,OAAK,IAAI,IAAI,OAAO,IAAI;;AAGrC;AACH,uBAAiB;AACjB,uBAAiB;AACjB,2BAAqB,QAAQ,QAAQ,MAAM,QAAQ,SAAS,KAAK,WAAW;AAC5E,mBAAc,QAAQ,QAAQ,eAC1B,OACA,gBAAe,MAAM,OAAO;AAChC,cAAQ;AACR,mBAAa,GAAG,IAAI,MAAM,QAAQ,EAAE;AAChC,YAAI,QAAQ;AACR,cAAI,KAAK,OAAO,KAAK,MAAM,OAAO;AAC9B,kBAAM,IAAI,MAAM,sBAAsB,oBAAoB,MAAM;;AAEpE,cAAK,MAAK,MAAM,QAAQ,KAAK,KAAK,MAAM,MAAM,OAAO;AACjD,qBAAS,KAAK,MAAM;AACpB,qBAAS,KAAK;;AAElB,cAAI,KAAK,MAAM;AACX;;;AAGR,YAAI,MAAM,OAAO;AACb,mBAAS,KAAK,MAAM;AACpB,mBAAS,KAAK;;;AAGtB,aAAO,CAAE,UAAU;;AAEhB;AACH,mBAAa;AACb,UAAI,SAAS,QAAQ,UAAU;AAC3B,iBAAS,IAAI,aAAa;iBAErB,UAAU;AACf,iBAAS,IAAI,WAAW;iBAEnB,UAAU;AACf,iBAAS,IAAI,WAAW;;AAGxB,cAAM,IAAI,MAAM,qBAAqB;;AAEzC,aAAO;;AAEJ;AACH,mBAAa;AACb,UAAI,SAAS,QAAQ,UAAU;AAC3B,iBAAS,IAAI,aAAa;iBAErB,UAAU;AACf,iBAAS,IAAI,WAAW;iBAEnB,UAAU;AACf,iBAAS,IAAI,WAAW;iBAEnB,UAAU;AACf,iBAAS,IAAI,MAAM;;AAGnB,cAAM,IAAI,MAAM,qBAAqB;;AAEzC,aAAO;;AAEJ;AACH,mBAAa,GAAG,IAAI,KAAK,QAAQ;AAC7B,oBAAY,KAAK;AACjB,YAAI,MAAM,QAAQ,CAAC,SAAS;AACxB,gBAAM,MAAM,oBAAoB,iCAAiC;;;;AAKtE;AACH,aAAO,UAAU,UAAU,UAAU,eAAe,UAAU,aAC1D,UAAU,WAAW,UAAU;;AAMhC;AACH,UAAI,YAAY;AACZ,eAAO;;AAEX,UAAI,YAAY,aAAa,YAAY;AACrC,eAAO;;AAEX,UAAI,YAAY,WAAW,YAAY,aAAa,YAAY;AAC5D,eAAO;;AAEX,UAAI,YAAY,UAAU,YAAY;AAClC,eAAO;;AAEX,aAAO;;AAEJ;AACH,aAAO,aAAa,gBAAgB,aAAa,cAC7C,aAAa;;AAEd;AACH,UAAI,UAAU,aAAa,UAAU;AACjC,eAAO;iBAEF,UAAU;AACf,eAAO;iBAEF,UAAU;AACf,eAAO;;AAGP,cAAM,IAAI,MAAM,iBAAiB;;;AASlC;AACH,UAAI,OAAO;AACP,eAAO;;AAEX,kBAAY;AACZ,UAAI,QAAQ,OAAK,SAAS,EAAE;AAC5B,aAAO;;AAGJ;AACH,aAAO,OAAO,UAAU,YAAY,iBAAiB;;AAElD;AACH,aAAO,OAAO,UAAU;;AAErB;AACH,aAAO,OAAO,UAAU;;AAErB;AACH,UAAI,MAAM,QAAQ;AACd,eAAO,YAAW,OAAO;;AAE7B,UAAI,kBAAkB;AAClB,eAAO;iBAEF,kBAAkB,cAAc,kBAAkB;AACvD,eAAO;iBAEF,UAAS;AACd,eAAO;iBAEF,UAAS;AACd,eAAO;iBAEF,WAAU;AACf,eAAO;;AAEX,aAAO;;AAEJ;AACH,aAAO,CAAC,CAAE,MAAK,EAAE,eAAe,EAAE,QAAQ,EAAE;;AAEzC;AACH,mBAAa,OAAO,IAAI,MAAM,EAAE;AAC5B,YAAI,OAAO,MAAM;AACb,iBAAO;;;AAGf,aAAO;;AAEJ;AACH,mBAAa,MAAM;AACnB,UAAI,OAAO;AACP,eAAO;;AAIX,sBAAgB,IAAI,MAAM,OAAO;AACjC,cAAQ,OAAO,KAAK,MAAM,OAAO;AACjC,mBAAa,OAAO,GAAG,KAAK,GAAG,EAAE;AAC7B,gBAAQ,KAAK,QAAQ,IAAI,KAAK,MAAM,IAAI;;AAE5C,aAAO;;AAEX;AACI,kBAAY,IAAI;AAChB,UAAI,MAAM,WAAW;AACjB,kBAAU,MAAM;AAChB,qBAAa,GAAG,IAAI,GAAG;AACnB,cAAI,KAAK,EAAE,SAAS;;;AAIxB,kBAAU,MAAM;AAChB,qBAAa,MAAM,MAAM;AACzB,oBAAY,KAAK,OAAO,YAAY,MAAM;AAC1C,qBAAa,GAAG,IAAI,GAAG;AACnB,cAAI,KAAK,mBAAkB,SAAS,IAAI,KAAK,MAAM;;;AAG3D,aAAO;;AAGJ;AACH,UAAI,MAAM,WAAW;AAEjB,eAAO,EAAE;;AAEb,mBAAa,MAAM,OAAO,YAAY,MAAM;AAC5C,UAAI,SAAS;AAET,eAAO;;AAEX,UAAI,SAAS,EAAE;AACX,cAAM,IAAI,MAAM,IAAI,wCAAwC,EAAE;;AAElE,aAAO,mBAAkB,GAAG,OAAO;;AAEhC;AACH,qBAAc,qBAAoB,MAAM;AACxC,mBAAa,GAAG,IAAI,OAAM,QAAQ;AAC9B,eAAM,KAAK;;AAE
f,aAAO;;AAEJ;AACH,UAAI,SAAS,QAAQ,UAAU,aAAa,UAAU;AAClD,eAAO,IAAI,aAAa;iBAEnB,UAAU;AACf,eAAO,IAAI,WAAW;iBAEjB,UAAU;AACf,eAAO,IAAI,WAAW;;AAGtB,cAAM,IAAI,MAAM,qBAAqB;;;AAQtC;AACH,mBAAa,MAAM,OAAO,gBAAgB,OAAO,MAAM;AACvD,UAAI,SAAS,QAAQ,UAAU;AAC3B,eAAO,eAAc,OAAO,IAAI,aAAa;iBAExC,UAAU;AACf,eAAO,eAAc,OAAO,IAAI,WAAW;iBAEtC,UAAU;AACf,eAAO,eAAc,OAAO,IAAI,WAAW;;AAG3C,cAAM,IAAI,MAAM,qBAAqB;;;AAGtC;AACH,YAAM,QAAQ;AACV,gBAAO,OAAO,UAAU,YAAY,WAAW,GAAG,MAAM,0EAC1C;;;AAWf;AACH,UAAI,SAAS;AACT,eAAO;iBAEF,SAAS;AACd,eAAO,KAAK;;AAEhB,mBAAY,KAAK,KAAK,SAAS;AAC/B,mBAAa,GAAG,IAAI,KAAK,SAAS,GAAG,EAAE;AACnC,kBAAS,QAAQ,KAAK,KAAK;;AAE/B,aAAO;;AAUJ;AACH,UAAI,SAAS;AACT,eAAO;iBAEF,SAAS;AACd,eAAO,CAAC;;AAEZ,mBAAa,IAAI,MAAM;AACvB,mBAAa,GAAG,IAAI,KAAK,SAAS,GAAG,EAAE;AACnC,aAAK,KAAK,KAAK,MAAM,SAAQ,QAAQ;AACrC,kBAAS,KAAK,KAAK,QAAQ;;AAE/B,WAAK,KAAK,SAAS,KAAK;AACxB,aAAO;;AAOJ;AAOH,aAAO,UAAU,OAAO,QAAQ,OAAO,OAAO,SAAS;;ACrnB3D;;;;;;;;;;;;;;;;AAkBA,uCAAkC;;MAU9B;AACI,aAAK,SAAS;AACd,aAAK,QAAQ;AACb,aAAK,eAAe;AACpB,aAAK,WAAW;AAChB,aAAK;;MAET;AACI,YAAI,KAAK,YAAY;AACjB,kBAAQ,KAAK,YAAY,KAAK,oEACO;;AAEzC,aAAK,eAAe;AACpB,aAAK,WAAW;;MAEpB;AACI,aAAK,aAAa,YAAY,CAAE,cAAc;AAG9C,YAAI,KAAK,SAAS,aAAa;AAC3B,4BAAkB,KAAK,SAAS;AAChC,kBAAQ,KAAK,qCAAqC,aAAa;AAC/D,eAAK,IAAI,UAAU;;;YAGrB;AACF,YAAI,YAAY,KAAK;AACjB,iBAAO,KAAK,MAAM;;AAEtB,aAAK,MAAM,YAAY,MAAM,KAAK,aAAa;AAC/C,eAAO,KAAK,MAAM;;MAEtB;AACI,YAAI,YAAY,KAAK;AACjB,iBAAO,KAAK,MAAM;;AAEtB,0BAAkB,KAAK,aAAa;AACpC,YAAI,WAAU;AACV,gBAAM,IAAI,MAAM,QAAQ;;AAG5B,aAAK,MAAM,YAAY;AACvB,eAAO,KAAK,MAAM;;MAEtB;AACI,eAAO,KAAK,IAAI;;MAEpB;AACI,eAAO,KAAK,IAAI;;MAEpB;AACI,eAAO,KAAK;;UAGZ;AACA,eAAO,KAAK;;MAEhB;AACI,YAAI,KAAK,aAAa,aAAa;AAC/B,gBAAM,IAAI,MAAM,mBAAmB;;AAEvC,aAAK,MAAM,YAAY;AACvB,YAAI,KAAK,aAAa,UAAU,WAAW;AACvC,eAAK,aAAa,UAAU,QAAQ;;;MAG5C;AACI,YAAI,KAAK,aAAa,aAAa;AAC/B,gBAAM,IAAI,MAAM,yBAAyB;;AAE7C,eAAO,KAAK,aAAa,UAAU;;MAEvC;AACI,aAAK,QAAQ,OAAO,OAAO,IAAI;;MAEnC;AACI,aAAK,QAAQ;AACb,aAAK,WAAW;AAChB,aAAK;;MAET;AACI,YAAI,OAAO,KAAK,WAAW,eACvB,OAAO,KAAK,OAAO,aAAa,eAChC,OAAO,KAAK,OAAO,SAAS,WAAW;AACvC;;AAEJ,0BAAkB,gBAAe,KAAK,OAAO,SAAS;AACtD,YAAI,8BAA6B;AAC7B,4BAAkB,UAAU,4BAA2B,MAAM;AAC7D,oBAAU,QAAQ;AACd,iCAAqB,SAAS,MAAM;AACpC,iBAAK,SAAS,OAAO,YAAW,KAAK;;;;;AAK9C;AACH,qBAAe;AACf,kBAAY,QAAQ,+BAA+B;AAC/C,qBAAY,QAAQ,EAAE,IAAI,EAAE;AAC5B,eAAO,EAAE,KAAK;;AAElB,aAAO;;AAEX;AACI,aAAO,mBAAmB,SAAS,mBAAmB,SAAS;;AAEnE;AACI,cAAQ,MAAM;AACd,UAAI,UAAU,UAAU,UAAU;AAC9B,eAAO,UAAU;iBAEZ,GAAG,CAAC,YAAY;AACrB,eAAO,CAAC;;AAEZ,YAAM,IAAI,MAAM,oCAAoC,kBAAkB;;AAUnE;AACH,aAAO,SAAA;;AAEA,aAAA,MAAM;AACV;AACH,eAAA,MAAM;;AC9JV;;;;;;;;;;;;;;;;AAoBA;AAEO;AACH,UAAI,oBAAmB;AAEnB;AACA,YAAI,OAAQ,WAAY;AACpB,eAAK;mBAEA,OAAQ,WAAY;AACzB,eAAK;mBAEA,OAAQ,YAAa;AAC1B,eAAK;mBAEA,OAAQ,SAAU;AACvB,eAAK;;AAGL,gBAAM,IAAI,MAAM;;AAEpB,2BAAkB;;AAEtB,aAAO;;AAGX;AACI,iBAAW;AACX,UAAI,GAAG,cAAc;AACjB,WAAG,aAAa,IAAI;;AAExB,aAAO,GAAG;;AASP;AACH,wBAAkB;AAClB,UAAI,UAAU,IAAI;AACd,eAAO,UAAU,IAAI;;AAGrB,0BAAkB;AAClB,kBAAU,IAAI,KAAK;AACnB,eAAO,UAAU,IAAI;;;ACpEjB,iBAAO;AACP,iBAAQ;AACR,kBAAS;AACT,iBAAO;AACP,kBAAQ;AACR,gBAAO;AACP,gBAAO;AACP,oBAAU;AACV,mBAAU;AACV,iBAAQ;AACR,kBAAS;AACT,iBAAQ;AACR,kBAAS;AACT,kBAAS;AACT,qBAAW;AACX,4BAAmB;AACnB,sBAAa;AACb,8BAAqB;AACrB,yBAAe;AACf,2BAAkB;AAClB,wBAAe;AACf,kBAAQ;AACR,iBAAQ;AACR,yBAAe;AACf,qBAAW;AACX,oBAAU;AACV,oBAAU;AACV,iCAAwB;AACxB,iCAAuB;AACvB,mBAAU;AACV,mCAA0B;AAC1B,kCAAyB;AACzB,iBAAO;AACP,iBAAQ;AACR,oBAAU;AACV,2BAAiB;AACjB,0BAAgB;AAChB,mCAAyB;AACzB,gDAAuC;AACvC,+CAAsC;AACtC,iBAAQ;AACR,uBAAc;AACd,oCAA2B;AAC3B,qCAA4B;AAC5B,iBAAO;AACP,iBAAO;AACP,oBAAW;AACX,gBAAO;AACP,mBAAS;AACT,iBAAO;AACP,kBAAS;AACT,gBAAO;AACP,kBAAQ;AACR,2BAAiB;AACjB,kBAAS;AACT,sBAAY;AACZ
,4BAAkB;AAClB,sBAAY;AACZ,sBAAY;AACZ,qBAAW;AACX,0BAAgB;AAChB,sBAAY;AACZ,iBAAQ;AACR,iBAAQ;AACR,qBAAY;AACZ,kBAAS;AACT,kBAAS;AACT,kBAAQ;AACR,uBAAa;AACb,qBAAY;AACZ,iBAAO;AACP,kBAAS;AACT,wBAAc;AACd,uBAAc;AACd,sBAAa;AACb,uBAAc;AACd,gBAAO;AACP,wBAAe;AACf,iBAAO;AACP,qBAAW;AACX,qBAAW;AACX,4BAAmB;AACnB,sBAAa;AACb,8BAAqB;AACrB,8BAAqB;AACrB,iBAAQ;AACR,iBAAO;AACP,qBAAW;AACX,sBAAa;AACb,gBAAO;AACP,sBAAY;AACZ,oBAAU;AACV,sBAAY;AACZ,iCAAuB;AACvB,iCAAuB;AACvB,iCAAuB;AACvB,sBAAY;AACZ,oBAAU;AACV,mBAAS;AACT,iBAAQ;AACR,iBAAO;AACP,mBAAS;AACT,iBAAQ;AACR,kBAAS;AACT,iBAAQ;AACR,uBAAc;AACd,kBAAQ;AACR,qBAAW;AACX,kCAAyB;AACzB,sCAA6B;AAC7B,4BAAkB;AAClB,+BAAsB;AACtB,mBAAS;AACT,qBAAW;AACX,kBAAS;AACT,mBAAS;AACT,uBAAa;AACb,sBAAY;AACZ,iBAAQ;AACR,mBAAS;AACT,iBAAO;AACP,iBAAQ;AACR,iBAAQ;AACR,qBAAW;AACX,qBAAY;AACZ,kBAAQ;AACR,iBAAO;AACP,2BAAkB;AAClB,oBAAU;AACV,qBAAW;AACX,+BAAqB;AACrB,oBAAU;AACV,iBAAO;AACP,0BAAiB;AACjB,0BAAgB;AAChB,gBAAO;AACP,kBAAQ;AACR,kBAAQ;AACR,iBAAQ;AACR,uBAAa;AACb,mBAAU;AACV,oBAAU;AACV,+BAAsB;AACtB,uBAAa;AAIb,kBAAQ;AACR,uBAAc;AACd,8BAAoB;AACpB,0BAAgB;AAChB,yBAAe;AACf,kCAAwB;ACxJpC;;;;;;;;;;;;;;;;AAkBA,4BAAuB,WAAU,kBAAkB,MAAM,IAAI;AAC7D,0BAAqB,WAAU,gBAAgB,MAAM,IAAI;AAOlD;AACH,kBAAY,SAAQ,YAAY;AAChC,aAAO,gBAAe,IAAI;;AAMvB;AACH,aAAO,cAAa,IAAI;;AAErB;AACH,iBAAW,gBAAe;AAC1B,qBAAe;AACf,aAAO;AACH,eAAQ,MAAM,SAAU,GAAG;AAC3B,YAAI;AACA;;AAEJ,+BAAsB;AACtB,2BAAmB,IAAI,MAAM;AAC7B,YAAI,aAAY;AACZ,iBAAO,KAAK;;;AAGpB,aAAO;;AAaJ;AACH,aAAQ,YAAY,eAAgB;AACpC,kBAAY,SAAQ,YAAY;AAChC,UAAI,gBAAe,IAAI;AACnB,gBAAQ,KAAK,eAAe,4BACpB;;AAEZ,sBAAe,IAAI,KAAK;;AAUrB;AACH,aAAQ,cAAe;AACvB,UAAI,cAAa,IAAI;AAGjB,YAAI,OAAM,QAAQ;AACd,kBAAQ,KAAK,gCAAgC;;;AAGrD,oBAAa,IAAI,YAAY;;AAS1B;AACH,kBAAY,SAAQ,YAAY;AAChC,UAAI,CAAC,gBAAe,IAAI;AACpB,cAAM,IAAI,MAAM,eAAe,4BACvB;;AAEZ,sBAAe,OAAO;;AAGnB;AACH,UAAI,CAAC,cAAa,IAAI;AAClB,cAAM,IAAI,MAAM,iBAAiB;;AAErC,oBAAa,OAAO;;AAQjB;AACH,sBAAgB,sBAAqB;AACrC,cAAQ,QAAQ;AACZ,gCAAwB,OAAO,OAAO,IAAI,cAAc,CAAE,aAAa;AACvE,wBAAe;;;AAGvB;AACI,aAAO,GAAG,eAAe;;AChI7B;;;;;;;;;;;;;;;;AAsBO;AACH,UAAI,UAAU;AACV,eAAO,cAAa;;AAExB,aAAO,cAAa,CAAC,QAAQ;;AAEjC;AACI,aAAQ,aAAa,gBAAgB,UAAU,aAC1C,aAAa,cAAc,UAAU,WACrC,aAAa,cAAc,UAAU;;AAEvC;AACH,UAAI,UAAU;AACV,cAAM,IAAI,MAAM;;AAEpB,UAAI,MAAM,QAAQ;AACd,YAAI,SAAa;;AAErB,UAAI,OAAM,QAAQ;AACd,kCAA8B,GAAG;;AAErC,UAAI,oBAAmB,GAAG;AACtB,eAAO;;AAEX,UAAI,SAAS,QAAQ,UAAU,aAAa,UAAU;AAClD,eAAO,IAAI,aAAa;iBAEnB,UAAU;AACf,eAAO,IAAI,WAAW;iBAEjB,UAAU;AACf,qBAAa,IAAI,WAAW,EAAE;AAC9B,qBAAa,GAAG,IAAI,KAAK,QAAQ,EAAE;AAC/B,cAAI,KAAK,MAAM,EAAE,QAAQ;AACrB,iBAAK,KAAK;;;AAGlB,eAAO;;AAGP,cAAM,IAAI,MAAM,qBAAqB;;;AActC;AACH,aAAO,OAAM,SAAS;;AAkBnB;AACH,aAAO,OAAM,SAAS,MAAM,MAAM;;AAU/B,yCAAoC;AACvC,iBAAW,YAAY;AACvB,aAAO,OAAM,SAAS,OAAO,GAAG;;AAU7B,6CAAwC;AAC3C,iBAAW,YAAY;AACvB,aAAO,OAAM,SAAS,OAAO,OAAO;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACxHxC;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,eAAe;AACpB,aAAK,SAAS;AACd,YAAI,UAAU;AACV,eAAK,SAAS,IAAI;;;MAG1B;AACI;AACA,oCAA4B;AACxB,oBAAU;;AAEd,sBAAc,KAAK,aAAa,KAAK;AACrC,qBAAa,GAAG,IAAI,QAAQ,QAAQ;AAChC,yBAAe,QAAQ;AAGvB,iBAAO,OAAO,KAAK;AACf,uCAA0B,YAAY,OAAO,OAAO;;;AAG5D,8BAAsB;UAClB;UACA;UACA;UACA,QAAQ,MAAM,KAAK,YAAU,OAAO;UACpC,WAAW,MAAM,KAAK,YAAU,OAAO,uBAAuB,OAC1D,OAAO,wBACP;;AAER,eAAO;;MAEX;AACI,eAAQ,YAAY,SAAS,QAAQ,QAAQ,aAAc;AAC3D,gBAAQ,QAAQ;AACZ,kBAAQ,IAAI,CAAC,OAAO,QAAQ,QAAQ,YAAY,KAAK;AACjD,iBAAK,OAAO,iBAAiB,YAAY,QAAQ,eAAe,IAAI,eAAe,IAAI,QAAQ,eAAe;;;;;AAKvH;AACH,UAAI,UAAU;AAEV,eAAO;;AAEX,mBAAa,GAAG,IAAI,KAAK,QAAQ;AAC7B,oBAAY,KAAK;AACjB,YAAI,MAAM,QAAQ,CAAC,SAAS;AAExB,kBAAQ,KAAK,SAAS,yBAAyB;AAC/C,iBAAO;;;AAGf,aAAO;;;MAGP;AACI,sBAAa,OAAO,WAAW,WAAW,UAAc,GAAG,YAAY,KACnE,OAAO;AACX,2BAAmB,UAAc,M
AAM;AACvC,qBAAa,OAAO;AACpB,qBAAa,OAAO;AACpB,sBAAc,UAAc,OAAO,MAAM,YAAY;AACrD,qCAA6B;AAC7B,4BAAmB;AACf,yBAAc,OAAO;AACrB,cAAI,UAAS;AAGT,+BAAmB,OAAM,SAAS,OAAO;AACzC,8BAAkB,WAAW;AAC7B,sCACI,GAAG,UAAS,cAAc,YAAY,IAAI,aAAa;;;AAGnE,gBAAQ,IAAI,KAAK,gBAAiB,WAAW,SAAS,WAAY,UAAW,4BAA6B,aAAa,oBAAoB,aAAa,cAAc,iBAAiB,gBAAgB;;;AC9F/M;;;;;;;;;;;;;;;;AAyBO;AAGH,2BAAqB;AACrB,yBAAmB;AACnB,mBAAa,GAAG,IAAI,GAAG,QAAQ;AAC3B,qBAAa,GAAG,GAAG,MAAM;;AAE7B,mBAAa,GAAG,IAAI,MAAK,QAAQ;AAC7B,qBAAa,MAAK;AAClB,2BAAmB,KAAK;AACxB,gCAAwB;AACpB,yBAAc,WAAW;AACzB,8BAAoB;AACpB,uBAAa,GAAG,IAAI,GAAG,QAAQ;AAC3B,gBAAI,aAAa,OAAM;AACnB,mBAAK,QAAQ,QAAQ,YAAU,aAAa,OAAO,MAAM;AACzD,8BAAgB;AAChB,yBAAW,KAAK,MAAM;AACtB;;;AAGR,cAAI;AACA;;;;AAKZ,6BAAuB;AACvB,qBAAe,EAAE,MAAM;AACvB,uBAAiB;AACjB,mBAAa,MAAK,SAAS,GAAG,KAAK,GAAG;AAClC,qBAAa,MAAK;AAClB,2BAAmB,KAAK;AAExB,qBAAa,GAAG,IAAI,KAAK,QAAQ,QAAQ;AACrC,cAAI,eAAe,KAAK,QAAQ,GAAG;AAC/B,oCAAwB;AACpB,6BAAe,WAAW,WAAW,MAAM;AAC3C,uBAAS,KAAK,MAAM;;AAExB;;;;AAKZ,2BAAqB;AACrB,mBAAa,GAAG,IAAI,MAAK,QAAQ;AAC7B,qBAAa,MAAK;AAClB,YAAI,WAAW,KAAK,OAAO,SAAS,KAAK;AAErC,+BAAqB;AACrB,kCAAwB,KAAK;AACzB,8BAAkB,KAAK,OAAO;AAC9B,gBAAI,aAAa,UAAU;AACvB,2BAAa,aAAa;;;AAIlC,6BAAmB,OAAO,OAAO,IAAI;AACrC,qBAAW,SAAS;AACpB,qBAAW,UAAU,KAAK;AAC1B,uBAAa,KAAK;;;AAG1B,aAAO;;AASJ;AAEH,mBAAa,aAAa,SAAS,GAAG,KAAK,GAAG;AAC1C,qBAAa,aAAa;AAC1B,oBAAY;AACZ,aAAK,QAAQ,QAAQ;AACjB,6BAAmB,6BAA6B,EAAE;AAClD,cAAI,cAAc;AACd,gBAAI,KAAK;;AAKT,gBAAI,KAAK;;;AAGjB,YAAI,KAAK,YAAY;AACjB,gBAAM,IAAI,MAAM,4DACL,KAAK;;AAGpB,+BAAuB,KAAK,SAAS;AACrC,gCAAwB,KAAK;AACzB,cAAI,CAAE,cAAa;AACf,kBAAM,IAAI,MAAM,iCAAiC,yCACf,OAAO,KAAK;;AAGlD,qBAAW,MAAK,MAAM,eAAe;AACrC,cAAI,GAAG,UAAU;AACb,kBAAM,IAAI,MAAM,4BAA4B,KAAK,qCAC1C,iDAAiD,GAAG;;AAE/D,oBAAU,KAAK,OAAO;AACtB,cAAI,CAAC,aAAiB,GAAG,OAAO,EAAE;AAC9B,kBAAM,IAAI,MAAM,4BAA4B,KAAK,sCACzC,yBAAyB,GAAG,wDACL,EAAE;;AAErC,cAAI,6BAA6B,EAAE,OAAO;AACtC,yCAA6B,EAAE,MAAM;;AAGrC,gCAAoB,6BAA6B,EAAE;AACnD,yCAA6B,EAAE,MAAM,KAAI,aAAa;AACtD,wBAAY;;;;;AChJ5B;;;;;;;;;;;;;;;;AAkBA,mCAA8B;AAE9B,wCAAmC;AAEnC,mCAA8B;AACvB;AACH,sBAAgB,gBAAe;AAC/B,wBAAkB,yBAAwB,MAAM,OAAO,OAAO;AAC9D,mBAAa,MAAM;AACnB,wBAAkB,mBAAkB,MAAM,OAAO,OAAO,SAAS;AACjE,oBAAc,CAAC;AACf,UAAI;AACA,cAAM,KAAK,YAAY;AACvB,cAAM,KAAK,WAAW;AACtB,cAAM,KAAK,aAAa;AACxB,cAAM,KAAK;;AAEf,YAAM,KAAK,UAAU,IAAI,OAAK,SAAS,GAAG,KAAK;AAC/C,aAAO,MAAM,KAAK;;AAEtB;AACI,gBAAU,eAAc;AACxB,sBAAgB,QAAQ,QAAQ,SAAS;AACzC,wBAAkB,IAAI,MAAM,SAAS,KAAK;AAC1C,mBAAa,MAAM;AACnB,6BAAuB,UAAU,cAAc,qBAAoB,QAAQ;AAC3E,UAAI,OAAO;AACP,uBAAe,GAAG,MAAM,IAAI,SAAS;AACjC,yBAAe,MAAM;AACrB,uBAAa,GAAG,IAAI,SAAS;AACzB,sBAAU,KAAK,KAAK,IAAI,UAAU,IAAI,aAAY,eAAe,SAAS,IAAI,GAAG,OAAO;;;;AAIpG,aAAO;;AAEX;AACI;AACA,UAAI,MAAM,QAAQ;AACd,iBAAS,GAAG,WAAW,IAAI,GAAG,QAAQ,8BAC/B,WAAW,IAAI,GAAG,QAAQ;iBAE5B,UAAS;AACd,iBAAS,IAAI;iBAER,UAAU;AACf,iBAAS,iBAAgB;;AAGzB,iBAAS,WAAW,IAAI,QAAQ,yBAAwB;;AAE5D,aAAO,UAAS,QAAQ;;AAE5B;AACI,aAAO,MAAM,IAAI,UAAU;;AAE/B,iFAA4E;AACxE,gCAA0B,UAAU,cAAc,IAAI;AACtD,mBAAa,MAAM;AACnB,mBAAa,MAAM;AACnB,UAAI,SAAS;AACT,YAAI,UAAU;AACV,+BAAqB,qBAAoB;AACzC,iBAAO,CAAC,aAAY,aAAa,IAAI,GAAG;;AAE5C,YAAI,UAAU;AACV,iBAAO,CAAC,iBAAgB,KAAK;;AAEjC,eAAO,CAAC,KAAK,GAAG;;AAEpB,UAAI,SAAS;AACT,YAAI,OAAO;AACP,gCAAsB,8BAA6B;AACnD,0BAAgB,MAAM,KAAK,KAAK,MAAM,GAAG;AACzC,yBAAe,MAAM,KAAK,KAAK,MAAO,QAAO,+BAA8B,mBAAmB,OAAO;AACrG,cAAI,UAAU;AACV,wBAAY,qBAAoB;AAChC,uBAAW,qBAAoB;;AAEnC,iBAAO;YACH,MACI,UAAU,IAAI,UAAU,aAAY,GAAG,UAAU,IAAI,QAChD,KAAK,QACV,YACA,SACK,IAAI,UAAU,aAAY,GAAG,UAAU,OAAO,8BAA6B,IAAI,QAC/E,KAAK,QACV;;;AAGZ,4BAAoB,UAAU,cAAc,qBAAoB,QAC5D,MAAM,KAAK;AACf,eAAO;UACH,MACI,YAAY,IAAI,UAAU,aAAY,GAAG,UAAU,IAAI,QAClD,KAAK,QACV;;;AAIZ,uBAAiB,MAAM,MAAM;AAC7B,yBAAmB,QAAQ,MAAM
;AACjC,qBAAe,QAAQ,KAAK;AAC5B,oBAAc;AACd,UAAI,OAAO;AACP,qBAAa,GAAG,IAAI,6BAA4B;AAC5C,wBAAc,IAAI;AAClB,sBAAY,QAAQ;AACpB,gBAAM,KAAK,GAAG,mBAAkB,KAAK,MAAM,OAAO,MAAM,UAAU,OAAO,YAAY,WAAW;;AAEpG,cAAM,KAAK;AACX,qBAAa,OAAO,6BAA4B,IAAI,MAAM;AACtD,wBAAc,IAAI;AAClB,sBAAY,QAAQ;AACpB,gBAAM,KAAK,GAAG,mBAAkB,KAAK,MAAM,OAAO,MAAM,UAAU,OAAO,YAAY,WAAW,MAAM,OAAO;;;AAIjH,qBAAa,GAAG,IAAI,MAAM;AACtB,wBAAc,IAAI;AAClB,sBAAY,QAAQ;AACpB,gBAAM,KAAK,GAAG,mBAAkB,KAAK,MAAM,OAAO,MAAM,UAAU,OAAO,YAAY,WAAW,MAAM,OAAO;;;AAGrH,kBAAY,SAAS,IAAI,MAAM;AAC/B,YAAM,KAAK,MAAM,MAAM,KAAK;AAC5B,mBAAa,GAAG,IAAI,MAAM,SAAS,GAAG;AAClC,cAAM,KAAK,MAAM,MAAM,KAAK;;AAEhC,uBAAiB;AACjB,mBAAa,GAAG,IAAI,MAAM;AACtB,sBAAc;;AAElB,YAAM,MAAM,SAAS,KACjB,MAAM,MAAM,MAAM,SAAS,KAAK,MAAO,UAAS,KAAK;AACzD,aAAO;;AAEX;AACI,4BAAsB;AACtB,mBAAa,GAAG,IAAI,KAAK,QAAQ,KAAK;AAClC,sBAAc,KAAK,CAAC,KAAK,IAAI,KAAK,IAAI;;AAE1C,aAAO;;AChKX;;;;;;;;;;;;;;;;;MA4BI;AACI,aAAK,QAAQ;AACb,aAAK,QAAQ,MAAM;AACnB,aAAK,OAAO,eAAmB;AAC/B,YAAI,UAAU;AACV,oBAAU,OAAO;AACjB,kBAAY,MAAM,KAAK,MAAM,MAAM,qBAAqB,qDAC1B,KAAK;;AAEvC,YAAI,UAAU;AACV,gBAAM,IAAI,MAAM;;AAIpB,aAAK,SAAS,UAAU,mBAAuB,OAAO,KAAK;AAC3D,aAAK,UAAU,gBAAe;;MAUlC;AACI,YAAI,KAAK,WAAW;AAChB,iBAAO,CAAC;;AAEZ,gBAAY,KAAK,WAAW,KAAK,MAAM,MAAM,uCAAuC,KAAK,gCAClE,KAAK;AAC5B,uBAAc,KAAK,WAAW;AAC9B,aAAK,OAAO,UAAS;;MASzB;AACI,YAAI,KAAK,WAAW;AAChB,iBAAO,CAAC;;AAEZ,gBAAQ;AACR,0BAAkB;AACd,cAAI,MAAM,KAAK,OAAO,KAAK,MAAM;AAC7B,wBAAY,qCAAqC,wBAC3B,KAAK;AAC3B,kBAAM,IAAI,MAAM;;AAEpB;;AAEJ,qBAAY,KAAK,KAAK,SAAS;AAC/B,sBAAa,GAAG,KAAI,KAAK,SAAS,GAAG,EAAE;AACnC,oBAAS,KAAK,QAAQ,MAAK,KAAK;;AAEpC,eAAO,KAAK,OAAO;;MAEvB;AACI,YAAI,KAAK,SAAS;AACd,iBAAO;mBAEF,KAAK,SAAS;AACnB,iBAAO,KAAK;;AAEhB,qBAAY,KAAK,KAAK,SAAS;AAC/B,qBAAa,GAAG,IAAI,KAAK,SAAS,GAAG,EAAE;AACnC,oBAAS,KAAK,QAAQ,KAAK,KAAK;;AAEpC,eAAO;;MAEX;AACI,YAAI,KAAK,SAAS;AACd,iBAAO;mBAEF,KAAK,SAAS;AACnB,iBAAO,CAAC;;AAEZ,qBAAa,IAAI,MAAM,KAAK,MAAM;AAClC,qBAAa,GAAG,IAAI,KAAK,SAAS,GAAG,EAAE;AACnC,eAAK,KAAK,KAAK,MAAM,SAAQ,KAAK,QAAQ;AAC1C,oBAAS,KAAK,KAAK,KAAK,QAAQ;;AAEpC,aAAK,KAAK,SAAS,KAAK;AACxB,eAAO;;UAEP;AACA,eAAO,KAAK,MAAM;;MAOtB;AACI,eAAO,aAAY,WAAW,KAAK,QAAQ,KAAK,OAAO,KAAK;;;AAIpE,qBAAgB;AAEhB,qBAAgB;AAEhB,gCAA2B;AAI3B,KAAC;AAMM;AACH,mBAAY;;AAOT;AACH,mBAAY;;AAMT;AACH,8BAAuB;;;MAWvB;AAEI,aAAK,OAAO;AACZ,aAAK,qBAAqB;AAC1B,aAAK,QAAQ,MAAM;AACnB,aAAK,QAAQ,SAAS;AACtB,aAAK,OAAO,eAAmB;AAC/B,aAAK,UAAU,gBAAe;AAC9B,aAAK,SAAS;AACd,aAAK,KAAK;AACV,aAAK,WAAY,KAAK,OAAO,IAAI,KAAK,KAAK,aAAa;;UAExD;AACA,eAAO,KAAK,MAAM;;YAOhB;AACF,qBAAa,MAAM,KAAK;AACxB,eAAO,WAAU,OAAO,KAAK,OAAO,KAAK,OAAO;;MAMpD;AACI,eAAO,WAAU,OAAO,KAAK,OAAO,KAAK,OAAO,KAAK;;YAQnD;AACF,qBAAa,MAAM,KAAK;AACxB,eAAO,eAAc,KAAK,OAAO;;MAQrC;AACI,eAAO,eAAc,KAAK,OAAO,KAAK;;YAQpC;AACF,aAAK;AACL,sBAAa,aAAY,KAAK,KAAK;AACnC,YAAI,KAAK,UAAU;AACf,wBAAc,MAAM;AACpB;AACI,mBAAO,MAAM,IAAI,OAAK,cAAkB;;AAGxC,kBAAM,IAAI,MAAM;;;AAIxB,eAAO;;MAQX;AACI,aAAK;AACL,sBAAa,aAAY,SAAS,KAAK;AACvC,YAAI,KAAK,UAAU;AACf;AACI,mBAAO,MAAK,IAAI,OAAK,cAAkB;;AAGvC,kBAAM,IAAI,MAAM;;;AAIxB,eAAO;;YAGL;AACF,aAAK;AACL,sBAAa,MAAM,aAAY,KAAK,KAAK;AACzC,YAAI,KAAK,UAAU;AACf,iBAAO;;AAGP,iBAAO,IAAI,WAAW,MAAK;;;MAQnC;AACI,YAAI,KAAK;AACL;;AAEJ,qBAAY,cAAc;AAC1B,aAAK,qBAAqB;;UAE1B;AACA,eAAO,KAAK;;MAEhB;AACI,YAAI,KAAK;AACL,gBAAM,IAAI,MAAM;;;MAWxB,gBAAgB;AACZ,eAAO,WAAU,MAAM,MAAM;;MAMjC;AACI,aAAK;AACL,eAAO,WAAU,MAAM;;MAO3B,mBAAmB;AACf,qBAAa,KAAK;AAClB,eAAO,gBAAe,MAAM,KAAK,OAAO,KAAK,OAAO;;MAExD;AACI,aAAK;AACL,eAAO,WAAU,KAAK,MAAM;;MAEhC,qBAAqB;AACjB,aAAK;AACL,eAAO,aAAY,aAAa,MAAM,WAAW,MAAM;;;AAG/D,WAAO,eAAe,SAAQ,OAAO,aAAa;MAC9C,OAAO;AAMH,eAAO,CAAC,CAAC,YAAY,SAAS,QAAQ,QAAQ,SAAS,YAAY,QAC/D,SAAS,mBAAmB;;;4BAQV;MAC1B;AACI,cAAM,aAAa,OAAO,aAAa,OAAO,aAAa,
QAAQ;AACnE,aAAK,YAAY;AACjB,aAAK,OAAO;;MAUhB;AACI,YAAI,SAAS,UAAU,KAAK;AACxB,gBAAM,IAAI,MAAM,2BAA2B,SAAS,8BAC7B,KAAK;;AAEhC,YAAI,CAAC,aAAiB,SAAS,OAAO,KAAK;AACvC,gBAAM,IAAI,MAAM,2BAA2B,SAAS,8BAC7B,KAAK;;AAEhC,qBAAY,cAAc;AAC1B,aAAK,SAAS,SAAS;AACvB,qBAAY,OAAO,MAAM;;MAE7B;AACI,qBAAY,gBAAgB;AAC5B,aAAK,qBAAqB;;;AAGlC,WAAO,eAAe,WAAU,OAAO,aAAa;MAChD,OAAO;AACH,eAAO,oBAAoB,WAAU,SAAS,UAAU,QACpD,SAAS,kBAAkB;;;AC5XvC;;;;;;;;;;;;;;;;AAiBA,IAAC;AACG,YAAK,QAAQ;AACb,YAAK,QAAQ;AACb,YAAK,QAAQ;AACb,YAAK,QAAQ;AACb,YAAK,QAAQ;AACb,YAAK,QAAQ;AACb,YAAK,QAAQ;OACd,SAAA,QAAS,UAAA,OAAO;AAGnB;AACA,IAAC;AACG,yBAAkB,aAAa;AAC/B,yBAAkB,WAAW;AAC7B,yBAAkB,UAAU;AAC5B,yBAAkB,eAAe;OAClC,sBAAsB,sBAAoB;AAC7C;AACA,IAAC;AACG,wBAAiB,aAAa;AAC9B,wBAAiB,WAAW;AAC5B,wBAAiB,UAAU;AAC3B,wBAAiB,eAAe;OACjC,qBAAqB,qBAAmB;AAC3C;AACA,IAAC;AACG,2BAAoB,aAAa;AACjC,2BAAoB,WAAW;AAC/B,2BAAoB,UAAU;AAC9B,2BAAoB,eAAe;OACpC,wBAAwB,wBAAsB;AACjD;AACA,IAAC;AACG,6BAAsB,aAAa;AACnC,6BAAsB,WAAW;AACjC,6BAAsB,UAAU;AAChC,6BAAsB,eAAe;OACtC,0BAA0B,0BAAwB;AACrD,2BAAsB;MAClB,SAAW;MACX,OAAS;MACT,MAAQ;MACR,WAAa;;AAEV;AACH,UAAI,UAAU,YAAY,UAAU;AAChC,YAAI,UAAU,YAAY,UAAU;AAChC,iBAAO;;AAEX,cAAM,IAAI,MAAM,kBAAkB,cAAc;;AAEpD,aAAO,eAAc,OAAO;;AAGzB;AACH,aAAO,YAAW,MAAM;;ACzE5B;;;;;;;;;;;;;;;;AAmBO;AACH,UAAI,EAAE,UAAU,EAAE;AACd,eAAO,CAAC,GAAG;;AAEf,oBAAc,YAAW,EAAE,OAAO,EAAE;AACpC,aAAO,CAAC,EAAE,KAAK,QAAQ,EAAE,KAAK;;AAE3B;AACH,cAAO,EAAE,UAAU,EAAE,OAAO,MAAM,2BAA2B,EAAE,qBAChD,EAAE;;AAEd;AACH,aAAO,WAAW,KAAK,OAAK,EAAE,OAAO,QAAO;;AAczC;AACH,mBAAa;AACb,mBAAa,IAAI;AACjB,2BAAoB,QAAQ,MAAM;AAClC,aAAO;;AAEX;AACI,UAAI,aAAa;AACb;;AAEJ,UAAI,qBAAqB;AACrB,aAAK,KAAK;AACV;;AAEJ,UAAI,CAAC,YAAW;AACZ;;AAGJ,uBAAiB;AACjB,sBAAgB;AACZ,oBAAY,SAAS;AACrB,YAAI,CAAC,KAAK,IAAI;AACV,eAAK,IAAI;AACT,+BAAoB,KAAK,MAAM;;;;AAK3C;AACI,aAAO,MAAM,QAAQ,QAAQ,OAAO,QAAQ;;;;;;;;;AC1EhD;;;;;;;;;;;;;;;;;MA4BI;AAEI,aAAK,sBAAsB;AAC3B,aAAK,iBAAiB;AACtB,aAAK,WAAW;AAChB,aAAK,aAAa;AAClB,aAAK,mBAAmB;AACxB,aAAK,iBAAiB;AAItB,aAAK,gBAAgB;AAGrB,aAAK,cAAc;AACnB,aAAK,aAAa;AAKlB,aAAK,oBAAoB;AACzB,aAAK,cAAc;AACnB,aAAK,aAAa,IAAI;AACtB,aAAK,YAAY;AACjB,aAAK,gBAAgB,CAAE,UAAU,GAAG,YAAY,GAAG,WAAW,GAAG,SAAS,IAAI,QAAQ;;MAE1F;AACI,mCAA2B,KAAK;AAC5B,eAAK,oBAAoB,cAAc;;;;;MAK/C;AACI,aAAK,MAAM;AACX,aAAK,WAAW;AAChB,aAAK,kBAAkB;AACvB,aAAK,uBAAuB;AAC5B,aAAK,QAAQ,IAAI;;YAEf;AACF,YAAI,KAAK,sBAAsB;AAC3B,iBAAO,KAAK,mBAAmB,KAAK;;;AAExC,YAAI,KAAK,mBAAmB;AACxB;;AAEJ,+BAAuB,KAAK;AAC5B,qBAAa,GAAG,IAAI,eAAe,QAAQ;AACvC,8BAAoB,eAAe;AACnC,0BAAgB,MAAM,KAAK,kBAAkB,aAAa;AAC1D,cAAI;AACA,kBAAM,KAAK,WAAW;AACtB;;;AAGR,cAAM,IAAI,MAAM;;UAGhB;AACA,YAAI,KAAK,sBAAsB;AAC3B,gBAAM,IAAI,MAAM,YAAY,KAAK;;AAIrC,YAAI,KAAK,mBAAmB;AACxB,iBAAQ,MAAM,aAAc,KAAK;AACjC,cAAI;AACA,kBAAM,IAAI,MAAM,iCAAiC;;AAIrD,eAAK,WAAW;;AAEpB,eAAO,KAAK;;MAEhB;AACI,eAAO,OAAO,KAAK,KAAK;;MAE5B;AACI,YAAI,CAAE,gBAAe,KAAK;AAGtB,cAAI,eAAe,KAAK;AACpB,mBAAQ,aAAc,KAAK,kBAAkB;AAC7C,gBAAI;AAEA,qBAAO;;;AAIX,mBAAO;;;AAGf,eAAO,KAAK,SAAS;;MAEzB;AACI,YAAI,CAAE,gBAAe,KAAK;AACtB,iBAAO;;AAEX,eAAO,KAAK,gBAAgB,aAAa;;MAE7C,iDAAiD;AAC7C,YAAI,eAAe,KAAK;AACpB,kBAAQ,KAAK,GAAG;AAEhB,iBAAO;;AAEX,aAAK,gBAAgB,eAAe,CAAE,SAAS;AAC/C,eAAO;;YAEL;AACF,YAAI,KAAK,gBAAgB,gBAAgB;AACrC,gBAAM,IAAI,MAAM,iBAAiB;;AAErC,aAAK,cAAc;AACnB,YAAI,KAAK,SAAS,gBAAgB;AAC9B,eAAK,kBAAkB;AACvB,iBAAQ,SAAS,aAAc,KAAK,kBAAkB;AACtD,yBAAe,YAAY,MAAM,UAAU;AAC3C,cAAI,CAAC;AACD,mBAAO;;;AAGf,aAAK,kBAAkB,KAAK,SAAS;AACrC,aAAK;AAEL,aAAK,WAAW,IAAI,UAAS,KAAK;AAClC,eAAO;;MAEX;AACI,wBAAgB,sBAAqB,KAAK;AAC1C,gBAAQ,QAAQ;AACZ,cAAI,OAAO,aAAa;AACpB,mBAAO,UAAU,KAAK;;;;MAIlC;AACI,wBAAgB,sBAAqB;AACrC,gBAAQ,QAAQ;AACZ,cAAI,OAAO,eAAe;AACtB,mBAAO,YAAY,KAAK,SAAS;;;;MAU7C;AACI,qCAA6B,KAAK,gBAAgB;AAClD,YAAI,wBAA
wB;AACxB,gBAAM,IAAI,MAAM,6BAA6B;;AAEjD;AACI,2BAAgB,qBAAqB;AAMrC,cAAI,YAAW,CAAE,qBAAmB,mBAC7B,OAAO,SAAQ,SAAS;AAC3B,8BAAkB,EAAE,KAAK;AACzB,4BAAgB,SACX,KAAK;AAEN,kBAAI,YAAY,KAAK;AACjB,uBAAO;;AAEX,mBAAK,SAAS,eAAe;AAC7B,mBAAK,qBAAqB;AAC1B,qBAAO;eAEN,MAAM;AAEP,kBAAI,YAAY,KAAK;AACjB,uBAAO;;AAEX,mBAAK,qBAAqB;AAC1B,sBAAQ,KAAK,6BAA6B;AAC1C,sBAAQ,KAAK,IAAI,SAAS,IAAI;AAC9B,qBAAO;;AAEX,iBAAK,qBAAqB;AAC1B,mBAAO,CAAE,SAAS,WAAW;;AAG7B,iBAAK,SAAS,eAAe;AAC7B,mBAAO,CAAE,SAAS,MAAM,WAAW;;;AAIvC,kBAAQ,KAAK,6BAA6B;AAC1C,kBAAQ,KAAK,IAAI,SAAS,IAAI;AAC9B,iBAAO,CAAE,SAAS,OAAO,WAAW;;;MAG5C;AACI,YAAI,CAAE,gBAAe,KAAK;AACtB,gBAAM,IAAI,MAAM,GAAG;;AAEvB,YAAI,KAAK,gBAAgB,eAAe,KAAK,sBAAsB;AAG/D,eAAK;;AAET,YAAI,eAAe,KAAK;AACpB,eAAK,yBAAyB;AAC9B,eAAK,SAAS,aAAa;AAC3B,iBAAO,KAAK,SAAS;;AAEzB,eAAO,KAAK,gBAAgB;AAE5B,YAAI,KAAK,gBAAgB;AACrB,eAAK,qBAAqB;AAC1B,eAAK,cAAc;AACnB,eAAK,kBAAkB;;;MAG/B;AACI,YAAI,OAAO,KAAK,KAAK,iBAAiB,WAAW;AAC7C,gBAAM,IAAI,MAAM;;AAEpB,eAAO,OAAO,KAAK,KAAK,iBAAiB,KAAK;AAE1C,iBAAO,KAAK,gBAAgB,GAAG,WAC3B,KAAK,gBAAgB,GAAG;;;MAGpC;AACI,+BAAuB,KAAK;AAC5B,qBAAa,GAAG,IAAI,eAAe,QAAQ;AACvC,8BAAoB,eAAe;AACnC,iBAAQ,SAAS,aAAc,KAAK,kBAAkB;AACtD,cAAI,aAAa;AACb,mBAAO,CAAE,MAAM,aAAa;;;AAGpC,cAAM,IAAI,MAAM;;MAGpB;AACI,qBAAa,KAAK,MAAM,WAAW,IAAI;AACvC,2BAAmB,KAAK;AACxB,uBAAe,KAAK,SAAS;AAG7B,mBAAW,YAAY;AACvB,aAAK,UAAU;AACf,iBAAQ,KAAK,QAAQ,QAAQ,KAAK,OAAO,KAAK;AAC9C,YAAI,KAAK;AAGL,eAAK,MAAM,kBAAkB,KAAK,MAAM,kBAAkB,SAAS;;;MAG3E;AACI,mBAAW;AACX,YAAI,MAAM;AAEN,cAAI,OAAO,aAAa;AACpB,kBAAM,IAAI,MAAM;;AAEpB,eAAK;;AAIL,cAAI,OAAO,aAAa,YAAY,CAAE,qBAAoB;AACtD,kBAAM,IAAI,MAAM;;AAGpB,cAAI,OAAO,OAAO;AACd,kBAAM,IAAI,MAAM;;AAGpB,iBAAO;;AAIX;AACA,eAAO,KAAK,UAAU,MAAM,KAAK,WAAW,OAAO,MAAM,KAAK,SAAS,SAAS;AAC5E,mBAAS;AACT,cAAI,kBAAkB;AAClB,oBAAQ,MAAM;;AAElB,iBAAO;;;MAGf;AACI;AACA;AACI,sBAAY;AACZ;AACA,iBAAO;;AAGP;AACA,gBAAM;;;MAGd;AACI,eAAO,QAAO;;MAElB;AACI,eAAO,QAAO;;MAWlB;AACI,kBAAU,KAAK,qBAAqB,EAAE,QAAQ,EAAE,OAAO,EAAE;AACzD,uBAAe,CAAE;AACjB,sBAAa,QAAS;UAClB,GAAG;AACC,0BAAc;AACd,+BAAmB,CAAE,GAAG;AACxB,0BAAc,CAAE;AAChB,mBAAO,QAAO,cAAc,cAAW,SAAQ,KAAK,IAAI,QAAQ,YAAY,MAAiB,OAAM;;;AAG3G,sBAAc;AACd,aAAK,YAAY,KAAK,MAAM,YAAY,MAAM,QAAQ,CAAC,IAAI,OAAM,OAAO;AACxE,eAAO;;MAeX;AACI,4BAAoB;AACpB,8BAAsB;AAItB,eAAO,KAAK,cAAc,aAAa,QAAQ,eAAe,YAAY,OAAO,cAAc;;MAEnG;AACI,eAAO,KAAK,IAAI,QAAQ;;MAE5B;AACI,gCAAwB,KAAK,QAAQ;AAErC,+BAAuB;AACvB,iBAAS,QAAQ;AAGb,8BAAqB,KAAK,UAAU,cAAc,IAAI;;AAO1D,yBAAiB,KAAK,MAAM,kBAAkB,KAAK,MAAM,kBAAkB,SAAS;AACpF,8BAAsB,kBAAkB,mBAAmB,mBAAmB;AAC9E,YAAI,gBAAgB;AAChB,gBAAM,IAAI,MAAM,YAAY,KAAK,6CACzB,0CAA0C;;;MAO1D;AACI;AACA,oBAAY;AACZ,yBAAiB,KAAK;AACtB,YAAI,cAAc;AACd,uBACI,KAAK,MAAM,eAAe,OAAO,KAAK,MAAM,YAAY,OAAO;;AAEvE,kCAA0B,KAAK,MAAM;AACrC,mCAA2B,KAAK,MAAM;AACtC,YAAI,KAAK;AACL,eAAK,MAAM,kBAAkB,KAAK;;AAEtC;AACA,uBAAe,WAAU,YAAY,KAAK;AAC1C;AACA,YAAI,UAAU;AACV,wBAAa;AACT,qCAAyB,KAAK,QAAQ;AACtC,kBAAM,OAAO,WAAW,CAAE,QAAQ,OAAO,SAAS,KAAK;AACvD,6BAAiB,MAAM,QAAQ,OAAO,MAAM,CAAC;AAC7C,gBAAI,KAAK;AACL,mBAAK,sBAAsB,YAAY,kBAAkB;;AAE7D,+BAAmB,SAAS,IAAI,EAAG,QAAQ,OAAO,WAAY,KAAK,qBAAqB,QAAQ,OAAO;AAKvG,gBAAI;AACA,kCAAoB,KAAK,sBAAsB,YAAY,QAAQ;AACnE,kBAAI,iBAAiB;AAKjB,oBAAI,iBAAiB;AACjB,kCAAgB;;AAEpB,mCAAmB,WAAW,OAAO,UAAU,cAAc;AAC7D,gCAAiB,iBAAgB,IAAI,QAAQ,OAAO;;AAExD,sBAAQ,KAAK,2BAA2B;;AAE5C,mBAAO;;;AAIX,2BAAiB;AAIb,gBAAI,CAAC;AACD;;AAEJ,oBAAQ,QAAQ,IAAI,aAAU,KAAK,KAAK,KAAK,MAAM;;AAEvD,wBAAa;AACT,qCAAyB,KAAK,QAAQ;AACtC,kBAAM,KAAK,KAAK,MAAM,YAAY,KAAK,SAAS;AAChD,yBAAc,MAAM,QAAQ,OAAO,MAAM,CAAC;AAC1C,gBAAI,KAAK;AACL,mBAAK,sBAAsB,YAAY,kBAAkB;;AAE7D,mBAAO;;;AAIf;AACA,aAAK,UAAU,MAAM,KAAK,MAAM,eAAe,MAAM,KAAK,MAAM,eAAe;AAC3E,cAAI,CAAC,KAAK,IAAI,QAAQ,YAAY,CAAC,KAAK,MAAM;AAC1C,sBAAU;
;AAGV,4BAAgB,KAAK,SAAS,cAAc,YAAY,QAAQ,MAAM;AACtE,gBAAI,KAAK,IAAI,QAAQ;AACjB,mBAAK,SAAS,iBAAiB;;AAEnC,sBAAU,cAAc;;;AAGhC,YAAI;AACA,eAAK,YAAY,YAAY,QAAQ,SAAS,eAAe,OAAO;;AAExE,YAAI,KAAK,MAAM;AACX,eAAK,MAAM,cAAc,QAAQ,KAAK;YAClC,MAAM;YACN,YAAY,KAAK,MAAM,WAAW;YAClC,oBAAoB,KAAK,MAAM;YAC/B,cAAc,KAAK,MAAM,aAAa;YACtC,sBAAsB,KAAK,MAAM;YACjC,aAAa,OAAO,KAAK,QAAQ,IAAI,SAAO,OAAO,QAAQ,OAAO,OAAO,KAAK,QAAQ;YACtF,cAAc,QAAQ,IAAI,UAAQ,KAAK;YACvC,cAAc,cAAc;YAC5B,WAAW,cAAc;;;AAGjC,eAAQ,MAAM,QAAQ,OAAO,UAAU,QAAQ;;MAOnD;AACI,sBAAc,QAAQ,IAAI,aAAU,KAAK,KAAK,KAAK,MAAM;AACzD,eAAO;;MAYX;AACI,2BAAmB,aAAY;AAC/B,YAAI,cAAc;AACd,+BAAqB,WAAW,gBAAgB;AAChD,gCAAsB,WAAW,iBAAiB;AAGlD;AACA,cAAI,WAAW;AACX,oBAAY,MAAM,QAAQ,SAAS,MAAM;AACzC,iCAAqB,OAAO,KAAK,QAAQ,IAAI,SAAS,OAAO;;AAG7D,iCAAqB,aAAa,IAAI,eAAe,OAAO;;AAEhE,sCAA4B,QAAQ,OAAO,UAAU,cAAc;AACnE,iBAAO,mBAAmB,OAAO;;AAIrC,eAAO;;MAOX;AACI,YAAI,UAAU;AACV,gBAAM,IAAI,MAAM;;AAEpB,gBAAQ,SAAS;AACjB,mBAAU,YAAW,KAAK;AAC1B,0BAAkB;AAClB,YAAI,UAAU,YAAY,UAAc,OAAO;AAC3C,wBAAc,OAAO,IAAI,OAAK,cAAkB;;AAEpD,uBAAe,SAAQ,MAAM,aAAa,OAAO;AACjD,kBAAU,IAAI,QAAO,OAAO,OAAO,QAAQ,KAAK;AAChD,aAAK,OAAO,GAAG;AAEf,YAAI,UAAU;AACV,uBAAa,KAAK,MAAM,WAAW,IAAI;AACvC,2BAAiB,sBAAqB;AACtC,eAAK,MAAM,YAAY,WAAW,KAAK;AACvC,eAAK,QAAQ;;AAEjB,eAAO;;MAOX;AACI,gBAAQ,SAAS;AACjB,kBAAU,IAAI,QAAO,OAAO,OAAO,QAAQ,KAAK;AAChD,aAAK,OAAO,GAAG;AACf,eAAO;;MAEX,uCAAuC;AACnC,eAAO,QAAQ,KAAK,iBAAiB;AACrC,YAAI,SAAS,QAAQ,UAAU,aAAa;AACxC,yBAAe,aAAa,KAAK;;AAErC,kBAAU,IAAI,UAAS,cAAc,WAAW,MAAM,KAAK;AAC3D,YAAI,KAAK,MAAM,oBAAoB,EAAE,SAAS;AAC1C,gBAAM,IAAI,MAAM,sBAAsB,EAAE;;AAE5C,aAAK,MAAM,oBAAoB,EAAE,QAAQ;AACzC,aAAK,OAAO,GAAG,KAAK;AACpB,eAAO;;MAEX;AACI,yBAAiB,KAAK,MAAM,WAAW,IAAI,EAAE,UACzC,KAAK,MAAM,WAAW,IAAI,EAAE,QAAQ,WACpC;AACJ,aAAK,MAAM;AACX,YAAI,EAAE,UAAU;AACZ,eAAK,MAAM;;AAEf,YAAI,aAAa;AACb,eAAK,MAAM;AAGX,sBAAY;AACZ,cAAI,EAAE,UAAU,eAAe,EAAE,UAAU;AACvC,oBAAQ,EAAE,OAAO,iBAAqB,EAAE;;AAE5C,eAAK,MAAM,WAAW,IAAI,EAAE,QAAQ;YAChC,SAAS,YAAW,KAAK;YACzB,OAAO,EAAE;YACT,OAAO,EAAE;YACT;YACA,UAAU;;AAEd,eAAK,MAAM,YAAY;;AAE3B,aAAK,MAAM,WAAW,IAAI,EAAE,QAAQ;AACpC,YAAI,CAAE,cAAa;AACf,eAAK,MAAM;;;MAGnB;AACI,YAAI,CAAC,KAAK,MAAM,WAAW,IAAI,EAAE;AAC7B;;AAEJ,aAAK,MAAM;AACX,YAAI,EAAE,UAAU;AACZ,eAAK,MAAM;;AAEf,qBAAa,KAAK,MAAM,WAAW,IAAI,EAAE;AACzC,yBAAiB,KAAK;AACtB,YAAI,YAAY;AAGZ,cAAI,EAAE,UAAU;AACZ,iBAAK,MAAM,YAAY,KAAK;;AAEhC,eAAK,MAAM;AACX,eAAK,QAAQ,YAAY,EAAE;AAC3B,eAAK,MAAM,WAAW,OAAO,EAAE;;AAG/B,eAAK,MAAM,WAAW,IAAI,EAAE,QAAQ;;;MAM5C;AACI,8BAAsB,KAAK,MAAM;AAC7B,oBAAU,KAAK,MAAM,oBAAoB;AACzC,eAAK,gBAAgB;;;MAG7B;AACI,aAAK,cAAc;AACnB,YAAI,KAAK,MAAM,oBAAoB,EAAE,SAAS;AAC1C,iBAAO,KAAK,MAAM,oBAAoB,EAAE;;;MAGhD;AACI,qBAAa,KAAK,QAAQ;AAC1B,aAAK,aAAa,KAAK,MAAM;AAC7B,aAAK,iBAAiB,KAAK,MAAM;AACjC,aAAK,WAAW,KAAK,MAAM;AAC3B,YAAI,KAAK,MAAM,mBAAmB;AAC9B,eAAK,aAAa;AAClB,cAAI,KAAK,WAAW;AAChB,iBAAK,UAAU;;AAEnB,eAAK,QAAQ,KAAK;;AAGtB,eAAO;;YAEL;AACF,aAAK,MAAM,YAAY;AACvB,2BAAmB,KAAK,MAAM;AAC9B,gCAAwB,KAAK,MAAM;AACnC,aAAK,MAAM,cAAc,UAAU;AACnC,aAAK,MAAM,cAAc,SAAS,MAAM;AACxC,aAAK,MAAM,YAAY;AACvB,aAAK,MAAM,cAAc,YAAY,KAAK,IAAI,GAAG,KAAK,MAAM,cAAc,QAAQ,IAAI,OAAK,EAAE;AAC7F,aAAK,MAAM,cAAc,WAAW,KAAK,MAAM,WAAW;AAC1D,aAAK,MAAM,cAAc,aACrB,KAAK,MAAM,aAAa;AAC5B,6BAAqB,KAAK,MAAM,cAAc;AAC1C,iBAAO,eAAe,MAAM,OAAO;AACnC,iBAAO,YAAY,MAAM,OAAO;;AAEpC,eAAO,KAAK,MAAM;;MAEtB;AACI,eAAO,KAAK,MAAM,gBAAgB,KAAK,KAAK,MAAM,gBAAgB;;MAEtE;AACI,yBAAiB,CAAE,IAAI,KAAK,MAAM,kBAAkB,YAAY,QAAQ,SAAS;AACjF,2BAAmB,aAAY;AAC/B,YAAI,cAAc;AACd,0BAAgB,WAAW;;AAE/B,YAAI,iBAAiB;AACjB,mBAAS,WAAW;AAGhB,kBAAM,IAAI,IAAI;AACV,kBAAI,MAAM;AACN,+BAAe,QAAQ;AACvB,6BAAa,qBAAyB,OAAO,MAAM,OAAO;AAC1D,uBAAO,KAAK,WAAW,MAAM,OAAO,OAAO,OAAO;;AAEtD,qBAAO;;AAIX,mBAAO,cAAc,IAAI,SA
AS,IAAI,MAAM,IAAI,IAAI,OAAO;;;AAGnE,aAAK,MAAM,WAAW,KAAK;;MAE/B;AACI,eAAO,OAAO;AACd,eAAO;;MAEX;AACI,YAAI,KAAK,MAAM,kBAAkB;AAC7B,eAAK,MAAM,aAAa;;AAE5B,aAAK,MAAM;;MAEf;AACI,aAAK,MAAM;;MAMf;AACI,0BAAkB;UACd,OAAO;UACP,MAAM;UACN,IAAI,KAAK,MAAM;;AAEnB,YAAI;AACA,oBAAU,OAAO;;AAErB,aAAK,MAAM,WAAW,KAAK;AAC3B,aAAK,MAAM,cAAc;;MAM7B;AACI,uCAA+B,uBAAsB;AACrD,0CAAkC,IAAI,IAAI,uBAAuB,IAAI,OAAK,EAAE;AAE5E,qBAAa,GAAG,IAAI,KAAK,MAAM,YAAY,MAAM,QAAQ;AACrD,0BAAe,KAAK,MAAM,YAAY,MAAM;AAC5C,cAAI,CAAC,QAAO,QAAQ,CAAC,0BAA0B,IAAI,QAAO;AACtD,oBAAO;;;AAGf,yBAAiB,KAAK,MAAM,WAAW;AACvC,aAAK,MAAM,cAAc,KAAK,MAAM,WAAW,WAAW,IACtD,OACA,KAAK,MAAM,WAAW,KAAK,MAAM,WAAW,SAAS;AAEzD,+BAAuB,QAAQ;AAG3B,cAAI,CAAC,QAAO,QAAQ,QAAO,YAAY,SAAS;AAC5C,iBAAK,MAAM;;;;MAUvB,wCAAwC;AACpC,gBAAY,GAAG,SAAS,GAAG,MAAM;AACjC,YAAI,MAAM,QAAQ,GAAG,UAAU;AAC3B,gBAAM,IAAI,MAAM,0CAA0C,GAAG;;AAEjE,kBAAU,KAAK,UAAU,MAAM,KAAK,aAAa,MAAM,KAAK,WAAW,MAAM,KAAK,KAAK,WAAW;AAClG,gBAAY,aAAa,SAAQ,MAAM;AAEvC,6BAAqB,sBAAqB,KAAK,MAAM,YAAY,IAAI;AACrE,YAAI,CAAC,oBAAoB,aAAa,WAAW,KAAK,GAAG,SAAS;AAC9D,gBAAM,IAAI,MAAM;;AAIpB,eAAO,KAAK,KAAK,YAAY;AACzB,yCAA+B;AAC/B,iCAAuB,EAAE,MAAO,MAAM,OAAQ,MAAK,EAAE,SAAS;AAE9D,kCAAuB,wBAAwB,cAE/C,QAAK,KAAK,KAAK,KAEf;AACA,yBAAc,GAAG,IAAI,OAAK,uBAAuB,EAAE;AACnD,cAAI,KAAK,MAAM,kBAAkB;AAG7B,iBAAK,MAAM,WAAW,QAAQ;AAC1B,oCAAqB,KAAK;AACtB,wBAAO;;;AAGf,iBAAK,MAAM,aAAa;;AAE5B,iBAAO,CAAE,OAAO,GAAG;;;MAG3B;AACI,gBAAY,YAAgB,IAAI,MAAM;AACtC,eAAO;AACH,kBAAY,OAAO,MAAM,OAAK,aAAa,UAAS,MAAM;AAE1D;AACA,2BAAiB;AACjB,iBAAO,QAAQ;AACX,qBAAS,KAAK;;AAElB,iBAAO,KAAK,cAAc;AACtB,kBAAM,EAAE,GAAG,CAAC,GAAG,QAAQ;AACvB,oBAAY,IAAI,iBAAiB,SAAQ,MAAM;AAE/C,oBAAY,YAAgB,IAAI,WAAW,MAAM;AAEjD,mBAAO,IAAI;aACZ,UAAU;AACT,4BAAgB,IAAI,SAAS,IAAI;AACjC,2BAAc,MAAM,QAAQ,WAAW,UAAU,CAAC;AAClD,oBAAY,OAAM,WAAW,OAAO,QAAQ,MAAM;AAGlD,oBAAY,OAAM,MAAM,OAAK,aAAa,UAAS,MAAM;AAGzD,4BAAgB;AAChB,mBAAM,QAAQ;AACV,sBAAQ,KAAK,MAAM;;AAEvB,mBAAO;;;;MAInB;AAEI,qBAAa,KAAK,MAAM,WAAW,IAAI;AACvC,eAAO,KAAK,QAAQ,SAAS;;MAEjC;AAEI,qBAAa,KAAK,MAAM,WAAW,IAAI;AACvC,eAAO,KAAK,QAAQ,KAAK;;YAEvB;AACF,sBAAc;AACd,2BAAmB,MAAM,KAAK,QAAQ,KAAK;AAC3C,mBAAW,SAAS,SAAQ;AAC5B,eAAO;;MAQX;AACI,YAAI,KAAK,MAAM,eAAe;AAC1B,iBAAO,UAAU,KAAK,MAAM,YAAY;AACxC,eAAK,MAAM,YAAY,MAAM,KAAK;;AAEtC,eAAO;;UAEP;AACA,eAAO,KAAK,MAAM;;MAMtB;AAEI,aAAK;AACL,aAAK,MAAM;AACX,aAAK,IAAI;AACT,aAAK,QAAQ,IAAI;AACjB,kCAA0B,KAAK;AAC3B,eAAK,yBAAyB;AAC9B,eAAK,SAAS,aAAa;AAC3B,iBAAO,KAAK,SAAS;;AAEzB,aAAK,cAAc;AACnB,aAAK,kBAAkB;AACvB,aAAK,qBAAqB;;;AAGlC,YAAO,eAAe;AACtB,YAAO,iBAAiB;AACxB;AACI,qBAAe,oBAAmB,eAAc,QAAQ;AACxD,aAAO,QAAO,WAAW,QAAQ,OAAO;;AAErC;AACH,iBAAW;AACX,UAAI,GAAG,aAAa;AAChB,6BAAoB,IAAI,aAAY;AACpC,WAAG,YAAY,IAAI,QAAO;;AAE9B,4BAAqB,GAAG,UAAU;AAGlC,wBAAiB,MAAM,GAAG;AAC1B,aAAO,GAAG;;AAEP,oBAAe;AAOf;AAEH,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,oBAAY,SAAQ,IAAI,GAAG;AAC3B,aAAK,CAAC,GAAG;AACT,eAAO;SACR,QAAQ,MAAqB;;ACp7BpC;;;;;;;;;;;;;;;;AAiBA;AACI,aAAO,OAAO,cAAc,eAAe,aAAa;;AAErD;AACH,UAAI;AAEA,kBAAU,UAAU,aAAa,UAAU,UAAU,OAAO;AAE5D,eAAO,2TACF,KAAK,MAEN,0kDACK,KAAK,EAAE,OAAO,GAAG;;AAE9B,aAAO;;AAEJ;AACH,aAAQ,OAAO,WAAW,eAAe,OAAO,YAAY,QAEvD,OAAO,sBAAsB;;;;;;;ACpCtC;;;;;;;;;;;;;;;;AAmBA,iBAAY;AAKZ,SAAI,aAAa,SAAS,MAAM,OAAO;AACnC,UAAI;AACA,gBAAQ,KAAK;;;AAMrB,SAAI,aAAa,cAAc,MAAM;AAErC,SAAI,aAAa,WAAW,MAAO,OAAO,YAAY,eACjD,OAAO,QAAQ,aAAa,eAC5B,OAAO,QAAQ,SAAS,SAAS;AAEtC,SAAI,aAAa,aAAa,MAAM,OAAO,cAAc,eAAe,aAAa,QACjF,UAAU,aAAa,QAAQ,SAAS,KAAK,UAAU,cACvD,aAAa,KAAK,UAAU;AAKhC,SAAI,aAAa,QAAQ,MAAM;AAK/B,SAAI,aAAa,sCAAsC,MAAM,KAAI,QAAQ;AAEzE,SAAI,aAAa,gCAAgC,MAAM;AAEvD,SAAI,aAAa,WAAW,MAAM;ACtDlC;;;;;;;;;;;;;;;;AAoBO;AACH,sBAAgB;AAChB,UAAI,cAAa;AACb,eAAO,UAAU,WAAW,KAAK,CAAC,IAAI;;AAE1C,UAAI,CAAC,MAAM,Q
AAQ;AACf,eAAO;;AAEX,oBAAc;AACd,aAAO,MAAM,QAAQ,cACjB,cAAa,cAAc,UAAU;AACrC,cAAM,KAAK,UAAU;AACrB,oBAAY,UAAU;;AAE1B,UAAI,MAAM,QAAQ,QACd,OAAM,QAAQ;AACd,oCAA2B,KAAK,OAAO;;AAE3C,aAAO;;AAEX;AACI,gBAAU,WAAW;AACrB,UAAI,CAAE,MAAM,QAAQ,QAAS,CAAC,cAAa;AACvC,gBAAO,MAAM,WAAW,GAAG,MAAM,eAAe,QAAQ,KAAK,+DACjB,MAAM;AAClD;;AAEJ,cAAO,MAAM,SAAS,GAAG,MAAM,eAAe,QAAQ,KAAK,oDACjC,IAAI;AAC9B,cAAO,IAAI,WAAW,MAAM,IAAI,MAAM,eAAe,QAAQ,KAAK,sBAAsB,MAAM,wBACrE,IAAI;AAC7B,uBAAiB,MAAM,MAAM;AAC7B,mBAAa,GAAG,IAAI,IAAI,QAAQ,EAAE;AAC9B,oCAA2B,IAAI,IAAI,UAAU,QAAQ,OAAO;;;AAGpE;AACI,UAAI,iBAAiB;AACjB;;AAEJ,UAAI,kBAAkB,aAAa,kBAAkB,eACjD,kBAAkB,aAAa,gBAAgB;AAC/C,cAAM,IAAI,MAAM,aAAa,uBAAuB,yBAC1C,iCAAiC;;;AAG5C,uEAAkE;AACrE,UAAI,aAAa;AACb,qBAAY,cAAc,EAAE,OAAO,SAAS;AAC5C,eAAO;;AAEX,0BAAoB,YAAW;AAG/B,UAAI,kBAAkB,YAClB,CAAC,QAAQ,SAAS,WAAW,QAAQ,iBAAiB;AACtD,wBAAgB;;AAEpB,mBAAY,cAAc,eAAe,SAAS;AAClD,UAAK,KAAK,QACL,CAAC,cAAa,MAAM,CAAC,MAAM,QAAQ,MAAM,OAAO,MAAM,YACnD,OAAO,MAAM,aAAa,OAAO,MAAM;AAC3C,qBAAa,KAAK,OAAO,SAAS,EAAE,YAAY;AAChD,cAAM,IAAI,MAAM,aAAa,uBAAuB,0DACd;;AAE1C,4BAAsB,YAAW,GAAG;AACpC,UAAI,CAAC,cAAa,MAAM,CAAC,MAAM,QAAQ;AACnC,YAAI,CAAC;;AAET,6BAAuB;AACvB,qBAAe,kBAAkB,WAC7B,cAAa,GAAG,iBAChB,SAAQ,GAAG,IAAI;AACnB,aAAO,QAAO,WAAW,QAAQ,eAAe;;AAE7C,6EAAyE;AAC5E,UAAI,CAAC,MAAM,QAAQ;AACf,cAAM,IAAI,MAAM,YAAY,qBAAqB;;AAGrD,sBAAgB;AAChB,aAAO,QAAQ,IAAI,UAAU,iBAAgB,GAAG,GAAG,WAAW,MAAM,eAAe;;ACtGvF;;;;;;;;;;;;;;;;AAkBY,6BAAmB;AAMxB;AACH,mBAAa,OAAO,KAAK;AACzB,UAAI,KAAK,WAAW;AAChB,cAAM,IAAI,MAAM,yGAET,KAAK;;AAEhB,mBAAa,KAAK;AAClB,iBAAW,EAAE;AAEb,UAAI,OAAO,SAAS;AAChB,iBAAS,OAAO,UAAU,GAAG,OAAO,SAAS;;AAGjD,eAAS,SAAS;AAElB,iBAAW;AACP,gBAAO,WAAW;AAClB;AACI,yBAAe,GAAG,GAAG;AACrB,cAAI,WAAU;AACV,oBAAQ,MAAM;;AAElB,kBAAO,SAAS;AAChB,iBAAO;;AAGP,kBAAO,SAAS;AAChB,gBAAM;;;AAGd,aAAO,eAAe,IAAI,QAAQ,CAAE,OAAO,QAAQ,cAAc;AAEjE,aAAO;;ACzDX;;;;;;;;;;;;;;;;AAyCA;AACI,oBAAc,iBAAgB,OAAM,QAAQ;AAC5C,oBAAc,iBAAgB,OAAM,QAAQ;AAC5C,yBAAuB,MAAM,OAAO,MAAM,OAAO,yBAAyB,MAAM,aAAa,MAAM;AAEnG,sBAAgB;AACZ,eAAO,SAAQ,QAAQ,OAAO;;AAElC,qBAAe,CAAE,MAAM,OAAO,MAAM;AACpC,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAqB;;AAE1D,qBAAW,IAAG,CAAE;ACpD5B;;;;;;;;;;;;;;;;AAmBO;AACH,UAAI,SAAS;AACT,gBAAQ,YAAW;;AAEvB,UAAI,UAAU;AACV,cAAM,IAAI,MAAM;;AAGpB,UAAI,CAAC,cAAa,WAAW,CAAC,MAAM,QAAQ,WACxC,OAAO,WAAW,YAAY,OAAO,WAAW,aAChD,OAAO,WAAW;AAClB,cAAM,IAAI,MAAM;;AAGpB,UAAI,SAAS;AACT,4CAAmC;AACnC,6BAAqB,eAAc;AACnC,6BAAqB,eAAc;AACnC,gBAAO,iBAAiB,cAAc,MAAM,iCAAiC,kCACtE,+BAA+B;AACtC,qBAAa,GAAG,IAAI,cAAc,QAAQ,EAAE;AACxC,2BAAiB,cAAc;AAC/B,oCAA0B,MAAM,cAAc,SAAS,IACnD,aAAa,eAAc,MAAM,MAAM,MACvC;AACJ,kBAAO,cAAc,OAAO,MAAM,MAAM,CAAC,mBAAmB,MAAM,gDAC1D,qDACM;;;AAGtB,UAAI,CAAC,cAAa,WAAW,CAAC,MAAM,QAAQ;AACxC,iBAAS,CAAC;;AAEd,cAAQ,SAAS;AACjB,eAAS,UAAU,WACf,cAAa,QAAQ,SACrB,SAAQ,QAAQ,IAAI;AACxB,aAAO,QAAO,WAAW,QAAQ,OAAO;;ACxD5C;;;;;;;;;;;;;;;;AA8CO;AACH,4BAAsB,YAAW,QAAQ;AACzC,aAAO,YAAW,QAAQ,OAAO,eAAe;;AChDpD;;;;;;;;;;;;;;;;AAoBO,iCAA6B;MAChC,SAAW;MACX,SAAW;MACX,OAAS;MACT,QAAU;MACV,OAAS;MACT,MAAQ;MACR,WAAa;;AC3BjB;;;;;;;;;;;;;;;;AAqBA,oCAAgC;AAkBzB;AAEH,oBAAc;AACd,2BAAqB;AACrB,oBAAc,MAAM,QAAQ,WACxB,QAAQ,IAAI,aAAU,QAAO,QAC7B,OAAO,KAAK;AAChB,mBAAa,GAAG,IAAI,MAAM,QAAQ,EAAE;AAChC,qBAAa,MAAM;AACnB,kBAAU,MAAM,QAAQ,WAAW,QAAQ,GAAG,SAAS,QAAQ;AAC/D,YAAI,EAAE,UAAU,aAAa,EAAE,UAAU,WAAW,EAAE,UAAU,UAC5D,EAAE,UAAU,YAAY,EAAE,UAAU;AACpC,gBAAM,IAAI,MAAM,gCAAgC,UAAU,EAAE;;AAEhE,qBAAa,CAAE,MAAM,OAAO,EAAE,OAAO,OAAO,EAAE;AAC9C,YAAI,EAAE,UAAU;AACZ,4BAAkB,IAAI,QAAQ;AAC1B,yBAAa,MAAM,EAAE;AACrB,kCAAsB,KAAK,OAAO,WAAU,KAAI,EAAE,QAAQ,KACtD,0BAA0B,KAAK;AACnC,0BAAc,IAAI,WAAW;AAC7B,yBAAa;AACb,0BAAa,GAAG,KAAI,KAAK,QAAQ;AAC7B,0BAAY,KAAK;AACjB,oCAAsB,IAAI,WAAW,IAAI,YAAY,CAAC,IAAI,SAAS
;AACnE,oBAAM,IAAI,eAAe;AACzB,wBAAU;AACV,oBAAM,IAAI,KAAK;AACf,wBAAU,IAAI;;AAElB,oBAAQ;;AAEZ,uBAAa,KAAK;;AAGlB,uBAAa,KAAK,EAAE;;AAExB,YAAI,SAAS;AACT,eAAK,QAAQ;;AAEjB,cAAM,KAAK;;AAEf,2BAAqB,MAAM,QAAQ,IAAI;AACvC,aAAO,CAAE,MAAM,uBAAuB,eAAe;;AAiBlD;AAEH,kBAAY;AACZ;AACA,mBAAa;AACb,yBAAmB;AACf,qBAAa,KAAK;AAClB,sBAAc,KAAK;AACnB,sBAAc,KAAK;AACnB,qBAAa,eAAc;AAC3B;AACA,YAAI,kBAAkB;AAClB,+BAAqB,KAAK;AAC1B,cAAI,aAAa,UAAU,WAAW,aAAa,UAAU;AACzD,gBAAI,CAAE,UAAS,gBAAgB,WAAW;AACtC,oBAAM,IAAI,MAAM,UAAU,KAAK,0BAA0B,aAAa;;qBAIrE,aAAa,UAAU;AAC5B,gBAAI,UAAU;AACV,oBAAM,IAAI,MAAM,UAAU,KAAK,0BAA0B,aAAa,yDACf;;;AAI3D,kBAAM,IAAI,MAAM,UAAU,KAAK,uCACL,aAAa;;AAI3C,yCAA+B,qBAAqB,aAAa;AACjE,6BAAmB,QAAO,MAAM,QAAQ,SAAS,OAAO;AACxD,iCAAwB,aAAa,UAAU,UAC3C,IAAI,WAAW,cACf,IAAI,YAAY;AACpB,cAAI,UAAU;AACV,gBAAI,aAAa,UAAU,WAAW,aAAa,UAAU;AACzD,uBAAS,IAAI,aAAa,eAAe;AACzC,2BAAa,GAAG,IAAI,eAAe,QAAQ;AACvC,0BAAU,eAAe;AACzB,uBAAO,KAAK,IAAI,aAAa,QAAQ,aAAa;;uBAGjD,aAAa,UAAU;AAC5B,kBAAI,kBAAkB;AAClB,gCAAgB;;AAEpB,uBAAS,cAAc;;AAGvB,oBAAM,IAAI,MAAM,iCAAiC,aAAa;;qBAI7D,UAAU;AACf,gBAAI,aAAa,UAAU,WAAW,aAAa,UAAU;AACzD,oBAAM,IAAI,MAAM,iCAAiC,aAAa;;AAGlE,qBAAS,IAAI,WAAW,eAAe;AACvC,yBAAa,GAAG,IAAI,eAAe,QAAQ;AACvC,wBAAU,eAAe;AACzB,qBAAO,KAAK,KAAK,MAAM,IAAI,aAAa,QAAQ,aAAa;;;AAIjE,kBAAM,IAAI,MAAM,gCAAgC,UAAU;;AAE9D,oBAAU,OAAO;mBAEZ,UAAU;AACf,wBAAa,eAAc,KAAK;AAChC,mBAAS;AACT,uBAAa,GAAG,IAAI,OAAM;AACtB,+BAAmB,IAAI,YAAY,QAAO,MAAM,QAAQ,SAAS,0BAA0B;AAC3F,sBAAU;AACV,0BAAc,IAAI,WAAW,QAAO,MAAM,QAAQ,SAAS;AAC3D,mBAAO,KAAK;AACZ,sBAAU;;;AAId,8BAAoB,qBAAqB;AACzC,6BAAmB,QAAO,MAAM,QAAQ,SAAS,OAAO;AACxD,cAAI,UAAU;AACV,qBAAS,IAAI,aAAa;qBAErB,UAAU;AACf,qBAAS,IAAI,WAAW;qBAEnB,UAAU;AACf,qBAAS,IAAI,WAAW;qBAEnB,UAAU;AACf,qBAAS,IAAI,aAAa;AAC1B,0BAAa,IAAI,aAAa,OAAO,SAAS;AAC9C,2BAAc,IAAI,aAAa,OAAO,SAAS;AAC/C,yBAAa,GAAG,IAAI,MAAK,QAAQ;AAC7B,oBAAK,KAAK,OAAO,IAAI;AACrB,qBAAM,KAAK,OAAO,IAAI,IAAI;;AAE9B,+BAAmB,QAAO,OAAM,OAAO;AACvC,gCAAoB,QAAO,QAAO,OAAO;AACzC,gBAAI,QAAQ,SAAQ,YAAY;AAChC,uBAAW;AACX,wBAAY;;AAGZ,kBAAM,IAAI,MAAM,gCAAgC,UAAU;;AAE9D,oBAAU,OAAO;;AAErB,YAAI,UAAU;AACV,cAAI,QAAQ,QAAO,QAAQ,OAAO;;;AAG1C,aAAO;;AAKJ;AAEH,UAAI,OAAO;AACP,cAAM,IAAI,MAAM,wBAAwB,KAAK,UAAU;;AAE3D,4BAAsB;AAQtB,2BAAqB;AACrB,SAAG,QAAQ;AACP,2BAAmB,EAAE;AAErB,qBAAa,KAAK,EAAE,eAAe,EAAE,OAAO,aAAa,IACrD,IAAI,EAAE,YAAY;AACtB,YAAI,CAAE,cAAa,gBAAgB,aAAa,cAC5C,aAAa;AACb,gBAAM,IAAI,MAAM,mCAAmC,EAAE,YAAY;;;AAIzE,gBAAU,IAAI,WAAW;AACzB,mBAAa;AACb,mBAAa,QAAQ;AACjB,UAAE,IAAI,IAAI,WAAW,EAAE,SAAS;AAChC,kBAAU,EAAE;;AAEhB,aAAO,EAAE;;AAGb,0BAAsB,OAAO,WAAW,eACnC,QAAO,SAAS,eAAe,OAAO,SAAS,eAC5C,OAAO,SAAS;AAUjB;AACH,UAAI;AACA,eAAO,OAAO,WAAW;;AAE7B,aAAO,IAAI,KAAK,CAAC,MAAM;;AAQpB;AACH,UAAI;AACA,eAAO,OAAO,KAAK,SAAQ,SAAS;;AAExC,kBAAY,IAAI,WAAW;AAC3B,cAAQ;AACR,mBAAa,OAAO,IAAI,QAAQ,IAAI,GAAG;AACnC,aAAK,OAAO,aAAa,IAAI;;AAEjC,aAAO,KAAK;;AAQT;AACH,UAAI;AACA,oBAAY,OAAO,KAAK,KAAK;AAC7B,eAAO,IAAI,OAAO,MAAM,IAAI,YAAY,IAAI,aAAa,IAAI;;AAEjE,gBAAU,KAAK;AACf,sBAAe,IAAI,WAAW,EAAE;AAChC,mBAAa,GAAG,IAAI,EAAE,QAAQ,EAAE;AAC5B,gBAAO,IAAI,CAAC,EAAE,WAAW,KAAK;;AAElC,aAAO,QAAO;;AAQX;AACH,UAAI,QAAQ,WAAW;AACnB,eAAO,QAAQ;;AAEnB,4BAAsB;AACtB,cAAQ,QAAQ;AACZ,2BAAmB,QAAO;;AAE9B,mBAAa,IAAI,WAAW;AAC5B,mBAAa;AACb,cAAQ,QAAQ;AACZ,aAAK,IAAI,IAAI,WAAW,UAAS;AACjC,kBAAU,QAAO;;AAErB,aAAO,KAAK;;AAST;AACH,wBAAkB;AAClB,aAAO,KAAK;AACZ,aAAO,KAAK,SAAS;AACjB,eAAO,KAAK,MAAM,GAAG,KAAK,SAAS;;AAEvC,oBAAc,KAAK,MAAM;AACzB,aAAO,MAAM,MAAM,SAAS;;AAOzB;AACH,UAAI,eAAe,yBAAyB;AACxC,cAAM,IAAI,MAAM;;AAEpB,aAAO;QACH,WAAW,IAAI;QACf,mBAAmB;QACnB,oBAAoB,eAAe,iBAAiB,OAChD,IACA,iBAAiB,KAAK,UAAU,eAAe;QACnD,kBAAkB,eAAe,eAAe,OAC5C,IACA,iBAAiB,KAAK,UAAU,eAAe;QACnD,iBAAiB,eAAe,cAAc,OAC1C,IACA,eAAe,WAAW;;;AAS
tC;AACI,8BAAwB;AACpB,gBAAQ,KAAK;AACb,gBAAQ;AACR,eAAQ,KAAI,aAAgB;AACxB,eAAK;AACL,gBAAM;;AAEV,aAAK,CAAC;AACN,aAAK;AACL,eAAO,IAAI;;AAEf,2BAAqB,IAAI,YAAY;AACrC,mBAAa,KAAK;AAClB,mBAAa,GAAG,IAAI,MAAM;AACtB,qBAAa,KAAK,gBAAgB;;AAEtC,mBAAa,MAAM,IAAI,MAAM;AACzB,qBAAa,KAAK,YAAe,KAAI,QAAS;;AAElD,aAAO;;AAQX;AACI,4BAAsB,IAAI,YAAY;AACtC,oBAAc,KAAK;AACnB,oBAAc,MAAM;AACpB,oBAAc,MAAM;AACpB,oBAAc,MAAM;AACpB,mBAAa,GAAG,IAAI,IAAI;AACpB,sBAAc,KAAK,KAAK;;AAE5B,mBAAa,IAAI,IAAI,IAAI;AACrB,sBAAc,KAAK,aAAe,KAAI,MAAO;;AAEjD,aAAO;;AAQX;AACI,0BAAoB,IAAI,YAAY;AACpC,mBAAa,GAAG,IAAI,IAAI;AACpB,oBAAY,KAAK;;AAErB,kBAAY,KAAK,YAAY,MAAM;AACnC,aAAO;;AASJ;AAIH,2BAAqB;AACrB,4BAAsB;AACtB,0BAAoB;AACpB,aAAO;AACH,wBAAe,IAAI,YAAY,IAAI,eAAe;AAClD,iCAAyB,IAAI,YAAY;AACzC,0BAAiB,GAAG,SAAQ,eAAe,QAAQ;AAC/C,8BAAoB,eAAe;AACnC,8BAAoB,aAAa,YAAY,eAAe,MAAO,eAAc,SAC7E,cAAc,eAAe;AACjC,2BAAiB,UAAS;;AAE9B,eAAO,IAAI,aAAa;;;ACtchC;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,cAAc;AACnB,aAAK,cAAc;;aAEhB;AACH,YAAI,iBAAiB,YAAY;AAC7B,2BAAiB,WAAW,IAAI;;AAEpC,eAAO,iBAAiB;;aAQrB;AACH,yBAAiB,cAAc,YAAY,KAAK;;aAQ7C;AACH,yBAAiB,cAAc,YAAY,KAAK;;aAU7C;AACH,eAAO,iBAAiB,YAAY,KAAK;;aAUtC;AACH,eAAO,iBAAiB,YAAY,KAAK,QAAQ;;aAE9C;AACH,8BAAsB;AACtB,wBAAgB,gBAAgB,SAC5B,iBAAiB,cAAc,cAC/B,iBAAiB,cAAc;AACnC,gBAAQ,QAAQ;AACZ,0BAAgB,OAAO,KAAK;AAC5B,cAAI,YAAY;AACZ,0BAAc,KAAK;;;AAG3B,eAAO;;;AAGR,+BAA2B,gBAAgB,iBAAiB,mBAAmB;AAC/E,+BAA2B,gBAAgB,iBAAiB,mBAAmB;AAC/E,4BAAwB,SAAS,iBAAiB,gBAAgB;AAClE,4BAAwB,sBAAsB,iBAAiB,gBAAgB,KAAK;ACpF3F;;;;;;;;;;;;;;;;AAoBA,0BAAsB;AACtB,6BAAyB;AAIzB,6BAAyB;AAIzB,4BAAwB;AAIjB;AACH,yBAAmB;AACnB,aAAO,IAAI,QAAQ;AACf,8BAAsB,WAAW,eAAe;AAChD,sBAAc,YAAY,MAAM;AAChC,sBAAc,UAAU,WAAS,OAAO;;;AAGhD;AACI,UAAI,CAAC,OAAM,QAAQ;AAIf,cAAM,IAAI,MAAM;;AAIpB,wBAAkB,OAAO,WAAW,cAAc,OAAO;AACzD,sBAAgB,UAAU,aAAa,UAAU,gBAC7C,UAAU,mBAAmB,UAAU,eACvC,UAAU;AACd,UAAI,WAAW;AACX,cAAM,IAAI,MAAM;;AAEpB,aAAO;;AAEX;AACI,iBAAW,YAAY;AACvB,SAAG,kBAAkB,kBAAkB,CAAE,SAAS;AAClD,SAAG,kBAAkB,iBAAiB,CAAE,SAAS;;;MAQjD;AACI,aAAK,YAAY;AACjB,YAAI,aAAa,QAAQ,CAAC;AACtB,gBAAM,IAAI,MAAM;;AAEpB,aAAK,YAAY;;YAEf;AAEF,YAAI,eAAe,yBAAyB;AACxC,gBAAM,IAAI,MAAM;;AAGpB,eAAO,KAAK,eAAe,KAAK,WAAW;;YAEzC;AACF,eAAO,KAAK,eAAe,KAAK;;MAgBpC;AACI,eAAO,IAAI,QAAQ;AACf,8BAAoB,KAAK,UAAU,KAAK,eAAe;AACvD,sBAAY,kBAAkB,MAAM,cAAc;AAClD,sBAAY,YAAY;AACpB,uBAAW,YAAY;AACvB,gBAAI,kBAAkB;AAElB,8BAAgB,GAAG,YAAY,kBAAkB;AACjD,iCAAmB,QAAQ,YAAY;AACvC,iCAAmB,WAAW,IAAI,KAAK;AACvC,yBAAW,YAAY;AACnB,oBAAI,WAAW,UAAU;AACrB,qBAAG;AACH,yBAAO,OAAO,IAAI,MAAM,gCAAgC,KAAK;;AAI7D,0BAAQ,WAAW,OAAO;;;AAGlC,yBAAW,UAAU;AACjB,mBAAG;AACH,uBAAO,OAAO,WAAW;;AAE7B,sBAAQ,aAAa,MAAM,GAAG;;AAI9B,yCAA2B,6BAA6B;AAExD,6BAAe,GAAG,YAAY,iBAAiB;AAC/C,8BAAgB,OAAO,YAAY;AACnC,qCAAuB,UAAU,IAAI,CAAE,WAAW,KAAK,WAAW;AAClE;AACA,6BAAe,YAAY;AAEvB,0BAAU,GAAG,YAAY,kBAAkB;AAC3C,mCAAmB,QAAQ,YAAY;AACvC,wCAAwB,WAAW,IAAI;kBACnC,WAAW,KAAK;kBAChB;kBACA;;AAEJ,gCAAgB,YAAY,MAAM,QAAQ,CAAE;AAC5C,gCAAgB,UAAU;AAGtB,8BAAY,OAAO,YAAY;AAC/B,4CAA0B,UAAU,OAAO,KAAK;AAChD,oCAAkB,YAAY;AAC1B,uBAAG;AACH,2BAAO,OAAO,gBAAgB;;AAElC,oCAAkB,UAAU;AACxB,uBAAG;AACH,2BAAO,OAAO,gBAAgB;;;;AAI1C,6BAAe,UAAU;AACrB,mBAAG;AACH,uBAAO,OAAO,eAAe;;AAEjC,qBAAO,aAAa;AAChB,oBAAI,WAAW;AACX,qBAAG;;AAGH,0BAAQ,aAAa,MAAM,GAAG;;;;;AAK9C,sBAAY,UAAU,WAAS,OAAO,YAAY;;;;AAI9D,qBAAiB,aAAa;AACvB,4BAAwB;AAC3B,UAAI,CAAC,OAAM,QAAQ;AACf,eAAO;;AAGP,YAAI,CAAC,MAAM,QAAQ,QAAQ,IAAI,WAAW,iBAAiB;AACvD,iBAAO,iBAAiB,IAAI,MAAM,iBAAiB,WAAW;;AAG9D,iBAAO;;;;AAInB,qBAAiB,mBAAmB;AACpC,qBAAiB,mBAAmB;AAkB7B;AACH,aAAO,IAAI,iBAAiB;;AAEhC;AACI,aAAO,IAAI,WAAW,iBAAiB,cACnC,IAAI,MAAM,iBAAiB,WAAW,UACtC;;;MAGJ;AACI,aAAK,YAAY;;YAEf;AACF,eAAO,IAAI,QAAQ;AACf,8BAAoB,KAAK,UAAU,KAAK,eAAe;AACvD,sBAAY,kBA
CAAE,GAAG;AACpB,aAAO,QAAO,cAAc,cAAa,SAAQ,MAAM,KAAK,QAAQ,MAAiB;;AAE7E,kBAAS,IAAG,CAAE;ACrC1B;;;;;;;;;;;;;;;;AAgCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc,cAAW,SAAQ,MAAM,KAAK,QAAQ,MAAiB;;AAE3E,oBAAS,IAAG,CAAE;ACrC1B;;;;;;;;;;;;;;;;AAkDA;AACI,eAAS,iBAAgB,GAAG,KAAK;AACjC,eAAS,iBAAgB,GAAG,KAAK;AACjC,OAAC,IAAI,MAAM,gBAAe,IAAI;AAC9B,UAAI,GAAG,UAAU;AACb,aAAK,MAAK,IAAI;AACd,aAAK,MAAK,IAAI;;AAElB,kCAA2B,GAAG,OAAO,GAAG;AACxC,sBAAgB;AACZ,oBAAY,SAAQ,QAAQ,IAAI;AAChC,aAAK,CAAC,IAAI;AACV,eAAO;;AAEX,qBAAe,CAAE,GAAG,IAAI,GAAG;AAC3B,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAqB;;AAE1D,oBAAW,IAAG,CAAE;ACnE5B;;;;;;;;;;;;;;;;AAiCO;AACH,UAAM,eAAa,UAAU,UAAU,YAAa,MAAM,QAAQ,WAC9D,UAAU;AACV,cAAM,IAAI,MAAM;;AAGpB,UAAI,UAAU,YAAY,cAAa,UACnC,CAAE,kBAAiB;AACnB,cAAM,IAAI,MAAM;;AAGpB,oBAAc;AACd,4BAAsB;AACtB,aAAO,YAAW,OAAO,OAAO,eAAe;;AC9CnD;;;;;;;;;;;;;;;;AAsCA,mCAA+B;AAC3B,iBAAW,iBAAgB,GAAG,KAAK;AACnC,aAAO,QAAQ,KAAI,QAAO,QAAQ,KAAK;;AAE/B,sBAAa,IAAG,CAAE;AC1C9B;;;;;;;;;;;;;;;;AAoCA;AACI,eAAS,iBAAgB,GAAG,KAAK;AACjC,eAAS,iBAAgB,GAAG,KAAK;AACjC,OAAC,IAAI,MAAM,gBAAe,IAAI;AAC9B,kCAA2B,GAAG,OAAO,GAAG;AACxC,sBAAgB,cAAW,SAAQ,KAAK,IAAI;AAC5C,qBAAe,CAAE,GAAG,IAAI,GAAG;AAC3B,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB;;AAEtD,iBAAQ,IAAG,CAAE;AC7CzB;;;;;;;;;;;;;;;;AAqCA;AACI,eAAS,iBAAgB,GAAG,KAAK;AACjC,eAAS,iBAAgB,GAAG,KAAK;AACjC,OAAC,IAAI,MAAM,gBAAe,IAAI;AAC9B,kCAA2B,GAAG,OAAO,GAAG;AACxC,sBAAgB;AACZ,oBAAY,SAAQ,UAAU,IAAI;AAClC,aAAK,CAAC,IAAI;AACV,eAAO;;AAEX,qBAAe,CAAE,GAAG,IAAI,GAAG;AAC3B,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB;;AAEtD,sBAAa,IAAG,CAAE;AClD9B;;;;;;;;;;;;;;;;AA8BO;AACH,UAAI,OAAO;AACP,cAAM,IAAI,MAAM;;AAEpB,oBAAc,CAAE,OAAO,MAAM;AAC7B,aAAO,QAAO,cAAc,cAAW,SAAQ,SAAS,OAAO,MAAM,MAAM,IAAiB,MAAiB,UAAU;;ACnC3H;;;;;;;;;;;;;;;;AAqCA,0DAAsD,UAAU,WAAW,UAAU;AACjF,iBAAW,iBAAgB,GAAG,KAAK;AACnC,cAAY,GAAG,SAAS,KAAK,GAAG,SAAS,GAAG,MAAM;sBAChC,GAAG;AACrB,cAAY,OAAW,cAAc,MAAM,2FACR;AACnC,gBAAU;AACV,yBAAmB;AACnB,UAAI,GAAG,SAAS;AACZ,uBAAe;AACf,cAAM,SAAQ,IAAI,CAAC,GAAG,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM;;AAE7D,sBAAgB;AACZ,kBAAU,SAAQ,6BAA6B,KAAK,aAAa,MAAM,OAAO;AAC9E,aAAK,CAAC,KAAK;AACX,eAAO;;AAEX,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,aAAa,MAAM,OAAO;AAC1C,kBAAY,QAAO,cAAc,SAAS,QAAQ,MAAiB,KAAK;AACxE,UAAI;AACA,eAAO,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAG3D,eAAO;;;AAGH,uCAA8B,IAAG,CAAE;AChE/C;;;;;;;;;;;;;;;;AAgCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,oBAAY,SAAQ,IAAI;AACxB,aAAK,CAAC;AACN,eAAO;SACR,QAAQ,MAAiB;;AAEpB,iBAAO,IAAG,CAAE;ACzCxB;;;;;;;;;;;;;;;;AAiCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,oBAAY,SAAQ,MAAM;AAC1B,aAAK,CAAC;AACN,eAAO;SACR,QAAQ,MAAiB;;AAEpB,kBAAS,IAAG,CAAE;AC1C1B;;;;;;;;;;;;;;;;AAsDA;AACI,cAAY,YAAgB,IAAI,MAAM;AACtC,aAAO;AAEH,mBAAW,iBAAgB,GAAG,KAAK,WAAW;AAC9C,oBAAa,MAAM,OAAQ,iBAAgB,IAAI,MAAM,aAAa;AAClE,eAAO,QAAO,KAAK;AACf,iBAAQ,OAAO,iBAAU,QAAO,UAAU,MAAM,EAAE,KAAK,CAAC,KAAK;AAC7D,cAAI,OAAO;AACP,+BAAuB,MAAM,OAAO,IAAI,OAAO;;AAGnD,qBAAW;AACX,iBAAO,OAAM;;;;AAiCzB;AACI,cAAY,YAAgB,IAAI,MAAM;AACtC,aAAO;AACH,gBAAY,MAAM,QAAQ,OAAO,MAAM;AAGvC,sBAAc,qBAAqB,MAAM,QAAQ,YAAY;AAC7D,oBAAa,MAAM,OAAQ,iBAAgB,IAAI,MAAM,cAAc;AACnE,eAAO,QAAO,KAAK;AACf,iBAAQ,OAAO,iBAAU,QAAO,UAAU,MAAM,EAAE,GAAG,QAAQ,OAAO;AACpE,cAAI,OAAO;AACP,+BAAuB,MAAM,OAAO,IAAI,OAAO;;AAGnD,qBAAW;AACX,iBAAO;;;;AA6BnB;AACI,cAAY,YAAgB,IAAI,MAAM;AACtC,aAAO;AACH,gBAAY,aAAa,SAAQ,MAAM;AACvC,gBAAY,MAAM,QAAQ,cAAc,SAAQ,MAAM;AACtD,eAAQ,eAAO,SAAU,QAAO,UAAU,MAAM,EAAE,IAAI,CAAC,IAAI;AAC3D,mBAAW;AACX,eAAO,CAAE,MAAM,OAAM,IAAI;;;AAkCjC;AACI,cAAY,YAAgB,IAAI,MAAM;AACtC,aAAO;AACH,gBAAY,MAAM,QAAQ,SAAS,KAAK,MAAM,SAAO,eAAe,UAAS,
MAAM;AAEnF,gBAAY,MAAM,QAAQ,cAAc,SAAQ,MAAM;AACtD,oBAAY,QAAO,UAAU,MAAM,EAAE,GAAG,OAAO,MAAM;AACrD,YAAI,MAAM;AACN,6BAAuB,IAAI,MAAM,OAAO,GAAG,OAAO;;AAGtD,mBAAW,IAAI;AACf,eAAO;;;AAiCf;AACI,cAAY,YAAgB,IAAI,MAAM;AACtC,cAAY,WAAW,QACnB,MAAM,QAAQ,YAAY,QAAQ,MAAM,OAAK,aAAa,YAAW,MAAM;AAE/E,+BAAyB,WAAW;AACpC,UAAI,CAAC;AAED,kBAAU;AACV,8BAAsB,QAAO;AACzB,kBAAQ,KAAK,QAAO,oBAAoB;;;AAGhD,oCAA8B,mBAAmB,QAAQ,OAAO,eAAY,CAAC,UAAS,aAAa;AAEnG,+BAAyB,QAAQ;AACjC,gBAAU,QAAQ,OAAO,eAAY,UAAS;AAC9C,cAAY,QAAQ,SAAS,GAAG,MAAM,gGACD;AAErC,+BAAyB;AACzB,aAAQ,OAAO,iBAAU,QAAO,UAAU,GAAG,SAAS,MAAM;AAC5D,cAAY,OAAM,KAAK,OAAK,KAAK,OAAO,MAAM;AAG9C,cAAY,MAAM,SAAS,GAAG,MAAM,iFACb,MAAM;AAC7B,yBAAmB;AACnB,cAAQ,QAAQ;AACZ,YAAI,OAAM,MAAM;AACZ,qBAAW,EAAE,QAAQ,OAAM;;;AAGnC,UAAI,yBAAyB;AAGzB,8BAAsB,QAAQ,OAAK,WAAW,EAAE,QAAQ;;AAE5D,aAAO,CAAE,OAAO,OAAO;;AA0C3B;AACI,aAAO,QAAO,WAAW;;AAE7B;AACI,+BAAyB,OAAM,OAAO,OAAK,KAAK,MAAM;AACtD,UAAI,mBAAmB;AACnB,cAAM,IAAI,MAAM;;;;AC5TxB;;;;;;;;;;;;;;;;AAiCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc,cAAW,SAAQ,IAAI,KAAK,QAAQ,MAAiB;;AAEzE,gBAAO,IAAG,CAAE;ACtCxB;;;;;;;;;;;;;;;;AAgCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,oBAAY,SAAQ,SAAS;AAC7B,aAAK,CAAC;AACN,eAAO;SACR,QAAQ,MAAiB;;AAEpB,qBAAY,IAAG,CAAE;ACzC7B;;;;;;;;;;;;;;;;AAoCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AAInC,uBAAiB,WAAW;AAIxB,sBAAc,IAAI,SAAS,IAAI;AAC/B,yBAAiB;AACb,uBAAa,KAAI,IAAI,SAAQ,IAAI;AACjC,iBAAO;;AAEX,eAAO,CAAE,OAAO;;AAEpB,aAAO,SAAS;;AAER,uBAAc,IAAG,CAAE;ACtD/B;;;;;;;;;;;;;;;;AAqDA,4BAAwB,iBAAiB;AACrC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,sBAAgB;AACZ,yBAAiB,gBAAoB,MAAM,GAAG;AAC9C,mBAAW;AACX,6BAAqB,oBAA6B,MAAM,GAAG;AAC3D,uBAAe;AACf,YAAI,gBAAgB;AAChB,qBAAW,WAAU,IAAI;AACzB,iBAAO,kBAA2B,KAAK,QAAQ,SAAS;;AAE5D,kBAAU,SAAQ,IAAI,UAAU;AAChC,YAAI,gBAAgB;AAChB,mBAAS;;AAEb,kBAAU;AACV,YAAI;AACA,gCAAsB,sBAA+B,IAAI,OAAO,gBAAoB,MAAM,GAAG;AAC7F,gBAAM,SAAQ,KAAK;AACnB,YAAE;;AAEN,aAAK,CAAC,IAAI;AACV,eAAO;;AAEX,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,kBAAkB,MAAM;AACxC,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAqB,MAAK;;AAE/D,iBAAO,IAAG,CAAE;ACjFxB;;;;;;;;;;;;;;;;AA4CA;AACI,eAAS,iBAAgB,GAAG,KAAK;AACjC,eAAS,iBAAgB,GAAG,KAAK;AACjC,OAAC,IAAI,MAAM,gBAAe,IAAI;AAC9B,sBAAgB;AACZ,oBAAY,SAAQ,SAAS,IAAI;AACjC,aAAK,CAAC,IAAI;AACV,eAAO;;AAEX,qBAAe,CAAE,GAAG,IAAI,GAAG;AAC3B,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB;;AAEtD,gBAAO,IAAG,CAAE;ACxDxB;;;;;;;;;;;;;;;;AAuDA,6BAAwB,iBAAiB;AACrC,eAAS,iBAAgB,GAAG,KAAK;AACjC,UAAI,GAAG,UAAU;AACb,aAAK,MAAK,IAAI;;AAElB,sBAAgB;AACZ,aAAK,CAAC;AACN,qBAAa,gBAAe,MAAM,GAAG;AACrC,4BAAoB,oBAAmB,MAAM,GAAG;AAChD,4BAAoB;AACpB,wBAAgB;AAChB,YAAI,eAAe;AACf,sBAAY,WAAU,IAAI;AAC1B,0BAAgB,kBAAiB,cAAc,QAAQ,GAAG;;AAE9D,oBAAY,SAAQ,IAAI,WAAW;AACnC,YAAI;AACA,2BAAiB,sBAAqB,MAAM,OAAO;AACnD,kBAAQ,SAAQ,OAAO;;AAE3B,eAAO;;AAEX,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,MAAM;AACtB,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,MAAK;;AAE3D,kBAAO,IAAG,CAAE;ACjFxB;;;;;;;;;;;;;;;;AA+CA,wCAAoC;AAChC,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,UAAI,SAAS;AACT,eAAO,QAAQ,OAAO;;AAE1B,UAAI,SAAS,QAAQ,OAAO;AACxB,cAAM,MAAM,gFACW,QAAQ,qBAAqB;;AAExD,sBAAgB;AACZ,yBAAiB;AACjB,qBAAa,KAAI,QAAQ,MAAM;AAC/B,wBAAgB,IAAI,QAAQ;AAC5B,sBAAc,IAAI,MAAK,SAAS,YAAY,KAAI,MAAI,IAAI,UAAU,MAAM;AACxE,aAAK,CAAC;AACN,eAAO;;AAEX,qBAAe,CAAE,QAAQ;AACzB,oBAAc,CAAE;AAChB,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,YAAY;;AAElE,uBAAc,IAAG,CAAE;ACpE/B;;;;;;;;;;;;;;;;AAwDA,kCAA8B,iBAAiB;AAC3C,iBAAW,iBAAgB,GAAG,KAAK;AACnC,mBAAa,gBAAe,MAAM,GAAG;AACrC,mBAAa,KAAI,IAAI,MAAM;AAC3B,gBAAU,IAAI,IAAI;AAClB,gBAAU,IAAI;AACd,gBAAU,MAAI,GAAG;AACjB,gBAAU,KAAI;AACd,kBAAY,MAAI,SAAQ,MAAM,EAAE,QAAQ;AACxC,UAAI;AACA,yBAAiB,sBAAqB,IAAI,OAAO;AACjD,eAAO,SAAQ,KAAK;;AAExB,aAAO;;AAEC,sBAAa,IAAG
,CAAE;ACvE9B;;;;;;;;;;;;;;;;AAoCA;AACI,iBAAW,iBAAgB,GAAG,KAAK,cAAc;AACjD,iBAAW,iBAAgB,GAAG,KAAK,cAAc;AACjD,kCAA2B,GAAG,OAAO,GAAG;AACxC,qBAAe,CAAE,GAAG,IAAI,GAAG;AAC3B,aAAO,QAAO,cAAc,cAAW,SAAQ,WAAW,IAAI,KAAK,QAAQ,MAAiB;;AAEpF,uBAAc,IAAG,CAAE;AC3C/B;;;;;;;;;;;;;;;;AAiCA;AACI,iBAAW,iBAAgB,GAAG,KAAK,cAAc;AACjD,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc,cAAW,SAAQ,WAAW,KAAK,QAAQ,MAAiB;;AAEhF,uBAAc,IAAG,CAAE;ACtC/B;;;;;;;;;;;;;;;;AAmCA;AACI,iBAAW,iBAAgB,GAAG,KAAK,aAAa;AAChD,iBAAW,iBAAgB,GAAG,KAAK,aAAa;AAChD,kCAA2B,GAAG,OAAO,GAAG;AACxC,qBAAe,CAAE,GAAG,IAAI,GAAG;AAC3B,aAAO,QAAO,cAAc,cAAW,SAAQ,UAAU,IAAI,KAAK,QAAQ,MAAiB;;AAEnF,sBAAa,IAAG,CAAE;AC1C9B;;;;;;;;;;;;;;;;AAqCA;AACI,iBAAW,iBAAgB,GAAG,KAAK,cAAc;AACjD,iBAAW,iBAAgB,GAAG,KAAK,cAAc;AACjD,kCAA2B,GAAG,OAAO,GAAG;AAExC,aAAO,WAAW,UAAU,GAAG,IAAI,WAAW,WAAW,GAAG;;AAEpD,uBAAc,IAAG,CAAE;AC5C/B;;;;;;;;;;;;;;;;AAiDA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,wBAAkB;AAClB,gBAAU;AACV,yBAAmB;AACnB,UAAI,GAAG,SAAS;AACZ,uBAAe;AACf,cAAM,SAAQ,IAAI,CAAC,GAAG,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM;;AAE7D,cAAY,IAAI,SAAS,GAAG,MAAM,uDAAuD,IAAI;AAC7F,cAAY,gCAAyC,SAAS,YAAY,MAAM,wEAC7D,0BAA0B;AAC7C,UAAI,mBAAmB;AACnB,gBAAY,OAAW,OAAM,MAAM,wEACZ,+BAA+B;;AAE1D,sBAAgB;AACZ,yBAAiB,mBAA4B,IAAI,OAAO,YAAY,SAAS,GAAmB,MAAK;AACrG;AACA,YAAI,SAAS,gBAAgB,KAAK,SAAS,iBAAiB,KACxD,aAAiB,SAAS,SAAS,SAAS;AAC5C,cAAI,IAAI;;AAGR,cAAI,SAAQ,QAAQ,KAAK;;AAE7B,aAAK,CAAC,KAAK;AACX,eAAO;;AAEX,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,YAAY,SAAS,WAAK;AAC1C,kBAAY,QAAO,cAAc,SAAS,QAAQ,MAAiB,UAAS;AAC5E,UAAI;AACA,eAAO,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAE/D,aAAO;;AAEC,qBAAW,IAAG,CAAE;ACtF5B;;;;;;;;;;;;;;;;AAoEA,wCAAoC,CAAC,GAAG,GAAG,iDAAgD;AACvF,UAAI,aAAa;AACb,oBAAY,CAAC,GAAG,GAAG;;AAGnB,yBAAgB;;AAGpB,iBAAW,iBAAgB,GAAG,KAAK;AACnC,gBAAU;AACV,yBAAmB;AACnB,UAAI,GAAG,SAAS;AACZ,uBAAe;AACf,cAAM,SAAQ,IAAI,CAAC,GAAG,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM;;AAE1E,cAAY,IAAI,SAAS,GAAG,MAAM,qDAAqD,IAAI;AAC3F,cAAY,eAAe,SAAS,MAAM,gFACb;AAC7B,cAAY,gCAAyC,SAAS,YAAY,MAAM,0EAC7D,0BAA0B;AAC7C,UAAI,mBAAmB;AACnB,gBAAY,OAAW,OAAM,MAAM,0EACZ,+BAA+B;;AAE1D,sBAAgB;AACZ,YAAI,aAAa;AACb,sBAAY,CAAC,GAAG,GAAG;;AAEvB,yBAAiB,mBAA4B,IAAI,OAAO,YAAY,SAAS,WAAW,MAAK,iBAAiB;AAC9G,kBAAU,SAAQ,UAAU,KAAK;AACjC,aAAK,CAAC,KAAK;AACX,eAAO;;AAEX,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,YAAY,SAAS,WAAK,iBAAiB,YAAY;AACvE,kBAAY,QAAO,cAAc,SAAS,QAAQ,MAAiB,WAAW;AAC9E,UAAI;AACA,eAAO,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAE7E,aAAO;;AAEC,sBAAa,IAAG,CAAE;AC7G9B;;;;;;;;;;;;;;;;AAqDA,oFAA+E;AAC3E,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,YAAY,SAAS,WAAK;AAC1C,qBAAe,QAAO,UAAU,mBAAmB,QAAQ;AAC3D,aAAO,CAAE,QAAQ,OAAO,IAAI,SAAS,OAAO;;AAEpC,8BAAqB,IAAG,CAAE;AC5DtC;;;;;;;;;;;;;;;;AAgCO,mCAA8B;AACjC,UAAI,UAAU;AACV,sBAAa,OAAM,OAAO;AAC1B,sBAAa,OAAM,OAAO;AAC1B,eAAO,SAAQ,OAAM;;AAEzB,qBAAe,qBAAoB,eAAc,QAAQ;AACzD,aAAO,QAAO,WAAW,QAAQ,OAAO;;ACvC5C;;;;;;;;;;;;;;;;AAiCO,mCAA6B;AAChC,UAAI,UAAU;AACV,sBAAa,OAAK,OAAO;AACzB,sBAAa,OAAM,OAAO;AAC1B,eAAO,SAAQ,OAAM;;AAEzB,qBAAe,oBAAmB,eAAc,QAAQ;AACxD,aAAO,QAAO,WAAW,QAAQ,OAAO;;ACxC5C;;;;;;;;;;;;;;;;AA2DA,6BAAyB,iBAAiB;AACtC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,mBAAa,gBAAe,MAAM,GAAG;AACrC,qBAAe,2BAA0B,GAAG,OAAO;AACnD,0BAAoB,OAAO;AAC3B,yBAAmB,eAAc;AACjC,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,MAAM;AACtB,sBAAgB;AACZ,iCAAyB,QAAO;AAEhC,wBAAgB,iBAAiB,UAAU,GAAG,QAC1C,KACA,MAAK,IAAI,iBAAiB;AAC9B,oBAAY,IAAI,SAAS;AACzB,eAAO,MAAI,KAAK,MAAM;;AAI1B,uBAAiB,WAAW;AACxB,sBAAc,QAAO,cAAc,SAAS,QAAQ,MAAiB,MAAM;AAC3E,yBAAiB;AACb,kCAAwB,GAAE,MAAM;AAChC,eAAK,QAAQ;AACT,4BAAgB,SAAQ;;AAE5B,6BAAmB,SAAQ,IAAI;AAC/B,uBAAa,IAAI,KAAI,YAAY,OA
AK,GAAE,OAAO,aAAa;AAC5D,iBAAO;;AAEX,eAAO,CAAE,OAAO;;AAEpB,aAAO,SAAS;;AAER,iBAAQ,IAAG,CAAE;ACxDzB,4BAAwB,iBAAiB;AACrC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,sBAAgB;AACZ,yBAAiB,gBAAe,MAAM,GAAG;AACzC,mBAAW;AACX,6BAAqB,oBAA6B,MAAM,GAAG;AAC3D,uBAAe;AACf,YAAI,gBAAgB;AAChB,qBAAW,WAAU,IAAI;AACzB,iBAAO,kBAA2B,KAAK,QAAQ,GAAG;;AAEtD,kBAAU,SAAQ,IAAI,UAAU;AAChC,YAAI,gBAAgB;AAChB,mBAAS;;AAEb,kBAAU;AACV,YAAI;AACA,gCAAsB,sBAA+B,IAAI,OAAO;AAChE,gBAAM,SAAQ,GAAG;AACjB,YAAE;;AAEN,aAAK,CAAC,IAAI;AACV,eAAO;;AAEX,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,MAAM;AACtB,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAqB,MAAK;;AAE/D,iBAAO,IAAG,CAAE;ACjExB;;;;;;;;;;;;;;;;AAkDA;AACI,eAAS,iBAAgB,GAAG,KAAK;AACjC,eAAS,iBAAgB,GAAG,KAAK;AACjC,OAAC,IAAI,MAAM,gBAAe,IAAI;AAC9B,UAAI,GAAG,UAAU;AACb,aAAK,MAAK,IAAI;AACd,aAAK,MAAK,IAAI;;AAElB,kCAA2B,GAAG,OAAO,GAAG;AACxC,sBAAgB;AACZ,oBAAY,SAAQ,QAAQ,IAAI;AAChC,aAAK,CAAC,IAAI;AACV,eAAO;;AAEX,qBAAe,CAAE,GAAG,IAAI,GAAG;AAC3B,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAqB;;AAE1D,oBAAW,IAAG,CAAE;ACnE5B;;;;;;;;;;;;;;;;AA8CA;AACI,cAAY,SAAS,aAAa,SAAS,aAAa,MAAM,+DACnD;AACX,iBAAW,iBAAgB,GAAG,KAAK;AACnC,UAAI,GAAG,SAAS;AACZ,cAAM,IAAI,MAAM;;AAGpB,cAAY,SAAS,WAAW,GAAG,MAAM,MAAM,wCAAwC,GAAG,aAC/E,SAAS;AACpB,0BAAoB,SAAS,YAAY,IAAI;AAC7C,mBAAa,GAAG,IAAI,GAAG,MAAM;AACzB,gBAAY,SAAS,GAAG,WAAW,GAAG,MAAM;AAC5C,gBAAY,SAAS,GAAG,MAAM,KAAK,SAAS,GAAG,MAAM,GAAG,MAAM,KAAK,eAC/D,SAAS,GAAG,MAAM,KAAK,SAAS,GAAG,MAAM,GAAG,MAAM,KAAK,aAAa,MAAM,wBAAwB,wCAC5F,GAAG,MAAM,KAAK,iDACX,GAAG;;AAEpB,oBAAc,CAAE,UAAU;AAC1B,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,UAAU,WAAW,QAAQ;;AAEnC,sBAAa,IAAG,CAAE;ACpE9B;;;;;;;;;;;;;;;;AAiDA;AACI,eAAS,iBAAgB,GAAG,KAAK;AACjC,eAAS,iBAAgB,GAAG,KAAK;AACjC,OAAC,IAAI,MAAM,gBAAe,IAAI;AAC9B,sBAAgB;AACZ,oBAAY,SAAQ,IAAI,IAAI;AAC5B,aAAK,CAAC,IAAI;AACV,eAAO;;AAEX,qBAAe,CAAE,GAAG,IAAI,GAAG;AAC3B,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAqB;;AAE1D,gBAAO,IAAG,CAAE;AC7DxB;;;;;;;;;;;;;;;;AA+BA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,oBAAc;AACd,2BAAqB,CAAC;AACtB,4BAAsB;AACtB,aAAO,QAAO,cAAc;AACxB,aAAK,CAAC;AACN,eAAO,SAAQ,OAAO;SACvB,CAAE,GAAG,KAAM,MAAiB,UAAU,OAAO,cAAc;;AAEtD,mBAAU,IAAG,CAAE;ACzC3B;;;;;;;;;;;;;;;;AAuCA,gCAA4B,iBAAiB;AACzC,UAAI,iBAAgB,GAAG,KAAK;AAC5B,mBAAa,gBAAe,MAAM,EAAE;AACpC,oBAAc,KAAK,GAAG,MAAM;AAC5B,0BAAoB,MAAM;AAC1B,UAAI,CAAC;AACD,wBAAgB,sBAAqB,MAAM,OAAO;;AAEtD,yBAAmB,OAAO,IAAI,MAAK,GAAG,YAAY,SAAQ,OAAO;AACjE,wBAAiB,KAAK,YAAY,MAAM;AACxC,aAAO,CAAE,MAAM,OAAO;;AAEd,oBAAW,IAAG,CAAE;ACjC5B;AACI,oBAAc,iBAAgB,OAAM,QAAQ;AAC5C,iBAAW,qBAAqB,GAAG,KAAK;AACxC,iBAAW,qBAAqB,GAAG,KAAK;AACxC,mBAAY;AACZ,wBAAkB;AAClB,mBAAa,GAAG,IAAI,UAAU,QAAQ;AAClC,uBAAe,UAAU,GAAG,QAAO,GAAG,IAAI,GAAG;AAC7C,kBAAU,KAAK,OAAO;AACtB,kBAAU,KAAK,OAAO;AACtB,iBAAQ,OAAO;;AAEnB,mBAAa;AACb,mBAAa;AACb,mBAAa,GAAG,IAAI,UAAU,QAAQ,KAAK;AACvC,aAAK,KAAK,UAAU;AACpB,aAAK,KAAK,UAAU,IAAI;;AAE5B,aAAO,CAAC,MAAM;;AAEN,yBAAgB,IAAG,CAAE;ACtCjC;;;;;;;;;;;;;;;;AAwCA,iEAA6D;AACzD,sBAAgB,iBAAgB,QAAQ,UAAU;AAClD,0BAAoB,QAAQ;AAC5B,uBAAiB,QAAQ;AACzB,UAAI,cAAc;AACd,cAAM,IAAI,MAAM,+DACT;;AAEX,UAAI,WAAW;AACX,cAAM,IAAI,MAAM,gDAAgD;;AAEpE,aAAO,QAAQ,KAAK;AACpB,uBAAiB,aAAa,IAAI,SAAQ,SAAS,CAAC,GAAG,OAAO;AAC9D,kBAAY,QAAO,cAAc,cAAW,SAAQ,YAAY,UAAU,YAAY,YAAY,OAAO,CAAE;AAE3G,aAAO,aAAa,IAAI,SAAQ,KAAK,CAAC,IAAI,SAAS;;AAE3C,wBAAe,IAAG,CAAE;ACzDhC;;;;;;;;;;;;;;;;AAoCA;AACI,eAAS,iBAAgB,GAAG,KAAK;AACjC,eAAS,iBAAgB,GAAG,KAAK;AACjC,OAAC,IAAI,MAAM,gBAAe,IAAI;AAC9B,kCAA2B,GAAG,OAAO,GAAG;AACxC,sBAAgB,cAAa,SAAQ,SAAS,IAAI;AAClD,qBAAe,CAAE,GAAG,IAAI,GAAG;AAC3B,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB;;AAEtD,qBAAY,IAAG,CAAE;AC7C7B;;;;;;;;;;;;;;;;AAmCA;AACI,qBAAe,iBAAgB,QAAO,SAAS;AAC/C,sBAAgB;AACZ,eAAO,SAAQ,KAAK;;AAExB,qBAAe,CAAE,OAAO;AACxB,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAqB;;AAE1D,iBAAQ,IAAG,
CAAE;AC3CzB;;;;;;;;;;;;;;;;AAoCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,sBAAgB;AACZ,YAAI,GAAG,UAAU;AACb,oBAAU,UAAS,KAAK;AACxB,oBAAU,WAAU,KAAK;AACzB,iBAAO,SAAQ,GAAG;;AAEtB,eAAO,SAAQ,SAAS;;AAE5B,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB;;AAEtD,sBAAY,IAAG,CAAE;AC9B7B;AACI,kBAAY,iBAAgB,IAAI,MAAM;AACtC,kBAAY,iBAAgB,IAAI,MAAM;AACtC,cAAY,IAAI,SAAS,KAAK,IAAI,SAAS,GAAG,MAAM,+DAC7C,IAAI,YAAY,IAAI;AAC3B,mBAAa,SAAQ,KAAK,CAAC,IAAI;AAC/B,mBAAa,SAAQ,KAAK,CAAC,GAAG;AAC9B,aAAO,OAAO,MAAM;;AAEZ,yBAAgB,IAAG,CAAE;AC5BjC;;;;;;;;;;;;;;;;AA6CA,+CAA2C;AACvC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,UAAI,GAAG,SAAS;AACZ,cAAM,IAAI,MAAM;;AAEpB,sBAAgB;AACZ,aAAK,CAAC;AACN,eAAO,SAAQ,IAAI,IAAI,UAAU;;AAErC,oBAAc,CAAE,UAAU;AAC1B,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,QAAO;;AAE7D,iBAAO,IAAG,CAAE;ACpDxB,iDAA6C;AACzC,cAAO,SAAS,WAAW,GAAG,MAAM;AACpC,aAAO,KAAI,GAAG,CAAC,WAAW;;AAElB,kBAAS,IAAG,CAAE;ACJ1B,iDAA6C;AACzC,cAAO,SAAS,WAAW,KAAK,SAAS,GAAG,WAAW,KACnD,SAAS,GAAG,WAAW,GAAG,MAAM;AACpC,aAAO,KAAI,GAAG,UAAU;;AAEhB,kBAAS,IAAG,CAAE;ACL1B,iDAA6C;AACzC,cAAO,SAAS,WAAW,KAAK,SAAS,GAAG,WAAW,KACnD,SAAS,GAAG,WAAW,KAAK,SAAS,GAAG,WAAW,GAAG,MAAM;AAChE,aAAO,KAAI,GAAG,UAAU;;AAEhB,kBAAS,IAAG,CAAE;ACL1B,iDAA6C;AACzC,cAAO,SAAS,WAAW,KAAK,SAAS,GAAG,WAAW,KACnD,SAAS,GAAG,WAAW,KAAK,SAAS,GAAG,WAAW,KACnD,SAAS,GAAG,WAAW,GAAG,MAAM;AACpC,aAAO,KAAI,GAAG,UAAU;;AAEhB,kBAAS,IAAG,CAAE;ACZ1B;;;;;;;;;;;;;;;;AAqEA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,cAAY,GAAG,QAAQ,IAAI,WAAW,QAAQ,MAAM,cAAc,GAAG,sCAAsC,WAAW;AACtH,cAAY,SAAS,WAAW,WAAW,QAAQ,MAAM,qBAAqB,SAAS,wCAAwC,WAAW;AAC1I,cAAY,GAAG,MAAM,OAAO;AACxB,YAAI,IAAI,KAAK,KAAK,WAAW;AACzB,iBAAO,KACD,KAAI,SAAS,IAAI,GAAG,KAAK,SAAS,IAAI,GAAG,MACvC,WAAW,IAAI,OACf;;AAEZ,eAAO;SACR,OAAO,MAAM,4BAA4B,GAAG,MAAM,MAAM,oBAAoB,SAAS,+CAA+C,WAAW;AAClJ,sBAAgB,cAAW,SAAQ,eAAe,IAAI,YAAY;AAClE,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,YAAY;AAC5B,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAqB,gBAAgB;;AAE1E,2BAAkB,IAAG,CAAE;ACvFnC;;;;;;;;;;;;;;;;AAmDA;AACI,UAAI,aAAa;AACb,oBAAY,CAAC,GAAG;;AAEpB,UAAI,WAAW;AACX,kBAAU;;AAEd,UAAI,SAAQ;AACR,eAAM;;AAEV,iBAAW,iBAAgB,QAAO,KAAK;AACvC,gBAAU;AACV,yBAAmB;AACnB,UAAI,GAAG,SAAS;AACZ,uBAAe;AACf,cAAM,SAAQ,IAAI,CAAC,GAAG,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM;;AAE7D,cAAY,gCAAyC,SAAS,YAAY,MAAM,qEAC7D,0BAA0B;AAC7C,uBAAiB,mBAA4B,IAAI,OAAO,aAAa,SAAS,WAAW;AACzF,uBAAiB,CAAC,SAAS,gBAAgB,SAAS;AAKpD;AACA,UAAI,SAAQ;AACR,sBAAc,6BAA6B,CAAC,SAAS,cAAc,SAAS,cAAc;;AAG1F,sBAAc,CAAC,CAAC,GAAG,IAAI,CAAC,GAAG;;AAE/B,4BAAsB,SAAS,OAAO,KAAK,SAAS,OAAO;AAC3D,+CAAyC,6BAA6B,CAAC,SAAS,UAAU,SAAS,UAAU,UAAU;AACvH,2BAAqB,gBAAgB,OAAM;AAC3C,yBAAmB,gBAAgB,MAAM,eAAe,KAAK,UAAU;AACvE,wBAAkB,gBAAgB,QAC9B,MAAM,SAAQ,YAAY,aAAa,SAAS,gBAChD,MAAM,SAAQ,YAAY,aAAa,SAAS;AACpD,gBAAU;AACV,kBAAY,gBAAgB,IAAI,eAAe,GAAG,UAAU;AAC5D,UAAI;AACA,eAAO,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAE/D,aAAO;;AAKX;AACI,uBAAiB,YAAY,IAAI,OAAK,EAAE;AACxC,yBAAmB,YAAY,IAAI,OAAK,EAAE;AAC1C,6BAAuB,WAAW,OAAO,UAAU;AACnD,0BAAoB,WAAW,IAAI,UAAW,KAAI,eAAe,KAAK,KAAK;AAC3E,qBAAe,WAAW,IAAI,UAAU,IAAI,YAAY;AACxD,uBAAiB,WAAW,IAAI,UAAU,CAAC,SAAS,IAAI,OAAO;AAC/D,oBAAc,WAAW,IAAI,UAAU,CAAC,GAAG,YAAY;AACvD,aAAO,CAAC,UAAU;;AAKtB;AAGI,iCAA2B,YAAY,IAAI;AACvC,eAAO,IAAK,KAAI,KAAM,UAAS,KAAK;;AAExC,4BAAsB,mBAAmB,IAAI,OAAK,IAAI;AAGtD,4BAAsB,cAAc,IAAI,OAAK,KAAK,MAAM,IAAI;AAC5D,0BAAoB,cAAc,IAAI,UAAU,IAAI,cAAc;AAClE,aAAO,cAAc,IAAI;AACrB,eAAO,CAAC,cAAc,IAAI,YAAY;;;AAGlC,iBAAQ,IAAG,CAAE;AChIzB;;;;;;;;;;;;;;;;AAiDA;AACI,kBAAY,iBAAgB,OAAM,QAAQ;AAC1C,iBAAW,iBAAgB,MAAK,OAAO;AACvC,OAAC,OAAO,QAAQ,gBAAe,OAAO;AACtC,qBAAe,CAAE,GAAG,OAAO,GAAG;AAC9B,sBAAgB;AACZ,kBAAU,SAAQ,IAAI,OAAO;AAC7B,aAAK,CAAC,OAAO,MAAM;AACnB,eAAO;;AAEX,aAAO,QAAO,cAA
c,SAAS,QAAQ,MAAqB;;AAE1D,gBAAO,IAAG,CAAE;AC7DxB;;;;;;;;;;;;;;;;AAoCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,iBAAgB,OAAO,SAAS;AAC/C,sBAAgB;AACZ,oBAAY,SAAQ,MAAM,IAAI;AAC9B,aAAK,CAAC,IAAI;AACV,eAAO;;AAEX,qBAAe,CAAE,GAAG,IAAI,OAAO;AAC/B,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB;;AAEtD,mBAAS,IAAG,CAAE;AC/C1B;;;;;;;;;;;;;;;;AAuDA,6BAAyB,iBAAiB;AACtC,eAAS,iBAAgB,GAAG,KAAK;AACjC,UAAI,GAAG,UAAU;AAEb,aAAK,MAAK,IAAI;;AAElB,sBAAgB;AACZ,qBAAa,gBAAe,MAAM,GAAG;AACrC,4BAAoB,oBAAmB,MAAM,GAAG;AAChD,4BAAoB;AACpB,wBAAgB;AAChB,YAAI,eAAe;AACf,sBAAY,WAAU,IAAI;AAC1B,0BAAgB,kBAAiB,cAAc,QAAQ,GAAG;;AAE9D,oBAAY,SAAQ,KAAK,WAAW;AACpC,YAAI;AACA,2BAAiB,sBAAqB,MAAM,OAAO;AACnD,kBAAQ,SAAQ,OAAO;;AAE3B,eAAO;;AAEX,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,MAAM;AACtB,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,MAAM;;AAE5D,iBAAQ,IAAG,CAAE;ACjFzB;;;;;;;;;;;;;;;;AA4BA;AACI,mBAAa,eAAc;AAC3B,mBAAa;AACb,UAAI,SAAS,QAAQ,UAAU;AAC3B,iBAAS,IAAI,aAAa;iBAErB,UAAU;AACf,iBAAS,IAAI,WAAW;iBAEnB,UAAU;AACf,iBAAS,IAAI,WAAW;;AAGxB,cAAM,IAAI,MAAM,qBAAqB;;AAEzC,mBAAa,GAAG,IAAI,MAAM;AACtB,eAAO,KAAK;;AAEhB,aAAO,QAAO,WAAW,QAAQ,OAAO;;AAEhC,iBAAQ,IAAG,CAAE;;;;;;;;;;;;;;;ACrBzB,MAAC;AAED;AACE,mBAAS,aAAa;AAEtB,aAAG,OAAO;AACR,oBAAQ,UAAU,GAAG,KAAK,GAAG,IAAI;AACjC,eAAG,KAAK,GAAG;AACX,eAAG,KAAK,GAAG;AACX,mBAAO,GAAG,KAAK,IAAK,IAAG,IAAI,IAAI;;AAIjC,aAAG,IAAI;AACP,aAAG,KAAK,KAAK;AACb,aAAG,KAAK,KAAK;AACb,aAAG,KAAK,KAAK;AACb,aAAG,MAAM,KAAK;AACd,cAAI,GAAG,KAAK;AAAK,eAAG,MAAM;;AAC1B,aAAG,MAAM,KAAK;AACd,cAAI,GAAG,KAAK;AAAK,eAAG,MAAM;;AAC1B,aAAG,MAAM,KAAK;AACd,cAAI,GAAG,KAAK;AAAK,eAAG,MAAM;;AAC1B,iBAAO;;AAGT;AACE,YAAE,IAAI,EAAE;AACR,YAAE,KAAK,EAAE;AACT,YAAE,KAAK,EAAE;AACT,YAAE,KAAK,EAAE;AACT,iBAAO;;AAGT;AACE,mBAAS,IAAI,KAAK,eACN,QAAQ,KAAK,cACd,GAAG;AACd,eAAK,QAAQ;AAAa,mBAAQ,GAAG,SAAS,aAAe;;AAC7D,eAAK,SAAS;AACZ,mBAAO,SAAU,UAAS,UAAW,KAAK;;AAE5C,eAAK,QAAQ;AACb,cAAI;AACF,gBAAI,OAAO,SAAU;AAAU,mBAAK,OAAO;AAC3C,iBAAK,QAAQ;AAAa,qBAAO,KAAK,IAAI;;;AAE5C,iBAAO;;AAGT;AACE,kBAAQ;AAER,qBAAW;AACT,oBAAO,MAAK;AACZ,yBAAa,GAAG,IAAI,MAAK,QAAQ;AAC/B,mBAAK,MAAK,WAAW;AACrB,sBAAQ,sBAAsB;AAC9B,kBAAI,MAAM;AACV,mBAAK;AACL,mBAAK;AACL,kBAAI,MAAM;AACV,mBAAK;AACL,mBAAK,IAAI;;AAEX,mBAAQ,OAAM,KAAK;;AAGrB,iBAAO;;AAIT,YAAI,WAAU,QAAO;AACnB,kBAAO,UAAU;mBACR,WAAU,QAAO;AAC1B,kBAAO;AAAa,mBAAO;;;AAE3B,eAAK,OAAO;;SAIZ,gBAC+B,SAC9B;;;AC3GH,MAAC;AAED;AACE,mBAAS,gBAAgB;AAEzB,aAAG,IAAI;AACP,aAAG,IAAI;AACP,aAAG,IAAI;AACP,aAAG,IAAI;AAGP,aAAG,OAAO;AACR,oBAAQ,GAAG,IAAK,GAAG,KAAK;AACxB,eAAG,IAAI,GAAG;AACV,eAAG,IAAI,GAAG;AACV,eAAG,IAAI,GAAG;AACV,mBAAO,GAAG,KAAM,GAAG,MAAM,KAAM,IAAK,MAAM;;AAG5C,cAAI,SAAU,QAAO;AAEnB,eAAG,IAAI;;AAGP,uBAAW;;AAIb,uBAAa,GAAG,IAAI,QAAQ,SAAS,IAAI;AACvC,eAAG,KAAK,QAAQ,WAAW,KAAK;AAChC,eAAG;;;AAIP;AACE,YAAE,IAAI,EAAE;AACR,YAAE,IAAI,EAAE;AACR,YAAE,IAAI,EAAE;AACR,YAAE,IAAI,EAAE;AACR,iBAAO;;AAGT;AACE,mBAAS,IAAI,OAAO,eACR,QAAQ,KAAK,cACd;AAAa,mBAAQ,IAAG,WAAW,KAAK;;AACnD,eAAK,SAAS;AACZ;AACE,wBAAU,GAAG,WAAW,UACb,IAAG,WAAW,KAAK,qBAChB,OAAM,OAAQ,MAAK;qBAC1B,WAAW;AACpB,mBAAO;;AAET,eAAK,QAAQ,GAAG;AAChB,eAAK,QAAQ;AACb,cAAI;AACF,gBAAI,OAAO,SAAU;AAAU,mBAAK,OAAO;AAC3C,iBAAK,QAAQ;AAAa,qBAAO,KAAK,IAAI;;;AAE5C,iBAAO;;AAGT,YAAI,WAAU,QAAO;AACnB,kBAAO,UAAU;mBACR,WAAU,QAAO;AAC1B,kBAAO;AAAa,mBAAO;;;AAE3B,eAAK,SAAS;;SAId,gBAC+B,SAC9B;;;AC1EH,MAAC;AAED;AACE,mBAAS,gBAAgB;AAGzB,aAAG,OAAO;AACR,oBAAS,GAAG,IAAK,GAAG,MAAM;AAC1B,eAAG,IAAI,GAAG;AAAG,eAAG,IAAI,GAAG;AAAG,eAAG,IAAI,GAAG;AAAG,eAAG,IAAI,GAAG;AACjD,mBAAQ,IAAG,IAAK,GAAG,IAAI,SAAS,KAC5B,IAAG,IAAK,GAAG,IAAK,GAAG,KAAK,IAAO,KAAK,KAAK,MAAO;;AAGtD,aAAG,IAAI;AACP,aAAG,IAAI;AACP,aAAG,IAAI;AACP,aAAG,IAAI;AACP,aAAG,IAAI;AAEP,cAAI,SAAU,QAAO;AAEnB,eAAG,IAAI;;AAGP,uBAAW;;AAIb,uBAAa,GAAG,IAAI,QAAQ,SAAS,IAAI;AACvC,eAAG
,KAAK,QAAQ,WAAW,KAAK;AAChC,gBAAI,KAAK,QAAQ;AACf,iBAAG,IAAI,GAAG,KAAK,KAAK,GAAG,MAAM;;AAE/B,eAAG;;;AAIP;AACE,YAAE,IAAI,EAAE;AACR,YAAE,IAAI,EAAE;AACR,YAAE,IAAI,EAAE;AACR,YAAE,IAAI,EAAE;AACR,YAAE,IAAI,EAAE;AACR,YAAE,IAAI,EAAE;AACR,iBAAO;;AAGT;AACE,mBAAS,IAAI,OAAO,eACR,QAAQ,KAAK,cACd;AAAa,mBAAQ,IAAG,WAAW,KAAK;;AACnD,eAAK,SAAS;AACZ;AACE,wBAAU,GAAG,WAAW,UACb,IAAG,WAAW,KAAK,qBAChB,OAAM,OAAQ,MAAK;qBAC1B,WAAW;AACpB,mBAAO;;AAET,eAAK,QAAQ,GAAG;AAChB,eAAK,QAAQ;AACb,cAAI;AACF,gBAAI,OAAO,SAAU;AAAU,mBAAK,OAAO;AAC3C,iBAAK,QAAQ;AAAa,qBAAO,KAAK,IAAI;;;AAE5C,iBAAO;;AAGT,YAAI,WAAU,QAAO;AACnB,kBAAO,UAAU;mBACR,WAAU,QAAO;AAC1B,kBAAO;AAAa,mBAAO;;;AAE3B,eAAK,SAAS;;SAId,gBAC+B,SAC9B;;;AC7EH,MAAC;AAED;AACE,mBAAS;AAGT,aAAG,OAAO;AAER,oBAAQ,GAAG,OAAO,GAAG;AACrB,gBAAI,EAAE;AAAI,iBAAM,MAAM;AAAI,gBAAI,IAAK,KAAK;AACxC,gBAAI,EAAG,IAAI,IAAK;AAAI,iBAAK,IAAK,MAAM;AACpC,gBAAI,EAAG,IAAI,IAAK;AAAI,iBAAK,IAAK,MAAM;AACpC,gBAAI,EAAG,IAAI,IAAK;AAAI,iBAAK,IAAK,KAAK;AACnC,gBAAI,EAAG,IAAI,IAAK;AAAI,gBAAI,IAAK,KAAK;AAAK,iBAAK,IAAK,KAAK;AACtD,cAAE,KAAK;AACP,eAAG,IAAK,IAAI,IAAK;AACjB,mBAAO;;AAGT;AACE,0BAAc;AAEd,gBAAI,UAAU,SAAO;AAEnB,kBAAI,EAAE,KAAK;;AAGX,sBAAO,KAAK;AACZ,mBAAK,IAAI,GAAG,IAAI,MAAK,QAAQ,EAAE;AAC7B,kBAAE,IAAI,KAAM,EAAE,IAAI,MAAM,KACnB,MAAK,WAAW,KAAK,EAAG,IAAI,IAAK,MAAM;;;AAIhD,mBAAO,EAAE,SAAS;AAAG,gBAAE,KAAK;AAC5B,iBAAK,IAAI,GAAG,IAAI,KAAK,EAAE,OAAO,GAAG,EAAE;AAAE;AACrC,gBAAI,KAAK;AAAG,kBAAI,EAAE,KAAK;;AAAS,kBAAI,EAAE;AAEtC,gBAAG,IAAI;AACP,gBAAG,IAAI;AAGP,iBAAK,IAAI,KAAK,IAAI,GAAG,EAAE;AACrB,kBAAG;;;AAIP,gBAAK,IAAI;;AAGX;AACE,YAAE,IAAI,EAAE,EAAE;AACV,YAAE,IAAI,EAAE;AACR,iBAAO;;AAGT;AACE,cAAI,QAAQ;AAAM,mBAAO,CAAE,IAAI;AAC/B,mBAAS,IAAI,OAAO,eACR,QAAQ,KAAK,cACd;AAAa,mBAAQ,IAAG,WAAW,KAAK;;AACnD,eAAK,SAAS;AACZ;AACE,wBAAU,GAAG,WAAW,UACb,IAAG,WAAW,KAAK,qBAChB,OAAM,OAAQ,MAAK;qBAC1B,WAAW;AACpB,mBAAO;;AAET,eAAK,QAAQ,GAAG;AAChB,eAAK,QAAQ;AACb,cAAI;AACF,gBAAI,MAAM;AAAG,mBAAK,OAAO;AACzB,iBAAK,QAAQ;AAAa,qBAAO,KAAK,IAAI;;;AAE5C,iBAAO;;AAGT,YAAI,WAAU,QAAO;AACnB,kBAAO,UAAU;mBACR,WAAU,QAAO;AAC1B,kBAAO;AAAa,mBAAO;;;AAE3B,eAAK,YAAY;;SAIjB,gBAC+B,SAC9B;;;ACrEH,MAAC;AAED;AACE,mBAAS;AAGT,aAAG,OAAO;AACR,oBAAQ,GAAG,OACH,GAAG,OAAO,GAAG;AAErB,eAAG,IAAI,IAAK,IAAI,aAAc;AAE9B,gBAAI,EAAG,IAAI,KAAM;AACjB,gBAAI,EAAE,IAAM,IAAI,IAAK;AACrB,iBAAK,KAAK;AACV,iBAAK,KAAK;AACV,iBAAK,MAAM;AACX,iBAAK,MAAM;AAEX,gBAAI,EAAE,KAAK,IAAI;AACf,eAAG,IAAI;AAEP,mBAAQ,IAAK,KAAK,MAAM,MAAQ;;AAGlC;AACE,mCAAuB,YAAY;AACnC,gBAAI,UAAU,SAAO;AAEnB,kBAAI;AACJ,sBAAO;;AAGP,sBAAO,QAAO;AACd,kBAAI;AACJ,sBAAQ,KAAK,IAAI,OAAO,MAAK;;AAG/B,iBAAK,IAAI,GAAG,IAAI,KAAK,IAAI,OAAO,EAAE;AAEhC,kBAAI;AAAM,qBAAK,MAAK,WAAY,KAAI,MAAM,MAAK;AAE/C,kBAAI,MAAM;AAAG,oBAAI;AACjB,mBAAK,KAAK;AACV,mBAAK,MAAM;AACX,mBAAK,KAAK;AACV,mBAAK,MAAM;AACX,kBAAI,KAAK;AACP,oBAAK,IAAI,aAAc;AACvB,oBAAK,EAAE,IAAI,QAAS,IAAI;AACxB,oBAAK,AAAK,KAAL,IAAU,IAAI,IAAI;;;AAI3B,gBAAI,KAAK;AACP,gBAAG,UAAQ,MAAK,UAAU,KAAK,OAAO;;AAKxC,gBAAI;AACJ,iBAAK,IAAI,IAAI,KAAK,IAAI,GAAG,EAAE;AACzB,kBAAI,EAAG,IAAI,KAAM;AACjB,kBAAI,EAAE,IAAM,IAAI,IAAK;AACrB,mBAAK,KAAK;AACV,mBAAK,KAAK;AACV,mBAAK,MAAM;AACX,mBAAK,MAAM;AACX,gBAAE,KAAK,IAAI;;AAGb,gBAAG,IAAI;AACP,gBAAG,IAAI;AACP,gBAAG,IAAI;;AAGT,gBAAK,IAAI;;AAGX;AACE,YAAE,IAAI,EAAE;AACR,YAAE,IAAI,EAAE;AACR,YAAE,IAAI,EAAE,EAAE;AACV,iBAAO;;AACR;AAED;AACE,cAAI,QAAQ;AAAM,mBAAO,CAAE,IAAI;AAC/B,mBAAS,IAAI,OAAO,eACR,QAAQ,KAAK,cACd;AAAa,mBAAQ,IAAG,WAAW,KAAK;;AACnD,eAAK,SAAS;AACZ;AACE,wBAAU,GAAG,WAAW,UACb,IAAG,WAAW,KAAK,qBAChB,OAAM,OAAQ,MAAK;qBAC1B,WAAW;AACpB,mBAAO;;AAET,eAAK,QAAQ,GAAG;AAChB,eAAK,QAAQ;AACb,cAAI;AACF,gBAAI,MAAM;AAAG,mBAAK,OAAO;AACzB,iBAAK,QAAQ;AAAa,qBAAO,KAAK,IAAI;;;AAE5C,iBAAO;;AAGT,YAAI,WAAU,QAAO;AACnB,kBAAO,UAAU;
mBACR,WAAU,QAAO;AAC1B,kBAAO;AAAa,mBAAO;;;AAE3B,eAAK,UAAU;;SAIf,gBAC+B,SAC9B;;;AC5IH,MAAC;AAED;AACE,mBAAS,gBAAgB;AAGzB,aAAG,OAAO;AACR,oBAAQ,GAAG,OAAO,GAAG,OAAO,GAAG,OAAO,GAAG;AACzC,gBAAK,KAAK,KAAO,MAAM,IAAK;AAC5B,gBAAK,IAAI,IAAK;AACd,gBAAK,KAAK,KAAO,MAAM,IAAK;AAC5B,gBAAK,IAAI,IAAK;AACd,eAAG,IAAI,IAAK,KAAK,KAAO,MAAM,KAAM;AACpC,eAAG,IAAI,IAAK,IAAI,IAAK;AACrB,eAAG,IAAK,KAAK,KAAO,MAAM,KAAM;AAChC,mBAAO,GAAG,IAAK,IAAI,IAAK;;AAmB1B,aAAG,IAAI;AACP,aAAG,IAAI;AACP,aAAG,IAAI,aAAa;AACpB,aAAG,IAAI;AAEP,cAAI,SAAS,KAAK,MAAM;AAEtB,eAAG,IAAK,OAAO,aAAe;AAC9B,eAAG,IAAI,OAAO;;AAGd,uBAAW;;AAIb,uBAAa,GAAG,IAAI,QAAQ,SAAS,IAAI;AACvC,eAAG,KAAK,QAAQ,WAAW,KAAK;AAChC,eAAG;;;AAIP;AACE,YAAE,IAAI,EAAE;AACR,YAAE,IAAI,EAAE;AACR,YAAE,IAAI,EAAE;AACR,YAAE,IAAI,EAAE;AACR,iBAAO;;AACR;AAED;AACE,mBAAS,IAAI,OAAO,eACR,QAAQ,KAAK,cACd;AAAa,mBAAQ,IAAG,WAAW,KAAK;;AACnD,eAAK,SAAS;AACZ;AACE,wBAAU,GAAG,WAAW,UACb,IAAG,WAAW,KAAK,qBAChB,OAAM,OAAQ,MAAK;qBAC1B,WAAW;AACpB,mBAAO;;AAET,eAAK,QAAQ,GAAG;AAChB,eAAK,QAAQ;AACb,cAAI;AACF,gBAAI,OAAO,SAAU;AAAU,mBAAK,OAAO;AAC3C,iBAAK,QAAQ;AAAa,qBAAO,KAAK,IAAI;;;AAE5C,iBAAO;;AAGT,YAAI,WAAU,QAAO;AACnB,kBAAO,UAAU;mBACR,WAAU,QAAO;AAC1B,kBAAO;AAAa,mBAAO;;;AAE3B,eAAK,SAAS;;SAId,gBAC+B,SAC9B;;;AC3EH,MAAC;AAID,sBAAa,cACD,cACC,YACA,cACC,uBACG,MAAK,IAAI,OAAO,wBACd,MAAK,IAAI,GAAG,oBAChB,eAAe,UACnB,QAAQ;AAOnB;AACE,oBAAU;AACV,oBAAW,WAAW,OAAQ,CAAE,SAAS,QAAU,WAAW;AAG9D,0BAAgB,OAAO,SACrB,QAAQ,UAAU,CAAC,MAAM,SAAS,UACjC,QAAQ,OAAQ,aAAa,MAAM,IAAI;AAG1C,qBAAW,IAAI,KAAK;AAIpB,qBAAW;AACT,oBAAQ,KAAK,EAAE,aACP,gBACA;AACR,mBAAO,IAAI;AACT,kBAAK,KAAI,KAAK;AACd,mBAAK;AACL,kBAAI,KAAK,EAAE;;AAEb,mBAAO,KAAK;AACV,mBAAK;AACL,mBAAK;AACL,qBAAO;;AAET,mBAAQ,KAAI,KAAK;;AAGnB,eAAK,QAAQ;AAAa,mBAAO,KAAK,EAAE,KAAK;;AAC7C,eAAK,QAAQ;AAAa,mBAAO,KAAK,EAAE,KAAK;;AAC7C,eAAK,SAAS;AAGd,iBAAO,SAAS,KAAK,IAAI;AAGzB,iBAAQ,SAAQ,QAAQ,YACpB;AACE,gBAAI;AAEF,kBAAI,MAAM;AAAK,qBAAK,OAAO;;AAE3B,oBAAK,QAAQ;AAAa,uBAAO,KAAK,MAAM;;;AAK9C,gBAAI;AAAgB,oBAAK,WAAW;AAAM,qBAAO;;AAI5C,qBAAO;aAElB,MACA,WACA,YAAY,UAAU,QAAQ,SAAU,QAAQ,OAChD,QAAQ;;AAEV,cAAK,SAAS,WAAW;AAYzB;AACE,0BAAgB,IAAI,aACX,UAAU,OAAO,GAAG,IAAI,GAAG,IAAI,OAAO,GAAG,IAAI;AAGtD,cAAI,CAAC;AAAU,kBAAM,CAAC;;AAGtB,iBAAO,IAAI;AACT,cAAE,KAAK;;AAET,eAAK,IAAI,GAAG,IAAI,OAAO;AACrB,cAAE,KAAK,EAAE,IAAI,OAAQ,IAAI,IAAI,IAAI,UAAW,KAAI,EAAE;AAClD,cAAE,KAAK;;AAIT,UAAC,IAAG,IAAI;AAEN,wBAAW,QACH,GAAG,QAAO,GAAG,QAAO,GAAG;AAC/B,mBAAO;AACL,mBAAI,GAAE,KAAI,OAAQ,KAAI;AACtB,kBAAI,IAAI,QAAQ,GAAE,OAAS,IAAE,MAAK,GAAE,KAAI,OAAQ,KAAI,OAAQ,IAAE,MAAK;;AAErE,eAAG,IAAI;AAAG,eAAG,IAAI;AACjB,mBAAO;aAIN;;AAOL;AACE,YAAE,IAAI,EAAE;AACR,YAAE,IAAI,EAAE;AACR,YAAE,IAAI,EAAE,EAAE;AACV,iBAAO;;AACR;AAMD;AACE,uBAAa,UAAW,OAAO;AAC/B,cAAI,SAAS,OAAO;AAClB,iBAAK,QAAQ;AACX;AAAM,uBAAO,KAAK,SAAQ,IAAI,OAAO,QAAQ;;;;;AAGjD,iBAAQ,OAAO,SAAS,SAAS,OAAO,WAAW,MAAM,MAAM;;AAQjE;AACE,2BAAiB,OAAO,eAAe;AACvC,iBAAO,IAAI,WAAW;AACpB,gBAAI,OAAO,KACT,OAAS,UAAS,IAAI,OAAO,KAAK,MAAM,WAAW,WAAW;;AAElE,iBAAO,SAAS;;AAQlB;AACE;AACE;AACA,gBAAI,cAAe,OAAM,WAAW;AAElC,oBAAM,IAAI;;AAEV,oBAAM,IAAI,WAAW;AACrB,cAAC,SAAO,UAAU,QAAO,UAAU,gBAAgB;;AAErD,mBAAO,SAAS;;AAEhB,2BAAc,QAAO,qBACP,YAAW,SAAQ;AACjC,mBAAO,CAAC,CAAC,IAAI,QAAM,SAAQ,SAAS,QAAO,QAAQ,SAAS;;;AAQhE;AACE,iBAAO,OAAO,aAAa,MAAM,GAAG;;AAUtC,eAAO,MAAK,UAAU;AAMtB,YAAmC,QAAO;AACxC,kBAAA,UAAiB;AAEjB;AACE,yBAAa;;;mBAEL;AACV,mBAAO;AAAa,mBAAO;;;SAK3B,IACA;;ACjMF,eAAG,OAAO;AACV,eAAG,SAAS;AACZ,eAAG,SAAS;AACZ,eAAG,YAAY;AACf,eAAG,UAAU;AACb,eAAG,SAAS;AAEZ,uBAAiB;;AC3DjB;;;;;;;;;;;;;;;;;MAoBI;AACI,aAAK,OAAO;AACZ,aAAK,SAAS;AACd,aAAK,QAAQ;AACb,aAAK,UAAU;AACf,aAAK,YAAY;AACjB,YAAI,KAAK;AACL,eAAK,QAAQ,KAAK,OAAO,KAAK,SAAS;AACvC,eAAK,QAAQ,KAAK,OAAO,KAAK,SAAS;;AAE3C,0BAAkB,
OAAO,OAAO,KAAK;AACrC,aAAK,SAAS,aAAgB,UAAU;;MAG5C;AACI,YAAI,CAAC,MAAM,KAAK;AACZ,wBAAc,KAAK;AACnB,eAAK,UAAU;AACf,iBAAO;;AAEX;AACA,sBAAc;AACd,eAAO,CAAC;AACJ;AACA;AACI,iBAAK,IAAI,KAAK,WAAW;AACzB,iBAAK,IAAI,KAAK,WAAW;AACzB,gBAAI,KAAK,KAAK,KAAK;mBACd,KAAK,KAAK,MAAM;AACzB,uBAAY,KAAK,KAAK,KAAO,KAAK,IAAI,KAAK;AAC3C,oBAAU,KAAK,OAAO,KAAK,SAAS,KAAK;AACzC,oBAAU,KAAK,OAAO,KAAK,SAAS,KAAK;AACzC,cAAI,CAAC,KAAK,aAAa,KAAK,iBAAiB;AACzC,sBAAU;;;AAGlB,YAAI,CAAC,KAAK,aAAa,KAAK,iBAAiB;AACzC,eAAK,UAAU,KAAK,aAAa;;AAErC,eAAO,KAAK,aAAa;;MAG7B;AACI,YAAI,KAAK,SAAS,QAAQ,KAAK,UAAU;AACrC,iBAAO;;AAEX,eAAO,KAAK,MAAM;;MAGtB;AACI,eAAO,SAAS,KAAK,SAAS,SAAS,KAAK;;;;MAMhD;AACI,aAAK,QAAQ;AACb,aAAK,OAAO,IAAI;AAChB,aAAK,QAAQ;AACb,0BAAkB,OAAO,OAAO,KAAK;AACrC,aAAK,QAAQ,aAAgB,UAAU;AACvC,aAAK,QAAQ,IAAI,YAAY,GAAG,GAAG,OAAO,OAAO,KAAK;AACtD,YAAI,QAAQ;AACR,eAAK,IAAI,QAAS,IAAI;;AAGtB,eAAK,IAAI,QAAS,IAAI;;AAE1B,aAAK,IAAI,IAAI,KAAK,KAAK,IAAI,KAAK;;MAGpC;AACI;AACA,eAAO;AACH;AACI,gBAAI,KAAK,MAAM;AACf,gBAAI,IAAK,KAAK,IAAI;mBACb,KAAK;AACd,eAAK,IAAI;AACT,eAAK,IAAI;AACT,eAAK,IAAK,QAAQ,KAAK;AACvB,eAAM,MAAM,KAAO,KAAK,IAAK,KAAI,IAAI,KAAK,IAAI;AAC9C,cAAI,KAAK;AACT,cAAI,IAAI,MAAM,KAAK,IAAI,KAAK;AACxB;;;AAGR,YAAK,IAAI,KAAK,OAAQ,KAAK,IAAI;AAC/B,YAAI,KAAK,QAAQ;AACb,eAAK,KAAK,IAAI,KAAK,SAAS,IAAI,KAAK;;AAEzC,eAAO,KAAK,aAAa;;MAG7B;AACI,YAAI,KAAK,UAAU;AACf,iBAAO;;AAEX,eAAO,KAAK,MAAM;;;;MAItB,mBAAkB,UAAS;AAEvB,aAAK,iBAAiB,MAAO,KAAK,SAAS,QAAQ,KAAK,UAAU;AAClE,aAAK,MAAM;AACX,aAAK,QAAQ,OAAM;AACnB,aAAK,QAAQ;AACb,YAAI,QAAQ;AACR,iBAAO,KAAK;;AAEhB,YAAI,OAAO,SAAS;AAChB,iBAAO,KAAK;;AAEhB,YAAI,CAAC,KAAK,oBAAoB,KAAK,SAAS;AACxC,gBAAM,IAAI,MAAM,0BAA0B,UAAS;;AAEvD,aAAK,SAAS,aAAgB;;MAElC;AACI,YAAI,KAAK;AACL,iBAAO;;AAEX,eAAO,KAAK,MAAM;;MAEtB;AACI,eAAO,KAAK,aAAa,KAAK,MAAM,KAAK,QAAQ,KAAK;;;AAGvD;AAEH,gBAAU,OAAO;AACjB,gBAAU,SAAS;AACnB,gBAAU,SAAS;AACnB,iBAAW,IAAI,IAAK,MAAK,IAAI,GAAG,KAAK,OAAO,KAAK,IAAI,IAAI,GAAG;AAG5D,8BAAwB;AACxB,UAAI,KAAK;AACL,cAAM,IAAI,MAAM,2BAA2B;;;AAG5C;AACH,UAAI,YAAW;AACX,mBAAU;;AAEd,yBAAmB,OAAK;AACxB,yBAAmB,YAAY,cAAc;AAC7C,yBAAmB,kBAAkB,QAAQ,aAAa,gBAAgB;;AAE9E;AACI,iBAAU;AACV,mBAAa,GAAG,IAAI,OAAO,QAAQ;AAC/B,gBAAO,OAAO;;AAElB,aAAO,OAAM,OAAO;;AAExB;AACI,0BAAoB;AACpB,mBAAa,GAAG,IAAI,OAAO,QAAQ;AAC/B,qBAAa,OAAO,KAAK;AACzB,yBAAiB,OAAO;;AAE5B,aAAO,KAAK,KAAK,gBAAgB,OAAO;;AAE5C;AAEI,yBAAmB,OAAK;AACxB,gBAAU,OAAO;AACjB,kBAAW;AACX,kBAAW;AACX,mBAAa,GAAG,IAAI,GAAG;AACnB,kBAAU,OAAO,KAAK;AACtB,iBAAQ,KAAK,IAAI,GAAG;AACpB,iBAAQ,KAAK,IAAI,GAAG;;AAExB,aAAQ,IAAI,IAAK,QAAO,KAAK,IAAK,IAAI,IAAK,OAAM;;AAErD;AAEI,yBAAmB,OAAK;AACxB,gBAAU,OAAO;AACjB,kBAAW;AACX,kBAAW;AACX,mBAAa,GAAG,IAAI,GAAG;AACnB,kBAAU,OAAO,KAAK;AACtB,iBAAQ,KAAK,IAAI,GAAG;AACpB,iBAAQ,KAAK,IAAI,GAAG;;AAExB,aAAQ,IAAI,IAAK,QAAO,KAAK,IAAK,IAAK,KAAI,KAAM,OAAM,IAAI;;AClN/D;;;;;;;;;;;;;;;;AAmCA,+CAA2C,WAAW;AAClD,UAAI,QAAQ;AACR,eAAO;;AAEX,UAAI,SAAS;AACT,gBAAQ;;AAEZ,UAAI,UAAU,aAAa,UAAU;AACjC,cAAM,IAAI,MAAM,yBAAyB;;AAE7C,qBAAe,IAAI,UAAU,OAAO,MAAM,OAAO;AACjD,kBAAY,QAAO,OAAO;AAC1B,mBAAa,GAAG,IAAI,IAAI,OAAO,QAAQ;AACnC,YAAI,OAAO,KAAK,OAAO;;AAE3B,aAAO,IAAI;;AAEH,wBAAe,IAAG,CAAE;ACpDhC;;;;;;;;;;;;;;;;AAkCA,0CAAqC,YAAY;AAC7C,UAAI,SAAS,QAAQ,UAAU;AAC3B,cAAM,IAAI,MAAM,yBAAyB;;AAE7C,wBAAkB,IAAI,YAAY,OAAM,QAAQ,OAAO,OAAuB;AAC9E,kBAAY,QAAO,OAAO;AAC1B,mBAAa,GAAG,IAAI,IAAI,OAAO,QAAQ;AACnC,YAAI,OAAO,KAAK,UAAU;;AAE9B,aAAO,IAAI;;AAEH,yBAAgB,IAAG,CAAE;AC7CjC;;;;;;;;;;;;;;;;AAuCA,4CAAwC,YAAY,WAAW;AAC3D,kBAAY,QAAO,OAAO;AAC1B,qBAAe,IAAI,cAAc,QAAQ,QAAQ,MAAM;AACvD,mBAAa,GAAG,IAAI,IAAI,OAAO,QAAQ;AACnC,YAAI,OAAO,KAAK,OAAO;;AAE3B,aAAO,IAAI;;AAEH,0BAAiB,IAAG,CAAE;AC/ClC;;;;;;;;;;;;;;;;AAmCO;AACH,qBAAc;AACd,4BAAsB,YAAW,QAAQ;AACzC,UAAI,cAAc,WAAW;AACzB,cAAM,IAA
I,MAAM;;AAEpB,oBAAc;AACd,aAAO,YAAW,QAAQ,OAAO,eAAe;;AC1CpD;;;;;;;;;;;;;;;;AAwCO,wCAAmC,WAAW;AACjD,UAAI,UAAS;AACT,cAAM,IAAI,MAAM;;AAEpB,sBAAgB;AACZ,8BAAsB,UAAU;AAChC,4CAAoC,QAAQ,QAAQ,QAAO;AAC3D,4CAAoC,OAAO,SAAS,QAAO;AAC3D,YAAI,iBAAiB,+BACjB;AACA,iBAAO,OAAM,CAAC,IAAI;;AAEtB,4BAAoB,KAAK,IAAI,KAAK,KAAM,QAAO,SAAS;AACxD,uBAAe,qBAAoB,aAAa;AAChD,YAAI,OAAO,SAAS,UAAS;AAGzB,kBAAO;;AAEX,eAAO,KAAK;AACZ,qBAAa,GAAG,IAAI,OAAO,QAAQ;AAC/B,iBAAO,KAAK,OAAO,IAAI,KAAK;;AAEhC,eAAO,UAAS,QAAQ;;AAE5B,oBAAc,CAAE,OAAO,MAAM,aAAM;AACnC,aAAO,QAAO,cAAc,SAAS,IAAiB,MAAiB,OAAO;;AClElF;;;;;;;;;;;;;;;;AAgCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,oBAAY,SAAQ,WAAW;AAC/B,aAAK,CAAC;AACN,eAAO;SACR,QAAQ,MAAiB;;AAEpB,uBAAc,IAAG,CAAE;ACzC/B;;;;;;;;;;;;;;;;AAkCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,sBAAgB;AACZ,aAAK,CAAC;AACN,YAAI,GAAG,UAAU;AACb,iBAAO,MAAK,IAAI;;AAEpB,eAAO,SAAQ,KAAK;;AAExB,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB;;AAEtD,kBAAQ,IAAG,CAAE;AC9CzB;;;;;;;;;;;;;;;;AAkCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,sBAAgB;AACZ,aAAK,CAAC;AACN,YAAI,GAAG,UAAU;AACb,iBAAO,MAAK,IAAI;;AAEpB,eAAO,SAAQ,MAAM;;AAEzB,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB;;AAEtD,mBAAS,IAAG,CAAE;AC9C1B;;;;;;;;;;;;;;;;AAsDA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,sBAAgB;AACZ,qBAAa,gBAAe,MAAM,GAAG;AACrC,YAAI,GAAG,SAAS;AACZ,iBAAO,MAAM;;AAEjB,oBAAY,SAAQ,QAAQ,IAAI;AAChC,eAAO,SAAQ,KAAK,GAAG;;AAE3B,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,MAAM;AACtB,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAqB,UAAS;;AAEnE,qBAAW,IAAG,CAAE;ACpE5B;;;;;;;;;;;;;;;;AAyBA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,cAAY,GAAG,SAAS,GAAG,MAAM,qDAAqD,GAAG;AACzF,aAAO,SAAQ,IAAI;;AAEX,sBAAa,IAAG,CAAE;AC9B9B;;;;;;;;;;;;;;;;AA2BA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,cAAY,GAAG,SAAS,GAAG,MAAM,qDAAqD,GAAG;AACzF,aAAO,SAAQ,IAAI;;AAEX,sBAAa,IAAG,CAAE;AChC9B;;;;;;;;;;;;;;;;AA2BA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,cAAY,GAAG,SAAS,GAAG,MAAM,qDAAqD,GAAG;AACzF,aAAO,SAAQ,IAAI;;AAEX,sBAAa,IAAG,CAAE;AChC9B;;;;;;;;;;;;;;;;AA2BA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,cAAY,GAAG,SAAS,GAAG,MAAM,qDAAqD,GAAG;AACzF,aAAO,SAAQ,IAAI;;AAEX,sBAAa,IAAG,CAAE;AChC9B;;;;;;;;;;;;;;;;AAiCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc,cAAa,SAAQ,MAAM,KAAK,QAAQ,MAAiB;;AAE7E,kBAAS,IAAG,CAAE;ACtC1B;;;;;;;;;;;;;;;;AAiCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,oBAAY,SAAQ,MAAM;AAC1B,aAAK,CAAC;AACN,eAAO;SACR,QAAQ,MAAiB;;AAEpB,kBAAS,IAAG,CAAE;AC1C1B;;;;;;;;;;;;;;;;AAkCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,sBAAgB;AACZ,oBAAY,SAAQ,KAAK;AACzB,aAAK,CAAC;AACN,eAAO;;AAEX,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB;;AAEtD,iBAAQ,IAAG,CAAE;ACOzB,6FAAwF,CAAC,GAAG,iBAAiB;AACzG,iBAAW,iBAAgB,GAAG,KAAK;AACnC,+BAAyB,iBAAgB,iBAAiB,mBAAmB;AAC7E,+BAAyB,iBAAgB,iBAAiB,mBAAmB;AAC7E,gBAAU;AACV,yBAAmB;AACnB,UAAI,GAAG,SAAS;AACZ,uBAAe;AACf,cAAM,SAAQ,IAAI,CAAC,GAAG,GAAG,MAAM,IAAI,GAAG,MAAM,IAAI,GAAG,MAAM;;AAE7D,UAAI,eAAe;AACf,cAAM,IAAI,MAAM;;AAGpB,cAAY,IAAI,SAAS,GAAG,MAAM,gEACtB,IAAI;AAChB,cAAY,iBAAiB,SAAS,GAAG,MAAM,2EAC/B,iBAAiB;AACjC,cAAY,iBAAiB,SAAS,GAAG,MAAM,2EAC/B,iBAAiB;AACjC,cAAY,iBAAiB,MAAM,OAAO,GAAG,MAAM,yFACxB,iBAAiB,MAAM;AAClD,cAAY,iBAAiB,MAAM,OAAO,GAAG,MAAM,yFAClB,iBAAiB,MAAM;AACxD,yBAAmB,iBAAiB,MAAM;AAC1C,gCAA0B,iBAAiB,MAAM;AACjD,cAAY,iBAAiB,MAAM,OAAO,aAAa,mBAAmB,MAAM,6EACjE,aAAa,8BACb,iBAAiB,MAAM;AACtC,wBAAkB,iBAAgB,KAAK,kBAAkB,SAAS,MAAK,YAAY;AACnF,8BAAwB;AACxB,kBAAY,QAAO,WAAW,kBAAkB,iBAAiB,SAAS;AAC1E,UAAI;AACA,eAAO,SAAQ,KAAK,CAAC,IAAI,MAAM,IAAI,IAAI,MAAM,IAAI,IAAI,MAAM;;AAE/D,aAAO;;AAEC,4BAAmB,IAAG,CAAE;ACxFpC;;;;;;;;;;;;;;;;AAgDA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,iBAAW,iBAAgB,GAAG
,KAAK;AACnC,cAAY,GAAG,UAAU,GAAG,OAAO,MAAM,kDAAkD,GAAG,iBAAiB,GAAG;AAClH,cAAY,GAAG,SAAS,GAAG,MAAM,qCAAqC,GAAG;AACzE,cAAY,GAAG,SAAS,GAAG,MAAM,qCAAqC,GAAG;AACzE,oBAAc,MAAM,GAAG;AACvB,oBAAc,MAAM,GAAG;AACvB,mBAAa,IAAI,IAAI;AACrB,uBAAiB;AACjB,mBAAa,GAAG,IAAI,MAAM,QAAQ;AAC9B,YAAI,CAAC,KAAK,IAAI,MAAM;AAChB;;;AAGR,sBAAe,IAAI,cAAa,CAAC,aAAa,GAAG;AACjD,sBAAgB,IAAI,cAAa,CAAC,aAAa;AAC/C,mBAAa,QAAO,GAAG,IAAI,MAAM,QAAQ;AACrC,YAAI,CAAC,KAAK,IAAI,MAAM;AAChB,kBAAO,OAAO,MAAK,MAAM;AACzB,kBAAQ,OAAO,MAAK;AACpB;;;AAGR,aAAO,CAAC,QAAO,YAAY,QAAQ;;AAE3B,2BAAkB;AC1E9B;;;;;;;;;;;;;;;;AAgCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc,cAAW,SAAQ,KAAK,KAAK,QAAQ,MAAiB;;AAE1E,iBAAQ,IAAG,CAAE;ACrCzB;;;;;;;;;;;;;;;;AAgCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,oBAAY,SAAQ,IAAI;AACxB,aAAK,CAAC;AACN,eAAO;SACR,QAAQ,MAAiB;;AAEpB,gBAAO,IAAG,CAAE;ACzCxB;;;;;;;;;;;;;;;;AAgCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,oBAAY,SAAQ,KAAK;AACzB,aAAK,CAAC;AACN,eAAO;SACR,QAAQ,MAAiB;;AAEpB,iBAAQ,IAAG,CAAE;ACzCzB;;;;;;;;;;;;;;;;AAwBA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,cAAY,GAAG,SAAS,GAAG,MAAM,mDAAmD,GAAG;AACvF,aAAO,OAAM,IAAI,CAAC,QAAQ,CAAC;;AAEnB,oBAAW,IAAG,CAAE;AC7B5B;;;;;;;;;;;;;;;;AAwBA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,cAAY,GAAG,SAAS,GAAG,MAAM,mDAAmD,GAAG;AACvF,aAAO,OAAM,IAAI,OAAO;;AAEhB,qBAAW,IAAG,CAAE;AC7B5B;;;;;;;;;;;;;;;;AAwBA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,cAAY,GAAG,SAAS,GAAG,MAAM,mDAAmD,GAAG;AACvF,aAAO,OAAM,IAAI,OAAO;;AAEhB,qBAAW,IAAG,CAAE;AC7B5B;;;;;;;;;;;;;;;;AAwBA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,cAAY,GAAG,SAAS,GAAG,MAAM,mDAAmD,GAAG;AACvF,aAAO,OAAM,IAAI,OAAO;;AAEhB,qBAAW,IAAG,CAAE;AC7B5B;;;;;;;;;;;;;;;;AAyCA,oCAAgC;AAC5B,sBAAgB,iBAAgB,QAAQ,UAAU,WAAW;AAC7D,UAAI,QAAQ;AACR,cAAM,QAAQ,OAAO;;AAEzB,UAAI,QAAQ,QAAQ,OAAO;AACvB,cAAM,MAAM,4EACW,QAAQ,oBAAoB;;AAEvD,qBAAe,CAAE,QAAQ;AACzB,oBAAc,CAAE;AAChB,aAAO,QAAO,cAAc;AACxB,kBAAU,SAAQ,QAAQ,SAAS;AACnC,aAAK,CAAC;AACN,eAAO;SACR,QAAQ,MAAiB,UAAS;;AAE7B,qBAAW,IAAG,CAAE;AC1D5B;;;;;;;;;;;;;;;;AAqCA;AACI,cAAO,OAAM,UAAU,aAAa,MAAM,6DAC3B,OAAM;AACrB,qBAAe,CAAE;AACjB,aAAO,QAAO,cAAc;AAExB,mCAA2B,OAAM,MAAM,OAAM,MAAM,SAAS;AAC5D,sBAAc,OAAM,OAAO;AAC3B,wBAAgB,OAAM,KAAK,OAAO;AAClC,uBAAe,SAAQ,IAAI;AAC3B,eAAO,OAAO,QAAQ,OAAM;SAC7B,QAAQ,MAAqB;;AAExB,gBAAO,IAAG,CAAE;AClDxB;;;;;;;;;;;;;;;;AAsCA;AACI,cAAO,OAAM,UAAU,aAAa,MAAM,8DAC3B,OAAM;AACrB,qBAAe,CAAE;AACjB,aAAO,QAAO,cAAc;AAExB,mCAA2B,OAAM,MAAM,OAAM,MAAM,SAAS;AAC5D,sBAAc,OAAM,OAAO;AAC3B,wBAAgB,SAAQ,QAAO,CAAC,OAAO;AACvC,uBAAe,SAAQ,KAAK;AAC5B,eAAO,SAAQ,QAAQ,OAAM;SAC9B,QAAQ,MAAqB;;AAExB,iBAAQ,IAAG,CAAE;ACnDzB;;;;;;;;;;;;;;;;AA4CA;AACI,iCAA2B,OAAM,MAAM,OAAM,MAAM,SAAS;AAC5D,oBAAc,OAAM,OAAO;AAC3B;AACA,UAAI,sBAAsB;AACtB,6BAAqB,SAAQ,QAAO,CAAC,OAAO;AAC5C,cAAM,KAAK;;AAKX,4BAAoB,CAAC,OAAO,IAAK,sBAAqB;AACtD,0BAAkB,SAAQ,KAAK,SAAQ,CAAC,OAAO;AAC/C,0BAAkB,SAAQ,KAAK,SAAQ,CAAC,OAAO;AAC/C,8BAAsB,SAAQ,OAAM,WAAW,CAAC,GAAG,IAAI,CAAC,OAAO,qBAAqB,KAAK;AACzF,8BAAsB,KAAI,SAAQ,OAAM,WAAW,CAAC,GAAG,IAAI,CAAC,OAAO,qBAAqB,KAAK,IAAI,QAAO;AACxG,kBAAU,QAAO,CAAC,WAAW,gBAAgB;AAC7C,kBAAU,QAAO,CAAC,WAAW,gBAAgB;AAC7C,6BAAqB,SAAQ,SAAQ,GAAG,IAAI,CAAC,YAAY,IAAI,YAAY;AACzE,cAAM,KAAK;;AAEf,YAAM,KAAK;AAEX,UAAI,OAAM,SAAS,KAAK,OAAM,MAAM,OAAO;AACvC,qBAAa;AACb,uBAAc,OAAM,MAAM;AAC1B,cAAM,SAAQ,KAAK,CAAC,QAAO,IAAI,MAAM,KAAK,QAAO,IAAI,MAAM;AAC3D,aAAK;;AAET,aAAO;;AAEC,kBAAS,IAAG,CAAE;ACrEnB,0DAAqD;AACxD,uBAAiB;AACjB,UAAI,OAAQ,oBAAqB;AAC7B,gBAAO,EAAE,MAAM,QAAQ,oBAAoB,GAAG,MAAM;AACpD,qBACI,IAAI,MAAM,iBAAiB,KAAK,EAAE,MAAM,QAAQ;;AAGpD,0BAAkB,gBAAgB,OAAO;AACrC,cAAI,UAAU;AACV,sBAAS;;AAEb,iBAAO;WACR;AACH,gBAAO,aAAa,GAAG,MAAM;AAC7B
,yBAAiB,gBAAgB,QAAQ;AAGzC,YAAI,aAAa;AACb,wBAAc,gBAAgB,OAAO,UAAU,IAAI,IAAI,IAAI,IAAI;AAC/D,0BAAgB,YAAY,EAAE,MAAM,QAAQ;;AAEhD,gBAAO,EAAE,MAAM,UAAU,gBAAgB,OAAO,UAAU,IAAI,IAAI,MAAM;AACxE,qBAAa;;AAEjB,aAAO;;AC/BX;;;;;;;;;;;;;;;;AAyDA,+CAA2C;AACvC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,sBAAgB;AACZ,sBAAc,gBAAe,MAAM,GAAG,OAAO;AAC7C,2BAAmB,kBAAiB,IAAI,iBAAiB;AACzD,eAAO,SAAQ,MAAM,IAAI,YAAY;;AAEzC,qBAAe,CAAE,GAAG;AACpB,mBAAa,CAAE,iBAAiB;AAChC,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,SAAQ;;AAE9D,mBAAS,IAAG,CAAE;ACpE1B;;;;;;;;;;;;;;;;AA2CA;AACI,cAAO,OAAM,UAAU,WAAW,MAAM,mDAAmD,OAAM;AACjG,+BAAyB,OAAM,MAAM,OAAM,MAAM,SAAS;AAC1D,oBAAc,OAAM,OAAO;AAC3B;AACA,UAAI,aAAa,QAAQ,YAAY;AAEjC,sBAAc,OAAM,MAAM,IAAI,OAAK;AACnC,qBAAa,OAAM,MAAM,IAAI,OAAK;AAClC,aAAK,OAAM,MAAM,SAAS,KAAK;AAC/B,wBAAgB,OAAM,QAAO,OAAO;AACpC,6BAAqB;iBAEhB,aAAa,QAAQ,YAAY;AAEtC,2BAAmB,OAAM,MAAM,IAAI,OAAK;AACxC,mBAAW,OAAM,MAAM,SAAS,KAAK,YAAY;AACjD,wBAAgB,QAAO,CAAC,QAAO,OAAM,cAAc,OAAM,MAAM,SAAS;AACxE,6BAAqB;;AAGrB,wBAAgB;;AAGpB,yBAAmB,WAAU;AAC7B,2BAAqB,SAAQ,SAAQ,eAAe,aAAa,CAAC,OAAO;AACzE,kBAAY,IAAI;AAEhB,mBAAa,KAAK,MAAM,qBAAqB,KAAK;AAClD,yBAAmB,KAAK;AACxB,yBAAmB,KAAK;AACxB,mCAA6B,OAAM,YAAY,CAAC,MAAM,qBAAqB,OAAO,WAAW,MAAM,SAAS;AAC5G,mCAA6B,OAAM,YAAY,CAAC,MAAM,qBAAqB,OAAO,WAAW,MAAM,SAAS;AAC5G,0BAAoB,cAAc,MAAM;AACxC,kBAAY,cAAc,MAAM,SAAS,KAAK;AAC9C,aAAO,SAAQ,SAAQ,qBAAqB,IAAI,qBAAqB,KAAK;;AAElE,iBAAQ,IAAG,CAAE;AChFzB;;;;;;;;;;;;;;;;AAgCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,oBAAY,SAAQ,KAAK;AACzB,aAAK,CAAC;AACN,eAAO;SACR,QAAQ,MAAiB;;AAEpB,iBAAQ,IAAG,CAAE;ACzCzB;;;;;;;;;;;;;;;;AA8CA;AACI,eAAS,iBAAgB,GAAG,KAAK;AACjC,eAAS,iBAAgB,GAAG,KAAK;AACjC,OAAC,IAAI,MAAM,gBAAe,IAAI;AAC9B,kCAA2B,GAAG,OAAO,GAAG;AACxC,sBAAgB;AACZ,oBAAY,SAAQ,kBAAkB,IAAI;AAC1C,aAAK,CAAC,IAAI;AACV,eAAO;;AAEX,qBAAe,CAAE,GAAG,IAAI,GAAG;AAC3B,oBAAc;AACd,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,oBAAmB;;AAEzE,8BAAqB,IAAG,CAAE;AC5DtC;;;;;;;;;;;;;;;;AAmCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,aAAO,SAAQ,IAAI,cAAa,GAAG,OAAO,MAAM;;AAExC,oBAAW,IAAG,CAAE;ACvC5B;;;;;;;;;;;;;;;;AAoCA,oCAAgC;AAC5B,uBAAiB,qBAAqB,SAAS,WAAW;AAC1D,cAAY,SAAS,UAAU,GAAG,MAAM;AACxC,UAAI,SAAS,WAAW;AACpB,eAAO,WAAW,SAAS,IAAI;;AAEnC,mBAAa,SAAS,GAAG;AACzB,oBAAc,SAAS,GAAG;AAC1B,oBAAc,SAAS,GAAG;AAC1B,cAAY,QAAQ,MAAM,MAAM;AAChC,eAAS,QAAQ;AACb,2BAAuB,OAAO,EAAE,OAAO;AACvC,gBAAY,UAAU,EAAE,OAAO,MAAM;;AAEzC,8BAAwB,SAAS,IAAI,OAAK,WAAW,GAAG;AAOxD,aAAO,QAAO,iBAAiB;;AAEvB,kBAAS,IAAG,CAAE;AC3D1B;;;;;;;;;;;;;;;;AAiCA,+BAA0B;AACtB,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE;AAChB,aAAO,QAAO,cAAc,cAAW,SAAQ,KAAK,IAAI,QAAQ,QAAQ,MAAiB,OAAM;;AAEvF,kBAAQ,IAAG,CAAE;ACvCzB;;;;;;;;;;;;;;;;AAwDA,+DAA2D,aAAa,kBAAkB,iBAAiB,oBAAoB;AAC3H,eAAS,iBAAgB,GAAG,KAAK;AACjC,sBAAgB;AACZ,YAAI,WAAW;AACX,oBAAU,IAAI,MAAM,MAAM;;AAE9B,6BAAqB,YAAW;AAChC,YAAI,aAAa,SAAS;AACtB,gBAAM,IAAI,MAAM;;AAEpB,YAAI,iBAAiB,KAAK,gBAAgB;AACtC,gBAAM,IAAI,MAAM;;AAEpB,YAAI,iBAAiB,KAAK,mBAAmB;AACzC,gBAAM,IAAI,MAAM;;AAEpB,oCAA4B,GAAG,OAAO,MAAM;AAE5C,2BAAmB,YAAW;AAC9B,yBAAiB,GAAG,MAAM;AAC1B,mBAAW,QAAQ;AACf,gBAAM,QAAQ;AACd,cAAI,QAAQ;AACZ,mBAAS,OAAO,MAAM,GAAG;;AAE7B,aAAK,SAAQ,IAAI;AACjB,eAAQ,wBAAwB,oBAAoB,8BAA+B,mBAAkB,GAAG,OAAO,cAAc,qBAAqB,OAAO,KAAK,SAAS,WAAW,SAAS;AAC3L,gBAAQ;AACR,cAAM;AACN,kBAAU;AACV,2BAAmB,YAAW;AAE9B,mBAAW,QAAQ;AACf,cAAI,QAAQ,MAAM,QAAQ;AAC1B,kBAAQ,QAAQ;;AAGpB,qBAAa,iBAAgB,OAAO,KAAK;AAEzC,yBAAiB,KAAK,OAAO,aAAa,WAAW,QAAQ,UAAU;AACvE,2BAAmB,QAAQ,MAAM,OAAK,MAAM;AAC5C,YAAI;AACA,iBAAO,SAAQ,OAAM,IAAI,OAAO,OAAO;;AAE3C,oBAAY,SAAQ,aAAa,IAAI,OAAO,KAAK;AACjD,eAAO,SAAQ,KAAK;;AAExB,qBAAe,CAAE,GAAG;AACpB,oBAAc;QACV;QACA;QACA;QACA;QACA;QACA;QACA;QACA;;AAEJ,aAAO,QAAO,cAAc,SAAS,QAAQ,MAA
iB,eAAc;;AAEpE,0BAAgB,IAAG,CAAE;ACpHjC;;;;;;;;;;;;;;;;AAgCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,qBAAe,CAAE,GAAG;AACpB,aAAO,QAAO,cAAc;AACxB,oBAAY,SAAQ,IAAI;AACxB,aAAK,CAAC;AACN,eAAO;SACR,QAAQ,MAAiB;;AAEpB,gBAAO,IAAG,CAAE;ACzCxB;;;;;;;;;;;;;;;;AA0CO;AACH,qBAAc;AACd,UAAI,SAAS,QAAQ,MAAM,WAAW;AAClC,cAAM,IAAI,MAAM;;AAEpB,4BAAsB,YAAW,QAAQ;AACzC,UAAI,cAAc,WAAW,KAAK,cAAc,WAAW;AACvD,cAAM,IAAI,MAAM;;AAEpB,UAAI,cAAc,WAAW,KAAK,SAAS;AACvC,cAAM,IAAI,MAAM;;AAGpB,aAAO,YAAW,QAAQ,OAAO,eAAe;;ACvDpD;;;;;;;;;;;;;;;;AA0CO;AACH,qBAAc;AACd,UAAI,SAAS,QAAQ,MAAM,WAAW;AAClC,cAAM,IAAI,MAAM;;AAEpB,4BAAsB,YAAW,QAAQ;AACzC,UAAI,cAAc,WAAW,KAAK,cAAc,WAAW;AACvD,cAAM,IAAI,MAAM;;AAEpB,UAAI,cAAc,WAAW,KAAK,SAAS;AACvC,cAAM,IAAI,MAAM;;AAGpB,aAAO,YAAW,QAAQ,OAAO,eAAe;;ACvDpD;;;;;;;;;;;;;;;;AA0CO;AACH,qBAAc;AACd,UAAI,SAAS,QAAQ,MAAM,WAAW;AAClC,cAAM,IAAI,MAAM;;AAEpB,4BAAsB,YAAW,QAAQ;AACzC,UAAI,cAAc,WAAW,KAAK,cAAc,WAAW;AACvD,cAAM,IAAI,MAAM;;AAGpB,UAAI,cAAc,WAAW,KAAK,SAAS;AACvC,cAAM,IAAI,MAAM;;AAGpB,aAAO,YAAW,QAAQ,OAAO,eAAe;;ACxDpD;;;;;;;;;;;;;;;;AA0CO;AACH,qBAAc;AACd,UAAI,SAAS,QAAQ,MAAM,WAAW;AAClC,cAAM,IAAI,MAAM;;AAEpB,4BAAsB,YAAW,QAAQ;AACzC,UAAI,cAAc,WAAW,KAAK,cAAc,WAAW;AACvD,cAAM,IAAI,MAAM;;AAGpB,UAAI,cAAc,WAAW,KAAK,SAAS;AACvC,cAAM,IAAI,MAAM;;AAGpB,cAAQ,SACJ;AACJ,aAAO,YAAW,QAAQ,OAAO,eAAe;;AC1DpD;;;;;;;;;;;;;;;;AA4CA,0BAAsB,YAAY;AAC9B,iBAAW,iBAAgB,GAAG,KAAK;AACnC,UAAI,GAAG,SAAS;AACZ,cAAM,IAAI,MAAM;;AAEpB,sBAAgB,GAAG,MAAM,GAAG,MAAM,SAAS;AAC3C,UAAI,IAAI;AACJ,cAAM,IAAI,MAAM,uDAAuD,oBACxD;;AAEnB,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE,GAAG;AACnB,gCAA0B,QAAO,cAAc,OAAK,EAAE,KAAK,IAAI,GAAG,SAAS,QAAQ,MAAiB,MAAM;AAC1G,aAAO,CAAE,QAAQ;;AAET,iBAAQ,IAAG,CAAE;AC3DzB;;;;;;;;;;;;;;;;AAuCA,6CAAwC,YAAY;AAChD,UAAI,SAAS,QAAQ,UAAU;AAC3B,cAAM,IAAI,MAAM;;AAEpB,wBAAkB,IAAI,YAAY,OAAM,QAAQ,OAAO,MAAsB;AAC7E,kBAAY,QAAO,OAAO;AAC1B,mBAAa,GAAG,IAAI,IAAI,OAAO,QAAQ;AACnC,YAAI,OAAO,KAAK,UAAU;;AAE9B,aAAO,IAAI;;AAEH,4BAAmB,IAAG,CAAE;AClDpC;;;;;;;;;;;;;;;;AAsEA,+BAA2B;AAEvB,iBAAW,iBAAgB,GAAG,KAAK,UAAU;AAC7C,cAAO,GAAG,OAAO,GAAG,MAAM;AAC1B,qBAAe,CAAE,GAAG;AACpB,oBAAc,CAAE;AAChB,gCAA0B,QAAO,UAAU,QAAQ,QAAQ;AAC3D,aAAO,CAAE,QAAQ;;AAET,mBAAU,IAAG,CAAE;AC/E3B;;;;;;;;;;;;;;;;AAuCA;AACI,iBAAW,iBAAgB,GAAG,KAAK;AACnC,0BAAoB,iBAAgB,YAAY,cAAc,sBAAsB;AACpF,cAAO,OAAM,cAAc,MAAM;AACjC,qBAAe,CAAE,GAAG,IAAI,YAAY;AACpC,oBAAc,CAAE;AAChB,sBAAgB;AACZ,oBAAY,SAAQ,mBAAmB,IAAI,aAAa;AACxD,aAAK,CAAC;AACN,eAAO;;AAEX,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,oBAAoB;;AAE1E,+BAAsB,IAAG,CAAE;ACpDvC;;;;;;;;;;;;;;;;AAmCA,gCAA4B;AACxB,iBAAW,iBAAgB,GAAG,KAAK;AACnC,cAAY,QAAQ,CAAC,GAAG,MAAM,UAAU,OAAO,GAAG,MAAM,QAAQ,MAAM,UAAU,oBAAoB,GAAG,MAAM,WAAW,GAAG,MAAM;AACjI,UAAI,OAAO;AACP,gBAAQ,GAAG,MAAM;;AAErB,qBAAe,CAAE,OAAO;AACxB,oBAAc,CAAE;AAChB,sBAAgB,cAAa,SAAQ,QAAQ,IAAI;AACjD,aAAO,QAAO,cAAc,SAAS,QAAQ,MAAiB,SAAQ;;AAE9D,oBAAW,IAAG,CAAE;AC9C5B;;;;;;;;;;;;;;;;AAiCO,gDAA4C;AAC/C,aAAO,QAAO,aAAa,cAAc,WAAW,MAAM;;AClC9D;;;;;;;;;;;;;;;;AAkBO;AACH,sBAAgB;AAChB,mBAAa,GAAG,IAAI,SAAS,QAAQ;AACjC,YAAI,SAAS;AACT,kBAAQ,KAAK;;;AAGrB,uBAAiB,QAAO,WAAW;AACnC,kBAAY,QAAO,CAAC,QAAQ,QAAQ,UAAU,SAAS;AACvD,mBAAa,GAAG,IAAI,QAAQ,QAAQ;AAChC,oBAAY,SAAS,WAAW,QAAQ;AACxC,uBAAe,IAAI,UAAU;AAC7B,YAAI,OAAO,IAAI,KAAK;;AAExB,aAAO,IAAI;;AChCf;;;;;;;;;;;;;;;;AAsCA;AACI,yBAAmB,iBAAgB,WAAW,aAAa,cAAc;AACzE,mBAAa,MAAM,WAAW;AAC9B,kBAAY,UAAU,WAAW,OAAO;AACxC,UAAI,cAAc;AACd,mBAAW;;AAEf,aAAO;;AAEC,uBAAc;AC/C1B;;;;;;;;;;;;;;;;AAwCA;AACI,sBAAgB,iBAAgB,SAAQ,UAAU;AAClD,oBAAc,iBAAgB,MAAM,QAAQ,YAAY;AACxD,uBAAiB,QAAQ,OAAO,IAAI;AACpC,sBAAgB,MAAM;AACtB,0BAAoB,QAAQ;AAC5B,cAAY,UAAU,GAAG,MAAM;AAC/B,yBAAuB,YAAY,MAAM,UAAU,WAAW,UAAU,MAAM,OAAO;AACrF,wBAAkB;AAClB,mBAAa,UAAU,IAAI,WAAW,SAAS;AAC3C,uBAAe,YAAY;;A
AE/B,gCAA0B,YAAY,MAAM,GAAG,UAC1C,OAAO,CAAC,cAAc,YAAY,MAAM,WAAW;AACxD,6BAAuB,SAAQ,SAAS;AACxC,2BAAqB,SAAQ,OAAO,CAAC;AACrC,gCAA0B,MAAM,WAAW;AAC3C,sBAAgB,QAAQ,mBAAmB,CAAC;AAC5C,kBAAY,OAAO,gBAAgB,SAAS;AAE5C,UAAI,YAAW;AACX,gBAAQ;;AAEZ,UAAI,SAAS;AACT,cAAM;;AAEV,cAAQ;AACR,qBAAe;AACf,mBAAa;AACb,wBAAkB;AAClB,aAAO;;AAEC,6BAAoB;ACxEhC;;;;;;;;;;;;;;;;AAmCA;AACI,uBAAgB;AAEhB,iBAAW,iBAAgB,GAAG,KAAK;AACnC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,yBAAkB,GAAG,OAAO,GAAG,OAAO;AACtC,aAAO,SAAS,IAAI;;AAWxB;AACI,uBAAgB;AAEhB,iBAAW,iBAAgB,GAAG,KAAK;AACnC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,yBAAkB,GAAG,OAAO,GAAG,OAAO;AACtC,aAAO,KAAK,IAAI;;AAEpB;AACI,uBAAgB;AAEhB,iBAAW,iBAAgB,GAAG,KAAK;AACnC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,yBAAkB,GAAG,OAAO,GAAG,OAAO;AACtC,aAAO,MAAM,IAAI;;AAErB;AACI,uBAAgB;AAEhB,iBAAW,iBAAgB,GAAG,KAAK;AACnC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,yBAAkB,GAAG,OAAO,GAAG,OAAO;AACtC,aAAO,UAAU,IAAI;;AAEzB;AACI,uBAAgB;AAEhB,iBAAW,iBAAgB,GAAG,KAAK;AACnC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,yBAAkB,GAAG,OAAO,GAAG,OAAO;AACtC,aAAO,QAAQ,IAAI;;AAEvB;AACI,uBAAgB;AAEhB,iBAAW,iBAAgB,GAAG,KAAK;AACnC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,yBAAkB,GAAG,OAAO,GAAG,OAAO;AACtC,aAAO,aAAa,IAAI;;AAEhB,wBAAe,IAAG,CAAE;AACpB,+BAAsB,IAAG,CAAE;AAC3B,0BAAiB,IAAG,CAAE;AACtB,4BAAmB,IAAG,CAAE;AACxB,uBAAc,IAAG,CAAE;AACnB,2BAAkB,IAAG,CAAE;ACjGnC;;;;;;;;;;;;;;;;AAsCA;AACI,uBAAgB;AAEhB,iBAAW,iBAAgB,GAAG,KAAK;AACnC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,yBAAuB,GAAG,OAAO,GAAG,OAAO;AAC3C,aAAO,MAAI,IAAI;;AAYnB;AACI,uBAAgB;AAEhB,iBAAW,iBAAgB,GAAG,KAAK;AACnC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,yBAAuB,GAAG,OAAO,GAAG,OAAO;AAC3C,aAAO,IAAI,IAAI;;AAYnB;AACI,uBAAgB;AAEhB,yBAAuB,MAAK,OAAO,KAAI,OAAO;AAC9C,aAAO,IAAI,OAAM;;AAYrB;AACI,uBAAgB;AAEhB,iBAAW,iBAAgB,GAAG,KAAK;AACnC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,yBAAuB,GAAG,OAAO,GAAG,OAAO;AAC3C,aAAO,KAAI,IAAI;;AAUnB;AACI,uBAAgB;AAEhB,iBAAW,iBAAgB,GAAG,KAAK;AACnC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,yBAAuB,GAAG,OAAO,GAAG,OAAO;AAC3C,aAAO,IAAI,IAAI;;AAUnB;AACI,uBAAgB;AAEhB,iBAAW,iBAAgB,GAAG,KAAK;AACnC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,yBAAuB,GAAG,OAAO,GAAG,OAAO;AAC3C,aAAO,IAAI,IAAI;;AAUnB;AACI,uBAAgB;AAEhB,iBAAW,iBAAgB,GAAG,KAAK;AACnC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,yBAAuB,GAAG,OAAO,GAAG,OAAO;AAC3C,aAAO,QAAQ,IAAI;;AAUvB;AACI,uBAAgB;AAEhB,iBAAW,iBAAgB,GAAG,KAAK;AACnC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,yBAAuB,GAAG,OAAO,GAAG,OAAO;AAC3C,aAAO,QAAQ,IAAI;;AAYvB;AACI,uBAAgB;AAEhB,iBAAW,iBAAgB,GAAG,KAAK;AACnC,iBAAW,iBAAgB,GAAG,KAAK;AACnC,yBAAuB,GAAG,OAAO,GAAG,OAAO;AAC3C,aAAO,kBAAkB,IAAI;;AAErB,sBAAa,IAAG,CAAE;AAClB,sBAAa,IAAG,CAAE;AAClB,0BAAiB,IAAG,CAAE;AACtB,0BAAiB,IAAG,CAAE;AACtB,sBAAa,IAAG,CAAE;AAClB,sBAAa,IAAG,CAAE;AAClB,sBAAa,IAAG,CAAE;AAClB,oCAA2B,IAAG,CAAE;AAChC,sBAAa,IAAG,CAAE;AC5L9B;;;;;;;;;;;;;;;;AAmEA,4BAAwB,oBAAoB,iBAAiB;AACzD,UAAI,iBAAgB,GAAG,KAAK;AAC5B,oBAAa,SAAS,GAAG,KAAK;AAC9B,0BAAoB,MAAK;AACzB,UAAI;AACA,qBAAa,gBAAe,MAAM,EAAE;AACpC,wBAAgB,sBAA+B,MAAK,OAAO;;AAE/D,aAAO,SAAQ,OAAM;;AAEzB,oCAA+B;AAC3B,UAAI,EAAE,SAAS;AACX,eAAO,IAAI;;AAGf,UAAI,EAAE,SAAS,KAAK,SAAS;AACzB,eAAO,SAAS,SAAQ,GAAG,CAAC,MAAM,IAAG;;AAGzC,UAAI,EAAE,SAAS,KAAK,OAAO,SAAS,YAChC,MAAM,QAAQ,SAAS,KAAK,WAAW;AACvC,YAAI,OAAM;AACN,iBAAO,MAAI,IAAI,IAAI;;AAEvB,YAAI,OAAM;AACN,iBAAO,KAAI,IAAI,IAAI;;AAEvB,YAAI,OAAM;AACN,iBAAO,KAAI,IAAI,IAAI;;AAEvB,YAAI,OAAM,eAAe,OAAM;AAE3B,iBAAO,KAAK,MAAI,IAAI,IAAI,IAAI,QAAO,GAAG,WAAW;;AAErD,cAAM,IAAI,MAAM,qCAAqC;;AAGzD,UAAI,MAAM,QAAQ,SAAS,KAAK,WAAW;AACvC,YAAI,OAAM;AACN,iBAAO,KAAI,MAAI,IAAI,IAAI,KAAK,KAAK,KAAK,KAAK;;AAE/C,YAAI,OAAM;AACN,iBAAO,KAAI,MAAI,IAAI,IAAI,KAAK,KAAK,KAAK;;AAE1C,YAAI,OAAM;AACN,iBAAO,KAAI,MAAI,IAAI,IAAI,KAAK,KAAK,KAAK;;AAE1C,YAAI,OAAM,SAAS,OAAM;AAErB,iBAAO,KAAK,MAAI,OAAO,IAAI;;AAE/B,cAAM,IAAI,MAA
;AACtB,eAAO,CAAE,SAAS,MAAM,OAAM,QAAQ,OAAO;;;ACvBrD;;;;;;;;;;;;;;;;AAkBO,+BAA2B;MAC9B,YAAY;MACZ,UAAU;AACN,eAAO,CAAE,GAAG,MAAM,WAAU;;;ACrBpC;;;;;;;;;;;;;;;;AAkBO,4BAAwB;MAC3B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AAGN,kBAAU,MAAM;AAChB,eAAQ,YAAa;AACrB,sBAAc,SAAS,IAAI,QAAK,GAAE;AAClC,eAAO,CAAE,GAAG,MAAM,OAAM,IAAI,OAAO,EAAE;;;AC3B7C;;;;;;;;;;;;;;;;AA6BO,0BAAsB;MACzB,YAAY;MACZ,cAAc,CAAC,KAAK;MACpB,eAAe,CAAC;MAChB,UAAU;AACN,0BAAkB;AAClB,sBAAa;AACb,qBAAY;AACZ,yBAAiB,4BAA0C,MAAK,OAAO,KAAI;AAC3E,wBAAgB;AACZ,2BAAiB,MAAK,MAAK;AAC3B,oBAAU,KAAI,IAAI,KAAI,UAAU,IAAI,OAAM,IAAI,UAAU,QAAO;AAC/D,6BAAmB,kBAAgC,MAAK,OAAO;AAC/D,cAAI,WAAW,SAAS;AACpB,kBAAM,MAAI,KAAK;;AAEnB,iBAAO,SAAQ,KAAK,MAAK;;AAE7B,uBAAe;AACX,4BAAkB,QAAQ,OAAM;AAChC,0BAAgB,MAAM,WAAW,KAAI,QAAO,WAAU;AACtD,oBAAU,KAAI,IAAI,KAAI,GAAG;AACzB,6BAAmB,kBAAgC,KAAI,OAAO;AAC9D,cAAI,WAAW,SAAS;AACpB,kBAAM,MAAI,KAAK;;AAEnB,iBAAO,SAAQ,KAAK,KAAI;;AAE5B,eAAO,CAAE,GAAG,SAAS,GAAG;;;ACzDhC;;;;;;;;;;;;;;;;AAwBO,4BAAwB;MAC3B,YAAY;MACZ,cAAc,CAAC,KAAK;MACpB,UAAU;AACN,2BAAmB;AACnB,qBAAa,QAAQ,GAAG;AACxB,eAAO;UACH,GAAG,MAAM,MAAM,MAAM,IAAI,KAAI,IAAI;UACjC,OAAO;AACH,sBAAU,MAAM,MAAM,WAAU,KAAK,KAAI,IAAI;AAC7C,+BAAmB,kBAAiB,MAAM,OAAO,GAAG;AACpD,gBAAI,WAAW,SAAS;AACpB,oBAAM,MAAI,KAAK;;AAEnB,mBAAO,SAAQ,KAAK,MAAM;;;;;ACtC1C;;;;;;;;;;;;;;;;AAoBO,iCAA6B;MAChC,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,IAAI,IAAI,IAAI,OAAO;;;ACzB7C;;;;;;;;;;;;;;;;AAqBO,4BAAwB;MAC3B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,qBAAa,KAAI,UAAU,GAAG,IAAI,MAAK;AACvC,eAAO,CAAE,GAAG,MAAM,KAAI,IAAI,MAAK,MAAM;;;AC3B7C;;;;;;;;;;;;;;;;AAoBO,2BAAuB;MAC1B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,KAAI,IAAI,MAAK,MAAK,IAAI;;;ACzBhD;;;;;;;;;;;;;;;;AAkBO,8BAA0B;MAC7B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,SAAQ,IAAI,EAAE;;;ACvBxC;;;;;;;;;;;;;;;;AAkBO,qCAAiC;MACpC,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,yBAAiB;AACjB,mCAA2B;AACvB,iBAAQ,gBAAiB;AACzB,iBAAO,SAAQ,uBAAuB,IAAI,QAAQ;;AAEtD,uBAAe,CAAE;AACjB,0BAAkB,MAAM,QAAO,cAAc,oBAAoB,QAAQ,MAAqB,oBAAoB;AAClH,eAAO,CAAE,QAAQ;;;AC7BzB;;;;;;;;;;;;;;;;AAkBO,4CAAwC;MAC3C,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,yBAAiB;AACjB,mCAA2B;AACvB,iBAAQ,gBAAiB;AACzB,iBAAO,SAAQ,8BAA8B,IAAI,QAAQ;;AAE7D,uBAAe,CAAE;AACjB,0BAAkB,MAAM,QAAO,cAAc,oBAAoB,QAAQ,MAAqB,2BAA2B;AACzH,eAAO,CAAE,QAAQ;;;AC7BzB;;;;;;;;;;;;;;;;AAmBO,8BAA0B;MAC7B,YAAY;MACZ,UAAU;AACN,eAAQ,QAAS;AACjB,qBAAa,gBAAe,MAAM,GAAG;AACrC,eAAO,CAAE,GAAG,MAAM,SAAQ,IAAI;;;ACxBtC;;;;;;;;;;;;;;;;AAkBO,4BAAwB;MAC3B,YAAY;MACZ,UAAU;AAGN,eAAO,CAAE,GAAG,MAAM,WAAU;;;ACvBpC;;;;;;;;;;;;;;;;AAqBO,4BAAwB;MAC3B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,IAAI,IAAI,IAAI,KAAI,IAAI,GAAG,MAAM;;;AC1BvD;;;;;;;;;;;;;;;;AAqBO,mCAA+B;MAClC,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,4BAAoB;AACpB,eAAO;UAGH,WAAW,MAAM,MAAK,WAAU,YAAY;UAC5C,GAAG,MAAM,KAAI,IAAI,MAAK,WAAW,GAAG;UACpC,GAAG,MAAM,KAAI,IAAI,MAAK,WAAW,YAAY,GAAG;;;;AC/B5D;;;;;;;;;;;;;;;;AAwBO,2BAAuB;MAC1B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO;UACH,GAAG;AACC,yBAAa,QAAQ,GAAG,QAAO;AAC/B,gCAAmB,QAAO;AAC1B,2BAAc,QAAO;AACrB,uCAA2B,KAAI,IAAI;AACnC,qCAAyB,KAAI,KAAI,IAAI,cAAa,IAAI,MAAK,GAAG;AAC9D,mBAAO,MAAM,MAAM,oBAAoB;;;;;ACpCvD;;;;;;;;;;;;;;;;AAoBO,8BAA0B;MAC7B,YAAY;MACZ,eAAe,CAAC;MAChB,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,KAAI,IAAI,KAAI,GAAG,IAAI,QAAO,IAAI;;;ACzBxD;;;;;;;;;;;;;;;;AAkBO,2BAAuB;MAC1B,YAAY;MACZ,UAAU;AACN,eAAO,CAAE,GAAG,MAAM,WAAU;;;ACrBpC;;;;;;;;;;;;;;;;AAoBO,0BAAsB;MACzB,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,KAAI,IAAI,MAAK,GAAG,aAAa;;;ACzBvD;;;;;;;;;;;;;;;;AAoBO,2BAAuB;MAC1B,YAAY;MACZ,cAAc,CAAC;MAC
f,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,KAAI,KAAK,MAAK,GAAG,aAAa;;;ACzBxD;;;;;;;;;;;;;;;;AAmBO,4BAAwB;MAC3B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAQ,OAAO,QAAS;AACxB,2BAAmB,EAAE;AACrB,gCAAwB,kBAAiB,GAAG,OAAO;AAMnD,yBAAiB;AACjB,qBAAa,GAAG,IAAI,GAAG,MAAM;AACzB,mBAAS,KAAK,CAAC,OAAO,IAAI,WAAW,KAAK,OAAO,KAAK,MAAM;;AAEhE,eAAO,CAAE,GAAG,MAAM,KAAI,IAAI;;;ACpClC;;;;;;;;;;;;;;;;AAoBO,8BAA0B;MAC7B,YAAY;MACZ,eAAe,CAAC;MAChB,UAAU;AACN,oBAAY;AACZ,eAAQ,OAAQ;AAChB,yBAAiB;AACjB,yBAAiB,KAAI,IAAI;AACzB,eAAO;UACH,QAAQ,MAAM,IAAI,UAAU,KAAI,MAAI,UAAU,CAAC,MAAM,WAAW;;;;AC7B5E;;;;;;;;;;;;;;;;AAmBO,+BAA2B;MAC9B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,KAAI,IAAI,SAAQ;;;ACxB1C;;;;;;;;;;;;;;;;AAkBO,qCAAiC;MACpC,YAAY;MACZ,UAAU;AACN,eAAQ,YAAY,YAAa;AACjC,eAAO,CAAE,GAAG,MAAM,eAAe,IAAI,YAAY;;;ACtBzD;;;;;;;;;;;;;;;;AAkBO,6BAAyB;MAC5B,YAAY;MACZ,UAAU;AACN,eAAQ,QAAS;AACjB,eAAO,CAAE,GAAG,MAAM,QAAO,IAAI;;;ACtBrC;;;;;;;;;;;;;;;;AAqBO,2BAAuB;MAC1B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,IAAI,IAAI,KAAI,KAAK,MAAK,GAAG,aAAa;;;AC1BhE;;;;;;;;;;;;;;;;AAmBO,6BAAyB;MAC5B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,KAAI,IAAI,KAAI,MAAK,GAAG,YAAY;;;ACxB1D;;;;;;;;;;;;;;;;AAoBO,wCAAoC;MACvC,YAAY;MACZ,cAAc,CAAC,KAAK;MACpB,UAAU;AACN,uBAAe;AACf,oBAAY,QAAO;AACnB,qBAAa,MAAM,KAAI,IAAI,KAAI,KAAK,IAAI,GAAG;AAC3C,qBAAa,MAAM,KAAI,IAAI,KAAI,KAAK,IAAI,GAAG;AAC3C,eAAO,CAAE,GAAG,MAAM,GAAG;;;AC5B7B;;;;;;;;;;;;;;;;AAkBO,2BAAuB;MAC1B,YAAY;MACZ,UAAU;AAGN,eAAO,CAAE,GAAG,MAAM,WAAU;;;ACvBpC;;;;;;;;;;;;;;;;AAqBO,0BAAsB;MACzB,YAAY;MACZ,cAAc,CAAC,KAAK;MACpB,UAAU;AACN,uBAAe;AACf,yBAAiB,4BAA0C,EAAE,OAAO,EAAE;AACtE,qBAAa;AACT,oBAAU;AACV,6BAAmB,kBAAgC,EAAE,OAAO;AAC5D,cAAI,WAAW,SAAS;AACpB,kBAAM,MAAI,KAAK;;AAEnB,iBAAO,SAAQ,KAAK,EAAE;;AAE1B,qBAAa;AACT,oBAAU;AACV,6BAAmB,kBAAgC,EAAE,OAAO;AAC5D,cAAI,WAAW,SAAS;AACpB,kBAAM,MAAI,KAAK;;AAEnB,iBAAO,SAAQ,IAAI,MAAM,EAAE;;AAE/B,eAAO,CAAE,GAAG,MAAM,GAAG;;;AC3C7B;;;;;;;;;;;;;;;;AAqBO,0BAAsB;MACzB,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,gCAAwB,EAAE,MAAM;AAChC,eAAQ,QAAS;AACjB,qBAAa,gBAAe,MAAM,EAAE;AACpC,aAAK,QAAQ;AACT,0BAAgB,SAAQ;;AAE5B,2BAAmB,SAAQ,IAAI;AAC/B,qBAAa,KAAI,YAAY,OAAK,EAAE,OAAO;AAC3C,eAAO,CAAE,GAAG,MAAM;;;AClC1B;;;;;;;;;;;;;;;;AAoBO,0BAAsB;MACzB,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,IAAI,IAAI,OAAO,IAAI;;;ACzB7C;;;;;;;;;;;;;;;;AAqBO,2BAAuB;MAC1B,YAAY;MACZ,eAAe,CAAC;MAChB,UAAU;AACN,oBAAY;AACZ,eAAO,CAAE,GAAG,MAAM,KAAI,IAAI,QAAO,IAAI,OAAO,KAAK;;;AC1BzD;;;;;;;;;;;;;;;;AAoBO,2BAAuB;MAC1B,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,oBAAY;AACZ,eAAQ,QAAS;AACjB,qBAAa;AACT,sBAAY,WAAU;AAGtB,cAAI,EAAE,SAAS;AACX,yBAAa,GAAG,IAAI,KAAK,IAAI,EAAE;AAC3B,sBAAQ,MAAI,OAAO,OAAM,IAAI,CAAC,IAAI,EAAE,MAAM,KAAK,CAAC,EAAE,MAAM;;qBAGvD,EAAE,SAAS;AAChB,yBAAa,GAAG,IAAI,KAAK,IAAI,EAAE;AAC3B,2BAAa,GAAG,IAAI,KAAK,IAAI,EAAE;AAC3B,wBAAQ,MAAI,OAAO,OAAM,IAAI,CAAC,IAAI,EAAE,MAAM,IAAI,IAAI,EAAE,MAAM,KAAK;kBAC3D,EAAE,MAAM;kBAAI,EAAE,MAAM;;;;qBAK3B,EAAE,SAAS;AAChB,yBAAa,GAAG,IAAI,KAAK,IAAI,EAAE;AAC3B,2BAAa,GAAG,IAAI,KAAK,IAAI,EAAE;AAC3B,6BAAa,GAAG,IAAI,KAAK,IAAI,EAAE;AAC3B,0BACI,MAAI,OAAO,OAAM,IAAI,CAAC,IAAI,EAAE,MAAM,IAAI,IAAI,EAAE,MAAM,IAAI,IAAI,EAAE,MAAM,KAAK,CAAC,EAAE,MAAM,IAAI,EAAE,MAAM,IAAI,EAAE,MAAM;;;;qBAKnH,EAAE,SAAS;AAChB,yBAAa,GAAG,IAAI,KAAK,IAAI,EAAE;AAC3B,2BAAa,GAAG,IAAI,KAAK,IAAI,EAAE;AAC3B,6BAAa,GAAG,IAAI,KAAK,IAAI,EAAE;AAC3B,+BAAa,GAAG,IAAI,KAAK,IAAI,EAAE;AAC3B,4BACI,MAAI,OAAO,OAAM,IAAI;sBACjB,IAAI,EAAE,MAAM;sBAAI,IAAI,EAAE,MAAM;sBAAI,IAAI,EAAE,MAAM;sBAC5C,IAAI,EAAE,MAAM;uBACb,CAAC,EAAE,MAAM,IAAI,EAAE,MAAM,IAAI,EAAE,MAAM,IAAI,EAAE,MAAM;;;;;;AAOp
E,kBAAM,IAAI,MAAM,2DACT,EAAE;;AAEb,iBAAO;;AAEX,eAAO,CAAE,GAAG;;;AC3EpB;;;;;;;;;;;;;;;;AAmBO,gCAA4B;MAC/B,YAAY;MACZ,UAAU;AACN,+BAAuB;AACvB,eAAQ,QAAS;AACjB,yBAAiB,wBAAiC;AAClD,eAAO,CAAE,GAAG,MAAM,WAAU,IAAI;;;ACzBxC;;;;;;;;;;;;;;;;AAkBO,6BAAyB;MAC5B,YAAY;MACZ,UAAU;AACN,4BAAoB;AACpB,eAAQ,QAAS;AACjB,eAAO,CAAE,OAAO,MAAM,MAAM,IAAI;;;ACvBxC;;;;;;;;;;;;;;;;AA0BO,yCAAqC;MACxC,YAAY;MACZ,cAAc,CAAC;MACf,UAAU;AACN,6BAAqB;AACrB,qBAAa;AACT,iBAAO,oBAAoB,IAAI;;AAEnC,eAAO,CAAE,GAAG;;;AAGpB;AAII,iCAA2B,QAAQ,SAAS,WAAU;AACtD,uBAAiB,OAAO,GAAG;AAC3B,uBAAiB,aAAa,SAAS,QAAO,GAAG;AACjD,uBAAiB,SAAS,OAAO,WAAW;AAC5C,mBAAa,GAAG,IAAI,UAAU,EAAE;AAC5B,qBAAa,WAAW,YAAY,IAAI;;AAE5C,mBAAa,WAAW,YAAY,OAAK,SAAS,OAAO;AACzD,wBAAkB,WAAU;AAC5B,aAAO,MAAM,YAAY,UAAU;;AClDvC;;;;;;;;;;;;;;;;AAkBO,gCAA4B;MAC/B,YAAY;MACZ,UAAU;AACN,eAAO,CAAE,GAAG,MAAM,WAAU;;;ACrBpC;;;;;;;;;;;;;;;;AAiHA,wBAAoB;MAChB;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;;AAEJ,iCAA6B;AACzB,uBAAiB;;ACvNrB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,IAAI;;ACrBf;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,KAAK;;ACrBhB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,QAAQ;AACrB,WAAK;AACL,aAAO,MAAM;;ACrBjB;;;;;;;;;;;;;;;;AAsBA,YAAO,UAAU,YAAY;AACzB,WAAK;AACL,aAAO,UAAU,MAAM;;ACxB3B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,MAAI,MAAM;;ACpBrB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,IAAI,MAAM,MAAM;;ACpB3B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,IAAI,MAAM,MAAM;;ACpB3B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,SAAS;AACtB,WAAK;AACL,aAAO,OAAO,MAAM;;ACpBxB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,SAAS;AACtB,WAAK;AACL,aAAO,OAAO,MAAM;;ACpBxB;;;;;;;;;;;;;;;;AAsBA,YAAO,UAAU,WAAW;AACxB,WAAK;AACL,cAAO,KAAK,SAAS,GAAG,MAAM;AAC9B,aAAO,SAAQ,MAAM;;ACzBzB;;;;;;;;;;;;;;;;AA0BA,YAAO,UAAU,SAAS;AACtB,WAAK;AACL,aAAO,MAAK,MAAM;;AC5BtB;;;;;;;;;;;;;;;;AAqBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,SAAQ,MAAM,CAAC,KAAK;;ACvB/B;;;;;;;;;;;;;;;;AAyBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,SAAQ,MAAM,CAAC,MAAM;;AC3BhC;;;;;;;;;;;;;;;;AA0BA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,SAAQ,MAAM,CAAC,MAAM,SAAS;;AC5BzC;;;;;;;;;;;;;;;;AA2BA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,SAAQ,MAAM,CAAC,MAAM,SAAS,OAAO;;AC7BhD;;;;;;;;;;;;;;;;AA6BA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,SAAQ,MAAM,CAAC,MAAM,SAAS,OAAO,QAAQ;;AC/BxD;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,KAAK;;ACrBhB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,QAAQ;AACrB,WAAK;AACL,aAAO,MAAM;;ACrBjB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,KAAK;;ACrBhB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,QAAQ;AACrB,WAAK;AACL,aAAO,MAAM,MAAM;;ACpBvB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,QAAQ;AACrB,WAAK;AACL,aAAO,MAAM;;ACrBjB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,UAAU;AACvB,WAAK;AACL,aAAO,SAAQ,MAAM,YAAY,SAAS,MAAK;;ACpBnD;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,iBAAiB;AAC9B,WAAK;AACL,aAAO,eAAe,MAAM,YAAY;;ACpB5C;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,YAAY;AACzB,WAAK;AACL,aAAO,UAAU,MAAM,OAAM,WAAU,QAAQ,QAAO;;ACpB1D;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,cAAc;AAC3B,WAAK;AACL,aAAO,YAAY,MAAM;;ACpB7B;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,MAAK,MAAM;;ACrBtB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,KAAK;;ACrBhB;;;;;;;;;;;;;;;;AAmBA,YAAO,
UAAU,cAAc;AAC3B,WAAK;AACL,aAAO,YAAY,MAAM,MAAK;;ACrBlC;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,SAAS;AACtB,WAAK;AACL,UAAI,aAAa;AACb,YAAI,CAAC;;AAET,aAAO,QAAO,CAAC,MAAM,GAAG,IAAI;;ACvBhC;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,SAAS;AACtB,WAAK;AACL,aAAO,OAAO,MAAM,QAAQ,QAAQ,MAAK,YAAY,UAAU;;ACpBnE;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,kBAAkB;AAC/B,WAAK;AACL,aAAO,gBAAgB,MAAM,QAAQ,aAAa,SAAS,MAAK;;ACpBpE;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,SAAS;AACtB,WAAK;AACL,aAAO,QAAO,MAAM,QAAQ,SAAS,MAAK,YAAY,WAAW;;ACpBrE;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,IAAI;;ACrBf;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,KAAK;;ACrBhB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,SAAS;AACtB,WAAK;AACL,aAAO,QAAO,MAAM,MAAM,WAAW;;ACpBzC;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,eAAe;AAC5B,WAAK;AACL,aAAO,cAAa,MAAM,WAAW;;ACpBzC;;;;;;;;;;;;;;;;AAsBA,YAAO,UAAU,kBAAkB;AAC/B,uBAAgB;AAChB,WAAK;AACL,aAAO,iBAAgB,MAAM,QAAQ,SAAS,MAAK,YAAY,WAAW;;ACzB9E;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,kBAAkB;AAC/B,WAAK;AACL,aAAO,iBAAgB,MAAM,QAAQ,SAAS,MAAK,YAAY,WAAW;;ACpB9E;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,aAAa;AAC1B,WAAK;AACL,aAAO,WAAW,MAAM,QAAQ,SAAS,MAAK,WAAW;;ACpB7D;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,WAAW;AACxB,WAAK;AACL,aAAO,SAAS,MAAM;;ACpB1B;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,YAAY;AACzB,WAAK;AACL,aAAO,UAAU,MAAM;;ACrB3B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,IAAI,MAAM;;ACpBrB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,KAAI,MAAM;;ACpBrB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,KAAI;;ACpBf;;;;;;;;;;;;;;;;AAsBA,YAAO,UAAU,cAAc;AAC3B,WAAK;AACL,aAAO,YAAY,MAAM;;ACxB7B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,QAAQ;AACrB,WAAK;AACL,aAAO,MAAM,MAAM;;ACpBvB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,IAAI;;ACrBf;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,IAAI;;ACrBf;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,aAAa;AAC1B,WAAK;AACL,aAAO,WAAW,MAAM;;ACpB5B;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,QAAQ;AACrB,WAAK;AACL,aAAO,MAAM;;ACrBjB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,IAAI;;ACrBf;;;;;;;;;;;;;;;;AAqBA,YAAO,UAAU,UAAU;AACvB,WAAK;AACL,aAAO,SAAQ,MAAM,CAAC,KAAK;;ACvB/B;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,QAAQ;AACrB,WAAK;AACL,aAAO,MAAM;;ACrBjB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,WAAW;AACxB,WAAK;AACL,aAAO,SAAS,MAAM;;ACpB1B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,SAAS;AACtB,WAAK;AACL,aAAO,OAAO,MAAM,SAAS;;ACpBjC;;;;;;;;;;;;;;;;AAsBA,YAAO,UAAU,qBAAqB;AAClC,WAAK;AACL,aAAO,mBAAmB,MAAM;;ACxBpC;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,eAAe;AAC5B,WAAK;AACL,aAAO,aAAa,MAAM;;ACpB9B;;;;;;;;;;;;;;;;AAsBA,YAAO,UAAU,gBAAgB;AAC7B,WAAK;AACL,aAAO,cAAc,MAAM;;ACxB/B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,UAAU;AACvB,WAAK;AACL,aAAO,QAAQ,MAAM;;ACpBzB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,KAAK;;ACrBhB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,QAAQ;AACrB,WAAK;AACL,aAAO,MAAM;;ACrBjB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,WAAW;AACxB,WAAK;AACL,aAAO,WAAS;;ACrBpB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,QAAQ;AACrB,WAAK;AACL,aAAO,MAAM;;ACrBjB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,QAAQ;AACrB,WAAK;AACL,aAAO,QAAM;;ACrBjB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,YAAY;AACzB,WAAK;AACL,aAAO,UAAU,MAAM;;ACpB3B;;;;;;;;;;;;;;;;AAsBA,YAAO,UAAU,kBAAkB;AAC/B,WAAK;AACL,aAAO,gBAAgB,MAAM;;ACxBjC;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,YAAY;AACzB,WAAK;AACL,aAAO,UAAU,MAAM;;ACpB3B;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,aAAa;AAC1B,WAAK;AACL,aAAO,WAAW,MAAM;;ACrB5B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,KAAK,MAAM;;ACpBtB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,6BAA6B;AAC1C,WAAK;AACL,aAAO,2BAA2B,MAAM,aAAa,MAAM,OAAO;;ACpBtE;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,aAAa;AAC1B,WAAK;AACL,aAAO,WAAW;;ACrBtB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,aAAa;AAC1B,WAAK;AACL,aAAO,WAAW,MAAM;;ACrB5B;;;;;;;
;;;;;;;;;AAkBA,YAAO,UAAU,YAAY;AACzB,WAAK;AACL,aAAO,UAAU,MAAM,MAAM;;ACpBjC;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,KAAI;;ACrBf;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,QAAQ;AACrB,WAAK;AACL,aAAO,MAAM;;ACrBjB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,aAAa;AAC1B,WAAK;AACL,aAAO,WAAW,MAAM;;ACpB5B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,aAAa;AAC1B,WAAK;AACL,aAAO,WAAW;;ACpBtB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,YAAY;AACzB,WAAK;AACL,aAAO,UAAU,MAAM;;ACpB3B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,aAAa;AAC1B,WAAK;AACL,aAAO,WAAW,MAAM;;ACpB5B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,SAAS;AACtB,WAAK;AACL,aAAO,OAAO,MAAM,GAAG,YAAY;;ACpBvC;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,UAAU;AACvB,WAAK;AACL,aAAO,SAAQ,MAAM,YAAY,SAAS,MAAK;;ACpBnD;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,KAAI,MAAM,MAAM;;ACpB3B;;;;;;;;;;;;;;;;AAsBA,YAAO,UAAU,gBAAgB;AAC7B,WAAK;AACL,aAAO,cAAc,MAAM;;ACxB/B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,UAAU;AACvB,WAAK;AACL,aAAO,QAAQ,MAAM;;ACpBzB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,KAAK,MAAM,MAAM;;ACpB5B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,KAAI,MAAM,MAAM;;ACpB3B;;;;;;;;;;;;;;;;AAsBA,YAAO,UAAU,gBAAgB;AAC7B,WAAK;AACL,aAAO,cAAc,MAAM;;ACxB/B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,UAAU;AACvB,WAAK;AACL,aAAO,QAAQ,MAAM;;ACpBzB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,YAAY;AACzB,WAAK;AACL,aAAO,UAAU,MAAM,UAAU;;ACpBrC;;;;;;;;;;;;;;;;AAsBA,YAAO,UAAU,YAAY;AACzB,WAAK;AACL,aAAO,UAAU,MAAM;;ACxB3B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,IAAI,MAAM;;ACpBrB;;;;;;;;;;;;;;;;AAsBA,YAAO,UAAU,YAAY;AACzB,WAAK;AACL,aAAO,UAAU,MAAM;;ACxB3B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,KAAI,MAAM;;ACpBrB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,IAAI;;ACrBf;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,KAAK,MAAM,KAAK,MAAM;;ACrBjC;;;;;;;;;;;;;;;;AAsBA,YAAO,UAAU,iBAAiB;AAC9B,WAAK;AACL,aAAO,eAAe,MAAM;;ACxBhC;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,WAAW;AACxB,WAAK;AACL,aAAO,SAAS,MAAM;;ACpB1B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,SAAS,0BAA2B,cAAc;AAC/D,WAAK;AACL,aAAO,QAAO,MAAM,OAAO,SAAS;;ACpBxC;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,WAAW;AACxB,WAAK;AACL,aAAO,UAAS;;ACrBpB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,KAAI,MAAM,UAAU;;ACpB/B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,KAAK,MAAM,aAAa,aAAa,SAAS,cAAc;;ACpBvE;;;;;;;;;;;;;;;;AAsBA,YAAO,UAAU,YAAY;AACzB,WAAK;AACL,aAAO,UAAU,MAAM;;ACxB3B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,IAAI,MAAM;;ACpBrB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,QAAQ;AACrB,WAAK;AACL,aAAO,OAAM,MAAM;;ACpBvB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,KAAK,MAAM,MAAM;;ACpB5B;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,aAAa;AAC1B,WAAK;AACL,aAAO,WAAW;;ACrBtB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,MAAK;;ACpBhB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,QAAQ;AACrB,WAAK;AACL,aAAO,OAAM;;ACpBjB;;;;;;;;;;;;;;;;AAyBA,YAAO,UAAU,YAAY;AACzB,WAAK;AACL,aAAO,SAAQ,MAAM,EAAE;;AC3B3B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,UAAU;AACvB,WAAK;AACL,aAAO,SAAQ,MAAM;;ACpBzB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,iBAAiB;AAC9B,WAAK;AACL,aAAO,gBAAe,MAAM,YAAY;;ACpB5C;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,wBAAwB;AACrC,WAAK;AACL,aAAO,sBAAsB,MAAM,YAAY;;ACpBnD;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,UAAU;AACvB,WAAK;AACL,aAAO,SAAQ,MAAM;;ACpBzB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,KAAK;;ACrBhB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,QAAQ;AACrB,WAAK;AACL,aAAO,MAAM;;ACrBjB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,QAAQ;AACrB,WAAK;AACL,aAAO,MAAM;;ACrBjB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,KAAK;;ACpBhB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,kBAAkB;AAC/B,WAAK;AACL,aAAO,gBAAgB,MAAM,iBAAiB,iBAAiB,SAAS,MAAK,UAAU;;ACpB3F;
;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,UAAU;AACvB,WAAK;AACL,aAAO,SAAQ;;ACrBnB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,KAAK;;ACrBhB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,IAAI;;ACrBf;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,KAAK;;ACrBhB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,QAAQ;AACrB,WAAK;AACL,aAAO,OAAM,MAAM,OAAO;;ACrB9B;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,UAAU;AACvB,WAAK;AACL,aAAO,SAAQ,MAAM;;ACrBzB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,WAAW;AACxB,WAAK;AACL,aAAO,SAAS;;ACrBpB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,iBAAiB;AAC9B,WAAK;AACL,aAAO,eAAe,MAAM,YAAY;;ACpB5C;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,QAAQ;AACrB,WAAK;AACL,aAAO,OAAM,MAAM,iBAAiB;;ACpBxC;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,KAAK;;ACrBhB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,SAAS;AACtB,WAAK;AACL,aAAO,OAAO;;ACrBlB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,oBAAoB;AACjC,WAAK;AACL,aAAO,kBAAkB,MAAM;;ACpBnC;;;;;;;;;;;;;;;;AAsBA,YAAO,UAAU,0BAA0B;AACvC,WAAK;AACL,aAAO,wBAAwB,MAAM;;ACxBzC;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,UAAU;AACvB,WAAK;AACL,aAAO,QAAQ,MAAM;;ACpBzB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,QAAQ;AACrB,WAAK;AACL,iCAA2B,aAAa,UAAS,CAAC,MAAM,KAAK,CAAC,MAAM,GAAG;AACvE,aAAO,MAAM,oBAAoB;;ACrBrC;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,MAAK,MAAM;;ACrBtB;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,eAAe;AAC5B,WAAK;AACL,aAAO,cAAa,MAAM,OAAO,KAAK,SAAS,WAAW,SAAS,cAAc,aAAa;;ACrBlG;;;;;;;;;;;;;;;;AAsBA,YAAO,UAAU,YAAY;AACzB,WAAK;AACL,aAAO,UAAU,MAAM;;ACxB3B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,IAAI,MAAM;;ACpBrB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,MAAI,MAAM,MAAM;;ACpB3B;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,MAAM;AACnB,WAAK;AACL,aAAO,IAAI;;ACrBf;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,OAAK;;ACrBhB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,MAAK,MAAM;;ACpBtB;;;;;;;;;;;;;;;;AAuBA,YAAO,UAAU,SAAS;AACtB,WAAK;AACL,aAAO,MAAK,MAAM;;ACzBtB;;;;;;;;;;;;;;;;AAuBA,YAAO,UAAU,UAAU;AACvB,WAAK;AACL,aAAO,MAAK,MAAM;;ACzBtB;;;;;;;;;;;;;;;;AAuBA,YAAO,UAAU,QAAQ;AACrB,WAAK;AACL,aAAO,MAAK,MAAM;;ACzBtB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,OAAO;AACpB,WAAK;AACL,aAAO,KAAK,MAAM,GAAG;;ACpBzB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,YAAY;AACzB,WAAK;AACL,aAAO,WAAU,MAAM;;ACpB3B;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,SAAS;AACtB,WAAK;AACL,aAAO,OAAO,MAAM;;ACpBxB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,qBAAqB;AAClC,WAAK;AACL,aAAO,mBAAmB,MAAM,YAAY;;ACpBhD;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,UAAU;AACvB,WAAK;AACL,aAAO,QAAQ,MAAM;;ACpBzB;;;;;;;;;;;;;;;;AAkBA,YAAO,UAAU,QAAQ;AACrB,WAAK;AACL,aAAO,MAAM,WAAW,MAAM;;ACpBlC;;;;;;;;;;;;;;;;AAmBA,YAAO,UAAU,YAAY;AACzB,WAAK;AACL,aAAO,WAAU;;ACrBrB;;;;;;;;;;;;;;;;ACAA;;;;;;;;;;;;;;;;ACAA;;;;;;;;;AAUA;AAIO;AACH,UAAI,YAAY;AACZ,mBAAW,WAAU;;AAEzB,aAAO;;AAMJ;AACH,iBAAW;;AAKR;AACH,aAAO;;AC/BX;;;;;;;;;iCAqBoC;MAChC;AACI,cAAM;AAEN,eAAO,eAAe,MAAM,eAAe;;;+BAMjB;MAC9B;AACI,cAAM;AAEN,eAAO,eAAe,MAAM,aAAa;;;6BAMjB;MAC5B;AACI,cAAM;AAEN,eAAO,eAAe,MAAM,WAAW;;;sCAMN;MACrC;AACI,cAAM;AAEN,eAAO,eAAe,MAAM,oBAAoB;;;iCAMpB;MAChC;AACI,cAAM;AAEN,eAAO,eAAe,MAAM,eAAe;;;6BAMnB;MAC5B;AACI,cAAM;AAEN,eAAO,eAAe,MAAM,WAAW;;;AC3E/C;;;;;;;;;AAkBO;AACH,UAAI,MAAM,QAAQ;AAEd,uBAAe;AACf,qBAAa,GAAG,IAAI,WAAW;AAC3B,qBAAW,SAAS,OAAO;;AAE/B,eAAO;;AAGP,yBAAiB,IAAI,MAAM;AAC3B,iBAAS,KAAK;AACd,eAAO;;;AAGR;AACH,UAAI,CAAC;AACD,cAAM,IAAI,eAAe;;;AAM1B;AACH,oBAAc;AACd,yBAAmB;AACf,YAAI,SAAS;AACT;;;AAGR,aAAO;;AAOJ;AACH,UAAI,GAAG,WAAW;AACd,eAAO,GAAG;;AAEd,aAAO;;AAWJ;AACH,UAAI,MAAM,QAAQ;AACd,eAAO;;AAEX,aAAO,CAAC;;AAML;AACH,yBAAmB,OAAO;AAC1B,mBAAa;AACb,wBAAkB;AACd,YAAI,IAAI,MAAM;AACV,gBAAM,IAAI,WAAW,UAAU;;AAEnC,YAAI,WAAW;AACX,mBAAS,SAAS;;AAEtB,iBAAS,GAAG,SAAS,KAAK,IAAI,IAAI;;AAEt
C,aAAO;;AAMJ;AACH,2BAAqB,KAAK,QAAQ,wBAAwB;AAC1D,uBAAiB,aAAa,QAAQ,mBAAmB,SAAS;AAKlE,UAAI,SAAS,OAAO;AAChB,eAAO;;AAEX,aAAO,YAAY;;AAEhB;AAEH,UAAI,WAAW,UAAU;AACrB,eAAO;;AAGX,UAAI,WAAW,QAAQ,SAAS;AAC5B,eAAO;;AAEX,aAAO,WAAW,QAAQ,eAAe,WAAW,GAAG;;AAG3D,iCAA6B;AACtB;AACH,UAAI,aAAa,QAAQ,aAAa;AAClC,eAAO;;AAEX,mBAAa;AACb,WAAK,eAAe,SAAS;AAC7B,WAAK,YAAY,SAAS;AAC1B,aAAO;;AAaX;AACI,UAAI,WAAU,QAAQ,OAAO,YAAW;AACpC;iBAEK,MAAM,QAAQ;AACnB,gBAAO,QAAQ,gBAAc,8BAA8B;;AAG3D,uBAAe,OAAO,KAAK;AAC3B,4BAAoB;AAChB,wBAAc,QAAO;AACrB,cAAI,SAAS,QAAQ,OAAO,UAAU;AAClC,gBAAI,CAAC,MAAM,QAAQ,UAAU,MAAM,YAAY,aAC3C,OAAO,MAAM,aAAa;AAC1B,sBAAO,SAAS,MAAM;;AAGtB,4CAA8B;;;;;;AAmB3C,gEAA4D,oBAAoB,0BAA0B,2BAA2B;AAExI,UAAI,OAAO,eAAe;AACtB,6BAAqB;AACrB;AACA,YAAI,gBAAgB;AAChB,eAAK,cAAc;mBAEd,gBAAgB;AACrB,eAAK,uBAAuB;;AAG5B,eAAK,cAAc;AACnB,cAAI,MAAM;AACN,kBAAM,IAAI,WAAW,WAAW,wBAAwB;SAE1C;gBAGO;;;AAM7B,eAAO;;AAIP,wBAAe;AACf,YAAI,QAAO,gBAAgB,QAAQ,QAAO,aAAa;AACnD,gBAAM,IAAI,WAAW,GAAG,gDACjB,KAAK,UAAU;;;AAG1B,0BAAkB,QAAO;AACzB;AACA,YAAI,aAAa;AACb,WAAC,KAAK,cAAc,cAAc;mBAE7B,aAAa;AAClB,WAAC,KAAK,cAAc,uBAAuB;mBAEtC,aAAa;AAClB,WAAC,KAAK,cAAc,cAAc;;AAEtC,YAAI,OAAO;AACP,gBAAM,IAAI,WAAW,WAAW,wBAAwB;SAE1C;gBAGO;;AAKzB,YAAI,cAAc;AAMd,wCAA8B;AAC9B,4BAAkB,OAAO,KAAK;AAC1B,kCAAsB,OAAO,uBAAuB;;AAExD,4BAAkB,OAAO,KAAK;AAC1B,kCAAsB,OAAO,cAAc;;AAG/C,+BAAqB,QAAO;AAC5B,uBAAa,mBAAmB;AAChC,sCAA4B,OAAO,OAAO,IAAI;AAC9C,4BAAkB,OAAO,KAAK;AAC1B,mCAAuB,OAAO,cAAc;;AAEhD,wCAA8B,QAAO;AACrC,4BAAkB,WAAW,KAAK,QAAO,WAAW,eAAe;AACnE,mCAAyB,OAAO,OAAO,IAAI;AAC3C,iBAAO;;AAMP,sCAA4B,OAAO,OAAO,IAAI;AAC9C,4BAAkB,OAAO,KAAK;AAC1B,mCAAuB,OAAO,cAAc;;AAKhD,4BAAkB,IAAI,IAAI,QAAO;AACjC,mCAAyB,OAAO,OAAO,IAAI;AAC3C,iBAAO;;;;AASZ;AACH,aAAQ,IAAI,IAAK,KAAO,IAAI,IAAK,IAAI;;AAOlC;AACH,aAAO,KAAK,cAAc,GAAG;;AAO1B;AACH,cAAQ;aACC;AACD,iBAAO;;AAEP,gBAAM,IAAI,WAAW,kBAAkB;;;AAS5C;AACH,UAAI,MAAM,QAAQ,MAAM;AACpB,eAAO,OAAO;;AAElB,UAAI,GAAG,WAAW,GAAG;AACjB,eAAO;;AAEX,mBAAa,GAAG,IAAI,GAAG,QAAQ,EAAE;AAC7B,YAAI,GAAG,OAAO,GAAG;AACb,iBAAO;;;AAGf,aAAO;;AAOJ;AACH,UAAI,MAAM;AACN,eAAO;;AAEX,kBAAY;AAEZ,sBAAgB;AACZ,YAAI,IAAI,QAAQ,OAAO;AACnB,cAAI,KAAK;;;AAGjB,aAAO;;AAQJ;AACH,UAAI,OAAO;AACP,cAAM,IAAI,WAAW,yBAAyB,KAAK,UAAU;;AAEjE,wBAAkB;AACd,YAAI,IAAI,eAAe;AACnB,iBAAO;;;AAGf,aAAO;;AASJ;AACH,UAAI,SAAS;AACT;;AAEJ,UAAI,OAAO,QAAQ,SAAS;AACxB,cAAM,IAAI,WAAW,GAAG,wBAAwB,4BAA4B;;;AAkB7E,kEAA8D,eAAe;AAChF,eAAO,aAAa;AACpB,eAAO,aAAa;AACpB,aAAQ,MAAM,QAAQ,MAAM,EAAE,UAAU,aAAa,EAAE,UAAU,aAC7D,EAAE,MAAM,OAAK,OAAO,MAAM;;AAU3B;AACH,UAAI,MAAM,QAAQ;AACd,gBAAY,MAAM,SAAS,GAAG,MAAM,GAAG;AACvC,cAAM,QAAQ,UAAU,sBAAsB,GAAG,WAAW,IAAI,QAAQ;;AAGxE,gBAAY,OAAO,UAAU,UAAU,QAAQ,GAAG,MAAM,YAAY,0CAC7D,uBAAuB;;;AAc/B;AACH,UAAI,UAAU;AACV,eAAO;iBAEF,MAAM,QAAQ;AACnB,eAAO,MAAM,MAAM,IAAI,OAAK,uBAAuB,IAAI,KAAK,OAAO;iBAE9D,OAAO,UAAU;AACtB,eAAO,IAAI;;AAGX,eAAO,GAAG;;;AAYX;AACH,qBAAe;AACf;AACA,iBAAW;AACP,sBAAY;AACZ,YAAI,QAAM,WAAW;AACjB,iBAAO;;AAEX,mBAAW;AACX,qBAAa,EAAE,GAAG;AAClB,eAAO;;AAEX,aAAO;;AAQJ;AACH,UAAI,mBAAmB;AACnB,eAAO;;AAEX,UAAI,mBAAmB;AACnB,eAAO;;AAEX,UAAI,mBAAmB;AACnB,eAAO;;AAEX,aAAO;;AAiBJ;AACH,eAAO,cAAc,SAAS,GAAG;AACjC,2BAAqB;AACjB,iBAAO,MAAM,QAAQ,SAAS;AAC9B,iBAAO,OAAO,SAAS,GAAG;;AAE9B,aAAO,cAAc,OAAO;AACxB,YAAI,SAAS,WAAW;AACpB,iBAAO,OAAO,IAAI,WAAS,CAAC;;AAEhC,eAAO,OACF,IAAI;AACL,iBAAO,SAAS,IAAI,eAAe,CAAC,GAAG,WAAW;WAEjD,OAAO;AACR,iBAAO,iBAAiB,OAAO;WAChC;SACJ;;AClgBP;;;;;;;;;AAiBA;AACI,aAAO,KAAK,MAAM,KAAS,MAAQ,KAAQ,GAAG,IAAI,MAAM;;6BAW5B;MAC5B;AACI,eAAO;;;0BAGc;MACzB;AACI;AACA,aAAK,kBAAkB;AACvB,aAAK,cAAc;AACnB,aAAK,WACD,KAAK,YAAY,OAAO,KAAK,WAAW,KAAK;AACjD,aAAK,OAAO,KAAK,QAAQ,OAAO,KAAK,OAAO,KAAK;;MAErD;AACI,eAAO,KAAK;AACR,wBAAc,YAAY,GAAG,KAAK;AAClC,0BAAgB,YAAgB,OAA
O,GAAG,KAAK;AAC/C,iBAAO,KAAQ,GAAG,IAAQ,SAAS,MAAQ,WAAW;;;MAG9D;AACI,eAAO,CAAE,UAAU,KAAK,UAAU,MAAM,KAAK;;;AAIrD,YAAQ,YAAY;AACpB,kBAA4B;2BACE;MAC1B;AACI;AACA,aAAK,cAAc;AACnB,aAAK,OAAO,KAAK,QAAQ,OAAO,KAAK,OAAO,KAAK;;MAErD;AACI,eAAO,KAAK,MAAM,IAAQ,GAAG,MAAQ,WAAW,YAAY,GAAG,KAAK;;MAExE;AACI,eAAO,CAAE,MAAM,KAAK;;;AAI5B,aAAS,YAAY;AACrB,kBAA4B;yBACA;MACxB;AACI,eAAO,MAAS;;;AAIxB,WAAO,YAAY;AACnB,kBAA4B;6BACI;MAC5B;AACI;AACA,aAAK,kBAAkB;AACvB,aAAK,kBAAkB;AACvB,aAAK,cAAc;AACnB,aAAK,cAAc;AACnB,aAAK,WACD,KAAK,YAAY,OAAO,KAAK,WAAW,KAAK;AACjD,aAAK,WACD,KAAK,YAAY,OAAO,KAAK,WAAW,KAAK;AACjD,aAAK,OAAO,KAAK,QAAQ,OAAO,KAAK,OAAO,KAAK;AACjD,aAAK,OAAO,KAAK,QAAQ,OAAO,KAAK,OAAO,KAAK;;MAErD;AACI,eAAO,KAAK;AACR,wBAAc,YAAY,GAAG,KAAK;AAClC,0BAAgB,MAAQ,KAAQ,KAAK,MAAM,YAAgB,OAAO,KAAK,UAAU,KAAK,YAAY,KAAQ,IAAM,KAAK,MAAM;AAC3H,iBAAO,KAAQ,GAAG,IAAQ,SAAS,MAAQ,WAAW;;;MAG9D;AACI,eAAO;UACH,UAAU,KAAK;UACf,UAAU,KAAK;UACf,MAAM,KAAK;UACX,MAAM,KAAK;;;;AAKvB,eAAW,YAAY;AACvB,kBAA4B;AAGrB,sDAAkD;MACrD,SAAW;MACX,YAAc;MACd,QAAU;MACV,UAAY;;AAET;AACH,aAAO,qBAAqB;;AAEzB,4DAAuD;AAC1D,aAAO,uBAAuB,SAAQ,iBAA+B,SAAS,cAAc,eAAe;;AAExG;AACH,UAAI,cAAc;AACd,eAAO;;AAEX,UAAI,OAAO,eAAe;AACtB,0BAAkB,cAAc,4CAC5B,0CAA0C,cAC1C;AACJ,wBAAe,CAAE,WAAW,QAAQ;AACpC,eAAO,sBAAsB;iBAExB,sBAAsB;AAC3B,eAAO;;AAGP,eAAO,sBAAsB;;;AC/IrC;;;;;;;;;AAwBO;AACH,aAAO,IAAI,QAAQ;;AAOhB;AACH,aAAO,IAAI,SAAS;;AAOjB;AACH,aAAO,IAAI;;AAGR;AACH,aAAO,IAAI,WAAW;;;;;;;;;AC7C1B;;;;;;;;;AASO,qCAAiC,CAAC,iBAAiB;AACnD,sCAAkC,CAAC,SAAS,QAAQ;AACpD,mCAA+B,CAAC,OAAO;AACvC,4CAAwC,CAAC,OAAO,OAAO,UAAU;AACjE,sCAAkC,CAAC;ACb1C;;;;;;;;;AAiBA,oBAAgB,IAAI;AACb;AACH,gCAA0B,0BAA0B,cAAc;;AAE/D;AACH,gCAA0B,2BAA2B,eAAe;;AAEjE;AACH,gCAA0B,wBAAwB,YAAY;;AAElE,4BAAwB;AACxB,8BAA0B;AAInB;AACH,sBAAgB,KAAK;AACrB;AACI,oBAAY;AACZ,wBAAgB;AAChB,eAAO;;AAGP,wBAAgB;AAChB,cAAM;;;AAMd;AACI,UAAI,gBAAgB,WAAW;AAC3B,eAAO;;AAGP,eAAO,gBAAgB,KAAK,qBAAqB;;;AAQlD;AACH,UAAI,CAAC,kBAAkB;AACnB,cAAM,IAAI,MAAM,+BAAgC,aAAa;;AAEjE,aAAO,2BAA2B;;AAY/B;AACH,UAAI,CAAC,kBAAkB;AACnB,cAAM,IAAI,MAAM,+BAAgC,aAAa;;AAEjE,UAAI,CAAC,QAAQ,IAAI;AACb,gBAAQ,IAAI,YAAY;;AAE5B,qBAAc,QAAQ,IAAI;AAC1B,cAAQ,IAAI,YAAY,QAAQ,IAAI,cAAc;AAClD,UAAI,SAAQ;AACR,uBAAe,GAAG,cAAc;AAGhC,gBAAQ,IAAI,QAAQ;AACpB,eAAO;;AAGP,eAAO;;;AAGf,4BAAwB,IAAI,OAAO;AAM5B;AACH,aAAO,CAAC,CAAC,KAAK,MAAM;;ACvGxB;;;;;;;;;AAyBO;AACH,aAAO,MAAM,SAAS,EAAE,YAAY;;AASjC;AACH,UAAI,SAAS;AACT,gBAAQ;;AAEZ,UAAI,OAAO;AACP,cAAM,OAAM;;AAEhB,kBAAW;AACX,mBAAa,OAAO,IAAI,KAAK,EAAE;AAC3B,iBAAQ,OAAM;;AAElB,aAAO;;AAOX;AACI,eAAQ,MAAM,QAAQ,UAAS,IAAI,aAAa,UAAS;AACzD,aAAO,UAAS;;AAOb;AACH,aAAO,KAAQ,UAAU,SAAQ,WAAW;;AAOzC;AACH,aAAO,KAAQ,UAAU,SAAQ,WAAW;;AAOzC;AACH,aAAO,MAAQ,UAAU,SAAQ,WAAW;;AAOzC;AACH,aAAO,MAAI,UAAS,OAAM;;AAOvB;AACH,uBAAiB,IAAQ,UAAU,SAAQ,QAAO,OAAK;AACvD,wBAAkB,MAAQ,KAAQ,UAAU,WAAW,WAAW;AAClE,aAAO,YAAY,OAAM;;AAOtB;AACH,0BAAoB,OAAM,QAAQ,KAAK,UAAU,IAAI;AACrD,qBAAe,KAAK,MAAO,aAAY,SAAS,KAAK;AACrD,sBAAgB,KAAK,KAAM,aAAY,SAAS,KAAK;AACrD,UAAI,WAAW;AACX,eAAO,YAAY;;AAEvB,aAAQ,aAAY,UAAU,YAAY,YAAY;;AASnD;AACH,UAAI,MAAM;AACN,cAAM,IAAI,WAAW,QAAQ,iBAAiB;;AAElD,kBAAY;AACZ,mBAAa,OAAO,IAAI,KAAK,EAAE;AAC3B,YAAI,KAAK;;AAEb,aAAO;;AChIX;;;;;;;;;AAqBA,oBAAc;AACP;AACH,iBAAe;AACf,kBAAU;;AAEP;AACH,aAAO;;AASJ;AACH,aAAO;;AAOJ;AACH,oBAAc,EAAE;AAChB,UAAI,MAAM,SAAS;AACf,eAAO,MAAM,OAAO,UAAU,IAAI;;AAIlC,eAAO;;;AASR;AACH,aAAO,EAAE,OAAO;;AAQb,oCAA8B;AACjC,uBAAiB,EAAE,MAAM;AACzB,UAAI,OAAO;AACP,eAAO,SAAS,SAAS,OAAO;;AAEpC,eAAS,OAAO,MAAM,GAAG;AACzB,aAAO,EAAE,QAAQ;;AAad;AACH,aAAO,KAAK;AACR,YAAI,EAAE,MAAM,WAAW;AACnB,gBAAM,IAAI,WAAW,yDACT,EAAE,MAAM;;AAExB,kBAAU,aAAW,GAAG;AACxB,eAAO,OAAK,GAAG,CAAC,GAAG,GAAG;;;AAQvB;AACH,uBAAiB,CAAC,UAAqB,EAAE;AACzC,aAAO,EAAE,QAAQ;;AA
Ud;AACH,UAAI,EAAE,QAAQ;AACV,cAAM,IAAI,WAAW,wDAAwD,EAAE;;AAEnF,uBAAiB,CAAC,EAAE,MAAM,IAAI,UAAqB,EAAE,OAAO;AAC5D,aAAO,EAAE,QAAQ;;AAUd;AACH,aAAO,KAAK;AACR,gBAAQ,OAAM;eACL;AACD,mBAAO,QAAY,QAAO,OAAO;eAChC;AACD,mBAAO,SAAY,QAAO,CAAC,OAAO,IAAI,CAAC,MAAM,OAAM,MAAM;eACxD;AACD,mBAAO,SAAY,QAAO,CAAC,OAAO,GAAG,IAAI,CAAC,MAAM,OAAM,MAAM,IAAI,OAAM,MAAM;eAC3E;AACD,mBAAO,SAAY,QAAO,CAAC,OAAO,GAAG,GAAG,IAAI,CAAC,MAAM,OAAM,MAAM,IAAI,OAAM,MAAM,IAAI,OAAM,MAAM;eAC9F;AACD,mBAAO,OAAU,QAAO,CAAC,OAAO,GAAG,GAAG,GAAG,IAAI;cACzC;cAAM,OAAM,MAAM;cAAI,OAAM,MAAM;cAAI,OAAM,MAAM;cAAI,OAAM,MAAM;;eAErE;AACD,mBAAO,OAAU,QAAO,CAAC,OAAO,GAAG,GAAG,GAAG,GAAG,IAAI;cAC5C;cAAM,OAAM,MAAM;cAAI,OAAM,MAAM;cAAI,OAAM,MAAM;cAAI,OAAM,MAAM;cAClE,OAAM,MAAM;;;AAGhB,kBAAM,IAAI,WAAW,8DACd,OAAM;;;;AAYtB;AACH,aAAO,KAAK;AACR,gBAAQ,OAAM;eACL;AACD,mBAAO,QAAY,QAAO,OAAO;eAChC;AACD,mBAAO,SAAY,QAAO,CAAC,GAAG,QAAQ,CAAC,OAAM,MAAM,IAAI;eACtD;AACD,mBAAO,SAAY,QAAO,CAAC,GAAG,GAAG,QAAQ,CAAC,OAAM,MAAM,IAAI,OAAM,MAAM,IAAI;eACzE;AACD,mBAAO,SAAY,QAAO,CAAC,GAAG,GAAG,GAAG,QAAQ,CAAC,OAAM,MAAM,IAAI,OAAM,MAAM,IAAI,OAAM,MAAM,IAAI;;AAE7F,kBAAM,IAAI,WAAW,6DACd,OAAM;;;;AAatB;AACH,aAAO,KAAK;AACR,gBAAQ,OAAM;eACL;AACD,mBAAO,QAAY,QAAO,OAAO;eAChC;AACD,oBAAQ;mBACC;AACD,uBAAO,oBAAoB,QAAO,OAAO;mBACxC;AACD,uBAAO,mBAAmB,QAAO,OAAO;;AAExC,sBAAM,IAAI,WAAW,iDACd;;eAEd;AACD,oBAAQ;mBACC;AACD,uBAAO,oBAAoB,QAAO,OAAO;mBACxC;AACD,uBAAO,SAAY,QAAO,CAAC,GAAG,OAAO,IAAI,CAAC,OAAM,MAAM,IAAI,MAAM,OAAM,MAAM;mBAC3E;AACD,uBAAO,mBAAmB,QAAO,OAAO;;AAExC,sBAAM,IAAI,WAAW,iDACd;;eAEd;AACD,oBAAQ;mBACC;AACD,uBAAO,oBAAoB,QAAO,OAAO;mBACxC;AACD,uBAAO,SAAY,QAAO,CAAC,GAAG,OAAO,GAAG,IAAI,CAAC,OAAM,MAAM,IAAI,MAAM,OAAM,MAAM,IAAI,OAAM,MAAM;mBAC9F;AACD,uBAAO,SAAY,QAAO,CAAC,GAAG,GAAG,OAAO,IAAI,CAAC,OAAM,MAAM,IAAI,OAAM,MAAM,IAAI,MAAM,OAAM,MAAM;mBAC9F;AACD,uBAAO,mBAAmB,QAAO,OAAO;;AAExC,sBAAM,IAAI,WAAW,iDACd;;;AAGf,kBAAM,IAAI,WAAW,6DACd,OAAM;;;;AAUtB,yCAAqC;AACxC;AACA,UAAI,OAAO;AACP,eAAO,QAAQ,GAAG;AAClB,YAAI,SAAS;AACT,iBAAO;;AAGP,iBAAO;;;AAGf,UAAI,SAAS,QAAQ,GAAG;AAGpB,eAAO;;AAGX,aAAO,QAAW,SAAS;;AASxB;AACH,cAAQ,EAAE;aACD;AACD,iBAAO,SAAa,CAAC,GAAG;aACvB;AACD,iBAAO,SAAa,CAAC,GAAG,IAAI;aAC3B;AACD,iBAAO,SAAa,CAAC,GAAG,IAAI;aAC3B;AACD,iBAAO,SAAa,CAAC,GAAG,IAAI;;AAE5B,gBAAM,IAAI,WAAW,+DACD,EAAE;;;AAU3B;AACH,UAAI,CAAC,MAAM,QAAQ;AACf,YAAI,CAAC;;AAET,UAAI,EAAE,SAAS,EAAE;AACb,cAAM,IAAI,WAAW,0BAA0B,EAAE,+DACL,EAAE;;AAElD,aAAO,MAAS,GAAG;;AAahB,2CAAoC,YAAc;AACrD,aAAO,aAAiB,OAAO,OAAM,QAAQ,OAAO;;AAmBjD;AACH,UAAK,EAAE,OAAO,KAAO,EAAE,OAAO;AAC1B,cAAM,IAAI,oBAAoB,8DACJ,EAAE,uBAAuB,EAAE;;AAEzD,UAAI,EAAE,QAAQ;AACV,yBAAiB,EAAE,MAAM,MAAM,IAAI;AACnC,+BAAuB,EAAE,MAAM,MAAM,IAAI;AACzC,YAAI,aAAa;AACb,gBAAM,IAAI,oBAAoB,gGAC8B,EAAE,wBAC5C,EAAE;;;AAI5B,UAAK,EAAE,SAAS,KAAO,EAAE,SAAS;AAC9B,2BAAmB;AACnB,2BAAmB;AAInB,eAAO,SAAiB;UACpB;UACA;UACA;UACA;UACA,MAAM,OAAO,YAAY,EAAE,MAAM,MAAM,qBAAqB;UAC5D;;;AAKJ,2BAAmB,EAAE,MAAM;AAC3B,yBAAiB,WAAW;AAC5B,YAAI,EAAE,QAAQ,CAAC,IAAI;AAGnB,uBAAe,EAAE,MAAM;AACvB,yBAAiB,OAAO;AACxB,+BAAuB,OAAO;AAC9B,2BAAmB,CAAC,GAAG,QAAQ;AAG/B,qBAAa,MAAM,KAAK,CAAE,QAAQ,EAAE,OAAQ;AACxC,cAAI,MAAM;AACN,mBAAO,EAAE,OAAO;qBAEX,KAAK,EAAE,OAAO;AACnB,mBAAO,IAAI;;AAEf,iBAAO;;AAEX,YAAI,EAAE,UAAU,MAAM,QAAQ,CAAC,gBAAgB;AAE/C,4BAAoB,CAAC,GAAG,YAAY,GAAG;AACvC,2BAAmB;AACnB,2BAAmB;AACnB,eAAO,SACK;UACR;UACA;UACA;UACA;UACA,MAAM,OAAO,YAAY,EAAE,MAAM,MAAM,qBAAqB;UAC5D;WAEC,QAAQ;;;AAad;AAEH,aAAO,KAAK;AACR,2BAAmB,WAAc;AACjC,0BAAkB,UAAa;AAC/B,eAAO,MAAM,MAAU,GAAG,aAAa,YAAY,MAAM,QAAY,GAAG,WAAc,KAAK,WAAW,KAAQ,IAAI;;;AAWnH;AACH,aAAO,KAAK;AACR,YAAI,QAAQ,SAAS;AACjB,gBAAM,IAAI,MAAM;;AAGpB,kBAAU,QAAQ;AAClB,eAAO,QAAW,SAAS,YAAY;;;AAWxC;AACH,aAAO,KAAK;AACR,YAAI,MAAM,QAAQ;AACd,oBAAU,UAAS,
SAAS;;AAG5B,oBAAU,QAAQ;;AAEtB,eAAO,OAAW,WAAW,SAAS;;;AAQvC;AACH,aAAO,KAAQ,GAAG;;AAcf;AACH,aAAO,KAAK;AACR,YAAI,OAAQ,MAAO;AACf,cAAI,QAAO,KAAK,MAAM,IAAI;;AAE9B,YAAI,EAAE,UAAU;AACZ,gBAAM,IAAI,oBAAoB,oBAAoB,EAAE;;AAExD,eAAO,IAAQ,GAAG;;;AAM1B;AACI,wBAAkB,KAAK;AACvB,UAAI,KAAK,SAAS,KAAK,KAAK,SAAS;AACjC,cAAM,IAAI,WAAW,+BAA+B,KAAK,gCACzB;;AAEpC,UAAI,UAAU;AACV,YAAI,eAAe;AACf,cAAI,UAAU,WAAW;AACrB,mBAAO,KAAK,QAAQ,CAAC,GAAG,UAAU,IAAI,GAAG,GAAG;;AAG5C,mBAAO,KAAK,QAAQ,CAAC,GAAG,UAAU,IAAI,UAAU,IAAI,UAAU,IAAI,UAAU;;mBAG3E,eAAe;AACpB,cAAI,UAAU,WAAW;AACrB,mBAAO,KAAK,QAAQ,CAAC,GAAG,GAAG,GAAG,GAAG,UAAU;;AAG3C,mBAAO,KAAK,QAAQ,CAAC,GAAG,OAAO;;;iBAIlC,UAAU;AACf,YAAI,eAAe;AACf,cAAI,UAAU,WAAW;AACrB,mBAAO,KAAK,QAAQ,CAAC,GAAG,UAAU,IAAI,GAAG;;AAGzC,mBAAO,KAAK,QAAQ,CAAC,GAAG,UAAU,IAAI,UAAU,IAAI,UAAU;;mBAG7D,eAAe;AACpB,cAAI,UAAU,WAAW;AACrB,mBAAO,KAAK,QAAQ,CAAC,GAAG,GAAG,GAAG,UAAU;;AAGxC,mBAAO,KAAK,QAAQ,CAAC,GAAG,OAAO;;;iBAIlC,UAAU;AACf,YAAI,eAAe;AACf,cAAI,UAAU,WAAW;AACrB,mBAAO,KAAK,QAAQ,CAAC,GAAG,UAAU,IAAI;;AAGtC,mBAAO,KAAK,QAAQ,CAAC,GAAG,UAAU,IAAI,UAAU;;mBAG/C,eAAe;AACpB,cAAI,UAAU,WAAW;AACrB,mBAAO,KAAK,QAAQ,CAAC,GAAG,GAAG,UAAU;;AAGrC,mBAAO,KAAK,QAAQ,CAAC,GAAG,OAAO;;;iBAIlC,QAAQ;AACb,eAAO;;AAEX,YAAM,IAAI,WAAW,sCAAsC,KAAK;;AAW7D;AACH,aAAO,KAAK;AACR,YAAI,cAAc;AACd,uBAAa;;AAEjB,wBAAgB;AAChB,eAAO,EAAE,IAAI,YAAY,EAAE,MAAM,MAAM;;;AASxC,8BAAwB;AAE3B,UAAI,UAAU;AACV,cAAM,IAAI,oBAAoB,0CAA0C;;AAG5E,aAAO,KAAQ;;AAUZ;AACH,aAAO,KAAK,MAAM,IAAQ,GAAG,IAAQ,GAAG,IAAI;;AAYzC;AACH,aAAO,KAAK,MAAM,QAAY,GAAG,OAAO,YAAY;;AAWjD;AACH,aAAO,KAAK;AACR,kBAAU,MAAQ,KAAI,KAAQ,KAAI;AAClC,eAAO,YAAgB,GAAG,GAAG;;;AAgB9B,6CAAyC;AAC5C,aAAO,WAAW,MAAM;;AChoB5B;;;;;;;;;AASO,kCAA8B,CAAC,SAAS,UAAU;AAClD,sCAAkC,CAAC,UAAU,WAAW;AASxD,kCAA8B;MACjC;MAAS;MAAQ;MAAY;MAAgB;MAC7C;MAAmB;MAAmB;MAAc;;ACrBxD;;;;;;;;;AAgBO;AACH,gCAA0B,uBAAuB,WAAW;;AAEzD;AACH,gCAA0B,2BAA2B,gBAAgB;;8BAQxC;MAC7B;AACI,eAAO;;MAEX;AACI,eAAO;;;wBAGY;MACvB;AACI,eAAO,OAAM,OAAO;;;AAI5B,UAAM,YAAY;AAClB,kBAA4B;uBACF;MACtB;AACI,eAAO,OAAK,OAAO;;;AAI3B,SAAK,YAAY;AACjB,kBAA4B;2BACE;MAC1B;AACI;AACA,YAAI,OAAO,SAAS;AAChB,gBAAM,IAAI,WAAW,oDAAoD;;AAE7E,YAAI,KAAK,UAAU;AACf,gBAAM,IAAI,WAAW,sCAAsC;;AAE/D,aAAK,QAAQ,KAAK;;MAEtB;AACI,eAAO,KAAK,MAAM,KAAI,QAAO,KAAK,QAAQ,OAAK,OAAO;;MAE1D;AACI,eAAO;UACH,OAAO,KAAK;;;;AAKxB,aAAS,YAAY;AACrB,kBAA4B;gCACO;MAC/B;AACI;AACA,aAAK,iBAAiB;AACtB,aAAK,iBAAiB;AACtB,aAAK,SAAS,KAAK,UAAU,KAAK;AAClC,aAAK,SAAS,KAAK,UAAU,KAAK;AAClC,aAAK,OAAO,KAAK;;MAErB;AACI,eAAO,cAAc,OAAO,KAAK,QAAQ,KAAK,QAAQ;;MAE1D;AACI,eAAO,CAAE,QAAQ,KAAK,QAAQ,QAAQ,KAAK,QAAQ,MAAM,KAAK;;;AAItE,kBAAc,YAAY;AAC1B,kBAA4B;+BACM;MAC9B;AACI;AACA,aAAK,eAAe;AACpB,aAAK,iBAAiB;AACtB,aAAK,OAAO,KAAK,QAAQ,KAAK;AAC9B,aAAK,SAAS,KAAK,UAAU,KAAK;AAClC,aAAK,OAAO,KAAK;;MAErB;AACI,gBAAQ,SAAS;AACjB,YAAI,UAAU,aAAa,UAAU;AACjC,gBAAM,IAAI,oBAAoB,uCAAuC;;AAEzE,eAAO,eAAe,OAAO,KAAK,MAAM,KAAK,QAAQ,OAAO,KAAK;;MAErE;AACI,eAAO,CAAE,MAAM,KAAK,MAAM,QAAQ,KAAK,QAAQ,MAAM,KAAK;;;AAIlE,iBAAa,YAAY;AACzB,kBAA4B;kCACS;MACjC;AACI;AACA,aAAK,eAAe;AACpB,aAAK,iBAAiB;AACtB,aAAK,OAAO,KAAK,QAAQ,KAAK;AAC9B,aAAK,SAAS,KAAK,UAAU,KAAK;AAClC,aAAK,OAAO,KAAK;;MAErB;AACI,gBAAQ,SAAS;AACjB,YAAI,UAAU,aAAa,UAAU;AACjC,gBAAM,IAAI,oBAAoB,0CAA0C;;AAE5E,eAAO,gBAAgB,OAAO,KAAK,MAAM,KAAK,QAAQ,OAAO,KAAK;;MAEtE;AACI,eAAO,CAAE,MAAM,KAAK,MAAM,QAAQ,KAAK,QAAQ,MAAM,KAAK;;;AAIlE,oBAAgB,YAAY;AAC5B,kBAA4B;6BACE;MAC1B;AACI;AACA,aAAK,OAAO,KAAK,QAAQ,OAAO,KAAK,OAAO;;MAEhD;AACI,eAAO,KAAK;AACR,cAAI,MAAM,WAAW,KAAK,MAAM,OAAO,MAAM;AACzC,kBAAM,IAAI,WAAW;;AAIrB,mBAAO,KAAI,KAAK,MAAM,IAAI,MAAM;;;;MAI5C;AACI,eAAO,CAAE,MAAM,KAAK;;;AAI5B,eAAS,YAAY;AACrB,kBAA4B;AAS5B,6CAAyC;AACrC;AACA;AACA,sBAAgB;AAChB,UAAI,MAAM,WAAW;AACjB,gBAAQ,M
AAM;AACd,iBAAS,MAAM;iBAEV,CAAC,GAAG,GAAG,GAAG,QAAQ,MAAM,YAAY;AACzC,YAAI,eAAe;AACf,qCAA2B,UAAU,OAAO;AAC5C,kBAAQ,MAAM,KAAK;AACnB,mBAAS,MAAM,KAAK;mBAEf,eAAe;AACpB,qCAA2B,UAAU,OAAO,GAAG,MAAM,SAAS;AAC9D,kBAAQ,MAAM,MAAM,SAAS,KAAK;AAClC,mBAAS,MAAM,MAAM,SAAS,KAAK;;;AAIvC,0BAAkB,UAAU;AAC5B,gBAAQ,KAAK,KAAK;AAClB,iBAAS,KAAK,KAAK;;AAEvB,aAAO,CAAC,OAAO;;kCAEkB;MAKjC;AACI;AACA,YAAI,KAAK,QAAQ;AACb,gBAAM,IAAI,WAAW,wCAAwC,KAAK;;AAEtE,aAAK,QAAQ,KAAK,SAAS,OAAO,IAAM,KAAK;AAC7C,aAAK,OAAO,KAAK,QAAQ,OAAO,UAAU,KAAK;AAC/C,qBAAa,KAAK;AAClB,aAAK,eACD,KAAK,gBAAgB,OAAO,WAAW,KAAK;AAChD,0BAAkB,KAAK;AACvB,aAAK,OAAO,KAAK;;MAErB;AACI,qBAAa,YAAY;AACzB,sBAAc,KAAK;AACnB,uBAAe,KAAK;AACpB,qBAAY,KAAK;AACjB,YAAI,KAAK,SAAS;AACd,oBAAS,KAAK,IAAI,GAAG;mBAEhB,KAAK,SAAS;AACnB,oBAAS,KAAK,IAAI,GAAG;;AAGrB,oBAAS,KAAK,IAAI,GAAI,SAAQ,UAAU;;AAE5C,YAAI,KAAK,iBAAiB;AACtB,yBAAe,KAAK,KAAK;AACzB,kBAAQ,SAAS;AACjB,cAAI,UAAU,aAAa,UAAU;AACjC,kBAAM,IAAI,oBAAoB,GAAG,KAAK,yCAAyC;;AAEnF,iBAAO,gBAAgB,OAAO,GAAG,QAAQ,OAAO,KAAK;;AAGrD,wBAAc,KAAK,KAAK,IAAI;AAC5B,iBAAO,cAAc,OAAO,CAAC,OAAO,OAAO;;;MAGnD;AACI,eAAO;UACH,OAAO,KAAK;UACZ,MAAM,KAAK;UACX,cAAc,KAAK;UACnB,MAAM,KAAK;;;;AAKvB,oBAAgB,YAAY;AAC5B,kBAA4B;gCACO;MAQ/B;AACI,cAAM;UACF,OAAO;UACP,MAAM;UACN,cAAc;UACd,MAAM,QAAQ,OAAO,OAAO,KAAK;;;MAGzC;AAII,eAAO,gBAAgB;;;AAI/B,kBAAc,YAAY;AAC1B,kBAA4B;+BACM;MAQ9B;AACI,cAAM;UACF,OAAO;UACP,MAAM;UACN,cAAc;UACd,MAAM,QAAQ,OAAO,OAAO,KAAK;;;MAGzC;AAII,eAAO,gBAAgB;;;AAI/B,iBAAa,YAAY;AACzB,kBAA4B;2BACE;MAC1B;AACI,cAAM;UACF,OAAO;UACP,MAAM;UACN,cAAc;UACd,MAAM,QAAQ,OAAO,OAAO,KAAK;;;MAGzC;AAII,eAAO,gBAAgB;;;AAI/B,aAAS,YAAY;AACrB,kBAA4B;4BACG;MAC3B;AACI,cAAM;UACF,OAAO;UACP,MAAM;UACN,cAAc;UACd,MAAM,QAAQ,OAAO,OAAO,KAAK;;;MAGzC;AAII,eAAO,gBAAgB;;;AAI/B,cAAU,YAAY;AACtB,kBAA4B;8BACK;MAC7B;AACI,cAAM;UACF,OAAO;UACP,MAAM;UACN,cAAc;UACd,MAAM,QAAQ,OAAO,OAAO,KAAK;;;MAGzC;AAII,eAAO,gBAAgB;;;AAI/B,gBAAY,YAAY;AACxB,kBAA4B;+BACM;MAC9B;AACI,cAAM;UACF,OAAO;UACP,MAAM;UACN,cAAc;UACd,MAAM,QAAQ,OAAO,OAAO,KAAK;;;MAGzC;AAII,eAAO,gBAAgB;;;AAI/B,iBAAa,YAAY;AACzB,kBAA4B;6BACI;MAC5B;AACI;AACA,aAAK,eAAe;AACpB,aAAK,OAAO,KAAK,QAAQ,OAAO,KAAK,eAAe,KAAK;AACzD,aAAK,OAAO,KAAK;AACjB,YAAI,KAAK,QAAQ;AACb,gBAAM,IAAI,oBAAoB;;;MAGtC;AACI,eAAO,KAAK;AACR,cAAI,MAAM,SAAS;AACf,kBAAM,IAAI,oBAAoB;;AAElC,cAAI,MAAM,KAAK,MAAM,KAAK;AACtB,oBAAQ,KAAK,2EACK,MAAM,KAAK,MAAM;;AAIvC,kCAAwB,MAAM,KAAK,MAAM,KAAK,CAAC,MAAM,IAAI,MAAM,MAAM;AACrE,oBAAU,eAAe,iBAAiB,GAAG,GAAG;AAChD,kBAAQ,OAAO,YAAY;AAC3B,cAAI,MAAM,KAAK,MAAM;AACjB,gBAAI,EAAE;;AAEV,iBAAO,KAAI,KAAK,MAAM;;;MAG9B;AACI,eAAO;UACH,MAAM,KAAK;UACX,MAAM,KAAK;;;;AAKvB,eAAW,YAAY;AACvB,kBAA4B;AAGrB,uDAAmD;MACtD,UAAY;MACZ,cAAgB;MAChB,eAAiB;MACjB,UAAY;MACZ,WAAa;MACb,UAAY;MACZ,aAAe;MACf,cAAgB;MAChB,MAAQ;MACR,YAAc;MACd,cAAgB;MAChB,eAAiB;MACjB,iBAAmB;MACnB,iBAAmB;MACnB,OAAS;;AAEb,6DAAwD;AACpD,aAAO,uBAAuB,SAAQ,iBAA+B,SAAS,cAAc,eAAe;;AAExG;AACH,aAAO,qBAAqB;;AAEzB;AACH,UAAI,OAAO,eAAe;AACtB,0BAAkB,cAAc,6CAC5B,2CAA2C,cAC3C;AAIJ,YAAI,cAAc;AACd,iBAAO,IAAI;mBAEN,cAAc;AACnB,iBAAO,IAAI;mBAEN,cAAc;AACnB,iBAAO,IAAI;mBAEN,cAAc;AACnB,iBAAO,IAAI;mBAEN,cAAc;AACnB,iBAAO,IAAI;mBAEN,cAAc;AACnB,iBAAO,IAAI;;AAGX,0BAAe;AACf,kBAAO,eAAe;AACtB,kBAAO,YAAY;AACnB,iBAAO,uBAAuB;;iBAG7B,sBAAsB;AAC3B,eAAO;;AAGP,eAAO,uBAAuB;;;ACretC;;;;;;;;;AAgBO;AACH,aAAO,IAAI;;AAOR;AACH,aAAO,IAAI;;AAOR;AACH,aAAO,IAAI,SAAS;;AAWjB;AACH,aAAO,IAAI,cAAc;;AAQtB;AACH,aAAO,IAAI,aAAa;;AAYrB;AACH,aAAO,IAAI,gBAAgB;;AAQxB;AACH,aAAO,IAAI,WAAS;;AAejB;AACH,aAAO,IAAI,gBAAgB;;AAexB;AACH,aAAO,IAAI,cAAc;;AAetB;AACH,aAAO,IAAI,aAAa;;AAcrB;AACH,aAAO,IAAI,SAAS;;AAcjB;AACH,aAAO,IAAI,UAAU;;AAelB;AACH,aAAO,IAAI,YAAY;;AAWpB;AACH,aAAO,IAAI,aAAa;;AAUrB;AACH,aAAO,IAAI,WAAW;;;;;;;;;
;;;;;;;;;;;ACjM1B;;;;;;;;;AAiBA,8BAA0B;AACnB;AACH,aAAO;;AAEX,yBAAqB;AAMd,6BAAyB;AAC5B,UAAI,CAAE,WAAU;AACZ,qBAAa,UAAU;;AAE3B,mBAAa,WAAW;AACxB,aAAO,SAAS,aAAa,QAAQ;;AChCzC;;;;;;;;;AAcO;AACH,aAAO,MAAM,QAAQ,MAAM,MAAM,QAAQ,EAAE;;AAQxC;AACH,UAAI,EAAE,WAAW;AACb,eAAO;;AAEX,UAAI,CAAC,MAAM,QAAQ,EAAE;AACjB,eAAO,CAAC;;AAEZ,aAAO;;AAQJ;AACH;AACA,UAAI,MAAM,QAAQ;AACd,YAAI,GAAG,WAAW;AACd,gBAAM,IAAI,WAAW,uCAAuC,GAAG;;AAEnE,YAAI,GAAG;;AAGP,YAAI;;AAER,aAAO;;AAYJ;AACH,UAAI,MAAM,QAAQ,WAAW,MAAM,QAAQ,OAAO;AAC9C,YAAI,OAAO,WAAW;AAClB,mBAAS;AACT,iBAAO,OAAO;;AAGd,gBAAM,IAAI,WAAW,iCAAiC,OAAO;;;AAIjE,eAAO;;;ACxEf;;;;;;;;;AAgBO;AACH,mBAAY;AACZ,2BAAqB;AACjB,YAAI,OAAO,MAAM,WAAW;AACxB,oBAAS;;AAGT,oBAAS,OAAO,MAAM,OAAO,UAAU,IAAI;;;AAGnD,aAAO;;AC1BX;;;;;;;;;AAcA,yCAAqC;;MAsBjC,yBAAyB,kBAAkB,0CAA0C,mBAAmB;AACpG,aAAK,QAAQ,SAAS,OAAO,YAAY;AACzC,aAAK,QAAQ,IAAI;AACjB,aAAK,KAAK;AACV,eAAO,QAAQ,OAAO,+BAA+B;AACrD,aAAK,eAAe,oBAAoB;AACxC,aAAK,OAAO,oBAAoB,KAAK;AACrC,aAAK,aAAa;AAClB,aAAK,aAAa;AAClB,aAAK,MAAM,SAAa,KAAK,KAAK,YAAY,KAAK,MAAM,KAAK;;MASlE;AACI,aAAK;AACL,eAAO,KAAK;;MAShB;AAEI,aAAK;AACL,yBAAiB,KAAK,KAAK;AAE3B,YAAI,KAAK,IAAI,OAAO,OAAO;AACvB,eAAK,IAAI,OAAO;AAChB,cAAI,KAAK,cAAc;AACnB,iBAAK,IAAI,OAAO,KAAK,WAAW,MAAM,KAAK;;;AAGnD,eAAO;;MAKX;AACI,aAAK;AACL,aAAK,IAAI;;MAEb;AACI,YAAI,KAAK,IAAI;AACT,gBAAM,IAAI,MAAM,kBAAkB,KAAK;;;UAG3C;AACA,eAAO,KAAK;;UAEZ;AACA,aAAK,aAAa;AAClB,aAAK,IAAI,YAAY;;;AAG7B;AACI,UAAI,EAAE,MAAM,eAAe,EAAE,MAAM;AAC/B,cAAM,IAAI,MAAM,qBAAqB,KAAK,UAAU,EAAE,SAAS,UAC3D,KAAK,UAAU,EAAE;;;AAYtB;AACH,aAAO,IAAI,cAAc,GAAG,OAAO,MAAM,MAAM;;AAU5C;AAEH,aAAO,IAAI,cAAc,OAAU,QAAQ,OAAO;;AAU/C;AACH,aAAO,IAAI,cAAc,WAAc,IAAI,OAAO;;AAU/C;AAEH,wBAAkB,OAAS;AAC3B,aAAO,IAAI,cAAc,WAAW,OAAO;;AAUxC;AACH,wBAAkB,UAAa;AAC/B,aAAO,IAAI,cAAc,WAAW,OAAO;;AAUxC;AACH,aAAO,IAAI,cAAc,IAAQ,OAAO,OAAO;;AAY5C,8EAA0E;AAC7E,aAAO,IAAI,cAAc,cAAkB,OAAO,QAAQ,QAAQ,QAAQ,OAAO;;AAY9E,oDAA+C,YAAc,uBAAyB;AAGzF,cAAQ,SAAS;AACjB,UAAI,UAAU,aAAa,UAAU;AACjC,cAAM,IAAI,oBAAoB,uCAAuC;;AAEzE,aAAO,IAAI,cAAc,gBAAoB,OAAO,OAAM,QAAQ,OAAO,OAAO,OAAO;;AAYpF,iDAA4C,YAAc,uBAAyB;AACtF,cAAQ,SAAS;AACjB,UAAI,UAAU,aAAa,UAAU;AACjC,cAAM,IAAI,oBAAoB,+CAA+C;;AAEjF,aAAO,IAAI,cAAc,aAAiB,OAAO,OAAM,QAAQ,OAAO,OAAO,OAAO;;AAQjF;AACH,aAAO,EAAE,MAAM;;AAQZ;AACH,aAAO,EAAE,MAAM,MAAQ,EAAE,QAAQ;;AAQ9B;AACH,aAAO,EAAE,MAAM,IAAQ,EAAE,QAAQ;;AAQ9B;AACH,aAAO,GAAG,IAAI,OAAK,EAAE;;AAUlB;AACH,yBAAmB,QAAQ;AACvB,0BAAiB,iBAAiB;AAClC,kBAAS,MAAM,iBAAiB;;;AAWjC;AAGH,2BAAqB,UAAU,IAAI,eAAY,UAAS;AACxD,4BAAsB,cAAc,QAAQ;AAC5C,aAAO,UAAU,IAAI,eAAY,cAAc,MAAM,UAAS;;AC/RlE;;;;;;;;;;MA6BI;AACI,aAAK,QAAQ,KAAK;AAClB,aAAK,QAAQ,KAAK;AAKlB,YAAI,KAAK,SAAS;AACd,eAAK,OAAO,KAAK,MAAM;;AAGvB,eAAK,OAAO,KAAK;;AAErB,aAAK,UAAU,KAAK;AACpB,aAAK,UAAU,KAAK;AACpB,aAAK,OAAO,KAAK,QAAQ;;;;MAyB7B;AACI,aAAK,QAAQ;AACb,aAAK,QAAQ;AACb,aAAK,cAAc;AACnB,aAAK,SAAS;AACd,aAAK,WAAW;AAChB,aAAK,oBAAoB;AACzB,aAAK,KAAK;AACV,YAAI,QAAQ;AACR,eAAK,eAAe,oBAAoB;AACxC,eAAK,OAAO,oBAAoB,KAAK;;AAEzC,aAAK,OAAO,MAAM;;;AAG1B,sBAAkB;;MAsBd;AAGI,aAAK,WAAW;AAChB,aAAK,KAAK;AAQV,aAAK,gBAAgB,KAAK;AAQ1B,aAAK,gBAAgB,KAAK;AAE1B,aAAK,cAAc,KAAK;AAExB,aAAK,gBAAgB,KAAK;AAM1B,aAAK,eAAe,KAAK;AAEzB,aAAK,gBAAgB,KAAK;AAK1B,aAAK,aAAa,KAAK;AAEvB,aAAK,cAAc,KAAK;AAGxB,aAAK,cAAc,KAAK;AAExB,aAAK,eAAe,KAAK;AAEzB,4BAAoB,KAAK;AACrB,cAAI,SAAS;AACT,kBAAM,cAAc,KAAK;;;AAGjC,aAAK,cAAc,aAAa,KAAK;;MAEzC;AACI,6BAAqB;AACrB,4BAAoB,KAAK;AACrB,cAAI,SAAS;AACT,yBAAa,KAAK,MAAM;;AAGxB,yBAAa,KAAK;;;AAG1B,eAAO;UACH,eAAe,KAAK,gBAAgB,KAAK,cAAc,OAAO;UAC9D,eAAe;UACf,aAAa,KAAK;UAClB,eAAe,KAAK;;;;AAIhC,uBAAmB;wBAUQ;MACvB,mBAAmB;AACf;AACA,aAAK,YAAY;AACjB,aAAK,oBAAoB;AAKzB,aAAK,YAAY;AACjB,aAAK,KAAK;AACV,aAAK,sBAAsB;AAC3B,aAAK,YAAY;AACjB,aAAK,kBAAkB
;AAEvB,aAAK,oBAAoB;AACzB,aAAK,uBAAuB;AAC5B,aAAK,UAAU;AACf,aAAK,WAAW;AAChB,aAAK,SAAS;AAKd,aAAK,eAAe;AACpB,aAAK,gBAAgB;AACrB,mBAAW,KAAK;AAChB,YAAI,CAAC;AACD,yBAAe,KAAK;AACpB,iBAAO,YAA0B,UAAU,MAAM,OAAO;;AAE5D,aAAK,OAAO;AACZ,aAAK,aAAa,KAAK,aAAa,OAAO,OAAO,KAAK;AACvD,YAAI,KAAK,cAAc,QAAQ,KAAK,mBAAmB;AAKnD;AACA,cAAI,KAAK,mBAAmB;AACxB,8BAAkB,KAAK;qBAElB,KAAK,cAAc;AACxB,4BAAgB;AAChB,gBAAI,KAAK,aAAa;AAClB,0BAAY,KAAK;;AAErB,8BAAkB,CAAC,WAAW,OAAO,KAAK;;AAE9C,eAAK,kBAAkB;AAEvB,sBAAY,KAAK;AACjB,cAAI,SAAS;AACT,oBAAQ,KAAK;;AAEjB,cAAI,SAAS;AACT,oBAAQ;;AAEZ,eAAK,QAAQ;;AAEjB,YAAI,KAAK,WAAW;AAChB,eAAK,iBAAiB,KAAK;;AAG3B,eAAK,iBAAiB;;AAI1B,aAAK,YAAY;AACjB,aAAK,4BAA4B;;aAW9B;AACH,eAAO,MAAM,OAAO,SAAS,UAAU;;MAS3C;AACI,YAAI,KAAK,aAAa,WAAW;AAC7B,gBAAM,IAAI,aAAa,2DACQ;;AAEnC,YAAI,KAAK,aAAa,UAAU;AAC5B,gBAAM,IAAI,WAAW,gBAAgB,oBAAoB,qCAC3B,KAAK,aAAa;;AAEpD,eAAO,KAAK,aAAa;;MAW7B;AACI,eAAO,iBAA+B,KAAK,eAAe,WAAW,SAAS;;MAWlF;AACI,eAAO,iBAA+B,KAAK,eAAe,WAAW,UAAU;;UAc/E;AACA,YAAI,KAAK,aAAa,SAAS;AAC3B,gBAAM,IAAI,eAAe,SAAS,KAAK;mBAMlC,KAAK,aAAa,WAAW;AAClC,gBAAM,IAAI,eAAe,SAAS,KAAK;;AAG3C,eAAO,iBAA+B,KAAK,eAAe,GAAG,SAAS;;UAatE;AACA,YAAI,KAAK,aAAa,WAAW;AAC7B,gBAAM,IAAI,eAAe,SAAS,KAAK;;AAG3C,YAAI,KAAK,aAAa,SAAS;AAC3B,gBAAM,IAAI,eAAe,SAAS,KAAK;;AAM3C,eAAO,iBAA+B,KAAK,eAAe,GAAG,UAAU;;UAEvE;AACA,eAAO,KAAK;;MAOhB;AAKI,eAAO,KAAK,OAAO,IAAI,YAAU;;UAEjC;AACA,eAAO,KAAK;;UAEZ;AACA,eAAO,KAAK;;UAEZ;AACA,aAAK,SAAS;;UAEd;AACA,eAAO,KAAK;;UAEZ;AACA,aAAK,kBAAkB,QAAQ,OAAK,EAAE,YAAY;AAClD,aAAK,aAAa;;UAElB;AACA,YAAI,KAAK;AACL,iBAAO,KAAK,kBAAkB,OAAO,OAAK,EAAE;;AAG5C,iBAAO;;;UAGX;AACA,aAAK,oBAAoB;;UAEzB;AACA,YAAI,KAAK;AACL,iBAAO,KAAK,kBAAkB,OAAO,OAAK,CAAC,EAAE,WACxC,OAAO,KAAK;;AAGjB,iBAAO,KAAK,kBAAkB,OAAO,KAAK;;;UAG9C;AACA,aAAK,uBAAuB;;UAM5B;AACA,eAAO,KAAK,iBAAiB,OAAO,KAAK;;UAEzC;AACA,eAAO,KAAK;;MAShB;AACI,YAAI,CAAC,KAAK;AACN,gBAAM,IAAI,MAAM;;;MAgBxB;AACI,iBAAS,OAAqB;AAC9B,YAAI,KAAK,aAAa,QAAQ,KAAK,UAAU,WAAW;AACpD;;AAEJ,0BAAkB,OAAqB,KAAK;AAC5C,YAAI,OAAO,WAAW,UAAU;AAC5B,gBAAM,IAAI,WAAW,SAAS,KAAK,gBAAgB,UAAU,kCACtC,OAAO,yCACP;;AAE3B,8BAAsB,GAAG,aAAa,OAAO,QAAQ;AACjD,oBAAU,OAAO;AACjB,uBAAa,UAAU;AACvB,cAAI,QAAQ;AACR;;AAGJ,uBAAa,EAAE;AACf,cAAI,KAAK,QAAQ;AACb,gBAAI,SAAS,KAAK;AACd,oBAAM,IAAI,WAAW,SAAS,yCAAyC,KAAK,uBACvD,KAAK,oBAAoB;;;AAGtD,cAAI,KAAK,WAAW;AAChB,gBAAI,OAAO,KAAK;AACZ,oBAAM,IAAI,WAAW,SAAS,yCAAyC,KAAK,2BACjD,KAAK,uBAAuB;;;AAG/D,cAAI,KAAK,WAAW;AAChB,gBAAI,OAAO,KAAK;AACZ,oBAAM,IAAI,WAAW,SAAS,yCAAyC,KAAK,2BACjD,KAAK,uBAAuB;;;AAI/D,cAAI,KAAK,SAAS;AACd,gBAAI,EAAE,UAAU,KAAK;AACjB,oBAAM,IAAI,WAAW,SAAS,yCAAyC,KAAK,yBACpD,KAAK,sBAAsB,EAAE;;;AAI7D,cAAI,KAAK;AACL,2BAAe,EAAE;AACjB,8BAAkB,KAAK;AACnB,2BAAa,OAAO;AACpB,4BAAc,KAAK,KAAK;AAIxB,mCAAqB,QAAQ,IAAI,OAAO,QAAQ,OAAO,OAAO,SAAS;AACvE,kBAAI,SAAS,QAAQ,CAAC,OAAO,MAAM,QAAQ,kBAAkB;AACzD,sBAAM,IAAI,WAAW,SAAS,yCACvB,KAAK,uBAAuB,qCACjB,uBAAuB;;;;AAKrD,cAAI,KAAK,SAAS;AACd,yBAAa,GAAG,IAAI,KAAK,MAAM,QAAQ,EAAE;AACrC,8BAAgB,KAAK,MAAM;AAC3B,0BAAY,EAAE,MAAM;AACpB,kBAAI,WAAW,QAAQ,OAAO;AAC1B,oBAAI,YAAY;AACZ,wBAAM,IAAI,WAAW,SAAS,yCACvB,KAAK,wBAAwB,KAAK,sBACtB,EAAE;;;;;;;MAe7C;AACI,eAAO;;MAEX;AACI,YAAI,KAAK,aAAa;AAClB,eAAK,UAAU,QAAQ;;;MAQ/B;AACI,aAAK,YAAY;;MAMrB;AACI,aAAK,YAAY;;MAuErB;AACI,iBAAS,UAAU;AACnB,aAAK;AAEL,2BAAmB,OAAqB;AACxC,6BAAqB;AACrB,6BAAoB;AAChB,cAAI,CAAE,mBAAiB;AACnB,6BAAiB;AACjB;;;AAGR,8BAAsB;AACtB,6BAAoB;AAChB,cAAI,kBAAiB;AACjB,8BAAkB;AAClB;;;AAGR,YAAI,mBAAmB;AACnB,gBAAM,IAAI,WAAW;;AAIzB,eAAO,UAAU,KAAK,MAAM;AAExB,cAAI,CAAC,KAAK;AAKN,iBAAK,yBAAyB;AAE9B,gCAAoB;AACpB,gCAAoB,OAAqB;AACrC,0BAAY,KAAK,MAAM;;AAE3B,iBAAK,MAAM,iBAA+B;AAC1C,iBAAK,QAAQ;AAEb,gBAAI,KAAK;AACL,mBAAK,WAAW,KAAK;;AAEzB,gBAAI,KAAK,cAAc,QAAQ;AAI3B,mBA
AK,YAAY;;;AAOzB,eAAK,yBAAyB;AAI9B,cAAI;AACA,yBAAa,KAAK,KAAK,QAAQ;AAI/B,+BAAmB,OAAqB;AACxC,mCAAuB;AAGvB,0BAAc;AACV,kBAAI,WAAW,QAAQ,OAAO;AAC1B,oBAAI,EAAE;;AAEV,6BAAe,KAAK;;AAExB,qBAAS,iBAA+B;AACxC,gBAAI,KAAK,uBAAuB;AAC5B,oBAAM,IAAI,oBAAoB;;AAIlC,mBAAO;;AAGP,+BAAmB,kBAAkB;AACrC,gCAAoB,KAAK,mBAAmB;AAC5C;AACA,gCAAoB,iBAAiB;AACrC,iBAAK,6BAA6B,MAAM,QAAQ,UAAU,WAAW,KACjE;AACJ,gBAAI,eAAe,QAAQ,YAAY,SAAS,KAC5C,MAAM,QAAQ,YAAY;AAE1B,uBAAS,YACJ,IAAI,mBAAkB,IAAI,eAAe,aAAa,OAAO,MAAM,OAAqB,SAAS,QAAQ,KAAK,MAAM;;AAGzH,uBAAS,IAAI,eAAe,aAAa,aAAa,MAAM,OAAqB,SAAS,QAAQ,KAAK;;AAS3G,iBAAK,eAAe,QAAQ,QAAQ,MAAM,MAAM,YAAY,aAAa;AACzE,iBAAK;AACL,gBAAI,KAAK,uBAAuB;AAC5B,oBAAM,IAAI,oBAAoB;;AAGlC,mBAAO;;;;MAWnB;AACI,YAAI,KAAK,mBAAmB;AACxB;mBAEK,WAAW,WAAW,KAAK,gBAAgB;AAChD,kBAAQ,KAAK,iDACN,KAAK,UAAU,4DACE,KAAK,UAAU,KAAK,kCACxB,KAAK;;AAGzB,4BAAkB;AAClB,eAAK,gBAAgB,QAAQ;AACzB,gBAAI,aAAa,QAAQ,WAAW,MAAM,QACtC,WAAW,OAAO;AAClB,4BAAc;;;AAGtB,cAAI;AACA,oBAAQ,KAAK,kCACL,KAAK,UAAU,wDACe,KAAK,SACpC,KAAK,UAAU,KAAK;;;;UAgBnC;AACA,YAAI,KAAK,gBAAgB,QAAQ,KAAK,aAAa,WAAW;AAC1D,gBAAM,IAAI,eAAe,aAAa,KAAK;;AAG/C,gCAAwB;AACxB,2BAAmB,KAAK;AACpB,8BAAoB,KAAK,UAAU,KAAK;AACxC,cAAI,gBAAgB,QAAQ,iBAAiB;AACzC,4BAAgB,KAAK;;;AAG7B,YAAI,gBAAgB,WAAW;AAC3B,+BAAqB,KAAK,aAAa,GAAG;AAC1C,cAAI,MAAM,QAAQ,iBAAiB,MAAM,QAAQ,aAAa,OAC1D,aAAa,WAAW;AACxB,mBAAO,aAAa;;AAGpB,mBAAO;;;AAIX,gBAAM,IAAI,eAAe,aAAa,KAAK;;;MAgBnD;AACI,YAAI,CAAC,KAAK;AACN,gBAAM,IAAI,aAAa,sCAAsC,KAAK;;AAItE,eAAO,qBAAoC,KAAK;;MAapD;AACI,aAAK,QAAQ;;MAUjB,2BAA2B;AACvB,eAAO,cAAc,gBAAgB,KAAK,mBAAmB,KAAK;;MActE;AACI,aAAK;AACD,yBAAe,KAAK;AACpB,cAAI,OAAO,WAAW,QAAQ;AAK1B,kBAAM,IAAI,WAAW,4CAA4C,KAAK,sCAClC,QAAQ,uCACT,OAAO,qCACjB;;AAE7B,cAAI,OAAO,WAAW;AAClB;;AAEJ,oCAA0B;AAC1B,8BAAoB,cAAc;AAClC,uBAAa,GAAG,IAAI,YAAY,QAAQ,EAAE;AACtC,uBAAW,YAAY;AACvB,uBAAU,OAAO;AACjB,sBAAU,QAAQ;AAClB,gBAAI,CAAC,aAAiB,GAAG,OAAO,EAAE;AAC9B,oBAAM,IAAI,WAAW,sBAAsB,GAAG,mDACG,EAAE;;AAEvD,8BAAkB,KAAK,CAAC,IAAG;;AAE/B,wBAAc;;;MAkBtB;AAEI,YAAI,KAAK,kBAAkB,QAAQ,UAAU;AACzC,gBAAM,IAAI,WAAW,yBAAyB,kBAAkB,KAAK;;AAEzE,aAAK,kBAAkB,KAAK;AAC5B,YAAI,SAAS;AACT,kBAAQ;;AAEZ,YAAI,KAAK;AACL,wBAAc,eAAe;;AAEjC,0BAAkB,YAAY,MAAM,OAAO;AAC3C,uBAAe,IAAI,cAAc,WAAW,OAAO,MAAM,WAAW;AACpE,kBAAU;AAEV,YAAI,eAAe;AACf,eAAK,QAAQ,MAAM,YAAY,MAAM,OAAO;;AAEhD,YAAI,aAAa;AACb,sBAAY;;AAEhB,YAAI;AACA,eAAK,kBAAkB,KAAK;;AAG5B,eAAK,qBAAqB,KAAK;;AAEnC,eAAO;;MAYX;AACI,aAAK,4BAA4B;;MAUrC;AACI,YAAI,WAAU,QAAQ,MAAM,QAAQ,YAAW,QAAO,WAAW;AAC7D;;AAGJ,kBAAS,OAAqB;AAC9B,YAAI,KAAK,YAAY,UAAa,KAAK,YAAY;AAC/C,eAAK,OAAO,KAAK,GAAG;;;MAc5B;AACI,eAAO;;MAWX;AACI,YAAI,CAAC,KAAK;AACN,cAAI,QAAQ;AACR,gBAAI,MAAM,QAAQ;AACd,mBAAK,QAAQ;AACT,oBAAI,eAAe;AACf,wBAAM,IAAI,UAAU,SAAS,KAAK;;;;AAM1C,oBAAM,IAAI,UAAU,SAAS,KAAK;;;AAK1C,iBAAO;;AAIX,eAAO;;MAcX,yGAAyG;AACrG,gCAAwB,OAAqB;AAC7C,wBAAgB,OAAqB;AACrC,qBAAa,OAAqB;AAClC,sBAAc,OAAqB;AACnC,sBAAc,mBAA+B;AAC7C,uBAAe,mBAA+B;AAE9C,8BAAsB;AACtB,4BAAoB;AACpB,8BAAsB;AACtB,wBAAgB;AAKZ,wBAAc,KAAK,EAAE;AACrB,sBAAY,KAAK,EAAE;AACnB,wBAAc,KAAK,EAAE;;AAKzB,YAAI,KAAK;UACL,eAAe;UACf;UACA;UACA;UACA,cAAc;UACd;UACA;UACA;UACA;UACA;WACD;AAEH,qBAAa,GAAG,IAAI,cAAc,QAAQ;AAEtC,wBAAc,GAAG,cAAc;AAC/B,wBAAc,GAAG,YAAY,KAAK,aAAa,SAAS;AACxD,wBAAc,GAAG,cAAc;;;MAwBvC;AACI,wBAAe,CAAE,MAAM,KAAK,MAAM,WAAW,KAAK;AAClD,YAAI,KAAK,mBAAmB;AACxB,kBAAO,qBAAqB,KAAK;;AAErC,YAAI,KAAK,SAAS;AACd,kBAAO,WAAW,KAAK;;AAE3B,eAAO;;MAOX;AACI,aAAK,QAAQ,QAAQ,YAAU,OAAO;AACtC,eAAO,KAAK,QAAQ;;MAExB;AACI,YAAI,KAAK,cAAc;AACnB,gBAAM,IAAI,MAAM,UAAU,KAAK;;;MAiCvC;AACI,YAAI,CAAC,KAAK;AACN,gBAAM,IAAI,MAAM,wBAAwB,KAAK;;AAGjD,YAAI,KAAK,cAAc;AACnB,gBAAM,IAAI,MAAM,wBAAwB,KAAK;;AAGjD,aAAK;AACL,mCAA2B;AAC3B,YAAI,EAAE,KAAK,cAAc;AACrB,iCAAuB,KAAK;
,eAAK,aAAa;mBAEb,MAAM,QAAQ,KAAK;AACxB,eAAK,aAAa,KAAK;mBAElB,OAAO,KAAK,eAAe;AAChC,eAAK,aAAa,CAAC,KAAK;;AAGxB,gBAAM,IAAI,WAAW,sEACN,KAAK;;;MAG5B;AACI,qBAAa,mBAAmB;AAChC,2BAAmB,WAAW,MAAM;AACpC,YAAI,KAAK,cAAc;AACnB,0BAAgB,KAAK;AACjB,uBAAW,IAAI,KAAK;;;AAG5B,aAAK,QAAQ,KAAK,UAAU,SAAS,YAAY,WAAW,KAAK,kBAAkB,KAAK,kBAAkB,MAAM,KAAK;AAErH,qBAAa;AACb,YAAI,KAAK,cAAc;AACnB,uBAAa,GAAG,IAAI,WAAW,QAAQ,EAAE;AACrC,iBAAK,KAAK,WAAW;;;AAG7B,aAAK,YAAY,CAAC,IAAI,UAAU;UACxB,MAAM,WAAW;UACjB;;AAER,aAAK,QAAQ;;MAEjB;AACI,iBAAS,oBAAoB;AAC7B,eAAO,OAAM,QAAQ,KAAK,MAAM;;MAEpC;AACI,wBAAe;UACX,kBAAkB,qBAAqB,KAAK;UAC5C,kBAAkB,qBAAqB,KAAK;UAC5C,iBAAiB,oBAAoB,KAAK;UAC1C,YAAY,KAAK;;AAErB,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;AAIf,UAAM,YAAY;AAClB,kBAA4B;sBACH;MACrB;AACI,cAAM,QAAQ,OAAO,KAAK;AAC1B,aAAK,gBAAgB;AACrB,YAAI,QAAQ;AACR,iBAAO;;AAEX,YAAI,KAAK,SAAS,QAAQ,KAAK,UAAU,KAAK;AAC1C,gBAAM,IAAI,oBAAoB,4BAA4B,KAAK;;AAGnE,aAAK,QAAQ,KAAK,SAAS,OAAO,KAAK,gBAAgB,KAAK;;MAEhE;AACI,kBAAU,oBAAoB;AAC9B,eAAO,KAAI;;MAEf;AACI,eAAO;;MAEX;AACI,wBAAe,CAAE,OAAO,KAAK;AAC7B,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;AAIf,QAAI,YAAY;AAChB,kBAA4B;kCACS;MACjC;AACI,cAAM,QAAQ,OAAO,KAAK;AAC1B,aAAK,gBAAgB;AACrB,YAAI,QAAQ;AACR,iBAAO;;AAEX,aAAK,QAAQ,KAAK,SAAS,OAAO,KAAK,gBAAgB,KAAK;;MAEhE;AACI,kBAAU,oBAAoB;AAC9B,eAAO,EAAE,IAAI,OAAK,EAAE,QAAQ,KAAK,QAAQ;;MAE7C;AACI,eAAO;;MAEX;AACI,wBAAe,CAAE,OAAO,KAAK;AAC7B,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;AAIf,oBAAgB,YAAY;AAC5B,kBAA4B;4BACC;MACzB;AACI,cAAM,QAAQ,OAAO,KAAK;AAC1B,aAAK,eAAe;AACpB,YAAI,QAAQ;AACR,iBAAO;;AAEX,aAAK,UAAU,IAAI,YAAoB;AACvC,aAAK,OAAO,KAAK,QAAQ,OAAO,KAAK,eAAe,KAAK;;MAE7D;AACI,kBAAU,oBAAoB;AAC9B,eAAO,KAAK,QAAQ,GAAG,KAAK;;MAEhC;AACI,eAAO;;MAEX;AACI,wBAAe,CAAE,MAAM,KAAK;AAC5B,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;AAIf,cAAQ,YAAY;AACpB,kBAA4B;ACjO5B;;;;;;;;;AAmBO;AACH,UAAI,OAAO,UAAU;AACjB,eAAO,aAAa,OAAO;;AAG3B,YAAI,MAAM,WAAW;AACjB,gBAAM,IAAI,WAAW,OAAO,gDAAgD,yBAC1D,MAAM;;AAE5B,qBAAa,GAAG,IAAI,GAAG,EAAE;AACrB,8BAAoB,MAAM;AAC1B,cAAI,CAAC,UAAU;AACX,kBAAM,IAAI,WAAW,OAAO,gDAAgD,yBAChD,KAAK,UAAU,yCAChB;;;AAGnC,eAAO;;;AAWR,mFAA+E;AAClF,UAAI,eAAe;AACf,eAAO;;AAEX,gCAA0B,aAAc,cAAa,KAAM,YAAW;AACtE;AACA,UAAI,YAAY;AACZ,uBAAe;;AAGf,uBAAe,cAAc,oBAAoB;;AAErD,aAAO,KAAK,MAAO,gBAAe,SAAS,KAAK;;AAE7C;AACH,UAAI,WAAW;AACX,eAAO;;AAEX,UAAI,YAAY;AACZ,kBAAU,UAAU,aAAa,MAAI,CAAC,aAAa,YAAY;iBAE1D,YAAY;AACjB,kBAAU,UAAU;;AAGpB,cAAM,IAAI,WAAW,2BAA2B;;AAEpD,aAAO;;AC1EX;;;;;;;;;AA+BO;AAEH,aAAO,KAAK;AACR,wBAAgB;AAChB,YAAI,eAAe;AACf,iBAAO,WAAc,GAAG,CAAC,GAAG,GAAG,GAAG;;AAGlC,iBAAO;;;;AASZ;AACH,aAAO,KAAK;AACR,wBAAgB;AAChB,YAAI,eAAe;AACf,iBAAO,WAAc,GAAG,CAAC,GAAG,GAAG,GAAG,GAAG;;AAGrC,iBAAO;;;;AAoBZ,uDAAmD,aAAa,oCAAoC;AACvG,aAAO,KAAK;AACR,YAAI,cAAc;AACd,uBAAa;;AAEjB,wBAAgB;AAEhB,YAAI,EAAE,MAAM,WAAW;AACnB,gBAAM,IAAI,WAAW,+DACd,EAAE,MAAM;;AAEnB,YAAI,OAAO,MAAM,WAAW;AACxB,gBAAM,IAAI,WAAW,iEACd,OAAO,MAAM;;AAExB,YAAI,QAAQ,QAAQ,KAAK,MAAM,WAAW;AACtC,gBAAM,IAAI,WAAW,+DACd,OAAO,MAAM;;AAGxB,YAAI,eAAe;AACf,cAAI,WAAc,GAAG,CAAC,GAAG,GAAG;;AAEhC,YAAI,YAAY;AACZ,gBAAM,IAAI,oBAAoB;;AAGlC,gBAAQ,OAAW,GAAG,QAAQ,SAAS,YAAY,SAAS,SAAS,SAAS,OAAO;AACrF,YAAI,QAAQ;AACR,cAAI,QAAU,GAAG;;AAErB,eAAO;;;AAeR,2CAAqC,aAAa,oCAAoC;AACzF,aAAO,KAAK;AACR,wBAAgB;AAChB,eAAO,eAAe,GAAG,QAAQ,MAAM,SAAS,SAAS,YAAY;;;AAatE,2CAAqC,CAAC,GAAG,cAAc;AAC1D,aAAO,KAAK;AACR,wBAAgB;AAChB,eAAO,yBAAyB,GAAG,QAAQ,MAAM,SAAS,SAAS,YAAY;;;AAQhF,iEAA6D,CAAC,GAAG,cAAc,iDAAgD;AAClI,aAAO,KAAK;AACR,YAAI,cAAc;AACd,uBAAa;;AAEjB,wBAAgB;AAChB,YAAI,EAAE,SAAS,KAAK,EAAE,SAAS;AAC3B,gBAAM,IAAI,WAAW,6EACD,EAAE;;AAE1B,YAAI,OAAO,SAAS,KAAK,OAAO,SAAS;AACrC,gBAAM,IAAI,WAAW,8EACD,EAAE;;AAE1B,gBAAQ,sBAAsB,GAAG;AACj
C,YAAI,YAAY;AACZ,gBAAM,IAAI,oBAAoB;;AAGlC,YAAI,SAAiB;UACjB,GAAG;UACH,QAAQ;UACR;UACA,KAAK,YAAY,SAAS,SAAS;UACnC,WAAW;UACX,YAAY;UACZ;UACA;;AAEJ,YAAI,eAAe;AACf,cAAI,WAAc,GAAG,CAAC,GAAG,GAAG,GAAG;;AAEnC,eAAO;;;AAaR,2CAAqC,CAAC,GAAG,GAAG,cAAc;AAC7D,aAAO,KAAK;AACR,wBAAgB;AAChB,eAAO,eAAe,GAAG,QAAQ,MAAM,SAAS,SAAS,YAAY;;;AAQtE,uDAAmD,CAAC,GAAG,GAAG,cAAc;AAC3E,aAAO,KAAK;AACR,YAAI,cAAc;AACd,uBAAa;;AAEjB,wBAAgB;AAChB,YAAI,EAAE,SAAS,KAAK,EAAE,SAAS;AAC3B,gBAAM,IAAI,WAAW,mEACd,EAAE;;AAEb,YAAI,OAAO,SAAS,KAAK,OAAO,SAAS;AACrC,gBAAM,IAAI,WAAW,oEACd,EAAE;;AAEb,gBAAQ,sBAAsB,GAAG;AACjC,YAAI,YAAY;AACZ,gBAAM,IAAI,oBAAoB;;AAGlC,YAAI,OAAW,GAAG,QAAQ,SAAS,YAAY,SAAS,SAAS,SAAS,SAAS;AACnF,YAAI,QAAQ;AACR,cAAI,QAAU,GAAG;;AAErB,YAAI,eAAe;AACf,cAAI,WAAc,GAAG,CAAC,GAAG,GAAG,GAAG,GAAG;;AAEtC,eAAO;;;2BAMe;MAC1B;AACI,cAAM;AACN,aAAK,OAAO;AACZ,aAAK,6BAA6B;AAClC,aAAK,2BAA2B;AAChC,iBAAS,WAAW;AACpB,aAAK,OAAO;AACZ,8BAAoC,KAAK,MAAM;AAC/C,YAAI,KAAK,SAAS,KAAK,KAAK,SAAS,KAAK,KAAK,SAAS;AACpD,gBAAM,IAAI,oBAAoB,qDAAqD,KAAK;;AAG5F,aAAK,aAAa,eAAe,KAAK,YAAY,MAAM;AACxD,aAAK,UAAU,eAAe,KAAK,WAAW,OAAO,IAAI,KAAK,SAAS,MAAM;AAC7E,aAAK,UAAU,KAAK,WAAW,OAAO,UAAU,KAAK;AACrD,yBAAiB,KAAK;AACtB,aAAK,aACD,KAAK,cAAc,OAAO,iBAAiB,KAAK;AACpD,wBAAgB,KAAK;AACrB,aAAK,aAAa,cAAc,KAAK;AACrC,aAAK,UAAU,KAAK,WAAW,OAAO,OAAO,KAAK;AAClD,aAAK,kBACD,eAAe,KAAK,mBAAmB,KAAK;AAChD,aAAK,iBAAiB,cAAc,KAAK;AACzC,aAAK,kBAAkB,eAAe,KAAK;AAC3C,aAAK,sBAAsB,eAAe,KAAK;AAC/C,aAAK,eAAe,eAAe,KAAK,gBAAgB,OAAO,IAAI,KAAK,cAAc,MAAM;AAC5F,YAAI,KAAK,SAAS,KACb,OAAM,QAAQ,KAAK,iBAAiB,KAAK,aAAa,WAAW;AAClE,gBAAM,IAAI,WAAW,iGAEd,KAAK,UAAU,KAAK;mBAEtB,KAAK,SAAS;AACnB,cAAI,OAAO,KAAK,iBAAiB;AAC7B,iBAAK,eAAe,CAAC,KAAK,cAAc,KAAK;qBAExC,KAAK,aAAa,WAAW;AAClC,kBAAM,IAAI,WAAW,0FACY,KAAK,UAAU,KAAK;;mBAGpD,KAAK,SAAS;AACnB,cAAI,OAAO,KAAK,iBAAiB;AAC7B,iBAAK,eACD,CAAC,KAAK,cAAc,KAAK,cAAc,KAAK;qBAE3C,KAAK,aAAa,WAAW;AAClC,kBAAM,IAAI,WAAW,4FACY,KAAK,UAAU,KAAK;;;;aAI1D;AAEH,iBAAqB,gBAAgB,MAAM;AAC3C,YAAI,OAAO,KAAK,eAAe,YAC3B,CAAC,wBAAsC,KAAK,YAAY,UAAU,GAAG;AACrE,gBAAM,IAAI,WAAW,oGACkB,KAAK,UAAU,KAAK;;;MAGnE;AACI,wBAAe;UACX,YAAY,KAAK;UACjB,SAAS,KAAK;UACd,SAAS,KAAK;UACd,YAAY,KAAK;UACjB,cAAc,KAAK;UACnB,YAAY,oBAAoB,KAAK;UACrC,SAAS,KAAK;UACd,iBAAiB,qBAAqB,KAAK;UAC3C,iBAAiB,qBAAqB,KAAK;UAC3C,qBAAqB,qBAAqB,KAAK;UAC/C,gBAAgB,oBAAoB,KAAK;;AAE7C,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;uBAOW;MACtB;AACI,cAAM,MAAM;AACZ,aAAK,SAAS;AACd,aAAK,WAAW;AAChB,aAAK,UAAU,KAAK;AACpB,8BAAoC,KAAK,SAAS;AAClD,aAAK,oBAAoB,eAAe,KAAK,qBAAqB,KAAK;AACvE,aAAK,mBAAmB,cAAc,KAAK;AAC3C,aAAK,oBAAoB,eAAe,KAAK;;MAEjD;AACI,qBAAa,mBAAmB;AAChC,4BAAoB,KAAK,eAAe,kBAAkB,IAAI,WAAW,SAAS;AAClF,YAAI,WAAW,gBAAgB;AAC3B,gBAAM,IAAI,WAAW,+DACR,WAAW;;AAE5B,yBAAiB,WAAW;AAC5B,4BAAoB,KAAK,WAAW,OAAO,CAAC,UAAU,KAAK;AAC3D,aAAK,SAAS,KAAK,UAAU,UAAU,aAAa,MAAM,KAAK,mBAAmB,KAAK,mBAAmB,MAAM,KAAK;AACrH,YAAI,KAAK;AACL,eAAK,OAAO,KAAK,UAAU,QAAQ,CAAC,KAAK,UAAU,MAAM,KAAK,iBAAiB,KAAK,iBAAiB,MAAM,KAAK;;AAEpH,aAAK,YAAY,CAAC,CAAE,MAAM,KAAK,OAAO,GAAG,MAAM,EAAG,cAAc;AAChE,aAAK,QAAQ;;MAEjB;AACI,eAAO,KAAK;AACR,mBAAS,oBAAoB;AAC7B;AACA,4BAAkB,KAAK,QAAQ,OAAO,OAAO,KAAK,KAAK;AACvD,sCAA4B,2BAAyC,KAAK,WAAW;AACrF,cAAI,uBAAuB,QAAQ,KAAK,SAAS;AAC7C,sBAAU,yBAAyB,QAAQ,KAAK,OAAO,QAAQ,WAAW,KAAK,SAAS,KAAK,SAAS,KAAK,YAAY,KAAK,cAAc;;AAG1I,gBAAI,KAAK,SAAS;AACd,wBAAU,eAAe,QAAQ,KAAK,OAAO,QAAQ,WAAW,KAAK,QAAQ,IAAI,KAAK,SAAS,KAAK,YAAY,KAAK,aAAa;uBAE7H,KAAK,SAAS;AAEnB,wBAAU,yBAAyB,QAAQ,KAAK,OAAO,QAAQ,WAAW,KAAK,SAAS,KAAK,SAAS,KAAK,YAAY,KAAK;uBAEvH,KAAK,SAAS;AACnB,wBAAU,eAAe,QAAQ,KAAK,OAAO,QAAQ,WAAW,KAAK,SAAS,KAAK,SAAS,KAAK,YAAY,KAAK;;AAGlH,oBAAM,IAAI,oBAAoB;;AAElC,gBAAI,KAAK,cAAc;AACnB,wBAAU,KAAK,WAAW,MAAM;;;AAGxC,iBAAO;;;MAGf;A
ACI,qBAAa,mBAAmB;AAChC,yBAAiB;AACjB,sBAAe,KAAK,eAAe,iBAC/B,WAAW,MAAM,GAAG,WAAW,SAAS,KACxC,WAAW,MAAM;AACrB,qBAAa,GAAG,IAAI,MAAM,QAAQ,EAAE;AAChC,yBAAe,iBAAiB,MAAM,IAAI,KAAK,WAAW,IAAI,KAAK,SAAS,KAAK,QAAQ,IAAI,OAAO,KAAK,iBAAiB,WAAW,KAAK,eACtI,KAAK,aAAa;AACtB,mBAAS,KAAK;;AAElB,0BAAkB,CAAC,WAAW;AAC9B,YAAI,KAAK,eAAe;AACpB,wBAAc,YAAY,OAAO;AACjC,sBAAY,KAAK,KAAK;;AAGtB,sBAAY,KAAK,KAAK;AACtB,wBAAc,YAAY,OAAO;;AAErC,eAAO;;MAEX;AACI,wBAAe;UACX,SAAS,KAAK;UACd,mBAAmB,qBAAqB,KAAK;UAC7C,mBAAmB,qBAAqB,KAAK;UAC7C,kBAAkB,oBAAoB,KAAK;;AAE/C,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;aAEJ;AAEH,YAAI,CAAE,cAAa,SAAS,OAAO,KAAK,YAAY,YAChD,KAAK,UAAU;AACf,gBAAM,IAAI,WAAW,0EACN,KAAK,UAAU,KAAK;;;;2BAInB;MACxB;AACI,cAAM,GAAG;AACT,iBAAO,WAAW;;MAEtB;AACI,wBAAe,MAAM;AACrB,eAAO,QAAO;AACd,eAAO;;aAEJ;AAEH,YAAK,OAAO,KAAK,eAAe,YAC5B,CAAC,wBAAsC,KAAK,YAAY,UAAU,GAAG;AACrE,gBAAM,IAAI,WAAW,8FACc,KAAK,UAAU,KAAK;;;;AAKnE,aAAO,YAAY;AACnB,kBAA4B;2BACA;MACxB;AACI,cAAM,GAAG;AACT,iBAAO,WAAW;;MAEtB;AACI,wBAAe,MAAM;AACrB,eAAO,QAAO;AACd,eAAO;;aAEJ;AAEH,YAAI,OAAO,KAAK,eAAe;AAC3B,cAAI,CAAE,OAAM,QAAQ,KAAK,eACpB,MAAK,WAAW,WAAW,KAAK,KAAK,WAAW,WAAW;AAC5D,kBAAM,IAAI,WAAW,2FAC0B,KAAK,UAAU,KAAK;;;;;AAMnF,aAAO,YAAY;AACnB,kBAA4B;kCACS;MACjC;AACI,cAAM;AACN,aAAK,YAAY,CAAC,IAAI,UAAU,CAAE,MAAM;AACxC,YAAI,KAAK,YAAY,UAAU,KAAK,YAAY;AAC5C,gBAAM,IAAI,WAAW,uGACyB,KAAK;;;MAG3D;AACI,qBAAa,mBAAmB;AAChC,YAAI,WAAW,WAAW;AACtB,gBAAM,IAAI,WAAW,qDACjB,KAAK,UAAU;;AAEvB,4BAAoB,KAAK,eAAe,kBAAkB,IAAI,WAAW,SAAS;AAClF,YAAI,WAAW,gBAAgB;AAC3B,gBAAM,IAAI,WAAW;;AAGzB,yBAAiB,WAAW;AAC5B,4BAAoB,KAAK,WAAW,OAAO,CAAC,KAAK,SAAS;AAC1D,aAAK,SAAS,KAAK,UAAU,UAAU,aAAa,WAAW,KAAK,mBAAmB,KAAK,mBAAmB,MAAM,KAAK;AAC1H,YAAI,KAAK;AACL,eAAK,OAAO,KAAK,UAAU,QAAQ,CAAC,KAAK,UAAU,WAAW,KAAK,iBAAiB,KAAK,iBAAiB,MAAM,KAAK;;AAGzH,aAAK,YACD,CAAC,IAAI,UAAU,CAAE,MAAM,GAAG,MAAM,EAAG,cAAc;AACrD,aAAK,QAAQ;;MAEjB;AACI,eAAO,KAAS;AACZ,uBAAY,oBAAoB;AAChC,cAAI,OAAM,MAAM,WAAW;AACvB,kBAAM,IAAI,WAAW,2FACY,OAAM,MAAM;;AAEjD,6BAAmB,OAAM;AACzB,4BAAkB,WAAW;AAC7B;AACA;AACA,cAAI,KAAK,eAAe;AACpB,oBAAQ;AACR,oBAAQ;;AAGR,oBAAQ;AACR,oBAAQ;;AAEZ,yBAAe,WAAW;AAC1B,wBAAc,WAAW;AACzB,0BAAgB,KAAK,WAAW;AAChC,0BAAgB,KAAK,WAAW;AAChC,0BAAgB,KAAK,QAAQ;AAC7B,0BAAgB,KAAK,QAAQ;AAE7B,4BAAkB,aAAa,QAAQ,SAAS,SAAS,KAAK;AAC9D,2BAAiB,aAAa,OAAO,SAAS,SAAS,KAAK;AAK5D,8BAAoB,CAAC,WAAW,WAAW,UAAU,KAAK;AAC1D,cAAI,KAAK,eAAe;AACpB,qBAAQ,WAAc,QAAO,CAAC,GAAG,GAAG,GAAG;;AAE3C,wBAAc,gBAAoB,QAAO,KAAK,OAAO,QAAQ,aAAa,KAAK,SAAS,KAAK;AAC7F,cAAI,KAAK,eAAe;AACpB,sBAAU,WAAc,SAAS,CAAC,GAAG,GAAG,GAAG;;AAE/C,cAAI,KAAK,QAAQ;AACb,sBACI,QAAU,SAAS,KAAK,KAAK,QAAQ,KAAK;;AAElD,cAAI,KAAK,cAAc;AACnB,sBAAU,KAAK,WAAW,MAAM;;AAEpC,iBAAO;;;MAGf;AACI,qBAAa,mBAAmB;AAChC,4BAAoB,WAAW;AAC/B;AACA;AACA;AACA,YAAI,KAAK,eAAe;AACpB,wBAAc;AACd,uBAAa;AACb,sBAAY;;AAGZ,wBAAc;AACd,uBAAa;AACb,sBAAY;;AAEhB,wBAAgB,KAAK,WAAW;AAChC,wBAAgB,KAAK,WAAW;AAChC,wBAAgB,KAAK,QAAQ;AAC7B,wBAAgB,KAAK,QAAQ;AAC7B,oBAAY,eAAe,KAAK;AAChC,oBAAY,cACR,aAAa,YAAY,aAAa,SAAS,SAAS,KAAK;AACjE,oBAAY,aACR,aAAa,YAAY,YAAY,SAAS,SAAS,KAAK;AAChE,eAAO;;MAEX;AACI,wBAAe,MAAM;AACrB,eAAO,QAAO;AACd,eAAO;;;AAIf,oBAAgB,YAAY;AAC5B,kBAA4B;gCACO;MAC/B;AACI,cAAM,MAAM;AACZ,aAAK,gCAAgC;AACrC,aAAK,gCAAgC;AACrC,aAAK,kBAAkB;AACvB,aAAK,kBAAkB;AACvB,YAAI,QAAO,WAAW;AAClB,gBAAM,IAAI,WAAW;;AAGzB,YAAI,QAAO,qBAAqB,QAAQ,QAAO,qBAAqB,QAChE,QAAO,oBAAoB;AAC3B,gBAAM,IAAI,WAAW;;AAKzB,YAAI,QAAO,WAAW,QAAQ,QAAO,YAAY,UAC7C,QAAO,YAAY;AACnB,gBAAM,IAAI,WAAW,gBAAgB,KAAK,uEACF,KAAK,UAAU,QAAO;;AAElE,aAAK,kBACD,QAAO,mBAAmB,OAAO,IAAI,QAAO;AAChD,aAAK,uBAAuB,eAAe,QAAO,wBAAwB,KAAK;AAC/E,aAAK,uBAAuB,eAAe,QAAO;AAClD,aAAK,sBAAsB,cAAc,QAAO;AAChD,aAAK,uBAAuB,eAAe,QAAO,wBAAwB,KAAK;AAC/E,aAAK,uBAAuB,e
AAe,QAAO;AAClD,aAAK,sBAAsB,cAAc,QAAO;;MAEpD;AACI,qBAAa,mBAAmB;AAChC,YAAI,WAAW,SAAS,KAAK,OAAO;AAChC,gBAAM,IAAI,WAAW,0BAA0B,KAAK,0BAC7C,KAAK,OAAO,gCACZ,KAAK,UAAU;;AAE1B,4BAAoB,KAAK,eAAe,kBAAkB,IAAI,WAAW,SAAS;AAClF,YAAI,WAAW,gBAAgB,QAAQ,WAAW,eAAe;AAC7D,gBAAM,IAAI,WAAW,oEACJ,KAAK,UAAU,WAAW;;AAE/C,yBAAiB,WAAW;AAC5B,qCAA6B,KAAK,WAAW,OAAO,CAAC,UAAU,KAAK;AACpE,qCAA6B;AAC7B,qBAAa,GAAG,IAAI,KAAK,MAAM,EAAE;AAC7B,+BAAqB,KAAK;;AAE9B,6BAAqB,KAAK,WAAW,KAAK,iBAAiB,KAAK;AAChE,0BAAkB;AAClB,aAAK,kBAAkB,KAAK,UAAU,oBAAoB,sBAAsB,WAAW,KAAK,sBAAsB,KAAK,sBAAsB,WAAW,KAAK;AACjK,aAAK,kBAAkB,KAAK,UAAU,oBAAoB,sBAAsB,WAAW,KAAK,sBAAsB,KAAK,sBAAsB,WAAW,KAAK;AACjK,YAAI,KAAK;AACL,eAAK,OAAO,KAAK,UAAU,QAAQ,CAAC,KAAK,UAAU,WAAW,KAAK,iBAAiB,KAAK,iBAAiB,WAAW,KAAK;;AAG1H,eAAK,OAAO;;AAEhB,aAAK,YACD,CAAC,IAAI,UAAU,CAAE,MAAM,KAAK,OAAO,GAAG,MAAM,EAAG,cAAc;AACjE,aAAK,QAAQ;;MAEjB;AACI,eAAO,KAAK;AACR,mBAAS,oBAAoB;AAC7B;AACA,cAAI,KAAK,SAAS;AACd,kBAAM,IAAI,oBAAoB;qBAEzB,KAAK,SAAS;AACnB,gBAAI,KAAK,eAAe;AACpB,uBAAS,WAAc,QAAQ,CAAC,GAAG,GAAG,GAAG;;AAE7C,qBAAS,gBAAoB,QAAQ,KAAK,gBAAgB,QAAQ,KAAK,gBAAgB,QAAQ,KAAK,SAAS,KAAK,SAAS,KAAK,cAAc;;AAElJ,cAAI,KAAK;AACL,qBAAS,QAAU,QAAQ,KAAK,KAAK,QAAQ,KAAK;;AAEtD,cAAI,KAAK,cAAc;AACnB,qBAAS,KAAK,WAAW,MAAM;;AAEnC,cAAI,KAAK,eAAe;AACpB,qBAAS,WAAc,QAAQ,CAAC,GAAG,GAAG,GAAG;;AAE7C,iBAAO;;;MAGf;AACI,wBAAe,MAAM;AACrB,eAAO,QAAO;AACd,eAAO,QAAO;AACd,eAAO,QAAO;AACd,eAAO,QAAO;AACd,gBAAO,0BACH,qBAAqB,KAAK;AAC9B,gBAAO,0BACH,qBAAqB,KAAK;AAC9B,gBAAO,0BACH,qBAAqB,KAAK;AAC9B,gBAAO,0BACH,qBAAqB,KAAK;AAC9B,gBAAO,yBACH,oBAAoB,KAAK;AAC7B,gBAAO,yBACH,oBAAoB,KAAK;AAC7B,eAAO;;;AAIf,kBAAc,YAAY;kCACW;MACjC;AACI,cAAM,GAAG;;;AAIjB,oBAAgB,YAAY;AAC5B,kBAA4B;yBACA;MACxB;AACI,cAAM,GAAG;AACT,eAAO,WAAW;AAClB,aAAK,YAAY,CAAC,CAAE,MAAM;;MAE9B;AACI,wBAAe,MAAM;AACrB,eAAO,QAAO;AACd,eAAO,QAAO;AACd,eAAO;;aAEJ;AAEH,YAAI,OAAO,KAAK,eAAe,YAC3B,CAAC,wBAAsC,KAAK,YAAY,UAAU,GAAG;AACrE,gBAAM,IAAI,WAAW,yFACS,KAAK,UAAU,KAAK;;;;AAK9D,WAAO,YAAY;AACnB,kBAA4B;6BACI;MAC5B;AACI,cAAM;AACN,YAAI,OAAO,KAAK,aAAa;AACzB,eAAK,WACD,CAAC,CAAC,KAAK,UAAU,KAAK,WAAW,CAAC,KAAK,UAAU,KAAK;mBAErD,OAAO,KAAK,SAAS,OAAO;AACjC,eAAK,WAAW;YACZ,CAAC,KAAK,SAAS,IAAI,KAAK,SAAS;YACjC,CAAC,KAAK,SAAS,IAAI,KAAK,SAAS;;;AAIrC,eAAK,WAAW,KAAK;;AAEzB,aAAK,aACD,KAAK,eAAe,SAAY,iBAAiB,KAAK;AAC1D,aAAK,YAAY,CAAC,CAAE,MAAM;;MAE9B;AACI,YAAI,KAAK,eAAe;AACpB,iBAAO;YACH,WAAW;YAAI,WAAW;YAC1B,WAAW,KAAK,KAAK,SAAS,GAAG,KAAK,KAAK,SAAS,GAAG;YACvD,WAAW,KAAK,KAAK,SAAS,GAAG,KAAK,KAAK,SAAS,GAAG;;;AAI3D,iBAAO;YACH,WAAW;YACX,WAAW,KAAK,KAAK,SAAS,GAAG,KAAK,KAAK,SAAS,GAAG;YACvD,WAAW,KAAK,KAAK,SAAS,GAAG,KAAK,KAAK,SAAS,GAAG;YAAI,WAAW;;;;MAIlF;AACI,eAAO,KAAK;AACR,mBAAS,oBAAoB;AAC7B,cAAI,KAAK,eAAe;AACpB,4BAAgB,eAAiB,QAAQ,KAAK,SAAS,GAAG,IAAI,OAAO,MAAM,KAAK,KAAK,SAAS,GAAG,KAAK,KAAK,SAAS,GAAG,IAAI;AAC3H,mBAAO,eAAiB,SAAS,KAAK,SAAS,GAAG,IAAI,OAAO,MAAM,KAAK,KAAK,SAAS,GAAG,KAAK,KAAK,SAAS,GAAG,IAAI;;AAGnH,4BAAgB,eAAiB,QAAQ,KAAK,SAAS,GAAG,IAAI,OAAO,MAAM,KAAK,KAAK,SAAS,GAAG,KAAK,KAAK,SAAS,GAAG,IAAI;AAC3H,mBAAO,eAAiB,SAAS,KAAK,SAAS,GAAG,IAAI,OAAO,MAAM,KAAK,KAAK,SAAS,GAAG,KAAK,KAAK,SAAS,GAAG,IAAI;;;;MAI/H;AACI,wBAAe,CAAE,UAAU,KAAK,UAAU,YAAY,KAAK;AAC3D,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;AAIf,eAAW,YAAY;AACvB,kBAA4B;+BACM;MAC9B;AACI,cAAM;AACN,aAAK,eAAe,CAAC,GAAG;AACxB,aAAK,YAAY,CAAC,CAAE,MAAM;AAC1B,aAAK,OAAO,KAAK,QAAQ,OAAO,KAAK,eAAe,KAAK;AACzD,aAAK,aACD,KAAK,cAAc,OAAO,iBAAiB,KAAK;;MAExD;AACI,YAAI,KAAK,eAAe;AACpB,yBAAe,WAAW,MAAM,OAAO,OAAO,KAAK,KAAK,KAAK,WAAW;AACxE,wBAAc,WAAW,MAAM,OAAO,OAAO,KAAK,KAAK,KAAK,WAAW;AACvE,iBAAO,CAAC,WAAW,IAAI,WAAW,IAAI,QAAQ;;AAG9C,yBAAe,WAAW,MAAM,OAAO,OAAO,KAAK,KAAK,KAAK,WAAW;AACxE,wBAAc,WAAW,MAAM,OAAO,OAAO,KAAK,KA
AK,KAAK,WAAW;AACvE,iBAAO,CAAC,WAAW,IAAI,QAAQ,OAAO,WAAW;;;MAGzD;AACI,eAAO,KAAS;AACZ,uBAAY,oBAAoB;AAChC,6BAAmB,OAAM;AACzB,cAAI,KAAK,eAAe;AACpB,qBAAQ,WAAc,QAAO,CAAC,GAAG,GAAG,GAAG;AACvC,2BAAe,KAAK,KAAK,KAAK,WAAW;AACzC,0BAAc,KAAK,KAAK,KAAK,WAAW;AACxC,4BAAgB,OAAM,sBAAsB,CAAC,QAAQ;AACrD,mBAAO,WAAc,SAAS,CAAC,GAAG,GAAG,GAAG;;AAGxC,2BAAe,KAAK,KAAK,KAAK,WAAW;AACzC,0BAAc,KAAK,KAAK,KAAK,WAAW;AACxC,mBAAO,OAAM,sBAAsB,CAAC,QAAQ;;;;MAIxD;AACI,wBAAe,CAAE,MAAM,KAAK,MAAM,YAAY,KAAK;AACnD,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;AAIf,iBAAa,YAAY;AACzB,kBAA4B;ACn0B5B;;;;;;;;;AAoCO,6DAAuD,CAAC,GAAG,cAAc;AAC5E,aAAO,KAAK;AACR,YAAI,cAAc;AACd,uBAAa;;AAEjB,wBAAgB;AAChB,gBAAQ,sBAAsB,GAAG;AACjC,YAAI,EAAE,SAAS;AACX,gBAAM,IAAI,WAAW,mEACd,EAAE;;AAEb,YAAI,gBAAgB,SAAS;AACzB,gBAAM,IAAI,WAAW,yDACd,gBAAgB;;AAE3B,YAAI,iBAAoB,GAAG,iBAAiB,SAAS,YAAY,SAAS,SAAS,SAAS,QAAQ;AACpG,YAAI,eAAe;AACf,cAAI,WAAc,GAAG,CAAC,GAAG,GAAG,GAAG;;AAEnC,eAAO;;;kCAGsB;MACjC;AACI,cAAM,GAAG;AACT,aAAK,kBAAkB;AACvB,aAAK,kBACD,KAAK,mBAAmB,OAAO,IAAI,KAAK;AAC5C,aAAK,uBAAuB,eAAe,KAAK,wBAAwB,KAAK;AAC7E,aAAK,sBAAsB,cAAc,KAAK;AAC9C,aAAK,uBAAuB,eAAe,KAAK;;MAEpD;AACI,qBAAa,mBAAmB;AAChC,YAAI,WAAW,SAAS;AACpB,gBAAM,IAAI,WAAW,uEACQ,KAAK,UAAU;;AAEhD,4BAAoB,KAAK,eAAe,kBAAkB,IAAI;AAC9D,YAAI,WAAW,gBAAgB,QAAQ,WAAW,eAAe;AAC7D,gBAAM,IAAI,WAAW,yFACU,WAAW;;AAE9C,yBAAiB,WAAW;AAC5B,qCAA6B;UACzB,KAAK,WAAW;UAAI,KAAK,WAAW;UAAI;UAAU,KAAK;;AAE3D,aAAK,kBAAkB,KAAK,UAAU,oBAAoB,sBAAsB,MAAM,KAAK,sBAAsB,KAAK,sBAAsB,MAAM,KAAK;AACvJ,YAAI,KAAK;AACL,eAAK,OAAO,KAAK,UAAU,QAAQ,CAAC,WAAW,KAAK,kBAAkB,MAAM,KAAK,iBAAiB,KAAK,iBAAiB,MAAM,KAAK;;AAGnI,eAAK,OAAO;;AAEhB,aAAK,QAAQ;;MAEjB;AACI,eAAO,KAAK;AACR,mBAAS,oBAAoB;AAC7B,wBAAc,kBAAgB,QAAQ,KAAK,gBAAgB,QAAQ,KAAK,SAAS,KAAK,SAAS,KAAK,YAAY;AAEhH,cAAI,KAAK;AACL,sBAAU,QAAU,SAAS,KAAK,KAAK,QAAQ,KAAK;;AAExD,cAAI,KAAK,cAAc;AACnB,sBAAU,KAAK,WAAW,MAAM;;AAEpC,iBAAO;;;MAGf;AACI,qBAAa,mBAAmB;AAChC,qBAAa,KAAK,eAAe,kBAAkB,WAAW,KAAK,WAAW;AAC9E,qBAAa,KAAK,eAAe,kBAAkB,WAAW,KAAK,WAAW;AAC9E,2BAAmB,KAAK,eAAe,kBACnC,WAAW,KAAK,KAAK,kBACrB,WAAW,KAAK,KAAK;AACzB,wBAAgB,iBAAiB,MAAM,KAAK,WAAW,IAAI,KAAK,SAAS,KAAK,QAAQ;AACtF,wBAAgB,iBAAiB,MAAM,KAAK,WAAW,IAAI,KAAK,SAAS,KAAK,QAAQ;AACtF,YAAI,KAAK,eAAe;AACpB,iBAAO,CAAC,WAAW,IAAI,YAAY,SAAS;;AAI5C,iBAAO,CAAC,WAAW,IAAI,SAAS,SAAS;;;MAGjD;AACI,wBAAe,MAAM;AACrB,gBAAO,qBAAqB,KAAK;AACjC,gBAAO,0BACH,qBAAqB,KAAK;AAC9B,gBAAO,0BACH,qBAAqB,KAAK;AAC9B,gBAAO,yBACH,oBAAoB,KAAK;AAC7B,eAAO;;;AAIf,oBAAgB,YAAY;AAC5B,kBAA4B;ACzI5B;;;;;;;;;AAkDO;AACH,UAAI,MAAM,QAAQ;AACd,YAAI,gBAAgB,QAAQ,aAAa;AACrC,gBAAM,IAAI,WAAW;;AAGzB,YAAI,gBAAgB;AAChB,sBAAY,OAAO,MAAM,OAAO,SAAS,cAAc,OAAO;AAC9D,mBAAS,OAAO,MAAM,GAAG,OAAO,SAAS;;AAE7C,YAAI,OAAO,SAAS;AAChB,yBAAe,OAAO,MAAM,GAAG,OAAO;;AAE1C,iBAAS,OAAO;;AAEpB;AACI,YAAI,KAAK,QAAQ,MAAM,QAAQ;AAC3B,iBAAO;;AAGP,iBAAO,CAAC;;;AAGhB,qBAAe,aAAa;AAC5B,kBAAY,aAAa;AACzB,aAAO,CAAE,QAAQ,cAAc;;AA6C5B,oEAAgE,iCAAiC,4BAA4B;AAChI,aAAO,KAAS;AACZ,qBAAa,OAAO,MAAM;AAC1B,YAAI,OAAO;AACP,gBAAM,IAAI,WAAW,uCAAuC;;AAIhE,qBAAa,CAAC,GAAG,GAAG,OAAO,QAAiB,GAAG;AAC/C,iBAAS,WAAc,QAAQ;AAC/B,YAAI,aAAa;AACb,gBAAM,IAAI,oBAAoB;;AAIlC,YAAI;AACA,kBAAQ,KAAK;;AAGjB,YAAI,QAAQ;AACR,iBAAO,KAAK,OAAO,QAAQ,OAAO;AAClC,cAAI,KAAK,SAAS,OAAO;AACrB,mBAAO,WAAe,MAAM;;AAEhC,iBAAO,WAAc,MAAM;;AAE/B,YAAI;AACA,mBAAS,SAAY,QAAQ;AAC7B,cAAI,QAAQ;AACR,mBAAO,SAAY,MAAM;;;AAYjC,+BAAuB;AACvB;AACA,qBAAa;AACb,0BAAkB,OAAO,MAAM;AAC/B,8BAAsB,QAAY;AAClC;AACA,YAAI,QAAQ;AACR,yBAAe,QAAY;;AAE/B,qBAAa,GAAG,IAAI,WAAW,EAAE;AAC7B,+BAAqB,cAAc;AACnC,8BAAoB,KAAS,MAAM,aAAa,cAAc;AAC9D,cAAI,QAAQ;AACR,yBAAa,YAAY;AACzB,qBAAS,YAAY;;AAGrB,kCAAsB,KAAS;AAC3B,+BAAiB,aAAa;AAC9B,kCAAoB,UAAa,UAAU,IAAI;AAE/C,6BAAe,YAAY,GAAG,IAAI,UAA
U,IAAI,OAAO,GAAG,IAAI;AAC9D,gCAAkB,OAAO,IAAI;AACzB,uBAAO,YAAY,GAAG,GAAG,IAAI,UAAU,IAAI,MAAM,IAAI;;AAEzD,qBAAO,CAAE,QAAQ;;AAErB,yBAAa,cAAc;AAC3B,qBAAS,cAAc;;AAE3B,cAAI;AACA,2BAAe,KAAK;;;AAG5B;AACA,YAAI;AACA,uBAAa;AACb,oBAAU,MAAU,gBAAgB;;AAExC,eAAO,CAAC,YAAY,SAAS;;;sBAGZ;MACrB;AACI,cAAM;AACN;AACA,YAAI,KAAK,QAAQ;AACb,gBAAM,IAAI,WAAW;mBAEhB,MAAM,QAAQ,KAAK;AACxB,iBAAO,IAAI,gBAAgB,CAAE,OAAO,KAAK;;AAGzC,iBAAO,KAAK;;AAEhB,YAAI,KAAK,aAAa;AAClB,gBAAM,IAAI,WAAW;;AAGzB,aAAK,OAAO;AACZ,aAAK,kBACD,KAAK,mBAAmB,OAAO,QAAQ,KAAK;AAChD,aAAK,cAAc,KAAK,eAAe,OAAO,QAAQ,KAAK;AAC3D,aAAK,cAAc,KAAK,eAAe,OAAO,QAAQ,KAAK;AAC3D,aAAK,YAAY,KAAK,YAAY,OAAO,QAAQ,KAAK;AACtD,aAAK,SAAS,KAAK,UAAU,OAAO,QAAQ,KAAK;AACjD,aAAK,kBAAkB;AACvB,aAAK,YAAY,CAAC,IAAI,UAAU,CAAE,MAAM;AACxC,aAAK,YAAY;AACjB,aAAK,UAAU;AAEf,aAAK,eAAe;AAGpB,aAAK,aAAa;;MAItB;AACI,YAAI,KAAK,WAAW;AAChB,4BAAkB,MAAM,QAAQ,KAAK,KAAK,aAAa,KAAK,KAAK,UAAU,SAAS;AACpF,iBAAO,QAAiB,GAAG,WAAW,IAAI,OAAK;;AAG/C,iBAAO,KAAK;;;MAKpB;AACI,aAAK,UAAU;;MAEnB;AACI,YAAI,gBAAgB;AAChB,uBAAa,WAAW;;AAE5B,qBAAa;AAEb,wBAAgB,KAAK,KAAK;AAC1B,YAAI,CAAC,MAAM,QAAQ;AACf,sBAAY,CAAC;;AAEjB,0BAAkB,UAAU;AAC5B;AACA,YAAI,KAAK;AACL,wBAAc,CAAC,WAAW,IAAI,WAAW,IAAI;;AAG7C,wBAAc,CAAC,WAAW,IAAI;;AAElC,YAAI,KAAK;AACL,6BAAmB;AACnB,4BAAkB;AACd,uBAAW,KAAK,CAAC,WAAW,IAAI;;AAEpC,iBAAO,CAAC,aAAa,OAAO;;AAG5B,iBAAO;;;MAGf;AACI,eAAO,KAAS;AACZ,cAAI,MAAM,QAAQ;AACd,mBAAO,KAAK;;AAEhB,6BAAmB,KAAK,kBAAkB,OAAO;AACjD,cAAI,KAAK;AACL,8BAAkB,KAAK,OAAO,IAAI,OAAK;AACvC,mBAAO,CAAC,YAAY,OAAO;;AAG3B,mBAAO;;;;UAUf;AACA,YAAI,KAAK,WAAW;AAChB,4BAAkB,MAAM,QAAQ,KAAK,KAAK,aAAa,KAAK,KAAK,UAAU,SAAS;AACpF,yBAAe;AACf,uBAAa,GAAG,IAAI,WAAW,EAAE;AAC7B,mBAAO,KAAK;;AAEhB,iBAAO;;AAGP,iBAAO,KAAK;;;UAGhB;AACA,aAAK,UAAU;;MAEnB;AAGI,8BAAsB;AACtB,YAAI,KAAK,gBAAgB;AACrB,gBAAM,IAAI,oBAAoB;;AAElC,YAAI,gBAAgB;AAChB,uBAAa,WAAW;;AAE5B,qBAAa;AACb,0BAAkB,KAAK,WAAW,WAAW,KAAK;AAClD,yBAAiB,WAAW,MAAM;AAClC,aAAK,UAAU,KAAK,IAAI,UAAU,CAAE,OAAO,CAAC,WAAW,MAAM,GAAG;AAGhE,+BAAuB,CAAC,WAAW,IAAI,OAAO,WAAW,MAAM;AAC/D,YAAI,iBAAiB;AACjB,gBAAM,IAAI,oBAAoB;;AAG9B,eAAK,KAAK,MAAM;;AAGpB;AACA,YAAI,MAAM,QAAQ,KAAK,KAAK;AACxB,sBAAY,KAAK,KAAK;;AAGtB,sBAAY,CAAC,KAAK,KAAK;;AAE3B,YAAI,KAAK,aAAa;AAClB,cAAI,CAAC,aAAiB,KAAK,UAAU,IAAI,UAAQ,KAAK,MAAM,KAAK,MAAM,SAAS,KAAK;AACjF,kBAAM,IAAI,WAAW,6FACqB,KAAK,wCACd,KAAK,KAAK;;;AAI/C,eAAK,YACD,UAAU,IAAI,SAAO,IAAI,UAAU,CAAE,OAAO,CAAC,MAAM;;AAE3D,YAAI,KAAK;AACL,eAAK;;;MAoBb,+BAA+B;AAC3B,aAAK;AACD,cAAI,CAAC,KAAK;AACN,kBAAM,IAAI,eAAe;;AAE7B,4BAAkB,KAAK,UAAU,GAAG,MAAM;AAC1C,cAAI,aAAa;AACb,kBAAM,IAAI,WAAW;;AAQzB,cAAI,KAAK,WAAW;AAChB,gBAAI,MAAM,QAAQ,KAAK,KAAK;AACxB,mBAAK,UACD,KAAK,KAAK,UAAU,IAAI,SAAO,OAAU,CAAC,WAAW;;AAGzD,mBAAK,UAAU,CAAC,OAAU,CAAC,WAAW,KAAK,KAAK;;qBAG/C,UAAU;AAEf,oBAAY,KAAK;AAEjB,gBAAI,KAAK,cAAc;AACnB,sBAAY,KAAK;AACjB,mBAAK,aAAa;;AAEtB,gBAAI,MAAM,QAAQ,KAAK,KAAK;AACxB,mBAAK,UACD,KAAK,KAAK,UAAU,IAAI,SAAO,OAAU,CAAC,WAAW;;AAGzD,mBAAK,QAAQ,KAAK,OAAU,CAAC,WAAW,KAAK,KAAK;;;AAItD,gBAAI,CAAC,MAAM,QAAQ;AACf,uBAAS,CAAC;;AAEd,gBAAI,OAAO,WAAW,KAAK,QAAQ;AAC/B,oBAAM,IAAI,WAAW,SAAS,KAAK,gBAAgB,KAAK,QAAQ,oCACzC,OAAO,0CACb;;AAErB,gBAAI,aAAa;AAKb,mBAAK,WAAW,KAAK,KAAK,QAAQ;;AAGlC,sBAAY,KAAK;;AAErB,8BAAiB,GAAG,SAAQ,KAAK,QAAQ,QAAQ,EAAE;AAC/C,4BAAc,OAAO;AACrB,0BAAY,MAAM,QAAQ,KAAK,KAAK,aAChC,KAAK,KAAK,UAAU,UACpB,KAAK,KAAK;AACd,oCAAsB,CAAC,WAAW;AAClC,kBAAI,CAAC,aAAiB,MAAM,OAAO;AAC/B,sBAAM,IAAI,WAAW,SAAS,qCAAoC,KAAK,wBACjD,iCAAiC,MAAM;;AAEjE,mBAAK,QAAQ,UAAS;;;AAG9B,eAAK,UAAU,KAAK,QAAQ,IAAI,WAAS,KAAS,MAAM;;;MAGhE;AAEI,2BAAmB,UAAU,OAAO,OAAO,OAAO;AAClD,wBAAgB,UAAU,OAAO,OAAO,OAAO;AAC/C,YAAI,UAAU;AACV,mBAAS;;AAEb,6BAAqB,gBAAgB,QAAQ,cAAc,WAAW,KAAK;AAC3E,iBAAS,aAAa;AACtB,uBAAe,aAAa;
AAC5B,oBAAY,aAAa;AAIzB,+BAAuB;AACvB,8BAAsB;AACtB,YAAI,gBAAgB;AAChB,iBAAO,kBAAkB;AACzB,6BAAmB,iBAAiB,OAAO;AAC3C,eAAK,YAAY;AACjB,8BAAoB;AAChB,iBAAK,UAAU,KAAK,IAAI,UAAU,CAAE,OAAO,MAAM;;AAKrD,4BAAkB,gBAAgB,OAAO,KAAK;;AAElD,YAAI,aAAa;AACb,iBAAO,eAAe;AACtB,6BAAmB,iBAAiB,OAAO;AAE3C,eAAK,eAAe,UAAU;;AAElC,yBAAiB,iBAAiB,cAAc;AAChD,YAAI;AAEA,4BAAkB,CAAC,QAAQ,OAAO;AAClC,gCAAsB,KAAK,UAAU,OAAO;AAE5C,oCAA0B,KAAK;AAC/B,eAAK,YAAY;AACjB,yBAAe,MAAM,MAAM,WAAW;AACtC,eAAK,YAAY;AACjB,iBAAO;;AAGP,iBAAO,MAAM,MAAM,QAAQ;;;MAInC;AAII,eAAO,KAAK;AACR,uBAAa,UAAU,OAAO,OAAO,OAAO;AAC5C,2BAAiB,UAAU,OAAO,OAAO,OAAO;AAChD,6BAAmB,UAAU,OAAO,OAAO,OAAO;AAClD,mBAAS,oBAAoB;AAC7B,cAAI,gBAAgB;AAChB,gBAAI,KAAK;AACL,6BAAe,KAAK;;AAGpB,6BAAe,KAAK,gBAAgB;;;AAG5C,4BAAkB,MAAM,QAAQ,KAAK,KAAK,aAAa,KAAK,KAAK,UAAU,SAAS;AACpF,cAAI,aAAa,WAAW;AACxB,kBAAM,IAAI,WAAW,iBAAiB,qCAC/B,aAAa;;AAExB,cAAI,KAAK;AACL,oBAAQ,KAAK;;AAEjB,iCAAuB,CAAE;AAEzB,wBAAa;AAGT,6BAAgB,KAAK,KAAK,KAAK,CAAC,SAAQ,OAAO,UAAS;AAExD,mBAAO,CAAC,SAAQ,IAAI,SAAQ,MAAM;;AAGtC,6BAAmB,IAAI,OAAM,QAAQ,cAAc,KAAK,aAAa,MAAM,MAAM,KAAK,QAAQ,KAAK;AACnG,6BAAmB,WAAW;AAC9B,0BAAgB,WAAW;AAC3B,yBAAe,WAAW;AAC1B,cAAI,KAAK;AACL,iBAAK,YAAY,QAAQ;;AAE7B,yBAAe,KAAK,kBAAkB,UAAU;AAEhD,cAAI,KAAK;AACL,mBAAO,CAAC,QAAQ,OAAO;;AAGvB,mBAAO;;;;MAInB;AACI,eAAO,KAAK;AAGR,6BAAmB,OAAU,OAAO;AAEpC,yBAAe,MAAQ,cAAc,CAAC,GAAG;AACzC,yBAAe,aAAa;AAC5B,cAAI,MAAM,QAAQ,KAAK,KAAK;AACxB,mBAAO,KAAK,KAAK,UAAU,IAAI,SAAO,MAAM,IAAI,OAAO,cAAc,CAAC,GAAG,QAAQ;;AAGjF,mBAAO,KAAK,KAAK,YAAY,IACzB,CAAC,OAAO,cAAc,CAAC,GAAG,KAAK,KAAK,eACpC,CAAC;;;;UAIb;AACA,YAAI,CAAC,KAAK;AACN,iBAAO;;AAGX,eAAO,KAAK,KAAK;;UAEjB;AAEA,YAAI,CAAC,KAAK;AACN,iBAAO,KAAK,KAAK;;AAErB,eAAO,KAAK,KAAK;;MAErB;AACI,cAAM,6BAA6B;AACnC,YAAI,KAAK,QAAQ;AACb,eAAK,KAAK,6BAA6B;;;MAG/C;AACI,2BAAmB,MAAM;AACzB,wBAAe;UACX,iBAAiB,KAAK;UACtB,aAAa,KAAK;UAClB,aAAa,KAAK;UAClB,UAAU,KAAK;UACf,QAAQ,KAAK;;AAEjB,YAAI,KAAK,gBAAgB;AACrB,kBAAO,kBAAkB,KAAK;;AAElC,2BAAmB,KAAK,KAAK;AAC7B,YAAI,KAAK,mBAAmB,IAAI;AAC5B,kBAAO,UAAU;YACb,WAAa,KAAK,KAAK;YACvB,QAAU;;;AAIlB,eAAO,OAAO,OAAO,IAAI,YAAY,YAAY;;aAG9C,yCAAwC;AAC3C,2BAAmB,QAAO;AAC1B,qBAAa,YAAY,YAAY;AACrC,eAAO,IAAI,IAAI,OAAO,OAAO,SAAQ,CAAE;;;AAI/C,QAAI,YAAY;AAChB,kBAA4B;0BASC;;gCAEM;MAC/B;AACI,cAAM;AACN,aAAK,qBAAqB;AAC1B,aAAK,6BAA6B;AAClC,aAAK,gCAAgC;AACrC,aAAK,2BAA2B;AAChC,aAAK,QAAQ,KAAK;AAClB,8BAAsB,KAAK,OAAO;AAClC,aAAK,aAAa,cAAc,KAAK,cAAc,OAAO,KAAK,qBAAqB,KAAK;AACzF,aAAK,UAAU,KAAK,WAAW,OAAO,OAAO,KAAK;AAClD,aAAK,oBAAoB,eAAe,KAAK,qBAAqB,KAAK;AACvE,aAAK,uBAAuB,eAAe,KAAK,wBAAwB,KAAK;AAC7E,aAAK,kBACD,eAAe,KAAK,mBAAmB,KAAK;AAChD,aAAK,oBAAoB,eAAe,KAAK;AAC7C,aAAK,uBAAuB,eAAe,KAAK;AAChD,aAAK,kBAAkB,eAAe,KAAK;AAC3C,aAAK,mBAAmB,cAAc,KAAK;AAC3C,aAAK,sBAAsB,cAAc,KAAK;AAC9C,aAAK,iBAAiB,cAAc,KAAK;AACzC,aAAK,UAAU,MAAe,CAAC,GAAG,MAAe,CAAC,GAAG,KAAK,WAAW,OAAO,IAAI,KAAK;AACrF,aAAK,mBAAmB,MAAe;UACnC;UACA,MAAe,CAAC,GAAG,KAAK,oBAAoB,OAAO,IAAI,KAAK;;AAEhE,aAAK,YAAY,KAAK;AACtB,aAAK,cAAc;AACnB,aAAK,uBAAuB;;MAEhC;AACI,qBAAa,mBAAmB;AAEhC,aAAK,SAAS,KAAK,UAAU,UAAU,CAAC,WAAW,WAAW,SAAS,IAAI,KAAK,QAAQ,MAAM,KAAK,mBAAmB,KAAK,mBAAmB,MAAM,KAAK;AACzJ,aAAK,kBAAkB,KAAK,UAAU,oBAAoB,CAAC,KAAK,OAAO,KAAK,QAAQ,MAAM,KAAK,sBAAsB,KAAK,sBAAsB,MAAM,KAAK;AAC3J,YAAI,KAAK;AACL,eAAK,OAAO,KAAK,UAAU,QAAQ,CAAC,KAAK,QAAQ,MAAM,KAAK,iBAAiB,KAAK,iBAAiB,MAAM,KAAK;;AAG9G,eAAK,OAAO;;AAEhB,aAAK,QAAQ;;MAQjB;AACI,eAAO,KAAK;AACR,mBAAS;AACT,cAAI,OAAO,WAAW;AAClB,kBAAM,IAAI,WAAW,8CAA8C,OAAO;;AAE9E,2BAAiB,OAAO;AACxB,mBAAS,OAAO;AAChB,2BAAiB,OAAO,eAAe,OAAO,QAAQ,OAAO;AAC7D,cAAI,IAAI,KAAK,WAAW,KAAK,UAAU,KAAK,KAAK,eAAe;AAC5D,iBAAK,cAAc,oBAAoB;cACnC,MAAM,MAAM,UAAa;cACzB,MAAM,KAAK;cACX;;;AAGR,cAAI,IAAI,KAAK,oBAAoB,KAAK,mBAAmB,KACrD,KAAK,wB
AAwB;AAC7B,iBAAK,uBAAuB,oBAAoB;cAC5C,MAAM,MAAM,UAAa;cACzB,MAAM,KAAK;cACX;;;AAGR;AACA,yBAAe,KAAK;AACpB,4BAAkB,KAAK;AACvB,cAAI,UAAU;AACV,gBAAI,MAAM,KAAQ,QAAQ,SAAS,KAAK,OAAO;;AAG/C,gBAAI,MAAM,QAAQ,KAAK,OAAO;;AAElC,cAAI,KAAK,QAAQ;AACb,gBAAI,QAAU,GAAG,KAAK,KAAK;;AAE/B,cAAI,aAAa;AACb,yBAAa,KAAQ,YAAY;;AAErC,uBAAa,MAAQ,GAAG,MAAM,YAAY,KAAK,gBAAgB;AAC/D,cAAI,KAAK,cAAc;AACnB,qBAAS,KAAK,WAAW,MAAM;;AAGnC,iBAAO,CAAC,QAAQ;;;MAGxB;AACI,2BAAmB,MAAM;AACzB,wBAAe;UACX,OAAO,KAAK;UACZ,YAAY,oBAAoB,KAAK;UACrC,SAAS,KAAK;UACd,mBAAmB,qBAAqB,KAAK;UAC7C,sBAAsB,qBAAqB,KAAK;UAChD,iBAAiB,qBAAqB,KAAK;UAC3C,mBAAmB,qBAAqB,KAAK;UAC7C,sBAAsB,qBAAqB,KAAK;UAChD,iBAAiB,qBAAqB,KAAK;UAC3C,qBAAqB,qBAAqB,KAAK;UAC/C,kBAAkB,oBAAoB,KAAK;UAC3C,qBAAqB,oBAAoB,KAAK;UAC9C,gBAAgB,oBAAoB,KAAK;UACzC,SAAS,KAAK;UACd,kBAAkB,KAAK;;AAE3B,eAAO,OAAO,OAAO,IAAI,YAAY;;;AAI7C,kBAAc,YAAY;AAC1B,kBAA4B;4BACG;MAC3B;AACI,aAAK,OAAO,IAAI,cAAc;AAC9B,cAAM;;MAGV;AACI,eAAO,KAAK;AACR,cAAI,KAAK,KAAK,eAAe;AACzB,oBAAY,KAAK,KAAK;AACtB,iBAAK,KAAK,cAAc;;AAE5B,cAAI,KAAK,KAAK,wBAAwB;AAClC,oBAAY,KAAK,KAAK;AACtB,iBAAK,KAAK,uBAAuB;;AAErC,uBAAa,UAAU,OAAO,OAAO,OAAO;AAC5C,2BAAiB,UAAU,OAAO,OAAO,OAAO;AAChD,+BAAqB,UAAU,OAAO,OAAO,OAAO;AACpD,iBAAO,MAAM,KAAK,QAAQ,CAAE,MAAM,UAAU;;;aAI7C;AACH,eAAO,IAAI,IAAI;;;AAIvB,cAAU,YAAY;AACtB,kBAA4B;0BACC;MACzB;AACI,cAAM;AACN,aAAK,qBAAqB;AAC1B,aAAK,+BAA+B;AACpC,aAAK,6BAA6B;AAClC,aAAK,gCAAgC;AACrC,aAAK,2BAA2B;AAChC,YAAI,KAAK;AACL,gBAAM,IAAI,WAAW;;AAEzB,aAAK,QAAQ,KAAK;AAClB,8BAAsB,KAAK,OAAO;AAClC,aAAK,aAAa,cAAc,KAAK,eAAe,SAAY,KAAK,qBACjE,KAAK;AACT,aAAK,sBAAsB,cAAc,KAAK,wBAAwB,SAClE,KAAK,+BACL,KAAK;AACT,aAAK,UAAU,KAAK,WAAW,OAAO,OAAO,KAAK;AAClD,aAAK,oBAAoB,eAAe,KAAK,qBAAqB,KAAK;AACvE,aAAK,uBAAuB,eAAe,KAAK,wBAAwB,KAAK;AAC7E,aAAK,kBACD,eAAe,KAAK,mBAAmB,KAAK;AAChD,aAAK,oBAAoB,eAAe,KAAK;AAC7C,aAAK,uBAAuB,eAAe,KAAK;AAChD,aAAK,kBAAkB,eAAe,KAAK;AAC3C,aAAK,mBAAmB,cAAc,KAAK;AAC3C,aAAK,sBAAsB,cAAc,KAAK;AAC9C,aAAK,iBAAiB,cAAc,KAAK;AACzC,aAAK,UAAU,MAAe,CAAC,GAAG,MAAe,CAAC,GAAG,KAAK,WAAW,OAAO,IAAI,KAAK;AACrF,aAAK,mBAAmB,MAAe;UACnC;UACA,MAAe,CAAC,GAAG,KAAK,oBAAoB,OAAO,IAAI,KAAK;;AAEhE,aAAK,iBAAiB,KAAK;AAC3B,aAAK,YAAY,KAAK;AACtB,aAAK,cAAc;AACnB,aAAK,uBAAuB;;MAEhC;AACI,qBAAa,mBAAmB;AAChC,yBAAiB,WAAW,WAAW,SAAS;AAChD,aAAK,SAAS,KAAK,UAAU,UAAU,CAAC,UAAU,KAAK,QAAQ,IAAI,MAAM,KAAK,mBAAmB,KAAK,mBAAmB,MAAM,KAAK;AACpI,aAAK,kBAAkB,KAAK,UAAU,oBAAoB,CAAC,KAAK,OAAO,KAAK,QAAQ,IAAI,MAAM,KAAK,sBAAsB,KAAK,sBAAsB,MAAM,KAAK;AAC/J,YAAI,KAAK;AACL,eAAK,OAAO,KAAK,UAAU,QAAQ,CAAC,KAAK,QAAQ,IAAI,MAAM,KAAK,iBAAiB,KAAK,iBAAiB,MAAM,KAAK;;AAGlH,eAAK,OAAO;;AAIhB,aAAK,QAAQ;;MAEjB;AACI,eAAO,KAAK;AACR,mBAAS;AACT,cAAI,OAAO,WAAW;AAClB,kBAAM,IAAI,WAAW,uDACd,OAAO;;AAElB,2BAAiB,OAAO,eAAe,OAAO,QAAQ,OAAO;AAC7D,yBAAe,OAAO;AACtB,mBAAS,OAAO;AAIhB,cAAI,IAAI,KAAK,WAAW,KAAK,UAAU,KAAK,KAAK,eAAe;AAC5D,iBAAK,cAAc,oBAAoB;cACnC,MAAM,MAAM,UAAa;cACzB,MAAM,KAAK;cACX;cACA,OAAO;;;AAGf,cAAI,IAAI,KAAK,oBAAoB,KAAK,mBAAmB,KACrD,KAAK,wBAAwB;AAC7B,iBAAK,uBAAuB,oBAAoB;cAC5C,MAAM,MAAM,UAAa;cACzB,MAAM,KAAK;cACX;cACA,OAAO;;;AAGf,yBAAe,KAAK;AACpB,4BAAkB,KAAK;AACvB;AACA;AACA;AACA,cAAI,IAAI,KAAK,WAAW,KAAK,UAAU;AACnC,qBAAS,KAAQ,QAAQ,OAAO;;AAEpC,wBAAc,MAAM,QAAQ,KAAK,OAAO;AACxC,cAAI,KAAK;AACL,sBAAU,QAAU,SAAS,KAAK,KAAK;;AAE3C,cAAI,IAAI,KAAK,oBAAoB,KAAK,mBAAmB;AACrD,uBAAW,KAAQ,UAAU,UAAU;;AAE3C,uCAA6B,KAAK,gBAAgB;AAClD,6BAAmB,OAAU,sBAAsB,CAAC,IAAI,KAAK,OAAO,KAAK,QAAQ,qBAAqB,OAAO;AAC7G,8BAAoB,MAAM,UAAU;AACpC,+BAAqB,OAAU,SAAS,GAAG,QAAQ,OAAO;AAC1D,2CAAiC,OAAU,aAAa,GAAG,YAAY,OAAO;AAC9E,cAAI,KAAK,oBAAoB,MAAM,MAAQ,IAAI;AAC/C,cAAI,KAAK,oBAAoB,MAAM,MAAQ,IAAI;AAC/C,6BAAmB,MAAM,KAAQ,GAAG,WAAW;AAC/C,eAAK,KAAK,WAAW,MAAM,MAAQ,IAAI;AACvC,oBAAU,MAAQ,KAAQ,GAAG,
WAAW,KAAQ,MAAQ,GAAG,IAAQ,KAAK;AAExE,iBAAO,CAAC,GAAG;;;MAGnB;AACI,2BAAmB,MAAM;AACzB,wBAAe;UACX,OAAO,KAAK;UACZ,YAAY,oBAAoB,KAAK;UACrC,qBAAqB,oBAAoB,KAAK;UAC9C,SAAS,KAAK;UACd,mBAAmB,qBAAqB,KAAK;UAC7C,sBAAsB,qBAAqB,KAAK;UAChD,iBAAiB,qBAAqB,KAAK;UAC3C,mBAAmB,qBAAqB,KAAK;UAC7C,sBAAsB,qBAAqB,KAAK;UAChD,iBAAiB,qBAAqB,KAAK;UAC3C,qBAAqB,qBAAqB,KAAK;UAC/C,kBAAkB,oBAAoB,KAAK;UAC3C,qBAAqB,oBAAoB,KAAK;UAC9C,gBAAgB,oBAAoB,KAAK;UACzC,SAAS,KAAK;UACd,kBAAkB,KAAK;UACvB,gBAAgB,KAAK;UACrB,YAAY;;AAEhB,eAAO,OAAO,OAAO,IAAI,YAAY;;;AAI7C,YAAQ,YAAY;AACpB,kBAA4B;sBACH;MACrB;AACI,YAAI,KAAK,mBAAmB;AACxB,kBAAQ,KAAK;;AAGjB,aAAK,OAAO,IAAI,QAAQ;AACxB,cAAM;;MAGV;AACI,eAAO,KAAK;AACR,cAAI,KAAK,KAAK,eAAe;AACzB,oBAAY,KAAK,KAAK;AACtB,iBAAK,KAAK,cAAc;;AAE5B,cAAI,KAAK,KAAK,wBAAwB;AAClC,oBAAY,KAAK,KAAK;AACtB,iBAAK,KAAK,uBAAuB;;AAErC,uBAAa,UAAU,OAAO,OAAO,OAAO;AAC5C,2BAAiB,UAAU,OAAO,OAAO,OAAO;AAChD,+BAAqB,UAAU,OAAO,OAAO,OAAO;AACpD,iBAAO,MAAM,KAAK,QAAQ,CAAE,MAAM,UAAU;;;aAI7C;AACH,YAAI,QAAO,qBAAqB;AAC5B,kBAAO,oBAAoB;;AAE/B,eAAO,IAAI,IAAI;;;AAIvB,QAAI,YAAY;AAChB,kBAA4B;2BACE;MAC1B;AACI,cAAM;AACN,aAAK,qBAAqB;AAC1B,aAAK,+BAA+B;AACpC,aAAK,6BAA6B;AAClC,aAAK,gCAAgC;AACrC,aAAK,2BAA2B;AAChC,aAAK,QAAQ,KAAK;AAClB,8BAAsB,KAAK,OAAO;AAClC,aAAK,aAAa,cAAc,KAAK,eAAe,SAAY,KAAK,qBACjE,KAAK;AACT,aAAK,sBAAsB,cAAc,KAAK,wBAAwB,SAClE,KAAK,+BACL,KAAK;AACT,aAAK,UAAU,KAAK,WAAW,OAAO,OAAO,KAAK;AAClD,aAAK,oBAAoB,eAAe,KAAK,qBAAqB,KAAK;AACvE,aAAK,uBAAuB,eAAe,KAAK,wBAAwB,KAAK;AAC7E,aAAK,kBACD,eAAe,KAAK,mBAAmB,KAAK;AAChD,aAAK,iBAAiB,KAAK;AAC3B,aAAK,oBAAoB,eAAe,KAAK;AAC7C,aAAK,uBAAuB,eAAe,KAAK;AAChD,aAAK,kBAAkB,eAAe,KAAK;AAC3C,aAAK,mBAAmB,cAAc,KAAK;AAC3C,aAAK,sBAAsB,cAAc,KAAK;AAC9C,aAAK,iBAAiB,cAAc,KAAK;AACzC,aAAK,UAAU,MAAe,CAAC,GAAG,MAAe,CAAC,GAAG,KAAK,WAAW,OAAO,IAAI,KAAK;AACrF,aAAK,mBAAmB,MAAe;UACnC;UACA,MAAe,CAAC,GAAG,KAAK,oBAAoB,OAAO,IAAI,KAAK;;AAEhE,aAAK,iBAAiB,KAAK;AAC3B,aAAK,YAAY,CAAC,KAAK,OAAO,KAAK;AACnC,aAAK,cAAc;AACnB,aAAK,uBAAuB;;MAEhC;AACI;AACA,qBAAa,mBAAmB;AAChC,yBAAiB,WAAW,WAAW,SAAS;AAChD,aAAK,SAAS,KAAK,UAAU,UAAU,CAAC,UAAU,KAAK,QAAQ,IAAI,MAAM,KAAK,mBAAmB,KAAK,mBAAmB,MAAM,KAAK;AACpI,aAAK,kBAAkB,KAAK,UAAU,oBAAoB,CAAC,KAAK,OAAO,KAAK,QAAQ,IAAI,MAAM,KAAK,sBAAsB,KAAK,sBAAsB,MAAM,KAAK;AAC/J;AACA,YAAI,KAAK;AACL,cAAI,KAAK;AACL,qCAAyB,KAAK;AAC9B,kCAAsB,KAAK;AAC3B,8BAAkB,IAAK,MAAK,yBAAyB;cAC7C;AAEI,2BAAW,iBAAiB,MAAM,CAAC;AACnC,2BAAY,IAAI,OAAQ,MAAM,CAAC;AAC/B,+BAAe,iBAAiB,MAAM,CAAC,gBAAgB;AACvD,uBAAO,qBAAuB,qBAAuB,IAAI,KAAK;;eAItE,GAAG,YAAY,cACf;;AAGJ,8BAAkB,KAAK;;AAE3B,eAAK,OAAO,KAAK,UAAU,QAAQ,CAAC,KAAK,QAAQ,IAAI,MAAM,iBAAiB,KAAK,iBAAiB,MAAM,KAAK;;AAG7G,eAAK,OAAO;;AAIhB,aAAK,QAAQ;;MAEjB;AACI,eAAO,KAAK;AACR,2BAAiB,OAAO,eAAe,OAAO,QAAQ,OAAO;AAC7D,mBAAS;AACT,cAAI,OAAO,WAAW;AAClB,kBAAM,IAAI,WAAW,wDACd,OAAO;;AAElB,yBAAe,OAAO;AACtB,2BAAiB,OAAO;AACxB,mBAAS,OAAO;AAChB,cAAI,IAAI,KAAK,WAAW,KAAK,UAAU,KAAK,KAAK,eAAe;AAC5D,iBAAK,cAAc,oBAAoB;cACnC,MAAM,MAAM,UAAa;cACzB,MAAM,KAAK;cACX;cACA,OAAO;;;AAGf,cAAI,IAAI,KAAK,oBAAoB,KAAK,mBAAmB,KACrD,KAAK,wBAAwB;AAC7B,iBAAK,uBAAuB,oBAAoB;cAC5C,MAAM,MAAM,UAAa;cACzB,MAAM,KAAK;cACX;cACA,OAAO;;;AAGf,yBAAe,KAAK;AACpB,4BAAkB,KAAK;AAIvB;AACA;AACA;AACA;AACA,cAAI,IAAI,KAAK,WAAW,KAAK,UAAU;AACnC,qBAAS,KAAQ,QAAQ,OAAO;;AAEpC,kBAAQ,MAAM,QAAQ,KAAK,OAAO;AAClC,cAAI,IAAI,KAAK,oBAAoB,KAAK,mBAAmB;AACrD,uBAAW,KAAQ,UAAU,UAAU;;AAE3C,cAAI,MAAQ,GAAG,MAAM,UAAU,KAAK,gBAAgB;AACpD,cAAI,KAAK;AACL,gBAAI,QAAU,GAAG,KAAK,KAAK;;AAE/B,mCAAyB,OAAU,GAAG,GAAG,EAAE,OAAO;AAClD,cAAI,KAAK,oBAAoB,MAAM;AACnC,cAAI,KAAK,oBAAoB,MAAM;AACnC,cAAI,MAAQ,KAAQ,GAAG,WAAW,KAAQ,GAAG,KAAK,WAAW,MAAM;AACnE,cAAI,KAAK,oBAAoB,MAAM;AACnC,oBAAU,KAAQ,GAAG,KAAK,WAAW,MAAM;AAE3C,iBAAO,CAAC,GAAG,GAAG;;;MAGtB;AACI
,2BAAmB,MAAM;AACzB,wBAAe;UACX,OAAO,KAAK;UACZ,YAAY,oBAAoB,KAAK;UACrC,qBAAqB,oBAAoB,KAAK;UAC9C,SAAS,KAAK;UACd,mBAAmB,qBAAqB,KAAK;UAC7C,sBAAsB,qBAAqB,KAAK;UAChD,iBAAiB,qBAAqB,KAAK;UAC3C,gBAAgB,KAAK;UACrB,mBAAmB,qBAAqB,KAAK;UAC7C,sBAAsB,qBAAqB,KAAK;UAChD,iBAAiB,qBAAqB,KAAK;UAC3C,qBAAqB,qBAAqB,KAAK;UAC/C,kBAAkB,oBAAoB,KAAK;UAC3C,qBAAqB,oBAAoB,KAAK;UAC9C,gBAAgB,oBAAoB,KAAK;UACzC,SAAS,KAAK;UACd,kBAAkB,KAAK;UACvB,gBAAgB,KAAK;;AAEzB,eAAO,OAAO,OAAO,IAAI,YAAY;;;AAI7C,aAAS,YAAY;AACrB,kBAA4B;uBACF;MACtB;AACI,YAAI,KAAK,mBAAmB;AACxB,kBAAQ,KAAK;;AAGjB,aAAK,OAAO,IAAI,SAAS;AACzB,cAAM;;MAGV;AACI,eAAO,KAAK;AACR,cAAI,KAAK,KAAK,eAAe;AACzB,oBAAY,KAAK,KAAK;AACtB,iBAAK,KAAK,cAAc;;AAE5B,cAAI,KAAK,KAAK,wBAAwB;AAClC,oBAAY,KAAK,KAAK;AACtB,iBAAK,KAAK,uBAAuB;;AAErC,uBAAa,UAAU,OAAO,OAAO,OAAO;AAC5C,2BAAiB,UAAU,OAAO,OAAO,OAAO;AAChD,+BAAqB,UAAU,OAAO,OAAO,OAAO;AACpD,iBAAO,MAAM,KAAK,QAAQ,CAAE,MAAM,UAAU;;;aAI7C;AACH,YAAI,QAAO,qBAAqB;AAC5B,kBAAO,oBAAoB;;AAE/B,eAAO,IAAI,IAAI;;;AAIvB,SAAK,YAAY;AACjB,kBAA4B;kCACS;MACjC;AACI,cAAM;AACN,aAAK,QAAQ,KAAK;;UAElB;AAKA,0BAAkB;AAClB,2BAAmB,KAAK,MAAM,QAAQ;AAClC,cAAI,MAAM,QAAQ,KAAK;AACnB,sBAAU,KAAK,GAAG,KAAK;;AAGvB,sBAAU,KAAK,KAAK;;;AAG5B,eAAO;;MAEX;AACI,eAAO,KAAK;AACR,mBAAS;AACT,uBAAa,OAAO,MAAM;AAE1B,+BAAqB;AACrB,6BAAmB,KAAK,MAAM,QAAQ;AAClC,gBAAI,MAAM,QAAQ,KAAK;AACnB,2BAAa,KAAK,OAAO,OAAO,GAAG,KAAK,UAAU;;AAGlD,2BAAa,KAAK,OAAO,OAAO,GAAG;;;AAG3C,uBAAa;AAEb,kCAAwB;AACxB;AACA,uBAAa,GAAG,IAAI,KAAK,MAAM,QAAQ,EAAE;AACrC,yBAAa,KAAK,MAAM;AACxB,qBAAS,aAAa;AAEtB,gBAAI,MAAM;AACN,2BAAa,CAAC,OAAO,IAAI,OAAO;;AAGhC,2BAAa,CAAC,WAAW,IAAI,OAAO;;AAExC,yBAAa,KAAK,KAAK,YAAY;AACnC,4BAAgB,KAAK,WAAW,MAAM;;AAG1C,mBAAS;AACT,mCAAyB,gBAAgB,QAAQ;AAC7C,mBAAO,KAAK,GAAG;;AAEnB,iBAAO,CAAC,WAAW,IAAI,OAAO;;;MAGtC;AACI,YAAI,gBAAgB;AAGhB,uBAAa,WAAW;;AAE5B,qBAAa;AACb;AACA,aAAK,MAAM,QAAQ;AACf,oBAAU,WAAW,KAAK;AAEtB,iBAAK,MAAM;AACX,gBAAI,MAAM,QAAQ,KAAK;AACnB,0BAAY,KAAK,UAAU;;AAG3B,0BAAY,KAAK;;AAErB,yBAAa,CAAC,WAAW,IAAI;;;AAGrC,aAAK,QAAQ;;MAEjB;AACI,2BAAmB,MAAM;AACzB,8BAAsB;AAClB,iBAAO;YACH,WAAa,KAAK;YAClB,QAAU,KAAK;;;AAGvB,4BAAoB,KAAK,MAAM,IAAI;AACnC,wBAAe,CAAE,OAAS;AAC1B,eAAO,OAAO,OAAO,IAAI,YAAY;;aAGlC,yCAAwC;AAC3C,sBAAc;AACd,iCAAyB,QAAO;AAC5B,gBAAM,KAAK,YAAY,YAAY;;AAEvC,eAAO,IAAI,IAAI,CAAE;;UAEjB;AACA,YAAI,CAAC,KAAK;AACN,iBAAO;;AAEX,wBAAgB;AAChB,2BAAmB,KAAK;AACpB,kBAAQ,KAAK,GAAG,KAAK;;AAEzB,eAAO;;UAEP;AACA,wBAAgB;AAChB,2BAAmB,KAAK;AACpB,kBAAQ,KAAK,GAAG,KAAK;;AAEzB,YAAI,CAAC,KAAK;AACN,mCAAyB;AACzB,6BAAmB,KAAK;AACpB,6BAAiB,KAAK,GAAG,KAAK;;AAElC,iBAAO,iBAAiB,OAAO;;AAEnC,eAAO;;MAOX;AACI,wBAAgB;AAChB,2BAAmB,KAAK;AACpB,kBAAQ,KAAK,GAAG,KAAK;;AAEzB,eAAO,cAAc;;MAQzB;AACI,uBAAe;AACf,2BAAmB,KAAK;AACpB,4BAAkB,KAAK,QAAQ;AAC/B,+BAAqB,QAAQ,OAAO;AACpC,uBAAa,GAAG,IAAI,KAAK,QAAQ,QAAQ,EAAE;AACvC,mBAAO,KAAK,CAAC,KAAK,QAAQ,IAAI,aAAa;;;AAGnD,sBAAc;;;AAItB,oBAAgB,YAAY;AAC5B,kBAA4B;AACrB;AACH,aAAQ,aAAM,MAAM,WAAW,OAAO,gBAAQ,KAAM;AACpD,4BAAsB,MAAM,UAAU,SAAQ;AAC9C,yBAAmB,MAAM,aAAe,eAAe,OAAM;AAE7D,UAAI,CAAC,UAAS,UAAS;AACnB,eAAO,KAAS,aAAa;;AAEjC,oBAAc,MAAM,QAAO,KAAK,QAAW,IAAI;AAC/C,aAAO,MAAM,IAAI,OAAK,KAAS,EAAE;;ACtzCrC;;;;;;;;;AASA,iBAAsC;AAClC,cAAQ;AACR,qBAAc;AAAG,YAAI,OAAO,UAAU,eAAe,KAAK,GAAG,OAAM,EAAE,QAAQ,MAAK;AAC9E,YAAE,MAAK,EAAE;AACb,UAAI,KAAK,QAAQ,OAAO,OAAO,0BAA0B;AACrD,qBAAa,QAAO,OAAO,sBAAsB,IAAI,IAAI,GAAE,QAAQ;AAC/D,cAAI,EAAE,QAAQ,GAAE,MAAM,KAAK,OAAO,UAAU,qBAAqB,KAAK,GAAG,GAAE;AACvE,cAAE,GAAE,MAAM,EAAE,GAAE;;AAE1B,aAAO;;gCAaiB;;4BAKJ;MACpB;AACI,YAAI,KAAK;AACL,gBAAM,IAAI,oBAAoB;;AAElC,YAAI,MAAM,QAAQ,KAAK;AACnB,gBAAM,IAAI,oBAAoB;;AAElC,cAAM;AACN,aAAK,YAAY,CAAC,IAAI,UAAU,CAAE,MAAM;;MAE5C;AACI,eAAO,KAAS;AACZ,cAAI,KAAK,KAAK,eAAe;AACzB,oBAAY,KAAK,KAAK;AACtB
,iBAAK,KAAK,cAAc;;AAE5B,cAAI,KAAK,KAAK,wBAAwB;AAClC,oBAAY,KAAK,KAAK;AACtB,iBAAK,KAAK,uBAAuB;;AAErC,cAAI,UAAU,OAAO;AACjB,kBAAM,IAAI,WAAW;;AAEzB,uBAAa,UAAU,OAAO,OAAO,OAAO;AAC5C,2BAAiB,UAAU,OAAO,OAAO,OAAO;AAChD,+BAAqB,UAAU,OAAO,OAAO,OAAO;AACpD,iBAAO,MAAM,KAAK,QAAQ,CAAE,MAAM,UAAU;;;MAGpD;AACI,uBAAe,KAAK,yBAAyB;AAC7C,YAAI,CAAC,KAAK;AACN,qBAAW,CAAC,SAAS,IAAI,GAAG,SAAS,MAAM;;AAE/C,YAAI,KAAK;AACL,qBACI,CAAC,UAAU,GAAG,MAAM,GAAG,KAAK,CAAC,WAAW,IAAI,GAAG,SAAS,MAAM;;AAEtE,eAAO;;MAEX;AACI,eAAO,KAAS;AACZ,iBAAQ,aAAc,KAAK;AAC3B,6BAAmB,OAAO;AAC1B,8BAAoB,KAAK,yBAAyB;AAClD,6BAAmB,CAAC,YAAY,IAAI,GAAG,YAAY,MAAM;AACzD,+BAAqB,OAAU;AAC/B,cAAI,MAAM,QAAQ;AACd,mBAAO,MAAM,UAAU,QAAQ,KAAK;;AAExC,iBAAO,CAAC;;;MAGhB,+BAA+B;AAC3B,aAAS;AACL,cAAI,CAAC,KAAK;AACN,kBAAM,IAAI,eAAe;;AAE7B,6BAAmB,KAAK,UAAU,GAAG;AACrC,8BAAoB,KAAK,yBAAyB;AAClD,6BAAmB,CAAC,YAAY,IAAI,GAAG,YAAY,MAAM;AACzD,4BAAkB,WAAW;AAC7B,cAAI,aAAa;AACb,kBAAM,IAAI,WAAW;;AAQzB,cAAI,KAAK,eAAe;AACpB,gBAAI,MAAM,QAAQ,KAAK,KAAK;AACxB,mBAAK,UAAU,KAAK,KAAK,UAAU,IAAI,MAAM,OAAU;;AAGvD,mBAAK,UAAU,CAAC,OAAU;;qBAGzB,UAAU;AAEf,oBAAY,KAAK;AAEjB,gBAAI,KAAK,cAAc;AACnB,sBAAY,KAAK;AACjB,mBAAK,aAAa;;AAEtB,gBAAI,MAAM,QAAQ,KAAK,KAAK;AACxB,mBAAK,UAAU,KAAK,KAAK,UAAU,IAAI,MAAM,OAAU;;AAGvD,mBAAK,QAAQ,KAAK,OAAU;;;AAIhC,gBAAI,CAAC,MAAM,QAAQ;AACf,uBAAS,CAAC;;AAEd,gBAAI,OAAO,WAAW,KAAK,QAAQ;AAC/B,oBAAM,IAAI,WAAW,SAAS,KAAK,gBAAgB,KAAK,QAAQ,oCACzC,OAAO,0CACb;;AAErB,gBAAI;AAKA,mBAAK,WAAW,KAAK,KAAK,QAAQ;;AAGlC,sBAAY,KAAK;;AAErB,8BAAiB,GAAG,SAAQ,KAAK,QAAQ,QAAQ,EAAE;AAC/C,4BAAc,OAAO;AACrB,oCAAsB;AACtB,kBAAI,CAAC,aAAiB,MAAM,OAAO;AAC/B,sBAAM,IAAI,WAAW,SAAS,qCAAoC,KAAK,wBACjD,iCAAiC,MAAM;;AAEjE,mBAAK,QAAQ,UAAS;;;AAG9B,eAAK,UAAU,KAAK,QAAQ,IAAI,WAAS,KAAS,MAAM;;;MAGhE;AACI,eAAQ,YAAY,SAAS,YAAY,SAAS,SAAS,gBAAiB,KAAK;AACjF,gCAAwB,eAAe;AACvC,kBAAU,WAAW,kBAAkB,IAAI;AAC3C,kBAAU,WAAW,kBAAkB,IAAI;AAC3C,qBAAa,iBAAiB,GAAG,WAAW,IAAI,SAAS,QAAQ,IAAI,aAAa;AAClF,qBAAa,iBAAiB,GAAG,WAAW,IAAI,SAAS,QAAQ,IAAI,aAAa;AAClF,yBAAiB;UACb,GAAG,WAAW,MAAM,GAAG;UACvB,GAAI,kBAAkB,CAAC,SAAS,MAAM,QAAQ,CAAC,MAAM,MAAM;;AAE/D,eAAO;;;AAIf,cAAU,YAAY;iCACc;MAChC;AACI,eAAQ,SAAS,YAAY,SAAS,SAAS,YAAY,gBAAkB;AAC7E,cAAM,OAAO,OAAO,IAAI,MAAM,CAAE,OAAO;AACvC,aAAK,UAAU;AACf,8BAAsB,KAAK,SAAS;AACpC,aAAK,aAAa,eAAe,YAAY,GAAG;AAChD,aAAK,WAAW,QAAQ,UAAQ,sBAAsB,MAAM;AAC5D,aAAK,UAAU,eAAe,WAAW,GAAG,GAAG;AAC/C,aAAK,QAAQ,QAAQ,YAAU,sBAAsB,QAAQ;AAC7D,aAAK,UAAU,WAAW;AAC1B,yBAAiB,KAAK;AACtB,aAAK,aAAa,cAAc;AAChC,wBAAgB,KAAK;AACrB,aAAK,eAAe,eAAe,gBAAgB,GAAG,GAAG;AACzD,aAAK,aAAa,QAAQ,UAAQ,sBAAsB,MAAM;;MAElE;AACI;AACA,qBAAa,mBAAmB;AAChC,4BAAoB,KAAK,eAAe,kBAAkB,IAAI,WAAW,SAAS;AAClF,YAAI,WAAW,gBAAgB;AAC3B,gBAAM,IAAI,WAAW,+DACR,WAAW;;AAE5B,yBAAiB,WAAW;AAC5B,6BAAqB;AACrB,4BAAoB,KAAK,WAAW,OAAO,CAAC,UAAU,KAAK,UAAU;AACrE,aAAK,SAAS,KAAK,UAAU,UAAU,aAAa,MAAM,KAAK,mBAAmB,KAAK,mBAAmB,MAAM,KAAK;AACrH,qCAA6B,KAAK,WAAW,OAAO,CAAC,KAAK,SAAS,KAAK,UAAU;AAClF,aAAK,kBAAkB,KAAK,UAAU,oBAAoB,sBAAsB,MAAM,KAAK,sBAAsB,KAAK,sBAAsB,MAAM,KAAK;AACvJ,YAAI,KAAK;AACL;AACA,cAAI,KAAK;AACL,0BAAa,KAAK;AAClB,4BAAgB,KAAK;AACrB,8BAAkB,IAAK,MAAK,yBAAyB;cAC7C;AACI,8BAAc,MAAK,MAAM,CAAC;AAC1B,8BAAc,OAAS,CAAC;AACxB,kCAAkB,MAAK,MAAM,CAAC,UAAU;AACxC,uBAAO,YAAc,CAAC,OAAO,OAAO;;eAI5C,GAAG,YAAY,cACf;;AAGJ,8BAAkB,KAAK;;AAE3B,eAAK,OAAO,KAAK,UAAU,QAAQ,CAAC,KAAK,UAAU,eAAe,MAAM,iBAAiB,KAAK,iBAAiB,MAAM,KAAK;;AAE9H,aAAK,QAAQ;;MAEjB;AACI,eAAO,KAAS;AACZ,cAAI,OAAO,WAAW;AAClB,kBAAM,IAAI,WAAW,8DACd,OAAO;;AAElB,2BAAiB,OAAO,eAAe;AACvC,oBAAU,OAAO;AACjB,2BAAiB,OAAO;AACxB,2BAAiB,OAAO;AACxB,+BAAqB;AACrB,cAAI,IAAI,KAAK,WAAW,KAAK,UAAU,KAAK,KAAK,eAAe;AAC5D,iBAAK,cAAc,oBAAoB;cACnC,MAAM,MAAM,UAAa;cACzB,MAAM,KAAK;cACX;cACA,OAAO;;;AAGf,8BAAoB,KAAK;AACz
B,+BAAqB;AACjB,gBAAI,CAAC,QAAQ,CAAC,KAAK;AACf,qBAAO;;AAEX,mBAAO,KAAQ,KAAK,SAAQ;;AAEhC,mBAAS,aAAa,GAAG,aAAa;AACtC,mBAAS,aAAa,GAAG,aAAa;AACtC,mBAAS,aAAa,GAAG,aAAa;AACtC,mBAAS,aAAa,GAAG,aAAa;AACtC,cAAI,IAAI,KAAK,oBAAoB,KAAK,mBAAmB,KACrD,KAAK,wBAAwB;AAC7B,iBAAK,uBAAuB,oBAAoB;cAC5C,MAAM,MAAM,UAAa;cACzB,MAAM,KAAK;cACX;cACA,OAAO;;;AAGf,iCAAuB,KAAK;AAC5B,mBAAS,aAAa,UAAU,gBAAgB;AAChD,mBAAS,aAAa,UAAU,gBAAgB;AAChD,mBAAS,aAAa,UAAU,gBAAgB;AAChD,mBAAS,aAAa,UAAU,gBAAgB;AAChD,oCAA0B;AAC1B,uDAA6C,OAAU,KAAK,OAAO,QAAQ,cAAc;AACzF,+CAAqC,KAAK,UACtC,OAAU,KAAK,KAAK,QAAQ,gBAC5B,CAAC,MAAM,MAAM,MAAM;AACvB,eAAK,KAAK,UAAU,IAAI,SAAS,OAAO,KAAK;AAC7C,eAAK,KAAK,UAAU,IAAI,SAAS,OAAO,KAAK;AAC7C,eAAK,KAAK,UAAU,IAAI,SAAS,OAAO,KAAK;AAC7C,eAAK,KAAK,UAAU,IAAI,SAAS,OAAO,KAAK;AAC7C,mEAAyD,OAAU,KAAK,gBAAgB,QAAQ,cAAc;AAC9G,eAAK,KAAK,cAAc,IAAI;AAC5B,eAAK,KAAK,cAAc,IAAI;AAC5B,eAAK,KAAK,cAAc,IAAI;AAC5B,eAAK,KAAK,cAAc,IAAI;AAC5B,oBAAU,KAAK,oBAAoB,MAAM,MAAQ,IAAI;AACrD,oBAAU,KAAK,oBAAoB,MAAM,MAAQ,IAAI;AACrD,oBAAU,MAAQ,KAAQ,GAAG,WAAW,KAAQ,GAAG,KAAK,WAAW,MAAM,MAAQ,IAAI;AACrF,oBAAU,KAAQ,KAAK,oBAAoB,MAAM,MAAQ,IAAI,MAAM,KAAK,WAAW,MAAM;AACzF,iBAAO,CAAC,GAAG,GAAG;;;MAGtB;AACI,mBAAW,MAAM,cAAe,YAAe,iBAAiB,OAAO,IAAI,CAAC;AAC5E,wBAAe;UACX,SAAS,KAAK;UACd,YAAY,KAAK;UACjB,SAAS,KAAK;UACd,YAAY,KAAK;UACjB,cAAc,KAAK;UACnB,SAAS,KAAK;;AAElB,eAAO,OAAO,OAAO,IAAI,YAAY;;MAEzC;AACI,oBAAY,QAAW,GAAG,GAAG,KAAK,SAAU,WAAW,SAAU,KAAK,eAAe,kBAAkB,SAAS,QAAQ,KAAK;AAC7H,YAAI;AACA,iBAAO,QAAU,KAAK,GAAG,KAAK;;AAElC,eAAO;;MAEX;AACI,wBAAgB;AAChB,eAAO,QAAW,GAAG,GAAG,SAAS,QAAQ,KAAK,eAAe,kBAAkB,SAAS;;;AAIhG,mBAAe,YAAY;AAC3B,kBAAgC;6BACA;MAC5B;AACI,qBAAa,IAAI,eAAe;AAChC,cAAM,OAAO,OAAO,IAAI,MAAM,CAAE;;aAG7B;AACH,eAAO,IAAI,IAAI;;;AAIvB,eAAW,YAAY;AACvB,kBAAgC;AClVhC;;;;;;;;;0BAuB6B;MACzB;AACI,cAAM;AACN,aAAK,OAAO,KAAK,IAAI,KAAK,IAAI,KAAK,MAAM,IAAI;AAE7C,aAAK,aAAa,KAAK;AACvB,aAAK,OAAO,KAAK;AACjB,aAAK,kBAAkB;;MAE3B;AACI,YAAI,KAAK,cAAc;AACnB,iBAAO,KAAK;;AAEhB,2BAAmB,OAAM;AACzB,2BAAmB;AACnB,qBAAa,GAAG,IAAI,KAAK,WAAW,QAAQ,EAAE;AAC1C,qBAAW,KAAK,KAAK,WAAW,MAAM,OAAO,WAAW,KAAK,KAAK,WAAW;;AAEjF,eAAO;;MAEX;AACI,eAAO,KAAK;AACR,eAAK,eAAe,QAAQ;AAC5B,yBAAc,oBAAoB;AAClC,cAAI,IAAI,KAAK,QAAQ,KAAK,OAAO;AAC7B,6BAAiB,OAAO,eAAe,OAAO,QAAQ,OAAO;AAC7D,+BAAmB,KAAK,cAAc;AACtC,2BAAe,aAAe,MAAM,UAAU,QAAO,KAAK,MAAM,YAAY,KAAK,OAAO,MAAM,QAAO;AACrG,mBAAO;;AAEX,iBAAO;;;MAGf;AACI,wBAAe;UACX,MAAM,KAAK;UACX,YAAY,KAAK;UACjB,MAAM,KAAK;;AAEf,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;MAEX;AACI,eAAO,MAAM;;;AAIrB,YAAQ,YAAY;AACpB,kBAA4B;mCACU;MAClC;AACI,cAAM;AACN,aAAK,YAAY,CAAC,CAAE,MAAM;;MAE9B;AACI,2BAAmB,OAAM;AACzB,eAAO,CAAC,WAAW,IAAI,GAAG,WAAW;;;AAI7C,qBAAiB,YAAY;AAC7B,kBAA4B;wBACD;MACvB;AACI,cAAM;AAEN,aAAK,aAAa;AAClB,aAAK,UAAU;AACf,aAAK,SAAS;AACd,aAAK,OAAO;AACZ,aAAK,6BAA6B;AAClC,aAAK,2BAA2B;AAChC,YAAI,KAAK,mBAAmB,QAAQ,KAAK,cAAc,QACnD,KAAK,YAAY;AAGjB,0BAAgB;AAChB,cAAI,KAAK,aAAa;AAClB,wBAAY,KAAK;;AAErB,eAAK,kBAAkB,CAAC,WAAW,KAAK;;AAE5C,aAAK,QAAQ,KAAK;AAClB,8BAAsB,KAAK,OAAO;AAClC,aAAK,aAAa,cAAc,KAAK;AACrC,YAAI,KAAK,WAAW;AAChB,eAAK,UAAU,KAAK;;AAExB,aAAK,oBAAoB,eAAe,KAAK,qBAAqB,KAAK;AACvE,aAAK,kBACD,eAAe,KAAK,mBAAmB,KAAK;AAChD,aAAK,mBAAmB,cAAc,KAAK;AAC3C,aAAK,iBAAiB,cAAc,KAAK;AACzC,aAAK,oBAAoB,eAAe,KAAK;AAC7C,aAAK,kBAAkB,eAAe,KAAK;AAC3C,aAAK,sBAAsB,eAAe,KAAK;AAC/C,aAAK,kBAAkB;AACvB,aAAK,YAAY,CAAC,CAAE,SAAS;;MAEjC;AACI,qBAAa,mBAAmB;AAChC,6BAAqB,WAAW,WAAW,SAAS;AACpD,YAAI,KAAK,UAAU;AACf,eAAK,SAAS,KAAK,UAAU,UAAU,CAAC,cAAc,KAAK,QAAQ,MAAM,KAAK,mBAAmB,KAAK,mBAAmB,MAAM,KAAK;AACpI,cAAI,KAAK;AACL,iBAAK,OAAO,KAAK,UAAU,QAAQ,CAAC,KAAK,QAAQ,MAAM,KAAK,iBAAiB,KAAK,iBAAiB,MAAM,KAAK;;;AAGtH,aAAK,YAAY,CAAC,CAAE,SAAS,GAAG,MAAM,EAAG,KAAK;AAC9C,aAAK,Q
AAQ;;MAEjB;AACI,qBAAa,mBAAmB;AAChC,4BAAoB,WAAW;AAC/B,oBAAY,YAAY,SAAS,KAAK,KAAK;AAC3C,eAAO;;MAEX;AACI,eAAO,KAAK;AACR,eAAK,eAAe,QAAQ;AAE5B,yBAAc,oBAAoB;AAClC,sCAA4B,2BAA2B,KAAK,WAAW;AACvE;AACA,cAAI,uBAAuB;AACvB,qBAAS,MAAM,QAAO,KAAK,OAAO,QAAQ,qBAAqB,KAAK,OAAO,KAAK,KAAK,SAAS;;AAG9F,qBAAS,MAAM,QAAO,KAAK,OAAO;AAClC,gBAAI,KAAK,QAAQ;AACb,uBAAS,QAAU,QAAQ,KAAK,KAAK;;AAEzC,gBAAI,KAAK,cAAc;AACnB,uBAAS,KAAK,WAAW,MAAM;;;AAGvC,iBAAO;;;MAGf;AACI,wBAAe;UACX,OAAO,KAAK;UACZ,YAAY,oBAAoB,KAAK;UACrC,SAAS,KAAK;UACd,mBAAmB,qBAAqB,KAAK;UAC7C,iBAAiB,qBAAqB,KAAK;UAC3C,mBAAmB,qBAAqB,KAAK;UAC7C,iBAAiB,qBAAqB,KAAK;UAC3C,qBAAqB,qBAAqB,KAAK;UAC/C,kBAAkB,oBAAoB,KAAK;UAC3C,gBAAgB,oBAAoB,KAAK;;AAE7C,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;AAIf,UAAM,YAAY;AAClB,kBAA4B;0BACC;MACzB;AACI,eAAO,QAAQ;AACf,cAAM;AACN,aAAK,YAAY,CAAC,CAAE,SAAS;AAC7B,aAAK,aAAa,KAAK;;MAE3B;AACI,qBAAa,mBAAmB;AAChC,0BAAkB,WAAW,MAAM;AAC/B,cAAI,OAAO;AACP,kBAAM,IAAI,WAAW,iEACT,WAAW,MAAM;;;AAKrC,eAAO,CAAC,WAAW,IAAI,UAAU,YAAY;;MAEjD;AACI,eAAO,KAAK;AACR,eAAK,eAAe,QAAQ;AAC5B,uBAAY,oBAAoB;AAChC,cAAI,KAAK,eAAe,mBAAmB,OAAM,OAAO;AACpD,gCAAoB,CAAC;AACrB,yBAAa,GAAG,IAAI,OAAM,MAAM,EAAE;AAC9B,0BAAY,KAAK;;AAErB,wBAAY,KAAK;AACjB,qBAAQ,OAAM,UAAU;;AAE5B,iBAAO,aAAe;;;MAG9B;AACI,wBAAe;AACf,YAAI,KAAK,cAAc;AACnB,kBAAO,gBAAgB,KAAK;;AAEhC,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;AAIf,YAAQ,YAAY;AACpB,kBAA4B;+BACI;MAC5B;AACI,cAAM;AACN,aAAK,kBAAkB;AACvB,aAAK,aAAa,cAAc,KAAK;;MAEzC;AACI,eAAO,KAAK;AACR,eAAK,eAAe,QAAQ;AAC5B,yBAAc,oBAAoB;AAClC,iBAAO,KAAK,WAAW,MAAM;;;MAGrC;AACI,wBAAe,CAAE,YAAY,oBAAoB,KAAK;AACtD,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;AAIf,iBAAW,YAAY;AACvB,kBAA4B;+BACM;MAC9B;AACI,cAAM;AACN,aAAK,IAAI,KAAK;AACd,aAAK,YAAY,CAAC,CAAE,MAAM;;MAE9B;AACI,eAAO,CAAC,WAAW,IAAI,KAAK,GAAG,WAAW;;MAE9C;AACI,eAAO,KAAK;AACR,mBAAS,oBAAoB;AAC7B,iBAAO,OAAS,QAAQ,KAAK;;;MAGrC;AACI,wBAAe;UACX,GAAG,KAAK;;AAEZ,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;AAIf,iBAAa,YAAY;AACzB,kBAA4B;4BACC;MACzB;AACI,cAAM;AACN,aAAK,cAAc,KAAK;AAExB,qBAAa,GAAG,IAAI,KAAK,YAAY,QAAQ,EAAE;AAC3C,cAAI,KAAK,UAAU,KAAK,YAAY;AAChC,iBAAK,YAAY,KAAK;;;;MAIlC;AACI,eAAO,MAAM,KAAK,OAAO;;MAgB7B;AACI,yBAAiB;AACjB,2BAAmB,YAAY;AAC/B,oBAAY;AACZ,sBAAc;AACd,qBAAa,GAAG,IAAI,WAAW,QAAQ,EAAE;AACrC,sBAAY,WAAW;AACvB,cAAI,KAAK,UAAU;AACf,gBAAI,YAAY;AACZ,wBAAU;;AAGV,oBAAM,IAAI,WAAW;;;AAIzB,qBAAS;;;AAGjB,6BAAqB,UAAU;AAC/B,YAAI,YAAY;AACZ,cAAI,UAAU,KAAK,eAAe,UAAU;AACxC,kBAAM,IAAI,WAAW;;AAEzB,qBAAW,WAAW,eAAe;mBAEhC,iBAAiB;AACtB,gBAAM,IAAI,WAAW;;AAEzB,eAAO;;MAEX;AACI,6BAAqB;AACrB,qBAAa,GAAG,IAAI,WAAW,QAAQ,EAAE;AACrC,cAAI,KAAK,UAAU,WAAW;AAC1B,6BAAiB;AACjB;;;AAGR,YAAI;AACA,iBAAO,WAAW,MAAM,GAAG,GAAG,OAAO,KAAK;;AAG1C,iBAAO,WAAW,MAAM,GAAG,GAAG,OAAO,KAAK,oBAAoB,WAAW,MAAM,IAAI,KAAK;;;MAGhG;AACI,eAAO,KAAK;AACR,eAAK,eAAe,QAAQ;AAC5B,yBAAc,oBAAoB;AAClC,6BAAmB,OAAM;AACzB,8BAAoB,WAAW,MAAM,GAAG,GAAG,OAAO,KAAK,oBAAoB,WAAW,MAAM,IAAI,KAAK;AACrG,iBAAO,OAAM,QAAQ;;;MAG7B;AACI,wBAAe;UACX,aAAa,KAAK;;AAEtB,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;AAIf,cAAQ,YAAY;AACpB,kBAA4B;0BACC;MACzB;AACI,cAAM;AACN,YAAI,KAAK,QAAQ;AACb,gBAAM,IAAI,MAAM;;AAGpB,YAAI,CAAC,MAAM,QAAQ,KAAK;AACpB,gBAAM,IAAI,MAAM,sEACT,KAAK;;AAGhB,sCAA8B,QAAM,GAAG,KAAK,KAAK,SAAS;AAC1D,YAAI,CAAC,aAAiB,KAAK,KAAK,QAAQ,QAAQ;AAC5C,gBAAM,IAAI,MAAM,iCAAiC,KAAK,UAAU,KAAK,QACjE;;AAER,aAAK,OAAO,KAAK;AACjB,aAAK,qBAAqB,CAAC,GAAG,OAAO,KAAK;AAC1C,aAAK,YAAY,CAAC,IAAI,UAAU,CAAE,MAAM,KAAK,KAAK,SAAS;;MAE/D;AACI,qBAAa,mBAAmB;AAChC,4BAAoB,WAAW;AAC/B,aAAK,KAAK,QAAQ;AACd,sBAAY,IAAI,KAAK,WAAW;;AAEpC,eAAO;;MAEX;AACI,eAAO,WAAU,oBAAoB,SAAS,KAAK;;MAEvD;AACI,wBAAe;UACX,MAAM,KAAK;;AAEf,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,e
AAO;;;AAIf,YAAQ,YAAY;AACpB,kBAA4B;0BACC;MACzB;AACI,cAAM,QAAQ,OAAO,KAAK;AAC1B,aAAK,kBAAkB;AACvB,YAAI,QAAQ;AACR,eAAK,YAAY,KAAK,aAAa,OAAO,IAAI,KAAK;;AAGnD,eAAK,YAAY;;;MAGzB;AACI,eAAO;;MAEX;AACI,2BAAmB,MAAM;AACzB,wBAAe,CAAE,WAAW,KAAK;AACjC,eAAO,OAAO,SAAQ;AACtB,eAAO;;MAEX;AACI,uBAAc,oBAAoB;AAClC,qBAAa;AACb,eAAO,IAAI,SAAS,QAAO,KAAK,YAAY;;MAEhD;AACI,eAAO,KAAK;AACR,eAAK,eAAe,QAAQ;AAC5B,yBAAc,oBAAoB;AAClC,uBAAa;AACb,2BAAiB;AACjB,8BAAoB,IAAI,SAAS,QAAO,KAAK,YAAY,MAAM;AAC/D,yBAAe,OAAM,IAAI,YAAY,OAAO,OAAM;AAClD,iBAAO;;;;AAKnB,YAAQ,YAAY;AACpB,kBAA4B;AC3c5B;;;;;;;;;4BAuB+B;MAC3B;AACI,cAAM;AACN,aAAK,aAAa;AAClB,aAAK,iCAAiC;AACtC,YAAI,KAAK,mBAAmB,QAAQ,KAAK,cAAc;AAKnD,0BAAgB;AAChB,cAAI,KAAK,aAAa;AAClB,wBAAY,KAAK;;AAErB,cAAI,KAAK,eAAe;AAGpB,iBAAK,kBAAkB,CAAC,WAAW;;AAKnC,iBAAK,kBACD,CAAC,WAAW,OAAO,OAAqB,KAAK;;;AAGzD,aAAK,WAAW,KAAK;AACrB,8BAAoC,KAAK,UAAU;AACnD,aAAK,YAAY,KAAK;AACtB,8BAAoC,KAAK,WAAW;AACpD,aAAK,wBAAwB,eAAe,KAAK,yBAAyB,KAAK;AAC/E,aAAK,wBAAwB,eAAe,KAAK;AACjD,aAAK,sBAAsB,eAAe,KAAK;AAC/C,aAAK,uBAAuB,cAAc,KAAK;AAC/C,aAAK,WAAW,KAAK;AACrB,aAAK,kBAAkB,KAAK;AAC5B,aAAK,cAAc,KAAK;;MAE5B;AACI,aAAK,aAAa,KAAK,UAAU,cAAc,CAAC,KAAK,UAAU,KAAK,YAAY,KAAK,OAAO,KAAK,uBAAuB,KAAK,uBAAuB,MAAM,KAAK;AAC/J,aAAK,QAAQ;;MAIjB;;MACA;AACI,eAAO,KAAK;AACR,cAAI,CAAC,KAAK;AACN,mBAAO;;AAGP,qBAAS,oBAAoB;AAC7B,mBAAO,SAAS,QAAQ,WAAU;;;;MAI9C;AACI,qBAAa,mBAAmB;AAChC,YAAI,KAAK,eAAe;AACpB,iBAAO,CAAC,GAAG,YAAY,KAAK;;AAGhC,uBAAe,OAAqB,KAAK;AACzC,YAAI,OAAO,WAAW,WAAW,SAAS;AACtC,gBAAM,IAAI,WAAW,oBAAoB,KAAK,mDACjB;;AAG7B,kBAAQ;AACR,uBAAa,GAAG,IAAI,OAAO,QAAQ,EAAE;AACjC,uBAAW,OAAO;AAClB,uBAAW,WAAW,IAAI;AAC1B,gBAAK,MAAM,QAAU,MAAM,QAAU,OAAO;AACxC,oBAAM,IAAI,WAAW,oBAAoB,KAAK,mDACjB;uBAExB,MAAM;AACX,qBAAO,KAAK;;AAEhB;;;AAGR,eAAO,CAAC,WAAW,IAAI,GAAG,QAAQ,KAAK;;MAE3C;AACI,eAAO,KAAK;AACR,eAAK,eAAe,QAAQ;AAE5B,uBAAY,oBAAoB;AAChC,cAAI,OAAM,UAAU;AAChB,qBAAQ,OAAO,QAAO;;AAE1B,yBAAe,SAAS,KAAK,WAAW,QAAQ,OAAM;AACtD,iBAAO,OAAO,QAAQ,mBAAmB,KAAK,mBAAmB,OAAM;;;MAG/E;AACI,wBAAe;UACX,UAAU,KAAK;UACf,WAAW,KAAK;UAChB,uBAAuB,qBAAqB,KAAK;UACjD,uBAAuB,qBAAqB,KAAK;UACjD,qBAAqB,qBAAqB,KAAK;UAC/C,sBAAsB,oBAAoB,KAAK;UAC/C,UAAU,KAAK;UACf,aAAa,KAAK;;AAEtB,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;AAIf,cAAU,YAAY;AACtB,kBAA4B;ACzI5B;;;;;;;;;wBA0B2B;MACvB;AACI,cAAM,QAAQ;AACd,aAAK,kBAAkB;;MAM3B;AACI,cAAM,IAAI;;MAYd;AACI,YAAI,UAAU,QAAQ,UAAU;AAC5B,iBAAO;mBAEF,OAAO,SAAS,OAAO;AAC5B,iBAAO,KAAK,gCAAgC,QAAQ;mBAE/C,OAAO,WAAW;AACvB,iBAAO;;AAEX,4BAAoB,OAAO,MAAM,GAAG,OAAO,SAAS,OAAO;AAC3D,qBAAa,GAAG,IAAI,OAAO,QAAQ,EAAE;AACjC,oBAAU,OAAO,OAAO,SAAS,OAAO,SAAS;AACjD,oBAAU,OAAO;AACjB,cAAI,KAAK,QAAQ,KAAK,QAAQ,IAAI,KAAK,IAAI;AACvC,wBAAY,KAAK;qBAEZ,MAAM;AACX,wBAAY,KAAK;qBAEZ,MAAM;AACX,wBAAY,KAAK;;AAGjB,gBAAI,MAAM;AACN,oBAAM,IAAI,WAAW,0DACjB,KAAK,UAAU,UAAU,MAAM,KAAK,UAAU;;AAEtD,wBAAY,KAAK;;;AAGzB,eAAO;;MAEX;AAEI,YAAI,MAAM,QAAQ,eAAe,CAAC,MAAM,QAAQ,WAAW;AAEvD,uBAAa,CAAC,mBAAmB;;AAErC,qBAAa;AACb,YAAI,WAAW,SAAS;AACpB,gBAAM,IAAI,WAAW,wEACT,WAAW;;AAI3B,yBAAiB;AACjB,4BAAoB;AAChB,cAAI,SAAS,QAAQ,MAAM,OAAO;AAC9B,uBAAW,KAAK,MAAM;;;AAG9B,qBAAa,SAAqB;AAClC,YAAI,WAAW,SAAS;AACpB,gBAAM,IAAI,WAAW,8EACW,KAAK,UAAU;;AAEnD,0BAAkB,WAAW,MAAM,OAAO,OAAO,WAAW,GAAG,MAAM;AACrE,qBAAa,GAAG,IAAI,WAAW,QAAQ,EAAE;AACrC,wBAAc,WAAW,MAAM,OAAO,OAAO,WAAW,GAAG,MAAM;AACjE,wBAAc,KAAK,gCAAgC,aAAa;;AAIpE,yBAAiB,WAAW,IAAI,WAAS,MAAM;AAC/C,YAAI,WAAW,QAAQ,UAAU,MAC7B,SAAqB,UAAU,WAAW;AAC1C,eAAK,kBAAkB;;AAGvB,eAAK,kBAAkB;;;MAG/B;AACI,eAAO,KAAK;AACR,mBAAS;AACT,cAAI,KAAK;AACL,mCAAuB;AACvB,8BAAkB,OAAO,IAAI,YAAS,OAAM;AAC5C,gBAAI,UAAU,QAAQ,UAAU;AAG5B,8BAAgB,MAAc;AAC9B,4BAAc;AACV,8BAAc,EAAE;AAChB,6BAAa,GAAG,IAAI,UAAU,OAAO,EAAE;AACnC,sBAAI,aAAa,GAAG;;AAExB,+BAAe
,KAAK;;AAExB,qBAAO,KAAK,cAAc;;AAK1B,+BAAiB;AACjB,8BAAgB;AACZ,8BAAc,EAAE;AAChB,oBAAI,SAAS;AACT,iCAAe,EAAE;AACjB,oCAAkB,OAAO;AACzB,mCAAiB,OAAO,MAAM,GAAG,OAAO,CAAC;AACzC,oCAAkB,EAAE,QAAQ,CAAC,WAAW,OAAO,UAAoB,OAAO,MAAM;AAChF,gCAAc,WAAc,aAAa,CAAC,GAAG;AAC7C,gCAAc,YAAY,QAAQ;AAClC,iCAAe,KAAK;AACpB,+BAAa;2BAER,QAAQ;AACb,+BAAa,QAAgB,GAAG,OAAO,OAAO,CAAC;AAC/C,iCAAe,KAAK,WAAc,GAAG;AACrC,+BAAa;;AAIb,iCAAe,KAAK;;;AAG5B,sBAAQ,KAAK,cAAc;AAC3B,4BAAc,EAAE;AAChB,kBAAI;AAGA,oBAAI,SAAS;AACT,iCAAe,EAAE;AACjB,iCAAc,OAAO;AACrB,oCAAkB,OAAO,SAAQ;AACjC,mCAAiB,CAAC,WAAW,OAAO,OAAO,MAAM,GAAG,OAAO,SAAS;AACpE,sBAAI,WAAc,EAAE,QAAQ,CAAC,IAAI,aAAa,CAAC,GAAG,IAC7C,QAAQ;2BAER,QAAQ;AACb,+BAAa,CAAC,QAAQ,GAAG,OAAO,QAAgB,GAAG,QAAQ;AAC3D,sBAAI,WAAc,GAAG;;;AAG7B,qBAAO;;;AAIX,mBAAO,KAAK,cAAc;;;;MAItC;AACI,qBAAa;AACb;AACA,YAAI,WAAW,MAAM;AACjB,wBAAc;;AAGd,wBAAc,WAAW,GAAG,MAAM;;AAEtC,qBAAa,GAAG,IAAI,WAAW,QAAQ,EAAE;AACrC,wBAAc,WAAW,MAAM,OAAO,OAAO,WAAW,GAAG,MAAM;AACjE,wBAAc,KAAK,gCAAgC,aAAa;;AAEpE,yBAAiB;AACjB,4BAAoB;AAChB,cAAI,SAAS,QAAQ,MAAM,OAAO;AAC9B,uBAAW,KAAK,MAAM;;;AAG9B,qBAAa,SAAqB;AAClC,YAAI,WAAW,WAAW;AACtB,wBAAc,WAAW,OAAO;;AAGhC,wBAAc,CAAC,MAAM,OAAO;;AAEhC,eAAO;;MAEX;AACI,eAAO,KAAS;AACZ,cAAI,QAAQ;AACR,mBAAO;;AAEX,cAAI,CAAC,MAAM,QAAQ;AACf,kBAAM,IAAI,WAAW;;AAEzB,cAAI,CAAC,MAAM,QAAQ;AACf,kBAAM,IAAI,WAAW;;AAEzB,cAAI,KAAK,WAAW,OAAO;AACvB,kBAAM,IAAI,WAAW,mGAEb,OAAO,aAAa,KAAK;;AAErC,cAAI,KAAK,MAAM,OAAK,KAAK;AACrB,mBAAO;;AAEX,iBAAO,KAAK,IAAI,OAAK,KAAK,OAAO,IAAI,WAAe,GAAG;AACvD,uBAAa,KAAK;AAClB,uBAAa,GAAG,IAAI,KAAK,SAAS,GAAG,EAAE;AACnC,qBAAS,WAAe,QAAQ,KAAK;;AAEzC,iBAAO;;;;wBAIM;MACrB;AACI,cAAM;;MAEV;AACI,eAAO,KAAK;AACR,uBAAa,OAAO,GAAG;AACvB,uBAAa,GAAG,IAAI,OAAO,QAAQ,EAAE;AACjC,qBAAS,MAAQ,QAAQ,OAAO;;AAEpC,iBAAO;;;;AAKnB,UAAI,YAAY;AAChB,kBAA4B;AA+CrB;AACH,UAAI,MAAM,QAAQ;AACd,sBAAc,IAAI,MAAI;AACtB,eAAO,MAAM,MAAM;;AAGnB,eAAO,IAAI,MAAI;;;6BAGO;MAC1B;AACI,cAAM;;MAEV;AACI,eAAO,KAAK;AACR,uBAAa,OAAO,GAAG;AACvB,uBAAa,GAAG,IAAI,OAAO,QAAQ,EAAE;AACjC,qBAAS,KAAQ,QAAQ,OAAO;;AAEpC,iBAAO;;;;AAKnB,eAAS,YAAY;AACrB,kBAA4B;AA+CrB;AACH,UAAI,MAAM,QAAQ;AACd,sBAAc,IAAI,WAAS;AAC3B,eAAO,MAAM,MAAM;;AAGnB,eAAO,IAAI,WAAS;;;0BAGC;MACzB;AACI,cAAM;;MAEV;AACI,eAAO,KAAK;AACR,uBAAa,OAAO,GAAG;AACvB,uBAAa,GAAG,IAAI,OAAO,QAAQ,EAAE;AACjC,qBAAS,MAAQ,QAAQ,OAAO;;AAEpC,iBAAO,KAAQ,IAAI,OAAO,QAAQ;;;;AAK9C,YAAQ,YAAY;AACpB,kBAA4B;AAgDrB;AACH,UAAI,MAAM,QAAQ;AACd,sBAAc,IAAI,QAAQ;AAC1B,eAAO,MAAM,MAAM;;AAGnB,eAAO,IAAI,QAAQ;;;4BAGE;MACzB;AACI,cAAM;;MAEV;AACI,eAAO,KAAK;AACR,uBAAa,OAAO;AACpB,uBAAa,GAAG,IAAI,OAAO,QAAQ,EAAE;AACjC,qBAAS,QAAY,QAAQ,OAAO;;AAExC,iBAAO;;;;AAKnB,cAAQ,YAAY;AACpB,kBAA4B;AA+CrB;AACH,UAAI,MAAM,QAAQ;AACd,sBAAc,IAAI,UAAQ;AAC1B,eAAO,MAAM,MAAM;;AAGnB,eAAO,IAAI,UAAQ;;;4BAGE;MACzB;AACI,cAAM;;MAEV;AACI,eAAO,KAAK;AACR,uBAAa,OAAO;AACpB,uBAAa,GAAG,IAAI,OAAO,QAAQ,EAAE;AACjC,qBAAS,QAAY,QAAQ,OAAO;;AAExC,iBAAO;;;;AAKnB,cAAQ,YAAY;AACpB,kBAA4B;AA+CrB;AACH,UAAI,MAAM,QAAQ;AACd,sBAAc,IAAI,UAAQ;AAC1B,eAAO,MAAM,MAAM;;AAGnB,eAAO,IAAI,UAAQ;;;8BAGM;MAC7B;AACI,cAAM;AACN,aAAK,eAAe;AACpB,YAAI,QAAQ;AACR,iBAAO;;AAEX,aAAK,OAAO,KAAK,QAAQ,OAAO,KAAK,eAAe,KAAK;AACzD,aAAK,kBAAkB;AACvB,aAAK,kBAAkB;;MAE3B;AAEI,YAAI,CAAE,OAAM,QAAQ,eAAe,MAAM,QAAQ,WAAW,QACxD,WAAW,WAAW;AACtB,gBAAM,IAAI,WAAW;;AAGzB,qBAAa;AACb,2BAAmB;AACnB,4BAAoB;AAChB,cAAI,SAAS;AACT,2BAAe;AACf;;;AAGR,YAAI;AACA;;AAEJ,yBAAiB;AACjB,qBAAa,GAAG,IAAI,WAAW,QAAQ,EAAE;AACrC,yCAA+B,WAAW,GAAG;AAC7C,iCAAuB,OAAO,KAAK,MAAM;AACzC,uBAAa;AACb,8BAAoB;AAChB,gBAAI,aAAiB,OAAO;AACxB,uBAAS;AACT;;;AAGR,cAAI,CAAC;AACD,qBAAS,KAAK;;;AAGtB,YAAI,SAAS,SAAS;AAClB,gBAAM,IAAI,WAAW,8GAEjB,KAAK,UAAU;;;MAG3B;AACI,eAAO,KAAK;AACR,iBAAO,YAAc,QAAQ,KAAK;;;MAG1C;AACI,YAAI,CAAE,OAAM,QAAQ
,eAAe,MAAM,QAAQ,WAAW;AACxD,gBAAM,IAAI,WAAW;;AAEzB,4BAAoB;AACpB,4BAAoB,YAAY,GAAG;AACnC,qBAAa,KAAK,OAAO,IAAI,YAAY,SAAS,KAAK,OAAO,KAAK;AAGnE,4BAAoB,YAAY,MAAM;AAClC,cAAI,YAAY,SAAS,QAAQ,MAAM,SAAS;AAC5C,wBAAY,QAAQ;AACpB;;AAEJ,sBAAY,SAAS,MAAM;;AAE/B,eAAO;;MAEX;AACI,YAAI,QAAQ;AACR,iBAAO;;AAEX,YAAI,CAAC,MAAM,QAAQ;AACf,gBAAM,IAAI,WAAW;;AAEzB,YAAI,CAAC,MAAM,QAAQ;AACf,gBAAM,IAAI,WAAW;;AAEzB,YAAI,KAAK,WAAW,OAAO;AACvB,gBAAM,IAAI,WAAW,mCAAmC,KAAK,qCAC5B,OAAO;;AAE5C,eAAO,KAAS;AACZ,6BAAmB;AACnB,eAAK,QAAQ;AACT,gBAAI,KAAK;AACL,6BAAe;AACf;;;AAGR,cAAI;AACA,mBAAO;;AAEX,8BAAoB;AACpB,uBAAa,GAAG,IAAI,OAAO,QAAQ,EAAE;AACjC,gBAAI,KAAK,MAAM;AAEX,0BAAY,KAAK,UAAa,OAAO,IAAI,OAAO;uBAE3C,KAAK,GAAG,OAAO,OAAO,GAAG;AAE9B,0BAAY,KAAK,WAAe,KAAK,IAAI;;AAGzC,0BAAY,KAAK,KAAK;;;AAG9B,oCAA0B,QAAW,aAAa,KAAK;AACvD,iBAAO,IAAQ,mBAAmB,IAAI;;;MAG9C;AACI,wBAAe;UACX,MAAQ,KAAK;;AAEjB,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;AAIf,gBAAY,YAAY;AACxB,kBAA4B;AAiDrB;AACH,UAAI,MAAM,QAAQ;AACd,sBAAc,IAAI,YAAY;AAC9B,eAAO,MAAM,MAAM;;AAGnB,eAAO,IAAI,YAAY;;;AAY/B;AACI,aAAO,OAAO;AACV,gBAAQ;;AAEZ,aAAO;;AAEX;AACI,UAAI,EAAE,MAAM,SAAS,KAAK,EAAE,MAAM,SAAS;AACvC,cAAM,IAAI,oBAAoB;;AAElC,cAAgB,EAAE,MAAM,UAAU,GAAG,MAAM,uDAC5B,EAAE,MAAM;AACvB,cAAgB,EAAE,MAAM,UAAU,GAAG,MAAM,uDAC5B,EAAE,MAAM;AACvB,UAAI,OAAO,SAAS;AAChB,eAAO,CAAC,MAAM;;AAElB,UAAI,EAAE,UAAU,eAAe,EAAE,UAAU;AACvC,cAAM,IAAI,oBAAoB;;AAElC,oBAAc,EAAE,MAAM;AACtB,oBAAc,EAAE,MAAM;AACtB,UAAI,QAAQ;AAER,eAAO,CAAC,QAAQ,GAAG,QAAQ;;AAE/B,wBAAkB;AAClB,aAAO,KAAS;AACZ;AACA,YAAI,QAAQ;AACR,iBAAO,QAAQ;AACf,4BAAkB;AAClB,uBAAa,GAAG,IAAI,MAAM,EAAE;AACxB,sBAAU,KAAK;;AAEnB,cAAI,EAAE,QAAQ,EAAE,MAAM,OAAO;mBAExB,QAAQ;AACb,iBAAO,QAAQ;AACf,4BAAkB;AAClB,uBAAa,GAAG,IAAI,MAAM,EAAE;AACxB,sBAAU,KAAK;;AAEnB,cAAI,EAAE,QAAQ,EAAE,MAAM,OAAO;;AAG7B,iBAAO;;AAEX;AACA,YAAI,EAAE,MAAM,WAAW,KAAK,EAAE,MAAM,WAAW;AAC3C,cAAI,UAAU,OAAO,UAAU;AAC3B,kBAAM,EAAE,IAAI,GAAG,IAAI,UAAU;;AAG7B,kBAAM,EAAE,UAAU,CAAC,GAAG,IAAI,IAAI,GAAG,IAAI,UAAU;;;AAInD,uBAAa,UAAU,OAAO,EAAE,MAAM,SAAS;AAC/C,uBAAa,UAAU,OAAO,EAAE,MAAM,SAAS;AAC/C,gBAAM,EAAE,OAAO,GAAG,MAAM;;AAE5B,YAAI,OAAO;AACP;AACA,cAAI,QAAQ;AACR,kBAAM,QAAQ,QAAQ;;AAGtB,kBAAM,QAAQ;;AAElB,8BAAoB;AACpB,uBAAa,KAAK,IAAI,MAAM,MAAM,EAAE;AAChC,wBAAY,KAAK;;AAErB,gBAAM,IAAI,QAAQ;;AAEtB,YAAI,IAAI,MAAM,WAAW;AACrB,gBAAM,IAAI,WAAW;;AAEzB,eAAO;;;sBAGU;MACrB;AACI,cAAM;AACN,aAAK,OAAO,KAAK;AACjB,aAAK,YAAY,KAAK,aAAa,OAAO,QAAQ,KAAK;AACvD,aAAK,kBAAkB;AACvB,aAAK,kBAAkB;;MAE3B;AACI,gBAAgB,MAAM,QAAQ,eAAe,WAAW,WAAW,KAC/D,MAAM,QAAQ,WAAW,OAAO,MAAM,QAAQ,WAAW,KAAK,MAAM;AACxE,uBAAe,WAAW;AAC1B,uBAAe,WAAW;AAC1B,YAAI,OAAO,SAAS,KAAK,OAAO,SAAS;AACrC,gBAAM,IAAI,oBAAoB;;AAElC,qBAAa,KAAK,cAAc,QAAQ;AACxC,YAAI,OAAO,KAAK,QAAQ,OAAO,KAAK;AAChC,gBAAM,IAAI,WAAW,8BACd,OAAO,KAAK,WAAW,OAAO,KAAK;;;MAGlD;AACI,YAAI,OAAO,WAAW;AAClB,gBAAM,IAAI,WAAW,oEACD,OAAO;;AAE/B,iBAAS,OAAO;AAChB,iBAAS,OAAO;AAChB;AACA,YAAI,CAAC,MAAM,QAAQ,KAAK;AACpB,iBAAO;YACH,cAAc,KAAK,MAAM,GAAG,MAAM;YAClC,cAAc,KAAK,MAAM,GAAG,MAAM;;;AAItC,iBAAO,KAAK,KAAK,IAAI,aAAa,cAAc,MAAM,OAAO,GAAG,MAAM;;AAE1E,YAAI,KAAK;AACL,eAAK,YAAY,IAAI,KAAK;AAC1B,eAAK,YAAY,IAAI,KAAK;;AAE9B,eAAO,SAAS,IAAI,IAAI;;MAE5B;AACI;AACA,YAAI,CAAC,MAAM,QAAQ,KAAK;AAEpB,iBAAO;YACH,cAAc,KAAK,MAAM,OAAO;YAChC,cAAc,KAAK,MAAM,OAAO;;;AAKpC,iBAAO,KAAK;;AAEhB,eAAO;;MAEX;AACI,gBAAgB,MAAM,QAAQ,eAAe,WAAW,WAAW,KAC/D,MAAM,QAAQ,WAAW,OAAO,MAAM,QAAQ,WAAW,KAAK,MAAM;AACxE,uBAAe,WAAW,GAAG;AAC7B,uBAAe,WAAW,GAAG;AAC7B,YAAI,OAAO,SAAS,KAAK,OAAO,SAAS;AACrC,gBAAM,IAAI,oBAAoB;;AAElC,qBAAa,KAAK,cAAc,QAAQ;AACxC,eAAO,OAAO,KAAK,IAAI;AACvB,eAAO,OAAO,KAAK,IAAI;AACvB,eAAO,OAAO,GAAG;AACjB,4BAAoB,OAAO,OAAO;AAClC,YAAI,YAAY,WAAW;AACvB,sBAAY,KAAK;;AAErB,eAAO;;MAEX;AACI,eAAO;;MAE
X;AACI,wBAAe;UACX,MAAQ,KAAK;UACb,WAAa,KAAK;;AAEtB,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;AAIf,QAAI,YAAY;AAChB,kBAA4B;ACl9B5B;;;;;;;;;gCAgBmC;MAC/B;AACI,cAAM;AACN,aAAK,kBAAkB;AACvB,aAAK,SAAS,KAAK;;MAEvB;AACI,eAAO;;MAEX;AACI,2BAAmB,MAAM;AACzB,wBAAe,CAAE,QAAQ,KAAK;AAC9B,eAAO,OAAO,SAAQ;AACtB,eAAO;;MAEX;AACI,eAAO,KAAK;AACR,eAAK,eAAe,QAAQ;AAC5B,yBAAc,oBAAoB;AAClC,yBAAe,MAAM,eAAe,OAAM,OAAO,GAAG,KAAK,QAAQ,IAAI;AACrE,yBAAe,aAAe,QAAQ,MAAM,QAAO,OAAO,eAAe;AACzE,iBAAO;;;;AAKnB,kBAAc,YAAY;AAC1B,kBAA4B;kCACS;MACjC;AACI,cAAM;AACN,aAAK,kBAAkB;AACvB,aAAK,OAAO,KAAK;;MAErB;AACI,eAAO;;MAEX;AACI,2BAAmB,MAAM;AACzB,wBAAe,CAAE,MAAM,KAAK;AAC5B,eAAO,OAAO,SAAQ;AACtB,eAAO;;MAEX;AACI,eAAO,KAAK;AACR,eAAK,eAAe,QAAQ;AAC5B,yBAAc,oBAAoB;AAClC,cAAI,KAAK,OAAO,KAAK,KAAK,OAAO;AAC7B,2BAAe;AACX,6BAAe,KAAK,KAAK,KAAK,OAAQ,KAAI,KAAK;AAC/C,qBAAO,OAAM,IAAI,eAAe,OAAM,OAAO,GAAG;;AAEpD,mBAAO,aAAe,QAAQ,MAAM,QAAO,OAAO,eAAe;;AAErE,iBAAO;;;;AAKnB,oBAAgB,YAAY;AAC5B,kBAA4B;+BA8BM;MAC9B;AACI,cAAM;AACN,aAAK,kBAAkB;AACvB,aAAK,OAAO,KAAK;AACjB,aAAK,aAAa,KAAK;;MAE3B;AACI,eAAO,KAAK,cAAc,oBAAoB,QAAQ;;MAE1D;AACI,eAAO;;MAEX;AACI,2BAAmB,MAAM;AACzB,wBAAe,CAAE,MAAM,KAAK;AAC5B,eAAO,OAAO,SAAQ;AACtB,eAAO;;MAEX;AACI,eAAO,KAAK;AACR,cAAI,KAAK,OAAO,KAAK,KAAK,OAAO;AAC7B,+BAAmB,KAAK,eAAe;AACvC,kCAAsB;AAClB,6BAAc,oBAAoB;AAClC,4BAAc;AACd,6BAAc;AACd,6BAAe,CAAC,QAAQ;AACxB,4BAAc,aAAa,cAAc,aAAa,KAAK;AAC3D,wBAAU,OAAO,SAAS;AAE1B,wBAAY,MAAI,KAAK,QAAS,KAAI,KAAK,OAAO,UAAU,OAAO;AAC/D,wBAAU,CAAC,IAAI,SAAS,KAAK;AAE7B,wBAAU,OAAM,IAAI,SAAS,IAAI,QAAQ,IAAI,IAAI,IAAI;AACrD,qBAAO,EAAE,IAAI,GAAG,IAAI;;AAExB,mBAAO,aAAe,eAAe,MAAM,oBAAoB,SAAS,OAAO,eAAe;;AAElG,iBAAO;;;;AAKnB,iBAAa,YAAY;AACzB,kBAA4B;ACvJ5B;;;;;;;;;AAoCO,6EAAsE;AACzE;AACA,UAAI,EAAE,SAAS;AACX,cAAM,YAAgB,GAAG,OAAM,WAAU,MAAM,OAAO;iBAEjD,EAAE,SAAS;AAEhB,cAAM,YAAgB,GAAG,OAAM,WAAU,MAAM,OAAO;iBAEjD,EAAE,SAAS;AAChB,cAAM,YAAgB,GAAG,OAAM,WAAU,MAAM,OAAO;;AAGtD,cAAM,IAAI,oBAAoB,2DAA2D,EAAE;;AAG/F,aAAO;;AAmBX,uFAAkF;AAC9E,aAAO,KAAK;AACR,gCAAwB,QAAY,GAAG;AACvC,sBAAa,gBAAgB;AAC7B,0BAAiB,gBAAgB;AACjC,uBAAe,mBAAmB,GAAG,OAAM,WAAU,MAAM,OAAO;AAClE,eAAO,CAAC,QAAQ,OAAM;;;AAoB9B,yFAAoF;AAChF,aAAO,KAAK;AACR,gCAAwB,QAAY,GAAG;AACvC,sBAAa,gBAAgB;AAC7B,0BAAiB,gBAAgB;AACjC,4BAAoB;AACpB,2BAAmB,QAAiB,GAAG,EAAE;AACrC,cAAI,cAAc,QAAQ,UAAU;AAChC,wBAAY,KAAK;;AAGjB,wBAAY,KAAK,EAAE,MAAM;;;AAGjC,8BAAsB,MAAK,QAAQ;AACnC,kCAA0B,UAAS,QAAQ;AAC3C,+BAAuB,SAAS,OAAO,OAAO,MAAM,QAAQ;AAC5D,8BAAsB,QAAQ,OAAO,OAAO,KAAK,QAAQ;AACzD,uBAAe,mBAAmB,GAAG,eAAe,mBAAmB,eAAe,gBAAgB;AACtG,eAAO,CAAC,QAAQ,OAAM;;;AAcvB,gFAA2E;AAC9E,UAAI,aAAiB,cAAc,QAAQ,QAAQ,QAAiB,GAAG,EAAE,OAAO;AAC5E,eAAO,gCAAgC,GAAG,OAAO,MAAM,eAAe;;AAGtE,eAAO,kCAAkC,GAAG,OAAO,MAAM,eAAe;;;qCAGxC;MACpC;AACI,YAAI,QAAQ;AACR,iBAAO;;AAEX,cAAM;AACN,aAAK,kBAAkB;AACvB,aAAK,OAAO,KAAK,QAAQ,OAAO,KAAK,KAAK;AAC1C,aAAK,WAAW,KAAK,YAAY,OAAO,OAAO,KAAK;AACpD,aAAK,UAAU,KAAK,WAAW,OAAO,OAAO,KAAK;AAClD,aAAK,SAAS,KAAK,UAAU,OAAO,OAAO,KAAK;AAChD,aAAK,QAAQ,KAAK,SAAS,OAAO,OAAO,KAAK;AAC9C,aAAK,kBAAkB,eAAe,KAAK,mBAAmB;AAC9D,aAAK,mBAAmB,eAAe,KAAK,oBAAoB;AAChE,aAAK,wBACD,eAAe,KAAK,yBAAyB;AACjD,aAAK,4BACD,eAAe,KAAK,6BAA6B;AACrD,aAAK,iBAAiB,cAAc,KAAK;AACzC,aAAK,kBAAkB,cAAc,KAAK;AAC1C,aAAK,kBAAkB,eAAe,KAAK;AAC3C,aAAK,mBAAmB,eAAe,KAAK;;MAEhD;AACI,qBAAa,mBAAmB;AAChC,qBAAa,KAAK,QAAQ,IAAI,KAAK,OAAQ,KAAK,OAAO,WAAW;AAClE,oBAAY,WAAW;AACvB,YAAI,OAAO;AACP,gBAAM,IAAI,WAAW,QAAQ,mGAEtB,KAAK,UAAU;;AAE1B,aAAK,YACD,CAAC,IAAI,UAAU,CAAE,MAAM,WAAW,QAAQ,MAAM,EAAG,OAAO;AAC9D,sBAAc,CAAC;AACf,YAAI,KAAK;AACL,eAAK,QAAQ,KAAK,UAAU,SAAS,OAAO,MAAM,KAAK,kBAAkB,KAAK,kBAAkB,MAAM,KAAK;;AAE/G,YAAI,KAAK;AACL,eAAK,OAAO,KAAK,UAAU,QAAQ,OAAO,MAAM,KAAK,iBAAiB,KAAK,iBAAiB,MAAM,KAAK;;AAE3G,aAA
K,aAAa,KAAK,UAAU,eAAe,OAAO,MAAM,KAAK,uBAAuB,MAAM;AAC/F,aAAK,iBAAiB,KAAK,UAAU,mBAAmB,OAAO,MAAM,KAAK,2BAA2B,MAAM;AAC3G,aAAK,QAAQ;;MAEjB;AACI,eAAO,KAAK;AACR,2BAAiB,OAAO,eAAe,OAAO,QAAQ,OAAO;AAC7D,yBAAc,oBAAoB;AAClC,6BAAmB,OAAM;AACzB,uBAAa,WAAW;AACxB,gCAAsB,QAAiB,GAAG;AAC1C,uBAAa,KAAK,QAAQ,IAAI,KAAK,OAAQ,KAAK,OAAO;AACvD,wBAAc,OAAO,MAAM;AAC3B,iCAAuB,aAA2B,GAAG;AACrD,yBAAe,QAAQ,WAAW;AAClC,sCAA4B,cAAc;AAC1C,8BAAoB;AACpB,oCAA0B,CAAC,aAAiB,qBAAqB,QAAiB,GAAG,MAAM,MAAM,GAAG,OAAO;AAC3G,qCAA2B;AACvB,gBAAI;AACA,0CAA4B,KAAK,WAAW,OAAO,QAAQ;AAC3D,8CAAgC,KAAK,eAAe,OAAO,QAAQ;AACnE,oCAAsB,KAAK,SAAS,KAAK,KAAK,OAAO,QAAQ,kBAAkB;AAC/E,qCAAuB,KAAK,QAAQ,KAAK,MAAM,OAAO,QAAQ,kBAAkB;AAChF,qBAAO,mBAAmB,QAAO,qBAAqB,yBAAyB,eAAe,gBAAgB,KAAK;;AAGnH,qBAAO,mBAAmB,QAAO,KAAK,WAAW,QAAQ,KAAK,eAAe,QAAQ,KAAK,QAAQ,OAAO,OAAO,KAAK,KAAK,QAAQ,KAAK,SAAS,OAAO,OAAO,KAAK,MAAM,QAAQ,KAAK;;;AAG9L,cAAI,CAAC;AACD,mBAAO;;AAEX,qDAAyC,yBAAyB,QAAO,KAAK,MAAM,QAAQ,KAAK,KAAK,QAAQ,eAAe,KAAK;AAClI,kCAAwB;AACpB,iBAAS;AACL,4BAAc,IAAI;AAClB,gCAAkB,UAAS;AAC3B,kCAAoB,UAAU,IAAI,OAAO,IAAI;AAC7C,wBAAS,MAAM,UAAU,IAAI;;;AASrC,8CAAoC;AAChC,4BAAgB,KAAK,YAAY,OAAM,KAAK;AAC5C,4BAAgB,KAAK,gBAAgB,WAAU,KAAK;;AAExD;AACA,iBAAO;;;MAGf;AACI,wBAAe;UACX,MAAM,KAAK;UACX,UAAU,KAAK;UACf,SAAS,KAAK;UACd,QAAQ,KAAK;UACb,OAAO,KAAK;UACZ,iBAAiB,qBAAqB,KAAK;UAC3C,kBAAkB,qBAAqB,KAAK;UAC5C,uBAAuB,qBAAqB,KAAK;UACjD,2BAA2B,qBAAqB,KAAK;UACrD,iBAAiB,qBAAqB,KAAK;UAC3C,kBAAkB,qBAAqB,KAAK;UAC5C,gBAAgB,oBAAoB,KAAK;UACzC,iBAAiB,oBAAoB,KAAK;;AAE9C,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;AAIf,uBAAmB,YAAY;AAC/B,kBAA4B;qCACY;MACpC;AACI,YAAI,QAAQ;AACR,iBAAO;;AAEX,cAAM;AACN,aAAK,OAAO,KAAK,QAAQ,OAAO,KAAK,KAAK;AAC1C,YAAI,OAAO,KAAK,SAAS;AACrB,cAAI,CAAC,OAAO,UAAU,KAAK;AACvB,kBAAM,IAAI,MAAM,gDAAgD,KAAK;;mBAGpE,MAAM,QAAQ,KAAK;AACxB,6BAAmB,KAAK;AACpB,gBAAI,CAAC,OAAO,UAAU;AAClB,oBAAM,IAAI,MAAM,0DACI,KAAK,UAAU,KAAK;;;;AAKhD,gBAAM,IAAI,MAAM,wEACI,KAAK,UAAU,KAAK;;AAE5C,aAAK,UAAU,KAAK,WAAW,OAAO,OAAO,KAAK;AAClD,aAAK,SAAS,KAAK,UAAU,OAAO,OAAO,KAAK;AAChD,aAAK,QAAQ,KAAK,SAAS,OAAO,OAAO,KAAK;AAC9C,aAAK,kBAAkB,eAAe,KAAK,mBAAmB;AAC9D,aAAK,mBAAmB,eAAe,KAAK,oBAAoB;AAChE,aAAK,kBAAkB,eAAe,KAAK;AAC3C,aAAK,mBAAmB,eAAe,KAAK;AAC5C,aAAK,kBAAkB;;MAE3B;AACI,qBAAa,mBAAmB;AAChC,sBAAc,WAAW;AAEzB,YAAI,OAAO,KAAK,SAAS;AACrB,eAAK,OAAO,CAAC,KAAK;;AAEtB,qBAAa,GAAG,IAAI,KAAK,KAAK,QAAQ,EAAE;AACpC,cAAI,KAAK,KAAK,KAAK;AACf,iBAAK,KAAK,MAAM;;;AAIxB,2BAAmB,KAAK;AACpB,cAAI,OAAO,KAAK,QAAQ;AACpB,kBAAM,IAAI,MAAM,iBAAiB;;;AAGzC,YAAI,KAAK,KAAK,WAAW,SAAqB,KAAK,MAAM;AACrD,gBAAM,IAAI,MAAM,4BAA4B,KAAK;;AAErD,2BAAmB,KAAK,KAAK,IAAI,UAAQ,WAAW;AACpD,0BAAkB;AAClB,YAAI,KAAK;AACL,eAAK,QAAQ,KAAK,UAAU,SAAS,YAAY,WAAW,KAAK,kBAAkB,KAAK,kBAAkB;;AAG1G,eAAK,QAAQ;;AAEjB,YAAI,KAAK;AACL,eAAK,OAAO,KAAK,UAAU,QAAQ,YAAY,WAAW,KAAK,iBAAiB,KAAK,iBAAiB;;AAGtG,eAAK,OAAO;;AAEhB,aAAK,QAAQ;;MAEjB;AACI,uBAAc,oBAAoB;AAClC,2BAAmB,OAAM;AACzB,sBAAc,WAAW;AACzB,eAAO,KAAK;AACR,2BAAiB;AACjB,eAAM,aAAM,uBAAa,QAAQ,QAAO,KAAK,MAAM;AACnD,iCAAuB,aAA2B,GAAG;AACrD,4BAAkB,KAAK;AACnB,2BAAe,OAAO,WAAW;;AAErC,4BAAkB;AACd,gBAAI,KAAK,QAAQ,EAAE,MAAM,WAAW,SAChC,KAAK,SAAS,CAAC,QAAQ;AACvB,qBAAO,EAAE,QAAQ;;AAGjB,qBAAO;;;AAGf,uBAAY,UAAU,KAAK,MAAM;AACjC,uBAAa,UAAU,KAAK,KAAK;AAOjC,gCAAsB;AACtB,oCAA0B;AAC1B,uBAAa,GAAG,IAAI,OAAO,EAAE;AACzB,gBAAI,KAAK,KAAK,QAAQ,OAAO;AACzB,4BAAc,KAAK,WAAW;AAC9B,gCAAkB,KAAK;;AAGvB,4BAAc,KAAK;AACnB,gCAAkB,KAAK,WAAW;;;AAG1C,kBAAO,MAAK,KAAK;AACjB,sBAAW,UAAS,KAAK;AACzB,mBAAQ,OAAM,KAAK;AACnB,mBAAS,OAAO,KAAK;AACrB,iBAAO,mBAAmB,QAAO,OAAM,WAAU,QAAQ,QAAO,KAAK;;;MAG7E;AACI,wBAAe;UACX,MAAM,KAAK;UACX,SAAS,KAAK;UACd,QAAQ,KAAK;UACb,OAAO,KAAK;UACZ,iBAAiB,qBAAqB,KAAK;UAC3C,kBAAkB,qBAAqB,KAAK;UAC5C,iBAAiB,qB
AAqB,KAAK;UAC3C,kBAAkB,qBAAqB,KAAK;;AAEhD,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;AAIf,uBAAmB,YAAY;AAC/B,kBAA4B;AC1Y5B;;;;;;;;;AA4BO;AACH,aAAO,KAAK;AACR,YAAI,EAAE,SAAS;AACX,gBAAM,IAAI,WAAW,kEACd,EAAE;;AAEb,YAAI,WAAW;AACX,oBAAU,CAAC,GAAG;;AAElB,YAAI,QAAQ,WAAW;AACnB,gBAAM,IAAI,WAAW,+FACe,QAAQ;;AAEhD,wBAAgB,CAAC,CAAC,GAAG,IAAI,SAAS,CAAC,GAAG;AACtC,eAAO,KAAQ,GAAG;;;AAanB;AACH,aAAO,KAAK;AACR,YAAI,EAAE,SAAS;AACX,gBAAM,IAAI,WAAW,kEACd,EAAE;;AAEb,YAAI,WAAW;AACX,oBAAU,CAAC,CAAC,GAAG,IAAI,CAAC,GAAG;;AAE3B,YAAI,QAAQ,WAAW,KAAK,QAAQ,GAAG,WAAW,KAC9C,QAAQ,GAAG,WAAW;AACtB,gBAAM,IAAI,WAAW;;AAGzB,YAAI,cAAc;AACd,uBAAa;;AAEjB,YAAI,eAAe,kBAAkB,eAAe;AAChD,gBAAM,IAAI,WAAW,wBAAwB;;AAGjD;AACA,YAAI,eAAe;AACf,oBAAU,CAAC,CAAC,GAAG,IAAI,CAAC,GAAG,IAAI,QAAQ,IAAI,QAAQ;;AAG/C,oBAAU,CAAC,CAAC,GAAG,IAAI,QAAQ,IAAI,QAAQ,IAAI,CAAC,GAAG;;AAEnD,eAAO,KAAQ,GAAG;;;gCAGS;MAC/B;AACI,YAAI,QAAQ;AACR,iBAAO;;AAEX,cAAM;AACN,aAAK,aACD,KAAK,cAAc,OAAO,oBAAoB,KAAK;AAGvD,YAAI,KAAK,WAAW;AAChB,eAAK,UAAU,CAAC,CAAC,GAAG,IAAI,CAAC,GAAG;mBAEvB,OAAO,KAAK,YAAY;AAC7B,eAAK,UACD,CAAC,CAAC,KAAK,SAAS,KAAK,UAAU,CAAC,KAAK,SAAS,KAAK;;AAGvD,eAAK,UAAU,KAAK;AACpB,cAAI,KAAK,QAAQ,WAAW;AACxB,kBAAM,IAAI,WAAW,+EACI,KAAK,QAAQ;;AAE1C;AACA;AACA,cAAI,OAAO,KAAK,QAAQ,OAAO;AAC3B,4BAAgB,CAAC,KAAK,QAAQ,IAAI,KAAK,QAAQ;AAC/C,2BAAe,CAAC,KAAK,QAAQ,IAAI,KAAK,QAAQ;;AAG9C,iBAAK,UAAU,KAAK;AACpB,gBAAI,KAAK,QAAQ,GAAG,WAAW;AAC3B,oBAAM,IAAI,WAAW,sFACQ,KAAK,QAAQ,GAAG;;AAEjD,4BAAgB,KAAK,QAAQ;AAC7B,gBAAI,KAAK,QAAQ,GAAG,WAAW;AAC3B,oBAAM,IAAI,WAAW,qFACQ,KAAK,QAAQ,GAAG;;AAEjD,2BAAe,KAAK,QAAQ;;AAEhC,eAAK,UAAU,CAAC,eAAe;;AAEnC,aAAK,YAAY,CAAC,IAAI,UAAU,CAAE,MAAM;;MAE5C;AACI,qBAAa,mBAAmB;AAChC;AACA;AACA,YAAI,KAAK,eAAe;AACpB,cAAI,WAAW,MAAM,QAAQ,WAAW,MAAM;AAC1C,mBAAO,WAAW,KAAK,KAAK,QAAQ,GAAG,KAAK,KAAK,QAAQ,GAAG;;AAG5D,mBAAO;;AAEX,cAAI,WAAW,MAAM,QAAQ,WAAW,MAAM;AAC1C,mBAAO,WAAW,KAAK,KAAK,QAAQ,GAAG,KAAK,KAAK,QAAQ,GAAG;;AAG5D,mBAAO;;AAEX,iBAAO,CAAC,WAAW,IAAI,WAAW,IAAI,MAAM;;AAG5C,cAAI,WAAW,MAAM,QAAQ,WAAW,MAAM;AAC1C,mBAAO,WAAW,KAAK,KAAK,QAAQ,GAAG,KAAK,KAAK,QAAQ,GAAG;;AAG5D,mBAAO;;AAEX,cAAI,WAAW,MAAM,QAAQ,WAAW,MAAM;AAC1C,mBAAO,WAAW,KAAK,KAAK,QAAQ,GAAG,KAAK,KAAK,QAAQ,GAAG;;AAG5D,mBAAO;;AAEX,iBAAO,CAAC,WAAW,IAAI,MAAM,MAAM,WAAW;;;MAGtD;AACI,eAAO,KAAK,MAAM,iBAAiB,oBAAoB,SAAS,KAAK,SAAS,KAAK;;MAEvF;AACI,wBAAe;UACX,SAAS,KAAK;UACd,YAAY,KAAK;;AAErB,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;AAIf,kBAAc,YAAY;AAC1B,kBAA4B;ACtL5B;;;;;;;;;AAkCO;AACH,aAAO,KAAK;AACR,wBAAgB;AAChB,sBAAc;AACd,yBAAiB;AACjB,YAAI,WAAW;AACX,oBAAU,CAAC,GAAG;;AAElB,YAAI,WAAW;AACX,oBAAU;;AAEd,YAAI,cAAc;AACd,uBAAa;;AAEjB,YAAI,YAAY;AACZ,qBAAW;;AAIf,YAAI,sBAAsB,GAAG;AAC7B;AACA,8BAAuB,YAAY,SAAU,SAAS;AACtD,YAAI,aAAa;AAEb,cAAI,SAAY,GAAG,UAAU,SAAS;;AAKtC,cAAI,SAEJ,GAAG,UAAU,SAAS;;AAE1B,YAAI,eAAe;AACf,cAAI,WAAc,GAAG,CAAC,GAAG,GAAG,GAAG;;AAEnC,eAAO;;;AAaR;AACH,aAAO,KAAK;AACR,wBAAgB;AAChB,sBAAc;AACd,yBAAiB;AACjB,YAAI,WAAW;AACX,oBAAU,CAAC,GAAG,GAAG;;AAErB,YAAI,WAAW;AACX,oBAAU;;AAEd,YAAI,cAAc;AACd,uBAAa;;AAEjB,YAAI,YAAY;AACZ,qBAAW;;AAGf,YAAI,sBAAsB,GAAG;AAC7B;AACA,8BAAuB,YAAY,SAAU,SAAS;AACtD,YAAI,aAAa;AACb,cAAI,UAAc,GAAG,UAAU,SAAS;;AAGxC,cAAI,UAAc,GAAG,UAAU,SAAS;;AAE5C,YAAI,eAAe;AACf,cAAI,WAAc,GAAG,CAAC,GAAG,GAAG,GAAG,GAAG;;AAEtC,eAAO;;;4BAMgB;MAO3B;AACI,YAAI,KAAK,YAAY;AACjB,eAAK,WAAW;;AAEpB,cAAM;AACN,YAAI,OAAO,KAAK,aAAa;AACzB,eAAK,WAAW,CAAC,KAAK;mBAEjB,MAAM,QAAQ,KAAK,aACxB,KAAK,SAAS,WAAW,KACzB,OAAO,KAAK,SAAS,OAAO;AAC5B,eAAK,WAAW,KAAK;;AAGrB,gBAAM,IAAI,WAAW,qGAEd,KAAK,UAAU,KAAK;;AAE/B,8BAAsB,KAAK,UAAU;AACrC,YAAI,KAAK,WAAW;AAChB,eAAK,UAAU,KAAK;;AAGpB,cAAI,OAAO,KAAK,YAAY;AACxB,iBAAK,UAAU,CAAC,KAAK;qBAEhB,MAAM,QAAQ,KAAK,YACxB,KAAK,QAAQ,WAAW,KACxB,OAAO,K
AAK,QAAQ,OAAO;AAC3B,iBAAK,UAAU,KAAK;;AAGpB,kBAAM,IAAI,WAAW,oGAEd,KAAK,UAAU,KAAK;;;AAGnC,8BAAsB,KAAK,SAAS;AACpC,aAAK,UAAU,KAAK,WAAW,OAAO,UAAU,KAAK;AACrD,yBAAiB,KAAK;AACtB,aAAK,YAAY,CAAC,IAAI,UAAU,CAAE,MAAM;;MAE5C;AACI,qBAAa,mBAAmB;AAChC,uBAAe,iBAAiB,WAAW,IAAI,KAAK,SAAS,IAAI,KAAK,SAAS,KAAK,QAAQ;AAC5F,eAAO,CAAC,WAAW,IAAI,QAAQ,WAAW;;MAE9C;AACI,eAAO,KAAK;AACR,eAAK,eAAe,QAAQ;AAE5B,mBAAS,aAAa,oBAAoB,SAAS;AACnD,yBAAe,KAAK,gBAAgB,oBAAoB,SAAS,CAAC,KAAK,SAAS,IAAI,IAAI,CAAC,KAAK,QAAQ,IAAI,IAAI,KAAK,SAAS;AAE5H,iBAAO,QAAY,QAAQ,CAAC;;;MAGpC;AACI,wBAAe;UACX,UAAU,KAAK;UACf,SAAS,KAAK;UACd,SAAS,KAAK;;AAElB,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;+BAGmB;MAC9B;AACI,cAAM;;MAEV;AACI,wBAAgB;AAChB,yBAAiB;AACjB,eAAO,OAAO,QAAQ,UAAU,SAAS,SAAS,YAAY;;;AAItE,iBAAa,YAAY;AACzB,kBAA4B;mCACU;MAClC;AACI,cAAM;;MAEV;AACI,wBAAgB;AAChB,yBAAiB;AACjB,eAAO,OAAO,QAAQ,UAAU,SAAS,SAAS,YAAY;;;AAItE,qBAAiB,YAAY;AAC7B,kBAA4B;4BAIG;MAC3B;AACI,YAAI,KAAK,YAAY;AACjB,eAAK,WAAW,CAAC,GAAG;;AAExB,cAAM;AACN,aAAK,WAAW,MAAM,QAAQ,KAAK,YAC/B,KAAK,WACL,CAAC,KAAK,UAAU,KAAK;AACzB,YAAI,KAAK,WAAW;AAChB,eAAK,UAAU,KAAK;mBAEf,MAAM,QAAQ,KAAK;AACxB,cAAI,KAAK,QAAQ,WAAW;AACxB,kBAAM,IAAI,WAAW,wHAEd,KAAK,QAAQ;;AAExB,eAAK,UAAU,KAAK;;AAIpB,eAAK,UAAU,CAAC,KAAK,SAAS,KAAK;;AAEvC,8BAAsB,KAAK,UAAU;AACrC,8BAAsB,KAAK,SAAS;AACpC,aAAK,UAAU,KAAK,WAAW,OAAO,UAAU,KAAK;AACrD,aAAK,aACD,KAAK,cAAc,OAAO,iBAAiB,KAAK;AACpD,wBAAgB,KAAK;AACrB,yBAAiB,KAAK;AACtB,aAAK,YAAY,CAAC,IAAI,UAAU,CAAE,MAAM;;MAE5C;AACI,qBAAa,mBAAmB;AAChC,mBAAW,KAAK,eAAe,kBAAkB,WAAW,KAAK,WAAW;AAC5E,mBAAW,KAAK,eAAe,kBAAkB,WAAW,KAAK,WAAW;AAC5E,eACI,iBAAiB,MAAM,KAAK,SAAS,IAAI,KAAK,SAAS,KAAK,QAAQ;AACxE,eACI,iBAAiB,MAAM,KAAK,SAAS,IAAI,KAAK,SAAS,KAAK,QAAQ;AACxE,YAAI,KAAK,eAAe;AACpB,iBAAO,CAAC,WAAW,IAAI,WAAW,IAAI,MAAM;;AAG5C,iBAAO,CAAC,WAAW,IAAI,MAAM,MAAM,WAAW;;;MAGtD;AACI,eAAO,KAAK;AACR,eAAK,eAAe,QAAQ;AAC5B,iBAAO,KAAK,gBAAgB,oBAAoB,SAAS,KAAK,UAAU,KAAK,SAAS,KAAK,SAAS,KAAK;;;MAGjH;AACI,wBAAe;UACX,UAAU,KAAK;UACf,SAAS,KAAK;UACd,SAAS,KAAK;UACd,YAAY,KAAK;;AAErB,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;+BAGmB;MAC9B;AACI,cAAM;;MAEV;AACI,wBAAgB;AAChB,yBAAiB;AACjB,eAAO,OAAO,QAAQ,UAAU,SAAS,SAAS,YAAY;;;AAItE,iBAAa,YAAY;AACzB,kBAA4B;mCACU;MAClC;AACI,cAAM;;MAEV;AACI,wBAAgB;AAChB,yBAAiB;AACjB,eAAO,OAAO,QAAQ,UAAU,SAAS,SAAS,YAAY;;;AAItE,qBAAiB,YAAY;AAC7B,kBAA4B;4BAIG;MAC3B;AACI,YAAI,KAAK,YAAY;AACjB,eAAK,WAAW,CAAC,GAAG,GAAG;;AAE3B,cAAM;AACN,aAAK,WAAW,MAAM,QAAQ,KAAK,YAC/B,KAAK,WACL,CAAC,KAAK,UAAU,KAAK,UAAU,KAAK;AACxC,YAAI,KAAK,WAAW;AAChB,eAAK,UAAU,KAAK;mBAEf,MAAM,QAAQ,KAAK;AACxB,cAAI,KAAK,QAAQ,WAAW;AACxB,kBAAM,IAAI,WAAW,wHAEd,KAAK,QAAQ;;AAExB,eAAK,UAAU,KAAK;;AAIpB,eAAK,UAAU,CAAC,KAAK,SAAS,KAAK,SAAS,KAAK;;AAErD,8BAAsB,KAAK,UAAU;AACrC,8BAAsB,KAAK,SAAS;AACpC,aAAK,UAAU,KAAK,WAAW,OAAO,UAAU,KAAK;AACrD,aAAK,aACD,KAAK,cAAc,OAAO,iBAAiB,KAAK;AACpD,wBAAgB,KAAK;AACrB,yBAAiB,KAAK;AACtB,aAAK,YAAY,CAAC,IAAI,UAAU,CAAE,MAAM;;MAE5C;AACI,qBAAa,mBAAmB;AAChC,qBAAa,KAAK,eAAe,kBAAkB,WAAW,KAAK,WAAW;AAC9E,mBAAW,KAAK,eAAe,kBAAkB,WAAW,KAAK,WAAW;AAC5E,mBAAW,KAAK,eAAe,kBAAkB,WAAW,KAAK,WAAW;AAC5E,iBAAS,iBAAiB,QAAQ,KAAK,SAAS,IAAI,KAAK,SAAS,KAAK,QAAQ;AAC/E,eACI,iBAAiB,MAAM,KAAK,SAAS,IAAI,KAAK,SAAS,KAAK,QAAQ;AACxE,eACI,iBAAiB,MAAM,KAAK,SAAS,IAAI,KAAK,SAAS,KAAK,QAAQ;AACxE,YAAI,KAAK,eAAe;AACpB,iBAAO,CAAC,WAAW,IAAI,WAAW,IAAI,QAAQ,MAAM;;AAGpD,iBAAO,CAAC,WAAW,IAAI,QAAQ,MAAM,MAAM,WAAW;;;MAG9D;AACI,eAAO,KAAK;AACR,eAAK,eAAe,QAAQ;AAC5B,iBAAO,KAAK,gBAAgB,oBAAoB,SAAS,KAAK,UAAU,KAAK,SAAS,KAAK,SAAS,KAAK;;;MAGjH;AACI,wBAAe;UACX,UAAU,KAAK;UACf,SAAS,KAAK;UACd,SAAS,KAAK;UACd,YAAY,KAAK;;AAErB,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;+BAGmB;MAC9B;AACI,cAAM;;MAEV;AACI,wBAAgB;AAChB
,yBAAiB;AACjB,eAAO,OAAO,QAAQ,UAAU,SAAS,SAAS,YAAY;;;AAItE,iBAAa,YAAY;AACzB,kBAA4B;mCACU;MAClC;AACI,cAAM;;MAEV;AACI,wBAAgB;AAChB,yBAAiB;AACjB,eAAO,OAAO,QAAQ,UAAU,SAAS,SAAS,YAAY;;;AAItE,qBAAiB,YAAY;AAC7B,kBAA4B;kCAIS;MACjC;AACI,cAAM;AACN,aAAK,YAAY,CAAC,IAAI,UAAU,CAAE,MAAM;;MAE5C;AACI,eAAO,CAAC,WAAW,IAAI,WAAW;;MAEtC;AACI,cAAM,IAAI;;;yCAG0B;MACxC;AACI,cAAM,QAAQ;;MAElB;AACI,eAAO,KAAK;AACR,yBAAc,oBAAoB;AAClC,iBAAO,KAAS,QAAO;;;;AAKnC,2BAAuB,YAAY;AACnC,kBAA4B;qCACY;MACpC;AACI,cAAM,QAAQ;;MAElB;AACI,eAAO,KAAK;AACR,yBAAc,oBAAoB;AAClC,iBAAO,KAAQ,QAAO;;;;AAKlC,uBAAmB,YAAY;AAC/B,kBAA4B;kCAIS;MACjC;AACI,cAAM;AACN,aAAK,aACD,KAAK,cAAc,OAAO,iBAAiB,KAAK;AACpD,wBAAgB,KAAK;AACrB,aAAK,YAAY,CAAC,IAAI,UAAU,CAAE,MAAM;;MAE5C;AACI,qBAAa;AACb,YAAI,KAAK,eAAe;AACpB,iBAAO,CAAC,WAAW,IAAI,WAAW;;AAGlC,iBAAO,CAAC,WAAW,IAAI,WAAW;;;MAG1C;AACI,cAAM,IAAI;;MAEd;AACI,wBAAe,CAAE,YAAY,KAAK;AAClC,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;;yCAG6B;MACxC;AACI,eAAO,KAAK;AACR,yBAAc,oBAAoB;AAClC,cAAI,KAAK,eAAe;AACpB,mBAAO,KAAS,QAAO,CAAC,GAAG;;AAG3B,mBAAO,KAAS,QAAO,CAAC,GAAG;;;;;AAM3C,2BAAuB,YAAY;AACnC,kBAA4B;qCACY;MACpC;AACI,eAAO,KAAK;AACR,yBAAc,oBAAoB;AAClC,cAAI,KAAK,eAAe;AACpB,mBAAO,KAAQ,QAAO,CAAC,GAAG;;AAG1B,mBAAO,KAAQ,QAAO,CAAC,GAAG;;;;;AAM1C,uBAAmB,YAAY;AAC/B,kBAA4B;ACpgB5B;;;;;;;;;0BA8B6B;MACzB;AAQI,cAAM;AACN,aAAK,QAAQ,KAAK;;MAEtB;AACI,aAAK,QAAQ;;UAGb;AAIA,YAAI,KAAK,SAAS;AACd,iBAAO,KAAK,MAAM;;AAGlB,iBAAO;;;UAGX;AAIA,YAAI,KAAK,SAAS;AACd,eAAK,MAAM,YAAY;;;UAG3B;AACA,eAAO,KAAK,MAAM;;UAGlB;AACA,eAAO,KAAK,MAAM;;UAGlB;AAEA,eAAO,KAAK,MAAM;;UAGlB;AACA,eAAO,KAAK,MAAM;;MAGtB;AACI,eAAO,KAAK,MAAM;;MAEtB;AACI,aAAK,MAAM,WAAW;;MAE1B;AACI,wBAAe;UACX,OAAS;YACL,WAAa,KAAK,MAAM;YACxB,QAAU,KAAK,MAAM;;;AAG7B,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;MAEX;AACI,cAAM,6BAA6B;AACnC,YAAI,KAAK,SAAS;AACd,eAAK,MAAM,6BAA6B;;;aAIzC,yCAAwC;AAC3C,4BAAoB,QAAO;AAC3B,sBAAc,YAAY,aAAa;AACvC,eAAO,QAAO;AACd,0BAAkB,CAAE;AACpB,eAAO,OAAO,WAAW;AACzB,eAAO,IAAI,IAAI;;;kCAGc;MACjC;AACI,cAAM;AACN,aAAK,kBAAkB;;MAE3B;AACI,qBAAa,mBAAmB;AAChC,YAAI,WAAW,SAAS;AACpB,gBAAM,IAAI,WAAW,gFACF,KAAK,UAAU;;AAEtC,aAAK,YAAY,CAAC,CAAE,OAAO;AAC3B,gCAAwB,CAAC,WAAW,IAAI,OAAO,WAAW,MAAM;AAChE,YAAI,CAAC,KAAK,MAAM;AACZ,eAAK,MAAM,MAAM;AACjB,eAAK,MAAM,QAAQ;;AAEvB,cAAM,MAAM;;MAEhB;AACI,qBAAa,mBAAmB;AAChC,gCAAwB,CAAC,WAAW,IAAI,OAAO,WAAW,MAAM;AAChE,iCAAyB,KAAK,MAAM,mBAAmB;AACvD,0BAAkB,WAAW;AAC7B,eAAO,CAAC,iBAAiB,IAAI,WAAW,OAAO,iBAAiB,MAAM;;MAE1E;AACI,eAAO,KAAK;AAER,mBAAS,oBAAoB;AAI7B,wBAAa;AAKT,2BAAe,oBAAoB,KAAK,MAAM,KAAK,SAAQ;AAC3D,mBAAO,CAAC,QAAQ;;AAEpB,6BAAmB,IAAI,OAAM,QAAQ,IAAI,OAAyB,MAAiB,MAAsB,OAAoB;AAC7H,oBAAU,WAAW;AAGrB,iBAAO;;;;AAKnB,oBAAgB,YAAY;AAC5B,kBAA4B;AACrB;AACH,gCAAwC,iCAAiC,0BAA0B;;AAEvG,6CAAyC;gCACN;MAC/B;AACI,cAAM;AASN,4BAAoB,KAAK,MAAM;AAC/B,yBAAiB;AACjB,iBAAS,eAAe,KAAK,MAAM;AACnC,iBAAS,YAAY;AACrB,aAAK,eAAe,YAAY;AAChC,oBAAY,iBACR,YAAY,mBAAmB,OAAO,QAAQ;AAClD,yBAAiB;AACjB,iBAAS,eAAe,KAAK,MAAM;AACnC,iBAAS,YAAY;AACrB,aAAK,gBAAgB,YAAY;AACjC,aAAK,aAAa,OAAO,aAAa,KAAK,aAAa;AACxD,aAAK,cAAc,OAAO,cAAc,KAAK,cAAc;AAC3D,aAAK,YAAY,KAAK,cAAc,SAChC,mCACA,KAAK;AACT,oCAA4B,KAAK;AACjC,YAAI,KAAK;AACL,gBAAM,IAAI,oBAAoB;;AAElC,aAAK,YAAY,KAAK,MAAM;AAC5B,aAAK,kBAAkB,KAAK,MAAM;AAClC,aAAK,cAAc,KAAK,MAAM;AAC9B,aAAK,kBAAkB;AACvB,aAAK,aAAa;AAClB,aAAK,YAAY,KAAK,MAAM;AAC5B,aAAK,eAAe;;UAEpB;AACA,eAAO,KAAK;;UAEZ;AAIA,aAAK,aAAa;AAClB,YAAI,KAAK,gBAAgB;AACrB,eAAK,aAAa,YAAY;;AAElC,YAAI,KAAK,iBAAiB;AACtB,eAAK,cAAc,YAAY;;;MAGvC;AACI,eAAO,KAAK,aAAa,aAAa,OAAO,KAAK,cAAc;;MAEpE;AACI,2BAAmB,QAAQ;AAC3B,+BAAuB,KAAK,MAAM,aAAa;AAC/C,aAAK,aAAa,WAAW,QAAQ,MAAM,GAAG;AAC9C,aAAK,cAAc,WAAW,QAAQ,MAAM;;MAEhD;AACI,0BAAkB,KAAK,aAAa,mBAAmB;AACvD,YA
AI,CAAE,OAAM,QAAQ,gBAAgB,MAAM,QAAQ,YAAY;AAC1D,wBAAc,CAAC;;AAEnB,sBAAc;AACd;AACA;AACA;AACA,YAAI,KAAK;AACL,uBAAa,YAAY,MAAM;AAC/B,wBAAc,YAAY;;AAG1B,wBAAc,YAAY;;AAE9B,sBAAc;AACd,YAAI,KAAK,cAAc;AACnB,sBAAY,YAAY,SAAS,MAAM;AACvC,yBAAe,CAAC;mBAEX,KAAK,aAAa;AACvB,yBAAe,CAAC,aAAa,YAAY;;AAGzC,yBAAe,CAAC;;AAEpB,YAAI,KAAK;AACL,cAAI,KAAK,aAAa;AAClB,mBAAO,aAAa,OAAO,YAAY,OAAO,WAAW;;AAE7D,iBAAO,CAAC,aAAa,OAAO,YAAY,OAAO,WAAW;;AAE9D,eAAO,iBAA+B;;MAE1C;AACI,2BAAmB,UAAU,OAAO,OAAO,OAAO;AAClD,wBAAgB,UAAU,OAAO,OAAO,OAAO;AAC/C,YAAI,UAAU;AACV,mBAAS;;AAEb,6BAAqB,gBAAgB,QAAQ,cAAc,WAAW,KAAK;AAC3E,iBAAS,aAAa;AACtB,uBAAe,aAAa;AAC5B,oBAAY,aAAa;AACzB,YAAI,MAAM,QAAQ;AACd,yBAAe,OAAO,MAAM;AAC5B,mBAAS,OAAO;;AAEpB,YAAK,iBAAgB,QAAQ,aAAa,WAAW,MACjD,aAAa;AACb,iBAAO,MAAM,MAAM,QAAQ;;AAE/B,iCAAyB;AACzB,gCAAwB;AACxB,YAAI,gBAAgB;AAChB,4BAAkB,aAAa;AAC/B,cAAI,YAAY,IAAI;AAChB,kBAAM,IAAI,WAAW;;AAIzB,iBAAO,kBAAkB;AACzB,2BAAiB,KAAK,GAAG;AACzB,6BAAmB,aACd,IAAI,WAAS,IAAI,UAAU,CAAE,OAAO,MAAM;AAC/C,eAAK,aAAa,YAAY,WAAW,MAAM,GAAG,YAAY;AAC9D,eAAK,cAAc,YAAY,WAAW,MAAM,YAAY;AAC5D,0BAAgB,KAAK,GAAG;;AAE5B,YAAI,aAAa;AACb,gBAAM,IAAI,oBAAoB;;AAGlC,iCAAyB,iBAAiB,cAAc;AACxD,8BAAqB;AACjB,cAAI,mBAAkB,mBAAmB;AACrC,kBAAM,IAAI,WAAW;;;AAI7B,YAAI;AAEA,4BAAkB,CAAC,QAAQ,OAAO;AAClC,gCAAsB,KAAK,UAAU,OAAO;AAU5C,oCAA0B,KAAK;AAC/B,eAAK,YAAY;AACjB,yBAAe,MAAM,MAAM,WAAW;AACtC,eAAK,YAAY;AACjB,iBAAO;;AAGP,iBAAO,MAAM,MAAM,QAAQ;;;MAGnC;AACI,eAAO,KAAK;AACR,+BAAqB,OAAO;AAC5B;AACA;AACA,cAAI,gBAAgB;AAChB,gBAAI,KAAK,aAAa,KAAK,QAAQ;AACnC,mBAAO,KAAK,cAAc,KAAK,QAAQ;;AAGvC,iCAAqB,aAAa,MAAM,GAAG,aAAa,SAAS;AACjE,kCAAsB,aAAa,MAAM,aAAa,SAAS;AAC/D,gBAAI,KAAK,aAAa,KAAK,QAAQ,OAAO,OAAO,QAAQ,CAAE,cAAc;AACzE,mBAAO,KAAK,cAAc,KAAK,QAAQ,OAAO,OAAO,QAAQ,CAAE,cAAc;;AAEjF;AACA,cAAI,KAAK;AACL,gBAAI,MAAM,QAAQ;AACd,uBAAS,EAAE,MAAM,GAAG,OAAO,KAAK,MAAM;;;AAI1C,gBAAI,EAAE;AACN,mBAAO,KAAK;;AAEhB,cAAI,KAAK;AACL,mBAAO,SAAY,MAAM;;AAE7B;AACA,cAAI,KAAK,cAAc;AACnB,qBAAS,YAAc,CAAC,GAAG;qBAEtB,KAAK,cAAc;AACxB,qBAAS,MAAQ,GAAG;qBAEf,KAAK,cAAc;AACxB,qBAAS,KAAQ,KAAI,MAAQ,GAAG;qBAE3B,KAAK,cAAc;AACxB,qBAAS,KAAQ,GAAG;qBAEf,KAAK,aAAa;AACvB,qBAAS,CAAC,GAAG;;AAGjB,cAAI,KAAK;AACL,gBAAI,KAAK,aAAa;AAClB,qBAAO,OAAO,OAAO;;AAEzB,mBAAO,CAAC,QAAQ,OAAO;;AAE3B,iBAAO;;;MAGf;AACI,aAAK,aAAa;AAClB,aAAK,cAAc;;MAEvB;AACI,kBAAU,KAAK,aAAa,MAAM;AAC9B,eAAK,aAAa,MAAM;;AAE5B,kBAAU,KAAK,cAAc,MAAM;AAC/B,eAAK,cAAc,MAAM;;AAE7B,aAAK,QAAQ;;MAEjB;AACI,YAAI,MAAM,QAAQ;AACd,iBAAO,KAAK;;AAEhB;AACA,YAAI,KAAK;AACL,cAAI,KAAK,aAAa;AAClB,yBAAa,CAAC,MAAM;;AAGpB,yBAAa;;;AAIjB,cAAI,KAAK,aAAa;AAClB,yBAAa,CAAC,MAAM;;AAGpB,yBAAa;;;AAGrB,YAAI,KAAK;AACL,yBAAe,KAAK,aAAa;AACjC,4BAAkB,OAAO,IAAI,WAAS;AACtC,cAAI,MAAM,QAAQ;AACd,mBAAO,WAAW,OAAO,WAAW,OAAO;;AAG3C,mBAAO,CAAC,YAAY,OAAO,WAAW,OAAO;;;AAIjD,iBAAO;;;UAGX;AACA,eAAO,KAAK,aAAa,iBAAiB,OAAO,KAAK,cAAc;;UAEpE;AACA,eAAO,KAAK,aAAa,oBAAoB,OAAO,KAAK,cAAc;;MAG3E;AACI,cAAM,6BAA6B;AACnC,YAAI,KAAK,gBAAgB;AACrB,eAAK,aAAa,6BAA6B;;AAEnD,YAAI,KAAK,iBAAiB;AACtB,eAAK,cAAc,6BAA6B;;;MAGxD;AACI,wBAAe;UACX,WAAa,KAAK;;AAGtB,2BAAmB,MAAM;AACzB,eAAO,OAAO,SAAQ;AACtB,eAAO;;aAGJ;AACH,yBAAiB,YAAY,QAAO;AACpC,eAAO,QAAO;AAEd,YAAI,QAAO,mBAAmB;AAC1B,gBAAM,IAAI,oBAAoB;;AAIlC,0BAAkB;AAClB,kBAAU,WAAW;AACrB,eAAO,IAAI,IAAI;;;AAIvB,kBAAc,YAAY;AAC1B,kBAA4B;ACle5B;;;;;;;;;AA+DO;AACH,aAAO,IAAI,WAAW;;AA2BnB;AACH,aAAO,IAAI,IAAI;;AAmBZ;AACH,aAAO,IAAI,KAAK;;AAsBb;AACH,aAAO,IAAI,UAAU;;AAuBlB;AACH,aAAO,IAAI,MAAM;;AAkBd;AACH,aAAO,IAAI,UAAQ;;AA0BhB;AACH,aAAO,IAAI,gBAAgB;;AAuBxB;AACH,aAAO,IAAI,OAAO;;AAoBf;AACH,aAAO,IAAI,SAAO;;AAqCf;AACH,aAAO,IAAI,gBAAgB;;AAoBxB;AACH,aAAO,IAAI,SAAO;;AA+Bf;AACH,aAAO,IAAI,gBAAgB;;AAiCxB;AACH,aAAO,IAAI,WAAW;;AA0BnB;AACH,aAAO,IAAI,aAAa;;AAarB;AACH,a
AAO,IAAI,gBAAgB;;AAkCxB;AACH,aAAO,IAAI,aAAW;;AAmCnB;AACH,aAAO,IAAI,MAAM;;AAYd;AACH,aAAO,IAAI,QAAQ;;AAkChB;AACH,aAAO,IAAI,iBAAiB;;AAqBzB;AACH,aAAO,IAAI,QAAQ;;AAgBhB;AACH,aAAO,IAAI,aAAa;;AAyBrB;AACH,aAAO,IAAI,UAAQ;;AA8BhB;AACH,aAAO,IAAI,QAAQ;;AAahB;AACH,aAAO,IAAI,UAAU;;AAuBlB;AACH,aAAO,IAAI,MAAI;;AAoBZ;AACH,aAAO,IAAI,QAAQ;;AAsBhB;AACH,aAAO,IAAI,YAAY;;AAoBpB;AACH,aAAO,IAAI,UAAQ;;AAoBhB;AACH,aAAO,IAAI,UAAQ;;AAqBhB;AACH,aAAO,IAAI,WAAS;;AAwBjB;AACH,aAAO,IAAI,IAAI;;AAwBZ;AACH,aAAO,IAAI,mBAAmB;;AAsB3B;AACH,aAAO,IAAI,mBAAmB;;AAyB3B;AACH,aAAO,IAAI,cAAc;;AActB;AACH,aAAO,IAAI,iBAAiB;;AAEzB;AACH,aAAO,iBAAiB;;AAIrB;AACH,aAAO,iBAAiB;;AAyBrB;AACH,aAAO,IAAI,iBAAiB;;AAEzB;AACH,aAAO,iBAAiB;;AAIrB;AACH,aAAO,iBAAiB;;AAuBrB;AACH,aAAO,IAAI,iBAAiB;;AAEzB;AACH,aAAO,iBAAiB;;AAIrB;AACH,aAAO,iBAAiB;;AAWrB;AACH,aAAO,IAAI,uBAAuB;;AAgB/B;AACH,aAAO,IAAI,uBAAuB;;AAW/B;AACH,aAAO,IAAI,mBAAmB;;AAgB3B;AACH,aAAO,IAAI,mBAAmB;;AAW3B;AACH,aAAO,IAAI,aAAa;;AAuBrB;AACH,aAAO,IAAI,aAAa;;AAuBrB;AACH,aAAO,IAAI,aAAa;;AA0BrB;AACH,aAAO,IAAI,IAAI;;AA+CZ;AACH,aAAO,IAAI,QAAQ;;AAyBhB;AACH,aAAO,IAAI,KAAK;;AA+Cb;AACH,aAAO,IAAI,SAAS;;AA0BjB;AACH,aAAO,IAAI,UAAU;;AA+ClB;AACH,aAAO,IAAI,cAAc;;AA6BtB;AACH,aAAO,IAAI,WAAW;;AAmCnB;AACH,aAAO,IAAI,eAAe;;AA8DvB;AACH,aAAO,IAAI,IAAI;;AASZ;AACH,aAAO,IAAI,gBAAgB;;AAIxB;AACH,aAAO,IAAI,cAAc;;AAgDtB;AACH,aAAO,IAAI,gBAAgB;;AAGxB,4BAAwB;AACxB,4BAAwB;AACxB,sBAAkB;AAClB,sBAAkB;AAyBlB;AACH,aAAO,IAAI,cAAc;;AA0BtB;AACH,aAAO,IAAI,gBAAgB;;AAiCxB;AACH,aAAO,IAAI,aAAa;;AAyBrB;AACH,aAAO,IAAI,QAAQ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACpjDhB;AACH,aAAO,eAAuB,OAAO;;AAmBlC;AACH,aAAO,qBAA2B,OAAO;;AAqBtC;AACH,aAAO,0BAAkC,OAAO;;AAoB7C;AACH,aAAO,oBAA4B,OAAO;;AAavC;AACH,aAAO,0BAAgC,OAAO;;AAqC3C;AACH,aAAO,UAAkB,OAAO;;AAqC7B;AACH,aAAO,OAAe,OAAO;;AAuB1B;AACH,aAAO,gBAAuB,OAAO;;AAsBlC;AACH,aAAO,kBAAyB,OAAO;;AAoBpC;AACH,aAAO,4BAAmC,OAAO;;AAE9C;AACH,aAAO,4BAAmC,OAAO;;AAE9C;AACH,aAAO,4BAAmC,OAAO;;AAoB9C;AACH,aAAO,mBAAwB,OAAO;;AAEnC;AACH,aAAO,mBAAwB,OAAO;;AAEnC;AACH,aAAO,mBAAwB,OAAO;;;;;;;;;;;;;;;;;;;;AC9R1C;;;;;;;;;;;;;ACAA;;;;;;;;;AAoBO;AACH,aAAO,IAAI,KAAK;;AAWb;AACH,aAAO,GAAgB;;AAWpB;AACH,aAAO,GAAgB;;;;;;;;AC7C3B;;;;;;;;;2BAc8B;MAC1B;AACI,cAAM,GAAG;AAET,aAAK,QAAQ;;MAEjB;AACI,YAAI,CAAE,mBAAiB;AACnB,gBAAM,IAAI,MAAM;;AAEpB,aAAK,QAAQ;;;AAGrB;AACI,aAAO,UAAU;;AAErB;AACI,aAAO,UAAU;;gCAMc;MAC/B;AACI;AACA,YAAI,QAAQ;AACR,iBAAO;;AAEX,YAAI,KAAK;AACL,gBAAM,IAAI,oBAAoB;;AAElC,aAAK,UAAU,KAAK,WAAW;AAC/B,aAAK,WAAW,KAAK,IAAI,KAAK,YAAY;AAC1C,aAAK,WAAW,KAAK,YAAY;AACjC,aAAK,UAAU,KAAK,WAAW;AAC/B,aAAK,OAAO,KAAK,QAAQ;AACzB,aAAK,WAAW,KAAK;AACrB,YAAI,CAAC,QAAQ,OAAO,OAAO,QAAQ,KAAK,UAAU;AAC9C,kBAAQ,KAAK,uBAAuB,KAAK;AAEzC,eAAK,OAAO;;AAEhB,YAAI,KAAK,SAAS;AACd,eAAK,cAAc;mBAEd,KAAK,SAAS;AACnB,eAAK,cAAc;;AAInB,cAAI,KAAK,QAAQ,QAAQ,WAAW;AAChC,iBAAK,cAAc;;AAGnB,iBAAK,cAAc;;;AAG3B,YAAI,KAAK,gBAAgB;AACrB,eAAK,YAAY;;;YAGnB;AACF,aAAK,OAAO;AACZ,aAAK,eAAe;AACpB,YAAI,KAAK,YAAY;AACjB,eAAK,OAAO,KAAK;;AAGjB,eAAK,OAAO,KAAK,gBAAgB,SAAO,WAAW;;;YAGrD;AACF,cAAM,qBAAqB;AAC3B,wBAAgB,KAAK,gBAAgB;AACrC,YAAI,WAAW;AACX;;AAEJ,YAAI,KAAK,YAAY,UAAU,KAAK,UAAU,KAAK;AAC/C,eAAK,OAAO;AACZ,eAAK,OAAO;;AAIZ,eAAK;AACL,cAAI,KAAK,QAAQ,KAAK;AAClB,iBAAK,eAAe;AACpB,iBAAK,MAAM,eAAe;;;;YAKhC;AACF,YAAI,KAAK,eAAe,KAAK,KAAK;AAC9B,kBAAQ,IAAI,SAAS,KAAK;;;MAGlC;AACI,YAAI,QAAQ;AACR,iBAAO;;AAEX,6BAAqB,KAAK,KAAK;AAC/B,YAAI,gBAAgB;AAChB,kBAAQ,KAAK,4BAA4B,KAAK,oDAChB,OAAO,KAAK;;AAE9C,eAAO;;;AA8CR;AACH,aAAO,IAAI,cAAc;;AAEjB,sBAAa,CAAE;ACzK3B;;;;;;;;;ACAA;;;;;;;;;;;;;;;;;AAkBO;AACP,IAAC;AACG,gBAAS,UAAS,gBAAgB,KAAK;AACvC,gBAAS,UAAS,cAAc,KAAK;AACrC,gBAAS,UAAS,eAAe,KAAK;AACtC,gBAAS,UAAS,cAAc,K
AAK;AACrC,gBAAS,UAAS,cAAc,KAAK;AACrC,gBAAS,UAAS,cAAc,KAAK;AACrC,gBAAS,UAAS,aAAa,KAAK;AACpC,gBAAS,UAAS,eAAe,KAAK;AACtC,gBAAS,UAAS,kBAAkB,KAAK;AACzC,gBAAS,UAAS,cAAc,KAAK;AACrC,gBAAS,UAAS,aAAa,MAAM;AACrC,gBAAS,UAAS,cAAc,MAAM;AACtC,gBAAS,UAAS,eAAe,MAAM;AACvC,gBAAS,UAAS,eAAe,MAAM;AACvC,gBAAS,UAAS,iBAAiB,MAAM;AACzC,gBAAS,UAAS,kBAAkB,OAAO;AAC3C,gBAAS,UAAS,mBAAmB,OAAO;AAC5C,gBAAS,UAAS,kBAAkB,OAAO;AAC3C,gBAAS,UAAS,kBAAkB,OAAO;AAC3C,gBAAS,UAAS,kBAAkB,OAAO;AAC3C,gBAAS,UAAS,iBAAiB,OAAO;AAC1C,gBAAS,UAAS,mBAAmB,OAAO;AAC5C,gBAAS,UAAS,sBAAsB,OAAO;AAC/C,gBAAS,UAAS,kBAAkB,OAAO;AAC3C,gBAAS,UAAS,iBAAiB,OAAO;AAC1C,gBAAS,UAAS,kBAAkB,OAAO;AAC3C,gBAAS,UAAS,mBAAmB,OAAO;AAC5C,gBAAS,UAAS,mBAAmB,OAAO;AAC5C,gBAAS,UAAS,qBAAqB,OAAO;OAC/C,YAAa,YAAW;AACpB;AACP,IAAC;AAEG;AACA,MAAC;AACG,iCAAwB,yBAAwB,YAAY,KAAK;AACjE,iCAAwB,yBAAwB,QAAQ,KAAK;AAC7D,iCAAwB,yBAAwB,QAAQ,KAAK;SAC9D,0BAA0B,UAAS,2BAA4B,WAAS,0BAA0B;OACtG,YAAa,YAAW;AC3D3B;;;;;;;;;;;;;;;;AAgBA,uBAAmB;AA0BZ;AACH,uBAAiB;QACb,UAAU;QACV,UAAU;QACV,QAAQ;QACR,OAAO;QACP,gBAAgB;;AAEpB,iBAAW,QAAQ;;AAShB;AACH,aAAO,WAAW;;AASf;AACH,aAAO,WAAW;;ACtEtB;;;;;;;;;;;;;;;;AAiBO;AACH,yBAAmB,KAAK,YAAY;AACpC,UAAI,cAAc,WAAW,oBAAoB;AAC7C,sBAAc,WAAW;AACzB,oBAAY,WAAW,kBAAkB,IACrC,SACC,WAAW,kBAAkB,SAAY,QAAQ,IAC9C,WAAW;AACnB,YAAI,WAAW,SAAS;AACpB,iBAAO,UAAU,KAAK,WAAW,WAAW,kBAAkB,WAAW,SAAS;;AAEtF,YAAI,WAAW,SAAS;AACpB,yBAAe,KAAK,WAAW,MAAM,OAAO;AAC5C,iBAAO,OAAO,IAAI,UAAQ,UAAU,MAAM,WAAW,SAAS;;AAElE,wBAAe,UAAU,KAAK,WAAW,MAAM,OAAO,IAAI,WAAW,SAAS;AAC9E,sBAAa,QAAO;AACpB,eAAO,WAAW,SAAS,WACvB,MAAK,KACL,eAAmB,QAAO,OAAO;;AAEzC,wBAAkB,KAAK,WAAW;AAClC,aAAO,aAAa,UAAU;;AAS3B;AACH,iCAA0B,cAAc;AACxC,UAAI,mBAAmB;AACnB,wBAAe,gBAAgB,yBAAyB;AACxD,YAAI,WAAU;AACV,iBAAO;;;AAGf,wBAAkB,QAAQ,kBAAkB,KAAK;AAC7C,eAAO,CAAC,CAAC,WAAW,yBAAyB,UAAU;;AAE3D,aAAO,cAAc,SACjB,WAAW,yBAAyB,UAAU,YAAY,UAC1D;;AAOD;AACH,aAAO,WAAW,yBAAyB,MAAM,QAAQ;;AAQtD;AACH,iCAA0B,cAAc;AACxC,aAAO;QACH,yBAAyB,UAAU,WAAW,QAAQ;QACtD;;;AAGR;AACI,aAAO,CAAC,CAAC,YAAY,GAAG,QAAQ,cAAc;;AAE3C;AACH,oBAAc,KAAK,MAAM;AACzB,UAAI,MAAM,WAAW;AACjB,eAAO,CAAC,MAAM;;AAElB,uBAAiB,MAAM;AACvB,aAAO,CAAC,UAAU,OAAO,MAAM,MAAM,SAAS;;AAE3C;AACH,kBAAY;AACZ,mBAAa,GAAG,IAAI,IAAI,QAAQ,KAAK;AACjC,YAAI,KAAK,IAAI,MAAM,GAAG,IAAI;;AAE9B,aAAO;;AAEJ;AACH,iBAAU,cAAc,OAAO,MAAM,WAAW;AAChD,UAAI,SAAQ;AAER,eAAM,cAAc,oBAAoB,MAAM,WAAW;AACzD,gCAAwB,CAAC,CAAC,GAAG,IAAI,CAAC,GAAG,IAAI,CAAC,GAAG,IAAI,CAAC,GAAG;AACrD,qBAAa,GAAG,IAAI,GAAG;AACnB,0BAAgB,GAAG,KAAK,KAAI,IAAI;AAChC,0BAAgB,GAAG,KAAK,KAAI,IAAI,IAAI;;AAExC,eAAO;;AAEX,aAAO;;AAWJ;AACH,aAAO,QAAO,OAAO,UAAS,MAAM;;AC9HxC;;;;;;;;;;;;;;;;AAgBO,iBAAa;MAChB;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU,CAAC,CAAE,OAAS,GAAG,KAAO,GAAG,MAAQ,WAAW,MAAQ;;MAElE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;
UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;;MAG3C;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;;MAG3C;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;;;;;;AClL/E;;;;;;;;;;;;;;;;AAgBO,mBAAa;MAChB;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,kBAAkB,MAAQ,gBAAgB,MAAQ;UAC9D,CAAE,QAAU,kBAAkB,MAAQ,gBAAgB,MAAQ;;;MAGtE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;UACtC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;;QAE1C,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,
UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;UAAQ;YACvE,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;MAI5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;UAAQ;YACvE,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;MAI5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;;QAE3C,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;UAAQ;YACvE,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;MAI5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KA
AK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;;QAE1C,OAAS;UACL;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;;;;;;AC5dzB,mBAAa;MAChB;QACI,UAAY;QACZ,UAAY;QACZ,QAAU,CAAC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;;MAErD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;UACtC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;;;MAG9C;QACI,UAAY;QACZ,UAAY;QACZ,QAAU,CAAC,CAAE,OAAS,GAAG,KAAO,GAAG,MAAQ,WAAW,MAAQ;;MAElE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;UACnE,CAAE,QAAU,cAAc,MAAQ,aAAa,MAAQ;UACvD,CAAE,QAAU,eAAe,MAAQ,cAAc,MAAQ;;;MAGjE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;;QAE1C,OAAS;UACL,CAAE,QAAU,SAAS,MAAQ,SAAS,MAAQ;UAC9C,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;UAC7D,CAAE,QAAU,gBAAgB,MAAQ,eAAe,MAAQ;UAC3D,CAAE,QAAU,oBAAoB,MAAQ,kBAAkB,MAAQ;UAClE;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;;UAEZ,CAAE,QAAU,qBAAqB,MAAQ,QAAQ,MAAQ;;;MAGjE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,iBAAiB,MAAQ;UAC/C,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,iBAAiB,MAAQ;UAC/C,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS,CAAC;UACF,QAAU;UACV,MAAQ;UACR,MAAQ;UACR,cAAgB;;;MAG5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,iBAAiB,MAAQ;UAC/C,CAAE,OAAS,GAAG,MAAQ,WAAW,MAAQ;UACzC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,SAAS,MAAQ,SAAS,MAAQ;UAC9C,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;;;MAGrE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,iBAAiB,MAAQ;UAC/C,CAAE,OAAS,GAAG,MAAQ,WAAW,MAAQ;UACzC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS,CAAC,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ;;MAExD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,iBAAiB,MAAQ;UAC/C,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,SAAS,MAAQ,SAAS,MAAQ;UAAW;YACrD,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;MAI5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,iBAAiB,MAAQ;UAC/C,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,WAAW,MAAQ;UACzC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS,CAAC,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ;;MAExD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,iBAAiB,MAAQ;UAC/C,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;;MAGhD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU,CAAC,CAAE,OAAS,GAAG,MAAQ,iBAAiB,MAAQ;;MAE9D;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;UACtC,CAAE,OAAS,GAAG,KAAO,GAAG,MAAQ,QAAQ,MAAQ;;QAEpD,OAAS;UACL,CAAE,QAAU,eAAe,MAAQ,cAAc,MAAQ;UACzD,CAAE,QAAU,eAAe,MAAQ,cAAc,MAAQ;;;MAGjE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;UACtC,CAAE,OAAS,GAAG,KAAO,GAAG,MAAQ,QAAQ,MAAQ;;QAEpD,OAAS;UACL,CAAE,QAAU,eAAe,MAAQ,cAAc,MAAQ;UACzD,CAAE,QAAU,eAAe,MAAQ,cAAc,MAAQ;;;MAGjE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,KAAO,GAAG,MAAQ,QAAQ,MAAQ;;QAEpD,OAAS;UACL,CAAE,QAAU,QAAQ,MAAQ,QAAQ,MAAQ;UAC5C,CAAE,QAAU,QAAQ,MAAQ,QAAQ,MAAQ;;;MAGpD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,KAAO,GAAG,
MAAQ,QAAQ,MAAQ;;QAEpD,OAAS;UACL,CAAE,QAAU,QAAQ,MAAQ,QAAQ,MAAQ;UAC5C,CAAE,QAAU,QAAQ,MAAQ,QAAQ,MAAQ;;;MAGpD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,WAAW,MAAQ;UACzC,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;;QAElD,OAAS,CAAC,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;;MAE3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,WAAW,MAAQ;UACzC,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;UAC9C,CAAE,OAAS,GAAG,MAAQ,eAAe,MAAQ;;QAEjD,OAAS,CAAC,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;;MAE3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;UAC9C,CAAE,OAAS,GAAG,MAAQ,WAAW,MAAQ;UACzC,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;;QAElD,OAAS,CAAC,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;;MAE3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;UAC9C,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;;QAElD,OAAS,CAAC,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;;MAE3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;UAC9C,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS,CAAC,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;;MAE3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;UAC9C,CAAE,OAAS,GAAG,MAAQ,eAAe,MAAQ;;QAEjD,OAAS,CAAC,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;;MAE3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;;QAElD,OAAS,CAAC,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;;MAE3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;UAC9C,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;;QAElD,OAAS;UACL,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;UAC7D,CAAE,QAAU,gBAAgB,MAAQ,eAAe,MAAQ;;;MAGnE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;UAC9C,CAAE,OAAS,GAAG,MAAQ,WAAW,MAAQ;;QAE7C,OAAS,CAAC,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;;MAE3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;;QAElD,OAAS;UACL,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;UAC7D,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;;;MAGrE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;UAC9C,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;;QAElD,OAAS,CAAC,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;;MAE3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;UAC9C,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,iBAAiB,MAAQ,gBAAgB,MAAQ;;;;;;;;ACvUzE;;;;;;;;;;;;;;;;AAgBO,mBAAa;MAChB;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAClD,CAAE,QAAU,WAAW,MAAQ,OAAO,MAAQ;UAAY;YACtD,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,SAAS,MAAQ,cAAc,MAAQ;UACnD,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAClD,CAAE,QAAU,WAAW,MAAQ,OAAO,MAAQ;UAAY;YACtD,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,SAAS,MAAQ,cAAc,MAAQ;UACnD,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAClD,CAAE,QAAU,WAAW,MAAQ,OAAO,MAAQ;UAC9C,CAAE,QAAU,SAAS,MAAQ,cAAc,MAAQ;UAAc;YAC7D,QAAU;YACV,MAAQ;YACR,MAAQ;;UAEZ,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAClD,CAAE,QAAU,WAAW,MAAQ,OAAO,MAAQ;UAAY;YACtD,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,SAAS,MAAQ,cAAc,MAAQ;UACnD,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CA
AE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS;UACL,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAClD,CAAE,QAAU,WAAW,MAAQ,OAAO,MAAQ;UAAY;YACtD,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,SAAS,MAAQ,cAAc,MAAQ;UACnD,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,UAAU,MAAQ,UAAU,MAAQ;UAChD,CAAE,QAAU,WAAW,MAAQ,OAAO,MAAQ;UAAY;YACtD,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;UAAQ;YACvE,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;MAI5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;UACnE,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAClD,CAAE,QAAU,WAAW,MAAQ,OAAO,MAAQ;UAC9C,CAAE,QAAU,iBAAiB,MAAQ,iBAAiB,MAAQ;UAAU;YACpE,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,aAAa,MAAQ,aAAa,MAAQ;;;MAG9D;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,KAAK,GAAG,MAAQ,QAAQ,MAAQ;;QAElD,OAAS;UACL,CAAE,QAAU,YAAY,MAAQ,WAAW,MAAQ;UACnD,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;UACnE,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAClD,CAAE,QAAU,WAAW,MAAQ,OAAO,MAAQ;UAC9C;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB,CAAC,GAAG,GAAG,GAAG;;UAE9B;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;MAI5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,eAAe,MAAQ;;QAEjD,OAAS;UACL,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAClD,CAAE,QAAU,WAAW,MAAQ,OAAO,MAAQ;UAC9C;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;MAI5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAClD,CAAE,QAAU,WAAW,MAAQ,OAAO,MAAQ;UAAY;YACtD,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,aAAa,MAAQ,aAAa,MAAQ;;;MAG9D;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAClD,CAAE,QAAU,WAAW,MAAQ,OAAO,MAAQ;UAAY;YACtD,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,aAAa,MAAQ,aAAa,MAAQ;;;MAG9D;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,KAAK,GAAG,MAAQ,QAAQ,MAAQ;;QAElD,OAAS;UACL,CAAE,QAAU,YAAY,MAAQ,WAAW,MAAQ;UACnD,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;UACnE,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAClD,CAAE,QAAU,WAAW,MAAQ,OAAO,MAAQ;UAAY;YACtD,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB,CAAC,GAAG,GAAG,GAAG;;UAE9B;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;;;MAI5B;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAClD,CAAE,QAAU,WAAW,MAAQ,OAAO,MAAQ;UAAY;YACtD,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,aAAa,MAAQ,aAAa,MAAQ;;;MAG9D;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;UACnC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;;QAE5C,OAAS;UACL,CAAE,QAAU,WAAW,MAAQ,WAAW,MAAQ;UAClD,CAAE,QAAU,SAAS,MAAQ,aAAa,MAAQ;UAClD,CAAE,QA
AU,WAAW,MAAQ,OAAO,MAAQ;;;;;;;;ACnV1D;;;;;;;;;;;;;;;;AAgBO,mBAAa;MAChB;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;;QAE3C,OAAS,CAAC,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ;;MAExD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;UACtC,CAAE,OAAS,GAAG,MAAQ,OAAO,MAAQ;;QAEzC,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,WAAW,MAAQ;UACzC,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,WAAW,MAAQ,UAAU,cAAgB;UACnE,CAAE,OAAS,GAAG,MAAQ,YAAY,MAAQ,UAAU,cAAgB;;QAExE,OAAS;UACL;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;;;MAG3E;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;;QAE3C,OAAS,CAAC,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ;;MAExD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS,CAAC,CAAE,QAAU,SAAS,MAAQ,SAAS,MAAQ;;MAE5D;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;;QAE3C,OAAS;UACL;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,SAAS,MAAQ,SAAS,MAAQ;UAC9C,CAAE,QAAU,QAAQ,MAAQ,QAAQ,MAAQ,UAAU,cAAgB;UAAK;YACvE,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;YAChB,cAAgB;;UAEpB,CAAE,QAAU,KAAK,MAAQ,KAAK,MAAQ,UAAU,cAAgB;;;MAGxE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ;UACtC,CAAE,OAAS,GAAG,MAAQ,QAAQ,MAAQ,UAAU,cAAgB;;QAEpE,OAAS,CAAC,CAAE,QAAU,QAAQ,MAAQ,SAAS,MAAQ;;MAE3D;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;;QAE3C,OAAS;UACL;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB,CAAE,QAAU,QAAQ,MAAQ,QAAQ,MAAQ;UAAY;YACpD,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;YAChB,cAAgB;;UAEpB,CAAE,QAAU,SAAS,MAAQ,SAAS,MAAQ;UAC9C,CAAE,QAAU,KAAK,MAAQ,KAAK,MAAQ,UAAU,cAAgB;;;MAGxE;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;;QAE3C,OAAS,CAAC,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ;;MAExD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,KAAK,MAAQ;;QAEvC,OAAS,CAAC,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ;;MAExD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,cAAc,MAAQ;;QAEhD,OAAS;UACL,CAAE,QAAU,QAAQ,MAAQ,QAAQ,MAAQ;UAC5C,CAAE,QAAU,SAAS,MAAQ,SAAS,MAAQ;UAC9C,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ;UAC1C,CAAE,QAAU,gBAAgB,MAAQ,gBAAgB,MAAQ;;;;;;;;AC1KxE;;;;;;;;;;;;;;;;AAgBO,mBAAa;MAChB;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,iBAAiB,MAAQ;UAC/C,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;;;MAGtD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,iBAAiB,MAAQ;UAC/C,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;UAC9C,CAAE,OAAS,GAAG,MAAQ,kBAAkB,MAAQ;;;MAGxD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,iBAAiB,MAAQ;UAC/C,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;UAC9C,CAAE,OAAS,GAAG,MAAQ,kBAAkB,MAAQ;;QAEpD,OAAS;UACL,CAAE,QAAU,KAAK,MAAQ,SAAS,MAAQ,SAAS,cAAgB;UAAQ;YACvE,QAAU;YACV,MAAQ;YACR,MAAQ;YACR,cAAgB;;UAEpB;YACI,QAAU;YACV,MAAQ;YACR,MAAQ;;;;MAIpB;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,SAAS,MAAQ;UACvC,CAAE,OAAS,GAAG,MAAQ,UAAU,MAAQ;UACxC,CAAE,OAAS,GAAG,MAAQ,iBAAiB,MAAQ;UAC/C,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;UAC9C,CAAE,OAAS,GAAG,MAAQ,kBAAkB,MAAQ;UAChD,CAAE,OAAS,GAAG,MAAQ,gBAAgB,MAAQ;;;MAGtD;QACI,UAAY;QACZ,UAAY;QACZ,QAAU;UACN,CAAE,OAAS,GAAG,MAAQ,aAAa,MAAQ;;QAE
[machine-generated build artifact: base64 VLQ source-map mappings for the bundled dist output; omitted as it is not human-reviewable]
O,SAAS,YAAY,OAAO,eAAe,iBAAiB,iCAAiC;AACnJ,gBAAM,QAAQ,IAAI;;AAEtB,YAAI,eAAe,QAAQ,CAAC;AACxB,kBAAQ,KAAK;;AAGjB,+BAAuB,YAClB,OAAO,UAAQ,CAAC,cAAc,SAC/B,CAAC,UAAU,KAAK,MAAM,YAAY,UACjC,IAAI,UAAQ,KAAK;AACtB,YAAI,eAAe,SAAS;AACxB,+BAAqB;AACrB,cAAI,eAAe;AACf,6BACI,wFAC+B;;AAEvC,gBAAM,IAAI,MAAM,+BAA+B,6CAChC,qDACP,mBAAmB;;AAE/B,eAAO;;MAEX;AACI,yBAAiB;AACjB,eAAO,OAAM,SAAS;AAClB,uBAAa,OAAM;AACnB,kBAAQ,iBAAiB,KAAK;AAC9B,yBAAe;AAIf,cAAI,KAAK,KAAK,OAAO,WACjB,cAAc,cAAc,KAAK,MAAM,WAAW;AAClD,aAAC,YAAY,oBAAoB,KAAK,KAAK,MAAM;;AAIrD,cAAI,UAAU,KAAK,KAAK,SAAS;AAC7B,4BAAgB,YAAU,KAAK,MAAM,WAAW,SAAS,KAAK;AAC9D,gBAAI,CAAC;AACD,eAAC,YAAY,oBAAoB,KAAK,KAAK,MAAM;;AAErD,mCAAuB,QAAQ;AAC/B,gBAAI,WAAe;AACf,uBAAS,KAAK,QAAQ,KAAK;AACvB,0BAAU,YAAY;AACtB,wBAAQ,iBAAiB;AACzB,qBAAK,uBAAuB,UAAU,KAAK,MAAM,WAAW,SAAS,eAAe,aAAa;AACjG,qBAAK,kBAAkB,KAAK,MAAM,QAAO,SAAS,WAAW,OAAO;AACpE,uBAAO;;;AAIX,wBAAU,YAAY;AACtB,mBAAK,uBAAuB,UAAU,KAAK,MAAM,WAAW,SAAS,eAAe,aAAa;AACjG,mBAAK,kBAAkB,KAAK,MAAM,QAAO,SAAS,WAAW,OAAO;;;AAIxE,iBAAK,kBAAkB,KAAK,MAAM,QAAO,SAAS,WAAW,OAAO;;;AAG5E,eAAO;;MAEX;AACI,aAAK,SAAS,QAAQ;AAClB,6BAAoB,oBAAoB,UAAU,MAAM;AACxD,cAAI,MAAM,aAAa,CAAC,UAAU,IAAI,UAAU;AAC5C;;AAGJ,cAAI,UAAU,OAAO;AACjB,gBAAI,UAAU,WAAW,KAAK;AAC1B,qBAAO,CAAC,CAAC,UAAU,MAAM,WAAW;;AAEpC,oBAAM,YAAY;AAClB,qBAAM,KAAK,CAAE,UAAU,QAAQ,gBAAgB,MAAM;;qBAIxD,UAAU,WAAW,MAAM;AAC5B,mBAAO,CAAC,CAAC,UAAU,MAAM,WAAW;;AAEpC,kBAAM,YAAY;AAClB,mBAAM,KAAK,CAAE,UAAU,QAAQ,gBAAgB,MAAM;;;;MAOjE;AACI,eAAO,KAAK,KAAK,WACZ,QAAQ,SAAO,KAAK,UAAU,KAAK,QAAQ,aAAU,QAAO;;MAErE;AACI,eAAO,KAAK,QAAQ,QAAQ;AACxB,yBAAc,OAAO;AACrB,6BAAoB,cAAc;AAClC,uBAAa,KAAK,MAAM,MAAM;AAC9B,cAAI,KAAK,WAAW,YAAY,KAAK,WAAW,SAAS;AACrD,0BAAc,KAAK,WAAW,SAAS;AACvC,0BAAc,MAAM,WAAW,OAAM,MAAM,UACvC,OAAM,MAAM,MAAM,iBAAgB,MAAM,YAAW,MAAM,MAAM,YAAW;AAC9E,oBAAY,OAAO,MAAM,sBAAsB,KAAK,mDAChB,oBAC5B,OAAM;;AAElB,cAAI,KAAK,WAAW,YAAY,KAAK,WAAW,SAAS;AACrD,oBAAY,OAAM,UAAU,KAAK,WAAW,SAAS,OAAO,MAAM,sBAAsB,KAAK,kDAEtF,KAAK,WAAW,SAAS,kBAAkB,OAAM;;;;MAIpE;AACI,uBAAe;AACf,gCAAwB;AACpB,cAAI,KAAK,cAAc,QAAQ,KAAK,WAAW,UAAU,QACrD,KAAK,WAAW,OAAO,cAAc;AACrC,4BAAe,KAAK,WAAW,OAAO;AACtC,mBAAO,QAAO,QAAQ,OAAO;;AAG7B,mBAAO,aAAa,OAAO;;;AAGnC,eAAO;;MAEX;AACI,2BAAmB,OAAO,KAAK,QAAQ,OAAO;AAC1C,6BAAmB,cAAc;AACjC,iBAAO,KAAK,MAAM,MAAM,aAAa;;AAEzC,YAAI,WAAW,SAAS;AACpB,gBAAM,IAAI,MAAM,uDACF;;;MAGtB;AACI,eAAO,QAAQ,IAAI;AACf,cAAI,KAAK,cAAc,QAAQ,KAAK,WAAW,WAAW,QACtD,KAAK,WAAW,QAAQ,SAAS;AACjC,4BAAe,KAAK,WAAW,QAAQ;AACvC,mBAAO,QAAO;;AAElB,iBAAO;WACR;;MAEP;AACI,gBAAQ,QAAQ;AACZ,mCAAyB,cAAc;AACvC,cAAI,CAAC,KAAK,MAAM,MAAM;AAClB,kBAAM,IAAI,MAAM,eAAe;;;;;;MCpf3C,oCAAoC,mBAAmB;AACnD,aAAK,wBAAwB;AAC7B,aAAK,eAAe;;MAWxB;AACI,aAAK,sBAAsB,QAAQ,WAAU;AAC7C,aAAK,aAAa,WAAU,MAAM;;MAOtC;AACI,eAAO,KAAK,sBAAsB;;MAMtC;AACI,eAAO,KAAK,aAAa;;MAK7B;AACI,0BAAkB,KAAK;AACnB,eAAK,aAAa,KAAK;AACvB,iBAAO,KAAK,aAAa;;AAE7B,2BAAmB,KAAK;AACpB,eAAK,sBAAsB,MAAM;AACjC,iBAAO,KAAK,sBAAsB;;;;AC9C9C;;;;;;;;;;;;;;;;AAoBO,+BAA2B;AAC3B,+BAA2B;;MAqB9B,oCAAoC;AAChC,aAAK,WAAW;AAChB,aAAK,cAAc;AACnB,aAAK,UAAU;AACf,YAAI,eAAe;AACf,eAAK,cAAc;;AAEvB,aAAK,kBAAkB,IAAI;;UAG3B;AACA,eAAO,KAAK;;UAEZ;AACA,eAAO,KAAK,SAAS;;UAErB;AACA,eAAO,KAAK,SAAS;;UAErB;AACA,eAAO,KAAK,SAAS;;UAErB;AACA,eAAO,KAAK,SAAS;;UAErB;AACA,eAAO,KAAK,SAAS;;MAEzB;AACI,qBAAa,KAAK;AAClB,YAAI,KAAK,QAAQ;AAEb,eAAK,UAAU;mBAEV,KAAK,YAAY,eAAe;AACrC,eAAK,UAAU,mBAAsB,MAAM,KAAK;;AAGhD,2BAAiB,gBAAmB,MAAM,KAAK;AAC/C,cAAI,SAAS,WAAW;AAGpB,qBAAS,KAAK,mBAAsB,MAAM,KAAK;qBAE1C,SAAS,SAAS;AACvB,kBAAM,IAAI,MAAM,wBAAwB,SAAS,kCACrC,CAAC;;AAEjB,eAAK,UAAU,SAAS;;;YAO1B;AACF,aAAK;AACL,YAAI,KAAK,QAAQ,QAAQ;AACrB,gBAAM,IAAI,MAAM;;AAGpB,0BAAkB,MAAM,KAAK,QAAQ;AACrC,eAAO,KAAK,SAAS;;MAQzB;AACI,aAAK,YAAY;AAC
jB,uBAAc,KAAK,UAAU;AAC7B,wBAAgB;AAChB,YAAI,KAAK,UAAU,uBAAuB;AACtC,sBACI,KAAK,UAAU,oBAAoB;;AAE3C,aAAK,UAAU,GAAG,OAAM,SAAS,YAAY,OAAM,SAAS;AAC5D,0BAAkB,cAAiB,KAAK,UAAU,YAAY,KAAK,UAAU;AAC7E,aAAK,WAAW,IAAI,cAAc,gBAAgB,SAAS,eAAe,QAAO;AACjF,aAAK,SAAS,YAAY,KAAK,6BAA6B;AAG5D,aAAK,SAAS,kBAAkB,KAAK;AACrC,YAAI,UAAU,oBAAoB;AAC9B,8BAAoB,gBAAgB,SAAS,eAAe,UAAU;AACtE,eAAK,cAAc,IAAI,cAAc;AACrC,eAAK,YAAY,YAAY,KAAK,SAAS;AAI3C,eAAK,YAAY,kBAAkB,KAAK;AACxC,eAAK,YAAY,aAAa,IAAI;;AAEtC,eAAO;;YA8CL;AACF,YAAI,OAAO,iBAAiB;AACxB,2BAAiB,gBAAmB;AACpC,cAAI,SAAS,WAAW;AACpB,kBAAM,IAAI,MAAM,0CAA0C;qBAErD,SAAS,SAAS;AACvB,kBAAM,IAAI,MAAM,wBAAwB,SAAS,kCACrC;;AAEhB,yBAAe,SAAS;;AAE5B,YAAI,aAAa,QAAQ;AACrB,gBAAM,IAAI,MAAM;;AAGpB,eAAO,aAAa,KAAK,KAAK;;MAwClC;AACI,eAAO,KAAK,QAAQ,QAAQ,KAAK;;MAErC;AACI,YAAI,CAAE,mBAAkB,YAAW,CAAC,MAAM,QAAQ;AAE9C,iBAAO;;AAEX,iBAAS,MAAM,QAAQ,UAAU,SAAS,CAAC;AAC3C,YAAI,OAAO,WAAW,KAAK,WAAW;AAClC,gBAAM,IAAI,MAAM,mDACW,KAAK,WAAW,wCACpB,OAAO;;AAElC,eAAO,KAAK,WAAW,OAAO;AAC1B,cAAI,aAAa,OAAO;AACxB,iBAAO;WACR;;MAEP;AACI,kBAAU,WAAW,KAAK;AAC1B,eAAO,CAAC,MAAM,QAAQ,WAAW,CAAC,WAAW;;MAkBjD;AACI,iBAAS,KAAK,gBAAgB;AAC9B,kBAAU,KAAK,iBAAiB;AAChC,uBAAe,KAAK,SAAS,QAAQ,QAAQ;AAC7C,eAAO,OAAO,SAAS,IAAI,SAAS,OAAO;;YAkBzC;AACF,iBAAS,KAAK,gBAAgB;AAC9B,kBAAU,KAAK,iBAAiB;AAChC,uBAAe,MAAM,KAAK,SAAS,aAAa,QAAQ;AACxD,eAAO,OAAO,SAAS,IAAI,SAAS,OAAO;;MAE/C;AACI,eAAO,OAAO,KAAK,KAAK,OAAO;AAC3B,iBAAO,OAAO,CAAC,IAAI;AACnB,iBAAO;WACR;;MAOP;AACI,aAAK,SAAS;AACd,YAAI,KAAK;AACL,eAAK,YAAY;;AAErB,aAAK,gBAAgB;;;AAiCtB,uDAAkD;AACrD,UAAI,YAAY;AACZ,cAAM,IAAI,MAAM;;AAGpB,UAAI,WAAW;AACX,kBAAU;;AAEd,UAAI,QAAQ;AACR,YAAI,SAAS,QAAQ;AACjB,cAAI,CAAC,SAAS,SAAS;AACnB,uBAAW,WAAW;;AAE1B,qBAAW,GAAG,WAAW,qBAAqB;;;AAGtD,qBAAc,IAAI,WAAW,UAAU;AACvC,YAAM,OAAM;AACZ,aAAO;;ACrXX;AAEK,sBAAW;ACFhB;;;;;;;;;;;;;;;;ACAA;;;;;;;;;;;;;;;;;AAmCO;AACH,aAAO,gBAAgB,QAAO;;AAQlC,mDAA8C,IAAI,qBAAqB,IAAI;AACvE,UAAI,UAAS;AACT,eAAO;;AAEX,UAAI,YAAY,IAAI;AAChB,cAAM,IAAI,MAAM;;AAEpB,UAAI,KAAK,IAAI;AACT,eAAO,KAAK,IAAI;;AAEpB,qBAAe,MAAM;AACrB,UAAI,OAAO,WAAW,OAAO,UAAU;AACnC,cAAM,IAAI,MAAM;;AAEpB,UAAI,CAAC,OAAO;AACR,aAAK,IAAI,QAAO,OAAO;AACvB,eAAO,OAAO;iBAET,aAAW;AAEhB,+BAAuB,MAAM,QAAQ,UAAS,KAAK;AACnD,oBAAY,IAAI;AAChB,wBAAgB;AACZ,wBAAc,OAAM;AACpB,8BAAoB,gBAAgB,OAAO,OAAO,MAAM;AACxD,yBAAe,KAAK;;AAExB,oBAAY,OAAO;AACnB,eAAO;;AAGP,cAAM,IAAI,MAAM,yCAAyC;;;AA2B1D,qCAAiC;AACpC,aAAO,gBAAgB,QAAQ;;AAMnC,0DAAsD,IAAI;AAGtD,qBAAc,OAAO;AACrB,UAAI,YAAY,IAAI;AAChB,cAAM,IAAI,MAAM;;AAEpB,qBAAe,MAAM;AACrB,UAAI,OAAO,WAAW,OAAO,UAAU;AACnC,cAAM,IAAI,MAAM;;AAEpB,UAAI,CAAC,OAAO;AACR,eAAO,OAAO;iBAET,aAAW;AAEhB,+BAAuB,MAAM,QAAQ,UAAS,KAAK;AACnD,oBAAY,IAAI;AAChB,wBAAgB;AACZ,2BAAiB,OAAO,IAAI,OAAK,EAAE;AACnC,8BAAoB,gBAAgB,UAAU,OAAO;AACrD,yBAAe,KAAK;;AAExB,oBAAY,OAAO;AACnB,eAAO;;AAGP,cAAM,IAAI,MAAM,yCAAyC;;;AAI1D;AACH,UAAI,MAAM;AACN,eAAO;;AAGX,UAAI,aAAW,EAAE;AACb,eAAO,CAAE,OAAO,MAAM,SAAS;;AAG/B,eAAO,CAAE,OAAO,GAAG,SAAS;;;AAyB7B;AACH,mBAAa,IAAI;AAEjB,sBAAgB,QAAO,OAAO;AAK9B,wBAAkB,MAAM,KAAK,KAAK;AAC9B,sBAAc,KAAK,IAAI;AACvB,YAAI,WAAkB;AAClB,8BAAoB,MAAM;AAC1B,eAAK,IAAI,KAAK;;;AAMtB,qBAAe,gBAAgB,QAAO,OAAO;AAC7C,aAAO;;AAQJ;AACH,aAAO,OAAO,QAAS,CAAC,YAAY,OAAO,QACtC,OAAM,QAAQ,QACV,OAAO,QAAQ,YAAY,CAAE,gBAAe;;AAWlD;AACH,aAAO,OAAO,QAAQ,YAAY,QAAQ,MAAM,QAAQ,QACnD,OAAO,QAAQ,YAAa,eAAe,WAC5C,cAAqB;;AAM7B;AACI,aAAQ,UAAU,QACb,OAAO,UAAU,YAAY,OAAO,UAAU;;AClOvD;;;;;;;;;;;;;;;;;AAmBO;AACH,aAAO,QAAQ,WAAW;;AAG9B;AACI,UAAI,gBAAgB;AAChB,eAAQ,CAAE,OAAO,KAAK,SAAS,SAAS;iBAEnC,aAAW;AAChB,eAAO,CAAE,OAAO,MAAM,SAAS;;AAG/B,eAAO,CAAE,OAAO,MAAM,SAAS;;;AC/BvC;;;;;;;;;;;;;;;;;;MAyBI;AACI,aAAK,WAAW;AAIhB,aAAK,QAAQ;AACb,aAAK,MAAM;AACX,YAAI,YAAY;AACZ,gBAAM,IAAI,WAAW;;AAEzB,YAAI,
WAAW;AACX,gBAAM,IAAI,WAAW;;AAEzB,aAAK,OAAO,IAAI,MAAM;AACtB,aAAK,kBAAkB,IAAI;;MAK/B;AAEI,eAAO,SAAQ;AACX,oBAAS,KAAK;;AAElB,eAAO,SAAQ,KAAK;;MAExB;AACI,YAAI,SAAQ;AACR,gBAAM,IAAI,WAAW;;AAEzB,eAAO,KAAK,KAAK,SAAQ,KAAK;;MAElC;AACI,YAAI,SAAQ;AACR,gBAAM,IAAI,WAAW;;AAEzB,aAAK,KAAK,SAAQ,KAAK,YAAY;;MAKvC;AACI,qBAAa,KAAK,MAAM,KAAK;AAC7B,YAAI,SAAS;AACT,mBAAS,KAAK,kBAAkB;;AAEpC,eAAO;;MAOX;AACI,eAAO,KAAK,aAAa,KAAK;;MAOlC;AACI,eAAO,KAAK,aAAa;;MAK7B;AACI,YAAI,KAAK;AACL,gBAAM,IAAI,WAAW;;AAEzB,aAAK,IAAI,KAAK,KAAK;AACnB,aAAK,MAAM,KAAK,KAAK,KAAK,MAAM;;MAKpC;AACI,4BAAoB;AAChB,eAAK,KAAK;;;MAMlB;AACI,YAAI,KAAK;AACL,gBAAM,IAAI,WAAW;;AAEzB,aAAK,MAAM,KAAK,KAAK,KAAK,MAAM;AAChC,uBAAe,KAAK,IAAI,KAAK;AAC7B,aAAK,IAAI,KAAK,KAAK;AACnB,eAAO;;MAKX;AACI,YAAI,KAAK;AACL,gBAAM,IAAI,WAAW;;AAEzB,aAAK,QAAQ,KAAK,KAAK,KAAK,QAAQ;AACpC,aAAK,IAAI,KAAK,OAAO;;MAKzB;AACI,YAAI,KAAK;AACL,gBAAM,IAAI,WAAW;;AAEzB,uBAAe,KAAK,IAAI,KAAK;AAC7B,aAAK,IAAI,KAAK,OAAO;AACrB,aAAK,QAAQ,KAAK,KAAK,KAAK,QAAQ;AACpC,eAAO;;MAWX;AACI,YAAI,KAAK;AACL,gBAAM,IAAI,WAAW;;AAEzB,uBAAc,KAAK,KAAK,KAAK,QAAQ;AACrC,uBAAe,KAAK,IAAI;AACxB,aAAK,IAAI,QAAO,KAAK;AACrB,eAAO;;;AC7Jf;;;;;;;;;;;;;;;;;oCAkBuC;MAInC;AACI,cAAM,kBAAkB;;MAE5B;AACI,eAAO;;MAEX;AACI,YAAI,MAAM;AACN,eAAK;;AAET,cAAM,KAAK;;MAEf;AACI,YAAI,MAAM;AACN,eAAK;;AAET,cAAM,QAAQ;;MAKlB;AACI,4BAAoB,KAAK,WAAW;AACpC,wBAAgB,IAAI,MAAM;AAC1B,oBAAY,KAAK;AAGjB,qBAAa,GAAG,IAAI,KAAK;AACrB,kBAAQ,KAAK,KAAK,IAAI,KAAK,KAAK,KAAK,QAAQ;;AAEjD,aAAK,OAAO;AACZ,aAAK,WAAW;AAChB,aAAK,kBAAkB,IAAI,KAAK;AAChC,aAAK,QAAQ;AACb,aAAK,MAAM;;;AAGnB,sBAAkB,mBAAmB;AC3DrC;;;;;;;;;;;;;;;;;AA6BO;AACH,aAAO,IAAI,cAAc;;AAKtB;AACH,cAAQ;AACR,aAAO,qBAAqB,MAAO,EAAE,OAAO,KAAK,MAAM;;AAepD;AACH,aAAO,IAAI,qBAAqB;;AAc7B;AACH,aAAO,IAAI,gBAAgB,eAAe;;AAkBvC;AACH,aAAO,yBAAyB,qBAAqB,cAAc,KAAK,SAAQ;;AA0B7E,0DAAsD,gBAAgB;AACzE,aAAO,IAAI,YAAY,WAAW;;;YAkB5B;AACF,uBAAe;AACf,gBAAQ,MAAM,KAAK;AACnB,eAAO,CAAC,EAAE;AACN,iBAAO,KAAK,EAAE;AACd,cAAI,MAAM,KAAK;;AAEnB,eAAO;;YAaL;AACF,uBAAe,KAAK,SAAS;AAC7B,uBAAe;AACf,gBAAQ,MAAM,OAAO;AACrB,eAAO,CAAC,EAAE;AACN,iBAAO,KAAK,EAAE;AACd,cAAI,MAAM,OAAO;;AAErB,eAAO;;YASL;AACF,gBAAQ,MAAM,KAAK;AACnB,eAAO,CAAC,EAAE;AACN,cAAI,MAAM,KAAK;;;YAUjB;AACF,gBAAQ,MAAM,KAAK;AACnB,6BAAqB,UAAU,EAAE;AACjC,eAAQ,CAAC,EAAE,QAAS;AAChB,cAAI,MAAM,KAAK;AACf,2BAAiB,UAAU,EAAE;;;MAerC;AACI,eAAO,IAAI,0BAA0B,MAAM;;MAW/C;AACI,eAAO,IAAI,eAAe,MAAM;;MAUpC;AACI,eAAO,IAAI,YAAY,MAAM;;MAUjC;AACI,eAAO,IAAI,iBAAiB,MAAM;;MAUtC;AACI,eAAO,IAAI,iBAAiB,MAAM,WAAW;;MAUjD;AACI,eAAO,IAAI,gBAAgB,MAAM;;YAO/B;AACF,eAAO,KAAK,IAAI,GAAG;;YASjB;AACF,eAAO,KAAK,eAAe,GAAG,aAAa,OAAM,MAAM;;MAoB3D,0CAA0C;AACtC,eAAO,IAAI,sBAAsB,MAAM,WAAW;;MAkCtD,6CAA6C,cAErC;AAEJ,2BAAmB,KAAK,cAAc,WAAW;AAGjD,eAAO,WAAW,IAAI,OAAK,QAAQ,GAAG;;MAY1C;AACI,eAAO,IAAI,gBAAgB,kBAAkB,CAAC,MAAM,YAAY;;MASpE;AACI,YAAI,SAAQ,KAAK,UAAS;AACtB,iBAAO;;AAEX,eAAO,IAAI,aAAa,MAAM;;MAQlC;AACI,YAAI,SAAQ,KAAK,UAAS;AACtB,iBAAO;;AAEX,eAAO,IAAI,aAAa,MAAM;;MAWlC;AACI,eAAO,IAAI,iBAAiB,MAAM;;MAWtC;AACI,eAAO,IAAI,gBAAgB,MAAM,YAAY;;MAMjD;AACI,eAAO,IAAI,eAAe;;;gCAUN;MACxB;AACI;AACA,aAAK,QAAQ;AACb,aAAK,OAAO;;MAEhB;AACI,eAAO,YAAY,KAAK,MAAM;;YAE5B;AACF,YAAI,KAAK,QAAQ,KAAK,MAAM;AACxB,iBAAO,CAAE,OAAO,MAAM,MAAM;;AAEhC,qBAAa,KAAK,MAAM,KAAK;AAC7B,aAAK;AACL,eAAO,CAAE,OAAO,UAAU,OAAO,MAAM;;;uCAGZ;MAC/B;AACI;AACA,aAAK,SAAS;;MAElB;AACI,eAAO;;YAEL;AACF;AACI,iBAAO,KAAK;;AAIZ,YAAE,UACE,mDAAmD,EAAE;AACzD,gBAAM;;;;iCAIW;MACzB;AACI;AACA,aAAK,WAAW;AAChB,aAAK,WAAW,QAAQ,QAAQ,CAAE,OAAO,MAAM,MAAM;;MAEzD;AACI,eAAO,GAAG,KAAK,SAAS;;YAEtB;AAKF,aAAK,WAAW,KAAK,SAAS,KAAK,MAAM,KAAK;AAC9C,eAAO,KAAK;;YAEV;AACF,eAAO,KAAK,SAAS;;;+BAGF;MACvB;AACI;AACA,aAAK,WAAW;AAChB,aAAK,WAAW;AAEhB,aAAK,QAAQ;AACb,aAAK,WAAW
,QAAQ,QAAQ,CAAE,OAAO,MAAM,MAAM;;MAEzD;AACI,eAAO,GAAG,KAAK,SAAS;;YAEtB;AAKF,aAAK,WAAW,KAAK,SAAS,KAAK,MAAM,KAAK;AAC9C,eAAO,KAAK;;YAEV;AAKF,eAAO,KAAK,UAAU,KAAK;AACvB,0BAAgB,MAAM,KAAK,SAAS;AAEpC,cAAI,QAAQ;AACR,mBAAO;;AAEX,kBAAW,QAAQ;;AAEvB,eAAO,KAAK,SAAS;;;+BAGF;MACvB;AACI;AACA,aAAK,WAAW;AAChB,aAAK,WAAW;AAChB,aAAK,QAAQ;;MAEjB;AACI,eAAO,GAAG,KAAK,SAAS;;YAEtB;AACF,YAAI,KAAK,WAAW,KAAK;AACrB,iBAAO,CAAE,OAAO,MAAM,MAAM;;AAEhC,eAAO,KAAK,SAAS;;;wCAMO;MAChC,wDAAwD;AACpD;AACA,aAAK,WAAW;AAChB,aAAK,YAAY;AACjB,aAAK,uBAAuB;AAC5B,aAAK,WAAW,QAAQ,QAAQ,CAAE,OAAO,MAAM,MAAM;;MAEzD;AACI,eAAO,GAAG,KAAK,SAAS;;YAEtB;AAKF,aAAK,WAAW,KAAK,SAAS,KAAK,MAAM,KAAK;AAC9C,eAAO,KAAK;;YAEV;AACF,sBAAc;AACd,eAAO,MAAM,SAAS,KAAK;AACvB,uBAAa,MAAM,KAAK,SAAS;AACjC,cAAI,KAAK;AACL,gBAAI,KAAK,wBAAwB,MAAM,SAAS;AAC5C,qBAAO,CAAE,OAAO,OAAO,MAAM;;AAEjC,mBAAO,CAAE,OAAO,MAAM,MAAM;;AAEhC,gBAAM,KAAK,KAAK;;AAEpB,eAAO,CAAE,OAAO,OAAO,MAAM;;;iCAGR;MACzB;AACI;AACA,aAAK,WAAW;AAChB,aAAK,YAAY;AACjB,aAAK,WAAW,QAAQ,QAAQ,CAAE,OAAO,MAAM,MAAM;;MAEzD;AACI,eAAO,GAAG,KAAK,SAAS;;YAEtB;AAKF,aAAK,WAAW,KAAK,SAAS,KAAK,MAAM,KAAK;AAC9C,eAAO,KAAK;;YAEV;AACF,eAAO;AACH,uBAAa,MAAM,KAAK,SAAS;AACjC,cAAI,KAAK,QAAQ,KAAK,UAAU,KAAK;AACjC,mBAAO;;AAEX,kBAAW,KAAK;;;;8BAIF;MACtB;AACI;AACA,aAAK,WAAW;AAChB,aAAK,YAAY;;MAErB;AACI,eAAO,GAAG,KAAK,SAAS;;YAEtB;AACF,qBAAa,MAAM,KAAK,SAAS;AACjC,YAAI,KAAK;AACL,iBAAO,CAAE,OAAO,MAAM,MAAM;;AAEhC,6BAAqB,uBAAqC,KAAK;AAO/D,uBAAe,KAAK,UAAU,KAAK;AACnC,8BAAsB,uBAAqC;AAG3D,wBAAgB;AACZ,cAAI,CAAC,eAA8B,GAAG;AAClC,cAAE;;;AAGV,eAAO,CAAE,OAAO,QAAQ,MAAM;;;4CAGE;MACpC;AACI;AACA,aAAK,WAAW;AAChB,aAAK,UAAU;AACf,aAAK,QAAQ;AACb,aAAK,WAAW,QAAQ,QAAQ,CAAE,OAAO,MAAM,MAAM;;MAEzD;AACI,eAAO,GAAG,KAAK,SAAS;;YAEtB;AAKF,aAAK,WAAW,KAAK,SAAS,KAAK,MAAM,KAAK;AAC9C,eAAO,KAAK;;YAEV;AACF,eAAO;AACH;AACI,mBAAO,MAAM,KAAK,SAAS;;AAG3B,gBAAI,CAAC,KAAK,QAAQ;AACd,qBAAO,CAAE,OAAO,MAAM,MAAM;;;;;;mCAUjB;MAC3B;AACI;AACA,aAAK,WAAW;AAChB,aAAK,YAAY;;MAErB;AACI,eAAO,GAAG,KAAK,SAAS;;YAEtB;AACF,qBAAa,MAAM,KAAK,SAAS;AACjC,YAAI,KAAK;AACL,iBAAO,CAAE,OAAO,MAAM,MAAM;;AAEhC,6BAAqB,uBAAqC,KAAK;AAO/D,uBAAe,MAAM,KAAK,UAAU,KAAK;AACzC,8BAAsB,uBAAqC;AAG3D,wBAAgB;AACZ,cAAI,CAAC,eAA8B,GAAG;AAClC,cAAE;;;AAGV,eAAO,CAAE,OAAO,QAAQ,MAAM;;;oCAaC;MACnC;AACI;AACA,aAAK,cAAc,IAAI;AACvB,aAAK,WAAW,QAAQ,QAAQ,CAAE,OAAO,MAAM,MAAM;;YAEnD;AAKF,aAAK,WAAW,KAAK,SAAS,KAAK,MAAM,KAAK;AAC9C,eAAO,KAAK;;YAEV;AAIF,eAAO,KAAK,YAAY,aAAa;AAEjC,cAAI,CAAC,MAAM,KAAK;AACZ,mBAAO,CAAE,OAAO,MAAM,MAAM;;;AAGpC,eAAO,CAAE,OAAO,KAAK,YAAY,SAAS,MAAM;;;kCAG1B;MAC1B;AACI;AACA,aAAK,WAAW;AAChB,aAAK,YAAY;;MAErB;AACI,eAAO,GAAG,KAAK,SAAS;;YAEtB;AACF,qBAAa,MAAM,KAAK,SAAS;AACjC,YAAI,KAAK;AACL,iBAAO;;AAEX,6BAAqB,uBAAqC,KAAK;AAM/D,4BAAoB,KAAK,UAAU,KAAK;AACxC,8BAAsB,uBAAqC;AAC3D,aAAK,YAAY,QAAQ;AAGzB,wBAAgB;AACZ,cAAI,CAAC,eAA8B,GAAG;AAClC,cAAE;;;AAGV,eAAO;;;kCAYsB;MACjC;AACI;AACA,aAAK,mBAAmB;AAGxB,aAAK,WAAW;AAEhB,aAAK,WAAW;AAChB,aAAK,gBAAgB;;MAEzB;AACI,kCAA0B;AAC1B,eAAO,GAAG;;YAER;AACF,aAAK,WAAW,KAAK,cAAc,KAAK;AACxC,eAAO,KAAK;;YAEV;AAMF,cAAM;AACN,YAAI,KAAK,YAAY;AACjB,iCAAuB,MAAM,KAAK,cAAc;AAChD,cAAI,eAAe;AAEf,mBAAO,CAAE,OAAO,MAAM,MAAM;;AAEhC,eAAK,WAAW,eAAe;AAC/B,cAAI,KAAK,oBAAoB;AACzB,iBAAK,WAAW,KAAK,SAAS,aAAa,KAAK;;;AAGxD,2BAAmB,MAAM,KAAK,SAAS;AACvC,YAAI,WAAW;AACX,eAAK,WAAW;AAChB,iBAAO,KAAK,cAAc;;AAE9B,eAAO;;;AAGR;AACP,IAAC;AACG,uBAAgB,iBAAgB,UAAU,KAAK;AAC/C,uBAAgB,iBAAgB,cAAc,KAAK;AACnD,uBAAgB,iBAAgB,aAAa,KAAK;OACnD,mBAAoB,mBAAkB;8BA8Bf;MACtB,sCAAsC,gBAAgB;AAClD;AACA,aAAK,YAAY;AACjB,aAAK,eAAe;AACpB,aAAK,QAAQ;AACb,aAAK,iBAAiB;;MAE1B;AACI,kCAA0B;AAC1B,eAAO,IAAI;;YAET;AAGF,cAAM;AAGN,2BAAmB;AACnB,4BAAoB;AACpB;AACI,cAAI,qBAAqB;AACrB,2BAAe,UAAU;AACzB,mBAAO;cACH,OAAO,OAAO,KAAK;A
ACf;AACA,oBAAI,EAAE;AACF;;AAEJ,uBAAO,EAAE;;cAEb,SAAS;;;AAIb,mBAAO,CAAE,OAAO,MAAM,SAAS;;;AAGvC,uBAAe,MAAM,mBAAmB,KAAK,WAAW;AACxD,YAAI,iBAAiB;AAEjB,iBAAO,CAAE,OAAO,MAAM,MAAM;;AAEhC,YAAI,gBAAgB;AAChB,kBAAQ,KAAK;iBACJ,gBAAgB;AACjB,oBAAM,IAAI,MAAM,qEACa,KAAK;iBACjC,gBAAgB;AACjB,qBAAO,CAAE,OAAO,MAAM,MAAM;iBAC3B,gBAAgB;;;;AAK7B,aAAK;AACL,eAAO,CAAE,OAAO,QAAQ,MAAM;;YAE5B;AACF,aAAK,iBAAiB,KAAK,UAAU,KAAK;AAC1C,eAAO,KAAK;;;mCAYkB;MAClC;AACI;AACA,aAAK,WAAW;AAChB,aAAK,aAAa;AAClB,aAAK,SAAS,IAAI,WAAW;;MAEjC;AACI,eAAO,GAAG,KAAK,SAAS;;MAM5B;AACI,eAAO,CAAC,KAAK,OAAO;AAChB,oBAAU,KAAK,SAAS;AACxB,eAAK,OAAO,KAAK;;;MAGzB;AACI,aAAK;AAIL,eAAO,KAAK,OAAO;;;kCASU;MACjC;AACI,cAAM,UAAU;AAChB,aAAK,WAAW;AAChB,aAAK,aAAa;AAElB,aAAK,oBAAoB;AACzB,aAAK,SAAS,aAAgB,QAAQ,OAAc;AACpD,aAAK,WAAW,QAAQ,QAAQ,CAAE,OAAO,MAAM,MAAM;;YAEnD;AAKF,aAAK,WAAW,KAAK,SAAS,KAAK,MAAM,KAAK;AAC9C,eAAO,KAAK;;MAEhB;AACI,eAAO,KAAK,MAAM,KAAK,WAAW;;MAEtC;AACI,eAAO,KAAK,UAAU,KAAK,OAAO;;YAEhC;AAEF,YAAI,CAAC,KAAK;AACN,eAAK;;AAET,eAAO,CAAC,KAAK,OAAO;AAChB,8BAAoB,KAAK;AACzB,yBAAe,MAAM,KAAK,OAAO,cAAc;AAC/C,cAAI,OAAO;AACP,iBAAK,oBAAoB;;AAGzB,iBAAK;AACL,mBAAO;;;AAGf,eAAO,CAAE,OAAO,MAAM,MAAM;;;AC1+BpC;;;;;;;;;;;;;;;;;;MAmDI;AACI,aAAK,OAAO;;MA8DhB,kCAAkC;AAC9B,sBAAa;AACb,gBAAe,YAAY,GAAG,MAAM;QACpC;AACA;AACA,YAAI,KAAK,SAAS,YAAY,KAAK,QAAQ;AAGvC,iBAAO,KAAK;mBAEP;AAGL,iBAAO,KAAK,KAAK,KAAK,OAAO;;AAK7B,iBAAO,KAAK,MAAM,KAAK,OAAO;;AAElC,eAAO,sBAAsB;AACzB,iBAAQ,OAAM,MAAK,YACd,iBAAiB,WAAW,gBAAgB;WAClD;;MAiBP;AACI,sBAAa;AACb;AACA,YAAI,KAAK,SAAS,YAAY,QAAQ,SAAS;AAG3C,iBAAO;mBAEF,KAAK,QAAQ,QAAQ,QAAQ,QAAQ;AAG1C,iBAAO,KAAK,OAAO,QAAQ;;AAK3B,iBAAO;;AAEX,eAAO,sBAAsB,YAAa,OAAM,MAAK,YAAY,YAAY,MAAM,QAAQ,aAAa;;MAkB5G;AACI,sBAAa;AACb;AACA,YAAI,KAAK,SAAS;AAEd,iBAAO;;AAKP,iBAAO;;AAEX,eAAO,sBAAsB;AACzB,iBAAQ,OAAM,MAAK,YAAY,OAAO,OAAK,KAAQ,MAAM,UAAU;WACpE;;YAkBD;AACF,eAAQ,OAAM,KAAK,YAAY,aAAa;;MAiBhD;AACI,sBAAa;AACb,eAAO,sBAAsB;AACzB,iBAAQ,OAAM,MAAK,YAAY,IAAI,OAAK,KAAQ,MAAM,UAAU;WACjE,KAAK;;MAyBZ;AACI,sBAAa;AACb,eAAO,sBAAsB;AACzB,iBAAQ,OAAM,MAAK,YAAY,SAAS;WACzC,KAAK;;MAWZ;AACI,YAAI,cAAc;AACd,gBAAM,IAAI,WAAW;;AAEzB,sBAAa;AACb,eAAO,sBAAsB,YAAa,OAAM,MAAK,YAAY,SAAS,aAAa,KAAK;;MAoBhG;AACI,sBAAa;AACb;AACA,YAAI,KAAK,QAAQ,QAAQ,SAAQ;AAI7B,iBAAO,KAAK,OAAO;mBAEd,WAAU;AAEf,iBAAO;mBAEF,KAAK,QAAQ,QAAS,YAAU,UAAa,SAAQ;AAG1D,iBAAO;;AAIP,iBAAO;;AAEX,eAAO,sBAAsB;AACzB,mCAAyB,qBAAqB,YAAa,EAAE,OAAO,MAAM,MAAK,YAAY,MAAM;AACjG,iBAAO,yBAAyB,iBAAiB,KAAK;WACvD;;MAmBP;AACI,sBAAa;AACb;AACA,YAAI,KAAK,QAAQ,QAAQ,UAAS,KAAK,KAAK,QAAQ;AAIhD,iBAAO,KAAK,OAAO;mBAEd,KAAK,QAAQ,QACjB,MAAK,OAAO,UAAS,WAAU,UAAa,SAAQ;AAGrD,iBAAO;;AAIP,iBAAO;;AAEX,eAAO,sBAAsB,YAAa,OAAM,MAAK,YAAY,KAAK,SAAQ;;MAuBlF,mDAAmD;AAC/C,YAAI,cAAc,QAAQ,aAAa;AACnC,cAAI,KAAK,QAAQ;AACb,kBAAM,IAAI,WAAW;;AAGrB,kBAAM,IAAI,WAAW,mNAGkB,KAAK;;;AAGpD,sBAAa;AACb,uBAAe,aAAgB,QAAQ,OAAc;AACrD,eAAO,sBAAsB;AACzB,sBAAY,OAAO;AACnB,cAAI;AACA,qBAAS,OAAO;;AAEpB,iBAAQ,OAAM,MAAK,YAAY,QAAQ,YAAY,MAAM;WAC1D,KAAK;;MAmBZ;AACI,sBAAa;AACb;AACA,YAAI,KAAK,QAAQ,QAAQ,KAAK,OAAO;AAGjC,iBAAO;mBAEF,KAAK,QAAQ,QAAQ,KAAK,QAAQ;AAGvC,iBAAO,KAAK;;AAIZ,iBAAO;;AAEX,eAAO,sBAAsB,YAAa,OAAM,MAAK,YAAY,KAAK,SAAQ;;YAkB5E;AACF,YAAI,KAAK,SAAS;AACd,gBAAM,IAAI,MAAM;;AAEpB,eAAQ,OAAM,KAAK,YAAY;;YAa7B;AACF,YAAI,KAAK,SAAS;AACd,gBAAM,IAAI,MAAM;;AAEpB,eAAQ,OAAM,KAAK,YAAY;;;AAIvC,YAAQ,kBAAkB;AAanB,sDAAkD;AACrD,aAAO,IAAI,cAAc;QACrB;AACI,gBAAM,GAAG;AACT,eAAK,OAAO;;cAMV;AACF,iBAAO;;;;AAsBZ;AACH,aAAO,sBAAsB,YAAY,kBAAkB,QAAQ,MAAM;;AA2CtE;AAEH,UAAI,CAAC,aAAW;AACZ,cAAM,IAAI,MAAM;;AAEpB;AACA,UAAI,MAAM,QAAQ;AACd,qBAAa,GAAG,IAAI,SAAS,QAAQ;AACjC,iBAAO,QAAQ,OAAO,SAAS,GAAG,OAC9B,KAAK,IAAI,MAAM,SAAS,GAAG;;iBAG9B,oBAAoB;AACzB,yBAAiB;AACb,iBAAO,QAAQ,
OAAO,SAAS,IAAI,OAC/B,KAAK,IAAI,MAAM,SAAS,IAAI;;;AAGxC,aAAO,sBAAsB;AACzB,wBAAgB,MAAM,mBAAmB,UAAU;AAC/C,cAAI,aAAa;AACb,mBAAO,CAAE,OAAO,EAAE,YAAY,SAAS;qBAElC,aAAW;AAChB,mBAAO,CAAE,OAAO,MAAM,SAAS;;AAG/B,kBAAM,IAAI,MAAM;;;AAIxB,eAAO,mBAAmB,SAAS,gBAAgB;SACpD;;AAUP;AACI,UAAI,SAAS;AACT,eAAO;;AAGX,yBAAmB,KAAK;AACxB,UAAI,aAAa;AAEb,sBAAc,YAAY;AAC1B,eAAO,CAAE,OAAO,SAAS;;AAG7B,aAAO,CAAE,OAAO,MAAM,SAAS;;AAMnC;AACI,UAAI,OAAO,WAAW;AAElB,cAAM,IAAI,MAAM;;AAEpB,UAAI,OAAO,cAAc;AAErB,eAAO,MAAS;;AAIhB,eAAO,QAAU;;;AC/oBzB;;;;;;;;;;;;;;;;;kCAuBqC;MAMjC;AACI;AACA,aAAK,QAAQ;;YAEX;AACF,8BAAsB,MAAM,KAAK,MAAM;AACvC,6BAAqB,cAAc;AACnC,6BAAqB,aAAa,MAAM,MAAM,IAAI;AAE9C,cAAI,KAAK,SAAS;AACd,mBAAO,KAAK,MAAM,GAAG;;AAEzB,iBAAO;;AAEX,eAAO;;;AC3Cf;;;;;;;;;;;;;;;;;AAoBA,uBAAmB;AACnB,sBAAkB,OAAO;AACzB,wBAAoB,OAAO;AAC3B,wBAAoB,OAAO;AAC3B,oCAAgC,OAAO;AACvC,wCAAoC,OAAO;6BAcX;MAiC5B;AACI;AACA,aAAK,QAAQ;AACb,aAAK,YAAY;AACjB,aAAK,kBAAkB;AACvB,aAAK,uBAAuB;AAC5B,aAAK,gBAAgB;AACrB,aAAK,wBAAwB;AAC7B,aAAK,YAAY;AACjB,aAAK,kBAAkB;AACvB,aAAK,OAAO,IAAI,gBAAgB;AAChC,YAAI,CAAC;AACD,sBAAY;;AAEhB,aAAK,YAAY,UAAU,cAAc,QAAQ,QAAQ;AACzD,aAAK,kBAAkB,UAAU;AACjC,aAAK,gBAAgB,UAAU;AAC/B,aAAK,wBAAwB,UAAU;AACvC,YAAI,UAAU;AACV,kBAAY,UAAU,aAAa,MAAM,MAAM;AAC/C,eAAK,kBAAkB;AACvB,eAAK,YAAY;;AAGjB,eAAK,YAAY,UAAU,YAAY,UAAU,YAAY;;;YAa/D;AACF,YAAI,CAAC,KAAK;AACN,gBAAM,KAAK;;AAEf,eAAO,KAAK,wBAAwB,OAAO,KAAK,KAAK,iBACjD,KAAK;;YAUP;AACF,oCAA4B,MAAM,KAAK;AACvC,YAAI,CAAC,KAAK,mBAAmB,CAAC;AAE1B,gBAAM,IAAI,MAAM;mBAEX,KAAK,mBAAmB;AAE7B,kBAAY,oBAAoB,WAAW,KAAK,gBAAgB,QAAQ,MAAM,yCAC1E,KAAK,gBAAgB,OAAO,aAC5B,oEACW,oBAAoB,OAAO,aAAa;;AAE3D,YAAI,CAAC,KAAK;AACN,eAAK,kBAAkB;;AAG3B,uBAAe,KAAK,gBAAgB,OAAO;AACvC,mBAAS,QAAS,SAAS,QAAQ,KAAM;AACzC,iBAAO;WACR;AACH,+BAAuB,OAAO,KAAK,QAAQ,OAAO,UAAW,OAAO,QAAQ;AAC5E,gBAAY,eAAe,WAAW,GAAG,MAAM,mCAAmC,eAAe;AAEjG,YAAI,KAAK;AACL,4BAAkB,OAAO,KAAK,KAAK;AAC/B,2BAAc,KAAK,gBAAgB,QAAQ;AAC3C,gBAAI,WAAU;AACV,oBAAM,IAAI,MAAM,cAAc,MAC1B,yEACY,KAAK,gBAAgB,aAAa;;;;AAI9D,aAAK,uBAAuB;;YAE1B;AACF,YAAI,KAAK;AACL,uBAAa,MAAM,KAAK,KAAK;AAC7B,+BAAqB,MAAM,KAAK;AAChC,cAAI,aAAa;AACb,kBAAM,IAAI,MAAM;;AAEpB,4BAAkB,aAAa;AAC/B,0BAAgB,KAAK,SAAS,WAAW;AACzC,iBAAO;;AAGP,iBAAO;;;YAGT;AACF,YAAI,CAAC,KAAK;AACN,gBAAM,KAAK;;AAEf,oBAAY,MAAM,KAAK,KAAK;AAC5B,YAAI,KAAK;AAGL,kBAAQ,MAAM,KAAK;;AAEvB,eAAO,MAAM,IAAI,OAAK,KAAK,gBAAgB;;MAE/C;AACI,uBAAe,KAAK,SAAS;AAC7B,yBAAiB;AACjB,uBAAe;AACf,qBAAa,GAAG,IAAI,KAAK,gBAAgB,QAAQ;AAC7C,sBAAY,KAAK,gBAAgB;AACjC,0BAAe,KAAK,gBAAgB,KAAK,cAAc,OAAO;AAC9D,cAAI,KAAK,yBAAyB,CAAC;AAE/B;;AAGA,0BAAc,OAAO;AACrB,8BAAkB;AAClB,gBAAI,UAAU;AAGV,kBAAI,WAAU,QAAO,YAAY;AAC7B,8BAAc,QAAO;yBAEhB,WAAW,SAAO,YAAY,QAAO;AAC1C,sBAAM,IAAI,MAAM,mBAAmB,8BAA8B;;AAGjE,8BAAc;;;AAKlB,iCAAmB,OAAO;AAC1B,kBAAI,MAAM;AAGN,oBAAI,WAAU,QAAO,UAAU;AAC3B,gCAAc,KAAK,WAAW;;AAI9B,gCAAc;;yBAGb,CAAC,WAAU,CAAC,QAAO;AAGxB,8BAAc;;AAKd,wBAAQ,QAAO;uBACN;AACD,kCAAc;AACd;uBACC;AACD,kCAAc,KAAK,MAAM;AACzB;uBACC;AACD,kCAAc,KAAK,WAAW;AAC9B;;AAEA,kCAAc;;;;AAK9B,YAAC,WAAU,QAAO,UAAW,OAAO,OAAO,cACvC,SAAS,OAAO;;;AAK5B,YAAI,OAAO,KAAK,QAAQ,WAAW;AAC/B,iBAAO;;AAGP,iBAAO,CAAE,IAAI,UAAU,IAAI;;;MAGnC;AACI,YAAI,UAAU,OAAO,MAAM,kBAAkB;AACzC,iBAAO;;AAGP,iBAAO;;;MAIf,sCAAsC;AAClC,uBAAe;AACf,yBAAiB;AACjB,2BAAmB,KAAK;AACxB,2BAAmB;AAEnB,qBAAa,GAAG,IAAI,YAAY;AAC5B,kBAAQ;iBAEC;AACD,sBAAQ,KAAK,OAAO;qBAEX;AACD,+BAAa,IAAI;AACjB,iCAAe;AACf;qBAEC,KAAK;AACN,+BAAa,IAAI;AAGjB,sBAAI,KAAK,cAAc,OAAO,KAAK;AAC/B;;AAEJ,yBAAO,KAAK;AACZ,iCAAe;AACf;;AAGA,iCAAe;AACf,+BAAa;AACb;;AAER;iBAEC;AACD,sBAAQ,KAAK,OAAO;qBAEX,KAAK;AACN,yBAAO,KAAK,KAAK,UAAU,YAAY;AACvC,iCAAe;AACf,+BAAa,IAAI;AACjB;;;AAGR;iBAEC;AACD,sBAAQ,KAAK,OAAO;qBAEX;AACD,iCAAe;AACf;;;AAGR;iBAEC;AACD,sBAAQ
,KAAK,OAAO;qBAEX,KAAK;AACN,yBAAO,KAAK,KAAK,UAAU,YAAY,IAAI;AAC3C,iCAAe;AACf,+BAAa,IAAI;AACjB;qBAEC;AACD,iCAAe;AACf;;AAGA,iCAAe;AACf;;AAER;iBACC;AACD,sBAAQ,KAAK,OAAO;qBAEX;AACD,iCAAe;AACf;;;AAGR;;;;AAKZ,YAAI,iBAAiB;AACjB,iBAAO,KAAK,KAAK,UAAU,YAAY,aAAa;;AAGpD,iBAAO,KAAK,KAAK,UAAU;;AAG/B,YAAI,wBAAwB,OAAO,WAAW,KAAK,gBAAgB;AAC/D,gBAAM,IAAI,MAAM,wCAAwC,KAAK,gBAAgB,qCAAqC;;AAEtH,eAAO;;;ACpXf;;;;;;;;;;;;;;;;;qCAyBwC;MACpC;AACI;AACA,aAAK,mBAAmB;AACxB,aAAK,WAAW;AAChB,aAAK,UAAU,iBAAiB,WAAW;AAC3C,4BAAoB,KAAK,KAAK,KAAK;AACnC,YAAI,KAAK,UAAU,KAAK,cAAc,KAAK,cAAc,MACrD,CAAC,OAAO,UAAU;AAClB,gBAAM,IAAI,MAAM,gFACmB,KAAK;;AAE5C,aAAK,YAAY,iBAAiB,2BAA2B;AAC7D,aAAK,eAAe,iBAAiB;AACrC,aAAK,uBACD,iBAAiB,wBAAwB,KAAK;AAClD,aAAK,wBAAwB,iBAAiB;AAC9C,aAAK,wBAAwB,iBAAiB,yBAAyB;AACvE,aAAK,qBACD,iBAAiB,uBAAuB,QAAQ,QAAQ;AAC5D,aAAK,kBACD,iBAAiB,oBAAoB,OAAO,OAAO;AACvD,YAAI,CAAC,KAAK,sBAAsB,CAAC,KAAK;AAClC,gBAAM,IAAI,MAAM;;;MAIxB;AACI,eAAO;;mBAGE,0BAA0B;AACnC,YAAI,OAAM,IAAI;AACV,gBAAM,IAAI,MAAM;;AAEpB,mCAA2B,IAAI,mBAAmB;AAElD,cAAM,mBAAmB;AACzB,eAAO;;YAGL;AACF;AACI,eAAK,SAAS,MAAM,UAAU,aAAa,aAAa;YACpD,OAAO,KAAK,yBAAyB,OAAO,OACxC,KAAK;YACT,OAAO;;;AAIX,gBAAM,IAAI,MAAM,iDAAiD,EAAE;;AAEvE,YAAI,CAAC,KAAK;AACN,gBAAM,IAAI,MAAM;;AAEpB,+BAEA,OAAO,gBAAgB,OAAO;AAC9B,aAAK,eAAe,IAAI;AACxB,YAAI,CAAC,KAAK;AAGN,eAAK,eAAe,KAAK,aAAa;mBAEjC,KAAK,aAAa,eAAe,KAAK;AAC3C,gBAAM,IAAI,MAAM,wCACC,KAAK,yBACP,KAAK,aAAa;;AAErC,6BAAqB,KAAK,aAAa,wBAAwB,KAAK;AACpE,aAAK,WAAW,KAAK,aAAa;AAClC,aAAK,SAAS,UAAU,KAAK,UAAU;AACvC,aAAK,SAAS,wBAAwB,KAAK;AAC3C,qBAAa,QAAQ,KAAK;AAC1B,aAAK,WAAW,IAAI,aAAa,KAAK;AACtC,aAAK,WAAW,IAAI,aAAa,KAAK;AACtC;;YAEE;AACF,YAAI,KAAK;AACL,iBAAO,CAAE,OAAO,MAAM,MAAM;;AAEhC;AACA;AACA,+BAAuB,MAAM,KAAK;AAClC,YAAI,KAAK;AACL,2BAAiB,KAAK,aAAa,eAAe;AAClD,8BAAoB,KAAK,4BAA4B,UAAU,CAAC,KAAK,WAAW,KAAK,sBAAsB;;AAE/G,YAAI,KAAK;AACL,2BAAiB,KAAK,aAAa,eAAe;AAClD,2BAAiB,KAAK,4BAA4B,UAAU,CAAC,KAAK,YAAY,KAAK,SAAS;;AAEhG,eAAO;UACH,OAAO,CAAE,aAAe,mBAAmB,UAAY;UACvD,MAAM;;;YAKR;AACF,eAAQ,OAAM,KAAK,QAAQ;;YAEzB;AACF,8BAAsB;AACtB,8BAAsB;AACtB,4BAAoB;AACpB,eAAO,IAAI,QAAQ;AACf,6BAAmB,YAAY;AAC3B,gBAAI,KAAK;AACL,mBAAK,SAAS,sBAAsB,KAAK;AAEzC,kBAAI,KAAK,SAAS,OAAO;AACrB,wBAAQ,CAAE,eAAe;;AAE7B,4BAAc,KAAK,KAAK,SAAS,MAAM,GAAG,KAAK;;AAEnD,gBAAI,KAAK;AACL,mBAAK,SAAS,uBAAuB,KAAK;AAC1C,4BAAc,KAAK,KAAK,SAAS;;AAGrC,gBAAI,EAAE,kBAAkB,KAAK;AACzB,4BAAc;AACd,sBAAQ,CAAE,eAAe;;aAE9B,KAAK,UAAU,KAAK,eAAe;;;MAI9C;AACI,YAAI,CAAC,KAAK;AACN,eAAK,WAAW;AAChB,eAAK,SAAS;AACd,eAAK,aAAa;AAClB,cAAI,KAAK,UAAU,QAAQ,KAAK,OAAO,YAAY,SAAS;AACxD,iBAAK,OAAO,YAAY,GAAG;;;;MAKvC;AACI,cAAM,IAAI,MAAM;;MAGpB;AACI,eAAO,KAAK;;MAEhB;AACI,0BAAkB,MAAM,GAAG;AAC3B,yBAAiB,IAAI,aAAa,MAAM,SAAS;AACjD,cAAM,QAAQ,cAAa,SAAS,IAAI,OAAM,IAAI;AAClD,eAAO;;MAEX;AACI,qBAAa,IAAI,aAAa,eAAmB;AAEjD,aAAK,IAAI,UAAU,KAAK,SAAS,SAAS;AAC1C,eAAO,QAAO,MAAM;;;ACvL5B;;;;;;;;;;;;;;;;;iCAuBoC;MAChC;AACI;AACA,aAAK,qBAAqB;AAC1B,aAAK,eAAe;AACpB,aAAK,WAAW;AAChB,aAAK,SAAS;AACd,YAAI,KAAK;AACL,eAAK,SAAS;AACd,eAAK,WACD,CAAC,KAAK,aAAa,cAAc,KAAK,aAAa;AACvD,eAAK,aAAa,UAAS,CAAC,IAAI;AAChC,cAAI,KAAK,aAAa;AAElB,uCAA2B,KAAK,aAAa,cAAc,IAAM,KAAK,mBAAmB;AACzF,wCAA4B,KAAK,aAAa,eAAe,IACzD,KAAK,mBAAmB;AAC5B,mCAAwB,KAAI,sBAAsB;AAClD,oCAAyB,KAAI,uBAAuB;AACpD,iCAAqB,iBAAiB;AACtC,kCAAsB,sBAAsB;AAC5C,iBAAK,UAAU,SAAS,CAAC,iBAAiB,gBAAgB,eAAe,eAAe,CAAC,GAAG;;AAG5F,iBAAK,UAAU,SAAS,CAAC,GAAG,GAAG,GAAG,IAAI,CAAC,GAAG;;;;MAItD;AACI,eAAO;;mBAGE,0CAA0C;AACnD,YAAI,OAAM,IAAI;AACV,gBAAM,IAAI,MAAM;;AAEpB,YAAI,CAAC;AAGD,+BAAqB,SAAS,cAAc;AAC5C,cAAI,CAAC,aAAa,eAAe,CAAC,aAAa;AAC3C,kBAAM,IAAI,MAAM;;AAGpB,6BAAmB,QAAQ,aAAa;AACxC,6BAAmB,SAAS,aAAa;;AAE7C,+BAAuB,IAAI,eAAe,oBAAoB;AAE9D,cAAM,eAAe;AACrB,eAAO;;YA
GL;AACF,YAAI,KAAK,aAAa;AAClB,kBAAa,KAAK,aAAa,eAAe,UACzC,KAAK,aAAa,eAAe,eAAgB,MAAM,+BAA+B,KAAK,aAAa;;AAGjH;AACI,eAAK,SAAS,MAAM,UAAU,aAAa,aAAa;YACpD,OAAO;cACH,UAAU,KAAK,aAAa;cAC5B,YAAY,KAAK,aAAa,aAC1B,KAAK,aAAa,aAClB;cACJ,OAAO,KAAK,mBAAmB;cAC/B,QAAQ,KAAK,mBAAmB;;;;AAMxC,YAAE,UAAU,iDAAiD,EAAE;AAC/D,gBAAM;;AAEV,YAAI,CAAC,KAAK;AACN,gBAAM,IAAI,MAAM;;AAGpB;AACI,eAAK,mBAAmB,YAAY,KAAK;;AAGzC,kBAAQ,IAAI;AACZ,eAAK,mBAAmB,MAAM,OAAO,IAAI,gBAAgB,KAAK;;AAGlE,aAAK,mBAAmB;AACxB,aAAK,WAAW;AAChB,eAAO,IAAI,QAAQ;AAEf,eAAK,mBAAmB,mBAAmB;AACvC;;;;YAIN;AACF,YAAI,KAAK;AACL,iBAAO,CAAE,OAAO,MAAM,MAAM;;AAEhC;AACA;AACI,gBAAM,WAAmB,KAAK;;AAG9B,gBAAM,IAAI,MAAM,4CAA4C,KAAK,UAAU;;AAE/E,YAAI,KAAK;AACL;AACI,mBAAO,CAAE,OAAO,KAAK,mBAAmB,MAAM,MAAM;;AAGpD,kBAAM,IAAI,MAAM,oCAAoC,EAAE;;AAGtD,gBAAI;;;AAIR,iBAAO,CAAE,OAAO,KAAK,MAAM;;;MAGnC;AAII,YAAI,KAAK,aAAa,eAAe,KAAK,aAAa,gBAClD,MAAK,mBAAmB,UAAU,KAAK,aAAa,eACjD,KAAK,mBAAmB,WAAW,KAAK,aAAa;AACzD,iBAAO;;AAEX,eAAO;;MAGX;AACI,eAAO,KAAK;AACR,gCAAsB,IAAI,UAAU,WAAW;AAC/C;AACA,yBAAe,OAAM,cAAc,eAAe,KAAK,SAAS,KAAK,YAAY,KAAK,UAAU;AAEhG,wBAAc,aAAa;AAC3B,iBAAO,aAAa,QAAQ,MAAM,MAAM;;;YAK1C;AACF,eAAQ,OAAM,KAAK,QAAQ;;MAG/B;AACI,uBAAe,KAAK,OAAO;AAC3B,eAAO,QAAQ,WAAS,MAAM;AAC9B;AACI,eAAK,mBAAmB,YAAY;;AAGpC,kBAAQ,IAAI;AACZ,eAAK,mBAAmB,MAAM;;AAElC,aAAK,WAAW;;MAGpB;AACI,cAAM,IAAI,MAAM;;;AC5LxB;;;;;;;;;;;;;;;;;;;ACAA;;;;;;;;;;;;;;;;;iCAkBoC;MAmBhC;AACI,eAAO,IAAI,cAAc,MAAM;;;gCAYX;MACxB;AACI;AACA,aAAK,WAAW;AAChB,aAAK,OAAO,IAAI,kBAAkB,UAAU;;MAEhD;AACI,eAAO,KAAK,KAAK;;YAEf;AACF,eAAO,KAAK,KAAK;;;oCAGO;MAC5B;AACI;AACA,aAAK,WAAW;AAChB,aAAK,YAAY;AAEjB,aAAK,YAAY;;MAErB;AACI,eAAO,GAAG,KAAK,SAAS,uBAAuB,KAAK;;YAElD;AACF,4BAAoB,MAAM,KAAK,SAAS;AACxC,YAAI,YAAY;AACZ,cAAI,KAAK,cAAc;AACnB,mBAAO;;AAIX,eAAK,YAAY,KAAK,KAAK;AAC3B,eAAK,YAAY;AACjB,iBAAO;;AAEX,sBAAc,YAAY,MAAM,MAAM,KAAK;AAI3C,cAAM,KAAK,KAAK,YAAY,MAAM;AAClC,2BAAmB,MAAM,MAAM,GAAG;AAC9B,eAAK,YAAY,KAAK;;AAE1B,aAAK,YAAY,MAAM,MAAM,SAAS;AACtC,eAAO;;;AC/Ff;;;;;;;;;;;;;;;;;oCAoBuC;MAUnC;AACI,eAAO,IAAI,aAAa;;;+BAYL;MACvB;AACI;AACA,aAAK,WAAW;AAChB,aAAK,OAAO,IAAI,iBAAiB;;MAErC;AACI,eAAO,KAAK,KAAK;;YAEf;AACF,eAAO,KAAK,KAAK;;;mCAyBM;MAC3B;AACI;AACA,aAAK,WAAW;AAChB,YAAI,OAAM,IAAI;AACV,eAAK,UAAU,IAAI,YAAY;;AAI/B,iBAAQ,iBAAkB;AAC1B,eAAK,UAAU,IAAI,cAAc;;;MAGzC;AACI,eAAO,GAAG,KAAK,SAAS;;YAEtB;AACF,4BAAoB,MAAM,KAAK,SAAS;AACxC;AACA,YAAI,YAAY;AACZ,iBAAO;;AAGP,kBAAQ,YAAY;;AAExB;AACA,YAAI,OAAM,IAAI;AACV,iBAAO,KAAK,QAAQ,OAAO,OAAO,CAAE,QAAQ;;AAG5C,iBAAO,KAAK,QAAQ,MAAM,OAAO,KAAK,MAAM;;AAEhD,aAAK,YAAY,KAAK;AACtB,eAAO;;;AC/Gf;;;;;;;;;;;;;;;;;oCA2BuC;MACnC,4BAA4B;AACxB;AACA,aAAK,OAAO;AACZ,aAAK,UAAU;AACf,gBAAa,gBAAgB,cACxB,QAAM,IAAI,gBACN,gBAAgB,QAAQ,gBAAgB,OACzC,QAAQ,MAAM;AAEtB,aAAK,SAAS,QAAQ,UAAU;AAEhC,aAAK,YAAY,QAAQ,aAAa,OAAO;;MAEjD;AACI,eAAO,cAAc,KAAK;;YAExB;AACF,YAAI,KAAK,UAAY,MAAK,gBAAgB,aACtC,KAAK,KAAK,aACV,KAAK,KAAK;AACV,iBAAO,CAAE,OAAO,MAAM,MAAM;;AAEhC,sBAAc,IAAI,QAAQ;AACtB,sBAAY,KAAK,SAAS,KAAK;AAC/B,cAAI,KAAK,gBAAgB;AAGrB,oBAAQ,IAAI,WAAW,KAAK,KAAK,MAAM,KAAK,QAAQ;;AAMpD,+BAAmB,IAAI;AACvB,uBAAW,SAAS;AAChB,0BAAW,WAAW;AAItB,kBAAI,iBAAgB;AAChB,wBAAO,IAAI,WAAW;;AAE1B,kBAAI,CAAE,kBAAgB;AAClB,uBAAO,OAAO,IAAI,UAAU;;AAEhC,sBAAQ;;AAEZ,uBAAW,UAAU;AACjB,qBAAO,OAAO,IAAI,MAAM;;AAE5B,uBAAW,UAAU;AACjB,qBAAO,OAAO,IAAI,MAAM,MAAM;;AAIlC,2BAAc,KAAK,KAAK,MAAM,KAAK,QAAQ;AAG3C,uBAAW,kBAAkB;;AAEjC,eAAK,SAAS;;AAElB,eAAO,CAAE,OAAQ,MAAM,OAAQ,MAAM;;;AC1F7C;;;;;;;;;;;;;;;;;AA0BO,mDAA+C;AAClD;AACA;AACA,UAAK,OAAO,QAAS;AACjB,oBAAY;;AAGZ,oBAAY,IAAI;AAChB,sBAAc,0BAA0B;;AAE5C,uBAAiB,MAAM,QAAW,WAAW;AAC7C,UAAI,SAAS;AACT,2BAAmB,IAAI,WAAW,MAAM,SAAS;AACjD,eAAO,IAAI,kBAAkB,YAAY;;AAGzC,cAAM,IAAI,MAAM,SAAS;;;AAIjC,sCAAkC;AAC9B
,oBAAa;QACT,QAAQ,QAAQ;QAChB,SAAS,QAAQ;QACjB,MAAM,QAAQ;QACd,MAAM,QAAQ;QACd,aAAa,QAAQ;QACrB,OAAO,QAAQ;QACf,UAAU,QAAQ;QAClB,UAAU,QAAQ;QAClB,WAAW,QAAQ;;AAEvB,aAAO;;AC1DX;;;;;;;;;;;;;;;;;AAoBO;AACH,aAAQ,OAAO,WAAW,YAAa,OAAO,OAAO,GAAG,OAAO;;ACrBnE;;;;;;;;;;;;;;;;;iCAyBoC;MAShC,8BAA6B;AACzB;AACA,aAAK,QAAQ;AACb,aAAK,UAAU;;YAEb;AACF,YAAI,YAAY,KAAK,UAAU,OAAM,IAAI;AAErC,qBAAW;AACX,eAAK,QAAQ,GAAG,aAAa,KAAK,MAAM,OAAO;;AAInD,eAAO,IAAI,kBAAkB,KAAK,OAAO,KAAK;;;AC/CtD;;;;;;;;;;;;;;;;;gCAwBmC;MAQ/B,+BAA+B;AAC3B;AACA,aAAK,MAAM;AACX,aAAK,cAAc;;YAMjB;AACF,YAAI,YAAY,KAAK;AACjB,iBAAQ,IAAI,eAAe,KAAK,KAAK,KAAK,aACrC;;AAGL,iBAAO,iBAAiB,KAAK,KAAK,KAAK;;;;AC/CnD;;;;;;;;;;;;;;;;;AAqGO,qCAAiC;AACpC,aAAO,IAAI,WAAW,IAAI,cAAc,SAAS;;AA0B9C;AACH,mBAAa,qBAAqB;AAClC,aAAO,sBAAsB,YAAY;;AA8DtC;AACH,aAAO,sBAAsB;AACzB,oBAAY,MAAM;AAClB,eAAO,qBAAqB,MAAM,IAAI;;;AAiCvC;AACH,aAAO,eAAe,OAAO,oBAAoB;;AAoC9C;AACH,aAAO,mBAAmB,OAAO;;AC1QrC;AAEA,sBAAgB;ACFhB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACAA;;;;;;;;;;;;;;;;AAiBO;AACH,UAAI,CAAC,MAAM,QAAQ;AACf,kBAAS,CAAC;;AAEd,cAAO,QAAQ;AACX,YAAI,KAAK;AACL,kBAAY,EAAE,UAAU,aAAa,MAAM,GAAG;;;;ACvB1D;;;;;;;;;;;;;;;;AAkBA,sCAAgC;AAChC,oBAAc;AACd,mBAAa;AACb,uBAAiB;AACjB,wBAAkB;iCAGkB;MAChC;AACI;AACA,aAAK,YAAY;AACjB,aAAK,WAAW;AAChB,aAAK,OAAO,IAAI,aAAY,MAAM;;MAEtC;AACI,YAAI,KAAK;AACL,eAAK,WAAW;AAChB,cAAI,OAAM,IAAI;AACV,kBAAkB;;;AAY1B,uBAAe;AACf,aAAK,KAAK,IAAI,QAAQ,CAAE,QAAQ,OAAO,UAAU;AACjD,eAAO;;MAQX;AACI;AACA,YAAI,UAAU,YAAY,UAAU,QAAQ,OAAO,SAAS,KACxD,UAAc,OAAO;AACrB,gCAAsB,OAAO,IAAI,OAAK,cAAkB;AACxD,kBAAQ,KAAK,MAAM,eAAe,OAAO;;AAGzC,kBAAQ,KAAK,MAAM,QAAQ,OAAO;;AAEtC,eAAO,CAAE,QAAQ,OAAO,OAAO;;MAGnC;AACI,2BAAmB,KAAK,KAAK,IAAI;AACjC,mBAAW;;MAGf;AACI,YAAI,KAAK,KAAK,IAAI;AACd,6BAAmB,KAAK,KAAK,IAAI;AACjC,qBAAW;;;MAGnB;AACI,aAAK,KAAK,IAAI,QAAQ,CAAE,QAAQ,OAAO,UAAU;;MAErD;AACI,eAAO,KAAK,KAAK;;YAEf;AACF,eAAO,KAAK,SAAS;;MAEzB;AACI,eAAQ,OAAO,sBAAuB,KAAK,KAAK,IAAI;AACpD,YAAI,UAAU;AACV,6BAAmB,KAAK,SAAS,mBAAmB,KAAK;AACzD,6BAAmB,KAAK,SAAS,mBAAmB,KAAK;AACzD,iBAAO,wBAAoC,YAAY;;AAE3D,eAAO,KAAK,KAAK,IAAI,QAAQ;;MAEjC;AACI,sBAAa,KAAK,SAAS,EAAE;AAC7B,0BAAkB;AAClB,YAAI,EAAE,UAAU;AACZ;AAEI,0BAAc,MAAK,IAAI,OAAK,cAAkB;;AAG9C,kBAAM,IAAI,MAAM;;;AAGxB,eAAO,QAAU,EAAE,OAAO,EAAE,OAAO;;MAEvC;AACI,uBAAe,KAAK,MAAM,QAAQ,OAAO;AACzC,eAAO,WAAS,qBAAqB,QAAQ,OAAO,OAAO;;MAE/D;AACI,YAAI,KAAK,KAAK,IAAI;AACd,iBAAQ,sBAAuB,KAAK,KAAK,IAAI;AAC7C,cAAI,sBAAsB;AACtB,iBAAK,YAAY,mBAAmB,KAAK;AACzC,iBAAK,YAAY,mBAAmB,KAAK;;AAE7C,eAAK,KAAK,OAAO;;;MAGzB;AACI,uBAAe,WAAW;AAC1B,YAAI,KAAK,KAAK,IAAI;AACd,6BAAmB,KAAK,KAAK,IAAI;AACjC,qBAAW;AACX,cAAI,WAAW,WAAW;AACtB,iBAAK,YAAY;;;;YAIvB;AACF,sBAAc;AACd;AACA,yBAAiB,SAAa;AAC9B,eAAO,CAAE;;MAEb;AACI,eAAO;UAEH,YAAY;UACZ,SAAS,CAAC;;;MAIlB;AACI,yBAAiB,GAAG;AACpB,yBAAiB,iBAA2B,OAAO,KAAK;AACxD,YAAI,SAAS,KAAK,UAAQ,SAAS;AAC/B,iBAAO,QAAU,IAAI;;AAEzB,yBAAe,QAAU,UAAU,EAAE;AACrC,qBAAa,KAAK,WAAW;AAC7B,qBAAa,GAAG,IAAI,SAAO,MAAM;AAC7B,sBAAY,SAAO,WAAW;AAC9B,yBAAe,IAAI,MAAM,IAAI;AAC7B,uBAAa,GAAG,IAAI,OAAO,QAAQ;AAC/B,mBAAO,KAAK,IAAI,KAAK,QAAQ,KAAK,MAAM;;AAE5C,mBAAO,IAAI,KAAK,IAAI,GAAG,SAAS,GAAG;;AAEvC,eAAO,SAAO;;MAElB;AACI,sBAAc,KAAK,SAAS,EAAE;AAC9B,yBAAe,QAAU,CAAC,EAAE,MAAM,EAAE,OAAO,EAAE;AAC7C,qBAAa,SAAO;AACpB,qBAAa,GAAG,IAAI,MAAM,QAAQ;AAC9B,eAAK,IAAI,EAAE,OAAO,KAAK,MAAM;;AAEjC,eAAO,SAAO;;MAElB;AACI,oBAAY,EAAE,MAAM;AACpB,yBAAiB,IAAI,MAAM,EAAE,OAAO;AACpC,uBAAe;AACf,qBAAa,GAAG,IAAI,EAAE,MAAM;AACxB,cAAI,MAAM;AACN,qBAAS,cAAc,EAAE,MAAM;;;AAGvC,sBAAc,IAAI,MAAM,EAAE,MAAM,KAAK;AACrC,qBAAa,EAAE,MAAM;AACrB,aAAK,QAAQ;AACb,oBAAY,IAAI,MAAM;AACtB,qBAAa,GAAG,IAAI,IAAI,QAAQ;AAC5B,gBAAM,QAAQ;AACd,cAAI,KAAK,OAAS,GAAG,OAAO,MAAM,QAAQ;;AAE9C,eAAO;;MAEX;AACI,yBAAiB,GAAG;AACpB,yBA
Ae,QAAU,EAAE,OAAO,EAAE;AACpC,qBAAa,KAAK,WAAW;AAC7B,qBAAa,GAAG,IAAI,SAAO,MAAM;AAC7B,yBAAe,SAAO,WAAW;AACjC,wBAAc,OAAO;AACrB,eAAK,QAAQ,QAAM,MAAM,MAAM,EAAE,MAAM,MAAM,IAAI,MAAM;AACvD,mBAAO,IAAI,KAAK,IAAI,GAAG,QAAQ,GAAG;;AAEtC,eAAO,SAAO;;MAElB;AACI,yBAAiB,GAAG;AAEpB,eAAO,KAAO,QAAU,KAAK;;MAEjC;AACI,yBAAiB,SAAS;AAC1B,qBAAa,QAAQ,IAAI,OAAK,KAAK,SAAS,EAAE;AAC9C,uBAAe,QAAU,QAAQ,GAAG,OAAO,QAAQ,GAAG;AACtD,2BAAmB,OAAO;AAC1B,qBAAa,GAAG,IAAI,QAAQ,QAAQ;AAChC,2BAAiB,KAAK;AACtB,uBAAa,GAAG,IAAI,WAAW,QAAQ;AACnC,uBAAW,MAAM,SAAS;;;AAGlC,eAAO,OAAO;;MAElB;AACI,qBAAa,gBAAoB,CAAC,MAAM,OAAO;AAG/C,yBAAiB,KAAI,QAAQ;AAC7B,8BAAsB,sBAAkC,SAAS,OAAO;AAExE,kBAAU,IAAO,QAAQ,SAAS,QAAQ;AAC1C,kBAAU,IAAO;AACjB,uBAAe,KAAK,IAAI,GAAG,MAAM,QAAQ;AAGzC,eAAO,IAAO,GAAG;;MAErB;AACI,yBAAiB,CAAC,GAAG,IAAI;AACzB,eAAO,KAAK,oBAAoB,GAAG,GAAG,EAAE,OAAO,oBAAoB,KAAK,IAAI,QAAQ;;MAExF;AACI,yBAAiB,CAAC,GAAG,IAAI;AACzB,oBAAW,YAAU,KAAK,MAAM,KAAI;AACpC,4BAAoB;AACpB,eAAO,KAAK,oBAAoB,GAAG,GAAG,aAAa;;MAEvD;AACI,yBAAiB,GAAG;AACpB,oCAAwC,OAAO,MAAM,EAAE;AACvD,wCAAgC,2BAAuC,EAAE,OAAO;AAChF,4BAAoB,YAAW,EAAE,OAAO;AACxC,uBAAe,OAAS,UAAU;AAClC,2BAAmB,eAAmB;AACtC,qBAAa,KAAK,SAAS,OAAO;AAClC,sBAAc,KAAK,SAAS,EAAE;AAC9B,qBAAa,GAAG,IAAI,KAAK,QAAQ,EAAE;AAC/B,yBAAe,IAAI;AACnB,qBAAU;AACV,uBAAa,GAAG,IAAI,YAAY,EAAE;AAC9B,oBAAO,MAAM,SAAS;;AAE1B,eAAK,KAAK;;AAEd,eAAO;;MAEX;AACI,yBAAiB,GAAG;AACpB,wCAAgC,2BAAuC,EAAE,OAAO;AAChF,4BAAoB,YAAW,EAAE,OAAO;AACxC,uBAAe,OAAS,UAAU;AAClC,2BAAmB,eAAmB;AACtC,qBAAa,KAAK,SAAS,OAAO;AAClC,sBAAc,KAAK,SAAS,EAAE;AAC9B,qBAAa,GAAG,IAAI,KAAK,QAAQ,EAAE;AAC/B,yBAAe,IAAI;AACnB,sBAAW;AACX,uBAAa,GAAG,IAAI,YAAY,EAAE;AAC9B,qBAAQ,MAAM,SAAS;;AAE3B,eAAK,KAAK;;AAEd,eAAO;;MAEX;AACI,yBAAiB,GAAG;AACpB,oBAAY;AAGZ,yBAAiB,EAAE,OAAO,WAAW;AACrC,qBAAa,GAAG,IAAI,UAAU,EAAE;AAC5B,uBAAa,WAAW,WAAW,IAAI;;AAE3C,qBAAa,GAAG,IAAI,aAAa,EAAE;AAC/B,4BAAkB,QAAU,GAAG;AAC/B,uBAAa,MAAS,WAAW,YAAY,OAAO;AACpD,uBAAY,KAAK,IAAI,GAAG,IAAI;AAC5B,cAAI,KAAK;;AAEb,eAAO,MAAS;;MAEpB;AACI,yBAAiB,GAAG;AACpB,qBAAa,CAAC;AACd,oCAAwC,UAAU,MAAM,EAAE;AAC1D,wCAAgC,2BAAuC,EAAE,OAAO;AAChF,uBAAe,OAAS,UAAU;AAClC,2BAAmB,eAAmB;AACtC,qBAAa,KAAK,SAAS,OAAO;AAClC,sBAAc,KAAK,SAAS,EAAE;AAC9B,qBAAa,GAAG,IAAI,KAAK,QAAQ,EAAE;AAC/B,yBAAe,IAAI;AACnB,qBAAU,MAAM;AAChB,yBAAe;AACf,uBAAa,GAAG,IAAI,YAAY,EAAE;AAC9B,0BAAc,MAAM,SAAS;AAC7B,gBAAI,QAAQ;AACR,qBAAM;AACN,yBAAW;;;AAGnB,eAAK,KAAK;;AAEd,eAAO;;MAEX;AACI,yBAAiB,GAAG;AACpB,qBAAa,CAAC;AACd,oCAAwC,UAAU,MAAM,EAAE;AAC1D,wCAAgC,2BAAuC,EAAE,OAAO;AAChF,uBAAe,OAAS,UAAU;AAClC,2BAAmB,eAAmB;AACtC,qBAAa,KAAK,SAAS,OAAO;AAClC,sBAAc,KAAK,SAAS,EAAE;AAC9B,qBAAa,GAAG,IAAI,KAAK,QAAQ,EAAE;AAC/B,yBAAe,IAAI;AACnB,qBAAU,MAAM;AAChB,yBAAe;AACf,uBAAa,GAAG,IAAI,YAAY,EAAE;AAC9B,0BAAc,MAAM,SAAS;AAC7B,gBAAI,QAAQ;AACR,qBAAM;AACN,yBAAW;;;AAGnB,eAAK,KAAK;;AAEd,eAAO;;MAEX;AACI,yBAAiB,GAAG;AACpB,YAAI,SAAS,EAAE,OAAO;AAClB,gBAAM,IAAI,MAAM,oDAAoD,EAAE,OAAO,kBACzD;;AAExB,4BAAoB,YAAW,EAAE,OAAO;AACxC,uBAAe,OAAS,EAAE,OAAO;AACjC,qBAAa,KAAK,SAAS,OAAO;AAClC,sBAAc,KAAK,SAAS,EAAE;AAC9B,yBAAiB,EAAE,MAAM,EAAE,OAAO;AAClC,8BAAsB,WAClB,UAAU,IAAI,WAAW,IAAI,IAC7B,UAAU,IAAI;AAClB,qBAAa,GAAG,IAAI,MAAM,QAAQ,KAAK;AACnC,uBAAa,GAAG,IAAI,UAAU;AAC1B,wBAAY,cAAc,GAAG;AAC7B,gBAAI,MAAM;AACN,mBAAK,OAAO,YAAY,IAAI,MAAM;;AAGlC,8BAAgB,cAAc,GAAG,IAAI;AACrC,mBAAK,OAAO,YAAY,MAAM,WAAW,KAAK,WAC1C,MAAM,OAAO,KAAK;;;;AAIlC,eAAO;;MAEX;AACI,yBAAiB,CAAC,GAAG,IAAI;AACzB,eAAO,KAAK,oBAAoB,GAAG,GAAG,QAAQ;AAC1C,iBAAQ,SAAS,OAAQ,IAAI;;;MAGrC;AACI,yBAAiB,CAAC,GAAG,IAAI;AACzB,eAAO,KAAK,oBAAoB,GAAG,GAAG,QAAQ;AAC1C,iBAAQ,SAAS,OAAQ,IAAI;;;MAGrC;AACI,yBAAiB,CAAC,GAAG,IAAI;AACzB,eAAO,KAAK,oBAAoB,GAAG,GAAG,QAAQ;AAC1C,iBAAQ,OAAO,OAAQ,IAAI;;;MAGnC;AACI,yBAAiB,CAAC,GAAG,IAAI;AACzB,eA
AO,KAAK,oBAAoB,GAAG,GAAG,QAAQ;AAC1C,iBAAQ,QAAQ,OAAQ,IAAI;;;MAGpC;AACI,yBAAiB,CAAC,GAAG,IAAI;AACzB,eAAO,KAAK,oBAAoB,GAAG,GAAG,QAAQ;AAC1C,iBAAQ,OAAO,OAAQ,IAAI;;;MAGnC;AACI,yBAAiB,CAAC,GAAG,IAAI;AACzB,eAAO,KAAK,oBAAoB,GAAG,GAAG,QAAQ;AAC1C,iBAAQ,QAAQ,OAAQ,IAAI;;;MAGpC;AACI,yBAAiB,CAAC,GAAG,IAAI;AACzB,eAAO,KAAK,oBAAoB,GAAG,GAAG,QAAQ;AAC1C,iBAAO,QAAQ;;;MAGvB;AACI,yBAAiB,CAAC,GAAG,IAAI;AACzB,eAAO,KAAK,oBAAoB,GAAG,GAAG,QAAQ;AAC1C,iBAAO,QAAQ;;;MAGvB;AACI,yBAAiB,CAAC,WAAW,GAAG,IAAI;AACpC,uBAAe,KAAK,SAAS,UAAU;AACvC,wBAAgB,KAAK,SAAS,EAAE;AAChC,wBAAgB,KAAK,SAAS,EAAE;AAChC,uBAAe,OAAS,EAAE,OAAO,YAAW,EAAE,OAAO,EAAE;AACvD,0BAAkB,KAAK,SAAS,OAAO;AACvC,qBAAY;AACZ,uBAAe,UAAU,SAAS,KAAK,UAAU,OAAO,KAAK,EAAE,SAAS,IACpE,IACA,eAAmB,EAAE,MAAM,MAAM;AACrC,qBAAa,GAAG,IAAI,OAAO,QAAQ;AAC/B,uBAAa,GAAG,IAAI,QAAQ;AACxB,gBAAI,OAAO,OAAO;AACd,wBAAU,YAAW,QAAQ;;AAG7B,wBAAU,YAAW,QAAQ;;;;AAIzC,eAAO;;MAEX;AACI,yBAAiB,CAAC,YAAY;AAC9B,yBAAiB,KAAK,SAAS,UAAU;AACzC,eAAO,YAAU,UAAU,OAAO;;MAEtC;AACI,yBAAiB,GAAG;AACpB,sBAAc,KAAK,SAAS,EAAE;AAC9B,eAAO,WAAS,OAAO,EAAE,OAAO,EAAE,OAAO,GAAG;;MAEhD;AACI,yBAAiB,GAAG;AACpB,oCAAwC,OAAO,MAAM,EAAE;AACvD,wCAAgC,2BAAuC,EAAE,OAAO;AAChF,uBAAe,OAAS,UAAU,EAAE;AACpC,2BAAmB,eAAmB;AACtC,qBAAa,KAAK,SAAS,OAAO;AAClC,sBAAc,KAAK,SAAS,EAAE;AAC9B,qBAAa,GAAG,IAAI,KAAK,QAAQ,EAAE;AAC/B,yBAAe,IAAI;AACnB,qBAAU,MAAM;AAChB,uBAAa,GAAG,IAAI,YAAY,EAAE;AAC9B,0BAAc,MAAM,SAAS;AAC7B,gBAAI,QAAQ;AACR,qBAAM;;;AAGd,eAAK,KAAK;;AAEd,eAAO;;MAEX;AACI,yBAAiB,CAAC,GAAG,IAAI;AACzB,eAAO,KAAK,oBAAoB,GAAG,GAAG,EAAE,OAAO,gBAAgB,KAAK,IAAI,MAAM;;MAElF;AACI,yBAAiB,CAAC,GAAG,IAAI;AACzB,eAAO,KAAK,oBAAoB,GAAG,GAAG,EAAE,OAAO;AAC3C,sBAAY,OAAO;AACnB,cAAK,OAAO,KAAK,OAAO,KAAO,QAAQ,KAAK,QAAQ;AAChD,mBAAO;;AAGP,mBAAQ,OAAM,QAAQ;;;;MAIlC;AACI,yBAAiB,CAAC,GAAG,IAAI;AACzB,eAAO,KAAK,oBAAoB,GAAG,GAAG,EAAE,OAAO,gBAAgB,KAAK,IAAI,MAAM;;MAElF;AACI,yBAAiB,GAAG;AACpB,oCAAwC,OAAO,MAAM,EAAE;AACvD,wCAAgC,2BAAuC,EAAE,OAAO;AAChF,uBAAe,OAAS,UAAU,EAAE;AACpC,2BAAmB,eAAmB;AACtC,qBAAa,KAAK,SAAS,OAAO;AAClC,sBAAc,KAAK,SAAS,EAAE;AAC9B,qBAAa,GAAG,IAAI,KAAK,QAAQ,EAAE;AAC/B,yBAAe,IAAI;AACnB,qBAAU,MAAM;AAChB,uBAAa,GAAG,IAAI,YAAY,EAAE;AAC9B,0BAAc,MAAM,SAAS;AAC7B,mBAAM,QAAO;;AAEjB,eAAK,KAAK;;AAEd,eAAO;;MAEX;AACI,yBAAiB,GAAG;AACpB,oCAAwC,OAAO,MAAM,EAAE;AACvD,wCAAgC,2BAAuC,EAAE,OAAO;AAChF,uBAAe,OAAS,UAAU,EAAE;AACpC,2BAAmB,eAAmB;AACtC,qBAAa,KAAK,SAAS,OAAO;AAClC,sBAAc,KAAK,SAAS,EAAE;AAC9B,qBAAa,GAAG,IAAI,KAAK,QAAQ,EAAE;AAC/B,yBAAe,IAAI;AACnB,uBAAa,MAAM;AACnB,uBAAa,GAAG,IAAI,YAAY,EAAE;AAC9B,0BAAc,MAAM,SAAS;AAC7B,qBAAS,UAAU;;AAEvB,eAAK,KAAK;;AAEd,eAAO;;MAEX;AACI,yBAAiB,CAAC,GAAG,IAAI;AACzB,eAAO,KAAK,oBAAoB,GAAG,GAAG,EAAE,OAAO;AAC3C,uBAAa,OAAO;AACpB,iBAAO,OAAO;;;MAGtB;AACI,yBAAiB,CAAC,IAAI,IAAI;AAC1B,6BAAqB,IAAI,aAAa,EAAE;AACxC,uBAAe,KAAK,SAAS,EAAE;AAC/B,yBAAiB,KAAK,SAAS,GAAG;AAClC,qBAAa,GAAG,IAAI,OAAO,QAAQ,EAAE;AACjC,oBAAU,OAAO;AACjB,cAAI,KAAK;AACL,yBAAa,KAAK,SAAS;;AAG3B,yBAAa,KAAK,SAAS,KAAM,KAAI;;;AAG7C,eAAO,KAAK,WAAW,cAAc,EAAE,OAAO;;MAElD;AACI,yBAAiB,CAAC,GAAG,IAAI;AACzB,eAAO,KAAK,oBAAoB,GAAG,GAAG,EAAE,OAAO,oBAAoB,KAAK,MAAM,QAAQ;;MAE1F;AACI,yBAAiB,GAAG;AACpB,eAAO,OAAK,KAAK,WAAW,IAAI;;MAEpC;AACI,yBAAiB,CAAC,GAAG,UAAU;AAC/B,yBAAiB,EAAE,MAAM;AACzB,8BAAsB,KAAK,SAAS,QAAQ;AAC5C,iBAAS,QAAQ,cAAc;AAC/B,uBAAe,QAAU,UAAU,EAAE;AACrC,qBAAa,KAAK,WAAW;AAC7B,qBAAa,GAAG,IAAI,OAAO,MAAM,EAAE;AAC/B,yBAAe,OAAO,WAAW;AACjC,8BAAoB,OAAO;AAC3B,sBAAY,QAAQ,cAAc,OAAO;AACzC,gCAAsB,KAAK,WAAW;AACtC,iBAAO,OAAO,KAAK,KAAK,OAAO;;AAEnC,eAAO,OAAO;;MAElB;AACI,yBAAiB,CAAC,IAAI;AACtB,sBAAa,WAAW,OAAO,UAAU,IAAI;AAC7C,yBAAiB,aAAyB,EAAE,OAAO,YAAY;AAC/D,yBAAiB,aAAyB,SAAS,QAAQ,WAAW;AACtE,iCAAyB,qBAAiC,EAAE,OAAO,YAAY;AAC/E,iCAAyB,qBAAiC,OAAO,WAA
W;AAC5E,0BAAkB,cAA0B,kBAAkB,OAAO,WAAW;AAChF,eAAO,WAAa,EAAE,QAAQ,WAAW,UACpC,QAAQ,kBACR,MAAM,kBAAkB;;MAEjC;AACI,yBAAiB,GAAG;AACpB,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,8BAAsB,SAAS;AAC/B,+BAAuB,SAAS;AAChC,8BAAsB,SAAS;AAC/B,qCAA6B,SAAS;AACtC,sCAA8B,SAAS;AACvC,qCAA6B,SAAS;AACtC,yBAAiB,SAAS,QAAQ;AAClC,uBAAe,SAAS,QAAQ;AAChC,wBAAgB,SAAS,QAAQ;AACjC,6BAAsB,aAAa,QAAQ,OAAO,oBAC9C,OAAO;AACX,wBAAgB,KAAK,SAAS,EAAE;AAChC,uBAAe,QAAU,SAAS,UAAU,EAAE;AAC9C,2BAAmB,OAAO;AAC1B,mCAA2B,SAAS,SAAS,KAAK,SAAS,SAAS,KAChE,SAAS,SAAS,KAAK,SAAS,SAAS;AAC7C,mCAA2B,SAAS,SAAS,KAAK,SAAS,SAAS,KAAK,SAAS,SAAS;AAC3F,iCAAyB,SAAS,SAAS,KAAK,SAAS,SAAS;AAClE,iCAAyB,SAAS,SAAS;AAC3C,yBAAiB,GAAG,QAAQ,SAAS,WAAW,EAAE;AAC9C,oCAA0B,QAAQ;AAClC,mCAAyB,QAAQ,EAAE,QAAQ;AAC3C,6BAAmB,GAAG,UAAU,SAAS,YAAY,EAAE;AACnD,8BAAkB,GAAG,SAAS,SAAS,UAAU,EAAE;AAC/C,mCAAqB,SAAS,cAAc;AAC5C,8BAAgB;AAChB,qBAAO,YAAY;AACf,6BAAa;;AAEjB,gCAAkB,KAAK,IAAI,SAAS,SAAS,uBAAuB;AACpE,wCAA0B,oBAAoB,SAAS;AACvD,8BAAgB,GAAG,OAAO,SAAS,WAAW,EAAE;AAC5C,mCAAmB,OAAO,eAAe;AACzC,8BAAc;AACd,uBAAO,UAAU;AACb,6BAAW;;AAEf,gCAAgB,KAAK,IAAI,SAAS,UAAU,wBAAwB;AACpE,wCAAwB,oBAAoB,OAAO;AACnD,gCAAgB,GAAG,OAAO,SAAS,UAAU,EAAE;AAC3C,qCAAmB,OAAO,cAAc;AACxC,gCAAc;AACd,yBAAO,UAAU;AACb,+BAAW;;AAEf,kCAAgB,KAAK,IAAI,SAAS,SAAS,uBAAuB;AAElE,0CAAwB,kBAAkB,OAAO;AACjD,oCAAkB;AAClB,iCAAe;AACf,+BAAY;AACZ,oCAAkB,WAAW,SAAS,WAAW,UAAU;AACvD,yCAAqB,mBAAmB,SAAS,EAAE,QAAQ;AAC3D,oCAAgB,SAAS,OAAO,SAAS,QAAQ;AAC7C,yCAAmB,eAAe,OAAO,EAAE,QAAQ;AACnD,sCAAgB,SAAS,OAAO,SAAS,QAAQ;AAC7C,2CAAmB,aAAa,OAAO,EAAE,QAAQ;AACjD,sCAAc,QAAQ,aAAa;AACnC,4BAAK,aAAa,SAAS,QAAQ;AAC/B,wCAAc;mCAET,aAAa;AAClB,sCAAY;AACZ;;AAEJ,4BAAI,MAAM;AACN;;;AAGR,0BAAI,MAAM;AACN;;;AAGR,wBAAI,MAAM;AACN;;;AAGR,uCAAqB,kBAAkB;AACvC,6BAAW,gBACP,aAAa,QAAQ,WAAW,SAAQ;;;;;;AAMhE,eAAO,OAAO;;MAElB;AACI,yBAAiB,GAAG;AACpB,eAAO,KAAK,OAAO,GAAG,UAAU,OAAO;;MAE3C;AACI,yBAAiB,CAAC,IAAI,IAAI;AAC1B,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,8BAAsB,SAAS;AAC/B,+BAAuB,SAAS;AAChC,8BAAsB,SAAS;AAC/B,qCAA6B,SAAS;AACtC,sCAA8B,SAAS;AACvC,qCAA6B,SAAS;AACtC,yBAAiB,uBAAuB,IAAI,SAAS,QAAQ;AAC7D,wBAAgB,uBAAuB,IAAI,SAAS,QAAQ;AAC5D,uBAAe,wBAAwB,IAAI,SAAS,QAAQ;AAC5D,mBAAW,QAAU,EAAE,OAAO;AAC9B,8BAAsB,IAAK,eAAc,eAAe;AACxD,sBAAc,KAAK,WAAW;AAC9B,yBAAiB,GAAG,QAAQ,SAAS,WAAW,EAAE;AAC9C,6BAAmB,GAAG,UAAU,SAAS,YAAY,EAAE;AACnD,+BAAmB,GAAG,UAAU,SAAS,SAAS,EAAE;AAChD,+BAAiB,GAAG,QAAQ,SAAS,UAAU,EAAE;AAC7C,iCAAiB,GAAG,QAAQ,SAAS,SAAS,EAAE;AAE5C,wCAAsB,UAAU;AAChC,sCAAoB,QAAQ;AAC5B,sCAAoB,QAAQ;AAC5B,gCAAc;AACd,oCAAkB,GAAG,SAAS,sBAAsB,UAAU;AAC1D,oCAAiB,iBAAgB,UAAU;AAC3C,wBAAI,UAAU,KAAK,WAAW,SAAS,YACnC,KAAK,MAAM,aAAa;AACxB;;AAEJ,oCAAgB,GAAG,OAAO,uBAAuB,QAAQ;AACrD,oCAAe,eAAc,QAAQ;AACrC,0BAAI,QAAQ,KAAK,SAAS,SAAS,aAC/B,KAAK,MAAM,WAAW;AACtB;;AAEJ,sCAAgB,GAAG,OAAO,sBAAsB,QAAQ;AACpD,sCAAe,eAAc,QAAQ;AACrC,4BAAI,QAAQ,KAAK,SAAS,SAAS,YAC/B,KAAK,MAAM,WAAW;AACtB;;AAEJ,sCAAc,MAAM,IAAI,OAAO,SAAS,OAAO,OAAO;AACtD,mCAAW;;;;AAIvB,qBAAG,IAAI,UAAU,eAAe,OAAO,SAAS,OAAO,OAAO;;;;;;AAMlF,eAAO,GAAG;;MAEd;AACI,yBAAiB,GAAG;AACpB,eAAO,KAAK,OAAO,GAAG,UAAU,OAAO;;MAE3C;AACI,6BAAqB,QAAU,SAAS,UAAU;AAClD,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,8BAAsB,SAAS;AAC/B,+BAAuB,SAAS;AAChC,8BAAsB,SAAS;AAC/B,qCAA6B,SAAS;AACtC,sCAA8B,SAAS;AACvC,qCAA6B,SAAS;AACtC,yBAAiB,SAAS,QAAQ;AAClC,uBAAe,SAAS,QAAQ;AAChC,wBAAgB,SAAS,QAAQ;AACjC,qBAAa,KAAK,WAAW;AAC7B,yBAAiB,GAAG,QAAQ,SAAS,WAAW,EAAE;AAC9C,6BAAmB,GAAG,UAAU,SAAS,YAAY,EAAE;AACnD,8BAAkB,GAAG,SAAS,SAAS,UAAU,EAAE;AAC/C,mCAAqB,SAAS,cAAc;AAC5C,8BAAgB;AAChB,qBAAO,YAAY;AACf,6BAAa;;AAEjB,gCAAkB,KAAK,IAAI,SAAS,SAAS,uBAAuB;AACpE,8BAAgB,GAAG,OAAO,SA
AS,WAAW,EAAE;AAC5C,mCAAmB,OAAO,eAAe;AACzC,8BAAc;AACd,uBAAO,UAAU;AACb,6BAAW;;AAEf,gCAAgB,KAAK,IAAI,SAAS,UAAU,wBAAwB;AACpE,gCAAgB,GAAG,OAAO,SAAS,UAAU,EAAE;AAC3C,qCAAmB,OAAO,cAAc;AACxC,gCAAc;AACd,yBAAO,UAAU;AACb,+BAAW;;AAEf,kCAAgB,KAAK,IAAI,SAAS,SAAS,uBAAuB;AAElE,iCAAe,OAAO;AACtB,oCAAkB;AAClB,oCAAkB,WAAW,SAAS,WAAW,UAAU;AACvD,mCAAe,SAAS;AACxB,oCAAgB,SAAS,OAAO,SAAS,QAAQ;AAC7C,mCAAa,OAAO;AACpB,sCAAgB,SAAS,OAAO,SAAS,QAAQ;AAC7C,qCAAa,OAAO;AACpB,sCAAc,KAAK,IAAI,OAAO,QAAQ,MAAM,MAAM;AAClD,4BAAI,SAAS;AACT,qCAAW;AACX,wCAAc,SAAS,wBACnB,uBACA,OAAO,wBAAwB;;;;;AAKnD,+BAAa,IAAI,aAAa,OAAO,QAAQ,MAAM,MAAM;;;;;;AAM7E,eAAO,aAAa;;MAExB;AACI,yBAAiB,CAAC,GAAG,IAAI;AACzB,6BAAqB,KAAK,mBAAmB,GAAG;AAChD,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,8BAAsB,SAAS;AAC/B,+BAAuB,SAAS;AAChC,8BAAsB,SAAS;AAC/B,qCAA6B,SAAS;AACtC,sCAA8B,SAAS;AACvC,qCAA6B,SAAS;AACtC,yBAAiB,uBAAuB,IAAI,SAAS,QAAQ;AAC7D,wBAAgB,uBAAuB,IAAI,SAAS,QAAQ;AAC5D,uBAAe,wBAAwB,IAAI,SAAS,QAAQ;AAC5D,mBAAW,QAAU,EAAE,OAAO;AAC9B,0BAAkB,KAAK,WAAW;AAClC,sBAAc,KAAK,WAAW;AAC9B,yBAAiB,GAAG,QAAQ,SAAS,WAAW,EAAE;AAC9C,6BAAmB,GAAG,UAAU,SAAS,YAAY,EAAE;AACnD,+BAAmB,GAAG,UAAU,SAAS,SAAS,EAAE;AAChD,+BAAiB,GAAG,QAAQ,SAAS,UAAU,EAAE;AAC7C,iCAAiB,GAAG,QAAQ,SAAS,SAAS,EAAE;AAE5C,wCAAsB,UAAU;AAChC,sCAAoB,QAAQ;AAC5B,sCAAoB,QAAQ;AAC5B,gCAAc;AACd,oCAAkB,GAAG,SAAS,sBAAsB,UAAU;AAC1D,oCAAiB,iBAAgB,UAAU;AAC3C,wBAAI,UAAU,KAAK,WAAW,SAAS,YACnC,KAAK,MAAM,aAAa;AACxB;;AAEJ,oCAAgB,GAAG,OAAO,uBAAuB,QAAQ;AACrD,oCAAe,eAAc,QAAQ;AACrC,0BAAI,QAAQ,KAAK,SAAS,SAAS,aAC/B,KAAK,MAAM,WAAW;AACtB;;AAEJ,sCAAgB,GAAG,OAAO,sBAAsB,QAAQ;AACpD,sCAAe,eAAc,QAAQ;AACrC,4BAAI,QAAQ,KAAK,SAAS,SAAS,YAC/B,KAAK,MAAM,WAAW;AACtB;;AAEJ,uCAAe,uBACX,wBAAwB,uBACxB,IACA,UAAU,IAAI,OAAO,SAAS,OAAO,OAAO;AAChD,uCAAe,SAAS,wBAAwB,uBAC5C,OAAO,uBAAuB;AAClC,qCAAa,WAAW,SAAS,IAAI;AACrC,4BAAI,SAAS;AACT;;AAEJ,sCAAc,MAAM,IAAI,OAAO,SAAS,OAAO,OAAO;AACtD,mCAAW,QAAQ;;;;AAI/B,qBAAG,IAAI,SAAS,OAAO,SAAS,OAAO,OAAO;;;;;;AAMlE,eAAO,GAAG;;MAEd;AACI,yBAAiB,GAAG;AACpB,0DAAkD,EAAE;AACpD,wBAAgB,KAAK,SAAS,EAAE;AAChC,uBAAe,IAAI,aAAa,eAAmB,CAAC,OAAO,WAAW,UAAU;AAChF,mCAA2B;UACtB,gBAAgB,YAAY,IAAK,YAAY,IAAI;UACjD,gBAAgB,WAAW,IAAK,WAAW,IAAI;;AAEpD,oCAA4B;UACvB,gBAAgB,YAAY,IAAK,YAAY,IAAI;UACjD,gBAAgB,WAAW,IAAK,WAAW,IAAI;;AAEpD,wBAAgB;AAChB,sCAA8B,mBAAmB,KAAK,oBAAoB;AAC1E,sCAA8B,mBAAmB,KAAK,oBAAoB;AAC1E,qBAAa,GAAG,IAAI,OAAO;AACvB,uBAAa,GAAG,IAAI,WAAW;AAC3B,kCAAsB,wBAAwB;AAC9C,mCAAuB,KAAK,MAAM;AAClC,4BAAgB,gBAAgB;AAChC,kCAAsB,KAAK,IAAI,YAAY,GAAG,KAAK,KAAK;AACxD,iCAAqB,IAAI,EAAE,QAAQ,KAAK,iBAAiB,EAAE,QAAQ;AACnE,iCAAqB,IAAI,EAAE,QAAQ,KAAK,gBAAgB,EAAE,QAAQ;AAClE,yBAAa,GAAG,IAAI,UAAU;AAC1B,oCAAsB,wBAAwB;AAC9C,qCAAuB,KAAK,MAAM;AAClC,8BAAgB,gBAAgB;AAChC,oCAAsB,KAAK,IAAI,WAAW,GAAG,KAAK,KAAK;AACvD,oCAAsB,eAAe,iBAAiB,EAAE,QAAQ;AAChE,oCAAsB,eAAe,iBAAiB,EAAE,QAAQ;AAChE,qCAAuB,eAAe,gBAAgB,EAAE,QAAQ;AAChE,qCAAuB,eAAe,gBAAgB,EAAE,QAAQ;AAChE,2BAAa,GAAG,IAAI,aAAa;AAG7B,gCAAgB,QAAQ,gBAAgB;AACxC,mCAAmB,QAAQ,gBAAgB;AAC3C,iCAAiB,QAAQ,iBAAiB;AAC1C,oCAAoB,QAAQ,iBAAiB;AAC7C,4BAAY,UAAW,YAAW,WAAW;AAC7C,+BAAe,aAAc,eAAc,cAAc;AACzD,iCAAiB,MAAO,UAAS,OAAO;AACxC,uBAAO,eAAe;;;;;AAKtC,eAAO,QAAU,QAAQ,CAAC,OAAO,WAAW,UAAU;;MAE1D;AACI,yBAAiB,CAAC,IAAI,IAAI;AAC1B,gDAAwC,EAAE;AAC1C,oCAA4B,GAAG;AAC/B,uBAAe,IAAI,aAAa,QAAQ,UAAU,SAAS;AAK3D,+BAAuB;UAClB,gBAAgB,UAAU,IAAK,UAAU,IAAI;UAC7C,gBAAgB,SAAS,IAAK,SAAS,IAAI;;AAEhD,+BAAuB;UAClB,gBAAgB,UAAU,IAAK,UAAU,IAAI;UAC7C,gBAAgB,SAAS,IAAK,SAAS,IAAI;;AAEhD,4BAAoB,eAAe,KAAK,eAAe;AACvD,2BAAmB,eAAe,KAAK,eAAe;AAItD,yBAAiB,KAAK,SAAS,GAAG;AAClC,qBAAa;AACb,qBAAa,GAAG,IAAI,OAAO;AACvB,0BAAgB,IAAI,EAAE,QAAQ;AAC9B,uBAAa,GAAG,IAAI,SAAS;AACzB,wBAAY,IAAI;AAChB,gCAAoB,KAAK,MAAM;AAC/B,mC
AAuB,KAAK,IAAI,KAAK,KAAK,MAAM,UAAU;AAC1D,iCAAqB,UAAU,cAAc,EAAE,QAAQ;AACvD,oCAAwB,UAAU,iBAAiB,EAAE,QAAQ;AAC7D,4BAAgB,MAAM;AACtB,mCAAuB,IAAM;AAC7B,yBAAa,GAAG,IAAI,QAAQ;AACxB,0BAAY,IAAI;AAChB,mCAAqB,KAAK,MAAM;AAChC,oCAAsB,KAAK,IAAI,KAAK,KAAK,MAAM,SAAS;AACxD,8BAAgB,MAAM;AACtB,qCAAuB,IAAM;AAC7B,sCAAwB,eAAe,eAAe,EAAE,QAAQ;AAChE,uCAAyB,eAAe,gBAAgB,EAAE,QAAQ;AAClE,yCAA2B,kBAAkB,eAAe,EAAE,QAAQ;AACtE,0CAA4B,kBAAkB,gBAAgB,EAAE,QAAQ;AACxE,wDAA0C,iBAAiB;AAC3D,iDAAmC,iBAAiB;AACpD,iDAAmC,UAAU;AAC7C,0CAA4B,UAAU;AACtC,2BAAa,GAAG,IAAI,OAAO;AACvB,8BAAc,SAAS;AACvB,uBAAO,kBAAkB,MACrB,QAAQ;AACZ,uBAAO,mBAAmB,MAAM,QAAQ;AACxC,uBAAO,qBAAqB,MACxB,QAAQ;AACZ,uBAAO,sBAAsB,MAAM,QAAQ;;;;;AAK3D,eAAO,SAAY,QAAQ,CAAC,OAAO,QAAQ,SAAS,QAAQ,EAAE;;MAElE;AACI,yBAAiB,GAAG;AACpB,0DAAkD,EAAE;AACpD,wBAAgB,KAAK,SAAS,EAAE;AAChC,uBAAe,IAAI,aAAa,QAAQ,YAAY,WAAW;AAC/D,mCAA2B;UACtB,gBAAgB,YAAY,IAAK,YAAY,IAAI;UACjD,gBAAgB,WAAW,IAAK,WAAW,IAAI;;AAEpD,oCAA4B;UACvB,gBAAgB,YAAY,IAAK,YAAY,IAAI;UACjD,gBAAgB,WAAW,IAAK,WAAW,IAAI;;AAEpD,sCAA8B,mBAAmB,KAAK,oBAAoB;AAC1E,sCAA8B,mBAAmB,KAAK,oBAAoB;AAC1E,2BAAmB;AACnB,qBAAa,GAAG,IAAI,OAAO;AACvB,8BAAoB,IAAI,EAAE,QAAQ;AAClC,uBAAa,GAAG,IAAI,WAAW;AAC3B,kCAAsB,wBAAwB;AAC9C,qCAAyB,KAAK,IAAI,YAAY,GAAG,eAAe,KAAK,MAAM,iBACvE,KAAK,MAAM;AACf,8BAAkB,cAAc,mBAAmB,EAAE,QAAQ;AAC7D,yBAAa,GAAG,IAAI,UAAU;AAC1B,oCAAsB,wBAAwB;AAC9C,uCAAyB,KAAK,IAAI,WAAW,GAAG,eAAe,KAAK,MAAM,iBACtE,KAAK,MAAM;AACf,gCAAkB,YAAY,mBAAmB,EAAE,QAAQ;AAC3D,2BAAa,GAAG,IAAI,aAAa;AAG7B,+BAAe,QAAQ,YAAY;AACnC,uBAAO,kBAAkB;;;;;AAKzC,eAAO,QAAU,QAAQ,CAAC,OAAO,WAAW,UAAU,cAAc,EAAE;;MAE1E;AACI,yBAAiB,CAAC,IAAI,IAAI;AAC1B,gDAAwC,EAAE;AAC1C,oCAA4B,GAAG;AAC/B,uBAAe,IAAI,aAAa,QAAQ,UAAU,SAAS;AAC3D,yBAAiB,KAAK,SAAS,GAAG;AAGlC,+BAAuB;UAClB,gBAAgB,UAAU,IAAK,UAAU,IAAI;UAC7C,gBAAgB,SAAS,IAAK,SAAS,IAAI;;AAEhD,+BAAuB;UAClB,gBAAgB,UAAU,IAAK,UAAU,IAAI;UAC7C,gBAAgB,SAAS,IAAK,SAAS,IAAI;;AAEhD,4BAAoB,eAAe,KAAK,eAAe;AACvD,2BAAmB,eAAe,KAAK,eAAe;AACtD,+BAAuB,IAAI;AAC3B,8BAAsB,IAAI;AAG1B,0BAAmB,KAAK,KAAK,kBAAkB,IAAK;AACpD,yBAAkB,KAAK,KAAK,iBAAiB,IAAK;AAElD,qBAAa,GAAG,IAAI,OAAO;AACvB,8BAAoB,IAAI,EAAE,QAAQ;AAClC,uBAAa,GAAG,IAAI,SAAS;AACzB,8BAAkB,cAAc,IAAI,EAAE,QAAQ;AAE9C,+BAAmB,KAAK,MAAM,IAAI;AAClC,6BAAiB,KAAK,MAAM,aAAc,YAAY;AACtD,yBAAa,GAAG,IAAI,QAAQ;AACxB,gCAAkB,YAAY,IAAI,EAAE,QAAQ;AAE5C,iCAAmB,KAAK,MAAM,IAAI;AAClC,+BAAiB,KAAK,MAAM,aAAc,WAAW;AACrD,2BAAa,GAAG,IAAI,OAAO;AACvB,4BAAY;AAEZ,oCAAoB,GAAG,WAAW,WAAW;AACzC,8BAAY,WAAW;AAEvB,sBAAI,MAAM,KAAK,OAAO;AAClB;;AAEJ,oCAAkB,cAAc,MAAM,GAAG,QAAQ;AACjD,wCAAsB,MAAM;AAC5B,2CAAyB,KAAK,IAAI,UAAU,GAAG,eAAe,KAAK,MAAM,iBACrE,KAAK,MAAM;AACf,sBAAI,MAAM;AACN;;AAEJ,sCAAoB,GAAG,WAAW,UAAU;AACxC,gCAAY,WAAW;AAEvB,wBAAI,MAAM,KAAK,OAAO;AAClB;;AAEJ,sCAAkB,YAAY,MAAM,GAAG,QAAQ;AAC/C,0CAAsB,MAAM;AAC5B,6CAAyB,KAAK,IAAI,SAAS,GAAG,eAAe,KAAK,MAAM,iBACpE,KAAK,MAAM;AACf,wBAAI,MAAM;AACN,+BAAS,SAAS,YAAY;;;;AAI1C,uBAAO,YAAY,KAAK;;;;;AAKxC,eAAO,SAAY,QAAQ,EAAE,OAAO,EAAE;;MAE1C;AACI,yBAAiB,GAAG;AACpB,yBAAiB,EAAE,MAAM;AACzB,qBAAa,WAAW;AACxB,wBAAgB,KAAK,SAAS,EAAE;AAChC,qBAAa,EAAE;AACf,uBAAe,IAAI,aAAa;AAChC;AACI,iCAAuB,SAAS;AAChC,+BAAqB,SAAS,iBAAiB,KAAK,IAAI,GAAG,iBAAiB;AAC5E,+BAAqB,SAAS,iBAC1B,KAAK,IAAI,iBAAiB,aAAa;AAC3C,qBAAU;AACV,iBAAO,kBAAkB,cAAc;AACnC,sBAAU,QAAQ;AAClB,oBAAO,IAAI;;AAEf,iBAAO;;AAEX,0BAAkB,GAAG,SAAS,MAAM;AAChC,uBAAY,kBAAkB;AAC9B,sBAAY,QAAQ,UAAU,KAAK,IAAI,OAAO,QAAQ,MAAK,CAAC;AAC5D,iBAAO,UAAU;;AAErB,eAAO,SAAY,QAAQ,EAAE;;MAEjC;AACI,yBAAiB,IAAI;AACrB,yBAAiB,GAAG,MAAM;AAC1B,yBAAiB,KAAK,SAAS,GAAG;AAClC,iCAAyB,KAAK,SAAS,WAAW;AAClD,kCAA0B,KAAK,SAAS,YAAY;AACpD,uBAAe,IAAI,aAAa,GAAG;AACnC,qBAAa,GAAG;AAChB,0BAAkB,GAAG,SAAS,MAAM;AAChC,iCAAuB,SAAS;AAChC,6BAAoB,SAAS,iBAAkB,KAAK,
ACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;;AAEJ,+BAA2B;AACvB,sBAAe;;ACxMnB;;;;;;;;;;;;;;;;ACAA;;;;;;;;;;;;;;;;AAgBA,qBAAiB;AACjB,6BAAyB;MACrB,OAAO;MACP,WAAW;MACX,oBAAoB;MACpB,uBAAuB;MACvB,OAAO;MACP,SAAS;MACT,8BAA8B;;AAE3B;AACH,aAAO,SAAS;;AAEb;AACH,eAAS,gBAAgB;;AAEtB;AACH,UAAI,CAAE,iBAAgB;AAClB,uBAAe,yBAAyB;AACxC,YAAI,WAAW;AACX,mBAAS,gBAAgB;;AAGzB,kBAAQ,IAAI,2CAA2C;AACvD,iBAAO;;;AAGf,iBAAW,SAAS;AACpB,UAAI,GAAG;AACH,eAAO,SAAS;AAChB,eAAO,gBAAgB;;AAE3B,SAAG,QAAQ,GAAG;AACd,SAAG,QAAQ,GAAG;AACd,SAAG,QAAQ,GAAG;AACd,SAAG,QAAQ,GAAG;AACd,SAAG,QAAQ,GAAG;AACd,SAAG,QAAQ,GAAG;AACd,SAAG,OAAO,GAAG;AACb,SAAG,OAAO,GAAG;AACb,SAAG,SAAS,GAAG;AACf,aAAO,SAAS;;AAEpB;AACI,UAAI,OAAO,oBAAoB,eAAe,iBAAiB;AAC3D,eAAO,IAAI,gBAAgB,KAAK;iBAE3B,OAAO,aAAa;AACzB,eAAO,SAAS,cAAc;;AAG9B,cAAM,IAAI,MAAM;;;AAGxB;AACI,UAAI,iBAAiB,KAAK,iBAAiB;AACvC,cAAM,IAAI,MAAM;;AAEpB,qBAAe,aAAa;AAC5B,aAAO,iBAAiB,oBAAoB;AACxC,WAAG;AACH,eAAO,SAAS;SACjB;AACH,UAAI,iBAAiB;AACjB,eAAQ,OAAO,WAAW,SAAS,qBAC/B,OAAO,WAAW,sBAAsB;;AAEhD,aAAO,OAAO,WAAW,UAAU;;ACnFvC;;;;;;;;;;;;;;;;AAiBO;AACP,IAAC;AAgBG,qBAAc,eAAc,WAAW,KAAK;AAiB5C,qBAAc,eAAc,kBAAkB,KAAK;OACpD,iBAAkB,iBAAgB;AAC9B;AACP,IAAC;AACG,oBAAa,cAAa,YAAY,KAAK;AAC3C,oBAAa,cAAa,YAAY,KAAK;AAC3C,oBAAa,cAAa,YAAY,KAAK;AAC3C,oBAAa,cAAa,cAAc,KAAK;OAC9C,gBAAiB,gBAAe;AAC5B;AACP,IAAC;AACG,2BAAoB,qBAAoB,sBAAsB,KAAK;AACnE,2BAAoB,qBAAoB,sBAAsB,KAAK;AACnE,2BAAoB,qBAAoB,8BAA8B,KAAK;AAC3E,2BAAoB,qBAAoB,wBAAwB,KAAK;AACrE,2BAAoB,qBAAoB,wBAAwB,KAAK;OACtE,uBAAwB,uBAAsB;AAC1C;AACH,aAAO,CAAC,SAAS;;AAEd;AACH,aAAO,aAAa;;AAEjB;AACH,aAAO,CAAC,UAAU,GAAG;;AAKlB;AACH,mBAAa,eAAmB;AAChC,2BAAqB,KAAK,KAAK,OAAO;AACtC,aAAO,qBAAyB;;AAE7B;AACH,UAAI,eAAe,uBAAuB;AACtC,cAAM,IAAI,MAAM,iBAAiB,uCAC1B;;AAEX,aAAO,eAAe;;AAEnB;AACH,2BAAqB,cAAc,SAAS,WAAW;AACvD,UAAI,OAAO,SAAS;AAChB,cAAM,IAAI,MAAM,kBAAkB,OAAO,sBAAsB;;AAEnE,gBAAU;AACV,qBAAe,GAAG,MAAM,cAAc,QAAQ,OAAO;AACjD,qBAAa,GAAG,IAAI,UAAU;AAC1B,iBAAO,SAAS,cAAc,MAAM;;;;AAIzC;AACH,aAAO;QACH,KAAK,IAAI,GAAG,KAAK,KAAK,UAAU;QAAK,KAAK,IAAI,GAAG,KAAK,KAAK,OAAO;;;AAGnE;AACH,qBAAe,uCAAuC,MAAM;AAC5D,aAAO,IAAI,IAAI;;AAEZ;AAIH,oBAAc;AACd;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,UAAI,OAAM,UAAU,qBAAqB;AACrC,8BAAsB,MAAM;AAC5B,kCAA0B,MAAM;AAChC,wCAAgC,MAAM;AACtC,oCAA4B,MAAM;AAClC,6BAAqB,MAAM;AAC3B,oCAA4B;AAC5B,6BAAqB;AACrB,+BAAuB,MAAM;AAC7B,2BAAmB,MAAM;;AAGzB,8BAAsB,GAAG;AACzB,kCAA0B,GAAG;AAC7B,wCAAgC,GAAG;AACnC,oCAA4B,MAAM;AAClC,6BAAqB,GAAG;AACxB,oCAA4B;AAC5B,6BAAqB;AACrB,+BAAuB,6BAA6B,OAChD,0BAA0B,iBAC1B;AACJ,2BAAmB,GAAG;;AAE1B,8BAAwB,GAAG;AAC3B,aAAO;QACH;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;;;ACnKR;;;;;;;;;;;;;;;;AAmBO;AACH,0BAAoB;AACpB,UAAI,OAAM,QAAQ;AACd,wBAAgB;;AAEpB,aAAO;;AAEX;AACI,oBAAc,GAAG;AACjB,UAAI,UAAU,GAAG;AACb,cAAM,IAAI,MAAM,kBAAkB,qBAAqB,IAAI;;;AAInE,wBAAoB;AACpB,wBAAoB;AACb;AACH,UAAI,OAAM,QAAQ,mCAAmC,QAAQ,KACxD,cAAc,KAAK,IAAI,QAAQ,KAAK,IAAI,OAAO;AAChD,eAAO;;AAEX,aAAO;;AAEJ;AACH,cAAQ;aACC,GAAG;AACJ,iBAAO;aACN,GAAG;AACJ,iBAAO;aACN,GAAG;AACJ,iBAAO;aACN,GAAG;AACJ,iBAAO;aACN,GAAG;AACJ,iBAAO;aACN,GAAG;AACJ,iBAAO;aACN,GAAG;AACJ,iBAAO;;AAEP,iBAAO,sBAAsB;;;AAGlC;AACH,aAAO,YAAY,IAAI,MAAM,GAAG,aAAa,gBAAgB,gBAAgB,gBAAgB;;AAE1F;AACH,2BAAqB,YAAY,IAAI,MAAM,GAAG,aAAa,GAAG,gBAAgB;AAC9E,mBAAa,IAAI,MAAM,GAAG,aAAa,cAAc;AACrD,mBAAa,IAAI,MAAM,GAAG,cAAc;AACxC,UAAI,GAAG,mBAAmB,cAAc,GAAG,oBAAoB;AAC3D,gBAAQ,IAAI,GAAG,iBAAiB;AAChC,cAAM,IAAI,MAAM;;AAEpB,aAAO;;AAEJ;AACH,6BAAuB,YAAY,IAAI,MAAM,GAAG,aAAa,GAAG,kBAAkB;AAClF,mBAAa,IAAI,MAAM,GAAG,aAAa,gBAAgB;AACvD,mBAAa,IAAI,MAAM,GAAG,cAAc;AACxC,UAAI,GAAG,mBAAmB,gBAAgB,GAAG,oBA
AoB;AAC7D,kCAA0B,sBAAsB,GAAG,iBAAiB;AACpE,cAAM,IAAI,MAAM;;AAEpB,aAAO;;AAEX,4BAAwB;AACxB;AACI,oCAA8B,gBAAgB,KAAK;AACnD,UAAI,yBAAyB;AACzB,gBAAQ,IAAI,wCAAwC;AACpD,gBAAQ,IAAI;AACZ;;AAEJ,yBAAmB,CAAC,sBAAsB;AAC1C,0BAAoB,aAAa,MAAM;AACvC,mBAAY,YAAY,OAAO,WAAW,SAAS;AACnD,mCAA6B,YAAY,IAAI,uBAAsB,UAAe,eAAa,GAAG,YAAY,QAAO;AACrH,0BAAoB;AACpB,mBAAa,GAAG,IAAI,qBAAqB,QAAQ;AAC7C,wBAAgB,KAAK,IAAI,qBAAqB,GAAG,QAAQ;;AAE7D,+BAAyB,qBAAqB,MAAM,GAAG,aAAa;AACpE,wBAAkB,qBAAqB,MAAM,aAAa,GAAG;AAC7D,8BAAwB,qBAAqB,MAAM;AACnD,cAAQ,IAAI,iBAAiB,KAAK;AAClC,cAAQ,IAAI,cAAc,MAAM,MAAM;AACtC,cAAQ,IAAI,MAAM,UAAc,UAAU,IAAI,kBAAkB;AAChE,cAAQ,IAAI,gBAAgB,KAAK;;AAE9B;AACH,aAAO,YAAY,IAAI,MAAM,GAAG,iBAAiB;;AAE9C;AACH,mBAAa,IAAI,MAAM,GAAG,YAAY;AACtC,UAAI,GAAG,oBAAoB,SAAS,GAAG,iBAAiB;AACpD,gBAAQ,IAAI,GAAG,kBAAkB;AACjC,cAAM,IAAI,MAAM;;;AAGjB;AACH,mBAAa,IAAI,MAAM,GAAG,gBAAgB;AAC1C,UAAI,GAAG,oBAAoB,SAAS,GAAG,qBAAqB;AACxD,gBAAQ,IAAI,GAAG,kBAAkB;AACjC,cAAM,IAAI,MAAM;;;AAGjB;AACH,sBAAe,YAAY,IAAI,MAAM,GAAG,gBAAgB;AACxD,mBAAa,IAAI,MAAM,GAAG,WAAW,GAAG,cAAc;AACtD,mBAAa,IAAI,MAAM,GAAG,WAAW,GAAG,cAAc,OAAM,GAAG;AAC/D,aAAO;;AAEJ;AACH,sBAAe,YAAY,IAAI,MAAM,GAAG,gBAAgB;AACxD,mBAAa,IAAI,MAAM,GAAG,WAAW,GAAG,sBAAsB;AAC9D,mBAAa,IAAI,MAAM,GAAG,WAAW,GAAG,sBAAsB,OAAM,GAAG;AACvE,aAAO;;AAEJ;AACH,UAAI,OAAM,UAAU,qBAAqB;AACrC,eAAO;;AAEX,aAAO;;AAEJ;AACH,aAAO,YAAY,IAAI,MAAM,GAAG,iBAAiB;;AAE9C;AACH,6BAAuB,OAAM,UAAU;AACvC,UAAK,SAAS,KAAO,UAAU;AAC3B,0BAAkB,IAAI,SAAS;AAC/B,cAAM,IAAI,MAAM,4BAA4B,YAAY;;AAE5D,UAAK,QAAQ,kBAAoB,SAAS;AACtC,0BAAkB,IAAI,SAAS;AAC/B,qBAAY,IAAI,kBAAkB;AAClC,cAAM,IAAI,MAAM,4BAA4B,YACxC,uDAAuD,OAAM;;;AAGlE;AACH,aAAO,YAAY,IAAI,MAAM,GAAG,qBAAqB;;AAElD;AACH,kBAAY,GAAG,kBAAkB,SAAS;AAC1C,UAAI,QAAQ;AAGR,eAAO;;AAEX,mBAAa,IAAI,MAAM,GAAG,WAAW,GAAG,cAAc;AACtD,mBAAa,IAAI,MAAM,GAAG,oBAAoB,KAAK,qBAAqB,GAAG,OAAO,OAAO,mBAAmB;AAC5G,mBAAa,IAAI,MAAM,GAAG,wBAAwB;AAClD,aAAO;;AAEJ;AACH,0BAAoB,IAAI;AACxB,mBAAa,IAAI,MAAM,GAAG,cAAc,GAAG,WAAW;AACtD,mBAAa,IAAI,MAAM,GAAG,YAAY,GAAG,YAAY;;AAElD;AACH,0BAAoB,IAAI;AACxB,mBAAa,IAAI,MAAM,GAAG,cAAc,GAAG,WAAW;AACtD,mBAAa,IAAI,MAAM,GAAG,YAAY,GAAG,YAAY;;AAElD;AACH,aAAO,YAAY,IAAI,MAAM,GAAG,mBAAmB,SAAS,cAAc,cAAc,cAAc;;AAEnG;AACH,aAAO,GAAG,mBAAmB,SAAS;;AAEnC;AACH,mBAAa,IAAI,MAAM,gBAAgB,IAAI,SAAS;AACpD,mBAAa,IAAI,MAAM,GAAG,UAAU,wBAAwB;;AAEzD;AACH,mBAAa,IAAI,MAAM,GAAG,gBAAgB,GAAG,aAAa;AAC1D,mBAAa,IAAI,MAAM,GAAG,SAAS,GAAG,GAAG,GAAG,OAAO,OAAO,GAAG,OAAO;AACpE,mBAAa,IAAI,MAAM,GAAG,QAAQ,GAAG,GAAG,GAAG,OAAO,OAAO,GAAG,OAAO;;AAEhE;AACH,mBAAa,IAAI,MAAM,GAAG,gBAAgB,GAAG,aAAa;AAC1D,mBAAa,IAAI,MAAM,GAAG,qBAAqB,GAAG,aAAa,GAAG,mBAAmB,GAAG,YAAY,SAAS;;AAE1G;AACH,mBAAa,IAAI,MAAM,GAAG,gBAAgB,GAAG,aAAa;AAC1D,mBAAa,IAAI,MAAM,GAAG,qBAAqB,GAAG,aAAa,GAAG,mBAAmB,GAAG,YAAY,MAAM;;AAEvG;AACH,qBAAe,GAAG,uBAAuB,GAAG;AAC5C,UAAI,WAAW,GAAG;AACd,cAAM,IAAI,MAAM,gCAAgC,2BAA2B,IAAI;;;AAGhF;AACH,cAAQ;aACC,GAAG;AACJ,iBAAO;aACN,GAAG;AACJ,iBAAO;aACN,GAAG;AACJ,iBAAO;aACN,GAAG;AACJ,iBAAO;;AAEP,iBAAO,iBAAiB;;;AAGpC;AACI,sBAAgB,aAAa,IAAI,MAAM;AACvC,UAAI,WAAW;AACX,cAAM,IAAI,MAAM;;AAEpB,aAAO;;AAEX;AACI,6BAAuB,GAAG,mCAAmC;AAC7D,4BAAsB,cAAc,GAAG;AACvC,UAAI,gBAAgB,GAAG,YAAY,gBAAgB;AAC/C,iCAAyB,2BAA2B;AACpD,cAAM,IAAI,MAAM,0BAA0B;;;AAG3C,6CAAyC;AAC5C,aAAO,eAAmB,MAAM,MAAM,GAAG,MAAM,SAAS;;AAErD;AACH,UAAI,MAAM,WAAW;AACjB,cAAM,MAAM;;AAEhB,aAAO;QACH,MAAM,SAAS,IAAI,MAAM,MAAM,SAAS,KAAK;QAAG,MAAM,MAAM,SAAS;;;AAGtE;AACH,sBAAgB,CAAC,GAAG,GAAG;AACvB,uBAAiB,MAAM,WAAW,KAAM,MAAM,WAAW,KAAK,MAAM,OAAO;AAC3E,UAAI,CAAC;AACD,oBACI,CAAC,YAAY,QAAQ,GAAG,YAAY;;AAE5C,aAAO;;AAEJ,kEAA8D;AACjE,uBAAiB,OAAM,UAAU;AACjC,UAAI;AACA,qBAAa,aAAa;AAM1B,mBAAW,SAAS,IAAI,UAAU,KAAK,SAAS,SAAS,IACrD,mBAAuB,SAAS,MAChC,SAAS;AAGb,YAAI,SAAS,W
AAW;AACpB,qBAAW,CAAC,GAAG,SAAS;;;AAIhC,UAAI,SAAS,WAAW;AACpB,8BAAsB,cAAkB;AACxC,mBAAW,cAAc;;AAE7B,iBAAW,eAAmB;AAC9B,UAAI,SAAS,UAAU,KAAK,QAAQ;AAChC,eAAO,CAAC,GAAG;iBAEN,SAAS,WAAW,KAAK,SAAS,MAAM,cAC7C,SAAS,MAAM;AACf,eAAO;iBAEF,SAAS,WAAW,KAAK,SAAS,KAAK,SAAS,MAAM,cAC3D,SAAS,MAAM;AACf,eAAO,CAAC,SAAS,KAAK,SAAS,IAAI,SAAS;iBAEvC,SAAS,WAAW,KAAK,SAAS,MAAM,cAC7C,SAAS,KAAK,SAAS,MAAM;AAC7B,eAAO,CAAC,SAAS,IAAI,SAAS,KAAK,SAAS;iBAEvC,SAAS,WAAW,KACzB,SAAS,KAAK,SAAS,KAAK,SAAS,MAAM,cAC3C,SAAS,MAAM;AACf,eAAO,CAAC,SAAS,KAAK,SAAS,KAAK,SAAS,IAAI,SAAS;iBAErD,SAAS,WAAW,KAAK,SAAS,MAAM,cAC7C,SAAS,KAAK,SAAS,KAAK,SAAS,MAAM;AAC3C,eAAO,CAAC,SAAS,IAAI,SAAS,KAAK,SAAS,KAAK,SAAS;;AAG1D,YAAI;AAMA,2BAAiB,YAAY;AAC7B,qBAAW,UAAU;AACrB,cAAI,SAAS;AACT,aAAC,MAAM,QAAQ,YAAY;;AAE/B,iBAAO,WAAY,QAAO,KAAM,QAAO;AACvC,iBAAO,qBAAyB,MAAM,IAAI,OAAK,IAAI;;AAEvD,eAAO,qBAAyB;;;AAGxC;AACI,aAAO,IAAI,MAAM;;AAMd;AACH,eAAS,OAAO,MAAM;AACtB,eAAS,OAAO,MAAM;AACtB,UAAI,aAAiB,QAAQ;AACzB,eAAO;;AAEX,UAAI,CAAC,OAAO,UAAU,CAAC,OAAO;AAC1B,eAAO;;AAEX,UAAI,OAAO,OAAO,KAAK,OAAO,OAAO,KAAK,OAAO,OAAO,KACpD,OAAO,OAAO;AACd,eAAO;;AAEX,UAAI,OAAO,WAAW,OAAO;AACzB,2BAAmB,OAAO,MAAM,IAAI;AACpC,2BAAmB,OAAO,MAAM,IAAI;AACpC,YAAI,eAAe;AACf,iBAAO;;AAEX,YAAI,OAAO,eAAe,OAAO,eAC5B,QAAO,OAAO,KAAK,OAAO,OAAO;AAClC,iBAAO;;;AAGf,aAAO,OAAO,OAAO,OAAO,MAAM,OAAO,OAAO,OAAO,OAAO,OAAO;;AAKzE;AACA;AACO;AACH,UAAI,oBAAoB;AACpB,mBAAW,gBAAgB;AAC3B,2BAAmB,GAAG,aAAa,GAAG;;AAE1C,aAAO;;AAEJ;AACH,yBAAmB;;AAEhB;AACH,+BAAyB;;AAEtB;AACH,UAAI,0BAA0B;AAC1B,mBAAW,gBAAgB;AAC3B,iCAAyB,GAAG,aAAa,GAAG;;AAGhD,aAAO,KAAK,IAAI,IAAI;;AAEjB;AACH,UAAI,iBAAiB;AACjB,eAAO;;AAEX;AACA,iBAAW,gBAAgB;AAC3B,UAAI,aAAa,IAAI,sCACjB,iBAAiB;AACjB,4BAAoB;iBAEf,aAAa,IAAI;AACtB,4BAAoB;;AAGpB,4BAAoB;;AAExB,aAAO;;AAEJ;AACH,kBAAY,GAAG,aAAa;AAC5B,aAAO,OAAO;;AAEX;AACH;AACI,mBAAW,gBAAgB;AAC3B,YAAI,MAAM;AACN,iBAAO;;;AAIX,gBAAQ,IAAI,sCAAsC;AAClD,eAAO;;AAEX,aAAO;;AAEJ;AACH,UAAI,iBAAiB;AACjB,eAAO;;AAEX,iBAAW,gBAAgB;AAC3B,UAAI,iBAAiB;AACjB,YAAI,CAAC,aAAa,IAAI;AAClB,iBAAO;;;AAIX,YAAI,CAAC,aAAa,IAAI;AAClB,iBAAO;;;AAGf,oCAA8B,uCAAuC;AACrE,aAAO;;AAWJ;AACH,UAAI,iBAAiB;AACjB,eAAO;;AAEX,iBAAW,gBAAgB;AAC3B,UAAI,iBAAiB;AACjB,YAAI,CAAC,aAAa,IAAI;AAClB,iBAAO;;AAEX,YAAI,CAAC,aAAa,IAAI;AAClB,iBAAO;;;AAIX,YAAI,aAAa,IAAI;AACjB,iBAAO,uCAAuC;;AAElD,wCAAgC;AAChC,YAAI,aAAa,IAAI;AACjB,4CAAkC,GAAG,aAAa;AAClD,iBAAO,2CAA2C,IAAI;;AAE1D,eAAO;;AAEX,oCAA8B,uCAAuC;AACrE,aAAO;;AAEX;AACI,wBAAkB,iBAAiB;AACnC,sBAAgB,GAAG;AACnB,SAAG,YAAY,GAAG,YAAY;AAC9B,oBAAc;AACd,qBAAe;AACf,SAAG,WAAW,GAAG,YAAY,GAAG,UAAU,qBAAqB,OAAO,QAAQ,GAAG,UAAU,oBAAoB,UAAU,kBAAkB;AAC3I,0BAAoB,GAAG;AACvB,SAAG,gBAAgB,GAAG,aAAa;AACnC,SAAG,qBAAqB,GAAG,aAAa,GAAG,mBAAmB,GAAG,YAAY,SAAS;AACtF,oCAA8B,GAAG,uBAAuB,GAAG,iBAAiB,GAAG;AAC/E,SAAG,YAAY,GAAG,YAAY;AAC9B,SAAG,gBAAgB,GAAG,aAAa;AACnC,SAAG,cAAc;AACjB,SAAG,kBAAkB;AACrB,aAAO;;AAEX;AAGI,wBAAkB,iBAAiB,IAAI;AACvC,sBAAgB,GAAG;AACnB,SAAG,YAAY,GAAG,YAAY;AAC9B,oBAAc;AACd,qBAAe;AACf,SAAG,WAAW,GAAG,YAAY,GAAG,UAAU,yBAAyB,OAAO,QAAQ,GAAG,UAAU,oBAAoB,UAAU,sBAAsB;AACnJ,0BAAoB,GAAG;AACvB,SAAG,gBAAgB,GAAG,aAAa;AACnC,SAAG,qBAAqB,GAAG,aAAa,GAAG,mBAAmB,GAAG,YAAY,SAAS;AACtF,oCAA8B,GAAG,uBAAuB,GAAG,iBAAiB,GAAG;AAC/E,SAAG,YAAY,GAAG,YAAY;AAC9B,SAAG,gBAAgB,GAAG,aAAa;AACnC,SAAG,cAAc;AACjB,SAAG,kBAAkB;AACrB,aAAO;;AAEJ;AACH,UAAI,iBAAiB;AACjB,eAAO;;AAEX,iBAAW,gBAAgB;AAE3B,wBAAkB,GAAG,aAAa;AAClC,aAAO;;AAEJ;AACH,UAAI,CAAC,MAAM,QAAQ;AACf,kBAAS,CAAC;;AAEd,cAAO,QAAQ;AACX,YAAI,KAAK;AACL,kBAAY,EAAE,UAAU,aAAa,MAAM,GAAG;;;;AChhB1D;;;;;;;;;;;;;;;;AAkBA,kBAAY;AAOZ,UAAI,aAAa,aAAa,MAAM,MAAI,UAAU,mBAAmB;AAErE,UAAI,aAAa,iBAAiB;AAC9B,UAAI,sBAAsB;AACtB,eAAO;iBAEF,sBAAsB;AAC3B,eAAO;;AAEX,aAAO;;AAGX,UAAI,aAAa,kCAAkC,MAA
M;AACzD,UAAI,aAAa,0BAA0B,MAAM,MAAI,IAAI,qBAAqB;AAE9E,UAAI,aAAa,qBAAqB,MAAM;AAE5C,UAAI,aAAa,4BAA4B,MAAM;AAEnD,UAAI,aAAa,cAAc,MAAM,MAAI,QAAQ;AAEjD,UAAI,aAAa,4BAA4B,MAAM,MAAI,QAAQ;AAE/D,UAAI,aAAa,mBAAmB,MAAM,MAAI,QAAQ;AAGtD,UAAI,aAAa,4BAA4B,MAAM;AAEnD,UAAI,aAAa,gCAAgC,MAAM,MAAI,QAAQ;AAEnE,UAAI,aAAa,+BAA+B,MAAM,MAAI,QAAQ;AAElE,UAAI,aAAa,+BAA+B,MAAM,MAAI,QAAQ;AAElE,UAAI,aAAa,+BAA+B,MAAM,MAAI,QAAQ;AAElE,UAAI,aAAa,qBAAqB,MAAM,MAAI,QAAQ;AAExD,UAAI,aAAa,uBAAuB,MAAM,MAAI,QAAQ;AAE1D,UAAI,aAAa,qBAAqB,MAAM,MAAI,QAAQ;AAExD,UAAI,aAAa,0BAA0B,MAAM,uBAAuB,MAAI,UAAU;AAEtF,UAAI,aAAa,gCAAgC,MAAM,uBAAuB,MAAI,UAAU;AAS5F,UAAI,aAAa,gDAAgD;AAC7D,2BAAqB,MAAI,UAAU;AACnC,UAAI,iBAAiB;AACjB,eAAO;;AAEX,aAAO,kCAAkC;;AAM7C,UAAI,aAAa,iDAAiD,MAAM,MAAI,UAAU,kDAAkD,KACpI,CAAC;AAIL,UAAI,aAAa,gCAAgC,MAAM,mCAAmC,MAAI,UAAU;AAKxG,UAAI,aAAa,gCAAgC;AAC7C,aAAO,MAAI,QAAQ,8BACf,QACA,MAAI,QAAQ;;AAMpB,UAAI,aAAa,gCAAgC,MAAM,8BAA8B,MAAI,UAAU;AAEnG,UAAI,aAAa,2BAA2B,MAAM,oBAAoB,MAAI,UAAU;AAIpF,UAAI,aAAa,6BAA6B;AAK1C,0BAAoB,MAAI,QAAQ;AAChC,aAAO,cAAc,IAAI;;AAS7B,UAAI,aAAa,kCAAkC;AAC/C,aAAO;OACR;AACC,UAAI,aAAY,KAAK,eAAc;AAC/B,cAAM,IAAI,MAAM,8FACsB;;;ACtI9C;;;;;;;;;;;;;;;;AAoBA,WAAQ,iCAAiC,qBAAqB,uBAAuB,qBAAqB,yBAAyB,yBAAyB,qBAAqB,qBAAqB,+BAA+B,yBAAyB,yBAAyB,qBAAqB,iCAAiC,6BAA+B;ACpB5W;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,cAAc;AACnB,aAAK,cAAc;AACnB,aAAK,gBAAgB,OAAO,IAAI,UAAU,IAAI;AAC9C,yBAAiB;AAEjB,aAAK,cAAc,QAAQ;AACvB,mBAAS,KAAK,UAAU,kBAAiB;;AAG7C,4BAAkB,KAAK,cAClB,IAAI;AACL,iBAAO,IAAI;WAEV,KAAK;AACV,aAAK,WAAW;;UAEd,SAAS,KAAK;;yBAEC;;;;;;ACpCzB;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,cAAc;AACnB,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,aAAK,cAAc;AACnB,aAAK,gBAAgB,OAAO,IAAI,UAAU,IAAI;AAC9C,yBAAiB;AAEjB,aAAK,cAAc,QAAQ;AACvB,mBAAS,KAAK,SAAS,kBAAiB;;AAG5C,4BAAkB,KAAK,cAClB,IAAI;AACL,iBAAO,IAAI;WAEV,KAAK;AACV,aAAK,WAAW;;UAEd,SAAS,KAAK;;wBAEA;;;;;;ACtCxB;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC;AACtB,eAAQ,YAAY,WAAW,WAAY;AAC3C,YAAI,CAAC;AACD,eAAK,cAAc,KAAK;;AAE5B,aAAK,cAAc,CAAC,WAAW;AAC/B,uBAAgB,QAAO,QAAS,MAAM;AACtC,6BAAqB,YACjB,kBACA;AACJ,aAAK,WAAW;;;;;kCAKU;;;;;8BAKJ;wBACN;;0BAEE;;;;;;;;;;ACzC1B;;;;;;;;;;;;;;;;AAgBO;AACH,aAAO,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK,MAAM,GAAG,MAAM,IAAI,OAAK,GAAG,QAAQ;;AAEtE;AACH,UAAI,SAAS;AACT,eAAO,CAAC;;AAEZ,aAAO,eAAe,MAAM;;AAEzB;AACH,UAAI,SAAS;AACT,eAAO;;AAEX,oBAAa;AACb,mBAAa,GAAG,IAAI,MAAM;AACtB,mBAAU,KAAK;AACf,YAAI,IAAI,OAAO;AACX,qBAAU;;;AAGlB,aAAO;;ACpCX;;;;;;;;;;;;;;;;AAiBO;AACH;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,UAAI,OAAM,UAAU,qBAAqB;AACrC,mBAAU;AACV,oBAAY;AACZ,oBAAY;AACZ,oBAAY;AACZ,oBAAY;AACZ,iBAAS;AACT,uBAAe;AAKf,2BAAmB;;;;;;;;;;;;AAcnB,2BAAmB;AACnB,sBAAc;;;;;;;;;;;AAYd,mBAAU;AACV,oBAAY;AACZ,oBAAY;AACZ,oBAAY;AACZ,oBAAY;AACZ,iBAAS;AACT,uBAAe;AAEf,2BAAmB;;;;;;;;;AASnB,2BAAmB;;;;;;;;;;AAUnB,sBAAc;;;;;;;;;;AAUlB,aAAO;QACH;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;QACA;;;AClHR;;;;;;;;;;;;;;;;AAsBO,yEAAmE;AACtE,sBAAgB,gBAAoB;AACpC,aAAO,QACF,IAAI;AACL,sBAAc,OAAO,QAAO,QAAQ,YAAW;AAC/C,sBAAc,MAAM,QAAQ,SAAS,IACjC,OAAO,QAAO,IAAI,QAAQ,YAAW,QAAO,QAAQ,WACpD,YAAY,QAAO,QAAQ;AAC/B,eAAO,GAAG,UAAU;SAEnB,KAAK;;AAEd;AACI,UAAI,EAAE,WAAW;AACb,eAAO,GAAG,EAAE;;AAEhB,aAAO,MAAM,EAAE,UAAU,EAAE,KAAK;;AAM7B;AACH,UAAI,EAAE,WAAW,EAAE;AACf,cAAM,IAAI,MAAM,wDACL,EAAE,cAAc,EAAE;;AAEjC,qBAAe;AACf,0BAAoB,KAAK,MAAM,EAAE,SAAS;AAC1C,mCAA6B,EAAE,SAAS;AACxC,mBAAa,GAAG,IAAI,aAAa;AAC7B,uBAAe,EAAE,MAAM,IAAI,GAAG,IAAI,IAAI;AACtC,uBAAe,EAAE,MAAM,IAAI,GAAG,IAAI,IAAI;AACtC,eAAO,KAAK,GAAG,SAAS,YAAY,SAAS;;AAEjD,UAAI,yBAAyB;AACzB,qBAAa,EAAE,MAAM,cAAc;AACnC,qBAAa,EAAE,MAAM,cAAc;AACnC,YAAI,OAAO,WAAW;AAClB,mBAAS,OAAO,IAAI,OAAK,SAAS;AAClC,mBAAS,OAAO,IAAI,OAAK,SAAS;;AAEtC,eAAO,KA
AK,GAAG,SAAS,YAAY,SAAS;;AAEjD,aAAO,OAAO,IAAI,UAAU,OAAO,MAAM,KAAK;;AAK3C;AACH,sBAAgB,gBAAoB,OAAO,IAAI,OAAK,EAAE;AACtD,aAAO;;wBAEa,QAAQ,mBAAmB,QAAQ;;;;AAIpD,iCAA6B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AC/EpC;;;;;;;;;;;;;;;;AAiBA,WAAM,wCAAuB;AAGtB;AACH,6BAAuB;AACvB,iBAAW,QAAQ;AACf,qBAAa,eAAmB,EAAE,UAAU;AAE5C,YAAI,EAAE,UAAU;AACZ,yBAAe,KAAK,iBAAiB,EAAE,OAAO,OAAO,IAAI,IAAI,UAAU;;AAGvE,yBAAe,KAAK,qBAAqB,EAAE;AAC3C,yBAAe,KAAK,qBAAqB,EAAE;;;AAGnD,iCAA2B,eAAe,KAAK;AAC/C,mCAA6B,WACxB,IAAI,OAAK,wBAAwB,GAAG,aAAa,qBACjD,KAAK;AACV,0BAAoB,YAAY;AAChC,mBAAa;AACb,wCAAkC,6BAA6B;AAC/D;AACA;AACA,yBAAmB,gBAAgB;AACnC,UAAI,YAAY;AACZ,gCACI,+BAA+B,YAAY,cAAc;AAC7D,uCAA+B,8BAA8B;;AAG7D,gCACI,yBAAyB,YAAY,cAAc;AACvD,uCAA+B,2BAA2B;;AAE9D,UAAI;AACA,wBAAgB;;AAEpB,qBAAe;QACX;QAAc;QAA2B;QACzC;QAAoB;QAAuB;QAAsB;QACnE,KAAK;AACP,aAAO;;AAEX;AACI,oBAAc,OAAO,UAAU;AAC/B,cAAQ,MAAM;aACL;AACD,iBAAO,iBAAiB;aACvB;AACD,iBAAO,aAAa;aACnB;AACD,iBAAO,aAAa;aACnB;AACD,iBAAO,aAAa;aACnB;AACD,iBAAO,aAAa;aACnB;AACD,iBAAO,aAAa;aACnB;AACD,iBAAO,aAAa;;AAEpB,gBAAM,IAAI,MAAM,GAAG,MAAM;;;AAIrC;AACI,oBAAc,OAAO,UAAU;AAC/B,cAAQ,MAAM;aACL;AACD,iBAAO,uBAAuB;aAC7B;AACD,iBAAO,mBAAmB;aACzB;AACD,iBAAO,mBAAmB;aACzB;AACD,iBAAO,mBAAmB;;AAE1B,iBAAO,mBAAmB;;;AAGtC,gFAA4E;AACxE,gBAAU;AACV,UAAI;AACA,eAAO,2BAA2B;;AAGlC,eAAO,qBAAqB;;AAEhC,sBAAgB,OAAO,UAAU;AACjC,uBAAiB,aAAa;AAC9B,UAAI,QAAQ,UAAU,SAAS;AAC3B,YAAI;AACA,iBAAO,+BAA+B,QAAQ;;AAG9C,iBAAO,yBAAyB,QAAQ;;;AAGhD,aAAO;;AAEX;AACI,cAAQ,SAAS;aACR;AACD,iBAAO;aACN;AACD,iBAAO,wBAAwB,UAAU;aACxC;AACD,iBAAO,wBAAwB,UAAU;aACxC;AACD,iBAAO,wBAAwB,UAAU;;AAEzC,iBAAO,wBAAwB,UAAU;;;AAGrD;AACI,cAAQ,SAAS;aACR;AACD,iBAAO;aACN;AACD,iBAAO,kBAAkB,UAAU;aAClC;AACD,iBAAO,kBAAkB,UAAU;aAClC;AACD,iBAAO,kBAAkB,UAAU;aAClC;AACD,iBAAO,kBAAkB,UAAU;aAClC;AACD,iBAAO,kBAAkB,UAAU;aAClC;AACD,iBAAO,kBAAkB,UAAU;;AAEnC,gBAAM,IAAI,MAAM,GAAG,SAAS;;;AAGxC;AACI,aAAO;;eAEI,KAAK;;;;AAIpB;AACI,aAAO;;QAEH,KAAK;;;;AAIb;AACI,aAAO;;QAEH,KAAK;;;;AAIb;AACI,4BAAsB,GAAG,KAAK;;;;MAI5B,KAAK;MACL,KAAK;;;;;;;;;;;;;;;;;;;;;;;MAuBL,KAAK;MACL,KAAK;MACL,KAAK;;;;;;;;;;;;;;;;;;;;;;;;;MAyBL;MACA;MACA;;AAEF,aAAO;;AAEX,8BAA0B;;;;;;;;;;;;;AAa1B,8BAA0B;;;;;;;;;AAS1B,8BAA0B;;;;;;;;;;AAU1B,iCAA6B;;;;;;;;;;;;AAY7B;AACI,aAAO;;;;;;AAMX;AACI,6BAAuB,CAAC,KAAK,KAAK,SAAS,KAAK,IAAI,KAAK,KAAK,SAAS,KAAK;AAC5E,UAAI,eAAe,OAAO;AACtB,eAAO;;sCAEuB,eAAe;;;;AAIjD,UAAI,eAAe,OAAO;AACtB,eAAO;;sCAEuB,eAAe;;;;AAIjD,aAAO;;;oCAGyB,eAAe,OAAO,eAAe;iCACxC,eAAe;;;;AAIhD;AACI,UAAI,SAAS,OAAO;AAChB,eAAO;;kCAEmB,SAAS;;;;AAIvC,UAAI,SAAS,OAAO;AAChB,eAAO;;kCAEmB,SAAS;;;;AAIvC,aAAO;;;oCAGyB,SAAS,OAAO,SAAS;4BACjC,SAAS;;;;AAIrC;AACI,6BAAuB,CAAC,KAAK,KAAK,SAAS,KAAK,IAAI,KAAK,KAAK,SAAS,KAAK;AAC5E,iCAA2B,KAAK,KAAK,MAAM,KAAK;AAChD,4BAAsB,qBAAqB,KAAK,KAAK,MAAM,KAAK;AAChE,aAAO;;;oCAGyB,eAAe,OAAO,eAAe;iCACxC,eAAe;;wBAExB;qBACH;;6BAEQ;4BACD;;;;;;AAM5B;AACI,qCAA+B,mCAA+C,CAAC,KAAK,KAAK,MAAM;AAC/F,aAAO;;;oCAGyB,SAAS,OAAO,SAAS;iCAC5B,SAAS;QAClC;;;;;AAKR;AACI,6BAAuB,CAAC,KAAK,KAAK,SAAS,KAAK,IAAI,KAAK,KAAK,SAAS,KAAK;AAC5E,iCAA2B,KAAK,KAAK,MAAM,MAAM,SAAS,KAAK;AAC/D,4BAAsB,qBAAqB,KAAK,KAAK,MAAM,MAAM,SAAS,KAAK;AAC/E,2BAAqB;AACrB,oBAAc;AACd,oBAAa;AACb,mBAAa,GAAG,IAAI,MAAM,SAAS,GAAG;AAClC,0BAAkB,MAAM,MAAM,SAAS,IAAI;AAC3C,kBAAU;aACL,eAAe;kBACV,OAAO;QACjB;AACA,kBAAS,IAAI,QAAQ;;AAEzB,aAAO;UACD,MAAM;;oCAEoB,eAAe,OAAO,eAAe;iCACxC,eAAe;;QAExC;;wBAEgB;qBACH;;6BAEQ;4BACD;;mBAET,MAAM,UAAU;;;;AAInC;AACI,qCAA+B,mCAA+C,CAAC,KAAK,KAAK,KAAK,OAAO;AACrG,aAAO;;;eAGI,SAAS,OAAO,SAAS;iCACP,SAAS;QAClC;;;;;AAKR;AACI,qCAA+B,mCAA+C,CAAC,KAAK,KAAK,KAAK,MAAM,OAAO;AAC3G,aAAO;;kDAEuC,SAAS;+BAC5B,SAAS;;iCAEP,SAAS;;QAElC;;;;;;;AAOR;AACI,qCAA+B,mCAA+C,CAAC,KAAK,KAAK,KAAK,MAAM,
MAAM,OAAO;AACjH,aAAO;;;eAGI,SAAS,OAAO,SAAS;iCACP,SAAS;;QAElC;;;;;;;AAOR;AACI,6BAAuB,CAAC,KAAK,KAAK,SAAS,KAAK,IAAI,KAAK,KAAK,SAAS,KAAK;AAC5E,UAAI,aAAiB,OAAO;AACxB,eAAO;;8CAE+B,eAAe,OAAO,eAAe;;;;AAK/E,iCAA2B,KAAK,KAAK,MAAM,KAAK;AAUhD,aAAO;;;oCAGyB,eAAe,OAAO,eAAe;;iCAExC,eAAe;6BACnB;4BACD;;;;;;AAM5B;AACI,UAAI,aAAiB,OAAO;AACxB,eAAO;;0CAE2B,SAAS,OAAO,SAAS;;;;AAI/D,UAAI,MAAM,OAAO;AACb,eAAO;;;sCAGuB,SAAS,OAAO,SAAS;mCAC5B,SAAS;;;;;AAKxC,UAAI,MAAM,OAAO;AACb,eAAO;;;sCAGuB,SAAS,OAAO,SAAS;mCAC5B,SAAS;;;;;AAKxC,aAAO;;;oCAGyB,SAAS,OAAO,SAAS;iCAC5B,SAAS;wBAClB,MAAM;4BACF,MAAM;;;;;AAKlC;AACI,aAAO,SAAS;;AAEpB;AACI,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,mBAAa;AACb,aAAO;WACA;eACI,KAAK,aAAa;;;;AAIjC;AACI,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,UAAI,UAAU,UAAU;AACpB,eAAO,SAAS,sBAAsB;;AAE1C,iCAA2B,UAAU,UAAU;AAC/C,UAAI,YAAY,KAAK,YAAY;AAC7B,eAAO;cACD;+BACiB;;;;AAI3B,6BAAuB,UAAU,UAAU;AAC3C,qBAAe,yBAAyB;AACxC,aAAO;YACC;6BACiB,UAAU,UAAU;6BACpB;;;;AAI7B;AACI,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,uBAAiB,UAAU,UAAU;AACrC,6BAAuB,CAAC,KAAK,KAAK,SAAS,KAAK,IAAI,KAAK,KAAK,SAAS,KAAK;AAC5E,mBAAa;AACb,aAAO;WACA;;UAED,eAAe,OAAO,eAAe;eAChC,KAAK,aAAa;;;;AAIjC;AACI,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,UAAI,UAAU,UAAU;AAEpB,eAAO;cACD;UACJ,kBAAkB;;;;AAIxB,uBAAiB,UAAU,UAAU;AACrC,oBAAc,SAAS;AACvB,oBAAc,SAAS;AACvB,UAAI,UAAU,KAAK,UAAU;AACzB,eAAO;cACD;+BACiB;;;;AAI3B,qBAAe,yBAAyB;AACxC,UAAI,UAAU;AACV,eAAO;cACD;6CAC+B,oBAAoB;+BAClC;;;;AAI3B,UAAI,UAAU;AACV,eAAO;cACD;wCAC0B,oBAAoB;+BAC7B;;;;AAI3B,aAAO;YACC;6BACiB,UAAU,kBAAkB;6BAC5B;;;;AAI7B;AACI,oBAAc,UAAU,UAAU;AAClC,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,uBAAiB,UAAU,UAAU;AACrC,sBAAgB,SAAS;AACzB,sBAAgB,SAAS;AACzB,mBAAa;AACb,UAAI,YAAY,QAAQ,aAAiB,OAAO;AAC5C,eAAO;aACF;qDACwC,cAAc;;iBAElD,KAAK,aAAa;;;;AAI/B,6BAAuB,CAAC,KAAK,KAAK,SAAS,KAAK,IAAI,KAAK,KAAK,SAAS,KAAK;AAC5E,2BAAqB,KAAK,KAAK,MAAM,KAAK;AAC1C,aAAO;WACA;iCACsB,iBAAiB,eAAe,OAAO,eAAe;eACxE,KAAK,aAAa;;;;AAIjC;AACI,oBAAc,UAAU,UAAU;AAClC,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,uBAAiB,UAAU,UAAU;AACrC,UAAI,YAAY,QAAQ,aAAiB,OAAO;AAC5C,yBAAgB,SAAS;AACzB,yBAAgB,SAAS;AACzB,eAAO;YACH;mDACuC,eAAc;6BACpC;;;;AAIzB,aAAQ,UAAU,YAAa,cAAkB;AACjD,4BAAsB;AACtB,UAAI,cAAc,SAAS,MAAM;AAC7B,6BAAqB,iBAAiB,WAAW;AACjD,uBAAe,CAAC,OAAO;AACvB,eAAO;QACP,qBAAqB;cACf;iBACG,YAAY,kBAAkB,QAAQ;;;;AAInD,UAAI,UAAU,UAAU;AAEpB,eAAO;cACD;qDACuC,MAAM;UACjD,kBAAkB;;;;AAIxB,sBAAgB,SAAS;AACzB,sBAAgB,SAAS;AACzB,qBAAe,yBAAyB;AACxC,UAAI,YAAY;AAEZ,eAAO;YACH;yCAC6B,iBAAiB,MAAM;4CACpB;6BACf;;;;AAIzB,UAAI,YAAY;AAEZ,eAAO;YACH;yCAC6B,iBAAiB,MAAM;uCACzB;6BACV;;;;AAIzB,aAAO;UACD;;wBAEc,MAAM,cAAc;2BACjB,YAAY;2BACZ;;;;AAI3B;AACI,oBAAc,UAAU,UAAU;AAClC,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,uBAAiB,UAAU,UAAU;AACrC,6BAAuB,CAAC,KAAK,KAAK,SAAS,KAAK,IAAI,KAAK,KAAK,SAAS,KAAK;AAC5E,UAAI,MAAM,OAAO;AACb,8BAAsB,MAAM,MAAM;AAClC,yBAAiB,CAAC,GAAG;AACrB,6BAAqB,iBAAiB,WAAW;AACjD,uBAAe,CAAC,KAAK,OAAO;AAC5B,eAAO;UACL,2BAA2B;eACtB;mBACI,YAAY,kBAAkB,QAAQ;;;;AAIrD,sBAAgB,eAAe;AAC/B,sBAAgB,eAAe;AAC/B,2BAAqB,KAAK,KAAK,MAAM,KAAK;AAC1C,4BAAsB,eAAe,KAAK,KAAK,MAAM,KAAK;AAC1D,mBAAa;AACb,aAAO;WACA;;UAED,YAAY,YAAY,kBAAkB;eACrC,KAAK,aAAa;;;;AAIjC;AACI,oBAAc,UAAU,UAAU;AAClC,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,sBAAgB,MAAM,KAAK,MAAM;AACjC,sBAAgB,MAAM;AACtB,aAAQ,UAAU,YAAa,cAAkB;AACjD,4BAAsB;AACtB,UAAI,cAAc,SAAS,MAAM;AAC7B,6BAAqB,iBAAiB,WAAW;AACjD,uBAAe,CAAC,OAAO,OAAO;AAC9B,eAAO;UACL,qBAAqB;gBACf;mBACG,YAAY
,kBAAkB,QAAQ;;;;AAIrD,UAAI,UAAU,UAAU;AAEpB,eAAO;cACD;;iCAEmB,YAAY;UACnC,kBAAkB;;;;AAIxB,uBAAiB,UAAU,UAAU;AACrC,sBAAgB,SAAS;AACzB,sBAAgB,SAAS;AACzB,yBAAmB,UAAU,UAAU;AACvC,UAAI,YAAY,WAAW,cAAc;AAErC,eAAO;gBACC;;oDAEoC;;4BAExB,cAAc;iCACT;;;;AAI7B,UAAI,YAAY,WAAW,cAAc;AAErC,eAAO;YACH;8CACkC,MAAM;;qDAEC,cAAc;6BACtC;;;;AAIzB,qBAAe,yBAAyB;AACxC,aAAO;cACG;;4BAEc,mBAAmB,qBAAqB;+BACrC,YAAY;+BACZ;;;;AAI/B;AACI,oBAAc,UAAU,UAAU;AAClC,mBAAa,MAAM;AACnB,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,uBAAiB,UAAU,UAAU;AACrC,6BAAuB,CAAC,KAAK,KAAK,SAAS,KAAK,IAAI,KAAK,KAAK,SAAS,KAAK;AAC5E,sBAAgB,eAAe;AAC/B,sBAAgB,eAAe;AAC/B,2BAAqB,KAAK,KAAK,MAAM,OAAO,KAAK;AACjD,0BAAoB,eAAe,KAAK,KAAK,MAAM,OAAO,KAAK;AAC/D,mBAAa;AACb,mBAAY,OAAO,+BAA+B;AAClD,mBAAa,GAAG,IAAI,OAAO,GAAG;AAC1B,iBAAS,QAAQ,QAAQ;AACzB,yBAAiB,MAAM,OAAO,IAAI;AAClC,iBAAQ,IAAI,OAAO,qBAAqB;;AAE5C,mBAAa;AACb,aAAO;WACA,YAAY;oBACH;2BACO;kCACO;qDACmB,YAAY;eAClD,KAAK,aAAa;;;;AAIjC;AACI,oBAAc,UAAU,UAAU;AAClC,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,sBAAgB,MAAM;AACtB,sBAAgB,MAAM,KAAK;AAC3B,sBAAgB,MAAM,KAAK;AAC3B,aAAQ,UAAU,YAAa,cAAkB;AACjD,UAAI,SAAS,SAAS,MAAM;AACxB,6BAAqB,iBAAiB,WAAW;AACjD,uBAAe,CAAC,OAAO,OAAO,SAAS;AACvC,eAAO;QACP,qBAAqB;cACf;iBACG,YAAY,kBAAkB,QAAQ;;;;AAInD,UAAI,UAAU,UAAU;AAEpB,eAAO;cACD;;iCAEmB,YAAY,YAAY;UAC/C,kBAAkB;;;;AAIxB,yBAAmB,UAAU,UAAU;AACvC,uBAAiB,UAAU,UAAU;AACrC,sBAAgB,SAAS;AACzB,sBAAgB,SAAS;AACzB,UAAI,YAAY,WAAW,cAAc;AAErC,eAAO;cACD;;;;uBAIS,YAAY;;0BAET,cAAc;+BACT;;;;AAI3B,UAAI,YAAY,WAAW,cAAc;AAErC,eAAO;cACD;;gCAEkB,MAAM,KAAK,MAAM,OAAO,MAAM;;;yBAGrC,cAAc;+BACR;;;;AAI3B,qBAAe,yBAAyB;AACxC,aAAO;YACC;;0BAEc,mBAAmB;oBACzB;6BACS,YAAY,oBAAoB;6BAChC;;;;AAI7B;AACI,oBAAc,UAAU,UAAU;AAClC,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,sBAAgB,MAAM;AACtB,sBAAgB,MAAM,KAAK;AAC3B,sBAAgB,MAAM,KAAK;AAC3B,sBAAgB,MAAM,KAAK;AAC3B,aAAQ,UAAU,YAAa,cAAkB;AACjD,UAAI,SAAS,SAAS,MAAM;AACxB,6BAAqB,iBAAiB,WAAW;AACjD,uBAAe,CAAC,OAAO,OAAO,SAAS,UAAU;AACjD,eAAO;QACP,qBAAqB;cACf;iBACG,YAAY,kBAAkB,QAAQ;;;;AAInD,UAAI,UAAU,UAAU;AAEpB,eAAO;cACD;;;iBAGG,YAAY,YAAY,YAAY;;UAE3C,kBAAkB;;;;AAIxB,yBAAmB,UAAU,UAAU;AACvC,uBAAiB,UAAU,UAAU;AACrC,sBAAgB,SAAS;AACzB,sBAAgB,SAAS;AACzB,UAAI,YAAY,WAAW,cAAc;AAErC,eAAO;cACD;;;gCAGkB,YAAY,YAAY;;0BAE9B,cAAc;+BACT;;;;AAI3B,UAAI,YAAY,WAAW,cAAc;AAErC,eAAO;cACD;;;iBAGG,MAAM,KAAK,MAAM,KAAK,MAAM;iBAC5B,MAAM,KAAK,MAAM,OAAO,MAAM;;;yBAGtB,cAAc;+BACR;;;;AAI3B,qBAAe,yBAAyB;AACxC,aAAO;YACC;;0BAEc,mBAAmB,qBAAqB;qBAC7C,sBAAsB;6BACd,YAAY;6BACZ;;;;AAI7B;AACI,oBAAc,UAAU,UAAU;AAClC,sBAAgB,UAAU;AAC1B,uBAAiB,QAAQ,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACzE,aAAQ,UAAU,YAAa,cAAkB;AACjD,UAAI,SAAS,SAAS,MAAM;AACxB,6BAAqB,iBAAiB,WAAW;AACjD,uBAAe,CAAC,OAAO,OAAO,SAAS,UAAU,UAAU;AAC3D,eAAO;QACP,qBAAqB;cACf;;iBAEG,YAAY,kBAAkB,QAAQ;;;;AAInD,sBAAgB,MAAM;AACtB,sBAAgB,MAAM,KAAK;AAC3B,sBAAgB,MAAM,KAAK;AAC3B,sBAAgB,MAAM,KAAK;AAC3B,sBAAgB,MAAM,KAAK;AAC3B,UAAI,UAAU,UAAU;AAEpB,eAAO;cACD;;;;iBAIG,YAAY,YAAY,YAAY;;;mBAGlC;UACT,kBAAkB;;;;AAIxB,yBAAmB,UAAU,UAAU;AACvC,uBAAiB,UAAU,UAAU;AACrC,sBAAgB,SAAS;AACzB,sBAAgB,SAAS;AACzB,UAAI,YAAY,WAAW,cAAc;AAErC,eAAO;cACD;;;;iBAIG,YAAY,YAAY,YAAY;;;0BAG3B,cAAc;+BACT;;;;AAI3B,UAAI,YAAY,WAAW,cAAc;AAErC,eAAO;cACD;;;iBAGG,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM;iBACvC,MAAM,KAAK,MAAM,KAAK,MAAM;iBAC5B,MAAM,KAAK,MAAM;iBACjB,MAAM;;;yBAGE,cAAc;+BACR;;;;AAI3B,qBAAe,yBAAyB;AACxC,aAAO;YACC;;;0BAGc,mBAAmB,qBAAqB;qBAC7C,sBAAsB,sBAAsB;6BACpC,YAAY;6BACZ;;;;AAI7B;AACI,sBAAgB,UAAU;AAC1B,qBAAe,eAAmB,UAAU,UAAU;AACtD,UAAI,SAAS;AACT,eAAO,UAAU;;AAErB,aAAO;0BACe;;iBAET;;;;;AAKjB;AACI,sBAAgB,UAAU;AAC1B,6BAAuB,QAAQ,OAAO,GAAG,gBAA
gB,QAAQ,MAAM;AACvE,uBAAiB,QAAQ,iBAAiB;AAC1C,qBAAe,UAAU,UAAU,aAAa;AAChD,sBAAgB,aAAa,aAAa;AAC1C,4BAAsB,mBAAiB,UAAU,UAAU,cAAc,aAAa;AACtF,mBAAa,kBAAkB;AAC/B,uBAAiB,UAAU;AAC3B;AACA,qBAAe,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK;AACzC,UAAI,WAAW;AACX,wBAAgB;iBAEX,UAAU,KAAK,cAAc,UAAU;AAC5C,wBAAgB;;AAGhB,wBACI,cAAc,IAAI,OAAK,UAAU,OAAO,IAAI,kBACvC,KAAK;;AAElB,kCAA4B;AAC5B,UAAI,UAAU,KAAK,SAAS;AACxB,gCAAwB;;AAGxB,gCAAwB,UAAU,UAAU,aACvC,IAAI,UAAU,UAAU,OAAO,IAAI,aACnC,KAAK;;AAEd,mBAAa;AACb,qBAAe,eAAmB,UAAU,UAAU;AACtD,4BAAsB,WAAW;AACjC,sBAAgB,eAAmB,aAAa;AAChD,6BAAuB,YAAY;AACnC,UAAI,WAAW,KAAK,CAAC,iBAAiB,CAAC;AACnC,iBAAS;;;iBAIJ,iBAAiB,CAAC;AACvB,YAAI,YAAY;AACZ,mBAAS;;;;AAKT,mBAAS;;;;iBAKR,cAAc;AACnB,qBAAa,SAAS;AACtB,qBAAa,SAAS;AACtB,YAAI,cAAc,QAAQ,QAAQ,MAAM,cAAc,QAAQ,QAAQ;AAClE,mBAAS;mBAEJ,cAAc,QAAQ,QAAQ;AACnC,mBAAS;mBAGJ,cAAc,QAAQ,QAAQ;AACnC,mBAAS;;;AAGjB,aAAO;WACA;QACH;QACA;8BACsB,kBAAkB;QACxC;;;;AAIR;AACI,sBAAgB,UAAU;AAC1B,6BAAuB,QAAQ,OAAO,GAAG,gBAAgB,QAAQ,MAAM;AACvE,uBAAiB,QAAQ,iBAAiB;AAC1C,0BAAoB,aAAa;AACjC,yBAAmB,UAAU,UAAU;AACvC,qBAAe,UAAU,UAAU,aAAa;AAChD,sBAAgB,aAAa,aAAa;AAC1C,UAAI,CAAC,UAAU,UAAU,aAAa,WAAW,WAC7C,UAAU,UAAU,cAAc,QAClC,aAAiB,YAAY;AAC7B,eAAO;cACD;+BACiB;;;;AAI3B,mBAAa,kBAAkB;AAC/B,4BAAsB,mBAAiB,UAAU,UAAU,cAAc,aAAa;AACtF,uBAAiB,UAAU;AAC3B;AACA,qBAAe,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK;AACzC,UAAI,WAAW;AACX,wBAAgB;iBAEX,UAAU,KAAK,cAAc,UAAU;AAC5C,wBAAgB;;AAGhB,wBACI,cAAc,IAAI,OAAK,UAAU,OAAO,IAAI,kBACvC,KAAK;;AAElB,kCAA4B;AAC5B,UAAI,UAAU,KAAK,SAAS;AACxB,gCAAwB;;AAGxB,gCAAwB,UAAU,UAAU,aACvC,IAAI,UAAU,UAAU,OAAO,IAAI,aACnC,KAAK;;AAEd,aAAO;YACC;QACJ;QACA;kBACU,kBAAkB;;;;AAI7B;AACH,UAAI,QAAQ;AACR,eAAO;iBAEF,SAAS;AACd,eAAO;iBAEF,SAAS;AACd,eAAO;iBAEF,SAAS;AACd,eAAO;iBAEF,SAAS;AACd,eAAO;iBAEF,SAAS;AACd,eAAO;;AAGP,cAAM,MAAM,gBAAgB;;;AAIpC;AAEI,2BAAqB,KAAK,MAAM,KAAK,UAAU;AAC/C,mBAAa,UAAU,eAAe;AACtC,aAAO;;AAEX;AACI,aAAO,SAAS,IAAI,OAAK,OAAO,IAAI,KAAK;;AC3sC7C;;;;;;;;;;;;;;;;;MAoBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,gBAAY,MAAM,SAAS,GAAG,MAAM,aAAa,IAAG,OAAO,GAAG,gBAC1D,IAAG,MAAM;AACb,uBAAe,MAAM,MAAM,SAAS;AACpC,wBAAgB,KAAK,KAAK,SAAS;AACnC,aAAK,cAAc,MAAM,MAAM,GAAG;AAClC,YAAI,UAAU;AACV,eAAK,YAAY,KAAK;;AAE1B,YAAI,CAAC;AACD,eAAK,cAAc,KAAK;;AAE5B,yBAAiB,KAAK;AACtB,qBAAa,SAAS;AACtB,sBAAc,kBAAkB;AAChC,wBAAe,YAAY,UAAU;AACrC;AACA;AACA,YAAI,YAAY;AACZ,uBAAa,OAAO;AACpB,iCAAuB,kBAAkB;AACzC,2BAAiB;UACnB,+BAA+B,kBAAkB,QAAO;YACtD,QAAO,OAAO;UAChB,+BAA+B,kBAAkB,QAAO;YACtD,QAAO,OAAO;UAChB,+BAA+B,kBAAkB,QAAO;YACtD,QAAO,OAAO;UAChB,+BAA+B,kBAAkB,QAAO;YACtD,QAAO,OAAO;;AAGd,uBAAa;AACb,2BAAiB;UACnB;YACE,QAAO,OAAO;UAChB;YACE,QAAO,OAAO;UAChB;YACE,QAAO,OAAO;UAChB;YACE,QAAO,OAAO;;AAElB,yBAAiB,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK,MAAM,GAAG;AACzD,0BAAkB,MAAM,SAAS,aAAa;AAC9C,4BAAoB,SAAS,IAAI,OAAK,SAAS;AAC/C,2BAAmB,YAAY,cAAc,aAAa,GAAG,OAAO;AACpE,2BAAmB,YAAY,cAAc,aAAa,GAAG,OAAO;AACpE,2BAAmB,YAAY,cAAc,aAAa,GAAG,OAAO;AACpE,2BAAmB,YAAY,cAAc,aAAa,GAAG,OAAO;AACpE,uBAAgB,QAAO,QAAS,gBAAgB;AAChD,kCAA0B,YAAY,KAAK;sDACG,WAAW;sDACX,WAAW;sDACX,WAAW;sDACX,WAAW;AACzD,2BAAmB;0BACD,WAAW;uCACE,WAAW;uCACX,WAAW;qDACG,WAAW;AACxD,8CAAsC,YAAY,KAAK;qCAC1B,YAAY;4CACL,SAAS;iDACJ,SAAS,MAAM,IAAI;;AAE5D,aAAK,WAAW;0BACE,YAAY;iCACL,SAAS;sCACJ,SAAS,MAAM,IAAI;;QAEjD;;UAEE;4BACkB,QAAO,OAAO,QAAQ,SAAS,OAAO,KAAK;4BAC3C,QAAO,OAAO,QAAQ,SAAS,OAAO,KAAK;UAC7D;yCAC+B,wBAAwB;sBAC3C,wBAAwB,gBAAgB;;;2BAGnC;;8BAEG;;YAElB;6BACiB;;;mBAGV;;;;;;;;;;;;;;AChHnB;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc,SAAS;AAC5B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,+BAAuB,SAAS;AAChC,8BAAsB,SAAS;AAC/B,sCAA8B,SAAS;AACvC,qCAA6B,SAAS;AACtC,uBAAe,wBAAw
B,IAAI,SAAS,QAAQ;AAC5D,wBAAgB,uBAAuB,IAAI,SAAS,QAAQ;AAC5D,8BAAsB,IAAK,gBAAe;AAC1C,aAAK,WAAW;iCACS,WAAW;0CACF;;;;;;;;;;;;;;gCAcV;oBACZ;gDAC4B;;oCAEZ,SAAS;;;;;kCAKX;mBACf;kDAC+B;;sCAEZ,SAAS;;;;;;;;;;;;;;;;;MAiB3C;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc,SAAS;AAC5B,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,8BAAsB,SAAS;AAC/B,+BAAuB,SAAS;AAChC,8BAAsB,SAAS;AAC/B,qCAA6B,SAAS;AACtC,sCAA8B,SAAS;AACvC,qCAA6B,SAAS;AACtC,yBAAiB,uBAAuB,IAAI,SAAS,QAAQ;AAC7D,uBAAe,wBAAwB,IAAI,SAAS,QAAQ;AAC5D,wBAAgB,uBAAuB,IAAI,SAAS,QAAQ;AAC5D,8BAAsB,IAAK,eAAc,eAAe;AACxD,aAAK,WAAW;iCACS,aAAa,WAAW;0CACf;;;;;;;;;;;;;;;;;gCAiBV;oBACZ;gDAC4B;;oCAEZ,SAAS;;;;;kCAKX;sBACZ;kDAC4B;;sCAEZ,SAAS;;;;;;oCAMX;wBACZ;oDAC4B;;wCAEZ,SAAS;;;;;;;;;;;;;;;;;AC1IjD;;;;;;;;;;;;;;;;AAiBA,8BAA0B;;;;AAQnB,oBAAgB;;;;;;;;;;;AAWhB,gBAAY;;;;;;;;;;AAUZ,+BAA2B;AAC3B,kBAAc;AACd,iBAAa;AACb,uBAAmB;AACnB,oBAAgB;AAChB,0BAAsB;AACtB,wBAAoB;AACpB,uBAAmB;AACnB,gBAAY,oBAAoB;;;AAGhC,gBAAY,oBAAoB;;;AAGhC,gBAAY;;AAEZ,oBAAgB;AAChB,kBAAc;;MAEjB;AACI,aAAK,gBAAgB,CAAC,KAAK;AAC3B,aAAK,cAAc,4BAAwC,QAAQ;AACnE,aAAK,WAAW;;UAEd;;;;;;;;;;;ACtEV;;;;;;;;;;;;;;;;AAmBA,gCAA0B;;;;;;AAMnB,sBAAgB;;;;;;;;;;;;;;;;;;;;;;AAsBhB,kBAAY;;;;;;;;;;;;;;MAef,sBAAoB;;;AAGjB,oBAAc;;;;AAId,sBAAgB;;;;AAIhB,oBAAc;;;AAGd,sBAAkB;;;AAGlB,mBAAa;;;AAGb,yBAAmB;;;AAGnB,sBAAgB;;;AAGhB,4BAAsB;;;AAGtB,0BAAoB;;;;;AAKpB,yBAAmB;;;;;;AAMnB,kBAAY;;;MAIf,sBAAoB;;;AAGjB,kBAAY;;;MAIf,sBAAoB;;;AAGjB,kBAAY;;;MAIf,sBAAoB;;;;MAIpB,oDAAmD;AAC/C,aAAK,gBAAgB,CAAC,KAAK;AAC3B,aAAK,uBAAuB;AAC5B,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,aAAK,cAAc,4BAAwC,QAAQ;AACnE,qBAAa,KAAK,YAAY;AAC9B,qCAA6B;AAC7B,YAAI;AACA,cAAI,SAAS,KAAK,eAAmB,KAAK,iBAAiB;AACvD,qCAAyB;;;;;;AAOzB,0BAAc,kBAAkB;AAChC,qCAAyB;YAC7B;;AAEI,gBAAI,SAAS;AACT,wCAA0B;yCACL,KAAK,YAAY;;;;;AAMtC,+BAAiB,YAAY,UAAU;AACvC,wCAA0B;;iBAE7B,SAAS,OAAO,cAAc,KAAK,YAAY,OAAO;;iBAEtD,SAAS,OAAO,cAAc,KAAK,YAAY,OAAO;;;;;;;;AAQ/D,aAAK,WAAW;;UAEd;;;;;;;;UAQA;;;;;;;AChLV;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc;AACnB,aAAK,WAAW;;;;;;;;;;;;;;;MAepB;AACI,eAAO;AACH,cAAI,KAAK,UAAU;AACf,iBAAK,SAAS,MAAM,0BAA0B,cAAc;AAC5D,iBAAK,SAAS,MAAM,0BAA0B,cAAc;;AAEhE,gBAAM,GAAG,UAAU,KAAK,QAAQ;AAChC,gBAAM,GAAG,UAAU,KAAK,QAAQ;;;;AC1C5C;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,aAAK,cAAc;AACnB,aAAK,WAAW;;;;;;;;;;;;;;;;MAgBpB;AACI,eAAO;AACH,cAAI,KAAK,UAAU;AACf,iBAAK,SAAS,MAAM,0BAA0B,cAAc;AAC5D,iBAAK,SAAS,MAAM,0BAA0B,cAAc;;AAEhE,gBAAM,GAAG,UAAU,KAAK,QAAQ;AAChC,gBAAM,GAAG,UAAU,KAAK,QAAQ;;;;AC7C5C;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC,QAAQ;AAC9B,aAAK,cAAc;AACnB,aAAK,WAAW;;;;;;;;;;;;;;;;ACpBxB;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC,KAAK;AAC3B,aAAK,cAAc,SAAS;AAC5B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,uBAAe,SAAS,QAAQ;AAChC,wBAAgB,SAAS,QAAQ;AACjC,+BAAuB,SAAS,eAAe;AAC/C,aAAK,WAAW;;;;;;;;;;;;8BAYM,SAAS;kCACL,SAAS;iCACV,kBAAkB;;kCAEjB,SAAS;;;;oCAIP,SAAS;mCACV,iBAAiB;;oCAEhB,SAAS;;;;oBAIzB;;;;;;;;;;;;;;;;;;;MAmBhB;AACI,aAAK,gBAAgB,CAAC,MAAM;AAC5B,aAAK,cAAc,SAAS;AAC5B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,+BAAuB,SAAS,eAAe;AAC/C,uBAAe,eAAe,IAAI,SAAS,QAAQ;AACnD,wBAAgB,cAAc,IAAI,SAAS,QAAQ;AACnD,uBAAe,iBAAiB,IAAI;AACpC,uBAAe,iBAAiB,IAAI;AACpC,2BAAmB,iBAAiB,IAAI;AACxC,aAAK,WAAW;iCACS,WAAW;;;;;0BAKlB;;wCAEc,mBAAmB;;;;;;;gCAO3B;gDACgB;;oCAEZ,SAAS;;;;;yBAKpB;;kCAES;kDACgB;;sCAEZ,SAAS;;;;;;2BAMpB;;oCAES,SAAS;;oBAEzB;;;;;;;;;;;;;;;;;;;MAmBhB;AACI,aAAK,gBAAgB,CAAC,KAAK;AAC3B,aAAK,cAAc,SAAS;AAC5B,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,yBAAiB,SAAS,QAAQ;AAClC,uBAAe,SAAS,QAAQ;A
AChC,wBAAgB,SAAS,QAAQ;AACjC,aAAK,WAAW;;;;;;;;;;;8BAWM,SAAS;kCACL,SAAS;iCACV,iBAAiB;;kCAEhB,SAAS;;;;oCAIP,SAAS;mCACV,kBAAkB;;oCAEjB,SAAS;;;;sCAIP,SAAS;qCACV,iBAAiB;;sCAEhB,SAAS;;;;;;;;;;;;;;;;;MAiB3C;AACI,aAAK,gBAAgB,CAAC,MAAM;AAC5B,aAAK,cAAc,SAAS;AAC5B,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,yBAAiB,cAAc,IAAI,SAAS,QAAQ;AACpD,uBAAe,eAAe,IAAI,SAAS,QAAQ;AACnD,wBAAgB,cAAc,IAAI,SAAS,QAAQ;AACnD,aAAK,WAAW;iCACS,aAAa,WAAW;;;;;;;;;;;;;;gCAczB;gDACgB;;oCAEZ,SAAS;;;;;yBAKpB;;kCAES;kDACgB;;sCAEZ,SAAS;;;;;;2BAMpB;;oCAES;oDACgB;;wCAEZ,SAAS;;;;;;6BAMpB;;sCAES,SAAS;;;;;;;;;;;;;AC/P/C;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC,KAAK;AAC3B,aAAK,cAAc,SAAS;AAC5B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,uBAAe,SAAS,QAAQ;AAChC,wBAAgB,SAAS,QAAQ;AACjC,2BAAmB,SAAS,cAAc,SAAS;AACnD,aAAK,WAAW;;;;;;;wBAOA;;;;;8BAKM,SAAS;kCACL,SAAS;iCACV,kBAAkB;;kCAEjB,SAAS;;;;oCAIP,SAAS;mCACV,iBAAiB;;oCAEhB,SAAS;;;;;;;;;;;;;;;;MAgBzC;AACI,aAAK,gBAAgB,CAAC,MAAM;AAC5B,aAAK,cAAc,SAAS;AAC5B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,uBAAe,eAAe,IAAI,SAAS,QAAQ;AACnD,wBAAgB,cAAc,IAAI,SAAS,QAAQ;AACnD,2BAAmB,SAAS,cAAc,SAAS;AACnD,aAAK,WAAW;iCACS,WAAW;;;;;;;;;;;;gCAYZ;gDACgB;;oCAEZ,SAAS;;;;;yBAKpB;;kCAES;kDACgB;;sCAEZ,SAAS;;;;;;2BAMpB;;;oCAGS;8BACN;;;;;;;;;;;;AC9G9B;;;;;;;;;;;;;;;;;MAiBI,gCAAgC,qBAAoB,kCAAkC;AAClF,aAAK,gBAAgB,CAAC,KAAK;AAC3B,aAAK,cAAc,SAAS;AAC5B,uBAAe,SAAS,QAAQ;AAChC,wBAAgB,SAAS,QAAQ;AACjC,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,+BAAuB,SAAS;AAChC,8BAAsB,SAAS;AAC/B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,sCAA8B,KAAK,MAAM,SAAS,aAAa,KAAK;AACpE,wCAAgC,SAAS,aAAa;AACtD,+BAAuB,SAAS,eAAe;AAC/C,uBAAe,iBAAiB,IAAI;AACpC,uBAAe,iBAAiB,IAAI;AACpC,2BAAmB,iBAAiB,IAAI;AACxC,gCAAwB,6BAA6B;AACrD,YAAI;AACA,cAAI;AACA,gCAAoB;;YAExB;;;AAII,gCAAoB;;cAEtB;;;;AAIF,mCAAyB;;AAE7B,+BAAuB,UAAU,oCAAoC;AACrE,YAAI;AACA,eAAK,cAAc,KAAK;;AAE5B,YAAI;AACA,eAAK,cAAc,KAAK;;AAE5B,aAAK,WAAW;QAChB;;oCAE4B,iBAAiB;iCACpB,WAAW;;;;;0BAKlB;;;2BAGC,mBAAmB;;;;;;;gCAOd;qCACK;;gCAEL,SAAS;;;;kCAIP;uCACK;;kCAEL,SAAS;;;;oCAIP;;;;;;;;oBAQhB;;;;;;;;;;;;;;;;;;;kBAmBF,4BAA4B;;oBAE1B;;0CAEsB;mCACP;;;kCAGD;mCACC;;;yBAGV,4BAA4B;;+BAEtB;+BACA;;;oBAGX;;wCAEoB;wCACA;;;;;gCAKR;gCACA;;;;;yBAKP,4BAA4B;;+BAEtB;+BACA;+BACA;;;oBAGX;;wCAEoB;wCACA;wCACA;;;;;gCAKR;gCACA;gCACA;;;;;;;;;;UAUtB;UACA;;;;;;;MAON;AACI,aAAK,gBAAgB,CAAC,KAAK;AAC3B,aAAK,cAAc,SAAS;AAC5B,yBAAiB,SAAS,QAAQ;AAClC,uBAAe,SAAS,QAAQ;AAChC,wBAAgB,SAAS,QAAQ;AACjC,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,8BAAsB,SAAS;AAC/B,+BAAuB,SAAS;AAChC,8BAAsB,SAAS;AAC/B,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,sCAA8B,KAAK,MAAM,SAAS,aAAa,KAAK;AACpE,wCAAgC,SAAS,aAAa;AACtD,aAAK,WAAW;oCACY,gBAAgB,iBAAiB;iCACpC,aAAa,WAAW;;;;;;;;;;;;;;;;gCAgBzB;qCACK;;gCAEL,SAAS;;;;kCAIP;uCACK;;kCAEL,SAAS;;;;oCAIP;yCACK;;oCAEL,SAAS;;;;sCAIP;;;;;;;;;;;;;;;;;oBAiBlB,4BAA4B;;4CAEJ;qCACP;2BACV,4BAA4B;;4CAEX;4CACA;;;qCAGP;qCACA;;;2BAGV,4BAA4B;;4CAEX;4CACA;4CACA;;;qCAGP;qCACA;qCACA;;;;;;;;;;;;AC1RrC;;;;;;;;;;;;;;;;;MAiBI,gCAAgC,qBAAoB,2BAA2B;AAC3E,aAAK,gBAAgB,CAAC,KAAK;AAC3B,aAAK,cAAc,SAAS;AAC5B,yBAAiB,SAAS;AAC1B,yBAAiB,SAAS;AAC1B,uBAAe,SAAS,QAAQ;AAChC,wBAAgB,SAAS,QAAQ;AACjC,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,+BAAuB,SAAS;AAChC,8BAAsB,SAAS;AAC/B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,2BAAmB,SAAS,cAAc,SAAS;AACnD,gCAAwB,6BAA6B;AACrD,YAAI;AACA,cAAI;AACA,gCAAoB;;YAExB;;;AAII,gCAAoB;;cAEtB;;;;AAIF,mCAAyB;;AAE7B,+BAAuB,UAAU,oCAAoC;AACrE,YAAI;AACA,eAAK,cAAc,KAAK;;AAE5B,YAAI;AACA,eAAK,cAAc,KAAK;;AAE5B,aAAK,WAAW;QAChB;;oCAE4B,iBAAiB;iCACpB,WAAW;;;;;;;wBAOpB;4BACI;;;;;;;;;gCASI;qCACK;;gCA
EL;;;;kCAIE;uCACK;;kCAEL;;;;;;;;;;;UAWxB;UACA;;;;;;AClGV;;;;;;;;;;;;;;;;;MAkBI,gCAAgC,qBAAoB,2BAA2B;AAC3E,aAAK,gBAAgB,CAAC,KAAK;AAC3B,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,aAAK,cAAc,SAAS;AAC5B,yBAAiB,SAAS;AAC1B,yBAAiB,SAAS;AAC1B,uBAAe,SAAS,QAAQ;AAChC,wBAAgB,SAAS,QAAQ;AACjC,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,+BAAuB,SAAS;AAChC,8BAAsB,SAAS;AAC/B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,6BAAqB;AACrB,uBAAe;AACf,qBAAa,GAAG,IAAI,cAAc;AAC9B,uBAAa,GAAG,IAAI,aAAa;AAC7B,wBAAY;wBACJ,KAAK,IAAI;mBACd,KAAK;mBACL,KAAK;;;AAWhB,qBAAa,GAAG,IAAI,cAAc;AAC9B,4BAAkB,GAAG,SAAS,cAAc;AACxC,sBAAU,SAAS;AACnB,wBAAY;4BACA,IAAI;4BACJ,IAAI;;AAEhB,gBAAI,gBAAgB;AAChB,kBAAI,IAAI;AAEJ,oBAAI,UAAU,MAAM;AAQhB,8BAAY;;qCAEH,2CAA2C;2BACrD,KAAK;;;;uCAIO;6BACV,KAAK;;;2BAGP,KAAK;;;;qCAIK,2CAA2C;;;;;uCAKzC;;;;sBAIjB,KAAK,gCAAgC,KAAK;;sBAE1C,KAAK,yBAAyB,KAAK;;;;AAM7B,8BAAY;qCACH,+BAA+B;2BACzC,KAAK;;2BAEL,KAAK;;;oBAGZ,KAAK,cAAc,KAAK;;;AAGpB,oBAAI,IAAI,IAAI;AAMR,0CAAwB,UAAU,MAAM,IACpC,mBAAuB,iBACvB;AACJ,sBAAK,gBAAgB,MAAM,KAAK,UAAU,MAAM,KAC3C,gBAAgB,MAAM,KAAK,UAAU,MAAM;AAC5C,gCAAY;oCACR,UAAU,OAAO;;uCAEd;kDACW;6BACrB,KAAK,IAAI;;;AAKN,wBAAI,gBAAgB;AAChB,kCAAY;;yCAEP;oDACW;+BACrB,KAAK;;+BAEL,KAAK;;;;AAIJ,gCAAY;sBACtB,KAAK,IAAI;6BACF,KAAK,gBAAgB,KAAK,IAAI;;;AAI3B,gCAAY;oCACR;;uCAEG;kDACW;6BACrB,KAAK,IAAI;;;sBAGhB,KAAK,IAAI,cAAc,KAAK,IAAI;;;;;;AAOlC,kBAAI,IAAI;AACJ,4BAAY;mCACD;;AAQX,oBAAI,UAAU,MAAM;AAChB,8BAAY;sCACF;iDACW;2BACtB,KAAK;;2BAEL,KAAK;;;6CAGa;2BAClB,KAAK,IAAI;;2BAET,KAAK,IAAI;;;oBAGhB,KAAK;2BACE,KAAK,gBAAgB,KAAK,IAAI;;AAE7B,sBAAI,IAAI,IAAI;AACR,gCAAY;;wCAEJ;mDACW;;;sBAG7B,KAAK,IAAI,mBAAmB,KAAK,IAAI;;;;AAK/B,8BAAY;qCACH;2BACV,KAAK;;2BAEL,KAAK;;;kCAGE;iDACe;2BACtB,KAAK,IAAI;;2BAET,KAAK,IAAI;;;oBAGhB,KAAK;2BACE,KAAK,gBAAgB,KAAK,IAAI;;AAE7B,sBAAI,IAAI,IAAI;AACR,gCAAY;sBACtB,KAAK,IAAI;6BACF,KAAK,gBAAgB,KAAK,IAAI;;;;AAInC,4BAAY;;;AAGpB,gBAAI,IAAI;AACJ,0BAAY;0BACN,KAAK,YAAY,MAAM;gBACjC,KAAK,mBAAmB,KAAK,gBAAgB,KAAK;;AAE9C,kBAAI,IAAI,IAAI;AACR,4BAAY;4BACR,KAAK,IAAI,YAAY,MAAM,IAAI;kBACzC,KAAK,IAAI;8BACG,KAAK,IAAI,gBAAgB,KAAK,IAAI;;;;;AAKxD,qBAAa,GAAG,IAAI,cAAc;AAC9B,uBAAa,GAAG,IAAI,aAAa;AAC7B,wBAAY,gBAAgB,KAAK,SAAS,KAAK;;;AAGvD,gCAAwB,6BAA6B;AACrD,YAAI;AACA,cAAI;AACA,gCAAoB;;YAExB;;;AAII,gCAAoB;YACxB;;;AAGA,mCAAyB;;AAE7B,+BAAuB,UAAU,oCAAoC;AACrE,YAAI;AACA,eAAK,cAAc,KAAK;;AAE5B,YAAI;AACA,eAAK,cAAc,KAAK;;AAE5B,aAAK,WAAW;QAChB;;oCAE4B,iBAAiB;iCACpB,WAAW;;;;;;;;;;;;;;;UAelC;;;UAGA;UACA;;;;;;ACvSV;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC,SAAS,SAAS;AACxC,aAAK,cAAc;AACnB,wDAAgD;AAChD,2BAAoB;AACpB,wCAAgC;AAChC,aAAK,cAAc,CAAC,UAAU,YAAY,WAAW;AACrD,yBAAiB,WAAW,aAAa,IAAI;AAC7C,oDAA4C,CAAC,GAAG,cAAc,OAAO,GAAG,aAAa;AACrF,gDAAwC,aAAa,IACjD;UACI,GAAI,eAAc,KAAM,cAAa;UACrC;UACA,MAAM;YAEV;UACI;UACA;UACA,mBAAmB;;AAE3B,8CAAsC,YAAY,IAC9C;UACI,GAAI,cAAa,KAAM,aAAY;UACnC;UACA,MAAM;YAEV;UACI;UACA;UACA,mBAAmB;;AAK3B,aAAK,WAAW;yCACiB;wCACD;;;;;;;;;;;;;;;;iCAgBP;;;;+BAIF;8BACD;;uBAEP;mCACY;4BACP;;;uBAGL;mCACY;4BACP;;;;;aAKf;;;;;;;;;;;;;;;;;;;;;;;;;;;;MCtFT;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc;AACnB,qBAAa,MAAM;AACnB,oBAAY,YAAY,QAAQ,QAAQ,UAAU,MAAM;AACxD,uBAAe,MAAM,MAAM,SAAS;AACpC,wBAAgB;AAChB,wBAAgB;AAIhB,YAAI;AACA,sBAAY,WAAU,UAAU,SAAS,MAAM;AAC/C,sBAAY,WAAU,YAAY;;AAGlC,sBAAY,WAAU,gBAAgB,WAAW;AACjD,sBAAa,WAAU,eAAe;;AAE1C,aAAK,WAAW;;;UAGd,kBAAkB;oBACR,cAAc,MAAM;sBAClB;;cAER;sBACQ;YACV,cAAc,MAAM;wBACR,UAAU,MAAM;;;;;;MAMpC;AACI,eAAO;AACH,cAAI,KAAK,SAAS;AACd,iBAAK,QAAQ,MAAM,mBAAmB,cAAc;;AAExD,gBAAM,GAAG,UAAU,KAAK,OAAO;;;;AAI3C;AACI,UAAI,SAAS;AACT,eAAO,GAAG;iBAEL,SAAS;AACd,eAAO,GAAG,WAAW;iBAEhB,SAAS;AACd,eAAO,GAAG,WAAW,WAAW;iBAE3B,SAAS;AACd,eAAO,GAAG,WAAW,WAAW,WAAW;;AAG3C,cAAM,MAAM,2BA
A2B;;;AAG/C;AACI,UAAI,SAAS;AACT,eAAO,GAAG;iBAEL,SAAS;AACd,eAAO,GAAG;iBAEL,SAAS;AACd,eAAO,GAAG;iBAEL,SAAS;AACd,eAAO,GAAG;;AAGV,cAAM,MAAM,2BAA2B;;;AC7E/C;;;;;;;;;;;;;;;;;MAoBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,aAAK,mBAAmB,cAAc;AACtC,yBAAiB,iBAAiB;AAClC,qBAAa;AACb,aAAK,cAAc;AACnB,aAAK,WAAW;;UAEd,mCAA+C,CAAC,KAAK,KAAK,MAAM;;;;;;iBAMzD,SAAS,OAAO,SAAS;wCACF,SAAS;;;;;;;;;;UAUvC,KAAK;;;;;AC/Cf;;;;;;;;;;;;;;;;;MAoBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,aAAK,mBAAmB,cAAc;AACtC,yBAAiB,iBAAiB;AAClC,qBAAa;AACb,aAAK,cAAc;AACnB,aAAK,WAAW;;UAEd,mCAA+C,CAAC,KAAK,KAAK,MAAM;;;;;;iBAMzD,SAAS,OAAO,SAAS;wCACF,SAAS;;;;;;;;;;UAUvC,KAAK;;;;;AC/Cf;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc;AACnB,aAAK,cAAc;AACnB,aAAK,YAAY;AACjB,aAAK,aAAa;AAClB,aAAK,WAAW;;;;gBAIR,KAAK;gBACL,KAAK;gBACL,KAAK;;uBAEE;+BACQ;uBACR;+BACQ;mCACI;UACzB,KAAK;;;uBAGQ,KAAK;;;;;MAKxB;AACI,YAAI,KAAK,eAAe;AACpB,iBAAO;;AAGP,iBAAO;;;MAGf;AACI,YAAI,KAAK,eAAe;AACpB,iBAAO;;AAGP,iBAAO;;;MAGf;AACI,YAAI,KAAK,eAAe;AACpB,iBAAO;;AAGP,iBAAO;;;MAGf;AACI,YAAI,KAAK,eAAe;AACpB,iBAAO,KAAK,YAAY;;AAGxB,iBAAO,KAAK,YAAY;;;MAGhC;AACI,YAAI,KAAK,eAAe;AACpB,iBAAO;;AAGP,iBAAO;;;;ACjFnB;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc,CAAC,MAAM;AAC1B,aAAK,WAAW;;;;;;;;;ACpBxB;;;;;;;;;;;;;;;;;MAoBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc,aAAa;AAChC,qBAAa;AACb,aAAK,cAAc;AACnB,aAAK,WAAW;QAChB;;;;UAIE,KAAK;;;;;AC9Bf;;;;;;;;;;;;;;;;;MAoBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,aAAK,cAAc,aAAa;AAChC,qBAAa;AACb,aAAK,cAAc;AACnB,aAAK,WAAW;QAChB;;;;;UAKE,KAAK;;;;;ACjCf;;;;;;;;;;;;;;;;;MAmBI,yDAAyD;AACrD,aAAK,gBAAgB,CAAC;AACtB,qBAAa;AACb,gCAAwB;AACxB,aAAK,cAAc;AACnB,qBAAa;AACb,YAAI;AACA,mBAAS;;AAEb,aAAK,WAAW;QAChB,mBAA+B;;;;;;;;;;8BAUT;kCACI;iDACe,YAAY;wBACrC,KAAK;;;;;;;;;;;;;;UAcnB,KAAK,iBAAiB;;;;;ACxDhC;;;;;;;;;;;;;;;;;MAkCI,yDAAyD;AACrD,aAAK,gBAAgB,CAAC;AACtB,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,qBAAa;AACb,gCAAwB;AACxB,aAAK,cAAc;AACnB,uBAAe;AACf,qBAAa;AACb,YAAI;AACA,mBAAS;;AAEb,uBAAe,GAAG,OAAO,GAAG;AACxB,yBAAe,GAAG,OAAO,GAAG;AACxB,4BAAgB,MAAM,IAAI;AAC1B,wBAAY;;gCAEI,SAAS,YAAY;gCACrB;kCACE,SAAS,YAAY;kCACrB;;;;;;;gCAOF;oCACI;kDACc,YAAY;yBACrC,KAAK;;;yBAGL;;yBAEA;;yBAEA;;yBAEA;;;;;;;AAOjB,aAAK,WAAW;QAChB,mBAA+B;;;;;;;;;;;UAW7B;;UAEA,KAAK,YAAY;;;;;AC9F3B;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,cAAc;AACnB,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc;AACnB,aAAK,WAAW;;;;;;;;MAQpB;AACI,eAAO;AACH,cAAI,KAAK,YAAY;AACjB,iBAAK,WAAW,MAAM,0BAA0B,cAAc;;AAElE,gBAAM,GAAG,UAAU,KAAK,UAAU;;;;AClC9C;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,gBAAgB,CAAC,KAAK;AAC3B,4BAAoB,OAAO;AAC3B,oBAAY,QAAQ;AACpB,aAAK,cAAc;AACnB,aAAK,OAAO,YAAY;AACxB,sBAAc,kBAAkB,KAAK;AACrC,6BAAqB,kBAAgB,QAAQ;AAC7C,aAAK,WAAW;;UAEd;yBACe;;;;;AAKzB;AACI,mBAAa,OAAO;AACpB,UAAI,OAAO;AACP,cAAM,MAAM,mBAAmB;;AAEnC,UAAI,SAAS;AACT,eAAO;;AAEX,4BAAsB,CAAC,WAAW,WAAW,WAAW;AACxD,2BAAqB;AACrB,mBAAa,GAAG,IAAI,OAAO,QAAQ;AAC/B,YAAI,MAAM;AACN,uBAAa,KAAK,kBAAkB,cAAc;;AAGlD,uBAAa,KAAK,GAAG,cAAc;;;AAG3C,aAAO,aAAa;;;MClDpB;AACI,aAAK,WAAW;AAChB,aAAK,UAAU;AACf,aAAK,gBAAgB,CAAC,KAAK;AAC3B,aAAK,cAAc;AACnB,4BAAoB,kBAAkB,QAAQ;AAC9C,sBAAc,kBAAkB,MAAM;AACtC,6BAAqB,KAAK,WAAW,IAAI,eAAe;AACxD,aAAK,WAAW;UACd,yBAAyB,eAAe,KAAK;;YAE3C;;gCAEoB,KAAK;;sCAEC;;;;;;;ACjBtC;;;;;;;;;;;;;;;;AAmBO;AACH,mBAAa;AACb,iCAA2B,GAAG,KAAK;;MAEjC,KAAK;MACL,KAAK;MACL,KAAK;;;;;;AAMP,aAAO,mBAA8B,IAAI;;AAEtC;AAEH,0BAAoB,IAAI,aAAa,CAAC,IAAI,GAAG,GAAG,GAAG,GAAG,IAAI,IAAI,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,IAAI,GAAG,GAAG;AACnG,aAAO,yBAAoC,IAAI;;AAE5C;AAEH,oCAA8B,IAAI,YAAY,CAAC,GAAG,GAAG,GAAG,GAAG,GAAG;AAC9D,aAAO,wBAAmC,IAAI;;AAElD;AACI,0
BAA+B,OAAO;AACtC,sBAAgB,cAAyB;AACzC,oBAAc,GAAG;AACjB,mBAAwB,IAAI,MAAM,GAAG,YAAY,OAAO;AACxD,mBAAwB,IAAI,MAAM,GAAG,cAAc,OAAO,GAAG,gBAAgB,GAAG;AAChF,mBAAwB,IAAI,MAAM,GAAG,cAAc,OAAO,GAAG,gBAAgB,GAAG;AAChF,mBAAwB,IAAI,MAAM,GAAG,cAAc,OAAO,GAAG,oBAAoB,GAAG;AACpF,mBAAwB,IAAI,MAAM,GAAG,cAAc,OAAO,GAAG,oBAAoB,GAAG;AACpF,mBAAwB,IAAI,MAAM,GAAG,WAAW,OAAO,GAAG,gBAAgB,OAAO,QAAQ,GAAG,eAAe,aAAa;AACxH,mBAAwB,IAAI,MAAM,GAAG,YAAY,GAAG,YAAY;AAChE,aAAO;;AAEJ;AACH,aAAO,cAAc;;AAElB;AACH,8BAAwB,yCAAkD,MAAM;AAChF,aAAO,0BAA0B,IAAI,OAAO,QAAQ,yCAAyC,gBAAgB,cAAc,oBAAoB,GAAG;;AAE/I;AACH,aAAO,cAAc;;AAElB;AACH,8BAAwB,yCAAkD,MAAM;AAChF,aAAO,0BAA0B,IAAI,OAAO,QAAQ,yCAAyC,gBAAgB,cAAc,oBAAoB,cAAc;;AAE1J;AACH,aAAO,cAAc;;AAElB;AACH,8BAAwB,yCAAkD,MAAM;AAChF,aAAO,0BAA0B,IAAI,OAAO,QAAQ,+CAA+C,gBAAgB,GAAG,MAAM,GAAG;;AAE5H;AACH,aAAO,cAAc;;AAElB;AACH,8BAAwB,uCAAgD,MAAM;AAC9E,aAAO,0BAA0B,IAAI,OAAO,QAAQ,wCAAwC,gBAAgB,GAAG,MAAM,GAAG;;AAErH;AACH,aAAO,cAAc;;AAElB;AACH,8BAAwB,uCAAgD,MAAM;AAC9E,aAAO,0BAA0B,IAAI,OAAO,QAAQ,+CAA+C,gBAAgB,GAAG,MAAM,cAAc;;AAEvI;AACH,wBAAkB;AAClB,uBAAiB,IAAI;AACrB,qBAAgB,IAAI,IAAM,IAAI;AAC9B,mBAAwB,IAAI,MAAM,GAAG,WAAW,GAAG,cAAc;AACjE,sBAAgB,mCAA8C,IAAI,SAAS,gBAAgB,cAAc,GAAG,QAAQ;AACpH,aAAO,WACH,mCAA8C,IAAI,SAAS,MAAM,cAAc,GAAG,QAAQ;;AAE3F;AACH,mBAAwB,IAAI,MAAM,GAAG,YAAY,GAAG,YAAY;AAChE;AACA,UAAI,iBAAgB;AAChB,wBAAgB,IAAI,WAAW,QAAQ,SAAS;AAChD,wBAAgB,GAAG;AACnB,yBAAiB,GAAG;;AAGpB,wBAAgB,IAAI,aAAa,QAAQ,SAAS;AAClD,wBAAgB,GAAG;AACnB,yBAAiB,cAAc;;AAEnC,oBAAc,IAAI;AAClB,mBAAwB,IAAI,MAAM,GAAG,WAAW,GAAG,YAAY,GAAG,gBAAgB,OAAO,QAAQ,GAAG,GAAG,MAAM,eAAe;AAC5H,mBAAwB,IAAI,MAAM,GAAG,YAAY,GAAG,YAAY;;AAE7D;AACH,mBAAwB,IAAI,MAAM,GAAG,YAAY,GAAG,YAAY;AAChE,UAAI,OAAO,gBAAgB;AACvB,qBAAwB,IAAI,MAAM,GAAG,WAAW,GAAG,YAAY,GAAG,GAAG,MAAM,OAAO,OAAO,OAAO,QAAQ,GAAG,GAAG,MAAM,GAAG,eAAe,OAAO;;AAG7I,qBAAwB,IAAI,MAAM,GAAG,WAAW,GAAG,YAAY,GAAG,GAAG,MAAM,GAAG,MAAM,GAAG,eAAe;;AAE1G,mBAAwB,IAAI,MAAM,GAAG,YAAY,GAAG,YAAY;;AAE7D;AAEH,sBAAe,IAAI;AACnB,mBAAwB,KAAK,MAAM,IAAI,WAAW,IAAI,mBAAmB;AAEzE,4BAAsB;AACtB,6BAAuB;AACvB,8BAAwB,gBAAgB,iBAAiB,OAAO;AAChE,mBAAwB,KAAK,MAAM,IAAI,WAAW,IAAI,mBAAmB,iBAAiB,IAAI;AAG9F,mBAAwB,KAAK,MAAM,IAAI,WAAW,GAAG,GAAG,SAAS,MAAM,IAAI,MAAM,IAAI,OAAO;AAC5F,mBAAwB,KAAK,MAAM,IAAI,WAAW,IAAI,mBAAmB;AACzE,aAAO;;AAEJ;AACH,kBAAY;AACZ,6BAAuB,IAAI,aAAa;AACxC,UAAI,WAAW,IAAI,mBAAmB;AACtC,UAAI,iBAAiB,IAAI,mBAAmB,GAAG;AAC/C,UAAI,WAAW,IAAI,mBAAmB;AACtC,aAAO;;AAEJ;AACH,qBAAe,yCAAkD,MAAM;AACvE,0BAAoB;AACpB,6BAAuB,IAAI,WAAW,mCAA4C,OAAO,SAAS;AAClG,mBAAwB,IAAI,MAAM,GAAG,WAAW,GAAG,GAAG,GAAG,GAAG,cAAc,uBAAuB,GAAG,eAAe;AAGnH,aAAO,IAAI,aAAa,eAAe;;AAEpC;AACH,kBAAY;AACZ,6BAAuB,IAAI,aAAa,sCAA+C,cAAc;AACrG,UAAI,WAAW,IAAI,mBAAmB;AACtC,UAAI,iBAAiB,IAAI,mBAAmB,GAAG;AAC/C,UAAI,WAAW,IAAI,mBAAmB;AACtC,aAAO;;AAEJ;AACH,yBAAmB,IAAI,aAAa,eAAe,eAAe;AAClE,mBAAwB,IAAI,MAAM,GAAG,WAAW,GAAG,GAAG,cAAc,cAAc,GAAG,MAAM,GAAG,OAAO;AACrG,aAAO;;AC1KX;;;;;;;;;;;;;;;;;MAsBI;AACI,aAAK,gBAAgB;AACrB,aAAK,UAAU;AACf,aAAK,WAAW;AAChB,aAAK,sBAAsB;AAC3B,aAAK,cAAc;AACnB,0BAAkB,OAAM,UAAU;AAClC,YAAI,MAAM;AACN,eAAK,KAAK;AACV,0BAAgB,WAAW;;AAG3B,eAAK,KAAK,gBAAgB;;AAG9B,iCAAyB;AACzB,wCAAgC;AAChC,YAAI,OAAM,UAAU,qBAAqB;AACrC,gCAAsB;AACtB,qCAA2B;AAC3B,eAAK,wBACD,oBAA+B,KAAK,IAAI;AAC5C,cAAI,aAAwB,KAAK,IAAI;AACjC,iBAAK,4BACD,oBAA+B,KAAK,IAAI;qBAEvC,OAAM,IAAI;AACf,kBAAM,IAAI,MAAM;;AAGpB,eAAK,4BAA4B,KAAK,GAAG,aAAa;AACtD,cAAI,aAAwB,KAAK,IAAI;AACjC,iBAAK,gCACD,oBAA+B,KAAK,IAAI;qBAEvC,OAAM,IAAI;AACf,kBAAM,IAAI,MAAM;;;AAKpB,+BAAqB;AACrB,cAAI,aAAwB,KAAK,IAAI;AACjC,iBAAK,4BACD,KAAK,GAAG,aAAa;qBAEpB,aAAwB,KAAK,IAAI;AACtC,iBAAK,gCACD,KAAK,GAAG,aAAa;;AAGzB,kBAAM,IAAI,MAAM;;;AAGxB,aAAK,eAAe,mBAA8B,KAAK;AACvD,aAAK,cAAc,kBA
A6B,KAAK;AACrD,aAAK,cAAc,kBAA6B,KAAK;AACrD,aAAK,gBACD,iBAA0B,KAAK,IAAI,KAAK;;UAE5C;AACA,eAAO,OAAM,QAAQ;;MAEzB;AACI,YAAI,KAAK;AACL;;AAEJ,YAAI,KAAK,WAAW;AAChB,kBAAQ,KAAK;;AAIjB,YAAI,KAAK,iBAAiB;AACtB,kBAAQ,KAAK;;AAKjB,mBAAW,KAAK;AAChB,qBAAwB,IAAI,MAAM,GAAG;AACrC,qBAAwB,IAAI,MAAM,GAAG,gBAAgB,GAAG,aAAa;AACrE,qBAAwB,IAAI,MAAM,GAAG,kBAAkB,KAAK;AAC5D,qBAAwB,IAAI,MAAM,GAAG,WAAW,GAAG,cAAc;AACjE,qBAAwB,IAAI,MAAM,GAAG,WAAW,GAAG,sBAAsB;AACzE,qBAAwB,IAAI,MAAM,GAAG,aAAa,KAAK;AACvD,aAAK,WAAW;;MAEpB;AACI,aAAK;AACL,eAAO,2BAAsC,KAAK,IAAI,MAAM,SAAS,KAAK;;MAE9E;AACI,aAAK;AACL,eAAO,2BAAsC,KAAK,IAAI,MAAM,SAAS,KAAK;;MAE9E;AACI,aAAK;AACL,eAAO,iCAA4C,KAAK,IAAI,MAAM,SAAS,KAAK;;MAEpF;AACI,aAAK;AACL,iCAAoC,KAAK,IAAI,SAAS;;MAE1D;AACI,aAAK;AACL,mCAAsC,KAAK,IAAI,SAAS,OAAO,QAAQ,OAAM,KAAK;;MAEtF;AACI,aAAK;AACL,eAAO,iCAA4C,KAAK,IAAI,MAAM,SAAS,KAAK;;MAEpF;AACI,aAAK;AACL,eAAO,0BAAqC,KAAK,IAAI,MAAM,SAAS,KAAK;;MAE7E;AACI,aAAK;AACL,YAAI,KAAK,kBAAkB;AACvB,4CAA6C,KAAK,IAAI,KAAK;AAC3D,eAAK,gBAAgB;;AAEzB,qBAAwB,KAAK,IAAI,MAAM,KAAK,GAAG,cAAc;;MAEjE;AACI,eAAO,KAAK,qBAAqB,SAAS,MAAM,gDAA2D,KAAK,IAAI,MAAM,SAAS,KAAK;;MAE5I;AACI,eAAO,+BAA0C,KAAK,IAAI,SAAQ,OAAO,MAAM,SAAS,cAAc,cAAc,KAAK;;MAE7H;AACI,eAAO,gCAA2C,KAAK,IAAI,SAAQ;;MAEvE;AACI,aAAK,yBAAyB;AAC9B,uBAAe,8BAAyC,KAAK,IAAI,MAAM,SAAS,KAAK;AACrF,aAAK;AACL,eAAO;;MAEX;AACI,6BAAqB,KAAK,YAAY,KAAK;AAC3C,eAAO,KAAK,UAAU;;MAE1B;AACI;AACA;AACA,YAAI,OAAM,QAAQ;AACd,sBAAY;AACZ,uBAAa,IAAI,UAAU,IAAI,4BAA4B;AAC3D,aAAG;AACH,0BAAgB;AACZ,2BAAe,IAAI,eAAe,MAAM,GAAG;AAC3C,mBAAO,WAAW,IAAI,oBAClB,WAAW,IAAI;;AAEvB,kBAAQ;mBAEH,OAAM,UAAU,kDAAkD;AACvE,kBAAQ,KAAK;AACb,eAAK;AACL,0BAAgB,MAAM,KAAK,iBAAiB,OAAO,OAAM,UAAU;;AAOnE,0BAAgB,MAAM;;AAE1B,eAAO,CAAE,OAAO;;MAEpB;AACI,eAAO,KAAK,qBAAqB,SAAS,MAAM,sCAAiD,KAAK,IAAI,cAAc;;MAE5H;AACI,aAAK;AACL,mBAAW,KAAK;AAChB,+BAAuB,qBAAgC,IAAI;AAC3D,6BAAqB,qBAA8B;AACnD,wBAAgB,cAAyB;AACzC,qBAAwB,IAAI,MAAM,GAAG,aAAa,SAAS;AAC3D,qBAAwB,IAAI,MAAM,GAAG,aAAa,SAAS;AAC3D,oBAAuB,IAAI;AAC3B,YAAI,KAAK;AACL,0BAA2B,IAAI;;AAEnC,YAAI,CAAC,KAAK;AACN,eAAK,WAAW;AAChB,eAAK,sBAAsB,kCAA6C,IAAI,KAAK,SAAS,KAAK;;AAEnG,eAAO;;MAEX;AACI,aAAK;AACL,YAAI,YAAY,KAAK;AACjB,eAAK,UAAU;;AAEnB,YAAI,WAAW;AACX,uBAAwB,KAAK,IAAI,MAAM,KAAK,GAAG,cAAc;;;MAGrE;AACI,aAAK;AACL,aAAK,UAAU;AACf,YAAK,KAAK,WAAW,QAAS,KAAK;AAC/B,0BAA2B,KAAK,IAAI,KAAK;;AAE7C,qBAAwB,KAAK,IAAI,MAAM,KAAK,GAAG,WAAW;;MAE9D,uDAAuD;AACnD,aAAK;AACL,YAAI;AACA,iBAAO,iCAA4C,KAAK,IAAI,SAAS;;AAGrE,iBAAO,0BAAqC,KAAK,IAAI,SAAS;;;MAGtE;AACI,aAAK;AACL,eAAO,aAAwB,KAAK,IAAI,MAAM,KAAK,GAAG,kBAAkB,SAAS;;MAErF;AACI,aAAK;AACL,eAAO,KAAK,GAAG,mBAAmB,SAAS;;MAE/C;AACI,aAAK;AACL,aAAK;AACL,2CAA8C,KAAK,IAAI,oBAAoB,iBAAiB;;MAEhG;AACI,aAAK,6BAA6B,qBAAqB,SAAS;;MAEpE;AACI,aAAK;AACL,gCAAwB,uCAAgD,MAAM;AAC9E,aAAK,6BAA6B,2BAA2B,OAAO;;MAExE;AACI,aAAK,iCAAiC,aAAa,UAAU,YAAY;;MAE7E;AACI,cAAM,IAAI,MAAM;;MAEpB;AACI,YAAI,KAAK,WAAW;AAChB,0BAA2B,KAAK,IAAI,KAAK;;AAE7C,4BAA+B,KAAK;;MAExC;AACI,aAAK;AACL,aAAK;AACL,mBAAW,KAAK;AAChB,YAAI,KAAK;AACL,eAAK;;AAET,qBAAwB,IAAI,MAAM,GAAG,aAAa,GAAG,WAAW,GAAG,GAAG,gBAAgB;;MAE1F;AACI,aAAK;AACL,qBAAwB,KAAK,IAAI,MAAM,KAAK,GAAG;;MAEnD;AACI,YAAI,KAAK,+BAA+B;AACpC,eAAK,8BACD,oBAA+B,KAAK,IAAI,OAAM,UAAU,oDAAoD,IACxG,oCACA;;AAEZ,eAAO,KAAK;;MAEhB;AACI,eAAO,KAAK;;MAEhB;AACI,eAAO,KAAK;;MAEhB;AACI,YAAI,OAAM,UAAU,oDAAoD;AACpE,sBAAY,KAAK;AACjB,uBAAY,KAAK;AACjB,yBAAc,IAAI;AAClB,cAAI,WAAW,KAAI,kBAAkB;AACrC,iBAAO;;AAEX,oBAAY,KAAK;AACjB,sBAAc,IAAI;AAClB,YAAI,cAAc,IAAI,kBAAkB;AACxC,eAAO;;MAEX;AACI,YAAI,OAAM,UAAU,oDAAoD;AACpE,sBAAY,KAAK;AACjB,uBAAY,KAAK;AACjB,cAAI,SAAS,KAAI;AACjB;;AAEJ,oBAAY,KAAK;AACjB,YAAI,YAAY,IAAI;;YAElB;AACF,cAAM,aAAiB,MAAM,KAAK,YAG9B,KAAK,iBAAiB,OAAO,OAAM,UAAU;AACjD,eA
AO,KAAK,aAAa,OAAO,OAAM,UAAU;;MAEpD;AACI,YAAI,sBAAsB;AACtB,iBAAO;;AAEX,YAAI,sBAAsB;AACtB,sBAAY,KAAK;AACjB,mCAAyB,IAAI,kBAAkB,OAAO,IAAI;AAE1D,iBAAO,mBAAmB;;AAG1B,sBAAY,KAAK;AACjB,mCAAyB,IAAI,kBAAkB,OAAO,IAAI;AAE1D,iBAAO,mBAAmB;;;MAGlC;AACI,YAAI,sBAAsB;AACtB,iBAAO;;AAEX,YAAI,sBAAsB;AACtB,sBAAY,KAAK;AACjB,sBAAY,KAAK;AACjB,4BAAkB,IAAI,kBAAkB,OAAO,IAAI;AACnD,cAAI,KAAK,YAAY;AACjB,iBAAK,WAAW,KAAK,GAAG,aAAa,IAAI;;AAE7C,iBAAO,aAAa,CAAC,KAAK;;AAG1B,sBAAY,KAAK;AACjB,4BAAkB,IAAI,kBAAkB,OAAO,IAAI;AACnD,cAAI,KAAK,YAAY;AACjB,iBAAK,WAAW,KAAK,GAAG,aAAa,IAAI;;AAE7C,iBAAO,aAAa,CAAC,KAAK;;;MAGlC;AACI,eAAO,IAAI,QAAQ;AACf,eAAK,cAAc,MAAM,aAAa,iBAAiB,MAAM;;;MAGrE;AAEI,uBAAc,qBAAqB,KAAK,YAAY,IAAI,OAAK,EAAE;AAC/D,qBAAa,GAAG,KAAK,QAAO,EAAE;AAC1B,iBAAQ,aAAc,KAAK,YAAY;AACvC;;AAEJ,aAAK,cAAc,KAAK,YAAY,MAAM,SAAQ;;MAEtD;AACI,aAAK,YAAY,KAAK,CAAE,UAAU;AAClC,YAAI,KAAK,YAAY,SAAS;AAE1B;;AAGJ,qBAAiB;AACb,eAAK;AAEL,iBAAO,KAAK,YAAY,WAAW;;;MAG3C;AACI,aAAK;AACL,sCAAyC,KAAK,IAAI,SAAS,KAAK;AAChE,YAAI,KAAK;AACL,8BAA+B,KAAK;;;MAG5C;AACI,YAAI,KAAK,iBAAiB;AACtB,wCAAyC,KAAK,IAAI,KAAK,eAAe,KAAK;AAC3E,cAAI,KAAK;AACL,gCAA+B,KAAK;;;AAIxC,4CAA6C,KAAK,IAAI,KAAK;;;MAGnE;AACI,aAAK,yBAAyB;AAC9B,uBAAe;AACf,aAAK;AACL,eAAO;;MAEX;AACI,aAAK;AACL,mBAAW,KAAK;AAChB,sCAAyC,IAAI,gCAAgC,KAAK;AAClF,YAAI,KAAK;AACL,8BAA+B;;AAEnC,aAAK,gBAAgB;AACrB,qBAAwB,IAAI,MAAM,GAAG,SAAS,GAAG,GAAG,OAAO;AAC3D,qBAAwB,IAAI,MAAM,GAAG,QAAQ,GAAG,GAAG,OAAO;;MAE9D;AACI,aAAK;AACL,qBAAwB,KAAK,IAAI,MAAM,KAAK,GAAG,QAAQ,GAAG,GAAG,OAAO;;MAExE;AACI,YAAI,KAAK;AACL,gBAAM,IAAI,MAAM;;;MAGxB;AACI,YAAI,KAAK,WAAW;AAChB,gBAAM,IAAI,MAAM;;;;AAUrB;AACH,cAAQ;AACR,aAAO,IAAI,IAAI,QAAQ,EAAE;AACrB,uBAAe,IAAI;AACnB,YAAI,CAAC;AACD;;;AAGR,aAAO,IAAI;;AC5cf;;;;;;;;;;;;;;;;AAkBO;AACH,uBAAiB,QAAQ;AACzB,yBAAmB,OAAO,IAAI;AAC1B,0BAAkB;UACd,cAAc,OAAM;UACpB,UAAU,OAAM,YAAY,OAAO,OAAM,QAAQ;UACjD,WAAW,OAAM;UACjB,UAAU,OAAM,YAAY,QAAQ,OAAM,QAAQ;UAClD,YAAY;;AAEhB,YAAI,OAAM,WAAW,QAAQ,OAAM,QAAQ,SAAS,QAChD,OAAM,QAAQ,MAAM,aAAa;AACjC,oBAAU,aAAa,OAAM,QAAQ,MAAM;;AAE/C,eAAO,CAAE,MAAM,QAAQ,cAAc,IAAI;;AAE7C,2BAAqB,WAAW,IAAI,OAAK,EAAE;AAC3C,2BAAqB;QACjB,cAAc,OAAO;QACrB,UAAU,OAAO,QAAQ;QACzB,WAAW;QACX,UAAU,OAAO,QAAQ;QACzB,YAAY;;AAEhB,qBAAe,WAA2B,YAAY,cAAc,UAAU,QAAQ;AACtF,2BAAqB,MAAM,cAAc;AAEzC,mBAAa;AACb,qBAAe,MAAM,mBAAmB,cAAc,OAAO;AAC7D,UAAI,OAAM,UAAU,qBAAqB;AACrC,iBAAS,MAAM,mBAAmB,cAAc,YAAY;;AAGhE,+BAAyB;AACzB,mBAAa,GAAG,IAAI,QAAQ,cAAc,QAAQ;AAC9C,wBAAgB,QAAQ,cAAc;AACtC,4BAAoB;AACpB,yBAAiB,WACb,MAAM,mBAAmB,cAAc,SAAS;AACpD,yBAAiB,SAAS,aACtB,MAAM,mBAAmB,cAAc,SAAS,WAAW;;AAEnE,aAAO;QACH;QACA;QACA;QACA;QACA;QACA;QACA;QACA;;;AAGR;AACI,UAAI,WAAW,WAAW,OAAO;AAC7B,cAAM,MAAM,4BAA4B,WAAW,wCAC1B,OAAO;;AAEpC,iBAAW,QAAQ;AACf,uBAAe,EAAE;AACjB,uBAAc,OAAO;AACrB,uBAAe,OAAM;AACrB,YAAI,CAAC,aAAiB,QAAQ;AAC1B,gBAAM,MAAM,2EACoB,cAAc;;AAGlD,YAAI,EAAE,aAAa,OAAM;AACrB;;AAEJ,0BAAkB,EAAE;AACpB,0BAAkB,OAAM,YAAY,OAAO,OAAM,QAAQ;AACzD,YAAI,CAAC,aAAiB,WAAW;AAC7B,gBAAM,MAAM,kFACgB,iBAAiB;;;;AAIlD;AACH,+BAAyB,OAAO,cAAc;AAC9C,+BAAyB,CAAC,OAAO,eAAe,CAAC;AACjD,qBAAe,OAAO,QAAQ;AAC9B,0BAAoB,OAAO,QAAQ;AACnC,UAAI,OAAO,QAAQ;AACf,cAAM,6BAA6B,QAAQ,YAAY,IAAI,YAAY;;AAGvE,cAAM,uBAAuB,QAAQ,YAAY,IAAI,YAAY;;AAErE,YAAM,WAAW,OAAO;AAExB,UAAI,OAAM,UAAU,qBAAqB;AACrC,YAAI,OAAO,WAAW;AAClB,gBAAM,GAAG,UAAU,OAAO,QAAQ;;;AAG1C,UAAI,OAAO,WAAW;AAClB,cAAM,GAAG,UAAU,OAAO,QAAQ;;AAGtC,aAAO,QAAQ;AACX,wBAAgB,OAAO,QAAQ,cAAc;AAC7C,uBAAe,OAAO,iBAAiB;AACvC,6BAAqB,OAAO,iBAAiB,SAAS;AACtD,YAAI,UAAU;AAEV;;AAEJ,YAAI,OAAM;AAEN,cAAI,eAAmB,OAAM,SAAS;AAClC,kBAAM,GAAG,UAAU,QAAQ,OAAM,cAAc;;AAG/C,uBAAW,OAAM;AACjB,gBAAI,CAAE,iBAAgB;AAClB,qBAAO,IAAI,aAAa;;AAE5B,kBAAM,GAAG,WAAW,QAAQ;;AAEhC;;AAGJ,YAAI,OAAM,QAAQ,SAAS,QAAQ,gBAAgB;A
AC/C,gBAAM,GAAG,UAAU,cAAc,OAAM,QAAQ,MAAM;;AAEzD,cAAM,sBAAsB,OAAM,QAAQ,SAAS,QAAQ;;AAE/D,UAAI,eAAe;AACf,oBAAY,OAAO,OAAO;;AAE9B,YAAM;;AAEH;AACH,sBAAgB;AAChB,aAAO,OAAO,QAAQ,QAAQ;AAC1B,0BAAkB,EAAE,WAAW,QAAQ,EAAE,QAAQ,SAAS,QACtD,EAAE,QAAQ,MAAM,aAAa;AACjC,yBAAiB,EAAE,YAAY,YAAY,EAAE,QAAQ;AACrD,qBAAa,GAAG,EAAE,SAAS,YAAY;;AAE3C,0BAAoB,QAAQ;AAC5B,gBAAU,QAAQ,YAAY;AAE9B,aAAO,MAAM,YAAY,MAAM;AAC/B,aAAO;;ACnKX;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,aAAK,cAAc;AACnB,eAAQ,aAAa,YAAY,aAAa,cAAc,SAAS,UAAU,eAAe,gBAAgB,cAAe;AAC7H,eAAQ,MAAM,OAAQ;AACtB,iCAAyB,aAAa;AACtC,qBAAa;AACb,+BAAuB,eAAe;AACtC,uBAAe,iBAAiB,IAAI;AACpC,uBAAe,iBAAiB,IAAI;AACpC,uBAAe;AACf,uBAAe,GAAG,OAAO,GAAG;AACxB,yBAAe,GAAG,OAAO,GAAG;AACxB,wBAAY;gCACI;yBACP;;4BAEG,YAAY,eAAe,YAAY;0CACzB,gBAAgB,kBAAkB;6BAC/C,2BAA2B;;sBAElC,WAAW;;qDAEoB,gBAAgB,kBAAkB;+BACxD,wCAAwC,wBAAwB;;wBAEvE,WAAW;;2CAEQ;;sBAErB;;2BAEK,MAAM,IAAI;;;;;2BAKV,MAAM,IAAI;;;;;;;;;;AAU7B,aAAK,WAAW;;;;;;;;;UASd;;UAEA,KAAK;;;;;AC9Ef;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc;AACnB,oBAAY;AACZ,qBAAa,OAAO,KAAK;AACzB,aAAK,cAAc;AAKnB;AACA,sBAAc,SAAS,iBAAiB;AACxC,YAAI,SAAS;AACT,wBAAc,eAAe;mBAExB,SAAS;AACd,wBAAc,QAAQ;;AAGtB,wBAAc,WAAW,mBAAmB;;AAEhD,aAAK,WAAW;;;;;;;;;wBASA,aAAa;;oCAED;;;;;0BAKV;;;;;;ACtD1B;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC,cAAc,eAAe;AACnD,aAAK,cAAc;AACnB,aAAK,cAAc;AACnB,aAAK,QAAQ,WAAW;AACxB,aAAK,cAAc;AACnB,aAAK,OAAO;AACZ,aAAK,QAAQ;AACb,aAAK,OAAO;AACZ,aAAK,WAAW;;;;;;;;8BAQM,KAAK;oDACiB;yCACX,KAAK;0BACpB;;;sCAGY,KAAK;;;;;;;;;;;;;;;yBAelB,yBAAyB;;;;;;;yCAOT;0BACf;;;;0CAIgB;;;;;;;;;;;;;;;;;ACnE1C;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc;AACnB,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,oBAAY;AACZ,qBAAa,OAAO,KAAK;AACzB,aAAK,cAAc;AAKnB;AACA,sBAAc,SAAS,iBAAiB;AACxC,YAAI,SAAS;AACT,wBAAc,eAAe;mBAExB,SAAS;AACd,wBAAc,QAAQ;;AAGtB,wBAAc,WAAW,mBAAmB;;AAEhD,aAAK,WAAW;;;;;;;;gCAQQ,KAAK,YAAY;gCACjB,KAAK,YAAY;;;;;;;;;;;;;;;iCAehB;;;;;;;;;;;yBAWR,aAAa;;;6DAGuB;;;;;;;;;;;;;;;;;;;;;0CAqBnB;;;;;;ACnG1C;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC,MAAM;AAC5B,aAAK,cAAc,SAAS;AAC5B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,+BAAuB,SAAS;AAChC,sCAA8B,SAAS;AACvC,qCAA6B,SAAS;AACtC,uBAAe,wBAAwB,IAAI,SAAS,QAAQ;AAC5D,wBAAgB,uBAAuB,IAAI,SAAS,QAAQ;AAC5D,0BAAkB,wBAAwB,uBAAuB;AACjE,aAAK,WAAW;iCACS,WAAW;;;;;;;;;;;;;;gCAcZ;kBACd;gDAC8B;;oCAEZ,SAAS;;;;;kCAKX;kDACgB;;sCAEZ,SAAS;;;;;;;gCAOf;;;;qCAIK;;;;;;;;;;;;MAYjC;AACI,aAAK,gBAAgB,CAAC,MAAM;AAC5B,aAAK,cAAc,SAAS;AAC5B,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,8BAAsB,SAAS;AAC/B,+BAAuB,SAAS;AAChC,8BAAsB,SAAS;AAC/B,qCAA6B,SAAS;AACtC,sCAA8B,SAAS;AACvC,qCAA6B,SAAS;AACtC,yBAAiB,uBAAuB,IAAI,SAAS,QAAQ;AAC7D,uBAAe,wBAAwB,IAAI,SAAS,QAAQ;AAC5D,wBAAgB,uBAAuB,IAAI,SAAS,QAAQ;AAC5D,0BAAkB,uBAAuB,wBAAwB,uBAAuB;AACxF,aAAK,WAAW;iCACS,aAAa,WAAW;;;;;;;;;;;;;;;;;gCAiBzB;mBACb;gDAC6B;;oCAEZ,SAAS;;;;;kCAKX;sBACZ;kDAC4B;;sCAEZ,SAAS;;;;;;oCAMX;wBACZ;oDAC4B;;wCAEZ,SAAS;;;;;;;kCAOf;;;;;;yBAMT,2BAA2B;yBAC3B;;;;;;;;;;;;ACrJzB;;;;;;;;;;;;;;;;;MAiBI,sDAAsD,oBAAoB,iBAAiB,qBAAoB,2BAA2B;AACtI,aAAK,gBAAgB,CAAC,WAAW;AACjC,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,aAAK,cAAc;AACnB,0BAAkB,aAAa,OAAO,KAAK,OAAO;AAClD,sCAA8B,KAAK,KAAK,YAAY;AACpD,wBAAgB,aAAa,gBAAgB;AAC7C,wBAAgB,aAAa,gBAAgB;AAC7C,yBAAiB,aAAa,CAAC,UAAU,YAAY,CAAC,UAAU;AAChE,yBAAiB,aAAa,CAAC,UAAU,YAAY,CAAC,UAAU;AAChE,gCAAwB,6BAA6B;AACrD,YAAI;AACA,cAAI;AACA,gCAAoB;;YAExB;;;AAII,gCAAoB;YACxB;;;AAGA,mCAAyB;;AAE7B,+BAAuB,UAAU,oCAAoC;AACrE,YAAI;AACA,eAAK,cAAc,KAAK;;AAE5B,YAAI;AACA,eAAK,cAAc,KAAK;;AAE5B,4BAAoB;AACpB,4BAAoB;AACpB,YAAI,OAAO,KAAK,OAAO;AACnB,0BAAgB,wBAAwB,OAAO,KAAK;mBAE/C,OAA
O,KAAK,OAAO;AACxB,0BAAgB,wBAAwB,OAAO,KAAK;;AAExD,aAAK,WAAW;QAChB;;sCAE8B;;;;8BAIR;yBACL;yBACA;wCACe;wCACA;;;;uBAIjB,SAAS,QAAQ,SAAS;uBAC1B,SAAS,QAAQ,SAAS;;;;;;;;;UASvC;;UAEA;;;;;;;ACrFV;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc,CAAC,WAAW;AAC/B,aAAK,WAAW;;;;;;;;;;8BAUM,cAAc;;;;;;;;;;0BAUlB,cAAc;;;;MAIpC;AACI,eAAO;AACH,cAAI,KAAK,WAAW;AAChB,iBAAK,UAAU,MAAM,mBAAmB,cAAc;;AAE1D,gBAAM,GAAG,UAAU,KAAK,SAAS;;;;ACjD7C;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc,CAAC,YAAY;AAChC,aAAK,WAAW;;;;8BAIM,oBAAoB;;;;;;ACxBlD;;;;;;;;;;;;;;;;;MAmBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,eAAe;AACpB,aAAK,eAAe;AAEpB,aAAK,cAAc;AACnB,qBAAa,YAAY;AACzB,YAAI,SAAS;AACT,eAAK,WAAW;;;;;;AAOhB,2BAAiB,YAAY,MAAM;AACnC,wBAAc,kBAAkB;AAChC,uCAA6B,wBAAwB,MAAM,aAAa;AACxE,0BAAc,SAAS,MAAM,YAAY,YAAY,SAAS,IAAI,YAAY,YAAY,SAAS,IAAI;AACvG,yBAAe,UAAU,aAAa;AACtC,eAAK,WAAW;;YAEhB;;eAEG;;;cAGD;;6BAEe;;;;;;;AAO7B;AACI,sBAAe;AACf,qBAAe,GAAG,OAAO,GAAG;AACxB,uBAAe,GAAG,OAAO,GAAG;AACxB,sBAAY,GAAG,QAAQ,IAAI,MAAM,UAAU,QAAQ,IAAI,MAAM;AAC7D,uBAAa,GAAG,IAAI,MAAM;AACtB,oBAAQ,GAAG,KAAK,KAAK,SAAS,IAAI,QAAQ;;AAE9C,kBAAO,KAAK;;;AAGpB,aAAO;;AAEX;AACI,UAAI,SAAS;AACT,eAAO,QAAQ,MAAM;;AAEzB,iBAAW;AACX,mBAAa,OAAO,GAAG,IAAI,MAAM;AAC7B,gBAAQ,GAAG,KAAK,SAAS,MAAM;AAC/B,YAAI,IAAI,OAAO;AACX,kBAAQ;;;AAGhB,aAAO;;AAEX;AACI,UAAI,SAAS;AACT,eAAO;;AAEX,wBAAkB,KAAK,MAAM;AAC7B,aAAO;cACG,UAAU;cACV,UAAU;;;;0BAIE;0BACA;;;AAG1B;AACI,mBAAa,MAAM;AACnB,2BAAqB,mBAAmB,MAAM;AAC9C,UAAI,SAAS;AACT,eAAO;wBACS,MAAM;;;AAG1B,aAAO,QAAQ,aAAa;8BACF,aAAa;8BACb,aAAa;uCACJ,aAAa;;AC3GpD;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc,SAAS,IAAI,WAAU,GAAE,KAAqB,OAAO,KAAK,GAAE;AAC/E,qBAAa,OAAO;AACpB,qBAAa,kBAAkB;AAC/B,sBAAc,SAAS,IAAI,QAAK,GAAE,IAAI,KAAK;AAC3C,oBAAY,SAAS,IAAI,WAAU,GAAE,KAAK,OAAO,IAAI,KAAK;AAC1D,+BAAuB,CAAC,aAAa,aAAa,aAAa,aAAa,MAAM,GAAG;AACrF,YAAI,SAAS;AACT,eAAK,WAAW;sBACN;oBACF;;;;;8BAKU;;;;;;AAMlB;;AAEJ,aAAK,WAAW;QAChB,gBAAgB,QAAQ;QACxB,cAAc,QAAQ;;;UAGpB;;4BAEkB;;YAEhB;2BACe;;;;;;ACpD3B;;;;;;;;;;;;;;;;;MAmBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,aAAK,cAAc,SAAS,IAAI,WAAU,GAAE,KAAqB,OAAO,KAAK,GAAE;AAC/E,qBAAa,OAAO;AACpB,sBAAc,kBAAkB;AAChC,sBAAc,SAAS,IAAI,QAAK,GAAE,IAAI,KAAK;AAC3C,oBAAY,SAAS,IAAI,WAAU,GAAE,KAAK,OAAO,IAAI,KAAK;AAC1D,wBAAe,YAAY,MAAM;AACjC,uBAAe,YAAY,UAAU;AACrC,uBAAe,GAAG,QAAO,OAAO,QAAQ,KAAK,YAAY,OAAO;AAChE,0BAAkB,SAAS,IAAI,WAAW,QAAQ,OAAO,MAAM,IAAI;AACnE,+BAAuB;UACnB,GAAG;UAAyB,GAAG,QAAO,OAAO;YAC7C;;UAEA,SAAS,IAAI,KAAK;;SAErB,QAAO,OAAO;YACX,QAAO,OAAO,QAAQ,KAAK,YAAY,OAAO;UAC9C,SAAS,IAAI,KAAK,KAAK,QAAO,OAAO;cACnC;;AAEN,4BAAoB,SAAS,IACzB,4BACA;AACJ,uBAAe;AACf,qBAAa,OAAO,SAAS,IAAI,IAAI,GAAG,IAAI,GAAG;AAC3C,sBAAY;UACd,eAAe;cACX;mBACK,cAAc;;YAErB;mBACO,wBAAwB,OAAO,YAAY;;;;AAItD,oBAAa,SAAS,IAAI,OAAO;AACjC,aAAK,WAAW;cACV,iBAAiB,SAAS;cAC1B,eAAe,SAAS;;;UAG5B;;UAEA;;;;;;AClEV;;;;;;;;;;;;;;;;;MAiBI,qEAAqE,6BAA6B;AAC9F,aAAK,gBAAgB,CAAC;AACtB,YAAI,aAAa,SAAS;AACtB,gBAAM,IAAI,MAAM;;AAEpB,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,+BAAuB,SAAS;AAChC,8BAAsB,SAAS;AAC/B,sCAA8B,SAAS;AACvC,qCAA6B,SAAS;AACtC,uBAAe,SAAS,QAAQ;AAChC,wBAAgB,SAAS,QAAQ;AACjC,aAAK,cAAc,SAAS;AAC5B,0BAAkB,aAAa;AAC/B,wCAAgC,cAAc,SAAS,oBAAoB,SAAS,mBAAmB,SAAS;AAChH,mCAA2B,SAAS,SAAS,mBAAmB,SAAS;AACzE,kCAA0B;AAC1B,YAAI,CAAC;AAED,gCAAsB;;AAE1B,YAAI;AACA,6BAAkB;AAClB,eAAK,WAAW;sCACU,iBAAiB;mCACpB,WAAW;;;;;;;;;;;;;;;;;;kCAkBZ;sBACZ;;;kCAGY,SAAS;;;;oCAIP;wBACZ;;;oCAGY,SAAS;;;;;;;;;;0BAUnB;;;mCAGS,mBAAoB,sBAAsB,0BAC7D,qBACA,QAAQ;;;;;;;AAOZ;;AAEJ,0BAAkB;AAClB,0BAAkB,GAAG,YAAY,YAAY;AAE7C,YAAI,aAAa;AACb,wBAAc;;AAElB,uCAA+B,KAAK,MAAM,cAAc,KAAK;AAC7D,yCAAiC,cAAc;AAC/C,8BA
AsB;YAClB;;;wBAGY;;;AAGhB,aAAK,WAAW;oCACY,iBAAiB;iCACpB,WAAW;0CACF;;;;;;8BAMZ,SAAS;;;;;;;;;;;;;;;;;;kCAkBL;;;;gCAIF;oBACZ;;;gCAGY,SAAS;;;;kCAIP;uCACK;;;;yCAIE;6CACI;6CACA;;;cAG/B;;;gCAGkB;gBAChB,6BAA6B;;;;;;;;cAQ/B;uBACS,6BAA6B;;;yCAGX;;;;;cAK3B;uBACS,6BAA6B;;;yCAGX;6CACI;;;;cAI/B;;;oBAGM;;;;;;MAMhB,qEAAqE,6BAA6B;AAC9F,aAAK,gBAAgB,CAAC;AACtB,YAAI,aAAa,SAAS;AACtB,gBAAM,IAAI,MAAM;;AAEpB,4BAAoB,SAAS;AAC7B,4BAAoB,SAAS;AAC7B,6BAAqB,SAAS;AAC9B,4BAAoB,SAAS;AAC7B,8BAAsB,SAAS;AAC/B,+BAAuB,SAAS;AAChC,8BAAsB,SAAS;AAC/B,qCAA6B,SAAS;AACtC,sCAA8B,SAAS;AACvC,qCAA6B,SAAS;AACtC,yBAAiB,SAAS,QAAQ;AAClC,uBAAe,SAAS,QAAQ;AAChC,wBAAgB,SAAS,QAAQ;AACjC,aAAK,cAAc,SAAS;AAC5B,0BAAkB,aAAa;AAC/B,kCAA0B;AAC1B,YAAI,CAAC;AAED,gCAAsB;;AAE1B,YAAI;AACA,6BAAkB;AAClB,eAAK,WAAW;;oBAER,gBAAgB,iBAAiB;mCAClB,aAAa,WAAW;;;;;;;;;;;;;;;;;;kCAkBzB;sBACZ;;;kCAGY,SAAS;;;;oCAIP;wBACZ;;;oCAGY,SAAS;;;;sCAIP;0BACZ;;;sCAGY,SAAS;;;;;;;;;;4BAUnB;;;qCAGS,mBACpB,sBACG,cAAc,SAAS,mBAAmB,SAAS,oBAAoB,SAAS,mBAAmB,SAAS,oBAC5G,UAAU,SAAS,oBAAoB,SAAS,mBAAmB,SAAS,oBAChF,QAAQ,2BAA2B;6BACtB;;;;;;;;AAQjB;;AAEJ,0BAAkB;AAClB,0BAAkB,GAAG,YAAY,YAAY;AAE7C,YAAI,aAAa;AACb,wBAAc;;AAElB,uCAA+B,KAAK,MAAM,cAAc,KAAK;AAC7D,yCAAiC,cAAc;AAC/C,8BAAsB;YAClB;;;wBAGY;;;AAGhB,aAAK,WAAW;;gBAER,gBAAgB,iBAAiB;iCAChB,aAAa,WAAW;0CACf;;;;;;8BAMZ,SAAS;;;;;;;;;;;;;;;;;;;kCAmBL;;;;gCAIF;oBACZ;;;gCAGY,SAAS;;;;kCAIP;oBACd;;;kCAGc,SAAS;;;;oCAIP;yCACK;;;;+CAIM;mDACI;mDACA;;;gBAGnC;;;kCAGkB;kBAChB,6BAA6B;;;;;;;;gBAQ/B;yBACS,6BAA6B;;;+CAGP;;;;;gBAK/B;yBACS,6BAA6B;;;+CAGP;mDACI;;;;gBAInC;;;sBAGM;;;;;;ACpZtB;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC;AACtB,eAAQ,YAAY,WAAW,QAAQ,WAAY;AACnD,aAAK,cAAc,CAAC,WAAW;AAC/B,kCAA0B;AAC1B,wBAAgB;AAChB,YAAI,eAAe;AACf,gCAAsB;mBAEjB,eAAe;AAEpB,gCAAsB;AACtB,sBAAY;mBAEP,eAAe;AAEpB,gCAAsB;AACtB,sBAAY;;AAEhB,0BAAkB,GAAG,cAAc,cAAc;AAEjD,YAAI,eAAe;AACf,wBAAc;mBAET,eAAe;AACpB,wBAAc;mBAET,eAAe;AACpB,wBAAc;mBAET,eAAe;AACpB,wBAAc;;AAElB,sCAA8B,KAAK,MAAM,aAAa,KAAK;AAC3D,wCAAgC,aAAa;AAC7C,4BAAoB;YAChB,eAAe;;mBAER,eAAe;;;;wBAIV;;;AAGhB,sBAAc;AACd,YAAI,eAAe;AACf,gCAAsB;AACtB,0BAAgB;;;;;AAKhB,oBAAU;mBAEL,eAAe;AACpB,gCAAsB;AACtB,0BAAgB;;;;;AAKhB,oBAAU;;AAEd,+BAAuB;AACvB,YAAI,SAAS,aAAa;AACtB,6BAAmB;oCACK;;;;;AAK5B,aAAK,WAAW;0CACkB;;;;UAIhC;;;;;;;;kCAQwB;;kCAEA;;;;;;8BAMJ;;YAElB,oBAAoB;;;;;;;YAOpB;;;iCAGqB;cACnB,4BAA4B;YAC9B,oBAAoB;;;;;;;YAOpB;qBACS,4BAA4B;YACrC,oBAAoB;;;;;;;YAOpB;qBACS,4BAA4B;YACrC,oBAAoB;;;;;;;YAOpB;;oBAEQ;;;;;ACvJpB;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,aAAK,cAAc;AACnB,uBAAe;AACf,qBAAa,GAAG,IAAI,GAAG;AACnB,uBAAa;AACb,cAAI,IAAI,MAAM;AACV,sBAAU;;AAEd,cAAI,IAAI;AACJ,sBAAU;;AAEd,sBAAY;UACd;UACA,IAAI,IAAI,4CAA4C;;;;;;mBAM3C;;UAET,IAAI,IAAI,MAAM;;;AAGhB,aAAK,WAAW;QAChB,uBAAuB;QACvB,mBAA+B;;;;;;;;qBAQlB,YAAY;qBACZ,YAAY;;UAEvB;;;;;;;AAOV;AACI,qCAA+B,mCAA+C,CAAC,KAAK,KAAK,MAAM;AAC/F,aAAO;;QAEH;;;;;ACrER;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc;AACnB,aAAK,cAAc,EAAE;AACrB,oCAA6B,EAAE;AAC/B,oCAA4B,GAAG;AAI/B,+BAAuB;UAClB,gBAAgB,UAAU,IAAK,UAAU,IAAI;UAC7C,gBAAgB,SAAS,IAAK,SAAS,IAAI;;AAEhD,+BAAuB;UAClB,gBAAgB,UAAU,IAAK,UAAU,IAAI;UAC7C,gBAAgB,SAAS,IAAK,SAAS,IAAI;;AAEhD,4BAAoB,eAAe,KAAK,eAAe;AACvD,2BAAmB,eAAe,KAAK,eAAe;AACtD,+BAAuB,IAAI;AAC3B,8BAAsB,IAAI;AAG1B,0BAAmB,KAAK,KAAK,kBAAkB,IAAK;AACpD,yBAAkB,KAAK,KAAK,iBAAiB,IAAK;AAClD,aAAK,WAAW;;;;;;;;;;0CAUkB;yCACD;;6CAEI;4CACD;;oCAER;mCACD;;;;;;;;;;;;;;kCAcD;;;;;;;;oCAQE;;;;;;sDAMkB,UAAU;;;;;;qDAMX,SAAS;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AC7F9D;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc;AACnB,oDAA4C;AAC5C,aAAK,cAAc,CAAC,OAAO,WAAW,UAAU;AAChD,gCAAwB;UACnB,g
BAAgB,YAAY,IAAK,YAAY,IAAI;UACjD,gBAAgB,WAAW,IAAK,WAAW,IAAI;;AAEpD,iCAAyB;UACpB,gBAAgB,YAAY,IAAK,YAAY,IAAI;UACjD,gBAAgB,WAAW,IAAK,WAAW,IAAI;;AAEpD,aAAK,WAAW;;YAEZ,gBAAgB,KAAK,iBAAiB;YACtC,gBAAgB,KAAK,iBAAiB;uCACX,gBAAgB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AClCvD;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,aAAK,cAAc;AACnB,oDAA4C;AAC5C,aAAK,cAAc,CAAC,OAAO,WAAW,UAAU;AAChD,gCAAwB;UACnB,gBAAgB,YAAY,IAAK,YAAY,IAAI;UACjD,gBAAgB,WAAW,IAAK,WAAW,IAAI;;AAEpD,iCAAyB;UACpB,gBAAgB,YAAY,IAAK,YAAY,IAAI;UACjD,gBAAgB,WAAW,IAAK,WAAW,IAAI;;AAEpD,aAAK,WAAW;;YAEZ,gBAAgB,KAAK,iBAAiB;YACtC,gBAAgB,KAAK,iBAAiB;YACtC,gBAAgB,KAAK,iBAAiB;uCACX,gBAAgB;uCAChB;;;;;;;;;;;;;;;;;;;;;;gCAsBP,QAAQ;uCACD,WAAW;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AC7DlD;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc;AACnB,aAAK,cAAc,EAAE;AACrB,oCAA6B,EAAE;AAC/B,oCAA4B,GAAG;AAI/B,+BAAuB;UAClB,gBAAgB,UAAU,IAAK,UAAU,IAAI;UAC7C,gBAAgB,SAAS,IAAK,SAAS,IAAI;;AAEhD,+BAAuB;UAClB,gBAAgB,UAAU,IAAK,UAAU,IAAI;UAC7C,gBAAgB,SAAS,IAAK,SAAS,IAAI;;AAEhD,4BAAoB,eAAe,KAAK,eAAe;AACvD,2BAAmB,eAAe,KAAK,eAAe;AACtD,+BAAuB,IAAI;AAC3B,8BAAsB,IAAI;AAG1B,0BAAmB,KAAK,KAAK,kBAAkB,IAAK;AACpD,yBAAkB,KAAK,KAAK,iBAAiB,IAAK;AAClD,aAAK,WAAW;;;;;;;;;;0CAUkB;yCACD;;6CAEI;4CACD;;oCAER;mCACD;;;;;;;;;;;;;;kCAcD;;;;;;;;oCAQE;;;;;sBAKd,eAAe;sCACC,eAAe;;;wBAG7B,eAAe;wCACC,eAAe;;;4BAG3B;kBACV;;;;4BAIU;kBACV;;;;;;;;;;;;;;;ACpGlB;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc;AACnB,oDAA4C;AAC5C,aAAK,cAAc,CAAC,OAAO,WAAW,UAAU;AAChD,gCAAwB;UACnB,gBAAgB,YAAY,IAAK,YAAY,IAAI;UACjD,gBAAgB,WAAW,IAAK,WAAW,IAAI;;AAEpD,iCAAyB;UACpB,gBAAgB,YAAY,IAAK,YAAY,IAAI;UACjD,gBAAgB,WAAW,IAAK,WAAW,IAAI;;AAGpD,0BAAkB,eAAe,QAAQ;AACzC,aAAK,WAAW;;YAEZ,gBAAgB,KAAK,iBAAiB;YACtC,gBAAgB,KAAK,iBAAiB;uCACX,gBAAgB;;;;;;;;;;;;;8DAaO;;;;;;;;;ACjD9D;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,gBAAgB,CAAC;AACtB,qBAAa,OAAO;AACpB,YAAI,OAAO;AACP,gBAAM,IAAI,MAAM,kCAAkC;;AAEtD,aAAK,cAAc;AACnB,YAAI,SAAS;AACT,eAAK,WAAW;;;2BAGD,OAAO;;;AAGtB;;AAEJ,2BAAmB;AACf,cAAI,KAAK,QAAQ,OAAO,MAAM,OAAO,OAAO;AACxC,mBAAO,GAAG,OAAO,eAAe;;AAEpC,iBAAO,UAAU;;AAErB,yBAAiB,OAAO,IAAI,UAAU,WAAW,IAAI,KAAK;AAC1D,qBAAa,kBAAkB;AAC/B,aAAK,WAAW;;UAEd;yBACe;;;;;AC7CzB;;;;;;;;;;;;;;;;;MAmBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,qBAAa,OAAO;AACpB,YAAI,OAAO;AACP,gBAAM,IAAI,MAAM,kCAAkC;;AAEtD,aAAK,cAAc;AACnB,yBAAiB,YAAY,MAAM;AACnC,2BAAmB,GAAG,SAAS,OAAO,YAAY,KAAK,YAAY,OAAO;AAC1E,wBAAgB,GAAG,SAAS,OAAO,YAAY,KAAK,YAAY,OAAO;AACvE,qBAAa,kBAAkB;AAC/B,YAAI,SAAS;AACT,eAAK,WAAW;;;;uCAIW,OAAO;cAChC,OAAO;eACN;2CAC4B,OAAO;kBAChC,OAAO;;;;;;AAOb,eAAK,WAAW;;YAEhB;;uBAEW,KAAK,SAAS;eACtB;yBACU,KAAK,SAAS;;eAExB;yBACU,KAAK,SAAS;iBACtB;2BACU,KAAK,SAAS;;;;;;;AAOjC;AACI,iBAAO,WAAW;;AAEtB;AACI,oBAAS,OAAO,KAAK,MAAM,UAAS,OAAO,KAAK;AAChD,iBAAO,WAAW;;AAEtB;AACI,oBAAS,OAAO,KAAK,MAAM,UAAS,OAAO,KAAK;AAChD,iBAAO,WAAW;;AAEtB;AACI,oBAAS,OAAO,KAAK,MAAM,UAAS,OAAO,KAAK;AAChD,oBAAS,OAAO,KAAK,MAAM,UAAS,OAAO,KAAK;AAChD,iBAAO,WAAW;;AAEtB;AACI,gCAAsB,OAAO,IAAI,UAAU,WAAW,GAAG;AACzD,2BAAiB,cAAc,KAAK;AACpC,4BAAkB,cAAc,MAAM,IAAI,KAAK;AAC/C,iBAAO,mBAAmB,mBAAmB;;AAEjD;AACI,cAAI,KAAK,QAAQ,OAAO,MAAM,OAAO,OAAO;AACxC,mBAAO,GAAG,OAAO,QAAQ,UAAU;;AAGnC,mBAAO,GAAG,UAAU;;;;;AC7FpC;;;;;;;;;;;;;;;;;MAkBI,+FAA+F;AAC3F,aAAK,gBAAgB,CAAC,WAAW,WAAW;AAC5C,aAAK,cAAc;AACnB,4BAAoB,kBAAkB,QAAQ;AAC9C,sBAAc,kBAAkB,MAAM;AACtC,4BAAoB;AACpB,YAAI,gBAAgB;AAChB,0BAAgB;mBAEX,gBAAgB;AACrB,0BAAgB;;AAEpB,+BAAuB,cAAc;AACrC,4BAAoB;AACpB,YAAI,gBAAgB;AAChB,0BAAgB;mBAEX,gBAAgB;AACrB,0BAAgB;;AAEpB,+BAAuB,cAAc;AACrC,6BAAqB,WAAW,IAAI,eAAe;AACnD,aAAK,WAAW;UACd,yBAAyB,eAAe;;;YAGtC;;;g
CAGoB;;kCAEE;kCACA;0CACQ;;;uBAGnB;;;;;;;;;ACtDvB;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC,KAAK;AAC3B,2BAAmB,UAAU;AAC7B,0BAAkB,UAAU;AAC5B,uBAAe,UAAU;AACzB,4BAAoB,UAAU;AAC9B,wBAAgB,cAAc,KAAK,KAAK,SAAS;AACjD,aAAK,cAAc,CAAC,WAAW;AAC/B,oCAA4B;AAC5B,4BAAoB;AACpB,sCAA8B,KAAK,MAAM,aAAa,KAAK;AAC3D,wCAAgC,aAAa;AAC7C,8BAAsB;;;AAGtB,oCAA4B;AAC5B,YAAI,SAAS,aAAa;AACtB,kCAAwB;oCACA;;;;;AAK5B,wCAAgC;AAChC,YAAI,SAAS,aAAa;AACtB,sCAA4B;oCACJ;;;;;AAK5B,aAAK,WAAW;0CACkB;;;UAGhC;;;;;UAKA;;;;;;;;;YASE,yBAAyB;wDACmB;;;;8BAI1B;;;;;;;;;;;;;;;;YAgBlB;;;iCAGqB;cACnB,4BAA4B;;;;;;;;;;;;;;;;;YAiB9B;qBACS,4BAA4B;;;;;;;;;;;;;;;YAerC;qBACS,4BAA4B;;;;;;;;;;;;;;;YAerC;;oBAEQ;;;;;AC9IpB;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,gBAAgB,CAAC,KAAK,KAAK;AAChC,aAAK,cAAc;AACnB;AACA;AACA,YAAI,OAAO;AACP,gBAAM,MAAM,kBAAkB;;AAElC,YAAI,SAAS;AACT,qBAAW;AACX,oBAAU;;AAGV,gCAAsB,CAAC,WAAW,WAAW,WAAW;AACxD,6BAAmB;AACnB,8BAAoB;AACpB,uBAAa,GAAG,IAAI,MAAM,QAAQ;AAC9B,wBAAY,KAAK,GAAG,cAAc;AAClC,gBAAI,IAAI;AACJ,yBAAW,KAAK,GAAG,cAAc;;;AAGzC,oBAAU,WAAW;AACrB,qBAAW,YAAY;;AAE3B,sBAAc,kBAAkB;AAChC,aAAK,WAAW;;UAEd;4BACkB;;2BAED;;2BAEA;;;;;;ACnD3B;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc;AACnB,aAAK,OAAO,SAAS;AACrB,sBAAc,kBAAkB,KAAK;AACrC,4BAAoB,qBAAqB,KAAK;AAC9C,6BAAqB,YAAU,KAAK;AACpC;AACA,yBAAiB,SAAS,IAAI;AAC1B,iBAAO,aAAa,OAAO,cAAc,eAAe,OAAO;;AAEnE,gBAAO;UACL;UACA;UACA,SAAS,KAAK;;AAEhB,aAAK,WAAW;QAChB;;UAEE;8BACoB;;;;MAI1B;AACI,YAAI,MAAM,WAAW,KAAK;AACtB,gBAAM,MAAM,aAAa,KAAK,wDACN,MAAM;;AAElC,eAAO;AACH,cAAI,KAAK,YAAY;AACjB,iBAAK,WAAW,MAAM,0BAA0B,cAAc;AAC9D,gBAAI,KAAK,YAAY;AAGjB;;;AAGR,gBAAM,GAAG,WAAW,KAAK,UAAU;;;;AAI/C,mBAAe,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK;AACzC;AACI,UAAI,SAAS;AACT,eAAO;iBAEF,QAAQ;AACb,eAAO,OAAO,MAAM,GAAG,MAAM,IAAI,OAAK,eAAe,GAAG,KAAK;;AAG7D,cAAM,MAAM,oBAAoB;;;ACrExC;;;;;;;;;;;;;;;;;MAmBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,aAAK,cAAc;AACnB,aAAK,OAAO,SAAS;AACrB,sBAAc,kBAAkB,KAAK;AACrC,wBAAe,YAAY,UAAU,KAAK;AAC1C,0BAAkB,YAAY,aAAa,KAAK;AAChD,0BAAkB,KAAK,SAAS,IAAI,cAAc,QAAQ,UAAU,MAAM,IAAI;AAC9E,2BAAmB,wBAAwB,UAAU,YAAY;AACjE,yBAAiB;mBACN;cACL,QAAO,KAAK,OAAO,QAAQ,SAAS,KAAK,OAAO;YAClD,UAAU,KAAK,OAAO;qBACb;YACT,UAAU,KAAK,OAAO;;;AAG1B,yBAAiB,KAAK,SAAS,IAAI,KAAK;UACtC,QAAO,KAAK,OAAO;cACf,QAAO,KAAK,OAAO,QAAQ,SAAS,KAAK,OAAO;YAClD,UAAU,KAAK,OAAO;qBACb;gBACL,QAAO,KAAK,OAAO,QAAQ,SAAS,KAAK,OAAO;cAClD,UAAU,KAAK,OAAO;uBACb;;;;AAIf,+BAAuB,KAAK,QAAQ,IAChC;cACE,SAAS,SAAS,IAAI,UAAU,SAAS,MAAM,aACjD,SAAS,IAAI,UAAU,GAAG,UAAU,QAAQ,QAAO,cAAc,OAC5D,KAAK;AACd,aAAK,WAAW;0BACE,KAAK;;UAErB;UACA;UACA;;UAEA;UACA;;;;;MAKN;AACI,YAAI,MAAM,WAAW,KAAK;AACtB,gBAAM,MAAM,aAAa,KAAK,wDACN,MAAM;;AAElC,eAAO;AACH,cAAI,KAAK,YAAY;AACjB,iBAAK,WAAW,MAAM,0BAA0B,cAAc;AAC9D,gBAAI,KAAK,YAAY;AAGjB;;;AAGR,gBAAM,GAAG,WAAW,KAAK,UAAU;;;;ACjF/C;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc;AACnB,qBAAa,KAAK;AAClB,2BAAmB,kBAAkB,KAAK;AAC1C,sBAAc,kBAAkB,KAAK;AACrC,wBAAgB;AAChB,YAAI,SAAS;AACT,sBAAY;;AAGZ,2BAAiB;AACjB,sBACI,KAAK,IAAI;AACL;AACA,mBAAO,KAAK,WAAW,IACnB,oBAAoB,cAAc,OAClC,UAAU,aAAa,gBAAgB,cAAc;aAExD,KAAK;;AAElB,aAAK,WAAW;QAChB,sBAAsB,cAAc;QACpC,wBAAwB,cAAc;;;UAGpC;yBACe;;;;;AC7CzB;;;;;;;;;;;;;;;;;MAoBI;AACI,aAAK,QAAQ;AACb,aAAK,kBAAkB;AACvB,aAAK,kBAAkB;AACvB,aAAK,qBAAqB;AAC1B,aAAK,gBAAgB;AAErB,aAAK,eAAe;AACpB,aAAK,aAAa;AAClB,aAAK,eAAe;;MAExB;AACI,gCAAwB,kCAAkC,OAAO;AACjE,yBAAiB,uBAAuB,SAAS,iBAAiB;AAClE,YAAI,CAAE,aAAY,KAAK;AACnB,eAAK,aAAa,YAAY;;AAElC,YAAI,CAAE,aAAY,KAAK;AACnB,eAAK,aAAa,YAAY;;AAElC,yBAAiB,aAAa,SAAS,iBAAiB,KAAK,MAAM,IAAI,KAAK,MAAM,eAAe;AACjG,YAAI,KAAK,aAAa,UAAU,SAAS;AACrC,eAAK;AACL,eAAK;AACL,eAAK,iBAAiB;AACtB,eAAK;AACL,8BAAmB,KAAK,aAAa,
UAAU;AAC/C,eAAK,aAAa,UAAU,KAAK;AACjC,iBAAO;;AAEX;AACA,YAAI,oBAAoB,oBAAoB;AACxC,uBAAa,KAAK,MAAM,0BAA0B,QAAQ,IAAI,QAAQ;mBAEjE,oBAAoB,oBAAoB;AAC7C,uBACI,KAAK,MAAM,iCAAiC,QAAQ,IAAI,QAAQ;mBAE/D,oBAAoB,oBAAoB;AAC7C,uBACI,KAAK,MAAM,2BAA2B,QAAQ,IAAI,QAAQ;mBAEzD,oBAAoB,oBAAoB;AAC7C,uBACI,KAAK,MAAM,2BAA2B,QAAQ,IAAI,QAAQ;mBAEzD,oBAAoB,oBAAoB;AAC7C,uBACI,KAAK,MAAM,iCAAiC,QAAQ,IAAI,QAAQ;;AAExE,aAAK,aAAa,UAAU,KAAK;AACjC,aAAK;AACL,aAAK,sBAAsB;AAC3B,aAAK;AACL,eAAO;;MAEX;AACI,YAAI,KAAK,gBAAgB;AAErB;;AAEJ,gCAAwB,kCAAkC,gBAAgB;AAC1E,yBAAiB,uBAAuB,OAAO,iBAAiB;AAChE,YAAI,CAAE,aAAY,KAAK;AACnB,eAAK,aAAa,YAAY;;AAElC,yBAAiB,aAAa,OAAO,iBAAiB,KAAK,MAAM,IAAI,KAAK,MAAM,eAAe;AAC/F,mCAA2B,OAAM,IAAI;AACrC,YAAI,uBAAuB,MACvB,KAAK,qBAAqB;AAC1B,eAAK,MAAM,oBAAoB;AAC/B,eAAK,sBAAsB;;AAG3B,eAAK,aAAa,UAAU,KAAK;AACjC,eAAK;AACL,eAAK,iBAAiB;;AAE1B,aAAK;AACL,wBAAgB,KAAK,aAAa;AAClC,yBAAiB,QAAQ,QAAQ;AACjC,YAAI,WAAW;AACX,gBAAM,IAAI,MAAM;;AAGpB,gBAAQ,OAAO,UAAU;AACzB,aAAK;;MAET;AACI,YAAI,CAAC,KAAK;AACN;;AAEJ,sBAAc,KAAK,kBAAkB,KAAK;AAC1C,gBAAQ,IAAI,aAAa,GAAG,KAAK,qBAAqB,KAAK,mBAAmB,IAAI;AAClF,0BAAkB,KAAK,gBAAgB,KAAK;AAC5C,gBAAQ,IAAI,oBAAoB,KAAK;AACrC,gBAAQ,IAAI,iBAAiB,KAAK,kBAAkB,KAAK,MAAM,MAAM;;UAErE;AACA,eAAO,KAAK;;UAEZ;AACA,eAAO,KAAK;;MAEhB;AACI,eAAO,KAAK;;MAEhB;AACI,eAAO,KAAK;;MAEhB;AACI,YAAI,KAAK,gBAAgB;AAErB;;AAEJ,+BAAuB,KAAK;AACxB,eAAK,aAAa,UAAU,QAAQ;AAChC,iBAAK,MAAM,oBAAoB;;;AAGvC,+BAAuB,KAAK;AACxB,eAAK,aAAa,UAAU,QAAQ;AAChC,iBAAK,MAAM,oBAAoB;;;AAGvC,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,aAAK,kBAAkB;AACvB,aAAK,kBAAkB;AACvB,aAAK,qBAAqB;AAC1B,aAAK,gBAAgB;;;AAG7B;AAEI,oBAAc;AACd,UAAI,mBAAmB,MAAM;AACzB,eAAO;iBAEF,mBAAmB,MAAM;AAC9B,eAAO;iBAEF,mBAAmB,MAAM;AAC9B,eAAO;iBAEF,mBAAmB,GAAG;AAC3B,eAAO;iBAEF,mBAAmB,MAAM;AAC9B,eAAO;;AAEX,YAAM,IAAI,MAAM,2BAA2B;;AAExC;AAMH,6BAAuB,iCAAiC,iBAAiB;AACzE;AACA,UAAI;AACA,4CAAoC,uCAAuC,MAAM,IAAI,MAAM;AAC3F,sBAAc,cAAc;;AAG5B,gCAAwB,yCAAyC,MAAM,IAAI,MAAM;AACjF,sBAAc,QAAQ;;AAE1B,+BAAwB,0BAA0B,IAAI;AACtD,aAAO,cAAc;;AAEzB;AACI,cAAQ;aACC,oBAAoB;AACrB,iBAAO,wCAAwC;aAC9C,oBAAoB;AACrB,iBAAO,+CAA+C;aACrD,oBAAoB;AACrB,iBAAO,yCAAyC;aAC/C,oBAAoB;AACrB,iBAAO,yCAAyC;aAC/C,oBAAoB;AACrB,iBAAO,+CAA+C;;AAEtD,gBAAM,IAAI,MAAM,iCAAiC;;;AAG7D;AACI,UAAI,OAAM,QAAQ;AACd,YAAI;AACA,iBAAO,oBAAoB;;AAE/B,eAAO,oBAAoB;;AAE/B,UAAI;AACA,eAAO,oBAAoB;;AAE/B,aAAO,oBAAoB;;AAE/B;AACI,UAAI,mBAAmB,aAAa;AAChC,eAAO,oBAAoB;iBAEtB,mBAAmB,aAAa,UAAU,kBAAkB;AACjE,eAAO,+BAA+B;iBAEjC,mBAAmB,aAAa,YACrC,mBAAmB,aAAa;AAChC,eAAO,oBAAoB;;AAE/B,YAAM,IAAI,MAAM,gCAAgC;;AAEpD;AACI,aAAO,GAAG,aAAa,MAAM,aAAa,MAAM,mBAAmB;;AC1OvE;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,gBAAgB,CAAC;AACtB,4BAAoB,IAAI,MAAM,OAAO;AACrC,qBAAa,GAAG,IAAI,YAAY,QAAQ;AACpC,sBAAY,KAAK,OAAO,KAAK,KAAK;;AAEtC,aAAK,cAAc;AACnB,aAAK,OAAO,YAAY;AACxB,sBAAc,kBAAkB,KAAK;AACrC,6BAAqB,kBAAgB;AACrC,aAAK,WAAW;;UAEd;yBACe;;;;;AAKzB;AACI,mBAAa,OAAO;AACpB,UAAI,OAAO;AACP,cAAM,MAAM,iBAAiB;;AAEjC,UAAI,SAAS;AACT,eAAO,eAAe,OAAO;;AAEjC,4BAAsB,CAAC,WAAW,WAAW,WAAW,WAAW;AACnE,2BAAqB;AACrB,mBAAa,GAAG,IAAI,OAAO,QAAQ;AAC/B,qBAAa,KAAK,QAAQ,cAAc,OAAO,OAAO;;AAE1D,aAAO,aAAa;;ACjDxB;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc;AACnB,aAAK,WAAW;;UAEd;;;;;;;;;;;;AAYV,gCAA0B;AACnB,mBAAe;AACf,gBAAY;AACZ,iBAAa,sBAAoB;;;AAGjC,kBAAc,sBAAoB;;;AAGlC,kBAAY;AACZ,iBAAa;;;uBAGG;kBACL;;;AAGX,0BAAsB;AACzB,aAAO,sBAAoB;mCACI;;;AAG5B,gBAAY;AACZ,iBAAa;AACb,kBAAc;AACd,iBAAa;;;;AAIb,mBAAe;AACf,mBAAe;AACf,sBAAkB;AAClB,kBAAc;;;;;;;;;;;;;;;;AAgBd,gBAAY;AACZ,kBAAc;AACd,gBAAY;;AAEZ,kBAAc;AACd,iBAAa;AACb,kBAAc;AACd,oBAAgB;AAchB,qBAAiB;;;;;;;;;;;;;;;;;;;;;AAqBjB,iBAAa,sBAAoB;;;;;;AAMjC,iBAAa,sBAAoB;;;;;;AAMjC,iBAAa,sBAAoB;;;AAGjC,iBAAa;;;;AAIb,iBAAa;;;;AAIb,iBAA
a;;;;AAIb,kBAAc,sBAAoB;AAClC,kBAAc,sBAAoB;;;AAGlC,kBAAc,sBAAoB;;;AAGlC,gBAAY;;;;cAIL;eACC;eACA;eACA;eACA;eACA;;;;;;;AAOR,uBAAmB;AACnB,wBAAoB;AACpB,kBAAc;ACjLrB;;;;;;;;;;;;;;;;AAgBO,qBAAe;AACf,kBAAY;;;;;;;;;;AAUZ,mBAAa;;;;;;;;;;;AAWb,oBAAc;;;;;;;;;;;AAWd,kBAAY;;;;;;;;;;;MAWf;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,aAAK,cAAc;AACnB,aAAK,WAAW;;UAEd;;;;;;;;;;;;ACnEV;;;;;;;;;;;;;;;;;MAmBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,aAAK,cAAc;AACnB,qBAAa,YAAY;AACzB,yBAAiB,YAAY,MAAM;AACnC,sBAAc,kBAAkB;AAChC,6BAAqB,gBAAgB,MAAM;AAC3C,0BAAkB,SAAS,MAAM;AACjC,wBAAe,QAAQ,IAAI,OAAO,QAAQ,UAAU,KAAK;AACzD,aAAK,WAAW;;UAEd;kCACwB;;4CAEU;;;;;ACnC5C;;;;;;;;;;;;;;;;AAuBA,WAAM,gCAAmB;AACzB,oBAAc;AACd,mBAAa;AACb,uBAAiB;AACjB,wBAAkB;AAuEX,8BAAwB;AACxB,8BAAwB;AAC/B,yBAAqB;AACd;AACH,UAAI,gBAAgB;AAChB,eAAO,aAAa;;AAExB,mBAAa,gBAAgB;AAC7B,aAAO,aAAa;;AAExB,gEAA2D;AACvD,UAAI,gBAAe;AACf,YAAI;AACA,iBAAO;;AAEX,eAAO;iBAEF,gBAAe;AACpB,YAAI;AACA,iBAAO;;AAEX,eAAO;iBAEF,gBAAe;AACpB,YAAI;AACA,iBAAO;;AAEX,eAAO;iBAEF,gBAAe;AACpB,YAAI;AACA,iBAAO;;AAEX,eAAO;iBAEF,gBAAe;AACpB,YAAI;AACA,iBAAO;;AAEX,eAAO;;AAEX,YAAM,IAAI,MAAM,cAAc;;AAIlC,uCAAmC;AAInC,mCAA+B;AAC/B;AACI,UAAI,OAAM,OAAO,UAAU;AACvB,eAAO;;AAEX,aAAQ,OAAM,OAAO,OAAO,SAAS,OAAM,OAAO,OAAO,QACrD,OAAO,mBACP,yBAAyB,OAAO;;AAKjC,wCAAoC;mCACL;MAClC;AACI;AAEA,aAAK,cAAc,IAAI;AAGvB,aAAK,kBAAkB,IAAI;AAG3B,aAAK,eAAe,IAAI;AACxB,aAAK,gBAAgB;AAErB,aAAK,eAAe;AAEpB,aAAK,iBAAiB;AACtB,aAAK,oBAAoB;AACzB,aAAK,wBAAwB;AAC7B,aAAK,iBAAiB;AACtB,aAAK,WAAW;AAChB,YAAI,CAAC,OAAM,QAAQ;AACf,gBAAM,IAAI,MAAM;;AAEpB,YAAI,SAAS;AACT,qBAAW,gBAAgB,OAAM,UAAU;AAC3C,eAAK,cAAc,eAAe,OAAM,UAAU;AAClD,eAAK,QAAQ,IAAI,aAAa;AAC9B,eAAK,SAAS,GAAG;AACjB,eAAK,sBAAsB;;AAG3B,eAAK,QAAQ;AACb,eAAK,cAAc;AACnB,eAAK,sBAAsB;AAC3B,eAAK,SAAS,MAAM,GAAG;;AAE3B,aAAK,iBAAiB,IAAI,eAAe,KAAK;AAC9C,aAAK,qBAAqB;AAC1B,aAAK,UAAU,IAAI,aAAY,MAAM;;MAEzC;AACI,eAAO,KAAK,QAAQ,eACf,MAAK,aAAa,KAAK,WAAW,eAAe,KAClD,KAAK;;MAEb;AACI,YAAI,OAAM,QAAQ,qCACd,OAAM,QAAQ;AACd,eAAK,uBAAuB;;AAEhC,YAAI,UAAU,eAAe,UAAU;AACnC,gBAAM,IAAI,MAAM;;AAGpB,uBAAe;AACf,aAAK,QAAQ,IAAI,QAAQ;UACrB;UACA;UACA;UACA,OAAO,aAAa;UACpB,UAAU;UACV,uBAAuB;;AAE3B,eAAO;;MAGX;AACI,wBAAgB,KAAK,QAAQ,IAAI;AACjC,gBAAQ;;MAGZ;AACI,YAAI,KAAK,QAAQ,IAAI;AACjB,0BAAgB,KAAK,QAAQ,IAAI;AACjC,kBAAQ;;;MAGhB;AACI,YAAI,OAAM,QAAQ;AACd,eAAK,uBAAuB;;AAEhC,YAAI,UAAU;AACV,gBAAM,IAAI,MAAM;;AAGpB,aAAK,QAAQ,IAAI,QAAQ;UACrB;UACA;UACA;UACA,OAAO,aAAa;UACpB,UAAU;UACV,uBAAuB;;;MAG/B;AACI,uBAAe,WAAW;AAC1B,YAAI,KAAK,QAAQ,IAAI;AACjB,8BAAoB,KAAK,QAAQ,IAAI;AACrC,sBAAY;AACZ,cAAI,YAAY,WAAW;AACvB,iBAAK,YAAY;;;;MAI7B;AACI,wBAAgB,KAAK,QAAQ,IAAI;AACjC,eAAQ,QAAQ,OAAO,oBAAoB,eAAO,OAAO,YAAa;AAItE,YAAI,UAAS;AACT;AACA,cAAI;AACA,sBAAU,IAAI,qBAAqB,OAAO;;AAG1C,sBAAU,IAAI,eAAe,OAAO;;AAExC,sBAAY,KAAK,gBAAgB,SAAS,CAAC,CAAE,QAAQ,OAAO,SAAU;AACtE,wBAAa,KAAK,SAAS,IAAI;AAC/B,eAAK,8BAA8B;AACnC,iBAAO;;AAEX,YAAI,UAAU;AACV,iBAAO,KAAK,qBAAqB;;AAErC,YAAI,UAAU;AACV,iBAAO;;AAEX,kCAA0B,KAAK,gBAAgB;AAC/C;AACA,YAAI;AACA,kBAAQ;;AAEZ;AACA,YAAI,UAAU;AACV,6BAAmB,KAAK,SAAS,mBAAmB,KAAK;AACzD,6BAAmB,KAAK,SAAS,mBAAmB,KAAK;AACzD,mBAAS,wBAAoC,YAAY;;AAGzD,mBAAS,KAAK,qBAAqB;;AAEvC,YAAI;AACA,eAAK,kBAAkB,SAAa;;AAExC,eAAO,KAAK,qBAAqB,QAAQ;;YAEvC;AACF,YAAI,KAAK,YAAY,IAAI;AACrB,+BAAoB,KAAK,YAAY,IAAI;AACzC,iBAAO,IAAI,QAAQ,aAAW,aAAY,KAAK;;AAEnD,wBAAgB,KAAK,QAAQ,IAAI;AACjC,eAAQ,QAAQ,OAAO,eAAO,OAAO,oBAAoB,YAAa;AAItE,YAAI,UAAS;AACT;AACA,cAAI;AACA,sBAAU,IAAI,qBAAqB,OAAO;;AAG1C,sBAAU,IAAI,eAAe,OAAO;;AAExC,sBAAY,KAAK,gBAAgB,SAAS,CAAC,CAAE,QAAQ,OAAO,SAAU;AACtE,wBAAa,KAAK,KAAK,IAAI;AAC3B,eAAK,8BAA8B;AACnC,iBAAO;;AAEX,YAAI,UAAU;AACV,iBAAO,KAAK,qBAAqB;;AAErC,YAAI,CAAC,OA
AM,QAAQ,mCACf,OAAM,UAAU,qBAAqB;AACrC,gBAAM,IAAI,MAAM;;AAGpB,sBAAa;AACb;AACA,YAAI,UAAU,eAAe,OAAM,IAAI;AAEnC,8BAAoB,KAAK,OAAO;AAChC,0BAAgB,KAAK,QAAQ,IAAI,kBAAkB;AACnD,oBAAS,KAAK,MAAM,wBAAwB,QAAQ,SAAS,GAAG,iBAA0B;;AAE9F,aAAK,YAAY,IAAI,QAAQ;AAC7B,YAAI,UAAU;AAEV,gBAAM,KAAK,MAAM;;AAGrB;AACA,YAAI,UAAU;AACV,qBAAW,MAAM,QAAQ,IAAI;YACzB,KAAK,KAAK,mBAAmB,KAAK;YAClC,KAAK,KAAK,mBAAmB,KAAK;;AAEtC,6BAAmB,GAAG;AACtB,6BAAmB,GAAG;AACtB,iBAAO,wBAAoC,YAAY;mBAElD,WAAU;AACf,iBAAO,KAAK,qBAAqB;;AAGjC,uBAAa,eAAmB;AAChC,iBAAO,KAAK,MAAM,gCAAgC,SAAQ;;AAE9D,YAAI,qBAAqB;AACrB,eAAK,8BAA8B;;AAEvC,0BAAkB,KAAK,qBAAqB,QAAQ;AACpD,4BAAoB,KAAK,YAAY,IAAI;AACzC,aAAK,YAAY,OAAO;AAExB,oBAAY,QAAQ,aAAW,QAAQ;AACvC,YAAI,KAAK,gBAAgB,IAAI;AACzB,eAAK,gBAAgB,OAAO;AAC5B,eAAK,YAAY;AACjB,eAAK;;AAET,eAAO;;MAEX;AACI,YAAI,UAAU;AACV;;AAEJ,qBAAa,GAAG,IAAI,OAAO,QAAQ;AAC/B,sBAAY,OAAO;AACnB,cAAI,CAAC,iBAA4B;AAC7B,gBAAI,OAAM,QAAQ;AACd,oBAAM,MAAM,aAAa;;AAI7B,kBAAM,MAAM,aAAa;;;;MAIrC;AACI,eAAQ,OAAO,OAAO,YAAa,KAAK,QAAQ,IAAI;AACpD,qBAAa,eAAmB;AAChC,YAAI,OAAM,QAAQ;AACd,4BAAkB,KAAK,OAAO;AAC9B,2BAAgB,KAAK,QAAQ,IAAI,UAAU;AAC3C,wBAAa,KAAK,MACb,gCAAgC,SAAQ,SAAS,GAAG,iBAA0B,QAC9E,SAAS,GAAG;AACjB,eAAK,8BAA8B;AACnC,iBAAO;;AAEX,uCAA+B,OAAM,QAAQ,iBAAiB,aAAa;AAC3E,4BAAoB,yBAAyB,aAAwB,SAAS;AAC9E,wBAAgB,yBACZ,IAAI,yBAAyB,eAC7B,IAAI,mBAAmB;AAC3B,uBAAe,KAAK,gBAAgB,SAAS,CAAC,CAAE,OAAO,aAAa,OAAO,UAAW;AACtF,wBAAgB,KAAK,QAAQ,IAAI,OAAO;AACxC,qBAAa,KAAK,MACb,gDAAgD,QAAQ,SAAS,QAAQ,SAAS,IAAI,QAAQ,SAAS,IACvG,SAAS,GAAG;AACjB,aAAK,8BAA8B;AACnC,eAAO;;YAEL;AACF,gCAAwB,KAAK;AAC7B,gCAAwB;AACxB,4BAAoB;AACpB,YAAI,KAAK,sBAAsB;AAC3B,eAAK,qBAAqB;AAC1B,0BAAgB;;AAGhB,eAAK,aAAa,KAAK;;AAE3B,aAAK,eAAe;AACpB;AAEA,4CAAoC,SAAa,KAAK,aAAa,IAAI,OAAO,EAAE,QAC3E,OAAO,OAAK,KAAK;AACtB,0CAAkC,SAAa,KAAK,aAAa,IAAI,OAAO,EAAE,OACzE,OAAO,OAAK,KAAK;AACtB,aAAK,eAAe;AACpB,YAAI;AACA,eAAK,qBAAqB;;AAE9B,oBAAY;UACR,cAAc,KAAK;UACnB,gBAAgB,KAAK;UACrB,UAAU;UACV,QAAQ;;AAEZ,YAAI,OAAM,UAAU,mDAAmD;AACnE,2BAAiB,MAAM,QAAQ,IAAI;AACnC,cAAI,cAAc,KAAS;AAC3B,cAAI,yBAAyB,MAAM,SAAS,IAAI,UAAW,EAAE,MAAM,0BAA0B,IAAI,IAAI,KAChG,IAAI,OAAK,GAAG,EAAE,SAAS,EAAE,MACzB,KAAK;;AAGV,cAAI,cAAc;YACd,OAAO;;;AAGf,aAAK,eAAe;AACpB,aAAK,iBAAiB;AACtB,eAAO;;MAEX;AACI,eAAO;UACH,YAAY;UACZ,eAAe,KAAK;UACpB,wBAAwB,KAAK,eAAe;UAC5C,mBAAmB,KAAK,eAAe;;;MAG/C;AACI,YAAI,OAAM,UAAU,mDAAmD;AACnE,iBAAO,KAAK,MAAM;;AAEtB,eAAO,CAAE,SAAS,QAAY,OAAO;;MAEzC;AACI,YAAI,OAAM,UAAU,mDAAmD;AACnE,eAAK,MAAM;AACX,iBAAO;;AAEX,cAAM,QAAQ;AACd,eAAO;;YAEL;AACF,YAAI,OAAM,UAAU,mDAAmD;AACnE,iBAAO,KAAK,MAAM,uBAAuB;;AAE7C,2BAAmB;AACnB,eAAO,WAAW,QAAQ,WAAW;;MAEzC;AACI,YAAI,KAAK,gBAAgB,IAAI;AACzB;;AAEJ,YAAI,KAAK,YAAY,IAAI;AACrB,eAAK,gBAAgB,IAAI;AACzB,eAAK;AACL;;AAGJ,YAAI,CAAC,KAAK,QAAQ,IAAI;AAClB;;AAMJ,YAAI,KAAK,QAAQ,IAAI,QAAQ,wBAAwB;AACjD,eAAK,QAAQ,IAAI,QAAQ;AACzB;;AAEJ,aAAK,eAAe;AACpB,eAAQ,sBAAuB,KAAK,QAAQ,IAAI;AAChD,YAAI,sBAAsB;AACtB,eAAK,QAAQ,IAAI,mBAAmB,KAAK,QAAQ;AACjD,eAAK,8BAA8B,mBAAmB;AACtD,eAAK,QAAQ,IAAI,mBAAmB,KAAK,QAAQ;AACjD,eAAK,8BAA8B,mBAAmB;;AAE1D,aAAK,QAAQ,OAAO;;MAExB;AACI,eAAQ,SAAS,OAAO,UAAU,OAAO,UAAU,iBAAU,KAAK,QAAQ,IAAI;AAC9E,oBAAY,UAAS,OAAM,cAAc;AACzC,yBAAiB,KAAK,aAAa,IAAI;AACvC,YAAI,WAAW;AACX,eAAK,aAAa,IAAI,KAAK,WAAW;;AAGtC,eAAK,aAAa,OAAO;AACzB,cAAI,WAAW;AACX,iBAAK,iBAAiB,KAAK,aAAa,UAAU;AAClD,iBAAK,eAAe,eAAe,SAAS,UAAU,OAAO;;;AAGrE,wBAAgB,KAAK,QAAQ,IAAI;AACjC,gBAAQ,UAAU;AAClB,gBAAQ,WAAW;AACnB,gBAAQ,WAAW;AACnB,gBAAQ,QAAQ;;MAEpB;AACI,aAAK,YAAY;AACjB,eAAO,KAAK,QAAQ,IAAI,QAAQ;;MAMpC;AACI,eAAO,KAAK,QAAQ,IAAI;;MAE5B;AACI,YAAI,CAAC,OAAM,QAAQ;AACf,iBAAO;;AAEX,YAAI,KAAK,cAAc;AACnB,eAAK,aAAa,WAAS,YAAY;;AAE3C,eAAO,KAAK;;MAShB,2CAA2C;AACvC,2BAAmB,KAAK;AACxB,YAAI,CAAC,KAAK,yBAAyB,cAAc;AAC7C,kBA
AQ,KAAK;AAIb,eAAK,wBAAwB;;AAEjC,eAAO,cAAc,QACjB,OAAO,MAAM,YAAS,KAAK,QAAQ,IAAI,OAAM,QAAQ,WAAW,QAC5D,eAAmB,OAAM,SAAS;;MAE9C;AACI,eAAO,KAAK;;MAEhB;AACI,YAAI,KAAK,mBAAmB,CAAC;AACzB,4BAAkB,aAAa,KAAK,QAAQ,IAAI,EAAE,QAAQ,QAAQ,OAAO,MAAM,EAAE,OAAO,EAAE;AAC1F,iBAAO,KAAK,WAAW,MAAM,EAAE,OAAO;;AAG1C,YAAI,eAAmB,UAAU;AAC7B,iBAAO,QAAO,IAAI,MAAM,EAAE;;AAE9B,eAAQ,YAAa,KAAK,QAAQ,IAAI,EAAE;AACxC,4BAAoB,kBAA4B,EAAE,OAAO,OAAO;AAChE,YAAI,YAAY,CAAC;AACb,0BAAgB,OAAM,QAAQ,iCAC1B,IAAI,mBAAmB,QACvB,IAAI,aAAa;AACrB,8BAAoB,QAAQ,mBAAmB;AAC/C,iBAAO,KAAK,cAAc,SAAS,CAAC,IAAI,MAAM;;AAElD,aAAK,YAAY,EAAE;AACnB,eAAO,KAAK,aAAa,GAAG,OAAO;;MAEvC;AACI,yBAAiB,KAAK,QAAQ,IAAI,EAAE;AACpC,kBAAU,KAAK,WAAW,MAAM,EAAE;AAClC,2BAAmB,KAAK,QAAQ,IAAI,EAAE;AAEtC,eAAO,OAAO,YAAY;AAC1B,mBAAW,QAAQ;AACnB,mBAAW,QAAQ,EAAE;AACrB,yBAAiB,mBAA6B,OAAO,EAAE;AACvD,YAAI,SAAS;AAGT,wBAAc,SAAS,MAAM;;AAEjC,mBAAW,QAAQ;UACf;UAEA,YAAY,SAAS,SAAS,SAAS,MAAM,cAAc,EAAE;;AAGjE,yBAAiB,KAAK,aAAa,IAAI,WAAW,MAAM,eAAe;AACvE,aAAK,aAAa,IAAI,WAAW,MAAM,YAAY,WAAW;AAC9D,eAAO;;MAEX;AACI,uBAAe,KAAK,mBAAmB,CAAC,IAAI,MAAM,KAAK,WAAW,aAAa,GAAG,OAAO,KAAK;AAC9F,YAAI;AACA,iBAAO;;AAEX,yBAAiB,iBAA2B,OAAO,KAAK;AACxD,YAAI,SAAS,KAAK,UAAQ,SAAS;AAC/B,iBAAO,QAAO,IAAI;;AAEtB,wBAAgB,IAAI,oBAAoB,OAAO,SAAS;AACxD,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,OAAM,QAAQ,iCAC1B,IAAI,qBAAqB,EAAE,OAAO,QAClC,IAAI,eAAe,EAAE,OAAO;AAChC,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,uBAAe,KAAK,mBAAmB,CAAC,IAAI,MAAM,KAAK,WAAW,IAAI;AACtE,YAAI;AACA,iBAAO;;AAEX,YAAI,OAAM,QAAQ;AACd,iBAAO,KAAK,cAAc,GAAG,KAAc,EAAE;;AAEjD,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,4BAAoB,aAAa,EAAE,MAAM,KAAK,EAAE,MAAM;AACtD,4BAAoB,aAAa,EAAE,MAAM,KAAK,EAAE,MAAM;AACtD,0BAAkB,aAAa,EAAE,MAAM,KAAK,EAAE,MAAM;AACpD,sBAAc,KAAK,IAAI,EAAE,MAAM,IAAI,EAAE,MAAM;AAG3C,YAAK,iBAAgB,KAAK,gBAAgB,MACtC,YAAY;AACZ,cAAI;AACA,gBAAI,WAAU,GAAG,CAAC,GAAG,GAAG;;AAE5B,cAAI;AACA,gBAAI,WAAU,GAAG,CAAC,GAAG,GAAG;;AAE5B,sBAAY,gBAAgB,IAAI,IAAI,EAAE,KAAK,OAAO,WAAW;AAC7D,uBAAa,gBAAgB,IAAI,IAAI;AACrC,sBAAY,gBAAgB,IAAI,EAAE,KAAK,OAAO,GAAG,aAAa;AAG9D,0BAAgB,KAAO,KAAK;AAC5B,iBAAO,QAAQ,IAAI,MAAM;;AAE7B,sBAAc,YAAW,EAAE,OAAO,EAAE;AACpC,wBAAgB,IAAI,oBAAoB,EAAE,OAAO,EAAE,OAAO,CAAC,OAAO,aAAa,cAAc,YAAY;AACzG,eAAO,KAAK,cAAc,SAAS,CAAC,GAAG,IAAI;;MAE/C,kBAAmB,GAAG,GAAG,YAAY,YAAY,MAAM,yBAAY;AAC/D,4BAAoB,aAAa,EAAE,MAAM,KAAK,EAAE,MAAM;AACtD,4BAAoB,aAAa,EAAE,MAAM,KAAK,EAAE,MAAM;AACtD,sBAAc,KAAK,IAAI,EAAE,MAAM,IAAI,EAAE,MAAM;AAC3C,sBAAc,YAAW,EAAE,OAAO,EAAE;AACpC,wBAAgB,QAAQ;AACxB,0CAAkC,0BAA0B;AAC5D,gCAAwB,cAAa,6BAA6B,aAAY,QAAQ;AACtF,wBAAgB,IAAI,oBAAoB,EAAE,OAAO,EAAE,OAAO,CAAC,OAAO,aAAa,cAAc,YAAY,YAAY,SAAS,iBAAiB;AAC/I,uBAAe,CAAC,GAAG;AACnB,YAAI;AACA,iBAAO,KAAK;;AAEhB,YAAI;AACA,iBAAO,KAAK;;AAEhB,eAAO,KAAK,cAAc,SAAS,QAAQ;;MAE/C;AACI,wBAAgB,OAAM,QAAQ,8BAC1B,IAAI,iBAAiB,EAAE,OAAO,QAAQ,MAAM,OAAO,QACnD,IAAI,WAAW,EAAE,OAAO,QAAQ,MAAM,OAAO;AACjD,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,eAAe,WAAW,OAAO,aAAa,MAAM,OAAO;AAC/E,eAAO,KAAK,cAAc,SAAS,CAAC,YAAY,aAAa;;MAEjE;AACI,YAAI,EAAE,UAAU;AACZ,wBAAa,KAAK,SAAS,EAAE;AAC7B,8BAAoB,MAAK,IAAI,OAAK,cAAkB;AACpD,sBAAY,QAAO,EAAE,OAAO,EAAE,OAAO;AACrC,iBAAO,OAAK,KAAK;;AAErB,wBAAgB,IAAI,YAAY,EAAE,OAAO;AACzC,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,OAAM,QAAQ,iCAC1B,IAAI,iBAAiB,EAAE,OAAO,UAAU,iBACxC,IAAI,WAAW,EAAE,OAAO,UAAU;AACtC,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,uBAAe,KAAK,mBAAmB,CAAC,GAAG,UAAU,MAAM,KAAK,WAAW,OAAO,GAAG,SAAS;AAC9F,YAAI;AACA,iBAAO;;AAEX,wBAAgB,IAAI,cAAc,EAAE,OAAO,QAAQ,MAAM;AACzD,eAAO,KAAK,cAAc,SAAS,CAAC,GAAG;;MAE3C;AACI,gBAAY,EAAE,QAAQ,GAAG,MAAM;AAE/B,sBAAa,WAAW,OAAO,UAAU,IAAI;AAC7C,yBAAiB,aAAyB,EAAE,OAAO,YAAY;AAC/D,yBAAiB,aAAyB,
SAAS,QAAQ,WAAW;AACtE,iCAAyB,qBAAiC,EAAE,OAAO,YAAY;AAC/E,iCAAyB,qBAAiC,OAAO,WAAW;AAC5E,0BAAkB,cAA0B,kBAAkB,OAAO,WAAW;AAChF,eAAO,WAAU,EAAE,QAAQ,WAAW,UACjC,QAAQ,kBACR,MAAM,kBAAkB;;MAEjC;AACI,gBAAY,EAAE,QAAQ,GAAG,MAAM;AAE/B,sBAAa,WAAW,OAAO,UAAU,IAAI;AAC7C,iCAAyB,CAAC,CAAC,GAAG;AAC9B,yBAAiB,KAAK,GAAG;AACzB,qBAAa,IAAI,WAAW,QAAQ,IAAI,EAAE,MAAM,QAAQ,EAAE;AACtD,2BAAiB,KAAK,CAAC,GAAG;;AAE9B,wBAAgB,EAAE,IAAI;AACtB,oCAA4B,aAAyB,QAAQ,OAAO,YAAY,OAAM;AACtF,kDAA0C,aAAyB,oBAAoB,QAAQ,WAAW,QAAQ;AAClH,6BAAqB,qBAAiC,QAAQ,OAAO,YAAY,OAAM;AACvF,yBAAiB,WAAU,QAAQ,QAAQ,sBAAsB;AACjE,eAAO,SAAQ,UAAU;;MAE7B;AACI,0BAAkB,EAAE,MAAM;AAC1B,uBAAe,EAAE,MAAM;AACvB,2BAAmB,0BAAsC;AACzD,wBAAgB,KAAK,KAAK,SAAS;AACnC,2BAAmB,CAAE,YAAY,QAAQ,WAAW;AACpD,wBAAgB,IAAI,cAAc,YAAY;AAC9C,uBAAe,KAAK,cAAc,SAAS,CAAC,IAAI;AAEhD,YAAI,OAAO,MAAM,OAAO;AACpB,iBAAO;;AAEX,eAAO,KAAK,OAAO,QAAQ,YAAY;;MAE3C,wCAAwC;AACpC,wBAAgB,EAAE,MAAM;AACxB,qBAAa,EAAE,MAAM;AACrB,YAAI,gBAAgB;AAChB,sBAAY,aAAa,MAAM;AAC/B,mBAAS,aAAa,MAAM;;AAEhC,2BAAmB,0BAAsC;AACzD,2BAAmB;UACf;UACA;UACA;UACA,SAAS,KAAK,KAAK,SAAS;;AAEhC,wBAAgB,IAAI,iBAAiB,YAAY,YAAY,gBAAgB;AAC7E,uBAAe,CAAC;AAChB,YAAI,gBAAgB;AAChB,iBAAO,KAAK;;AAEhB,uBAAe,KAAK,cAAc,SAAS,QAAQ;AAEnD,YAAI,OAAO,MAAM,OAAO;AACpB,iBAAO;;AAEX,eAAO,KAAK,UAAU,GAAG,YAAY;;MAEzC,8CAA8C;AAC1C,wBAAgB,gBAAgB,OAAO,aAAa,QAAQ,EAAE;AAC9D,uBAAe,QAAQ,QAAQ,SAAS;AACxC,2BAAmB,0BAAsC;AACzD,wBAAgB,IAAI,uBAAuB,SAAS,YAAY,YAAY,gBAAgB;AAC5F,uBAAe,gBAAgB,OAAO,CAAC,KAAK,CAAC,GAAG;AAChD,uBAAe,KAAK,cAAc,SAAS,QAAQ;AACnD,YAAI,OAAO,SAAS,EAAE;AAClB,iBAAO,KAAK,gBAAgB,GAAG,YAAY;;AAE/C,eAAO;;MAEX;AACI,oCAAwC,OAAO,MAAM,EAAE;AACvD,wCAAgC,2BAAuC,EAAE,OAAO;AAChF,uBAAe,eAAmB;AAClC,oBAAY,EAAE,KAAK,IAAI;AACvB,4BAAoB,WAAc,EAAE;AACpC,eAAO,KAAK,OAAO,KAAK,OAAO,aAAa,QAAQ;;MAExD;AACI,uBAAe,KAAK,mBAAmB,CAAC,IAAI,MAAM,KAAK,WAAW,KAAK,GAAG;AAC1E,YAAI;AACA,iBAAO;;AAEX,wCAAgC,2BAAuC,EAAE,OAAO;AAChF,uBAAe,eAAmB;AAClC,oBAAY,EAAE,KAAK,IAAI;AACvB,4BAAoB,WAAc,EAAE;AACpC,eAAO,KAAK,OAAO,KAAK,QAAQ,aAAa,QAAQ;;MAEzD;AACI,mBAAW;AACX,4BAAoB,oBAAgC,CAAC,OAAO,EAAE;AAC9D,wBAAgB;AAChB,YAAI,eAAe;AACf,sBAAY,WAAU,GAAG;AACzB,iBAAO,kBAA8B,GAAG,EAAE,MAAM;;AAEpD,yBAAiB,eAAa,gBAAgB,UAAU,OAAO,MAAM;AACrE,uBAAe,eAAmB,CAAC,UAAU,MAAM;AACnD,oBAAY,UAAU,KAAK,IAAI;AAC/B,4BAAoB,WAAc,EAAE;AACpC,qBAAa,KAAK,aAAa,KAAK,sBAAsB,YAAY,aAAa,aAC9E,QAAQ;AACb,YAAI,eAAe;AACf,mBACI,WAAU,QAAQ,wBAAoC;;AAE9D,eAAO;;MAEX;AACI,0BAAkB,EAAE,MAAM;AAC1B,uBAAe,EAAE,MAAM;AACvB,2BAAmB,eAAa,8BAA8B,QAAQ;AACtE,0BAAkB,CAAE,YAAY,QAAQ,WAAW;AACnD,wBAAgB,IAAI,iBAAiB,WAAW;AAChD,uBAAe,KAAK,cAAc,SAAS,CAAC,GAAG,aAAa;AAE5D,YAAI,OAAO,MAAM,OAAO;AACpB,iBAAO;;AAEX,qBAAa,MAAM,GAAG,aAAa,KAAK,CAAC,SAAS;AAClD,eAAO,KAAK,aAAa,QAAQ,WAAW,YAAY,OAAO;;MAEnE;AACI,qBAAa,CAAC;AACd,oCAAwC,QAAQ,WAAW,OAAO,GAAG,gBAAgB,WAAW,MAAM,IAAI,MAAM,EAAE;AAClH,YAAI,CAAC,OAAM,QAAQ,wBAAwB,EAAE,QAAQ;AACjD,0CAAgC,2BAAuC,EAAE,OAAO;AAChF,yBAAe,eAAmB;AAClC,sBAAY,EAAE,KAAK,IAAI;AACvB,iBAAO,KAAK,UAAU,KAAK,YAAY,QAAQ;;AAEnD,eAAO,KAAK,gBAAgB,GAAG;;MAEnC;AACI,eAAO,KAAK,gBAAgB,GAAG,MAAM;;MAEzC;AACI,eAAO,KAAK,gBAAgB,GAAG,MAAM;;MAEzC;AACI,YAAI,SAAS,EAAE,OAAO;AAClB,gBAAM,IAAI,MAAM,kDAAkD,EAAE,OAAO,kBACvD;;AAExB,qBAAa,EAAE,MAAM;AACrB,qBAAa;AAGb,qBAAa,GAAG,KAAK,KAAK,KAAK,KAAK,KAAK,SAAS,GAAG;AACjD,0BAAgB,IAAI,cAAc,EAAE,OAAO,OAAO;AAClD,8BAAoB,QAAQ,mBAAmB;AAC/C,6BAAmB;AACnB,mBAAS,KAAK,cAAc,SAAS,CAAC,SAAS,OAAO,OAAO;AAC7D,qBAAW;;AAIf,YAAI;AACA,0BAAgB,IAAI,cAAc,EAAE,OAAO,WAAW;AACtD,6BAAmB;AACnB,mBAAS,KAAK,cAAc,SAAS,CAAC;AACtC,qBAAW;;AAEf,eAAO;;MAEX;AACI,YAAI,OAAM,QAAQ;AACd,iBAAO,KAAK,eAAe,GAAG,GAAG,SAA2B;;AAEhE,wBAAgB,IAAI,gBAAgB,OAAoB,EAAE,OAAO,EAAE;AACnE,eAAO,KAAK,cAAc,SAAS,CAAC,GAAG,IAAI;;MAE/C;AACI,uBAAe,KAAK,mBAAmB,CAAC,
GAAG,IAAI,MAAM,KAAK,WAAW,KAAK,GAAG;AAC7E,YAAI;AACA,iBAAO;;AAEX,YAAI,OAAM,QAAQ;AACd,iBAAO,KAAK,eAAe,GAAG,GAAG,QAA0B;;AAE/D,wBAAgB,IAAI,gBAAgB,MAAmB,EAAE,OAAO,EAAE;AAClE,eAAO,KAAK,cAAc,SAAS,CAAC,GAAG,IAAI;;MAE/C;AACI,YAAI,OAAM,QAAQ;AACd,iBAAO,KAAK,eAAe,GAAG,GAAG,cAAgC;;AAErE,wBAAgB,IAAI,gBAAgB,YAAyB,EAAE,OAAO,EAAE;AACxE,eAAO,KAAK,cAAc,SAAS,CAAC,GAAG,IAAI;;MAE/C;AACI,uBAAe,KAAK,mBAAmB,CAAC,GAAG,IAAI,MAAM,KAAK,WAAW,QAAQ,GAAG;AAChF,YAAI;AACA,iBAAO;;AAEX,YAAI,OAAM,QAAQ;AACd,iBAAO,KAAK,eAAe,GAAG,GAAG,WAA6B;;AAElE,wBAAgB,IAAI,gBAAgB,SAAsB,EAAE,OAAO,EAAE;AACrE,eAAO,KAAK,cAAc,SAAS,CAAC,GAAG,IAAI;;MAE/C;AACI,YAAI,OAAM,QAAQ;AACd,iBAAO,KAAK,eAAe,GAAG,GAAG,iBAAmC;;AAExE,wBAAgB,IAAI,gBAAgB,eAA4B,EAAE,OAAO,EAAE;AAC3E,eAAO,KAAK,cAAc,SAAS,CAAC,GAAG,IAAI;;MAE/C;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,YAAI,OAAM,QAAQ;AACd,iBAAO,KAAK,eAAe,GAAG,GAAG,eAAiC;;AAEtE,wBAAgB,IAAI,gBAAgB,aAA0B,EAAE,OAAO,EAAE;AACzE,eAAO,KAAK,cAAc,SAAS,CAAC,GAAG,IAAI;;MAE/C;AACI,YAAI,OAAM,QAAQ;AACd,iBAAO,KAAK,eAAe,GAAG,GAAG,cAAgC;;AAErE,wBAAgB,IAAI,gBAAgB,YAAyB,EAAE,OAAO,EAAE;AACxE,eAAO,KAAK,cAAc,SAAS,CAAC,GAAG,IAAI;;MAE/C;AACI,wBAAgB,IAAI,cAAc,UAAU,MAAM,EAAE,OAAO,EAAE;AAC7D,eAAO,KAAK,cAAc,SAAS,CAAC,WAAW,GAAG,IAAI,YAAW,EAAE,OAAO,EAAE;;MAEhF;AACI,cAAkB;AAElB,yBAAiB,UAAU;AAC3B,eAAO,YAAU,UAAU,OAAO;;MAEtC;AACI,sBAAc,EAAE;AAChB,eAAO,WAAS,OAAO,EAAE,OAAO,EAAE,OAAO,GAAG;;MAEhD;AACI,oCAAwC,OAAO,MAAM,EAAE;AACvD,wCAAgC,2BAAuC,EAAE,OAAO;AAChF,uBAAe,eAAmB;AAClC,oBAAY,EAAE,KAAK,IAAI;AACvB,eAAO,KAAK,OAAO,KAAK,OAAO,IAAI,OAAO,QAAQ;;MAEtD;AACI,uBAAe,KAAK,mBAAmB,CAAC,GAAG,IAAI,MAAM,KAAK,WAAW,QAAQ,GAAG;AAChF,YAAI;AACA,iBAAO;;AAEX,wBAAgB,OAAM,QAAQ,kCAC1B,IAAI,sBAAsB,OAAyB,EAAE,OAAO,EAAE,SAC9D,IAAI,gBAAgB,KAAkB,EAAE,OAAO,EAAE;AACrD,eAAO,KAAK,cAAc,SAAS,CAAC,GAAG;;MAE3C;AACI,wBAAgB,OAAM,QAAQ,kCAC1B,IAAI,sBAAsB,OAAyB,EAAE,OAAO,EAAE,SAC9D,IAAI,gBAAgB,KAAkB,EAAE,OAAO,EAAE;AACrD,eAAO,KAAK,cAAc,SAAS,CAAC,GAAG;;MAE3C;AACI,uBAAe,KAAK,mBAAmB,CAAC,GAAG,IAAI,MAAM,KAAK,WAAW,QAAQ,GAAG;AAChF,YAAI;AACA,iBAAO;;AAEX,wBAAgB,OAAM,QAAQ,kCAC1B,IAAI,sBAAsB,OAAyB,EAAE,OAAO,EAAE,SAC9D,IAAI,gBAAgB,KAAkB,EAAE,OAAO,EAAE;AACrD,eAAO,KAAK,cAAc,SAAS,CAAC,GAAG;;MAE3C;AACI,oCAAwC,OAAO,MAAM,EAAE;AACvD,wCAAgC,2BAAuC,EAAE,OAAO;AAChF,uBAAe,eAAmB;AAClC,oBAAY,EAAE,KAAK,IAAI;AACvB,eAAO,KAAK,OAAO,KAAK,OAAO,IAAI,OAAO,QAAQ;;MAEtD;AACI,oCAAwC,OAAO,MAAM,EAAE;AACvD,wCAAgC,2BAAuC,EAAE,OAAO;AAChF,uBAAe,eAAmB;AAClC,oBAAY,EAAE,KAAK,IAAI;AACvB,eAAO,KAAK,OAAO,KAAK,OAAO,IAAI,OAAO,QAAQ;;MAEtD;AACI,oBAAW;AACX,4BAAoB;AACpB,YAAI,OAAM,QAAQ;AACd,iBAAO,KAAK,eAAe,GAAG,GAAG,WAA6B;;AAElE,wBAAgB,IAAI,gBAAgB,KAAI,EAAE,OAAO,EAAE;AACnD,eAAO,KAAK,cAAc,SAAS,CAAC,GAAG,IAAI;;MAE/C;AACI,wBAAgB,IAAI,qBAAqB,EAAE,OAAO;AAClD,eAAO,KAAK,cAAc,SAAS,CAAC,IAAI;;MAE5C,oDAAmD;AAC/C,wBAAgB,IAAI,sBAAsB,KAAI,EAAE,OAAO,EAAE,OAAO;AAChE,eAAO,KAAK,cAAc,SAAS,CAAC,GAAG,IAAI;;MAK/C;AACI,eAAO;UACH,QAAQ,YAAY;UACpB,OAAO,YAAY;UACnB,OAAO,cAAc;;;MAG7B;AACI,YAAI,QAAQ,WAAW;AACnB,iBAAO,QAAQ;;AAGnB,YAAI,QAAQ,SAAS,OAAM,IAAI;AAC3B,2BAAiB,KAAK,MAAM,QAAQ,SAAS;AAC7C,2BAAiB,KAAK,KAAK,QAAQ,MAAM,GAAG;AAC5C,4BAAkB,KAAK,KAAK,QAAQ,MAAM;AAC1C,iBAAO,KAAK,KAAK,CAAC,UAAU;;AAEhC,sBAAc,QAAQ,IAAI,OAAK,EAAE,OAAO,OAAO,YAAY,YAAW,IAAI;AAC1E,uBAAe,QAAQ,IAAI,OAAK,EAAE;AAElC,4BAAoB,OAAM,QAAQ;AAClC,wBAAgB,cACZ,IAAI,kBAAkB,QAAQ,GAAG,OAAO,UACxC,IAAI,YAAY,QAAQ,GAAG,OAAO;AACtC,eAAO,KAAK,cAAc,SAAS,SAAS;;MAEhD;AACI,4BAAoB,OAAM,QAAQ;AAClC,wBAAgB,cACZ,IAAI,sBAAsB,OAAyB,EAAE,OAAO,EAAE,SAC9D,IAAI,gBAAgB,KAAkB,EAAE,OAAO,EAAE;AACrD,sBAAc,YAAW,EAAE,OAAO,EAAE;AACpC,eAAO,KAAK,cAAc,SAAS,CAAC,GAAG,IAAI;;MAE/C;AACI,YAAI,KAAK,mBAAmB,CAAC;AACzB,4BAAkB,YAAY,KAAK,QAAQ,IAAI,EA
AE,QAAQ,QAAQ,EAAE;AACnE,iBAAO,KAAK,WAAW,EAAE,OAAO,EAAE,OAAO;;AAE7C,YAAI,OAAM,QAAQ;AACd,iBAAO,KAAK,cAAc,GAAG,MAAe,EAAE;;AAElD,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,YAAI,KAAK,mBAAmB,CAAC;AACzB,4BAAkB,aAAa,KAAK,QAAQ,IAAI,EAAE,QAAQ,QAAQ,EAAE;AACpE,iBAAO,KAAK,WAAW,EAAE,OAAO,EAAE,OAAO;;AAE7C,YAAI,OAAM,QAAQ;AACd,iBAAO,KAAK,cAAc,GAAG,OAAgB,EAAE;;AAEnD,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC,IAAI;;MAE5C;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC,IAAI;;MAE5C;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC,IAAI;;MAE5C;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,YAAI,KAAK,mBAAmB,CAAC;AACzB,4BAAkB,WAAW,KAAK,QAAQ,IAAI,EAAE,QAAQ,QAAQ,EAAE;AAClE,iBAAO,KAAK,WAAW,EAAE,OAAO,EAAE,OAAO;;AAE7C,YAAI,OAAM,QAAQ;AACd,iBAAO,KAAK,cAAc,GAAG,KAAc,EAAE;;AAEjD,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,YAAI,KAAK,mBAAmB,CAAC;AACzB,4BAAkB,aAAa,KAAK,QAAQ,IAAI,EAAE,QAAQ,QAAQ,EAAE;AACpE,iBAAO,KAAK,WAAW,EAAE,OAAO,EAAE,OAAO;;AAE7C,YAAI,OAAM,QAAQ;AACd,iBAAO,KAAK,cAAc,GAAG,OAAgB,EAAE;;AAEnD,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,qBAAa,gBAAoB,CAAC,MAAM,OAAO;AAG/C,yBAAiB,KAAI,QAAQ;AAC7B,8BAAsB,sBAAkC,SAAS,OAAO;AAGxE,kBAAU,IAAO,QAAQ,SAAS,QAAQ;AAC1C,kBAAU,KAAK,IAAI;AACnB,uBAAe,KAAK,IAAI,GAAG,MAAM,QAAQ;AAGzC,eAAO,IAAI,GAAG;;MAElB;AACI,YAAI,KAAK,mBAAmB,CAAC;AACzB,4BAAkB,WAAW,KAAK,QAAQ,IAAI,EAAE,QAAQ,QAAQ,EAAE;AAClE,iBAAO,KAAK,WAAW,EAAE,OAAO,EAAE,OAAO;;AAE7C,YAAI,OAAM,QAAQ;AACd,iBAAO,KAAK,cAAc,GAAG,OAAqB,EAAE;;AAExD,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,YAAI,KAAK,mBAAmB,CAAC;AACzB,4BAAkB,aAAa,KAAK,QAAQ,IAAI,EAAE,QAAQ,QAAQ,EAAE;AACpE,iBAAO,KAAK,WAAW,EAAE,OAAO,EAAE,OAAO;;AAE7C,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI;AACA,YAAI,OAAM,QAAQ;AACd,oBAAU,IAAI,qBAAqB,EAAE,OAAO;;AAG5C,oBAAU,IAAI,eAAe,EAAE,OAAO;;AAE1C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI;AACA,YAAI,OAAM,QAAQ;AACd,oBAAU,IAAI,qBAAqB,EAAE,OAAO;;AAG5C,oBAAU,IAAI,eAAe,EAAE,OAAO;;AAE1C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,OAAM,QAAQ,kCAC1B,IAAI,sBAAsB,SAA2B,EAAE,OAAO,MAAM,SACpE,IAAI,gBAAgB,OAAoB,EAAE,OAAO,MAAM;AAC3D,eAAO,KAAK,cAAc,SAAS,CAAC,GAAG;;MAE3C;AACI,YAAI,OAAM,QAAQ;AACd,iBAAO,KAAK,cAAc,GAAG,OAAqB,EAAE;;AAExD,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,OAAM,QAAQ,kCAC1B,IAAI,sBAAsB,WAA6B,GAAG,OAAO,EAAE,SACnE,IAAI,gBAAgB,SAAsB,GAAG,OAAO,EAAE;AAC1D,eAAO,KAAK,cAAc,SAAS,CAAC,IAAI;;MAE5C;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI;AACA,YAAI,OAAM,QAAQ;AACd,oBAAU,IAAI,kBAAkB,EAAE;;AAGlC,oBAAU,IAAI,YAAY,EAAE;;AAEhC,4BAAoB,QAAQ,mBAAmB,MAAK;AACpD,eAAO,KAAK,cAAc,SAAS,CAAC,IAAI,MAAM;;MAElD;AAEI,YAAI,KAAK,mBAAmB,CAAC,OAAO,EAAE,UAAU;AAC5C,4BAAkB,iBAAiB,KAAK,QAAQ,IAAI,EAAE,QAAQ;AAC9D,iBAAO,KAAK,WAAW,EAAE,OAAO,EAAE,OAAO;;AAE7C,YAAI,OAAM,QAAQ;AACd,iBAAO,KAAK,cAAc,GAAG,KAAc,EAAE;;AAEjD,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,sBAAc,KAAK,QAAQ,IAAI,EAAE;AACjC,wBAAgB,IAAI,kBAAkB,EAAE;AACxC,uBAAe;UACX,KAAK,+BAA+B,GAAG,MAAM,mBAAmB;UAChE,KAAK,+BAA+B,GAAG,MAAM,mBAAmB;;AAEpE,eAAO,KAAK,cAAc,SAAS;;MAEvC;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,
eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,eAAe,EAAE,OAAO,KAAc;AAC1D,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AAGI,uBAAe,EAAE;AACjB,yBAAiB,KAAK,QAAQ,IAAI,EAAE;AACpC,gCAAwB,SAAS;AACjC,4BAAoB,OAAO,KAAK,OAAO,KAAK,OAAO;AACnD,iCAAyB,SAAS;AAClC,+BAAuB,SAAS,eAAe;AAC/C,2BAAmB;AACnB,2BAAmB;AAGnB,0CAAmC,iBAAgB,KAAK,qBAAqB,MACzE,kBAAkB;AACtB,uCAA+B,OAAO,KAAK,MAAM,KAAK,CAAC,CAAC,SAAS;AACjE,YAAI,6BAA6B,CAAC,OAAM,QAAQ,0BAC5C,CAAC,OAAM,QAAQ,mCACf,CAAC;AACD,+BAAoB,iBAAiB,OAAO,KAAK,OAAO,KAAK,OAAO,KAChE,OAAO,KAAK,OAAO,KAAK,OAAO;AACnC,6BAAkB,SAAQ,GAAG,CAAC,GAAG,cAAa,SAAS;AACvD,kCAAuB,SAAQ,QAAQ,CAAC,GAAG,SAAS,YAAY,SAAS;AACzE,yBAAe,KAAK,iBAAiB;YACjC,GAAG;YACH,GAAG;YACH;YACA;YACA;YACA;YACA;;AAEJ,iBAAO,SAAQ,QAAQ,SAAS;;AAUpC,4BAAoB,iBAChB,OAAO,KAAK,OAAO,KAAM,QAAO,KAAK,KACrC,OAAO,KAAK,OAAO,KAAM,QAAO,KAAK;AACzC,0BAAkB;UACd,QAAQ,EAAE;UACV,OAAO,CAAC,GAAG,aAAa,SAAS;UACjC,OAAO,EAAE;;AAUb,sCAA8B,SAAS;AACvC,iBAAS,QAAQ,SAAS,MAAM;AAChC,iBAAS,MAAM,SAAS,MAAM,SAAS;AACvC,gBAAY,cAAyB,SAAS,OAAO,UAAU,QAAQ,MAAM,kBAAkB,SAAS,YAAY,UAAU;AAC9H,+BAAuB,SAAQ,QAAQ,CAAC,GAAG,SAAS,YAAY,SAAS;AACzE,8BAAsB,KAAK,iBAAiB;UACxC,GAAG;UACH,GAAG;UACH;UACA;UACA;UACA;UACA;;AAEJ,qCAA6B,KAAK,QAAQ,IAAI,cAAc;AAC5D,gBAAY,qBAAqB,UAAU,MAAM;AAEjD,iBAAS,QAAQ;AAGjB,6BAAqB,QAAQ,SAAS;AACtC,eAAO,WAAS,qBAAqB,cAAc,QAAQ,SAAS,UAAU,cAAc;;MAEhG;AAOI,eAAQ,aAAa,cAAc,YAAY,UAAU,WAAW,cAAe;AACnF,+BAAuB,eAAe;AACtC,0BAAkB,cAAc,eAAe;AAC/C,wBAAgB,YAAY;AAC5B,2BAAmB,CAAC,WAAW;AAC/B,2BAAmB;AACnB,2BAAmB;AACnB,0BAAkB,EAAE,QAAQ,CAAC;AAC7B,sBAAc,OAAO,QAAQ,CAAC,GAAG,WAAW;AAC5C,8BAAsB,IAAI,oBAAoB,YAAY,UAAU,OAAO;AAC3E,uBAAe,KAAK,cAAc,eAAe,CAAC,YAAY,QAAQ;UAClE;UAAG,WAAW;UAAI,WAAW;;AAEjC,wBAAgB,QAAQ;AACxB,0CAAkC,0BAA0B;AAC5D,gCAAwB,cAAa,6BAA6B,aAAY,QAAQ;AACtF,8BAAsB,IAAI,oBAAoB,OAAO,OAAO,MAAM,OAAO,CAAC,GAAG,SAAS,SAAS,cAAc,YAAY,YAAY,SAAS,iBAAiB;AAC/J,uBAAe,CAAC,QAAQ;AACxB,YAAI;AACA,iBAAO,KAAK;;AAEhB,YAAI;AACA,iBAAO,KAAK;;AAEhB,wBAAgB,KAAK,cAAc,eAAe;AAClD,YAAI;AACA,iBAAO,QAAQ,QAAQ,CAAC,GAAG,WAAW,UAAU,SAAS;;AAGzD,iBAAO,QAAQ,QAAQ,CAAC,GAAG,SAAS,aAAa,WAAW;;;MAGpE,aAAc,eAAO,QAAQ,UAAU,MAAM,yBAAY;AACrD,YAAI,SAAS,iBAAiB,KAAK,SAAS,gBAAgB,KACxD,SAAS,mBAAmB,KAAK,SAAS,kBAAkB,KAC5D,SAAS,iBAAiB,KAAK,SAAS,gBAAgB,KACvD,UAAS,QAAQ,SAAS,UACvB,SAAS,QAAQ,SAAS;AAC9B,iBAAO,KAAK,eAAe,QAAO,QAAQ,UAAU,MAAM,aAAY;;AAE1E,YAAI,OAAM,QAAQ,wBAAwB,OAAM,MAAM,OAAO;AACzD,iBAAO,KAAK,iBAAiB,QAAO,QAAQ,UAAU,MAAM,aAAY;;AAE5E,wBAAgB,QAAQ;AACxB,0CAAkC,0BAA0B;AAC5D,gCAAwB,cAAa,6BAA6B,aAAY,SAAS;AACvF,wBAAgB,IAAI,cAAc,UAAU,SAAS,iBAAiB;AACtE,uBAAe,CAAC,QAAO;AACvB,YAAI;AACA,iBAAO,KAAK;;AAEhB,YAAI;AACA,iBAAO,KAAK;;AAEhB,eAAO,KAAK,cAAc,SAAS;;MAEvC;AACI,YAAI,SAAS,iBAAiB,KAAK,SAAS,gBAAgB,KACxD,SAAS,mBAAmB,KAAK,SAAS,kBAAkB,KAC5D,SAAS,iBAAiB,KAAK,SAAS,gBAAgB,KACvD,UAAS,QAAQ,SAAS,UACvB,SAAS,QAAQ,SAAS;AAC9B,iBAAO,KAAK,eAAe,GAAG,QAAQ;;AAE1C,YAAI,OAAM,QAAQ,wBAAwB,EAAE,MAAM,OAAO;AACrD,iBAAO,KAAK,iBAAiB,
GAAG,QAAQ;;AAE5C,wBAAgB,IAAI,cAAc;AAClC,eAAO,KAAK,cAAc,SAAS,CAAC,GAAG;;MAE3C;AACI,wBAAgB,IAAI,sBAAsB;AAC1C,eAAO,KAAK,cAAc,SAAS,CAAC,IAAI;;MAE5C;AACI,wBAAgB,IAAI,uBAAuB;AAC3C,eAAO,KAAK,cAAc,SAAS,CAAC,GAAG;;MAE3C,sBAAuB,eAAO,QAAQ,UAAU,MAAM,yBAAY;AAC9D,wCAAgC,OAAM,QAAQ,+BAC1C,SAAS,eAAe,KACxB,SAAS,cAAc,SAAS,eAAe;AACnD,gCAAwB,cACpB,6BAA6B,aAAY,2BACzC;AACJ,uBAAe,CAAC,QAAO;AACvB,wBAAgB,QAAQ;AACxB,0CAAkC,0BAA0B;AAC5D,YAAI;AACA,iBAAO,KAAK;;AAEhB,YAAI;AACA,iBAAO,KAAK;;AAEhB;AACA,YAAI;AACA,oBAAU,IAAI,6BAA6B,UAAU,SAAS,iBAAiB;AAC/E,iBAAO,KAAK,cAAc,SAAS;;AAEvC,kBAAU,IAAI,uBAAuB,UAAU,SAAS,iBAAiB;AACzE,eAAO,KAAK,cAAc,SAAS;;MAEvC;AACI;AACA,YAAI,OAAM,QAAQ,+BACd,SAAS,eAAe,KACxB,SAAS,cAAc,SAAS,eAAe;AAC/C,oBAAU,IAAI,6BAA6B;AAC3C,iBAAO,KAAK,cAAc,SAAS,CAAC,GAAG;;AAE3C,kBAAU,IAAI,uBAAuB;AACrC,eAAO,KAAK,cAAc,SAAS,CAAC,GAAG;;MAE3C;AACI,wBAAgB,IAAI,+BAA+B;AACnD,eAAO,KAAK,cAAc,SAAS,CAAC,IAAI;;MAE5C;AACI,wBAAgB,IAAI,gCAAgC;AACpD,eAAO,KAAK,cAAc,SAAS,CAAC,GAAG;;MAE3C;AACI,wBAAgB,IAAI,cAAc;AAClC,eAAO,KAAK,cAAc,SAAS,CAAC,GAAG;;MAE3C;AACI,wBAAgB,IAAI,sBAAsB;AAC1C,eAAO,KAAK,cAAc,SAAS,CAAC,IAAI;;MAE5C;AACI,wBAAgB,IAAI,uBAAuB;AAC3C,eAAO,KAAK,cAAc,SAAS,CAAC,GAAG;;MAE3C;AACI,oBAAY,EAAE,MAAM;AACpB,yBAAiB,IAAI,MAAM,EAAE,OAAO;AACpC,uBAAe;AACf,qBAAa,GAAG,IAAI,EAAE,MAAM;AACxB,cAAI,MAAM;AACN,qBAAS,cAAc,EAAE,MAAM;;;AAGvC,sBAAc,IAAI,MAAM,EAAE,MAAM,KAAK;AACrC,qBAAa,EAAE,MAAM;AACrB,aAAK,QAAQ;AACb,oBAAY,IAAI,MAAM;AACtB,qBAAa,GAAG,IAAI,IAAI,QAAQ;AAC5B,gBAAM,QAAQ;AACd,cAAI,KAAK,KAAK,MAAM,GAAG,OAAO,MAAM,QAAQ;;AAEhD,eAAO;;MAEX;AACI,wBAAgB,IAAI,cAAc,UAAU,OAAO;AACnD,eAAO,KAAK,cAAc,SAAS,CAAC,IAAI;;MAE5C;AACI,yCAAiC,IAAI,yBAAyB;AAC9D,eAAO,KAAK,cAAc,0BAA0B,CAAC,KAAK,EAAE;;MAEhE;AACI,wBAAgB,IAAI,cAAc,UAAU,OAAO;AACnD,eAAO,KAAK,cAAc,SAAS,CAAC,IAAI;;MAE5C;AACI,6BAAqB;AACrB,0CAAkC,IAAI,cAAc,UAAU,OAAO;AACrE,mCAA2B,KAAK,cAAc,2BAA2B,CAAC;AAC1E,yCAAiC,IAAI,yBAAyB;AAC9D,uBAAe,KAAK,cAAc,0BAA0B,CAAC,IAAI,qBAAqB,EAAE;AACxF,2BAAmB;AACnB,eAAO;;MAEX;AACI,wBAAgB,OAAM,QAAQ,iCAC1B,IAAI,4BAA4B,EAAE,OAAO,WAAW,UAAU,gBAC9D,IAAI,sBAAsB,EAAE,OAAO,WAAW,UAAU;AAC5D,eAAO,KAAK,cAAc,SAAS,CAAC,IAAI;;MAE5C;AACI,wBAAgB,IAAI,8BAA8B,IAAI,GAAG;AACzD,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,6BAA6B,EAAE,OAAO,WAAW,UAAU;AAC/E,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,oCAAoC,IAAI,GAAG;AAC/D,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,sBAAc,aAAa,SAAS,SAAQ;AAC5C,0BAAkB,MAAM,MAAM;AAC9B,4BAAoB,MAAM,MAAM;AAChC,wBAAgB,IAAI,mBAAmB,WAAW,aAAa;AAC/D,4BAAoB,QAAQ,mBAAmB;AAC/C,eAAO,KAAK,cAAc,SAAS,CAAC,QAAQ,SAAS;;MAEzD;AACI,wBAAgB,IAAI,cAAc,QAAQ,MAAM,OAAO,SAAS;AAChE,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,YAAY,EAAE;AAClC,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,wBAAgB,IAAI,qBAAqB,OAAM,OAAO,MAAM,OAAO,UAAU,QAAQ;AACrF,eAAO,KAAK,cAAc,SAAS,CAAC,QAAO,OAAO,WAAW;;MAEjE;AACI,gBAAY,YAAY,GAAG,MAAM,sDAAsD;AACvF,0BAAkB,EAAE,MAAM;AAC1B,4BAAqB,eAAe,SAAU,EAAE,MAAM,KAAK,EAAE,MAAM;AACnE,2BAAoB,eAAe,SAAU,EAAE,MAAM,KAAK,EAAE,MAAM;AAClE,2BAAoB,eAAe,SAAU,EAAE,MAAM,KAAK,EAAE,MAAM;AAClE,6BAAqB,cAAc;AACnC,4BAAoB,aAAa;AACjC,4BAAoB,aAAc,aAAY;AAC9C,4BAAqB,eAAe,SAChC,CAAC,WAAW,cAAc,aAAa,eACvC,CAAC,WAAW,aAAa,cAAc;AAC3C,wBAAgB,IAAI,oBAAoB,aAAa,WAAW;AAChE,eAAO,KAAK,cAAc,SAAS,CAAC;;MAExC;AACI,eAAO,QAAM,GAAG,YAAY;;MAEhC;AACI,eAAQ,WAAW,YAAY,WAAW,SAAS,cAAe,iBAA6B,SAAS,SAAS;AACjH,6BAAqB,CAAC,aAAa,WAAW;AAC9C,+BAAuB,QAAQ,QAAQ,CAAC,YAAY;AACpD,yBAAiB,QAAQ,QAAQ,CAAC,YAAY;AAC9C,YAAI,eAAe;AACf,iBAAO,eAA2B,QAAO,KAAK;;AAElD,6BAAqB,QAAO;AAC5B,wBAAgB,IAAI,eAAe,YAAY,WAAW,eAAe,MAAM,SAAS,MAAM,SAAS;AACvG,oBAAY,KAAK,cAAc,SAAS,CAAC,UAAU,gBAAgB;AACnE,eAAO,IAAI,QAAQ;;MAEvB;AACI,eAAQ,WAAW,YAAY,SAAS,cAAe,iBAA6B,cAAc,eAAe;AACjH,+BAAuB;AACvB,wBAAgB,
IAAI,eAAe,YAAY,WAAW,cAAc,MAAM,aAAa,MAAM,SAAS,CAAC,YAAY,IAAI;AAC3H,oBAAY,KAAK,cAAc,SAAS,CAAC,cAAc,eAAe;AACtE,eAAO,IAAI,QAAQ;;MAEvB;AACI,6BAAqB,QAAQ;AAC7B,0BAAkB,aAAa,aAAa,SAAS;AACrD,6DAAqD,oBAAgC,GAAG;AACxF,+BAAuB,QAAQ,QAAQ,CAAC,WAAW;AACnD,yBAAiB,EAAE,QAAQ,CAAC,EAAE,OAAO,WAAW;AAChD,wBAAgB,IAAI,gBAAgB,WAAW,SAAS,CAAC,WAAW;AACpE,oBAAY,KAAK,cAAc,SAAS,CAAC,UAAU;AACnD,eAAO,IAAI,QAAQ;;MAEvB;AACI,gBAAQ,SAAS,YAAgB;AACjC,YAAI,UAAU;AAEV,yBAAe,mBAAuB,OAAO,eAAmB;AAChE,iBAAO,KAAK;AACZ,iBAAO,WAAS,WAAW,QAAQ,OAAO,OAAO;;AAGjD,0BAAgB,IAAI,YAAY,OAAO;AACvC,8BAAoB,QAAQ,mBAAmB;AAC/C,iBAAO,KAAK,cAAc,SAAS,IAAI,OAAO;;;MAGtD;AACI,YAAI,EAAE,UAAU;AACZ,gBAAM,IAAI,MAAM;;AAKhB,iBAAO,KAAK,KAAK,EAAE,OAAO,GAAG,EAAE;;;MAGvC;AACI,eAAO,KAAK,KAAK,EAAE,OAAO,EAAE,UAAU,WAAW,KAAK,GAAG,EAAE;;MAE/D;AAEI,eAAO,cAA0B,OAAO,MAAM;;MAElD;AACI,uBAAe,KAAK,MAAM,QAAQ,OAAO;AACzC,aAAK,QAAQ,IAAI,QAAQ,QAAQ;AACjC,eAAO,CAAE,QAAQ,OAAO;;MAE5B;AACI,eAAQ,UAAW,KAAK,eAAe,OAAO,OAAO;AACrD,eAAO,WAAS,qBAAqB,QAAQ,OAAO,OAAO;;MAE/D;AACI,wBAAgB,IAAI,cAAc,OAAM;AACxC,eAAO,KAAK,gBAAgB,SAAS,CAAC,SAAQ,OAAM;;MAExD;AACI,wBAAgB,IAAI,YAAY,OAAM;AACtC,4CAAoC;AACpC,eAAO,KAAK,gBAAgB,SAAS,CAAC,SAAQ,OAAM,OAAO,MAAwB;;MAEvF;AACI,6BAAqB;UACjB,YAAuB,OAAM;UAC7B,GAAG,YAAuB,OAAM;;AAEpC,wBAAgB;UACZ,OAAO,OAAM;UACb,OAAO;UACP,QAAQ,OAAM;;AAElB,+BAAuB;UACnB,YAAuB;UAAa,GAAG,YAAuB;;AAElE,wBAAgB,IAAI,qBAAqB,gBAAgB;AACzD,8CAAsC;AACtC,uBAAe,KAAK,gBAAgB,SAAS,CAAC,UAAU,OAAM,OAAO,MAAwB;AAC7F,eAAO,CAAE,QAAQ,OAAO,QAAQ,OAAO,YAAY,OAAO,OAAO;;MAErE;AACI,wBAAgB,KAAK,QAAQ,IAAI;AACjC,eAAQ,UAAU,OAAO,SAAU;AACnC,0BAAkB,aAAwB;AAC1C;AACA,YAAI;AACA,oBAAU,IAAI,0BAA0B;;AAGxC,oBAAU,IAAI,oBAAoB;;AAEtC,8CAAsC;AACtC,oBAAY,KAAK,gBAAgB,SAAS,CAAC,CAAE,OAAO,WAAW,OAAO,UAAW,OAAO,MAAwB;AAChH,eAAO,CAAE,OAAO,OAAO,QAAQ,IAAI;;MAEvC,2FAA2F;AACvF,uBAAe,KAAK,eAAe,QAAQ,aAAa;AACxD,wBAAgB,KAAK,QAAQ,IAAI,OAAO;AACxC,YAAI,QAAQ;AACR,kBAAQ,WAAW;;AAEvB,YAAI,QAAQ,qBAAqB,cAAuB;AACpD,6BAAmB,iBAA0B,QAAQ;AAKrD,kBAAQ,WAAW,WAAW,IAAI,OAAK,IAAI;;AAE/C,YAAI,QAAQ,eAAe;AACvB,kBAAQ,QAAQ,QAAQ;;AAE5B,YAAI,eAAmB,OAAO,WAAW;AAGrC,kBAAQ,SACJ,wBAA4B,OAAO,OAAO;AAC9C,iBAAO;;AAEX,8BAAsB;AACtB,2BAAmB,OAAO,IAAI;AAC1B,cAAI,OAAM,UAAU;AAChB,kBAAM,IAAI,MAAM;;AAIpB,wBAAc,KAAK,QAAQ,IAAI,OAAM;AACrC,cAAI,QAAQ,WAAW;AACnB,gBAAI,CAAC,QAAQ,gBACT,eAAmB,OAAM,UACrB,OAAM,UAAU;AAMpB,qBAAO;gBACH,OAAO,OAAM;gBACb,SAAS;gBACT,WAAW;gBACX,eAAe,QAAQ;;;AAK/B,gBAAI,QAAQ;AACR,sBAAQ,WAAW;AACnB,sBAAQ,QAAQ,OAAM;;qBAGrB,CAAC,CAAC,QAAQ,aAAa,CAAC,CAAC,QAAQ;AACtC,qBAAQ,QAAQ,WAAW,KAAK,aAAa,UACzC,KAAK,WAAW;AACpB,0BAAc,KAAK;AACnB,sBAAU,KAAK,QAAQ,IAAI,OAAM;qBAE5B,QAAQ,YACb,CAAC,cAAyB,QAAQ,OAAO,OAAM;AAO/C,+BAAmB;AACnB,gCAAoB,OAAM;AAC1B,mBAAM,QAAQ,QAAQ;AACtB,qBAAQ,KAAK,cAAc,QAAO;AAClC,0BAAc,KAAK;AACnB,sBAAU,KAAK,QAAQ,IAAI,OAAM;AACjC,uBAAW,QAAQ;;AAEvB,eAAK,YAAY,OAAM;AACvB,iBAAO,CAAE,OAAO,OAAM,OAAO,SAAS,WAAW;;AAErD,aAAK,YAAY,OAAO;AACxB,2BAAmB,CAAE,OAAO,OAAO,OAAO,SAAS,SAAS,WAAW;AACvE,oBAAY,cAAyB,SAAS,YAAY;AAC1D,uBAAe,KAAK,iBAAiB,KAAK;AACtC,iBAAO,eAA0B,KAAK,OAAO,SAAS,YAAY;;AAEtE,kCAA0B,KAAK,gBAAgB;AAC/C;AACA,YAAI;AACA,kBAAQ,KAAK;;AAEjB,mBAAsB,KAAK,OAAO,QAAQ,YAAY,YAAY;AAClE,sBAAc,QAAQ,UAAQ,KAAK,8BAA8B;AACjE,YAAI;AACA,kBAAQ,KAAK,SAAS;AACtB,eAAK,aAAa,KAAK,CAAE,MAAM,QAAQ,YAAY,MAAM,OAAO,KAAK,aAAa;;AAEtF,YAAI,CAAC,OAAM,QAAQ,0BAA0B,QAAQ,YACjD,kCAAkC;AAClC,2BAAiB,KAAK,aAAa;AACnC,eAAK,8BAA8B;AACnC,iBAAO;;AAEX,eAAO;;MAEX,yFAAyF;AACrF,sBAAc,eAAe,OAAO,GAAG;AACvC,wBAAgB,KAAK,gBAAgB,SAAS,QAAQ,aAAa,aAAa;AAChF,eAAO,WAAS,qBAAqB,QAAQ,QAAQ,QAAQ,OAAO,QAAQ;;MAEhF;AACI,YAAI,CAAE,QAAO,KAAK;AACd,eAAK,YAAY,OAAO;;AAE5B,eAAO,KAAK,YAAY;;MAE5B;AACI,eAAO,KAAK;;MAEhB;AACI,YAAI,KAAK;AACL;;AAIJ,YAAI,CAAC,OAAM,QAAQ;AACf,0BAAgB,OAAO,KAAK,KAAK;AACj
C,kBAAQ,QAAQ;AACZ,iBAAK,MAAM,cAAc,KAAK,YAAY,KAAK;AAC/C,mBAAO,KAAK,YAAY;;;AAGhC,aAAK,eAAe;AACpB,YAAI,KAAK,UAAU,QACd,QAAQ,sBAAuB,eAC5B,KAAK,kBAAkB;AAC3B,eAAK,OAAO;;AAGZ,eAAK,SAAS;;AAElB,YAAI,KAAK;AACL,eAAK,MAAM,UAAU;AACrB,eAAK,MAAM;;AAEf,aAAK,WAAW;;MAEpB;AACI,YAAI,KAAK,uBAAuB;AAC5B,eAAK,sBAAsB,KAAK;AAC5B,gBAAI,CAAC,OAAM,IAAI;AAGX,gCAAkB,OAAM,QAAQ;AAChC,qBAAM,IAAI,SAAS;AACnB,0CAA4B,KAAK,IAAI,QAAO,OAAO,WAAW;AAC9D,qBAAM,IAAI,SAAS;AACnB,kBAAI,sBAAsB;AACtB,uBAAO;;;AAGf,mBAAO;;;AAGf,eAAO,KAAK;;MAGhB;AACI,eAAO,KAAK,qBAAqB,KAAK,oBAAkB;;MAE5D;AACI,wBAAgB,KAAK,QAAQ,IAAI;AACjC,eAAQ,OAAO,OAAO,QAAQ,SAAS,OAAO,YAAa;AAC3D,YAAI,WAAW;AAEX;;AAEJ,kCAA0B,KAAK,gBAAgB;AAC/C;AACA,YAAI;AACA,kBAAQ;;AAEZ,uBAAe,QAAQ;AACvB,YAAI,YAAY;AACZ,qBAAW,gCAA2C,OAAO;AAC7D,kBAAQ,WAAW;;AAEvB,YAAI,UAAU;AACV,4BAAkB,aAAwB;AAC1C;AACA,sBAAY,SAAS,aAAa,SAAS;AAC3C,8BAAoB,kBAAkB;AACtC,cAAI;AACA,aAAC,OAAO,UAAU,uCAAgD,SAAS,IAAI,SAAS;AACxF,sBAAU,IAAI,0BAA0B,WAAW,CAAC,QAAQ,QAAQ;;AAGpE,sBACI,IAAI,oBAAoB,WAAW,CAAC,QAAQ,QAAQ;;AAE5D,uCAA6B,KAAK,eAAe,CAAC,QAAQ,QAAQ;AAClE,cAAI;AACA,iBAAK,QAAQ,IAAI,qBAAqB,QAAQ,QAC1C,aAAa;;AAGjB,iBAAK,QAAQ,IAAI,qBAAqB,QAAQ,QAC1C,aAAa;;AAErB,eAAK,MAAM,2BAA2B,KAAK,WAAW,qBAAqB,SAAS,OAAO,QAAQ;AAGnG,wCAA8B;AAC9B,sCAA4B,KAAK,gBAAgB,SAAS,CAAC,uBAAuB,OAAO,MAAM;AAE/F,gCAAsB,KAAK,QAAQ,IAAI,oBAAoB;AAC3D,kBAAQ,UAAU,cAAc;AAChC,kBAAQ,WAAW,cAAc;AACjC,kBAAQ,WAAW,cAAc;AACjC,kBAAQ,QAAQ,cAAc;AAC9B,eAAK,8BAA8B;AACnC,eAAK,QAAQ,OAAO,oBAAoB;AAExC,kBAAQ,SAAS;AACjB,cAAI;AACA,iBAAK,gBAAgB,SAAa;;;AAItC,6BAAmB,KAAK,eAAe,UAAU,OAAO,OAAO;AAC/D,kBAAQ,UAAU;;;MAG1B;AACI,wBAAgB,KAAK,QAAQ,IAAI;AACjC,eAAQ,SAAU;AAClB,aAAK,eAAe;AACpB,YAAI,iBAAiB;AACjB,kBAAQ,SAAS,oBAAoB,eAAe;;AAExD,eAAO,QAAQ;;MAEnB;AACI,aAAK,iBAAiB,KAAK,aAAa,UAAU;AAClD,YAAI,CAAC,KAAK,qBACN,KAAK,gBAAgB,KAAK,qBAAqB,OAAO;AACtD,qBAAY,MAAK,gBAAgB,OAAO,MAAM,QAAQ;AACtD,eAAK,oBAAoB;AACzB,kBAAQ,KAAK,6BAA6B;;AAG9C,eAAO,KAAK,eAAe,eAAe,UAAU,SAAS;;MAEjE;AACI,eAAO,MAAM,KAAK,MAAM,KAAK,iBAAqB;;MAEtD;AACI,YAAI,KAAK,mBAAmB;AACxB;AACI,mBAAO;;AAGP,gBAAI,OAAM,QAAQ;AACd,oBAAM,IAAI,MAAM;;;;AAI5B,eAAO;;;AAGf;AACI,UAAI,UAAU,aAAa,UAAU;AACjC,eAAO;iBAEF,UAAU,WAAW,UAAU;AACpC,uBAAgB,UAAU,UAAW,IAAI,WAAW,EAAE,UAClD,IAAI,WAAW,EAAE;AACrB,qBAAa,GAAG,IAAI,OAAO,QAAQ,EAAE;AACjC,iBAAO,KAAK,KAAK,MAAM,EAAE;;AAE7B,eAAO;;AAGP,cAAM,IAAI,MAAM,iBAAiB;;;ACjgEzC;AAEA,sBAAgB;ACFhB;;;;;;;;;;;;;;;;AA6BO;AACH,aAAM,IAAI,4BAA4B;;AC9B1C;;;;;;;;;;;;;;;;AAoBA,QAAI;AACA,uBAAgB,SAAS,MAAM,IAAI,oBAAoB;;AAMpD,kBAAc,CAAE;AC3BvB;;;;;;;;;;;;;;;;AAiBO;AACH,aAAQ,QAAQ,qBAAY;AAC5B,aAAQ,KAAM;AACd,eAAQ,OAAO,EAAE;AACjB,aAAO,CAAE,QAAQ,EAAE,QAAQ,OAAO,EAAE,OAAO,OAAO,EAAE;;AAEjD,6BAAuB;MAC1B,YAAY;MACZ,aAAa;MACb,YAAY;;AC1BhB;;;;;;;;;;;;;;;;AA4BO;AACH,aAAQ,QAAQ,qBAAY;AAC5B,aAAQ,aAAM,eAAS;AACvB,0BAAoB,SAAQ,eAAe,MAAK,OAAO;AACvD,uBAAgB,SAAQ,QAAQ,IAAI,YAAY;AAChD,6BAAuB,WAAS,CAAE,QAAQ,CAAE,GAAG,QAAQ;AACvD,uBAAiB,SAAQ,QAAQ,IAAI,eAAe;AACpD,eAAS;AACT,6BAAuB,WAAS,CAAE,QAAQ,CAAE,GAAG,QAAQ;AACvD,uBAAiB,SAAQ,QAAQ,IAAI,eAAe;AACpD,eAAS;AACT,eAAQ,qBAAqB,CAAE,MAAM,gBAAgB,MAAM;AAC3D,aAAO;;AAEJ,4BAAsB;MACzB,YAAY;MACZ,aAAa;MACb,YAAY;;AC7ChB;;;;;;;;;;;;;;;;AAqBO,oCAAgC;AAChC,qCAAiC;;;;AAIjC,4CAAwC;;;;;;AAUxC;AACH,aAAO,EAAG,QAAQ;AACd,eAAQ,KAAM;AACd,6BAAqB;AACrB,wBAAgB,IAAI,eAAe,EAAE,OAAO;AAC5C,eAAO,aAAa,gBAAgB,SAAS,CAAC,IAAI,EAAE;;;AAarD,iCAA4B,WAAW,iBAAiB,mBAAmB,OAAO,kBAAkB,OAAO,eAAe;AAC7H,aAAO,EAAG,QAAQ;AACd,eAAQ,GAAG,KAAM;AACjB,6BAAqB;AACrB,YAAI,mBAAmB,EAAE,UAAU;AAC/B,wBAAc,aAAa,QAAQ,IAAI,EAAE;AACzC,wBAAc,aAAa,QAAQ,IAAI,EAAE;AACzC,iCAAqB;YACjB,CAAC,MAAM,mBAAmB,MAAM,MAAM,mBAAmB;YACzD,CAAC,MAAM,mBAAmB,MAAM,MAAM,mBAAmB;YAC3D,IAAI;AACF,mCAAuB;AACvB,4BAAgB;cACZ,QAAQ,MAAM;cACd,OAAO,MAAM;cACb,O
AAO,EAAE;;AAEb,4BAAgB;cACZ,QAAQ,MAAM;cACd,OAAO,MAAM;cACb,OAAO,EAAE;;AAEb,6BAAgB,IAAI,gBAAgB,WAAW,EAAE,OAAO,EAAE;AAC1D,mBAAO,aAAa,gBAAgB,UAAS,CAAC,SAAS,UAAU,YAAW,MAAM,OAAO,MAAM;;AAEnG,gCAAsB,UAAQ,CAAE,QAAQ,CAAE,aAAM,cAAQ,SAAS;AACjE,uBAAa,8BAA8B;AAC3C,uBAAa,8BAA8B;AAE3C,iBAAO;;AAEX,uBAAe,SAAS,YAAW,EAAE,OAAO,EAAE;AAC9C,YAAI,aAAa,mBAAmB,CAAC,GAAG,OAAO,iBAAiB;AAC5D,wBAAc,aAAa,QAAQ,IAAI,EAAE;AACzC,wBAAc,aAAa,QAAQ,IAAI,EAAE;AACzC,wCAA8B,cAAc,EAAE,OAAO,EAAE,OAAO,MAAM,QAAQ,MAAM,QAAQ;AAC1F,sBAAY,aAAa,eAAe,UAAU;AAClD,0BAAgB,aAAa,QAAQ,IAAI,IAAI;AAC7C,kBAAQ,SAAS;AACjB,iBAAO;;AAEX,uCAA+B,OAAM,QAAQ,mCACzC,mBAAmB;AACvB;AACA,YAAI;AACA,oBAAU,IAAI,sBAAsB,iBAAiB,EAAE,OAAO,EAAE,OAAO;;AAGvE,oBAAU,IAAI,gBAAgB,WAAW,EAAE,OAAO,EAAE;;AAExD,eAAO,aAAa,gBAAgB,SAAS,CAAC,GAAG,IAAI;;;ACxG7D;;;;;;;;;;;;;;;;AAmBA,gBAAY;AACL,0BAAsB,mBAAiB;MAC1C,WAAW;MACX,iBAAiB;MACjB,iBAAiB;MACjB,eAAe;;AAEZ,wBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY;;AC7BhB;;;;;;;;;;;;;;;;AAkBA,kBAAc,2BAA2B;;;AAGzC,yBAAqB;;;MAIjB,kCAAkC;;;AAG/B,oBAAc,mBAAiB,CAAE,WAAW,OAAO,iBAAiB;AACpE,wBAAoB;MACvB,YAAY;MACZ,aAAa;MACb,YAAY;;AChChB;;;;;;;;;;;;;;;;AAoBO;AACH,aAAQ,QAAQ,mBAAS,SAAU;AACnC,aAAQ,KAAM;AACd,yBAAiB,GAAG;AACpB,aAAQ,YAAY,SAAS,WAAK,mBAAoB;AACtD,wBAAkB;AAClB,cAAY,gCAA4C,SAAS,YAAY,MAAM,wEAChE,0BAA0B;AAC7C,uBAAiB,mBAA+B,EAAE,OAAO,YAAY,SAAS,WAAW,MAAK;AAC9F,UAAI,SAAS,gBAAgB,KAAK,SAAS,iBAAiB,KACxD,aAAiB,SAAS,SAAS,SAAS;AAC5C,eAAO,WAAS,CAAE,QAAQ,CAAE,IAAK;;AAErC,6BAAuB,IAAI,cAAc,UAAU,OAAO;AAC1D,aAAO,SAAQ,gBAAgB,gBAAgB,CAAC,IAAI;;AAEjD,4BAAsB;MACzB,YAAY;MACZ,aAAa;MACb,YAAY;;ACvChB;;;;;;;;;;;;;;;;AAmBO;AACH,aAAQ,QAAQ,mBAAS,SAAU;AACnC,aAAQ,IAAI,iBAAU;AACtB,gBAAU;AACV,yBAAiB,CAAC,IAAI,SAAQ;AAC9B,aAAQ,YAAY,SAAS,aAAQ;AACrC,uBAAiB,mBAA+B,EAAE,OAAO,YAAY,SAAS,GAAmB;AACjG,qCAA+B,IAAI,yBAAyB;AAC5D,aAAO,SAAQ,gBAAgB,wBAAwB,CAAC,KAAK,EAAE;;AAE5D,oCAA8B;MACjC,YAAY;MACZ,aAAa;MACb,YAAY;;AChChB;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,cAAc;AACnB,aAAK,gBAAgB,CAAC,KAAK,QAAQ;AACnC,oCAAwC,QAAQ;AAChD,oCAAwC,QAAQ;AAChD,4BAAoB;AACpB,YAAI,eAAe;AACf,sCAAwC,QAAQ;AAChD,eAAK,cAAc,KAAK;AACxB,0BAAgB;;AAEpB,2BAAmB;AACnB,YAAI,cAAc;AACd,sCAAwC,QAAQ;AAChD,eAAK,cAAc,KAAK;AACxB,yBAAe;;AAEnB,aAAK,cAAc;AACnB,aAAK,WAAW;;;;;yBAKC;wBACD;2DACmC;;;;;;AC3C3D;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,aAAK,gBAAgB,CAAC,KAAK,QAAQ;AACnC,oCAAwC,QAAQ;AAChD,oCAAwC,QAAQ;AAChD,4BAAoB;AACpB,YAAI,eAAe;AACf,sCAAwC,QAAQ;AAChD,eAAK,cAAc,KAAK;AACxB,0BAAgB;;AAEpB,2BAAmB;AACnB,YAAI,cAAc;AACd,sCAAwC,QAAQ;AAChD,eAAK,cAAc,KAAK;AACxB,yBAAe;;AAEnB,aAAK,cAAc;AACnB,aAAK,WAAW;;wBAEA;uBACD;;;;;;yDAMkC;;;;;;;AC9CzD;;;;;;;;;;;;;;;;AAmBO,wBAAkB,EAAG,QAAQ,mBAAS;AACzC,aAAQ,GAAG,aAAM,qBAAU,QAAQ,iBAAU;AAC7C,cAAY,MAAK,MAAM,WAAW,UAAS,MAAM,QAAQ,MAAM;AAE/D,cAAY,UAAU,QAAQ,MAAK,MAAM,WAAW,OAAO,MAAM,QAAQ,MAAM;AAE/E,cAAY,UAAS,QAAQ,MAAK,MAAM,WAAW,OAAM,MAAM,QAAQ,MAAM;AAE7E,WAAM,mBAAoB;AAC1B,UAAI,mBAAmB;AACnB,0BAAkB;;AAEtB,0BAAoB,CAAC,GAAG,OAAM;AAC9B,wBAAkB;AAClB,UAAI,UAAU;AACV,sBAAc,OAAO;AACrB,oBAAY,KAAK;;AAErB,uBAAiB;AACjB,UAAI,UAAS;AACT,qBAAa,OAAM;AACnB,oBAAY,KAAK;;AAErB,sBAAgB,OAAM,QAAQ,8BAC1B,IAAI,uBAAuB,EAAE,OAAO,MAAK,OAAO,UAAS,OAAO,aAAa,YAAY,mBACzF,IAAI,iBAAiB,EAAE,OAAO,MAAK,OAAO,UAAS,OAAO,aAAa,YAAY;AACvF,qBAAe,SAAQ,gBAAgB,SAAS,aAAa,YAAY,GAAG;AAC5E,aAAO;;AAEJ,8BAAwB;MAC3B,YAAY;MACZ,aAAa;MACb,YAAY;;ACnDhB;;;;;;;;;;;;;;;;AAkBA,wBAAkB;AACX,uBAAiB,mBAAiB,CAAE,WAAW,aAAW,OAAO;AACjE,6BAAuB;MAC1B,YAAY;MACZ,aAAa;MACb,YAAY;;ACvBhB;;;;;;;;;;;;;;;;AAkBO;AACH,aAAQ,QAAQ,qBAAY;AAC5B,aAAQ,iBAAU;AAClB,wBAAkB,SAAQ,QAAQ,IAAI,OAAM;AAC5C,aAAO,WAAS,CAAE,QAAQ,CAAE,GAAG,UAAU,mBAAmB,OAAQ;;AAEjE,yBAAmB;MACtB,YAAY;MACZ,aAAa;MACb,YAAY;;AC3BhB;;;;;;;;;;;;;;;;AAiBA,mBAAe;AAC
R;AACH,sBAAgB,IAAI,eAAe,OAAM,OAAO;AAChD,qBAAe,SAAQ,gBAAgB,SAAS,CAAC,SAAQ;AACzD,aAAO,CAAE,QAAQ,OAAO,QAAQ,OAAO,OAAO,OAAO,OAAO,OAAO;;ACrBvE;;;;;;;;;;;;;;;;AAuBO;AACH,aAAQ,QAAQ,mBAAS,SAAU;AACnC,aAAQ,KAAM;AACd,aAAQ,SAAU;AAElB,UAAI,UAAU;AACV,YAAI,EAAE,UAAU;AACZ,iBAAO,WAAS,CAAE,QAAQ,CAAE,IAAK;;AAGrC,4BAAoB,OAAS,EAAE;AAC/B,uBAAe,OAAK,CAAE,QAAQ,CAAE,IAAK,mBAAS,OAAO,CAAE,OAAO;AAC9D,uBAAe,UAAQ,CAAE,QAAQ,CAAE,MAAM,QAAQ,MAAM,cAAe;AACtE,oBAAY;AACZ,iBAAQ,8BAA8B;AACtC,eAAO;;AAGX,UAAI,EAAE,UAAU;AACZ,yBAAiB,OAAK,CAAE,QAAQ,CAAE,OAAO,IAAK;AAC9C,uBAAe,OAAK,CAAE,QAAQ,CAAE,GAAG,WAAY,mBAAS,OAAO,CAAE;AACjE,iBAAQ,8BAA8B;AACtC,eAAO;;AAEX,UAAI,CAAC,iBAAqB,EAAE,OAAO;AAG/B,uBAAe,WAAS,CAAE,QAAQ,CAAE,IAAK;AACzC,eAAO,CAAE,QAAQ,OAAO,QAAQ,OAAO,OAAO,OAAO;;AAEzD,UAAI,UAAU;AACV,eAAO,IAAI,GAAG;;AAElB,UAAI,UAAU;AACV,gCAAwB,SAAQ,eAAe,IAAI,QAAQ,wBAA4B,QAAQ;AAC/F,6BAAqB,CAAE,GAAG,GAAG,GAAG;AAChC,uBAAe,WAAS,CAAE,QAAQ,cAAc;AAChD,iBAAQ,8BAA8B;AACtC,eAAO;;AAEX,YAAM,IAAI,MAAM,iCAAiC,EAAE,YAAY;;AAE5D,yBAAmB;MACtB,YAAY;MACZ,aAAa;MACb,YAAY;;ACpEhB;;;;;;;;;;;;;;;;;MAmBI;AACI,aAAK,cAAc;AACnB,aAAK,cAAc,kBAA6B,QAAQ;AACxD,aAAK,gBAAgB,OAAO,IAAI,UAAU,IAAI;AAC9C,wBAAgB,IAAI,MAAM,OAAO,SAAS;AAC1C,gBAAQ,KAAK,OAAO,GAAG;AACvB,qBAAa,GAAG,IAAI,QAAQ,QAAQ;AAChC,kBAAQ,KAAK,QAAQ,IAAI,KAAK,OAAO,GAAG;;AAE5C,yBAAiB,CAAC,YAAY,QAAQ;AACtC,qBAAa,GAAG,IAAI,QAAQ,QAAQ;AAChC,wBAAc,QAAQ,IAAI;AAC1B,mBAAS,KAAK,iBAAiB,QAAQ,qBAClB,YAAY;;AAErC,0BAAkB,QAAQ;AAC1B,0BAAkB,QAAQ,QAAQ,SAAS;AAC3C,iBAAS,KAAK,sBAAsB,oBAAoB;AACxD,aAAK,WAAW;;;;;;UAMd,SAAS,KAAK;;;;;AC3CxB;;;;;;;;;;;;;;;;;MAoBI;AACI,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,aAAK,cAAc;AACnB,aAAK,cAAc,kBAA6B,QAAQ;AACxD,sBAAc,KAAK;AACnB,qBAAa,MAAM;AACnB,sBAAc,kBAAkB;AAChC,wBAAe,YAAY,UAAU;AACrC,yBAAiB,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK,MAAM,GAAG;AACzD,aAAK,gBAAgB,OAAO,IAAI,UAAU,IAAI;AAC9C,wBAAgB,IAAI,MAAM,OAAO,SAAS;AAC1C,gBAAQ,KAAK,OAAO,GAAG;AACvB,qBAAa,GAAG,IAAI,QAAQ,QAAQ;AAChC,kBAAQ,KAAK,QAAQ,IAAI,KAAK,OAAO,GAAG;;AAE5C,wBAAgB,SAAS;AACzB,6BAAqB,SAAS,MAAM;AACpC,4BAAoB,SAAS;AAC7B,8BAAsB,OAAO,aAAa,QAAQ;;oBAEtC,sBAAsB,aAAa;;AAE/C,qBAAa,GAAG,IAAI,QAAQ,QAAQ;AAChC,yBAAc,QAAQ,IAAI;AAK1B,6BAAmB;cACjB,aAAa,QAAQ,UAAU,cAAc,QAAQ,IAAI;;kBAErD,KAAK,gBAAgB,UAAU,SAAS;mBACvC,gBAAgB,cAAc,SAAS;;;AAGlD,0BAAkB,QAAQ;AAC1B,sBAAc,QAAQ,QAAQ,SAAS;AACvC,2BAAmB;;gBAEX,aAAa,gBAAgB,UAAU,SAAS;iBAC/C,gBAAgB,cAAc,SAAS;AAChD,aAAK,WAAW;uBACD,SAAS,IAAI,OAAK,SAAS;UACxC;;;;UAIA;sCAC4B;;UAE5B,QAAO,OAAO,QAAQ,QAAO,OAAO;cAChC,QAAO,OAAO,QAAQ,MAAM,OAAO;gCACjB;;;UAGtB,QAAO,OAAO,QAAQ,QAAO,OAAO;cAChC,QAAO,OAAO,QAAQ,MAAM,OAAO;gCACjB;;;UAGtB,QAAO,OAAO,QAAQ,QAAO,OAAO;cAChC,QAAO,OAAO,QAAQ,MAAM,OAAO;cACnC,QAAO,OAAO,QAAQ,MAAM,OAAO;gCACjB;;;;;;;AAkBhC;AACI,yBAAmB,SAAS,QAAQ;AACpC,kBAAY,SAAS,IAAI;AACrB,YAAI,QAAQ;AACR,iBAAO,GAAG,OAAO;;AAGjB,iBAAO;;;AAGf,aAAO,IAAI;;AChHf;;;;;;;;;;;;;;;;AAkBO;AACH,aAAQ,QAAQ,qBAAY;AAC5B,aAAQ,iBAAU;AAClB,wBAAkB,SAAQ,QAAQ,IAAI,OAAM;AAC5C,aAAO,WAAS,CAAE,QAAQ,CAAE,GAAG,UAAU,mBAAmB,OAAQ;;AAEjE,yBAAmB;MACtB,YAAY;MACZ,aAAa;MACb,YAAY;;AC3BhB;;;;;;;;;;;;;;;;AAkBO;AACH,2BAAqB;QAAC,YAAY,OAAM;QACpC,GAAG,YAAY,OAAM;;AACzB,sBAAgB;QACZ,OAAO,OAAM;QACb,OAAO;QACP,QAAQ,OAAM;;AAElB,6BAAuB;QAAC,YAAY;QAChC,GAAG,YAAY;;AACnB,sBAAgB,IAAI,qBAAqB,gBAAgB;AACzD,4CAAsC;AACtC,qBAAe,SAAQ,gBAAgB,SAAS,CAAC,UAAU,OAAM,OAAO,MAAwB;AAChG,aAAO,CAAE,QAAQ,OAAO,QAAQ,OAAO,YAAY,OAAO,OAAO;;AC/BrE;;;;;;;;;;;;;;;;AAmBO;AACH,aAAQ,QAAQ,mBAAS,SAAU;AACnC,aAAQ,KAAM;AACd,aAAQ,SAAU;AAClB,2BAAqB;AACrB,oBAAc,eAAmB,EAAE;AACnC,qBAAe,wBAA4B,OAAO;AAClD,qBAAe,eAAmB;AAClC,cAAY,UAAU,QAAQ,MAAM,kBAAkB,eAAe,sCACvD,EAAE,cAAc;AAE9B,uBAAiB,aAAa,QAAQ,IAAI,EAAE;AAC5C,UAAI,SAAS,YAAY,CAAC,cAAc,EAAE,OAAO,WAC7C,CAAE,UAAS,YAAY,QAAQ,cAAc,S
AAS,OAAO;AAC7D,eAAO,cAAc,GAAG,QAAQ;;AAEpC,mBAAa,OAAO,EAAE;AACtB,aAAO,CAAE,QAAQ,EAAE,QAAQ,OAAO,QAAQ,OAAO,EAAE;;AAEhD,4BAAsB;MACzB,YAAY;MACZ,aAAa;MACb,YAAY;;ACzChB;;;;;;;;;;;;;;;;AAuBO;AACH,oBAAc,OAAO,GAAG;AACxB,UAAI,UAAU;AACV,sBAAc,OAAO,IAAI,OAAO,OAAK,CAAE,QAAQ,CAAE,OAAO,IAAK;AAC7D,sBAAc,OAAO,IAAI,OAAO,OAAK,CAAE,QAAQ,CAAE,OAAO,IAAK;AAC7D,6BAAqB,WAAW,OAAO,MAAM;AAC7C,6BAAqB,WAAW,OAAO,MAAM;AAC7C,wBAAe,UAAQ,CAAE,QAAQ,CAAE,MAAM,cAAc,MAAM,eAAgB;AAC7E,cAAM,QAAQ,OAAK,SAAQ,8BAA8B;AACzD,cAAM,QAAQ,OAAK,SAAQ,8BAA8B;AACzD,iBAAQ,8BAA8B;AACtC,iBAAQ,8BAA8B;AACtC,eAAO;;AAEX,UAAI,OAAO,SAAS,OAAM,UAAU;AAChC,yBAAiB,KAAK,MAAM,OAAO,SAAS;AAC5C,yBAAiB,WAAW,OAAO,MAAM,GAAG,WAAW,MAAM;AAC7D,0BAAkB,WAAW,OAAO,MAAM,WAAW,MAAM;AAC3D,wBAAe,WAAW,CAAC,UAAU,YAAY,MAAM;AACvD,iBAAQ,8BAA8B;AACtC,iBAAQ,8BAA8B;AACtC,eAAO;;AAEX,UAAI,OAAM,QAAQ,kCACd,OAAO,GAAG,MAAM,SAAS;AACzB,yBAAgB,IAAI,oBAAoB,OAAO,IAAI,OAAK,EAAE,QAAQ;AAClE,eAAO,SAAQ,gBAAgB,UAAS,QAAQ;;AASpD,uBAAiB,kBAA6B,OAAO,IAAI,OAAK,EAAE,QAAQ;AACxE,wBAAkB,OAAO,IAAI,OAAK,UAAQ;QACtC,QAAQ,CAAE;QACV,OAAO,CAAE,OAAO,CAAC,IAAI,eAAmB,EAAE,MAAM,MAAM;QACtD;;AAEJ,sBAAgB,IAAI,cAAc,UAAU,IAAI,OAAK,EAAE;AACvD,qBAAe,SAAQ,gBAAgB,SAAS,WAAW;AAC3D,gBAAU,QAAQ,OAAK,SAAQ,8BAA8B;AAC7D,6BAAuB,UAAQ,CAAE,QAAQ,CAAE,GAAG,SAAU,OAAO,CAAE,OAAO,WAAY;AACpF,eAAQ,8BAA8B;AACtC,aAAO;;ACrEX;;;;;;;;;;;;;;;;AAkBO;AACH,aAAQ,QAAQ,mBAAS,SAAU;AACnC,aAAQ,QAAS;AACjB,oBAAc,gBAAoB,MAAM,OAAO,GAAG,OAAO;AACzD,uBAAiB,kBAA6B,OAAO,IAAI,OAAK,EAAE,QAAQ;AACxE,UAAI,eAAmB,cAAc;AACjC,eAAO,SAAQ,eAAe,UAAU,OAAO,GAAG,OAAO;;AAG7D,sBAAgB,OAAO,OAAO,OAAK,eAAmB,EAAE,SAAS;AACjE,UAAI,QAAQ,WAAW;AACnB,eAAO,QAAQ;;AAEnB,qBAAe,QAAQ,IAAI,OAAK,EAAE;AAClC,8BAAoC,QAAQ;AAC5C,aAAO,WAAW,SAAS,OAAO;;AAE/B,2BAAqB;MACxB,YAAY;MACZ,aAAa;MACb,YAAY;;ACtChB;;;;;;;;;;;;;;;;AAkBA,gBAAY,0BAA0B;;;AAG/B,kBAAY,kBAAgB;AAC5B,wBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY;;ACzBhB;;;;;;;;;;;;;;;;AAoBA,gBAAY;;;;;AAOZ,uBAAmB;;;;;;;;;;;;;;;;;;;AAmBZ,kBAAY,mBAAiB,CAAE,WAAW,KAAK,iBAAiB,YAAY,kBAAkB;AAC9F,wBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY;;AClDhB;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC,QAAQ;AAC9B,yBAAiB,WAAW;AAC5B,aAAK,cAAc;AACnB,0CAAkC,UAAU,SAAS,KAAK,OAAO,UAAU,KAAK;AAChF,kCAA0B,UAAU,GAAG,eAAe;AACtD;AACA,YAAI,cAAc;AACd,qBAAW;mBAEN,cAAc;AACnB,qBAAW;;AAGX,gBAAM,IAAI,MAAM,sDAAsD;;AAE1E,aAAK,WAAW;yCACiB;;;UAG/B;;;;kDAIwC;;;;;;8BAMpB;;;;;;;;;yDAS2B;;;;;;;;;;;;;ACxDzD;;;;;;;;;;;;;;;;AAoBO;AACH,oBAAc,SAAQ,QAAQ,IAAI,EAAE;AACpC,wBAAkB,eAAmB,EAAE;AAEvC,iCAA2B,EAAE,MAAM,EAAE,MAAM,SAAS;AACpD,oBAAc,YAAY;AAC1B,sBAAgB,UAAQ,CAAE,QAAQ,CAAE,IAAK,mBAAS,OAAO,CAAE,OAAO,CAAC,OAAO;AAC1E,qBAAe,QAAQ;AACvB,0BAAoB,IAAI,WAAW,QAAQ,QAAQ;AACnD,0BAAoB,IAAI,WAAW,QAAQ,QAAQ;AACnD,qBAAe;QACX;UACI,QAAQ,MAAM,mBAAmB,KAAK;UACtC,OAAO,MAAM,mBAAmB,KAAK;UACrC,OAAO;;QAEX;UACI,QAAQ,MAAM,mBAAmB,KAAK;UACtC,OAAO,MAAM,mBAAmB,KAAK;UACrC,OAAO;;;AAGf,uBAAiB,SAAQ,gBAAgB,aAAa,QAAQ;AAC9D,uBAAiB,SAAQ,gBAAgB,aAAa,QAAQ;AAC9D,4BAAsB,UAAQ,CAAE,QAAQ,CAAE,MAAM,UAAU,MAAM,WAAY;AAC5E,eAAQ,8BAA8B;AACtC,eAAQ,8BAA8B;AACtC,oCAA8B,UAAQ,CAAE,QAAQ,CAAE,GAAG,gBAAiB,mBAAS,OAAO,CAAE,OAAO,EAAE;AACjG,eAAQ,8BAA8B;AACtC,aAAO;;ACjDX;;;;;;;;;;;;;;;;AAkBO;AACH,aAAQ,QAAQ,qBAAY;AAC5B,aAAQ,iBAAU;AAClB,aAAO,UAAQ,QAAO,OAAqB;;AAExC,wBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY;;AC1BhB;;;;;;;;;;;;;;;;;MAiBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc;AACnB,2BAAmB,WAAW;AAC9B,aAAK,cAAc;AACnB,aAAK,WAAW;;;;;yBAKC;;uCAEc;;;;;;;;;;AC7BvC;;;;;;;;;;;;;;;;AAkBO,kCAA4B;MAC/B,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ;AACnB,eAAQ,iBAAU;AAClB,6BAAqB;AACrB,wBAAgB,IAAI,qBAAqB,OAAM;AAC/C,uBAAe,aAAa,gBAAgB,SAAS,CAAC,SAAQ,OAAM;AACpE,eAAO;;;AC1Bf;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,gBAAgB,CAAC;AACtB,qBAAa;AACb,gCAAyB;AACzB,a
AAK,cAAc;AACnB,aAAK,WAAW;;;;;;uDAM+B,YAAY;;wBAE3C,KAAK;;;;;;;;;;;;;;;;;AC/B7B;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,qBAAa;AACb,gCAAyB;AACzB,aAAK,cAAc;AACnB,aAAK,WAAW;;;;;;;;;;;;;;;8BAeM,YAAY;4BACd,KAAK;;;;;;;;;;;;;;;;UAgBvB,KAAK;;;;;ACzDf;;;;;;;;;;;;;;;;AAqBO,6BAAyB;MAC5B,YAAY;MACZ,aAAa;MACb,YAAY;;AAEhB;AACA;AACI,aAAQ,QAAQ,mBAAS,SAAU;AACnC,WAAM,UAAW;AACjB,aAAQ,eAAgB;AACxB,sBAAgB,OAAQ,qBAAsB,eAC1C,kBAAkB;AACtB,sBAAgB,OAAQ,qBAAsB,eAC1C,kBAAkB;AACtB,8BAAwB,UACpB;QACI,OAAO;QACP,OAAO;UAEX,CAAC,OAAO,OAAO,OAAO;AAC1B,uBAAiB,CAAC,QAAQ;AAC1B,uBAAiB,CAAC,QAAQ,OAAO;AACjC,UAAI,WAAW;AACX,YAAI,yBAAuB;AACvB,kCAAsB,SAAS,cAAc,UAAU,WAAW;;AAEtE,8BAAoB,OAAO,QAAQ;AACnC,8BAAoB,OAAO,SAAS;AACpC,8BAAoB,UAAU,QAAQ,GAAG,GAAG,OAAO;AACnD,iBAAS,sBAAoB;;AAEjC,8BAAwB,SAAQ,eAAe,UAAU;AAEzD,eAAQ,QAAQ,IAAI,gBAAgB,QAAQ,QAAQ,aAAa;AACjE,eAAQ,MAAM,yBAAyB,SAAQ,WAAW,gBAAgB,SAAS;AACnF,sBAAgB,OAAM,QAAQ,gBAC1B,IAAI,wBAAwB,YAC5B,IAAI,kBAAkB;AAC1B,kBAAY,SAAQ,gBAAgB,SAAS,CAAC,kBAAkB;AAChE,eAAQ,YAAY,gBAAgB;AACpC,aAAO;;AC7DX;;;;;;;;;;;;;;;;AAkBO;AACH,aAAQ,QAAQ,qBAAY;AAC5B,aAAQ,iBAAU;AAClB,aAAO,UAAQ,QAAO,MAAoB;;AAEvC,yBAAmB;MACtB,YAAY;MACZ,aAAa;MACb,YAAY;;AC1BhB;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,gBAAgB,CAAC;AACtB,eAAQ,YAAY,WAAW,QAAQ,WAAY;AACnD,aAAK,cAAc,CAAC,WAAW;AAC/B,sCAA8B,KAAK,MAAM,aAAa,KAAK;AAC3D,wCAAgC,aAAa;AAC7C,4BAAoB;AACpB,YAAI,WAAW;AACX,8BAAoB,IAAI;AACxB,0BAAgB,4BAA4B,OAAW,eAAe,YAAY,YAAY,KAC1F;;AAER,+BAAuB;AACvB,YAAI,SAAS,aAAa;AACtB,6BAAmB;oCACK;;;;;AAK5B,aAAK,WAAW;;;;UAId;;;;;;;;kCAQwB;;;;8BAIJ;;;;;;;;;YASlB;;;iCAGqB;cACnB,4BAA4B;;;YAG9B;qBACS,4BAA4B;;;;;YAKrC;qBACS,4BAA4B;;;;;;YAMrC;;;;;;;ACnFZ;;;;;;;;;;;;;;;;AAqBA;AACI,qBAAe;AACf,aAAO,OAAO,WAAW,KAAK,OAAO,OAAO,SAAS,GAAG,YAAY;AAChE,wBAAgB,OAAO,SAAS,OAAO,OAAO,SAAS,GAAG,UAAU,QAAQ;AAC5E,2BAAmB,0BAAsC;AACzD,eAAO,KAAK;UACR,QAAQ;UACR;UACA,SAAS,KAAK,KAAK,UAAU;;;AAGrC,aAAO;;AAEJ;AACH,8BAAwB,mBAAmB,EAAE;AAC7C,mBAAa;AACb,mBAAa,GAAG,IAAI,gBAAgB,QAAQ;AACxC,eAAQ,QAAQ,YAAY,WAAY,gBAAgB;AACxD;AACA;AACA,YAAI,kBAAkB;AAClB,oBAAU,MAAM,IACZ,IAAI,YAAY,CAAE,YAAY,QAAQ,WAAW,EAAE,MAAM,IAAI,UAAW,UACxE,IAAI,YAAY,CAAE,YAAY,QAAQ,WAAW,EAAE,MAAM,IAAI;;AAGjE,oBAAU,IAAI,cAAc,CAAE,YAAY,QAAQ,WAAW,EAAE,MAAM,IAAI,UAAW;;AAExF,yBAAiB;AACjB,iBAAS,SAAQ,gBAAgB,SAAS,CAAC,SAAS;AACpD,YAAI,eAAe,WAAW,EAAE;AAC5B,mBAAQ,8BAA8B;;;AAG9C,aAAO;;ACvDX;;;;;;;;;;;;;;;;AAmBO;AACH,qBAAe,eAAmB;AAClC,oBAAc,eAAmB,EAAE;AACnC,wBAAkB,QAAQ;AAC1B,4BAAsB,UAAQ,CAAE,QAAQ,CAAE,IAAK,OAAO,CAAE,OAAO,CAAC,WAAW,UAAW;AACtF,sBAAgB,OAAO,eAAe,EAAE,OAAO,OAAO;AACtD,6BAAuB,UAAQ,CAAE,QAAQ,CAAE,GAAG,UAAW,OAAO,CAAE,OAAO,WAAY;AACrF,eAAQ,8BAA8B;AACtC,eAAQ,8BAA8B;AACtC,aAAO;;AC5BX;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,gBAAgB,CAAC;AACtB,4BAAoB,IAAI,MAAM,OAAO;AACrC,qBAAa,GAAG,IAAI,YAAY,QAAQ;AACpC,sBAAY,KAAK,OAAO,OAAO;;AAEnC,aAAK,cAAc;AACnB,aAAK,OAAO,YAAY;AACxB,sBAAc,kBAAkB,KAAK;AACrC,yBAAiB,kBAAkB;AACnC,aAAK,WAAW;;QAEhB;uBACe;;;;;AAKvB;AACI,mBAAa,OAAO;AACpB,UAAI,OAAO;AACP,cAAM,MAAM,sBAAsB;;AAEtC,4BAAsB,CAAC,WAAW,WAAW,WAAW,WAAW,WAAW;AAC9E,6BAAuB,IAAI,MAAM;AACjC,mBAAa,GAAG,IAAI,OAAO,QAAQ;AAC/B,uBAAe,OAAO,MAAM,cAAc;;AAE9C,aAAO,eAAe;;AC9C1B;;;;;;;;;;;;;;;;;MAmBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,4BAAoB,IAAI,MAAM,OAAO;AACrC,qBAAa,GAAG,IAAI,YAAY,QAAQ;AACpC,sBAAY,KAAK,OAAO,OAAO;;AAEnC,aAAK,cAAc;AACnB,aAAK,OAAO,YAAY;AACxB,YAAI,KAAK,OAAO;AACZ,gBAAM,MAAM,6BAA6B,KAAK;;AAElD,sBAAc,kBAAkB,KAAK;AACrC,4BAAoB,eAAe,MAAM,KAAK;AAC9C,8BAAsB,IAAI,MAAM,KAAK;AACrC,qBAAa,GAAG,IAAI,OAAO,QAAQ;AAC/B,wBAAc,OAAO,MAAM,YAAY;;AAE3C,0BAAkB,QAAQ,cAAc,MAAM,IAAI;AAClD,2BAAmB,KAAK,YAAY,KAAK,OAAO,QAAQ,YAAY,KAAK,OAAO;AAChF,qBAAa,mBAAmB,cAAc,YAAY;AAC
1D,aAAK,WAAW;;QAEhB;;oBAEY;WACT;sBACW;;UAEZ,YAAY,KAAK,OAAO;aACrB,YAAY,KAAK,OAAO,QAAQ,YAAY,KAAK,OAAO;sBAC/C;aACT;wBACW;;;;;;;;ACrDxB;;;;;;;;;;;;;;;;AAoBO;AACH,sBAAgB,OAAM,QAAQ,iCAC1B,IAAI,uBAAuB,EAAE,OAAO,QACpC,IAAI,iBAAiB,EAAE,OAAO;AAClC,aAAO,SAAQ,gBAAgB,SAAS,CAAC,IAAI,EAAE;;ACxBnD;;;;;;;;;;;;;;;;AAqBO,wBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,OAAO;AAC1B,eAAQ,KAAM;AACd,eAAQ,kBAAkB,YAAa;AACvC,6BAAqB;AACrB,sBAAc,EAAE,MAAM;AACtB,yBAAiB,gBAAoB,kBAAkB,EAAE;AACzD,mBAAW;AACX,6BAAqB,oBAAgC,MAAM;AAC3D,qCAA6B,gBAAgB;AAC7C,mCAA2B,aAAa,mBAAmB,CAAC;AAC5D,uBAAe;AACf,YAAI;AACA,cAAI;AACA,6BAAiB,aAAa,QAAQ,IAAI,SAAS;AACnD,2BAAe,SAAS;AACxB,6BAAiB,IAAI,MAAM;AAC3B,yBAAa,GAAG,IAAI,SAAS,QAAQ;AACjC,uBAAS,KAAK,EAAE,MAAM,aAAa;;AAEvC,mCAAuB,iBAAiB,QAAQ,EAAE,OAAO,EAAE,OAAO,cAAc;AAChF,uBAAW,aAAa,eAAe,UAAU,EAAE;AACnD,iCAAqB,aAAa,QAAQ,IAAI,SAAS;AACvD,yBAAa,SAAS;;AAGtB,uBAAW,gBAAc,GAAG,cAAc;;AAE9C,iBAAO,kBAA8B,KAAK,QAAQ;;AAEtD,oCAAwC,OAAO,MAAM;AACrD,2CAAmC,2BAAuC,SAAS,OAAO;AAC1F,uBAAe;AACf,YAAI;AAEA,qBAAW,sBAAkC,aAAa;;AAE9D;AACA,YAAI;AACA,2BAAiB,aAAa,QAAQ,IAAI,SAAS;AACnD,yBAAe,SAAS;AACxB,4BAAkB,WAAW,QAAQ,eAAmB,cAAc,UAAU,EAAE;AAClF,gBAAM,aAAa,eAAe,UAAU,EAAE;AAC9C,0BAAgB,aAAa,QAAQ,IAAI,IAAI;AAC7C,kBAAQ,SAAS;;AAGjB,gBAAM,UAAQ,UAAU,aAAa,UAAU;;AAEnD,YAAI;AACA,uBAAa,8BAA8B;;AAE/C,eAAO;;;AC3Ef;;;;;;;;;;;;;;;;AAoBO;AACH,aAAQ,QAAQ,mBAAS,SAAU;AACnC,aAAQ,KAAM;AACd,yBAAiB,GAAG;AACpB,aAAQ,YAAY,SAAS,WAAK,mBAAoB;AACtD,wBAAkB;AAClB,cAAY,gCAA4C,SAAS,YAAY,MAAM,wEAChE,0BAA0B;AAC7C,uBAAiB,mBAA+B,EAAE,OAAO,YAAY,SAAS,WAAW,MAAK;AAC9F,UAAI,SAAS,gBAAgB,KAAK,SAAS,iBAAiB,KACxD,aAAiB,SAAS,SAAS,SAAS;AAC5C,eAAO,WAAS,CAAE,QAAQ,CAAE,IAAK;;AAErC,6BAAuB,IAAI,cAAc,UAAU,OAAO;AAC1D,aAAO,SAAQ,gBAAgB,gBAAgB,CAAC,IAAI,EAAE;;AAEnD,4BAAsB;MACzB,YAAY;MACZ,aAAa;MACb,YAAY;;ACvChB;;;;;;;;;;;;;;;;AAoBO;AACH,aAAQ,QAAQ,mBAAS,SAAU;AACnC,aAAQ,IAAI,eAAO,UAAW;AAC9B,gBAAU;AACV,yBAAiB,CAAC,QAAO,SAAS;AAClC,aAAQ,YAAY,SAAS,WAAK,mBAAoB;AACtD,uBAAiB,mBAA+B,EAAE,OAAO,YAAY,SAAS,GAAmB,MAAK;AACtG,2BAAqB;AACrB,sCAAgC,IAAI,cAAc,UAAU,OAAO;AACnE,gCAAyB,SAAQ,gBAAgB,yBAAyB,CAAC,IAAI,EAAE;AACjF,qCAA+B,IAAI,yBAAyB;AAC5D,qBAAe,SAAQ,gBAAgB,wBAAwB,CAAC,IAAI,oBAAmB,EAAE;AACzF,eAAQ,8BAA8B;AACtC,aAAO;;AAEJ,oCAA8B;MACjC,YAAY;MACZ,aAAa;MACb,YAAY;;ACtChB;;;;;;;;;;;;;;;;AAiBO;AACH,oBAAc,IAAI,cAAc,UAAU,OAAO;AACjD,yBAAmB,SAAQ,gBAAgB,SAAS,CAAC,IAAI;AACzD,gBAAU,IAAI,cAAc,UAAU,OAAO,MAAM,MAAM;AACzD,0BAAoB,SAAQ,gBAAgB,SAAS,CAAC,IAAI;AAC1D,aAAO,CAAC,YAAY;;ACtBxB;;;;;;;;;;;;;;;;AAmBO,sCAAgC;MACnC,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,OAAO;AAC1B,eAAQ,KAAM;AACd,eAAQ,YAAY,SAAS,WAAK,uBAAwB;AAC1D,6BAAqB;AACrB,gBAAY,EAAE,MAAM,WAAW,GAAG,MAAM,uDAAuD,EAAE,MAAM;AACvG,0BAAkB,CAAC,GAAG;AACtB,gBAAY,gCAA4C,SAAS,YAAY,MAAM,wEAChE,0BAA0B;AAC7C,yBAAiB,mBAA+B,EAAE,OAAO,YAAY,SAAS,WAAW;AACzF,kCAA0B,wBAAsB,GAAG,qBAAqB,UAAU;AAClF,eAAO,CAAC,QAAQ;;;AChCxB;;;;;;;;;;;;;;;;AAmBO;AACH,qBAAe,eAAmB;AAClC,oBAAc,eAAmB,EAAE;AACnC,wBAAkB,QAAQ;AAC1B,4BAAsB,UAAQ,CAAE,QAAQ,CAAE,IAAK,OAAO,CAAE,OAAO,CAAC,WAAW,UAAW;AACtF,sBAAgB,OAAO,eAAe,WAAW,QAAQ;AACzD,6BAAuB,UAAQ,CAAE,QAAQ,CAAE,GAAG,UAAW,OAAO,CAAE,OAAO,WAAY;AACrF,eAAQ,8BAA8B;AACtC,eAAQ,8BAA8B;AACtC,aAAO;;AC5BX;;;;;;;;;;;;;;;;AAmBO,uBAAmB;MACtB,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,OAAO;AAC1B,eAAQ,KAAM;AACd,eAAQ,UAAU,QAAS;AAC3B,6BAAqB;AACrB,sBAAc,EAAE,MAAM;AACtB,yBAAiB,gBAAoB,MAAM,EAAE;AAC7C,mBAAW;AACX,6BAAqB,oBAAgC,MAAM;AAC3D,sCAA8B,gBAAgB;AAC9C,mCAA2B,aAAa,mBAAmB,CAAC;AAC5D,8BAAsB;AACtB,wBAAgB;AAChB,YAAI;AACA,cAAI;AACA,6BAAiB,aAAa,QAAQ,IAAI,UAAU;AACpD,2BAAe,SAAS;AACxB,6BAAiB,IAAI,MAAM;AAC3B,yBAAa,GAAG,IAAI,SAAS,QAAQ;AACjC,uBAAS,KAAK,EAAE,MAAM,aAAa;;AAEvC,oCAAwB,iBAAiB,QAAQ,EAAE,OAAO,EAAE,OAAO,cAAc;
AACjF,wBAAY,aAAa,eAAe,UAAU,EAAE;AACpD,kCAAsB,aAAa,QAAQ,IAAI,UAAU;AACzD,0BAAc,SAAS;;AAGvB,wBAAY,gBAAc,GAAG,cAAc;;AAE/C,wBAAc,KAAK;AACnB,iBAAO,kBAA8B,KAAK,QAAQ;;AAEtD,oCAAwC,OAAO,MAAM;AACrD,4CAAoC,2BAAuC,UAAU,OAAO;AAC5F,uBAAe;AACf,YAAI;AAEA,qBAAW,sBAAkC,cAAc;;AAE/D,oBAAY,SAAS,WAAW,aAAa,UAAU;AACvD,wBAAgB;AACZ,uBAAa,8BAA8B;;AAE/C,eAAO;;;AChEf;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc,SAAS,IAAI,WAAU,GAAE,KAAqB,OAAO,KAAK,GAAE;AAC/E,qBAAa,OAAO;AACpB,sBAAc,kBAAkB;AAChC,sBAAc,SAAS,IAAI,QAAK,GAAE,IAAI,KAAK;AAC3C,oBAAY,SAAS,IAAI,WAAU,GAAE,KAAK,OAAO,IAAI,KAAK;AAC1D,+BAAuB,CAAC,aAAa,aAAa,aAAa,aAAa,MAAM,GAAG;AACrF,uBAAe,SAAS,YAAY,IAAI;AACxC,YAAI,SAAS;AACT,eAAK,WAAW;sBACN;oBACF;;;;;wCAKoB;;4CAEI;;;;;AAKhC;;AAEJ,aAAK,WAAW;QAChB,iBAAiB,SAAS;QAC1B,eAAe,SAAS;;;UAGtB;8BACoB;;iDAEmB;;qDAEI;;;UAG3C;yBACe;;;;;AC1DzB;;;;;;;;;;;;;;;;;MA0DI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,eAAe;AACpB,aAAK,eAAe;AACpB,aAAK,cAAc,SAAS,IAAI,WAAU,GAAE,KAAqB,OAAO,KAAK,GAAE;AAC/E,qBAAa,OAAO;AACpB,sBAAc,kBAAkB;AAChC,sBAAc,SAAS,IAAI,QAAK,GAAE,IAAI,KAAK;AAC3C,oBAAY,SAAS,IAAI,WAAU,GAAE,KAAK,OAAO,IAAI,KAAK;AAC1D,wBAAe,YAAY,MAAM;AACjC,uBAAe,YAAY,UAAU;AACrC,uBAAe,GAAG,QAAO,OAAO,QAAQ,KAAK,YAAY,OAAO;AAChE,0BAAkB,SAAS,IAAI,WAAW,QAAQ,OAAO,MAAM,IAAI;AACnE,uBAAe,SAAS,YAAY,IAAI;AACxC,uBAAe;AACf,YAAI,SAAS;AACT,2BAAiB;UACnB;;0CAEgC;;8CAEI;;;;AAIlC,qBAAW;UACb;UACA;sCAC4B,OAAO,YAAY;UAC/C,QAAO,OAAO;aACX;YACD;wCAC4B,OAAO,YAAY;;;;AAK/C,2BAAiB;UACnB;UACA,cAAc;UACd,eAAe;UACf;;6CAEmC;kDACK;;;AAGtC,qBAAW;UACb;UACA;sCAC4B,OAAO,YAAY;UAC/C,QAAO,OAAO;aACX;YACD;wCAC4B,OAAO,YAAY;;;UAGjD,QAAO,OAAO;aACX,QAAO,OAAO,QAAQ,KAAK,YAAY,OAAO;YAC/C;wCAC4B,OAAO,YAAY;YAC/C,QAAO,OAAO;eACX;cACD;0CAC4B,OAAO,YAAY;;;;;AAKrD,aAAK,WAAW;cACV,iBAAiB,SAAS;cAC1B,eAAe,SAAS;;;UAG5B;;UAEA;;;;;;ACtIV;;;;;;;;;;;;;;;;AAmBO,gCAA4B,EAAG,QAAQ,mBAAS;AACnD,aAAQ,KAAM;AACd,aAAQ,UAAU,QAAS;AAC3B,sBAAgB,OAAM,QAAQ,iCAC1B,IAAI,uBAAuB,EAAE,OAAO,UAAU,QAC9C,IAAI,iBAAiB,EAAE,OAAO,UAAU;AAC5C,qBAAe,SAAQ,gBAAgB,SAAS,CAAC,IAAI,EAAE;AACvD,aAAO;;AAEJ,8BAAwB;MAC3B,YAAY;MACZ,aAAa;MACb,YAAY;;AC/BhB;;;;;;;;;;;;;;;;AAqBO,6BAAyB;MAC5B,MAAM;MACN,MAAM;;;MAGN;AACI,aAAK,gBAAgB,CAAC,SAAS,SAAS,SAAS;AACjD,aAAK,cAAc,4BAAwC,QAAQ;AACnE,aAAK,WAAW;;;UAGd;;;;;;;;;;;;;AChCV;;;;;;;;;;;;;;;;AAuBA,gBAAY;AACL;AACH,aAAQ,QAAQ,qBAAY;AAC5B,aAAQ,GAAG,KAAM;AACjB,oBAAc,YAAwB,EAAE,OAAO,EAAE;AACjD,UAAI,EAAE,UAAU;AACZ,sBAAc,SAAQ,QAAQ,IAAI,EAAE;AACpC,sBAAc,SAAQ,QAAQ,IAAI,EAAE;AACpC,4BAAoB,IAAI,uBAAuB,iBAAsC,MAAM,EAAE,OAAO,EAAE;AACtG,4BAAoB,IAAI,uBAAuB,iBAAsC,MAAM,EAAE,OAAO,EAAE;AACtG,wBAAe;UACX;YACI,QAAQ,MAAM,mBAAmB,KAAK;YACtC,OAAO,MAAM,mBAAmB,KAAK;YACrC,OAAO,EAAE;;UAEb;YACI,QAAQ,MAAM,mBAAmB,KAAK;YACtC,OAAO,MAAM,mBAAmB,KAAK;YACrC,OAAO,EAAE;;UAEb;YACI,QAAQ,MAAM,mBAAmB,KAAK;YACtC,OAAO,MAAM,mBAAmB,KAAK;YACrC,OAAO,EAAE;;UAEb;YACI,QAAQ,MAAM,mBAAmB,KAAK;YACtC,OAAO,MAAM,mBAAmB,KAAK;YACrC,OAAO,EAAE;;;AAGjB,yBAAiB,SAAQ,gBAAgB,aAAa,SAAQ;AAC9D,yBAAiB,SAAQ,gBAAgB,aAAa,SAAQ;AAC9D,8BAAsB,UAAQ,CAAE,QAAQ,CAAE,MAAM,UAAU,MAAM,WAAY;AAC5E,iBAAQ,8BAA8B;AACtC,iBAAQ,8BAA8B;AAEtC,eAAO;;AAEX,UAAI,SAAQ,mBAAmB,CAAC,GAAG;AAC/B,sBAAc,SAAQ,QAAQ,IAAI,EAAE;AACpC,sBAAc,SAAQ,QAAQ,IAAI,EAAE;AACpC,sCAA8B,gBAAY,EAAE,OAAO,EAAE,OAAO,MAAM,QAAQ,MAAM,QAAQ;AACxF,oBAAY,SAAQ,eAAe,UAAU;AAC7C,wBAAgB,SAAQ,QAAQ,IAAI,IAAI;AACxC,gBAAQ,SAAS;AACjB,eAAO;;AAEX;AACA,UAAI,OAAM,QAAQ;AACd,kBAAU,IAAI,sBAAsB,KAAK,EAAE,OAAO,EAAE;;AAGpD,kBAAU,IAAI,gBAAgB,KAAK,EAAE,OAAO,EAAE;;AAElD,aAAO,SAAQ,gBAAgB,SAAS,CAAC,GAAG,IAAI;;AAE7C,6BAAuB;MAC1B,YAAY;MACZ,aAAa;MACb,YAAY;;ACpFhB;;;;;;;;;;;;;;;;AAiBO,uCAAkC;MACrC,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,mBAAS;AAC5B,cAAkB;AAElB,eAAQ,OAAO,UAAW;AAC1B,eAAQ,eAAe,cAAc,kBAAm
B;AACxD,2BAAmB;AACnB,0BAAkB,WAAW,SAAS,MAAM;AAC5C,2BAAmB,WAAW,SAAS,OAAO;AAC9C,iCAAyB;AACzB,gCAAwB;AACxB,kCAA0B;AAC1B,eAAO,wBAAqC,WAAW,YAAY,kBAAkB,iBAAiB;;;AC/B9G;;;;;;;;;;;;;;;;AAiBA,sCAAgC;AACzB,wCAAkC;MACrC,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,mBAAS;AAC5B,cAAkB;AAElB,eAAQ,OAAO,UAAW;AAC1B,eAAQ,eAAe,cAAc,gBAAgB,sBAAuB;AAC5E,2BAAmB;AACnB,0BAAkB,WAAW,SAAS,MAAM;AAC5C,2BAAmB,WAAW,SAAS,OAAO;AAC9C,eAAQ,iBAAiB,gBAAiB,0BAAwB,WAAW,YAAY,eAAe,cAAc,gBAAgB;AACtI,eAAO,CAAC,iBAAiB;;;AC9BjC;;;;;;;;;;;;;;;;AAkBA,sCAAgC;AACzB,wCAAkC;MACrC,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,mBAAS;AAC5B,cAAkB;AAElB,eAAQ,OAAO,UAAW;AAC1B,eAAQ,eAAe,cAAc,gBAAgB,gBAAiB;AACtE,2BAAmB;AACnB,0BAAkB,WAAW,SAAS,MAAM;AAC5C,2BAAmB,WAAW,SAAS,OAAO;AAC9C,iCAAyB;AACzB,gCAAwB;AACxB,kCAA0B;AAC1B,gCAAwB;AACxB,eAAQ,iBAAiB,kBAAmB,0BAAwB,WAAW,YAAY,kBAAkB,iBAAiB,mBAAmB;AACjJ,eAAO,CAAC,iBAAiB;;;ACnCjC;;;;;;;;;;;;;;;;;MAkBI;AACI,aAAK,gBAAgB,CAAC;AACtB,aAAK,cAAc;AACnB,4BAAoB,WAAW;AAC/B,2BAAmB,WAAW;AAC9B,0BAAkB,KAAK,IAAI,SAAS,QAAQ;AAC5C,0BAAkB,KAAK,IAAI,SAAS,QAAQ;AAC5C,aAAK,cAAc;AACnB,mCAA2B,gBAA4B,QAAQ,aAAa;AAC5E,8BAAsB,QAAQ,QAAQ;AACtC,8BAAsB,QAAQ,QAAQ;AACtC,0BAAkB;AAClB,YAAI,OAAO,cAAc;AACrB,wBAAc,uBAAuB,UAAU,QAAQ;;AAGvD,wBAAc;2BACC,UAAU,KAAK;;;AAGlC,aAAK,WAAW;;;;;4CAKoB,oBAAoB,2BAA2B,oBAAoB;4CACnE,oBAAoB,2BAA2B,oBAAoB;iDAC9D;iDACA;YACrC;uCAC2B,yCAAyC;;;;;;;;AChDhF;;;;;;;;;;;;;;;;AAkBO,qCAA+B;MAClC,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,OAAO;AAC1B,eAAQ,iBAAU;AAClB,eAAQ,SAAS,WAAW,UAAW;AACvC,6BAAqB;AACrB,wBAAgB,IAAI,cAAc,OAAM,OAAO,SAAS,WAAW;AACnE,uBAAe,aAAa,gBAAgB,SAAS,CAAC,SAAQ,OAAM;AACpE,eAAO;;;AC3Bf;;;;;;;;;;;;;;;;AAkBA,gBAAY,0BAA0B;;;AAG/B,kBAAY,kBAAgB;AAC5B,wBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY;;ACzBhB;;;;;;;;;;;;;;;;AAkBA,mBAAe;AACR,qBAAe,kBAAgB;AAC/B,2BAAqB;MACxB,YAAY;MACZ,aAAa;MACb,YAAY;;ACvBhB;;;;;;;;;;;;;;;;AAkBA,iCAA2B;AACpB,gCAA0B,mBAAiB,CAAE,WAAW,sBAAoB,iBAAiB;AAC7F,sCAAgC;MACnC,YAAY;MACZ,aAAa;MACb,YAAY;;ACvBhB;;;;;;;;;;;;;;;;AAmBA,gBAAY;AACL,0BAAsB,mBAAiB;MAC1C,WAAW;MACX,iBAAiB;MACjB,iBAAiB;MACjB,eAAe;;AAEZ,wBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY;;AC7BhB;;;;;;;;;;;;;;;;AAkBA,gBAAY;AACL,kBAAY,kBAAgB;AAC5B,wBAAkB;MACrB,YAAY;MACZ,aAAa;MACb,YAAY;;ACvBhB;;;;;;;;;;;;;;;;AAmBO,8BAAwB;MAC3B,YAAY;MACZ,aAAa;MACb,YAAY,EAAG,QAAQ,OAAO;AAC1B,eAAQ,KAAM;AACd,eAAQ,QAAS;AACjB,6BAAqB;AACrB,sBAAc,EAAE,MAAM;AACtB,yBAAiB,IAAI,MAAM;AAC3B,qBAAa,GAAG,IAAI,SAAS,QAAQ;AACjC,mBAAS,KAAK,EAAE,MAAM,KAAK;;AAE/B;AACA,YAAI,aAAa,mBAAmB,CAAC;AACjC,2BAAiB,aAAa,QAAQ,IAAI,EAAE;AAC5C,yBAAe,SAAS;AACxB,4BAAkB,iBAAa,QAAQ,EAAE,OAAO,EAAE,OAAO,MAAM;AAC/D,gBAAM,aAAa,eAAe,UAAU,EAAE;AAC9C,0BAAgB,aAAa,QAAQ,IAAI,IAAI;AAC7C,kBAAQ,SAAS;;AAGjB,gBAAM,gBAAc,GAAG,MAAM;;AAEjC,eAAO;;;AC3Cf;;;;;;;;;;;;;;;;AAmBO;AACH,aAAQ,QAAQ,OAAO,qBAAY;AACnC,aAAQ,QAAS;AACjB,aAAQ,KAAM;AACd,yBAAiB,GAAG;AAEpB,cAAQ,KAAK,aAAa;AAC1B,qBAAe,SAAQ,SAAS,EAAE;AAClC,aAAQ,cAAc,aAAa,WAAY,cAAc,QAAQ,MAAM,EAAE,OAAO,EAAE;AACtF,aAAO;QACH,SAAQ,eAAe,aAAa,EAAE,OAAO;QAC7C,SAAQ,eAAe,CAAC,QAAQ,SAAS,SAAS;;;AAGnD,2BAAqB;MACxB,YAAY;MACZ,aAAa;MACb,YAAY;;ACpChB;;;;;;;;;;;;;;;;AAuDA,4BAAsB;MAClB;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;;AAEJ,+BAA2B;AACvB,sBAAe;;AC/FnB;;;;;;;;;;;;;;;;ACAA;AAGA,sBAAgB;ACHhB;;;;;;;;;;;;;;;;sBAsCuB;MACrB,aAAa;MACb,oBAAoB;MACpB,sBAAsB;MACtB,aAAa;MACb,eAAe;MACf,kBAAkB;MAClB,MAAQ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AC7CV,IAAA;AAAA;;;ACAA,IAAA;AAAA;;;ACAA,IAAA;AAAA;;;ACAA,IAAA;AACA,sCAAqC;AACnC,qBAAiB,OAAO,aAAa,eAAe,SAAS,gBAAgB,SAAS,cAAc,MAAM;AAC1G,QAAI,OAAO,eAAe;AAAa,mBAAa,cAAc;AAClE,WACF;AACE,uCAAgC,kCAAiC;AAEnE;AAA4B,YAAG,WAAW,UAAQ;AAAQ,qCAA2B,WAAW;AAAA;AAAQ,eAAO;AAAA;AAAM;AAA4B,YAAG,WAAW,UAAQ;AAAQ,qCAA2B,WAAW;AAAA;AAAQ,eAAO;AAAA;AAAO;AAA6B,YAAG,WAAW,UAAQ;AAAQ,qCAA2B,WAAW;AAAA;AAAQ,eAAO;AAAA;AAAO;AAA6B,YAAG,WAAW,UAAQ;AAAQ,qCAA2B,WAAW;AAAA;AAAQ,eAAO;AAAA;AAAQ;AAA6B,YAAG,WAAW,UAAQ;AAAQ,qCAA2B,WAAW;AAAA;AAAQ,eAAO;AAAA;AAAQ,mBAAW,OAAO,mCAAgC,cAAY,iCAA8B;AAAG,4BAAoB;AAAG;AAAQ,WAAI,OAAO;AAAQ,YAAG,OAAO,eAAe;AAAM,0BAAgB,OAAK,OAAO;AAAA;AAAA;AAAM,uBAAe;AAAG,wBAAgB;AAAiB,kBAAU;AAAyB,cAAM;AAAA;AAAS,+BAAuB;AAAM,kCAA0B;AAAM,gCAAwB;AAAM,iCAAyB;AAAM,2BAAmB,OAAO,WAAS;AAAS,8BAAsB,OAAO,kBAAgB;AAAW,4BAAoB,OAAO,YAAU,YAAU,OAAO,QAAQ,aAAW,YAAU,OAAO,QAAQ,SAAS,SAAO;AAAS,6BAAqB,CAAC,sBAAoB,CAAC,uBAAqB,CAAC;AAAsB,mCAA2B,OAAO,6BAA2B;AAAM,UAAG;AAAwB,kBAAO,OAAO;AAAU,uBAAa,OAAO;AAAgB,yBAAe,OAAO;AAAA;AAAkB,4BAAoB;AAAG;AAA0B,YAAG,OAAO;AAAe,iBAAO,OAAO,cAAc,MAAK;AAAA;AAAiB,eAAO,kBAAgB;AAAA;AAAK;AAA8C;AAAW;AAAa,UAAG;AAAqB,YAAG;AAAuB,4BAAgB,eAAgB,QAAQ,mBAAiB;AAAA;AAAS,4BAAgB,YAAU;AAAA;AAAI,gBAAM;AAAqC,cAAG,CAAC;AAAO,qBAAO;AAAc,cAAG,CAAC;AAAS,uBAAS;AAAgB,qBAAS,SAAS,aAAa;AAAU,iBAAO,OAAO,gBAAgB,UAAS,SAAO,OAAK;AAAA;AAAS,qBAAW;AAA8B,oBAAQ,MAAM,UAAS;AAAM,cAAG,CAAC,IAAI;AAAQ,kBAAI,IAAI,WAAW;AAAA;AAAK,kBAAO,IAAI;AAAQ,iBAAO;AAAA;AAAK,YAAG,QAAQ,QAAQ,SAAO;AAAG,wBAAY,QAAQ,QAAQ,GAAG,QAAQ,OAAM;AAAA;AAAK,qBAAW,QAAQ,QAAQ,MAAM;AAAG,gBAAQ,MAAM,qBAAoB;AAAa,cAAG,CAAE,eAAc;AAAa,kBAAM;AAAA;AAAA;AAAM,gBAAQ,MAAM,sBAAqB;AAAO,gBAAM;AAAiB,kBAAQ,QAAQ;AAAA;AAAS,eAAO,aAAW;AAAW,iBAAM;AAAA;AAA8B;AAAsB;AAAI,8BAAkB;AAAA;AAAmC,kBAAQ,MAAM;AAA2G,gBAAM;AAAA;AAAE,iBAAO,kBAAkB;AAAA,iBAAe;AAAsB,YAAG,OAAO,QAAM;AAAa,kBAAM;AAAuB,mBAAO,KAAK;AAAA;AAAA;AAAI,qBAAW;AAAuB;AAAS,cAAG,OAAO,eAAa;AAAY,mBAAO,IAAI,WAAW,WAAW;AAAA;AAAI,kBAAK,KAAK,GAAE;AAAU,kBAAO,OAAO,UAAO;AAAU,iBAAO;AAAA;AAAM,YAAG,OAAO,cAAY;AAAa,uBAAW;AAAA,mBAAmB,OAAO,aAAW;AAAa,uBAAW;AAAA;AAAU,YAAG,OAAO,SAAO;AAAY,kBAAM;AAAiB,iBAAK;AAAA;AAAA;AAAS,YAAG,OAAO,UAAQ;AAAa,cAAG,OAAO,YAAU;AAAY,sBAAQ;AAAG,kBAAQ,MAAI;AAAM,kBAAQ,OAAK,QAAQ,QAAM,OAAO,aAAW,cAAY,WAAS;AAAA;AAAA,iBAAe,sBAAoB;AAAuB,YAAG;AAAuB,4BAAgB,KAAK,SAAS;AAAA,mBAAa,SAAS;AAAe,4BAAgB,SAAS,cAAc;AAAA;AAAI,YAAG;AAAY,4BAAgB;AAAA;AAAW,YAAG,gBAAgB,QAAQ,aAAW;AAAG,4BAAgB,gBAAgB,OAAO,GAAE,gBAAgB,YAAY,OAAK;AAAA;AAAQ,4BAAgB;AAAA;AAAG,YAAG;AAAqB,kBAAM;AAAqC,gBAAG,CAAC;AAAO,uBAAO;AAAc,gBAAG,CAAC;AAAS,yBAAS;AAAgB,uBAAS,SAAS,aAAa;AAAU,mBAAO,OAAO,gBAAgB,UAAS,SAAO,OAAK;AAAA;AAAS,uBAAW;AAA8B,sBAAQ,MAAM,UAAS;AAAM,gBAAG,CAAC,IAAI;AAAQ,oBAAI,IAAI,WAAW;AAAA;AAAK,oBAAO,IAAI;AAAQ,mBAAO;AAAA;AAAA;AAAU,kBAAM;AAAyB,sBAAQ,IAAI;AAAe,gBAAI,KAAK,OAAM,KAAI;AAAO,gBAAI,KAAK;AAAM,mBAAO,IAAI;AAAA;AAAc,cAAG;AAAuB,yBAAW;AAAyB,wBAAQ,IAAI;AAAe,kBAAI,KAAK,OAAM,KAAI;AAAO,kBAAI,eAAa;AAAc,kBAAI,KAAK;AAAM,qBAAO,IAAI,WAAW,IAAI;AAAA;AAAA;AAAW,sBAAU;AAAuC,sBAAQ,IAAI;AAAe,gBAAI,KAAK,OAAM,KAAI;AAAM,gBAAI,eAAa;AAAc,gBAAI,SAAO;AAAsB,kBAAG,IAAI,UAAQ,OAAK,IAAI,UAAQ,KAAG,IAAI;AAAU,uBAAO,IAAI;AAAU;AAAA;AAAO;AAAA;AAAW,gBAAI,UAAQ;AAAQ,gBAAI,KAAK;AAAA;AAAA;AAAO,yBAAe;AAAgB,mBAAS,QAAM;AAAA;AAAA;AAAA;AAAa,UAAG;AAAqB,YAAG,OAAO,gBAAc;AAAa,wBAAY,qBAAsB;AAAA;AAAA;AAAa,gBAAQ,OAAO,YAAU,QAAQ,IAAI,KAAK;AAAS,gBAAQ,OAAO,eAAa,QAAQ,KAAK,KAAK;AAAS,WAAI,OAAO;AAAiB,YAAG,gBAAgB,eAAe;AAAM,iBAAO,OAAK,gBAAgB;AAA
A;AAAA;AAAM,wBAAgB;AAAK,UAAG,OAAO;AAAa,qBAAW,OAAO;AAAa,UAAG,OAAO;AAAe,sBAAY,OAAO;AAAe,UAAG,OAAO;AAAQ,gBAAM,OAAO;AAAQ,yBAAiB,QAAQ;AAAK,0BAAkB,QAAQ;AAAM,oCAA4B,QAAQ;AAAgB;AAAe,UAAG,OAAO;AAAc,qBAAW,OAAO;AAAc;AAAkB,UAAG,OAAO;AAAiB,wBAAc,OAAO;AAAiB,UAAG,OAAO,gBAAc;AAAU,YAAI;AAAA;AAAmC;AAAe,sBAAc,IAAI,YAAY,MAAM,CAAC,SAAU,KAAI,SAAU,MAAI,GAAE,SAAU;AAAY;AAAe,6BAAqB;AAAE,yBAAiB;AAAE,kBAAU;AAAM,uBAAe;AAAE;AAAgC,YAAG,CAAC;AAAW,gBAAM,uBAAqB;AAAA;AAAA;AAAO;AAAyB,mBAAS,OAAO,MAAI;AAAO,gBAAO,MAAK,kCAAgC,QAAM;AAA8B,eAAO;AAAA;AAAK;AAAoD,kBAAQ,CAAC,QAAS;AAAc,qBAAQ;AAAE,cAAG,QAAM,QAAM,QAAM,UAAW,QAAM;AAAG,sBAAS,KAAI,UAAQ,KAAG;AAAE,mBAAI,WAAW;AAAK,yBAAa,KAAI,MAAI;AAAA;AAAK,iBAAO;AAAA,WAAK,OAAQ;AAAc,qBAAQ,WAAW,IAAI;AAAQ,6BAAmB,KAAI;AAAK,iBAAO;AAAA;AAAM;AAAiC,cAAG,eAAa;AAAS,mBAAO,aAAa;AAAK,cAAG,eAAa;AAAU,mBAAO,QAAQ;AAAK,iBAAO;AAAA;AAAI,mBAAS,SAAS;AAAO,oBAAU;AAAG,oBAAU;AAAE,YAAG;AAAM,uBAAU,GAAE,IAAE,KAAK,QAAO;AAAK,4BAAc,IAAI,SAAS;AAAI,gBAAG;AAAW,kBAAG,UAAQ;AAAE,wBAAM;AAAY,oBAAM,KAAG,UAAU,KAAK;AAAA;AAAS,oBAAM,KAAG,KAAK;AAAA;AAAA;AAAA;AAAK,kBAAQ,KAAK,MAAM,MAAK;AAAO,cAAI,mBAAmB;AAAK,YAAG,UAAQ;AAAE,uBAAa;AAAO,eAAO;AAAA;AAAI;AAA+C,mBAAS,YAAU;AAAG,0BAAgB,SAAS,MAAM;AAAe,iBAAO,SAAO;AAAA;AAAW,yBAAe,eAAa;AAAS,YAAG,cAAY,eAAa,CAAC;AAAM,iBAAO,SAAS;AAAA;AAAO,eAAO;AAAW,iBAAO,MAAM,OAAM,YAAW,UAAS,WAAU;AAAA;AAAA;AAAO;AAAoD,qBAAW,MAAI;AAAe,kBAAQ;AAAG,eAAM,CAAE,QAAK;AAAS,mBAAO,KAAK;AAAO,cAAG,CAAC;AAAG,mBAAO;AAAI,cAAG,CAAE,MAAG;AAAM,mBAAK,OAAO,aAAa;AAAI;AAAA;AAAS,mBAAO,KAAK,SAAO;AAAG,cAAI,MAAG,QAAM;AAAK,mBAAK,OAAO,aAAc,MAAG,OAAK,IAAE;AAAI;AAAA;AAAS,mBAAO,KAAK,SAAO;AAAG,cAAI,MAAG,QAAM;AAAK,iBAAI,MAAG,OAAK,KAAG,MAAI,IAAE;AAAA;AAAQ,iBAAI,MAAG,MAAI,KAAG,MAAI,KAAG,MAAI,IAAE,KAAK,SAAO;AAAA;AAAG,cAAG,KAAG;AAAO,mBAAK,OAAO,aAAa;AAAA;AAAS,qBAAO,KAAG;AAAM,mBAAK,OAAO,aAAa,QAAM,MAAI,IAAG,QAAM,KAAG;AAAA;AAAA;AAAO,eAAO;AAAA;AAAI;AAA0C,eAAO,MAAI,kBAAkB,oBAAmB,KAAI,kBAAgB;AAAA;AAAG;AAA4D,YAAG,CAAE,mBAAgB;AAAG,iBAAO;AAAE,uBAAa;AAAO,qBAAW,SAAO,kBAAgB;AAAE,qBAAU,GAAE,IAAE,IAAI,QAAO,EAAE;AAAG,kBAAM,IAAI,WAAW;AAAG,cAAG,KAAG,SAAO,KAAG;AAAO,qBAAO,IAAI,WAAW,EAAE;AAAG,gBAAE,QAAQ,MAAE,SAAO,MAAI,KAAG;AAAA;AAAK,cAAG,KAAG;AAAK,gBAAG,UAAQ;AAAO;AAAM,iBAAK,YAAU;AAAA,qBAAU,KAAG;AAAM,gBAAG,SAAO,KAAG;AAAO;AAAM,iBAAK,YAAU,MAAI,KAAG;AAAE,iBAAK,YAAU,MAAI,IAAE;AAAA,qBAAW,KAAG;AAAO,gBAAG,SAAO,KAAG;AAAO;AAAM,iBAAK,YAAU,MAAI,KAAG;AAAG,iBAAK,YAAU,MAAI,KAAG,IAAE;AAAG,iBAAK,YAAU,MAAI,IAAE;AAAA;AAAQ,gBAAG,SAAO,KAAG;AAAO;AAAM,iBAAK,YAAU,MAAI,KAAG;AAAG,iBAAK,YAAU,MAAI,KAAG,KAAG;AAAG,iBAAK,YAAU,MAAI,KAAG,IAAE;AAAG,iBAAK,YAAU,MAAI,IAAE;AAAA;AAAA;AAAI,aAAK,UAAQ;AAAE,eAAO,SAAO;AAAA;AAAS;AAAkD,eAAO,kBAAkB,KAAI,oBAAmB,QAAO;AAAA;AAAiB;AAA8B,kBAAQ;AAAE,qBAAU,GAAE,IAAE,IAAI,QAAO,EAAE;AAAG,kBAAM,IAAI,WAAW;AAAG,cAAG,KAAG,SAAO,KAAG;AAAM,gBAAE,QAAQ,MAAE,SAAO,MAAI,IAAI,WAAW,EAAE,KAAG;AAAK,cAAG,KAAG;AAAI,cAAE;AAAA,mBAAY,KAAG;AAAK,mBAAK;AAAA,mBAAU,KAAG;AAAM,mBAAK;AAAA;AAAO,mBAAK;AAAA;AAAE,eAAO;AAAA;AAAI;AAA0C,2BAAmB,IAAI,OAAM;AAAA;AAAQ,2BAAmB;AAAM;AAA6B,YAAG,IAAE,WAAS;AAAG,eAAG,WAAS,IAAE;AAAA;AAAS,eAAO;AAAA;AAAE;AAAsE;AAAyC,kBAAO;AAAI,eAAO,WAAS,QAAM,IAAI,UAAU;AAAK,eAAO,YAAU,SAAO,IAAI,WAAW;AAAK,eAAO,YAAU,SAAO,IAAI,WAAW;AAAK,eAAO,YAAU,SAAO,IAAI,WAAW;AAAK,eAAO,aAAW,UAAQ,IAAI,YAAY;AAAK,eAAO,aAAW,UAAQ,IAAI,YAAY;AAAK,eAAO,aAAW,UAAQ,IAAI,aAAa;AAAK,eAAO,aAAW,UAAQ,IAAI,aAAa;AAAA;AAAK,uBAAe,oBAAiB,wBAAqB,sBAAmB,0BAAuB;AAAM,UAAG;AAAA;AAAyB,mCAA2B,OAAO,qBAAmB;AAAS,UAAG;AAAwB,qBAAW,OAAO;AAAc,kBAAO,OAAO;AAAA;AAAe,YAAG,OAAO;AAAe,uBAAW,OAAO;AAAA;AAAmB,uBAAW,IAAI,YAAY,OAAO,CAAC,SAAU,yBAAuB,gBAAe,SAAU,aAAW,gBAAe,QAAS;AAAO,cAAG,CAAE,YAAW,kBAAkB;AAAoB,gBAAI;AAA+N,gBAAG;AAAqB,sBAAQ,IAAI;AAAA;AAAqH,kBAAM,MAAM;AAAA;AAAA;AA
AA;AAAgB,UAAG;AAAY,kBAAO,WAAW;AAAA;AAAO,+BAAuB,QAAO;AAAW,iCAA2B;AAAQ,UAAG,CAAC;AAAwB,4BAAoB,kBAAgB,KAAG;AAAA;AAAa;AAAyC,eAAM,UAAU,SAAO;AAAG,yBAAa,UAAU;AAAQ,cAAG,OAAO,YAAU;AAAY,qBAAS;AAAQ;AAAA;AAAS,qBAAS,SAAS;AAAK,cAAG,OAAO,SAAO;AAAU,gBAAG,SAAS,QAAM;AAAW,qBAAO,aAAa;AAAA;AAAW,qBAAO,cAAc,MAAK,SAAS;AAAA;AAAA;AAAW,iBAAK,SAAS,QAAM,SAAU,OAAK,SAAS;AAAA;AAAA;AAAA;AAAO,yBAAiB;AAAG,uBAAe;AAAG,uBAAe;AAAG,uBAAe;AAAG,0BAAkB;AAAG,+BAAuB;AAAM,UAAG;AAAuB,6BAAmB;AAAK;AAAkB,YAAG;AAAuB;AAAO,YAAG,OAAO;AAAW,cAAG,OAAO,OAAO,aAAW;AAAW,mBAAO,YAAU,CAAC,OAAO;AAAW,iBAAM,OAAO,UAAU;AAAQ,wBAAY,OAAO,UAAU;AAAA;AAAA;AAAU,6BAAqB;AAAA;AAAc;AAAuB,6BAAmB;AAAK,6BAAqB;AAAA;AAAY;AAAmB,YAAG;AAAuB;AAAO,6BAAqB;AAAA;AAAY;AAAmB,YAAG;AAAuB;AAAO,YAAG,OAAO;AAAY,cAAG,OAAO,OAAO,cAAY;AAAW,mBAAO,aAAW,CAAC,OAAO;AAAY,iBAAM,OAAO,WAAW;AAAQ,yBAAa,OAAO,WAAW;AAAA;AAAA;AAAU,6BAAqB;AAAA;AAAe;AAAyB,qBAAa,QAAQ;AAAA;AAAI;AAA0B,sBAAc,QAAQ;AAAA;AAAI,sBAAc,KAAK;AAAK,uBAAe,KAAK;AAAM,4BAAoB;AAAE,iCAAyB;AAAK,kCAA0B;AAAK;AAA8B,gBAAO,CAAC,wBAAuB;AAAuD;AAAkB,YAAG,OAAO;AAA2B,iBAAO,0BAA0B;AAAA;AAAA;AAAkB;AAAiC;AAAkB,YAAG,OAAO;AAA2B,iBAAO,0BAA0B;AAAA;AAAiB,YAAG,mBAAiB;AAAG,cAAG,yBAAuB;AAAM,0BAAc;AAAsB,mCAAqB;AAAA;AAAK,cAAG;AAAuB,2BAAa;AAAsB,oCAAsB;AAAK;AAAA;AAAA;AAAA;AAAa,aAAO,qBAAmB;AAAG,aAAO,qBAAmB;AAAG;AAAqB,YAAG,OAAO;AAAY,iBAAO,WAAW;AAAA;AAAM,YAAG;AAAuB,kBAAQ,MAAM,yBAAwB,IAAI,QAAO;AAAO,gBAAM;AAAG,YAAI;AAAM,YAAI;AAAM,gBAAM;AAAK,qBAAW;AAAE,eAAK,WAAS,OAAK;AAA+C,cAAM,IAAI,YAAY,aAAa;AAAA;AAAM;AAA+B,eAAO,OAAO,UAAU,aAAW,IAAI,WAAW,UAAQ,IAAI,QAAQ,YAAU;AAAA;AAAE,0BAAkB;AAAwC;AAA6B,eAAO,UAAU,UAAS;AAAA;AAAe,0BAAkB;AAAU;AAA6B,eAAO,UAAU,UAAS;AAAA;AAAe,2BAAmB;AAAuC,UAAG,CAAC,UAAU;AAAiB,yBAAe,WAAW;AAAA;AAAgB;AAAqB;AAAI,cAAG;AAAY,mBAAO,IAAI,WAAW;AAAA;AAAY,cAAG;AAAY,mBAAO,WAAW;AAAA;AAAqB,kBAAK;AAAA;AAAA;AAA8D,gBAAM;AAAA;AAAA;AAAM;AAA4B,YAAG,CAAC,cAAa,uBAAoB,0BAAwB,OAAO,UAAQ,cAAY,CAAC,UAAU;AAAiB,iBAAO,MAAM,gBAAe,CAAC,aAAY,gBAAgB,KAAK;AAAmB,gBAAG,CAAC,SAAS;AAAO,oBAAK,yCAAuC,iBAAe;AAAA;AAAI,mBAAO,SAAS;AAAA,aAAmB,MAAM;AAAW,mBAAO;AAAA;AAAA;AAAc,eAAO,IAAI,QAAQ;AAAyB,kBAAQ;AAAA;AAAA;AAAe;AAAsB,mBAAS,CAAC,GAAI;AAAe;AAA0C,yBAAY,SAAS;AAAQ,iBAAO,SAAO;AAAQ,uBAAW;AAAO,cAAG,CAAC;AAAwB,mCAAqB,QAAQ,cAAc;AAAO,oBAAQ,cAAc,QAAQ;AAAY,sBAAQ,uBAAuB,GAAE;AAAW,oBAAG,CAAC,EAAE;AAAiB,sCAAoB;AAAA;AAAA;AAAA;AAAA;AAAyB,YAAG,CAAC;AAAwB,2BAAiB;AAAA;AAAoB;AAA2C,0BAAgB,OAAO,aAAY,OAAO;AAAA;AAAW;AAA0C,iBAAO,mBAAmB,KAAK;AAAiB,mBAAO,YAAY,YAAY,QAAO;AAAA,aAAQ,KAAK,UAAS;AAAiB,gBAAI,4CAA0C;AAAQ,kBAAM;AAAA;AAAA;AAAU;AAA4B,cAAG,CAAC,cAAY,OAAO,YAAY,yBAAuB,cAAY,CAAC,UAAU,mBAAiB,CAAC,UAAU,mBAAiB,OAAO,UAAQ;AAAY,kBAAM,gBAAe,CAAC,aAAY,gBAAgB,KAAK;AAAmB,2BAAW,YAAY,qBAAqB,UAAS;AAAM,qBAAO,OAAO,KAAK,2BAA0B;AAAiB,oBAAI,oCAAkC;AAAQ,oBAAI;AAA6C,uCAAuB;AAAA;AAAA;AAAA;AAAoC,mBAAO,uBAAuB;AAAA;AAAA;AAA4B,YAAG,OAAO;AAAoB;AAAI,2BAAY,OAAO,mBAAmB,MAAK;AAAiB,mBAAO;AAAA;AAAiB,gBAAI,wDAAsD;AAAG,mBAAO;AAAA;AAAA;AAAO;AAAmB,eAAM;AAAA;AAAG,uBAAe;AAAG;AAA0B,gBAAQ;AAAA;AAAc,UAAG,CAAC;AAAuB,mBAAW,KAAK,CAAC,MAAK;AAAW;AAAA;AAAwB,0BAAkB;AAAE,6CAAqC;AAAE,6CAAqC;AAAE;AAAoF,qBAAW,aAAW;AAAE,8BAAoB,sBAAoB;AAAE,8BAAoB,sBAAoB;AAAE,wBAAc;AAAW,2CAAiC;AAAoB,2CAAiC;AAAA;AAAoB,aAAO,4BAA0B;AAAuB,wBAAgB,CAAC,OAAM,IAAG,QAAO,IAAG,OAAM,IAAG,OAAM,IAAG,KAAI,IAAG,OAAM,IAAG,OAAM,GAAE,SAAQ,IAAG,OAAM,GAAE,QAAO,IAAG,QAAO,GAAE,aAAY,GAAE,QAAO,IAAG,QAAO,GAAE,QAAO,IAAG,SAAQ,KAAI,OAAM,IAAG,QAAO,IAAG,OAAM,IAAG,QAAO,IAAG,SAAQ,IAAG,QAAO,IAAG,QAAO,IAAG,QAAO,IAAG,QAAO,IAAG,QAAO,IAAG,SAAQ,IAAG,OAAM,IAAG,QAAO,IAAG,QAAO,IAAG,OAAM,IAAG,QAAO,IAAG,OAAM,IAAG,MAAK,IAAG,QAAO,IAAG,QAAO,IAAG,OAAM,IAAG,QAAO,KAAI,UAAS,KAAI,QAAO,KAAI,QAAO,KAAI,QAAO,KAAI,SAAQ,KAAI,QAAO,KAAI,QAAO,KAAI,SAAQ,IAAG,QAAO,IAAG,O
AAM,KAAI,OAAM,KAAI,QAAO,KAAI,QAAO,KAAI,SAAQ,KAAI,SAAQ,KAAI,WAAU,IAAG,QAAO,KAAI,QAAO,KAAI,SAAQ,KAAI,OAAM,KAAI,OAAM,KAAI,QAAO,KAAI,QAAO,KAAI,SAAQ,KAAI,SAAQ,IAAG,MAAK,KAAI,QAAO,KAAI,OAAM,KAAI,QAAO,IAAG,WAAU,IAAG,SAAQ,KAAI,SAAQ,GAAE,UAAS,KAAI,QAAO,KAAI,SAAQ,KAAI,SAAQ,KAAI,SAAQ,KAAI,SAAQ,KAAI,SAAQ,KAAI,UAAS,KAAI,QAAO,IAAG,WAAU,IAAG,cAAa,IAAG,OAAM,IAAG,YAAW,KAAI,cAAa,KAAI,YAAW,IAAG,SAAQ,IAAG,cAAa,GAAE,YAAW,IAAG,UAAS,IAAG,aAAY,IAAG,WAAU,KAAI,cAAa,IAAG,YAAW,GAAE,cAAa,IAAG,aAAY,IAAG,UAAS,IAAG,WAAU,IAAG,WAAU,KAAI,cAAa,IAAG,aAAY,IAAG,UAAS,GAAE,cAAa,IAAG,UAAS,IAAG,iBAAgB,IAAG,iBAAgB,KAAI,eAAc,GAAE,WAAU,IAAG,SAAQ,IAAG,UAAS,IAAG,cAAa,KAAI,QAAO,KAAI,QAAO,IAAG,QAAO,IAAG,SAAQ,KAAI,WAAU,KAAI,QAAO,IAAG,WAAU,IAAG,WAAU,IAAG,iBAAgB,IAAG,YAAW,IAAG,UAAS;AAAK,6CAAqC;AAAM;AAA4C,YAAG,QAAM,KAAG,OAAK,mBAAmB,UAAQ,OAAK,QAAM,QAAM;AAAE,iBAAM;AAAI,YAAG,SAAO;AAAE,iBAAO;AAAE,YAAG,SAAO;AAAW,kBAAM;AAAS,oCAA0B,QAAQ,KAAK,qBAAoB,oCAAkC;AAAG,8BAAoB;AAAE,YAAG,yBAAuB;AAAM,2BAAe,QAAQ,gBAAgB,qBAAoB,oCAAkC,GAAE,uBAAsB;AAAG,cAAG,cAAY;AAAuB,cAAE;AAAM,8BAAgB;AAAE,gBAAG,SAAO;AAAE,qBAAO;AAAA;AAAA;AAAG,kBAAQ,QAAQ,OAAO,qBAAoB,QAAM,GAAE;AAAO,YAAG,OAAK;AAAE,iBAAO,MAAI;AAAgB,cAAK,iDAA+C;AAAA;AAAI,aAAO,4BAA0B;AAAuB;AAAoC,YAAG;AAAuB,gBAAK;AAAuF,YAAG,CAAC;AAAY,gBAAK;AAAoD,4BAAoB,cAAY,MAAI,KAAG;AAAE,sBAAY,QAAQ,SAAS;AAAa,gBAAQ,OAAO;AAAY,gBAAQ,eAAe;AAAS,gBAAQ,eAAe,OAAO,QAAQ,eAAe,QAAQ,QAAQ,SAAQ;AAAG,gBAAQ,OAAO,UAAQ;AAAA;AAAU;AAAsC,YAAG;AAAuB,gBAAK;AAAyF,YAAG,CAAC;AAAY,gBAAK;AAAsD,sBAAY,QAAQ,SAAS;AAAa,gBAAQ,OAAO,YAAY,CAAC,KAAM;AAAA;AAAW;AAAuC,YAAG;AAAuB,gBAAK;AAA0F,YAAG,CAAC;AAAY,gBAAK;AAAuD,4BAAoB,cAAY,MAAI,KAAG;AAAE,sBAAY,QAAQ,SAAS;AAAa,YAAG;AAAS,uBAAW,QAAQ;AAAO,kBAAQ,mBAAmB;AAAA;AAAA;AAAS,oBAAY,CAAC,gBAAe,GAAE,gBAAe,CAAC,aAAY,GAAE,WAAU,IAAG,eAAc,IAAG,gBAAe,IAAG,aAAY;AAAW,+BAAuB,QAAQ,iBAAgB,CAAC,uBAAsB;AAAG,oDAA4C,QAAQ;AAAA,SAAkB,qBAAoB;AAAW,8BAAoB;AAAE,qBAAU,GAAE,IAAE,iBAAgB,EAAE;AAAG,kBAAQ;AAAA;AAAuB,gBAAQ,kBAAgB;AAAM,qBAAU,GAAE,IAAE,MAAI,GAAE,EAAE;AAAE,8BAAoB,QAAQ,kBAAgB,IAAE,KAAG;AAAE,4BAAoB,QAAQ,kBAAgB,MAAI,KAAG,QAAQ;AAAgB,sBAAY,QAAQ,kBAAgB;AAAI,4BAAoB,WAAS,KAAG;AAAQ,wBAAc;AAAM,qBAAU,GAAE,IAAE,KAAI,EAAE;AAAE,8BAAoB,YAAU,IAAE,KAAG;AAAE,gBAAQ,MAAM,qBAAoB,QAAQ,kBAAgB,OAAK,GAAE;AAAW,gBAAQ,MAAM,qBAAoB,QAAQ,kBAAgB,MAAI,GAAE,QAAQ;AAAiB,gBAAQ,MAAM,qBAAoB,QAAQ,kBAAgB,MAAI,GAAE;AAAA,SAAK,YAAW;AAAA,SAAa,UAAS,IAAG,cAAa,MAAK,iBAAgB;AAAA,SAAa,iBAAgB;AAAW,YAAG,QAAQ,iBAAe;AAAM,iBAAM,QAAQ,aAAa,SAAO;AAAG,oBAAQ,aAAa;AAAA;AAAQ,kBAAQ,eAAa;AAAA;AAAK,YAAG,0BAAwB;AAAiB;AAAA,SAA4B,YAAW;AAAmB,iBAAO;AAAgB,YAAG;AAAI,kBAAQ,MAAM,qBAAoB,KAAG,KAAG,GAAE;AAAU,kBAAQ,MAAM,qBAAoB,KAAG,KAAG,GAAE;AAAG,kBAAQ,MAAM,qBAAoB,KAAG,MAAI,GAAE;AAAG,kBAAQ,MAAM,qBAAoB,KAAG,MAAI,GAAE;AAAG,kBAAQ;AAAkB,iCAAuB,KAAG,GAAE;AAAY,iCAAuB,GAAE,GAAE;AAAG,6BAAiB;AAAE,cAAG;AAAwB,wBAAY,CAAC,KAAM;AAAA;AAAA;AAAA,SAAY,cAAa;AAAW,gBAAQ;AAAkB,gBAAQ,MAAM,qBAAoB,mBAAiB,KAAG,GAAE;AAAI,gBAAQ,MAAM,qBAAoB,mBAAiB,KAAG,GAAE;AAAG,+BAAuB,mBAAiB,GAAE;AAAY,2BAAiB,eAAa;AAAE,+BAAuB,GAAE,GAAE;AAAG,oBAAY,CAAC,KAAM;AAAA,SAAgB,qBAAoB;AAAW,sBAAa,QAAQ;AAAU,wBAAY,QAAQ,SAAS;AAAG,cAAG,WAAS,QAAQ;AAAQ,oBAAQ,mBAAmB,QAAQ;AAAA;AAAA;AAAS,gBAAQ,WAAS;AAAG,qBAAU,GAAE,IAAE,QAAQ,cAAc,QAAO,EAAE;AAAG,uBAAW,QAAQ,cAAc;AAAG,iBAAO;AAAA;AAAY,gBAAQ,gBAAc;AAAG,qBAAU,GAAE,IAAE,QAAQ,eAAe,QAAO,EAAE;AAAG,uBAAW,QAAQ,eAAe;AAAG,wBAAY,OAAO;AAAQ,kBAAQ,eAAe;AAAS,iBAAO;AAAA;AAAY,gBAAQ,iBAAe;AAAA,SAAI,gBAAe;AAAkB,YAAG,CAAC;AAAQ;AAAO,YAAG,QAAQ;AAAkB,0BAAc,oBAAoB,QAAQ,mBAAiB,OAAK;AAAG,8BAAoB,QAAQ,mBAAiB,OAAK,KAAG;AAAE,gBAAM;AAAW,gBAAM,QAAQ;AAAA;AAAkB,gBAAQ,mBAAiB;AAAE,YAAG,QAAQ,qBAAmB,QAAQ;AAAU,gBAAM,QAAQ;AAAW,gBAAQ,YAAU;AAAE,YAAG,QAAQ;AAAO,kBAAQ,OAAO,UAAQ;AAAA,SAAM,oBAAmB;
AAAiB,eAAO,QAAQ,SAAS,OAAO,QAAQ;AAAQ,gBAAQ,cAAc,KAAK;AAAQ,gBAAQ,eAAe,OAAO,QAAQ,eAAe,QAAQ,SAAQ;AAAG,gBAAQ,eAAe,OAAO;AAAS,eAAO,UAAQ;AAAA,SAAW,uBAAsB;AAAA,SAAiB,wBAAuB;AAAmC,eAAO,YAAU;AAAY,kBAAM,EAAE;AAAQ,oBAAQ,EAAE;AAAO,cAAG,OAAO;AAAQ,oBAAQ,sCAAoC,OAAO,QAAQ;AAAiB,cAAG,EAAE,mBAAiB,EAAE,mBAAiB;AAAiB,yBAAW,QAAQ,SAAS,EAAE;AAAc,gBAAG;AAAQ,qBAAO,OAAO,YAAY,EAAE,MAAK,EAAE;AAAA;AAAsB,sBAAQ,MAAM,4CAA0C,MAAI,yBAAuB,EAAE,kBAAgB;AAAA;AAAuC,oBAAQ,sCAAoC;AAAU;AAAA;AAAO,cAAG,QAAM;AAA+B;AAAA,qBAAuD,QAAM;AAAe,2BAAe,EAAE;AAAA,qBAAc,QAAM;AAAiB,6BAAiB,EAAE;AAAA,qBAAmB,QAAM;AAAc,0BAAc,EAAE;AAAA,qBAAmB,QAAM;AAAgB,4BAAgB,EAAE;AAAA,qBAAmB,QAAM;AAAU,mBAAO,SAAO;AAAK,gBAAG;AAAkB,gCAAkB;AAAQ,gBAAG,OAAO;AAAY,qBAAO;AAAa,qBAAO,OAAO;AAAA;AAAA,qBAAoB,QAAM;AAAS,gBAAI,YAAU,EAAE,cAAY,OAAK,EAAE;AAAA,qBAAiB,QAAM;AAAY,gBAAI,YAAU,EAAE,cAAY,OAAK,EAAE;AAAA,qBAAiB,QAAM;AAAS,kBAAM,YAAU,EAAE,cAAY,OAAK,EAAE;AAAA,qBAAiB,QAAM;AAAQ,2BAAa,OAAO,WAAS,QAAQ,KAAK,qBAAoB,OAAO,QAAQ,SAAO,MAAI;AAAG,gBAAG;AAAU,sBAAQ,mBAAmB;AAAA;AAAA,qBAAiB,QAAM;AAAc,oBAAQ,mBAAmB;AAAA,qBAAgB,QAAM;AAAkB,oBAAQ,sBAAsB,EAAE;AAAA,qBAAc,EAAE,KAAK,WAAS;AAAgB,mBAAO,YAAY,EAAE;AAAA;AAAW,gBAAI,oCAAkC;AAAA;AAAK,kBAAQ,sCAAoC;AAAA;AAAW,eAAO,UAAQ;AAAY,cAAI,4BAA0B,EAAE,WAAS,MAAI,EAAE,SAAO,OAAK,EAAE;AAAA;AAAU,YAAG;AAAqB,iBAAO,GAAG,WAAU;AAAe,mBAAO,UAAU,CAAC,MAAK;AAAA;AAAS,iBAAO,GAAG,SAAQ;AAAe,mBAAO,QAAQ;AAAA;AAAQ,iBAAO,GAAG,QAAO;AAAe,oBAAQ,IAAI;AAAA;AAAA;AAAoD,eAAO,YAAY,CAAC,KAAM,QAAO,WAAY,OAAO,0BAAwB,YAAW,YAAwB,YAAwB,cAA4B;AAAA,SAAmC,sBAAqB;AAAW,4BAAkB,WAAW;AAA6C,gBAAQ,cAAc,KAAK,IAAI,OAAO;AAAA,SAAiB,cAAa;AAAW,YAAG,QAAQ,cAAc,UAAQ;AAAG,kBAAQ;AAAuB,kBAAQ,uBAAuB,QAAQ,cAAc;AAAA;AAAI,YAAG,QAAQ,cAAc,SAAO;AAAE,iBAAO,QAAQ,cAAc;AAAA;AAAW,iBAAO;AAAA,SAAM,cAAa;AAAgB,gBAAM,YAAY,QAAM;AAAM,eAAM,YAAY,QAAM;AAAA;AAAA;AAAO;AAAgD,qBAAW,WAAS;AAAS,oBAAU;AAAS,qBAAa;AAAA;AAAU,aAAO,yBAAuB;AAAoB;AAA4B,eAAO;AAAA;AAAc,aAAO,sBAAoB;AAAiB;AAAsD,cAAM,uBAAqB,aAAa,aAAW,WAAS,CAAC,WAAS,aAAa,YAAU,oBAAmB,MAAK,OAAK,aAAa,QAAM;AAAA;AAAqB;AAAiC,yBAAe,MAAM,MAAK;AAAA;AAAM;AAAwB,UAAG;AAAqB,8BAAoB;AAAW,kBAAM,QAAQ;AAAY,iBAAO,EAAE,KAAG,MAAI,EAAE,KAAG;AAAA;AAAA,iBAAa;AAAwB,8BAAoB;AAAW,iBAAO,YAAY,QAAM,OAAO;AAAA;AAAA,iBAA0C,OAAO,YAAU;AAAa,8BAAoB;AAAA;AAAa,8BAAoB;AAAW,iBAAO,YAAY;AAAA;AAAO;AAAyB,4BAAoB,uBAAqB,KAAG;AAAM,eAAO;AAAA;AAAM;AAA2B,YAAG;AAAuB,iBAAO,oCAAoC,GAAE,GAAE,MAAK;AAAK,mBAAW,QAAQ,CAAC,MAAU;AAAA;AAAU;AAAuE,YAAG,kBAAgB;AAAc,sBAAY,CAAC,KAAM;AAAA,mBAAwC;AAAwB,sBAAY,CAAC,cAAe,gBAAe,KAAM;AAAA;AAA4B,wBAAY,QAAQ,SAAS;AAAgB,uBAAW,WAAS,QAAQ;AAAO,cAAG,CAAC;AAAQ;AAAA;AAAO,iBAAO,YAAY,CAAC,KAAM;AAAA;AAAuB,eAAO;AAAA;AAAE;AAAkB;AAAA;AAAQ;AAAqF,yBAAe,iBAAe;AAAE,oBAAU,YAAU;AAAA;AAAE;AAAkD,YAAG,QAAM,KAAG,OAAK,mBAAmB,UAAQ,OAAK;AAAK,iBAAM;AAAI,YAAG;AAAuB,oBAAQ,QAAQ,KAAK,qBAAoB,QAAM,GAAE,KAAI;AAAS,cAAG,QAAM;AAAY,mBAAM;AAAI,cAAG,QAAM;AAAY,mBAAM;AAAG,cAAG,QAAM;AAAK,mBAAO;AAAE,gBAAK,+CAA6C;AAAA;AAAS,0BAAc,QAAQ,KAAK,qBAAoB,QAAM;AAAG,cAAG,OAAK;AAAU,mBAAM;AAAG,qBAAS,YAAY;AAAM,qBAAS,OAAK;AAAQ,kBAAQ,MAAM,qBAAoB,oCAAkC,GAAE;AAAM,+BAAmB;AAAK,iBAAM,QAAM;AAAgB,mBAAK,YAAY;AAAM,gBAAG,OAAK;AAAM,qBAAM;AAAA;AAAI;AAA+C,mBAAK,QAAQ,KAAK,qBAAoB,oCAAkC;AAAA;AAAG,iBAAO;AAAA;AAAA;AAAG;AAA8C,eAAO,mCAAiC;AAAA;AAAE;AAA8C,eAAO,mCAAiC;AAAA;AAAE;AAA8C,2BAAmB,WAAW,MAAK,KAAI,MAAI;AAAA;AAAK;AAAyC,eAAO,UAAU;AAAA;AAAuB;AAAyD,0BAAgB,UAAU,SAAO;AAAE,oBAAU;AAAY,mBAAS,WAAW,cAAY;AAAG,gBAAM,QAAM;AAAE,qBAAU,GAAE,IAAE,aAAY;AAAK,8BAAoB,IAAE,KAAG,UAAU,IAAE;AAAA;AAAG,kBAAQ,0CAA0C,OAAM,aAAY,MAAK;AAAM,qBAAa;AAAO,eAAO;AAAA;AAAI,2DAAmD;AAAG;AAAsC,YAAG,CAAC,iBAAiB;AAAO,2BAAiB,QAAM;AAAA;AAAG,mBAAS,iBAAiB;AAAM,aAAK,SAAO;AAAE;AAAO,eAAM,KAAG,mBAAmB;AAAW,cAAG,OAAK,OAAK,OAAK;AAAK,kBAAI,MAAI,IAAE,CAAC;AAAE
,iBAAK,KAAK,oBAAoB,OAAK;AAAI,mBAAK;AAAA;AAAO,kBAAI,MAAI,IAAE,CAAC;AAAE,iBAAK,KAAK,oBAAoB,OAAK;AAAI,mBAAK;AAAA;AAAA;AAAG,eAAO;AAAA;AAAK;AAAuE,uDAA+C,SAAO;AAAY,gBAAM,QAAM;AAAE,qBAAU,GAAE,IAAE,aAAY;AAAK,yDAA+C,KAAG,oBAAoB,IAAE;AAAA;AAAG,2BAAiB,QAAM;AAAE,mBAAS,CAAC,eAAa,qBAAqB,SAAO,WAAW,CAAC,QAAM;AAAG,YAAG;AAAc,uBAAW,+CAA+C;AAAG,0BAAc,+CAA+C;AAAG,0BAAc,iBAAiB,QAAO;AAAW,iBAAO,KAAK,MAAM,MAAK;AAAA;AAAW,eAAO,KAAK,MAAM,MAAK;AAAA;AAAgD;AAAqC,eAAO,mBAAmB;AAAA;AAAO;AAAyC;AAAI,qBAAW,KAAK,OAAK,QAAO,aAAW,UAAQ;AAAI,qCAA2B,WAAW;AAAQ,iBAAO;AAAA;AAAA;AAAA;AAAa;AAAgD,wBAAc,kBAAgB;AAAE,sBAAY;AAA4B,YAAG,iBAAe;AAAS,iBAAO;AAAA;AAAM,4BAAkB;AAAM,0BAAgB;AAAW,YAAG,gBAAc;AAAa,iBAAO;AAAA;AAAM,0BAAgB;AAAS,2BAAgB,GAAE,WAAS,GAAE,WAAS;AAAG,kCAAsB,UAAS,KAAE,MAAG;AAAS,8BAAkB,KAAK,IAAI,mBAAkB,gBAAc;AAAW,wBAAY,KAAK,IAAI,aAAY,QAAQ,KAAK,IAAI,aAAY,eAAc,oBAAmB;AAAgB,4BAAgB,0BAA0B;AAAS,cAAG;AAAa,mBAAO;AAAA;AAAA;AAAM,eAAO;AAAA;AAAM,qBAAa,CAAC,UAAS,GAAE,YAAW,GAAE,YAAW,GAAE,SAAQ,GAAE,YAAW,GAAE,wBAAuB,GAAE,mBAAkB,GAAE,uBAAsB,GAAE,wBAAuB,GAAE,uBAAsB,GAAE,YAAW,GAAE,2BAA0B,MAAK,iBAAgB,MAAK,iBAAgB,MAAK,gCAA+B,OAAM,yBAAwB;AAAW,qBAAU,SAAS,cAAc,SAAO,GAAE,KAAG,GAAE,EAAE;AAAG,mBAAS,eAAe;AAAA;AAAG,iBAAS,gBAAc;AAAG,iBAAS,gBAAc;AAAA,SAAI,8BAA6B;AAAW,YAAG,CAAC,SAAS;AAAgC,qBAAW,KAAK,SAAS;AAAyB,mBAAS,iCAA+B;AAAA;AAAA,SAAO,eAAc,IAAG,WAAU;AAA6C;AAA2C,cAAG,KAAK,UAAQ,KAAK;AAAO,mBAAO;AAAM,yBAAa;AAAM,gBAAG,KAAK,OAAI,KAAK;AAAG,qBAAO;AAAA;AAAM,iBAAO;AAAA;AAAK,sBAAa,SAAS;AAAe,qBAAS,SAAS,cAAc;AAAG,cAAG,KAAK,kBAAgB,kBAAgB,uBAAuB,KAAK,UAAS;AAAW;AAAA;AAAA;AAAQ,iBAAS,cAAc,KAAK,CAAC,gBAA8B,YAAsB;AAAoB,iBAAS,cAAc,KAAK;AAAc,iBAAO,EAAE,aAAW,EAAE;AAAA;AAAA,SAAc,qBAAoB;AAAyB,qBAAU,GAAE,IAAE,SAAS,cAAc,QAAO,EAAE;AAAG,cAAG,SAAS,cAAc,GAAG,kBAAgB;AAAgB,qBAAS,cAAc,OAAO,GAAE;AAAG,cAAE;AAAA;AAAA;AAAA,SAAK,gCAA+B;AAAW,eAAO,SAAS,kBAAgB,SAAS,oBAAoB;AAAA,SAAqB,kBAAiB;AAAW,YAAG,CAAC,SAAS;AAAkC;AAAA;AAAO,qBAAU,GAAE,IAAE,SAAS,cAAc,QAAO,EAAE;AAAG,qBAAS,SAAS,cAAc;AAAG,mBAAS,cAAc,OAAO,GAAE;AAAG,YAAE;AAAE,eAAK,eAAe,MAAM,MAAK,KAAK;AAAA;AAAA,SAAY,gBAAe,GAAE,qBAAoB,MAAK,eAAc,IAAG,2BAA0B;AAAiC,qBAAU,GAAE,IAAE,SAAS,cAAc,QAAO,EAAE;AAAG,cAAG,SAAS,cAAc,GAAG,UAAQ,UAAS,EAAC,mBAAiB,mBAAiB,SAAS,cAAc,GAAG;AAAkB,qBAAS,eAAe;AAAA;AAAA;AAAA,SAAQ,gBAAe;AAAY,gBAAM,SAAS,cAAc;AAAG,UAAE,OAAO,oBAAoB,EAAE,iBAAgB,EAAE,mBAAkB,EAAE;AAAY,iBAAS,cAAc,OAAO,GAAE;AAAA,SAAI,yBAAwB;AAAuB,6BAAmB;AAA+B,YAAE,SAAS;AAAe,mBAAS,sBAAoB;AAAa,mBAAS;AAAmB,uBAAa,YAAY;AAAO,mBAAS;AAAmB,YAAE,SAAS;AAAA;AAAgB,YAAG,aAAa;AAAc,uBAAa,oBAAkB;AAAe,uBAAa,OAAO,iBAAiB,aAAa,iBAAgB,gBAAe,aAAa;AAAY,mBAAS,cAAc,KAAK;AAAc,mBAAS;AAAA;AAAoC,uBAAU,GAAE,IAAE,SAAS,cAAc,QAAO,EAAE;AAAG,gBAAG,SAAS,cAAc,GAAG,UAAQ,aAAa,UAAQ,SAAS,cAAc,GAAG,mBAAiB,aAAa;AAAiB,uBAAS,eAAe;AAAA;AAAA;AAAA;AAAA,SAAS,gCAA+B;AAAuE,uBAAa;AAAY,sBAAY,WAAW;AAAI,4BAAoB,WAAS,KAAG;AAAY,4BAAoB,UAAQ,KAAG,KAAG;AAAU,4BAAoB,UAAQ,KAAG,KAAG;AAAS,2CAAmC,cAAa,WAAU,kBAAiB,WAAU;AAAS,qBAAa;AAAA,SAAW,iCAAgC;AAAuB,gBAAO;AAAA,eAAmB;AAAE,mBAAO;AAAA,eAAO;AAAE,mBAAO,QAAQ;AAAA;AAA4C,mBAAO;AAAA;AAAA,SAAe,sBAAqB;AAAiB,YAAG,CAAC;AAAO,iBAAM;AAAG,YAAG,UAAQ;AAAO,iBAAM;AAAU,YAAG,UAAQ;AAAO,iBAAM;AAAU,eAAO,UAAQ,OAAO,WAAS,OAAO,WAAS;AAAA,SAAI,mBAAkB;AAAW,eAAO,SAAS,qBAAmB,SAAS;AAAA;AAA0B;AAAmC,qBAAW,gBAAgB,YAAU;AAAE,sBAAY,QAAQ;AAAQ,qBAAa,UAAS,SAAQ;AAAQ,eAAO;AAAA;AAAQ;AAA0G,uBAAa;AAAY,sBAAY,WAAW;AAAI,8BAAoB;AAAE,YAAG;AAAc,4BAAgB,gBAAgB;AAAA;AAAc,4BAAoB,WAAS,KAAG;AAAgB,4BAAoB,UAAQ,KAAG,KAAG;AAAM,4BAAoB,UAAQ,KAAG,KAAG;AAAO,2CAAmC,cAAa,WAAU,GAAE,iBAAgB;AAAS,qBAAa;AAAA;AAAU;AAAuG,uBAAa,eAAa,aAAa,gBAAc;AAAG,iEAAyD,cAAa,cAAa,OAAM;AAAA;AAAQ;AAA2C,eAAO,UAAQ,IAAE,aAAa,WAAS;AAAA;AAAQ,+BAAuB,CAAC,GAAE,OAAO,aAAW,cAAY,WAAS,GAAE,OAAO,WAAS,cAAY
,SAAO;AAAG;AAAmC,iBAAO,yBAAyB;AAAQ,yBAAe,mBAAmB,WAAU,QAAO,aAAW,cAAY,SAAS,cAAc,UAAQ;AAAW,eAAO;AAAA;AAAW;AAAyC,eAAO,kBAAkB;AAAA;AAAQ;AAAiF,qBAAW,wBAAwB;AAAQ,YAAG,CAAC;AAAO,iBAAM;AAAG,YAAG,OAAO;AAAiB,8BAAoB,OAAO,mBAAiB,KAAG;AAAM,8BAAoB,OAAO,kBAAgB,KAAG,KAAG;AAAA;AAAO,YAAG,OAAO,mBAAiB,CAAC,OAAO;AAA6B,cAAG,OAAO;AAAgB,qBAAO,OAAO;AAAgB,mCAAuB;AAAM,cAAG,OAAO,eAAa,OAAO,YAAY;AAAO,+BAAiB,OAAO,YAAY,MAAM,aAAa;AAAM,iCAAmB,aAAa,OAAK,KAAG,aAAa,OAAK,KAAG,aAAa,OAAK,OAAO,SAAO,aAAa,OAAK,OAAO;AAAA;AAAO,iBAAO,QAAM;AAAM,iBAAO,SAAO;AAAO,cAAG;AAAoB,mBAAO,YAAY,MAAM,SAAS,GAAE,GAAE,OAAM;AAAA;AAAA,mBAAiB,OAAO;AAAiB,6BAAiB,oBAAoB,OAAO,kBAAgB,KAAG;AAAG,gEAAsD,cAAa,QAAO,OAAM;AAAQ,iBAAO;AAAA;AAAO,iBAAM;AAAA;AAAG,eAAO;AAAA;AAAE;AAA8E,YAAG;AAAuB,iBAAO,oCAAoC,GAAE,GAAE,QAAO,OAAM;AAAQ,eAAO,mDAAmD,QAAO,OAAM;AAAA;AAAQ;AAAkE,qBAAW,wBAAwB;AAAQ,YAAG;AAAQ,iBAAO,mDAAmD,QAAO,OAAM;AAAA;AAAa,iBAAO,gDAAgD,QAAO,OAAM;AAAA;AAAA;AAAS;AAA0D,oBAAU,YAAU;AAAA;AAAE;AAAoD,mBAAS,WAAS;AAAE,eAAK,OAAK;AAAA;AAAE;AAAoD,kBAAQ,IAAI,aAAa;AAA0B,YAAG;AAAK,cAAI,yBAAuB;AAAwB,gBAAI,4BAA4B,OAAM;AAAA;AAAU,cAAI,yBAAuB;AAAqC,gBAAI,4BAA4B,MAAK,OAAM,OAAM;AAAA;AAAY,cAAI,2BAAyB;AAA4C,gBAAI,8BAA8B,MAAK,OAAM,MAAK,SAAQ;AAAA;AAAY,iBAAO;AAAA;AAAA;AAAG;AAAqD,kBAAQ,IAAI,aAAa;AAA2B,YAAG;AAAK,cAAI,uBAAqB;AAAW,mBAAO,IAAI;AAAA;AAA2B,cAAI,uBAAqB;AAAc,gBAAI,wBAAwB;AAAA;AAAM,cAAI,qBAAmB;AAAc,gBAAI,sBAAsB;AAAA;AAAM,cAAI,mBAAiB;AAAc,mBAAO,IAAI,oBAAoB;AAAA;AAAM,iBAAO;AAAA;AAAA;AAAG;AAAgD,kBAAQ,IAAI,aAAa;AAAsB,YAAG;AAAK,cAAI,iBAAe;AAAiB,gBAAI,oBAAoB,GAAE;AAAA;AAAO,iBAAO;AAAA;AAAA;AAAG,eAAO,CAAC,SAAQ,GAAE,WAAU,GAAE,SAAQ,IAAG,eAAc,IAAG,UAAS,IAAG,cAAa,IAAG,eAAc,IAAG,UAAS,IAAG,UAAS,IAAG,SAAQ,IAAG,MAAK,IAAG,UAAS,IAAG,gBAAe,MAAK,mBAAkB,IAAG,iBAAgB,IAAG,cAAa,IAAG,aAAY,IAAG,iBAAgB,GAAE,MAAK;AAAW,kCAAwB,IAAI,aAAa,GAAG;AAAuB,qBAAU,GAAE,IAAE,GAAG,uBAAsB;AAAK,aAAG,yBAAyB,KAAG,oBAAoB,SAAS,GAAE,IAAE;AAAA;AAAG,gCAAsB,IAAI,WAAW,GAAG;AAAuB,qBAAU,GAAE,IAAE,GAAG,uBAAsB;AAAK,aAAG,uBAAuB,KAAG,kBAAkB,SAAS,GAAE,IAAE;AAAA;AAAA,SAAK,aAAY;AAAgC,YAAG,CAAC,GAAG;AAAW,aAAG,YAAU;AAAA;AAAA,SAAY,UAAS;AAAgB,kBAAQ,GAAG;AAAU,qBAAU,MAAM,QAAO,IAAE,KAAI;AAAK,gBAAM,KAAG;AAAA;AAAK,eAAO;AAAA,SAAK,uBAAsB,KAAI,0BAAyB,CAAC,IAAG,wBAAuB,CAAC,IAAG,WAAU;AAAqC,qBAAW;AAAG,qBAAU,GAAE,IAAE,OAAM,EAAE;AAAG,oBAAQ,SAAO,oBAAoB,SAAO,IAAE,KAAG,KAAG;AAAG,oBAAQ,aAAa,oBAAoB,SAAO,IAAE,KAAG,IAAG,MAAI,IAAE,SAAU;AAAA;AAAK,eAAO;AAAA,SAAQ,eAAc;AAAwC,kBAAQ,OAAO,WAAW,SAAQ;AAAwB,YAAG,CAAC;AAAI,iBAAO;AAAE,qBAAW,GAAG,gBAAgB,KAAI;AAAwB,eAAO;AAAA,SAAQ,iBAAgB;AAAqC,qBAAW,QAAQ;AAAG,4BAAoB,SAAO,KAAG,KAAG;AAAgB,sBAAY,CAAC,QAAc,YAAW,wBAAuB,SAAQ,uBAAuB,cAAa,OAAM;AAAK,YAAG,IAAI;AAAO,cAAI,OAAO,cAAY;AAAQ,WAAG,SAAS,UAAQ;AAAQ,YAAG,OAAO,uBAAuB,8BAA4B,eAAa,uBAAuB;AAA2B,aAAG,eAAe;AAAA;AAAS,eAAO;AAAA,SAAQ,oBAAmB;AAAwB,WAAG,iBAAe,GAAG,SAAS;AAAe,eAAO,MAAI,QAAM,GAAG,kBAAgB,GAAG,eAAe;AAAM,eAAM,CAAE,kBAAe,CAAC;AAAA,SAAQ,YAAW;AAAwB,eAAO,GAAG,SAAS;AAAA,SAAgB,eAAc;AAAwB,YAAG,GAAG,mBAAiB,GAAG,SAAS;AAAe,aAAG,iBAAe;AAAK,YAAG,OAAO,aAAW;AAAS,mBAAS,0BAA0B,GAAG,SAAS,eAAe,MAAM;AAAQ,YAAG,GAAG,SAAS,kBAAgB,GAAG,SAAS,eAAe,MAAM;AAAO,aAAG,SAAS,eAAe,MAAM,OAAO,cAAY;AAAU,cAAM,GAAG,SAAS,eAAe;AAAQ,WAAG,SAAS,iBAAe;AAAA,SAAM,gBAAe;AAAkB,YAAG,CAAC;AAAQ,oBAAQ,GAAG;AAAe,YAAG,QAAQ;AAAmB;AAAO,gBAAQ,qBAAmB;AAAK,qBAAU,QAAQ;AAAM,8CAAsC;AAAO,+CAAuC;AAAO,0CAAkC;AAAO,eAAM,wBAAsB,OAAM,aAAa;AAA4B,6CAAmC,CAAC,qBAAoB,0BAAyB,4BAA2B,2BAA0B,iCAAgC,uBAAsB,0BAAyB,kCAAiC,kBAAiB,sBAAqB,0BAAyB,4BAA2B,iCAAgC,oBAAmB,0BAAyB,sBAAqB,kCAAiC,+BAA8B,4BAA2B,YAAW,iCAAgC,4BAA2B,gCAA+B,iCAAgC,0BAAyB,sCAAqC,mCAAkC;AAAyC,mBAAS,OAAM,4BAA0B;AAAG,aAAK,QAAQ;AAAc,cAAG,+BAA+B,QAAQ,QAAM;AAAI,mBAAM,aAAa;AAAA;AAAA;AAAA,SAAS,sBAAqB;AAAkB,gBAAM,GAAG,S
AAS;AAAS,qBAAW,GAAG,aAAa,WAAS,CAAC,UAAS,IAAG,kBAAiB,GAAE,oBAAmB,IAAG,2BAA0B;AAAI,qBAAW,OAAO;AAAS,0BAAgB,MAAM,oBAAoB,GAAE;AAAO,qBAAU,GAAE,IAAE,aAAY,EAAE;AAAG,kBAAM,MAAM,iBAAiB,GAAE;AAAG,qBAAS,EAAE;AAAK,iBAAO,mBAAiB,KAAK,IAAI,OAAO,kBAAiB,KAAK,SAAO;AAAG,cAAG,KAAK,MAAM,OAAK;AAAK,mBAAK,KAAK,MAAM,GAAE,KAAK,YAAY;AAAA;AAAM,oBAAQ,MAAM,mBAAmB,GAAE;AAAM,cAAG;AAAK,qBAAO,GAAG,SAAS,GAAG;AAAU,mBAAO,QAAM,CAAC,EAAE,MAAK;AAAI,eAAG,SAAS,MAAI;AAAI,yBAAU,GAAE,IAAE,EAAE,MAAK,EAAE;AAAG,sBAAM,OAAK,MAAI,IAAE;AAAI,oBAAI,MAAM,mBAAmB,GAAE;AAAG,mBAAG,GAAG,SAAS,GAAG;AAAU,iBAAG,SAAS,MAAI;AAAA;AAAA;AAAA;AAAA;AAAS,iDAAyC,CAAC,WAAU,aAAY;AAAoB;AAAgE,gCAAsB;AAAG,gBAAM,cAAY;AAAE,0BAAkB,WAAS,CAAC,CAAC,oBAAoB,IAAG,MAAG;AAAI,0BAAkB,WAAS,CAAC,CAAC,oBAAoB,IAAG,MAAG;AAAI,0BAAkB,aAAW,CAAC,CAAC,oBAAoB,IAAG,MAAG;AAAI,0BAAkB,eAAa,CAAC,CAAC,oBAAoB,IAAG,OAAI;AAAI,0BAAkB,wBAAsB,CAAC,CAAC,oBAAoB,IAAG,OAAI;AAAI,0BAAkB,2BAAyB,CAAC,CAAC,oBAAoB,IAAG,OAAI;AAAI,8BAAoB,oBAAoB,IAAG,OAAI;AAAI,0BAAkB,qBAAmB,qCAAqC;AAAiB,0BAAkB,kCAAgC,CAAC,CAAC,oBAAoB,IAAG,OAAI;AAAI,0BAAkB,eAAa,oBAAoB,IAAG,OAAI;AAAI,0BAAkB,eAAa,oBAAoB,IAAG,OAAI;AAAI,0BAAkB,4BAA0B,oBAAoB,IAAG,OAAI;AAAI,0BAAkB,sBAAoB,oBAAoB,IAAG,OAAI;AAAI,0BAAkB,2BAAyB,oBAAoB,IAAG,OAAI;AAAI,0BAAkB,+BAA6B,oBAAoB,IAAG,OAAI;AAAI,qBAAW,wBAAwB;AAAQ,YAAG,CAAC;AAAQ,iBAAM;AAAA;AAAG,YAAG,kBAAkB;AAAqB,iBAAM;AAAA;AAAG,4BAAkB,GAAG,cAAc,QAAO;AAAmB,eAAO;AAAA;AAAc;AAAiD,eAAO,oCAAoC,IAAG;AAAA;AAAI,iBAAS,CAAC,WAAU;AAAmB,0BAAgB;AAAgE,eAAO,YAAY,KAAK,UAAU,MAAM;AAAA,SAAI,gBAAe;AAA+B,iBAAO;AAAE,qBAAU,MAAM,SAAO,GAAE,KAAG,GAAE;AAAK,qBAAS,MAAM;AAAG,cAAG,SAAO;AAAK,kBAAM,OAAO,GAAE;AAAA,qBAAW,SAAO;AAAM,kBAAM,OAAO,GAAE;AAAG;AAAA,qBAAa;AAAI,kBAAM,OAAO,GAAE;AAAG;AAAA;AAAA;AAAM,YAAG;AAAgB,iBAAK,IAAG;AAAM,kBAAM,QAAQ;AAAA;AAAA;AAAO,eAAO;AAAA,SAAO,WAAU;AAAe,yBAAe,KAAK,OAAO,OAAK,qBAAkB,KAAK,OAAO,QAAM;AAAI,eAAK,KAAK,eAAe,KAAK,MAAM,KAAK,OAAO;AAAY,iBAAM,CAAC,CAAC;AAAA,YAAI,CAAC,YAAY,KAAK;AAAK,YAAG,CAAC,QAAM,CAAC;AAAY,iBAAK;AAAA;AAAI,YAAG,QAAM;AAAe,kBAAM;AAAA;AAAI,eAAO,cAAW,MAAI,MAAI;AAAA,SAAM,SAAQ;AAAe,qBAAW,KAAK,UAAU,cAAW,OAAO,UAAO,OAAO;AAAG,YAAG,CAAC,QAAM,CAAC;AAAK,iBAAM;AAAA;AAAI,YAAG;AAAK,gBAAI,IAAI,OAAO,GAAE,IAAI,SAAO;AAAA;AAAG,eAAO,OAAK;AAAA,SAAK,UAAS;AAAe,YAAG,SAAO;AAAI,iBAAM;AAAI,wBAAc,KAAK,YAAY;AAAK,YAAG,cAAY;AAAG,iBAAO;AAAK,eAAO,KAAK,OAAO,YAAU;AAAA,SAAI,SAAQ;AAAe,eAAO,KAAK,UAAU,MAAM;AAAA,SAAI,MAAK;AAAW,oBAAU,MAAM,UAAU,MAAM,KAAK,WAAU;AAAG,eAAO,KAAK,UAAU,MAAM,KAAK;AAAA,SAAO,OAAM;AAAc,eAAO,KAAK,UAAU,IAAE,MAAI;AAAA;AAAK,qBAAa,CAAC,UAAS,IAAG,SAAQ,CAAC,MAAK,IAAG,KAAI,WAAU;AAAsB,sBAAW,SAAS,QAAQ;AAAQ,YAAG,SAAO,KAAG,SAAO;AAAI,UAAC,YAAS,IAAE,MAAI,KAAK,kBAAkB,SAAO;AAAI,kBAAO,SAAO;AAAA;AAAO,kBAAO,KAAK;AAAA;AAAA,SAAQ,SAAQ,QAAU,KAAI;AAAW,iBAAS,WAAS;AAAE,kBAAQ,oBAAoB,SAAS,UAAQ,KAAG;AAAG,eAAO;AAAA,SAAK,QAAO;AAAc,kBAAQ,aAAa;AAAK,eAAO;AAAA,SAAK,OAAM;AAAmB,eAAO;AAAA;AAAM;AAAuB,YAAG;AAAuB,iBAAO,oCAAoC,GAAE,GAAE;AAAI,eAAO;AAAA;AAAE;AAA8D,YAAG;AAAuB,iBAAO,oCAAoC,GAAE,GAAE,IAAG,YAAW,aAAY,QAAO;AAAA;AAAW;AAAuC,YAAG;AAAuB,iBAAO,oCAAoC,GAAE,GAAE,IAAG,KAAI,QAAO;AAAM,kBAAQ;AAAE,qBAAU,GAAE,IAAE,QAAO;AAAK,oBAAQ,oBAAoB,MAAI,IAAE,KAAG;AAAG,oBAAQ,oBAAoB,MAAK,KAAE,IAAE,MAAI;AAAG,uBAAU,GAAE,IAAE,KAAI;AAAK,qBAAS,UAAU,IAAG,mBAAmB,MAAI;AAAA;AAAI,iBAAK;AAAA;AAAI,4BAAoB,QAAM,KAAG;AAAI,eAAO;AAAA;AAAE;AAAuC,sBAAY,QAAQ,aAAa;AAAM,YAAG;AAAQ;AAAA;AAAU;AAA4C,YAAG,QAAQ,iBAAe;AAAM,kBAAQ,eAAa;AAAA;AAAG,gBAAQ,aAAa,KAAK;AAAW,qBAAW,SAAQ;AAAA;AAAA;AAAO;AAAsC,YAAG;AAAuB,gBAAK;AAAwF,qBAAW,QAAQ;AAAe,YAAG,OAAO,YAAU;AAAU,gBAAK;AAAkB,YAAG,CAAC,aAAa;AAAY,gBAAK;AAAkC,gBAAQ,eAAe,KAAK;AAAQ,wBAAc,QAAQ,MAAI;AAAG,qBAAU,GAAE,IAAE,KAAI,EAAE;AAAG,8BAAoB,YAAU,IAAE,KAAG,KAAG;AAAA;AAAE,wBAAc,aAAa,YAAU,aA
[generated build artifact: base64 VLQ source-map mappings for the bundled dist output (compiled module table for the demo's dependencies) — machine-generated, not human-readable]
,IAAI,MAAM;AAC/B,WAAK,mBAAmB;AACxB,WAAK,kBAAkB;AAAA;AAAA,IAGzB;AACE,WAAK,cAAc,EAAE,KAAK,oBAAoB;AAC9C,WAAK,KAAK,KAAK;AAAA;AAAA,IAGjB;AACE,mBAAY,KAAK,cAAc;AAC/B,WAAK,SAAS,GAAG,KAAK;AACtB,WAAK,KAAK;AACV,WAAK,cAAc,KAAK,mBAAmB,KAAK;AAChD,aAAO;AAAA;AAAA,IAGT;AACE,aAAO,KAAK,qBAAqB;AAAA;AAAA,IAGnC;AACE,aAAO,KAAK,mBAAmB;AAAA;AAAA,IAGjC;AACE,aAAO,KAAK,cAAc,MAAM,GAAG,KAAK,mBAAmB;AAAA;AAAA,IAG7D;AACE,aAAO,KAAK,cAAc;AAAA;AAAA,IAG5B;AACE,aAAO,IAAI,KAAK,KAAK,KAAK,KAAK,IAAI;AACjC,aAAK,SAAS,GAAG,KAAK;AACtB,YAAI,KAAK;AAAA;AAAA;AAAA,IAIb;AACE,aAAO,IAAI,KAAK,KAAK;AACnB,gBAAQ,IAAI;AACZ,YAAI,IAAI,KAAK,oBAAoB,KAAK,KAAK,GAAG,IAAI;AAAI;AACtD,YAAI,CAAC,KAAK,KAAK,GAAG;AAAI;AACtB,aAAK,SAAS,GAAG;AACjB,YAAI;AAAA;AAAA;AAAA,IAIR;AACE,aAAO,KAAK,gBAAgB,KAAK,cAAc;AAAA;AAAA,IAGjD;AACE,aAAO,KAAK,WAAW,KAAK,KAAK,WAAW;AAAA;AAAA,IAG9C;AACE,gBAAU,KAAK,cAAc;AAC7B,WAAK,cAAc,KAAK,KAAK,cAAc;AAC3C,WAAK,cAAc,KAAK;AAAA;AAAA;AAG5B,UAAQ,UAAU;AAAA;;;ACvElB,IAAA;AAAA,mBAA0B;AAE1B;AACE,4BAAwB,OAAO;AAC/B,uBAAmB;AACnB,mBAAe,KAAK,IAAI,WAAW,oBAAoB;AACvD,iBAAa,KAAK,IAAI,WAAW,qBAAqB,GAAG;AACzD,wBAAoB,QAAQ,WAAW,MAAM,EAAE;AAC7C,qBAAe,KAAK,IAAI,WAAW,oBAAoB;AACvD,mBAAa,KAAK,IAAI,WAAW,qBAAqB,GAAG;AACzD,0BAAoB,QAAQ,WAAW,MAAM,EAAE;AAC7C,YAAI,OAAO,IAAI,UAAU,UAAU,cAAc;AAC/C,yBAAe;AACf;AAAA;AAAA;AAGJ,UAAI,CAAC;AACH;AAAA;AAAA;AAGJ,WAAO;AAAA;AAOT;AACE,0CAAsC,OAAO;AAC7C,kBAAc,IAAa,iBAAQ,SAAS,QAAQ,cAAc,EAAG,WAAY;AACjF,wBAAoB,GAAG,WAAW,QAAQ,EAAE;AAC1C,0BAAoB,GAAG,WAAW,OAAO,EAAE;AACzC,8BAAsB,GAAG,aAAa,cAAc,EAAE;AACpD,wBAAc,OAAO,IAAI,UAAU,UAAU;AAE7C,cAAI,QAAQ;AAAgB;AAE5B,cAAI,4BAA4B,YAAY,OAAO,UAAU,UAAU,oBAAoB;AACzF,kBAAM,QAAQ,CAAE,OAAO,MAAM,CAAE,UAAU,UAAU,IAAI;AAAA;AAAA;AAAA;AAAA;AAK/D,WAAO;AAAA;AAET,UAAQ,0BAA0B;AAAA;;;AC7ClC,IAAA;AAAA,UAAQ,YAAY;AAAA,IAClB;AAAA,IAAQ;AAAA,IAAW;AAAA,IAAY;AAAA,IAAW;AAAA,IAAY;AAAA,IACtD;AAAA,IAAiB;AAAA,IAAa;AAAA,IAAc;AAAA,IAAa;AAAA,IACzD;AAAA,IAAW;AAAA,IAAY;AAAA,IAAY;AAAA,IAAa;AAAA,IAAa;AAAA;AAE/D,UAAQ,gBAAgB,QAAQ,UAAU;AAC1C,UAAQ,UAAU,QAAQ,UAAU,OAAO;AACzC,WAAO,aAAa;AACpB,WAAO;AAAA,KACN;AACH,6BAA2B;AAAA,IACzB,CAAC,WAAW;AAAA,IAAiB,CAAC,aAAa;AAAA,IAC3C,CAAC,aAAa;AAAA,IAAc,CAAC,WAAW;AAAA,IACxC,CAAC,YAAY;AAAA,IAAc,CAAC,YAAY;AAAA,IACxC,CAAC,cAAc;AAAA,IAAkB,CAAC,cAAc;AAAA,IAChD,CAAC,YAAY;AAAA,IAAc,CAAC,aAAa;AAAA,IACzC,CAAC,gBAAgB;AAAA,IAAkB,CAAC,WAAW;AAAA;AAQjD,UAAQ,YAAY;AAAA,IAClB,CAAC,QAAQ;AAAA,IAAY,CAAC,WAAW;AAAA,IAAY,CAAC,QAAQ;AAAA,IACtD,CAAC,YAAY;AAAA,IAAa,CAAC,QAAQ;AAAA,IACnC,CAAC,gBAAgB;AAAA,IAAc,CAAC,aAAa;AAAA,IAC7C,CAAC,gBAAgB;AAAA,IAAY,CAAC,WAAW;AAAA,IACzC,CAAC,YAAY;AAAA,IAAc,CAAC,QAAQ;AAAA,IACpC,CAAC,iBAAiB;AAAA,IAAe,CAAC,cAAc;AAAA,IAChD,CAAC,iBAAiB;AAAA,IAAa,CAAC,YAAY;AAAA,IAC5C,CAAC,aAAa;AAAA;AAEhB,UAAQ,uBAAuB,mBAAmB,IAAI,8BAA+B,CAAC,QAAQ,QAAQ,aAAa,QAAQ,QAAQ;AACnI,UAAQ,eAAe;AAAA,IACrB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA;AAAA;;;AC3DF,IAAA;AAAA,cAAqB;AAErB;AACE,WAAO;AAAA,MACL,GAAG,QAAQ,IAAI,GAAG,GAAG;AAAA,MACrB,GAAG,QAAQ,IAAI,GAAG,GAAG,WAAe;AAAA;AAAA;AAGxC,UAAQ,iBAAiB;AAEzB;AACE,WAAQ,UAAU,UAAU,gBAAiB;AAC7C,WAAQ,GAAG,KAAM,eAAe,UAAU,UAAU,UAAU;AAC9D,WAAO;AAAA,MACL,GAAG,KAAK,WAAW,eAAe;AAAA,MAClC,GAAG,KAAK,WAAW,eAAe;AAAA;AAAA;AAGtC,UAAQ,iBAAiB;AAEzB;AACE,mBAAe,IAAI,MAAM;AACzB,iBAAa,GAAG,IAAI,MAAM;AACxB,aAAO,KAAK;AAAA;AAEd,WAAO;AAAA;AAET,UAAQ,YAAY;AAEpB;AACE,QAAI,IAAI;AAAK,aAAO;AACpB,QAAI,IAAI;AAAK,aAAO;AACpB,WAAO;AAAA;AAET,UAAQ,QAAQ;AAEhB;AACE,eAAW,KAAK;AAChB,eAAW,KAAK;AAChB,WAAO,KAAK,KAAK,KAAK;AAAA;AAExB,UAAQ,kBAAkB;AAE1B;AACE,WAAO,CAAE,GAAG
,EAAE,IAAI,EAAE,GAAG,GAAG,EAAE,IAAI,EAAE;AAAA;AAEpC,UAAQ,aAAa;AAErB;AACE,WAAO,CAAE,GAAG,OAAM,EAAE,GAAG,MAAK,OAAM,GAAG,OAAM,EAAE,GAAG,MAAK;AAAA;AAEvD,UAAQ,cAAc;AAAA;;;ACnDtB,IAAA;AAAA,oBAA2B;AAC3B,kBAAyB;AAEzB,+BAA6B,AAAU,oBAAU,IAAI,qCAAsC,CAAC,AAAU,kBAAQ,iBAAiB,AAAU,kBAAQ;AACjJ,6BAA2B,qBAAqB,IAAI,sBAAsB;AAC1E,6BAA2B,qBAAqB,IAAI,qBAAqB;AACzE;AACE,qBAAiB,cAAc,MAAM,KAAK;AAC1C,WAAO;AAAA,MACL,GAAG,cAAc,IAAI,MAAM,GAAG,MAAM,GAAG;AAAA,MACvC,GAAG,cAAc,IAAI,MAAM,GAAG,MAAM,GAAG,WAAW;AAAA;AAAA;AAGtD;AACE,WAAO;AAAA,MACL,GAAG,AAAQ,cAAM,KAAK,MAAM,MAAM,IAAI,eAAe,GAAG,SAAS;AAAA,MACjE,GAAG,AAAQ,cAAM,KAAK,MAAM,MAAM,IAAI,eAAe,GAAG,QAAQ;AAAA;AAAA;AAUpE,qJAAmJ;AACjJ,4BAAwB,aAAa;AAErC,kCAA8B,yBAAyB,eAAe,UAAU,cAAc,QAAQ;AACtG,yBAAqB,gBAAgB,QAAQ,uBAAuB;AACpE,2BAAuB,AAAQ,mBAAW,eAAe,UAAU;AACnE,yBAAqB;AACrB,iBAAa,GAAG,IAAI,kBAAkB;AACpC,oCAA8B,yBAAyB,gBAAgB,cAAc,QAAQ;AAC7F,0BAAoB,AAAQ,uBAAe,sBAAsB,GAAG,sBAAsB,GAAG,kBAAkB;AAC/G,uBAAiB,AAAQ,mBAAW;AAAA,QAClC,GAAG,sBAAsB,IAAI;AAAA,QAC7B,GAAG,sBAAsB,IAAI;AAAA,SAC5B,CAAE,GAAG,YAAY,GAAG,GAAG,YAAY;AAAA;AAExC,kCAA8B,yBAAyB,gBAAgB,cAAc,QAAQ;AAC7F,kBAAc,aAAa,IAAI,sBAAsB,GAAG,sBAAsB,GAAG;AACjF,WAAO,CAAE,UAAU,gBAAgB,MAAM,AAAU,oBAAU,mBAAmB;AAAA;AAQlF;AACE,qBAAiB,OAAO,MAAM;AAC9B,qBAAiB,mBAAmB;AACpC,8BAA0B,IAAI,MAAM;AAEpC,WAAQ,gBAAgB,oBAAqB;AAC7C,sBAAkB,AAAQ,uBAAe,UAAU,cAAc;AACjE,sBAAkB,SAAS,MAAM;AAAA,MAC/B,OAAO;AAAA,MACP,MAAM,AAAU,oBAAU,SAAS;AAAA,MACnC,UAAU;AAAA;AAIZ,oBAAgB,WAAW,GAAG,QAAQ,GAAG,EAAE;AACzC,+BAAyB,mBAAmB;AAC5C,+BAAyB,mBAAmB;AAC5C,UAAI,kBAAkB,qBAAqB,CAAC,kBAAkB;AAC5D,0BAAkB,oBAAoB,yBAAyB,MAAM,kBAAkB,mBAAmB,kBAAkB,QAAQ,SAAS,cAAc;AAAA;AAAA;AAK/J,oBAAgB,GAAG,OAAO,UAAU,EAAE;AACpC,+BAAyB,mBAAmB;AAC5C,+BAAyB,mBAAmB;AAC5C,UAAI,kBAAkB,qBAAqB,CAAC,kBAAkB;AAC5D,0BAAkB,oBAAoB,yBAAyB,MAAM,kBAAkB,mBAAmB,kBAAkB,QAAQ,SAAS,cAAc;AAAA;AAAA;AAG/J,WAAO;AAAA;AAET,UAAQ,aAAa;AAAA;;;ACnFrB,IAAA;AAAA,qBAA4B;AAC5B,qBAA4B;AAC5B,kBAAyB;AAEzB,yEAAwE,GAAG;AACzE,WAAO,MAAM,KAAK,EAAG;AACnB,oCAA8B,UAAU,YAAY;AACpD,aAAO,AAAQ,wBAAgB,GAAG,GAAG,sBAAsB,GAAG,sBAAsB,MAAM;AAAA;AAAA;AAO9F;AACE,wCAAoC,kBAAkB,OAAO,UAAW,UAAU;AAChF,UAAI,CAAC,oCAAoC,eAAe,kBAAkB,UAAU;AAClF,kBAAU;AAAA;AAEZ,aAAO;AAAA,OACN;AACH,WAAO,8BAA8B,kBAAkB;AAAA;AAKzD,8BAA4B;AAwD5B,8JAA4J,iBAAiB;AAC3K,kBAAc;AACd,kBAAc,AAAW,mCAAwB,gBAAgB,qBAAqB;AACtF,6BAAyB,YAAY;AAGrC,WAAO,MAAM,SAAS,qBAAqB,CAAC,MAAM;AAEhD,mBAAa,MAAM;AAInB,8BAAwB,AAAQ,uBAAe,KAAK,MAAM,cAAc;AACxE,UAAI,oCAAoC,OAAO,kBAAkB,iBAAiB,KAAK,KAAK;AAAK;AAEjG,wBAAkB,AAAW,sBAAW,MAAM,cAAc,eAAe,cAAc,wBAAwB;AACjH,oBAAc,iBAAiB,OAAO,kBAAkB;AACxD,YAAM,KAAK,CAAE,WAAW;AAAA;AAE1B,WAAO;AAAA;AAET,UAAQ,sBAAsB;AAAA;;;ACvG9B,IAAA;AAAA,cAAqB;AAErB;AACE,WAAQ,IAAI,iBAAiB,IAAI;AAAA;AAGnC;AACE,WAAO,AAAI,yBAAqB,OAAO;AACrC,UAAI,gCAAgC,UAAU,WAAW,OAAO,UAAU,YAAY,OAAO;AAC3F,eAAO;AAAA;AAET,aAAO,KAAK,CAAC,UAAU,YAAY,UAAU;AAC7C,aAAO;AAAA,OACN;AAAA;AAEL,UAAQ,uBAAuB;AAE/B,SAAQ,mBAAmB,qBAAsB;AACjD;AACE,WAAO,UAAU,OAAO,EAAG,MAAM,MAAM,MAAM,QAAU,WAAY,GAAG,QAAW;AAAA,MAC/E,MAAM,KAAK,IAAI,MAAM;AAAA,MACrB,MAAM,KAAK,IAAI,MAAM;AAAA,MACrB,MAAM,KAAK,IAAI,MAAM;AAAA,MACrB,MAAM,KAAK,IAAI,MAAM;AAAA,QACnB;AAAA,MACF,MAAM;AAAA,MACN,MAAM;AAAA,MACN,MAAM;AAAA,MACN,MAAM;AAAA;AAAA;AAGV,UAAQ,iBAAiB;AAEzB;AACE,WAAQ,MAAM,MAAM,MAAM,QAAS,eAAe;AAClD,WAAO,CAAC,CAAE,GAAG,MAAM,GAAG,OAAQ,CAAE,GAAG,MAAM,GAAG,OAAQ,CAAE,GAAG,MAAM,GAAG,OAAQ,CAAE,GAAG,MAAM,GAAG;AAAA;AAE1F,UAAQ,uBAAuB;AAE/B;AACE,WAAO,QAAQ,IAAI,QAAQ,IAAI,aAAY,QAAO;AAAA;AAEpD,UAAQ,oBAAoB;AAE5B;AACE,WAAO;AAAA,MACL,OAAO,KAAK;AAAA,MACZ,WAAW,KAAK,UAAU,IAAI,EAAG,OAAO,MAAM,cAAgB;AAAA,QAC5D;AAAA,QACA;AAAA,QACA,UAAU,CAAE,GAAG,SAAS,IAAI,QAAQ,GAAG,SAAS,IAAI;AAAA;AAAA;AAAA;AAI1D,UAAQ,YAAY;AAEpB;AACE,kBAAc,OAAM,QAAQ;AA
C5B,oBAAgB,MAAM,eAAe,CAAC,SAAS;AAC/C,UAAM;AACN,WAAO;AAAA;AAET,UAAQ,WAAW;AAEnB;AACE,wBAAoB,MAAM,IAAI,UAAU,UAAU,MAAM,SAAS,uBAAuB,QAAQ;AAChG,WAAO;AAAA;AAET,UAAQ,oBAAoB;AAAA;;;ACpE5B,IAAA;AACA,yBAAgC;AAChC,yBAAgC;AAChC,iBAAsB;AAHtB;AAAA,IAME;AACE,WAAK,YAAY;AACjB,WAAK,eAAe;AAAA;AAAA,UAGhB;AACJ,aAAO,IAAI,QAAQ;AACjB,uBAAe,MAAM,MAAM;AAC3B,sBAAc,MAAM,MAAM;AAC1B,wBAAgB,AAAK,gBAAS,OAAO,CAAC,QAAO,KAAK,WAAW,QAAO,KAAK;AACzE,oBAAY,KAAK,UAAU,QAAQ;AACnC,iCAAyB,MAAM,AAAK,yBAAkB,CAAC,IAAI,eAAe,IAAI,SAAS,IAAI,iBAAiB,IAAI;AAChH,6BAAqB,iBAAiB;AACtC,8BAAsB,iBAAiB;AACvC,uCAA+B,iBAAiB;AAChD,uCAA+B,iBAAiB;AAChD,sBAAc,MAAM,AAAe,mCAAoB,cAAc,eAAe,wBAAwB,wBAAwB,KAAK,cAAc,QAAO,KAAK,eAAe,QAAO,KAAK,gBAAgB,QAAO,KAAK;AAC1N,4BAAoB,AAAK,yBAAkB,OAAO,CAAC,QAAQ,QAAQ,CAAC,QAAO,KAAK,WAAW,QAAO,KAAK;AACvG,YAAI,cAAc;AAClB,YAAI,QAAQ;AACZ,YAAI,gBAAgB;AACpB,YAAI,gBAAgB;AACpB,gBAAQ;AACR,gBAAQ;AAAA;AAAA;AAAA,IAIZ;AACE,WAAK,UAAU;AAAA;AAAA;AAGnB,UAAQ,UAAU;AAElB;AACE,uBAAmB,MAAM,eAAe,QAAO,KAAK;AACpD,sBAAkB,IAAmB,yBAAU,YAAY,KAAK;AAEhE,YAAQ,IAAI,sBAAsB,QAAO,KAAK,UAAU,MAAM,YAAY;AAC1E,WAAO,IAAI,QAAQ;AAAA;AAErB,UAAQ,OAAO;AAAA;;;AC9Cf,IAAA;AAAA,yBAAgC;AAChC,uBAA8B;AAC9B,yBAAgC;AAChC,oBAA2B;AAC3B,iBAAsB;AAEtB,UAAQ,OAAoB;AAC5B,UAAQ,UAAuB;AAE/B,UAAQ,YAA2B;AACnC,UAAQ,sBAAqC;AAC7C,UAAQ,eAAyB;AACjC,UAAQ,UAAoB;AAC5B,UAAQ,YAAsB;AAC9B,UAAQ,YAAsB;AAC9B,UAAQ,uBAA4B;AACpC,UAAQ,iBAAsB;AAC9B,UAAQ,uBAA4B;AACpC,UAAQ,oBAAyB;AACjC,UAAQ,YAAiB;AAAA;;;ACnBzB,IAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAqBE;AACE,WAAK,QAAQ;AACb,WAAK,UAAU,iBAAiB,IAAI,YAAY,CAAC,OAAO,UAAU,OAAO;AACzE,WAAK,gBAAgB,GAAG,SAAS,KAAK;AACtC,WAAK,kBAAkB,GAAG,SAAS,CAAC,WAAW;AAC/C,WAAK,wBAAwB,GAAG,SAAS,CAAC,YAAY,GAAG,YAAY;AAAA;AAAA,IAGvE;AACE,aAAO,GAAG,KAAK;AACb,2BAAmB,GAAG,MAAM,OAAO,CAAC,GAAG,IAAI,CAAC,IAAI;AAChD,yBAAiB,GAAG,MAAM,OAAO,CAAC,GAAG,IAAI,CAAC,IAAI;AAC9C,gCAAwB,GAAG,IAAI,GAAG,IAAI,YAAY,KAAK,kBAAkB,KAAK;AAC9E,6BAAqB,GAAG,IAAI,UAAU,KAAK;AAC3C,4BAAoB,GAAG,IAAI,GAAG,IAAI,iBAAiB,eAAe,KAAK;AACvE,0BAAkB,GAAG,IAAI,GAAG,IAAI,iBAAiB,eAAe,KAAK;AACrE,eAAO,GAAG,SAAS,CAAC,aAAa,YAAY;AAAA;AAAA;AAAA,IAIjD;AACE,aAAO,GAAG,KAAK;AACb,0BAAkB,GAAG,IAAI,GAAG,IAAI,iBAAiB,QAAQ,CAAC,IAAI,GAAG,KAAK,KAAK,kBAAkB,KAAK,QAAQ;AAC1G,eAAO,GAAG,IAAI,WAAW,KAAK;AAAA;AAAA;AAAA,UAI5B;AACJ,sBAAgB,KAAK,MAAM,QAAQ;AACnC,0BAAoB,QAAQ;AAC5B,cAAQ;AACR,qBAAe,GAAG,KAAK,MAAM,GAAG,QAAQ,GAAG,MAAM,aAAa,CAAC,GAAG,IAAI,CAAC,IAAI,KAAK;AAChF,wBAAkB,OAAO;AACzB,uBAAiB,GAAG,MAAM,aAAa,CAAC,GAAG,IAAI,CAAC,IAAI;AACpD,oBAAc,KAAK,eAAe;AAClC,eAAS;AACT,wBAAkB,MAAM,GAAG,MAAM,uBAAuB,OAAO,QAAQ,QAAO,UAAU,QAAO,cAAc,QAAO;AACpH,uBAAiB,UAAU;AAE3B,aAAO;AACP,gBAAU;AACV,oBAAc;AACd,6BAAuB;AACrB,YAAI,UAAU,aAAa,QAAO;AAChC,8BAAoB,GAAG,MAAM,OAAO,CAAC,UAAU,IAAI,CAAC,GAAG;AACvD,mCAAyB,GAAG,MAAM,aAAa,CAAC,UAAU,IAAI,CAAC,GAAG;AAClE,gCAAsB,GAAG,KAAK,MAAM,KAAK,mBAAmB,kBAAkB,UAAU,QAAQ,CAAC,IAAI;AACrG,2BAAiB;AACjB,gBAAM,KAAK,CAAE,KAAK,aAAa,eAAe,YAAY,UAAU;AAAA;AAAA;AAGxE,kBAAY;AACZ,YAAM;AACN,aAAO;AAAA;AAAA,UAGH;AACJ,0BAAoB,MAAM,MAAM;AAChC,yBAAmB,MAAM,MAAM;AAC/B,qBAAc,GAAG,KAAK,MAAM,MAAM,eAAe,CAAC,QAAO,WAAW,QAAO,YAAY,IAAI,OAAO,IAAI;AACtG,0BAAoB,MAAM,KAAK,SAAS,QAAO;AAC/C,aAAM;AACN,UAAI,CAAC,eAAe,YAAY,WAAW;AAAG,eAAO;AACrD,oBAAc;AACd,+BAAyB;AACvB,sBAAc,WAAW,IAAI;AAC7B,2BAAmB,MAAM,MAAM,GAAG;AAClC,yBAAiB,MAAM,MAAM,GAAG;AAChC,8BAAsB,WAAW,cAAc;AAC/C,mBAAW,IAAI;AACf,mBAAW,cAAc;AACzB,cAAM,KAAK,AAAI,oBAAoB,CAAE,YAAY,UAAU,eAAe,YAAY,WAAW,aAAc,CAAC,aAAa,QAAO,WAAW,cAAc,QAAO;AAAA;AAEtK,aAAO;AAAA;AAAA;AAGX,UAAQ,eAAe;AAAA;;;ACjGvB,IAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAqBA,gCAA8B,CAAC,GAAG;AAClC,kCAAgC
;AAChC,gCAA8B,CAAC,GAAG;AAClC,kCAAgC;AAChC,4BAA0B,CAAC,GAAG,GAAG,GAAG,IAAI,IAAI,GAAG;AAC/C,4CAA0C;AAC1C,qDAAmD;AA3BnD;AAAA,IA8BE;AACE,WAAK,cAAc;AACnB,WAAK,eAAe;AACpB,WAAK,YAAY;AACjB,WAAK,cAAc;AACnB,WAAK,UAAU;AACf,WAAK,gBAAgB;AAAA;AAAA,IAGvB;AACE,mCAA6B,cAAc,IAAI;AAC7C,sCAA8B,CAAC,GAAG,OAAO;AACzC,eAAO,AAAK,YAAY,uBAAuB;AAAA;AAEjD,4BAAsB,KAAK,8BAA8B;AACzD,aAAO,AAAI,WAAW,AAAI,YAAY,AAAI,SAAS,eAAe,yBAAyB;AAAA;AAAA,IAG7F;AACE,0BAAoB,KAAK,8BAA8B;AACvD,4BAAsB,AAAI,WAAW,AAAI,YAAY,AAAI,SAAS,aAAa,yBAAyB;AACxG,4BAAsB;AACtB,mBAAa,GAAG,IAAI,kBAAkB,QAAQ;AAC5C,sBAAc,KAAK,UAAU,kBAAkB,IAAI,MAAM,GAAG;AAAA;AAE9D,oBAAc,gBAAgB;AAC9B,aAAO;AAAA;AAAA,IAGT;AACE,sBAAgB,AAAI,WAAW;AAC/B,0BAAoB,CAAC,QAAQ,KAAK,KAAK,WAAW,QAAQ,KAAK,KAAK;AACpE,2BAAqB,UAAU,IAAI,WAAW;AAAA,QAC5C,YAAY,KAAM,OAAM,KAAK,KAAK,YAAY;AAAA,QAC9C,YAAY,KAAM,OAAM,KAAK,KAAK,YAAY;AAAA,QAC9C,MAAM;AAAA;AAER,mCAA6B,AAAK,oBAAoB,OAAO,CAAC,GAAG;AACjE,4BAAsB,aAAa,IAAI;AACrC,wBAAgB,AAAK,YAAY,OAAO;AACxC,eAAO,CAAC,GAAG,SAAS,MAAM;AAAA;AAE5B,oCAA8B,AAAK,sBAAsB;AACzD,wBAAkB,CAAC,GAAG,AAAI,aAAa,OAAO;AAC9C,gCAA0B;AAAA,QACxB,AAAK,IAAI,WAAW,sBAAsB;AAAA,QAC1C,AAAK,IAAI,WAAW,sBAAsB;AAAA;AAE5C,aAAO,cAAc,IAAI,WAAW;AAAA,QAClC,MAAM,KAAK,kBAAkB;AAAA,QAC7B,MAAM,KAAK,kBAAkB;AAAA,QAC7B,MAAM;AAAA;AAAA;AAAA,UAIJ;AACJ,WAAK;AACL,wBAAkB;AAGlB;AACA,UAAK,KAAK,UAAU,QAAO,cAAe,CAAC,QAAO;AAChD,gBAAQ,MAAM,KAAK,YAAY,mBAAmB,QAAO;AAEzD,YAAK,OAAM,MAAM,OAAO,OAAS,OAAM,MAAM,OAAO;AAAM,eAAK,UAAU;AAAA;AAI3E,UAAI,SAAU,MAAM,SAAS,KAAQ,OAAM,WAAW,KAAK,iBAAmB,KAAK,kBAAkB,QAAO,YAAa,CAAC,QAAO;AAC/H,aAAK,cAAc;AACnB,aAAK,gBAAgB;AACrB,+BAAuB;AAAO,eAAK,YAAY,KAAK;AACpD,YAAI,KAAK,YAAY,SAAS;AAAG,wBAAc;AAAA;AAEjD,oBAAc;AAId,sBAAgB,KAAK;AACnB,2BAAmB,KAAK,YAAY;AACpC,YAAI,CAAC;AAAY;AACjB,YAAI,QAAO;AACT,wBAAc,AAAK,gBAAgB,WAAW,cAAc,oCAAoC,WAAW,cAAc;AACzH,6BAAmB,AAAI,aAAa;AACpC,uCAA6B,CAAC,WAAW,KAAK,OAAM,MAAM,IAAI,WAAW,KAAK,OAAM,MAAM;AAC1F,+BAAqB,GAAG,MAAM,iBAAiB,QAAO,OAAO,GAAG;AAChE,iCAAuB,AAAK,oBAAoB,CAAC,OAAO;AACxD,yBAAe,cAAc,KAAK,uBAAuB,WAAW,eAAe,kBAAkB;AACrG,+BAAqB,AAAI,yBAAyB,QAAQ,cAAc,CAAC,KAAK,WAAW,KAAK;AAC9F,4BAAkB,aAAa,IAAI;AACnC,uBAAa;AACb,uBAAa;AACb,0CAAgC,MAAM,KAAK,aAAa,QAAQ;AAChE,oBAAU;AACV,kCAAwB,WAAW,WAAW;AAC9C,qBAAW;AACX,cAAI,mBAAmB,QAAO;AAC5B,sCAA0B,GAAG,QAAQ,WAAW,CAAC,IAAI;AACrD,8BAAkB,kBAAkB;AACpC,sBAAU;AACV,8BAAkB;AAClB,2BAAe,KAAK,mBAAmB,WAAW,QAAQ,OAAO;AACjE,oCAAwB,KAAK,uBAAuB;AACpD,iBAAK,YAAY,KAAK;AACtB,2BAAe;AAAA,cACb,WAAW;AAAA,cACX,YAAY;AAAA,cACZ,KAAK;AAAA,gBACH,SAAS,gBAAgB;AAAA,gBACzB,aAAa,gBAAgB;AAAA;AAAA;AAGjC,kBAAM,KAAK;AAAA;AAEX,iBAAK,YAAY,KAAK;AAAA;AAExB,oBAAU;AAAA;AAEV,2BAAiB,AAAI,WAAW,AAAI,YAAY,AAAI,SAAS,YAAY,yBAAyB;AAClG,yBAAe;AAAA,YACb,YAAY,WAAW;AAAA,YACvB,KAAK;AAAA,cACH,SAAS,SAAS;AAAA,cAClB,aAAa,SAAS;AAAA;AAAA;AAG1B,gBAAM,KAAK;AAAA;AAAA;AAGf,WAAK,cAAc,KAAK,YAAY,OAAO,OAAO,MAAM;AACxD,WAAK,gBAAgB,MAAM;AAC3B,aAAO;AAAA;AAAA,IAIT;AACE,iBAAW,UAAU,IAAI,OAAO,EAAE;AAClC,iBAAW,UAAU,IAAI,OAAO,EAAE;AAClC,yBAAmB,CAAC,KAAK,IAAI,GAAG,KAAK,KAAK,IAAI,GAAG;AACjD,uBAAiB,CAAC,KAAK,IAAI,GAAG,KAAK,KAAK,IAAI,GAAG;AAC/C,aAAO,CAAE,YAAY;AAAA;AAAA;AAIzB,UAAQ,eAAe;AAAA;;;AC9KvB,IAAA;AAAA,UAAQ,UAAU;AAAA,IAChB;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ
;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG
;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU
;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG
;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU
;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ
;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG
;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU
;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG
;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU
;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ
;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU
;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ
;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG
;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU
;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG
;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU
;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ
;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG
;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU
;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG
;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU
;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ
;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG
;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU
;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG
;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA,IAEZ;AAAA,MACE,GAAG;AAAA,MACH,GAAG;AAAA,MACH,UAAU;AAAA,MACV,UAAU;AAAA;AAAA;AAAA;;;AC/viBd,IAAA;AAmBA,uBAA8B;AAC9B,mBAA0B;AAC1B,kBAAyB;AArBzB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAA
A;AAuBA,2BAAyB;AAAA,IACvB,OAAO,CAAC,GAAG,GAAG,GAAG;AAAA,IACjB,aAAa,CAAC,GAAG,GAAG,GAAG;AAAA,IACvB,cAAc,CAAC,GAAG,IAAI,IAAI;AAAA,IAC1B,YAAY,CAAC,IAAI,IAAI,IAAI;AAAA,IACzB,OAAO,CAAC,IAAI,IAAI,IAAI;AAAA,IACpB,UAAU,CAAC;AAAA;AA7Bb;AAAA,IAiCE;AACE,WAAK,WAAW;AAAA;AAAA,WAGX;AACL,aAAO;AAAA;AAAA,UAGH;AACJ,0BAAoB,MAAM,KAAK,SAAS,cAAc,OAAO;AAC7D,UAAI,CAAC;AAAa,eAAO;AACzB,oBAAc;AACd,+BAAyB;AACvB,4BAAoB;AACpB,YAAI,WAAW;AACb,4BAAkB,OAAO,KAAK;AAC5B,wBAAY,OAAO,iBAAiB,KAAK,IAAI,WAAW,WAAW,UAAU;AAAA;AAAA;AAGjF,cAAM,KAAK;AAAA,UACT,YAAY,WAAW;AAAA,UACvB,KAAK,WAAW,MAAM;AAAA,YACpB,WAAW,IAAI,QAAQ;AAAA,YACvB,WAAW,IAAI,QAAQ;AAAA,YACvB,WAAW,IAAI,YAAY,KAAK,WAAW,IAAI,QAAQ;AAAA,YACvD,WAAW,IAAI,YAAY,KAAK,WAAW,IAAI,QAAQ;AAAA,cACrD;AAAA,UACJ,WAAW,WAAW;AAAA,UACtB;AAAA;AAAA;AAGJ,aAAO;AAAA;AAAA;AAGX,UAAQ,WAAW;AAEnB;AACE,+CAA2C,MAAM,QAAQ,IAAI;AAAA,MAC3D,eAAe,QAAO,SAAS,WAAW,CAAE,WAAW,QAAO,SAAS,UAAU,SAAS;AAAA,MAC1F,eAAe,QAAO,SAAS,WAAW,CAAE,WAAW,QAAO,SAAS,UAAU,SAAS;AAAA;AAE5F,qBAAiB,IAAiB,0BAAa,mBAAmB,QAAO,WAAmB;AAC5F,iBAAa,IAAa,sBAAa,UAAU,eAAe,QAAO;AACvE,sBAAiB,IAAI,SAAS;AAE9B,YAAQ,IAAI,sBAAsB,QAAO,SAAS,UAAU,MAAM,YAAY;AAE9E,YAAQ,IAAI,sBAAsB,QAAO,SAAS,UAAU,MAAM,YAAY;AAC9E,WAAO;AAAA;AAET,UAAQ,OAAO;AAAA;;;ACnFf,IAAA;AAAA,UAAQ,OAAO;AACb,QAAI,CAAC;AAAK,aAAO;AACjB,qBAAiB;AACjB,uBAAmB;AAEjB,wBAAkB,KAAK,UAAU,KAAK,OAAQ,EAAE,SAAS;AACzD,yBAAmB,KAAK,UAAU,KAAK,OAAQ,EAAE,SAAS;AAC1D,mBAAa,KAAK,UAAU,KAAK,OAAQ,EAAE,SAAS;AACpD,UAAI,QAAQ,aAAa,cAAe,UAAU,SAAS,IAAI,KAAK,SAAS,KAAO,WAAW,SAAS,IAAI,KAAK,SAAS;AAAI,iBAAS,KAAK;AAAA,eACnI,QAAQ,aAAc,UAAU,SAAS,IAAI,KAAK,SAAS;AAAI,iBAAS,KAAK;AAAA,eAC7E,QAAQ,cAAe,WAAW,SAAS,IAAI,KAAK,SAAS;AAAI,iBAAS,KAAK;AAGxF,2BAAqB,KAAK,UAAU,KAAK,OAAQ,EAAE,SAAS;AAC5D,4BAAsB,KAAK,UAAU,KAAK,OAAQ,EAAE,SAAS;AAC7D,UAAI,gBAAgB;AAAe,iBAAS,KAAK,WAAY,aAAa,SAAS,IAAI,cAAc,SAAS,IAAK,SAAS;AAAA;AAE9H,WAAO;AAAA;AAGT,UAAQ,OAAO;AACb,QAAI,CAAC;AAAK,aAAO;AACjB,qBAAiB;AACjB,wBAAmB;AAIjB,UAAI,MAAK,QAAQ,MAAK,KAAK,SAAS;AAClC,0BAAkB,MAAK,KAAK,IAAI,KAAK,MAAK,KAAK,KAAK;AACpD,YAAI,KAAK,IAAI,aAAa;AAAI,mBAAS,KAAK;AAAA;AACvC,mBAAS,KAAK,UAAU,YAAY,IAAI,UAAU;AACvD,yBAAiB,KAAK,IAAI,MAAK,KAAK,KAAK,KAAK,MAAK,KAAK,KAAK,MAAM,KAAK,IAAI,MAAK,KAAK,KAAK,KAAK,MAAK,KAAK,KAAK;AAC/G,YAAI,WAAW;AAAK,mBAAS,KAAK;AAClC,0BAAkB,KAAK,IAAI,MAAK,KAAK,KAAK,KAAK,MAAK,KAAK,KAAK,MAAM,KAAK,IAAI,MAAK,KAAK,KAAK,KAAK,MAAK,KAAK,KAAK;AAChH,YAAI,YAAY;AAAK,mBAAS,KAAK;AACnC,0BAAkB,KAAK,IAAI,KAAK,MAAM,KAAK,IAAI,MAAK,KAAK,IAAI,KAAK,MAAK,KAAK,IAAI,MAAM,KAAK,IAAI,MAAK,KAAK,IAAI,KAAK,MAAK,KAAK,KAAK;AACjI,YAAI,YAAY;AAAI,mBAAS,KAAK,SAAS,KAAK,MAAM;AACtD,0BAAkB,MAAK,KAAK,KAAK;AACjC,YAAI,KAAK,IAAI,aAAa;AAAI,mBAAS,KAAK,QAAQ,YAAY,IAAI,OAAO;AAAA;AAAA;AAG/E,WAAO;AAAA;AAGT,UAAQ,OAAO;AACb,QAAI,CAAC;AAAK,aAAO;AACjB,qBAAiB;AACjB,wBAAmB;AACjB,sBAAgB;AAChB,kCAA4B,OAAO,QAAQ,MAAK;AAC9C,YAAI,WAAW;AAAY,kBAAQ,KAAK,CAAE,MAAM,OAAO,eAAe,UAAU,IAAI;AAAA;AAEtF,UAAI,WAAW,QAAQ,SAAS;AAC9B,wBAAgB,QAAQ,OAAO,aAAc,KAAK,SAAS,KAAK,EAAE,SAAS,KAAK,OAAO;AACvF,wBAAgB,QAAQ,OAAO,aAAc,KAAK,SAAS,KAAK,EAAE,SAAS,KAAK,OAAO;AACvF,iBAAS,KAAK,GAAG,QAAQ,gBAAgB,QAAQ;AAAA;AAAA;AAGrD,WAAO;AAAA;AAAA;;;AC1DT,IAAA;AAOA,uBAAqB;AACnB,qBAAiB;AACf,gBAAU,IAAI,OAAO,QAAQ,SAAS,gBAAgB;AACtD,aAAO,QAAQ,GAAG;AAChB,mBAAW,QAAQ;AACnB,eAAO;AAAA;AAAA;AAIX,qBAAiB;AACf,qBAAe,GAAG,aAAa;AAC/B,SAAG,aAAa,QAAQ;AACxB,SAAG,cAAc;AAEjB,UAAI,CAAC,GAAG,mBAAmB,QAAQ,GAAG;AACpC,cAAM,IAAI,MAAM,6BAA6B,GAAG,iBAAiB;AAAA;AAEnE,aAAO;AAAA;AAGT,SAAK,UAAU;AACf,SAAK,YAAY;AAEjB,iBAAa,SAAS,cAAc,GAAG;AACvC,iBAAa,SAAS,gBAAgB,GAAG;AAEzC,SAAK,KAAK,GAAG;AACb,OAAG,aAAa,KAAK,IAAI;AACzB,OAAG,aAAa,KAAK,IAAI;AACzB,OAAG,YAAY,KAAK;AAEpB,QAAI,CAAC,GAAG,oBAAoB,KAAK,IAAI,GAAG;AACtC,YAAM,IAAI,MAAM,0BAA0B,GAAG,kBAAkB,
KAAK;AAAA;AAGtE,OAAG,WAAW,KAAK;AAGnB,aAAS,cAAc,aAAa,KAAK;AACzC,oBAAgB,KAAK;AACnB,WAAK,UAAU,KAAK,GAAG,kBAAkB,KAAK,IAAI;AAAA;AAIpD,aAAS,cAAc,WAAW,KAAK;AACvC,aAAS,gBAAgB,WAAW,KAAK;AACzC,oBAAgB,KAAK;AACnB,WAAK,QAAQ,KAAK,GAAG,mBAAmB,KAAK,IAAI;AAAA;AAAA;AAIrD,2BAAyB;AACvB,QAAI,CAAC;AAAQ,eAAS;AACtB,qBAAiB;AACjB,yBAAqB;AACrB,uBAAmB;AACnB,mCAA+B;AAC/B,4BAAwB,CAAC,MAAM;AAC/B,uBAAmB;AACnB,iBAAa;AACb,kBAAc;AACd,wBAAoB;AACpB,0BAAsB;AACtB,oBAAgB,OAAO,UAAU,SAAS,cAAc;AAGxD,gCAA4B;AAE5B,eAAW,QAAQ,WAAW;AAC9B,QAAI,CAAC;AAAI,YAAM,IAAI,MAAM;AAEzB,SAAK,YAAY;AAEf,mBAAa,MAAM,UAAU,MAAM,KAAK,WAAW;AACnD,qBAAe,QAAQ;AAEvB,mBAAa,KAAK,CAAE,MAAM,QAAQ;AAAA;AAGpC,SAAK,QAAQ;AACX,qBAAe;AAAA;AAGjB,SAAK,QAAQ;AACX,cAAQ,OAAM,OAAO,OAAM;AAC3B,mBAAa;AAGb,UAAI,CAAC;AAAgB,yBAAiB,GAAG;AACzC,SAAG,YAAY,GAAG,YAAY;AAC9B,SAAG,cAAc,GAAG,YAAY,GAAG,gBAAgB,GAAG;AACtD,SAAG,cAAc,GAAG,YAAY,GAAG,gBAAgB,GAAG;AACtD,SAAG,cAAc,GAAG,YAAY,GAAG,oBAAoB,GAAG;AAC1D,SAAG,cAAc,GAAG,YAAY,GAAG,oBAAoB,GAAG;AAC1D,SAAG,WAAW,GAAG,YAAY,GAAG,GAAG,MAAM,GAAG,MAAM,GAAG,eAAe;AAGpE,UAAI,aAAa,WAAW;AAE1B;AACA,eAAO;AAAA;AAGT,mBAAa,GAAG,IAAI,aAAa,QAAQ;AACvC,uBAAgB,MAAM,aAAa,SAAS;AAC5C,kBAAU,aAAa;AACvB,UAAE,KAAK,MAAM,MAAM,EAAE,QAAQ;AAAA;AAG/B,aAAO;AAAA;AAGT,oBAAgB;AAEd,UAAI,UAAU,UAAU,WAAW;AAAW;AAAA;AAE9C,cAAQ,QAAQ;AAChB,eAAS;AACT,cAAQ,SAAS;AACjB,gBAAU;AAGV,UAAI,CAAC;AAEH,yBAAiB,IAAI,aAAa;AAAA,UAChC;AAAA,UAAI;AAAA,UAAI;AAAA,UAAG;AAAA,UAAG;AAAA,UAAG;AAAA,UAAI;AAAA,UAAG;AAAA,UAAG;AAAA,UAAI;AAAA,UAAG;AAAA,UAAG;AAAA,UACrC;AAAA,UAAI;AAAA,UAAG;AAAA,UAAG;AAAA,UAAG;AAAA,UAAG;AAAA,UAAI;AAAA,UAAG;AAAA,UAAG;AAAA,UAAG;AAAA,UAAG;AAAA,UAAG;AAAA;AAGrC,QAAC,gBAAgB,GAAG,gBAAgB,GAAG,WAAW,GAAG,cAAc;AACnE,WAAG,WAAW,GAAG,cAAc,UAAU,GAAG;AAI5C,WAAG,YAAY,GAAG,gCAAgC;AAAA;AAGpD,SAAG,SAAS,GAAG,GAAG,QAAQ;AAG1B,0BAAoB,CAAC,MAAM;AAAA;AAG7B,gCAA4B;AAC1B,wBAAkB,SAAS,kBAAkB,UAC1C,0BAA0B,QAAQ;AAErC,aAAO,kBAAkB;AAAA;AAG3B,sCAAkC;AAChC,kBAAY,GAAG;AACf,SAAG,gBAAgB,GAAG,aAAa;AAEnC,2BAAqB,GAAG;AACxB,SAAG,iBAAiB,GAAG,cAAc;AAErC,sBAAgB,GAAG;AACnB,SAAG,YAAY,GAAG,YAAY;AAC9B,SAAG,WAAW,GAAG,YAAY,GAAG,GAAG,MAAM,OAAO,QAAQ,GAAG,GAAG,MAAM,GAAG,eAAe;AAEtF,SAAG,cAAc,GAAG,YAAY,GAAG,oBAAoB,GAAG;AAC1D,SAAG,cAAc,GAAG,YAAY,GAAG,oBAAoB,GAAG;AAC1D,SAAG,cAAc,GAAG,YAAY,GAAG,gBAAgB,GAAG;AACtD,SAAG,cAAc,GAAG,YAAY,GAAG,gBAAgB,GAAG;AAEtD,SAAG,qBAAqB,GAAG,aAAa,GAAG,mBAAmB,GAAG,YAAY,SAAS;AAEtF,SAAG,YAAY,GAAG,YAAY;AAC9B,SAAG,gBAAgB,GAAG,aAAa;AAEnC,aAAO,CAAE,KAAK;AAAA;AAGhB,kBAAc;AACZ,mBAAa;AACb,mBAAa;AACb,kBAAY;AAGZ,UAAI,eAAe;AAEjB,iBAAS;AAAA;AAGT,iBAAS,oBAAoB,0BAA0B;AAAA;AAEzD;AAGA,UAAI,gBAAgB,CAAE,SAAQ,KAAK;AAGjC,iBAAS;AACT,gBAAQ,aAAa,MAAM;AAAA;AAG3B,mCAA4B,4BAA2B,KAAK;AAC5D,iBAAS,oBAAoB,0BAA0B;AAAA;AAIzD,SAAG,YAAY,GAAG,YAAY;AAC9B,SAAG,gBAAgB,GAAG,aAAa;AAEnC,SAAG,UAAU,gBAAgB,QAAQ,OAAQ,QAAQ,KAAK;AAC1D,SAAG,WAAW,GAAG,WAAW,GAAG;AAAA;AAGjC,2BAAuB;AACrB,UAAI,oBAAoB;AACtB,0BAAkB,oBAAoB;AACtC,WAAG,WAAW,gBAAgB;AAC9B,eAAO;AAAA;AAIT,wBAAkB,IAAI,aAAa,IAAI,OAAO,iBAAiB;AAE/D,wBAAkB,aAAa;AAC/B,uBAAiB,IAAI;AACrB,SAAG,wBAAwB,gBAAgB,UAAU;AACrD,SAAG,oBAAoB,gBAAgB,UAAU,KAAK,GAAG,GAAG,OAAO,OAAO,UAAU,IAAI;AACxF,SAAG,wBAAwB,gBAAgB,UAAU;AACrD,SAAG,oBAAoB,gBAAgB,UAAU,IAAI,GAAG,GAAG,OAAO,OAAO,UAAU,IAAI;AAEvF,0BAAoB,kBAAkB;AACtC,aAAO;AAAA;AAGT,eAAW,CAAE,cAAc;AAE3B,iBAAa;AACb,WAAO,kBAAkB;AAAA,MACvB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MAEA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,KAAK;AAEP,WAAO,oBAAoB;AAAA,MACzB;AAAA,MACA;AAAA,MACA;AAAA,MAEA;AAAA,MACA;AAAA,MACA;AAAA,MACA,KAAK;AAEP,kBAAc;AAKd,YAAQ,cAAc;AAEpB,gBAAU,IAAI,aAAa;AAC3B,QAAE,MAAM;AACR,QAAE,MAAM;AACR,QAAE,OAAO;AACT,QAAE,OAAO;AAGT,qBAAgB,EAAE,QAAQ,KAAK,EAAE,OAAO,KAAK,EAAE,OAAO,KAAK,EAAE,QAAQ,KAAK,EAAE,QA
AQ,KAAK,EAAE,QAAQ,KAAK,EAAE,QAAQ,KAAK,EAAE,QAAQ,IAC7H,QAAQ,YAAY,OAAO,gBAC3B,QAAQ,YAAY,OAAO;AAE/B,sBAAgB,eAAe;AAC/B,SAAG,WAAW,QAAQ,QAAQ,GAAG;AACjC;AAAA;AAGF,YAAQ,YAAY,SAAS;AAC7B,YAAQ,YAAY,OAAO,aAAa;AAAA,MACtC;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MAEA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,KAAK;AACP,YAAQ,YAAY,OAAO,gBAAgB;AAAA,MACzC;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MAEA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,KAAK;AAEP,YAAQ,aAAa;AACnB,gBAAW,eAAc,KAAK;AAC9B,cAAQ,YAAY;AAAA,QAClB;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QACZ;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QACZ;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QACZ;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA;AAAA;AAIhB,YAAQ,aAAa;AACnB,gBAAW,WAAU,KAAK,IAAI,IAAI;AAClC,gBAAY,KAAI,KAAK;AACrB,cAAQ,YAAY;AAAA,QAClB;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QACZ;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QACZ;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QACZ;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA;AAAA;AAIhB,YAAQ,aAAa;AACnB,cAAQ,WAAW;AAAA;AAGrB,YAAQ,WAAW;AACjB,gBAAW,WAAU,KAAK;AAC1B,gBAAU,OAAQ,KAAI;AAEtB,cAAQ,YAAY;AAAA,QAClB;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QACZ;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QACZ;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QACZ;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA;AAAA;AAIhB,YAAQ,WAAW;AACjB,cAAQ,SAAS;AAAA;AAGnB,YAAQ,MAAM;AACZ,iBAAY,aAAY,KAAK,MAAM,KAAK;AACxC,kBAAY,KAAK,IAAI;AACrB,kBAAY,KAAK,IAAI;AACrB,mBAAa;AACb,mBAAa;AACb,mBAAa;AAEb,cAAQ,YAAY;AAAA,QAClB,OAAO,MAAO,KAAI,QAAQ,MAAO,CAAC;AAAA,QAAO,OAAO,MAAO,CAAC,OAAQ,MAAO,CAAC;AAAA,QAAO,OAAO,MAAO,CAAC,OAAQ,MAAO,KAAI;AAAA,QAAO;AAAA,QAAG;AAAA,QAC3H,OAAO,MAAO,CAAC,OAAQ,MAAO;AAAA,QAAQ,OAAO,MAAO,KAAI,QAAQ,MAAO;AAAA,QAAQ,OAAO,MAAO,CAAC,OAAQ,MAAO;AAAA,QAAS;AAAA,QAAG;AAAA,QACzH,OAAO,MAAO,CAAC,OAAQ,MAAO,CAAE,KAAI;AAAA,QAAQ,OAAO,MAAO,CAAC,OAAQ,MAAO;AAAA,QAAO,OAAO,MAAO,KAAI,QAAQ,MAAO;AAAA,QAAO;AAAA,QAAG;AAAA,QAC5H;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA;AAAA;AAIhB,YAAQ,sBAAsB;AAC5B,cAAQ,YAAY;AAAA,QAClB;AAAA,QAAW;AAAA,QAAW;AAAA,QAAW;AAAA,QAAG;AAAA,QACpC;AAAA,QAAW;AAAA,QAAW;AAAA,QAAW;AAAA,QAAG;AAAA,QACpC;AAAA,QAAW;AAAA,QAAW;AAAA,QAAW;AAAA,QAAG;AAAA,QACpC;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA;AAAA;AAIhB,YAAQ,QAAQ;AACd,cAAQ,YAAY;AAAA,QAClB;AAAA,QAAO;AAAA,QAAW;AAAA,QAAY;AAAA,QAAG;AAAA,QACjC;AAAA,QAAO;AAAA,QAAW;AAAA,QAAY;AAAA,QAAG;AAAA,QACjC;AAAA,QAAO;AAAA,QAAW;AAAA,QAAY;AAAA,QAAG;AAAA,QACjC;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA;AAAA;AAIhB,YAAQ,UAAU;AAChB,cAAQ,YAAY;AAAA,QAClB;AAAA,QAAoB;AAAA,QAAqB;AAAA,QAAqB;AAAA,QAAG;AAAA,QACjE;AAAA,QAAuB;AAAA,QAAoB;AAAA,QAAqB;AAAA,QAAG;AAAA,QACnE;AAAA,QAAqB;AAAA,QAAsB;AAAA,QAAqB;AAAA,QAAG;AAAA,QACnE;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA;AAAA;AAIhB,YAAQ,iBAAiB;AACvB,cAAQ,YAAY;AAAA,QAClB;AAAA,QAAoB;AAAA,QAAoB;AAAA,QAAsB;AAAA,QAAG;AAAA,QACjE;AAAA,QAAqB;AAAA,QAAoB;AAAA,QAAqB;AAAA,QAAG;AAAA,QACjE;AAAA,QAAoB;AAAA,QAAqB;AAAA,QAAoB;AAAA,QAAG;AAAA,QAChE;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA;AAAA;AAIhB,YAAQ,aAAa;AACnB,cAAQ,YAAY;AAAA,QAClB;AAAA,QAAoB;AAAA,QAAqB;AAAA,QAAsB;AAAA,QAAG;AAAA,QAClE;AAAA,QAAsB;AAAA,QAAoB;AAAA,QAAsB;AAAA,QAAG;AAAA,QACnE;AAAA,QAAsB;AAAA,QAAqB;AAAA,QAAoB;AAAA,QAAG;AAAA,QAClE;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA;AAAA;AAIhB,YAAQ,cAAc;AACpB,cAAQ,YAAY;AAAA,QAClB;AAAA,QAAoB;AAAA,QAAqB;AAAA,QAAsB;AAAA,QAAG;AAAA,QAClE;AAAA,QAAqB;AAAA,QAAoB;AAAA,QAAsB;AAAA,QAAG;AAAA,QAClE;AAAA,QAAoB;AAAA,QAAqB;AAAA,QAAmB;AAAA,QAAG;AAAA,QAC/D;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA;AAAA;AAIhB,YAAQ,WAAW;AACjB,cAAQ,YAAY;AAAA,Q
AClB;AAAA,QAAO;AAAA,QAAQ;AAAA,QAAQ;AAAA,QAAG;AAAA,QAC1B;AAAA,QAAQ;AAAA,QAAO;AAAA,QAAQ;AAAA,QAAG;AAAA,QAC1B;AAAA,QAAQ;AAAA,QAAQ;AAAA,QAAO;AAAA,QAAG;AAAA,QAC1B;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA;AAAA;AAIhB,YAAQ,aAAa;AACnB,cAAQ,YAAY;AAAA,QAClB;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QACZ;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QACZ;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QACZ;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA,QAAG;AAAA;AAAA;AAOhB,YAAQ,cAAc;AACpB,gBAAU,IAAI,aAAa;AAC3B,yBAAmB,IAAI;AACvB,yBAAmB,IAAI;AAEvB,sBAAgB,eAAe,QAAQ,YAAY;AACnD,SAAG,WAAW,QAAQ,QAAQ,GAAG;AACjC,SAAG,UAAU,QAAQ,QAAQ,IAAI,YAAY;AAC7C;AAAA;AAGF,YAAQ,YAAY,SAAS;AAAA,MAC3B;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MAEA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MAEA;AAAA,MACA;AAAA,MACA;AAAA,MAEA;AAAA,MACA;AAAA,MACA;AAAA,MAEA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,KAAK;AAEP,YAAQ,cAAc;AACpB,cAAQ,YAAY,KAAK,MAAM;AAAA,QAC7B;AAAA,QAAG;AAAA,QAAG;AAAA,QACN;AAAA,QAAG;AAAA,QAAI;AAAA,QACP;AAAA,QAAG;AAAA,QAAG;AAAA;AAAA;AAIV,YAAQ,SAAS;AACf,cAAQ,YAAY,KAAK,MAAM;AAAA,QAC7B;AAAA,QAAI;AAAA,QAAG;AAAA,QACP;AAAA,QAAI;AAAA,QAAG;AAAA,QACP;AAAA,QAAI;AAAA,QAAG;AAAA;AAAA;AAIX,YAAQ,SAAS;AACf,cAAQ,YAAY,KAAK,MAAM;AAAA,QAC7B;AAAA,QAAI;AAAA,QAAI;AAAA,QACR;AAAA,QAAG;AAAA,QAAG;AAAA,QACN;AAAA,QAAG;AAAA,QAAG;AAAA;AAAA;AAIV,YAAQ,UAAU;AAChB,gBAAU,UAAU;AACpB,cAAQ,YAAY,KAAK,MAAM;AAAA,QAC7B;AAAA,QAAG,KAAK;AAAA,QAAG;AAAA,QACX,KAAK;AAAA,QAAG,IAAI,IAAI;AAAA,QAAG,KAAK;AAAA,QACxB;AAAA,QAAG,KAAK;AAAA,QAAG;AAAA;AAAA;AAIf,YAAQ,SAAS;AACf,gBAAU,QAAQ;AAClB,cAAQ,YAAY,KAAK,MAAM;AAAA,QAC7B,KAAK;AAAA,QAAG,KAAK;AAAA,QAAG;AAAA,QAChB,KAAK;AAAA,QAAG;AAAA,QAAG,IAAI;AAAA,QACf;AAAA,QAAG,IAAI;AAAA,QAAG,IAAI;AAAA;AAAA;AAOlB,YAAQ,OAAO;AACb,wBAAmB,OAAO,IAAK;AAC/B,wBAAmB,OAAO,IAAK;AAE/B,sBAAgB,eAAe,QAAQ,KAAK;AAG5C,SAAG,UAAU,QAAQ,QAAQ,IAAI,GAAG;AACpC,YAAM,KAAK;AAGX,SAAG,UAAU,QAAQ,QAAQ,IAAI,WAAW;AAC5C;AAAA;AAGF,YAAQ,KAAK,SAAS;AAAA,MACpB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MAEA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,KAAK;AAKP,YAAQ,WAAW;AACjB,wBAAmB,OAAQ;AAC3B,wBAAmB,OAAQ;AAE3B,sBAAgB,eAAe,QAAQ,SAAS;AAGhD,SAAG,UAAU,QAAQ,QAAQ,MAAM,WAAW;AAC9C;AAAA;AAGF,YAAQ,SAAS,SAAS;AAAA,MACxB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MAEA;AAAA,MACA;AAAA,MACA;AAAA,MAEA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,KAAK;AAAA;AAGT,UAAQ,SAAS;AAAA;;;AC7lBjB,IAAA;AACA,kBAAyB;AAGzB,iBAAe;AACf,kBAAgB;AAKhB;AACE;AACA,QAAI,iBAAiB,GAAG;AACtB,gBAAS,GAAG,MAAM;AAAA;AAElB,4BAAsB,MAAM,gBAAgB,MAAM,cAAc,MAAM,SAAU,MAAM,SAAU,MAAM,MAAM,KAAK;AACjH,6BAAuB,MAAM,iBAAiB,MAAM,eAAe,MAAM,UAAW,MAAM,SAAU,MAAM,MAAM,KAAK;AACrH,wBAAkB;AAClB,yBAAmB;AACnB,UAAI,QAAO,OAAO,QAAQ;AAAG,sBAAc,QAAO,OAAO;AAAA,eAChD,QAAO,OAAO,SAAS;AAAG,sBAAc,gBAAiB,SAAO,OAAO,SAAS;AACzF,UAAI,QAAO,OAAO,SAAS;AAAG,uBAAe,QAAO,OAAO;AAAA,eAClD,QAAO,OAAO,QAAQ;AAAG,uBAAe,iBAAkB,SAAO,OAAO,QAAQ;AACzF,UAAI,CAAC,YAAa,SAAS,UAAU,eAAiB,SAAS,WAAW;AACxE,mBAAY,OAAO,oBAAoB,cAAe,IAAI,gBAAgB,aAAa,gBAAgB,SAAS,cAAc;AAC9H,YAAI,SAAS,UAAU;AAAa,mBAAS,QAAQ;AACrD,YAAI,SAAS,WAAW;AAAc,mBAAS,SAAS;AAAA;AAE1D,kBAAY,SAAS,WAAW;AAChC,UAAI,iBAAiB;AAAW,YAAI,aAAa,OAAO,GAAG;AAAA;AACtD,YAAI,UAAU,OAAO,GAAG,GAAG,eAAe,gBAAgB,GAAG,GAAG,SAAS,OAAO,SAAS;AAC9F,UAAI,QAAO,OAAO;AAChB,YAAI,CAAC,KAAK,MAAM,CAAC,aAAc,SAAS,UAAU,UAAU,SAAW,SAAS,WAAW,UAAU;AACnG,sBAAa,OAAO,oBAAoB,cAAe,IAAI,gBAAgB,SAAS,OAAO,SAAS,UAAU,SAAS,cAAc;AACrI,cAAI,UAAU,UAAU,SAAS;AAAO,sBAAU,QAAQ,SAAS;AACnE,cAAI,UAAU,WAAW,SAAS;AAAQ,sBAAU,SAAS,SAAS;AACtE,eAAK,KAAK,GAAG,IAAI,MAAM,aAAa,IAAY,eAAO,CAAE,Q
AAQ,cAAe;AAAA;AAElF,aAAK,GAAG;AACR,aAAK,GAAG,UAAU,cAAc,QAAO,OAAO;AAC9C,YAAI,QAAO,OAAO,aAAa;AAAG,eAAK,GAAG,UAAU,YAAY,QAAO,OAAO;AAC9E,YAAI,QAAO,OAAO,cAAc;AAAG,eAAK,GAAG,UAAU,WAAW,QAAO,OAAO;AAC9E,YAAI,QAAO,OAAO,SAAS;AAAG,eAAK,GAAG,UAAU,QAAQ,QAAO,OAAO;AACtE,YAAI,QAAO,OAAO,eAAe;AAAG,eAAK,GAAG,UAAU,cAAc,QAAO,OAAO;AAClF,YAAI,QAAO,OAAO,QAAQ;AAAG,eAAK,GAAG,UAAU,OAAO,QAAO,OAAO;AACpE,YAAI,QAAO,OAAO;AAAU,eAAK,GAAG,UAAU;AAC9C,YAAI,QAAO,OAAO;AAAO,eAAK,GAAG,UAAU;AAC3C,YAAI,QAAO,OAAO;AAAS,eAAK,GAAG,UAAU;AAC7C,YAAI,QAAO,OAAO;AAAO,eAAK,GAAG,UAAU;AAC3C,YAAI,QAAO,OAAO;AAAY,eAAK,GAAG,UAAU;AAChD,YAAI,QAAO,OAAO;AAAa,eAAK,GAAG,UAAU;AACjD,YAAI,QAAO,OAAO;AAAU,eAAK,GAAG,UAAU;AAC9C,YAAI,QAAO,OAAO,aAAa;AAAG,eAAK,GAAG,UAAU,YAAY,QAAO,OAAO;AAC9E,aAAK,GAAG,MAAM;AAGd,mBAAW;AACX,YAAI;AACF,2BAAiB,IAAI,WAAW,UAAU,QAAQ,UAAU,SAAS;AACrE,4BAAkB,IAAI,WAAW,UAAU,QAAQ,UAAU,SAAS;AACtE,aAAG,WAAW,GAAG,GAAG,UAAU,OAAO,UAAU,QAAQ,GAAG,MAAM,GAAG,eAAe;AAGlF,kBAAQ;AACR,uBAAa,UAAU,SAAS,GAAG,KAAK,GAAG;AACzC,yBAAa,GAAG,IAAI,UAAU,OAAO;AACnC,4BAAe,KAAI,IAAI,UAAU,SAAS;AAC1C,wBAAU,OAAO,SAAS,QAAQ;AAClC,wBAAU,OAAO,SAAS,QAAQ;AAClC,wBAAU,OAAO,SAAS,QAAQ;AAAA;AAAA;AAGtC,oBAAU,OAAO;AAAA;AAAA;AAGnB,oBAAY;AAAA;AAEd;AACA,UAAI,UAAU;AACZ,sBAAc,CAAC,UAAU,QAAQ,UAAU,OAAO;AAClD,iBAAS,GAAG,SAAS,UAAU,MAAM,OAAO;AAAA,iBAClC,QAAO,YAAY,WAAa,qBAAqB;AAE/D,iBAAS,GAAG,QAAQ,WAAW;AAAA;AAG/B,2BAAoB,OAAO,oBAAoB,cAAe,IAAI,gBAAgB,aAAa,gBAAgB,SAAS,cAAc;AACtI,mBAAW,QAAQ;AACnB,mBAAW,SAAS;AACpB,wBAAgB,WAAW,WAAW;AACtC,gBAAQ,UAAU,WAAW,GAAG;AAChC,sBAAa,QAAQ,aAAa,GAAG,GAAG,aAAa;AACrD,iBAAS,GAAG,QAAQ,WAAW;AAAA;AAEjC,qBAAe,OAAO;AACtB,gBAAS,OAAO,WAAW;AAC3B,aAAO;AACP,aAAO;AAAA;AAET,WAAO,CAAE,iBAAQ,QAAQ,QAAO,OAAO,SAAS,YAAY;AAAA;AAG9D,UAAQ,UAAU;AAAA;;;AC5FlB,MAAA,KAAoB;;;ACVpB;;;;;;;;;;;;;;;;AAsBO,MAAM,kBAAkB;AACxB,wBAAwB;AAvB/B;EAiDE;AAAoB,SAAA,UAAA;AAAgC,SAAA,YAAA;AAH5C,SAAA,OAAO,IAAI;AACX,SAAA,eAAe;;EAIvB;AACE,QAAI,CAAC,KAAK,KAAK,IAAI;AACjB,WAAK,UAAU,SAAS,KAAK,SAAS;;AAExC,WAAO,KAAK,KAAK,IAAI;;EAGvB;AACE,SAAK;AACL,SAAK,KAAK,IAAI,QAAQ;;EAGxB;AACE,WAAO,KAAK,KAAK,IAAI;;EAGvB;AACE,SAAK;AACL,WAAO,KAAK,KAAK,OAAO;;EAG1B;AACE,WAAO,KAAK;;;AAzEhB;EAiGE;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AAEE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,KAAK,qBAAqB,KAAK,kBAAkB;;EAG1D;AAGE,WAAO,kBAAkB;;EAG3B,kBACK,GAAG,GAAG,YAAY,YAAY,MAAM,YAAY;AAEnD,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AAEE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AAEE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,k
BAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B,aACK,OAAO,QAAQ,UAAU,MAAM,YAAY;AAE9C,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AAEE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B,sBACK,OAAO,QAAQ,UAAU,MAAM,YAAY;AAE9C,WAAO,kBAAkB;;EAG3B;AAEE,WAAO,kBAAkB;;EAE3B;AAEE,WAAO,kBAAkB;;EAE3B;AAEE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AAEE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AAEE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AAEE,WAAO,kBAAkB;;EAG3B;AAEE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AAEE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AAEE,WAAO,kBAAkB;;EAG3B;AAEE,WAAO,kBAAkB;;EAG3B;AAEE,WAAO,kBAAkB;;EAG3B;AAGE,WAAO,kBAAkB;;EAG3B;AAEE,WAAO,kBAAkB;;EAG3B;AAGE,WAAO,kBAAkB;;EAG3B;AAEE,WAAO,kBAAkB;;EAG3B;AAIE,WAAO,kBAAkB;;EAG3B;AAGE,WAAO,kBAAkB;;EAG3B;AAGE,WAAO,kBAAkB;;EAG3B;AAGE,WAAO,kBAAkB;;EAG3B;AAEE,WAAO,kBAAkB;;EAG3B;AAEE,WAAO,kBAAkB;;EAG3B;AAGE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAE3B;AACE,WAAO,kBAAkB;;EAG3B;AAIE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAI3B;AACE,WAAO,kBAAkB;;EAG3B;AAGE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AAEE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;EAG3B;AACE,WAAO,kBAAkB;;;AAI7B;AACE,QAAM,IAAI,MACN,IAAI;;;;ACzpBV;;;;;;;;;;;;;;;;AAiCM,iBAAkB;AAEtB,gBAAc,MAAM;AACpB,aAAW;AACX,cAAY;AAEZ,SAAO,UAAU;AAEf,YAAS,KAAK,WAAW,UAAW;AAEpC;AAEA,WAAO,MAAM;AACb,UAAM,WAAW,MAAM;AACvB,UAAM,SAAS;;;AAKb;AACJ,SAAO,KAAK,IAAI,MAAK,KAAK,IAAI,GAAG;;AAG7B;AACJ,SAAO,MAAM,MAAM,IAAI,MAAM,MAAM;;AAG/B;AACJ,aAAU;AACV,eAAa,GAAG,IAAI,IAAI,QAAQ;AAC9B,YAAO,IAAI;;AAEb,SAAO;;AAUH;AACJ,YAAU,KAAK;AACf,SAAQ,IAAI,IAAM,KAAI,KAAK;;AAIvB;AACJ,eAAa;AACb,eAAa,GAAG,IAAI,EAAE,QAAQ;AAC5B,iBAAa,OAAO,EAAE,MAAM,OAAO,EAAE;AACrC,cAAU,OAAO;;AAEnB,SAAO;;AAkBH;AACJ,MAAI,CAAC;AACH,UAAM,IAAI,MAAM,OAAO,QAAQ,WAAW,MAAM;;;AAI9C,gEACuD;AAC3D,SACI,YAAY,QAAQ,SACpB,MAAM,qBAAqB,WAAW,cAAc;;AAGpD;AACJ,SACI,KAAK,MACL,MAAM;;AAsBN,+BAEsC,qBAAqB;AAC/D,MAAI,UAAU;AACZ,aAAS;;AAEX,MAAI,MAAM,QAAQ,QAAQ,aAAa,QAAQ,CAAC;AAC9C,iBAAa,GAAG,IAAI,IAAI,QAAQ,EAAE;AAChC,cAAQ,IAAI,IAAI,QAAQ;;;AAG1B,WAAO,KAAK;;AAEd,SAAO;;AAcH;AACJ,MAAI,MAAM,WAAW;AAEnB,WAAO;;AAET,aAAW,MAAM;AACjB,eAAa,GAAG,IAAI,MAAM,QAAQ;AAChC,YAAQ,MAAM;;AAEhB,SAAO;;AAGH;AACJ,SAAO,MAAM,WAAW;;AAGpB;AACJ,MAAI,OAAO;AACT,WAAO;;AAET,MAAI,MAAM,QAAQ,MAAM;AACtB,WAAO;;AAGT,MAAI,GAAG,WAAW,GAAG;AACnB,WAAO;;AAET,eAAa,GAAG,IAAI,GAAG,QAAQ;AAC7B,QAAI,GAAG,OAAO,GAAG;AACf,aAAO;;;AAGX,SAAO;;AAGH;AACJ,SAAO,IAAI,MAAM;;AAGb;AAEJ,MAAK,KAAa,QAAQ;AAExB,WAAQ,KAAa,KAAK;;AAE5B,MAAI,MAAM;AACR,WAAO;aACE,MAAM;AACf,WAAO;;AAEP,gBAAY,KAAK,IAAI,IAAI;AACzB,WAAQ,OAAM,KAAM,OAAM;;;AAIxB;
AACJ,gBAAc,KAAK,KAAK,KAAK,KAAK;AAClC,SAAO,CAAC,OAAO,KAAK,KAAK,OAAO;;AAe5B;AACJ,0BAAwB,IAAI,YAAY;AACxC,eAAa,GAAG,IAAI,GAAG,EAAE;AACvB,oBAAgB,KAAK;;AAEvB,UAAQ;AACR,SAAO;;AAGH;AACJ,MAAI,QAAQ,EAAE;AACZ,WAAO;;AAET,SAAO,IAAI,IAAI,OAAO,OAAO,EAAE;;AAG3B,wCACgC,aAAqB;AAEzD,SAAO,IAAI,QAAc;AACvB,mBAAe;AAEf,kBAAc;AACZ,UAAI;AACF;AACA;;AAGF;AAEA,0BAAoB,QAAQ;AAE5B,UAAI,cAAc,QAAQ,YAAY;AACpC;AACA;;AAEF,iBAAW,OAAO;;AAGpB;;;AAaE;AAEJ,kBAAgB;AAChB,oBAAkB;AAElB,eAAa,GAAG,IAAI,MAAM,QAAQ,EAAE;AAClC,QAAI,MAAM,MAAM;AACd,mBAAa,MAAM;eACV,MAAM,OAAO;AACtB,UAAI,gBAAgB;AAClB,cAAM,MACF,yDACmB,uBAAuB;;AAEhD,oBAAc;eACL,MAAM,KAAK;AACpB,YAAM,MAAM,gCAAgC,MAAM,aAAa;;;AAInE,MAAI,gBAAgB;AAClB,QAAI,OAAO,KAAK,SAAS;AACvB,YAAM,MAAM,QAAQ,yCAAyC;;AAE/D,WAAO;;AAGT,MAAI,cAAc;AAChB,UAAM,MACF,qCAAqC;;AAG3C,MAAI,OAAO,cAAc;AACvB,UAAM,MACF,wDACO,UAAU;;AAGvB,mBAAiB,MAAM;AACvB,WAAS,eAAe,OAAO;AAC/B,SAAO;;AAGH;AAEJ,eAAa,MAAM;AAGnB,SAAO,QAAQ,OAAO,MAAM,IAAI,UAAU,KAAK,GAAG,OAAO;AAGzD,SACI,KAAK,MAAM,QAAM,MAAM,CAAC,QAAQ,KAAK,OACrC,MACI,+CAA+C,SAAS,sBAC5C;AAGpB,SACI,KAAK,MAAM,QAAM,MAAM,MACvB,MAAM,0DACU;AAGpB,SAAO,KAAK,IAAI,OAAK,IAAI,IAAI,OAAO,IAAI;;AAIpC;AAEJ,mBAA2B;AAC3B,mBAA2B;AAC3B,uBAAqB,QAAQ,QAAQ,MAAM,QAAQ,SAAS,KAAK,WAAW;AAC5E,eAAc,QAAQ,QAAQ,eAC1B,OACA,eAAe,MAAM,OAAO;AAChC,UAAQ;AACR,eAAa,GAAG,IAAI,MAAM,QAAQ,EAAE;AAClC,QAAI,QAAQ;AACV,UAAI,KAAK,OAAO,KAAK,MAAM,OAAO;AAChC,cAAM,IAAI,MACN,sBAAsB,oBAAoB,MAAM;;AAEtD,UAAK,MAAK,MAAM,QAAQ,KAAK,KAAK,MAAM,MAAM,OAAO;AACnD,iBAAS,KAAK,MAAM;AACpB,iBAAS,KAAK;;AAEhB,UAAI,KAAK,MAAM;AACb;;;AAGJ,QAAI,MAAM,OAAO;AACf,eAAS,KAAK,MAAM;AACpB,eAAS,KAAK;;;AAGlB,SAAO,CAAC,UAAU;;AAGd;AAEJ,eAAa;AACb,MAAI,SAAS,QAAQ,UAAU;AAC7B,aAAS,IAAI,aAAa;aACjB,UAAU;AACnB,aAAS,IAAI,WAAW;aACf,UAAU;AACnB,aAAS,IAAI,WAAW;;AAExB,UAAM,IAAI,MAAM,qBAAqB;;AAEvC,SAAO;;AAGH;AAEJ,eAAa;AACb,MAAI,SAAS,QAAQ,UAAU;AAC7B,aAAS,IAAI,aAAa;aACjB,UAAU;AACnB,aAAS,IAAI,WAAW;aACf,UAAU;AACnB,aAAS,IAAI,WAAW;aACf,UAAU;AACnB,aAAS,IAAI,MAAgB;;AAE7B,UAAM,IAAI,MAAM,qBAAqB;;AAEvC,SAAO;;AAGH;AAEJ,eAAa,GAAG,IAAI,KAAK,QAAQ;AAC/B,gBAAY,KAAK;AACjB,QAAI,MAAM,QAAQ,CAAC,SAAS;AAC1B,YAAM,MAAM,oBAAoB,iCAAiC;;;;AAMjE;AACJ,SAAO,UAAU,UAAU,UAAU,eAAe,UAAU,aAC1D,UAAU,WAAW,UAAU;;AAO/B;AACJ,MAAI,YAAY;AACd,WAAO;;AAET,MAAI,YAAY,aAAa,YAAY;AACvC,WAAO;;AAET,MAAI,YAAY,WAAW,YAAY,aAAa,YAAY;AAC9D,WAAO;;AAET,MAAI,YAAY,UAAU,YAAY;AACpC,WAAO;;AAET,SAAO;;AAGH;AACJ,SAAO,aAAa,gBAAgB,aAAa,cAC7C,aAAa;;AAGb;AACJ,MAAI,UAAU,aAAa,UAAU;AACnC,WAAO;aACE,UAAU;AACnB,WAAO;aACE,UAAU;AACnB,WAAO;;AAEP,UAAM,IAAI,MAAM,iBAAiB;;;AAU/B;AACJ,MAAI,OAAO;AACT,WAAO;;AAET,cAAY;AACZ,MAAI,QAAQ,OAAK,SAAS,EAAE;AAC5B,SAAO;;AAIH;AACJ,SAAO,OAAO,UAAU,YAAY,iBAAiB;;AAGjD;AACJ,SAAO,OAAO,UAAU;;AAGpB;AACJ,SAAO,OAAO,UAAU;;AAGpB;AACJ,MAAI,MAAM,QAAQ;AAChB,WAAO,WAAW,OAAO;;AAE3B,MAAI,kBAAkB;AACpB,WAAO;aACE,kBAAkB,cAAc,kBAAkB;AAC3D,WAAO;aACE,SAAS;AAClB,WAAO;aACE,SAAS;AAClB,WAAO;aACE,UAAU;AACnB,WAAO;;AAET,SAAO;;AAGH;AACJ,SAAO,CAAC,CAAE,MAAK,EAAE,eAAe,EAAE,QAAQ,EAAE;;AAGxC;AACJ,eAAa,OAAO,IAAI,MAAM,EAAE;AAC9B,QAAI,OAAO,MAAM;AACf,aAAO;;;AAGX,SAAO;;AAGH;AACJ,eAAa,MAAM;AACnB,MAAI,OAAO;AACT,WAAO;;AAKT,kBAAgB,IAAI,MAAM,OAAO;AACjC,UAAQ,OAAO,KAAK,MAAM,OAAO;AACjC,eAAa,OAAO,GAAG,KAAK,GAAG,EAAE;AAC/B,YAAQ,KAAK,QAAQ,IAAI,KAAK,MAAM,IAAI;;AAE1C,SAAO;;AAGT;AACE,cAAY,IAAI;AAChB,MAAI,MAAM,WAAW;AACnB,cAAU,MAAM;AAChB,iBAAa,GAAG,IAAI,GAAG;AACrB,UAAI,KAAK,EAAE,SAAS;;;AAGtB,cAAU,MAAM;AAChB,iBAAa,MAAM,MAAM;AACzB,gBAAY,KAAK,OAAO,YAAY,MAAM;AAC1C,iBAAa,GAAG,IAAI,GAAG;AACrB,UAAI,KAAK,kBAAkB,SAAS,IAAI,KAAK,MAAM;;;AAGvD,SAAO;;AAIH;AACJ,MAAI,MAAM,WAAW;AAEnB,WAAO,EAAE;;AAEX,eAAa,MAAM,OAAO,YAAY,MAAM;AAC5C,MAAI,SAAS;AAEX,WAAO;;AAET,MAAI,SAAS,EAAE;AACb,UAAM,IAAI,MAAM,IAAI,wCAAwC,EAAE;;AAGhE,S
AAO,kBAAkB,GAAG,OAAO;;AAG/B;AAEJ,gBAAc,oBAAoB,MAAM;AACxC,eAAa,GAAG,IAAI,MAAM,QAAQ;AAChC,UAAM,KAAK;;AAEb,SAAO;;AAGH;AAEJ,MAAI,SAAS,QAAQ,UAAU,aAAa,UAAU;AACpD,WAAO,IAAI,aAAa;aACf,UAAU;AACnB,WAAO,IAAI,WAAW;aACb,UAAU;AACnB,WAAO,IAAI,WAAW;;AAEtB,UAAM,IAAI,MAAM,qBAAqB;;;AASnC;AAEJ,eAAa,MAAM,OAAO,gBAAgB,OAAO,MAAM;AACvD,MAAI,SAAS,QAAQ,UAAU;AAC7B,WAAO,cAAc,OAAO,IAAI,aAAa;aACpC,UAAU;AACnB,WAAO,cAAc,OAAO,IAAI,WAAW;aAClC,UAAU;AACnB,WAAO,cAAc,OAAO,IAAI,WAAW;;AAE3C,UAAM,IAAI,MAAM,qBAAqB;;;AAInC;AACJ,QAAM,QAAQ;AACZ,WACI,OAAO,UAAU,YAAY,WAAW,GACxC,MACI,0EACU;;;AAYhB;AAEJ,MAAI,SAAS;AACX,WAAO;aACE,SAAS;AAClB,WAAO,KAAK;;AAEd,cAAY,KAAK,KAAK,SAAS;AAC/B,eAAa,GAAG,IAAI,KAAK,SAAS,GAAG,EAAE;AACrC,aAAS,QAAQ,KAAK,KAAK;;AAE7B,SAAO;;AAWH;AAEJ,MAAI,SAAS;AACX,WAAO;aACE,SAAS;AAClB,WAAO,CAAC;;AAEV,eAAuB,IAAI,MAAM;AACjC,eAAa,GAAG,IAAI,KAAK,SAAS,GAAG,EAAE;AACrC,SAAK,KAAK,KAAK,MAAM,QAAQ,QAAQ;AACrC,aAAS,KAAK,KAAK,QAAQ;;AAE7B,OAAK,KAAK,SAAS,KAAK;AACxB,SAAO;;AAQH;AAOJ,SAAO,UAAU,OAAO,QAAQ,OAAO,OAAO,SAAS;;;;AC1rBzD;;;;;;;;;;;;;;;;AAqBA,MAAM,4BAA4B;AArBlC;EAkDE;AAAmB,SAAA,SAAA;AATX,SAAA,QAAe;AACf,SAAA,eAAwD;AAExD,SAAA,WAAkB;AAOxB,SAAK;;EAGP;AACE,QAAI,KAAK,YAAY;AACnB,cAAQ,KACJ,YAAY,KAAK,oEACgB;;AAEvC,SAAK,eAAe;AACpB,SAAK,WAAW;;EAGlB;AAGE,SAAK,aAAa,YAAY,CAAC,cAAc;AAI7C,QAAI,KAAK,SAAS,aAAa;AAC7B,wBAAkB,KAAK,SAAS;AAChC,cAAQ,KACJ,qCAAqC,aAAa;AACtD,WAAK,IAAI,UAAU;;;QAIjB;AACJ,QAAI,YAAY,KAAK;AACnB,aAAO,KAAK,MAAM;;AAGpB,SAAK,MAAM,YAAY,MAAM,KAAK,aAAa;AAC/C,WAAO,KAAK,MAAM;;EAGpB;AACE,QAAI,YAAY,KAAK;AACnB,aAAO,KAAK,MAAM;;AAGpB,sBAAkB,KAAK,aAAa;AACpC,QAAI,UAAU;AACZ,YAAM,IAAI,MACN,QAAQ;;AAId,SAAK,MAAM,YAAY;AAEvB,WAAO,KAAK,MAAM;;EAGpB;AACE,WAAO,KAAK,IAAI;;EAGlB;AACE,WAAO,KAAK,IAAI;;EAGlB;AACE,WAAO,KAAK;;MAGV;AACF,WAAO,KAAK;;EAGd;AACE,QAAI,KAAK,aAAa,aAAa;AACjC,YAAM,IAAI,MACN,mBAAmB;;AAEzB,SAAK,MAAM,YAAY;AACvB,QAAI,KAAK,aAAa,UAAU,WAAW;AACzC,WAAK,aAAa,UAAU,QAAQ;;;EAIhC;AACN,QAAI,KAAK,aAAa,aAAa;AACjC,YAAM,IAAI,MACN,yBAAyB;;AAE/B,WAAO,KAAK,aAAa,UAAU;;EAGrC;AACE,SAAK,QAAQ,OAAO,OAAO,IAAI;;EAGjC;AACE,SAAK,QAAQ;AACb,SAAK,WAAW;AAChB,SAAK;;EAGC;AACN,QAAI,OAAO,KAAK,WAAW,eACvB,OAAO,KAAK,OAAO,aAAa,eAChC,OAAO,KAAK,OAAO,SAAS,WAAW;AACzC;;AAGF,sBAAkB,eAAe,KAAK,OAAO,SAAS;AACtD,QAAI,6BAA6B;AAC/B,wBAAkB,UAAU,2BAA2B,MAAM;AAC7D,gBAAU,QAAQ;AAChB,6BAAqB,SAAS,MAAM;AACpC,aAAK,SAAS,OAAO,WAAW,KAAK;;;;;AAMvC;AACJ,iBAAe;AACf,cAAY,QAAQ,+BAA+B;AACjD,gBAAY,QAAQ,EAAE,IAAI,EAAE;AAC5B,WAAO,EAAE,KAAK;;AAEhB,SAAO;;AAGT;AAEE,SAAO,mBAAmB,SAAS,mBAAmB,SAAS;;AAGjE;AACE,UAAQ,MAAM;AACd,MAAI,UAAU,UAAU,UAAU;AAChC,WAAO,UAAU;aACR,GAAG,CAAE,YAAY;AAC1B,WAAO,CAAC;;AAEV,QAAM,IAAI,MACN,oCAAoC,kBAAkB;;AAWtD;AACJ,SAAO;;AAGF,UAAuB;AACxB;AACJ,QAAM;;;;AC/MR;;;;;;;;;;;;;;;;AAqBA,IAAI;AAEE;AACJ,MAAI,mBAAmB;AAErB;AACA,QAAI,OAAQ,WAAY;AACtB,WAAK;eACI,OAAQ,WAAY;AAC7B,WAAK;eACI,OAAQ,YAAa;AAC9B,WAAK;eACI,OAAQ,SAAU;AAC3B,WAAK;;AAEL,YAAM,IAAI,MAAM;;AAElB,sBAAkB;;AAEpB,SAAO;;AAIT;AACE,aAAW;AACX,MAAI,GAAG,cAAc;AACnB,OAAG,aAAa,IAAI;;AAEtB,SAAO,GAAG;;AAUN;AACJ,oBAAkB;AAClB,MAAI,UAAU,IAAI;AAChB,WAAO,UAAU,IAAI;;AAErB,sBAAkB;AAClB,cAAU,IAAI,KAAK;AACnB,WAAO,UAAU,IAAI;;;;;ACzClB,MAAM,MAAM;ACSZ,MAAM,MAAM;AAGZ,aAAa;ACiBb,MAAM,SAAS;AC2Bf,MAAM,UAAU;ACsChB,MAAM,cAAc;ACuBpB,MAAM,OAAO;ACSb,MAAM,cAAc;AAOpB,gBAAgB;AAGhB,eAAe;AAMf,eAAe;ACoBf,MAAM,sBAAsB;ACoC5B,MAAM,MAAM;ACMZ,MAAM,SAAS;AAQf,sBAAsB;AAStB,qBAAqB;AAOrB,8BAA8B;ACsD9B,MAAM,MAAM;AAGZ,YAAY;ACSZ,MAAM,QAAQ;AAGd,YAAY;ACSZ,MAAM,OAAO;AAOb,sBAAsB;ACMtB,MAAM,WAAW;AAGjB,uBAAuB;AAOvB,iBAAiB;AAMjB,iBAAiB;AAGjB,gBAAgB;AAGhB,qBAAqB;AAGrB,iBAAiB;ACkBjB,MAAM,OAAO;AAGb,kBAAkB;ACSlB,MAAM,MAAM;ACMZ,MAAM,aAAa;ACiCnB,MAAM,MAAM;AAOZ,gBAAgB;AAGhB,gBAAgB;ACyDhB,MAAM,MAAM;AAOZ,gBAAgB;ACahB,MAAM,WAA
W;AAGjB,eAAe;AAGf,iBAAiB;AAGjB,4BAA4B;AAS5B,4BAA4B;AAU5B,4BAA4B;AAU5B,iBAAiB;AAGjB,eAAe;AAQf,cAAc;ACUd,MAAM,MAAM;AAGZ,cAAc;ACwBd,MAAM,OAAO;AAGb,gBAAgB;ACiBhB,MAAM,iBAAiB;ACUvB,MAAM,QAAQ;AAGd,gBAAgB;ACShB,MAAM,QAAQ;AAGd,kBAAkB;AAMlB,iBAAiB;ACMjB,MAAM,QAAQ;AAMd,YAAY;ACSZ,MAAM,UAAU;ACMhB,MAAM,OAAO;AAGb,YAAY;ACcZ,MAAM,SAAS;AAOf,gBAAgB;AAMhB,0BAA0B;AAG1B,eAAe;AAGf,YAAY;ACUZ,MAAM,eAAe;ACgBrB,MAAM,OAAO;AAGb,aAAa;ACab,MAAM,YAAY;ACclB,MAAM,SAAS;ACaf,MAAM,YAAY;AAMlB,aAAa;ACeb,MAAM,mBAAmB;AAQzB,qBAAqB;AAerB,oBAAoB;AAgBpB,6BAA6B;;;ACt1BpC;;;;;;;;;;;;;;;;AAuBA,MAAM,iBACF,UAAU,kBAAkB,MAAM,IAAI;AAC1C,qBACI,UAAU,gBAAgB,MAAM,IAAI;AAoElC;AAEJ,cAAY,QAAQ,YAAY;AAChC,SAAO,eAAe,IAAI;;AAOtB;AACJ,SAAO,aAAa,IAAI;;AAGpB;AACJ,aAAW,eAAe;AAC1B,iBAA+B;AAE/B,SAAO;AACL,WAAO,MAAM,SAAS,GAAG;AACzB,QAAI;AACF;;AAEF,2BAAsB;AACtB,uBAAoB,IAAI,MAAM;AAC9B,QAAI,aAAY;AACd,aAAO,KAAK;;;AAGhB,SAAO;;AAcH;AACJ,SAAO,YAAY,eAAe;AAClC,cAAY,QAAQ,YAAY;AAChC,MAAI,eAAe,IAAI;AACrB,YAAQ,KACJ,eAAe,4BACX;;AAEV,iBAAe,IAAI,KAAK;;ACmE1B,iBAAiB;AACf,SAAO,GAAG,eAAe;;;;ACrN3B,MAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACAA;;;;;;;;;;;;;;;;AAyBM,2BACF;AACF,MAAI,UAAU;AACZ,WAAO,aAAa;;AAGtB,SAAO,aAAa,CAAC,QAAQ;;AAG/B;AACE,SAAQ,aAAa,gBAAgB,UAAU,aAC1C,aAAa,cAAc,UAAU,WACrC,aAAa,cAAc,UAAU;;AAGtC;AACJ,MAAI,UAAU;AACZ,UAAM,IAAI,MAAM;;AAElB,MAAI,MAAM,QAAQ;AAChB,QAAI,AAAK,QAAQ;;AAGnB,MAAI,MAAM,QAAQ;AAChB,IAAK,yBAAyB,GAAe;;AAE/C,MAAI,mBAAmB,GAAG;AACxB,WAAO;;AAET,MAAI,SAAS,QAAQ,UAAU,aAAa,UAAU;AACpD,WAAO,IAAI,aAAa;aACf,UAAU;AACnB,WAAO,IAAI,WAAW;aACb,UAAU;AACnB,iBAAa,IAAI,WAAY,EAAe;AAC5C,iBAAa,GAAG,IAAI,KAAK,QAAQ,EAAE;AACjC,UAAI,KAAK,MAAO,EAAe,QAAQ;AACrC,aAAK,KAAK;;;AAGd,WAAO;;AAEP,UAAM,IAAI,MAAM,qBAAqB;;;AAenC;AACJ,SAAO,MAAM,SAAS;;AAmBlB;AAEJ,SAAO,MAAM,SAAS,MAAM,MAAM;;AAW9B,oCAA6C;AACjD,aAAW,YAAY;AACvB,SAAO,MAAM,SAAS,OAAO,GAAG;;AAW5B,wCAAqD;AACzD,aAAW,YAAY;AACvB,SAAO,MAAM,SAAS,OAAO,OAAO;;;;AClItC;;;;;;;;;;;;;;;;;EAgCE,YAAoB;AAAA,SAAA,eAAA;AAAoC,SAAA,SAAA;AACtD,QAAI,UAAU;AACZ,WAAK,SAAS,IAAI;;;EAItB;AAEE;AACA,gCAA4B;AAC1B,gBAAU;;AAEZ,kBAAc,KAAK,aAAa,KAAK;AAErC,iBAAa,GAAG,IAAI,QAAQ,QAAQ;AAClC,qBAAe,QAAQ;AAGvB,aAAO,OAAO,KAAK;AACjB,kCAA0B,YAAY,OAAO,OAAO;;;AAIxD,0BAAsB;MACpB;MACA;MACA;MACA,QAAQ,MAAM,KAAK,YAAU,OAAO;MACpC,WAAW,MAAM,KACb,YAAU,OAAO,uBAAuB,OACpC,OAAO,wBACP;;AAEV,WAAO;;EAGT;AACE,WAAO,YAAY,SAAS,QAAQ,QAAQ,aAAa;AAEzD,YAAQ,QAAQ;AACd,cAAQ,IAAI,CAAC,OAAO,QAAQ,QAAQ,YAAY,KAAK;AACnD,aAAK,OAAO,iBACR,YAAY,QAAQ,eAAe,IAAI,eAAe,IAAI,QAC1D,eAAe;;;;;AAMrB;AAEJ,MAAI,UAAU;AAEZ,WAAO;;AAET,eAAa,GAAG,IAAI,KAAK,QAAQ;AAC/B,gBAAY,KAAK;AACjB,QAAI,MAAM,QAAQ,CAAC,SAAS;AAE1B,cAAQ,KAAK,SAAS,yBAAyB;AAC/C,aAAO;;;AAGX,SAAO;;AA/FT;EAmGE;AAIE,iBAAa,OAAO,WAAW,WAAW,AAAK,SAAS,GAAG,YAAY,KAC7B,OAAO;AACjD,uBAAmB,AAAK,SAAS,MAAM;AACvC,iBAAa,OAAO;AACpB,iBAAa,OAAO;AACpB,kBAAc,AAAK,SAAS,OAAO,MAAM,YAAY;AACrD,iCAA6B;AAE7B,wBAAmB;AACjB,oBAAc,OAAO;AACrB,UAAI,SAAS;AAGX,2BAAmB,MAAM,SAAS,OAAO;AACzC,0BAAkB,WAAW;AAC7B,kCACI,GAAG,UAAS,cAAc,YAAY,IAAI,aAAa;;;AAI/D,YAAQ,IACJ,KAAK,gBAAiB,UAAW,SAAS,WAAY,UAClD,4BAA6B,aACjC,oBAAoB,aAAa,cAAc,iBAC/C,gBAAgB;;;;;AC/HxB;;;;;;;;;;;;;;;;AA2CM,8BACF;AAGF,uBAAoD;AACpD,qBAAgD;AAChD,eAAa,GAAG,IAAI,GAAG,QAAQ;AAC7B,iBAAa,GAAG,GAAG,MAAM;;AAG3B,eAAa,GAAG,IAAI,MAAK,QAAQ;AAC/B,iBAAa,MAAK;AAClB,uBAAmB,KAAK;AACxB,4BAAwB;AACtB,oBAAc,WAAW;AAEzB,0BAAoB;AACpB,mBAAa,GAAG,IAAI,GAAG,QAAQ;AAC7B,YAAI,aAAa,MAAM;AACrB,eAAK,QAAQ,QAAQ,YAAU,aAAa,OAAO,MAAM;AACzD,0BAAgB;AAChB,qBAAW,KAAK,MAAM;AACtB;;;AAIJ,UAAI;AACF;;;;AAMN,yBAAsD;AACtD,iBAAe,EAAE,MAAM;AACvB,mBAA8C;AAE9C,eAAa,MAAK,SAAS,GAAG,KAAK,GAAG;AACpC,iBAAa,MAAK;AAClB,uBAAmB,KAAK;AAGxB,iBAAa,GAAG,IAAI,KAAK,QAAQ,QAAQ;AACvC,UAAI,eAAe,KAAK,QAAQ,GAAG
;AACjC,gCAAwB;AACtB,yBAAe,WAAW,WAAW,MAAM;AAC3C,mBAAS,KAAK,MAAM;;AAEtB;;;;AAMN,uBAAiC;AACjC,eAAa,GAAG,IAAI,MAAK,QAAQ;AAC/B,iBAAa,MAAK;AAElB,QAAI,WAAW,KAAK,OAAO,SAAS,KAAK;AAEvC,2BAAoD;AACpD,8BAAwB,KAAK;AAC3B,0BAAkB,KAAK,OAAO;AAC9B,YAAI,aAAa,UAAU;AACzB,uBAAa,aAAa;;;AAK9B,yBAAmB,OAAO,OAAO,IAAI;AACrC,iBAAW,SAAS;AACpB,iBAAW,UAAU,KAAK;AAE1B,mBAAa,KAAK;;;AAItB,SAAO;;AAUH;AAKJ,eAAa,aAAa,SAAS,GAAG,KAAK,GAAG;AAC5C,iBAAa,aAAa;AAE1B,gBAAsB;AACtB,SAAK,QAAQ,QAAQ;AACnB,yBAAmB,6BAA6B,EAAE;AAClD,UAAI,cAAc;AAChB,YAAI,KAAK;;AAIT,YAAI,KAAK;;;AAIb,QAAI,KAAK,YAAY;AACnB,YAAM,IAAI,MACN,4DACO,KAAK;;AAIlB,2BAAuB,KAAK,SAAS;AAErC,4BAAwB,KAAK;AAC3B,UAAI,CAAE,cAAa;AACjB,cAAM,IAAI,MACN,iCAAiC,yCACH,OAAO,KAAK;;AAIhD,iBAAW,KAAK,MAAM,eAAe;AACrC,UAAI,GAAG,UAAU;AACf,cAAM,IAAI,MACN,4BACI,KAAK,qCACN,iDAAiD,GAAG;;AAE7D,gBAAU,KAAK,OAAO;AACtB,UAAI,CAAC,AAAK,YAAY,GAAG,OAAO,EAAE;AAChC,cAAM,IAAI,MACN,4BACI,KAAK,sCACL,yBAAyB,GAAG,wDACL,EAAE;;AAGnC,UAAI,6BAA6B,EAAE,OAAO;AACxC,qCAA6B,EAAE,MAAM;;AAErC,4BAAoB,6BAA6B,EAAE;AACnD,qCAA6B,EAAE,MAAM,KAAI,aAAa;AACtD,oBAAY;;;;;;;AC5LpB;;;;;;;;;;;;;;;;AAqBA,MAAM,wBAAwB;AAE9B,mCAAmC;AAEnC,8BAA8B;AAExB;AAGJ,kBAAgB,eAAe;AAC/B,oBAAkB,wBAAwB,MAAM,OAAO,OAAO;AAC9D,eAAa,MAAM;AACnB,oBAAkB,kBAAkB,MAAM,OAAO,OAAO,SAAS;AACjE,gBAAc,CAAC;AACf,MAAI;AACF,UAAM,KAAK,YAAY;AACvB,UAAM,KAAK,WAAW;AACtB,UAAM,KAAK,aAAa;AACxB,UAAM,KAAK;;AAEb,QAAM,KAAK,UAAU,IAAI,OAAK,SAAS,GAAG,KAAK;AAC/C,SAAO,MAAM,KAAK;;AAGpB;AAGE,YAAU,cAAc;AACxB,kBAAgB,QAAQ,QAAQ,SAAS;AACzC,oBAAkB,IAAI,MAAM,SAAS,KAAK;AAC1C,eAAa,MAAM;AACnB,yBACI,UAAU,cAAc,oBAAoB,QAAQ;AAExD,MAAI,OAAO;AACT,mBAAe,GAAG,MAAM,IAAI,SAAS;AACnC,qBAAe,MAAM;AACrB,mBAAa,GAAG,IAAI,SAAS;AAC3B,kBAAU,KAAK,KAAK,IAChB,UAAU,IACV,YAAY,eAAe,SAAS,IAAI,GAAG,OAAO;;;;AAI5D,SAAO;;AAGT;AAEE;AACA,MAAI,MAAM,QAAQ;AAChB,aAAS,GAAG,WAAW,IAAI,GAAG,QAAQ,6BAC/B,WAAW,IAAI,GAAG,QAAQ;aACxB,SAAS;AAClB,aAAS,IAAI;aACJ,UAAU;AACnB,aAAS,gBAAgB;;AAEzB,aAAS,WAAW,IAAI,QAAQ,wBAAwB;;AAG1D,SAAO,SAAS,QAAQ;;AAG1B;AACE,SAAO,MAAM,IAAI,UAAU;;AAG7B,4EAEqD;AACnD,4BAA0B,UAAU,cAAc,IAAI;AAEtD,eAAa,MAAM;AACnB,eAAa,MAAM;AACnB,MAAI,SAAS;AACX,QAAI,UAAU;AACZ,2BAAqB,oBAAoB;AACzC,aAAO,CAAC,YAAY,aAAa,IAAI,GAAG;;AAE1C,QAAI,UAAU;AACZ,aAAO,CAAC,gBAAgB,KAAK;;AAE/B,WAAO,CAAC,KAAK,GAAG;;AAGlB,MAAI,SAAS;AACX,QAAI,OAAO;AACT,4BAAsB,6BAA6B;AAEnD,sBAAgB,MAAM,KAClB,KAAK,MAAM,GAAG;AAClB,qBAAe,MAAM,KAAqC,KAAK,MAC1D,QAAO,8BAA8B,mBACtC,OAAO;AACX,UAAI,UAAU;AACZ,oBAAY,oBAAoB;AAChC,mBAAW,oBAAoB;;AAEjC,aAAO;QACL,MACA,UAAU,IAAI,UAAU,YAAY,GAAG,UAAU,IAAI,QAChD,KAAK,QACV,YACA,SACK,IACG,UAAU,YACN,GAAG,UAAU,OAAO,6BAA6B,IAAI,QAC5D,KAAK,QACV;;;AAGJ,wBACI,UAAU,cAAc,oBAAoB,QACpB,MAAM,KAAoB;AAEtD,WAAO;MACL,MACA,YAAY,IAAI,UAAU,YAAY,GAAG,UAAU,IAAI,QAClD,KAAK,QACV;;;AAKJ,mBAAiB,MAAM,MAAM;AAC7B,qBAAmB,QAAQ,MAAM;AACjC,iBAAe,QAAQ,KAAK;AAC5B,gBAAwB;AACxB,MAAI,OAAO;AACT,iBAAa,GAAG,IAAI,4BAA4B;AAC9C,oBAAc,IAAI;AAClB,kBAAY,QAAQ;AACpB,YAAM,KAAK,GAAG,kBACV,KAAK,MAAM,OAAO,MAAM,UAAU,OAAO,YAAY,WACrD;;AAEN,UAAM,KAAK;AACX,iBAAa,OAAO,4BAA4B,IAAI,MAAM;AACxD,oBAAc,IAAI;AAClB,kBAAY,QAAQ;AACpB,YAAM,KAAK,GAAG,kBACV,KAAK,MAAM,OAAO,MAAM,UAAU,OAAO,YAAY,WACrD,MAAM,OAAO;;;AAGnB,iBAAa,GAAG,IAAI,MAAM;AACxB,oBAAc,IAAI;AAClB,kBAAY,QAAQ;AACpB,YAAM,KAAK,GAAG,kBACV,KAAK,MAAM,OAAO,MAAM,UAAU,OAAO,YAAY,WACrD,MAAM,OAAO;;;AAGrB,cAAY,SAAS,IAAI,MAAM;AAC/B,QAAM,KAAK,MAAM,MAAM,KAAK;AAC5B,eAAa,GAAG,IAAI,MAAM,SAAS,GAAG;AACpC,UAAM,KAAK,MAAM,MAAM,KAAK;;AAE9B,mBAAiB;AACjB,eAAa,GAAG,IAAI,MAAM;AACxB,kBAAc;;AAEhB,QAAM,MAAM,SAAS,KACjB,MAAM,MAAM,MAAM,SAAS,KAAK,MAAO,UAAS,KAAK;AACzD,SAAO;;AAGT;AAEE,wBAA+C;AAC/C,eAAa,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,kBAAc,KAAK,CAAC,KAAK,IAAI,KAAK,IAAI;;AAExC,SAAO;;;;ACnMT;;;;;;;;;;;;;;;;;EA6CE,YAAY;
AAA2B,SAAA,QAAA;AACrC,SAAK,QAAQ,MAAM;AACnB,SAAK,OAAO,AAAK,cAAc;AAE/B,QAAI,UAAU;AACZ,gBAAU,OAAO;AACjB,MAAK,OACD,MAAM,KAAK,MACX,MAAM,qBAAqB,qDACG,KAAK;;AAEzC,QAAI,UAAU;AACZ,YAAM,IAAI,MACN;;AAIN,SAAK,SAAS,UAAU,AAAK,kBAAkB,OAAO,KAAK;AAC3D,SAAK,UAAU,eAAe;;EAWhC;AACE,QAAI,KAAK,WAAW;AAClB,aAAO,CAAC;;AAEV,IAAK,OACD,KAAK,WAAW,KAAK,MACrB,MAAM,uCAAuC,KAAK,gCAC3B,KAAK;AAEhC,kBAAc,KAAK,WAAW;AAC9B,SAAK,OAAO,SAAS;;EAUvB;AACE,QAAI,KAAK,WAAW;AAClB,aAAO,CAAC;;AAEV,YAAQ;AACR,sBAAkB;AAChB,UAAI,MAAM,KAAK,OAAO,KAAK,MAAM;AAC/B,oBAAY,qCAAqC,wBAC3B,KAAK;AAC3B,cAAM,IAAI,MAAM;;AAElB;;AAEF,gBAAY,KAAK,KAAK,SAAS;AAC/B,kBAAa,GAAG,KAAI,KAAK,SAAS,GAAG,EAAE;AACrC,eAAS,KAAK,QAAQ,MAAK,KAAK;;AAElC,WAAO,KAAK,OAAO;;EAGrB;AACE,QAAI,KAAK,SAAS;AAChB,aAAO;eACE,KAAK,SAAS;AACvB,aAAO,KAAK;;AAEd,gBAAY,KAAK,KAAK,SAAS;AAC/B,iBAAa,GAAG,IAAI,KAAK,SAAS,GAAG,EAAE;AACrC,eAAS,KAAK,QAAQ,KAAK,KAAK;;AAElC,WAAO;;EAGT;AACE,QAAI,KAAK,SAAS;AAChB,aAAO;eACE,KAAK,SAAS;AACvB,aAAO,CAAC;;AAEV,iBAAuB,IAAI,MAAM,KAAK,MAAM;AAC5C,iBAAa,GAAG,IAAI,KAAK,SAAS,GAAG,EAAE;AACrC,WAAK,KAAK,KAAK,MAAM,QAAQ,KAAK,QAAQ;AAC1C,eAAS,KAAK,KAAK,KAAK,QAAQ;;AAElC,SAAK,KAAK,SAAS,KAAK;AACxB,WAAO;;MAGL;AACF,WAAO,KAAK,MAAM;;EAQpB;AACE,WAAO,YAAY,WAAW,KAAK,QAAQ,KAAK,OAAO,KAAK;;;AAiChE,gBAAqC;AAErC,gBAA2B;AAE3B,2BAAkD;ACW5C,0BAA2B;AAC/B,cAAY;;ACgBR,iCAAkC;AACtC,yBAAuB;;AA1NzB;EA8QE;AAXA,SAAA,OAAO;AAoIG,SAAA,qBAAqB;AAxH7B,SAAK,QAAQ,MAAM;AACnB,SAAK,QAAQ,SAAS;AACtB,SAAK,OAAO,AAAK,cAAc;AAC/B,SAAK,UAAU,eAAe;AAC9B,SAAK,SAAS;AACd,SAAK,KAAK;AACV,SAAK,WAAY,KAAK,OAAO,IAAI,KAAK,KAAK,aAAa;;MAGtD;AACF,WAAO,KAAK,MAAM;;QAQd;AACJ,iBAAa,MAAM,KAAK;AACxB,WAAO,UAAU,OAAO,KAAK,OAAO,KAAK,OAAY;;EAOvD;AACE,WAAO,UAAU,OAAO,KAAK,OAAO,KAAK,OAAY,KAAK;;QAStD;AACJ,iBAAa,MAAM,KAAK;AACxB,WAAO,cAAc,KAAK,OAAO;;EASnC;AACE,WAAO,cAAc,KAAK,OAAO,KAAK;;QASlC;AACJ,SAAK;AACL,kBAAa,YAAY,KAAK,KAAK;AACnC,QAAI,KAAK,UAAU;AACjB,oBAAc,MAAM;AACpB;AACE,eAAO,MAAM,IAAI,OAAK,AAAK,aAAa;;AAExC,cAAM,IAAI,MACN;;;AAIR,WAAO;;EAST;AACE,SAAK;AACL,kBAAa,YAAY,SAAS,KAAK;AACvC,QAAI,KAAK,UAAU;AACjB;AACE,eAAQ,MAAsB,IAAI,OAAK,AAAK,aAAa;;AAGzD,cAAM,IAAI,MACN;;;AAIR,WAAO;;QAIH;AACJ,SAAK;AACL,kBAAa,MAAM,YAAY,KAAK,KAAK;AACzC,QAAI,KAAK,UAAU;AACjB,aAAO;;AAEP,aAAO,IAAI,WAAY,MAAoB;;;EAS/C;AACE,QAAI,KAAK;AACP;;AAEF,gBAAY,cAAc;AAC1B,SAAK,qBAAqB;;MAIxB;AACF,WAAO,KAAK;;EAGd;AACE,QAAI,KAAK;AACP,YAAM,IAAI,MAAM;;;EAYpB,gBAAgB;AACd,WAAO,UAAU,MAAM,MAAM;;EAO/B;AACE,SAAK;AACL,WAAO,UAAU,MAAM;;EAQzB,mBAAmB;AACjB,iBAAa,KAAK;AAClB,WAAO,eAAe,MAAM,KAAK,OAAO,KAAK,OAAO;;EAGtD;AACE,SAAK;AACL,WAAO,UAAU,KAAK,MAAW;;EAEnC,qBAAqB;AACnB,SAAK;AACL,WAAO,YAAY,aAAa,MAAM,WAAW,MAAM;;;AAI3D,OAAO,eAAe,QAAQ,OAAO,aAAa;EAChD,OAAO;AAML,WAAO,CAAC,CAAC,YAAY,SAAS,QAAQ,QAAQ,SAAS,YAAY,QAC/D,SAAS,mBAAmB;;;AAncpC,uBAueqD;EAGnD;AAGE,UACI,aAAa,OAAO,aAAa,OAAO,aAAa,QAAQ;AAH/B,SAAA,YAAA;AAIlC,SAAK,OAAO;;EAWd;AACE,QAAI,SAAS,UAAU,KAAK;AAC1B,YAAM,IAAI,MACN,2BAA2B,SAAS,8BACjB,KAAK;;AAE9B,QAAI,CAAC,AAAK,YAAY,SAAS,OAAO,KAAK;AACzC,YAAM,IAAI,MACN,2BAA2B,SAAS,8BACjB,KAAK;;AAE9B,gBAAY,cAAc;AAC1B,SAAK,SAAS,SAAS;AACvB,gBAAY,OAAO,MAAM;;EAG3B;AACE,gBAAY,gBAAgB;AAC5B,SAAK,qBAAqB;;;AAI9B,OAAO,eAAe,UAAU,OAAO,aAAa;EAClD,OAAO;AACL,WAAO,oBAAoB,UAAU,SAAS,UAAU,QACpD,SAAS,kBAAkB;;;;;ACnhBnC;;;;;;;;;;;;;;;;AAgEA,IAAY;AAAZ,AAAA;AACE,QAAA,QAAA;AACA,QAAA,QAAA;AACA,QAAA,QAAA;AACA,QAAA,QAAA;AACA,QAAA,QAAA;AACA,QAAA,QAAA;AACA,QAAA,QAAA;GAPU,QAAA,QAAI;AAqBhB;AAAA,AAAA;AACE,qBAAA,aAAA;AACA,qBAAA,WAAA;AACA,qBAAA,UAAA;AACA,qBAAA,eAAA;GAJG,qBAAA,qBAAiB;AAOtB;AAAA,AAAA;AACE,oBAAA,aAAA;AACA,oBAAA,WAAA;AACA,oBAAA,UAAA;AACA,oBAAA,eAAA;GAJG,oBAAA,oBAAgB;AAOrB;AAAA,AAAA;AACE,uBAAA,aAAA;AACA,uBAAA,WAAA;AACA,uBAAA,UAAA;AACA,uBAAA,eAAA;GAJG,uBAAA,uBAAmB;AAOxB
;AAAA,AAAA;AACE,yBAAA,aAAA;AACA,yBAAA,WAAA;AACA,yBAAA,UAAA;AACA,yBAAA,eAAA;GAJG,yBAAA,yBAAqB;AAO1B,sBAAsB;EACpB,SAAW;EACX,OAAS;EACT,MAAQ;EACR,WAAa;;AAGT;AACJ,MAAI,UAAU,YAAY,UAAU;AAClC,QAAI,UAAU,YAAY,UAAU;AAClC,aAAO;;AAET,UAAM,IAAI,MAAM,kBAAkB,cAAc;;AAElD,SAAO,cAAc,OAAO;;;;AC/H9B;;;;;;;;;;;;;;;;AAsBM,wBAA2C;AAC/C,MAAI,EAAE,UAAU,EAAE;AAChB,WAAO,CAAC,GAAG;;AAEb,gBAAc,WAAW,EAAE,OAAO,EAAE;AACpC,SAAO,CAAC,EAAE,KAAK,QAAQ,EAAE,KAAK;;AC0B1B,+BAAgC;AACpC,eAAuB;AACvB,eAAa,IAAI;AACjB,sBAAoB,QAAQ,MAAM;AAClC,SAAO;;AAGT;AAEE,MAAI,aAAa;AACf;;AAEF,MAAI,qBAAqB;AACvB,SAAK,KAAK;AACV;;AAEF,MAAI,CAAC,WAAW;AACd;;AAGF,mBAAiB;AACjB,kBAAgB;AACd,gBAAY,SAAS;AACrB,QAAI,CAAC,KAAK,IAAI;AACZ,WAAK,IAAI;AACT,0BAAoB,KAAK,MAAM;;;;AAMrC;AACE,SAAO,MAAM,QAAQ,QAAQ,OAAO,QAAQ;;;;ACrF9C;;;;;;;;;;;;;;;;;EAmFA;AAEE,SAAA,sBAAwC;AAExC,SAAA,iBAAiB;AACjB,SAAA,WAAW;AACX,SAAA,aAAa;AACb,SAAA,mBAAmB;AACnB,SAAA,iBAAiB;AAMjB,SAAA,gBAAgB;AAGhB,SAAA,cAAc;AAId,SAAA,aAA2B;AAK3B,SAAA,oBAA8B;AAC9B,SAAA,cAAc;AAEd,SAAA,aAAa,IAAI;AAQjB,SAAA,YAAY;AACZ,SAAA,gBACI,CAAC,UAAU,GAAG,YAAY,GAAG,WAAW,GAAG,SAAS,IAAI,QAAQ;;EAEpE;AACE,eAAW,gBAAgB,KAAK;AAC9B,WAAK,oBAAoB,cAAc;;;;AA9H7C;EAmJE;AAAmB,SAAA,MAAA;AAbnB,SAAA,WAA0C;AAC1C,SAAA,kBAKI;AAKI,SAAA,uBAAuB;AAG7B,SAAK,QAAQ,IAAI;;QAGb;AACJ,QAAI,KAAK,sBAAsB;AAC7B,aAAO,KAAK,mBAAmB,KAAK;;;AAEtC,QAAI,KAAK,mBAAmB;AAC1B;;AAEF,2BAAuB,KAAK;AAE5B,iBAAa,GAAG,IAAI,eAAe,QAAQ;AACzC,0BAAoB,eAAe;AACnC,sBAAgB,MAAM,KAAK,kBAAkB,aAAa;AAC1D,UAAI;AACF,cAAM,KAAK,WAAW;AACtB;;;AAIJ,UAAM,IAAI,MACN;;MAIF;AACF,QAAI,KAAK,sBAAsB;AAC7B,YAAM,IAAI,MACN,YAAY,KAAK;;AAIvB,QAAI,KAAK,mBAAmB;AAC1B,aAAO,MAAM,aAAa,KAAK;AAC/B,UAAI;AACF,cAAM,IAAI,MACN,iCAAiC;;AAIvC,WAAK,WAAW;;AAElB,WAAO,KAAK;;EAGd;AACE,WAAO,OAAO,KAAK,KAAK;;EAG1B;AACE,QAAI,CAAE,gBAAe,KAAK;AAGxB,UAAI,eAAe,KAAK;AACtB,eAAO,aAAa,KAAK,kBAAkB;AAC3C,YAAI;AAEF,iBAAO;;;AAGT,eAAO;;;AAGX,WAAO,KAAK,SAAS;;EAGvB;AAEE,QAAI,CAAE,gBAAe,KAAK;AACxB,aAAO;;AAET,WAAO,KAAK,gBAAgB,aAAa;;EAG3C,iDAGe;AACb,QAAI,eAAe,KAAK;AACtB,cAAQ,KACJ,GAAG;AAEP,aAAO;;AAET,SAAK,gBAAgB,eAAe,CAAC,SAAS;AAC9C,WAAO;;QAGH;AACJ,QAAI,KAAK,gBAAgB,gBAAgB;AACvC,YAAM,IAAI,MAAM,iBAAiB;;AAEnC,SAAK,cAAc;AACnB,QAAI,KAAK,SAAS,gBAAgB;AAChC,WAAK,kBAAkB;AACvB,aAAO,SAAS,aAAa,KAAK,kBAAkB;AACpD,qBAAe,YAAY,MAAM,UAAU;AAC3C,UAAI,CAAC;AACH,eAAO;;;AAGX,SAAK,kBAAkB,KAAK,SAAS;AACrC,SAAK;AAEL,SAAK,WAAW,IAAI,SAAS,KAAK;AAElC,WAAO;;EAGD;AACN,oBAAgB,qBAAqB,KAAK;AAC1C,YAAQ,QAAQ;AACd,UAAI,OAAO,aAAa;AACtB,eAAO,UAAU,KAAK;;;;EAKpB;AACN,oBAAgB,qBAAqB;AACrC,YAAQ,QAAQ;AACd,UAAI,OAAO,eAAe;AACxB,eAAO,YAAY,KAAK,SAAS;;;;EAW/B;AAEN,iCAA6B,KAAK,gBAAgB;AAClD,QAAI,wBAAwB;AAC1B,YAAM,IAAI,MACN,6BAA6B;;AAGnC;AACE,uBAAgB,qBAAqB;AAMrC,UAAI,YAAW,CAAE,qBAAmB,kBAC7B,OAAO,SAAQ,SAAS;AAC7B,0BAAkB,EAAE,KAAK;AACzB,wBACI,SACK,KAAK;AAEJ,cAAI,YAAY,KAAK;AACnB,mBAAO;;AAET,eAAK,SAAS,eAAe;AAC7B,eAAK,qBAAqB;AAC1B,iBAAO;WAER,MAAM;AAEL,cAAI,YAAY,KAAK;AACnB,mBAAO;;AAET,eAAK,qBAAqB;AAC1B,kBAAQ,KACJ,6BAA6B;AACjC,kBAAQ,KAAK,IAAI,SAAS,IAAI;AAC9B,iBAAO;;AAEjB,aAAK,qBAAqB;AAC1B,eAAO,CAAC,SAAS,WAAW;;AAE5B,aAAK,SAAS,eAAe;AAC7B,eAAO,CAAC,SAAS,MAAM,WAAW;;;AAGpC,cAAQ,KAAK,6BAA6B;AAC1C,cAAQ,KAAK,IAAI,SAAS,IAAI;AAC9B,aAAO,CAAC,SAAS,OAAO,WAAW;;;EAIvC;AACE,QAAI,CAAE,gBAAe,KAAK;AACxB,YAAM,IAAI,MAAM,GAAG;;AAErB,QAAI,KAAK,gBAAgB,eAAe,KAAK,sBAAsB;AAGjE,WAAK;;AAGP,QAAI,eAAe,KAAK;AACtB,WAAK,yBAAyB;AAC9B,WAAK,SAAS,aAAa;AAC3B,aAAO,KAAK,SAAS;;AAGvB,WAAO,KAAK,gBAAgB;AAG5B,QAAI,KAAK,gBAAgB;AACvB,WAAK,qBAAqB;AAC1B,WAAK,cAAc;AACnB,WAAK,kBAAkB;;;EAInB;AACN,QAAI,OAAO,KAAK,KAAK,iBAAiB,WAAW;AAC/C,YAAM,IAAI,MAAM;;AAElB,WAAO,OAAO,KAAK,KAAK,iBAAiB,KAAK;AAE5C,aAAO,KAAK,gBAAgB,GAAG,WAC3B,KAAK,gBAAgB,GAAG;;;EAIxB;AAEN,2BAAuB,KAAK;AAE5B,iBAAa,GAAG,
qBAAmB,OAAO,cAAc,WACpC,CAAC,WAAW,WAAW,WAAW,cAAc,IAAI,oBACpD,CAAC,GAAG,WAAW;AACnB,oBAAkB,IAAI,WAAW,IAAI,WAAW,YAAY;AAE5D,aACI,SAAS,OAAO,aAAa,YAAY,aAAa,SAAS,SAC/D,SAAS,WAAW,WAAW,QAAQ;AAC3C,SAAO;;AAGF,+BAA6C;EAClD,YAAY;EACZ,aAAa;EACb,YAAY;EACZ,WAAW;;;;AC/Eb;;;;;;;;;;;;;;;;AAmBO,MAAM,cAA4B,wBAAwB;;;ACnBjE;;;;;;;;;;;;;;;;AAuBA,IAAI;AAKJ;AACE,kBAAgB,SAAQ,KAAK,MAAM,WAAW,MAAe;IAC3D;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;;;AAIJ;AAIE,SAAO,mBAAS,QAAQ,SAAS;AACjC,SAAO,SAAS,WAAW;AAC3B,SAAO,SAAS;AAEhB,cAAY,SAAQ,WAAW,OAAO,QAAQ;AAC9C,MAAI,aAAK,cAAc,WAAW;AAChC,WAAO;;AAGT,SAAO,WAAW,YAAY,WAAW,SAAS,cAC9C,wBAAa,gBAAgB,SAAS,SAAS;AAEnD,sBAAoB,SAAQ,UAAU,IAAI,QAAQ;AAClD,oBAAkB,YAAY;AAE9B,sBAAoB,SAAQ,UAAU,IAAI,QAAQ;AAClD,oBAAkB,YAAY;AAE9B,uBAAqB,IAAI,WAAW,IAAI,WAAW,SAAS;AAE5D,gBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAChD,gBACI,WAAW,WAAW,SAAS,QAAQ,QAAQ,WAAW,YAC1D,WAAW,cAAc,YAAY;AAEzC,SAAO;;AAGF,wBAAsC;EAC3C,YAAY;EACZ,aAAa;EACb,WAAW;EACX,YAAY;;;;AC9Ed;;;;;;;;;;;;;;;;AAqBA,IAAI;AAIJ;AACE,eAAa,SAAQ,KAAK,MAAM,UAAU,MAAM;IAC9C;IACA;IACA;IACA;IACA;;;AAIJ;AACE,SAAO,QAAQ,qBAAW;AAC1B,SAAO,WAAW,GAAG,KAAK;AAE1B,sBAAoB,SAAQ,UAAU,IAAI,UAAU,QAAQ;AAC5D,cAAY,SAAQ,UAAU,IAAI,EAAE,QAAQ;AAC5C,cAAY,SAAQ,UAAU,IAAI,EAAE,QAAQ;AAC5C,cAAY,SAAQ,WAAW,EAAE,OAAO,EAAE;AAC1C,gBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAEhD,gBAAc,UAAU,MAAM;AAC9B,gBAAc,EAAE,MAAM;AAEtB,iBAAe,UAAU,KAAK,QAAQ,KAAK,UAAU,IACjD,IACA,aAAK,cAAc,EAAE,MAAM,MAAM;AAErC,aAAW,aAAa,KAAK,KAAK,QAAQ;AAC1C,SAAO;;AAGF,uBAAqC;EAC1C,YAAY;EACZ,aAAa;EACb,YAAY;EACZ,WAAW;;;;AC5Db;;;;;;;;;;;;;;;;AAqBA,IAAI;AAEJ;AACE,cAAW,SAAQ,KAAK,MAAM,SAAS,MAAiB,CAAC,UAAU;;AAGrE;AAEE,SAAO,mBAAS,SAAS,MAAM;AAC/B,cAAY,SAAQ,UAAU,IAAI,EAAE,QAAQ;AAC5C,cAAY,SAAQ,WAAW,EAAE,OAAO,EAAE;AAC1C,gBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAGhD,MAAI,aAAK,cAAc,IAAI,WAAW;AACpC,WAAO;;AAGT,YAAS,KAAK;AACd,SAAO;;AAGF,sBAAoC;EACzC,YAAY;EACZ,aAAa;EACb,WAAW;EACX,YAAY;;;;AC/Cd;;;;;;;;;;;;;;;;AAmBO,MAAM,YAA0B,wBAAwB;;;ACnB/D;;;;;;;;;;;;;;;;AAsBM,eACF;AACF,SAAO,SAAS,IAAI,QAAQ,OAAO,OAAO,qBAAW;AAErD,0BAAwB,mBAAW,iBAAiB,GAAG,OAAO;AAE9D,sBAAoB,mBAAW,iBAAiB,EAAE,OAAO,QAAQ;AACjE,gBAAc,SAAQ,mBAAmB;AACzC,cAAY,SAAQ,WAAW,OAAO,EAAE;AACxC,kBAAgB,SAAQ,mBAAmB;AAC3C,mBAAiB,aAAK,eAAe,EAAE;AACvC,MAAI;AACF,uBAAmB,mBAAW,kBAAkB,QAAQ;AACxD,YAAQ,IACJ,MAAM,SAAS,YAAY,aAAa,aAAK,cAAc;AAC/D,WAAO;;AAET,eAAa,EAAE,MAAM;AACrB,MAAI,SAAS;AACX,YACI,OAAO,SAAS,IAAI,SAAS,QAC7B;aACK,SAAS;AAClB,YACI,OAAO,SAAS,IAAI,SAAS,IAAI,SACjC,QAAoC;aAC/B,SAAS;AAClB,YACI,OAAO,SAAS,IAAI,SAAS,IAAI,SAAS,IAAI,SAC9C,QACA;;AAEJ,qBAAiB,OAAO,GAAG,SAAS,QAAQ;;AAE9C,SAAO;;AAGT;AAIE,kBAAgB;AAChB,iBAAe,MAAM;AACrB,iBAAe,MAAM;AACrB,eAAa,SAAS,KAAK;AAC3B,eAAa,QAAQ,IAAI,MAAM;AAC7B,oBAAgB,IAAI,UAAU;AAC9B,YAAQ,IAAI,MAAM,SAAS,SAAS,UAAU,KAAK,KAAK;AACxD,iBAAa,KAAK;;;AAItB;AAIE,kBAAgB;AAChB,iBAAe,MAAM;AACrB,iBAAe,MAAM;AACrB,iBAAe,MAAM;AACrB,eAAa,SAAS,KAAK;AAC3B,eAAa,SAAS,KAAK;AAC3B,eAAa,QAAQ,IAAI,MAAM;AAC7B,iBAAa,QAAQ,IAAI,MAAM;AAC7B,sBAAgB,IAAI,WAAW,IAAI,WAAW;AAC9C,cAAQ,IAAI,MAAM,SAAS,SAAS,UAAU,KAAK,KAAK;AACxD,mBAAa,KAAK;;;;AAKxB;AAKE,kBAAgB;AAChB,iBAAe,MAAM;AACrB,iBAAe,MAAM;AACrB,iBAAe,MAAM;AACrB,eAAa,SAAS,KAAK;AAC3B,eAAa,SAAS,KAAK;AAC3B,eAAa,SAAS,KAAK;AAC3B,iBAAe,MAAM;AAErB,eAAa,QAAQ,IAAI,MAAM;AAC7B,iBAAa,QAAQ,IAAI,MAAM;AAC7B,mBAAa,QAAQ,IAAI,MAAM;AAC7B,wBAAgB,IAAI,WAAW,IAAI,WAAW,IAAI,WAAW;AAC7D,gBAAQ,IAAI,MAAM,SAAS,SAAS,UAAU,KAAK,KAAK;AACxD,qBAAa,KAAK;;;;;AAM1B;AAGE,iBAAe,OAAO,MAAM,MAAM,OAAO;AACzC,eAAa,OAAO,MAAM,OAAO,MAAM,OAAO;AAC9C,eAAa,GAAG,IAAI,OAAO,MAAM,EAAE;AACjC,gBAAY,OAAO,WAAW;AAC9B,iBAAa,IAAI,IAAI,YAAY,MAAM,MAAM;AAC7C,YAAQ,KAAK,KAAK,IAAI,GAAG;;;AAItB,oBAAkC;EACvC,YAAY;EACZ,aAAa;EACb,YAAY;;;;ACrId;;;;;;;;;;;;;;;;AAqBA,IAAI;AAGJ;AACE,cAAW
,SAAQ,KAAK,MAAM,SAAS,MAAiB;IACtD;IACA;IACA;IACA;;;AAIJ;AAGE,SAAO,mBAAS,SAAS,SAAS,QAAQ,QAAQ;AAClD,cAAY,SAAQ,UAAU,IAAI,OAAO,QAAQ;AACjD,cAAY,SAAQ,WAAW,OAAO,OAAO,OAAO;AACpD,gBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAEhD,mBAAiB,OAAO,MAAM;AAC9B,gBAAc,aAAK,cAAc,OAAO,SAAS;AAGjD,MAAI,aAAK,cAAc,IAAI,WAAW;AACpC,WAAO;;AAGT,YAAS,KAAK,OAAO,UAAU;AAC/B,SAAO;;AAGF,sBAAoC;EACzC,YAAY;EACZ,aAAa;EACb,WAAW;EACX,YAAY;;;;ACzDd;;;;;;;;;;;;;;;;AAwBM,eACF;AACF,SAAO,QAAQ,OAAO,qBAAW;AACjC,SAAO,KAAK;AACZ,SAAO,iBAAiB,QAAQ;AAEhC,gBAAc,aAAK,eAAe,MAAM,EAAE,OAAO;AAEjD,qBAAmB,qBAAa,iBAAiB,GAAG,iBAAiB;AACrE,gBAAc,IAAI,MAAM,EAAE,MAAM,QAAQ,KAAK;AAC7C,eAAa,EAAE,MAAM;AACrB,SAAO,WAAW,IAAI;AACpB,uBAAmB,CAAC,GAAG;AACvB,eAAW,SAAS;AACpB,mBACI,MAAM,CAAC,QAAQ,CAAC,IAAI,OAAO,CAAC,OAAO,MAAM,aAAa;AAC1D,UAAM,UAAU;AAChB,WAAO;;;AAIJ,qBAAmC;EACxC,YAAY;EACZ,aAAa;EACb,YAAY;;;;AChDd;;;;;;;;;;;;;;;;AAoBO,MAAM,aAA2B,wBAAwB;;;ACpBhE;;;;;;;;;;;;;;;;AAmBO,MAAM,eAA6B,wBAAwB;;;ACnBlE;;;;;;;;;;;;;;;;AAkBA,MAAM,0BAAwB;AACvB,gCACH,yBAAyB,mBAAmB;;;ACpBhD;;;;;;;;;;;;;;;;AAuBA,IAAI;AAMJ;AACE,qBAAmB,SAAQ,KAAK,MAAM,cAAc,MAAe;IACjE;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;;;AAIE;AAKJ,SAAO,mBAAS,QAAQ,SAAS;AACjC,SAAO,KAAK;AAEZ,OAAK,OAAO,KAAK,WAAW;AAC5B,MAAI,WAAW;AACb,cAAU,IAAI,MAAM,MAAM;;AAG5B,SAAO,WAAW,SAAS,cAAc,aAAa,kBAAkB;AAExE,uBAAqB,qBAAa,WAAW,WAAW;AACxD,MAAI,aAAa,SAAS;AACxB,UAAM,IAAI,MAAM;;AAGlB,MAAI,iBAAiB,KAAK,gBAAgB;AACxC,UAAM,IAAI,MACN;;AAGN,MAAI,iBAAiB,KAAK,mBAAmB;AAC3C,UAAM,IAAI,MACN;;AAGN,8BAA4B,EAAE,MAAM,SAAS,MAAM;AAGnD,qBAAmB,qBAAa,WAAW,WAAW;AACtD,mBAAiB,EAAE,MAAM;AACzB,aAAW,QAAQ;AACjB,UAAM,QAAQ;AACd,QAAI,QAAQ;AACZ,aAAS,OAAO,MAAM,GAAG;;AAG3B,oBAAkB,SAAQ,CAAC,QAAQ,CAAC,IAAI,OAAO,CAAC,OAAO,WAAW;AAElE,SACE,wBACA,oBACA,8BAEE,qBAAa,WAAW,kBACpB,UAAU,OAAO,cAAc,qBAAqB,OAAO,KAC3D,SAAS,WAAW,SAAS;AACrC,UAAQ;AACR,QAAM;AACN,YAAU;AAEV,qBAAmB,qBAAa,WAAW,WAAW;AAEtD,aAAW,QAAQ;AACjB,QAAI,QAAQ,MAAM,QAAQ;AAC1B,YAAQ,QAAQ;;AAIlB,eAAa,qBAAa,WAAW,gBAAgB,OAAO,KAAK;AAEjE,mBAAiB,KAAK,OAAO,aAAa,WAAW,QAAQ,UAAU;AAEvE,qBAAmB,QAAQ,MAAM,OAAK,MAAM;AAC5C,MAAI;AACF,oBAAgB,MAAM,CAAC,QAAQ,CAAC,IAAI,OAAO,CAAC,OAAO,OAAO;AAC1D,WAAO,SAAQ,CAAC,QAAQ,CAAC,GAAG,UAAU,OAAO,CAAC,OAAO,WAAW;;AAGlE,cAAY,SAAQ,WAAW,UAAU;AACzC,MAAI,CAAC,SAAS,KAAK,UAAQ,SAAS;AAClC,gBAAY,SAAQ,UAAU,IAAI,UAAU,QAAQ;AACpD,0BAAsB,IAAI,WACtB,IAAI,WAAW,aAAK,eAAe,UAAU,QAAQ;AACzD,uBAAmB,IAAI,WAAW,IAAI,WAAW,OAAO;AACxD,qBAAiB,IAAI,WAAW,IAAI,WAAW,KAAK;AACpD,yBAAqB,IAAI,WAAW,IAAI,WAAW,SAAS;AAE5D,6BAAyB,IAAI,WAAW,IAAI,WAAW,UAAU;AACjE,4BACI,IAAI,WAAW,IAAI,WAAW,aAAK,eAAe,WAAW;AACjE,kBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAEhD,qBACI,KAAK,eAAe,UAAU,MAAM,QAAQ,YAAY,UACxD,cAAc,kBAAkB,iBAAiB,SAAS,QAC1D;;AAGN,SAAO,SAAQ,CAAC,QAAQ,CAAC,GAAG,MAAM,OAAO,CAAC,OAAO,WAAW;;AAGvD,2BAAyC;EAC9C,YAAY;EACZ,aAAa;EACb,WAAW;EACX,YAAY;;;;AChJd;;;;;;;;;;;;;;;;AAkBA,MAAM,0BAAwB;AACvB,kBACH,yBAAyB,KAAK;;;ACpBlC;;;;;;;;;;;;;;;;AAuBA,IAAI;AAEJ;AACE,YAAU,SAAQ,KAAK,MAAM,KAAK,MAAe,CAAC;;AAGpD;AAEE,SAAO,mBAAS,QAAQ,SAAS;AACjC,SAAO,MAAM,YAAY;AACzB,SAAO,KAAK;AACZ,cAAY,SAAQ,UAAU,IAAI,EAAE,QAAQ;AAC5C,gBAAc;AACd,cAAY;AAEZ,SAAO,YAAY,MAAM,cAAc,sBACnC,wBAAwB,GAAG,MAAM;AAErC,sBAAoB;AACpB,MAAI;AACF,yBAAqB,SAAQ,UAAU,IAAI,WAAW,QAAQ;AAC9D,QAAI,iBAAiB;AAGnB,cAAQ;AACR,gBAAU;AACV,sBAAgB,qBAAa,iBACzB,cAAc,QAAQ,MAAM,MAAM;;;AAI1C,uBAAa,2BACT,OAAO,eAAe,MAAM,MAAM;AACtC,kCACI,qBAAa,0BAA0B,MAAM,OAAO;AACxD,qBAAmB,aAAK,cAAc;AAEtC,cAAY,SAAQ,WAAW,UAAU,MAAM;AAC/C,MAAI,aAAK,cAAc,MAAM,WAAW;AACtC,kBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAChD,YAAQ,SAAS,YAAY;;AAG/B,MAAI;AAEF,aAAQ,YAAY,WAAW;;AAGjC,MAAI;AAEF,qBAAiB,qBAAa,qBAAqB,IAAI,OAAO;AAC9D,QAAI,QAAQ;;AAGd,SAAO;;AAGF,kBAAgC;EACrC,YAAY;EACZ,aAAa;EACb,WAAW;EACX,YAAY;;;;ACpFd;;;;;
;;;;;;;;;;;AAmBO,MAAM,aAA2B,wBAAwB;;;ACnBhE;;;;;;;;;;;;;;;;AAuBA,IAAI;AAIJ;AACE,aAAW,SAAQ,KAAK,MAAM,MAAM,MAAiB;IACnD;IACA;IACA;IACA;IACA;IACA;;;AAIJ;AAEE,SAAO,QAAQ,mBAAS,SAAS;AACjC,SAAO,KAAK;AACZ,cAAY,SAAQ,UAAU,IAAI,EAAE,QAAQ;AAC5C,SAAO,QAAQ;AAEf,mBAA2B,IAAI,MAAM,EAAE,MAAM;AAC7C,eAAa,GAAG,IAAI,SAAS,QAAQ;AACnC,aAAS,KAAK,EAAE,MAAM,KAAK,KAAK;;AAElC,sBAAoB,IAAI,WAAW,IAAI,WAAW,EAAE,OAAO;AAC3D,wBAAsB,IAAI,WAAW,IAAI,WAAW,UAAU;AAE9D,cAAY,SAAQ,WAAW,UAAU,EAAE;AAC3C,gBAAc,SAAQ,UAAU,IAAI,IAAI,QAAQ;AAChD,WACI,KAAK,aAAa,EAAE,MAAM,QAAQ,eAAe,SAAS,QAC1D,SAAS,IAAI,QAAQ;AACzB,SAAO;;AAGF,mBAAiC;EACtC,YAAY;EACZ,aAAa;EACb,WAAW;EACX,YAAY;;;;AChEd;;;;;;;;;;;;;;;;AAuBA,gBACI;AAEF,SAAO,QAAQ,mBAAS,SAAS;AACjC,SAAO,SAAS;AAChB,SAAO,QAAQ;AACf,qBAAmB,MAAM,MAAM;AAC/B,eAAa,MAAM,MAAM;AACzB,mBAA2B,IAAI,MAAM,OAAO;AAC5C,iBAAe;AACf,eAAa,GAAG,IAAI,MAAM;AACxB,QAAI,MAAM;AACR,eAAS,cAAc,MAAM,MAAM;;;AAGvC,eAA2B,IAAI,MAAM;AACrC,gBAAc,IAAI,MAAM,MAAM,KAAK;AACnC,eAAa,MAAM,MAAM;AACzB,OAAK,QAAQ;AACb,eAAa,GAAG,IAAI,KAAK,QAAQ;AAC/B,UAAM,QAAQ;AACd,SAAK,KAAK,MAAM,CAAC,QAAQ,CAAC,GAAG,QAAQ,OAAO,CAAC,OAAO,OAAO;;AAE7D,SAAO,KAAK,IAAI,EAAE,QAAQ,WAAY,EAAC,QAAQ,OAAO,OAAO;;AAGxD,qBAAmC;EACxC,YAAY;EACZ,aAAa;EACb,YAAY;;;;ACpDd;;;;;;;;;;;;;;;;AAqBA,mBAAmB;AACjB,SAAO,SAAS,IAAI,qBAAW;AAC/B,cAAY,SAAQ,WAAW,EAAE,OAAO,EAAE;AAC1C,kBAAgB,SAAQ,mBAAmB;AAC3C,UAAQ,KAAK;AACb,SAAO;;AAGF,wBAAsC;EAC3C,YAAY;EACZ,aAAa;EACb,YAAY;;;;AChCd;;;;;;;;;;;;;;;;AAmGA,MAAM,gBAAgC;EACpC;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;;AAGF,2BAA2B;AACzB,iBAAe;;;;ACnLjB;;;;;;;;;;;;;;;;AAmBA,MAAM,OAAM;AAMZ,KAAI,aAIA,yBAAyB,YAAY,YAAY,SAAS,IAAI,WAAW;EACvE;EAAG;EAAI;EAAK;EAAK;EAAG;EAAG;EAAG;EAAG;EAAI;EAAG;EAAK;EAAI;EAAI;EAAG;EACpD;EAAG;EAAI;EAAK;EAAK;EAAG;EAAG;EAAG;EAAG;EAAI;EAAG;EAAK;EAAI;EAAI;;AAOvD,KAAI,aAAa,gCAAgC;AAG/C,MAAI,KAAI,IAAI;AACV,WAAO;;AAGT;AAGE,QAAI,iBAAiB,MAAM,YAAY,IAAI,kBAAkB;AAG7D,WAAO,YAAY,SAAS,IAAI,WAAW;MACzC;MAAG;MAAI;MAAK;MAAK;MAAG;MAAI;MAAI;MAAG;MAAG;MAAG;MAAI;MAAI;MAAK;MAAI;MAAG;MAAG;MAAI;MAAG;MACnE;MAAG;MAAI;MAAK;MAAK;MAAG;MAAI;MAAI;MAAG;MAAG;MAAG;MAAI;MAAI;MAAK;MAAI;MAAG;MAAG;MAAI;;;AAGlE,WAAO;;;;;ACnCX,MAAA,kCAAoC;;;ACrB7B,MAAM,qBAAqB;;;ACwBlC,MAAA,oBAAwB;AAxBxB;;;;;;;;;;;;;;;;AA0BA,sBAAsB;AA1BtB,0BAuCiC;EAK/B;AACE;AADiB,SAAA,OAAA;AAHX,SAAA,mBAAmB;AAKzB,SAAK,KAAK,KAAK;AACf,SAAK,YAAY,IAAI,YAAY,MAAM;;EAGzC;AAEE,mBAAe;AACf,SAAK,KAAK,QAAQ,QAAQ,OAAO;AACjC,WAAO;;EAGT;AACE,WAAO,KAAK,UAAU;;QAGlB;AACJ,kBAAc,aAAK;AACnB;AACA,qBAAiB,aAAK,QAAQ;AAC9B,WAAO,CAAC;;EAGV;AAGE,eAAW,KAAK;AAChB,QAAI,UAAU;AACZ,0BAAoB;AACpB,WAAK,UAAU,IACX,QAAQ,CAAC,IAAI,aAAa,OAAO,OAAO,cAAc;AAC1D;;AAGF,iBAAa,aAAK,cAAc;AAChC,qBAAiB,OAAO,aAAK,gBAAgB;AAC7C,yBAAqB,KAAK,KAAK,QAAQ;AAEvC,SAAK,UAAU,IAAI,QAAQ,CAAC,IAAI,cAAc,OAAO;AAErD,SAAK,KAAK,KAAK,eAAe,IAAI,MAAM;AAExC,QAAI,UAAU;AACZ,WAAK,KAAK,OAAO,IACb,IAAI,WACC,OAAmC,QACnC,OAAmC,YAAY,WACpD;;;QAIF;AACJ,WAAO,KAAK,SAAS;;EAGvB;AACE,WAAO,cAAc,OAAO,OAAO,eAC/B,KAAK,UAAU,IAAI;AACvB,QAAI,UAAU;AACZ,aAAO;;AAET,kBAAc,KAAK,KAAK,OAAO,MAC3B,cACA,eAAe,aAAK,cAAc,SAAS,aAAK,gBAAgB;AACpE,WAAO,qBAAqB,MAAM,QAAQ;;EAG5C;AACE,kBAAa,KAAK,UAAU,IAAI;AAChC,SAAK,KAAK,MAAM,MAAK;AACrB,SAAK,KAAK,KAAK,YAAY,MAAK;AAChC,SAAK,UAAU,OAAO;;EAGxB;AACE,WAAO;;EAKT;AACE,WAAO,KAAK,UAAU,IAAI,QAAQ;;EAGpC;AACE,SAAK,KAAK,KAAK;AACf,SAAK,OAAO;;EAGd;AACE,WAAO,CAAC,YAAY;;EAStB;AAEE;AACA,QAAI,gB
AAgB;AAClB,eAAS,KAAK,MAAM,MAAmB,OAAO;;AAE9C,eAAS;AACT,iBAAW,KAAK;AAChB,WAAK,UAAU,IAAI,QAAQ,CAAC,IAAI,cAAc,OAAO;AACrD,mBAAa,aAAK,cAAc;AAChC,WAAK,KAAK,KAAK,eAAe,IAAI,MAAM;;AAE1C,WAAO,CAAC,QAAQ,OAAO;;EAGzB,oBAAoB,OAAO,OAAO;AAEhC,oBAAe,KAAK,KAAK,OAAO;AAChC,WAAO,gBAAgB,KAAK,UAAU,IAAI;AAC1C,iBAAa,aAAK,cAAc;AAChC,YAAQ;WACD;AACH,eAAO,IAAI,aAAa,SAAQ,cAAc;WAC3C;AACH,eAAO,IAAI,WAAW,SAAQ,cAAc;WACzC;AACH,eAAO,IAAI,WAAW,SAAQ,cAAc;;AAE5C,cAAM,IAAI,MAAM,iBAAiB;;;;AAKzC,gBAAgB,QAAQ;AACtB,SAAO,QAAQ,MAAM;AACrB,SAAO,IAAI,YAAY;GACtB;AAEH;AAEE,SAAO;AACL,iBAAK,MAAM,MAAM,CAAC,aAAa,gBAAgB,KAAK;AAClD,UAAI,CAAC,SAAS;AACZ,gBAAQ,IAAI,EAAE,uCAAuC;;AAEvD,eAAS,cAAc,KAAK;AAC1B,oBAAY,YAAY,QAAQ,SAAS,KAAK;AAC5C,mBAAS,OAAO;;;;AAItB,WAAO;;;AAUX;AAGE,MAAI,YAAY;AAGd,WAAO;;AAGT,aAA2B;AAC3B,MAAI,iBAAiB;AACnB,WAAO;aACE;AACT,WAAO;;AAGT,MAAI,eAAe;AACjB,QAAI,YAAY,SAAS;AACvB,aAAO,YAAY;;;AAIvB,SAAO,mBAAmB;;AAU5B;AACE,4CAA0C,MAAM,QAAQ,IAAI;IAC1D,MAAM,SAAS;IACf,MAAM,SAAS;;AAGjB,SAAO,IAAI,QAAQ;AACjB,0BAAyC;AAOzC,kBAAc,aAAa;AACzB,UAAI,KAAK,SAAS;AAChB,yBAAiB;AACjB,qBAAa,IAAI,KAAK,CAAC,WAAW,CAAC,MAAM;AACzC,eAAO,IAAI,gBAAgB;;AAG7B,UAAI,KAAK,SAAS;AAChB,eAAO,oBACH,eAA0B,kBAC1B,kBAAkB,OAAO,iBAAiB;;AAEhD,aAAO,SAAS;;AAMlB,QAAI;AACF,oBAAc,kBACV,0BAA0B,oBACtB,eAA0B,kBAC1B,kBAAkB,OAAO,iBAAiB;;AAEpD;AAEA,QAAI,oBAAoB,iBAAiB,YAAY;AACnD,aAAO,wCAAwB;AAC/B,WAAK,sBAAsB,IAAI,KAC3B,CAAC,qEACA,wCAAwB,aACzB,CAAC,MAAM;;AAGX,aAAO,0BAAY;;AAGrB,2BAA+B;AAE/B,SAAK,OAAO;MACV,MAAM,KAAK,MAAM,QAAQ,MAAM;MAC/B,gBAAgB,KAAK,MACjB,mBAAmB,MACnB;QACE;QACA;QACA;;MAEN,aAAa,KAAK,MAAM,gBAAgB,gBAAgB,CAAC;MACzD,SAAS,KAAK,MAAM,WAAW,gBAAgB;;AAEjD,sBAAkB;AAClB,SAAK,uBAAuB;AAC1B,oBAAc;AACd,oBAAc;AACd,cAAQ,CAAC;;AAEX,SAAK,UAAU;AACb,UAAI;AAEF;;AAEF,UAAI;AAGF;;AAEF,oBAAc;AACd,wBACI;AAEJ,aAAO,CAAC,SAAS;;;;AAKvB;AAEE,UAAQ;SACD;AACH,aAAO,IAAI,aAAa;SACrB;AACH,aAAO,IAAI,WAAW;SACnB;AACH,aAAO,IAAI,WAAW;;AAEtB,YAAM,IAAI,MAAM,iBAAiB;;;AAIvC,wBAAwB;EACtB;EAA0B;EAC1B;;AAIF,eAAuB;AACvB,qBAA6B;AAC7B,kBAAsD;AACtD,kBAAkB;AAClB,kBAAkB;ACoDZ,sBACF,oCACmB;AACrB,MAAI;AACF,UAAM,IAAI,MACN;;AAKN,MAAI,OAAO,oBAAoB;AAC7B,qBAAiB;;AAEjB,kBAAc;AACd,yBACI,gBAAgB,OAAO,UAAQ,YAAY,SAAS;AACxD,QAAI,aAAa,SAAS;AACxB,YAAM,IAAI,MACN,2DACG,aAAa,KAAK;;;AAM7B,gBAAc;;;;AC5ahB;;;;;;;;;;;;;;;;;;ACAA;;;;;;;;;;;;;;;;;;ACaA,MAAM,iBAAoB;;;ACb1B,IAAO,wBAAQ;AAAA,EACb;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAG;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EACvE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAC1E;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EACrE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EACzE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAC1E;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EACxE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EACpE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAA
A,EAAK;AAAA,EAAK;AAAA,EAAG;AAAA,EACpE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACzE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EACrE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EACtE;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EACrE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACxE;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EACxE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EACvE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EACxE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACrE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EACvE;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EACrE;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAG;AAAA,EAAG;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAG;AAAA,EACrE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EACxE;AAAA,EAAG;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAG;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EACxE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EACvE;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EACpE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAC1E;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EACzE;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EACrE;AAAA,EAAK;AAAA,E
AAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACrE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EACtE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EACxE;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EACrE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAG;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACzE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAG;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EACvE;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACzE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EACvE;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACxE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAG;AAAA,EACvE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACpE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACpE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACrE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAG;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACvE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EACzE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EACpE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK
;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAG;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACxE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EACxE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EACvE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACpE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACrE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EACpE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACpE;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACrE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;A
AAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACpE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACpE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACxE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EACrE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EACrE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACpE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACxE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EACvE;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACvE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAG;AAAA,EAAI;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACxE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EACpE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EACxE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACrE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAA
A,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EACxE;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAG;AAAA,EAAI;AAAA,EAAI;AAAA,EAAG;AAAA,EAAK;AAAA,EACrE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EACvE;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EACxE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EACvE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EACxE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EACxE;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EACzE;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACvE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACrE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAG;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EACrE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACvE;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACrE;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EACrE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAC1E;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EACvE;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACvE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACzE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EACvE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EACxE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EA
CvE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAC1E;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EACrE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EACpE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACpE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAG;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAG;AAAA,EAAI;AAAA,EACvE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAG;AAAA,EACpE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACpE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EACrE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACrE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EACrE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAA
K;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACpE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACrE;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACpE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EACrE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EACxE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACrE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACvE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAI;AAAA,EAAG;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACxE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;
AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EACtE;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA;;;ACvKnE,MAAA,WAA0B;AAC1B,YAAqB;AACrB,eAAwB;AACxB,gBAAyB;AACzB,gBAAyB;;;ACLzB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAkBA,oBAAoB;AAClB,SAAO;AAAA,IACL,KAAK,IAAI,IAAI,SAAS,KAAK,IAAI,WAAW;AAAA,IAC1C,KAAK,IAAI,IAAI,SAAS,KAAK,IAAI,WAAW;AAAA;AAAA;AAG9C;AACE,SAAO;AAAA,IACL,IAAI,WAAW,KAAM,KAAI,SAAS,KAAK,IAAI,WAAW,MAAM;AAAA,IAC5D,IAAI,WAAW,KAAM,KAAI,SAAS,KAAK,IAAI,WAAW,MAAM;AAAA;AAAA;AAGhE;AACE,YAAU,OAAM,MAAM;AACtB,YAAU,OAAM,MAAM;AACtB,gBAAc,CAAC;AAAA,IACb,IAAI,WAAW,KAAK;AAAA,IACpB,IAAI,WAAW,KAAK;AAAA,IACpB,IAAI,SAAS,KAAK;AAAA,IAClB,IAAI,SAAS,KAAK;AAAA;AAEpB,SAAO,GAAG,MAAM,cAAc,QAAO,OAAO,CAAC,IAAI;AAAA;AAEnD;AACE,qBAAmB,CAAC,IAAI,WAAW,KAAK,OAAO,IAAI,IAAI,WAAW,KAAK,OAAO;AAC9E,mBAAiB,CAAC,IAAI,SAAS,KAAK,OAAO,IAAI,IAAI,SAAS,KAAK,OAAO;AACxE,wBAAsB,IAAI,cAAc,IAAI;AAC1C,wBAAoB,CAAC,MAAM,KAAK,OAAO,IAAI,MAAM,KAAK,OAAO;AAC7D,WAAO;AAAA;AAET,SAAO,CAAE,YAAY,UAAU,eAAe,YAAY,IAAI;AAAA;AAEhE,kCAAkC;AAChC,iBAAe,aAAa;AAC5B,eAAa,WAAW;AACxB,sBAAoB,CAAC,SAAS,KAAK,KAAK,GAAG,SAAS,KAAK,KAAK;AAC9D,qBAAmB,CAAC,OAAO,KAAK,YAAY,IAAI,OAAO,KAAK,YAAY;AACxE,mBAAiB,CAAC,OAAO,KAAK,YAAY,IAAI,OAAO,KAAK,YAAY;AACtE,SAAO,CAAE,YAAY,UAAU,eAAe,IAAI;AAAA;AAEpD;AACE,kBAAgB,aAAa;AAC7B,eAAa,WAAW;AACxB,kBAAgB,KAAK,IAAI,GAAG;AAC5B,mBAAiB,UAAU;AAC3B,qBAAmB,CAAC,QAAQ,KAAK,UAAU,QAAQ,KAAK;AACxD,mBAAiB,CAAC,QAAQ,KAAK,UAAU,QAAQ,KAAK;AACtD,SAAO,CAAE,YAAY,UAAU,eAAe,IAAI;AAAA;AAEpD;AACE,kBAAgB;AAAA,IACd,IAAI,SAAS,KAAK,IAAI,WAAW;AAAA,IACjC,IAAI,SAAS,KAAK,IAAI,WAAW;AAAA;AAEnC,sBAAoB,CAAC,QAAQ,KAAK,YAAY,IAAI,QAAQ,KAAK,YAAY;AAC3E,qBAAmB,CAAC,IAAI,WAAW,KAAK,YAAY,IAAI,IAAI,WAAW,KAAK,YAAY;AACxF,mBAAiB,CAAC,IAAI,SAAS,KAAK,YAAY,IAAI,IAAI,SAAS,KAAK,YAAY;AAClF,SAAO,CAAE,YAAY,UAAU,eAAe,IAAI;AAAA;;;AC3EpD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAgBA,0BAA0B;AACxB,SAAO,QAAQ,IAAI,KAAK,KAAK,KAAK,MAAO,SAAQ,KAAK,MAAO,KAAI,KAAK;AAAA;AAExE;AACE,kBAAgB,KAAK,KAAK,IAAI,KAAK,MAAM,CAAE,QAAO,KAAK,OAAO,KAAK,OAAO,KAAK,OAAO;AACtF,SAAO,iBAAiB;AAAA;AAE1B,+BAA+B,UAAU,CAAC,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,GAAG;AACvE;AACE,gBAAc;AACd,eAAa,GAAG,IAAI,GAAG,QAAQ;AAC7B,eAAW,GAAG,KAAK,GAAG;AAAA;AAExB,SAAO;AAAA;AAET;AACE,iBAAe;AACf,eAAa,GAAG,IAAI,IAAI,QAAQ;AAC9B,WAAO,KAAK,IAAI,GAAG;AAAA;AAErB,SAAO;AAAA;AAET;AACE,kBAAgB;AAChB,eAAa,KAAK;AAClB,iBAAe,GAAG,MAAM,MAAM;AAC5B,YAAQ,KAAK;AACb,mBAAe,GAAG,MAAM,MAAM;AAC5B,cAAQ,KAAK,KAAK,IAAI,KAAK,MAAM,mBAAmB,MAAM;AAAA;AAAA;AAG9D,SAAO;AAAA;AAET;AACE,eAAa,KAAK,IAAI;AACtB,eAAa,KAAK,IAAI;AACtB,yBAAuB,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,MAAM,MAAM,IAAI,CAAC,GAAG,GAAG;AAClE,4BAA0B,uBAAuB,OAAO,IAAI,OAAO;AACnE,mCAAiC,0BAA0B,mBAAmB;AAC9E,oCAAkC,uBAAuB,CAAC,OAAO,IAAI,CAAC,OAAO;AAC7E,SAAO,0BAA0B,0BAA0B;AAAA;AAE7D;AACE,4BAA0B,CAAC,CAAC,OAAO,GAAG,IAAI,OAAO,GAAG,KAAK,CAAC,OAAO,GAAG,IAAI,OAAO,GAAG;AAClF,+BAA6B,CAAC,OAAO,GAAG,IAAI,OAAO,GAAG;AACtD,8BAA4B;AAAA,IAC1B,CAAC,IAAI,kBAAkB,IAAI;AAAA,IAC3B,CAAC,IAAI,kBAAkB,IAAI;AAAA;AAE7B,SAAO;AAAA,IACL,kBAAkB,GAAG,OAAO,oBAAoB;AAAA,IAChD,kBAAkB,GAAG,OAAO,oBAAoB;AAAA,IAChD,CAAC,GAAG,GAAG;AAAA;AAAA;AAGX;AACE,SAAO;AAAA,IACL,IAAI,uBAAuB,eAAe;AAAA,IAC1C,IAAI,uBAAuB,eAAe;AAAA;AAAA;;;ACpE9C,MAAA,WAA0B;AAC1B,gBAAyB;AACzB,cAAuB;AACvB,gBAAyB;;;ACNzB,IAAO,iBAAQ;AAAA,EACb,SAAS;AAAA,EACT,UAAU;A
AAA,EAEV,SAAS;AAAA,EACT,OAAO;AAAA,EAIP,SAAS;AAAA,EAIT,YAAY;AAAA,EAKZ,QAAQ;AAAA,EAIR,gBAAgB;AAAA,EAKhB,QAAQ;AAAA,IACN,SAAS;AAAA,IACT,OAAO;AAAA,IACP,QAAQ;AAAA,IAIR,QAAQ;AAAA,IACR,YAAY;AAAA,IACZ,UAAU;AAAA,IACV,WAAW;AAAA,IACX,MAAM;AAAA,IACN,YAAY;AAAA,IACZ,KAAK;AAAA,IACL,UAAU;AAAA,IACV,OAAO;AAAA,IACP,SAAS;AAAA,IACT,YAAY;AAAA,IACZ,aAAa;AAAA,IACb,UAAU;AAAA,IACV,UAAU;AAAA;AAAA,EAGZ,SAAS;AAAA,IACP,SAAS;AAAA;AAAA,EAGX,MAAM;AAAA,IACJ,SAAS;AAAA,IAIT,UAAU;AAAA,MACR,WAAW;AAAA,MAIX,WAAW;AAAA,MACX,UAAU;AAAA,MAEV,YAAY;AAAA,MAKZ,eAAe;AAAA,MACf,cAAc;AAAA,MAEd,gBAAgB;AAAA;AAAA,IAKlB,MAAM;AAAA,MACJ,SAAS;AAAA,MACT,WAAW;AAAA,MACX,WAAW;AAAA;AAAA,IAGb,MAAM;AAAA,MACJ,SAAS;AAAA,MACT,WAAW;AAAA,MACX,WAAW;AAAA;AAAA,IAGb,KAAK;AAAA,MACH,SAAS;AAAA,MACT,WAAW;AAAA,MAEX,WAAW;AAAA,MACX,YAAY;AAAA;AAAA,IAId,QAAQ;AAAA,MACN,SAAS;AAAA,MACT,eAAe;AAAA,MACf,WAAW;AAAA,MACX,WAAW;AAAA,MACX,YAAY;AAAA;AAAA,IAId,SAAS;AAAA,MACP,SAAS;AAAA,MACT,WAAW;AAAA,MACX,eAAe;AAAA,MACf,YAAY;AAAA,MACZ,WAAW;AAAA;AAAA;AAAA,EAIf,MAAM;AAAA,IACJ,SAAS;AAAA,IACT,WAAW;AAAA,IACX,WAAW;AAAA,IACX,eAAe;AAAA,IAEf,gBAAgB;AAAA,IAEhB,WAAW;AAAA;AAAA,EAGb,MAAM;AAAA,IACJ,SAAS;AAAA,IACT,WAAW;AAAA,IACX,YAAY;AAAA,IAKZ,eAAe;AAAA,IACf,cAAc;AAAA,IAEd,gBAAgB;AAAA,IAEhB,UAAU;AAAA,IAEV,WAAW;AAAA,IACX,UAAU;AAAA,MACR,WAAW;AAAA;AAAA,IAEb,UAAU;AAAA,MACR,WAAW;AAAA;AAAA;AAAA;;;;;;AC3IjB,MAAM,oBAAoB;AAAA,EACxB,MAAM,CAAE,UAAU,CAAE,YAAY,IAAK,KAAK,CAAE,YAAY,IAAK,QAAQ,CAAE,YAAY,IAAK,SAAS,CAAE,YAAY;AAAA,EAAO,MAAM,CAAE,YAAY;AAAA;AAI5I,aAAY;AACV,MAAI,OAAO,gBAAgB;AAAa,WAAO,YAAY;AAC3D,SAAO,SAAS,OAAO,QAAQ,OAAO,YAAY,MAAO;AAAA;AAI3D;AACE,mBAAiB,SAAS,OAAO,OAAO,QAAQ;AAChD,SAAO,QAAQ,OAAO;AACpB,WAAO,KAAK,OAAO,IAAI,QAAQ;AAC7B,mBAAa,KAAK;AAClB,mBAAa,IAAI;AACjB,UAAI,MAAM,QAAQ,SAAS,MAAM,QAAQ;AACvC,aAAK,OAAO,KAAK,OAAO,GAAG;AAAA,iBAClB,SAAS,SAAS,SAAS;AACpC,aAAK,OAAO,UAAU,MAAM;AAAA;AAE5B,aAAK,OAAO;AAAA;AAAA;AAGhB,WAAO;AAAA,KACN;AAAA;AAxCL;AAAA,EA4CE,yBAAyB;AACvB,SAAK,KAAK;AACV,SAAK,UAAc;AACnB,SAAK,SAAS,UAAiB,gBAAS;AACxC,SAAK,KAAK;AACV,SAAK,QAAQ;AACb,SAAK,aAAa;AAClB,SAAK,qBAAqB;AAC1B,SAAK,cAAc;AACnB,SAAK,WAAW;AAChB,SAAK,OAAO;AAEZ,SAAK,SAAS;AAAA,MACZ,UAAU;AAAA,MACV,SAAS;AAAA,MACT,UAAU;AAAA,MACV,MAAM;AAAA,MACN,KAAK;AAAA,MACL,QAAQ;AAAA,MACR,SAAS;AAAA;AAGX,SAAK,WAAW;AAChB,SAAK,MAAM;AACX,SAAK,SAAS;AACd,SAAK,UAAU;AACf,SAAK,OAAO;AACZ,SAAK,OAAO;AAAA;AAAA,EAId;AAEE,QAAI,OAAO,KAAK,OAAO;AAAS,cAAQ,IAAI,UAAU,GAAG;AAAA;AAAA,EAG3D;AACE,QAAI,KAAK,OAAO;AAAS,aAAe;AACxC,WAAO;AAAA;AAAA,EAIT;AACE,QAAI,CAAC,KAAK;AAAoB;AAC9B,oBAAgB,GAAG,SAAS,MAAM;AAClC,qBAAiB,KAAK;AACtB,SAAK,aAAa;AAClB,mBAAe,UAAU;AACzB,QAAI,WAAW;AAAG,WAAK,IAAI,GAAG,KAAK;AAAA;AAAA,EAIrC;AACE,QAAI,CAAC,KAAK;AAAa,aAAO;AAC9B,QAAI,CAAC;AAAO,aAAO;AACnB,QAAI,GAAG,IAAI,MAAM,WAAW,CAAE,kBAAiB,GAAG;AAChD,aAAO;AAAA;AAET;AACE,SAAG;AAAA;AAEH,aAAO;AAAA;AAET,WAAO;AAAA;AAAA,QAIH;AACJ,SAAK,QAAQ;AACb,sBAAkB;AAClB,QAAI;AAAY,WAAK,SAAS,UAAU,KAAK,QAAQ;AAErD,QAAI,KAAK;AACP,WAAK,aAAa;AAClB,WAAK,IAAI,YAAY,KAAK,kCAAkC,GAAG;AAC/D,WAAK,IAAI,kBAAkB,KAAK;AAChC,WAAK,IAAI,UAAU,GAAG,IAAI;AAC1B,WAAK,WAAW;AAAA;AAElB,QAAI,KAAK,OAAO;AACd;AAAA,QACE,KAAK,OAAO;AAAA,QACZ,KAAK,OAAO;AAAA,QACZ,KAAK,OAAO;AAAA,QACZ,KAAK,OAAO;AAAA,QACZ,KAAK,OAAO;AAAA,QACZ,KAAK,OAAO;AAAA,UACV,MAAM,QAAQ,IAAI;AAAA,QACpB,KAAK,OAAO,YAAa,MAAK,OAAO,KAAK,UAAU,AAAS,cAAK,KAAK,OAAO,QAAQ;AAAA,QACtF,KAAK,OAAO,OAAS,MAAK,OAAO,KAAK,WAAW,KAAK,OAAO,KAAK,IAAI,UAAW,AAAI,SAAK,KAAK,UAAU;AAAA,QACzG,KAAK,OAAO,UAAY,MAAK,OAAO,KAAK,WAAW,KAAK,OAAO,KAAK,OAAO,UAAW,AAAO,YAAK,KAAK,UAAU;AAAA,QAClH,KAAK,OAAO,WAAa,MAAK,OAAO,KAAK,WAAW,KAAK,OAAO,KAAK,QAAQ,UAAW,AAAQ,aAAK,KAAK,UAAU;AAAA,QACrH,KAAK,OAAO,WAAY,MAAK,OAAO,KAAK,UAAU,AAAQ,aAAK,KAAK,UAAU;AAAA,QAC/E,KAAK,OAAO,YAAa,
MAAK,OAAO,KAAK,UAAU,AAAS,cAAK,KAAK,OAAO,QAAQ;AAAA;AAAA;AAGxF,UAAI,KAAK,OAAO,KAAK,WAAW,CAAC,KAAK,OAAO;AAAU,aAAK,OAAO,WAAW,MAAM,AAAS,cAAK,KAAK,OAAO;AAC9G,UAAI,KAAK,OAAO,KAAK,WAAW,KAAK,OAAO,KAAK,IAAI,WAAW,CAAC,KAAK,OAAO;AAAK,aAAK,OAAO,MAAM,MAAM,AAAI,SAAK,KAAK;AACxH,UAAI,KAAK,OAAO,KAAK,WAAW,KAAK,OAAO,KAAK,OAAO,WAAW,CAAC,KAAK,OAAO;AAAQ,aAAK,OAAO,SAAS,MAAM,AAAO,YAAK,KAAK;AACpI,UAAI,KAAK,OAAO,KAAK,WAAW,KAAK,OAAO,KAAK,QAAQ,WAAW,CAAC,KAAK,OAAO;AAAS,aAAK,OAAO,UAAU,MAAM,AAAQ,aAAK,KAAK;AACxI,UAAI,KAAK,OAAO,KAAK,WAAW,CAAC,KAAK,OAAO;AAAS,aAAK,OAAO,UAAU,MAAM,AAAQ,aAAK,KAAK;AACpG,UAAI,KAAK,OAAO,KAAK,WAAW,CAAC,KAAK,OAAO;AAAU,aAAK,OAAO,WAAW,MAAM,AAAS,cAAK,KAAK,OAAO;AAAA;AAEhH,oBAAgB,KAAK,MAAM,SAAQ;AACnC,QAAI,UAAW,MAAK,KAAK,QAAQ;AAAI,WAAK,KAAK,OAAO;AAAA;AAAA,QAIlD;AACJ,sBAAkB;AAClB,QAAI,KAAK,OAAO,WAAY,KAAK,OAAO,YAAY,MAAO,SAAU,GAAG,iBAAiB,KAAK,OAAO;AACnG,WAAK,QAAQ;AAWb,WAAK,IAAI,oBAAoB,KAAK,OAAO;AAEzC,UAAI,KAAK,OAAO,YAAY;AAC1B,aAAK,IAAI,uBAAuB,KAAK,OAAO;AAC5C,qBAAa,KAAK,OAAO;AACzB,qBAAa,MAAM,GAAG,MAAM,SAAS;AACrC,YAAI,CAAC;AAAM,eAAK,IAAI;AAAA;AAGtB,YAAM,GAAG,WAAW,KAAK,OAAO;AAChC,SAAG;AAIH,UAAI,KAAK,OAAO,YAAY;AAC1B,YAAI,KAAK,OAAO;AACd,eAAK,IAAI,mDAAmD,KAAK,OAAO;AACxE,aAAG,IAAI,IAAI,kCAAkC,KAAK,OAAO,aAAa,IAAI;AAAA;AAG5E,WAAG,IAAI,IAAI,4BAA4B;AAAA;AAEzC,YAAM,GAAG;AAAA;AAEX,oBAAgB,KAAK,MAAM,SAAQ;AACnC,QAAI,UAAW,MAAK,KAAK,WAAW;AAAI,WAAK,KAAK,UAAU;AAAA;AAAA,QAGxD;AAGJ;AACA;AACA;AACA;AACA,oBAAgB;AAChB,SAAK,QAAQ;AACb,gBAAY;AACZ,kBAAc,MAAM,KAAK,OAAO,SAAS,cAAc,OAAO,KAAK,OAAO;AAC1E,SAAK,KAAK,OAAO,KAAK,MAAM,SAAQ;AACpC,wBAAmB;AACjB,WAAK,QAAQ;AAEb,UAAI,CAAC,MAAK,SAAS,MAAK,MAAM;AAC5B,aAAK,IAAI,4BAA4B,MAAK;AAC1C;AAAA;AAGF,WAAK,QAAQ;AACb,UAAI,KAAK,OAAO;AACd,iBAAS,KAAK,OAAO,KAAK,IAAI,UAAU,AAAI,YAAQ,MAAK,OAAO,KAAK,UAAU;AAAA;AAE/E,aAAK,QAAQ;AACb,oBAAY;AACZ,iBAAS,KAAK,OAAO,KAAK,IAAI,UAAU,MAAM,AAAI,YAAQ,MAAK,OAAO,KAAK,UAAU;AACrF,aAAK,KAAK,MAAM,KAAK,MAAM,SAAQ;AAAA;AAIrC,WAAK,QAAQ;AACb,UAAI,KAAK,OAAO;AACd,oBAAY,KAAK,OAAO,KAAK,OAAO,UAAU,AAAO,eAAQ,MAAK,OAAO,KAAK,UAAU;AAAA;AAExF,aAAK,QAAQ;AACb,oBAAY;AACZ,oBAAY,KAAK,OAAO,KAAK,OAAO,UAAU,MAAM,AAAO,eAAQ,MAAK,OAAO,KAAK,UAAU;AAC9F,aAAK,KAAK,SAAS,KAAK,MAAM,SAAQ;AAAA;AAGxC,WAAK,QAAQ;AACb,UAAI,KAAK,OAAO;AACd,qBAAa,KAAK,OAAO,KAAK,QAAQ,UAAU,AAAQ,gBAAQ,MAAK,OAAO,KAAK,UAAU;AAAA;AAE3F,aAAK,QAAQ;AACb,oBAAY;AACZ,qBAAa,KAAK,OAAO,KAAK,QAAQ,UAAU,MAAM,AAAQ,gBAAQ,MAAK,OAAO,KAAK,UAAU;AACjG,aAAK,KAAK,UAAU,KAAK,MAAM,SAAQ;AAAA;AAEzC,WAAK,QAAQ;AAGb,UAAI,KAAK,OAAO;AACd,SAAC,QAAQ,WAAW,cAAc,MAAM,QAAQ,IAAI,CAAC,QAAQ,WAAW;AAAA;AAG1E,WAAK,QAAQ;AAEb,YAAK,MAAM;AAIX,uBAAkB,MAAK,YAAY,eAAe,MAAK,YAAY,eAE/D,OAAO,KAAK,IAAI,KAAK,IAAI,MAAK,YAAY,YAAY,GAAG,KAAK,MAAK,YAAY,YAAY,GAAG,KAAK,KAAK,IAAI,MAAK,YAAY,aAAa,GAAG,KAAK,MAAK,YAAY,aAAa,GAAG,OACnL;AAGJ,cAAQ,KAAK;AAAA,QACX,YAAY,MAAK;AAAA,QACjB,KAAK,MAAK;AAAA,QACV,MAAM,MAAK;AAAA,QACX,aAAa,MAAK;AAAA,QAClB,KAAK,OAAO;AAAA,QACZ,QAAQ,UAAU;AAAA,QAClB,kBAAkB,UAAU;AAAA,QAC5B,SAAS;AAAA,QACT,MAAO,aAAa,IAAK,KAAK,MAAM,YAAY,MAAM;AAAA;AAExD,WAAK,QAAQ;AAAA;AAEf,SAAK,QAAQ;AACb,QAAI,KAAK,OAAO;AACd,UAAI,KAAK,KAAK;AAAM,eAAO,KAAK,KAAK;AACrC,UAAI,KAAK,KAAK;AAAK,eAAO,KAAK,KAAK;AACpC,UAAI,KAAK,KAAK;AAAQ,eAAO,KAAK,KAAK;AACvC,UAAI,KAAK,KAAK;AAAS,eAAO,KAAK,KAAK;AAAA;AAE1C,WAAO;AAAA;AAAA,QAGH,0BAA0B;AAC9B,SAAK,QAAQ;AACb,SAAK,SAAS,UAAU,KAAK,QAAQ;AACrC,qBAAgB,AAAM,cAAQ,OAAO,KAAK;AAC1C,aAAQ,OAAO;AACf,WAAO,SAAQ;AAAA;AAAA,QAIX,2BAA2B;AAC/B,SAAK,QAAQ;AACb;AAGA,SAAK,SAAS,UAAU,KAAK,QAAQ;AACrC,QAAI,CAAC,KAAK,OAAO;AAAgB,WAAK,SAAS,UAAU,KAAK,QAAQ;AAGtE,SAAK,QAAQ;AACb,kBAAc,KAAK,OAAO;AAC1B,QAAI;AACF,WAAK,IAAI,OAAO;AAChB,aAAO,CAAE;AAAA;AAIX,WAAO,IAAI,QAAQ;AACjB;AACA;AACA;AAEA,wBAAkB;AAGlB,YAAM,KAAK;AAGX,YAAM,KAAK;AAEX,UAAI,K
AAK,OAAO;AAAQ,WAAG,SAAS;AACpC,WAAK,QAAQ;AAEb,kBAAY;AACZ,uBAAgB,AAAM,cAAQ,OAAO,KAAK;AAC1C,WAAK,KAAK,QAAQ,KAAK,MAAM,SAAQ;AACrC,WAAK,QAAQ;AAGb,UAAI,KAAK,OAAO;AACd,kBAAU,KAAK,OAAO,KAAK,UAAU,KAAK,WAAW,SAAQ,UAAU;AACvE,YAAI,KAAK,KAAK;AAAM,iBAAO,KAAK,KAAK;AAAA;AAErC,aAAK,QAAQ;AACb,oBAAY;AACZ,kBAAU,KAAK,OAAO,KAAK,UAAU,MAAM,KAAK,WAAW,SAAQ,UAAU;AAC7E,aAAK,KAAK,OAAO,KAAK,MAAM,SAAQ;AAAA;AAItC,WAAK,QAAQ;AACb,UAAI,KAAK,OAAO;AACd,kBAAU,KAAK,OAAO,KAAK,UAAU,KAAK,OAAO,QAAQ,cAAc,SAAQ,QAAQ,KAAK,UAAU;AACtG,YAAI,KAAK,KAAK;AAAM,iBAAO,KAAK,KAAK;AAAA;AAErC,aAAK,QAAQ;AACb,oBAAY;AACZ,kBAAU,KAAK,OAAO,KAAK,UAAU,MAAM,KAAK,OAAO,QAAQ,cAAc,SAAQ,QAAQ,KAAK,UAAU;AAC5G,aAAK,KAAK,OAAO,KAAK,MAAM,SAAQ;AAAA;AAEtC,WAAK,QAAQ;AAGb,WAAK,QAAQ;AACb,UAAI,KAAK,OAAO;AACd,kBAAU,KAAK,OAAO,KAAK,UAAU,KAAK,OAAO,SAAS,cAAc,SAAQ,QAAQ,KAAK,OAAO,QAAQ;AAC5G,YAAI,KAAK,KAAK;AAAM,iBAAO,KAAK,KAAK;AAAA;AAErC,aAAK,QAAQ;AACb,oBAAY;AACZ,kBAAU,KAAK,OAAO,KAAK,UAAU,MAAM,KAAK,OAAO,SAAS,cAAc,SAAQ,QAAQ,KAAK,OAAO,QAAQ;AAClH,aAAK,KAAK,OAAO,KAAK,MAAM,SAAQ;AAAA;AAKtC,UAAI,KAAK,OAAO;AACd,SAAC,SAAS,SAAS,WAAW,MAAM,QAAQ,IAAI,CAAC,SAAS,SAAS;AAAA;AAErE,eAAQ,OAAO;AAEf,UAAI,KAAK,OAAO;AAAQ,WAAG,SAAS;AACpC,WAAK,QAAQ;AAEb,uBAAiB;AACjB,UAAI,KAAK,OAAO,QAAQ;AACtB,oBAAY;AACZ,qBAAa,CAAE,MAAM,AAAQ,aAAK,UAAU,MAAM,AAAQ,aAAK,UAAU,MAAM,AAAQ,aAAK;AAC5F,YAAI,CAAC,KAAK,OAAO;AAAO,eAAK,KAAK,UAAU,KAAK,MAAM,SAAQ;AAAA,iBACtD,KAAK,KAAK;AAAS,iBAAO,KAAK,KAAK;AAAA;AAG/C,WAAK,KAAK,QAAQ,KAAK,MAAM,SAAQ;AACrC,WAAK,QAAQ;AACb,cAAQ,CAAE,MAAM,SAAS,MAAM,SAAS,MAAM,SAAS,SAAS,YAAY,aAAa,KAAK,MAAM,QAAQ,SAAQ;AAAA;AAAA;AAAA,QAIlH;AACJ,mBAAe,IAAI,UAAU,KAAK;AAClC,UAAM,KAAK,OAAO,QAAQ;AAC1B,SAAK,IAAI;AAAA;AAAA;",
"names": []
}
diff --git a/dist/human.esm.json b/dist/human.esm.json
index c986486c..f711d3d7 100644
--- a/dist/human.esm.json
+++ b/dist/human.esm.json
@@ -12600,7 +12600,7 @@
"imports": []
},
"src/human.js": {
- "bytes": 14550,
+ "bytes": 14787,
"imports": [
{
"path": "src/tf.js"
@@ -12695,7 +12695,7 @@
"dist/human.esm.js.map": {
"imports": [],
"inputs": {},
- "bytes": 5460397
+ "bytes": 5461264
},
"dist/human.esm.js": {
"imports": [],
@@ -13433,13 +13433,13 @@
"bytesInOutput": 24
},
"src/human.js": {
- "bytesInOutput": 12120
+ "bytesInOutput": 12359
},
"src/human.js": {
"bytesInOutput": 0
}
},
- "bytes": 3443254
+ "bytes": 3443493
}
}
}
diff --git a/src/human.js b/src/human.js
index 2d32a664..578d1a2a 100644
--- a/src/human.js
+++ b/src/human.js
@@ -284,6 +284,14 @@ class Human {
return faceRes;
}
+ async image(input, userConfig = {}) {
+ this.state = 'image';
+ this.config = mergeDeep(this.config, userConfig);
+ const process = image.process(input, this.config);
+ process.tensor.dispose();
+ return process.canvas;
+ }
+
// main detect function
async detect(input, userConfig = {}) {
this.state = 'config';
diff --git a/wiki b/wiki
index 6b460e9f..5dcbe8ad 160000
--- a/wiki
+++ b/wiki
@@ -1 +1 @@
-Subproject commit 6b460e9f5252038ef7a94b044fdb789e35d610bd
+Subproject commit 5dcbe8ad56fc4dc21378046c225185e6203250eb